diff --git a/01-ai/Yi-1.5-9B-32K/result_2024-07-04 10:40:00.json b/01-ai/Yi-1.5-9B-32K/result_2024-07-04 10:40:00.json new file mode 100644 index 0000000000000000000000000000000000000000..eba6a538387129b820cceff97bd15d4cfae02367 --- /dev/null +++ b/01-ai/Yi-1.5-9B-32K/result_2024-07-04 10:40:00.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.29948805460750855, + "acc_stderr": 0.013385021637313567, + "acc_norm": 0.3506825938566553, + "acc_norm_stderr": 0.013944635930726089 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3333001394144593, + "acc_stderr": 0.004704293898729902, + "acc_norm": 0.4137621987651862, + "acc_norm_stderr": 0.004915003499517831 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.47953216374269003, + "acc_stderr": 0.0383161053282193, + "acc_norm": 0.47953216374269003, + "acc_norm_stderr": 0.0383161053282193 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5631067961165048, + "acc_stderr": 0.049111471073657764, + "acc_norm": 0.5631067961165048, + "acc_norm_stderr": 0.049111471073657764 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.47509578544061304, + "acc_stderr": 0.01785777070490102, + "acc_norm": 0.47509578544061304, + "acc_norm_stderr": 0.01785777070490102 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.28888888888888886, + "acc_stderr": 0.0391545063041425, + "acc_norm": 0.28888888888888886, + "acc_norm_stderr": 0.0391545063041425 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.46808510638297873, + "acc_stderr": 0.03261936918467382, + "acc_norm": 0.46808510638297873, + "acc_norm_stderr": 0.03261936918467382 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.45180722891566266, + "acc_stderr": 0.03874371556587953, + "acc_norm": 0.45180722891566266, + "acc_norm_stderr": 0.03874371556587953 + }, + 
"harness|ko_mmlu_philosophy|5": { + "acc": 0.47266881028938906, + "acc_stderr": 0.028355633568328188, + "acc_norm": 0.47266881028938906, + "acc_norm_stderr": 0.028355633568328188 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.45739910313901344, + "acc_stderr": 0.033435777055830646, + "acc_norm": 0.45739910313901344, + "acc_norm_stderr": 0.033435777055830646 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5267175572519084, + "acc_stderr": 0.04379024936553894, + "acc_norm": 0.5267175572519084, + "acc_norm_stderr": 0.04379024936553894 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.035402943770953675, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.035402943770953675 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5724137931034483, + "acc_stderr": 0.04122737111370332, + "acc_norm": 0.5724137931034483, + "acc_norm_stderr": 0.04122737111370332 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.04617034827006716, + "acc_norm": 0.3137254901960784, + "acc_norm_stderr": 0.04617034827006716 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5, + "acc_stderr": 0.032478490123081544, + "acc_norm": 0.5, + "acc_norm_stderr": 0.032478490123081544 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.47692307692307695, + "acc_stderr": 0.025323990861736125, + "acc_norm": 0.47692307692307695, + "acc_norm_stderr": 0.025323990861736125 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.53, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.53, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + 
"harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5740740740740741, + "acc_stderr": 0.047803436269367894, + "acc_norm": 0.5740740740740741, + "acc_norm_stderr": 0.047803436269367894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4187192118226601, + "acc_stderr": 0.03471192860518468, + "acc_norm": 0.4187192118226601, + "acc_norm_stderr": 0.03471192860518468 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.47419354838709676, + "acc_stderr": 0.02840609505765332, + "acc_norm": 0.47419354838709676, + "acc_norm_stderr": 0.02840609505765332 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6752136752136753, + "acc_stderr": 0.03067902276549883, + "acc_norm": 0.6752136752136753, + "acc_norm_stderr": 0.03067902276549883 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.44150943396226416, + "acc_stderr": 0.030561590426731833, + "acc_norm": 0.44150943396226416, + "acc_norm_stderr": 0.030561590426731833 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4727272727272727, + "acc_stderr": 0.04782001791380063, + "acc_norm": 0.4727272727272727, + "acc_norm_stderr": 0.04782001791380063 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.4185185185185185, + "acc_stderr": 0.030078013075022066, + "acc_norm": 0.4185185185185185, + "acc_norm_stderr": 0.030078013075022066 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.03757949922943343, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.03757949922943343 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6069651741293532, + "acc_stderr": 0.0345368246603156, + "acc_norm": 0.6069651741293532, + "acc_norm_stderr": 0.0345368246603156 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4046242774566474, + "acc_stderr": 0.03742461193887248, + "acc_norm": 0.4046242774566474, + "acc_norm_stderr": 0.03742461193887248 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.5476190476190477, + "acc_stderr": 
0.02563425811555495, + "acc_norm": 0.5476190476190477, + "acc_norm_stderr": 0.02563425811555495 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3472222222222222, + "acc_stderr": 0.039812405437178615, + "acc_norm": 0.3472222222222222, + "acc_norm_stderr": 0.039812405437178615 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.57, + "acc_stderr": 0.04975698519562426, + "acc_norm": 0.57, + "acc_norm_stderr": 0.04975698519562426 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.49710982658959535, + "acc_stderr": 0.026918645383239015, + "acc_norm": 0.49710982658959535, + "acc_norm_stderr": 0.026918645383239015 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5276073619631901, + "acc_stderr": 0.03922378290610991, + "acc_norm": 0.5276073619631901, + "acc_norm_stderr": 0.03922378290610991 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.49691358024691357, + "acc_stderr": 0.027820214158594377, + "acc_norm": 0.49691358024691357, + "acc_norm_stderr": 0.027820214158594377 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.49222797927461137, + "acc_stderr": 0.03608003225569654, + "acc_norm": 0.49222797927461137, + "acc_norm_stderr": 0.03608003225569654 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.41228070175438597, + "acc_stderr": 0.046306532033665956, + "acc_norm": 0.41228070175438597, + "acc_norm_stderr": 0.046306532033665956 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5027522935779817, + "acc_stderr": 0.02143699835976532, + "acc_norm": 0.5027522935779817, + "acc_norm_stderr": 0.02143699835976532 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.40476190476190477, + "acc_stderr": 
0.04390259265377561, + "acc_norm": 0.40476190476190477, + "acc_norm_stderr": 0.04390259265377561 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.49019607843137253, + "acc_stderr": 0.028624412550167958, + "acc_norm": 0.49019607843137253, + "acc_norm_stderr": 0.028624412550167958 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7355371900826446, + "acc_stderr": 0.04026187527591205, + "acc_norm": 0.7355371900826446, + "acc_norm_stderr": 0.04026187527591205 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.45394736842105265, + "acc_stderr": 0.04051646342874142, + "acc_norm": 0.45394736842105265, + "acc_norm_stderr": 0.04051646342874142 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.39705882352941174, + "acc_stderr": 0.019794488900024113, + "acc_norm": 0.39705882352941174, + "acc_norm_stderr": 0.019794488900024113 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.40070921985815605, + "acc_stderr": 0.029233465745573086, + "acc_norm": 0.40070921985815605, + "acc_norm_stderr": 0.029233465745573086 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.39285714285714285, + "acc_stderr": 0.04635550135609976, + "acc_norm": 0.39285714285714285, + "acc_norm_stderr": 0.04635550135609976 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4675925925925926, + "acc_stderr": 0.034028015813589656, + "acc_norm": 0.4675925925925926, + "acc_norm_stderr": 0.034028015813589656 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.3329608938547486, + "acc_stderr": 0.015761716178397552, + "acc_norm": 0.3329608938547486, + "acc_norm_stderr": 0.015761716178397552 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + 
"harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.76, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.76, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.35294117647058826, + "acc_stderr": 0.029029422815681404, + "acc_norm": 0.35294117647058826, + "acc_norm_stderr": 0.029029422815681404 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6163265306122448, + "acc_stderr": 0.031130880396235943, + "acc_norm": 0.6163265306122448, + "acc_norm_stderr": 0.031130880396235943 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5654008438818565, + "acc_stderr": 0.03226759995510145, + "acc_norm": 0.5654008438818565, + "acc_norm_stderr": 0.03226759995510145 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.36571056062581486, + "acc_stderr": 0.012301028188840567, + "acc_norm": 0.36571056062581486, + "acc_norm_stderr": 0.012301028188840567 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4852941176470588, + "acc_stderr": 0.03507793834791324, + "acc_norm": 0.4852941176470588, + "acc_norm_stderr": 0.03507793834791324 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5151515151515151, + "acc_stderr": 0.03902551007374448, + "acc_norm": 0.5151515151515151, + "acc_norm_stderr": 0.03902551007374448 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2937576499388005, + "mc1_stderr": 0.015945068581236614, + "mc2": 0.4670848140389129, + "mc2_stderr": 0.01585178282587417 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.47107438016528924, + "acc_stderr": 0.017161563949916348, + "acc_norm": 0.5171192443919717, + "acc_norm_stderr": 0.017180275246085626 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + 
"harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + 
"harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "01-ai/Yi-1.5-9B-32K", + "model_sha": "c0239dbc923b8a2b5ca849763bdd592d39c60850", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/01-ai/Yi-1.5-9B-Chat-16K/result_2024-05-20 16:53:58.json b/01-ai/Yi-1.5-9B-Chat-16K/result_2024-05-20 16:53:58.json new file mode 100644 index 0000000000000000000000000000000000000000..a8ad2c7c0eb1fa693e6a693057b3317c29558559 --- /dev/null +++ b/01-ai/Yi-1.5-9B-Chat-16K/result_2024-05-20 16:53:58.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3122866894197952, + "acc_stderr": 0.013542598541688065, + "acc_norm": 0.35238907849829354, + "acc_norm_stderr": 0.013960142600598673 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3403704441346345, + "acc_stderr": 0.004728653488866913, + "acc_norm": 0.4166500697072296, + "acc_norm_stderr": 0.004919962822208309 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.45614035087719296, + "acc_stderr": 0.03820042586602966, + "acc_norm": 0.45614035087719296, + "acc_norm_stderr": 0.03820042586602966 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5242718446601942, + "acc_stderr": 0.04944901092973779, + "acc_norm": 0.5242718446601942, + "acc_norm_stderr": 0.04944901092973779 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.47509578544061304, + "acc_stderr": 0.01785777070490102, + "acc_norm": 0.47509578544061304, + "acc_norm_stderr": 
0.01785777070490102 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2814814814814815, + "acc_stderr": 0.03885004245800254, + "acc_norm": 0.2814814814814815, + "acc_norm_stderr": 0.03885004245800254 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4297872340425532, + "acc_stderr": 0.03236214467715564, + "acc_norm": 0.4297872340425532, + "acc_norm_stderr": 0.03236214467715564 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4457831325301205, + "acc_stderr": 0.03869543323472101, + "acc_norm": 0.4457831325301205, + "acc_norm_stderr": 0.03869543323472101 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4630225080385852, + "acc_stderr": 0.02832032583010592, + "acc_norm": 0.4630225080385852, + "acc_norm_stderr": 0.02832032583010592 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.49327354260089684, + "acc_stderr": 0.03355476596234354, + "acc_norm": 0.49327354260089684, + "acc_norm_stderr": 0.03355476596234354 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4961832061068702, + "acc_stderr": 0.04385162325601553, + "acc_norm": 0.4961832061068702, + "acc_norm_stderr": 0.04385162325601553 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145631, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145631 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.494949494949495, + "acc_stderr": 0.035621707606254015, + "acc_norm": 0.494949494949495, + "acc_norm_stderr": 0.035621707606254015 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5241379310344828, + "acc_stderr": 0.0416180850350153, + "acc_norm": 0.5241379310344828, + "acc_norm_stderr": 0.0416180850350153 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.04533838195929776, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 
0.04533838195929776 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4789915966386555, + "acc_stderr": 0.03244980849990029, + "acc_norm": 0.4789915966386555, + "acc_norm_stderr": 0.03244980849990029 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.46153846153846156, + "acc_stderr": 0.025275892070240634, + "acc_norm": 0.46153846153846156, + "acc_norm_stderr": 0.025275892070240634 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.59, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.59, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5462962962962963, + "acc_stderr": 0.04812917324536823, + "acc_norm": 0.5462962962962963, + "acc_norm_stderr": 0.04812917324536823 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3793103448275862, + "acc_stderr": 0.03413963805906235, + "acc_norm": 0.3793103448275862, + "acc_norm_stderr": 0.03413963805906235 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.45483870967741935, + "acc_stderr": 0.02832774309156106, + "acc_norm": 0.45483870967741935, + "acc_norm_stderr": 0.02832774309156106 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6538461538461539, + "acc_stderr": 0.0311669573672359, + "acc_norm": 0.6538461538461539, + "acc_norm_stderr": 0.0311669573672359 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4226415094339623, + "acc_stderr": 0.03040233144576954, + "acc_norm": 0.4226415094339623, + "acc_norm_stderr": 0.03040233144576954 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.42727272727272725, + "acc_stderr": 0.04738198703545483, + "acc_norm": 0.42727272727272725, + "acc_norm_stderr": 0.04738198703545483 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.4, + "acc_stderr": 0.02986960509531691, + "acc_norm": 0.4, + 
"acc_norm_stderr": 0.02986960509531691 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + "acc_stderr": 0.037345356767871984, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.037345356767871984 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5522388059701493, + "acc_stderr": 0.03516184772952167, + "acc_norm": 0.5522388059701493, + "acc_norm_stderr": 0.03516184772952167 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.42196531791907516, + "acc_stderr": 0.037657466938651504, + "acc_norm": 0.42196531791907516, + "acc_norm_stderr": 0.037657466938651504 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.544973544973545, + "acc_stderr": 0.025646928361049398, + "acc_norm": 0.544973544973545, + "acc_norm_stderr": 0.025646928361049398 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2708333333333333, + "acc_stderr": 0.03716177437566017, + "acc_norm": 0.2708333333333333, + "acc_norm_stderr": 0.03716177437566017 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939098, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939098 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.65, + "acc_stderr": 0.04793724854411019, + "acc_norm": 0.65, + "acc_norm_stderr": 0.04793724854411019 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4797687861271676, + "acc_stderr": 0.026897049996382875, + "acc_norm": 0.4797687861271676, + "acc_norm_stderr": 0.026897049996382875 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.44785276073619634, + "acc_stderr": 0.03906947479456601, + "acc_norm": 0.44785276073619634, + "acc_norm_stderr": 0.03906947479456601 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.49382716049382713, + "acc_stderr": 0.027818623962583295, + "acc_norm": 0.49382716049382713, + "acc_norm_stderr": 0.027818623962583295 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.45, + "acc_stderr": 0.049999999999999996, + "acc_norm": 0.45, + 
"acc_norm_stderr": 0.049999999999999996 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.47150259067357514, + "acc_stderr": 0.03602573571288441, + "acc_norm": 0.47150259067357514, + "acc_norm_stderr": 0.03602573571288441 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.38596491228070173, + "acc_stderr": 0.045796394220704355, + "acc_norm": 0.38596491228070173, + "acc_norm_stderr": 0.045796394220704355 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.46605504587155966, + "acc_stderr": 0.02138786335035399, + "acc_norm": 0.46605504587155966, + "acc_norm_stderr": 0.02138786335035399 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4603174603174603, + "acc_stderr": 0.04458029125470973, + "acc_norm": 0.4603174603174603, + "acc_norm_stderr": 0.04458029125470973 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.45098039215686275, + "acc_stderr": 0.02849199358617157, + "acc_norm": 0.45098039215686275, + "acc_norm_stderr": 0.02849199358617157 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.53, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.53, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.628099173553719, + "acc_stderr": 0.04412015806624504, + "acc_norm": 0.628099173553719, + "acc_norm_stderr": 0.04412015806624504 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.375, + "acc_stderr": 0.039397364351956274, + "acc_norm": 0.375, + "acc_norm_stderr": 0.039397364351956274 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.39869281045751637, + "acc_stderr": 0.019808281317449848, + "acc_norm": 0.39869281045751637, + "acc_norm_stderr": 0.019808281317449848 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.39361702127659576, + "acc_stderr": 0.02914454478159615, + "acc_norm": 0.39361702127659576, + "acc_norm_stderr": 0.02914454478159615 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4375, + "acc_stderr": 
0.04708567521880525, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.04708567521880525 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4537037037037037, + "acc_stderr": 0.03395322726375797, + "acc_norm": 0.4537037037037037, + "acc_norm_stderr": 0.03395322726375797 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.28938547486033517, + "acc_stderr": 0.015166544550490317, + "acc_norm": 0.28938547486033517, + "acc_norm_stderr": 0.015166544550490317 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.78, + "acc_stderr": 0.04163331998932261, + "acc_norm": 0.78, + "acc_norm_stderr": 0.04163331998932261 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.33088235294117646, + "acc_stderr": 0.02858270975389844, + "acc_norm": 0.33088235294117646, + "acc_norm_stderr": 0.02858270975389844 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5469387755102041, + "acc_stderr": 0.03186785930004128, + "acc_norm": 0.5469387755102041, + "acc_norm_stderr": 0.03186785930004128 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5274261603375527, + "acc_stderr": 0.03249822718301304, + "acc_norm": 0.5274261603375527, + "acc_norm_stderr": 0.03249822718301304 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3428943937418514, + "acc_stderr": 0.012123463271585892, + "acc_norm": 0.3428943937418514, + "acc_norm_stderr": 0.012123463271585892 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.45098039215686275, + "acc_stderr": 0.03492406104163614, + "acc_norm": 0.45098039215686275, + "acc_norm_stderr": 0.03492406104163614 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.47878787878787876, + "acc_stderr": 0.03900828913737302, + "acc_norm": 0.47878787878787876, + "acc_norm_stderr": 0.03900828913737302 + }, + 
"harness|ko_truthfulqa_mc|0": { + "mc1": 0.3219094247246022, + "mc1_stderr": 0.016355567611960397, + "mc2": 0.5121087237362004, + "mc2_stderr": 0.01621113484074564 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4923258559622196, + "acc_stderr": 0.017188329219654276, + "acc_norm": 0.5171192443919717, + "acc_norm_stderr": 0.017180275246085626 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "01-ai/Yi-1.5-9B-Chat-16K", + "model_sha": "2b397e5f0fab87984efa66856c5c4ed4bbe68b50", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/01-ai/Yi-1.5-9B-Chat/result_2024-05-13 18:14:51.json b/01-ai/Yi-1.5-9B-Chat/result_2024-05-13 18:14:51.json new file mode 100644 index 0000000000000000000000000000000000000000..d2c715b5cb7b0171e650151eaa9253a7f8ee2f1e --- /dev/null +++ b/01-ai/Yi-1.5-9B-Chat/result_2024-05-13 18:14:51.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.32081911262798635, + "acc_stderr": 0.013640943091946528, + "acc_norm": 0.35580204778157, + "acc_norm_stderr": 0.01399057113791876 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3337980481975702, + "acc_stderr": 0.004706048116764947, + 
"acc_norm": 0.40579565823541125, + "acc_norm_stderr": 0.004900417982582044 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.3742690058479532, + "acc_stderr": 0.03711601185389482, + "acc_norm": 0.3742690058479532, + "acc_norm_stderr": 0.03711601185389482 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5339805825242718, + "acc_stderr": 0.04939291447273481, + "acc_norm": 0.5339805825242718, + "acc_norm_stderr": 0.04939291447273481 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.438058748403576, + "acc_stderr": 0.017742232238257223, + "acc_norm": 0.438058748403576, + "acc_norm_stderr": 0.017742232238257223 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2814814814814815, + "acc_stderr": 0.038850042458002526, + "acc_norm": 0.2814814814814815, + "acc_norm_stderr": 0.038850042458002526 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909284, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909284 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.46808510638297873, + "acc_stderr": 0.03261936918467382, + "acc_norm": 0.46808510638297873, + "acc_norm_stderr": 0.03261936918467382 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4036144578313253, + "acc_stderr": 0.03819486140758397, + "acc_norm": 0.4036144578313253, + "acc_norm_stderr": 0.03819486140758397 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4340836012861736, + "acc_stderr": 0.02815023224453559, + "acc_norm": 0.4340836012861736, + "acc_norm_stderr": 0.02815023224453559 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4977578475336323, + "acc_stderr": 0.033557465352232634, + "acc_norm": 0.4977578475336323, + "acc_norm_stderr": 0.033557465352232634 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.42748091603053434, + "acc_stderr": 0.043389203057924, + "acc_norm": 0.42748091603053434, + "acc_norm_stderr": 0.043389203057924 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.53, + "acc_stderr": 0.050161355804659205, + 
"acc_norm": 0.53, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5151515151515151, + "acc_stderr": 0.0356071651653106, + "acc_norm": 0.5151515151515151, + "acc_norm_stderr": 0.0356071651653106 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5586206896551724, + "acc_stderr": 0.04137931034482757, + "acc_norm": 0.5586206896551724, + "acc_norm_stderr": 0.04137931034482757 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.042801058373643966, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.042801058373643966 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.46218487394957986, + "acc_stderr": 0.032385469487589795, + "acc_norm": 0.46218487394957986, + "acc_norm_stderr": 0.032385469487589795 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.49230769230769234, + "acc_stderr": 0.025348006031534788, + "acc_norm": 0.49230769230769234, + "acc_norm_stderr": 0.025348006031534788 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.55, + "acc_stderr": 0.04999999999999999, + "acc_norm": 0.55, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5370370370370371, + "acc_stderr": 0.04820403072760627, + "acc_norm": 0.5370370370370371, + "acc_norm_stderr": 0.04820403072760627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.43842364532019706, + "acc_stderr": 0.03491207857486519, + "acc_norm": 0.43842364532019706, + "acc_norm_stderr": 0.03491207857486519 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.41935483870967744, + "acc_stderr": 0.02807158890109185, + "acc_norm": 0.41935483870967744, + "acc_norm_stderr": 0.02807158890109185 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.688034188034188, + 
"acc_stderr": 0.030351527323344937, + "acc_norm": 0.688034188034188, + "acc_norm_stderr": 0.030351527323344937 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4339622641509434, + "acc_stderr": 0.03050329201334259, + "acc_norm": 0.4339622641509434, + "acc_norm_stderr": 0.03050329201334259 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.42727272727272725, + "acc_stderr": 0.04738198703545483, + "acc_norm": 0.42727272727272725, + "acc_norm_stderr": 0.04738198703545483 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.030401786406101503, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.030401786406101503 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.36423841059602646, + "acc_stderr": 0.03929111781242742, + "acc_norm": 0.36423841059602646, + "acc_norm_stderr": 0.03929111781242742 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5522388059701493, + "acc_stderr": 0.03516184772952167, + "acc_norm": 0.5522388059701493, + "acc_norm_stderr": 0.03516184772952167 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4161849710982659, + "acc_stderr": 0.03758517775404947, + "acc_norm": 0.4161849710982659, + "acc_norm_stderr": 0.03758517775404947 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.5185185185185185, + "acc_stderr": 0.02573364199183898, + "acc_norm": 0.5185185185185185, + "acc_norm_stderr": 0.02573364199183898 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3125, + "acc_stderr": 0.038760854559127644, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.038760854559127644 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237101, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237101 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 
0.48554913294797686, + "acc_stderr": 0.02690784985628254, + "acc_norm": 0.48554913294797686, + "acc_norm_stderr": 0.02690784985628254 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.43558282208588955, + "acc_stderr": 0.038956324641389366, + "acc_norm": 0.43558282208588955, + "acc_norm_stderr": 0.038956324641389366 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4567901234567901, + "acc_stderr": 0.027716661650194038, + "acc_norm": 0.4567901234567901, + "acc_norm_stderr": 0.027716661650194038 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.44559585492227977, + "acc_stderr": 0.03587014986075659, + "acc_norm": 0.44559585492227977, + "acc_norm_stderr": 0.03587014986075659 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.41228070175438597, + "acc_stderr": 0.04630653203366596, + "acc_norm": 0.41228070175438597, + "acc_norm_stderr": 0.04630653203366596 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.46422018348623856, + "acc_stderr": 0.021382364775701906, + "acc_norm": 0.46422018348623856, + "acc_norm_stderr": 0.021382364775701906 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4603174603174603, + "acc_stderr": 0.04458029125470973, + "acc_norm": 0.4603174603174603, + "acc_norm_stderr": 0.04458029125470973 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.48366013071895425, + "acc_stderr": 0.028614624752805413, + "acc_norm": 0.48366013071895425, + "acc_norm_stderr": 0.028614624752805413 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6528925619834711, + "acc_stderr": 0.04345724570292534, + "acc_norm": 0.6528925619834711, + "acc_norm_stderr": 0.04345724570292534 + }, + 
"harness|ko_mmlu_astronomy|5": { + "acc": 0.45394736842105265, + "acc_stderr": 0.04051646342874143, + "acc_norm": 0.45394736842105265, + "acc_norm_stderr": 0.04051646342874143 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3758169934640523, + "acc_stderr": 0.019594021136577454, + "acc_norm": 0.3758169934640523, + "acc_norm_stderr": 0.019594021136577454 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.4148936170212766, + "acc_stderr": 0.0293922365846125, + "acc_norm": 0.4148936170212766, + "acc_norm_stderr": 0.0293922365846125 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.04697113923010213, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.04697113923010213 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5046296296296297, + "acc_stderr": 0.03409825519163572, + "acc_norm": 0.5046296296296297, + "acc_norm_stderr": 0.03409825519163572 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.28268156424581004, + "acc_stderr": 0.015060381730018094, + "acc_norm": 0.28268156424581004, + "acc_norm_stderr": 0.015060381730018094 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.71, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.71, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.27941176470588236, + "acc_stderr": 0.027257202606114944, + "acc_norm": 0.27941176470588236, + "acc_norm_stderr": 0.027257202606114944 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5306122448979592, + "acc_stderr": 0.031949171367580624, + "acc_norm": 0.5306122448979592, + "acc_norm_stderr": 0.031949171367580624 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.43037974683544306, + "acc_stderr": 0.032230171959375976, + 
"acc_norm": 0.43037974683544306, + "acc_norm_stderr": 0.032230171959375976 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3520208604954368, + "acc_stderr": 0.012198140605353593, + "acc_norm": 0.3520208604954368, + "acc_norm_stderr": 0.012198140605353593 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4019607843137255, + "acc_stderr": 0.034411900234824655, + "acc_norm": 0.4019607843137255, + "acc_norm_stderr": 0.034411900234824655 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.42424242424242425, + "acc_stderr": 0.038592681420702615, + "acc_norm": 0.42424242424242425, + "acc_norm_stderr": 0.038592681420702615 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.33047735618115054, + "mc1_stderr": 0.01646676961369829, + "mc2": 0.5010992575203865, + "mc2_stderr": 0.016374030576131873 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.47461629279811096, + "acc_stderr": 0.01716818720142925, + "acc_norm": 0.49586776859504134, + "acc_norm_stderr": 0.01718976703213082 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + 
"harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "01-ai/Yi-1.5-9B-Chat", + "model_sha": "6afa72fa85c12128e9716fc189b6fc21fe26da83", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 
1, + "max_samples": null + } +} \ No newline at end of file diff --git a/01-ai/Yi-1.5-9B/result_2024-05-13 18:14:57.json b/01-ai/Yi-1.5-9B/result_2024-05-13 18:14:57.json new file mode 100644 index 0000000000000000000000000000000000000000..f6582a674974e03325053d72ee3b2db0c4dc2055 --- /dev/null +++ b/01-ai/Yi-1.5-9B/result_2024-05-13 18:14:57.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.30887372013651876, + "acc_stderr": 0.013501770929344003, + "acc_norm": 0.3506825938566553, + "acc_norm_stderr": 0.013944635930726085 + }, + "harness|ko_hellaswag|10": { + "acc": 0.33419637522405893, + "acc_stderr": 0.004707447244200622, + "acc_norm": 0.41674965146385184, + "acc_norm_stderr": 0.004920130733271773 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.47953216374269003, + "acc_stderr": 0.0383161053282193, + "acc_norm": 0.47953216374269003, + "acc_norm_stderr": 0.0383161053282193 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5728155339805825, + "acc_stderr": 0.048979577377811674, + "acc_norm": 0.5728155339805825, + "acc_norm_stderr": 0.048979577377811674 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.46998722860791825, + "acc_stderr": 0.017847723086649118, + "acc_norm": 0.46998722860791825, + "acc_norm_stderr": 0.017847723086649118 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.31851851851851853, + "acc_stderr": 0.0402477840197711, + "acc_norm": 0.31851851851851853, + "acc_norm_stderr": 0.0402477840197711 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4595744680851064, + "acc_stderr": 0.032579014820998356, + "acc_norm": 0.4595744680851064, + "acc_norm_stderr": 0.032579014820998356 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.40963855421686746, + "acc_stderr": 0.03828401115079022, + "acc_norm": 0.40963855421686746, + "acc_norm_stderr": 
0.03828401115079022 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5048231511254019, + "acc_stderr": 0.028396770444111298, + "acc_norm": 0.5048231511254019, + "acc_norm_stderr": 0.028396770444111298 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.43946188340807174, + "acc_stderr": 0.03331092511038179, + "acc_norm": 0.43946188340807174, + "acc_norm_stderr": 0.03331092511038179 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4961832061068702, + "acc_stderr": 0.043851623256015534, + "acc_norm": 0.4961832061068702, + "acc_norm_stderr": 0.043851623256015534 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956913, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956913 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5757575757575758, + "acc_stderr": 0.03521224908841585, + "acc_norm": 0.5757575757575758, + "acc_norm_stderr": 0.03521224908841585 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5793103448275863, + "acc_stderr": 0.0411391498118926, + "acc_norm": 0.5793103448275863, + "acc_norm_stderr": 0.0411391498118926 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04690650298201942, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04690650298201942 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5294117647058824, + "acc_stderr": 0.032422250271150053, + "acc_norm": 0.5294117647058824, + "acc_norm_stderr": 0.032422250271150053 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4948717948717949, + "acc_stderr": 0.02534967290683866, + "acc_norm": 0.4948717948717949, + "acc_norm_stderr": 0.02534967290683866 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + 
"acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5277777777777778, + "acc_stderr": 0.04826217294139894, + "acc_norm": 0.5277777777777778, + "acc_norm_stderr": 0.04826217294139894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.45320197044334976, + "acc_stderr": 0.035025446508458714, + "acc_norm": 0.45320197044334976, + "acc_norm_stderr": 0.035025446508458714 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.46774193548387094, + "acc_stderr": 0.02838474778881333, + "acc_norm": 0.46774193548387094, + "acc_norm_stderr": 0.02838474778881333 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.688034188034188, + "acc_stderr": 0.030351527323344948, + "acc_norm": 0.688034188034188, + "acc_norm_stderr": 0.030351527323344948 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4641509433962264, + "acc_stderr": 0.030693675018458003, + "acc_norm": 0.4641509433962264, + "acc_norm_stderr": 0.030693675018458003 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4909090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.4909090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.42592592592592593, + "acc_stderr": 0.03014913560136594, + "acc_norm": 0.42592592592592593, + "acc_norm_stderr": 0.03014913560136594 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33774834437086093, + "acc_stderr": 0.038615575462551684, + "acc_norm": 0.33774834437086093, + "acc_norm_stderr": 0.038615575462551684 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6268656716417911, + "acc_stderr": 0.034198326081760065, + "acc_norm": 0.6268656716417911, + "acc_norm_stderr": 0.034198326081760065 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.36416184971098264, + "acc_stderr": 0.03669072477416907, + "acc_norm": 0.36416184971098264, + "acc_norm_stderr": 0.03669072477416907 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + 
"acc": 0.5317460317460317, + "acc_stderr": 0.0256993528321318, + "acc_norm": 0.5317460317460317, + "acc_norm_stderr": 0.0256993528321318 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2986111111111111, + "acc_stderr": 0.03827052357950756, + "acc_norm": 0.2986111111111111, + "acc_norm_stderr": 0.03827052357950756 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.57, + "acc_stderr": 0.04975698519562426, + "acc_norm": 0.57, + "acc_norm_stderr": 0.04975698519562426 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5057803468208093, + "acc_stderr": 0.026917296179149123, + "acc_norm": 0.5057803468208093, + "acc_norm_stderr": 0.026917296179149123 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.49079754601226994, + "acc_stderr": 0.039277056007874414, + "acc_norm": 0.49079754601226994, + "acc_norm_stderr": 0.039277056007874414 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.49382716049382713, + "acc_stderr": 0.027818623962583302, + "acc_norm": 0.49382716049382713, + "acc_norm_stderr": 0.027818623962583302 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5129533678756477, + "acc_stderr": 0.0360722806104775, + "acc_norm": 0.5129533678756477, + "acc_norm_stderr": 0.0360722806104775 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.4298245614035088, + "acc_stderr": 0.046570472605949646, + "acc_norm": 0.4298245614035088, + "acc_norm_stderr": 0.046570472605949646 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.4954128440366973, + "acc_stderr": 0.021436420955529435, + "acc_norm": 0.4954128440366973, + "acc_norm_stderr": 0.021436420955529435 + }, + 
"harness|ko_mmlu_formal_logic|5": { + "acc": 0.4523809523809524, + "acc_stderr": 0.044518079590553275, + "acc_norm": 0.4523809523809524, + "acc_norm_stderr": 0.044518079590553275 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4869281045751634, + "acc_stderr": 0.028620130800700246, + "acc_norm": 0.4869281045751634, + "acc_norm_stderr": 0.028620130800700246 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.55, + "acc_stderr": 0.05, + "acc_norm": 0.55, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7107438016528925, + "acc_stderr": 0.04139112727635464, + "acc_norm": 0.7107438016528925, + "acc_norm_stderr": 0.04139112727635464 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.48026315789473684, + "acc_stderr": 0.04065771002562605, + "acc_norm": 0.48026315789473684, + "acc_norm_stderr": 0.04065771002562605 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.41830065359477125, + "acc_stderr": 0.019955975145835546, + "acc_norm": 0.41830065359477125, + "acc_norm_stderr": 0.019955975145835546 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.41134751773049644, + "acc_stderr": 0.02935491115994098, + "acc_norm": 0.41134751773049644, + "acc_norm_stderr": 0.02935491115994098 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.375, + "acc_stderr": 0.04595091388086298, + "acc_norm": 0.375, + "acc_norm_stderr": 0.04595091388086298 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.03409386946992699, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.03409386946992699 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.33519553072625696, + "acc_stderr": 0.01578800719018589, + "acc_norm": 0.33519553072625696, + "acc_norm_stderr": 0.01578800719018589 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + 
"harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.77, + "acc_stderr": 0.04229525846816505, + "acc_norm": 0.77, + "acc_norm_stderr": 0.04229525846816505 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.33088235294117646, + "acc_stderr": 0.028582709753898445, + "acc_norm": 0.33088235294117646, + "acc_norm_stderr": 0.028582709753898445 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6040816326530613, + "acc_stderr": 0.03130802899065686, + "acc_norm": 0.6040816326530613, + "acc_norm_stderr": 0.03130802899065686 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5021097046413502, + "acc_stderr": 0.032546938018020076, + "acc_norm": 0.5021097046413502, + "acc_norm_stderr": 0.032546938018020076 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3539765319426336, + "acc_stderr": 0.01221350473173165, + "acc_norm": 0.3539765319426336, + "acc_norm_stderr": 0.01221350473173165 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.47549019607843135, + "acc_stderr": 0.035050931943487976, + "acc_norm": 0.47549019607843135, + "acc_norm_stderr": 0.035050931943487976 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.49696969696969695, + "acc_stderr": 0.03904272341431857, + "acc_norm": 0.49696969696969695, + "acc_norm_stderr": 0.03904272341431857 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3047735618115055, + "mc1_stderr": 0.01611412415688246, + "mc2": 0.48003239735848235, + "mc2_stderr": 0.015823495770172346 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5407319952774499, + "acc_stderr": 0.017133218276537666, + "acc_norm": 0.5619834710743802, + "acc_norm_stderr": 0.01705775370216029 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + 
"harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + 
"harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "01-ai/Yi-1.5-9B", + "model_sha": "9a6839c5b9db3dbb245fb98a072bfabc242621f2", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/01-ai/Yi-6B-Chat/result_2023-12-27 01:03:40.json b/01-ai/Yi-6B-Chat/result_2023-12-27 01:03:40.json new file mode 100644 index 0000000000000000000000000000000000000000..7d0f07e750675add229e2a05904eaf302b31acbd --- /dev/null +++ b/01-ai/Yi-6B-Chat/result_2023-12-27 01:03:40.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2235494880546075, + "acc_stderr": 0.012174896631202614, + "acc_norm": 0.26621160409556316, + "acc_norm_stderr": 0.012915774781523216 + }, + "harness|ko_hellaswag|10": { + "acc": 0.30611431985660226, + "acc_stderr": 0.004599358920909541, + "acc_norm": 0.35222067317267475, + "acc_norm_stderr": 0.004766860907171539 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4619883040935672, + "acc_stderr": 0.03823727092882307, + "acc_norm": 0.4619883040935672, + "acc_norm_stderr": 0.03823727092882307 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4854368932038835, + "acc_stderr": 0.04948637324026637, + "acc_norm": 0.4854368932038835, + "acc_norm_stderr": 0.04948637324026637 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3537675606641124, + "acc_stderr": 0.017098184708161906, + "acc_norm": 0.3537675606641124, + "acc_norm_stderr": 0.017098184708161906 + }, + 
"harness|ko_mmlu_anatomy|5": { + "acc": 0.34074074074074073, + "acc_stderr": 0.04094376269996794, + "acc_norm": 0.34074074074074073, + "acc_norm_stderr": 0.04094376269996794 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3276595744680851, + "acc_stderr": 0.030683020843231004, + "acc_norm": 0.3276595744680851, + "acc_norm_stderr": 0.030683020843231004 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3614457831325301, + "acc_stderr": 0.03740059382029319, + "acc_norm": 0.3614457831325301, + "acc_norm_stderr": 0.03740059382029319 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.43086816720257237, + "acc_stderr": 0.028125340983972718, + "acc_norm": 0.43086816720257237, + "acc_norm_stderr": 0.028125340983972718 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.34977578475336324, + "acc_stderr": 0.03200736719484504, + "acc_norm": 0.34977578475336324, + "acc_norm_stderr": 0.03200736719484504 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.366412213740458, + "acc_stderr": 0.04225875451969638, + "acc_norm": 0.366412213740458, + "acc_norm_stderr": 0.04225875451969638 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.42424242424242425, + "acc_stderr": 0.035212249088415824, + "acc_norm": 0.42424242424242425, + "acc_norm_stderr": 0.035212249088415824 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5172413793103449, + "acc_stderr": 0.04164188720169375, + "acc_norm": 0.5172413793103449, + "acc_norm_stderr": 0.04164188720169375 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.04617034827006718, + "acc_norm": 0.3137254901960784, + "acc_norm_stderr": 0.04617034827006718 
+ }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.46638655462184875, + "acc_stderr": 0.03240501447690071, + "acc_norm": 0.46638655462184875, + "acc_norm_stderr": 0.03240501447690071 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3641025641025641, + "acc_stderr": 0.024396672985094764, + "acc_norm": 0.3641025641025641, + "acc_norm_stderr": 0.024396672985094764 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.52, + "acc_stderr": 0.05021167315686779, + "acc_norm": 0.52, + "acc_norm_stderr": 0.05021167315686779 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384739, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384739 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.04826217294139894, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.04826217294139894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3891625615763547, + "acc_stderr": 0.03430462416103872, + "acc_norm": 0.3891625615763547, + "acc_norm_stderr": 0.03430462416103872 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.38064516129032255, + "acc_stderr": 0.027621717832907046, + "acc_norm": 0.38064516129032255, + "acc_norm_stderr": 0.027621717832907046 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.594017094017094, + "acc_stderr": 0.03217180182641086, + "acc_norm": 0.594017094017094, + "acc_norm_stderr": 0.03217180182641086 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4037735849056604, + "acc_stderr": 0.03019761160019795, + "acc_norm": 0.4037735849056604, + "acc_norm_stderr": 0.03019761160019795 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.41818181818181815, + "acc_stderr": 0.0472457740573157, + "acc_norm": 0.41818181818181815, + "acc_norm_stderr": 0.0472457740573157 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3074074074074074, + "acc_stderr": 0.02813325257881563, + "acc_norm": 
0.3074074074074074, + "acc_norm_stderr": 0.02813325257881563 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.271523178807947, + "acc_stderr": 0.036313298039696545, + "acc_norm": 0.271523178807947, + "acc_norm_stderr": 0.036313298039696545 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.4577114427860697, + "acc_stderr": 0.035228658640995975, + "acc_norm": 0.4577114427860697, + "acc_norm_stderr": 0.035228658640995975 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.36416184971098264, + "acc_stderr": 0.03669072477416907, + "acc_norm": 0.36416184971098264, + "acc_norm_stderr": 0.03669072477416907 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3941798941798942, + "acc_stderr": 0.02516798233389414, + "acc_norm": 0.3941798941798942, + "acc_norm_stderr": 0.02516798233389414 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3125, + "acc_stderr": 0.038760854559127644, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.038760854559127644 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4479768786127168, + "acc_stderr": 0.026772990653361813, + "acc_norm": 0.4479768786127168, + "acc_norm_stderr": 0.026772990653361813 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3496932515337423, + "acc_stderr": 0.03746668325470022, + "acc_norm": 0.3496932515337423, + "acc_norm_stderr": 0.03746668325470022 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.37962962962962965, + "acc_stderr": 0.027002521034516475, + "acc_norm": 0.37962962962962965, + "acc_norm_stderr": 0.027002521034516475 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384739, + "acc_norm": 0.27, + 
"acc_norm_stderr": 0.04461960433384739 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.43005181347150256, + "acc_stderr": 0.035729543331448094, + "acc_norm": 0.43005181347150256, + "acc_norm_stderr": 0.035729543331448094 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.041857744240220575, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.041857744240220575 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3779816513761468, + "acc_stderr": 0.02078918706672812, + "acc_norm": 0.3779816513761468, + "acc_norm_stderr": 0.02078918706672812 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.48412698412698413, + "acc_stderr": 0.04469881854072606, + "acc_norm": 0.48412698412698413, + "acc_norm_stderr": 0.04469881854072606 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.028431095444176643, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.028431095444176643 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5867768595041323, + "acc_stderr": 0.04495087843548408, + "acc_norm": 0.5867768595041323, + "acc_norm_stderr": 0.04495087843548408 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4144736842105263, + "acc_stderr": 0.04008973785779206, + "acc_norm": 0.4144736842105263, + "acc_norm_stderr": 0.04008973785779206 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3300653594771242, + "acc_stderr": 0.019023726160724556, + "acc_norm": 0.3300653594771242, + "acc_norm_stderr": 0.019023726160724556 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.36879432624113473, + "acc_stderr": 0.028782227561347247, + "acc_norm": 0.36879432624113473, + "acc_norm_stderr": 0.028782227561347247 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 
0.30357142857142855, + "acc_stderr": 0.04364226155841044, + "acc_norm": 0.30357142857142855, + "acc_norm_stderr": 0.04364226155841044 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.032757734861009996, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.032757734861009996 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23910614525139665, + "acc_stderr": 0.014265554192331161, + "acc_norm": 0.23910614525139665, + "acc_norm_stderr": 0.014265554192331161 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3161764705882353, + "acc_stderr": 0.02824568739146291, + "acc_norm": 0.3161764705882353, + "acc_norm_stderr": 0.02824568739146291 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5755102040816327, + "acc_stderr": 0.031642094879429414, + "acc_norm": 0.5755102040816327, + "acc_norm_stderr": 0.031642094879429414 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.350210970464135, + "acc_stderr": 0.03105239193758435, + "acc_norm": 0.350210970464135, + "acc_norm_stderr": 0.03105239193758435 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3200782268578879, + "acc_stderr": 0.011914791947638519, + "acc_norm": 0.3200782268578879, + "acc_norm_stderr": 0.011914791947638519 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.39215686274509803, + "acc_stderr": 0.03426712349247271, + "acc_norm": 0.39215686274509803, + "acc_norm_stderr": 0.03426712349247271 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3696969696969697, + "acc_stderr": 0.03769430314512567, + "acc_norm": 0.3696969696969697, + 
"acc_norm_stderr": 0.03769430314512567 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3072215422276622, + "mc1_stderr": 0.016150201321323013, + "mc2": 0.48699251655132686, + "mc2_stderr": 0.016174272005682996 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.33530106257378983, + "acc_stderr": 0.016230981232989827, + "acc_norm": 0.3742621015348288, + "acc_norm_stderr": 0.016637917789798732 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + 
"harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "01-ai/Yi-6B-Chat", + "model_sha": "36326f9bc1c8020e0cf29ea830ee5e6679a66a23", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/01-ai/Yi-6B/result_2023-12-27 01:03:17.json b/01-ai/Yi-6B/result_2023-12-27 01:03:17.json new file mode 100644 index 0000000000000000000000000000000000000000..4f459f8f998f2f48ffb1f00fcbd2126cbec91194 --- /dev/null +++ b/01-ai/Yi-6B/result_2023-12-27 01:03:17.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2175767918088737, + "acc_stderr": 0.012057262020972502, + "acc_norm": 0.26109215017064846, + "acc_norm_stderr": 0.012835523909473855 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3021310495917148, + "acc_stderr": 
0.00458243310963648, + "acc_norm": 0.35012945628360886, + "acc_norm_stderr": 0.004760354191370852 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4678362573099415, + "acc_stderr": 0.03826882417660368, + "acc_norm": 0.4678362573099415, + "acc_norm_stderr": 0.03826882417660368 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4368932038834951, + "acc_stderr": 0.04911147107365777, + "acc_norm": 0.4368932038834951, + "acc_norm_stderr": 0.04911147107365777 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.39080459770114945, + "acc_stderr": 0.01744836606706253, + "acc_norm": 0.39080459770114945, + "acc_norm_stderr": 0.01744836606706253 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.35555555555555557, + "acc_stderr": 0.04135176749720386, + "acc_norm": 0.35555555555555557, + "acc_norm_stderr": 0.04135176749720386 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.37446808510638296, + "acc_stderr": 0.031639106653672915, + "acc_norm": 0.37446808510638296, + "acc_norm_stderr": 0.031639106653672915 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3614457831325301, + "acc_stderr": 0.03740059382029319, + "acc_norm": 0.3614457831325301, + "acc_norm_stderr": 0.03740059382029319 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.43086816720257237, + "acc_stderr": 0.028125340983972714, + "acc_norm": 0.43086816720257237, + "acc_norm_stderr": 0.028125340983972714 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3183856502242152, + "acc_stderr": 0.03126580522513713, + "acc_norm": 0.3183856502242152, + "acc_norm_stderr": 0.03126580522513713 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3893129770992366, + "acc_stderr": 0.04276486542814591, + "acc_norm": 0.3893129770992366, + "acc_norm_stderr": 0.04276486542814591 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.4, + 
"acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.42424242424242425, + "acc_stderr": 0.035212249088415824, + "acc_norm": 0.42424242424242425, + "acc_norm_stderr": 0.035212249088415824 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4896551724137931, + "acc_stderr": 0.041657747757287644, + "acc_norm": 0.4896551724137931, + "acc_norm_stderr": 0.041657747757287644 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.04533838195929774, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.04533838195929774 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.47058823529411764, + "acc_stderr": 0.03242225027115007, + "acc_norm": 0.47058823529411764, + "acc_norm_stderr": 0.03242225027115007 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.34102564102564104, + "acc_stderr": 0.02403548967633507, + "acc_norm": 0.34102564102564104, + "acc_norm_stderr": 0.02403548967633507 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3842364532019704, + "acc_stderr": 0.03422398565657551, + "acc_norm": 0.3842364532019704, + "acc_norm_stderr": 0.03422398565657551 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.36774193548387096, + "acc_stderr": 0.027430866579973474, + "acc_norm": 0.36774193548387096, + "acc_norm_stderr": 0.027430866579973474 + }, + 
"harness|ko_mmlu_marketing|5": { + "acc": 0.6410256410256411, + "acc_stderr": 0.03142616993791923, + "acc_norm": 0.6410256410256411, + "acc_norm_stderr": 0.03142616993791923 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3660377358490566, + "acc_stderr": 0.029647813539365263, + "acc_norm": 0.3660377358490566, + "acc_norm_stderr": 0.029647813539365263 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.39090909090909093, + "acc_stderr": 0.04673752333670237, + "acc_norm": 0.39090909090909093, + "acc_norm_stderr": 0.04673752333670237 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2851851851851852, + "acc_stderr": 0.027528599210340492, + "acc_norm": 0.2851851851851852, + "acc_norm_stderr": 0.027528599210340492 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.03710185726119995, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.03710185726119995 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.48756218905472637, + "acc_stderr": 0.03534439848539579, + "acc_norm": 0.48756218905472637, + "acc_norm_stderr": 0.03534439848539579 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3583815028901734, + "acc_stderr": 0.03656343653353159, + "acc_norm": 0.3583815028901734, + "acc_norm_stderr": 0.03656343653353159 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.36772486772486773, + "acc_stderr": 0.024833839825562424, + "acc_norm": 0.36772486772486773, + "acc_norm_stderr": 0.024833839825562424 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.03852084696008534, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.03852084696008534 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.54, + 
"acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.476878612716763, + "acc_stderr": 0.026890297881303128, + "acc_norm": 0.476878612716763, + "acc_norm_stderr": 0.026890297881303128 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.36809815950920244, + "acc_stderr": 0.03789213935838396, + "acc_norm": 0.36809815950920244, + "acc_norm_stderr": 0.03789213935838396 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4012345679012346, + "acc_stderr": 0.027272582849839792, + "acc_norm": 0.4012345679012346, + "acc_norm_stderr": 0.027272582849839792 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.37823834196891193, + "acc_stderr": 0.03499807276193338, + "acc_norm": 0.37823834196891193, + "acc_norm_stderr": 0.03499807276193338 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.040969851398436695, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.040969851398436695 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3669724770642202, + "acc_stderr": 0.020664675659520536, + "acc_norm": 0.3669724770642202, + "acc_norm_stderr": 0.020664675659520536 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.0442626668137991, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.0442626668137991 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4477124183006536, + "acc_stderr": 0.02847293847803353, + "acc_norm": 0.4477124183006536, + "acc_norm_stderr": 0.02847293847803353 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5950413223140496, + "acc_stderr": 0.04481137755942469, + 
"acc_norm": 0.5950413223140496, + "acc_norm_stderr": 0.04481137755942469 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40131578947368424, + "acc_stderr": 0.039889037033362836, + "acc_norm": 0.40131578947368424, + "acc_norm_stderr": 0.039889037033362836 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.32189542483660133, + "acc_stderr": 0.018901015322093092, + "acc_norm": 0.32189542483660133, + "acc_norm_stderr": 0.018901015322093092 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3900709219858156, + "acc_stderr": 0.02909767559946393, + "acc_norm": 0.3900709219858156, + "acc_norm_stderr": 0.02909767559946393 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3392857142857143, + "acc_stderr": 0.04493949068613539, + "acc_norm": 0.3392857142857143, + "acc_norm_stderr": 0.04493949068613539 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.39351851851851855, + "acc_stderr": 0.03331747876370312, + "acc_norm": 0.39351851851851855, + "acc_norm_stderr": 0.03331747876370312 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24581005586592178, + "acc_stderr": 0.014400296429225612, + "acc_norm": 0.24581005586592178, + "acc_norm_stderr": 0.014400296429225612 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.35661764705882354, + "acc_stderr": 0.02909720956841196, + "acc_norm": 0.35661764705882354, + "acc_norm_stderr": 0.02909720956841196 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.563265306122449, + "acc_stderr": 0.031751952375833226, + "acc_norm": 0.563265306122449, + "acc_norm_stderr": 0.031751952375833226 + }, + 
"harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.3459915611814346, + "acc_stderr": 0.030964810588786713, + "acc_norm": 0.3459915611814346, + "acc_norm_stderr": 0.030964810588786713 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3318122555410691, + "acc_stderr": 0.012026088259897634, + "acc_norm": 0.3318122555410691, + "acc_norm_stderr": 0.012026088259897634 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4068627450980392, + "acc_stderr": 0.03447891136353382, + "acc_norm": 0.4068627450980392, + "acc_norm_stderr": 0.03447891136353382 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4303030303030303, + "acc_stderr": 0.03866225962879077, + "acc_norm": 0.4303030303030303, + "acc_norm_stderr": 0.03866225962879077 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2998776009791922, + "mc1_stderr": 0.016040352966713616, + "mc2": 0.47599173122840593, + "mc2_stderr": 0.015773874222919516 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3305785123966942, + "acc_stderr": 0.0161734232988457, + "acc_norm": 0.40968122786304606, + "acc_norm_stderr": 0.01690756819221947 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + 
"harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "01-ai/Yi-6B", + "model_sha": "b881162e08d0fa65011cb53f2c51544e1b623112", + "model_dtype": "torch.float16", + "lighteval_sha": "", + 
"num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/01-ai/Yi-9B-200K/result_2024-07-08 15:13:14.json b/01-ai/Yi-9B-200K/result_2024-07-08 15:13:14.json new file mode 100644 index 0000000000000000000000000000000000000000..f2508936c910dbd1a2db883e2d0e8f5a6fe0250b --- /dev/null +++ b/01-ai/Yi-9B-200K/result_2024-07-08 15:13:14.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2832764505119454, + "acc_stderr": 0.013167478735134576, + "acc_norm": 0.3378839590443686, + "acc_norm_stderr": 0.013822047922283523 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3271260705038837, + "acc_stderr": 0.004682048906622317, + "acc_norm": 0.40290778729336785, + "acc_norm_stderr": 0.00489480111989861 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.03811079669833531, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.03811079669833531 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5533980582524272, + "acc_stderr": 0.04922424153458935, + "acc_norm": 0.5533980582524272, + "acc_norm_stderr": 0.04922424153458935 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.48020434227330777, + "acc_stderr": 0.017865944827291605, + "acc_norm": 0.48020434227330777, + "acc_norm_stderr": 0.017865944827291605 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2740740740740741, + "acc_stderr": 0.03853254836552003, + "acc_norm": 0.2740740740740741, + "acc_norm_stderr": 0.03853254836552003 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.44680851063829785, + "acc_stderr": 0.0325005368436584, + "acc_norm": 0.44680851063829785, + "acc_norm_stderr": 0.0325005368436584 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.43373493975903615, + "acc_stderr": 
0.03858158940685515, + "acc_norm": 0.43373493975903615, + "acc_norm_stderr": 0.03858158940685515 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4887459807073955, + "acc_stderr": 0.028390897396863533, + "acc_norm": 0.4887459807073955, + "acc_norm_stderr": 0.028390897396863533 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4798206278026906, + "acc_stderr": 0.033530461674123, + "acc_norm": 0.4798206278026906, + "acc_norm_stderr": 0.033530461674123 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48091603053435117, + "acc_stderr": 0.04382094705550989, + "acc_norm": 0.48091603053435117, + "acc_norm_stderr": 0.04382094705550989 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5404040404040404, + "acc_stderr": 0.035507024651313425, + "acc_norm": 0.5404040404040404, + "acc_norm_stderr": 0.035507024651313425 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5586206896551724, + "acc_stderr": 0.04137931034482757, + "acc_norm": 0.5586206896551724, + "acc_norm_stderr": 0.04137931034482757 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.04533838195929775, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.04533838195929775 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5252100840336135, + "acc_stderr": 0.0324371805513741, + "acc_norm": 0.5252100840336135, + "acc_norm_stderr": 0.0324371805513741 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4846153846153846, + "acc_stderr": 0.025339003010106515, + "acc_norm": 0.4846153846153846, + "acc_norm_stderr": 0.025339003010106515 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.58, + "acc_stderr": 0.04960449637488583, + "acc_norm": 0.58, + "acc_norm_stderr": 0.04960449637488583 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 
0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5648148148148148, + "acc_stderr": 0.04792898170907061, + "acc_norm": 0.5648148148148148, + "acc_norm_stderr": 0.04792898170907061 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.43349753694581283, + "acc_stderr": 0.034867317274198714, + "acc_norm": 0.43349753694581283, + "acc_norm_stderr": 0.034867317274198714 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4838709677419355, + "acc_stderr": 0.028429203176724555, + "acc_norm": 0.4838709677419355, + "acc_norm_stderr": 0.028429203176724555 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.688034188034188, + "acc_stderr": 0.030351527323344944, + "acc_norm": 0.688034188034188, + "acc_norm_stderr": 0.030351527323344944 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4339622641509434, + "acc_stderr": 0.03050329201334259, + "acc_norm": 0.4339622641509434, + "acc_norm_stderr": 0.03050329201334259 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4818181818181818, + "acc_stderr": 0.04785964010794916, + "acc_norm": 0.4818181818181818, + "acc_norm_stderr": 0.04785964010794916 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.4111111111111111, + "acc_stderr": 0.029999923508706675, + "acc_norm": 0.4111111111111111, + "acc_norm_stderr": 0.029999923508706675 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.038020397601079024, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.038020397601079024 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.582089552238806, + "acc_stderr": 0.034875586404620636, + "acc_norm": 0.582089552238806, + "acc_norm_stderr": 0.034875586404620636 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4393063583815029, + "acc_stderr": 0.037842719328874674, + "acc_norm": 0.4393063583815029, + "acc_norm_stderr": 
0.037842719328874674 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.4973544973544973, + "acc_stderr": 0.025750949678130387, + "acc_norm": 0.4973544973544973, + "acc_norm_stderr": 0.025750949678130387 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.03852084696008534, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.03852084696008534 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237101, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237101 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.546242774566474, + "acc_stderr": 0.026803720583206167, + "acc_norm": 0.546242774566474, + "acc_norm_stderr": 0.026803720583206167 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.48466257668711654, + "acc_stderr": 0.03926522378708843, + "acc_norm": 0.48466257668711654, + "acc_norm_stderr": 0.03926522378708843 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.49382716049382713, + "acc_stderr": 0.027818623962583302, + "acc_norm": 0.49382716049382713, + "acc_norm_stderr": 0.027818623962583302 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.48704663212435234, + "acc_stderr": 0.036072280610477486, + "acc_norm": 0.48704663212435234, + "acc_norm_stderr": 0.036072280610477486 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3508771929824561, + "acc_stderr": 0.04489539350270697, + "acc_norm": 0.3508771929824561, + "acc_norm_stderr": 0.04489539350270697 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.47889908256880737, + "acc_stderr": 0.02141822475426464, + "acc_norm": 
0.47889908256880737, + "acc_norm_stderr": 0.02141822475426464 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.044444444444444495, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.044444444444444495 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.46405228758169936, + "acc_stderr": 0.028555827516528784, + "acc_norm": 0.46405228758169936, + "acc_norm_stderr": 0.028555827516528784 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.55, + "acc_stderr": 0.05, + "acc_norm": 0.55, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6859504132231405, + "acc_stderr": 0.042369647530410184, + "acc_norm": 0.6859504132231405, + "acc_norm_stderr": 0.042369647530410184 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4605263157894737, + "acc_stderr": 0.04056242252249034, + "acc_norm": 0.4605263157894737, + "acc_norm_stderr": 0.04056242252249034 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.41013071895424835, + "acc_stderr": 0.019898412717635903, + "acc_norm": 0.41013071895424835, + "acc_norm_stderr": 0.019898412717635903 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.36524822695035464, + "acc_stderr": 0.028723863853281278, + "acc_norm": 0.36524822695035464, + "acc_norm_stderr": 0.028723863853281278 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4017857142857143, + "acc_stderr": 0.04653333146973646, + "acc_norm": 0.4017857142857143, + "acc_norm_stderr": 0.04653333146973646 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4351851851851852, + "acc_stderr": 0.033812000056435254, + "acc_norm": 0.4351851851851852, + "acc_norm_stderr": 0.033812000056435254 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.3139664804469274, + "acc_stderr": 0.01552192393352364, + "acc_norm": 0.3139664804469274, + "acc_norm_stderr": 0.01552192393352364 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.55, + "acc_stderr": 
0.049999999999999996, + "acc_norm": 0.55, + "acc_norm_stderr": 0.049999999999999996 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.71, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.71, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3125, + "acc_stderr": 0.02815637344037142, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.02815637344037142 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5877551020408164, + "acc_stderr": 0.0315123604467427, + "acc_norm": 0.5877551020408164, + "acc_norm_stderr": 0.0315123604467427 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5654008438818565, + "acc_stderr": 0.03226759995510145, + "acc_norm": 0.5654008438818565, + "acc_norm_stderr": 0.03226759995510145 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.37157757496740546, + "acc_stderr": 0.012341828514528289, + "acc_norm": 0.37157757496740546, + "acc_norm_stderr": 0.012341828514528289 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.44607843137254904, + "acc_stderr": 0.03488845451304974, + "acc_norm": 0.44607843137254904, + "acc_norm_stderr": 0.03488845451304974 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.038956580652718446, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.038956580652718446 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.30354957160342716, + "mc1_stderr": 0.016095884155386854, + "mc2": 0.47423242047841757, + "mc2_stderr": 0.015707621226106624 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4887839433293979, + "acc_stderr": 0.017186028469489283, + "acc_norm": 0.5171192443919717, + "acc_norm_stderr": 0.01718027524608563 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + 
"harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + 
"harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "01-ai/Yi-9B-200K", + "model_sha": "0d1bd7c8efcce669cb35edda2106ddc8dbcd7dff", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/01-ai/Yi-9B/result_2024-05-16 07:03:08.json b/01-ai/Yi-9B/result_2024-05-16 07:03:08.json new file mode 100644 index 0000000000000000000000000000000000000000..76a90bd322d36f7ae42090fb491e5bcdda69fe87 --- /dev/null +++ b/01-ai/Yi-9B/result_2024-05-16 07:03:08.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.29180887372013653, + "acc_stderr": 0.013284525292403506, + "acc_norm": 0.3430034129692833, + "acc_norm_stderr": 0.013872423223718169 + }, + "harness|ko_hellaswag|10": { + "acc": 0.32822146982672773, + "acc_stderr": 0.004686062421158143, + "acc_norm": 0.4041027683728341, + "acc_norm_stderr": 0.004897146690596263 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5029239766081871, + "acc_stderr": 0.03834759370936839, + "acc_norm": 0.5029239766081871, + "acc_norm_stderr": 0.03834759370936839 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5728155339805825, + "acc_stderr": 0.04897957737781168, + "acc_norm": 0.5728155339805825, + "acc_norm_stderr": 0.04897957737781168 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.44189016602809705, + "acc_stderr": 0.01775880053421441, + "acc_norm": 
0.44189016602809705, + "acc_norm_stderr": 0.01775880053421441 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.03820169914517905, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.03820169914517905 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4297872340425532, + "acc_stderr": 0.03236214467715564, + "acc_norm": 0.4297872340425532, + "acc_norm_stderr": 0.03236214467715564 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.43373493975903615, + "acc_stderr": 0.03858158940685515, + "acc_norm": 0.43373493975903615, + "acc_norm_stderr": 0.03858158940685515 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4662379421221865, + "acc_stderr": 0.028333277109562783, + "acc_norm": 0.4662379421221865, + "acc_norm_stderr": 0.028333277109562783 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.43946188340807174, + "acc_stderr": 0.03331092511038179, + "acc_norm": 0.43946188340807174, + "acc_norm_stderr": 0.03331092511038179 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5114503816793893, + "acc_stderr": 0.04384140024078016, + "acc_norm": 0.5114503816793893, + "acc_norm_stderr": 0.04384140024078016 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956913, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956913 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5656565656565656, + "acc_stderr": 0.03531505879359183, + "acc_norm": 0.5656565656565656, + "acc_norm_stderr": 0.03531505879359183 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5310344827586206, + "acc_stderr": 0.04158632762097828, + "acc_norm": 0.5310344827586206, + "acc_norm_stderr": 0.04158632762097828 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.04617034827006717, + 
"acc_norm": 0.3137254901960784, + "acc_norm_stderr": 0.04617034827006717 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5378151260504201, + "acc_stderr": 0.0323854694875898, + "acc_norm": 0.5378151260504201, + "acc_norm_stderr": 0.0323854694875898 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4794871794871795, + "acc_stderr": 0.025329663163489943, + "acc_norm": 0.4794871794871795, + "acc_norm_stderr": 0.025329663163489943 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6111111111111112, + "acc_stderr": 0.04712821257426769, + "acc_norm": 0.6111111111111112, + "acc_norm_stderr": 0.04712821257426769 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.03481904844438803, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.03481904844438803 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.43870967741935485, + "acc_stderr": 0.028229497320317223, + "acc_norm": 0.43870967741935485, + "acc_norm_stderr": 0.028229497320317223 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7435897435897436, + "acc_stderr": 0.028605953702004243, + "acc_norm": 0.7435897435897436, + "acc_norm_stderr": 0.028605953702004243 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4679245283018868, + "acc_stderr": 0.030709486992556545, + "acc_norm": 0.4679245283018868, + "acc_norm_stderr": 0.030709486992556545 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.45454545454545453, + "acc_stderr": 0.04769300568972743, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.04769300568972743 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 
0.37777777777777777, + "acc_stderr": 0.029560707392465715, + "acc_norm": 0.37777777777777777, + "acc_norm_stderr": 0.029560707392465715 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + "acc_stderr": 0.037345356767871984, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.037345356767871984 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5621890547263682, + "acc_stderr": 0.035080801121998406, + "acc_norm": 0.5621890547263682, + "acc_norm_stderr": 0.035080801121998406 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4277456647398844, + "acc_stderr": 0.037724468575180276, + "acc_norm": 0.4277456647398844, + "acc_norm_stderr": 0.037724468575180276 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.49206349206349204, + "acc_stderr": 0.025748065871673286, + "acc_norm": 0.49206349206349204, + "acc_norm_stderr": 0.025748065871673286 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3263888888888889, + "acc_stderr": 0.03921067198982266, + "acc_norm": 0.3263888888888889, + "acc_norm_stderr": 0.03921067198982266 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.53, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.53, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.47109826589595377, + "acc_stderr": 0.02687408588351835, + "acc_norm": 0.47109826589595377, + "acc_norm_stderr": 0.02687408588351835 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.44785276073619634, + "acc_stderr": 0.03906947479456601, + "acc_norm": 0.44785276073619634, + "acc_norm_stderr": 0.03906947479456601 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4845679012345679, + "acc_stderr": 0.02780749004427621, + "acc_norm": 0.4845679012345679, + "acc_norm_stderr": 0.02780749004427621 + }, + 
"harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.47150259067357514, + "acc_stderr": 0.03602573571288441, + "acc_norm": 0.47150259067357514, + "acc_norm_stderr": 0.03602573571288441 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.37719298245614036, + "acc_stderr": 0.04559522141958216, + "acc_norm": 0.37719298245614036, + "acc_norm_stderr": 0.04559522141958216 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5009174311926605, + "acc_stderr": 0.021437287056051215, + "acc_norm": 0.5009174311926605, + "acc_norm_stderr": 0.021437287056051215 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4523809523809524, + "acc_stderr": 0.044518079590553275, + "acc_norm": 0.4523809523809524, + "acc_norm_stderr": 0.044518079590553275 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4869281045751634, + "acc_stderr": 0.028620130800700246, + "acc_norm": 0.4869281045751634, + "acc_norm_stderr": 0.028620130800700246 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.628099173553719, + "acc_stderr": 0.04412015806624504, + "acc_norm": 0.628099173553719, + "acc_norm_stderr": 0.04412015806624504 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4144736842105263, + "acc_stderr": 0.04008973785779207, + "acc_norm": 0.4144736842105263, + "acc_norm_stderr": 0.04008973785779207 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3839869281045752, + "acc_stderr": 0.019675808135281532, + "acc_norm": 0.3839869281045752, + "acc_norm_stderr": 0.019675808135281532 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3900709219858156, + "acc_stderr": 0.029097675599463926, + "acc_norm": 0.3900709219858156, + 
"acc_norm_stderr": 0.029097675599463926 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.44642857142857145, + "acc_stderr": 0.04718471485219588, + "acc_norm": 0.44642857142857145, + "acc_norm_stderr": 0.04718471485219588 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.49537037037037035, + "acc_stderr": 0.03409825519163572, + "acc_norm": 0.49537037037037035, + "acc_norm_stderr": 0.03409825519163572 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2547486033519553, + "acc_stderr": 0.014572650383409153, + "acc_norm": 0.2547486033519553, + "acc_norm_stderr": 0.014572650383409153 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.73, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.73, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.35661764705882354, + "acc_stderr": 0.029097209568411952, + "acc_norm": 0.35661764705882354, + "acc_norm_stderr": 0.029097209568411952 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5673469387755102, + "acc_stderr": 0.031717528240626645, + "acc_norm": 0.5673469387755102, + "acc_norm_stderr": 0.031717528240626645 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5021097046413502, + "acc_stderr": 0.032546938018020076, + "acc_norm": 0.5021097046413502, + "acc_norm_stderr": 0.032546938018020076 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.33833116036505867, + "acc_stderr": 0.01208426562634422, + "acc_norm": 0.33833116036505867, + "acc_norm_stderr": 0.01208426562634422 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4117647058823529, + "acc_stderr": 0.0345423658538061, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.0345423658538061 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3575757575757576, + "acc_stderr": 
0.037425970438065864, + "acc_norm": 0.3575757575757576, + "acc_norm_stderr": 0.037425970438065864 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.29008567931456547, + "mc1_stderr": 0.01588623687420952, + "mc2": 0.47399915157940936, + "mc2_stderr": 0.015777434106257295 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4805194805194805, + "acc_stderr": 0.01717730199234255, + "acc_norm": 0.5053128689492326, + "acc_norm_stderr": 0.01718938362722971 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + 
"harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "01-ai/Yi-9B", + "model_sha": "95b8e272566167182ef1c53657a97d87a4084c9e", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/12thD/I-SOLAR-10.7B-dpo-sft-v0.1/result_2024-04-09 05:16:32.json b/12thD/I-SOLAR-10.7B-dpo-sft-v0.1/result_2024-04-09 05:16:32.json new file mode 100644 index 0000000000000000000000000000000000000000..3a1e119493f8979b7c412a285d15ec69bb9913d6 --- /dev/null +++ b/12thD/I-SOLAR-10.7B-dpo-sft-v0.1/result_2024-04-09 05:16:32.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.6612627986348123, + "acc_stderr": 0.01383056892797433, + "acc_norm": 0.7209897610921502, + 
"acc_norm_stderr": 0.013106784883601336 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4347739494124676, + "acc_stderr": 0.0049471417973841305, + "acc_norm": 0.5764787890858395, + "acc_norm_stderr": 0.004931065434173685 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6783625730994152, + "acc_stderr": 0.03582529442573122, + "acc_norm": 0.6783625730994152, + "acc_norm_stderr": 0.03582529442573122 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.7087378640776699, + "acc_stderr": 0.04498676320572924, + "acc_norm": 0.7087378640776699, + "acc_norm_stderr": 0.04498676320572924 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6807151979565773, + "acc_stderr": 0.016671261749538736, + "acc_norm": 0.6807151979565773, + "acc_norm_stderr": 0.016671261749538736 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.43703703703703706, + "acc_stderr": 0.04284958639753398, + "acc_norm": 0.43703703703703706, + "acc_norm_stderr": 0.04284958639753398 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.5319148936170213, + "acc_stderr": 0.03261936918467381, + "acc_norm": 0.5319148936170213, + "acc_norm_stderr": 0.03261936918467381 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4759036144578313, + "acc_stderr": 0.03887971849597264, + "acc_norm": 0.4759036144578313, + "acc_norm_stderr": 0.03887971849597264 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6334405144694534, + "acc_stderr": 0.02736807824397164, + "acc_norm": 0.6334405144694534, + "acc_norm_stderr": 0.02736807824397164 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.6322869955156951, + "acc_stderr": 0.03236198350928275, + "acc_norm": 0.6322869955156951, + "acc_norm_stderr": 0.03236198350928275 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5343511450381679, + "acc_stderr": 0.04374928560599738, + "acc_norm": 0.5343511450381679, + 
"acc_norm_stderr": 0.04374928560599738 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.51, + "acc_stderr": 0.050241839379569095, + "acc_norm": 0.51, + "acc_norm_stderr": 0.050241839379569095 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7171717171717171, + "acc_stderr": 0.032087795587867514, + "acc_norm": 0.7171717171717171, + "acc_norm_stderr": 0.032087795587867514 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.46206896551724136, + "acc_stderr": 0.041546596717075474, + "acc_norm": 0.46206896551724136, + "acc_norm_stderr": 0.041546596717075474 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.04617034827006717, + "acc_norm": 0.3137254901960784, + "acc_norm_stderr": 0.04617034827006717 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6176470588235294, + "acc_stderr": 0.031566630992154156, + "acc_norm": 0.6176470588235294, + "acc_norm_stderr": 0.031566630992154156 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5846153846153846, + "acc_stderr": 0.024985354923102353, + "acc_norm": 0.5846153846153846, + "acc_norm_stderr": 0.024985354923102353 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.66, + "acc_stderr": 0.04760952285695238, + "acc_norm": 0.66, + "acc_norm_stderr": 0.04760952285695238 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6111111111111112, + "acc_stderr": 0.04712821257426769, + "acc_norm": 0.6111111111111112, + "acc_norm_stderr": 0.04712821257426769 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4187192118226601, + "acc_stderr": 0.03471192860518468, + "acc_norm": 0.4187192118226601, + "acc_norm_stderr": 0.03471192860518468 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6161290322580645, + "acc_stderr": 0.02766618207553963, + 
"acc_norm": 0.6161290322580645, + "acc_norm_stderr": 0.02766618207553963 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.8290598290598291, + "acc_stderr": 0.024662496845209804, + "acc_norm": 0.8290598290598291, + "acc_norm_stderr": 0.024662496845209804 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.569811320754717, + "acc_stderr": 0.030471445867183235, + "acc_norm": 0.569811320754717, + "acc_norm_stderr": 0.030471445867183235 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5727272727272728, + "acc_stderr": 0.047381987035454834, + "acc_norm": 0.5727272727272728, + "acc_norm_stderr": 0.047381987035454834 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3851851851851852, + "acc_stderr": 0.029670906124630886, + "acc_norm": 0.3851851851851852, + "acc_norm_stderr": 0.029670906124630886 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.36423841059602646, + "acc_stderr": 0.03929111781242741, + "acc_norm": 0.36423841059602646, + "acc_norm_stderr": 0.03929111781242741 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6467661691542289, + "acc_stderr": 0.03379790611796777, + "acc_norm": 0.6467661691542289, + "acc_norm_stderr": 0.03379790611796777 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5606936416184971, + "acc_stderr": 0.037842719328874674, + "acc_norm": 0.5606936416184971, + "acc_norm_stderr": 0.037842719328874674 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.37566137566137564, + "acc_stderr": 0.02494236893115979, + "acc_norm": 0.37566137566137564, + "acc_norm_stderr": 0.02494236893115979 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4861111111111111, + "acc_stderr": 0.04179596617581, + "acc_norm": 0.4861111111111111, + "acc_norm_stderr": 0.04179596617581 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.72, + 
"acc_stderr": 0.04512608598542126, + "acc_norm": 0.72, + "acc_norm_stderr": 0.04512608598542126 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.6127167630057804, + "acc_stderr": 0.026226158605124655, + "acc_norm": 0.6127167630057804, + "acc_norm_stderr": 0.026226158605124655 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5398773006134969, + "acc_stderr": 0.03915857291436972, + "acc_norm": 0.5398773006134969, + "acc_norm_stderr": 0.03915857291436972 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6296296296296297, + "acc_stderr": 0.026869490744815264, + "acc_norm": 0.6296296296296297, + "acc_norm_stderr": 0.026869490744815264 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6528497409326425, + "acc_stderr": 0.03435696168361356, + "acc_norm": 0.6528497409326425, + "acc_norm_stderr": 0.03435696168361356 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.39473684210526316, + "acc_stderr": 0.04598188057816542, + "acc_norm": 0.39473684210526316, + "acc_norm_stderr": 0.04598188057816542 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.7027522935779816, + "acc_stderr": 0.019595707224643558, + "acc_norm": 0.7027522935779816, + "acc_norm_stderr": 0.019595707224643558 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3492063492063492, + "acc_stderr": 0.04263906892795132, + "acc_norm": 0.3492063492063492, + "acc_norm_stderr": 0.04263906892795132 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5294117647058824, + "acc_stderr": 0.028580341065138293, + "acc_norm": 0.5294117647058824, + "acc_norm_stderr": 0.028580341065138293 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.64, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.64, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 
0.6694214876033058, + "acc_stderr": 0.04294340845212093, + "acc_norm": 0.6694214876033058, + "acc_norm_stderr": 0.04294340845212093 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.618421052631579, + "acc_stderr": 0.03953173377749194, + "acc_norm": 0.618421052631579, + "acc_norm_stderr": 0.03953173377749194 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5326797385620915, + "acc_stderr": 0.020184583359102195, + "acc_norm": 0.5326797385620915, + "acc_norm_stderr": 0.020184583359102195 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3900709219858156, + "acc_stderr": 0.02909767559946393, + "acc_norm": 0.3900709219858156, + "acc_norm_stderr": 0.02909767559946393 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4017857142857143, + "acc_stderr": 0.04653333146973646, + "acc_norm": 0.4017857142857143, + "acc_norm_stderr": 0.04653333146973646 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.03388857118502325, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.03388857118502325 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2748603351955307, + "acc_stderr": 0.014931316703220517, + "acc_norm": 0.2748603351955307, + "acc_norm_stderr": 0.014931316703220517 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.049999999999999996, + "acc_norm": 0.45, + "acc_norm_stderr": 0.049999999999999996 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.67, + "acc_stderr": 0.047258156262526094, + "acc_norm": 0.67, + "acc_norm_stderr": 0.047258156262526094 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.48161764705882354, + "acc_stderr": 0.03035230339535196, + "acc_norm": 0.48161764705882354, + "acc_norm_stderr": 0.03035230339535196 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6040816326530613, + "acc_stderr": 0.031308028990656864, + "acc_norm": 0.6040816326530613, + "acc_norm_stderr": 
0.031308028990656864 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6751054852320675, + "acc_stderr": 0.030486039389105296, + "acc_norm": 0.6751054852320675, + "acc_norm_stderr": 0.030486039389105296 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3983050847457627, + "acc_stderr": 0.012503310565166235, + "acc_norm": 0.3983050847457627, + "acc_norm_stderr": 0.012503310565166235 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5980392156862745, + "acc_stderr": 0.03441190023482465, + "acc_norm": 0.5980392156862745, + "acc_norm_stderr": 0.03441190023482465 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5333333333333333, + "acc_stderr": 0.03895658065271847, + "acc_norm": 0.5333333333333333, + "acc_norm_stderr": 0.03895658065271847 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.5642594859241126, + "mc1_stderr": 0.01735834539886313, + "mc2": 0.6711090076900339, + "mc2_stderr": 0.014635725108441697 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.45808736717827625, + "acc_stderr": 0.017129852117911147, + "acc_norm": 0.5159386068476978, + "acc_norm_stderr": 0.017181617837190195 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + 
"harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "12thD/I-SOLAR-10.7B-dpo-sft-v0.1", + "model_sha": "38206239efc52267202f79250058496d78de4585", 
+ "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/12thD/ko-Llama-3-8B-sft-v0.1/result_2024-04-22 02:51:25.json b/12thD/ko-Llama-3-8B-sft-v0.1/result_2024-04-22 02:51:25.json new file mode 100644 index 0000000000000000000000000000000000000000..19bb1c420d9dfe111e0886d0b9d138c7b575d8ea --- /dev/null +++ b/12thD/ko-Llama-3-8B-sft-v0.1/result_2024-04-22 02:51:25.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3703071672354949, + "acc_stderr": 0.01411129875167495, + "acc_norm": 0.4249146757679181, + "acc_norm_stderr": 0.014445698968520769 + }, + "harness|ko_hellaswag|10": { + "acc": 0.37621987651862177, + "acc_stderr": 0.004834461997944866, + "acc_norm": 0.498406691894045, + "acc_norm_stderr": 0.004989756076956349 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5497076023391813, + "acc_stderr": 0.03815827365913237, + "acc_norm": 0.5497076023391813, + "acc_norm_stderr": 0.03815827365913237 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6213592233009708, + "acc_stderr": 0.048026946982589726, + "acc_norm": 0.6213592233009708, + "acc_norm_stderr": 0.048026946982589726 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.47509578544061304, + "acc_stderr": 0.01785777070490102, + "acc_norm": 0.47509578544061304, + "acc_norm_stderr": 0.01785777070490102 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3851851851851852, + "acc_stderr": 0.042039210401562783, + "acc_norm": 0.3851851851851852, + "acc_norm_stderr": 0.042039210401562783 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909284, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909284 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.44680851063829785, + "acc_stderr": 0.032500536843658404, + "acc_norm": 0.44680851063829785, + "acc_norm_stderr": 
0.032500536843658404 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39156626506024095, + "acc_stderr": 0.03799857454479637, + "acc_norm": 0.39156626506024095, + "acc_norm_stderr": 0.03799857454479637 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5401929260450161, + "acc_stderr": 0.028306190403305693, + "acc_norm": 0.5401929260450161, + "acc_norm_stderr": 0.028306190403305693 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5112107623318386, + "acc_stderr": 0.033549366530984746, + "acc_norm": 0.5112107623318386, + "acc_norm_stderr": 0.033549366530984746 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5267175572519084, + "acc_stderr": 0.04379024936553894, + "acc_norm": 0.5267175572519084, + "acc_norm_stderr": 0.04379024936553894 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.48, + "acc_stderr": 0.05021167315686779, + "acc_norm": 0.48, + "acc_norm_stderr": 0.05021167315686779 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5202020202020202, + "acc_stderr": 0.035594435655639196, + "acc_norm": 0.5202020202020202, + "acc_norm_stderr": 0.035594435655639196 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5448275862068965, + "acc_stderr": 0.04149886942192117, + "acc_norm": 0.5448275862068965, + "acc_norm_stderr": 0.04149886942192117 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.04440521906179328, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.04440521906179328 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5672268907563025, + "acc_stderr": 0.032183581077426124, + "acc_norm": 0.5672268907563025, + "acc_norm_stderr": 0.032183581077426124 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5205128205128206, + "acc_stderr": 0.02532966316348994, + "acc_norm": 0.5205128205128206, + "acc_norm_stderr": 0.02532966316348994 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + 
"acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6111111111111112, + "acc_stderr": 0.0471282125742677, + "acc_norm": 0.6111111111111112, + "acc_norm_stderr": 0.0471282125742677 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4433497536945813, + "acc_stderr": 0.03495334582162934, + "acc_norm": 0.4433497536945813, + "acc_norm_stderr": 0.03495334582162934 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5032258064516129, + "acc_stderr": 0.028443414226438316, + "acc_norm": 0.5032258064516129, + "acc_norm_stderr": 0.028443414226438316 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7521367521367521, + "acc_stderr": 0.028286324075564407, + "acc_norm": 0.7521367521367521, + "acc_norm_stderr": 0.028286324075564407 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5056603773584906, + "acc_stderr": 0.03077090076385131, + "acc_norm": 0.5056603773584906, + "acc_norm_stderr": 0.03077090076385131 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5363636363636364, + "acc_stderr": 0.04776449162396197, + "acc_norm": 0.5363636363636364, + "acc_norm_stderr": 0.04776449162396197 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.029116617606083015, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.029116617606083015 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.40397350993377484, + "acc_stderr": 0.0400648568536534, + "acc_norm": 0.40397350993377484, + "acc_norm_stderr": 0.0400648568536534 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6517412935323383, + "acc_stderr": 0.033687874661154596, + "acc_norm": 0.6517412935323383, + "acc_norm_stderr": 0.033687874661154596 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.44508670520231214, + 
"acc_stderr": 0.03789401760283648, + "acc_norm": 0.44508670520231214, + "acc_norm_stderr": 0.03789401760283648 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.02459497512892094, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.02459497512892094 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4791666666666667, + "acc_stderr": 0.041775789507399935, + "acc_norm": 0.4791666666666667, + "acc_norm_stderr": 0.041775789507399935 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.68, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.68, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5433526011560693, + "acc_stderr": 0.026817718130348916, + "acc_norm": 0.5433526011560693, + "acc_norm_stderr": 0.026817718130348916 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4601226993865031, + "acc_stderr": 0.0391585729143697, + "acc_norm": 0.4601226993865031, + "acc_norm_stderr": 0.0391585729143697 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5061728395061729, + "acc_stderr": 0.02781862396258329, + "acc_norm": 0.5061728395061729, + "acc_norm_stderr": 0.02781862396258329 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5647668393782384, + "acc_stderr": 0.03578038165008585, + "acc_norm": 0.5647668393782384, + "acc_norm_stderr": 0.03578038165008585 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.32456140350877194, + "acc_stderr": 0.04404556157374768, + "acc_norm": 0.32456140350877194, + "acc_norm_stderr": 0.04404556157374768 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 
0.5706422018348624, + "acc_stderr": 0.021222286397236508, + "acc_norm": 0.5706422018348624, + "acc_norm_stderr": 0.021222286397236508 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.36507936507936506, + "acc_stderr": 0.043062412591271526, + "acc_norm": 0.36507936507936506, + "acc_norm_stderr": 0.043062412591271526 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.49673202614379086, + "acc_stderr": 0.02862930519400354, + "acc_norm": 0.49673202614379086, + "acc_norm_stderr": 0.02862930519400354 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6694214876033058, + "acc_stderr": 0.04294340845212094, + "acc_norm": 0.6694214876033058, + "acc_norm_stderr": 0.04294340845212094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.48026315789473684, + "acc_stderr": 0.040657710025626057, + "acc_norm": 0.48026315789473684, + "acc_norm_stderr": 0.040657710025626057 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4395424836601307, + "acc_stderr": 0.020079420408087925, + "acc_norm": 0.4395424836601307, + "acc_norm_stderr": 0.020079420408087925 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32269503546099293, + "acc_stderr": 0.02788913930053478, + "acc_norm": 0.32269503546099293, + "acc_norm_stderr": 0.02788913930053478 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.375, + "acc_stderr": 0.04595091388086298, + "acc_norm": 0.375, + "acc_norm_stderr": 0.04595091388086298 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4583333333333333, + "acc_stderr": 0.033981108902946366, + "acc_norm": 0.4583333333333333, + "acc_norm_stderr": 0.033981108902946366 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2346368715083799, + "acc_stderr": 0.01417304409830367, + "acc_norm": 0.2346368715083799, + "acc_norm_stderr": 0.01417304409830367 + }, + 
"harness|ko_mmlu_college_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.64, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.64, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.40441176470588236, + "acc_stderr": 0.029812630701569743, + "acc_norm": 0.40441176470588236, + "acc_norm_stderr": 0.029812630701569743 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6244897959183674, + "acc_stderr": 0.031001209039894843, + "acc_norm": 0.6244897959183674, + "acc_norm_stderr": 0.031001209039894843 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6455696202531646, + "acc_stderr": 0.0311373042971858, + "acc_norm": 0.6455696202531646, + "acc_norm_stderr": 0.0311373042971858 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.35853976531942633, + "acc_stderr": 0.012248487319682737, + "acc_norm": 0.35853976531942633, + "acc_norm_stderr": 0.012248487319682737 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5686274509803921, + "acc_stderr": 0.034760990605016355, + "acc_norm": 0.5686274509803921, + "acc_norm_stderr": 0.034760990605016355 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6181818181818182, + "acc_stderr": 0.037937131711656344, + "acc_norm": 0.6181818181818182, + "acc_norm_stderr": 0.037937131711656344 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3219094247246022, + "mc1_stderr": 0.016355567611960383, + "mc2": 0.4972168450482467, + "mc2_stderr": 0.015567232356568489 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4805194805194805, + "acc_stderr": 0.01717730199234255, + "acc_norm": 0.525383707201889, + "acc_norm_stderr": 0.017168187201429253 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + 
"harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + 
"harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "12thD/ko-Llama-3-8B-sft-v0.1", + "model_sha": "5aca578ff0479831b5417ce031693c1f97899620", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/12thD/ko-Llama-3-8B-sft-v0.3/result_2024-05-02 01:25:36.json b/12thD/ko-Llama-3-8B-sft-v0.3/result_2024-05-02 01:25:36.json new file mode 100644 index 0000000000000000000000000000000000000000..0bb22a77e18d896558a25cf89e064eebcb4d1cf5 --- /dev/null +++ b/12thD/ko-Llama-3-8B-sft-v0.3/result_2024-05-02 01:25:36.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.34044368600682595, + "acc_stderr": 0.013847460518892976, + "acc_norm": 0.4044368600682594, + "acc_norm_stderr": 0.014342036483436175 + }, + "harness|ko_hellaswag|10": { + "acc": 0.37183827922724555, + "acc_stderr": 0.004823078145064962, + "acc_norm": 0.4825731925911173, + "acc_norm_stderr": 0.004986749760948692 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5614035087719298, + "acc_stderr": 0.038057975055904594, + "acc_norm": 0.5614035087719298, + "acc_norm_stderr": 0.038057975055904594 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5922330097087378, + "acc_stderr": 0.04865777570410769, + "acc_norm": 0.5922330097087378, + 
"acc_norm_stderr": 0.04865777570410769 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4878671775223499, + "acc_stderr": 0.017874698667491345, + "acc_norm": 0.4878671775223499, + "acc_norm_stderr": 0.017874698667491345 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.042446332383532286, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.042446332383532286 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.43829787234042555, + "acc_stderr": 0.03243618636108102, + "acc_norm": 0.43829787234042555, + "acc_norm_stderr": 0.03243618636108102 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3795180722891566, + "acc_stderr": 0.037777988227480165, + "acc_norm": 0.3795180722891566, + "acc_norm_stderr": 0.037777988227480165 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5337620578778135, + "acc_stderr": 0.0283332771095628, + "acc_norm": 0.5337620578778135, + "acc_norm_stderr": 0.0283332771095628 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.43946188340807174, + "acc_stderr": 0.03331092511038179, + "acc_norm": 0.43946188340807174, + "acc_norm_stderr": 0.03331092511038179 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5419847328244275, + "acc_stderr": 0.04369802690578757, + "acc_norm": 0.5419847328244275, + "acc_norm_stderr": 0.04369802690578757 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5707070707070707, + "acc_stderr": 0.035265527246011986, + "acc_norm": 0.5707070707070707, + "acc_norm_stderr": 0.035265527246011986 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5793103448275863, + "acc_stderr": 0.0411391498118926, + "acc_norm": 
0.5793103448275863, + "acc_norm_stderr": 0.0411391498118926 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.04533838195929776, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.04533838195929776 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5546218487394958, + "acc_stderr": 0.03228410626716391, + "acc_norm": 0.5546218487394958, + "acc_norm_stderr": 0.03228410626716391 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5, + "acc_stderr": 0.02535100632816969, + "acc_norm": 0.5, + "acc_norm_stderr": 0.02535100632816969 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.62, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.62, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6018518518518519, + "acc_stderr": 0.04732332615978814, + "acc_norm": 0.6018518518518519, + "acc_norm_stderr": 0.04732332615978814 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.034819048444388045, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.034819048444388045 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5129032258064516, + "acc_stderr": 0.02843453315268186, + "acc_norm": 0.5129032258064516, + "acc_norm_stderr": 0.02843453315268186 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7350427350427351, + "acc_stderr": 0.028911208802749472, + "acc_norm": 0.7350427350427351, + "acc_norm_stderr": 0.028911208802749472 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5018867924528302, + "acc_stderr": 0.030772653642075664, + "acc_norm": 0.5018867924528302, + "acc_norm_stderr": 0.030772653642075664 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4909090909090909, + "acc_stderr": 0.04788339768702861, + 
"acc_norm": 0.4909090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.02944316932303154, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.02944316932303154 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.36423841059602646, + "acc_stderr": 0.03929111781242741, + "acc_norm": 0.36423841059602646, + "acc_norm_stderr": 0.03929111781242741 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6467661691542289, + "acc_stderr": 0.03379790611796777, + "acc_norm": 0.6467661691542289, + "acc_norm_stderr": 0.03379790611796777 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4161849710982659, + "acc_stderr": 0.037585177754049466, + "acc_norm": 0.4161849710982659, + "acc_norm_stderr": 0.037585177754049466 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.36507936507936506, + "acc_stderr": 0.02479606060269995, + "acc_norm": 0.36507936507936506, + "acc_norm_stderr": 0.02479606060269995 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4930555555555556, + "acc_stderr": 0.04180806750294938, + "acc_norm": 0.4930555555555556, + "acc_norm_stderr": 0.04180806750294938 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.62, + "acc_stderr": 0.04878317312145634, + "acc_norm": 0.62, + "acc_norm_stderr": 0.04878317312145634 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5317919075144508, + "acc_stderr": 0.026864624366756656, + "acc_norm": 0.5317919075144508, + "acc_norm_stderr": 0.026864624366756656 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.44171779141104295, + "acc_stderr": 0.03901591825836184, + "acc_norm": 0.44171779141104295, + "acc_norm_stderr": 0.03901591825836184 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.49382716049382713, + 
"acc_stderr": 0.027818623962583295, + "acc_norm": 0.49382716049382713, + "acc_norm_stderr": 0.027818623962583295 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5440414507772021, + "acc_stderr": 0.03594413711272437, + "acc_norm": 0.5440414507772021, + "acc_norm_stderr": 0.03594413711272437 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3684210526315789, + "acc_stderr": 0.04537815354939391, + "acc_norm": 0.3684210526315789, + "acc_norm_stderr": 0.04537815354939391 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5871559633027523, + "acc_stderr": 0.021109128133413913, + "acc_norm": 0.5871559633027523, + "acc_norm_stderr": 0.021109128133413913 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.373015873015873, + "acc_stderr": 0.04325506042017086, + "acc_norm": 0.373015873015873, + "acc_norm_stderr": 0.04325506042017086 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5065359477124183, + "acc_stderr": 0.028627470550556047, + "acc_norm": 0.5065359477124183, + "acc_norm_stderr": 0.028627470550556047 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6942148760330579, + "acc_stderr": 0.04205953933884124, + "acc_norm": 0.6942148760330579, + "acc_norm_stderr": 0.04205953933884124 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.47368421052631576, + "acc_stderr": 0.04063302731486671, + "acc_norm": 0.47368421052631576, + "acc_norm_stderr": 0.04063302731486671 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4019607843137255, + "acc_stderr": 0.01983517648437538, + "acc_norm": 0.4019607843137255, + "acc_norm_stderr": 0.01983517648437538 + }, + 
"harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3049645390070922, + "acc_stderr": 0.027464708442022128, + "acc_norm": 0.3049645390070922, + "acc_norm_stderr": 0.027464708442022128 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3482142857142857, + "acc_stderr": 0.04521829902833585, + "acc_norm": 0.3482142857142857, + "acc_norm_stderr": 0.04521829902833585 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.03400603625538272, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.03400603625538272 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24581005586592178, + "acc_stderr": 0.014400296429225612, + "acc_norm": 0.24581005586592178, + "acc_norm_stderr": 0.014400296429225612 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.57, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.57, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4264705882352941, + "acc_stderr": 0.03004261583271487, + "acc_norm": 0.4264705882352941, + "acc_norm_stderr": 0.03004261583271487 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6081632653061224, + "acc_stderr": 0.031251275910891656, + "acc_norm": 0.6081632653061224, + "acc_norm_stderr": 0.031251275910891656 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6033755274261603, + "acc_stderr": 0.031843998738112236, + "acc_norm": 0.6033755274261603, + "acc_norm_stderr": 0.031843998738112236 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.34876140808344197, + "acc_stderr": 0.012172035157127115, + "acc_norm": 0.34876140808344197, + "acc_norm_stderr": 0.012172035157127115 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5343137254901961, + "acc_stderr": 0.03501038327635897, + 
"acc_norm": 0.5343137254901961, + "acc_norm_stderr": 0.03501038327635897 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5757575757575758, + "acc_stderr": 0.038592681420702636, + "acc_norm": 0.5757575757575758, + "acc_norm_stderr": 0.038592681420702636 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3157894736842105, + "mc1_stderr": 0.01627228795791694, + "mc2": 0.4887242465522298, + "mc2_stderr": 0.015611726455962618 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.48760330578512395, + "acc_stderr": 0.017185069732676528, + "acc_norm": 0.5478158205430933, + "acc_norm_stderr": 0.017111567130916796 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + 
"harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "12thD/ko-Llama-3-8B-sft-v0.3", + "model_sha": "134a44b329a37805306c77e45e932d839cae8baa", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/12thD/ko-gemma-7b-sft-v1.5/result_2024-04-03 05:50:30.json b/12thD/ko-gemma-7b-sft-v1.5/result_2024-04-03 05:50:30.json new file mode 100644 index 0000000000000000000000000000000000000000..7b3c44db81eedad5855f238f30b9ebc356eb0e68 --- /dev/null +++ b/12thD/ko-gemma-7b-sft-v1.5/result_2024-04-03 05:50:30.json @@ 
-0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.20051194539249148, + "acc_stderr": 0.011700318050499368, + "acc_norm": 0.2645051194539249, + "acc_norm_stderr": 0.01288927294931337 + }, + "harness|ko_hellaswag|10": { + "acc": 0.2597092212706632, + "acc_stderr": 0.004375788991216851, + "acc_norm": 0.261700856403107, + "acc_norm_stderr": 0.0043866225891190805 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.2046783625730994, + "acc_stderr": 0.03094445977853321, + "acc_norm": 0.2046783625730994, + "acc_norm_stderr": 0.03094445977853321 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.17475728155339806, + "acc_stderr": 0.037601780060266196, + "acc_norm": 0.17475728155339806, + "acc_norm_stderr": 0.037601780060266196 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.21583652618135377, + "acc_stderr": 0.014711684386139946, + "acc_norm": 0.21583652618135377, + "acc_norm_stderr": 0.014711684386139946 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.22962962962962963, + "acc_stderr": 0.03633384414073462, + "acc_norm": 0.22962962962962963, + "acc_norm_stderr": 0.03633384414073462 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.20851063829787234, + "acc_stderr": 0.026556982117838746, + "acc_norm": 0.20851063829787234, + "acc_norm_stderr": 0.026556982117838746 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.21084337349397592, + "acc_stderr": 0.0317555478662992, + "acc_norm": 0.21084337349397592, + "acc_norm_stderr": 0.0317555478662992 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2990353697749196, + "acc_stderr": 0.02600330111788514, + "acc_norm": 0.2990353697749196, + "acc_norm_stderr": 0.02600330111788514 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.2825112107623318, + "acc_stderr": 0.03021683101150878, + "acc_norm": 0.2825112107623318, + 
"acc_norm_stderr": 0.03021683101150878 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.25190839694656486, + "acc_stderr": 0.038073871163060866, + "acc_norm": 0.25190839694656486, + "acc_norm_stderr": 0.038073871163060866 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816505, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816505 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.23737373737373738, + "acc_stderr": 0.030313710538198885, + "acc_norm": 0.23737373737373738, + "acc_norm_stderr": 0.030313710538198885 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2620689655172414, + "acc_stderr": 0.036646663372252565, + "acc_norm": 0.2620689655172414, + "acc_norm_stderr": 0.036646663372252565 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.1568627450980392, + "acc_stderr": 0.036186648199362445, + "acc_norm": 0.1568627450980392, + "acc_norm_stderr": 0.036186648199362445 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.25630252100840334, + "acc_stderr": 0.02835962087053395, + "acc_norm": 0.25630252100840334, + "acc_norm_stderr": 0.02835962087053395 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.31794871794871793, + "acc_stderr": 0.02361088430892786, + "acc_norm": 0.31794871794871793, + "acc_norm_stderr": 0.02361088430892786 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.19, + "acc_stderr": 0.03942772444036625, + "acc_norm": 0.19, + "acc_norm_stderr": 0.03942772444036625 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036846, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036846 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.21296296296296297, + "acc_stderr": 0.03957835471980981, + "acc_norm": 0.21296296296296297, + "acc_norm_stderr": 0.03957835471980981 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.28078817733990147, + "acc_stderr": 0.03161856335358609, + 
"acc_norm": 0.28078817733990147, + "acc_norm_stderr": 0.03161856335358609 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3161290322580645, + "acc_stderr": 0.026450874489042764, + "acc_norm": 0.3161290322580645, + "acc_norm_stderr": 0.026450874489042764 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.19658119658119658, + "acc_stderr": 0.02603538609895129, + "acc_norm": 0.19658119658119658, + "acc_norm_stderr": 0.02603538609895129 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2830188679245283, + "acc_stderr": 0.027724236492700904, + "acc_norm": 0.2830188679245283, + "acc_norm_stderr": 0.027724236492700904 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.24545454545454545, + "acc_stderr": 0.041220665028782834, + "acc_norm": 0.24545454545454545, + "acc_norm_stderr": 0.041220665028782834 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.29259259259259257, + "acc_stderr": 0.027738969632176088, + "acc_norm": 0.29259259259259257, + "acc_norm_stderr": 0.027738969632176088 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.32450331125827814, + "acc_stderr": 0.03822746937658754, + "acc_norm": 0.32450331125827814, + "acc_norm_stderr": 0.03822746937658754 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.26865671641791045, + "acc_stderr": 0.03134328358208954, + "acc_norm": 0.26865671641791045, + "acc_norm_stderr": 0.03134328358208954 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.32947976878612717, + "acc_stderr": 0.035839017547364134, + "acc_norm": 0.32947976878612717, + "acc_norm_stderr": 0.035839017547364134 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2566137566137566, + "acc_stderr": 0.022494510767503154, + "acc_norm": 0.2566137566137566, + "acc_norm_stderr": 0.022494510767503154 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2916666666666667, + "acc_stderr": 0.038009680605548574, + "acc_norm": 0.2916666666666667, + "acc_norm_stderr": 0.038009680605548574 + }, + 
"harness|ko_mmlu_college_chemistry|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.21965317919075145, + "acc_stderr": 0.022289638852617897, + "acc_norm": 0.21965317919075145, + "acc_norm_stderr": 0.022289638852617897 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.25766871165644173, + "acc_stderr": 0.03436150827846917, + "acc_norm": 0.25766871165644173, + "acc_norm_stderr": 0.03436150827846917 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.23148148148148148, + "acc_stderr": 0.023468429832451138, + "acc_norm": 0.23148148148148148, + "acc_norm_stderr": 0.023468429832451138 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421296, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421296 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.3626943005181347, + "acc_stderr": 0.034697137917043715, + "acc_norm": 0.3626943005181347, + "acc_norm_stderr": 0.034697137917043715 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2894736842105263, + "acc_stderr": 0.04266339443159394, + "acc_norm": 0.2894736842105263, + "acc_norm_stderr": 0.04266339443159394 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3284403669724771, + "acc_stderr": 0.020135902797298395, + "acc_norm": 0.3284403669724771, + "acc_norm_stderr": 0.020135902797298395 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.36507936507936506, + "acc_stderr": 0.04306241259127153, + "acc_norm": 0.36507936507936506, + "acc_norm_stderr": 0.04306241259127153 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.023805186524888156, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 
0.023805186524888156 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.17, + "acc_stderr": 0.03775251680686371, + "acc_norm": 0.17, + "acc_norm_stderr": 0.03775251680686371 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.32231404958677684, + "acc_stderr": 0.04266416363352168, + "acc_norm": 0.32231404958677684, + "acc_norm_stderr": 0.04266416363352168 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3355263157894737, + "acc_stderr": 0.038424985593952694, + "acc_norm": 0.3355263157894737, + "acc_norm_stderr": 0.038424985593952694 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2173202614379085, + "acc_stderr": 0.016684820929148598, + "acc_norm": 0.2173202614379085, + "acc_norm_stderr": 0.016684820929148598 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2624113475177305, + "acc_stderr": 0.026244920349843, + "acc_norm": 0.2624113475177305, + "acc_norm_stderr": 0.026244920349843 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.1875, + "acc_stderr": 0.0370468111477387, + "acc_norm": 0.1875, + "acc_norm_stderr": 0.0370468111477387 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.0340470532865388, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.0340470532865388 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27262569832402234, + "acc_stderr": 0.014893391735249608, + "acc_norm": 0.27262569832402234, + "acc_norm_stderr": 0.014893391735249608 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.18, + "acc_stderr": 0.038612291966536934, + "acc_norm": 0.18, + "acc_norm_stderr": 0.038612291966536934 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4485294117647059, + "acc_stderr": 0.030211479609121593, + "acc_norm": 0.4485294117647059, + 
"acc_norm_stderr": 0.030211479609121593 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4, + "acc_stderr": 0.03136250240935892, + "acc_norm": 0.4, + "acc_norm_stderr": 0.03136250240935892 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2616033755274262, + "acc_stderr": 0.028609516716994934, + "acc_norm": 0.2616033755274262, + "acc_norm_stderr": 0.028609516716994934 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2470664928292047, + "acc_stderr": 0.011015752255279341, + "acc_norm": 0.2470664928292047, + "acc_norm_stderr": 0.011015752255279341 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.25980392156862747, + "acc_stderr": 0.030778554678693257, + "acc_norm": 0.25980392156862747, + "acc_norm_stderr": 0.030778554678693257 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03225078108306289, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03225078108306289 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24969400244798043, + "mc1_stderr": 0.015152286907148125, + "mc2": 0.41054014787859444, + "mc2_stderr": 0.016235535860246012 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.20425029515938606, + "acc_stderr": 0.01386067587817683, + "acc_norm": 0.2632821723730815, + "acc_norm_stderr": 0.01514175219957321 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + 
"harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + 
"harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "12thD/ko-gemma-7b-sft-v1.5", + "model_sha": "917d34440057e05d95620548d7b3b575d95d355a", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/42MARU/GenAI-llama-2-ko-en-instruct-v1/result_2023-10-12 11:14:34.json b/42MARU/GenAI-llama-2-ko-en-instruct-v1/result_2023-10-12 11:14:34.json new file mode 100644 index 0000000000000000000000000000000000000000..019020d68e431f521fe326c7daf85eb7ef0be498 --- /dev/null +++ b/42MARU/GenAI-llama-2-ko-en-instruct-v1/result_2023-10-12 11:14:34.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.39505119453924914, + "acc_stderr": 0.014285898292938165, + "acc_norm": 0.4445392491467577, + "acc_norm_stderr": 0.014521226405627077 + }, + "harness|ko_hellaswag|10": { + "acc": 0.41545508862776337, + "acc_stderr": 0.004917931778593191, + "acc_norm": 0.5571599283011353, + "acc_norm_stderr": 0.004957068377516512 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.47953216374269003, + "acc_stderr": 0.0383161053282193, + "acc_norm": 0.47953216374269003, + "acc_norm_stderr": 0.0383161053282193 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4174757281553398, + "acc_stderr": 0.048828405482122375, + "acc_norm": 0.4174757281553398, + "acc_norm_stderr": 0.048828405482122375 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5108556832694764, + "acc_stderr": 0.017875748840242407, + "acc_norm": 0.5108556832694764, + "acc_norm_stderr": 0.017875748840242407 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4222222222222222, + "acc_stderr": 0.04266763404099582, + "acc_norm": 0.4222222222222222, + "acc_norm_stderr": 0.04266763404099582 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 
0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.32340425531914896, + "acc_stderr": 0.03057944277361033, + "acc_norm": 0.32340425531914896, + "acc_norm_stderr": 0.03057944277361033 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.41566265060240964, + "acc_stderr": 0.03836722176598053, + "acc_norm": 0.41566265060240964, + "acc_norm_stderr": 0.03836722176598053 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4758842443729904, + "acc_stderr": 0.02836504154256457, + "acc_norm": 0.4758842443729904, + "acc_norm_stderr": 0.02836504154256457 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3811659192825112, + "acc_stderr": 0.03259625118416828, + "acc_norm": 0.3811659192825112, + "acc_norm_stderr": 0.03259625118416828 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.46564885496183206, + "acc_stderr": 0.043749285605997376, + "acc_norm": 0.46564885496183206, + "acc_norm_stderr": 0.043749285605997376 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.4696969696969697, + "acc_stderr": 0.03555804051763929, + "acc_norm": 0.4696969696969697, + "acc_norm_stderr": 0.03555804051763929 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4068965517241379, + "acc_stderr": 0.04093793981266237, + "acc_norm": 0.4068965517241379, + "acc_norm_stderr": 0.04093793981266237 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.043364327079931785, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.043364327079931785 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.36134453781512604, + "acc_stderr": 0.031204691225150013, + "acc_norm": 0.36134453781512604, + "acc_norm_stderr": 0.031204691225150013 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 
0.3564102564102564, + "acc_stderr": 0.02428314052946728, + "acc_norm": 0.3564102564102564, + "acc_norm_stderr": 0.02428314052946728 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.47, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.47, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.04766075165356461, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.04766075165356461 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3448275862068966, + "acc_stderr": 0.03344283744280459, + "acc_norm": 0.3448275862068966, + "acc_norm_stderr": 0.03344283744280459 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4483870967741935, + "acc_stderr": 0.028292056830112735, + "acc_norm": 0.4483870967741935, + "acc_norm_stderr": 0.028292056830112735 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6068376068376068, + "acc_stderr": 0.03199957924651047, + "acc_norm": 0.6068376068376068, + "acc_norm_stderr": 0.03199957924651047 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.43018867924528303, + "acc_stderr": 0.030471445867183238, + "acc_norm": 0.43018867924528303, + "acc_norm_stderr": 0.030471445867183238 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4909090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.4909090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2518518518518518, + "acc_stderr": 0.026466117538959916, + "acc_norm": 0.2518518518518518, + "acc_norm_stderr": 0.026466117538959916 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.26490066225165565, + "acc_stderr": 0.03603038545360384, + "acc_norm": 0.26490066225165565, + "acc_norm_stderr": 0.03603038545360384 + }, + 
"harness|ko_mmlu_sociology|5": { + "acc": 0.5124378109452736, + "acc_stderr": 0.0353443984853958, + "acc_norm": 0.5124378109452736, + "acc_norm_stderr": 0.0353443984853958 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3583815028901734, + "acc_stderr": 0.03656343653353159, + "acc_norm": 0.3583815028901734, + "acc_norm_stderr": 0.03656343653353159 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30687830687830686, + "acc_stderr": 0.023752928712112126, + "acc_norm": 0.30687830687830686, + "acc_norm_stderr": 0.023752928712112126 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2986111111111111, + "acc_stderr": 0.03827052357950756, + "acc_norm": 0.2986111111111111, + "acc_norm_stderr": 0.03827052357950756 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932269, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932269 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.53, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.53, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.44508670520231214, + "acc_stderr": 0.02675625512966377, + "acc_norm": 0.44508670520231214, + "acc_norm_stderr": 0.02675625512966377 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3558282208588957, + "acc_stderr": 0.03761521380046734, + "acc_norm": 0.3558282208588957, + "acc_norm_stderr": 0.03761521380046734 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.44135802469135804, + "acc_stderr": 0.027628737155668777, + "acc_norm": 0.44135802469135804, + "acc_norm_stderr": 0.027628737155668777 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.40414507772020725, + "acc_stderr": 0.0354150857888402, + "acc_norm": 0.40414507772020725, + "acc_norm_stderr": 
0.0354150857888402 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.041424397194893624, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.041424397194893624 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.4091743119266055, + "acc_stderr": 0.02108067026443373, + "acc_norm": 0.4091743119266055, + "acc_norm_stderr": 0.02108067026443373 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.1984126984126984, + "acc_stderr": 0.03567016675276863, + "acc_norm": 0.1984126984126984, + "acc_norm_stderr": 0.03567016675276863 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.39869281045751637, + "acc_stderr": 0.02803609227389177, + "acc_norm": 0.39869281045751637, + "acc_norm_stderr": 0.02803609227389177 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6033057851239669, + "acc_stderr": 0.04465869780531009, + "acc_norm": 0.6033057851239669, + "acc_norm_stderr": 0.04465869780531009 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4342105263157895, + "acc_stderr": 0.040335656678483184, + "acc_norm": 0.4342105263157895, + "acc_norm_stderr": 0.040335656678483184 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.0190709855896875, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.0190709855896875 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3191489361702128, + "acc_stderr": 0.027807990141320207, + "acc_norm": 0.3191489361702128, + "acc_norm_stderr": 0.027807990141320207 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.19642857142857142, + "acc_stderr": 0.03770970049347019, + "acc_norm": 0.19642857142857142, + "acc_norm_stderr": 0.03770970049347019 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.25462962962962965, + "acc_stderr": 0.02971127586000534, 
+ "acc_norm": 0.25462962962962965, + "acc_norm_stderr": 0.02971127586000534 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.22426470588235295, + "acc_stderr": 0.02533684856333237, + "acc_norm": 0.22426470588235295, + "acc_norm_stderr": 0.02533684856333237 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4163265306122449, + "acc_stderr": 0.03155782816556164, + "acc_norm": 0.4163265306122449, + "acc_norm_stderr": 0.03155782816556164 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.41350210970464135, + "acc_stderr": 0.03205649904851859, + "acc_norm": 0.41350210970464135, + "acc_norm_stderr": 0.03205649904851859 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.29335071707953064, + "acc_stderr": 0.011628520449582076, + "acc_norm": 0.29335071707953064, + "acc_norm_stderr": 0.011628520449582076 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.3627450980392157, + "acc_stderr": 0.033744993563193555, + "acc_norm": 0.3627450980392157, + "acc_norm_stderr": 0.033744993563193555 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.43636363636363634, + "acc_stderr": 0.03872592983524754, + "acc_norm": 0.43636363636363634, + "acc_norm_stderr": 0.03872592983524754 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2962056303549572, + "mc1_stderr": 0.01598359510181139, + "mc2": 0.4602391231259313, + "mc2_stderr": 0.015191570633369808 + }, + "harness|ko_commongen_v2|2": { + "acc": 
0.4757969303423849, + "acc_stderr": 0.017170202466520748, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.017119172208061504 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + 
"harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "42MARU/GenAI-llama-2-ko-en-instruct-v1", + "model_sha": "aee07500d61a1d5d214cf0bc0040650957cf3da0", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/42MARU/GenAI-llama2-ko-en-dpo-13b-test3/result_2023-11-30 08:08:14.json b/42MARU/GenAI-llama2-ko-en-dpo-13b-test3/result_2023-11-30 08:08:14.json new file mode 100644 index 0000000000000000000000000000000000000000..230e3555a510960013fc3b9b4c895ff20d31ff66 --- /dev/null +++ b/42MARU/GenAI-llama2-ko-en-dpo-13b-test3/result_2023-11-30 08:08:14.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4112627986348123, + "acc_stderr": 0.014379441068522077, + "acc_norm": 0.45733788395904434, + "acc_norm_stderr": 0.014558106543924067 + }, + "harness|ko_hellaswag|10": { + "acc": 0.43288189603664606, + "acc_stderr": 0.004944620712318274, + "acc_norm": 0.5816570404301932, + "acc_norm_stderr": 0.004922789247319874 + }, + "harness|ko_mmlu_world_religions|5": { + 
"acc": 0.5321637426900585, + "acc_stderr": 0.038268824176603704, + "acc_norm": 0.5321637426900585, + "acc_norm_stderr": 0.038268824176603704 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5242718446601942, + "acc_stderr": 0.049449010929737795, + "acc_norm": 0.5242718446601942, + "acc_norm_stderr": 0.049449010929737795 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5517241379310345, + "acc_stderr": 0.01778403453499242, + "acc_norm": 0.5517241379310345, + "acc_norm_stderr": 0.01778403453499242 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.0424463323835323, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.0424463323835323 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.40425531914893614, + "acc_stderr": 0.03208115750788684, + "acc_norm": 0.40425531914893614, + "acc_norm_stderr": 0.03208115750788684 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.43373493975903615, + "acc_stderr": 0.03858158940685515, + "acc_norm": 0.43373493975903615, + "acc_norm_stderr": 0.03858158940685515 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4919614147909968, + "acc_stderr": 0.028394421370984545, + "acc_norm": 0.4919614147909968, + "acc_norm_stderr": 0.028394421370984545 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5022421524663677, + "acc_stderr": 0.033557465352232634, + "acc_norm": 0.5022421524663677, + "acc_norm_stderr": 0.033557465352232634 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48091603053435117, + "acc_stderr": 0.04382094705550989, + "acc_norm": 0.48091603053435117, + "acc_norm_stderr": 0.04382094705550989 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 
0.5858585858585859, + "acc_stderr": 0.035094383488796295, + "acc_norm": 0.5858585858585859, + "acc_norm_stderr": 0.035094383488796295 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.36551724137931035, + "acc_stderr": 0.040131241954243856, + "acc_norm": 0.36551724137931035, + "acc_norm_stderr": 0.040131241954243856 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.04158307533083286, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.04158307533083286 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.46638655462184875, + "acc_stderr": 0.03240501447690071, + "acc_norm": 0.46638655462184875, + "acc_norm_stderr": 0.03240501447690071 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.45384615384615384, + "acc_stderr": 0.02524277098712617, + "acc_norm": 0.45384615384615384, + "acc_norm_stderr": 0.02524277098712617 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.55, + "acc_stderr": 0.04999999999999999, + "acc_norm": 0.55, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5370370370370371, + "acc_stderr": 0.04820403072760627, + "acc_norm": 0.5370370370370371, + "acc_norm_stderr": 0.04820403072760627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.35960591133004927, + "acc_stderr": 0.03376458246509567, + "acc_norm": 0.35960591133004927, + "acc_norm_stderr": 0.03376458246509567 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4612903225806452, + "acc_stderr": 0.02835863485983692, + "acc_norm": 0.4612903225806452, + "acc_norm_stderr": 0.02835863485983692 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6538461538461539, + "acc_stderr": 0.031166957367235903, + "acc_norm": 0.6538461538461539, + "acc_norm_stderr": 0.031166957367235903 + }, + 
"harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4490566037735849, + "acc_stderr": 0.030612730713641092, + "acc_norm": 0.4490566037735849, + "acc_norm_stderr": 0.030612730713641092 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6, + "acc_stderr": 0.0469237132203465, + "acc_norm": 0.6, + "acc_norm_stderr": 0.0469237132203465 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2518518518518518, + "acc_stderr": 0.026466117538959916, + "acc_norm": 0.2518518518518518, + "acc_norm_stderr": 0.026466117538959916 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.03780445850526733, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.03780445850526733 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5572139303482587, + "acc_stderr": 0.03512310964123937, + "acc_norm": 0.5572139303482587, + "acc_norm_stderr": 0.03512310964123937 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.41040462427745666, + "acc_stderr": 0.03750757044895538, + "acc_norm": 0.41040462427745666, + "acc_norm_stderr": 0.03750757044895538 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.29894179894179895, + "acc_stderr": 0.023577604791655812, + "acc_norm": 0.29894179894179895, + "acc_norm_stderr": 0.023577604791655812 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3958333333333333, + "acc_stderr": 0.04089465449325583, + "acc_norm": 0.3958333333333333, + "acc_norm_stderr": 0.04089465449325583 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.65, + "acc_stderr": 0.04793724854411018, + "acc_norm": 0.65, + "acc_norm_stderr": 0.04793724854411018 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5, + "acc_stderr": 0.026919095102908273, + "acc_norm": 0.5, + "acc_norm_stderr": 0.026919095102908273 + }, + 
"harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5828220858895705, + "acc_stderr": 0.03874102859818082, + "acc_norm": 0.5828220858895705, + "acc_norm_stderr": 0.03874102859818082 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.49691358024691357, + "acc_stderr": 0.027820214158594377, + "acc_norm": 0.49691358024691357, + "acc_norm_stderr": 0.027820214158594377 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.538860103626943, + "acc_stderr": 0.03597524411734578, + "acc_norm": 0.538860103626943, + "acc_norm_stderr": 0.03597524411734578 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.21052631578947367, + "acc_stderr": 0.03835153954399419, + "acc_norm": 0.21052631578947367, + "acc_norm_stderr": 0.03835153954399419 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5926605504587156, + "acc_stderr": 0.02106598624441288, + "acc_norm": 0.5926605504587156, + "acc_norm_stderr": 0.02106598624441288 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04216370213557835, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04216370213557835 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.42483660130718953, + "acc_stderr": 0.02830457667314111, + "acc_norm": 0.42483660130718953, + "acc_norm_stderr": 0.02830457667314111 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5950413223140496, + "acc_stderr": 0.04481137755942469, + "acc_norm": 0.5950413223140496, + "acc_norm_stderr": 0.04481137755942469 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.375, + "acc_stderr": 0.039397364351956274, + "acc_norm": 0.375, + "acc_norm_stderr": 0.039397364351956274 + }, + 
"harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4215686274509804, + "acc_stderr": 0.01997742260022747, + "acc_norm": 0.4215686274509804, + "acc_norm_stderr": 0.01997742260022747 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3546099290780142, + "acc_stderr": 0.02853865002887864, + "acc_norm": 0.3546099290780142, + "acc_norm_stderr": 0.02853865002887864 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2767857142857143, + "acc_stderr": 0.04246624336697624, + "acc_norm": 0.2767857142857143, + "acc_norm_stderr": 0.04246624336697624 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.33796296296296297, + "acc_stderr": 0.03225941352631295, + "acc_norm": 0.33796296296296297, + "acc_norm_stderr": 0.03225941352631295 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2435754189944134, + "acc_stderr": 0.01435591196476786, + "acc_norm": 0.2435754189944134, + "acc_norm_stderr": 0.01435591196476786 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3860294117647059, + "acc_stderr": 0.029573269134411124, + "acc_norm": 0.3860294117647059, + "acc_norm_stderr": 0.029573269134411124 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5061224489795918, + "acc_stderr": 0.03200682020163907, + "acc_norm": 0.5061224489795918, + "acc_norm_stderr": 0.03200682020163907 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6751054852320675, + "acc_stderr": 0.030486039389105303, + "acc_norm": 0.6751054852320675, + "acc_norm_stderr": 0.030486039389105303 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3774445893089961, + "acc_stderr": 0.012380680911165804, + 
"acc_norm": 0.3774445893089961, + "acc_norm_stderr": 0.012380680911165804 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5245098039215687, + "acc_stderr": 0.035050931943487976, + "acc_norm": 0.5245098039215687, + "acc_norm_stderr": 0.035050931943487976 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5575757575757576, + "acc_stderr": 0.03878372113711275, + "acc_norm": 0.5575757575757576, + "acc_norm_stderr": 0.03878372113711275 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.31946144430844553, + "mc1_stderr": 0.0163226441829605, + "mc2": 0.4756188079524156, + "mc2_stderr": 0.015396392654893808 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5100354191263282, + "acc_stderr": 0.01718689128689406, + "acc_norm": 0.5832349468713105, + "acc_norm_stderr": 0.01695048914610882 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "42MARU/GenAI-llama2-ko-en-dpo-13b-test3", + "model_sha": "d70fdfed2e0b43ac6715ee5ec24801fd2bd5c25d", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/42MARU/GenAI-llama2-ko-en-dpo-13b-v1/result_2023-11-18 17:03:07.json b/42MARU/GenAI-llama2-ko-en-dpo-13b-v1/result_2023-11-18 
17:03:07.json new file mode 100644 index 0000000000000000000000000000000000000000..ac212570307bbc2d57f23c894a8c2edd8248bf1b --- /dev/null +++ b/42MARU/GenAI-llama2-ko-en-dpo-13b-v1/result_2023-11-18 17:03:07.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.39505119453924914, + "acc_stderr": 0.014285898292938165, + "acc_norm": 0.45819112627986347, + "acc_norm_stderr": 0.014560220308714702 + }, + "harness|ko_hellaswag|10": { + "acc": 0.42561242780322645, + "acc_stderr": 0.004934250390879782, + "acc_norm": 0.569308902609042, + "acc_norm_stderr": 0.004941609820763589 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.47953216374269003, + "acc_stderr": 0.0383161053282193, + "acc_norm": 0.47953216374269003, + "acc_norm_stderr": 0.0383161053282193 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4563106796116505, + "acc_stderr": 0.049318019942204146, + "acc_norm": 0.4563106796116505, + "acc_norm_stderr": 0.049318019942204146 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5210727969348659, + "acc_stderr": 0.01786407678621291, + "acc_norm": 0.5210727969348659, + "acc_norm_stderr": 0.01786407678621291 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4888888888888889, + "acc_stderr": 0.04318275491977976, + "acc_norm": 0.4888888888888889, + "acc_norm_stderr": 0.04318275491977976 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.40425531914893614, + "acc_stderr": 0.032081157507886836, + "acc_norm": 0.40425531914893614, + "acc_norm_stderr": 0.032081157507886836 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39759036144578314, + "acc_stderr": 0.03809973084540218, + "acc_norm": 0.39759036144578314, + "acc_norm_stderr": 0.03809973084540218 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4533762057877814, + "acc_stderr": 0.02827435985489424, + "acc_norm": 
0.4533762057877814, + "acc_norm_stderr": 0.02827435985489424 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.47085201793721976, + "acc_stderr": 0.03350073248773403, + "acc_norm": 0.47085201793721976, + "acc_norm_stderr": 0.03350073248773403 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.42748091603053434, + "acc_stderr": 0.043389203057924, + "acc_norm": 0.42748091603053434, + "acc_norm_stderr": 0.043389203057924 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5151515151515151, + "acc_stderr": 0.0356071651653106, + "acc_norm": 0.5151515151515151, + "acc_norm_stderr": 0.0356071651653106 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.45517241379310347, + "acc_stderr": 0.04149886942192117, + "acc_norm": 0.45517241379310347, + "acc_norm_stderr": 0.04149886942192117 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.16666666666666666, + "acc_stderr": 0.03708284662416544, + "acc_norm": 0.16666666666666666, + "acc_norm_stderr": 0.03708284662416544 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.37815126050420167, + "acc_stderr": 0.031499305777849054, + "acc_norm": 0.37815126050420167, + "acc_norm_stderr": 0.031499305777849054 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.37435897435897436, + "acc_stderr": 0.024537591572830517, + "acc_norm": 0.37435897435897436, + "acc_norm_stderr": 0.024537591572830517 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5185185185185185, + "acc_stderr": 0.04830366024635331, 
+ "acc_norm": 0.5185185185185185, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4236453201970443, + "acc_stderr": 0.034767257476490364, + "acc_norm": 0.4236453201970443, + "acc_norm_stderr": 0.034767257476490364 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.41935483870967744, + "acc_stderr": 0.02807158890109185, + "acc_norm": 0.41935483870967744, + "acc_norm_stderr": 0.02807158890109185 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6367521367521367, + "acc_stderr": 0.03150712523091265, + "acc_norm": 0.6367521367521367, + "acc_norm_stderr": 0.03150712523091265 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.41509433962264153, + "acc_stderr": 0.03032594578928611, + "acc_norm": 0.41509433962264153, + "acc_norm_stderr": 0.03032594578928611 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4818181818181818, + "acc_stderr": 0.04785964010794915, + "acc_norm": 0.4818181818181818, + "acc_norm_stderr": 0.04785964010794915 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.28888888888888886, + "acc_stderr": 0.027634907264178544, + "acc_norm": 0.28888888888888886, + "acc_norm_stderr": 0.027634907264178544 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.32450331125827814, + "acc_stderr": 0.038227469376587525, + "acc_norm": 0.32450331125827814, + "acc_norm_stderr": 0.038227469376587525 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5124378109452736, + "acc_stderr": 0.0353443984853958, + "acc_norm": 0.5124378109452736, + "acc_norm_stderr": 0.0353443984853958 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3815028901734104, + "acc_stderr": 0.03703851193099521, + "acc_norm": 0.3815028901734104, + "acc_norm_stderr": 0.03703851193099521 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.023809523809523857, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.023809523809523857 + }, + 
"harness|ko_mmlu_college_biology|5": { + "acc": 0.3125, + "acc_stderr": 0.038760854559127644, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.038760854559127644 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237101, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237101 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5, + "acc_stderr": 0.026919095102908273, + "acc_norm": 0.5, + "acc_norm_stderr": 0.026919095102908273 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4601226993865031, + "acc_stderr": 0.03915857291436971, + "acc_norm": 0.4601226993865031, + "acc_norm_stderr": 0.03915857291436971 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.027648477877413317, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.027648477877413317 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5233160621761658, + "acc_stderr": 0.036045136724422014, + "acc_norm": 0.5233160621761658, + "acc_norm_stderr": 0.036045136724422014 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2894736842105263, + "acc_stderr": 0.04266339443159394, + "acc_norm": 0.2894736842105263, + "acc_norm_stderr": 0.04266339443159394 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.48990825688073397, + "acc_stderr": 0.021432956203453316, + "acc_norm": 0.48990825688073397, + "acc_norm_stderr": 0.021432956203453316 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.20634920634920634, + "acc_stderr": 0.0361960452412425, + "acc_norm": 0.20634920634920634, + "acc_norm_stderr": 0.0361960452412425 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 
0.4019607843137255, + "acc_stderr": 0.02807415894760066, + "acc_norm": 0.4019607843137255, + "acc_norm_stderr": 0.02807415894760066 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6115702479338843, + "acc_stderr": 0.04449270350068383, + "acc_norm": 0.6115702479338843, + "acc_norm_stderr": 0.04449270350068383 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3684210526315789, + "acc_stderr": 0.03925523381052932, + "acc_norm": 0.3684210526315789, + "acc_norm_stderr": 0.03925523381052932 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4035947712418301, + "acc_stderr": 0.01984828016840117, + "acc_norm": 0.4035947712418301, + "acc_norm_stderr": 0.01984828016840117 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2730496453900709, + "acc_stderr": 0.026577860943307854, + "acc_norm": 0.2730496453900709, + "acc_norm_stderr": 0.026577860943307854 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.26785714285714285, + "acc_stderr": 0.04203277291467762, + "acc_norm": 0.26785714285714285, + "acc_norm_stderr": 0.04203277291467762 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.24074074074074073, + "acc_stderr": 0.029157522184605593, + "acc_norm": 0.24074074074074073, + "acc_norm_stderr": 0.029157522184605593 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_professional_medicine|5": { + 
"acc": 0.3088235294117647, + "acc_stderr": 0.02806499816704009, + "acc_norm": 0.3088235294117647, + "acc_norm_stderr": 0.02806499816704009 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3836734693877551, + "acc_stderr": 0.031130880396235922, + "acc_norm": 0.3836734693877551, + "acc_norm_stderr": 0.031130880396235922 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5654008438818565, + "acc_stderr": 0.03226759995510145, + "acc_norm": 0.5654008438818565, + "acc_norm_stderr": 0.03226759995510145 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.318122555410691, + "acc_stderr": 0.011895407281104074, + "acc_norm": 0.318122555410691, + "acc_norm_stderr": 0.011895407281104074 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.03484941514429231, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.03484941514429231 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6181818181818182, + "acc_stderr": 0.03793713171165634, + "acc_norm": 0.6181818181818182, + "acc_norm_stderr": 0.03793713171165634 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3108935128518972, + "mc1_stderr": 0.016203316673559693, + "mc2": 0.474366186048088, + "mc2_stderr": 0.01540967506791855 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5041322314049587, + "acc_stderr": 0.017189767032130817, + "acc_norm": 0.5525383707201889, + "acc_norm_stderr": 0.017095190301500574 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, 
+ "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + 
"harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "42MARU/GenAI-llama2-ko-en-dpo-13b-v1", + "model_sha": "13d027c0a2069284308f4992d67a202ac2e50b22", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/42MARU/GenAI-llama2-ko-en-dpo-13b-v2/result_2023-11-19 10:36:38.json b/42MARU/GenAI-llama2-ko-en-dpo-13b-v2/result_2023-11-19 10:36:38.json new file mode 100644 index 0000000000000000000000000000000000000000..48002060015dc96a5a052b340d92cbd2be535dc3 --- /dev/null +++ b/42MARU/GenAI-llama2-ko-en-dpo-13b-v2/result_2023-11-19 10:36:38.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.39505119453924914, + "acc_stderr": 0.014285898292938165, + "acc_norm": 0.46075085324232085, + "acc_norm_stderr": 0.014566303676636588 + }, + "harness|ko_hellaswag|10": { + "acc": 0.42640908185620396, + "acc_stderr": 0.004935439955031694, + "acc_norm": 0.5706034654451304, + "acc_norm_stderr": 0.0049397843114489855 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.47953216374269003, + "acc_stderr": 0.0383161053282193, + "acc_norm": 0.47953216374269003, + "acc_norm_stderr": 0.0383161053282193 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.44660194174757284, + "acc_stderr": 0.04922424153458934, + "acc_norm": 0.44660194174757284, + "acc_norm_stderr": 0.04922424153458934 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5197956577266922, + "acc_stderr": 0.017865944827291633, + "acc_norm": 0.5197956577266922, + "acc_norm_stderr": 0.017865944827291633 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4888888888888889, + "acc_stderr": 0.04318275491977976, + "acc_norm": 0.4888888888888889, + "acc_norm_stderr": 0.04318275491977976 + }, 
+ "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4, + "acc_stderr": 0.03202563076101735, + "acc_norm": 0.4, + "acc_norm_stderr": 0.03202563076101735 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4036144578313253, + "acc_stderr": 0.03819486140758398, + "acc_norm": 0.4036144578313253, + "acc_norm_stderr": 0.03819486140758398 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4533762057877814, + "acc_stderr": 0.028274359854894245, + "acc_norm": 0.4533762057877814, + "acc_norm_stderr": 0.028274359854894245 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.47533632286995514, + "acc_stderr": 0.03351695167652628, + "acc_norm": 0.47533632286995514, + "acc_norm_stderr": 0.03351695167652628 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.42748091603053434, + "acc_stderr": 0.043389203057924, + "acc_norm": 0.42748091603053434, + "acc_norm_stderr": 0.043389203057924 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5151515151515151, + "acc_stderr": 0.0356071651653106, + "acc_norm": 0.5151515151515151, + "acc_norm_stderr": 0.0356071651653106 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.45517241379310347, + "acc_stderr": 0.04149886942192117, + "acc_norm": 0.45517241379310347, + "acc_norm_stderr": 0.04149886942192117 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.16666666666666666, + "acc_stderr": 0.03708284662416544, + "acc_norm": 0.16666666666666666, + "acc_norm_stderr": 0.03708284662416544 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.37815126050420167, + "acc_stderr": 0.031499305777849054, + "acc_norm": 0.37815126050420167, + "acc_norm_stderr": 0.031499305777849054 + }, + 
"harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3717948717948718, + "acc_stderr": 0.024503472557110946, + "acc_norm": 0.3717948717948718, + "acc_norm_stderr": 0.024503472557110946 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5185185185185185, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.5185185185185185, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4236453201970443, + "acc_stderr": 0.034767257476490364, + "acc_norm": 0.4236453201970443, + "acc_norm_stderr": 0.034767257476490364 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.41935483870967744, + "acc_stderr": 0.02807158890109185, + "acc_norm": 0.41935483870967744, + "acc_norm_stderr": 0.02807158890109185 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6324786324786325, + "acc_stderr": 0.03158539157745636, + "acc_norm": 0.6324786324786325, + "acc_norm_stderr": 0.03158539157745636 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.41509433962264153, + "acc_stderr": 0.03032594578928611, + "acc_norm": 0.41509433962264153, + "acc_norm_stderr": 0.03032594578928611 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4909090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.4909090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.29259259259259257, + "acc_stderr": 0.02773896963217609, + "acc_norm": 0.29259259259259257, + "acc_norm_stderr": 0.02773896963217609 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.32450331125827814, + "acc_stderr": 0.038227469376587525, + "acc_norm": 0.32450331125827814, + 
"acc_norm_stderr": 0.038227469376587525 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5124378109452736, + "acc_stderr": 0.0353443984853958, + "acc_norm": 0.5124378109452736, + "acc_norm_stderr": 0.0353443984853958 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3815028901734104, + "acc_stderr": 0.03703851193099521, + "acc_norm": 0.3815028901734104, + "acc_norm_stderr": 0.03703851193099521 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.023809523809523857, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.023809523809523857 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3194444444444444, + "acc_stderr": 0.03899073687357335, + "acc_norm": 0.3194444444444444, + "acc_norm_stderr": 0.03899073687357335 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237101, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237101 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5028901734104047, + "acc_stderr": 0.026918645383239015, + "acc_norm": 0.5028901734104047, + "acc_norm_stderr": 0.026918645383239015 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4601226993865031, + "acc_stderr": 0.03915857291436971, + "acc_norm": 0.4601226993865031, + "acc_norm_stderr": 0.03915857291436971 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.44753086419753085, + "acc_stderr": 0.02766713856942271, + "acc_norm": 0.44753086419753085, + "acc_norm_stderr": 0.02766713856942271 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5233160621761658, + "acc_stderr": 0.036045136724422014, + "acc_norm": 
0.5233160621761658, + "acc_norm_stderr": 0.036045136724422014 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2894736842105263, + "acc_stderr": 0.04266339443159394, + "acc_norm": 0.2894736842105263, + "acc_norm_stderr": 0.04266339443159394 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.4917431192660551, + "acc_stderr": 0.021434399918214334, + "acc_norm": 0.4917431192660551, + "acc_norm_stderr": 0.021434399918214334 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.20634920634920634, + "acc_stderr": 0.0361960452412425, + "acc_norm": 0.20634920634920634, + "acc_norm_stderr": 0.0361960452412425 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.39869281045751637, + "acc_stderr": 0.02803609227389177, + "acc_norm": 0.39869281045751637, + "acc_norm_stderr": 0.02803609227389177 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6115702479338843, + "acc_stderr": 0.04449270350068383, + "acc_norm": 0.6115702479338843, + "acc_norm_stderr": 0.04449270350068383 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3684210526315789, + "acc_stderr": 0.03925523381052932, + "acc_norm": 0.3684210526315789, + "acc_norm_stderr": 0.03925523381052932 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4035947712418301, + "acc_stderr": 0.01984828016840117, + "acc_norm": 0.4035947712418301, + "acc_norm_stderr": 0.01984828016840117 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2730496453900709, + "acc_stderr": 0.026577860943307854, + "acc_norm": 0.2730496453900709, + "acc_norm_stderr": 0.026577860943307854 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.26785714285714285, + "acc_stderr": 0.04203277291467762, + "acc_norm": 0.26785714285714285, + "acc_norm_stderr": 0.04203277291467762 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.24074074074074073, + "acc_stderr": 
0.029157522184605593, + "acc_norm": 0.24074074074074073, + "acc_norm_stderr": 0.029157522184605593 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3125, + "acc_stderr": 0.02815637344037142, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.02815637344037142 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3836734693877551, + "acc_stderr": 0.031130880396235922, + "acc_norm": 0.3836734693877551, + "acc_norm_stderr": 0.031130880396235922 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.569620253164557, + "acc_stderr": 0.03223017195937598, + "acc_norm": 0.569620253164557, + "acc_norm_stderr": 0.03223017195937598 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.31747066492829207, + "acc_stderr": 0.011888892068809309, + "acc_norm": 0.31747066492829207, + "acc_norm_stderr": 0.011888892068809309 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.034849415144292316, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.034849415144292316 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6121212121212121, + "acc_stderr": 0.0380491365397101, + "acc_norm": 0.6121212121212121, + "acc_norm_stderr": 0.0380491365397101 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3084455324357405, + "mc1_stderr": 0.01616803938315687, + "mc2": 0.47439440606323957, + "mc2_stderr": 0.015414552807155835 + }, + "harness|ko_commongen_v2|2": { + "acc": 
0.5088547815820543, + "acc_stderr": 0.01718765819933674, + "acc_norm": 0.5548996458087367, + "acc_norm_stderr": 0.017086417431005464 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + 
"harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "42MARU/GenAI-llama2-ko-en-dpo-13b-v2", + "model_sha": "6fd9c176286458a9e802d0955a243f7b538c8e1c", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/42MARU/GenAI-llama2-ko-en-instruct-20B-v1/result_2023-11-05 16:07:53.json b/42MARU/GenAI-llama2-ko-en-instruct-20B-v1/result_2023-11-05 16:07:53.json new file mode 100644 index 0000000000000000000000000000000000000000..8f478826335ca6cced97ce2a5200b6394a8e496a --- /dev/null +++ b/42MARU/GenAI-llama2-ko-en-instruct-20B-v1/result_2023-11-05 16:07:53.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3984641638225256, + "acc_stderr": 0.014306946052735563, + "acc_norm": 0.4616040955631399, + "acc_norm_stderr": 0.01456824555029636 + }, + "harness|ko_hellaswag|10": { + "acc": 0.42162915753833896, + "acc_stderr": 0.004928105880776079, + "acc_norm": 0.5677155945030871, + "acc_norm_stderr": 0.004943809330692697 + }, + "harness|ko_mmlu_world_religions|5": { 
+ "acc": 0.5555555555555556, + "acc_stderr": 0.038110796698335316, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.038110796698335316 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5436893203883495, + "acc_stderr": 0.049318019942204146, + "acc_norm": 0.5436893203883495, + "acc_norm_stderr": 0.049318019942204146 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5491698595146871, + "acc_stderr": 0.01779329757269903, + "acc_norm": 0.5491698595146871, + "acc_norm_stderr": 0.01779329757269903 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.5037037037037037, + "acc_stderr": 0.043192236258113324, + "acc_norm": 0.5037037037037037, + "acc_norm_stderr": 0.043192236258113324 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421255, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421255 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3574468085106383, + "acc_stderr": 0.03132941789476425, + "acc_norm": 0.3574468085106383, + "acc_norm_stderr": 0.03132941789476425 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3855421686746988, + "acc_stderr": 0.03789134424611548, + "acc_norm": 0.3855421686746988, + "acc_norm_stderr": 0.03789134424611548 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.48231511254019294, + "acc_stderr": 0.02838032284907713, + "acc_norm": 0.48231511254019294, + "acc_norm_stderr": 0.02838032284907713 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.47533632286995514, + "acc_stderr": 0.03351695167652628, + "acc_norm": 0.47533632286995514, + "acc_norm_stderr": 0.03351695167652628 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.45038167938931295, + "acc_stderr": 0.04363643698524779, + "acc_norm": 0.45038167938931295, + "acc_norm_stderr": 0.04363643698524779 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_geography|5": { + 
"acc": 0.5555555555555556, + "acc_stderr": 0.035402943770953675, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.035402943770953675 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3793103448275862, + "acc_stderr": 0.04043461861916748, + "acc_norm": 0.3793103448275862, + "acc_norm_stderr": 0.04043461861916748 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.17647058823529413, + "acc_stderr": 0.03793281185307811, + "acc_norm": 0.17647058823529413, + "acc_norm_stderr": 0.03793281185307811 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4369747899159664, + "acc_stderr": 0.03221943636566196, + "acc_norm": 0.4369747899159664, + "acc_norm_stderr": 0.03221943636566196 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4205128205128205, + "acc_stderr": 0.025028610276710855, + "acc_norm": 0.4205128205128205, + "acc_norm_stderr": 0.025028610276710855 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.35960591133004927, + "acc_stderr": 0.033764582465095665, + "acc_norm": 0.35960591133004927, + "acc_norm_stderr": 0.033764582465095665 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.42258064516129035, + "acc_stderr": 0.02810096472427264, + "acc_norm": 0.42258064516129035, + "acc_norm_stderr": 0.02810096472427264 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5982905982905983, + "acc_stderr": 0.03211693751051622, + "acc_norm": 0.5982905982905983, + "acc_norm_stderr": 0.03211693751051622 
+ }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4188679245283019, + "acc_stderr": 0.030365050829115208, + "acc_norm": 0.4188679245283019, + "acc_norm_stderr": 0.030365050829115208 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4727272727272727, + "acc_stderr": 0.04782001791380063, + "acc_norm": 0.4727272727272727, + "acc_norm_stderr": 0.04782001791380063 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.02684205787383371, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.02684205787383371 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33112582781456956, + "acc_stderr": 0.038425817186598696, + "acc_norm": 0.33112582781456956, + "acc_norm_stderr": 0.038425817186598696 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5024875621890548, + "acc_stderr": 0.03535490150137289, + "acc_norm": 0.5024875621890548, + "acc_norm_stderr": 0.03535490150137289 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4508670520231214, + "acc_stderr": 0.037940126746970296, + "acc_norm": 0.4508670520231214, + "acc_norm_stderr": 0.037940126746970296 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.291005291005291, + "acc_stderr": 0.023393826500484875, + "acc_norm": 0.291005291005291, + "acc_norm_stderr": 0.023393826500484875 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3541666666666667, + "acc_stderr": 0.039994111357535424, + "acc_norm": 0.3541666666666667, + "acc_norm_stderr": 0.039994111357535424 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932269, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932269 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.62, + "acc_stderr": 0.048783173121456344, + "acc_norm": 0.62, + "acc_norm_stderr": 0.048783173121456344 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5, + "acc_stderr": 0.026919095102908273, + "acc_norm": 0.5, + "acc_norm_stderr": 
0.026919095102908273 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5153374233128835, + "acc_stderr": 0.03926522378708843, + "acc_norm": 0.5153374233128835, + "acc_norm_stderr": 0.03926522378708843 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5, + "acc_stderr": 0.02782074420373286, + "acc_norm": 0.5, + "acc_norm_stderr": 0.02782074420373286 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5181347150259067, + "acc_stderr": 0.03606065001832919, + "acc_norm": 0.5181347150259067, + "acc_norm_stderr": 0.03606065001832919 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2894736842105263, + "acc_stderr": 0.04266339443159394, + "acc_norm": 0.2894736842105263, + "acc_norm_stderr": 0.04266339443159394 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5431192660550459, + "acc_stderr": 0.021357458785226224, + "acc_norm": 0.5431192660550459, + "acc_norm_stderr": 0.021357458785226224 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30158730158730157, + "acc_stderr": 0.04104947269903394, + "acc_norm": 0.30158730158730157, + "acc_norm_stderr": 0.04104947269903394 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3954248366013072, + "acc_stderr": 0.02799672318063145, + "acc_norm": 0.3954248366013072, + "acc_norm_stderr": 0.02799672318063145 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6198347107438017, + "acc_stderr": 0.04431324501968431, + "acc_norm": 0.6198347107438017, + "acc_norm_stderr": 0.04431324501968431 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.42105263157894735, + "acc_stderr": 0.04017901275981748, + "acc_norm": 0.42105263157894735, + "acc_norm_stderr": 
0.04017901275981748 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3937908496732026, + "acc_stderr": 0.01976621199107307, + "acc_norm": 0.3937908496732026, + "acc_norm_stderr": 0.01976621199107307 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3404255319148936, + "acc_stderr": 0.02826765748265015, + "acc_norm": 0.3404255319148936, + "acc_norm_stderr": 0.02826765748265015 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2767857142857143, + "acc_stderr": 0.04246624336697624, + "acc_norm": 0.2767857142857143, + "acc_norm_stderr": 0.04246624336697624 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.41203703703703703, + "acc_stderr": 0.03356787758160835, + "acc_norm": 0.41203703703703703, + "acc_norm_stderr": 0.03356787758160835 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.29497206703910617, + "acc_stderr": 0.015251931579208185, + "acc_norm": 0.29497206703910617, + "acc_norm_stderr": 0.015251931579208185 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3860294117647059, + "acc_stderr": 0.029573269134411124, + "acc_norm": 0.3860294117647059, + "acc_norm_stderr": 0.029573269134411124 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.46938775510204084, + "acc_stderr": 0.031949171367580624, + "acc_norm": 0.46938775510204084, + "acc_norm_stderr": 0.031949171367580624 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6919831223628692, + "acc_stderr": 0.030052389335605695, + "acc_norm": 0.6919831223628692, + "acc_norm_stderr": 0.030052389335605695 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3683181225554107, + "acc_stderr": 
0.012319403369564639, + "acc_norm": 0.3683181225554107, + "acc_norm_stderr": 0.012319403369564639 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5245098039215687, + "acc_stderr": 0.03505093194348798, + "acc_norm": 0.5245098039215687, + "acc_norm_stderr": 0.03505093194348798 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5757575757575758, + "acc_stderr": 0.03859268142070262, + "acc_norm": 0.5757575757575758, + "acc_norm_stderr": 0.03859268142070262 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2962056303549572, + "mc1_stderr": 0.015983595101811392, + "mc2": 0.458694749783158, + "mc2_stderr": 0.015135220490705375 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.45336481700118064, + "acc_stderr": 0.017115418225226862, + "acc_norm": 0.564344746162928, + "acc_norm_stderr": 0.017047415229476313 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + 
"harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "42MARU/GenAI-llama2-ko-en-instruct-20B-v1", + "model_sha": "4de05113ecc02aa2da28893d8e2827912ebe0d20", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/42MARU/GenAI-llama2-ko-en-instruct-v2-13b/result_2023-10-18 01:40:38.json 
b/42MARU/GenAI-llama2-ko-en-instruct-v2-13b/result_2023-10-18 01:40:38.json new file mode 100644 index 0000000000000000000000000000000000000000..1f0fdffbf4b5df08259c8a13f05ba920a05cd5fb --- /dev/null +++ b/42MARU/GenAI-llama2-ko-en-instruct-v2-13b/result_2023-10-18 01:40:38.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3848122866894198, + "acc_stderr": 0.014218371065251095, + "acc_norm": 0.4402730375426621, + "acc_norm_stderr": 0.014506769524804243 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4190400318661621, + "acc_stderr": 0.0049239357498424945, + "acc_norm": 0.5560645289782912, + "acc_norm_stderr": 0.004958314114266494 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5614035087719298, + "acc_stderr": 0.038057975055904594, + "acc_norm": 0.5614035087719298, + "acc_norm_stderr": 0.038057975055904594 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6019417475728155, + "acc_stderr": 0.04846748253977238, + "acc_norm": 0.6019417475728155, + "acc_norm_stderr": 0.04846748253977238 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5287356321839081, + "acc_stderr": 0.017850410794380173, + "acc_norm": 0.5287356321839081, + "acc_norm_stderr": 0.017850410794380173 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4222222222222222, + "acc_stderr": 0.04266763404099582, + "acc_norm": 0.4222222222222222, + "acc_norm_stderr": 0.04266763404099582 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3404255319148936, + "acc_stderr": 0.030976692998534443, + "acc_norm": 0.3404255319148936, + "acc_norm_stderr": 0.030976692998534443 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39759036144578314, + "acc_stderr": 0.038099730845402184, + "acc_norm": 0.39759036144578314, + "acc_norm_stderr": 0.038099730845402184 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 
0.5273311897106109, + "acc_stderr": 0.028355633568328188, + "acc_norm": 0.5273311897106109, + "acc_norm_stderr": 0.028355633568328188 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4798206278026906, + "acc_stderr": 0.033530461674123, + "acc_norm": 0.4798206278026906, + "acc_norm_stderr": 0.033530461674123 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5114503816793893, + "acc_stderr": 0.043841400240780176, + "acc_norm": 0.5114503816793893, + "acc_norm_stderr": 0.043841400240780176 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5353535353535354, + "acc_stderr": 0.03553436368828061, + "acc_norm": 0.5353535353535354, + "acc_norm_stderr": 0.03553436368828061 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4827586206896552, + "acc_stderr": 0.04164188720169377, + "acc_norm": 0.4827586206896552, + "acc_norm_stderr": 0.04164188720169377 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.04220773659171452, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.04220773659171452 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.49159663865546216, + "acc_stderr": 0.03247390276569669, + "acc_norm": 0.49159663865546216, + "acc_norm_stderr": 0.03247390276569669 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.46153846153846156, + "acc_stderr": 0.025275892070240634, + "acc_norm": 0.46153846153846156, + "acc_norm_stderr": 0.025275892070240634 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5277777777777778, + 
"acc_stderr": 0.04826217294139894, + "acc_norm": 0.5277777777777778, + "acc_norm_stderr": 0.04826217294139894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3399014778325123, + "acc_stderr": 0.033327690684107895, + "acc_norm": 0.3399014778325123, + "acc_norm_stderr": 0.033327690684107895 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4806451612903226, + "acc_stderr": 0.0284226874043121, + "acc_norm": 0.4806451612903226, + "acc_norm_stderr": 0.0284226874043121 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6752136752136753, + "acc_stderr": 0.03067902276549883, + "acc_norm": 0.6752136752136753, + "acc_norm_stderr": 0.03067902276549883 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.47547169811320755, + "acc_stderr": 0.030735822206205615, + "acc_norm": 0.47547169811320755, + "acc_norm_stderr": 0.030735822206205615 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.04769300568972745, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.04769300568972745 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.027309140588230172, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.027309140588230172 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33112582781456956, + "acc_stderr": 0.038425817186598696, + "acc_norm": 0.33112582781456956, + "acc_norm_stderr": 0.038425817186598696 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5671641791044776, + "acc_stderr": 0.03503490923673282, + "acc_norm": 0.5671641791044776, + "acc_norm_stderr": 0.03503490923673282 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4393063583815029, + "acc_stderr": 0.037842719328874674, + "acc_norm": 0.4393063583815029, + "acc_norm_stderr": 0.037842719328874674 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.0242785680243077, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 
0.0242785680243077 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4027777777777778, + "acc_stderr": 0.04101405519842425, + "acc_norm": 0.4027777777777778, + "acc_norm_stderr": 0.04101405519842425 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.34, + "acc_stderr": 0.047609522856952344, + "acc_norm": 0.34, + "acc_norm_stderr": 0.047609522856952344 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.63, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.63, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.49421965317919075, + "acc_stderr": 0.026917296179149116, + "acc_norm": 0.49421965317919075, + "acc_norm_stderr": 0.026917296179149116 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.48466257668711654, + "acc_stderr": 0.039265223787088445, + "acc_norm": 0.48466257668711654, + "acc_norm_stderr": 0.039265223787088445 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.027815973433878014, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.027815973433878014 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.04605661864718381, + "acc_norm": 0.3, + "acc_norm_stderr": 0.04605661864718381 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5440414507772021, + "acc_stderr": 0.03594413711272436, + "acc_norm": 0.5440414507772021, + "acc_norm_stderr": 0.03594413711272436 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.040969851398436716, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.040969851398436716 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5559633027522936, + "acc_stderr": 0.021302621211654518, + "acc_norm": 0.5559633027522936, + "acc_norm_stderr": 0.021302621211654518 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30158730158730157, + "acc_stderr": 0.04104947269903394, + "acc_norm": 0.30158730158730157, + 
"acc_norm_stderr": 0.04104947269903394 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4673202614379085, + "acc_stderr": 0.028568699752225875, + "acc_norm": 0.4673202614379085, + "acc_norm_stderr": 0.028568699752225875 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7272727272727273, + "acc_stderr": 0.04065578140908705, + "acc_norm": 0.7272727272727273, + "acc_norm_stderr": 0.04065578140908705 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.47368421052631576, + "acc_stderr": 0.04063302731486671, + "acc_norm": 0.47368421052631576, + "acc_norm_stderr": 0.04063302731486671 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3741830065359477, + "acc_stderr": 0.019576953122088833, + "acc_norm": 0.3741830065359477, + "acc_norm_stderr": 0.019576953122088833 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.31560283687943264, + "acc_stderr": 0.027724989449509314, + "acc_norm": 0.31560283687943264, + "acc_norm_stderr": 0.027724989449509314 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.22321428571428573, + "acc_stderr": 0.039523019677025116, + "acc_norm": 0.22321428571428573, + "acc_norm_stderr": 0.039523019677025116 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.033509916046960436, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.033509916046960436 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.264804469273743, + "acc_stderr": 0.014756906483260664, + "acc_norm": 0.264804469273743, + "acc_norm_stderr": 0.014756906483260664 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.39, + "acc_stderr": 
0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.39705882352941174, + "acc_stderr": 0.029722152099280058, + "acc_norm": 0.39705882352941174, + "acc_norm_stderr": 0.029722152099280058 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.49387755102040815, + "acc_stderr": 0.032006820201639086, + "acc_norm": 0.49387755102040815, + "acc_norm_stderr": 0.032006820201639086 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5738396624472574, + "acc_stderr": 0.03219035703131774, + "acc_norm": 0.5738396624472574, + "acc_norm_stderr": 0.03219035703131774 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.35071707953063885, + "acc_stderr": 0.012187773370741518, + "acc_norm": 0.35071707953063885, + "acc_norm_stderr": 0.012187773370741518 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4950980392156863, + "acc_stderr": 0.03509143375606786, + "acc_norm": 0.4950980392156863, + "acc_norm_stderr": 0.03509143375606786 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5696969696969697, + "acc_stderr": 0.03866225962879077, + "acc_norm": 0.5696969696969697, + "acc_norm_stderr": 0.03866225962879077 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.27539779681762544, + "mc1_stderr": 0.015638135667775523, + "mc2": 0.44227632802507094, + "mc2_stderr": 0.015242459306682204 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5088547815820543, + "acc_stderr": 0.017187658199336743, + "acc_norm": 0.5608028335301063, + "acc_norm_stderr": 0.017062775744780705 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + 
"harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 
1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "42MARU/GenAI-llama2-ko-en-instruct-v2-13b", + "model_sha": "9f429309fc6b939d08c659ab4666f6e80324dcd1", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/42MARU/GenAI-llama2-ko-en-instruct-v3-13B/result_2023-11-02 01:15:35.json b/42MARU/GenAI-llama2-ko-en-instruct-v3-13B/result_2023-11-02 01:15:35.json new file mode 100644 index 0000000000000000000000000000000000000000..4cbd5ed51a3e7af0d6c3d63fd38ee646065de7c5 --- /dev/null +++ b/42MARU/GenAI-llama2-ko-en-instruct-v3-13B/result_2023-11-02 01:15:35.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3967576791808874, + "acc_stderr": 0.014296513020180646, + "acc_norm": 0.454778156996587, + "acc_norm_stderr": 0.014551507060836355 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4164509061939853, + "acc_stderr": 0.004919626380645517, + "acc_norm": 0.5536745668193587, + "acc_norm_stderr": 0.004960947388535101 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5380116959064327, + "acc_stderr": 0.038237270928823064, + "acc_norm": 0.5380116959064327, + "acc_norm_stderr": 0.038237270928823064 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6019417475728155, + "acc_stderr": 0.048467482539772386, + "acc_norm": 0.6019417475728155, + "acc_norm_stderr": 0.048467482539772386 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5159642401021711, + "acc_stderr": 0.017870847506081738, + "acc_norm": 0.5159642401021711, + "acc_norm_stderr": 0.017870847506081738 + }, + 
"harness|ko_mmlu_anatomy|5": { + "acc": 0.42962962962962964, + "acc_stderr": 0.04276349494376599, + "acc_norm": 0.42962962962962964, + "acc_norm_stderr": 0.04276349494376599 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.37446808510638296, + "acc_stderr": 0.031639106653672915, + "acc_norm": 0.37446808510638296, + "acc_norm_stderr": 0.031639106653672915 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.45180722891566266, + "acc_stderr": 0.03874371556587953, + "acc_norm": 0.45180722891566266, + "acc_norm_stderr": 0.03874371556587953 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5627009646302251, + "acc_stderr": 0.0281739177617629, + "acc_norm": 0.5627009646302251, + "acc_norm_stderr": 0.0281739177617629 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3811659192825112, + "acc_stderr": 0.032596251184168284, + "acc_norm": 0.3811659192825112, + "acc_norm_stderr": 0.032596251184168284 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.46564885496183206, + "acc_stderr": 0.043749285605997376, + "acc_norm": 0.46564885496183206, + "acc_norm_stderr": 0.043749285605997376 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5757575757575758, + "acc_stderr": 0.035212249088415866, + "acc_norm": 0.5757575757575758, + "acc_norm_stderr": 0.035212249088415866 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.45517241379310347, + "acc_stderr": 0.04149886942192117, + "acc_norm": 0.45517241379310347, + "acc_norm_stderr": 0.04149886942192117 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.18627450980392157, + "acc_stderr": 0.038739587141493524, + "acc_norm": 0.18627450980392157, + "acc_norm_stderr": 
0.038739587141493524 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4495798319327731, + "acc_stderr": 0.03231293497137707, + "acc_norm": 0.4495798319327731, + "acc_norm_stderr": 0.03231293497137707 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4641025641025641, + "acc_stderr": 0.025285585990017834, + "acc_norm": 0.4641025641025641, + "acc_norm_stderr": 0.025285585990017834 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.04793724854411019, + "acc_norm": 0.35, + "acc_norm_stderr": 0.04793724854411019 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5277777777777778, + "acc_stderr": 0.04826217294139894, + "acc_norm": 0.5277777777777778, + "acc_norm_stderr": 0.04826217294139894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.35467980295566504, + "acc_stderr": 0.03366124489051449, + "acc_norm": 0.35467980295566504, + "acc_norm_stderr": 0.03366124489051449 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5096774193548387, + "acc_stderr": 0.02843867799890955, + "acc_norm": 0.5096774193548387, + "acc_norm_stderr": 0.02843867799890955 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6837606837606838, + "acc_stderr": 0.030463656747340254, + "acc_norm": 0.6837606837606838, + "acc_norm_stderr": 0.030463656747340254 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4641509433962264, + "acc_stderr": 0.030693675018458003, + "acc_norm": 0.4641509433962264, + "acc_norm_stderr": 0.030693675018458003 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.44545454545454544, + "acc_stderr": 0.047605488214603246, + "acc_norm": 0.44545454545454544, + "acc_norm_stderr": 0.047605488214603246 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.02730914058823017, + 
"acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.02730914058823017 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.03757949922943343, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.03757949922943343 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5920398009950248, + "acc_stderr": 0.03475116365194092, + "acc_norm": 0.5920398009950248, + "acc_norm_stderr": 0.03475116365194092 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3988439306358382, + "acc_stderr": 0.03733626655383509, + "acc_norm": 0.3988439306358382, + "acc_norm_stderr": 0.03733626655383509 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.02326651221373056, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.02326651221373056 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4027777777777778, + "acc_stderr": 0.04101405519842424, + "acc_norm": 0.4027777777777778, + "acc_norm_stderr": 0.04101405519842424 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.64, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.64, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5028901734104047, + "acc_stderr": 0.02691864538323901, + "acc_norm": 0.5028901734104047, + "acc_norm_stderr": 0.02691864538323901 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4723926380368098, + "acc_stderr": 0.03922378290610991, + "acc_norm": 0.4723926380368098, + "acc_norm_stderr": 0.03922378290610991 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4783950617283951, + "acc_stderr": 0.02779476010500874, + "acc_norm": 0.4783950617283951, + "acc_norm_stderr": 0.02779476010500874 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421296, + 
"acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421296 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.538860103626943, + "acc_stderr": 0.035975244117345775, + "acc_norm": 0.538860103626943, + "acc_norm_stderr": 0.035975244117345775 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.042270544512321984, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.042270544512321984 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5229357798165137, + "acc_stderr": 0.0214147570581755, + "acc_norm": 0.5229357798165137, + "acc_norm_stderr": 0.0214147570581755 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3412698412698413, + "acc_stderr": 0.04240799327574925, + "acc_norm": 0.3412698412698413, + "acc_norm_stderr": 0.04240799327574925 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.477124183006536, + "acc_stderr": 0.028599936776089775, + "acc_norm": 0.477124183006536, + "acc_norm_stderr": 0.028599936776089775 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6776859504132231, + "acc_stderr": 0.04266416363352167, + "acc_norm": 0.6776859504132231, + "acc_norm_stderr": 0.04266416363352167 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.46710526315789475, + "acc_stderr": 0.040601270352363966, + "acc_norm": 0.46710526315789475, + "acc_norm_stderr": 0.040601270352363966 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3660130718954248, + "acc_stderr": 0.019488025745529672, + "acc_norm": 0.3660130718954248, + "acc_norm_stderr": 0.019488025745529672 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.35106382978723405, + "acc_stderr": 0.028473501272963775, + "acc_norm": 0.35106382978723405, + "acc_norm_stderr": 0.028473501272963775 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25, + "acc_stderr": 
0.04109974682633932, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04109974682633932 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.375, + "acc_stderr": 0.033016908987210894, + "acc_norm": 0.375, + "acc_norm_stderr": 0.033016908987210894 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2547486033519553, + "acc_stderr": 0.014572650383409167, + "acc_norm": 0.2547486033519553, + "acc_norm_stderr": 0.014572650383409167 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.04960449637488584, + "acc_norm": 0.42, + "acc_norm_stderr": 0.04960449637488584 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.45955882352941174, + "acc_stderr": 0.03027332507734575, + "acc_norm": 0.45955882352941174, + "acc_norm_stderr": 0.03027332507734575 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4897959183673469, + "acc_stderr": 0.03200255347893782, + "acc_norm": 0.4897959183673469, + "acc_norm_stderr": 0.03200255347893782 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6033755274261603, + "acc_stderr": 0.03184399873811225, + "acc_norm": 0.6033755274261603, + "acc_norm_stderr": 0.03184399873811225 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3285528031290743, + "acc_stderr": 0.011996027247502912, + "acc_norm": 0.3285528031290743, + "acc_norm_stderr": 0.011996027247502912 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.49019607843137253, + "acc_stderr": 0.035086373586305716, + "acc_norm": 0.49019607843137253, + "acc_norm_stderr": 0.035086373586305716 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5575757575757576, + "acc_stderr": 0.03878372113711275, + "acc_norm": 0.5575757575757576, + "acc_norm_stderr": 0.03878372113711275 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.27906976744186046, + 
"mc1_stderr": 0.01570210709062789, + "mc2": 0.44866578973581106, + "mc2_stderr": 0.015416926437342405 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.48406139315230223, + "acc_stderr": 0.017181617837190192, + "acc_norm": 0.5619834710743802, + "acc_norm_stderr": 0.01705775370216029 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + 
"harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "42MARU/GenAI-llama2-ko-en-instruct-v3-13B", + "model_sha": "199c2113f09f153bce1ad7aac35e6e756a99b89b", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/42MARU/GenAI-llama2-ko-en-instruct-v3-13b/result_2023-11-01 18:54:40.json b/42MARU/GenAI-llama2-ko-en-instruct-v3-13b/result_2023-11-01 18:54:40.json new file mode 100644 index 0000000000000000000000000000000000000000..a66de01a7aaa411a9c8ebe3fe34e77f99cbae6ca --- /dev/null +++ b/42MARU/GenAI-llama2-ko-en-instruct-v3-13b/result_2023-11-01 18:54:40.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3967576791808874, + "acc_stderr": 0.014296513020180646, + "acc_norm": 0.454778156996587, + "acc_norm_stderr": 0.014551507060836355 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4164509061939853, + "acc_stderr": 
0.004919626380645517, + "acc_norm": 0.5536745668193587, + "acc_norm_stderr": 0.004960947388535101 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5380116959064327, + "acc_stderr": 0.038237270928823064, + "acc_norm": 0.5380116959064327, + "acc_norm_stderr": 0.038237270928823064 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6019417475728155, + "acc_stderr": 0.048467482539772386, + "acc_norm": 0.6019417475728155, + "acc_norm_stderr": 0.048467482539772386 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5159642401021711, + "acc_stderr": 0.017870847506081738, + "acc_norm": 0.5159642401021711, + "acc_norm_stderr": 0.017870847506081738 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.42962962962962964, + "acc_stderr": 0.04276349494376599, + "acc_norm": 0.42962962962962964, + "acc_norm_stderr": 0.04276349494376599 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.37446808510638296, + "acc_stderr": 0.031639106653672915, + "acc_norm": 0.37446808510638296, + "acc_norm_stderr": 0.031639106653672915 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.45180722891566266, + "acc_stderr": 0.03874371556587953, + "acc_norm": 0.45180722891566266, + "acc_norm_stderr": 0.03874371556587953 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5627009646302251, + "acc_stderr": 0.0281739177617629, + "acc_norm": 0.5627009646302251, + "acc_norm_stderr": 0.0281739177617629 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3811659192825112, + "acc_stderr": 0.032596251184168284, + "acc_norm": 0.3811659192825112, + "acc_norm_stderr": 0.032596251184168284 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.46564885496183206, + "acc_stderr": 0.043749285605997376, + "acc_norm": 0.46564885496183206, + "acc_norm_stderr": 0.043749285605997376 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.42, + 
"acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5757575757575758, + "acc_stderr": 0.035212249088415866, + "acc_norm": 0.5757575757575758, + "acc_norm_stderr": 0.035212249088415866 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.45517241379310347, + "acc_stderr": 0.04149886942192117, + "acc_norm": 0.45517241379310347, + "acc_norm_stderr": 0.04149886942192117 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.18627450980392157, + "acc_stderr": 0.038739587141493524, + "acc_norm": 0.18627450980392157, + "acc_norm_stderr": 0.038739587141493524 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4495798319327731, + "acc_stderr": 0.03231293497137707, + "acc_norm": 0.4495798319327731, + "acc_norm_stderr": 0.03231293497137707 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4641025641025641, + "acc_stderr": 0.025285585990017834, + "acc_norm": 0.4641025641025641, + "acc_norm_stderr": 0.025285585990017834 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.04793724854411019, + "acc_norm": 0.35, + "acc_norm_stderr": 0.04793724854411019 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5277777777777778, + "acc_stderr": 0.04826217294139894, + "acc_norm": 0.5277777777777778, + "acc_norm_stderr": 0.04826217294139894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.35467980295566504, + "acc_stderr": 0.03366124489051449, + "acc_norm": 0.35467980295566504, + "acc_norm_stderr": 0.03366124489051449 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5096774193548387, + "acc_stderr": 0.02843867799890955, + "acc_norm": 0.5096774193548387, + "acc_norm_stderr": 0.02843867799890955 + }, + 
"harness|ko_mmlu_marketing|5": { + "acc": 0.6837606837606838, + "acc_stderr": 0.030463656747340254, + "acc_norm": 0.6837606837606838, + "acc_norm_stderr": 0.030463656747340254 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4641509433962264, + "acc_stderr": 0.030693675018458003, + "acc_norm": 0.4641509433962264, + "acc_norm_stderr": 0.030693675018458003 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.44545454545454544, + "acc_stderr": 0.047605488214603246, + "acc_norm": 0.44545454545454544, + "acc_norm_stderr": 0.047605488214603246 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.02730914058823017, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.02730914058823017 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.03757949922943343, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.03757949922943343 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5920398009950248, + "acc_stderr": 0.03475116365194092, + "acc_norm": 0.5920398009950248, + "acc_norm_stderr": 0.03475116365194092 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3988439306358382, + "acc_stderr": 0.03733626655383509, + "acc_norm": 0.3988439306358382, + "acc_norm_stderr": 0.03733626655383509 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.02326651221373056, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.02326651221373056 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4027777777777778, + "acc_stderr": 0.04101405519842424, + "acc_norm": 0.4027777777777778, + "acc_norm_stderr": 0.04101405519842424 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.64, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.64, + "acc_norm_stderr": 
0.04824181513244218 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5028901734104047, + "acc_stderr": 0.02691864538323901, + "acc_norm": 0.5028901734104047, + "acc_norm_stderr": 0.02691864538323901 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4723926380368098, + "acc_stderr": 0.03922378290610991, + "acc_norm": 0.4723926380368098, + "acc_norm_stderr": 0.03922378290610991 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4783950617283951, + "acc_stderr": 0.02779476010500874, + "acc_norm": 0.4783950617283951, + "acc_norm_stderr": 0.02779476010500874 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421296, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421296 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.538860103626943, + "acc_stderr": 0.035975244117345775, + "acc_norm": 0.538860103626943, + "acc_norm_stderr": 0.035975244117345775 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.042270544512321984, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.042270544512321984 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5229357798165137, + "acc_stderr": 0.0214147570581755, + "acc_norm": 0.5229357798165137, + "acc_norm_stderr": 0.0214147570581755 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3412698412698413, + "acc_stderr": 0.04240799327574925, + "acc_norm": 0.3412698412698413, + "acc_norm_stderr": 0.04240799327574925 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.477124183006536, + "acc_stderr": 0.028599936776089775, + "acc_norm": 0.477124183006536, + "acc_norm_stderr": 0.028599936776089775 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6776859504132231, + "acc_stderr": 0.04266416363352167, + "acc_norm": 0.6776859504132231, + "acc_norm_stderr": 
0.04266416363352167 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.46710526315789475, + "acc_stderr": 0.040601270352363966, + "acc_norm": 0.46710526315789475, + "acc_norm_stderr": 0.040601270352363966 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3660130718954248, + "acc_stderr": 0.019488025745529672, + "acc_norm": 0.3660130718954248, + "acc_norm_stderr": 0.019488025745529672 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.35106382978723405, + "acc_stderr": 0.028473501272963775, + "acc_norm": 0.35106382978723405, + "acc_norm_stderr": 0.028473501272963775 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25, + "acc_stderr": 0.04109974682633932, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04109974682633932 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.375, + "acc_stderr": 0.033016908987210894, + "acc_norm": 0.375, + "acc_norm_stderr": 0.033016908987210894 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2547486033519553, + "acc_stderr": 0.014572650383409167, + "acc_norm": 0.2547486033519553, + "acc_norm_stderr": 0.014572650383409167 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.04960449637488584, + "acc_norm": 0.42, + "acc_norm_stderr": 0.04960449637488584 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.45955882352941174, + "acc_stderr": 0.03027332507734575, + "acc_norm": 0.45955882352941174, + "acc_norm_stderr": 0.03027332507734575 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4897959183673469, + "acc_stderr": 0.03200255347893782, + "acc_norm": 0.4897959183673469, + "acc_norm_stderr": 0.03200255347893782 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6033755274261603, + "acc_stderr": 0.03184399873811225, + "acc_norm": 0.6033755274261603, + "acc_norm_stderr": 
0.03184399873811225 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3285528031290743, + "acc_stderr": 0.011996027247502912, + "acc_norm": 0.3285528031290743, + "acc_norm_stderr": 0.011996027247502912 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.49019607843137253, + "acc_stderr": 0.035086373586305716, + "acc_norm": 0.49019607843137253, + "acc_norm_stderr": 0.035086373586305716 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5575757575757576, + "acc_stderr": 0.03878372113711275, + "acc_norm": 0.5575757575757576, + "acc_norm_stderr": 0.03878372113711275 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.27906976744186046, + "mc1_stderr": 0.01570210709062789, + "mc2": 0.4486611820923937, + "mc2_stderr": 0.015416976946375454 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.48406139315230223, + "acc_stderr": 0.017181617837190192, + "acc_norm": 0.5619834710743802, + "acc_norm_stderr": 0.01705775370216029 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, 
+ "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "42MARU/GenAI-llama2-ko-en-instruct-v3-13b", + "model_sha": "199c2113f09f153bce1ad7aac35e6e756a99b89b", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file 
diff --git a/42MARU/GenAI-llama2-ko-en-instruct-v4-13B/result_2023-11-09 09:34:14.json b/42MARU/GenAI-llama2-ko-en-instruct-v4-13B/result_2023-11-09 09:34:14.json new file mode 100644 index 0000000000000000000000000000000000000000..8e21dcbfd6f324033fc14a45506c2ee101f8548e --- /dev/null +++ b/42MARU/GenAI-llama2-ko-en-instruct-v4-13B/result_2023-11-09 09:34:14.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4087030716723549, + "acc_stderr": 0.014365750345427006, + "acc_norm": 0.4564846416382253, + "acc_norm_stderr": 0.01455594976049644 + }, + "harness|ko_hellaswag|10": { + "acc": 0.43168691495717987, + "acc_stderr": 0.0049429906231311166, + "acc_norm": 0.5795658235411273, + "acc_norm_stderr": 0.0049261984839487115 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5321637426900585, + "acc_stderr": 0.038268824176603704, + "acc_norm": 0.5321637426900585, + "acc_norm_stderr": 0.038268824176603704 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.49514563106796117, + "acc_stderr": 0.049505043821289195, + "acc_norm": 0.49514563106796117, + "acc_norm_stderr": 0.049505043821289195 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.017769250583533246, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.017769250583533246 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.0424463323835323, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.0424463323835323 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4, + "acc_stderr": 0.03202563076101735, + "acc_norm": 0.4, + "acc_norm_stderr": 0.03202563076101735 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4457831325301205, + "acc_stderr": 0.038695433234721015, + "acc_norm": 0.4457831325301205, + "acc_norm_stderr": 
0.038695433234721015 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4855305466237942, + "acc_stderr": 0.028386198084177673, + "acc_norm": 0.4855305466237942, + "acc_norm_stderr": 0.028386198084177673 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5022421524663677, + "acc_stderr": 0.033557465352232634, + "acc_norm": 0.5022421524663677, + "acc_norm_stderr": 0.033557465352232634 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4732824427480916, + "acc_stderr": 0.04379024936553894, + "acc_norm": 0.4732824427480916, + "acc_norm_stderr": 0.04379024936553894 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5909090909090909, + "acc_stderr": 0.035029757994130085, + "acc_norm": 0.5909090909090909, + "acc_norm_stderr": 0.035029757994130085 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3724137931034483, + "acc_stderr": 0.0402873153294756, + "acc_norm": 0.3724137931034483, + "acc_norm_stderr": 0.0402873153294756 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.04158307533083286, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.04158307533083286 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.453781512605042, + "acc_stderr": 0.03233943468182088, + "acc_norm": 0.453781512605042, + "acc_norm_stderr": 0.03233943468182088 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.45897435897435895, + "acc_stderr": 0.025265525491284295, + "acc_norm": 0.45897435897435895, + "acc_norm_stderr": 0.025265525491284295 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + 
"acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5462962962962963, + "acc_stderr": 0.048129173245368216, + "acc_norm": 0.5462962962962963, + "acc_norm_stderr": 0.048129173245368216 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.35467980295566504, + "acc_stderr": 0.03366124489051449, + "acc_norm": 0.35467980295566504, + "acc_norm_stderr": 0.03366124489051449 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.45806451612903226, + "acc_stderr": 0.028343787250540636, + "acc_norm": 0.45806451612903226, + "acc_norm_stderr": 0.028343787250540636 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6495726495726496, + "acc_stderr": 0.0312561082442188, + "acc_norm": 0.6495726495726496, + "acc_norm_stderr": 0.0312561082442188 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4490566037735849, + "acc_stderr": 0.030612730713641092, + "acc_norm": 0.4490566037735849, + "acc_norm_stderr": 0.030612730713641092 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6, + "acc_stderr": 0.0469237132203465, + "acc_norm": 0.6, + "acc_norm_stderr": 0.0469237132203465 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2518518518518518, + "acc_stderr": 0.026466117538959916, + "acc_norm": 0.2518518518518518, + "acc_norm_stderr": 0.026466117538959916 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.03780445850526733, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.03780445850526733 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5671641791044776, + "acc_stderr": 0.03503490923673281, + "acc_norm": 0.5671641791044776, + "acc_norm_stderr": 0.03503490923673281 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4161849710982659, + "acc_stderr": 0.037585177754049466, + "acc_norm": 0.4161849710982659, + "acc_norm_stderr": 0.037585177754049466 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30423280423280424, + 
"acc_stderr": 0.023695415009463087, + "acc_norm": 0.30423280423280424, + "acc_norm_stderr": 0.023695415009463087 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3958333333333333, + "acc_stderr": 0.04089465449325583, + "acc_norm": 0.3958333333333333, + "acc_norm_stderr": 0.04089465449325583 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.64, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.64, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.49710982658959535, + "acc_stderr": 0.02691864538323901, + "acc_norm": 0.49710982658959535, + "acc_norm_stderr": 0.02691864538323901 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5766871165644172, + "acc_stderr": 0.03881891213334383, + "acc_norm": 0.5766871165644172, + "acc_norm_stderr": 0.03881891213334383 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.49691358024691357, + "acc_stderr": 0.027820214158594377, + "acc_norm": 0.49691358024691357, + "acc_norm_stderr": 0.027820214158594377 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5440414507772021, + "acc_stderr": 0.03594413711272436, + "acc_norm": 0.5440414507772021, + "acc_norm_stderr": 0.03594413711272436 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.21052631578947367, + "acc_stderr": 0.03835153954399419, + "acc_norm": 0.21052631578947367, + "acc_norm_stderr": 0.03835153954399419 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5871559633027523, + "acc_stderr": 0.02110912813341391, + "acc_norm": 0.5871559633027523, + "acc_norm_stderr": 0.02110912813341391 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 
0.3333333333333333, + "acc_stderr": 0.04216370213557835, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04216370213557835 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4215686274509804, + "acc_stderr": 0.02827549015679143, + "acc_norm": 0.4215686274509804, + "acc_norm_stderr": 0.02827549015679143 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5950413223140496, + "acc_stderr": 0.04481137755942469, + "acc_norm": 0.5950413223140496, + "acc_norm_stderr": 0.04481137755942469 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.375, + "acc_stderr": 0.039397364351956274, + "acc_norm": 0.375, + "acc_norm_stderr": 0.039397364351956274 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.42810457516339867, + "acc_stderr": 0.0200176292142131, + "acc_norm": 0.42810457516339867, + "acc_norm_stderr": 0.0200176292142131 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3404255319148936, + "acc_stderr": 0.028267657482650144, + "acc_norm": 0.3404255319148936, + "acc_norm_stderr": 0.028267657482650144 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2767857142857143, + "acc_stderr": 0.04246624336697625, + "acc_norm": 0.2767857142857143, + "acc_norm_stderr": 0.04246624336697625 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.33796296296296297, + "acc_stderr": 0.03225941352631295, + "acc_norm": 0.33796296296296297, + "acc_norm_stderr": 0.03225941352631295 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2435754189944134, + "acc_stderr": 0.01435591196476786, + "acc_norm": 0.2435754189944134, + "acc_norm_stderr": 0.01435591196476786 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + 
"harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.02952009569768775, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.02952009569768775 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5020408163265306, + "acc_stderr": 0.0320089533497105, + "acc_norm": 0.5020408163265306, + "acc_norm_stderr": 0.0320089533497105 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6751054852320675, + "acc_stderr": 0.030486039389105303, + "acc_norm": 0.6751054852320675, + "acc_norm_stderr": 0.030486039389105303 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3741851368970013, + "acc_stderr": 0.012359335618172063, + "acc_norm": 0.3741851368970013, + "acc_norm_stderr": 0.012359335618172063 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5294117647058824, + "acc_stderr": 0.03503235296367993, + "acc_norm": 0.5294117647058824, + "acc_norm_stderr": 0.03503235296367993 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5575757575757576, + "acc_stderr": 0.03878372113711275, + "acc_norm": 0.5575757575757576, + "acc_norm_stderr": 0.03878372113711275 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3243574051407589, + "mc1_stderr": 0.01638797677964794, + "mc2": 0.4753344144954286, + "mc2_stderr": 0.015470233894001158 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.500590318772137, + "acc_stderr": 0.01719034212344859, + "acc_norm": 0.5726092089728453, + "acc_norm_stderr": 0.017008129844823156 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + 
"harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + 
"harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "42MARU/GenAI-llama2-ko-en-instruct-v4-13B", + "model_sha": "fabf605d23d96e548908ffe9f0ad49dae01c46f8", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/42MARU/GenAI-llama2-ko-en-instruct-v5-13B/result_2023-11-16 17:37:54.json b/42MARU/GenAI-llama2-ko-en-instruct-v5-13B/result_2023-11-16 17:37:54.json new file mode 100644 index 0000000000000000000000000000000000000000..28ab0ab7f6dd0de1fbe25a5d1aa8496e63929e49 --- /dev/null +++ b/42MARU/GenAI-llama2-ko-en-instruct-v5-13B/result_2023-11-16 17:37:54.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3822525597269625, + "acc_stderr": 0.014200454049979272, + "acc_norm": 0.43600682593856654, + "acc_norm_stderr": 0.014491225699230918 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4221270663214499, + "acc_stderr": 0.00492889189587429, + "acc_norm": 0.5567616012746465, + "acc_norm_stderr": 0.004957524197900418 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4619883040935672, + "acc_stderr": 0.03823727092882307, + "acc_norm": 0.4619883040935672, + "acc_norm_stderr": 0.03823727092882307 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.49514563106796117, + "acc_stderr": 0.04950504382128921, + "acc_norm": 0.49514563106796117, + "acc_norm_stderr": 0.04950504382128921 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5274584929757343, + "acc_stderr": 0.017852981266633944, + 
"acc_norm": 0.5274584929757343, + "acc_norm_stderr": 0.017852981266633944 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4740740740740741, + "acc_stderr": 0.04313531696750573, + "acc_norm": 0.4740740740740741, + "acc_norm_stderr": 0.04313531696750573 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206824, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206824 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.40425531914893614, + "acc_stderr": 0.03208115750788684, + "acc_norm": 0.40425531914893614, + "acc_norm_stderr": 0.03208115750788684 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39156626506024095, + "acc_stderr": 0.03799857454479637, + "acc_norm": 0.39156626506024095, + "acc_norm_stderr": 0.03799857454479637 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.47266881028938906, + "acc_stderr": 0.028355633568328174, + "acc_norm": 0.47266881028938906, + "acc_norm_stderr": 0.028355633568328174 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.47533632286995514, + "acc_stderr": 0.03351695167652628, + "acc_norm": 0.47533632286995514, + "acc_norm_stderr": 0.03351695167652628 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4580152671755725, + "acc_stderr": 0.04369802690578756, + "acc_norm": 0.4580152671755725, + "acc_norm_stderr": 0.04369802690578756 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.51010101010101, + "acc_stderr": 0.035616254886737454, + "acc_norm": 0.51010101010101, + "acc_norm_stderr": 0.035616254886737454 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4896551724137931, + "acc_stderr": 0.04165774775728763, + "acc_norm": 0.4896551724137931, + "acc_norm_stderr": 0.04165774775728763 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 
0.04023382273617747, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.04023382273617747 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.40756302521008403, + "acc_stderr": 0.031918633744784645, + "acc_norm": 0.40756302521008403, + "acc_norm_stderr": 0.031918633744784645 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.39487179487179486, + "acc_stderr": 0.024784316942156367, + "acc_norm": 0.39487179487179486, + "acc_norm_stderr": 0.024784316942156367 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.53, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.53, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5277777777777778, + "acc_stderr": 0.04826217294139894, + "acc_norm": 0.5277777777777778, + "acc_norm_stderr": 0.04826217294139894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39408866995073893, + "acc_stderr": 0.03438157967036545, + "acc_norm": 0.39408866995073893, + "acc_norm_stderr": 0.03438157967036545 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.44193548387096776, + "acc_stderr": 0.028251557906849734, + "acc_norm": 0.44193548387096776, + "acc_norm_stderr": 0.028251557906849734 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6324786324786325, + "acc_stderr": 0.031585391577456365, + "acc_norm": 0.6324786324786325, + "acc_norm_stderr": 0.031585391577456365 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4188679245283019, + "acc_stderr": 0.030365050829115208, + "acc_norm": 0.4188679245283019, + "acc_norm_stderr": 0.030365050829115208 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4090909090909091, + "acc_stderr": 0.04709306978661896, + "acc_norm": 0.4090909090909091, + "acc_norm_stderr": 0.04709306978661896 + }, + 
"harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2740740740740741, + "acc_stderr": 0.027195934804085626, + "acc_norm": 0.2740740740740741, + "acc_norm_stderr": 0.027195934804085626 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.03710185726119995, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.03710185726119995 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5522388059701493, + "acc_stderr": 0.03516184772952167, + "acc_norm": 0.5522388059701493, + "acc_norm_stderr": 0.03516184772952167 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3815028901734104, + "acc_stderr": 0.03703851193099521, + "acc_norm": 0.3815028901734104, + "acc_norm_stderr": 0.03703851193099521 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2804232804232804, + "acc_stderr": 0.02313528797432563, + "acc_norm": 0.2804232804232804, + "acc_norm_stderr": 0.02313528797432563 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3472222222222222, + "acc_stderr": 0.039812405437178615, + "acc_norm": 0.3472222222222222, + "acc_norm_stderr": 0.039812405437178615 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5115606936416185, + "acc_stderr": 0.026911898686377906, + "acc_norm": 0.5115606936416185, + "acc_norm_stderr": 0.026911898686377906 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.44785276073619634, + "acc_stderr": 0.03906947479456601, + "acc_norm": 0.44785276073619634, + "acc_norm_stderr": 0.03906947479456601 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.43209876543209874, + "acc_stderr": 0.02756301097160667, + "acc_norm": 0.43209876543209874, + "acc_norm_stderr": 
0.02756301097160667 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.538860103626943, + "acc_stderr": 0.035975244117345775, + "acc_norm": 0.538860103626943, + "acc_norm_stderr": 0.035975244117345775 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2982456140350877, + "acc_stderr": 0.04303684033537316, + "acc_norm": 0.2982456140350877, + "acc_norm_stderr": 0.04303684033537316 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5009174311926605, + "acc_stderr": 0.021437287056051215, + "acc_norm": 0.5009174311926605, + "acc_norm_stderr": 0.021437287056051215 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.0393253768039287, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.0393253768039287 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.434640522875817, + "acc_stderr": 0.028384256704883037, + "acc_norm": 0.434640522875817, + "acc_norm_stderr": 0.028384256704883037 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5950413223140496, + "acc_stderr": 0.04481137755942469, + "acc_norm": 0.5950413223140496, + "acc_norm_stderr": 0.04481137755942469 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3684210526315789, + "acc_stderr": 0.03925523381052932, + "acc_norm": 0.3684210526315789, + "acc_norm_stderr": 0.03925523381052932 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.380718954248366, + "acc_stderr": 0.019643801557924806, + "acc_norm": 0.380718954248366, + "acc_norm_stderr": 0.019643801557924806 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.31560283687943264, + "acc_stderr": 0.027724989449509314, + "acc_norm": 
0.31560283687943264, + "acc_norm_stderr": 0.027724989449509314 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2767857142857143, + "acc_stderr": 0.042466243366976235, + "acc_norm": 0.2767857142857143, + "acc_norm_stderr": 0.042466243366976235 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.26851851851851855, + "acc_stderr": 0.030225226160012386, + "acc_norm": 0.26851851851851855, + "acc_norm_stderr": 0.030225226160012386 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2446927374301676, + "acc_stderr": 0.014378169884098424, + "acc_norm": 0.2446927374301676, + "acc_norm_stderr": 0.014378169884098424 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3382352941176471, + "acc_stderr": 0.028739328513983576, + "acc_norm": 0.3382352941176471, + "acc_norm_stderr": 0.028739328513983576 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.40816326530612246, + "acc_stderr": 0.03146465712827424, + "acc_norm": 0.40816326530612246, + "acc_norm_stderr": 0.03146465712827424 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.569620253164557, + "acc_stderr": 0.03223017195937598, + "acc_norm": 0.569620253164557, + "acc_norm_stderr": 0.03223017195937598 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.33116036505867014, + "acc_stderr": 0.01202012819598575, + "acc_norm": 0.33116036505867014, + "acc_norm_stderr": 0.01202012819598575 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.45098039215686275, + "acc_stderr": 0.03492406104163614, + "acc_norm": 0.45098039215686275, + "acc_norm_stderr": 0.03492406104163614 + }, + 
"harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5636363636363636, + "acc_stderr": 0.03872592983524754, + "acc_norm": 0.5636363636363636, + "acc_norm_stderr": 0.03872592983524754 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.29253365973072215, + "mc1_stderr": 0.015925597445286165, + "mc2": 0.4591418911312825, + "mc2_stderr": 0.015363002653584545 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4911452184179457, + "acc_stderr": 0.017187658199336736, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.017119172208061504 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + 
"harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "42MARU/GenAI-llama2-ko-en-instruct-v5-13B", + "model_sha": "e625b2673e2a0839e7d3fc0f2a844e9966404678", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/42MARU/GenAI-llama2-ko-en-instruct-v6-13B/result_2023-11-27 02:12:26.json b/42MARU/GenAI-llama2-ko-en-instruct-v6-13B/result_2023-11-27 02:12:26.json new file mode 100644 index 0000000000000000000000000000000000000000..ca45303f7163cf60269b679775f5fff31b3374c7 --- /dev/null +++ b/42MARU/GenAI-llama2-ko-en-instruct-v6-13B/result_2023-11-27 02:12:26.json @@ -0,0 +1,444 @@ +{ + "results": 
{ + "harness|ko_arc_challenge|25": { + "acc": 0.3916382252559727, + "acc_stderr": 0.014264122124938215, + "acc_norm": 0.45051194539249145, + "acc_norm_stderr": 0.014539646098471627 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4272057359091814, + "acc_stderr": 0.004936616428922639, + "acc_norm": 0.5610436168094005, + "acc_norm_stderr": 0.004952454721934797 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4678362573099415, + "acc_stderr": 0.03826882417660369, + "acc_norm": 0.4678362573099415, + "acc_norm_stderr": 0.03826882417660369 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4854368932038835, + "acc_stderr": 0.04948637324026637, + "acc_norm": 0.4854368932038835, + "acc_norm_stderr": 0.04948637324026637 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5210727969348659, + "acc_stderr": 0.017864076786212903, + "acc_norm": 0.5210727969348659, + "acc_norm_stderr": 0.017864076786212903 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4148148148148148, + "acc_stderr": 0.04256193767901407, + "acc_norm": 0.4148148148148148, + "acc_norm_stderr": 0.04256193767901407 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206824, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206824 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39574468085106385, + "acc_stderr": 0.03196758697835362, + "acc_norm": 0.39574468085106385, + "acc_norm_stderr": 0.03196758697835362 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42771084337349397, + "acc_stderr": 0.03851597683718533, + "acc_norm": 0.42771084337349397, + "acc_norm_stderr": 0.03851597683718533 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4565916398713826, + "acc_stderr": 0.028290869054197604, + "acc_norm": 0.4565916398713826, + "acc_norm_stderr": 0.028290869054197604 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.47533632286995514, + "acc_stderr": 0.03351695167652629, + "acc_norm": 0.47533632286995514, + "acc_norm_stderr": 0.03351695167652629 
+ }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4580152671755725, + "acc_stderr": 0.04369802690578756, + "acc_norm": 0.4580152671755725, + "acc_norm_stderr": 0.04369802690578756 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939098, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939098 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5202020202020202, + "acc_stderr": 0.03559443565563919, + "acc_norm": 0.5202020202020202, + "acc_norm_stderr": 0.03559443565563919 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4068965517241379, + "acc_stderr": 0.04093793981266237, + "acc_norm": 0.4068965517241379, + "acc_norm_stderr": 0.04093793981266237 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.04220773659171453, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.04220773659171453 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.40756302521008403, + "acc_stderr": 0.031918633744784645, + "acc_norm": 0.40756302521008403, + "acc_norm_stderr": 0.031918633744784645 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4230769230769231, + "acc_stderr": 0.02504919787604233, + "acc_norm": 0.4230769230769231, + "acc_norm_stderr": 0.02504919787604233 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.57, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.57, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39408866995073893, + "acc_stderr": 0.034381579670365446, + "acc_norm": 0.39408866995073893, + 
"acc_norm_stderr": 0.034381579670365446 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.45483870967741935, + "acc_stderr": 0.028327743091561056, + "acc_norm": 0.45483870967741935, + "acc_norm_stderr": 0.028327743091561056 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6410256410256411, + "acc_stderr": 0.03142616993791924, + "acc_norm": 0.6410256410256411, + "acc_norm_stderr": 0.03142616993791924 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.45660377358490567, + "acc_stderr": 0.03065674869673943, + "acc_norm": 0.45660377358490567, + "acc_norm_stderr": 0.03065674869673943 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4727272727272727, + "acc_stderr": 0.04782001791380063, + "acc_norm": 0.4727272727272727, + "acc_norm_stderr": 0.04782001791380063 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24814814814814815, + "acc_stderr": 0.0263357394040558, + "acc_norm": 0.24814814814814815, + "acc_norm_stderr": 0.0263357394040558 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.03802039760107903, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.03802039760107903 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.572139303482587, + "acc_stderr": 0.03498541988407795, + "acc_norm": 0.572139303482587, + "acc_norm_stderr": 0.03498541988407795 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3699421965317919, + "acc_stderr": 0.03681229633394319, + "acc_norm": 0.3699421965317919, + "acc_norm_stderr": 0.03681229633394319 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2724867724867725, + "acc_stderr": 0.022930973071633345, + "acc_norm": 0.2724867724867725, + "acc_norm_stderr": 0.022930973071633345 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3263888888888889, + "acc_stderr": 0.03921067198982266, + "acc_norm": 0.3263888888888889, + "acc_norm_stderr": 0.03921067198982266 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.36, + 
"acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4421965317919075, + "acc_stderr": 0.026738603643807403, + "acc_norm": 0.4421965317919075, + "acc_norm_stderr": 0.026738603643807403 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.44785276073619634, + "acc_stderr": 0.03906947479456601, + "acc_norm": 0.44785276073619634, + "acc_norm_stderr": 0.03906947479456601 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4567901234567901, + "acc_stderr": 0.027716661650194038, + "acc_norm": 0.4567901234567901, + "acc_norm_stderr": 0.027716661650194038 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.49740932642487046, + "acc_stderr": 0.03608390745384487, + "acc_norm": 0.49740932642487046, + "acc_norm_stderr": 0.03608390745384487 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.20175438596491227, + "acc_stderr": 0.03775205013583639, + "acc_norm": 0.20175438596491227, + "acc_norm_stderr": 0.03775205013583639 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.4935779816513762, + "acc_stderr": 0.021435554820013077, + "acc_norm": 0.4935779816513762, + "acc_norm_stderr": 0.021435554820013077 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.04134913018303316, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.04134913018303316 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.42810457516339867, + "acc_stderr": 0.02833239748366427, + "acc_norm": 0.42810457516339867, + "acc_norm_stderr": 0.02833239748366427 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 
0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5950413223140496, + "acc_stderr": 0.04481137755942469, + "acc_norm": 0.5950413223140496, + "acc_norm_stderr": 0.04481137755942469 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3881578947368421, + "acc_stderr": 0.03965842097512744, + "acc_norm": 0.3881578947368421, + "acc_norm_stderr": 0.03965842097512744 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3660130718954248, + "acc_stderr": 0.019488025745529672, + "acc_norm": 0.3660130718954248, + "acc_norm_stderr": 0.019488025745529672 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2801418439716312, + "acc_stderr": 0.02678917235114024, + "acc_norm": 0.2801418439716312, + "acc_norm_stderr": 0.02678917235114024 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25892857142857145, + "acc_stderr": 0.04157751539865629, + "acc_norm": 0.25892857142857145, + "acc_norm_stderr": 0.04157751539865629 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3101851851851852, + "acc_stderr": 0.031546962856566295, + "acc_norm": 0.3101851851851852, + "acc_norm_stderr": 0.031546962856566295 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2446927374301676, + "acc_stderr": 0.014378169884098424, + "acc_norm": 0.2446927374301676, + "acc_norm_stderr": 0.014378169884098424 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.44, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.33088235294117646, + "acc_stderr": 0.028582709753898428, + "acc_norm": 0.33088235294117646, + "acc_norm_stderr": 0.028582709753898428 + }, + 
"harness|ko_mmlu_security_studies|5": { + "acc": 0.46938775510204084, + "acc_stderr": 0.031949171367580624, + "acc_norm": 0.46938775510204084, + "acc_norm_stderr": 0.031949171367580624 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5485232067510548, + "acc_stderr": 0.032393600173974704, + "acc_norm": 0.5485232067510548, + "acc_norm_stderr": 0.032393600173974704 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.31486310299869624, + "acc_stderr": 0.011862561755715945, + "acc_norm": 0.31486310299869624, + "acc_norm_stderr": 0.011862561755715945 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.034849415144292316, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.034849415144292316 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5393939393939394, + "acc_stderr": 0.03892207016552012, + "acc_norm": 0.5393939393939394, + "acc_norm_stderr": 0.03892207016552012 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3023255813953488, + "mc1_stderr": 0.01607750926613303, + "mc2": 0.45710797981768625, + "mc2_stderr": 0.015464643764155465 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.46871310507674147, + "acc_stderr": 0.017156666859785456, + "acc_norm": 0.5619834710743802, + "acc_norm_stderr": 0.017057753702160294 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + 
"harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + 
"harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "42MARU/GenAI-llama2-ko-en-instruct-v6-13B", + "model_sha": "f24326c48f4edb60bc3bdc186b65e0fcb9254c1e", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/42MARU/GenAI-llama2-ko-en-instruct-v7-13B/result_2023-12-03 17:19:11.json b/42MARU/GenAI-llama2-ko-en-instruct-v7-13B/result_2023-12-03 17:19:11.json new file mode 100644 index 0000000000000000000000000000000000000000..5a1c4933b81421155b683d58e8a40826bdc4a3c5 --- /dev/null +++ b/42MARU/GenAI-llama2-ko-en-instruct-v7-13B/result_2023-12-03 17:19:11.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3916382252559727, + "acc_stderr": 0.014264122124938213, + "acc_norm": 0.4564846416382253, + "acc_norm_stderr": 0.014555949760496435 + }, + "harness|ko_hellaswag|10": { + "acc": 0.42272455686118304, + "acc_stderr": 0.00492982833760698, + "acc_norm": 0.5592511451902011, + "acc_norm_stderr": 0.004954622308739005 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.47953216374269003, + "acc_stderr": 0.0383161053282193, + "acc_norm": 0.47953216374269003, + "acc_norm_stderr": 0.0383161053282193 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.46601941747572817, + "acc_stderr": 0.04939291447273481, + "acc_norm": 0.46601941747572817, + "acc_norm_stderr": 0.04939291447273481 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5146871008939975, + "acc_stderr": 0.017872248024429122, + "acc_norm": 0.5146871008939975, + "acc_norm_stderr": 0.017872248024429122 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45185185185185184, + "acc_stderr": 0.04299268905480863, + "acc_norm": 0.45185185185185184, + "acc_norm_stderr": 0.04299268905480863 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 
0.045604802157206824, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206824 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4297872340425532, + "acc_stderr": 0.03236214467715564, + "acc_norm": 0.4297872340425532, + "acc_norm_stderr": 0.03236214467715564 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.40963855421686746, + "acc_stderr": 0.038284011150790206, + "acc_norm": 0.40963855421686746, + "acc_norm_stderr": 0.038284011150790206 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4887459807073955, + "acc_stderr": 0.028390897396863537, + "acc_norm": 0.4887459807073955, + "acc_norm_stderr": 0.028390897396863537 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.515695067264574, + "acc_stderr": 0.0335412657542081, + "acc_norm": 0.515695067264574, + "acc_norm_stderr": 0.0335412657542081 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4351145038167939, + "acc_stderr": 0.04348208051644858, + "acc_norm": 0.4351145038167939, + "acc_norm_stderr": 0.04348208051644858 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5050505050505051, + "acc_stderr": 0.035621707606254015, + "acc_norm": 0.5050505050505051, + "acc_norm_stderr": 0.035621707606254015 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.35172413793103446, + "acc_stderr": 0.03979236637497411, + "acc_norm": 0.35172413793103446, + "acc_norm_stderr": 0.03979236637497411 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237655, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237655 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3697478991596639, + "acc_stderr": 0.031357095996135904, + "acc_norm": 0.3697478991596639, + "acc_norm_stderr": 0.031357095996135904 + }, + 
"harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.38974358974358975, + "acc_stderr": 0.024726967886647074, + "acc_norm": 0.38974358974358975, + "acc_norm_stderr": 0.024726967886647074 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5185185185185185, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.5185185185185185, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3842364532019704, + "acc_stderr": 0.0342239856565755, + "acc_norm": 0.3842364532019704, + "acc_norm_stderr": 0.0342239856565755 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.43548387096774194, + "acc_stderr": 0.028206225591502737, + "acc_norm": 0.43548387096774194, + "acc_norm_stderr": 0.028206225591502737 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6410256410256411, + "acc_stderr": 0.03142616993791924, + "acc_norm": 0.6410256410256411, + "acc_norm_stderr": 0.03142616993791924 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.43018867924528303, + "acc_stderr": 0.030471445867183238, + "acc_norm": 0.43018867924528303, + "acc_norm_stderr": 0.030471445867183238 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.44545454545454544, + "acc_stderr": 0.047605488214603246, + "acc_norm": 0.44545454545454544, + "acc_norm_stderr": 0.047605488214603246 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24444444444444444, + "acc_stderr": 0.026202766534652148, + "acc_norm": 0.24444444444444444, + "acc_norm_stderr": 0.026202766534652148 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2582781456953642, + "acc_stderr": 0.035737053147634576, + "acc_norm": 
0.2582781456953642, + "acc_norm_stderr": 0.035737053147634576 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.527363184079602, + "acc_stderr": 0.03530235517334682, + "acc_norm": 0.527363184079602, + "acc_norm_stderr": 0.03530235517334682 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3815028901734104, + "acc_stderr": 0.03703851193099522, + "acc_norm": 0.3815028901734104, + "acc_norm_stderr": 0.03703851193099522 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2698412698412698, + "acc_stderr": 0.022860838309232072, + "acc_norm": 0.2698412698412698, + "acc_norm_stderr": 0.022860838309232072 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.039420826399272135, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.039420826399272135 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.48265895953757226, + "acc_stderr": 0.026902900458666647, + "acc_norm": 0.48265895953757226, + "acc_norm_stderr": 0.026902900458666647 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.44171779141104295, + "acc_stderr": 0.03901591825836184, + "acc_norm": 0.44171779141104295, + "acc_norm_stderr": 0.03901591825836184 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.44753086419753085, + "acc_stderr": 0.027667138569422708, + "acc_norm": 0.44753086419753085, + "acc_norm_stderr": 0.027667138569422708 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.49740932642487046, + "acc_stderr": 
0.03608390745384486, + "acc_norm": 0.49740932642487046, + "acc_norm_stderr": 0.03608390745384486 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.041857744240220575, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.041857744240220575 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.48807339449541287, + "acc_stderr": 0.021431223617362233, + "acc_norm": 0.48807339449541287, + "acc_norm_stderr": 0.021431223617362233 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.23809523809523808, + "acc_stderr": 0.038095238095238106, + "acc_norm": 0.23809523809523808, + "acc_norm_stderr": 0.038095238095238106 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4019607843137255, + "acc_stderr": 0.028074158947600663, + "acc_norm": 0.4019607843137255, + "acc_norm_stderr": 0.028074158947600663 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5371900826446281, + "acc_stderr": 0.04551711196104218, + "acc_norm": 0.5371900826446281, + "acc_norm_stderr": 0.04551711196104218 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3355263157894737, + "acc_stderr": 0.038424985593952694, + "acc_norm": 0.3355263157894737, + "acc_norm_stderr": 0.038424985593952694 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.37254901960784315, + "acc_stderr": 0.019559646809215923, + "acc_norm": 0.37254901960784315, + "acc_norm_stderr": 0.019559646809215923 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.30141843971631205, + "acc_stderr": 0.02737412888263115, + "acc_norm": 0.30141843971631205, + "acc_norm_stderr": 0.02737412888263115 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25892857142857145, + "acc_stderr": 0.04157751539865629, + "acc_norm": 0.25892857142857145, + "acc_norm_stderr": 0.04157751539865629 + }, + 
"harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.030546745264953195, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.030546745264953195 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2435754189944134, + "acc_stderr": 0.014355911964767857, + "acc_norm": 0.2435754189944134, + "acc_norm_stderr": 0.014355911964767857 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.33088235294117646, + "acc_stderr": 0.02858270975389843, + "acc_norm": 0.33088235294117646, + "acc_norm_stderr": 0.02858270975389843 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.44081632653061226, + "acc_stderr": 0.03178419114175364, + "acc_norm": 0.44081632653061226, + "acc_norm_stderr": 0.03178419114175364 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5569620253164557, + "acc_stderr": 0.03233532777533484, + "acc_norm": 0.5569620253164557, + "acc_norm_stderr": 0.03233532777533484 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.32529335071707954, + "acc_stderr": 0.011965311536571528, + "acc_norm": 0.32529335071707954, + "acc_norm_stderr": 0.011965311536571528 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4362745098039216, + "acc_stderr": 0.03480693138457039, + "acc_norm": 0.4362745098039216, + "acc_norm_stderr": 0.03480693138457039 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5333333333333333, + "acc_stderr": 0.03895658065271846, + "acc_norm": 0.5333333333333333, + "acc_norm_stderr": 0.03895658065271846 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3084455324357405, + "mc1_stderr": 0.01616803938315687, + 
"mc2": 0.4532384559135145, + "mc2_stderr": 0.015485047009493541 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4793388429752066, + "acc_stderr": 0.017175671279836446, + "acc_norm": 0.5301062573789846, + "acc_norm_stderr": 0.017159163590170223 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + 
"harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "42MARU/GenAI-llama2-ko-en-instruct-v7-13B", + "model_sha": "40b2f1775ec5f92bfa8191fda6bb5f7c78564b3c", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/42MARU/GenAI-llama2-ko-en-platypus-13B-v2/result_2023-10-30 01:04:14.json b/42MARU/GenAI-llama2-ko-en-platypus-13B-v2/result_2023-10-30 01:04:14.json new file mode 100644 index 0000000000000000000000000000000000000000..fdfd90d8bd50e1fefe22f76e84d2924800b5f4e6 --- /dev/null +++ b/42MARU/GenAI-llama2-ko-en-platypus-13B-v2/result_2023-10-30 01:04:14.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3890784982935154, + "acc_stderr": 0.014247309976045607, + "acc_norm": 0.4496587030716723, + "acc_norm_stderr": 0.01453714444428474 + }, + "harness|ko_hellaswag|10": { + "acc": 0.42013543118900615, + "acc_stderr": 0.00492571700809971, + "acc_norm": 
0.5487950607448715, + "acc_norm_stderr": 0.004965963647210315 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.03811079669833531, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.03811079669833531 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5145631067961165, + "acc_stderr": 0.04948637324026637, + "acc_norm": 0.5145631067961165, + "acc_norm_stderr": 0.04948637324026637 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4878671775223499, + "acc_stderr": 0.01787469866749135, + "acc_norm": 0.4878671775223499, + "acc_norm_stderr": 0.01787469866749135 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.04171654161354544, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.04171654161354544 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909284, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909284 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3276595744680851, + "acc_stderr": 0.030683020843231015, + "acc_norm": 0.3276595744680851, + "acc_norm_stderr": 0.030683020843231015 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3795180722891566, + "acc_stderr": 0.03777798822748018, + "acc_norm": 0.3795180722891566, + "acc_norm_stderr": 0.03777798822748018 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5048231511254019, + "acc_stderr": 0.02839677044411129, + "acc_norm": 0.5048231511254019, + "acc_norm_stderr": 0.02839677044411129 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.45739910313901344, + "acc_stderr": 0.033435777055830646, + "acc_norm": 0.45739910313901344, + "acc_norm_stderr": 0.033435777055830646 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48091603053435117, + "acc_stderr": 0.04382094705550988, + "acc_norm": 0.48091603053435117, + "acc_norm_stderr": 0.04382094705550988 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 
0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5050505050505051, + "acc_stderr": 0.035621707606254015, + "acc_norm": 0.5050505050505051, + "acc_norm_stderr": 0.035621707606254015 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3724137931034483, + "acc_stderr": 0.0402873153294756, + "acc_norm": 0.3724137931034483, + "acc_norm_stderr": 0.0402873153294756 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.04158307533083286, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.04158307533083286 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.46638655462184875, + "acc_stderr": 0.03240501447690071, + "acc_norm": 0.46638655462184875, + "acc_norm_stderr": 0.03240501447690071 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4307692307692308, + "acc_stderr": 0.025106820660539753, + "acc_norm": 0.4307692307692308, + "acc_norm_stderr": 0.025106820660539753 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3399014778325123, + "acc_stderr": 0.033327690684107895, + "acc_norm": 0.3399014778325123, + "acc_norm_stderr": 0.033327690684107895 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.44516129032258067, + "acc_stderr": 0.02827241018621491, + "acc_norm": 0.44516129032258067, + "acc_norm_stderr": 0.02827241018621491 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6452991452991453, + "acc_stderr": 
0.03134250486245402, + "acc_norm": 0.6452991452991453, + "acc_norm_stderr": 0.03134250486245402 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.39245283018867927, + "acc_stderr": 0.030052580579557845, + "acc_norm": 0.39245283018867927, + "acc_norm_stderr": 0.030052580579557845 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4, + "acc_stderr": 0.0469237132203465, + "acc_norm": 0.4, + "acc_norm_stderr": 0.0469237132203465 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.22592592592592592, + "acc_stderr": 0.025497532639609553, + "acc_norm": 0.22592592592592592, + "acc_norm_stderr": 0.025497532639609553 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + "acc_stderr": 0.03684881521389023, + "acc_norm": 0.2847682119205298, + "acc_norm_stderr": 0.03684881521389023 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5970149253731343, + "acc_stderr": 0.034683432951111266, + "acc_norm": 0.5970149253731343, + "acc_norm_stderr": 0.034683432951111266 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3988439306358382, + "acc_stderr": 0.037336266553835096, + "acc_norm": 0.3988439306358382, + "acc_norm_stderr": 0.037336266553835096 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.31216931216931215, + "acc_stderr": 0.023865206836972585, + "acc_norm": 0.31216931216931215, + "acc_norm_stderr": 0.023865206836972585 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3402777777777778, + "acc_stderr": 0.03962135573486219, + "acc_norm": 0.3402777777777778, + "acc_norm_stderr": 0.03962135573486219 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.64, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.64, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5, + "acc_stderr": 
0.026919095102908273, + "acc_norm": 0.5, + "acc_norm_stderr": 0.026919095102908273 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.44171779141104295, + "acc_stderr": 0.03901591825836185, + "acc_norm": 0.44171779141104295, + "acc_norm_stderr": 0.03901591825836185 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.027744313443376536, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.027744313443376536 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.49740932642487046, + "acc_stderr": 0.03608390745384487, + "acc_norm": 0.49740932642487046, + "acc_norm_stderr": 0.03608390745384487 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.21929824561403508, + "acc_stderr": 0.03892431106518754, + "acc_norm": 0.21929824561403508, + "acc_norm_stderr": 0.03892431106518754 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5009174311926605, + "acc_stderr": 0.021437287056051215, + "acc_norm": 0.5009174311926605, + "acc_norm_stderr": 0.021437287056051215 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.25396825396825395, + "acc_stderr": 0.038932596106046734, + "acc_norm": 0.25396825396825395, + "acc_norm_stderr": 0.038932596106046734 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.46078431372549017, + "acc_stderr": 0.028541722692618874, + "acc_norm": 0.46078431372549017, + "acc_norm_stderr": 0.028541722692618874 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6528925619834711, + "acc_stderr": 0.04345724570292534, + "acc_norm": 0.6528925619834711, + "acc_norm_stderr": 0.04345724570292534 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 
0.48026315789473684, + "acc_stderr": 0.040657710025626057, + "acc_norm": 0.48026315789473684, + "acc_norm_stderr": 0.040657710025626057 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.35784313725490197, + "acc_stderr": 0.019393058402355435, + "acc_norm": 0.35784313725490197, + "acc_norm_stderr": 0.019393058402355435 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3191489361702128, + "acc_stderr": 0.027807990141320203, + "acc_norm": 0.3191489361702128, + "acc_norm_stderr": 0.027807990141320203 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.22321428571428573, + "acc_stderr": 0.039523019677025116, + "acc_norm": 0.22321428571428573, + "acc_norm_stderr": 0.039523019677025116 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.27314814814814814, + "acc_stderr": 0.030388051301678116, + "acc_norm": 0.27314814814814814, + "acc_norm_stderr": 0.030388051301678116 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.2757352941176471, + "acc_stderr": 0.027146271936625166, + "acc_norm": 0.2757352941176471, + "acc_norm_stderr": 0.027146271936625166 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4489795918367347, + "acc_stderr": 0.03184213866687579, + "acc_norm": 0.4489795918367347, + "acc_norm_stderr": 0.03184213866687579 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.569620253164557, + "acc_stderr": 0.03223017195937598, + "acc_norm": 0.569620253164557, + 
"acc_norm_stderr": 0.03223017195937598 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3135593220338983, + "acc_stderr": 0.011849234291459313, + "acc_norm": 0.3135593220338983, + "acc_norm_stderr": 0.011849234291459313 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4950980392156863, + "acc_stderr": 0.03509143375606785, + "acc_norm": 0.4950980392156863, + "acc_norm_stderr": 0.03509143375606785 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5515151515151515, + "acc_stderr": 0.03883565977956928, + "acc_norm": 0.5515151515151515, + "acc_norm_stderr": 0.03883565977956928 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.26805385556915545, + "mc1_stderr": 0.015506204722834562, + "mc2": 0.44689474709496685, + "mc2_stderr": 0.015256070107718848 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5478158205430933, + "acc_stderr": 0.017111567130916796, + "acc_norm": 0.5962219598583235, + "acc_norm_stderr": 0.016869031540298632 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + 
"harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "42MARU/GenAI-llama2-ko-en-platypus-13B-v2", + "model_sha": "1b4eb6319be99c113d17778ce2737acffe2a0fee", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + 
} +} \ No newline at end of file diff --git a/42MARU/GenAI-llama2-ko-en-platypus-13B/result_2023-10-29 00:31:00.json b/42MARU/GenAI-llama2-ko-en-platypus-13B/result_2023-10-29 00:31:00.json new file mode 100644 index 0000000000000000000000000000000000000000..1ee41f8f43825a21a7caba67c5d46af076066d7c --- /dev/null +++ b/42MARU/GenAI-llama2-ko-en-platypus-13B/result_2023-10-29 00:31:00.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3839590443686007, + "acc_stderr": 0.01421244498065189, + "acc_norm": 0.4522184300341297, + "acc_norm_stderr": 0.014544519880633832 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4166500697072296, + "acc_stderr": 0.00491996282220832, + "acc_norm": 0.5524795857398924, + "acc_norm_stderr": 0.004962220512548352 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5614035087719298, + "acc_stderr": 0.038057975055904594, + "acc_norm": 0.5614035087719298, + "acc_norm_stderr": 0.038057975055904594 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.49514563106796117, + "acc_stderr": 0.049505043821289195, + "acc_norm": 0.49514563106796117, + "acc_norm_stderr": 0.049505043821289195 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5351213282247765, + "acc_stderr": 0.017835798806290642, + "acc_norm": 0.5351213282247765, + "acc_norm_stderr": 0.017835798806290642 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34814814814814815, + "acc_stderr": 0.041153246103369526, + "acc_norm": 0.34814814814814815, + "acc_norm_stderr": 0.041153246103369526 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.31063829787234043, + "acc_stderr": 0.03025123757921317, + "acc_norm": 0.31063829787234043, + "acc_norm_stderr": 0.03025123757921317 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42168674698795183, + "acc_stderr": 0.038444531817709175, + 
"acc_norm": 0.42168674698795183, + "acc_norm_stderr": 0.038444531817709175 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5048231511254019, + "acc_stderr": 0.02839677044411129, + "acc_norm": 0.5048231511254019, + "acc_norm_stderr": 0.02839677044411129 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4484304932735426, + "acc_stderr": 0.03337883736255099, + "acc_norm": 0.4484304932735426, + "acc_norm_stderr": 0.03337883736255099 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5267175572519084, + "acc_stderr": 0.04379024936553894, + "acc_norm": 0.5267175572519084, + "acc_norm_stderr": 0.04379024936553894 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562429, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562429 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.494949494949495, + "acc_stderr": 0.035621707606254015, + "acc_norm": 0.494949494949495, + "acc_norm_stderr": 0.035621707606254015 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4068965517241379, + "acc_stderr": 0.04093793981266236, + "acc_norm": 0.4068965517241379, + "acc_norm_stderr": 0.04093793981266236 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.17647058823529413, + "acc_stderr": 0.0379328118530781, + "acc_norm": 0.17647058823529413, + "acc_norm_stderr": 0.0379328118530781 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4957983193277311, + "acc_stderr": 0.03247734334448111, + "acc_norm": 0.4957983193277311, + "acc_norm_stderr": 0.03247734334448111 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4230769230769231, + "acc_stderr": 0.025049197876042328, + "acc_norm": 0.4230769230769231, + "acc_norm_stderr": 0.025049197876042328 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.28, + "acc_stderr": 
0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.04820403072760628, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.04820403072760628 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.35960591133004927, + "acc_stderr": 0.03376458246509568, + "acc_norm": 0.35960591133004927, + "acc_norm_stderr": 0.03376458246509568 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.43548387096774194, + "acc_stderr": 0.02820622559150275, + "acc_norm": 0.43548387096774194, + "acc_norm_stderr": 0.02820622559150275 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6581196581196581, + "acc_stderr": 0.03107502852650775, + "acc_norm": 0.6581196581196581, + "acc_norm_stderr": 0.03107502852650775 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4490566037735849, + "acc_stderr": 0.030612730713641095, + "acc_norm": 0.4490566037735849, + "acc_norm_stderr": 0.030612730713641095 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.41818181818181815, + "acc_stderr": 0.04724577405731572, + "acc_norm": 0.41818181818181815, + "acc_norm_stderr": 0.04724577405731572 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.23333333333333334, + "acc_stderr": 0.02578787422095932, + "acc_norm": 0.23333333333333334, + "acc_norm_stderr": 0.02578787422095932 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2052980132450331, + "acc_stderr": 0.032979866484738336, + "acc_norm": 0.2052980132450331, + "acc_norm_stderr": 0.032979866484738336 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5771144278606966, + "acc_stderr": 0.034932317774212816, + "acc_norm": 0.5771144278606966, + "acc_norm_stderr": 0.034932317774212816 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3699421965317919, + "acc_stderr": 0.036812296333943194, + "acc_norm": 0.3699421965317919, + "acc_norm_stderr": 0.036812296333943194 + }, + 
"harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2830687830687831, + "acc_stderr": 0.023201392938194978, + "acc_norm": 0.2830687830687831, + "acc_norm_stderr": 0.023201392938194978 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.04122728707651282, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.04122728707651282 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.66, + "acc_stderr": 0.04760952285695237, + "acc_norm": 0.66, + "acc_norm_stderr": 0.04760952285695237 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5057803468208093, + "acc_stderr": 0.02691729617914911, + "acc_norm": 0.5057803468208093, + "acc_norm_stderr": 0.02691729617914911 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.44785276073619634, + "acc_stderr": 0.03906947479456602, + "acc_norm": 0.44785276073619634, + "acc_norm_stderr": 0.03906947479456602 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.027777777777777797, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.027777777777777797 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5647668393782384, + "acc_stderr": 0.03578038165008586, + "acc_norm": 0.5647668393782384, + "acc_norm_stderr": 0.03578038165008586 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.03999423879281335, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.03999423879281335 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5522935779816514, + "acc_stderr": 0.02131975496242546, + "acc_norm": 0.5522935779816514, + "acc_norm_stderr": 
0.02131975496242546 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.040061680838488774, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.040061680838488774 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.45751633986928103, + "acc_stderr": 0.02852638345214264, + "acc_norm": 0.45751633986928103, + "acc_norm_stderr": 0.02852638345214264 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6859504132231405, + "acc_stderr": 0.04236964753041018, + "acc_norm": 0.6859504132231405, + "acc_norm_stderr": 0.04236964753041018 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.45394736842105265, + "acc_stderr": 0.04051646342874141, + "acc_norm": 0.45394736842105265, + "acc_norm_stderr": 0.04051646342874141 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3839869281045752, + "acc_stderr": 0.019675808135281525, + "acc_norm": 0.3839869281045752, + "acc_norm_stderr": 0.019675808135281525 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.35815602836879434, + "acc_stderr": 0.02860208586275942, + "acc_norm": 0.35815602836879434, + "acc_norm_stderr": 0.02860208586275942 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.042878587513404544, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.042878587513404544 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.25462962962962965, + "acc_stderr": 0.02971127586000534, + "acc_norm": 0.25462962962962965, + "acc_norm_stderr": 0.02971127586000534 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.293854748603352, + "acc_stderr": 0.015235075776719616, + "acc_norm": 0.293854748603352, + "acc_norm_stderr": 0.015235075776719616 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + 
"acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.27941176470588236, + "acc_stderr": 0.02725720260611495, + "acc_norm": 0.27941176470588236, + "acc_norm_stderr": 0.02725720260611495 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4775510204081633, + "acc_stderr": 0.031976941187136725, + "acc_norm": 0.4775510204081633, + "acc_norm_stderr": 0.031976941187136725 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6075949367088608, + "acc_stderr": 0.0317847187456473, + "acc_norm": 0.6075949367088608, + "acc_norm_stderr": 0.0317847187456473 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.33116036505867014, + "acc_stderr": 0.01202012819598576, + "acc_norm": 0.33116036505867014, + "acc_norm_stderr": 0.01202012819598576 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.49019607843137253, + "acc_stderr": 0.03508637358630572, + "acc_norm": 0.49019607843137253, + "acc_norm_stderr": 0.03508637358630572 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.038881769216741004, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.038881769216741004 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.27539779681762544, + "mc1_stderr": 0.015638135667775523, + "mc2": 0.4478444454695957, + "mc2_stderr": 0.015296142940086415 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5454545454545454, + "acc_stderr": 0.017119172208061504, + "acc_norm": 0.5938606847697757, + "acc_norm_stderr": 0.016884749503191396 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + 
"harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + 
"harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "42MARU/GenAI-llama2-ko-en-platypus-13B", + "model_sha": "61d276d0715184790bae2979744f1ae7c0f451c0", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/42MARU/llama-2-ko-7b-instruct/result_2023-09-29 09:41:36.json b/42MARU/llama-2-ko-7b-instruct/result_2023-09-29 09:41:36.json new file mode 100644 index 0000000000000000000000000000000000000000..c2d8f20a5394134efba430fcd786264af3e7fc26 --- /dev/null +++ b/42MARU/llama-2-ko-7b-instruct/result_2023-09-29 09:41:36.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3455631399317406, + "acc_stderr": 0.013896938461145678, + "acc_norm": 0.3839590443686007, + "acc_norm_stderr": 0.01421244498065189 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3975303724357698, + "acc_stderr": 0.004883871774350598, + "acc_norm": 0.5247958573989245, + "acc_norm_stderr": 0.004983641854351152 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.3684210526315789, + "acc_stderr": 0.036996580176568775, + "acc_norm": 0.3684210526315789, + "acc_norm_stderr": 0.036996580176568775 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.32038834951456313, + "acc_stderr": 0.0462028408228004, + "acc_norm": 0.32038834951456313, + "acc_norm_stderr": 0.0462028408228004 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 
0.3831417624521073, + "acc_stderr": 0.01738477419488563, + "acc_norm": 0.3831417624521073, + "acc_norm_stderr": 0.01738477419488563 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.32592592592592595, + "acc_stderr": 0.040491220417025055, + "acc_norm": 0.32592592592592595, + "acc_norm_stderr": 0.040491220417025055 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421255, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421255 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.28085106382978725, + "acc_stderr": 0.02937917046412482, + "acc_norm": 0.28085106382978725, + "acc_norm_stderr": 0.02937917046412482 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3795180722891566, + "acc_stderr": 0.03777798822748017, + "acc_norm": 0.3795180722891566, + "acc_norm_stderr": 0.03777798822748017 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.35691318327974275, + "acc_stderr": 0.027210420375934012, + "acc_norm": 0.35691318327974275, + "acc_norm_stderr": 0.027210420375934012 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4170403587443946, + "acc_stderr": 0.03309266936071721, + "acc_norm": 0.4170403587443946, + "acc_norm_stderr": 0.03309266936071721 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4198473282442748, + "acc_stderr": 0.043285772152629715, + "acc_norm": 0.4198473282442748, + "acc_norm_stderr": 0.043285772152629715 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.35858585858585856, + "acc_stderr": 0.034169036403915214, + "acc_norm": 0.35858585858585856, + "acc_norm_stderr": 0.034169036403915214 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.25517241379310346, + "acc_stderr": 0.03632984052707842, + "acc_norm": 0.25517241379310346, + "acc_norm_stderr": 0.03632984052707842 + }, + 
"harness|ko_mmlu_college_physics|5": { + "acc": 0.1568627450980392, + "acc_stderr": 0.03618664819936245, + "acc_norm": 0.1568627450980392, + "acc_norm_stderr": 0.03618664819936245 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.25630252100840334, + "acc_stderr": 0.028359620870533953, + "acc_norm": 0.25630252100840334, + "acc_norm_stderr": 0.028359620870533953 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.24871794871794872, + "acc_stderr": 0.021916957709213803, + "acc_norm": 0.24871794871794872, + "acc_norm_stderr": 0.021916957709213803 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.04668408033024932, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.04668408033024932 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.23645320197044334, + "acc_stderr": 0.029896114291733545, + "acc_norm": 0.23645320197044334, + "acc_norm_stderr": 0.029896114291733545 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3161290322580645, + "acc_stderr": 0.026450874489042767, + "acc_norm": 0.3161290322580645, + "acc_norm_stderr": 0.026450874489042767 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.4358974358974359, + "acc_stderr": 0.03248577511578401, + "acc_norm": 0.4358974358974359, + "acc_norm_stderr": 0.03248577511578401 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.30943396226415093, + "acc_stderr": 0.028450154794118627, + "acc_norm": 0.30943396226415093, + "acc_norm_stderr": 0.028450154794118627 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.37272727272727274, + "acc_stderr": 0.04631381319425463, + "acc_norm": 
0.37272727272727274, + "acc_norm_stderr": 0.04631381319425463 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25555555555555554, + "acc_stderr": 0.026593939101844072, + "acc_norm": 0.25555555555555554, + "acc_norm_stderr": 0.026593939101844072 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.23841059602649006, + "acc_stderr": 0.0347918557259966, + "acc_norm": 0.23841059602649006, + "acc_norm_stderr": 0.0347918557259966 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.3283582089552239, + "acc_stderr": 0.033206858897443244, + "acc_norm": 0.3283582089552239, + "acc_norm_stderr": 0.033206858897443244 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.23121387283236994, + "acc_stderr": 0.0321473730202947, + "acc_norm": 0.23121387283236994, + "acc_norm_stderr": 0.0321473730202947 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.23809523809523808, + "acc_stderr": 0.021935878081184756, + "acc_norm": 0.23809523809523808, + "acc_norm_stderr": 0.021935878081184756 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.03745554791462457, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.03745554791462457 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.33236994219653176, + "acc_stderr": 0.025361168749688225, + "acc_norm": 0.33236994219653176, + "acc_norm_stderr": 0.025361168749688225 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.25766871165644173, + "acc_stderr": 0.03436150827846917, + "acc_norm": 0.25766871165644173, + "acc_norm_stderr": 0.03436150827846917 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.33641975308641975, + "acc_stderr": 
0.026289734945952926, + "acc_norm": 0.33641975308641975, + "acc_norm_stderr": 0.026289734945952926 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542129, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542129 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.32642487046632124, + "acc_stderr": 0.033840286211432945, + "acc_norm": 0.32642487046632124, + "acc_norm_stderr": 0.033840286211432945 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.30701754385964913, + "acc_stderr": 0.0433913832257986, + "acc_norm": 0.30701754385964913, + "acc_norm_stderr": 0.0433913832257986 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3174311926605505, + "acc_stderr": 0.019957152198460497, + "acc_norm": 0.3174311926605505, + "acc_norm_stderr": 0.019957152198460497 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.1746031746031746, + "acc_stderr": 0.03395490020856111, + "acc_norm": 0.1746031746031746, + "acc_norm_stderr": 0.03395490020856111 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.35294117647058826, + "acc_stderr": 0.02736359328468495, + "acc_norm": 0.35294117647058826, + "acc_norm_stderr": 0.02736359328468495 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.36363636363636365, + "acc_stderr": 0.043913262867240704, + "acc_norm": 0.36363636363636365, + "acc_norm_stderr": 0.043913262867240704 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3026315789473684, + "acc_stderr": 0.037385206761196686, + "acc_norm": 0.3026315789473684, + "acc_norm_stderr": 0.037385206761196686 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2679738562091503, + "acc_stderr": 0.017917974069594726, + "acc_norm": 0.2679738562091503, + "acc_norm_stderr": 0.017917974069594726 + }, + 
"harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2624113475177305, + "acc_stderr": 0.02624492034984301, + "acc_norm": 0.2624113475177305, + "acc_norm_stderr": 0.02624492034984301 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.33035714285714285, + "acc_stderr": 0.04464285714285712, + "acc_norm": 0.33035714285714285, + "acc_norm_stderr": 0.04464285714285712 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.24074074074074073, + "acc_stderr": 0.029157522184605586, + "acc_norm": 0.24074074074074073, + "acc_norm_stderr": 0.029157522184605586 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.35294117647058826, + "acc_stderr": 0.0290294228156814, + "acc_norm": 0.35294117647058826, + "acc_norm_stderr": 0.0290294228156814 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.27755102040816326, + "acc_stderr": 0.028666857790274648, + "acc_norm": 0.27755102040816326, + "acc_norm_stderr": 0.028666857790274648 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.35864978902953587, + "acc_stderr": 0.031219569445301847, + "acc_norm": 0.35864978902953587, + "acc_norm_stderr": 0.031219569445301847 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.27249022164276404, + "acc_stderr": 0.01137165829431153, + "acc_norm": 0.27249022164276404, + "acc_norm_stderr": 0.01137165829431153 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2696078431372549, + "acc_stderr": 
0.031145570659486782, + "acc_norm": 0.2696078431372549, + "acc_norm_stderr": 0.031145570659486782 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3090909090909091, + "acc_stderr": 0.036085410115739666, + "acc_norm": 0.3090909090909091, + "acc_norm_stderr": 0.036085410115739666 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24969400244798043, + "mc1_stderr": 0.015152286907148125, + "mc2": 0.39805148377575406, + "mc2_stderr": 0.015027401787198838 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2396694214876033, + "acc_stderr": 0.014676495332267253, + "acc_norm": 0.31286894923258557, + "acc_norm_stderr": 0.015941010118302654 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + 
"harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "42MARU/llama-2-ko-7b-instruct", + "model_sha": "3c590472282b5de4c76d846153db5f41b82c1b62", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/42MARU/llama-2-ko-7b-instruction-v3/result_2023-10-01 18:41:33.json b/42MARU/llama-2-ko-7b-instruction-v3/result_2023-10-01 18:41:33.json new file mode 100644 index 0000000000000000000000000000000000000000..17bf35f8ccfc589b128222e311f97daf61999735 --- /dev/null 
+++ b/42MARU/llama-2-ko-7b-instruction-v3/result_2023-10-01 18:41:33.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3293515358361775, + "acc_stderr": 0.013734057652635474, + "acc_norm": 0.386518771331058, + "acc_norm_stderr": 0.014230084761910474 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3835889265086636, + "acc_stderr": 0.00485265887677539, + "acc_norm": 0.5022903804023103, + "acc_norm_stderr": 0.004989729059957435 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.391812865497076, + "acc_stderr": 0.03743979825926401, + "acc_norm": 0.391812865497076, + "acc_norm_stderr": 0.03743979825926401 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2815533980582524, + "acc_stderr": 0.04453254836326466, + "acc_norm": 0.2815533980582524, + "acc_norm_stderr": 0.04453254836326466 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.367816091954023, + "acc_stderr": 0.01724382889184626, + "acc_norm": 0.367816091954023, + "acc_norm_stderr": 0.01724382889184626 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34074074074074073, + "acc_stderr": 0.04094376269996795, + "acc_norm": 0.34074074074074073, + "acc_norm_stderr": 0.04094376269996795 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2851063829787234, + "acc_stderr": 0.02951319662553935, + "acc_norm": 0.2851063829787234, + "acc_norm_stderr": 0.02951319662553935 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3132530120481928, + "acc_stderr": 0.03610805018031024, + "acc_norm": 0.3132530120481928, + "acc_norm_stderr": 0.03610805018031024 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3858520900321543, + "acc_stderr": 0.027648149599751464, + "acc_norm": 0.3858520900321543, + "acc_norm_stderr": 0.027648149599751464 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.43946188340807174, + "acc_stderr": 
0.03331092511038179, + "acc_norm": 0.43946188340807174, + "acc_norm_stderr": 0.03331092511038179 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4351145038167939, + "acc_stderr": 0.04348208051644858, + "acc_norm": 0.4351145038167939, + "acc_norm_stderr": 0.04348208051644858 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.31313131313131315, + "acc_stderr": 0.03304205087813653, + "acc_norm": 0.31313131313131315, + "acc_norm_stderr": 0.03304205087813653 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3586206896551724, + "acc_stderr": 0.039966295748767186, + "acc_norm": 0.3586206896551724, + "acc_norm_stderr": 0.039966295748767186 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.04023382273617747, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.04023382273617747 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3697478991596639, + "acc_stderr": 0.031357095996135904, + "acc_norm": 0.3697478991596639, + "acc_norm_stderr": 0.031357095996135904 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2794871794871795, + "acc_stderr": 0.022752388839776823, + "acc_norm": 0.2794871794871795, + "acc_norm_stderr": 0.022752388839776823 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036846, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036846 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.37962962962962965, + "acc_stderr": 0.04691521224077742, + "acc_norm": 0.37962962962962965, + "acc_norm_stderr": 0.04691521224077742 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 
0.22660098522167488, + "acc_stderr": 0.02945486383529298, + "acc_norm": 0.22660098522167488, + "acc_norm_stderr": 0.02945486383529298 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3225806451612903, + "acc_stderr": 0.026593084516572267, + "acc_norm": 0.3225806451612903, + "acc_norm_stderr": 0.026593084516572267 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5, + "acc_stderr": 0.03275608910402091, + "acc_norm": 0.5, + "acc_norm_stderr": 0.03275608910402091 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3471698113207547, + "acc_stderr": 0.029300101705549652, + "acc_norm": 0.3471698113207547, + "acc_norm_stderr": 0.029300101705549652 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.36363636363636365, + "acc_stderr": 0.04607582090719976, + "acc_norm": 0.36363636363636365, + "acc_norm_stderr": 0.04607582090719976 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.23703703703703705, + "acc_stderr": 0.025928876132766118, + "acc_norm": 0.23703703703703705, + "acc_norm_stderr": 0.025928876132766118 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2119205298013245, + "acc_stderr": 0.03336767086567977, + "acc_norm": 0.2119205298013245, + "acc_norm_stderr": 0.03336767086567977 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.39800995024875624, + "acc_stderr": 0.034611994290400135, + "acc_norm": 0.39800995024875624, + "acc_norm_stderr": 0.034611994290400135 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2774566473988439, + "acc_stderr": 0.03414014007044036, + "acc_norm": 0.2774566473988439, + "acc_norm_stderr": 0.03414014007044036 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.24867724867724866, + "acc_stderr": 0.022261817692400175, + "acc_norm": 0.24867724867724866, + "acc_norm_stderr": 0.022261817692400175 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2916666666666667, + "acc_stderr": 0.03800968060554859, + "acc_norm": 0.2916666666666667, + "acc_norm_stderr": 
0.03800968060554859 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.38439306358381503, + "acc_stderr": 0.026189666966272035, + "acc_norm": 0.38439306358381503, + "acc_norm_stderr": 0.026189666966272035 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3496932515337423, + "acc_stderr": 0.03746668325470022, + "acc_norm": 0.3496932515337423, + "acc_norm_stderr": 0.03746668325470022 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.026725868809100793, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.026725868809100793 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.35751295336787564, + "acc_stderr": 0.03458816042181005, + "acc_norm": 0.35751295336787564, + "acc_norm_stderr": 0.03458816042181005 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.03999423879281336, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.03999423879281336 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3394495412844037, + "acc_stderr": 0.02030210934266235, + "acc_norm": 0.3394495412844037, + "acc_norm_stderr": 0.02030210934266235 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.039325376803928704, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.039325376803928704 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3660130718954248, + "acc_stderr": 0.027582811415159607, + "acc_norm": 0.3660130718954248, + 
"acc_norm_stderr": 0.027582811415159607 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.49586776859504134, + "acc_stderr": 0.04564198767432754, + "acc_norm": 0.49586776859504134, + "acc_norm_stderr": 0.04564198767432754 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.24342105263157895, + "acc_stderr": 0.034923496688842384, + "acc_norm": 0.24342105263157895, + "acc_norm_stderr": 0.034923496688842384 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3366013071895425, + "acc_stderr": 0.019117213911495165, + "acc_norm": 0.3366013071895425, + "acc_norm_stderr": 0.019117213911495165 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.29432624113475175, + "acc_stderr": 0.027187127011503796, + "acc_norm": 0.29432624113475175, + "acc_norm_stderr": 0.027187127011503796 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.29464285714285715, + "acc_stderr": 0.04327040932578728, + "acc_norm": 0.29464285714285715, + "acc_norm_stderr": 0.04327040932578728 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2824074074074074, + "acc_stderr": 0.030701372111510927, + "acc_norm": 0.2824074074074074, + "acc_norm_stderr": 0.030701372111510927 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.25139664804469275, + "acc_stderr": 0.014508979453553977, + "acc_norm": 0.25139664804469275, + "acc_norm_stderr": 0.014508979453553977 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3786764705882353, + "acc_stderr": 
0.029465133639776125, + "acc_norm": 0.3786764705882353, + "acc_norm_stderr": 0.029465133639776125 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3510204081632653, + "acc_stderr": 0.03055531675557364, + "acc_norm": 0.3510204081632653, + "acc_norm_stderr": 0.03055531675557364 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.4767932489451477, + "acc_stderr": 0.032512152011410174, + "acc_norm": 0.4767932489451477, + "acc_norm_stderr": 0.032512152011410174 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3089960886571056, + "acc_stderr": 0.01180172977723925, + "acc_norm": 0.3089960886571056, + "acc_norm_stderr": 0.01180172977723925 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.31862745098039214, + "acc_stderr": 0.032702871814820816, + "acc_norm": 0.31862745098039214, + "acc_norm_stderr": 0.032702871814820816 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3212121212121212, + "acc_stderr": 0.0364620496325381, + "acc_norm": 0.3212121212121212, + "acc_norm_stderr": 0.0364620496325381 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.25091799265605874, + "mc1_stderr": 0.01517698502770769, + "mc2": 0.38056097212603235, + "mc2_stderr": 0.014936929596682727 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.21605667060212513, + "acc_stderr": 0.014149496716043137, + "acc_norm": 0.29279811097992914, + "acc_norm_stderr": 0.015644823205401337 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + 
"harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + 
"harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "42MARU/llama-2-ko-7b-instruction-v3", + "model_sha": "c0fea9cb31d4ae90aa2ed048f774a9000341b538", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/42MARU/polyglot-ko-12.8b-instruct/result_2023-09-27 21:10:18.json b/42MARU/polyglot-ko-12.8b-instruct/result_2023-09-27 21:10:18.json new file mode 100644 index 0000000000000000000000000000000000000000..4db9a44cf2c11e6ee23ccc99e1d58f04a6bc6c12 --- /dev/null +++ b/42MARU/polyglot-ko-12.8b-instruct/result_2023-09-27 21:10:18.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3046075085324232, + "acc_stderr": 0.013449522109932492, + "acc_norm": 0.363481228668942, + "acc_norm_stderr": 0.014056207319068282 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3981278629755029, + "acc_stderr": 0.0048851164655502755, + "acc_norm": 0.5159330810595499, + "acc_norm_stderr": 0.004987247325495624 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.22807017543859648, + "acc_stderr": 0.03218093795602357, + "acc_norm": 0.22807017543859648, + "acc_norm_stderr": 0.03218093795602357 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.24271844660194175, + "acc_stderr": 0.04245022486384495, + "acc_norm": 0.24271844660194175, + "acc_norm_stderr": 0.04245022486384495 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.23754789272030652, + "acc_stderr": 0.015218733046150193, + "acc_norm": 0.23754789272030652, + "acc_norm_stderr": 0.015218733046150193 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.28888888888888886, + "acc_stderr": 0.039154506304142495, + "acc_norm": 0.28888888888888886, + "acc_norm_stderr": 0.039154506304142495 + }, + 
"harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2297872340425532, + "acc_stderr": 0.02750175294441242, + "acc_norm": 0.2297872340425532, + "acc_norm_stderr": 0.02750175294441242 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.2469879518072289, + "acc_stderr": 0.03357351982064536, + "acc_norm": 0.2469879518072289, + "acc_norm_stderr": 0.03357351982064536 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.33440514469453375, + "acc_stderr": 0.026795422327893944, + "acc_norm": 0.33440514469453375, + "acc_norm_stderr": 0.026795422327893944 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.1031390134529148, + "acc_stderr": 0.020412564289839272, + "acc_norm": 0.1031390134529148, + "acc_norm_stderr": 0.020412564289839272 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2748091603053435, + "acc_stderr": 0.039153454088478354, + "acc_norm": 0.2748091603053435, + "acc_norm_stderr": 0.039153454088478354 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.40404040404040403, + "acc_stderr": 0.03496130972056128, + "acc_norm": 0.40404040404040403, + "acc_norm_stderr": 0.03496130972056128 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3310344827586207, + "acc_stderr": 0.03921545312467122, + "acc_norm": 0.3310344827586207, + "acc_norm_stderr": 0.03921545312467122 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.04389869956808777, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.04389869956808777 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.18907563025210083, + "acc_stderr": 0.02543511943810536, + "acc_norm": 0.18907563025210083, + "acc_norm_stderr": 
0.02543511943810536 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.21794871794871795, + "acc_stderr": 0.020932445774463175, + "acc_norm": 0.21794871794871795, + "acc_norm_stderr": 0.020932445774463175 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.18, + "acc_stderr": 0.03861229196653694, + "acc_norm": 0.18, + "acc_norm_stderr": 0.03861229196653694 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.16, + "acc_stderr": 0.03684529491774708, + "acc_norm": 0.16, + "acc_norm_stderr": 0.03684529491774708 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.26851851851851855, + "acc_stderr": 0.04284467968052191, + "acc_norm": 0.26851851851851855, + "acc_norm_stderr": 0.04284467968052191 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.1921182266009852, + "acc_stderr": 0.027719315709614778, + "acc_norm": 0.1921182266009852, + "acc_norm_stderr": 0.027719315709614778 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.25483870967741934, + "acc_stderr": 0.024790118459332204, + "acc_norm": 0.25483870967741934, + "acc_norm_stderr": 0.024790118459332204 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2905982905982906, + "acc_stderr": 0.029745048572674057, + "acc_norm": 0.2905982905982906, + "acc_norm_stderr": 0.029745048572674057 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.29056603773584905, + "acc_stderr": 0.027943219989337156, + "acc_norm": 0.29056603773584905, + "acc_norm_stderr": 0.027943219989337156 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2636363636363636, + "acc_stderr": 0.04220224692971987, + "acc_norm": 0.2636363636363636, + "acc_norm_stderr": 0.04220224692971987 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.026719240783712166, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.026719240783712166 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.03710185726119995, + 
"acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.03710185726119995 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.19402985074626866, + "acc_stderr": 0.027962677604768914, + "acc_norm": 0.19402985074626866, + "acc_norm_stderr": 0.027962677604768914 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2254335260115607, + "acc_stderr": 0.03186209851641143, + "acc_norm": 0.2254335260115607, + "acc_norm_stderr": 0.03186209851641143 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.24867724867724866, + "acc_stderr": 0.02226181769240018, + "acc_norm": 0.24867724867724866, + "acc_norm_stderr": 0.02226181769240018 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3125, + "acc_stderr": 0.038760854559127644, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.038760854559127644 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.21, + "acc_stderr": 0.04093601807403326, + "acc_norm": 0.21, + "acc_norm_stderr": 0.04093601807403326 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2774566473988439, + "acc_stderr": 0.024105712607754307, + "acc_norm": 0.2774566473988439, + "acc_norm_stderr": 0.024105712607754307 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3067484662576687, + "acc_stderr": 0.036230899157241474, + "acc_norm": 0.3067484662576687, + "acc_norm_stderr": 0.036230899157241474 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.24074074074074073, + "acc_stderr": 0.023788583551658537, + "acc_norm": 0.24074074074074073, + "acc_norm_stderr": 0.023788583551658537 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768077, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768077 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.22797927461139897, + "acc_stderr": 0.030276909945178253, + 
"acc_norm": 0.22797927461139897, + "acc_norm_stderr": 0.030276909945178253 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.30701754385964913, + "acc_stderr": 0.043391383225798594, + "acc_norm": 0.30701754385964913, + "acc_norm_stderr": 0.043391383225798594 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3376146788990826, + "acc_stderr": 0.02027526598663891, + "acc_norm": 0.3376146788990826, + "acc_norm_stderr": 0.02027526598663891 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.23809523809523808, + "acc_stderr": 0.038095238095238126, + "acc_norm": 0.23809523809523808, + "acc_norm_stderr": 0.038095238095238126 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.025553169991826524, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.025553169991826524 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.24793388429752067, + "acc_stderr": 0.03941897526516302, + "acc_norm": 0.24793388429752067, + "acc_norm_stderr": 0.03941897526516302 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.03459777606810537, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.03459777606810537 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.25980392156862747, + "acc_stderr": 0.01774089950917779, + "acc_norm": 0.25980392156862747, + "acc_norm_stderr": 0.01774089950917779 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.22340425531914893, + "acc_stderr": 0.024847921358063962, + "acc_norm": 0.22340425531914893, + "acc_norm_stderr": 0.024847921358063962 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25892857142857145, + "acc_stderr": 0.04157751539865629, + "acc_norm": 0.25892857142857145, + "acc_norm_stderr": 0.04157751539865629 + }, + "harness|ko_mmlu_high_school_statistics|5": 
{ + "acc": 0.24537037037037038, + "acc_stderr": 0.029346665094372937, + "acc_norm": 0.24537037037037038, + "acc_norm_stderr": 0.029346665094372937 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27262569832402234, + "acc_stderr": 0.014893391735249608, + "acc_norm": 0.27262569832402234, + "acc_norm_stderr": 0.014893391735249608 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3786764705882353, + "acc_stderr": 0.02946513363977613, + "acc_norm": 0.3786764705882353, + "acc_norm_stderr": 0.02946513363977613 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.35918367346938773, + "acc_stderr": 0.03071356045510849, + "acc_norm": 0.35918367346938773, + "acc_norm_stderr": 0.03071356045510849 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.24472573839662448, + "acc_stderr": 0.027985699387036416, + "acc_norm": 0.24472573839662448, + "acc_norm_stderr": 0.027985699387036416 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2588005215123859, + "acc_stderr": 0.011186109046564608, + "acc_norm": 0.2588005215123859, + "acc_norm_stderr": 0.011186109046564608 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.031660096793998116, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.031660096793998116 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2, + "acc_stderr": 0.03123475237772118, + "acc_norm": 0.2, + "acc_norm_stderr": 0.03123475237772118 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.27906976744186046, + "mc1_stderr": 0.015702107090627887, + "mc2": 0.4515720476496737, + "mc2_stderr": 0.015493161984611252 + }, 
+ "harness|ko_commongen_v2|2": { + "acc": 0.2680047225501771, + "acc_stderr": 0.015227905796335147, + "acc_norm": 0.3707201889020071, + "acc_norm_stderr": 0.016605801289212598 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + 
"harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "42MARU/polyglot-ko-12.8b-instruct", + "model_sha": "a8354bcedc167e8e1f7dac8a347bf4b61d9c9bf0", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/42MARU/sitebunny-13b/result_2023-09-27 08:17:31.json b/42MARU/sitebunny-13b/result_2023-09-27 08:17:31.json new file mode 100644 index 0000000000000000000000000000000000000000..a0541d1d294503564b350859ce432296bf101ded --- /dev/null +++ b/42MARU/sitebunny-13b/result_2023-09-27 08:17:31.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3643344709897611, + "acc_stderr": 0.014063260279882417, + "acc_norm": 0.4112627986348123, + "acc_norm_stderr": 0.014379441068522084 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3732324238199562, + "acc_stderr": 0.004826746160830189, + "acc_norm": 0.4751045608444533, + "acc_norm_stderr": 0.004983592410934169 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 
0.4853801169590643, + "acc_stderr": 0.038331852752130205, + "acc_norm": 0.4853801169590643, + "acc_norm_stderr": 0.038331852752130205 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5631067961165048, + "acc_stderr": 0.04911147107365777, + "acc_norm": 0.5631067961165048, + "acc_norm_stderr": 0.04911147107365777 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4878671775223499, + "acc_stderr": 0.017874698667491355, + "acc_norm": 0.4878671775223499, + "acc_norm_stderr": 0.017874698667491355 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4148148148148148, + "acc_stderr": 0.042561937679014075, + "acc_norm": 0.4148148148148148, + "acc_norm_stderr": 0.042561937679014075 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4, + "acc_stderr": 0.03202563076101735, + "acc_norm": 0.4, + "acc_norm_stderr": 0.03202563076101735 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3855421686746988, + "acc_stderr": 0.03789134424611548, + "acc_norm": 0.3855421686746988, + "acc_norm_stderr": 0.03789134424611548 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4533762057877814, + "acc_stderr": 0.028274359854894245, + "acc_norm": 0.4533762057877814, + "acc_norm_stderr": 0.028274359854894245 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4663677130044843, + "acc_stderr": 0.033481800170603065, + "acc_norm": 0.4663677130044843, + "acc_norm_stderr": 0.033481800170603065 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48091603053435117, + "acc_stderr": 0.04382094705550988, + "acc_norm": 0.48091603053435117, + "acc_norm_stderr": 0.04382094705550988 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.42, + "acc_stderr": 0.04960449637488583, + "acc_norm": 0.42, + "acc_norm_stderr": 0.04960449637488583 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5909090909090909, + "acc_stderr": 
0.03502975799413008, + "acc_norm": 0.5909090909090909, + "acc_norm_stderr": 0.03502975799413008 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4413793103448276, + "acc_stderr": 0.04137931034482758, + "acc_norm": 0.4413793103448276, + "acc_norm_stderr": 0.04137931034482758 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.16666666666666666, + "acc_stderr": 0.03708284662416544, + "acc_norm": 0.16666666666666666, + "acc_norm_stderr": 0.03708284662416544 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4495798319327731, + "acc_stderr": 0.03231293497137707, + "acc_norm": 0.4495798319327731, + "acc_norm_stderr": 0.03231293497137707 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4358974358974359, + "acc_stderr": 0.025141801511177498, + "acc_norm": 0.4358974358974359, + "acc_norm_stderr": 0.025141801511177498 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5277777777777778, + "acc_stderr": 0.048262172941398944, + "acc_norm": 0.5277777777777778, + "acc_norm_stderr": 0.048262172941398944 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3645320197044335, + "acc_stderr": 0.0338640574606209, + "acc_norm": 0.3645320197044335, + "acc_norm_stderr": 0.0338640574606209 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.47096774193548385, + "acc_stderr": 0.028396016402761005, + "acc_norm": 0.47096774193548385, + "acc_norm_stderr": 0.028396016402761005 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6282051282051282, + "acc_stderr": 0.03166098891888078, + "acc_norm": 0.6282051282051282, + "acc_norm_stderr": 0.03166098891888078 + }, + "harness|ko_mmlu_clinical_knowledge|5": { 
+ "acc": 0.4528301886792453, + "acc_stderr": 0.030635627957961823, + "acc_norm": 0.4528301886792453, + "acc_norm_stderr": 0.030635627957961823 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3111111111111111, + "acc_stderr": 0.028226446749683526, + "acc_norm": 0.3111111111111111, + "acc_norm_stderr": 0.028226446749683526 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.038020397601079024, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.038020397601079024 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5771144278606966, + "acc_stderr": 0.034932317774212816, + "acc_norm": 0.5771144278606966, + "acc_norm_stderr": 0.034932317774212816 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3815028901734104, + "acc_stderr": 0.03703851193099521, + "acc_norm": 0.3815028901734104, + "acc_norm_stderr": 0.03703851193099521 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3253968253968254, + "acc_stderr": 0.024130158299762613, + "acc_norm": 0.3253968253968254, + "acc_norm_stderr": 0.024130158299762613 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3819444444444444, + "acc_stderr": 0.040629907841466674, + "acc_norm": 0.3819444444444444, + "acc_norm_stderr": 0.040629907841466674 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5144508670520231, + "acc_stderr": 0.02690784985628254, + "acc_norm": 0.5144508670520231, + "acc_norm_stderr": 0.02690784985628254 + }, + 
"harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.44171779141104295, + "acc_stderr": 0.03901591825836184, + "acc_norm": 0.44171779141104295, + "acc_norm_stderr": 0.03901591825836184 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.02764847787741332, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.02764847787741332 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5077720207253886, + "acc_stderr": 0.03608003225569654, + "acc_norm": 0.5077720207253886, + "acc_norm_stderr": 0.03608003225569654 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.19298245614035087, + "acc_stderr": 0.037124548537213684, + "acc_norm": 0.19298245614035087, + "acc_norm_stderr": 0.037124548537213684 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.47889908256880737, + "acc_stderr": 0.021418224754264643, + "acc_norm": 0.47889908256880737, + "acc_norm_stderr": 0.021418224754264643 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.373015873015873, + "acc_stderr": 0.04325506042017086, + "acc_norm": 0.373015873015873, + "acc_norm_stderr": 0.04325506042017086 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.43790849673202614, + "acc_stderr": 0.028408302020332687, + "acc_norm": 0.43790849673202614, + "acc_norm_stderr": 0.028408302020332687 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.42, + "acc_stderr": 0.04960449637488584, + "acc_norm": 0.42, + "acc_norm_stderr": 0.04960449637488584 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6859504132231405, + "acc_stderr": 0.042369647530410184, + "acc_norm": 0.6859504132231405, + "acc_norm_stderr": 0.042369647530410184 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.34868421052631576, + "acc_stderr": 0.03878139888797609, + "acc_norm": 0.34868421052631576, + "acc_norm_stderr": 
0.03878139888797609 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3480392156862745, + "acc_stderr": 0.01927099870822398, + "acc_norm": 0.3480392156862745, + "acc_norm_stderr": 0.01927099870822398 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3475177304964539, + "acc_stderr": 0.028406627809590947, + "acc_norm": 0.3475177304964539, + "acc_norm_stderr": 0.028406627809590947 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.23214285714285715, + "acc_stderr": 0.040073418097558065, + "acc_norm": 0.23214285714285715, + "acc_norm_stderr": 0.040073418097558065 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3101851851851852, + "acc_stderr": 0.031546962856566295, + "acc_norm": 0.3101851851851852, + "acc_norm_stderr": 0.031546962856566295 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.3106145251396648, + "acc_stderr": 0.015476515438005566, + "acc_norm": 0.3106145251396648, + "acc_norm_stderr": 0.015476515438005566 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3235294117647059, + "acc_stderr": 0.02841820861940679, + "acc_norm": 0.3235294117647059, + "acc_norm_stderr": 0.02841820861940679 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5428571428571428, + "acc_stderr": 0.03189141832421396, + "acc_norm": 0.5428571428571428, + "acc_norm_stderr": 0.03189141832421396 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5780590717299579, + "acc_stderr": 0.032148146302403695, + "acc_norm": 0.5780590717299579, + "acc_norm_stderr": 0.032148146302403695 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.32333767926988266, + 
"acc_stderr": 0.011946565758447202, + "acc_norm": 0.32333767926988266, + "acc_norm_stderr": 0.011946565758447202 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.47549019607843135, + "acc_stderr": 0.035050931943487976, + "acc_norm": 0.47549019607843135, + "acc_norm_stderr": 0.035050931943487976 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5393939393939394, + "acc_stderr": 0.03892207016552012, + "acc_norm": 0.5393939393939394, + "acc_norm_stderr": 0.03892207016552012 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.35006119951040393, + "mc1_stderr": 0.01669794942015103, + "mc2": 0.5148844380994511, + "mc2_stderr": 0.015947695748354234 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.42857142857142855, + "acc_stderr": 0.017014038119297473, + "acc_norm": 0.44155844155844154, + "acc_norm_stderr": 0.0170725258755631 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + 
"harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "42MARU/sitebunny-13b", + "model_sha": "15c8578d2be688d6b03ed2076658865bb8752673", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/42dot/42dot_LLM-PLM-1.3B/result_2023-10-18 01:46:47.json 
b/42dot/42dot_LLM-PLM-1.3B/result_2023-10-18 01:46:47.json new file mode 100644 index 0000000000000000000000000000000000000000..332095c02e6b1b6bfc3e63c9bb110307e1733c01 --- /dev/null +++ b/42dot/42dot_LLM-PLM-1.3B/result_2023-10-18 01:46:47.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2636518771331058, + "acc_stderr": 0.01287592915129705, + "acc_norm": 0.32593856655290104, + "acc_norm_stderr": 0.013697432466693242 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3563035251941844, + "acc_stderr": 0.004779276329704052, + "acc_norm": 0.4473212507468632, + "acc_norm_stderr": 0.004962010338226348 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.23976608187134502, + "acc_stderr": 0.03274485211946956, + "acc_norm": 0.23976608187134502, + "acc_norm_stderr": 0.03274485211946956 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.1941747572815534, + "acc_stderr": 0.03916667762822584, + "acc_norm": 0.1941747572815534, + "acc_norm_stderr": 0.03916667762822584 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.22094508301404853, + "acc_stderr": 0.014836205167333574, + "acc_norm": 0.22094508301404853, + "acc_norm_stderr": 0.014836205167333574 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.18518518518518517, + "acc_stderr": 0.0335567721631314, + "acc_norm": 0.18518518518518517, + "acc_norm_stderr": 0.0335567721631314 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2851063829787234, + "acc_stderr": 0.02951319662553935, + "acc_norm": 0.2851063829787234, + "acc_norm_stderr": 0.02951319662553935 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3072289156626506, + "acc_stderr": 0.03591566797824664, + "acc_norm": 0.3072289156626506, + "acc_norm_stderr": 0.03591566797824664 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2347266881028939, + "acc_stderr": 
0.024071805887677045, + "acc_norm": 0.2347266881028939, + "acc_norm_stderr": 0.024071805887677045 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.2242152466367713, + "acc_stderr": 0.027991534258519527, + "acc_norm": 0.2242152466367713, + "acc_norm_stderr": 0.027991534258519527 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2900763358778626, + "acc_stderr": 0.03980066246467765, + "acc_norm": 0.2900763358778626, + "acc_norm_stderr": 0.03980066246467765 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.2474747474747475, + "acc_stderr": 0.030746300742124484, + "acc_norm": 0.2474747474747475, + "acc_norm_stderr": 0.030746300742124484 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.21379310344827587, + "acc_stderr": 0.034165204477475494, + "acc_norm": 0.21379310344827587, + "acc_norm_stderr": 0.034165204477475494 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.041583075330832865, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.041583075330832865 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3235294117647059, + "acc_stderr": 0.030388353551886835, + "acc_norm": 0.3235294117647059, + "acc_norm_stderr": 0.030388353551886835 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.36153846153846153, + "acc_stderr": 0.02435958146539698, + "acc_norm": 0.36153846153846153, + "acc_norm_stderr": 0.02435958146539698 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.17, + "acc_stderr": 0.03775251680686371, + "acc_norm": 0.17, + "acc_norm_stderr": 0.03775251680686371 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 
0.2037037037037037, + "acc_stderr": 0.038935425188248475, + "acc_norm": 0.2037037037037037, + "acc_norm_stderr": 0.038935425188248475 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.270935960591133, + "acc_stderr": 0.031270907132976984, + "acc_norm": 0.270935960591133, + "acc_norm_stderr": 0.031270907132976984 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2870967741935484, + "acc_stderr": 0.025736542745594525, + "acc_norm": 0.2870967741935484, + "acc_norm_stderr": 0.025736542745594525 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.20085470085470086, + "acc_stderr": 0.02624677294689047, + "acc_norm": 0.20085470085470086, + "acc_norm_stderr": 0.02624677294689047 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.25660377358490566, + "acc_stderr": 0.026880647889051985, + "acc_norm": 0.25660377358490566, + "acc_norm_stderr": 0.026880647889051985 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2636363636363636, + "acc_stderr": 0.04220224692971987, + "acc_norm": 0.2636363636363636, + "acc_norm_stderr": 0.04220224692971987 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24814814814814815, + "acc_stderr": 0.0263357394040558, + "acc_norm": 0.24814814814814815, + "acc_norm_stderr": 0.0263357394040558 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3509933774834437, + "acc_stderr": 0.03896981964257374, + "acc_norm": 0.3509933774834437, + "acc_norm_stderr": 0.03896981964257374 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.2935323383084577, + "acc_stderr": 0.03220024104534205, + "acc_norm": 0.2935323383084577, + "acc_norm_stderr": 0.03220024104534205 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.26011560693641617, + "acc_stderr": 0.03345036916788991, + "acc_norm": 0.26011560693641617, + "acc_norm_stderr": 0.03345036916788991 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.24338624338624337, + "acc_stderr": 0.022101128787415426, + "acc_norm": 0.24338624338624337, + 
"acc_norm_stderr": 0.022101128787415426 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2569444444444444, + "acc_stderr": 0.03653946969442099, + "acc_norm": 0.2569444444444444, + "acc_norm_stderr": 0.03653946969442099 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2254335260115607, + "acc_stderr": 0.022497230190967547, + "acc_norm": 0.2254335260115607, + "acc_norm_stderr": 0.022497230190967547 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.26993865030674846, + "acc_stderr": 0.034878251684978906, + "acc_norm": 0.26993865030674846, + "acc_norm_stderr": 0.034878251684978906 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.28703703703703703, + "acc_stderr": 0.025171041915309684, + "acc_norm": 0.28703703703703703, + "acc_norm_stderr": 0.025171041915309684 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.35751295336787564, + "acc_stderr": 0.03458816042181006, + "acc_norm": 0.35751295336787564, + "acc_norm_stderr": 0.03458816042181006 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.30701754385964913, + "acc_stderr": 0.043391383225798594, + "acc_norm": 0.30701754385964913, + "acc_norm_stderr": 0.043391383225798594 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.24954128440366974, + "acc_stderr": 0.018553897629501614, + "acc_norm": 0.24954128440366974, + "acc_norm_stderr": 0.018553897629501614 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.373015873015873, + "acc_stderr": 0.04325506042017086, + "acc_norm": 
0.373015873015873, + "acc_norm_stderr": 0.04325506042017086 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.24183006535947713, + "acc_stderr": 0.024518195641879334, + "acc_norm": 0.24183006535947713, + "acc_norm_stderr": 0.024518195641879334 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036846, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036846 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.38016528925619836, + "acc_stderr": 0.04431324501968432, + "acc_norm": 0.38016528925619836, + "acc_norm_stderr": 0.04431324501968432 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.21710526315789475, + "acc_stderr": 0.03355045304882924, + "acc_norm": 0.21710526315789475, + "acc_norm_stderr": 0.03355045304882924 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.017630827375148383, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.017630827375148383 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.26595744680851063, + "acc_stderr": 0.026358065698880592, + "acc_norm": 0.26595744680851063, + "acc_norm_stderr": 0.026358065698880592 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25892857142857145, + "acc_stderr": 0.041577515398656284, + "acc_norm": 0.25892857142857145, + "acc_norm_stderr": 0.041577515398656284 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.0340470532865388, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.0340470532865388 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27262569832402234, + "acc_stderr": 0.014893391735249608, + "acc_norm": 0.27262569832402234, + "acc_norm_stderr": 0.014893391735249608 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.23, + 
"acc_stderr": 0.04229525846816505, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816505 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4522058823529412, + "acc_stderr": 0.030233758551596452, + "acc_norm": 0.4522058823529412, + "acc_norm_stderr": 0.030233758551596452 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.27346938775510204, + "acc_stderr": 0.02853556033712845, + "acc_norm": 0.27346938775510204, + "acc_norm_stderr": 0.02853556033712845 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.26582278481012656, + "acc_stderr": 0.02875679962965833, + "acc_norm": 0.26582278481012656, + "acc_norm_stderr": 0.02875679962965833 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2561929595827901, + "acc_stderr": 0.011149173153110583, + "acc_norm": 0.2561929595827901, + "acc_norm_stderr": 0.011149173153110583 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.24019607843137256, + "acc_stderr": 0.02998373305591361, + "acc_norm": 0.24019607843137256, + "acc_norm_stderr": 0.02998373305591361 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.24242424242424243, + "acc_stderr": 0.03346409881055953, + "acc_norm": 0.24242424242424243, + "acc_norm_stderr": 0.03346409881055953 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24479804161566707, + "mc1_stderr": 0.015051869486715006, + "mc2": 0.40367736123530334, + "mc2_stderr": 0.014824402657107816 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2396694214876033, + "acc_stderr": 0.014676495332267253, + "acc_norm": 0.36835891381345925, + "acc_norm_stderr": 0.016583858982639074 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + 
"harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + 
"harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "42dot/42dot_LLM-PLM-1.3B", + "model_sha": "a72bf57eb02cd4ea4388a344b4a5893aa95698da", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/42dot/42dot_LLM-SFT-1.3B/result_2023-10-18 01:47:03.json b/42dot/42dot_LLM-SFT-1.3B/result_2023-10-18 01:47:03.json new file mode 100644 index 0000000000000000000000000000000000000000..e28633aafb785215be2332ffde5050034d091502 --- /dev/null +++ b/42dot/42dot_LLM-SFT-1.3B/result_2023-10-18 01:47:03.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.28242320819112626, + "acc_stderr": 0.01315545688409722, + "acc_norm": 0.35494880546075086, + "acc_norm_stderr": 0.013983036904094094 + }, + "harness|ko_hellaswag|10": { + "acc": 0.36317466640111534, + "acc_stderr": 0.004799317209902023, + "acc_norm": 0.4613622784305915, + "acc_norm_stderr": 0.004974860878464429 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.32748538011695905, + "acc_stderr": 0.035993357714560276, + "acc_norm": 0.32748538011695905, + "acc_norm_stderr": 0.035993357714560276 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.13592233009708737, + "acc_stderr": 0.033932957297610124, + "acc_norm": 0.13592233009708737, + "acc_norm_stderr": 0.033932957297610124 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.23754789272030652, + "acc_stderr": 0.015218733046150193, + "acc_norm": 0.23754789272030652, + "acc_norm_stderr": 0.015218733046150193 + }, + "harness|ko_mmlu_anatomy|5": { + 
"acc": 0.23703703703703705, + "acc_stderr": 0.03673731683969506, + "acc_norm": 0.23703703703703705, + "acc_norm_stderr": 0.03673731683969506 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2723404255319149, + "acc_stderr": 0.029101290698386698, + "acc_norm": 0.2723404255319149, + "acc_norm_stderr": 0.029101290698386698 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.2710843373493976, + "acc_stderr": 0.034605799075530276, + "acc_norm": 0.2710843373493976, + "acc_norm_stderr": 0.034605799075530276 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2604501607717042, + "acc_stderr": 0.024926723224845543, + "acc_norm": 0.2604501607717042, + "acc_norm_stderr": 0.024926723224845543 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.242152466367713, + "acc_stderr": 0.028751392398694755, + "acc_norm": 0.242152466367713, + "acc_norm_stderr": 0.028751392398694755 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2366412213740458, + "acc_stderr": 0.03727673575596918, + "acc_norm": 0.2366412213740458, + "acc_norm_stderr": 0.03727673575596918 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.22727272727272727, + "acc_stderr": 0.02985751567338641, + "acc_norm": 0.22727272727272727, + "acc_norm_stderr": 0.02985751567338641 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2482758620689655, + "acc_stderr": 0.036001056927277716, + "acc_norm": 0.2482758620689655, + "acc_norm_stderr": 0.036001056927277716 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.1568627450980392, + "acc_stderr": 0.03618664819936246, + "acc_norm": 0.1568627450980392, + "acc_norm_stderr": 0.03618664819936246 + }, + 
"harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.24789915966386555, + "acc_stderr": 0.028047967224176892, + "acc_norm": 0.24789915966386555, + "acc_norm_stderr": 0.028047967224176892 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.23076923076923078, + "acc_stderr": 0.021362027725222728, + "acc_norm": 0.23076923076923078, + "acc_norm_stderr": 0.021362027725222728 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.19, + "acc_stderr": 0.03942772444036623, + "acc_norm": 0.19, + "acc_norm_stderr": 0.03942772444036623 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.042365112580946336, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.042365112580946336 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.18719211822660098, + "acc_stderr": 0.027444924966882618, + "acc_norm": 0.18719211822660098, + "acc_norm_stderr": 0.027444924966882618 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2645161290322581, + "acc_stderr": 0.02509189237885928, + "acc_norm": 0.2645161290322581, + "acc_norm_stderr": 0.02509189237885928 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.3034188034188034, + "acc_stderr": 0.030118210106942652, + "acc_norm": 0.3034188034188034, + "acc_norm_stderr": 0.030118210106942652 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2037735849056604, + "acc_stderr": 0.02479078450177541, + "acc_norm": 0.2037735849056604, + "acc_norm_stderr": 0.02479078450177541 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.22727272727272727, + "acc_stderr": 0.04013964554072775, + "acc_norm": 0.22727272727272727, + "acc_norm_stderr": 0.04013964554072775 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24074074074074073, + "acc_stderr": 0.026067159222275794, + "acc_norm": 
0.24074074074074073, + "acc_norm_stderr": 0.026067159222275794 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.24503311258278146, + "acc_stderr": 0.035118075718047245, + "acc_norm": 0.24503311258278146, + "acc_norm_stderr": 0.035118075718047245 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.24875621890547264, + "acc_stderr": 0.030567675938916707, + "acc_norm": 0.24875621890547264, + "acc_norm_stderr": 0.030567675938916707 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.18497109826589594, + "acc_stderr": 0.029605623981771204, + "acc_norm": 0.18497109826589594, + "acc_norm_stderr": 0.029605623981771204 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.25132275132275134, + "acc_stderr": 0.022340482339643898, + "acc_norm": 0.25132275132275134, + "acc_norm_stderr": 0.022340482339643898 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.22916666666666666, + "acc_stderr": 0.035146974678623884, + "acc_norm": 0.22916666666666666, + "acc_norm_stderr": 0.035146974678623884 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909284, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909284 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542129, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542129 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.23410404624277456, + "acc_stderr": 0.022797110278071128, + "acc_norm": 0.23410404624277456, + "acc_norm_stderr": 0.022797110278071128 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.26380368098159507, + "acc_stderr": 0.03462419931615624, + "acc_norm": 0.26380368098159507, + "acc_norm_stderr": 0.03462419931615624 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.28703703703703703, + "acc_stderr": 0.025171041915309684, + "acc_norm": 0.28703703703703703, + "acc_norm_stderr": 0.025171041915309684 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 
0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.20725388601036268, + "acc_stderr": 0.029252823291803644, + "acc_norm": 0.20725388601036268, + "acc_norm_stderr": 0.029252823291803644 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.04142439719489362, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.04142439719489362 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.22201834862385322, + "acc_stderr": 0.017818849564796624, + "acc_norm": 0.22201834862385322, + "acc_norm_stderr": 0.017818849564796624 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.31746031746031744, + "acc_stderr": 0.04163453031302859, + "acc_norm": 0.31746031746031744, + "acc_norm_stderr": 0.04163453031302859 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.24836601307189543, + "acc_stderr": 0.024739981355113592, + "acc_norm": 0.24836601307189543, + "acc_norm_stderr": 0.024739981355113592 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932269, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932269 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.32231404958677684, + "acc_stderr": 0.042664163633521685, + "acc_norm": 0.32231404958677684, + "acc_norm_stderr": 0.042664163633521685 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.20394736842105263, + "acc_stderr": 0.0327900040631005, + "acc_norm": 0.20394736842105263, + "acc_norm_stderr": 0.0327900040631005 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2581699346405229, + "acc_stderr": 0.017704531653250075, + "acc_norm": 0.2581699346405229, + "acc_norm_stderr": 0.017704531653250075 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2375886524822695, + "acc_stderr": 0.025389512552729903, + "acc_norm": 0.2375886524822695, + "acc_norm_stderr": 0.025389512552729903 + }, + 
"harness|ko_mmlu_machine_learning|5": { + "acc": 0.25, + "acc_stderr": 0.04109974682633932, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04109974682633932 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.03214952147802747, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.03214952147802747 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27262569832402234, + "acc_stderr": 0.014893391735249608, + "acc_norm": 0.27262569832402234, + "acc_norm_stderr": 0.014893391735249608 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.19, + "acc_stderr": 0.039427724440366234, + "acc_norm": 0.19, + "acc_norm_stderr": 0.039427724440366234 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.25, + "acc_stderr": 0.026303648393696036, + "acc_norm": 0.25, + "acc_norm_stderr": 0.026303648393696036 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.21224489795918366, + "acc_stderr": 0.026176967197866764, + "acc_norm": 0.21224489795918366, + "acc_norm_stderr": 0.026176967197866764 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.28270042194092826, + "acc_stderr": 0.029312814153955914, + "acc_norm": 0.28270042194092826, + "acc_norm_stderr": 0.029312814153955914 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2503259452411995, + "acc_stderr": 0.011064151027165438, + "acc_norm": 0.2503259452411995, + "acc_norm_stderr": 0.011064151027165438 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.25, + "acc_stderr": 0.03039153369274154, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03039153369274154 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.23030303030303031, + "acc_stderr": 0.03287666758603488, + "acc_norm": 0.23030303030303031, + "acc_norm_stderr": 0.03287666758603488 + 
}, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.28518971848225216, + "mc1_stderr": 0.015805827874454895, + "mc2": 0.43765472485909873, + "mc2_stderr": 0.015405588178148114 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2550177095631641, + "acc_stderr": 0.014985559533428578, + "acc_norm": 0.3754427390791027, + "acc_norm_stderr": 0.016648411589511095 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 
1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "42dot/42dot_LLM-SFT-1.3B", + "model_sha": "2dadd4492f0b27c302d8a5518003fa6045e32a8a", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/4n3mone/KoSOLAR_merge_test_v0.1/result_2024-02-21 07:42:24.json b/4n3mone/KoSOLAR_merge_test_v0.1/result_2024-02-21 07:42:24.json new file mode 100644 index 0000000000000000000000000000000000000000..05ac650135a277f2ff4c14e6404e449fb820e016 --- /dev/null +++ b/4n3mone/KoSOLAR_merge_test_v0.1/result_2024-02-21 07:42:24.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.27047781569965873, + "acc_stderr": 0.012980954547659556, + "acc_norm": 0.3302047781569966, + "acc_norm_stderr": 0.013743085603760424 + }, + "harness|ko_hellaswag|10": { + "acc": 0.27763393746265685, + 
"acc_stderr": 0.004469165728600334, + "acc_norm": 0.31607249551882094, + "acc_norm_stderr": 0.004639913709615934 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4269005847953216, + "acc_stderr": 0.03793620616529916, + "acc_norm": 0.4269005847953216, + "acc_norm_stderr": 0.03793620616529916 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.3592233009708738, + "acc_stderr": 0.047504583990416925, + "acc_norm": 0.3592233009708738, + "acc_norm_stderr": 0.047504583990416925 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.421455938697318, + "acc_stderr": 0.017657976412654857, + "acc_norm": 0.421455938697318, + "acc_norm_stderr": 0.017657976412654857 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3851851851851852, + "acc_stderr": 0.042039210401562783, + "acc_norm": 0.3851851851851852, + "acc_norm_stderr": 0.042039210401562783 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.28936170212765955, + "acc_stderr": 0.02964400657700962, + "acc_norm": 0.28936170212765955, + "acc_norm_stderr": 0.02964400657700962 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3493975903614458, + "acc_stderr": 0.037117251907407535, + "acc_norm": 0.3493975903614458, + "acc_norm_stderr": 0.037117251907407535 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4212218649517685, + "acc_stderr": 0.028043399858210635, + "acc_norm": 0.4212218649517685, + "acc_norm_stderr": 0.028043399858210635 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.336322869955157, + "acc_stderr": 0.031708824268455005, + "acc_norm": 0.336322869955157, + "acc_norm_stderr": 0.031708824268455005 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.31297709923664124, + "acc_stderr": 0.04066962905677697, + "acc_norm": 0.31297709923664124, + "acc_norm_stderr": 0.04066962905677697 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 
0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.40404040404040403, + "acc_stderr": 0.03496130972056128, + "acc_norm": 0.40404040404040403, + "acc_norm_stderr": 0.03496130972056128 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.31724137931034485, + "acc_stderr": 0.038783523721386215, + "acc_norm": 0.31724137931034485, + "acc_norm_stderr": 0.038783523721386215 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.19607843137254902, + "acc_stderr": 0.039505818611799616, + "acc_norm": 0.19607843137254902, + "acc_norm_stderr": 0.039505818611799616 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3235294117647059, + "acc_stderr": 0.03038835355188685, + "acc_norm": 0.3235294117647059, + "acc_norm_stderr": 0.03038835355188685 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.24615384615384617, + "acc_stderr": 0.021840866990423077, + "acc_norm": 0.24615384615384617, + "acc_norm_stderr": 0.021840866990423077 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.42, + "acc_stderr": 0.04960449637488584, + "acc_norm": 0.42, + "acc_norm_stderr": 0.04960449637488584 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.04489931073591312, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.04489931073591312 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.35960591133004927, + "acc_stderr": 0.03376458246509567, + "acc_norm": 0.35960591133004927, + "acc_norm_stderr": 0.03376458246509567 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.36129032258064514, + "acc_stderr": 0.02732754844795755, + "acc_norm": 0.36129032258064514, + "acc_norm_stderr": 0.02732754844795755 + }, + 
"harness|ko_mmlu_marketing|5": { + "acc": 0.44017094017094016, + "acc_stderr": 0.032520741720630506, + "acc_norm": 0.44017094017094016, + "acc_norm_stderr": 0.032520741720630506 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.30943396226415093, + "acc_stderr": 0.028450154794118627, + "acc_norm": 0.30943396226415093, + "acc_norm_stderr": 0.028450154794118627 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.38181818181818183, + "acc_stderr": 0.046534298079135075, + "acc_norm": 0.38181818181818183, + "acc_norm_stderr": 0.046534298079135075 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.23703703703703705, + "acc_stderr": 0.02592887613276613, + "acc_norm": 0.23703703703703705, + "acc_norm_stderr": 0.02592887613276613 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.1986754966887417, + "acc_stderr": 0.032578473844367746, + "acc_norm": 0.1986754966887417, + "acc_norm_stderr": 0.032578473844367746 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.03333333333333336, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.03333333333333336 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2832369942196532, + "acc_stderr": 0.03435568056047873, + "acc_norm": 0.2832369942196532, + "acc_norm_stderr": 0.03435568056047873 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.24338624338624337, + "acc_stderr": 0.022101128787415415, + "acc_norm": 0.24338624338624337, + "acc_norm_stderr": 0.022101128787415415 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.24305555555555555, + "acc_stderr": 0.03586879280080341, + "acc_norm": 0.24305555555555555, + "acc_norm_stderr": 0.03586879280080341 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + 
"acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.33815028901734107, + "acc_stderr": 0.02546977014940017, + "acc_norm": 0.33815028901734107, + "acc_norm_stderr": 0.02546977014940017 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.34355828220858897, + "acc_stderr": 0.03731133519673893, + "acc_norm": 0.34355828220858897, + "acc_norm_stderr": 0.03731133519673893 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.02672586880910079, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.02672586880910079 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.31088082901554404, + "acc_stderr": 0.03340361906276586, + "acc_norm": 0.31088082901554404, + "acc_norm_stderr": 0.03340361906276586 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.03999423879281335, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.03999423879281335 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3559633027522936, + "acc_stderr": 0.020528559278244214, + "acc_norm": 0.3559633027522936, + "acc_norm_stderr": 0.020528559278244214 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.1984126984126984, + "acc_stderr": 0.03567016675276862, + "acc_norm": 0.1984126984126984, + "acc_norm_stderr": 0.03567016675276862 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3627450980392157, + "acc_stderr": 0.0275300784471103, + "acc_norm": 0.3627450980392157, + "acc_norm_stderr": 0.0275300784471103 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.47107438016528924, + "acc_stderr": 0.04556710331269498, + 
"acc_norm": 0.47107438016528924, + "acc_norm_stderr": 0.04556710331269498 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4144736842105263, + "acc_stderr": 0.04008973785779205, + "acc_norm": 0.4144736842105263, + "acc_norm_stderr": 0.04008973785779205 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3431372549019608, + "acc_stderr": 0.019206606848825365, + "acc_norm": 0.3431372549019608, + "acc_norm_stderr": 0.019206606848825365 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.25886524822695034, + "acc_stderr": 0.026129572527180848, + "acc_norm": 0.25886524822695034, + "acc_norm_stderr": 0.026129572527180848 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.29464285714285715, + "acc_stderr": 0.043270409325787296, + "acc_norm": 0.29464285714285715, + "acc_norm_stderr": 0.043270409325787296 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.19444444444444445, + "acc_stderr": 0.026991454502036744, + "acc_norm": 0.19444444444444445, + "acc_norm_stderr": 0.026991454502036744 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2346368715083799, + "acc_stderr": 0.014173044098303673, + "acc_norm": 0.2346368715083799, + "acc_norm_stderr": 0.014173044098303673 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036845, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036845 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.16176470588235295, + "acc_stderr": 0.02236867256288675, + "acc_norm": 0.16176470588235295, + "acc_norm_stderr": 0.02236867256288675 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2693877551020408, + "acc_stderr": 0.02840125202902294, + "acc_norm": 0.2693877551020408, + "acc_norm_stderr": 0.02840125202902294 + }, + 
"harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.3459915611814346, + "acc_stderr": 0.030964810588786713, + "acc_norm": 0.3459915611814346, + "acc_norm_stderr": 0.030964810588786713 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2907431551499348, + "acc_stderr": 0.011598062372851988, + "acc_norm": 0.2907431551499348, + "acc_norm_stderr": 0.011598062372851988 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.3088235294117647, + "acc_stderr": 0.03242661719827218, + "acc_norm": 0.3088235294117647, + "acc_norm_stderr": 0.03242661719827218 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.34545454545454546, + "acc_stderr": 0.03713158067481913, + "acc_norm": 0.34545454545454546, + "acc_norm_stderr": 0.03713158067481913 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.29498164014687883, + "mc1_stderr": 0.015964400965589667, + "mc2": 0.536422656873522, + "mc2_stderr": 0.016939809819665783 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.1641086186540732, + "acc_stderr": 0.012733724137996926, + "acc_norm": 0.4639905548996458, + "acc_norm_stderr": 0.017145715365486654 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, 
+ "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "4n3mone/KoSOLAR_merge_test_v0.1", + "model_sha": "5bb8e2693638d22227dff2b59b1f06dbbe59cc9a", + "model_dtype": "torch.float16", + 
"lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/4yo1/llama/result_2024-07-10 12:39:52.json b/4yo1/llama/result_2024-07-10 12:39:52.json new file mode 100644 index 0000000000000000000000000000000000000000..8ab4e4626906658cb0d950fcdfcaed734948d22f --- /dev/null +++ b/4yo1/llama/result_2024-07-10 12:39:52.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3720136518771331, + "acc_stderr": 0.014124597881844461, + "acc_norm": 0.4325938566552901, + "acc_norm_stderr": 0.014478005694182531 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3584943238398725, + "acc_stderr": 0.004785781979354873, + "acc_norm": 0.46564429396534557, + "acc_norm_stderr": 0.00497798845250264 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.543859649122807, + "acc_stderr": 0.03820042586602966, + "acc_norm": 0.543859649122807, + "acc_norm_stderr": 0.03820042586602966 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5728155339805825, + "acc_stderr": 0.04897957737781168, + "acc_norm": 0.5728155339805825, + "acc_norm_stderr": 0.04897957737781168 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4482758620689655, + "acc_stderr": 0.017784034534992457, + "acc_norm": 0.4482758620689655, + "acc_norm_stderr": 0.017784034534992457 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.362962962962963, + "acc_stderr": 0.04153948404742399, + "acc_norm": 0.362962962962963, + "acc_norm_stderr": 0.04153948404742399 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4978723404255319, + "acc_stderr": 0.032685726586674915, + "acc_norm": 0.4978723404255319, + "acc_norm_stderr": 0.032685726586674915 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3674698795180723, + "acc_stderr": 
0.03753267402120574, + "acc_norm": 0.3674698795180723, + "acc_norm_stderr": 0.03753267402120574 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5369774919614148, + "acc_stderr": 0.028320325830105915, + "acc_norm": 0.5369774919614148, + "acc_norm_stderr": 0.028320325830105915 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.49327354260089684, + "acc_stderr": 0.033554765962343545, + "acc_norm": 0.49327354260089684, + "acc_norm_stderr": 0.033554765962343545 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5190839694656488, + "acc_stderr": 0.04382094705550988, + "acc_norm": 0.5190839694656488, + "acc_norm_stderr": 0.04382094705550988 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5202020202020202, + "acc_stderr": 0.035594435655639196, + "acc_norm": 0.5202020202020202, + "acc_norm_stderr": 0.035594435655639196 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5655172413793104, + "acc_stderr": 0.04130740879555498, + "acc_norm": 0.5655172413793104, + "acc_norm_stderr": 0.04130740879555498 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04690650298201942, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04690650298201942 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5294117647058824, + "acc_stderr": 0.032422250271150053, + "acc_norm": 0.5294117647058824, + "acc_norm_stderr": 0.032422250271150053 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5076923076923077, + "acc_stderr": 0.02534800603153475, + "acc_norm": 0.5076923076923077, + "acc_norm_stderr": 0.02534800603153475 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.58, + "acc_stderr": 0.04960449637488583, + "acc_norm": 0.58, + "acc_norm_stderr": 0.04960449637488583 + }, + "harness|ko_mmlu_global_facts|5": { + 
"acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6111111111111112, + "acc_stderr": 0.0471282125742677, + "acc_norm": 0.6111111111111112, + "acc_norm_stderr": 0.0471282125742677 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.46798029556650245, + "acc_stderr": 0.035107665979592154, + "acc_norm": 0.46798029556650245, + "acc_norm_stderr": 0.035107665979592154 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.532258064516129, + "acc_stderr": 0.028384747788813336, + "acc_norm": 0.532258064516129, + "acc_norm_stderr": 0.028384747788813336 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7521367521367521, + "acc_stderr": 0.028286324075564414, + "acc_norm": 0.7521367521367521, + "acc_norm_stderr": 0.028286324075564414 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5320754716981132, + "acc_stderr": 0.030709486992556552, + "acc_norm": 0.5320754716981132, + "acc_norm_stderr": 0.030709486992556552 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5181818181818182, + "acc_stderr": 0.04785964010794915, + "acc_norm": 0.5181818181818182, + "acc_norm_stderr": 0.04785964010794915 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.37407407407407406, + "acc_stderr": 0.02950286112895529, + "acc_norm": 0.37407407407407406, + "acc_norm_stderr": 0.02950286112895529 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.39072847682119205, + "acc_stderr": 0.03983798306659809, + "acc_norm": 0.39072847682119205, + "acc_norm_stderr": 0.03983798306659809 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6567164179104478, + "acc_stderr": 0.03357379665433431, + "acc_norm": 0.6567164179104478, + "acc_norm_stderr": 0.03357379665433431 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4508670520231214, + "acc_stderr": 0.03794012674697029, + "acc_norm": 0.4508670520231214, + "acc_norm_stderr": 
0.03794012674697029 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3835978835978836, + "acc_stderr": 0.025043757318520203, + "acc_norm": 0.3835978835978836, + "acc_norm_stderr": 0.025043757318520203 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4861111111111111, + "acc_stderr": 0.04179596617581, + "acc_norm": 0.4861111111111111, + "acc_norm_stderr": 0.04179596617581 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.69, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.69, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5404624277456648, + "acc_stderr": 0.02683080599895224, + "acc_norm": 0.5404624277456648, + "acc_norm_stderr": 0.02683080599895224 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.43558282208588955, + "acc_stderr": 0.03895632464138936, + "acc_norm": 0.43558282208588955, + "acc_norm_stderr": 0.03895632464138936 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5246913580246914, + "acc_stderr": 0.027786800931427443, + "acc_norm": 0.5246913580246914, + "acc_norm_stderr": 0.027786800931427443 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.047609522856952344, + "acc_norm": 0.34, + "acc_norm_stderr": 0.047609522856952344 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5647668393782384, + "acc_stderr": 0.03578038165008585, + "acc_norm": 0.5647668393782384, + "acc_norm_stderr": 0.03578038165008585 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.37719298245614036, + "acc_stderr": 0.04559522141958216, + "acc_norm": 0.37719298245614036, + "acc_norm_stderr": 0.04559522141958216 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5688073394495413, + "acc_stderr": 0.021233365030319567, + "acc_norm": 0.5688073394495413, + 
"acc_norm_stderr": 0.021233365030319567 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.044444444444444495, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.044444444444444495 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5294117647058824, + "acc_stderr": 0.028580341065138293, + "acc_norm": 0.5294117647058824, + "acc_norm_stderr": 0.028580341065138293 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.62, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.62, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7024793388429752, + "acc_stderr": 0.04173349148083498, + "acc_norm": 0.7024793388429752, + "acc_norm_stderr": 0.04173349148083498 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.46710526315789475, + "acc_stderr": 0.040601270352363966, + "acc_norm": 0.46710526315789475, + "acc_norm_stderr": 0.040601270352363966 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.44607843137254904, + "acc_stderr": 0.02010986454718136, + "acc_norm": 0.44607843137254904, + "acc_norm_stderr": 0.02010986454718136 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.34397163120567376, + "acc_stderr": 0.028338017428611324, + "acc_norm": 0.34397163120567376, + "acc_norm_stderr": 0.028338017428611324 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.44642857142857145, + "acc_stderr": 0.047184714852195886, + "acc_norm": 0.44642857142857145, + "acc_norm_stderr": 0.047184714852195886 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.03350991604696044, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.03350991604696044 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.3027932960893855, + "acc_stderr": 0.015366860386397114, + "acc_norm": 0.3027932960893855, + "acc_norm_stderr": 0.015366860386397114 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.45, + 
"acc_stderr": 0.04999999999999999, + "acc_norm": 0.45, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.7, + "acc_stderr": 0.04605661864718381, + "acc_norm": 0.7, + "acc_norm_stderr": 0.04605661864718381 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3860294117647059, + "acc_stderr": 0.029573269134411124, + "acc_norm": 0.3860294117647059, + "acc_norm_stderr": 0.029573269134411124 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6326530612244898, + "acc_stderr": 0.03086214492108756, + "acc_norm": 0.6326530612244898, + "acc_norm_stderr": 0.03086214492108756 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6413502109704642, + "acc_stderr": 0.03121956944530185, + "acc_norm": 0.6413502109704642, + "acc_norm_stderr": 0.03121956944530185 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.36897001303780963, + "acc_stderr": 0.012323936650174859, + "acc_norm": 0.36897001303780963, + "acc_norm_stderr": 0.012323936650174859 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5392156862745098, + "acc_stderr": 0.03498501649369527, + "acc_norm": 0.5392156862745098, + "acc_norm_stderr": 0.03498501649369527 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6545454545454545, + "acc_stderr": 0.037131580674819135, + "acc_norm": 0.6545454545454545, + "acc_norm_stderr": 0.037131580674819135 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.31334149326805383, + "mc1_stderr": 0.016238065069059622, + "mc2": 0.48760282451527875, + "mc2_stderr": 0.01585626318171357 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4781582054309327, + "acc_stderr": 0.01717394447429438, + "acc_norm": 0.5076741440377804, + "acc_norm_stderr": 0.017188329219654273 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + 
"harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 
1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "4yo1/llama", + "model_sha": "2f7568051dbd3185c834cac527616af8378f9206", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/4yo1/llama3-eng-ko-8-llama/result_2024-07-02 09:08:08.json b/4yo1/llama3-eng-ko-8-llama/result_2024-07-02 09:08:08.json new file mode 100644 index 0000000000000000000000000000000000000000..8b722b5f205efff6c1ae5aea15952e480dec3853 --- /dev/null +++ b/4yo1/llama3-eng-ko-8-llama/result_2024-07-02 09:08:08.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.25341296928327645, + "acc_stderr": 0.012710896778378602, + "acc_norm": 0.31313993174061433, + "acc_norm_stderr": 0.013552671543623492 + }, + "harness|ko_hellaswag|10": { + "acc": 0.32812188807010556, + "acc_stderr": 0.004685698752104808, + "acc_norm": 0.40928101971718783, + "acc_norm_stderr": 0.004906962980328288 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.42105263157894735, + "acc_stderr": 0.037867207062342145, + "acc_norm": 0.42105263157894735, + "acc_norm_stderr": 0.037867207062342145 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2912621359223301, + "acc_stderr": 0.044986763205729224, + "acc_norm": 0.2912621359223301, + "acc_norm_stderr": 0.044986763205729224 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3614303959131545, + 
"acc_stderr": 0.01717960132890074, + "acc_norm": 0.3614303959131545, + "acc_norm_stderr": 0.01717960132890074 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.28888888888888886, + "acc_stderr": 0.0391545063041425, + "acc_norm": 0.28888888888888886, + "acc_norm_stderr": 0.0391545063041425 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3659574468085106, + "acc_stderr": 0.0314895582974553, + "acc_norm": 0.3659574468085106, + "acc_norm_stderr": 0.0314895582974553 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.28313253012048195, + "acc_stderr": 0.03507295431370518, + "acc_norm": 0.28313253012048195, + "acc_norm_stderr": 0.03507295431370518 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.33762057877813506, + "acc_stderr": 0.026858825879488544, + "acc_norm": 0.33762057877813506, + "acc_norm_stderr": 0.026858825879488544 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.34977578475336324, + "acc_stderr": 0.03200736719484503, + "acc_norm": 0.34977578475336324, + "acc_norm_stderr": 0.03200736719484503 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.32061068702290074, + "acc_stderr": 0.04093329229834278, + "acc_norm": 0.32061068702290074, + "acc_norm_stderr": 0.04093329229834278 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3383838383838384, + "acc_stderr": 0.03371124142626303, + "acc_norm": 0.3383838383838384, + "acc_norm_stderr": 0.03371124142626303 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5379310344827586, + "acc_stderr": 0.04154659671707548, + "acc_norm": 0.5379310344827586, + "acc_norm_stderr": 0.04154659671707548 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + 
"acc_stderr": 0.04023382273617749, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.04023382273617749 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.37815126050420167, + "acc_stderr": 0.031499305777849054, + "acc_norm": 0.37815126050420167, + "acc_norm_stderr": 0.031499305777849054 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.31794871794871793, + "acc_stderr": 0.02361088430892786, + "acc_norm": 0.31794871794871793, + "acc_norm_stderr": 0.02361088430892786 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.39814814814814814, + "acc_stderr": 0.04732332615978815, + "acc_norm": 0.39814814814814814, + "acc_norm_stderr": 0.04732332615978815 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3399014778325123, + "acc_stderr": 0.033327690684107895, + "acc_norm": 0.3399014778325123, + "acc_norm_stderr": 0.033327690684107895 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.38387096774193546, + "acc_stderr": 0.027666182075539635, + "acc_norm": 0.38387096774193546, + "acc_norm_stderr": 0.027666182075539635 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5598290598290598, + "acc_stderr": 0.032520741720630506, + "acc_norm": 0.5598290598290598, + "acc_norm_stderr": 0.032520741720630506 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.39622641509433965, + "acc_stderr": 0.03010279378179119, + "acc_norm": 0.39622641509433965, + "acc_norm_stderr": 0.03010279378179119 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4, + "acc_stderr": 0.0469237132203465, + "acc_norm": 0.4, + "acc_norm_stderr": 0.0469237132203465 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + 
"acc": 0.34444444444444444, + "acc_stderr": 0.028972648884844267, + "acc_norm": 0.34444444444444444, + "acc_norm_stderr": 0.028972648884844267 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + "acc_stderr": 0.03684881521389023, + "acc_norm": 0.2847682119205298, + "acc_norm_stderr": 0.03684881521389023 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.40298507462686567, + "acc_stderr": 0.034683432951111266, + "acc_norm": 0.40298507462686567, + "acc_norm_stderr": 0.034683432951111266 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.31213872832369943, + "acc_stderr": 0.035331333893236574, + "acc_norm": 0.31213872832369943, + "acc_norm_stderr": 0.035331333893236574 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3253968253968254, + "acc_stderr": 0.024130158299762613, + "acc_norm": 0.3253968253968254, + "acc_norm_stderr": 0.024130158299762613 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2916666666666667, + "acc_stderr": 0.03800968060554858, + "acc_norm": 0.2916666666666667, + "acc_norm_stderr": 0.03800968060554858 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.3352601156069364, + "acc_stderr": 0.025416003773165555, + "acc_norm": 0.3352601156069364, + "acc_norm_stderr": 0.025416003773165555 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3558282208588957, + "acc_stderr": 0.03761521380046734, + "acc_norm": 0.3558282208588957, + "acc_norm_stderr": 0.03761521380046734 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3425925925925926, + "acc_stderr": 0.026406145973625672, + "acc_norm": 0.3425925925925926, + "acc_norm_stderr": 0.026406145973625672 + }, + 
"harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.35751295336787564, + "acc_stderr": 0.03458816042181006, + "acc_norm": 0.35751295336787564, + "acc_norm_stderr": 0.03458816042181006 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.04227054451232199, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.04227054451232199 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3467889908256881, + "acc_stderr": 0.020406097104093024, + "acc_norm": 0.3467889908256881, + "acc_norm_stderr": 0.020406097104093024 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2698412698412698, + "acc_stderr": 0.03970158273235172, + "acc_norm": 0.2698412698412698, + "acc_norm_stderr": 0.03970158273235172 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.38562091503267976, + "acc_stderr": 0.027870745278290313, + "acc_norm": 0.38562091503267976, + "acc_norm_stderr": 0.027870745278290313 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.49586776859504134, + "acc_stderr": 0.04564198767432754, + "acc_norm": 0.49586776859504134, + "acc_norm_stderr": 0.04564198767432754 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.2565789473684211, + "acc_stderr": 0.0355418036802569, + "acc_norm": 0.2565789473684211, + "acc_norm_stderr": 0.0355418036802569 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.31209150326797386, + "acc_stderr": 0.018745011201277657, + "acc_norm": 0.31209150326797386, + "acc_norm_stderr": 0.018745011201277657 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2695035460992908, + "acc_stderr": 0.026469036818590634, + "acc_norm": 0.2695035460992908, + 
"acc_norm_stderr": 0.026469036818590634 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.04547960999764376, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.04547960999764376 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3101851851851852, + "acc_stderr": 0.031546962856566295, + "acc_norm": 0.3101851851851852, + "acc_norm_stderr": 0.031546962856566295 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23687150837988827, + "acc_stderr": 0.014219570788103982, + "acc_norm": 0.23687150837988827, + "acc_norm_stderr": 0.014219570788103982 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.41, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3382352941176471, + "acc_stderr": 0.028739328513983583, + "acc_norm": 0.3382352941176471, + "acc_norm_stderr": 0.028739328513983583 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.21224489795918366, + "acc_stderr": 0.026176967197866764, + "acc_norm": 0.21224489795918366, + "acc_norm_stderr": 0.026176967197866764 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.4430379746835443, + "acc_stderr": 0.03233532777533484, + "acc_norm": 0.4430379746835443, + "acc_norm_stderr": 0.03233532777533484 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2803129074315515, + "acc_stderr": 0.01147155594495862, + "acc_norm": 0.2803129074315515, + "acc_norm_stderr": 0.01147155594495862 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.31862745098039214, + "acc_stderr": 0.0327028718148208, + "acc_norm": 0.31862745098039214, + "acc_norm_stderr": 0.0327028718148208 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 
0.3151515151515151, + "acc_stderr": 0.0362773057502241, + "acc_norm": 0.3151515151515151, + "acc_norm_stderr": 0.0362773057502241 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2913096695226438, + "mc1_stderr": 0.01590598704818483, + "mc2": 0.44739358713372257, + "mc2_stderr": 0.015551996419791592 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2987012987012987, + "acc_stderr": 0.01573565739143828, + "acc_norm": 0.4085005903187721, + "acc_norm_stderr": 0.01690006287942712 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 
1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "4yo1/llama3-eng-ko-8-llama", + "model_sha": "e2c5c277fae6b14e4a8f6c45075f0518199d3e95", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/4yo1/llama3-eng-ko-8/result_2024-07-02 08:52:49.json b/4yo1/llama3-eng-ko-8/result_2024-07-02 08:52:49.json new file mode 100644 index 0000000000000000000000000000000000000000..f80c5f2fd531ecdf939d7a89f5ae7948fb2b13b1 --- /dev/null +++ b/4yo1/llama3-eng-ko-8/result_2024-07-02 08:52:49.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.25341296928327645, + "acc_stderr": 0.012710896778378602, + "acc_norm": 0.31313993174061433, + 
"acc_norm_stderr": 0.013552671543623492 + }, + "harness|ko_hellaswag|10": { + "acc": 0.32812188807010556, + "acc_stderr": 0.004685698752104808, + "acc_norm": 0.40928101971718783, + "acc_norm_stderr": 0.004906962980328288 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.42105263157894735, + "acc_stderr": 0.037867207062342145, + "acc_norm": 0.42105263157894735, + "acc_norm_stderr": 0.037867207062342145 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2912621359223301, + "acc_stderr": 0.044986763205729224, + "acc_norm": 0.2912621359223301, + "acc_norm_stderr": 0.044986763205729224 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3614303959131545, + "acc_stderr": 0.01717960132890074, + "acc_norm": 0.3614303959131545, + "acc_norm_stderr": 0.01717960132890074 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.28888888888888886, + "acc_stderr": 0.0391545063041425, + "acc_norm": 0.28888888888888886, + "acc_norm_stderr": 0.0391545063041425 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3659574468085106, + "acc_stderr": 0.0314895582974553, + "acc_norm": 0.3659574468085106, + "acc_norm_stderr": 0.0314895582974553 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.28313253012048195, + "acc_stderr": 0.03507295431370518, + "acc_norm": 0.28313253012048195, + "acc_norm_stderr": 0.03507295431370518 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.33762057877813506, + "acc_stderr": 0.026858825879488544, + "acc_norm": 0.33762057877813506, + "acc_norm_stderr": 0.026858825879488544 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.34977578475336324, + "acc_stderr": 0.03200736719484503, + "acc_norm": 0.34977578475336324, + "acc_norm_stderr": 0.03200736719484503 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.32061068702290074, + "acc_stderr": 0.04093329229834278, + "acc_norm": 
0.32061068702290074, + "acc_norm_stderr": 0.04093329229834278 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3383838383838384, + "acc_stderr": 0.03371124142626303, + "acc_norm": 0.3383838383838384, + "acc_norm_stderr": 0.03371124142626303 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5379310344827586, + "acc_stderr": 0.04154659671707548, + "acc_norm": 0.5379310344827586, + "acc_norm_stderr": 0.04154659671707548 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.04023382273617749, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.04023382273617749 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.37815126050420167, + "acc_stderr": 0.031499305777849054, + "acc_norm": 0.37815126050420167, + "acc_norm_stderr": 0.031499305777849054 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.31794871794871793, + "acc_stderr": 0.02361088430892786, + "acc_norm": 0.31794871794871793, + "acc_norm_stderr": 0.02361088430892786 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.39814814814814814, + "acc_stderr": 0.04732332615978815, + "acc_norm": 0.39814814814814814, + "acc_norm_stderr": 0.04732332615978815 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3399014778325123, + "acc_stderr": 0.033327690684107895, + "acc_norm": 0.3399014778325123, + "acc_norm_stderr": 0.033327690684107895 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.38387096774193546, + "acc_stderr": 
0.027666182075539635, + "acc_norm": 0.38387096774193546, + "acc_norm_stderr": 0.027666182075539635 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5598290598290598, + "acc_stderr": 0.032520741720630506, + "acc_norm": 0.5598290598290598, + "acc_norm_stderr": 0.032520741720630506 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.39622641509433965, + "acc_stderr": 0.03010279378179119, + "acc_norm": 0.39622641509433965, + "acc_norm_stderr": 0.03010279378179119 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4, + "acc_stderr": 0.0469237132203465, + "acc_norm": 0.4, + "acc_norm_stderr": 0.0469237132203465 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.34444444444444444, + "acc_stderr": 0.028972648884844267, + "acc_norm": 0.34444444444444444, + "acc_norm_stderr": 0.028972648884844267 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + "acc_stderr": 0.03684881521389023, + "acc_norm": 0.2847682119205298, + "acc_norm_stderr": 0.03684881521389023 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.40298507462686567, + "acc_stderr": 0.034683432951111266, + "acc_norm": 0.40298507462686567, + "acc_norm_stderr": 0.034683432951111266 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.31213872832369943, + "acc_stderr": 0.035331333893236574, + "acc_norm": 0.31213872832369943, + "acc_norm_stderr": 0.035331333893236574 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3253968253968254, + "acc_stderr": 0.024130158299762613, + "acc_norm": 0.3253968253968254, + "acc_norm_stderr": 0.024130158299762613 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2916666666666667, + "acc_stderr": 0.03800968060554858, + "acc_norm": 0.2916666666666667, + "acc_norm_stderr": 0.03800968060554858 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 
0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.3352601156069364, + "acc_stderr": 0.025416003773165555, + "acc_norm": 0.3352601156069364, + "acc_norm_stderr": 0.025416003773165555 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3558282208588957, + "acc_stderr": 0.03761521380046734, + "acc_norm": 0.3558282208588957, + "acc_norm_stderr": 0.03761521380046734 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3425925925925926, + "acc_stderr": 0.026406145973625672, + "acc_norm": 0.3425925925925926, + "acc_norm_stderr": 0.026406145973625672 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.35751295336787564, + "acc_stderr": 0.03458816042181006, + "acc_norm": 0.35751295336787564, + "acc_norm_stderr": 0.03458816042181006 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.04227054451232199, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.04227054451232199 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3467889908256881, + "acc_stderr": 0.020406097104093024, + "acc_norm": 0.3467889908256881, + "acc_norm_stderr": 0.020406097104093024 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2698412698412698, + "acc_stderr": 0.03970158273235172, + "acc_norm": 0.2698412698412698, + "acc_norm_stderr": 0.03970158273235172 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.38562091503267976, + "acc_stderr": 0.027870745278290313, + "acc_norm": 0.38562091503267976, + "acc_norm_stderr": 0.027870745278290313 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_international_law|5": { + 
"acc": 0.49586776859504134, + "acc_stderr": 0.04564198767432754, + "acc_norm": 0.49586776859504134, + "acc_norm_stderr": 0.04564198767432754 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.2565789473684211, + "acc_stderr": 0.0355418036802569, + "acc_norm": 0.2565789473684211, + "acc_norm_stderr": 0.0355418036802569 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.31209150326797386, + "acc_stderr": 0.018745011201277657, + "acc_norm": 0.31209150326797386, + "acc_norm_stderr": 0.018745011201277657 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2695035460992908, + "acc_stderr": 0.026469036818590634, + "acc_norm": 0.2695035460992908, + "acc_norm_stderr": 0.026469036818590634 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.04547960999764376, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.04547960999764376 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3101851851851852, + "acc_stderr": 0.031546962856566295, + "acc_norm": 0.3101851851851852, + "acc_norm_stderr": 0.031546962856566295 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23687150837988827, + "acc_stderr": 0.014219570788103982, + "acc_norm": 0.23687150837988827, + "acc_norm_stderr": 0.014219570788103982 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.41, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3382352941176471, + "acc_stderr": 0.028739328513983583, + "acc_norm": 0.3382352941176471, + "acc_norm_stderr": 0.028739328513983583 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.21224489795918366, + "acc_stderr": 0.026176967197866764, + "acc_norm": 0.21224489795918366, + "acc_norm_stderr": 
0.026176967197866764 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.4430379746835443, + "acc_stderr": 0.03233532777533484, + "acc_norm": 0.4430379746835443, + "acc_norm_stderr": 0.03233532777533484 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2803129074315515, + "acc_stderr": 0.01147155594495862, + "acc_norm": 0.2803129074315515, + "acc_norm_stderr": 0.01147155594495862 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.31862745098039214, + "acc_stderr": 0.0327028718148208, + "acc_norm": 0.31862745098039214, + "acc_norm_stderr": 0.0327028718148208 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3151515151515151, + "acc_stderr": 0.0362773057502241, + "acc_norm": 0.3151515151515151, + "acc_norm_stderr": 0.0362773057502241 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2913096695226438, + "mc1_stderr": 0.01590598704818483, + "mc2": 0.44739358713372257, + "mc2_stderr": 0.015551996419791592 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2987012987012987, + "acc_stderr": 0.01573565739143828, + "acc_norm": 0.4085005903187721, + "acc_norm_stderr": 0.01690006287942712 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + 
"harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "4yo1/llama3-eng-ko-8", + "model_sha": "e2c5c277fae6b14e4a8f6c45075f0518199d3e95", + 
"model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/4yo1/llama3-eng-ko-8/result_2024-07-02 08:54:35.json b/4yo1/llama3-eng-ko-8/result_2024-07-02 08:54:35.json new file mode 100644 index 0000000000000000000000000000000000000000..974b368b460f4f9ad3e9ef563ab34c8bf2146eab --- /dev/null +++ b/4yo1/llama3-eng-ko-8/result_2024-07-02 08:54:35.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2525597269624573, + "acc_stderr": 0.012696728980207708, + "acc_norm": 0.31399317406143346, + "acc_norm_stderr": 0.013562691224726304 + }, + "harness|ko_hellaswag|10": { + "acc": 0.32752439753037244, + "acc_stderr": 0.004683511716552236, + "acc_norm": 0.40967934674367656, + "acc_norm_stderr": 0.0049076947279356915 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.42105263157894735, + "acc_stderr": 0.037867207062342145, + "acc_norm": 0.42105263157894735, + "acc_norm_stderr": 0.037867207062342145 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2912621359223301, + "acc_stderr": 0.044986763205729224, + "acc_norm": 0.2912621359223301, + "acc_norm_stderr": 0.044986763205729224 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.36015325670498083, + "acc_stderr": 0.0171663624713693, + "acc_norm": 0.36015325670498083, + "acc_norm_stderr": 0.0171663624713693 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.28888888888888886, + "acc_stderr": 0.0391545063041425, + "acc_norm": 0.28888888888888886, + "acc_norm_stderr": 0.0391545063041425 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3659574468085106, + "acc_stderr": 0.0314895582974553, + "acc_norm": 0.3659574468085106, + "acc_norm_stderr": 0.0314895582974553 + }, + 
"harness|ko_mmlu_virology|5": { + "acc": 0.28313253012048195, + "acc_stderr": 0.03507295431370518, + "acc_norm": 0.28313253012048195, + "acc_norm_stderr": 0.03507295431370518 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.33762057877813506, + "acc_stderr": 0.026858825879488544, + "acc_norm": 0.33762057877813506, + "acc_norm_stderr": 0.026858825879488544 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.34977578475336324, + "acc_stderr": 0.03200736719484503, + "acc_norm": 0.34977578475336324, + "acc_norm_stderr": 0.03200736719484503 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.32061068702290074, + "acc_stderr": 0.04093329229834278, + "acc_norm": 0.32061068702290074, + "acc_norm_stderr": 0.04093329229834278 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3383838383838384, + "acc_stderr": 0.03371124142626303, + "acc_norm": 0.3383838383838384, + "acc_norm_stderr": 0.03371124142626303 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5379310344827586, + "acc_stderr": 0.04154659671707548, + "acc_norm": 0.5379310344827586, + "acc_norm_stderr": 0.04154659671707548 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.04023382273617749, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.04023382273617749 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.37815126050420167, + "acc_stderr": 0.031499305777849054, + "acc_norm": 0.37815126050420167, + "acc_norm_stderr": 0.031499305777849054 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3153846153846154, + "acc_stderr": 0.02355964698318995, + "acc_norm": 0.3153846153846154, + "acc_norm_stderr": 0.02355964698318995 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + 
"acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.39814814814814814, + "acc_stderr": 0.04732332615978815, + "acc_norm": 0.39814814814814814, + "acc_norm_stderr": 0.04732332615978815 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3399014778325123, + "acc_stderr": 0.033327690684107895, + "acc_norm": 0.3399014778325123, + "acc_norm_stderr": 0.033327690684107895 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.38387096774193546, + "acc_stderr": 0.027666182075539635, + "acc_norm": 0.38387096774193546, + "acc_norm_stderr": 0.027666182075539635 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5598290598290598, + "acc_stderr": 0.032520741720630506, + "acc_norm": 0.5598290598290598, + "acc_norm_stderr": 0.032520741720630506 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.39622641509433965, + "acc_stderr": 0.03010279378179119, + "acc_norm": 0.39622641509433965, + "acc_norm_stderr": 0.03010279378179119 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4, + "acc_stderr": 0.0469237132203465, + "acc_norm": 0.4, + "acc_norm_stderr": 0.0469237132203465 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.34814814814814815, + "acc_stderr": 0.02904560029061626, + "acc_norm": 0.34814814814814815, + "acc_norm_stderr": 0.02904560029061626 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + "acc_stderr": 0.03684881521389023, + "acc_norm": 0.2847682119205298, + "acc_norm_stderr": 0.03684881521389023 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.40298507462686567, + "acc_stderr": 0.034683432951111266, + "acc_norm": 0.40298507462686567, + "acc_norm_stderr": 0.034683432951111266 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.31213872832369943, + "acc_stderr": 0.035331333893236574, + 
"acc_norm": 0.31213872832369943, + "acc_norm_stderr": 0.035331333893236574 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3253968253968254, + "acc_stderr": 0.024130158299762613, + "acc_norm": 0.3253968253968254, + "acc_norm_stderr": 0.024130158299762613 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2847222222222222, + "acc_stderr": 0.03773809990686935, + "acc_norm": 0.2847222222222222, + "acc_norm_stderr": 0.03773809990686935 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.3352601156069364, + "acc_stderr": 0.025416003773165555, + "acc_norm": 0.3352601156069364, + "acc_norm_stderr": 0.025416003773165555 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3558282208588957, + "acc_stderr": 0.03761521380046734, + "acc_norm": 0.3558282208588957, + "acc_norm_stderr": 0.03761521380046734 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3425925925925926, + "acc_stderr": 0.026406145973625672, + "acc_norm": 0.3425925925925926, + "acc_norm_stderr": 0.026406145973625672 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.3626943005181347, + "acc_stderr": 0.03469713791704372, + "acc_norm": 0.3626943005181347, + "acc_norm_stderr": 0.03469713791704372 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.04227054451232199, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.04227054451232199 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3486238532110092, + "acc_stderr": 
0.020431254090714317, + "acc_norm": 0.3486238532110092, + "acc_norm_stderr": 0.020431254090714317 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2698412698412698, + "acc_stderr": 0.03970158273235172, + "acc_norm": 0.2698412698412698, + "acc_norm_stderr": 0.03970158273235172 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.02782610930728369, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.02782610930728369 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5041322314049587, + "acc_stderr": 0.045641987674327526, + "acc_norm": 0.5041322314049587, + "acc_norm_stderr": 0.045641987674327526 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.2565789473684211, + "acc_stderr": 0.0355418036802569, + "acc_norm": 0.2565789473684211, + "acc_norm_stderr": 0.0355418036802569 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.018771683893528186, + "acc_norm": 0.3137254901960784, + "acc_norm_stderr": 0.018771683893528186 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.26595744680851063, + "acc_stderr": 0.026358065698880592, + "acc_norm": 0.26595744680851063, + "acc_norm_stderr": 0.026358065698880592 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.36607142857142855, + "acc_stderr": 0.045723723587374296, + "acc_norm": 0.36607142857142855, + "acc_norm_stderr": 0.045723723587374296 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3101851851851852, + "acc_stderr": 0.031546962856566295, + "acc_norm": 0.3101851851851852, + "acc_norm_stderr": 0.031546962856566295 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23687150837988827, + "acc_stderr": 0.014219570788103982, + "acc_norm": 0.23687150837988827, + "acc_norm_stderr": 0.014219570788103982 + }, + 
"harness|ko_mmlu_college_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.04960449637488584, + "acc_norm": 0.42, + "acc_norm_stderr": 0.04960449637488584 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3382352941176471, + "acc_stderr": 0.028739328513983583, + "acc_norm": 0.3382352941176471, + "acc_norm_stderr": 0.028739328513983583 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.20816326530612245, + "acc_stderr": 0.025991117672813296, + "acc_norm": 0.20816326530612245, + "acc_norm_stderr": 0.025991117672813296 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.4472573839662447, + "acc_stderr": 0.03236564251614192, + "acc_norm": 0.4472573839662447, + "acc_norm_stderr": 0.03236564251614192 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2790091264667536, + "acc_stderr": 0.011455208832803529, + "acc_norm": 0.2790091264667536, + "acc_norm_stderr": 0.011455208832803529 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.31862745098039214, + "acc_stderr": 0.0327028718148208, + "acc_norm": 0.31862745098039214, + "acc_norm_stderr": 0.0327028718148208 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3151515151515151, + "acc_stderr": 0.0362773057502241, + "acc_norm": 0.3151515151515151, + "acc_norm_stderr": 0.0362773057502241 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2913096695226438, + "mc1_stderr": 0.01590598704818483, + "mc2": 0.44742099912433764, + "mc2_stderr": 0.015551700567433569 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2987012987012987, + "acc_stderr": 0.01573565739143828, + "acc_norm": 0.4085005903187721, + "acc_norm_stderr": 0.01690006287942712 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + 
"harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + 
"harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "4yo1/llama3-eng-ko-8", + "model_sha": "e2c5c277fae6b14e4a8f6c45075f0518199d3e95", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/4yo1/llama3-eng-ko-8b-sl/result_2024-07-03 13:25:55.json b/4yo1/llama3-eng-ko-8b-sl/result_2024-07-03 13:25:55.json new file mode 100644 index 0000000000000000000000000000000000000000..359f24150163d10257a398d5f6817409cf520c5e --- /dev/null +++ b/4yo1/llama3-eng-ko-8b-sl/result_2024-07-03 13:25:55.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.31569965870307165, + "acc_stderr": 0.013582571095815291, + "acc_norm": 0.3720136518771331, + "acc_norm_stderr": 0.014124597881844453 + }, + "harness|ko_hellaswag|10": { + "acc": 0.36317466640111534, + "acc_stderr": 0.004799317209902019, + "acc_norm": 0.46574387572196774, + "acc_norm_stderr": 0.004978056798794869 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.47953216374269003, + "acc_stderr": 0.038316105328219316, + "acc_norm": 0.47953216374269003, + "acc_norm_stderr": 0.038316105328219316 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5242718446601942, + "acc_stderr": 0.049449010929737795, + "acc_norm": 0.5242718446601942, + "acc_norm_stderr": 0.049449010929737795 + }, + 
"harness|ko_mmlu_miscellaneous|5": { + "acc": 0.39208173690932313, + "acc_stderr": 0.017458524050147643, + "acc_norm": 0.39208173690932313, + "acc_norm_stderr": 0.017458524050147643 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3925925925925926, + "acc_stderr": 0.04218506215368879, + "acc_norm": 0.3925925925925926, + "acc_norm_stderr": 0.04218506215368879 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036844, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036844 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3829787234042553, + "acc_stderr": 0.03177821250236922, + "acc_norm": 0.3829787234042553, + "acc_norm_stderr": 0.03177821250236922 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39156626506024095, + "acc_stderr": 0.037998574544796354, + "acc_norm": 0.39156626506024095, + "acc_norm_stderr": 0.037998574544796354 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.47266881028938906, + "acc_stderr": 0.028355633568328188, + "acc_norm": 0.47266881028938906, + "acc_norm_stderr": 0.028355633568328188 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4349775784753363, + "acc_stderr": 0.033272833702713445, + "acc_norm": 0.4349775784753363, + "acc_norm_stderr": 0.033272833702713445 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48091603053435117, + "acc_stderr": 0.04382094705550988, + "acc_norm": 0.48091603053435117, + "acc_norm_stderr": 0.04382094705550988 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.42, + "acc_stderr": 0.04960449637488583, + "acc_norm": 0.42, + "acc_norm_stderr": 0.04960449637488583 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.43434343434343436, + "acc_stderr": 0.03531505879359183, + "acc_norm": 0.43434343434343436, + "acc_norm_stderr": 0.03531505879359183 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5241379310344828, + "acc_stderr": 0.041618085035015295, + "acc_norm": 0.5241379310344828, + "acc_norm_stderr": 
0.041618085035015295 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3431372549019608, + "acc_stderr": 0.04724007352383888, + "acc_norm": 0.3431372549019608, + "acc_norm_stderr": 0.04724007352383888 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4579831932773109, + "acc_stderr": 0.032363611119519416, + "acc_norm": 0.4579831932773109, + "acc_norm_stderr": 0.032363611119519416 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4717948717948718, + "acc_stderr": 0.025310639254933927, + "acc_norm": 0.4717948717948718, + "acc_norm_stderr": 0.025310639254933927 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.56, + "acc_stderr": 0.0498887651569859, + "acc_norm": 0.56, + "acc_norm_stderr": 0.0498887651569859 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4187192118226601, + "acc_stderr": 0.034711928605184676, + "acc_norm": 0.4187192118226601, + "acc_norm_stderr": 0.034711928605184676 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.47419354838709676, + "acc_stderr": 0.02840609505765332, + "acc_norm": 0.47419354838709676, + "acc_norm_stderr": 0.02840609505765332 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6324786324786325, + "acc_stderr": 0.03158539157745636, + "acc_norm": 0.6324786324786325, + "acc_norm_stderr": 0.03158539157745636 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.42641509433962266, + "acc_stderr": 0.030437794342983045, + "acc_norm": 0.42641509433962266, + "acc_norm_stderr": 0.030437794342983045 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.44545454545454544, + "acc_stderr": 0.047605488214603246, + "acc_norm": 0.44545454545454544, + 
"acc_norm_stderr": 0.047605488214603246 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.35555555555555557, + "acc_stderr": 0.029185714949857392, + "acc_norm": 0.35555555555555557, + "acc_norm_stderr": 0.029185714949857392 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3576158940397351, + "acc_stderr": 0.03913453431177258, + "acc_norm": 0.3576158940397351, + "acc_norm_stderr": 0.03913453431177258 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5572139303482587, + "acc_stderr": 0.03512310964123935, + "acc_norm": 0.5572139303482587, + "acc_norm_stderr": 0.03512310964123935 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4508670520231214, + "acc_stderr": 0.03794012674697029, + "acc_norm": 0.4508670520231214, + "acc_norm_stderr": 0.03794012674697029 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.36772486772486773, + "acc_stderr": 0.02483383982556242, + "acc_norm": 0.36772486772486773, + "acc_norm_stderr": 0.02483383982556242 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3819444444444444, + "acc_stderr": 0.040629907841466674, + "acc_norm": 0.3819444444444444, + "acc_norm_stderr": 0.040629907841466674 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.62, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.62, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.476878612716763, + "acc_stderr": 0.026890297881303125, + "acc_norm": 0.476878612716763, + "acc_norm_stderr": 0.026890297881303125 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.34355828220858897, + "acc_stderr": 0.037311335196738925, + "acc_norm": 0.34355828220858897, + "acc_norm_stderr": 0.037311335196738925 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4537037037037037, + "acc_stderr": 0.027701228468542595, + 
"acc_norm": 0.4537037037037037, + "acc_norm_stderr": 0.027701228468542595 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5284974093264249, + "acc_stderr": 0.03602573571288441, + "acc_norm": 0.5284974093264249, + "acc_norm_stderr": 0.03602573571288441 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3684210526315789, + "acc_stderr": 0.04537815354939391, + "acc_norm": 0.3684210526315789, + "acc_norm_stderr": 0.04537815354939391 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5155963302752293, + "acc_stderr": 0.02142689153920805, + "acc_norm": 0.5155963302752293, + "acc_norm_stderr": 0.02142689153920805 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.04134913018303316, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.04134913018303316 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5098039215686274, + "acc_stderr": 0.028624412550167958, + "acc_norm": 0.5098039215686274, + "acc_norm_stderr": 0.028624412550167958 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956914, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956914 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6446280991735537, + "acc_stderr": 0.0436923632657398, + "acc_norm": 0.6446280991735537, + "acc_norm_stderr": 0.0436923632657398 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4868421052631579, + "acc_stderr": 0.04067533136309173, + "acc_norm": 0.4868421052631579, + "acc_norm_stderr": 0.04067533136309173 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4068627450980392, + "acc_stderr": 0.019873802005061177, + "acc_norm": 0.4068627450980392, + "acc_norm_stderr": 0.019873802005061177 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 
0.33687943262411346, + "acc_stderr": 0.02819553487396673, + "acc_norm": 0.33687943262411346, + "acc_norm_stderr": 0.02819553487396673 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.375, + "acc_stderr": 0.04595091388086298, + "acc_norm": 0.375, + "acc_norm_stderr": 0.04595091388086298 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.39351851851851855, + "acc_stderr": 0.03331747876370312, + "acc_norm": 0.39351851851851855, + "acc_norm_stderr": 0.03331747876370312 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.26145251396648045, + "acc_stderr": 0.014696599650364555, + "acc_norm": 0.26145251396648045, + "acc_norm_stderr": 0.014696599650364555 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.35661764705882354, + "acc_stderr": 0.02909720956841196, + "acc_norm": 0.35661764705882354, + "acc_norm_stderr": 0.02909720956841196 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.563265306122449, + "acc_stderr": 0.031751952375833226, + "acc_norm": 0.563265306122449, + "acc_norm_stderr": 0.031751952375833226 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.540084388185654, + "acc_stderr": 0.03244246810187913, + "acc_norm": 0.540084388185654, + "acc_norm_stderr": 0.03244246810187913 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.31290743155149936, + "acc_stderr": 0.011842529823062997, + "acc_norm": 0.31290743155149936, + "acc_norm_stderr": 0.011842529823062997 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4362745098039216, + "acc_stderr": 0.03480693138457039, + "acc_norm": 0.4362745098039216, + "acc_norm_stderr": 0.03480693138457039 + }, + 
"harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5212121212121212, + "acc_stderr": 0.03900828913737302, + "acc_norm": 0.5212121212121212, + "acc_norm_stderr": 0.03900828913737302 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2729498164014688, + "mc1_stderr": 0.015594753632006516, + "mc2": 0.4494211990695322, + "mc2_stderr": 0.016190690301781833 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3990554899645809, + "acc_stderr": 0.0168363772928493, + "acc_norm": 0.4498229043683589, + "acc_norm_stderr": 0.01710357334382571 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + 
"harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "4yo1/llama3-eng-ko-8b-sl", + "model_sha": "5f82a1a6e36c50db9a2ee4b815d742b27cdb6023", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/4yo1/llama3-eng-ko-8b-sl2/result_2024-07-04 14:21:25.json b/4yo1/llama3-eng-ko-8b-sl2/result_2024-07-04 14:21:25.json new file mode 100644 index 0000000000000000000000000000000000000000..5f4a916b3c6a88afaeb65ec2d42bc1f769081309 --- /dev/null +++ b/4yo1/llama3-eng-ko-8b-sl2/result_2024-07-04 14:21:25.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 
0.3148464163822526, + "acc_stderr": 0.013572657703084948, + "acc_norm": 0.3583617747440273, + "acc_norm_stderr": 0.01401288333485987 + }, + "harness|ko_hellaswag|10": { + "acc": 0.358195578570006, + "acc_stderr": 0.004784901248558722, + "acc_norm": 0.44971121290579563, + "acc_norm_stderr": 0.004964479324552529 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5146198830409356, + "acc_stderr": 0.038331852752130254, + "acc_norm": 0.5146198830409356, + "acc_norm_stderr": 0.038331852752130254 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4563106796116505, + "acc_stderr": 0.049318019942204146, + "acc_norm": 0.4563106796116505, + "acc_norm_stderr": 0.049318019942204146 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3997445721583653, + "acc_stderr": 0.01751684790705327, + "acc_norm": 0.3997445721583653, + "acc_norm_stderr": 0.01751684790705327 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.28888888888888886, + "acc_stderr": 0.03915450630414251, + "acc_norm": 0.28888888888888886, + "acc_norm_stderr": 0.03915450630414251 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39148936170212767, + "acc_stderr": 0.03190701242326812, + "acc_norm": 0.39148936170212767, + "acc_norm_stderr": 0.03190701242326812 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.35542168674698793, + "acc_stderr": 0.03726214354322415, + "acc_norm": 0.35542168674698793, + "acc_norm_stderr": 0.03726214354322415 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.45016077170418006, + "acc_stderr": 0.028256660723360177, + "acc_norm": 0.45016077170418006, + "acc_norm_stderr": 0.028256660723360177 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4349775784753363, + "acc_stderr": 0.033272833702713445, + "acc_norm": 0.4349775784753363, + "acc_norm_stderr": 0.033272833702713445 + }, + "harness|ko_mmlu_human_sexuality|5": 
{ + "acc": 0.4122137404580153, + "acc_stderr": 0.04317171194870254, + "acc_norm": 0.4122137404580153, + "acc_norm_stderr": 0.04317171194870254 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465918, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465918 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.4090909090909091, + "acc_stderr": 0.03502975799413007, + "acc_norm": 0.4090909090909091, + "acc_norm_stderr": 0.03502975799413007 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.45517241379310347, + "acc_stderr": 0.04149886942192117, + "acc_norm": 0.45517241379310347, + "acc_norm_stderr": 0.04149886942192117 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.04389869956808778, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.04389869956808778 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5084033613445378, + "acc_stderr": 0.0324739027656967, + "acc_norm": 0.5084033613445378, + "acc_norm_stderr": 0.0324739027656967 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.441025641025641, + "acc_stderr": 0.025174048384000777, + "acc_norm": 0.441025641025641, + "acc_norm_stderr": 0.025174048384000777 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6111111111111112, + "acc_stderr": 0.04712821257426769, + "acc_norm": 0.6111111111111112, + "acc_norm_stderr": 0.04712821257426769 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.43842364532019706, + "acc_stderr": 0.03491207857486518, + "acc_norm": 0.43842364532019706, + "acc_norm_stderr": 0.03491207857486518 + }, + 
"harness|ko_mmlu_high_school_biology|5": { + "acc": 0.47096774193548385, + "acc_stderr": 0.028396016402761008, + "acc_norm": 0.47096774193548385, + "acc_norm_stderr": 0.028396016402761008 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6623931623931624, + "acc_stderr": 0.030980296992618558, + "acc_norm": 0.6623931623931624, + "acc_norm_stderr": 0.030980296992618558 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.44528301886792454, + "acc_stderr": 0.030588052974270655, + "acc_norm": 0.44528301886792454, + "acc_norm_stderr": 0.030588052974270655 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4636363636363636, + "acc_stderr": 0.047764491623961985, + "acc_norm": 0.4636363636363636, + "acc_norm_stderr": 0.047764491623961985 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.02944316932303154, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.02944316932303154 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3841059602649007, + "acc_stderr": 0.03971301814719198, + "acc_norm": 0.3841059602649007, + "acc_norm_stderr": 0.03971301814719198 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5323383084577115, + "acc_stderr": 0.035281314729336065, + "acc_norm": 0.5323383084577115, + "acc_norm_stderr": 0.035281314729336065 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.44508670520231214, + "acc_stderr": 0.03789401760283647, + "acc_norm": 0.44508670520231214, + "acc_norm_stderr": 0.03789401760283647 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.36243386243386244, + "acc_stderr": 0.024757473902752045, + "acc_norm": 0.36243386243386244, + "acc_norm_stderr": 0.024757473902752045 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.375, + "acc_stderr": 0.04048439222695598, + "acc_norm": 0.375, + "acc_norm_stderr": 0.04048439222695598 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + 
"acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4595375722543353, + "acc_stderr": 0.026830805998952243, + "acc_norm": 0.4595375722543353, + "acc_norm_stderr": 0.026830805998952243 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.37423312883435583, + "acc_stderr": 0.03802068102899615, + "acc_norm": 0.37423312883435583, + "acc_norm_stderr": 0.03802068102899615 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4660493827160494, + "acc_stderr": 0.027756535257347663, + "acc_norm": 0.4660493827160494, + "acc_norm_stderr": 0.027756535257347663 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.47150259067357514, + "acc_stderr": 0.036025735712884414, + "acc_norm": 0.47150259067357514, + "acc_norm_stderr": 0.036025735712884414 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.04227054451232199, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.04227054451232199 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5192660550458715, + "acc_stderr": 0.02142140298254888, + "acc_norm": 0.5192660550458715, + "acc_norm_stderr": 0.02142140298254888 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.0404061017820884, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.0404061017820884 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5424836601307189, + "acc_stderr": 0.028526383452142638, + "acc_norm": 0.5424836601307189, + "acc_norm_stderr": 0.028526383452142638 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, 
+ "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5950413223140496, + "acc_stderr": 0.044811377559424694, + "acc_norm": 0.5950413223140496, + "acc_norm_stderr": 0.044811377559424694 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4342105263157895, + "acc_stderr": 0.040335656678483205, + "acc_norm": 0.4342105263157895, + "acc_norm_stderr": 0.040335656678483205 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.41830065359477125, + "acc_stderr": 0.019955975145835542, + "acc_norm": 0.41830065359477125, + "acc_norm_stderr": 0.019955975145835542 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3049645390070922, + "acc_stderr": 0.02746470844202213, + "acc_norm": 0.3049645390070922, + "acc_norm_stderr": 0.02746470844202213 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.04547960999764376, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.04547960999764376 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4027777777777778, + "acc_stderr": 0.03344887382997866, + "acc_norm": 0.4027777777777778, + "acc_norm_stderr": 0.03344887382997866 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2335195530726257, + "acc_stderr": 0.014149575348976273, + "acc_norm": 0.2335195530726257, + "acc_norm_stderr": 0.014149575348976273 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4007352941176471, + "acc_stderr": 0.029768263528933105, + "acc_norm": 0.4007352941176471, + "acc_norm_stderr": 0.029768263528933105 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5224489795918368, + 
"acc_stderr": 0.03197694118713671, + "acc_norm": 0.5224489795918368, + "acc_norm_stderr": 0.03197694118713671 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.510548523206751, + "acc_stderr": 0.032539983791662855, + "acc_norm": 0.510548523206751, + "acc_norm_stderr": 0.032539983791662855 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.31877444589308995, + "acc_stderr": 0.011901895635786084, + "acc_norm": 0.31877444589308995, + "acc_norm_stderr": 0.011901895635786084 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.45098039215686275, + "acc_stderr": 0.03492406104163613, + "acc_norm": 0.45098039215686275, + "acc_norm_stderr": 0.03492406104163613 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5333333333333333, + "acc_stderr": 0.03895658065271846, + "acc_norm": 0.5333333333333333, + "acc_norm_stderr": 0.03895658065271846 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2729498164014688, + "mc1_stderr": 0.015594753632006514, + "mc2": 0.4303948510286609, + "mc2_stderr": 0.01639757694486845 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3659976387249115, + "acc_stderr": 0.0165614896648957, + "acc_norm": 0.3990554899645809, + "acc_norm_stderr": 0.016836377292849303 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + 
"harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + 
"model_name": "4yo1/llama3-eng-ko-8b-sl2", + "model_sha": "d99912a733ba3a27a3fa36d6e9fb9b4558cefb60", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/4yo1/llama3-eng-ko-8b-sl3/result_2024-07-08 02:28:51.json b/4yo1/llama3-eng-ko-8b-sl3/result_2024-07-08 02:28:51.json new file mode 100644 index 0000000000000000000000000000000000000000..9585ce7915067bd1d1aaade9064fb5422684d5c2 --- /dev/null +++ b/4yo1/llama3-eng-ko-8b-sl3/result_2024-07-08 02:28:51.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3097269624573379, + "acc_stderr": 0.013512058415238361, + "acc_norm": 0.3796928327645051, + "acc_norm_stderr": 0.014182119866974869 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3564031069508066, + "acc_stderr": 0.004779574402771384, + "acc_norm": 0.45379406492730534, + "acc_norm_stderr": 0.004968429476345018 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4678362573099415, + "acc_stderr": 0.038268824176603676, + "acc_norm": 0.4678362573099415, + "acc_norm_stderr": 0.038268824176603676 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5631067961165048, + "acc_stderr": 0.04911147107365777, + "acc_norm": 0.5631067961165048, + "acc_norm_stderr": 0.04911147107365777 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4061302681992337, + "acc_stderr": 0.017562037406478912, + "acc_norm": 0.4061302681992337, + "acc_norm_stderr": 0.017562037406478912 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34814814814814815, + "acc_stderr": 0.041153246103369526, + "acc_norm": 0.34814814814814815, + "acc_norm_stderr": 0.041153246103369526 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.18, + "acc_stderr": 0.03861229196653697, + "acc_norm": 0.18, + "acc_norm_stderr": 0.03861229196653697 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.40425531914893614, + 
"acc_stderr": 0.03208115750788682, + "acc_norm": 0.40425531914893614, + "acc_norm_stderr": 0.03208115750788682 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.40963855421686746, + "acc_stderr": 0.038284011150790206, + "acc_norm": 0.40963855421686746, + "acc_norm_stderr": 0.038284011150790206 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.42765273311897106, + "acc_stderr": 0.028099240775809567, + "acc_norm": 0.42765273311897106, + "acc_norm_stderr": 0.028099240775809567 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.484304932735426, + "acc_stderr": 0.0335412657542081, + "acc_norm": 0.484304932735426, + "acc_norm_stderr": 0.0335412657542081 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.46564885496183206, + "acc_stderr": 0.043749285605997376, + "acc_norm": 0.46564885496183206, + "acc_norm_stderr": 0.043749285605997376 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.41, + "acc_stderr": 0.04943110704237103, + "acc_norm": 0.41, + "acc_norm_stderr": 0.04943110704237103 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.4292929292929293, + "acc_stderr": 0.035265527246011986, + "acc_norm": 0.4292929292929293, + "acc_norm_stderr": 0.035265527246011986 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4068965517241379, + "acc_stderr": 0.04093793981266236, + "acc_norm": 0.4068965517241379, + "acc_norm_stderr": 0.04093793981266236 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.04389869956808778, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.04389869956808778 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.41596638655462187, + "acc_stderr": 0.03201650100739615, + "acc_norm": 0.41596638655462187, + "acc_norm_stderr": 0.03201650100739615 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4641025641025641, + "acc_stderr": 0.025285585990017834, + "acc_norm": 0.4641025641025641, + "acc_norm_stderr": 0.025285585990017834 + }, + 
"harness|ko_mmlu_computer_security|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5370370370370371, + "acc_stderr": 0.04820403072760627, + "acc_norm": 0.5370370370370371, + "acc_norm_stderr": 0.04820403072760627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4039408866995074, + "acc_stderr": 0.03452453903822041, + "acc_norm": 0.4039408866995074, + "acc_norm_stderr": 0.03452453903822041 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.44516129032258067, + "acc_stderr": 0.028272410186214906, + "acc_norm": 0.44516129032258067, + "acc_norm_stderr": 0.028272410186214906 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6410256410256411, + "acc_stderr": 0.03142616993791924, + "acc_norm": 0.6410256410256411, + "acc_norm_stderr": 0.03142616993791924 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.44150943396226416, + "acc_stderr": 0.030561590426731833, + "acc_norm": 0.44150943396226416, + "acc_norm_stderr": 0.030561590426731833 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4818181818181818, + "acc_stderr": 0.04785964010794917, + "acc_norm": 0.4818181818181818, + "acc_norm_stderr": 0.04785964010794917 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3296296296296296, + "acc_stderr": 0.02866120111652458, + "acc_norm": 0.3296296296296296, + "acc_norm_stderr": 0.02866120111652458 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33112582781456956, + "acc_stderr": 0.038425817186598696, + "acc_norm": 0.33112582781456956, + "acc_norm_stderr": 0.038425817186598696 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5024875621890548, + "acc_stderr": 0.03535490150137288, + "acc_norm": 0.5024875621890548, + "acc_norm_stderr": 
0.03535490150137288 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.44508670520231214, + "acc_stderr": 0.03789401760283647, + "acc_norm": 0.44508670520231214, + "acc_norm_stderr": 0.03789401760283647 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.31746031746031744, + "acc_stderr": 0.02397386199899207, + "acc_norm": 0.31746031746031744, + "acc_norm_stderr": 0.02397386199899207 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4097222222222222, + "acc_stderr": 0.04112490974670788, + "acc_norm": 0.4097222222222222, + "acc_norm_stderr": 0.04112490974670788 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.44508670520231214, + "acc_stderr": 0.026756255129663765, + "acc_norm": 0.44508670520231214, + "acc_norm_stderr": 0.026756255129663765 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3987730061349693, + "acc_stderr": 0.038470214204560246, + "acc_norm": 0.3987730061349693, + "acc_norm_stderr": 0.038470214204560246 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4228395061728395, + "acc_stderr": 0.0274874729808716, + "acc_norm": 0.4228395061728395, + "acc_norm_stderr": 0.0274874729808716 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.49222797927461137, + "acc_stderr": 0.03608003225569654, + "acc_norm": 0.49222797927461137, + "acc_norm_stderr": 0.03608003225569654 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3157894736842105, + "acc_stderr": 0.04372748290278007, + "acc_norm": 0.3157894736842105, + 
"acc_norm_stderr": 0.04372748290278007 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.48623853211009177, + "acc_stderr": 0.02142920208987408, + "acc_norm": 0.48623853211009177, + "acc_norm_stderr": 0.02142920208987408 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.042163702135578345, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.042163702135578345 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4869281045751634, + "acc_stderr": 0.028620130800700246, + "acc_norm": 0.4869281045751634, + "acc_norm_stderr": 0.028620130800700246 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.47, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.47, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.628099173553719, + "acc_stderr": 0.04412015806624503, + "acc_norm": 0.628099173553719, + "acc_norm_stderr": 0.04412015806624503 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4407894736842105, + "acc_stderr": 0.04040311062490435, + "acc_norm": 0.4407894736842105, + "acc_norm_stderr": 0.04040311062490435 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.39052287581699346, + "acc_stderr": 0.0197370089980946, + "acc_norm": 0.39052287581699346, + "acc_norm_stderr": 0.0197370089980946 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32269503546099293, + "acc_stderr": 0.02788913930053478, + "acc_norm": 0.32269503546099293, + "acc_norm_stderr": 0.02788913930053478 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.38392857142857145, + "acc_stderr": 0.04616143075028546, + "acc_norm": 0.38392857142857145, + "acc_norm_stderr": 0.04616143075028546 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.33796296296296297, + "acc_stderr": 0.032259413526312945, + "acc_norm": 0.33796296296296297, + "acc_norm_stderr": 0.032259413526312945 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + 
"acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.55, + "acc_stderr": 0.04999999999999999, + "acc_norm": 0.55, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3492647058823529, + "acc_stderr": 0.028959755196824862, + "acc_norm": 0.3492647058823529, + "acc_norm_stderr": 0.028959755196824862 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5265306122448979, + "acc_stderr": 0.03196412734523272, + "acc_norm": 0.5265306122448979, + "acc_norm_stderr": 0.03196412734523272 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5063291139240507, + "acc_stderr": 0.03254462010767859, + "acc_norm": 0.5063291139240507, + "acc_norm_stderr": 0.03254462010767859 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.28292046936114734, + "acc_stderr": 0.011503891323188976, + "acc_norm": 0.28292046936114734, + "acc_norm_stderr": 0.011503891323188976 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.45588235294117646, + "acc_stderr": 0.03495624522015474, + "acc_norm": 0.45588235294117646, + "acc_norm_stderr": 0.03495624522015474 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5333333333333333, + "acc_stderr": 0.03895658065271846, + "acc_norm": 0.5333333333333333, + "acc_norm_stderr": 0.03895658065271846 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2839657282741738, + "mc1_stderr": 0.015785370858396708, + "mc2": 0.4556979185667561, + "mc2_stderr": 0.01630139604691971 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3707201889020071, + "acc_stderr": 0.016605801289212605, + "acc_norm": 0.40613931523022434, + "acc_norm_stderr": 0.016884749503191392 + } + }, + "versions": { + 
"all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + 
"harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "4yo1/llama3-eng-ko-8b-sl3", + "model_sha": "4d29db816f84bee0341d5037f89e2d63c4a05381", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/4yo1/llama3-eng-ko-8b-sl4/result_2024-07-10 12:28:24.json b/4yo1/llama3-eng-ko-8b-sl4/result_2024-07-10 12:28:24.json new file mode 100644 index 0000000000000000000000000000000000000000..2295c9e90fd888fa78374772343fee715ac2990a --- /dev/null +++ b/4yo1/llama3-eng-ko-8b-sl4/result_2024-07-10 12:28:24.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.310580204778157, + "acc_stderr": 0.013522292098053054, + "acc_norm": 0.363481228668942, + "acc_norm_stderr": 0.01405620731906828 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3595897231627166, + "acc_stderr": 0.004788994060654275, + "acc_norm": 0.460565624377614, + "acc_norm_stderr": 0.004974238284524824 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.47953216374269003, + "acc_stderr": 0.038316105328219316, + "acc_norm": 0.47953216374269003, + "acc_norm_stderr": 0.038316105328219316 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5436893203883495, + 
"acc_stderr": 0.049318019942204146, + "acc_norm": 0.5436893203883495, + "acc_norm_stderr": 0.049318019942204146 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4112388250319285, + "acc_stderr": 0.017595971908056576, + "acc_norm": 0.4112388250319285, + "acc_norm_stderr": 0.017595971908056576 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34074074074074073, + "acc_stderr": 0.04094376269996794, + "acc_norm": 0.34074074074074073, + "acc_norm_stderr": 0.04094376269996794 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.22, + "acc_stderr": 0.041633319989322695, + "acc_norm": 0.22, + "acc_norm_stderr": 0.041633319989322695 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4, + "acc_stderr": 0.03202563076101736, + "acc_norm": 0.4, + "acc_norm_stderr": 0.03202563076101736 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3795180722891566, + "acc_stderr": 0.037777988227480165, + "acc_norm": 0.3795180722891566, + "acc_norm_stderr": 0.037777988227480165 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.47266881028938906, + "acc_stderr": 0.02835563356832818, + "acc_norm": 0.47266881028938906, + "acc_norm_stderr": 0.02835563356832818 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4304932735426009, + "acc_stderr": 0.033231973029429394, + "acc_norm": 0.4304932735426009, + "acc_norm_stderr": 0.033231973029429394 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4732824427480916, + "acc_stderr": 0.04379024936553893, + "acc_norm": 0.4732824427480916, + "acc_norm_stderr": 0.04379024936553893 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.45454545454545453, + "acc_stderr": 0.03547601494006937, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.03547601494006937 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4896551724137931, + "acc_stderr": 
0.041657747757287644, + "acc_norm": 0.4896551724137931, + "acc_norm_stderr": 0.041657747757287644 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.39215686274509803, + "acc_stderr": 0.04858083574266347, + "acc_norm": 0.39215686274509803, + "acc_norm_stderr": 0.04858083574266347 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.47058823529411764, + "acc_stderr": 0.03242225027115007, + "acc_norm": 0.47058823529411764, + "acc_norm_stderr": 0.03242225027115007 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.025294608023986483, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.025294608023986483 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.03465304488406795, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.03465304488406795 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.45806451612903226, + "acc_stderr": 0.02834378725054064, + "acc_norm": 0.45806451612903226, + "acc_norm_stderr": 0.02834378725054064 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6153846153846154, + "acc_stderr": 0.03187195347942466, + "acc_norm": 0.6153846153846154, + "acc_norm_stderr": 0.03187195347942466 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4867924528301887, + "acc_stderr": 0.03076213487450049, + "acc_norm": 0.4867924528301887, + "acc_norm_stderr": 0.03076213487450049 + }, + "harness|ko_mmlu_public_relations|5": { + 
"acc": 0.42727272727272725, + "acc_stderr": 0.04738198703545483, + "acc_norm": 0.42727272727272725, + "acc_norm_stderr": 0.04738198703545483 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3296296296296296, + "acc_stderr": 0.028661201116524593, + "acc_norm": 0.3296296296296296, + "acc_norm_stderr": 0.028661201116524593 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3576158940397351, + "acc_stderr": 0.03913453431177258, + "acc_norm": 0.3576158940397351, + "acc_norm_stderr": 0.03913453431177258 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5522388059701493, + "acc_stderr": 0.03516184772952168, + "acc_norm": 0.5522388059701493, + "acc_norm_stderr": 0.03516184772952168 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4682080924855491, + "acc_stderr": 0.03804749744364764, + "acc_norm": 0.4682080924855491, + "acc_norm_stderr": 0.03804749744364764 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.36243386243386244, + "acc_stderr": 0.024757473902752045, + "acc_norm": 0.36243386243386244, + "acc_norm_stderr": 0.024757473902752045 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3819444444444444, + "acc_stderr": 0.040629907841466674, + "acc_norm": 0.3819444444444444, + "acc_norm_stderr": 0.040629907841466674 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237101, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237101 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.43352601156069365, + "acc_stderr": 0.026680134761679217, + "acc_norm": 0.43352601156069365, + "acc_norm_stderr": 0.026680134761679217 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.39263803680981596, + "acc_stderr": 0.03836740907831028, + "acc_norm": 0.39263803680981596, + "acc_norm_stderr": 0.03836740907831028 + }, + 
"harness|ko_mmlu_prehistory|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.027801656212323667, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.027801656212323667 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.44559585492227977, + "acc_stderr": 0.03587014986075659, + "acc_norm": 0.44559585492227977, + "acc_norm_stderr": 0.03587014986075659 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.30701754385964913, + "acc_stderr": 0.0433913832257986, + "acc_norm": 0.30701754385964913, + "acc_norm_stderr": 0.0433913832257986 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5192660550458715, + "acc_stderr": 0.02142140298254889, + "acc_norm": 0.5192660550458715, + "acc_norm_stderr": 0.02142140298254889 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.04360314860077459, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.04360314860077459 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5196078431372549, + "acc_stderr": 0.028607893699576066, + "acc_norm": 0.5196078431372549, + "acc_norm_stderr": 0.028607893699576066 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.56, + "acc_stderr": 0.0498887651569859, + "acc_norm": 0.56, + "acc_norm_stderr": 0.0498887651569859 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6115702479338843, + "acc_stderr": 0.044492703500683815, + "acc_norm": 0.6115702479338843, + "acc_norm_stderr": 0.044492703500683815 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.48026315789473684, + "acc_stderr": 0.040657710025626036, + "acc_norm": 0.48026315789473684, + "acc_norm_stderr": 0.040657710025626036 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3937908496732026, + "acc_stderr": 0.019766211991073063, + "acc_norm": 0.3937908496732026, + 
"acc_norm_stderr": 0.019766211991073063 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3723404255319149, + "acc_stderr": 0.02883892147125145, + "acc_norm": 0.3723404255319149, + "acc_norm_stderr": 0.02883892147125145 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3482142857142857, + "acc_stderr": 0.04521829902833586, + "acc_norm": 0.3482142857142857, + "acc_norm_stderr": 0.04521829902833586 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.39351851851851855, + "acc_stderr": 0.03331747876370312, + "acc_norm": 0.39351851851851855, + "acc_norm_stderr": 0.03331747876370312 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2770949720670391, + "acc_stderr": 0.014968772435812143, + "acc_norm": 0.2770949720670391, + "acc_norm_stderr": 0.014968772435812143 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.55, + "acc_stderr": 0.04999999999999999, + "acc_norm": 0.55, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.33088235294117646, + "acc_stderr": 0.028582709753898445, + "acc_norm": 0.33088235294117646, + "acc_norm_stderr": 0.028582709753898445 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5469387755102041, + "acc_stderr": 0.031867859300041275, + "acc_norm": 0.5469387755102041, + "acc_norm_stderr": 0.031867859300041275 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5485232067510548, + "acc_stderr": 0.032393600173974704, + "acc_norm": 0.5485232067510548, + "acc_norm_stderr": 0.032393600173974704 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3005215123859192, + "acc_stderr": 0.011709918883039117, + "acc_norm": 0.3005215123859192, + "acc_norm_stderr": 0.011709918883039117 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 
0.46568627450980393, + "acc_stderr": 0.03501038327635897, + "acc_norm": 0.46568627450980393, + "acc_norm_stderr": 0.03501038327635897 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5393939393939394, + "acc_stderr": 0.03892207016552012, + "acc_norm": 0.5393939393939394, + "acc_norm_stderr": 0.03892207016552012 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2827417380660955, + "mc1_stderr": 0.01576477083677731, + "mc2": 0.4661586592281064, + "mc2_stderr": 0.016330593604368164 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.40613931523022434, + "acc_stderr": 0.016884749503191392, + "acc_norm": 0.43919716646989376, + "acc_norm_stderr": 0.017062775744780705 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + 
"harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "4yo1/llama3-eng-ko-8b-sl4", + "model_sha": "25a488e868ecf3d10fef14a534257043b895df7e", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/4yo1/llama3-eng-ko-8b-sl5/result_2024-07-16 00:48:26.json b/4yo1/llama3-eng-ko-8b-sl5/result_2024-07-16 00:48:26.json new file mode 100644 index 0000000000000000000000000000000000000000..da3d94ae84d42ab8297ba60f40a5011dfbf65668 --- /dev/null +++ 
b/4yo1/llama3-eng-ko-8b-sl5/result_2024-07-16 00:48:26.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.33532423208191126, + "acc_stderr": 0.013796182947785564, + "acc_norm": 0.3771331058020478, + "acc_norm_stderr": 0.014163366896192584 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3536148177653854, + "acc_stderr": 0.004771143074426136, + "acc_norm": 0.4545907189802828, + "acc_norm_stderr": 0.004969160917379652 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4502923976608187, + "acc_stderr": 0.038158273659132366, + "acc_norm": 0.4502923976608187, + "acc_norm_stderr": 0.038158273659132366 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5436893203883495, + "acc_stderr": 0.049318019942204146, + "acc_norm": 0.5436893203883495, + "acc_norm_stderr": 0.049318019942204146 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3895274584929757, + "acc_stderr": 0.01743808255626459, + "acc_norm": 0.3895274584929757, + "acc_norm_stderr": 0.01743808255626459 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.04171654161354543, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.04171654161354543 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.22, + "acc_stderr": 0.0416333199893227, + "acc_norm": 0.22, + "acc_norm_stderr": 0.0416333199893227 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.425531914893617, + "acc_stderr": 0.032321469162244675, + "acc_norm": 0.425531914893617, + "acc_norm_stderr": 0.032321469162244675 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.37349397590361444, + "acc_stderr": 0.03765845117168863, + "acc_norm": 0.37349397590361444, + "acc_norm_stderr": 0.03765845117168863 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.49517684887459806, + "acc_stderr": 0.028396770444111288, + "acc_norm": 0.49517684887459806, + "acc_norm_stderr": 0.028396770444111288 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.42152466367713004, + "acc_stderr": 
0.033141902221106564, + "acc_norm": 0.42152466367713004, + "acc_norm_stderr": 0.033141902221106564 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48091603053435117, + "acc_stderr": 0.04382094705550988, + "acc_norm": 0.48091603053435117, + "acc_norm_stderr": 0.04382094705550988 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.42, + "acc_stderr": 0.04960449637488583, + "acc_norm": 0.42, + "acc_norm_stderr": 0.04960449637488583 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.46464646464646464, + "acc_stderr": 0.035534363688280626, + "acc_norm": 0.46464646464646464, + "acc_norm_stderr": 0.035534363688280626 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.6068965517241379, + "acc_stderr": 0.040703290137070705, + "acc_norm": 0.6068965517241379, + "acc_norm_stderr": 0.040703290137070705 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.04617034827006716, + "acc_norm": 0.3137254901960784, + "acc_norm_stderr": 0.04617034827006716 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4831932773109244, + "acc_stderr": 0.03246013680375308, + "acc_norm": 0.4831932773109244, + "acc_norm_stderr": 0.03246013680375308 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.47435897435897434, + "acc_stderr": 0.025317649726448652, + "acc_norm": 0.47435897435897434, + "acc_norm_stderr": 0.025317649726448652 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.04803752235190193, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.04803752235190193 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 
0.4088669950738916, + "acc_stderr": 0.03459058815883233, + "acc_norm": 0.4088669950738916, + "acc_norm_stderr": 0.03459058815883233 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.49032258064516127, + "acc_stderr": 0.02843867799890955, + "acc_norm": 0.49032258064516127, + "acc_norm_stderr": 0.02843867799890955 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6709401709401709, + "acc_stderr": 0.030782321577688173, + "acc_norm": 0.6709401709401709, + "acc_norm_stderr": 0.030782321577688173 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.44150943396226416, + "acc_stderr": 0.03056159042673183, + "acc_norm": 0.44150943396226416, + "acc_norm_stderr": 0.03056159042673183 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.44545454545454544, + "acc_stderr": 0.047605488214603246, + "acc_norm": 0.44545454545454544, + "acc_norm_stderr": 0.047605488214603246 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.35555555555555557, + "acc_stderr": 0.029185714949857403, + "acc_norm": 0.35555555555555557, + "acc_norm_stderr": 0.029185714949857403 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33774834437086093, + "acc_stderr": 0.038615575462551684, + "acc_norm": 0.33774834437086093, + "acc_norm_stderr": 0.038615575462551684 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5870646766169154, + "acc_stderr": 0.03481520803367348, + "acc_norm": 0.5870646766169154, + "acc_norm_stderr": 0.03481520803367348 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.35260115606936415, + "acc_stderr": 0.03643037168958548, + "acc_norm": 0.35260115606936415, + "acc_norm_stderr": 0.03643037168958548 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.38095238095238093, + "acc_stderr": 0.025010749116137605, + "acc_norm": 0.38095238095238093, + "acc_norm_stderr": 0.025010749116137605 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4652777777777778, + "acc_stderr": 0.04171115858181618, + "acc_norm": 
0.4652777777777778, + "acc_norm_stderr": 0.04171115858181618 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.64, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.64, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4682080924855491, + "acc_stderr": 0.026864624366756646, + "acc_norm": 0.4682080924855491, + "acc_norm_stderr": 0.026864624366756646 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4233128834355828, + "acc_stderr": 0.03881891213334383, + "acc_norm": 0.4233128834355828, + "acc_norm_stderr": 0.03881891213334383 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.49691358024691357, + "acc_stderr": 0.027820214158594377, + "acc_norm": 0.49691358024691357, + "acc_norm_stderr": 0.027820214158594377 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5025906735751295, + "acc_stderr": 0.03608390745384487, + "acc_norm": 0.5025906735751295, + "acc_norm_stderr": 0.03608390745384487 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3684210526315789, + "acc_stderr": 0.04537815354939391, + "acc_norm": 0.3684210526315789, + "acc_norm_stderr": 0.04537815354939391 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5376146788990825, + "acc_stderr": 0.021376575274397576, + "acc_norm": 0.5376146788990825, + "acc_norm_stderr": 0.021376575274397576 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.04360314860077459, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.04360314860077459 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4934640522875817, + "acc_stderr": 0.028627470550556047, + 
"acc_norm": 0.4934640522875817, + "acc_norm_stderr": 0.028627470550556047 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6528925619834711, + "acc_stderr": 0.04345724570292535, + "acc_norm": 0.6528925619834711, + "acc_norm_stderr": 0.04345724570292535 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40789473684210525, + "acc_stderr": 0.03999309712777473, + "acc_norm": 0.40789473684210525, + "acc_norm_stderr": 0.03999309712777473 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3839869281045752, + "acc_stderr": 0.019675808135281515, + "acc_norm": 0.3839869281045752, + "acc_norm_stderr": 0.019675808135281515 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.028121636040639886, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.028121636040639886 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.32142857142857145, + "acc_stderr": 0.0443280405529152, + "acc_norm": 0.32142857142857145, + "acc_norm_stderr": 0.0443280405529152 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.38425925925925924, + "acc_stderr": 0.03317354514310742, + "acc_norm": 0.38425925925925924, + "acc_norm_stderr": 0.03317354514310742 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.25139664804469275, + "acc_stderr": 0.014508979453553974, + "acc_norm": 0.25139664804469275, + "acc_norm_stderr": 0.014508979453553974 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.04999999999999999, + "acc_norm": 0.45, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.64, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.64, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.35661764705882354, + 
"acc_stderr": 0.02909720956841195, + "acc_norm": 0.35661764705882354, + "acc_norm_stderr": 0.02909720956841195 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5877551020408164, + "acc_stderr": 0.031512360446742695, + "acc_norm": 0.5877551020408164, + "acc_norm_stderr": 0.031512360446742695 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5907172995780591, + "acc_stderr": 0.032007041833595914, + "acc_norm": 0.5907172995780591, + "acc_norm_stderr": 0.032007041833595914 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.34615384615384615, + "acc_stderr": 0.012150699768228575, + "acc_norm": 0.34615384615384615, + "acc_norm_stderr": 0.012150699768228575 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.47549019607843135, + "acc_stderr": 0.035050931943487976, + "acc_norm": 0.47549019607843135, + "acc_norm_stderr": 0.035050931943487976 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5575757575757576, + "acc_stderr": 0.038783721137112745, + "acc_norm": 0.5575757575757576, + "acc_norm_stderr": 0.038783721137112745 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.28886168910648713, + "mc1_stderr": 0.015866346401384308, + "mc2": 0.4858897187001004, + "mc2_stderr": 0.016250190279064488 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4132231404958678, + "acc_stderr": 0.01692948023449523, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.01701403811929749 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + 
"harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + 
"harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "4yo1/llama3-eng-ko-8b-sl5", + "model_sha": "75a5820ba69a2def1bee5341d49082ee1372db27", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/4yo1/llama3-eng-ko-8b-sl6_1/result_2024-07-16 04:26:59.json b/4yo1/llama3-eng-ko-8b-sl6_1/result_2024-07-16 04:26:59.json new file mode 100644 index 0000000000000000000000000000000000000000..65449b934e014e6696cc91c3c024f127c8a1cd55 --- /dev/null +++ b/4yo1/llama3-eng-ko-8b-sl6_1/result_2024-07-16 04:26:59.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.19965870307167236, + "acc_stderr": 0.01168162575688869, + "acc_norm": 0.26109215017064846, + "acc_norm_stderr": 0.012835523909473843 + }, + "harness|ko_hellaswag|10": { + "acc": 0.262796255725951, + "acc_stderr": 0.004392531344297395, + "acc_norm": 0.28809002190798644, + "acc_norm_stderr": 0.004519476835646786 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.42105263157894735, + "acc_stderr": 0.037867207062342145, + "acc_norm": 0.42105263157894735, + "acc_norm_stderr": 0.037867207062342145 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2815533980582524, + "acc_stderr": 0.04453254836326466, + "acc_norm": 0.2815533980582524, + "acc_norm_stderr": 0.04453254836326466 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.35759897828863346, + "acc_stderr": 0.017139488998803302, + "acc_norm": 0.35759897828863346, + "acc_norm_stderr": 0.017139488998803302 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.32592592592592595, + "acc_stderr": 0.04049122041702506, + "acc_norm": 0.32592592592592595, + "acc_norm_stderr": 0.04049122041702506 + }, + 
"harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909284, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909284 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3574468085106383, + "acc_stderr": 0.03132941789476425, + "acc_norm": 0.3574468085106383, + "acc_norm_stderr": 0.03132941789476425 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.35542168674698793, + "acc_stderr": 0.03726214354322415, + "acc_norm": 0.35542168674698793, + "acc_norm_stderr": 0.03726214354322415 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.33440514469453375, + "acc_stderr": 0.026795422327893937, + "acc_norm": 0.33440514469453375, + "acc_norm_stderr": 0.026795422327893937 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.27802690582959644, + "acc_stderr": 0.030069584874494047, + "acc_norm": 0.27802690582959644, + "acc_norm_stderr": 0.030069584874494047 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3435114503816794, + "acc_stderr": 0.041649760719448786, + "acc_norm": 0.3435114503816794, + "acc_norm_stderr": 0.041649760719448786 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3838383838383838, + "acc_stderr": 0.034648816750163375, + "acc_norm": 0.3838383838383838, + "acc_norm_stderr": 0.034648816750163375 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.43448275862068964, + "acc_stderr": 0.041307408795554966, + "acc_norm": 0.43448275862068964, + "acc_norm_stderr": 0.041307408795554966 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.04389869956808777, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.04389869956808777 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.40756302521008403, + "acc_stderr": 0.03191863374478466, + "acc_norm": 0.40756302521008403, + 
"acc_norm_stderr": 0.03191863374478466 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.02390115797940254, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.02390115797940254 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.47, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.47, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.37962962962962965, + "acc_stderr": 0.04691521224077742, + "acc_norm": 0.37962962962962965, + "acc_norm_stderr": 0.04691521224077742 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.35960591133004927, + "acc_stderr": 0.03376458246509567, + "acc_norm": 0.35960591133004927, + "acc_norm_stderr": 0.03376458246509567 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.36129032258064514, + "acc_stderr": 0.027327548447957532, + "acc_norm": 0.36129032258064514, + "acc_norm_stderr": 0.027327548447957532 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.36752136752136755, + "acc_stderr": 0.03158539157745636, + "acc_norm": 0.36752136752136755, + "acc_norm_stderr": 0.03158539157745636 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3283018867924528, + "acc_stderr": 0.028901593612411784, + "acc_norm": 0.3283018867924528, + "acc_norm_stderr": 0.028901593612411784 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.32727272727272727, + "acc_stderr": 0.04494290866252088, + "acc_norm": 0.32727272727272727, + "acc_norm_stderr": 0.04494290866252088 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.028317533496066465, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.028317533496066465 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2781456953642384, + "acc_stderr": 
0.03658603262763743, + "acc_norm": 0.2781456953642384, + "acc_norm_stderr": 0.03658603262763743 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.39800995024875624, + "acc_stderr": 0.03461199429040014, + "acc_norm": 0.39800995024875624, + "acc_norm_stderr": 0.03461199429040014 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.31213872832369943, + "acc_stderr": 0.035331333893236574, + "acc_norm": 0.31213872832369943, + "acc_norm_stderr": 0.035331333893236574 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.328042328042328, + "acc_stderr": 0.0241804971643769, + "acc_norm": 0.328042328042328, + "acc_norm_stderr": 0.0241804971643769 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.03476590104304134, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.03476590104304134 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.32947976878612717, + "acc_stderr": 0.025305258131879706, + "acc_norm": 0.32947976878612717, + "acc_norm_stderr": 0.025305258131879706 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3006134969325153, + "acc_stderr": 0.03602511318806771, + "acc_norm": 0.3006134969325153, + "acc_norm_stderr": 0.03602511318806771 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.02540719779889017, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.02540719779889017 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.29015544041450775, + 
"acc_stderr": 0.03275264467791516, + "acc_norm": 0.29015544041450775, + "acc_norm_stderr": 0.03275264467791516 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.0414243971948936, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.0414243971948936 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3339449541284404, + "acc_stderr": 0.020220554196736403, + "acc_norm": 0.3339449541284404, + "acc_norm_stderr": 0.020220554196736403 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.0404061017820884, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.0404061017820884 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.027826109307283686, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.027826109307283686 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5041322314049587, + "acc_stderr": 0.045641987674327526, + "acc_norm": 0.5041322314049587, + "acc_norm_stderr": 0.045641987674327526 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.42105263157894735, + "acc_stderr": 0.04017901275981748, + "acc_norm": 0.42105263157894735, + "acc_norm_stderr": 0.04017901275981748 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.018433427649401896, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.018433427649401896 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3262411347517731, + "acc_stderr": 0.027968453043563168, + "acc_norm": 0.3262411347517731, + "acc_norm_stderr": 0.027968453043563168 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3125, + "acc_stderr": 0.043994650575715215, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.043994650575715215 + }, + 
"harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.39351851851851855, + "acc_stderr": 0.03331747876370312, + "acc_norm": 0.39351851851851855, + "acc_norm_stderr": 0.03331747876370312 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2636871508379888, + "acc_stderr": 0.014736926383761983, + "acc_norm": 0.2636871508379888, + "acc_norm_stderr": 0.014736926383761983 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3713235294117647, + "acc_stderr": 0.02934980313976587, + "acc_norm": 0.3713235294117647, + "acc_norm_stderr": 0.02934980313976587 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4448979591836735, + "acc_stderr": 0.031814251181977865, + "acc_norm": 0.4448979591836735, + "acc_norm_stderr": 0.031814251181977865 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.33755274261603374, + "acc_stderr": 0.03078154910202622, + "acc_norm": 0.33755274261603374, + "acc_norm_stderr": 0.03078154910202622 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.303129074315515, + "acc_stderr": 0.011738669951254298, + "acc_norm": 0.303129074315515, + "acc_norm_stderr": 0.011738669951254298 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.3480392156862745, + "acc_stderr": 0.03343311240488418, + "acc_norm": 0.3480392156862745, + "acc_norm_stderr": 0.03343311240488418 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3515151515151515, + "acc_stderr": 0.037282069986826503, + "acc_norm": 0.3515151515151515, + "acc_norm_stderr": 0.037282069986826503 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2484700122399021, + "mc1_stderr": 0.015127427096520688, + "mc2": 
0.4886424856035836, + "mc2_stderr": 0.016537537410601783 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.12160566706021252, + "acc_stderr": 0.011236640546845988, + "acc_norm": 0.2644628099173554, + "acc_norm_stderr": 0.015163499477892412 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + 
"harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "4yo1/llama3-eng-ko-8b-sl6_1", + "model_sha": "82835f842c382a868f4c6f5fac4baa1d80c1cfab", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/4yo1/llama3-eng-ko-8b/result_2024-06-24 08:56:27.json b/4yo1/llama3-eng-ko-8b/result_2024-06-24 08:56:27.json new file mode 100644 index 0000000000000000000000000000000000000000..4fdafd5772baaf0e00650a174a32b9b66d79ea0b --- /dev/null +++ b/4yo1/llama3-eng-ko-8b/result_2024-06-24 08:56:27.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2525597269624573, + "acc_stderr": 0.012696728980207708, + "acc_norm": 0.31399317406143346, + "acc_norm_stderr": 0.013562691224726304 + }, + "harness|ko_hellaswag|10": { + "acc": 0.32752439753037244, + "acc_stderr": 0.004683511716552236, + "acc_norm": 0.40967934674367656, + "acc_norm_stderr": 0.0049076947279356915 + }, + 
"harness|ko_mmlu_world_religions|5": { + "acc": 0.42105263157894735, + "acc_stderr": 0.037867207062342145, + "acc_norm": 0.42105263157894735, + "acc_norm_stderr": 0.037867207062342145 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2912621359223301, + "acc_stderr": 0.044986763205729224, + "acc_norm": 0.2912621359223301, + "acc_norm_stderr": 0.044986763205729224 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.36015325670498083, + "acc_stderr": 0.0171663624713693, + "acc_norm": 0.36015325670498083, + "acc_norm_stderr": 0.0171663624713693 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.28888888888888886, + "acc_stderr": 0.0391545063041425, + "acc_norm": 0.28888888888888886, + "acc_norm_stderr": 0.0391545063041425 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3659574468085106, + "acc_stderr": 0.0314895582974553, + "acc_norm": 0.3659574468085106, + "acc_norm_stderr": 0.0314895582974553 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.28313253012048195, + "acc_stderr": 0.03507295431370518, + "acc_norm": 0.28313253012048195, + "acc_norm_stderr": 0.03507295431370518 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.33762057877813506, + "acc_stderr": 0.026858825879488544, + "acc_norm": 0.33762057877813506, + "acc_norm_stderr": 0.026858825879488544 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.34977578475336324, + "acc_stderr": 0.03200736719484503, + "acc_norm": 0.34977578475336324, + "acc_norm_stderr": 0.03200736719484503 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.32061068702290074, + "acc_stderr": 0.04093329229834278, + "acc_norm": 0.32061068702290074, + "acc_norm_stderr": 0.04093329229834278 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + 
"harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3383838383838384, + "acc_stderr": 0.03371124142626303, + "acc_norm": 0.3383838383838384, + "acc_norm_stderr": 0.03371124142626303 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5379310344827586, + "acc_stderr": 0.04154659671707548, + "acc_norm": 0.5379310344827586, + "acc_norm_stderr": 0.04154659671707548 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.04023382273617749, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.04023382273617749 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.37815126050420167, + "acc_stderr": 0.031499305777849054, + "acc_norm": 0.37815126050420167, + "acc_norm_stderr": 0.031499305777849054 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3153846153846154, + "acc_stderr": 0.02355964698318995, + "acc_norm": 0.3153846153846154, + "acc_norm_stderr": 0.02355964698318995 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.39814814814814814, + "acc_stderr": 0.04732332615978815, + "acc_norm": 0.39814814814814814, + "acc_norm_stderr": 0.04732332615978815 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3399014778325123, + "acc_stderr": 0.033327690684107895, + "acc_norm": 0.3399014778325123, + "acc_norm_stderr": 0.033327690684107895 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.38387096774193546, + "acc_stderr": 0.027666182075539635, + "acc_norm": 0.38387096774193546, + "acc_norm_stderr": 0.027666182075539635 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5598290598290598, + "acc_stderr": 0.032520741720630506, + "acc_norm": 
0.5598290598290598, + "acc_norm_stderr": 0.032520741720630506 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.39622641509433965, + "acc_stderr": 0.03010279378179119, + "acc_norm": 0.39622641509433965, + "acc_norm_stderr": 0.03010279378179119 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4, + "acc_stderr": 0.0469237132203465, + "acc_norm": 0.4, + "acc_norm_stderr": 0.0469237132203465 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.34814814814814815, + "acc_stderr": 0.02904560029061626, + "acc_norm": 0.34814814814814815, + "acc_norm_stderr": 0.02904560029061626 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + "acc_stderr": 0.03684881521389023, + "acc_norm": 0.2847682119205298, + "acc_norm_stderr": 0.03684881521389023 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.40298507462686567, + "acc_stderr": 0.034683432951111266, + "acc_norm": 0.40298507462686567, + "acc_norm_stderr": 0.034683432951111266 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.31213872832369943, + "acc_stderr": 0.035331333893236574, + "acc_norm": 0.31213872832369943, + "acc_norm_stderr": 0.035331333893236574 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3253968253968254, + "acc_stderr": 0.024130158299762613, + "acc_norm": 0.3253968253968254, + "acc_norm_stderr": 0.024130158299762613 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2847222222222222, + "acc_stderr": 0.03773809990686935, + "acc_norm": 0.2847222222222222, + "acc_norm_stderr": 0.03773809990686935 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.3352601156069364, + "acc_stderr": 0.025416003773165555, + 
"acc_norm": 0.3352601156069364, + "acc_norm_stderr": 0.025416003773165555 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3558282208588957, + "acc_stderr": 0.03761521380046734, + "acc_norm": 0.3558282208588957, + "acc_norm_stderr": 0.03761521380046734 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3425925925925926, + "acc_stderr": 0.026406145973625672, + "acc_norm": 0.3425925925925926, + "acc_norm_stderr": 0.026406145973625672 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.3626943005181347, + "acc_stderr": 0.03469713791704372, + "acc_norm": 0.3626943005181347, + "acc_norm_stderr": 0.03469713791704372 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.04227054451232199, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.04227054451232199 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3486238532110092, + "acc_stderr": 0.020431254090714317, + "acc_norm": 0.3486238532110092, + "acc_norm_stderr": 0.020431254090714317 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2698412698412698, + "acc_stderr": 0.03970158273235172, + "acc_norm": 0.2698412698412698, + "acc_norm_stderr": 0.03970158273235172 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.02782610930728369, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.02782610930728369 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5041322314049587, + "acc_stderr": 0.045641987674327526, + "acc_norm": 0.5041322314049587, + "acc_norm_stderr": 0.045641987674327526 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.2565789473684211, + "acc_stderr": 
0.0355418036802569, + "acc_norm": 0.2565789473684211, + "acc_norm_stderr": 0.0355418036802569 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.018771683893528186, + "acc_norm": 0.3137254901960784, + "acc_norm_stderr": 0.018771683893528186 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.26595744680851063, + "acc_stderr": 0.026358065698880592, + "acc_norm": 0.26595744680851063, + "acc_norm_stderr": 0.026358065698880592 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.36607142857142855, + "acc_stderr": 0.045723723587374296, + "acc_norm": 0.36607142857142855, + "acc_norm_stderr": 0.045723723587374296 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3101851851851852, + "acc_stderr": 0.031546962856566295, + "acc_norm": 0.3101851851851852, + "acc_norm_stderr": 0.031546962856566295 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23687150837988827, + "acc_stderr": 0.014219570788103982, + "acc_norm": 0.23687150837988827, + "acc_norm_stderr": 0.014219570788103982 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.04960449637488584, + "acc_norm": 0.42, + "acc_norm_stderr": 0.04960449637488584 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3382352941176471, + "acc_stderr": 0.028739328513983583, + "acc_norm": 0.3382352941176471, + "acc_norm_stderr": 0.028739328513983583 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.20816326530612245, + "acc_stderr": 0.025991117672813296, + "acc_norm": 0.20816326530612245, + "acc_norm_stderr": 0.025991117672813296 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.4472573839662447, + "acc_stderr": 0.03236564251614192, + "acc_norm": 0.4472573839662447, + "acc_norm_stderr": 0.03236564251614192 + }, + 
"harness|ko_mmlu_professional_law|5": { + "acc": 0.2790091264667536, + "acc_stderr": 0.011455208832803529, + "acc_norm": 0.2790091264667536, + "acc_norm_stderr": 0.011455208832803529 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.31862745098039214, + "acc_stderr": 0.0327028718148208, + "acc_norm": 0.31862745098039214, + "acc_norm_stderr": 0.0327028718148208 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3151515151515151, + "acc_stderr": 0.0362773057502241, + "acc_norm": 0.3151515151515151, + "acc_norm_stderr": 0.0362773057502241 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2913096695226438, + "mc1_stderr": 0.01590598704818483, + "mc2": 0.44742099912433764, + "mc2_stderr": 0.015551700567433569 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2987012987012987, + "acc_stderr": 0.01573565739143828, + "acc_norm": 0.4085005903187721, + "acc_norm_stderr": 0.01690006287942712 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + 
"harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "4yo1/llama3-eng-ko-8b", + "model_sha": "e2c5c277fae6b14e4a8f6c45075f0518199d3e95", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git 
a/4yo1/llama3-pre1-ds-lora1/result_2024-07-18 01:07:43.json b/4yo1/llama3-pre1-ds-lora1/result_2024-07-18 01:07:43.json new file mode 100644 index 0000000000000000000000000000000000000000..e705407c3518c3d5a033ba87a7553d4da26a7e8a --- /dev/null +++ b/4yo1/llama3-pre1-ds-lora1/result_2024-07-18 01:07:43.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.20136518771331058, + "acc_stderr": 0.011718927477444265, + "acc_norm": 0.24914675767918087, + "acc_norm_stderr": 0.012639407111926437 + }, + "harness|ko_hellaswag|10": { + "acc": 0.26110336586337385, + "acc_stderr": 0.004383384784038473, + "acc_norm": 0.2729535949014141, + "acc_norm_stderr": 0.004445667638734141 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.26900584795321636, + "acc_stderr": 0.0340105262010409, + "acc_norm": 0.26900584795321636, + "acc_norm_stderr": 0.0340105262010409 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.24271844660194175, + "acc_stderr": 0.04245022486384493, + "acc_norm": 0.24271844660194175, + "acc_norm_stderr": 0.04245022486384493 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2720306513409962, + "acc_stderr": 0.015913367447500517, + "acc_norm": 0.2720306513409962, + "acc_norm_stderr": 0.015913367447500517 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34074074074074073, + "acc_stderr": 0.040943762699967946, + "acc_norm": 0.34074074074074073, + "acc_norm_stderr": 0.040943762699967946 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2936170212765957, + "acc_stderr": 0.029771642712491227, + "acc_norm": 0.2936170212765957, + "acc_norm_stderr": 0.029771642712491227 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.25301204819277107, + "acc_stderr": 0.03384429155233135, + "acc_norm": 0.25301204819277107, + "acc_norm_stderr": 0.03384429155233135 + }, + 
"harness|ko_mmlu_philosophy|5": { + "acc": 0.27009646302250806, + "acc_stderr": 0.025218040373410622, + "acc_norm": 0.27009646302250806, + "acc_norm_stderr": 0.025218040373410622 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.25112107623318386, + "acc_stderr": 0.029105220833224615, + "acc_norm": 0.25112107623318386, + "acc_norm_stderr": 0.029105220833224615 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.25190839694656486, + "acc_stderr": 0.038073871163060866, + "acc_norm": 0.25190839694656486, + "acc_norm_stderr": 0.038073871163060866 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.18, + "acc_stderr": 0.03861229196653694, + "acc_norm": 0.18, + "acc_norm_stderr": 0.03861229196653694 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.22727272727272727, + "acc_stderr": 0.02985751567338642, + "acc_norm": 0.22727272727272727, + "acc_norm_stderr": 0.02985751567338642 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3724137931034483, + "acc_stderr": 0.040287315329475604, + "acc_norm": 0.3724137931034483, + "acc_norm_stderr": 0.040287315329475604 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.13725490196078433, + "acc_stderr": 0.0342408466989152, + "acc_norm": 0.13725490196078433, + "acc_norm_stderr": 0.0342408466989152 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.23109243697478993, + "acc_stderr": 0.027381406927868966, + "acc_norm": 0.23109243697478993, + "acc_norm_stderr": 0.027381406927868966 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2358974358974359, + "acc_stderr": 0.021525965407408726, + "acc_norm": 0.2358974358974359, + "acc_norm_stderr": 0.021525965407408726 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 
0.048241815132442176 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.28703703703703703, + "acc_stderr": 0.043733130409147614, + "acc_norm": 0.28703703703703703, + "acc_norm_stderr": 0.043733130409147614 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.33497536945812806, + "acc_stderr": 0.033208527423483104, + "acc_norm": 0.33497536945812806, + "acc_norm_stderr": 0.033208527423483104 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2903225806451613, + "acc_stderr": 0.025822106119415898, + "acc_norm": 0.2903225806451613, + "acc_norm_stderr": 0.025822106119415898 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.27350427350427353, + "acc_stderr": 0.029202540153431173, + "acc_norm": 0.27350427350427353, + "acc_norm_stderr": 0.029202540153431173 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.27169811320754716, + "acc_stderr": 0.027377706624670713, + "acc_norm": 0.27169811320754716, + "acc_norm_stderr": 0.027377706624670713 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.23636363636363636, + "acc_stderr": 0.040693063197213754, + "acc_norm": 0.23636363636363636, + "acc_norm_stderr": 0.040693063197213754 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.02794045713622842, + "acc_norm": 0.3, + "acc_norm_stderr": 0.02794045713622842 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.24503311258278146, + "acc_stderr": 0.03511807571804724, + "acc_norm": 0.24503311258278146, + "acc_norm_stderr": 0.03511807571804724 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.2885572139303483, + "acc_stderr": 0.03203841040213321, + "acc_norm": 0.2885572139303483, + "acc_norm_stderr": 0.03203841040213321 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.23699421965317918, + "acc_stderr": 0.03242414757483098, + "acc_norm": 0.23699421965317918, + "acc_norm_stderr": 0.03242414757483098 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2962962962962963, + "acc_stderr": 
0.02351729433596329, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.02351729433596329 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.22916666666666666, + "acc_stderr": 0.03514697467862388, + "acc_norm": 0.22916666666666666, + "acc_norm_stderr": 0.03514697467862388 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909282, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909282 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2976878612716763, + "acc_stderr": 0.024617055388677, + "acc_norm": 0.2976878612716763, + "acc_norm_stderr": 0.024617055388677 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.34355828220858897, + "acc_stderr": 0.037311335196738925, + "acc_norm": 0.34355828220858897, + "acc_norm_stderr": 0.037311335196738925 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.32098765432098764, + "acc_stderr": 0.025976566010862737, + "acc_norm": 0.32098765432098764, + "acc_norm_stderr": 0.025976566010862737 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.21243523316062177, + "acc_stderr": 0.02951928261681725, + "acc_norm": 0.21243523316062177, + "acc_norm_stderr": 0.02951928261681725 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.21929824561403508, + "acc_stderr": 0.03892431106518752, + "acc_norm": 0.21929824561403508, + "acc_norm_stderr": 0.03892431106518752 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.23669724770642203, + "acc_stderr": 0.01822407811729908, + "acc_norm": 0.23669724770642203, + "acc_norm_stderr": 0.01822407811729908 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.23809523809523808, + 
"acc_stderr": 0.03809523809523813, + "acc_norm": 0.23809523809523808, + "acc_norm_stderr": 0.03809523809523813 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3202614379084967, + "acc_stderr": 0.026716118380156834, + "acc_norm": 0.3202614379084967, + "acc_norm_stderr": 0.026716118380156834 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720685, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720685 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.39669421487603307, + "acc_stderr": 0.04465869780531009, + "acc_norm": 0.39669421487603307, + "acc_norm_stderr": 0.04465869780531009 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3355263157894737, + "acc_stderr": 0.03842498559395269, + "acc_norm": 0.3355263157894737, + "acc_norm_stderr": 0.03842498559395269 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.018120224251484587, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.018120224251484587 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2553191489361702, + "acc_stderr": 0.026011992930902002, + "acc_norm": 0.2553191489361702, + "acc_norm_stderr": 0.026011992930902002 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.32142857142857145, + "acc_stderr": 0.04432804055291519, + "acc_norm": 0.32142857142857145, + "acc_norm_stderr": 0.04432804055291519 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.18055555555555555, + "acc_stderr": 0.026232878971491656, + "acc_norm": 0.18055555555555555, + "acc_norm_stderr": 0.026232878971491656 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.25027932960893856, + "acc_stderr": 0.01448750085285042, + "acc_norm": 0.25027932960893856, + "acc_norm_stderr": 0.01448750085285042 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + 
"harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.21323529411764705, + "acc_stderr": 0.024880971512294254, + "acc_norm": 0.21323529411764705, + "acc_norm_stderr": 0.024880971512294254 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.23265306122448978, + "acc_stderr": 0.02704925791589618, + "acc_norm": 0.23265306122448978, + "acc_norm_stderr": 0.02704925791589618 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.26582278481012656, + "acc_stderr": 0.02875679962965834, + "acc_norm": 0.26582278481012656, + "acc_norm_stderr": 0.02875679962965834 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.26792698826597133, + "acc_stderr": 0.011311347690633869, + "acc_norm": 0.26792698826597133, + "acc_norm_stderr": 0.011311347690633869 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.030587591351604246, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.030587591351604246 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2545454545454545, + "acc_stderr": 0.0340150671524904, + "acc_norm": 0.2545454545454545, + "acc_norm_stderr": 0.0340150671524904 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24969400244798043, + "mc1_stderr": 0.015152286907148128, + "mc2": 0.4821689215890819, + "mc2_stderr": 0.016978019371229284 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.10861865407319952, + "acc_stderr": 0.010697906495255899, + "acc_norm": 0.32113341204250295, + "acc_norm_stderr": 0.016052762579111573 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + 
"harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + 
"harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "4yo1/llama3-pre1-ds-lora1", + "model_sha": "e958846fd51d3fff3221716153cbd7a2df924dd2", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/4yo1/llama3-pre1-ds-lora2/result_2024-07-19 01:53:13.json b/4yo1/llama3-pre1-ds-lora2/result_2024-07-19 01:53:13.json new file mode 100644 index 0000000000000000000000000000000000000000..5e5a290592e12af99df2aafeda16b843eb5bbb95 --- /dev/null +++ b/4yo1/llama3-pre1-ds-lora2/result_2024-07-19 01:53:13.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.19965870307167236, + "acc_stderr": 0.011681625756888693, + "acc_norm": 0.25, + "acc_norm_stderr": 0.012653835621466646 + }, + "harness|ko_hellaswag|10": { + "acc": 0.255327623979287, + "acc_stderr": 0.0043515406039885685, + "acc_norm": 0.27604062935670187, + "acc_norm_stderr": 0.004461235175488321 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.30409356725146197, + "acc_stderr": 0.0352821125824523, + "acc_norm": 0.30409356725146197, + "acc_norm_stderr": 0.0352821125824523 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2815533980582524, + "acc_stderr": 0.044532548363264673, + "acc_norm": 0.2815533980582524, + "acc_norm_stderr": 0.044532548363264673 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2822477650063857, + "acc_stderr": 0.016095302969878558, + "acc_norm": 0.2822477650063857, + "acc_norm_stderr": 0.016095302969878558 + 
}, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.038201699145179055, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.038201699145179055 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2978723404255319, + "acc_stderr": 0.029896145682095455, + "acc_norm": 0.2978723404255319, + "acc_norm_stderr": 0.029896145682095455 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.30120481927710846, + "acc_stderr": 0.035716092300534796, + "acc_norm": 0.30120481927710846, + "acc_norm_stderr": 0.035716092300534796 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2765273311897106, + "acc_stderr": 0.025403832978179604, + "acc_norm": 0.2765273311897106, + "acc_norm_stderr": 0.025403832978179604 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.30493273542600896, + "acc_stderr": 0.030898610882477515, + "acc_norm": 0.30493273542600896, + "acc_norm_stderr": 0.030898610882477515 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2824427480916031, + "acc_stderr": 0.03948406125768361, + "acc_norm": 0.2824427480916031, + "acc_norm_stderr": 0.03948406125768361 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.23232323232323232, + "acc_stderr": 0.030088629490217483, + "acc_norm": 0.23232323232323232, + "acc_norm_stderr": 0.030088629490217483 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3448275862068966, + "acc_stderr": 0.039609335494512087, + "acc_norm": 0.3448275862068966, + "acc_norm_stderr": 0.039609335494512087 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.04023382273617747, + "acc_norm": 0.20588235294117646, + 
"acc_norm_stderr": 0.04023382273617747 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.24789915966386555, + "acc_stderr": 0.028047967224176896, + "acc_norm": 0.24789915966386555, + "acc_norm_stderr": 0.028047967224176896 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2692307692307692, + "acc_stderr": 0.022489389793654824, + "acc_norm": 0.2692307692307692, + "acc_norm_stderr": 0.022489389793654824 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.28703703703703703, + "acc_stderr": 0.043733130409147614, + "acc_norm": 0.28703703703703703, + "acc_norm_stderr": 0.043733130409147614 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.33497536945812806, + "acc_stderr": 0.033208527423483104, + "acc_norm": 0.33497536945812806, + "acc_norm_stderr": 0.033208527423483104 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.26129032258064516, + "acc_stderr": 0.024993053397764826, + "acc_norm": 0.26129032258064516, + "acc_norm_stderr": 0.024993053397764826 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2606837606837607, + "acc_stderr": 0.028760348956523414, + "acc_norm": 0.2606837606837607, + "acc_norm_stderr": 0.028760348956523414 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2490566037735849, + "acc_stderr": 0.02661648298050171, + "acc_norm": 0.2490566037735849, + "acc_norm_stderr": 0.02661648298050171 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2, + "acc_stderr": 0.03831305140884603, + "acc_norm": 0.2, + "acc_norm_stderr": 0.03831305140884603 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.027940457136228416, + "acc_norm": 0.3, + 
"acc_norm_stderr": 0.027940457136228416 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.23841059602649006, + "acc_stderr": 0.03479185572599661, + "acc_norm": 0.23841059602649006, + "acc_norm_stderr": 0.03479185572599661 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.2736318407960199, + "acc_stderr": 0.03152439186555402, + "acc_norm": 0.2736318407960199, + "acc_norm_stderr": 0.03152439186555402 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.27167630057803466, + "acc_stderr": 0.03391750322321659, + "acc_norm": 0.27167630057803466, + "acc_norm_stderr": 0.03391750322321659 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.29365079365079366, + "acc_stderr": 0.023456037383982026, + "acc_norm": 0.29365079365079366, + "acc_norm_stderr": 0.023456037383982026 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.1875, + "acc_stderr": 0.032639560491693344, + "acc_norm": 0.1875, + "acc_norm_stderr": 0.032639560491693344 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.30346820809248554, + "acc_stderr": 0.024752411960917212, + "acc_norm": 0.30346820809248554, + "acc_norm_stderr": 0.024752411960917212 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.32515337423312884, + "acc_stderr": 0.036803503712864595, + "acc_norm": 0.32515337423312884, + "acc_norm_stderr": 0.036803503712864595 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.24691358024691357, + "acc_stderr": 0.02399350170904212, + "acc_norm": 0.24691358024691357, + "acc_norm_stderr": 0.02399350170904212 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + 
"acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.21761658031088082, + "acc_stderr": 0.029778663037752954, + "acc_norm": 0.21761658031088082, + "acc_norm_stderr": 0.029778663037752954 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.040493392977481404, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.040493392977481404 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.21651376146788992, + "acc_stderr": 0.01765871059444313, + "acc_norm": 0.21651376146788992, + "acc_norm_stderr": 0.01765871059444313 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.24603174603174602, + "acc_stderr": 0.03852273364924315, + "acc_norm": 0.24603174603174602, + "acc_norm_stderr": 0.03852273364924315 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.02555316999182651, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.02555316999182651 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.3140495867768595, + "acc_stderr": 0.04236964753041018, + "acc_norm": 0.3140495867768595, + "acc_norm_stderr": 0.04236964753041018 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.29605263157894735, + "acc_stderr": 0.03715062154998904, + "acc_norm": 0.29605263157894735, + "acc_norm_stderr": 0.03715062154998904 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.018120224251484598, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.018120224251484598 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2695035460992908, + "acc_stderr": 0.026469036818590634, + "acc_norm": 0.2695035460992908, + "acc_norm_stderr": 0.026469036818590634 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 
0.36607142857142855, + "acc_stderr": 0.0457237235873743, + "acc_norm": 0.36607142857142855, + "acc_norm_stderr": 0.0457237235873743 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3194444444444444, + "acc_stderr": 0.031798763421768524, + "acc_norm": 0.3194444444444444, + "acc_norm_stderr": 0.031798763421768524 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24916201117318434, + "acc_stderr": 0.014465893829859926, + "acc_norm": 0.24916201117318434, + "acc_norm_stderr": 0.014465893829859926 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.21691176470588236, + "acc_stderr": 0.02503584522771126, + "acc_norm": 0.21691176470588236, + "acc_norm_stderr": 0.02503584522771126 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2163265306122449, + "acc_stderr": 0.026358916334904024, + "acc_norm": 0.2163265306122449, + "acc_norm_stderr": 0.026358916334904024 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2489451476793249, + "acc_stderr": 0.028146970599422644, + "acc_norm": 0.2489451476793249, + "acc_norm_stderr": 0.028146970599422644 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2646675358539765, + "acc_stderr": 0.011267332992845524, + "acc_norm": 0.2646675358539765, + "acc_norm_stderr": 0.011267332992845524 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.030587591351604246, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.030587591351604246 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2545454545454545, + "acc_stderr": 0.0340150671524904, + "acc_norm": 0.2545454545454545, + 
"acc_norm_stderr": 0.0340150671524904 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2484700122399021, + "mc1_stderr": 0.015127427096520688, + "mc2": 0.48623344584189665, + "mc2_stderr": 0.016862674875056858 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.12514757969303425, + "acc_stderr": 0.011376101146401418, + "acc_norm": 0.21959858323494688, + "acc_norm_stderr": 0.014232743085580252 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + 
"harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "4yo1/llama3-pre1-ds-lora2", + "model_sha": "852b1091a5bbda40c7013948b1f8ec4094844456", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/4yo1/llama3-pre1-ds-lora3/result_2024-07-23 07:30:22.json b/4yo1/llama3-pre1-ds-lora3/result_2024-07-23 07:30:22.json new file mode 100644 index 0000000000000000000000000000000000000000..2af33d0fcda4a82fbf870306706639f992ac00d0 --- /dev/null +++ b/4yo1/llama3-pre1-ds-lora3/result_2024-07-23 07:30:22.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2568259385665529, + "acc_stderr": 0.012766923794116801, + "acc_norm": 0.3097269624573379, + "acc_norm_stderr": 0.013512058415238363 + }, + "harness|ko_hellaswag|10": { + "acc": 
0.2590121489743079, + "acc_stderr": 0.004371969542814558, + "acc_norm": 0.27106154152559253, + "acc_norm_stderr": 0.0044359934925838835 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5497076023391813, + "acc_stderr": 0.03815827365913237, + "acc_norm": 0.5497076023391813, + "acc_norm_stderr": 0.03815827365913237 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5048543689320388, + "acc_stderr": 0.049505043821289195, + "acc_norm": 0.5048543689320388, + "acc_norm_stderr": 0.049505043821289195 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.48659003831417624, + "acc_stderr": 0.01787353173651038, + "acc_norm": 0.48659003831417624, + "acc_norm_stderr": 0.01787353173651038 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.43703703703703706, + "acc_stderr": 0.04284958639753399, + "acc_norm": 0.43703703703703706, + "acc_norm_stderr": 0.04284958639753399 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3872340425531915, + "acc_stderr": 0.03184389265339526, + "acc_norm": 0.3872340425531915, + "acc_norm_stderr": 0.03184389265339526 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42771084337349397, + "acc_stderr": 0.038515976837185335, + "acc_norm": 0.42771084337349397, + "acc_norm_stderr": 0.038515976837185335 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.45016077170418006, + "acc_stderr": 0.028256660723360177, + "acc_norm": 0.45016077170418006, + "acc_norm_stderr": 0.028256660723360177 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4080717488789238, + "acc_stderr": 0.03298574607842822, + "acc_norm": 0.4080717488789238, + "acc_norm_stderr": 0.03298574607842822 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5190839694656488, + "acc_stderr": 0.04382094705550988, + "acc_norm": 0.5190839694656488, + "acc_norm_stderr": 0.04382094705550988 + }, + 
"harness|ko_mmlu_medical_genetics|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.4494949494949495, + "acc_stderr": 0.03544132491947969, + "acc_norm": 0.4494949494949495, + "acc_norm_stderr": 0.03544132491947969 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4689655172413793, + "acc_stderr": 0.04158632762097828, + "acc_norm": 0.4689655172413793, + "acc_norm_stderr": 0.04158632762097828 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.30392156862745096, + "acc_stderr": 0.04576665403207762, + "acc_norm": 0.30392156862745096, + "acc_norm_stderr": 0.04576665403207762 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.41596638655462187, + "acc_stderr": 0.03201650100739615, + "acc_norm": 0.41596638655462187, + "acc_norm_stderr": 0.03201650100739615 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.358974358974359, + "acc_stderr": 0.024321738484602364, + "acc_norm": 0.358974358974359, + "acc_norm_stderr": 0.024321738484602364 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.59, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.59, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.39814814814814814, + "acc_stderr": 0.04732332615978814, + "acc_norm": 0.39814814814814814, + "acc_norm_stderr": 0.04732332615978814 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.034819048444388045, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.034819048444388045 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4774193548387097, + "acc_stderr": 0.028414985019707868, + "acc_norm": 0.4774193548387097, + 
"acc_norm_stderr": 0.028414985019707868 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6538461538461539, + "acc_stderr": 0.0311669573672359, + "acc_norm": 0.6538461538461539, + "acc_norm_stderr": 0.0311669573672359 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.39245283018867927, + "acc_stderr": 0.03005258057955784, + "acc_norm": 0.39245283018867927, + "acc_norm_stderr": 0.03005258057955784 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4636363636363636, + "acc_stderr": 0.047764491623961985, + "acc_norm": 0.4636363636363636, + "acc_norm_stderr": 0.047764491623961985 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.027940457136228412, + "acc_norm": 0.3, + "acc_norm_stderr": 0.027940457136228412 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.03780445850526732, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.03780445850526732 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6019900497512438, + "acc_stderr": 0.034611994290400135, + "acc_norm": 0.6019900497512438, + "acc_norm_stderr": 0.034611994290400135 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3988439306358382, + "acc_stderr": 0.03733626655383509, + "acc_norm": 0.3988439306358382, + "acc_norm_stderr": 0.03733626655383509 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30423280423280424, + "acc_stderr": 0.023695415009463087, + "acc_norm": 0.30423280423280424, + "acc_norm_stderr": 0.023695415009463087 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3472222222222222, + "acc_stderr": 0.0398124054371786, + "acc_norm": 0.3472222222222222, + "acc_norm_stderr": 0.0398124054371786 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.63, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.63, + 
"acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.3988439306358382, + "acc_stderr": 0.026362437574546545, + "acc_norm": 0.3988439306358382, + "acc_norm_stderr": 0.026362437574546545 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3619631901840491, + "acc_stderr": 0.037757007291414416, + "acc_norm": 0.3619631901840491, + "acc_norm_stderr": 0.037757007291414416 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4382716049382716, + "acc_stderr": 0.027607914087400463, + "acc_norm": 0.4382716049382716, + "acc_norm_stderr": 0.027607914087400463 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.37305699481865284, + "acc_stderr": 0.034902055920485744, + "acc_norm": 0.37305699481865284, + "acc_norm_stderr": 0.034902055920485744 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2894736842105263, + "acc_stderr": 0.04266339443159394, + "acc_norm": 0.2894736842105263, + "acc_norm_stderr": 0.04266339443159394 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.46055045871559636, + "acc_stderr": 0.021370494609995093, + "acc_norm": 0.46055045871559636, + "acc_norm_stderr": 0.021370494609995093 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2698412698412698, + "acc_stderr": 0.03970158273235173, + "acc_norm": 0.2698412698412698, + "acc_norm_stderr": 0.03970158273235173 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.43137254901960786, + "acc_stderr": 0.028358956313423552, + "acc_norm": 0.43137254901960786, + "acc_norm_stderr": 0.028358956313423552 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5950413223140496, + "acc_stderr": 0.04481137755942469, 
+ "acc_norm": 0.5950413223140496, + "acc_norm_stderr": 0.04481137755942469 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.47368421052631576, + "acc_stderr": 0.04063302731486671, + "acc_norm": 0.47368421052631576, + "acc_norm_stderr": 0.04063302731486671 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3545751633986928, + "acc_stderr": 0.01935336054755369, + "acc_norm": 0.3545751633986928, + "acc_norm_stderr": 0.01935336054755369 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32269503546099293, + "acc_stderr": 0.027889139300534802, + "acc_norm": 0.32269503546099293, + "acc_norm_stderr": 0.027889139300534802 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.39285714285714285, + "acc_stderr": 0.04635550135609976, + "acc_norm": 0.39285714285714285, + "acc_norm_stderr": 0.04635550135609976 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3472222222222222, + "acc_stderr": 0.032468872436376486, + "acc_norm": 0.3472222222222222, + "acc_norm_stderr": 0.032468872436376486 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24692737430167597, + "acc_stderr": 0.014422292204808852, + "acc_norm": 0.24692737430167597, + "acc_norm_stderr": 0.014422292204808852 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3235294117647059, + "acc_stderr": 0.02841820861940679, + "acc_norm": 0.3235294117647059, + "acc_norm_stderr": 0.02841820861940679 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4530612244897959, + "acc_stderr": 0.03186785930004129, + "acc_norm": 0.4530612244897959, + "acc_norm_stderr": 0.03186785930004129 + }, + 
"harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5147679324894515, + "acc_stderr": 0.03253302807877738, + "acc_norm": 0.5147679324894515, + "acc_norm_stderr": 0.03253302807877738 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.29986962190352023, + "acc_stderr": 0.011702660860193975, + "acc_norm": 0.29986962190352023, + "acc_norm_stderr": 0.011702660860193975 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.39705882352941174, + "acc_stderr": 0.034341311647191286, + "acc_norm": 0.39705882352941174, + "acc_norm_stderr": 0.034341311647191286 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.40606060606060607, + "acc_stderr": 0.03834816355401181, + "acc_norm": 0.40606060606060607, + "acc_norm_stderr": 0.03834816355401181 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.25091799265605874, + "mc1_stderr": 0.015176985027707687, + "mc2": 0.49534110195918407, + "mc2_stderr": 0.017080132275211678 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.09445100354191263, + "acc_stderr": 0.010054814982894204, + "acc_norm": 0.35064935064935066, + "acc_norm_stderr": 0.016405556903893295 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + 
"harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "4yo1/llama3-pre1-ds-lora3", + "model_sha": "6749d7d4761a167f1c160a7d0b9e746f3cb04aab", + 
"model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/4yo1/llama3-pre1-pre2-ds-ins2-lora3/result_2024-07-31 06:39:29.json b/4yo1/llama3-pre1-pre2-ds-ins2-lora3/result_2024-07-31 06:39:29.json new file mode 100644 index 0000000000000000000000000000000000000000..be0075eb0ec0af7b9fb3cae7762a1b09483cd054 --- /dev/null +++ b/4yo1/llama3-pre1-pre2-ds-ins2-lora3/result_2024-07-31 06:39:29.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.20392491467576793, + "acc_stderr": 0.011774262478702254, + "acc_norm": 0.2551194539249147, + "acc_norm_stderr": 0.012739038695202102 + }, + "harness|ko_hellaswag|10": { + "acc": 0.2531368253335989, + "acc_stderr": 0.0043392003634544945, + "acc_norm": 0.2502489543915555, + "acc_norm_stderr": 0.004322710911026373 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.391812865497076, + "acc_stderr": 0.03743979825926401, + "acc_norm": 0.391812865497076, + "acc_norm_stderr": 0.03743979825926401 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2815533980582524, + "acc_stderr": 0.044532548363264673, + "acc_norm": 0.2815533980582524, + "acc_norm_stderr": 0.044532548363264673 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.30779054916985954, + "acc_stderr": 0.016506045045155633, + "acc_norm": 0.30779054916985954, + "acc_norm_stderr": 0.016506045045155633 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37777777777777777, + "acc_stderr": 0.04188307537595853, + "acc_norm": 0.37777777777777777, + "acc_norm_stderr": 0.04188307537595853 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2425531914893617, + "acc_stderr": 0.028020226271200217, + "acc_norm": 0.2425531914893617, + 
"acc_norm_stderr": 0.028020226271200217 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3072289156626506, + "acc_stderr": 0.035915667978246635, + "acc_norm": 0.3072289156626506, + "acc_norm_stderr": 0.035915667978246635 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.35691318327974275, + "acc_stderr": 0.02721042037593403, + "acc_norm": 0.35691318327974275, + "acc_norm_stderr": 0.02721042037593403 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.242152466367713, + "acc_stderr": 0.028751392398694755, + "acc_norm": 0.242152466367713, + "acc_norm_stderr": 0.028751392398694755 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3435114503816794, + "acc_stderr": 0.041649760719448786, + "acc_norm": 0.3435114503816794, + "acc_norm_stderr": 0.041649760719448786 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.29292929292929293, + "acc_stderr": 0.03242497958178817, + "acc_norm": 0.29292929292929293, + "acc_norm_stderr": 0.03242497958178817 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3586206896551724, + "acc_stderr": 0.039966295748767186, + "acc_norm": 0.3586206896551724, + "acc_norm_stderr": 0.039966295748767186 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237656, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237656 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.2773109243697479, + "acc_stderr": 0.029079374539480007, + "acc_norm": 0.2773109243697479, + "acc_norm_stderr": 0.029079374539480007 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2564102564102564, + "acc_stderr": 0.022139081103971527, + "acc_norm": 0.2564102564102564, + "acc_norm_stderr": 0.022139081103971527 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.35, + "acc_stderr": 
0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.28703703703703703, + "acc_stderr": 0.043733130409147614, + "acc_norm": 0.28703703703703703, + "acc_norm_stderr": 0.043733130409147614 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.30049261083743845, + "acc_stderr": 0.03225799476233484, + "acc_norm": 0.30049261083743845, + "acc_norm_stderr": 0.03225799476233484 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.31290322580645163, + "acc_stderr": 0.026377567028645854, + "acc_norm": 0.31290322580645163, + "acc_norm_stderr": 0.026377567028645854 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2948717948717949, + "acc_stderr": 0.029872577708891148, + "acc_norm": 0.2948717948717949, + "acc_norm_stderr": 0.029872577708891148 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.24528301886792453, + "acc_stderr": 0.026480357179895702, + "acc_norm": 0.24528301886792453, + "acc_norm_stderr": 0.026480357179895702 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.22727272727272727, + "acc_stderr": 0.040139645540727735, + "acc_norm": 0.22727272727272727, + "acc_norm_stderr": 0.040139645540727735 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.29259259259259257, + "acc_stderr": 0.02773896963217609, + "acc_norm": 0.29259259259259257, + "acc_norm_stderr": 0.02773896963217609 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + "acc_stderr": 0.037345356767871984, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.037345356767871984 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.26865671641791045, + "acc_stderr": 0.03134328358208954, + "acc_norm": 0.26865671641791045, + "acc_norm_stderr": 0.03134328358208954 + }, + 
"harness|ko_mmlu_college_medicine|5": { + "acc": 0.28901734104046245, + "acc_stderr": 0.03456425745086999, + "acc_norm": 0.28901734104046245, + "acc_norm_stderr": 0.03456425745086999 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.291005291005291, + "acc_stderr": 0.02339382650048488, + "acc_norm": 0.291005291005291, + "acc_norm_stderr": 0.02339382650048488 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.24305555555555555, + "acc_stderr": 0.03586879280080342, + "acc_norm": 0.24305555555555555, + "acc_norm_stderr": 0.03586879280080342 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932269, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932269 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.31213872832369943, + "acc_stderr": 0.024946792225272314, + "acc_norm": 0.31213872832369943, + "acc_norm_stderr": 0.024946792225272314 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3374233128834356, + "acc_stderr": 0.03714908409935574, + "acc_norm": 0.3374233128834356, + "acc_norm_stderr": 0.03714908409935574 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2808641975308642, + "acc_stderr": 0.025006469755799208, + "acc_norm": 0.2808641975308642, + "acc_norm_stderr": 0.025006469755799208 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816508, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816508 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.2694300518134715, + "acc_stderr": 0.03201867122877793, + "acc_norm": 0.2694300518134715, + "acc_norm_stderr": 0.03201867122877793 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.03999423879281336, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 
0.03999423879281336 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.28073394495412846, + "acc_stderr": 0.019266055045871616, + "acc_norm": 0.28073394495412846, + "acc_norm_stderr": 0.019266055045871616 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.16666666666666666, + "acc_stderr": 0.03333333333333338, + "acc_norm": 0.16666666666666666, + "acc_norm_stderr": 0.03333333333333338 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2908496732026144, + "acc_stderr": 0.026004800363952113, + "acc_norm": 0.2908496732026144, + "acc_norm_stderr": 0.026004800363952113 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.4297520661157025, + "acc_stderr": 0.04519082021319773, + "acc_norm": 0.4297520661157025, + "acc_norm_stderr": 0.04519082021319773 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40131578947368424, + "acc_stderr": 0.039889037033362836, + "acc_norm": 0.40131578947368424, + "acc_norm_stderr": 0.039889037033362836 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2875816993464052, + "acc_stderr": 0.018311653053648222, + "acc_norm": 0.2875816993464052, + "acc_norm_stderr": 0.018311653053648222 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.23404255319148937, + "acc_stderr": 0.025257861359432397, + "acc_norm": 0.23404255319148937, + "acc_norm_stderr": 0.025257861359432397 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.29464285714285715, + "acc_stderr": 0.04327040932578732, + "acc_norm": 0.29464285714285715, + "acc_norm_stderr": 0.04327040932578732 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.03275773486100999, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.03275773486100999 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2446927374301676, + "acc_stderr": 
0.014378169884098426, + "acc_norm": 0.2446927374301676, + "acc_norm_stderr": 0.014378169884098426 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384739, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384739 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.1948529411764706, + "acc_stderr": 0.02406059942348742, + "acc_norm": 0.1948529411764706, + "acc_norm_stderr": 0.02406059942348742 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.24489795918367346, + "acc_stderr": 0.027529637440174927, + "acc_norm": 0.24489795918367346, + "acc_norm_stderr": 0.027529637440174927 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.270042194092827, + "acc_stderr": 0.028900721906293426, + "acc_norm": 0.270042194092827, + "acc_norm_stderr": 0.028900721906293426 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2620599739243807, + "acc_stderr": 0.011231552795890394, + "acc_norm": 0.2620599739243807, + "acc_norm_stderr": 0.011231552795890394 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2696078431372549, + "acc_stderr": 0.03114557065948678, + "acc_norm": 0.2696078431372549, + "acc_norm_stderr": 0.03114557065948678 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.24848484848484848, + "acc_stderr": 0.03374402644139405, + "acc_norm": 0.24848484848484848, + "acc_norm_stderr": 0.03374402644139405 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2423500611995104, + "mc1_stderr": 0.015000674373570342, + "mc2": 0.502230955672644, + "mc2_stderr": 0.017048304732843935 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.10271546635182999, + "acc_stderr": 0.010437532255238496, + "acc_norm": 0.3695395513577332, + "acc_norm_stderr": 0.01659488340568542 + } + }, + "versions": { + "all": 0, + 
"harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + 
"harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "4yo1/llama3-pre1-pre2-ds-ins2-lora3", + "model_sha": "e36e97d7503ad2c1d406edd928de720fa514d1ef", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/4yo1/llama3-pre1-pre2-ds-lora3-mergkit-base/result_2024-08-06 05:00:04.json b/4yo1/llama3-pre1-pre2-ds-lora3-mergkit-base/result_2024-08-06 05:00:04.json new file mode 100644 index 0000000000000000000000000000000000000000..72d86d771b86f66b7a7791643042042593515424 --- /dev/null +++ b/4yo1/llama3-pre1-pre2-ds-lora3-mergkit-base/result_2024-08-06 05:00:04.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2090443686006826, + "acc_stderr": 0.011882746987406453, + "acc_norm": 0.2636518771331058, + "acc_norm_stderr": 0.012875929151297054 + }, + "harness|ko_hellaswag|10": { + "acc": 0.2555267874925314, + "acc_stderr": 0.004352655263682342, + "acc_norm": 0.27126070503883687, + "acc_norm_stderr": 0.004437016600956915 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.42105263157894735, + "acc_stderr": 0.03786720706234215, + "acc_norm": 0.42105263157894735, + "acc_norm_stderr": 0.03786720706234215 + }, + 
"harness|ko_mmlu_management|5": { + "acc": 0.32038834951456313, + "acc_stderr": 0.046202840822800406, + "acc_norm": 0.32038834951456313, + "acc_norm_stderr": 0.046202840822800406 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2937420178799489, + "acc_stderr": 0.01628775938849167, + "acc_norm": 0.2937420178799489, + "acc_norm_stderr": 0.01628775938849167 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2814814814814815, + "acc_stderr": 0.03885004245800254, + "acc_norm": 0.2814814814814815, + "acc_norm_stderr": 0.03885004245800254 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.26382978723404255, + "acc_stderr": 0.028809989854102967, + "acc_norm": 0.26382978723404255, + "acc_norm_stderr": 0.028809989854102967 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.26506024096385544, + "acc_stderr": 0.03436024037944966, + "acc_norm": 0.26506024096385544, + "acc_norm_stderr": 0.03436024037944966 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.33762057877813506, + "acc_stderr": 0.026858825879488558, + "acc_norm": 0.33762057877813506, + "acc_norm_stderr": 0.026858825879488558 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.273542600896861, + "acc_stderr": 0.029918586707798834, + "acc_norm": 0.273542600896861, + "acc_norm_stderr": 0.029918586707798834 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.31297709923664124, + "acc_stderr": 0.04066962905677697, + "acc_norm": 0.31297709923664124, + "acc_norm_stderr": 0.04066962905677697 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3383838383838384, + "acc_stderr": 0.03371124142626302, + "acc_norm": 0.3383838383838384, + "acc_norm_stderr": 0.03371124142626302 + }, 
+ "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4206896551724138, + "acc_stderr": 0.0411391498118926, + "acc_norm": 0.4206896551724138, + "acc_norm_stderr": 0.0411391498118926 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.30392156862745096, + "acc_stderr": 0.04576665403207763, + "acc_norm": 0.30392156862745096, + "acc_norm_stderr": 0.04576665403207763 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3907563025210084, + "acc_stderr": 0.031693802357129965, + "acc_norm": 0.3907563025210084, + "acc_norm_stderr": 0.031693802357129965 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3564102564102564, + "acc_stderr": 0.0242831405294673, + "acc_norm": 0.3564102564102564, + "acc_norm_stderr": 0.0242831405294673 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.047609522856952344, + "acc_norm": 0.34, + "acc_norm_stderr": 0.047609522856952344 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.04414343666854933, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.04414343666854933 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.35467980295566504, + "acc_stderr": 0.03366124489051449, + "acc_norm": 0.35467980295566504, + "acc_norm_stderr": 0.03366124489051449 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.34516129032258064, + "acc_stderr": 0.02704574657353432, + "acc_norm": 0.34516129032258064, + "acc_norm_stderr": 0.02704574657353432 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.28205128205128205, + "acc_stderr": 0.029480360549541194, + "acc_norm": 0.28205128205128205, + "acc_norm_stderr": 0.029480360549541194 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3018867924528302, + "acc_stderr": 0.028254200344438662, + "acc_norm": 0.3018867924528302, 
+ "acc_norm_stderr": 0.028254200344438662 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.23636363636363636, + "acc_stderr": 0.04069306319721376, + "acc_norm": 0.23636363636363636, + "acc_norm_stderr": 0.04069306319721376 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.31851851851851853, + "acc_stderr": 0.02840653309060846, + "acc_norm": 0.31851851851851853, + "acc_norm_stderr": 0.02840653309060846 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3509933774834437, + "acc_stderr": 0.03896981964257375, + "acc_norm": 0.3509933774834437, + "acc_norm_stderr": 0.03896981964257375 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.32338308457711445, + "acc_stderr": 0.03307615947979033, + "acc_norm": 0.32338308457711445, + "acc_norm_stderr": 0.03307615947979033 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3930635838150289, + "acc_stderr": 0.03724249595817731, + "acc_norm": 0.3930635838150289, + "acc_norm_stderr": 0.03724249595817731 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.023809523809523864, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.023809523809523864 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2569444444444444, + "acc_stderr": 0.03653946969442099, + "acc_norm": 0.2569444444444444, + "acc_norm_stderr": 0.03653946969442099 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.31, + "acc_stderr": 0.046482319871173156, + "acc_norm": 0.31, + "acc_norm_stderr": 0.046482319871173156 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.36127167630057805, + "acc_stderr": 0.02586220185227789, + "acc_norm": 0.36127167630057805, + "acc_norm_stderr": 0.02586220185227789 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.26993865030674846, + "acc_stderr": 
0.0348782516849789, + "acc_norm": 0.26993865030674846, + "acc_norm_stderr": 0.0348782516849789 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.26851851851851855, + "acc_stderr": 0.02465968518596729, + "acc_norm": 0.26851851851851855, + "acc_norm_stderr": 0.02465968518596729 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.27461139896373055, + "acc_stderr": 0.03221024508041153, + "acc_norm": 0.27461139896373055, + "acc_norm_stderr": 0.03221024508041153 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.04227054451232199, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.04227054451232199 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.30825688073394497, + "acc_stderr": 0.019798366698367268, + "acc_norm": 0.30825688073394497, + "acc_norm_stderr": 0.019798366698367268 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.23015873015873015, + "acc_stderr": 0.037649508797906045, + "acc_norm": 0.23015873015873015, + "acc_norm_stderr": 0.037649508797906045 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.369281045751634, + "acc_stderr": 0.027634176689602663, + "acc_norm": 0.369281045751634, + "acc_norm_stderr": 0.027634176689602663 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.47107438016528924, + "acc_stderr": 0.04556710331269498, + "acc_norm": 0.47107438016528924, + "acc_norm_stderr": 0.04556710331269498 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.35526315789473684, + "acc_stderr": 0.03894734487013316, + "acc_norm": 0.35526315789473684, + "acc_norm_stderr": 0.03894734487013316 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 
0.25163398692810457, + "acc_stderr": 0.017555818091322273, + "acc_norm": 0.25163398692810457, + "acc_norm_stderr": 0.017555818091322273 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.25886524822695034, + "acc_stderr": 0.026129572527180848, + "acc_norm": 0.25886524822695034, + "acc_norm_stderr": 0.026129572527180848 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.26785714285714285, + "acc_stderr": 0.04203277291467762, + "acc_norm": 0.26785714285714285, + "acc_norm_stderr": 0.04203277291467762 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4351851851851852, + "acc_stderr": 0.033812000056435254, + "acc_norm": 0.4351851851851852, + "acc_norm_stderr": 0.033812000056435254 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27262569832402234, + "acc_stderr": 0.014893391735249608, + "acc_norm": 0.27262569832402234, + "acc_norm_stderr": 0.014893391735249608 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.375, + "acc_stderr": 0.029408372932278746, + "acc_norm": 0.375, + "acc_norm_stderr": 0.029408372932278746 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3142857142857143, + "acc_stderr": 0.029719329422417475, + "acc_norm": 0.3142857142857143, + "acc_norm_stderr": 0.029719329422417475 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.3037974683544304, + "acc_stderr": 0.029936696387138605, + "acc_norm": 0.3037974683544304, + "acc_norm_stderr": 0.029936696387138605 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2835723598435463, + "acc_stderr": 0.011511900775968318, + "acc_norm": 0.2835723598435463, + "acc_norm_stderr": 
0.011511900775968318 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.30392156862745096, + "acc_stderr": 0.03228210387037893, + "acc_norm": 0.30392156862745096, + "acc_norm_stderr": 0.03228210387037893 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2545454545454545, + "acc_stderr": 0.03401506715249039, + "acc_norm": 0.2545454545454545, + "acc_norm_stderr": 0.03401506715249039 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.25458996328029376, + "mc1_stderr": 0.015250117079156482, + "mc2": 0.5010694048297774, + "mc2_stderr": 0.016769441079303827 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.14994096812278632, + "acc_stderr": 0.012274378656217326, + "acc_norm": 0.29634002361275086, + "acc_norm_stderr": 0.015699701628594232 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + 
"harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "4yo1/llama3-pre1-pre2-ds-lora3-mergkit-base", + "model_sha": "128792e6e9c8774b61fc763735a99f7faa6aad41", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/4yo1/llama3-pre1-pre2-ds-lora3/result_2024-07-26 21:17:55.json b/4yo1/llama3-pre1-pre2-ds-lora3/result_2024-07-26 21:17:55.json new file mode 100644 index 
0000000000000000000000000000000000000000..6ad3fbf9d71c34fa1f2556601bffaf35d1107523 --- /dev/null +++ b/4yo1/llama3-pre1-pre2-ds-lora3/result_2024-07-26 21:17:55.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.20392491467576793, + "acc_stderr": 0.011774262478702256, + "acc_norm": 0.25853242320819114, + "acc_norm_stderr": 0.012794553754288666 + }, + "harness|ko_hellaswag|10": { + "acc": 0.2531368253335989, + "acc_stderr": 0.004339200363454499, + "acc_norm": 0.253734315873332, + "acc_norm_stderr": 0.004342580277662754 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.38596491228070173, + "acc_stderr": 0.03733756969066164, + "acc_norm": 0.38596491228070173, + "acc_norm_stderr": 0.03733756969066164 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2815533980582524, + "acc_stderr": 0.04453254836326468, + "acc_norm": 0.2815533980582524, + "acc_norm_stderr": 0.04453254836326468 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.30268199233716475, + "acc_stderr": 0.016428781581749364, + "acc_norm": 0.30268199233716475, + "acc_norm_stderr": 0.016428781581749364 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37777777777777777, + "acc_stderr": 0.04188307537595852, + "acc_norm": 0.37777777777777777, + "acc_norm_stderr": 0.04188307537595852 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.24680851063829787, + "acc_stderr": 0.02818544130123409, + "acc_norm": 0.24680851063829787, + "acc_norm_stderr": 0.02818544130123409 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3072289156626506, + "acc_stderr": 0.035915667978246635, + "acc_norm": 0.3072289156626506, + "acc_norm_stderr": 0.035915667978246635 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3536977491961415, + "acc_stderr": 0.027155208103200854, + "acc_norm": 0.3536977491961415, + "acc_norm_stderr": 
0.027155208103200854 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.2600896860986547, + "acc_stderr": 0.02944249558585747, + "acc_norm": 0.2600896860986547, + "acc_norm_stderr": 0.02944249558585747 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.35877862595419846, + "acc_stderr": 0.04206739313864908, + "acc_norm": 0.35877862595419846, + "acc_norm_stderr": 0.04206739313864908 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.2878787878787879, + "acc_stderr": 0.03225883512300993, + "acc_norm": 0.2878787878787879, + "acc_norm_stderr": 0.03225883512300993 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3586206896551724, + "acc_stderr": 0.039966295748767186, + "acc_norm": 0.3586206896551724, + "acc_norm_stderr": 0.039966295748767186 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.042801058373643966, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.042801058373643966 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.28991596638655465, + "acc_stderr": 0.029472485833136084, + "acc_norm": 0.28991596638655465, + "acc_norm_stderr": 0.029472485833136084 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.022421273612923703, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.022421273612923703 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.044531975073749834, + "acc_norm": 
0.3055555555555556, + "acc_norm_stderr": 0.044531975073749834 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2955665024630542, + "acc_stderr": 0.032104944337514575, + "acc_norm": 0.2955665024630542, + "acc_norm_stderr": 0.032104944337514575 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3, + "acc_stderr": 0.026069362295335137, + "acc_norm": 0.3, + "acc_norm_stderr": 0.026069362295335137 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2948717948717949, + "acc_stderr": 0.02987257770889114, + "acc_norm": 0.2948717948717949, + "acc_norm_stderr": 0.02987257770889114 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.26037735849056604, + "acc_stderr": 0.027008766090708076, + "acc_norm": 0.26037735849056604, + "acc_norm_stderr": 0.027008766090708076 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03955932861795833, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03955932861795833 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.29259259259259257, + "acc_stderr": 0.02773896963217609, + "acc_norm": 0.29259259259259257, + "acc_norm_stderr": 0.02773896963217609 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.03757949922943343, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.03757949922943343 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.2736318407960199, + "acc_stderr": 0.03152439186555402, + "acc_norm": 0.2736318407960199, + "acc_norm_stderr": 0.03152439186555402 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2832369942196532, + "acc_stderr": 0.03435568056047875, + "acc_norm": 0.2832369942196532, + "acc_norm_stderr": 0.03435568056047875 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.291005291005291, + "acc_stderr": 0.02339382650048488, + "acc_norm": 0.291005291005291, + "acc_norm_stderr": 0.02339382650048488 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 
0.22916666666666666, + "acc_stderr": 0.035146974678623884, + "acc_norm": 0.22916666666666666, + "acc_norm_stderr": 0.035146974678623884 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.33815028901734107, + "acc_stderr": 0.025469770149400175, + "acc_norm": 0.33815028901734107, + "acc_norm_stderr": 0.025469770149400175 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3128834355828221, + "acc_stderr": 0.03642914578292404, + "acc_norm": 0.3128834355828221, + "acc_norm_stderr": 0.03642914578292404 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2839506172839506, + "acc_stderr": 0.02508947852376513, + "acc_norm": 0.2839506172839506, + "acc_norm_stderr": 0.02508947852376513 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.24352331606217617, + "acc_stderr": 0.030975436386845436, + "acc_norm": 0.24352331606217617, + "acc_norm_stderr": 0.030975436386845436 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.04096985139843671, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.04096985139843671 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.29357798165137616, + "acc_stderr": 0.019525151122639663, + "acc_norm": 0.29357798165137616, + "acc_norm_stderr": 0.019525151122639663 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.15873015873015872, + "acc_stderr": 0.03268454013011743, + "acc_norm": 0.15873015873015872, + "acc_norm_stderr": 0.03268454013011743 + }, + 
"harness|ko_mmlu_nutrition|5": { + "acc": 0.3006535947712418, + "acc_stderr": 0.026256053835718964, + "acc_norm": 0.3006535947712418, + "acc_norm_stderr": 0.026256053835718964 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909281, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909281 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.4214876033057851, + "acc_stderr": 0.045077322787750944, + "acc_norm": 0.4214876033057851, + "acc_norm_stderr": 0.045077322787750944 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40131578947368424, + "acc_stderr": 0.039889037033362836, + "acc_norm": 0.40131578947368424, + "acc_norm_stderr": 0.039889037033362836 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2875816993464052, + "acc_stderr": 0.018311653053648222, + "acc_norm": 0.2875816993464052, + "acc_norm_stderr": 0.018311653053648222 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.24113475177304963, + "acc_stderr": 0.025518731049537786, + "acc_norm": 0.24113475177304963, + "acc_norm_stderr": 0.025518731049537786 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.30357142857142855, + "acc_stderr": 0.04364226155841044, + "acc_norm": 0.30357142857142855, + "acc_norm_stderr": 0.04364226155841044 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3425925925925926, + "acc_stderr": 0.03236585252602158, + "acc_norm": 0.3425925925925926, + "acc_norm_stderr": 0.03236585252602158 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2670391061452514, + "acc_stderr": 0.01479650262256255, + "acc_norm": 0.2670391061452514, + "acc_norm_stderr": 0.01479650262256255 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 
0.04760952285695236 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.19117647058823528, + "acc_stderr": 0.023886881922440355, + "acc_norm": 0.19117647058823528, + "acc_norm_stderr": 0.023886881922440355 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.24897959183673468, + "acc_stderr": 0.027682979522960224, + "acc_norm": 0.24897959183673468, + "acc_norm_stderr": 0.027682979522960224 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.27848101265822783, + "acc_stderr": 0.029178682304842555, + "acc_norm": 0.27848101265822783, + "acc_norm_stderr": 0.029178682304842555 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2627118644067797, + "acc_stderr": 0.011240545514995664, + "acc_norm": 0.2627118644067797, + "acc_norm_stderr": 0.011240545514995664 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.25, + "acc_stderr": 0.03039153369274154, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03039153369274154 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.23636363636363636, + "acc_stderr": 0.03317505930009179, + "acc_norm": 0.23636363636363636, + "acc_norm_stderr": 0.03317505930009179 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24969400244798043, + "mc1_stderr": 0.015152286907148125, + "mc2": 0.506116595507612, + "mc2_stderr": 0.017054384753311957 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.09917355371900827, + "acc_stderr": 0.010276218268084948, + "acc_norm": 0.3659976387249115, + "acc_norm_stderr": 0.01656148966489569 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + 
"harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + 
"harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "4yo1/llama3-pre1-pre2-ds-lora3", + "model_sha": "1a0c007ab818dd0b388e73fe894f1b3a0ebe592d", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/4yo1/llama3-pre1-pre2-ins1-lora3/result_2024-07-29 01:26:40.json b/4yo1/llama3-pre1-pre2-ins1-lora3/result_2024-07-29 01:26:40.json new file mode 100644 index 0000000000000000000000000000000000000000..6aa5a83976242434de365b868de6aed0d906fd55 --- /dev/null +++ b/4yo1/llama3-pre1-pre2-ins1-lora3/result_2024-07-29 01:26:40.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.20392491467576793, + "acc_stderr": 0.011774262478702256, + "acc_norm": 0.2593856655290102, + "acc_norm_stderr": 0.01280827357392709 + }, + "harness|ko_hellaswag|10": { + "acc": 0.2528380800637323, + "acc_stderr": 0.004337506344899919, + "acc_norm": 0.25403306114319857, + "acc_norm_stderr": 0.004344266179634921 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.3684210526315789, + "acc_stderr": 0.036996580176568775, + "acc_norm": 0.3684210526315789, + "acc_norm_stderr": 0.036996580176568775 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2815533980582524, + "acc_stderr": 0.04453254836326468, + "acc_norm": 0.2815533980582524, + "acc_norm_stderr": 0.04453254836326468 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3065134099616858, + "acc_stderr": 0.01648695289304151, + "acc_norm": 0.3065134099616858, + "acc_norm_stderr": 0.01648695289304151 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37777777777777777, + "acc_stderr": 0.04188307537595852, + 
"acc_norm": 0.37777777777777777, + "acc_norm_stderr": 0.04188307537595852 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.24680851063829787, + "acc_stderr": 0.02818544130123409, + "acc_norm": 0.24680851063829787, + "acc_norm_stderr": 0.02818544130123409 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3132530120481928, + "acc_stderr": 0.03610805018031024, + "acc_norm": 0.3132530120481928, + "acc_norm_stderr": 0.03610805018031024 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3536977491961415, + "acc_stderr": 0.027155208103200854, + "acc_norm": 0.3536977491961415, + "acc_norm_stderr": 0.027155208103200854 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.24663677130044842, + "acc_stderr": 0.028930413120910877, + "acc_norm": 0.24663677130044842, + "acc_norm_stderr": 0.028930413120910877 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.35877862595419846, + "acc_stderr": 0.04206739313864908, + "acc_norm": 0.35877862595419846, + "acc_norm_stderr": 0.04206739313864908 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.29292929292929293, + "acc_stderr": 0.03242497958178817, + "acc_norm": 0.29292929292929293, + "acc_norm_stderr": 0.03242497958178817 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3586206896551724, + "acc_stderr": 0.039966295748767186, + "acc_norm": 0.3586206896551724, + "acc_norm_stderr": 0.039966295748767186 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.042801058373643966, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.042801058373643966 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.29411764705882354, + 
"acc_stderr": 0.029597329730978093, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.029597329730978093 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.0224212736129237, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.0224212736129237 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.04414343666854933, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.04414343666854933 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.29064039408866993, + "acc_stderr": 0.031947400722655395, + "acc_norm": 0.29064039408866993, + "acc_norm_stderr": 0.031947400722655395 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3, + "acc_stderr": 0.026069362295335137, + "acc_norm": 0.3, + "acc_norm_stderr": 0.026069362295335137 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2863247863247863, + "acc_stderr": 0.029614323690456648, + "acc_norm": 0.2863247863247863, + "acc_norm_stderr": 0.029614323690456648 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.26037735849056604, + "acc_stderr": 0.027008766090708076, + "acc_norm": 0.26037735849056604, + "acc_norm_stderr": 0.027008766090708076 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03955932861795833, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03955932861795833 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.29259259259259257, + "acc_stderr": 0.02773896963217609, + "acc_norm": 0.29259259259259257, + "acc_norm_stderr": 0.02773896963217609 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 
0.31125827814569534, + "acc_stderr": 0.03780445850526732, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.03780445850526732 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.2736318407960199, + "acc_stderr": 0.03152439186555402, + "acc_norm": 0.2736318407960199, + "acc_norm_stderr": 0.03152439186555402 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.28901734104046245, + "acc_stderr": 0.03456425745086999, + "acc_norm": 0.28901734104046245, + "acc_norm_stderr": 0.03456425745086999 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.28835978835978837, + "acc_stderr": 0.023330654054535903, + "acc_norm": 0.28835978835978837, + "acc_norm_stderr": 0.023330654054535903 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.22916666666666666, + "acc_stderr": 0.035146974678623884, + "acc_norm": 0.22916666666666666, + "acc_norm_stderr": 0.035146974678623884 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932269, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932269 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.33815028901734107, + "acc_stderr": 0.025469770149400175, + "acc_norm": 0.33815028901734107, + "acc_norm_stderr": 0.025469770149400175 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3128834355828221, + "acc_stderr": 0.03642914578292404, + "acc_norm": 0.3128834355828221, + "acc_norm_stderr": 0.03642914578292404 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2808641975308642, + "acc_stderr": 0.025006469755799208, + "acc_norm": 0.2808641975308642, + "acc_norm_stderr": 0.025006469755799208 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816507, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816507 + }, + 
"harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.23316062176165803, + "acc_stderr": 0.03051611137147601, + "acc_norm": 0.23316062176165803, + "acc_norm_stderr": 0.03051611137147601 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.04096985139843671, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.04096985139843671 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.28807339449541286, + "acc_stderr": 0.019416445892636025, + "acc_norm": 0.28807339449541286, + "acc_norm_stderr": 0.019416445892636025 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.16666666666666666, + "acc_stderr": 0.03333333333333338, + "acc_norm": 0.16666666666666666, + "acc_norm_stderr": 0.03333333333333338 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3006535947712418, + "acc_stderr": 0.026256053835718964, + "acc_norm": 0.3006535947712418, + "acc_norm_stderr": 0.026256053835718964 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909281, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909281 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.4214876033057851, + "acc_stderr": 0.045077322787750944, + "acc_norm": 0.4214876033057851, + "acc_norm_stderr": 0.045077322787750944 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40789473684210525, + "acc_stderr": 0.03999309712777471, + "acc_norm": 0.40789473684210525, + "acc_norm_stderr": 0.03999309712777471 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2908496732026144, + "acc_stderr": 0.018373116915903966, + "acc_norm": 0.2908496732026144, + "acc_norm_stderr": 0.018373116915903966 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2375886524822695, + "acc_stderr": 0.025389512552729903, + "acc_norm": 0.2375886524822695, + "acc_norm_stderr": 0.025389512552729903 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.30357142857142855, + "acc_stderr": 0.04364226155841044, + 
"acc_norm": 0.30357142857142855, + "acc_norm_stderr": 0.04364226155841044 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3425925925925926, + "acc_stderr": 0.03236585252602158, + "acc_norm": 0.3425925925925926, + "acc_norm_stderr": 0.03236585252602158 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.264804469273743, + "acc_stderr": 0.014756906483260664, + "acc_norm": 0.264804469273743, + "acc_norm_stderr": 0.014756906483260664 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.19852941176470587, + "acc_stderr": 0.024231013370541107, + "acc_norm": 0.19852941176470587, + "acc_norm_stderr": 0.024231013370541107 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.24897959183673468, + "acc_stderr": 0.027682979522960224, + "acc_norm": 0.24897959183673468, + "acc_norm_stderr": 0.027682979522960224 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.270042194092827, + "acc_stderr": 0.028900721906293433, + "acc_norm": 0.270042194092827, + "acc_norm_stderr": 0.028900721906293433 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.26010430247718386, + "acc_stderr": 0.01120438288782385, + "acc_norm": 0.26010430247718386, + "acc_norm_stderr": 0.01120438288782385 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.25, + "acc_stderr": 0.03039153369274154, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03039153369274154 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.23636363636363636, + "acc_stderr": 0.03317505930009179, + "acc_norm": 0.23636363636363636, + "acc_norm_stderr": 0.03317505930009179 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 
0.24969400244798043, + "mc1_stderr": 0.015152286907148125, + "mc2": 0.506140069993186, + "mc2_stderr": 0.017052404281409264 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.09799291617473435, + "acc_stderr": 0.010221558855214861, + "acc_norm": 0.3659976387249115, + "acc_norm_stderr": 0.01656148966489569 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "4yo1/llama3-pre1-pre2-ins1-lora3", + "model_sha": "b2a24d122d994c15a523a22ac607948564990154", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/4yo1/llama3-pre1-pre2-inst3-ds-lora3/result_2024-08-06 05:33:47.json b/4yo1/llama3-pre1-pre2-inst3-ds-lora3/result_2024-08-06 05:33:47.json new file mode 100644 index 0000000000000000000000000000000000000000..0dfeabdd8e8d6212350b5bdff80aa3bf34c41770 --- /dev/null +++ b/4yo1/llama3-pre1-pre2-inst3-ds-lora3/result_2024-08-06 05:33:47.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.19795221843003413, + "acc_stderr": 0.011643990971573401, + "acc_norm": 0.24744027303754265, + "acc_norm_stderr": 0.01261035266329267 + }, + "harness|ko_hellaswag|10": { + "acc": 
0.255327623979287, + "acc_stderr": 0.004351540603988567, + "acc_norm": 0.2583150766779526, + "acc_norm_stderr": 0.004368135676213556 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.39766081871345027, + "acc_stderr": 0.03753638955761691, + "acc_norm": 0.39766081871345027, + "acc_norm_stderr": 0.03753638955761691 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.32038834951456313, + "acc_stderr": 0.046202840822800406, + "acc_norm": 0.32038834951456313, + "acc_norm_stderr": 0.046202840822800406 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3103448275862069, + "acc_stderr": 0.016543785026048315, + "acc_norm": 0.3103448275862069, + "acc_norm_stderr": 0.016543785026048315 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2518518518518518, + "acc_stderr": 0.037498507091740234, + "acc_norm": 0.2518518518518518, + "acc_norm_stderr": 0.037498507091740234 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2765957446808511, + "acc_stderr": 0.02924188386962882, + "acc_norm": 0.2765957446808511, + "acc_norm_stderr": 0.02924188386962882 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.29518072289156627, + "acc_stderr": 0.03550920185689629, + "acc_norm": 0.29518072289156627, + "acc_norm_stderr": 0.03550920185689629 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2861736334405145, + "acc_stderr": 0.025670259242188943, + "acc_norm": 0.2861736334405145, + "acc_norm_stderr": 0.025670259242188943 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.17937219730941703, + "acc_stderr": 0.025749819569192794, + "acc_norm": 0.17937219730941703, + "acc_norm_stderr": 0.025749819569192794 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.33587786259541985, + "acc_stderr": 0.041423137719966634, + "acc_norm": 0.33587786259541985, + "acc_norm_stderr": 0.041423137719966634 + }, + 
"harness|ko_mmlu_medical_genetics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3181818181818182, + "acc_stderr": 0.03318477333845331, + "acc_norm": 0.3181818181818182, + "acc_norm_stderr": 0.03318477333845331 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.04104269211806232, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.04104269211806232 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.19607843137254902, + "acc_stderr": 0.03950581861179961, + "acc_norm": 0.19607843137254902, + "acc_norm_stderr": 0.03950581861179961 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.031124619309328177, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.031124619309328177 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.33589743589743587, + "acc_stderr": 0.023946724741563976, + "acc_norm": 0.33589743589743587, + "acc_norm_stderr": 0.023946724741563976 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720683, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720683 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.24074074074074073, + "acc_stderr": 0.04133119440243838, + "acc_norm": 0.24074074074074073, + "acc_norm_stderr": 0.04133119440243838 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.33004926108374383, + "acc_stderr": 0.03308530426228257, + "acc_norm": 0.33004926108374383, + "acc_norm_stderr": 0.03308530426228257 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.31290322580645163, + "acc_stderr": 0.02637756702864586, + "acc_norm": 0.31290322580645163, + 
"acc_norm_stderr": 0.02637756702864586 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2863247863247863, + "acc_stderr": 0.029614323690456648, + "acc_norm": 0.2863247863247863, + "acc_norm_stderr": 0.029614323690456648 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.27547169811320754, + "acc_stderr": 0.027495663683724043, + "acc_norm": 0.27547169811320754, + "acc_norm_stderr": 0.027495663683724043 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.22727272727272727, + "acc_stderr": 0.04013964554072773, + "acc_norm": 0.22727272727272727, + "acc_norm_stderr": 0.04013964554072773 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.28888888888888886, + "acc_stderr": 0.027634907264178544, + "acc_norm": 0.28888888888888886, + "acc_norm_stderr": 0.027634907264178544 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + "acc_stderr": 0.03734535676787198, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.03734535676787198 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.3482587064676617, + "acc_stderr": 0.033687874661154596, + "acc_norm": 0.3482587064676617, + "acc_norm_stderr": 0.033687874661154596 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.27167630057803466, + "acc_stderr": 0.03391750322321659, + "acc_norm": 0.27167630057803466, + "acc_norm_stderr": 0.03391750322321659 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2671957671957672, + "acc_stderr": 0.02278967314577656, + "acc_norm": 0.2671957671957672, + "acc_norm_stderr": 0.02278967314577656 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2361111111111111, + "acc_stderr": 0.03551446610810826, + "acc_norm": 0.2361111111111111, + "acc_norm_stderr": 0.03551446610810826 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.35, + "acc_stderr": 
0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2630057803468208, + "acc_stderr": 0.023703099525258165, + "acc_norm": 0.2630057803468208, + "acc_norm_stderr": 0.023703099525258165 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3128834355828221, + "acc_stderr": 0.03642914578292406, + "acc_norm": 0.3128834355828221, + "acc_norm_stderr": 0.03642914578292406 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.025842248700902168, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.025842248700902168 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.2694300518134715, + "acc_stderr": 0.032018671228777947, + "acc_norm": 0.2694300518134715, + "acc_norm_stderr": 0.032018671228777947 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.04049339297748141, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.04049339297748141 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3137614678899083, + "acc_stderr": 0.01989472334146913, + "acc_norm": 0.3137614678899083, + "acc_norm_stderr": 0.01989472334146913 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.25396825396825395, + "acc_stderr": 0.03893259610604675, + "acc_norm": 0.25396825396825395, + "acc_norm_stderr": 0.03893259610604675 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.026568921015457162, + "acc_norm": 0.3137254901960784, + "acc_norm_stderr": 0.026568921015457162 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 
0.39669421487603307, + "acc_stderr": 0.044658697805310094, + "acc_norm": 0.39669421487603307, + "acc_norm_stderr": 0.044658697805310094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.35526315789473684, + "acc_stderr": 0.03894734487013316, + "acc_norm": 0.35526315789473684, + "acc_norm_stderr": 0.03894734487013316 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2369281045751634, + "acc_stderr": 0.01720166216978978, + "acc_norm": 0.2369281045751634, + "acc_norm_stderr": 0.01720166216978978 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2695035460992908, + "acc_stderr": 0.026469036818590624, + "acc_norm": 0.2695035460992908, + "acc_norm_stderr": 0.026469036818590624 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.23214285714285715, + "acc_stderr": 0.04007341809755807, + "acc_norm": 0.23214285714285715, + "acc_norm_stderr": 0.04007341809755807 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4305555555555556, + "acc_stderr": 0.03376922151252335, + "acc_norm": 0.4305555555555556, + "acc_norm_stderr": 0.03376922151252335 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.26927374301675977, + "acc_stderr": 0.014835616582882601, + "acc_norm": 0.26927374301675977, + "acc_norm_stderr": 0.014835616582882601 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.43014705882352944, + "acc_stderr": 0.030074971917302875, + "acc_norm": 0.43014705882352944, + "acc_norm_stderr": 0.030074971917302875 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.35918367346938773, + "acc_stderr": 0.030713560455108493, + "acc_norm": 0.35918367346938773, + "acc_norm_stderr": 
0.030713560455108493 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.22784810126582278, + "acc_stderr": 0.02730348459906944, + "acc_norm": 0.22784810126582278, + "acc_norm_stderr": 0.02730348459906944 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.24837027379400262, + "acc_stderr": 0.011035212598034503, + "acc_norm": 0.24837027379400262, + "acc_norm_stderr": 0.011035212598034503 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.23039215686274508, + "acc_stderr": 0.029554292605695063, + "acc_norm": 0.23039215686274508, + "acc_norm_stderr": 0.029554292605695063 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2, + "acc_stderr": 0.03123475237772118, + "acc_norm": 0.2, + "acc_norm_stderr": 0.03123475237772118 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2729498164014688, + "mc1_stderr": 0.015594753632006526, + "mc2": 0.5147520134989033, + "mc2_stderr": 0.016916247986138434 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.10743801652892562, + "acc_stderr": 0.01064665060891188, + "acc_norm": 0.31641086186540734, + "acc_norm_stderr": 0.015989617951065477 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 
1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "4yo1/llama3-pre1-pre2-inst3-ds-lora3", + "model_sha": "d5fbcf408c849c5efd33e1dd1c36f9f583c9433b", + "model_dtype": 
"torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/4yo1/llama3-pre1-pre2-inst3-lora3-mergkit-base/result_2024-08-06 07:19:39.json b/4yo1/llama3-pre1-pre2-inst3-lora3-mergkit-base/result_2024-08-06 07:19:39.json new file mode 100644 index 0000000000000000000000000000000000000000..3cc211db5c2fc093b31507a4fea97c50724abe80 --- /dev/null +++ b/4yo1/llama3-pre1-pre2-inst3-lora3-mergkit-base/result_2024-08-06 07:19:39.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2030716723549488, + "acc_stderr": 0.011755899303705582, + "acc_norm": 0.2645051194539249, + "acc_norm_stderr": 0.012889272949313368 + }, + "harness|ko_hellaswag|10": { + "acc": 0.2574188408683529, + "acc_stderr": 0.004363185172047173, + "acc_norm": 0.2765385381398128, + "acc_norm_stderr": 0.004463721071319079 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.43859649122807015, + "acc_stderr": 0.038057975055904594, + "acc_norm": 0.43859649122807015, + "acc_norm_stderr": 0.038057975055904594 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.33980582524271846, + "acc_stderr": 0.04689765937278136, + "acc_norm": 0.33980582524271846, + "acc_norm_stderr": 0.04689765937278136 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3243933588761175, + "acc_stderr": 0.016740929047162702, + "acc_norm": 0.3243933588761175, + "acc_norm_stderr": 0.016740929047162702 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.03820169914517905, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.03820169914517905 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.32340425531914896, + "acc_stderr": 0.030579442773610334, + "acc_norm": 
0.32340425531914896, + "acc_norm_stderr": 0.030579442773610334 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.2710843373493976, + "acc_stderr": 0.03460579907553026, + "acc_norm": 0.2710843373493976, + "acc_norm_stderr": 0.03460579907553026 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3183279742765273, + "acc_stderr": 0.02645722506781102, + "acc_norm": 0.3183279742765273, + "acc_norm_stderr": 0.02645722506781102 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.26905829596412556, + "acc_stderr": 0.029763779406874975, + "acc_norm": 0.26905829596412556, + "acc_norm_stderr": 0.029763779406874975 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3816793893129771, + "acc_stderr": 0.04260735157644561, + "acc_norm": 0.3816793893129771, + "acc_norm_stderr": 0.04260735157644561 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3181818181818182, + "acc_stderr": 0.03318477333845331, + "acc_norm": 0.3181818181818182, + "acc_norm_stderr": 0.03318477333845331 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4689655172413793, + "acc_stderr": 0.04158632762097828, + "acc_norm": 0.4689655172413793, + "acc_norm_stderr": 0.04158632762097828 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.043364327079931785, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.043364327079931785 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.36554621848739494, + "acc_stderr": 0.03128217706368461, + "acc_norm": 0.36554621848739494, + "acc_norm_stderr": 0.03128217706368461 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.32564102564102565, + "acc_stderr": 0.02375966576741229, + "acc_norm": 0.32564102564102565, + "acc_norm_stderr": 0.02375966576741229 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.39, + 
"acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3425925925925926, + "acc_stderr": 0.04587904741301812, + "acc_norm": 0.3425925925925926, + "acc_norm_stderr": 0.04587904741301812 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.27586206896551724, + "acc_stderr": 0.03144712581678242, + "acc_norm": 0.27586206896551724, + "acc_norm_stderr": 0.03144712581678242 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3096774193548387, + "acc_stderr": 0.026302774983517418, + "acc_norm": 0.3096774193548387, + "acc_norm_stderr": 0.026302774983517418 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.3717948717948718, + "acc_stderr": 0.03166098891888078, + "acc_norm": 0.3717948717948718, + "acc_norm_stderr": 0.03166098891888078 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2981132075471698, + "acc_stderr": 0.02815283794249385, + "acc_norm": 0.2981132075471698, + "acc_norm_stderr": 0.02815283794249385 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.3, + "acc_stderr": 0.04389311454644286, + "acc_norm": 0.3, + "acc_norm_stderr": 0.04389311454644286 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.29259259259259257, + "acc_stderr": 0.027738969632176095, + "acc_norm": 0.29259259259259257, + "acc_norm_stderr": 0.027738969632176095 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.037101857261199946, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.037101857261199946 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.31343283582089554, + "acc_stderr": 0.03280188205348642, + "acc_norm": 0.31343283582089554, + "acc_norm_stderr": 0.03280188205348642 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 
0.34104046242774566, + "acc_stderr": 0.036146654241808254, + "acc_norm": 0.34104046242774566, + "acc_norm_stderr": 0.036146654241808254 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3201058201058201, + "acc_stderr": 0.024026846392873502, + "acc_norm": 0.3201058201058201, + "acc_norm_stderr": 0.024026846392873502 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.24305555555555555, + "acc_stderr": 0.0358687928008034, + "acc_norm": 0.24305555555555555, + "acc_norm_stderr": 0.0358687928008034 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.36127167630057805, + "acc_stderr": 0.025862201852277885, + "acc_norm": 0.36127167630057805, + "acc_norm_stderr": 0.025862201852277885 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2883435582822086, + "acc_stderr": 0.035590395316173425, + "acc_norm": 0.2883435582822086, + "acc_norm_stderr": 0.035590395316173425 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.26851851851851855, + "acc_stderr": 0.024659685185967284, + "acc_norm": 0.26851851851851855, + "acc_norm_stderr": 0.024659685185967284 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.24352331606217617, + "acc_stderr": 0.03097543638684542, + "acc_norm": 0.24352331606217617, + "acc_norm_stderr": 0.03097543638684542 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.22807017543859648, + "acc_stderr": 0.03947152782669415, + "acc_norm": 0.22807017543859648, + "acc_norm_stderr": 0.03947152782669415 + }, + 
"harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.30275229357798167, + "acc_stderr": 0.019698711434756353, + "acc_norm": 0.30275229357798167, + "acc_norm_stderr": 0.019698711434756353 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.25396825396825395, + "acc_stderr": 0.03893259610604676, + "acc_norm": 0.25396825396825395, + "acc_norm_stderr": 0.03893259610604676 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.31699346405228757, + "acc_stderr": 0.026643278474508755, + "acc_norm": 0.31699346405228757, + "acc_norm_stderr": 0.026643278474508755 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816505, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816505 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.4462809917355372, + "acc_stderr": 0.0453793517794788, + "acc_norm": 0.4462809917355372, + "acc_norm_stderr": 0.0453793517794788 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.34868421052631576, + "acc_stderr": 0.038781398887976104, + "acc_norm": 0.34868421052631576, + "acc_norm_stderr": 0.038781398887976104 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.25980392156862747, + "acc_stderr": 0.017740899509177795, + "acc_norm": 0.25980392156862747, + "acc_norm_stderr": 0.017740899509177795 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2730496453900709, + "acc_stderr": 0.02657786094330786, + "acc_norm": 0.2730496453900709, + "acc_norm_stderr": 0.02657786094330786 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.36607142857142855, + "acc_stderr": 0.045723723587374296, + "acc_norm": 0.36607142857142855, + "acc_norm_stderr": 0.045723723587374296 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.031415546294025445, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.031415546294025445 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27262569832402234, + "acc_stderr": 0.014893391735249608, + 
"acc_norm": 0.27262569832402234, + "acc_norm_stderr": 0.014893391735249608 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.41, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.04960449637488584, + "acc_norm": 0.42, + "acc_norm_stderr": 0.04960449637488584 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3014705882352941, + "acc_stderr": 0.027875982114273168, + "acc_norm": 0.3014705882352941, + "acc_norm_stderr": 0.027875982114273168 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4122448979591837, + "acc_stderr": 0.03151236044674281, + "acc_norm": 0.4122448979591837, + "acc_norm_stderr": 0.03151236044674281 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.29535864978902954, + "acc_stderr": 0.02969633871342289, + "acc_norm": 0.29535864978902954, + "acc_norm_stderr": 0.02969633871342289 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3076923076923077, + "acc_stderr": 0.011787910251664587, + "acc_norm": 0.3076923076923077, + "acc_norm_stderr": 0.011787910251664587 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.3284313725490196, + "acc_stderr": 0.03296245110172229, + "acc_norm": 0.3284313725490196, + "acc_norm_stderr": 0.03296245110172229 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.24848484848484848, + "acc_stderr": 0.03374402644139406, + "acc_norm": 0.24848484848484848, + "acc_norm_stderr": 0.03374402644139406 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24969400244798043, + "mc1_stderr": 0.015152286907148125, + "mc2": 0.4921908388094137, + "mc2_stderr": 0.01680481846998291 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.13105076741440377, + "acc_stderr": 0.011601971778212317, + "acc_norm": 0.2550177095631641, + "acc_norm_stderr": 0.014985559533428564 + } + }, + "versions": { + "all": 0, + 
"harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + 
"harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "4yo1/llama3-pre1-pre2-inst3-lora3-mergkit-base", + "model_sha": "694a24ef87aa42e0f3efa199162815da054ee45f", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/4yo1/sapie/result_2024-07-26 22:21:28.json b/4yo1/sapie/result_2024-07-26 22:21:28.json new file mode 100644 index 0000000000000000000000000000000000000000..b0c16983cd70b93c1f191e1acc17f5d0f3076780 --- /dev/null +++ b/4yo1/sapie/result_2024-07-26 22:21:28.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.37457337883959047, + "acc_stderr": 0.014144193471893452, + "acc_norm": 0.42918088737201365, + "acc_norm_stderr": 0.014464085894870657 + }, + "harness|ko_hellaswag|10": { + "acc": 0.39165504879506075, + "acc_stderr": 0.004871226629346399, + "acc_norm": 0.5270862378012349, + "acc_norm_stderr": 0.004982454383162069 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.03615507630310935, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.03615507630310935 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.7475728155339806, + "acc_stderr": 
0.043012503996908764, + "acc_norm": 0.7475728155339806, + "acc_norm_stderr": 0.043012503996908764 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5696040868454662, + "acc_stderr": 0.01770586877629239, + "acc_norm": 0.5696040868454662, + "acc_norm_stderr": 0.01770586877629239 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4, + "acc_stderr": 0.04232073695151589, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04232073695151589 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542124, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542124 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.49361702127659574, + "acc_stderr": 0.03268335899936337, + "acc_norm": 0.49361702127659574, + "acc_norm_stderr": 0.03268335899936337 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4819277108433735, + "acc_stderr": 0.038899512528272166, + "acc_norm": 0.4819277108433735, + "acc_norm_stderr": 0.038899512528272166 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5401929260450161, + "acc_stderr": 0.028306190403305693, + "acc_norm": 0.5401929260450161, + "acc_norm_stderr": 0.028306190403305693 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5336322869955157, + "acc_stderr": 0.033481800170603065, + "acc_norm": 0.5336322869955157, + "acc_norm_stderr": 0.033481800170603065 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5419847328244275, + "acc_stderr": 0.04369802690578757, + "acc_norm": 0.5419847328244275, + "acc_norm_stderr": 0.04369802690578757 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.44, + "acc_stderr": 0.0498887651569859, + "acc_norm": 0.44, + "acc_norm_stderr": 0.0498887651569859 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6919191919191919, + "acc_stderr": 0.03289477330098614, + "acc_norm": 0.6919191919191919, + "acc_norm_stderr": 0.03289477330098614 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5724137931034483, + "acc_stderr": 0.04122737111370333, + 
"acc_norm": 0.5724137931034483, + "acc_norm_stderr": 0.04122737111370333 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04690650298201943, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04690650298201943 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5588235294117647, + "acc_stderr": 0.03225294232399639, + "acc_norm": 0.5588235294117647, + "acc_norm_stderr": 0.03225294232399639 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5230769230769231, + "acc_stderr": 0.025323990861736246, + "acc_norm": 0.5230769230769231, + "acc_norm_stderr": 0.025323990861736246 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6111111111111112, + "acc_stderr": 0.0471282125742677, + "acc_norm": 0.6111111111111112, + "acc_norm_stderr": 0.0471282125742677 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.458128078817734, + "acc_stderr": 0.03505630140785741, + "acc_norm": 0.458128078817734, + "acc_norm_stderr": 0.03505630140785741 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5387096774193548, + "acc_stderr": 0.028358634859836945, + "acc_norm": 0.5387096774193548, + "acc_norm_stderr": 0.028358634859836945 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7606837606837606, + "acc_stderr": 0.027951826808924333, + "acc_norm": 0.7606837606837606, + "acc_norm_stderr": 0.027951826808924333 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.49056603773584906, + "acc_stderr": 0.030767394707808086, + "acc_norm": 0.49056603773584906, + "acc_norm_stderr": 0.030767394707808086 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5818181818181818, + 
"acc_stderr": 0.04724577405731572, + "acc_norm": 0.5818181818181818, + "acc_norm_stderr": 0.04724577405731572 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.029723278961476664, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.029723278961476664 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.39072847682119205, + "acc_stderr": 0.03983798306659808, + "acc_norm": 0.39072847682119205, + "acc_norm_stderr": 0.03983798306659808 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6517412935323383, + "acc_stderr": 0.033687874661154596, + "acc_norm": 0.6517412935323383, + "acc_norm_stderr": 0.033687874661154596 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.47398843930635837, + "acc_stderr": 0.03807301726504511, + "acc_norm": 0.47398843930635837, + "acc_norm_stderr": 0.03807301726504511 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.37566137566137564, + "acc_stderr": 0.02494236893115978, + "acc_norm": 0.37566137566137564, + "acc_norm_stderr": 0.02494236893115978 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5, + "acc_stderr": 0.04181210050035455, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04181210050035455 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.7, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.7, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.569364161849711, + "acc_stderr": 0.026658800273672376, + "acc_norm": 0.569364161849711, + "acc_norm_stderr": 0.026658800273672376 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4723926380368098, + "acc_stderr": 0.039223782906109894, + "acc_norm": 0.4723926380368098, + "acc_norm_stderr": 0.039223782906109894 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5339506172839507, + 
"acc_stderr": 0.027756535257347663, + "acc_norm": 0.5339506172839507, + "acc_norm_stderr": 0.027756535257347663 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6321243523316062, + "acc_stderr": 0.034801756684660366, + "acc_norm": 0.6321243523316062, + "acc_norm_stderr": 0.034801756684660366 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.39473684210526316, + "acc_stderr": 0.04598188057816542, + "acc_norm": 0.39473684210526316, + "acc_norm_stderr": 0.04598188057816542 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5889908256880734, + "acc_stderr": 0.02109505068727765, + "acc_norm": 0.5889908256880734, + "acc_norm_stderr": 0.02109505068727765 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.42063492063492064, + "acc_stderr": 0.04415438226743744, + "acc_norm": 0.42063492063492064, + "acc_norm_stderr": 0.04415438226743744 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5588235294117647, + "acc_stderr": 0.028431095444176643, + "acc_norm": 0.5588235294117647, + "acc_norm_stderr": 0.028431095444176643 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7272727272727273, + "acc_stderr": 0.040655781409087044, + "acc_norm": 0.7272727272727273, + "acc_norm_stderr": 0.040655781409087044 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5592105263157895, + "acc_stderr": 0.04040311062490436, + "acc_norm": 0.5592105263157895, + "acc_norm_stderr": 0.04040311062490436 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.45588235294117646, + "acc_stderr": 0.020148939420415738, + "acc_norm": 0.45588235294117646, + "acc_norm_stderr": 0.020148939420415738 + }, + 
"harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32269503546099293, + "acc_stderr": 0.02788913930053478, + "acc_norm": 0.32269503546099293, + "acc_norm_stderr": 0.02788913930053478 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.48214285714285715, + "acc_stderr": 0.047427623612430116, + "acc_norm": 0.48214285714285715, + "acc_norm_stderr": 0.047427623612430116 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.034076320938540516, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.034076320938540516 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.311731843575419, + "acc_stderr": 0.015491756531894637, + "acc_norm": 0.311731843575419, + "acc_norm_stderr": 0.015491756531894637 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.63, + "acc_stderr": 0.048523658709390974, + "acc_norm": 0.63, + "acc_norm_stderr": 0.048523658709390974 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4375, + "acc_stderr": 0.030134614954403924, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.030134614954403924 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5877551020408164, + "acc_stderr": 0.031512360446742695, + "acc_norm": 0.5877551020408164, + "acc_norm_stderr": 0.031512360446742695 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6751054852320675, + "acc_stderr": 0.030486039389105313, + "acc_norm": 0.6751054852320675, + "acc_norm_stderr": 0.030486039389105313 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3683181225554107, + "acc_stderr": 0.012319403369564644, + "acc_norm": 0.3683181225554107, + "acc_norm_stderr": 0.012319403369564644 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5980392156862745, + "acc_stderr": 0.03441190023482465, + "acc_norm": 
0.5980392156862745, + "acc_norm_stderr": 0.03441190023482465 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6242424242424243, + "acc_stderr": 0.03781887353205983, + "acc_norm": 0.6242424242424243, + "acc_norm_stderr": 0.03781887353205983 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2692778457772338, + "mc1_stderr": 0.015528566637087305, + "mc2": 0.4266118480864784, + "mc2_stderr": 0.014954014245341285 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.47461629279811096, + "acc_stderr": 0.017168187201429253, + "acc_norm": 0.615112160566706, + "acc_norm_stderr": 0.016728579701498648 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + 
"harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "4yo1/sapie", + "model_sha": "15e11220e73283506ff17e0e15ef79bbdac9103c", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/AIFT/AIFT-instruct-42dot_LLM-SFT-1.3B-dpo/result_2024-01-30 03:46:37.json b/AIFT/AIFT-instruct-42dot_LLM-SFT-1.3B-dpo/result_2024-01-30 03:46:37.json new file mode 100644 index 0000000000000000000000000000000000000000..1a9675872499712fa2b1ea7663b47f80fd0df3c5 --- /dev/null +++ 
b/AIFT/AIFT-instruct-42dot_LLM-SFT-1.3B-dpo/result_2024-01-30 03:46:37.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.27474402730375425, + "acc_stderr": 0.013044617212771227, + "acc_norm": 0.3361774744027304, + "acc_norm_stderr": 0.01380485502620576 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3577972515435172, + "acc_stderr": 0.0047837237982865, + "acc_norm": 0.4455287791276638, + "acc_norm_stderr": 0.004960082528852433 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.28654970760233917, + "acc_stderr": 0.034678266857038245, + "acc_norm": 0.28654970760233917, + "acc_norm_stderr": 0.034678266857038245 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2621359223300971, + "acc_stderr": 0.04354631077260597, + "acc_norm": 0.2621359223300971, + "acc_norm_stderr": 0.04354631077260597 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.21966794380587484, + "acc_stderr": 0.014805384478371169, + "acc_norm": 0.21966794380587484, + "acc_norm_stderr": 0.014805384478371169 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.1925925925925926, + "acc_stderr": 0.03406542058502652, + "acc_norm": 0.1925925925925926, + "acc_norm_stderr": 0.03406542058502652 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.23829787234042554, + "acc_stderr": 0.027851252973889774, + "acc_norm": 0.23829787234042554, + "acc_norm_stderr": 0.027851252973889774 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3072289156626506, + "acc_stderr": 0.035915667978246635, + "acc_norm": 0.3072289156626506, + "acc_norm_stderr": 0.035915667978246635 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2765273311897106, + "acc_stderr": 0.025403832978179622, + "acc_norm": 0.2765273311897106, + "acc_norm_stderr": 0.025403832978179622 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.21973094170403587, 
+ "acc_stderr": 0.02779017706438359, + "acc_norm": 0.21973094170403587, + "acc_norm_stderr": 0.02779017706438359 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.24427480916030533, + "acc_stderr": 0.037683359597287434, + "acc_norm": 0.24427480916030533, + "acc_norm_stderr": 0.037683359597287434 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.18686868686868688, + "acc_stderr": 0.027772533334218967, + "acc_norm": 0.18686868686868688, + "acc_norm_stderr": 0.027772533334218967 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.22758620689655173, + "acc_stderr": 0.03493950380131184, + "acc_norm": 0.22758620689655173, + "acc_norm_stderr": 0.03493950380131184 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.04440521906179326, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.04440521906179326 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.2184873949579832, + "acc_stderr": 0.02684151432295893, + "acc_norm": 0.2184873949579832, + "acc_norm_stderr": 0.02684151432295893 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.19230769230769232, + "acc_stderr": 0.019982347208637292, + "acc_norm": 0.19230769230769232, + "acc_norm_stderr": 0.019982347208637292 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.26851851851851855, + "acc_stderr": 0.04284467968052192, + "acc_norm": 0.26851851851851855, + "acc_norm_stderr": 0.04284467968052192 + }, + 
"harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.32019704433497537, + "acc_stderr": 0.03282649385304151, + "acc_norm": 0.32019704433497537, + "acc_norm_stderr": 0.03282649385304151 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3, + "acc_stderr": 0.02606936229533513, + "acc_norm": 0.3, + "acc_norm_stderr": 0.02606936229533513 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.029343114798094462, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.029343114798094462 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.22264150943396227, + "acc_stderr": 0.025604233470899095, + "acc_norm": 0.22264150943396227, + "acc_norm_stderr": 0.025604233470899095 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.32727272727272727, + "acc_stderr": 0.04494290866252089, + "acc_norm": 0.32727272727272727, + "acc_norm_stderr": 0.04494290866252089 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24074074074074073, + "acc_stderr": 0.026067159222275798, + "acc_norm": 0.24074074074074073, + "acc_norm_stderr": 0.026067159222275798 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2781456953642384, + "acc_stderr": 0.036586032627637426, + "acc_norm": 0.2781456953642384, + "acc_norm_stderr": 0.036586032627637426 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.24875621890547264, + "acc_stderr": 0.030567675938916707, + "acc_norm": 0.24875621890547264, + "acc_norm_stderr": 0.030567675938916707 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.26011560693641617, + "acc_stderr": 0.03345036916788989, + "acc_norm": 0.26011560693641617, + "acc_norm_stderr": 0.03345036916788989 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.24867724867724866, + "acc_stderr": 0.02226181769240018, + "acc_norm": 0.24867724867724866, + "acc_norm_stderr": 0.02226181769240018 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.24305555555555555, + "acc_stderr": 0.03586879280080341, + 
"acc_norm": 0.24305555555555555, + "acc_norm_stderr": 0.03586879280080341 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542126, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542126 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2774566473988439, + "acc_stderr": 0.024105712607754307, + "acc_norm": 0.2774566473988439, + "acc_norm_stderr": 0.024105712607754307 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.22085889570552147, + "acc_stderr": 0.032591773927421776, + "acc_norm": 0.22085889570552147, + "acc_norm_stderr": 0.032591773927421776 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2654320987654321, + "acc_stderr": 0.02456922360046085, + "acc_norm": 0.2654320987654321, + "acc_norm_stderr": 0.02456922360046085 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.21243523316062177, + "acc_stderr": 0.02951928261681725, + "acc_norm": 0.21243523316062177, + "acc_norm_stderr": 0.02951928261681725 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.041857744240220575, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.041857744240220575 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.2, + "acc_stderr": 0.017149858514250944, + "acc_norm": 0.2, + "acc_norm_stderr": 0.017149858514250944 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3253968253968254, + "acc_stderr": 0.041905964388711366, + "acc_norm": 0.3253968253968254, + "acc_norm_stderr": 0.041905964388711366 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2973856209150327, + "acc_stderr": 0.026173908506718576, + "acc_norm": 
0.2973856209150327, + "acc_norm_stderr": 0.026173908506718576 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909284, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909284 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.3305785123966942, + "acc_stderr": 0.04294340845212095, + "acc_norm": 0.3305785123966942, + "acc_norm_stderr": 0.04294340845212095 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.2565789473684211, + "acc_stderr": 0.0355418036802569, + "acc_norm": 0.2565789473684211, + "acc_norm_stderr": 0.0355418036802569 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.25163398692810457, + "acc_stderr": 0.01755581809132227, + "acc_norm": 0.25163398692810457, + "acc_norm_stderr": 0.01755581809132227 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.24468085106382978, + "acc_stderr": 0.025645553622266726, + "acc_norm": 0.24468085106382978, + "acc_norm_stderr": 0.025645553622266726 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.21428571428571427, + "acc_stderr": 0.03894641120044792, + "acc_norm": 0.21428571428571427, + "acc_norm_stderr": 0.03894641120044792 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4351851851851852, + "acc_stderr": 0.033812000056435254, + "acc_norm": 0.4351851851851852, + "acc_norm_stderr": 0.033812000056435254 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27262569832402234, + "acc_stderr": 0.014893391735249608, + "acc_norm": 0.27262569832402234, + "acc_norm_stderr": 0.014893391735249608 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3492647058823529, + 
"acc_stderr": 0.028959755196824862, + "acc_norm": 0.3492647058823529, + "acc_norm_stderr": 0.028959755196824862 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.19591836734693877, + "acc_stderr": 0.02540930195322568, + "acc_norm": 0.19591836734693877, + "acc_norm_stderr": 0.02540930195322568 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.29535864978902954, + "acc_stderr": 0.029696338713422882, + "acc_norm": 0.29535864978902954, + "acc_norm_stderr": 0.029696338713422882 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2457627118644068, + "acc_stderr": 0.010996156635142692, + "acc_norm": 0.2457627118644068, + "acc_norm_stderr": 0.010996156635142692 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2696078431372549, + "acc_stderr": 0.03114557065948678, + "acc_norm": 0.2696078431372549, + "acc_norm_stderr": 0.03114557065948678 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.24848484848484848, + "acc_stderr": 0.03374402644139404, + "acc_norm": 0.24848484848484848, + "acc_norm_stderr": 0.03374402644139404 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.25703794369645044, + "mc1_stderr": 0.015298077509485083, + "mc2": 0.4174368957869544, + "mc2_stderr": 0.015294388765459724 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3317591499409681, + "acc_stderr": 0.01618798464215732, + "acc_norm": 0.4462809917355372, + "acc_norm_stderr": 0.017090852631668332 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + 
"harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + 
"harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "AIFT/AIFT-instruct-42dot_LLM-SFT-1.3B-dpo", + "model_sha": "e9e27e2063046b74476dadb9af3eb45e8786310c", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/AIFT/AIFT-instruct-42dot_LLM-SFT-1.3B/result_2024-01-30 00:16:56.json b/AIFT/AIFT-instruct-42dot_LLM-SFT-1.3B/result_2024-01-30 00:16:56.json new file mode 100644 index 0000000000000000000000000000000000000000..86d30aaa58c78a3d428638f14870d8a8942569f0 --- /dev/null +++ b/AIFT/AIFT-instruct-42dot_LLM-SFT-1.3B/result_2024-01-30 00:16:56.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2721843003412969, + "acc_stderr": 0.013006600406423709, + "acc_norm": 0.33276450511945393, + "acc_norm_stderr": 0.013769863046192305 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3543118900617407, + "acc_stderr": 0.004773267510112743, + "acc_norm": 0.4435371439952201, + "acc_norm_stderr": 0.004957863944093124 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.29239766081871343, + "acc_stderr": 0.03488647713457923, + "acc_norm": 0.29239766081871343, + "acc_norm_stderr": 0.03488647713457923 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2621359223300971, + "acc_stderr": 0.04354631077260597, + "acc_norm": 0.2621359223300971, + "acc_norm_stderr": 0.04354631077260597 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.21966794380587484, + "acc_stderr": 0.014805384478371169, + "acc_norm": 0.21966794380587484, + "acc_norm_stderr": 0.014805384478371169 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2, + "acc_stderr": 0.03455473702325438, + "acc_norm": 0.2, + "acc_norm_stderr": 0.03455473702325438 + }, + 
"harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.23829787234042554, + "acc_stderr": 0.027851252973889774, + "acc_norm": 0.23829787234042554, + "acc_norm_stderr": 0.027851252973889774 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.2891566265060241, + "acc_stderr": 0.035294868015111155, + "acc_norm": 0.2891566265060241, + "acc_norm_stderr": 0.035294868015111155 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.26366559485530544, + "acc_stderr": 0.02502553850053234, + "acc_norm": 0.26366559485530544, + "acc_norm_stderr": 0.02502553850053234 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.21076233183856502, + "acc_stderr": 0.027373095500540193, + "acc_norm": 0.21076233183856502, + "acc_norm_stderr": 0.027373095500540193 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.26717557251908397, + "acc_stderr": 0.03880848301082395, + "acc_norm": 0.26717557251908397, + "acc_norm_stderr": 0.03880848301082395 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.18181818181818182, + "acc_stderr": 0.027479603010538804, + "acc_norm": 0.18181818181818182, + "acc_norm_stderr": 0.027479603010538804 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.22758620689655173, + "acc_stderr": 0.03493950380131184, + "acc_norm": 0.22758620689655173, + "acc_norm_stderr": 0.03493950380131184 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.043898699568087785, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.043898699568087785 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.226890756302521, + "acc_stderr": 0.027205371538279472, + "acc_norm": 0.226890756302521, + 
"acc_norm_stderr": 0.027205371538279472 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.19230769230769232, + "acc_stderr": 0.019982347208637296, + "acc_norm": 0.19230769230769232, + "acc_norm_stderr": 0.019982347208637296 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.26851851851851855, + "acc_stderr": 0.04284467968052192, + "acc_norm": 0.26851851851851855, + "acc_norm_stderr": 0.04284467968052192 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.03178529710642749, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.03178529710642749 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.31290322580645163, + "acc_stderr": 0.026377567028645854, + "acc_norm": 0.31290322580645163, + "acc_norm_stderr": 0.026377567028645854 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2863247863247863, + "acc_stderr": 0.029614323690456648, + "acc_norm": 0.2863247863247863, + "acc_norm_stderr": 0.029614323690456648 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.23018867924528302, + "acc_stderr": 0.025907897122408173, + "acc_norm": 0.23018867924528302, + "acc_norm_stderr": 0.025907897122408173 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.32727272727272727, + "acc_stderr": 0.04494290866252089, + "acc_norm": 0.32727272727272727, + "acc_norm_stderr": 0.04494290866252089 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.23703703703703705, + "acc_stderr": 0.025928876132766107, + "acc_norm": 0.23703703703703705, + "acc_norm_stderr": 0.025928876132766107 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2781456953642384, + "acc_stderr": 
0.03658603262763743, + "acc_norm": 0.2781456953642384, + "acc_norm_stderr": 0.03658603262763743 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.24378109452736318, + "acc_stderr": 0.03036049015401464, + "acc_norm": 0.24378109452736318, + "acc_norm_stderr": 0.03036049015401464 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2774566473988439, + "acc_stderr": 0.03414014007044036, + "acc_norm": 0.2774566473988439, + "acc_norm_stderr": 0.03414014007044036 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.24338624338624337, + "acc_stderr": 0.02210112878741543, + "acc_norm": 0.24338624338624337, + "acc_norm_stderr": 0.02210112878741543 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.25, + "acc_stderr": 0.03621034121889507, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03621034121889507 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2745664739884393, + "acc_stderr": 0.024027745155265016, + "acc_norm": 0.2745664739884393, + "acc_norm_stderr": 0.024027745155265016 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.22085889570552147, + "acc_stderr": 0.03259177392742178, + "acc_norm": 0.22085889570552147, + "acc_norm_stderr": 0.03259177392742178 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.26851851851851855, + "acc_stderr": 0.02465968518596728, + "acc_norm": 0.26851851851851855, + "acc_norm_stderr": 0.02465968518596728 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.21243523316062177, + "acc_stderr": 
0.02951928261681725, + "acc_norm": 0.21243523316062177, + "acc_norm_stderr": 0.02951928261681725 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.042270544512321984, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.042270544512321984 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.1944954128440367, + "acc_stderr": 0.016970289090458054, + "acc_norm": 0.1944954128440367, + "acc_norm_stderr": 0.016970289090458054 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.04134913018303316, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.04134913018303316 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.025829163272757468, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.025829163272757468 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909284, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909284 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.34710743801652894, + "acc_stderr": 0.04345724570292534, + "acc_norm": 0.34710743801652894, + "acc_norm_stderr": 0.04345724570292534 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.26973684210526316, + "acc_stderr": 0.03611780560284898, + "acc_norm": 0.26973684210526316, + "acc_norm_stderr": 0.03611780560284898 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.25, + "acc_stderr": 0.01751781884501444, + "acc_norm": 0.25, + "acc_norm_stderr": 0.01751781884501444 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.23404255319148937, + "acc_stderr": 0.025257861359432414, + "acc_norm": 0.23404255319148937, + "acc_norm_stderr": 0.025257861359432414 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.20535714285714285, + "acc_stderr": 0.038342410214190714, + "acc_norm": 0.20535714285714285, + "acc_norm_stderr": 0.038342410214190714 + }, + "harness|ko_mmlu_high_school_statistics|5": { + 
"acc": 0.4305555555555556, + "acc_stderr": 0.03376922151252335, + "acc_norm": 0.4305555555555556, + "acc_norm_stderr": 0.03376922151252335 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27262569832402234, + "acc_stderr": 0.014893391735249608, + "acc_norm": 0.27262569832402234, + "acc_norm_stderr": 0.014893391735249608 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.34558823529411764, + "acc_stderr": 0.02888819310398865, + "acc_norm": 0.34558823529411764, + "acc_norm_stderr": 0.02888819310398865 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.19591836734693877, + "acc_stderr": 0.02540930195322568, + "acc_norm": 0.19591836734693877, + "acc_norm_stderr": 0.02540930195322568 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.3037974683544304, + "acc_stderr": 0.0299366963871386, + "acc_norm": 0.3037974683544304, + "acc_norm_stderr": 0.0299366963871386 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2438070404172099, + "acc_stderr": 0.010966507972178475, + "acc_norm": 0.2438070404172099, + "acc_norm_stderr": 0.010966507972178475 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2696078431372549, + "acc_stderr": 0.03114557065948678, + "acc_norm": 0.2696078431372549, + "acc_norm_stderr": 0.03114557065948678 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.23636363636363636, + "acc_stderr": 0.033175059300091805, + "acc_norm": 0.23636363636363636, + "acc_norm_stderr": 0.033175059300091805 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.25458996328029376, + "mc1_stderr": 0.015250117079156465, + "mc2": 0.41626362709754605, + "mc2_stderr": 
0.015226489644958928 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3234946871310508, + "acc_stderr": 0.016083627290483675, + "acc_norm": 0.44391971664698937, + "acc_norm_stderr": 0.017081884623542543 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 
1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "AIFT/AIFT-instruct-42dot_LLM-SFT-1.3B", + "model_sha": "58801e4a8909a9cda6173c51bd79470297beb4af", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/AIFT/AIFT-instruct-SFT-1.3B-refine-v3/result_2024-02-28 12:40:49.json b/AIFT/AIFT-instruct-SFT-1.3B-refine-v3/result_2024-02-28 12:40:49.json new file mode 100644 index 0000000000000000000000000000000000000000..60b6f99cbb07b6d8b15d90abdd8601b19f73dc06 --- /dev/null +++ b/AIFT/AIFT-instruct-SFT-1.3B-refine-v3/result_2024-02-28 12:40:49.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.27559726962457337, + "acc_stderr": 0.01305716965576184, + "acc_norm": 0.33532423208191126, + "acc_norm_stderr": 0.013796182947785564 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3536148177653854, + "acc_stderr": 0.004771143074426132, + "acc_norm": 0.4457279426409082, + "acc_norm_stderr": 0.0049602999525194 + }, + 
"harness|ko_mmlu_world_religions|5": { + "acc": 0.29239766081871343, + "acc_stderr": 0.034886477134579236, + "acc_norm": 0.29239766081871343, + "acc_norm_stderr": 0.034886477134579236 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2621359223300971, + "acc_stderr": 0.04354631077260597, + "acc_norm": 0.2621359223300971, + "acc_norm_stderr": 0.04354631077260597 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.21711366538952745, + "acc_stderr": 0.014743125394823291, + "acc_norm": 0.21711366538952745, + "acc_norm_stderr": 0.014743125394823291 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.22962962962962963, + "acc_stderr": 0.03633384414073465, + "acc_norm": 0.22962962962962963, + "acc_norm_stderr": 0.03633384414073465 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2765957446808511, + "acc_stderr": 0.02924188386962882, + "acc_norm": 0.2765957446808511, + "acc_norm_stderr": 0.02924188386962882 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3253012048192771, + "acc_stderr": 0.03647168523683227, + "acc_norm": 0.3253012048192771, + "acc_norm_stderr": 0.03647168523683227 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2540192926045016, + "acc_stderr": 0.024723861504771686, + "acc_norm": 0.2540192926045016, + "acc_norm_stderr": 0.024723861504771686 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.21973094170403587, + "acc_stderr": 0.0277901770643836, + "acc_norm": 0.21973094170403587, + "acc_norm_stderr": 0.0277901770643836 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2366412213740458, + "acc_stderr": 0.03727673575596917, + "acc_norm": 0.2366412213740458, + "acc_norm_stderr": 0.03727673575596917 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + 
"harness|ko_mmlu_high_school_geography|5": { + "acc": 0.1919191919191919, + "acc_stderr": 0.028057791672989017, + "acc_norm": 0.1919191919191919, + "acc_norm_stderr": 0.028057791672989017 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.25517241379310346, + "acc_stderr": 0.03632984052707842, + "acc_norm": 0.25517241379310346, + "acc_norm_stderr": 0.03632984052707842 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237657, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237657 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.25630252100840334, + "acc_stderr": 0.028359620870533946, + "acc_norm": 0.25630252100840334, + "acc_norm_stderr": 0.028359620870533946 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.20512820512820512, + "acc_stderr": 0.020473233173551975, + "acc_norm": 0.20512820512820512, + "acc_norm_stderr": 0.020473233173551975 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.26851851851851855, + "acc_stderr": 0.04284467968052192, + "acc_norm": 0.26851851851851855, + "acc_norm_stderr": 0.04284467968052192 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2660098522167488, + "acc_stderr": 0.03108982600293752, + "acc_norm": 0.2660098522167488, + "acc_norm_stderr": 0.03108982600293752 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2870967741935484, + "acc_stderr": 0.025736542745594528, + "acc_norm": 0.2870967741935484, + "acc_norm_stderr": 0.025736542745594528 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.27350427350427353, + "acc_stderr": 0.029202540153431166, + "acc_norm": 
0.27350427350427353, + "acc_norm_stderr": 0.029202540153431166 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.24150943396226415, + "acc_stderr": 0.026341480371118355, + "acc_norm": 0.24150943396226415, + "acc_norm_stderr": 0.026341480371118355 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.3, + "acc_stderr": 0.04389311454644286, + "acc_norm": 0.3, + "acc_norm_stderr": 0.04389311454644286 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.02730914058823019, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.02730914058823019 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2251655629139073, + "acc_stderr": 0.03410435282008936, + "acc_norm": 0.2251655629139073, + "acc_norm_stderr": 0.03410435282008936 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.2736318407960199, + "acc_stderr": 0.03152439186555402, + "acc_norm": 0.2736318407960199, + "acc_norm_stderr": 0.03152439186555402 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.19653179190751446, + "acc_stderr": 0.03029957466478815, + "acc_norm": 0.19653179190751446, + "acc_norm_stderr": 0.03029957466478815 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.23809523809523808, + "acc_stderr": 0.02193587808118476, + "acc_norm": 0.23809523809523808, + "acc_norm_stderr": 0.02193587808118476 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2569444444444444, + "acc_stderr": 0.03653946969442099, + "acc_norm": 0.2569444444444444, + "acc_norm_stderr": 0.03653946969442099 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.19, + "acc_stderr": 0.03942772444036623, + "acc_norm": 0.19, + "acc_norm_stderr": 0.03942772444036623 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2658959537572254, + "acc_stderr": 0.023786203255508287, + 
"acc_norm": 0.2658959537572254, + "acc_norm_stderr": 0.023786203255508287 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.25766871165644173, + "acc_stderr": 0.03436150827846917, + "acc_norm": 0.25766871165644173, + "acc_norm_stderr": 0.03436150827846917 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.27469135802469136, + "acc_stderr": 0.024836057868294677, + "acc_norm": 0.27469135802469136, + "acc_norm_stderr": 0.024836057868294677 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.18652849740932642, + "acc_stderr": 0.02811209121011747, + "acc_norm": 0.18652849740932642, + "acc_norm_stderr": 0.02811209121011747 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.22807017543859648, + "acc_stderr": 0.03947152782669415, + "acc_norm": 0.22807017543859648, + "acc_norm_stderr": 0.03947152782669415 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.20550458715596331, + "acc_stderr": 0.01732435232501601, + "acc_norm": 0.20550458715596331, + "acc_norm_stderr": 0.01732435232501601 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.1984126984126984, + "acc_stderr": 0.03567016675276863, + "acc_norm": 0.1984126984126984, + "acc_norm_stderr": 0.03567016675276863 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.02392915551735129, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.02392915551735129 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.2, + "acc_stderr": 0.040201512610368445, + "acc_norm": 0.2, + "acc_norm_stderr": 0.040201512610368445 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.34710743801652894, + "acc_stderr": 0.04345724570292534, + "acc_norm": 0.34710743801652894, + "acc_norm_stderr": 0.04345724570292534 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.17763157894736842, + 
"acc_stderr": 0.031103182383123377, + "acc_norm": 0.17763157894736842, + "acc_norm_stderr": 0.031103182383123377 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.27124183006535946, + "acc_stderr": 0.017986615304030305, + "acc_norm": 0.27124183006535946, + "acc_norm_stderr": 0.017986615304030305 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.24822695035460993, + "acc_stderr": 0.0257700156442904, + "acc_norm": 0.24822695035460993, + "acc_norm_stderr": 0.0257700156442904 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25, + "acc_stderr": 0.04109974682633932, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04109974682633932 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.03114144782353603, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.03114144782353603 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.25027932960893856, + "acc_stderr": 0.01448750085285042, + "acc_norm": 0.25027932960893856, + "acc_norm_stderr": 0.01448750085285042 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.16911764705882354, + "acc_stderr": 0.022770868010113007, + "acc_norm": 0.16911764705882354, + "acc_norm_stderr": 0.022770868010113007 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.22448979591836735, + "acc_stderr": 0.026711430555538408, + "acc_norm": 0.22448979591836735, + "acc_norm_stderr": 0.026711430555538408 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.26582278481012656, + "acc_stderr": 0.028756799629658332, + "acc_norm": 0.26582278481012656, + "acc_norm_stderr": 0.028756799629658332 + }, + 
"harness|ko_mmlu_professional_law|5": { + "acc": 0.2633637548891786, + "acc_stderr": 0.011249506403605296, + "acc_norm": 0.2633637548891786, + "acc_norm_stderr": 0.011249506403605296 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.030587591351604243, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.030587591351604243 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.296969696969697, + "acc_stderr": 0.035679697722680474, + "acc_norm": 0.296969696969697, + "acc_norm_stderr": 0.035679697722680474 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2533659730722154, + "mc1_stderr": 0.015225899340826824, + "mc2": 0.4107878952898989, + "mc2_stderr": 0.01500499376546119 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3270365997638725, + "acc_stderr": 0.016129047485457022, + "acc_norm": 0.4332939787485242, + "acc_norm_stderr": 0.017036683641893105 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + 
"harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "AIFT/AIFT-instruct-SFT-1.3B-refine-v3", + "model_sha": "51280ba05cc276e596478e551c75fd4c61b07fe3", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff 
--git a/AIFT/AIFT-instruct-SFT-1.3B-v1.1/result_2024-02-22 12:37:56.json b/AIFT/AIFT-instruct-SFT-1.3B-v1.1/result_2024-02-22 12:37:56.json new file mode 100644 index 0000000000000000000000000000000000000000..6ad03e00e7db3205865a8a8b49c4244760484108 --- /dev/null +++ b/AIFT/AIFT-instruct-SFT-1.3B-v1.1/result_2024-02-22 12:37:56.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2713310580204778, + "acc_stderr": 0.012993807727545784, + "acc_norm": 0.3319112627986348, + "acc_norm_stderr": 0.013760988200880534 + }, + "harness|ko_hellaswag|10": { + "acc": 0.35590519816769567, + "acc_stderr": 0.004778081784542411, + "acc_norm": 0.44503087034455285, + "acc_norm_stderr": 0.004959535443170614 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.3216374269005848, + "acc_stderr": 0.03582529442573122, + "acc_norm": 0.3216374269005848, + "acc_norm_stderr": 0.03582529442573122 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2524271844660194, + "acc_stderr": 0.04301250399690877, + "acc_norm": 0.2524271844660194, + "acc_norm_stderr": 0.04301250399690877 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2388250319284802, + "acc_stderr": 0.015246803197398687, + "acc_norm": 0.2388250319284802, + "acc_norm_stderr": 0.015246803197398687 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2074074074074074, + "acc_stderr": 0.03502553170678316, + "acc_norm": 0.2074074074074074, + "acc_norm_stderr": 0.03502553170678316 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3021276595744681, + "acc_stderr": 0.03001755447188055, + "acc_norm": 0.3021276595744681, + "acc_norm_stderr": 0.03001755447188055 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3313253012048193, + "acc_stderr": 0.03664314777288086, + "acc_norm": 0.3313253012048193, + "acc_norm_stderr": 0.03664314777288086 + }, 
+ "harness|ko_mmlu_philosophy|5": { + "acc": 0.26366559485530544, + "acc_stderr": 0.02502553850053234, + "acc_norm": 0.26366559485530544, + "acc_norm_stderr": 0.02502553850053234 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.2825112107623318, + "acc_stderr": 0.030216831011508762, + "acc_norm": 0.2825112107623318, + "acc_norm_stderr": 0.030216831011508762 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.25190839694656486, + "acc_stderr": 0.03807387116306086, + "acc_norm": 0.25190839694656486, + "acc_norm_stderr": 0.03807387116306086 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768077, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768077 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.17676767676767677, + "acc_stderr": 0.027178752639044915, + "acc_norm": 0.17676767676767677, + "acc_norm_stderr": 0.027178752639044915 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2413793103448276, + "acc_stderr": 0.03565998174135302, + "acc_norm": 0.2413793103448276, + "acc_norm_stderr": 0.03565998174135302 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.041583075330832865, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.041583075330832865 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.21428571428571427, + "acc_stderr": 0.026653531596715487, + "acc_norm": 0.21428571428571427, + "acc_norm_stderr": 0.026653531596715487 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2230769230769231, + "acc_stderr": 0.02110773012724401, + "acc_norm": 0.2230769230769231, + "acc_norm_stderr": 0.02110773012724401 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.17, + "acc_stderr": 0.03775251680686371, + "acc_norm": 0.17, + "acc_norm_stderr": 
0.03775251680686371 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.04330043749650742, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.04330043749650742 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.24630541871921183, + "acc_stderr": 0.030315099285617715, + "acc_norm": 0.24630541871921183, + "acc_norm_stderr": 0.030315099285617715 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.22258064516129034, + "acc_stderr": 0.023664216671642507, + "acc_norm": 0.22258064516129034, + "acc_norm_stderr": 0.023664216671642507 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.26495726495726496, + "acc_stderr": 0.028911208802749465, + "acc_norm": 0.26495726495726496, + "acc_norm_stderr": 0.028911208802749465 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.21509433962264152, + "acc_stderr": 0.025288394502891373, + "acc_norm": 0.21509433962264152, + "acc_norm_stderr": 0.025288394502891373 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.37272727272727274, + "acc_stderr": 0.04631381319425463, + "acc_norm": 0.37272727272727274, + "acc_norm_stderr": 0.04631381319425463 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.026962424325073828, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.026962424325073828 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.1986754966887417, + "acc_stderr": 0.032578473844367746, + "acc_norm": 0.1986754966887417, + "acc_norm_stderr": 0.032578473844367746 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.25870646766169153, + "acc_stderr": 0.03096590312357304, + "acc_norm": 0.25870646766169153, + "acc_norm_stderr": 0.03096590312357304 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2254335260115607, + "acc_stderr": 0.03186209851641144, + "acc_norm": 0.2254335260115607, + "acc_norm_stderr": 0.03186209851641144 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 
0.2328042328042328, + "acc_stderr": 0.02176596167215454, + "acc_norm": 0.2328042328042328, + "acc_norm_stderr": 0.02176596167215454 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.22916666666666666, + "acc_stderr": 0.035146974678623884, + "acc_norm": 0.22916666666666666, + "acc_norm_stderr": 0.035146974678623884 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.23699421965317918, + "acc_stderr": 0.02289408248992599, + "acc_norm": 0.23699421965317918, + "acc_norm_stderr": 0.02289408248992599 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.22699386503067484, + "acc_stderr": 0.03291099578615771, + "acc_norm": 0.22699386503067484, + "acc_norm_stderr": 0.03291099578615771 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2654320987654321, + "acc_stderr": 0.02456922360046085, + "acc_norm": 0.2654320987654321, + "acc_norm_stderr": 0.02456922360046085 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.20207253886010362, + "acc_stderr": 0.02897908979429673, + "acc_norm": 0.20207253886010362, + "acc_norm_stderr": 0.02897908979429673 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.20175438596491227, + "acc_stderr": 0.03775205013583638, + "acc_norm": 0.20175438596491227, + "acc_norm_stderr": 0.03775205013583638 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.1981651376146789, + "acc_stderr": 0.01709057380421789, + "acc_norm": 0.1981651376146789, + "acc_norm_stderr": 0.01709057380421789 + }, + 
"harness|ko_mmlu_formal_logic|5": { + "acc": 0.1984126984126984, + "acc_stderr": 0.035670166752768635, + "acc_norm": 0.1984126984126984, + "acc_norm_stderr": 0.035670166752768635 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.023929155517351284, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.023929155517351284 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.256198347107438, + "acc_stderr": 0.03984979653302872, + "acc_norm": 0.256198347107438, + "acc_norm_stderr": 0.03984979653302872 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.17763157894736842, + "acc_stderr": 0.03110318238312338, + "acc_norm": 0.17763157894736842, + "acc_norm_stderr": 0.03110318238312338 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.25980392156862747, + "acc_stderr": 0.01774089950917779, + "acc_norm": 0.25980392156862747, + "acc_norm_stderr": 0.01774089950917779 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.24113475177304963, + "acc_stderr": 0.025518731049537766, + "acc_norm": 0.24113475177304963, + "acc_norm_stderr": 0.025518731049537766 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.26785714285714285, + "acc_stderr": 0.04203277291467764, + "acc_norm": 0.26785714285714285, + "acc_norm_stderr": 0.04203277291467764 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.25462962962962965, + "acc_stderr": 0.029711275860005344, + "acc_norm": 0.25462962962962965, + "acc_norm_stderr": 0.029711275860005344 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24581005586592178, + "acc_stderr": 0.014400296429225608, + "acc_norm": 0.24581005586592178, + "acc_norm_stderr": 0.014400296429225608 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + 
"acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.16176470588235295, + "acc_stderr": 0.02236867256288675, + "acc_norm": 0.16176470588235295, + "acc_norm_stderr": 0.02236867256288675 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.1836734693877551, + "acc_stderr": 0.024789071332007636, + "acc_norm": 0.1836734693877551, + "acc_norm_stderr": 0.024789071332007636 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2742616033755274, + "acc_stderr": 0.029041333510598046, + "acc_norm": 0.2742616033755274, + "acc_norm_stderr": 0.029041333510598046 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.23663624511082137, + "acc_stderr": 0.010855137351572742, + "acc_norm": 0.23663624511082137, + "acc_norm_stderr": 0.010855137351572742 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.02977177522814563, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.02977177522814563 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.24848484848484848, + "acc_stderr": 0.03374402644139404, + "acc_norm": 0.24848484848484848, + "acc_norm_stderr": 0.03374402644139404 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24112607099143207, + "mc1_stderr": 0.014974827279752329, + "mc2": 0.4094493980194844, + "mc2_stderr": 0.014890936810930833 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.30932703659976385, + "acc_stderr": 0.015891320505520886, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.01701403811929746 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + 
"harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + 
"harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "AIFT/AIFT-instruct-SFT-1.3B-v1.1", + "model_sha": "2aae4491faed1be050cac64de55d0a79288e96a9", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/AIFT/AIFT-instruct-SFT-1.3B-v1.6.2/result_2024-02-27 08:46:20.json b/AIFT/AIFT-instruct-SFT-1.3B-v1.6.2/result_2024-02-27 08:46:20.json new file mode 100644 index 0000000000000000000000000000000000000000..f1066dc359c1bbd766a33d99f753c6e171b83856 --- /dev/null +++ b/AIFT/AIFT-instruct-SFT-1.3B-v1.6.2/result_2024-02-27 08:46:20.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2901023890784983, + "acc_stderr": 0.013261573677520773, + "acc_norm": 0.34215017064846415, + "acc_norm_stderr": 0.013864152159177278 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3567018522206732, + "acc_stderr": 0.0047804672709117636, + "acc_norm": 0.4446325433180641, + "acc_norm_stderr": 0.004959094146471525 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.2573099415204678, + "acc_stderr": 0.03352799844161865, + "acc_norm": 0.2573099415204678, + "acc_norm_stderr": 0.03352799844161865 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.1650485436893204, + "acc_stderr": 0.036756688322331886, + "acc_norm": 0.1650485436893204, + "acc_norm_stderr": 0.036756688322331886 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2413793103448276, + "acc_stderr": 
0.015302380123542089, + "acc_norm": 0.2413793103448276, + "acc_norm_stderr": 0.015302380123542089 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.035914440841969694, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.035914440841969694 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2851063829787234, + "acc_stderr": 0.029513196625539355, + "acc_norm": 0.2851063829787234, + "acc_norm_stderr": 0.029513196625539355 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3253012048192771, + "acc_stderr": 0.03647168523683228, + "acc_norm": 0.3253012048192771, + "acc_norm_stderr": 0.03647168523683228 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.27009646302250806, + "acc_stderr": 0.025218040373410598, + "acc_norm": 0.27009646302250806, + "acc_norm_stderr": 0.025218040373410598 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3183856502242152, + "acc_stderr": 0.03126580522513713, + "acc_norm": 0.3183856502242152, + "acc_norm_stderr": 0.03126580522513713 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.22137404580152673, + "acc_stderr": 0.0364129708131373, + "acc_norm": 0.22137404580152673, + "acc_norm_stderr": 0.0364129708131373 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932269, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932269 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.16161616161616163, + "acc_stderr": 0.026225919863629283, + "acc_norm": 0.16161616161616163, + "acc_norm_stderr": 0.026225919863629283 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2413793103448276, + "acc_stderr": 0.03565998174135302, + "acc_norm": 0.2413793103448276, + "acc_norm_stderr": 0.03565998174135302 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + 
"acc_stderr": 0.04158307533083286, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.04158307533083286 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.23109243697478993, + "acc_stderr": 0.027381406927868963, + "acc_norm": 0.23109243697478993, + "acc_norm_stderr": 0.027381406927868963 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2948717948717949, + "acc_stderr": 0.023119362758232273, + "acc_norm": 0.2948717948717949, + "acc_norm_stderr": 0.023119362758232273 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.26851851851851855, + "acc_stderr": 0.04284467968052191, + "acc_norm": 0.26851851851851855, + "acc_norm_stderr": 0.04284467968052191 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.17733990147783252, + "acc_stderr": 0.026874337276808342, + "acc_norm": 0.17733990147783252, + "acc_norm_stderr": 0.026874337276808342 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.22258064516129034, + "acc_stderr": 0.023664216671642518, + "acc_norm": 0.22258064516129034, + "acc_norm_stderr": 0.023664216671642518 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2606837606837607, + "acc_stderr": 0.028760348956523414, + "acc_norm": 0.2606837606837607, + "acc_norm_stderr": 0.028760348956523414 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.21132075471698114, + "acc_stderr": 0.025125766484827852, + "acc_norm": 0.21132075471698114, + "acc_norm_stderr": 0.025125766484827852 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2636363636363636, + "acc_stderr": 0.04220224692971987, + "acc_norm": 0.2636363636363636, + "acc_norm_stderr": 0.04220224692971987 + }, + 
"harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.026962424325073817, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.026962424325073817 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2185430463576159, + "acc_stderr": 0.03374235550425694, + "acc_norm": 0.2185430463576159, + "acc_norm_stderr": 0.03374235550425694 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.23880597014925373, + "acc_stderr": 0.03014777593540922, + "acc_norm": 0.23880597014925373, + "acc_norm_stderr": 0.03014777593540922 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.23699421965317918, + "acc_stderr": 0.03242414757483098, + "acc_norm": 0.23699421965317918, + "acc_norm_stderr": 0.03242414757483098 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.25132275132275134, + "acc_stderr": 0.022340482339643895, + "acc_norm": 0.25132275132275134, + "acc_norm_stderr": 0.022340482339643895 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.03852084696008534, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.03852084696008534 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2658959537572254, + "acc_stderr": 0.023786203255508283, + "acc_norm": 0.2658959537572254, + "acc_norm_stderr": 0.023786203255508283 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2331288343558282, + "acc_stderr": 0.0332201579577674, + "acc_norm": 0.2331288343558282, + "acc_norm_stderr": 0.0332201579577674 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.25617283950617287, + "acc_stderr": 0.0242885336377261, + "acc_norm": 0.25617283950617287, + 
"acc_norm_stderr": 0.0242885336377261 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.18652849740932642, + "acc_stderr": 0.02811209121011747, + "acc_norm": 0.18652849740932642, + "acc_norm_stderr": 0.02811209121011747 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.21052631578947367, + "acc_stderr": 0.03835153954399421, + "acc_norm": 0.21052631578947367, + "acc_norm_stderr": 0.03835153954399421 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.20733944954128442, + "acc_stderr": 0.017381415563608674, + "acc_norm": 0.20733944954128442, + "acc_norm_stderr": 0.017381415563608674 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.18253968253968253, + "acc_stderr": 0.03455071019102149, + "acc_norm": 0.18253968253968253, + "acc_norm_stderr": 0.03455071019102149 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.023805186524888146, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.023805186524888146 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2396694214876033, + "acc_stderr": 0.038968789850704164, + "acc_norm": 0.2396694214876033, + "acc_norm_stderr": 0.038968789850704164 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.19736842105263158, + "acc_stderr": 0.03238981601699397, + "acc_norm": 0.19736842105263158, + "acc_norm_stderr": 0.03238981601699397 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.017401816711427657, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.017401816711427657 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2730496453900709, + "acc_stderr": 
0.026577860943307857, + "acc_norm": 0.2730496453900709, + "acc_norm_stderr": 0.026577860943307857 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25892857142857145, + "acc_stderr": 0.041577515398656284, + "acc_norm": 0.25892857142857145, + "acc_norm_stderr": 0.041577515398656284 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.41203703703703703, + "acc_stderr": 0.03356787758160835, + "acc_norm": 0.41203703703703703, + "acc_norm_stderr": 0.03356787758160835 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.264804469273743, + "acc_stderr": 0.014756906483260664, + "acc_norm": 0.264804469273743, + "acc_norm_stderr": 0.014756906483260664 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909284, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909284 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.27941176470588236, + "acc_stderr": 0.02725720260611494, + "acc_norm": 0.27941176470588236, + "acc_norm_stderr": 0.02725720260611494 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2, + "acc_stderr": 0.02560737598657916, + "acc_norm": 0.2, + "acc_norm_stderr": 0.02560737598657916 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.31223628691983124, + "acc_stderr": 0.030165137867847, + "acc_norm": 0.31223628691983124, + "acc_norm_stderr": 0.030165137867847 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.24445893089960888, + "acc_stderr": 0.0109764250131139, + "acc_norm": 0.24445893089960888, + "acc_norm_stderr": 0.0109764250131139 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.02933116229425172, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.02933116229425172 + }, + 
"harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.23030303030303031, + "acc_stderr": 0.03287666758603487, + "acc_norm": 0.23030303030303031, + "acc_norm_stderr": 0.03287666758603487 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2386780905752754, + "mc1_stderr": 0.014922629695456416, + "mc2": 0.40837934461063286, + "mc2_stderr": 0.014888690859718215 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.269185360094451, + "acc_stderr": 0.015249098024144538, + "acc_norm": 0.4037780401416765, + "acc_norm_stderr": 0.016869031540298632 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + 
"harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "AIFT/AIFT-instruct-SFT-1.3B-v1.6.2", + "model_sha": "93eb653f28e35bb8e84014db3a5082338b257a3a", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/AIFT/AIFT-instruct-SFT-1.3B-v2.1.1/result_2024-02-27 23:18:23.json b/AIFT/AIFT-instruct-SFT-1.3B-v2.1.1/result_2024-02-27 23:18:23.json new file mode 100644 index 0000000000000000000000000000000000000000..25bdc7688d057d7b61c38c3fb3059f83a330d031 --- /dev/null +++ b/AIFT/AIFT-instruct-SFT-1.3B-v2.1.1/result_2024-02-27 23:18:23.json @@ -0,0 +1,444 @@ +{ + "results": { + 
"harness|ko_arc_challenge|25": { + "acc": 0.27559726962457337, + "acc_stderr": 0.01305716965576184, + "acc_norm": 0.3370307167235495, + "acc_norm_stderr": 0.01381347665290227 + }, + "harness|ko_hellaswag|10": { + "acc": 0.355008962358096, + "acc_stderr": 0.004775380866948017, + "acc_norm": 0.44971121290579563, + "acc_norm_stderr": 0.004964479324552527 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.3684210526315789, + "acc_stderr": 0.036996580176568775, + "acc_norm": 0.3684210526315789, + "acc_norm_stderr": 0.036996580176568775 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.23300970873786409, + "acc_stderr": 0.041858325989283136, + "acc_norm": 0.23300970873786409, + "acc_norm_stderr": 0.041858325989283136 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.24265644955300128, + "acc_stderr": 0.015329888940899863, + "acc_norm": 0.24265644955300128, + "acc_norm_stderr": 0.015329888940899863 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.22962962962962963, + "acc_stderr": 0.03633384414073465, + "acc_norm": 0.22962962962962963, + "acc_norm_stderr": 0.03633384414073465 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.26382978723404255, + "acc_stderr": 0.028809989854102987, + "acc_norm": 0.26382978723404255, + "acc_norm_stderr": 0.028809989854102987 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3433734939759036, + "acc_stderr": 0.03696584317010601, + "acc_norm": 0.3433734939759036, + "acc_norm_stderr": 0.03696584317010601 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.27009646302250806, + "acc_stderr": 0.02521804037341062, + "acc_norm": 0.27009646302250806, + "acc_norm_stderr": 0.02521804037341062 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.2600896860986547, + "acc_stderr": 0.029442495585857476, + "acc_norm": 0.2600896860986547, + "acc_norm_stderr": 
0.029442495585857476 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2748091603053435, + "acc_stderr": 0.03915345408847835, + "acc_norm": 0.2748091603053435, + "acc_norm_stderr": 0.03915345408847835 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.18181818181818182, + "acc_stderr": 0.0274796030105388, + "acc_norm": 0.18181818181818182, + "acc_norm_stderr": 0.0274796030105388 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2413793103448276, + "acc_stderr": 0.03565998174135302, + "acc_norm": 0.2413793103448276, + "acc_norm_stderr": 0.03565998174135302 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.042207736591714534, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.042207736591714534 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.21428571428571427, + "acc_stderr": 0.02665353159671549, + "acc_norm": 0.21428571428571427, + "acc_norm_stderr": 0.02665353159671549 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2205128205128205, + "acc_stderr": 0.0210206726808279, + "acc_norm": 0.2205128205128205, + "acc_norm_stderr": 0.0210206726808279 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.26851851851851855, + "acc_stderr": 0.04284467968052191, + "acc_norm": 0.26851851851851855, + "acc_norm_stderr": 0.04284467968052191 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.1477832512315271, + "acc_stderr": 0.02496962133352127, + "acc_norm": 0.1477832512315271, + 
"acc_norm_stderr": 0.02496962133352127 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.1870967741935484, + "acc_stderr": 0.022185710092252255, + "acc_norm": 0.1870967741935484, + "acc_norm_stderr": 0.022185710092252255 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.3076923076923077, + "acc_stderr": 0.0302363899421731, + "acc_norm": 0.3076923076923077, + "acc_norm_stderr": 0.0302363899421731 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.20754716981132076, + "acc_stderr": 0.024959918028911274, + "acc_norm": 0.20754716981132076, + "acc_norm_stderr": 0.024959918028911274 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2818181818181818, + "acc_stderr": 0.04309118709946459, + "acc_norm": 0.2818181818181818, + "acc_norm_stderr": 0.04309118709946459 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2851851851851852, + "acc_stderr": 0.027528599210340492, + "acc_norm": 0.2851851851851852, + "acc_norm_stderr": 0.027528599210340492 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.1986754966887417, + "acc_stderr": 0.03257847384436775, + "acc_norm": 0.1986754966887417, + "acc_norm_stderr": 0.03257847384436775 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.23383084577114427, + "acc_stderr": 0.02992941540834838, + "acc_norm": 0.23383084577114427, + "acc_norm_stderr": 0.02992941540834838 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2254335260115607, + "acc_stderr": 0.03186209851641143, + "acc_norm": 0.2254335260115607, + "acc_norm_stderr": 0.03186209851641143 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.21164021164021163, + "acc_stderr": 0.02103733150526289, + "acc_norm": 0.21164021164021163, + "acc_norm_stderr": 0.02103733150526289 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2638888888888889, + "acc_stderr": 0.03685651095897532, + "acc_norm": 0.2638888888888889, + "acc_norm_stderr": 0.03685651095897532 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.24, + 
"acc_stderr": 0.04292346959909284, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909284 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.23410404624277456, + "acc_stderr": 0.022797110278071134, + "acc_norm": 0.23410404624277456, + "acc_norm_stderr": 0.022797110278071134 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2331288343558282, + "acc_stderr": 0.0332201579577674, + "acc_norm": 0.2331288343558282, + "acc_norm_stderr": 0.0332201579577674 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.26851851851851855, + "acc_stderr": 0.0246596851859673, + "acc_norm": 0.26851851851851855, + "acc_norm_stderr": 0.0246596851859673 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.19689119170984457, + "acc_stderr": 0.028697873971860677, + "acc_norm": 0.19689119170984457, + "acc_norm_stderr": 0.028697873971860677 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.22807017543859648, + "acc_stderr": 0.03947152782669415, + "acc_norm": 0.22807017543859648, + "acc_norm_stderr": 0.03947152782669415 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.1926605504587156, + "acc_stderr": 0.016909276884936097, + "acc_norm": 0.1926605504587156, + "acc_norm_stderr": 0.016909276884936097 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.18253968253968253, + "acc_stderr": 0.03455071019102149, + "acc_norm": 0.18253968253968253, + "acc_norm_stderr": 0.03455071019102149 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2581699346405229, + "acc_stderr": 0.025058503316958164, + "acc_norm": 0.2581699346405229, + "acc_norm_stderr": 0.025058503316958164 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 
0.19, + "acc_stderr": 0.03942772444036623, + "acc_norm": 0.19, + "acc_norm_stderr": 0.03942772444036623 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.23140495867768596, + "acc_stderr": 0.03849856098794087, + "acc_norm": 0.23140495867768596, + "acc_norm_stderr": 0.03849856098794087 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.17105263157894737, + "acc_stderr": 0.030643607071677105, + "acc_norm": 0.17105263157894737, + "acc_norm_stderr": 0.030643607071677105 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.017630827375148383, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.017630827375148383 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.26595744680851063, + "acc_stderr": 0.026358065698880585, + "acc_norm": 0.26595744680851063, + "acc_norm_stderr": 0.026358065698880585 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.33035714285714285, + "acc_stderr": 0.04464285714285714, + "acc_norm": 0.33035714285714285, + "acc_norm_stderr": 0.04464285714285714 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.23148148148148148, + "acc_stderr": 0.02876511171804694, + "acc_norm": 0.23148148148148148, + "acc_norm_stderr": 0.02876511171804694 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.28044692737430166, + "acc_stderr": 0.01502408388332288, + "acc_norm": 0.28044692737430166, + "acc_norm_stderr": 0.01502408388332288 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.20220588235294118, + "acc_stderr": 0.02439819298665492, + "acc_norm": 0.20220588235294118, + "acc_norm_stderr": 0.02439819298665492 + }, + 
"harness|ko_mmlu_security_studies|5": { + "acc": 0.1836734693877551, + "acc_stderr": 0.024789071332007636, + "acc_norm": 0.1836734693877551, + "acc_norm_stderr": 0.024789071332007636 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2869198312236287, + "acc_stderr": 0.02944377302259469, + "acc_norm": 0.2869198312236287, + "acc_norm_stderr": 0.02944377302259469 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2516297262059974, + "acc_stderr": 0.011083276280441902, + "acc_norm": 0.2516297262059974, + "acc_norm_stderr": 0.011083276280441902 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.030190282453501954, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.030190282453501954 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.22424242424242424, + "acc_stderr": 0.032568666616811015, + "acc_norm": 0.22424242424242424, + "acc_norm_stderr": 0.032568666616811015 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2533659730722154, + "mc1_stderr": 0.01522589934082683, + "mc2": 0.4063966962881522, + "mc2_stderr": 0.01492795604718442 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.34946871310507677, + "acc_stderr": 0.016392797085769843, + "acc_norm": 0.4592680047225502, + "acc_norm_stderr": 0.017133218276537673 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + 
"harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + 
"harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "AIFT/AIFT-instruct-SFT-1.3B-v2.1.1", + "model_sha": "4d434f21f7343f698e1d175cf9e740a0238c9cb9", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/AIFT/AIFT-instruct-SFT-1.3B-v2.1/result_2024-02-26 23:32:31.json b/AIFT/AIFT-instruct-SFT-1.3B-v2.1/result_2024-02-26 23:32:31.json new file mode 100644 index 0000000000000000000000000000000000000000..ee20a9551d828b190afe0d321910cb97d0bae807 --- /dev/null +++ b/AIFT/AIFT-instruct-SFT-1.3B-v2.1/result_2024-02-26 23:32:31.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2696245733788396, + "acc_stderr": 0.01296804068686917, + "acc_norm": 0.3370307167235495, + "acc_norm_stderr": 0.013813476652902276 + }, + "harness|ko_hellaswag|10": { + "acc": 0.35680143397729536, + "acc_stderr": 0.004780764443411318, + "acc_norm": 0.44542919737104164, + "acc_norm_stderr": 0.0049599735147725105 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.30994152046783624, + "acc_stderr": 0.03546976959393161, + "acc_norm": 0.30994152046783624, + "acc_norm_stderr": 0.03546976959393161 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2621359223300971, + "acc_stderr": 0.04354631077260597, + "acc_norm": 0.2621359223300971, + "acc_norm_stderr": 0.04354631077260597 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.24265644955300128, + "acc_stderr": 0.01532988894089986, + "acc_norm": 0.24265644955300128, + "acc_norm_stderr": 0.01532988894089986 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.23703703703703705, + "acc_stderr": 0.03673731683969506, + "acc_norm": 0.23703703703703705, + "acc_norm_stderr": 0.03673731683969506 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + 
"acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3021276595744681, + "acc_stderr": 0.030017554471880554, + "acc_norm": 0.3021276595744681, + "acc_norm_stderr": 0.030017554471880554 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3132530120481928, + "acc_stderr": 0.036108050180310235, + "acc_norm": 0.3132530120481928, + "acc_norm_stderr": 0.036108050180310235 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2540192926045016, + "acc_stderr": 0.02472386150477169, + "acc_norm": 0.2540192926045016, + "acc_norm_stderr": 0.02472386150477169 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.21524663677130046, + "acc_stderr": 0.027584066602208256, + "acc_norm": 0.21524663677130046, + "acc_norm_stderr": 0.027584066602208256 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.20610687022900764, + "acc_stderr": 0.03547771004159464, + "acc_norm": 0.20610687022900764, + "acc_norm_stderr": 0.03547771004159464 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.18686868686868688, + "acc_stderr": 0.027772533334218977, + "acc_norm": 0.18686868686868688, + "acc_norm_stderr": 0.027772533334218977 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2413793103448276, + "acc_stderr": 0.03565998174135302, + "acc_norm": 0.2413793103448276, + "acc_norm_stderr": 0.03565998174135302 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.04158307533083286, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.04158307533083286 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.24369747899159663, + "acc_stderr": 0.02788682807838057, + "acc_norm": 0.24369747899159663, + "acc_norm_stderr": 0.02788682807838057 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2641025641025641, + 
"acc_stderr": 0.02235219373745326, + "acc_norm": 0.2641025641025641, + "acc_norm_stderr": 0.02235219373745326 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.17, + "acc_stderr": 0.03775251680686371, + "acc_norm": 0.17, + "acc_norm_stderr": 0.03775251680686371 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.04330043749650742, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.04330043749650742 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.16748768472906403, + "acc_stderr": 0.026273086047535414, + "acc_norm": 0.16748768472906403, + "acc_norm_stderr": 0.026273086047535414 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.27419354838709675, + "acc_stderr": 0.0253781399708852, + "acc_norm": 0.27419354838709675, + "acc_norm_stderr": 0.0253781399708852 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.029343114798094455, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.029343114798094455 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.23018867924528302, + "acc_stderr": 0.02590789712240817, + "acc_norm": 0.23018867924528302, + "acc_norm_stderr": 0.02590789712240817 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2727272727272727, + "acc_stderr": 0.04265792110940589, + "acc_norm": 0.2727272727272727, + "acc_norm_stderr": 0.04265792110940589 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.02671924078371217, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.02671924078371217 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.037579499229433426, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.037579499229433426 + }, + "harness|ko_mmlu_sociology|5": { + 
"acc": 0.23383084577114427, + "acc_stderr": 0.029929415408348377, + "acc_norm": 0.23383084577114427, + "acc_norm_stderr": 0.029929415408348377 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.21965317919075145, + "acc_stderr": 0.03156809362703173, + "acc_norm": 0.21965317919075145, + "acc_norm_stderr": 0.03156809362703173 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2566137566137566, + "acc_stderr": 0.022494510767503154, + "acc_norm": 0.2566137566137566, + "acc_norm_stderr": 0.022494510767503154 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.22916666666666666, + "acc_stderr": 0.035146974678623884, + "acc_norm": 0.22916666666666666, + "acc_norm_stderr": 0.035146974678623884 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816505, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816505 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.21965317919075145, + "acc_stderr": 0.0222896388526179, + "acc_norm": 0.21965317919075145, + "acc_norm_stderr": 0.0222896388526179 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.22699386503067484, + "acc_stderr": 0.0329109957861577, + "acc_norm": 0.22699386503067484, + "acc_norm_stderr": 0.0329109957861577 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.024922001168886335, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.024922001168886335 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768081, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768081 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.20207253886010362, + "acc_stderr": 0.02897908979429673, + "acc_norm": 0.20207253886010362, + "acc_norm_stderr": 0.02897908979429673 + }, + 
"harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.03999423879281336, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.03999423879281336 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.21284403669724772, + "acc_stderr": 0.017549376389313694, + "acc_norm": 0.21284403669724772, + "acc_norm_stderr": 0.017549376389313694 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.1746031746031746, + "acc_stderr": 0.03395490020856111, + "acc_norm": 0.1746031746031746, + "acc_norm_stderr": 0.03395490020856111 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.023805186524888142, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.023805186524888142 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2809917355371901, + "acc_stderr": 0.04103203830514512, + "acc_norm": 0.2809917355371901, + "acc_norm_stderr": 0.04103203830514512 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.21710526315789475, + "acc_stderr": 0.03355045304882924, + "acc_norm": 0.21710526315789475, + "acc_norm_stderr": 0.03355045304882924 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.24183006535947713, + "acc_stderr": 0.017322789207784326, + "acc_norm": 0.24183006535947713, + "acc_norm_stderr": 0.017322789207784326 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.22340425531914893, + "acc_stderr": 0.024847921358063962, + "acc_norm": 0.22340425531914893, + "acc_norm_stderr": 0.024847921358063962 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.24107142857142858, + "acc_stderr": 0.04059867246952688, + "acc_norm": 0.24107142857142858, + "acc_norm_stderr": 0.04059867246952688 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.32407407407407407, + "acc_stderr": 0.03191923445686185, + "acc_norm": 
0.32407407407407407, + "acc_norm_stderr": 0.03191923445686185 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27262569832402234, + "acc_stderr": 0.014893391735249608, + "acc_norm": 0.27262569832402234, + "acc_norm_stderr": 0.014893391735249608 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.16, + "acc_stderr": 0.036845294917747094, + "acc_norm": 0.16, + "acc_norm_stderr": 0.036845294917747094 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.27941176470588236, + "acc_stderr": 0.027257202606114944, + "acc_norm": 0.27941176470588236, + "acc_norm_stderr": 0.027257202606114944 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.18775510204081633, + "acc_stderr": 0.02500025603954621, + "acc_norm": 0.18775510204081633, + "acc_norm_stderr": 0.02500025603954621 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.34177215189873417, + "acc_stderr": 0.030874537537553617, + "acc_norm": 0.34177215189873417, + "acc_norm_stderr": 0.030874537537553617 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.23598435462842243, + "acc_stderr": 0.01084480266966268, + "acc_norm": 0.23598435462842243, + "acc_norm_stderr": 0.01084480266966268 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.029331162294251728, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.029331162294251728 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.296969696969697, + "acc_stderr": 0.035679697722680474, + "acc_norm": 0.296969696969697, + "acc_norm_stderr": 0.035679697722680474 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.26193390452876375, + "mc1_stderr": 0.015392118805015008, + "mc2": 0.4198042558596364, + "mc2_stderr": 0.0150312470035071 + }, + "harness|ko_commongen_v2|2": { + "acc": 
0.33293978748524206, + "acc_stderr": 0.016202431208373776, + "acc_norm": 0.45218417945690675, + "acc_norm_stderr": 0.017111567130916782 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + 
"harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "AIFT/AIFT-instruct-SFT-1.3B-v2.1", + "model_sha": "0e5b001601e4f2131e800a6a696d1d71469d7356", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/AIFT/AIFT-instruct-SFT-dpo-1.3B-v1.1/result_2024-02-22 23:51:45.json b/AIFT/AIFT-instruct-SFT-dpo-1.3B-v1.1/result_2024-02-22 23:51:45.json new file mode 100644 index 0000000000000000000000000000000000000000..fb26e5ff7e10942ebc28b65e6555ef9debd5c526 --- /dev/null +++ b/AIFT/AIFT-instruct-SFT-dpo-1.3B-v1.1/result_2024-02-22 23:51:45.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.27474402730375425, + "acc_stderr": 0.013044617212771227, + "acc_norm": 0.3412969283276451, + "acc_norm_stderr": 0.013855831287497723 + }, + "harness|ko_hellaswag|10": { + "acc": 0.36128261302529374, + "acc_stderr": 0.004793904922401889, + "acc_norm": 0.4475204142601075, + "acc_norm_stderr": 0.0049622205125483595 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 
0.29239766081871343, + "acc_stderr": 0.03488647713457922, + "acc_norm": 0.29239766081871343, + "acc_norm_stderr": 0.03488647713457922 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2524271844660194, + "acc_stderr": 0.04301250399690877, + "acc_norm": 0.2524271844660194, + "acc_norm_stderr": 0.04301250399690877 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.24010217113665389, + "acc_stderr": 0.015274685213734193, + "acc_norm": 0.24010217113665389, + "acc_norm_stderr": 0.015274685213734193 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.035914440841969694, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.035914440841969694 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2936170212765957, + "acc_stderr": 0.02977164271249123, + "acc_norm": 0.2936170212765957, + "acc_norm_stderr": 0.02977164271249123 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3313253012048193, + "acc_stderr": 0.03664314777288085, + "acc_norm": 0.3313253012048193, + "acc_norm_stderr": 0.03664314777288085 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.26366559485530544, + "acc_stderr": 0.02502553850053234, + "acc_norm": 0.26366559485530544, + "acc_norm_stderr": 0.02502553850053234 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.2914798206278027, + "acc_stderr": 0.030500283176545913, + "acc_norm": 0.2914798206278027, + "acc_norm_stderr": 0.030500283176545913 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.25190839694656486, + "acc_stderr": 0.03807387116306086, + "acc_norm": 0.25190839694656486, + "acc_norm_stderr": 0.03807387116306086 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768077, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768077 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 
0.17676767676767677, + "acc_stderr": 0.027178752639044915, + "acc_norm": 0.17676767676767677, + "acc_norm_stderr": 0.027178752639044915 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2413793103448276, + "acc_stderr": 0.03565998174135302, + "acc_norm": 0.2413793103448276, + "acc_norm_stderr": 0.03565998174135302 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.04280105837364395, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.04280105837364395 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.2184873949579832, + "acc_stderr": 0.02684151432295893, + "acc_norm": 0.2184873949579832, + "acc_norm_stderr": 0.02684151432295893 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2153846153846154, + "acc_stderr": 0.020843034557462878, + "acc_norm": 0.2153846153846154, + "acc_norm_stderr": 0.020843034557462878 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.34, + "acc_stderr": 0.047609522856952344, + "acc_norm": 0.34, + "acc_norm_stderr": 0.047609522856952344 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.17, + "acc_stderr": 0.03775251680686371, + "acc_norm": 0.17, + "acc_norm_stderr": 0.03775251680686371 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.04236511258094633, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.04236511258094633 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2315270935960591, + "acc_stderr": 0.02967833314144446, + "acc_norm": 0.2315270935960591, + "acc_norm_stderr": 0.02967833314144446 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2161290322580645, + "acc_stderr": 0.02341529343356853, + "acc_norm": 0.2161290322580645, + "acc_norm_stderr": 0.02341529343356853 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.24358974358974358, + "acc_stderr": 0.0281209665039144, + "acc_norm": 0.24358974358974358, + "acc_norm_stderr": 0.0281209665039144 + }, + 
"harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.21509433962264152, + "acc_stderr": 0.025288394502891373, + "acc_norm": 0.21509433962264152, + "acc_norm_stderr": 0.025288394502891373 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.34545454545454546, + "acc_stderr": 0.04554619617541054, + "acc_norm": 0.34545454545454546, + "acc_norm_stderr": 0.04554619617541054 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2814814814814815, + "acc_stderr": 0.027420019350945277, + "acc_norm": 0.2814814814814815, + "acc_norm_stderr": 0.027420019350945277 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2119205298013245, + "acc_stderr": 0.033367670865679766, + "acc_norm": 0.2119205298013245, + "acc_norm_stderr": 0.033367670865679766 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.25870646766169153, + "acc_stderr": 0.03096590312357304, + "acc_norm": 0.25870646766169153, + "acc_norm_stderr": 0.03096590312357304 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.20809248554913296, + "acc_stderr": 0.03095289021774988, + "acc_norm": 0.20809248554913296, + "acc_norm_stderr": 0.03095289021774988 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2328042328042328, + "acc_stderr": 0.02176596167215454, + "acc_norm": 0.2328042328042328, + "acc_norm_stderr": 0.02176596167215454 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.22916666666666666, + "acc_stderr": 0.03514697467862388, + "acc_norm": 0.22916666666666666, + "acc_norm_stderr": 0.03514697467862388 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.23410404624277456, + "acc_stderr": 0.022797110278071134, + "acc_norm": 0.23410404624277456, + 
"acc_norm_stderr": 0.022797110278071134 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.22699386503067484, + "acc_stderr": 0.03291099578615771, + "acc_norm": 0.22699386503067484, + "acc_norm_stderr": 0.03291099578615771 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2623456790123457, + "acc_stderr": 0.024477222856135114, + "acc_norm": 0.2623456790123457, + "acc_norm_stderr": 0.024477222856135114 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.20207253886010362, + "acc_stderr": 0.02897908979429673, + "acc_norm": 0.20207253886010362, + "acc_norm_stderr": 0.02897908979429673 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.20175438596491227, + "acc_stderr": 0.03775205013583638, + "acc_norm": 0.20175438596491227, + "acc_norm_stderr": 0.03775205013583638 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.1981651376146789, + "acc_stderr": 0.01709057380421789, + "acc_norm": 0.1981651376146789, + "acc_norm_stderr": 0.01709057380421789 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.18253968253968253, + "acc_stderr": 0.034550710191021496, + "acc_norm": 0.18253968253968253, + "acc_norm_stderr": 0.034550710191021496 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.22875816993464052, + "acc_stderr": 0.024051029739912255, + "acc_norm": 0.22875816993464052, + "acc_norm_stderr": 0.024051029739912255 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2727272727272727, + "acc_stderr": 0.04065578140908705, + "acc_norm": 0.2727272727272727, + "acc_norm_stderr": 0.04065578140908705 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.17763157894736842, + "acc_stderr": 0.03110318238312338, + 
"acc_norm": 0.17763157894736842, + "acc_norm_stderr": 0.03110318238312338 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.25163398692810457, + "acc_stderr": 0.017555818091322267, + "acc_norm": 0.25163398692810457, + "acc_norm_stderr": 0.017555818091322267 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2375886524822695, + "acc_stderr": 0.025389512552729906, + "acc_norm": 0.2375886524822695, + "acc_norm_stderr": 0.025389512552729906 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.29464285714285715, + "acc_stderr": 0.043270409325787296, + "acc_norm": 0.29464285714285715, + "acc_norm_stderr": 0.043270409325787296 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.24537037037037038, + "acc_stderr": 0.02934666509437294, + "acc_norm": 0.24537037037037038, + "acc_norm_stderr": 0.02934666509437294 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.25027932960893856, + "acc_stderr": 0.014487500852850426, + "acc_norm": 0.25027932960893856, + "acc_norm_stderr": 0.014487500852850426 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.17647058823529413, + "acc_stderr": 0.02315746830855935, + "acc_norm": 0.17647058823529413, + "acc_norm_stderr": 0.02315746830855935 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.19183673469387755, + "acc_stderr": 0.025206963154225378, + "acc_norm": 0.19183673469387755, + "acc_norm_stderr": 0.025206963154225378 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2742616033755274, + "acc_stderr": 0.029041333510598046, + "acc_norm": 0.2742616033755274, + "acc_norm_stderr": 0.029041333510598046 + }, + 
"harness|ko_mmlu_professional_law|5": { + "acc": 0.2392438070404172, + "acc_stderr": 0.010896123652676651, + "acc_norm": 0.2392438070404172, + "acc_norm_stderr": 0.010896123652676651 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.24019607843137256, + "acc_stderr": 0.02998373305591361, + "acc_norm": 0.24019607843137256, + "acc_norm_stderr": 0.02998373305591361 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2606060606060606, + "acc_stderr": 0.03427743175816525, + "acc_norm": 0.2606060606060606, + "acc_norm_stderr": 0.03427743175816525 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24969400244798043, + "mc1_stderr": 0.015152286907148123, + "mc2": 0.4130446713954393, + "mc2_stderr": 0.014977317476214325 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.30932703659976385, + "acc_stderr": 0.015891320505520886, + "acc_norm": 0.41204250295159384, + "acc_norm_stderr": 0.01692227673852836 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + 
"harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "AIFT/AIFT-instruct-SFT-dpo-1.3B-v1.1", + "model_sha": "56d98539706359a035a379ae5461cb34620ab5b2", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff 
--git a/AIFT/AIFT-instruct-dpo-v1.3-42dot_LLM-SFT-1.3B/result_2024-02-01 03:31:14.json b/AIFT/AIFT-instruct-dpo-v1.3-42dot_LLM-SFT-1.3B/result_2024-02-01 03:31:14.json new file mode 100644 index 0000000000000000000000000000000000000000..0de5d7726ae4a49024b00f41a95a3f29be957194 --- /dev/null +++ b/AIFT/AIFT-instruct-dpo-v1.3-42dot_LLM-SFT-1.3B/result_2024-02-01 03:31:14.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2901023890784983, + "acc_stderr": 0.013261573677520776, + "acc_norm": 0.3361774744027304, + "acc_norm_stderr": 0.013804855026205756 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3567018522206732, + "acc_stderr": 0.004780467270911769, + "acc_norm": 0.4511053574985063, + "acc_norm_stderr": 0.004965866098318165 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.30994152046783624, + "acc_stderr": 0.035469769593931624, + "acc_norm": 0.30994152046783624, + "acc_norm_stderr": 0.035469769593931624 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2524271844660194, + "acc_stderr": 0.04301250399690876, + "acc_norm": 0.2524271844660194, + "acc_norm_stderr": 0.04301250399690876 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.23116219667943805, + "acc_stderr": 0.015075523238101081, + "acc_norm": 0.23116219667943805, + "acc_norm_stderr": 0.015075523238101081 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.23703703703703705, + "acc_stderr": 0.03673731683969506, + "acc_norm": 0.23703703703703705, + "acc_norm_stderr": 0.03673731683969506 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.225531914893617, + "acc_stderr": 0.02732107841738753, + "acc_norm": 0.225531914893617, + "acc_norm_stderr": 0.02732107841738753 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3313253012048193, + "acc_stderr": 0.03664314777288085, + "acc_norm": 
0.3313253012048193, + "acc_norm_stderr": 0.03664314777288085 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2765273311897106, + "acc_stderr": 0.025403832978179615, + "acc_norm": 0.2765273311897106, + "acc_norm_stderr": 0.025403832978179615 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.2645739910313901, + "acc_stderr": 0.029605103217038336, + "acc_norm": 0.2645739910313901, + "acc_norm_stderr": 0.029605103217038336 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.26717557251908397, + "acc_stderr": 0.03880848301082395, + "acc_norm": 0.26717557251908397, + "acc_norm_stderr": 0.03880848301082395 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.17676767676767677, + "acc_stderr": 0.027178752639044915, + "acc_norm": 0.17676767676767677, + "acc_norm_stderr": 0.027178752639044915 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2482758620689655, + "acc_stderr": 0.036001056927277716, + "acc_norm": 0.2482758620689655, + "acc_norm_stderr": 0.036001056927277716 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04690650298201942, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04690650298201942 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.24789915966386555, + "acc_stderr": 0.028047967224176896, + "acc_norm": 0.24789915966386555, + "acc_norm_stderr": 0.028047967224176896 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.26153846153846155, + "acc_stderr": 0.022282141204204426, + "acc_norm": 0.26153846153846155, + "acc_norm_stderr": 0.022282141204204426 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.24, + "acc_stderr": 
0.04292346959909283, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.043300437496507416, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.043300437496507416 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.16748768472906403, + "acc_stderr": 0.026273086047535407, + "acc_norm": 0.16748768472906403, + "acc_norm_stderr": 0.026273086047535407 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2806451612903226, + "acc_stderr": 0.025560604721022884, + "acc_norm": 0.2806451612903226, + "acc_norm_stderr": 0.025560604721022884 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.29914529914529914, + "acc_stderr": 0.029996951858349497, + "acc_norm": 0.29914529914529914, + "acc_norm_stderr": 0.029996951858349497 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.21132075471698114, + "acc_stderr": 0.025125766484827852, + "acc_norm": 0.21132075471698114, + "acc_norm_stderr": 0.025125766484827852 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.3181818181818182, + "acc_stderr": 0.04461272175910507, + "acc_norm": 0.3181818181818182, + "acc_norm_stderr": 0.04461272175910507 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24074074074074073, + "acc_stderr": 0.026067159222275805, + "acc_norm": 0.24074074074074073, + "acc_norm_stderr": 0.026067159222275805 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33774834437086093, + "acc_stderr": 0.03861557546255169, + "acc_norm": 0.33774834437086093, + "acc_norm_stderr": 0.03861557546255169 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.24875621890547264, + "acc_stderr": 0.030567675938916707, + "acc_norm": 0.24875621890547264, + "acc_norm_stderr": 0.030567675938916707 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.1907514450867052, + "acc_stderr": 0.029957851329869334, + "acc_norm": 0.1907514450867052, + "acc_norm_stderr": 0.029957851329869334 + }, + 
"harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2698412698412698, + "acc_stderr": 0.022860838309232072, + "acc_norm": 0.2698412698412698, + "acc_norm_stderr": 0.022860838309232072 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.20833333333333334, + "acc_stderr": 0.03396116205845333, + "acc_norm": 0.20833333333333334, + "acc_norm_stderr": 0.03396116205845333 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.23699421965317918, + "acc_stderr": 0.02289408248992599, + "acc_norm": 0.23699421965317918, + "acc_norm_stderr": 0.02289408248992599 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.25766871165644173, + "acc_stderr": 0.03436150827846917, + "acc_norm": 0.25766871165644173, + "acc_norm_stderr": 0.03436150827846917 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.27469135802469136, + "acc_stderr": 0.024836057868294677, + "acc_norm": 0.27469135802469136, + "acc_norm_stderr": 0.024836057868294677 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.19689119170984457, + "acc_stderr": 0.028697873971860677, + "acc_norm": 0.19689119170984457, + "acc_norm_stderr": 0.028697873971860677 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.22807017543859648, + "acc_stderr": 0.03947152782669415, + "acc_norm": 0.22807017543859648, + "acc_norm_stderr": 0.03947152782669415 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.21467889908256882, + "acc_stderr": 0.017604304149256483, + "acc_norm": 0.21467889908256882, + 
"acc_norm_stderr": 0.017604304149256483 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.23015873015873015, + "acc_stderr": 0.037649508797906045, + "acc_norm": 0.23015873015873015, + "acc_norm_stderr": 0.037649508797906045 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.024630048979824768, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.024630048979824768 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.3140495867768595, + "acc_stderr": 0.04236964753041019, + "acc_norm": 0.3140495867768595, + "acc_norm_stderr": 0.04236964753041019 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.03459777606810535, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.03459777606810535 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.25, + "acc_stderr": 0.01751781884501444, + "acc_norm": 0.25, + "acc_norm_stderr": 0.01751781884501444 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2765957446808511, + "acc_stderr": 0.026684564340460987, + "acc_norm": 0.2765957446808511, + "acc_norm_stderr": 0.026684564340460987 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.21428571428571427, + "acc_stderr": 0.03894641120044792, + "acc_norm": 0.21428571428571427, + "acc_norm_stderr": 0.03894641120044792 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.031415546294025445, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.031415546294025445 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27262569832402234, + "acc_stderr": 0.014893391735249608, + "acc_norm": 0.27262569832402234, + "acc_norm_stderr": 0.014893391735249608 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + 
"acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.22058823529411764, + "acc_stderr": 0.025187786660227255, + "acc_norm": 0.22058823529411764, + "acc_norm_stderr": 0.025187786660227255 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.20816326530612245, + "acc_stderr": 0.025991117672813296, + "acc_norm": 0.20816326530612245, + "acc_norm_stderr": 0.025991117672813296 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.3206751054852321, + "acc_stderr": 0.030381931949990407, + "acc_norm": 0.3206751054852321, + "acc_norm_stderr": 0.030381931949990407 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2627118644067797, + "acc_stderr": 0.011240545514995676, + "acc_norm": 0.2627118644067797, + "acc_norm_stderr": 0.011240545514995676 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.25, + "acc_stderr": 0.03039153369274154, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03039153369274154 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2727272727272727, + "acc_stderr": 0.0347769116216366, + "acc_norm": 0.2727272727272727, + "acc_norm_stderr": 0.0347769116216366 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.26805385556915545, + "mc1_stderr": 0.015506204722834557, + "mc2": 0.4175867544708941, + "mc2_stderr": 0.014944385749223169 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.26564344746162927, + "acc_stderr": 0.015185107107791253, + "acc_norm": 0.39787485242030696, + "acc_norm_stderr": 0.016827959054733388 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + 
"harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + 
"harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "AIFT/AIFT-instruct-dpo-v1.3-42dot_LLM-SFT-1.3B", + "model_sha": "aa6cc70a444c3c9f933ceefec4e84add872a37b1", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/AIFT/AIFT-instruct-v1.3-42dot_LLM-SFT-1.3B/result_2024-02-01 00:38:50.json b/AIFT/AIFT-instruct-v1.3-42dot_LLM-SFT-1.3B/result_2024-02-01 00:38:50.json new file mode 100644 index 0000000000000000000000000000000000000000..bd6603a3915f7fd2219182b3c5841dea11475783 --- /dev/null +++ b/AIFT/AIFT-instruct-v1.3-42dot_LLM-SFT-1.3B/result_2024-02-01 00:38:50.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2858361774744027, + "acc_stderr": 0.013203196088537364, + "acc_norm": 0.3387372013651877, + "acc_norm_stderr": 0.01383056892797433 + }, + "harness|ko_hellaswag|10": { + "acc": 0.35391356303525195, + "acc_stderr": 0.0047720549044044346, + "acc_norm": 0.450408285202151, + "acc_norm_stderr": 0.004965177633049922 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.30409356725146197, + "acc_stderr": 0.03528211258245231, + "acc_norm": 0.30409356725146197, + "acc_norm_stderr": 0.03528211258245231 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2524271844660194, + "acc_stderr": 0.04301250399690876, + "acc_norm": 0.2524271844660194, + "acc_norm_stderr": 0.04301250399690876 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 
0.22988505747126436, + "acc_stderr": 0.01504630184669182, + "acc_norm": 0.22988505747126436, + "acc_norm_stderr": 0.01504630184669182 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.22962962962962963, + "acc_stderr": 0.03633384414073465, + "acc_norm": 0.22962962962962963, + "acc_norm_stderr": 0.03633384414073465 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2425531914893617, + "acc_stderr": 0.02802022627120022, + "acc_norm": 0.2425531914893617, + "acc_norm_stderr": 0.02802022627120022 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3253012048192771, + "acc_stderr": 0.03647168523683227, + "acc_norm": 0.3253012048192771, + "acc_norm_stderr": 0.03647168523683227 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2733118971061093, + "acc_stderr": 0.02531176597542612, + "acc_norm": 0.2733118971061093, + "acc_norm_stderr": 0.02531176597542612 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.2645739910313901, + "acc_stderr": 0.029605103217038336, + "acc_norm": 0.2645739910313901, + "acc_norm_stderr": 0.029605103217038336 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.26717557251908397, + "acc_stderr": 0.03880848301082395, + "acc_norm": 0.26717557251908397, + "acc_norm_stderr": 0.03880848301082395 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.17676767676767677, + "acc_stderr": 0.027178752639044915, + "acc_norm": 0.17676767676767677, + "acc_norm_stderr": 0.027178752639044915 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.25517241379310346, + "acc_stderr": 0.03632984052707842, + "acc_norm": 0.25517241379310346, + "acc_norm_stderr": 0.03632984052707842 + }, + "harness|ko_mmlu_college_physics|5": 
{ + "acc": 0.3333333333333333, + "acc_stderr": 0.04690650298201942, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04690650298201942 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.24789915966386555, + "acc_stderr": 0.028047967224176896, + "acc_norm": 0.24789915966386555, + "acc_norm_stderr": 0.028047967224176896 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.26153846153846155, + "acc_stderr": 0.022282141204204426, + "acc_norm": 0.26153846153846155, + "acc_norm_stderr": 0.022282141204204426 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816505, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816505 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.043300437496507416, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.043300437496507416 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.15763546798029557, + "acc_stderr": 0.025639014131172408, + "acc_norm": 0.15763546798029557, + "acc_norm_stderr": 0.025639014131172408 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2838709677419355, + "acc_stderr": 0.02564938106302926, + "acc_norm": 0.2838709677419355, + "acc_norm_stderr": 0.02564938106302926 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2863247863247863, + "acc_stderr": 0.029614323690456645, + "acc_norm": 0.2863247863247863, + "acc_norm_stderr": 0.029614323690456645 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.21509433962264152, + "acc_stderr": 0.025288394502891377, + "acc_norm": 0.21509433962264152, + "acc_norm_stderr": 0.025288394502891377 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.3090909090909091, + "acc_stderr": 0.044262946482000985, + "acc_norm": 0.3090909090909091, + "acc_norm_stderr": 
0.044262946482000985 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24814814814814815, + "acc_stderr": 0.026335739404055803, + "acc_norm": 0.24814814814814815, + "acc_norm_stderr": 0.026335739404055803 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.03780445850526733, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.03780445850526733 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.23880597014925373, + "acc_stderr": 0.030147775935409217, + "acc_norm": 0.23880597014925373, + "acc_norm_stderr": 0.030147775935409217 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.19653179190751446, + "acc_stderr": 0.030299574664788147, + "acc_norm": 0.19653179190751446, + "acc_norm_stderr": 0.030299574664788147 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2671957671957672, + "acc_stderr": 0.022789673145776578, + "acc_norm": 0.2671957671957672, + "acc_norm_stderr": 0.022789673145776578 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.20833333333333334, + "acc_stderr": 0.03396116205845333, + "acc_norm": 0.20833333333333334, + "acc_norm_stderr": 0.03396116205845333 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.23699421965317918, + "acc_stderr": 0.02289408248992599, + "acc_norm": 0.23699421965317918, + "acc_norm_stderr": 0.02289408248992599 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.25766871165644173, + "acc_stderr": 0.03436150827846917, + "acc_norm": 0.25766871165644173, + "acc_norm_stderr": 0.03436150827846917 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.27469135802469136, + "acc_stderr": 0.024836057868294677, + 
"acc_norm": 0.27469135802469136, + "acc_norm_stderr": 0.024836057868294677 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.20725388601036268, + "acc_stderr": 0.029252823291803624, + "acc_norm": 0.20725388601036268, + "acc_norm_stderr": 0.029252823291803624 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.22807017543859648, + "acc_stderr": 0.03947152782669415, + "acc_norm": 0.22807017543859648, + "acc_norm_stderr": 0.03947152782669415 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.21284403669724772, + "acc_stderr": 0.017549376389313694, + "acc_norm": 0.21284403669724772, + "acc_norm_stderr": 0.017549376389313694 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.23015873015873015, + "acc_stderr": 0.037649508797906045, + "acc_norm": 0.23015873015873015, + "acc_norm_stderr": 0.037649508797906045 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.24836601307189543, + "acc_stderr": 0.024739981355113592, + "acc_norm": 0.24836601307189543, + "acc_norm_stderr": 0.024739981355113592 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2975206611570248, + "acc_stderr": 0.041733491480834994, + "acc_norm": 0.2975206611570248, + "acc_norm_stderr": 0.041733491480834994 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.03459777606810535, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.03459777606810535 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.01740181671142765, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.01740181671142765 + }, + "harness|ko_mmlu_professional_accounting|5": { + 
"acc": 0.2765957446808511, + "acc_stderr": 0.026684564340460976, + "acc_norm": 0.2765957446808511, + "acc_norm_stderr": 0.026684564340460976 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.20535714285714285, + "acc_stderr": 0.03834241021419073, + "acc_norm": 0.20535714285714285, + "acc_norm_stderr": 0.03834241021419073 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.30092592592592593, + "acc_stderr": 0.03128039084329881, + "acc_norm": 0.30092592592592593, + "acc_norm_stderr": 0.03128039084329881 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27262569832402234, + "acc_stderr": 0.014893391735249608, + "acc_norm": 0.27262569832402234, + "acc_norm_stderr": 0.014893391735249608 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.2426470588235294, + "acc_stderr": 0.02604066247420125, + "acc_norm": 0.2426470588235294, + "acc_norm_stderr": 0.02604066247420125 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.20816326530612245, + "acc_stderr": 0.025991117672813296, + "acc_norm": 0.20816326530612245, + "acc_norm_stderr": 0.025991117672813296 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.3459915611814346, + "acc_stderr": 0.03096481058878671, + "acc_norm": 0.3459915611814346, + "acc_norm_stderr": 0.03096481058878671 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2633637548891786, + "acc_stderr": 0.011249506403605284, + "acc_norm": 0.2633637548891786, + "acc_norm_stderr": 0.011249506403605284 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.030587591351604243, + "acc_norm": 0.2549019607843137, + 
"acc_norm_stderr": 0.030587591351604243 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2787878787878788, + "acc_stderr": 0.035014387062967806, + "acc_norm": 0.2787878787878788, + "acc_norm_stderr": 0.035014387062967806 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.26438188494492043, + "mc1_stderr": 0.015438211119522505, + "mc2": 0.41669987176454626, + "mc2_stderr": 0.014909839390920814 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.269185360094451, + "acc_stderr": 0.015249098024144531, + "acc_norm": 0.39787485242030696, + "acc_norm_stderr": 0.016827959054733388 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + 
"harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "AIFT/AIFT-instruct-v1.3-42dot_LLM-SFT-1.3B", + "model_sha": "a8842b6e8bb0bc14dd1871b5b892ee75be4b88d4", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/AIFT/AIFT-instruct-v1.6-42dot_LLM-SFT-1.3B/result_2024-02-07 07:05:15.json b/AIFT/AIFT-instruct-v1.6-42dot_LLM-SFT-1.3B/result_2024-02-07 07:05:15.json new file mode 100644 index 0000000000000000000000000000000000000000..e18ba115d8a47c45c88c6f44b0fe68312a5c612f --- /dev/null +++ b/AIFT/AIFT-instruct-v1.6-42dot_LLM-SFT-1.3B/result_2024-02-07 
07:05:15.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2713310580204778, + "acc_stderr": 0.012993807727545778, + "acc_norm": 0.33532423208191126, + "acc_norm_stderr": 0.013796182947785568 + }, + "harness|ko_hellaswag|10": { + "acc": 0.35620394343756223, + "acc_stderr": 0.004778978031389643, + "acc_norm": 0.4493128858793069, + "acc_norm_stderr": 0.0049640758701203404 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.3216374269005848, + "acc_stderr": 0.03582529442573122, + "acc_norm": 0.3216374269005848, + "acc_norm_stderr": 0.03582529442573122 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.22330097087378642, + "acc_stderr": 0.04123553189891431, + "acc_norm": 0.22330097087378642, + "acc_norm_stderr": 0.04123553189891431 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.23243933588761176, + "acc_stderr": 0.015104550008905704, + "acc_norm": 0.23243933588761176, + "acc_norm_stderr": 0.015104550008905704 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.1925925925925926, + "acc_stderr": 0.03406542058502651, + "acc_norm": 0.1925925925925926, + "acc_norm_stderr": 0.03406542058502651 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.251063829787234, + "acc_stderr": 0.028346963777162462, + "acc_norm": 0.251063829787234, + "acc_norm_stderr": 0.028346963777162462 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3493975903614458, + "acc_stderr": 0.03711725190740748, + "acc_norm": 0.3493975903614458, + "acc_norm_stderr": 0.03711725190740748 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.27009646302250806, + "acc_stderr": 0.02521804037341062, + "acc_norm": 0.27009646302250806, + "acc_norm_stderr": 0.02521804037341062 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.30493273542600896, + "acc_stderr": 0.030898610882477515, + "acc_norm": 
0.30493273542600896, + "acc_norm_stderr": 0.030898610882477515 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.25190839694656486, + "acc_stderr": 0.03807387116306086, + "acc_norm": 0.25190839694656486, + "acc_norm_stderr": 0.03807387116306086 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.17676767676767677, + "acc_stderr": 0.027178752639044915, + "acc_norm": 0.17676767676767677, + "acc_norm_stderr": 0.027178752639044915 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2413793103448276, + "acc_stderr": 0.03565998174135302, + "acc_norm": 0.2413793103448276, + "acc_norm_stderr": 0.03565998174135302 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.04280105837364395, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.04280105837364395 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.026265024608275882, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.026265024608275882 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.21025641025641026, + "acc_stderr": 0.020660597485026924, + "acc_norm": 0.21025641025641026, + "acc_norm_stderr": 0.020660597485026924 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.22, + "acc_stderr": 0.041633319989322695, + "acc_norm": 0.22, + "acc_norm_stderr": 0.041633319989322695 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.042365112580946336, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.042365112580946336 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.16748768472906403, + 
"acc_stderr": 0.026273086047535414, + "acc_norm": 0.16748768472906403, + "acc_norm_stderr": 0.026273086047535414 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2064516129032258, + "acc_stderr": 0.023025899617188712, + "acc_norm": 0.2064516129032258, + "acc_norm_stderr": 0.023025899617188712 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.26495726495726496, + "acc_stderr": 0.028911208802749472, + "acc_norm": 0.26495726495726496, + "acc_norm_stderr": 0.028911208802749472 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2188679245283019, + "acc_stderr": 0.025447863825108614, + "acc_norm": 0.2188679245283019, + "acc_norm_stderr": 0.025447863825108614 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.23636363636363636, + "acc_stderr": 0.04069306319721375, + "acc_norm": 0.23636363636363636, + "acc_norm_stderr": 0.04069306319721375 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.02730914058823018, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.02730914058823018 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2119205298013245, + "acc_stderr": 0.033367670865679766, + "acc_norm": 0.2119205298013245, + "acc_norm_stderr": 0.033367670865679766 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.24378109452736318, + "acc_stderr": 0.030360490154014645, + "acc_norm": 0.24378109452736318, + "acc_norm_stderr": 0.030360490154014645 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.21965317919075145, + "acc_stderr": 0.03156809362703174, + "acc_norm": 0.21965317919075145, + "acc_norm_stderr": 0.03156809362703174 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.20634920634920634, + "acc_stderr": 0.02084229093011466, + "acc_norm": 0.20634920634920634, + "acc_norm_stderr": 0.02084229093011466 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2708333333333333, + "acc_stderr": 0.03716177437566018, + "acc_norm": 0.2708333333333333, + "acc_norm_stderr": 
0.03716177437566018 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542129, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542129 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.24566473988439305, + "acc_stderr": 0.023176298203992012, + "acc_norm": 0.24566473988439305, + "acc_norm_stderr": 0.023176298203992012 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.22085889570552147, + "acc_stderr": 0.03259177392742178, + "acc_norm": 0.22085889570552147, + "acc_norm_stderr": 0.03259177392742178 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.24382716049382716, + "acc_stderr": 0.0238918795419596, + "acc_norm": 0.24382716049382716, + "acc_norm_stderr": 0.0238918795419596 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.18652849740932642, + "acc_stderr": 0.02811209121011746, + "acc_norm": 0.18652849740932642, + "acc_norm_stderr": 0.02811209121011746 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.04185774424022056, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.04185774424022056 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.1908256880733945, + "acc_stderr": 0.016847676400091105, + "acc_norm": 0.1908256880733945, + "acc_norm_stderr": 0.016847676400091105 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.18253968253968253, + "acc_stderr": 0.03455071019102149, + "acc_norm": 0.18253968253968253, + "acc_norm_stderr": 0.03455071019102149 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.23202614379084968, + "acc_stderr": 0.024170840879341016, + "acc_norm": 0.23202614379084968, + 
"acc_norm_stderr": 0.024170840879341016 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2644628099173554, + "acc_stderr": 0.04026187527591207, + "acc_norm": 0.2644628099173554, + "acc_norm_stderr": 0.04026187527591207 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.21052631578947367, + "acc_stderr": 0.03317672787533157, + "acc_norm": 0.21052631578947367, + "acc_norm_stderr": 0.03317672787533157 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.24673202614379086, + "acc_stderr": 0.0174408203674025, + "acc_norm": 0.24673202614379086, + "acc_norm_stderr": 0.0174408203674025 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.24113475177304963, + "acc_stderr": 0.02551873104953776, + "acc_norm": 0.24113475177304963, + "acc_norm_stderr": 0.02551873104953776 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.24107142857142858, + "acc_stderr": 0.04059867246952685, + "acc_norm": 0.24107142857142858, + "acc_norm_stderr": 0.04059867246952685 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.35648148148148145, + "acc_stderr": 0.032664783315272714, + "acc_norm": 0.35648148148148145, + "acc_norm_stderr": 0.032664783315272714 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27262569832402234, + "acc_stderr": 0.014893391735249608, + "acc_norm": 0.27262569832402234, + "acc_norm_stderr": 0.014893391735249608 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.21, + "acc_stderr": 0.04093601807403326, + "acc_norm": 0.21, + "acc_norm_stderr": 0.04093601807403326 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.23529411764705882, + "acc_stderr": 
0.02576725201085596, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.02576725201085596 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.1836734693877551, + "acc_stderr": 0.02478907133200765, + "acc_norm": 0.1836734693877551, + "acc_norm_stderr": 0.02478907133200765 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.3037974683544304, + "acc_stderr": 0.029936696387138608, + "acc_norm": 0.3037974683544304, + "acc_norm_stderr": 0.029936696387138608 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.23272490221642764, + "acc_stderr": 0.010792595553888472, + "acc_norm": 0.23272490221642764, + "acc_norm_stderr": 0.010792595553888472 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.02977177522814563, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.02977177522814563 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.22424242424242424, + "acc_stderr": 0.032568666616811015, + "acc_norm": 0.22424242424242424, + "acc_norm_stderr": 0.032568666616811015 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2607099143206854, + "mc1_stderr": 0.015368841620766368, + "mc2": 0.4130821616166691, + "mc2_stderr": 0.015021400614528406 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.27036599763872493, + "acc_stderr": 0.015270152942068413, + "acc_norm": 0.3730814639905549, + "acc_norm_stderr": 0.016627318275137422 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + 
"harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + 
"harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "AIFT/AIFT-instruct-v1.6-42dot_LLM-SFT-1.3B", + "model_sha": "5635442a973d21711e330a7b0ba95bd2ca0eaa3d", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/AIFT/AIFT-ko-orca-plat-Yi-ko-6b-refine-v1.2/result_2024-01-23 00:42:27.json b/AIFT/AIFT-ko-orca-plat-Yi-ko-6b-refine-v1.2/result_2024-01-23 00:42:27.json new file mode 100644 index 0000000000000000000000000000000000000000..e41c3d10e14bb26174f22df2618c162372134a21 --- /dev/null +++ b/AIFT/AIFT-ko-orca-plat-Yi-ko-6b-refine-v1.2/result_2024-01-23 00:42:27.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3438566552901024, + "acc_stderr": 0.01388064457015621, + "acc_norm": 0.4206484641638225, + "acc_norm_stderr": 0.0144262112525084 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3999203345947023, + "acc_stderr": 0.00488880500310307, + "acc_norm": 0.527185819557857, + "acc_norm_stderr": 0.004982400368939666 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.45614035087719296, + "acc_stderr": 0.03820042586602966, + "acc_norm": 0.45614035087719296, + "acc_norm_stderr": 0.03820042586602966 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5922330097087378, + "acc_stderr": 0.04865777570410769, + "acc_norm": 0.5922330097087378, + "acc_norm_stderr": 0.04865777570410769 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5504469987228607, + "acc_stderr": 0.017788725283507337, + "acc_norm": 0.5504469987228607, + "acc_norm_stderr": 0.017788725283507337 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.5185185185185185, + "acc_stderr": 0.04316378599511324, + "acc_norm": 0.5185185185185185, + "acc_norm_stderr": 
0.04316378599511324 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.42127659574468085, + "acc_stderr": 0.03227834510146268, + "acc_norm": 0.42127659574468085, + "acc_norm_stderr": 0.03227834510146268 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.40963855421686746, + "acc_stderr": 0.03828401115079021, + "acc_norm": 0.40963855421686746, + "acc_norm_stderr": 0.03828401115079021 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5176848874598071, + "acc_stderr": 0.02838032284907713, + "acc_norm": 0.5176848874598071, + "acc_norm_stderr": 0.02838032284907713 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4618834080717489, + "acc_stderr": 0.033460150119732274, + "acc_norm": 0.4618834080717489, + "acc_norm_stderr": 0.033460150119732274 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5038167938931297, + "acc_stderr": 0.043851623256015534, + "acc_norm": 0.5038167938931297, + "acc_norm_stderr": 0.043851623256015534 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.03427308652999935, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.03427308652999935 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4827586206896552, + "acc_stderr": 0.04164188720169377, + "acc_norm": 0.4827586206896552, + "acc_norm_stderr": 0.04164188720169377 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.041583075330832865, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.041583075330832865 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.44537815126050423, + "acc_stderr": 0.032284106267163895, + "acc_norm": 
0.44537815126050423, + "acc_norm_stderr": 0.032284106267163895 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4358974358974359, + "acc_stderr": 0.025141801511177498, + "acc_norm": 0.4358974358974359, + "acc_norm_stderr": 0.025141801511177498 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.62, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.62, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39408866995073893, + "acc_stderr": 0.03438157967036545, + "acc_norm": 0.39408866995073893, + "acc_norm_stderr": 0.03438157967036545 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.45806451612903226, + "acc_stderr": 0.028343787250540636, + "acc_norm": 0.45806451612903226, + "acc_norm_stderr": 0.028343787250540636 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6965811965811965, + "acc_stderr": 0.030118210106942652, + "acc_norm": 0.6965811965811965, + "acc_norm_stderr": 0.030118210106942652 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4981132075471698, + "acc_stderr": 0.030772653642075657, + "acc_norm": 0.4981132075471698, + "acc_norm_stderr": 0.030772653642075657 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5818181818181818, + "acc_stderr": 0.04724577405731571, + "acc_norm": 0.5818181818181818, + "acc_norm_stderr": 0.04724577405731571 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2851851851851852, + "acc_stderr": 0.027528599210340492, + "acc_norm": 0.2851851851851852, + "acc_norm_stderr": 0.027528599210340492 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33774834437086093, + "acc_stderr": 0.03861557546255168, 
+ "acc_norm": 0.33774834437086093, + "acc_norm_stderr": 0.03861557546255168 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5870646766169154, + "acc_stderr": 0.03481520803367348, + "acc_norm": 0.5870646766169154, + "acc_norm_stderr": 0.03481520803367348 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4046242774566474, + "acc_stderr": 0.03742461193887249, + "acc_norm": 0.4046242774566474, + "acc_norm_stderr": 0.03742461193887249 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3412698412698413, + "acc_stderr": 0.02441923496681906, + "acc_norm": 0.3412698412698413, + "acc_norm_stderr": 0.02441923496681906 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3680555555555556, + "acc_stderr": 0.04032999053960718, + "acc_norm": 0.3680555555555556, + "acc_norm_stderr": 0.04032999053960718 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.48554913294797686, + "acc_stderr": 0.02690784985628254, + "acc_norm": 0.48554913294797686, + "acc_norm_stderr": 0.02690784985628254 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4723926380368098, + "acc_stderr": 0.039223782906109894, + "acc_norm": 0.4723926380368098, + "acc_norm_stderr": 0.039223782906109894 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4876543209876543, + "acc_stderr": 0.027812262269327235, + "acc_norm": 0.4876543209876543, + "acc_norm_stderr": 0.027812262269327235 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5647668393782384, + "acc_stderr": 
0.03578038165008586, + "acc_norm": 0.5647668393782384, + "acc_norm_stderr": 0.03578038165008586 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2982456140350877, + "acc_stderr": 0.04303684033537316, + "acc_norm": 0.2982456140350877, + "acc_norm_stderr": 0.04303684033537316 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5651376146788991, + "acc_stderr": 0.021254631465609273, + "acc_norm": 0.5651376146788991, + "acc_norm_stderr": 0.021254631465609273 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2698412698412698, + "acc_stderr": 0.03970158273235173, + "acc_norm": 0.2698412698412698, + "acc_norm_stderr": 0.03970158273235173 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.434640522875817, + "acc_stderr": 0.028384256704883034, + "acc_norm": 0.434640522875817, + "acc_norm_stderr": 0.028384256704883034 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6694214876033058, + "acc_stderr": 0.04294340845212093, + "acc_norm": 0.6694214876033058, + "acc_norm_stderr": 0.04294340845212093 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4473684210526316, + "acc_stderr": 0.04046336883978251, + "acc_norm": 0.4473684210526316, + "acc_norm_stderr": 0.04046336883978251 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.42483660130718953, + "acc_stderr": 0.019997973035458333, + "acc_norm": 0.42483660130718953, + "acc_norm_stderr": 0.019997973035458333 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32269503546099293, + "acc_stderr": 0.027889139300534778, + "acc_norm": 0.32269503546099293, + "acc_norm_stderr": 0.027889139300534778 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.30357142857142855, + "acc_stderr": 0.043642261558410445, + "acc_norm": 0.30357142857142855, + "acc_norm_stderr": 0.043642261558410445 + }, + 
"harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.03114144782353603, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.03114144782353603 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24581005586592178, + "acc_stderr": 0.014400296429225608, + "acc_norm": 0.24581005586592178, + "acc_norm_stderr": 0.014400296429225608 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.36764705882352944, + "acc_stderr": 0.029289413409403192, + "acc_norm": 0.36764705882352944, + "acc_norm_stderr": 0.029289413409403192 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4122448979591837, + "acc_stderr": 0.03151236044674281, + "acc_norm": 0.4122448979591837, + "acc_norm_stderr": 0.03151236044674281 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5991561181434599, + "acc_stderr": 0.03190080389473235, + "acc_norm": 0.5991561181434599, + "acc_norm_stderr": 0.03190080389473235 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3324641460234681, + "acc_stderr": 0.012032022332260518, + "acc_norm": 0.3324641460234681, + "acc_norm_stderr": 0.012032022332260518 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5294117647058824, + "acc_stderr": 0.03503235296367993, + "acc_norm": 0.5294117647058824, + "acc_norm_stderr": 0.03503235296367993 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.593939393939394, + "acc_stderr": 0.03834816355401181, + "acc_norm": 0.593939393939394, + "acc_norm_stderr": 0.03834816355401181 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.28151774785801714, + "mc1_stderr": 0.01574402724825605, + "mc2": 
0.4217458755495607, + "mc2_stderr": 0.014981565515176903 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5041322314049587, + "acc_stderr": 0.017189767032130814, + "acc_norm": 0.5749704840613932, + "acc_norm_stderr": 0.016996016308362887 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + 
"harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "AIFT/AIFT-ko-orca-plat-Yi-ko-6b-refine-v1.2", + "model_sha": "9002849cbf8c80a7f812a2d284e8c073f77d1d00", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/AIFT/AIFT-ko-orca-plat-Yi-ko-6b-v1.1-dpo/result_2024-01-22 08:54:55.json b/AIFT/AIFT-ko-orca-plat-Yi-ko-6b-v1.1-dpo/result_2024-01-22 08:54:55.json new file mode 100644 index 0000000000000000000000000000000000000000..c8b210277b400e2b6c051282b02606a504cf8813 --- /dev/null +++ b/AIFT/AIFT-ko-orca-plat-Yi-ko-6b-v1.1-dpo/result_2024-01-22 08:54:55.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3532423208191126, + "acc_stderr": 0.013967822714840055, + "acc_norm": 0.4206484641638225, + "acc_norm_stderr": 0.014426211252508397 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3968333001394145, + "acc_stderr": 0.0048824100299354415, + "acc_norm": 
0.5318661621190998, + "acc_norm_stderr": 0.004979637330230312 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4853801169590643, + "acc_stderr": 0.038331852752130205, + "acc_norm": 0.4853801169590643, + "acc_norm_stderr": 0.038331852752130205 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5825242718446602, + "acc_stderr": 0.048828405482122375, + "acc_norm": 0.5825242718446602, + "acc_norm_stderr": 0.048828405482122375 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5440613026819924, + "acc_stderr": 0.017810403925435356, + "acc_norm": 0.5440613026819924, + "acc_norm_stderr": 0.017810403925435356 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45925925925925926, + "acc_stderr": 0.04304979692464244, + "acc_norm": 0.45925925925925926, + "acc_norm_stderr": 0.04304979692464244 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.40425531914893614, + "acc_stderr": 0.032081157507886836, + "acc_norm": 0.40425531914893614, + "acc_norm_stderr": 0.032081157507886836 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.37349397590361444, + "acc_stderr": 0.03765845117168863, + "acc_norm": 0.37349397590361444, + "acc_norm_stderr": 0.03765845117168863 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5209003215434084, + "acc_stderr": 0.028373270961069414, + "acc_norm": 0.5209003215434084, + "acc_norm_stderr": 0.028373270961069414 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4439461883408072, + "acc_stderr": 0.03334625674242728, + "acc_norm": 0.4439461883408072, + "acc_norm_stderr": 0.03334625674242728 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48854961832061067, + "acc_stderr": 0.043841400240780176, + "acc_norm": 0.48854961832061067, + "acc_norm_stderr": 0.043841400240780176 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, 
+ "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6515151515151515, + "acc_stderr": 0.033948539651564025, + "acc_norm": 0.6515151515151515, + "acc_norm_stderr": 0.033948539651564025 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.496551724137931, + "acc_stderr": 0.041665675771015785, + "acc_norm": 0.496551724137931, + "acc_norm_stderr": 0.041665675771015785 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.04158307533083286, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.04158307533083286 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.46638655462184875, + "acc_stderr": 0.03240501447690071, + "acc_norm": 0.46638655462184875, + "acc_norm_stderr": 0.03240501447690071 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4846153846153846, + "acc_stderr": 0.025339003010106522, + "acc_norm": 0.4846153846153846, + "acc_norm_stderr": 0.025339003010106522 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5462962962962963, + "acc_stderr": 0.04812917324536823, + "acc_norm": 0.5462962962962963, + "acc_norm_stderr": 0.04812917324536823 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3645320197044335, + "acc_stderr": 0.0338640574606209, + "acc_norm": 0.3645320197044335, + "acc_norm_stderr": 0.0338640574606209 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.47096774193548385, + "acc_stderr": 0.028396016402761008, + "acc_norm": 0.47096774193548385, + "acc_norm_stderr": 0.028396016402761008 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.688034188034188, + 
"acc_stderr": 0.030351527323344948, + "acc_norm": 0.688034188034188, + "acc_norm_stderr": 0.030351527323344948 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.49433962264150944, + "acc_stderr": 0.03077090076385131, + "acc_norm": 0.49433962264150944, + "acc_norm_stderr": 0.03077090076385131 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5545454545454546, + "acc_stderr": 0.047605488214603246, + "acc_norm": 0.5545454545454546, + "acc_norm_stderr": 0.047605488214603246 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.28888888888888886, + "acc_stderr": 0.02763490726417854, + "acc_norm": 0.28888888888888886, + "acc_norm_stderr": 0.02763490726417854 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + "acc_stderr": 0.037345356767871984, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.037345356767871984 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.582089552238806, + "acc_stderr": 0.034875586404620636, + "acc_norm": 0.582089552238806, + "acc_norm_stderr": 0.034875586404620636 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4046242774566474, + "acc_stderr": 0.0374246119388725, + "acc_norm": 0.4046242774566474, + "acc_norm_stderr": 0.0374246119388725 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3544973544973545, + "acc_stderr": 0.024636830602842, + "acc_norm": 0.3544973544973545, + "acc_norm_stderr": 0.024636830602842 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3194444444444444, + "acc_stderr": 0.038990736873573344, + "acc_norm": 0.3194444444444444, + "acc_norm_stderr": 0.038990736873573344 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.57, + "acc_stderr": 0.04975698519562427, + "acc_norm": 0.57, + "acc_norm_stderr": 0.04975698519562427 + }, + "harness|ko_mmlu_moral_disputes|5": { + 
"acc": 0.48265895953757226, + "acc_stderr": 0.026902900458666647, + "acc_norm": 0.48265895953757226, + "acc_norm_stderr": 0.026902900458666647 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4723926380368098, + "acc_stderr": 0.039223782906109894, + "acc_norm": 0.4723926380368098, + "acc_norm_stderr": 0.039223782906109894 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4783950617283951, + "acc_stderr": 0.027794760105008746, + "acc_norm": 0.4783950617283951, + "acc_norm_stderr": 0.027794760105008746 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5958549222797928, + "acc_stderr": 0.0354150857888402, + "acc_norm": 0.5958549222797928, + "acc_norm_stderr": 0.0354150857888402 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.30701754385964913, + "acc_stderr": 0.0433913832257986, + "acc_norm": 0.30701754385964913, + "acc_norm_stderr": 0.0433913832257986 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6, + "acc_stderr": 0.02100420126042008, + "acc_norm": 0.6, + "acc_norm_stderr": 0.02100420126042008 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30158730158730157, + "acc_stderr": 0.04104947269903394, + "acc_norm": 0.30158730158730157, + "acc_norm_stderr": 0.04104947269903394 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.477124183006536, + "acc_stderr": 0.028599936776089782, + "acc_norm": 0.477124183006536, + "acc_norm_stderr": 0.028599936776089782 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6776859504132231, + "acc_stderr": 0.04266416363352168, + "acc_norm": 0.6776859504132231, + "acc_norm_stderr": 0.04266416363352168 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 
0.4407894736842105, + "acc_stderr": 0.04040311062490436, + "acc_norm": 0.4407894736842105, + "acc_norm_stderr": 0.04040311062490436 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4068627450980392, + "acc_stderr": 0.019873802005061177, + "acc_norm": 0.4068627450980392, + "acc_norm_stderr": 0.019873802005061177 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32269503546099293, + "acc_stderr": 0.02788913930053478, + "acc_norm": 0.32269503546099293, + "acc_norm_stderr": 0.02788913930053478 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.042878587513404544, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.042878587513404544 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.33796296296296297, + "acc_stderr": 0.03225941352631295, + "acc_norm": 0.33796296296296297, + "acc_norm_stderr": 0.03225941352631295 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27039106145251396, + "acc_stderr": 0.01485499393801008, + "acc_norm": 0.27039106145251396, + "acc_norm_stderr": 0.01485499393801008 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.029520095697687754, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.029520095697687754 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.35918367346938773, + "acc_stderr": 0.030713560455108493, + "acc_norm": 0.35918367346938773, + "acc_norm_stderr": 0.030713560455108493 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6118143459915611, + "acc_stderr": 0.031722950043323296, + "acc_norm": 0.6118143459915611, + 
"acc_norm_stderr": 0.031722950043323296 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.33116036505867014, + "acc_stderr": 0.01202012819598577, + "acc_norm": 0.33116036505867014, + "acc_norm_stderr": 0.01202012819598577 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5441176470588235, + "acc_stderr": 0.03495624522015476, + "acc_norm": 0.5441176470588235, + "acc_norm_stderr": 0.03495624522015476 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5757575757575758, + "acc_stderr": 0.03859268142070265, + "acc_norm": 0.5757575757575758, + "acc_norm_stderr": 0.03859268142070265 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.29008567931456547, + "mc1_stderr": 0.01588623687420952, + "mc2": 0.42521496532720787, + "mc2_stderr": 0.014980622040261423 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5726092089728453, + "acc_stderr": 0.017008129844823153, + "acc_norm": 0.6375442739079102, + "acc_norm_stderr": 0.016527131240453696 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + 
"harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "AIFT/AIFT-ko-orca-plat-Yi-ko-6b-v1.1-dpo", + "model_sha": "d580b50b1a7f8afd838340f8c27e0c5e9f48b7b5", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + 
} +} \ No newline at end of file diff --git a/AIFT/AIFT-ko-orca-plat-Yi-ko-6b-v1.1/result_2024-01-22 08:32:16.json b/AIFT/AIFT-ko-orca-plat-Yi-ko-6b-v1.1/result_2024-01-22 08:32:16.json new file mode 100644 index 0000000000000000000000000000000000000000..905b09b656355a3732e41f7580fab79ac0680402 --- /dev/null +++ b/AIFT/AIFT-ko-orca-plat-Yi-ko-6b-v1.1/result_2024-01-22 08:32:16.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.35580204778157, + "acc_stderr": 0.013990571137918758, + "acc_norm": 0.42150170648464164, + "acc_norm_stderr": 0.014430197069326023 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3963353913563035, + "acc_stderr": 0.004881359589148994, + "acc_norm": 0.5313682533359888, + "acc_norm_stderr": 0.004979952166595551 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4853801169590643, + "acc_stderr": 0.038331852752130205, + "acc_norm": 0.4853801169590643, + "acc_norm_stderr": 0.038331852752130205 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5825242718446602, + "acc_stderr": 0.048828405482122375, + "acc_norm": 0.5825242718446602, + "acc_norm_stderr": 0.048828405482122375 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5427841634738186, + "acc_stderr": 0.017814385238534437, + "acc_norm": 0.5427841634738186, + "acc_norm_stderr": 0.017814385238534437 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45925925925925926, + "acc_stderr": 0.04304979692464244, + "acc_norm": 0.45925925925925926, + "acc_norm_stderr": 0.04304979692464244 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4085106382978723, + "acc_stderr": 0.03213418026701576, + "acc_norm": 0.4085106382978723, + "acc_norm_stderr": 0.03213418026701576 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.37349397590361444, + "acc_stderr": 0.03765845117168863, + "acc_norm": 
0.37349397590361444, + "acc_norm_stderr": 0.03765845117168863 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5241157556270096, + "acc_stderr": 0.028365041542564584, + "acc_norm": 0.5241157556270096, + "acc_norm_stderr": 0.028365041542564584 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4439461883408072, + "acc_stderr": 0.03334625674242728, + "acc_norm": 0.4439461883408072, + "acc_norm_stderr": 0.03334625674242728 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48854961832061067, + "acc_stderr": 0.043841400240780176, + "acc_norm": 0.48854961832061067, + "acc_norm_stderr": 0.043841400240780176 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.44, + "acc_stderr": 0.0498887651569859, + "acc_norm": 0.44, + "acc_norm_stderr": 0.0498887651569859 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6515151515151515, + "acc_stderr": 0.033948539651564025, + "acc_norm": 0.6515151515151515, + "acc_norm_stderr": 0.033948539651564025 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.496551724137931, + "acc_stderr": 0.041665675771015785, + "acc_norm": 0.496551724137931, + "acc_norm_stderr": 0.041665675771015785 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.042207736591714534, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.042207736591714534 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.46638655462184875, + "acc_stderr": 0.03240501447690071, + "acc_norm": 0.46638655462184875, + "acc_norm_stderr": 0.03240501447690071 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4794871794871795, + "acc_stderr": 0.025329663163489943, + "acc_norm": 0.4794871794871795, + "acc_norm_stderr": 0.025329663163489943 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 
0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5370370370370371, + "acc_stderr": 0.04820403072760627, + "acc_norm": 0.5370370370370371, + "acc_norm_stderr": 0.04820403072760627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3645320197044335, + "acc_stderr": 0.0338640574606209, + "acc_norm": 0.3645320197044335, + "acc_norm_stderr": 0.0338640574606209 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.46774193548387094, + "acc_stderr": 0.028384747788813322, + "acc_norm": 0.46774193548387094, + "acc_norm_stderr": 0.028384747788813322 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6837606837606838, + "acc_stderr": 0.03046365674734026, + "acc_norm": 0.6837606837606838, + "acc_norm_stderr": 0.03046365674734026 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4981132075471698, + "acc_stderr": 0.030772653642075664, + "acc_norm": 0.4981132075471698, + "acc_norm_stderr": 0.030772653642075664 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.04769300568972744, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.04769300568972744 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.28888888888888886, + "acc_stderr": 0.02763490726417854, + "acc_norm": 0.28888888888888886, + "acc_norm_stderr": 0.02763490726417854 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + "acc_stderr": 0.037345356767871984, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.037345356767871984 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.572139303482587, + "acc_stderr": 0.03498541988407795, + "acc_norm": 0.572139303482587, + "acc_norm_stderr": 0.03498541988407795 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4046242774566474, + "acc_stderr": 0.0374246119388725, + "acc_norm": 0.4046242774566474, + "acc_norm_stderr": 0.0374246119388725 + }, + 
"harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3544973544973545, + "acc_stderr": 0.024636830602842, + "acc_norm": 0.3544973544973545, + "acc_norm_stderr": 0.024636830602842 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3263888888888889, + "acc_stderr": 0.03921067198982266, + "acc_norm": 0.3263888888888889, + "acc_norm_stderr": 0.03921067198982266 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.57, + "acc_stderr": 0.04975698519562427, + "acc_norm": 0.57, + "acc_norm_stderr": 0.04975698519562427 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4797687861271676, + "acc_stderr": 0.026897049996382875, + "acc_norm": 0.4797687861271676, + "acc_norm_stderr": 0.026897049996382875 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4723926380368098, + "acc_stderr": 0.039223782906109894, + "acc_norm": 0.4723926380368098, + "acc_norm_stderr": 0.039223782906109894 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4783950617283951, + "acc_stderr": 0.027794760105008746, + "acc_norm": 0.4783950617283951, + "acc_norm_stderr": 0.027794760105008746 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939098, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939098 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5958549222797928, + "acc_stderr": 0.0354150857888402, + "acc_norm": 0.5958549222797928, + "acc_norm_stderr": 0.0354150857888402 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.30701754385964913, + "acc_stderr": 0.0433913832257986, + "acc_norm": 0.30701754385964913, + "acc_norm_stderr": 0.0433913832257986 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5963302752293578, + "acc_stderr": 0.021035704856574966, + "acc_norm": 0.5963302752293578, + "acc_norm_stderr": 
0.021035704856574966 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.04134913018303316, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.04134913018303316 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.477124183006536, + "acc_stderr": 0.028599936776089782, + "acc_norm": 0.477124183006536, + "acc_norm_stderr": 0.028599936776089782 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6776859504132231, + "acc_stderr": 0.04266416363352168, + "acc_norm": 0.6776859504132231, + "acc_norm_stderr": 0.04266416363352168 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4407894736842105, + "acc_stderr": 0.04040311062490436, + "acc_norm": 0.4407894736842105, + "acc_norm_stderr": 0.04040311062490436 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4019607843137255, + "acc_stderr": 0.019835176484375376, + "acc_norm": 0.4019607843137255, + "acc_norm_stderr": 0.019835176484375376 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3262411347517731, + "acc_stderr": 0.02796845304356317, + "acc_norm": 0.3262411347517731, + "acc_norm_stderr": 0.02796845304356317 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.042878587513404544, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.042878587513404544 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.33796296296296297, + "acc_stderr": 0.03225941352631295, + "acc_norm": 0.33796296296296297, + "acc_norm_stderr": 0.03225941352631295 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2737430167597765, + "acc_stderr": 0.014912413096372432, + "acc_norm": 0.2737430167597765, + "acc_norm_stderr": 0.014912413096372432 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + 
"acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3860294117647059, + "acc_stderr": 0.029573269134411124, + "acc_norm": 0.3860294117647059, + "acc_norm_stderr": 0.029573269134411124 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3551020408163265, + "acc_stderr": 0.030635655150387638, + "acc_norm": 0.3551020408163265, + "acc_norm_stderr": 0.030635655150387638 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6118143459915611, + "acc_stderr": 0.031722950043323296, + "acc_norm": 0.6118143459915611, + "acc_norm_stderr": 0.031722950043323296 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3305084745762712, + "acc_stderr": 0.012014142101842982, + "acc_norm": 0.3305084745762712, + "acc_norm_stderr": 0.012014142101842982 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5490196078431373, + "acc_stderr": 0.034924061041636124, + "acc_norm": 0.5490196078431373, + "acc_norm_stderr": 0.034924061041636124 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5757575757575758, + "acc_stderr": 0.03859268142070265, + "acc_norm": 0.5757575757575758, + "acc_norm_stderr": 0.03859268142070265 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2876376988984088, + "mc1_stderr": 0.015846315101394816, + "mc2": 0.42506050802099943, + "mc2_stderr": 0.01496896371952545 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5655253837072018, + "acc_stderr": 0.01704209862082493, + "acc_norm": 0.6292798110979929, + "acc_norm_stderr": 0.016605801289212616 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + 
"harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + 
"harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "AIFT/AIFT-ko-orca-plat-Yi-ko-6b-v1.1", + "model_sha": "5c8450e74c9a874b8ab69e28ce3cbaf4000207b2", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/AIFT/AIFT-ko-orca-plat-Yi-ko-6b-v1.2-dpo-2/result_2024-01-25 07:59:02.json b/AIFT/AIFT-ko-orca-plat-Yi-ko-6b-v1.2-dpo-2/result_2024-01-25 07:59:02.json new file mode 100644 index 0000000000000000000000000000000000000000..9d0b108b39a2e757247f7197fb1f71a7117f9c05 --- /dev/null +++ b/AIFT/AIFT-ko-orca-plat-Yi-ko-6b-v1.2-dpo-2/result_2024-01-25 07:59:02.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3438566552901024, + "acc_stderr": 0.01388064457015621, + "acc_norm": 0.38822525597269625, + "acc_norm_stderr": 0.014241614207414046 + }, + "harness|ko_hellaswag|10": { + "acc": 0.39494124676359293, + "acc_stderr": 0.004878390226591714, + "acc_norm": 0.5264887472615017, + "acc_norm_stderr": 0.004982774293927772 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.49707602339181284, + "acc_stderr": 0.03834759370936839, + "acc_norm": 0.49707602339181284, + "acc_norm_stderr": 0.03834759370936839 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6116504854368932, + "acc_stderr": 0.048257293373563895, + "acc_norm": 0.6116504854368932, + "acc_norm_stderr": 0.048257293373563895 + }, + 
"harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5134099616858238, + "acc_stderr": 0.017873531736510385, + "acc_norm": 0.5134099616858238, + "acc_norm_stderr": 0.017873531736510385 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4962962962962963, + "acc_stderr": 0.043192236258113303, + "acc_norm": 0.4962962962962963, + "acc_norm_stderr": 0.043192236258113303 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621502, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621502 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.41702127659574467, + "acc_stderr": 0.03223276266711712, + "acc_norm": 0.41702127659574467, + "acc_norm_stderr": 0.03223276266711712 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3674698795180723, + "acc_stderr": 0.03753267402120574, + "acc_norm": 0.3674698795180723, + "acc_norm_stderr": 0.03753267402120574 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5048231511254019, + "acc_stderr": 0.028396770444111298, + "acc_norm": 0.5048231511254019, + "acc_norm_stderr": 0.028396770444111298 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.43946188340807174, + "acc_stderr": 0.03331092511038179, + "acc_norm": 0.43946188340807174, + "acc_norm_stderr": 0.03331092511038179 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.45038167938931295, + "acc_stderr": 0.04363643698524779, + "acc_norm": 0.45038167938931295, + "acc_norm_stderr": 0.04363643698524779 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.42, + "acc_stderr": 0.04960449637488583, + "acc_norm": 0.42, + "acc_norm_stderr": 0.04960449637488583 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5656565656565656, + "acc_stderr": 0.03531505879359182, + "acc_norm": 0.5656565656565656, + "acc_norm_stderr": 0.03531505879359182 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.45517241379310347, + "acc_stderr": 0.04149886942192117, + "acc_norm": 0.45517241379310347, + "acc_norm_stderr": 0.04149886942192117 
+ }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.17647058823529413, + "acc_stderr": 0.03793281185307809, + "acc_norm": 0.17647058823529413, + "acc_norm_stderr": 0.03793281185307809 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.47478991596638653, + "acc_stderr": 0.0324371805513741, + "acc_norm": 0.47478991596638653, + "acc_norm_stderr": 0.0324371805513741 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4205128205128205, + "acc_stderr": 0.025028610276710855, + "acc_norm": 0.4205128205128205, + "acc_norm_stderr": 0.025028610276710855 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.35467980295566504, + "acc_stderr": 0.03366124489051448, + "acc_norm": 0.35467980295566504, + "acc_norm_stderr": 0.03366124489051448 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4161290322580645, + "acc_stderr": 0.02804098138076155, + "acc_norm": 0.4161290322580645, + "acc_norm_stderr": 0.02804098138076155 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6623931623931624, + "acc_stderr": 0.030980296992618554, + "acc_norm": 0.6623931623931624, + "acc_norm_stderr": 0.030980296992618554 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.44150943396226416, + "acc_stderr": 0.030561590426731837, + "acc_norm": 0.44150943396226416, + "acc_norm_stderr": 0.030561590426731837 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4727272727272727, + "acc_stderr": 0.04782001791380063, + "acc_norm": 0.4727272727272727, + "acc_norm_stderr": 
0.04782001791380063 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.027309140588230186, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.027309140588230186 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.32450331125827814, + "acc_stderr": 0.038227469376587525, + "acc_norm": 0.32450331125827814, + "acc_norm_stderr": 0.038227469376587525 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5373134328358209, + "acc_stderr": 0.03525675167467974, + "acc_norm": 0.5373134328358209, + "acc_norm_stderr": 0.03525675167467974 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.37572254335260113, + "acc_stderr": 0.03692820767264867, + "acc_norm": 0.37572254335260113, + "acc_norm_stderr": 0.03692820767264867 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30423280423280424, + "acc_stderr": 0.023695415009463087, + "acc_norm": 0.30423280423280424, + "acc_norm_stderr": 0.023695415009463087 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3819444444444444, + "acc_stderr": 0.040629907841466674, + "acc_norm": 0.3819444444444444, + "acc_norm_stderr": 0.040629907841466674 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.53, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.53, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4797687861271676, + "acc_stderr": 0.026897049996382875, + "acc_norm": 0.4797687861271676, + "acc_norm_stderr": 0.026897049996382875 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4110429447852761, + "acc_stderr": 0.038656978537853624, + "acc_norm": 0.4110429447852761, + "acc_norm_stderr": 0.038656978537853624 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.49382716049382713, + "acc_stderr": 0.027818623962583302, + "acc_norm": 
0.49382716049382713, + "acc_norm_stderr": 0.027818623962583302 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5077720207253886, + "acc_stderr": 0.03608003225569654, + "acc_norm": 0.5077720207253886, + "acc_norm_stderr": 0.03608003225569654 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3508771929824561, + "acc_stderr": 0.044895393502706986, + "acc_norm": 0.3508771929824561, + "acc_norm_stderr": 0.044895393502706986 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5486238532110091, + "acc_stderr": 0.0213357147112688, + "acc_norm": 0.5486238532110091, + "acc_norm_stderr": 0.0213357147112688 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2698412698412698, + "acc_stderr": 0.03970158273235173, + "acc_norm": 0.2698412698412698, + "acc_norm_stderr": 0.03970158273235173 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.028431095444176647, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.028431095444176647 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6198347107438017, + "acc_stderr": 0.04431324501968431, + "acc_norm": 0.6198347107438017, + "acc_norm_stderr": 0.04431324501968431 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40131578947368424, + "acc_stderr": 0.039889037033362836, + "acc_norm": 0.40131578947368424, + "acc_norm_stderr": 0.039889037033362836 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4035947712418301, + "acc_stderr": 0.01984828016840116, + "acc_norm": 0.4035947712418301, + "acc_norm_stderr": 0.01984828016840116 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2907801418439716, + 
"acc_stderr": 0.027090664368353178, + "acc_norm": 0.2907801418439716, + "acc_norm_stderr": 0.027090664368353178 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2767857142857143, + "acc_stderr": 0.04246624336697623, + "acc_norm": 0.2767857142857143, + "acc_norm_stderr": 0.04246624336697623 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.30092592592592593, + "acc_stderr": 0.03128039084329882, + "acc_norm": 0.30092592592592593, + "acc_norm_stderr": 0.03128039084329882 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2759776536312849, + "acc_stderr": 0.014950103002475342, + "acc_norm": 0.2759776536312849, + "acc_norm_stderr": 0.014950103002475342 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3088235294117647, + "acc_stderr": 0.028064998167040094, + "acc_norm": 0.3088235294117647, + "acc_norm_stderr": 0.028064998167040094 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.363265306122449, + "acc_stderr": 0.030789051139030806, + "acc_norm": 0.363265306122449, + "acc_norm_stderr": 0.030789051139030806 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5569620253164557, + "acc_stderr": 0.032335327775334835, + "acc_norm": 0.5569620253164557, + "acc_norm_stderr": 0.032335327775334835 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.32333767926988266, + "acc_stderr": 0.011946565758447198, + "acc_norm": 0.32333767926988266, + "acc_norm_stderr": 0.011946565758447198 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4852941176470588, + "acc_stderr": 0.03507793834791324, + "acc_norm": 0.4852941176470588, + "acc_norm_stderr": 0.03507793834791324 + }, + 
"harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5575757575757576, + "acc_stderr": 0.03878372113711275, + "acc_norm": 0.5575757575757576, + "acc_norm_stderr": 0.03878372113711275 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3011015911872705, + "mc1_stderr": 0.016058999026100626, + "mc2": 0.44231342468769663, + "mc2_stderr": 0.015175444883335621 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5938606847697757, + "acc_stderr": 0.016884749503191396, + "acc_norm": 0.6304604486422668, + "acc_norm_stderr": 0.016594883405685438 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + 
"harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "AIFT/AIFT-ko-orca-plat-Yi-ko-6b-v1.2-dpo-2", + "model_sha": "59b2795fd27a846c4f26c6e71fb5fb72574d0ff9", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/AIFT/AIFT-ko-orca-plat-Yi-ko-6b-v1.2-dpo-3/result_2024-01-30 00:13:23.json b/AIFT/AIFT-ko-orca-plat-Yi-ko-6b-v1.2-dpo-3/result_2024-01-30 00:13:23.json new file mode 100644 index 0000000000000000000000000000000000000000..f2b9c67839c40c17a88100c97e1d8e1c680e2aa5 --- /dev/null +++ b/AIFT/AIFT-ko-orca-plat-Yi-ko-6b-v1.2-dpo-3/result_2024-01-30 00:13:23.json @@ -0,0 +1,444 @@ +{ + 
"results": { + "harness|ko_arc_challenge|25": { + "acc": 0.36006825938566556, + "acc_stderr": 0.01402751681458519, + "acc_norm": 0.4112627986348123, + "acc_norm_stderr": 0.01437944106852208 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3995220075682135, + "acc_stderr": 0.0048879912259502875, + "acc_norm": 0.5293766182035451, + "acc_norm_stderr": 0.004981161746388227 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.49122807017543857, + "acc_stderr": 0.038342347441649924, + "acc_norm": 0.49122807017543857, + "acc_norm_stderr": 0.038342347441649924 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6116504854368932, + "acc_stderr": 0.04825729337356389, + "acc_norm": 0.6116504854368932, + "acc_norm_stderr": 0.04825729337356389 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5134099616858238, + "acc_stderr": 0.017873531736510385, + "acc_norm": 0.5134099616858238, + "acc_norm_stderr": 0.017873531736510385 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.5037037037037037, + "acc_stderr": 0.04319223625811331, + "acc_norm": 0.5037037037037037, + "acc_norm_stderr": 0.04319223625811331 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.34, + "acc_stderr": 0.047609522856952365, + "acc_norm": 0.34, + "acc_norm_stderr": 0.047609522856952365 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4, + "acc_stderr": 0.03202563076101736, + "acc_norm": 0.4, + "acc_norm_stderr": 0.03202563076101736 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3674698795180723, + "acc_stderr": 0.03753267402120574, + "acc_norm": 0.3674698795180723, + "acc_norm_stderr": 0.03753267402120574 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5112540192926045, + "acc_stderr": 0.028390897396863526, + "acc_norm": 0.5112540192926045, + "acc_norm_stderr": 0.028390897396863526 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.43946188340807174, + "acc_stderr": 0.03331092511038179, + "acc_norm": 0.43946188340807174, + "acc_norm_stderr": 0.03331092511038179 + }, + 
"harness|ko_mmlu_human_sexuality|5": { + "acc": 0.44274809160305345, + "acc_stderr": 0.0435644720266507, + "acc_norm": 0.44274809160305345, + "acc_norm_stderr": 0.0435644720266507 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.41, + "acc_stderr": 0.04943110704237103, + "acc_norm": 0.41, + "acc_norm_stderr": 0.04943110704237103 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5757575757575758, + "acc_stderr": 0.03521224908841586, + "acc_norm": 0.5757575757575758, + "acc_norm_stderr": 0.03521224908841586 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4827586206896552, + "acc_stderr": 0.04164188720169377, + "acc_norm": 0.4827586206896552, + "acc_norm_stderr": 0.04164188720169377 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.19607843137254902, + "acc_stderr": 0.03950581861179963, + "acc_norm": 0.19607843137254902, + "acc_norm_stderr": 0.03950581861179963 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4789915966386555, + "acc_stderr": 0.03244980849990029, + "acc_norm": 0.4789915966386555, + "acc_norm_stderr": 0.03244980849990029 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4205128205128205, + "acc_stderr": 0.025028610276710855, + "acc_norm": 0.4205128205128205, + "acc_norm_stderr": 0.025028610276710855 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.58, + "acc_stderr": 0.04960449637488583, + "acc_norm": 0.58, + "acc_norm_stderr": 0.04960449637488583 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5185185185185185, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.5185185185185185, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.35960591133004927, + "acc_stderr": 0.03376458246509567, + "acc_norm": 0.35960591133004927, + "acc_norm_stderr": 
0.03376458246509567 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4290322580645161, + "acc_stderr": 0.02815603653823321, + "acc_norm": 0.4290322580645161, + "acc_norm_stderr": 0.02815603653823321 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6623931623931624, + "acc_stderr": 0.030980296992618558, + "acc_norm": 0.6623931623931624, + "acc_norm_stderr": 0.030980296992618558 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.43018867924528303, + "acc_stderr": 0.030471445867183235, + "acc_norm": 0.43018867924528303, + "acc_norm_stderr": 0.030471445867183235 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5, + "acc_stderr": 0.04789131426105757, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04789131426105757 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.28888888888888886, + "acc_stderr": 0.02763490726417854, + "acc_norm": 0.28888888888888886, + "acc_norm_stderr": 0.02763490726417854 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.038020397601079024, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.038020397601079024 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.527363184079602, + "acc_stderr": 0.03530235517334682, + "acc_norm": 0.527363184079602, + "acc_norm_stderr": 0.03530235517334682 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.36416184971098264, + "acc_stderr": 0.03669072477416908, + "acc_norm": 0.36416184971098264, + "acc_norm_stderr": 0.03669072477416908 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.31746031746031744, + "acc_stderr": 0.023973861998992072, + "acc_norm": 0.31746031746031744, + "acc_norm_stderr": 0.023973861998992072 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3819444444444444, + "acc_stderr": 0.040629907841466674, + "acc_norm": 0.3819444444444444, + "acc_norm_stderr": 0.040629907841466674 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.22, + "acc_stderr": 0.041633319989322695, + 
"acc_norm": 0.22, + "acc_norm_stderr": 0.041633319989322695 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.55, + "acc_stderr": 0.05, + "acc_norm": 0.55, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4797687861271676, + "acc_stderr": 0.026897049996382875, + "acc_norm": 0.4797687861271676, + "acc_norm_stderr": 0.026897049996382875 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4110429447852761, + "acc_stderr": 0.038656978537853624, + "acc_norm": 0.4110429447852761, + "acc_norm_stderr": 0.038656978537853624 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.027801656212323667, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.027801656212323667 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.49222797927461137, + "acc_stderr": 0.03608003225569654, + "acc_norm": 0.49222797927461137, + "acc_norm_stderr": 0.03608003225569654 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.35964912280701755, + "acc_stderr": 0.04514496132873632, + "acc_norm": 0.35964912280701755, + "acc_norm_stderr": 0.04514496132873632 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5486238532110091, + "acc_stderr": 0.021335714711268796, + "acc_norm": 0.5486238532110091, + "acc_norm_stderr": 0.021335714711268796 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.040061680838488795, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.040061680838488795 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.42810457516339867, + "acc_stderr": 0.028332397483664274, + "acc_norm": 0.42810457516339867, + "acc_norm_stderr": 0.028332397483664274 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + 
"acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6446280991735537, + "acc_stderr": 0.0436923632657398, + "acc_norm": 0.6446280991735537, + "acc_norm_stderr": 0.0436923632657398 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4407894736842105, + "acc_stderr": 0.04040311062490436, + "acc_norm": 0.4407894736842105, + "acc_norm_stderr": 0.04040311062490436 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.39215686274509803, + "acc_stderr": 0.019751726508762637, + "acc_norm": 0.39215686274509803, + "acc_norm_stderr": 0.019751726508762637 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2872340425531915, + "acc_stderr": 0.026992199173064352, + "acc_norm": 0.2872340425531915, + "acc_norm_stderr": 0.026992199173064352 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.26785714285714285, + "acc_stderr": 0.0420327729146776, + "acc_norm": 0.26785714285714285, + "acc_norm_stderr": 0.0420327729146776 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.030546745264953195, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.030546745264953195 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.293854748603352, + "acc_stderr": 0.01523507577671961, + "acc_norm": 0.293854748603352, + "acc_norm_stderr": 0.01523507577671961 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3235294117647059, + "acc_stderr": 0.028418208619406797, + "acc_norm": 0.3235294117647059, + "acc_norm_stderr": 0.028418208619406797 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3510204081632653, + "acc_stderr": 
0.030555316755573637, + "acc_norm": 0.3510204081632653, + "acc_norm_stderr": 0.030555316755573637 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5527426160337553, + "acc_stderr": 0.03236564251614192, + "acc_norm": 0.5527426160337553, + "acc_norm_stderr": 0.03236564251614192 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3155149934810952, + "acc_stderr": 0.011869184843058636, + "acc_norm": 0.3155149934810952, + "acc_norm_stderr": 0.011869184843058636 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4950980392156863, + "acc_stderr": 0.03509143375606786, + "acc_norm": 0.4950980392156863, + "acc_norm_stderr": 0.03509143375606786 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5757575757575758, + "acc_stderr": 0.038592681420702636, + "acc_norm": 0.5757575757575758, + "acc_norm_stderr": 0.038592681420702636 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.31946144430844553, + "mc1_stderr": 0.0163226441829605, + "mc2": 0.45470433345322675, + "mc2_stderr": 0.015415678576305275 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5926800472255017, + "acc_stderr": 0.01689245669519127, + "acc_norm": 0.6257378984651711, + "acc_norm_stderr": 0.016637917789798742 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + 
"harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + 
"model_name": "AIFT/AIFT-ko-orca-plat-Yi-ko-6b-v1.2-dpo-3", + "model_sha": "65ff17e2f574d64c727ff839dc37b04147752960", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/AIFT/AIFT-ko-orca-plat-Yi-ko-6b-v1.2-dpo/result_2024-01-24 01:11:44.json b/AIFT/AIFT-ko-orca-plat-Yi-ko-6b-v1.2-dpo/result_2024-01-24 01:11:44.json new file mode 100644 index 0000000000000000000000000000000000000000..840d64ef555ed70cafed7bf9e6b78a02ab7f78e6 --- /dev/null +++ b/AIFT/AIFT-ko-orca-plat-Yi-ko-6b-v1.2-dpo/result_2024-01-24 01:11:44.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3412969283276451, + "acc_stderr": 0.013855831287497731, + "acc_norm": 0.3856655290102389, + "acc_norm_stderr": 0.014224250973257182 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3950408285202151, + "acc_stderr": 0.004878603699686037, + "acc_norm": 0.5263891655048795, + "acc_norm_stderr": 0.004982826916687145 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.49707602339181284, + "acc_stderr": 0.03834759370936839, + "acc_norm": 0.49707602339181284, + "acc_norm_stderr": 0.03834759370936839 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5922330097087378, + "acc_stderr": 0.04865777570410769, + "acc_norm": 0.5922330097087378, + "acc_norm_stderr": 0.04865777570410769 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5146871008939975, + "acc_stderr": 0.01787224802442912, + "acc_norm": 0.5146871008939975, + "acc_norm_stderr": 0.01787224802442912 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.5037037037037037, + "acc_stderr": 0.04319223625811331, + "acc_norm": 0.5037037037037037, + "acc_norm_stderr": 0.04319223625811331 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621502, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621502 + }, + 
"harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.42127659574468085, + "acc_stderr": 0.03227834510146267, + "acc_norm": 0.42127659574468085, + "acc_norm_stderr": 0.03227834510146267 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.37349397590361444, + "acc_stderr": 0.03765845117168863, + "acc_norm": 0.37349397590361444, + "acc_norm_stderr": 0.03765845117168863 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5080385852090032, + "acc_stderr": 0.028394421370984545, + "acc_norm": 0.5080385852090032, + "acc_norm_stderr": 0.028394421370984545 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.43946188340807174, + "acc_stderr": 0.03331092511038179, + "acc_norm": 0.43946188340807174, + "acc_norm_stderr": 0.03331092511038179 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4351145038167939, + "acc_stderr": 0.04348208051644858, + "acc_norm": 0.4351145038167939, + "acc_norm_stderr": 0.04348208051644858 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.42, + "acc_stderr": 0.04960449637488583, + "acc_norm": 0.42, + "acc_norm_stderr": 0.04960449637488583 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5606060606060606, + "acc_stderr": 0.035360859475294805, + "acc_norm": 0.5606060606060606, + "acc_norm_stderr": 0.035360859475294805 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.45517241379310347, + "acc_stderr": 0.04149886942192117, + "acc_norm": 0.45517241379310347, + "acc_norm_stderr": 0.04149886942192117 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.17647058823529413, + "acc_stderr": 0.03793281185307809, + "acc_norm": 0.17647058823529413, + "acc_norm_stderr": 0.03793281185307809 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.47478991596638653, + "acc_stderr": 0.0324371805513741, + "acc_norm": 0.47478991596638653, + "acc_norm_stderr": 0.0324371805513741 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4205128205128205, + "acc_stderr": 0.025028610276710855, + "acc_norm": 
0.4205128205128205, + "acc_norm_stderr": 0.025028610276710855 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.59, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.59, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.35467980295566504, + "acc_stderr": 0.03366124489051448, + "acc_norm": 0.35467980295566504, + "acc_norm_stderr": 0.03366124489051448 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4161290322580645, + "acc_stderr": 0.02804098138076155, + "acc_norm": 0.4161290322580645, + "acc_norm_stderr": 0.02804098138076155 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6581196581196581, + "acc_stderr": 0.03107502852650775, + "acc_norm": 0.6581196581196581, + "acc_norm_stderr": 0.03107502852650775 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.44528301886792454, + "acc_stderr": 0.030588052974270655, + "acc_norm": 0.44528301886792454, + "acc_norm_stderr": 0.030588052974270655 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4727272727272727, + "acc_stderr": 0.04782001791380063, + "acc_norm": 0.4727272727272727, + "acc_norm_stderr": 0.04782001791380063 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2814814814814815, + "acc_stderr": 0.027420019350945273, + "acc_norm": 0.2814814814814815, + "acc_norm_stderr": 0.027420019350945273 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.03780445850526733, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.03780445850526733 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5472636815920398, + "acc_stderr": 0.03519702717576915, + "acc_norm": 
0.5472636815920398, + "acc_norm_stderr": 0.03519702717576915 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3930635838150289, + "acc_stderr": 0.03724249595817729, + "acc_norm": 0.3930635838150289, + "acc_norm_stderr": 0.03724249595817729 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30423280423280424, + "acc_stderr": 0.023695415009463087, + "acc_norm": 0.30423280423280424, + "acc_norm_stderr": 0.023695415009463087 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.375, + "acc_stderr": 0.04048439222695598, + "acc_norm": 0.375, + "acc_norm_stderr": 0.04048439222695598 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.53, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.53, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.48554913294797686, + "acc_stderr": 0.026907849856282542, + "acc_norm": 0.48554913294797686, + "acc_norm_stderr": 0.026907849856282542 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4049079754601227, + "acc_stderr": 0.03856672163548914, + "acc_norm": 0.4049079754601227, + "acc_norm_stderr": 0.03856672163548914 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.027815973433878014, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.027815973433878014 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5077720207253886, + "acc_stderr": 0.03608003225569654, + "acc_norm": 0.5077720207253886, + "acc_norm_stderr": 0.03608003225569654 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3508771929824561, + "acc_stderr": 0.044895393502706986, + "acc_norm": 
0.3508771929824561, + "acc_norm_stderr": 0.044895393502706986 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5431192660550459, + "acc_stderr": 0.021357458785226227, + "acc_norm": 0.5431192660550459, + "acc_norm_stderr": 0.021357458785226227 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.03932537680392868, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.03932537680392868 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4477124183006536, + "acc_stderr": 0.02847293847803353, + "acc_norm": 0.4477124183006536, + "acc_norm_stderr": 0.02847293847803353 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6115702479338843, + "acc_stderr": 0.04449270350068383, + "acc_norm": 0.6115702479338843, + "acc_norm_stderr": 0.04449270350068383 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40131578947368424, + "acc_stderr": 0.039889037033362836, + "acc_norm": 0.40131578947368424, + "acc_norm_stderr": 0.039889037033362836 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4084967320261438, + "acc_stderr": 0.019886221037501862, + "acc_norm": 0.4084967320261438, + "acc_norm_stderr": 0.019886221037501862 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.28368794326241137, + "acc_stderr": 0.02689170942834396, + "acc_norm": 0.28368794326241137, + "acc_norm_stderr": 0.02689170942834396 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.26785714285714285, + "acc_stderr": 0.0420327729146776, + "acc_norm": 0.26785714285714285, + "acc_norm_stderr": 0.0420327729146776 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.031674687068289784, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.031674687068289784 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 
0.27150837988826815, + "acc_stderr": 0.014874252168095285, + "acc_norm": 0.27150837988826815, + "acc_norm_stderr": 0.014874252168095285 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3125, + "acc_stderr": 0.02815637344037142, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.02815637344037142 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.363265306122449, + "acc_stderr": 0.030789051139030806, + "acc_norm": 0.363265306122449, + "acc_norm_stderr": 0.030789051139030806 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5485232067510548, + "acc_stderr": 0.0323936001739747, + "acc_norm": 0.5485232067510548, + "acc_norm_stderr": 0.0323936001739747 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3226857887874837, + "acc_stderr": 0.011940264193195988, + "acc_norm": 0.3226857887874837, + "acc_norm_stderr": 0.011940264193195988 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4803921568627451, + "acc_stderr": 0.03506612560524866, + "acc_norm": 0.4803921568627451, + "acc_norm_stderr": 0.03506612560524866 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5515151515151515, + "acc_stderr": 0.038835659779569286, + "acc_norm": 0.5515151515151515, + "acc_norm_stderr": 0.038835659779569286 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2937576499388005, + "mc1_stderr": 0.015945068581236614, + "mc2": 0.4392108026993697, + "mc2_stderr": 0.015134967640839908 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5985832349468713, + "acc_stderr": 0.01685290785872906, + "acc_norm": 0.6340023612750886, + "acc_norm_stderr": 0.016561489664895714 + } + }, + "versions": { + 
"all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + 
"harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "AIFT/AIFT-ko-orca-plat-Yi-ko-6b-v1.2-dpo", + "model_sha": "815ca795b9f43380b4161a7834835d744a7ceb2e", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/AIFT/AIFT-ko-orca-plat-Yi-ko-6b-v1.2/result_2024-01-24 00:57:59.json b/AIFT/AIFT-ko-orca-plat-Yi-ko-6b-v1.2/result_2024-01-24 00:57:59.json new file mode 100644 index 0000000000000000000000000000000000000000..3c74d45a85669e9a59c4efd0df8180e18b9f7c9f --- /dev/null +++ b/AIFT/AIFT-ko-orca-plat-Yi-ko-6b-v1.2/result_2024-01-24 00:57:59.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.34044368600682595, + "acc_stderr": 0.01384746051889298, + "acc_norm": 0.3839590443686007, + "acc_norm_stderr": 0.01421244498065189 + }, + "harness|ko_hellaswag|10": { + "acc": 0.39474208325034854, + "acc_stderr": 0.004877962644991872, + "acc_norm": 0.526090420235013, + "acc_norm_stderr": 0.004982983592459193 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.49707602339181284, + "acc_stderr": 0.03834759370936839, + "acc_norm": 0.49707602339181284, + "acc_norm_stderr": 0.03834759370936839 + }, + 
"harness|ko_mmlu_management|5": { + "acc": 0.5825242718446602, + "acc_stderr": 0.048828405482122375, + "acc_norm": 0.5825242718446602, + "acc_norm_stderr": 0.048828405482122375 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5095785440613027, + "acc_stderr": 0.017876682275340863, + "acc_norm": 0.5095785440613027, + "acc_norm_stderr": 0.017876682275340863 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.5037037037037037, + "acc_stderr": 0.04319223625811331, + "acc_norm": 0.5037037037037037, + "acc_norm_stderr": 0.04319223625811331 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621502, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621502 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4127659574468085, + "acc_stderr": 0.03218471141400351, + "acc_norm": 0.4127659574468085, + "acc_norm_stderr": 0.03218471141400351 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3674698795180723, + "acc_stderr": 0.03753267402120574, + "acc_norm": 0.3674698795180723, + "acc_norm_stderr": 0.03753267402120574 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5080385852090032, + "acc_stderr": 0.028394421370984545, + "acc_norm": 0.5080385852090032, + "acc_norm_stderr": 0.028394421370984545 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.43946188340807174, + "acc_stderr": 0.03331092511038179, + "acc_norm": 0.43946188340807174, + "acc_norm_stderr": 0.03331092511038179 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.44274809160305345, + "acc_stderr": 0.0435644720266507, + "acc_norm": 0.44274809160305345, + "acc_norm_stderr": 0.0435644720266507 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.42, + "acc_stderr": 0.04960449637488583, + "acc_norm": 0.42, + "acc_norm_stderr": 0.04960449637488583 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5606060606060606, + "acc_stderr": 0.035360859475294805, + "acc_norm": 0.5606060606060606, + "acc_norm_stderr": 0.035360859475294805 + }, + 
"harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.45517241379310347, + "acc_stderr": 0.04149886942192117, + "acc_norm": 0.45517241379310347, + "acc_norm_stderr": 0.04149886942192117 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.17647058823529413, + "acc_stderr": 0.03793281185307809, + "acc_norm": 0.17647058823529413, + "acc_norm_stderr": 0.03793281185307809 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.47478991596638653, + "acc_stderr": 0.0324371805513741, + "acc_norm": 0.47478991596638653, + "acc_norm_stderr": 0.0324371805513741 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4205128205128205, + "acc_stderr": 0.025028610276710855, + "acc_norm": 0.4205128205128205, + "acc_norm_stderr": 0.025028610276710855 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3497536945812808, + "acc_stderr": 0.03355400904969565, + "acc_norm": 0.3497536945812808, + "acc_norm_stderr": 0.03355400904969565 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4161290322580645, + "acc_stderr": 0.02804098138076155, + "acc_norm": 0.4161290322580645, + "acc_norm_stderr": 0.02804098138076155 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6623931623931624, + "acc_stderr": 0.030980296992618554, + "acc_norm": 0.6623931623931624, + "acc_norm_stderr": 0.030980296992618554 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.44528301886792454, + "acc_stderr": 0.030588052974270655, + "acc_norm": 0.44528301886792454, + "acc_norm_stderr": 
0.030588052974270655 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4727272727272727, + "acc_stderr": 0.04782001791380063, + "acc_norm": 0.4727272727272727, + "acc_norm_stderr": 0.04782001791380063 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2814814814814815, + "acc_stderr": 0.027420019350945273, + "acc_norm": 0.2814814814814815, + "acc_norm_stderr": 0.027420019350945273 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.038020397601079024, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.038020397601079024 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5422885572139303, + "acc_stderr": 0.035228658640995975, + "acc_norm": 0.5422885572139303, + "acc_norm_stderr": 0.035228658640995975 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3930635838150289, + "acc_stderr": 0.03724249595817729, + "acc_norm": 0.3930635838150289, + "acc_norm_stderr": 0.03724249595817729 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30158730158730157, + "acc_stderr": 0.023636975996101813, + "acc_norm": 0.30158730158730157, + "acc_norm_stderr": 0.023636975996101813 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.375, + "acc_stderr": 0.04048439222695598, + "acc_norm": 0.375, + "acc_norm_stderr": 0.04048439222695598 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.53, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.53, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4884393063583815, + "acc_stderr": 0.02691189868637793, + "acc_norm": 0.4884393063583815, + "acc_norm_stderr": 0.02691189868637793 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4233128834355828, + "acc_stderr": 0.038818912133343826, + "acc_norm": 0.4233128834355828, + 
"acc_norm_stderr": 0.038818912133343826 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.027815973433878014, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.027815973433878014 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5077720207253886, + "acc_stderr": 0.03608003225569654, + "acc_norm": 0.5077720207253886, + "acc_norm_stderr": 0.03608003225569654 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3508771929824561, + "acc_stderr": 0.044895393502706986, + "acc_norm": 0.3508771929824561, + "acc_norm_stderr": 0.044895393502706986 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.544954128440367, + "acc_stderr": 0.021350503090925163, + "acc_norm": 0.544954128440367, + "acc_norm_stderr": 0.021350503090925163 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.03932537680392868, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.03932537680392868 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.02845263998508801, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.02845263998508801 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6115702479338843, + "acc_stderr": 0.04449270350068383, + "acc_norm": 0.6115702479338843, + "acc_norm_stderr": 0.04449270350068383 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40131578947368424, + "acc_stderr": 0.039889037033362836, + "acc_norm": 0.40131578947368424, + "acc_norm_stderr": 0.039889037033362836 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4084967320261438, + "acc_stderr": 0.019886221037501862, + 
"acc_norm": 0.4084967320261438, + "acc_norm_stderr": 0.019886221037501862 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2801418439716312, + "acc_stderr": 0.02678917235114024, + "acc_norm": 0.2801418439716312, + "acc_norm_stderr": 0.02678917235114024 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.26785714285714285, + "acc_stderr": 0.0420327729146776, + "acc_norm": 0.26785714285714285, + "acc_norm_stderr": 0.0420327729146776 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.031674687068289784, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.031674687068289784 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2737430167597765, + "acc_stderr": 0.014912413096372434, + "acc_norm": 0.2737430167597765, + "acc_norm_stderr": 0.014912413096372434 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.31985294117647056, + "acc_stderr": 0.028332959514031215, + "acc_norm": 0.31985294117647056, + "acc_norm_stderr": 0.028332959514031215 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.35918367346938773, + "acc_stderr": 0.030713560455108493, + "acc_norm": 0.35918367346938773, + "acc_norm_stderr": 0.030713560455108493 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5527426160337553, + "acc_stderr": 0.03236564251614192, + "acc_norm": 0.5527426160337553, + "acc_norm_stderr": 0.03236564251614192 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3226857887874837, + "acc_stderr": 0.01194026419319599, + "acc_norm": 0.3226857887874837, + "acc_norm_stderr": 0.01194026419319599 + }, + 
"harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4803921568627451, + "acc_stderr": 0.03506612560524866, + "acc_norm": 0.4803921568627451, + "acc_norm_stderr": 0.03506612560524866 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5515151515151515, + "acc_stderr": 0.038835659779569286, + "acc_norm": 0.5515151515151515, + "acc_norm_stderr": 0.038835659779569286 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.29498164014687883, + "mc1_stderr": 0.015964400965589678, + "mc2": 0.4386680736460445, + "mc2_stderr": 0.015119862017632555 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5938606847697757, + "acc_stderr": 0.016884749503191396, + "acc_norm": 0.6269185360094451, + "acc_norm_stderr": 0.01662731827513746 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 
1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "AIFT/AIFT-ko-orca-plat-Yi-ko-6b-v1.2", + "model_sha": "c5f2709b5bf5d269064e542847db5d3a9cc93bc9", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/AIFT/AIFT-ko-orca-plat-Yi-ko-6b-v1.5/result_2024-02-02 04:49:26.json b/AIFT/AIFT-ko-orca-plat-Yi-ko-6b-v1.5/result_2024-02-02 04:49:26.json new file mode 100644 index 
0000000000000000000000000000000000000000..94bef0def9435ae68113dbeae7227062b5299085 --- /dev/null +++ b/AIFT/AIFT-ko-orca-plat-Yi-ko-6b-v1.5/result_2024-02-02 04:49:26.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.34812286689419797, + "acc_stderr": 0.01392100859517935, + "acc_norm": 0.4112627986348123, + "acc_norm_stderr": 0.014379441068522077 + }, + "harness|ko_hellaswag|10": { + "acc": 0.39245170284803826, + "acc_stderr": 0.0048729844929679975, + "acc_norm": 0.5243975303724357, + "acc_norm_stderr": 0.004983837641502894 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.47368421052631576, + "acc_stderr": 0.03829509868994727, + "acc_norm": 0.47368421052631576, + "acc_norm_stderr": 0.03829509868994727 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5922330097087378, + "acc_stderr": 0.04865777570410769, + "acc_norm": 0.5922330097087378, + "acc_norm_stderr": 0.04865777570410769 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5491698595146871, + "acc_stderr": 0.017793297572699044, + "acc_norm": 0.5491698595146871, + "acc_norm_stderr": 0.017793297572699044 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4148148148148148, + "acc_stderr": 0.04256193767901407, + "acc_norm": 0.4148148148148148, + "acc_norm_stderr": 0.04256193767901407 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.425531914893617, + "acc_stderr": 0.03232146916224468, + "acc_norm": 0.425531914893617, + "acc_norm_stderr": 0.03232146916224468 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3433734939759036, + "acc_stderr": 0.03696584317010601, + "acc_norm": 0.3433734939759036, + "acc_norm_stderr": 0.03696584317010601 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4790996784565916, + "acc_stderr": 0.028373270961069414, + "acc_norm": 0.4790996784565916, + "acc_norm_stderr": 
0.028373270961069414 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5022421524663677, + "acc_stderr": 0.03355746535223263, + "acc_norm": 0.5022421524663677, + "acc_norm_stderr": 0.03355746535223263 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4580152671755725, + "acc_stderr": 0.04369802690578756, + "acc_norm": 0.4580152671755725, + "acc_norm_stderr": 0.04369802690578756 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.42, + "acc_stderr": 0.04960449637488583, + "acc_norm": 0.42, + "acc_norm_stderr": 0.04960449637488583 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5858585858585859, + "acc_stderr": 0.03509438348879629, + "acc_norm": 0.5858585858585859, + "acc_norm_stderr": 0.03509438348879629 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4689655172413793, + "acc_stderr": 0.04158632762097828, + "acc_norm": 0.4689655172413793, + "acc_norm_stderr": 0.04158632762097828 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.04220773659171453, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.04220773659171453 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4579831932773109, + "acc_stderr": 0.032363611119519416, + "acc_norm": 0.4579831932773109, + "acc_norm_stderr": 0.032363611119519416 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.41025641025641024, + "acc_stderr": 0.024939313906940777, + "acc_norm": 0.41025641025641024, + "acc_norm_stderr": 0.024939313906940777 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.59, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.59, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.49074074074074076, + 
"acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3497536945812808, + "acc_stderr": 0.03355400904969566, + "acc_norm": 0.3497536945812808, + "acc_norm_stderr": 0.03355400904969566 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4290322580645161, + "acc_stderr": 0.02815603653823321, + "acc_norm": 0.4290322580645161, + "acc_norm_stderr": 0.02815603653823321 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6794871794871795, + "acc_stderr": 0.030572811310299607, + "acc_norm": 0.6794871794871795, + "acc_norm_stderr": 0.030572811310299607 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4830188679245283, + "acc_stderr": 0.030755120364119905, + "acc_norm": 0.4830188679245283, + "acc_norm_stderr": 0.030755120364119905 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.04769300568972744, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.04769300568972744 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.027840811495871916, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.027840811495871916 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.03780445850526733, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.03780445850526733 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5621890547263682, + "acc_stderr": 0.0350808011219984, + "acc_norm": 0.5621890547263682, + "acc_norm_stderr": 0.0350808011219984 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.36416184971098264, + "acc_stderr": 0.03669072477416907, + "acc_norm": 0.36416184971098264, + "acc_norm_stderr": 0.03669072477416907 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3386243386243386, + "acc_stderr": 0.024373197867983063, + "acc_norm": 0.3386243386243386, + "acc_norm_stderr": 0.024373197867983063 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 
0.3472222222222222, + "acc_stderr": 0.03981240543717862, + "acc_norm": 0.3472222222222222, + "acc_norm_stderr": 0.03981240543717862 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4682080924855491, + "acc_stderr": 0.026864624366756656, + "acc_norm": 0.4682080924855491, + "acc_norm_stderr": 0.026864624366756656 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4049079754601227, + "acc_stderr": 0.03856672163548914, + "acc_norm": 0.4049079754601227, + "acc_norm_stderr": 0.03856672163548914 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.027801656212323653, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.027801656212323653 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.538860103626943, + "acc_stderr": 0.035975244117345775, + "acc_norm": 0.538860103626943, + "acc_norm_stderr": 0.035975244117345775 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2982456140350877, + "acc_stderr": 0.04303684033537316, + "acc_norm": 0.2982456140350877, + "acc_norm_stderr": 0.04303684033537316 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5706422018348624, + "acc_stderr": 0.021222286397236504, + "acc_norm": 0.5706422018348624, + "acc_norm_stderr": 0.021222286397236504 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.23809523809523808, + "acc_stderr": 0.038095238095238126, + "acc_norm": 0.23809523809523808, + "acc_norm_stderr": 0.038095238095238126 + }, + "harness|ko_mmlu_nutrition|5": { + 
"acc": 0.39215686274509803, + "acc_stderr": 0.027956046165424516, + "acc_norm": 0.39215686274509803, + "acc_norm_stderr": 0.027956046165424516 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.043913262867240704, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.043913262867240704 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.42105263157894735, + "acc_stderr": 0.04017901275981748, + "acc_norm": 0.42105263157894735, + "acc_norm_stderr": 0.04017901275981748 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4215686274509804, + "acc_stderr": 0.019977422600227467, + "acc_norm": 0.4215686274509804, + "acc_norm_stderr": 0.019977422600227467 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32269503546099293, + "acc_stderr": 0.02788913930053478, + "acc_norm": 0.32269503546099293, + "acc_norm_stderr": 0.02788913930053478 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3125, + "acc_stderr": 0.043994650575715215, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.043994650575715215 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.32407407407407407, + "acc_stderr": 0.03191923445686186, + "acc_norm": 0.32407407407407407, + "acc_norm_stderr": 0.03191923445686186 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.28044692737430166, + "acc_stderr": 0.015024083883322884, + "acc_norm": 0.28044692737430166, + "acc_norm_stderr": 0.015024083883322884 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 
0.33455882352941174, + "acc_stderr": 0.028661996202335314, + "acc_norm": 0.33455882352941174, + "acc_norm_stderr": 0.028661996202335314 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.34285714285714286, + "acc_stderr": 0.03038726291954773, + "acc_norm": 0.34285714285714286, + "acc_norm_stderr": 0.03038726291954773 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5907172995780591, + "acc_stderr": 0.032007041833595914, + "acc_norm": 0.5907172995780591, + "acc_norm_stderr": 0.032007041833595914 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3350717079530639, + "acc_stderr": 0.012055499471330368, + "acc_norm": 0.3350717079530639, + "acc_norm_stderr": 0.012055499471330368 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.47058823529411764, + "acc_stderr": 0.03503235296367992, + "acc_norm": 0.47058823529411764, + "acc_norm_stderr": 0.03503235296367992 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5757575757575758, + "acc_stderr": 0.038592681420702636, + "acc_norm": 0.5757575757575758, + "acc_norm_stderr": 0.038592681420702636 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.28518971848225216, + "mc1_stderr": 0.015805827874454895, + "mc2": 0.4259753093204231, + "mc2_stderr": 0.014983532851791444 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5289256198347108, + "acc_stderr": 0.01716156394991635, + "acc_norm": 0.6080283353010626, + "acc_norm_stderr": 0.016784332119424084 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + 
"harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + 
"harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "AIFT/AIFT-ko-orca-plat-Yi-ko-6b-v1.5", + "model_sha": "136d3c543af246c9046e17d42c0a357316de8815", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/AIFT/AIFT-ko-orca-plat-Yi-ko-6b-v1.6/result_2024-02-06 02:51:38.json b/AIFT/AIFT-ko-orca-plat-Yi-ko-6b-v1.6/result_2024-02-06 02:51:38.json new file mode 100644 index 0000000000000000000000000000000000000000..da6c01362cba5432e1de04043e9a1e127cc51cae --- /dev/null +++ b/AIFT/AIFT-ko-orca-plat-Yi-ko-6b-v1.6/result_2024-02-06 02:51:38.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3447098976109215, + "acc_stderr": 0.013888816286782112, + "acc_norm": 0.3984641638225256, + "acc_norm_stderr": 0.014306946052735565 + }, + "harness|ko_hellaswag|10": { + "acc": 0.398725353515236, + "acc_stderr": 0.004886353563571851, + "acc_norm": 0.5272854013144792, + "acc_norm_stderr": 0.004982346155911131 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4619883040935672, + "acc_stderr": 0.03823727092882307, + "acc_norm": 0.4619883040935672, + "acc_norm_stderr": 0.03823727092882307 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5631067961165048, + "acc_stderr": 0.04911147107365777, + "acc_norm": 0.5631067961165048, + "acc_norm_stderr": 0.04911147107365777 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.017769250583533253, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.017769250583533253 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45925925925925926, + "acc_stderr": 0.04304979692464244, + "acc_norm": 0.45925925925925926, + 
"acc_norm_stderr": 0.04304979692464244 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621503, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621503 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.40425531914893614, + "acc_stderr": 0.03208115750788684, + "acc_norm": 0.40425531914893614, + "acc_norm_stderr": 0.03208115750788684 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.37349397590361444, + "acc_stderr": 0.037658451171688624, + "acc_norm": 0.37349397590361444, + "acc_norm_stderr": 0.037658451171688624 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5144694533762058, + "acc_stderr": 0.02838619808417768, + "acc_norm": 0.5144694533762058, + "acc_norm_stderr": 0.02838619808417768 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.49327354260089684, + "acc_stderr": 0.033554765962343545, + "acc_norm": 0.49327354260089684, + "acc_norm_stderr": 0.033554765962343545 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4580152671755725, + "acc_stderr": 0.04369802690578756, + "acc_norm": 0.4580152671755725, + "acc_norm_stderr": 0.04369802690578756 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5606060606060606, + "acc_stderr": 0.03536085947529482, + "acc_norm": 0.5606060606060606, + "acc_norm_stderr": 0.03536085947529482 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4689655172413793, + "acc_stderr": 0.04158632762097828, + "acc_norm": 0.4689655172413793, + "acc_norm_stderr": 0.04158632762097828 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.042801058373643966, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.042801058373643966 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4495798319327731, + "acc_stderr": 0.03231293497137707, + 
"acc_norm": 0.4495798319327731, + "acc_norm_stderr": 0.03231293497137707 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.44871794871794873, + "acc_stderr": 0.025217315184846482, + "acc_norm": 0.44871794871794873, + "acc_norm_stderr": 0.025217315184846482 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.55, + "acc_stderr": 0.049999999999999996, + "acc_norm": 0.55, + "acc_norm_stderr": 0.049999999999999996 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5462962962962963, + "acc_stderr": 0.048129173245368216, + "acc_norm": 0.5462962962962963, + "acc_norm_stderr": 0.048129173245368216 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3694581280788177, + "acc_stderr": 0.03395970381998574, + "acc_norm": 0.3694581280788177, + "acc_norm_stderr": 0.03395970381998574 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.432258064516129, + "acc_stderr": 0.028181739720019416, + "acc_norm": 0.432258064516129, + "acc_norm_stderr": 0.028181739720019416 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6623931623931624, + "acc_stderr": 0.030980296992618554, + "acc_norm": 0.6623931623931624, + "acc_norm_stderr": 0.030980296992618554 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.45660377358490567, + "acc_stderr": 0.03065674869673943, + "acc_norm": 0.45660377358490567, + "acc_norm_stderr": 0.03065674869673943 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5, + "acc_stderr": 0.04789131426105757, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04789131426105757 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2814814814814815, + "acc_stderr": 0.027420019350945273, + "acc_norm": 0.2814814814814815, + "acc_norm_stderr": 0.027420019350945273 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2781456953642384, + "acc_stderr": 
0.03658603262763743, + "acc_norm": 0.2781456953642384, + "acc_norm_stderr": 0.03658603262763743 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.582089552238806, + "acc_stderr": 0.034875586404620636, + "acc_norm": 0.582089552238806, + "acc_norm_stderr": 0.034875586404620636 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3699421965317919, + "acc_stderr": 0.03681229633394318, + "acc_norm": 0.3699421965317919, + "acc_norm_stderr": 0.03681229633394318 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30158730158730157, + "acc_stderr": 0.023636975996101813, + "acc_norm": 0.30158730158730157, + "acc_norm_stderr": 0.023636975996101813 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.04076663253918567, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.04076663253918567 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237101, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237101 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.49421965317919075, + "acc_stderr": 0.026917296179149123, + "acc_norm": 0.49421965317919075, + "acc_norm_stderr": 0.026917296179149123 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.44785276073619634, + "acc_stderr": 0.03906947479456601, + "acc_norm": 0.44785276073619634, + "acc_norm_stderr": 0.03906947479456601 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5092592592592593, + "acc_stderr": 0.027815973433878014, + "acc_norm": 0.5092592592592593, + "acc_norm_stderr": 0.027815973433878014 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5284974093264249, + 
"acc_stderr": 0.036025735712884414, + "acc_norm": 0.5284974093264249, + "acc_norm_stderr": 0.036025735712884414 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.34210526315789475, + "acc_stderr": 0.04462917535336936, + "acc_norm": 0.34210526315789475, + "acc_norm_stderr": 0.04462917535336936 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5486238532110091, + "acc_stderr": 0.02133571471126879, + "acc_norm": 0.5486238532110091, + "acc_norm_stderr": 0.02133571471126879 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.23809523809523808, + "acc_stderr": 0.038095238095238126, + "acc_norm": 0.23809523809523808, + "acc_norm_stderr": 0.038095238095238126 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4215686274509804, + "acc_stderr": 0.028275490156791434, + "acc_norm": 0.4215686274509804, + "acc_norm_stderr": 0.028275490156791434 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6033057851239669, + "acc_stderr": 0.044658697805310094, + "acc_norm": 0.6033057851239669, + "acc_norm_stderr": 0.044658697805310094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4407894736842105, + "acc_stderr": 0.04040311062490436, + "acc_norm": 0.4407894736842105, + "acc_norm_stderr": 0.04040311062490436 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4117647058823529, + "acc_stderr": 0.019910377463105935, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.019910377463105935 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3475177304964539, + "acc_stderr": 0.028406627809590947, + "acc_norm": 0.3475177304964539, + "acc_norm_stderr": 0.028406627809590947 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.30357142857142855, + "acc_stderr": 0.04364226155841044, + "acc_norm": 0.30357142857142855, + "acc_norm_stderr": 0.04364226155841044 + }, + 
"harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.28703703703703703, + "acc_stderr": 0.030851992993257013, + "acc_norm": 0.28703703703703703, + "acc_norm_stderr": 0.030851992993257013 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24581005586592178, + "acc_stderr": 0.01440029642922561, + "acc_norm": 0.24581005586592178, + "acc_norm_stderr": 0.01440029642922561 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.049999999999999996, + "acc_norm": 0.45, + "acc_norm_stderr": 0.049999999999999996 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.34558823529411764, + "acc_stderr": 0.02888819310398864, + "acc_norm": 0.34558823529411764, + "acc_norm_stderr": 0.02888819310398864 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.32653061224489793, + "acc_stderr": 0.030021056238440307, + "acc_norm": 0.32653061224489793, + "acc_norm_stderr": 0.030021056238440307 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6371308016877637, + "acc_stderr": 0.03129920825530213, + "acc_norm": 0.6371308016877637, + "acc_norm_stderr": 0.03129920825530213 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.31877444589308995, + "acc_stderr": 0.011901895635786095, + "acc_norm": 0.31877444589308995, + "acc_norm_stderr": 0.011901895635786095 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5, + "acc_stderr": 0.03509312031717982, + "acc_norm": 0.5, + "acc_norm_stderr": 0.03509312031717982 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5878787878787879, + "acc_stderr": 0.03843566993588717, + "acc_norm": 0.5878787878787879, + "acc_norm_stderr": 0.03843566993588717 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2864137086903305, + "mc1_stderr": 0.015826142439502332, + "mc2": 0.4365731923271676, + 
"mc2_stderr": 0.015091476648832229 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5442739079102715, + "acc_stderr": 0.017122829143292658, + "acc_norm": 0.615112160566706, + "acc_norm_stderr": 0.01672857970149864 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + 
"harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "AIFT/AIFT-ko-orca-plat-Yi-ko-6b-v1.6", + "model_sha": "b0020e1098ac6f0562aea85ee0fc49064d3858ce", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/AIFT/AIFT-ko-orca-plat-Yi-ko-6b-v1.8/result_2024-03-01 06:53:02.json b/AIFT/AIFT-ko-orca-plat-Yi-ko-6b-v1.8/result_2024-03-01 06:53:02.json new file mode 100644 index 0000000000000000000000000000000000000000..32a80bfd1510bde7f591cfad7d347f77a40e83a6 --- /dev/null +++ b/AIFT/AIFT-ko-orca-plat-Yi-ko-6b-v1.8/result_2024-03-01 06:53:02.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.35665529010238906, + "acc_stderr": 0.013998056902620192, + "acc_norm": 0.40187713310580203, + "acc_norm_stderr": 0.014327268614578278 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3957379008165704, + "acc_stderr": 0.004880092083408037, + "acc_norm": 0.5309699263095001, + 
"acc_norm_stderr": 0.004980200451851671 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.45614035087719296, + "acc_stderr": 0.03820042586602966, + "acc_norm": 0.45614035087719296, + "acc_norm_stderr": 0.03820042586602966 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5922330097087378, + "acc_stderr": 0.048657775704107696, + "acc_norm": 0.5922330097087378, + "acc_norm_stderr": 0.048657775704107696 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.01776925058353325, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.01776925058353325 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4888888888888889, + "acc_stderr": 0.04318275491977978, + "acc_norm": 0.4888888888888889, + "acc_norm_stderr": 0.04318275491977978 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206824, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206824 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.42127659574468085, + "acc_stderr": 0.03227834510146268, + "acc_norm": 0.42127659574468085, + "acc_norm_stderr": 0.03227834510146268 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39759036144578314, + "acc_stderr": 0.03809973084540219, + "acc_norm": 0.39759036144578314, + "acc_norm_stderr": 0.03809973084540219 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5466237942122186, + "acc_stderr": 0.028274359854894245, + "acc_norm": 0.5466237942122186, + "acc_norm_stderr": 0.028274359854894245 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4484304932735426, + "acc_stderr": 0.033378837362550984, + "acc_norm": 0.4484304932735426, + "acc_norm_stderr": 0.033378837362550984 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5190839694656488, + "acc_stderr": 0.043820947055509867, + "acc_norm": 0.5190839694656488, + "acc_norm_stderr": 0.043820947055509867 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + 
"acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5909090909090909, + "acc_stderr": 0.03502975799413007, + "acc_norm": 0.5909090909090909, + "acc_norm_stderr": 0.03502975799413007 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.45517241379310347, + "acc_stderr": 0.04149886942192117, + "acc_norm": 0.45517241379310347, + "acc_norm_stderr": 0.04149886942192117 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.04158307533083286, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.04158307533083286 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.44537815126050423, + "acc_stderr": 0.032284106267163895, + "acc_norm": 0.44537815126050423, + "acc_norm_stderr": 0.032284106267163895 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4205128205128205, + "acc_stderr": 0.025028610276710855, + "acc_norm": 0.4205128205128205, + "acc_norm_stderr": 0.025028610276710855 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.64, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.64, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5185185185185185, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.5185185185185185, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.37438423645320196, + "acc_stderr": 0.03405155380561952, + "acc_norm": 0.37438423645320196, + "acc_norm_stderr": 0.03405155380561952 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4290322580645161, + "acc_stderr": 0.02815603653823321, + "acc_norm": 0.4290322580645161, + "acc_norm_stderr": 0.02815603653823321 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.688034188034188, + "acc_stderr": 
0.030351527323344958, + "acc_norm": 0.688034188034188, + "acc_norm_stderr": 0.030351527323344958 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4716981132075472, + "acc_stderr": 0.030723535249006107, + "acc_norm": 0.4716981132075472, + "acc_norm_stderr": 0.030723535249006107 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4909090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.4909090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2851851851851852, + "acc_stderr": 0.027528599210340492, + "acc_norm": 0.2851851851851852, + "acc_norm_stderr": 0.027528599210340492 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + "acc_stderr": 0.03684881521389023, + "acc_norm": 0.2847682119205298, + "acc_norm_stderr": 0.03684881521389023 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5671641791044776, + "acc_stderr": 0.03503490923673282, + "acc_norm": 0.5671641791044776, + "acc_norm_stderr": 0.03503490923673282 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3872832369942196, + "acc_stderr": 0.03714325906302064, + "acc_norm": 0.3872832369942196, + "acc_norm_stderr": 0.03714325906302064 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.335978835978836, + "acc_stderr": 0.024326310529149138, + "acc_norm": 0.335978835978836, + "acc_norm_stderr": 0.024326310529149138 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.04076663253918567, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.04076663253918567 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.56, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.56, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 
0.5115606936416185, + "acc_stderr": 0.02691189868637792, + "acc_norm": 0.5115606936416185, + "acc_norm_stderr": 0.02691189868637792 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.50920245398773, + "acc_stderr": 0.03927705600787443, + "acc_norm": 0.50920245398773, + "acc_norm_stderr": 0.03927705600787443 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.027777777777777797, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.027777777777777797 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5544041450777202, + "acc_stderr": 0.035870149860756595, + "acc_norm": 0.5544041450777202, + "acc_norm_stderr": 0.035870149860756595 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.042270544512322004, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.042270544512322004 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5724770642201835, + "acc_stderr": 0.02121091020430043, + "acc_norm": 0.5724770642201835, + "acc_norm_stderr": 0.02121091020430043 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2698412698412698, + "acc_stderr": 0.03970158273235173, + "acc_norm": 0.2698412698412698, + "acc_norm_stderr": 0.03970158273235173 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.43790849673202614, + "acc_stderr": 0.02840830202033269, + "acc_norm": 0.43790849673202614, + "acc_norm_stderr": 0.02840830202033269 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6776859504132231, + "acc_stderr": 0.04266416363352167, + "acc_norm": 0.6776859504132231, + "acc_norm_stderr": 0.04266416363352167 + }, + 
"harness|ko_mmlu_astronomy|5": { + "acc": 0.4473684210526316, + "acc_stderr": 0.04046336883978251, + "acc_norm": 0.4473684210526316, + "acc_norm_stderr": 0.04046336883978251 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.42810457516339867, + "acc_stderr": 0.0200176292142131, + "acc_norm": 0.42810457516339867, + "acc_norm_stderr": 0.0200176292142131 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.30851063829787234, + "acc_stderr": 0.02755336616510137, + "acc_norm": 0.30851063829787234, + "acc_norm_stderr": 0.02755336616510137 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.33035714285714285, + "acc_stderr": 0.04464285714285712, + "acc_norm": 0.33035714285714285, + "acc_norm_stderr": 0.04464285714285712 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.03114144782353604, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.03114144782353604 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24134078212290502, + "acc_stderr": 0.014310999547961441, + "acc_norm": 0.24134078212290502, + "acc_norm_stderr": 0.014310999547961441 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3639705882352941, + "acc_stderr": 0.02922719246003203, + "acc_norm": 0.3639705882352941, + "acc_norm_stderr": 0.02922719246003203 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3795918367346939, + "acc_stderr": 0.03106721126287248, + "acc_norm": 0.3795918367346939, + "acc_norm_stderr": 0.03106721126287248 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6118143459915611, + "acc_stderr": 0.031722950043323296, + 
"acc_norm": 0.6118143459915611, + "acc_norm_stderr": 0.031722950043323296 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3259452411994785, + "acc_stderr": 0.011971507294982779, + "acc_norm": 0.3259452411994785, + "acc_norm_stderr": 0.011971507294982779 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5392156862745098, + "acc_stderr": 0.03498501649369527, + "acc_norm": 0.5392156862745098, + "acc_norm_stderr": 0.03498501649369527 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5333333333333333, + "acc_stderr": 0.03895658065271846, + "acc_norm": 0.5333333333333333, + "acc_norm_stderr": 0.03895658065271846 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2864137086903305, + "mc1_stderr": 0.015826142439502342, + "mc2": 0.42702483854882867, + "mc2_stderr": 0.015004259698872787 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5360094451003542, + "acc_stderr": 0.017145715365486664, + "acc_norm": 0.602125147579693, + "acc_norm_stderr": 0.01682795905473339 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + 
"harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "AIFT/AIFT-ko-orca-plat-Yi-ko-6b-v1.8", + "model_sha": "061f243c89e813b852608a835754731687ee3dac", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + 
"override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/AIFT/AIFT-ko-orca-plat-Yi-ko-6b-v2.1/result_2024-02-29 08:59:21.json b/AIFT/AIFT-ko-orca-plat-Yi-ko-6b-v2.1/result_2024-02-29 08:59:21.json new file mode 100644 index 0000000000000000000000000000000000000000..5be2dce377a96a42d11c2103543317aee071aa6c --- /dev/null +++ b/AIFT/AIFT-ko-orca-plat-Yi-ko-6b-v2.1/result_2024-02-29 08:59:21.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3515358361774744, + "acc_stderr": 0.01395241369960094, + "acc_norm": 0.42235494880546076, + "acc_norm_stderr": 0.014434138713379974 + }, + "harness|ko_hellaswag|10": { + "acc": 0.39603664608643696, + "acc_stderr": 0.0048807267879886354, + "acc_norm": 0.5270862378012349, + "acc_norm_stderr": 0.004982454383162067 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.49122807017543857, + "acc_stderr": 0.038342347441649924, + "acc_norm": 0.49122807017543857, + "acc_norm_stderr": 0.038342347441649924 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5728155339805825, + "acc_stderr": 0.04897957737781168, + "acc_norm": 0.5728155339805825, + "acc_norm_stderr": 0.04897957737781168 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5542784163473818, + "acc_stderr": 0.01777429728247951, + "acc_norm": 0.5542784163473818, + "acc_norm_stderr": 0.01777429728247951 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.04309732901036354, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.04309732901036354 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.425531914893617, + "acc_stderr": 0.03232146916224469, + "acc_norm": 0.425531914893617, + "acc_norm_stderr": 0.03232146916224469 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42168674698795183, + "acc_stderr": 
0.03844453181770917, + "acc_norm": 0.42168674698795183, + "acc_norm_stderr": 0.03844453181770917 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5305466237942122, + "acc_stderr": 0.028345045864840622, + "acc_norm": 0.5305466237942122, + "acc_norm_stderr": 0.028345045864840622 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5022421524663677, + "acc_stderr": 0.033557465352232634, + "acc_norm": 0.5022421524663677, + "acc_norm_stderr": 0.033557465352232634 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48091603053435117, + "acc_stderr": 0.04382094705550988, + "acc_norm": 0.48091603053435117, + "acc_norm_stderr": 0.04382094705550988 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6111111111111112, + "acc_stderr": 0.0347327959083696, + "acc_norm": 0.6111111111111112, + "acc_norm_stderr": 0.0347327959083696 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4689655172413793, + "acc_stderr": 0.04158632762097828, + "acc_norm": 0.4689655172413793, + "acc_norm_stderr": 0.04158632762097828 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237655, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237655 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.44537815126050423, + "acc_stderr": 0.032284106267163895, + "acc_norm": 0.44537815126050423, + "acc_norm_stderr": 0.032284106267163895 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4358974358974359, + "acc_stderr": 0.025141801511177505, + "acc_norm": 0.4358974358974359, + "acc_norm_stderr": 0.025141801511177505 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_global_facts|5": { + 
"acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5370370370370371, + "acc_stderr": 0.04820403072760627, + "acc_norm": 0.5370370370370371, + "acc_norm_stderr": 0.04820403072760627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.33497536945812806, + "acc_stderr": 0.0332085274234831, + "acc_norm": 0.33497536945812806, + "acc_norm_stderr": 0.0332085274234831 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.45161290322580644, + "acc_stderr": 0.028310500348568392, + "acc_norm": 0.45161290322580644, + "acc_norm_stderr": 0.028310500348568392 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7008547008547008, + "acc_stderr": 0.029996951858349483, + "acc_norm": 0.7008547008547008, + "acc_norm_stderr": 0.029996951858349483 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4867924528301887, + "acc_stderr": 0.030762134874500476, + "acc_norm": 0.4867924528301887, + "acc_norm_stderr": 0.030762134874500476 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5272727272727272, + "acc_stderr": 0.04782001791380061, + "acc_norm": 0.5272727272727272, + "acc_norm_stderr": 0.04782001791380061 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3037037037037037, + "acc_stderr": 0.028037929969114986, + "acc_norm": 0.3037037037037037, + "acc_norm_stderr": 0.028037929969114986 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.32450331125827814, + "acc_stderr": 0.038227469376587525, + "acc_norm": 0.32450331125827814, + "acc_norm_stderr": 0.038227469376587525 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.582089552238806, + "acc_stderr": 0.034875586404620636, + "acc_norm": 0.582089552238806, + "acc_norm_stderr": 0.034875586404620636 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3815028901734104, + "acc_stderr": 0.0370385119309952, + "acc_norm": 0.3815028901734104, + "acc_norm_stderr": 
0.0370385119309952 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.024594975128920938, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.024594975128920938 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.04016660030451233, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.04016660030451233 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.55, + "acc_stderr": 0.049999999999999996, + "acc_norm": 0.55, + "acc_norm_stderr": 0.049999999999999996 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.49421965317919075, + "acc_stderr": 0.026917296179149116, + "acc_norm": 0.49421965317919075, + "acc_norm_stderr": 0.026917296179149116 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.44171779141104295, + "acc_stderr": 0.03901591825836185, + "acc_norm": 0.44171779141104295, + "acc_norm_stderr": 0.03901591825836185 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4660493827160494, + "acc_stderr": 0.027756535257347663, + "acc_norm": 0.4660493827160494, + "acc_norm_stderr": 0.027756535257347663 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5647668393782384, + "acc_stderr": 0.03578038165008586, + "acc_norm": 0.5647668393782384, + "acc_norm_stderr": 0.03578038165008586 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.37719298245614036, + "acc_stderr": 0.04559522141958215, + "acc_norm": 0.37719298245614036, + "acc_norm_stderr": 0.04559522141958215 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5706422018348624, + "acc_stderr": 0.0212222863972365, + "acc_norm": 
0.5706422018348624, + "acc_norm_stderr": 0.0212222863972365 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.037184890068181146, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.037184890068181146 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.43137254901960786, + "acc_stderr": 0.02835895631342354, + "acc_norm": 0.43137254901960786, + "acc_norm_stderr": 0.02835895631342354 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.043913262867240704, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.043913262867240704 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4473684210526316, + "acc_stderr": 0.04046336883978251, + "acc_norm": 0.4473684210526316, + "acc_norm_stderr": 0.04046336883978251 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.42483660130718953, + "acc_stderr": 0.019997973035458333, + "acc_norm": 0.42483660130718953, + "acc_norm_stderr": 0.019997973035458333 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.02812163604063989, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.02812163604063989 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.045479609997643757, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.045479609997643757 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.029886910547626974, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.029886910547626974 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.28044692737430166, + "acc_stderr": 0.015024083883322874, + "acc_norm": 0.28044692737430166, + "acc_norm_stderr": 0.015024083883322874 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 
0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.31985294117647056, + "acc_stderr": 0.028332959514031232, + "acc_norm": 0.31985294117647056, + "acc_norm_stderr": 0.028332959514031232 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3836734693877551, + "acc_stderr": 0.031130880396235926, + "acc_norm": 0.3836734693877551, + "acc_norm_stderr": 0.031130880396235926 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.569620253164557, + "acc_stderr": 0.03223017195937598, + "acc_norm": 0.569620253164557, + "acc_norm_stderr": 0.03223017195937598 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.32920469361147325, + "acc_stderr": 0.01200209166690231, + "acc_norm": 0.32920469361147325, + "acc_norm_stderr": 0.01200209166690231 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4950980392156863, + "acc_stderr": 0.035091433756067866, + "acc_norm": 0.4950980392156863, + "acc_norm_stderr": 0.035091433756067866 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6, + "acc_stderr": 0.038254602783800246, + "acc_norm": 0.6, + "acc_norm_stderr": 0.038254602783800246 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2937576499388005, + "mc1_stderr": 0.015945068581236614, + "mc2": 0.44148578223532337, + "mc2_stderr": 0.014972368530284016 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5903187721369539, + "acc_stderr": 0.01690756819221948, + "acc_norm": 0.6564344746162928, + "acc_norm_stderr": 0.016327334806429134 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + 
"harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + 
"harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "AIFT/AIFT-ko-orca-plat-Yi-ko-6b-v2.1", + "model_sha": "11c64f8b31c82fc6f5ebefee3054e9cb5699181b", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/AIFT/PACK-13b-v1.0/result_2023-12-07 02:16:32.json b/AIFT/PACK-13b-v1.0/result_2023-12-07 02:16:32.json new file mode 100644 index 0000000000000000000000000000000000000000..0ee8c405b9da1af281d3a498af38cc4f44a04171 --- /dev/null +++ b/AIFT/PACK-13b-v1.0/result_2023-12-07 02:16:32.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3378839590443686, + "acc_stderr": 0.01382204792228351, + "acc_norm": 0.37542662116040953, + "acc_norm_stderr": 0.01415063143511173 + }, + "harness|ko_hellaswag|10": { + "acc": 0.37044413463453496, + "acc_stderr": 0.004819367172685971, + "acc_norm": 0.4788886675960964, + "acc_norm_stderr": 0.004985331652408348 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.45614035087719296, + "acc_stderr": 0.03820042586602967, + "acc_norm": 0.45614035087719296, + "acc_norm_stderr": 0.03820042586602967 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5048543689320388, + "acc_stderr": 0.04950504382128921, + "acc_norm": 0.5048543689320388, + "acc_norm_stderr": 0.04950504382128921 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.49169859514687103, + "acc_stderr": 
0.017877498991072008, + "acc_norm": 0.49169859514687103, + "acc_norm_stderr": 0.017877498991072008 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.362962962962963, + "acc_stderr": 0.041539484047424, + "acc_norm": 0.362962962962963, + "acc_norm_stderr": 0.041539484047424 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.35319148936170214, + "acc_stderr": 0.031245325202761926, + "acc_norm": 0.35319148936170214, + "acc_norm_stderr": 0.031245325202761926 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39759036144578314, + "acc_stderr": 0.038099730845402184, + "acc_norm": 0.39759036144578314, + "acc_norm_stderr": 0.038099730845402184 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4340836012861736, + "acc_stderr": 0.0281502322445356, + "acc_norm": 0.4340836012861736, + "acc_norm_stderr": 0.0281502322445356 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4125560538116592, + "acc_stderr": 0.03304062175449297, + "acc_norm": 0.4125560538116592, + "acc_norm_stderr": 0.03304062175449297 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.46564885496183206, + "acc_stderr": 0.04374928560599738, + "acc_norm": 0.46564885496183206, + "acc_norm_stderr": 0.04374928560599738 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5151515151515151, + "acc_stderr": 0.0356071651653106, + "acc_norm": 0.5151515151515151, + "acc_norm_stderr": 0.0356071651653106 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3931034482758621, + "acc_stderr": 0.040703290137070705, + "acc_norm": 0.3931034482758621, + "acc_norm_stderr": 0.040703290137070705 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.18627450980392157, + 
"acc_stderr": 0.038739587141493524, + "acc_norm": 0.18627450980392157, + "acc_norm_stderr": 0.038739587141493524 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.37815126050420167, + "acc_stderr": 0.031499305777849054, + "acc_norm": 0.37815126050420167, + "acc_norm_stderr": 0.031499305777849054 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3769230769230769, + "acc_stderr": 0.024570975364225995, + "acc_norm": 0.3769230769230769, + "acc_norm_stderr": 0.024570975364225995 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.55, + "acc_stderr": 0.049999999999999996, + "acc_norm": 0.55, + "acc_norm_stderr": 0.049999999999999996 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3793103448275862, + "acc_stderr": 0.03413963805906235, + "acc_norm": 0.3793103448275862, + "acc_norm_stderr": 0.03413963805906235 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.45161290322580644, + "acc_stderr": 0.02831050034856839, + "acc_norm": 0.45161290322580644, + "acc_norm_stderr": 0.02831050034856839 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6025641025641025, + "acc_stderr": 0.03205953453789293, + "acc_norm": 0.6025641025641025, + "acc_norm_stderr": 0.03205953453789293 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.39622641509433965, + "acc_stderr": 0.03010279378179119, + "acc_norm": 0.39622641509433965, + "acc_norm_stderr": 0.03010279378179119 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.42727272727272725, + "acc_stderr": 0.04738198703545483, + "acc_norm": 0.42727272727272725, + "acc_norm_stderr": 0.04738198703545483 + }, + 
"harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2740740740740741, + "acc_stderr": 0.027195934804085626, + "acc_norm": 0.2740740740740741, + "acc_norm_stderr": 0.027195934804085626 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.03802039760107903, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.03802039760107903 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5373134328358209, + "acc_stderr": 0.035256751674679745, + "acc_norm": 0.5373134328358209, + "acc_norm_stderr": 0.035256751674679745 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3352601156069364, + "acc_stderr": 0.03599586301247078, + "acc_norm": 0.3352601156069364, + "acc_norm_stderr": 0.03599586301247078 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30687830687830686, + "acc_stderr": 0.02375292871211213, + "acc_norm": 0.30687830687830686, + "acc_norm_stderr": 0.02375292871211213 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3472222222222222, + "acc_stderr": 0.039812405437178615, + "acc_norm": 0.3472222222222222, + "acc_norm_stderr": 0.039812405437178615 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.55, + "acc_stderr": 0.049999999999999996, + "acc_norm": 0.55, + "acc_norm_stderr": 0.049999999999999996 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.47398843930635837, + "acc_stderr": 0.02688264343402289, + "acc_norm": 0.47398843930635837, + "acc_norm_stderr": 0.02688264343402289 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.37423312883435583, + "acc_stderr": 0.03802068102899615, + "acc_norm": 0.37423312883435583, + "acc_norm_stderr": 0.03802068102899615 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3950617283950617, + "acc_stderr": 0.02720111766692566, + "acc_norm": 0.3950617283950617, + 
"acc_norm_stderr": 0.02720111766692566 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.41968911917098445, + "acc_stderr": 0.035615873276858834, + "acc_norm": 0.41968911917098445, + "acc_norm_stderr": 0.035615873276858834 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.04049339297748142, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.04049339297748142 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3963302752293578, + "acc_stderr": 0.020971469947900525, + "acc_norm": 0.3963302752293578, + "acc_norm_stderr": 0.020971469947900525 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.04134913018303316, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.04134913018303316 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4084967320261438, + "acc_stderr": 0.028146405993096358, + "acc_norm": 0.4084967320261438, + "acc_norm_stderr": 0.028146405993096358 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6446280991735537, + "acc_stderr": 0.0436923632657398, + "acc_norm": 0.6446280991735537, + "acc_norm_stderr": 0.0436923632657398 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4473684210526316, + "acc_stderr": 0.04046336883978251, + "acc_norm": 0.4473684210526316, + "acc_norm_stderr": 0.04046336883978251 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.31699346405228757, + "acc_stderr": 0.018824219512706207, + "acc_norm": 0.31699346405228757, + "acc_norm_stderr": 0.018824219512706207 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32269503546099293, + "acc_stderr": 
0.027889139300534778, + "acc_norm": 0.32269503546099293, + "acc_norm_stderr": 0.027889139300534778 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.04287858751340456, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.04287858751340456 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2638888888888889, + "acc_stderr": 0.030058202704309846, + "acc_norm": 0.2638888888888889, + "acc_norm_stderr": 0.030058202704309846 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.29044117647058826, + "acc_stderr": 0.027576468622740505, + "acc_norm": 0.29044117647058826, + "acc_norm_stderr": 0.027576468622740505 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4163265306122449, + "acc_stderr": 0.031557828165561644, + "acc_norm": 0.4163265306122449, + "acc_norm_stderr": 0.031557828165561644 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5358649789029536, + "acc_stderr": 0.03246338898055659, + "acc_norm": 0.5358649789029536, + "acc_norm_stderr": 0.03246338898055659 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3200782268578879, + "acc_stderr": 0.011914791947638522, + "acc_norm": 0.3200782268578879, + "acc_norm_stderr": 0.011914791947638522 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.034107853389047184, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.034107853389047184 + }, + 
"harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4909090909090909, + "acc_stderr": 0.03903698647748441, + "acc_norm": 0.4909090909090909, + "acc_norm_stderr": 0.03903698647748441 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2692778457772338, + "mc1_stderr": 0.015528566637087312, + "mc2": 0.4274629100267272, + "mc2_stderr": 0.015462888327553083 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3482880755608028, + "acc_stderr": 0.016379926739148044, + "acc_norm": 0.4132231404958678, + "acc_norm_stderr": 0.016929480234495232 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + 
"harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "AIFT/PACK-13b-v1.0", + "model_sha": "27f7b1eb3d926034aa90feb9ebc31788182046dd", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/AIFT/PACK-13b-v1.1/result_2023-12-11 08:43:39.json b/AIFT/PACK-13b-v1.1/result_2023-12-11 08:43:39.json new file mode 100644 index 0000000000000000000000000000000000000000..17592aca2540f47dfceeb80a9c6e6f50e4b5bf12 --- /dev/null +++ b/AIFT/PACK-13b-v1.1/result_2023-12-11 08:43:39.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3361774744027304, + "acc_stderr": 
0.013804855026205761, + "acc_norm": 0.38139931740614336, + "acc_norm_stderr": 0.014194389086685261 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3724357697669787, + "acc_stderr": 0.004824655406075561, + "acc_norm": 0.48078072097191793, + "acc_norm_stderr": 0.004986093791041656 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.45614035087719296, + "acc_stderr": 0.03820042586602966, + "acc_norm": 0.45614035087719296, + "acc_norm_stderr": 0.03820042586602966 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.49514563106796117, + "acc_stderr": 0.049505043821289195, + "acc_norm": 0.49514563106796117, + "acc_norm_stderr": 0.049505043821289195 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4891443167305236, + "acc_stderr": 0.017875748840242414, + "acc_norm": 0.4891443167305236, + "acc_norm_stderr": 0.017875748840242414 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3851851851851852, + "acc_stderr": 0.042039210401562783, + "acc_norm": 0.3851851851851852, + "acc_norm_stderr": 0.042039210401562783 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3574468085106383, + "acc_stderr": 0.03132941789476425, + "acc_norm": 0.3574468085106383, + "acc_norm_stderr": 0.03132941789476425 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.35542168674698793, + "acc_stderr": 0.037262143543224144, + "acc_norm": 0.35542168674698793, + "acc_norm_stderr": 0.037262143543224144 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4340836012861736, + "acc_stderr": 0.0281502322445356, + "acc_norm": 0.4340836012861736, + "acc_norm_stderr": 0.0281502322445356 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3811659192825112, + "acc_stderr": 0.03259625118416827, + "acc_norm": 0.3811659192825112, + "acc_norm_stderr": 0.03259625118416827 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4732824427480916, + 
"acc_stderr": 0.04379024936553894, + "acc_norm": 0.4732824427480916, + "acc_norm_stderr": 0.04379024936553894 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.35, + "acc_stderr": 0.04793724854411022, + "acc_norm": 0.35, + "acc_norm_stderr": 0.04793724854411022 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5050505050505051, + "acc_stderr": 0.035621707606254015, + "acc_norm": 0.5050505050505051, + "acc_norm_stderr": 0.035621707606254015 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.041042692118062316, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.041042692118062316 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.17647058823529413, + "acc_stderr": 0.0379328118530781, + "acc_norm": 0.17647058823529413, + "acc_norm_stderr": 0.0379328118530781 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3697478991596639, + "acc_stderr": 0.031357095996135904, + "acc_norm": 0.3697478991596639, + "acc_norm_stderr": 0.031357095996135904 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.36153846153846153, + "acc_stderr": 0.02435958146539696, + "acc_norm": 0.36153846153846153, + "acc_norm_stderr": 0.02435958146539696 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.57, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.57, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.04826217294139894, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.04826217294139894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.37438423645320196, + "acc_stderr": 0.03405155380561952, + "acc_norm": 0.37438423645320196, + "acc_norm_stderr": 0.03405155380561952 + }, + "harness|ko_mmlu_high_school_biology|5": { 
+ "acc": 0.44516129032258067, + "acc_stderr": 0.02827241018621491, + "acc_norm": 0.44516129032258067, + "acc_norm_stderr": 0.02827241018621491 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6196581196581197, + "acc_stderr": 0.03180425204384099, + "acc_norm": 0.6196581196581197, + "acc_norm_stderr": 0.03180425204384099 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3622641509433962, + "acc_stderr": 0.029582245128384296, + "acc_norm": 0.3622641509433962, + "acc_norm_stderr": 0.029582245128384296 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.41818181818181815, + "acc_stderr": 0.04724577405731571, + "acc_norm": 0.41818181818181815, + "acc_norm_stderr": 0.04724577405731571 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.29259259259259257, + "acc_stderr": 0.02773896963217609, + "acc_norm": 0.29259259259259257, + "acc_norm_stderr": 0.02773896963217609 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.037579499229433426, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.037579499229433426 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5174129353233831, + "acc_stderr": 0.03533389234739245, + "acc_norm": 0.5174129353233831, + "acc_norm_stderr": 0.03533389234739245 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.32947976878612717, + "acc_stderr": 0.03583901754736412, + "acc_norm": 0.32947976878612717, + "acc_norm_stderr": 0.03583901754736412 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30423280423280424, + "acc_stderr": 0.023695415009463087, + "acc_norm": 0.30423280423280424, + "acc_norm_stderr": 0.023695415009463087 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3472222222222222, + "acc_stderr": 0.039812405437178615, + "acc_norm": 0.3472222222222222, + "acc_norm_stderr": 0.039812405437178615 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932269, + "acc_norm": 0.22, + "acc_norm_stderr": 
0.04163331998932269 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.55, + "acc_stderr": 0.05, + "acc_norm": 0.55, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.47398843930635837, + "acc_stderr": 0.02688264343402289, + "acc_norm": 0.47398843930635837, + "acc_norm_stderr": 0.02688264343402289 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3803680981595092, + "acc_stderr": 0.038142698932618374, + "acc_norm": 0.3803680981595092, + "acc_norm_stderr": 0.038142698932618374 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.39814814814814814, + "acc_stderr": 0.027237415094592474, + "acc_norm": 0.39814814814814814, + "acc_norm_stderr": 0.027237415094592474 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.43523316062176165, + "acc_stderr": 0.03578038165008586, + "acc_norm": 0.43523316062176165, + "acc_norm_stderr": 0.03578038165008586 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.04185774424022057, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.04185774424022057 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3963302752293578, + "acc_stderr": 0.02097146994790053, + "acc_norm": 0.3963302752293578, + "acc_norm_stderr": 0.02097146994790053 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3253968253968254, + "acc_stderr": 0.041905964388711366, + "acc_norm": 0.3253968253968254, + "acc_norm_stderr": 0.041905964388711366 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.02791405551046802, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.02791405551046802 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + 
"harness|ko_mmlu_international_law|5": { + "acc": 0.6115702479338843, + "acc_stderr": 0.04449270350068383, + "acc_norm": 0.6115702479338843, + "acc_norm_stderr": 0.04449270350068383 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4342105263157895, + "acc_stderr": 0.04033565667848319, + "acc_norm": 0.4342105263157895, + "acc_norm_stderr": 0.04033565667848319 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.31862745098039214, + "acc_stderr": 0.01885008469646872, + "acc_norm": 0.31862745098039214, + "acc_norm_stderr": 0.01885008469646872 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3120567375886525, + "acc_stderr": 0.027640120545169945, + "acc_norm": 0.3120567375886525, + "acc_norm_stderr": 0.027640120545169945 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25892857142857145, + "acc_stderr": 0.041577515398656284, + "acc_norm": 0.25892857142857145, + "acc_norm_stderr": 0.041577515398656284 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2824074074074074, + "acc_stderr": 0.03070137211151092, + "acc_norm": 0.2824074074074074, + "acc_norm_stderr": 0.03070137211151092 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2435754189944134, + "acc_stderr": 0.01435591196476786, + "acc_norm": 0.2435754189944134, + "acc_norm_stderr": 0.01435591196476786 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.027678468642144686, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.027678468642144686 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4163265306122449, + "acc_stderr": 0.031557828165561644, + "acc_norm": 
0.4163265306122449, + "acc_norm_stderr": 0.031557828165561644 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.4936708860759494, + "acc_stderr": 0.032544620107678585, + "acc_norm": 0.4936708860759494, + "acc_norm_stderr": 0.032544620107678585 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.30182529335071706, + "acc_stderr": 0.01172435051810589, + "acc_norm": 0.30182529335071706, + "acc_norm_stderr": 0.01172435051810589 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.37745098039215685, + "acc_stderr": 0.03402272044340704, + "acc_norm": 0.37745098039215685, + "acc_norm_stderr": 0.03402272044340704 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.503030303030303, + "acc_stderr": 0.03904272341431856, + "acc_norm": 0.503030303030303, + "acc_norm_stderr": 0.03904272341431856 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2692778457772338, + "mc1_stderr": 0.015528566637087307, + "mc2": 0.42195295057052135, + "mc2_stderr": 0.015423294021851608 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3317591499409681, + "acc_stderr": 0.016187984642157312, + "acc_norm": 0.3955135773317591, + "acc_norm_stderr": 0.01681081590220604 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + 
"harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "AIFT/PACK-13b-v1.1", + "model_sha": 
"a547563032d1b762d80a80959f9b00aefab44eb5", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/AIFT/aift-llama2-koen-instruct-dpo-v1.01/result_2023-12-14 06:44:33.json b/AIFT/aift-llama2-koen-instruct-dpo-v1.01/result_2023-12-14 06:44:33.json new file mode 100644 index 0000000000000000000000000000000000000000..60c7c942b96270582d72f0e5a659a36df87535f8 --- /dev/null +++ b/AIFT/aift-llama2-koen-instruct-dpo-v1.01/result_2023-12-14 06:44:33.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4069965870307167, + "acc_stderr": 0.014356399418009124, + "acc_norm": 0.46245733788395904, + "acc_norm_stderr": 0.01457014449507558 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4193387771360287, + "acc_stderr": 0.00492442401807367, + "acc_norm": 0.5768771161123282, + "acc_norm_stderr": 0.004930448527146668 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.49122807017543857, + "acc_stderr": 0.038342347441649924, + "acc_norm": 0.49122807017543857, + "acc_norm_stderr": 0.038342347441649924 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4563106796116505, + "acc_stderr": 0.049318019942204146, + "acc_norm": 0.4563106796116505, + "acc_norm_stderr": 0.049318019942204146 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.01776925058353325, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.01776925058353325 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.5037037037037037, + "acc_stderr": 0.04319223625811331, + "acc_norm": 0.5037037037037037, + "acc_norm_stderr": 0.04319223625811331 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4340425531914894, + "acc_stderr": 
0.032400380867927465, + "acc_norm": 0.4340425531914894, + "acc_norm_stderr": 0.032400380867927465 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.37349397590361444, + "acc_stderr": 0.03765845117168862, + "acc_norm": 0.37349397590361444, + "acc_norm_stderr": 0.03765845117168862 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4887459807073955, + "acc_stderr": 0.028390897396863533, + "acc_norm": 0.4887459807073955, + "acc_norm_stderr": 0.028390897396863533 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5022421524663677, + "acc_stderr": 0.033557465352232634, + "acc_norm": 0.5022421524663677, + "acc_norm_stderr": 0.033557465352232634 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48091603053435117, + "acc_stderr": 0.04382094705550988, + "acc_norm": 0.48091603053435117, + "acc_norm_stderr": 0.04382094705550988 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5808080808080808, + "acc_stderr": 0.035155207286704175, + "acc_norm": 0.5808080808080808, + "acc_norm_stderr": 0.035155207286704175 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3931034482758621, + "acc_stderr": 0.0407032901370707, + "acc_norm": 0.3931034482758621, + "acc_norm_stderr": 0.0407032901370707 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237657, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237657 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.44537815126050423, + "acc_stderr": 0.032284106267163895, + "acc_norm": 0.44537815126050423, + "acc_norm_stderr": 0.032284106267163895 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.41025641025641024, + "acc_stderr": 0.024939313906940777, + "acc_norm": 0.41025641025641024, + "acc_norm_stderr": 0.024939313906940777 + }, + 
"harness|ko_mmlu_computer_security|5": { + "acc": 0.55, + "acc_stderr": 0.04999999999999999, + "acc_norm": 0.55, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3793103448275862, + "acc_stderr": 0.03413963805906235, + "acc_norm": 0.3793103448275862, + "acc_norm_stderr": 0.03413963805906235 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.45161290322580644, + "acc_stderr": 0.02831050034856839, + "acc_norm": 0.45161290322580644, + "acc_norm_stderr": 0.02831050034856839 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.03088273697413865, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.03088273697413865 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3886792452830189, + "acc_stderr": 0.030000485448675986, + "acc_norm": 0.3886792452830189, + "acc_norm_stderr": 0.030000485448675986 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5363636363636364, + "acc_stderr": 0.04776449162396197, + "acc_norm": 0.5363636363636364, + "acc_norm_stderr": 0.04776449162396197 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2740740740740741, + "acc_stderr": 0.027195934804085622, + "acc_norm": 0.2740740740740741, + "acc_norm_stderr": 0.027195934804085622 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + "acc_stderr": 0.03684881521389023, + "acc_norm": 0.2847682119205298, + "acc_norm_stderr": 0.03684881521389023 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5472636815920398, + "acc_stderr": 0.03519702717576915, + "acc_norm": 0.5472636815920398, + "acc_norm_stderr": 
0.03519702717576915 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3815028901734104, + "acc_stderr": 0.03703851193099521, + "acc_norm": 0.3815028901734104, + "acc_norm_stderr": 0.03703851193099521 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30158730158730157, + "acc_stderr": 0.023636975996101806, + "acc_norm": 0.30158730158730157, + "acc_norm_stderr": 0.023636975996101806 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.041553199555931467, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.041553199555931467 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.58, + "acc_stderr": 0.04960449637488584, + "acc_norm": 0.58, + "acc_norm_stderr": 0.04960449637488584 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5028901734104047, + "acc_stderr": 0.026918645383239015, + "acc_norm": 0.5028901734104047, + "acc_norm_stderr": 0.026918645383239015 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.48466257668711654, + "acc_stderr": 0.03926522378708843, + "acc_norm": 0.48466257668711654, + "acc_norm_stderr": 0.03926522378708843 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4691358024691358, + "acc_stderr": 0.027767689606833935, + "acc_norm": 0.4691358024691358, + "acc_norm_stderr": 0.027767689606833935 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.046482319871173156, + "acc_norm": 0.31, + "acc_norm_stderr": 0.046482319871173156 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5077720207253886, + "acc_stderr": 0.03608003225569654, + "acc_norm": 0.5077720207253886, + "acc_norm_stderr": 0.03608003225569654 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.0414243971948936, + "acc_norm": 0.2631578947368421, + 
"acc_norm_stderr": 0.0414243971948936 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5541284403669725, + "acc_stderr": 0.02131133500970858, + "acc_norm": 0.5541284403669725, + "acc_norm_stderr": 0.02131133500970858 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.29365079365079366, + "acc_stderr": 0.04073524322147124, + "acc_norm": 0.29365079365079366, + "acc_norm_stderr": 0.04073524322147124 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.43137254901960786, + "acc_stderr": 0.028358956313423552, + "acc_norm": 0.43137254901960786, + "acc_norm_stderr": 0.028358956313423552 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.628099173553719, + "acc_stderr": 0.044120158066245044, + "acc_norm": 0.628099173553719, + "acc_norm_stderr": 0.044120158066245044 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4407894736842105, + "acc_stderr": 0.04040311062490437, + "acc_norm": 0.4407894736842105, + "acc_norm_stderr": 0.04040311062490437 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3839869281045752, + "acc_stderr": 0.019675808135281515, + "acc_norm": 0.3839869281045752, + "acc_norm_stderr": 0.019675808135281515 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.30141843971631205, + "acc_stderr": 0.02737412888263115, + "acc_norm": 0.30141843971631205, + "acc_norm_stderr": 0.02737412888263115 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25, + "acc_stderr": 0.04109974682633932, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04109974682633932 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.03114144782353604, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.03114144782353604 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2569832402234637, + "acc_stderr": 0.014614465821966346, + 
"acc_norm": 0.2569832402234637, + "acc_norm_stderr": 0.014614465821966346 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3639705882352941, + "acc_stderr": 0.02922719246003203, + "acc_norm": 0.3639705882352941, + "acc_norm_stderr": 0.02922719246003203 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.37142857142857144, + "acc_stderr": 0.030932858792789848, + "acc_norm": 0.37142857142857144, + "acc_norm_stderr": 0.030932858792789848 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6244725738396625, + "acc_stderr": 0.03152256243091156, + "acc_norm": 0.6244725738396625, + "acc_norm_stderr": 0.03152256243091156 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.34028683181225555, + "acc_stderr": 0.012101217610223782, + "acc_norm": 0.34028683181225555, + "acc_norm_stderr": 0.012101217610223782 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5, + "acc_stderr": 0.03509312031717982, + "acc_norm": 0.5, + "acc_norm_stderr": 0.03509312031717982 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5575757575757576, + "acc_stderr": 0.03878372113711275, + "acc_norm": 0.5575757575757576, + "acc_norm_stderr": 0.03878372113711275 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24724602203182375, + "mc1_stderr": 0.01510240479735965, + "mc2": 0.40156672902861484, + "mc2_stderr": 0.015311585666350696 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3754427390791027, + "acc_stderr": 0.01664841158951109, + "acc_norm": 0.38134592680047225, + "acc_norm_stderr": 0.016699301768828084 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + 
"harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + 
"harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "AIFT/aift-llama2-koen-instruct-dpo-v1.01", + "model_sha": "1c1e407910427042580cb11ad8569567c769cf49", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/AIFT/aift-llama2-koen-instruct-dpo-v1.02/result_2023-12-15 03:06:44.json b/AIFT/aift-llama2-koen-instruct-dpo-v1.02/result_2023-12-15 03:06:44.json new file mode 100644 index 0000000000000000000000000000000000000000..d45ae087804cadf720b98e0a4eae4d53910a5d8c --- /dev/null +++ b/AIFT/aift-llama2-koen-instruct-dpo-v1.02/result_2023-12-15 03:06:44.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4052901023890785, + "acc_stderr": 0.014346869060229327, + "acc_norm": 0.4658703071672355, + "acc_norm_stderr": 0.014577311315231097 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4192391953794065, + "acc_stderr": 0.004924261467934422, + "acc_norm": 0.5763792073292173, + "acc_norm_stderr": 0.004931219148182245 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5087719298245614, + "acc_stderr": 0.038342347441649924, + "acc_norm": 0.5087719298245614, + "acc_norm_stderr": 0.038342347441649924 + }, + "harness|ko_mmlu_management|5": { + "acc": 
0.5242718446601942, + "acc_stderr": 0.049449010929737795, + "acc_norm": 0.5242718446601942, + "acc_norm_stderr": 0.049449010929737795 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.565772669220945, + "acc_stderr": 0.017724589389677785, + "acc_norm": 0.565772669220945, + "acc_norm_stderr": 0.017724589389677785 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.5185185185185185, + "acc_stderr": 0.043163785995113245, + "acc_norm": 0.5185185185185185, + "acc_norm_stderr": 0.043163785995113245 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4, + "acc_stderr": 0.032025630761017346, + "acc_norm": 0.4, + "acc_norm_stderr": 0.032025630761017346 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3855421686746988, + "acc_stderr": 0.037891344246115476, + "acc_norm": 0.3855421686746988, + "acc_norm_stderr": 0.037891344246115476 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5112540192926045, + "acc_stderr": 0.028390897396863526, + "acc_norm": 0.5112540192926045, + "acc_norm_stderr": 0.028390897396863526 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.48878923766816146, + "acc_stderr": 0.033549366530984746, + "acc_norm": 0.48878923766816146, + "acc_norm_stderr": 0.033549366530984746 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4961832061068702, + "acc_stderr": 0.04385162325601553, + "acc_norm": 0.4961832061068702, + "acc_norm_stderr": 0.04385162325601553 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5959595959595959, + "acc_stderr": 0.03496130972056127, + "acc_norm": 0.5959595959595959, + "acc_norm_stderr": 0.03496130972056127 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4206896551724138, + 
"acc_stderr": 0.0411391498118926, + "acc_norm": 0.4206896551724138, + "acc_norm_stderr": 0.0411391498118926 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.17647058823529413, + "acc_stderr": 0.0379328118530781, + "acc_norm": 0.17647058823529413, + "acc_norm_stderr": 0.0379328118530781 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.48739495798319327, + "acc_stderr": 0.03246816765752174, + "acc_norm": 0.48739495798319327, + "acc_norm_stderr": 0.03246816765752174 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4358974358974359, + "acc_stderr": 0.025141801511177498, + "acc_norm": 0.4358974358974359, + "acc_norm_stderr": 0.025141801511177498 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4351851851851852, + "acc_stderr": 0.04792898170907061, + "acc_norm": 0.4351851851851852, + "acc_norm_stderr": 0.04792898170907061 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.35960591133004927, + "acc_stderr": 0.03376458246509567, + "acc_norm": 0.35960591133004927, + "acc_norm_stderr": 0.03376458246509567 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4774193548387097, + "acc_stderr": 0.028414985019707868, + "acc_norm": 0.4774193548387097, + "acc_norm_stderr": 0.028414985019707868 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6837606837606838, + "acc_stderr": 0.03046365674734026, + "acc_norm": 0.6837606837606838, + "acc_norm_stderr": 0.03046365674734026 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.44528301886792454, + "acc_stderr": 0.030588052974270655, + "acc_norm": 0.44528301886792454, + "acc_norm_stderr": 0.030588052974270655 + }, + 
"harness|ko_mmlu_public_relations|5": { + "acc": 0.5727272727272728, + "acc_stderr": 0.047381987035454834, + "acc_norm": 0.5727272727272728, + "acc_norm_stderr": 0.047381987035454834 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25555555555555554, + "acc_stderr": 0.026593939101844086, + "acc_norm": 0.25555555555555554, + "acc_norm_stderr": 0.026593939101844086 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.037101857261199946, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.037101857261199946 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5522388059701493, + "acc_stderr": 0.03516184772952167, + "acc_norm": 0.5522388059701493, + "acc_norm_stderr": 0.03516184772952167 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4046242774566474, + "acc_stderr": 0.03742461193887248, + "acc_norm": 0.4046242774566474, + "acc_norm_stderr": 0.03742461193887248 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.023809523809523857, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.023809523809523857 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4097222222222222, + "acc_stderr": 0.04112490974670787, + "acc_norm": 0.4097222222222222, + "acc_norm_stderr": 0.04112490974670787 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.63, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.63, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5289017341040463, + "acc_stderr": 0.02687408588351835, + "acc_norm": 0.5289017341040463, + "acc_norm_stderr": 0.02687408588351835 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.48466257668711654, + "acc_stderr": 0.03926522378708843, + "acc_norm": 0.48466257668711654, + 
"acc_norm_stderr": 0.03926522378708843 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4845679012345679, + "acc_stderr": 0.0278074900442762, + "acc_norm": 0.4845679012345679, + "acc_norm_stderr": 0.0278074900442762 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5233160621761658, + "acc_stderr": 0.03604513672442202, + "acc_norm": 0.5233160621761658, + "acc_norm_stderr": 0.03604513672442202 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.04096985139843671, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.04096985139843671 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5798165137614679, + "acc_stderr": 0.021162420048273515, + "acc_norm": 0.5798165137614679, + "acc_norm_stderr": 0.021162420048273515 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.04006168083848878, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.04006168083848878 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.45751633986928103, + "acc_stderr": 0.02852638345214263, + "acc_norm": 0.45751633986928103, + "acc_norm_stderr": 0.02852638345214263 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6694214876033058, + "acc_stderr": 0.04294340845212094, + "acc_norm": 0.6694214876033058, + "acc_norm_stderr": 0.04294340845212094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4473684210526316, + "acc_stderr": 0.0404633688397825, + "acc_norm": 0.4473684210526316, + "acc_norm_stderr": 0.0404633688397825 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.39705882352941174, + "acc_stderr": 0.019794488900024113, + "acc_norm": 0.39705882352941174, + 
"acc_norm_stderr": 0.019794488900024113 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.30141843971631205, + "acc_stderr": 0.02737412888263115, + "acc_norm": 0.30141843971631205, + "acc_norm_stderr": 0.02737412888263115 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.23214285714285715, + "acc_stderr": 0.04007341809755806, + "acc_norm": 0.23214285714285715, + "acc_norm_stderr": 0.04007341809755806 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.33796296296296297, + "acc_stderr": 0.03225941352631295, + "acc_norm": 0.33796296296296297, + "acc_norm_stderr": 0.03225941352631295 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2670391061452514, + "acc_stderr": 0.014796502622562546, + "acc_norm": 0.2670391061452514, + "acc_norm_stderr": 0.014796502622562546 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.39705882352941174, + "acc_stderr": 0.029722152099280055, + "acc_norm": 0.39705882352941174, + "acc_norm_stderr": 0.029722152099280055 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4326530612244898, + "acc_stderr": 0.03171752824062664, + "acc_norm": 0.4326530612244898, + "acc_norm_stderr": 0.03171752824062664 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6286919831223629, + "acc_stderr": 0.03145068600744859, + "acc_norm": 0.6286919831223629, + "acc_norm_stderr": 0.03145068600744859 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.34028683181225555, + "acc_stderr": 0.01210121761022378, + "acc_norm": 0.34028683181225555, + "acc_norm_stderr": 0.01210121761022378 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5, + 
"acc_stderr": 0.03509312031717982, + "acc_norm": 0.5, + "acc_norm_stderr": 0.03509312031717982 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5757575757575758, + "acc_stderr": 0.038592681420702636, + "acc_norm": 0.5757575757575758, + "acc_norm_stderr": 0.038592681420702636 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.23990208078335373, + "mc1_stderr": 0.01494881267906214, + "mc2": 0.39165785820787247, + "mc2_stderr": 0.015096702357183963 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3577331759149941, + "acc_stderr": 0.016479808935749976, + "acc_norm": 0.36481700118063753, + "acc_norm_stderr": 0.01655014433704659 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 
1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "AIFT/aift-llama2-koen-instruct-dpo-v1.02", + "model_sha": "9e9887d8579e1d19943d9d10f0d340620328c852", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/AIFT/aift-llama2-koen-instruct-v1.0/result_2023-12-14 00:45:21.json b/AIFT/aift-llama2-koen-instruct-v1.0/result_2023-12-14 00:45:21.json new file mode 100644 index 0000000000000000000000000000000000000000..7ddee52d0b12d08952b999bb8ef9a4801bd7f030 --- /dev/null +++ 
b/AIFT/aift-llama2-koen-instruct-v1.0/result_2023-12-14 00:45:21.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3771331058020478, + "acc_stderr": 0.0141633668961926, + "acc_norm": 0.4351535836177474, + "acc_norm_stderr": 0.01448798619718605 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4151563433578968, + "acc_stderr": 0.004917419367766031, + "acc_norm": 0.5669189404501095, + "acc_norm_stderr": 0.004944889545497955 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.49122807017543857, + "acc_stderr": 0.038342347441649924, + "acc_norm": 0.49122807017543857, + "acc_norm_stderr": 0.038342347441649924 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5242718446601942, + "acc_stderr": 0.049449010929737795, + "acc_norm": 0.5242718446601942, + "acc_norm_stderr": 0.049449010929737795 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5491698595146871, + "acc_stderr": 0.017793297572699034, + "acc_norm": 0.5491698595146871, + "acc_norm_stderr": 0.017793297572699034 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.5111111111111111, + "acc_stderr": 0.04318275491977976, + "acc_norm": 0.5111111111111111, + "acc_norm_stderr": 0.04318275491977976 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39148936170212767, + "acc_stderr": 0.03190701242326812, + "acc_norm": 0.39148936170212767, + "acc_norm_stderr": 0.03190701242326812 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3855421686746988, + "acc_stderr": 0.037891344246115476, + "acc_norm": 0.3855421686746988, + "acc_norm_stderr": 0.037891344246115476 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5337620578778135, + "acc_stderr": 0.028333277109562804, + "acc_norm": 0.5337620578778135, + "acc_norm_stderr": 0.028333277109562804 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5067264573991032, + 
"acc_stderr": 0.03355476596234354, + "acc_norm": 0.5067264573991032, + "acc_norm_stderr": 0.03355476596234354 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5038167938931297, + "acc_stderr": 0.04385162325601553, + "acc_norm": 0.5038167938931297, + "acc_norm_stderr": 0.04385162325601553 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5858585858585859, + "acc_stderr": 0.03509438348879629, + "acc_norm": 0.5858585858585859, + "acc_norm_stderr": 0.03509438348879629 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.04104269211806232, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.04104269211806232 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237655, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237655 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4579831932773109, + "acc_stderr": 0.03236361111951941, + "acc_norm": 0.4579831932773109, + "acc_norm_stderr": 0.03236361111951941 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4358974358974359, + "acc_stderr": 0.025141801511177498, + "acc_norm": 0.4358974358974359, + "acc_norm_stderr": 0.025141801511177498 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.53, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.53, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4351851851851852, + "acc_stderr": 0.04792898170907061, + "acc_norm": 0.4351851851851852, + "acc_norm_stderr": 0.04792898170907061 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 
0.39408866995073893, + "acc_stderr": 0.034381579670365446, + "acc_norm": 0.39408866995073893, + "acc_norm_stderr": 0.034381579670365446 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.47419354838709676, + "acc_stderr": 0.02840609505765332, + "acc_norm": 0.47419354838709676, + "acc_norm_stderr": 0.02840609505765332 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6581196581196581, + "acc_stderr": 0.03107502852650775, + "acc_norm": 0.6581196581196581, + "acc_norm_stderr": 0.03107502852650775 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4188679245283019, + "acc_stderr": 0.030365050829115208, + "acc_norm": 0.4188679245283019, + "acc_norm_stderr": 0.030365050829115208 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4909090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.4909090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.025348097468097856, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.025348097468097856 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.32450331125827814, + "acc_stderr": 0.038227469376587525, + "acc_norm": 0.32450331125827814, + "acc_norm_stderr": 0.038227469376587525 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5472636815920398, + "acc_stderr": 0.03519702717576915, + "acc_norm": 0.5472636815920398, + "acc_norm_stderr": 0.03519702717576915 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4161849710982659, + "acc_stderr": 0.03758517775404947, + "acc_norm": 0.4161849710982659, + "acc_norm_stderr": 0.03758517775404947 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.29365079365079366, + "acc_stderr": 0.02345603738398202, + "acc_norm": 0.29365079365079366, + "acc_norm_stderr": 0.02345603738398202 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4027777777777778, + "acc_stderr": 0.04101405519842426, + "acc_norm": 0.4027777777777778, + 
"acc_norm_stderr": 0.04101405519842426 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5144508670520231, + "acc_stderr": 0.026907849856282532, + "acc_norm": 0.5144508670520231, + "acc_norm_stderr": 0.026907849856282532 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4723926380368098, + "acc_stderr": 0.039223782906109894, + "acc_norm": 0.4723926380368098, + "acc_norm_stderr": 0.039223782906109894 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4876543209876543, + "acc_stderr": 0.027812262269327235, + "acc_norm": 0.4876543209876543, + "acc_norm_stderr": 0.027812262269327235 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.04605661864718381, + "acc_norm": 0.3, + "acc_norm_stderr": 0.04605661864718381 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5284974093264249, + "acc_stderr": 0.036025735712884414, + "acc_norm": 0.5284974093264249, + "acc_norm_stderr": 0.036025735712884414 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.22807017543859648, + "acc_stderr": 0.03947152782669415, + "acc_norm": 0.22807017543859648, + "acc_norm_stderr": 0.03947152782669415 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5596330275229358, + "acc_stderr": 0.021284310623761536, + "acc_norm": 0.5596330275229358, + "acc_norm_stderr": 0.021284310623761536 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.04134913018303316, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.04134913018303316 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.46078431372549017, + "acc_stderr": 0.028541722692618874, + "acc_norm": 
0.46078431372549017, + "acc_norm_stderr": 0.028541722692618874 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.043913262867240704, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.043913262867240704 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40131578947368424, + "acc_stderr": 0.03988903703336284, + "acc_norm": 0.40131578947368424, + "acc_norm_stderr": 0.03988903703336284 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4035947712418301, + "acc_stderr": 0.019848280168401147, + "acc_norm": 0.4035947712418301, + "acc_norm_stderr": 0.019848280168401147 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.30851063829787234, + "acc_stderr": 0.027553366165101376, + "acc_norm": 0.30851063829787234, + "acc_norm_stderr": 0.027553366165101376 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.23214285714285715, + "acc_stderr": 0.04007341809755806, + "acc_norm": 0.23214285714285715, + "acc_norm_stderr": 0.04007341809755806 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.032757734861009996, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.032757734861009996 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2435754189944134, + "acc_stderr": 0.014355911964767857, + "acc_norm": 0.2435754189944134, + "acc_norm_stderr": 0.014355911964767857 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.049999999999999996, + "acc_norm": 0.45, + "acc_norm_stderr": 0.049999999999999996 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4338235294117647, + 
"acc_stderr": 0.030105636570016643, + "acc_norm": 0.4338235294117647, + "acc_norm_stderr": 0.030105636570016643 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.46530612244897956, + "acc_stderr": 0.03193207024425314, + "acc_norm": 0.46530612244897956, + "acc_norm_stderr": 0.03193207024425314 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5822784810126582, + "acc_stderr": 0.032103530322412685, + "acc_norm": 0.5822784810126582, + "acc_norm_stderr": 0.032103530322412685 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.34615384615384615, + "acc_stderr": 0.012150699768228563, + "acc_norm": 0.34615384615384615, + "acc_norm_stderr": 0.012150699768228563 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.46568627450980393, + "acc_stderr": 0.03501038327635897, + "acc_norm": 0.46568627450980393, + "acc_norm_stderr": 0.03501038327635897 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5515151515151515, + "acc_stderr": 0.038835659779569286, + "acc_norm": 0.5515151515151515, + "acc_norm_stderr": 0.038835659779569286 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24724602203182375, + "mc1_stderr": 0.015102404797359649, + "mc2": 0.39725650408832863, + "mc2_stderr": 0.01469261681765968 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4179456906729634, + "acc_stderr": 0.016957292005279713, + "acc_norm": 0.4817001180637544, + "acc_norm_stderr": 0.01717883663917776 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + 
"harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + 
"harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "AIFT/aift-llama2-koen-instruct-v1.0", + "model_sha": "54a5a30188cba6af653f20df22ff393472f0e161", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/AIFT/aift-llama2-koen-instruct-v1.1-dpo-test1/result_2023-12-18 03:42:48.json b/AIFT/aift-llama2-koen-instruct-v1.1-dpo-test1/result_2023-12-18 03:42:48.json new file mode 100644 index 0000000000000000000000000000000000000000..b1c83909ec7cc53c408c356a42a614b946e2e2e7 --- /dev/null +++ b/AIFT/aift-llama2-koen-instruct-v1.1-dpo-test1/result_2023-12-18 03:42:48.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3822525597269625, + "acc_stderr": 0.014200454049979279, + "acc_norm": 0.44112627986348124, + "acc_norm_stderr": 0.014509747749064664 + }, + "harness|ko_hellaswag|10": { + "acc": 0.41943835889265085, + "acc_stderr": 0.004924586362301652, + "acc_norm": 0.5719976100378411, + "acc_norm_stderr": 0.004937779821908573 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4678362573099415, + "acc_stderr": 0.038268824176603704, + "acc_norm": 0.4678362573099415, + "acc_norm_stderr": 0.038268824176603704 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5145631067961165, + "acc_stderr": 0.04948637324026637, + "acc_norm": 0.5145631067961165, + "acc_norm_stderr": 0.04948637324026637 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.01776925058353325, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.01776925058353325 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.043163785995113245, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 
0.043163785995113245 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.425531914893617, + "acc_stderr": 0.03232146916224468, + "acc_norm": 0.425531914893617, + "acc_norm_stderr": 0.03232146916224468 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.40963855421686746, + "acc_stderr": 0.038284011150790206, + "acc_norm": 0.40963855421686746, + "acc_norm_stderr": 0.038284011150790206 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5176848874598071, + "acc_stderr": 0.028380322849077138, + "acc_norm": 0.5176848874598071, + "acc_norm_stderr": 0.028380322849077138 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.515695067264574, + "acc_stderr": 0.0335412657542081, + "acc_norm": 0.515695067264574, + "acc_norm_stderr": 0.0335412657542081 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5190839694656488, + "acc_stderr": 0.043820947055509867, + "acc_norm": 0.5190839694656488, + "acc_norm_stderr": 0.043820947055509867 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5707070707070707, + "acc_stderr": 0.03526552724601198, + "acc_norm": 0.5707070707070707, + "acc_norm_stderr": 0.03526552724601198 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.43448275862068964, + "acc_stderr": 0.041307408795554966, + "acc_norm": 0.43448275862068964, + "acc_norm_stderr": 0.041307408795554966 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.19607843137254902, + "acc_stderr": 0.039505818611799616, + "acc_norm": 0.19607843137254902, + "acc_norm_stderr": 0.039505818611799616 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.47058823529411764, + "acc_stderr": 0.032422250271150074, + "acc_norm": 
0.47058823529411764, + "acc_norm_stderr": 0.032422250271150074 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.40512820512820513, + "acc_stderr": 0.02489047176993815, + "acc_norm": 0.40512820512820513, + "acc_norm_stderr": 0.02489047176993815 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.63, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.63, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.04820403072760627, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.04820403072760627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3694581280788177, + "acc_stderr": 0.03395970381998574, + "acc_norm": 0.3694581280788177, + "acc_norm_stderr": 0.03395970381998574 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5129032258064516, + "acc_stderr": 0.02843453315268187, + "acc_norm": 0.5129032258064516, + "acc_norm_stderr": 0.02843453315268187 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6752136752136753, + "acc_stderr": 0.03067902276549883, + "acc_norm": 0.6752136752136753, + "acc_norm_stderr": 0.03067902276549883 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.44150943396226416, + "acc_stderr": 0.030561590426731833, + "acc_norm": 0.44150943396226416, + "acc_norm_stderr": 0.030561590426731833 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5363636363636364, + "acc_stderr": 0.04776449162396197, + "acc_norm": 0.5363636363636364, + "acc_norm_stderr": 0.04776449162396197 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.23703703703703705, + "acc_stderr": 0.025928876132766107, + "acc_norm": 0.23703703703703705, + "acc_norm_stderr": 0.025928876132766107 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + 
"acc_stderr": 0.03802039760107903, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.03802039760107903 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5124378109452736, + "acc_stderr": 0.0353443984853958, + "acc_norm": 0.5124378109452736, + "acc_norm_stderr": 0.0353443984853958 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3815028901734104, + "acc_stderr": 0.03703851193099521, + "acc_norm": 0.3815028901734104, + "acc_norm_stderr": 0.03703851193099521 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.28835978835978837, + "acc_stderr": 0.0233306540545359, + "acc_norm": 0.28835978835978837, + "acc_norm_stderr": 0.0233306540545359 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.041227287076512825, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.041227287076512825 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.63, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.63, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4884393063583815, + "acc_stderr": 0.026911898686377927, + "acc_norm": 0.4884393063583815, + "acc_norm_stderr": 0.026911898686377927 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4539877300613497, + "acc_stderr": 0.0391170190467718, + "acc_norm": 0.4539877300613497, + "acc_norm_stderr": 0.0391170190467718 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.49382716049382713, + "acc_stderr": 0.027818623962583295, + "acc_norm": 0.49382716049382713, + "acc_norm_stderr": 0.027818623962583295 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5233160621761658, + 
"acc_stderr": 0.036045136724422014, + "acc_norm": 0.5233160621761658, + "acc_norm_stderr": 0.036045136724422014 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.04096985139843671, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.04096985139843671 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5651376146788991, + "acc_stderr": 0.021254631465609273, + "acc_norm": 0.5651376146788991, + "acc_norm_stderr": 0.021254631465609273 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.03932537680392869, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.03932537680392869 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.42810457516339867, + "acc_stderr": 0.028332397483664274, + "acc_norm": 0.42810457516339867, + "acc_norm_stderr": 0.028332397483664274 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6446280991735537, + "acc_stderr": 0.0436923632657398, + "acc_norm": 0.6446280991735537, + "acc_norm_stderr": 0.0436923632657398 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40131578947368424, + "acc_stderr": 0.03988903703336284, + "acc_norm": 0.40131578947368424, + "acc_norm_stderr": 0.03988903703336284 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3954248366013072, + "acc_stderr": 0.019780465954777515, + "acc_norm": 0.3954248366013072, + "acc_norm_stderr": 0.019780465954777515 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.30851063829787234, + "acc_stderr": 0.027553366165101376, + "acc_norm": 0.30851063829787234, + "acc_norm_stderr": 0.027553366165101376 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25892857142857145, + "acc_stderr": 0.04157751539865629, + "acc_norm": 0.25892857142857145, + "acc_norm_stderr": 0.04157751539865629 + }, + 
"harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.33796296296296297, + "acc_stderr": 0.03225941352631295, + "acc_norm": 0.33796296296296297, + "acc_norm_stderr": 0.03225941352631295 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2558659217877095, + "acc_stderr": 0.01459362092321074, + "acc_norm": 0.2558659217877095, + "acc_norm_stderr": 0.01459362092321074 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.34191176470588236, + "acc_stderr": 0.028814722422254167, + "acc_norm": 0.34191176470588236, + "acc_norm_stderr": 0.028814722422254167 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.43673469387755104, + "acc_stderr": 0.03175195237583322, + "acc_norm": 0.43673469387755104, + "acc_norm_stderr": 0.03175195237583322 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6286919831223629, + "acc_stderr": 0.03145068600744858, + "acc_norm": 0.6286919831223629, + "acc_norm_stderr": 0.03145068600744858 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.34615384615384615, + "acc_stderr": 0.012150699768228563, + "acc_norm": 0.34615384615384615, + "acc_norm_stderr": 0.012150699768228563 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5294117647058824, + "acc_stderr": 0.03503235296367994, + "acc_norm": 0.5294117647058824, + "acc_norm_stderr": 0.03503235296367994 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5696969696969697, + "acc_stderr": 0.03866225962879077, + "acc_norm": 0.5696969696969697, + "acc_norm_stderr": 0.03866225962879077 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2460220318237454, + "mc1_stderr": 0.015077219200662574, + 
"mc2": 0.40225459810500935, + "mc2_stderr": 0.01478258523910622 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.38488783943329397, + "acc_stderr": 0.016728579701498665, + "acc_norm": 0.4427390791027155, + "acc_norm_stderr": 0.017077254131556217 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + 
"harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "AIFT/aift-llama2-koen-instruct-v1.1-dpo-test1", + "model_sha": "a416328b862669edfe25be6c305bc9f5ccc4d727", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/AIFT/aift-llama2-koen-instruct-v1.1/result_2023-12-15 09:57:09.json b/AIFT/aift-llama2-koen-instruct-v1.1/result_2023-12-15 09:57:09.json new file mode 100644 index 0000000000000000000000000000000000000000..4937eda26f7b6aedf7bf650410de8b0f1b3f82ff --- /dev/null +++ b/AIFT/aift-llama2-koen-instruct-v1.1/result_2023-12-15 09:57:09.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3822525597269625, + "acc_stderr": 0.014200454049979279, + "acc_norm": 0.4402730375426621, + "acc_norm_stderr": 0.014506769524804243 + }, + "harness|ko_hellaswag|10": { + "acc": 0.41894045010953995, + "acc_stderr": 0.004923772581848503, + "acc_norm": 0.5714997012547302, + 
"acc_norm_stderr": 0.004938500303990289 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4678362573099415, + "acc_stderr": 0.038268824176603704, + "acc_norm": 0.4678362573099415, + "acc_norm_stderr": 0.038268824176603704 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5242718446601942, + "acc_stderr": 0.049449010929737795, + "acc_norm": 0.5242718446601942, + "acc_norm_stderr": 0.049449010929737795 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.01776925058353325, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.01776925058353325 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4740740740740741, + "acc_stderr": 0.04313531696750574, + "acc_norm": 0.4740740740740741, + "acc_norm_stderr": 0.04313531696750574 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4127659574468085, + "acc_stderr": 0.03218471141400351, + "acc_norm": 0.4127659574468085, + "acc_norm_stderr": 0.03218471141400351 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.41566265060240964, + "acc_stderr": 0.03836722176598052, + "acc_norm": 0.41566265060240964, + "acc_norm_stderr": 0.03836722176598052 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5241157556270096, + "acc_stderr": 0.028365041542564584, + "acc_norm": 0.5241157556270096, + "acc_norm_stderr": 0.028365041542564584 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5246636771300448, + "acc_stderr": 0.03351695167652628, + "acc_norm": 0.5246636771300448, + "acc_norm_stderr": 0.03351695167652628 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5190839694656488, + "acc_stderr": 0.043820947055509867, + "acc_norm": 0.5190839694656488, + "acc_norm_stderr": 0.043820947055509867 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + 
"acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5707070707070707, + "acc_stderr": 0.03526552724601198, + "acc_norm": 0.5707070707070707, + "acc_norm_stderr": 0.03526552724601198 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.43448275862068964, + "acc_stderr": 0.041307408795554966, + "acc_norm": 0.43448275862068964, + "acc_norm_stderr": 0.041307408795554966 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.040233822736177476, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.040233822736177476 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.47058823529411764, + "acc_stderr": 0.032422250271150074, + "acc_norm": 0.47058823529411764, + "acc_norm_stderr": 0.032422250271150074 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4025641025641026, + "acc_stderr": 0.024864995159767762, + "acc_norm": 0.4025641025641026, + "acc_norm_stderr": 0.024864995159767762 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.66, + "acc_stderr": 0.04760952285695237, + "acc_norm": 0.66, + "acc_norm_stderr": 0.04760952285695237 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.04820403072760627, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.04820403072760627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3694581280788177, + "acc_stderr": 0.03395970381998574, + "acc_norm": 0.3694581280788177, + "acc_norm_stderr": 0.03395970381998574 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5129032258064516, + "acc_stderr": 0.02843453315268187, + "acc_norm": 0.5129032258064516, + "acc_norm_stderr": 0.02843453315268187 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6709401709401709, + "acc_stderr": 
0.03078232157768817, + "acc_norm": 0.6709401709401709, + "acc_norm_stderr": 0.03078232157768817 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4377358490566038, + "acc_stderr": 0.030533338430467516, + "acc_norm": 0.4377358490566038, + "acc_norm_stderr": 0.030533338430467516 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5181818181818182, + "acc_stderr": 0.04785964010794916, + "acc_norm": 0.5181818181818182, + "acc_norm_stderr": 0.04785964010794916 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24074074074074073, + "acc_stderr": 0.026067159222275798, + "acc_norm": 0.24074074074074073, + "acc_norm_stderr": 0.026067159222275798 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33112582781456956, + "acc_stderr": 0.038425817186598696, + "acc_norm": 0.33112582781456956, + "acc_norm_stderr": 0.038425817186598696 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5223880597014925, + "acc_stderr": 0.03531987930208731, + "acc_norm": 0.5223880597014925, + "acc_norm_stderr": 0.03531987930208731 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3930635838150289, + "acc_stderr": 0.037242495958177295, + "acc_norm": 0.3930635838150289, + "acc_norm_stderr": 0.037242495958177295 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.023517294335963286, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.023517294335963286 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4236111111111111, + "acc_stderr": 0.041321250197233685, + "acc_norm": 0.4236111111111111, + "acc_norm_stderr": 0.041321250197233685 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.63, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.63, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 
0.48554913294797686, + "acc_stderr": 0.02690784985628254, + "acc_norm": 0.48554913294797686, + "acc_norm_stderr": 0.02690784985628254 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4539877300613497, + "acc_stderr": 0.0391170190467718, + "acc_norm": 0.4539877300613497, + "acc_norm_stderr": 0.0391170190467718 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4845679012345679, + "acc_stderr": 0.0278074900442762, + "acc_norm": 0.4845679012345679, + "acc_norm_stderr": 0.0278074900442762 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5181347150259067, + "acc_stderr": 0.036060650018329185, + "acc_norm": 0.5181347150259067, + "acc_norm_stderr": 0.036060650018329185 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.041857744240220575, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.041857744240220575 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5614678899082569, + "acc_stderr": 0.021274713073954562, + "acc_norm": 0.5614678899082569, + "acc_norm_stderr": 0.021274713073954562 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.0393253768039287, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.0393253768039287 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4215686274509804, + "acc_stderr": 0.02827549015679143, + "acc_norm": 0.4215686274509804, + "acc_norm_stderr": 0.02827549015679143 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6528925619834711, + "acc_stderr": 0.04345724570292535, + "acc_norm": 0.6528925619834711, + "acc_norm_stderr": 0.04345724570292535 + }, + 
"harness|ko_mmlu_astronomy|5": { + "acc": 0.40131578947368424, + "acc_stderr": 0.03988903703336284, + "acc_norm": 0.40131578947368424, + "acc_norm_stderr": 0.03988903703336284 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4035947712418301, + "acc_stderr": 0.01984828016840115, + "acc_norm": 0.4035947712418301, + "acc_norm_stderr": 0.01984828016840115 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32269503546099293, + "acc_stderr": 0.027889139300534778, + "acc_norm": 0.32269503546099293, + "acc_norm_stderr": 0.027889139300534778 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25, + "acc_stderr": 0.04109974682633932, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04109974682633932 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.03214952147802749, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.03214952147802749 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.25921787709497207, + "acc_stderr": 0.01465578083749773, + "acc_norm": 0.25921787709497207, + "acc_norm_stderr": 0.01465578083749773 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.33455882352941174, + "acc_stderr": 0.028661996202335314, + "acc_norm": 0.33455882352941174, + "acc_norm_stderr": 0.028661996202335314 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.43673469387755104, + "acc_stderr": 0.03175195237583322, + "acc_norm": 0.43673469387755104, + "acc_norm_stderr": 0.03175195237583322 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6286919831223629, + "acc_stderr": 0.03145068600744858, + "acc_norm": 0.6286919831223629, + 
"acc_norm_stderr": 0.03145068600744858 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.34810951760104303, + "acc_stderr": 0.0121667389936982, + "acc_norm": 0.34810951760104303, + "acc_norm_stderr": 0.0121667389936982 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5294117647058824, + "acc_stderr": 0.03503235296367994, + "acc_norm": 0.5294117647058824, + "acc_norm_stderr": 0.03503235296367994 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5696969696969697, + "acc_stderr": 0.038662259628790774, + "acc_norm": 0.5696969696969697, + "acc_norm_stderr": 0.038662259628790774 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24724602203182375, + "mc1_stderr": 0.015102404797359649, + "mc2": 0.4028203603366851, + "mc2_stderr": 0.01477432836961688 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.38961038961038963, + "acc_stderr": 0.0167661616718935, + "acc_norm": 0.45218417945690675, + "acc_norm_stderr": 0.017111567130916782 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + 
"harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "AIFT/aift-llama2-koen-instruct-v1.1", + "model_sha": "202c3e3df0c4a321503df8d4c78da213f1ae5475", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} 
\ No newline at end of file diff --git a/AIFT/aift-llama2-koen-instruct-v1.2/result_2023-12-16 03:29:50.json b/AIFT/aift-llama2-koen-instruct-v1.2/result_2023-12-16 03:29:50.json new file mode 100644 index 0000000000000000000000000000000000000000..2c1ce2534191f83e655a9ab08f839116114ce27e --- /dev/null +++ b/AIFT/aift-llama2-koen-instruct-v1.2/result_2023-12-16 03:29:50.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.38310580204778155, + "acc_stderr": 0.01420647266167288, + "acc_norm": 0.4453924914675768, + "acc_norm_stderr": 0.014523987638344074 + }, + "harness|ko_hellaswag|10": { + "acc": 0.42113124875522806, + "acc_stderr": 0.004927314729433555, + "acc_norm": 0.578370842461661, + "acc_norm_stderr": 0.004928105880776078 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4619883040935672, + "acc_stderr": 0.038237270928823064, + "acc_norm": 0.4619883040935672, + "acc_norm_stderr": 0.038237270928823064 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.49514563106796117, + "acc_stderr": 0.04950504382128921, + "acc_norm": 0.49514563106796117, + "acc_norm_stderr": 0.04950504382128921 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5644955300127714, + "acc_stderr": 0.017730589927926588, + "acc_norm": 0.5644955300127714, + "acc_norm_stderr": 0.017730589927926588 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.043097329010363554, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.043097329010363554 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4127659574468085, + "acc_stderr": 0.03218471141400352, + "acc_norm": 0.4127659574468085, + "acc_norm_stderr": 0.03218471141400352 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.41566265060240964, + "acc_stderr": 0.03836722176598052, + "acc_norm": 
0.41566265060240964, + "acc_norm_stderr": 0.03836722176598052 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5144694533762058, + "acc_stderr": 0.028386198084177687, + "acc_norm": 0.5144694533762058, + "acc_norm_stderr": 0.028386198084177687 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5112107623318386, + "acc_stderr": 0.033549366530984746, + "acc_norm": 0.5112107623318386, + "acc_norm_stderr": 0.033549366530984746 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48854961832061067, + "acc_stderr": 0.04384140024078016, + "acc_norm": 0.48854961832061067, + "acc_norm_stderr": 0.04384140024078016 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5858585858585859, + "acc_stderr": 0.03509438348879629, + "acc_norm": 0.5858585858585859, + "acc_norm_stderr": 0.03509438348879629 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4206896551724138, + "acc_stderr": 0.0411391498118926, + "acc_norm": 0.4206896551724138, + "acc_norm_stderr": 0.0411391498118926 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237655, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237655 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.46638655462184875, + "acc_stderr": 0.03240501447690071, + "acc_norm": 0.46638655462184875, + "acc_norm_stderr": 0.03240501447690071 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3974358974358974, + "acc_stderr": 0.024811920017903836, + "acc_norm": 0.3974358974358974, + "acc_norm_stderr": 0.024811920017903836 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.62, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.62, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 
0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.04820403072760627, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.04820403072760627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3645320197044335, + "acc_stderr": 0.0338640574606209, + "acc_norm": 0.3645320197044335, + "acc_norm_stderr": 0.0338640574606209 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4935483870967742, + "acc_stderr": 0.02844163823354051, + "acc_norm": 0.4935483870967742, + "acc_norm_stderr": 0.02844163823354051 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6538461538461539, + "acc_stderr": 0.0311669573672359, + "acc_norm": 0.6538461538461539, + "acc_norm_stderr": 0.0311669573672359 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4226415094339623, + "acc_stderr": 0.03040233144576954, + "acc_norm": 0.4226415094339623, + "acc_norm_stderr": 0.03040233144576954 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5545454545454546, + "acc_stderr": 0.047605488214603246, + "acc_norm": 0.5545454545454546, + "acc_norm_stderr": 0.047605488214603246 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2074074074074074, + "acc_stderr": 0.024720713193952148, + "acc_norm": 0.2074074074074074, + "acc_norm_stderr": 0.024720713193952148 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33774834437086093, + "acc_stderr": 0.03861557546255169, + "acc_norm": 0.33774834437086093, + "acc_norm_stderr": 0.03861557546255169 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5124378109452736, + "acc_stderr": 0.0353443984853958, + "acc_norm": 0.5124378109452736, + "acc_norm_stderr": 0.0353443984853958 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3872832369942196, + "acc_stderr": 0.03714325906302065, + "acc_norm": 0.3872832369942196, + "acc_norm_stderr": 0.03714325906302065 + }, + 
"harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.023517294335963286, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.023517294335963286 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.041227287076512825, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.041227287076512825 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.49710982658959535, + "acc_stderr": 0.02691864538323901, + "acc_norm": 0.49710982658959535, + "acc_norm_stderr": 0.02691864538323901 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4233128834355828, + "acc_stderr": 0.038818912133343826, + "acc_norm": 0.4233128834355828, + "acc_norm_stderr": 0.038818912133343826 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.49691358024691357, + "acc_stderr": 0.027820214158594377, + "acc_norm": 0.49691358024691357, + "acc_norm_stderr": 0.027820214158594377 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5181347150259067, + "acc_stderr": 0.036060650018329185, + "acc_norm": 0.5181347150259067, + "acc_norm_stderr": 0.036060650018329185 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.0414243971948936, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.0414243971948936 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5688073394495413, + "acc_stderr": 0.02123336503031956, + "acc_norm": 0.5688073394495413, + "acc_norm_stderr": 
0.02123336503031956 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.0404061017820884, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.0404061017820884 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.434640522875817, + "acc_stderr": 0.028384256704883037, + "acc_norm": 0.434640522875817, + "acc_norm_stderr": 0.028384256704883037 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6528925619834711, + "acc_stderr": 0.04345724570292535, + "acc_norm": 0.6528925619834711, + "acc_norm_stderr": 0.04345724570292535 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.39473684210526316, + "acc_stderr": 0.039777499346220734, + "acc_norm": 0.39473684210526316, + "acc_norm_stderr": 0.039777499346220734 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.380718954248366, + "acc_stderr": 0.019643801557924806, + "acc_norm": 0.380718954248366, + "acc_norm_stderr": 0.019643801557924806 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.30141843971631205, + "acc_stderr": 0.02737412888263115, + "acc_norm": 0.30141843971631205, + "acc_norm_stderr": 0.02737412888263115 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.26785714285714285, + "acc_stderr": 0.04203277291467762, + "acc_norm": 0.26785714285714285, + "acc_norm_stderr": 0.04203277291467762 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3425925925925926, + "acc_stderr": 0.032365852526021574, + "acc_norm": 0.3425925925925926, + "acc_norm_stderr": 0.032365852526021574 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2681564245810056, + "acc_stderr": 0.014816119635317, + "acc_norm": 0.2681564245810056, + "acc_norm_stderr": 0.014816119635317 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 
0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3272058823529412, + "acc_stderr": 0.028501452860396587, + "acc_norm": 0.3272058823529412, + "acc_norm_stderr": 0.028501452860396587 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4326530612244898, + "acc_stderr": 0.03171752824062664, + "acc_norm": 0.4326530612244898, + "acc_norm_stderr": 0.03171752824062664 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6286919831223629, + "acc_stderr": 0.03145068600744859, + "acc_norm": 0.6286919831223629, + "acc_norm_stderr": 0.03145068600744859 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.35658409387222945, + "acc_stderr": 0.012233642989273886, + "acc_norm": 0.35658409387222945, + "acc_norm_stderr": 0.012233642989273886 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.553921568627451, + "acc_stderr": 0.034888454513049734, + "acc_norm": 0.553921568627451, + "acc_norm_stderr": 0.034888454513049734 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5757575757575758, + "acc_stderr": 0.038592681420702636, + "acc_norm": 0.5757575757575758, + "acc_norm_stderr": 0.038592681420702636 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2521419828641371, + "mc1_stderr": 0.015201522246299946, + "mc2": 0.4052899642454083, + "mc2_stderr": 0.014924042516908636 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.38488783943329397, + "acc_stderr": 0.016728579701498672, + "acc_norm": 0.4510035419126328, + "acc_norm_stderr": 0.017107618859549357 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, 
+ "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + 
"harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "AIFT/aift-llama2-koen-instruct-v1.2", + "model_sha": "95f3e7cce5bebe90ac4ff8f07597be444e7e1a9e", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/AIJUUD/juud-Mistral-7B-dpo/result_2024-02-08 07:38:59.json b/AIJUUD/juud-Mistral-7B-dpo/result_2024-02-08 07:38:59.json new file mode 100644 index 0000000000000000000000000000000000000000..8cad65882871b453e4ba2f1f87695e8024f7e223 --- /dev/null +++ b/AIJUUD/juud-Mistral-7B-dpo/result_2024-02-08 07:38:59.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3455631399317406, + "acc_stderr": 0.013896938461145685, + "acc_norm": 0.38993174061433444, + "acc_norm_stderr": 0.014252959848892896 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3835889265086636, + "acc_stderr": 0.004852658876775384, + "acc_norm": 0.4939255128460466, + "acc_norm_stderr": 0.0049894131580347995 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4619883040935672, + "acc_stderr": 0.03823727092882307, + "acc_norm": 0.4619883040935672, + "acc_norm_stderr": 0.03823727092882307 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6213592233009708, + "acc_stderr": 0.04802694698258975, + "acc_norm": 0.6213592233009708, + "acc_norm_stderr": 0.04802694698258975 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.47126436781609193, + "acc_stderr": 0.01785041079438017, + 
"acc_norm": 0.47126436781609193, + "acc_norm_stderr": 0.01785041079438017 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34074074074074073, + "acc_stderr": 0.040943762699967946, + "acc_norm": 0.34074074074074073, + "acc_norm_stderr": 0.040943762699967946 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.37872340425531914, + "acc_stderr": 0.03170995606040655, + "acc_norm": 0.37872340425531914, + "acc_norm_stderr": 0.03170995606040655 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3855421686746988, + "acc_stderr": 0.03789134424611548, + "acc_norm": 0.3855421686746988, + "acc_norm_stderr": 0.03789134424611548 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4694533762057878, + "acc_stderr": 0.028345045864840667, + "acc_norm": 0.4694533762057878, + "acc_norm_stderr": 0.028345045864840667 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.47533632286995514, + "acc_stderr": 0.033516951676526276, + "acc_norm": 0.47533632286995514, + "acc_norm_stderr": 0.033516951676526276 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4198473282442748, + "acc_stderr": 0.04328577215262972, + "acc_norm": 0.4198473282442748, + "acc_norm_stderr": 0.04328577215262972 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.035476014940069384, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.035476014940069384 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4413793103448276, + "acc_stderr": 0.04137931034482758, + "acc_norm": 0.4413793103448276, + "acc_norm_stderr": 0.04137931034482758 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3137254901960784, + "acc_stderr": 
0.04617034827006717, + "acc_norm": 0.3137254901960784, + "acc_norm_stderr": 0.04617034827006717 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4789915966386555, + "acc_stderr": 0.03244980849990029, + "acc_norm": 0.4789915966386555, + "acc_norm_stderr": 0.03244980849990029 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4461538461538462, + "acc_stderr": 0.025203571773028333, + "acc_norm": 0.4461538461538462, + "acc_norm_stderr": 0.025203571773028333 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5370370370370371, + "acc_stderr": 0.04820403072760627, + "acc_norm": 0.5370370370370371, + "acc_norm_stderr": 0.04820403072760627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4187192118226601, + "acc_stderr": 0.03471192860518468, + "acc_norm": 0.4187192118226601, + "acc_norm_stderr": 0.03471192860518468 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.432258064516129, + "acc_stderr": 0.028181739720019416, + "acc_norm": 0.432258064516129, + "acc_norm_stderr": 0.028181739720019416 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.717948717948718, + "acc_stderr": 0.02948036054954119, + "acc_norm": 0.717948717948718, + "acc_norm_stderr": 0.02948036054954119 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.41132075471698115, + "acc_stderr": 0.03028500925900981, + "acc_norm": 0.41132075471698115, + "acc_norm_stderr": 0.03028500925900981 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5181818181818182, + "acc_stderr": 0.04785964010794915, + "acc_norm": 0.5181818181818182, + "acc_norm_stderr": 0.04785964010794915 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + 
"acc": 0.25555555555555554, + "acc_stderr": 0.02659393910184406, + "acc_norm": 0.25555555555555554, + "acc_norm_stderr": 0.02659393910184406 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.271523178807947, + "acc_stderr": 0.03631329803969653, + "acc_norm": 0.271523178807947, + "acc_norm_stderr": 0.03631329803969653 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5870646766169154, + "acc_stderr": 0.03481520803367348, + "acc_norm": 0.5870646766169154, + "acc_norm_stderr": 0.03481520803367348 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3583815028901734, + "acc_stderr": 0.036563436533531585, + "acc_norm": 0.3583815028901734, + "acc_norm_stderr": 0.036563436533531585 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.025487187147859372, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.025487187147859372 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3194444444444444, + "acc_stderr": 0.038990736873573344, + "acc_norm": 0.3194444444444444, + "acc_norm_stderr": 0.038990736873573344 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.48554913294797686, + "acc_stderr": 0.026907849856282532, + "acc_norm": 0.48554913294797686, + "acc_norm_stderr": 0.026907849856282532 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5276073619631901, + "acc_stderr": 0.039223782906109894, + "acc_norm": 0.5276073619631901, + "acc_norm_stderr": 0.039223782906109894 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4845679012345679, + "acc_stderr": 0.02780749004427619, + "acc_norm": 0.4845679012345679, + "acc_norm_stderr": 0.02780749004427619 + }, + 
"harness|ko_mmlu_college_mathematics|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5492227979274611, + "acc_stderr": 0.035909109522355244, + "acc_norm": 0.5492227979274611, + "acc_norm_stderr": 0.035909109522355244 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.35964912280701755, + "acc_stderr": 0.04514496132873633, + "acc_norm": 0.35964912280701755, + "acc_norm_stderr": 0.04514496132873633 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5211009174311927, + "acc_stderr": 0.02141822475426465, + "acc_norm": 0.5211009174311927, + "acc_norm_stderr": 0.02141822475426465 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.38095238095238093, + "acc_stderr": 0.04343525428949098, + "acc_norm": 0.38095238095238093, + "acc_norm_stderr": 0.04343525428949098 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.477124183006536, + "acc_stderr": 0.028599936776089782, + "acc_norm": 0.477124183006536, + "acc_norm_stderr": 0.028599936776089782 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.44, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.44, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6776859504132231, + "acc_stderr": 0.04266416363352167, + "acc_norm": 0.6776859504132231, + "acc_norm_stderr": 0.04266416363352167 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3881578947368421, + "acc_stderr": 0.03965842097512744, + "acc_norm": 0.3881578947368421, + "acc_norm_stderr": 0.03965842097512744 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3790849673202614, + "acc_stderr": 0.019627444748412236, + "acc_norm": 0.3790849673202614, + "acc_norm_stderr": 0.019627444748412236 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.02812163604063989, + "acc_norm": 0.3333333333333333, + 
"acc_norm_stderr": 0.02812163604063989 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4107142857142857, + "acc_stderr": 0.04669510663875191, + "acc_norm": 0.4107142857142857, + "acc_norm_stderr": 0.04669510663875191 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4027777777777778, + "acc_stderr": 0.03344887382997865, + "acc_norm": 0.4027777777777778, + "acc_norm_stderr": 0.03344887382997865 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.1877094972067039, + "acc_stderr": 0.013059605303257046, + "acc_norm": 0.1877094972067039, + "acc_norm_stderr": 0.013059605303257046 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.34558823529411764, + "acc_stderr": 0.028888193103988633, + "acc_norm": 0.34558823529411764, + "acc_norm_stderr": 0.028888193103988633 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.49795918367346936, + "acc_stderr": 0.0320089533497105, + "acc_norm": 0.49795918367346936, + "acc_norm_stderr": 0.0320089533497105 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5907172995780591, + "acc_stderr": 0.03200704183359591, + "acc_norm": 0.5907172995780591, + "acc_norm_stderr": 0.03200704183359591 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.34615384615384615, + "acc_stderr": 0.012150699768228575, + "acc_norm": 0.34615384615384615, + "acc_norm_stderr": 0.012150699768228575 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4215686274509804, + "acc_stderr": 0.03465868196380758, + "acc_norm": 0.4215686274509804, + "acc_norm_stderr": 0.03465868196380758 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 
0.46060606060606063, + "acc_stderr": 0.03892207016552013, + "acc_norm": 0.46060606060606063, + "acc_norm_stderr": 0.03892207016552013 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.30354957160342716, + "mc1_stderr": 0.016095884155386854, + "mc2": 0.4935008531923264, + "mc2_stderr": 0.016152132937777407 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.45454545454545453, + "acc_stderr": 0.0171191722080615, + "acc_norm": 0.48406139315230223, + "acc_norm_stderr": 0.017181617837190195 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + 
"harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "AIJUUD/juud-Mistral-7B-dpo", + "model_sha": "b428f21995854f143b497a36d210276439ae0b87", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/AIdenU/LLAMA-2-13b-ko-Y24-DPO_v0.1/result_2023-12-18 01:34:17.json b/AIdenU/LLAMA-2-13b-ko-Y24-DPO_v0.1/result_2023-12-18 01:34:17.json new file mode 100644 index 0000000000000000000000000000000000000000..b9dee46b7c30f941ea2f8e573cc4809803eabe0d --- /dev/null +++ b/AIdenU/LLAMA-2-13b-ko-Y24-DPO_v0.1/result_2023-12-18 01:34:17.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3191126279863481, + 
"acc_stderr": 0.013621696119173297, + "acc_norm": 0.3779863481228669, + "acc_norm_stderr": 0.014169664520303103 + }, + "harness|ko_hellaswag|10": { + "acc": 0.36446922923720376, + "acc_stderr": 0.004802974070507201, + "acc_norm": 0.46883091017725553, + "acc_norm_stderr": 0.004980076707392429 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.42105263157894735, + "acc_stderr": 0.03786720706234215, + "acc_norm": 0.42105263157894735, + "acc_norm_stderr": 0.03786720706234215 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4854368932038835, + "acc_stderr": 0.04948637324026637, + "acc_norm": 0.4854368932038835, + "acc_norm_stderr": 0.04948637324026637 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4891443167305236, + "acc_stderr": 0.01787574884024242, + "acc_norm": 0.4891443167305236, + "acc_norm_stderr": 0.01787574884024242 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.04171654161354543, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.04171654161354543 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39148936170212767, + "acc_stderr": 0.03190701242326812, + "acc_norm": 0.39148936170212767, + "acc_norm_stderr": 0.03190701242326812 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.35542168674698793, + "acc_stderr": 0.03726214354322415, + "acc_norm": 0.35542168674698793, + "acc_norm_stderr": 0.03726214354322415 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4662379421221865, + "acc_stderr": 0.02833327710956278, + "acc_norm": 0.4662379421221865, + "acc_norm_stderr": 0.02833327710956278 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3811659192825112, + "acc_stderr": 0.03259625118416828, + "acc_norm": 0.3811659192825112, + "acc_norm_stderr": 0.03259625118416828 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5419847328244275, 
+ "acc_stderr": 0.04369802690578756, + "acc_norm": 0.5419847328244275, + "acc_norm_stderr": 0.04369802690578756 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.31, + "acc_stderr": 0.046482319871173156, + "acc_norm": 0.31, + "acc_norm_stderr": 0.046482319871173156 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5151515151515151, + "acc_stderr": 0.03560716516531061, + "acc_norm": 0.5151515151515151, + "acc_norm_stderr": 0.03560716516531061 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4206896551724138, + "acc_stderr": 0.0411391498118926, + "acc_norm": 0.4206896551724138, + "acc_norm_stderr": 0.0411391498118926 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.041583075330832865, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.041583075330832865 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4327731092436975, + "acc_stderr": 0.03218358107742613, + "acc_norm": 0.4327731092436975, + "acc_norm_stderr": 0.03218358107742613 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3974358974358974, + "acc_stderr": 0.024811920017903836, + "acc_norm": 0.3974358974358974, + "acc_norm_stderr": 0.024811920017903836 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.47, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.47, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3694581280788177, + "acc_stderr": 0.033959703819985754, + "acc_norm": 0.3694581280788177, + "acc_norm_stderr": 0.033959703819985754 + }, + "harness|ko_mmlu_high_school_biology|5": { 
+ "acc": 0.4645161290322581, + "acc_stderr": 0.02837228779796295, + "acc_norm": 0.4645161290322581, + "acc_norm_stderr": 0.02837228779796295 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6111111111111112, + "acc_stderr": 0.031937057262002924, + "acc_norm": 0.6111111111111112, + "acc_norm_stderr": 0.031937057262002924 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4075471698113208, + "acc_stderr": 0.030242233800854498, + "acc_norm": 0.4075471698113208, + "acc_norm_stderr": 0.030242233800854498 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.23703703703703705, + "acc_stderr": 0.02592887613276612, + "acc_norm": 0.23703703703703705, + "acc_norm_stderr": 0.02592887613276612 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.03802039760107903, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.03802039760107903 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5771144278606966, + "acc_stderr": 0.034932317774212816, + "acc_norm": 0.5771144278606966, + "acc_norm_stderr": 0.034932317774212816 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3583815028901734, + "acc_stderr": 0.036563436533531585, + "acc_norm": 0.3583815028901734, + "acc_norm_stderr": 0.036563436533531585 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.02256989707491841, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.02256989707491841 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3402777777777778, + "acc_stderr": 0.03962135573486219, + "acc_norm": 0.3402777777777778, + "acc_norm_stderr": 0.03962135573486219 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 
0.0479372485441102 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.47109826589595377, + "acc_stderr": 0.026874085883518348, + "acc_norm": 0.47109826589595377, + "acc_norm_stderr": 0.026874085883518348 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3619631901840491, + "acc_stderr": 0.037757007291414416, + "acc_norm": 0.3619631901840491, + "acc_norm_stderr": 0.037757007291414416 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4012345679012346, + "acc_stderr": 0.0272725828498398, + "acc_norm": 0.4012345679012346, + "acc_norm_stderr": 0.0272725828498398 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.48186528497409326, + "acc_stderr": 0.036060650018329185, + "acc_norm": 0.48186528497409326, + "acc_norm_stderr": 0.036060650018329185 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3684210526315789, + "acc_stderr": 0.0453781535493939, + "acc_norm": 0.3684210526315789, + "acc_norm_stderr": 0.0453781535493939 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.43302752293577984, + "acc_stderr": 0.021244146569074345, + "acc_norm": 0.43302752293577984, + "acc_norm_stderr": 0.021244146569074345 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.31746031746031744, + "acc_stderr": 0.04163453031302859, + "acc_norm": 0.31746031746031744, + "acc_norm_stderr": 0.04163453031302859 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.43137254901960786, + "acc_stderr": 0.028358956313423556, + "acc_norm": 0.43137254901960786, + "acc_norm_stderr": 0.028358956313423556 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.38, + 
"acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6033057851239669, + "acc_stderr": 0.04465869780531009, + "acc_norm": 0.6033057851239669, + "acc_norm_stderr": 0.04465869780531009 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4868421052631579, + "acc_stderr": 0.04067533136309173, + "acc_norm": 0.4868421052631579, + "acc_norm_stderr": 0.04067533136309173 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3202614379084967, + "acc_stderr": 0.018875682938069436, + "acc_norm": 0.3202614379084967, + "acc_norm_stderr": 0.018875682938069436 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3049645390070922, + "acc_stderr": 0.02746470844202212, + "acc_norm": 0.3049645390070922, + "acc_norm_stderr": 0.02746470844202212 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.23214285714285715, + "acc_stderr": 0.04007341809755806, + "acc_norm": 0.23214285714285715, + "acc_norm_stderr": 0.04007341809755806 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3101851851851852, + "acc_stderr": 0.03154696285656628, + "acc_norm": 0.3101851851851852, + "acc_norm_stderr": 0.03154696285656628 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.33455882352941174, + "acc_stderr": 0.028661996202335317, + "acc_norm": 0.33455882352941174, + "acc_norm_stderr": 0.028661996202335317 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.43673469387755104, + 
"acc_stderr": 0.031751952375833226, + "acc_norm": 0.43673469387755104, + "acc_norm_stderr": 0.031751952375833226 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.47257383966244726, + "acc_stderr": 0.032498227183013026, + "acc_norm": 0.47257383966244726, + "acc_norm_stderr": 0.032498227183013026 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.30638852672750977, + "acc_stderr": 0.011773980329380731, + "acc_norm": 0.30638852672750977, + "acc_norm_stderr": 0.011773980329380731 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.35784313725490197, + "acc_stderr": 0.033644872860882996, + "acc_norm": 0.35784313725490197, + "acc_norm_stderr": 0.033644872860882996 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.44242424242424244, + "acc_stderr": 0.03878372113711274, + "acc_norm": 0.44242424242424244, + "acc_norm_stderr": 0.03878372113711274 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2460220318237454, + "mc1_stderr": 0.015077219200662568, + "mc2": 0.4153514851890886, + "mc2_stderr": 0.01500188114852866 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.42266824085005905, + "acc_stderr": 0.0169835060795776, + "acc_norm": 0.5053128689492326, + "acc_norm_stderr": 0.01718938362722971 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + 
"harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + 
"model_name": "AIdenU/LLAMA-2-13b-ko-Y24-DPO_v0.1", + "model_sha": "10c4f59aa0a45a331f9a3288f05daa29d9dc79df", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/AIdenU/LLAMA-2-13b-ko-Y24-DPO_v2.0/result_2024-02-19 04:03:59.json b/AIdenU/LLAMA-2-13b-ko-Y24-DPO_v2.0/result_2024-02-19 04:03:59.json new file mode 100644 index 0000000000000000000000000000000000000000..372a36840e36a93484c5410698dffa8a4f8d4174 --- /dev/null +++ b/AIdenU/LLAMA-2-13b-ko-Y24-DPO_v2.0/result_2024-02-19 04:03:59.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.33276450511945393, + "acc_stderr": 0.0137698630461923, + "acc_norm": 0.39334470989761094, + "acc_norm_stderr": 0.014275101465693028 + }, + "harness|ko_hellaswag|10": { + "acc": 0.36825333598884685, + "acc_stderr": 0.004813448615404438, + "acc_norm": 0.4763991236805417, + "acc_norm_stderr": 0.004984219681732663 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.42105263157894735, + "acc_stderr": 0.03786720706234215, + "acc_norm": 0.42105263157894735, + "acc_norm_stderr": 0.03786720706234215 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.42718446601941745, + "acc_stderr": 0.04897957737781168, + "acc_norm": 0.42718446601941745, + "acc_norm_stderr": 0.04897957737781168 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4929757343550447, + "acc_stderr": 0.017878199003432214, + "acc_norm": 0.4929757343550447, + "acc_norm_stderr": 0.017878199003432214 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3925925925925926, + "acc_stderr": 0.04218506215368879, + "acc_norm": 0.3925925925925926, + "acc_norm_stderr": 0.04218506215368879 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_conceptual_physics|5": { + 
"acc": 0.3659574468085106, + "acc_stderr": 0.0314895582974553, + "acc_norm": 0.3659574468085106, + "acc_norm_stderr": 0.0314895582974553 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.35542168674698793, + "acc_stderr": 0.03726214354322415, + "acc_norm": 0.35542168674698793, + "acc_norm_stderr": 0.03726214354322415 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4662379421221865, + "acc_stderr": 0.02833327710956279, + "acc_norm": 0.4662379421221865, + "acc_norm_stderr": 0.02833327710956279 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4260089686098655, + "acc_stderr": 0.0331883328621728, + "acc_norm": 0.4260089686098655, + "acc_norm_stderr": 0.0331883328621728 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.45038167938931295, + "acc_stderr": 0.04363643698524779, + "acc_norm": 0.45038167938931295, + "acc_norm_stderr": 0.04363643698524779 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.45454545454545453, + "acc_stderr": 0.03547601494006938, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.03547601494006938 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4206896551724138, + "acc_stderr": 0.0411391498118926, + "acc_norm": 0.4206896551724138, + "acc_norm_stderr": 0.0411391498118926 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.04023382273617747, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.04023382273617747 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.031124619309328177, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.031124619309328177 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.358974358974359, + "acc_stderr": 0.02432173848460237, + "acc_norm": 0.358974358974359, + "acc_norm_stderr": 
0.02432173848460237 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5277777777777778, + "acc_stderr": 0.048262172941398944, + "acc_norm": 0.5277777777777778, + "acc_norm_stderr": 0.048262172941398944 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4088669950738916, + "acc_stderr": 0.034590588158832314, + "acc_norm": 0.4088669950738916, + "acc_norm_stderr": 0.034590588158832314 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.43870967741935485, + "acc_stderr": 0.02822949732031722, + "acc_norm": 0.43870967741935485, + "acc_norm_stderr": 0.02822949732031722 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6153846153846154, + "acc_stderr": 0.03187195347942466, + "acc_norm": 0.6153846153846154, + "acc_norm_stderr": 0.03187195347942466 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4037735849056604, + "acc_stderr": 0.03019761160019795, + "acc_norm": 0.4037735849056604, + "acc_norm_stderr": 0.03019761160019795 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.42727272727272725, + "acc_stderr": 0.04738198703545483, + "acc_norm": 0.42727272727272725, + "acc_norm_stderr": 0.04738198703545483 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24814814814814815, + "acc_stderr": 0.0263357394040558, + "acc_norm": 0.24814814814814815, + "acc_norm_stderr": 0.0263357394040558 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.23178807947019867, + "acc_stderr": 0.03445406271987054, + "acc_norm": 0.23178807947019867, + "acc_norm_stderr": 0.03445406271987054 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5223880597014925, + "acc_stderr": 0.03531987930208731, + "acc_norm": 
0.5223880597014925, + "acc_norm_stderr": 0.03531987930208731 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3179190751445087, + "acc_stderr": 0.03550683989165582, + "acc_norm": 0.3179190751445087, + "acc_norm_stderr": 0.03550683989165582 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.26455026455026454, + "acc_stderr": 0.022717467897708617, + "acc_norm": 0.26455026455026454, + "acc_norm_stderr": 0.022717467897708617 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3263888888888889, + "acc_stderr": 0.03921067198982266, + "acc_norm": 0.3263888888888889, + "acc_norm_stderr": 0.03921067198982266 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.53, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.53, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.43641618497109824, + "acc_stderr": 0.026700545424943684, + "acc_norm": 0.43641618497109824, + "acc_norm_stderr": 0.026700545424943684 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3987730061349693, + "acc_stderr": 0.03847021420456024, + "acc_norm": 0.3987730061349693, + "acc_norm_stderr": 0.03847021420456024 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.42592592592592593, + "acc_stderr": 0.027513747284379424, + "acc_norm": 0.42592592592592593, + "acc_norm_stderr": 0.027513747284379424 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816505, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816505 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.45595854922279794, + "acc_stderr": 0.035944137112724366, + "acc_norm": 0.45595854922279794, + "acc_norm_stderr": 0.035944137112724366 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2982456140350877, + "acc_stderr": 
0.043036840335373173, + "acc_norm": 0.2982456140350877, + "acc_norm_stderr": 0.043036840335373173 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.42201834862385323, + "acc_stderr": 0.02117499140776317, + "acc_norm": 0.42201834862385323, + "acc_norm_stderr": 0.02117499140776317 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.24603174603174602, + "acc_stderr": 0.03852273364924316, + "acc_norm": 0.24603174603174602, + "acc_norm_stderr": 0.03852273364924316 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.39869281045751637, + "acc_stderr": 0.028036092273891765, + "acc_norm": 0.39869281045751637, + "acc_norm_stderr": 0.028036092273891765 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6115702479338843, + "acc_stderr": 0.04449270350068383, + "acc_norm": 0.6115702479338843, + "acc_norm_stderr": 0.04449270350068383 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4605263157894737, + "acc_stderr": 0.04056242252249033, + "acc_norm": 0.4605263157894737, + "acc_norm_stderr": 0.04056242252249033 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.33169934640522875, + "acc_stderr": 0.01904748523936038, + "acc_norm": 0.33169934640522875, + "acc_norm_stderr": 0.01904748523936038 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.31560283687943264, + "acc_stderr": 0.027724989449509317, + "acc_norm": 0.31560283687943264, + "acc_norm_stderr": 0.027724989449509317 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25, + "acc_stderr": 0.04109974682633932, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04109974682633932 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2638888888888889, + "acc_stderr": 0.030058202704309846, + "acc_norm": 0.2638888888888889, + "acc_norm_stderr": 0.030058202704309846 + }, + "harness|ko_mmlu_moral_scenarios|5": { + 
"acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.2757352941176471, + "acc_stderr": 0.027146271936625162, + "acc_norm": 0.2757352941176471, + "acc_norm_stderr": 0.027146271936625162 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3183673469387755, + "acc_stderr": 0.029822533793982052, + "acc_norm": 0.3183673469387755, + "acc_norm_stderr": 0.029822533793982052 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.45147679324894513, + "acc_stderr": 0.032393600173974704, + "acc_norm": 0.45147679324894513, + "acc_norm_stderr": 0.032393600173974704 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2926988265971317, + "acc_stderr": 0.01162094919584953, + "acc_norm": 0.2926988265971317, + "acc_norm_stderr": 0.01162094919584953 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.35784313725490197, + "acc_stderr": 0.033644872860882996, + "acc_norm": 0.35784313725490197, + "acc_norm_stderr": 0.033644872860882996 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4303030303030303, + "acc_stderr": 0.03866225962879076, + "acc_norm": 0.4303030303030303, + "acc_norm_stderr": 0.03866225962879076 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3084455324357405, + "mc1_stderr": 0.01616803938315687, + "mc2": 0.4796326606446058, + "mc2_stderr": 0.015522483900178127 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.46162927981109797, + "acc_stderr": 0.017139660221845564, + "acc_norm": 0.5608028335301063, + "acc_norm_stderr": 
0.017062775744780705 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + 
"harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "AIdenU/LLAMA-2-13b-ko-Y24-DPO_v2.0", + "model_sha": "64a7cfb337e43c31e60f8a3710e55db2b76bc09a", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/AIdenU/LLAMA-2-13b-ko-Y24-DPO_v2.1/result_2024-02-19 04:04:08.json b/AIdenU/LLAMA-2-13b-ko-Y24-DPO_v2.1/result_2024-02-19 04:04:08.json new file mode 100644 index 0000000000000000000000000000000000000000..7ce202713f52a41d7d17e845b908f9f682a78c38 --- /dev/null +++ b/AIdenU/LLAMA-2-13b-ko-Y24-DPO_v2.1/result_2024-02-19 04:04:08.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.31399317406143346, + "acc_stderr": 0.013562691224726284, + "acc_norm": 0.378839590443686, + "acc_norm_stderr": 0.014175915490000322 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3615813582951603, + "acc_stderr": 0.00479476484368527, + "acc_norm": 0.4645488946425015, + "acc_norm_stderr": 0.004977223485342025 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4269005847953216, + "acc_stderr": 0.03793620616529918, + "acc_norm": 0.4269005847953216, + "acc_norm_stderr": 0.03793620616529918 + }, 
+ "harness|ko_mmlu_management|5": { + "acc": 0.39805825242718446, + "acc_stderr": 0.0484674825397724, + "acc_norm": 0.39805825242718446, + "acc_norm_stderr": 0.0484674825397724 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5006385696040868, + "acc_stderr": 0.01787994891443169, + "acc_norm": 0.5006385696040868, + "acc_norm_stderr": 0.01787994891443169 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4, + "acc_stderr": 0.042320736951515885, + "acc_norm": 0.4, + "acc_norm_stderr": 0.042320736951515885 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39148936170212767, + "acc_stderr": 0.03190701242326812, + "acc_norm": 0.39148936170212767, + "acc_norm_stderr": 0.03190701242326812 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3313253012048193, + "acc_stderr": 0.03664314777288087, + "acc_norm": 0.3313253012048193, + "acc_norm_stderr": 0.03664314777288087 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4662379421221865, + "acc_stderr": 0.028333277109562793, + "acc_norm": 0.4662379421221865, + "acc_norm_stderr": 0.028333277109562793 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3991031390134529, + "acc_stderr": 0.03286745312567961, + "acc_norm": 0.3991031390134529, + "acc_norm_stderr": 0.03286745312567961 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4351145038167939, + "acc_stderr": 0.04348208051644858, + "acc_norm": 0.4351145038167939, + "acc_norm_stderr": 0.04348208051644858 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.4494949494949495, + "acc_stderr": 0.0354413249194797, + "acc_norm": 0.4494949494949495, + "acc_norm_stderr": 0.0354413249194797 + }, + 
"harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.42758620689655175, + "acc_stderr": 0.04122737111370333, + "acc_norm": 0.42758620689655175, + "acc_norm_stderr": 0.04122737111370333 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.19607843137254902, + "acc_stderr": 0.03950581861179962, + "acc_norm": 0.19607843137254902, + "acc_norm_stderr": 0.03950581861179962 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3739495798319328, + "acc_stderr": 0.031429466378837076, + "acc_norm": 0.3739495798319328, + "acc_norm_stderr": 0.031429466378837076 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.35384615384615387, + "acc_stderr": 0.024243783994062188, + "acc_norm": 0.35384615384615387, + "acc_norm_stderr": 0.024243783994062188 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.048262172941398944, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.048262172941398944 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39901477832512317, + "acc_stderr": 0.03445487686264715, + "acc_norm": 0.39901477832512317, + "acc_norm_stderr": 0.03445487686264715 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.45161290322580644, + "acc_stderr": 0.02831050034856839, + "acc_norm": 0.45161290322580644, + "acc_norm_stderr": 0.02831050034856839 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5683760683760684, + "acc_stderr": 0.0324483553531149, + "acc_norm": 0.5683760683760684, + "acc_norm_stderr": 0.0324483553531149 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3849056603773585, + "acc_stderr": 0.029946498567699948, + "acc_norm": 
0.3849056603773585, + "acc_norm_stderr": 0.029946498567699948 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.44545454545454544, + "acc_stderr": 0.047605488214603246, + "acc_norm": 0.44545454545454544, + "acc_norm_stderr": 0.047605488214603246 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24074074074074073, + "acc_stderr": 0.0260671592222758, + "acc_norm": 0.24074074074074073, + "acc_norm_stderr": 0.0260671592222758 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.23841059602649006, + "acc_stderr": 0.03479185572599661, + "acc_norm": 0.23841059602649006, + "acc_norm_stderr": 0.03479185572599661 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5223880597014925, + "acc_stderr": 0.035319879302087305, + "acc_norm": 0.5223880597014925, + "acc_norm_stderr": 0.035319879302087305 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3063583815028902, + "acc_stderr": 0.035149425512674394, + "acc_norm": 0.3063583815028902, + "acc_norm_stderr": 0.035149425512674394 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2671957671957672, + "acc_stderr": 0.022789673145776568, + "acc_norm": 0.2671957671957672, + "acc_norm_stderr": 0.022789673145776568 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3194444444444444, + "acc_stderr": 0.03899073687357336, + "acc_norm": 0.3194444444444444, + "acc_norm_stderr": 0.03899073687357336 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4161849710982659, + "acc_stderr": 0.026538189104705484, + "acc_norm": 0.4161849710982659, + "acc_norm_stderr": 0.026538189104705484 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3987730061349693, + 
"acc_stderr": 0.03847021420456023, + "acc_norm": 0.3987730061349693, + "acc_norm_stderr": 0.03847021420456023 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.43209876543209874, + "acc_stderr": 0.027563010971606672, + "acc_norm": 0.43209876543209874, + "acc_norm_stderr": 0.027563010971606672 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036845, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036845 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.44559585492227977, + "acc_stderr": 0.035870149860756595, + "acc_norm": 0.44559585492227977, + "acc_norm_stderr": 0.035870149860756595 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.32456140350877194, + "acc_stderr": 0.04404556157374768, + "acc_norm": 0.32456140350877194, + "acc_norm_stderr": 0.04404556157374768 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.4091743119266055, + "acc_stderr": 0.02108067026443373, + "acc_norm": 0.4091743119266055, + "acc_norm_stderr": 0.02108067026443373 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.039325376803928704, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.039325376803928704 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4019607843137255, + "acc_stderr": 0.028074158947600663, + "acc_norm": 0.4019607843137255, + "acc_norm_stderr": 0.028074158947600663 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5950413223140496, + "acc_stderr": 0.04481137755942469, + "acc_norm": 0.5950413223140496, + "acc_norm_stderr": 0.04481137755942469 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.42105263157894735, + "acc_stderr": 0.040179012759817494, + "acc_norm": 0.42105263157894735, + "acc_norm_stderr": 0.040179012759817494 + }, + 
"harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3366013071895425, + "acc_stderr": 0.01911721391149515, + "acc_norm": 0.3366013071895425, + "acc_norm_stderr": 0.01911721391149515 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3120567375886525, + "acc_stderr": 0.027640120545169938, + "acc_norm": 0.3120567375886525, + "acc_norm_stderr": 0.027640120545169938 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25892857142857145, + "acc_stderr": 0.041577515398656284, + "acc_norm": 0.25892857142857145, + "acc_norm_stderr": 0.041577515398656284 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.24537037037037038, + "acc_stderr": 0.029346665094372927, + "acc_norm": 0.24537037037037038, + "acc_norm_stderr": 0.029346665094372927 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.23897058823529413, + "acc_stderr": 0.025905280644893006, + "acc_norm": 0.23897058823529413, + "acc_norm_stderr": 0.025905280644893006 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3020408163265306, + "acc_stderr": 0.02939360931987982, + "acc_norm": 0.3020408163265306, + "acc_norm_stderr": 0.02939360931987982 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.4388185654008439, + "acc_stderr": 0.032302649315470375, + "acc_norm": 0.4388185654008439, + "acc_norm_stderr": 0.032302649315470375 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.288135593220339, + "acc_stderr": 
0.011567140661324561, + "acc_norm": 0.288135593220339, + "acc_norm_stderr": 0.011567140661324561 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.37254901960784315, + "acc_stderr": 0.03393388584958404, + "acc_norm": 0.37254901960784315, + "acc_norm_stderr": 0.03393388584958404 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.41818181818181815, + "acc_stderr": 0.038517163193983954, + "acc_norm": 0.41818181818181815, + "acc_norm_stderr": 0.038517163193983954 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2558139534883721, + "mc1_stderr": 0.015274176219283345, + "mc2": 0.4196251845895743, + "mc2_stderr": 0.015120881881369678 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4214876033057851, + "acc_stderr": 0.016977101932601518, + "acc_norm": 0.5277449822904369, + "acc_norm_stderr": 0.01716386797945601 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + 
"harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "AIdenU/LLAMA-2-13b-ko-Y24-DPO_v2.1", + "model_sha": "c0f610c27136d4e990134245cb7c9f93e8ceb400", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/AIdenU/LLAMA-2-13b-ko-Y24_v2.0/result_2024-01-24 23:59:55.json 
b/AIdenU/LLAMA-2-13b-ko-Y24_v2.0/result_2024-01-24 23:59:55.json new file mode 100644 index 0000000000000000000000000000000000000000..b7c0984302a26103edf94e74e304107582bf726b --- /dev/null +++ b/AIdenU/LLAMA-2-13b-ko-Y24_v2.0/result_2024-01-24 23:59:55.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.31569965870307165, + "acc_stderr": 0.013582571095815291, + "acc_norm": 0.378839590443686, + "acc_norm_stderr": 0.01417591549000032 + }, + "harness|ko_hellaswag|10": { + "acc": 0.36128261302529374, + "acc_stderr": 0.004793904922401889, + "acc_norm": 0.46106353316072496, + "acc_norm_stderr": 0.004974628903829141 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.39766081871345027, + "acc_stderr": 0.0375363895576169, + "acc_norm": 0.39766081871345027, + "acc_norm_stderr": 0.0375363895576169 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.39805825242718446, + "acc_stderr": 0.0484674825397724, + "acc_norm": 0.39805825242718446, + "acc_norm_stderr": 0.0484674825397724 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4942528735632184, + "acc_stderr": 0.017878782326129234, + "acc_norm": 0.4942528735632184, + "acc_norm_stderr": 0.017878782326129234 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4, + "acc_stderr": 0.042320736951515885, + "acc_norm": 0.4, + "acc_norm_stderr": 0.042320736951515885 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4, + "acc_stderr": 0.03202563076101737, + "acc_norm": 0.4, + "acc_norm_stderr": 0.03202563076101737 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3433734939759036, + "acc_stderr": 0.03696584317010601, + "acc_norm": 0.3433734939759036, + "acc_norm_stderr": 0.03696584317010601 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4565916398713826, + "acc_stderr": 0.028290869054197598, + "acc_norm": 
0.4565916398713826, + "acc_norm_stderr": 0.028290869054197598 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3811659192825112, + "acc_stderr": 0.03259625118416827, + "acc_norm": 0.3811659192825112, + "acc_norm_stderr": 0.03259625118416827 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4351145038167939, + "acc_stderr": 0.043482080516448585, + "acc_norm": 0.4351145038167939, + "acc_norm_stderr": 0.043482080516448585 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.03540294377095368, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.03540294377095368 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4206896551724138, + "acc_stderr": 0.0411391498118926, + "acc_norm": 0.4206896551724138, + "acc_norm_stderr": 0.0411391498118926 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.041583075330832865, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.041583075330832865 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3739495798319328, + "acc_stderr": 0.031429466378837076, + "acc_norm": 0.3739495798319328, + "acc_norm_stderr": 0.031429466378837076 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.358974358974359, + "acc_stderr": 0.02432173848460237, + "acc_norm": 0.358974358974359, + "acc_norm_stderr": 0.02432173848460237 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.04830366024635331, + 
"acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3891625615763547, + "acc_stderr": 0.034304624161038716, + "acc_norm": 0.3891625615763547, + "acc_norm_stderr": 0.034304624161038716 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.45161290322580644, + "acc_stderr": 0.028310500348568385, + "acc_norm": 0.45161290322580644, + "acc_norm_stderr": 0.028310500348568385 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.03255326307272487, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.03255326307272487 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.37735849056603776, + "acc_stderr": 0.029832808114796005, + "acc_norm": 0.37735849056603776, + "acc_norm_stderr": 0.029832808114796005 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4636363636363636, + "acc_stderr": 0.04776449162396197, + "acc_norm": 0.4636363636363636, + "acc_norm_stderr": 0.04776449162396197 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24814814814814815, + "acc_stderr": 0.0263357394040558, + "acc_norm": 0.24814814814814815, + "acc_norm_stderr": 0.0263357394040558 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.23841059602649006, + "acc_stderr": 0.03479185572599661, + "acc_norm": 0.23841059602649006, + "acc_norm_stderr": 0.03479185572599661 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.527363184079602, + "acc_stderr": 0.03530235517334682, + "acc_norm": 0.527363184079602, + "acc_norm_stderr": 0.03530235517334682 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.31213872832369943, + "acc_stderr": 0.035331333893236574, + "acc_norm": 0.31213872832369943, + "acc_norm_stderr": 0.035331333893236574 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.022569897074918417, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.022569897074918417 + }, + 
"harness|ko_mmlu_college_biology|5": { + "acc": 0.3194444444444444, + "acc_stderr": 0.03899073687357336, + "acc_norm": 0.3194444444444444, + "acc_norm_stderr": 0.03899073687357336 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.53, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.53, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4161849710982659, + "acc_stderr": 0.026538189104705488, + "acc_norm": 0.4161849710982659, + "acc_norm_stderr": 0.026538189104705488 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4049079754601227, + "acc_stderr": 0.03856672163548913, + "acc_norm": 0.4049079754601227, + "acc_norm_stderr": 0.03856672163548913 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.027431623722415012, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.027431623722415012 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.47150259067357514, + "acc_stderr": 0.036025735712884414, + "acc_norm": 0.47150259067357514, + "acc_norm_stderr": 0.036025735712884414 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.32456140350877194, + "acc_stderr": 0.044045561573747685, + "acc_norm": 0.32456140350877194, + "acc_norm_stderr": 0.044045561573747685 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.41467889908256883, + "acc_stderr": 0.021122903208602595, + "acc_norm": 0.41467889908256883, + "acc_norm_stderr": 0.021122903208602595 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2698412698412698, + "acc_stderr": 0.03970158273235172, + "acc_norm": 0.2698412698412698, + "acc_norm_stderr": 
0.03970158273235172 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4117647058823529, + "acc_stderr": 0.02818059632825929, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.02818059632825929 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6198347107438017, + "acc_stderr": 0.04431324501968432, + "acc_norm": 0.6198347107438017, + "acc_norm_stderr": 0.04431324501968432 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.47368421052631576, + "acc_stderr": 0.04063302731486671, + "acc_norm": 0.47368421052631576, + "acc_norm_stderr": 0.04063302731486671 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3431372549019608, + "acc_stderr": 0.019206606848825365, + "acc_norm": 0.3431372549019608, + "acc_norm_stderr": 0.019206606848825365 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3049645390070922, + "acc_stderr": 0.027464708442022128, + "acc_norm": 0.3049645390070922, + "acc_norm_stderr": 0.027464708442022128 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.24107142857142858, + "acc_stderr": 0.04059867246952688, + "acc_norm": 0.24107142857142858, + "acc_norm_stderr": 0.04059867246952688 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.0305467452649532, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.0305467452649532 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + 
"acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.25735294117647056, + "acc_stderr": 0.02655651947004153, + "acc_norm": 0.25735294117647056, + "acc_norm_stderr": 0.02655651947004153 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3183673469387755, + "acc_stderr": 0.029822533793982052, + "acc_norm": 0.3183673469387755, + "acc_norm_stderr": 0.029822533793982052 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.4472573839662447, + "acc_stderr": 0.03236564251614192, + "acc_norm": 0.4472573839662447, + "acc_norm_stderr": 0.03236564251614192 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2926988265971317, + "acc_stderr": 0.01162094919584953, + "acc_norm": 0.2926988265971317, + "acc_norm_stderr": 0.01162094919584953 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.37254901960784315, + "acc_stderr": 0.03393388584958404, + "acc_norm": 0.37254901960784315, + "acc_norm_stderr": 0.03393388584958404 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.40606060606060607, + "acc_stderr": 0.03834816355401181, + "acc_norm": 0.40606060606060607, + "acc_norm_stderr": 0.03834816355401181 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.26193390452876375, + "mc1_stderr": 0.015392118805015006, + "mc2": 0.4252802014875463, + "mc2_stderr": 0.015159718417104805 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4179456906729634, + "acc_stderr": 0.01695729200527972, + "acc_norm": 0.5324675324675324, + "acc_norm_stderr": 0.017154073716682865 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + 
"harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + 
"harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "AIdenU/LLAMA-2-13b-ko-Y24_v2.0", + "model_sha": "f58dd2241e16a20ea477d50451305dcfa336c881", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/AIdenU/LLAMA-2-13b-koen-Y24_v1.0/result_2024-02-21 01:51:02.json b/AIdenU/LLAMA-2-13b-koen-Y24_v1.0/result_2024-02-21 01:51:02.json new file mode 100644 index 0000000000000000000000000000000000000000..b45bc3244db53f8864a4193dcfc4a2748479dd1d --- /dev/null +++ b/AIdenU/LLAMA-2-13b-koen-Y24_v1.0/result_2024-02-21 01:51:02.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.31569965870307165, + "acc_stderr": 0.013582571095815291, + "acc_norm": 0.3660409556313993, + "acc_norm_stderr": 0.01407722310847014 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3599880501892053, + "acc_stderr": 0.004790155370993447, + "acc_norm": 0.45956980681139215, + "acc_norm_stderr": 0.004973442060741622 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.43859649122807015, + "acc_stderr": 0.038057975055904594, + "acc_norm": 0.43859649122807015, + "acc_norm_stderr": 0.038057975055904594 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4368932038834951, + "acc_stderr": 0.04911147107365777, + "acc_norm": 0.4368932038834951, + "acc_norm_stderr": 0.04911147107365777 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.48659003831417624, + "acc_stderr": 0.017873531736510385, + "acc_norm": 0.48659003831417624, + "acc_norm_stderr": 0.017873531736510385 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 
0.37037037037037035, + "acc_stderr": 0.041716541613545426, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.041716541613545426 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.37872340425531914, + "acc_stderr": 0.03170995606040655, + "acc_norm": 0.37872340425531914, + "acc_norm_stderr": 0.03170995606040655 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3433734939759036, + "acc_stderr": 0.03696584317010602, + "acc_norm": 0.3433734939759036, + "acc_norm_stderr": 0.03696584317010602 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4565916398713826, + "acc_stderr": 0.028290869054197598, + "acc_norm": 0.4565916398713826, + "acc_norm_stderr": 0.028290869054197598 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3721973094170404, + "acc_stderr": 0.03244305283008732, + "acc_norm": 0.3721973094170404, + "acc_norm_stderr": 0.03244305283008732 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4580152671755725, + "acc_stderr": 0.04369802690578757, + "acc_norm": 0.4580152671755725, + "acc_norm_stderr": 0.04369802690578757 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.4595959595959596, + "acc_stderr": 0.035507024651313425, + "acc_norm": 0.4595959595959596, + "acc_norm_stderr": 0.035507024651313425 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3931034482758621, + "acc_stderr": 0.040703290137070705, + "acc_norm": 0.3931034482758621, + "acc_norm_stderr": 0.040703290137070705 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237655, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237655 + }, + 
"harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.031566630992154156, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.031566630992154156 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.38461538461538464, + "acc_stderr": 0.02466674491518724, + "acc_norm": 0.38461538461538464, + "acc_norm_stderr": 0.02466674491518724 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.04820403072760628, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.04820403072760628 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3694581280788177, + "acc_stderr": 0.03395970381998574, + "acc_norm": 0.3694581280788177, + "acc_norm_stderr": 0.03395970381998574 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4483870967741935, + "acc_stderr": 0.028292056830112735, + "acc_norm": 0.4483870967741935, + "acc_norm_stderr": 0.028292056830112735 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.594017094017094, + "acc_stderr": 0.03217180182641086, + "acc_norm": 0.594017094017094, + "acc_norm_stderr": 0.03217180182641086 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3886792452830189, + "acc_stderr": 0.030000485448675986, + "acc_norm": 0.3886792452830189, + "acc_norm_stderr": 0.030000485448675986 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4909090909090909, + "acc_stderr": 0.0478833976870286, + "acc_norm": 0.4909090909090909, + "acc_norm_stderr": 0.0478833976870286 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24074074074074073, + "acc_stderr": 0.02606715922227578, + "acc_norm": 0.24074074074074073, 
+ "acc_norm_stderr": 0.02606715922227578 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.25165562913907286, + "acc_stderr": 0.03543304234389985, + "acc_norm": 0.25165562913907286, + "acc_norm_stderr": 0.03543304234389985 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.572139303482587, + "acc_stderr": 0.03498541988407795, + "acc_norm": 0.572139303482587, + "acc_norm_stderr": 0.03498541988407795 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.32947976878612717, + "acc_stderr": 0.03583901754736411, + "acc_norm": 0.32947976878612717, + "acc_norm_stderr": 0.03583901754736411 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2751322751322751, + "acc_stderr": 0.023000086859068642, + "acc_norm": 0.2751322751322751, + "acc_norm_stderr": 0.023000086859068642 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3541666666666667, + "acc_stderr": 0.03999411135753543, + "acc_norm": 0.3541666666666667, + "acc_norm_stderr": 0.03999411135753543 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.57, + "acc_stderr": 0.04975698519562427, + "acc_norm": 0.57, + "acc_norm_stderr": 0.04975698519562427 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.43641618497109824, + "acc_stderr": 0.026700545424943684, + "acc_norm": 0.43641618497109824, + "acc_norm_stderr": 0.026700545424943684 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.36809815950920244, + "acc_stderr": 0.03789213935838396, + "acc_norm": 0.36809815950920244, + "acc_norm_stderr": 0.03789213935838396 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.404320987654321, + "acc_stderr": 0.027306625297327688, + "acc_norm": 0.404320987654321, + "acc_norm_stderr": 0.027306625297327688 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + 
"acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.47150259067357514, + "acc_stderr": 0.03602573571288442, + "acc_norm": 0.47150259067357514, + "acc_norm_stderr": 0.03602573571288442 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3157894736842105, + "acc_stderr": 0.04372748290278009, + "acc_norm": 0.3157894736842105, + "acc_norm_stderr": 0.04372748290278009 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.42935779816513764, + "acc_stderr": 0.021222286397236518, + "acc_norm": 0.42935779816513764, + "acc_norm_stderr": 0.021222286397236518 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2698412698412698, + "acc_stderr": 0.03970158273235173, + "acc_norm": 0.2698412698412698, + "acc_norm_stderr": 0.03970158273235173 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.028452639985088006, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.028452639985088006 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.628099173553719, + "acc_stderr": 0.044120158066245044, + "acc_norm": 0.628099173553719, + "acc_norm_stderr": 0.044120158066245044 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4473684210526316, + "acc_stderr": 0.04046336883978251, + "acc_norm": 0.4473684210526316, + "acc_norm_stderr": 0.04046336883978251 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3300653594771242, + "acc_stderr": 0.019023726160724553, + "acc_norm": 0.3300653594771242, + "acc_norm_stderr": 0.019023726160724553 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3120567375886525, + "acc_stderr": 0.027640120545169938, + "acc_norm": 0.3120567375886525, + "acc_norm_stderr": 0.027640120545169938 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25, + "acc_stderr": 
0.04109974682633932, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04109974682633932 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.30092592592592593, + "acc_stderr": 0.031280390843298825, + "acc_norm": 0.30092592592592593, + "acc_norm_stderr": 0.031280390843298825 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.33088235294117646, + "acc_stderr": 0.02858270975389843, + "acc_norm": 0.33088235294117646, + "acc_norm_stderr": 0.02858270975389843 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3673469387755102, + "acc_stderr": 0.030862144921087565, + "acc_norm": 0.3673469387755102, + "acc_norm_stderr": 0.030862144921087565 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.4641350210970464, + "acc_stderr": 0.03246338898055659, + "acc_norm": 0.4641350210970464, + "acc_norm_stderr": 0.03246338898055659 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.29726205997392435, + "acc_stderr": 0.011673346173086034, + "acc_norm": 0.29726205997392435, + "acc_norm_stderr": 0.011673346173086034 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.35784313725490197, + "acc_stderr": 0.033644872860882996, + "acc_norm": 0.35784313725490197, + "acc_norm_stderr": 0.033644872860882996 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.43636363636363634, + "acc_stderr": 0.03872592983524754, + "acc_norm": 0.43636363636363634, + "acc_norm_stderr": 0.03872592983524754 + }, + 
"harness|ko_truthfulqa_mc|0": { + "mc1": 0.2741738066095471, + "mc1_stderr": 0.015616518497219374, + "mc2": 0.4349357238291092, + "mc2_stderr": 0.015145789899523338 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.42502951593860683, + "acc_stderr": 0.016996016308362887, + "acc_norm": 0.5395513577331759, + "acc_norm_stderr": 0.017136487626049846 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "AIdenU/LLAMA-2-13b-koen-Y24_v1.0", + "model_sha": "29322c0eaa54ff261284806e15aba5ecb93edcab", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/AIdenU/Mistral-7B-v0.2-ko-Y24_v2.0/result_2024-04-24 03:33:53.json b/AIdenU/Mistral-7B-v0.2-ko-Y24_v2.0/result_2024-04-24 03:33:53.json new file mode 100644 index 0000000000000000000000000000000000000000..8b667bd03002eb18a78001d289bb5764b197fba3 --- /dev/null +++ b/AIdenU/Mistral-7B-v0.2-ko-Y24_v2.0/result_2024-04-24 03:33:53.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3464163822525597, + "acc_stderr": 0.013905011180063244, + "acc_norm": 0.40273037542662116, + "acc_norm_stderr": 0.01433223630679014 + }, + "harness|ko_hellaswag|10": { + "acc": 
0.38408683529177456, + "acc_stderr": 0.004853845750392158, + "acc_norm": 0.5014937263493328, + "acc_norm_stderr": 0.0049897591448122905 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.391812865497076, + "acc_stderr": 0.037439798259263996, + "acc_norm": 0.391812865497076, + "acc_norm_stderr": 0.037439798259263996 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.22330097087378642, + "acc_stderr": 0.04123553189891431, + "acc_norm": 0.22330097087378642, + "acc_norm_stderr": 0.04123553189891431 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.39846743295019155, + "acc_stderr": 0.01750743860277739, + "acc_norm": 0.39846743295019155, + "acc_norm_stderr": 0.01750743860277739 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.04171654161354543, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.04171654161354543 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.31063829787234043, + "acc_stderr": 0.03025123757921317, + "acc_norm": 0.31063829787234043, + "acc_norm_stderr": 0.03025123757921317 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3493975903614458, + "acc_stderr": 0.03711725190740753, + "acc_norm": 0.3493975903614458, + "acc_norm_stderr": 0.03711725190740753 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.43729903536977494, + "acc_stderr": 0.028173917761762885, + "acc_norm": 0.43729903536977494, + "acc_norm_stderr": 0.028173917761762885 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.34977578475336324, + "acc_stderr": 0.03200736719484503, + "acc_norm": 0.34977578475336324, + "acc_norm_stderr": 0.03200736719484503 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.22900763358778625, + "acc_stderr": 0.036853466317118506, + "acc_norm": 0.22900763358778625, + "acc_norm_stderr": 0.036853466317118506 + }, + 
"harness|ko_mmlu_medical_genetics|5": { + "acc": 0.35, + "acc_stderr": 0.04793724854411021, + "acc_norm": 0.35, + "acc_norm_stderr": 0.04793724854411021 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5252525252525253, + "acc_stderr": 0.03557806245087314, + "acc_norm": 0.5252525252525253, + "acc_norm_stderr": 0.03557806245087314 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.35172413793103446, + "acc_stderr": 0.0397923663749741, + "acc_norm": 0.35172413793103446, + "acc_norm_stderr": 0.0397923663749741 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.04023382273617747, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.04023382273617747 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.25630252100840334, + "acc_stderr": 0.028359620870533953, + "acc_norm": 0.25630252100840334, + "acc_norm_stderr": 0.028359620870533953 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.36923076923076925, + "acc_stderr": 0.024468615241478902, + "acc_norm": 0.36923076923076925, + "acc_norm_stderr": 0.024468615241478902 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.35467980295566504, + "acc_stderr": 0.03366124489051449, + "acc_norm": 0.35467980295566504, + "acc_norm_stderr": 0.03366124489051449 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.36129032258064514, + "acc_stderr": 0.027327548447957543, + "acc_norm": 0.36129032258064514, + 
"acc_norm_stderr": 0.027327548447957543 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5341880341880342, + "acc_stderr": 0.03267942734081228, + "acc_norm": 0.5341880341880342, + "acc_norm_stderr": 0.03267942734081228 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.44150943396226416, + "acc_stderr": 0.030561590426731833, + "acc_norm": 0.44150943396226416, + "acc_norm_stderr": 0.030561590426731833 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4090909090909091, + "acc_stderr": 0.04709306978661896, + "acc_norm": 0.4090909090909091, + "acc_norm_stderr": 0.04709306978661896 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.028742040903948492, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.028742040903948492 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3509933774834437, + "acc_stderr": 0.03896981964257375, + "acc_norm": 0.3509933774834437, + "acc_norm_stderr": 0.03896981964257375 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.373134328358209, + "acc_stderr": 0.03419832608176006, + "acc_norm": 0.373134328358209, + "acc_norm_stderr": 0.03419832608176006 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3988439306358382, + "acc_stderr": 0.03733626655383509, + "acc_norm": 0.3988439306358382, + "acc_norm_stderr": 0.03733626655383509 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.023809523809523867, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.023809523809523867 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3194444444444444, + "acc_stderr": 0.03899073687357336, + "acc_norm": 0.3194444444444444, + "acc_norm_stderr": 0.03899073687357336 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.57, + "acc_stderr": 
0.04975698519562426, + "acc_norm": 0.57, + "acc_norm_stderr": 0.04975698519562426 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4277456647398844, + "acc_stderr": 0.026636539741116072, + "acc_norm": 0.4277456647398844, + "acc_norm_stderr": 0.026636539741116072 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3312883435582822, + "acc_stderr": 0.03697983910025588, + "acc_norm": 0.3312883435582822, + "acc_norm_stderr": 0.03697983910025588 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.42901234567901236, + "acc_stderr": 0.027538925613470863, + "acc_norm": 0.42901234567901236, + "acc_norm_stderr": 0.027538925613470863 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.40414507772020725, + "acc_stderr": 0.0354150857888402, + "acc_norm": 0.40414507772020725, + "acc_norm_stderr": 0.0354150857888402 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3157894736842105, + "acc_stderr": 0.04372748290278007, + "acc_norm": 0.3157894736842105, + "acc_norm_stderr": 0.04372748290278007 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.48073394495412847, + "acc_stderr": 0.02142140298254889, + "acc_norm": 0.48073394495412847, + "acc_norm_stderr": 0.02142140298254889 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.31746031746031744, + "acc_stderr": 0.0416345303130286, + "acc_norm": 0.31746031746031744, + "acc_norm_stderr": 0.0416345303130286 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.48366013071895425, + "acc_stderr": 0.028614624752805407, + "acc_norm": 0.48366013071895425, + "acc_norm_stderr": 0.028614624752805407 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.45, + "acc_stderr": 0.049999999999999996, + "acc_norm": 0.45, + "acc_norm_stderr": 0.049999999999999996 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 
0.6611570247933884, + "acc_stderr": 0.04320767807536669, + "acc_norm": 0.6611570247933884, + "acc_norm_stderr": 0.04320767807536669 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.48026315789473684, + "acc_stderr": 0.04065771002562603, + "acc_norm": 0.48026315789473684, + "acc_norm_stderr": 0.04065771002562603 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.315359477124183, + "acc_stderr": 0.018798086284886894, + "acc_norm": 0.315359477124183, + "acc_norm_stderr": 0.018798086284886894 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3120567375886525, + "acc_stderr": 0.027640120545169938, + "acc_norm": 0.3120567375886525, + "acc_norm_stderr": 0.027640120545169938 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.04287858751340456, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.04287858751340456 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.26851851851851855, + "acc_stderr": 0.030225226160012393, + "acc_norm": 0.26851851851851855, + "acc_norm_stderr": 0.030225226160012393 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.3486033519553073, + "acc_stderr": 0.015937484656687026, + "acc_norm": 0.3486033519553073, + "acc_norm_stderr": 0.015937484656687026 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.027678468642144717, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.027678468642144717 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5224489795918368, + "acc_stderr": 0.03197694118713672, + "acc_norm": 0.5224489795918368, + "acc_norm_stderr": 
0.03197694118713672 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.540084388185654, + "acc_stderr": 0.03244246810187913, + "acc_norm": 0.540084388185654, + "acc_norm_stderr": 0.03244246810187913 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3376792698826597, + "acc_stderr": 0.01207856377714556, + "acc_norm": 0.3376792698826597, + "acc_norm_stderr": 0.01207856377714556 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.034849415144292316, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.034849415144292316 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.48484848484848486, + "acc_stderr": 0.03902551007374449, + "acc_norm": 0.48484848484848486, + "acc_norm_stderr": 0.03902551007374449 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.23623011015911874, + "mc1_stderr": 0.014869755015871082, + "mc2": 0.39036697594545927, + "mc2_stderr": 0.015122205372523354 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4592680047225502, + "acc_stderr": 0.017133218276537673, + "acc_norm": 0.5171192443919717, + "acc_norm_stderr": 0.017180275246085633 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + 
"harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "AIdenU/Mistral-7B-v0.2-ko-Y24_v2.0", + "model_sha": 
"ac9a15017c3588395547bd147a8b39d523d17a84", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/AIdenU/Mistral-7b-ko-Y24-DPO_v0.1/result_2023-12-21 04:18:43.json b/AIdenU/Mistral-7b-ko-Y24-DPO_v0.1/result_2023-12-21 04:18:43.json new file mode 100644 index 0000000000000000000000000000000000000000..bebfdb58d26165570cb00af0fca9e60b9a2d1a1f --- /dev/null +++ b/AIdenU/Mistral-7b-ko-Y24-DPO_v0.1/result_2023-12-21 04:18:43.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.35665529010238906, + "acc_stderr": 0.013998056902620196, + "acc_norm": 0.4052901023890785, + "acc_norm_stderr": 0.014346869060229328 + }, + "harness|ko_hellaswag|10": { + "acc": 0.377912766381199, + "acc_stderr": 0.004838747305783345, + "acc_norm": 0.49153555068711413, + "acc_norm_stderr": 0.004989066355449555 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.43859649122807015, + "acc_stderr": 0.038057975055904594, + "acc_norm": 0.43859649122807015, + "acc_norm_stderr": 0.038057975055904594 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5631067961165048, + "acc_stderr": 0.04911147107365777, + "acc_norm": 0.5631067961165048, + "acc_norm_stderr": 0.04911147107365777 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5006385696040868, + "acc_stderr": 0.017879948914431697, + "acc_norm": 0.5006385696040868, + "acc_norm_stderr": 0.017879948914431697 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.31851851851851853, + "acc_stderr": 0.0402477840197711, + "acc_norm": 0.31851851851851853, + "acc_norm_stderr": 0.0402477840197711 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768077, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768077 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.43829787234042555, + "acc_stderr": 0.03243618636108102, + 
"acc_norm": 0.43829787234042555, + "acc_norm_stderr": 0.03243618636108102 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.43373493975903615, + "acc_stderr": 0.03858158940685515, + "acc_norm": 0.43373493975903615, + "acc_norm_stderr": 0.03858158940685515 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4758842443729904, + "acc_stderr": 0.02836504154256457, + "acc_norm": 0.4758842443729904, + "acc_norm_stderr": 0.02836504154256457 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5112107623318386, + "acc_stderr": 0.033549366530984746, + "acc_norm": 0.5112107623318386, + "acc_norm_stderr": 0.033549366530984746 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5038167938931297, + "acc_stderr": 0.043851623256015534, + "acc_norm": 0.5038167938931297, + "acc_norm_stderr": 0.043851623256015534 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5606060606060606, + "acc_stderr": 0.035360859475294805, + "acc_norm": 0.5606060606060606, + "acc_norm_stderr": 0.035360859475294805 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4482758620689655, + "acc_stderr": 0.04144311810878151, + "acc_norm": 0.4482758620689655, + "acc_norm_stderr": 0.04144311810878151 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.041583075330832865, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.041583075330832865 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.47478991596638653, + "acc_stderr": 0.0324371805513741, + "acc_norm": 0.47478991596638653, + "acc_norm_stderr": 0.0324371805513741 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4307692307692308, + "acc_stderr": 0.025106820660539743, + "acc_norm": 0.4307692307692308, + "acc_norm_stderr": 0.025106820660539743 + }, + "harness|ko_mmlu_computer_security|5": { 
+ "acc": 0.58, + "acc_stderr": 0.04960449637488583, + "acc_norm": 0.58, + "acc_norm_stderr": 0.04960449637488583 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.04793724854411019, + "acc_norm": 0.35, + "acc_norm_stderr": 0.04793724854411019 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5277777777777778, + "acc_stderr": 0.04826217294139894, + "acc_norm": 0.5277777777777778, + "acc_norm_stderr": 0.04826217294139894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.03465304488406796, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.03465304488406796 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.47096774193548385, + "acc_stderr": 0.028396016402761008, + "acc_norm": 0.47096774193548385, + "acc_norm_stderr": 0.028396016402761008 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7478632478632479, + "acc_stderr": 0.028447965476231022, + "acc_norm": 0.7478632478632479, + "acc_norm_stderr": 0.028447965476231022 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.43018867924528303, + "acc_stderr": 0.030471445867183235, + "acc_norm": 0.43018867924528303, + "acc_norm_stderr": 0.030471445867183235 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5272727272727272, + "acc_stderr": 0.04782001791380061, + "acc_norm": 0.5272727272727272, + "acc_norm_stderr": 0.04782001791380061 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.02794045713622841, + "acc_norm": 0.3, + "acc_norm_stderr": 0.02794045713622841 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2781456953642384, + "acc_stderr": 0.03658603262763743, + "acc_norm": 0.2781456953642384, + "acc_norm_stderr": 0.03658603262763743 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6069651741293532, + "acc_stderr": 0.0345368246603156, + "acc_norm": 0.6069651741293532, + "acc_norm_stderr": 0.0345368246603156 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 
0.32947976878612717, + "acc_stderr": 0.03583901754736411, + "acc_norm": 0.32947976878612717, + "acc_norm_stderr": 0.03583901754736411 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.024870815251057093, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.024870815251057093 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3263888888888889, + "acc_stderr": 0.03921067198982266, + "acc_norm": 0.3263888888888889, + "acc_norm_stderr": 0.03921067198982266 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.63, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.63, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4682080924855491, + "acc_stderr": 0.02686462436675664, + "acc_norm": 0.4682080924855491, + "acc_norm_stderr": 0.02686462436675664 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4662576687116564, + "acc_stderr": 0.03919415545048409, + "acc_norm": 0.4662576687116564, + "acc_norm_stderr": 0.03919415545048409 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4567901234567901, + "acc_stderr": 0.027716661650194038, + "acc_norm": 0.4567901234567901, + "acc_norm_stderr": 0.027716661650194038 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.47668393782383417, + "acc_stderr": 0.03604513672442205, + "acc_norm": 0.47668393782383417, + "acc_norm_stderr": 0.03604513672442205 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3157894736842105, + "acc_stderr": 0.043727482902780085, + "acc_norm": 0.3157894736842105, + "acc_norm_stderr": 0.043727482902780085 + }, + 
"harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5137614678899083, + "acc_stderr": 0.021429202089874075, + "acc_norm": 0.5137614678899083, + "acc_norm_stderr": 0.021429202089874075 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.042163702135578345, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.042163702135578345 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.434640522875817, + "acc_stderr": 0.028384256704883037, + "acc_norm": 0.434640522875817, + "acc_norm_stderr": 0.028384256704883037 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6776859504132231, + "acc_stderr": 0.04266416363352167, + "acc_norm": 0.6776859504132231, + "acc_norm_stderr": 0.04266416363352167 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4407894736842105, + "acc_stderr": 0.040403110624904356, + "acc_norm": 0.4407894736842105, + "acc_norm_stderr": 0.040403110624904356 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3872549019607843, + "acc_stderr": 0.01970687580408563, + "acc_norm": 0.3872549019607843, + "acc_norm_stderr": 0.01970687580408563 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.35106382978723405, + "acc_stderr": 0.028473501272963764, + "acc_norm": 0.35106382978723405, + "acc_norm_stderr": 0.028473501272963764 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.375, + "acc_stderr": 0.04595091388086298, + "acc_norm": 0.375, + "acc_norm_stderr": 0.04595091388086298 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.38425925925925924, + "acc_stderr": 0.03317354514310742, + "acc_norm": 0.38425925925925924, + "acc_norm_stderr": 0.03317354514310742 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2581005586592179, + "acc_stderr": 0.01463518561652784, + "acc_norm": 0.2581005586592179, + 
"acc_norm_stderr": 0.01463518561652784 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3235294117647059, + "acc_stderr": 0.028418208619406787, + "acc_norm": 0.3235294117647059, + "acc_norm_stderr": 0.028418208619406787 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4897959183673469, + "acc_stderr": 0.03200255347893782, + "acc_norm": 0.4897959183673469, + "acc_norm_stderr": 0.03200255347893782 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.569620253164557, + "acc_stderr": 0.032230171959376, + "acc_norm": 0.569620253164557, + "acc_norm_stderr": 0.032230171959376 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.318122555410691, + "acc_stderr": 0.011895407281104104, + "acc_norm": 0.318122555410691, + "acc_norm_stderr": 0.011895407281104104 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4019607843137255, + "acc_stderr": 0.03441190023482465, + "acc_norm": 0.4019607843137255, + "acc_norm_stderr": 0.03441190023482465 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.45454545454545453, + "acc_stderr": 0.03888176921674098, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.03888176921674098 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.26193390452876375, + "mc1_stderr": 0.015392118805015021, + "mc2": 0.4379686054133816, + "mc2_stderr": 0.015396278996687385 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4923258559622196, + "acc_stderr": 0.017188329219654276, + "acc_norm": 0.5419126328217237, + "acc_norm_stderr": 0.017129852117911147 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + 
"harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + 
"harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "AIdenU/Mistral-7b-ko-Y24-DPO_v0.1", + "model_sha": "78813fb52898d37d6c0637b7fb93eb2c5bc23f55", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/AIdenU/Mistral-7b-ko-Y24_v0.1/result_2023-12-21 04:19:12.json b/AIdenU/Mistral-7b-ko-Y24_v0.1/result_2023-12-21 04:19:12.json new file mode 100644 index 0000000000000000000000000000000000000000..5f45c6f39f933c90f04316c6b6172f95d390a7ed --- /dev/null +++ b/AIdenU/Mistral-7b-ko-Y24_v0.1/result_2023-12-21 04:19:12.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3506825938566553, + "acc_stderr": 0.01394463593072609, + "acc_norm": 0.39590443686006827, + "acc_norm_stderr": 0.014291228393536588 + }, + "harness|ko_hellaswag|10": { + "acc": 0.37870942043417644, + "acc_stderr": 0.004840742206718092, + "acc_norm": 0.4885480979884485, + "acc_norm_stderr": 0.0049884724594180295 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.45614035087719296, + "acc_stderr": 0.03820042586602967, + "acc_norm": 0.45614035087719296, + "acc_norm_stderr": 0.03820042586602967 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5631067961165048, + "acc_stderr": 0.04911147107365777, + "acc_norm": 0.5631067961165048, + 
"acc_norm_stderr": 0.04911147107365777 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5044699872286079, + "acc_stderr": 0.017879248970584353, + "acc_norm": 0.5044699872286079, + "acc_norm_stderr": 0.017879248970584353 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.35555555555555557, + "acc_stderr": 0.04135176749720386, + "acc_norm": 0.35555555555555557, + "acc_norm_stderr": 0.04135176749720386 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768077, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768077 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4425531914893617, + "acc_stderr": 0.03246956919789958, + "acc_norm": 0.4425531914893617, + "acc_norm_stderr": 0.03246956919789958 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.43373493975903615, + "acc_stderr": 0.03858158940685515, + "acc_norm": 0.43373493975903615, + "acc_norm_stderr": 0.03858158940685515 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4790996784565916, + "acc_stderr": 0.028373270961069414, + "acc_norm": 0.4790996784565916, + "acc_norm_stderr": 0.028373270961069414 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4798206278026906, + "acc_stderr": 0.033530461674123, + "acc_norm": 0.4798206278026906, + "acc_norm_stderr": 0.033530461674123 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5190839694656488, + "acc_stderr": 0.04382094705550988, + "acc_norm": 0.5190839694656488, + "acc_norm_stderr": 0.04382094705550988 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.03547601494006937, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.03547601494006937 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.46206896551724136, + "acc_stderr": 0.041546596717075474, + "acc_norm": 0.46206896551724136, 
+ "acc_norm_stderr": 0.041546596717075474 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.043364327079931785, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.043364327079931785 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.49159663865546216, + "acc_stderr": 0.03247390276569669, + "acc_norm": 0.49159663865546216, + "acc_norm_stderr": 0.03247390276569669 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4205128205128205, + "acc_stderr": 0.025028610276710855, + "acc_norm": 0.4205128205128205, + "acc_norm_stderr": 0.025028610276710855 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.53, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.53, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5370370370370371, + "acc_stderr": 0.04820403072760627, + "acc_norm": 0.5370370370370371, + "acc_norm_stderr": 0.04820403072760627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4088669950738916, + "acc_stderr": 0.034590588158832314, + "acc_norm": 0.4088669950738916, + "acc_norm_stderr": 0.034590588158832314 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4774193548387097, + "acc_stderr": 0.028414985019707868, + "acc_norm": 0.4774193548387097, + "acc_norm_stderr": 0.028414985019707868 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7307692307692307, + "acc_stderr": 0.029058588303748845, + "acc_norm": 0.7307692307692307, + "acc_norm_stderr": 0.029058588303748845 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.41509433962264153, + "acc_stderr": 0.030325945789286105, + "acc_norm": 0.41509433962264153, + "acc_norm_stderr": 0.030325945789286105 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5181818181818182, + "acc_stderr": 
0.04785964010794915, + "acc_norm": 0.5181818181818182, + "acc_norm_stderr": 0.04785964010794915 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.29259259259259257, + "acc_stderr": 0.02773896963217609, + "acc_norm": 0.29259259259259257, + "acc_norm_stderr": 0.02773896963217609 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.26490066225165565, + "acc_stderr": 0.03603038545360384, + "acc_norm": 0.26490066225165565, + "acc_norm_stderr": 0.03603038545360384 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5920398009950248, + "acc_stderr": 0.03475116365194092, + "acc_norm": 0.5920398009950248, + "acc_norm_stderr": 0.03475116365194092 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3236994219653179, + "acc_stderr": 0.0356760379963917, + "acc_norm": 0.3236994219653179, + "acc_norm_stderr": 0.0356760379963917 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.36243386243386244, + "acc_stderr": 0.024757473902752052, + "acc_norm": 0.36243386243386244, + "acc_norm_stderr": 0.024757473902752052 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3125, + "acc_stderr": 0.038760854559127644, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.038760854559127644 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.65, + "acc_stderr": 0.047937248544110175, + "acc_norm": 0.65, + "acc_norm_stderr": 0.047937248544110175 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4884393063583815, + "acc_stderr": 0.026911898686377913, + "acc_norm": 0.4884393063583815, + "acc_norm_stderr": 0.026911898686377913 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.44785276073619634, + "acc_stderr": 0.03906947479456601, + "acc_norm": 0.44785276073619634, + "acc_norm_stderr": 0.03906947479456601 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4660493827160494, + 
"acc_stderr": 0.027756535257347666, + "acc_norm": 0.4660493827160494, + "acc_norm_stderr": 0.027756535257347666 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5233160621761658, + "acc_stderr": 0.03604513672442202, + "acc_norm": 0.5233160621761658, + "acc_norm_stderr": 0.03604513672442202 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3508771929824561, + "acc_stderr": 0.04489539350270698, + "acc_norm": 0.3508771929824561, + "acc_norm_stderr": 0.04489539350270698 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.4972477064220184, + "acc_stderr": 0.021436998359765317, + "acc_norm": 0.4972477064220184, + "acc_norm_stderr": 0.021436998359765317 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3253968253968254, + "acc_stderr": 0.04190596438871136, + "acc_norm": 0.3253968253968254, + "acc_norm_stderr": 0.04190596438871136 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.028431095444176643, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.028431095444176643 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6694214876033058, + "acc_stderr": 0.04294340845212095, + "acc_norm": 0.6694214876033058, + "acc_norm_stderr": 0.04294340845212095 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4276315789473684, + "acc_stderr": 0.04026097083296558, + "acc_norm": 0.4276315789473684, + "acc_norm_stderr": 0.04026097083296558 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.40032679738562094, + "acc_stderr": 0.019821843688271768, + "acc_norm": 0.40032679738562094, + "acc_norm_stderr": 0.019821843688271768 + }, + 
"harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3617021276595745, + "acc_stderr": 0.028663820147199492, + "acc_norm": 0.3617021276595745, + "acc_norm_stderr": 0.028663820147199492 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.36607142857142855, + "acc_stderr": 0.0457237235873743, + "acc_norm": 0.36607142857142855, + "acc_norm_stderr": 0.0457237235873743 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.39351851851851855, + "acc_stderr": 0.03331747876370312, + "acc_norm": 0.39351851851851855, + "acc_norm_stderr": 0.03331747876370312 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2558659217877095, + "acc_stderr": 0.014593620923210746, + "acc_norm": 0.2558659217877095, + "acc_norm_stderr": 0.014593620923210746 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.57, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.57, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.33455882352941174, + "acc_stderr": 0.028661996202335317, + "acc_norm": 0.33455882352941174, + "acc_norm_stderr": 0.028661996202335317 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5224489795918368, + "acc_stderr": 0.03197694118713672, + "acc_norm": 0.5224489795918368, + "acc_norm_stderr": 0.03197694118713672 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5654008438818565, + "acc_stderr": 0.03226759995510145, + "acc_norm": 0.5654008438818565, + "acc_norm_stderr": 0.03226759995510145 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3213820078226858, + "acc_stderr": 0.011927581352265076, + "acc_norm": 0.3213820078226858, + "acc_norm_stderr": 0.011927581352265076 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4215686274509804, + "acc_stderr": 0.03465868196380757, + 
"acc_norm": 0.4215686274509804, + "acc_norm_stderr": 0.03465868196380757 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4727272727272727, + "acc_stderr": 0.03898531605579419, + "acc_norm": 0.4727272727272727, + "acc_norm_stderr": 0.03898531605579419 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2668298653610771, + "mc1_stderr": 0.015483691939237272, + "mc2": 0.4338150951405425, + "mc2_stderr": 0.01536129905959147 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4982290436835891, + "acc_stderr": 0.01719024627623186, + "acc_norm": 0.5489964580873672, + "acc_norm_stderr": 0.01710761885954935 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + 
"harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "AIdenU/Mistral-7b-ko-Y24_v0.1", + "model_sha": "a0b72b81f985f7fb06695cae82877ca482947dbf", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/AIdenU/SOLAR-10.7b-ko-Y24_v1.0/result_2024-03-18 01:07:12.json b/AIdenU/SOLAR-10.7b-ko-Y24_v1.0/result_2024-03-18 01:07:12.json new file mode 100644 index 0000000000000000000000000000000000000000..ac62b1093881b637b0affd2b444d9e47e33126db --- /dev/null +++ b/AIdenU/SOLAR-10.7b-ko-Y24_v1.0/result_2024-03-18 
01:07:12.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4197952218430034, + "acc_stderr": 0.01442218122630303, + "acc_norm": 0.48378839590443684, + "acc_norm_stderr": 0.014603708567414947 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4303923521210914, + "acc_stderr": 0.0049411916073179105, + "acc_norm": 0.5866361282613025, + "acc_norm_stderr": 0.004914305798575694 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6608187134502924, + "acc_stderr": 0.03631053496488905, + "acc_norm": 0.6608187134502924, + "acc_norm_stderr": 0.03631053496488905 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6310679611650486, + "acc_stderr": 0.0477761518115674, + "acc_norm": 0.6310679611650486, + "acc_norm_stderr": 0.0477761518115674 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6564495530012772, + "acc_stderr": 0.01698214563265247, + "acc_norm": 0.6564495530012772, + "acc_norm_stderr": 0.01698214563265247 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4148148148148148, + "acc_stderr": 0.04256193767901406, + "acc_norm": 0.4148148148148148, + "acc_norm_stderr": 0.04256193767901406 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.49361702127659574, + "acc_stderr": 0.032683358999363386, + "acc_norm": 0.49361702127659574, + "acc_norm_stderr": 0.032683358999363386 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4819277108433735, + "acc_stderr": 0.03889951252827216, + "acc_norm": 0.4819277108433735, + "acc_norm_stderr": 0.03889951252827216 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6045016077170418, + "acc_stderr": 0.027770918531427838, + "acc_norm": 0.6045016077170418, + "acc_norm_stderr": 0.027770918531427838 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.547085201793722, + "acc_stderr": 0.03340867501923324, + "acc_norm": 0.547085201793722, + 
"acc_norm_stderr": 0.03340867501923324 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6335877862595419, + "acc_stderr": 0.04225875451969638, + "acc_norm": 0.6335877862595419, + "acc_norm_stderr": 0.04225875451969638 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7424242424242424, + "acc_stderr": 0.03115626951964684, + "acc_norm": 0.7424242424242424, + "acc_norm_stderr": 0.03115626951964684 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5310344827586206, + "acc_stderr": 0.04158632762097828, + "acc_norm": 0.5310344827586206, + "acc_norm_stderr": 0.04158632762097828 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.04617034827006716, + "acc_norm": 0.3137254901960784, + "acc_norm_stderr": 0.04617034827006716 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5840336134453782, + "acc_stderr": 0.0320165010073961, + "acc_norm": 0.5840336134453782, + "acc_norm_stderr": 0.0320165010073961 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5025641025641026, + "acc_stderr": 0.025350672979412184, + "acc_norm": 0.5025641025641026, + "acc_norm_stderr": 0.025350672979412184 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6296296296296297, + "acc_stderr": 0.04668408033024931, + "acc_norm": 0.6296296296296297, + "acc_norm_stderr": 0.04668408033024931 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.37438423645320196, + "acc_stderr": 0.03405155380561952, + "acc_norm": 
0.37438423645320196, + "acc_norm_stderr": 0.03405155380561952 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6225806451612903, + "acc_stderr": 0.027575960723278243, + "acc_norm": 0.6225806451612903, + "acc_norm_stderr": 0.027575960723278243 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.811965811965812, + "acc_stderr": 0.02559819368665225, + "acc_norm": 0.811965811965812, + "acc_norm_stderr": 0.02559819368665225 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5358490566037736, + "acc_stderr": 0.030693675018458003, + "acc_norm": 0.5358490566037736, + "acc_norm_stderr": 0.030693675018458003 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5909090909090909, + "acc_stderr": 0.04709306978661895, + "acc_norm": 0.5909090909090909, + "acc_norm_stderr": 0.04709306978661895 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.028742040903948485, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.028742040903948485 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.36423841059602646, + "acc_stderr": 0.03929111781242741, + "acc_norm": 0.36423841059602646, + "acc_norm_stderr": 0.03929111781242741 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6915422885572139, + "acc_stderr": 0.03265819588512697, + "acc_norm": 0.6915422885572139, + "acc_norm_stderr": 0.03265819588512697 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5086705202312138, + "acc_stderr": 0.038118909889404126, + "acc_norm": 0.5086705202312138, + "acc_norm_stderr": 0.038118909889404126 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.36507936507936506, + "acc_stderr": 0.024796060602699947, + "acc_norm": 0.36507936507936506, + "acc_norm_stderr": 0.024796060602699947 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.04174752578923185, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.04174752578923185 + }, + 
"harness|ko_mmlu_college_chemistry|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.72, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.72, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.6040462427745664, + "acc_stderr": 0.02632981334194624, + "acc_norm": 0.6040462427745664, + "acc_norm_stderr": 0.02632981334194624 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5398773006134969, + "acc_stderr": 0.03915857291436972, + "acc_norm": 0.5398773006134969, + "acc_norm_stderr": 0.03915857291436972 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6203703703703703, + "acc_stderr": 0.027002521034516478, + "acc_norm": 0.6203703703703703, + "acc_norm_stderr": 0.027002521034516478 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7253886010362695, + "acc_stderr": 0.03221024508041154, + "acc_norm": 0.7253886010362695, + "acc_norm_stderr": 0.03221024508041154 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.43859649122807015, + "acc_stderr": 0.04668000738510455, + "acc_norm": 0.43859649122807015, + "acc_norm_stderr": 0.04668000738510455 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6385321100917432, + "acc_stderr": 0.02059808200993737, + "acc_norm": 0.6385321100917432, + "acc_norm_stderr": 0.02059808200993737 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3492063492063492, + "acc_stderr": 0.04263906892795133, + "acc_norm": 0.3492063492063492, + "acc_norm_stderr": 0.04263906892795133 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5359477124183006, + "acc_stderr": 0.028555827516528777, + "acc_norm": 0.5359477124183006, + "acc_norm_stderr": 0.028555827516528777 + }, + 
"harness|ko_mmlu_business_ethics|5": { + "acc": 0.55, + "acc_stderr": 0.049999999999999996, + "acc_norm": 0.55, + "acc_norm_stderr": 0.049999999999999996 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7272727272727273, + "acc_stderr": 0.04065578140908705, + "acc_norm": 0.7272727272727273, + "acc_norm_stderr": 0.04065578140908705 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5723684210526315, + "acc_stderr": 0.04026097083296564, + "acc_norm": 0.5723684210526315, + "acc_norm_stderr": 0.04026097083296564 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.477124183006536, + "acc_stderr": 0.020206653187884782, + "acc_norm": 0.477124183006536, + "acc_norm_stderr": 0.020206653187884782 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.40070921985815605, + "acc_stderr": 0.029233465745573083, + "acc_norm": 0.40070921985815605, + "acc_norm_stderr": 0.029233465745573083 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.38392857142857145, + "acc_stderr": 0.046161430750285455, + "acc_norm": 0.38392857142857145, + "acc_norm_stderr": 0.046161430750285455 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5231481481481481, + "acc_stderr": 0.034063153607115065, + "acc_norm": 0.5231481481481481, + "acc_norm_stderr": 0.034063153607115065 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.19553072625698323, + "acc_stderr": 0.013264579220945106, + "acc_norm": 0.19553072625698323, + "acc_norm_stderr": 0.013264579220945106 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.68, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.68, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5147058823529411, + "acc_stderr": 0.03035969707904612, + "acc_norm": 0.5147058823529411, + 
"acc_norm_stderr": 0.03035969707904612 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6326530612244898, + "acc_stderr": 0.03086214492108757, + "acc_norm": 0.6326530612244898, + "acc_norm_stderr": 0.03086214492108757 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7088607594936709, + "acc_stderr": 0.029571601065753374, + "acc_norm": 0.7088607594936709, + "acc_norm_stderr": 0.029571601065753374 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.4041720990873533, + "acc_stderr": 0.012533504046491367, + "acc_norm": 0.4041720990873533, + "acc_norm_stderr": 0.012533504046491367 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6421568627450981, + "acc_stderr": 0.03364487286088298, + "acc_norm": 0.6421568627450981, + "acc_norm_stderr": 0.03364487286088298 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6545454545454545, + "acc_stderr": 0.03713158067481914, + "acc_norm": 0.6545454545454545, + "acc_norm_stderr": 0.03713158067481914 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2533659730722154, + "mc1_stderr": 0.015225899340826837, + "mc2": 0.4116463111717996, + "mc2_stderr": 0.014902796745251457 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.43565525383707204, + "acc_stderr": 0.017047415229476327, + "acc_norm": 0.4935064935064935, + "acc_norm_stderr": 0.01718890435907731 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 
1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + 
"harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "AIdenU/SOLAR-10.7b-ko-Y24_v1.0", + "model_sha": "12af074c34713e89135226b12f6f59d2036234c4", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/AIdenU/SOLAR-10b-ko-Y24_v0.1/result_2024-03-05 05:52:20.json b/AIdenU/SOLAR-10b-ko-Y24_v0.1/result_2024-03-05 05:52:20.json new file mode 100644 index 0000000000000000000000000000000000000000..fd4160830a0123da28545872ce567d5f98beb26c --- /dev/null +++ b/AIdenU/SOLAR-10b-ko-Y24_v0.1/result_2024-03-05 05:52:20.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3660409556313993, + "acc_stderr": 0.014077223108470134, + "acc_norm": 0.43856655290102387, + "acc_norm_stderr": 0.014500682618212867 + }, + "harness|ko_hellaswag|10": { + "acc": 0.38856801433977295, + "acc_stderr": 0.004864286176731826, + "acc_norm": 0.5195180242979486, + "acc_norm_stderr": 0.004985978214937916 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5906432748538012, + "acc_stderr": 0.03771283107626545, + "acc_norm": 0.5906432748538012, + "acc_norm_stderr": 0.03771283107626545 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5922330097087378, + "acc_stderr": 0.04865777570410769, + "acc_norm": 0.5922330097087378, + "acc_norm_stderr": 0.04865777570410769 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.611749680715198, + "acc_stderr": 0.017427673295544305, + "acc_norm": 0.611749680715198, + "acc_norm_stderr": 0.017427673295544305 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.04292596718256981, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.04292596718256981 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 
0.0446196043338474 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.43829787234042555, + "acc_stderr": 0.03243618636108102, + "acc_norm": 0.43829787234042555, + "acc_norm_stderr": 0.03243618636108102 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.46987951807228917, + "acc_stderr": 0.03885425420866766, + "acc_norm": 0.46987951807228917, + "acc_norm_stderr": 0.03885425420866766 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5884244372990354, + "acc_stderr": 0.02795048149440127, + "acc_norm": 0.5884244372990354, + "acc_norm_stderr": 0.02795048149440127 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5112107623318386, + "acc_stderr": 0.033549366530984746, + "acc_norm": 0.5112107623318386, + "acc_norm_stderr": 0.033549366530984746 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6183206106870229, + "acc_stderr": 0.04260735157644561, + "acc_norm": 0.6183206106870229, + "acc_norm_stderr": 0.04260735157644561 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6919191919191919, + "acc_stderr": 0.03289477330098614, + "acc_norm": 0.6919191919191919, + "acc_norm_stderr": 0.03289477330098614 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5241379310344828, + "acc_stderr": 0.041618085035015295, + "acc_norm": 0.5241379310344828, + "acc_norm_stderr": 0.041618085035015295 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.04440521906179328, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.04440521906179328 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6428571428571429, + "acc_stderr": 0.031124619309328177, + "acc_norm": 0.6428571428571429, + "acc_norm_stderr": 0.031124619309328177 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4897435897435897, + "acc_stderr": 
0.025345672221942374, + "acc_norm": 0.4897435897435897, + "acc_norm_stderr": 0.025345672221942374 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6481481481481481, + "acc_stderr": 0.04616631111801714, + "acc_norm": 0.6481481481481481, + "acc_norm_stderr": 0.04616631111801714 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4039408866995074, + "acc_stderr": 0.03452453903822039, + "acc_norm": 0.4039408866995074, + "acc_norm_stderr": 0.03452453903822039 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5870967741935483, + "acc_stderr": 0.028009138125400384, + "acc_norm": 0.5870967741935483, + "acc_norm_stderr": 0.028009138125400384 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7649572649572649, + "acc_stderr": 0.02777883590493543, + "acc_norm": 0.7649572649572649, + "acc_norm_stderr": 0.02777883590493543 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.49056603773584906, + "acc_stderr": 0.030767394707808093, + "acc_norm": 0.49056603773584906, + "acc_norm_stderr": 0.030767394707808093 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5909090909090909, + "acc_stderr": 0.04709306978661895, + "acc_norm": 0.5909090909090909, + "acc_norm_stderr": 0.04709306978661895 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.35555555555555557, + "acc_stderr": 0.029185714949857406, + "acc_norm": 0.35555555555555557, + "acc_norm_stderr": 0.029185714949857406 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.03802039760107903, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.03802039760107903 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 
0.6318407960199005, + "acc_stderr": 0.03410410565495301, + "acc_norm": 0.6318407960199005, + "acc_norm_stderr": 0.03410410565495301 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4624277456647399, + "acc_stderr": 0.0380168510452446, + "acc_norm": 0.4624277456647399, + "acc_norm_stderr": 0.0380168510452446 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3915343915343915, + "acc_stderr": 0.025138091388851105, + "acc_norm": 0.3915343915343915, + "acc_norm_stderr": 0.025138091388851105 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4583333333333333, + "acc_stderr": 0.04166666666666665, + "acc_norm": 0.4583333333333333, + "acc_norm_stderr": 0.04166666666666665 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.69, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.69, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.6098265895953757, + "acc_stderr": 0.026261677607806642, + "acc_norm": 0.6098265895953757, + "acc_norm_stderr": 0.026261677607806642 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5460122699386503, + "acc_stderr": 0.0391170190467718, + "acc_norm": 0.5460122699386503, + "acc_norm_stderr": 0.0391170190467718 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5432098765432098, + "acc_stderr": 0.027716661650194038, + "acc_norm": 0.5432098765432098, + "acc_norm_stderr": 0.027716661650194038 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6580310880829016, + "acc_stderr": 0.034234651001042844, + "acc_norm": 0.6580310880829016, + "acc_norm_stderr": 0.034234651001042844 + }, + "harness|ko_mmlu_econometrics|5": { 
+ "acc": 0.45614035087719296, + "acc_stderr": 0.046854730419077895, + "acc_norm": 0.45614035087719296, + "acc_norm_stderr": 0.046854730419077895 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6458715596330276, + "acc_stderr": 0.0205047290138291, + "acc_norm": 0.6458715596330276, + "acc_norm_stderr": 0.0205047290138291 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.42063492063492064, + "acc_stderr": 0.04415438226743743, + "acc_norm": 0.42063492063492064, + "acc_norm_stderr": 0.04415438226743743 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5032679738562091, + "acc_stderr": 0.028629305194003543, + "acc_norm": 0.5032679738562091, + "acc_norm_stderr": 0.028629305194003543 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7024793388429752, + "acc_stderr": 0.04173349148083499, + "acc_norm": 0.7024793388429752, + "acc_norm_stderr": 0.04173349148083499 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5526315789473685, + "acc_stderr": 0.0404633688397825, + "acc_norm": 0.5526315789473685, + "acc_norm_stderr": 0.0404633688397825 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.46568627450980393, + "acc_stderr": 0.02018014484330729, + "acc_norm": 0.46568627450980393, + "acc_norm_stderr": 0.02018014484330729 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.34397163120567376, + "acc_stderr": 0.028338017428611317, + "acc_norm": 0.34397163120567376, + "acc_norm_stderr": 0.028338017428611317 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.04547960999764376, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.04547960999764376 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5046296296296297, + "acc_stderr": 0.03409825519163572, + "acc_norm": 0.5046296296296297, + "acc_norm_stderr": 
0.03409825519163572 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.19329608938547485, + "acc_stderr": 0.013206868561343236, + "acc_norm": 0.19329608938547485, + "acc_norm_stderr": 0.013206868561343236 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.64, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.64, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.46691176470588236, + "acc_stderr": 0.030306257722468314, + "acc_norm": 0.46691176470588236, + "acc_norm_stderr": 0.030306257722468314 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6244897959183674, + "acc_stderr": 0.031001209039894843, + "acc_norm": 0.6244897959183674, + "acc_norm_stderr": 0.031001209039894843 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6540084388185654, + "acc_stderr": 0.03096481058878671, + "acc_norm": 0.6540084388185654, + "acc_norm_stderr": 0.03096481058878671 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3917861799217731, + "acc_stderr": 0.012467564418145127, + "acc_norm": 0.3917861799217731, + "acc_norm_stderr": 0.012467564418145127 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5686274509803921, + "acc_stderr": 0.034760990605016355, + "acc_norm": 0.5686274509803921, + "acc_norm_stderr": 0.034760990605016355 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6181818181818182, + "acc_stderr": 0.03793713171165634, + "acc_norm": 0.6181818181818182, + "acc_norm_stderr": 0.03793713171165634 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2521419828641371, + "mc1_stderr": 0.01520152224629995, + "mc2": 0.4077067414481265, + "mc2_stderr": 0.015554571222918666 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4911452184179457, + "acc_stderr": 0.017187658199336743, + "acc_norm": 
0.5419126328217237, + "acc_norm_stderr": 0.017129852117911147 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + 
"harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "AIdenU/SOLAR-10b-ko-Y24_v0.1", + "model_sha": "f6dfd1940d6721300b496f25914d633d405b7740", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Alibaba-NLP/gte-Qwen2-7B-instruct/result_2024-07-16 18:10:30.json b/Alibaba-NLP/gte-Qwen2-7B-instruct/result_2024-07-16 18:10:30.json new file mode 100644 index 0000000000000000000000000000000000000000..6147a430e6caaf10b1c90b5a3541da562b9cef69 --- /dev/null +++ b/Alibaba-NLP/gte-Qwen2-7B-instruct/result_2024-07-16 18:10:30.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.29266211604095566, + "acc_stderr": 0.013295916103619411, + "acc_norm": 0.36689419795221845, + "acc_norm_stderr": 0.014084133118104303 + }, + "harness|ko_hellaswag|10": { + "acc": 0.2984465245966939, + "acc_stderr": 0.004566412808642455, + "acc_norm": 0.3603863772156941, + "acc_norm_stderr": 0.004791313101877055 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.036155076303109344, + "acc_norm": 0.6666666666666666, + 
"acc_norm_stderr": 0.036155076303109344 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.7572815533980582, + "acc_stderr": 0.04245022486384493, + "acc_norm": 0.7572815533980582, + "acc_norm_stderr": 0.04245022486384493 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6143039591315453, + "acc_stderr": 0.01740647661921291, + "acc_norm": 0.6143039591315453, + "acc_norm_stderr": 0.01740647661921291 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45925925925925926, + "acc_stderr": 0.04304979692464243, + "acc_norm": 0.45925925925925926, + "acc_norm_stderr": 0.04304979692464243 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.5319148936170213, + "acc_stderr": 0.03261936918467381, + "acc_norm": 0.5319148936170213, + "acc_norm_stderr": 0.03261936918467381 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42771084337349397, + "acc_stderr": 0.03851597683718533, + "acc_norm": 0.42771084337349397, + "acc_norm_stderr": 0.03851597683718533 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5691318327974276, + "acc_stderr": 0.028125340983972718, + "acc_norm": 0.5691318327974276, + "acc_norm_stderr": 0.028125340983972718 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.515695067264574, + "acc_stderr": 0.0335412657542081, + "acc_norm": 0.515695067264574, + "acc_norm_stderr": 0.0335412657542081 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6106870229007634, + "acc_stderr": 0.04276486542814591, + "acc_norm": 0.6106870229007634, + "acc_norm_stderr": 0.04276486542814591 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.48, + "acc_stderr": 0.05021167315686779, + "acc_norm": 0.48, + "acc_norm_stderr": 0.05021167315686779 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.702020202020202, + "acc_stderr": 0.03258630383836556, + "acc_norm": 0.702020202020202, + 
"acc_norm_stderr": 0.03258630383836556 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5586206896551724, + "acc_stderr": 0.04137931034482757, + "acc_norm": 0.5586206896551724, + "acc_norm_stderr": 0.04137931034482757 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.37254901960784315, + "acc_stderr": 0.04810840148082636, + "acc_norm": 0.37254901960784315, + "acc_norm_stderr": 0.04810840148082636 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6008403361344538, + "acc_stderr": 0.03181110032413926, + "acc_norm": 0.6008403361344538, + "acc_norm_stderr": 0.03181110032413926 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.6051282051282051, + "acc_stderr": 0.024784316942156413, + "acc_norm": 0.6051282051282051, + "acc_norm_stderr": 0.024784316942156413 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.67, + "acc_stderr": 0.04725815626252611, + "acc_norm": 0.67, + "acc_norm_stderr": 0.04725815626252611 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6481481481481481, + "acc_stderr": 0.04616631111801713, + "acc_norm": 0.6481481481481481, + "acc_norm_stderr": 0.04616631111801713 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.5615763546798029, + "acc_stderr": 0.03491207857486519, + "acc_norm": 0.5615763546798029, + "acc_norm_stderr": 0.03491207857486519 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6129032258064516, + "acc_stderr": 0.0277093596750325, + "acc_norm": 0.6129032258064516, + "acc_norm_stderr": 0.0277093596750325 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.811965811965812, + "acc_stderr": 0.025598193686652244, + "acc_norm": 0.811965811965812, + "acc_norm_stderr": 0.025598193686652244 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5509433962264151, + "acc_stderr": 0.030612730713641092, 
+ "acc_norm": 0.5509433962264151, + "acc_norm_stderr": 0.030612730713641092 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5636363636363636, + "acc_stderr": 0.04750185058907297, + "acc_norm": 0.5636363636363636, + "acc_norm_stderr": 0.04750185058907297 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.40370370370370373, + "acc_stderr": 0.029914812342227624, + "acc_norm": 0.40370370370370373, + "acc_norm_stderr": 0.029914812342227624 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3708609271523179, + "acc_stderr": 0.03943966699183629, + "acc_norm": 0.3708609271523179, + "acc_norm_stderr": 0.03943966699183629 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6716417910447762, + "acc_stderr": 0.033206858897443244, + "acc_norm": 0.6716417910447762, + "acc_norm_stderr": 0.033206858897443244 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5433526011560693, + "acc_stderr": 0.03798106566014498, + "acc_norm": 0.5433526011560693, + "acc_norm_stderr": 0.03798106566014498 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.544973544973545, + "acc_stderr": 0.02564692836104939, + "acc_norm": 0.544973544973545, + "acc_norm_stderr": 0.02564692836104939 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.04174752578923183, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.04174752578923183 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.69, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.69, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5635838150289018, + "acc_stderr": 0.026700545424943684, + "acc_norm": 0.5635838150289018, + "acc_norm_stderr": 0.026700545424943684 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5644171779141104, + 
"acc_stderr": 0.03895632464138938, + "acc_norm": 0.5644171779141104, + "acc_norm_stderr": 0.03895632464138938 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5987654320987654, + "acc_stderr": 0.027272582849839803, + "acc_norm": 0.5987654320987654, + "acc_norm_stderr": 0.027272582849839803 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6528497409326425, + "acc_stderr": 0.03435696168361355, + "acc_norm": 0.6528497409326425, + "acc_norm_stderr": 0.03435696168361355 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.42105263157894735, + "acc_stderr": 0.046446020912223177, + "acc_norm": 0.42105263157894735, + "acc_norm_stderr": 0.046446020912223177 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6972477064220184, + "acc_stderr": 0.01969871143475635, + "acc_norm": 0.6972477064220184, + "acc_norm_stderr": 0.01969871143475635 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.46825396825396826, + "acc_stderr": 0.044631127206771704, + "acc_norm": 0.46825396825396826, + "acc_norm_stderr": 0.044631127206771704 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5784313725490197, + "acc_stderr": 0.028275490156791462, + "acc_norm": 0.5784313725490197, + "acc_norm_stderr": 0.028275490156791462 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.61, + "acc_stderr": 0.049020713000019756, + "acc_norm": 0.61, + "acc_norm_stderr": 0.049020713000019756 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7107438016528925, + "acc_stderr": 0.04139112727635463, + "acc_norm": 0.7107438016528925, + "acc_norm_stderr": 0.04139112727635463 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.6513157894736842, + "acc_stderr": 0.03878139888797611, + "acc_norm": 0.6513157894736842, + "acc_norm_stderr": 0.03878139888797611 + }, + 
"harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4934640522875817, + "acc_stderr": 0.020226106567657814, + "acc_norm": 0.4934640522875817, + "acc_norm_stderr": 0.020226106567657814 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.375886524822695, + "acc_stderr": 0.028893955412115886, + "acc_norm": 0.375886524822695, + "acc_norm_stderr": 0.028893955412115886 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.04697113923010213, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.04697113923010213 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5601851851851852, + "acc_stderr": 0.03385177976044811, + "acc_norm": 0.5601851851851852, + "acc_norm_stderr": 0.03385177976044811 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.3787709497206704, + "acc_stderr": 0.01622353351036513, + "acc_norm": 0.3787709497206704, + "acc_norm_stderr": 0.01622353351036513 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.68, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.68, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.45588235294117646, + "acc_stderr": 0.03025437257397669, + "acc_norm": 0.45588235294117646, + "acc_norm_stderr": 0.03025437257397669 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6612244897959184, + "acc_stderr": 0.030299506562154188, + "acc_norm": 0.6612244897959184, + "acc_norm_stderr": 0.030299506562154188 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7341772151898734, + "acc_stderr": 0.02875679962965833, + "acc_norm": 0.7341772151898734, + "acc_norm_stderr": 0.02875679962965833 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.38265971316818775, + "acc_stderr": 0.012413595882893279, + 
"acc_norm": 0.38265971316818775, + "acc_norm_stderr": 0.012413595882893279 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6274509803921569, + "acc_stderr": 0.03393388584958404, + "acc_norm": 0.6274509803921569, + "acc_norm_stderr": 0.03393388584958404 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6303030303030303, + "acc_stderr": 0.03769430314512569, + "acc_norm": 0.6303030303030303, + "acc_norm_stderr": 0.03769430314512569 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2582619339045288, + "mc1_stderr": 0.0153218216884762, + "mc2": 0.49811141402996134, + "mc2_stderr": 0.01690674036318429 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3234946871310508, + "acc_stderr": 0.01608362729048367, + "acc_norm": 0.4946871310507674, + "acc_norm_stderr": 0.01718938362722969 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Alibaba-NLP/gte-Qwen2-7B-instruct", + "model_sha": "e26182b2122f4435e8b3ebecbf363990f409b45b", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Alphacode-AI-Team/Alpha-LLM-Mistral7B-v2/result_2024-04-04 06:59:03.json b/Alphacode-AI-Team/Alpha-LLM-Mistral7B-v2/result_2024-04-04 
06:59:03.json new file mode 100644 index 0000000000000000000000000000000000000000..28e60f4816323f42e5e9e23f565691a879b2816c --- /dev/null +++ b/Alphacode-AI-Team/Alpha-LLM-Mistral7B-v2/result_2024-04-04 06:59:03.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4761092150170648, + "acc_stderr": 0.014594701798071657, + "acc_norm": 0.515358361774744, + "acc_norm_stderr": 0.014604496129394916 + }, + "harness|ko_hellaswag|10": { + "acc": 0.5443138816968731, + "acc_stderr": 0.004970145708188002, + "acc_norm": 0.6666998605855408, + "acc_norm_stderr": 0.004704293898729907 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4853801169590643, + "acc_stderr": 0.038331852752130205, + "acc_norm": 0.4853801169590643, + "acc_norm_stderr": 0.038331852752130205 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5825242718446602, + "acc_stderr": 0.048828405482122375, + "acc_norm": 0.5825242718446602, + "acc_norm_stderr": 0.048828405482122375 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5159642401021711, + "acc_stderr": 0.01787084750608174, + "acc_norm": 0.5159642401021711, + "acc_norm_stderr": 0.01787084750608174 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34814814814814815, + "acc_stderr": 0.041153246103369526, + "acc_norm": 0.34814814814814815, + "acc_norm_stderr": 0.041153246103369526 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3829787234042553, + "acc_stderr": 0.03177821250236922, + "acc_norm": 0.3829787234042553, + "acc_norm_stderr": 0.03177821250236922 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4397590361445783, + "acc_stderr": 0.03864139923699121, + "acc_norm": 0.4397590361445783, + "acc_norm_stderr": 0.03864139923699121 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5048231511254019, + "acc_stderr": 0.028396770444111305, + 
"acc_norm": 0.5048231511254019, + "acc_norm_stderr": 0.028396770444111305 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.45739910313901344, + "acc_stderr": 0.03343577705583064, + "acc_norm": 0.45739910313901344, + "acc_norm_stderr": 0.03343577705583064 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.46564885496183206, + "acc_stderr": 0.043749285605997376, + "acc_norm": 0.46564885496183206, + "acc_norm_stderr": 0.043749285605997376 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.53, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.53, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6212121212121212, + "acc_stderr": 0.03456088731993747, + "acc_norm": 0.6212121212121212, + "acc_norm_stderr": 0.03456088731993747 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.46206896551724136, + "acc_stderr": 0.041546596717075474, + "acc_norm": 0.46206896551724136, + "acc_norm_stderr": 0.041546596717075474 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3627450980392157, + "acc_stderr": 0.047840607041056527, + "acc_norm": 0.3627450980392157, + "acc_norm_stderr": 0.047840607041056527 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.592436974789916, + "acc_stderr": 0.03191863374478466, + "acc_norm": 0.592436974789916, + "acc_norm_stderr": 0.03191863374478466 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5307692307692308, + "acc_stderr": 0.025302958890850154, + "acc_norm": 0.5307692307692308, + "acc_norm_stderr": 0.025302958890850154 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5462962962962963, + "acc_stderr": 
0.04812917324536823, + "acc_norm": 0.5462962962962963, + "acc_norm_stderr": 0.04812917324536823 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.03481904844438803, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.03481904844438803 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4774193548387097, + "acc_stderr": 0.02841498501970786, + "acc_norm": 0.4774193548387097, + "acc_norm_stderr": 0.02841498501970786 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6709401709401709, + "acc_stderr": 0.030782321577688173, + "acc_norm": 0.6709401709401709, + "acc_norm_stderr": 0.030782321577688173 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.41132075471698115, + "acc_stderr": 0.030285009259009787, + "acc_norm": 0.41132075471698115, + "acc_norm_stderr": 0.030285009259009787 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.04769300568972744, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.04769300568972744 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2740740740740741, + "acc_stderr": 0.027195934804085626, + "acc_norm": 0.2740740740740741, + "acc_norm_stderr": 0.027195934804085626 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.037579499229433426, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.037579499229433426 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5870646766169154, + "acc_stderr": 0.03481520803367348, + "acc_norm": 0.5870646766169154, + "acc_norm_stderr": 0.03481520803367348 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3583815028901734, + "acc_stderr": 0.036563436533531585, + "acc_norm": 0.3583815028901734, + "acc_norm_stderr": 0.036563436533531585 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3492063492063492, + "acc_stderr": 0.024552292209342665, + "acc_norm": 0.3492063492063492, + "acc_norm_stderr": 
0.024552292209342665 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3958333333333333, + "acc_stderr": 0.04089465449325582, + "acc_norm": 0.3958333333333333, + "acc_norm_stderr": 0.04089465449325582 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.57, + "acc_stderr": 0.04975698519562427, + "acc_norm": 0.57, + "acc_norm_stderr": 0.04975698519562427 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5086705202312138, + "acc_stderr": 0.026915047355369804, + "acc_norm": 0.5086705202312138, + "acc_norm_stderr": 0.026915047355369804 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5030674846625767, + "acc_stderr": 0.03928297078179663, + "acc_norm": 0.5030674846625767, + "acc_norm_stderr": 0.03928297078179663 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.027815973433878014, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.027815973433878014 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5595854922279793, + "acc_stderr": 0.035827245300360945, + "acc_norm": 0.5595854922279793, + "acc_norm_stderr": 0.035827245300360945 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.32456140350877194, + "acc_stderr": 0.044045561573747685, + "acc_norm": 0.32456140350877194, + "acc_norm_stderr": 0.044045561573747685 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5357798165137615, + "acc_stderr": 0.021382364775701893, + "acc_norm": 0.5357798165137615, + "acc_norm_stderr": 0.021382364775701893 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.40476190476190477, + "acc_stderr": 0.043902592653775614, + "acc_norm": 0.40476190476190477, 
+ "acc_norm_stderr": 0.043902592653775614 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.49019607843137253, + "acc_stderr": 0.028624412550167965, + "acc_norm": 0.49019607843137253, + "acc_norm_stderr": 0.028624412550167965 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6694214876033058, + "acc_stderr": 0.04294340845212094, + "acc_norm": 0.6694214876033058, + "acc_norm_stderr": 0.04294340845212094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40789473684210525, + "acc_stderr": 0.03999309712777471, + "acc_norm": 0.40789473684210525, + "acc_norm_stderr": 0.03999309712777471 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.42320261437908496, + "acc_stderr": 0.019987809769482057, + "acc_norm": 0.42320261437908496, + "acc_norm_stderr": 0.019987809769482057 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.29432624113475175, + "acc_stderr": 0.027187127011503796, + "acc_norm": 0.29432624113475175, + "acc_norm_stderr": 0.027187127011503796 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3482142857142857, + "acc_stderr": 0.04521829902833586, + "acc_norm": 0.3482142857142857, + "acc_norm_stderr": 0.04521829902833586 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4212962962962963, + "acc_stderr": 0.033674621388960775, + "acc_norm": 0.4212962962962963, + "acc_norm_stderr": 0.033674621388960775 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2569832402234637, + "acc_stderr": 0.014614465821966348, + "acc_norm": 0.2569832402234637, + "acc_norm_stderr": 0.014614465821966348 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.58, + "acc_stderr": 
0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.43014705882352944, + "acc_stderr": 0.030074971917302875, + "acc_norm": 0.43014705882352944, + "acc_norm_stderr": 0.030074971917302875 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5469387755102041, + "acc_stderr": 0.031867859300041275, + "acc_norm": 0.5469387755102041, + "acc_norm_stderr": 0.031867859300041275 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5780590717299579, + "acc_stderr": 0.032148146302403695, + "acc_norm": 0.5780590717299579, + "acc_norm_stderr": 0.032148146302403695 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.35919165580182527, + "acc_stderr": 0.012253386187584252, + "acc_norm": 0.35919165580182527, + "acc_norm_stderr": 0.012253386187584252 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.46078431372549017, + "acc_stderr": 0.03498501649369527, + "acc_norm": 0.46078431372549017, + "acc_norm_stderr": 0.03498501649369527 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4727272727272727, + "acc_stderr": 0.03898531605579419, + "acc_norm": 0.4727272727272727, + "acc_norm_stderr": 0.03898531605579419 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.42472460220318237, + "mc1_stderr": 0.017304000957167488, + "mc2": 0.5765535731166324, + "mc2_stderr": 0.01654086044742999 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.500590318772137, + "acc_stderr": 0.017190342123448586, + "acc_norm": 0.5336481700118064, + "acc_norm_stderr": 0.017151384117131865 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + 
"harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 
1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Alphacode-AI-Team/Alpha-LLM-Mistral7B-v2", + "model_sha": "f785c18fbe6076e9e9492c4aad0e5a4a46bdc522", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Alphacode-AI-Team/Alpha-LLM-Mistral7B-v3/result_2024-04-06 02:55:52.json b/Alphacode-AI-Team/Alpha-LLM-Mistral7B-v3/result_2024-04-06 02:55:52.json new file mode 100644 index 0000000000000000000000000000000000000000..00069715ce184d84c7927de41ab683179b597e1d --- /dev/null +++ b/Alphacode-AI-Team/Alpha-LLM-Mistral7B-v3/result_2024-04-06 02:55:52.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2167235494880546, + "acc_stderr": 0.01204015671348119, + "acc_norm": 0.25, + "acc_norm_stderr": 0.012653835621466646 + }, + "harness|ko_hellaswag|10": { + "acc": 0.304919338777136, + "acc_stderr": 0.004594323838650339, + "acc_norm": 0.34485162318263296, + "acc_norm_stderr": 0.004743484528346658 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.22807017543859648, + "acc_stderr": 0.03218093795602357, + "acc_norm": 0.22807017543859648, + "acc_norm_stderr": 0.03218093795602357 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.24271844660194175, + "acc_stderr": 0.042450224863844956, + "acc_norm": 0.24271844660194175, + "acc_norm_stderr": 0.042450224863844956 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2937420178799489, + "acc_stderr": 0.01628775938849167, + "acc_norm": 0.2937420178799489, + "acc_norm_stderr": 0.01628775938849167 + }, + "harness|ko_mmlu_anatomy|5": { + 
"acc": 0.2740740740740741, + "acc_stderr": 0.03853254836552003, + "acc_norm": 0.2740740740740741, + "acc_norm_stderr": 0.03853254836552003 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.34893617021276596, + "acc_stderr": 0.031158522131357776, + "acc_norm": 0.34893617021276596, + "acc_norm_stderr": 0.031158522131357776 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.29518072289156627, + "acc_stderr": 0.035509201856896294, + "acc_norm": 0.29518072289156627, + "acc_norm_stderr": 0.035509201856896294 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.31189710610932475, + "acc_stderr": 0.02631185807185415, + "acc_norm": 0.31189710610932475, + "acc_norm_stderr": 0.02631185807185415 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.2645739910313901, + "acc_stderr": 0.029605103217038336, + "acc_norm": 0.2645739910313901, + "acc_norm_stderr": 0.029605103217038336 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3511450381679389, + "acc_stderr": 0.04186445163013751, + "acc_norm": 0.3511450381679389, + "acc_norm_stderr": 0.04186445163013751 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.2878787878787879, + "acc_stderr": 0.03225883512300993, + "acc_norm": 0.2878787878787879, + "acc_norm_stderr": 0.03225883512300993 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3724137931034483, + "acc_stderr": 0.0402873153294756, + "acc_norm": 0.3724137931034483, + "acc_norm_stderr": 0.0402873153294756 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.04389869956808779, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.04389869956808779 + }, + 
"harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.36554621848739494, + "acc_stderr": 0.0312821770636846, + "acc_norm": 0.36554621848739494, + "acc_norm_stderr": 0.0312821770636846 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.34615384615384615, + "acc_stderr": 0.024121125416941176, + "acc_norm": 0.34615384615384615, + "acc_norm_stderr": 0.024121125416941176 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036845, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036845 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3425925925925926, + "acc_stderr": 0.045879047413018105, + "acc_norm": 0.3425925925925926, + "acc_norm_stderr": 0.045879047413018105 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2561576354679803, + "acc_stderr": 0.030712730070982592, + "acc_norm": 0.2561576354679803, + "acc_norm_stderr": 0.030712730070982592 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3032258064516129, + "acc_stderr": 0.026148685930671746, + "acc_norm": 0.3032258064516129, + "acc_norm_stderr": 0.026148685930671746 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.3974358974358974, + "acc_stderr": 0.032059534537892925, + "acc_norm": 0.3974358974358974, + "acc_norm_stderr": 0.032059534537892925 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.25660377358490566, + "acc_stderr": 0.026880647889051996, + "acc_norm": 0.25660377358490566, + "acc_norm_stderr": 0.026880647889051996 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2818181818181818, + "acc_stderr": 0.043091187099464585, + "acc_norm": 0.2818181818181818, + "acc_norm_stderr": 0.043091187099464585 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.23333333333333334, + "acc_stderr": 0.025787874220959316, + "acc_norm": 
0.23333333333333334, + "acc_norm_stderr": 0.025787874220959316 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + "acc_stderr": 0.03684881521389023, + "acc_norm": 0.2847682119205298, + "acc_norm_stderr": 0.03684881521389023 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.03333333333333333, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.03333333333333333 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.30057803468208094, + "acc_stderr": 0.0349610148119118, + "acc_norm": 0.30057803468208094, + "acc_norm_stderr": 0.0349610148119118 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2328042328042328, + "acc_stderr": 0.02176596167215453, + "acc_norm": 0.2328042328042328, + "acc_norm_stderr": 0.02176596167215453 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2847222222222222, + "acc_stderr": 0.037738099906869334, + "acc_norm": 0.2847222222222222, + "acc_norm_stderr": 0.037738099906869334 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932269, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932269 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.30346820809248554, + "acc_stderr": 0.024752411960917205, + "acc_norm": 0.30346820809248554, + "acc_norm_stderr": 0.024752411960917205 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.26380368098159507, + "acc_stderr": 0.03462419931615625, + "acc_norm": 0.26380368098159507, + "acc_norm_stderr": 0.03462419931615625 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.24691358024691357, + "acc_stderr": 0.023993501709042107, + "acc_norm": 0.24691358024691357, + "acc_norm_stderr": 0.023993501709042107 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + 
"acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.23316062176165803, + "acc_stderr": 0.030516111371476008, + "acc_norm": 0.23316062176165803, + "acc_norm_stderr": 0.030516111371476008 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.03999423879281336, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.03999423879281336 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.28073394495412846, + "acc_stderr": 0.019266055045871616, + "acc_norm": 0.28073394495412846, + "acc_norm_stderr": 0.019266055045871616 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.040406101782088394, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.040406101782088394 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.026568921015457152, + "acc_norm": 0.3137254901960784, + "acc_norm_stderr": 0.026568921015457152 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384739, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384739 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.38016528925619836, + "acc_stderr": 0.04431324501968432, + "acc_norm": 0.38016528925619836, + "acc_norm_stderr": 0.04431324501968432 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.03459777606810537, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.03459777606810537 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.23039215686274508, + "acc_stderr": 0.017035229258034038, + "acc_norm": 0.23039215686274508, + "acc_norm_stderr": 0.017035229258034038 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2801418439716312, + "acc_stderr": 0.026789172351140252, + "acc_norm": 0.2801418439716312, + "acc_norm_stderr": 0.026789172351140252 + }, + "harness|ko_mmlu_machine_learning|5": { 
+ "acc": 0.20535714285714285, + "acc_stderr": 0.03834241021419072, + "acc_norm": 0.20535714285714285, + "acc_norm_stderr": 0.03834241021419072 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4398148148148148, + "acc_stderr": 0.033851779760448106, + "acc_norm": 0.4398148148148148, + "acc_norm_stderr": 0.033851779760448106 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2748603351955307, + "acc_stderr": 0.014931316703220513, + "acc_norm": 0.2748603351955307, + "acc_norm_stderr": 0.014931316703220513 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.43014705882352944, + "acc_stderr": 0.030074971917302875, + "acc_norm": 0.43014705882352944, + "acc_norm_stderr": 0.030074971917302875 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.37142857142857144, + "acc_stderr": 0.030932858792789838, + "acc_norm": 0.37142857142857144, + "acc_norm_stderr": 0.030932858792789838 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.270042194092827, + "acc_stderr": 0.028900721906293426, + "acc_norm": 0.270042194092827, + "acc_norm_stderr": 0.028900721906293426 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.24771838331160365, + "acc_stderr": 0.011025499291443738, + "acc_norm": 0.24771838331160365, + "acc_norm_stderr": 0.011025499291443738 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.030587591351604246, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.030587591351604246 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.21212121212121213, + "acc_stderr": 0.031922715695482995, + "acc_norm": 
0.21212121212121213, + "acc_norm_stderr": 0.031922715695482995 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.21909424724602203, + "mc1_stderr": 0.014480038578757478, + "mc2": 0.38148342854178163, + "mc2_stderr": 0.01616827274309981 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.22195985832349469, + "acc_stderr": 0.01428739461682119, + "acc_norm": 0.29161747343565525, + "acc_norm_stderr": 0.01562627669007024 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + 
"harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Alphacode-AI-Team/Alpha-LLM-Mistral7B-v3", + "model_sha": "5ec8ad85ea57ed8cc826a572d144f7591fec94f0", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Alphacode-AI-Team/Alpha-LLM-Mistral7B-v4/result_2024-04-07 08:55:56.json b/Alphacode-AI-Team/Alpha-LLM-Mistral7B-v4/result_2024-04-07 08:55:56.json new file mode 100644 index 0000000000000000000000000000000000000000..e288b2cebca8fb26dceddd5de98c2f5fac0f9301 --- /dev/null +++ b/Alphacode-AI-Team/Alpha-LLM-Mistral7B-v4/result_2024-04-07 08:55:56.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.18430034129692832, + "acc_stderr": 0.011330517933037411, + "acc_norm": 
0.21160409556313994, + "acc_norm_stderr": 0.011935916358632857 + }, + "harness|ko_hellaswag|10": { + "acc": 0.29247161919936265, + "acc_stderr": 0.004539680764142175, + "acc_norm": 0.32354112726548495, + "acc_norm_stderr": 0.004668710689192412 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.19298245614035087, + "acc_stderr": 0.03026745755489847, + "acc_norm": 0.19298245614035087, + "acc_norm_stderr": 0.03026745755489847 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.1553398058252427, + "acc_stderr": 0.03586594738573975, + "acc_norm": 0.1553398058252427, + "acc_norm_stderr": 0.03586594738573975 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.26436781609195403, + "acc_stderr": 0.01576998484069052, + "acc_norm": 0.26436781609195403, + "acc_norm_stderr": 0.01576998484069052 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.21481481481481482, + "acc_stderr": 0.035478541985608264, + "acc_norm": 0.21481481481481482, + "acc_norm_stderr": 0.035478541985608264 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.23, + "acc_stderr": 0.042295258468165065, + "acc_norm": 0.23, + "acc_norm_stderr": 0.042295258468165065 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2765957446808511, + "acc_stderr": 0.029241883869628817, + "acc_norm": 0.2765957446808511, + "acc_norm_stderr": 0.029241883869628817 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.28313253012048195, + "acc_stderr": 0.03507295431370519, + "acc_norm": 0.28313253012048195, + "acc_norm_stderr": 0.03507295431370519 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.26688102893890675, + "acc_stderr": 0.025122637608816646, + "acc_norm": 0.26688102893890675, + "acc_norm_stderr": 0.025122637608816646 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.30493273542600896, + "acc_stderr": 0.030898610882477515, + "acc_norm": 0.30493273542600896, + "acc_norm_stderr": 0.030898610882477515 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.26717557251908397, + "acc_stderr": 
0.03880848301082396, + "acc_norm": 0.26717557251908397, + "acc_norm_stderr": 0.03880848301082396 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.31313131313131315, + "acc_stderr": 0.033042050878136525, + "acc_norm": 0.31313131313131315, + "acc_norm_stderr": 0.033042050878136525 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3103448275862069, + "acc_stderr": 0.03855289616378948, + "acc_norm": 0.3103448275862069, + "acc_norm_stderr": 0.03855289616378948 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.19607843137254902, + "acc_stderr": 0.03950581861179962, + "acc_norm": 0.19607843137254902, + "acc_norm_stderr": 0.03950581861179962 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3445378151260504, + "acc_stderr": 0.030868682604121622, + "acc_norm": 0.3445378151260504, + "acc_norm_stderr": 0.030868682604121622 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.02390115797940252, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.02390115797940252 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816505, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816505 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.23148148148148148, + "acc_stderr": 0.04077494709252626, + "acc_norm": 0.23148148148148148, + "acc_norm_stderr": 0.04077494709252626 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.21674876847290642, + "acc_stderr": 0.028990331252516235, + "acc_norm": 0.21674876847290642, + "acc_norm_stderr": 0.028990331252516235 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 
0.3193548387096774, + "acc_stderr": 0.026522709674667765, + "acc_norm": 0.3193548387096774, + "acc_norm_stderr": 0.026522709674667765 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.19658119658119658, + "acc_stderr": 0.02603538609895129, + "acc_norm": 0.19658119658119658, + "acc_norm_stderr": 0.02603538609895129 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.20754716981132076, + "acc_stderr": 0.02495991802891127, + "acc_norm": 0.20754716981132076, + "acc_norm_stderr": 0.02495991802891127 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.23636363636363636, + "acc_stderr": 0.04069306319721376, + "acc_norm": 0.23636363636363636, + "acc_norm_stderr": 0.04069306319721376 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.02684205787383371, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.02684205787383371 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.32450331125827814, + "acc_stderr": 0.03822746937658753, + "acc_norm": 0.32450331125827814, + "acc_norm_stderr": 0.03822746937658753 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.2935323383084577, + "acc_stderr": 0.03220024104534205, + "acc_norm": 0.2935323383084577, + "acc_norm_stderr": 0.03220024104534205 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2658959537572254, + "acc_stderr": 0.03368762932259431, + "acc_norm": 0.2658959537572254, + "acc_norm_stderr": 0.03368762932259431 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.24603174603174602, + "acc_stderr": 0.022182037202948368, + "acc_norm": 0.24603174603174602, + "acc_norm_stderr": 0.022182037202948368 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2638888888888889, + "acc_stderr": 0.03685651095897532, + "acc_norm": 0.2638888888888889, + "acc_norm_stderr": 0.03685651095897532 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.23, + "acc_stderr": 0.042295258468165065, + "acc_norm": 0.23, + "acc_norm_stderr": 
0.042295258468165065 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720683, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720683 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.24855491329479767, + "acc_stderr": 0.023267528432100174, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 0.023267528432100174 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.294478527607362, + "acc_stderr": 0.03581165790474082, + "acc_norm": 0.294478527607362, + "acc_norm_stderr": 0.03581165790474082 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.25, + "acc_stderr": 0.02409347123262133, + "acc_norm": 0.25, + "acc_norm_stderr": 0.02409347123262133 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.2538860103626943, + "acc_stderr": 0.03141024780565319, + "acc_norm": 0.2538860103626943, + "acc_norm_stderr": 0.03141024780565319 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.21929824561403508, + "acc_stderr": 0.03892431106518753, + "acc_norm": 0.21929824561403508, + "acc_norm_stderr": 0.03892431106518753 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.30275229357798167, + "acc_stderr": 0.01969871143475635, + "acc_norm": 0.30275229357798167, + "acc_norm_stderr": 0.01969871143475635 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.23809523809523808, + "acc_stderr": 0.038095238095238106, + "acc_norm": 0.23809523809523808, + "acc_norm_stderr": 0.038095238095238106 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.27124183006535946, + "acc_stderr": 0.025457756696667878, + "acc_norm": 0.27124183006535946, + "acc_norm_stderr": 0.025457756696667878 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.23, + "acc_stderr": 0.042295258468165065, + "acc_norm": 0.23, + "acc_norm_stderr": 
0.042295258468165065 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2975206611570248, + "acc_stderr": 0.04173349148083499, + "acc_norm": 0.2975206611570248, + "acc_norm_stderr": 0.04173349148083499 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.03459777606810537, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.03459777606810537 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.21895424836601307, + "acc_stderr": 0.016729937565537537, + "acc_norm": 0.21895424836601307, + "acc_norm_stderr": 0.016729937565537537 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.23404255319148937, + "acc_stderr": 0.02525786135943241, + "acc_norm": 0.23404255319148937, + "acc_norm_stderr": 0.02525786135943241 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.29464285714285715, + "acc_stderr": 0.04327040932578728, + "acc_norm": 0.29464285714285715, + "acc_norm_stderr": 0.04327040932578728 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4537037037037037, + "acc_stderr": 0.033953227263757976, + "acc_norm": 0.4537037037037037, + "acc_norm_stderr": 0.033953227263757976 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27262569832402234, + "acc_stderr": 0.014893391735249608, + "acc_norm": 0.27262569832402234, + "acc_norm_stderr": 0.014893391735249608 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4522058823529412, + "acc_stderr": 0.030233758551596452, + "acc_norm": 0.4522058823529412, + "acc_norm_stderr": 0.030233758551596452 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.31020408163265306, + "acc_stderr": 
0.029613459872484378, + "acc_norm": 0.31020408163265306, + "acc_norm_stderr": 0.029613459872484378 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.27848101265822783, + "acc_stderr": 0.029178682304842555, + "acc_norm": 0.27848101265822783, + "acc_norm_stderr": 0.029178682304842555 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2561929595827901, + "acc_stderr": 0.01114917315311058, + "acc_norm": 0.2561929595827901, + "acc_norm_stderr": 0.01114917315311058 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.25980392156862747, + "acc_stderr": 0.030778554678693275, + "acc_norm": 0.25980392156862747, + "acc_norm_stderr": 0.030778554678693275 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03225078108306289, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03225078108306289 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.25458996328029376, + "mc1_stderr": 0.015250117079156479, + "mc2": 0.4518199326851351, + "mc2_stderr": 0.016577184952558062 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.256198347107438, + "acc_stderr": 0.015008301644712976, + "acc_norm": 0.3577331759149941, + "acc_norm_stderr": 0.01647980893574998 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + 
"harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + 
"model_name": "Alphacode-AI-Team/Alpha-LLM-Mistral7B-v4", + "model_sha": "99b2981a5fb3f2171396eeddc8833e6bf992a46c", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Alphacode-AI-Team/Alpha-LLM-Mistral7B/result_2024-04-02 07:00:11.json b/Alphacode-AI-Team/Alpha-LLM-Mistral7B/result_2024-04-02 07:00:11.json new file mode 100644 index 0000000000000000000000000000000000000000..2dbcbec63b86cfb624ad9a94ceaedbc61a04724e --- /dev/null +++ b/Alphacode-AI-Team/Alpha-LLM-Mistral7B/result_2024-04-02 07:00:11.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.257679180887372, + "acc_stderr": 0.012780770562768412, + "acc_norm": 0.30204778156996587, + "acc_norm_stderr": 0.013417519144716426 + }, + "harness|ko_hellaswag|10": { + "acc": 0.29396534554869547, + "acc_stderr": 0.004546451825028362, + "acc_norm": 0.34385580561641105, + "acc_norm_stderr": 0.004740229212473454 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.39766081871345027, + "acc_stderr": 0.0375363895576169, + "acc_norm": 0.39766081871345027, + "acc_norm_stderr": 0.0375363895576169 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.3592233009708738, + "acc_stderr": 0.04750458399041694, + "acc_norm": 0.3592233009708738, + "acc_norm_stderr": 0.04750458399041694 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3371647509578544, + "acc_stderr": 0.016905207420803554, + "acc_norm": 0.3371647509578544, + "acc_norm_stderr": 0.016905207420803554 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3111111111111111, + "acc_stderr": 0.03999262876617723, + "acc_norm": 0.3111111111111111, + "acc_norm_stderr": 0.03999262876617723 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + 
"harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3659574468085106, + "acc_stderr": 0.03148955829745529, + "acc_norm": 0.3659574468085106, + "acc_norm_stderr": 0.03148955829745529 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.2710843373493976, + "acc_stderr": 0.03460579907553027, + "acc_norm": 0.2710843373493976, + "acc_norm_stderr": 0.03460579907553027 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.31189710610932475, + "acc_stderr": 0.02631185807185416, + "acc_norm": 0.31189710610932475, + "acc_norm_stderr": 0.02631185807185416 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.33183856502242154, + "acc_stderr": 0.03160295143776679, + "acc_norm": 0.33183856502242154, + "acc_norm_stderr": 0.03160295143776679 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3435114503816794, + "acc_stderr": 0.041649760719448786, + "acc_norm": 0.3435114503816794, + "acc_norm_stderr": 0.041649760719448786 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.35858585858585856, + "acc_stderr": 0.03416903640391521, + "acc_norm": 0.35858585858585856, + "acc_norm_stderr": 0.03416903640391521 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.04104269211806232, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.04104269211806232 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.04023382273617749, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.04023382273617749 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.031124619309328177, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.031124619309328177 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.30512820512820515, + "acc_stderr": 0.023346335293325884, + "acc_norm": 
0.30512820512820515, + "acc_norm_stderr": 0.023346335293325884 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.04766075165356461, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.04766075165356461 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.35960591133004927, + "acc_stderr": 0.03376458246509567, + "acc_norm": 0.35960591133004927, + "acc_norm_stderr": 0.03376458246509567 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3709677419354839, + "acc_stderr": 0.027480541887953593, + "acc_norm": 0.3709677419354839, + "acc_norm_stderr": 0.027480541887953593 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.42735042735042733, + "acc_stderr": 0.032408473935163266, + "acc_norm": 0.42735042735042733, + "acc_norm_stderr": 0.032408473935163266 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2943396226415094, + "acc_stderr": 0.028049186315695245, + "acc_norm": 0.2943396226415094, + "acc_norm_stderr": 0.028049186315695245 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4090909090909091, + "acc_stderr": 0.04709306978661896, + "acc_norm": 0.4090909090909091, + "acc_norm_stderr": 0.04709306978661896 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.027940457136228405, + "acc_norm": 0.3, + "acc_norm_stderr": 0.027940457136228405 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + "acc_stderr": 0.037345356767871984, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.037345356767871984 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.033333333333333326, + "acc_norm": 
0.3333333333333333, + "acc_norm_stderr": 0.033333333333333326 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2543352601156069, + "acc_stderr": 0.0332055644308557, + "acc_norm": 0.2543352601156069, + "acc_norm_stderr": 0.0332055644308557 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.023919984164047742, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.023919984164047742 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2708333333333333, + "acc_stderr": 0.03716177437566017, + "acc_norm": 0.2708333333333333, + "acc_norm_stderr": 0.03716177437566017 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.45, + "acc_stderr": 0.04999999999999999, + "acc_norm": 0.45, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.3468208092485549, + "acc_stderr": 0.025624723994030454, + "acc_norm": 0.3468208092485549, + "acc_norm_stderr": 0.025624723994030454 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3558282208588957, + "acc_stderr": 0.03761521380046734, + "acc_norm": 0.3558282208588957, + "acc_norm_stderr": 0.03761521380046734 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.27469135802469136, + "acc_stderr": 0.02483605786829468, + "acc_norm": 0.27469135802469136, + "acc_norm_stderr": 0.02483605786829468 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.3005181347150259, + "acc_stderr": 0.033088185944157494, + "acc_norm": 0.3005181347150259, + "acc_norm_stderr": 0.033088185944157494 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2982456140350877, + "acc_stderr": 0.04303684033537315, + 
"acc_norm": 0.2982456140350877, + "acc_norm_stderr": 0.04303684033537315 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.25688073394495414, + "acc_stderr": 0.01873249292834245, + "acc_norm": 0.25688073394495414, + "acc_norm_stderr": 0.01873249292834245 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.04134913018303316, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.04134913018303316 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.37254901960784315, + "acc_stderr": 0.027684181883302884, + "acc_norm": 0.37254901960784315, + "acc_norm_stderr": 0.027684181883302884 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.4297520661157025, + "acc_stderr": 0.04519082021319773, + "acc_norm": 0.4297520661157025, + "acc_norm_stderr": 0.04519082021319773 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3618421052631579, + "acc_stderr": 0.03910525752849724, + "acc_norm": 0.3618421052631579, + "acc_norm_stderr": 0.03910525752849724 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.28104575163398693, + "acc_stderr": 0.018185218954318082, + "acc_norm": 0.28104575163398693, + "acc_norm_stderr": 0.018185218954318082 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32269503546099293, + "acc_stderr": 0.02788913930053478, + "acc_norm": 0.32269503546099293, + "acc_norm_stderr": 0.02788913930053478 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3482142857142857, + "acc_stderr": 0.04521829902833586, + "acc_norm": 0.3482142857142857, + "acc_norm_stderr": 0.04521829902833586 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3287037037037037, + "acc_stderr": 0.032036140846700596, + "acc_norm": 0.3287037037037037, + "acc_norm_stderr": 0.032036140846700596 + }, + "harness|ko_mmlu_moral_scenarios|5": { + 
"acc": 0.2446927374301676, + "acc_stderr": 0.014378169884098424, + "acc_norm": 0.2446927374301676, + "acc_norm_stderr": 0.014378169884098424 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3786764705882353, + "acc_stderr": 0.02946513363977613, + "acc_norm": 0.3786764705882353, + "acc_norm_stderr": 0.02946513363977613 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2571428571428571, + "acc_stderr": 0.027979823538744546, + "acc_norm": 0.2571428571428571, + "acc_norm_stderr": 0.027979823538744546 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.32489451476793246, + "acc_stderr": 0.03048603938910529, + "acc_norm": 0.32489451476793246, + "acc_norm_stderr": 0.03048603938910529 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2757496740547588, + "acc_stderr": 0.011413813609161006, + "acc_norm": 0.2757496740547588, + "acc_norm_stderr": 0.011413813609161006 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.032566854844603886, + "acc_norm": 0.3137254901960784, + "acc_norm_stderr": 0.032566854844603886 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3696969696969697, + "acc_stderr": 0.03769430314512567, + "acc_norm": 0.3696969696969697, + "acc_norm_stderr": 0.03769430314512567 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24357405140758873, + "mc1_stderr": 0.015026354824910782, + "mc2": 0.42334059741938457, + "mc2_stderr": 0.01570835996765872 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.1959858323494687, + "acc_stderr": 0.013647685567768863, + "acc_norm": 0.27744982290436837, + "acc_norm_stderr": 
0.015393630236605973 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + 
"harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Alphacode-AI-Team/Alpha-LLM-Mistral7B", + "model_sha": "b6bdb6d015b931995f6b3fdc2d4896372784b807", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Alphacode-AI/AlphaMist7B-slr-v1/result_2024-04-08 15:32:46.json b/Alphacode-AI/AlphaMist7B-slr-v1/result_2024-04-08 15:32:46.json new file mode 100644 index 0000000000000000000000000000000000000000..56e98b983982c053bdd0c07249dffe87b4e7bdba --- /dev/null +++ b/Alphacode-AI/AlphaMist7B-slr-v1/result_2024-04-08 15:32:46.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4761092150170648, + "acc_stderr": 0.014594701798071657, + "acc_norm": 0.515358361774744, + "acc_norm_stderr": 0.014604496129394916 + }, + "harness|ko_hellaswag|10": { + "acc": 0.5443138816968731, + "acc_stderr": 0.004970145708188002, + "acc_norm": 0.6666998605855408, + "acc_norm_stderr": 0.004704293898729907 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4853801169590643, + "acc_stderr": 0.038331852752130205, + "acc_norm": 0.4853801169590643, + "acc_norm_stderr": 0.038331852752130205 + }, + 
"harness|ko_mmlu_management|5": { + "acc": 0.5825242718446602, + "acc_stderr": 0.048828405482122375, + "acc_norm": 0.5825242718446602, + "acc_norm_stderr": 0.048828405482122375 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5159642401021711, + "acc_stderr": 0.01787084750608174, + "acc_norm": 0.5159642401021711, + "acc_norm_stderr": 0.01787084750608174 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34814814814814815, + "acc_stderr": 0.041153246103369526, + "acc_norm": 0.34814814814814815, + "acc_norm_stderr": 0.041153246103369526 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3829787234042553, + "acc_stderr": 0.03177821250236922, + "acc_norm": 0.3829787234042553, + "acc_norm_stderr": 0.03177821250236922 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4397590361445783, + "acc_stderr": 0.03864139923699121, + "acc_norm": 0.4397590361445783, + "acc_norm_stderr": 0.03864139923699121 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5048231511254019, + "acc_stderr": 0.028396770444111305, + "acc_norm": 0.5048231511254019, + "acc_norm_stderr": 0.028396770444111305 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.45739910313901344, + "acc_stderr": 0.03343577705583064, + "acc_norm": 0.45739910313901344, + "acc_norm_stderr": 0.03343577705583064 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.46564885496183206, + "acc_stderr": 0.043749285605997376, + "acc_norm": 0.46564885496183206, + "acc_norm_stderr": 0.043749285605997376 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.53, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.53, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6212121212121212, + "acc_stderr": 0.03456088731993747, + "acc_norm": 0.6212121212121212, + "acc_norm_stderr": 0.03456088731993747 + }, + 
"harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.46206896551724136, + "acc_stderr": 0.041546596717075474, + "acc_norm": 0.46206896551724136, + "acc_norm_stderr": 0.041546596717075474 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3627450980392157, + "acc_stderr": 0.047840607041056527, + "acc_norm": 0.3627450980392157, + "acc_norm_stderr": 0.047840607041056527 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.592436974789916, + "acc_stderr": 0.03191863374478466, + "acc_norm": 0.592436974789916, + "acc_norm_stderr": 0.03191863374478466 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5307692307692308, + "acc_stderr": 0.025302958890850154, + "acc_norm": 0.5307692307692308, + "acc_norm_stderr": 0.025302958890850154 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5462962962962963, + "acc_stderr": 0.04812917324536823, + "acc_norm": 0.5462962962962963, + "acc_norm_stderr": 0.04812917324536823 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.03481904844438803, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.03481904844438803 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4774193548387097, + "acc_stderr": 0.02841498501970786, + "acc_norm": 0.4774193548387097, + "acc_norm_stderr": 0.02841498501970786 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6709401709401709, + "acc_stderr": 0.030782321577688173, + "acc_norm": 0.6709401709401709, + "acc_norm_stderr": 0.030782321577688173 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.41132075471698115, + "acc_stderr": 0.030285009259009787, + "acc_norm": 
0.41132075471698115, + "acc_norm_stderr": 0.030285009259009787 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.04769300568972744, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.04769300568972744 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2740740740740741, + "acc_stderr": 0.027195934804085626, + "acc_norm": 0.2740740740740741, + "acc_norm_stderr": 0.027195934804085626 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.037579499229433426, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.037579499229433426 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5870646766169154, + "acc_stderr": 0.03481520803367348, + "acc_norm": 0.5870646766169154, + "acc_norm_stderr": 0.03481520803367348 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3583815028901734, + "acc_stderr": 0.036563436533531585, + "acc_norm": 0.3583815028901734, + "acc_norm_stderr": 0.036563436533531585 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3492063492063492, + "acc_stderr": 0.024552292209342665, + "acc_norm": 0.3492063492063492, + "acc_norm_stderr": 0.024552292209342665 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3958333333333333, + "acc_stderr": 0.04089465449325582, + "acc_norm": 0.3958333333333333, + "acc_norm_stderr": 0.04089465449325582 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.57, + "acc_stderr": 0.04975698519562427, + "acc_norm": 0.57, + "acc_norm_stderr": 0.04975698519562427 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5086705202312138, + "acc_stderr": 0.026915047355369804, + "acc_norm": 0.5086705202312138, + "acc_norm_stderr": 0.026915047355369804 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5030674846625767, + "acc_stderr": 
0.03928297078179663, + "acc_norm": 0.5030674846625767, + "acc_norm_stderr": 0.03928297078179663 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.027815973433878014, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.027815973433878014 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5595854922279793, + "acc_stderr": 0.035827245300360945, + "acc_norm": 0.5595854922279793, + "acc_norm_stderr": 0.035827245300360945 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.32456140350877194, + "acc_stderr": 0.044045561573747685, + "acc_norm": 0.32456140350877194, + "acc_norm_stderr": 0.044045561573747685 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5357798165137615, + "acc_stderr": 0.021382364775701893, + "acc_norm": 0.5357798165137615, + "acc_norm_stderr": 0.021382364775701893 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.40476190476190477, + "acc_stderr": 0.043902592653775614, + "acc_norm": 0.40476190476190477, + "acc_norm_stderr": 0.043902592653775614 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.49019607843137253, + "acc_stderr": 0.028624412550167965, + "acc_norm": 0.49019607843137253, + "acc_norm_stderr": 0.028624412550167965 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6694214876033058, + "acc_stderr": 0.04294340845212094, + "acc_norm": 0.6694214876033058, + "acc_norm_stderr": 0.04294340845212094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40789473684210525, + "acc_stderr": 0.03999309712777471, + "acc_norm": 0.40789473684210525, + "acc_norm_stderr": 0.03999309712777471 + }, + "harness|ko_mmlu_professional_psychology|5": { 
+ "acc": 0.42320261437908496, + "acc_stderr": 0.019987809769482057, + "acc_norm": 0.42320261437908496, + "acc_norm_stderr": 0.019987809769482057 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.29432624113475175, + "acc_stderr": 0.027187127011503796, + "acc_norm": 0.29432624113475175, + "acc_norm_stderr": 0.027187127011503796 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3482142857142857, + "acc_stderr": 0.04521829902833586, + "acc_norm": 0.3482142857142857, + "acc_norm_stderr": 0.04521829902833586 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4212962962962963, + "acc_stderr": 0.033674621388960775, + "acc_norm": 0.4212962962962963, + "acc_norm_stderr": 0.033674621388960775 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2569832402234637, + "acc_stderr": 0.014614465821966348, + "acc_norm": 0.2569832402234637, + "acc_norm_stderr": 0.014614465821966348 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.43014705882352944, + "acc_stderr": 0.030074971917302875, + "acc_norm": 0.43014705882352944, + "acc_norm_stderr": 0.030074971917302875 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5469387755102041, + "acc_stderr": 0.031867859300041275, + "acc_norm": 0.5469387755102041, + "acc_norm_stderr": 0.031867859300041275 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5780590717299579, + "acc_stderr": 0.032148146302403695, + "acc_norm": 0.5780590717299579, + "acc_norm_stderr": 0.032148146302403695 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.35919165580182527, + "acc_stderr": 0.012253386187584252, + "acc_norm": 0.35919165580182527, + 
"acc_norm_stderr": 0.012253386187584252 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.46078431372549017, + "acc_stderr": 0.03498501649369527, + "acc_norm": 0.46078431372549017, + "acc_norm_stderr": 0.03498501649369527 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4727272727272727, + "acc_stderr": 0.03898531605579419, + "acc_norm": 0.4727272727272727, + "acc_norm_stderr": 0.03898531605579419 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.42472460220318237, + "mc1_stderr": 0.017304000957167488, + "mc2": 0.5765535731166324, + "mc2_stderr": 0.01654086044742999 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.500590318772137, + "acc_stderr": 0.017190342123448586, + "acc_norm": 0.5336481700118064, + "acc_norm_stderr": 0.017151384117131865 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Alphacode-AI/AlphaMist7B-slr-v1", + "model_sha": "5ee0a1cf93102ba493b1399cc5d0cf61923eb1ee", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Alphacode-AI/AlphaMist7B-slr-v2/result_2024-04-08 15:32:51.json b/Alphacode-AI/AlphaMist7B-slr-v2/result_2024-04-08 15:32:51.json new 
file mode 100644 index 0000000000000000000000000000000000000000..156841c8b6be2e8a42858ca0931cb6d3cfc3579f --- /dev/null +++ b/Alphacode-AI/AlphaMist7B-slr-v2/result_2024-04-08 15:32:51.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.47440273037542663, + "acc_stderr": 0.014592230885298959, + "acc_norm": 0.515358361774744, + "acc_norm_stderr": 0.014604496129394916 + }, + "harness|ko_hellaswag|10": { + "acc": 0.5727942640908186, + "acc_stderr": 0.00493661642892264, + "acc_norm": 0.69398526190002, + "acc_norm_stderr": 0.004598940722374079 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.49707602339181284, + "acc_stderr": 0.03834759370936839, + "acc_norm": 0.49707602339181284, + "acc_norm_stderr": 0.03834759370936839 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5728155339805825, + "acc_stderr": 0.04897957737781168, + "acc_norm": 0.5728155339805825, + "acc_norm_stderr": 0.04897957737781168 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5236270753512133, + "acc_stderr": 0.017859989765176453, + "acc_norm": 0.5236270753512133, + "acc_norm_stderr": 0.017859989765176453 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.35555555555555557, + "acc_stderr": 0.04135176749720386, + "acc_norm": 0.35555555555555557, + "acc_norm_stderr": 0.04135176749720386 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909284, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909284 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.34893617021276596, + "acc_stderr": 0.031158522131357766, + "acc_norm": 0.34893617021276596, + "acc_norm_stderr": 0.031158522131357766 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4457831325301205, + "acc_stderr": 0.03869543323472101, + "acc_norm": 0.4457831325301205, + "acc_norm_stderr": 0.03869543323472101 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5112540192926045, + "acc_stderr": 0.028390897396863537, + "acc_norm": 0.5112540192926045, + 
"acc_norm_stderr": 0.028390897396863537 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.45739910313901344, + "acc_stderr": 0.03343577705583064, + "acc_norm": 0.45739910313901344, + "acc_norm_stderr": 0.03343577705583064 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4580152671755725, + "acc_stderr": 0.04369802690578756, + "acc_norm": 0.4580152671755725, + "acc_norm_stderr": 0.04369802690578756 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6161616161616161, + "acc_stderr": 0.03464881675016338, + "acc_norm": 0.6161616161616161, + "acc_norm_stderr": 0.03464881675016338 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.45517241379310347, + "acc_stderr": 0.04149886942192117, + "acc_norm": 0.45517241379310347, + "acc_norm_stderr": 0.04149886942192117 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.04617034827006716, + "acc_norm": 0.3137254901960784, + "acc_norm_stderr": 0.04617034827006716 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5882352941176471, + "acc_stderr": 0.031968769891957786, + "acc_norm": 0.5882352941176471, + "acc_norm_stderr": 0.031968769891957786 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5358974358974359, + "acc_stderr": 0.02528558599001786, + "acc_norm": 0.5358974358974359, + "acc_norm_stderr": 0.02528558599001786 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.47, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.47, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5462962962962963, + "acc_stderr": 0.04812917324536823, + "acc_norm": 
0.5462962962962963, + "acc_norm_stderr": 0.04812917324536823 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4433497536945813, + "acc_stderr": 0.03495334582162933, + "acc_norm": 0.4433497536945813, + "acc_norm_stderr": 0.03495334582162933 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4806451612903226, + "acc_stderr": 0.0284226874043121, + "acc_norm": 0.4806451612903226, + "acc_norm_stderr": 0.0284226874043121 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.688034188034188, + "acc_stderr": 0.03035152732334494, + "acc_norm": 0.688034188034188, + "acc_norm_stderr": 0.03035152732334494 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4226415094339623, + "acc_stderr": 0.03040233144576954, + "acc_norm": 0.4226415094339623, + "acc_norm_stderr": 0.03040233144576954 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5272727272727272, + "acc_stderr": 0.0478200179138006, + "acc_norm": 0.5272727272727272, + "acc_norm_stderr": 0.0478200179138006 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.027940457136228405, + "acc_norm": 0.3, + "acc_norm_stderr": 0.027940457136228405 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.03780445850526733, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.03780445850526733 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5870646766169154, + "acc_stderr": 0.03481520803367348, + "acc_norm": 0.5870646766169154, + "acc_norm_stderr": 0.03481520803367348 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4046242774566474, + "acc_stderr": 0.03742461193887249, + "acc_norm": 0.4046242774566474, + "acc_norm_stderr": 0.03742461193887249 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.024594975128920938, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.024594975128920938 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3888888888888889, 
+ "acc_stderr": 0.04076663253918567, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.04076663253918567 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.57, + "acc_stderr": 0.04975698519562427, + "acc_norm": 0.57, + "acc_norm_stderr": 0.04975698519562427 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5, + "acc_stderr": 0.026919095102908273, + "acc_norm": 0.5, + "acc_norm_stderr": 0.026919095102908273 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5153374233128835, + "acc_stderr": 0.03926522378708843, + "acc_norm": 0.5153374233128835, + "acc_norm_stderr": 0.03926522378708843 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.027815973433878014, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.027815973433878014 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5595854922279793, + "acc_stderr": 0.035827245300360945, + "acc_norm": 0.5595854922279793, + "acc_norm_stderr": 0.035827245300360945 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3157894736842105, + "acc_stderr": 0.04372748290278008, + "acc_norm": 0.3157894736842105, + "acc_norm_stderr": 0.04372748290278008 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5339449541284403, + "acc_stderr": 0.021387863350353992, + "acc_norm": 0.5339449541284403, + "acc_norm_stderr": 0.021387863350353992 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4126984126984127, + "acc_stderr": 0.04403438954768177, + "acc_norm": 0.4126984126984127, + "acc_norm_stderr": 0.04403438954768177 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5098039215686274, + "acc_stderr": 
0.02862441255016795, + "acc_norm": 0.5098039215686274, + "acc_norm_stderr": 0.02862441255016795 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956913, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956913 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6776859504132231, + "acc_stderr": 0.042664163633521685, + "acc_norm": 0.6776859504132231, + "acc_norm_stderr": 0.042664163633521685 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.42105263157894735, + "acc_stderr": 0.04017901275981748, + "acc_norm": 0.42105263157894735, + "acc_norm_stderr": 0.04017901275981748 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.41830065359477125, + "acc_stderr": 0.01995597514583555, + "acc_norm": 0.41830065359477125, + "acc_norm_stderr": 0.01995597514583555 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2907801418439716, + "acc_stderr": 0.027090664368353178, + "acc_norm": 0.2907801418439716, + "acc_norm_stderr": 0.027090664368353178 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3392857142857143, + "acc_stderr": 0.04493949068613539, + "acc_norm": 0.3392857142857143, + "acc_norm_stderr": 0.04493949068613539 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.033622774366080424, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.033622774366080424 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2558659217877095, + "acc_stderr": 0.014593620923210758, + "acc_norm": 0.2558659217877095, + "acc_norm_stderr": 0.014593620923210758 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 
0.45588235294117646, + "acc_stderr": 0.03025437257397669, + "acc_norm": 0.45588235294117646, + "acc_norm_stderr": 0.03025437257397669 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5510204081632653, + "acc_stderr": 0.03184213866687578, + "acc_norm": 0.5510204081632653, + "acc_norm_stderr": 0.03184213866687578 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5907172995780591, + "acc_stderr": 0.03200704183359591, + "acc_norm": 0.5907172995780591, + "acc_norm_stderr": 0.03200704183359591 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3520208604954368, + "acc_stderr": 0.012198140605353604, + "acc_norm": 0.3520208604954368, + "acc_norm_stderr": 0.012198140605353604 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4803921568627451, + "acc_stderr": 0.03506612560524866, + "acc_norm": 0.4803921568627451, + "acc_norm_stderr": 0.03506612560524866 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.038956580652718446, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.038956580652718446 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.42962056303549573, + "mc1_stderr": 0.01732923458040911, + "mc2": 0.5783267067449626, + "mc2_stderr": 0.01641473247415343 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4970484061393152, + "acc_stderr": 0.017190054580194694, + "acc_norm": 0.525383707201889, + "acc_norm_stderr": 0.01716818720142925 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + 
"harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + 
"harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Alphacode-AI/AlphaMist7B-slr-v2", + "model_sha": "286b2fa985e38a1b51e174e448c75b570a6ee620", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Alphacode-AI/AlphaMist7B-slr-v3/result_2024-04-09 04:11:04.json b/Alphacode-AI/AlphaMist7B-slr-v3/result_2024-04-09 04:11:04.json new file mode 100644 index 0000000000000000000000000000000000000000..dc4a93a7ed9773c8aea4b93343730bb10e157adb --- /dev/null +++ b/Alphacode-AI/AlphaMist7B-slr-v3/result_2024-04-09 04:11:04.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.47525597269624575, + "acc_stderr": 0.014593487694937731, + "acc_norm": 0.5247440273037542, + "acc_norm_stderr": 0.014593487694937745 + }, + "harness|ko_hellaswag|10": { + "acc": 0.5826528579964151, + "acc_stderr": 0.004921133864931886, + "acc_norm": 0.6986656044612627, + "acc_norm_stderr": 0.004578999029127978 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.49707602339181284, + "acc_stderr": 0.03834759370936839, + "acc_norm": 0.49707602339181284, + "acc_norm_stderr": 0.03834759370936839 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5728155339805825, + "acc_stderr": 0.04897957737781168, + "acc_norm": 0.5728155339805825, + "acc_norm_stderr": 0.04897957737781168 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.524904214559387, + "acc_stderr": 0.017857770704901035, + "acc_norm": 0.524904214559387, + "acc_norm_stderr": 0.017857770704901035 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.35555555555555557, + "acc_stderr": 0.04135176749720386, + "acc_norm": 0.35555555555555557, + "acc_norm_stderr": 0.04135176749720386 + }, + 
"harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3574468085106383, + "acc_stderr": 0.03132941789476425, + "acc_norm": 0.3574468085106383, + "acc_norm_stderr": 0.03132941789476425 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.45180722891566266, + "acc_stderr": 0.03874371556587953, + "acc_norm": 0.45180722891566266, + "acc_norm_stderr": 0.03874371556587953 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4983922829581994, + "acc_stderr": 0.02839794490780661, + "acc_norm": 0.4983922829581994, + "acc_norm_stderr": 0.02839794490780661 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.452914798206278, + "acc_stderr": 0.03340867501923325, + "acc_norm": 0.452914798206278, + "acc_norm_stderr": 0.03340867501923325 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4580152671755725, + "acc_stderr": 0.04369802690578756, + "acc_norm": 0.4580152671755725, + "acc_norm_stderr": 0.04369802690578756 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6161616161616161, + "acc_stderr": 0.03464881675016338, + "acc_norm": 0.6161616161616161, + "acc_norm_stderr": 0.03464881675016338 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.45517241379310347, + "acc_stderr": 0.04149886942192117, + "acc_norm": 0.45517241379310347, + "acc_norm_stderr": 0.04149886942192117 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.04617034827006716, + "acc_norm": 0.3137254901960784, + "acc_norm_stderr": 0.04617034827006716 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.592436974789916, + "acc_stderr": 0.03191863374478466, + "acc_norm": 0.592436974789916, + "acc_norm_stderr": 
0.03191863374478466 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.541025641025641, + "acc_stderr": 0.025265525491284295, + "acc_norm": 0.541025641025641, + "acc_norm_stderr": 0.025265525491284295 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5277777777777778, + "acc_stderr": 0.04826217294139894, + "acc_norm": 0.5277777777777778, + "acc_norm_stderr": 0.04826217294139894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.43842364532019706, + "acc_stderr": 0.03491207857486519, + "acc_norm": 0.43842364532019706, + "acc_norm_stderr": 0.03491207857486519 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4774193548387097, + "acc_stderr": 0.02841498501970786, + "acc_norm": 0.4774193548387097, + "acc_norm_stderr": 0.02841498501970786 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.688034188034188, + "acc_stderr": 0.03035152732334494, + "acc_norm": 0.688034188034188, + "acc_norm_stderr": 0.03035152732334494 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.42641509433962266, + "acc_stderr": 0.030437794342983056, + "acc_norm": 0.42641509433962266, + "acc_norm_stderr": 0.030437794342983056 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5363636363636364, + "acc_stderr": 0.04776449162396197, + "acc_norm": 0.5363636363636364, + "acc_norm_stderr": 0.04776449162396197 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.02784081149587193, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.02784081149587193 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.037579499229433426, + "acc_norm": 
0.304635761589404, + "acc_norm_stderr": 0.037579499229433426 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5771144278606966, + "acc_stderr": 0.034932317774212816, + "acc_norm": 0.5771144278606966, + "acc_norm_stderr": 0.034932317774212816 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4046242774566474, + "acc_stderr": 0.03742461193887249, + "acc_norm": 0.4046242774566474, + "acc_norm_stderr": 0.03742461193887249 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.024594975128920938, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.024594975128920938 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.04076663253918567, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.04076663253918567 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.45, + "acc_stderr": 0.049999999999999996, + "acc_norm": 0.45, + "acc_norm_stderr": 0.049999999999999996 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5028901734104047, + "acc_stderr": 0.02691864538323901, + "acc_norm": 0.5028901734104047, + "acc_norm_stderr": 0.02691864538323901 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5153374233128835, + "acc_stderr": 0.03926522378708843, + "acc_norm": 0.5153374233128835, + "acc_norm_stderr": 0.03926522378708843 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4845679012345679, + "acc_stderr": 0.02780749004427619, + "acc_norm": 0.4845679012345679, + "acc_norm_stderr": 0.02780749004427619 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5595854922279793, + "acc_stderr": 0.035827245300360945, + 
"acc_norm": 0.5595854922279793, + "acc_norm_stderr": 0.035827245300360945 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3157894736842105, + "acc_stderr": 0.04372748290278008, + "acc_norm": 0.3157894736842105, + "acc_norm_stderr": 0.04372748290278008 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5394495412844037, + "acc_stderr": 0.021370494609995093, + "acc_norm": 0.5394495412844037, + "acc_norm_stderr": 0.021370494609995093 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4126984126984127, + "acc_stderr": 0.04403438954768177, + "acc_norm": 0.4126984126984127, + "acc_norm_stderr": 0.04403438954768177 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5098039215686274, + "acc_stderr": 0.02862441255016795, + "acc_norm": 0.5098039215686274, + "acc_norm_stderr": 0.02862441255016795 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.52, + "acc_stderr": 0.05021167315686779, + "acc_norm": 0.52, + "acc_norm_stderr": 0.05021167315686779 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6694214876033058, + "acc_stderr": 0.04294340845212094, + "acc_norm": 0.6694214876033058, + "acc_norm_stderr": 0.04294340845212094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4276315789473684, + "acc_stderr": 0.04026097083296557, + "acc_norm": 0.4276315789473684, + "acc_norm_stderr": 0.04026097083296557 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.42320261437908496, + "acc_stderr": 0.019987809769482057, + "acc_norm": 0.42320261437908496, + "acc_norm_stderr": 0.019987809769482057 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.30141843971631205, + "acc_stderr": 0.02737412888263115, + "acc_norm": 0.30141843971631205, + "acc_norm_stderr": 0.02737412888263115 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3392857142857143, + "acc_stderr": 0.04493949068613539, + "acc_norm": 0.3392857142857143, + "acc_norm_stderr": 0.04493949068613539 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 
0.4166666666666667, + "acc_stderr": 0.033622774366080424, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.033622774366080424 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2569832402234637, + "acc_stderr": 0.014614465821966353, + "acc_norm": 0.2569832402234637, + "acc_norm_stderr": 0.014614465821966353 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.45955882352941174, + "acc_stderr": 0.030273325077345755, + "acc_norm": 0.45955882352941174, + "acc_norm_stderr": 0.030273325077345755 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5551020408163265, + "acc_stderr": 0.03181425118197786, + "acc_norm": 0.5551020408163265, + "acc_norm_stderr": 0.03181425118197786 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5907172995780591, + "acc_stderr": 0.03200704183359591, + "acc_norm": 0.5907172995780591, + "acc_norm_stderr": 0.03200704183359591 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.35071707953063885, + "acc_stderr": 0.012187773370741518, + "acc_norm": 0.35071707953063885, + "acc_norm_stderr": 0.012187773370741518 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4852941176470588, + "acc_stderr": 0.03507793834791324, + "acc_norm": 0.4852941176470588, + "acc_norm_stderr": 0.03507793834791324 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.45454545454545453, + "acc_stderr": 0.038881769216741, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.038881769216741 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.43329253365973075, + "mc1_stderr": 0.0173470244501075, + "mc2": 0.5822559262822837, + "mc2_stderr": 0.01635883379303052 
+ }, + "harness|ko_commongen_v2|2": { + "acc": 0.4946871310507674, + "acc_stderr": 0.017189383627229687, + "acc_norm": 0.5218417945690673, + "acc_norm_stderr": 0.01717394447429438 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + 
"harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Alphacode-AI/AlphaMist7B-slr-v3", + "model_sha": "414791cfe50a623a23b27d8aaca074575ac80a2b", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Alphacode-AI/AlphaMist7B-slr-v4-slow/result_2024-05-10 06:26:32.json b/Alphacode-AI/AlphaMist7B-slr-v4-slow/result_2024-05-10 06:26:32.json new file mode 100644 index 0000000000000000000000000000000000000000..e261f0458cf785d2d9153457dcfeb4c0f1d75fd7 --- /dev/null +++ b/Alphacode-AI/AlphaMist7B-slr-v4-slow/result_2024-05-10 06:26:32.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4872013651877133, + "acc_stderr": 0.014606603181012544, + "acc_norm": 0.5366894197952219, + "acc_norm_stderr": 0.014572000527756993 + }, + "harness|ko_hellaswag|10": { + "acc": 0.6100378410675165, + "acc_stderr": 0.004867445945277157, + "acc_norm": 0.7231627165903206, + "acc_norm_stderr": 0.004465209290388975 + }, + 
"harness|ko_mmlu_world_religions|5": { + "acc": 0.5029239766081871, + "acc_stderr": 0.03834759370936839, + "acc_norm": 0.5029239766081871, + "acc_norm_stderr": 0.03834759370936839 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6116504854368932, + "acc_stderr": 0.048257293373563895, + "acc_norm": 0.6116504854368932, + "acc_norm_stderr": 0.048257293373563895 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.524904214559387, + "acc_stderr": 0.017857770704901035, + "acc_norm": 0.524904214559387, + "acc_norm_stderr": 0.017857770704901035 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34814814814814815, + "acc_stderr": 0.041153246103369526, + "acc_norm": 0.34814814814814815, + "acc_norm_stderr": 0.041153246103369526 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768081, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768081 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3574468085106383, + "acc_stderr": 0.03132941789476425, + "acc_norm": 0.3574468085106383, + "acc_norm_stderr": 0.03132941789476425 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.45180722891566266, + "acc_stderr": 0.03874371556587953, + "acc_norm": 0.45180722891566266, + "acc_norm_stderr": 0.03874371556587953 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4887459807073955, + "acc_stderr": 0.028390897396863526, + "acc_norm": 0.4887459807073955, + "acc_norm_stderr": 0.028390897396863526 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.45739910313901344, + "acc_stderr": 0.03343577705583064, + "acc_norm": 0.45739910313901344, + "acc_norm_stderr": 0.03343577705583064 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4580152671755725, + "acc_stderr": 0.04369802690578756, + "acc_norm": 0.4580152671755725, + "acc_norm_stderr": 0.04369802690578756 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620333 + }, + 
"harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6060606060606061, + "acc_stderr": 0.03481285338232963, + "acc_norm": 0.6060606060606061, + "acc_norm_stderr": 0.03481285338232963 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.43448275862068964, + "acc_stderr": 0.04130740879555497, + "acc_norm": 0.43448275862068964, + "acc_norm_stderr": 0.04130740879555497 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3235294117647059, + "acc_stderr": 0.04655010411319616, + "acc_norm": 0.3235294117647059, + "acc_norm_stderr": 0.04655010411319616 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.592436974789916, + "acc_stderr": 0.03191863374478466, + "acc_norm": 0.592436974789916, + "acc_norm_stderr": 0.03191863374478466 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5256410256410257, + "acc_stderr": 0.025317649726448666, + "acc_norm": 0.5256410256410257, + "acc_norm_stderr": 0.025317649726448666 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.48, + "acc_stderr": 0.05021167315686781, + "acc_norm": 0.48, + "acc_norm_stderr": 0.05021167315686781 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5277777777777778, + "acc_stderr": 0.04826217294139894, + "acc_norm": 0.5277777777777778, + "acc_norm_stderr": 0.04826217294139894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4482758620689655, + "acc_stderr": 0.034991131376767445, + "acc_norm": 0.4482758620689655, + "acc_norm_stderr": 0.034991131376767445 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4838709677419355, + "acc_stderr": 0.028429203176724555, + "acc_norm": 0.4838709677419355, + "acc_norm_stderr": 0.028429203176724555 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6837606837606838, + "acc_stderr": 0.030463656747340265, + "acc_norm": 0.6837606837606838, + 
"acc_norm_stderr": 0.030463656747340265 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4339622641509434, + "acc_stderr": 0.030503292013342603, + "acc_norm": 0.4339622641509434, + "acc_norm_stderr": 0.030503292013342603 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.04769300568972744, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.04769300568972744 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2814814814814815, + "acc_stderr": 0.027420019350945277, + "acc_norm": 0.2814814814814815, + "acc_norm_stderr": 0.027420019350945277 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.037579499229433426, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.037579499229433426 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.582089552238806, + "acc_stderr": 0.034875586404620636, + "acc_norm": 0.582089552238806, + "acc_norm_stderr": 0.034875586404620636 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3699421965317919, + "acc_stderr": 0.03681229633394319, + "acc_norm": 0.3699421965317919, + "acc_norm_stderr": 0.03681229633394319 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.34656084656084657, + "acc_stderr": 0.024508777521028424, + "acc_norm": 0.34656084656084657, + "acc_norm_stderr": 0.024508777521028424 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.040166600304512336, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.040166600304512336 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.57, + "acc_stderr": 0.04975698519562427, + "acc_norm": 0.57, + "acc_norm_stderr": 0.04975698519562427 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.49421965317919075, + "acc_stderr": 
0.026917296179149116, + "acc_norm": 0.49421965317919075, + "acc_norm_stderr": 0.026917296179149116 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.50920245398773, + "acc_stderr": 0.03927705600787443, + "acc_norm": 0.50920245398773, + "acc_norm_stderr": 0.03927705600787443 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.027801656212323667, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.027801656212323667 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5595854922279793, + "acc_stderr": 0.035827245300360945, + "acc_norm": 0.5595854922279793, + "acc_norm_stderr": 0.035827245300360945 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.30701754385964913, + "acc_stderr": 0.043391383225798594, + "acc_norm": 0.30701754385964913, + "acc_norm_stderr": 0.043391383225798594 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5394495412844037, + "acc_stderr": 0.0213704946099951, + "acc_norm": 0.5394495412844037, + "acc_norm_stderr": 0.0213704946099951 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.42063492063492064, + "acc_stderr": 0.04415438226743743, + "acc_norm": 0.42063492063492064, + "acc_norm_stderr": 0.04415438226743743 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5130718954248366, + "acc_stderr": 0.028620130800700246, + "acc_norm": 0.5130718954248366, + "acc_norm_stderr": 0.028620130800700246 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956913, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956913 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6694214876033058, + "acc_stderr": 0.04294340845212094, + "acc_norm": 0.6694214876033058, + "acc_norm_stderr": 0.04294340845212094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 
0.4144736842105263, + "acc_stderr": 0.04008973785779206, + "acc_norm": 0.4144736842105263, + "acc_norm_stderr": 0.04008973785779206 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4068627450980392, + "acc_stderr": 0.01987380200506118, + "acc_norm": 0.4068627450980392, + "acc_norm_stderr": 0.01987380200506118 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.29432624113475175, + "acc_stderr": 0.027187127011503796, + "acc_norm": 0.29432624113475175, + "acc_norm_stderr": 0.027187127011503796 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.33035714285714285, + "acc_stderr": 0.04464285714285713, + "acc_norm": 0.33035714285714285, + "acc_norm_stderr": 0.04464285714285713 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4305555555555556, + "acc_stderr": 0.03376922151252335, + "acc_norm": 0.4305555555555556, + "acc_norm_stderr": 0.03376922151252335 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2681564245810056, + "acc_stderr": 0.014816119635316996, + "acc_norm": 0.2681564245810056, + "acc_norm_stderr": 0.014816119635316996 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.04960449637488584, + "acc_norm": 0.42, + "acc_norm_stderr": 0.04960449637488584 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4522058823529412, + "acc_stderr": 0.030233758551596455, + "acc_norm": 0.4522058823529412, + "acc_norm_stderr": 0.030233758551596455 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5469387755102041, + "acc_stderr": 0.031867859300041275, + "acc_norm": 0.5469387755102041, + "acc_norm_stderr": 0.031867859300041275 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6075949367088608, + "acc_stderr": 0.031784718745647304, + "acc_norm": 0.6075949367088608, + 
"acc_norm_stderr": 0.031784718745647304 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3474576271186441, + "acc_stderr": 0.012161417729749798, + "acc_norm": 0.3474576271186441, + "acc_norm_stderr": 0.012161417729749798 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4803921568627451, + "acc_stderr": 0.03506612560524866, + "acc_norm": 0.4803921568627451, + "acc_norm_stderr": 0.03506612560524866 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4484848484848485, + "acc_stderr": 0.03883565977956929, + "acc_norm": 0.4484848484848485, + "acc_norm_stderr": 0.03883565977956929 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.4614443084455324, + "mc1_stderr": 0.017451384104637455, + "mc2": 0.6015431734592782, + "mc2_stderr": 0.016416843690364818 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.48406139315230223, + "acc_stderr": 0.017181617837190192, + "acc_norm": 0.5112160566706021, + "acc_norm_stderr": 0.01718602846948929 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + 
"harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Alphacode-AI/AlphaMist7B-slr-v4-slow", + "model_sha": "ec6f6d9fa8f99e9a1197ab981bc496ceab23797b", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} 
\ No newline at end of file diff --git a/Alphacode-AI/AlphaMist7B-slr-v4-slow2/result_2024-05-13 01:43:05.json b/Alphacode-AI/AlphaMist7B-slr-v4-slow2/result_2024-05-13 01:43:05.json new file mode 100644 index 0000000000000000000000000000000000000000..0187d4dc1489b1e28b214cbd71414bafc2c3d48a --- /dev/null +++ b/Alphacode-AI/AlphaMist7B-slr-v4-slow2/result_2024-05-13 01:43:05.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.48890784982935154, + "acc_stderr": 0.01460779491401306, + "acc_norm": 0.53839590443686, + "acc_norm_stderr": 0.01456824555029636 + }, + "harness|ko_hellaswag|10": { + "acc": 0.6321449910376419, + "acc_stderr": 0.004812361060493917, + "acc_norm": 0.7341167098187612, + "acc_norm_stderr": 0.004408994868650099 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.49707602339181284, + "acc_stderr": 0.03834759370936839, + "acc_norm": 0.49707602339181284, + "acc_norm_stderr": 0.03834759370936839 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6116504854368932, + "acc_stderr": 0.048257293373563895, + "acc_norm": 0.6116504854368932, + "acc_norm_stderr": 0.048257293373563895 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5134099616858238, + "acc_stderr": 0.017873531736510368, + "acc_norm": 0.5134099616858238, + "acc_norm_stderr": 0.017873531736510368 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34814814814814815, + "acc_stderr": 0.041153246103369526, + "acc_norm": 0.34814814814814815, + "acc_norm_stderr": 0.041153246103369526 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.37446808510638296, + "acc_stderr": 0.031639106653672915, + "acc_norm": 0.37446808510638296, + "acc_norm_stderr": 0.031639106653672915 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.43373493975903615, + "acc_stderr": 0.03858158940685516, + "acc_norm": 
0.43373493975903615, + "acc_norm_stderr": 0.03858158940685516 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.47266881028938906, + "acc_stderr": 0.028355633568328188, + "acc_norm": 0.47266881028938906, + "acc_norm_stderr": 0.028355633568328188 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4663677130044843, + "acc_stderr": 0.033481800170603065, + "acc_norm": 0.4663677130044843, + "acc_norm_stderr": 0.033481800170603065 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.46564885496183206, + "acc_stderr": 0.043749285605997376, + "acc_norm": 0.46564885496183206, + "acc_norm_stderr": 0.043749285605997376 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.55, + "acc_stderr": 0.05, + "acc_norm": 0.55, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6111111111111112, + "acc_stderr": 0.0347327959083696, + "acc_norm": 0.6111111111111112, + "acc_norm_stderr": 0.0347327959083696 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4, + "acc_stderr": 0.040824829046386284, + "acc_norm": 0.4, + "acc_norm_stderr": 0.040824829046386284 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.04533838195929777, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.04533838195929777 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5882352941176471, + "acc_stderr": 0.031968769891957786, + "acc_norm": 0.5882352941176471, + "acc_norm_stderr": 0.031968769891957786 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5282051282051282, + "acc_stderr": 0.02531063925493387, + "acc_norm": 0.5282051282051282, + "acc_norm_stderr": 0.02531063925493387 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.48, + "acc_stderr": 0.05021167315686781, + "acc_norm": 0.48, + "acc_norm_stderr": 0.05021167315686781 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 
0.047258156262526045 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5185185185185185, + "acc_stderr": 0.0483036602463533, + "acc_norm": 0.5185185185185185, + "acc_norm_stderr": 0.0483036602463533 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4433497536945813, + "acc_stderr": 0.03495334582162933, + "acc_norm": 0.4433497536945813, + "acc_norm_stderr": 0.03495334582162933 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4870967741935484, + "acc_stderr": 0.028434533152681855, + "acc_norm": 0.4870967741935484, + "acc_norm_stderr": 0.028434533152681855 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.030882736974138656, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.030882736974138656 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.42641509433962266, + "acc_stderr": 0.030437794342983052, + "acc_norm": 0.42641509433962266, + "acc_norm_stderr": 0.030437794342983052 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5363636363636364, + "acc_stderr": 0.04776449162396197, + "acc_norm": 0.5363636363636364, + "acc_norm_stderr": 0.04776449162396197 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2740740740740741, + "acc_stderr": 0.027195934804085626, + "acc_norm": 0.2740740740740741, + "acc_norm_stderr": 0.027195934804085626 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.037101857261199946, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.037101857261199946 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5771144278606966, + "acc_stderr": 0.034932317774212816, + "acc_norm": 0.5771144278606966, + "acc_norm_stderr": 0.034932317774212816 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3815028901734104, + "acc_stderr": 0.03703851193099521, + "acc_norm": 0.3815028901734104, + "acc_norm_stderr": 0.03703851193099521 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3544973544973545, + 
"acc_stderr": 0.024636830602842, + "acc_norm": 0.3544973544973545, + "acc_norm_stderr": 0.024636830602842 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.040166600304512336, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.040166600304512336 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.56, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.56, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5028901734104047, + "acc_stderr": 0.02691864538323901, + "acc_norm": 0.5028901734104047, + "acc_norm_stderr": 0.02691864538323901 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.49079754601226994, + "acc_stderr": 0.03927705600787443, + "acc_norm": 0.49079754601226994, + "acc_norm_stderr": 0.03927705600787443 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4845679012345679, + "acc_stderr": 0.027807490044276198, + "acc_norm": 0.4845679012345679, + "acc_norm_stderr": 0.027807490044276198 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5440414507772021, + "acc_stderr": 0.03594413711272436, + "acc_norm": 0.5440414507772021, + "acc_norm_stderr": 0.03594413711272436 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.32456140350877194, + "acc_stderr": 0.044045561573747685, + "acc_norm": 0.32456140350877194, + "acc_norm_stderr": 0.044045561573747685 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5394495412844037, + "acc_stderr": 0.021370494609995093, + "acc_norm": 0.5394495412844037, + "acc_norm_stderr": 0.021370494609995093 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 
0.42063492063492064, + "acc_stderr": 0.04415438226743743, + "acc_norm": 0.42063492063492064, + "acc_norm_stderr": 0.04415438226743743 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.49673202614379086, + "acc_stderr": 0.02862930519400354, + "acc_norm": 0.49673202614379086, + "acc_norm_stderr": 0.02862930519400354 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.52, + "acc_stderr": 0.05021167315686779, + "acc_norm": 0.52, + "acc_norm_stderr": 0.05021167315686779 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6776859504132231, + "acc_stderr": 0.04266416363352168, + "acc_norm": 0.6776859504132231, + "acc_norm_stderr": 0.04266416363352168 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4473684210526316, + "acc_stderr": 0.04046336883978251, + "acc_norm": 0.4473684210526316, + "acc_norm_stderr": 0.04046336883978251 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4068627450980392, + "acc_stderr": 0.01987380200506118, + "acc_norm": 0.4068627450980392, + "acc_norm_stderr": 0.01987380200506118 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.31560283687943264, + "acc_stderr": 0.027724989449509314, + "acc_norm": 0.31560283687943264, + "acc_norm_stderr": 0.027724989449509314 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.33035714285714285, + "acc_stderr": 0.04464285714285713, + "acc_norm": 0.33035714285714285, + "acc_norm_stderr": 0.04464285714285713 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4305555555555556, + "acc_stderr": 0.03376922151252335, + "acc_norm": 0.4305555555555556, + "acc_norm_stderr": 0.03376922151252335 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2569832402234637, + "acc_stderr": 0.014614465821966346, + "acc_norm": 0.2569832402234637, + "acc_norm_stderr": 0.014614465821966346 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + 
"harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.45588235294117646, + "acc_stderr": 0.03025437257397669, + "acc_norm": 0.45588235294117646, + "acc_norm_stderr": 0.03025437257397669 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5591836734693878, + "acc_stderr": 0.03178419114175363, + "acc_norm": 0.5591836734693878, + "acc_norm_stderr": 0.03178419114175363 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5949367088607594, + "acc_stderr": 0.031955147413706725, + "acc_norm": 0.5949367088607594, + "acc_norm_stderr": 0.031955147413706725 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3520208604954368, + "acc_stderr": 0.012198140605353609, + "acc_norm": 0.3520208604954368, + "acc_norm_stderr": 0.012198140605353609 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.47058823529411764, + "acc_stderr": 0.03503235296367993, + "acc_norm": 0.47058823529411764, + "acc_norm_stderr": 0.03503235296367993 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4727272727272727, + "acc_stderr": 0.03898531605579419, + "acc_norm": 0.4727272727272727, + "acc_norm_stderr": 0.03898531605579419 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.4785801713586291, + "mc1_stderr": 0.017487432144711806, + "mc2": 0.621189713542375, + "mc2_stderr": 0.016320011615209882 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4805194805194805, + "acc_stderr": 0.017177301992342544, + "acc_norm": 0.5182998819362455, + "acc_norm_stderr": 0.017178836639177752 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + 
"harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + 
"harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Alphacode-AI/AlphaMist7B-slr-v4-slow2", + "model_sha": "672cbca0feabcfba309c030ea54304b7fd44a292", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Alphacode-AI/AlphaMist7B-slr-v4/result_2024-04-30 07:27:31.json b/Alphacode-AI/AlphaMist7B-slr-v4/result_2024-04-30 07:27:31.json new file mode 100644 index 0000000000000000000000000000000000000000..4390a8ef17a8ddbfec2875ded03f6d3414250104 --- /dev/null +++ b/Alphacode-AI/AlphaMist7B-slr-v4/result_2024-04-30 07:27:31.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.46245733788395904, + "acc_stderr": 0.014570144495075576, + "acc_norm": 0.5119453924914675, + "acc_norm_stderr": 0.014607220340597164 + }, + "harness|ko_hellaswag|10": { + "acc": 0.5524795857398924, + "acc_stderr": 0.004962220512548354, + "acc_norm": 0.6762597092212707, + "acc_norm_stderr": 0.004669459891917706 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5029239766081871, + "acc_stderr": 0.03834759370936839, + "acc_norm": 0.5029239766081871, + "acc_norm_stderr": 0.03834759370936839 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5728155339805825, + "acc_stderr": 0.04897957737781168, + "acc_norm": 0.5728155339805825, + "acc_norm_stderr": 0.04897957737781168 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5287356321839081, + "acc_stderr": 0.017850410794380173, + "acc_norm": 0.5287356321839081, + 
"acc_norm_stderr": 0.017850410794380173 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.362962962962963, + "acc_stderr": 0.04153948404742398, + "acc_norm": 0.362962962962963, + "acc_norm_stderr": 0.04153948404742398 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909284, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909284 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.34893617021276596, + "acc_stderr": 0.031158522131357773, + "acc_norm": 0.34893617021276596, + "acc_norm_stderr": 0.031158522131357773 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.463855421686747, + "acc_stderr": 0.03882310850890594, + "acc_norm": 0.463855421686747, + "acc_norm_stderr": 0.03882310850890594 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5144694533762058, + "acc_stderr": 0.028386198084177687, + "acc_norm": 0.5144694533762058, + "acc_norm_stderr": 0.028386198084177687 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.45739910313901344, + "acc_stderr": 0.03343577705583064, + "acc_norm": 0.45739910313901344, + "acc_norm_stderr": 0.03343577705583064 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4580152671755725, + "acc_stderr": 0.04369802690578756, + "acc_norm": 0.4580152671755725, + "acc_norm_stderr": 0.04369802690578756 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6161616161616161, + "acc_stderr": 0.03464881675016338, + "acc_norm": 0.6161616161616161, + "acc_norm_stderr": 0.03464881675016338 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.45517241379310347, + "acc_stderr": 0.04149886942192117, + "acc_norm": 0.45517241379310347, + "acc_norm_stderr": 0.04149886942192117 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.04617034827006716, + "acc_norm": 0.3137254901960784, 
+ "acc_norm_stderr": 0.04617034827006716 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5840336134453782, + "acc_stderr": 0.03201650100739611, + "acc_norm": 0.5840336134453782, + "acc_norm_stderr": 0.03201650100739611 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5333333333333333, + "acc_stderr": 0.025294608023986462, + "acc_norm": 0.5333333333333333, + "acc_norm_stderr": 0.025294608023986462 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5462962962962963, + "acc_stderr": 0.04812917324536823, + "acc_norm": 0.5462962962962963, + "acc_norm_stderr": 0.04812917324536823 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.43842364532019706, + "acc_stderr": 0.03491207857486519, + "acc_norm": 0.43842364532019706, + "acc_norm_stderr": 0.03491207857486519 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.47419354838709676, + "acc_stderr": 0.028406095057653315, + "acc_norm": 0.47419354838709676, + "acc_norm_stderr": 0.028406095057653315 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6623931623931624, + "acc_stderr": 0.030980296992618558, + "acc_norm": 0.6623931623931624, + "acc_norm_stderr": 0.030980296992618558 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.42641509433962266, + "acc_stderr": 0.030437794342983052, + "acc_norm": 0.42641509433962266, + "acc_norm_stderr": 0.030437794342983052 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5636363636363636, + "acc_stderr": 0.04750185058907296, + "acc_norm": 0.5636363636363636, + "acc_norm_stderr": 0.04750185058907296 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2851851851851852, + "acc_stderr": 
0.027528599210340496, + "acc_norm": 0.2851851851851852, + "acc_norm_stderr": 0.027528599210340496 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.03780445850526733, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.03780445850526733 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5870646766169154, + "acc_stderr": 0.03481520803367348, + "acc_norm": 0.5870646766169154, + "acc_norm_stderr": 0.03481520803367348 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3872832369942196, + "acc_stderr": 0.03714325906302065, + "acc_norm": 0.3872832369942196, + "acc_norm_stderr": 0.03714325906302065 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3492063492063492, + "acc_stderr": 0.02455229220934266, + "acc_norm": 0.3492063492063492, + "acc_norm_stderr": 0.02455229220934266 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.04076663253918567, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.04076663253918567 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5, + "acc_stderr": 0.026919095102908273, + "acc_norm": 0.5, + "acc_norm_stderr": 0.026919095102908273 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.49693251533742333, + "acc_stderr": 0.03928297078179662, + "acc_norm": 0.49693251533742333, + "acc_norm_stderr": 0.03928297078179662 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.49382716049382713, + "acc_stderr": 0.027818623962583295, + "acc_norm": 0.49382716049382713, + "acc_norm_stderr": 0.027818623962583295 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 
0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5544041450777202, + "acc_stderr": 0.03587014986075659, + "acc_norm": 0.5544041450777202, + "acc_norm_stderr": 0.03587014986075659 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.30701754385964913, + "acc_stderr": 0.0433913832257986, + "acc_norm": 0.30701754385964913, + "acc_norm_stderr": 0.0433913832257986 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5339449541284403, + "acc_stderr": 0.021387863350353996, + "acc_norm": 0.5339449541284403, + "acc_norm_stderr": 0.021387863350353996 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.40476190476190477, + "acc_stderr": 0.043902592653775614, + "acc_norm": 0.40476190476190477, + "acc_norm_stderr": 0.043902592653775614 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5163398692810458, + "acc_stderr": 0.02861462475280544, + "acc_norm": 0.5163398692810458, + "acc_norm_stderr": 0.02861462475280544 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956913, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956913 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6859504132231405, + "acc_stderr": 0.04236964753041019, + "acc_norm": 0.6859504132231405, + "acc_norm_stderr": 0.04236964753041019 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.42105263157894735, + "acc_stderr": 0.04017901275981748, + "acc_norm": 0.42105263157894735, + "acc_norm_stderr": 0.04017901275981748 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.019944914136873573, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.019944914136873573 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2978723404255319, + "acc_stderr": 0.027281608344469414, + "acc_norm": 0.2978723404255319, + "acc_norm_stderr": 0.027281608344469414 + }, + 
"harness|ko_mmlu_machine_learning|5": { + "acc": 0.32142857142857145, + "acc_stderr": 0.0443280405529152, + "acc_norm": 0.32142857142857145, + "acc_norm_stderr": 0.0443280405529152 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4212962962962963, + "acc_stderr": 0.033674621388960775, + "acc_norm": 0.4212962962962963, + "acc_norm_stderr": 0.033674621388960775 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24804469273743016, + "acc_stderr": 0.014444157808261466, + "acc_norm": 0.24804469273743016, + "acc_norm_stderr": 0.014444157808261466 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.44, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.45955882352941174, + "acc_stderr": 0.03027332507734576, + "acc_norm": 0.45955882352941174, + "acc_norm_stderr": 0.03027332507734576 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5469387755102041, + "acc_stderr": 0.031867859300041275, + "acc_norm": 0.5469387755102041, + "acc_norm_stderr": 0.031867859300041275 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.569620253164557, + "acc_stderr": 0.03223017195937599, + "acc_norm": 0.569620253164557, + "acc_norm_stderr": 0.03223017195937599 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3494132985658409, + "acc_stderr": 0.012177306252786686, + "acc_norm": 0.3494132985658409, + "acc_norm_stderr": 0.012177306252786686 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.47549019607843135, + "acc_stderr": 0.03505093194348798, + "acc_norm": 0.47549019607843135, + "acc_norm_stderr": 0.03505093194348798 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4484848484848485, + "acc_stderr": 
0.038835659779569286, + "acc_norm": 0.4484848484848485, + "acc_norm_stderr": 0.038835659779569286 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.41003671970624234, + "mc1_stderr": 0.017217844717449318, + "mc2": 0.5590255533733325, + "mc2_stderr": 0.016245353668620174 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4887839433293979, + "acc_stderr": 0.017186028469489283, + "acc_norm": 0.5088547815820543, + "acc_norm_stderr": 0.01718765819933674 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + 
"harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Alphacode-AI/AlphaMist7B-slr-v4", + "model_sha": "250d95ccadb41f05938faaaa100786574ba92400", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Alphacode-AI/AlphaMist7B-slr-v4_1/result_2024-04-10 08:23:33.json b/Alphacode-AI/AlphaMist7B-slr-v4_1/result_2024-04-10 08:23:33.json new file mode 100644 index 0000000000000000000000000000000000000000..45bd929ebb41316ead0af8e59cfc11113b69e140 --- /dev/null +++ b/Alphacode-AI/AlphaMist7B-slr-v4_1/result_2024-04-10 08:23:33.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.46245733788395904, + "acc_stderr": 0.014570144495075576, + "acc_norm": 
0.5093856655290102, + "acc_norm_stderr": 0.014608816322065003 + }, + "harness|ko_hellaswag|10": { + "acc": 0.5617406891057558, + "acc_stderr": 0.004951594063272057, + "acc_norm": 0.6797450707030472, + "acc_norm_stderr": 0.004656208951541448 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5087719298245614, + "acc_stderr": 0.038342347441649924, + "acc_norm": 0.5087719298245614, + "acc_norm_stderr": 0.038342347441649924 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6116504854368932, + "acc_stderr": 0.048257293373563895, + "acc_norm": 0.6116504854368932, + "acc_norm_stderr": 0.048257293373563895 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5185185185185185, + "acc_stderr": 0.017867695938429774, + "acc_norm": 0.5185185185185185, + "acc_norm_stderr": 0.017867695938429774 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3851851851851852, + "acc_stderr": 0.042039210401562783, + "acc_norm": 0.3851851851851852, + "acc_norm_stderr": 0.042039210401562783 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.35319148936170214, + "acc_stderr": 0.03124532520276193, + "acc_norm": 0.35319148936170214, + "acc_norm_stderr": 0.03124532520276193 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.463855421686747, + "acc_stderr": 0.03882310850890594, + "acc_norm": 0.463855421686747, + "acc_norm_stderr": 0.03882310850890594 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5048231511254019, + "acc_stderr": 0.028396770444111298, + "acc_norm": 0.5048231511254019, + "acc_norm_stderr": 0.028396770444111298 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4349775784753363, + "acc_stderr": 0.03327283370271344, + "acc_norm": 0.4349775784753363, + "acc_norm_stderr": 0.03327283370271344 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48091603053435117, + "acc_stderr": 0.04382094705550989, + 
"acc_norm": 0.48091603053435117, + "acc_norm_stderr": 0.04382094705550989 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.55, + "acc_stderr": 0.05, + "acc_norm": 0.55, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6212121212121212, + "acc_stderr": 0.03456088731993747, + "acc_norm": 0.6212121212121212, + "acc_norm_stderr": 0.03456088731993747 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.45517241379310347, + "acc_stderr": 0.04149886942192117, + "acc_norm": 0.45517241379310347, + "acc_norm_stderr": 0.04149886942192117 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.04389869956808777, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.04389869956808777 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5798319327731093, + "acc_stderr": 0.03206183783236152, + "acc_norm": 0.5798319327731093, + "acc_norm_stderr": 0.03206183783236152 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5307692307692308, + "acc_stderr": 0.025302958890850158, + "acc_norm": 0.5307692307692308, + "acc_norm_stderr": 0.025302958890850158 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5185185185185185, + "acc_stderr": 0.0483036602463533, + "acc_norm": 0.5185185185185185, + "acc_norm_stderr": 0.0483036602463533 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.03481904844438803, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.03481904844438803 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.47096774193548385, + "acc_stderr": 0.028396016402761008, + 
"acc_norm": 0.47096774193548385, + "acc_norm_stderr": 0.028396016402761008 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6752136752136753, + "acc_stderr": 0.03067902276549883, + "acc_norm": 0.6752136752136753, + "acc_norm_stderr": 0.03067902276549883 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4377358490566038, + "acc_stderr": 0.030533338430467523, + "acc_norm": 0.4377358490566038, + "acc_norm_stderr": 0.030533338430467523 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5272727272727272, + "acc_stderr": 0.04782001791380061, + "acc_norm": 0.5272727272727272, + "acc_norm_stderr": 0.04782001791380061 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.28888888888888886, + "acc_stderr": 0.02763490726417854, + "acc_norm": 0.28888888888888886, + "acc_norm_stderr": 0.02763490726417854 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + "acc_stderr": 0.03684881521389023, + "acc_norm": 0.2847682119205298, + "acc_norm_stderr": 0.03684881521389023 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5671641791044776, + "acc_stderr": 0.03503490923673281, + "acc_norm": 0.5671641791044776, + "acc_norm_stderr": 0.03503490923673281 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3988439306358382, + "acc_stderr": 0.03733626655383509, + "acc_norm": 0.3988439306358382, + "acc_norm_stderr": 0.03733626655383509 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.36243386243386244, + "acc_stderr": 0.024757473902752052, + "acc_norm": 0.36243386243386244, + "acc_norm_stderr": 0.024757473902752052 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.040166600304512336, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.040166600304512336 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 
0.62, + "acc_stderr": 0.04878317312145634, + "acc_norm": 0.62, + "acc_norm_stderr": 0.04878317312145634 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.49710982658959535, + "acc_stderr": 0.026918645383239004, + "acc_norm": 0.49710982658959535, + "acc_norm_stderr": 0.026918645383239004 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.49693251533742333, + "acc_stderr": 0.03928297078179663, + "acc_norm": 0.49693251533742333, + "acc_norm_stderr": 0.03928297078179663 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4691358024691358, + "acc_stderr": 0.027767689606833935, + "acc_norm": 0.4691358024691358, + "acc_norm_stderr": 0.027767689606833935 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421296, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421296 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5803108808290155, + "acc_stderr": 0.03561587327685884, + "acc_norm": 0.5803108808290155, + "acc_norm_stderr": 0.03561587327685884 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2982456140350877, + "acc_stderr": 0.04303684033537317, + "acc_norm": 0.2982456140350877, + "acc_norm_stderr": 0.04303684033537317 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5357798165137615, + "acc_stderr": 0.021382364775701896, + "acc_norm": 0.5357798165137615, + "acc_norm_stderr": 0.021382364775701896 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4603174603174603, + "acc_stderr": 0.04458029125470973, + "acc_norm": 0.4603174603174603, + "acc_norm_stderr": 0.04458029125470973 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5098039215686274, + "acc_stderr": 0.02862441255016795, + "acc_norm": 0.5098039215686274, + "acc_norm_stderr": 0.02862441255016795 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.52, + "acc_stderr": 0.05021167315686779, + "acc_norm": 0.52, + "acc_norm_stderr": 0.05021167315686779 + }, + "harness|ko_mmlu_international_law|5": { + 
"acc": 0.6694214876033058, + "acc_stderr": 0.04294340845212094, + "acc_norm": 0.6694214876033058, + "acc_norm_stderr": 0.04294340845212094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4342105263157895, + "acc_stderr": 0.04033565667848319, + "acc_norm": 0.4342105263157895, + "acc_norm_stderr": 0.04033565667848319 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4019607843137255, + "acc_stderr": 0.01983517648437538, + "acc_norm": 0.4019607843137255, + "acc_norm_stderr": 0.01983517648437538 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.30851063829787234, + "acc_stderr": 0.02755336616510137, + "acc_norm": 0.30851063829787234, + "acc_norm_stderr": 0.02755336616510137 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.33035714285714285, + "acc_stderr": 0.04464285714285714, + "acc_norm": 0.33035714285714285, + "acc_norm_stderr": 0.04464285714285714 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.42592592592592593, + "acc_stderr": 0.033723432716530624, + "acc_norm": 0.42592592592592593, + "acc_norm_stderr": 0.033723432716530624 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23910614525139665, + "acc_stderr": 0.014265554192331165, + "acc_norm": 0.23910614525139665, + "acc_norm_stderr": 0.014265554192331165 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.45955882352941174, + "acc_stderr": 0.03027332507734576, + "acc_norm": 0.45955882352941174, + "acc_norm_stderr": 0.03027332507734576 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5469387755102041, + "acc_stderr": 0.031867859300041275, + "acc_norm": 0.5469387755102041, + "acc_norm_stderr": 
0.031867859300041275 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5654008438818565, + "acc_stderr": 0.03226759995510144, + "acc_norm": 0.5654008438818565, + "acc_norm_stderr": 0.03226759995510144 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3435462842242503, + "acc_stderr": 0.012128961174190156, + "acc_norm": 0.3435462842242503, + "acc_norm_stderr": 0.012128961174190156 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.44607843137254904, + "acc_stderr": 0.03488845451304974, + "acc_norm": 0.44607843137254904, + "acc_norm_stderr": 0.03488845451304974 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4303030303030303, + "acc_stderr": 0.03866225962879077, + "acc_norm": 0.4303030303030303, + "acc_norm_stderr": 0.03866225962879077 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.40269277845777235, + "mc1_stderr": 0.017168830935187222, + "mc2": 0.5704428288497218, + "mc2_stderr": 0.01623760406081459 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4852420306965762, + "acc_stderr": 0.01718286443499856, + "acc_norm": 0.5171192443919717, + "acc_norm_stderr": 0.01718027524608563 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + 
"harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Alphacode-AI/AlphaMist7B-slr-v4_1", + "model_sha": 
"ea74366badcf5b383d55ed046644e877311926c0", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Alphacode-AI/AlphaMist7B-slr-v4_1t/result_2024-05-09 09:51:27.json b/Alphacode-AI/AlphaMist7B-slr-v4_1t/result_2024-05-09 09:51:27.json new file mode 100644 index 0000000000000000000000000000000000000000..b588fd5cf74757beb0794df6798d96ba2ca9af3b --- /dev/null +++ b/Alphacode-AI/AlphaMist7B-slr-v4_1t/result_2024-05-09 09:51:27.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.46075085324232085, + "acc_stderr": 0.014566303676636584, + "acc_norm": 0.4948805460750853, + "acc_norm_stderr": 0.014610624890309166 + }, + "harness|ko_hellaswag|10": { + "acc": 0.5609440350527783, + "acc_stderr": 0.004952576863315216, + "acc_norm": 0.6825333598884684, + "acc_norm_stderr": 0.004645393477680675 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.49707602339181284, + "acc_stderr": 0.03834759370936839, + "acc_norm": 0.49707602339181284, + "acc_norm_stderr": 0.03834759370936839 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5825242718446602, + "acc_stderr": 0.04882840548212238, + "acc_norm": 0.5825242718446602, + "acc_norm_stderr": 0.04882840548212238 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5236270753512133, + "acc_stderr": 0.017859989765176453, + "acc_norm": 0.5236270753512133, + "acc_norm_stderr": 0.017859989765176453 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34074074074074073, + "acc_stderr": 0.04094376269996793, + "acc_norm": 0.34074074074074073, + "acc_norm_stderr": 0.04094376269996793 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3829787234042553, + "acc_stderr": 0.03177821250236922, + 
"acc_norm": 0.3829787234042553, + "acc_norm_stderr": 0.03177821250236922 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4397590361445783, + "acc_stderr": 0.03864139923699121, + "acc_norm": 0.4397590361445783, + "acc_norm_stderr": 0.03864139923699121 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4887459807073955, + "acc_stderr": 0.028390897396863526, + "acc_norm": 0.4887459807073955, + "acc_norm_stderr": 0.028390897396863526 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4439461883408072, + "acc_stderr": 0.03334625674242728, + "acc_norm": 0.4439461883408072, + "acc_norm_stderr": 0.03334625674242728 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48091603053435117, + "acc_stderr": 0.04382094705550989, + "acc_norm": 0.48091603053435117, + "acc_norm_stderr": 0.04382094705550989 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5959595959595959, + "acc_stderr": 0.03496130972056128, + "acc_norm": 0.5959595959595959, + "acc_norm_stderr": 0.03496130972056128 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4206896551724138, + "acc_stderr": 0.0411391498118926, + "acc_norm": 0.4206896551724138, + "acc_norm_stderr": 0.0411391498118926 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.04488482852329017, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.04488482852329017 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5672268907563025, + "acc_stderr": 0.032183581077426124, + "acc_norm": 0.5672268907563025, + "acc_norm_stderr": 0.032183581077426124 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5461538461538461, + "acc_stderr": 0.025242770987126184, + "acc_norm": 0.5461538461538461, + "acc_norm_stderr": 0.025242770987126184 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 
0.55, + "acc_stderr": 0.04999999999999999, + "acc_norm": 0.55, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5277777777777778, + "acc_stderr": 0.04826217294139894, + "acc_norm": 0.5277777777777778, + "acc_norm_stderr": 0.04826217294139894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3891625615763547, + "acc_stderr": 0.03430462416103872, + "acc_norm": 0.3891625615763547, + "acc_norm_stderr": 0.03430462416103872 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.45161290322580644, + "acc_stderr": 0.02831050034856839, + "acc_norm": 0.45161290322580644, + "acc_norm_stderr": 0.02831050034856839 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6965811965811965, + "acc_stderr": 0.030118210106942638, + "acc_norm": 0.6965811965811965, + "acc_norm_stderr": 0.030118210106942638 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4226415094339623, + "acc_stderr": 0.03040233144576954, + "acc_norm": 0.4226415094339623, + "acc_norm_stderr": 0.03040233144576954 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5363636363636364, + "acc_stderr": 0.047764491623961985, + "acc_norm": 0.5363636363636364, + "acc_norm_stderr": 0.047764491623961985 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.027940457136228416, + "acc_norm": 0.3, + "acc_norm_stderr": 0.027940457136228416 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.03710185726119995, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.03710185726119995 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6069651741293532, + "acc_stderr": 0.0345368246603156, + "acc_norm": 0.6069651741293532, + "acc_norm_stderr": 0.0345368246603156 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 
0.3699421965317919, + "acc_stderr": 0.03681229633394319, + "acc_norm": 0.3699421965317919, + "acc_norm_stderr": 0.03681229633394319 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.373015873015873, + "acc_stderr": 0.02490699045899257, + "acc_norm": 0.373015873015873, + "acc_norm_stderr": 0.02490699045899257 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3819444444444444, + "acc_stderr": 0.040629907841466674, + "acc_norm": 0.3819444444444444, + "acc_norm_stderr": 0.040629907841466674 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5, + "acc_stderr": 0.026919095102908273, + "acc_norm": 0.5, + "acc_norm_stderr": 0.026919095102908273 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4785276073619632, + "acc_stderr": 0.03924746876751129, + "acc_norm": 0.4785276073619632, + "acc_norm_stderr": 0.03924746876751129 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.027777777777777804, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.027777777777777804 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5699481865284974, + "acc_stderr": 0.03572954333144808, + "acc_norm": 0.5699481865284974, + "acc_norm_stderr": 0.03572954333144808 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2982456140350877, + "acc_stderr": 0.043036840335373173, + "acc_norm": 0.2982456140350877, + "acc_norm_stderr": 0.043036840335373173 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 
0.5192660550458715, + "acc_stderr": 0.02142140298254889, + "acc_norm": 0.5192660550458715, + "acc_norm_stderr": 0.02142140298254889 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.373015873015873, + "acc_stderr": 0.04325506042017086, + "acc_norm": 0.373015873015873, + "acc_norm_stderr": 0.04325506042017086 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4934640522875817, + "acc_stderr": 0.028627470550556047, + "acc_norm": 0.4934640522875817, + "acc_norm_stderr": 0.028627470550556047 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6198347107438017, + "acc_stderr": 0.04431324501968432, + "acc_norm": 0.6198347107438017, + "acc_norm_stderr": 0.04431324501968432 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40131578947368424, + "acc_stderr": 0.03988903703336284, + "acc_norm": 0.40131578947368424, + "acc_norm_stderr": 0.03988903703336284 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.019944914136873576, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.019944914136873576 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32978723404255317, + "acc_stderr": 0.028045946942042405, + "acc_norm": 0.32978723404255317, + "acc_norm_stderr": 0.028045946942042405 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.04547960999764376, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.04547960999764376 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.42592592592592593, + "acc_stderr": 0.033723432716530624, + "acc_norm": 0.42592592592592593, + "acc_norm_stderr": 0.033723432716530624 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24916201117318434, + "acc_stderr": 0.01446589382985994, + "acc_norm": 0.24916201117318434, + "acc_norm_stderr": 
0.01446589382985994 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.44, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.43014705882352944, + "acc_stderr": 0.030074971917302875, + "acc_norm": 0.43014705882352944, + "acc_norm_stderr": 0.030074971917302875 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5551020408163265, + "acc_stderr": 0.03181425118197786, + "acc_norm": 0.5551020408163265, + "acc_norm_stderr": 0.03181425118197786 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.569620253164557, + "acc_stderr": 0.03223017195937599, + "acc_norm": 0.569620253164557, + "acc_norm_stderr": 0.03223017195937599 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.34615384615384615, + "acc_stderr": 0.012150699768228567, + "acc_norm": 0.34615384615384615, + "acc_norm_stderr": 0.012150699768228567 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.45588235294117646, + "acc_stderr": 0.03495624522015473, + "acc_norm": 0.45588235294117646, + "acc_norm_stderr": 0.03495624522015473 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.47878787878787876, + "acc_stderr": 0.03900828913737302, + "acc_norm": 0.47878787878787876, + "acc_norm_stderr": 0.03900828913737302 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.47123623011015914, + "mc1_stderr": 0.01747451384852553, + "mc2": 0.6124134981522595, + "mc2_stderr": 0.016386828394196633 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.46162927981109797, + "acc_stderr": 0.01713966022184555, + "acc_norm": 0.4911452184179457, + "acc_norm_stderr": 0.01718765819933674 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + 
"harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + 
"harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Alphacode-AI/AlphaMist7B-slr-v4_1t", + "model_sha": "184042aeac0149f90bb897d34b5bb13fcca619c5", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Alphacode-AI/AlphaMist7B-slr-v4_2/result_2024-04-10 08:23:28.json b/Alphacode-AI/AlphaMist7B-slr-v4_2/result_2024-04-10 08:23:28.json new file mode 100644 index 0000000000000000000000000000000000000000..912c65a32559e7e8465a1e8d36f880e5e81bc5d9 --- /dev/null +++ b/Alphacode-AI/AlphaMist7B-slr-v4_2/result_2024-04-10 08:23:28.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.46075085324232085, + "acc_stderr": 0.014566303676636581, + "acc_norm": 0.5068259385665529, + "acc_norm_stderr": 0.014610029151379813 + }, + "harness|ko_hellaswag|10": { + "acc": 0.5566620195180243, + "acc_stderr": 0.004957637648426469, + "acc_norm": 0.6767576180043816, + "acc_norm_stderr": 0.004667585072717508 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5087719298245614, + "acc_stderr": 0.038342347441649924, + "acc_norm": 0.5087719298245614, + "acc_norm_stderr": 0.038342347441649924 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5825242718446602, + "acc_stderr": 0.048828405482122375, + "acc_norm": 
0.5825242718446602, + "acc_norm_stderr": 0.048828405482122375 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5185185185185185, + "acc_stderr": 0.017867695938429774, + "acc_norm": 0.5185185185185185, + "acc_norm_stderr": 0.017867695938429774 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.04171654161354543, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.04171654161354543 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3617021276595745, + "acc_stderr": 0.031410821975962386, + "acc_norm": 0.3617021276595745, + "acc_norm_stderr": 0.031410821975962386 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4397590361445783, + "acc_stderr": 0.03864139923699121, + "acc_norm": 0.4397590361445783, + "acc_norm_stderr": 0.03864139923699121 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4983922829581994, + "acc_stderr": 0.02839794490780661, + "acc_norm": 0.4983922829581994, + "acc_norm_stderr": 0.02839794490780661 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4349775784753363, + "acc_stderr": 0.03327283370271344, + "acc_norm": 0.4349775784753363, + "acc_norm_stderr": 0.03327283370271344 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48854961832061067, + "acc_stderr": 0.043841400240780176, + "acc_norm": 0.48854961832061067, + "acc_norm_stderr": 0.043841400240780176 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.56, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.56, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6212121212121212, + "acc_stderr": 0.03456088731993747, + "acc_norm": 0.6212121212121212, + "acc_norm_stderr": 0.03456088731993747 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4482758620689655, + "acc_stderr": 0.04144311810878151, + 
"acc_norm": 0.4482758620689655, + "acc_norm_stderr": 0.04144311810878151 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3235294117647059, + "acc_stderr": 0.04655010411319616, + "acc_norm": 0.3235294117647059, + "acc_norm_stderr": 0.04655010411319616 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5966386554621849, + "acc_stderr": 0.031866081214088314, + "acc_norm": 0.5966386554621849, + "acc_norm_stderr": 0.031866081214088314 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5205128205128206, + "acc_stderr": 0.02532966316348994, + "acc_norm": 0.5205128205128206, + "acc_norm_stderr": 0.02532966316348994 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5185185185185185, + "acc_stderr": 0.0483036602463533, + "acc_norm": 0.5185185185185185, + "acc_norm_stderr": 0.0483036602463533 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4236453201970443, + "acc_stderr": 0.03476725747649038, + "acc_norm": 0.4236453201970443, + "acc_norm_stderr": 0.03476725747649038 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.46774193548387094, + "acc_stderr": 0.028384747788813332, + "acc_norm": 0.46774193548387094, + "acc_norm_stderr": 0.028384747788813332 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6752136752136753, + "acc_stderr": 0.03067902276549883, + "acc_norm": 0.6752136752136753, + "acc_norm_stderr": 0.03067902276549883 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.44528301886792454, + "acc_stderr": 0.030588052974270648, + "acc_norm": 0.44528301886792454, + "acc_norm_stderr": 0.030588052974270648 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5363636363636364, + 
"acc_stderr": 0.04776449162396197, + "acc_norm": 0.5363636363636364, + "acc_norm_stderr": 0.04776449162396197 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.027840811495871927, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.027840811495871927 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.03710185726119995, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.03710185726119995 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5621890547263682, + "acc_stderr": 0.0350808011219984, + "acc_norm": 0.5621890547263682, + "acc_norm_stderr": 0.0350808011219984 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3988439306358382, + "acc_stderr": 0.03733626655383509, + "acc_norm": 0.3988439306358382, + "acc_norm_stderr": 0.03733626655383509 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.36243386243386244, + "acc_stderr": 0.024757473902752052, + "acc_norm": 0.36243386243386244, + "acc_norm_stderr": 0.024757473902752052 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3680555555555556, + "acc_stderr": 0.04032999053960718, + "acc_norm": 0.3680555555555556, + "acc_norm_stderr": 0.04032999053960718 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709390974, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709390974 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5057803468208093, + "acc_stderr": 0.026917296179149116, + "acc_norm": 0.5057803468208093, + "acc_norm_stderr": 0.026917296179149116 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.49079754601226994, + "acc_stderr": 0.03927705600787443, + "acc_norm": 0.49079754601226994, + "acc_norm_stderr": 0.03927705600787443 + }, + "harness|ko_mmlu_prehistory|5": { + 
"acc": 0.4691358024691358, + "acc_stderr": 0.027767689606833932, + "acc_norm": 0.4691358024691358, + "acc_norm_stderr": 0.027767689606833932 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5751295336787565, + "acc_stderr": 0.035674713352125395, + "acc_norm": 0.5751295336787565, + "acc_norm_stderr": 0.035674713352125395 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2894736842105263, + "acc_stderr": 0.04266339443159394, + "acc_norm": 0.2894736842105263, + "acc_norm_stderr": 0.04266339443159394 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5339449541284403, + "acc_stderr": 0.021387863350353996, + "acc_norm": 0.5339449541284403, + "acc_norm_stderr": 0.021387863350353996 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.46825396825396826, + "acc_stderr": 0.04463112720677172, + "acc_norm": 0.46825396825396826, + "acc_norm_stderr": 0.04463112720677172 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5098039215686274, + "acc_stderr": 0.02862441255016795, + "acc_norm": 0.5098039215686274, + "acc_norm_stderr": 0.02862441255016795 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.52, + "acc_stderr": 0.05021167315686779, + "acc_norm": 0.52, + "acc_norm_stderr": 0.05021167315686779 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6776859504132231, + "acc_stderr": 0.042664163633521685, + "acc_norm": 0.6776859504132231, + "acc_norm_stderr": 0.042664163633521685 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4342105263157895, + "acc_stderr": 0.04033565667848319, + "acc_norm": 0.4342105263157895, + "acc_norm_stderr": 0.04033565667848319 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.42483660130718953, + "acc_stderr": 0.01999797303545833, + "acc_norm": 0.42483660130718953, + "acc_norm_stderr": 0.01999797303545833 + }, + 
"harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2907801418439716, + "acc_stderr": 0.027090664368353178, + "acc_norm": 0.2907801418439716, + "acc_norm_stderr": 0.027090664368353178 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3125, + "acc_stderr": 0.043994650575715215, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.043994650575715215 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.42592592592592593, + "acc_stderr": 0.03372343271653062, + "acc_norm": 0.42592592592592593, + "acc_norm_stderr": 0.03372343271653062 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24916201117318434, + "acc_stderr": 0.014465893829859936, + "acc_norm": 0.24916201117318434, + "acc_norm_stderr": 0.014465893829859936 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.47058823529411764, + "acc_stderr": 0.030320243265004137, + "acc_norm": 0.47058823529411764, + "acc_norm_stderr": 0.030320243265004137 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5428571428571428, + "acc_stderr": 0.03189141832421396, + "acc_norm": 0.5428571428571428, + "acc_norm_stderr": 0.03189141832421396 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.569620253164557, + "acc_stderr": 0.03223017195937599, + "acc_norm": 0.569620253164557, + "acc_norm_stderr": 0.03223017195937599 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3468057366362451, + "acc_stderr": 0.012156071332318706, + "acc_norm": 0.3468057366362451, + "acc_norm_stderr": 0.012156071332318706 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4362745098039216, + "acc_stderr": 0.03480693138457039, + "acc_norm": 
0.4362745098039216, + "acc_norm_stderr": 0.03480693138457039 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.41818181818181815, + "acc_stderr": 0.03851716319398394, + "acc_norm": 0.41818181818181815, + "acc_norm_stderr": 0.03851716319398394 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.401468788249694, + "mc1_stderr": 0.01716027390169365, + "mc2": 0.5698290730200505, + "mc2_stderr": 0.016066932404443 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.47461629279811096, + "acc_stderr": 0.017168187201429253, + "acc_norm": 0.5064935064935064, + "acc_norm_stderr": 0.017188904359077307 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + 
"harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Alphacode-AI/AlphaMist7B-slr-v4_2", + "model_sha": "ec94cc7faa8fca35edf7e37a75b352fab398c805", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Alphacode-AI/AlphaMist7B-slr-v4_3/result_2024-04-10 08:23:22.json b/Alphacode-AI/AlphaMist7B-slr-v4_3/result_2024-04-10 08:23:22.json new file mode 100644 index 0000000000000000000000000000000000000000..2116c417e18e20fb0613986d491cf5c3f37837bc --- /dev/null +++ 
b/Alphacode-AI/AlphaMist7B-slr-v4_3/result_2024-04-10 08:23:22.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.46075085324232085, + "acc_stderr": 0.014566303676636581, + "acc_norm": 0.5110921501706485, + "acc_norm_stderr": 0.014607794914013053 + }, + "harness|ko_hellaswag|10": { + "acc": 0.5623381796454889, + "acc_stderr": 0.0049508484569845395, + "acc_norm": 0.6819358693487353, + "acc_norm_stderr": 0.00464772722244538 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5146198830409356, + "acc_stderr": 0.03833185275213025, + "acc_norm": 0.5146198830409356, + "acc_norm_stderr": 0.03833185275213025 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5728155339805825, + "acc_stderr": 0.04897957737781168, + "acc_norm": 0.5728155339805825, + "acc_norm_stderr": 0.04897957737781168 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5172413793103449, + "acc_stderr": 0.017869330154003698, + "acc_norm": 0.5172413793103449, + "acc_norm_stderr": 0.017869330154003698 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.04171654161354543, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.04171654161354543 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542129, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542129 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3659574468085106, + "acc_stderr": 0.03148955829745531, + "acc_norm": 0.3659574468085106, + "acc_norm_stderr": 0.03148955829745531 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4397590361445783, + "acc_stderr": 0.03864139923699121, + "acc_norm": 0.4397590361445783, + "acc_norm_stderr": 0.03864139923699121 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4983922829581994, + "acc_stderr": 0.02839794490780661, + "acc_norm": 0.4983922829581994, + "acc_norm_stderr": 0.02839794490780661 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4349775784753363, + "acc_stderr": 
0.03327283370271344, + "acc_norm": 0.4349775784753363, + "acc_norm_stderr": 0.03327283370271344 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48854961832061067, + "acc_stderr": 0.043841400240780176, + "acc_norm": 0.48854961832061067, + "acc_norm_stderr": 0.043841400240780176 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.55, + "acc_stderr": 0.05, + "acc_norm": 0.55, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6262626262626263, + "acc_stderr": 0.034468977386593325, + "acc_norm": 0.6262626262626263, + "acc_norm_stderr": 0.034468977386593325 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.45517241379310347, + "acc_stderr": 0.04149886942192117, + "acc_norm": 0.45517241379310347, + "acc_norm_stderr": 0.04149886942192117 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3235294117647059, + "acc_stderr": 0.04655010411319616, + "acc_norm": 0.3235294117647059, + "acc_norm_stderr": 0.04655010411319616 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6050420168067226, + "acc_stderr": 0.031753678460966245, + "acc_norm": 0.6050420168067226, + "acc_norm_stderr": 0.031753678460966245 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5205128205128206, + "acc_stderr": 0.02532966316348994, + "acc_norm": 0.5205128205128206, + "acc_norm_stderr": 0.02532966316348994 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5185185185185185, + "acc_stderr": 0.0483036602463533, + "acc_norm": 0.5185185185185185, + "acc_norm_stderr": 0.0483036602463533 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.41379310344827586, + "acc_stderr": 
0.03465304488406796, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.03465304488406796 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.47419354838709676, + "acc_stderr": 0.028406095057653315, + "acc_norm": 0.47419354838709676, + "acc_norm_stderr": 0.028406095057653315 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6752136752136753, + "acc_stderr": 0.03067902276549883, + "acc_norm": 0.6752136752136753, + "acc_norm_stderr": 0.03067902276549883 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.44528301886792454, + "acc_stderr": 0.030588052974270648, + "acc_norm": 0.44528301886792454, + "acc_norm_stderr": 0.030588052974270648 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5181818181818182, + "acc_stderr": 0.04785964010794916, + "acc_norm": 0.5181818181818182, + "acc_norm_stderr": 0.04785964010794916 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.28888888888888886, + "acc_stderr": 0.027634907264178544, + "acc_norm": 0.28888888888888886, + "acc_norm_stderr": 0.027634907264178544 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + "acc_stderr": 0.03684881521389023, + "acc_norm": 0.2847682119205298, + "acc_norm_stderr": 0.03684881521389023 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5621890547263682, + "acc_stderr": 0.0350808011219984, + "acc_norm": 0.5621890547263682, + "acc_norm_stderr": 0.0350808011219984 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3930635838150289, + "acc_stderr": 0.0372424959581773, + "acc_norm": 0.3930635838150289, + "acc_norm_stderr": 0.0372424959581773 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.0248708152510571, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.0248708152510571 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3680555555555556, + "acc_stderr": 0.04032999053960718, + "acc_norm": 0.3680555555555556, + "acc_norm_stderr": 0.04032999053960718 + }, + 
"harness|ko_mmlu_college_chemistry|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5028901734104047, + "acc_stderr": 0.02691864538323901, + "acc_norm": 0.5028901734104047, + "acc_norm_stderr": 0.02691864538323901 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.49079754601226994, + "acc_stderr": 0.03927705600787443, + "acc_norm": 0.49079754601226994, + "acc_norm_stderr": 0.03927705600787443 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4660493827160494, + "acc_stderr": 0.02775653525734767, + "acc_norm": 0.4660493827160494, + "acc_norm_stderr": 0.02775653525734767 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5751295336787565, + "acc_stderr": 0.0356747133521254, + "acc_norm": 0.5751295336787565, + "acc_norm_stderr": 0.0356747133521254 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2982456140350877, + "acc_stderr": 0.04303684033537316, + "acc_norm": 0.2982456140350877, + "acc_norm_stderr": 0.04303684033537316 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5357798165137615, + "acc_stderr": 0.021382364775701893, + "acc_norm": 0.5357798165137615, + "acc_norm_stderr": 0.021382364775701893 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4523809523809524, + "acc_stderr": 0.044518079590553275, + "acc_norm": 0.4523809523809524, + "acc_norm_stderr": 0.044518079590553275 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5130718954248366, + "acc_stderr": 0.028620130800700246, + "acc_norm": 0.5130718954248366, + "acc_norm_stderr": 0.028620130800700246 + }, + 
"harness|ko_mmlu_business_ethics|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956913, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956913 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6776859504132231, + "acc_stderr": 0.042664163633521685, + "acc_norm": 0.6776859504132231, + "acc_norm_stderr": 0.042664163633521685 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4407894736842105, + "acc_stderr": 0.040403110624904356, + "acc_norm": 0.4407894736842105, + "acc_norm_stderr": 0.040403110624904356 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4199346405228758, + "acc_stderr": 0.019966811178256483, + "acc_norm": 0.4199346405228758, + "acc_norm_stderr": 0.019966811178256483 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.29432624113475175, + "acc_stderr": 0.027187127011503796, + "acc_norm": 0.29432624113475175, + "acc_norm_stderr": 0.027187127011503796 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3125, + "acc_stderr": 0.043994650575715215, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.043994650575715215 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.42592592592592593, + "acc_stderr": 0.03372343271653062, + "acc_norm": 0.42592592592592593, + "acc_norm_stderr": 0.03372343271653062 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2446927374301676, + "acc_stderr": 0.014378169884098416, + "acc_norm": 0.2446927374301676, + "acc_norm_stderr": 0.014378169884098416 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.47794117647058826, + "acc_stderr": 0.030343264224213528, + "acc_norm": 0.47794117647058826, + "acc_norm_stderr": 
0.030343264224213528 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5428571428571428, + "acc_stderr": 0.03189141832421396, + "acc_norm": 0.5428571428571428, + "acc_norm_stderr": 0.03189141832421396 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5654008438818565, + "acc_stderr": 0.03226759995510145, + "acc_norm": 0.5654008438818565, + "acc_norm_stderr": 0.03226759995510145 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.34419817470664926, + "acc_stderr": 0.012134433741002575, + "acc_norm": 0.34419817470664926, + "acc_norm_stderr": 0.012134433741002575 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4362745098039216, + "acc_stderr": 0.03480693138457039, + "acc_norm": 0.4362745098039216, + "acc_norm_stderr": 0.03480693138457039 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4121212121212121, + "acc_stderr": 0.03843566993588717, + "acc_norm": 0.4121212121212121, + "acc_norm_stderr": 0.03843566993588717 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.412484700122399, + "mc1_stderr": 0.01723329939957123, + "mc2": 0.5763189341031936, + "mc2_stderr": 0.01604850874359036 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4757969303423849, + "acc_stderr": 0.017170202466520748, + "acc_norm": 0.5112160566706021, + "acc_norm_stderr": 0.01718602846948929 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + 
"harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + 
"harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Alphacode-AI/AlphaMist7B-slr-v4_3", + "model_sha": "9d4a81741a952049d8f8fbf5d3fdc0fa9259de42", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Alphacode-AI/Alphacode-MALI-11B/result_2024-05-16 17:59:12.json b/Alphacode-AI/Alphacode-MALI-11B/result_2024-05-16 17:59:12.json new file mode 100644 index 0000000000000000000000000000000000000000..c1315a1dab30078812ad3092c5c96972d38e0151 --- /dev/null +++ b/Alphacode-AI/Alphacode-MALI-11B/result_2024-05-16 17:59:12.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4121160409556314, + "acc_stderr": 0.014383915302225396, + "acc_norm": 0.46331058020477817, + "acc_norm_stderr": 0.014572000527756986 + }, + "harness|ko_hellaswag|10": { + "acc": 0.47470623381796456, + "acc_stderr": 0.004983392650570957, + "acc_norm": 0.6061541525592511, + "acc_norm_stderr": 0.004876028037941935 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4853801169590643, + "acc_stderr": 0.038331852752130205, + "acc_norm": 0.4853801169590643, + "acc_norm_stderr": 0.038331852752130205 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5825242718446602, + "acc_stderr": 0.04882840548212238, + "acc_norm": 0.5825242718446602, + "acc_norm_stderr": 0.04882840548212238 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.49936143039591313, + "acc_stderr": 0.017879948914431662, + "acc_norm": 0.49936143039591313, + "acc_norm_stderr": 0.017879948914431662 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.32592592592592595, + "acc_stderr": 0.040491220417025055, + "acc_norm": 0.32592592592592595, + "acc_norm_stderr": 0.040491220417025055 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768081, + "acc_norm": 0.26, + 
"acc_norm_stderr": 0.04408440022768081 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.34893617021276596, + "acc_stderr": 0.031158522131357776, + "acc_norm": 0.34893617021276596, + "acc_norm_stderr": 0.031158522131357776 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42168674698795183, + "acc_stderr": 0.03844453181770917, + "acc_norm": 0.42168674698795183, + "acc_norm_stderr": 0.03844453181770917 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4694533762057878, + "acc_stderr": 0.028345045864840688, + "acc_norm": 0.4694533762057878, + "acc_norm_stderr": 0.028345045864840688 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4663677130044843, + "acc_stderr": 0.033481800170603065, + "acc_norm": 0.4663677130044843, + "acc_norm_stderr": 0.033481800170603065 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4732824427480916, + "acc_stderr": 0.04379024936553894, + "acc_norm": 0.4732824427480916, + "acc_norm_stderr": 0.04379024936553894 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6060606060606061, + "acc_stderr": 0.03481285338232963, + "acc_norm": 0.6060606060606061, + "acc_norm_stderr": 0.03481285338232963 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4, + "acc_stderr": 0.04082482904638628, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04082482904638628 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.042801058373643966, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.042801058373643966 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5378151260504201, + "acc_stderr": 0.032385469487589795, + "acc_norm": 0.5378151260504201, + "acc_norm_stderr": 0.032385469487589795 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4948717948717949, + "acc_stderr": 
0.02534967290683866, + "acc_norm": 0.4948717948717949, + "acc_norm_stderr": 0.02534967290683866 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.57, + "acc_stderr": 0.0497569851956243, + "acc_norm": 0.57, + "acc_norm_stderr": 0.0497569851956243 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5277777777777778, + "acc_stderr": 0.04826217294139894, + "acc_norm": 0.5277777777777778, + "acc_norm_stderr": 0.04826217294139894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.03465304488406796, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.03465304488406796 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4612903225806452, + "acc_stderr": 0.02835863485983692, + "acc_norm": 0.4612903225806452, + "acc_norm_stderr": 0.02835863485983692 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6752136752136753, + "acc_stderr": 0.03067902276549883, + "acc_norm": 0.6752136752136753, + "acc_norm_stderr": 0.03067902276549883 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4037735849056604, + "acc_stderr": 0.03019761160019795, + "acc_norm": 0.4037735849056604, + "acc_norm_stderr": 0.03019761160019795 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4909090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.4909090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24814814814814815, + "acc_stderr": 0.026335739404055803, + "acc_norm": 0.24814814814814815, + "acc_norm_stderr": 0.026335739404055803 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.03710185726119995, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.03710185726119995 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 
0.5970149253731343, + "acc_stderr": 0.034683432951111266, + "acc_norm": 0.5970149253731343, + "acc_norm_stderr": 0.034683432951111266 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.37572254335260113, + "acc_stderr": 0.03692820767264867, + "acc_norm": 0.37572254335260113, + "acc_norm_stderr": 0.03692820767264867 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3492063492063492, + "acc_stderr": 0.02455229220934266, + "acc_norm": 0.3492063492063492, + "acc_norm_stderr": 0.02455229220934266 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3402777777777778, + "acc_stderr": 0.039621355734862175, + "acc_norm": 0.3402777777777778, + "acc_norm_stderr": 0.039621355734862175 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5, + "acc_stderr": 0.026919095102908273, + "acc_norm": 0.5, + "acc_norm_stderr": 0.026919095102908273 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4723926380368098, + "acc_stderr": 0.039223782906109894, + "acc_norm": 0.4723926380368098, + "acc_norm_stderr": 0.039223782906109894 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.47530864197530864, + "acc_stderr": 0.02778680093142745, + "acc_norm": 0.47530864197530864, + "acc_norm_stderr": 0.02778680093142745 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5077720207253886, + "acc_stderr": 0.03608003225569654, + "acc_norm": 0.5077720207253886, + "acc_norm_stderr": 0.03608003225569654 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 
0.32456140350877194, + "acc_stderr": 0.04404556157374768, + "acc_norm": 0.32456140350877194, + "acc_norm_stderr": 0.04404556157374768 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5321100917431193, + "acc_stderr": 0.02139307122268081, + "acc_norm": 0.5321100917431193, + "acc_norm_stderr": 0.02139307122268081 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4365079365079365, + "acc_stderr": 0.04435932892851466, + "acc_norm": 0.4365079365079365, + "acc_norm_stderr": 0.04435932892851466 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4934640522875817, + "acc_stderr": 0.028627470550556047, + "acc_norm": 0.4934640522875817, + "acc_norm_stderr": 0.028627470550556047 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6115702479338843, + "acc_stderr": 0.04449270350068383, + "acc_norm": 0.6115702479338843, + "acc_norm_stderr": 0.04449270350068383 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.45394736842105265, + "acc_stderr": 0.04051646342874142, + "acc_norm": 0.45394736842105265, + "acc_norm_stderr": 0.04051646342874142 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.39215686274509803, + "acc_stderr": 0.019751726508762637, + "acc_norm": 0.39215686274509803, + "acc_norm_stderr": 0.019751726508762637 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2978723404255319, + "acc_stderr": 0.02728160834446941, + "acc_norm": 0.2978723404255319, + "acc_norm_stderr": 0.02728160834446941 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3392857142857143, + "acc_stderr": 0.04493949068613538, + "acc_norm": 0.3392857142857143, + "acc_norm_stderr": 0.04493949068613538 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4398148148148148, + "acc_stderr": 0.0338517797604481, + "acc_norm": 0.4398148148148148, + "acc_norm_stderr": 0.0338517797604481 
+ }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.25027932960893856, + "acc_stderr": 0.014487500852850426, + "acc_norm": 0.25027932960893856, + "acc_norm_stderr": 0.014487500852850426 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.55, + "acc_stderr": 0.049999999999999996, + "acc_norm": 0.55, + "acc_norm_stderr": 0.049999999999999996 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4227941176470588, + "acc_stderr": 0.030008562845003483, + "acc_norm": 0.4227941176470588, + "acc_norm_stderr": 0.030008562845003483 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5387755102040817, + "acc_stderr": 0.03191282052669279, + "acc_norm": 0.5387755102040817, + "acc_norm_stderr": 0.03191282052669279 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5738396624472574, + "acc_stderr": 0.03219035703131775, + "acc_norm": 0.5738396624472574, + "acc_norm_stderr": 0.03219035703131775 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3428943937418514, + "acc_stderr": 0.012123463271585892, + "acc_norm": 0.3428943937418514, + "acc_norm_stderr": 0.012123463271585892 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4264705882352941, + "acc_stderr": 0.034711579079534254, + "acc_norm": 0.4264705882352941, + "acc_norm_stderr": 0.034711579079534254 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.43636363636363634, + "acc_stderr": 0.03872592983524753, + "acc_norm": 0.43636363636363634, + "acc_norm_stderr": 0.03872592983524753 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.379436964504284, + "mc1_stderr": 0.016987039266142985, + "mc2": 0.5368841042959204, + "mc2_stderr": 0.016440909071812868 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4722550177095632, + "acc_stderr": 0.017163867979456016, + "acc_norm": 
0.4852420306965762, + "acc_norm_stderr": 0.01718286443499856 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + 
"harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Alphacode-AI/Alphacode-MALI-11B", + "model_sha": "19ad7329d6e9229a08aba44138442eb73d47f1cc", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Alphacode-AI/Alphacode-MALI-11B/result_2024-05-28 08:23:46.json b/Alphacode-AI/Alphacode-MALI-11B/result_2024-05-28 08:23:46.json new file mode 100644 index 0000000000000000000000000000000000000000..4c0bb50eae12ebd13111d9e510af5c75340f328b --- /dev/null +++ b/Alphacode-AI/Alphacode-MALI-11B/result_2024-05-28 08:23:46.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.41723549488054607, + "acc_stderr": 0.014409825518403082, + "acc_norm": 0.46075085324232085, + "acc_norm_stderr": 0.014566303676636578 + }, + "harness|ko_hellaswag|10": { + "acc": 0.47779326827325236, + "acc_stderr": 0.004984857671187103, + "acc_norm": 0.604461262696674, + "acc_norm_stderr": 0.004879667889198489 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.49707602339181284, + "acc_stderr": 0.03834759370936839, + "acc_norm": 0.49707602339181284, + 
"acc_norm_stderr": 0.03834759370936839 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5825242718446602, + "acc_stderr": 0.04882840548212238, + "acc_norm": 0.5825242718446602, + "acc_norm_stderr": 0.04882840548212238 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.508301404853129, + "acc_stderr": 0.017877498991072, + "acc_norm": 0.508301404853129, + "acc_norm_stderr": 0.017877498991072 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34074074074074073, + "acc_stderr": 0.04094376269996793, + "acc_norm": 0.34074074074074073, + "acc_norm_stderr": 0.04094376269996793 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768081, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768081 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3446808510638298, + "acc_stderr": 0.03106898596312215, + "acc_norm": 0.3446808510638298, + "acc_norm_stderr": 0.03106898596312215 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42771084337349397, + "acc_stderr": 0.03851597683718533, + "acc_norm": 0.42771084337349397, + "acc_norm_stderr": 0.03851597683718533 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4758842443729904, + "acc_stderr": 0.028365041542564563, + "acc_norm": 0.4758842443729904, + "acc_norm_stderr": 0.028365041542564563 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4663677130044843, + "acc_stderr": 0.033481800170603065, + "acc_norm": 0.4663677130044843, + "acc_norm_stderr": 0.033481800170603065 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.46564885496183206, + "acc_stderr": 0.043749285605997376, + "acc_norm": 0.46564885496183206, + "acc_norm_stderr": 0.043749285605997376 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6111111111111112, + "acc_stderr": 0.0347327959083696, + "acc_norm": 0.6111111111111112, + 
"acc_norm_stderr": 0.0347327959083696 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4, + "acc_stderr": 0.04082482904638628, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04082482904638628 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.0433643270799318, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.0433643270799318 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5588235294117647, + "acc_stderr": 0.0322529423239964, + "acc_norm": 0.5588235294117647, + "acc_norm_stderr": 0.0322529423239964 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.48205128205128206, + "acc_stderr": 0.025334667080954942, + "acc_norm": 0.48205128205128206, + "acc_norm_stderr": 0.025334667080954942 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.55, + "acc_stderr": 0.04999999999999999, + "acc_norm": 0.55, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5370370370370371, + "acc_stderr": 0.04820403072760626, + "acc_norm": 0.5370370370370371, + "acc_norm_stderr": 0.04820403072760626 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4187192118226601, + "acc_stderr": 0.03471192860518468, + "acc_norm": 0.4187192118226601, + "acc_norm_stderr": 0.03471192860518468 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4612903225806452, + "acc_stderr": 0.02835863485983692, + "acc_norm": 0.4612903225806452, + "acc_norm_stderr": 0.02835863485983692 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6709401709401709, + "acc_stderr": 0.03078232157768817, + "acc_norm": 0.6709401709401709, + "acc_norm_stderr": 0.03078232157768817 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.41132075471698115, + "acc_stderr": 0.030285009259009798, + "acc_norm": 
0.41132075471698115, + "acc_norm_stderr": 0.030285009259009798 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4909090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.4909090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24074074074074073, + "acc_stderr": 0.026067159222275805, + "acc_norm": 0.24074074074074073, + "acc_norm_stderr": 0.026067159222275805 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + "acc_stderr": 0.037345356767871984, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.037345356767871984 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5970149253731343, + "acc_stderr": 0.034683432951111266, + "acc_norm": 0.5970149253731343, + "acc_norm_stderr": 0.034683432951111266 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.35260115606936415, + "acc_stderr": 0.03643037168958548, + "acc_norm": 0.35260115606936415, + "acc_norm_stderr": 0.03643037168958548 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.35978835978835977, + "acc_stderr": 0.024718075944129277, + "acc_norm": 0.35978835978835977, + "acc_norm_stderr": 0.024718075944129277 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3402777777777778, + "acc_stderr": 0.039621355734862175, + "acc_norm": 0.3402777777777778, + "acc_norm_stderr": 0.039621355734862175 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.55, + "acc_stderr": 0.05, + "acc_norm": 0.55, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.48265895953757226, + "acc_stderr": 0.026902900458666647, + "acc_norm": 0.48265895953757226, + "acc_norm_stderr": 0.026902900458666647 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.43558282208588955, + "acc_stderr": 
0.03895632464138937, + "acc_norm": 0.43558282208588955, + "acc_norm_stderr": 0.03895632464138937 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.027801656212323667, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.027801656212323667 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.533678756476684, + "acc_stderr": 0.036002440698671784, + "acc_norm": 0.533678756476684, + "acc_norm_stderr": 0.036002440698671784 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2894736842105263, + "acc_stderr": 0.04266339443159394, + "acc_norm": 0.2894736842105263, + "acc_norm_stderr": 0.04266339443159394 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5321100917431193, + "acc_stderr": 0.021393071222680814, + "acc_norm": 0.5321100917431193, + "acc_norm_stderr": 0.021393071222680814 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.42063492063492064, + "acc_stderr": 0.04415438226743743, + "acc_norm": 0.42063492063492064, + "acc_norm_stderr": 0.04415438226743743 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.49019607843137253, + "acc_stderr": 0.028624412550167965, + "acc_norm": 0.49019607843137253, + "acc_norm_stderr": 0.028624412550167965 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.043913262867240704, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.043913262867240704 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.45394736842105265, + "acc_stderr": 0.04051646342874142, + "acc_norm": 0.45394736842105265, + "acc_norm_stderr": 0.04051646342874142 + }, + "harness|ko_mmlu_professional_psychology|5": { + 
"acc": 0.3888888888888889, + "acc_stderr": 0.01972205893961806, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.01972205893961806 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.29432624113475175, + "acc_stderr": 0.027187127011503796, + "acc_norm": 0.29432624113475175, + "acc_norm_stderr": 0.027187127011503796 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.30357142857142855, + "acc_stderr": 0.04364226155841044, + "acc_norm": 0.30357142857142855, + "acc_norm_stderr": 0.04364226155841044 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4212962962962963, + "acc_stderr": 0.03367462138896078, + "acc_norm": 0.4212962962962963, + "acc_norm_stderr": 0.03367462138896078 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24916201117318434, + "acc_stderr": 0.014465893829859936, + "acc_norm": 0.24916201117318434, + "acc_norm_stderr": 0.014465893829859936 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4264705882352941, + "acc_stderr": 0.030042615832714874, + "acc_norm": 0.4264705882352941, + "acc_norm_stderr": 0.030042615832714874 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5510204081632653, + "acc_stderr": 0.03184213866687578, + "acc_norm": 0.5510204081632653, + "acc_norm_stderr": 0.03184213866687578 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5822784810126582, + "acc_stderr": 0.032103530322412685, + "acc_norm": 0.5822784810126582, + "acc_norm_stderr": 0.032103530322412685 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.34159061277705344, + "acc_stderr": 0.012112391320842854, + "acc_norm": 0.34159061277705344, + 
"acc_norm_stderr": 0.012112391320842854 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.43137254901960786, + "acc_stderr": 0.03476099060501636, + "acc_norm": 0.43137254901960786, + "acc_norm_stderr": 0.03476099060501636 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.45454545454545453, + "acc_stderr": 0.03888176921674098, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.03888176921674098 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3769889840881273, + "mc1_stderr": 0.016965517578930354, + "mc2": 0.5382838571029986, + "mc2_stderr": 0.01627147352020677 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.48760330578512395, + "acc_stderr": 0.01718506973267654, + "acc_norm": 0.4935064935064935, + "acc_norm_stderr": 0.017188904359077318 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Alphacode-AI/Alphacode-MALI-11B", + "model_sha": "19ad7329d6e9229a08aba44138442eb73d47f1cc", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Alphacode-AI/Alphacode-MALI-11B_slowtest/result_2024-05-29 02:19:00.json b/Alphacode-AI/Alphacode-MALI-11B_slowtest/result_2024-05-29 
02:19:00.json new file mode 100644 index 0000000000000000000000000000000000000000..aad602ef21e7eef80da44648094820888b0b939d --- /dev/null +++ b/Alphacode-AI/Alphacode-MALI-11B_slowtest/result_2024-05-29 02:19:00.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.41723549488054607, + "acc_stderr": 0.014409825518403082, + "acc_norm": 0.46075085324232085, + "acc_norm_stderr": 0.014566303676636578 + }, + "harness|ko_hellaswag|10": { + "acc": 0.47779326827325236, + "acc_stderr": 0.004984857671187103, + "acc_norm": 0.604461262696674, + "acc_norm_stderr": 0.004879667889198489 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.49707602339181284, + "acc_stderr": 0.03834759370936839, + "acc_norm": 0.49707602339181284, + "acc_norm_stderr": 0.03834759370936839 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5825242718446602, + "acc_stderr": 0.04882840548212238, + "acc_norm": 0.5825242718446602, + "acc_norm_stderr": 0.04882840548212238 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.508301404853129, + "acc_stderr": 0.017877498991072, + "acc_norm": 0.508301404853129, + "acc_norm_stderr": 0.017877498991072 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34074074074074073, + "acc_stderr": 0.04094376269996793, + "acc_norm": 0.34074074074074073, + "acc_norm_stderr": 0.04094376269996793 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768081, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768081 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3446808510638298, + "acc_stderr": 0.03106898596312215, + "acc_norm": 0.3446808510638298, + "acc_norm_stderr": 0.03106898596312215 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42771084337349397, + "acc_stderr": 0.03851597683718533, + "acc_norm": 0.42771084337349397, + "acc_norm_stderr": 0.03851597683718533 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4758842443729904, + "acc_stderr": 0.028365041542564563, + "acc_norm": 
0.4758842443729904, + "acc_norm_stderr": 0.028365041542564563 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4663677130044843, + "acc_stderr": 0.033481800170603065, + "acc_norm": 0.4663677130044843, + "acc_norm_stderr": 0.033481800170603065 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.46564885496183206, + "acc_stderr": 0.043749285605997376, + "acc_norm": 0.46564885496183206, + "acc_norm_stderr": 0.043749285605997376 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6111111111111112, + "acc_stderr": 0.0347327959083696, + "acc_norm": 0.6111111111111112, + "acc_norm_stderr": 0.0347327959083696 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4, + "acc_stderr": 0.04082482904638628, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04082482904638628 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.0433643270799318, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.0433643270799318 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5588235294117647, + "acc_stderr": 0.0322529423239964, + "acc_norm": 0.5588235294117647, + "acc_norm_stderr": 0.0322529423239964 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.48205128205128206, + "acc_stderr": 0.025334667080954942, + "acc_norm": 0.48205128205128206, + "acc_norm_stderr": 0.025334667080954942 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.55, + "acc_stderr": 0.04999999999999999, + "acc_norm": 0.55, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5370370370370371, + "acc_stderr": 0.04820403072760626, + "acc_norm": 0.5370370370370371, 
+ "acc_norm_stderr": 0.04820403072760626 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4187192118226601, + "acc_stderr": 0.03471192860518468, + "acc_norm": 0.4187192118226601, + "acc_norm_stderr": 0.03471192860518468 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4612903225806452, + "acc_stderr": 0.02835863485983692, + "acc_norm": 0.4612903225806452, + "acc_norm_stderr": 0.02835863485983692 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6709401709401709, + "acc_stderr": 0.03078232157768817, + "acc_norm": 0.6709401709401709, + "acc_norm_stderr": 0.03078232157768817 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.41132075471698115, + "acc_stderr": 0.030285009259009798, + "acc_norm": 0.41132075471698115, + "acc_norm_stderr": 0.030285009259009798 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4909090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.4909090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24074074074074073, + "acc_stderr": 0.026067159222275805, + "acc_norm": 0.24074074074074073, + "acc_norm_stderr": 0.026067159222275805 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + "acc_stderr": 0.037345356767871984, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.037345356767871984 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5970149253731343, + "acc_stderr": 0.034683432951111266, + "acc_norm": 0.5970149253731343, + "acc_norm_stderr": 0.034683432951111266 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.35260115606936415, + "acc_stderr": 0.03643037168958548, + "acc_norm": 0.35260115606936415, + "acc_norm_stderr": 0.03643037168958548 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.35978835978835977, + "acc_stderr": 0.024718075944129277, + "acc_norm": 0.35978835978835977, + "acc_norm_stderr": 0.024718075944129277 + }, + "harness|ko_mmlu_college_biology|5": { + 
"acc": 0.3402777777777778, + "acc_stderr": 0.039621355734862175, + "acc_norm": 0.3402777777777778, + "acc_norm_stderr": 0.039621355734862175 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.55, + "acc_stderr": 0.05, + "acc_norm": 0.55, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.48265895953757226, + "acc_stderr": 0.026902900458666647, + "acc_norm": 0.48265895953757226, + "acc_norm_stderr": 0.026902900458666647 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.43558282208588955, + "acc_stderr": 0.03895632464138937, + "acc_norm": 0.43558282208588955, + "acc_norm_stderr": 0.03895632464138937 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.027801656212323667, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.027801656212323667 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.533678756476684, + "acc_stderr": 0.036002440698671784, + "acc_norm": 0.533678756476684, + "acc_norm_stderr": 0.036002440698671784 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2894736842105263, + "acc_stderr": 0.04266339443159394, + "acc_norm": 0.2894736842105263, + "acc_norm_stderr": 0.04266339443159394 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5321100917431193, + "acc_stderr": 0.021393071222680814, + "acc_norm": 0.5321100917431193, + "acc_norm_stderr": 0.021393071222680814 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.42063492063492064, + "acc_stderr": 0.04415438226743743, + "acc_norm": 0.42063492063492064, + "acc_norm_stderr": 0.04415438226743743 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 
0.49019607843137253, + "acc_stderr": 0.028624412550167965, + "acc_norm": 0.49019607843137253, + "acc_norm_stderr": 0.028624412550167965 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.043913262867240704, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.043913262867240704 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.45394736842105265, + "acc_stderr": 0.04051646342874142, + "acc_norm": 0.45394736842105265, + "acc_norm_stderr": 0.04051646342874142 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.01972205893961806, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.01972205893961806 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.29432624113475175, + "acc_stderr": 0.027187127011503796, + "acc_norm": 0.29432624113475175, + "acc_norm_stderr": 0.027187127011503796 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.30357142857142855, + "acc_stderr": 0.04364226155841044, + "acc_norm": 0.30357142857142855, + "acc_norm_stderr": 0.04364226155841044 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4212962962962963, + "acc_stderr": 0.03367462138896078, + "acc_norm": 0.4212962962962963, + "acc_norm_stderr": 0.03367462138896078 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24916201117318434, + "acc_stderr": 0.014465893829859936, + "acc_norm": 0.24916201117318434, + "acc_norm_stderr": 0.014465893829859936 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + 
"harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4264705882352941, + "acc_stderr": 0.030042615832714874, + "acc_norm": 0.4264705882352941, + "acc_norm_stderr": 0.030042615832714874 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5510204081632653, + "acc_stderr": 0.03184213866687578, + "acc_norm": 0.5510204081632653, + "acc_norm_stderr": 0.03184213866687578 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5822784810126582, + "acc_stderr": 0.032103530322412685, + "acc_norm": 0.5822784810126582, + "acc_norm_stderr": 0.032103530322412685 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.34159061277705344, + "acc_stderr": 0.012112391320842854, + "acc_norm": 0.34159061277705344, + "acc_norm_stderr": 0.012112391320842854 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.43137254901960786, + "acc_stderr": 0.03476099060501636, + "acc_norm": 0.43137254901960786, + "acc_norm_stderr": 0.03476099060501636 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.45454545454545453, + "acc_stderr": 0.03888176921674098, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.03888176921674098 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3769889840881273, + "mc1_stderr": 0.016965517578930354, + "mc2": 0.5382838571029986, + "mc2_stderr": 0.01627147352020677 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.48760330578512395, + "acc_stderr": 0.01718506973267654, + "acc_norm": 0.4935064935064935, + "acc_norm_stderr": 0.017188904359077318 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + 
"harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + 
"harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Alphacode-AI/Alphacode-MALI-11B_slowtest", + "model_sha": "1d16d7a6826cb2b97ea0792078c28184d136fa40", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Alphacode-AI/Alphacode-MALI-9B/result_2024-05-20 05:09:30.json b/Alphacode-AI/Alphacode-MALI-9B/result_2024-05-20 05:09:30.json new file mode 100644 index 0000000000000000000000000000000000000000..fda1373447b1608ee9840f40fcbf918ab77f3755 --- /dev/null +++ b/Alphacode-AI/Alphacode-MALI-9B/result_2024-05-20 05:09:30.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4232081911262799, + "acc_stderr": 0.014438036220848027, + "acc_norm": 0.4812286689419795, + "acc_norm_stderr": 0.014601090150633966 + }, + "harness|ko_hellaswag|10": { + "acc": 0.49263095000995816, + "acc_stderr": 0.004989239462835217, + "acc_norm": 0.620991834295957, + "acc_norm_stderr": 0.004841486716855771 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4619883040935672, + "acc_stderr": 0.03823727092882307, + "acc_norm": 0.4619883040935672, + "acc_norm_stderr": 0.03823727092882307 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5242718446601942, + "acc_stderr": 0.049449010929737795, + "acc_norm": 0.5242718446601942, + "acc_norm_stderr": 0.049449010929737795 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5044699872286079, + "acc_stderr": 0.017879248970584356, + "acc_norm": 0.5044699872286079, + "acc_norm_stderr": 0.017879248970584356 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.362962962962963, + "acc_stderr": 0.041539484047424, + 
"acc_norm": 0.362962962962963, + "acc_norm_stderr": 0.041539484047424 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.046482319871173156, + "acc_norm": 0.31, + "acc_norm_stderr": 0.046482319871173156 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4, + "acc_stderr": 0.032025630761017346, + "acc_norm": 0.4, + "acc_norm_stderr": 0.032025630761017346 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42168674698795183, + "acc_stderr": 0.03844453181770917, + "acc_norm": 0.42168674698795183, + "acc_norm_stderr": 0.03844453181770917 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.48231511254019294, + "acc_stderr": 0.028380322849077124, + "acc_norm": 0.48231511254019294, + "acc_norm_stderr": 0.028380322849077124 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4170403587443946, + "acc_stderr": 0.03309266936071721, + "acc_norm": 0.4170403587443946, + "acc_norm_stderr": 0.03309266936071721 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.46564885496183206, + "acc_stderr": 0.043749285605997376, + "acc_norm": 0.46564885496183206, + "acc_norm_stderr": 0.043749285605997376 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.601010101010101, + "acc_stderr": 0.03488901616852731, + "acc_norm": 0.601010101010101, + "acc_norm_stderr": 0.03488901616852731 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.04104269211806232, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.04104269211806232 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.04488482852329017, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.04488482852329017 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5714285714285714, + "acc_stderr": 0.03214536859788639, + 
"acc_norm": 0.5714285714285714, + "acc_norm_stderr": 0.03214536859788639 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5256410256410257, + "acc_stderr": 0.025317649726448666, + "acc_norm": 0.5256410256410257, + "acc_norm_stderr": 0.025317649726448666 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.04820403072760627, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.04820403072760627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.03465304488406796, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.03465304488406796 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4774193548387097, + "acc_stderr": 0.02841498501970786, + "acc_norm": 0.4774193548387097, + "acc_norm_stderr": 0.02841498501970786 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6452991452991453, + "acc_stderr": 0.03134250486245402, + "acc_norm": 0.6452991452991453, + "acc_norm_stderr": 0.03134250486245402 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4339622641509434, + "acc_stderr": 0.030503292013342596, + "acc_norm": 0.4339622641509434, + "acc_norm_stderr": 0.030503292013342596 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5363636363636364, + "acc_stderr": 0.047764491623961985, + "acc_norm": 0.5363636363636364, + "acc_norm_stderr": 0.047764491623961985 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.02730914058823018, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.02730914058823018 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, 
+ "acc_stderr": 0.03802039760107903, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.03802039760107903 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5970149253731343, + "acc_stderr": 0.034683432951111266, + "acc_norm": 0.5970149253731343, + "acc_norm_stderr": 0.034683432951111266 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3988439306358382, + "acc_stderr": 0.03733626655383509, + "acc_norm": 0.3988439306358382, + "acc_norm_stderr": 0.03733626655383509 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3544973544973545, + "acc_stderr": 0.024636830602842, + "acc_norm": 0.3544973544973545, + "acc_norm_stderr": 0.024636830602842 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.03852084696008534, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.03852084696008534 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237101, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237101 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.47398843930635837, + "acc_stderr": 0.02688264343402289, + "acc_norm": 0.47398843930635837, + "acc_norm_stderr": 0.02688264343402289 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.48466257668711654, + "acc_stderr": 0.03926522378708843, + "acc_norm": 0.48466257668711654, + "acc_norm_stderr": 0.03926522378708843 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.47530864197530864, + "acc_stderr": 0.02778680093142745, + "acc_norm": 0.47530864197530864, + "acc_norm_stderr": 0.02778680093142745 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.04605661864718381, + "acc_norm": 0.3, + "acc_norm_stderr": 0.04605661864718381 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 
0.5544041450777202, + "acc_stderr": 0.035870149860756595, + "acc_norm": 0.5544041450777202, + "acc_norm_stderr": 0.035870149860756595 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04434600701584925, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04434600701584925 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5339449541284403, + "acc_stderr": 0.02138786335035399, + "acc_norm": 0.5339449541284403, + "acc_norm_stderr": 0.02138786335035399 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.42063492063492064, + "acc_stderr": 0.04415438226743743, + "acc_norm": 0.42063492063492064, + "acc_norm_stderr": 0.04415438226743743 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4673202614379085, + "acc_stderr": 0.028568699752225882, + "acc_norm": 0.4673202614379085, + "acc_norm_stderr": 0.028568699752225882 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.47, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.47, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.628099173553719, + "acc_stderr": 0.044120158066245044, + "acc_norm": 0.628099173553719, + "acc_norm_stderr": 0.044120158066245044 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40131578947368424, + "acc_stderr": 0.03988903703336284, + "acc_norm": 0.40131578947368424, + "acc_norm_stderr": 0.03988903703336284 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.39705882352941174, + "acc_stderr": 0.01979448890002411, + "acc_norm": 0.39705882352941174, + "acc_norm_stderr": 0.01979448890002411 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32269503546099293, + "acc_stderr": 0.0278891393005348, + "acc_norm": 0.32269503546099293, + "acc_norm_stderr": 0.0278891393005348 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.32142857142857145, + "acc_stderr": 0.04432804055291521, + "acc_norm": 0.32142857142857145, + "acc_norm_stderr": 0.04432804055291521 + 
}, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4305555555555556, + "acc_stderr": 0.03376922151252335, + "acc_norm": 0.4305555555555556, + "acc_norm_stderr": 0.03376922151252335 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24022346368715083, + "acc_stderr": 0.014288343803925305, + "acc_norm": 0.24022346368715083, + "acc_norm_stderr": 0.014288343803925305 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4963235294117647, + "acc_stderr": 0.030372015885428188, + "acc_norm": 0.4963235294117647, + "acc_norm_stderr": 0.030372015885428188 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5142857142857142, + "acc_stderr": 0.031996152328062855, + "acc_norm": 0.5142857142857142, + "acc_norm_stderr": 0.031996152328062855 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5527426160337553, + "acc_stderr": 0.03236564251614193, + "acc_norm": 0.5527426160337553, + "acc_norm_stderr": 0.03236564251614193 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3259452411994785, + "acc_stderr": 0.01197150729498278, + "acc_norm": 0.3259452411994785, + "acc_norm_stderr": 0.01197150729498278 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4362745098039216, + "acc_stderr": 0.03480693138457039, + "acc_norm": 0.4362745098039216, + "acc_norm_stderr": 0.03480693138457039 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.40606060606060607, + "acc_stderr": 0.03834816355401181, + "acc_norm": 0.40606060606060607, + "acc_norm_stderr": 0.03834816355401181 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3708690330477356, + "mc1_stderr": 0.016909693580248814, + 
"mc2": 0.5395410838294554, + "mc2_stderr": 0.01617805241690159 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4769775678866588, + "acc_stderr": 0.017172121546727634, + "acc_norm": 0.4852420306965762, + "acc_norm_stderr": 0.01718286443499856 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + 
"harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Alphacode-AI/Alphacode-MALI-9B", + "model_sha": "1745ecdc892dcab1a796228ad7b6e22cd2c45222", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Alphacode-AI/Alphallama3-8B/result_2024-05-02 12:00:21.json b/Alphacode-AI/Alphallama3-8B/result_2024-05-02 12:00:21.json new file mode 100644 index 0000000000000000000000000000000000000000..bb6c5c00f5296cc0bdeaee0fca233adba78a1b9c --- /dev/null +++ b/Alphacode-AI/Alphallama3-8B/result_2024-05-02 12:00:21.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.36945392491467577, + "acc_stderr": 0.014104578366491894, + "acc_norm": 0.4180887372013652, + "acc_norm_stderr": 0.014413988396996083 + }, + "harness|ko_hellaswag|10": { + "acc": 0.38836885082652856, + "acc_stderr": 0.00486383136484808, + "acc_norm": 0.4993029277036447, + "acc_norm_stderr": 0.00498977656227611 + }, + 
"harness|ko_mmlu_world_religions|5": { + "acc": 0.38596491228070173, + "acc_stderr": 0.03733756969066165, + "acc_norm": 0.38596491228070173, + "acc_norm_stderr": 0.03733756969066165 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2815533980582524, + "acc_stderr": 0.04453254836326466, + "acc_norm": 0.2815533980582524, + "acc_norm_stderr": 0.04453254836326466 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.40485312899106, + "acc_stderr": 0.01755324646772025, + "acc_norm": 0.40485312899106, + "acc_norm_stderr": 0.01755324646772025 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.42962962962962964, + "acc_stderr": 0.04276349494376599, + "acc_norm": 0.42962962962962964, + "acc_norm_stderr": 0.04276349494376599 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.37446808510638296, + "acc_stderr": 0.031639106653672915, + "acc_norm": 0.37446808510638296, + "acc_norm_stderr": 0.031639106653672915 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3614457831325301, + "acc_stderr": 0.0374005938202932, + "acc_norm": 0.3614457831325301, + "acc_norm_stderr": 0.0374005938202932 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.40514469453376206, + "acc_stderr": 0.02788238379132595, + "acc_norm": 0.40514469453376206, + "acc_norm_stderr": 0.02788238379132595 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.2914798206278027, + "acc_stderr": 0.030500283176545916, + "acc_norm": 0.2914798206278027, + "acc_norm_stderr": 0.030500283176545916 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3053435114503817, + "acc_stderr": 0.04039314978724561, + "acc_norm": 0.3053435114503817, + "acc_norm_stderr": 0.04039314978724561 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + 
"harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3787878787878788, + "acc_stderr": 0.03456088731993747, + "acc_norm": 0.3787878787878788, + "acc_norm_stderr": 0.03456088731993747 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4413793103448276, + "acc_stderr": 0.04137931034482758, + "acc_norm": 0.4413793103448276, + "acc_norm_stderr": 0.04137931034482758 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.04280105837364397, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.04280105837364397 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3403361344537815, + "acc_stderr": 0.030778057422931673, + "acc_norm": 0.3403361344537815, + "acc_norm_stderr": 0.030778057422931673 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.358974358974359, + "acc_stderr": 0.02432173848460235, + "acc_norm": 0.358974358974359, + "acc_norm_stderr": 0.02432173848460235 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.044531975073749834, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.044531975073749834 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.35960591133004927, + "acc_stderr": 0.033764582465095665, + "acc_norm": 0.35960591133004927, + "acc_norm_stderr": 0.033764582465095665 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4096774193548387, + "acc_stderr": 0.027976054915347357, + "acc_norm": 0.4096774193548387, + "acc_norm_stderr": 0.027976054915347357 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5128205128205128, + "acc_stderr": 0.0327453193884235, + "acc_norm": 0.5128205128205128, + 
"acc_norm_stderr": 0.0327453193884235 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3584905660377358, + "acc_stderr": 0.029514703583981755, + "acc_norm": 0.3584905660377358, + "acc_norm_stderr": 0.029514703583981755 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.38181818181818183, + "acc_stderr": 0.04653429807913509, + "acc_norm": 0.38181818181818183, + "acc_norm_stderr": 0.04653429807913509 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3037037037037037, + "acc_stderr": 0.02803792996911499, + "acc_norm": 0.3037037037037037, + "acc_norm_stderr": 0.02803792996911499 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.03780445850526733, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.03780445850526733 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5124378109452736, + "acc_stderr": 0.0353443984853958, + "acc_norm": 0.5124378109452736, + "acc_norm_stderr": 0.0353443984853958 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.28901734104046245, + "acc_stderr": 0.034564257450869995, + "acc_norm": 0.28901734104046245, + "acc_norm_stderr": 0.034564257450869995 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.28835978835978837, + "acc_stderr": 0.0233306540545359, + "acc_norm": 0.28835978835978837, + "acc_norm_stderr": 0.0233306540545359 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3125, + "acc_stderr": 0.038760854559127644, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.038760854559127644 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.33815028901734107, + "acc_stderr": 0.025469770149400175, + "acc_norm": 
0.33815028901734107, + "acc_norm_stderr": 0.025469770149400175 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.31901840490797545, + "acc_stderr": 0.03661997551073836, + "acc_norm": 0.31901840490797545, + "acc_norm_stderr": 0.03661997551073836 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.38271604938271603, + "acc_stderr": 0.027044538138402616, + "acc_norm": 0.38271604938271603, + "acc_norm_stderr": 0.027044538138402616 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.34196891191709844, + "acc_stderr": 0.03423465100104282, + "acc_norm": 0.34196891191709844, + "acc_norm_stderr": 0.03423465100104282 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2982456140350877, + "acc_stderr": 0.043036840335373173, + "acc_norm": 0.2982456140350877, + "acc_norm_stderr": 0.043036840335373173 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3541284403669725, + "acc_stderr": 0.02050472901382911, + "acc_norm": 0.3541284403669725, + "acc_norm_stderr": 0.02050472901382911 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.23809523809523808, + "acc_stderr": 0.03809523809523809, + "acc_norm": 0.23809523809523808, + "acc_norm_stderr": 0.03809523809523809 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4084967320261438, + "acc_stderr": 0.028146405993096358, + "acc_norm": 0.4084967320261438, + "acc_norm_stderr": 0.028146405993096358 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5289256198347108, + "acc_stderr": 0.04556710331269498, + "acc_norm": 0.5289256198347108, + "acc_norm_stderr": 0.04556710331269498 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3684210526315789, + "acc_stderr": 
0.03925523381052932, + "acc_norm": 0.3684210526315789, + "acc_norm_stderr": 0.03925523381052932 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3284313725490196, + "acc_stderr": 0.018999707383162673, + "acc_norm": 0.3284313725490196, + "acc_norm_stderr": 0.018999707383162673 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2765957446808511, + "acc_stderr": 0.026684564340460994, + "acc_norm": 0.2765957446808511, + "acc_norm_stderr": 0.026684564340460994 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.36607142857142855, + "acc_stderr": 0.0457237235873743, + "acc_norm": 0.36607142857142855, + "acc_norm_stderr": 0.0457237235873743 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.22685185185185186, + "acc_stderr": 0.028561650102422276, + "acc_norm": 0.22685185185185186, + "acc_norm_stderr": 0.028561650102422276 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27262569832402234, + "acc_stderr": 0.014893391735249608, + "acc_norm": 0.27262569832402234, + "acc_norm_stderr": 0.014893391735249608 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816505, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816505 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.2867647058823529, + "acc_stderr": 0.02747227447323382, + "acc_norm": 0.2867647058823529, + "acc_norm_stderr": 0.02747227447323382 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.24489795918367346, + "acc_stderr": 0.02752963744017493, + "acc_norm": 0.24489795918367346, + "acc_norm_stderr": 0.02752963744017493 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.4345991561181435, + "acc_stderr": 0.03226759995510145, + "acc_norm": 0.4345991561181435, + "acc_norm_stderr": 0.03226759995510145 + }, + 
"harness|ko_mmlu_professional_law|5": { + "acc": 0.28226857887874834, + "acc_stderr": 0.011495852176241947, + "acc_norm": 0.28226857887874834, + "acc_norm_stderr": 0.011495852176241947 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.29901960784313725, + "acc_stderr": 0.03213325717373617, + "acc_norm": 0.29901960784313725, + "acc_norm_stderr": 0.03213325717373617 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3696969696969697, + "acc_stderr": 0.03769430314512568, + "acc_norm": 0.3696969696969697, + "acc_norm_stderr": 0.03769430314512568 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2631578947368421, + "mc1_stderr": 0.015415241740237017, + "mc2": 0.4188327081335226, + "mc2_stderr": 0.015492466155742542 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3364817001180638, + "acc_stderr": 0.016245085294386546, + "acc_norm": 0.4805194805194805, + "acc_norm_stderr": 0.017177301992342547 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + 
"harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Alphacode-AI/Alphallama3-8B", + "model_sha": "a202fe10779c9936312b61e504bc2b76dac063dc", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git 
a/Alphacode-AI/Alphallama3-8B_v2/result_2024-05-09 06:09:30.json b/Alphacode-AI/Alphallama3-8B_v2/result_2024-05-09 06:09:30.json new file mode 100644 index 0000000000000000000000000000000000000000..e3f08749d60b031238332fcefd777255d9beb6af --- /dev/null +++ b/Alphacode-AI/Alphallama3-8B_v2/result_2024-05-09 06:09:30.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3796928327645051, + "acc_stderr": 0.014182119866974872, + "acc_norm": 0.43430034129692835, + "acc_norm_stderr": 0.014484703048857359 + }, + "harness|ko_hellaswag|10": { + "acc": 0.40061740689105757, + "acc_stderr": 0.004890221012015059, + "acc_norm": 0.5117506472814181, + "acc_norm_stderr": 0.004988403265931467 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.038110796698335316, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.038110796698335316 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.46601941747572817, + "acc_stderr": 0.0493929144727348, + "acc_norm": 0.46601941747572817, + "acc_norm_stderr": 0.0493929144727348 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4329501915708812, + "acc_stderr": 0.017718469101513982, + "acc_norm": 0.4329501915708812, + "acc_norm_stderr": 0.017718469101513982 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.42962962962962964, + "acc_stderr": 0.04276349494376599, + "acc_norm": 0.42962962962962964, + "acc_norm_stderr": 0.04276349494376599 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3574468085106383, + "acc_stderr": 0.03132941789476425, + "acc_norm": 0.3574468085106383, + "acc_norm_stderr": 0.03132941789476425 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.35542168674698793, + "acc_stderr": 0.03726214354322415, + "acc_norm": 0.35542168674698793, + "acc_norm_stderr": 0.03726214354322415 + }, + 
"harness|ko_mmlu_philosophy|5": { + "acc": 0.43729903536977494, + "acc_stderr": 0.02817391776176288, + "acc_norm": 0.43729903536977494, + "acc_norm_stderr": 0.02817391776176288 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.36771300448430494, + "acc_stderr": 0.03236198350928276, + "acc_norm": 0.36771300448430494, + "acc_norm_stderr": 0.03236198350928276 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.40458015267175573, + "acc_stderr": 0.043046937953806645, + "acc_norm": 0.40458015267175573, + "acc_norm_stderr": 0.043046937953806645 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.46464646464646464, + "acc_stderr": 0.03553436368828063, + "acc_norm": 0.46464646464646464, + "acc_norm_stderr": 0.03553436368828063 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4689655172413793, + "acc_stderr": 0.04158632762097828, + "acc_norm": 0.4689655172413793, + "acc_norm_stderr": 0.04158632762097828 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.04389869956808778, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.04389869956808778 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3907563025210084, + "acc_stderr": 0.03169380235712997, + "acc_norm": 0.3907563025210084, + "acc_norm_stderr": 0.03169380235712997 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.36923076923076925, + "acc_stderr": 0.02446861524147891, + "acc_norm": 0.36923076923076925, + "acc_norm_stderr": 0.02446861524147891 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 
0.04688261722621504 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.047500773411999854, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.047500773411999854 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3694581280788177, + "acc_stderr": 0.03395970381998574, + "acc_norm": 0.3694581280788177, + "acc_norm_stderr": 0.03395970381998574 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4645161290322581, + "acc_stderr": 0.028372287797962952, + "acc_norm": 0.4645161290322581, + "acc_norm_stderr": 0.028372287797962952 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5982905982905983, + "acc_stderr": 0.03211693751051621, + "acc_norm": 0.5982905982905983, + "acc_norm_stderr": 0.03211693751051621 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4075471698113208, + "acc_stderr": 0.030242233800854498, + "acc_norm": 0.4075471698113208, + "acc_norm_stderr": 0.030242233800854498 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.45454545454545453, + "acc_stderr": 0.04769300568972743, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.04769300568972743 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.32222222222222224, + "acc_stderr": 0.028493465091028597, + "acc_norm": 0.32222222222222224, + "acc_norm_stderr": 0.028493465091028597 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33774834437086093, + "acc_stderr": 0.0386155754625517, + "acc_norm": 0.33774834437086093, + "acc_norm_stderr": 0.0386155754625517 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5074626865671642, + "acc_stderr": 0.03535140084276719, + "acc_norm": 0.5074626865671642, + "acc_norm_stderr": 0.03535140084276719 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.31213872832369943, + "acc_stderr": 0.035331333893236574, + "acc_norm": 0.31213872832369943, + "acc_norm_stderr": 0.035331333893236574 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 
0.2962962962962963, + "acc_stderr": 0.02351729433596329, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.02351729433596329 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2847222222222222, + "acc_stderr": 0.037738099906869355, + "acc_norm": 0.2847222222222222, + "acc_norm_stderr": 0.037738099906869355 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.47, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.47, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.3699421965317919, + "acc_stderr": 0.025992472029306397, + "acc_norm": 0.3699421965317919, + "acc_norm_stderr": 0.025992472029306397 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4233128834355828, + "acc_stderr": 0.038818912133343826, + "acc_norm": 0.4233128834355828, + "acc_norm_stderr": 0.038818912133343826 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.41975308641975306, + "acc_stderr": 0.027460099557005138, + "acc_norm": 0.41975308641975306, + "acc_norm_stderr": 0.027460099557005138 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.38860103626943004, + "acc_stderr": 0.03517739796373132, + "acc_norm": 0.38860103626943004, + "acc_norm_stderr": 0.03517739796373132 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.30701754385964913, + "acc_stderr": 0.04339138322579859, + "acc_norm": 0.30701754385964913, + "acc_norm_stderr": 0.04339138322579859 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.43853211009174314, + "acc_stderr": 0.02127471307395457, + "acc_norm": 0.43853211009174314, + "acc_norm_stderr": 0.02127471307395457 + }, + 
"harness|ko_mmlu_formal_logic|5": { + "acc": 0.24603174603174602, + "acc_stderr": 0.03852273364924315, + "acc_norm": 0.24603174603174602, + "acc_norm_stderr": 0.03852273364924315 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.02845263998508801, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.02845263998508801 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6446280991735537, + "acc_stderr": 0.04369236326573981, + "acc_norm": 0.6446280991735537, + "acc_norm_stderr": 0.04369236326573981 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4276315789473684, + "acc_stderr": 0.040260970832965585, + "acc_norm": 0.4276315789473684, + "acc_norm_stderr": 0.040260970832965585 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.32516339869281047, + "acc_stderr": 0.01895088677080631, + "acc_norm": 0.32516339869281047, + "acc_norm_stderr": 0.01895088677080631 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2624113475177305, + "acc_stderr": 0.026244920349843014, + "acc_norm": 0.2624113475177305, + "acc_norm_stderr": 0.026244920349843014 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25892857142857145, + "acc_stderr": 0.041577515398656284, + "acc_norm": 0.25892857142857145, + "acc_norm_stderr": 0.041577515398656284 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.23148148148148148, + "acc_stderr": 0.028765111718046965, + "acc_norm": 0.23148148148148148, + "acc_norm_stderr": 0.028765111718046965 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23798882681564246, + "acc_stderr": 0.014242630070574894, + "acc_norm": 0.23798882681564246, + "acc_norm_stderr": 0.014242630070574894 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + 
"acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.2757352941176471, + "acc_stderr": 0.027146271936625166, + "acc_norm": 0.2757352941176471, + "acc_norm_stderr": 0.027146271936625166 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2530612244897959, + "acc_stderr": 0.027833023871399666, + "acc_norm": 0.2530612244897959, + "acc_norm_stderr": 0.027833023871399666 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.4936708860759494, + "acc_stderr": 0.032544620107678585, + "acc_norm": 0.4936708860759494, + "acc_norm_stderr": 0.032544620107678585 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2790091264667536, + "acc_stderr": 0.011455208832803538, + "acc_norm": 0.2790091264667536, + "acc_norm_stderr": 0.011455208832803538 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.3627450980392157, + "acc_stderr": 0.03374499356319355, + "acc_norm": 0.3627450980392157, + "acc_norm_stderr": 0.03374499356319355 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3939393939393939, + "acc_stderr": 0.0381549430868893, + "acc_norm": 0.3939393939393939, + "acc_norm_stderr": 0.0381549430868893 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3108935128518972, + "mc1_stderr": 0.01620331667355969, + "mc2": 0.4613430780367225, + "mc2_stderr": 0.01569291605244769 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3600944510035419, + "acc_stderr": 0.01650368672044007, + "acc_norm": 0.4769775678866588, + "acc_norm_stderr": 0.01717212154672764 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + 
"harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + 
"harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Alphacode-AI/Alphallama3-8B_v2", + "model_sha": "33782439baaab0d356d31e007874b2e4e96c13eb", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/AtAndDev/ShortKingv0.1/result_2023-09-29 19:59:47.json b/AtAndDev/ShortKingv0.1/result_2023-09-29 19:59:47.json new file mode 100644 index 0000000000000000000000000000000000000000..52ed0ae111f0fc859f3bcb9a50e444d1b486e227 --- /dev/null +++ b/AtAndDev/ShortKingv0.1/result_2023-09-29 19:59:47.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.19112627986348124, + "acc_stderr": 0.011490055292778596, + "acc_norm": 0.24829351535836178, + "acc_norm_stderr": 0.012624912868089764 + }, + "harness|ko_hellaswag|10": { + "acc": 0.2756423023302131, + "acc_stderr": 0.0044592414745187915, + "acc_norm": 0.29884485162318264, + "acc_norm_stderr": 0.004568161710399566 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.27485380116959063, + "acc_stderr": 0.03424042924691582, + "acc_norm": 0.27485380116959063, + "acc_norm_stderr": 0.03424042924691582 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.17475728155339806, + "acc_stderr": 0.037601780060266196, + "acc_norm": 0.17475728155339806, + "acc_norm_stderr": 0.037601780060266196 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2669220945083014, + "acc_stderr": 0.015818450894777555, + 
"acc_norm": 0.2669220945083014, + "acc_norm_stderr": 0.015818450894777555 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3111111111111111, + "acc_stderr": 0.03999262876617722, + "acc_norm": 0.3111111111111111, + "acc_norm_stderr": 0.03999262876617722 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2936170212765957, + "acc_stderr": 0.02977164271249123, + "acc_norm": 0.2936170212765957, + "acc_norm_stderr": 0.02977164271249123 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.27710843373493976, + "acc_stderr": 0.03484331592680588, + "acc_norm": 0.27710843373493976, + "acc_norm_stderr": 0.03484331592680588 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.20257234726688103, + "acc_stderr": 0.022827317491059686, + "acc_norm": 0.20257234726688103, + "acc_norm_stderr": 0.022827317491059686 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3632286995515695, + "acc_stderr": 0.032277904428505, + "acc_norm": 0.3632286995515695, + "acc_norm_stderr": 0.032277904428505 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2824427480916031, + "acc_stderr": 0.03948406125768361, + "acc_norm": 0.2824427480916031, + "acc_norm_stderr": 0.03948406125768361 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.21717171717171718, + "acc_stderr": 0.029376616484945644, + "acc_norm": 0.21717171717171718, + "acc_norm_stderr": 0.029376616484945644 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2206896551724138, + "acc_stderr": 0.03455930201924812, + "acc_norm": 0.2206896551724138, + "acc_norm_stderr": 0.03455930201924812 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.14705882352941177, + "acc_stderr": 
0.035240689515674474, + "acc_norm": 0.14705882352941177, + "acc_norm_stderr": 0.035240689515674474 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.226890756302521, + "acc_stderr": 0.027205371538279476, + "acc_norm": 0.226890756302521, + "acc_norm_stderr": 0.027205371538279476 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2153846153846154, + "acc_stderr": 0.020843034557462878, + "acc_norm": 0.2153846153846154, + "acc_norm_stderr": 0.020843034557462878 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.25, + "acc_stderr": 0.04186091791394607, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04186091791394607 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.13793103448275862, + "acc_stderr": 0.024261984301044565, + "acc_norm": 0.13793103448275862, + "acc_norm_stderr": 0.024261984301044565 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.20967741935483872, + "acc_stderr": 0.023157879349083536, + "acc_norm": 0.20967741935483872, + "acc_norm_stderr": 0.023157879349083536 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2094017094017094, + "acc_stderr": 0.026655699653922737, + "acc_norm": 0.2094017094017094, + "acc_norm_stderr": 0.026655699653922737 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2490566037735849, + "acc_stderr": 0.026616482980501715, + "acc_norm": 0.2490566037735849, + "acc_norm_stderr": 0.026616482980501715 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03955932861795833, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03955932861795833 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 
0.26296296296296295, + "acc_stderr": 0.02684205787383371, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.02684205787383371 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + "acc_stderr": 0.03684881521389024, + "acc_norm": 0.2847682119205298, + "acc_norm_stderr": 0.03684881521389024 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.24378109452736318, + "acc_stderr": 0.03036049015401464, + "acc_norm": 0.24378109452736318, + "acc_norm_stderr": 0.03036049015401464 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2138728323699422, + "acc_stderr": 0.03126511206173043, + "acc_norm": 0.2138728323699422, + "acc_norm_stderr": 0.03126511206173043 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.25396825396825395, + "acc_stderr": 0.022418042891113946, + "acc_norm": 0.25396825396825395, + "acc_norm_stderr": 0.022418042891113946 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.03476590104304134, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.03476590104304134 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.26011560693641617, + "acc_stderr": 0.023618678310069363, + "acc_norm": 0.26011560693641617, + "acc_norm_stderr": 0.023618678310069363 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.27607361963190186, + "acc_stderr": 0.0351238528370505, + "acc_norm": 0.27607361963190186, + "acc_norm_stderr": 0.0351238528370505 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.24074074074074073, + "acc_stderr": 0.02378858355165854, + "acc_norm": 0.24074074074074073, + "acc_norm_stderr": 0.02378858355165854 + }, + 
"harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.20725388601036268, + "acc_stderr": 0.02925282329180363, + "acc_norm": 0.20725388601036268, + "acc_norm_stderr": 0.02925282329180363 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.042270544512322004, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.042270544512322004 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.21100917431192662, + "acc_stderr": 0.017493922404112648, + "acc_norm": 0.21100917431192662, + "acc_norm_stderr": 0.017493922404112648 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.23809523809523808, + "acc_stderr": 0.038095238095238106, + "acc_norm": 0.23809523809523808, + "acc_norm_stderr": 0.038095238095238106 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.024288619466046116, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.024288619466046116 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.19008264462809918, + "acc_stderr": 0.03581796951709282, + "acc_norm": 0.19008264462809918, + "acc_norm_stderr": 0.03581796951709282 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.17763157894736842, + "acc_stderr": 0.03110318238312338, + "acc_norm": 0.17763157894736842, + "acc_norm_stderr": 0.03110318238312338 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.25980392156862747, + "acc_stderr": 0.017740899509177795, + "acc_norm": 0.25980392156862747, + "acc_norm_stderr": 0.017740899509177795 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2765957446808511, + "acc_stderr": 0.026684564340460997, + "acc_norm": 
0.2765957446808511, + "acc_norm_stderr": 0.026684564340460997 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.042878587513404544, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.042878587513404544 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.35648148148148145, + "acc_stderr": 0.032664783315272714, + "acc_norm": 0.35648148148148145, + "acc_norm_stderr": 0.032664783315272714 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23016759776536314, + "acc_stderr": 0.014078339253425809, + "acc_norm": 0.23016759776536314, + "acc_norm_stderr": 0.014078339253425809 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.02767846864214471, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.02767846864214471 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3142857142857143, + "acc_stderr": 0.029719329422417468, + "acc_norm": 0.3142857142857143, + "acc_norm_stderr": 0.029719329422417468 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2742616033755274, + "acc_stderr": 0.02904133351059804, + "acc_norm": 0.2742616033755274, + "acc_norm_stderr": 0.02904133351059804 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.24511082138200782, + "acc_stderr": 0.010986307870045526, + "acc_norm": 0.24511082138200782, + "acc_norm_stderr": 0.010986307870045526 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.27941176470588236, + "acc_stderr": 0.03149328104507955, + "acc_norm": 0.27941176470588236, + "acc_norm_stderr": 0.03149328104507955 + }, + 
"harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03225078108306289, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03225078108306289 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.29498164014687883, + "mc1_stderr": 0.015964400965589674, + "mc2": 0.49219803033147647, + "mc2_stderr": 0.015947492879186672 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2361275088547816, + "acc_stderr": 0.014601536093324397, + "acc_norm": 0.27508854781582054, + "acc_norm_stderr": 0.015353010757952649 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + 
"harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "AtAndDev/ShortKingv0.1", + "model_sha": "6cd9b5bc13ee15b5e7e7cfb46477bc6a7c0b5d47", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/BAAI/Infinity-Instruct-3M-0613-Mistral-7B/result_2024-08-05 19:38:23.json b/BAAI/Infinity-Instruct-3M-0613-Mistral-7B/result_2024-08-05 19:38:23.json new file mode 100644 index 0000000000000000000000000000000000000000..efe05a0f92728ccfba31194fb4f5504b5784bab3 --- /dev/null +++ b/BAAI/Infinity-Instruct-3M-0613-Mistral-7B/result_2024-08-05 19:38:23.json @@ -0,0 +1,444 @@ +{ + "results": { + 
"harness|ko_arc_challenge|25": { + "acc": 0.33361774744027306, + "acc_stderr": 0.013778687054176541, + "acc_norm": 0.3873720136518771, + "acc_norm_stderr": 0.014235872487909876 + }, + "harness|ko_hellaswag|10": { + "acc": 0.37064329814777935, + "acc_stderr": 0.0048198999453424925, + "acc_norm": 0.4827723561043617, + "acc_norm_stderr": 0.004986818680313441 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.47953216374269003, + "acc_stderr": 0.038316105328219316, + "acc_norm": 0.47953216374269003, + "acc_norm_stderr": 0.038316105328219316 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5922330097087378, + "acc_stderr": 0.04865777570410768, + "acc_norm": 0.5922330097087378, + "acc_norm_stderr": 0.04865777570410768 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.017867695938429778, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.017867695938429778 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.35555555555555557, + "acc_stderr": 0.04135176749720386, + "acc_norm": 0.35555555555555557, + "acc_norm_stderr": 0.04135176749720386 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4127659574468085, + "acc_stderr": 0.03218471141400352, + "acc_norm": 0.4127659574468085, + "acc_norm_stderr": 0.03218471141400352 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42771084337349397, + "acc_stderr": 0.03851597683718533, + "acc_norm": 0.42771084337349397, + "acc_norm_stderr": 0.03851597683718533 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4983922829581994, + "acc_stderr": 0.02839794490780661, + "acc_norm": 0.4983922829581994, + "acc_norm_stderr": 0.02839794490780661 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4977578475336323, + "acc_stderr": 0.03355746535223263, + "acc_norm": 0.4977578475336323, + "acc_norm_stderr": 0.03355746535223263 + 
}, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4732824427480916, + "acc_stderr": 0.04379024936553894, + "acc_norm": 0.4732824427480916, + "acc_norm_stderr": 0.04379024936553894 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5757575757575758, + "acc_stderr": 0.03521224908841585, + "acc_norm": 0.5757575757575758, + "acc_norm_stderr": 0.03521224908841585 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4482758620689655, + "acc_stderr": 0.04144311810878151, + "acc_norm": 0.4482758620689655, + "acc_norm_stderr": 0.04144311810878151 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.04280105837364395, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.04280105837364395 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4957983193277311, + "acc_stderr": 0.0324773433444811, + "acc_norm": 0.4957983193277311, + "acc_norm_stderr": 0.0324773433444811 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.47692307692307695, + "acc_stderr": 0.025323990861736125, + "acc_norm": 0.47692307692307695, + "acc_norm_stderr": 0.025323990861736125 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.62, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.62, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39408866995073893, + "acc_stderr": 0.03438157967036545, + "acc_norm": 0.39408866995073893, + "acc_norm_stderr": 
0.03438157967036545 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.47096774193548385, + "acc_stderr": 0.028396016402761008, + "acc_norm": 0.47096774193548385, + "acc_norm_stderr": 0.028396016402761008 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7393162393162394, + "acc_stderr": 0.02876034895652341, + "acc_norm": 0.7393162393162394, + "acc_norm_stderr": 0.02876034895652341 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4867924528301887, + "acc_stderr": 0.03076213487450049, + "acc_norm": 0.4867924528301887, + "acc_norm_stderr": 0.03076213487450049 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5272727272727272, + "acc_stderr": 0.04782001791380061, + "acc_norm": 0.5272727272727272, + "acc_norm_stderr": 0.04782001791380061 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3925925925925926, + "acc_stderr": 0.02977384701253297, + "acc_norm": 0.3925925925925926, + "acc_norm_stderr": 0.02977384701253297 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.23841059602649006, + "acc_stderr": 0.03479185572599661, + "acc_norm": 0.23841059602649006, + "acc_norm_stderr": 0.03479185572599661 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6119402985074627, + "acc_stderr": 0.03445789964362749, + "acc_norm": 0.6119402985074627, + "acc_norm_stderr": 0.03445789964362749 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3583815028901734, + "acc_stderr": 0.036563436533531585, + "acc_norm": 0.3583815028901734, + "acc_norm_stderr": 0.036563436533531585 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3862433862433862, + "acc_stderr": 0.025075981767601688, + "acc_norm": 0.3862433862433862, + "acc_norm_stderr": 0.025075981767601688 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3194444444444444, + "acc_stderr": 0.038990736873573344, + "acc_norm": 0.3194444444444444, + "acc_norm_stderr": 0.038990736873573344 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 
0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.57, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.57, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4913294797687861, + "acc_stderr": 0.02691504735536981, + "acc_norm": 0.4913294797687861, + "acc_norm_stderr": 0.02691504735536981 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.44785276073619634, + "acc_stderr": 0.03906947479456601, + "acc_norm": 0.44785276073619634, + "acc_norm_stderr": 0.03906947479456601 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.47530864197530864, + "acc_stderr": 0.02778680093142745, + "acc_norm": 0.47530864197530864, + "acc_norm_stderr": 0.02778680093142745 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.44, + "acc_stderr": 0.0498887651569859, + "acc_norm": 0.44, + "acc_norm_stderr": 0.0498887651569859 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5025906735751295, + "acc_stderr": 0.03608390745384487, + "acc_norm": 0.5025906735751295, + "acc_norm_stderr": 0.03608390745384487 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.0414243971948936, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.0414243971948936 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.47706422018348627, + "acc_stderr": 0.021414757058175502, + "acc_norm": 0.47706422018348627, + "acc_norm_stderr": 0.021414757058175502 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.04360314860077459, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.04360314860077459 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.48366013071895425, + "acc_stderr": 0.028614624752805407, + "acc_norm": 0.48366013071895425, + "acc_norm_stderr": 0.028614624752805407 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.49, + "acc_stderr": 
0.05024183937956911, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6694214876033058, + "acc_stderr": 0.04294340845212094, + "acc_norm": 0.6694214876033058, + "acc_norm_stderr": 0.04294340845212094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4473684210526316, + "acc_stderr": 0.04046336883978251, + "acc_norm": 0.4473684210526316, + "acc_norm_stderr": 0.04046336883978251 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3872549019607843, + "acc_stderr": 0.019706875804085627, + "acc_norm": 0.3872549019607843, + "acc_norm_stderr": 0.019706875804085627 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.34397163120567376, + "acc_stderr": 0.028338017428611334, + "acc_norm": 0.34397163120567376, + "acc_norm_stderr": 0.028338017428611334 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.04697113923010212, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.04697113923010212 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.03293377139415191, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.03293377139415191 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2111731843575419, + "acc_stderr": 0.013650276794312199, + "acc_norm": 0.2111731843575419, + "acc_norm_stderr": 0.013650276794312199 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.62, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.62, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.35661764705882354, + "acc_stderr": 0.02909720956841195, + "acc_norm": 0.35661764705882354, + "acc_norm_stderr": 0.02909720956841195 + }, + 
"harness|ko_mmlu_security_studies|5": { + "acc": 0.5469387755102041, + "acc_stderr": 0.031867859300041275, + "acc_norm": 0.5469387755102041, + "acc_norm_stderr": 0.031867859300041275 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5864978902953587, + "acc_stderr": 0.03205649904851858, + "acc_norm": 0.5864978902953587, + "acc_norm_stderr": 0.03205649904851858 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3305084745762712, + "acc_stderr": 0.012014142101842982, + "acc_norm": 0.3305084745762712, + "acc_norm_stderr": 0.012014142101842982 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.45098039215686275, + "acc_stderr": 0.03492406104163613, + "acc_norm": 0.45098039215686275, + "acc_norm_stderr": 0.03492406104163613 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.48484848484848486, + "acc_stderr": 0.03902551007374449, + "acc_norm": 0.48484848484848486, + "acc_norm_stderr": 0.03902551007374449 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3182374541003672, + "mc1_stderr": 0.01630598864892059, + "mc2": 0.48659504962443784, + "mc2_stderr": 0.015814727827810682 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4380165289256198, + "acc_stderr": 0.017057753702160287, + "acc_norm": 0.4722550177095632, + "acc_norm_stderr": 0.01716386797945601 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, 
+ "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + 
"harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "BAAI/Infinity-Instruct-3M-0613-Mistral-7B", + "model_sha": "d8ffa6d11ef4ef2e6441326383b3d857591d822e", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/BAAI/Infinity-Instruct-3M-0625-Llama3-8B/result_2024-07-18 22:34:41.json b/BAAI/Infinity-Instruct-3M-0625-Llama3-8B/result_2024-07-18 22:34:41.json new file mode 100644 index 0000000000000000000000000000000000000000..9bf3a000ae09b68faea9192d7fa6946df6ad9492 --- /dev/null +++ b/BAAI/Infinity-Instruct-3M-0625-Llama3-8B/result_2024-07-18 22:34:41.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.40017064846416384, + "acc_stderr": 0.014317197787809186, + "acc_norm": 0.44197952218430037, + "acc_norm_stderr": 0.014512682523128345 + }, + "harness|ko_hellaswag|10": { + "acc": 0.39075881298546106, + "acc_stderr": 0.004869232758103322, + "acc_norm": 0.5248954391555467, + "acc_norm_stderr": 0.004983592410934173 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6257309941520468, + "acc_stderr": 0.03711601185389481, + "acc_norm": 0.6257309941520468, + "acc_norm_stderr": 0.03711601185389481 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6407766990291263, + "acc_stderr": 0.04750458399041696, + "acc_norm": 0.6407766990291263, + "acc_norm_stderr": 0.04750458399041696 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5721583652618135, + "acc_stderr": 0.017692787927803724, + "acc_norm": 0.5721583652618135, + "acc_norm_stderr": 0.017692787927803724 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4222222222222222, + "acc_stderr": 0.042667634040995814, + "acc_norm": 0.4222222222222222, + "acc_norm_stderr": 0.042667634040995814 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + 
"acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4851063829787234, + "acc_stderr": 0.03267151848924777, + "acc_norm": 0.4851063829787234, + "acc_norm_stderr": 0.03267151848924777 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42771084337349397, + "acc_stderr": 0.03851597683718533, + "acc_norm": 0.42771084337349397, + "acc_norm_stderr": 0.03851597683718533 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5369774919614148, + "acc_stderr": 0.02832032583010591, + "acc_norm": 0.5369774919614148, + "acc_norm_stderr": 0.02832032583010591 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4977578475336323, + "acc_stderr": 0.033557465352232634, + "acc_norm": 0.4977578475336323, + "acc_norm_stderr": 0.033557465352232634 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5343511450381679, + "acc_stderr": 0.04374928560599738, + "acc_norm": 0.5343511450381679, + "acc_norm_stderr": 0.04374928560599738 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956914, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956914 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6262626262626263, + "acc_stderr": 0.034468977386593325, + "acc_norm": 0.6262626262626263, + "acc_norm_stderr": 0.034468977386593325 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5379310344827586, + "acc_stderr": 0.04154659671707548, + "acc_norm": 0.5379310344827586, + "acc_norm_stderr": 0.04154659671707548 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3235294117647059, + "acc_stderr": 0.046550104113196177, + "acc_norm": 0.3235294117647059, + "acc_norm_stderr": 0.046550104113196177 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5294117647058824, + "acc_stderr": 0.032422250271150053, + "acc_norm": 0.5294117647058824, + "acc_norm_stderr": 0.032422250271150053 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5256410256410257, + 
"acc_stderr": 0.025317649726448673, + "acc_norm": 0.5256410256410257, + "acc_norm_stderr": 0.025317649726448673 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.63, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.63, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.04557239513497751, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.04557239513497751 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4236453201970443, + "acc_stderr": 0.034767257476490364, + "acc_norm": 0.4236453201970443, + "acc_norm_stderr": 0.034767257476490364 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5032258064516129, + "acc_stderr": 0.02844341422643831, + "acc_norm": 0.5032258064516129, + "acc_norm_stderr": 0.02844341422643831 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7905982905982906, + "acc_stderr": 0.026655699653922737, + "acc_norm": 0.7905982905982906, + "acc_norm_stderr": 0.026655699653922737 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5094339622641509, + "acc_stderr": 0.0307673947078081, + "acc_norm": 0.5094339622641509, + "acc_norm_stderr": 0.0307673947078081 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5818181818181818, + "acc_stderr": 0.04724577405731572, + "acc_norm": 0.5818181818181818, + "acc_norm_stderr": 0.04724577405731572 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.362962962962963, + "acc_stderr": 0.029318203645206865, + "acc_norm": 0.362962962962963, + "acc_norm_stderr": 0.029318203645206865 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3576158940397351, + "acc_stderr": 0.03913453431177258, + "acc_norm": 0.3576158940397351, + "acc_norm_stderr": 0.03913453431177258 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 
0.6666666666666666, + "acc_stderr": 0.03333333333333333, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.03333333333333333 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.43352601156069365, + "acc_stderr": 0.037786210790920545, + "acc_norm": 0.43352601156069365, + "acc_norm_stderr": 0.037786210790920545 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3994708994708995, + "acc_stderr": 0.025225450284067877, + "acc_norm": 0.3994708994708995, + "acc_norm_stderr": 0.025225450284067877 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5, + "acc_stderr": 0.04181210050035455, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04181210050035455 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.72, + "acc_stderr": 0.045126085985421255, + "acc_norm": 0.72, + "acc_norm_stderr": 0.045126085985421255 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5751445086705202, + "acc_stderr": 0.026613350840261733, + "acc_norm": 0.5751445086705202, + "acc_norm_stderr": 0.026613350840261733 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.48466257668711654, + "acc_stderr": 0.039265223787088424, + "acc_norm": 0.48466257668711654, + "acc_norm_stderr": 0.039265223787088424 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5524691358024691, + "acc_stderr": 0.0276671385694227, + "acc_norm": 0.5524691358024691, + "acc_norm_stderr": 0.0276671385694227 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5958549222797928, + "acc_stderr": 0.03541508578884021, + "acc_norm": 0.5958549222797928, + "acc_norm_stderr": 0.03541508578884021 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 
0.37719298245614036, + "acc_stderr": 0.04559522141958216, + "acc_norm": 0.37719298245614036, + "acc_norm_stderr": 0.04559522141958216 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5944954128440367, + "acc_stderr": 0.02105099799189684, + "acc_norm": 0.5944954128440367, + "acc_norm_stderr": 0.02105099799189684 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4523809523809524, + "acc_stderr": 0.044518079590553275, + "acc_norm": 0.4523809523809524, + "acc_norm_stderr": 0.044518079590553275 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5294117647058824, + "acc_stderr": 0.02858034106513829, + "acc_norm": 0.5294117647058824, + "acc_norm_stderr": 0.02858034106513829 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.743801652892562, + "acc_stderr": 0.0398497965330287, + "acc_norm": 0.743801652892562, + "acc_norm_stderr": 0.0398497965330287 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5263157894736842, + "acc_stderr": 0.040633027314866725, + "acc_norm": 0.5263157894736842, + "acc_norm_stderr": 0.040633027314866725 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.46078431372549017, + "acc_stderr": 0.02016552331390791, + "acc_norm": 0.46078431372549017, + "acc_norm_stderr": 0.02016552331390791 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.36879432624113473, + "acc_stderr": 0.028782227561347247, + "acc_norm": 0.36879432624113473, + "acc_norm_stderr": 0.028782227561347247 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.5089285714285714, + "acc_stderr": 0.04745033255489123, + "acc_norm": 0.5089285714285714, + "acc_norm_stderr": 0.04745033255489123 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.03388857118502325, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.03388857118502325 
+ }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.264804469273743, + "acc_stderr": 0.01475690648326066, + "acc_norm": 0.264804469273743, + "acc_norm_stderr": 0.01475690648326066 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.68, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.68, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.39338235294117646, + "acc_stderr": 0.02967428828131118, + "acc_norm": 0.39338235294117646, + "acc_norm_stderr": 0.02967428828131118 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6040816326530613, + "acc_stderr": 0.03130802899065686, + "acc_norm": 0.6040816326530613, + "acc_norm_stderr": 0.03130802899065686 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6540084388185654, + "acc_stderr": 0.03096481058878671, + "acc_norm": 0.6540084388185654, + "acc_norm_stderr": 0.03096481058878671 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3559322033898305, + "acc_stderr": 0.012228645537277575, + "acc_norm": 0.3559322033898305, + "acc_norm_stderr": 0.012228645537277575 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6078431372549019, + "acc_stderr": 0.03426712349247273, + "acc_norm": 0.6078431372549019, + "acc_norm_stderr": 0.03426712349247273 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.03756335775187896, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.03756335775187896 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.32313341493268055, + "mc1_stderr": 0.016371836286454604, + "mc2": 0.48886729185685895, + "mc2_stderr": 0.01547303599735754 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5017709563164109, + "acc_stderr": 0.017190246276231863, + "acc_norm": 
0.5572609208972845, + "acc_norm_stderr": 0.01707725413155622 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + 
"harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "BAAI/Infinity-Instruct-3M-0625-Llama3-8B", + "model_sha": "7be7c0ff1e35c3bb781c47222da99a1724f5f1da", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/BAAI/Infinity-Instruct-3M-0625-Mistral-7B/result_2024-07-18 22:34:53.json b/BAAI/Infinity-Instruct-3M-0625-Mistral-7B/result_2024-07-18 22:34:53.json new file mode 100644 index 0000000000000000000000000000000000000000..b4c2908edf14bb9eca38e33cd32be03534a37d87 --- /dev/null +++ b/BAAI/Infinity-Instruct-3M-0625-Mistral-7B/result_2024-07-18 22:34:53.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.32593856655290104, + "acc_stderr": 0.013697432466693249, + "acc_norm": 0.3839590443686007, + "acc_norm_stderr": 0.014212444980651894 + }, + "harness|ko_hellaswag|10": { + "acc": 0.37492531368253335, + "acc_stderr": 0.004831142570475497, + "acc_norm": 0.4856602270464051, + "acc_norm_stderr": 0.004987728900897592 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5263157894736842, + "acc_stderr": 0.03829509868994727, + 
"acc_norm": 0.5263157894736842, + "acc_norm_stderr": 0.03829509868994727 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6213592233009708, + "acc_stderr": 0.04802694698258975, + "acc_norm": 0.6213592233009708, + "acc_norm_stderr": 0.04802694698258975 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4878671775223499, + "acc_stderr": 0.017874698667491355, + "acc_norm": 0.4878671775223499, + "acc_norm_stderr": 0.017874698667491355 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3925925925925926, + "acc_stderr": 0.042185062153688786, + "acc_norm": 0.3925925925925926, + "acc_norm_stderr": 0.042185062153688786 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39574468085106385, + "acc_stderr": 0.03196758697835363, + "acc_norm": 0.39574468085106385, + "acc_norm_stderr": 0.03196758697835363 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.40963855421686746, + "acc_stderr": 0.03828401115079021, + "acc_norm": 0.40963855421686746, + "acc_norm_stderr": 0.03828401115079021 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4983922829581994, + "acc_stderr": 0.02839794490780661, + "acc_norm": 0.4983922829581994, + "acc_norm_stderr": 0.02839794490780661 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5067264573991032, + "acc_stderr": 0.033554765962343545, + "acc_norm": 0.5067264573991032, + "acc_norm_stderr": 0.033554765962343545 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48854961832061067, + "acc_stderr": 0.04384140024078016, + "acc_norm": 0.48854961832061067, + "acc_norm_stderr": 0.04384140024078016 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5505050505050505, + "acc_stderr": 0.0354413249194797, + 
"acc_norm": 0.5505050505050505, + "acc_norm_stderr": 0.0354413249194797 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4896551724137931, + "acc_stderr": 0.041657747757287644, + "acc_norm": 0.4896551724137931, + "acc_norm_stderr": 0.041657747757287644 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.044405219061793275, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.044405219061793275 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5084033613445378, + "acc_stderr": 0.03247390276569669, + "acc_norm": 0.5084033613445378, + "acc_norm_stderr": 0.03247390276569669 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4794871794871795, + "acc_stderr": 0.025329663163489943, + "acc_norm": 0.4794871794871795, + "acc_norm_stderr": 0.025329663163489943 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.64, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.64, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816505, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816505 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.048262172941398944, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.048262172941398944 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.03465304488406796, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.03465304488406796 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.46774193548387094, + "acc_stderr": 0.02838474778881333, + "acc_norm": 0.46774193548387094, + "acc_norm_stderr": 0.02838474778881333 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7777777777777778, + "acc_stderr": 0.027236013946196687, + "acc_norm": 0.7777777777777778, + "acc_norm_stderr": 0.027236013946196687 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 
0.4830188679245283, + "acc_stderr": 0.030755120364119898, + "acc_norm": 0.4830188679245283, + "acc_norm_stderr": 0.030755120364119898 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5545454545454546, + "acc_stderr": 0.047605488214603246, + "acc_norm": 0.5545454545454546, + "acc_norm_stderr": 0.047605488214603246 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3592592592592593, + "acc_stderr": 0.029252905927251976, + "acc_norm": 0.3592592592592593, + "acc_norm_stderr": 0.029252905927251976 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + "acc_stderr": 0.03684881521389023, + "acc_norm": 0.2847682119205298, + "acc_norm_stderr": 0.03684881521389023 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6417910447761194, + "acc_stderr": 0.03390393042268815, + "acc_norm": 0.6417910447761194, + "acc_norm_stderr": 0.03390393042268815 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3352601156069364, + "acc_stderr": 0.03599586301247077, + "acc_norm": 0.3352601156069364, + "acc_norm_stderr": 0.03599586301247077 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.38095238095238093, + "acc_stderr": 0.025010749116137602, + "acc_norm": 0.38095238095238093, + "acc_norm_stderr": 0.025010749116137602 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3402777777777778, + "acc_stderr": 0.03962135573486219, + "acc_norm": 0.3402777777777778, + "acc_norm_stderr": 0.03962135573486219 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5115606936416185, + "acc_stderr": 0.02691189868637792, + "acc_norm": 0.5115606936416185, + "acc_norm_stderr": 0.02691189868637792 + }, + 
"harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4662576687116564, + "acc_stderr": 0.03919415545048409, + "acc_norm": 0.4662576687116564, + "acc_norm_stderr": 0.03919415545048409 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5123456790123457, + "acc_stderr": 0.027812262269327242, + "acc_norm": 0.5123456790123457, + "acc_norm_stderr": 0.027812262269327242 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5025906735751295, + "acc_stderr": 0.03608390745384487, + "acc_norm": 0.5025906735751295, + "acc_norm_stderr": 0.03608390745384487 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.04227054451232199, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.04227054451232199 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5155963302752293, + "acc_stderr": 0.02142689153920805, + "acc_norm": 0.5155963302752293, + "acc_norm_stderr": 0.02142689153920805 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.04360314860077459, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.04360314860077459 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5032679738562091, + "acc_stderr": 0.02862930519400355, + "acc_norm": 0.5032679738562091, + "acc_norm_stderr": 0.02862930519400355 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7024793388429752, + "acc_stderr": 0.04173349148083499, + "acc_norm": 0.7024793388429752, + "acc_norm_stderr": 0.04173349148083499 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4407894736842105, + "acc_stderr": 0.04040311062490436, + "acc_norm": 0.4407894736842105, + "acc_norm_stderr": 0.04040311062490436 + }, + 
"harness|ko_mmlu_professional_psychology|5": { + "acc": 0.41013071895424835, + "acc_stderr": 0.0198984127176359, + "acc_norm": 0.41013071895424835, + "acc_norm_stderr": 0.0198984127176359 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.34397163120567376, + "acc_stderr": 0.028338017428611334, + "acc_norm": 0.34397163120567376, + "acc_norm_stderr": 0.028338017428611334 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.41964285714285715, + "acc_stderr": 0.04684099321077106, + "acc_norm": 0.41964285714285715, + "acc_norm_stderr": 0.04684099321077106 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.03350991604696044, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.03350991604696044 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.22905027932960895, + "acc_stderr": 0.014054314935614548, + "acc_norm": 0.22905027932960895, + "acc_norm_stderr": 0.014054314935614548 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3235294117647059, + "acc_stderr": 0.02841820861940679, + "acc_norm": 0.3235294117647059, + "acc_norm_stderr": 0.02841820861940679 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5714285714285714, + "acc_stderr": 0.03168091161233882, + "acc_norm": 0.5714285714285714, + "acc_norm_stderr": 0.03168091161233882 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6033755274261603, + "acc_stderr": 0.031843998738112236, + "acc_norm": 0.6033755274261603, + "acc_norm_stderr": 0.031843998738112236 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.34224250325945244, + "acc_stderr": 0.012117939998705867, 
+ "acc_norm": 0.34224250325945244, + "acc_norm_stderr": 0.012117939998705867 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.46078431372549017, + "acc_stderr": 0.03498501649369527, + "acc_norm": 0.46078431372549017, + "acc_norm_stderr": 0.03498501649369527 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.44242424242424244, + "acc_stderr": 0.03878372113711274, + "acc_norm": 0.44242424242424244, + "acc_norm_stderr": 0.03878372113711274 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3023255813953488, + "mc1_stderr": 0.016077509266133033, + "mc2": 0.47167018965487756, + "mc2_stderr": 0.01567124236565171 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.46635182998819363, + "acc_stderr": 0.017151384117131865, + "acc_norm": 0.5088547815820543, + "acc_norm_stderr": 0.017187658199336736 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + 
"harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "BAAI/Infinity-Instruct-3M-0625-Mistral-7B", + "model_sha": "302e3ae0bcc50dae3fb69fc1b08b518398e8c407", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/BAAI/Infinity-Instruct-3M-0625-Qwen2-7B/result_2024-07-16 18:06:36.json 
b/BAAI/Infinity-Instruct-3M-0625-Qwen2-7B/result_2024-07-16 18:06:36.json new file mode 100644 index 0000000000000000000000000000000000000000..082b184a49316edff5e3974ef3fbd190b81612d2 --- /dev/null +++ b/BAAI/Infinity-Instruct-3M-0625-Qwen2-7B/result_2024-07-16 18:06:36.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3822525597269625, + "acc_stderr": 0.01420045404997929, + "acc_norm": 0.4308873720136519, + "acc_norm_stderr": 0.014471133392642482 + }, + "harness|ko_hellaswag|10": { + "acc": 0.39255128460466043, + "acc_stderr": 0.004873203269366302, + "acc_norm": 0.5232025492929695, + "acc_norm_stderr": 0.004984405935541091 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.672514619883041, + "acc_stderr": 0.035993357714560276, + "acc_norm": 0.672514619883041, + "acc_norm_stderr": 0.035993357714560276 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.7572815533980582, + "acc_stderr": 0.04245022486384493, + "acc_norm": 0.7572815533980582, + "acc_norm_stderr": 0.04245022486384493 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6590038314176245, + "acc_stderr": 0.01695178138322332, + "acc_norm": 0.6590038314176245, + "acc_norm_stderr": 0.01695178138322332 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3851851851851852, + "acc_stderr": 0.042039210401562783, + "acc_norm": 0.3851851851851852, + "acc_norm_stderr": 0.042039210401562783 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.6127659574468085, + "acc_stderr": 0.03184389265339525, + "acc_norm": 0.6127659574468085, + "acc_norm_stderr": 0.03184389265339525 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.5, + "acc_stderr": 0.03892494720807614, + "acc_norm": 0.5, + "acc_norm_stderr": 0.03892494720807614 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6463022508038585, + "acc_stderr": 
0.027155208103200868, + "acc_norm": 0.6463022508038585, + "acc_norm_stderr": 0.027155208103200868 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5919282511210763, + "acc_stderr": 0.03298574607842822, + "acc_norm": 0.5919282511210763, + "acc_norm_stderr": 0.03298574607842822 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6183206106870229, + "acc_stderr": 0.0426073515764456, + "acc_norm": 0.6183206106870229, + "acc_norm_stderr": 0.0426073515764456 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7373737373737373, + "acc_stderr": 0.03135305009533086, + "acc_norm": 0.7373737373737373, + "acc_norm_stderr": 0.03135305009533086 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.6137931034482759, + "acc_stderr": 0.04057324734419035, + "acc_norm": 0.6137931034482759, + "acc_norm_stderr": 0.04057324734419035 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.30392156862745096, + "acc_stderr": 0.045766654032077615, + "acc_norm": 0.30392156862745096, + "acc_norm_stderr": 0.045766654032077615 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6470588235294118, + "acc_stderr": 0.031041941304059278, + "acc_norm": 0.6470588235294118, + "acc_norm_stderr": 0.031041941304059278 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.6564102564102564, + "acc_stderr": 0.024078696580635495, + "acc_norm": 0.6564102564102564, + "acc_norm_stderr": 0.024078696580635495 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.7, + "acc_stderr": 0.04605661864718381, + "acc_norm": 0.7, + "acc_norm_stderr": 0.04605661864718381 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6666666666666666, + 
"acc_stderr": 0.04557239513497751, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.04557239513497751 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4827586206896552, + "acc_stderr": 0.035158955511656986, + "acc_norm": 0.4827586206896552, + "acc_norm_stderr": 0.035158955511656986 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6580645161290323, + "acc_stderr": 0.026985289576552725, + "acc_norm": 0.6580645161290323, + "acc_norm_stderr": 0.026985289576552725 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7991452991452992, + "acc_stderr": 0.026246772946890467, + "acc_norm": 0.7991452991452992, + "acc_norm_stderr": 0.026246772946890467 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.6075471698113207, + "acc_stderr": 0.03005258057955784, + "acc_norm": 0.6075471698113207, + "acc_norm_stderr": 0.03005258057955784 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5909090909090909, + "acc_stderr": 0.04709306978661896, + "acc_norm": 0.5909090909090909, + "acc_norm_stderr": 0.04709306978661896 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.48518518518518516, + "acc_stderr": 0.030472153249328598, + "acc_norm": 0.48518518518518516, + "acc_norm_stderr": 0.030472153249328598 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.39072847682119205, + "acc_stderr": 0.039837983066598075, + "acc_norm": 0.39072847682119205, + "acc_norm_stderr": 0.039837983066598075 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7512437810945274, + "acc_stderr": 0.030567675938916707, + "acc_norm": 0.7512437810945274, + "acc_norm_stderr": 0.030567675938916707 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5375722543352601, + "acc_stderr": 0.0380168510452446, + "acc_norm": 0.5375722543352601, + "acc_norm_stderr": 0.0380168510452446 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.544973544973545, + "acc_stderr": 0.025646928361049395, + "acc_norm": 0.544973544973545, + "acc_norm_stderr": 
0.025646928361049395 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5486111111111112, + "acc_stderr": 0.041614023984032786, + "acc_norm": 0.5486111111111112, + "acc_norm_stderr": 0.041614023984032786 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.75, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.75, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.6069364161849711, + "acc_stderr": 0.026296227915613674, + "acc_norm": 0.6069364161849711, + "acc_norm_stderr": 0.026296227915613674 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.6134969325153374, + "acc_stderr": 0.03825825548848607, + "acc_norm": 0.6134969325153374, + "acc_norm_stderr": 0.03825825548848607 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6234567901234568, + "acc_stderr": 0.026959344518747784, + "acc_norm": 0.6234567901234568, + "acc_norm_stderr": 0.026959344518747784 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6632124352331606, + "acc_stderr": 0.03410780251836184, + "acc_norm": 0.6632124352331606, + "acc_norm_stderr": 0.03410780251836184 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.5087719298245614, + "acc_stderr": 0.04702880432049615, + "acc_norm": 0.5087719298245614, + "acc_norm_stderr": 0.04702880432049615 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.671559633027523, + "acc_stderr": 0.020135902797298395, + "acc_norm": 0.671559633027523, + "acc_norm_stderr": 0.020135902797298395 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.5396825396825397, + "acc_stderr": 0.04458029125470973, + "acc_norm": 0.5396825396825397, + 
"acc_norm_stderr": 0.04458029125470973 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.6111111111111112, + "acc_stderr": 0.027914055510467998, + "acc_norm": 0.6111111111111112, + "acc_norm_stderr": 0.027914055510467998 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.67, + "acc_stderr": 0.04725815626252607, + "acc_norm": 0.67, + "acc_norm_stderr": 0.04725815626252607 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7107438016528925, + "acc_stderr": 0.04139112727635463, + "acc_norm": 0.7107438016528925, + "acc_norm_stderr": 0.04139112727635463 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.7171052631578947, + "acc_stderr": 0.03665349695640767, + "acc_norm": 0.7171052631578947, + "acc_norm_stderr": 0.03665349695640767 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5522875816993464, + "acc_stderr": 0.02011692534742242, + "acc_norm": 0.5522875816993464, + "acc_norm_stderr": 0.02011692534742242 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.4432624113475177, + "acc_stderr": 0.029634838473766006, + "acc_norm": 0.4432624113475177, + "acc_norm_stderr": 0.029634838473766006 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4375, + "acc_stderr": 0.04708567521880525, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.04708567521880525 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5648148148148148, + "acc_stderr": 0.03381200005643525, + "acc_norm": 0.5648148148148148, + "acc_norm_stderr": 0.03381200005643525 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.38100558659217876, + "acc_stderr": 0.01624202883405361, + "acc_norm": 0.38100558659217876, + "acc_norm_stderr": 0.01624202883405361 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.56, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.56, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.76, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.76, + 
"acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5625, + "acc_stderr": 0.030134614954403924, + "acc_norm": 0.5625, + "acc_norm_stderr": 0.030134614954403924 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6408163265306123, + "acc_stderr": 0.03071356045510849, + "acc_norm": 0.6408163265306123, + "acc_norm_stderr": 0.03071356045510849 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7130801687763713, + "acc_stderr": 0.029443773022594693, + "acc_norm": 0.7130801687763713, + "acc_norm_stderr": 0.029443773022594693 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.41460234680573665, + "acc_stderr": 0.012582597058908284, + "acc_norm": 0.41460234680573665, + "acc_norm_stderr": 0.012582597058908284 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.7009803921568627, + "acc_stderr": 0.032133257173736184, + "acc_norm": 0.7009803921568627, + "acc_norm_stderr": 0.032133257173736184 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.7151515151515152, + "acc_stderr": 0.03524390844511781, + "acc_norm": 0.7151515151515152, + "acc_norm_stderr": 0.03524390844511781 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.32313341493268055, + "mc1_stderr": 0.016371836286454607, + "mc2": 0.4864952770687232, + "mc2_stderr": 0.015792887889536457 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.6068476977567887, + "acc_stderr": 0.016793262801287078, + "acc_norm": 0.6611570247933884, + "acc_norm_stderr": 0.016272952997019134 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + 
"harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + 
"harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "BAAI/Infinity-Instruct-3M-0625-Qwen2-7B", + "model_sha": "503c24156d7682458686a7b5324f7f886e63470d", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/BAAI/Infinity-Instruct-3M-0625-Yi-1.5-9B/result_2024-07-26 19:29:20.json b/BAAI/Infinity-Instruct-3M-0625-Yi-1.5-9B/result_2024-07-26 19:29:20.json new file mode 100644 index 0000000000000000000000000000000000000000..a4330f00ccbba717a8a6981c6410a12961a89a12 --- /dev/null +++ b/BAAI/Infinity-Instruct-3M-0625-Yi-1.5-9B/result_2024-07-26 19:29:20.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3216723549488055, + "acc_stderr": 0.013650488084494162, + "acc_norm": 0.35409556313993173, + "acc_norm_stderr": 0.013975454122756555 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3429595698068114, + "acc_stderr": 0.004737279691036193, + "acc_norm": 0.42581159131647084, + "acc_norm_stderr": 0.004934549009645965 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.45614035087719296, + "acc_stderr": 0.03820042586602966, + "acc_norm": 0.45614035087719296, + "acc_norm_stderr": 0.03820042586602966 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5436893203883495, + "acc_stderr": 0.04931801994220416, + "acc_norm": 0.5436893203883495, + "acc_norm_stderr": 0.04931801994220416 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4840357598978289, + "acc_stderr": 0.017870847506081713, + "acc_norm": 0.4840357598978289, + "acc_norm_stderr": 0.017870847506081713 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2518518518518518, + 
"acc_stderr": 0.037498507091740206, + "acc_norm": 0.2518518518518518, + "acc_norm_stderr": 0.037498507091740206 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.49361702127659574, + "acc_stderr": 0.032683358999363366, + "acc_norm": 0.49361702127659574, + "acc_norm_stderr": 0.032683358999363366 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39156626506024095, + "acc_stderr": 0.03799857454479636, + "acc_norm": 0.39156626506024095, + "acc_norm_stderr": 0.03799857454479636 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5080385852090032, + "acc_stderr": 0.028394421370984524, + "acc_norm": 0.5080385852090032, + "acc_norm_stderr": 0.028394421370984524 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4798206278026906, + "acc_stderr": 0.033530461674123, + "acc_norm": 0.4798206278026906, + "acc_norm_stderr": 0.033530461674123 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4732824427480916, + "acc_stderr": 0.04379024936553893, + "acc_norm": 0.4732824427480916, + "acc_norm_stderr": 0.04379024936553893 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5606060606060606, + "acc_stderr": 0.035360859475294805, + "acc_norm": 0.5606060606060606, + "acc_norm_stderr": 0.035360859475294805 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5379310344827586, + "acc_stderr": 0.04154659671707548, + "acc_norm": 0.5379310344827586, + "acc_norm_stderr": 0.04154659671707548 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04690650298201942, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04690650298201942 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + 
"acc": 0.5336134453781513, + "acc_stderr": 0.03240501447690071, + "acc_norm": 0.5336134453781513, + "acc_norm_stderr": 0.03240501447690071 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.45897435897435895, + "acc_stderr": 0.025265525491284295, + "acc_norm": 0.45897435897435895, + "acc_norm_stderr": 0.025265525491284295 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5648148148148148, + "acc_stderr": 0.04792898170907061, + "acc_norm": 0.5648148148148148, + "acc_norm_stderr": 0.04792898170907061 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.43842364532019706, + "acc_stderr": 0.03491207857486518, + "acc_norm": 0.43842364532019706, + "acc_norm_stderr": 0.03491207857486518 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.49032258064516127, + "acc_stderr": 0.028438677998909565, + "acc_norm": 0.49032258064516127, + "acc_norm_stderr": 0.028438677998909565 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6965811965811965, + "acc_stderr": 0.030118210106942638, + "acc_norm": 0.6965811965811965, + "acc_norm_stderr": 0.030118210106942638 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.47924528301886793, + "acc_stderr": 0.03074634997572347, + "acc_norm": 0.47924528301886793, + "acc_norm_stderr": 0.03074634997572347 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.45454545454545453, + "acc_stderr": 0.04769300568972744, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.04769300568972744 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.44074074074074077, + "acc_stderr": 0.03027067115728406, + "acc_norm": 0.44074074074074077, + "acc_norm_stderr": 0.03027067115728406 + }, + 
"harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3443708609271523, + "acc_stderr": 0.038796870240733264, + "acc_norm": 0.3443708609271523, + "acc_norm_stderr": 0.038796870240733264 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5920398009950248, + "acc_stderr": 0.03475116365194092, + "acc_norm": 0.5920398009950248, + "acc_norm_stderr": 0.03475116365194092 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3872832369942196, + "acc_stderr": 0.03714325906302065, + "acc_norm": 0.3872832369942196, + "acc_norm_stderr": 0.03714325906302065 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.5, + "acc_stderr": 0.025751310131230234, + "acc_norm": 0.5, + "acc_norm_stderr": 0.025751310131230234 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3263888888888889, + "acc_stderr": 0.03921067198982266, + "acc_norm": 0.3263888888888889, + "acc_norm_stderr": 0.03921067198982266 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.64, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.64, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4884393063583815, + "acc_stderr": 0.026911898686377906, + "acc_norm": 0.4884393063583815, + "acc_norm_stderr": 0.026911898686377906 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4785276073619632, + "acc_stderr": 0.0392474687675113, + "acc_norm": 0.4785276073619632, + "acc_norm_stderr": 0.0392474687675113 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.47530864197530864, + "acc_stderr": 0.02778680093142745, + "acc_norm": 0.47530864197530864, + "acc_norm_stderr": 0.02778680093142745 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + 
"harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.47668393782383417, + "acc_stderr": 0.03604513672442205, + "acc_norm": 0.47668393782383417, + "acc_norm_stderr": 0.03604513672442205 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.4298245614035088, + "acc_stderr": 0.046570472605949646, + "acc_norm": 0.4298245614035088, + "acc_norm_stderr": 0.046570472605949646 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.4990825688073395, + "acc_stderr": 0.021437287056051215, + "acc_norm": 0.4990825688073395, + "acc_norm_stderr": 0.021437287056051215 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.46825396825396826, + "acc_stderr": 0.04463112720677171, + "acc_norm": 0.46825396825396826, + "acc_norm_stderr": 0.04463112720677171 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.47058823529411764, + "acc_stderr": 0.028580341065138286, + "acc_norm": 0.47058823529411764, + "acc_norm_stderr": 0.028580341065138286 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6942148760330579, + "acc_stderr": 0.04205953933884123, + "acc_norm": 0.6942148760330579, + "acc_norm_stderr": 0.04205953933884123 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4473684210526316, + "acc_stderr": 0.04046336883978251, + "acc_norm": 0.4473684210526316, + "acc_norm_stderr": 0.04046336883978251 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4084967320261438, + "acc_stderr": 0.01988622103750187, + "acc_norm": 0.4084967320261438, + "acc_norm_stderr": 0.01988622103750187 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3829787234042553, + "acc_stderr": 0.02899908090480618, + "acc_norm": 0.3829787234042553, + "acc_norm_stderr": 0.02899908090480618 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3482142857142857, + "acc_stderr": 0.04521829902833587, + "acc_norm": 
0.3482142857142857, + "acc_norm_stderr": 0.04521829902833587 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.44907407407407407, + "acc_stderr": 0.03392238405321616, + "acc_norm": 0.44907407407407407, + "acc_norm_stderr": 0.03392238405321616 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.3139664804469274, + "acc_stderr": 0.01552192393352364, + "acc_norm": 0.3139664804469274, + "acc_norm_stderr": 0.01552192393352364 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.76, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.76, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.375, + "acc_stderr": 0.029408372932278746, + "acc_norm": 0.375, + "acc_norm_stderr": 0.029408372932278746 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5755102040816327, + "acc_stderr": 0.03164209487942942, + "acc_norm": 0.5755102040816327, + "acc_norm_stderr": 0.03164209487942942 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5485232067510548, + "acc_stderr": 0.032393600173974704, + "acc_norm": 0.5485232067510548, + "acc_norm_stderr": 0.032393600173974704 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.34876140808344197, + "acc_stderr": 0.012172035157127116, + "acc_norm": 0.34876140808344197, + "acc_norm_stderr": 0.012172035157127116 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.0346022832723917, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.0346022832723917 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.47878787878787876, + "acc_stderr": 0.03900828913737302, + "acc_norm": 0.47878787878787876, + "acc_norm_stderr": 0.03900828913737302 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.32558139534883723, + 
"mc1_stderr": 0.016403989469907822, + "mc2": 0.49630747628492006, + "mc2_stderr": 0.0158004545260126 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4675324675324675, + "acc_stderr": 0.01715407371668286, + "acc_norm": 0.4911452184179457, + "acc_norm_stderr": 0.017187658199336736 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + 
"harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "BAAI/Infinity-Instruct-3M-0625-Yi-1.5-9B", + "model_sha": "a42c86c61b98ca4fdf238d688fe6ea11cf414d29", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/BAAI/Infinity-Instruct-7M-0729-Llama3_1-8B/result_2024-08-05 19:30:15.json b/BAAI/Infinity-Instruct-7M-0729-Llama3_1-8B/result_2024-08-05 19:30:15.json new file mode 100644 index 0000000000000000000000000000000000000000..69a226749fc7347ebab579f07c2f3a7fb7331d98 --- /dev/null +++ b/BAAI/Infinity-Instruct-7M-0729-Llama3_1-8B/result_2024-08-05 19:30:15.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3967576791808874, + "acc_stderr": 0.014296513020180639, + "acc_norm": 0.44880546075085326, + "acc_norm_stderr": 0.014534599585097674 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3968333001394145, + "acc_stderr": 
0.004882410029935442, + "acc_norm": 0.5291774546903008, + "acc_norm_stderr": 0.004981278326428014 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6198830409356725, + "acc_stderr": 0.037229657413855394, + "acc_norm": 0.6198830409356725, + "acc_norm_stderr": 0.037229657413855394 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6990291262135923, + "acc_stderr": 0.04541609446503948, + "acc_norm": 0.6990291262135923, + "acc_norm_stderr": 0.04541609446503948 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5798212005108557, + "acc_stderr": 0.017650651363078033, + "acc_norm": 0.5798212005108557, + "acc_norm_stderr": 0.017650651363078033 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37777777777777777, + "acc_stderr": 0.04188307537595853, + "acc_norm": 0.37777777777777777, + "acc_norm_stderr": 0.04188307537595853 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720683, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720683 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4723404255319149, + "acc_stderr": 0.03263597118409769, + "acc_norm": 0.4723404255319149, + "acc_norm_stderr": 0.03263597118409769 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42771084337349397, + "acc_stderr": 0.03851597683718533, + "acc_norm": 0.42771084337349397, + "acc_norm_stderr": 0.03851597683718533 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5209003215434084, + "acc_stderr": 0.02837327096106942, + "acc_norm": 0.5209003215434084, + "acc_norm_stderr": 0.02837327096106942 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.515695067264574, + "acc_stderr": 0.0335412657542081, + "acc_norm": 0.515695067264574, + "acc_norm_stderr": 0.0335412657542081 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5725190839694656, + "acc_stderr": 0.04338920305792401, + "acc_norm": 0.5725190839694656, + "acc_norm_stderr": 0.04338920305792401 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.44, + "acc_stderr": 
0.0498887651569859, + "acc_norm": 0.44, + "acc_norm_stderr": 0.0498887651569859 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.034273086529999344, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.034273086529999344 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5103448275862069, + "acc_stderr": 0.04165774775728762, + "acc_norm": 0.5103448275862069, + "acc_norm_stderr": 0.04165774775728762 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.04533838195929777, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.04533838195929777 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5672268907563025, + "acc_stderr": 0.032183581077426124, + "acc_norm": 0.5672268907563025, + "acc_norm_stderr": 0.032183581077426124 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.517948717948718, + "acc_stderr": 0.025334667080954887, + "acc_norm": 0.517948717948718, + "acc_norm_stderr": 0.025334667080954887 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.63, + "acc_stderr": 0.04852365870939098, + "acc_norm": 0.63, + "acc_norm_stderr": 0.04852365870939098 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6203703703703703, + "acc_stderr": 0.04691521224077742, + "acc_norm": 0.6203703703703703, + "acc_norm_stderr": 0.04691521224077742 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4236453201970443, + "acc_stderr": 0.034767257476490364, + "acc_norm": 0.4236453201970443, + "acc_norm_stderr": 0.034767257476490364 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.535483870967742, + "acc_stderr": 0.028372287797962942, + "acc_norm": 0.535483870967742, + "acc_norm_stderr": 0.028372287797962942 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 
0.7777777777777778, + "acc_stderr": 0.027236013946196673, + "acc_norm": 0.7777777777777778, + "acc_norm_stderr": 0.027236013946196673 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5056603773584906, + "acc_stderr": 0.030770900763851302, + "acc_norm": 0.5056603773584906, + "acc_norm_stderr": 0.030770900763851302 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5909090909090909, + "acc_stderr": 0.04709306978661895, + "acc_norm": 0.5909090909090909, + "acc_norm_stderr": 0.04709306978661895 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3074074074074074, + "acc_stderr": 0.02813325257881564, + "acc_norm": 0.3074074074074074, + "acc_norm_stderr": 0.02813325257881564 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.03780445850526732, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.03780445850526732 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7014925373134329, + "acc_stderr": 0.03235743789355043, + "acc_norm": 0.7014925373134329, + "acc_norm_stderr": 0.03235743789355043 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4508670520231214, + "acc_stderr": 0.0379401267469703, + "acc_norm": 0.4508670520231214, + "acc_norm_stderr": 0.0379401267469703 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.025107425481137285, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.025107425481137285 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4930555555555556, + "acc_stderr": 0.04180806750294938, + "acc_norm": 0.4930555555555556, + "acc_norm_stderr": 0.04180806750294938 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.67, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.67, + "acc_norm_stderr": 0.04725815626252606 + }, + 
"harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5780346820809249, + "acc_stderr": 0.026589231142174263, + "acc_norm": 0.5780346820809249, + "acc_norm_stderr": 0.026589231142174263 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4785276073619632, + "acc_stderr": 0.03924746876751129, + "acc_norm": 0.4785276073619632, + "acc_norm_stderr": 0.03924746876751129 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5679012345679012, + "acc_stderr": 0.027563010971606676, + "acc_norm": 0.5679012345679012, + "acc_norm_stderr": 0.027563010971606676 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5647668393782384, + "acc_stderr": 0.03578038165008585, + "acc_norm": 0.5647668393782384, + "acc_norm_stderr": 0.03578038165008585 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.37719298245614036, + "acc_stderr": 0.04559522141958216, + "acc_norm": 0.37719298245614036, + "acc_norm_stderr": 0.04559522141958216 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5853211009174312, + "acc_stderr": 0.021122903208602592, + "acc_norm": 0.5853211009174312, + "acc_norm_stderr": 0.021122903208602592 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4365079365079365, + "acc_stderr": 0.04435932892851466, + "acc_norm": 0.4365079365079365, + "acc_norm_stderr": 0.04435932892851466 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5130718954248366, + "acc_stderr": 0.028620130800700246, + "acc_norm": 0.5130718954248366, + "acc_norm_stderr": 0.028620130800700246 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.55, + "acc_stderr": 0.04999999999999999, + "acc_norm": 0.55, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7272727272727273, + "acc_stderr": 0.040655781409087044, + "acc_norm": 0.7272727272727273, + 
"acc_norm_stderr": 0.040655781409087044 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5592105263157895, + "acc_stderr": 0.04040311062490436, + "acc_norm": 0.5592105263157895, + "acc_norm_stderr": 0.04040311062490436 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4673202614379085, + "acc_stderr": 0.020184583359102202, + "acc_norm": 0.4673202614379085, + "acc_norm_stderr": 0.020184583359102202 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3723404255319149, + "acc_stderr": 0.02883892147125145, + "acc_norm": 0.3723404255319149, + "acc_norm_stderr": 0.02883892147125145 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.5, + "acc_stderr": 0.04745789978762494, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04745789978762494 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.03362277436608043, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.03362277436608043 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.36201117318435755, + "acc_stderr": 0.01607306735015309, + "acc_norm": 0.36201117318435755, + "acc_norm_stderr": 0.01607306735015309 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.69, + "acc_stderr": 0.046482319871173156, + "acc_norm": 0.69, + "acc_norm_stderr": 0.046482319871173156 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3897058823529412, + "acc_stderr": 0.029624663581159703, + "acc_norm": 0.3897058823529412, + "acc_norm_stderr": 0.029624663581159703 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6244897959183674, + "acc_stderr": 0.03100120903989484, + "acc_norm": 0.6244897959183674, + "acc_norm_stderr": 0.03100120903989484 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6919831223628692, + "acc_stderr": 
0.030052389335605695, + "acc_norm": 0.6919831223628692, + "acc_norm_stderr": 0.030052389335605695 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.35658409387222945, + "acc_stderr": 0.012233642989273891, + "acc_norm": 0.35658409387222945, + "acc_norm_stderr": 0.012233642989273891 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5931372549019608, + "acc_stderr": 0.03447891136353382, + "acc_norm": 0.5931372549019608, + "acc_norm_stderr": 0.03447891136353382 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6606060606060606, + "acc_stderr": 0.03697442205031596, + "acc_norm": 0.6606060606060606, + "acc_norm_stderr": 0.03697442205031596 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.31334149326805383, + "mc1_stderr": 0.016238065069059615, + "mc2": 0.4815711845097606, + "mc2_stderr": 0.015471768776896768 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5584415584415584, + "acc_stderr": 0.017072525875563103, + "acc_norm": 0.6174734356552538, + "acc_norm_stderr": 0.016709165387228827 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + 
"harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "BAAI/Infinity-Instruct-7M-0729-Llama3_1-8B", + "model_sha": "0aca33fd7500a781d041e8bf7e5e3789b03f54f4", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + 
"override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/BAAI/Infinity-Instruct-7M-0729-mistral-7B/result_2024-08-05 19:37:54.json b/BAAI/Infinity-Instruct-7M-0729-mistral-7B/result_2024-08-05 19:37:54.json new file mode 100644 index 0000000000000000000000000000000000000000..3dba1be06c393a1d2dce92c3fe6f6576033f308f --- /dev/null +++ b/BAAI/Infinity-Instruct-7M-0729-mistral-7B/result_2024-08-05 19:37:54.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3310580204778157, + "acc_stderr": 0.013752062419817836, + "acc_norm": 0.39334470989761094, + "acc_norm_stderr": 0.014275101465693028 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3717386974706234, + "acc_stderr": 0.004822814501358899, + "acc_norm": 0.4796853216490739, + "acc_norm_stderr": 0.0049856612829985774 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.49707602339181284, + "acc_stderr": 0.03834759370936839, + "acc_norm": 0.49707602339181284, + "acc_norm_stderr": 0.03834759370936839 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6213592233009708, + "acc_stderr": 0.04802694698258974, + "acc_norm": 0.6213592233009708, + "acc_norm_stderr": 0.04802694698258974 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4929757343550447, + "acc_stderr": 0.017878199003432214, + "acc_norm": 0.4929757343550447, + "acc_norm_stderr": 0.017878199003432214 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.362962962962963, + "acc_stderr": 0.041539484047424, + "acc_norm": 0.362962962962963, + "acc_norm_stderr": 0.041539484047424 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3446808510638298, + "acc_stderr": 0.03106898596312215, + "acc_norm": 0.3446808510638298, + "acc_norm_stderr": 0.03106898596312215 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4578313253012048, + 
"acc_stderr": 0.038786267710023616, + "acc_norm": 0.4578313253012048, + "acc_norm_stderr": 0.038786267710023616 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5048231511254019, + "acc_stderr": 0.028396770444111298, + "acc_norm": 0.5048231511254019, + "acc_norm_stderr": 0.028396770444111298 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4663677130044843, + "acc_stderr": 0.033481800170603065, + "acc_norm": 0.4663677130044843, + "acc_norm_stderr": 0.033481800170603065 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48854961832061067, + "acc_stderr": 0.043841400240780176, + "acc_norm": 0.48854961832061067, + "acc_norm_stderr": 0.043841400240780176 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5808080808080808, + "acc_stderr": 0.03515520728670417, + "acc_norm": 0.5808080808080808, + "acc_norm_stderr": 0.03515520728670417 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.46206896551724136, + "acc_stderr": 0.041546596717075474, + "acc_norm": 0.46206896551724136, + "acc_norm_stderr": 0.041546596717075474 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.04389869956808778, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.04389869956808778 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.49159663865546216, + "acc_stderr": 0.0324739027656967, + "acc_norm": 0.49159663865546216, + "acc_norm_stderr": 0.0324739027656967 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4512820512820513, + "acc_stderr": 0.025230381238934833, + "acc_norm": 0.4512820512820513, + "acc_norm_stderr": 0.025230381238934833 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001974 + }, + 
"harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.43349753694581283, + "acc_stderr": 0.034867317274198714, + "acc_norm": 0.43349753694581283, + "acc_norm_stderr": 0.034867317274198714 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4935483870967742, + "acc_stderr": 0.02844163823354051, + "acc_norm": 0.4935483870967742, + "acc_norm_stderr": 0.02844163823354051 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7350427350427351, + "acc_stderr": 0.028911208802749472, + "acc_norm": 0.7350427350427351, + "acc_norm_stderr": 0.028911208802749472 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4377358490566038, + "acc_stderr": 0.03053333843046751, + "acc_norm": 0.4377358490566038, + "acc_norm_stderr": 0.03053333843046751 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5272727272727272, + "acc_stderr": 0.04782001791380061, + "acc_norm": 0.5272727272727272, + "acc_norm_stderr": 0.04782001791380061 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3037037037037037, + "acc_stderr": 0.028037929969114996, + "acc_norm": 0.3037037037037037, + "acc_norm_stderr": 0.028037929969114996 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.271523178807947, + "acc_stderr": 0.03631329803969653, + "acc_norm": 0.271523178807947, + "acc_norm_stderr": 0.03631329803969653 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6218905472636815, + "acc_stderr": 0.03428867848778657, + "acc_norm": 0.6218905472636815, + "acc_norm_stderr": 0.03428867848778657 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3583815028901734, + "acc_stderr": 0.036563436533531585, + "acc_norm": 0.3583815028901734, 
+ "acc_norm_stderr": 0.036563436533531585 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.37566137566137564, + "acc_stderr": 0.024942368931159788, + "acc_norm": 0.37566137566137564, + "acc_norm_stderr": 0.024942368931159788 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3472222222222222, + "acc_stderr": 0.039812405437178615, + "acc_norm": 0.3472222222222222, + "acc_norm_stderr": 0.039812405437178615 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5289017341040463, + "acc_stderr": 0.026874085883518348, + "acc_norm": 0.5289017341040463, + "acc_norm_stderr": 0.026874085883518348 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.50920245398773, + "acc_stderr": 0.03927705600787443, + "acc_norm": 0.50920245398773, + "acc_norm_stderr": 0.03927705600787443 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4876543209876543, + "acc_stderr": 0.027812262269327242, + "acc_norm": 0.4876543209876543, + "acc_norm_stderr": 0.027812262269327242 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.538860103626943, + "acc_stderr": 0.03597524411734578, + "acc_norm": 0.538860103626943, + "acc_norm_stderr": 0.03597524411734578 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.04185774424022056, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.04185774424022056 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5009174311926605, + "acc_stderr": 0.021437287056051208, + "acc_norm": 
0.5009174311926605, + "acc_norm_stderr": 0.021437287056051208 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.40476190476190477, + "acc_stderr": 0.04390259265377562, + "acc_norm": 0.40476190476190477, + "acc_norm_stderr": 0.04390259265377562 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5326797385620915, + "acc_stderr": 0.028568699752225868, + "acc_norm": 0.5326797385620915, + "acc_norm_stderr": 0.028568699752225868 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6859504132231405, + "acc_stderr": 0.04236964753041019, + "acc_norm": 0.6859504132231405, + "acc_norm_stderr": 0.04236964753041019 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4144736842105263, + "acc_stderr": 0.04008973785779205, + "acc_norm": 0.4144736842105263, + "acc_norm_stderr": 0.04008973785779205 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.40032679738562094, + "acc_stderr": 0.019821843688271768, + "acc_norm": 0.40032679738562094, + "acc_norm_stderr": 0.019821843688271768 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3723404255319149, + "acc_stderr": 0.02883892147125146, + "acc_norm": 0.3723404255319149, + "acc_norm_stderr": 0.02883892147125146 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.49107142857142855, + "acc_stderr": 0.04745033255489123, + "acc_norm": 0.49107142857142855, + "acc_norm_stderr": 0.04745033255489123 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.032757734861009996, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.032757734861009996 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2670391061452514, + "acc_stderr": 0.014796502622562565, + "acc_norm": 0.2670391061452514, + "acc_norm_stderr": 0.014796502622562565 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.4, + 
"acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3382352941176471, + "acc_stderr": 0.028739328513983572, + "acc_norm": 0.3382352941176471, + "acc_norm_stderr": 0.028739328513983572 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5877551020408164, + "acc_stderr": 0.0315123604467427, + "acc_norm": 0.5877551020408164, + "acc_norm_stderr": 0.0315123604467427 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6497890295358649, + "acc_stderr": 0.03105239193758435, + "acc_norm": 0.6497890295358649, + "acc_norm_stderr": 0.03105239193758435 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3604954367666232, + "acc_stderr": 0.012263110237299238, + "acc_norm": 0.3604954367666232, + "acc_norm_stderr": 0.012263110237299238 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.44607843137254904, + "acc_stderr": 0.03488845451304974, + "acc_norm": 0.44607843137254904, + "acc_norm_stderr": 0.03488845451304974 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4303030303030303, + "acc_stderr": 0.03866225962879077, + "acc_norm": 0.4303030303030303, + "acc_norm_stderr": 0.03866225962879077 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.31211750305997554, + "mc1_stderr": 0.01622075676952091, + "mc2": 0.48014840242412327, + "mc2_stderr": 0.01579287734813472 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.48760330578512395, + "acc_stderr": 0.017185069732676524, + "acc_norm": 0.5076741440377804, + "acc_norm_stderr": 0.017188329219654273 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + 
"harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 
1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "BAAI/Infinity-Instruct-7M-0729-mistral-7B", + "model_sha": "36651591cb13346ecbde23832013e024029700fa", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/BM-K/llama-2-ko-7b-it-v1.0.0/result_2023-11-15 11:34:52.json b/BM-K/llama-2-ko-7b-it-v1.0.0/result_2023-11-15 11:34:52.json new file mode 100644 index 0000000000000000000000000000000000000000..e730522c11119bda3e69fddc79ec406f1be5e942 --- /dev/null +++ b/BM-K/llama-2-ko-7b-it-v1.0.0/result_2023-11-15 11:34:52.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.34897610921501704, + "acc_stderr": 0.0139289334613825, + "acc_norm": 0.38310580204778155, + "acc_norm_stderr": 0.01420647266167288 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3973312089225254, + "acc_stderr": 0.00488345518890897, + "acc_norm": 0.518621788488349, + "acc_norm_stderr": 0.004986319587524962 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.3508771929824561, + "acc_stderr": 0.036602988340491624, + "acc_norm": 0.3508771929824561, + "acc_norm_stderr": 0.036602988340491624 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2524271844660194, + "acc_stderr": 0.04301250399690879, + "acc_norm": 0.2524271844660194, + "acc_norm_stderr": 0.04301250399690879 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 
0.35759897828863346, + "acc_stderr": 0.017139488998803288, + "acc_norm": 0.35759897828863346, + "acc_norm_stderr": 0.017139488998803288 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.362962962962963, + "acc_stderr": 0.041539484047424, + "acc_norm": 0.362962962962963, + "acc_norm_stderr": 0.041539484047424 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720683, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720683 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.28936170212765955, + "acc_stderr": 0.029644006577009618, + "acc_norm": 0.28936170212765955, + "acc_norm_stderr": 0.029644006577009618 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.25301204819277107, + "acc_stderr": 0.033844291552331346, + "acc_norm": 0.25301204819277107, + "acc_norm_stderr": 0.033844291552331346 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3729903536977492, + "acc_stderr": 0.027466610213140105, + "acc_norm": 0.3729903536977492, + "acc_norm_stderr": 0.027466610213140105 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.26905829596412556, + "acc_stderr": 0.029763779406874975, + "acc_norm": 0.26905829596412556, + "acc_norm_stderr": 0.029763779406874975 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.33587786259541985, + "acc_stderr": 0.04142313771996664, + "acc_norm": 0.33587786259541985, + "acc_norm_stderr": 0.04142313771996664 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.0347327959083696, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.0347327959083696 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.38620689655172413, + "acc_stderr": 0.04057324734419036, + "acc_norm": 0.38620689655172413, + "acc_norm_stderr": 0.04057324734419036 + }, + "harness|ko_mmlu_college_physics|5": 
{ + "acc": 0.18627450980392157, + "acc_stderr": 0.038739587141493524, + "acc_norm": 0.18627450980392157, + "acc_norm_stderr": 0.038739587141493524 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.25210084033613445, + "acc_stderr": 0.028205545033277723, + "acc_norm": 0.25210084033613445, + "acc_norm_stderr": 0.028205545033277723 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.23076923076923078, + "acc_stderr": 0.021362027725222738, + "acc_norm": 0.23076923076923078, + "acc_norm_stderr": 0.021362027725222738 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.19, + "acc_stderr": 0.03942772444036623, + "acc_norm": 0.19, + "acc_norm_stderr": 0.03942772444036623 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.04616631111801713, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.04616631111801713 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.270935960591133, + "acc_stderr": 0.031270907132976984, + "acc_norm": 0.270935960591133, + "acc_norm_stderr": 0.031270907132976984 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2806451612903226, + "acc_stderr": 0.025560604721022877, + "acc_norm": 0.2806451612903226, + "acc_norm_stderr": 0.025560604721022877 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.3974358974358974, + "acc_stderr": 0.03205953453789293, + "acc_norm": 0.3974358974358974, + "acc_norm_stderr": 0.03205953453789293 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.32452830188679244, + "acc_stderr": 0.028815615713432118, + "acc_norm": 0.32452830188679244, + "acc_norm_stderr": 0.028815615713432118 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2818181818181818, + "acc_stderr": 0.04309118709946458, + "acc_norm": 0.2818181818181818, + "acc_norm_stderr": 
0.04309118709946458 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.026962424325073838, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.026962424325073838 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2781456953642384, + "acc_stderr": 0.03658603262763743, + "acc_norm": 0.2781456953642384, + "acc_norm_stderr": 0.03658603262763743 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.3880597014925373, + "acc_stderr": 0.034457899643627506, + "acc_norm": 0.3880597014925373, + "acc_norm_stderr": 0.034457899643627506 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.27167630057803466, + "acc_stderr": 0.03391750322321658, + "acc_norm": 0.27167630057803466, + "acc_norm_stderr": 0.03391750322321658 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2671957671957672, + "acc_stderr": 0.022789673145776578, + "acc_norm": 0.2671957671957672, + "acc_norm_stderr": 0.022789673145776578 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.037455547914624576, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.037455547914624576 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.16, + "acc_stderr": 0.0368452949177471, + "acc_norm": 0.16, + "acc_norm_stderr": 0.0368452949177471 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.34, + "acc_stderr": 0.047609522856952365, + "acc_norm": 0.34, + "acc_norm_stderr": 0.047609522856952365 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.33815028901734107, + "acc_stderr": 0.02546977014940017, + "acc_norm": 0.33815028901734107, + "acc_norm_stderr": 0.02546977014940017 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2392638036809816, + "acc_stderr": 0.03351953879521269, + "acc_norm": 0.2392638036809816, + "acc_norm_stderr": 0.03351953879521269 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3549382716049383, + "acc_stderr": 0.026624152478845853, + "acc_norm": 
0.3549382716049383, + "acc_norm_stderr": 0.026624152478845853 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.23316062176165803, + "acc_stderr": 0.03051611137147601, + "acc_norm": 0.23316062176165803, + "acc_norm_stderr": 0.03051611137147601 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.039994238792813344, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.039994238792813344 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3211009174311927, + "acc_stderr": 0.020018149772733747, + "acc_norm": 0.3211009174311927, + "acc_norm_stderr": 0.020018149772733747 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.18253968253968253, + "acc_stderr": 0.03455071019102149, + "acc_norm": 0.18253968253968253, + "acc_norm_stderr": 0.03455071019102149 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3235294117647059, + "acc_stderr": 0.026787453111906532, + "acc_norm": 0.3235294117647059, + "acc_norm_stderr": 0.026787453111906532 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.4628099173553719, + "acc_stderr": 0.045517111961042175, + "acc_norm": 0.4628099173553719, + "acc_norm_stderr": 0.045517111961042175 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3355263157894737, + "acc_stderr": 0.03842498559395268, + "acc_norm": 0.3355263157894737, + "acc_norm_stderr": 0.03842498559395268 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3006535947712418, + "acc_stderr": 0.018550634502952964, + "acc_norm": 0.3006535947712418, + "acc_norm_stderr": 0.018550634502952964 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2730496453900709, + 
"acc_stderr": 0.026577860943307854, + "acc_norm": 0.2730496453900709, + "acc_norm_stderr": 0.026577860943307854 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.22321428571428573, + "acc_stderr": 0.03952301967702511, + "acc_norm": 0.22321428571428573, + "acc_norm_stderr": 0.03952301967702511 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.25462962962962965, + "acc_stderr": 0.02971127586000534, + "acc_norm": 0.25462962962962965, + "acc_norm_stderr": 0.02971127586000534 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.25251396648044694, + "acc_stderr": 0.014530330201468638, + "acc_norm": 0.25251396648044694, + "acc_norm_stderr": 0.014530330201468638 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.27941176470588236, + "acc_stderr": 0.027257202606114948, + "acc_norm": 0.27941176470588236, + "acc_norm_stderr": 0.027257202606114948 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2816326530612245, + "acc_stderr": 0.0287951855742913, + "acc_norm": 0.2816326530612245, + "acc_norm_stderr": 0.0287951855742913 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.35864978902953587, + "acc_stderr": 0.031219569445301843, + "acc_norm": 0.35864978902953587, + "acc_norm_stderr": 0.031219569445301843 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2907431551499348, + "acc_stderr": 0.011598062372851981, + "acc_norm": 0.2907431551499348, + "acc_norm_stderr": 0.011598062372851981 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.25980392156862747, + "acc_stderr": 0.030778554678693268, + "acc_norm": 0.25980392156862747, + "acc_norm_stderr": 
0.030778554678693268 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.28484848484848485, + "acc_stderr": 0.03524390844511784, + "acc_norm": 0.28484848484848485, + "acc_norm_stderr": 0.03524390844511784 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24112607099143207, + "mc1_stderr": 0.014974827279752329, + "mc2": 0.3762518297834469, + "mc2_stderr": 0.015197001689915996 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.31759149940968123, + "acc_stderr": 0.016005581876229306, + "acc_norm": 0.3990554899645809, + "acc_norm_stderr": 0.0168363772928493 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + 
"harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "BM-K/llama-2-ko-7b-it-v1.0.0", + "model_sha": "d77fd44b31382f84fa4b8b9afd63a92ded7bde93", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/BM-K/mistral-7b-it-v1.0.1/result_2023-11-07 22:19:25.json b/BM-K/mistral-7b-it-v1.0.1/result_2023-11-07 22:19:25.json new file mode 100644 index 0000000000000000000000000000000000000000..a701d6d75b0d2935ccf9c496fbfb8913e73bd1f8 --- /dev/null +++ b/BM-K/mistral-7b-it-v1.0.1/result_2023-11-07 22:19:25.json @@ -0,0 +1,444 @@ +{ + "results": { + 
"harness|ko_arc_challenge|25": { + "acc": 0.35665529010238906, + "acc_stderr": 0.013998056902620196, + "acc_norm": 0.41467576791808874, + "acc_norm_stderr": 0.014397070564409174 + }, + "harness|ko_hellaswag|10": { + "acc": 0.38329018123879705, + "acc_stderr": 0.004851944170671259, + "acc_norm": 0.4987054371639116, + "acc_norm_stderr": 0.004989764686738831 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.52046783625731, + "acc_stderr": 0.0383161053282193, + "acc_norm": 0.52046783625731, + "acc_norm_stderr": 0.0383161053282193 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5339805825242718, + "acc_stderr": 0.04939291447273481, + "acc_norm": 0.5339805825242718, + "acc_norm_stderr": 0.04939291447273481 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5159642401021711, + "acc_stderr": 0.017870847506081738, + "acc_norm": 0.5159642401021711, + "acc_norm_stderr": 0.017870847506081738 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3925925925925926, + "acc_stderr": 0.04218506215368879, + "acc_norm": 0.3925925925925926, + "acc_norm_stderr": 0.04218506215368879 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.37872340425531914, + "acc_stderr": 0.031709956060406545, + "acc_norm": 0.37872340425531914, + "acc_norm_stderr": 0.031709956060406545 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39156626506024095, + "acc_stderr": 0.03799857454479636, + "acc_norm": 0.39156626506024095, + "acc_norm_stderr": 0.03799857454479636 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4790996784565916, + "acc_stderr": 0.028373270961069414, + "acc_norm": 0.4790996784565916, + "acc_norm_stderr": 0.028373270961069414 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.49327354260089684, + "acc_stderr": 0.03355476596234353, + "acc_norm": 0.49327354260089684, + "acc_norm_stderr": 0.03355476596234353 + }, 
+ "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4732824427480916, + "acc_stderr": 0.04379024936553894, + "acc_norm": 0.4732824427480916, + "acc_norm_stderr": 0.04379024936553894 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.035402943770953675, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.035402943770953675 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.46206896551724136, + "acc_stderr": 0.041546596717075474, + "acc_norm": 0.46206896551724136, + "acc_norm_stderr": 0.041546596717075474 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.13725490196078433, + "acc_stderr": 0.03424084669891524, + "acc_norm": 0.13725490196078433, + "acc_norm_stderr": 0.03424084669891524 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.41596638655462187, + "acc_stderr": 0.03201650100739615, + "acc_norm": 0.41596638655462187, + "acc_norm_stderr": 0.03201650100739615 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4230769230769231, + "acc_stderr": 0.025049197876042335, + "acc_norm": 0.4230769230769231, + "acc_norm_stderr": 0.025049197876042335 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39408866995073893, + "acc_stderr": 0.034381579670365446, + "acc_norm": 0.39408866995073893, + "acc_norm_stderr": 0.034381579670365446 + }, 
+ "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.432258064516129, + "acc_stderr": 0.028181739720019413, + "acc_norm": 0.432258064516129, + "acc_norm_stderr": 0.028181739720019413 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7307692307692307, + "acc_stderr": 0.029058588303748842, + "acc_norm": 0.7307692307692307, + "acc_norm_stderr": 0.029058588303748842 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.41509433962264153, + "acc_stderr": 0.030325945789286105, + "acc_norm": 0.41509433962264153, + "acc_norm_stderr": 0.030325945789286105 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5181818181818182, + "acc_stderr": 0.04785964010794915, + "acc_norm": 0.5181818181818182, + "acc_norm_stderr": 0.04785964010794915 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.34074074074074073, + "acc_stderr": 0.02889774874113114, + "acc_norm": 0.34074074074074073, + "acc_norm_stderr": 0.02889774874113114 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.271523178807947, + "acc_stderr": 0.03631329803969653, + "acc_norm": 0.271523178807947, + "acc_norm_stderr": 0.03631329803969653 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6019900497512438, + "acc_stderr": 0.03461199429040014, + "acc_norm": 0.6019900497512438, + "acc_norm_stderr": 0.03461199429040014 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.35260115606936415, + "acc_stderr": 0.036430371689585496, + "acc_norm": 0.35260115606936415, + "acc_norm_stderr": 0.036430371689585496 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.0248708152510571, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.0248708152510571 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.04076663253918567, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.04076663253918567 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + 
"acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5202312138728323, + "acc_stderr": 0.026897049996382868, + "acc_norm": 0.5202312138728323, + "acc_norm_stderr": 0.026897049996382868 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.43558282208588955, + "acc_stderr": 0.03895632464138937, + "acc_norm": 0.43558282208588955, + "acc_norm_stderr": 0.03895632464138937 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.44135802469135804, + "acc_stderr": 0.027628737155668784, + "acc_norm": 0.44135802469135804, + "acc_norm_stderr": 0.027628737155668784 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5751295336787565, + "acc_stderr": 0.035674713352125395, + "acc_norm": 0.5751295336787565, + "acc_norm_stderr": 0.035674713352125395 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.32456140350877194, + "acc_stderr": 0.04404556157374768, + "acc_norm": 0.32456140350877194, + "acc_norm_stderr": 0.04404556157374768 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.46605504587155966, + "acc_stderr": 0.021387863350354, + "acc_norm": 0.46605504587155966, + "acc_norm_stderr": 0.021387863350354 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30158730158730157, + "acc_stderr": 0.04104947269903394, + "acc_norm": 0.30158730158730157, + "acc_norm_stderr": 0.04104947269903394 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4738562091503268, + "acc_stderr": 0.028590752958852394, + "acc_norm": 0.4738562091503268, + "acc_norm_stderr": 0.028590752958852394 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.48, + "acc_stderr": 
0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5950413223140496, + "acc_stderr": 0.04481137755942469, + "acc_norm": 0.5950413223140496, + "acc_norm_stderr": 0.04481137755942469 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.34868421052631576, + "acc_stderr": 0.038781398887976104, + "acc_norm": 0.34868421052631576, + "acc_norm_stderr": 0.038781398887976104 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.40032679738562094, + "acc_stderr": 0.01982184368827177, + "acc_norm": 0.40032679738562094, + "acc_norm_stderr": 0.01982184368827177 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.30141843971631205, + "acc_stderr": 0.027374128882631157, + "acc_norm": 0.30141843971631205, + "acc_norm_stderr": 0.027374128882631157 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3125, + "acc_stderr": 0.043994650575715215, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.043994650575715215 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.30092592592592593, + "acc_stderr": 0.031280390843298825, + "acc_norm": 0.30092592592592593, + "acc_norm_stderr": 0.031280390843298825 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2435754189944134, + "acc_stderr": 0.014355911964767864, + "acc_norm": 0.2435754189944134, + "acc_norm_stderr": 0.014355911964767864 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3272058823529412, + "acc_stderr": 0.02850145286039656, + "acc_norm": 0.3272058823529412, + "acc_norm_stderr": 0.02850145286039656 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 
0.44081632653061226, + "acc_stderr": 0.03178419114175363, + "acc_norm": 0.44081632653061226, + "acc_norm_stderr": 0.03178419114175363 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5611814345991561, + "acc_stderr": 0.032302649315470375, + "acc_norm": 0.5611814345991561, + "acc_norm_stderr": 0.032302649315470375 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3344198174706649, + "acc_stderr": 0.012049668983214936, + "acc_norm": 0.3344198174706649, + "acc_norm_stderr": 0.012049668983214936 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.44607843137254904, + "acc_stderr": 0.03488845451304974, + "acc_norm": 0.44607843137254904, + "acc_norm_stderr": 0.03488845451304974 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.47878787878787876, + "acc_stderr": 0.03900828913737301, + "acc_norm": 0.47878787878787876, + "acc_norm_stderr": 0.03900828913737301 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2864137086903305, + "mc1_stderr": 0.01582614243950235, + "mc2": 0.453794908688158, + "mc2_stderr": 0.015317536289389658 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3600944510035419, + "acc_stderr": 0.016503686720440072, + "acc_norm": 0.48760330578512395, + "acc_norm_stderr": 0.017185069732676514 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + 
"harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + 
"harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "BM-K/mistral-7b-it-v1.0.1", + "model_sha": "710fbce5dd54e5794f1bcdf4f53d3c0ceeafb405", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/BM-K/mistral-7b-it-v1.0.2/result_2023-11-08 13:07:51.json b/BM-K/mistral-7b-it-v1.0.2/result_2023-11-08 13:07:51.json new file mode 100644 index 0000000000000000000000000000000000000000..dd9170f52fe04ac3c2158d342c9ccb4cb0387369 --- /dev/null +++ b/BM-K/mistral-7b-it-v1.0.2/result_2023-11-08 13:07:51.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3387372013651877, + "acc_stderr": 0.013830568927974332, + "acc_norm": 0.4035836177474403, + "acc_norm_stderr": 0.01433715891426845 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3811989643497311, + "acc_stderr": 0.00484688692976347, + "acc_norm": 0.4954192391953794, + "acc_norm_stderr": 0.004989572002196691 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.49707602339181284, + "acc_stderr": 0.03834759370936839, + "acc_norm": 0.49707602339181284, + "acc_norm_stderr": 0.03834759370936839 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5825242718446602, + "acc_stderr": 0.048828405482122375, + "acc_norm": 0.5825242718446602, + "acc_norm_stderr": 0.048828405482122375 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.508301404853129, + "acc_stderr": 0.017877498991072, + "acc_norm": 0.508301404853129, + "acc_norm_stderr": 0.017877498991072 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.31851851851851853, + "acc_stderr": 0.040247784019771124, + "acc_norm": 0.31851851851851853, + "acc_norm_stderr": 0.040247784019771124 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + 
"harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.37872340425531914, + "acc_stderr": 0.03170995606040655, + "acc_norm": 0.37872340425531914, + "acc_norm_stderr": 0.03170995606040655 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.43373493975903615, + "acc_stderr": 0.03858158940685516, + "acc_norm": 0.43373493975903615, + "acc_norm_stderr": 0.03858158940685516 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.45980707395498394, + "acc_stderr": 0.028306190403305696, + "acc_norm": 0.45980707395498394, + "acc_norm_stderr": 0.028306190403305696 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.48878923766816146, + "acc_stderr": 0.033549366530984746, + "acc_norm": 0.48878923766816146, + "acc_norm_stderr": 0.033549366530984746 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4351145038167939, + "acc_stderr": 0.04348208051644858, + "acc_norm": 0.4351145038167939, + "acc_norm_stderr": 0.04348208051644858 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5151515151515151, + "acc_stderr": 0.03560716516531061, + "acc_norm": 0.5151515151515151, + "acc_norm_stderr": 0.03560716516531061 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4689655172413793, + "acc_stderr": 0.04158632762097828, + "acc_norm": 0.4689655172413793, + "acc_norm_stderr": 0.04158632762097828 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.043364327079931785, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.043364327079931785 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4327731092436975, + "acc_stderr": 0.03218358107742613, + "acc_norm": 0.4327731092436975, + "acc_norm_stderr": 0.03218358107742613 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4230769230769231, + "acc_stderr": 0.025049197876042335, + "acc_norm": 
0.4230769230769231, + "acc_norm_stderr": 0.025049197876042335 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.55, + "acc_stderr": 0.049999999999999996, + "acc_norm": 0.55, + "acc_norm_stderr": 0.049999999999999996 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5185185185185185, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.5185185185185185, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4187192118226601, + "acc_stderr": 0.03471192860518468, + "acc_norm": 0.4187192118226601, + "acc_norm_stderr": 0.03471192860518468 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.43870967741935485, + "acc_stderr": 0.028229497320317216, + "acc_norm": 0.43870967741935485, + "acc_norm_stderr": 0.028229497320317216 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6923076923076923, + "acc_stderr": 0.030236389942173078, + "acc_norm": 0.6923076923076923, + "acc_norm_stderr": 0.030236389942173078 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.44528301886792454, + "acc_stderr": 0.030588052974270655, + "acc_norm": 0.44528301886792454, + "acc_norm_stderr": 0.030588052974270655 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5, + "acc_stderr": 0.04789131426105757, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04789131426105757 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3111111111111111, + "acc_stderr": 0.028226446749683522, + "acc_norm": 0.3111111111111111, + "acc_norm_stderr": 0.028226446749683522 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + "acc_stderr": 0.03684881521389023, + "acc_norm": 0.2847682119205298, + "acc_norm_stderr": 0.03684881521389023 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6069651741293532, + "acc_stderr": 0.0345368246603156, + "acc_norm": 
0.6069651741293532, + "acc_norm_stderr": 0.0345368246603156 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3872832369942196, + "acc_stderr": 0.03714325906302064, + "acc_norm": 0.3872832369942196, + "acc_norm_stderr": 0.03714325906302064 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.373015873015873, + "acc_stderr": 0.02490699045899257, + "acc_norm": 0.373015873015873, + "acc_norm_stderr": 0.02490699045899257 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.04076663253918567, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.04076663253918567 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.63, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.63, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5115606936416185, + "acc_stderr": 0.026911898686377913, + "acc_norm": 0.5115606936416185, + "acc_norm_stderr": 0.026911898686377913 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.43558282208588955, + "acc_stderr": 0.03895632464138937, + "acc_norm": 0.43558282208588955, + "acc_norm_stderr": 0.03895632464138937 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4660493827160494, + "acc_stderr": 0.027756535257347663, + "acc_norm": 0.4660493827160494, + "acc_norm_stderr": 0.027756535257347663 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5544041450777202, + "acc_stderr": 0.03587014986075659, + "acc_norm": 0.5544041450777202, + "acc_norm_stderr": 0.03587014986075659 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.32456140350877194, + "acc_stderr": 0.04404556157374768, + 
"acc_norm": 0.32456140350877194, + "acc_norm_stderr": 0.04404556157374768 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5045871559633027, + "acc_stderr": 0.02143642095552942, + "acc_norm": 0.5045871559633027, + "acc_norm_stderr": 0.02143642095552942 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30158730158730157, + "acc_stderr": 0.04104947269903394, + "acc_norm": 0.30158730158730157, + "acc_norm_stderr": 0.04104947269903394 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.45751633986928103, + "acc_stderr": 0.02852638345214264, + "acc_norm": 0.45751633986928103, + "acc_norm_stderr": 0.02852638345214264 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6694214876033058, + "acc_stderr": 0.04294340845212094, + "acc_norm": 0.6694214876033058, + "acc_norm_stderr": 0.04294340845212094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40789473684210525, + "acc_stderr": 0.03999309712777473, + "acc_norm": 0.40789473684210525, + "acc_norm_stderr": 0.03999309712777473 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3790849673202614, + "acc_stderr": 0.019627444748412236, + "acc_norm": 0.3790849673202614, + "acc_norm_stderr": 0.019627444748412236 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.35106382978723405, + "acc_stderr": 0.02847350127296378, + "acc_norm": 0.35106382978723405, + "acc_norm_stderr": 0.02847350127296378 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.38392857142857145, + "acc_stderr": 0.04616143075028547, + "acc_norm": 0.38392857142857145, + "acc_norm_stderr": 0.04616143075028547 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.35648148148148145, + "acc_stderr": 0.032664783315272714, + "acc_norm": 0.35648148148148145, + "acc_norm_stderr": 0.032664783315272714 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.21899441340782122, + 
"acc_stderr": 0.013831676687303205, + "acc_norm": 0.21899441340782122, + "acc_norm_stderr": 0.013831676687303205 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.57, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.57, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3492647058823529, + "acc_stderr": 0.028959755196824873, + "acc_norm": 0.3492647058823529, + "acc_norm_stderr": 0.028959755196824873 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.39591836734693875, + "acc_stderr": 0.03130802899065686, + "acc_norm": 0.39591836734693875, + "acc_norm_stderr": 0.03130802899065686 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6075949367088608, + "acc_stderr": 0.0317847187456473, + "acc_norm": 0.6075949367088608, + "acc_norm_stderr": 0.0317847187456473 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.33833116036505867, + "acc_stderr": 0.012084265626344204, + "acc_norm": 0.33833116036505867, + "acc_norm_stderr": 0.012084265626344204 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.47058823529411764, + "acc_stderr": 0.03503235296367992, + "acc_norm": 0.47058823529411764, + "acc_norm_stderr": 0.03503235296367992 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5151515151515151, + "acc_stderr": 0.03902551007374448, + "acc_norm": 0.5151515151515151, + "acc_norm_stderr": 0.03902551007374448 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2876376988984088, + "mc1_stderr": 0.015846315101394812, + "mc2": 0.4533712341088757, + "mc2_stderr": 0.015449105919584536 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3825265643447462, + "acc_stderr": 0.016709165387228806, + "acc_norm": 0.49586776859504134, + "acc_norm_stderr": 0.017189767032130824 + } + }, + "versions": 
{ + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + 
"harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "BM-K/mistral-7b-it-v1.0.2", + "model_sha": "06668a57b990007d15d178c94aabd162d6af9531", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/BM-K/mistral-7b-it-v1.0.3/result_2023-11-09 00:04:55.json b/BM-K/mistral-7b-it-v1.0.3/result_2023-11-09 00:04:55.json new file mode 100644 index 0000000000000000000000000000000000000000..6d334a3e6b2a76970498f6b9f193e985ac9106ec --- /dev/null +++ b/BM-K/mistral-7b-it-v1.0.3/result_2023-11-09 00:04:55.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3370307167235495, + "acc_stderr": 0.013813476652902272, + "acc_norm": 0.3993174061433447, + "acc_norm_stderr": 0.014312094557946707 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3791077474606652, + "acc_stderr": 0.00484173445350666, + "acc_norm": 0.4907388966341366, + "acc_norm_stderr": 0.00498892541052277 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.47368421052631576, + "acc_stderr": 0.03829509868994727, + "acc_norm": 0.47368421052631576, + "acc_norm_stderr": 0.03829509868994727 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5242718446601942, + 
"acc_stderr": 0.04944901092973779, + "acc_norm": 0.5242718446601942, + "acc_norm_stderr": 0.04944901092973779 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.51213282247765, + "acc_stderr": 0.017874698667491334, + "acc_norm": 0.51213282247765, + "acc_norm_stderr": 0.017874698667491334 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04072314811876837, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04072314811876837 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39148936170212767, + "acc_stderr": 0.03190701242326812, + "acc_norm": 0.39148936170212767, + "acc_norm_stderr": 0.03190701242326812 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4397590361445783, + "acc_stderr": 0.03864139923699121, + "acc_norm": 0.4397590361445783, + "acc_norm_stderr": 0.03864139923699121 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4662379421221865, + "acc_stderr": 0.028333277109562786, + "acc_norm": 0.4662379421221865, + "acc_norm_stderr": 0.028333277109562786 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.43946188340807174, + "acc_stderr": 0.03331092511038179, + "acc_norm": 0.43946188340807174, + "acc_norm_stderr": 0.03331092511038179 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.40458015267175573, + "acc_stderr": 0.043046937953806645, + "acc_norm": 0.40458015267175573, + "acc_norm_stderr": 0.043046937953806645 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5252525252525253, + "acc_stderr": 0.03557806245087314, + "acc_norm": 0.5252525252525253, + "acc_norm_stderr": 0.03557806245087314 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 
0.4482758620689655, + "acc_stderr": 0.04144311810878152, + "acc_norm": 0.4482758620689655, + "acc_norm_stderr": 0.04144311810878152 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.18627450980392157, + "acc_stderr": 0.03873958714149352, + "acc_norm": 0.18627450980392157, + "acc_norm_stderr": 0.03873958714149352 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3907563025210084, + "acc_stderr": 0.031693802357129965, + "acc_norm": 0.3907563025210084, + "acc_norm_stderr": 0.031693802357129965 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3641025641025641, + "acc_stderr": 0.024396672985094785, + "acc_norm": 0.3641025641025641, + "acc_norm_stderr": 0.024396672985094785 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.55, + "acc_stderr": 0.049999999999999996, + "acc_norm": 0.55, + "acc_norm_stderr": 0.049999999999999996 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5370370370370371, + "acc_stderr": 0.04820403072760627, + "acc_norm": 0.5370370370370371, + "acc_norm_stderr": 0.04820403072760627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4039408866995074, + "acc_stderr": 0.0345245390382204, + "acc_norm": 0.4039408866995074, + "acc_norm_stderr": 0.0345245390382204 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.44193548387096776, + "acc_stderr": 0.02825155790684973, + "acc_norm": 0.44193548387096776, + "acc_norm_stderr": 0.02825155790684973 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6623931623931624, + "acc_stderr": 0.030980296992618554, + "acc_norm": 0.6623931623931624, + "acc_norm_stderr": 0.030980296992618554 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.41509433962264153, + "acc_stderr": 0.030325945789286105, + "acc_norm": 0.41509433962264153, + "acc_norm_stderr": 0.030325945789286105 + }, + 
"harness|ko_mmlu_public_relations|5": { + "acc": 0.43636363636363634, + "acc_stderr": 0.04750185058907297, + "acc_norm": 0.43636363636363634, + "acc_norm_stderr": 0.04750185058907297 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.31851851851851853, + "acc_stderr": 0.028406533090608466, + "acc_norm": 0.31851851851851853, + "acc_norm_stderr": 0.028406533090608466 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.037101857261199946, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.037101857261199946 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6218905472636815, + "acc_stderr": 0.034288678487786564, + "acc_norm": 0.6218905472636815, + "acc_norm_stderr": 0.034288678487786564 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3930635838150289, + "acc_stderr": 0.03724249595817729, + "acc_norm": 0.3930635838150289, + "acc_norm_stderr": 0.03724249595817729 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3544973544973545, + "acc_stderr": 0.024636830602842, + "acc_norm": 0.3544973544973545, + "acc_norm_stderr": 0.024636830602842 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.03852084696008534, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.03852084696008534 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.63, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.63, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.48265895953757226, + "acc_stderr": 0.026902900458666647, + "acc_norm": 0.48265895953757226, + "acc_norm_stderr": 0.026902900458666647 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4049079754601227, + "acc_stderr": 0.03856672163548914, + "acc_norm": 0.4049079754601227, + "acc_norm_stderr": 
0.03856672163548914 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4845679012345679, + "acc_stderr": 0.027807490044276198, + "acc_norm": 0.4845679012345679, + "acc_norm_stderr": 0.027807490044276198 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.533678756476684, + "acc_stderr": 0.03600244069867178, + "acc_norm": 0.533678756476684, + "acc_norm_stderr": 0.03600244069867178 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.30701754385964913, + "acc_stderr": 0.0433913832257986, + "acc_norm": 0.30701754385964913, + "acc_norm_stderr": 0.0433913832257986 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.46055045871559636, + "acc_stderr": 0.021370494609995093, + "acc_norm": 0.46055045871559636, + "acc_norm_stderr": 0.021370494609995093 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.23809523809523808, + "acc_stderr": 0.03809523809523811, + "acc_norm": 0.23809523809523808, + "acc_norm_stderr": 0.03809523809523811 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.028431095444176643, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.028431095444176643 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6528925619834711, + "acc_stderr": 0.04345724570292534, + "acc_norm": 0.6528925619834711, + "acc_norm_stderr": 0.04345724570292534 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40789473684210525, + "acc_stderr": 0.039993097127774734, + "acc_norm": 0.40789473684210525, + "acc_norm_stderr": 0.039993097127774734 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3790849673202614, + "acc_stderr": 0.019627444748412243, + "acc_norm": 
0.3790849673202614, + "acc_norm_stderr": 0.019627444748412243 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.028121636040639875, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.028121636040639875 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2767857142857143, + "acc_stderr": 0.042466243366976256, + "acc_norm": 0.2767857142857143, + "acc_norm_stderr": 0.042466243366976256 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.03275773486100999, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.03275773486100999 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24692737430167597, + "acc_stderr": 0.014422292204808852, + "acc_norm": 0.24692737430167597, + "acc_norm_stderr": 0.014422292204808852 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3639705882352941, + "acc_stderr": 0.029227192460032025, + "acc_norm": 0.3639705882352941, + "acc_norm_stderr": 0.029227192460032025 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4122448979591837, + "acc_stderr": 0.03151236044674281, + "acc_norm": 0.4122448979591837, + "acc_norm_stderr": 0.03151236044674281 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5569620253164557, + "acc_stderr": 0.03233532777533484, + "acc_norm": 0.5569620253164557, + "acc_norm_stderr": 0.03233532777533484 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3213820078226858, + "acc_stderr": 0.01192758135226508, + "acc_norm": 0.3213820078226858, + "acc_norm_stderr": 0.01192758135226508 + }, + "harness|ko_mmlu_high_school_us_history|5": { + 
"acc": 0.4117647058823529, + "acc_stderr": 0.03454236585380611, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.03454236585380611 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.44242424242424244, + "acc_stderr": 0.038783721137112745, + "acc_norm": 0.44242424242424244, + "acc_norm_stderr": 0.038783721137112745 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2802937576499388, + "mc1_stderr": 0.015723139524608753, + "mc2": 0.44405801770483816, + "mc2_stderr": 0.015315267499738446 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.32585596221959856, + "acc_stderr": 0.016114023894800333, + "acc_norm": 0.46162927981109797, + "acc_norm_stderr": 0.017139660221845564 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 
1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "BM-K/mistral-7b-it-v1.0.3", + "model_sha": "5d368e894e2091bd003ae65710009016b7bafaa0", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/BM-K/mistral-7b-it-v1.0/result_2023-11-07 08:10:27.json b/BM-K/mistral-7b-it-v1.0/result_2023-11-07 08:10:27.json new file mode 100644 index 0000000000000000000000000000000000000000..1e38491cc801cec5d6f7063a3dc7a867ad00a108 --- /dev/null +++ 
b/BM-K/mistral-7b-it-v1.0/result_2023-11-07 08:10:27.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.35409556313993173, + "acc_stderr": 0.013975454122756562, + "acc_norm": 0.4087030716723549, + "acc_norm_stderr": 0.014365750345427 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3875721967735511, + "acc_stderr": 0.004862003566798545, + "acc_norm": 0.504282015534754, + "acc_norm_stderr": 0.004989598426249547 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4853801169590643, + "acc_stderr": 0.038331852752130205, + "acc_norm": 0.4853801169590643, + "acc_norm_stderr": 0.038331852752130205 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5533980582524272, + "acc_stderr": 0.04922424153458933, + "acc_norm": 0.5533980582524272, + "acc_norm_stderr": 0.04922424153458933 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.51213282247765, + "acc_stderr": 0.01787469866749133, + "acc_norm": 0.51213282247765, + "acc_norm_stderr": 0.01787469866749133 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.04292596718256981, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.04292596718256981 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421255, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421255 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.41702127659574467, + "acc_stderr": 0.03223276266711712, + "acc_norm": 0.41702127659574467, + "acc_norm_stderr": 0.03223276266711712 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.43373493975903615, + "acc_stderr": 0.03858158940685516, + "acc_norm": 0.43373493975903615, + "acc_norm_stderr": 0.03858158940685516 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.49517684887459806, + "acc_stderr": 0.028396770444111288, + "acc_norm": 0.49517684887459806, + "acc_norm_stderr": 0.028396770444111288 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.48878923766816146, + "acc_stderr": 
0.033549366530984746, + "acc_norm": 0.48878923766816146, + "acc_norm_stderr": 0.033549366530984746 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4122137404580153, + "acc_stderr": 0.04317171194870254, + "acc_norm": 0.4122137404580153, + "acc_norm_stderr": 0.04317171194870254 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.03547601494006937, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.03547601494006937 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4482758620689655, + "acc_stderr": 0.04144311810878152, + "acc_norm": 0.4482758620689655, + "acc_norm_stderr": 0.04144311810878152 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.04220773659171452, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.04220773659171452 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.40756302521008403, + "acc_stderr": 0.03191863374478465, + "acc_norm": 0.40756302521008403, + "acc_norm_stderr": 0.03191863374478465 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4307692307692308, + "acc_stderr": 0.02510682066053975, + "acc_norm": 0.4307692307692308, + "acc_norm_stderr": 0.02510682066053975 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.66, + "acc_stderr": 0.04760952285695237, + "acc_norm": 0.66, + "acc_norm_stderr": 0.04760952285695237 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.04826217294139894, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.04826217294139894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 
0.43349753694581283, + "acc_stderr": 0.03486731727419872, + "acc_norm": 0.43349753694581283, + "acc_norm_stderr": 0.03486731727419872 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.45806451612903226, + "acc_stderr": 0.02834378725054062, + "acc_norm": 0.45806451612903226, + "acc_norm_stderr": 0.02834378725054062 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6709401709401709, + "acc_stderr": 0.030782321577688173, + "acc_norm": 0.6709401709401709, + "acc_norm_stderr": 0.030782321577688173 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.41509433962264153, + "acc_stderr": 0.03032594578928611, + "acc_norm": 0.41509433962264153, + "acc_norm_stderr": 0.03032594578928611 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.43636363636363634, + "acc_stderr": 0.04750185058907297, + "acc_norm": 0.43636363636363634, + "acc_norm_stderr": 0.04750185058907297 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.02696242432507382, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.02696242432507382 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.24503311258278146, + "acc_stderr": 0.03511807571804723, + "acc_norm": 0.24503311258278146, + "acc_norm_stderr": 0.03511807571804723 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5870646766169154, + "acc_stderr": 0.03481520803367348, + "acc_norm": 0.5870646766169154, + "acc_norm_stderr": 0.03481520803367348 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3930635838150289, + "acc_stderr": 0.03724249595817729, + "acc_norm": 0.3930635838150289, + "acc_norm_stderr": 0.03724249595817729 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.024677862841332786, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.024677862841332786 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.375, + "acc_stderr": 0.04048439222695598, + "acc_norm": 0.375, + "acc_norm_stderr": 
0.04048439222695598 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.57, + "acc_stderr": 0.04975698519562427, + "acc_norm": 0.57, + "acc_norm_stderr": 0.04975698519562427 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.47398843930635837, + "acc_stderr": 0.026882643434022895, + "acc_norm": 0.47398843930635837, + "acc_norm_stderr": 0.026882643434022895 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.43558282208588955, + "acc_stderr": 0.03895632464138937, + "acc_norm": 0.43558282208588955, + "acc_norm_stderr": 0.03895632464138937 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4567901234567901, + "acc_stderr": 0.027716661650194038, + "acc_norm": 0.4567901234567901, + "acc_norm_stderr": 0.027716661650194038 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5233160621761658, + "acc_stderr": 0.03604513672442202, + "acc_norm": 0.5233160621761658, + "acc_norm_stderr": 0.03604513672442202 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.044346007015849245, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.044346007015849245 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.48256880733944957, + "acc_stderr": 0.02142429187185315, + "acc_norm": 0.48256880733944957, + "acc_norm_stderr": 0.02142429187185315 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.23809523809523808, + "acc_stderr": 0.03809523809523811, + "acc_norm": 0.23809523809523808, + "acc_norm_stderr": 0.03809523809523811 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.434640522875817, + "acc_stderr": 0.028384256704883037, + "acc_norm": 0.434640522875817, + 
"acc_norm_stderr": 0.028384256704883037 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6528925619834711, + "acc_stderr": 0.04345724570292534, + "acc_norm": 0.6528925619834711, + "acc_norm_stderr": 0.04345724570292534 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3881578947368421, + "acc_stderr": 0.03965842097512744, + "acc_norm": 0.3881578947368421, + "acc_norm_stderr": 0.03965842097512744 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.37254901960784315, + "acc_stderr": 0.019559646809215934, + "acc_norm": 0.37254901960784315, + "acc_norm_stderr": 0.019559646809215934 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.33687943262411346, + "acc_stderr": 0.02819553487396673, + "acc_norm": 0.33687943262411346, + "acc_norm_stderr": 0.02819553487396673 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.32142857142857145, + "acc_stderr": 0.04432804055291518, + "acc_norm": 0.32142857142857145, + "acc_norm_stderr": 0.04432804055291518 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.375, + "acc_stderr": 0.033016908987210894, + "acc_norm": 0.375, + "acc_norm_stderr": 0.033016908987210894 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24134078212290502, + "acc_stderr": 0.014310999547961443, + "acc_norm": 0.24134078212290502, + "acc_norm_stderr": 0.014310999547961443 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3382352941176471, + "acc_stderr": 0.028739328513983576, + "acc_norm": 
0.3382352941176471, + "acc_norm_stderr": 0.028739328513983576 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.39183673469387753, + "acc_stderr": 0.03125127591089165, + "acc_norm": 0.39183673469387753, + "acc_norm_stderr": 0.03125127591089165 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5780590717299579, + "acc_stderr": 0.032148146302403695, + "acc_norm": 0.5780590717299579, + "acc_norm_stderr": 0.032148146302403695 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.32985658409387225, + "acc_stderr": 0.012008129938540472, + "acc_norm": 0.32985658409387225, + "acc_norm_stderr": 0.012008129938540472 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4215686274509804, + "acc_stderr": 0.03465868196380758, + "acc_norm": 0.4215686274509804, + "acc_norm_stderr": 0.03465868196380758 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4727272727272727, + "acc_stderr": 0.03898531605579419, + "acc_norm": 0.4727272727272727, + "acc_norm_stderr": 0.03898531605579419 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2802937576499388, + "mc1_stderr": 0.015723139524608742, + "mc2": 0.4447858809482175, + "mc2_stderr": 0.015211057250300537 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3860684769775679, + "acc_stderr": 0.016738130760321743, + "acc_norm": 0.5100354191263282, + "acc_norm_stderr": 0.017186891286894067 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + 
"harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + 
"harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "BM-K/mistral-7b-it-v1.0", + "model_sha": "f5bfb9dc4f4dd8b64d45c9a158e3982959b18035", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/BM-K/mistral-7b-it-v1.1.0/result_2023-11-13 23:21:20.json b/BM-K/mistral-7b-it-v1.1.0/result_2023-11-13 23:21:20.json new file mode 100644 index 0000000000000000000000000000000000000000..24fd7efe9d273c171df8874fbc06202cec653931 --- /dev/null +++ b/BM-K/mistral-7b-it-v1.1.0/result_2023-11-13 23:21:20.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.36177474402730375, + "acc_stderr": 0.014041957945038078, + "acc_norm": 0.4232081911262799, + "acc_norm_stderr": 0.01443803622084802 + }, + "harness|ko_hellaswag|10": { + "acc": 0.37223660625373434, + "acc_stderr": 0.004824130528590597, + "acc_norm": 0.47610037841067515, + "acc_norm_stderr": 0.004984077906216103 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4327485380116959, + "acc_stderr": 0.03799978644370607, + "acc_norm": 0.4327485380116959, + "acc_norm_stderr": 0.03799978644370607 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5533980582524272, + "acc_stderr": 0.04922424153458934, + "acc_norm": 0.5533980582524272, + "acc_norm_stderr": 0.04922424153458934 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4840357598978289, + "acc_stderr": 0.017870847506081727, + "acc_norm": 0.4840357598978289, + "acc_norm_stderr": 0.017870847506081727 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.04244633238353229, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.04244633238353229 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + 
"acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.35319148936170214, + "acc_stderr": 0.031245325202761926, + "acc_norm": 0.35319148936170214, + "acc_norm_stderr": 0.031245325202761926 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39759036144578314, + "acc_stderr": 0.038099730845402184, + "acc_norm": 0.39759036144578314, + "acc_norm_stderr": 0.038099730845402184 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4212218649517685, + "acc_stderr": 0.028043399858210628, + "acc_norm": 0.4212218649517685, + "acc_norm_stderr": 0.028043399858210628 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.40358744394618834, + "acc_stderr": 0.03292802819330315, + "acc_norm": 0.40358744394618834, + "acc_norm_stderr": 0.03292802819330315 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.44274809160305345, + "acc_stderr": 0.04356447202665069, + "acc_norm": 0.44274809160305345, + "acc_norm_stderr": 0.04356447202665069 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5353535353535354, + "acc_stderr": 0.03553436368828063, + "acc_norm": 0.5353535353535354, + "acc_norm_stderr": 0.03553436368828063 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.42758620689655175, + "acc_stderr": 0.041227371113703316, + "acc_norm": 0.42758620689655175, + "acc_norm_stderr": 0.041227371113703316 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.042207736591714534, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.042207736591714534 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3907563025210084, + "acc_stderr": 0.031693802357129965, + "acc_norm": 0.3907563025210084, + "acc_norm_stderr": 0.031693802357129965 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + 
"acc": 0.4025641025641026, + "acc_stderr": 0.024864995159767762, + "acc_norm": 0.4025641025641026, + "acc_norm_stderr": 0.024864995159767762 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4236453201970443, + "acc_stderr": 0.034767257476490364, + "acc_norm": 0.4236453201970443, + "acc_norm_stderr": 0.034767257476490364 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.44193548387096776, + "acc_stderr": 0.02825155790684974, + "acc_norm": 0.44193548387096776, + "acc_norm_stderr": 0.02825155790684974 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6111111111111112, + "acc_stderr": 0.031937057262002924, + "acc_norm": 0.6111111111111112, + "acc_norm_stderr": 0.031937057262002924 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4226415094339623, + "acc_stderr": 0.030402331445769537, + "acc_norm": 0.4226415094339623, + "acc_norm_stderr": 0.030402331445769537 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4909090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.4909090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.026962424325073828, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.026962424325073828 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.32450331125827814, + "acc_stderr": 0.038227469376587525, + "acc_norm": 0.32450331125827814, + "acc_norm_stderr": 0.038227469376587525 + }, + "harness|ko_mmlu_sociology|5": { + 
"acc": 0.5572139303482587, + "acc_stderr": 0.03512310964123935, + "acc_norm": 0.5572139303482587, + "acc_norm_stderr": 0.03512310964123935 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3699421965317919, + "acc_stderr": 0.0368122963339432, + "acc_norm": 0.3699421965317919, + "acc_norm_stderr": 0.0368122963339432 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3439153439153439, + "acc_stderr": 0.024464426625596433, + "acc_norm": 0.3439153439153439, + "acc_norm_stderr": 0.024464426625596433 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3680555555555556, + "acc_stderr": 0.04032999053960719, + "acc_norm": 0.3680555555555556, + "acc_norm_stderr": 0.04032999053960719 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.57, + "acc_stderr": 0.04975698519562426, + "acc_norm": 0.57, + "acc_norm_stderr": 0.04975698519562426 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.44508670520231214, + "acc_stderr": 0.026756255129663765, + "acc_norm": 0.44508670520231214, + "acc_norm_stderr": 0.026756255129663765 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.44171779141104295, + "acc_stderr": 0.03901591825836184, + "acc_norm": 0.44171779141104295, + "acc_norm_stderr": 0.03901591825836184 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.41975308641975306, + "acc_stderr": 0.027460099557005138, + "acc_norm": 0.41975308641975306, + "acc_norm_stderr": 0.027460099557005138 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.47150259067357514, + "acc_stderr": 0.03602573571288441, + "acc_norm": 0.47150259067357514, + "acc_norm_stderr": 0.03602573571288441 + }, + 
"harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.041424397194893596, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.041424397194893596 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.46422018348623856, + "acc_stderr": 0.0213823647757019, + "acc_norm": 0.46422018348623856, + "acc_norm_stderr": 0.0213823647757019 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.29365079365079366, + "acc_stderr": 0.04073524322147125, + "acc_norm": 0.29365079365079366, + "acc_norm_stderr": 0.04073524322147125 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.02845263998508801, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.02845263998508801 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6198347107438017, + "acc_stderr": 0.04431324501968431, + "acc_norm": 0.6198347107438017, + "acc_norm_stderr": 0.04431324501968431 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3881578947368421, + "acc_stderr": 0.03965842097512744, + "acc_norm": 0.3881578947368421, + "acc_norm_stderr": 0.03965842097512744 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.35294117647058826, + "acc_stderr": 0.01933314202079706, + "acc_norm": 0.35294117647058826, + "acc_norm_stderr": 0.01933314202079706 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2978723404255319, + "acc_stderr": 0.027281608344469417, + "acc_norm": 0.2978723404255319, + "acc_norm_stderr": 0.027281608344469417 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25, + "acc_stderr": 0.04109974682633932, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04109974682633932 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.375, + "acc_stderr": 0.033016908987210894, + "acc_norm": 0.375, + "acc_norm_stderr": 0.033016908987210894 + }, + 
"harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24134078212290502, + "acc_stderr": 0.014310999547961464, + "acc_norm": 0.24134078212290502, + "acc_norm_stderr": 0.014310999547961464 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.39338235294117646, + "acc_stderr": 0.029674288281311183, + "acc_norm": 0.39338235294117646, + "acc_norm_stderr": 0.029674288281311183 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.32653061224489793, + "acc_stderr": 0.030021056238440303, + "acc_norm": 0.32653061224489793, + "acc_norm_stderr": 0.030021056238440303 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5274261603375527, + "acc_stderr": 0.03249822718301303, + "acc_norm": 0.5274261603375527, + "acc_norm_stderr": 0.03249822718301303 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.29791395045632335, + "acc_stderr": 0.01168071734040005, + "acc_norm": 0.29791395045632335, + "acc_norm_stderr": 0.01168071734040005 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4215686274509804, + "acc_stderr": 0.03465868196380757, + "acc_norm": 0.4215686274509804, + "acc_norm_stderr": 0.03465868196380757 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.45454545454545453, + "acc_stderr": 0.03888176921674099, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.03888176921674099 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2778457772337821, + "mc1_stderr": 0.01568092936402462, + "mc2": 0.4553516695896828, + "mc2_stderr": 0.01619950826163877 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3069657615112161, + "acc_stderr": 0.015857588095362814, + "acc_norm": 
0.34710743801652894, + "acc_norm_stderr": 0.016366945603281273 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + 
"harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "BM-K/mistral-7b-it-v1.1.0", + "model_sha": "7cf13a6ab9a4f0231b168a8102d784fc6c22510a", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/BM-K/mistral-7b-it-v1.2.0/result_2023-11-14 04:32:36.json b/BM-K/mistral-7b-it-v1.2.0/result_2023-11-14 04:32:36.json new file mode 100644 index 0000000000000000000000000000000000000000..516b11cd189154e88b6dea143165cb4df98d8922 --- /dev/null +++ b/BM-K/mistral-7b-it-v1.2.0/result_2023-11-14 04:32:36.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.33532423208191126, + "acc_stderr": 0.013796182947785564, + "acc_norm": 0.38139931740614336, + "acc_norm_stderr": 0.014194389086685251 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3736307508464449, + "acc_stderr": 0.00482778628907485, + "acc_norm": 0.4671380203146783, + "acc_norm_stderr": 0.004978992721242828 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.39766081871345027, + "acc_stderr": 0.0375363895576169, + "acc_norm": 0.39766081871345027, + "acc_norm_stderr": 
0.0375363895576169 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5825242718446602, + "acc_stderr": 0.048828405482122375, + "acc_norm": 0.5825242718446602, + "acc_norm_stderr": 0.048828405482122375 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.42911877394636017, + "acc_stderr": 0.017699388483126795, + "acc_norm": 0.42911877394636017, + "acc_norm_stderr": 0.017699388483126795 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37777777777777777, + "acc_stderr": 0.04188307537595853, + "acc_norm": 0.37777777777777777, + "acc_norm_stderr": 0.04188307537595853 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.35319148936170214, + "acc_stderr": 0.031245325202761926, + "acc_norm": 0.35319148936170214, + "acc_norm_stderr": 0.031245325202761926 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39156626506024095, + "acc_stderr": 0.03799857454479636, + "acc_norm": 0.39156626506024095, + "acc_norm_stderr": 0.03799857454479636 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.42765273311897106, + "acc_stderr": 0.02809924077580955, + "acc_norm": 0.42765273311897106, + "acc_norm_stderr": 0.02809924077580955 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.37668161434977576, + "acc_stderr": 0.03252113489929187, + "acc_norm": 0.37668161434977576, + "acc_norm_stderr": 0.03252113489929187 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.45038167938931295, + "acc_stderr": 0.04363643698524779, + "acc_norm": 0.45038167938931295, + "acc_norm_stderr": 0.04363643698524779 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5303030303030303, + "acc_stderr": 0.0355580405176393, + "acc_norm": 0.5303030303030303, + "acc_norm_stderr": 
0.0355580405176393 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.42758620689655175, + "acc_stderr": 0.04122737111370332, + "acc_norm": 0.42758620689655175, + "acc_norm_stderr": 0.04122737111370332 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.040233822736177476, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.040233822736177476 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4117647058823529, + "acc_stderr": 0.031968769891957786, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.031968769891957786 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.43846153846153846, + "acc_stderr": 0.02515826601686856, + "acc_norm": 0.43846153846153846, + "acc_norm_stderr": 0.02515826601686856 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.53, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.53, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5648148148148148, + "acc_stderr": 0.04792898170907061, + "acc_norm": 0.5648148148148148, + "acc_norm_stderr": 0.04792898170907061 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.37438423645320196, + "acc_stderr": 0.03405155380561952, + "acc_norm": 0.37438423645320196, + "acc_norm_stderr": 0.03405155380561952 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.432258064516129, + "acc_stderr": 0.028181739720019416, + "acc_norm": 0.432258064516129, + "acc_norm_stderr": 0.028181739720019416 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6196581196581197, + "acc_stderr": 0.031804252043841, + "acc_norm": 0.6196581196581197, + "acc_norm_stderr": 0.031804252043841 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.41509433962264153, + "acc_stderr": 0.030325945789286102, + 
"acc_norm": 0.41509433962264153, + "acc_norm_stderr": 0.030325945789286102 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5727272727272728, + "acc_stderr": 0.047381987035454834, + "acc_norm": 0.5727272727272728, + "acc_norm_stderr": 0.047381987035454834 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3037037037037037, + "acc_stderr": 0.028037929969114975, + "acc_norm": 0.3037037037037037, + "acc_norm_stderr": 0.028037929969114975 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2781456953642384, + "acc_stderr": 0.036586032627637426, + "acc_norm": 0.2781456953642384, + "acc_norm_stderr": 0.036586032627637426 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5572139303482587, + "acc_stderr": 0.03512310964123937, + "acc_norm": 0.5572139303482587, + "acc_norm_stderr": 0.03512310964123937 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3468208092485549, + "acc_stderr": 0.036291466701596636, + "acc_norm": 0.3468208092485549, + "acc_norm_stderr": 0.036291466701596636 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.373015873015873, + "acc_stderr": 0.02490699045899257, + "acc_norm": 0.373015873015873, + "acc_norm_stderr": 0.02490699045899257 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3472222222222222, + "acc_stderr": 0.039812405437178615, + "acc_norm": 0.3472222222222222, + "acc_norm_stderr": 0.039812405437178615 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.58, + "acc_stderr": 0.04960449637488585, + "acc_norm": 0.58, + "acc_norm_stderr": 0.04960449637488585 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4797687861271676, + "acc_stderr": 0.026897049996382875, + "acc_norm": 0.4797687861271676, + "acc_norm_stderr": 0.026897049996382875 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4171779141104294, + 
"acc_stderr": 0.0387410285981808, + "acc_norm": 0.4171779141104294, + "acc_norm_stderr": 0.0387410285981808 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.38271604938271603, + "acc_stderr": 0.027044538138402616, + "acc_norm": 0.38271604938271603, + "acc_norm_stderr": 0.027044538138402616 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.46113989637305697, + "acc_stderr": 0.03597524411734578, + "acc_norm": 0.46113989637305697, + "acc_norm_stderr": 0.03597524411734578 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.04227054451232199, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.04227054451232199 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.45871559633027525, + "acc_stderr": 0.02136412253388169, + "acc_norm": 0.45871559633027525, + "acc_norm_stderr": 0.02136412253388169 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3492063492063492, + "acc_stderr": 0.04263906892795133, + "acc_norm": 0.3492063492063492, + "acc_norm_stderr": 0.04263906892795133 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.43790849673202614, + "acc_stderr": 0.02840830202033269, + "acc_norm": 0.43790849673202614, + "acc_norm_stderr": 0.02840830202033269 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5867768595041323, + "acc_stderr": 0.04495087843548408, + "acc_norm": 0.5867768595041323, + "acc_norm_stderr": 0.04495087843548408 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4605263157894737, + "acc_stderr": 0.04056242252249033, + "acc_norm": 0.4605263157894737, + "acc_norm_stderr": 0.04056242252249033 + }, + "harness|ko_mmlu_professional_psychology|5": { + 
"acc": 0.35294117647058826, + "acc_stderr": 0.01933314202079706, + "acc_norm": 0.35294117647058826, + "acc_norm_stderr": 0.01933314202079706 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32269503546099293, + "acc_stderr": 0.027889139300534778, + "acc_norm": 0.32269503546099293, + "acc_norm_stderr": 0.027889139300534778 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.23214285714285715, + "acc_stderr": 0.04007341809755807, + "acc_norm": 0.23214285714285715, + "acc_norm_stderr": 0.04007341809755807 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.36574074074074076, + "acc_stderr": 0.03284738857647207, + "acc_norm": 0.36574074074074076, + "acc_norm_stderr": 0.03284738857647207 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2435754189944134, + "acc_stderr": 0.014355911964767857, + "acc_norm": 0.2435754189944134, + "acc_norm_stderr": 0.014355911964767857 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.31985294117647056, + "acc_stderr": 0.028332959514031225, + "acc_norm": 0.31985294117647056, + "acc_norm_stderr": 0.028332959514031225 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4448979591836735, + "acc_stderr": 0.031814251181977865, + "acc_norm": 0.4448979591836735, + "acc_norm_stderr": 0.031814251181977865 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.569620253164557, + "acc_stderr": 0.03223017195937598, + "acc_norm": 0.569620253164557, + "acc_norm_stderr": 0.03223017195937598 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3076923076923077, + "acc_stderr": 0.011787910251664587, + "acc_norm": 0.3076923076923077, + 
"acc_norm_stderr": 0.011787910251664587 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4019607843137255, + "acc_stderr": 0.034411900234824655, + "acc_norm": 0.4019607843137255, + "acc_norm_stderr": 0.034411900234824655 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.42424242424242425, + "acc_stderr": 0.038592681420702615, + "acc_norm": 0.42424242424242425, + "acc_norm_stderr": 0.038592681420702615 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.27906976744186046, + "mc1_stderr": 0.015702107090627877, + "mc2": 0.4571128110826051, + "mc2_stderr": 0.0163313732350845 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.30460448642266824, + "acc_stderr": 0.015823367273129395, + "acc_norm": 0.3293978748524203, + "acc_norm_stderr": 0.016158746868147146 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "BM-K/mistral-7b-it-v1.2.0", + "model_sha": "d233f62c06362a1008b268f72e919bd6eaf96166", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/BM-K/mistral-7b-it-v1.3.0/result_2023-11-15 15:01:55.json b/BM-K/mistral-7b-it-v1.3.0/result_2023-11-15 15:01:55.json new file mode 100644 
index 0000000000000000000000000000000000000000..2c9ee20c57981fc28f50a832431111814655d8a5 --- /dev/null +++ b/BM-K/mistral-7b-it-v1.3.0/result_2023-11-15 15:01:55.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3455631399317406, + "acc_stderr": 0.013896938461145687, + "acc_norm": 0.3873720136518771, + "acc_norm_stderr": 0.014235872487909872 + }, + "harness|ko_hellaswag|10": { + "acc": 0.382194781915953, + "acc_stderr": 0.004849306998727764, + "acc_norm": 0.4986058554072894, + "acc_norm_stderr": 0.004989762014739187 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5029239766081871, + "acc_stderr": 0.03834759370936839, + "acc_norm": 0.5029239766081871, + "acc_norm_stderr": 0.03834759370936839 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6019417475728155, + "acc_stderr": 0.04846748253977238, + "acc_norm": 0.6019417475728155, + "acc_norm_stderr": 0.04846748253977238 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.47126436781609193, + "acc_stderr": 0.01785041079438017, + "acc_norm": 0.47126436781609193, + "acc_norm_stderr": 0.01785041079438017 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34074074074074073, + "acc_stderr": 0.040943762699967946, + "acc_norm": 0.34074074074074073, + "acc_norm_stderr": 0.040943762699967946 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932269, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932269 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.44680851063829785, + "acc_stderr": 0.0325005368436584, + "acc_norm": 0.44680851063829785, + "acc_norm_stderr": 0.0325005368436584 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39156626506024095, + "acc_stderr": 0.037998574544796354, + "acc_norm": 0.39156626506024095, + "acc_norm_stderr": 0.037998574544796354 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.45980707395498394, + "acc_stderr": 0.028306190403305696, + "acc_norm": 0.45980707395498394, + "acc_norm_stderr": 
0.028306190403305696 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.452914798206278, + "acc_stderr": 0.03340867501923325, + "acc_norm": 0.452914798206278, + "acc_norm_stderr": 0.03340867501923325 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.46564885496183206, + "acc_stderr": 0.04374928560599738, + "acc_norm": 0.46564885496183206, + "acc_norm_stderr": 0.04374928560599738 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5353535353535354, + "acc_stderr": 0.035534363688280626, + "acc_norm": 0.5353535353535354, + "acc_norm_stderr": 0.035534363688280626 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4689655172413793, + "acc_stderr": 0.04158632762097828, + "acc_norm": 0.4689655172413793, + "acc_norm_stderr": 0.04158632762097828 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.04488482852329017, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.04488482852329017 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5336134453781513, + "acc_stderr": 0.03240501447690071, + "acc_norm": 0.5336134453781513, + "acc_norm_stderr": 0.03240501447690071 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.46923076923076923, + "acc_stderr": 0.025302958890850154, + "acc_norm": 0.46923076923076923, + "acc_norm_stderr": 0.025302958890850154 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.57, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.57, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.48148148148148145, + 
"acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4433497536945813, + "acc_stderr": 0.034953345821629324, + "acc_norm": 0.4433497536945813, + "acc_norm_stderr": 0.034953345821629324 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4612903225806452, + "acc_stderr": 0.028358634859836914, + "acc_norm": 0.4612903225806452, + "acc_norm_stderr": 0.028358634859836914 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7435897435897436, + "acc_stderr": 0.028605953702004243, + "acc_norm": 0.7435897435897436, + "acc_norm_stderr": 0.028605953702004243 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4490566037735849, + "acc_stderr": 0.030612730713641095, + "acc_norm": 0.4490566037735849, + "acc_norm_stderr": 0.030612730713641095 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5, + "acc_stderr": 0.04789131426105757, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04789131426105757 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.32592592592592595, + "acc_stderr": 0.028578348365473075, + "acc_norm": 0.32592592592592595, + "acc_norm_stderr": 0.028578348365473075 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2582781456953642, + "acc_stderr": 0.035737053147634576, + "acc_norm": 0.2582781456953642, + "acc_norm_stderr": 0.035737053147634576 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5920398009950248, + "acc_stderr": 0.03475116365194092, + "acc_norm": 0.5920398009950248, + "acc_norm_stderr": 0.03475116365194092 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3930635838150289, + "acc_stderr": 0.03724249595817729, + "acc_norm": 0.3930635838150289, + "acc_norm_stderr": 0.03724249595817729 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.36772486772486773, + "acc_stderr": 0.02483383982556242, + "acc_norm": 0.36772486772486773, + "acc_norm_stderr": 0.02483383982556242 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3472222222222222, + 
"acc_stderr": 0.039812405437178615, + "acc_norm": 0.3472222222222222, + "acc_norm_stderr": 0.039812405437178615 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.48265895953757226, + "acc_stderr": 0.02690290045866664, + "acc_norm": 0.48265895953757226, + "acc_norm_stderr": 0.02690290045866664 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.50920245398773, + "acc_stderr": 0.039277056007874414, + "acc_norm": 0.50920245398773, + "acc_norm_stderr": 0.039277056007874414 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.44753086419753085, + "acc_stderr": 0.02766713856942271, + "acc_norm": 0.44753086419753085, + "acc_norm_stderr": 0.02766713856942271 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5233160621761658, + "acc_stderr": 0.03604513672442203, + "acc_norm": 0.5233160621761658, + "acc_norm_stderr": 0.03604513672442203 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2894736842105263, + "acc_stderr": 0.04266339443159395, + "acc_norm": 0.2894736842105263, + "acc_norm_stderr": 0.04266339443159395 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.4972477064220184, + "acc_stderr": 0.02143699835976532, + "acc_norm": 0.4972477064220184, + "acc_norm_stderr": 0.02143699835976532 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.044444444444444495, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.044444444444444495 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4934640522875817, + 
"acc_stderr": 0.028627470550556054, + "acc_norm": 0.4934640522875817, + "acc_norm_stderr": 0.028627470550556054 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6859504132231405, + "acc_stderr": 0.04236964753041018, + "acc_norm": 0.6859504132231405, + "acc_norm_stderr": 0.04236964753041018 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4144736842105263, + "acc_stderr": 0.04008973785779206, + "acc_norm": 0.4144736842105263, + "acc_norm_stderr": 0.04008973785779206 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.39215686274509803, + "acc_stderr": 0.019751726508762637, + "acc_norm": 0.39215686274509803, + "acc_norm_stderr": 0.019751726508762637 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3262411347517731, + "acc_stderr": 0.027968453043563168, + "acc_norm": 0.3262411347517731, + "acc_norm_stderr": 0.027968453043563168 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3482142857142857, + "acc_stderr": 0.045218299028335865, + "acc_norm": 0.3482142857142857, + "acc_norm_stderr": 0.045218299028335865 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.44907407407407407, + "acc_stderr": 0.03392238405321617, + "acc_norm": 0.44907407407407407, + "acc_norm_stderr": 0.03392238405321617 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.30837988826815643, + "acc_stderr": 0.01544571691099888, + "acc_norm": 0.30837988826815643, + "acc_norm_stderr": 0.01544571691099888 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.62, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.62, + "acc_norm_stderr": 0.048783173121456316 + }, + 
"harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3786764705882353, + "acc_stderr": 0.029465133639776125, + "acc_norm": 0.3786764705882353, + "acc_norm_stderr": 0.029465133639776125 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5510204081632653, + "acc_stderr": 0.03184213866687578, + "acc_norm": 0.5510204081632653, + "acc_norm_stderr": 0.03184213866687578 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.510548523206751, + "acc_stderr": 0.032539983791662855, + "acc_norm": 0.510548523206751, + "acc_norm_stderr": 0.032539983791662855 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3318122555410691, + "acc_stderr": 0.012026088259897634, + "acc_norm": 0.3318122555410691, + "acc_norm_stderr": 0.012026088259897634 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4117647058823529, + "acc_stderr": 0.03454236585380609, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.03454236585380609 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.46060606060606063, + "acc_stderr": 0.03892207016552013, + "acc_norm": 0.46060606060606063, + "acc_norm_stderr": 0.03892207016552013 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2802937576499388, + "mc1_stderr": 0.015723139524608742, + "mc2": 0.4538855040890016, + "mc2_stderr": 0.015473472871845475 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5371900826446281, + "acc_stderr": 0.017142736117643304, + "acc_norm": 0.5796930342384888, + "acc_norm_stderr": 0.01697059828117771 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + 
"harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + 
"harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "BM-K/mistral-7b-it-v1.3.0", + "model_sha": "995ca1c4360613685103c646f290b0062770ec7b", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/BM-K/mistral-7b-it-v1.5.0/result_2023-11-16 23:47:33.json b/BM-K/mistral-7b-it-v1.5.0/result_2023-11-16 23:47:33.json new file mode 100644 index 0000000000000000000000000000000000000000..cd5e57126b5dded8fd760c56a4e69784828a3510 --- /dev/null +++ b/BM-K/mistral-7b-it-v1.5.0/result_2023-11-16 23:47:33.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.34044368600682595, + "acc_stderr": 0.01384746051889298, + "acc_norm": 0.37457337883959047, + "acc_norm_stderr": 0.014144193471893446 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3906592312288389, + "acc_stderr": 0.004869010152280755, + "acc_norm": 0.5010953993228441, + "acc_norm_stderr": 0.004989769436956922 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.47368421052631576, + "acc_stderr": 0.038295098689947286, + "acc_norm": 0.47368421052631576, + "acc_norm_stderr": 0.038295098689947286 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5922330097087378, + "acc_stderr": 0.04865777570410768, + "acc_norm": 0.5922330097087378, + "acc_norm_stderr": 0.04865777570410768 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4648786717752235, + "acc_stderr": 0.01783579880629064, + "acc_norm": 0.4648786717752235, + "acc_norm_stderr": 0.01783579880629064 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.362962962962963, + "acc_stderr": 0.04153948404742398, + "acc_norm": 0.362962962962963, 
+ "acc_norm_stderr": 0.04153948404742398 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.40425531914893614, + "acc_stderr": 0.03208115750788684, + "acc_norm": 0.40425531914893614, + "acc_norm_stderr": 0.03208115750788684 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42168674698795183, + "acc_stderr": 0.03844453181770917, + "acc_norm": 0.42168674698795183, + "acc_norm_stderr": 0.03844453181770917 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4919614147909968, + "acc_stderr": 0.028394421370984548, + "acc_norm": 0.4919614147909968, + "acc_norm_stderr": 0.028394421370984548 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.452914798206278, + "acc_stderr": 0.03340867501923325, + "acc_norm": 0.452914798206278, + "acc_norm_stderr": 0.03340867501923325 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5038167938931297, + "acc_stderr": 0.043851623256015534, + "acc_norm": 0.5038167938931297, + "acc_norm_stderr": 0.043851623256015534 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5909090909090909, + "acc_stderr": 0.03502975799413008, + "acc_norm": 0.5909090909090909, + "acc_norm_stderr": 0.03502975799413008 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.496551724137931, + "acc_stderr": 0.041665675771015785, + "acc_norm": 0.496551724137931, + "acc_norm_stderr": 0.041665675771015785 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.04389869956808778, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.04389869956808778 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5084033613445378, + "acc_stderr": 0.0324739027656967, + "acc_norm": 
0.5084033613445378, + "acc_norm_stderr": 0.0324739027656967 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4641025641025641, + "acc_stderr": 0.025285585990017834, + "acc_norm": 0.4641025641025641, + "acc_norm_stderr": 0.025285585990017834 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.53, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.53, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.04826217294139894, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.04826217294139894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4236453201970443, + "acc_stderr": 0.03476725747649038, + "acc_norm": 0.4236453201970443, + "acc_norm_stderr": 0.03476725747649038 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.49032258064516127, + "acc_stderr": 0.02843867799890955, + "acc_norm": 0.49032258064516127, + "acc_norm_stderr": 0.02843867799890955 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6965811965811965, + "acc_stderr": 0.030118210106942638, + "acc_norm": 0.6965811965811965, + "acc_norm_stderr": 0.030118210106942638 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4528301886792453, + "acc_stderr": 0.03063562795796183, + "acc_norm": 0.4528301886792453, + "acc_norm_stderr": 0.03063562795796183 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.32592592592592595, + "acc_stderr": 0.02857834836547308, + "acc_norm": 0.32592592592592595, + "acc_norm_stderr": 0.02857834836547308 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + 
"acc_stderr": 0.037345356767871984, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.037345356767871984 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6069651741293532, + "acc_stderr": 0.0345368246603156, + "acc_norm": 0.6069651741293532, + "acc_norm_stderr": 0.0345368246603156 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3872832369942196, + "acc_stderr": 0.03714325906302065, + "acc_norm": 0.3872832369942196, + "acc_norm_stderr": 0.03714325906302065 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.34656084656084657, + "acc_stderr": 0.024508777521028417, + "acc_norm": 0.34656084656084657, + "acc_norm_stderr": 0.024508777521028417 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3263888888888889, + "acc_stderr": 0.03921067198982266, + "acc_norm": 0.3263888888888889, + "acc_norm_stderr": 0.03921067198982266 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5260115606936416, + "acc_stderr": 0.026882643434022895, + "acc_norm": 0.5260115606936416, + "acc_norm_stderr": 0.026882643434022895 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.48466257668711654, + "acc_stderr": 0.03926522378708843, + "acc_norm": 0.48466257668711654, + "acc_norm_stderr": 0.03926522378708843 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4660493827160494, + "acc_stderr": 0.027756535257347666, + "acc_norm": 0.4660493827160494, + "acc_norm_stderr": 0.027756535257347666 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 
0.538860103626943, + "acc_stderr": 0.03597524411734578, + "acc_norm": 0.538860103626943, + "acc_norm_stderr": 0.03597524411734578 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3508771929824561, + "acc_stderr": 0.044895393502706986, + "acc_norm": 0.3508771929824561, + "acc_norm_stderr": 0.044895393502706986 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.47706422018348627, + "acc_stderr": 0.021414757058175506, + "acc_norm": 0.47706422018348627, + "acc_norm_stderr": 0.021414757058175506 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.36507936507936506, + "acc_stderr": 0.04306241259127153, + "acc_norm": 0.36507936507936506, + "acc_norm_stderr": 0.04306241259127153 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4738562091503268, + "acc_stderr": 0.028590752958852387, + "acc_norm": 0.4738562091503268, + "acc_norm_stderr": 0.028590752958852387 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6859504132231405, + "acc_stderr": 0.04236964753041018, + "acc_norm": 0.6859504132231405, + "acc_norm_stderr": 0.04236964753041018 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40131578947368424, + "acc_stderr": 0.03988903703336285, + "acc_norm": 0.40131578947368424, + "acc_norm_stderr": 0.03988903703336285 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3758169934640523, + "acc_stderr": 0.01959402113657745, + "acc_norm": 0.3758169934640523, + "acc_norm_stderr": 0.01959402113657745 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.36879432624113473, + "acc_stderr": 0.028782227561347233, + "acc_norm": 0.36879432624113473, + "acc_norm_stderr": 0.028782227561347233 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.45535714285714285, + "acc_stderr": 0.04726835553719099, + "acc_norm": 0.45535714285714285, + "acc_norm_stderr": 0.04726835553719099 + 
}, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.033247089118091176, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.033247089118091176 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27039106145251396, + "acc_stderr": 0.014854993938010078, + "acc_norm": 0.27039106145251396, + "acc_norm_stderr": 0.014854993938010078 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3860294117647059, + "acc_stderr": 0.029573269134411124, + "acc_norm": 0.3860294117647059, + "acc_norm_stderr": 0.029573269134411124 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5673469387755102, + "acc_stderr": 0.03171752824062665, + "acc_norm": 0.5673469387755102, + "acc_norm_stderr": 0.03171752824062665 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5611814345991561, + "acc_stderr": 0.032302649315470375, + "acc_norm": 0.5611814345991561, + "acc_norm_stderr": 0.032302649315470375 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3363754889178618, + "acc_stderr": 0.012067083079452225, + "acc_norm": 0.3363754889178618, + "acc_norm_stderr": 0.012067083079452225 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.43137254901960786, + "acc_stderr": 0.03476099060501636, + "acc_norm": 0.43137254901960786, + "acc_norm_stderr": 0.03476099060501636 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.45454545454545453, + "acc_stderr": 0.03888176921674098, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.03888176921674098 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2692778457772338, + "mc1_stderr": 
0.015528566637087307, + "mc2": 0.4312897833619012, + "mc2_stderr": 0.015536038118672747 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4037780401416765, + "acc_stderr": 0.01686903154029863, + "acc_norm": 0.42502951593860683, + "acc_norm_stderr": 0.016996016308362887 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + 
"harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "BM-K/mistral-7b-it-v1.5.0", + "model_sha": "59b094a8741371d220147b53e7536af0fcf27d2e", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/BM-K/mistral-7b-it-v1.6.0/result_2023-11-19 13:15:31.json b/BM-K/mistral-7b-it-v1.6.0/result_2023-11-19 13:15:31.json new file mode 100644 index 0000000000000000000000000000000000000000..7070772540166679df37f733b6c267dedbd89e59 --- /dev/null +++ b/BM-K/mistral-7b-it-v1.6.0/result_2023-11-19 13:15:31.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.34044368600682595, + "acc_stderr": 0.01384746051889298, + "acc_norm": 0.3728668941979522, + "acc_norm_stderr": 0.014131176760131163 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3867755427205736, + "acc_stderr": 0.00486016207633099, + "acc_norm": 0.4992033459470225, + "acc_norm_stderr": 
0.0049897750778356495 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4619883040935672, + "acc_stderr": 0.03823727092882307, + "acc_norm": 0.4619883040935672, + "acc_norm_stderr": 0.03823727092882307 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6310679611650486, + "acc_stderr": 0.0477761518115674, + "acc_norm": 0.6310679611650486, + "acc_norm_stderr": 0.0477761518115674 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.47509578544061304, + "acc_stderr": 0.017857770704901018, + "acc_norm": 0.47509578544061304, + "acc_norm_stderr": 0.017857770704901018 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34814814814814815, + "acc_stderr": 0.041153246103369526, + "acc_norm": 0.34814814814814815, + "acc_norm_stderr": 0.041153246103369526 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.37446808510638296, + "acc_stderr": 0.03163910665367291, + "acc_norm": 0.37446808510638296, + "acc_norm_stderr": 0.03163910665367291 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3674698795180723, + "acc_stderr": 0.03753267402120574, + "acc_norm": 0.3674698795180723, + "acc_norm_stderr": 0.03753267402120574 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.44694533762057875, + "acc_stderr": 0.028237769422085335, + "acc_norm": 0.44694533762057875, + "acc_norm_stderr": 0.028237769422085335 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4260089686098655, + "acc_stderr": 0.033188332862172806, + "acc_norm": 0.4260089686098655, + "acc_norm_stderr": 0.033188332862172806 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.42748091603053434, + "acc_stderr": 0.04338920305792401, + "acc_norm": 0.42748091603053434, + "acc_norm_stderr": 0.04338920305792401 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939098, + "acc_norm": 0.37, + "acc_norm_stderr": 
0.04852365870939098 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5505050505050505, + "acc_stderr": 0.035441324919479704, + "acc_norm": 0.5505050505050505, + "acc_norm_stderr": 0.035441324919479704 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4896551724137931, + "acc_stderr": 0.041657747757287644, + "acc_norm": 0.4896551724137931, + "acc_norm_stderr": 0.041657747757287644 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.04440521906179328, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.04440521906179328 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5210084033613446, + "acc_stderr": 0.03244980849990029, + "acc_norm": 0.5210084033613446, + "acc_norm_stderr": 0.03244980849990029 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.46153846153846156, + "acc_stderr": 0.025275892070240634, + "acc_norm": 0.46153846153846156, + "acc_norm_stderr": 0.025275892070240634 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.58, + "acc_stderr": 0.04960449637488583, + "acc_norm": 0.58, + "acc_norm_stderr": 0.04960449637488583 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.04803752235190193, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.04803752235190193 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3842364532019704, + "acc_stderr": 0.03422398565657553, + "acc_norm": 0.3842364532019704, + "acc_norm_stderr": 0.03422398565657553 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.44193548387096776, + "acc_stderr": 0.028251557906849724, + "acc_norm": 0.44193548387096776, + "acc_norm_stderr": 0.028251557906849724 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6923076923076923, + "acc_stderr": 0.030236389942173095, + 
"acc_norm": 0.6923076923076923, + "acc_norm_stderr": 0.030236389942173095 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.42641509433962266, + "acc_stderr": 0.030437794342983045, + "acc_norm": 0.42641509433962266, + "acc_norm_stderr": 0.030437794342983045 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5, + "acc_stderr": 0.04789131426105757, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04789131426105757 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.32222222222222224, + "acc_stderr": 0.028493465091028593, + "acc_norm": 0.32222222222222224, + "acc_norm_stderr": 0.028493465091028593 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.24503311258278146, + "acc_stderr": 0.03511807571804724, + "acc_norm": 0.24503311258278146, + "acc_norm_stderr": 0.03511807571804724 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5870646766169154, + "acc_stderr": 0.03481520803367348, + "acc_norm": 0.5870646766169154, + "acc_norm_stderr": 0.03481520803367348 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.37572254335260113, + "acc_stderr": 0.03692820767264867, + "acc_norm": 0.37572254335260113, + "acc_norm_stderr": 0.03692820767264867 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.36243386243386244, + "acc_stderr": 0.024757473902752056, + "acc_norm": 0.36243386243386244, + "acc_norm_stderr": 0.024757473902752056 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.040166600304512336, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.040166600304512336 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.55, + "acc_stderr": 0.05, + "acc_norm": 0.55, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.476878612716763, + "acc_stderr": 0.026890297881303125, + "acc_norm": 
0.476878612716763, + "acc_norm_stderr": 0.026890297881303125 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4539877300613497, + "acc_stderr": 0.0391170190467718, + "acc_norm": 0.4539877300613497, + "acc_norm_stderr": 0.0391170190467718 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.42592592592592593, + "acc_stderr": 0.027513747284379414, + "acc_norm": 0.42592592592592593, + "acc_norm_stderr": 0.027513747284379414 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.49222797927461137, + "acc_stderr": 0.03608003225569654, + "acc_norm": 0.49222797927461137, + "acc_norm_stderr": 0.03608003225569654 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3508771929824561, + "acc_stderr": 0.04489539350270699, + "acc_norm": 0.3508771929824561, + "acc_norm_stderr": 0.04489539350270699 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.47522935779816516, + "acc_stderr": 0.021410999753635918, + "acc_norm": 0.47522935779816516, + "acc_norm_stderr": 0.021410999753635918 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3968253968253968, + "acc_stderr": 0.043758884927270605, + "acc_norm": 0.3968253968253968, + "acc_norm_stderr": 0.043758884927270605 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4477124183006536, + "acc_stderr": 0.028472938478033522, + "acc_norm": 0.4477124183006536, + "acc_norm_stderr": 0.028472938478033522 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6446280991735537, + "acc_stderr": 0.0436923632657398, + "acc_norm": 0.6446280991735537, + "acc_norm_stderr": 0.0436923632657398 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40131578947368424, + "acc_stderr": 
0.03988903703336284, + "acc_norm": 0.40131578947368424, + "acc_norm_stderr": 0.03988903703336284 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.39052287581699346, + "acc_stderr": 0.019737008998094597, + "acc_norm": 0.39052287581699346, + "acc_norm_stderr": 0.019737008998094597 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.31560283687943264, + "acc_stderr": 0.027724989449509314, + "acc_norm": 0.31560283687943264, + "acc_norm_stderr": 0.027724989449509314 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.29464285714285715, + "acc_stderr": 0.04327040932578728, + "acc_norm": 0.29464285714285715, + "acc_norm_stderr": 0.04327040932578728 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.41203703703703703, + "acc_stderr": 0.03356787758160835, + "acc_norm": 0.41203703703703703, + "acc_norm_stderr": 0.03356787758160835 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2860335195530726, + "acc_stderr": 0.015113972129062125, + "acc_norm": 0.2860335195530726, + "acc_norm_stderr": 0.015113972129062125 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3235294117647059, + "acc_stderr": 0.02841820861940679, + "acc_norm": 0.3235294117647059, + "acc_norm_stderr": 0.02841820861940679 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5142857142857142, + "acc_stderr": 0.03199615232806286, + "acc_norm": 0.5142857142857142, + "acc_norm_stderr": 0.03199615232806286 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.540084388185654, + "acc_stderr": 0.03244246810187913, + "acc_norm": 0.540084388185654, + "acc_norm_stderr": 0.03244246810187913 + }, + 
"harness|ko_mmlu_professional_law|5": { + "acc": 0.3363754889178618, + "acc_stderr": 0.01206708307945223, + "acc_norm": 0.3363754889178618, + "acc_norm_stderr": 0.01206708307945223 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.034107853389047184, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.034107853389047184 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.44242424242424244, + "acc_stderr": 0.03878372113711274, + "acc_norm": 0.44242424242424244, + "acc_norm_stderr": 0.03878372113711274 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2802937576499388, + "mc1_stderr": 0.015723139524608742, + "mc2": 0.4436091279270421, + "mc2_stderr": 0.015994798162179236 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5194805194805194, + "acc_stderr": 0.01717730199234254, + "acc_norm": 0.5430932703659976, + "acc_norm_stderr": 0.017126389093086777 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + 
"harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "BM-K/mistral-7b-it-v1.6.0", + "model_sha": "b149b065fe748591389f5ce440e424a118880e26", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git 
a/BM-K/mistral-7b-it-v1.7.0/result_2023-11-20 09:22:14.json b/BM-K/mistral-7b-it-v1.7.0/result_2023-11-20 09:22:14.json new file mode 100644 index 0000000000000000000000000000000000000000..f37243c7fe16a115b6d13aab9e4168d9c1042971 --- /dev/null +++ b/BM-K/mistral-7b-it-v1.7.0/result_2023-11-20 09:22:14.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3447098976109215, + "acc_stderr": 0.01388881628678211, + "acc_norm": 0.386518771331058, + "acc_norm_stderr": 0.014230084761910474 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3873730332603067, + "acc_stderr": 0.004861544478451848, + "acc_norm": 0.5089623580959968, + "acc_norm_stderr": 0.004988979750014428 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.49122807017543857, + "acc_stderr": 0.038342347441649924, + "acc_norm": 0.49122807017543857, + "acc_norm_stderr": 0.038342347441649924 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6019417475728155, + "acc_stderr": 0.04846748253977238, + "acc_norm": 0.6019417475728155, + "acc_norm_stderr": 0.04846748253977238 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4827586206896552, + "acc_stderr": 0.017869330154003705, + "acc_norm": 0.4827586206896552, + "acc_norm_stderr": 0.017869330154003705 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.362962962962963, + "acc_stderr": 0.041539484047424, + "acc_norm": 0.362962962962963, + "acc_norm_stderr": 0.041539484047424 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.41702127659574467, + "acc_stderr": 0.03223276266711712, + "acc_norm": 0.41702127659574467, + "acc_norm_stderr": 0.03223276266711712 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39156626506024095, + "acc_stderr": 0.03799857454479636, + "acc_norm": 0.39156626506024095, + "acc_norm_stderr": 0.03799857454479636 + }, + 
"harness|ko_mmlu_philosophy|5": { + "acc": 0.4662379421221865, + "acc_stderr": 0.02833327710956278, + "acc_norm": 0.4662379421221865, + "acc_norm_stderr": 0.02833327710956278 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.43946188340807174, + "acc_stderr": 0.03331092511038179, + "acc_norm": 0.43946188340807174, + "acc_norm_stderr": 0.03331092511038179 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.42748091603053434, + "acc_stderr": 0.043389203057924, + "acc_norm": 0.42748091603053434, + "acc_norm_stderr": 0.043389203057924 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5353535353535354, + "acc_stderr": 0.03553436368828063, + "acc_norm": 0.5353535353535354, + "acc_norm_stderr": 0.03553436368828063 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4896551724137931, + "acc_stderr": 0.041657747757287644, + "acc_norm": 0.4896551724137931, + "acc_norm_stderr": 0.041657747757287644 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.04533838195929778, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.04533838195929778 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5546218487394958, + "acc_stderr": 0.0322841062671639, + "acc_norm": 0.5546218487394958, + "acc_norm_stderr": 0.0322841062671639 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.45897435897435895, + "acc_stderr": 0.025265525491284295, + "acc_norm": 0.45897435897435895, + "acc_norm_stderr": 0.025265525491284295 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.56, + "acc_stderr": 0.0498887651569859, + "acc_norm": 0.56, + "acc_norm_stderr": 0.0498887651569859 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + 
}, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4088669950738916, + "acc_stderr": 0.0345905881588323, + "acc_norm": 0.4088669950738916, + "acc_norm_stderr": 0.0345905881588323 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4774193548387097, + "acc_stderr": 0.028414985019707868, + "acc_norm": 0.4774193548387097, + "acc_norm_stderr": 0.028414985019707868 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7307692307692307, + "acc_stderr": 0.029058588303748842, + "acc_norm": 0.7307692307692307, + "acc_norm_stderr": 0.029058588303748842 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.41509433962264153, + "acc_stderr": 0.030325945789286105, + "acc_norm": 0.41509433962264153, + "acc_norm_stderr": 0.030325945789286105 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.0478833976870286, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.0478833976870286 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.028742040903948482, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.028742040903948482 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.26490066225165565, + "acc_stderr": 0.03603038545360384, + "acc_norm": 0.26490066225165565, + "acc_norm_stderr": 0.03603038545360384 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6119402985074627, + "acc_stderr": 0.03445789964362749, + "acc_norm": 0.6119402985074627, + "acc_norm_stderr": 0.03445789964362749 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3930635838150289, + "acc_stderr": 0.03724249595817729, + "acc_norm": 0.3930635838150289, + "acc_norm_stderr": 0.03724249595817729 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.35714285714285715, + "acc_stderr": 
0.024677862841332783, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.024677862841332783 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.04076663253918567, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.04076663253918567 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.47398843930635837, + "acc_stderr": 0.026882643434022885, + "acc_norm": 0.47398843930635837, + "acc_norm_stderr": 0.026882643434022885 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.49693251533742333, + "acc_stderr": 0.03928297078179662, + "acc_norm": 0.49693251533742333, + "acc_norm_stderr": 0.03928297078179662 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4537037037037037, + "acc_stderr": 0.0277012284685426, + "acc_norm": 0.4537037037037037, + "acc_norm_stderr": 0.0277012284685426 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5129533678756477, + "acc_stderr": 0.03607228061047749, + "acc_norm": 0.5129533678756477, + "acc_norm_stderr": 0.03607228061047749 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.041857744240220575, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.041857744240220575 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.48990825688073397, + "acc_stderr": 0.021432956203453316, + "acc_norm": 0.48990825688073397, + "acc_norm_stderr": 0.021432956203453316 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4603174603174603, + 
"acc_stderr": 0.04458029125470973, + "acc_norm": 0.4603174603174603, + "acc_norm_stderr": 0.04458029125470973 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5032679738562091, + "acc_stderr": 0.02862930519400355, + "acc_norm": 0.5032679738562091, + "acc_norm_stderr": 0.02862930519400355 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6859504132231405, + "acc_stderr": 0.04236964753041018, + "acc_norm": 0.6859504132231405, + "acc_norm_stderr": 0.04236964753041018 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.42105263157894735, + "acc_stderr": 0.04017901275981748, + "acc_norm": 0.42105263157894735, + "acc_norm_stderr": 0.04017901275981748 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.39869281045751637, + "acc_stderr": 0.01980828131744984, + "acc_norm": 0.39869281045751637, + "acc_norm_stderr": 0.01980828131744984 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.31560283687943264, + "acc_stderr": 0.027724989449509314, + "acc_norm": 0.31560283687943264, + "acc_norm_stderr": 0.027724989449509314 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3482142857142857, + "acc_stderr": 0.045218299028335865, + "acc_norm": 0.3482142857142857, + "acc_norm_stderr": 0.045218299028335865 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4398148148148148, + "acc_stderr": 0.03385177976044811, + "acc_norm": 0.4398148148148148, + "acc_norm_stderr": 0.03385177976044811 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2748603351955307, + "acc_stderr": 0.014931316703220517, + "acc_norm": 0.2748603351955307, + "acc_norm_stderr": 0.014931316703220517 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + 
"harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3492647058823529, + "acc_stderr": 0.028959755196824862, + "acc_norm": 0.3492647058823529, + "acc_norm_stderr": 0.028959755196824862 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5346938775510204, + "acc_stderr": 0.03193207024425314, + "acc_norm": 0.5346938775510204, + "acc_norm_stderr": 0.03193207024425314 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5738396624472574, + "acc_stderr": 0.03219035703131774, + "acc_norm": 0.5738396624472574, + "acc_norm_stderr": 0.03219035703131774 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.33116036505867014, + "acc_stderr": 0.012020128195985774, + "acc_norm": 0.33116036505867014, + "acc_norm_stderr": 0.012020128195985774 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.034849415144292316, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.034849415144292316 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.03895658065271846, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.03895658065271846 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3023255813953488, + "mc1_stderr": 0.016077509266133033, + "mc2": 0.4714881280704747, + "mc2_stderr": 0.01554412183162796 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5230224321133412, + "acc_stderr": 0.017172121546727634, + "acc_norm": 0.577331759149941, + "acc_norm_stderr": 0.016983506079577604 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + 
"harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + 
"harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "BM-K/mistral-7b-it-v1.7.0", + "model_sha": "f62174ae285bf46cc453305f1e0b76899a8bcf82", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/BM-K/mistral-7b-it-v1.7.1/result_2023-11-21 00:39:48.json b/BM-K/mistral-7b-it-v1.7.1/result_2023-11-21 00:39:48.json new file mode 100644 index 0000000000000000000000000000000000000000..15007c601f1e8b8413cca3350d78e53d76fa95ab --- /dev/null +++ b/BM-K/mistral-7b-it-v1.7.1/result_2023-11-21 00:39:48.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3455631399317406, + "acc_stderr": 0.013896938461145685, + "acc_norm": 0.39590443686006827, + "acc_norm_stderr": 0.014291228393536587 + }, + "harness|ko_hellaswag|10": { + "acc": 0.38717386974706236, + "acc_stderr": 0.00486108453408704, + "acc_norm": 0.5087631945827524, + "acc_norm_stderr": 0.004989014986235632 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4853801169590643, + "acc_stderr": 0.038331852752130205, + "acc_norm": 0.4853801169590643, + "acc_norm_stderr": 0.038331852752130205 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6116504854368932, + "acc_stderr": 0.048257293373563895, + "acc_norm": 0.6116504854368932, + "acc_norm_stderr": 0.048257293373563895 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4763729246487867, + "acc_stderr": 0.017859989765176453, + "acc_norm": 0.4763729246487867, + "acc_norm_stderr": 
0.017859989765176453 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.35555555555555557, + "acc_stderr": 0.04135176749720386, + "acc_norm": 0.35555555555555557, + "acc_norm_stderr": 0.04135176749720386 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.41702127659574467, + "acc_stderr": 0.032232762667117124, + "acc_norm": 0.41702127659574467, + "acc_norm_stderr": 0.032232762667117124 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3795180722891566, + "acc_stderr": 0.037777988227480165, + "acc_norm": 0.3795180722891566, + "acc_norm_stderr": 0.037777988227480165 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4983922829581994, + "acc_stderr": 0.02839794490780661, + "acc_norm": 0.4983922829581994, + "acc_norm_stderr": 0.02839794490780661 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4349775784753363, + "acc_stderr": 0.03327283370271344, + "acc_norm": 0.4349775784753363, + "acc_norm_stderr": 0.03327283370271344 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4580152671755725, + "acc_stderr": 0.04369802690578756, + "acc_norm": 0.4580152671755725, + "acc_norm_stderr": 0.04369802690578756 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.03547601494006938, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.03547601494006938 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.45517241379310347, + "acc_stderr": 0.04149886942192117, + "acc_norm": 0.45517241379310347, + "acc_norm_stderr": 0.04149886942192117 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.043364327079931785, + "acc_norm": 0.2549019607843137, + 
"acc_norm_stderr": 0.043364327079931785 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5294117647058824, + "acc_stderr": 0.03242225027115006, + "acc_norm": 0.5294117647058824, + "acc_norm_stderr": 0.03242225027115006 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.45384615384615384, + "acc_stderr": 0.025242770987126174, + "acc_norm": 0.45384615384615384, + "acc_norm_stderr": 0.025242770987126174 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.55, + "acc_stderr": 0.04999999999999998, + "acc_norm": 0.55, + "acc_norm_stderr": 0.04999999999999998 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.03481904844438803, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.03481904844438803 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.46774193548387094, + "acc_stderr": 0.02838474778881333, + "acc_norm": 0.46774193548387094, + "acc_norm_stderr": 0.02838474778881333 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7393162393162394, + "acc_stderr": 0.02876034895652341, + "acc_norm": 0.7393162393162394, + "acc_norm_stderr": 0.02876034895652341 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.44528301886792454, + "acc_stderr": 0.030588052974270658, + "acc_norm": 0.44528301886792454, + "acc_norm_stderr": 0.030588052974270658 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4909090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.4909090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.34074074074074073, + "acc_stderr": 0.02889774874113114, + "acc_norm": 
0.34074074074074073, + "acc_norm_stderr": 0.02889774874113114 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + "acc_stderr": 0.03684881521389024, + "acc_norm": 0.2847682119205298, + "acc_norm_stderr": 0.03684881521389024 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5870646766169154, + "acc_stderr": 0.03481520803367348, + "acc_norm": 0.5870646766169154, + "acc_norm_stderr": 0.03481520803367348 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4161849710982659, + "acc_stderr": 0.03758517775404947, + "acc_norm": 0.4161849710982659, + "acc_norm_stderr": 0.03758517775404947 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.36507936507936506, + "acc_stderr": 0.024796060602699954, + "acc_norm": 0.36507936507936506, + "acc_norm_stderr": 0.024796060602699954 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3680555555555556, + "acc_stderr": 0.040329990539607195, + "acc_norm": 0.3680555555555556, + "acc_norm_stderr": 0.040329990539607195 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.64, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.64, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4624277456647399, + "acc_stderr": 0.026842985519615375, + "acc_norm": 0.4624277456647399, + "acc_norm_stderr": 0.026842985519615375 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5521472392638037, + "acc_stderr": 0.03906947479456607, + "acc_norm": 0.5521472392638037, + "acc_norm_stderr": 0.03906947479456607 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.44135802469135804, + "acc_stderr": 0.027628737155668777, + "acc_norm": 0.44135802469135804, + "acc_norm_stderr": 0.027628737155668777 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + 
"acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.538860103626943, + "acc_stderr": 0.035975244117345775, + "acc_norm": 0.538860103626943, + "acc_norm_stderr": 0.035975244117345775 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.041857744240220575, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.041857744240220575 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.4990825688073395, + "acc_stderr": 0.021437287056051215, + "acc_norm": 0.4990825688073395, + "acc_norm_stderr": 0.021437287056051215 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4126984126984127, + "acc_stderr": 0.04403438954768177, + "acc_norm": 0.4126984126984127, + "acc_norm_stderr": 0.04403438954768177 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.48366013071895425, + "acc_stderr": 0.028614624752805407, + "acc_norm": 0.48366013071895425, + "acc_norm_stderr": 0.028614624752805407 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6694214876033058, + "acc_stderr": 0.04294340845212094, + "acc_norm": 0.6694214876033058, + "acc_norm_stderr": 0.04294340845212094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40789473684210525, + "acc_stderr": 0.03999309712777472, + "acc_norm": 0.40789473684210525, + "acc_norm_stderr": 0.03999309712777472 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4035947712418301, + "acc_stderr": 0.019848280168401147, + "acc_norm": 0.4035947712418301, + "acc_norm_stderr": 0.019848280168401147 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.34397163120567376, + "acc_stderr": 0.028338017428611306, + "acc_norm": 0.34397163120567376, + "acc_norm_stderr": 0.028338017428611306 + }, + "harness|ko_mmlu_machine_learning|5": { + 
"acc": 0.375, + "acc_stderr": 0.04595091388086298, + "acc_norm": 0.375, + "acc_norm_stderr": 0.04595091388086298 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4305555555555556, + "acc_stderr": 0.03376922151252336, + "acc_norm": 0.4305555555555556, + "acc_norm_stderr": 0.03376922151252336 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27150837988826815, + "acc_stderr": 0.014874252168095285, + "acc_norm": 0.27150837988826815, + "acc_norm_stderr": 0.014874252168095285 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3860294117647059, + "acc_stderr": 0.029573269134411127, + "acc_norm": 0.3860294117647059, + "acc_norm_stderr": 0.029573269134411127 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5551020408163265, + "acc_stderr": 0.031814251181977865, + "acc_norm": 0.5551020408163265, + "acc_norm_stderr": 0.031814251181977865 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5569620253164557, + "acc_stderr": 0.03233532777533484, + "acc_norm": 0.5569620253164557, + "acc_norm_stderr": 0.03233532777533484 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3259452411994785, + "acc_stderr": 0.01197150729498278, + "acc_norm": 0.3259452411994785, + "acc_norm_stderr": 0.01197150729498278 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4264705882352941, + "acc_stderr": 0.034711579079534254, + "acc_norm": 0.4264705882352941, + "acc_norm_stderr": 0.034711579079534254 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.46060606060606063, + "acc_stderr": 0.03892207016552013, + "acc_norm": 0.46060606060606063, + "acc_norm_stderr": 
0.03892207016552013 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.28886168910648713, + "mc1_stderr": 0.0158663464013843, + "mc2": 0.46609022121434857, + "mc2_stderr": 0.015577378664296664 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5112160566706021, + "acc_stderr": 0.01718602846948929, + "acc_norm": 0.564344746162928, + "acc_norm_stderr": 0.017047415229476323 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + 
"harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "BM-K/mistral-7b-it-v1.7.1", + "model_sha": "d9b8182352893418c91400683819f41d7be1e292", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/BM-K/mistral-7b-it-v1.7.2/result_2023-11-30 06:16:02.json b/BM-K/mistral-7b-it-v1.7.2/result_2023-11-30 06:16:02.json new file mode 100644 index 0000000000000000000000000000000000000000..b1564b599ed7912f00cbf8a3b2326986c647a44e --- /dev/null +++ b/BM-K/mistral-7b-it-v1.7.2/result_2023-11-30 06:16:02.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3660409556313993, + "acc_stderr": 0.01407722310847014, + "acc_norm": 0.41467576791808874, + "acc_norm_stderr": 0.014397070564409175 + }, + "harness|ko_hellaswag|10": { + "acc": 
0.3996215893248357, + "acc_stderr": 0.004888194985997395, + "acc_norm": 0.5103565026887075, + "acc_norm_stderr": 0.004988710917169331 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.543859649122807, + "acc_stderr": 0.038200425866029654, + "acc_norm": 0.543859649122807, + "acc_norm_stderr": 0.038200425866029654 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5631067961165048, + "acc_stderr": 0.04911147107365777, + "acc_norm": 0.5631067961165048, + "acc_norm_stderr": 0.04911147107365777 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5134099616858238, + "acc_stderr": 0.017873531736510354, + "acc_norm": 0.5134099616858238, + "acc_norm_stderr": 0.017873531736510354 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.04244633238353229, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.04244633238353229 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.42127659574468085, + "acc_stderr": 0.03227834510146268, + "acc_norm": 0.42127659574468085, + "acc_norm_stderr": 0.03227834510146268 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42168674698795183, + "acc_stderr": 0.03844453181770917, + "acc_norm": 0.42168674698795183, + "acc_norm_stderr": 0.03844453181770917 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4887459807073955, + "acc_stderr": 0.02839089739686352, + "acc_norm": 0.4887459807073955, + "acc_norm_stderr": 0.02839089739686352 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.452914798206278, + "acc_stderr": 0.033408675019233246, + "acc_norm": 0.452914798206278, + "acc_norm_stderr": 0.033408675019233246 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4122137404580153, + "acc_stderr": 0.04317171194870255, + "acc_norm": 0.4122137404580153, + "acc_norm_stderr": 0.04317171194870255 + }, + "harness|ko_mmlu_medical_genetics|5": 
{ + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5808080808080808, + "acc_stderr": 0.03515520728670417, + "acc_norm": 0.5808080808080808, + "acc_norm_stderr": 0.03515520728670417 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4689655172413793, + "acc_stderr": 0.04158632762097828, + "acc_norm": 0.4689655172413793, + "acc_norm_stderr": 0.04158632762097828 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.043364327079931785, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.043364327079931785 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.48739495798319327, + "acc_stderr": 0.03246816765752174, + "acc_norm": 0.48739495798319327, + "acc_norm_stderr": 0.03246816765752174 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.45897435897435895, + "acc_stderr": 0.025265525491284295, + "acc_norm": 0.45897435897435895, + "acc_norm_stderr": 0.025265525491284295 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5092592592592593, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.5092592592592593, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4187192118226601, + "acc_stderr": 0.03471192860518468, + "acc_norm": 0.4187192118226601, + "acc_norm_stderr": 0.03471192860518468 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.47419354838709676, + "acc_stderr": 0.028406095057653315, + "acc_norm": 0.47419354838709676, + "acc_norm_stderr": 0.028406095057653315 + }, + 
"harness|ko_mmlu_marketing|5": { + "acc": 0.7094017094017094, + "acc_stderr": 0.029745048572674078, + "acc_norm": 0.7094017094017094, + "acc_norm_stderr": 0.029745048572674078 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4528301886792453, + "acc_stderr": 0.03063562795796182, + "acc_norm": 0.4528301886792453, + "acc_norm_stderr": 0.03063562795796182 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4818181818181818, + "acc_stderr": 0.04785964010794916, + "acc_norm": 0.4818181818181818, + "acc_norm_stderr": 0.04785964010794916 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.34074074074074073, + "acc_stderr": 0.02889774874113115, + "acc_norm": 0.34074074074074073, + "acc_norm_stderr": 0.02889774874113115 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2582781456953642, + "acc_stderr": 0.035737053147634576, + "acc_norm": 0.2582781456953642, + "acc_norm_stderr": 0.035737053147634576 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5970149253731343, + "acc_stderr": 0.034683432951111266, + "acc_norm": 0.5970149253731343, + "acc_norm_stderr": 0.034683432951111266 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3988439306358382, + "acc_stderr": 0.03733626655383509, + "acc_norm": 0.3988439306358382, + "acc_norm_stderr": 0.03733626655383509 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.0242785680243077, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.0242785680243077 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.375, + "acc_stderr": 0.04048439222695598, + "acc_norm": 0.375, + "acc_norm_stderr": 0.04048439222695598 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.63, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.63, + "acc_norm_stderr": 0.048523658709391 + }, + 
"harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5086705202312138, + "acc_stderr": 0.026915047355369804, + "acc_norm": 0.5086705202312138, + "acc_norm_stderr": 0.026915047355369804 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4662576687116564, + "acc_stderr": 0.03919415545048409, + "acc_norm": 0.4662576687116564, + "acc_norm_stderr": 0.03919415545048409 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.02774431344337654, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.02774431344337654 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5492227979274611, + "acc_stderr": 0.035909109522355244, + "acc_norm": 0.5492227979274611, + "acc_norm_stderr": 0.035909109522355244 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2894736842105263, + "acc_stderr": 0.04266339443159394, + "acc_norm": 0.2894736842105263, + "acc_norm_stderr": 0.04266339443159394 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.4972477064220184, + "acc_stderr": 0.02143699835976532, + "acc_norm": 0.4972477064220184, + "acc_norm_stderr": 0.02143699835976532 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3412698412698413, + "acc_stderr": 0.04240799327574924, + "acc_norm": 0.3412698412698413, + "acc_norm_stderr": 0.04240799327574924 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5065359477124183, + "acc_stderr": 0.028627470550556047, + "acc_norm": 0.5065359477124183, + "acc_norm_stderr": 0.028627470550556047 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7107438016528925, + "acc_stderr": 0.04139112727635464, + "acc_norm": 0.7107438016528925, + "acc_norm_stderr": 
0.04139112727635464 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40789473684210525, + "acc_stderr": 0.03999309712777471, + "acc_norm": 0.40789473684210525, + "acc_norm_stderr": 0.03999309712777471 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.40522875816993464, + "acc_stderr": 0.019861155193829153, + "acc_norm": 0.40522875816993464, + "acc_norm_stderr": 0.019861155193829153 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3404255319148936, + "acc_stderr": 0.028267657482650137, + "acc_norm": 0.3404255319148936, + "acc_norm_stderr": 0.028267657482650137 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.36607142857142855, + "acc_stderr": 0.0457237235873743, + "acc_norm": 0.36607142857142855, + "acc_norm_stderr": 0.0457237235873743 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.03214952147802749, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.03214952147802749 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.29608938547486036, + "acc_stderr": 0.015268677317602298, + "acc_norm": 0.29608938547486036, + "acc_norm_stderr": 0.015268677317602298 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.41911764705882354, + "acc_stderr": 0.029972807170464626, + "acc_norm": 0.41911764705882354, + "acc_norm_stderr": 0.029972807170464626 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5265306122448979, + "acc_stderr": 0.03196412734523272, + "acc_norm": 0.5265306122448979, + "acc_norm_stderr": 0.03196412734523272 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6160337552742616, + "acc_stderr": 
0.031658678064106674, + "acc_norm": 0.6160337552742616, + "acc_norm_stderr": 0.031658678064106674 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3435462842242503, + "acc_stderr": 0.012128961174190156, + "acc_norm": 0.3435462842242503, + "acc_norm_stderr": 0.012128961174190156 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.47549019607843135, + "acc_stderr": 0.035050931943487976, + "acc_norm": 0.47549019607843135, + "acc_norm_stderr": 0.035050931943487976 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.46060606060606063, + "acc_stderr": 0.03892207016552013, + "acc_norm": 0.46060606060606063, + "acc_norm_stderr": 0.03892207016552013 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2717258261933905, + "mc1_stderr": 0.015572840452875835, + "mc2": 0.4265196372341518, + "mc2_stderr": 0.015607268833983732 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3707201889020071, + "acc_stderr": 0.0166058012892126, + "acc_norm": 0.3955135773317591, + "acc_norm_stderr": 0.01681081590220604 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + 
"harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "BM-K/mistral-7b-it-v1.7.2", + "model_sha": "dbbe8fc186f5371e9ca608b9650e387b20de7ae3", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + 
"override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/BM-K/mistral-7b-it-v1.7.3/result_2023-12-04 23:53:23.json b/BM-K/mistral-7b-it-v1.7.3/result_2023-12-04 23:53:23.json new file mode 100644 index 0000000000000000000000000000000000000000..33826632a9c63e1d93747609071b74609b112975 --- /dev/null +++ b/BM-K/mistral-7b-it-v1.7.3/result_2023-12-04 23:53:23.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.34215017064846415, + "acc_stderr": 0.013864152159177278, + "acc_norm": 0.3771331058020478, + "acc_norm_stderr": 0.014163366896192598 + }, + "harness|ko_hellaswag|10": { + "acc": 0.37880900219079866, + "acc_stderr": 0.00484099059349469, + "acc_norm": 0.4870543716391157, + "acc_norm_stderr": 0.004988108663179766 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5029239766081871, + "acc_stderr": 0.03834759370936839, + "acc_norm": 0.5029239766081871, + "acc_norm_stderr": 0.03834759370936839 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5339805825242718, + "acc_stderr": 0.0493929144727348, + "acc_norm": 0.5339805825242718, + "acc_norm_stderr": 0.0493929144727348 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4623243933588761, + "acc_stderr": 0.017829131764287198, + "acc_norm": 0.4623243933588761, + "acc_norm_stderr": 0.017829131764287198 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.362962962962963, + "acc_stderr": 0.041539484047424, + "acc_norm": 0.362962962962963, + "acc_norm_stderr": 0.041539484047424 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3404255319148936, + "acc_stderr": 0.03097669299853441, + "acc_norm": 0.3404255319148936, + "acc_norm_stderr": 0.03097669299853441 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3192771084337349, + "acc_stderr": 0.03629335329947859, + "acc_norm": 
0.3192771084337349, + "acc_norm_stderr": 0.03629335329947859 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4437299035369775, + "acc_stderr": 0.02821768355665231, + "acc_norm": 0.4437299035369775, + "acc_norm_stderr": 0.02821768355665231 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.37668161434977576, + "acc_stderr": 0.03252113489929186, + "acc_norm": 0.37668161434977576, + "acc_norm_stderr": 0.03252113489929186 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.40458015267175573, + "acc_stderr": 0.043046937953806645, + "acc_norm": 0.40458015267175573, + "acc_norm_stderr": 0.043046937953806645 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5353535353535354, + "acc_stderr": 0.03553436368828061, + "acc_norm": 0.5353535353535354, + "acc_norm_stderr": 0.03553436368828061 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.42758620689655175, + "acc_stderr": 0.04122737111370333, + "acc_norm": 0.42758620689655175, + "acc_norm_stderr": 0.04122737111370333 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.04389869956808778, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.04389869956808778 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.49159663865546216, + "acc_stderr": 0.03247390276569669, + "acc_norm": 0.49159663865546216, + "acc_norm_stderr": 0.03247390276569669 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3871794871794872, + "acc_stderr": 0.02469721693087895, + "acc_norm": 0.3871794871794872, + "acc_norm_stderr": 0.02469721693087895 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 
0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.04826217294139894, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.04826217294139894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3842364532019704, + "acc_stderr": 0.034223985656575515, + "acc_norm": 0.3842364532019704, + "acc_norm_stderr": 0.034223985656575515 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4161290322580645, + "acc_stderr": 0.028040981380761543, + "acc_norm": 0.4161290322580645, + "acc_norm_stderr": 0.028040981380761543 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6923076923076923, + "acc_stderr": 0.03023638994217309, + "acc_norm": 0.6923076923076923, + "acc_norm_stderr": 0.03023638994217309 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.44150943396226416, + "acc_stderr": 0.030561590426731833, + "acc_norm": 0.44150943396226416, + "acc_norm_stderr": 0.030561590426731833 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4909090909090909, + "acc_stderr": 0.04788339768702862, + "acc_norm": 0.4909090909090909, + "acc_norm_stderr": 0.04788339768702862 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.027940457136228405, + "acc_norm": 0.3, + "acc_norm_stderr": 0.027940457136228405 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33112582781456956, + "acc_stderr": 0.038425817186598696, + "acc_norm": 0.33112582781456956, + "acc_norm_stderr": 0.038425817186598696 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5920398009950248, + "acc_stderr": 0.03475116365194092, + "acc_norm": 0.5920398009950248, + "acc_norm_stderr": 0.03475116365194092 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3236994219653179, + "acc_stderr": 0.035676037996391706, + "acc_norm": 0.3236994219653179, + "acc_norm_stderr": 0.035676037996391706 + }, + 
"harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30687830687830686, + "acc_stderr": 0.02375292871211214, + "acc_norm": 0.30687830687830686, + "acc_norm_stderr": 0.02375292871211214 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.375, + "acc_stderr": 0.04048439222695598, + "acc_norm": 0.375, + "acc_norm_stderr": 0.04048439222695598 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.63, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.63, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.44508670520231214, + "acc_stderr": 0.026756255129663765, + "acc_norm": 0.44508670520231214, + "acc_norm_stderr": 0.026756255129663765 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.49079754601226994, + "acc_stderr": 0.03927705600787443, + "acc_norm": 0.49079754601226994, + "acc_norm_stderr": 0.03927705600787443 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.41358024691358025, + "acc_stderr": 0.02740204204026994, + "acc_norm": 0.41358024691358025, + "acc_norm_stderr": 0.02740204204026994 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.46113989637305697, + "acc_stderr": 0.035975244117345775, + "acc_norm": 0.46113989637305697, + "acc_norm_stderr": 0.035975244117345775 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2894736842105263, + "acc_stderr": 0.04266339443159394, + "acc_norm": 0.2894736842105263, + "acc_norm_stderr": 0.04266339443159394 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.4917431192660551, + "acc_stderr": 0.021434399918214334, + "acc_norm": 0.4917431192660551, + "acc_norm_stderr": 0.021434399918214334 + }, + 
"harness|ko_mmlu_formal_logic|5": { + "acc": 0.38095238095238093, + "acc_stderr": 0.04343525428949098, + "acc_norm": 0.38095238095238093, + "acc_norm_stderr": 0.04343525428949098 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.45751633986928103, + "acc_stderr": 0.028526383452142628, + "acc_norm": 0.45751633986928103, + "acc_norm_stderr": 0.028526383452142628 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.043913262867240704, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.043913262867240704 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4407894736842105, + "acc_stderr": 0.04040311062490436, + "acc_norm": 0.4407894736842105, + "acc_norm_stderr": 0.04040311062490436 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3741830065359477, + "acc_stderr": 0.01957695312208883, + "acc_norm": 0.3741830065359477, + "acc_norm_stderr": 0.01957695312208883 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3120567375886525, + "acc_stderr": 0.02764012054516993, + "acc_norm": 0.3120567375886525, + "acc_norm_stderr": 0.02764012054516993 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3392857142857143, + "acc_stderr": 0.04493949068613539, + "acc_norm": 0.3392857142857143, + "acc_norm_stderr": 0.04493949068613539 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.03350991604696043, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.03350991604696043 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.22346368715083798, + "acc_stderr": 0.013932068638579771, + "acc_norm": 0.22346368715083798, + "acc_norm_stderr": 0.013932068638579771 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + 
"acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.2757352941176471, + "acc_stderr": 0.02714627193662517, + "acc_norm": 0.2757352941176471, + "acc_norm_stderr": 0.02714627193662517 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3836734693877551, + "acc_stderr": 0.031130880396235926, + "acc_norm": 0.3836734693877551, + "acc_norm_stderr": 0.031130880396235926 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5232067510548524, + "acc_stderr": 0.032512152011410174, + "acc_norm": 0.5232067510548524, + "acc_norm_stderr": 0.032512152011410174 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3324641460234681, + "acc_stderr": 0.012032022332260514, + "acc_norm": 0.3324641460234681, + "acc_norm_stderr": 0.012032022332260514 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.43137254901960786, + "acc_stderr": 0.03476099060501636, + "acc_norm": 0.43137254901960786, + "acc_norm_stderr": 0.03476099060501636 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4484848484848485, + "acc_stderr": 0.03883565977956929, + "acc_norm": 0.4484848484848485, + "acc_norm_stderr": 0.03883565977956929 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.26560587515299877, + "mc1_stderr": 0.0154610276272536, + "mc2": 0.42128232388140774, + "mc2_stderr": 0.015666521111746597 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.45336481700118064, + "acc_stderr": 0.017115418225226872, + "acc_norm": 0.49940968122786306, + "acc_norm_stderr": 0.017190342123448662 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + 
"harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + 
"harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "BM-K/mistral-7b-it-v1.7.3", + "model_sha": "485019f55449b1adffd92e017a85bfe922cac2dd", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/BM-K/mistral-ko-7b-it-v2.0.0/result_2023-12-23 00:56:47.json b/BM-K/mistral-ko-7b-it-v2.0.0/result_2023-12-23 00:56:47.json new file mode 100644 index 0000000000000000000000000000000000000000..4bde63b32c80f56968c7065f2cb13f71c188c9ee --- /dev/null +++ b/BM-K/mistral-ko-7b-it-v2.0.0/result_2023-12-23 00:56:47.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.34812286689419797, + "acc_stderr": 0.013921008595179344, + "acc_norm": 0.39078498293515357, + "acc_norm_stderr": 0.014258563880513778 + }, + "harness|ko_hellaswag|10": { + "acc": 0.37970523800039835, + "acc_stderr": 0.0048432163250902655, + "acc_norm": 0.4901414060944035, + "acc_norm_stderr": 0.004988811384747425 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.49122807017543857, + "acc_stderr": 0.038342347441649924, + "acc_norm": 0.49122807017543857, + "acc_norm_stderr": 0.038342347441649924 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5728155339805825, + "acc_stderr": 0.04897957737781168, + "acc_norm": 0.5728155339805825, + "acc_norm_stderr": 0.04897957737781168 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4725415070242657, + "acc_stderr": 0.017852981266633955, 
+ "acc_norm": 0.4725415070242657, + "acc_norm_stderr": 0.017852981266633955 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4222222222222222, + "acc_stderr": 0.04266763404099582, + "acc_norm": 0.4222222222222222, + "acc_norm_stderr": 0.04266763404099582 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39574468085106385, + "acc_stderr": 0.03196758697835362, + "acc_norm": 0.39574468085106385, + "acc_norm_stderr": 0.03196758697835362 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.41566265060240964, + "acc_stderr": 0.03836722176598052, + "acc_norm": 0.41566265060240964, + "acc_norm_stderr": 0.03836722176598052 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4662379421221865, + "acc_stderr": 0.028333277109562786, + "acc_norm": 0.4662379421221865, + "acc_norm_stderr": 0.028333277109562786 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.452914798206278, + "acc_stderr": 0.03340867501923325, + "acc_norm": 0.452914798206278, + "acc_norm_stderr": 0.03340867501923325 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.46564885496183206, + "acc_stderr": 0.04374928560599738, + "acc_norm": 0.46564885496183206, + "acc_norm_stderr": 0.04374928560599738 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.035402943770953675, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.035402943770953675 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4689655172413793, + "acc_stderr": 0.04158632762097828, + "acc_norm": 0.4689655172413793, + "acc_norm_stderr": 0.04158632762097828 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 
0.04280105837364395, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.04280105837364395 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5168067226890757, + "acc_stderr": 0.03246013680375308, + "acc_norm": 0.5168067226890757, + "acc_norm_stderr": 0.03246013680375308 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4641025641025641, + "acc_stderr": 0.025285585990017834, + "acc_norm": 0.4641025641025641, + "acc_norm_stderr": 0.025285585990017834 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.58, + "acc_stderr": 0.04960449637488583, + "acc_norm": 0.58, + "acc_norm_stderr": 0.04960449637488583 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.04803752235190192, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.04803752235190192 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.03481904844438803, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.03481904844438803 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.45806451612903226, + "acc_stderr": 0.028343787250540636, + "acc_norm": 0.45806451612903226, + "acc_norm_stderr": 0.028343787250540636 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7307692307692307, + "acc_stderr": 0.029058588303748845, + "acc_norm": 0.7307692307692307, + "acc_norm_stderr": 0.029058588303748845 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.44528301886792454, + "acc_stderr": 0.030588052974270655, + "acc_norm": 0.44528301886792454, + "acc_norm_stderr": 0.030588052974270655 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5363636363636364, + "acc_stderr": 0.04776449162396197, + "acc_norm": 0.5363636363636364, + "acc_norm_stderr": 0.04776449162396197 + }, + 
"harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.028742040903948485, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.028742040903948485 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.038020397601079024, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.038020397601079024 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6119402985074627, + "acc_stderr": 0.03445789964362749, + "acc_norm": 0.6119402985074627, + "acc_norm_stderr": 0.03445789964362749 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3583815028901734, + "acc_stderr": 0.036563436533531585, + "acc_norm": 0.3583815028901734, + "acc_norm_stderr": 0.036563436533531585 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.35978835978835977, + "acc_stderr": 0.024718075944129274, + "acc_norm": 0.35978835978835977, + "acc_norm_stderr": 0.024718075944129274 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4097222222222222, + "acc_stderr": 0.04112490974670787, + "acc_norm": 0.4097222222222222, + "acc_norm_stderr": 0.04112490974670787 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.57, + "acc_stderr": 0.04975698519562426, + "acc_norm": 0.57, + "acc_norm_stderr": 0.04975698519562426 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5317919075144508, + "acc_stderr": 0.02686462436675666, + "acc_norm": 0.5317919075144508, + "acc_norm_stderr": 0.02686462436675666 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5030674846625767, + "acc_stderr": 0.03928297078179663, + "acc_norm": 0.5030674846625767, + "acc_norm_stderr": 0.03928297078179663 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.42901234567901236, + "acc_stderr": 0.027538925613470867, + "acc_norm": 0.42901234567901236, + 
"acc_norm_stderr": 0.027538925613470867 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5233160621761658, + "acc_stderr": 0.03604513672442202, + "acc_norm": 0.5233160621761658, + "acc_norm_stderr": 0.03604513672442202 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.30701754385964913, + "acc_stderr": 0.043391383225798594, + "acc_norm": 0.30701754385964913, + "acc_norm_stderr": 0.043391383225798594 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.4917431192660551, + "acc_stderr": 0.021434399918214334, + "acc_norm": 0.4917431192660551, + "acc_norm_stderr": 0.021434399918214334 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.42063492063492064, + "acc_stderr": 0.04415438226743744, + "acc_norm": 0.42063492063492064, + "acc_norm_stderr": 0.04415438226743744 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.48366013071895425, + "acc_stderr": 0.028614624752805407, + "acc_norm": 0.48366013071895425, + "acc_norm_stderr": 0.028614624752805407 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.47, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.47, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7603305785123967, + "acc_stderr": 0.03896878985070417, + "acc_norm": 0.7603305785123967, + "acc_norm_stderr": 0.03896878985070417 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3881578947368421, + "acc_stderr": 0.03965842097512744, + "acc_norm": 0.3881578947368421, + "acc_norm_stderr": 0.03965842097512744 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4150326797385621, + "acc_stderr": 0.01993362777685742, + "acc_norm": 0.4150326797385621, + "acc_norm_stderr": 0.01993362777685742 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.34397163120567376, + "acc_stderr": 
0.028338017428611317, + "acc_norm": 0.34397163120567376, + "acc_norm_stderr": 0.028338017428611317 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.41964285714285715, + "acc_stderr": 0.04684099321077106, + "acc_norm": 0.41964285714285715, + "acc_norm_stderr": 0.04684099321077106 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.03350991604696043, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.03350991604696043 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.3039106145251397, + "acc_stderr": 0.015382845587584518, + "acc_norm": 0.3039106145251397, + "acc_norm_stderr": 0.015382845587584518 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3492647058823529, + "acc_stderr": 0.028959755196824866, + "acc_norm": 0.3492647058823529, + "acc_norm_stderr": 0.028959755196824866 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5673469387755102, + "acc_stderr": 0.031717528240626645, + "acc_norm": 0.5673469387755102, + "acc_norm_stderr": 0.031717528240626645 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5569620253164557, + "acc_stderr": 0.03233532777533484, + "acc_norm": 0.5569620253164557, + "acc_norm_stderr": 0.03233532777533484 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.32790091264667537, + "acc_stderr": 0.011989936640666544, + "acc_norm": 0.32790091264667537, + "acc_norm_stderr": 0.011989936640666544 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4215686274509804, + "acc_stderr": 0.03465868196380758, + "acc_norm": 0.4215686274509804, + "acc_norm_stderr": 0.03465868196380758 + }, + 
"harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.45454545454545453, + "acc_stderr": 0.03888176921674098, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.03888176921674098 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.26438188494492043, + "mc1_stderr": 0.015438211119522512, + "mc2": 0.43306568977437526, + "mc2_stderr": 0.015345933860590263 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4935064935064935, + "acc_stderr": 0.01718890435907731, + "acc_norm": 0.5348288075560803, + "acc_norm_stderr": 0.017148598015747425 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + 
"harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "BM-K/mistral-ko-7b-it-v2.0.0", + "model_sha": "344b5f989128dc9f7a1bb3a1e8bbfe8b50a4159d", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/BM-K/mistral-ko-7b-it-v2.0.1/result_2023-12-26 12:34:11.json b/BM-K/mistral-ko-7b-it-v2.0.1/result_2023-12-26 12:34:11.json new file mode 100644 index 0000000000000000000000000000000000000000..5921661ce1bafc157205e5758972e8eeea671cef --- /dev/null +++ b/BM-K/mistral-ko-7b-it-v2.0.1/result_2023-12-26 12:34:11.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 
0.3395904436860068, + "acc_stderr": 0.013839039762820164, + "acc_norm": 0.378839590443686, + "acc_norm_stderr": 0.014175915490000322 + }, + "harness|ko_hellaswag|10": { + "acc": 0.37890858394742083, + "acc_stderr": 0.004841238763529383, + "acc_norm": 0.4918342959569807, + "acc_norm_stderr": 0.004989115942570063 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4853801169590643, + "acc_stderr": 0.038331852752130205, + "acc_norm": 0.4853801169590643, + "acc_norm_stderr": 0.038331852752130205 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5728155339805825, + "acc_stderr": 0.04897957737781168, + "acc_norm": 0.5728155339805825, + "acc_norm_stderr": 0.04897957737781168 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.47509578544061304, + "acc_stderr": 0.017857770704901018, + "acc_norm": 0.47509578544061304, + "acc_norm_stderr": 0.017857770704901018 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4, + "acc_stderr": 0.04232073695151589, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04232073695151589 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816505, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816505 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.40425531914893614, + "acc_stderr": 0.03208115750788684, + "acc_norm": 0.40425531914893614, + "acc_norm_stderr": 0.03208115750788684 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39759036144578314, + "acc_stderr": 0.038099730845402184, + "acc_norm": 0.39759036144578314, + "acc_norm_stderr": 0.038099730845402184 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4919614147909968, + "acc_stderr": 0.028394421370984545, + "acc_norm": 0.4919614147909968, + "acc_norm_stderr": 0.028394421370984545 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4484304932735426, + "acc_stderr": 0.03337883736255099, + "acc_norm": 0.4484304932735426, + "acc_norm_stderr": 0.03337883736255099 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 
0.46564885496183206, + "acc_stderr": 0.04374928560599738, + "acc_norm": 0.46564885496183206, + "acc_norm_stderr": 0.04374928560599738 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.03547601494006938, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.03547601494006938 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4689655172413793, + "acc_stderr": 0.04158632762097828, + "acc_norm": 0.4689655172413793, + "acc_norm_stderr": 0.04158632762097828 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.04389869956808777, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.04389869956808777 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5504201680672269, + "acc_stderr": 0.03231293497137707, + "acc_norm": 0.5504201680672269, + "acc_norm_stderr": 0.03231293497137707 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4794871794871795, + "acc_stderr": 0.025329663163489943, + "acc_norm": 0.4794871794871795, + "acc_norm_stderr": 0.025329663163489943 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.57, + "acc_stderr": 0.04975698519562429, + "acc_norm": 0.57, + "acc_norm_stderr": 0.04975698519562429 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5462962962962963, + "acc_stderr": 0.04812917324536823, + "acc_norm": 0.5462962962962963, + "acc_norm_stderr": 0.04812917324536823 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4482758620689655, + "acc_stderr": 0.034991131376767445, + "acc_norm": 0.4482758620689655, + "acc_norm_stderr": 0.034991131376767445 + }, + 
"harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4838709677419355, + "acc_stderr": 0.028429203176724555, + "acc_norm": 0.4838709677419355, + "acc_norm_stderr": 0.028429203176724555 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7435897435897436, + "acc_stderr": 0.028605953702004243, + "acc_norm": 0.7435897435897436, + "acc_norm_stderr": 0.028605953702004243 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4377358490566038, + "acc_stderr": 0.030533338430467512, + "acc_norm": 0.4377358490566038, + "acc_norm_stderr": 0.030533338430467512 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5, + "acc_stderr": 0.04789131426105757, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04789131426105757 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.32222222222222224, + "acc_stderr": 0.028493465091028593, + "acc_norm": 0.32222222222222224, + "acc_norm_stderr": 0.028493465091028593 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + "acc_stderr": 0.03684881521389024, + "acc_norm": 0.2847682119205298, + "acc_norm_stderr": 0.03684881521389024 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5771144278606966, + "acc_stderr": 0.034932317774212816, + "acc_norm": 0.5771144278606966, + "acc_norm_stderr": 0.034932317774212816 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4046242774566474, + "acc_stderr": 0.03742461193887248, + "acc_norm": 0.4046242774566474, + "acc_norm_stderr": 0.03742461193887248 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.36243386243386244, + "acc_stderr": 0.024757473902752045, + "acc_norm": 0.36243386243386244, + "acc_norm_stderr": 0.024757473902752045 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.040166600304512336, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.040166600304512336 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + 
"acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.57, + "acc_stderr": 0.04975698519562426, + "acc_norm": 0.57, + "acc_norm_stderr": 0.04975698519562426 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4913294797687861, + "acc_stderr": 0.026915047355369804, + "acc_norm": 0.4913294797687861, + "acc_norm_stderr": 0.026915047355369804 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.50920245398773, + "acc_stderr": 0.03927705600787443, + "acc_norm": 0.50920245398773, + "acc_norm_stderr": 0.03927705600787443 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.027648477877413317, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.027648477877413317 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5129533678756477, + "acc_stderr": 0.0360722806104775, + "acc_norm": 0.5129533678756477, + "acc_norm_stderr": 0.0360722806104775 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.32456140350877194, + "acc_stderr": 0.044045561573747685, + "acc_norm": 0.32456140350877194, + "acc_norm_stderr": 0.044045561573747685 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.48623853211009177, + "acc_stderr": 0.02142920208987408, + "acc_norm": 0.48623853211009177, + "acc_norm_stderr": 0.02142920208987408 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.0442626668137991, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.0442626668137991 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.47058823529411764, + "acc_stderr": 0.028580341065138286, + "acc_norm": 0.47058823529411764, + "acc_norm_stderr": 0.028580341065138286 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + 
"acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6776859504132231, + "acc_stderr": 0.042664163633521685, + "acc_norm": 0.6776859504132231, + "acc_norm_stderr": 0.042664163633521685 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.42105263157894735, + "acc_stderr": 0.04017901275981748, + "acc_norm": 0.42105263157894735, + "acc_norm_stderr": 0.04017901275981748 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4035947712418301, + "acc_stderr": 0.019848280168401154, + "acc_norm": 0.4035947712418301, + "acc_norm_stderr": 0.019848280168401154 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3475177304964539, + "acc_stderr": 0.028406627809590947, + "acc_norm": 0.3475177304964539, + "acc_norm_stderr": 0.028406627809590947 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.04547960999764376, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.04547960999764376 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.0340470532865388, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.0340470532865388 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.29608938547486036, + "acc_stderr": 0.01526867731760228, + "acc_norm": 0.29608938547486036, + "acc_norm_stderr": 0.01526867731760228 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.63, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.63, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.41544117647058826, + "acc_stderr": 0.029935342707877746, + "acc_norm": 0.41544117647058826, + "acc_norm_stderr": 0.029935342707877746 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5510204081632653, + 
"acc_stderr": 0.03184213866687578, + "acc_norm": 0.5510204081632653, + "acc_norm_stderr": 0.03184213866687578 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5654008438818565, + "acc_stderr": 0.03226759995510145, + "acc_norm": 0.5654008438818565, + "acc_norm_stderr": 0.03226759995510145 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3305084745762712, + "acc_stderr": 0.01201414210184298, + "acc_norm": 0.3305084745762712, + "acc_norm_stderr": 0.01201414210184298 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.47549019607843135, + "acc_stderr": 0.035050931943487976, + "acc_norm": 0.47549019607843135, + "acc_norm_stderr": 0.035050931943487976 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.45454545454545453, + "acc_stderr": 0.038881769216740976, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.038881769216740976 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2741738066095471, + "mc1_stderr": 0.015616518497219376, + "mc2": 0.44088319088488914, + "mc2_stderr": 0.01533044885511757 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5277449822904369, + "acc_stderr": 0.01716386797945602, + "acc_norm": 0.5749704840613932, + "acc_norm_stderr": 0.016996016308362887 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + 
"harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + 
"model_name": "BM-K/mistral-ko-7b-it-v2.0.1", + "model_sha": "5482aa57e129559221c5109620df556b75e70f3a", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/BM-K/polyglot-ko-1.3b-it-v1.0/result_2023-10-06 06:41:38.json b/BM-K/polyglot-ko-1.3b-it-v1.0/result_2023-10-06 06:41:38.json new file mode 100644 index 0000000000000000000000000000000000000000..f44ad05440599fd4ef4e477f7afa330315b0b06d --- /dev/null +++ b/BM-K/polyglot-ko-1.3b-it-v1.0/result_2023-10-06 06:41:38.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.22696245733788395, + "acc_stderr": 0.012240491536132861, + "acc_norm": 0.2773037542662116, + "acc_norm_stderr": 0.013082095839059374 + }, + "harness|ko_hellaswag|10": { + "acc": 0.33578968333001397, + "acc_stderr": 0.004713006072807706, + "acc_norm": 0.41585341565425216, + "acc_norm_stderr": 0.0049186120989440285 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.29239766081871343, + "acc_stderr": 0.034886477134579236, + "acc_norm": 0.29239766081871343, + "acc_norm_stderr": 0.034886477134579236 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2524271844660194, + "acc_stderr": 0.04301250399690877, + "acc_norm": 0.2524271844660194, + "acc_norm_stderr": 0.04301250399690877 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.26309067688378035, + "acc_stderr": 0.015745497169049057, + "acc_norm": 0.26309067688378035, + "acc_norm_stderr": 0.015745497169049057 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.24444444444444444, + "acc_stderr": 0.037125378336148665, + "acc_norm": 0.24444444444444444, + "acc_norm_stderr": 0.037125378336148665 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 
0.251063829787234, + "acc_stderr": 0.028346963777162452, + "acc_norm": 0.251063829787234, + "acc_norm_stderr": 0.028346963777162452 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.20481927710843373, + "acc_stderr": 0.03141784291663926, + "acc_norm": 0.20481927710843373, + "acc_norm_stderr": 0.03141784291663926 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2958199356913183, + "acc_stderr": 0.025922371788818784, + "acc_norm": 0.2958199356913183, + "acc_norm_stderr": 0.025922371788818784 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.34977578475336324, + "acc_stderr": 0.03200736719484503, + "acc_norm": 0.34977578475336324, + "acc_norm_stderr": 0.03200736719484503 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.25190839694656486, + "acc_stderr": 0.03807387116306085, + "acc_norm": 0.25190839694656486, + "acc_norm_stderr": 0.03807387116306085 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.2676767676767677, + "acc_stderr": 0.031544498882702866, + "acc_norm": 0.2676767676767677, + "acc_norm_stderr": 0.031544498882702866 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.22758620689655173, + "acc_stderr": 0.03493950380131184, + "acc_norm": 0.22758620689655173, + "acc_norm_stderr": 0.03493950380131184 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.18627450980392157, + "acc_stderr": 0.03873958714149351, + "acc_norm": 0.18627450980392157, + "acc_norm_stderr": 0.03873958714149351 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.24789915966386555, + "acc_stderr": 0.028047967224176892, + "acc_norm": 0.24789915966386555, + "acc_norm_stderr": 0.028047967224176892 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.21794871794871795, + "acc_stderr": 0.020932445774463185, + "acc_norm": 0.21794871794871795, + "acc_norm_stderr": 
0.020932445774463185 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036846, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036846 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.26851851851851855, + "acc_stderr": 0.04284467968052191, + "acc_norm": 0.26851851851851855, + "acc_norm_stderr": 0.04284467968052191 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.27586206896551724, + "acc_stderr": 0.031447125816782426, + "acc_norm": 0.27586206896551724, + "acc_norm_stderr": 0.031447125816782426 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.27419354838709675, + "acc_stderr": 0.025378139970885193, + "acc_norm": 0.27419354838709675, + "acc_norm_stderr": 0.025378139970885193 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.24358974358974358, + "acc_stderr": 0.02812096650391441, + "acc_norm": 0.24358974358974358, + "acc_norm_stderr": 0.02812096650391441 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2528301886792453, + "acc_stderr": 0.026749899771241238, + "acc_norm": 0.2528301886792453, + "acc_norm_stderr": 0.026749899771241238 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2545454545454545, + "acc_stderr": 0.04172343038705383, + "acc_norm": 0.2545454545454545, + "acc_norm_stderr": 0.04172343038705383 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3037037037037037, + "acc_stderr": 0.028037929969114993, + "acc_norm": 0.3037037037037037, + "acc_norm_stderr": 0.028037929969114993 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2781456953642384, + "acc_stderr": 0.03658603262763743, + "acc_norm": 0.2781456953642384, + "acc_norm_stderr": 0.03658603262763743 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.2537313432835821, + "acc_stderr": 0.03076944496729602, + "acc_norm": 
0.2537313432835821, + "acc_norm_stderr": 0.03076944496729602 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.19653179190751446, + "acc_stderr": 0.03029957466478815, + "acc_norm": 0.19653179190751446, + "acc_norm_stderr": 0.03029957466478815 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.023517294335963286, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.023517294335963286 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.03745554791462457, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.03745554791462457 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816505, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816505 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.24855491329479767, + "acc_stderr": 0.023267528432100174, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 0.023267528432100174 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3374233128834356, + "acc_stderr": 0.03714908409935574, + "acc_norm": 0.3374233128834356, + "acc_norm_stderr": 0.03714908409935574 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.26851851851851855, + "acc_stderr": 0.02465968518596728, + "acc_norm": 0.26851851851851855, + "acc_norm_stderr": 0.02465968518596728 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.19689119170984457, + "acc_stderr": 0.028697873971860688, + "acc_norm": 0.19689119170984457, + "acc_norm_stderr": 0.028697873971860688 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.15789473684210525, + "acc_stderr": 
0.03430265978485698, + "acc_norm": 0.15789473684210525, + "acc_norm_stderr": 0.03430265978485698 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.24036697247706423, + "acc_stderr": 0.01832060732096407, + "acc_norm": 0.24036697247706423, + "acc_norm_stderr": 0.01832060732096407 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.25396825396825395, + "acc_stderr": 0.03893259610604673, + "acc_norm": 0.25396825396825395, + "acc_norm_stderr": 0.03893259610604673 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.23202614379084968, + "acc_stderr": 0.02417084087934102, + "acc_norm": 0.23202614379084968, + "acc_norm_stderr": 0.02417084087934102 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.16, + "acc_stderr": 0.0368452949177471, + "acc_norm": 0.16, + "acc_norm_stderr": 0.0368452949177471 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.23140495867768596, + "acc_stderr": 0.03849856098794088, + "acc_norm": 0.23140495867768596, + "acc_norm_stderr": 0.03849856098794088 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.21710526315789475, + "acc_stderr": 0.03355045304882924, + "acc_norm": 0.21710526315789475, + "acc_norm_stderr": 0.03355045304882924 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.25163398692810457, + "acc_stderr": 0.01755581809132226, + "acc_norm": 0.25163398692810457, + "acc_norm_stderr": 0.01755581809132226 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2624113475177305, + "acc_stderr": 0.026244920349843014, + "acc_norm": 0.2624113475177305, + "acc_norm_stderr": 0.026244920349843014 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.23214285714285715, + "acc_stderr": 0.04007341809755807, + "acc_norm": 0.23214285714285715, + "acc_norm_stderr": 0.04007341809755807 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4583333333333333, + "acc_stderr": 0.033981108902946366, + "acc_norm": 0.4583333333333333, + "acc_norm_stderr": 0.033981108902946366 + }, + 
"harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27262569832402234, + "acc_stderr": 0.014893391735249608, + "acc_norm": 0.27262569832402234, + "acc_norm_stderr": 0.014893391735249608 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.14, + "acc_stderr": 0.03487350880197771, + "acc_norm": 0.14, + "acc_norm_stderr": 0.03487350880197771 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3161764705882353, + "acc_stderr": 0.02824568739146292, + "acc_norm": 0.3161764705882353, + "acc_norm_stderr": 0.02824568739146292 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2653061224489796, + "acc_stderr": 0.028263889943784586, + "acc_norm": 0.2653061224489796, + "acc_norm_stderr": 0.028263889943784586 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.26582278481012656, + "acc_stderr": 0.028756799629658335, + "acc_norm": 0.26582278481012656, + "acc_norm_stderr": 0.028756799629658335 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.23728813559322035, + "acc_stderr": 0.01086543669078027, + "acc_norm": 0.23728813559322035, + "acc_norm_stderr": 0.01086543669078027 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.02933116229425173, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.02933116229425173 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.21212121212121213, + "acc_stderr": 0.031922715695483, + "acc_norm": 0.21212121212121213, + "acc_norm_stderr": 0.031922715695483 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24479804161566707, + "mc1_stderr": 0.015051869486715008, + "mc2": 0.41338491158026774, + "mc2_stderr": 0.01512108388775634 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3116883116883117, + "acc_stderr": 0.015924567607358324, + "acc_norm": 
0.3919716646989374, + "acc_norm_stderr": 0.016784332119424077 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + 
"harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "BM-K/polyglot-ko-1.3b-it-v1.0", + "model_sha": "2f5b0dfed443e3a89c13a13b48d6fe6838c86e67", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/BM-K/polyglot-ko-1.3b-it-v1.1/result_2023-10-06 07:23:29.json b/BM-K/polyglot-ko-1.3b-it-v1.1/result_2023-10-06 07:23:29.json new file mode 100644 index 0000000000000000000000000000000000000000..68b6b12533d10aa6245d0830b35d2862378720a7 --- /dev/null +++ b/BM-K/polyglot-ko-1.3b-it-v1.1/result_2023-10-06 07:23:29.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2363481228668942, + "acc_stderr": 0.012414960524301818, + "acc_norm": 0.2841296928327645, + "acc_norm_stderr": 0.013179442447653887 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3379804819757021, + "acc_stderr": 0.004720551323547123, + "acc_norm": 0.4192391953794065, + "acc_norm_stderr": 0.004924261467934422 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.29239766081871343, + "acc_stderr": 0.03488647713457923, + "acc_norm": 0.29239766081871343, + 
"acc_norm_stderr": 0.03488647713457923 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.27184466019417475, + "acc_stderr": 0.044052680241409216, + "acc_norm": 0.27184466019417475, + "acc_norm_stderr": 0.044052680241409216 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.24521072796934865, + "acc_stderr": 0.01538435228454394, + "acc_norm": 0.24521072796934865, + "acc_norm_stderr": 0.01538435228454394 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2518518518518518, + "acc_stderr": 0.037498507091740234, + "acc_norm": 0.2518518518518518, + "acc_norm_stderr": 0.037498507091740234 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2553191489361702, + "acc_stderr": 0.028504856470514192, + "acc_norm": 0.2553191489361702, + "acc_norm_stderr": 0.028504856470514192 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.2289156626506024, + "acc_stderr": 0.03270745277352477, + "acc_norm": 0.2289156626506024, + "acc_norm_stderr": 0.03270745277352477 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2990353697749196, + "acc_stderr": 0.026003301117885142, + "acc_norm": 0.2990353697749196, + "acc_norm_stderr": 0.026003301117885142 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3273542600896861, + "acc_stderr": 0.03149384670994131, + "acc_norm": 0.3273542600896861, + "acc_norm_stderr": 0.03149384670994131 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2366412213740458, + "acc_stderr": 0.037276735755969174, + "acc_norm": 0.2366412213740458, + "acc_norm_stderr": 0.037276735755969174 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036846, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036846 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.2828282828282828, + "acc_stderr": 0.032087795587867514, + "acc_norm": 0.2828282828282828, + 
"acc_norm_stderr": 0.032087795587867514 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2482758620689655, + "acc_stderr": 0.03600105692727771, + "acc_norm": 0.2482758620689655, + "acc_norm_stderr": 0.03600105692727771 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237655, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237655 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.23109243697478993, + "acc_stderr": 0.027381406927868973, + "acc_norm": 0.23109243697478993, + "acc_norm_stderr": 0.027381406927868973 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2282051282051282, + "acc_stderr": 0.02127839386358628, + "acc_norm": 0.2282051282051282, + "acc_norm_stderr": 0.02127839386358628 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816505, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816505 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.26851851851851855, + "acc_stderr": 0.04284467968052191, + "acc_norm": 0.26851851851851855, + "acc_norm_stderr": 0.04284467968052191 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2561576354679803, + "acc_stderr": 0.0307127300709826, + "acc_norm": 0.2561576354679803, + "acc_norm_stderr": 0.0307127300709826 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2838709677419355, + "acc_stderr": 0.02564938106302926, + "acc_norm": 0.2838709677419355, + "acc_norm_stderr": 0.02564938106302926 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.25213675213675213, + "acc_stderr": 0.02844796547623101, + "acc_norm": 0.25213675213675213, + "acc_norm_stderr": 0.02844796547623101 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2528301886792453, + "acc_stderr": 
0.02674989977124124, + "acc_norm": 0.2528301886792453, + "acc_norm_stderr": 0.02674989977124124 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.22727272727272727, + "acc_stderr": 0.04013964554072773, + "acc_norm": 0.22727272727272727, + "acc_norm_stderr": 0.04013964554072773 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3111111111111111, + "acc_stderr": 0.028226446749683515, + "acc_norm": 0.3111111111111111, + "acc_norm_stderr": 0.028226446749683515 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + "acc_stderr": 0.03684881521389023, + "acc_norm": 0.2847682119205298, + "acc_norm_stderr": 0.03684881521389023 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.23880597014925373, + "acc_stderr": 0.030147775935409217, + "acc_norm": 0.23880597014925373, + "acc_norm_stderr": 0.030147775935409217 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.1791907514450867, + "acc_stderr": 0.0292425130590633, + "acc_norm": 0.1791907514450867, + "acc_norm_stderr": 0.0292425130590633 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2830687830687831, + "acc_stderr": 0.023201392938194974, + "acc_norm": 0.2830687830687831, + "acc_norm_stderr": 0.023201392938194974 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.03745554791462457, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.03745554791462457 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.18, + "acc_stderr": 0.038612291966536955, + "acc_norm": 0.18, + "acc_norm_stderr": 0.038612291966536955 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.22, + "acc_stderr": 0.041633319989322695, + "acc_norm": 0.22, + "acc_norm_stderr": 0.041633319989322695 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2398843930635838, + "acc_stderr": 0.022989592543123567, + "acc_norm": 0.2398843930635838, + "acc_norm_stderr": 0.022989592543123567 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 
0.3312883435582822, + "acc_stderr": 0.03697983910025588, + "acc_norm": 0.3312883435582822, + "acc_norm_stderr": 0.03697983910025588 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2993827160493827, + "acc_stderr": 0.02548311560119547, + "acc_norm": 0.2993827160493827, + "acc_norm_stderr": 0.02548311560119547 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.26424870466321243, + "acc_stderr": 0.03182155050916647, + "acc_norm": 0.26424870466321243, + "acc_norm_stderr": 0.03182155050916647 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.18421052631578946, + "acc_stderr": 0.03646758875075566, + "acc_norm": 0.18421052631578946, + "acc_norm_stderr": 0.03646758875075566 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.22201834862385322, + "acc_stderr": 0.017818849564796617, + "acc_norm": 0.22201834862385322, + "acc_norm_stderr": 0.017818849564796617 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.25396825396825395, + "acc_stderr": 0.03893259610604672, + "acc_norm": 0.25396825396825395, + "acc_norm_stderr": 0.03893259610604672 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.24836601307189543, + "acc_stderr": 0.02473998135511359, + "acc_norm": 0.24836601307189543, + "acc_norm_stderr": 0.02473998135511359 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.14, + "acc_stderr": 0.03487350880197771, + "acc_norm": 0.14, + "acc_norm_stderr": 0.03487350880197771 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2644628099173554, + "acc_stderr": 0.040261875275912046, + "acc_norm": 0.2644628099173554, + "acc_norm_stderr": 0.040261875275912046 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.24342105263157895, + "acc_stderr": 0.034923496688842384, + "acc_norm": 0.24342105263157895, + "acc_norm_stderr": 0.034923496688842384 + }, + 
"harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.017848089574913222, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.017848089574913222 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2695035460992908, + "acc_stderr": 0.026469036818590634, + "acc_norm": 0.2695035460992908, + "acc_norm_stderr": 0.026469036818590634 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.21428571428571427, + "acc_stderr": 0.03894641120044792, + "acc_norm": 0.21428571428571427, + "acc_norm_stderr": 0.03894641120044792 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4351851851851852, + "acc_stderr": 0.03381200005643525, + "acc_norm": 0.4351851851851852, + "acc_norm_stderr": 0.03381200005643525 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27262569832402234, + "acc_stderr": 0.014893391735249608, + "acc_norm": 0.27262569832402234, + "acc_norm_stderr": 0.014893391735249608 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.22, + "acc_stderr": 0.0416333199893227, + "acc_norm": 0.22, + "acc_norm_stderr": 0.0416333199893227 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.39338235294117646, + "acc_stderr": 0.029674288281311183, + "acc_norm": 0.39338235294117646, + "acc_norm_stderr": 0.029674288281311183 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2938775510204082, + "acc_stderr": 0.02916273841024977, + "acc_norm": 0.2938775510204082, + "acc_norm_stderr": 0.02916273841024977 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.24050632911392406, + "acc_stderr": 0.02782078198114968, + "acc_norm": 0.24050632911392406, + "acc_norm_stderr": 0.02782078198114968 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.23989569752281617, + "acc_stderr": 
0.010906282617981653, + "acc_norm": 0.23989569752281617, + "acc_norm_stderr": 0.010906282617981653 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.24019607843137256, + "acc_stderr": 0.02998373305591362, + "acc_norm": 0.24019607843137256, + "acc_norm_stderr": 0.02998373305591362 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.21212121212121213, + "acc_stderr": 0.031922715695482995, + "acc_norm": 0.21212121212121213, + "acc_norm_stderr": 0.031922715695482995 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24479804161566707, + "mc1_stderr": 0.01505186948671501, + "mc2": 0.4174341547322483, + "mc2_stderr": 0.015183101828823979 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.31286894923258557, + "acc_stderr": 0.015941010118302658, + "acc_norm": 0.3872491145218418, + "acc_norm_stderr": 0.016747577991642785 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + 
"harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "BM-K/polyglot-ko-1.3b-it-v1.1", + "model_sha": "78f227625af9b7013b69de4ef2a203ac71bdda5b", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/BM-K/polyglot-ko-1.3b-it-v1.2/result_2023-10-09 06:14:19.json 
b/BM-K/polyglot-ko-1.3b-it-v1.2/result_2023-10-09 06:14:19.json new file mode 100644 index 0000000000000000000000000000000000000000..d806392165ae8dee63a4c6bb4d96b8c71e37d9b8 --- /dev/null +++ b/BM-K/polyglot-ko-1.3b-it-v1.2/result_2023-10-09 06:14:19.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2551194539249147, + "acc_stderr": 0.012739038695202109, + "acc_norm": 0.30119453924914674, + "acc_norm_stderr": 0.01340674176784762 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3379804819757021, + "acc_stderr": 0.0047205513235471196, + "acc_norm": 0.4176458872734515, + "acc_norm_stderr": 0.00492163264510238 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.034462962170884265, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.034462962170884265 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2524271844660194, + "acc_stderr": 0.04301250399690877, + "acc_norm": 0.2524271844660194, + "acc_norm_stderr": 0.04301250399690877 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.29118773946360155, + "acc_stderr": 0.016246087069701393, + "acc_norm": 0.29118773946360155, + "acc_norm_stderr": 0.016246087069701393 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.038201699145179055, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.038201699145179055 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.25957446808510637, + "acc_stderr": 0.028659179374292326, + "acc_norm": 0.25957446808510637, + "acc_norm_stderr": 0.028659179374292326 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.26506024096385544, + "acc_stderr": 0.03436024037944967, + "acc_norm": 0.26506024096385544, + "acc_norm_stderr": 0.03436024037944967 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2540192926045016, + 
"acc_stderr": 0.02472386150477169, + "acc_norm": 0.2540192926045016, + "acc_norm_stderr": 0.02472386150477169 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3273542600896861, + "acc_stderr": 0.031493846709941306, + "acc_norm": 0.3273542600896861, + "acc_norm_stderr": 0.031493846709941306 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.24427480916030533, + "acc_stderr": 0.037683359597287414, + "acc_norm": 0.24427480916030533, + "acc_norm_stderr": 0.037683359597287414 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.24242424242424243, + "acc_stderr": 0.030532892233932026, + "acc_norm": 0.24242424242424243, + "acc_norm_stderr": 0.030532892233932026 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.23448275862068965, + "acc_stderr": 0.035306258743465914, + "acc_norm": 0.23448275862068965, + "acc_norm_stderr": 0.035306258743465914 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237655, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237655 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3445378151260504, + "acc_stderr": 0.030868682604121626, + "acc_norm": 0.3445378151260504, + "acc_norm_stderr": 0.030868682604121626 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.25384615384615383, + "acc_stderr": 0.022066054378726257, + "acc_norm": 0.25384615384615383, + "acc_norm_stderr": 0.022066054378726257 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_jurisprudence|5": { + 
"acc": 0.24074074074074073, + "acc_stderr": 0.04133119440243838, + "acc_norm": 0.24074074074074073, + "acc_norm_stderr": 0.04133119440243838 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.28078817733990147, + "acc_stderr": 0.03161856335358609, + "acc_norm": 0.28078817733990147, + "acc_norm_stderr": 0.03161856335358609 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2903225806451613, + "acc_stderr": 0.025822106119415898, + "acc_norm": 0.2903225806451613, + "acc_norm_stderr": 0.025822106119415898 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.23504273504273504, + "acc_stderr": 0.027778835904935437, + "acc_norm": 0.23504273504273504, + "acc_norm_stderr": 0.027778835904935437 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2792452830188679, + "acc_stderr": 0.027611163402399715, + "acc_norm": 0.2792452830188679, + "acc_norm_stderr": 0.027611163402399715 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.039559328617958335, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.039559328617958335 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24074074074074073, + "acc_stderr": 0.026067159222275815, + "acc_norm": 0.24074074074074073, + "acc_norm_stderr": 0.026067159222275815 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2185430463576159, + "acc_stderr": 0.033742355504256936, + "acc_norm": 0.2185430463576159, + "acc_norm_stderr": 0.033742355504256936 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.208955223880597, + "acc_stderr": 0.028748298931728665, + "acc_norm": 0.208955223880597, + "acc_norm_stderr": 0.028748298931728665 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2138728323699422, + "acc_stderr": 0.031265112061730424, + "acc_norm": 0.2138728323699422, + "acc_norm_stderr": 0.031265112061730424 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2566137566137566, + "acc_stderr": 0.022494510767503154, + "acc_norm": 
0.2566137566137566, + "acc_norm_stderr": 0.022494510767503154 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.03476590104304134, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.03476590104304134 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816505, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816505 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2774566473988439, + "acc_stderr": 0.024105712607754307, + "acc_norm": 0.2774566473988439, + "acc_norm_stderr": 0.024105712607754307 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.31901840490797545, + "acc_stderr": 0.03661997551073836, + "acc_norm": 0.31901840490797545, + "acc_norm_stderr": 0.03661997551073836 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.28703703703703703, + "acc_stderr": 0.025171041915309684, + "acc_norm": 0.28703703703703703, + "acc_norm_stderr": 0.025171041915309684 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.29533678756476683, + "acc_stderr": 0.0329229663915514, + "acc_norm": 0.29533678756476683, + "acc_norm_stderr": 0.0329229663915514 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.04096985139843671, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.04096985139843671 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.23853211009174313, + "acc_stderr": 0.018272575810231863, + "acc_norm": 0.23853211009174313, + "acc_norm_stderr": 0.018272575810231863 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3492063492063492, + "acc_stderr": 0.04263906892795132, 
+ "acc_norm": 0.3492063492063492, + "acc_norm_stderr": 0.04263906892795132 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2581699346405229, + "acc_stderr": 0.025058503316958157, + "acc_norm": 0.2581699346405229, + "acc_norm_stderr": 0.025058503316958157 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2231404958677686, + "acc_stderr": 0.03800754475228733, + "acc_norm": 0.2231404958677686, + "acc_norm_stderr": 0.03800754475228733 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.17105263157894737, + "acc_stderr": 0.0306436070716771, + "acc_norm": 0.17105263157894737, + "acc_norm_stderr": 0.0306436070716771 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.26633986928104575, + "acc_stderr": 0.01788318813466719, + "acc_norm": 0.26633986928104575, + "acc_norm_stderr": 0.01788318813466719 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2553191489361702, + "acc_stderr": 0.02601199293090201, + "acc_norm": 0.2553191489361702, + "acc_norm_stderr": 0.02601199293090201 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.20535714285714285, + "acc_stderr": 0.038342410214190735, + "acc_norm": 0.20535714285714285, + "acc_norm_stderr": 0.038342410214190735 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2361111111111111, + "acc_stderr": 0.02896370257079101, + "acc_norm": 0.2361111111111111, + "acc_norm_stderr": 0.02896370257079101 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2670391061452514, + "acc_stderr": 0.014796502622562544, + "acc_norm": 0.2670391061452514, + "acc_norm_stderr": 0.014796502622562544 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.16, + "acc_stderr": 0.0368452949177471, + "acc_norm": 0.16, + "acc_norm_stderr": 0.0368452949177471 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.27, + 
"acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.2536764705882353, + "acc_stderr": 0.026431329870789513, + "acc_norm": 0.2536764705882353, + "acc_norm_stderr": 0.026431329870789513 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.31020408163265306, + "acc_stderr": 0.029613459872484378, + "acc_norm": 0.31020408163265306, + "acc_norm_stderr": 0.029613459872484378 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.270042194092827, + "acc_stderr": 0.028900721906293426, + "acc_norm": 0.270042194092827, + "acc_norm_stderr": 0.028900721906293426 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.23598435462842243, + "acc_stderr": 0.010844802669662682, + "acc_norm": 0.23598435462842243, + "acc_norm_stderr": 0.010844802669662682 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2696078431372549, + "acc_stderr": 0.031145570659486782, + "acc_norm": 0.2696078431372549, + "acc_norm_stderr": 0.031145570659486782 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2606060606060606, + "acc_stderr": 0.03427743175816524, + "acc_norm": 0.2606060606060606, + "acc_norm_stderr": 0.03427743175816524 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2252141982864137, + "mc1_stderr": 0.01462324076802348, + "mc2": 0.4080616788903193, + "mc2_stderr": 0.015242253889585933 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.31759149940968123, + "acc_stderr": 0.016005581876229306, + "acc_norm": 0.40731995277449823, + "acc_norm_stderr": 0.01689245669519127 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, 
+ "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 
1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "BM-K/polyglot-ko-1.3b-it-v1.2", + "model_sha": "d1a6abed1624c40b91b5df3acb5e245e281adc18", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/BM-K/polyglot-ko-1.3b-it-v1.3/result_2023-10-09 06:23:09.json b/BM-K/polyglot-ko-1.3b-it-v1.3/result_2023-10-09 06:23:09.json new file mode 100644 index 0000000000000000000000000000000000000000..6841584f85702163e924102d33e9c561d27da7bf --- /dev/null +++ b/BM-K/polyglot-ko-1.3b-it-v1.3/result_2023-10-09 06:23:09.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2619453924914676, + "acc_stderr": 0.012849054826858117, + "acc_norm": 0.30802047781569963, + "acc_norm_stderr": 0.01349142951729204 + }, + "harness|ko_hellaswag|10": { + "acc": 0.33957379008165706, + "acc_stderr": 0.0047259676848064045, + "acc_norm": 0.4195379406492731, + "acc_norm_stderr": 0.004924748500639348 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.25146198830409355, + "acc_stderr": 0.033275044238468436, + "acc_norm": 0.25146198830409355, + "acc_norm_stderr": 0.033275044238468436 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2524271844660194, + "acc_stderr": 0.04301250399690877, + "acc_norm": 0.2524271844660194, + "acc_norm_stderr": 0.04301250399690877 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2784163473818646, + "acc_stderr": 0.01602829518899247, + "acc_norm": 0.2784163473818646, + "acc_norm_stderr": 0.01602829518899247 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.23703703703703705, + 
"acc_stderr": 0.03673731683969506, + "acc_norm": 0.23703703703703705, + "acc_norm_stderr": 0.03673731683969506 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.25957446808510637, + "acc_stderr": 0.028659179374292326, + "acc_norm": 0.25957446808510637, + "acc_norm_stderr": 0.028659179374292326 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3192771084337349, + "acc_stderr": 0.036293353299478595, + "acc_norm": 0.3192771084337349, + "acc_norm_stderr": 0.036293353299478595 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2733118971061093, + "acc_stderr": 0.02531176597542612, + "acc_norm": 0.2733118971061093, + "acc_norm_stderr": 0.02531176597542612 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.29596412556053814, + "acc_stderr": 0.030636591348699796, + "acc_norm": 0.29596412556053814, + "acc_norm_stderr": 0.030636591348699796 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.22900763358778625, + "acc_stderr": 0.036853466317118506, + "acc_norm": 0.22900763358778625, + "acc_norm_stderr": 0.036853466317118506 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.2676767676767677, + "acc_stderr": 0.03154449888270287, + "acc_norm": 0.2676767676767677, + "acc_norm_stderr": 0.03154449888270287 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.1724137931034483, + "acc_stderr": 0.031478307902595745, + "acc_norm": 0.1724137931034483, + "acc_norm_stderr": 0.031478307902595745 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.041583075330832865, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.041583075330832865 + }, + 
"harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.31512605042016806, + "acc_stderr": 0.030176808288974337, + "acc_norm": 0.31512605042016806, + "acc_norm_stderr": 0.030176808288974337 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2282051282051282, + "acc_stderr": 0.021278393863586282, + "acc_norm": 0.2282051282051282, + "acc_norm_stderr": 0.021278393863586282 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.25, + "acc_stderr": 0.04186091791394607, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04186091791394607 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2315270935960591, + "acc_stderr": 0.02967833314144446, + "acc_norm": 0.2315270935960591, + "acc_norm_stderr": 0.02967833314144446 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2838709677419355, + "acc_stderr": 0.02564938106302925, + "acc_norm": 0.2838709677419355, + "acc_norm_stderr": 0.02564938106302925 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.24786324786324787, + "acc_stderr": 0.028286324075564393, + "acc_norm": 0.24786324786324787, + "acc_norm_stderr": 0.028286324075564393 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.27169811320754716, + "acc_stderr": 0.027377706624670713, + "acc_norm": 0.27169811320754716, + "acc_norm_stderr": 0.027377706624670713 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2909090909090909, + "acc_stderr": 0.04350271442923243, + "acc_norm": 0.2909090909090909, + "acc_norm_stderr": 0.04350271442923243 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.02696242432507383, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 
0.02696242432507383 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.23841059602649006, + "acc_stderr": 0.034791855725996586, + "acc_norm": 0.23841059602649006, + "acc_norm_stderr": 0.034791855725996586 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.263681592039801, + "acc_stderr": 0.031157150869355568, + "acc_norm": 0.263681592039801, + "acc_norm_stderr": 0.031157150869355568 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2254335260115607, + "acc_stderr": 0.03186209851641144, + "acc_norm": 0.2254335260115607, + "acc_norm_stderr": 0.03186209851641144 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.25132275132275134, + "acc_stderr": 0.022340482339643898, + "acc_norm": 0.25132275132275134, + "acc_norm_stderr": 0.022340482339643898 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.22916666666666666, + "acc_stderr": 0.035146974678623884, + "acc_norm": 0.22916666666666666, + "acc_norm_stderr": 0.035146974678623884 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.23, + "acc_stderr": 0.042295258468165044, + "acc_norm": 0.23, + "acc_norm_stderr": 0.042295258468165044 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.30057803468208094, + "acc_stderr": 0.02468531686725781, + "acc_norm": 0.30057803468208094, + "acc_norm_stderr": 0.02468531686725781 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.26380368098159507, + "acc_stderr": 0.03462419931615624, + "acc_norm": 0.26380368098159507, + "acc_norm_stderr": 0.03462419931615624 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2623456790123457, + "acc_stderr": 0.0244772228561351, + "acc_norm": 0.2623456790123457, + "acc_norm_stderr": 0.0244772228561351 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + 
"acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.24352331606217617, + "acc_stderr": 0.03097543638684542, + "acc_norm": 0.24352331606217617, + "acc_norm_stderr": 0.03097543638684542 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.03999423879281336, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.03999423879281336 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.21100917431192662, + "acc_stderr": 0.01749392240411265, + "acc_norm": 0.21100917431192662, + "acc_norm_stderr": 0.01749392240411265 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.03932537680392871, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.03932537680392871 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.25163398692810457, + "acc_stderr": 0.0248480182638752, + "acc_norm": 0.25163398692810457, + "acc_norm_stderr": 0.0248480182638752 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.2, + "acc_stderr": 0.040201512610368445, + "acc_norm": 0.2, + "acc_norm_stderr": 0.040201512610368445 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.35537190082644626, + "acc_stderr": 0.04369236326573981, + "acc_norm": 0.35537190082644626, + "acc_norm_stderr": 0.04369236326573981 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.17105263157894737, + "acc_stderr": 0.030643607071677105, + "acc_norm": 0.17105263157894737, + "acc_norm_stderr": 0.030643607071677105 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2679738562091503, + "acc_stderr": 0.017917974069594722, + "acc_norm": 0.2679738562091503, + "acc_norm_stderr": 0.017917974069594722 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2553191489361702, + "acc_stderr": 0.02601199293090201, + "acc_norm": 0.2553191489361702, + "acc_norm_stderr": 0.02601199293090201 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.23214285714285715, 
+ "acc_stderr": 0.04007341809755806, + "acc_norm": 0.23214285714285715, + "acc_norm_stderr": 0.04007341809755806 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2175925925925926, + "acc_stderr": 0.02813968944485966, + "acc_norm": 0.2175925925925926, + "acc_norm_stderr": 0.02813968944485966 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.25921787709497207, + "acc_stderr": 0.014655780837497717, + "acc_norm": 0.25921787709497207, + "acc_norm_stderr": 0.014655780837497717 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.15, + "acc_stderr": 0.0358870281282637, + "acc_norm": 0.15, + "acc_norm_stderr": 0.0358870281282637 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.2867647058823529, + "acc_stderr": 0.027472274473233818, + "acc_norm": 0.2867647058823529, + "acc_norm_stderr": 0.027472274473233818 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3020408163265306, + "acc_stderr": 0.029393609319879818, + "acc_norm": 0.3020408163265306, + "acc_norm_stderr": 0.029393609319879818 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.25738396624472576, + "acc_stderr": 0.028458820991460288, + "acc_norm": 0.25738396624472576, + "acc_norm_stderr": 0.028458820991460288 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.23728813559322035, + "acc_stderr": 0.010865436690780272, + "acc_norm": 0.23728813559322035, + "acc_norm_stderr": 0.010865436690780272 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.03058759135160425, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.03058759135160425 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2727272727272727, + "acc_stderr": 0.0347769116216366, + "acc_norm": 0.2727272727272727, + "acc_norm_stderr": 
0.0347769116216366 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.22888616891064872, + "mc1_stderr": 0.014706994909055027, + "mc2": 0.4031826036090223, + "mc2_stderr": 0.0151985432197755 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2975206611570248, + "acc_stderr": 0.01571774220508992, + "acc_norm": 0.37662337662337664, + "acc_norm_stderr": 0.016658799874051975 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + 
"harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "BM-K/polyglot-ko-1.3b-it-v1.3", + "model_sha": "1df1840d994fed4d5806ca38746639407c9bb970", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/BM-K/polyglot-ko-1.3b-it-v1.4/result_2023-10-09 06:31:19.json b/BM-K/polyglot-ko-1.3b-it-v1.4/result_2023-10-09 06:31:19.json new file mode 100644 index 0000000000000000000000000000000000000000..9006fa923ed269b630abfc3e6c94da3aa9d2a483 --- /dev/null +++ b/BM-K/polyglot-ko-1.3b-it-v1.4/result_2023-10-09 06:31:19.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.25170648464163825, + "acc_stderr": 0.012682496334042963, + "acc_norm": 0.30887372013651876, + "acc_norm_stderr": 0.013501770929344003 + }, + 
"harness|ko_hellaswag|10": { + "acc": 0.34096793467436765, + "acc_stderr": 0.004730658073041557, + "acc_norm": 0.4206333399721171, + "acc_norm_stderr": 0.004926518439372268 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.23976608187134502, + "acc_stderr": 0.03274485211946956, + "acc_norm": 0.23976608187134502, + "acc_norm_stderr": 0.03274485211946956 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2815533980582524, + "acc_stderr": 0.04453254836326467, + "acc_norm": 0.2815533980582524, + "acc_norm_stderr": 0.04453254836326467 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.27586206896551724, + "acc_stderr": 0.01598281477469563, + "acc_norm": 0.27586206896551724, + "acc_norm_stderr": 0.01598281477469563 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.22962962962962963, + "acc_stderr": 0.03633384414073462, + "acc_norm": 0.22962962962962963, + "acc_norm_stderr": 0.03633384414073462 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.15, + "acc_stderr": 0.03588702812826373, + "acc_norm": 0.15, + "acc_norm_stderr": 0.03588702812826373 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.23829787234042554, + "acc_stderr": 0.027851252973889802, + "acc_norm": 0.23829787234042554, + "acc_norm_stderr": 0.027851252973889802 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3072289156626506, + "acc_stderr": 0.03591566797824665, + "acc_norm": 0.3072289156626506, + "acc_norm_stderr": 0.03591566797824665 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2508038585209003, + "acc_stderr": 0.024619771956697165, + "acc_norm": 0.2508038585209003, + "acc_norm_stderr": 0.024619771956697165 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.2062780269058296, + "acc_stderr": 0.02715715047956382, + "acc_norm": 0.2062780269058296, + "acc_norm_stderr": 0.02715715047956382 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2595419847328244, + "acc_stderr": 0.03844876139785271, + "acc_norm": 0.2595419847328244, + "acc_norm_stderr": 0.03844876139785271 
+ }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932269, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932269 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.31313131313131315, + "acc_stderr": 0.033042050878136525, + "acc_norm": 0.31313131313131315, + "acc_norm_stderr": 0.033042050878136525 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2413793103448276, + "acc_stderr": 0.03565998174135302, + "acc_norm": 0.2413793103448276, + "acc_norm_stderr": 0.03565998174135302 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.04389869956808779, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.04389869956808779 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.36134453781512604, + "acc_stderr": 0.031204691225150006, + "acc_norm": 0.36134453781512604, + "acc_norm_stderr": 0.031204691225150006 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.28974358974358977, + "acc_stderr": 0.023000628243687968, + "acc_norm": 0.28974358974358977, + "acc_norm_stderr": 0.023000628243687968 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.04236511258094633, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.04236511258094633 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.28078817733990147, + "acc_stderr": 0.03161856335358608, + "acc_norm": 0.28078817733990147, + "acc_norm_stderr": 0.03161856335358608 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2806451612903226, + "acc_stderr": 0.025560604721022884, + "acc_norm": 0.2806451612903226, + 
"acc_norm_stderr": 0.025560604721022884 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2094017094017094, + "acc_stderr": 0.026655699653922737, + "acc_norm": 0.2094017094017094, + "acc_norm_stderr": 0.026655699653922737 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2792452830188679, + "acc_stderr": 0.027611163402399715, + "acc_norm": 0.2792452830188679, + "acc_norm_stderr": 0.027611163402399715 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2818181818181818, + "acc_stderr": 0.04309118709946459, + "acc_norm": 0.2818181818181818, + "acc_norm_stderr": 0.04309118709946459 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.026842057873833706, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.026842057873833706 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + "acc_stderr": 0.037345356767871984, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.037345356767871984 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.18407960199004975, + "acc_stderr": 0.02740385941078684, + "acc_norm": 0.18407960199004975, + "acc_norm_stderr": 0.02740385941078684 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.20809248554913296, + "acc_stderr": 0.030952890217749884, + "acc_norm": 0.20809248554913296, + "acc_norm_stderr": 0.030952890217749884 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.24867724867724866, + "acc_stderr": 0.02226181769240018, + "acc_norm": 0.24867724867724866, + "acc_norm_stderr": 0.02226181769240018 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.24305555555555555, + "acc_stderr": 0.03586879280080342, + "acc_norm": 0.24305555555555555, + "acc_norm_stderr": 0.03586879280080342 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.2, + "acc_stderr": 
0.04020151261036843, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036843 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2630057803468208, + "acc_stderr": 0.023703099525258172, + "acc_norm": 0.2630057803468208, + "acc_norm_stderr": 0.023703099525258172 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3067484662576687, + "acc_stderr": 0.03623089915724148, + "acc_norm": 0.3067484662576687, + "acc_norm_stderr": 0.03623089915724148 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.25617283950617287, + "acc_stderr": 0.024288533637726095, + "acc_norm": 0.25617283950617287, + "acc_norm_stderr": 0.024288533637726095 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.25906735751295334, + "acc_stderr": 0.03161877917935411, + "acc_norm": 0.25906735751295334, + "acc_norm_stderr": 0.03161877917935411 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2982456140350877, + "acc_stderr": 0.043036840335373173, + "acc_norm": 0.2982456140350877, + "acc_norm_stderr": 0.043036840335373173 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.24770642201834864, + "acc_stderr": 0.018508143602547805, + "acc_norm": 0.24770642201834864, + "acc_norm_stderr": 0.018508143602547805 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2698412698412698, + "acc_stderr": 0.03970158273235172, + "acc_norm": 0.2698412698412698, + "acc_norm_stderr": 0.03970158273235172 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2679738562091503, + "acc_stderr": 0.025360603796242557, + "acc_norm": 0.2679738562091503, + "acc_norm_stderr": 0.025360603796242557 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036844, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036844 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 
0.35537190082644626, + "acc_stderr": 0.04369236326573982, + "acc_norm": 0.35537190082644626, + "acc_norm_stderr": 0.04369236326573982 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.19078947368421054, + "acc_stderr": 0.031975658210325004, + "acc_norm": 0.19078947368421054, + "acc_norm_stderr": 0.031975658210325004 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.24183006535947713, + "acc_stderr": 0.017322789207784326, + "acc_norm": 0.24183006535947713, + "acc_norm_stderr": 0.017322789207784326 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.25177304964539005, + "acc_stderr": 0.0258921511567094, + "acc_norm": 0.25177304964539005, + "acc_norm_stderr": 0.0258921511567094 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25892857142857145, + "acc_stderr": 0.04157751539865629, + "acc_norm": 0.25892857142857145, + "acc_norm_stderr": 0.04157751539865629 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2361111111111111, + "acc_stderr": 0.028963702570791047, + "acc_norm": 0.2361111111111111, + "acc_norm_stderr": 0.028963702570791047 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2636871508379888, + "acc_stderr": 0.014736926383761987, + "acc_norm": 0.2636871508379888, + "acc_norm_stderr": 0.014736926383761987 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.16, + "acc_stderr": 0.03684529491774709, + "acc_norm": 0.16, + "acc_norm_stderr": 0.03684529491774709 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.19117647058823528, + "acc_stderr": 0.023886881922440362, + "acc_norm": 0.19117647058823528, + "acc_norm_stderr": 0.023886881922440362 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2816326530612245, + "acc_stderr": 0.028795185574291282, + "acc_norm": 0.2816326530612245, + "acc_norm_stderr": 
0.028795185574291282 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.24472573839662448, + "acc_stderr": 0.02798569938703642, + "acc_norm": 0.24472573839662448, + "acc_norm_stderr": 0.02798569938703642 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.23272490221642764, + "acc_stderr": 0.0107925955538885, + "acc_norm": 0.23272490221642764, + "acc_norm_stderr": 0.0107925955538885 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.24019607843137256, + "acc_stderr": 0.029983733055913623, + "acc_norm": 0.24019607843137256, + "acc_norm_stderr": 0.029983733055913623 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2545454545454545, + "acc_stderr": 0.03401506715249039, + "acc_norm": 0.2545454545454545, + "acc_norm_stderr": 0.03401506715249039 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.23623011015911874, + "mc1_stderr": 0.014869755015871096, + "mc2": 0.414131633910044, + "mc2_stderr": 0.015365810716919849 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3105076741440378, + "acc_stderr": 0.015908004528762003, + "acc_norm": 0.3742621015348288, + "acc_norm_stderr": 0.016637917789798742 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + 
"harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "BM-K/polyglot-ko-1.3b-it-v1.4", + "model_sha": "acbd40970c01a4b40debc0d9a9ac096a74673d74", + 
"model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/BM-K/polyglot-ko-1.3b-it-v1.6/result_2023-11-06 01:10:50.json b/BM-K/polyglot-ko-1.3b-it-v1.6/result_2023-11-06 01:10:50.json new file mode 100644 index 0000000000000000000000000000000000000000..7700635f470576d89fb808f70f17dfc0c101cd14 --- /dev/null +++ b/BM-K/polyglot-ko-1.3b-it-v1.6/result_2023-11-06 01:10:50.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.25597269624573377, + "acc_stderr": 0.012753013241244508, + "acc_norm": 0.295221843003413, + "acc_norm_stderr": 0.013329750293382316 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3419637522405895, + "acc_stderr": 0.0047339804707992195, + "acc_norm": 0.4192391953794065, + "acc_norm_stderr": 0.004924261467934419 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.28654970760233917, + "acc_stderr": 0.03467826685703826, + "acc_norm": 0.28654970760233917, + "acc_norm_stderr": 0.03467826685703826 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.24271844660194175, + "acc_stderr": 0.042450224863844935, + "acc_norm": 0.24271844660194175, + "acc_norm_stderr": 0.042450224863844935 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.27330779054916987, + "acc_stderr": 0.01593668106262856, + "acc_norm": 0.27330779054916987, + "acc_norm_stderr": 0.01593668106262856 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.21481481481481482, + "acc_stderr": 0.03547854198560826, + "acc_norm": 0.21481481481481482, + "acc_norm_stderr": 0.03547854198560826 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.23404255319148937, + "acc_stderr": 0.027678452578212383, + "acc_norm": 0.23404255319148937, + "acc_norm_stderr": 
0.027678452578212383 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.2710843373493976, + "acc_stderr": 0.03460579907553027, + "acc_norm": 0.2710843373493976, + "acc_norm_stderr": 0.03460579907553027 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2733118971061093, + "acc_stderr": 0.02531176597542612, + "acc_norm": 0.2733118971061093, + "acc_norm_stderr": 0.02531176597542612 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.2914798206278027, + "acc_stderr": 0.030500283176545913, + "acc_norm": 0.2914798206278027, + "acc_norm_stderr": 0.030500283176545913 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2748091603053435, + "acc_stderr": 0.03915345408847835, + "acc_norm": 0.2748091603053435, + "acc_norm_stderr": 0.03915345408847835 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.2676767676767677, + "acc_stderr": 0.031544498882702866, + "acc_norm": 0.2676767676767677, + "acc_norm_stderr": 0.031544498882702866 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2206896551724138, + "acc_stderr": 0.03455930201924812, + "acc_norm": 0.2206896551724138, + "acc_norm_stderr": 0.03455930201924812 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.041583075330832865, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.041583075330832865 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3487394957983193, + "acc_stderr": 0.030956636328566548, + "acc_norm": 0.3487394957983193, + "acc_norm_stderr": 0.030956636328566548 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.02242127361292372, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.02242127361292372 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.18, + "acc_stderr": 0.03861229196653694, + 
"acc_norm": 0.18, + "acc_norm_stderr": 0.03861229196653694 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542126, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542126 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.0401910747255735, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.0401910747255735 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2413793103448276, + "acc_stderr": 0.030108330718011625, + "acc_norm": 0.2413793103448276, + "acc_norm_stderr": 0.030108330718011625 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3, + "acc_stderr": 0.026069362295335127, + "acc_norm": 0.3, + "acc_norm_stderr": 0.026069362295335127 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.25213675213675213, + "acc_stderr": 0.02844796547623101, + "acc_norm": 0.25213675213675213, + "acc_norm_stderr": 0.02844796547623101 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2528301886792453, + "acc_stderr": 0.026749899771241238, + "acc_norm": 0.2528301886792453, + "acc_norm_stderr": 0.026749899771241238 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2818181818181818, + "acc_stderr": 0.0430911870994646, + "acc_norm": 0.2818181818181818, + "acc_norm_stderr": 0.0430911870994646 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.027840811495871937, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.027840811495871937 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2185430463576159, + "acc_stderr": 0.03374235550425694, + "acc_norm": 0.2185430463576159, + "acc_norm_stderr": 0.03374235550425694 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.23880597014925373, + "acc_stderr": 0.03014777593540922, + "acc_norm": 0.23880597014925373, + "acc_norm_stderr": 0.03014777593540922 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2023121387283237, + "acc_stderr": 
0.03063114553919882, + "acc_norm": 0.2023121387283237, + "acc_norm_stderr": 0.03063114553919882 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2698412698412698, + "acc_stderr": 0.02286083830923207, + "acc_norm": 0.2698412698412698, + "acc_norm_stderr": 0.02286083830923207 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.25, + "acc_stderr": 0.03621034121889507, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03621034121889507 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.23, + "acc_stderr": 0.042295258468165044, + "acc_norm": 0.23, + "acc_norm_stderr": 0.042295258468165044 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.24277456647398843, + "acc_stderr": 0.0230836585869842, + "acc_norm": 0.24277456647398843, + "acc_norm_stderr": 0.0230836585869842 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.31901840490797545, + "acc_stderr": 0.03661997551073836, + "acc_norm": 0.31901840490797545, + "acc_norm_stderr": 0.03661997551073836 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2716049382716049, + "acc_stderr": 0.024748624490537368, + "acc_norm": 0.2716049382716049, + "acc_norm_stderr": 0.024748624490537368 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.26424870466321243, + "acc_stderr": 0.031821550509166484, + "acc_norm": 0.26424870466321243, + "acc_norm_stderr": 0.031821550509166484 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.041857744240220575, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.041857744240220575 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.23486238532110093, + "acc_stderr": 
0.018175110510343595, + "acc_norm": 0.23486238532110093, + "acc_norm_stderr": 0.018175110510343595 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3412698412698413, + "acc_stderr": 0.04240799327574924, + "acc_norm": 0.3412698412698413, + "acc_norm_stderr": 0.04240799327574924 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.26143790849673204, + "acc_stderr": 0.025160998214292456, + "acc_norm": 0.26143790849673204, + "acc_norm_stderr": 0.025160998214292456 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816507, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816507 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.1487603305785124, + "acc_stderr": 0.03248470083807195, + "acc_norm": 0.1487603305785124, + "acc_norm_stderr": 0.03248470083807195 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.19078947368421054, + "acc_stderr": 0.031975658210325, + "acc_norm": 0.19078947368421054, + "acc_norm_stderr": 0.031975658210325 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2630718954248366, + "acc_stderr": 0.017812676542320657, + "acc_norm": 0.2630718954248366, + "acc_norm_stderr": 0.017812676542320657 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2553191489361702, + "acc_stderr": 0.02601199293090201, + "acc_norm": 0.2553191489361702, + "acc_norm_stderr": 0.02601199293090201 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.20535714285714285, + "acc_stderr": 0.03834241021419073, + "acc_norm": 0.20535714285714285, + "acc_norm_stderr": 0.03834241021419073 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2824074074074074, + "acc_stderr": 0.030701372111510937, + "acc_norm": 0.2824074074074074, + "acc_norm_stderr": 0.030701372111510937 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24916201117318434, + "acc_stderr": 0.01446589382985992, + "acc_norm": 0.24916201117318434, + "acc_norm_stderr": 0.01446589382985992 + }, + 
"harness|ko_mmlu_college_computer_science|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036845, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036845 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909284, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909284 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.28308823529411764, + "acc_stderr": 0.027365861131513812, + "acc_norm": 0.28308823529411764, + "acc_norm_stderr": 0.027365861131513812 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2653061224489796, + "acc_stderr": 0.0282638899437846, + "acc_norm": 0.2653061224489796, + "acc_norm_stderr": 0.0282638899437846 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2742616033755274, + "acc_stderr": 0.029041333510598035, + "acc_norm": 0.2742616033755274, + "acc_norm_stderr": 0.029041333510598035 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.23468057366362452, + "acc_stderr": 0.010824026872449346, + "acc_norm": 0.23468057366362452, + "acc_norm_stderr": 0.010824026872449346 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.02933116229425172, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.02933116229425172 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.23636363636363636, + "acc_stderr": 0.033175059300091805, + "acc_norm": 0.23636363636363636, + "acc_norm_stderr": 0.033175059300091805 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.23255813953488372, + "mc1_stderr": 0.014789157531080517, + "mc2": 0.40663525842480935, + "mc2_stderr": 0.01551567406322468 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3293978748524203, + "acc_stderr": 0.016158746868147143, + "acc_norm": 0.40731995277449823, + "acc_norm_stderr": 0.01689245669519127 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + 
"harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + 
"harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "BM-K/polyglot-ko-1.3b-it-v1.6", + "model_sha": "97def0549ef147c96d755ba79a29c3efcdb3f737", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/BM-K/polyglot-ko-1.3b-it-v1.7/result_2023-11-06 08:02:10.json b/BM-K/polyglot-ko-1.3b-it-v1.7/result_2023-11-06 08:02:10.json new file mode 100644 index 0000000000000000000000000000000000000000..44105c3f4c680e0901c3a5b4eced875fa7bfb0a1 --- /dev/null +++ b/BM-K/polyglot-ko-1.3b-it-v1.7/result_2023-11-06 08:02:10.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.23720136518771331, + "acc_stderr": 0.012430399829260861, + "acc_norm": 0.2960750853242321, + "acc_norm_stderr": 0.01334091608524627 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3437562238597889, + "acc_stderr": 0.004739902411944544, + "acc_norm": 0.4171479784903406, + "acc_norm_stderr": 0.004920800313232743 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.21637426900584794, + "acc_stderr": 0.031581495393387324, + "acc_norm": 0.21637426900584794, + "acc_norm_stderr": 0.031581495393387324 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.32038834951456313, + "acc_stderr": 0.04620284082280039, + "acc_norm": 0.32038834951456313, + 
"acc_norm_stderr": 0.04620284082280039 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.25287356321839083, + "acc_stderr": 0.015543377313719681, + "acc_norm": 0.25287356321839083, + "acc_norm_stderr": 0.015543377313719681 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.22962962962962963, + "acc_stderr": 0.036333844140734636, + "acc_norm": 0.22962962962962963, + "acc_norm_stderr": 0.036333844140734636 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2, + "acc_stderr": 0.0261488180184245, + "acc_norm": 0.2, + "acc_norm_stderr": 0.0261488180184245 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.1686746987951807, + "acc_stderr": 0.029152009627856544, + "acc_norm": 0.1686746987951807, + "acc_norm_stderr": 0.029152009627856544 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2379421221864952, + "acc_stderr": 0.024185150647818707, + "acc_norm": 0.2379421221864952, + "acc_norm_stderr": 0.024185150647818707 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.15695067264573992, + "acc_stderr": 0.0244135871749074, + "acc_norm": 0.15695067264573992, + "acc_norm_stderr": 0.0244135871749074 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.22900763358778625, + "acc_stderr": 0.036853466317118506, + "acc_norm": 0.22900763358778625, + "acc_norm_stderr": 0.036853466317118506 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542126, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542126 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3434343434343434, + "acc_stderr": 0.033832012232444426, + "acc_norm": 0.3434343434343434, + "acc_norm_stderr": 0.033832012232444426 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2413793103448276, + "acc_stderr": 0.03565998174135302, + "acc_norm": 0.2413793103448276, + "acc_norm_stderr": 
0.03565998174135302 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.041583075330832865, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.041583075330832865 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.22268907563025211, + "acc_stderr": 0.02702543349888237, + "acc_norm": 0.22268907563025211, + "acc_norm_stderr": 0.02702543349888237 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3384615384615385, + "acc_stderr": 0.023991500500313033, + "acc_norm": 0.3384615384615385, + "acc_norm_stderr": 0.023991500500313033 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036845, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036845 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.23148148148148148, + "acc_stderr": 0.04077494709252626, + "acc_norm": 0.23148148148148148, + "acc_norm_stderr": 0.04077494709252626 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.24630541871921183, + "acc_stderr": 0.03031509928561773, + "acc_norm": 0.24630541871921183, + "acc_norm_stderr": 0.03031509928561773 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3225806451612903, + "acc_stderr": 0.026593084516572277, + "acc_norm": 0.3225806451612903, + "acc_norm_stderr": 0.026593084516572277 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.24786324786324787, + "acc_stderr": 0.02828632407556441, + "acc_norm": 0.24786324786324787, + "acc_norm_stderr": 0.02828632407556441 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2679245283018868, + "acc_stderr": 0.027257260322494845, + "acc_norm": 0.2679245283018868, + "acc_norm_stderr": 0.027257260322494845 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.24545454545454545, + "acc_stderr": 0.041220665028782834, + 
"acc_norm": 0.24545454545454545, + "acc_norm_stderr": 0.041220665028782834 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25555555555555554, + "acc_stderr": 0.026593939101844082, + "acc_norm": 0.25555555555555554, + "acc_norm_stderr": 0.026593939101844082 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.03757949922943343, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.03757949922943343 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.22388059701492538, + "acc_stderr": 0.029475250236017183, + "acc_norm": 0.22388059701492538, + "acc_norm_stderr": 0.029475250236017183 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.27167630057803466, + "acc_stderr": 0.03391750322321659, + "acc_norm": 0.27167630057803466, + "acc_norm_stderr": 0.03391750322321659 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.24867724867724866, + "acc_stderr": 0.022261817692400175, + "acc_norm": 0.24867724867724866, + "acc_norm_stderr": 0.022261817692400175 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.22916666666666666, + "acc_stderr": 0.03514697467862388, + "acc_norm": 0.22916666666666666, + "acc_norm_stderr": 0.03514697467862388 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.18, + "acc_stderr": 0.03861229196653697, + "acc_norm": 0.18, + "acc_norm_stderr": 0.03861229196653697 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.21676300578034682, + "acc_stderr": 0.022183477668412853, + "acc_norm": 0.21676300578034682, + "acc_norm_stderr": 0.022183477668412853 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.26993865030674846, + "acc_stderr": 0.03487825168497892, + "acc_norm": 0.26993865030674846, + "acc_norm_stderr": 0.03487825168497892 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.26851851851851855, 
+ "acc_stderr": 0.02465968518596728, + "acc_norm": 0.26851851851851855, + "acc_norm_stderr": 0.02465968518596728 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.30569948186528495, + "acc_stderr": 0.03324837939758159, + "acc_norm": 0.30569948186528495, + "acc_norm_stderr": 0.03324837939758159 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.039994238792813365, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.039994238792813365 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3577981651376147, + "acc_stderr": 0.02055206078482781, + "acc_norm": 0.3577981651376147, + "acc_norm_stderr": 0.02055206078482781 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30158730158730157, + "acc_stderr": 0.04104947269903394, + "acc_norm": 0.30158730158730157, + "acc_norm_stderr": 0.04104947269903394 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.025829163272757485, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.025829163272757485 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816507, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816507 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2727272727272727, + "acc_stderr": 0.040655781409087044, + "acc_norm": 0.2727272727272727, + "acc_norm_stderr": 0.040655781409087044 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.35526315789473684, + "acc_stderr": 0.03894734487013316, + "acc_norm": 0.35526315789473684, + "acc_norm_stderr": 0.03894734487013316 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.26143790849673204, + "acc_stderr": 0.017776947157528027, + "acc_norm": 0.26143790849673204, + "acc_norm_stderr": 0.017776947157528027 + }, + 
"harness|ko_mmlu_professional_accounting|5": { + "acc": 0.25177304964539005, + "acc_stderr": 0.025892151156709405, + "acc_norm": 0.25177304964539005, + "acc_norm_stderr": 0.025892151156709405 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.04287858751340456, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.04287858751340456 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.35648148148148145, + "acc_stderr": 0.032664783315272714, + "acc_norm": 0.35648148148148145, + "acc_norm_stderr": 0.032664783315272714 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27262569832402234, + "acc_stderr": 0.014893391735249608, + "acc_norm": 0.27262569832402234, + "acc_norm_stderr": 0.014893391735249608 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.35294117647058826, + "acc_stderr": 0.029029422815681404, + "acc_norm": 0.35294117647058826, + "acc_norm_stderr": 0.029029422815681404 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2653061224489796, + "acc_stderr": 0.028263889943784593, + "acc_norm": 0.2653061224489796, + "acc_norm_stderr": 0.028263889943784593 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2869198312236287, + "acc_stderr": 0.029443773022594693, + "acc_norm": 0.2869198312236287, + "acc_norm_stderr": 0.029443773022594693 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2561929595827901, + "acc_stderr": 0.011149173153110583, + "acc_norm": 0.2561929595827901, + "acc_norm_stderr": 0.011149173153110583 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.24509803921568626, + "acc_stderr": 
0.030190282453501947, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.030190282453501947 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.24848484848484848, + "acc_stderr": 0.03374402644139405, + "acc_norm": 0.24848484848484848, + "acc_norm_stderr": 0.03374402644139405 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.23011015911872704, + "mc1_stderr": 0.014734557959807762, + "mc2": 0.4165738353723906, + "mc2_stderr": 0.015472231665083085 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3624557260920897, + "acc_stderr": 0.016527131240453713, + "acc_norm": 0.44510035419126326, + "acc_norm_stderr": 0.017086417431005474 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + 
"harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "BM-K/polyglot-ko-1.3b-it-v1.7", + "model_sha": "6f92e92497cdde6029d4cb4ea13380933e9c1b5f", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/BM-K/stupid_model/result_2024-01-03 00:05:37.json b/BM-K/stupid_model/result_2024-01-03 00:05:37.json new file mode 100644 index 0000000000000000000000000000000000000000..99ebe060da9c59dda7b0416bbb4ae3f33996ba5b --- /dev/null +++ 
b/BM-K/stupid_model/result_2024-01-03 00:05:37.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.31313993174061433, + "acc_stderr": 0.013552671543623506, + "acc_norm": 0.36177474402730375, + "acc_norm_stderr": 0.014041957945038076 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3577972515435172, + "acc_stderr": 0.004783723798286501, + "acc_norm": 0.4493128858793069, + "acc_norm_stderr": 0.00496407587012034 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.3567251461988304, + "acc_stderr": 0.03674013002860954, + "acc_norm": 0.3567251461988304, + "acc_norm_stderr": 0.03674013002860954 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.44660194174757284, + "acc_stderr": 0.04922424153458933, + "acc_norm": 0.44660194174757284, + "acc_norm_stderr": 0.04922424153458933 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.41890166028097064, + "acc_stderr": 0.01764320505237717, + "acc_norm": 0.41890166028097064, + "acc_norm_stderr": 0.01764320505237717 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.038201699145179055, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.038201699145179055 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847415, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847415 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39148936170212767, + "acc_stderr": 0.031907012423268113, + "acc_norm": 0.39148936170212767, + "acc_norm_stderr": 0.031907012423268113 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.35542168674698793, + "acc_stderr": 0.03726214354322415, + "acc_norm": 0.35542168674698793, + "acc_norm_stderr": 0.03726214354322415 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3408360128617363, + "acc_stderr": 0.026920841260776155, + "acc_norm": 0.3408360128617363, + "acc_norm_stderr": 0.026920841260776155 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4170403587443946, + "acc_stderr": 
0.03309266936071721, + "acc_norm": 0.4170403587443946, + "acc_norm_stderr": 0.03309266936071721 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2595419847328244, + "acc_stderr": 0.03844876139785271, + "acc_norm": 0.2595419847328244, + "acc_norm_stderr": 0.03844876139785271 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.398989898989899, + "acc_stderr": 0.03488901616852731, + "acc_norm": 0.398989898989899, + "acc_norm_stderr": 0.03488901616852731 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.32413793103448274, + "acc_stderr": 0.03900432069185553, + "acc_norm": 0.32413793103448274, + "acc_norm_stderr": 0.03900432069185553 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.04280105837364395, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.04280105837364395 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.39915966386554624, + "acc_stderr": 0.031811100324139245, + "acc_norm": 0.39915966386554624, + "acc_norm_stderr": 0.031811100324139245 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.33589743589743587, + "acc_stderr": 0.023946724741563976, + "acc_norm": 0.33589743589743587, + "acc_norm_stderr": 0.023946724741563976 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.47, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.47, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.04820403072760627, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.04820403072760627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 
0.35467980295566504, + "acc_stderr": 0.03366124489051449, + "acc_norm": 0.35467980295566504, + "acc_norm_stderr": 0.03366124489051449 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3548387096774194, + "acc_stderr": 0.027218889773308753, + "acc_norm": 0.3548387096774194, + "acc_norm_stderr": 0.027218889773308753 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.594017094017094, + "acc_stderr": 0.03217180182641087, + "acc_norm": 0.594017094017094, + "acc_norm_stderr": 0.03217180182641087 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.39622641509433965, + "acc_stderr": 0.030102793781791194, + "acc_norm": 0.39622641509433965, + "acc_norm_stderr": 0.030102793781791194 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.44545454545454544, + "acc_stderr": 0.047605488214603246, + "acc_norm": 0.44545454545454544, + "acc_norm_stderr": 0.047605488214603246 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.27037037037037037, + "acc_stderr": 0.02708037281514565, + "acc_norm": 0.27037037037037037, + "acc_norm_stderr": 0.02708037281514565 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.25165562913907286, + "acc_stderr": 0.035433042343899844, + "acc_norm": 0.25165562913907286, + "acc_norm_stderr": 0.035433042343899844 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.43283582089552236, + "acc_stderr": 0.03503490923673282, + "acc_norm": 0.43283582089552236, + "acc_norm_stderr": 0.03503490923673282 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3352601156069364, + "acc_stderr": 0.03599586301247079, + "acc_norm": 0.3352601156069364, + "acc_norm_stderr": 0.03599586301247079 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30158730158730157, + "acc_stderr": 0.023636975996101813, + "acc_norm": 0.30158730158730157, + "acc_norm_stderr": 0.023636975996101813 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2847222222222222, + "acc_stderr": 0.03773809990686934, + "acc_norm": 0.2847222222222222, 
+ "acc_norm_stderr": 0.03773809990686934 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.3468208092485549, + "acc_stderr": 0.025624723994030454, + "acc_norm": 0.3468208092485549, + "acc_norm_stderr": 0.025624723994030454 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.39263803680981596, + "acc_stderr": 0.03836740907831027, + "acc_norm": 0.39263803680981596, + "acc_norm_stderr": 0.03836740907831027 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.026571483480719967, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.026571483480719967 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.38341968911917096, + "acc_stderr": 0.03508984236295341, + "acc_norm": 0.38341968911917096, + "acc_norm_stderr": 0.03508984236295341 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.30701754385964913, + "acc_stderr": 0.043391383225798594, + "acc_norm": 0.30701754385964913, + "acc_norm_stderr": 0.043391383225798594 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3743119266055046, + "acc_stderr": 0.020748959408988313, + "acc_norm": 0.3743119266055046, + "acc_norm_stderr": 0.020748959408988313 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.23015873015873015, + "acc_stderr": 0.037649508797906066, + "acc_norm": 0.23015873015873015, + "acc_norm_stderr": 0.037649508797906066 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.32679738562091504, + "acc_stderr": 0.02685729466328142, + "acc_norm": 
0.32679738562091504, + "acc_norm_stderr": 0.02685729466328142 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.47107438016528924, + "acc_stderr": 0.04556710331269498, + "acc_norm": 0.47107438016528924, + "acc_norm_stderr": 0.04556710331269498 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.03459777606810536, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.03459777606810536 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.32679738562091504, + "acc_stderr": 0.018975427920507226, + "acc_norm": 0.32679738562091504, + "acc_norm_stderr": 0.018975427920507226 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.30141843971631205, + "acc_stderr": 0.027374128882631157, + "acc_norm": 0.30141843971631205, + "acc_norm_stderr": 0.027374128882631157 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3392857142857143, + "acc_stderr": 0.044939490686135404, + "acc_norm": 0.3392857142857143, + "acc_norm_stderr": 0.044939490686135404 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.03141554629402543, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.03141554629402543 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.23, + "acc_stderr": 0.042295258468165065, + "acc_norm": 0.23, + "acc_norm_stderr": 0.042295258468165065 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.2867647058823529, + 
"acc_stderr": 0.02747227447323382, + "acc_norm": 0.2867647058823529, + "acc_norm_stderr": 0.02747227447323382 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.23673469387755103, + "acc_stderr": 0.02721283588407316, + "acc_norm": 0.23673469387755103, + "acc_norm_stderr": 0.02721283588407316 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.39662447257383965, + "acc_stderr": 0.031843998738112264, + "acc_norm": 0.39662447257383965, + "acc_norm_stderr": 0.031843998738112264 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2926988265971317, + "acc_stderr": 0.011620949195849535, + "acc_norm": 0.2926988265971317, + "acc_norm_stderr": 0.011620949195849535 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.03308611113236435, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.03308611113236435 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3090909090909091, + "acc_stderr": 0.036085410115739666, + "acc_norm": 0.3090909090909091, + "acc_norm_stderr": 0.036085410115739666 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.29008567931456547, + "mc1_stderr": 0.01588623687420952, + "mc2": 0.45430591754746863, + "mc2_stderr": 0.015765896646627423 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3069657615112161, + "acc_stderr": 0.015857588095362824, + "acc_norm": 0.35182998819362454, + "acc_norm_stderr": 0.01641820645121805 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + 
"harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + 
"harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "BM-K/stupid_model", + "model_sha": "880d642a665380933fb7c9a975649188c51be2f8", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/BM-K/yi-ko-6b-it-v1.0.0/result_2023-12-05 09:11:37.json b/BM-K/yi-ko-6b-it-v1.0.0/result_2023-12-05 09:11:37.json new file mode 100644 index 0000000000000000000000000000000000000000..850d698b6237fe6f907136d34c701997792ea682 --- /dev/null +++ b/BM-K/yi-ko-6b-it-v1.0.0/result_2023-12-05 09:11:37.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3660409556313993, + "acc_stderr": 0.014077223108470137, + "acc_norm": 0.4300341296928328, + "acc_norm_stderr": 0.014467631559137991 + }, + "harness|ko_hellaswag|10": { + "acc": 0.41894045010953995, + "acc_stderr": 0.0049237725818484955, + "acc_norm": 0.5630352519418442, + "acc_norm_stderr": 0.0049499693630176535 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.52046783625731, + "acc_stderr": 0.038316105328219316, + "acc_norm": 0.52046783625731, + "acc_norm_stderr": 0.038316105328219316 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5631067961165048, + "acc_stderr": 0.04911147107365777, + "acc_norm": 0.5631067961165048, + "acc_norm_stderr": 0.04911147107365777 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5542784163473818, + "acc_stderr": 0.017774297282479506, + "acc_norm": 0.5542784163473818, + "acc_norm_stderr": 0.017774297282479506 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.5037037037037037, + "acc_stderr": 0.043192236258113324, + "acc_norm": 0.5037037037037037, + "acc_norm_stderr": 0.043192236258113324 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + 
"acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4085106382978723, + "acc_stderr": 0.03213418026701576, + "acc_norm": 0.4085106382978723, + "acc_norm_stderr": 0.03213418026701576 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4036144578313253, + "acc_stderr": 0.03819486140758397, + "acc_norm": 0.4036144578313253, + "acc_norm_stderr": 0.03819486140758397 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5176848874598071, + "acc_stderr": 0.028380322849077138, + "acc_norm": 0.5176848874598071, + "acc_norm_stderr": 0.028380322849077138 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.47533632286995514, + "acc_stderr": 0.03351695167652628, + "acc_norm": 0.47533632286995514, + "acc_norm_stderr": 0.03351695167652628 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5419847328244275, + "acc_stderr": 0.04369802690578757, + "acc_norm": 0.5419847328244275, + "acc_norm_stderr": 0.04369802690578757 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6161616161616161, + "acc_stderr": 0.03464881675016337, + "acc_norm": 0.6161616161616161, + "acc_norm_stderr": 0.03464881675016337 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5448275862068965, + "acc_stderr": 0.04149886942192117, + "acc_norm": 0.5448275862068965, + "acc_norm_stderr": 0.04149886942192117 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.04389869956808778, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.04389869956808778 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5, + "acc_stderr": 0.032478490123081544, + "acc_norm": 0.5, + "acc_norm_stderr": 0.032478490123081544 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 
0.4794871794871795, + "acc_stderr": 0.025329663163489943, + "acc_norm": 0.4794871794871795, + "acc_norm_stderr": 0.025329663163489943 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.62, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.62, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.04766075165356461, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.04766075165356461 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4039408866995074, + "acc_stderr": 0.03452453903822039, + "acc_norm": 0.4039408866995074, + "acc_norm_stderr": 0.03452453903822039 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4806451612903226, + "acc_stderr": 0.0284226874043121, + "acc_norm": 0.4806451612903226, + "acc_norm_stderr": 0.0284226874043121 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7008547008547008, + "acc_stderr": 0.029996951858349472, + "acc_norm": 0.7008547008547008, + "acc_norm_stderr": 0.029996951858349472 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.47924528301886793, + "acc_stderr": 0.030746349975723456, + "acc_norm": 0.47924528301886793, + "acc_norm_stderr": 0.030746349975723456 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5, + "acc_stderr": 0.04789131426105757, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04789131426105757 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.027940457136228405, + "acc_norm": 0.3, + "acc_norm_stderr": 0.027940457136228405 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.03802039760107903, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.03802039760107903 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6268656716417911, + "acc_stderr": 
0.03419832608176007, + "acc_norm": 0.6268656716417911, + "acc_norm_stderr": 0.03419832608176007 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.37572254335260113, + "acc_stderr": 0.036928207672648664, + "acc_norm": 0.37572254335260113, + "acc_norm_stderr": 0.036928207672648664 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3492063492063492, + "acc_stderr": 0.02455229220934265, + "acc_norm": 0.3492063492063492, + "acc_norm_stderr": 0.02455229220934265 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.04174752578923185, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.04174752578923185 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.64, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.64, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.47109826589595377, + "acc_stderr": 0.026874085883518348, + "acc_norm": 0.47109826589595377, + "acc_norm_stderr": 0.026874085883518348 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4601226993865031, + "acc_stderr": 0.0391585729143697, + "acc_norm": 0.4601226993865031, + "acc_norm_stderr": 0.0391585729143697 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.49691358024691357, + "acc_stderr": 0.027820214158594384, + "acc_norm": 0.49691358024691357, + "acc_norm_stderr": 0.027820214158594384 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5284974093264249, + "acc_stderr": 0.036025735712884414, + "acc_norm": 0.5284974093264249, + "acc_norm_stderr": 0.036025735712884414 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3333333333333333, 
+ "acc_stderr": 0.04434600701584925, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04434600701584925 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6422018348623854, + "acc_stderr": 0.020552060784827818, + "acc_norm": 0.6422018348623854, + "acc_norm_stderr": 0.020552060784827818 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.24603174603174602, + "acc_stderr": 0.03852273364924315, + "acc_norm": 0.24603174603174602, + "acc_norm_stderr": 0.03852273364924315 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5620915032679739, + "acc_stderr": 0.02840830202033269, + "acc_norm": 0.5620915032679739, + "acc_norm_stderr": 0.02840830202033269 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6446280991735537, + "acc_stderr": 0.04369236326573981, + "acc_norm": 0.6446280991735537, + "acc_norm_stderr": 0.04369236326573981 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5131578947368421, + "acc_stderr": 0.04067533136309173, + "acc_norm": 0.5131578947368421, + "acc_norm_stderr": 0.04067533136309173 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4395424836601307, + "acc_stderr": 0.020079420408087918, + "acc_norm": 0.4395424836601307, + "acc_norm_stderr": 0.020079420408087918 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3404255319148936, + "acc_stderr": 0.02826765748265014, + "acc_norm": 0.3404255319148936, + "acc_norm_stderr": 0.02826765748265014 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2767857142857143, + "acc_stderr": 0.04246624336697624, + "acc_norm": 0.2767857142857143, + "acc_norm_stderr": 0.04246624336697624 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.33796296296296297, + "acc_stderr": 0.032259413526312945, + "acc_norm": 0.33796296296296297, + "acc_norm_stderr": 0.032259413526312945 + }, + 
"harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2346368715083799, + "acc_stderr": 0.014173044098303656, + "acc_norm": 0.2346368715083799, + "acc_norm_stderr": 0.014173044098303656 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.57, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.57, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.46691176470588236, + "acc_stderr": 0.030306257722468317, + "acc_norm": 0.46691176470588236, + "acc_norm_stderr": 0.030306257722468317 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.47346938775510206, + "acc_stderr": 0.03196412734523272, + "acc_norm": 0.47346938775510206, + "acc_norm_stderr": 0.03196412734523272 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6075949367088608, + "acc_stderr": 0.0317847187456473, + "acc_norm": 0.6075949367088608, + "acc_norm_stderr": 0.0317847187456473 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.32333767926988266, + "acc_stderr": 0.011946565758447204, + "acc_norm": 0.32333767926988266, + "acc_norm_stderr": 0.011946565758447204 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.49019607843137253, + "acc_stderr": 0.03508637358630573, + "acc_norm": 0.49019607843137253, + "acc_norm_stderr": 0.03508637358630573 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6181818181818182, + "acc_stderr": 0.03793713171165636, + "acc_norm": 0.6181818181818182, + "acc_norm_stderr": 0.03793713171165636 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.26438188494492043, + "mc1_stderr": 0.015438211119522512, + "mc2": 0.40241254956351097, + "mc2_stderr": 0.015340553744152264 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5112160566706021, + "acc_stderr": 0.01718602846948929, + "acc_norm": 
0.5419126328217237, + "acc_norm_stderr": 0.017129852117911144 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + 
"harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "BM-K/yi-ko-6b-it-v1.0.0", + "model_sha": "1401792e5c974a79e0f6ccb7f060003d0d54e2e5", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/BM-K/yi-ko-6b-it-v1.0.3/result_2023-12-14 04:08:57.json b/BM-K/yi-ko-6b-it-v1.0.3/result_2023-12-14 04:08:57.json new file mode 100644 index 0000000000000000000000000000000000000000..28989edc70eba3c34454a421c031dda73801a7e9 --- /dev/null +++ b/BM-K/yi-ko-6b-it-v1.0.3/result_2023-12-14 04:08:57.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.28242320819112626, + "acc_stderr": 0.013155456884097218, + "acc_norm": 0.34044368600682595, + "acc_norm_stderr": 0.01384746051889298 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3498307110137423, + "acc_stderr": 0.004759416464201141, + "acc_norm": 0.43397729535949015, + "acc_norm_stderr": 0.004946089230153021 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.3157894736842105, + "acc_stderr": 0.035650796707083106, + "acc_norm": 0.3157894736842105, + "acc_norm_stderr": 0.035650796707083106 
+ }, + "harness|ko_mmlu_management|5": { + "acc": 0.18446601941747573, + "acc_stderr": 0.03840423627288276, + "acc_norm": 0.18446601941747573, + "acc_norm_stderr": 0.03840423627288276 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.24393358876117496, + "acc_stderr": 0.015357212665829489, + "acc_norm": 0.24393358876117496, + "acc_norm_stderr": 0.015357212665829489 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.24444444444444444, + "acc_stderr": 0.037125378336148665, + "acc_norm": 0.24444444444444444, + "acc_norm_stderr": 0.037125378336148665 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.25957446808510637, + "acc_stderr": 0.028659179374292323, + "acc_norm": 0.25957446808510637, + "acc_norm_stderr": 0.028659179374292323 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.25903614457831325, + "acc_stderr": 0.03410646614071854, + "acc_norm": 0.25903614457831325, + "acc_norm_stderr": 0.03410646614071854 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.20257234726688103, + "acc_stderr": 0.022827317491059686, + "acc_norm": 0.20257234726688103, + "acc_norm_stderr": 0.022827317491059686 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.30493273542600896, + "acc_stderr": 0.030898610882477515, + "acc_norm": 0.30493273542600896, + "acc_norm_stderr": 0.030898610882477515 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2748091603053435, + "acc_stderr": 0.03915345408847835, + "acc_norm": 0.2748091603053435, + "acc_norm_stderr": 0.03915345408847835 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.1919191919191919, + "acc_stderr": 0.028057791672989017, + "acc_norm": 0.1919191919191919, + "acc_norm_stderr": 
0.028057791672989017 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2620689655172414, + "acc_stderr": 0.03664666337225256, + "acc_norm": 0.2620689655172414, + "acc_norm_stderr": 0.03664666337225256 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.04280105837364395, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.04280105837364395 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.031124619309328177, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.031124619309328177 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.33076923076923076, + "acc_stderr": 0.023854795680971142, + "acc_norm": 0.33076923076923076, + "acc_norm_stderr": 0.023854795680971142 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.18, + "acc_stderr": 0.038612291966536955, + "acc_norm": 0.18, + "acc_norm_stderr": 0.038612291966536955 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.21296296296296297, + "acc_stderr": 0.039578354719809805, + "acc_norm": 0.21296296296296297, + "acc_norm_stderr": 0.039578354719809805 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.17733990147783252, + "acc_stderr": 0.026874337276808342, + "acc_norm": 0.17733990147783252, + "acc_norm_stderr": 0.026874337276808342 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3064516129032258, + "acc_stderr": 0.026226485652553873, + "acc_norm": 0.3064516129032258, + "acc_norm_stderr": 0.026226485652553873 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.24358974358974358, + "acc_stderr": 0.028120966503914404, + "acc_norm": 0.24358974358974358, + "acc_norm_stderr": 0.028120966503914404 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2339622641509434, + "acc_stderr": 
0.02605529690115292, + "acc_norm": 0.2339622641509434, + "acc_norm_stderr": 0.02605529690115292 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.24545454545454545, + "acc_stderr": 0.041220665028782834, + "acc_norm": 0.24545454545454545, + "acc_norm_stderr": 0.041220665028782834 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2740740740740741, + "acc_stderr": 0.027195934804085622, + "acc_norm": 0.2740740740740741, + "acc_norm_stderr": 0.027195934804085622 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.32450331125827814, + "acc_stderr": 0.03822746937658754, + "acc_norm": 0.32450331125827814, + "acc_norm_stderr": 0.03822746937658754 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.31840796019900497, + "acc_stderr": 0.032941184790540964, + "acc_norm": 0.31840796019900497, + "acc_norm_stderr": 0.032941184790540964 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2774566473988439, + "acc_stderr": 0.03414014007044036, + "acc_norm": 0.2774566473988439, + "acc_norm_stderr": 0.03414014007044036 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.20899470899470898, + "acc_stderr": 0.02094048156533483, + "acc_norm": 0.20899470899470898, + "acc_norm_stderr": 0.02094048156533483 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.24305555555555555, + "acc_stderr": 0.03586879280080343, + "acc_norm": 0.24305555555555555, + "acc_norm_stderr": 0.03586879280080343 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2514450867052023, + "acc_stderr": 0.02335736578587403, + "acc_norm": 0.2514450867052023, + "acc_norm_stderr": 0.02335736578587403 + }, + "harness|ko_mmlu_logical_fallacies|5": { + 
"acc": 0.25766871165644173, + "acc_stderr": 0.03436150827846917, + "acc_norm": 0.25766871165644173, + "acc_norm_stderr": 0.03436150827846917 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2345679012345679, + "acc_stderr": 0.02357688174400572, + "acc_norm": 0.2345679012345679, + "acc_norm_stderr": 0.02357688174400572 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.37823834196891193, + "acc_stderr": 0.034998072761933376, + "acc_norm": 0.37823834196891193, + "acc_norm_stderr": 0.034998072761933376 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.04227054451232199, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.04227054451232199 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.22568807339449543, + "acc_stderr": 0.017923087667803053, + "acc_norm": 0.22568807339449543, + "acc_norm_stderr": 0.017923087667803053 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.03932537680392871, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.03932537680392871 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3104575163398693, + "acc_stderr": 0.026493033225145894, + "acc_norm": 0.3104575163398693, + "acc_norm_stderr": 0.026493033225145894 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2396694214876033, + "acc_stderr": 0.038968789850704164, + "acc_norm": 0.2396694214876033, + "acc_norm_stderr": 0.038968789850704164 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.24342105263157895, + "acc_stderr": 0.034923496688842384, + "acc_norm": 0.24342105263157895, + "acc_norm_stderr": 0.034923496688842384 + }, + 
"harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2238562091503268, + "acc_stderr": 0.016863008585416617, + "acc_norm": 0.2238562091503268, + "acc_norm_stderr": 0.016863008585416617 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.22340425531914893, + "acc_stderr": 0.02484792135806396, + "acc_norm": 0.22340425531914893, + "acc_norm_stderr": 0.02484792135806396 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25, + "acc_stderr": 0.04109974682633932, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04109974682633932 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.0340470532865388, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.0340470532865388 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23798882681564246, + "acc_stderr": 0.014242630070574892, + "acc_norm": 0.23798882681564246, + "acc_norm_stderr": 0.014242630070574892 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4338235294117647, + "acc_stderr": 0.03010563657001664, + "acc_norm": 0.4338235294117647, + "acc_norm_stderr": 0.03010563657001664 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2816326530612245, + "acc_stderr": 0.028795185574291268, + "acc_norm": 0.2816326530612245, + "acc_norm_stderr": 0.028795185574291268 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.3037974683544304, + "acc_stderr": 0.029936696387138605, + "acc_norm": 0.3037974683544304, + "acc_norm_stderr": 0.029936696387138605 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.25358539765319427, + "acc_stderr": 0.011111715336101129, + "acc_norm": 
0.25358539765319427, + "acc_norm_stderr": 0.011111715336101129 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.03132179803083291, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.03132179803083291 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3212121212121212, + "acc_stderr": 0.036462049632538115, + "acc_norm": 0.3212121212121212, + "acc_norm_stderr": 0.036462049632538115 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2839657282741738, + "mc1_stderr": 0.01578537085839671, + "mc2": 0.44826493419395586, + "mc2_stderr": 0.015501611237277205 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2809917355371901, + "acc_stderr": 0.015453559655458275, + "acc_norm": 0.38016528925619836, + "acc_norm_stderr": 0.016689333596980133 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "BM-K/yi-ko-6b-it-v1.0.3", + "model_sha": "b52413e092bbe1a6319d94569f07891c2d0c95f0", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/BioMistral/BioMistral-7B/result_2024-05-30 01:33:58.json b/BioMistral/BioMistral-7B/result_2024-05-30 01:33:58.json new file mode 100644 index 
0000000000000000000000000000000000000000..afaf87f0bde3ff0c2223672aec19ed18eabbd29d --- /dev/null +++ b/BioMistral/BioMistral-7B/result_2024-05-30 01:33:58.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.257679180887372, + "acc_stderr": 0.012780770562768416, + "acc_norm": 0.3122866894197952, + "acc_norm_stderr": 0.013542598541688065 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3229436367257518, + "acc_stderr": 0.004666457279979418, + "acc_norm": 0.39255128460466043, + "acc_norm_stderr": 0.004873203269366306 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.34502923976608185, + "acc_stderr": 0.036459813773888065, + "acc_norm": 0.34502923976608185, + "acc_norm_stderr": 0.036459813773888065 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4368932038834951, + "acc_stderr": 0.04911147107365778, + "acc_norm": 0.4368932038834951, + "acc_norm_stderr": 0.04911147107365778 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3780332056194125, + "acc_stderr": 0.017339844462104625, + "acc_norm": 0.3780332056194125, + "acc_norm_stderr": 0.017339844462104625 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3037037037037037, + "acc_stderr": 0.039725528847851355, + "acc_norm": 0.3037037037037037, + "acc_norm_stderr": 0.039725528847851355 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.28085106382978725, + "acc_stderr": 0.02937917046412482, + "acc_norm": 0.28085106382978725, + "acc_norm_stderr": 0.02937917046412482 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3373493975903614, + "acc_stderr": 0.03680783690727581, + "acc_norm": 0.3373493975903614, + "acc_norm_stderr": 0.03680783690727581 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3954983922829582, + "acc_stderr": 0.027770918531427838, + "acc_norm": 0.3954983922829582, + "acc_norm_stderr": 
0.027770918531427838 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.34977578475336324, + "acc_stderr": 0.03200736719484503, + "acc_norm": 0.34977578475336324, + "acc_norm_stderr": 0.03200736719484503 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3969465648854962, + "acc_stderr": 0.04291135671009224, + "acc_norm": 0.3969465648854962, + "acc_norm_stderr": 0.04291135671009224 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.4292929292929293, + "acc_stderr": 0.03526552724601199, + "acc_norm": 0.4292929292929293, + "acc_norm_stderr": 0.03526552724601199 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4, + "acc_stderr": 0.04082482904638628, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04082482904638628 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.30392156862745096, + "acc_stderr": 0.045766654032077636, + "acc_norm": 0.30392156862745096, + "acc_norm_stderr": 0.045766654032077636 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.40336134453781514, + "acc_stderr": 0.031866081214088314, + "acc_norm": 0.40336134453781514, + "acc_norm_stderr": 0.031866081214088314 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.40512820512820513, + "acc_stderr": 0.024890471769938145, + "acc_norm": 0.40512820512820513, + "acc_norm_stderr": 0.024890471769938145 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 
0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.37438423645320196, + "acc_stderr": 0.03405155380561952, + "acc_norm": 0.37438423645320196, + "acc_norm_stderr": 0.03405155380561952 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.36774193548387096, + "acc_stderr": 0.027430866579973474, + "acc_norm": 0.36774193548387096, + "acc_norm_stderr": 0.027430866579973474 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5598290598290598, + "acc_stderr": 0.0325207417206305, + "acc_norm": 0.5598290598290598, + "acc_norm_stderr": 0.0325207417206305 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3886792452830189, + "acc_stderr": 0.030000485448675986, + "acc_norm": 0.3886792452830189, + "acc_norm_stderr": 0.030000485448675986 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.44545454545454544, + "acc_stderr": 0.047605488214603246, + "acc_norm": 0.44545454545454544, + "acc_norm_stderr": 0.047605488214603246 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.34444444444444444, + "acc_stderr": 0.028972648884844267, + "acc_norm": 0.34444444444444444, + "acc_norm_stderr": 0.028972648884844267 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3443708609271523, + "acc_stderr": 0.038796870240733264, + "acc_norm": 0.3443708609271523, + "acc_norm_stderr": 0.038796870240733264 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.4577114427860697, + "acc_stderr": 0.035228658640995975, + "acc_norm": 0.4577114427860697, + "acc_norm_stderr": 0.035228658640995975 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3815028901734104, + "acc_stderr": 0.03703851193099521, + "acc_norm": 0.3815028901734104, + "acc_norm_stderr": 0.03703851193099521 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.02467786284133278, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.02467786284133278 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 
0.3333333333333333, + "acc_stderr": 0.03942082639927213, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.03942082639927213 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.44508670520231214, + "acc_stderr": 0.02675625512966377, + "acc_norm": 0.44508670520231214, + "acc_norm_stderr": 0.02675625512966377 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.34355828220858897, + "acc_stderr": 0.03731133519673893, + "acc_norm": 0.34355828220858897, + "acc_norm_stderr": 0.03731133519673893 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.02686949074481525, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.02686949074481525 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.44559585492227977, + "acc_stderr": 0.0358701498607566, + "acc_norm": 0.44559585492227977, + "acc_norm_stderr": 0.0358701498607566 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.041857744240220575, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.041857744240220575 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3798165137614679, + "acc_stderr": 0.020808825617866244, + "acc_norm": 0.3798165137614679, + "acc_norm_stderr": 0.020808825617866244 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3492063492063492, + "acc_stderr": 0.04263906892795132, + "acc_norm": 0.3492063492063492, + "acc_norm_stderr": 0.04263906892795132 + }, + "harness|ko_mmlu_nutrition|5": { + 
"acc": 0.4117647058823529, + "acc_stderr": 0.02818059632825929, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.02818059632825929 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5619834710743802, + "acc_stderr": 0.045291468044357915, + "acc_norm": 0.5619834710743802, + "acc_norm_stderr": 0.045291468044357915 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.34868421052631576, + "acc_stderr": 0.038781398887976125, + "acc_norm": 0.34868421052631576, + "acc_norm_stderr": 0.038781398887976125 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3284313725490196, + "acc_stderr": 0.018999707383162666, + "acc_norm": 0.3284313725490196, + "acc_norm_stderr": 0.018999707383162666 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2730496453900709, + "acc_stderr": 0.026577860943307857, + "acc_norm": 0.2730496453900709, + "acc_norm_stderr": 0.026577860943307857 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2767857142857143, + "acc_stderr": 0.04246624336697627, + "acc_norm": 0.2767857142857143, + "acc_norm_stderr": 0.04246624336697627 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.03350991604696043, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.03350991604696043 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23910614525139665, + "acc_stderr": 0.014265554192331149, + "acc_norm": 0.23910614525139665, + "acc_norm_stderr": 0.014265554192331149 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + 
"harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4227941176470588, + "acc_stderr": 0.030008562845003483, + "acc_norm": 0.4227941176470588, + "acc_norm_stderr": 0.030008562845003483 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3469387755102041, + "acc_stderr": 0.030472526026726492, + "acc_norm": 0.3469387755102041, + "acc_norm_stderr": 0.030472526026726492 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.4177215189873418, + "acc_stderr": 0.032103530322412685, + "acc_norm": 0.4177215189873418, + "acc_norm_stderr": 0.032103530322412685 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3005215123859192, + "acc_stderr": 0.011709918883039124, + "acc_norm": 0.3005215123859192, + "acc_norm_stderr": 0.011709918883039124 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.3872549019607843, + "acc_stderr": 0.03418931233833344, + "acc_norm": 0.3872549019607843, + "acc_norm_stderr": 0.03418931233833344 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.43636363636363634, + "acc_stderr": 0.03872592983524753, + "acc_norm": 0.43636363636363634, + "acc_norm_stderr": 0.03872592983524753 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3072215422276622, + "mc1_stderr": 0.016150201321323002, + "mc2": 0.4721418472000992, + "mc2_stderr": 0.01626625866283201 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.27863046044864226, + "acc_stderr": 0.01541373949434568, + "acc_norm": 0.3825265643447462, + "acc_norm_stderr": 0.016709165387228803 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + 
"harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + 
"harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "BioMistral/BioMistral-7B", + "model_sha": "9a11e1ffa817c211cbb52ee1fb312dc6b61b40a5", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Byungchae/k2s3_test_0000/result_2024-01-30 01:48:19.json b/Byungchae/k2s3_test_0000/result_2024-01-30 01:48:19.json new file mode 100644 index 0000000000000000000000000000000000000000..d0a1cb6b2835e562daad9a409feb97a3da7ffb6c --- /dev/null +++ b/Byungchae/k2s3_test_0000/result_2024-01-30 01:48:19.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.30119453924914674, + "acc_stderr": 0.013406741767847615, + "acc_norm": 0.3438566552901024, + "acc_norm_stderr": 0.013880644570156215 + }, + "harness|ko_hellaswag|10": { + "acc": 0.34007169886476796, + "acc_stderr": 0.004727648057897938, + "acc_norm": 0.4106751643098984, + "acc_norm_stderr": 0.004909509538525168 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4327485380116959, + "acc_stderr": 0.03799978644370607, + "acc_norm": 0.4327485380116959, + "acc_norm_stderr": 0.03799978644370607 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.3883495145631068, + "acc_stderr": 0.0482572933735639, + "acc_norm": 0.3883495145631068, + "acc_norm_stderr": 0.0482572933735639 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4355044699872286, + "acc_stderr": 0.0177305899279266, + "acc_norm": 0.4355044699872286, + "acc_norm_stderr": 0.0177305899279266 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34074074074074073, + "acc_stderr": 0.040943762699967946, + "acc_norm": 0.34074074074074073, + 
"acc_norm_stderr": 0.040943762699967946 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.30638297872340425, + "acc_stderr": 0.030135906478517563, + "acc_norm": 0.30638297872340425, + "acc_norm_stderr": 0.030135906478517563 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3614457831325301, + "acc_stderr": 0.0374005938202932, + "acc_norm": 0.3614457831325301, + "acc_norm_stderr": 0.0374005938202932 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3762057877813505, + "acc_stderr": 0.02751392568354943, + "acc_norm": 0.3762057877813505, + "acc_norm_stderr": 0.02751392568354943 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.38565022421524664, + "acc_stderr": 0.03266842214289201, + "acc_norm": 0.38565022421524664, + "acc_norm_stderr": 0.03266842214289201 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4732824427480916, + "acc_stderr": 0.04379024936553893, + "acc_norm": 0.4732824427480916, + "acc_norm_stderr": 0.04379024936553893 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.4393939393939394, + "acc_stderr": 0.035360859475294805, + "acc_norm": 0.4393939393939394, + "acc_norm_stderr": 0.035360859475294805 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4482758620689655, + "acc_stderr": 0.04144311810878151, + "acc_norm": 0.4482758620689655, + "acc_norm_stderr": 0.04144311810878151 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.04220773659171452, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.04220773659171452 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.36134453781512604, + "acc_stderr": 0.031204691225150016, + 
"acc_norm": 0.36134453781512604, + "acc_norm_stderr": 0.031204691225150016 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.34102564102564104, + "acc_stderr": 0.02403548967633506, + "acc_norm": 0.34102564102564104, + "acc_norm_stderr": 0.02403548967633506 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.04793724854411019, + "acc_norm": 0.35, + "acc_norm_stderr": 0.04793724854411019 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.04803752235190192, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.04803752235190192 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3497536945812808, + "acc_stderr": 0.03355400904969565, + "acc_norm": 0.3497536945812808, + "acc_norm_stderr": 0.03355400904969565 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4161290322580645, + "acc_stderr": 0.02804098138076155, + "acc_norm": 0.4161290322580645, + "acc_norm_stderr": 0.02804098138076155 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5897435897435898, + "acc_stderr": 0.03222414045241107, + "acc_norm": 0.5897435897435898, + "acc_norm_stderr": 0.03222414045241107 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.38113207547169814, + "acc_stderr": 0.029890609686286623, + "acc_norm": 0.38113207547169814, + "acc_norm_stderr": 0.029890609686286623 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4636363636363636, + "acc_stderr": 0.047764491623961985, + "acc_norm": 0.4636363636363636, + "acc_norm_stderr": 0.047764491623961985 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.026719240783712156, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.026719240783712156 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 
0.304635761589404, + "acc_stderr": 0.03757949922943343, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.03757949922943343 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5373134328358209, + "acc_stderr": 0.03525675167467974, + "acc_norm": 0.5373134328358209, + "acc_norm_stderr": 0.03525675167467974 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3468208092485549, + "acc_stderr": 0.03629146670159663, + "acc_norm": 0.3468208092485549, + "acc_norm_stderr": 0.03629146670159663 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.29894179894179895, + "acc_stderr": 0.02357760479165581, + "acc_norm": 0.29894179894179895, + "acc_norm_stderr": 0.02357760479165581 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3125, + "acc_stderr": 0.038760854559127644, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.038760854559127644 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.65, + "acc_stderr": 0.04793724854411018, + "acc_norm": 0.65, + "acc_norm_stderr": 0.04793724854411018 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4277456647398844, + "acc_stderr": 0.026636539741116072, + "acc_norm": 0.4277456647398844, + "acc_norm_stderr": 0.026636539741116072 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.36809815950920244, + "acc_stderr": 0.03789213935838396, + "acc_norm": 0.36809815950920244, + "acc_norm_stderr": 0.03789213935838396 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3734567901234568, + "acc_stderr": 0.026915003011380154, + "acc_norm": 0.3734567901234568, + "acc_norm_stderr": 0.026915003011380154 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 
0.40932642487046633, + "acc_stderr": 0.03548608168860806, + "acc_norm": 0.40932642487046633, + "acc_norm_stderr": 0.03548608168860806 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.04185774424022056, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.04185774424022056 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3669724770642202, + "acc_stderr": 0.020664675659520532, + "acc_norm": 0.3669724770642202, + "acc_norm_stderr": 0.020664675659520532 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.29365079365079366, + "acc_stderr": 0.04073524322147126, + "acc_norm": 0.29365079365079366, + "acc_norm_stderr": 0.04073524322147126 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4215686274509804, + "acc_stderr": 0.02827549015679143, + "acc_norm": 0.4215686274509804, + "acc_norm_stderr": 0.02827549015679143 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6115702479338843, + "acc_stderr": 0.04449270350068382, + "acc_norm": 0.6115702479338843, + "acc_norm_stderr": 0.04449270350068382 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.375, + "acc_stderr": 0.039397364351956274, + "acc_norm": 0.375, + "acc_norm_stderr": 0.039397364351956274 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.018635594034423983, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.018635594034423983 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.30851063829787234, + "acc_stderr": 0.027553366165101373, + "acc_norm": 0.30851063829787234, + "acc_norm_stderr": 0.027553366165101373 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2767857142857143, + "acc_stderr": 0.042466243366976235, + "acc_norm": 0.2767857142857143, + "acc_norm_stderr": 0.042466243366976235 + }, + 
"harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.24074074074074073, + "acc_stderr": 0.029157522184605617, + "acc_norm": 0.24074074074074073, + "acc_norm_stderr": 0.029157522184605617 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2435754189944134, + "acc_stderr": 0.01435591196476786, + "acc_norm": 0.2435754189944134, + "acc_norm_stderr": 0.01435591196476786 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.27941176470588236, + "acc_stderr": 0.02725720260611495, + "acc_norm": 0.27941176470588236, + "acc_norm_stderr": 0.02725720260611495 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.46938775510204084, + "acc_stderr": 0.031949171367580624, + "acc_norm": 0.46938775510204084, + "acc_norm_stderr": 0.031949171367580624 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.45147679324894513, + "acc_stderr": 0.032393600173974704, + "acc_norm": 0.45147679324894513, + "acc_norm_stderr": 0.032393600173974704 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.303129074315515, + "acc_stderr": 0.011738669951254296, + "acc_norm": 0.303129074315515, + "acc_norm_stderr": 0.011738669951254296 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.3431372549019608, + "acc_stderr": 0.03332139944668086, + "acc_norm": 0.3431372549019608, + "acc_norm_stderr": 0.03332139944668086 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.38181818181818183, + "acc_stderr": 0.03793713171165634, + "acc_norm": 0.38181818181818183, + "acc_norm_stderr": 0.03793713171165634 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3011015911872705, + "mc1_stderr": 0.016058999026100612, + 
"mc2": 0.45460567276547886, + "mc2_stderr": 0.01613634516859894 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.29043683589138136, + "acc_stderr": 0.015607602569814633, + "acc_norm": 0.31641086186540734, + "acc_norm_stderr": 0.015989617951065477 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + 
"harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Byungchae/k2s3_test_0000", + "model_sha": "e93e87584402771cea9237a5cd119875e9b217f8", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Byungchae/k2s3_test_0001/result_2024-01-17 05:50:02.json b/Byungchae/k2s3_test_0001/result_2024-01-17 05:50:02.json new file mode 100644 index 0000000000000000000000000000000000000000..fb54a9a28f8893b5fa91176bb141acb3ca6091c5 --- /dev/null +++ b/Byungchae/k2s3_test_0001/result_2024-01-17 05:50:02.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2986348122866894, + "acc_stderr": 0.01337407861506876, + "acc_norm": 0.3370307167235495, + "acc_norm_stderr": 0.013813476652902279 + }, + "harness|ko_hellaswag|10": { + "acc": 0.338478390758813, + "acc_stderr": 0.004722250355106692, + "acc_norm": 0.41147181836287594, + "acc_norm_stderr": 0.004910946424771614 + }, + 
"harness|ko_mmlu_world_religions|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.03811079669833531, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.03811079669833531 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4174757281553398, + "acc_stderr": 0.04882840548212238, + "acc_norm": 0.4174757281553398, + "acc_norm_stderr": 0.04882840548212238 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4240102171136654, + "acc_stderr": 0.01767226332908423, + "acc_norm": 0.4240102171136654, + "acc_norm_stderr": 0.01767226332908423 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.31851851851851853, + "acc_stderr": 0.04024778401977111, + "acc_norm": 0.31851851851851853, + "acc_norm_stderr": 0.04024778401977111 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3276595744680851, + "acc_stderr": 0.03068302084323101, + "acc_norm": 0.3276595744680851, + "acc_norm_stderr": 0.03068302084323101 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3855421686746988, + "acc_stderr": 0.037891344246115476, + "acc_norm": 0.3855421686746988, + "acc_norm_stderr": 0.037891344246115476 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3633440514469453, + "acc_stderr": 0.027316847674192717, + "acc_norm": 0.3633440514469453, + "acc_norm_stderr": 0.027316847674192717 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3452914798206278, + "acc_stderr": 0.03191100192835794, + "acc_norm": 0.3452914798206278, + "acc_norm_stderr": 0.03191100192835794 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4198473282442748, + "acc_stderr": 0.043285772152629715, + "acc_norm": 0.4198473282442748, + "acc_norm_stderr": 0.043285772152629715 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + 
"harness|ko_mmlu_high_school_geography|5": { + "acc": 0.4898989898989899, + "acc_stderr": 0.035616254886737454, + "acc_norm": 0.4898989898989899, + "acc_norm_stderr": 0.035616254886737454 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4482758620689655, + "acc_stderr": 0.04144311810878151, + "acc_norm": 0.4482758620689655, + "acc_norm_stderr": 0.04144311810878151 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.04389869956808778, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.04389869956808778 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.31932773109243695, + "acc_stderr": 0.030283995525884396, + "acc_norm": 0.31932773109243695, + "acc_norm_stderr": 0.030283995525884396 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.33589743589743587, + "acc_stderr": 0.02394672474156397, + "acc_norm": 0.33589743589743587, + "acc_norm_stderr": 0.02394672474156397 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.42592592592592593, + "acc_stderr": 0.0478034362693679, + "acc_norm": 0.42592592592592593, + "acc_norm_stderr": 0.0478034362693679 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3399014778325123, + "acc_stderr": 0.033327690684107895, + "acc_norm": 0.3399014778325123, + "acc_norm_stderr": 0.033327690684107895 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.38064516129032255, + "acc_stderr": 0.027621717832907046, + "acc_norm": 0.38064516129032255, + "acc_norm_stderr": 0.027621717832907046 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5683760683760684, + "acc_stderr": 0.0324483553531149, + "acc_norm": 
0.5683760683760684, + "acc_norm_stderr": 0.0324483553531149 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3584905660377358, + "acc_stderr": 0.029514703583981755, + "acc_norm": 0.3584905660377358, + "acc_norm_stderr": 0.029514703583981755 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24074074074074073, + "acc_stderr": 0.02606715922227579, + "acc_norm": 0.24074074074074073, + "acc_norm_stderr": 0.02606715922227579 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33112582781456956, + "acc_stderr": 0.038425817186598696, + "acc_norm": 0.33112582781456956, + "acc_norm_stderr": 0.038425817186598696 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5074626865671642, + "acc_stderr": 0.03535140084276719, + "acc_norm": 0.5074626865671642, + "acc_norm_stderr": 0.03535140084276719 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.32947976878612717, + "acc_stderr": 0.03583901754736411, + "acc_norm": 0.32947976878612717, + "acc_norm_stderr": 0.03583901754736411 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30423280423280424, + "acc_stderr": 0.02369541500946308, + "acc_norm": 0.30423280423280424, + "acc_norm_stderr": 0.02369541500946308 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2986111111111111, + "acc_stderr": 0.03827052357950756, + "acc_norm": 0.2986111111111111, + "acc_norm_stderr": 0.03827052357950756 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720685, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720685 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.65, + "acc_stderr": 0.04793724854411018, + "acc_norm": 0.65, + "acc_norm_stderr": 0.04793724854411018 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4421965317919075, + "acc_stderr": 
0.0267386036438074, + "acc_norm": 0.4421965317919075, + "acc_norm_stderr": 0.0267386036438074 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3496932515337423, + "acc_stderr": 0.03746668325470021, + "acc_norm": 0.3496932515337423, + "acc_norm_stderr": 0.03746668325470021 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3487654320987654, + "acc_stderr": 0.02651759772446501, + "acc_norm": 0.3487654320987654, + "acc_norm_stderr": 0.02651759772446501 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.41450777202072536, + "acc_stderr": 0.03555300319557673, + "acc_norm": 0.41450777202072536, + "acc_norm_stderr": 0.03555300319557673 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3157894736842105, + "acc_stderr": 0.04372748290278008, + "acc_norm": 0.3157894736842105, + "acc_norm_stderr": 0.04372748290278008 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3522935779816514, + "acc_stderr": 0.020480568843998993, + "acc_norm": 0.3522935779816514, + "acc_norm_stderr": 0.020480568843998993 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.03932537680392871, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.03932537680392871 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.027826109307283686, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.027826109307283686 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5702479338842975, + "acc_stderr": 0.04519082021319772, + "acc_norm": 0.5702479338842975, + "acc_norm_stderr": 0.04519082021319772 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 
0.4276315789473684, + "acc_stderr": 0.04026097083296557, + "acc_norm": 0.4276315789473684, + "acc_norm_stderr": 0.04026097083296557 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.30392156862745096, + "acc_stderr": 0.018607552131279834, + "acc_norm": 0.30392156862745096, + "acc_norm_stderr": 0.018607552131279834 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2872340425531915, + "acc_stderr": 0.026992199173064356, + "acc_norm": 0.2872340425531915, + "acc_norm_stderr": 0.026992199173064356 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2767857142857143, + "acc_stderr": 0.042466243366976235, + "acc_norm": 0.2767857142857143, + "acc_norm_stderr": 0.042466243366976235 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.028353212866863438, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.028353212866863438 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23798882681564246, + "acc_stderr": 0.014242630070574896, + "acc_norm": 0.23798882681564246, + "acc_norm_stderr": 0.014242630070574896 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.27205882352941174, + "acc_stderr": 0.027033041151681456, + "acc_norm": 0.27205882352941174, + "acc_norm_stderr": 0.027033041151681456 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4530612244897959, + "acc_stderr": 0.03186785930004129, + "acc_norm": 0.4530612244897959, + "acc_norm_stderr": 0.03186785930004129 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.41350210970464135, + "acc_stderr": 0.03205649904851859, + "acc_norm": 0.41350210970464135, + 
"acc_norm_stderr": 0.03205649904851859 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.29726205997392435, + "acc_stderr": 0.011673346173086045, + "acc_norm": 0.29726205997392435, + "acc_norm_stderr": 0.011673346173086045 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.3382352941176471, + "acc_stderr": 0.03320574612945431, + "acc_norm": 0.3382352941176471, + "acc_norm_stderr": 0.03320574612945431 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3393939393939394, + "acc_stderr": 0.03697442205031596, + "acc_norm": 0.3393939393939394, + "acc_norm_stderr": 0.03697442205031596 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.30599755201958384, + "mc1_stderr": 0.01613222972815506, + "mc2": 0.4662670134521324, + "mc2_stderr": 0.0162967834464091 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.26564344746162927, + "acc_stderr": 0.015185107107791262, + "acc_norm": 0.29279811097992914, + "acc_norm_stderr": 0.015644823205401334 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + 
"harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Byungchae/k2s3_test_0001", + "model_sha": "897d3fc6b52b59bcdc5f363c2c5d4fab602d4a01", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No 
newline at end of file diff --git a/Byungchae/k2s3_test_0002/result_2024-03-14 04:47:31.json b/Byungchae/k2s3_test_0002/result_2024-03-14 04:47:31.json new file mode 100644 index 0000000000000000000000000000000000000000..d29081a543c59e3ff1ac8ba015663d2a063c995b --- /dev/null +++ b/Byungchae/k2s3_test_0002/result_2024-03-14 04:47:31.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3302047781569966, + "acc_stderr": 0.013743085603760431, + "acc_norm": 0.3967576791808874, + "acc_norm_stderr": 0.014296513020180646 + }, + "harness|ko_hellaswag|10": { + "acc": 0.36964748058155744, + "acc_stderr": 0.004817227292240289, + "acc_norm": 0.4918342959569807, + "acc_norm_stderr": 0.004989115942570064 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5964912280701754, + "acc_stderr": 0.03762738699917057, + "acc_norm": 0.5964912280701754, + "acc_norm_stderr": 0.03762738699917057 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5825242718446602, + "acc_stderr": 0.048828405482122375, + "acc_norm": 0.5825242718446602, + "acc_norm_stderr": 0.048828405482122375 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5874840357598978, + "acc_stderr": 0.01760414910867192, + "acc_norm": 0.5874840357598978, + "acc_norm_stderr": 0.01760414910867192 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4222222222222222, + "acc_stderr": 0.04266763404099582, + "acc_norm": 0.4222222222222222, + "acc_norm_stderr": 0.04266763404099582 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.46382978723404256, + "acc_stderr": 0.032600385118357715, + "acc_norm": 0.46382978723404256, + "acc_norm_stderr": 0.032600385118357715 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.45180722891566266, + "acc_stderr": 0.03874371556587953, + "acc_norm": 0.45180722891566266, + "acc_norm_stderr": 
0.03874371556587953 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5562700964630225, + "acc_stderr": 0.02821768355665231, + "acc_norm": 0.5562700964630225, + "acc_norm_stderr": 0.02821768355665231 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4977578475336323, + "acc_stderr": 0.033557465352232634, + "acc_norm": 0.4977578475336323, + "acc_norm_stderr": 0.033557465352232634 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6030534351145038, + "acc_stderr": 0.04291135671009224, + "acc_norm": 0.6030534351145038, + "acc_norm_stderr": 0.04291135671009224 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.03358618145732523, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.03358618145732523 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.496551724137931, + "acc_stderr": 0.041665675771015785, + "acc_norm": 0.496551724137931, + "acc_norm_stderr": 0.041665675771015785 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.04440521906179328, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.04440521906179328 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6302521008403361, + "acc_stderr": 0.03135709599613591, + "acc_norm": 0.6302521008403361, + "acc_norm_stderr": 0.03135709599613591 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.48717948717948717, + "acc_stderr": 0.025342671293807264, + "acc_norm": 0.48717948717948717, + "acc_norm_stderr": 0.025342671293807264 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.57, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.57, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + 
"acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6203703703703703, + "acc_stderr": 0.04691521224077742, + "acc_norm": 0.6203703703703703, + "acc_norm_stderr": 0.04691521224077742 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4088669950738916, + "acc_stderr": 0.034590588158832314, + "acc_norm": 0.4088669950738916, + "acc_norm_stderr": 0.034590588158832314 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5483870967741935, + "acc_stderr": 0.028310500348568385, + "acc_norm": 0.5483870967741935, + "acc_norm_stderr": 0.028310500348568385 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7564102564102564, + "acc_stderr": 0.02812096650391438, + "acc_norm": 0.7564102564102564, + "acc_norm_stderr": 0.02812096650391438 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.47547169811320755, + "acc_stderr": 0.0307358222062056, + "acc_norm": 0.47547169811320755, + "acc_norm_stderr": 0.0307358222062056 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.04769300568972744, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.04769300568972744 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.029116617606083018, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.029116617606083018 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.038020397601079024, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.038020397601079024 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6567164179104478, + "acc_stderr": 0.03357379665433431, + "acc_norm": 0.6567164179104478, + "acc_norm_stderr": 0.03357379665433431 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.43352601156069365, + "acc_stderr": 0.03778621079092055, + "acc_norm": 0.43352601156069365, + "acc_norm_stderr": 0.03778621079092055 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 
0.37566137566137564, + "acc_stderr": 0.024942368931159788, + "acc_norm": 0.37566137566137564, + "acc_norm_stderr": 0.024942368931159788 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5138888888888888, + "acc_stderr": 0.04179596617581, + "acc_norm": 0.5138888888888888, + "acc_norm_stderr": 0.04179596617581 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.65, + "acc_stderr": 0.04793724854411018, + "acc_norm": 0.65, + "acc_norm_stderr": 0.04793724854411018 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.6011560693641619, + "acc_stderr": 0.02636243757454654, + "acc_norm": 0.6011560693641619, + "acc_norm_stderr": 0.02636243757454654 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5337423312883436, + "acc_stderr": 0.039194155450484096, + "acc_norm": 0.5337423312883436, + "acc_norm_stderr": 0.039194155450484096 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5061728395061729, + "acc_stderr": 0.027818623962583295, + "acc_norm": 0.5061728395061729, + "acc_norm_stderr": 0.027818623962583295 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6528497409326425, + "acc_stderr": 0.03435696168361356, + "acc_norm": 0.6528497409326425, + "acc_norm_stderr": 0.03435696168361356 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.39473684210526316, + "acc_stderr": 0.04598188057816542, + "acc_norm": 0.39473684210526316, + "acc_norm_stderr": 0.04598188057816542 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6055045871559633, + "acc_stderr": 0.020954642108587475, + "acc_norm": 0.6055045871559633, + "acc_norm_stderr": 0.020954642108587475 + }, + "harness|ko_mmlu_formal_logic|5": { + 
"acc": 0.38095238095238093, + "acc_stderr": 0.043435254289490965, + "acc_norm": 0.38095238095238093, + "acc_norm_stderr": 0.043435254289490965 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5032679738562091, + "acc_stderr": 0.028629305194003543, + "acc_norm": 0.5032679738562091, + "acc_norm_stderr": 0.028629305194003543 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.52, + "acc_stderr": 0.05021167315686779, + "acc_norm": 0.52, + "acc_norm_stderr": 0.05021167315686779 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7107438016528925, + "acc_stderr": 0.041391127276354626, + "acc_norm": 0.7107438016528925, + "acc_norm_stderr": 0.041391127276354626 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5526315789473685, + "acc_stderr": 0.0404633688397825, + "acc_norm": 0.5526315789473685, + "acc_norm_stderr": 0.0404633688397825 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.45751633986928103, + "acc_stderr": 0.020154685712590898, + "acc_norm": 0.45751633986928103, + "acc_norm_stderr": 0.020154685712590898 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3475177304964539, + "acc_stderr": 0.028406627809590947, + "acc_norm": 0.3475177304964539, + "acc_norm_stderr": 0.028406627809590947 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.36607142857142855, + "acc_stderr": 0.04572372358737431, + "acc_norm": 0.36607142857142855, + "acc_norm_stderr": 0.04572372358737431 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5462962962962963, + "acc_stderr": 0.03395322726375798, + "acc_norm": 0.5462962962962963, + "acc_norm_stderr": 0.03395322726375798 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.21340782122905028, + "acc_stderr": 0.01370285993219609, + "acc_norm": 0.21340782122905028, + "acc_norm_stderr": 0.01370285993219609 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.049999999999999996, + "acc_norm": 0.45, + "acc_norm_stderr": 0.049999999999999996 + }, 
+ "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.65, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.65, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4338235294117647, + "acc_stderr": 0.030105636570016636, + "acc_norm": 0.4338235294117647, + "acc_norm_stderr": 0.030105636570016636 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5918367346938775, + "acc_stderr": 0.03146465712827424, + "acc_norm": 0.5918367346938775, + "acc_norm_stderr": 0.03146465712827424 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6455696202531646, + "acc_stderr": 0.0311373042971858, + "acc_norm": 0.6455696202531646, + "acc_norm_stderr": 0.0311373042971858 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3813559322033898, + "acc_stderr": 0.012405509401888124, + "acc_norm": 0.3813559322033898, + "acc_norm_stderr": 0.012405509401888124 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.553921568627451, + "acc_stderr": 0.034888454513049734, + "acc_norm": 0.553921568627451, + "acc_norm_stderr": 0.034888454513049734 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5878787878787879, + "acc_stderr": 0.03843566993588718, + "acc_norm": 0.5878787878787879, + "acc_norm_stderr": 0.03843566993588718 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.26193390452876375, + "mc1_stderr": 0.015392118805015018, + "mc2": 0.4183344178888925, + "mc2_stderr": 0.016052881125973823 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.47107438016528924, + "acc_stderr": 0.017161563949916345, + "acc_norm": 0.5360094451003542, + "acc_norm_stderr": 0.01714571536548666 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + 
"harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + 
"harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Byungchae/k2s3_test_0002", + "model_sha": "db1881f05517a996d49eed440146b2eece2b31ff", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Cartinoe5930/KoRAE-13b-DPO/result_2023-11-28 08:26:01.json b/Cartinoe5930/KoRAE-13b-DPO/result_2023-11-28 08:26:01.json new file mode 100644 index 0000000000000000000000000000000000000000..c5b865a63bf20f4c69ea2c2e10929cb36ccd84d9 --- /dev/null +++ b/Cartinoe5930/KoRAE-13b-DPO/result_2023-11-28 08:26:01.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.39078498293515357, + "acc_stderr": 0.014258563880513785, + "acc_norm": 0.46501706484641636, + "acc_norm_stderr": 0.01457558392201967 + }, + "harness|ko_hellaswag|10": { + "acc": 0.42362079267078273, + "acc_stderr": 0.004931219148182245, + "acc_norm": 0.5753833897629954, + "acc_norm_stderr": 0.004932745013072709 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5146198830409356, + "acc_stderr": 0.038331852752130254, + "acc_norm": 0.5146198830409356, + "acc_norm_stderr": 0.038331852752130254 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.46601941747572817, + "acc_stderr": 0.04939291447273481, + "acc_norm": 0.46601941747572817, + "acc_norm_stderr": 0.04939291447273481 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5325670498084292, + "acc_stderr": 0.017841995750520874, + "acc_norm": 0.5325670498084292, + "acc_norm_stderr": 
0.017841995750520874 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4962962962962963, + "acc_stderr": 0.043192236258113303, + "acc_norm": 0.4962962962962963, + "acc_norm_stderr": 0.043192236258113303 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421255, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421255 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.40425531914893614, + "acc_stderr": 0.03208115750788684, + "acc_norm": 0.40425531914893614, + "acc_norm_stderr": 0.03208115750788684 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42771084337349397, + "acc_stderr": 0.03851597683718533, + "acc_norm": 0.42771084337349397, + "acc_norm_stderr": 0.03851597683718533 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5048231511254019, + "acc_stderr": 0.028396770444111298, + "acc_norm": 0.5048231511254019, + "acc_norm_stderr": 0.028396770444111298 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5246636771300448, + "acc_stderr": 0.033516951676526276, + "acc_norm": 0.5246636771300448, + "acc_norm_stderr": 0.033516951676526276 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48091603053435117, + "acc_stderr": 0.04382094705550989, + "acc_norm": 0.48091603053435117, + "acc_norm_stderr": 0.04382094705550989 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5404040404040404, + "acc_stderr": 0.035507024651313425, + "acc_norm": 0.5404040404040404, + "acc_norm_stderr": 0.035507024651313425 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4068965517241379, + "acc_stderr": 0.04093793981266237, + "acc_norm": 0.4068965517241379, + "acc_norm_stderr": 0.04093793981266237 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.04220773659171453, + "acc_norm": 0.23529411764705882, + 
"acc_norm_stderr": 0.04220773659171453 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.453781512605042, + "acc_stderr": 0.03233943468182088, + "acc_norm": 0.453781512605042, + "acc_norm_stderr": 0.03233943468182088 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4282051282051282, + "acc_stderr": 0.02508830145469484, + "acc_norm": 0.4282051282051282, + "acc_norm_stderr": 0.02508830145469484 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5277777777777778, + "acc_stderr": 0.04826217294139894, + "acc_norm": 0.5277777777777778, + "acc_norm_stderr": 0.04826217294139894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4236453201970443, + "acc_stderr": 0.034767257476490364, + "acc_norm": 0.4236453201970443, + "acc_norm_stderr": 0.034767257476490364 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4645161290322581, + "acc_stderr": 0.02837228779796295, + "acc_norm": 0.4645161290322581, + "acc_norm_stderr": 0.02837228779796295 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6495726495726496, + "acc_stderr": 0.03125610824421881, + "acc_norm": 0.6495726495726496, + "acc_norm_stderr": 0.03125610824421881 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.43018867924528303, + "acc_stderr": 0.030471445867183238, + "acc_norm": 0.43018867924528303, + "acc_norm_stderr": 0.030471445867183238 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5272727272727272, + "acc_stderr": 0.0478200179138006, + "acc_norm": 0.5272727272727272, + "acc_norm_stderr": 0.0478200179138006 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2814814814814815, + "acc_stderr": 
0.027420019350945273, + "acc_norm": 0.2814814814814815, + "acc_norm_stderr": 0.027420019350945273 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.037101857261199946, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.037101857261199946 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5870646766169154, + "acc_stderr": 0.03481520803367348, + "acc_norm": 0.5870646766169154, + "acc_norm_stderr": 0.03481520803367348 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3699421965317919, + "acc_stderr": 0.03681229633394319, + "acc_norm": 0.3699421965317919, + "acc_norm_stderr": 0.03681229633394319 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.29365079365079366, + "acc_stderr": 0.02345603738398203, + "acc_norm": 0.29365079365079366, + "acc_norm_stderr": 0.02345603738398203 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3680555555555556, + "acc_stderr": 0.04032999053960718, + "acc_norm": 0.3680555555555556, + "acc_norm_stderr": 0.04032999053960718 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.64, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.64, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4884393063583815, + "acc_stderr": 0.026911898686377927, + "acc_norm": 0.4884393063583815, + "acc_norm_stderr": 0.026911898686377927 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.43558282208588955, + "acc_stderr": 0.03895632464138937, + "acc_norm": 0.43558282208588955, + "acc_norm_stderr": 0.03895632464138937 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.027777777777777797, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.027777777777777797 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + 
"acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5077720207253886, + "acc_stderr": 0.03608003225569654, + "acc_norm": 0.5077720207253886, + "acc_norm_stderr": 0.03608003225569654 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.04185774424022058, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.04185774424022058 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5412844036697247, + "acc_stderr": 0.02136412253388169, + "acc_norm": 0.5412844036697247, + "acc_norm_stderr": 0.02136412253388169 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.29365079365079366, + "acc_stderr": 0.040735243221471255, + "acc_norm": 0.29365079365079366, + "acc_norm_stderr": 0.040735243221471255 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.434640522875817, + "acc_stderr": 0.028384256704883037, + "acc_norm": 0.434640522875817, + "acc_norm_stderr": 0.028384256704883037 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.628099173553719, + "acc_stderr": 0.04412015806624504, + "acc_norm": 0.628099173553719, + "acc_norm_stderr": 0.04412015806624504 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40789473684210525, + "acc_stderr": 0.03999309712777472, + "acc_norm": 0.40789473684210525, + "acc_norm_stderr": 0.03999309712777472 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3758169934640523, + "acc_stderr": 0.01959402113657745, + "acc_norm": 0.3758169934640523, + "acc_norm_stderr": 0.01959402113657745 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32269503546099293, + "acc_stderr": 0.027889139300534778, + "acc_norm": 0.32269503546099293, + "acc_norm_stderr": 0.027889139300534778 + }, + 
"harness|ko_mmlu_machine_learning|5": { + "acc": 0.23214285714285715, + "acc_stderr": 0.04007341809755806, + "acc_norm": 0.23214285714285715, + "acc_norm_stderr": 0.04007341809755806 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3472222222222222, + "acc_stderr": 0.032468872436376486, + "acc_norm": 0.3472222222222222, + "acc_norm_stderr": 0.032468872436376486 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3860294117647059, + "acc_stderr": 0.029573269134411124, + "acc_norm": 0.3860294117647059, + "acc_norm_stderr": 0.029573269134411124 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4448979591836735, + "acc_stderr": 0.031814251181977865, + "acc_norm": 0.4448979591836735, + "acc_norm_stderr": 0.031814251181977865 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5864978902953587, + "acc_stderr": 0.03205649904851858, + "acc_norm": 0.5864978902953587, + "acc_norm_stderr": 0.03205649904851858 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.31421121251629724, + "acc_stderr": 0.011855911587048228, + "acc_norm": 0.31421121251629724, + "acc_norm_stderr": 0.011855911587048228 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4362745098039216, + "acc_stderr": 0.03480693138457039, + "acc_norm": 0.4362745098039216, + "acc_norm_stderr": 0.03480693138457039 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5333333333333333, + "acc_stderr": 0.03895658065271847, + "acc_norm": 
0.5333333333333333, + "acc_norm_stderr": 0.03895658065271847 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.26560587515299877, + "mc1_stderr": 0.015461027627253595, + "mc2": 0.41276206628297735, + "mc2_stderr": 0.014870061139692456 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.48288075560802834, + "acc_stderr": 0.017180275246085622, + "acc_norm": 0.5537190082644629, + "acc_norm_stderr": 0.017090852631668336 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + 
"harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Cartinoe5930/KoRAE-13b-DPO", + "model_sha": "70de20a53e3dc47eeb7b8ddc0864d81caff6038c", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Cartinoe5930/KoRAE-13b/result_2023-11-26 13:45:22.json b/Cartinoe5930/KoRAE-13b/result_2023-11-26 13:45:22.json new file mode 100644 index 0000000000000000000000000000000000000000..6d2c67f1ec90b1d5adac5c8f12eb493b206b7883 --- /dev/null +++ b/Cartinoe5930/KoRAE-13b/result_2023-11-26 13:45:22.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3924914675767918, + "acc_stderr": 0.01426963463567073, + "acc_norm": 0.46331058020477817, + "acc_norm_stderr": 0.014572000527756994 + }, + 
"harness|ko_hellaswag|10": { + "acc": 0.42202748456482775, + "acc_stderr": 0.004928735103635848, + "acc_norm": 0.572495518820952, + "acc_norm_stderr": 0.004937054233711569 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.52046783625731, + "acc_stderr": 0.038316105328219316, + "acc_norm": 0.52046783625731, + "acc_norm_stderr": 0.038316105328219316 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.46601941747572817, + "acc_stderr": 0.04939291447273481, + "acc_norm": 0.46601941747572817, + "acc_norm_stderr": 0.04939291447273481 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5338441890166028, + "acc_stderr": 0.017838956009136802, + "acc_norm": 0.5338441890166028, + "acc_norm_stderr": 0.017838956009136802 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4962962962962963, + "acc_stderr": 0.043192236258113303, + "acc_norm": 0.4962962962962963, + "acc_norm_stderr": 0.043192236258113303 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421255, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421255 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4, + "acc_stderr": 0.03202563076101737, + "acc_norm": 0.4, + "acc_norm_stderr": 0.03202563076101737 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42771084337349397, + "acc_stderr": 0.03851597683718533, + "acc_norm": 0.42771084337349397, + "acc_norm_stderr": 0.03851597683718533 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5080385852090032, + "acc_stderr": 0.028394421370984538, + "acc_norm": 0.5080385852090032, + "acc_norm_stderr": 0.028394421370984538 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5291479820627802, + "acc_stderr": 0.03350073248773404, + "acc_norm": 0.5291479820627802, + "acc_norm_stderr": 0.03350073248773404 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48091603053435117, + "acc_stderr": 0.04382094705550989, + "acc_norm": 0.48091603053435117, + "acc_norm_stderr": 0.04382094705550989 + }, + 
"harness|ko_mmlu_medical_genetics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.035476014940069384, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.035476014940069384 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3931034482758621, + "acc_stderr": 0.040703290137070705, + "acc_norm": 0.3931034482758621, + "acc_norm_stderr": 0.040703290137070705 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.04220773659171453, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.04220773659171453 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4495798319327731, + "acc_stderr": 0.03231293497137707, + "acc_norm": 0.4495798319327731, + "acc_norm_stderr": 0.03231293497137707 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4307692307692308, + "acc_stderr": 0.02510682066053975, + "acc_norm": 0.4307692307692308, + "acc_norm_stderr": 0.02510682066053975 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5185185185185185, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.5185185185185185, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4236453201970443, + "acc_stderr": 0.034767257476490364, + "acc_norm": 0.4236453201970443, + "acc_norm_stderr": 0.034767257476490364 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.46774193548387094, + "acc_stderr": 0.028384747788813326, + "acc_norm": 0.46774193548387094, + "acc_norm_stderr": 
0.028384747788813326 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6538461538461539, + "acc_stderr": 0.031166957367235903, + "acc_norm": 0.6538461538461539, + "acc_norm_stderr": 0.031166957367235903 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.43018867924528303, + "acc_stderr": 0.030471445867183238, + "acc_norm": 0.43018867924528303, + "acc_norm_stderr": 0.030471445867183238 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5272727272727272, + "acc_stderr": 0.0478200179138006, + "acc_norm": 0.5272727272727272, + "acc_norm_stderr": 0.0478200179138006 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.02730914058823018, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.02730914058823018 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.037101857261199946, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.037101857261199946 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5870646766169154, + "acc_stderr": 0.03481520803367348, + "acc_norm": 0.5870646766169154, + "acc_norm_stderr": 0.03481520803367348 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.37572254335260113, + "acc_stderr": 0.03692820767264867, + "acc_norm": 0.37572254335260113, + "acc_norm_stderr": 0.03692820767264867 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.29365079365079366, + "acc_stderr": 0.02345603738398203, + "acc_norm": 0.29365079365079366, + "acc_norm_stderr": 0.02345603738398203 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.040166600304512336, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.040166600304512336 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.63, + "acc_stderr": 0.048523658709391, + 
"acc_norm": 0.63, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.48265895953757226, + "acc_stderr": 0.026902900458666647, + "acc_norm": 0.48265895953757226, + "acc_norm_stderr": 0.026902900458666647 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4294478527607362, + "acc_stderr": 0.03889066619112722, + "acc_norm": 0.4294478527607362, + "acc_norm_stderr": 0.03889066619112722 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.027777777777777797, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.027777777777777797 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5077720207253886, + "acc_stderr": 0.03608003225569654, + "acc_norm": 0.5077720207253886, + "acc_norm_stderr": 0.03608003225569654 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.04185774424022058, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.04185774424022058 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5412844036697247, + "acc_stderr": 0.021364122533881695, + "acc_norm": 0.5412844036697247, + "acc_norm_stderr": 0.021364122533881695 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.29365079365079366, + "acc_stderr": 0.040735243221471255, + "acc_norm": 0.29365079365079366, + "acc_norm_stderr": 0.040735243221471255 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.42810457516339867, + "acc_stderr": 0.028332397483664274, + "acc_norm": 0.42810457516339867, + "acc_norm_stderr": 0.028332397483664274 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.628099173553719, + "acc_stderr": 
0.04412015806624504, + "acc_norm": 0.628099173553719, + "acc_norm_stderr": 0.04412015806624504 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40789473684210525, + "acc_stderr": 0.03999309712777472, + "acc_norm": 0.40789473684210525, + "acc_norm_stderr": 0.03999309712777472 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3790849673202614, + "acc_stderr": 0.01962744474841224, + "acc_norm": 0.3790849673202614, + "acc_norm_stderr": 0.01962744474841224 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32269503546099293, + "acc_stderr": 0.027889139300534778, + "acc_norm": 0.32269503546099293, + "acc_norm_stderr": 0.027889139300534778 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.23214285714285715, + "acc_stderr": 0.04007341809755806, + "acc_norm": 0.23214285714285715, + "acc_norm_stderr": 0.04007341809755806 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3472222222222222, + "acc_stderr": 0.032468872436376486, + "acc_norm": 0.3472222222222222, + "acc_norm_stderr": 0.032468872436376486 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3786764705882353, + "acc_stderr": 0.02946513363977613, + "acc_norm": 0.3786764705882353, + "acc_norm_stderr": 0.02946513363977613 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.44081632653061226, + "acc_stderr": 0.03178419114175363, + "acc_norm": 0.44081632653061226, + "acc_norm_stderr": 0.03178419114175363 + }, + "harness|ko_mmlu_high_school_world_history|5": { + 
"acc": 0.5864978902953587, + "acc_stderr": 0.03205649904851858, + "acc_norm": 0.5864978902953587, + "acc_norm_stderr": 0.03205649904851858 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.31421121251629724, + "acc_stderr": 0.01185591158704823, + "acc_norm": 0.31421121251629724, + "acc_norm_stderr": 0.01185591158704823 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4264705882352941, + "acc_stderr": 0.034711579079534254, + "acc_norm": 0.4264705882352941, + "acc_norm_stderr": 0.034711579079534254 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5393939393939394, + "acc_stderr": 0.03892207016552012, + "acc_norm": 0.5393939393939394, + "acc_norm_stderr": 0.03892207016552012 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.26438188494492043, + "mc1_stderr": 0.015438211119522505, + "mc2": 0.4107667883351212, + "mc2_stderr": 0.014847145006763885 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4793388429752066, + "acc_stderr": 0.017175671279836446, + "acc_norm": 0.5572609208972845, + "acc_norm_stderr": 0.017077254131556228 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + 
"harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Cartinoe5930/KoRAE-13b", + "model_sha": "ea6b5bc5c26f06cbb2a0cb973b691f4080bbee72", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + 
"num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Cartinoe5930/original-KoRAE-13b-3ep/result_2023-11-29 23:33:12.json b/Cartinoe5930/original-KoRAE-13b-3ep/result_2023-11-29 23:33:12.json new file mode 100644 index 0000000000000000000000000000000000000000..b1023a16186c115f32a567f88db5d6c76ce14317 --- /dev/null +++ b/Cartinoe5930/original-KoRAE-13b-3ep/result_2023-11-29 23:33:12.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3856655290102389, + "acc_stderr": 0.014224250973257182, + "acc_norm": 0.44368600682593856, + "acc_norm_stderr": 0.014518421825670447 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4207329217287393, + "acc_stderr": 0.004926678108601343, + "acc_norm": 0.5697072296355308, + "acc_norm_stderr": 0.004941051795214797 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.49122807017543857, + "acc_stderr": 0.038342347441649924, + "acc_norm": 0.49122807017543857, + "acc_norm_stderr": 0.038342347441649924 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.49514563106796117, + "acc_stderr": 0.04950504382128921, + "acc_norm": 0.49514563106796117, + "acc_norm_stderr": 0.04950504382128921 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.541507024265645, + "acc_stderr": 0.017818248603465578, + "acc_norm": 0.541507024265645, + "acc_norm_stderr": 0.017818248603465578 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4888888888888889, + "acc_stderr": 0.04318275491977978, + "acc_norm": 0.4888888888888889, + "acc_norm_stderr": 0.04318275491977978 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206824, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206824 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.41702127659574467, + "acc_stderr": 0.03223276266711712, + "acc_norm": 0.41702127659574467, + "acc_norm_stderr": 0.03223276266711712 + }, + "harness|ko_mmlu_virology|5": { + "acc": 
0.4759036144578313, + "acc_stderr": 0.03887971849597264, + "acc_norm": 0.4759036144578313, + "acc_norm_stderr": 0.03887971849597264 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.49517684887459806, + "acc_stderr": 0.02839677044411129, + "acc_norm": 0.49517684887459806, + "acc_norm_stderr": 0.02839677044411129 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5112107623318386, + "acc_stderr": 0.033549366530984746, + "acc_norm": 0.5112107623318386, + "acc_norm_stderr": 0.033549366530984746 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.46564885496183206, + "acc_stderr": 0.04374928560599738, + "acc_norm": 0.46564885496183206, + "acc_norm_stderr": 0.04374928560599738 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5757575757575758, + "acc_stderr": 0.03521224908841586, + "acc_norm": 0.5757575757575758, + "acc_norm_stderr": 0.03521224908841586 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4, + "acc_stderr": 0.040824829046386284, + "acc_norm": 0.4, + "acc_norm_stderr": 0.040824829046386284 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.041583075330832865, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.041583075330832865 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4789915966386555, + "acc_stderr": 0.03244980849990029, + "acc_norm": 0.4789915966386555, + "acc_norm_stderr": 0.03244980849990029 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.43333333333333335, + "acc_stderr": 0.02512465352588513, + "acc_norm": 0.43333333333333335, + "acc_norm_stderr": 0.02512465352588513 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + 
"harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5185185185185185, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.5185185185185185, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4236453201970443, + "acc_stderr": 0.03476725747649037, + "acc_norm": 0.4236453201970443, + "acc_norm_stderr": 0.03476725747649037 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.47419354838709676, + "acc_stderr": 0.028406095057653315, + "acc_norm": 0.47419354838709676, + "acc_norm_stderr": 0.028406095057653315 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6367521367521367, + "acc_stderr": 0.03150712523091265, + "acc_norm": 0.6367521367521367, + "acc_norm_stderr": 0.03150712523091265 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4339622641509434, + "acc_stderr": 0.0305032920133426, + "acc_norm": 0.4339622641509434, + "acc_norm_stderr": 0.0305032920133426 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2518518518518518, + "acc_stderr": 0.026466117538959916, + "acc_norm": 0.2518518518518518, + "acc_norm_stderr": 0.026466117538959916 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.03802039760107903, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.03802039760107903 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.582089552238806, + "acc_stderr": 0.034875586404620636, + "acc_norm": 0.582089552238806, + "acc_norm_stderr": 0.034875586404620636 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.41040462427745666, + "acc_stderr": 0.03750757044895538, + "acc_norm": 0.41040462427745666, + 
"acc_norm_stderr": 0.03750757044895538 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.29365079365079366, + "acc_stderr": 0.023456037383982026, + "acc_norm": 0.29365079365079366, + "acc_norm_stderr": 0.023456037383982026 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3958333333333333, + "acc_stderr": 0.040894654493255835, + "acc_norm": 0.3958333333333333, + "acc_norm_stderr": 0.040894654493255835 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.62, + "acc_stderr": 0.048783173121456344, + "acc_norm": 0.62, + "acc_norm_stderr": 0.048783173121456344 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5057803468208093, + "acc_stderr": 0.026917296179149116, + "acc_norm": 0.5057803468208093, + "acc_norm_stderr": 0.026917296179149116 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.49079754601226994, + "acc_stderr": 0.03927705600787443, + "acc_norm": 0.49079754601226994, + "acc_norm_stderr": 0.03927705600787443 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.027801656212323667, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.027801656212323667 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5181347150259067, + "acc_stderr": 0.036060650018329185, + "acc_norm": 0.5181347150259067, + "acc_norm_stderr": 0.036060650018329185 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.042270544512321984, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.042270544512321984 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5541284403669725, + "acc_stderr": 0.021311335009708575, + 
"acc_norm": 0.5541284403669725, + "acc_norm_stderr": 0.021311335009708575 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.31746031746031744, + "acc_stderr": 0.0416345303130286, + "acc_norm": 0.31746031746031744, + "acc_norm_stderr": 0.0416345303130286 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.45098039215686275, + "acc_stderr": 0.028491993586171563, + "acc_norm": 0.45098039215686275, + "acc_norm_stderr": 0.028491993586171563 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6115702479338843, + "acc_stderr": 0.044492703500683836, + "acc_norm": 0.6115702479338843, + "acc_norm_stderr": 0.044492703500683836 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40789473684210525, + "acc_stderr": 0.03999309712777472, + "acc_norm": 0.40789473684210525, + "acc_norm_stderr": 0.03999309712777472 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.38562091503267976, + "acc_stderr": 0.01969145905235415, + "acc_norm": 0.38562091503267976, + "acc_norm_stderr": 0.01969145905235415 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.29432624113475175, + "acc_stderr": 0.02718712701150381, + "acc_norm": 0.29432624113475175, + "acc_norm_stderr": 0.02718712701150381 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25892857142857145, + "acc_stderr": 0.041577515398656284, + "acc_norm": 0.25892857142857145, + "acc_norm_stderr": 0.041577515398656284 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3425925925925926, + "acc_stderr": 0.032365852526021574, + "acc_norm": 0.3425925925925926, + "acc_norm_stderr": 0.032365852526021574 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": 
{ + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.029520095697687758, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.029520095697687758 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.43673469387755104, + "acc_stderr": 0.031751952375833226, + "acc_norm": 0.43673469387755104, + "acc_norm_stderr": 0.031751952375833226 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6244725738396625, + "acc_stderr": 0.03152256243091156, + "acc_norm": 0.6244725738396625, + "acc_norm_stderr": 0.03152256243091156 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.33376792698826596, + "acc_stderr": 0.012043812655846147, + "acc_norm": 0.33376792698826596, + "acc_norm_stderr": 0.012043812655846147 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4215686274509804, + "acc_stderr": 0.03465868196380758, + "acc_norm": 0.4215686274509804, + "acc_norm_stderr": 0.03465868196380758 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5393939393939394, + "acc_stderr": 0.03892207016552012, + "acc_norm": 0.5393939393939394, + "acc_norm_stderr": 0.03892207016552012 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2631578947368421, + "mc1_stderr": 0.015415241740237024, + "mc2": 0.417499174328329, + "mc2_stderr": 0.014766097200285613 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4793388429752066, + "acc_stderr": 0.01717567127983645, + "acc_norm": 0.5442739079102715, + "acc_norm_stderr": 0.017122829143292658 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 
1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + 
"harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Cartinoe5930/original-KoRAE-13b-3ep", + "model_sha": "6c109c149338c1aff8de13e82058abedb03b754d", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Cartinoe5930/original-KoRAE-13b/result_2023-11-28 09:14:48.json b/Cartinoe5930/original-KoRAE-13b/result_2023-11-28 09:14:48.json new file mode 100644 index 0000000000000000000000000000000000000000..72a97ed91ced9b44efb0628457bce6c2dbc03655 --- /dev/null +++ b/Cartinoe5930/original-KoRAE-13b/result_2023-11-28 09:14:48.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.386518771331058, + "acc_stderr": 0.01423008476191047, + "acc_norm": 0.45563139931740615, + "acc_norm_stderr": 0.014553749939306864 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4221270663214499, + "acc_stderr": 0.00492889189587429, + "acc_norm": 0.5704043019318861, + "acc_norm_stderr": 0.004940067402031046 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.49707602339181284, + "acc_stderr": 0.03834759370936839, + "acc_norm": 0.49707602339181284, + "acc_norm_stderr": 0.03834759370936839 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4563106796116505, + "acc_stderr": 0.049318019942204146, + "acc_norm": 0.4563106796116505, + "acc_norm_stderr": 0.049318019942204146 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 
0.5389527458492975, + "acc_stderr": 0.017825621793239006, + "acc_norm": 0.5389527458492975, + "acc_norm_stderr": 0.017825621793239006 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4740740740740741, + "acc_stderr": 0.04313531696750574, + "acc_norm": 0.4740740740740741, + "acc_norm_stderr": 0.04313531696750574 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720683, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720683 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3659574468085106, + "acc_stderr": 0.03148955829745529, + "acc_norm": 0.3659574468085106, + "acc_norm_stderr": 0.03148955829745529 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42771084337349397, + "acc_stderr": 0.03851597683718533, + "acc_norm": 0.42771084337349397, + "acc_norm_stderr": 0.03851597683718533 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.48231511254019294, + "acc_stderr": 0.02838032284907713, + "acc_norm": 0.48231511254019294, + "acc_norm_stderr": 0.02838032284907713 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5381165919282511, + "acc_stderr": 0.033460150119732274, + "acc_norm": 0.5381165919282511, + "acc_norm_stderr": 0.033460150119732274 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4732824427480916, + "acc_stderr": 0.04379024936553894, + "acc_norm": 0.4732824427480916, + "acc_norm_stderr": 0.04379024936553894 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5151515151515151, + "acc_stderr": 0.0356071651653106, + "acc_norm": 0.5151515151515151, + "acc_norm_stderr": 0.0356071651653106 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4068965517241379, + "acc_stderr": 0.04093793981266237, + "acc_norm": 0.4068965517241379, + "acc_norm_stderr": 0.04093793981266237 + }, + "harness|ko_mmlu_college_physics|5": { + 
"acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237655, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237655 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.032252942323996406, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.032252942323996406 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4128205128205128, + "acc_stderr": 0.02496268356433182, + "acc_norm": 0.4128205128205128, + "acc_norm_stderr": 0.02496268356433182 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5462962962962963, + "acc_stderr": 0.04812917324536823, + "acc_norm": 0.5462962962962963, + "acc_norm_stderr": 0.04812917324536823 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4187192118226601, + "acc_stderr": 0.034711928605184676, + "acc_norm": 0.4187192118226601, + "acc_norm_stderr": 0.034711928605184676 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.44516129032258067, + "acc_stderr": 0.028272410186214906, + "acc_norm": 0.44516129032258067, + "acc_norm_stderr": 0.028272410186214906 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6495726495726496, + "acc_stderr": 0.03125610824421881, + "acc_norm": 0.6495726495726496, + "acc_norm_stderr": 0.03125610824421881 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4339622641509434, + "acc_stderr": 0.0305032920133426, + "acc_norm": 0.4339622641509434, + "acc_norm_stderr": 0.0305032920133426 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5, + "acc_stderr": 0.04789131426105757, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04789131426105757 + }, + 
"harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2851851851851852, + "acc_stderr": 0.027528599210340496, + "acc_norm": 0.2851851851851852, + "acc_norm_stderr": 0.027528599210340496 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.03780445850526733, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.03780445850526733 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.582089552238806, + "acc_stderr": 0.034875586404620636, + "acc_norm": 0.582089552238806, + "acc_norm_stderr": 0.034875586404620636 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3699421965317919, + "acc_stderr": 0.03681229633394319, + "acc_norm": 0.3699421965317919, + "acc_norm_stderr": 0.03681229633394319 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.02326651221373057, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.02326651221373057 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3680555555555556, + "acc_stderr": 0.04032999053960718, + "acc_norm": 0.3680555555555556, + "acc_norm_stderr": 0.04032999053960718 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.31, + "acc_stderr": 0.046482319871173156, + "acc_norm": 0.31, + "acc_norm_stderr": 0.046482319871173156 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.65, + "acc_stderr": 0.04793724854411018, + "acc_norm": 0.65, + "acc_norm_stderr": 0.04793724854411018 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5, + "acc_stderr": 0.026919095102908273, + "acc_norm": 0.5, + "acc_norm_stderr": 0.026919095102908273 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4539877300613497, + "acc_stderr": 0.0391170190467718, + "acc_norm": 0.4539877300613497, + "acc_norm_stderr": 0.0391170190467718 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4567901234567901, + "acc_stderr": 0.02771666165019404, + "acc_norm": 0.4567901234567901, + "acc_norm_stderr": 0.02771666165019404 + }, + 
"harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.49740932642487046, + "acc_stderr": 0.03608390745384487, + "acc_norm": 0.49740932642487046, + "acc_norm_stderr": 0.03608390745384487 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2894736842105263, + "acc_stderr": 0.042663394431593955, + "acc_norm": 0.2894736842105263, + "acc_norm_stderr": 0.042663394431593955 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5339449541284403, + "acc_stderr": 0.021387863350353992, + "acc_norm": 0.5339449541284403, + "acc_norm_stderr": 0.021387863350353992 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.25396825396825395, + "acc_stderr": 0.03893259610604675, + "acc_norm": 0.25396825396825395, + "acc_norm_stderr": 0.03893259610604675 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4150326797385621, + "acc_stderr": 0.028213504177824093, + "acc_norm": 0.4150326797385621, + "acc_norm_stderr": 0.028213504177824093 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.628099173553719, + "acc_stderr": 0.04412015806624504, + "acc_norm": 0.628099173553719, + "acc_norm_stderr": 0.04412015806624504 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4144736842105263, + "acc_stderr": 0.04008973785779206, + "acc_norm": 0.4144736842105263, + "acc_norm_stderr": 0.04008973785779206 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3709150326797386, + "acc_stderr": 0.01954210156485412, + "acc_norm": 0.3709150326797386, + "acc_norm_stderr": 0.01954210156485412 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.28368794326241137, + "acc_stderr": 0.026891709428343957, + "acc_norm": 0.28368794326241137, + 
"acc_norm_stderr": 0.026891709428343957 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25, + "acc_stderr": 0.04109974682633932, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04109974682633932 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.32407407407407407, + "acc_stderr": 0.03191923445686186, + "acc_norm": 0.32407407407407407, + "acc_norm_stderr": 0.03191923445686186 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.35661764705882354, + "acc_stderr": 0.029097209568411952, + "acc_norm": 0.35661764705882354, + "acc_norm_stderr": 0.029097209568411952 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4204081632653061, + "acc_stderr": 0.03160106993449604, + "acc_norm": 0.4204081632653061, + "acc_norm_stderr": 0.03160106993449604 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5738396624472574, + "acc_stderr": 0.03219035703131774, + "acc_norm": 0.5738396624472574, + "acc_norm_stderr": 0.03219035703131774 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3122555410691004, + "acc_stderr": 0.011835798135683175, + "acc_norm": 0.3122555410691004, + "acc_norm_stderr": 0.011835798135683175 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4215686274509804, + "acc_stderr": 0.03465868196380758, + "acc_norm": 0.4215686274509804, + "acc_norm_stderr": 0.03465868196380758 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5636363636363636, + "acc_stderr": 
0.03872592983524754, + "acc_norm": 0.5636363636363636, + "acc_norm_stderr": 0.03872592983524754 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2594859241126071, + "mc1_stderr": 0.01534540948555797, + "mc2": 0.4067288610044621, + "mc2_stderr": 0.014720415548716639 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.48760330578512395, + "acc_stderr": 0.017185069732676524, + "acc_norm": 0.5702479338842975, + "acc_norm_stderr": 0.017019847535972202 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + 
"harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Cartinoe5930/original-KoRAE-13b", + "model_sha": "5db145b0f9576d388f073cd01036cd9c72f01860", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Cartinoe5930/weak-KoRAE-13b/result_2023-11-28 02:02:22.json b/Cartinoe5930/weak-KoRAE-13b/result_2023-11-28 02:02:22.json new file mode 100644 index 0000000000000000000000000000000000000000..d957eb8886a06f834cdf7b51054b61a301f9d94e --- /dev/null +++ b/Cartinoe5930/weak-KoRAE-13b/result_2023-11-28 02:02:22.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.378839590443686, + "acc_stderr": 0.014175915490000324, + "acc_norm": 0.4522184300341297, 
+ "acc_norm_stderr": 0.014544519880633835 + }, + "harness|ko_hellaswag|10": { + "acc": 0.41734714200358497, + "acc_stderr": 0.0049211338649318885, + "acc_norm": 0.5679147580163314, + "acc_norm_stderr": 0.0049435372423444176 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5029239766081871, + "acc_stderr": 0.03834759370936839, + "acc_norm": 0.5029239766081871, + "acc_norm_stderr": 0.03834759370936839 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.42718446601941745, + "acc_stderr": 0.04897957737781168, + "acc_norm": 0.42718446601941745, + "acc_norm_stderr": 0.04897957737781168 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5172413793103449, + "acc_stderr": 0.017869330154003705, + "acc_norm": 0.5172413793103449, + "acc_norm_stderr": 0.017869330154003705 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4740740740740741, + "acc_stderr": 0.04313531696750574, + "acc_norm": 0.4740740740740741, + "acc_norm_stderr": 0.04313531696750574 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768077, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768077 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.425531914893617, + "acc_stderr": 0.032321469162244695, + "acc_norm": 0.425531914893617, + "acc_norm_stderr": 0.032321469162244695 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.43373493975903615, + "acc_stderr": 0.03858158940685515, + "acc_norm": 0.43373493975903615, + "acc_norm_stderr": 0.03858158940685515 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4919614147909968, + "acc_stderr": 0.028394421370984538, + "acc_norm": 0.4919614147909968, + "acc_norm_stderr": 0.028394421370984538 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5336322869955157, + "acc_stderr": 0.033481800170603065, + "acc_norm": 0.5336322869955157, + "acc_norm_stderr": 0.033481800170603065 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.46564885496183206, + "acc_stderr": 0.043749285605997376, + "acc_norm": 
0.46564885496183206, + "acc_norm_stderr": 0.043749285605997376 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5404040404040404, + "acc_stderr": 0.035507024651313425, + "acc_norm": 0.5404040404040404, + "acc_norm_stderr": 0.035507024651313425 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.04104269211806232, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.04104269211806232 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237655, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237655 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.032252942323996406, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.032252942323996406 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4307692307692308, + "acc_stderr": 0.025106820660539746, + "acc_norm": 0.4307692307692308, + "acc_norm_stderr": 0.025106820660539746 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.048262172941398944, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.048262172941398944 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4236453201970443, + "acc_stderr": 0.03476725747649037, + "acc_norm": 0.4236453201970443, + "acc_norm_stderr": 0.03476725747649037 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4483870967741935, + "acc_stderr": 
0.028292056830112735, + "acc_norm": 0.4483870967741935, + "acc_norm_stderr": 0.028292056830112735 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6324786324786325, + "acc_stderr": 0.031585391577456365, + "acc_norm": 0.6324786324786325, + "acc_norm_stderr": 0.031585391577456365 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4377358490566038, + "acc_stderr": 0.030533338430467512, + "acc_norm": 0.4377358490566038, + "acc_norm_stderr": 0.030533338430467512 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4636363636363636, + "acc_stderr": 0.047764491623961985, + "acc_norm": 0.4636363636363636, + "acc_norm_stderr": 0.047764491623961985 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.027309140588230175, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.027309140588230175 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.037579499229433426, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.037579499229433426 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5771144278606966, + "acc_stderr": 0.034932317774212816, + "acc_norm": 0.5771144278606966, + "acc_norm_stderr": 0.034932317774212816 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.36416184971098264, + "acc_stderr": 0.03669072477416907, + "acc_norm": 0.36416184971098264, + "acc_norm_stderr": 0.03669072477416907 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2724867724867725, + "acc_stderr": 0.02293097307163335, + "acc_norm": 0.2724867724867725, + "acc_norm_stderr": 0.02293097307163335 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3472222222222222, + "acc_stderr": 0.039812405437178615, + "acc_norm": 0.3472222222222222, + "acc_norm_stderr": 0.039812405437178615 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + 
"harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.63, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.63, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5, + "acc_stderr": 0.026919095102908273, + "acc_norm": 0.5, + "acc_norm_stderr": 0.026919095102908273 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.43558282208588955, + "acc_stderr": 0.03895632464138937, + "acc_norm": 0.43558282208588955, + "acc_norm_stderr": 0.03895632464138937 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.45987654320987653, + "acc_stderr": 0.027731022753539274, + "acc_norm": 0.45987654320987653, + "acc_norm_stderr": 0.027731022753539274 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.48704663212435234, + "acc_stderr": 0.03607228061047749, + "acc_norm": 0.48704663212435234, + "acc_norm_stderr": 0.03607228061047749 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.22807017543859648, + "acc_stderr": 0.03947152782669415, + "acc_norm": 0.22807017543859648, + "acc_norm_stderr": 0.03947152782669415 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5174311926605505, + "acc_stderr": 0.02142429187185315, + "acc_norm": 0.5174311926605505, + "acc_norm_stderr": 0.02142429187185315 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.040061680838488774, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.040061680838488774 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.42810457516339867, + "acc_stderr": 0.028332397483664274, + "acc_norm": 0.42810457516339867, + "acc_norm_stderr": 0.028332397483664274 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562428 + }, + 
"harness|ko_mmlu_international_law|5": { + "acc": 0.6033057851239669, + "acc_stderr": 0.044658697805310094, + "acc_norm": 0.6033057851239669, + "acc_norm_stderr": 0.044658697805310094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40131578947368424, + "acc_stderr": 0.03988903703336285, + "acc_norm": 0.40131578947368424, + "acc_norm_stderr": 0.03988903703336285 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.36437908496732024, + "acc_stderr": 0.01946951822157369, + "acc_norm": 0.36437908496732024, + "acc_norm_stderr": 0.01946951822157369 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3262411347517731, + "acc_stderr": 0.02796845304356317, + "acc_norm": 0.3262411347517731, + "acc_norm_stderr": 0.02796845304356317 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.23214285714285715, + "acc_stderr": 0.04007341809755806, + "acc_norm": 0.23214285714285715, + "acc_norm_stderr": 0.04007341809755806 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.33796296296296297, + "acc_stderr": 0.03225941352631295, + "acc_norm": 0.33796296296296297, + "acc_norm_stderr": 0.03225941352631295 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.41911764705882354, + "acc_stderr": 0.029972807170464626, + "acc_norm": 0.41911764705882354, + "acc_norm_stderr": 0.029972807170464626 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.46122448979591835, + "acc_stderr": 0.03191282052669277, + "acc_norm": 
0.46122448979591835, + "acc_norm_stderr": 0.03191282052669277 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5822784810126582, + "acc_stderr": 0.032103530322412685, + "acc_norm": 0.5822784810126582, + "acc_norm_stderr": 0.032103530322412685 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3050847457627119, + "acc_stderr": 0.011759939618085455, + "acc_norm": 0.3050847457627119, + "acc_norm_stderr": 0.011759939618085455 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4215686274509804, + "acc_stderr": 0.03465868196380758, + "acc_norm": 0.4215686274509804, + "acc_norm_stderr": 0.03465868196380758 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5333333333333333, + "acc_stderr": 0.03895658065271847, + "acc_norm": 0.5333333333333333, + "acc_norm_stderr": 0.03895658065271847 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2631578947368421, + "mc1_stderr": 0.01541524174023703, + "mc2": 0.4040029626548701, + "mc2_stderr": 0.014782276857043152 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4757969303423849, + "acc_stderr": 0.017170202466520748, + "acc_norm": 0.5608028335301063, + "acc_norm_stderr": 0.017062775744780705 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + 
"harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Cartinoe5930/weak-KoRAE-13b", + "model_sha": 
"f6d72bd200da4870967487484595ac16355c52fd", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Chang-Su/llama-2-13b-chat-ko/result_2023-10-18 16:07:29.json b/Chang-Su/llama-2-13b-chat-ko/result_2023-10-18 16:07:29.json new file mode 100644 index 0000000000000000000000000000000000000000..6419bb96be2d940e45a2e88853f2d86eafe98065 --- /dev/null +++ b/Chang-Su/llama-2-13b-chat-ko/result_2023-10-18 16:07:29.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3037542662116041, + "acc_stderr": 0.013438909184778759, + "acc_norm": 0.3464163822525597, + "acc_norm_stderr": 0.013905011180063251 + }, + "harness|ko_hellaswag|10": { + "acc": 0.350726946823342, + "acc_stderr": 0.0047622234924352535, + "acc_norm": 0.45429197371041624, + "acc_norm_stderr": 0.004968888130290068 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4678362573099415, + "acc_stderr": 0.03826882417660369, + "acc_norm": 0.4678362573099415, + "acc_norm_stderr": 0.03826882417660369 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4077669902912621, + "acc_stderr": 0.048657775704107696, + "acc_norm": 0.4077669902912621, + "acc_norm_stderr": 0.048657775704107696 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4610472541507024, + "acc_stderr": 0.01782562179323902, + "acc_norm": 0.4610472541507024, + "acc_norm_stderr": 0.01782562179323902 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.04244633238353228, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.04244633238353228 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3446808510638298, + "acc_stderr": 0.03106898596312215, + "acc_norm": 
0.3446808510638298, + "acc_norm_stderr": 0.03106898596312215 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3373493975903614, + "acc_stderr": 0.03680783690727581, + "acc_norm": 0.3373493975903614, + "acc_norm_stderr": 0.03680783690727581 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4180064308681672, + "acc_stderr": 0.02801365189199507, + "acc_norm": 0.4180064308681672, + "acc_norm_stderr": 0.02801365189199507 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.36771300448430494, + "acc_stderr": 0.03236198350928276, + "acc_norm": 0.36771300448430494, + "acc_norm_stderr": 0.03236198350928276 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.44274809160305345, + "acc_stderr": 0.043564472026650695, + "acc_norm": 0.44274809160305345, + "acc_norm_stderr": 0.043564472026650695 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.41414141414141414, + "acc_stderr": 0.03509438348879629, + "acc_norm": 0.41414141414141414, + "acc_norm_stderr": 0.03509438348879629 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4068965517241379, + "acc_stderr": 0.040937939812662374, + "acc_norm": 0.4068965517241379, + "acc_norm_stderr": 0.040937939812662374 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.043364327079931785, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.043364327079931785 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.41596638655462187, + "acc_stderr": 0.03201650100739615, + "acc_norm": 0.41596638655462187, + "acc_norm_stderr": 0.03201650100739615 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4025641025641026, + "acc_stderr": 0.024864995159767762, + "acc_norm": 0.4025641025641026, + "acc_norm_stderr": 0.024864995159767762 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 
0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.047609522856952344, + "acc_norm": 0.34, + "acc_norm_stderr": 0.047609522856952344 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5092592592592593, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.5092592592592593, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3497536945812808, + "acc_stderr": 0.03355400904969565, + "acc_norm": 0.3497536945812808, + "acc_norm_stderr": 0.03355400904969565 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3870967741935484, + "acc_stderr": 0.027709359675032488, + "acc_norm": 0.3870967741935484, + "acc_norm_stderr": 0.027709359675032488 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5384615384615384, + "acc_stderr": 0.03265903381186194, + "acc_norm": 0.5384615384615384, + "acc_norm_stderr": 0.03265903381186194 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3584905660377358, + "acc_stderr": 0.029514703583981765, + "acc_norm": 0.3584905660377358, + "acc_norm_stderr": 0.029514703583981765 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4636363636363636, + "acc_stderr": 0.04776449162396197, + "acc_norm": 0.4636363636363636, + "acc_norm_stderr": 0.04776449162396197 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3074074074074074, + "acc_stderr": 0.02813325257881564, + "acc_norm": 0.3074074074074074, + "acc_norm_stderr": 0.02813325257881564 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.03780445850526733, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.03780445850526733 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.4925373134328358, + "acc_stderr": 0.035351400842767194, + "acc_norm": 0.4925373134328358, + "acc_norm_stderr": 0.035351400842767194 + }, + 
"harness|ko_mmlu_college_medicine|5": { + "acc": 0.3468208092485549, + "acc_stderr": 0.036291466701596636, + "acc_norm": 0.3468208092485549, + "acc_norm_stderr": 0.036291466701596636 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.28835978835978837, + "acc_stderr": 0.023330654054535903, + "acc_norm": 0.28835978835978837, + "acc_norm_stderr": 0.023330654054535903 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.03852084696008534, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.03852084696008534 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.40173410404624277, + "acc_stderr": 0.026394104177643634, + "acc_norm": 0.40173410404624277, + "acc_norm_stderr": 0.026394104177643634 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.38650306748466257, + "acc_stderr": 0.038258255488486076, + "acc_norm": 0.38650306748466257, + "acc_norm_stderr": 0.038258255488486076 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.027125115513166865, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.027125115513166865 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.40932642487046633, + "acc_stderr": 0.03548608168860806, + "acc_norm": 0.40932642487046633, + "acc_norm_stderr": 0.03548608168860806 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2982456140350877, + "acc_stderr": 0.04303684033537318, + "acc_norm": 0.2982456140350877, + "acc_norm_stderr": 
0.04303684033537318 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3798165137614679, + "acc_stderr": 0.020808825617866244, + "acc_norm": 0.3798165137614679, + "acc_norm_stderr": 0.020808825617866244 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.25396825396825395, + "acc_stderr": 0.038932596106046734, + "acc_norm": 0.25396825396825395, + "acc_norm_stderr": 0.038932596106046734 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.37254901960784315, + "acc_stderr": 0.02768418188330289, + "acc_norm": 0.37254901960784315, + "acc_norm_stderr": 0.02768418188330289 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5785123966942148, + "acc_stderr": 0.04507732278775088, + "acc_norm": 0.5785123966942148, + "acc_norm_stderr": 0.04507732278775088 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4407894736842105, + "acc_stderr": 0.04040311062490436, + "acc_norm": 0.4407894736842105, + "acc_norm_stderr": 0.04040311062490436 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3104575163398693, + "acc_stderr": 0.018718067052623227, + "acc_norm": 0.3104575163398693, + "acc_norm_stderr": 0.018718067052623227 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2978723404255319, + "acc_stderr": 0.027281608344469414, + "acc_norm": 0.2978723404255319, + "acc_norm_stderr": 0.027281608344469414 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.24107142857142858, + "acc_stderr": 0.04059867246952687, + "acc_norm": 0.24107142857142858, + "acc_norm_stderr": 0.04059867246952687 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.030546745264953202, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.030546745264953202 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 
0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.02679956202488769, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.02679956202488769 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.49387755102040815, + "acc_stderr": 0.032006820201639086, + "acc_norm": 0.49387755102040815, + "acc_norm_stderr": 0.032006820201639086 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.4092827004219409, + "acc_stderr": 0.032007041833595914, + "acc_norm": 0.4092827004219409, + "acc_norm_stderr": 0.032007041833595914 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.31290743155149936, + "acc_stderr": 0.011842529823062999, + "acc_norm": 0.31290743155149936, + "acc_norm_stderr": 0.011842529823062999 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.29901960784313725, + "acc_stderr": 0.03213325717373616, + "acc_norm": 0.29901960784313725, + "acc_norm_stderr": 0.03213325717373616 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3878787878787879, + "acc_stderr": 0.038049136539710114, + "acc_norm": 0.3878787878787879, + "acc_norm_stderr": 0.038049136539710114 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2631578947368421, + "mc1_stderr": 0.015415241740237035, + "mc2": 0.42145051773986575, + "mc2_stderr": 0.015233960921162444 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.31759149940968123, + "acc_stderr": 0.0160055818762293, + "acc_norm": 0.40613931523022434, + "acc_norm_stderr": 0.016884749503191392 + } + }, + "versions": { + "all": 
0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + 
"harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Chang-Su/llama-2-13b-chat-ko", + "model_sha": "3a82a33f61584cbe72dc32c15d55bfd182cefd8b", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Changgil/K2S3-14b-v0.2/result_2024-06-17 01:09:17.json b/Changgil/K2S3-14b-v0.2/result_2024-06-17 01:09:17.json new file mode 100644 index 0000000000000000000000000000000000000000..4c959f4e02a49f9f15582411ea4ae134f7fbd999 --- /dev/null +++ b/Changgil/K2S3-14b-v0.2/result_2024-06-17 01:09:17.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.43600682593856654, + "acc_stderr": 0.014491225699230916, + "acc_norm": 0.49658703071672355, + "acc_norm_stderr": 0.014611050403244081 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4221270663214499, + "acc_stderr": 0.00492889189587429, + "acc_norm": 0.5705038836885082, + "acc_norm_stderr": 0.004939925958728881 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6432748538011696, + "acc_stderr": 0.03674013002860954, + "acc_norm": 0.6432748538011696, + "acc_norm_stderr": 0.03674013002860954 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6893203883495146, + 
"acc_stderr": 0.04582124160161551, + "acc_norm": 0.6893203883495146, + "acc_norm_stderr": 0.04582124160161551 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6232439335887612, + "acc_stderr": 0.017328292907303058, + "acc_norm": 0.6232439335887612, + "acc_norm_stderr": 0.017328292907303058 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4740740740740741, + "acc_stderr": 0.04313531696750574, + "acc_norm": 0.4740740740740741, + "acc_norm_stderr": 0.04313531696750574 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4765957446808511, + "acc_stderr": 0.03265019475033583, + "acc_norm": 0.4765957446808511, + "acc_norm_stderr": 0.03265019475033583 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42771084337349397, + "acc_stderr": 0.03851597683718533, + "acc_norm": 0.42771084337349397, + "acc_norm_stderr": 0.03851597683718533 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5819935691318328, + "acc_stderr": 0.028013651891995076, + "acc_norm": 0.5819935691318328, + "acc_norm_stderr": 0.028013651891995076 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5695067264573991, + "acc_stderr": 0.033231973029429394, + "acc_norm": 0.5695067264573991, + "acc_norm_stderr": 0.033231973029429394 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5648854961832062, + "acc_stderr": 0.04348208051644858, + "acc_norm": 0.5648854961832062, + "acc_norm_stderr": 0.04348208051644858 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6464646464646465, + "acc_stderr": 0.03406086723547155, + "acc_norm": 0.6464646464646465, + "acc_norm_stderr": 0.03406086723547155 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 
0.47586206896551725, + "acc_stderr": 0.041618085035015295, + "acc_norm": 0.47586206896551725, + "acc_norm_stderr": 0.041618085035015295 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3235294117647059, + "acc_stderr": 0.04655010411319616, + "acc_norm": 0.3235294117647059, + "acc_norm_stderr": 0.04655010411319616 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6218487394957983, + "acc_stderr": 0.031499305777849054, + "acc_norm": 0.6218487394957983, + "acc_norm_stderr": 0.031499305777849054 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5641025641025641, + "acc_stderr": 0.025141801511177488, + "acc_norm": 0.5641025641025641, + "acc_norm_stderr": 0.025141801511177488 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.59, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.59, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6018518518518519, + "acc_stderr": 0.04732332615978814, + "acc_norm": 0.6018518518518519, + "acc_norm_stderr": 0.04732332615978814 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4876847290640394, + "acc_stderr": 0.035169204442208966, + "acc_norm": 0.4876847290640394, + "acc_norm_stderr": 0.035169204442208966 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5838709677419355, + "acc_stderr": 0.028040981380761536, + "acc_norm": 0.5838709677419355, + "acc_norm_stderr": 0.028040981380761536 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7350427350427351, + "acc_stderr": 0.02891120880274947, + "acc_norm": 0.7350427350427351, + "acc_norm_stderr": 0.02891120880274947 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5169811320754717, + "acc_stderr": 0.030755120364119905, + "acc_norm": 0.5169811320754717, + "acc_norm_stderr": 0.030755120364119905 + }, + 
"harness|ko_mmlu_public_relations|5": { + "acc": 0.5909090909090909, + "acc_stderr": 0.04709306978661895, + "acc_norm": 0.5909090909090909, + "acc_norm_stderr": 0.04709306978661895 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.0287420409039485, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.0287420409039485 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3576158940397351, + "acc_stderr": 0.03913453431177258, + "acc_norm": 0.3576158940397351, + "acc_norm_stderr": 0.03913453431177258 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7064676616915423, + "acc_stderr": 0.03220024104534206, + "acc_norm": 0.7064676616915423, + "acc_norm_stderr": 0.03220024104534206 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.49710982658959535, + "acc_stderr": 0.038124005659748335, + "acc_norm": 0.49710982658959535, + "acc_norm_stderr": 0.038124005659748335 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.35978835978835977, + "acc_stderr": 0.024718075944129277, + "acc_norm": 0.35978835978835977, + "acc_norm_stderr": 0.024718075944129277 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4375, + "acc_stderr": 0.04148415739394154, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.04148415739394154 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.7, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.7, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5780346820809249, + "acc_stderr": 0.026589231142174267, + "acc_norm": 0.5780346820809249, + "acc_norm_stderr": 0.026589231142174267 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5276073619631901, + "acc_stderr": 0.03922378290610991, + "acc_norm": 0.5276073619631901, + "acc_norm_stderr": 0.03922378290610991 + 
}, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5185185185185185, + "acc_stderr": 0.02780165621232366, + "acc_norm": 0.5185185185185185, + "acc_norm_stderr": 0.02780165621232366 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.04605661864718381, + "acc_norm": 0.3, + "acc_norm_stderr": 0.04605661864718381 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6994818652849741, + "acc_stderr": 0.0330881859441575, + "acc_norm": 0.6994818652849741, + "acc_norm_stderr": 0.0330881859441575 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2982456140350877, + "acc_stderr": 0.04303684033537316, + "acc_norm": 0.2982456140350877, + "acc_norm_stderr": 0.04303684033537316 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6770642201834862, + "acc_stderr": 0.020048115923415332, + "acc_norm": 0.6770642201834862, + "acc_norm_stderr": 0.020048115923415332 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3253968253968254, + "acc_stderr": 0.041905964388711366, + "acc_norm": 0.3253968253968254, + "acc_norm_stderr": 0.041905964388711366 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5522875816993464, + "acc_stderr": 0.028472938478033522, + "acc_norm": 0.5522875816993464, + "acc_norm_stderr": 0.028472938478033522 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6776859504132231, + "acc_stderr": 0.04266416363352168, + "acc_norm": 0.6776859504132231, + "acc_norm_stderr": 0.04266416363352168 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5526315789473685, + "acc_stderr": 0.0404633688397825, + "acc_norm": 0.5526315789473685, + "acc_norm_stderr": 0.0404633688397825 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.47549019607843135, + "acc_stderr": 0.02020351728026145, + "acc_norm": 0.47549019607843135, + "acc_norm_stderr": 
0.02020351728026145 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3404255319148936, + "acc_stderr": 0.02826765748265014, + "acc_norm": 0.3404255319148936, + "acc_norm_stderr": 0.02826765748265014 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.36607142857142855, + "acc_stderr": 0.0457237235873743, + "acc_norm": 0.36607142857142855, + "acc_norm_stderr": 0.0457237235873743 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.47685185185185186, + "acc_stderr": 0.03406315360711507, + "acc_norm": 0.47685185185185186, + "acc_norm_stderr": 0.03406315360711507 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.3139664804469274, + "acc_stderr": 0.015521923933523652, + "acc_norm": 0.3139664804469274, + "acc_norm_stderr": 0.015521923933523652 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4264705882352941, + "acc_stderr": 0.03004261583271487, + "acc_norm": 0.4264705882352941, + "acc_norm_stderr": 0.03004261583271487 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6244897959183674, + "acc_stderr": 0.031001209039894843, + "acc_norm": 0.6244897959183674, + "acc_norm_stderr": 0.031001209039894843 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6877637130801688, + "acc_stderr": 0.03016513786784701, + "acc_norm": 0.6877637130801688, + "acc_norm_stderr": 0.03016513786784701 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.37157757496740546, + "acc_stderr": 0.012341828514528289, + "acc_norm": 0.37157757496740546, + "acc_norm_stderr": 0.012341828514528289 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6225490196078431, + "acc_stderr": 
0.03402272044340703, + "acc_norm": 0.6225490196078431, + "acc_norm_stderr": 0.03402272044340703 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6424242424242425, + "acc_stderr": 0.03742597043806587, + "acc_norm": 0.6424242424242425, + "acc_norm_stderr": 0.03742597043806587 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2839657282741738, + "mc1_stderr": 0.015785370858396708, + "mc2": 0.43132993762625893, + "mc2_stderr": 0.015275011226108703 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5454545454545454, + "acc_stderr": 0.017119172208061504, + "acc_norm": 0.5808736717827627, + "acc_norm_stderr": 0.0169639950108628 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, 
+ "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Changgil/K2S3-14b-v0.2", + "model_sha": "b4f0e1eed2640df2b75847ff37e6ebb1be217b6c", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Changgil/K2S3-Llama2-13b-v1.0/result_2024-02-26 23:58:14.json b/Changgil/K2S3-Llama2-13b-v1.0/result_2024-02-26 23:58:14.json new file mode 100644 index 0000000000000000000000000000000000000000..cbad05d4cbb572505ea1b7410564fb5213653127 --- /dev/null +++ b/Changgil/K2S3-Llama2-13b-v1.0/result_2024-02-26 23:58:14.json 
@@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2832764505119454, + "acc_stderr": 0.013167478735134576, + "acc_norm": 0.33532423208191126, + "acc_norm_stderr": 0.013796182947785562 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3331009759012149, + "acc_stderr": 0.004703590558552499, + "acc_norm": 0.41655048795060745, + "acc_norm_stderr": 0.004919794704673263 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.3216374269005848, + "acc_stderr": 0.03582529442573122, + "acc_norm": 0.3216374269005848, + "acc_norm_stderr": 0.03582529442573122 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.39805825242718446, + "acc_stderr": 0.04846748253977239, + "acc_norm": 0.39805825242718446, + "acc_norm_stderr": 0.04846748253977239 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3997445721583653, + "acc_stderr": 0.01751684790705327, + "acc_norm": 0.3997445721583653, + "acc_norm_stderr": 0.01751684790705327 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.362962962962963, + "acc_stderr": 0.041539484047424, + "acc_norm": 0.362962962962963, + "acc_norm_stderr": 0.041539484047424 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.37872340425531914, + "acc_stderr": 0.03170995606040655, + "acc_norm": 0.37872340425531914, + "acc_norm_stderr": 0.03170995606040655 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3855421686746988, + "acc_stderr": 0.03789134424611548, + "acc_norm": 0.3855421686746988, + "acc_norm_stderr": 0.03789134424611548 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3729903536977492, + "acc_stderr": 0.027466610213140105, + "acc_norm": 0.3729903536977492, + "acc_norm_stderr": 0.027466610213140105 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3452914798206278, + "acc_stderr": 0.031911001928357934, + "acc_norm": 0.3452914798206278, + 
"acc_norm_stderr": 0.031911001928357934 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.40458015267175573, + "acc_stderr": 0.043046937953806645, + "acc_norm": 0.40458015267175573, + "acc_norm_stderr": 0.043046937953806645 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.37373737373737376, + "acc_stderr": 0.034468977386593325, + "acc_norm": 0.37373737373737376, + "acc_norm_stderr": 0.034468977386593325 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.04104269211806232, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.04104269211806232 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.1568627450980392, + "acc_stderr": 0.036186648199362445, + "acc_norm": 0.1568627450980392, + "acc_norm_stderr": 0.036186648199362445 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.40336134453781514, + "acc_stderr": 0.031866081214088314, + "acc_norm": 0.40336134453781514, + "acc_norm_stderr": 0.031866081214088314 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.36923076923076925, + "acc_stderr": 0.024468615241478916, + "acc_norm": 0.36923076923076925, + "acc_norm_stderr": 0.024468615241478916 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3425925925925926, + "acc_stderr": 0.04587904741301812, + "acc_norm": 0.3425925925925926, + "acc_norm_stderr": 0.04587904741301812 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3842364532019704, + "acc_stderr": 0.034223985656575515, 
+ "acc_norm": 0.3842364532019704, + "acc_norm_stderr": 0.034223985656575515 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4032258064516129, + "acc_stderr": 0.02790615082604114, + "acc_norm": 0.4032258064516129, + "acc_norm_stderr": 0.02790615082604114 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5598290598290598, + "acc_stderr": 0.032520741720630506, + "acc_norm": 0.5598290598290598, + "acc_norm_stderr": 0.032520741720630506 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3433962264150943, + "acc_stderr": 0.029224526469124792, + "acc_norm": 0.3433962264150943, + "acc_norm_stderr": 0.029224526469124792 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4090909090909091, + "acc_stderr": 0.04709306978661895, + "acc_norm": 0.4090909090909091, + "acc_norm_stderr": 0.04709306978661895 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.027309140588230186, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.027309140588230186 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.36423841059602646, + "acc_stderr": 0.03929111781242741, + "acc_norm": 0.36423841059602646, + "acc_norm_stderr": 0.03929111781242741 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.4577114427860697, + "acc_stderr": 0.03522865864099598, + "acc_norm": 0.4577114427860697, + "acc_norm_stderr": 0.03522865864099598 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3468208092485549, + "acc_stderr": 0.036291466701596636, + "acc_norm": 0.3468208092485549, + "acc_norm_stderr": 0.036291466701596636 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.24867724867724866, + "acc_stderr": 0.022261817692400168, + "acc_norm": 0.24867724867724866, + "acc_norm_stderr": 0.022261817692400168 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3541666666666667, + "acc_stderr": 0.039994111357535424, + "acc_norm": 0.3541666666666667, + "acc_norm_stderr": 0.039994111357535424 + }, + 
"harness|ko_mmlu_college_chemistry|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.52, + "acc_stderr": 0.05021167315686781, + "acc_norm": 0.52, + "acc_norm_stderr": 0.05021167315686781 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.38439306358381503, + "acc_stderr": 0.026189666966272035, + "acc_norm": 0.38439306358381503, + "acc_norm_stderr": 0.026189666966272035 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3496932515337423, + "acc_stderr": 0.03746668325470023, + "acc_norm": 0.3496932515337423, + "acc_norm_stderr": 0.03746668325470023 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.404320987654321, + "acc_stderr": 0.027306625297327684, + "acc_norm": 0.404320987654321, + "acc_norm_stderr": 0.027306625297327684 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.40414507772020725, + "acc_stderr": 0.0354150857888402, + "acc_norm": 0.40414507772020725, + "acc_norm_stderr": 0.0354150857888402 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.21929824561403508, + "acc_stderr": 0.03892431106518752, + "acc_norm": 0.21929824561403508, + "acc_norm_stderr": 0.03892431106518752 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.30825688073394497, + "acc_stderr": 0.019798366698367258, + "acc_norm": 0.30825688073394497, + "acc_norm_stderr": 0.019798366698367258 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.21428571428571427, + "acc_stderr": 0.03670066451047182, + "acc_norm": 0.21428571428571427, + "acc_norm_stderr": 0.03670066451047182 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.477124183006536, + "acc_stderr": 0.028599936776089786, + "acc_norm": 0.477124183006536, + "acc_norm_stderr": 0.028599936776089786 
+ }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.04545454545454548, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.04545454545454548 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3157894736842105, + "acc_stderr": 0.0378272898086547, + "acc_norm": 0.3157894736842105, + "acc_norm_stderr": 0.0378272898086547 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.32189542483660133, + "acc_stderr": 0.018901015322093092, + "acc_norm": 0.32189542483660133, + "acc_norm_stderr": 0.018901015322093092 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2872340425531915, + "acc_stderr": 0.026992199173064356, + "acc_norm": 0.2872340425531915, + "acc_norm_stderr": 0.026992199173064356 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.26785714285714285, + "acc_stderr": 0.04203277291467763, + "acc_norm": 0.26785714285714285, + "acc_norm_stderr": 0.04203277291467763 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.27314814814814814, + "acc_stderr": 0.03038805130167812, + "acc_norm": 0.27314814814814814, + "acc_norm_stderr": 0.03038805130167812 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2759776536312849, + "acc_stderr": 0.014950103002475361, + "acc_norm": 0.2759776536312849, + "acc_norm_stderr": 0.014950103002475361 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3639705882352941, + "acc_stderr": 0.029227192460032032, + "acc_norm": 0.3639705882352941, + 
"acc_norm_stderr": 0.029227192460032032 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5020408163265306, + "acc_stderr": 0.0320089533497105, + "acc_norm": 0.5020408163265306, + "acc_norm_stderr": 0.0320089533497105 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.37130801687763715, + "acc_stderr": 0.03145068600744859, + "acc_norm": 0.37130801687763715, + "acc_norm_stderr": 0.03145068600744859 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2940026075619296, + "acc_stderr": 0.011636062953698607, + "acc_norm": 0.2940026075619296, + "acc_norm_stderr": 0.011636062953698607 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.3284313725490196, + "acc_stderr": 0.03296245110172229, + "acc_norm": 0.3284313725490196, + "acc_norm_stderr": 0.03296245110172229 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.38181818181818183, + "acc_stderr": 0.03793713171165634, + "acc_norm": 0.38181818181818183, + "acc_norm_stderr": 0.03793713171165634 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.26193390452876375, + "mc1_stderr": 0.01539211880501501, + "mc2": 0.4406573503212225, + "mc2_stderr": 0.015634673384222614 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.29988193624557263, + "acc_stderr": 0.01575344761542946, + "acc_norm": 0.3541912632821724, + "acc_norm_stderr": 0.01644317574921476 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 
1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + 
"harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Changgil/K2S3-Llama2-13b-v1.0", + "model_sha": "d946d79639945ec467eae0029696c7af39f15c6e", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Changgil/K2S3-Mistral-7b-v1.1/result_2024-03-24 06:49:44.json b/Changgil/K2S3-Mistral-7b-v1.1/result_2024-03-24 06:49:44.json new file mode 100644 index 0000000000000000000000000000000000000000..3b5a5d146a1c9b4a5513d625847d4f77602708a6 --- /dev/null +++ b/Changgil/K2S3-Mistral-7b-v1.1/result_2024-03-24 06:49:44.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.30204778156996587, + "acc_stderr": 0.01341751914471642, + "acc_norm": 0.35580204778157, + "acc_norm_stderr": 0.01399057113791876 + }, + "harness|ko_hellaswag|10": { + "acc": 0.34096793467436765, + "acc_stderr": 0.0047306580730415515, + "acc_norm": 0.4274048994224258, + "acc_norm_stderr": 0.004936908503140863 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.42105263157894735, + "acc_stderr": 0.03786720706234215, + "acc_norm": 0.42105263157894735, + "acc_norm_stderr": 0.03786720706234215 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.47572815533980584, + "acc_stderr": 0.049449010929737795, + "acc_norm": 0.47572815533980584, + "acc_norm_stderr": 0.049449010929737795 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.40485312899106, + "acc_stderr": 0.017553246467720243, + "acc_norm": 0.40485312899106, + "acc_norm_stderr": 0.017553246467720243 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3851851851851852, + "acc_stderr": 0.042039210401562783, + "acc_norm": 0.3851851851851852, + "acc_norm_stderr": 0.042039210401562783 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 
0.0440844002276808 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3276595744680851, + "acc_stderr": 0.030683020843231004, + "acc_norm": 0.3276595744680851, + "acc_norm_stderr": 0.030683020843231004 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.29518072289156627, + "acc_stderr": 0.035509201856896294, + "acc_norm": 0.29518072289156627, + "acc_norm_stderr": 0.035509201856896294 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3954983922829582, + "acc_stderr": 0.027770918531427834, + "acc_norm": 0.3954983922829582, + "acc_norm_stderr": 0.027770918531427834 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4260089686098655, + "acc_stderr": 0.03318833286217281, + "acc_norm": 0.4260089686098655, + "acc_norm_stderr": 0.03318833286217281 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3053435114503817, + "acc_stderr": 0.040393149787245605, + "acc_norm": 0.3053435114503817, + "acc_norm_stderr": 0.040393149787245605 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.41414141414141414, + "acc_stderr": 0.03509438348879629, + "acc_norm": 0.41414141414141414, + "acc_norm_stderr": 0.03509438348879629 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3310344827586207, + "acc_stderr": 0.03921545312467122, + "acc_norm": 0.3310344827586207, + "acc_norm_stderr": 0.03921545312467122 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.042207736591714534, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.042207736591714534 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.032145368597886394, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.032145368597886394 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.33589743589743587, + "acc_stderr": 
0.023946724741563976, + "acc_norm": 0.33589743589743587, + "acc_norm_stderr": 0.023946724741563976 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.04820403072760627, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.04820403072760627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3054187192118227, + "acc_stderr": 0.03240661565868408, + "acc_norm": 0.3054187192118227, + "acc_norm_stderr": 0.03240661565868408 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.38387096774193546, + "acc_stderr": 0.02766618207553965, + "acc_norm": 0.38387096774193546, + "acc_norm_stderr": 0.02766618207553965 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5811965811965812, + "acc_stderr": 0.03232128912157792, + "acc_norm": 0.5811965811965812, + "acc_norm_stderr": 0.03232128912157792 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3622641509433962, + "acc_stderr": 0.0295822451283843, + "acc_norm": 0.3622641509433962, + "acc_norm_stderr": 0.0295822451283843 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.42727272727272725, + "acc_stderr": 0.047381987035454834, + "acc_norm": 0.42727272727272725, + "acc_norm_stderr": 0.047381987035454834 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.027940457136228402, + "acc_norm": 0.3, + "acc_norm_stderr": 0.027940457136228402 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33112582781456956, + "acc_stderr": 0.038425817186598696, + "acc_norm": 0.33112582781456956, + "acc_norm_stderr": 0.038425817186598696 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.43781094527363185, + 
"acc_stderr": 0.035080801121998406, + "acc_norm": 0.43781094527363185, + "acc_norm_stderr": 0.035080801121998406 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.28901734104046245, + "acc_stderr": 0.034564257450869995, + "acc_norm": 0.28901734104046245, + "acc_norm_stderr": 0.034564257450869995 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.023517294335963286, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.023517294335963286 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2708333333333333, + "acc_stderr": 0.037161774375660164, + "acc_norm": 0.2708333333333333, + "acc_norm_stderr": 0.037161774375660164 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.45, + "acc_stderr": 0.04999999999999999, + "acc_norm": 0.45, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.41040462427745666, + "acc_stderr": 0.026483392042098177, + "acc_norm": 0.41040462427745666, + "acc_norm_stderr": 0.026483392042098177 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4233128834355828, + "acc_stderr": 0.03881891213334384, + "acc_norm": 0.4233128834355828, + "acc_norm_stderr": 0.03881891213334384 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.026869490744815254, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.026869490744815254 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.38341968911917096, + "acc_stderr": 0.03508984236295342, + "acc_norm": 0.38341968911917096, + "acc_norm_stderr": 0.03508984236295342 + }, + "harness|ko_mmlu_econometrics|5": { + 
"acc": 0.2543859649122807, + "acc_stderr": 0.0409698513984367, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.0409698513984367 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3211009174311927, + "acc_stderr": 0.020018149772733747, + "acc_norm": 0.3211009174311927, + "acc_norm_stderr": 0.020018149772733747 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3412698412698413, + "acc_stderr": 0.042407993275749255, + "acc_norm": 0.3412698412698413, + "acc_norm_stderr": 0.042407993275749255 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.434640522875817, + "acc_stderr": 0.028384256704883037, + "acc_norm": 0.434640522875817, + "acc_norm_stderr": 0.028384256704883037 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5785123966942148, + "acc_stderr": 0.04507732278775088, + "acc_norm": 0.5785123966942148, + "acc_norm_stderr": 0.04507732278775088 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3092105263157895, + "acc_stderr": 0.03761070869867479, + "acc_norm": 0.3092105263157895, + "acc_norm_stderr": 0.03761070869867479 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3202614379084967, + "acc_stderr": 0.01887568293806944, + "acc_norm": 0.3202614379084967, + "acc_norm_stderr": 0.01887568293806944 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32978723404255317, + "acc_stderr": 0.0280459469420424, + "acc_norm": 0.32978723404255317, + "acc_norm_stderr": 0.0280459469420424 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.33035714285714285, + "acc_stderr": 0.04464285714285714, + "acc_norm": 0.33035714285714285, + "acc_norm_stderr": 0.04464285714285714 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3287037037037037, + "acc_stderr": 0.03203614084670058, + "acc_norm": 0.3287037037037037, + "acc_norm_stderr": 
0.03203614084670058 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.014333522059217892, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.014333522059217892 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.04960449637488583, + "acc_norm": 0.42, + "acc_norm_stderr": 0.04960449637488583 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.2977941176470588, + "acc_stderr": 0.027778298701545443, + "acc_norm": 0.2977941176470588, + "acc_norm_stderr": 0.027778298701545443 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.45714285714285713, + "acc_stderr": 0.031891418324213966, + "acc_norm": 0.45714285714285713, + "acc_norm_stderr": 0.031891418324213966 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.4219409282700422, + "acc_stderr": 0.03214814630240369, + "acc_norm": 0.4219409282700422, + "acc_norm_stderr": 0.03214814630240369 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.31486310299869624, + "acc_stderr": 0.011862561755715937, + "acc_norm": 0.31486310299869624, + "acc_norm_stderr": 0.011862561755715937 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.36764705882352944, + "acc_stderr": 0.03384132045674118, + "acc_norm": 0.36764705882352944, + "acc_norm_stderr": 0.03384132045674118 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.41818181818181815, + "acc_stderr": 0.03851716319398395, + "acc_norm": 0.41818181818181815, + "acc_norm_stderr": 0.03851716319398395 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2692778457772338, + "mc1_stderr": 0.015528566637087307, + "mc2": 0.4379048810658281, + "mc2_stderr": 0.01538143830058003 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3140495867768595, + "acc_stderr": 0.015957332434295066, 
+ "acc_norm": 0.4085005903187721, + "acc_norm_stderr": 0.01690006287942711 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + 
"harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Changgil/K2S3-Mistral-7b-v1.1", + "model_sha": "0f7e1ed84843f50791fa74315dfa0f975f300344", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Changgil/K2S3-Mistral-7b-v1.2/result_2024-03-28 00:34:25.json b/Changgil/K2S3-Mistral-7b-v1.2/result_2024-03-28 00:34:25.json new file mode 100644 index 0000000000000000000000000000000000000000..2c31ae975935fb10a91531356a118713c24f3a75 --- /dev/null +++ b/Changgil/K2S3-Mistral-7b-v1.2/result_2024-03-28 00:34:25.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3626279863481229, + "acc_stderr": 0.014049106564955003, + "acc_norm": 0.41467576791808874, + "acc_norm_stderr": 0.014397070564409174 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3784106751643099, + "acc_stderr": 0.004839995745602312, + "acc_norm": 0.49352718581955785, + "acc_norm_stderr": 0.0049893632769551655 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5146198830409356, + "acc_stderr": 0.03833185275213023, + "acc_norm": 0.5146198830409356, + 
"acc_norm_stderr": 0.03833185275213023 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6310679611650486, + "acc_stderr": 0.0477761518115674, + "acc_norm": 0.6310679611650486, + "acc_norm_stderr": 0.0477761518115674 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5044699872286079, + "acc_stderr": 0.017879248970584356, + "acc_norm": 0.5044699872286079, + "acc_norm_stderr": 0.017879248970584356 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4148148148148148, + "acc_stderr": 0.04256193767901407, + "acc_norm": 0.4148148148148148, + "acc_norm_stderr": 0.04256193767901407 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39148936170212767, + "acc_stderr": 0.03190701242326812, + "acc_norm": 0.39148936170212767, + "acc_norm_stderr": 0.03190701242326812 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3433734939759036, + "acc_stderr": 0.03696584317010601, + "acc_norm": 0.3433734939759036, + "acc_norm_stderr": 0.03696584317010601 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4919614147909968, + "acc_stderr": 0.028394421370984545, + "acc_norm": 0.4919614147909968, + "acc_norm_stderr": 0.028394421370984545 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5067264573991032, + "acc_stderr": 0.03355476596234355, + "acc_norm": 0.5067264573991032, + "acc_norm_stderr": 0.03355476596234355 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4732824427480916, + "acc_stderr": 0.04379024936553893, + "acc_norm": 0.4732824427480916, + "acc_norm_stderr": 0.04379024936553893 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.45, + "acc_stderr": 0.04999999999999999, + "acc_norm": 0.45, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5757575757575758, + "acc_stderr": 0.03521224908841585, + "acc_norm": 0.5757575757575758, + 
"acc_norm_stderr": 0.03521224908841585 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.45517241379310347, + "acc_stderr": 0.04149886942192117, + "acc_norm": 0.45517241379310347, + "acc_norm_stderr": 0.04149886942192117 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.04220773659171453, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.04220773659171453 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.47058823529411764, + "acc_stderr": 0.03242225027115007, + "acc_norm": 0.47058823529411764, + "acc_norm_stderr": 0.03242225027115007 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.45384615384615384, + "acc_stderr": 0.025242770987126167, + "acc_norm": 0.45384615384615384, + "acc_norm_stderr": 0.025242770987126167 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5185185185185185, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.5185185185185185, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3645320197044335, + "acc_stderr": 0.0338640574606209, + "acc_norm": 0.3645320197044335, + "acc_norm_stderr": 0.0338640574606209 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.45806451612903226, + "acc_stderr": 0.028343787250540636, + "acc_norm": 0.45806451612903226, + "acc_norm_stderr": 0.028343787250540636 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7222222222222222, + "acc_stderr": 0.02934311479809445, + "acc_norm": 0.7222222222222222, + "acc_norm_stderr": 0.02934311479809445 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.47547169811320755, + "acc_stderr": 
0.030735822206205608, + "acc_norm": 0.47547169811320755, + "acc_norm_stderr": 0.030735822206205608 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4636363636363636, + "acc_stderr": 0.04776449162396197, + "acc_norm": 0.4636363636363636, + "acc_norm_stderr": 0.04776449162396197 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.028742040903948496, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.028742040903948496 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3576158940397351, + "acc_stderr": 0.03913453431177258, + "acc_norm": 0.3576158940397351, + "acc_norm_stderr": 0.03913453431177258 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6268656716417911, + "acc_stderr": 0.034198326081760065, + "acc_norm": 0.6268656716417911, + "acc_norm_stderr": 0.034198326081760065 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4393063583815029, + "acc_stderr": 0.037842719328874674, + "acc_norm": 0.4393063583815029, + "acc_norm_stderr": 0.037842719328874674 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.335978835978836, + "acc_stderr": 0.02432631052914915, + "acc_norm": 0.335978835978836, + "acc_norm_stderr": 0.02432631052914915 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4097222222222222, + "acc_stderr": 0.04112490974670787, + "acc_norm": 0.4097222222222222, + "acc_norm_stderr": 0.04112490974670787 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4913294797687861, + "acc_stderr": 0.026915047355369804, + "acc_norm": 0.4913294797687861, + "acc_norm_stderr": 0.026915047355369804 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 
0.4785276073619632, + "acc_stderr": 0.03924746876751129, + "acc_norm": 0.4785276073619632, + "acc_norm_stderr": 0.03924746876751129 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4660493827160494, + "acc_stderr": 0.02775653525734767, + "acc_norm": 0.4660493827160494, + "acc_norm_stderr": 0.02775653525734767 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.538860103626943, + "acc_stderr": 0.03597524411734578, + "acc_norm": 0.538860103626943, + "acc_norm_stderr": 0.03597524411734578 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.039994238792813365, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.039994238792813365 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5119266055045871, + "acc_stderr": 0.021431223617362223, + "acc_norm": 0.5119266055045871, + "acc_norm_stderr": 0.021431223617362223 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.31746031746031744, + "acc_stderr": 0.0416345303130286, + "acc_norm": 0.31746031746031744, + "acc_norm_stderr": 0.0416345303130286 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.46078431372549017, + "acc_stderr": 0.028541722692618874, + "acc_norm": 0.46078431372549017, + "acc_norm_stderr": 0.028541722692618874 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.628099173553719, + "acc_stderr": 0.04412015806624504, + "acc_norm": 0.628099173553719, + "acc_norm_stderr": 0.04412015806624504 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.45394736842105265, + "acc_stderr": 0.04051646342874142, + "acc_norm": 0.45394736842105265, + "acc_norm_stderr": 0.04051646342874142 + }, + 
"harness|ko_mmlu_professional_psychology|5": { + "acc": 0.39705882352941174, + "acc_stderr": 0.01979448890002411, + "acc_norm": 0.39705882352941174, + "acc_norm_stderr": 0.01979448890002411 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32978723404255317, + "acc_stderr": 0.028045946942042398, + "acc_norm": 0.32978723404255317, + "acc_norm_stderr": 0.028045946942042398 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.32142857142857145, + "acc_stderr": 0.0443280405529152, + "acc_norm": 0.32142857142857145, + "acc_norm_stderr": 0.0443280405529152 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.032757734861009996, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.032757734861009996 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27932960893854747, + "acc_stderr": 0.015005762446786154, + "acc_norm": 0.27932960893854747, + "acc_norm_stderr": 0.015005762446786154 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.39338235294117646, + "acc_stderr": 0.02967428828131118, + "acc_norm": 0.39338235294117646, + "acc_norm_stderr": 0.02967428828131118 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5673469387755102, + "acc_stderr": 0.031717528240626645, + "acc_norm": 0.5673469387755102, + "acc_norm_stderr": 0.031717528240626645 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6160337552742616, + "acc_stderr": 0.031658678064106674, + "acc_norm": 0.6160337552742616, + "acc_norm_stderr": 0.031658678064106674 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.34028683181225555, + "acc_stderr": 
0.012101217610223805, + "acc_norm": 0.34028683181225555, + "acc_norm_stderr": 0.012101217610223805 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.46568627450980393, + "acc_stderr": 0.03501038327635897, + "acc_norm": 0.46568627450980393, + "acc_norm_stderr": 0.03501038327635897 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5212121212121212, + "acc_stderr": 0.03900828913737302, + "acc_norm": 0.5212121212121212, + "acc_norm_stderr": 0.03900828913737302 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2827417380660955, + "mc1_stderr": 0.01576477083677731, + "mc2": 0.452859232158323, + "mc2_stderr": 0.015516884053903536 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4049586776859504, + "acc_stderr": 0.01687694116504561, + "acc_norm": 0.4946871310507674, + "acc_norm_stderr": 0.017189383627229687 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + 
"harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Changgil/K2S3-Mistral-7b-v1.2", + "model_sha": "52d060cd9e93f176911c91ee232f582f253e7f8f", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Changgil/K2S3-Mistral-7b-v1.3/result_2024-04-01 13:03:09.json 
b/Changgil/K2S3-Mistral-7b-v1.3/result_2024-04-01 13:03:09.json new file mode 100644 index 0000000000000000000000000000000000000000..0588fa0cf86d6e4f524723934b4565084f50f297 --- /dev/null +++ b/Changgil/K2S3-Mistral-7b-v1.3/result_2024-04-01 13:03:09.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.42662116040955633, + "acc_stderr": 0.014453185592920293, + "acc_norm": 0.48293515358361777, + "acc_norm_stderr": 0.014602878388536597 + }, + "harness|ko_hellaswag|10": { + "acc": 0.41037641904003186, + "acc_stderr": 0.004908967278222492, + "acc_norm": 0.5455088627763394, + "acc_norm_stderr": 0.004969070188763748 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5614035087719298, + "acc_stderr": 0.0380579750559046, + "acc_norm": 0.5614035087719298, + "acc_norm_stderr": 0.0380579750559046 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6116504854368932, + "acc_stderr": 0.04825729337356389, + "acc_norm": 0.6116504854368932, + "acc_norm_stderr": 0.04825729337356389 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5823754789272031, + "acc_stderr": 0.0176356373269515, + "acc_norm": 0.5823754789272031, + "acc_norm_stderr": 0.0176356373269515 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45185185185185184, + "acc_stderr": 0.04299268905480863, + "acc_norm": 0.45185185185185184, + "acc_norm_stderr": 0.04299268905480863 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4297872340425532, + "acc_stderr": 0.03236214467715564, + "acc_norm": 0.4297872340425532, + "acc_norm_stderr": 0.03236214467715564 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.40963855421686746, + "acc_stderr": 0.03828401115079022, + "acc_norm": 0.40963855421686746, + "acc_norm_stderr": 0.03828401115079022 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5401929260450161, + 
"acc_stderr": 0.028306190403305693, + "acc_norm": 0.5401929260450161, + "acc_norm_stderr": 0.028306190403305693 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5112107623318386, + "acc_stderr": 0.033549366530984746, + "acc_norm": 0.5112107623318386, + "acc_norm_stderr": 0.033549366530984746 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5038167938931297, + "acc_stderr": 0.043851623256015534, + "acc_norm": 0.5038167938931297, + "acc_norm_stderr": 0.043851623256015534 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6212121212121212, + "acc_stderr": 0.03456088731993747, + "acc_norm": 0.6212121212121212, + "acc_norm_stderr": 0.03456088731993747 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.47586206896551725, + "acc_stderr": 0.041618085035015295, + "acc_norm": 0.47586206896551725, + "acc_norm_stderr": 0.041618085035015295 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.044405219061793275, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.044405219061793275 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.542016806722689, + "acc_stderr": 0.03236361111951941, + "acc_norm": 0.542016806722689, + "acc_norm_stderr": 0.03236361111951941 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4948717948717949, + "acc_stderr": 0.02534967290683867, + "acc_norm": 0.4948717948717949, + "acc_norm_stderr": 0.02534967290683867 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 
0.5833333333333334, + "acc_stderr": 0.04766075165356461, + "acc_norm": 0.5833333333333334, + "acc_norm_stderr": 0.04766075165356461 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.034819048444388045, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.034819048444388045 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5935483870967742, + "acc_stderr": 0.027941727346256304, + "acc_norm": 0.5935483870967742, + "acc_norm_stderr": 0.027941727346256304 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7307692307692307, + "acc_stderr": 0.029058588303748845, + "acc_norm": 0.7307692307692307, + "acc_norm_stderr": 0.029058588303748845 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5207547169811321, + "acc_stderr": 0.03074634997572347, + "acc_norm": 0.5207547169811321, + "acc_norm_stderr": 0.03074634997572347 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5818181818181818, + "acc_stderr": 0.04724577405731572, + "acc_norm": 0.5818181818181818, + "acc_norm_stderr": 0.04724577405731572 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.027840811495871923, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.027840811495871923 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3443708609271523, + "acc_stderr": 0.03879687024073327, + "acc_norm": 0.3443708609271523, + "acc_norm_stderr": 0.03879687024073327 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6766169154228856, + "acc_stderr": 0.03307615947979034, + "acc_norm": 0.6766169154228856, + "acc_norm_stderr": 0.03307615947979034 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.47398843930635837, + "acc_stderr": 0.03807301726504511, + "acc_norm": 0.47398843930635837, + "acc_norm_stderr": 0.03807301726504511 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.02487081525105709, + "acc_norm": 0.37037037037037035, + 
"acc_norm_stderr": 0.02487081525105709 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.375, + "acc_stderr": 0.04048439222695598, + "acc_norm": 0.375, + "acc_norm_stderr": 0.04048439222695598 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.66, + "acc_stderr": 0.04760952285695237, + "acc_norm": 0.66, + "acc_norm_stderr": 0.04760952285695237 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5404624277456648, + "acc_stderr": 0.026830805998952233, + "acc_norm": 0.5404624277456648, + "acc_norm_stderr": 0.026830805998952233 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4723926380368098, + "acc_stderr": 0.0392237829061099, + "acc_norm": 0.4723926380368098, + "acc_norm_stderr": 0.0392237829061099 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5154320987654321, + "acc_stderr": 0.02780749004427619, + "acc_norm": 0.5154320987654321, + "acc_norm_stderr": 0.02780749004427619 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6373056994818653, + "acc_stderr": 0.034697137917043715, + "acc_norm": 0.6373056994818653, + "acc_norm_stderr": 0.034697137917043715 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.22807017543859648, + "acc_stderr": 0.03947152782669415, + "acc_norm": 0.22807017543859648, + "acc_norm_stderr": 0.03947152782669415 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6495412844036698, + "acc_stderr": 0.020456077599824454, + "acc_norm": 0.6495412844036698, + "acc_norm_stderr": 0.020456077599824454 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3253968253968254, + "acc_stderr": 0.041905964388711366, + "acc_norm": 0.3253968253968254, + "acc_norm_stderr": 
0.041905964388711366 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.545751633986928, + "acc_stderr": 0.02850980780262659, + "acc_norm": 0.545751633986928, + "acc_norm_stderr": 0.02850980780262659 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7107438016528925, + "acc_stderr": 0.041391127276354626, + "acc_norm": 0.7107438016528925, + "acc_norm_stderr": 0.041391127276354626 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.506578947368421, + "acc_stderr": 0.04068590050224971, + "acc_norm": 0.506578947368421, + "acc_norm_stderr": 0.04068590050224971 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.44281045751633985, + "acc_stderr": 0.02009508315457734, + "acc_norm": 0.44281045751633985, + "acc_norm_stderr": 0.02009508315457734 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.35815602836879434, + "acc_stderr": 0.02860208586275941, + "acc_norm": 0.35815602836879434, + "acc_norm_stderr": 0.02860208586275941 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3392857142857143, + "acc_stderr": 0.04493949068613539, + "acc_norm": 0.3392857142857143, + "acc_norm_stderr": 0.04493949068613539 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4027777777777778, + "acc_stderr": 0.03344887382997866, + "acc_norm": 0.4027777777777778, + "acc_norm_stderr": 0.03344887382997866 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2770949720670391, + "acc_stderr": 0.014968772435812145, + "acc_norm": 0.2770949720670391, + "acc_norm_stderr": 0.014968772435812145 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.55, + "acc_stderr": 0.049999999999999996, + "acc_norm": 0.55, + 
"acc_norm_stderr": 0.049999999999999996 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3786764705882353, + "acc_stderr": 0.02946513363977613, + "acc_norm": 0.3786764705882353, + "acc_norm_stderr": 0.02946513363977613 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6204081632653061, + "acc_stderr": 0.031067211262872464, + "acc_norm": 0.6204081632653061, + "acc_norm_stderr": 0.031067211262872464 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6497890295358649, + "acc_stderr": 0.031052391937584346, + "acc_norm": 0.6497890295358649, + "acc_norm_stderr": 0.031052391937584346 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.35267275097783574, + "acc_stderr": 0.012203286846053886, + "acc_norm": 0.35267275097783574, + "acc_norm_stderr": 0.012203286846053886 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5637254901960784, + "acc_stderr": 0.03480693138457039, + "acc_norm": 0.5637254901960784, + "acc_norm_stderr": 0.03480693138457039 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.593939393939394, + "acc_stderr": 0.03834816355401181, + "acc_norm": 0.593939393939394, + "acc_norm_stderr": 0.03834816355401181 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2839657282741738, + "mc1_stderr": 0.015785370858396704, + "mc2": 0.4382557590409575, + "mc2_stderr": 0.015307727969976953 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4332939787485242, + "acc_stderr": 0.017036683641893098, + "acc_norm": 0.51357733175915, + "acc_norm_stderr": 0.01718401506040146 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + 
"harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + 
"harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Changgil/K2S3-Mistral-7b-v1.3", + "model_sha": "3825ea65280f33aad5dab2d8b51a0af776f8e4a6", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Changgil/K2S3-Mistral-7b-v1.48/result_2024-04-15 04:09:00.json b/Changgil/K2S3-Mistral-7b-v1.48/result_2024-04-15 04:09:00.json new file mode 100644 index 0000000000000000000000000000000000000000..70d74d805db7ba314161d3e89b2605fba7070f24 --- /dev/null +++ b/Changgil/K2S3-Mistral-7b-v1.48/result_2024-04-15 04:09:00.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.39505119453924914, + "acc_stderr": 0.014285898292938172, + "acc_norm": 0.44112627986348124, + "acc_norm_stderr": 0.014509747749064664 + }, + "harness|ko_hellaswag|10": { + "acc": 0.38637721569408484, + "acc_stderr": 0.0048592361915797905, + "acc_norm": 0.49960167297351127, + "acc_norm_stderr": 0.0049897798280438485 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5146198830409356, + "acc_stderr": 0.03833185275213025, + "acc_norm": 0.5146198830409356, + "acc_norm_stderr": 0.03833185275213025 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6213592233009708, + "acc_stderr": 0.04802694698258975, + "acc_norm": 0.6213592233009708, + "acc_norm_stderr": 0.04802694698258975 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5159642401021711, + "acc_stderr": 0.01787084750608174, + "acc_norm": 0.5159642401021711, + "acc_norm_stderr": 0.01787084750608174 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45185185185185184, + 
"acc_stderr": 0.042992689054808624, + "acc_norm": 0.45185185185185184, + "acc_norm_stderr": 0.042992689054808624 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3872340425531915, + "acc_stderr": 0.03184389265339526, + "acc_norm": 0.3872340425531915, + "acc_norm_stderr": 0.03184389265339526 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.40963855421686746, + "acc_stderr": 0.03828401115079021, + "acc_norm": 0.40963855421686746, + "acc_norm_stderr": 0.03828401115079021 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5273311897106109, + "acc_stderr": 0.02835563356832818, + "acc_norm": 0.5273311897106109, + "acc_norm_stderr": 0.02835563356832818 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5067264573991032, + "acc_stderr": 0.033554765962343545, + "acc_norm": 0.5067264573991032, + "acc_norm_stderr": 0.033554765962343545 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.45038167938931295, + "acc_stderr": 0.04363643698524779, + "acc_norm": 0.45038167938931295, + "acc_norm_stderr": 0.04363643698524779 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6313131313131313, + "acc_stderr": 0.03437305501980619, + "acc_norm": 0.6313131313131313, + "acc_norm_stderr": 0.03437305501980619 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4413793103448276, + "acc_stderr": 0.04137931034482758, + "acc_norm": 0.4413793103448276, + "acc_norm_stderr": 0.04137931034482758 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.19607843137254902, + "acc_stderr": 0.03950581861179962, + "acc_norm": 0.19607843137254902, + "acc_norm_stderr": 0.03950581861179962 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + 
"acc": 0.5, + "acc_stderr": 0.032478490123081544, + "acc_norm": 0.5, + "acc_norm_stderr": 0.032478490123081544 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4717948717948718, + "acc_stderr": 0.025310639254933903, + "acc_norm": 0.4717948717948718, + "acc_norm_stderr": 0.025310639254933903 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.46798029556650245, + "acc_stderr": 0.035107665979592154, + "acc_norm": 0.46798029556650245, + "acc_norm_stderr": 0.035107665979592154 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.47419354838709676, + "acc_stderr": 0.028406095057653326, + "acc_norm": 0.47419354838709676, + "acc_norm_stderr": 0.028406095057653326 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7136752136752137, + "acc_stderr": 0.029614323690456648, + "acc_norm": 0.7136752136752137, + "acc_norm_stderr": 0.029614323690456648 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4867924528301887, + "acc_stderr": 0.030762134874500482, + "acc_norm": 0.4867924528301887, + "acc_norm_stderr": 0.030762134874500482 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5181818181818182, + "acc_stderr": 0.04785964010794915, + "acc_norm": 0.5181818181818182, + "acc_norm_stderr": 0.04785964010794915 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.028742040903948492, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.028742040903948492 + }, + 
"harness|ko_mmlu_high_school_physics|5": { + "acc": 0.40397350993377484, + "acc_stderr": 0.0400648568536534, + "acc_norm": 0.40397350993377484, + "acc_norm_stderr": 0.0400648568536534 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6567164179104478, + "acc_stderr": 0.03357379665433431, + "acc_norm": 0.6567164179104478, + "acc_norm_stderr": 0.03357379665433431 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4508670520231214, + "acc_stderr": 0.03794012674697028, + "acc_norm": 0.4508670520231214, + "acc_norm_stderr": 0.03794012674697028 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.38095238095238093, + "acc_stderr": 0.025010749116137588, + "acc_norm": 0.38095238095238093, + "acc_norm_stderr": 0.025010749116137588 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3958333333333333, + "acc_stderr": 0.04089465449325582, + "acc_norm": 0.3958333333333333, + "acc_norm_stderr": 0.04089465449325582 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.63, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.63, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.523121387283237, + "acc_stderr": 0.026890297881303118, + "acc_norm": 0.523121387283237, + "acc_norm_stderr": 0.026890297881303118 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5521472392638037, + "acc_stderr": 0.03906947479456607, + "acc_norm": 0.5521472392638037, + "acc_norm_stderr": 0.03906947479456607 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4845679012345679, + "acc_stderr": 0.02780749004427621, + "acc_norm": 0.4845679012345679, + "acc_norm_stderr": 0.02780749004427621 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + 
"harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5854922279792746, + "acc_stderr": 0.035553003195576686, + "acc_norm": 0.5854922279792746, + "acc_norm_stderr": 0.035553003195576686 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.30701754385964913, + "acc_stderr": 0.0433913832257986, + "acc_norm": 0.30701754385964913, + "acc_norm_stderr": 0.0433913832257986 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5302752293577981, + "acc_stderr": 0.021397988604936965, + "acc_norm": 0.5302752293577981, + "acc_norm_stderr": 0.021397988604936965 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.04285714285714281, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.04285714285714281 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5163398692810458, + "acc_stderr": 0.028614624752805434, + "acc_norm": 0.5163398692810458, + "acc_norm_stderr": 0.028614624752805434 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6942148760330579, + "acc_stderr": 0.04205953933884123, + "acc_norm": 0.6942148760330579, + "acc_norm_stderr": 0.04205953933884123 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4868421052631579, + "acc_stderr": 0.04067533136309174, + "acc_norm": 0.4868421052631579, + "acc_norm_stderr": 0.04067533136309174 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4019607843137255, + "acc_stderr": 0.01983517648437538, + "acc_norm": 0.4019607843137255, + "acc_norm_stderr": 0.01983517648437538 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.35815602836879434, + "acc_stderr": 0.028602085862759415, + "acc_norm": 0.35815602836879434, + "acc_norm_stderr": 0.028602085862759415 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.29464285714285715, + "acc_stderr": 0.04327040932578728, + 
"acc_norm": 0.29464285714285715, + "acc_norm_stderr": 0.04327040932578728 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.41203703703703703, + "acc_stderr": 0.03356787758160835, + "acc_norm": 0.41203703703703703, + "acc_norm_stderr": 0.03356787758160835 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.26145251396648045, + "acc_stderr": 0.014696599650364555, + "acc_norm": 0.26145251396648045, + "acc_norm_stderr": 0.014696599650364555 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4007352941176471, + "acc_stderr": 0.02976826352893311, + "acc_norm": 0.4007352941176471, + "acc_norm_stderr": 0.02976826352893311 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6122448979591837, + "acc_stderr": 0.031192230726795656, + "acc_norm": 0.6122448979591837, + "acc_norm_stderr": 0.031192230726795656 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6413502109704642, + "acc_stderr": 0.03121956944530186, + "acc_norm": 0.6413502109704642, + "acc_norm_stderr": 0.03121956944530186 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3539765319426336, + "acc_stderr": 0.01221350473173165, + "acc_norm": 0.3539765319426336, + "acc_norm_stderr": 0.01221350473173165 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5, + "acc_stderr": 0.03509312031717982, + "acc_norm": 0.5, + "acc_norm_stderr": 0.03509312031717982 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5878787878787879, + "acc_stderr": 0.03843566993588717, + "acc_norm": 0.5878787878787879, + "acc_norm_stderr": 0.03843566993588717 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.26560587515299877, + "mc1_stderr": 
0.015461027627253595, + "mc2": 0.41198238256398484, + "mc2_stderr": 0.015155918602262708 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4002361275088548, + "acc_stderr": 0.016844693510505045, + "acc_norm": 0.46162927981109797, + "acc_norm_stderr": 0.01713966022184556 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + 
"harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Changgil/K2S3-Mistral-7b-v1.48", + "model_sha": "ac358c944bceb2129fb45398c7722321df5f55eb", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Changgil/K2S3-Mistral-7bx2-48layers_v1.2/result_2024-03-28 07:58:59.json b/Changgil/K2S3-Mistral-7bx2-48layers_v1.2/result_2024-03-28 07:58:59.json new file mode 100644 index 0000000000000000000000000000000000000000..a1a94f0f4ac1ce26df195d22c193b30272638816 --- /dev/null +++ b/Changgil/K2S3-Mistral-7bx2-48layers_v1.2/result_2024-03-28 07:58:59.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.35409556313993173, + "acc_stderr": 0.013975454122756565, + "acc_norm": 0.4189419795221843, + "acc_norm_stderr": 0.014418106953639008 + }, + "harness|ko_hellaswag|10": { + "acc": 0.37024497112129057, + "acc_stderr": 0.004818833521340355, + 
"acc_norm": 0.4924317864967138, + "acc_norm_stderr": 0.00498920977074323 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.49707602339181284, + "acc_stderr": 0.03834759370936839, + "acc_norm": 0.49707602339181284, + "acc_norm_stderr": 0.03834759370936839 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6310679611650486, + "acc_stderr": 0.0477761518115674, + "acc_norm": 0.6310679611650486, + "acc_norm_stderr": 0.0477761518115674 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5006385696040868, + "acc_stderr": 0.017879948914431697, + "acc_norm": 0.5006385696040868, + "acc_norm_stderr": 0.017879948914431697 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3851851851851852, + "acc_stderr": 0.042039210401562783, + "acc_norm": 0.3851851851851852, + "acc_norm_stderr": 0.042039210401562783 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.40425531914893614, + "acc_stderr": 0.03208115750788683, + "acc_norm": 0.40425531914893614, + "acc_norm_stderr": 0.03208115750788683 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3493975903614458, + "acc_stderr": 0.0371172519074075, + "acc_norm": 0.3493975903614458, + "acc_norm_stderr": 0.0371172519074075 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.49517684887459806, + "acc_stderr": 0.02839677044411129, + "acc_norm": 0.49517684887459806, + "acc_norm_stderr": 0.02839677044411129 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.484304932735426, + "acc_stderr": 0.0335412657542081, + "acc_norm": 0.484304932735426, + "acc_norm_stderr": 0.0335412657542081 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.44274809160305345, + "acc_stderr": 0.04356447202665069, + "acc_norm": 0.44274809160305345, + "acc_norm_stderr": 0.04356447202665069 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + 
"acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5959595959595959, + "acc_stderr": 0.03496130972056127, + "acc_norm": 0.5959595959595959, + "acc_norm_stderr": 0.03496130972056127 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4482758620689655, + "acc_stderr": 0.04144311810878151, + "acc_norm": 0.4482758620689655, + "acc_norm_stderr": 0.04144311810878151 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.04280105837364397, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.04280105837364397 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4789915966386555, + "acc_stderr": 0.03244980849990029, + "acc_norm": 0.4789915966386555, + "acc_norm_stderr": 0.03244980849990029 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.025294608023986483, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.025294608023986483 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421255, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421255 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5370370370370371, + "acc_stderr": 0.04820403072760627, + "acc_norm": 0.5370370370370371, + "acc_norm_stderr": 0.04820403072760627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3694581280788177, + "acc_stderr": 0.03395970381998574, + "acc_norm": 0.3694581280788177, + "acc_norm_stderr": 0.03395970381998574 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.45806451612903226, + "acc_stderr": 0.028343787250540632, + "acc_norm": 0.45806451612903226, + "acc_norm_stderr": 0.028343787250540632 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7136752136752137, + 
"acc_stderr": 0.02961432369045665, + "acc_norm": 0.7136752136752137, + "acc_norm_stderr": 0.02961432369045665 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4981132075471698, + "acc_stderr": 0.030772653642075657, + "acc_norm": 0.4981132075471698, + "acc_norm_stderr": 0.030772653642075657 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.42727272727272725, + "acc_stderr": 0.047381987035454834, + "acc_norm": 0.42727272727272725, + "acc_norm_stderr": 0.047381987035454834 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.32222222222222224, + "acc_stderr": 0.028493465091028597, + "acc_norm": 0.32222222222222224, + "acc_norm_stderr": 0.028493465091028597 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3509933774834437, + "acc_stderr": 0.03896981964257375, + "acc_norm": 0.3509933774834437, + "acc_norm_stderr": 0.03896981964257375 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6417910447761194, + "acc_stderr": 0.03390393042268815, + "acc_norm": 0.6417910447761194, + "acc_norm_stderr": 0.03390393042268815 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4277456647398844, + "acc_stderr": 0.037724468575180276, + "acc_norm": 0.4277456647398844, + "acc_norm_stderr": 0.037724468575180276 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3492063492063492, + "acc_stderr": 0.024552292209342654, + "acc_norm": 0.3492063492063492, + "acc_norm_stderr": 0.024552292209342654 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4027777777777778, + "acc_stderr": 0.041014055198424264, + "acc_norm": 0.4027777777777778, + "acc_norm_stderr": 0.041014055198424264 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.65, + "acc_stderr": 0.04793724854411019, + "acc_norm": 0.65, + "acc_norm_stderr": 0.04793724854411019 + }, + 
"harness|ko_mmlu_moral_disputes|5": { + "acc": 0.48265895953757226, + "acc_stderr": 0.02690290045866664, + "acc_norm": 0.48265895953757226, + "acc_norm_stderr": 0.02690290045866664 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.48466257668711654, + "acc_stderr": 0.039265223787088445, + "acc_norm": 0.48466257668711654, + "acc_norm_stderr": 0.039265223787088445 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.027744313443376543, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.027744313443376543 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5595854922279793, + "acc_stderr": 0.03582724530036095, + "acc_norm": 0.5595854922279793, + "acc_norm_stderr": 0.03582724530036095 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.039994238792813365, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.039994238792813365 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5211009174311927, + "acc_stderr": 0.021418224754264643, + "acc_norm": 0.5211009174311927, + "acc_norm_stderr": 0.021418224754264643 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30158730158730157, + "acc_stderr": 0.041049472699033945, + "acc_norm": 0.30158730158730157, + "acc_norm_stderr": 0.041049472699033945 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.45098039215686275, + "acc_stderr": 0.02849199358617157, + "acc_norm": 0.45098039215686275, + "acc_norm_stderr": 0.02849199358617157 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.45, + "acc_stderr": 0.049999999999999996, + "acc_norm": 0.45, + "acc_norm_stderr": 0.049999999999999996 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6198347107438017, + "acc_stderr": 0.04431324501968431, + "acc_norm": 0.6198347107438017, + 
"acc_norm_stderr": 0.04431324501968431 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.48026315789473684, + "acc_stderr": 0.04065771002562605, + "acc_norm": 0.48026315789473684, + "acc_norm_stderr": 0.04065771002562605 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.41013071895424835, + "acc_stderr": 0.0198984127176359, + "acc_norm": 0.41013071895424835, + "acc_norm_stderr": 0.0198984127176359 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3617021276595745, + "acc_stderr": 0.0286638201471995, + "acc_norm": 0.3617021276595745, + "acc_norm_stderr": 0.0286638201471995 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.33035714285714285, + "acc_stderr": 0.04464285714285713, + "acc_norm": 0.33035714285714285, + "acc_norm_stderr": 0.04464285714285713 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.41203703703703703, + "acc_stderr": 0.03356787758160835, + "acc_norm": 0.41203703703703703, + "acc_norm_stderr": 0.03356787758160835 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27150837988826815, + "acc_stderr": 0.01487425216809528, + "acc_norm": 0.27150837988826815, + "acc_norm_stderr": 0.01487425216809528 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4227941176470588, + "acc_stderr": 0.030008562845003476, + "acc_norm": 0.4227941176470588, + "acc_norm_stderr": 0.030008562845003476 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5877551020408164, + "acc_stderr": 0.03151236044674268, + "acc_norm": 0.5877551020408164, + "acc_norm_stderr": 0.03151236044674268 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5822784810126582, + 
"acc_stderr": 0.032103530322412685, + "acc_norm": 0.5822784810126582, + "acc_norm_stderr": 0.032103530322412685 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.34615384615384615, + "acc_stderr": 0.012150699768228588, + "acc_norm": 0.34615384615384615, + "acc_norm_stderr": 0.012150699768228588 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.47058823529411764, + "acc_stderr": 0.03503235296367992, + "acc_norm": 0.47058823529411764, + "acc_norm_stderr": 0.03503235296367992 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5212121212121212, + "acc_stderr": 0.03900828913737302, + "acc_norm": 0.5212121212121212, + "acc_norm_stderr": 0.03900828913737302 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.29008567931456547, + "mc1_stderr": 0.01588623687420952, + "mc2": 0.4788271114193854, + "mc2_stderr": 0.015629787609916828 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4085005903187721, + "acc_stderr": 0.01690006287942712, + "acc_norm": 0.5017709563164109, + "acc_norm_stderr": 0.01719024627623186 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + 
"harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Changgil/K2S3-Mistral-7bx2-48layers_v1.2", + "model_sha": "ff242b7f1bcebcc1e0f913b934536e66045d8b4b", + "model_dtype": "torch.float16", + "lighteval_sha": "", + 
"num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Changgil/K2S3-SOLAR-11b-v1.0/result_2024-03-03 06:41:46.json b/Changgil/K2S3-SOLAR-11b-v1.0/result_2024-03-03 06:41:46.json new file mode 100644 index 0000000000000000000000000000000000000000..5573da6dedc043f441658afdea6dbce02bbd88d9 --- /dev/null +++ b/Changgil/K2S3-SOLAR-11b-v1.0/result_2024-03-03 06:41:46.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.28754266211604096, + "acc_stderr": 0.013226719056266132, + "acc_norm": 0.3319112627986348, + "acc_norm_stderr": 0.013760988200880543 + }, + "harness|ko_hellaswag|10": { + "acc": 0.344353714399522, + "acc_stderr": 0.004741859753178417, + "acc_norm": 0.4291973710416252, + "acc_norm_stderr": 0.004939500404882185 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.23976608187134502, + "acc_stderr": 0.03274485211946956, + "acc_norm": 0.23976608187134502, + "acc_norm_stderr": 0.03274485211946956 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.24271844660194175, + "acc_stderr": 0.04245022486384495, + "acc_norm": 0.24271844660194175, + "acc_norm_stderr": 0.04245022486384495 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3065134099616858, + "acc_stderr": 0.016486952893041515, + "acc_norm": 0.3065134099616858, + "acc_norm_stderr": 0.016486952893041515 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.03944624162501116, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.03944624162501116 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.31063829787234043, + "acc_stderr": 0.030251237579213167, + "acc_norm": 0.31063829787234043, + "acc_norm_stderr": 0.030251237579213167 + }, + "harness|ko_mmlu_virology|5": { + "acc": 
0.37349397590361444, + "acc_stderr": 0.037658451171688624, + "acc_norm": 0.37349397590361444, + "acc_norm_stderr": 0.037658451171688624 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.29260450160771706, + "acc_stderr": 0.025839898334877983, + "acc_norm": 0.29260450160771706, + "acc_norm_stderr": 0.025839898334877983 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.36771300448430494, + "acc_stderr": 0.03236198350928275, + "acc_norm": 0.36771300448430494, + "acc_norm_stderr": 0.03236198350928275 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.35877862595419846, + "acc_stderr": 0.04206739313864908, + "acc_norm": 0.35877862595419846, + "acc_norm_stderr": 0.04206739313864908 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.03191178226713546, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.03191178226713546 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.296551724137931, + "acc_stderr": 0.038061426873099935, + "acc_norm": 0.296551724137931, + "acc_norm_stderr": 0.038061426873099935 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.18627450980392157, + "acc_stderr": 0.038739587141493524, + "acc_norm": 0.18627450980392157, + "acc_norm_stderr": 0.038739587141493524 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.23109243697478993, + "acc_stderr": 0.027381406927868973, + "acc_norm": 0.23109243697478993, + "acc_norm_stderr": 0.027381406927868973 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.022421273612923707, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.022421273612923707 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 
0.04852365870939099 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.32407407407407407, + "acc_stderr": 0.04524596007030048, + "acc_norm": 0.32407407407407407, + "acc_norm_stderr": 0.04524596007030048 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3103448275862069, + "acc_stderr": 0.032550867699701024, + "acc_norm": 0.3103448275862069, + "acc_norm_stderr": 0.032550867699701024 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3, + "acc_stderr": 0.026069362295335137, + "acc_norm": 0.3, + "acc_norm_stderr": 0.026069362295335137 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.32051282051282054, + "acc_stderr": 0.030572811310299607, + "acc_norm": 0.32051282051282054, + "acc_norm_stderr": 0.030572811310299607 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3169811320754717, + "acc_stderr": 0.02863723563980092, + "acc_norm": 0.3169811320754717, + "acc_norm_stderr": 0.02863723563980092 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.3181818181818182, + "acc_stderr": 0.04461272175910507, + "acc_norm": 0.3181818181818182, + "acc_norm_stderr": 0.04461272175910507 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25555555555555554, + "acc_stderr": 0.026593939101844075, + "acc_norm": 0.25555555555555554, + "acc_norm_stderr": 0.026593939101844075 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.23178807947019867, + "acc_stderr": 0.034454062719870546, + "acc_norm": 0.23178807947019867, + "acc_norm_stderr": 0.034454062719870546 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.2935323383084577, + "acc_stderr": 0.03220024104534205, + "acc_norm": 0.2935323383084577, + "acc_norm_stderr": 0.03220024104534205 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.26011560693641617, + "acc_stderr": 0.033450369167889925, + "acc_norm": 
0.26011560693641617, + "acc_norm_stderr": 0.033450369167889925 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.26455026455026454, + "acc_stderr": 0.022717467897708617, + "acc_norm": 0.26455026455026454, + "acc_norm_stderr": 0.022717467897708617 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2152777777777778, + "acc_stderr": 0.034370793441061316, + "acc_norm": 0.2152777777777778, + "acc_norm_stderr": 0.034370793441061316 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.19, + "acc_stderr": 0.03942772444036623, + "acc_norm": 0.19, + "acc_norm_stderr": 0.03942772444036623 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.3208092485549133, + "acc_stderr": 0.025131000233647907, + "acc_norm": 0.3208092485549133, + "acc_norm_stderr": 0.025131000233647907 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.25766871165644173, + "acc_stderr": 0.03436150827846917, + "acc_norm": 0.25766871165644173, + "acc_norm_stderr": 0.03436150827846917 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.33024691358024694, + "acc_stderr": 0.026168298456732846, + "acc_norm": 0.33024691358024694, + "acc_norm_stderr": 0.026168298456732846 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.24870466321243523, + "acc_stderr": 0.03119584087770031, + "acc_norm": 0.24870466321243523, + "acc_norm_stderr": 0.03119584087770031 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.04142439719489362, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.04142439719489362 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3100917431192661, + "acc_stderr": 
0.019830849684439752, + "acc_norm": 0.3100917431192661, + "acc_norm_stderr": 0.019830849684439752 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2698412698412698, + "acc_stderr": 0.03970158273235173, + "acc_norm": 0.2698412698412698, + "acc_norm_stderr": 0.03970158273235173 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3366013071895425, + "acc_stderr": 0.027057974624494382, + "acc_norm": 0.3366013071895425, + "acc_norm_stderr": 0.027057974624494382 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.371900826446281, + "acc_stderr": 0.04412015806624504, + "acc_norm": 0.371900826446281, + "acc_norm_stderr": 0.04412015806624504 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.21052631578947367, + "acc_stderr": 0.033176727875331574, + "acc_norm": 0.21052631578947367, + "acc_norm_stderr": 0.033176727875331574 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.27941176470588236, + "acc_stderr": 0.01815287105153881, + "acc_norm": 0.27941176470588236, + "acc_norm_stderr": 0.01815287105153881 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.28368794326241137, + "acc_stderr": 0.02689170942834396, + "acc_norm": 0.28368794326241137, + "acc_norm_stderr": 0.02689170942834396 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25, + "acc_stderr": 0.04109974682633932, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04109974682633932 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.25462962962962965, + "acc_stderr": 0.029711275860005344, + "acc_norm": 0.25462962962962965, + "acc_norm_stderr": 0.029711275860005344 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 
0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.33455882352941174, + "acc_stderr": 0.02866199620233531, + "acc_norm": 0.33455882352941174, + "acc_norm_stderr": 0.02866199620233531 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2693877551020408, + "acc_stderr": 0.02840125202902294, + "acc_norm": 0.2693877551020408, + "acc_norm_stderr": 0.02840125202902294 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.31645569620253167, + "acc_stderr": 0.030274974880218974, + "acc_norm": 0.31645569620253167, + "acc_norm_stderr": 0.030274974880218974 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.28552803129074317, + "acc_stderr": 0.01153575158666565, + "acc_norm": 0.28552803129074317, + "acc_norm_stderr": 0.01153575158666565 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.30392156862745096, + "acc_stderr": 0.032282103870378935, + "acc_norm": 0.30392156862745096, + "acc_norm_stderr": 0.032282103870378935 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3090909090909091, + "acc_stderr": 0.03608541011573967, + "acc_norm": 0.3090909090909091, + "acc_norm_stderr": 0.03608541011573967 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2729498164014688, + "mc1_stderr": 0.015594753632006514, + "mc2": 0.44588495304167214, + "mc2_stderr": 0.015458963700699168 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.27744982290436837, + "acc_stderr": 0.015393630236605975, + "acc_norm": 0.3105076741440378, + "acc_norm_stderr": 0.015908004528762024 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + 
"harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 
1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Changgil/K2S3-SOLAR-11b-v1.0", + "model_sha": "3c5ff9c8a00dfb6cf8619ce08c2f06a22e650e0c", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Changgil/K2S3-SOLAR-11b-v2.0/result_2024-03-05 09:03:08.json b/Changgil/K2S3-SOLAR-11b-v2.0/result_2024-03-05 09:03:08.json new file mode 100644 index 0000000000000000000000000000000000000000..15d3ea6b1c6c7446aed6e3686ed09cd4645e5f0e --- /dev/null +++ b/Changgil/K2S3-SOLAR-11b-v2.0/result_2024-03-05 09:03:08.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.27559726962457337, + "acc_stderr": 0.01305716965576184, + "acc_norm": 0.3319112627986348, + "acc_norm_stderr": 0.013760988200880543 + }, + "harness|ko_hellaswag|10": { + "acc": 0.33359888468432586, + "acc_stderr": 0.004705347137699606, + "acc_norm": 0.40748854809798846, + "acc_norm_stderr": 0.0049036288872645285 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.034462962170884265, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.034462962170884265 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.3300970873786408, + "acc_stderr": 0.0465614711001235, + "acc_norm": 0.3300970873786408, + "acc_norm_stderr": 0.0465614711001235 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 
0.3563218390804598, + "acc_stderr": 0.017125853762755893, + "acc_norm": 0.3563218390804598, + "acc_norm_stderr": 0.017125853762755893 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37777777777777777, + "acc_stderr": 0.04188307537595853, + "acc_norm": 0.37777777777777777, + "acc_norm_stderr": 0.04188307537595853 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720683, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720683 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.31063829787234043, + "acc_stderr": 0.030251237579213167, + "acc_norm": 0.31063829787234043, + "acc_norm_stderr": 0.030251237579213167 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39156626506024095, + "acc_stderr": 0.03799857454479636, + "acc_norm": 0.39156626506024095, + "acc_norm_stderr": 0.03799857454479636 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3440514469453376, + "acc_stderr": 0.026981478043648043, + "acc_norm": 0.3440514469453376, + "acc_norm_stderr": 0.026981478043648043 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.36771300448430494, + "acc_stderr": 0.03236198350928275, + "acc_norm": 0.36771300448430494, + "acc_norm_stderr": 0.03236198350928275 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2824427480916031, + "acc_stderr": 0.03948406125768361, + "acc_norm": 0.2824427480916031, + "acc_norm_stderr": 0.03948406125768361 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.30808080808080807, + "acc_stderr": 0.03289477330098615, + "acc_norm": 0.30808080808080807, + "acc_norm_stderr": 0.03289477330098615 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.30344827586206896, + "acc_stderr": 0.038312260488503336, + "acc_norm": 0.30344827586206896, + "acc_norm_stderr": 0.038312260488503336 + }, + 
"harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.04280105837364395, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.04280105837364395 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.31512605042016806, + "acc_stderr": 0.03017680828897434, + "acc_norm": 0.31512605042016806, + "acc_norm_stderr": 0.03017680828897434 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2948717948717949, + "acc_stderr": 0.023119362758232297, + "acc_norm": 0.2948717948717949, + "acc_norm_stderr": 0.023119362758232297 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.04643454608906275, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.04643454608906275 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2512315270935961, + "acc_stderr": 0.030516530732694436, + "acc_norm": 0.2512315270935961, + "acc_norm_stderr": 0.030516530732694436 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.33225806451612905, + "acc_stderr": 0.0267955608481228, + "acc_norm": 0.33225806451612905, + "acc_norm_stderr": 0.0267955608481228 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5042735042735043, + "acc_stderr": 0.03275489264382132, + "acc_norm": 0.5042735042735043, + "acc_norm_stderr": 0.03275489264382132 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3169811320754717, + "acc_stderr": 0.02863723563980092, + "acc_norm": 0.3169811320754717, + "acc_norm_stderr": 0.02863723563980092 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.34545454545454546, + "acc_stderr": 0.04554619617541054, + "acc_norm": 0.34545454545454546, + 
"acc_norm_stderr": 0.04554619617541054 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2740740740740741, + "acc_stderr": 0.027195934804085622, + "acc_norm": 0.2740740740740741, + "acc_norm_stderr": 0.027195934804085622 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.26490066225165565, + "acc_stderr": 0.03603038545360384, + "acc_norm": 0.26490066225165565, + "acc_norm_stderr": 0.03603038545360384 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.3482587064676617, + "acc_stderr": 0.03368787466115459, + "acc_norm": 0.3482587064676617, + "acc_norm_stderr": 0.03368787466115459 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3236994219653179, + "acc_stderr": 0.03567603799639168, + "acc_norm": 0.3236994219653179, + "acc_norm_stderr": 0.03567603799639168 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.24603174603174602, + "acc_stderr": 0.022182037202948368, + "acc_norm": 0.24603174603174602, + "acc_norm_stderr": 0.022182037202948368 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2638888888888889, + "acc_stderr": 0.03685651095897532, + "acc_norm": 0.2638888888888889, + "acc_norm_stderr": 0.03685651095897532 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816505, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816505 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956913, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956913 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.315028901734104, + "acc_stderr": 0.025009313790069713, + "acc_norm": 0.315028901734104, + "acc_norm_stderr": 0.025009313790069713 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3987730061349693, + "acc_stderr": 0.03847021420456023, + "acc_norm": 0.3987730061349693, + "acc_norm_stderr": 0.03847021420456023 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3734567901234568, + "acc_stderr": 0.026915003011380154, + 
"acc_norm": 0.3734567901234568, + "acc_norm_stderr": 0.026915003011380154 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.3160621761658031, + "acc_stderr": 0.03355397369686175, + "acc_norm": 0.3160621761658031, + "acc_norm_stderr": 0.03355397369686175 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.03999423879281336, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.03999423879281336 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3192660550458716, + "acc_stderr": 0.019987829069750024, + "acc_norm": 0.3192660550458716, + "acc_norm_stderr": 0.019987829069750024 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.23015873015873015, + "acc_stderr": 0.03764950879790607, + "acc_norm": 0.23015873015873015, + "acc_norm_stderr": 0.03764950879790607 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3464052287581699, + "acc_stderr": 0.02724561304721536, + "acc_norm": 0.3464052287581699, + "acc_norm_stderr": 0.02724561304721536 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.39669421487603307, + "acc_stderr": 0.044658697805310094, + "acc_norm": 0.39669421487603307, + "acc_norm_stderr": 0.044658697805310094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.27631578947368424, + "acc_stderr": 0.03639057569952925, + "acc_norm": 0.27631578947368424, + "acc_norm_stderr": 0.03639057569952925 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2973856209150327, + "acc_stderr": 0.018492596536396955, + "acc_norm": 0.2973856209150327, + "acc_norm_stderr": 0.018492596536396955 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 
0.2801418439716312, + "acc_stderr": 0.02678917235114025, + "acc_norm": 0.2801418439716312, + "acc_norm_stderr": 0.02678917235114025 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3392857142857143, + "acc_stderr": 0.0449394906861354, + "acc_norm": 0.3392857142857143, + "acc_norm_stderr": 0.0449394906861354 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.20833333333333334, + "acc_stderr": 0.02769691071309394, + "acc_norm": 0.20833333333333334, + "acc_norm_stderr": 0.02769691071309394 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24022346368715083, + "acc_stderr": 0.014288343803925295, + "acc_norm": 0.24022346368715083, + "acc_norm_stderr": 0.014288343803925295 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.18, + "acc_stderr": 0.038612291966536955, + "acc_norm": 0.18, + "acc_norm_stderr": 0.038612291966536955 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3602941176470588, + "acc_stderr": 0.029163128570670733, + "acc_norm": 0.3602941176470588, + "acc_norm_stderr": 0.029163128570670733 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.27755102040816326, + "acc_stderr": 0.028666857790274645, + "acc_norm": 0.27755102040816326, + "acc_norm_stderr": 0.028666857790274645 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.03068582059661079, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.03068582059661079 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.25945241199478486, + "acc_stderr": 0.01119526207635032, + "acc_norm": 0.25945241199478486, + "acc_norm_stderr": 0.01119526207635032 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.35784313725490197, + "acc_stderr": 0.033644872860882996, + "acc_norm": 0.35784313725490197, + "acc_norm_stderr": 
0.033644872860882996 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3212121212121212, + "acc_stderr": 0.036462049632538115, + "acc_norm": 0.3212121212121212, + "acc_norm_stderr": 0.036462049632538115 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2802937576499388, + "mc1_stderr": 0.015723139524608746, + "mc2": 0.4565764673278688, + "mc2_stderr": 0.015497077861968201 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.27390791027154665, + "acc_stderr": 0.015332499474791024, + "acc_norm": 0.3364817001180638, + "acc_norm_stderr": 0.016245085294386556 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + 
"harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Changgil/K2S3-SOLAR-11b-v2.0", + "model_sha": "ad171800ebf03b89cfe6d556a67ad765bb70292f", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Changgil/K2S3-SOLAR-11b-v3.0/result_2024-03-14 02:18:20.json b/Changgil/K2S3-SOLAR-11b-v3.0/result_2024-03-14 02:18:20.json new file mode 100644 index 0000000000000000000000000000000000000000..d48b3dc8533075e61793f406a11b306498543920 --- /dev/null +++ b/Changgil/K2S3-SOLAR-11b-v3.0/result_2024-03-14 02:18:20.json @@ -0,0 +1,444 @@ +{ + "results": { + 
"harness|ko_arc_challenge|25": { + "acc": 0.43600682593856654, + "acc_stderr": 0.014491225699230916, + "acc_norm": 0.5102389078498294, + "acc_norm_stderr": 0.014608326906285015 + }, + "harness|ko_hellaswag|10": { + "acc": 0.45160326628161723, + "acc_stderr": 0.004966351835028203, + "acc_norm": 0.6191993626767576, + "acc_norm_stderr": 0.0048459128573386705 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6140350877192983, + "acc_stderr": 0.03733756969066165, + "acc_norm": 0.6140350877192983, + "acc_norm_stderr": 0.03733756969066165 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6504854368932039, + "acc_stderr": 0.047211885060971716, + "acc_norm": 0.6504854368932039, + "acc_norm_stderr": 0.047211885060971716 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6590038314176245, + "acc_stderr": 0.016951781383223313, + "acc_norm": 0.6590038314176245, + "acc_norm_stderr": 0.016951781383223313 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45925925925925926, + "acc_stderr": 0.04304979692464244, + "acc_norm": 0.45925925925925926, + "acc_norm_stderr": 0.04304979692464244 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.502127659574468, + "acc_stderr": 0.03268572658667493, + "acc_norm": 0.502127659574468, + "acc_norm_stderr": 0.03268572658667493 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4879518072289157, + "acc_stderr": 0.0389136449583582, + "acc_norm": 0.4879518072289157, + "acc_norm_stderr": 0.0389136449583582 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6045016077170418, + "acc_stderr": 0.027770918531427838, + "acc_norm": 0.6045016077170418, + "acc_norm_stderr": 0.027770918531427838 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5739910313901345, + "acc_stderr": 0.0331883328621728, + "acc_norm": 0.5739910313901345, + "acc_norm_stderr": 0.0331883328621728 + }, + 
"harness|ko_mmlu_human_sexuality|5": { + "acc": 0.648854961832061, + "acc_stderr": 0.0418644516301375, + "acc_norm": 0.648854961832061, + "acc_norm_stderr": 0.0418644516301375 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956914, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956914 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6818181818181818, + "acc_stderr": 0.03318477333845331, + "acc_norm": 0.6818181818181818, + "acc_norm_stderr": 0.03318477333845331 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5172413793103449, + "acc_stderr": 0.04164188720169375, + "acc_norm": 0.5172413793103449, + "acc_norm_stderr": 0.04164188720169375 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.04440521906179328, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.04440521906179328 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6302521008403361, + "acc_stderr": 0.03135709599613591, + "acc_norm": 0.6302521008403361, + "acc_norm_stderr": 0.03135709599613591 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5538461538461539, + "acc_stderr": 0.025203571773028323, + "acc_norm": 0.5538461538461539, + "acc_norm_stderr": 0.025203571773028323 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6481481481481481, + "acc_stderr": 0.046166311118017146, + "acc_norm": 0.6481481481481481, + "acc_norm_stderr": 0.046166311118017146 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.46798029556650245, + "acc_stderr": 0.03510766597959215, + "acc_norm": 0.46798029556650245, + "acc_norm_stderr": 
0.03510766597959215 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6096774193548387, + "acc_stderr": 0.027751256636969587, + "acc_norm": 0.6096774193548387, + "acc_norm_stderr": 0.027751256636969587 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.8418803418803419, + "acc_stderr": 0.023902325549560403, + "acc_norm": 0.8418803418803419, + "acc_norm_stderr": 0.023902325549560403 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5622641509433962, + "acc_stderr": 0.030533338430467516, + "acc_norm": 0.5622641509433962, + "acc_norm_stderr": 0.030533338430467516 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6181818181818182, + "acc_stderr": 0.046534298079135075, + "acc_norm": 0.6181818181818182, + "acc_norm_stderr": 0.046534298079135075 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.337037037037037, + "acc_stderr": 0.028820884666253255, + "acc_norm": 0.337037037037037, + "acc_norm_stderr": 0.028820884666253255 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.36423841059602646, + "acc_stderr": 0.03929111781242742, + "acc_norm": 0.36423841059602646, + "acc_norm_stderr": 0.03929111781242742 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7213930348258707, + "acc_stderr": 0.031700561834973086, + "acc_norm": 0.7213930348258707, + "acc_norm_stderr": 0.031700561834973086 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5086705202312138, + "acc_stderr": 0.03811890988940412, + "acc_norm": 0.5086705202312138, + "acc_norm_stderr": 0.03811890988940412 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.36507936507936506, + "acc_stderr": 0.024796060602699947, + "acc_norm": 0.36507936507936506, + "acc_norm_stderr": 0.024796060602699947 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5, + "acc_stderr": 0.04181210050035455, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04181210050035455 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + 
"acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.72, + "acc_stderr": 0.04512608598542126, + "acc_norm": 0.72, + "acc_norm_stderr": 0.04512608598542126 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5867052023121387, + "acc_stderr": 0.026511261369409244, + "acc_norm": 0.5867052023121387, + "acc_norm_stderr": 0.026511261369409244 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5521472392638037, + "acc_stderr": 0.03906947479456608, + "acc_norm": 0.5521472392638037, + "acc_norm_stderr": 0.03906947479456608 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6049382716049383, + "acc_stderr": 0.027201117666925657, + "acc_norm": 0.6049382716049383, + "acc_norm_stderr": 0.027201117666925657 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7253886010362695, + "acc_stderr": 0.03221024508041153, + "acc_norm": 0.7253886010362695, + "acc_norm_stderr": 0.03221024508041153 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.43859649122807015, + "acc_stderr": 0.04668000738510455, + "acc_norm": 0.43859649122807015, + "acc_norm_stderr": 0.04668000738510455 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.7174311926605504, + "acc_stderr": 0.01930424349770715, + "acc_norm": 0.7174311926605504, + "acc_norm_stderr": 0.01930424349770715 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.42063492063492064, + "acc_stderr": 0.04415438226743743, + "acc_norm": 0.42063492063492064, + "acc_norm_stderr": 0.04415438226743743 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.6013071895424836, + "acc_stderr": 0.02803609227389177, + "acc_norm": 0.6013071895424836, + "acc_norm_stderr": 0.02803609227389177 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + 
"acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7272727272727273, + "acc_stderr": 0.04065578140908705, + "acc_norm": 0.7272727272727273, + "acc_norm_stderr": 0.04065578140908705 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5592105263157895, + "acc_stderr": 0.04040311062490436, + "acc_norm": 0.5592105263157895, + "acc_norm_stderr": 0.04040311062490436 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4934640522875817, + "acc_stderr": 0.020226106567657814, + "acc_norm": 0.4934640522875817, + "acc_norm_stderr": 0.020226106567657814 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.39361702127659576, + "acc_stderr": 0.029144544781596154, + "acc_norm": 0.39361702127659576, + "acc_norm_stderr": 0.029144544781596154 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4107142857142857, + "acc_stderr": 0.046695106638751926, + "acc_norm": 0.4107142857142857, + "acc_norm_stderr": 0.046695106638751926 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5277777777777778, + "acc_stderr": 0.0340470532865388, + "acc_norm": 0.5277777777777778, + "acc_norm_stderr": 0.0340470532865388 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.19106145251396647, + "acc_stderr": 0.013148479802450805, + "acc_norm": 0.19106145251396647, + "acc_norm_stderr": 0.013148479802450805 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.66, + "acc_stderr": 0.04760952285695238, + "acc_norm": 0.66, + "acc_norm_stderr": 0.04760952285695238 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5330882352941176, + "acc_stderr": 0.03030625772246831, + "acc_norm": 0.5330882352941176, + "acc_norm_stderr": 0.03030625772246831 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 
0.6408163265306123, + "acc_stderr": 0.03071356045510849, + "acc_norm": 0.6408163265306123, + "acc_norm_stderr": 0.03071356045510849 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7130801687763713, + "acc_stderr": 0.029443773022594693, + "acc_norm": 0.7130801687763713, + "acc_norm_stderr": 0.029443773022594693 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.40352020860495436, + "acc_stderr": 0.012530241301193186, + "acc_norm": 0.40352020860495436, + "acc_norm_stderr": 0.012530241301193186 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6568627450980392, + "acc_stderr": 0.03332139944668086, + "acc_norm": 0.6568627450980392, + "acc_norm_stderr": 0.03332139944668086 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6606060606060606, + "acc_stderr": 0.03697442205031595, + "acc_norm": 0.6606060606060606, + "acc_norm_stderr": 0.03697442205031595 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.31701346389228885, + "mc1_stderr": 0.016289203374403396, + "mc2": 0.4695018092236423, + "mc2_stderr": 0.015379565123329907 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.551357733175915, + "acc_stderr": 0.01709943051472578, + "acc_norm": 0.5796930342384888, + "acc_norm_stderr": 0.01697059828117771 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 
1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + 
"model_name": "Changgil/K2S3-SOLAR-11b-v3.0", + "model_sha": "b47d5115a5e4a1fbee8bf94ce732890deb710432", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Changgil/K2S3-SOLAR-11b-v4.0/result_2024-03-17 06:24:30.json b/Changgil/K2S3-SOLAR-11b-v4.0/result_2024-03-17 06:24:30.json new file mode 100644 index 0000000000000000000000000000000000000000..721effeac0feda5699573fe1b5c4b60e4c84af49 --- /dev/null +++ b/Changgil/K2S3-SOLAR-11b-v4.0/result_2024-03-17 06:24:30.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4462457337883959, + "acc_stderr": 0.014526705548539983, + "acc_norm": 0.507679180887372, + "acc_norm_stderr": 0.014609667440892574 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4570802628958375, + "acc_stderr": 0.004971364031062585, + "acc_norm": 0.6213901613224457, + "acc_norm_stderr": 0.004840493603166207 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6374269005847953, + "acc_stderr": 0.0368713061556206, + "acc_norm": 0.6374269005847953, + "acc_norm_stderr": 0.0368713061556206 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6407766990291263, + "acc_stderr": 0.04750458399041697, + "acc_norm": 0.6407766990291263, + "acc_norm_stderr": 0.04750458399041697 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6641123882503193, + "acc_stderr": 0.01688940723517168, + "acc_norm": 0.6641123882503193, + "acc_norm_stderr": 0.01688940723517168 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4962962962962963, + "acc_stderr": 0.043192236258113303, + "acc_norm": 0.4962962962962963, + "acc_norm_stderr": 0.043192236258113303 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768077, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768077 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.5106382978723404, + 
"acc_stderr": 0.03267862331014063, + "acc_norm": 0.5106382978723404, + "acc_norm_stderr": 0.03267862331014063 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4939759036144578, + "acc_stderr": 0.03892212195333047, + "acc_norm": 0.4939759036144578, + "acc_norm_stderr": 0.03892212195333047 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6270096463022508, + "acc_stderr": 0.027466610213140105, + "acc_norm": 0.6270096463022508, + "acc_norm_stderr": 0.027466610213140105 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.6098654708520179, + "acc_stderr": 0.03273766725459157, + "acc_norm": 0.6098654708520179, + "acc_norm_stderr": 0.03273766725459157 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6793893129770993, + "acc_stderr": 0.04093329229834278, + "acc_norm": 0.6793893129770993, + "acc_norm_stderr": 0.04093329229834278 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7272727272727273, + "acc_stderr": 0.03173071239071724, + "acc_norm": 0.7272727272727273, + "acc_norm_stderr": 0.03173071239071724 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5241379310344828, + "acc_stderr": 0.041618085035015295, + "acc_norm": 0.5241379310344828, + "acc_norm_stderr": 0.041618085035015295 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.04389869956808778, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.04389869956808778 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6302521008403361, + "acc_stderr": 0.03135709599613591, + "acc_norm": 0.6302521008403361, + "acc_norm_stderr": 0.03135709599613591 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5487179487179488, + "acc_stderr": 0.02523038123893484, + "acc_norm": 0.5487179487179488, + "acc_norm_stderr": 0.02523038123893484 + }, + 
"harness|ko_mmlu_computer_security|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.04557239513497751, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.04557239513497751 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4433497536945813, + "acc_stderr": 0.03495334582162933, + "acc_norm": 0.4433497536945813, + "acc_norm_stderr": 0.03495334582162933 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6, + "acc_stderr": 0.02786932057166462, + "acc_norm": 0.6, + "acc_norm_stderr": 0.02786932057166462 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.8376068376068376, + "acc_stderr": 0.024161618127987745, + "acc_norm": 0.8376068376068376, + "acc_norm_stderr": 0.024161618127987745 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5584905660377358, + "acc_stderr": 0.030561590426731837, + "acc_norm": 0.5584905660377358, + "acc_norm_stderr": 0.030561590426731837 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5727272727272728, + "acc_stderr": 0.047381987035454834, + "acc_norm": 0.5727272727272728, + "acc_norm_stderr": 0.047381987035454834 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.32592592592592595, + "acc_stderr": 0.028578348365473072, + "acc_norm": 0.32592592592592595, + "acc_norm_stderr": 0.028578348365473072 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3443708609271523, + "acc_stderr": 0.038796870240733264, + "acc_norm": 0.3443708609271523, + "acc_norm_stderr": 0.038796870240733264 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7611940298507462, + "acc_stderr": 0.03014777593540922, + "acc_norm": 0.7611940298507462, + "acc_norm_stderr": 0.03014777593540922 + }, + 
"harness|ko_mmlu_college_medicine|5": { + "acc": 0.47398843930635837, + "acc_stderr": 0.03807301726504513, + "acc_norm": 0.47398843930635837, + "acc_norm_stderr": 0.03807301726504513 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.02510742548113728, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.02510742548113728 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5416666666666666, + "acc_stderr": 0.04166666666666665, + "acc_norm": 0.5416666666666666, + "acc_norm_stderr": 0.04166666666666665 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.74, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.74, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5953757225433526, + "acc_stderr": 0.026424816594009852, + "acc_norm": 0.5953757225433526, + "acc_norm_stderr": 0.026424816594009852 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5030674846625767, + "acc_stderr": 0.03928297078179663, + "acc_norm": 0.5030674846625767, + "acc_norm_stderr": 0.03928297078179663 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5925925925925926, + "acc_stderr": 0.027339546640662734, + "acc_norm": 0.5925925925925926, + "acc_norm_stderr": 0.027339546640662734 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7357512953367875, + "acc_stderr": 0.03182155050916646, + "acc_norm": 0.7357512953367875, + "acc_norm_stderr": 0.03182155050916646 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.4298245614035088, + "acc_stderr": 0.04657047260594964, + "acc_norm": 0.4298245614035088, + "acc_norm_stderr": 
0.04657047260594964 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.689908256880734, + "acc_stderr": 0.019830849684439752, + "acc_norm": 0.689908256880734, + "acc_norm_stderr": 0.019830849684439752 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.04426266681379909, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.04426266681379909 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5915032679738562, + "acc_stderr": 0.028146405993096358, + "acc_norm": 0.5915032679738562, + "acc_norm_stderr": 0.028146405993096358 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.59, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.59, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7355371900826446, + "acc_stderr": 0.04026187527591206, + "acc_norm": 0.7355371900826446, + "acc_norm_stderr": 0.04026187527591206 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5394736842105263, + "acc_stderr": 0.04056242252249034, + "acc_norm": 0.5394736842105263, + "acc_norm_stderr": 0.04056242252249034 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5196078431372549, + "acc_stderr": 0.020212274976302964, + "acc_norm": 0.5196078431372549, + "acc_norm_stderr": 0.020212274976302964 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3900709219858156, + "acc_stderr": 0.02909767559946393, + "acc_norm": 0.3900709219858156, + "acc_norm_stderr": 0.02909767559946393 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.41964285714285715, + "acc_stderr": 0.04684099321077106, + "acc_norm": 0.41964285714285715, + "acc_norm_stderr": 0.04684099321077106 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5231481481481481, + "acc_stderr": 0.034063153607115065, + "acc_norm": 0.5231481481481481, + "acc_norm_stderr": 0.034063153607115065 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2022346368715084, + "acc_stderr": 
0.013433729483320994, + "acc_norm": 0.2022346368715084, + "acc_norm_stderr": 0.013433729483320994 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.7, + "acc_stderr": 0.04605661864718381, + "acc_norm": 0.7, + "acc_norm_stderr": 0.04605661864718381 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5551470588235294, + "acc_stderr": 0.03018753206032939, + "acc_norm": 0.5551470588235294, + "acc_norm_stderr": 0.03018753206032939 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6571428571428571, + "acc_stderr": 0.03038726291954772, + "acc_norm": 0.6571428571428571, + "acc_norm_stderr": 0.03038726291954772 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7257383966244726, + "acc_stderr": 0.029041333510598025, + "acc_norm": 0.7257383966244726, + "acc_norm_stderr": 0.029041333510598025 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.39308996088657105, + "acc_stderr": 0.012474899613873955, + "acc_norm": 0.39308996088657105, + "acc_norm_stderr": 0.012474899613873955 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6519607843137255, + "acc_stderr": 0.03343311240488418, + "acc_norm": 0.6519607843137255, + "acc_norm_stderr": 0.03343311240488418 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6121212121212121, + "acc_stderr": 0.03804913653971009, + "acc_norm": 0.6121212121212121, + "acc_norm_stderr": 0.03804913653971009 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3047735618115055, + "mc1_stderr": 0.016114124156882466, + "mc2": 0.46094875069234287, + "mc2_stderr": 0.015327523723455975 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5619834710743802, + "acc_stderr": 0.01705775370216029, + "acc_norm": 0.577331759149941, + "acc_norm_stderr": 0.016983506079577607 + } + }, + "versions": { + "all": 0, + 
"harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + 
"harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Changgil/K2S3-SOLAR-11b-v4.0", + "model_sha": "f1a90b4594dfe14349be1db44ee887856f73a82c", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Changgil/K2S3-v0.1/result_2024-04-29 01:06:23.json b/Changgil/K2S3-v0.1/result_2024-04-29 01:06:23.json new file mode 100644 index 0000000000000000000000000000000000000000..d4e98ed88e7161d03bd4710744a9555aeadfbc8a --- /dev/null +++ b/Changgil/K2S3-v0.1/result_2024-04-29 01:06:23.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.45051194539249145, + "acc_stderr": 0.014539646098471627, + "acc_norm": 0.5008532423208191, + "acc_norm_stderr": 0.014611369529813269 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4230233021310496, + "acc_stderr": 0.004930293787545619, + "acc_norm": 0.5676160127464649, + "acc_norm_stderr": 0.0049439450696114546 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6374269005847953, + "acc_stderr": 0.0368713061556206, + "acc_norm": 0.6374269005847953, + "acc_norm_stderr": 0.0368713061556206 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6019417475728155, + "acc_stderr": 
0.048467482539772386, + "acc_norm": 0.6019417475728155, + "acc_norm_stderr": 0.048467482539772386 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6168582375478927, + "acc_stderr": 0.01738477419488562, + "acc_norm": 0.6168582375478927, + "acc_norm_stderr": 0.01738477419488562 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.04316378599511326, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.04316378599511326 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4595744680851064, + "acc_stderr": 0.032579014820998356, + "acc_norm": 0.4595744680851064, + "acc_norm_stderr": 0.032579014820998356 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.46987951807228917, + "acc_stderr": 0.03885425420866766, + "acc_norm": 0.46987951807228917, + "acc_norm_stderr": 0.03885425420866766 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5594855305466238, + "acc_stderr": 0.028196400574197422, + "acc_norm": 0.5594855305466238, + "acc_norm_stderr": 0.028196400574197422 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5739910313901345, + "acc_stderr": 0.03318833286217281, + "acc_norm": 0.5739910313901345, + "acc_norm_stderr": 0.03318833286217281 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5725190839694656, + "acc_stderr": 0.04338920305792401, + "acc_norm": 0.5725190839694656, + "acc_norm_stderr": 0.04338920305792401 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.53, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.53, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.03358618145732524, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.03358618145732524 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.45517241379310347, + 
"acc_stderr": 0.04149886942192117, + "acc_norm": 0.45517241379310347, + "acc_norm_stderr": 0.04149886942192117 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.04389869956808778, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.04389869956808778 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5798319327731093, + "acc_stderr": 0.032061837832361516, + "acc_norm": 0.5798319327731093, + "acc_norm_stderr": 0.032061837832361516 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5333333333333333, + "acc_stderr": 0.025294608023986462, + "acc_norm": 0.5333333333333333, + "acc_norm_stderr": 0.025294608023986462 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.62, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.62, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6018518518518519, + "acc_stderr": 0.04732332615978814, + "acc_norm": 0.6018518518518519, + "acc_norm_stderr": 0.04732332615978814 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4482758620689655, + "acc_stderr": 0.034991131376767445, + "acc_norm": 0.4482758620689655, + "acc_norm_stderr": 0.034991131376767445 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6, + "acc_stderr": 0.027869320571664632, + "acc_norm": 0.6, + "acc_norm_stderr": 0.027869320571664632 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7264957264957265, + "acc_stderr": 0.029202540153431187, + "acc_norm": 0.7264957264957265, + "acc_norm_stderr": 0.029202540153431187 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5471698113207547, + "acc_stderr": 0.03063562795796182, + "acc_norm": 0.5471698113207547, + "acc_norm_stderr": 0.03063562795796182 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 
0.5909090909090909, + "acc_stderr": 0.04709306978661895, + "acc_norm": 0.5909090909090909, + "acc_norm_stderr": 0.04709306978661895 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3296296296296296, + "acc_stderr": 0.028661201116524572, + "acc_norm": 0.3296296296296296, + "acc_norm_stderr": 0.028661201116524572 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.038020397601079024, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.038020397601079024 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6965174129353234, + "acc_stderr": 0.03251006816458618, + "acc_norm": 0.6965174129353234, + "acc_norm_stderr": 0.03251006816458618 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5433526011560693, + "acc_stderr": 0.03798106566014498, + "acc_norm": 0.5433526011560693, + "acc_norm_stderr": 0.03798106566014498 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.37566137566137564, + "acc_stderr": 0.02494236893115979, + "acc_norm": 0.37566137566137564, + "acc_norm_stderr": 0.02494236893115979 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4583333333333333, + "acc_stderr": 0.04166666666666665, + "acc_norm": 0.4583333333333333, + "acc_norm_stderr": 0.04166666666666665 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.72, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.72, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5491329479768786, + "acc_stderr": 0.026788811931562757, + "acc_norm": 0.5491329479768786, + "acc_norm_stderr": 0.026788811931562757 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5214723926380368, + "acc_stderr": 0.03924746876751129, + "acc_norm": 0.5214723926380368, + "acc_norm_stderr": 0.03924746876751129 + }, + 
"harness|ko_mmlu_prehistory|5": { + "acc": 0.5401234567901234, + "acc_stderr": 0.027731022753539274, + "acc_norm": 0.5401234567901234, + "acc_norm_stderr": 0.027731022753539274 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.689119170984456, + "acc_stderr": 0.03340361906276586, + "acc_norm": 0.689119170984456, + "acc_norm_stderr": 0.03340361906276586 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2894736842105263, + "acc_stderr": 0.04266339443159394, + "acc_norm": 0.2894736842105263, + "acc_norm_stderr": 0.04266339443159394 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6770642201834862, + "acc_stderr": 0.020048115923415342, + "acc_norm": 0.6770642201834862, + "acc_norm_stderr": 0.020048115923415342 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3253968253968254, + "acc_stderr": 0.04190596438871136, + "acc_norm": 0.3253968253968254, + "acc_norm_stderr": 0.04190596438871136 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5718954248366013, + "acc_stderr": 0.028332397483664274, + "acc_norm": 0.5718954248366013, + "acc_norm_stderr": 0.028332397483664274 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6776859504132231, + "acc_stderr": 0.042664163633521685, + "acc_norm": 0.6776859504132231, + "acc_norm_stderr": 0.042664163633521685 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5328947368421053, + "acc_stderr": 0.04060127035236395, + "acc_norm": 0.5328947368421053, + "acc_norm_stderr": 0.04060127035236395 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.49673202614379086, + "acc_stderr": 0.020227402794434867, + "acc_norm": 0.49673202614379086, + "acc_norm_stderr": 
0.020227402794434867 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.35106382978723405, + "acc_stderr": 0.028473501272963775, + "acc_norm": 0.35106382978723405, + "acc_norm_stderr": 0.028473501272963775 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.38392857142857145, + "acc_stderr": 0.046161430750285455, + "acc_norm": 0.38392857142857145, + "acc_norm_stderr": 0.046161430750285455 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.44907407407407407, + "acc_stderr": 0.03392238405321617, + "acc_norm": 0.44907407407407407, + "acc_norm_stderr": 0.03392238405321617 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.30726256983240224, + "acc_stderr": 0.01543015884646961, + "acc_norm": 0.30726256983240224, + "acc_norm_stderr": 0.01543015884646961 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.62, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.62, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4485294117647059, + "acc_stderr": 0.0302114796091216, + "acc_norm": 0.4485294117647059, + "acc_norm_stderr": 0.0302114796091216 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6489795918367347, + "acc_stderr": 0.030555316755573644, + "acc_norm": 0.6489795918367347, + "acc_norm_stderr": 0.030555316755573644 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6919831223628692, + "acc_stderr": 0.030052389335605695, + "acc_norm": 0.6919831223628692, + "acc_norm_stderr": 0.030052389335605695 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3559322033898305, + "acc_stderr": 0.01222864553727757, + "acc_norm": 0.3559322033898305, + "acc_norm_stderr": 0.01222864553727757 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6029411764705882, + 
"acc_stderr": 0.034341311647191286, + "acc_norm": 0.6029411764705882, + "acc_norm_stderr": 0.034341311647191286 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6242424242424243, + "acc_stderr": 0.03781887353205983, + "acc_norm": 0.6242424242424243, + "acc_norm_stderr": 0.03781887353205983 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2827417380660955, + "mc1_stderr": 0.01576477083677731, + "mc2": 0.43390504265082586, + "mc2_stderr": 0.015336718297088065 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5017709563164109, + "acc_stderr": 0.017190246276231853, + "acc_norm": 0.5584415584415584, + "acc_norm_stderr": 0.01707252587556311 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + 
"harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Changgil/K2S3-v0.1", + "model_sha": "d544e389f091983bb4f11314edb526d81753c919", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Changgil/k2s3_test_24001/result_2024-02-14 06:16:40.json b/Changgil/k2s3_test_24001/result_2024-02-14 06:16:40.json new file mode 100644 index 0000000000000000000000000000000000000000..c1c15a0c5ce03db8b1e61265ae86caa474a4c3a2 --- /dev/null +++ 
b/Changgil/k2s3_test_24001/result_2024-02-14 06:16:40.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3225255972696246, + "acc_stderr": 0.013659980894277373, + "acc_norm": 0.371160409556314, + "acc_norm_stderr": 0.014117971901142811 + }, + "harness|ko_hellaswag|10": { + "acc": 0.36168094005178253, + "acc_stderr": 0.004795051037917729, + "acc_norm": 0.4660426209918343, + "acc_norm_stderr": 0.004978260641742204 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4619883040935672, + "acc_stderr": 0.03823727092882307, + "acc_norm": 0.4619883040935672, + "acc_norm_stderr": 0.03823727092882307 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5339805825242718, + "acc_stderr": 0.04939291447273481, + "acc_norm": 0.5339805825242718, + "acc_norm_stderr": 0.04939291447273481 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.46360153256704983, + "acc_stderr": 0.01783252407959326, + "acc_norm": 0.46360153256704983, + "acc_norm_stderr": 0.01783252407959326 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3851851851851852, + "acc_stderr": 0.042039210401562783, + "acc_norm": 0.3851851851851852, + "acc_norm_stderr": 0.042039210401562783 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.32340425531914896, + "acc_stderr": 0.030579442773610337, + "acc_norm": 0.32340425531914896, + "acc_norm_stderr": 0.030579442773610337 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3674698795180723, + "acc_stderr": 0.03753267402120574, + "acc_norm": 0.3674698795180723, + "acc_norm_stderr": 0.03753267402120574 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4565916398713826, + "acc_stderr": 0.0282908690541976, + "acc_norm": 0.4565916398713826, + "acc_norm_stderr": 0.0282908690541976 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.34977578475336324, + "acc_stderr": 
0.03200736719484504, + "acc_norm": 0.34977578475336324, + "acc_norm_stderr": 0.03200736719484504 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.46564885496183206, + "acc_stderr": 0.04374928560599738, + "acc_norm": 0.46564885496183206, + "acc_norm_stderr": 0.04374928560599738 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.4595959595959596, + "acc_stderr": 0.03550702465131342, + "acc_norm": 0.4595959595959596, + "acc_norm_stderr": 0.03550702465131342 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.041042692118062316, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.041042692118062316 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.043364327079931785, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.043364327079931785 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.40336134453781514, + "acc_stderr": 0.03186608121408831, + "acc_norm": 0.40336134453781514, + "acc_norm_stderr": 0.03186608121408831 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3717948717948718, + "acc_stderr": 0.024503472557110946, + "acc_norm": 0.3717948717948718, + "acc_norm_stderr": 0.024503472557110946 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.047500773411999854, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.047500773411999854 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 
0.3448275862068966, + "acc_stderr": 0.033442837442804574, + "acc_norm": 0.3448275862068966, + "acc_norm_stderr": 0.033442837442804574 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.41935483870967744, + "acc_stderr": 0.028071588901091852, + "acc_norm": 0.41935483870967744, + "acc_norm_stderr": 0.028071588901091852 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6111111111111112, + "acc_stderr": 0.03193705726200293, + "acc_norm": 0.6111111111111112, + "acc_norm_stderr": 0.03193705726200293 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4226415094339623, + "acc_stderr": 0.030402331445769537, + "acc_norm": 0.4226415094339623, + "acc_norm_stderr": 0.030402331445769537 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4636363636363636, + "acc_stderr": 0.047764491623961985, + "acc_norm": 0.4636363636363636, + "acc_norm_stderr": 0.047764491623961985 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.29259259259259257, + "acc_stderr": 0.02773896963217609, + "acc_norm": 0.29259259259259257, + "acc_norm_stderr": 0.02773896963217609 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3443708609271523, + "acc_stderr": 0.03879687024073327, + "acc_norm": 0.3443708609271523, + "acc_norm_stderr": 0.03879687024073327 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6119402985074627, + "acc_stderr": 0.0344578996436275, + "acc_norm": 0.6119402985074627, + "acc_norm_stderr": 0.0344578996436275 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3988439306358382, + "acc_stderr": 0.037336266553835096, + "acc_norm": 0.3988439306358382, + "acc_norm_stderr": 0.037336266553835096 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2830687830687831, + "acc_stderr": 0.023201392938194978, + "acc_norm": 0.2830687830687831, + "acc_norm_stderr": 0.023201392938194978 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.03745554791462457, + "acc_norm": 0.2777777777777778, + 
"acc_norm_stderr": 0.03745554791462457 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.56, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.56, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4624277456647399, + "acc_stderr": 0.026842985519615375, + "acc_norm": 0.4624277456647399, + "acc_norm_stderr": 0.026842985519615375 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.36809815950920244, + "acc_stderr": 0.03789213935838396, + "acc_norm": 0.36809815950920244, + "acc_norm_stderr": 0.03789213935838396 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.404320987654321, + "acc_stderr": 0.027306625297327688, + "acc_norm": 0.404320987654321, + "acc_norm_stderr": 0.027306625297327688 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.45595854922279794, + "acc_stderr": 0.03594413711272437, + "acc_norm": 0.45595854922279794, + "acc_norm_stderr": 0.03594413711272437 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2982456140350877, + "acc_stderr": 0.043036840335373173, + "acc_norm": 0.2982456140350877, + "acc_norm_stderr": 0.043036840335373173 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.44220183486238535, + "acc_stderr": 0.0212936132075202, + "acc_norm": 0.44220183486238535, + "acc_norm_stderr": 0.0212936132075202 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.039325376803928704, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.039325376803928704 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4477124183006536, + "acc_stderr": 0.028472938478033526, + "acc_norm": 
0.4477124183006536, + "acc_norm_stderr": 0.028472938478033526 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.043913262867240704, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.043913262867240704 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4473684210526316, + "acc_stderr": 0.040463368839782514, + "acc_norm": 0.4473684210526316, + "acc_norm_stderr": 0.040463368839782514 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.31699346405228757, + "acc_stderr": 0.018824219512706207, + "acc_norm": 0.31699346405228757, + "acc_norm_stderr": 0.018824219512706207 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.31560283687943264, + "acc_stderr": 0.027724989449509317, + "acc_norm": 0.31560283687943264, + "acc_norm_stderr": 0.027724989449509317 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.26785714285714285, + "acc_stderr": 0.042032772914677614, + "acc_norm": 0.26785714285714285, + "acc_norm_stderr": 0.042032772914677614 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.30092592592592593, + "acc_stderr": 0.0312803908432988, + "acc_norm": 0.30092592592592593, + "acc_norm_stderr": 0.0312803908432988 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2446927374301676, + "acc_stderr": 0.014378169884098424, + "acc_norm": 0.2446927374301676, + "acc_norm_stderr": 0.014378169884098424 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3786764705882353, + 
"acc_stderr": 0.02946513363977613, + "acc_norm": 0.3786764705882353, + "acc_norm_stderr": 0.02946513363977613 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4816326530612245, + "acc_stderr": 0.031987615467631264, + "acc_norm": 0.4816326530612245, + "acc_norm_stderr": 0.031987615467631264 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.4219409282700422, + "acc_stderr": 0.032148146302403695, + "acc_norm": 0.4219409282700422, + "acc_norm_stderr": 0.032148146302403695 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2900912646675359, + "acc_stderr": 0.011590375554733095, + "acc_norm": 0.2900912646675359, + "acc_norm_stderr": 0.011590375554733095 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.37254901960784315, + "acc_stderr": 0.033933885849584046, + "acc_norm": 0.37254901960784315, + "acc_norm_stderr": 0.033933885849584046 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4303030303030303, + "acc_stderr": 0.03866225962879077, + "acc_norm": 0.4303030303030303, + "acc_norm_stderr": 0.03866225962879077 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.28518971848225216, + "mc1_stderr": 0.015805827874454895, + "mc2": 0.4412699277567685, + "mc2_stderr": 0.015194518848596681 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3400236127508855, + "acc_stderr": 0.016286717220737677, + "acc_norm": 0.4309327036599764, + "acc_norm_stderr": 0.017025558196043136 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + 
"harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + 
"harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Changgil/k2s3_test_24001", + "model_sha": "1c706ac080122e5a2901b06b8228ed177a7055c5", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/CultriX/NeuralMona_MoE-4x7B/result_2024-05-15 18:00:30.json b/CultriX/NeuralMona_MoE-4x7B/result_2024-05-15 18:00:30.json new file mode 100644 index 0000000000000000000000000000000000000000..757ad5af5e6078cc186475cec1b926a80761f587 --- /dev/null +++ b/CultriX/NeuralMona_MoE-4x7B/result_2024-05-15 18:00:30.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.386518771331058, + "acc_stderr": 0.014230084761910471, + "acc_norm": 0.43686006825938567, + "acc_norm_stderr": 0.014494421584256515 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3952399920334595, + "acc_stderr": 0.004879030010598926, + "acc_norm": 0.5206134236207927, + "acc_norm_stderr": 0.004985539159783418 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.49707602339181284, + "acc_stderr": 0.03834759370936839, + "acc_norm": 0.49707602339181284, + "acc_norm_stderr": 0.03834759370936839 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6019417475728155, + "acc_stderr": 0.04846748253977238, + "acc_norm": 0.6019417475728155, + "acc_norm_stderr": 0.04846748253977238 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4495530012771392, + "acc_stderr": 0.017788725283507337, + "acc_norm": 0.4495530012771392, + "acc_norm_stderr": 0.017788725283507337 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.35555555555555557, + "acc_stderr": 0.04135176749720386, + "acc_norm": 0.35555555555555557, + "acc_norm_stderr": 0.04135176749720386 + }, + "harness|ko_mmlu_abstract_algebra|5": 
{ + "acc": 0.27, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4085106382978723, + "acc_stderr": 0.03213418026701576, + "acc_norm": 0.4085106382978723, + "acc_norm_stderr": 0.03213418026701576 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3614457831325301, + "acc_stderr": 0.03740059382029319, + "acc_norm": 0.3614457831325301, + "acc_norm_stderr": 0.03740059382029319 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4340836012861736, + "acc_stderr": 0.0281502322445356, + "acc_norm": 0.4340836012861736, + "acc_norm_stderr": 0.0281502322445356 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4080717488789238, + "acc_stderr": 0.03298574607842821, + "acc_norm": 0.4080717488789238, + "acc_norm_stderr": 0.03298574607842821 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48091603053435117, + "acc_stderr": 0.04382094705550988, + "acc_norm": 0.48091603053435117, + "acc_norm_stderr": 0.04382094705550988 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5656565656565656, + "acc_stderr": 0.03531505879359183, + "acc_norm": 0.5656565656565656, + "acc_norm_stderr": 0.03531505879359183 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4689655172413793, + "acc_stderr": 0.04158632762097828, + "acc_norm": 0.4689655172413793, + "acc_norm_stderr": 0.04158632762097828 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.043364327079931785, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.043364327079931785 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5126050420168067, + "acc_stderr": 0.03246816765752174, + "acc_norm": 0.5126050420168067, + "acc_norm_stderr": 0.03246816765752174 + }, + 
"harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.46153846153846156, + "acc_stderr": 0.02527589207024063, + "acc_norm": 0.46153846153846156, + "acc_norm_stderr": 0.02527589207024063 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.63, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.63, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5185185185185185, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.5185185185185185, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3694581280788177, + "acc_stderr": 0.033959703819985754, + "acc_norm": 0.3694581280788177, + "acc_norm_stderr": 0.033959703819985754 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.43870967741935485, + "acc_stderr": 0.02822949732031722, + "acc_norm": 0.43870967741935485, + "acc_norm_stderr": 0.02822949732031722 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7307692307692307, + "acc_stderr": 0.029058588303748842, + "acc_norm": 0.7307692307692307, + "acc_norm_stderr": 0.029058588303748842 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4226415094339623, + "acc_stderr": 0.030402331445769537, + "acc_norm": 0.4226415094339623, + "acc_norm_stderr": 0.030402331445769537 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4636363636363636, + "acc_stderr": 0.047764491623961985, + "acc_norm": 0.4636363636363636, + "acc_norm_stderr": 0.047764491623961985 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.337037037037037, + "acc_stderr": 0.028820884666253255, + "acc_norm": 0.337037037037037, + "acc_norm_stderr": 0.028820884666253255 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + "acc_stderr": 0.03684881521389023, + "acc_norm": 0.2847682119205298, + 
"acc_norm_stderr": 0.03684881521389023 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5671641791044776, + "acc_stderr": 0.03503490923673281, + "acc_norm": 0.5671641791044776, + "acc_norm_stderr": 0.03503490923673281 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.42196531791907516, + "acc_stderr": 0.037657466938651504, + "acc_norm": 0.42196531791907516, + "acc_norm_stderr": 0.037657466938651504 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.36243386243386244, + "acc_stderr": 0.02475747390275205, + "acc_norm": 0.36243386243386244, + "acc_norm_stderr": 0.02475747390275205 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.040166600304512336, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.040166600304512336 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.55, + "acc_stderr": 0.05, + "acc_norm": 0.55, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5028901734104047, + "acc_stderr": 0.02691864538323901, + "acc_norm": 0.5028901734104047, + "acc_norm_stderr": 0.02691864538323901 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4723926380368098, + "acc_stderr": 0.039223782906109894, + "acc_norm": 0.4723926380368098, + "acc_norm_stderr": 0.039223782906109894 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.42592592592592593, + "acc_stderr": 0.027513747284379417, + "acc_norm": 0.42592592592592593, + "acc_norm_stderr": 0.027513747284379417 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5181347150259067, + "acc_stderr": 0.03606065001832919, + "acc_norm": 0.5181347150259067, + 
"acc_norm_stderr": 0.03606065001832919 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.32456140350877194, + "acc_stderr": 0.04404556157374768, + "acc_norm": 0.32456140350877194, + "acc_norm_stderr": 0.04404556157374768 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5082568807339449, + "acc_stderr": 0.021434399918214334, + "acc_norm": 0.5082568807339449, + "acc_norm_stderr": 0.021434399918214334 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3968253968253968, + "acc_stderr": 0.04375888492727061, + "acc_norm": 0.3968253968253968, + "acc_norm_stderr": 0.04375888492727061 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4869281045751634, + "acc_stderr": 0.028620130800700246, + "acc_norm": 0.4869281045751634, + "acc_norm_stderr": 0.028620130800700246 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5619834710743802, + "acc_stderr": 0.045291468044357915, + "acc_norm": 0.5619834710743802, + "acc_norm_stderr": 0.045291468044357915 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40789473684210525, + "acc_stderr": 0.03999309712777471, + "acc_norm": 0.40789473684210525, + "acc_norm_stderr": 0.03999309712777471 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.380718954248366, + "acc_stderr": 0.019643801557924803, + "acc_norm": 0.380718954248366, + "acc_norm_stderr": 0.019643801557924803 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3617021276595745, + "acc_stderr": 0.0286638201471995, + "acc_norm": 0.3617021276595745, + "acc_norm_stderr": 0.0286638201471995 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.39285714285714285, + "acc_stderr": 0.046355501356099754, + "acc_norm": 0.39285714285714285, + "acc_norm_stderr": 0.046355501356099754 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.39351851851851855, + 
"acc_stderr": 0.03331747876370312, + "acc_norm": 0.39351851851851855, + "acc_norm_stderr": 0.03331747876370312 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.31620111731843575, + "acc_stderr": 0.015551673652172556, + "acc_norm": 0.31620111731843575, + "acc_norm_stderr": 0.015551673652172556 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.63, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.63, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.35294117647058826, + "acc_stderr": 0.029029422815681404, + "acc_norm": 0.35294117647058826, + "acc_norm_stderr": 0.029029422815681404 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5346938775510204, + "acc_stderr": 0.03193207024425314, + "acc_norm": 0.5346938775510204, + "acc_norm_stderr": 0.03193207024425314 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5907172995780591, + "acc_stderr": 0.03200704183359591, + "acc_norm": 0.5907172995780591, + "acc_norm_stderr": 0.03200704183359591 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.35267275097783574, + "acc_stderr": 0.012203286846053887, + "acc_norm": 0.35267275097783574, + "acc_norm_stderr": 0.012203286846053887 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.44607843137254904, + "acc_stderr": 0.03488845451304974, + "acc_norm": 0.44607843137254904, + "acc_norm_stderr": 0.03488845451304974 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4727272727272727, + "acc_stderr": 0.03898531605579419, + "acc_norm": 0.4727272727272727, + "acc_norm_stderr": 0.03898531605579419 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.41982864137086906, + "mc1_stderr": 0.017277030301775766, + "mc2": 0.5850332501736099, + "mc2_stderr": 0.01624952629450338 + }, + 
"harness|ko_commongen_v2|2": { + "acc": 0.41440377804014167, + "acc_stderr": 0.016936583383943615, + "acc_norm": 0.41912632821723733, + "acc_norm_stderr": 0.016963995010862796 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + 
"harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "CultriX/NeuralMona_MoE-4x7B", + "model_sha": "869c5cafb3f5002a0d273621519e3f352418eded", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/DBCMLAB/Llama-3-instruction-constructionsafety-layertuning/result_2024-05-24 06:09:39.json b/DBCMLAB/Llama-3-instruction-constructionsafety-layertuning/result_2024-05-24 06:09:39.json new file mode 100644 index 0000000000000000000000000000000000000000..4e64e4d7a0bb72fa4cbc3cc935e32a562564a5a3 --- /dev/null +++ b/DBCMLAB/Llama-3-instruction-constructionsafety-layertuning/result_2024-05-24 06:09:39.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4257679180887372, + "acc_stderr": 0.01444946427886881, + "acc_norm": 0.4812286689419795, + "acc_norm_stderr": 0.014601090150633964 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4213304122684724, + "acc_stderr": 0.004927631806477561, + "acc_norm": 0.5729934276040629, + 
"acc_norm_stderr": 0.004936323537147931 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5847953216374269, + "acc_stderr": 0.03779275945503201, + "acc_norm": 0.5847953216374269, + "acc_norm_stderr": 0.03779275945503201 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5728155339805825, + "acc_stderr": 0.04897957737781168, + "acc_norm": 0.5728155339805825, + "acc_norm_stderr": 0.04897957737781168 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.611749680715198, + "acc_stderr": 0.017427673295544326, + "acc_norm": 0.611749680715198, + "acc_norm_stderr": 0.017427673295544326 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4222222222222222, + "acc_stderr": 0.042667634040995814, + "acc_norm": 0.4222222222222222, + "acc_norm_stderr": 0.042667634040995814 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4425531914893617, + "acc_stderr": 0.032469569197899575, + "acc_norm": 0.4425531914893617, + "acc_norm_stderr": 0.032469569197899575 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.5, + "acc_stderr": 0.03892494720807614, + "acc_norm": 0.5, + "acc_norm_stderr": 0.03892494720807614 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5594855305466238, + "acc_stderr": 0.028196400574197422, + "acc_norm": 0.5594855305466238, + "acc_norm_stderr": 0.028196400574197422 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5246636771300448, + "acc_stderr": 0.03351695167652628, + "acc_norm": 0.5246636771300448, + "acc_norm_stderr": 0.03351695167652628 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.549618320610687, + "acc_stderr": 0.04363643698524779, + "acc_norm": 0.549618320610687, + "acc_norm_stderr": 0.04363643698524779 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956914, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956914 + }, + 
"harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6262626262626263, + "acc_stderr": 0.034468977386593325, + "acc_norm": 0.6262626262626263, + "acc_norm_stderr": 0.034468977386593325 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4827586206896552, + "acc_stderr": 0.04164188720169377, + "acc_norm": 0.4827586206896552, + "acc_norm_stderr": 0.04164188720169377 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.040233822736177476, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.040233822736177476 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5168067226890757, + "acc_stderr": 0.03246013680375308, + "acc_norm": 0.5168067226890757, + "acc_norm_stderr": 0.03246013680375308 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.48205128205128206, + "acc_stderr": 0.02533466708095496, + "acc_norm": 0.48205128205128206, + "acc_norm_stderr": 0.02533466708095496 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5740740740740741, + "acc_stderr": 0.0478034362693679, + "acc_norm": 0.5740740740740741, + "acc_norm_stderr": 0.0478034362693679 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.37438423645320196, + "acc_stderr": 0.03405155380561952, + "acc_norm": 0.37438423645320196, + "acc_norm_stderr": 0.03405155380561952 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5483870967741935, + "acc_stderr": 0.02831050034856839, + "acc_norm": 0.5483870967741935, + "acc_norm_stderr": 0.02831050034856839 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7051282051282052, + "acc_stderr": 0.029872577708891176, + "acc_norm": 0.7051282051282052, + 
"acc_norm_stderr": 0.029872577708891176 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4867924528301887, + "acc_stderr": 0.030762134874500482, + "acc_norm": 0.4867924528301887, + "acc_norm_stderr": 0.030762134874500482 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4909090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.4909090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3296296296296296, + "acc_stderr": 0.028661201116524593, + "acc_norm": 0.3296296296296296, + "acc_norm_stderr": 0.028661201116524593 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3708609271523179, + "acc_stderr": 0.03943966699183629, + "acc_norm": 0.3708609271523179, + "acc_norm_stderr": 0.03943966699183629 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6965174129353234, + "acc_stderr": 0.03251006816458618, + "acc_norm": 0.6965174129353234, + "acc_norm_stderr": 0.03251006816458618 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4161849710982659, + "acc_stderr": 0.037585177754049466, + "acc_norm": 0.4161849710982659, + "acc_norm_stderr": 0.037585177754049466 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30158730158730157, + "acc_stderr": 0.023636975996101813, + "acc_norm": 0.30158730158730157, + "acc_norm_stderr": 0.023636975996101813 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4583333333333333, + "acc_stderr": 0.04166666666666665, + "acc_norm": 0.4583333333333333, + "acc_norm_stderr": 0.04166666666666665 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.7, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.7, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5202312138728323, + "acc_stderr": 0.026897049996382875, + 
"acc_norm": 0.5202312138728323, + "acc_norm_stderr": 0.026897049996382875 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.50920245398773, + "acc_stderr": 0.03927705600787443, + "acc_norm": 0.50920245398773, + "acc_norm_stderr": 0.03927705600787443 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5246913580246914, + "acc_stderr": 0.027786800931427436, + "acc_norm": 0.5246913580246914, + "acc_norm_stderr": 0.027786800931427436 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.616580310880829, + "acc_stderr": 0.03508984236295342, + "acc_norm": 0.616580310880829, + "acc_norm_stderr": 0.03508984236295342 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2894736842105263, + "acc_stderr": 0.04266339443159394, + "acc_norm": 0.2894736842105263, + "acc_norm_stderr": 0.04266339443159394 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.634862385321101, + "acc_stderr": 0.020642801454383995, + "acc_norm": 0.634862385321101, + "acc_norm_stderr": 0.020642801454383995 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.31746031746031744, + "acc_stderr": 0.0416345303130286, + "acc_norm": 0.31746031746031744, + "acc_norm_stderr": 0.0416345303130286 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5294117647058824, + "acc_stderr": 0.028580341065138286, + "acc_norm": 0.5294117647058824, + "acc_norm_stderr": 0.028580341065138286 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6446280991735537, + "acc_stderr": 0.04369236326573981, + "acc_norm": 0.6446280991735537, + "acc_norm_stderr": 0.04369236326573981 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4473684210526316, + "acc_stderr": 
0.04046336883978252, + "acc_norm": 0.4473684210526316, + "acc_norm_stderr": 0.04046336883978252 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.020196594933541197, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.020196594933541197 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.33687943262411346, + "acc_stderr": 0.02819553487396673, + "acc_norm": 0.33687943262411346, + "acc_norm_stderr": 0.02819553487396673 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4107142857142857, + "acc_stderr": 0.04669510663875191, + "acc_norm": 0.4107142857142857, + "acc_norm_stderr": 0.04669510663875191 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4212962962962963, + "acc_stderr": 0.03367462138896078, + "acc_norm": 0.4212962962962963, + "acc_norm_stderr": 0.03367462138896078 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2346368715083799, + "acc_stderr": 0.014173044098303675, + "acc_norm": 0.2346368715083799, + "acc_norm_stderr": 0.014173044098303675 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.47794117647058826, + "acc_stderr": 0.030343264224213528, + "acc_norm": 0.47794117647058826, + "acc_norm_stderr": 0.030343264224213528 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.49387755102040815, + "acc_stderr": 0.03200682020163908, + "acc_norm": 0.49387755102040815, + "acc_norm_stderr": 0.03200682020163908 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6708860759493671, + "acc_stderr": 0.03058732629470237, + "acc_norm": 0.6708860759493671, + "acc_norm_stderr": 0.03058732629470237 + }, + 
"harness|ko_mmlu_professional_law|5": { + "acc": 0.34419817470664926, + "acc_stderr": 0.012134433741002575, + "acc_norm": 0.34419817470664926, + "acc_norm_stderr": 0.012134433741002575 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5931372549019608, + "acc_stderr": 0.03447891136353383, + "acc_norm": 0.5931372549019608, + "acc_norm_stderr": 0.03447891136353383 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6606060606060606, + "acc_stderr": 0.03697442205031595, + "acc_norm": 0.6606060606060606, + "acc_norm_stderr": 0.03697442205031595 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.26805385556915545, + "mc1_stderr": 0.01550620472283456, + "mc2": 0.4196920864518041, + "mc2_stderr": 0.015083569722000319 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4510035419126328, + "acc_stderr": 0.017107618859549346, + "acc_norm": 0.4817001180637544, + "acc_norm_stderr": 0.01717883663917776 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + 
"harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "DBCMLAB/Llama-3-instruction-constructionsafety-layertuning", + "model_sha": "cce37ef6a6ecf95e6995b7901dc53ea332732d1a", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline 
at end of file diff --git a/DILAB-HYU/KoQuality-Polyglot-5.8b/result_2023-10-12 13:21:04.json b/DILAB-HYU/KoQuality-Polyglot-5.8b/result_2023-10-12 13:21:04.json new file mode 100644 index 0000000000000000000000000000000000000000..86130d043c4fa0acf617a0cf455d394b8310c9d8 --- /dev/null +++ b/DILAB-HYU/KoQuality-Polyglot-5.8b/result_2023-10-12 13:21:04.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2977815699658703, + "acc_stderr": 0.01336308010724449, + "acc_norm": 0.3370307167235495, + "acc_norm_stderr": 0.013813476652902272 + }, + "harness|ko_hellaswag|10": { + "acc": 0.38458474407488547, + "acc_stderr": 0.004855027248398158, + "acc_norm": 0.4970125473013344, + "acc_norm_stderr": 0.004989692344313998 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.1695906432748538, + "acc_stderr": 0.028782108105401712, + "acc_norm": 0.1695906432748538, + "acc_norm_stderr": 0.028782108105401712 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.3300970873786408, + "acc_stderr": 0.0465614711001235, + "acc_norm": 0.3300970873786408, + "acc_norm_stderr": 0.0465614711001235 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.20561941251596424, + "acc_stderr": 0.014452500456785823, + "acc_norm": 0.20561941251596424, + "acc_norm_stderr": 0.014452500456785823 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.22962962962962963, + "acc_stderr": 0.036333844140734636, + "acc_norm": 0.22962962962962963, + "acc_norm_stderr": 0.036333844140734636 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.17872340425531916, + "acc_stderr": 0.025045373272050957, + "acc_norm": 0.17872340425531916, + "acc_norm_stderr": 0.025045373272050957 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.20481927710843373, + "acc_stderr": 0.031417842916639245, + "acc_norm": 0.20481927710843373, + 
"acc_norm_stderr": 0.031417842916639245 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.24115755627009647, + "acc_stderr": 0.024296594034763426, + "acc_norm": 0.24115755627009647, + "acc_norm_stderr": 0.024296594034763426 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.1031390134529148, + "acc_stderr": 0.020412564289839272, + "acc_norm": 0.1031390134529148, + "acc_norm_stderr": 0.020412564289839272 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2748091603053435, + "acc_stderr": 0.039153454088478354, + "acc_norm": 0.2748091603053435, + "acc_norm_stderr": 0.039153454088478354 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.03358618145732523, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.03358618145732523 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2413793103448276, + "acc_stderr": 0.03565998174135302, + "acc_norm": 0.2413793103448276, + "acc_norm_stderr": 0.03565998174135302 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3627450980392157, + "acc_stderr": 0.04784060704105653, + "acc_norm": 0.3627450980392157, + "acc_norm_stderr": 0.04784060704105653 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3487394957983193, + "acc_stderr": 0.030956636328566548, + "acc_norm": 0.3487394957983193, + "acc_norm_stderr": 0.030956636328566548 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.36153846153846153, + "acc_stderr": 0.024359581465396983, + "acc_norm": 0.36153846153846153, + "acc_norm_stderr": 0.024359581465396983 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909284, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909284 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.2, + "acc_stderr": 0.040201512610368445, + 
"acc_norm": 0.2, + "acc_norm_stderr": 0.040201512610368445 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.21296296296296297, + "acc_stderr": 0.03957835471980981, + "acc_norm": 0.21296296296296297, + "acc_norm_stderr": 0.03957835471980981 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.27586206896551724, + "acc_stderr": 0.03144712581678241, + "acc_norm": 0.27586206896551724, + "acc_norm_stderr": 0.03144712581678241 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3193548387096774, + "acc_stderr": 0.02652270967466777, + "acc_norm": 0.3193548387096774, + "acc_norm_stderr": 0.02652270967466777 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.17094017094017094, + "acc_stderr": 0.024662496845209828, + "acc_norm": 0.17094017094017094, + "acc_norm_stderr": 0.024662496845209828 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3018867924528302, + "acc_stderr": 0.02825420034443866, + "acc_norm": 0.3018867924528302, + "acc_norm_stderr": 0.02825420034443866 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.22727272727272727, + "acc_stderr": 0.040139645540727735, + "acc_norm": 0.22727272727272727, + "acc_norm_stderr": 0.040139645540727735 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2740740740740741, + "acc_stderr": 0.027195934804085626, + "acc_norm": 0.2740740740740741, + "acc_norm_stderr": 0.027195934804085626 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33112582781456956, + "acc_stderr": 0.038425817186598696, + "acc_norm": 0.33112582781456956, + "acc_norm_stderr": 0.038425817186598696 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.26865671641791045, + "acc_stderr": 0.03134328358208954, + "acc_norm": 0.26865671641791045, + "acc_norm_stderr": 0.03134328358208954 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.34104046242774566, + "acc_stderr": 0.036146654241808254, + "acc_norm": 0.34104046242774566, + "acc_norm_stderr": 0.036146654241808254 + }, + 
"harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.24603174603174602, + "acc_stderr": 0.022182037202948365, + "acc_norm": 0.24603174603174602, + "acc_norm_stderr": 0.022182037202948365 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2569444444444444, + "acc_stderr": 0.03653946969442099, + "acc_norm": 0.2569444444444444, + "acc_norm_stderr": 0.03653946969442099 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.22, + "acc_stderr": 0.041633319989322695, + "acc_norm": 0.22, + "acc_norm_stderr": 0.041633319989322695 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2254335260115607, + "acc_stderr": 0.022497230190967547, + "acc_norm": 0.2254335260115607, + "acc_norm_stderr": 0.022497230190967547 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2392638036809816, + "acc_stderr": 0.033519538795212696, + "acc_norm": 0.2392638036809816, + "acc_norm_stderr": 0.033519538795212696 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.22530864197530864, + "acc_stderr": 0.02324620264781975, + "acc_norm": 0.22530864197530864, + "acc_norm_stderr": 0.02324620264781975 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720685, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720685 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.36787564766839376, + "acc_stderr": 0.034801756684660366, + "acc_norm": 0.36787564766839376, + "acc_norm_stderr": 0.034801756684660366 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.22807017543859648, + "acc_stderr": 0.03947152782669415, + "acc_norm": 0.22807017543859648, + "acc_norm_stderr": 0.03947152782669415 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3541284403669725, + "acc_stderr": 0.0205047290138291, + "acc_norm": 0.3541284403669725, + "acc_norm_stderr": 
0.0205047290138291 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.373015873015873, + "acc_stderr": 0.04325506042017086, + "acc_norm": 0.373015873015873, + "acc_norm_stderr": 0.04325506042017086 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2973856209150327, + "acc_stderr": 0.026173908506718576, + "acc_norm": 0.2973856209150327, + "acc_norm_stderr": 0.026173908506718576 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.22, + "acc_stderr": 0.041633319989322695, + "acc_norm": 0.22, + "acc_norm_stderr": 0.041633319989322695 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.12396694214876033, + "acc_stderr": 0.03008309871603522, + "acc_norm": 0.12396694214876033, + "acc_norm_stderr": 0.03008309871603522 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.32894736842105265, + "acc_stderr": 0.03823428969926606, + "acc_norm": 0.32894736842105265, + "acc_norm_stderr": 0.03823428969926606 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.21241830065359477, + "acc_stderr": 0.016547148636203147, + "acc_norm": 0.21241830065359477, + "acc_norm_stderr": 0.016547148636203147 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.24468085106382978, + "acc_stderr": 0.025645553622266733, + "acc_norm": 0.24468085106382978, + "acc_norm_stderr": 0.025645553622266733 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.16964285714285715, + "acc_stderr": 0.03562367850095391, + "acc_norm": 0.16964285714285715, + "acc_norm_stderr": 0.03562367850095391 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.0340470532865388, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.0340470532865388 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27262569832402234, + "acc_stderr": 0.014893391735249608, + "acc_norm": 0.27262569832402234, + "acc_norm_stderr": 0.014893391735249608 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + 
"acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4485294117647059, + "acc_stderr": 0.030211479609121593, + "acc_norm": 0.4485294117647059, + "acc_norm_stderr": 0.030211479609121593 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4163265306122449, + "acc_stderr": 0.03155782816556163, + "acc_norm": 0.4163265306122449, + "acc_norm_stderr": 0.03155782816556163 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.19831223628691982, + "acc_stderr": 0.025955020841621112, + "acc_norm": 0.19831223628691982, + "acc_norm_stderr": 0.025955020841621112 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.24315514993481094, + "acc_stderr": 0.010956556654417356, + "acc_norm": 0.24315514993481094, + "acc_norm_stderr": 0.010956556654417356 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.25980392156862747, + "acc_stderr": 0.030778554678693257, + "acc_norm": 0.25980392156862747, + "acc_norm_stderr": 0.030778554678693257 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.23636363636363636, + "acc_stderr": 0.033175059300091805, + "acc_norm": 0.23636363636363636, + "acc_norm_stderr": 0.033175059300091805 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2423500611995104, + "mc1_stderr": 0.015000674373570342, + "mc2": 0.4081734277840062, + "mc2_stderr": 0.014989124693241153 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.36835891381345925, + "acc_stderr": 0.01658385898263907, + "acc_norm": 0.46871310507674147, + "acc_norm_stderr": 0.017156666859785445 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + 
"harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + 
"harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "DILAB-HYU/KoQuality-Polyglot-5.8b", + "model_sha": "270b6dd7bb08032bb13164b7438b2bac83709ae4", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/DILAB-HYU/koquality-ko-ref-llama2-7b/result_2023-11-05 11:52:21.json b/DILAB-HYU/koquality-ko-ref-llama2-7b/result_2023-11-05 11:52:21.json new file mode 100644 index 0000000000000000000000000000000000000000..07206e453cbd2b46e708242213929b8311ab4360 --- /dev/null +++ b/DILAB-HYU/koquality-ko-ref-llama2-7b/result_2023-11-05 11:52:21.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.32593856655290104, + "acc_stderr": 0.01369743246669324, + "acc_norm": 0.3779863481228669, + "acc_norm_stderr": 0.0141696645203031 + }, + "harness|ko_hellaswag|10": { + "acc": 0.377414857598088, + "acc_stderr": 0.0048374934398742984, + "acc_norm": 0.48755228042222665, + "acc_norm_stderr": 0.004988234881206747 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.03615507630310935, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.03615507630310935 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.17475728155339806, + "acc_stderr": 0.037601780060266196, + "acc_norm": 0.17475728155339806, + "acc_norm_stderr": 0.037601780060266196 + }, + "harness|ko_mmlu_miscellaneous|5": 
{ + "acc": 0.3116219667943806, + "acc_stderr": 0.016562433867284176, + "acc_norm": 0.3116219667943806, + "acc_norm_stderr": 0.016562433867284176 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04072314811876837, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04072314811876837 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3148936170212766, + "acc_stderr": 0.030363582197238167, + "acc_norm": 0.3148936170212766, + "acc_norm_stderr": 0.030363582197238167 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.23493975903614459, + "acc_stderr": 0.03300533186128922, + "acc_norm": 0.23493975903614459, + "acc_norm_stderr": 0.03300533186128922 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.28938906752411575, + "acc_stderr": 0.025755865922632945, + "acc_norm": 0.28938906752411575, + "acc_norm_stderr": 0.025755865922632945 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.2825112107623318, + "acc_stderr": 0.03021683101150876, + "acc_norm": 0.2825112107623318, + "acc_norm_stderr": 0.03021683101150876 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2748091603053435, + "acc_stderr": 0.039153454088478354, + "acc_norm": 0.2748091603053435, + "acc_norm_stderr": 0.039153454088478354 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.32323232323232326, + "acc_stderr": 0.033322999210706444, + "acc_norm": 0.32323232323232326, + "acc_norm_stderr": 0.033322999210706444 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.23448275862068965, + "acc_stderr": 0.035306258743465914, + "acc_norm": 0.23448275862068965, + "acc_norm_stderr": 0.035306258743465914 + }, + 
"harness|ko_mmlu_college_physics|5": { + "acc": 0.17647058823529413, + "acc_stderr": 0.0379328118530781, + "acc_norm": 0.17647058823529413, + "acc_norm_stderr": 0.0379328118530781 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.029597329730978096, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.029597329730978096 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.25384615384615383, + "acc_stderr": 0.022066054378726257, + "acc_norm": 0.25384615384615383, + "acc_norm_stderr": 0.022066054378726257 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.3, + "acc_stderr": 0.04605661864718381, + "acc_norm": 0.3, + "acc_norm_stderr": 0.04605661864718381 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.16, + "acc_stderr": 0.03684529491774709, + "acc_norm": 0.16, + "acc_norm_stderr": 0.03684529491774709 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.24074074074074073, + "acc_stderr": 0.04133119440243839, + "acc_norm": 0.24074074074074073, + "acc_norm_stderr": 0.04133119440243839 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2955665024630542, + "acc_stderr": 0.032104944337514575, + "acc_norm": 0.2955665024630542, + "acc_norm_stderr": 0.032104944337514575 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2967741935483871, + "acc_stderr": 0.025988500792411894, + "acc_norm": 0.2967741935483871, + "acc_norm_stderr": 0.025988500792411894 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.32051282051282054, + "acc_stderr": 0.030572811310299607, + "acc_norm": 0.32051282051282054, + "acc_norm_stderr": 0.030572811310299607 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.26037735849056604, + "acc_stderr": 0.027008766090708087, + "acc_norm": 0.26037735849056604, + "acc_norm_stderr": 0.027008766090708087 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.23636363636363636, + "acc_stderr": 0.040693063197213775, + "acc_norm": 
0.23636363636363636, + "acc_norm_stderr": 0.040693063197213775 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.23333333333333334, + "acc_stderr": 0.02578787422095932, + "acc_norm": 0.23333333333333334, + "acc_norm_stderr": 0.02578787422095932 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + "acc_stderr": 0.037345356767871984, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.037345356767871984 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.29850746268656714, + "acc_stderr": 0.032357437893550424, + "acc_norm": 0.29850746268656714, + "acc_norm_stderr": 0.032357437893550424 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.24855491329479767, + "acc_stderr": 0.03295304696818318, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 0.03295304696818318 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2804232804232804, + "acc_stderr": 0.02313528797432563, + "acc_norm": 0.2804232804232804, + "acc_norm_stderr": 0.02313528797432563 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.03852084696008534, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.03852084696008534 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816507, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816507 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2658959537572254, + "acc_stderr": 0.023786203255508283, + "acc_norm": 0.2658959537572254, + "acc_norm_stderr": 0.023786203255508283 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.32515337423312884, + "acc_stderr": 0.03680350371286462, + "acc_norm": 0.32515337423312884, + "acc_norm_stderr": 0.03680350371286462 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2808641975308642, + "acc_stderr": 
0.02500646975579921, + "acc_norm": 0.2808641975308642, + "acc_norm_stderr": 0.02500646975579921 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.26424870466321243, + "acc_stderr": 0.03182155050916649, + "acc_norm": 0.26424870466321243, + "acc_norm_stderr": 0.03182155050916649 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.04185774424022056, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.04185774424022056 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.28623853211009176, + "acc_stderr": 0.019379436628919968, + "acc_norm": 0.28623853211009176, + "acc_norm_stderr": 0.019379436628919968 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.21428571428571427, + "acc_stderr": 0.03670066451047181, + "acc_norm": 0.21428571428571427, + "acc_norm_stderr": 0.03670066451047181 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2908496732026144, + "acc_stderr": 0.026004800363952113, + "acc_norm": 0.2908496732026144, + "acc_norm_stderr": 0.026004800363952113 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.36363636363636365, + "acc_stderr": 0.043913262867240704, + "acc_norm": 0.36363636363636365, + "acc_norm_stderr": 0.043913262867240704 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3092105263157895, + "acc_stderr": 0.037610708698674805, + "acc_norm": 0.3092105263157895, + "acc_norm_stderr": 0.037610708698674805 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.23366013071895425, + "acc_stderr": 0.017119158496044506, + "acc_norm": 0.23366013071895425, + "acc_norm_stderr": 0.017119158496044506 + }, + 
"harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2801418439716312, + "acc_stderr": 0.026789172351140245, + "acc_norm": 0.2801418439716312, + "acc_norm_stderr": 0.026789172351140245 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.26785714285714285, + "acc_stderr": 0.04203277291467761, + "acc_norm": 0.26785714285714285, + "acc_norm_stderr": 0.04203277291467761 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2638888888888889, + "acc_stderr": 0.030058202704309846, + "acc_norm": 0.2638888888888889, + "acc_norm_stderr": 0.030058202704309846 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2569832402234637, + "acc_stderr": 0.014614465821966358, + "acc_norm": 0.2569832402234637, + "acc_norm_stderr": 0.014614465821966358 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3125, + "acc_stderr": 0.02815637344037142, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.02815637344037142 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4, + "acc_stderr": 0.031362502409358915, + "acc_norm": 0.4, + "acc_norm_stderr": 0.031362502409358915 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2742616033755274, + "acc_stderr": 0.02904133351059804, + "acc_norm": 0.2742616033755274, + "acc_norm_stderr": 0.02904133351059804 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.27835723598435463, + "acc_stderr": 0.011446990197380984, + "acc_norm": 0.27835723598435463, + "acc_norm_stderr": 0.011446990197380984 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.25980392156862747, + "acc_stderr": 0.030778554678693264, + "acc_norm": 0.25980392156862747, + "acc_norm_stderr": 
0.030778554678693264 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.30303030303030304, + "acc_stderr": 0.035886248000917075, + "acc_norm": 0.30303030303030304, + "acc_norm_stderr": 0.035886248000917075 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.25091799265605874, + "mc1_stderr": 0.01517698502770768, + "mc2": 0.41091136339297607, + "mc2_stderr": 0.014831976469805178 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3270365997638725, + "acc_stderr": 0.016129047485457022, + "acc_norm": 0.40968122786304606, + "acc_norm_stderr": 0.01690756819221947 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + 
"harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "DILAB-HYU/koquality-ko-ref-llama2-7b", + "model_sha": "3ef89d06e678a10cd678b2f0258d0f4a0ef2b5bb", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/DILAB-HYU/koquality-polyglot-1.3b/result_2023-10-30 14:22:39.json b/DILAB-HYU/koquality-polyglot-1.3b/result_2023-10-30 14:22:39.json new file mode 100644 index 0000000000000000000000000000000000000000..84d4526e803ff7a428d9b968808e7a9131880576 --- /dev/null +++ b/DILAB-HYU/koquality-polyglot-1.3b/result_2023-10-30 14:22:39.json @@ -0,0 +1,444 @@ 
+{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2363481228668942, + "acc_stderr": 0.012414960524301823, + "acc_norm": 0.28924914675767915, + "acc_norm_stderr": 0.013250012579393443 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3379804819757021, + "acc_stderr": 0.004720551323547122, + "acc_norm": 0.4183429595698068, + "acc_norm_stderr": 0.004922789247319879 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.21052631578947367, + "acc_stderr": 0.0312678171466318, + "acc_norm": 0.21052631578947367, + "acc_norm_stderr": 0.0312678171466318 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.23300970873786409, + "acc_stderr": 0.041858325989283136, + "acc_norm": 0.23300970873786409, + "acc_norm_stderr": 0.041858325989283136 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2681992337164751, + "acc_stderr": 0.01584243083526944, + "acc_norm": 0.2681992337164751, + "acc_norm_stderr": 0.01584243083526944 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.035914440841969694, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.035914440841969694 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2425531914893617, + "acc_stderr": 0.028020226271200217, + "acc_norm": 0.2425531914893617, + "acc_norm_stderr": 0.028020226271200217 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.21084337349397592, + "acc_stderr": 0.0317555478662992, + "acc_norm": 0.21084337349397592, + "acc_norm_stderr": 0.0317555478662992 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2572347266881029, + "acc_stderr": 0.024826171289250888, + "acc_norm": 0.2572347266881029, + "acc_norm_stderr": 0.024826171289250888 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3273542600896861, + "acc_stderr": 0.03149384670994131, + "acc_norm": 0.3273542600896861, + "acc_norm_stderr": 
0.03149384670994131 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.22137404580152673, + "acc_stderr": 0.0364129708131373, + "acc_norm": 0.22137404580152673, + "acc_norm_stderr": 0.0364129708131373 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768077, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768077 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.2474747474747475, + "acc_stderr": 0.030746300742124498, + "acc_norm": 0.2474747474747475, + "acc_norm_stderr": 0.030746300742124498 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2482758620689655, + "acc_stderr": 0.036001056927277716, + "acc_norm": 0.2482758620689655, + "acc_norm_stderr": 0.036001056927277716 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.04220773659171453, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.04220773659171453 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3487394957983193, + "acc_stderr": 0.03095663632856654, + "acc_norm": 0.3487394957983193, + "acc_norm_stderr": 0.03095663632856654 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2717948717948718, + "acc_stderr": 0.022556551010132354, + "acc_norm": 0.2717948717948718, + "acc_norm_stderr": 0.022556551010132354 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.25, + "acc_stderr": 0.04186091791394607, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04186091791394607 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.27586206896551724, + "acc_stderr": 0.03144712581678243, + "acc_norm": 0.27586206896551724, + "acc_norm_stderr": 
0.03144712581678243 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2838709677419355, + "acc_stderr": 0.025649381063029258, + "acc_norm": 0.2838709677419355, + "acc_norm_stderr": 0.025649381063029258 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.21367521367521367, + "acc_stderr": 0.02685345037700916, + "acc_norm": 0.21367521367521367, + "acc_norm_stderr": 0.02685345037700916 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.27169811320754716, + "acc_stderr": 0.027377706624670713, + "acc_norm": 0.27169811320754716, + "acc_norm_stderr": 0.027377706624670713 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2818181818181818, + "acc_stderr": 0.0430911870994646, + "acc_norm": 0.2818181818181818, + "acc_norm_stderr": 0.0430911870994646 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.29259259259259257, + "acc_stderr": 0.027738969632176088, + "acc_norm": 0.29259259259259257, + "acc_norm_stderr": 0.027738969632176088 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.23178807947019867, + "acc_stderr": 0.034454062719870546, + "acc_norm": 0.23178807947019867, + "acc_norm_stderr": 0.034454062719870546 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.22885572139303484, + "acc_stderr": 0.029705284056772432, + "acc_norm": 0.22885572139303484, + "acc_norm_stderr": 0.029705284056772432 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2543352601156069, + "acc_stderr": 0.0332055644308557, + "acc_norm": 0.2543352601156069, + "acc_norm_stderr": 0.0332055644308557 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.25132275132275134, + "acc_stderr": 0.022340482339643898, + "acc_norm": 0.25132275132275134, + "acc_norm_stderr": 0.022340482339643898 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2638888888888889, + "acc_stderr": 0.03685651095897532, + "acc_norm": 0.2638888888888889, + "acc_norm_stderr": 0.03685651095897532 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.21, + 
"acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036845, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036845 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.24855491329479767, + "acc_stderr": 0.023267528432100174, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 0.023267528432100174 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3128834355828221, + "acc_stderr": 0.03642914578292404, + "acc_norm": 0.3128834355828221, + "acc_norm_stderr": 0.03642914578292404 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2623456790123457, + "acc_stderr": 0.02447722285613511, + "acc_norm": 0.2623456790123457, + "acc_norm_stderr": 0.02447722285613511 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.33678756476683935, + "acc_stderr": 0.03410780251836184, + "acc_norm": 0.33678756476683935, + "acc_norm_stderr": 0.03410780251836184 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.21052631578947367, + "acc_stderr": 0.038351539543994194, + "acc_norm": 0.21052631578947367, + "acc_norm_stderr": 0.038351539543994194 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.21834862385321102, + "acc_stderr": 0.017712600528722734, + "acc_norm": 0.21834862385321102, + "acc_norm_stderr": 0.017712600528722734 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.18253968253968253, + "acc_stderr": 0.03455071019102149, + "acc_norm": 0.18253968253968253, + "acc_norm_stderr": 0.03455071019102149 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.21895424836601307, + "acc_stderr": 0.02367908986180772, + "acc_norm": 0.21895424836601307, + "acc_norm_stderr": 0.02367908986180772 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 
0.18, + "acc_stderr": 0.03861229196653694, + "acc_norm": 0.18, + "acc_norm_stderr": 0.03861229196653694 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2644628099173554, + "acc_stderr": 0.04026187527591206, + "acc_norm": 0.2644628099173554, + "acc_norm_stderr": 0.04026187527591206 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.03459777606810537, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.03459777606810537 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2107843137254902, + "acc_stderr": 0.01650047297902479, + "acc_norm": 0.2107843137254902, + "acc_norm_stderr": 0.01650047297902479 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2624113475177305, + "acc_stderr": 0.02624492034984301, + "acc_norm": 0.2624113475177305, + "acc_norm_stderr": 0.02624492034984301 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.042878587513404565, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.042878587513404565 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.03400603625538272, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.03400603625538272 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24581005586592178, + "acc_stderr": 0.014400296429225608, + "acc_norm": 0.24581005586592178, + "acc_norm_stderr": 0.014400296429225608 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.36764705882352944, + "acc_stderr": 0.029289413409403192, + "acc_norm": 0.36764705882352944, + "acc_norm_stderr": 0.029289413409403192 + }, + 
"harness|ko_mmlu_security_studies|5": { + "acc": 0.3673469387755102, + "acc_stderr": 0.03086214492108755, + "acc_norm": 0.3673469387755102, + "acc_norm_stderr": 0.03086214492108755 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.24472573839662448, + "acc_stderr": 0.027985699387036423, + "acc_norm": 0.24472573839662448, + "acc_norm_stderr": 0.027985699387036423 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2438070404172099, + "acc_stderr": 0.010966507972178472, + "acc_norm": 0.2438070404172099, + "acc_norm_stderr": 0.010966507972178472 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.25980392156862747, + "acc_stderr": 0.030778554678693257, + "acc_norm": 0.25980392156862747, + "acc_norm_stderr": 0.030778554678693257 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.20606060606060606, + "acc_stderr": 0.0315841532404771, + "acc_norm": 0.20606060606060606, + "acc_norm_stderr": 0.0315841532404771 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2386780905752754, + "mc1_stderr": 0.014922629695456418, + "mc2": 0.41348688566296676, + "mc2_stderr": 0.015238831556708764 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3293978748524203, + "acc_stderr": 0.016158746868147143, + "acc_norm": 0.40731995277449823, + "acc_norm_stderr": 0.01689245669519127 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + 
"harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + 
"harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "DILAB-HYU/koquality-polyglot-1.3b", + "model_sha": "ca9ba27cccf4065cf447f9fdd7d5aec1715a3175", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/DILAB-HYU/koquality-polyglot-12.8b/result_2023-11-12 20:22:29.json b/DILAB-HYU/koquality-polyglot-12.8b/result_2023-11-12 20:22:29.json new file mode 100644 index 0000000000000000000000000000000000000000..039ee932e9ee8d05f5a93fe9158f92163fd097af --- /dev/null +++ b/DILAB-HYU/koquality-polyglot-12.8b/result_2023-11-12 20:22:29.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2986348122866894, + "acc_stderr": 0.013374078615068759, + "acc_norm": 0.35494880546075086, + "acc_norm_stderr": 0.013983036904094095 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4047998406691894, + "acc_stderr": 0.004898501014225842, + "acc_norm": 0.5283808006373233, + "acc_norm_stderr": 0.004981736689518753 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.03446296217088426, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.03446296217088426 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.1941747572815534, + "acc_stderr": 0.03916667762822584, + "acc_norm": 0.1941747572815534, + "acc_norm_stderr": 0.03916667762822584 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.27458492975734355, + "acc_stderr": 0.015959829933084032, + "acc_norm": 0.27458492975734355, + "acc_norm_stderr": 0.015959829933084032 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.32592592592592595, + "acc_stderr": 0.040491220417025055, + "acc_norm": 0.32592592592592595, + "acc_norm_stderr": 0.040491220417025055 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 
0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.225531914893617, + "acc_stderr": 0.02732107841738753, + "acc_norm": 0.225531914893617, + "acc_norm_stderr": 0.02732107841738753 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.18674698795180722, + "acc_stderr": 0.03033874914450058, + "acc_norm": 0.18674698795180722, + "acc_norm_stderr": 0.03033874914450058 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3086816720257235, + "acc_stderr": 0.026236965881153266, + "acc_norm": 0.3086816720257235, + "acc_norm_stderr": 0.026236965881153266 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.20179372197309417, + "acc_stderr": 0.02693611191280227, + "acc_norm": 0.20179372197309417, + "acc_norm_stderr": 0.02693611191280227 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.22900763358778625, + "acc_stderr": 0.036853466317118506, + "acc_norm": 0.22900763358778625, + "acc_norm_stderr": 0.036853466317118506 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.2676767676767677, + "acc_stderr": 0.031544498882702866, + "acc_norm": 0.2676767676767677, + "acc_norm_stderr": 0.031544498882702866 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.31724137931034485, + "acc_stderr": 0.038783523721386236, + "acc_norm": 0.31724137931034485, + "acc_norm_stderr": 0.038783523721386236 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.04488482852329017, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.04488482852329017 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.226890756302521, + "acc_stderr": 0.027205371538279472, + "acc_norm": 0.226890756302521, + "acc_norm_stderr": 0.027205371538279472 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 
0.24871794871794872, + "acc_stderr": 0.0219169577092138, + "acc_norm": 0.24871794871794872, + "acc_norm_stderr": 0.0219169577092138 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.040191074725573483, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.040191074725573483 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2512315270935961, + "acc_stderr": 0.030516530732694436, + "acc_norm": 0.2512315270935961, + "acc_norm_stderr": 0.030516530732694436 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.24838709677419354, + "acc_stderr": 0.02458002892148101, + "acc_norm": 0.24838709677419354, + "acc_norm_stderr": 0.02458002892148101 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.02934311479809447, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.02934311479809447 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.22264150943396227, + "acc_stderr": 0.02560423347089911, + "acc_norm": 0.22264150943396227, + "acc_norm_stderr": 0.02560423347089911 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.22727272727272727, + "acc_stderr": 0.04013964554072774, + "acc_norm": 0.22727272727272727, + "acc_norm_stderr": 0.04013964554072774 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24444444444444444, + "acc_stderr": 0.02620276653465215, + "acc_norm": 0.24444444444444444, + "acc_norm_stderr": 0.02620276653465215 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.24503311258278146, + "acc_stderr": 0.03511807571804725, + "acc_norm": 0.24503311258278146, + "acc_norm_stderr": 0.03511807571804725 + }, + 
"harness|ko_mmlu_sociology|5": { + "acc": 0.24875621890547264, + "acc_stderr": 0.030567675938916714, + "acc_norm": 0.24875621890547264, + "acc_norm_stderr": 0.030567675938916714 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.28901734104046245, + "acc_stderr": 0.03456425745087, + "acc_norm": 0.28901734104046245, + "acc_norm_stderr": 0.03456425745087 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2804232804232804, + "acc_stderr": 0.023135287974325618, + "acc_norm": 0.2804232804232804, + "acc_norm_stderr": 0.023135287974325618 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2986111111111111, + "acc_stderr": 0.038270523579507554, + "acc_norm": 0.2986111111111111, + "acc_norm_stderr": 0.038270523579507554 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.16, + "acc_stderr": 0.03684529491774709, + "acc_norm": 0.16, + "acc_norm_stderr": 0.03684529491774709 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.24566473988439305, + "acc_stderr": 0.02317629820399201, + "acc_norm": 0.24566473988439305, + "acc_norm_stderr": 0.02317629820399201 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3006134969325153, + "acc_stderr": 0.03602511318806771, + "acc_norm": 0.3006134969325153, + "acc_norm_stderr": 0.03602511318806771 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.29012345679012347, + "acc_stderr": 0.02525117393649502, + "acc_norm": 0.29012345679012347, + "acc_norm_stderr": 0.02525117393649502 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.23834196891191708, + "acc_stderr": 0.030748905363909895, + "acc_norm": 0.23834196891191708, + "acc_norm_stderr": 
0.030748905363909895 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.04185774424022057, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.04185774424022057 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.23669724770642203, + "acc_stderr": 0.01822407811729906, + "acc_norm": 0.23669724770642203, + "acc_norm_stderr": 0.01822407811729906 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.15079365079365079, + "acc_stderr": 0.03200686497287392, + "acc_norm": 0.15079365079365079, + "acc_norm_stderr": 0.03200686497287392 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.025261691219729487, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.025261691219729487 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.36363636363636365, + "acc_stderr": 0.043913262867240704, + "acc_norm": 0.36363636363636365, + "acc_norm_stderr": 0.043913262867240704 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.2894736842105263, + "acc_stderr": 0.03690677986137283, + "acc_norm": 0.2894736842105263, + "acc_norm_stderr": 0.03690677986137283 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.24673202614379086, + "acc_stderr": 0.0174408203674025, + "acc_norm": 0.24673202614379086, + "acc_norm_stderr": 0.0174408203674025 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.23404255319148937, + "acc_stderr": 0.025257861359432403, + "acc_norm": 0.23404255319148937, + "acc_norm_stderr": 0.025257861359432403 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25, + "acc_stderr": 0.04109974682633932, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04109974682633932 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.03362277436608044, + "acc_norm": 
0.4166666666666667, + "acc_norm_stderr": 0.03362277436608044 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2547486033519553, + "acc_stderr": 0.014572650383409162, + "acc_norm": 0.2547486033519553, + "acc_norm_stderr": 0.014572650383409162 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.33455882352941174, + "acc_stderr": 0.028661996202335317, + "acc_norm": 0.33455882352941174, + "acc_norm_stderr": 0.028661996202335317 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3836734693877551, + "acc_stderr": 0.031130880396235922, + "acc_norm": 0.3836734693877551, + "acc_norm_stderr": 0.031130880396235922 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.1940928270042194, + "acc_stderr": 0.025744902532290927, + "acc_norm": 0.1940928270042194, + "acc_norm_stderr": 0.025744902532290927 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2542372881355932, + "acc_stderr": 0.011121129007840673, + "acc_norm": 0.2542372881355932, + "acc_norm_stderr": 0.011121129007840673 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.25980392156862747, + "acc_stderr": 0.030778554678693264, + "acc_norm": 0.25980392156862747, + "acc_norm_stderr": 0.030778554678693264 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.24848484848484848, + "acc_stderr": 0.03374402644139405, + "acc_norm": 0.24848484848484848, + "acc_norm_stderr": 0.03374402644139405 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.23745410036719705, + "mc1_stderr": 0.014896277441041866, + "mc2": 0.3991183406834575, + "mc2_stderr": 0.01489709315395438 + }, + "harness|ko_commongen_v2|2": { + "acc": 
0.3305785123966942, + "acc_stderr": 0.0161734232988457, + "acc_norm": 0.3837072018890201, + "acc_norm_stderr": 0.016718924637231826 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + 
"harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "DILAB-HYU/koquality-polyglot-12.8b", + "model_sha": "7cca798e18e44cebbde1c6d1f59162882c2bf254", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/DILAB-HYU/koquality-polyglot-3.8b/result_2023-10-30 14:22:50.json b/DILAB-HYU/koquality-polyglot-3.8b/result_2023-10-30 14:22:50.json new file mode 100644 index 0000000000000000000000000000000000000000..4dd3e85c00130ec067da9ec58e55fcdcd9329997 --- /dev/null +++ b/DILAB-HYU/koquality-polyglot-3.8b/result_2023-10-30 14:22:50.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.26109215017064846, + "acc_stderr": 0.012835523909473855, + "acc_norm": 0.3097269624573379, + "acc_norm_stderr": 0.013512058415238361 + }, + "harness|ko_hellaswag|10": { + "acc": 0.36606253734315874, + "acc_stderr": 0.00480742334322458, + "acc_norm": 0.46016729735112527, + "acc_norm_stderr": 0.004973922192982238 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 
0.2573099415204678, + "acc_stderr": 0.03352799844161865, + "acc_norm": 0.2573099415204678, + "acc_norm_stderr": 0.03352799844161865 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.21359223300970873, + "acc_stderr": 0.04058042015646034, + "acc_norm": 0.21359223300970873, + "acc_norm_stderr": 0.04058042015646034 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2707535121328225, + "acc_stderr": 0.015889888362560486, + "acc_norm": 0.2707535121328225, + "acc_norm_stderr": 0.015889888362560486 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3037037037037037, + "acc_stderr": 0.03972552884785138, + "acc_norm": 0.3037037037037037, + "acc_norm_stderr": 0.03972552884785138 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.34, + "acc_stderr": 0.047609522856952365, + "acc_norm": 0.34, + "acc_norm_stderr": 0.047609522856952365 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.225531914893617, + "acc_stderr": 0.027321078417387533, + "acc_norm": 0.225531914893617, + "acc_norm_stderr": 0.027321078417387533 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.30120481927710846, + "acc_stderr": 0.035716092300534796, + "acc_norm": 0.30120481927710846, + "acc_norm_stderr": 0.035716092300534796 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3022508038585209, + "acc_stderr": 0.02608270069539966, + "acc_norm": 0.3022508038585209, + "acc_norm_stderr": 0.02608270069539966 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.2062780269058296, + "acc_stderr": 0.027157150479563824, + "acc_norm": 0.2062780269058296, + "acc_norm_stderr": 0.027157150479563824 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2366412213740458, + "acc_stderr": 0.037276735755969174, + "acc_norm": 0.2366412213740458, + "acc_norm_stderr": 0.037276735755969174 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.19, + "acc_stderr": 0.03942772444036623, + "acc_norm": 0.19, + "acc_norm_stderr": 0.03942772444036623 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 
0.29292929292929293, + "acc_stderr": 0.03242497958178817, + "acc_norm": 0.29292929292929293, + "acc_norm_stderr": 0.03242497958178817 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2482758620689655, + "acc_stderr": 0.03600105692727772, + "acc_norm": 0.2482758620689655, + "acc_norm_stderr": 0.03600105692727772 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.04389869956808778, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.04389869956808778 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.31932773109243695, + "acc_stderr": 0.030283995525884403, + "acc_norm": 0.31932773109243695, + "acc_norm_stderr": 0.030283995525884403 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2923076923076923, + "acc_stderr": 0.023060438380857737, + "acc_norm": 0.2923076923076923, + "acc_norm_stderr": 0.023060438380857737 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.16, + "acc_stderr": 0.03684529491774709, + "acc_norm": 0.16, + "acc_norm_stderr": 0.03684529491774709 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.21296296296296297, + "acc_stderr": 0.03957835471980981, + "acc_norm": 0.21296296296296297, + "acc_norm_stderr": 0.03957835471980981 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2955665024630542, + "acc_stderr": 0.032104944337514575, + "acc_norm": 0.2955665024630542, + "acc_norm_stderr": 0.032104944337514575 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2709677419354839, + "acc_stderr": 0.025284416114900156, + "acc_norm": 0.2709677419354839, + "acc_norm_stderr": 0.025284416114900156 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.18376068376068377, + "acc_stderr": 0.02537213967172293, + "acc_norm": 0.18376068376068377, + "acc_norm_stderr": 0.02537213967172293 + }, 
+ "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.22264150943396227, + "acc_stderr": 0.025604233470899105, + "acc_norm": 0.22264150943396227, + "acc_norm_stderr": 0.025604233470899105 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.20909090909090908, + "acc_stderr": 0.03895091015724136, + "acc_norm": 0.20909090909090908, + "acc_norm_stderr": 0.03895091015724136 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.026842057873833706, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.026842057873833706 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33112582781456956, + "acc_stderr": 0.038425817186598696, + "acc_norm": 0.33112582781456956, + "acc_norm_stderr": 0.038425817186598696 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.2537313432835821, + "acc_stderr": 0.030769444967296014, + "acc_norm": 0.2537313432835821, + "acc_norm_stderr": 0.030769444967296014 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.27167630057803466, + "acc_stderr": 0.033917503223216586, + "acc_norm": 0.27167630057803466, + "acc_norm_stderr": 0.033917503223216586 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.25396825396825395, + "acc_stderr": 0.022418042891113946, + "acc_norm": 0.25396825396825395, + "acc_norm_stderr": 0.022418042891113946 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.03745554791462457, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.03745554791462457 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.17, + "acc_stderr": 0.03775251680686371, + "acc_norm": 0.17, + "acc_norm_stderr": 0.03775251680686371 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.25722543352601157, + "acc_stderr": 0.023532925431044287, + "acc_norm": 
0.25722543352601157, + "acc_norm_stderr": 0.023532925431044287 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3067484662576687, + "acc_stderr": 0.036230899157241474, + "acc_norm": 0.3067484662576687, + "acc_norm_stderr": 0.036230899157241474 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.025630824975621344, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.025630824975621344 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.36787564766839376, + "acc_stderr": 0.034801756684660366, + "acc_norm": 0.36787564766839376, + "acc_norm_stderr": 0.034801756684660366 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.04049339297748141, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.04049339297748141 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.22752293577981653, + "acc_stderr": 0.0179744635787765, + "acc_norm": 0.22752293577981653, + "acc_norm_stderr": 0.0179744635787765 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.15873015873015872, + "acc_stderr": 0.032684540130117436, + "acc_norm": 0.15873015873015872, + "acc_norm_stderr": 0.032684540130117436 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.24183006535947713, + "acc_stderr": 0.024518195641879334, + "acc_norm": 0.24183006535947713, + "acc_norm_stderr": 0.024518195641879334 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.19, + "acc_stderr": 0.039427724440366234, + "acc_norm": 0.19, + "acc_norm_stderr": 0.039427724440366234 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2231404958677686, + "acc_stderr": 0.03800754475228733, + "acc_norm": 0.2231404958677686, + "acc_norm_stderr": 0.03800754475228733 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.29605263157894735, + 
"acc_stderr": 0.03715062154998905, + "acc_norm": 0.29605263157894735, + "acc_norm_stderr": 0.03715062154998905 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.016819028375736386, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.016819028375736386 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.25177304964539005, + "acc_stderr": 0.0258921511567094, + "acc_norm": 0.25177304964539005, + "acc_norm_stderr": 0.0258921511567094 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.22321428571428573, + "acc_stderr": 0.03952301967702511, + "acc_norm": 0.22321428571428573, + "acc_norm_stderr": 0.03952301967702511 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.0340470532865388, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.0340470532865388 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24692737430167597, + "acc_stderr": 0.014422292204808852, + "acc_norm": 0.24692737430167597, + "acc_norm_stderr": 0.014422292204808852 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909284, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909284 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.030161911930767102, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.030161911930767102 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.31020408163265306, + "acc_stderr": 0.029613459872484375, + "acc_norm": 0.31020408163265306, + "acc_norm_stderr": 0.029613459872484375 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2489451476793249, + "acc_stderr": 0.028146970599422644, + "acc_norm": 0.2489451476793249, + "acc_norm_stderr": 
0.028146970599422644 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.24315514993481094, + "acc_stderr": 0.010956556654417362, + "acc_norm": 0.24315514993481094, + "acc_norm_stderr": 0.010956556654417362 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.24019607843137256, + "acc_stderr": 0.02998373305591361, + "acc_norm": 0.24019607843137256, + "acc_norm_stderr": 0.02998373305591361 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.24242424242424243, + "acc_stderr": 0.03346409881055953, + "acc_norm": 0.24242424242424243, + "acc_norm_stderr": 0.03346409881055953 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2558139534883721, + "mc1_stderr": 0.015274176219283335, + "mc2": 0.4152993218865631, + "mc2_stderr": 0.015196497707034719 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3116883116883117, + "acc_stderr": 0.015924567607358338, + "acc_norm": 0.39433293978748524, + "acc_norm_stderr": 0.016802090674893213 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + 
"harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "DILAB-HYU/koquality-polyglot-3.8b", + "model_sha": "c07be8b24386d148dae0b95cf1beecfd5ce1b695", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ 
No newline at end of file diff --git a/DILAB-HYU/koquality-polyglot-ko-12.8b/result_2023-11-05 05:44:07.json b/DILAB-HYU/koquality-polyglot-ko-12.8b/result_2023-11-05 05:44:07.json new file mode 100644 index 0000000000000000000000000000000000000000..611d7db705f98a9c1303a8396c7886885e3211c7 --- /dev/null +++ b/DILAB-HYU/koquality-polyglot-ko-12.8b/result_2023-11-05 05:44:07.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3037542662116041, + "acc_stderr": 0.01343890918477875, + "acc_norm": 0.34044368600682595, + "acc_norm_stderr": 0.013847460518892978 + }, + "harness|ko_hellaswag|10": { + "acc": 0.394443337980482, + "acc_stderr": 0.004877319683639072, + "acc_norm": 0.5136427006572396, + "acc_norm_stderr": 0.004987923636628548 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.03446296217088426, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.03446296217088426 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.21359223300970873, + "acc_stderr": 0.04058042015646034, + "acc_norm": 0.21359223300970873, + "acc_norm_stderr": 0.04058042015646034 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.26947637292464877, + "acc_stderr": 0.01586624307321507, + "acc_norm": 0.26947637292464877, + "acc_norm_stderr": 0.01586624307321507 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.31851851851851853, + "acc_stderr": 0.04024778401977111, + "acc_norm": 0.31851851851851853, + "acc_norm_stderr": 0.04024778401977111 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.23404255319148937, + "acc_stderr": 0.02767845257821238, + "acc_norm": 0.23404255319148937, + "acc_norm_stderr": 0.02767845257821238 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.23493975903614459, + "acc_stderr": 0.03300533186128922, + "acc_norm": 
0.23493975903614459, + "acc_norm_stderr": 0.03300533186128922 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2829581993569132, + "acc_stderr": 0.02558306248998482, + "acc_norm": 0.2829581993569132, + "acc_norm_stderr": 0.02558306248998482 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.20179372197309417, + "acc_stderr": 0.02693611191280227, + "acc_norm": 0.20179372197309417, + "acc_norm_stderr": 0.02693611191280227 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2748091603053435, + "acc_stderr": 0.03915345408847835, + "acc_norm": 0.2748091603053435, + "acc_norm_stderr": 0.03915345408847835 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.2676767676767677, + "acc_stderr": 0.03154449888270287, + "acc_norm": 0.2676767676767677, + "acc_norm_stderr": 0.03154449888270287 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2827586206896552, + "acc_stderr": 0.03752833958003336, + "acc_norm": 0.2827586206896552, + "acc_norm_stderr": 0.03752833958003336 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.04336432707993178, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.04336432707993178 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.02755361446786381, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.02755361446786381 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2641025641025641, + "acc_stderr": 0.022352193737453268, + "acc_norm": 0.2641025641025641, + "acc_norm_stderr": 0.022352193737453268 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.28, + "acc_stderr": 
0.045126085985421255, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421255 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.2037037037037037, + "acc_stderr": 0.038935425188248475, + "acc_norm": 0.2037037037037037, + "acc_norm_stderr": 0.038935425188248475 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.26108374384236455, + "acc_stderr": 0.030903796952114475, + "acc_norm": 0.26108374384236455, + "acc_norm_stderr": 0.030903796952114475 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.24838709677419354, + "acc_stderr": 0.024580028921481006, + "acc_norm": 0.24838709677419354, + "acc_norm_stderr": 0.024580028921481006 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.21367521367521367, + "acc_stderr": 0.026853450377009164, + "acc_norm": 0.21367521367521367, + "acc_norm_stderr": 0.026853450377009164 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.28679245283018867, + "acc_stderr": 0.027834912527544057, + "acc_norm": 0.28679245283018867, + "acc_norm_stderr": 0.027834912527544057 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.24545454545454545, + "acc_stderr": 0.04122066502878284, + "acc_norm": 0.24545454545454545, + "acc_norm_stderr": 0.04122066502878284 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2111111111111111, + "acc_stderr": 0.024882116857655078, + "acc_norm": 0.2111111111111111, + "acc_norm_stderr": 0.024882116857655078 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2781456953642384, + "acc_stderr": 0.03658603262763743, + "acc_norm": 0.2781456953642384, + "acc_norm_stderr": 0.03658603262763743 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.263681592039801, + "acc_stderr": 0.03115715086935555, + "acc_norm": 0.263681592039801, + "acc_norm_stderr": 0.03115715086935555 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.35260115606936415, + "acc_stderr": 0.03643037168958548, + "acc_norm": 0.35260115606936415, + "acc_norm_stderr": 0.03643037168958548 + }, + 
"harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2671957671957672, + "acc_stderr": 0.022789673145776564, + "acc_norm": 0.2671957671957672, + "acc_norm_stderr": 0.022789673145776564 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2986111111111111, + "acc_stderr": 0.038270523579507554, + "acc_norm": 0.2986111111111111, + "acc_norm_stderr": 0.038270523579507554 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.23699421965317918, + "acc_stderr": 0.02289408248992599, + "acc_norm": 0.23699421965317918, + "acc_norm_stderr": 0.02289408248992599 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.25766871165644173, + "acc_stderr": 0.03436150827846917, + "acc_norm": 0.25766871165644173, + "acc_norm_stderr": 0.03436150827846917 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.29012345679012347, + "acc_stderr": 0.025251173936495026, + "acc_norm": 0.29012345679012347, + "acc_norm_stderr": 0.025251173936495026 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.24352331606217617, + "acc_stderr": 0.030975436386845436, + "acc_norm": 0.24352331606217617, + "acc_norm_stderr": 0.030975436386845436 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.03999423879281336, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.03999423879281336 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.27522935779816515, + "acc_stderr": 0.0191490937431552, + "acc_norm": 0.27522935779816515, + 
"acc_norm_stderr": 0.0191490937431552 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.23015873015873015, + "acc_stderr": 0.03764950879790607, + "acc_norm": 0.23015873015873015, + "acc_norm_stderr": 0.03764950879790607 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2581699346405229, + "acc_stderr": 0.025058503316958157, + "acc_norm": 0.2581699346405229, + "acc_norm_stderr": 0.025058503316958157 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.22, + "acc_stderr": 0.041633319989322674, + "acc_norm": 0.22, + "acc_norm_stderr": 0.041633319989322674 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.30578512396694213, + "acc_stderr": 0.04205953933884124, + "acc_norm": 0.30578512396694213, + "acc_norm_stderr": 0.04205953933884124 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3092105263157895, + "acc_stderr": 0.037610708698674805, + "acc_norm": 0.3092105263157895, + "acc_norm_stderr": 0.037610708698674805 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.25, + "acc_stderr": 0.01751781884501444, + "acc_norm": 0.25, + "acc_norm_stderr": 0.01751781884501444 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.24822695035460993, + "acc_stderr": 0.025770015644290392, + "acc_norm": 0.24822695035460993, + "acc_norm_stderr": 0.025770015644290392 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.23214285714285715, + "acc_stderr": 0.04007341809755806, + "acc_norm": 0.23214285714285715, + "acc_norm_stderr": 0.04007341809755806 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.033247089118091176, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.033247089118091176 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24134078212290502, + "acc_stderr": 0.01431099954796146, + "acc_norm": 0.24134078212290502, + "acc_norm_stderr": 0.01431099954796146 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + 
"acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.34191176470588236, + "acc_stderr": 0.02881472242225417, + "acc_norm": 0.34191176470588236, + "acc_norm_stderr": 0.02881472242225417 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.30612244897959184, + "acc_stderr": 0.02950489645459596, + "acc_norm": 0.30612244897959184, + "acc_norm_stderr": 0.02950489645459596 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2489451476793249, + "acc_stderr": 0.028146970599422644, + "acc_norm": 0.2489451476793249, + "acc_norm_stderr": 0.028146970599422644 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2457627118644068, + "acc_stderr": 0.010996156635142692, + "acc_norm": 0.2457627118644068, + "acc_norm_stderr": 0.010996156635142692 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.03132179803083291, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.03132179803083291 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2727272727272727, + "acc_stderr": 0.03477691162163659, + "acc_norm": 0.2727272727272727, + "acc_norm_stderr": 0.03477691162163659 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2582619339045288, + "mc1_stderr": 0.01532182168847619, + "mc2": 0.4062486938859843, + "mc2_stderr": 0.014871974864786166 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.31641086186540734, + "acc_stderr": 0.015989617951065477, + "acc_norm": 0.3778040141676505, + "acc_norm_stderr": 0.016669082840694963 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + 
"harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + 
"harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "DILAB-HYU/koquality-polyglot-ko-12.8b", + "model_sha": "8db9d0a47a6dc69b8fd405f4053c723a4c54696a", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/DKYoon/kosolar-hermes-test/result_2024-02-14 07:53:57.json b/DKYoon/kosolar-hermes-test/result_2024-02-14 07:53:57.json new file mode 100644 index 0000000000000000000000000000000000000000..8399a6a107fe48e5d1bb0ce74d5597cfbcd3dd60 --- /dev/null +++ b/DKYoon/kosolar-hermes-test/result_2024-02-14 07:53:57.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.45733788395904434, + "acc_stderr": 0.014558106543924063, + "acc_norm": 0.5238907849829352, + "acc_norm_stderr": 0.01459470179807165 + }, + "harness|ko_hellaswag|10": { + "acc": 0.45508862776339376, + "acc_stderr": 0.0049696115546853945, + "acc_norm": 0.6228838876717785, + "acc_norm_stderr": 0.0048367385140513286 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.631578947368421, + "acc_stderr": 0.036996580176568775, + "acc_norm": 0.631578947368421, + "acc_norm_stderr": 0.036996580176568775 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6796116504854369, + "acc_stderr": 0.04620284082280041, + "acc_norm": 0.6796116504854369, + "acc_norm_stderr": 0.04620284082280041 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 
0.6845466155810983, + "acc_stderr": 0.016617501738763408, + "acc_norm": 0.6845466155810983, + "acc_norm_stderr": 0.016617501738763408 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.043097329010363554, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.043097329010363554 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816508, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816508 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.5106382978723404, + "acc_stderr": 0.03267862331014063, + "acc_norm": 0.5106382978723404, + "acc_norm_stderr": 0.03267862331014063 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.463855421686747, + "acc_stderr": 0.038823108508905954, + "acc_norm": 0.463855421686747, + "acc_norm_stderr": 0.038823108508905954 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6334405144694534, + "acc_stderr": 0.02736807824397163, + "acc_norm": 0.6334405144694534, + "acc_norm_stderr": 0.02736807824397163 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5964125560538116, + "acc_stderr": 0.03292802819330314, + "acc_norm": 0.5964125560538116, + "acc_norm_stderr": 0.03292802819330314 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6259541984732825, + "acc_stderr": 0.04243869242230523, + "acc_norm": 0.6259541984732825, + "acc_norm_stderr": 0.04243869242230523 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.52, + "acc_stderr": 0.05021167315686781, + "acc_norm": 0.52, + "acc_norm_stderr": 0.05021167315686781 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7373737373737373, + "acc_stderr": 0.031353050095330855, + "acc_norm": 0.7373737373737373, + "acc_norm_stderr": 0.031353050095330855 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5241379310344828, + "acc_stderr": 0.041618085035015295, + "acc_norm": 0.5241379310344828, + "acc_norm_stderr": 0.041618085035015295 + }, + "harness|ko_mmlu_college_physics|5": { + 
"acc": 0.3431372549019608, + "acc_stderr": 0.04724007352383888, + "acc_norm": 0.3431372549019608, + "acc_norm_stderr": 0.04724007352383888 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6260504201680672, + "acc_stderr": 0.03142946637883708, + "acc_norm": 0.6260504201680672, + "acc_norm_stderr": 0.03142946637883708 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5564102564102564, + "acc_stderr": 0.02518914989476421, + "acc_norm": 0.5564102564102564, + "acc_norm_stderr": 0.02518914989476421 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.62, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.62, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.04557239513497751, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.04557239513497751 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.43349753694581283, + "acc_stderr": 0.034867317274198714, + "acc_norm": 0.43349753694581283, + "acc_norm_stderr": 0.034867317274198714 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6193548387096774, + "acc_stderr": 0.02762171783290703, + "acc_norm": 0.6193548387096774, + "acc_norm_stderr": 0.02762171783290703 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7905982905982906, + "acc_stderr": 0.026655699653922765, + "acc_norm": 0.7905982905982906, + "acc_norm_stderr": 0.026655699653922765 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5509433962264151, + "acc_stderr": 0.030612730713641092, + "acc_norm": 0.5509433962264151, + "acc_norm_stderr": 0.030612730713641092 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6090909090909091, + "acc_stderr": 0.04673752333670239, + "acc_norm": 0.6090909090909091, + "acc_norm_stderr": 0.04673752333670239 + }, + 
"harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.36666666666666664, + "acc_stderr": 0.029381620726465066, + "acc_norm": 0.36666666666666664, + "acc_norm_stderr": 0.029381620726465066 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33112582781456956, + "acc_stderr": 0.038425817186598696, + "acc_norm": 0.33112582781456956, + "acc_norm_stderr": 0.038425817186598696 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7213930348258707, + "acc_stderr": 0.031700561834973086, + "acc_norm": 0.7213930348258707, + "acc_norm_stderr": 0.031700561834973086 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5144508670520231, + "acc_stderr": 0.03810871630454764, + "acc_norm": 0.5144508670520231, + "acc_norm_stderr": 0.03810871630454764 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3915343915343915, + "acc_stderr": 0.025138091388851116, + "acc_norm": 0.3915343915343915, + "acc_norm_stderr": 0.025138091388851116 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5416666666666666, + "acc_stderr": 0.04166666666666666, + "acc_norm": 0.5416666666666666, + "acc_norm_stderr": 0.04166666666666666 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.78, + "acc_stderr": 0.04163331998932262, + "acc_norm": 0.78, + "acc_norm_stderr": 0.04163331998932262 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.6242774566473989, + "acc_stderr": 0.026074314851657076, + "acc_norm": 0.6242774566473989, + "acc_norm_stderr": 0.026074314851657076 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5521472392638037, + "acc_stderr": 0.03906947479456607, + "acc_norm": 0.5521472392638037, + "acc_norm_stderr": 0.03906947479456607 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6049382716049383, + "acc_stderr": 0.02720111766692565, + "acc_norm": 0.6049382716049383, + 
"acc_norm_stderr": 0.02720111766692565 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.047609522856952365, + "acc_norm": 0.34, + "acc_norm_stderr": 0.047609522856952365 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6994818652849741, + "acc_stderr": 0.0330881859441575, + "acc_norm": 0.6994818652849741, + "acc_norm_stderr": 0.0330881859441575 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.4473684210526316, + "acc_stderr": 0.04677473004491199, + "acc_norm": 0.4473684210526316, + "acc_norm_stderr": 0.04677473004491199 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.691743119266055, + "acc_stderr": 0.01979836669836726, + "acc_norm": 0.691743119266055, + "acc_norm_stderr": 0.01979836669836726 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.5079365079365079, + "acc_stderr": 0.044715725362943486, + "acc_norm": 0.5079365079365079, + "acc_norm_stderr": 0.044715725362943486 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5882352941176471, + "acc_stderr": 0.028180596328259287, + "acc_norm": 0.5882352941176471, + "acc_norm_stderr": 0.028180596328259287 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.62, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.62, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7272727272727273, + "acc_stderr": 0.040655781409087044, + "acc_norm": 0.7272727272727273, + "acc_norm_stderr": 0.040655781409087044 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.6118421052631579, + "acc_stderr": 0.03965842097512744, + "acc_norm": 0.6118421052631579, + "acc_norm_stderr": 0.03965842097512744 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5473856209150327, + "acc_stderr": 0.020136790918492534, + "acc_norm": 0.5473856209150327, + "acc_norm_stderr": 0.020136790918492534 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.4148936170212766, + "acc_stderr": 
0.029392236584612496, + "acc_norm": 0.4148936170212766, + "acc_norm_stderr": 0.029392236584612496 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4017857142857143, + "acc_stderr": 0.04653333146973646, + "acc_norm": 0.4017857142857143, + "acc_norm_stderr": 0.04653333146973646 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.03409386946992699, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.03409386946992699 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23910614525139665, + "acc_stderr": 0.014265554192331158, + "acc_norm": 0.23910614525139665, + "acc_norm_stderr": 0.014265554192331158 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.77, + "acc_stderr": 0.04229525846816503, + "acc_norm": 0.77, + "acc_norm_stderr": 0.04229525846816503 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5367647058823529, + "acc_stderr": 0.03029061918048569, + "acc_norm": 0.5367647058823529, + "acc_norm_stderr": 0.03029061918048569 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.7061224489795919, + "acc_stderr": 0.029162738410249765, + "acc_norm": 0.7061224489795919, + "acc_norm_stderr": 0.029162738410249765 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7552742616033755, + "acc_stderr": 0.027985699387036413, + "acc_norm": 0.7552742616033755, + "acc_norm_stderr": 0.027985699387036413 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.4165580182529335, + "acc_stderr": 0.01259115324505739, + "acc_norm": 0.4165580182529335, + "acc_norm_stderr": 0.01259115324505739 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.7156862745098039, + "acc_stderr": 0.03166009679399813, + "acc_norm": 0.7156862745098039, + "acc_norm_stderr": 0.03166009679399813 + }, + 
"harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.7272727272727273, + "acc_stderr": 0.0347769116216366, + "acc_norm": 0.7272727272727273, + "acc_norm_stderr": 0.0347769116216366 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.30599755201958384, + "mc1_stderr": 0.016132229728155062, + "mc2": 0.45196805400238105, + "mc2_stderr": 0.015153079564257226 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5820543093270366, + "acc_stderr": 0.016957292005279706, + "acc_norm": 0.6139315230224321, + "acc_norm_stderr": 0.016738130760321757 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + 
"harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "DKYoon/kosolar-hermes-test", + "model_sha": "333d71c30f64c3835dbcb00922493f74b3d89864", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Danielbrdz/Barcenas-14b-Phi-3-medium-ORPO/result_2024-07-31 19:07:10.json b/Danielbrdz/Barcenas-14b-Phi-3-medium-ORPO/result_2024-07-31 19:07:10.json new file mode 100644 index 0000000000000000000000000000000000000000..edd6b9eb688f47521b922b180dc871ea6ab358df --- /dev/null +++ b/Danielbrdz/Barcenas-14b-Phi-3-medium-ORPO/result_2024-07-31 19:07:10.json @@ -0,0 +1,444 @@ +{ + "results": { + 
"harness|ko_arc_challenge|25": { + "acc": 0.3703071672354949, + "acc_stderr": 0.014111298751674948, + "acc_norm": 0.4061433447098976, + "acc_norm_stderr": 0.014351656690097863 + }, + "harness|ko_hellaswag|10": { + "acc": 0.33867755427205737, + "acc_stderr": 0.004722928332834044, + "acc_norm": 0.416849233220474, + "acc_norm_stderr": 0.004920298437884906 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5029239766081871, + "acc_stderr": 0.03834759370936839, + "acc_norm": 0.5029239766081871, + "acc_norm_stderr": 0.03834759370936839 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6116504854368932, + "acc_stderr": 0.0482572933735639, + "acc_norm": 0.6116504854368932, + "acc_norm_stderr": 0.0482572933735639 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4674329501915709, + "acc_stderr": 0.017841995750520846, + "acc_norm": 0.4674329501915709, + "acc_norm_stderr": 0.017841995750520846 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.31851851851851853, + "acc_stderr": 0.04024778401977111, + "acc_norm": 0.31851851851851853, + "acc_norm_stderr": 0.04024778401977111 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4595744680851064, + "acc_stderr": 0.03257901482099835, + "acc_norm": 0.4595744680851064, + "acc_norm_stderr": 0.03257901482099835 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.43373493975903615, + "acc_stderr": 0.03858158940685516, + "acc_norm": 0.43373493975903615, + "acc_norm_stderr": 0.03858158940685516 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4919614147909968, + "acc_stderr": 0.028394421370984545, + "acc_norm": 0.4919614147909968, + "acc_norm_stderr": 0.028394421370984545 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5112107623318386, + "acc_stderr": 0.033549366530984746, + "acc_norm": 0.5112107623318386, + "acc_norm_stderr": 0.033549366530984746 + }, + 
"harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48091603053435117, + "acc_stderr": 0.04382094705550988, + "acc_norm": 0.48091603053435117, + "acc_norm_stderr": 0.04382094705550988 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5606060606060606, + "acc_stderr": 0.0353608594752948, + "acc_norm": 0.5606060606060606, + "acc_norm_stderr": 0.0353608594752948 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5172413793103449, + "acc_stderr": 0.04164188720169375, + "acc_norm": 0.5172413793103449, + "acc_norm_stderr": 0.04164188720169375 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237655, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237655 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5126050420168067, + "acc_stderr": 0.032468167657521745, + "acc_norm": 0.5126050420168067, + "acc_norm_stderr": 0.032468167657521745 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4641025641025641, + "acc_stderr": 0.025285585990017834, + "acc_norm": 0.4641025641025641, + "acc_norm_stderr": 0.025285585990017834 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.52, + "acc_stderr": 0.05021167315686779, + "acc_norm": 0.52, + "acc_norm_stderr": 0.05021167315686779 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6111111111111112, + "acc_stderr": 0.0471282125742677, + "acc_norm": 0.6111111111111112, + "acc_norm_stderr": 0.0471282125742677 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.43842364532019706, + "acc_stderr": 0.03491207857486518, + "acc_norm": 0.43842364532019706, + "acc_norm_stderr": 
0.03491207857486518 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.535483870967742, + "acc_stderr": 0.028372287797962945, + "acc_norm": 0.535483870967742, + "acc_norm_stderr": 0.028372287797962945 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6709401709401709, + "acc_stderr": 0.030782321577688173, + "acc_norm": 0.6709401709401709, + "acc_norm_stderr": 0.030782321577688173 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.44528301886792454, + "acc_stderr": 0.03058805297427066, + "acc_norm": 0.44528301886792454, + "acc_norm_stderr": 0.03058805297427066 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4818181818181818, + "acc_stderr": 0.04785964010794916, + "acc_norm": 0.4818181818181818, + "acc_norm_stderr": 0.04785964010794916 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.40370370370370373, + "acc_stderr": 0.029914812342227627, + "acc_norm": 0.40370370370370373, + "acc_norm_stderr": 0.029914812342227627 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3841059602649007, + "acc_stderr": 0.03971301814719197, + "acc_norm": 0.3841059602649007, + "acc_norm_stderr": 0.03971301814719197 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6069651741293532, + "acc_stderr": 0.0345368246603156, + "acc_norm": 0.6069651741293532, + "acc_norm_stderr": 0.0345368246603156 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.34104046242774566, + "acc_stderr": 0.036146654241808254, + "acc_norm": 0.34104046242774566, + "acc_norm_stderr": 0.036146654241808254 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.5582010582010583, + "acc_stderr": 0.02557625706125384, + "acc_norm": 0.5582010582010583, + "acc_norm_stderr": 0.02557625706125384 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3402777777777778, + "acc_stderr": 0.039621355734862175, + "acc_norm": 0.3402777777777778, + "acc_norm_stderr": 0.039621355734862175 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.37, + "acc_stderr": 
0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.57, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.57, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4884393063583815, + "acc_stderr": 0.026911898686377927, + "acc_norm": 0.4884393063583815, + "acc_norm_stderr": 0.026911898686377927 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4662576687116564, + "acc_stderr": 0.039194155450484096, + "acc_norm": 0.4662576687116564, + "acc_norm_stderr": 0.039194155450484096 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.44753086419753085, + "acc_stderr": 0.027667138569422708, + "acc_norm": 0.44753086419753085, + "acc_norm_stderr": 0.027667138569422708 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.45077720207253885, + "acc_stderr": 0.03590910952235524, + "acc_norm": 0.45077720207253885, + "acc_norm_stderr": 0.03590910952235524 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3684210526315789, + "acc_stderr": 0.04537815354939392, + "acc_norm": 0.3684210526315789, + "acc_norm_stderr": 0.04537815354939392 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.48440366972477067, + "acc_stderr": 0.02142689153920805, + "acc_norm": 0.48440366972477067, + "acc_norm_stderr": 0.02142689153920805 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.04444444444444449, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.04444444444444449 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5424836601307189, + "acc_stderr": 0.028526383452142638, + "acc_norm": 0.5424836601307189, + "acc_norm_stderr": 0.028526383452142638 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.52, + 
"acc_stderr": 0.05021167315686779, + "acc_norm": 0.52, + "acc_norm_stderr": 0.05021167315686779 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6446280991735537, + "acc_stderr": 0.0436923632657398, + "acc_norm": 0.6446280991735537, + "acc_norm_stderr": 0.0436923632657398 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.48026315789473684, + "acc_stderr": 0.04065771002562603, + "acc_norm": 0.48026315789473684, + "acc_norm_stderr": 0.04065771002562603 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4215686274509804, + "acc_stderr": 0.019977422600227467, + "acc_norm": 0.4215686274509804, + "acc_norm_stderr": 0.019977422600227467 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.4219858156028369, + "acc_stderr": 0.029462189233370607, + "acc_norm": 0.4219858156028369, + "acc_norm_stderr": 0.029462189233370607 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.38392857142857145, + "acc_stderr": 0.04616143075028546, + "acc_norm": 0.38392857142857145, + "acc_norm_stderr": 0.04616143075028546 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4537037037037037, + "acc_stderr": 0.03395322726375797, + "acc_norm": 0.4537037037037037, + "acc_norm_stderr": 0.03395322726375797 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.31731843575418994, + "acc_stderr": 0.01556639263005703, + "acc_norm": 0.31731843575418994, + "acc_norm_stderr": 0.01556639263005703 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.66, + "acc_stderr": 0.04760952285695237, + "acc_norm": 0.66, + "acc_norm_stderr": 0.04760952285695237 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3639705882352941, + "acc_stderr": 0.029227192460032025, + "acc_norm": 0.3639705882352941, + "acc_norm_stderr": 0.029227192460032025 + }, + 
"harness|ko_mmlu_security_studies|5": { + "acc": 0.5265306122448979, + "acc_stderr": 0.03196412734523272, + "acc_norm": 0.5265306122448979, + "acc_norm_stderr": 0.03196412734523272 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5654008438818565, + "acc_stderr": 0.03226759995510145, + "acc_norm": 0.5654008438818565, + "acc_norm_stderr": 0.03226759995510145 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3539765319426336, + "acc_stderr": 0.012213504731731646, + "acc_norm": 0.3539765319426336, + "acc_norm_stderr": 0.012213504731731646 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4117647058823529, + "acc_stderr": 0.03454236585380609, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.03454236585380609 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.46060606060606063, + "acc_stderr": 0.03892207016552013, + "acc_norm": 0.46060606060606063, + "acc_norm_stderr": 0.03892207016552013 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.32558139534883723, + "mc1_stderr": 0.016403989469907815, + "mc2": 0.47752621597215167, + "mc2_stderr": 0.015896749649219843 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4887839433293979, + "acc_stderr": 0.017186028469489283, + "acc_norm": 0.5064935064935064, + "acc_norm_stderr": 0.01718890435907731 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + 
"harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + 
"harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Danielbrdz/Barcenas-14b-Phi-3-medium-ORPO", + "model_sha": "b749dbcb19901b8fd0e9f38c923a24533569f895", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Danielbrdz/Barcenas-2x10.7b-Korean/result_2024-04-27 16:36:38.json b/Danielbrdz/Barcenas-2x10.7b-Korean/result_2024-04-27 16:36:38.json new file mode 100644 index 0000000000000000000000000000000000000000..a93339f52352a4ec64e9328a097d7c79cc6a66a7 --- /dev/null +++ b/Danielbrdz/Barcenas-2x10.7b-Korean/result_2024-04-27 16:36:38.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.24744027303754265, + "acc_stderr": 0.01261035266329267, + "acc_norm": 0.3873720136518771, + "acc_norm_stderr": 0.014235872487909869 + }, + "harness|ko_hellaswag|10": { + "acc": 0.2749452300338578, + "acc_stderr": 0.004455741817861901, + "acc_norm": 0.3882692690699064, + "acc_norm_stderr": 0.00486360363836745 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.2982456140350877, + "acc_stderr": 0.03508771929824563, + "acc_norm": 0.2982456140350877, + "acc_norm_stderr": 0.03508771929824563 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.22330097087378642, + "acc_stderr": 0.04123553189891431, + "acc_norm": 0.22330097087378642, + "acc_norm_stderr": 0.04123553189891431 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3397190293742018, + "acc_stderr": 0.01693639411430165, + "acc_norm": 0.3397190293742018, + "acc_norm_stderr": 0.01693639411430165 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34074074074074073, + "acc_stderr": 0.040943762699967946, + "acc_norm": 0.34074074074074073, + "acc_norm_stderr": 0.040943762699967946 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 
0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.251063829787234, + "acc_stderr": 0.02834696377716245, + "acc_norm": 0.251063829787234, + "acc_norm_stderr": 0.02834696377716245 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.19879518072289157, + "acc_stderr": 0.031069390260789406, + "acc_norm": 0.19879518072289157, + "acc_norm_stderr": 0.031069390260789406 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3440514469453376, + "acc_stderr": 0.026981478043648026, + "acc_norm": 0.3440514469453376, + "acc_norm_stderr": 0.026981478043648026 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.27802690582959644, + "acc_stderr": 0.03006958487449405, + "acc_norm": 0.27802690582959644, + "acc_norm_stderr": 0.03006958487449405 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.24427480916030533, + "acc_stderr": 0.037683359597287434, + "acc_norm": 0.24427480916030533, + "acc_norm_stderr": 0.037683359597287434 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816505, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816505 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.30303030303030304, + "acc_stderr": 0.032742879140268674, + "acc_norm": 0.30303030303030304, + "acc_norm_stderr": 0.032742879140268674 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.296551724137931, + "acc_stderr": 0.03806142687309994, + "acc_norm": 0.296551724137931, + "acc_norm_stderr": 0.03806142687309994 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.17647058823529413, + "acc_stderr": 0.0379328118530781, + "acc_norm": 0.17647058823529413, + "acc_norm_stderr": 0.0379328118530781 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.25630252100840334, + "acc_stderr": 0.028359620870533953, + "acc_norm": 0.25630252100840334, + "acc_norm_stderr": 0.028359620870533953 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.22564102564102564, + "acc_stderr": 
0.021193632525148533, + "acc_norm": 0.22564102564102564, + "acc_norm_stderr": 0.021193632525148533 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145634, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145634 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.04453197507374984, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.04453197507374984 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3399014778325123, + "acc_stderr": 0.033327690684107895, + "acc_norm": 0.3399014778325123, + "acc_norm_stderr": 0.033327690684107895 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3032258064516129, + "acc_stderr": 0.02614868593067175, + "acc_norm": 0.3032258064516129, + "acc_norm_stderr": 0.02614868593067175 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.36752136752136755, + "acc_stderr": 0.031585391577456365, + "acc_norm": 0.36752136752136755, + "acc_norm_stderr": 0.031585391577456365 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2943396226415094, + "acc_stderr": 0.028049186315695245, + "acc_norm": 0.2943396226415094, + "acc_norm_stderr": 0.028049186315695245 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.35454545454545455, + "acc_stderr": 0.04582004841505417, + "acc_norm": 0.35454545454545455, + "acc_norm_stderr": 0.04582004841505417 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.23703703703703705, + "acc_stderr": 0.02592887613276611, + "acc_norm": 0.23703703703703705, + "acc_norm_stderr": 0.02592887613276611 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + "acc_stderr": 0.037345356767871984, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.037345356767871984 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 
0.2885572139303483, + "acc_stderr": 0.03203841040213323, + "acc_norm": 0.2885572139303483, + "acc_norm_stderr": 0.03203841040213323 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.24855491329479767, + "acc_stderr": 0.03295304696818318, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 0.03295304696818318 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2275132275132275, + "acc_stderr": 0.02159126940782378, + "acc_norm": 0.2275132275132275, + "acc_norm_stderr": 0.02159126940782378 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.24305555555555555, + "acc_stderr": 0.03586879280080342, + "acc_norm": 0.24305555555555555, + "acc_norm_stderr": 0.03586879280080342 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036845, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036845 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.36127167630057805, + "acc_stderr": 0.025862201852277906, + "acc_norm": 0.36127167630057805, + "acc_norm_stderr": 0.025862201852277906 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3128834355828221, + "acc_stderr": 0.036429145782924034, + "acc_norm": 0.3128834355828221, + "acc_norm_stderr": 0.036429145782924034 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3487654320987654, + "acc_stderr": 0.02651759772446501, + "acc_norm": 0.3487654320987654, + "acc_norm_stderr": 0.02651759772446501 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.24870466321243523, + "acc_stderr": 0.03119584087770029, + "acc_norm": 0.24870466321243523, + "acc_norm_stderr": 0.03119584087770029 + }, + 
"harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.04185774424022056, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.04185774424022056 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.29174311926605506, + "acc_stderr": 0.01948930096887653, + "acc_norm": 0.29174311926605506, + "acc_norm_stderr": 0.01948930096887653 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.23015873015873015, + "acc_stderr": 0.03764950879790604, + "acc_norm": 0.23015873015873015, + "acc_norm_stderr": 0.03764950879790604 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.32679738562091504, + "acc_stderr": 0.026857294663281416, + "acc_norm": 0.32679738562091504, + "acc_norm_stderr": 0.026857294663281416 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.38016528925619836, + "acc_stderr": 0.04431324501968431, + "acc_norm": 0.38016528925619836, + "acc_norm_stderr": 0.04431324501968431 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.2894736842105263, + "acc_stderr": 0.03690677986137283, + "acc_norm": 0.2894736842105263, + "acc_norm_stderr": 0.03690677986137283 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.29248366013071897, + "acc_stderr": 0.01840341571010979, + "acc_norm": 0.29248366013071897, + "acc_norm_stderr": 0.01840341571010979 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.29432624113475175, + "acc_stderr": 0.027187127011503796, + "acc_norm": 0.29432624113475175, + "acc_norm_stderr": 0.027187127011503796 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.21428571428571427, + "acc_stderr": 0.03894641120044792, + "acc_norm": 0.21428571428571427, + "acc_norm_stderr": 0.03894641120044792 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.21296296296296297, + "acc_stderr": 0.027920963147993666, + "acc_norm": 
0.21296296296296297, + "acc_norm_stderr": 0.027920963147993666 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.1564245810055866, + "acc_stderr": 0.012149144539664161, + "acc_norm": 0.1564245810055866, + "acc_norm_stderr": 0.012149144539664161 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932269, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932269 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.21691176470588236, + "acc_stderr": 0.02503584522771126, + "acc_norm": 0.21691176470588236, + "acc_norm_stderr": 0.02503584522771126 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2571428571428571, + "acc_stderr": 0.027979823538744543, + "acc_norm": 0.2571428571428571, + "acc_norm_stderr": 0.027979823538744543 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.29957805907172996, + "acc_stderr": 0.0298180247497531, + "acc_norm": 0.29957805907172996, + "acc_norm_stderr": 0.0298180247497531 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2770534550195567, + "acc_stderr": 0.011430462443719678, + "acc_norm": 0.2770534550195567, + "acc_norm_stderr": 0.011430462443719678 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.29901960784313725, + "acc_stderr": 0.03213325717373617, + "acc_norm": 0.29901960784313725, + "acc_norm_stderr": 0.03213325717373617 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3151515151515151, + "acc_stderr": 0.0362773057502241, + "acc_norm": 0.3151515151515151, + "acc_norm_stderr": 0.0362773057502241 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2692778457772338, + "mc1_stderr": 0.01552856663708729, + "mc2": 0.5091125601958423, + "mc2_stderr": 0.017262344185815827 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.11570247933884298, + 
"acc_stderr": 0.01099728182694215, + "acc_norm": 0.3730814639905549, + "acc_norm_stderr": 0.01662731827513743 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + 
"harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Danielbrdz/Barcenas-2x10.7b-Korean", + "model_sha": "bcc5f0a3e63caac0f7a8459da21379daf5fb1edd", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Danielbrdz/Barcenas-Llama3-8b-ORPO/result_2024-05-13 16:38:37.json b/Danielbrdz/Barcenas-Llama3-8b-ORPO/result_2024-05-13 16:38:37.json new file mode 100644 index 0000000000000000000000000000000000000000..5d64ef9b40c2f8b5d541e93e3e294c37e3106e0c --- /dev/null +++ b/Danielbrdz/Barcenas-Llama3-8b-ORPO/result_2024-05-13 16:38:37.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4206484641638225, + "acc_stderr": 0.014426211252508403, + "acc_norm": 0.4872013651877133, + "acc_norm_stderr": 0.014606603181012541 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3784106751643099, + "acc_stderr": 0.004839995745602313, + "acc_norm": 0.5045807608046206, + "acc_norm_stderr": 0.004989572002196689 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5789473684210527, + "acc_stderr": 
0.03786720706234214, + "acc_norm": 0.5789473684210527, + "acc_norm_stderr": 0.03786720706234214 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6310679611650486, + "acc_stderr": 0.0477761518115674, + "acc_norm": 0.6310679611650486, + "acc_norm_stderr": 0.0477761518115674 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.45338441890166026, + "acc_stderr": 0.017802087135850297, + "acc_norm": 0.45338441890166026, + "acc_norm_stderr": 0.017802087135850297 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.04171654161354543, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.04171654161354543 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680814, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4723404255319149, + "acc_stderr": 0.03263597118409769, + "acc_norm": 0.4723404255319149, + "acc_norm_stderr": 0.03263597118409769 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3493975903614458, + "acc_stderr": 0.0371172519074075, + "acc_norm": 0.3493975903614458, + "acc_norm_stderr": 0.0371172519074075 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.572347266881029, + "acc_stderr": 0.02809924077580957, + "acc_norm": 0.572347266881029, + "acc_norm_stderr": 0.02809924077580957 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5067264573991032, + "acc_stderr": 0.033554765962343545, + "acc_norm": 0.5067264573991032, + "acc_norm_stderr": 0.033554765962343545 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5190839694656488, + "acc_stderr": 0.043820947055509867, + "acc_norm": 0.5190839694656488, + "acc_norm_stderr": 0.043820947055509867 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5151515151515151, + "acc_stderr": 
0.035607165165310595, + "acc_norm": 0.5151515151515151, + "acc_norm_stderr": 0.035607165165310595 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.593103448275862, + "acc_stderr": 0.04093793981266237, + "acc_norm": 0.593103448275862, + "acc_norm_stderr": 0.04093793981266237 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.4019607843137255, + "acc_stderr": 0.04878608714466996, + "acc_norm": 0.4019607843137255, + "acc_norm_stderr": 0.04878608714466996 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5756302521008403, + "acc_stderr": 0.03210479051015776, + "acc_norm": 0.5756302521008403, + "acc_norm_stderr": 0.03210479051015776 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5256410256410257, + "acc_stderr": 0.025317649726448666, + "acc_norm": 0.5256410256410257, + "acc_norm_stderr": 0.025317649726448666 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.57, + "acc_stderr": 0.0497569851956243, + "acc_norm": 0.57, + "acc_norm_stderr": 0.0497569851956243 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6388888888888888, + "acc_stderr": 0.04643454608906275, + "acc_norm": 0.6388888888888888, + "acc_norm_stderr": 0.04643454608906275 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4630541871921182, + "acc_stderr": 0.035083705204426656, + "acc_norm": 0.4630541871921182, + "acc_norm_stderr": 0.035083705204426656 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5451612903225806, + "acc_stderr": 0.028327743091561074, + "acc_norm": 0.5451612903225806, + "acc_norm_stderr": 0.028327743091561074 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7478632478632479, + "acc_stderr": 0.02844796547623102, + "acc_norm": 0.7478632478632479, + "acc_norm_stderr": 0.02844796547623102 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + 
"acc": 0.5283018867924528, + "acc_stderr": 0.0307235352490061, + "acc_norm": 0.5283018867924528, + "acc_norm_stderr": 0.0307235352490061 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.04769300568972744, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.04769300568972744 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.029958249250082114, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.029958249250082114 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.37748344370860926, + "acc_stderr": 0.039580272311215706, + "acc_norm": 0.37748344370860926, + "acc_norm_stderr": 0.039580272311215706 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7064676616915423, + "acc_stderr": 0.03220024104534205, + "acc_norm": 0.7064676616915423, + "acc_norm_stderr": 0.03220024104534205 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4797687861271676, + "acc_stderr": 0.03809342081273958, + "acc_norm": 0.4797687861271676, + "acc_norm_stderr": 0.03809342081273958 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.025107425481137282, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.025107425481137282 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4583333333333333, + "acc_stderr": 0.04166666666666665, + "acc_norm": 0.4583333333333333, + "acc_norm_stderr": 0.04166666666666665 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.74, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.74, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5809248554913294, + "acc_stderr": 0.026564178111422622, + "acc_norm": 0.5809248554913294, + "acc_norm_stderr": 0.026564178111422622 + }, + 
"harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4785276073619632, + "acc_stderr": 0.03924746876751129, + "acc_norm": 0.4785276073619632, + "acc_norm_stderr": 0.03924746876751129 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5370370370370371, + "acc_stderr": 0.02774431344337654, + "acc_norm": 0.5370370370370371, + "acc_norm_stderr": 0.02774431344337654 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709390974, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709390974 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5906735751295337, + "acc_stderr": 0.03548608168860806, + "acc_norm": 0.5906735751295337, + "acc_norm_stderr": 0.03548608168860806 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.34210526315789475, + "acc_stderr": 0.044629175353369376, + "acc_norm": 0.34210526315789475, + "acc_norm_stderr": 0.044629175353369376 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5926605504587156, + "acc_stderr": 0.021065986244412895, + "acc_norm": 0.5926605504587156, + "acc_norm_stderr": 0.021065986244412895 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3968253968253968, + "acc_stderr": 0.04375888492727062, + "acc_norm": 0.3968253968253968, + "acc_norm_stderr": 0.04375888492727062 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5490196078431373, + "acc_stderr": 0.028491993586171563, + "acc_norm": 0.5490196078431373, + "acc_norm_stderr": 0.028491993586171563 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.63, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.63, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7107438016528925, + "acc_stderr": 0.04139112727635464, + "acc_norm": 0.7107438016528925, + "acc_norm_stderr": 0.04139112727635464 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5394736842105263, + "acc_stderr": 0.04056242252249035, + "acc_norm": 0.5394736842105263, + "acc_norm_stderr": 
0.04056242252249035 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.45751633986928103, + "acc_stderr": 0.020154685712590884, + "acc_norm": 0.45751633986928103, + "acc_norm_stderr": 0.020154685712590884 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.35815602836879434, + "acc_stderr": 0.028602085862759422, + "acc_norm": 0.35815602836879434, + "acc_norm_stderr": 0.028602085862759422 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4642857142857143, + "acc_stderr": 0.04733667890053757, + "acc_norm": 0.4642857142857143, + "acc_norm_stderr": 0.04733667890053757 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.47685185185185186, + "acc_stderr": 0.034063153607115086, + "acc_norm": 0.47685185185185186, + "acc_norm_stderr": 0.034063153607115086 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.29832402234636873, + "acc_stderr": 0.015301840045129267, + "acc_norm": 0.29832402234636873, + "acc_norm_stderr": 0.015301840045129267 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.7, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.7, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.40441176470588236, + "acc_stderr": 0.029812630701569743, + "acc_norm": 0.40441176470588236, + "acc_norm_stderr": 0.029812630701569743 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6530612244897959, + "acc_stderr": 0.030472526026726496, + "acc_norm": 0.6530612244897959, + "acc_norm_stderr": 0.030472526026726496 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6624472573839663, + "acc_stderr": 0.030781549102026205, + "acc_norm": 0.6624472573839663, + "acc_norm_stderr": 0.030781549102026205 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.38396349413298564, 
+ "acc_stderr": 0.01242158783313423, + "acc_norm": 0.38396349413298564, + "acc_norm_stderr": 0.01242158783313423 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5637254901960784, + "acc_stderr": 0.03480693138457039, + "acc_norm": 0.5637254901960784, + "acc_norm_stderr": 0.03480693138457039 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6848484848484848, + "acc_stderr": 0.0362773057502241, + "acc_norm": 0.6848484848484848, + "acc_norm_stderr": 0.0362773057502241 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3635250917992656, + "mc1_stderr": 0.016838862883965813, + "mc2": 0.5455338309920051, + "mc2_stderr": 0.01599542661949036 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.48760330578512395, + "acc_stderr": 0.01718506973267654, + "acc_norm": 0.538370720188902, + "acc_norm_stderr": 0.01713966022184556 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + 
"harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Danielbrdz/Barcenas-Llama3-8b-ORPO", + "model_sha": "66c848c4526d3db1ec41468c0f73ac4448c6abe9", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/DeepMount00/Llama-3-8b-Ita/result_2024-05-17 15:15:19.json 
b/DeepMount00/Llama-3-8b-Ita/result_2024-05-17 15:15:19.json new file mode 100644 index 0000000000000000000000000000000000000000..69cbab43458394517e93f468a37fc9b6c8a1e6a0 --- /dev/null +++ b/DeepMount00/Llama-3-8b-Ita/result_2024-05-17 15:15:19.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.44112627986348124, + "acc_stderr": 0.014509747749064663, + "acc_norm": 0.5051194539249146, + "acc_norm_stderr": 0.014610624890309154 + }, + "harness|ko_hellaswag|10": { + "acc": 0.38836885082652856, + "acc_stderr": 0.0048638313648480805, + "acc_norm": 0.5151364270065724, + "acc_norm_stderr": 0.004987494455523721 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5730994152046783, + "acc_stderr": 0.03793620616529917, + "acc_norm": 0.5730994152046783, + "acc_norm_stderr": 0.03793620616529917 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6310679611650486, + "acc_stderr": 0.0477761518115674, + "acc_norm": 0.6310679611650486, + "acc_norm_stderr": 0.0477761518115674 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.45721583652618136, + "acc_stderr": 0.01781438523853443, + "acc_norm": 0.45721583652618136, + "acc_norm_stderr": 0.01781438523853443 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.362962962962963, + "acc_stderr": 0.04153948404742399, + "acc_norm": 0.362962962962963, + "acc_norm_stderr": 0.04153948404742399 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847415, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847415 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4765957446808511, + "acc_stderr": 0.03265019475033582, + "acc_norm": 0.4765957446808511, + "acc_norm_stderr": 0.03265019475033582 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3614457831325301, + "acc_stderr": 0.0374005938202932, + "acc_norm": 0.3614457831325301, + "acc_norm_stderr": 0.0374005938202932 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5787781350482315, + "acc_stderr": 
0.02804339985821063, + "acc_norm": 0.5787781350482315, + "acc_norm_stderr": 0.02804339985821063 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5067264573991032, + "acc_stderr": 0.03355476596234354, + "acc_norm": 0.5067264573991032, + "acc_norm_stderr": 0.03355476596234354 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4961832061068702, + "acc_stderr": 0.043851623256015534, + "acc_norm": 0.4961832061068702, + "acc_norm_stderr": 0.043851623256015534 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.51010101010101, + "acc_stderr": 0.035616254886737454, + "acc_norm": 0.51010101010101, + "acc_norm_stderr": 0.035616254886737454 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5862068965517241, + "acc_stderr": 0.041042692118062316, + "acc_norm": 0.5862068965517241, + "acc_norm_stderr": 0.041042692118062316 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.39215686274509803, + "acc_stderr": 0.048580835742663454, + "acc_norm": 0.39215686274509803, + "acc_norm_stderr": 0.048580835742663454 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5714285714285714, + "acc_stderr": 0.03214536859788639, + "acc_norm": 0.5714285714285714, + "acc_norm_stderr": 0.03214536859788639 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.517948717948718, + "acc_stderr": 0.025334667080954897, + "acc_norm": 0.517948717948718, + "acc_norm_stderr": 0.025334667080954897 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6296296296296297, + 
"acc_stderr": 0.04668408033024931, + "acc_norm": 0.6296296296296297, + "acc_norm_stderr": 0.04668408033024931 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4630541871921182, + "acc_stderr": 0.035083705204426656, + "acc_norm": 0.4630541871921182, + "acc_norm_stderr": 0.035083705204426656 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5580645161290323, + "acc_stderr": 0.02825155790684974, + "acc_norm": 0.5580645161290323, + "acc_norm_stderr": 0.02825155790684974 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7478632478632479, + "acc_stderr": 0.02844796547623102, + "acc_norm": 0.7478632478632479, + "acc_norm_stderr": 0.02844796547623102 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5207547169811321, + "acc_stderr": 0.030746349975723463, + "acc_norm": 0.5207547169811321, + "acc_norm_stderr": 0.030746349975723463 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5363636363636364, + "acc_stderr": 0.04776449162396197, + "acc_norm": 0.5363636363636364, + "acc_norm_stderr": 0.04776449162396197 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3962962962962963, + "acc_stderr": 0.029822619458533997, + "acc_norm": 0.3962962962962963, + "acc_norm_stderr": 0.029822619458533997 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3841059602649007, + "acc_stderr": 0.03971301814719198, + "acc_norm": 0.3841059602649007, + "acc_norm_stderr": 0.03971301814719198 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6965174129353234, + "acc_stderr": 0.03251006816458619, + "acc_norm": 0.6965174129353234, + "acc_norm_stderr": 0.03251006816458619 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.47398843930635837, + "acc_stderr": 0.038073017265045105, + "acc_norm": 0.47398843930635837, + "acc_norm_stderr": 0.038073017265045105 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.40476190476190477, + "acc_stderr": 0.025279850397404904, + "acc_norm": 0.40476190476190477, + "acc_norm_stderr": 
0.025279850397404904 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4652777777777778, + "acc_stderr": 0.04171115858181618, + "acc_norm": 0.4652777777777778, + "acc_norm_stderr": 0.04171115858181618 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.74, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.74, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5549132947976878, + "acc_stderr": 0.02675625512966377, + "acc_norm": 0.5549132947976878, + "acc_norm_stderr": 0.02675625512966377 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4723926380368098, + "acc_stderr": 0.039223782906109894, + "acc_norm": 0.4723926380368098, + "acc_norm_stderr": 0.039223782906109894 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5462962962962963, + "acc_stderr": 0.0277012284685426, + "acc_norm": 0.5462962962962963, + "acc_norm_stderr": 0.0277012284685426 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5803108808290155, + "acc_stderr": 0.035615873276858834, + "acc_norm": 0.5803108808290155, + "acc_norm_stderr": 0.035615873276858834 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.34210526315789475, + "acc_stderr": 0.044629175353369376, + "acc_norm": 0.34210526315789475, + "acc_norm_stderr": 0.044629175353369376 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6110091743119266, + "acc_stderr": 0.020902300887392866, + "acc_norm": 0.6110091743119266, + "acc_norm_stderr": 0.020902300887392866 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.0442626668137991, + "acc_norm": 0.42857142857142855, + 
"acc_norm_stderr": 0.0442626668137991 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.565359477124183, + "acc_stderr": 0.028384256704883037, + "acc_norm": 0.565359477124183, + "acc_norm_stderr": 0.028384256704883037 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.64, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.64, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7024793388429752, + "acc_stderr": 0.04173349148083498, + "acc_norm": 0.7024793388429752, + "acc_norm_stderr": 0.04173349148083498 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5460526315789473, + "acc_stderr": 0.04051646342874143, + "acc_norm": 0.5460526315789473, + "acc_norm_stderr": 0.04051646342874143 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.46078431372549017, + "acc_stderr": 0.02016552331390791, + "acc_norm": 0.46078431372549017, + "acc_norm_stderr": 0.02016552331390791 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.35815602836879434, + "acc_stderr": 0.028602085862759422, + "acc_norm": 0.35815602836879434, + "acc_norm_stderr": 0.028602085862759422 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.45535714285714285, + "acc_stderr": 0.04726835553719099, + "acc_norm": 0.45535714285714285, + "acc_norm_stderr": 0.04726835553719099 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.03400603625538272, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.03400603625538272 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2860335195530726, + "acc_stderr": 0.015113972129062136, + "acc_norm": 0.2860335195530726, + "acc_norm_stderr": 0.015113972129062136 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.72, + "acc_stderr": 0.04512608598542129, 
+ "acc_norm": 0.72, + "acc_norm_stderr": 0.04512608598542129 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.40808823529411764, + "acc_stderr": 0.029855261393483927, + "acc_norm": 0.40808823529411764, + "acc_norm_stderr": 0.029855261393483927 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.636734693877551, + "acc_stderr": 0.030789051139030806, + "acc_norm": 0.636734693877551, + "acc_norm_stderr": 0.030789051139030806 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.03068582059661082, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.03068582059661082 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.38396349413298564, + "acc_stderr": 0.01242158783313423, + "acc_norm": 0.38396349413298564, + "acc_norm_stderr": 0.01242158783313423 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5784313725490197, + "acc_stderr": 0.03465868196380762, + "acc_norm": 0.5784313725490197, + "acc_norm_stderr": 0.03465868196380762 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6848484848484848, + "acc_stderr": 0.0362773057502241, + "acc_norm": 0.6848484848484848, + "acc_norm_stderr": 0.0362773057502241 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.38310893512851896, + "mc1_stderr": 0.017018461679389862, + "mc2": 0.5663893700124538, + "mc2_stderr": 0.016082183282294993 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5324675324675324, + "acc_stderr": 0.017154073716682865, + "acc_norm": 0.5572609208972845, + "acc_norm_stderr": 0.01707725413155622 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + 
"harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + 
"harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "DeepMount00/Llama-3-8b-Ita", + "model_sha": "c399bd706c749788d260ed5f47c3c5c3190f37d9", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Deepnoid/DND-v0.0-e1/result_2024-03-26 10:16:42.json b/Deepnoid/DND-v0.0-e1/result_2024-03-26 10:16:42.json new file mode 100644 index 0000000000000000000000000000000000000000..3d31c1f295f6cb19193fe5865d601ba8a3e130f8 --- /dev/null +++ b/Deepnoid/DND-v0.0-e1/result_2024-03-26 10:16:42.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.5819112627986348, + "acc_stderr": 0.014413988396996083, + "acc_norm": 0.6757679180887372, + "acc_norm_stderr": 0.01367881039951882 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3521210914160526, + "acc_stderr": 0.0047665533369174885, + "acc_norm": 0.49133638717386974, + "acc_norm_stderr": 0.004989032307320729 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5906432748538012, + "acc_stderr": 0.037712831076265434, + "acc_norm": 0.5906432748538012, + "acc_norm_stderr": 0.037712831076265434 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5922330097087378, + "acc_stderr": 0.048657775704107696, + "acc_norm": 0.5922330097087378, + "acc_norm_stderr": 0.048657775704107696 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6500638569604087, + "acc_stderr": 0.01705567979715043, + "acc_norm": 0.6500638569604087, + "acc_norm_stderr": 0.01705567979715043 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45925925925925926, + "acc_stderr": 0.04304979692464243, + 
"acc_norm": 0.45925925925925926, + "acc_norm_stderr": 0.04304979692464243 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.48936170212765956, + "acc_stderr": 0.03267862331014063, + "acc_norm": 0.48936170212765956, + "acc_norm_stderr": 0.03267862331014063 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.5542168674698795, + "acc_stderr": 0.03869543323472101, + "acc_norm": 0.5542168674698795, + "acc_norm_stderr": 0.03869543323472101 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.572347266881029, + "acc_stderr": 0.028099240775809574, + "acc_norm": 0.572347266881029, + "acc_norm_stderr": 0.028099240775809574 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5964125560538116, + "acc_stderr": 0.03292802819330313, + "acc_norm": 0.5964125560538116, + "acc_norm_stderr": 0.03292802819330313 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5267175572519084, + "acc_stderr": 0.04379024936553894, + "acc_norm": 0.5267175572519084, + "acc_norm_stderr": 0.04379024936553894 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6161616161616161, + "acc_stderr": 0.03464881675016338, + "acc_norm": 0.6161616161616161, + "acc_norm_stderr": 0.03464881675016338 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5379310344827586, + "acc_stderr": 0.04154659671707548, + "acc_norm": 0.5379310344827586, + "acc_norm_stderr": 0.04154659671707548 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04690650298201943, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04690650298201943 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5504201680672269, + "acc_stderr": 
0.03231293497137707, + "acc_norm": 0.5504201680672269, + "acc_norm_stderr": 0.03231293497137707 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5282051282051282, + "acc_stderr": 0.02531063925493386, + "acc_norm": 0.5282051282051282, + "acc_norm_stderr": 0.02531063925493386 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.04803752235190193, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.04803752235190193 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.458128078817734, + "acc_stderr": 0.03505630140785741, + "acc_norm": 0.458128078817734, + "acc_norm_stderr": 0.03505630140785741 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5516129032258065, + "acc_stderr": 0.02829205683011273, + "acc_norm": 0.5516129032258065, + "acc_norm_stderr": 0.02829205683011273 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7478632478632479, + "acc_stderr": 0.02844796547623102, + "acc_norm": 0.7478632478632479, + "acc_norm_stderr": 0.02844796547623102 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5320754716981132, + "acc_stderr": 0.03070948699255654, + "acc_norm": 0.5320754716981132, + "acc_norm_stderr": 0.03070948699255654 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6090909090909091, + "acc_stderr": 0.04673752333670239, + "acc_norm": 0.6090909090909091, + "acc_norm_stderr": 0.04673752333670239 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3592592592592593, + "acc_stderr": 0.02925290592725198, + "acc_norm": 0.3592592592592593, + "acc_norm_stderr": 0.02925290592725198 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 
0.37748344370860926, + "acc_stderr": 0.0395802723112157, + "acc_norm": 0.37748344370860926, + "acc_norm_stderr": 0.0395802723112157 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.681592039800995, + "acc_stderr": 0.03294118479054095, + "acc_norm": 0.681592039800995, + "acc_norm_stderr": 0.03294118479054095 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5375722543352601, + "acc_stderr": 0.0380168510452446, + "acc_norm": 0.5375722543352601, + "acc_norm_stderr": 0.0380168510452446 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.02510742548113728, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.02510742548113728 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4652777777777778, + "acc_stderr": 0.04171115858181618, + "acc_norm": 0.4652777777777778, + "acc_norm_stderr": 0.04171115858181618 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.74, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.74, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5346820809248555, + "acc_stderr": 0.026854257928258893, + "acc_norm": 0.5346820809248555, + "acc_norm_stderr": 0.026854257928258893 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5766871165644172, + "acc_stderr": 0.03881891213334384, + "acc_norm": 0.5766871165644172, + "acc_norm_stderr": 0.03881891213334384 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5987654320987654, + "acc_stderr": 0.027272582849839806, + "acc_norm": 0.5987654320987654, + "acc_norm_stderr": 0.027272582849839806 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 
0.6580310880829016, + "acc_stderr": 0.03423465100104283, + "acc_norm": 0.6580310880829016, + "acc_norm_stderr": 0.03423465100104283 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.37719298245614036, + "acc_stderr": 0.04559522141958215, + "acc_norm": 0.37719298245614036, + "acc_norm_stderr": 0.04559522141958215 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6954128440366972, + "acc_stderr": 0.01973229942035404, + "acc_norm": 0.6954128440366972, + "acc_norm_stderr": 0.01973229942035404 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.373015873015873, + "acc_stderr": 0.04325506042017086, + "acc_norm": 0.373015873015873, + "acc_norm_stderr": 0.04325506042017086 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5522875816993464, + "acc_stderr": 0.028472938478033522, + "acc_norm": 0.5522875816993464, + "acc_norm_stderr": 0.028472938478033522 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.57, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.57, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.043913262867240704, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.043913262867240704 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5131578947368421, + "acc_stderr": 0.04067533136309172, + "acc_norm": 0.5131578947368421, + "acc_norm_stderr": 0.04067533136309172 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5081699346405228, + "acc_stderr": 0.02022513434305727, + "acc_norm": 0.5081699346405228, + "acc_norm_stderr": 0.02022513434305727 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.4148936170212766, + "acc_stderr": 0.0293922365846125, + "acc_norm": 0.4148936170212766, + "acc_norm_stderr": 0.0293922365846125 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.04547960999764376, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.04547960999764376 + }, + 
"harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.44907407407407407, + "acc_stderr": 0.03392238405321617, + "acc_norm": 0.44907407407407407, + "acc_norm_stderr": 0.03392238405321617 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.28938547486033517, + "acc_stderr": 0.015166544550490303, + "acc_norm": 0.28938547486033517, + "acc_norm_stderr": 0.015166544550490303 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.59, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.59, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5294117647058824, + "acc_stderr": 0.030320243265004123, + "acc_norm": 0.5294117647058824, + "acc_norm_stderr": 0.030320243265004123 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5714285714285714, + "acc_stderr": 0.03168091161233882, + "acc_norm": 0.5714285714285714, + "acc_norm_stderr": 0.03168091161233882 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6075949367088608, + "acc_stderr": 0.0317847187456473, + "acc_norm": 0.6075949367088608, + "acc_norm_stderr": 0.0317847187456473 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.4048239895697523, + "acc_stderr": 0.012536743830953977, + "acc_norm": 0.4048239895697523, + "acc_norm_stderr": 0.012536743830953977 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5196078431372549, + "acc_stderr": 0.03506612560524866, + "acc_norm": 0.5196078431372549, + "acc_norm_stderr": 0.03506612560524866 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5333333333333333, + "acc_stderr": 0.03895658065271847, + "acc_norm": 0.5333333333333333, + "acc_norm_stderr": 0.03895658065271847 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.46266829865361075, + "mc1_stderr": 0.017454645150970588, + "mc2": 0.6141257090429322, + 
"mc2_stderr": 0.015748348539901636 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.24321133412042503, + "acc_stderr": 0.014750068360453278, + "acc_norm": 0.32231404958677684, + "acc_norm_stderr": 0.016068253615813953 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + 
"harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Deepnoid/DND-v0.0-e1", + "model_sha": "3d2a1d996a2c9b03847d95fa67476aaf2e11a17a", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Deepnoid/OPEN-SOLAR-KO-10.7B-v13/result_2024-03-06 03:11:08.json b/Deepnoid/OPEN-SOLAR-KO-10.7B-v13/result_2024-03-06 03:11:08.json new file mode 100644 index 0000000000000000000000000000000000000000..3da8521fe481e31307e3e3c3d48272f6043c49b1 --- /dev/null +++ b/Deepnoid/OPEN-SOLAR-KO-10.7B-v13/result_2024-03-06 03:11:08.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4513651877133106, + "acc_stderr": 0.014542104569955262, + "acc_norm": 0.5008532423208191, + "acc_norm_stderr": 0.014611369529813265 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4470225054769966, + "acc_stderr": 0.004961693567208819, + "acc_norm": 0.6060545708026289, + "acc_norm_stderr": 0.004876243842318606 + 
}, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.631578947368421, + "acc_stderr": 0.036996580176568775, + "acc_norm": 0.631578947368421, + "acc_norm_stderr": 0.036996580176568775 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6601941747572816, + "acc_stderr": 0.04689765937278134, + "acc_norm": 0.6601941747572816, + "acc_norm_stderr": 0.04689765937278134 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6819923371647509, + "acc_stderr": 0.0166534862756154, + "acc_norm": 0.6819923371647509, + "acc_norm_stderr": 0.0166534862756154 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4888888888888889, + "acc_stderr": 0.04318275491977976, + "acc_norm": 0.4888888888888889, + "acc_norm_stderr": 0.04318275491977976 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4765957446808511, + "acc_stderr": 0.03265019475033583, + "acc_norm": 0.4765957446808511, + "acc_norm_stderr": 0.03265019475033583 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.463855421686747, + "acc_stderr": 0.03882310850890594, + "acc_norm": 0.463855421686747, + "acc_norm_stderr": 0.03882310850890594 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5627009646302251, + "acc_stderr": 0.028173917761762906, + "acc_norm": 0.5627009646302251, + "acc_norm_stderr": 0.028173917761762906 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5560538116591929, + "acc_stderr": 0.03334625674242728, + "acc_norm": 0.5560538116591929, + "acc_norm_stderr": 0.03334625674242728 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5648854961832062, + "acc_stderr": 0.04348208051644858, + "acc_norm": 0.5648854961832062, + "acc_norm_stderr": 0.04348208051644858 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956913, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956913 + }, + 
"harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6161616161616161, + "acc_stderr": 0.03464881675016338, + "acc_norm": 0.6161616161616161, + "acc_norm_stderr": 0.03464881675016338 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4689655172413793, + "acc_stderr": 0.04158632762097828, + "acc_norm": 0.4689655172413793, + "acc_norm_stderr": 0.04158632762097828 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.04158307533083286, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.04158307533083286 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4495798319327731, + "acc_stderr": 0.03231293497137707, + "acc_norm": 0.4495798319327731, + "acc_norm_stderr": 0.03231293497137707 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5307692307692308, + "acc_stderr": 0.025302958890850154, + "acc_norm": 0.5307692307692308, + "acc_norm_stderr": 0.025302958890850154 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.63, + "acc_stderr": 0.04852365870939098, + "acc_norm": 0.63, + "acc_norm_stderr": 0.04852365870939098 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3645320197044335, + "acc_stderr": 0.0338640574606209, + "acc_norm": 0.3645320197044335, + "acc_norm_stderr": 0.0338640574606209 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5548387096774193, + "acc_stderr": 0.028272410186214906, + "acc_norm": 0.5548387096774193, + "acc_norm_stderr": 0.028272410186214906 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7692307692307693, + "acc_stderr": 0.0276019213814176, + "acc_norm": 0.7692307692307693, + "acc_norm_stderr": 
0.0276019213814176 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5358490566037736, + "acc_stderr": 0.030693675018458006, + "acc_norm": 0.5358490566037736, + "acc_norm_stderr": 0.030693675018458006 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5363636363636364, + "acc_stderr": 0.04776449162396197, + "acc_norm": 0.5363636363636364, + "acc_norm_stderr": 0.04776449162396197 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.32222222222222224, + "acc_stderr": 0.0284934650910286, + "acc_norm": 0.32222222222222224, + "acc_norm_stderr": 0.0284934650910286 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.32450331125827814, + "acc_stderr": 0.03822746937658753, + "acc_norm": 0.32450331125827814, + "acc_norm_stderr": 0.03822746937658753 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6517412935323383, + "acc_stderr": 0.033687874661154596, + "acc_norm": 0.6517412935323383, + "acc_norm_stderr": 0.033687874661154596 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.44508670520231214, + "acc_stderr": 0.03789401760283647, + "acc_norm": 0.44508670520231214, + "acc_norm_stderr": 0.03789401760283647 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3306878306878307, + "acc_stderr": 0.02422996529842507, + "acc_norm": 0.3306878306878307, + "acc_norm_stderr": 0.02422996529842507 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5, + "acc_stderr": 0.04181210050035455, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04181210050035455 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.73, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.73, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5289017341040463, + "acc_stderr": 0.026874085883518348, + "acc_norm": 0.5289017341040463, + "acc_norm_stderr": 
0.026874085883518348 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.50920245398773, + "acc_stderr": 0.03927705600787443, + "acc_norm": 0.50920245398773, + "acc_norm_stderr": 0.03927705600787443 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4845679012345679, + "acc_stderr": 0.02780749004427619, + "acc_norm": 0.4845679012345679, + "acc_norm_stderr": 0.02780749004427619 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6321243523316062, + "acc_stderr": 0.03480175668466036, + "acc_norm": 0.6321243523316062, + "acc_norm_stderr": 0.03480175668466036 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.32456140350877194, + "acc_stderr": 0.04404556157374768, + "acc_norm": 0.32456140350877194, + "acc_norm_stderr": 0.04404556157374768 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5944954128440367, + "acc_stderr": 0.02105099799189684, + "acc_norm": 0.5944954128440367, + "acc_norm_stderr": 0.02105099799189684 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.31746031746031744, + "acc_stderr": 0.04163453031302859, + "acc_norm": 0.31746031746031744, + "acc_norm_stderr": 0.04163453031302859 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4869281045751634, + "acc_stderr": 0.028620130800700246, + "acc_norm": 0.4869281045751634, + "acc_norm_stderr": 0.028620130800700246 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.71900826446281, + "acc_stderr": 0.04103203830514511, + "acc_norm": 0.71900826446281, + "acc_norm_stderr": 0.04103203830514511 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4934210526315789, + "acc_stderr": 0.040685900502249704, + "acc_norm": 0.4934210526315789, + 
"acc_norm_stderr": 0.040685900502249704 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.44281045751633985, + "acc_stderr": 0.02009508315457734, + "acc_norm": 0.44281045751633985, + "acc_norm_stderr": 0.02009508315457734 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.33687943262411346, + "acc_stderr": 0.028195534873966727, + "acc_norm": 0.33687943262411346, + "acc_norm_stderr": 0.028195534873966727 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.042878587513404544, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.042878587513404544 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.39351851851851855, + "acc_stderr": 0.03331747876370312, + "acc_norm": 0.39351851851851855, + "acc_norm_stderr": 0.03331747876370312 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2782122905027933, + "acc_stderr": 0.014987325439963556, + "acc_norm": 0.2782122905027933, + "acc_norm_stderr": 0.014987325439963556 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.04999999999999999, + "acc_norm": 0.45, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3860294117647059, + "acc_stderr": 0.029573269134411124, + "acc_norm": 0.3860294117647059, + "acc_norm_stderr": 0.029573269134411124 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5714285714285714, + "acc_stderr": 0.03168091161233882, + "acc_norm": 0.5714285714285714, + "acc_norm_stderr": 0.03168091161233882 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.70042194092827, + "acc_stderr": 0.029818024749753095, + "acc_norm": 0.70042194092827, + "acc_norm_stderr": 0.029818024749753095 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 
0.34615384615384615, + "acc_stderr": 0.012150699768228572, + "acc_norm": 0.34615384615384615, + "acc_norm_stderr": 0.012150699768228572 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6029411764705882, + "acc_stderr": 0.03434131164719129, + "acc_norm": 0.6029411764705882, + "acc_norm_stderr": 0.03434131164719129 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5878787878787879, + "acc_stderr": 0.03843566993588717, + "acc_norm": 0.5878787878787879, + "acc_norm_stderr": 0.03843566993588717 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24357405140758873, + "mc1_stderr": 0.015026354824910782, + "mc2": 0.38968264251427537, + "mc2_stderr": 0.01488976046530435 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5572609208972845, + "acc_stderr": 0.017077254131556224, + "acc_norm": 0.5808736717827627, + "acc_norm_stderr": 0.016963995010862796 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + 
"harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Deepnoid/OPEN-SOLAR-KO-10.7B-v13", + "model_sha": "4637eaaac985e309568132b6d961ba3f639d7640", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git 
a/Deepnoid/OPEN-SOLAR-KO-10.7B-v14/result_2024-03-07 04:46:53.json b/Deepnoid/OPEN-SOLAR-KO-10.7B-v14/result_2024-03-07 04:46:53.json new file mode 100644 index 0000000000000000000000000000000000000000..0505a0715a8a93f6c8fcfa1a33f4f1a6967b835a --- /dev/null +++ b/Deepnoid/OPEN-SOLAR-KO-10.7B-v14/result_2024-03-07 04:46:53.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.43686006825938567, + "acc_stderr": 0.014494421584256517, + "acc_norm": 0.49573378839590443, + "acc_norm_stderr": 0.014610858923956959 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4444333798048198, + "acc_stderr": 0.004958872288442145, + "acc_norm": 0.6016729735112527, + "acc_norm_stderr": 0.004885529674958332 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6374269005847953, + "acc_stderr": 0.0368713061556206, + "acc_norm": 0.6374269005847953, + "acc_norm_stderr": 0.0368713061556206 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6796116504854369, + "acc_stderr": 0.04620284082280041, + "acc_norm": 0.6796116504854369, + "acc_norm_stderr": 0.04620284082280041 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6743295019157088, + "acc_stderr": 0.016757989458549682, + "acc_norm": 0.6743295019157088, + "acc_norm_stderr": 0.016757989458549682 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.043163785995113245, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.043163785995113245 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.451063829787234, + "acc_stderr": 0.032529096196131965, + "acc_norm": 0.451063829787234, + "acc_norm_stderr": 0.032529096196131965 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.43373493975903615, + "acc_stderr": 0.03858158940685515, + "acc_norm": 0.43373493975903615, + "acc_norm_stderr": 0.03858158940685515 + }, + 
"harness|ko_mmlu_philosophy|5": { + "acc": 0.5691318327974276, + "acc_stderr": 0.028125340983972714, + "acc_norm": 0.5691318327974276, + "acc_norm_stderr": 0.028125340983972714 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5605381165919282, + "acc_stderr": 0.03331092511038179, + "acc_norm": 0.5605381165919282, + "acc_norm_stderr": 0.03331092511038179 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5801526717557252, + "acc_stderr": 0.04328577215262973, + "acc_norm": 0.5801526717557252, + "acc_norm_stderr": 0.04328577215262973 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956913, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956913 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6212121212121212, + "acc_stderr": 0.03456088731993747, + "acc_norm": 0.6212121212121212, + "acc_norm_stderr": 0.03456088731993747 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4827586206896552, + "acc_stderr": 0.041641887201693775, + "acc_norm": 0.4827586206896552, + "acc_norm_stderr": 0.041641887201693775 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.0438986995680878, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.0438986995680878 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.46638655462184875, + "acc_stderr": 0.03240501447690071, + "acc_norm": 0.46638655462184875, + "acc_norm_stderr": 0.03240501447690071 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5333333333333333, + "acc_stderr": 0.025294608023986462, + "acc_norm": 0.5333333333333333, + "acc_norm_stderr": 0.025294608023986462 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.63, + "acc_stderr": 0.04852365870939098, + "acc_norm": 0.63, + "acc_norm_stderr": 0.04852365870939098 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + 
}, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5185185185185185, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.5185185185185185, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3694581280788177, + "acc_stderr": 0.03395970381998573, + "acc_norm": 0.3694581280788177, + "acc_norm_stderr": 0.03395970381998573 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5548387096774193, + "acc_stderr": 0.028272410186214906, + "acc_norm": 0.5548387096774193, + "acc_norm_stderr": 0.028272410186214906 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7606837606837606, + "acc_stderr": 0.027951826808924336, + "acc_norm": 0.7606837606837606, + "acc_norm_stderr": 0.027951826808924336 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5471698113207547, + "acc_stderr": 0.03063562795796182, + "acc_norm": 0.5471698113207547, + "acc_norm_stderr": 0.03063562795796182 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4909090909090909, + "acc_stderr": 0.0478833976870286, + "acc_norm": 0.4909090909090909, + "acc_norm_stderr": 0.0478833976870286 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.027840811495871916, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.027840811495871916 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3509933774834437, + "acc_stderr": 0.03896981964257375, + "acc_norm": 0.3509933774834437, + "acc_norm_stderr": 0.03896981964257375 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6467661691542289, + "acc_stderr": 0.03379790611796777, + "acc_norm": 0.6467661691542289, + "acc_norm_stderr": 0.03379790611796777 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4277456647398844, + "acc_stderr": 0.037724468575180276, + "acc_norm": 0.4277456647398844, + "acc_norm_stderr": 0.037724468575180276 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.31746031746031744, + "acc_stderr": 
0.02397386199899207, + "acc_norm": 0.31746031746031744, + "acc_norm_stderr": 0.02397386199899207 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5069444444444444, + "acc_stderr": 0.04180806750294938, + "acc_norm": 0.5069444444444444, + "acc_norm_stderr": 0.04180806750294938 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.74, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.74, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5115606936416185, + "acc_stderr": 0.026911898686377927, + "acc_norm": 0.5115606936416185, + "acc_norm_stderr": 0.026911898686377927 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.48466257668711654, + "acc_stderr": 0.039265223787088445, + "acc_norm": 0.48466257668711654, + "acc_norm_stderr": 0.039265223787088445 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.47530864197530864, + "acc_stderr": 0.02778680093142745, + "acc_norm": 0.47530864197530864, + "acc_norm_stderr": 0.02778680093142745 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6113989637305699, + "acc_stderr": 0.035177397963731316, + "acc_norm": 0.6113989637305699, + "acc_norm_stderr": 0.035177397963731316 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3157894736842105, + "acc_stderr": 0.04372748290278007, + "acc_norm": 0.3157894736842105, + "acc_norm_stderr": 0.04372748290278007 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6146788990825688, + "acc_stderr": 0.020865850852794108, + "acc_norm": 0.6146788990825688, + "acc_norm_stderr": 0.020865850852794108 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2857142857142857, + 
"acc_stderr": 0.0404061017820884, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.0404061017820884 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5, + "acc_stderr": 0.028629916715693413, + "acc_norm": 0.5, + "acc_norm_stderr": 0.028629916715693413 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.57, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.57, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.71900826446281, + "acc_stderr": 0.04103203830514511, + "acc_norm": 0.71900826446281, + "acc_norm_stderr": 0.04103203830514511 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5263157894736842, + "acc_stderr": 0.04063302731486671, + "acc_norm": 0.5263157894736842, + "acc_norm_stderr": 0.04063302731486671 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.43137254901960786, + "acc_stderr": 0.020036393768352638, + "acc_norm": 0.43137254901960786, + "acc_norm_stderr": 0.020036393768352638 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.34397163120567376, + "acc_stderr": 0.02833801742861132, + "acc_norm": 0.34397163120567376, + "acc_norm_stderr": 0.02833801742861132 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.04287858751340455, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.04287858751340455 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.03293377139415191, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.03293377139415191 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2759776536312849, + "acc_stderr": 0.014950103002475347, + "acc_norm": 0.2759776536312849, + "acc_norm_stderr": 0.014950103002475347 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.6, 
+ "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4007352941176471, + "acc_stderr": 0.029768263528933095, + "acc_norm": 0.4007352941176471, + "acc_norm_stderr": 0.029768263528933095 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5795918367346938, + "acc_stderr": 0.031601069934496004, + "acc_norm": 0.5795918367346938, + "acc_norm_stderr": 0.031601069934496004 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.679324894514768, + "acc_stderr": 0.030381931949990407, + "acc_norm": 0.679324894514768, + "acc_norm_stderr": 0.030381931949990407 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3500651890482399, + "acc_stderr": 0.01218255231321517, + "acc_norm": 0.3500651890482399, + "acc_norm_stderr": 0.01218255231321517 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5882352941176471, + "acc_stderr": 0.03454236585380609, + "acc_norm": 0.5882352941176471, + "acc_norm_stderr": 0.03454236585380609 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6, + "acc_stderr": 0.03825460278380025, + "acc_norm": 0.6, + "acc_norm_stderr": 0.03825460278380025 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2460220318237454, + "mc1_stderr": 0.015077219200662581, + "mc2": 0.39548731719111496, + "mc2_stderr": 0.015045880520942254 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5041322314049587, + "acc_stderr": 0.017189767032130817, + "acc_norm": 0.5218417945690673, + "acc_norm_stderr": 0.017173944474294385 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 
1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + 
"harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Deepnoid/OPEN-SOLAR-KO-10.7B-v14", + "model_sha": "0d67dec530f606541a40f8705caf78e2bcc9caa8", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Deepnoid/deep-solar-Rev-v2.0.4/result_2024-03-21 01:04:06.json b/Deepnoid/deep-solar-Rev-v2.0.4/result_2024-03-21 01:04:06.json new file mode 100644 index 0000000000000000000000000000000000000000..c16993c21d930548970b228354e30681d398c35f --- /dev/null +++ b/Deepnoid/deep-solar-Rev-v2.0.4/result_2024-03-21 01:04:06.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.6715017064846417, + "acc_stderr": 0.013724978465537304, + "acc_norm": 0.7158703071672355, + "acc_norm_stderr": 0.013179442447653887 + }, + "harness|ko_hellaswag|10": { + "acc": 0.43288189603664606, + "acc_stderr": 0.004944620712318277, + "acc_norm": 0.5691097390957977, + "acc_norm_stderr": 0.004941887610849033 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6374269005847953, + "acc_stderr": 0.036871306155620606, + "acc_norm": 0.6374269005847953, + "acc_norm_stderr": 0.036871306155620606 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6990291262135923, + "acc_stderr": 0.04541609446503948, + "acc_norm": 0.6990291262135923, + "acc_norm_stderr": 0.04541609446503948 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.70242656449553, + "acc_stderr": 0.016349111912909425, + "acc_norm": 0.70242656449553, + "acc_norm_stderr": 0.016349111912909425 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45185185185185184, + 
"acc_stderr": 0.042992689054808624, + "acc_norm": 0.45185185185185184, + "acc_norm_stderr": 0.042992689054808624 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.5191489361702127, + "acc_stderr": 0.0326620429906468, + "acc_norm": 0.5191489361702127, + "acc_norm_stderr": 0.0326620429906468 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.5060240963855421, + "acc_stderr": 0.03892212195333045, + "acc_norm": 0.5060240963855421, + "acc_norm_stderr": 0.03892212195333045 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6237942122186495, + "acc_stderr": 0.027513925683549427, + "acc_norm": 0.6237942122186495, + "acc_norm_stderr": 0.027513925683549427 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5964125560538116, + "acc_stderr": 0.03292802819330314, + "acc_norm": 0.5964125560538116, + "acc_norm_stderr": 0.03292802819330314 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6030534351145038, + "acc_stderr": 0.04291135671009224, + "acc_norm": 0.6030534351145038, + "acc_norm_stderr": 0.04291135671009224 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7121212121212122, + "acc_stderr": 0.03225883512300993, + "acc_norm": 0.7121212121212122, + "acc_norm_stderr": 0.03225883512300993 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5241379310344828, + "acc_stderr": 0.0416180850350153, + "acc_norm": 0.5241379310344828, + "acc_norm_stderr": 0.0416180850350153 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3431372549019608, + "acc_stderr": 0.04724007352383888, + "acc_norm": 0.3431372549019608, + "acc_norm_stderr": 0.04724007352383888 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 
0.6218487394957983, + "acc_stderr": 0.03149930577784906, + "acc_norm": 0.6218487394957983, + "acc_norm_stderr": 0.03149930577784906 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5820512820512821, + "acc_stderr": 0.025007329882461203, + "acc_norm": 0.5820512820512821, + "acc_norm_stderr": 0.025007329882461203 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.04803752235190192, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.04803752235190192 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4482758620689655, + "acc_stderr": 0.03499113137676744, + "acc_norm": 0.4482758620689655, + "acc_norm_stderr": 0.03499113137676744 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6387096774193548, + "acc_stderr": 0.02732754844795755, + "acc_norm": 0.6387096774193548, + "acc_norm_stderr": 0.02732754844795755 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7905982905982906, + "acc_stderr": 0.026655699653922754, + "acc_norm": 0.7905982905982906, + "acc_norm_stderr": 0.026655699653922754 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5962264150943396, + "acc_stderr": 0.03019761160019795, + "acc_norm": 0.5962264150943396, + "acc_norm_stderr": 0.03019761160019795 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5363636363636364, + "acc_stderr": 0.04776449162396197, + "acc_norm": 0.5363636363636364, + "acc_norm_stderr": 0.04776449162396197 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.32592592592592595, + "acc_stderr": 0.02857834836547307, + "acc_norm": 0.32592592592592595, + "acc_norm_stderr": 0.02857834836547307 + }, + 
"harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3708609271523179, + "acc_stderr": 0.03943966699183629, + "acc_norm": 0.3708609271523179, + "acc_norm_stderr": 0.03943966699183629 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.681592039800995, + "acc_stderr": 0.03294118479054095, + "acc_norm": 0.681592039800995, + "acc_norm_stderr": 0.03294118479054095 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5086705202312138, + "acc_stderr": 0.03811890988940412, + "acc_norm": 0.5086705202312138, + "acc_norm_stderr": 0.03811890988940412 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3941798941798942, + "acc_stderr": 0.02516798233389414, + "acc_norm": 0.3941798941798942, + "acc_norm_stderr": 0.02516798233389414 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4930555555555556, + "acc_stderr": 0.04180806750294938, + "acc_norm": 0.4930555555555556, + "acc_norm_stderr": 0.04180806750294938 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.78, + "acc_stderr": 0.041633319989322626, + "acc_norm": 0.78, + "acc_norm_stderr": 0.041633319989322626 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5751445086705202, + "acc_stderr": 0.026613350840261736, + "acc_norm": 0.5751445086705202, + "acc_norm_stderr": 0.026613350840261736 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.588957055214724, + "acc_stderr": 0.038656978537853624, + "acc_norm": 0.588957055214724, + "acc_norm_stderr": 0.038656978537853624 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6080246913580247, + "acc_stderr": 0.027163686038271146, + "acc_norm": 0.6080246913580247, + "acc_norm_stderr": 0.027163686038271146 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + 
"harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6683937823834197, + "acc_stderr": 0.03397636541089118, + "acc_norm": 0.6683937823834197, + "acc_norm_stderr": 0.03397636541089118 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.40350877192982454, + "acc_stderr": 0.046151869625837026, + "acc_norm": 0.40350877192982454, + "acc_norm_stderr": 0.046151869625837026 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.7394495412844037, + "acc_stderr": 0.018819182034850068, + "acc_norm": 0.7394495412844037, + "acc_norm_stderr": 0.018819182034850068 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3968253968253968, + "acc_stderr": 0.043758884927270605, + "acc_norm": 0.3968253968253968, + "acc_norm_stderr": 0.043758884927270605 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5784313725490197, + "acc_stderr": 0.02827549015679145, + "acc_norm": 0.5784313725490197, + "acc_norm_stderr": 0.02827549015679145 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.57, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.57, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7603305785123967, + "acc_stderr": 0.03896878985070415, + "acc_norm": 0.7603305785123967, + "acc_norm_stderr": 0.03896878985070415 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5855263157894737, + "acc_stderr": 0.04008973785779206, + "acc_norm": 0.5855263157894737, + "acc_norm_stderr": 0.04008973785779206 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5081699346405228, + "acc_stderr": 0.020225134343057272, + "acc_norm": 0.5081699346405228, + "acc_norm_stderr": 0.020225134343057272 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.40070921985815605, + "acc_stderr": 0.029233465745573086, + "acc_norm": 0.40070921985815605, + "acc_norm_stderr": 0.029233465745573086 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4017857142857143, + "acc_stderr": 0.04653333146973647, + 
"acc_norm": 0.4017857142857143, + "acc_norm_stderr": 0.04653333146973647 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5, + "acc_stderr": 0.034099716973523674, + "acc_norm": 0.5, + "acc_norm_stderr": 0.034099716973523674 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.264804469273743, + "acc_stderr": 0.014756906483260664, + "acc_norm": 0.264804469273743, + "acc_norm_stderr": 0.014756906483260664 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.63, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.63, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5588235294117647, + "acc_stderr": 0.03016191193076711, + "acc_norm": 0.5588235294117647, + "acc_norm_stderr": 0.03016191193076711 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6204081632653061, + "acc_stderr": 0.03106721126287245, + "acc_norm": 0.6204081632653061, + "acc_norm_stderr": 0.03106721126287245 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6751054852320675, + "acc_stderr": 0.03048603938910529, + "acc_norm": 0.6751054852320675, + "acc_norm_stderr": 0.03048603938910529 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.4074315514993481, + "acc_stderr": 0.01254947371421222, + "acc_norm": 0.4074315514993481, + "acc_norm_stderr": 0.01254947371421222 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5931372549019608, + "acc_stderr": 0.03447891136353382, + "acc_norm": 0.5931372549019608, + "acc_norm_stderr": 0.03447891136353382 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5818181818181818, + "acc_stderr": 0.03851716319398393, + "acc_norm": 0.5818181818181818, + "acc_norm_stderr": 0.03851716319398393 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.5397796817625459, + 
"mc1_stderr": 0.01744801722396087, + "mc2": 0.6515843486495623, + "mc2_stderr": 0.014653976350130278 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.538370720188902, + "acc_stderr": 0.017139660221845557, + "acc_norm": 0.5855962219598583, + "acc_norm_stderr": 0.01693658338394361 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + 
"harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Deepnoid/deep-solar-Rev-v2.0.4", + "model_sha": "39148431f98c7e3fdc598d54b72ccbd555cb1fce", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Deepnoid/deep-solar-Rev-v3.0.4/result_2024-03-21 01:43:11.json b/Deepnoid/deep-solar-Rev-v3.0.4/result_2024-03-21 01:43:11.json new file mode 100644 index 0000000000000000000000000000000000000000..4ea8cd0ebd47086d2ab40f82dea68a2b231c6ca5 --- /dev/null +++ b/Deepnoid/deep-solar-Rev-v3.0.4/result_2024-03-21 01:43:11.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.6774744027303754, + "acc_stderr": 0.013659980894277371, + "acc_norm": 0.7226962457337884, + "acc_norm_stderr": 0.013082095839059374 + }, + "harness|ko_hellaswag|10": { + "acc": 0.44054969129655447, + "acc_stderr": 0.004954384702021653, + "acc_norm": 0.5784704242182832, 
+ "acc_norm_stderr": 0.004927948061486062 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.672514619883041, + "acc_stderr": 0.035993357714560276, + "acc_norm": 0.672514619883041, + "acc_norm_stderr": 0.035993357714560276 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6893203883495146, + "acc_stderr": 0.04582124160161551, + "acc_norm": 0.6893203883495146, + "acc_norm_stderr": 0.04582124160161551 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.7266922094508301, + "acc_stderr": 0.01593668106262856, + "acc_norm": 0.7266922094508301, + "acc_norm_stderr": 0.01593668106262856 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4740740740740741, + "acc_stderr": 0.04313531696750574, + "acc_norm": 0.4740740740740741, + "acc_norm_stderr": 0.04313531696750574 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.5404255319148936, + "acc_stderr": 0.032579014820998335, + "acc_norm": 0.5404255319148936, + "acc_norm_stderr": 0.032579014820998335 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.5301204819277109, + "acc_stderr": 0.03885425420866767, + "acc_norm": 0.5301204819277109, + "acc_norm_stderr": 0.03885425420866767 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6205787781350482, + "acc_stderr": 0.02755994980234782, + "acc_norm": 0.6205787781350482, + "acc_norm_stderr": 0.02755994980234782 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.57847533632287, + "acc_stderr": 0.03314190222110658, + "acc_norm": 0.57847533632287, + "acc_norm_stderr": 0.03314190222110658 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5954198473282443, + "acc_stderr": 0.043046937953806645, + "acc_norm": 0.5954198473282443, + "acc_norm_stderr": 0.043046937953806645 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 
0.050211673156867795 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7373737373737373, + "acc_stderr": 0.03135305009533086, + "acc_norm": 0.7373737373737373, + "acc_norm_stderr": 0.03135305009533086 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5241379310344828, + "acc_stderr": 0.0416180850350153, + "acc_norm": 0.5241379310344828, + "acc_norm_stderr": 0.0416180850350153 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.04835503696107223, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.04835503696107223 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6680672268907563, + "acc_stderr": 0.03058869701378364, + "acc_norm": 0.6680672268907563, + "acc_norm_stderr": 0.03058869701378364 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.6205128205128205, + "acc_stderr": 0.024603626924097417, + "acc_norm": 0.6205128205128205, + "acc_norm_stderr": 0.024603626924097417 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.63, + "acc_stderr": 0.04852365870939098, + "acc_norm": 0.63, + "acc_norm_stderr": 0.04852365870939098 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6296296296296297, + "acc_stderr": 0.04668408033024931, + "acc_norm": 0.6296296296296297, + "acc_norm_stderr": 0.04668408033024931 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.43842364532019706, + "acc_stderr": 0.03491207857486519, + "acc_norm": 0.43842364532019706, + "acc_norm_stderr": 0.03491207857486519 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6161290322580645, + "acc_stderr": 0.027666182075539645, + "acc_norm": 0.6161290322580645, + "acc_norm_stderr": 0.027666182075539645 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7991452991452992, + "acc_stderr": 0.02624677294689047, + 
"acc_norm": 0.7991452991452992, + "acc_norm_stderr": 0.02624677294689047 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5735849056603773, + "acc_stderr": 0.030437794342983045, + "acc_norm": 0.5735849056603773, + "acc_norm_stderr": 0.030437794342983045 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5727272727272728, + "acc_stderr": 0.047381987035454834, + "acc_norm": 0.5727272727272728, + "acc_norm_stderr": 0.047381987035454834 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.32592592592592595, + "acc_stderr": 0.028578348365473072, + "acc_norm": 0.32592592592592595, + "acc_norm_stderr": 0.028578348365473072 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.37748344370860926, + "acc_stderr": 0.0395802723112157, + "acc_norm": 0.37748344370860926, + "acc_norm_stderr": 0.0395802723112157 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7263681592039801, + "acc_stderr": 0.03152439186555403, + "acc_norm": 0.7263681592039801, + "acc_norm_stderr": 0.03152439186555403 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5606936416184971, + "acc_stderr": 0.03784271932887467, + "acc_norm": 0.5606936416184971, + "acc_norm_stderr": 0.03784271932887467 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.455026455026455, + "acc_stderr": 0.02564692836104939, + "acc_norm": 0.455026455026455, + "acc_norm_stderr": 0.02564692836104939 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5694444444444444, + "acc_stderr": 0.04140685639111503, + "acc_norm": 0.5694444444444444, + "acc_norm_stderr": 0.04140685639111503 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.77, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.77, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5606936416184971, + 
"acc_stderr": 0.026720034380514998, + "acc_norm": 0.5606936416184971, + "acc_norm_stderr": 0.026720034380514998 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.6134969325153374, + "acc_stderr": 0.03825825548848607, + "acc_norm": 0.6134969325153374, + "acc_norm_stderr": 0.03825825548848607 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6419753086419753, + "acc_stderr": 0.026675611926037096, + "acc_norm": 0.6419753086419753, + "acc_norm_stderr": 0.026675611926037096 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.694300518134715, + "acc_stderr": 0.033248379397581594, + "acc_norm": 0.694300518134715, + "acc_norm_stderr": 0.033248379397581594 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.4298245614035088, + "acc_stderr": 0.04657047260594963, + "acc_norm": 0.4298245614035088, + "acc_norm_stderr": 0.04657047260594963 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.7559633027522936, + "acc_stderr": 0.01841528635141643, + "acc_norm": 0.7559633027522936, + "acc_norm_stderr": 0.01841528635141643 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.04360314860077459, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.04360314860077459 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.6274509803921569, + "acc_stderr": 0.02768418188330289, + "acc_norm": 0.6274509803921569, + "acc_norm_stderr": 0.02768418188330289 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6859504132231405, + "acc_stderr": 0.0423696475304102, + "acc_norm": 0.6859504132231405, + "acc_norm_stderr": 0.0423696475304102 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 
0.6052631578947368, + "acc_stderr": 0.039777499346220734, + "acc_norm": 0.6052631578947368, + "acc_norm_stderr": 0.039777499346220734 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5147058823529411, + "acc_stderr": 0.020219083895133917, + "acc_norm": 0.5147058823529411, + "acc_norm_stderr": 0.020219083895133917 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.425531914893617, + "acc_stderr": 0.029494827600144376, + "acc_norm": 0.425531914893617, + "acc_norm_stderr": 0.029494827600144376 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.04697113923010212, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.04697113923010212 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5324074074074074, + "acc_stderr": 0.03402801581358966, + "acc_norm": 0.5324074074074074, + "acc_norm_stderr": 0.03402801581358966 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.30837988826815643, + "acc_stderr": 0.01544571691099888, + "acc_norm": 0.30837988826815643, + "acc_norm_stderr": 0.01544571691099888 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.63, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.63, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.6176470588235294, + "acc_stderr": 0.029520095697687758, + "acc_norm": 0.6176470588235294, + "acc_norm_stderr": 0.029520095697687758 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6122448979591837, + "acc_stderr": 0.031192230726795656, + "acc_norm": 0.6122448979591837, + "acc_norm_stderr": 0.031192230726795656 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7426160337552743, + "acc_stderr": 0.0284588209914603, + "acc_norm": 0.7426160337552743, + "acc_norm_stderr": 
0.0284588209914603 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.43415906127770537, + "acc_stderr": 0.01265903323706725, + "acc_norm": 0.43415906127770537, + "acc_norm_stderr": 0.01265903323706725 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6323529411764706, + "acc_stderr": 0.03384132045674118, + "acc_norm": 0.6323529411764706, + "acc_norm_stderr": 0.03384132045674118 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6, + "acc_stderr": 0.03825460278380027, + "acc_norm": 0.6, + "acc_norm_stderr": 0.03825460278380027 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.5483476132190942, + "mc1_stderr": 0.01742148030027764, + "mc2": 0.6615036888593401, + "mc2_stderr": 0.014573548083073195 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5348288075560803, + "acc_stderr": 0.017148598015747422, + "acc_norm": 0.5997638724911453, + "acc_norm_stderr": 0.01684469351050504 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + 
"harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Deepnoid/deep-solar-Rev-v3.0.4", + "model_sha": "2b02b62a5decc775fa31d49bb29f93301272566c", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git 
a/Deepnoid/deep-solar-eeve-KorSTS/result_2024-03-12 05:11:52.json b/Deepnoid/deep-solar-eeve-KorSTS/result_2024-03-12 05:11:52.json new file mode 100644 index 0000000000000000000000000000000000000000..97153aae5a0859b75530fed8838d48812e9d9528 --- /dev/null +++ b/Deepnoid/deep-solar-eeve-KorSTS/result_2024-03-12 05:11:52.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.46501706484641636, + "acc_stderr": 0.014575583922019667, + "acc_norm": 0.5273037542662116, + "acc_norm_stderr": 0.014589589101985994 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4630551682931687, + "acc_stderr": 0.00497614145773688, + "acc_norm": 0.6409081856203943, + "acc_norm_stderr": 0.004787537385153014 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.7134502923976608, + "acc_stderr": 0.03467826685703826, + "acc_norm": 0.7134502923976608, + "acc_norm_stderr": 0.03467826685703826 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6893203883495146, + "acc_stderr": 0.04582124160161552, + "acc_norm": 0.6893203883495146, + "acc_norm_stderr": 0.04582124160161552 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.7203065134099617, + "acc_stderr": 0.016050792148036563, + "acc_norm": 0.7203065134099617, + "acc_norm_stderr": 0.016050792148036563 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.042925967182569816, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.042925967182569816 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.5361702127659574, + "acc_stderr": 0.03260038511835771, + "acc_norm": 0.5361702127659574, + "acc_norm_stderr": 0.03260038511835771 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.5180722891566265, + "acc_stderr": 0.03889951252827216, + "acc_norm": 0.5180722891566265, + "acc_norm_stderr": 0.03889951252827216 + }, + 
"harness|ko_mmlu_philosophy|5": { + "acc": 0.6045016077170418, + "acc_stderr": 0.027770918531427838, + "acc_norm": 0.6045016077170418, + "acc_norm_stderr": 0.027770918531427838 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5919282511210763, + "acc_stderr": 0.03298574607842822, + "acc_norm": 0.5919282511210763, + "acc_norm_stderr": 0.03298574607842822 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6335877862595419, + "acc_stderr": 0.04225875451969639, + "acc_norm": 0.6335877862595419, + "acc_norm_stderr": 0.04225875451969639 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.45, + "acc_stderr": 0.04999999999999999, + "acc_norm": 0.45, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7575757575757576, + "acc_stderr": 0.030532892233932046, + "acc_norm": 0.7575757575757576, + "acc_norm_stderr": 0.030532892233932046 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5310344827586206, + "acc_stderr": 0.04158632762097828, + "acc_norm": 0.5310344827586206, + "acc_norm_stderr": 0.04158632762097828 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3431372549019608, + "acc_stderr": 0.04724007352383887, + "acc_norm": 0.3431372549019608, + "acc_norm_stderr": 0.04724007352383887 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6554621848739496, + "acc_stderr": 0.030868682604121622, + "acc_norm": 0.6554621848739496, + "acc_norm_stderr": 0.030868682604121622 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5615384615384615, + "acc_stderr": 0.025158266016868613, + "acc_norm": 0.5615384615384615, + "acc_norm_stderr": 0.025158266016868613 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + 
}, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6296296296296297, + "acc_stderr": 0.04668408033024931, + "acc_norm": 0.6296296296296297, + "acc_norm_stderr": 0.04668408033024931 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.43842364532019706, + "acc_stderr": 0.03491207857486519, + "acc_norm": 0.43842364532019706, + "acc_norm_stderr": 0.03491207857486519 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6290322580645161, + "acc_stderr": 0.027480541887953593, + "acc_norm": 0.6290322580645161, + "acc_norm_stderr": 0.027480541887953593 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7735042735042735, + "acc_stderr": 0.027421007295392933, + "acc_norm": 0.7735042735042735, + "acc_norm_stderr": 0.027421007295392933 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5547169811320755, + "acc_stderr": 0.030588052974270658, + "acc_norm": 0.5547169811320755, + "acc_norm_stderr": 0.030588052974270658 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6090909090909091, + "acc_stderr": 0.04673752333670239, + "acc_norm": 0.6090909090909091, + "acc_norm_stderr": 0.04673752333670239 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.35555555555555557, + "acc_stderr": 0.029185714949857406, + "acc_norm": 0.35555555555555557, + "acc_norm_stderr": 0.029185714949857406 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3708609271523179, + "acc_stderr": 0.03943966699183629, + "acc_norm": 0.3708609271523179, + "acc_norm_stderr": 0.03943966699183629 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7313432835820896, + "acc_stderr": 0.03134328358208954, + "acc_norm": 0.7313432835820896, + "acc_norm_stderr": 0.03134328358208954 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5202312138728323, + "acc_stderr": 0.03809342081273956, + "acc_norm": 0.5202312138728323, + "acc_norm_stderr": 0.03809342081273956 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3888888888888889, + "acc_stderr": 
0.025107425481137285, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.025107425481137285 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5625, + "acc_stderr": 0.04148415739394154, + "acc_norm": 0.5625, + "acc_norm_stderr": 0.04148415739394154 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.81, + "acc_stderr": 0.03942772444036624, + "acc_norm": 0.81, + "acc_norm_stderr": 0.03942772444036624 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.6184971098265896, + "acc_stderr": 0.026152198619726803, + "acc_norm": 0.6184971098265896, + "acc_norm_stderr": 0.026152198619726803 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5521472392638037, + "acc_stderr": 0.03906947479456607, + "acc_norm": 0.5521472392638037, + "acc_norm_stderr": 0.03906947479456607 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6388888888888888, + "acc_stderr": 0.02672586880910079, + "acc_norm": 0.6388888888888888, + "acc_norm_stderr": 0.02672586880910079 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7305699481865285, + "acc_stderr": 0.03201867122877794, + "acc_norm": 0.7305699481865285, + "acc_norm_stderr": 0.03201867122877794 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.40350877192982454, + "acc_stderr": 0.04615186962583703, + "acc_norm": 0.40350877192982454, + "acc_norm_stderr": 0.04615186962583703 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.7027522935779816, + "acc_stderr": 0.019595707224643533, + "acc_norm": 0.7027522935779816, + "acc_norm_stderr": 0.019595707224643533 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3888888888888889, + "acc_stderr": 
0.04360314860077459, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.04360314860077459 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.6503267973856209, + "acc_stderr": 0.027305308076274695, + "acc_norm": 0.6503267973856209, + "acc_norm_stderr": 0.027305308076274695 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.59, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.59, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7024793388429752, + "acc_stderr": 0.04173349148083499, + "acc_norm": 0.7024793388429752, + "acc_norm_stderr": 0.04173349148083499 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.6118421052631579, + "acc_stderr": 0.03965842097512744, + "acc_norm": 0.6118421052631579, + "acc_norm_stderr": 0.03965842097512744 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5081699346405228, + "acc_stderr": 0.020225134343057272, + "acc_norm": 0.5081699346405228, + "acc_norm_stderr": 0.020225134343057272 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3900709219858156, + "acc_stderr": 0.029097675599463926, + "acc_norm": 0.3900709219858156, + "acc_norm_stderr": 0.029097675599463926 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.375, + "acc_stderr": 0.04595091388086298, + "acc_norm": 0.375, + "acc_norm_stderr": 0.04595091388086298 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5231481481481481, + "acc_stderr": 0.034063153607115065, + "acc_norm": 0.5231481481481481, + "acc_norm_stderr": 0.034063153607115065 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2022346368715084, + "acc_stderr": 0.013433729483320993, + "acc_norm": 0.2022346368715084, + "acc_norm_stderr": 0.013433729483320993 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.68, + 
"acc_stderr": 0.046882617226215034, + "acc_norm": 0.68, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5551470588235294, + "acc_stderr": 0.030187532060329383, + "acc_norm": 0.5551470588235294, + "acc_norm_stderr": 0.030187532060329383 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6612244897959184, + "acc_stderr": 0.030299506562154188, + "acc_norm": 0.6612244897959184, + "acc_norm_stderr": 0.030299506562154188 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7637130801687764, + "acc_stderr": 0.027652153144159263, + "acc_norm": 0.7637130801687764, + "acc_norm_stderr": 0.027652153144159263 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.39504563233376794, + "acc_stderr": 0.012485727813251558, + "acc_norm": 0.39504563233376794, + "acc_norm_stderr": 0.012485727813251558 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.696078431372549, + "acc_stderr": 0.03228210387037892, + "acc_norm": 0.696078431372549, + "acc_norm_stderr": 0.03228210387037892 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.7090909090909091, + "acc_stderr": 0.03546563019624336, + "acc_norm": 0.7090909090909091, + "acc_norm_stderr": 0.03546563019624336 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2974296205630355, + "mc1_stderr": 0.01600265148736101, + "mc2": 0.43902374904102626, + "mc2_stderr": 0.015135819154370348 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.6115702479338843, + "acc_stderr": 0.01675692157106942, + "acc_norm": 0.6375442739079102, + "acc_norm_stderr": 0.0165271312404537 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + 
"harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 
1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Deepnoid/deep-solar-eeve-KorSTS", + "model_sha": "63024622bcb7442d0d89e73930b5e57e675b22df", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Deepnoid/deep-solar-eeve-kullm-v2/result_2024-03-12 05:12:25.json b/Deepnoid/deep-solar-eeve-kullm-v2/result_2024-03-12 05:12:25.json new file mode 100644 index 0000000000000000000000000000000000000000..d35d6c18d41c06ddbd736ae25eb634b0b9f4ac68 --- /dev/null +++ b/Deepnoid/deep-solar-eeve-kullm-v2/result_2024-03-12 05:12:25.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.47952218430034127, + "acc_stderr": 0.014599131353035014, + "acc_norm": 0.5281569965870307, + "acc_norm_stderr": 0.014588204105102205 + }, + "harness|ko_hellaswag|10": { + "acc": 0.465345548695479, + "acc_stderr": 0.004977782217582456, + "acc_norm": 0.6335391356303525, + "acc_norm_stderr": 0.004808526802718585 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.7017543859649122, + "acc_stderr": 0.03508771929824563, + "acc_norm": 0.7017543859649122, + "acc_norm_stderr": 0.03508771929824563 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.7087378640776699, + "acc_stderr": 0.04498676320572922, + "acc_norm": 0.7087378640776699, + "acc_norm_stderr": 0.04498676320572922 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.7292464878671775, + "acc_stderr": 0.01588988836256049, + "acc_norm": 0.7292464878671775, + "acc_norm_stderr": 0.01588988836256049 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 
0.5037037037037037, + "acc_stderr": 0.043192236258113324, + "acc_norm": 0.5037037037037037, + "acc_norm_stderr": 0.043192236258113324 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.5276595744680851, + "acc_stderr": 0.03263597118409769, + "acc_norm": 0.5276595744680851, + "acc_norm_stderr": 0.03263597118409769 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.5120481927710844, + "acc_stderr": 0.03891364495835817, + "acc_norm": 0.5120481927710844, + "acc_norm_stderr": 0.03891364495835817 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6077170418006431, + "acc_stderr": 0.02773125864701199, + "acc_norm": 0.6077170418006431, + "acc_norm_stderr": 0.02773125864701199 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5919282511210763, + "acc_stderr": 0.03298574607842821, + "acc_norm": 0.5919282511210763, + "acc_norm_stderr": 0.03298574607842821 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6259541984732825, + "acc_stderr": 0.04243869242230523, + "acc_norm": 0.6259541984732825, + "acc_norm_stderr": 0.04243869242230523 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7525252525252525, + "acc_stderr": 0.03074630074212451, + "acc_norm": 0.7525252525252525, + "acc_norm_stderr": 0.03074630074212451 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5586206896551724, + "acc_stderr": 0.04137931034482757, + "acc_norm": 0.5586206896551724, + "acc_norm_stderr": 0.04137931034482757 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.04440521906179328, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.04440521906179328 + }, + 
"harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6554621848739496, + "acc_stderr": 0.030868682604121622, + "acc_norm": 0.6554621848739496, + "acc_norm_stderr": 0.030868682604121622 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5512820512820513, + "acc_stderr": 0.02521731518484649, + "acc_norm": 0.5512820512820513, + "acc_norm_stderr": 0.02521731518484649 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6574074074074074, + "acc_stderr": 0.04587904741301811, + "acc_norm": 0.6574074074074074, + "acc_norm_stderr": 0.04587904741301811 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4187192118226601, + "acc_stderr": 0.03471192860518468, + "acc_norm": 0.4187192118226601, + "acc_norm_stderr": 0.03471192860518468 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6387096774193548, + "acc_stderr": 0.027327548447957546, + "acc_norm": 0.6387096774193548, + "acc_norm_stderr": 0.027327548447957546 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7905982905982906, + "acc_stderr": 0.026655699653922765, + "acc_norm": 0.7905982905982906, + "acc_norm_stderr": 0.026655699653922765 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5547169811320755, + "acc_stderr": 0.030588052974270655, + "acc_norm": 0.5547169811320755, + "acc_norm_stderr": 0.030588052974270655 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.04607582090719976, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.04607582090719976 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.32592592592592595, + "acc_stderr": 0.02857834836547308, + "acc_norm": 0.32592592592592595, + 
"acc_norm_stderr": 0.02857834836547308 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.39072847682119205, + "acc_stderr": 0.03983798306659808, + "acc_norm": 0.39072847682119205, + "acc_norm_stderr": 0.03983798306659808 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7412935323383084, + "acc_stderr": 0.030965903123573037, + "acc_norm": 0.7412935323383084, + "acc_norm_stderr": 0.030965903123573037 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5144508670520231, + "acc_stderr": 0.03810871630454764, + "acc_norm": 0.5144508670520231, + "acc_norm_stderr": 0.03810871630454764 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3915343915343915, + "acc_stderr": 0.025138091388851112, + "acc_norm": 0.3915343915343915, + "acc_norm_stderr": 0.025138091388851112 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5625, + "acc_stderr": 0.04148415739394154, + "acc_norm": 0.5625, + "acc_norm_stderr": 0.04148415739394154 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.83, + "acc_stderr": 0.03775251680686371, + "acc_norm": 0.83, + "acc_norm_stderr": 0.03775251680686371 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.6242774566473989, + "acc_stderr": 0.02607431485165708, + "acc_norm": 0.6242774566473989, + "acc_norm_stderr": 0.02607431485165708 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.588957055214724, + "acc_stderr": 0.038656978537853624, + "acc_norm": 0.588957055214724, + "acc_norm_stderr": 0.038656978537853624 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6327160493827161, + "acc_stderr": 0.026822801759507894, + "acc_norm": 0.6327160493827161, + "acc_norm_stderr": 0.026822801759507894 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 
0.049431107042371025 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7357512953367875, + "acc_stderr": 0.03182155050916646, + "acc_norm": 0.7357512953367875, + "acc_norm_stderr": 0.03182155050916646 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.45614035087719296, + "acc_stderr": 0.046854730419077895, + "acc_norm": 0.45614035087719296, + "acc_norm_stderr": 0.046854730419077895 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.7155963302752294, + "acc_stderr": 0.0193420365877026, + "acc_norm": 0.7155963302752294, + "acc_norm_stderr": 0.0193420365877026 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.40476190476190477, + "acc_stderr": 0.043902592653775614, + "acc_norm": 0.40476190476190477, + "acc_norm_stderr": 0.043902592653775614 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.6601307189542484, + "acc_stderr": 0.027121956071388856, + "acc_norm": 0.6601307189542484, + "acc_norm_stderr": 0.027121956071388856 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7107438016528925, + "acc_stderr": 0.041391127276354626, + "acc_norm": 0.7107438016528925, + "acc_norm_stderr": 0.041391127276354626 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.625, + "acc_stderr": 0.039397364351956274, + "acc_norm": 0.625, + "acc_norm_stderr": 0.039397364351956274 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5310457516339869, + "acc_stderr": 0.020188804456361894, + "acc_norm": 0.5310457516339869, + "acc_norm_stderr": 0.020188804456361894 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3829787234042553, + "acc_stderr": 0.028999080904806185, + "acc_norm": 0.3829787234042553, + "acc_norm_stderr": 0.028999080904806185 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.39285714285714285, + "acc_stderr": 0.04635550135609976, + 
"acc_norm": 0.39285714285714285, + "acc_norm_stderr": 0.04635550135609976 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5231481481481481, + "acc_stderr": 0.034063153607115065, + "acc_norm": 0.5231481481481481, + "acc_norm_stderr": 0.034063153607115065 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.22569832402234638, + "acc_stderr": 0.013981395058455049, + "acc_norm": 0.22569832402234638, + "acc_norm_stderr": 0.013981395058455049 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.7, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.7, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5514705882352942, + "acc_stderr": 0.030211479609121596, + "acc_norm": 0.5514705882352942, + "acc_norm_stderr": 0.030211479609121596 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6653061224489796, + "acc_stderr": 0.030209235226242304, + "acc_norm": 0.6653061224489796, + "acc_norm_stderr": 0.030209235226242304 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7763713080168776, + "acc_stderr": 0.027123298205229966, + "acc_norm": 0.7763713080168776, + "acc_norm_stderr": 0.027123298205229966 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.40808344198174706, + "acc_stderr": 0.012552598958563668, + "acc_norm": 0.40808344198174706, + "acc_norm_stderr": 0.012552598958563668 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.7254901960784313, + "acc_stderr": 0.03132179803083291, + "acc_norm": 0.7254901960784313, + "acc_norm_stderr": 0.03132179803083291 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.7333333333333333, + "acc_stderr": 0.03453131801885416, + "acc_norm": 0.7333333333333333, + "acc_norm_stderr": 0.03453131801885416 + }, + 
"harness|ko_truthfulqa_mc|0": { + "mc1": 0.31701346389228885, + "mc1_stderr": 0.016289203374403403, + "mc2": 0.47068223819357563, + "mc2_stderr": 0.015463822134428559 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5702479338842975, + "acc_stderr": 0.017019847535972205, + "acc_norm": 0.6009445100354192, + "acc_norm_stderr": 0.016836377292849307 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Deepnoid/deep-solar-eeve-kullm-v2", + "model_sha": "f17ed3cb5afaac315f25e27004b9fd5a4eecc877", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Deepnoid/deep-solar-eeve-sentineg/result_2024-03-13 01:08:10.json b/Deepnoid/deep-solar-eeve-sentineg/result_2024-03-13 01:08:10.json new file mode 100644 index 0000000000000000000000000000000000000000..f8d6e34f6a0e1c8f38467d5de897e0475e8b1e86 --- /dev/null +++ b/Deepnoid/deep-solar-eeve-sentineg/result_2024-03-13 01:08:10.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.47440273037542663, + "acc_stderr": 0.014592230885298962, + "acc_norm": 0.5358361774744027, + "acc_norm_stderr": 0.014573813664735714 + }, + "harness|ko_hellaswag|10": { + "acc": 
0.46634136626170086, + "acc_stderr": 0.004978462690966937, + "acc_norm": 0.6487751443935471, + "acc_norm_stderr": 0.004763774981834677 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6900584795321637, + "acc_stderr": 0.035469769593931624, + "acc_norm": 0.6900584795321637, + "acc_norm_stderr": 0.035469769593931624 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6699029126213593, + "acc_stderr": 0.04656147110012352, + "acc_norm": 0.6699029126213593, + "acc_norm_stderr": 0.04656147110012352 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.7215836526181354, + "acc_stderr": 0.016028295188992448, + "acc_norm": 0.7215836526181354, + "acc_norm_stderr": 0.016028295188992448 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45185185185185184, + "acc_stderr": 0.04299268905480863, + "acc_norm": 0.45185185185185184, + "acc_norm_stderr": 0.04299268905480863 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.5319148936170213, + "acc_stderr": 0.03261936918467382, + "acc_norm": 0.5319148936170213, + "acc_norm_stderr": 0.03261936918467382 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4939759036144578, + "acc_stderr": 0.03892212195333047, + "acc_norm": 0.4939759036144578, + "acc_norm_stderr": 0.03892212195333047 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6270096463022508, + "acc_stderr": 0.0274666102131401, + "acc_norm": 0.6270096463022508, + "acc_norm_stderr": 0.0274666102131401 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5739910313901345, + "acc_stderr": 0.033188332862172806, + "acc_norm": 0.5739910313901345, + "acc_norm_stderr": 0.033188332862172806 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6335877862595419, + "acc_stderr": 0.04225875451969639, + "acc_norm": 0.6335877862595419, + "acc_norm_stderr": 0.04225875451969639 + }, + 
"harness|ko_mmlu_medical_genetics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7525252525252525, + "acc_stderr": 0.030746300742124515, + "acc_norm": 0.7525252525252525, + "acc_norm_stderr": 0.030746300742124515 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5379310344827586, + "acc_stderr": 0.04154659671707548, + "acc_norm": 0.5379310344827586, + "acc_norm_stderr": 0.04154659671707548 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.35294117647058826, + "acc_stderr": 0.04755129616062946, + "acc_norm": 0.35294117647058826, + "acc_norm_stderr": 0.04755129616062946 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6470588235294118, + "acc_stderr": 0.031041941304059288, + "acc_norm": 0.6470588235294118, + "acc_norm_stderr": 0.031041941304059288 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5538461538461539, + "acc_stderr": 0.025203571773028326, + "acc_norm": 0.5538461538461539, + "acc_norm_stderr": 0.025203571773028326 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.58, + "acc_stderr": 0.04960449637488583, + "acc_norm": 0.58, + "acc_norm_stderr": 0.04960449637488583 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6203703703703703, + "acc_stderr": 0.04691521224077742, + "acc_norm": 0.6203703703703703, + "acc_norm_stderr": 0.04691521224077742 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.43349753694581283, + "acc_stderr": 0.034867317274198714, + "acc_norm": 0.43349753694581283, + "acc_norm_stderr": 0.034867317274198714 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.632258064516129, + "acc_stderr": 0.02743086657997347, + "acc_norm": 0.632258064516129, + "acc_norm_stderr": 
0.02743086657997347 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.782051282051282, + "acc_stderr": 0.02704685763071666, + "acc_norm": 0.782051282051282, + "acc_norm_stderr": 0.02704685763071666 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5660377358490566, + "acc_stderr": 0.030503292013342592, + "acc_norm": 0.5660377358490566, + "acc_norm_stderr": 0.030503292013342592 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6272727272727273, + "acc_stderr": 0.04631381319425465, + "acc_norm": 0.6272727272727273, + "acc_norm_stderr": 0.04631381319425465 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.34814814814814815, + "acc_stderr": 0.029045600290616258, + "acc_norm": 0.34814814814814815, + "acc_norm_stderr": 0.029045600290616258 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3576158940397351, + "acc_stderr": 0.03913453431177258, + "acc_norm": 0.3576158940397351, + "acc_norm_stderr": 0.03913453431177258 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7263681592039801, + "acc_stderr": 0.03152439186555404, + "acc_norm": 0.7263681592039801, + "acc_norm_stderr": 0.03152439186555404 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5260115606936416, + "acc_stderr": 0.03807301726504513, + "acc_norm": 0.5260115606936416, + "acc_norm_stderr": 0.03807301726504513 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.38095238095238093, + "acc_stderr": 0.025010749116137595, + "acc_norm": 0.38095238095238093, + "acc_norm_stderr": 0.025010749116137595 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5694444444444444, + "acc_stderr": 0.04140685639111502, + "acc_norm": 0.5694444444444444, + "acc_norm_stderr": 0.04140685639111502 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.8, + "acc_stderr": 0.040201512610368445, + "acc_norm": 
0.8, + "acc_norm_stderr": 0.040201512610368445 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.6127167630057804, + "acc_stderr": 0.026226158605124655, + "acc_norm": 0.6127167630057804, + "acc_norm_stderr": 0.026226158605124655 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.558282208588957, + "acc_stderr": 0.03901591825836184, + "acc_norm": 0.558282208588957, + "acc_norm_stderr": 0.03901591825836184 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6327160493827161, + "acc_stderr": 0.026822801759507894, + "acc_norm": 0.6327160493827161, + "acc_norm_stderr": 0.026822801759507894 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7202072538860104, + "acc_stderr": 0.03239637046735703, + "acc_norm": 0.7202072538860104, + "acc_norm_stderr": 0.03239637046735703 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.40350877192982454, + "acc_stderr": 0.04615186962583704, + "acc_norm": 0.40350877192982454, + "acc_norm_stderr": 0.04615186962583704 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.7137614678899082, + "acc_stderr": 0.019379436628919975, + "acc_norm": 0.7137614678899082, + "acc_norm_stderr": 0.019379436628919975 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.42063492063492064, + "acc_stderr": 0.04415438226743743, + "acc_norm": 0.42063492063492064, + "acc_norm_stderr": 0.04415438226743743 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.6535947712418301, + "acc_stderr": 0.02724561304721536, + "acc_norm": 0.6535947712418301, + "acc_norm_stderr": 0.02724561304721536 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6859504132231405, + "acc_stderr": 0.042369647530410184, 
+ "acc_norm": 0.6859504132231405, + "acc_norm_stderr": 0.042369647530410184 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.6118421052631579, + "acc_stderr": 0.03965842097512744, + "acc_norm": 0.6118421052631579, + "acc_norm_stderr": 0.03965842097512744 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5277777777777778, + "acc_stderr": 0.0201965949335412, + "acc_norm": 0.5277777777777778, + "acc_norm_stderr": 0.0201965949335412 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.39361702127659576, + "acc_stderr": 0.029144544781596147, + "acc_norm": 0.39361702127659576, + "acc_norm_stderr": 0.029144544781596147 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4017857142857143, + "acc_stderr": 0.04653333146973647, + "acc_norm": 0.4017857142857143, + "acc_norm_stderr": 0.04653333146973647 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5277777777777778, + "acc_stderr": 0.0340470532865388, + "acc_norm": 0.5277777777777778, + "acc_norm_stderr": 0.0340470532865388 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23128491620111732, + "acc_stderr": 0.014102223623152586, + "acc_norm": 0.23128491620111732, + "acc_norm_stderr": 0.014102223623152586 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.66, + "acc_stderr": 0.04760952285695238, + "acc_norm": 0.66, + "acc_norm_stderr": 0.04760952285695238 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5551470588235294, + "acc_stderr": 0.030187532060329383, + "acc_norm": 0.5551470588235294, + "acc_norm_stderr": 0.030187532060329383 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6693877551020408, + "acc_stderr": 0.030116426296540624, + "acc_norm": 0.6693877551020408, + "acc_norm_stderr": 0.030116426296540624 + }, + "harness|ko_mmlu_high_school_world_history|5": { 
+ "acc": 0.759493670886076, + "acc_stderr": 0.027820781981149678, + "acc_norm": 0.759493670886076, + "acc_norm_stderr": 0.027820781981149678 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.4048239895697523, + "acc_stderr": 0.012536743830953986, + "acc_norm": 0.4048239895697523, + "acc_norm_stderr": 0.012536743830953986 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.7205882352941176, + "acc_stderr": 0.03149328104507955, + "acc_norm": 0.7205882352941176, + "acc_norm_stderr": 0.03149328104507955 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.7151515151515152, + "acc_stderr": 0.03524390844511782, + "acc_norm": 0.7151515151515152, + "acc_norm_stderr": 0.03524390844511782 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2741738066095471, + "mc1_stderr": 0.015616518497219376, + "mc2": 0.42918102739316594, + "mc2_stderr": 0.014928266402004902 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5820543093270366, + "acc_stderr": 0.01695729200527971, + "acc_norm": 0.6080283353010626, + "acc_norm_stderr": 0.016784332119424077 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + 
"harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Deepnoid/deep-solar-eeve-sentineg", + "model_sha": "9f3b8fbadad2a34548dc6f199900be9f2b1e6786", + "model_dtype": "torch.float16", + "lighteval_sha": "", + 
"num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Deepnoid/deep-solar-eeve-v2.0.0/result_2024-03-13 01:07:41.json b/Deepnoid/deep-solar-eeve-v2.0.0/result_2024-03-13 01:07:41.json new file mode 100644 index 0000000000000000000000000000000000000000..a5b610ac1021debd781125241feae63e0a783c7e --- /dev/null +++ b/Deepnoid/deep-solar-eeve-v2.0.0/result_2024-03-13 01:07:41.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.6416382252559727, + "acc_stderr": 0.014012883334859864, + "acc_norm": 0.6902730375426621, + "acc_norm_stderr": 0.01351205841523836 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3943437562238598, + "acc_stderr": 0.004877104939356235, + "acc_norm": 0.5182234614618602, + "acc_norm_stderr": 0.0049864661516987735 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5847953216374269, + "acc_stderr": 0.03779275945503201, + "acc_norm": 0.5847953216374269, + "acc_norm_stderr": 0.03779275945503201 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6019417475728155, + "acc_stderr": 0.048467482539772386, + "acc_norm": 0.6019417475728155, + "acc_norm_stderr": 0.048467482539772386 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6104725415070242, + "acc_stderr": 0.0174380825562646, + "acc_norm": 0.6104725415070242, + "acc_norm_stderr": 0.0174380825562646 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3925925925925926, + "acc_stderr": 0.042185062153688786, + "acc_norm": 0.3925925925925926, + "acc_norm_stderr": 0.042185062153688786 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4340425531914894, + "acc_stderr": 0.032400380867927465, + "acc_norm": 0.4340425531914894, + "acc_norm_stderr": 0.032400380867927465 + }, + "harness|ko_mmlu_virology|5": { + 
"acc": 0.5180722891566265, + "acc_stderr": 0.038899512528272166, + "acc_norm": 0.5180722891566265, + "acc_norm_stderr": 0.038899512528272166 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5884244372990354, + "acc_stderr": 0.027950481494401266, + "acc_norm": 0.5884244372990354, + "acc_norm_stderr": 0.027950481494401266 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5829596412556054, + "acc_stderr": 0.03309266936071721, + "acc_norm": 0.5829596412556054, + "acc_norm_stderr": 0.03309266936071721 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5190839694656488, + "acc_stderr": 0.043820947055509867, + "acc_norm": 0.5190839694656488, + "acc_norm_stderr": 0.043820947055509867 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.03427308652999935, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.03427308652999935 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5241379310344828, + "acc_stderr": 0.0416180850350153, + "acc_norm": 0.5241379310344828, + "acc_norm_stderr": 0.0416180850350153 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.30392156862745096, + "acc_stderr": 0.045766654032077636, + "acc_norm": 0.30392156862745096, + "acc_norm_stderr": 0.045766654032077636 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5210084033613446, + "acc_stderr": 0.032449808499900284, + "acc_norm": 0.5210084033613446, + "acc_norm_stderr": 0.032449808499900284 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5487179487179488, + "acc_stderr": 0.02523038123893484, + "acc_norm": 0.5487179487179488, + "acc_norm_stderr": 0.02523038123893484 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.63, + "acc_stderr": 0.04852365870939098, + "acc_norm": 0.63, + "acc_norm_stderr": 0.04852365870939098 + }, + 
"harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5277777777777778, + "acc_stderr": 0.048262172941398944, + "acc_norm": 0.5277777777777778, + "acc_norm_stderr": 0.048262172941398944 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4482758620689655, + "acc_stderr": 0.03499113137676744, + "acc_norm": 0.4482758620689655, + "acc_norm_stderr": 0.03499113137676744 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5741935483870968, + "acc_stderr": 0.028129112709165904, + "acc_norm": 0.5741935483870968, + "acc_norm_stderr": 0.028129112709165904 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7649572649572649, + "acc_stderr": 0.02777883590493543, + "acc_norm": 0.7649572649572649, + "acc_norm_stderr": 0.02777883590493543 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5169811320754717, + "acc_stderr": 0.030755120364119898, + "acc_norm": 0.5169811320754717, + "acc_norm_stderr": 0.030755120364119898 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.04769300568972744, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.04769300568972744 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.32592592592592595, + "acc_stderr": 0.02857834836547307, + "acc_norm": 0.32592592592592595, + "acc_norm_stderr": 0.02857834836547307 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.32450331125827814, + "acc_stderr": 0.038227469376587525, + "acc_norm": 0.32450331125827814, + "acc_norm_stderr": 0.038227469376587525 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6567164179104478, + "acc_stderr": 0.03357379665433431, + "acc_norm": 0.6567164179104478, + "acc_norm_stderr": 0.03357379665433431 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4797687861271676, + "acc_stderr": 0.03809342081273957, + "acc_norm": 
0.4797687861271676, + "acc_norm_stderr": 0.03809342081273957 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3783068783068783, + "acc_stderr": 0.024976954053155257, + "acc_norm": 0.3783068783068783, + "acc_norm_stderr": 0.024976954053155257 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4375, + "acc_stderr": 0.04148415739394154, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.04148415739394154 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.71, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.71, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5289017341040463, + "acc_stderr": 0.02687408588351835, + "acc_norm": 0.5289017341040463, + "acc_norm_stderr": 0.02687408588351835 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5705521472392638, + "acc_stderr": 0.03889066619112723, + "acc_norm": 0.5705521472392638, + "acc_norm_stderr": 0.03889066619112723 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5679012345679012, + "acc_stderr": 0.02756301097160668, + "acc_norm": 0.5679012345679012, + "acc_norm_stderr": 0.02756301097160668 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6632124352331606, + "acc_stderr": 0.03410780251836184, + "acc_norm": 0.6632124352331606, + "acc_norm_stderr": 0.03410780251836184 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3157894736842105, + "acc_stderr": 0.04372748290278007, + "acc_norm": 0.3157894736842105, + "acc_norm_stderr": 0.04372748290278007 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.689908256880734, + "acc_stderr": 0.019830849684439756, + "acc_norm": 
0.689908256880734, + "acc_norm_stderr": 0.019830849684439756 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3253968253968254, + "acc_stderr": 0.041905964388711366, + "acc_norm": 0.3253968253968254, + "acc_norm_stderr": 0.041905964388711366 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5228758169934641, + "acc_stderr": 0.028599936776089775, + "acc_norm": 0.5228758169934641, + "acc_norm_stderr": 0.028599936776089775 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6694214876033058, + "acc_stderr": 0.04294340845212093, + "acc_norm": 0.6694214876033058, + "acc_norm_stderr": 0.04294340845212093 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5460526315789473, + "acc_stderr": 0.04051646342874143, + "acc_norm": 0.5460526315789473, + "acc_norm_stderr": 0.04051646342874143 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4803921568627451, + "acc_stderr": 0.020212274976302957, + "acc_norm": 0.4803921568627451, + "acc_norm_stderr": 0.020212274976302957 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.41134751773049644, + "acc_stderr": 0.029354911159940985, + "acc_norm": 0.41134751773049644, + "acc_norm_stderr": 0.029354911159940985 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3482142857142857, + "acc_stderr": 0.04521829902833585, + "acc_norm": 0.3482142857142857, + "acc_norm_stderr": 0.04521829902833585 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.03362277436608043, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.03362277436608043 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23798882681564246, + "acc_stderr": 0.014242630070574892, + "acc_norm": 0.23798882681564246, + "acc_norm_stderr": 0.014242630070574892 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.42, + 
"acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.57, + "acc_stderr": 0.0497569851956243, + "acc_norm": 0.57, + "acc_norm_stderr": 0.0497569851956243 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.45588235294117646, + "acc_stderr": 0.030254372573976687, + "acc_norm": 0.45588235294117646, + "acc_norm_stderr": 0.030254372573976687 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5877551020408164, + "acc_stderr": 0.0315123604467427, + "acc_norm": 0.5877551020408164, + "acc_norm_stderr": 0.0315123604467427 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5907172995780591, + "acc_stderr": 0.032007041833595914, + "acc_norm": 0.5907172995780591, + "acc_norm_stderr": 0.032007041833595914 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.4048239895697523, + "acc_stderr": 0.012536743830953979, + "acc_norm": 0.4048239895697523, + "acc_norm_stderr": 0.012536743830953979 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5294117647058824, + "acc_stderr": 0.03503235296367992, + "acc_norm": 0.5294117647058824, + "acc_norm_stderr": 0.03503235296367992 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.0390369864774844, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.0390369864774844 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.5201958384332925, + "mc1_stderr": 0.01748921684973705, + "mc2": 0.6350127563212159, + "mc2_stderr": 0.015049263970699864 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.43919716646989376, + "acc_stderr": 0.0170627757447807, + "acc_norm": 0.4817001180637544, + "acc_norm_stderr": 0.017178836639177766 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + 
"harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 
1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Deepnoid/deep-solar-eeve-v2.0.0", + "model_sha": "fc10ad6c60e72832c4181a386acb17c898e35407", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Deepnoid/deep-solar-v2.0.1/result_2024-03-14 01:00:16.json b/Deepnoid/deep-solar-v2.0.1/result_2024-03-14 01:00:16.json new file mode 100644 index 0000000000000000000000000000000000000000..24920dd15bef7df882f7f17845210e09023ee495 --- /dev/null +++ b/Deepnoid/deep-solar-v2.0.1/result_2024-03-14 01:00:16.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.674061433447099, + "acc_stderr": 0.01369743246669325, + "acc_norm": 0.7218430034129693, + "acc_norm_stderr": 0.013094469919538802 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4311890061740689, + "acc_stderr": 0.0049423027680021055, + "acc_norm": 0.5707030472017527, + "acc_norm_stderr": 0.004939642460172577 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.672514619883041, + "acc_stderr": 0.035993357714560276, + "acc_norm": 0.672514619883041, + "acc_norm_stderr": 0.035993357714560276 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6699029126213593, + "acc_stderr": 0.0465614711001235, + "acc_norm": 0.6699029126213593, + "acc_norm_stderr": 0.0465614711001235 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.7088122605363985, + 
"acc_stderr": 0.0162460870697014, + "acc_norm": 0.7088122605363985, + "acc_norm_stderr": 0.0162460870697014 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45185185185185184, + "acc_stderr": 0.04299268905480863, + "acc_norm": 0.45185185185185184, + "acc_norm_stderr": 0.04299268905480863 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.5276595744680851, + "acc_stderr": 0.03263597118409769, + "acc_norm": 0.5276595744680851, + "acc_norm_stderr": 0.03263597118409769 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.5120481927710844, + "acc_stderr": 0.03891364495835817, + "acc_norm": 0.5120481927710844, + "acc_norm_stderr": 0.03891364495835817 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6270096463022508, + "acc_stderr": 0.027466610213140095, + "acc_norm": 0.6270096463022508, + "acc_norm_stderr": 0.027466610213140095 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5919282511210763, + "acc_stderr": 0.03298574607842821, + "acc_norm": 0.5919282511210763, + "acc_norm_stderr": 0.03298574607842821 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5954198473282443, + "acc_stderr": 0.043046937953806645, + "acc_norm": 0.5954198473282443, + "acc_norm_stderr": 0.043046937953806645 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.51, + "acc_stderr": 0.050241839379569095, + "acc_norm": 0.51, + "acc_norm_stderr": 0.050241839379569095 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7222222222222222, + "acc_stderr": 0.031911782267135466, + "acc_norm": 0.7222222222222222, + "acc_norm_stderr": 0.031911782267135466 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.503448275862069, + "acc_stderr": 0.04166567577101579, + "acc_norm": 0.503448275862069, + "acc_norm_stderr": 0.04166567577101579 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3627450980392157, 
+ "acc_stderr": 0.047840607041056527, + "acc_norm": 0.3627450980392157, + "acc_norm_stderr": 0.047840607041056527 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6428571428571429, + "acc_stderr": 0.031124619309328177, + "acc_norm": 0.6428571428571429, + "acc_norm_stderr": 0.031124619309328177 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5948717948717949, + "acc_stderr": 0.02489047176993814, + "acc_norm": 0.5948717948717949, + "acc_norm_stderr": 0.02489047176993814 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.64, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.64, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5648148148148148, + "acc_stderr": 0.04792898170907061, + "acc_norm": 0.5648148148148148, + "acc_norm_stderr": 0.04792898170907061 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4482758620689655, + "acc_stderr": 0.03499113137676744, + "acc_norm": 0.4482758620689655, + "acc_norm_stderr": 0.03499113137676744 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6290322580645161, + "acc_stderr": 0.02748054188795359, + "acc_norm": 0.6290322580645161, + "acc_norm_stderr": 0.02748054188795359 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.8076923076923077, + "acc_stderr": 0.025819233256483727, + "acc_norm": 0.8076923076923077, + "acc_norm_stderr": 0.025819233256483727 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.6037735849056604, + "acc_stderr": 0.030102793781791197, + "acc_norm": 0.6037735849056604, + "acc_norm_stderr": 0.030102793781791197 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.04769300568972744, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.04769300568972744 + }, + 
"harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.35555555555555557, + "acc_stderr": 0.029185714949857403, + "acc_norm": 0.35555555555555557, + "acc_norm_stderr": 0.029185714949857403 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3708609271523179, + "acc_stderr": 0.03943966699183629, + "acc_norm": 0.3708609271523179, + "acc_norm_stderr": 0.03943966699183629 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6766169154228856, + "acc_stderr": 0.03307615947979035, + "acc_norm": 0.6766169154228856, + "acc_norm_stderr": 0.03307615947979035 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5433526011560693, + "acc_stderr": 0.03798106566014498, + "acc_norm": 0.5433526011560693, + "acc_norm_stderr": 0.03798106566014498 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.40476190476190477, + "acc_stderr": 0.025279850397404904, + "acc_norm": 0.40476190476190477, + "acc_norm_stderr": 0.025279850397404904 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5069444444444444, + "acc_stderr": 0.04180806750294938, + "acc_norm": 0.5069444444444444, + "acc_norm_stderr": 0.04180806750294938 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.75, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.75, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5722543352601156, + "acc_stderr": 0.02663653974111609, + "acc_norm": 0.5722543352601156, + "acc_norm_stderr": 0.02663653974111609 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5828220858895705, + "acc_stderr": 0.03874102859818081, + "acc_norm": 0.5828220858895705, + "acc_norm_stderr": 0.03874102859818081 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5864197530864198, + "acc_stderr": 0.02740204204026997, + "acc_norm": 0.5864197530864198, + "acc_norm_stderr": 
0.02740204204026997 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6735751295336787, + "acc_stderr": 0.033840286211432945, + "acc_norm": 0.6735751295336787, + "acc_norm_stderr": 0.033840286211432945 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.42105263157894735, + "acc_stderr": 0.046446020912223177, + "acc_norm": 0.42105263157894735, + "acc_norm_stderr": 0.046446020912223177 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.7394495412844037, + "acc_stderr": 0.018819182034850068, + "acc_norm": 0.7394495412844037, + "acc_norm_stderr": 0.018819182034850068 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.373015873015873, + "acc_stderr": 0.04325506042017086, + "acc_norm": 0.373015873015873, + "acc_norm_stderr": 0.04325506042017086 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5849673202614379, + "acc_stderr": 0.028213504177824096, + "acc_norm": 0.5849673202614379, + "acc_norm_stderr": 0.028213504177824096 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7024793388429752, + "acc_stderr": 0.041733491480835, + "acc_norm": 0.7024793388429752, + "acc_norm_stderr": 0.041733491480835 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5921052631578947, + "acc_stderr": 0.039993097127774734, + "acc_norm": 0.5921052631578947, + "acc_norm_stderr": 0.039993097127774734 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.511437908496732, + "acc_stderr": 0.020222541515610863, + "acc_norm": 0.511437908496732, + "acc_norm_stderr": 0.020222541515610863 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.38652482269503546, + "acc_stderr": 0.029049190342543454, + "acc_norm": 
0.38652482269503546, + "acc_norm_stderr": 0.029049190342543454 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.375, + "acc_stderr": 0.04595091388086298, + "acc_norm": 0.375, + "acc_norm_stderr": 0.04595091388086298 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.49537037037037035, + "acc_stderr": 0.03409825519163572, + "acc_norm": 0.49537037037037035, + "acc_norm_stderr": 0.03409825519163572 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.26033519553072626, + "acc_stderr": 0.014676252009319463, + "acc_norm": 0.26033519553072626, + "acc_norm_stderr": 0.014676252009319463 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.67, + "acc_stderr": 0.047258156262526094, + "acc_norm": 0.67, + "acc_norm_stderr": 0.047258156262526094 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5625, + "acc_stderr": 0.030134614954403924, + "acc_norm": 0.5625, + "acc_norm_stderr": 0.030134614954403924 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6122448979591837, + "acc_stderr": 0.031192230726795656, + "acc_norm": 0.6122448979591837, + "acc_norm_stderr": 0.031192230726795656 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.030685820596610795, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.030685820596610795 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.4074315514993481, + "acc_stderr": 0.01254947371421222, + "acc_norm": 0.4074315514993481, + "acc_norm_stderr": 0.01254947371421222 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5882352941176471, + "acc_stderr": 0.034542365853806094, + "acc_norm": 0.5882352941176471, + "acc_norm_stderr": 0.034542365853806094 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5818181818181818, + 
"acc_stderr": 0.03851716319398394, + "acc_norm": 0.5818181818181818, + "acc_norm_stderr": 0.03851716319398394 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.5458996328029376, + "mc1_stderr": 0.01742959309132352, + "mc2": 0.6636841578972051, + "mc2_stderr": 0.0145882675097334 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5171192443919717, + "acc_stderr": 0.01718027524608563, + "acc_norm": 0.5749704840613932, + "acc_norm_stderr": 0.016996016308362887 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + 
"harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Deepnoid/deep-solar-v2.0.1", + "model_sha": "b4c257a994271ba973583a90d98a20284af42cb6", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Deepnoid/deep-solar-v2.0.2/result_2024-03-14 01:00:37.json b/Deepnoid/deep-solar-v2.0.2/result_2024-03-14 01:00:37.json new file mode 100644 index 0000000000000000000000000000000000000000..38da92b99c39eb6b2442010d5b3f51e5f960f938 --- /dev/null +++ b/Deepnoid/deep-solar-v2.0.2/result_2024-03-14 01:00:37.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.6407849829351536, + "acc_stderr": 0.014020224155839154, + "acc_norm": 0.7005119453924915, + 
"acc_norm_stderr": 0.013385021637313565 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3986257717586138, + "acc_stderr": 0.004886147907627405, + "acc_norm": 0.5222067317267477, + "acc_norm_stderr": 0.004984857671187099 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5906432748538012, + "acc_stderr": 0.037712831076265434, + "acc_norm": 0.5906432748538012, + "acc_norm_stderr": 0.037712831076265434 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5825242718446602, + "acc_stderr": 0.048828405482122375, + "acc_norm": 0.5825242718446602, + "acc_norm_stderr": 0.048828405482122375 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6296296296296297, + "acc_stderr": 0.017268607560005794, + "acc_norm": 0.6296296296296297, + "acc_norm_stderr": 0.017268607560005794 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3925925925925926, + "acc_stderr": 0.04218506215368879, + "acc_norm": 0.3925925925925926, + "acc_norm_stderr": 0.04218506215368879 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.43829787234042555, + "acc_stderr": 0.03243618636108102, + "acc_norm": 0.43829787234042555, + "acc_norm_stderr": 0.03243618636108102 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.5180722891566265, + "acc_stderr": 0.038899512528272166, + "acc_norm": 0.5180722891566265, + "acc_norm_stderr": 0.038899512528272166 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5755627009646302, + "acc_stderr": 0.028071928247946205, + "acc_norm": 0.5755627009646302, + "acc_norm_stderr": 0.028071928247946205 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.600896860986547, + "acc_stderr": 0.03286745312567961, + "acc_norm": 0.600896860986547, + "acc_norm_stderr": 0.03286745312567961 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5572519083969466, + "acc_stderr": 0.04356447202665069, + "acc_norm": 
0.5572519083969466, + "acc_norm_stderr": 0.04356447202665069 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.034273086529999344, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.034273086529999344 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.496551724137931, + "acc_stderr": 0.041665675771015785, + "acc_norm": 0.496551724137931, + "acc_norm_stderr": 0.041665675771015785 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.04533838195929775, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.04533838195929775 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5126050420168067, + "acc_stderr": 0.032468167657521745, + "acc_norm": 0.5126050420168067, + "acc_norm_stderr": 0.032468167657521745 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5564102564102564, + "acc_stderr": 0.025189149894764215, + "acc_norm": 0.5564102564102564, + "acc_norm_stderr": 0.025189149894764215 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5277777777777778, + "acc_stderr": 0.048262172941398944, + "acc_norm": 0.5277777777777778, + "acc_norm_stderr": 0.048262172941398944 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4482758620689655, + "acc_stderr": 0.03499113137676744, + "acc_norm": 0.4482758620689655, + "acc_norm_stderr": 0.03499113137676744 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5806451612903226, + "acc_stderr": 
0.02807158890109185, + "acc_norm": 0.5806451612903226, + "acc_norm_stderr": 0.02807158890109185 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7905982905982906, + "acc_stderr": 0.02665569965392273, + "acc_norm": 0.7905982905982906, + "acc_norm_stderr": 0.02665569965392273 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5320754716981132, + "acc_stderr": 0.030709486992556545, + "acc_norm": 0.5320754716981132, + "acc_norm_stderr": 0.030709486992556545 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5636363636363636, + "acc_stderr": 0.04750185058907296, + "acc_norm": 0.5636363636363636, + "acc_norm_stderr": 0.04750185058907296 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.32222222222222224, + "acc_stderr": 0.028493465091028604, + "acc_norm": 0.32222222222222224, + "acc_norm_stderr": 0.028493465091028604 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.03780445850526733, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.03780445850526733 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6616915422885572, + "acc_stderr": 0.03345563070339191, + "acc_norm": 0.6616915422885572, + "acc_norm_stderr": 0.03345563070339191 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.48554913294797686, + "acc_stderr": 0.03810871630454764, + "acc_norm": 0.48554913294797686, + "acc_norm_stderr": 0.03810871630454764 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.35978835978835977, + "acc_stderr": 0.024718075944129277, + "acc_norm": 0.35978835978835977, + "acc_norm_stderr": 0.024718075944129277 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4305555555555556, + "acc_stderr": 0.041406856391115014, + "acc_norm": 0.4305555555555556, + "acc_norm_stderr": 0.041406856391115014 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + 
"harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.78, + "acc_stderr": 0.04163331998932264, + "acc_norm": 0.78, + "acc_norm_stderr": 0.04163331998932264 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5, + "acc_stderr": 0.026919095102908273, + "acc_norm": 0.5, + "acc_norm_stderr": 0.026919095102908273 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5398773006134969, + "acc_stderr": 0.03915857291436972, + "acc_norm": 0.5398773006134969, + "acc_norm_stderr": 0.03915857291436972 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5802469135802469, + "acc_stderr": 0.027460099557005128, + "acc_norm": 0.5802469135802469, + "acc_norm_stderr": 0.027460099557005128 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6373056994818653, + "acc_stderr": 0.034697137917043715, + "acc_norm": 0.6373056994818653, + "acc_norm_stderr": 0.034697137917043715 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.30701754385964913, + "acc_stderr": 0.0433913832257986, + "acc_norm": 0.30701754385964913, + "acc_norm_stderr": 0.0433913832257986 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6990825688073394, + "acc_stderr": 0.01966475136680211, + "acc_norm": 0.6990825688073394, + "acc_norm_stderr": 0.01966475136680211 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3253968253968254, + "acc_stderr": 0.041905964388711366, + "acc_norm": 0.3253968253968254, + "acc_norm_stderr": 0.041905964388711366 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5196078431372549, + "acc_stderr": 0.028607893699576073, + "acc_norm": 0.5196078431372549, + "acc_norm_stderr": 0.028607893699576073 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956911 + }, + 
"harness|ko_mmlu_international_law|5": { + "acc": 0.6611570247933884, + "acc_stderr": 0.04320767807536671, + "acc_norm": 0.6611570247933884, + "acc_norm_stderr": 0.04320767807536671 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5657894736842105, + "acc_stderr": 0.040335656678483205, + "acc_norm": 0.5657894736842105, + "acc_norm_stderr": 0.040335656678483205 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.46568627450980393, + "acc_stderr": 0.020180144843307293, + "acc_norm": 0.46568627450980393, + "acc_norm_stderr": 0.020180144843307293 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.40425531914893614, + "acc_stderr": 0.029275532159704725, + "acc_norm": 0.40425531914893614, + "acc_norm_stderr": 0.029275532159704725 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.04547960999764376, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.04547960999764376 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.39814814814814814, + "acc_stderr": 0.033384734032074016, + "acc_norm": 0.39814814814814814, + "acc_norm_stderr": 0.033384734032074016 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24581005586592178, + "acc_stderr": 0.014400296429225608, + "acc_norm": 0.24581005586592178, + "acc_norm_stderr": 0.014400296429225608 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.58, + "acc_stderr": 0.04960449637488583, + "acc_norm": 0.58, + "acc_norm_stderr": 0.04960449637488583 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4485294117647059, + "acc_stderr": 0.0302114796091216, + "acc_norm": 0.4485294117647059, + "acc_norm_stderr": 0.0302114796091216 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5795918367346938, + "acc_stderr": 0.031601069934496004, + "acc_norm": 
0.5795918367346938, + "acc_norm_stderr": 0.031601069934496004 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6075949367088608, + "acc_stderr": 0.03178471874564729, + "acc_norm": 0.6075949367088608, + "acc_norm_stderr": 0.03178471874564729 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3891786179921773, + "acc_stderr": 0.012452613934287022, + "acc_norm": 0.3891786179921773, + "acc_norm_stderr": 0.012452613934287022 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5049019607843137, + "acc_stderr": 0.035091433756067866, + "acc_norm": 0.5049019607843137, + "acc_norm_stderr": 0.035091433756067866 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5151515151515151, + "acc_stderr": 0.03902551007374449, + "acc_norm": 0.5151515151515151, + "acc_norm_stderr": 0.03902551007374449 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.5397796817625459, + "mc1_stderr": 0.017448017223960877, + "mc2": 0.646964625973436, + "mc2_stderr": 0.014905252003582121 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.45218417945690675, + "acc_stderr": 0.017111567130916785, + "acc_norm": 0.5112160566706021, + "acc_norm_stderr": 0.017186028469489287 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + 
"harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Deepnoid/deep-solar-v2.0.2", + "model_sha": 
"3a3d413e5fdbf6add6c9d79a994d0328665e46d3", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Deepnoid/deep-solar-v2.0.3/result_2024-03-14 01:02:05.json b/Deepnoid/deep-solar-v2.0.3/result_2024-03-14 01:02:05.json new file mode 100644 index 0000000000000000000000000000000000000000..18d4bf56361800239a1f6b4fb023607d76952228 --- /dev/null +++ b/Deepnoid/deep-solar-v2.0.3/result_2024-03-14 01:02:05.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.6399317406143344, + "acc_stderr": 0.014027516814585188, + "acc_norm": 0.7013651877133106, + "acc_norm_stderr": 0.013374078615068752 + }, + "harness|ko_hellaswag|10": { + "acc": 0.398725353515236, + "acc_stderr": 0.0048863535635718545, + "acc_norm": 0.5217088229436367, + "acc_norm_stderr": 0.004985076094464756 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6140350877192983, + "acc_stderr": 0.03733756969066164, + "acc_norm": 0.6140350877192983, + "acc_norm_stderr": 0.03733756969066164 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5922330097087378, + "acc_stderr": 0.048657775704107696, + "acc_norm": 0.5922330097087378, + "acc_norm_stderr": 0.048657775704107696 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6283524904214559, + "acc_stderr": 0.017280802522133185, + "acc_norm": 0.6283524904214559, + "acc_norm_stderr": 0.017280802522133185 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4, + "acc_stderr": 0.04232073695151589, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04232073695151589 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.43829787234042555, + "acc_stderr": 0.03243618636108102, + "acc_norm": 0.43829787234042555, + "acc_norm_stderr": 
0.03243618636108102 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.5240963855421686, + "acc_stderr": 0.03887971849597264, + "acc_norm": 0.5240963855421686, + "acc_norm_stderr": 0.03887971849597264 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.572347266881029, + "acc_stderr": 0.02809924077580957, + "acc_norm": 0.572347266881029, + "acc_norm_stderr": 0.02809924077580957 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.6053811659192825, + "acc_stderr": 0.03280400504755291, + "acc_norm": 0.6053811659192825, + "acc_norm_stderr": 0.03280400504755291 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5267175572519084, + "acc_stderr": 0.04379024936553894, + "acc_norm": 0.5267175572519084, + "acc_norm_stderr": 0.04379024936553894 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.034273086529999344, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.034273086529999344 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.503448275862069, + "acc_stderr": 0.04166567577101579, + "acc_norm": 0.503448275862069, + "acc_norm_stderr": 0.04166567577101579 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04690650298201943, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04690650298201943 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5252100840336135, + "acc_stderr": 0.03243718055137411, + "acc_norm": 0.5252100840336135, + "acc_norm_stderr": 0.03243718055137411 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5564102564102564, + "acc_stderr": 0.025189149894764215, + "acc_norm": 0.5564102564102564, + "acc_norm_stderr": 0.025189149894764215 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 
0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5277777777777778, + "acc_stderr": 0.048262172941398944, + "acc_norm": 0.5277777777777778, + "acc_norm_stderr": 0.048262172941398944 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4630541871921182, + "acc_stderr": 0.035083705204426656, + "acc_norm": 0.4630541871921182, + "acc_norm_stderr": 0.035083705204426656 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5806451612903226, + "acc_stderr": 0.028071588901091845, + "acc_norm": 0.5806451612903226, + "acc_norm_stderr": 0.028071588901091845 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7948717948717948, + "acc_stderr": 0.02645350805404032, + "acc_norm": 0.7948717948717948, + "acc_norm_stderr": 0.02645350805404032 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5207547169811321, + "acc_stderr": 0.030746349975723463, + "acc_norm": 0.5207547169811321, + "acc_norm_stderr": 0.030746349975723463 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5727272727272728, + "acc_stderr": 0.047381987035454834, + "acc_norm": 0.5727272727272728, + "acc_norm_stderr": 0.047381987035454834 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.028317533496066465, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.028317533496066465 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.03757949922943342, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.03757949922943342 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6616915422885572, + "acc_stderr": 0.03345563070339191, + "acc_norm": 0.6616915422885572, + "acc_norm_stderr": 0.03345563070339191 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4913294797687861, + "acc_stderr": 
0.038118909889404105, + "acc_norm": 0.4913294797687861, + "acc_norm_stderr": 0.038118909889404105 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3544973544973545, + "acc_stderr": 0.024636830602841997, + "acc_norm": 0.3544973544973545, + "acc_norm_stderr": 0.024636830602841997 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4513888888888889, + "acc_stderr": 0.04161402398403279, + "acc_norm": 0.4513888888888889, + "acc_norm_stderr": 0.04161402398403279 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.75, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.75, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5086705202312138, + "acc_stderr": 0.0269150473553698, + "acc_norm": 0.5086705202312138, + "acc_norm_stderr": 0.0269150473553698 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5644171779141104, + "acc_stderr": 0.03895632464138937, + "acc_norm": 0.5644171779141104, + "acc_norm_stderr": 0.03895632464138937 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5864197530864198, + "acc_stderr": 0.027402042040269966, + "acc_norm": 0.5864197530864198, + "acc_norm_stderr": 0.027402042040269966 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6476683937823834, + "acc_stderr": 0.034474782864143565, + "acc_norm": 0.6476683937823834, + "acc_norm_stderr": 0.034474782864143565 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2982456140350877, + "acc_stderr": 0.04303684033537316, + "acc_norm": 0.2982456140350877, + "acc_norm_stderr": 0.04303684033537316 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6935779816513762, 
+ "acc_stderr": 0.019765517220458523, + "acc_norm": 0.6935779816513762, + "acc_norm_stderr": 0.019765517220458523 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3412698412698413, + "acc_stderr": 0.04240799327574924, + "acc_norm": 0.3412698412698413, + "acc_norm_stderr": 0.04240799327574924 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5326797385620915, + "acc_stderr": 0.028568699752225875, + "acc_norm": 0.5326797385620915, + "acc_norm_stderr": 0.028568699752225875 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6611570247933884, + "acc_stderr": 0.04320767807536671, + "acc_norm": 0.6611570247933884, + "acc_norm_stderr": 0.04320767807536671 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5592105263157895, + "acc_stderr": 0.04040311062490436, + "acc_norm": 0.5592105263157895, + "acc_norm_stderr": 0.04040311062490436 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.46078431372549017, + "acc_stderr": 0.02016552331390791, + "acc_norm": 0.46078431372549017, + "acc_norm_stderr": 0.02016552331390791 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.4219858156028369, + "acc_stderr": 0.029462189233370583, + "acc_norm": 0.4219858156028369, + "acc_norm_stderr": 0.029462189233370583 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.33035714285714285, + "acc_stderr": 0.044642857142857116, + "acc_norm": 0.33035714285714285, + "acc_norm_stderr": 0.044642857142857116 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.39814814814814814, + "acc_stderr": 0.033384734032074016, + "acc_norm": 0.39814814814814814, + "acc_norm_stderr": 0.033384734032074016 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24804469273743016, + "acc_stderr": 0.014444157808261446, + "acc_norm": 0.24804469273743016, + "acc_norm_stderr": 0.014444157808261446 + }, + 
"harness|ko_mmlu_college_computer_science|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.58, + "acc_stderr": 0.04960449637488583, + "acc_norm": 0.58, + "acc_norm_stderr": 0.04960449637488583 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4485294117647059, + "acc_stderr": 0.030211479609121603, + "acc_norm": 0.4485294117647059, + "acc_norm_stderr": 0.030211479609121603 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5795918367346938, + "acc_stderr": 0.031601069934496004, + "acc_norm": 0.5795918367346938, + "acc_norm_stderr": 0.031601069934496004 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6160337552742616, + "acc_stderr": 0.03165867806410668, + "acc_norm": 0.6160337552742616, + "acc_norm_stderr": 0.03165867806410668 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3898305084745763, + "acc_stderr": 0.01245638661908259, + "acc_norm": 0.3898305084745763, + "acc_norm_stderr": 0.01245638661908259 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5147058823529411, + "acc_stderr": 0.03507793834791324, + "acc_norm": 0.5147058823529411, + "acc_norm_stderr": 0.03507793834791324 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5151515151515151, + "acc_stderr": 0.03902551007374449, + "acc_norm": 0.5151515151515151, + "acc_norm_stderr": 0.03902551007374449 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.5410036719706243, + "mc1_stderr": 0.0174445444476612, + "mc2": 0.6463838547258014, + "mc2_stderr": 0.014895266557719184 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.44510035419126326, + "acc_stderr": 0.017086417431005464, + "acc_norm": 0.4887839433293979, + "acc_norm_stderr": 0.017186028469489287 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + 
"harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + 
"harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Deepnoid/deep-solar-v2.0.3", + "model_sha": "4b6d2432b8447af0fbce21df215925a0ac985cdc", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Deepnoid/deep-solar-v2.0.7/result_2024-03-21 02:28:54.json b/Deepnoid/deep-solar-v2.0.7/result_2024-03-21 02:28:54.json new file mode 100644 index 0000000000000000000000000000000000000000..203415b2010b3e7130931e557976ea887478f4a5 --- /dev/null +++ b/Deepnoid/deep-solar-v2.0.7/result_2024-03-21 02:28:54.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.6749146757679181, + "acc_stderr": 0.013688147309729129, + "acc_norm": 0.7303754266211604, + "acc_norm_stderr": 0.012968040686869148 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4342760406293567, + "acc_stderr": 0.004946485466544623, + "acc_norm": 0.5824536944831706, + "acc_norm_stderr": 0.004921466591335048 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.03615507630310935, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.03615507630310935 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.7281553398058253, + "acc_stderr": 0.044052680241409216, + "acc_norm": 0.7281553398058253, + "acc_norm_stderr": 0.044052680241409216 + }, + 
"harness|ko_mmlu_miscellaneous|5": { + "acc": 0.7075351213282248, + "acc_stderr": 0.016267000684598645, + "acc_norm": 0.7075351213282248, + "acc_norm_stderr": 0.016267000684598645 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.042925967182569816, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.042925967182569816 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.5191489361702127, + "acc_stderr": 0.032662042990646796, + "acc_norm": 0.5191489361702127, + "acc_norm_stderr": 0.032662042990646796 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.5481927710843374, + "acc_stderr": 0.03874371556587952, + "acc_norm": 0.5481927710843374, + "acc_norm_stderr": 0.03874371556587952 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6141479099678456, + "acc_stderr": 0.027648149599751464, + "acc_norm": 0.6141479099678456, + "acc_norm_stderr": 0.027648149599751464 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5829596412556054, + "acc_stderr": 0.03309266936071721, + "acc_norm": 0.5829596412556054, + "acc_norm_stderr": 0.03309266936071721 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6335877862595419, + "acc_stderr": 0.042258754519696386, + "acc_norm": 0.6335877862595419, + "acc_norm_stderr": 0.042258754519696386 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7323232323232324, + "acc_stderr": 0.03154449888270286, + "acc_norm": 0.7323232323232324, + "acc_norm_stderr": 0.03154449888270286 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5103448275862069, + "acc_stderr": 0.04165774775728763, + "acc_norm": 0.5103448275862069, + "acc_norm_stderr": 0.04165774775728763 
+ }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3627450980392157, + "acc_stderr": 0.047840607041056527, + "acc_norm": 0.3627450980392157, + "acc_norm_stderr": 0.047840607041056527 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6176470588235294, + "acc_stderr": 0.03156663099215415, + "acc_norm": 0.6176470588235294, + "acc_norm_stderr": 0.03156663099215415 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.6, + "acc_stderr": 0.02483881198803317, + "acc_norm": 0.6, + "acc_norm_stderr": 0.02483881198803317 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.58, + "acc_stderr": 0.04960449637488583, + "acc_norm": 0.58, + "acc_norm_stderr": 0.04960449637488583 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5740740740740741, + "acc_stderr": 0.0478034362693679, + "acc_norm": 0.5740740740740741, + "acc_norm_stderr": 0.0478034362693679 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4236453201970443, + "acc_stderr": 0.034767257476490385, + "acc_norm": 0.4236453201970443, + "acc_norm_stderr": 0.034767257476490385 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.632258064516129, + "acc_stderr": 0.02743086657997347, + "acc_norm": 0.632258064516129, + "acc_norm_stderr": 0.02743086657997347 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7606837606837606, + "acc_stderr": 0.027951826808924333, + "acc_norm": 0.7606837606837606, + "acc_norm_stderr": 0.027951826808924333 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.6150943396226415, + "acc_stderr": 0.02994649856769995, + "acc_norm": 0.6150943396226415, + "acc_norm_stderr": 0.02994649856769995 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5818181818181818, + "acc_stderr": 0.04724577405731572, + "acc_norm": 0.5818181818181818, + "acc_norm_stderr": 0.04724577405731572 + 
}, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.31851851851851853, + "acc_stderr": 0.028406533090608466, + "acc_norm": 0.31851851851851853, + "acc_norm_stderr": 0.028406533090608466 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3708609271523179, + "acc_stderr": 0.03943966699183629, + "acc_norm": 0.3708609271523179, + "acc_norm_stderr": 0.03943966699183629 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7164179104477612, + "acc_stderr": 0.03187187537919796, + "acc_norm": 0.7164179104477612, + "acc_norm_stderr": 0.03187187537919796 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4913294797687861, + "acc_stderr": 0.038118909889404105, + "acc_norm": 0.4913294797687861, + "acc_norm_stderr": 0.038118909889404105 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.41005291005291006, + "acc_stderr": 0.02533120243894442, + "acc_norm": 0.41005291005291006, + "acc_norm_stderr": 0.02533120243894442 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5486111111111112, + "acc_stderr": 0.04161402398403279, + "acc_norm": 0.5486111111111112, + "acc_norm_stderr": 0.04161402398403279 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.79, + "acc_stderr": 0.04093601807403326, + "acc_norm": 0.79, + "acc_norm_stderr": 0.04093601807403326 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5520231213872833, + "acc_stderr": 0.02677299065336182, + "acc_norm": 0.5520231213872833, + "acc_norm_stderr": 0.02677299065336182 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5337423312883436, + "acc_stderr": 0.039194155450484096, + "acc_norm": 0.5337423312883436, + "acc_norm_stderr": 0.039194155450484096 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5925925925925926, + "acc_stderr": 0.027339546640662734, + "acc_norm": 0.5925925925925926, + 
"acc_norm_stderr": 0.027339546640662734 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6839378238341969, + "acc_stderr": 0.033553973696861736, + "acc_norm": 0.6839378238341969, + "acc_norm_stderr": 0.033553973696861736 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.37719298245614036, + "acc_stderr": 0.04559522141958215, + "acc_norm": 0.37719298245614036, + "acc_norm_stderr": 0.04559522141958215 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.7651376146788991, + "acc_stderr": 0.018175110510343602, + "acc_norm": 0.7651376146788991, + "acc_norm_stderr": 0.018175110510343602 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.36507936507936506, + "acc_stderr": 0.04306241259127154, + "acc_norm": 0.36507936507936506, + "acc_norm_stderr": 0.04306241259127154 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.6013071895424836, + "acc_stderr": 0.02803609227389176, + "acc_norm": 0.6013071895424836, + "acc_norm_stderr": 0.02803609227389176 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6694214876033058, + "acc_stderr": 0.042943408452120926, + "acc_norm": 0.6694214876033058, + "acc_norm_stderr": 0.042943408452120926 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5986842105263158, + "acc_stderr": 0.039889037033362836, + "acc_norm": 0.5986842105263158, + "acc_norm_stderr": 0.039889037033362836 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.48856209150326796, + "acc_stderr": 0.02022254151561086, + "acc_norm": 0.48856209150326796, + "acc_norm_stderr": 0.02022254151561086 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3900709219858156, + "acc_stderr": 
0.029097675599463926, + "acc_norm": 0.3900709219858156, + "acc_norm_stderr": 0.029097675599463926 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4375, + "acc_stderr": 0.04708567521880525, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.04708567521880525 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5092592592592593, + "acc_stderr": 0.034093869469927006, + "acc_norm": 0.5092592592592593, + "acc_norm_stderr": 0.034093869469927006 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.26927374301675977, + "acc_stderr": 0.014835616582882617, + "acc_norm": 0.26927374301675977, + "acc_norm_stderr": 0.014835616582882617 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.41, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.64, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.64, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5661764705882353, + "acc_stderr": 0.03010563657001663, + "acc_norm": 0.5661764705882353, + "acc_norm_stderr": 0.03010563657001663 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5795918367346938, + "acc_stderr": 0.031601069934496004, + "acc_norm": 0.5795918367346938, + "acc_norm_stderr": 0.031601069934496004 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.679324894514768, + "acc_stderr": 0.03038193194999041, + "acc_norm": 0.679324894514768, + "acc_norm_stderr": 0.03038193194999041 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3970013037809648, + "acc_stderr": 0.012496346982909556, + "acc_norm": 0.3970013037809648, + "acc_norm_stderr": 0.012496346982909556 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5931372549019608, + "acc_stderr": 0.03447891136353382, + "acc_norm": 0.5931372549019608, + "acc_norm_stderr": 0.03447891136353382 + }, + 
"harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.593939393939394, + "acc_stderr": 0.03834816355401181, + "acc_norm": 0.593939393939394, + "acc_norm_stderr": 0.03834816355401181 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.49938800489596086, + "mc1_stderr": 0.01750348793889251, + "mc2": 0.6218423545848288, + "mc2_stderr": 0.014757889508056288 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4344746162927981, + "acc_stderr": 0.017042098620824935, + "acc_norm": 0.4781582054309327, + "acc_norm_stderr": 0.017173944474294375 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + 
"harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Deepnoid/deep-solar-v2.0.7", + "model_sha": "e3c320ee6a4e5b554d34e1e9b1c299e96a6fecf8", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Deepnoid/deep-solar-v3.0/result_2024-03-19 01:35:25.json b/Deepnoid/deep-solar-v3.0/result_2024-03-19 01:35:25.json new file mode 100644 index 0000000000000000000000000000000000000000..5ed21bb59384fbf573a163e38c0e1187ff9fd980 --- /dev/null +++ b/Deepnoid/deep-solar-v3.0/result_2024-03-19 01:35:25.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.6510238907849829, 
+ "acc_stderr": 0.013928933461382501, + "acc_norm": 0.7005119453924915, + "acc_norm_stderr": 0.013385021637313565 + }, + "harness|ko_hellaswag|10": { + "acc": 0.41366261700856405, + "acc_stderr": 0.0049148293849834756, + "acc_norm": 0.5400318661621191, + "acc_norm_stderr": 0.004973762948302803 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6140350877192983, + "acc_stderr": 0.03733756969066164, + "acc_norm": 0.6140350877192983, + "acc_norm_stderr": 0.03733756969066164 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6213592233009708, + "acc_stderr": 0.04802694698258974, + "acc_norm": 0.6213592233009708, + "acc_norm_stderr": 0.04802694698258974 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6832694763729247, + "acc_stderr": 0.016635566427712585, + "acc_norm": 0.6832694763729247, + "acc_norm_stderr": 0.016635566427712585 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.043163785995113245, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.043163785995113245 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4553191489361702, + "acc_stderr": 0.03255525359340355, + "acc_norm": 0.4553191489361702, + "acc_norm_stderr": 0.03255525359340355 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.5060240963855421, + "acc_stderr": 0.038922121953330446, + "acc_norm": 0.5060240963855421, + "acc_norm_stderr": 0.038922121953330446 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6045016077170418, + "acc_stderr": 0.027770918531427834, + "acc_norm": 0.6045016077170418, + "acc_norm_stderr": 0.027770918531427834 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.57847533632287, + "acc_stderr": 0.03314190222110658, + "acc_norm": 0.57847533632287, + "acc_norm_stderr": 0.03314190222110658 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 
0.5877862595419847, + "acc_stderr": 0.04317171194870255, + "acc_norm": 0.5877862595419847, + "acc_norm_stderr": 0.04317171194870255 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6616161616161617, + "acc_stderr": 0.03371124142626303, + "acc_norm": 0.6616161616161617, + "acc_norm_stderr": 0.03371124142626303 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.496551724137931, + "acc_stderr": 0.041665675771015785, + "acc_norm": 0.496551724137931, + "acc_norm_stderr": 0.041665675771015785 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04690650298201943, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04690650298201943 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6302521008403361, + "acc_stderr": 0.03135709599613591, + "acc_norm": 0.6302521008403361, + "acc_norm_stderr": 0.03135709599613591 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5615384615384615, + "acc_stderr": 0.02515826601686861, + "acc_norm": 0.5615384615384615, + "acc_norm_stderr": 0.02515826601686861 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.63, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.63, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5740740740740741, + "acc_stderr": 0.0478034362693679, + "acc_norm": 0.5740740740740741, + "acc_norm_stderr": 0.0478034362693679 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4729064039408867, + "acc_stderr": 0.03512819077876106, + "acc_norm": 0.4729064039408867, + "acc_norm_stderr": 0.03512819077876106 + }, + 
"harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5806451612903226, + "acc_stderr": 0.02807158890109183, + "acc_norm": 0.5806451612903226, + "acc_norm_stderr": 0.02807158890109183 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7991452991452992, + "acc_stderr": 0.026246772946890474, + "acc_norm": 0.7991452991452992, + "acc_norm_stderr": 0.026246772946890474 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.539622641509434, + "acc_stderr": 0.03067609659938917, + "acc_norm": 0.539622641509434, + "acc_norm_stderr": 0.03067609659938917 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6181818181818182, + "acc_stderr": 0.046534298079135075, + "acc_norm": 0.6181818181818182, + "acc_norm_stderr": 0.046534298079135075 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.34074074074074073, + "acc_stderr": 0.02889774874113114, + "acc_norm": 0.34074074074074073, + "acc_norm_stderr": 0.02889774874113114 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3708609271523179, + "acc_stderr": 0.03943966699183629, + "acc_norm": 0.3708609271523179, + "acc_norm_stderr": 0.03943966699183629 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7014925373134329, + "acc_stderr": 0.03235743789355043, + "acc_norm": 0.7014925373134329, + "acc_norm_stderr": 0.03235743789355043 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.49710982658959535, + "acc_stderr": 0.038124005659748335, + "acc_norm": 0.49710982658959535, + "acc_norm_stderr": 0.038124005659748335 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.41005291005291006, + "acc_stderr": 0.025331202438944423, + "acc_norm": 0.41005291005291006, + "acc_norm_stderr": 0.025331202438944423 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4930555555555556, + "acc_stderr": 0.04180806750294938, + "acc_norm": 0.4930555555555556, + "acc_norm_stderr": 0.04180806750294938 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + 
"acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.81, + "acc_stderr": 0.03942772444036623, + "acc_norm": 0.81, + "acc_norm_stderr": 0.03942772444036623 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5867052023121387, + "acc_stderr": 0.026511261369409244, + "acc_norm": 0.5867052023121387, + "acc_norm_stderr": 0.026511261369409244 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.588957055214724, + "acc_stderr": 0.038656978537853624, + "acc_norm": 0.588957055214724, + "acc_norm_stderr": 0.038656978537853624 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.595679012345679, + "acc_stderr": 0.027306625297327677, + "acc_norm": 0.595679012345679, + "acc_norm_stderr": 0.027306625297327677 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6787564766839378, + "acc_stderr": 0.033699508685490674, + "acc_norm": 0.6787564766839378, + "acc_norm_stderr": 0.033699508685490674 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.42105263157894735, + "acc_stderr": 0.04644602091222317, + "acc_norm": 0.42105263157894735, + "acc_norm_stderr": 0.04644602091222317 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.7247706422018348, + "acc_stderr": 0.019149093743155196, + "acc_norm": 0.7247706422018348, + "acc_norm_stderr": 0.019149093743155196 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.04360314860077459, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.04360314860077459 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5718954248366013, + "acc_stderr": 0.028332397483664274, + "acc_norm": 0.5718954248366013, + "acc_norm_stderr": 0.028332397483664274 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.53, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.53, + 
"acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7520661157024794, + "acc_stderr": 0.03941897526516304, + "acc_norm": 0.7520661157024794, + "acc_norm_stderr": 0.03941897526516304 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5592105263157895, + "acc_stderr": 0.04040311062490435, + "acc_norm": 0.5592105263157895, + "acc_norm_stderr": 0.04040311062490435 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5081699346405228, + "acc_stderr": 0.02022513434305728, + "acc_norm": 0.5081699346405228, + "acc_norm_stderr": 0.02022513434305728 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.375886524822695, + "acc_stderr": 0.02889395541211589, + "acc_norm": 0.375886524822695, + "acc_norm_stderr": 0.02889395541211589 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.04547960999764376, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.04547960999764376 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.03407632093854051, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.03407632093854051 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27932960893854747, + "acc_stderr": 0.015005762446786157, + "acc_norm": 0.27932960893854747, + "acc_norm_stderr": 0.015005762446786157 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.63, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.63, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5257352941176471, + "acc_stderr": 0.030332578094555026, + "acc_norm": 0.5257352941176471, + "acc_norm_stderr": 0.030332578094555026 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6, + "acc_stderr": 
0.03136250240935894, + "acc_norm": 0.6, + "acc_norm_stderr": 0.03136250240935894 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6751054852320675, + "acc_stderr": 0.030486039389105293, + "acc_norm": 0.6751054852320675, + "acc_norm_stderr": 0.030486039389105293 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.4256844850065189, + "acc_stderr": 0.01262839355181194, + "acc_norm": 0.4256844850065189, + "acc_norm_stderr": 0.01262839355181194 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6029411764705882, + "acc_stderr": 0.03434131164719129, + "acc_norm": 0.6029411764705882, + "acc_norm_stderr": 0.03434131164719129 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6060606060606061, + "acc_stderr": 0.0381549430868893, + "acc_norm": 0.6060606060606061, + "acc_norm_stderr": 0.0381549430868893 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.5667074663402693, + "mc1_stderr": 0.017347024450107492, + "mc2": 0.6802475288433785, + "mc2_stderr": 0.014647532570120409 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4510035419126328, + "acc_stderr": 0.01710761885954934, + "acc_norm": 0.4946871310507674, + "acc_norm_stderr": 0.017189383627229684 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + 
"harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Deepnoid/deep-solar-v3.0", + "model_sha": 
"24c9e5607891194ceb7512534666d354c899152a", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Deepnoid/mergekit_v2/result_2024-03-12 05:18:21.json b/Deepnoid/mergekit_v2/result_2024-03-12 05:18:21.json new file mode 100644 index 0000000000000000000000000000000000000000..220ab8f9b6ab5b92dd5809320279f10bc5fd6c7c --- /dev/null +++ b/Deepnoid/mergekit_v2/result_2024-03-12 05:18:21.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.6399317406143344, + "acc_stderr": 0.014027516814585188, + "acc_norm": 0.7005119453924915, + "acc_norm_stderr": 0.013385021637313565 + }, + "harness|ko_hellaswag|10": { + "acc": 0.39832702648874724, + "acc_stderr": 0.004885529674958325, + "acc_norm": 0.5230033857797252, + "acc_norm_stderr": 0.004984497871025246 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5906432748538012, + "acc_stderr": 0.037712831076265434, + "acc_norm": 0.5906432748538012, + "acc_norm_stderr": 0.037712831076265434 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5825242718446602, + "acc_stderr": 0.048828405482122375, + "acc_norm": 0.5825242718446602, + "acc_norm_stderr": 0.048828405482122375 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6296296296296297, + "acc_stderr": 0.017268607560005794, + "acc_norm": 0.6296296296296297, + "acc_norm_stderr": 0.017268607560005794 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3925925925925926, + "acc_stderr": 0.04218506215368879, + "acc_norm": 0.3925925925925926, + "acc_norm_stderr": 0.04218506215368879 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.43829787234042555, + "acc_stderr": 0.03243618636108102, + "acc_norm": 0.43829787234042555, + 
"acc_norm_stderr": 0.03243618636108102 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.5240963855421686, + "acc_stderr": 0.03887971849597264, + "acc_norm": 0.5240963855421686, + "acc_norm_stderr": 0.03887971849597264 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5787781350482315, + "acc_stderr": 0.02804339985821063, + "acc_norm": 0.5787781350482315, + "acc_norm_stderr": 0.02804339985821063 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.600896860986547, + "acc_stderr": 0.03286745312567961, + "acc_norm": 0.600896860986547, + "acc_norm_stderr": 0.03286745312567961 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5572519083969466, + "acc_stderr": 0.04356447202665069, + "acc_norm": 0.5572519083969466, + "acc_norm_stderr": 0.04356447202665069 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.034273086529999344, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.034273086529999344 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.496551724137931, + "acc_stderr": 0.041665675771015785, + "acc_norm": 0.496551724137931, + "acc_norm_stderr": 0.041665675771015785 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.04533838195929775, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.04533838195929775 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5168067226890757, + "acc_stderr": 0.03246013680375308, + "acc_norm": 0.5168067226890757, + "acc_norm_stderr": 0.03246013680375308 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5538461538461539, + "acc_stderr": 0.025203571773028323, + "acc_norm": 0.5538461538461539, + "acc_norm_stderr": 0.025203571773028323 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.61, + "acc_stderr": 
0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5370370370370371, + "acc_stderr": 0.04820403072760628, + "acc_norm": 0.5370370370370371, + "acc_norm_stderr": 0.04820403072760628 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4482758620689655, + "acc_stderr": 0.03499113137676744, + "acc_norm": 0.4482758620689655, + "acc_norm_stderr": 0.03499113137676744 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5838709677419355, + "acc_stderr": 0.02804098138076154, + "acc_norm": 0.5838709677419355, + "acc_norm_stderr": 0.02804098138076154 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7863247863247863, + "acc_stderr": 0.02685345037700917, + "acc_norm": 0.7863247863247863, + "acc_norm_stderr": 0.02685345037700917 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5320754716981132, + "acc_stderr": 0.030709486992556545, + "acc_norm": 0.5320754716981132, + "acc_norm_stderr": 0.030709486992556545 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5545454545454546, + "acc_stderr": 0.047605488214603246, + "acc_norm": 0.5545454545454546, + "acc_norm_stderr": 0.047605488214603246 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.028317533496066465, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.028317533496066465 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.03780445850526733, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.03780445850526733 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6616915422885572, + "acc_stderr": 0.03345563070339191, + "acc_norm": 0.6616915422885572, + "acc_norm_stderr": 0.03345563070339191 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 
0.48554913294797686, + "acc_stderr": 0.03810871630454764, + "acc_norm": 0.48554913294797686, + "acc_norm_stderr": 0.03810871630454764 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.35978835978835977, + "acc_stderr": 0.024718075944129277, + "acc_norm": 0.35978835978835977, + "acc_norm_stderr": 0.024718075944129277 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4305555555555556, + "acc_stderr": 0.041406856391115014, + "acc_norm": 0.4305555555555556, + "acc_norm_stderr": 0.041406856391115014 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.78, + "acc_stderr": 0.04163331998932264, + "acc_norm": 0.78, + "acc_norm_stderr": 0.04163331998932264 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5028901734104047, + "acc_stderr": 0.026918645383239015, + "acc_norm": 0.5028901734104047, + "acc_norm_stderr": 0.026918645383239015 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5398773006134969, + "acc_stderr": 0.03915857291436972, + "acc_norm": 0.5398773006134969, + "acc_norm_stderr": 0.03915857291436972 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5771604938271605, + "acc_stderr": 0.027487472980871595, + "acc_norm": 0.5771604938271605, + "acc_norm_stderr": 0.027487472980871595 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6373056994818653, + "acc_stderr": 0.034697137917043715, + "acc_norm": 0.6373056994818653, + "acc_norm_stderr": 0.034697137917043715 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.30701754385964913, + "acc_stderr": 0.0433913832257986, + "acc_norm": 0.30701754385964913, + "acc_norm_stderr": 0.0433913832257986 + }, + 
"harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.7027522935779816, + "acc_stderr": 0.019595707224643537, + "acc_norm": 0.7027522935779816, + "acc_norm_stderr": 0.019595707224643537 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3253968253968254, + "acc_stderr": 0.041905964388711366, + "acc_norm": 0.3253968253968254, + "acc_norm_stderr": 0.041905964388711366 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5196078431372549, + "acc_stderr": 0.028607893699576073, + "acc_norm": 0.5196078431372549, + "acc_norm_stderr": 0.028607893699576073 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6611570247933884, + "acc_stderr": 0.04320767807536671, + "acc_norm": 0.6611570247933884, + "acc_norm_stderr": 0.04320767807536671 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5723684210526315, + "acc_stderr": 0.04026097083296563, + "acc_norm": 0.5723684210526315, + "acc_norm_stderr": 0.04026097083296563 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.46405228758169936, + "acc_stderr": 0.020175488765484043, + "acc_norm": 0.46405228758169936, + "acc_norm_stderr": 0.020175488765484043 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.4078014184397163, + "acc_stderr": 0.029316011776343562, + "acc_norm": 0.4078014184397163, + "acc_norm_stderr": 0.029316011776343562 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.04547960999764376, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.04547960999764376 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.39814814814814814, + "acc_stderr": 0.033384734032074016, + "acc_norm": 0.39814814814814814, + "acc_norm_stderr": 0.033384734032074016 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2446927374301676, + "acc_stderr": 0.014378169884098424, + "acc_norm": 
0.2446927374301676, + "acc_norm_stderr": 0.014378169884098424 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.57, + "acc_stderr": 0.0497569851956243, + "acc_norm": 0.57, + "acc_norm_stderr": 0.0497569851956243 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4485294117647059, + "acc_stderr": 0.0302114796091216, + "acc_norm": 0.4485294117647059, + "acc_norm_stderr": 0.0302114796091216 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5795918367346938, + "acc_stderr": 0.031601069934496004, + "acc_norm": 0.5795918367346938, + "acc_norm_stderr": 0.031601069934496004 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6075949367088608, + "acc_stderr": 0.03178471874564729, + "acc_norm": 0.6075949367088608, + "acc_norm_stderr": 0.03178471874564729 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3878748370273794, + "acc_stderr": 0.01244499830967564, + "acc_norm": 0.3878748370273794, + "acc_norm_stderr": 0.01244499830967564 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5098039215686274, + "acc_stderr": 0.03508637358630573, + "acc_norm": 0.5098039215686274, + "acc_norm_stderr": 0.03508637358630573 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5151515151515151, + "acc_stderr": 0.03902551007374449, + "acc_norm": 0.5151515151515151, + "acc_norm_stderr": 0.03902551007374449 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.5397796817625459, + "mc1_stderr": 0.017448017223960877, + "mc2": 0.6468351446228312, + "mc2_stderr": 0.01490520219933525 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4498229043683589, + "acc_stderr": 0.017103573343825708, + "acc_norm": 0.5100354191263282, + "acc_norm_stderr": 0.01718689128689405 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + 
"harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + 
"harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Deepnoid/mergekit_v2", + "model_sha": "e417048c933b8a7df6431649e5a5236eca7f61ec", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Dongwookss/zephyr_tuning_v1/result_2024-06-18 02:38:53.json b/Dongwookss/zephyr_tuning_v1/result_2024-06-18 02:38:53.json new file mode 100644 index 0000000000000000000000000000000000000000..5cdac81ccf5ee9b61e8a35a0cb9279d7e7251de8 --- /dev/null +++ b/Dongwookss/zephyr_tuning_v1/result_2024-06-18 02:38:53.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3122866894197952, + "acc_stderr": 0.013542598541688065, + "acc_norm": 0.3626279863481229, + "acc_norm_stderr": 0.014049106564955017 + }, + "harness|ko_hellaswag|10": { + "acc": 0.34644493128858794, + "acc_stderr": 0.004748645133281563, + "acc_norm": 0.4420434176458873, + "acc_norm_stderr": 0.00495614704610896 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4093567251461988, + "acc_stderr": 0.03771283107626544, + "acc_norm": 0.4093567251461988, + "acc_norm_stderr": 0.03771283107626544 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5728155339805825, + "acc_stderr": 0.04897957737781168, + "acc_norm": 
0.5728155339805825, + "acc_norm_stderr": 0.04897957737781168 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.43039591315453385, + "acc_stderr": 0.017705868776292377, + "acc_norm": 0.43039591315453385, + "acc_norm_stderr": 0.017705868776292377 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34074074074074073, + "acc_stderr": 0.040943762699967946, + "acc_norm": 0.34074074074074073, + "acc_norm_stderr": 0.040943762699967946 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.32340425531914896, + "acc_stderr": 0.030579442773610337, + "acc_norm": 0.32340425531914896, + "acc_norm_stderr": 0.030579442773610337 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3674698795180723, + "acc_stderr": 0.03753267402120574, + "acc_norm": 0.3674698795180723, + "acc_norm_stderr": 0.03753267402120574 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.43729903536977494, + "acc_stderr": 0.028173917761762878, + "acc_norm": 0.43729903536977494, + "acc_norm_stderr": 0.028173917761762878 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3721973094170404, + "acc_stderr": 0.03244305283008732, + "acc_norm": 0.3721973094170404, + "acc_norm_stderr": 0.03244305283008732 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4580152671755725, + "acc_stderr": 0.04369802690578756, + "acc_norm": 0.4580152671755725, + "acc_norm_stderr": 0.04369802690578756 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5252525252525253, + "acc_stderr": 0.03557806245087314, + "acc_norm": 0.5252525252525253, + "acc_norm_stderr": 0.03557806245087314 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4206896551724138, + "acc_stderr": 0.0411391498118926, + 
"acc_norm": 0.4206896551724138, + "acc_norm_stderr": 0.0411391498118926 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.04533838195929776, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.04533838195929776 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4957983193277311, + "acc_stderr": 0.03247734334448111, + "acc_norm": 0.4957983193277311, + "acc_norm_stderr": 0.03247734334448111 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.45384615384615384, + "acc_stderr": 0.02524277098712617, + "acc_norm": 0.45384615384615384, + "acc_norm_stderr": 0.02524277098712617 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39901477832512317, + "acc_stderr": 0.03445487686264715, + "acc_norm": 0.39901477832512317, + "acc_norm_stderr": 0.03445487686264715 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.44516129032258067, + "acc_stderr": 0.028272410186214906, + "acc_norm": 0.44516129032258067, + "acc_norm_stderr": 0.028272410186214906 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6324786324786325, + "acc_stderr": 0.031585391577456365, + "acc_norm": 0.6324786324786325, + "acc_norm_stderr": 0.031585391577456365 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4188679245283019, + "acc_stderr": 0.030365050829115215, + "acc_norm": 0.4188679245283019, + "acc_norm_stderr": 0.030365050829115215 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 
0.45454545454545453, + "acc_stderr": 0.04769300568972743, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.04769300568972743 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.02874204090394849, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.02874204090394849 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2781456953642384, + "acc_stderr": 0.036586032627637426, + "acc_norm": 0.2781456953642384, + "acc_norm_stderr": 0.036586032627637426 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5124378109452736, + "acc_stderr": 0.035344398485395806, + "acc_norm": 0.5124378109452736, + "acc_norm_stderr": 0.035344398485395806 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3872832369942196, + "acc_stderr": 0.03714325906302065, + "acc_norm": 0.3872832369942196, + "acc_norm_stderr": 0.03714325906302065 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.36507936507936506, + "acc_stderr": 0.024796060602699968, + "acc_norm": 0.36507936507936506, + "acc_norm_stderr": 0.024796060602699968 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2916666666666667, + "acc_stderr": 0.03800968060554859, + "acc_norm": 0.2916666666666667, + "acc_norm_stderr": 0.03800968060554859 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.51, + "acc_stderr": 0.050241839379569095, + "acc_norm": 0.51, + "acc_norm_stderr": 0.050241839379569095 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4277456647398844, + "acc_stderr": 0.026636539741116082, + "acc_norm": 0.4277456647398844, + "acc_norm_stderr": 0.026636539741116082 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4294478527607362, + "acc_stderr": 0.03889066619112722, + "acc_norm": 0.4294478527607362, + "acc_norm_stderr": 0.03889066619112722 + }, + 
"harness|ko_mmlu_prehistory|5": { + "acc": 0.4382716049382716, + "acc_stderr": 0.027607914087400466, + "acc_norm": 0.4382716049382716, + "acc_norm_stderr": 0.027607914087400466 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.48704663212435234, + "acc_stderr": 0.03607228061047749, + "acc_norm": 0.48704663212435234, + "acc_norm_stderr": 0.03607228061047749 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2982456140350877, + "acc_stderr": 0.04303684033537317, + "acc_norm": 0.2982456140350877, + "acc_norm_stderr": 0.04303684033537317 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.47155963302752296, + "acc_stderr": 0.02140261569734804, + "acc_norm": 0.47155963302752296, + "acc_norm_stderr": 0.02140261569734804 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.373015873015873, + "acc_stderr": 0.04325506042017086, + "acc_norm": 0.373015873015873, + "acc_norm_stderr": 0.04325506042017086 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.47058823529411764, + "acc_stderr": 0.028580341065138282, + "acc_norm": 0.47058823529411764, + "acc_norm_stderr": 0.028580341065138282 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5785123966942148, + "acc_stderr": 0.04507732278775088, + "acc_norm": 0.5785123966942148, + "acc_norm_stderr": 0.04507732278775088 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4407894736842105, + "acc_stderr": 0.040403110624904356, + "acc_norm": 0.4407894736842105, + "acc_norm_stderr": 0.040403110624904356 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.018771683893528183, + "acc_norm": 0.3137254901960784, + 
"acc_norm_stderr": 0.018771683893528183 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.29432624113475175, + "acc_stderr": 0.027187127011503803, + "acc_norm": 0.29432624113475175, + "acc_norm_stderr": 0.027187127011503803 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.32142857142857145, + "acc_stderr": 0.044328040552915206, + "acc_norm": 0.32142857142857145, + "acc_norm_stderr": 0.044328040552915206 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4351851851851852, + "acc_stderr": 0.033812000056435254, + "acc_norm": 0.4351851851851852, + "acc_norm_stderr": 0.033812000056435254 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.3195530726256983, + "acc_stderr": 0.015595520294147413, + "acc_norm": 0.3195530726256983, + "acc_norm_stderr": 0.015595520294147413 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.41, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.57, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.57, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.43014705882352944, + "acc_stderr": 0.030074971917302875, + "acc_norm": 0.43014705882352944, + "acc_norm_stderr": 0.030074971917302875 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.46938775510204084, + "acc_stderr": 0.031949171367580624, + "acc_norm": 0.46938775510204084, + "acc_norm_stderr": 0.031949171367580624 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.4472573839662447, + "acc_stderr": 0.03236564251614192, + "acc_norm": 0.4472573839662447, + "acc_norm_stderr": 0.03236564251614192 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2842242503259452, + "acc_stderr": 0.011519880596516076, + "acc_norm": 0.2842242503259452, + "acc_norm_stderr": 0.011519880596516076 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 
0.28921568627450983, + "acc_stderr": 0.03182231867647553, + "acc_norm": 0.28921568627450983, + "acc_norm_stderr": 0.03182231867647553 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.296969696969697, + "acc_stderr": 0.0356796977226805, + "acc_norm": 0.296969696969697, + "acc_norm_stderr": 0.0356796977226805 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2876376988984088, + "mc1_stderr": 0.015846315101394816, + "mc2": 0.4650159734520416, + "mc2_stderr": 0.015707129691814502 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3530106257378985, + "acc_stderr": 0.016430745982427136, + "acc_norm": 0.3624557260920897, + "acc_norm_stderr": 0.01652713124045371 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + 
"harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Dongwookss/zephyr_tuning_v1", + "model_sha": "0757d3285a25a82393dceb2cf9dc35b57e2217fe", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/DooDooHyun/AIFT-42dot_LLM-PLM-1.3B-ao-instruct-all-v0.54/result_2024-01-21 09:29:31.json b/DooDooHyun/AIFT-42dot_LLM-PLM-1.3B-ao-instruct-all-v0.54/result_2024-01-21 09:29:31.json new file mode 100644 index 
0000000000000000000000000000000000000000..646329da0f715a4fbdd0625058e50a9cead5d65b --- /dev/null +++ b/DooDooHyun/AIFT-42dot_LLM-PLM-1.3B-ao-instruct-all-v0.54/result_2024-01-21 09:29:31.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2721843003412969, + "acc_stderr": 0.013006600406423707, + "acc_norm": 0.3242320819112628, + "acc_norm_stderr": 0.01367881039951882 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3593905596494722, + "acc_stderr": 0.004788412062375701, + "acc_norm": 0.4607647878908584, + "acc_norm_stderr": 0.004974395131539591 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.30994152046783624, + "acc_stderr": 0.035469769593931624, + "acc_norm": 0.30994152046783624, + "acc_norm_stderr": 0.035469769593931624 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.18446601941747573, + "acc_stderr": 0.03840423627288276, + "acc_norm": 0.18446601941747573, + "acc_norm_stderr": 0.03840423627288276 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.23499361430395913, + "acc_stderr": 0.015162024152278441, + "acc_norm": 0.23499361430395913, + "acc_norm_stderr": 0.015162024152278441 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.035914440841969694, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.035914440841969694 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3148936170212766, + "acc_stderr": 0.030363582197238174, + "acc_norm": 0.3148936170212766, + "acc_norm_stderr": 0.030363582197238174 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3132530120481928, + "acc_stderr": 0.03610805018031023, + "acc_norm": 0.3132530120481928, + "acc_norm_stderr": 0.03610805018031023 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.20257234726688103, + "acc_stderr": 0.022827317491059682, + "acc_norm": 
0.20257234726688103, + "acc_norm_stderr": 0.022827317491059682 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.35874439461883406, + "acc_stderr": 0.03219079200419997, + "acc_norm": 0.35874439461883406, + "acc_norm_stderr": 0.03219079200419997 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.29770992366412213, + "acc_stderr": 0.040103589424622034, + "acc_norm": 0.29770992366412213, + "acc_norm_stderr": 0.040103589424622034 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.20202020202020202, + "acc_stderr": 0.028606204289229872, + "acc_norm": 0.20202020202020202, + "acc_norm_stderr": 0.028606204289229872 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.22758620689655173, + "acc_stderr": 0.03493950380131184, + "acc_norm": 0.22758620689655173, + "acc_norm_stderr": 0.03493950380131184 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.040233822736177455, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.040233822736177455 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.24369747899159663, + "acc_stderr": 0.027886828078380558, + "acc_norm": 0.24369747899159663, + "acc_norm_stderr": 0.027886828078380558 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.24871794871794872, + "acc_stderr": 0.021916957709213796, + "acc_norm": 0.24871794871794872, + "acc_norm_stderr": 0.021916957709213796 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.21296296296296297, + "acc_stderr": 
0.03957835471980981, + "acc_norm": 0.21296296296296297, + "acc_norm_stderr": 0.03957835471980981 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2315270935960591, + "acc_stderr": 0.029678333141444434, + "acc_norm": 0.2315270935960591, + "acc_norm_stderr": 0.029678333141444434 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.21935483870967742, + "acc_stderr": 0.023540799358723268, + "acc_norm": 0.21935483870967742, + "acc_norm_stderr": 0.023540799358723268 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2564102564102564, + "acc_stderr": 0.028605953702004274, + "acc_norm": 0.2564102564102564, + "acc_norm_stderr": 0.028605953702004274 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.22641509433962265, + "acc_stderr": 0.02575755989310672, + "acc_norm": 0.22641509433962265, + "acc_norm_stderr": 0.02575755989310672 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2727272727272727, + "acc_stderr": 0.04265792110940589, + "acc_norm": 0.2727272727272727, + "acc_norm_stderr": 0.04265792110940589 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24074074074074073, + "acc_stderr": 0.02606715922227578, + "acc_norm": 0.24074074074074073, + "acc_norm_stderr": 0.02606715922227578 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2781456953642384, + "acc_stderr": 0.03658603262763743, + "acc_norm": 0.2781456953642384, + "acc_norm_stderr": 0.03658603262763743 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.2885572139303483, + "acc_stderr": 0.03203841040213321, + "acc_norm": 0.2885572139303483, + "acc_norm_stderr": 0.03203841040213321 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.24277456647398843, + "acc_stderr": 0.0326926380614177, + "acc_norm": 0.24277456647398843, + "acc_norm_stderr": 0.0326926380614177 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.23544973544973544, + "acc_stderr": 0.021851509822031715, + "acc_norm": 0.23544973544973544, + "acc_norm_stderr": 
0.021851509822031715 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.20833333333333334, + "acc_stderr": 0.033961162058453336, + "acc_norm": 0.20833333333333334, + "acc_norm_stderr": 0.033961162058453336 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816505, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816505 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.23121387283236994, + "acc_stderr": 0.02269865716785571, + "acc_norm": 0.23121387283236994, + "acc_norm_stderr": 0.02269865716785571 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.294478527607362, + "acc_stderr": 0.03581165790474082, + "acc_norm": 0.294478527607362, + "acc_norm_stderr": 0.03581165790474082 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.27469135802469136, + "acc_stderr": 0.024836057868294677, + "acc_norm": 0.27469135802469136, + "acc_norm_stderr": 0.024836057868294677 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.22797927461139897, + "acc_stderr": 0.03027690994517826, + "acc_norm": 0.22797927461139897, + "acc_norm_stderr": 0.03027690994517826 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.20175438596491227, + "acc_stderr": 0.037752050135836386, + "acc_norm": 0.20175438596491227, + "acc_norm_stderr": 0.037752050135836386 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.23302752293577983, + "acc_stderr": 0.018125669180861507, + "acc_norm": 0.23302752293577983, + "acc_norm_stderr": 0.018125669180861507 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.18253968253968253, + "acc_stderr": 0.034550710191021496, + "acc_norm": 
0.18253968253968253, + "acc_norm_stderr": 0.034550710191021496 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.26143790849673204, + "acc_stderr": 0.025160998214292456, + "acc_norm": 0.26143790849673204, + "acc_norm_stderr": 0.025160998214292456 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.3305785123966942, + "acc_stderr": 0.04294340845212093, + "acc_norm": 0.3305785123966942, + "acc_norm_stderr": 0.04294340845212093 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.14473684210526316, + "acc_stderr": 0.0286319518459304, + "acc_norm": 0.14473684210526316, + "acc_norm_stderr": 0.0286319518459304 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.017630827375148383, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.017630827375148383 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.30141843971631205, + "acc_stderr": 0.02737412888263115, + "acc_norm": 0.30141843971631205, + "acc_norm_stderr": 0.02737412888263115 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2767857142857143, + "acc_stderr": 0.042466243366976235, + "acc_norm": 0.2767857142857143, + "acc_norm_stderr": 0.042466243366976235 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.30092592592592593, + "acc_stderr": 0.03128039084329883, + "acc_norm": 0.30092592592592593, + "acc_norm_stderr": 0.03128039084329883 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27039106145251396, + "acc_stderr": 0.014854993938010083, + "acc_norm": 0.27039106145251396, + "acc_norm_stderr": 0.014854993938010083 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.35, + 
"acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3602941176470588, + "acc_stderr": 0.029163128570670736, + "acc_norm": 0.3602941176470588, + "acc_norm_stderr": 0.029163128570670736 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.24081632653061225, + "acc_stderr": 0.02737294220178816, + "acc_norm": 0.24081632653061225, + "acc_norm_stderr": 0.02737294220178816 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.25316455696202533, + "acc_stderr": 0.028304657943035286, + "acc_norm": 0.25316455696202533, + "acc_norm_stderr": 0.028304657943035286 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.26140808344198174, + "acc_stderr": 0.011222528169771314, + "acc_norm": 0.26140808344198174, + "acc_norm_stderr": 0.011222528169771314 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.03019028245350195, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.03019028245350195 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.30303030303030304, + "acc_stderr": 0.03588624800091709, + "acc_norm": 0.30303030303030304, + "acc_norm_stderr": 0.03588624800091709 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.27050183598531213, + "mc1_stderr": 0.015550778332842885, + "mc2": 0.41628207118178134, + "mc2_stderr": 0.01511903356687514 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.26092089728453366, + "acc_stderr": 0.015097836279964201, + "acc_norm": 0.3482880755608028, + "acc_norm_stderr": 0.016379926739148037 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + 
"harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + 
"harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "DooDooHyun/AIFT-42dot_LLM-PLM-1.3B-ao-instruct-all-v0.54", + "model_sha": "baa9eb0e08e09ef6bb1fcaa76db69d4e64cb48c1", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/DooDooHyun/AIFT-42dot_LLM-PLM-1.3B-ao-instruct-all-v0.55/result_2024-01-22 07:27:06.json b/DooDooHyun/AIFT-42dot_LLM-PLM-1.3B-ao-instruct-all-v0.55/result_2024-01-22 07:27:06.json new file mode 100644 index 0000000000000000000000000000000000000000..f7244c2a5c5a3a1c2a61883ae782366ca8311d52 --- /dev/null +++ b/DooDooHyun/AIFT-42dot_LLM-PLM-1.3B-ao-instruct-all-v0.55/result_2024-01-22 07:27:06.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.26621160409556316, + "acc_stderr": 0.012915774781523217, + "acc_norm": 0.32081911262798635, + "acc_norm_stderr": 0.013640943091946522 + }, + "harness|ko_hellaswag|10": { + "acc": 0.35789683330013944, + "acc_stderr": 0.004784018497679818, + "acc_norm": 0.46026687910774744, + "acc_norm_stderr": 0.004974001515580969 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.23976608187134502, + "acc_stderr": 0.03274485211946956, + "acc_norm": 0.23976608187134502, + "acc_norm_stderr": 0.03274485211946956 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.24271844660194175, + "acc_stderr": 0.04245022486384493, + "acc_norm": 0.24271844660194175, + "acc_norm_stderr": 0.04245022486384493 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.24776500638569604, + "acc_stderr": 
0.015438083080568965, + "acc_norm": 0.24776500638569604, + "acc_norm_stderr": 0.015438083080568965 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2074074074074074, + "acc_stderr": 0.03502553170678318, + "acc_norm": 0.2074074074074074, + "acc_norm_stderr": 0.03502553170678318 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.22, + "acc_stderr": 0.041633319989322695, + "acc_norm": 0.22, + "acc_norm_stderr": 0.041633319989322695 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.32340425531914896, + "acc_stderr": 0.03057944277361034, + "acc_norm": 0.32340425531914896, + "acc_norm_stderr": 0.03057944277361034 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3132530120481928, + "acc_stderr": 0.036108050180310235, + "acc_norm": 0.3132530120481928, + "acc_norm_stderr": 0.036108050180310235 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.24437299035369775, + "acc_stderr": 0.02440616209466892, + "acc_norm": 0.24437299035369775, + "acc_norm_stderr": 0.02440616209466892 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3542600896860987, + "acc_stderr": 0.032100621541349864, + "acc_norm": 0.3542600896860987, + "acc_norm_stderr": 0.032100621541349864 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.24427480916030533, + "acc_stderr": 0.037683359597287434, + "acc_norm": 0.24427480916030533, + "acc_norm_stderr": 0.037683359597287434 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.20202020202020202, + "acc_stderr": 0.02860620428922987, + "acc_norm": 0.20202020202020202, + "acc_norm_stderr": 0.02860620428922987 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.23448275862068965, + "acc_stderr": 0.035306258743465914, + "acc_norm": 0.23448275862068965, + "acc_norm_stderr": 0.035306258743465914 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 
0.18627450980392157, + "acc_stderr": 0.038739587141493524, + "acc_norm": 0.18627450980392157, + "acc_norm_stderr": 0.038739587141493524 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.226890756302521, + "acc_stderr": 0.02720537153827948, + "acc_norm": 0.226890756302521, + "acc_norm_stderr": 0.02720537153827948 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2205128205128205, + "acc_stderr": 0.021020672680827912, + "acc_norm": 0.2205128205128205, + "acc_norm_stderr": 0.021020672680827912 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.2037037037037037, + "acc_stderr": 0.03893542518824846, + "acc_norm": 0.2037037037037037, + "acc_norm_stderr": 0.03893542518824846 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2561576354679803, + "acc_stderr": 0.030712730070982592, + "acc_norm": 0.2561576354679803, + "acc_norm_stderr": 0.030712730070982592 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.24193548387096775, + "acc_stderr": 0.024362599693031086, + "acc_norm": 0.24193548387096775, + "acc_norm_stderr": 0.024362599693031086 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2863247863247863, + "acc_stderr": 0.029614323690456645, + "acc_norm": 0.2863247863247863, + "acc_norm_stderr": 0.029614323690456645 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.20754716981132076, + "acc_stderr": 0.024959918028911274, + "acc_norm": 0.20754716981132076, + "acc_norm_stderr": 0.024959918028911274 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.23636363636363636, + "acc_stderr": 0.04069306319721376, + "acc_norm": 0.23636363636363636, + "acc_norm_stderr": 0.04069306319721376 + }, + 
"harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.22592592592592592, + "acc_stderr": 0.025497532639609546, + "acc_norm": 0.22592592592592592, + "acc_norm_stderr": 0.025497532639609546 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.271523178807947, + "acc_stderr": 0.03631329803969654, + "acc_norm": 0.271523178807947, + "acc_norm_stderr": 0.03631329803969654 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.23383084577114427, + "acc_stderr": 0.029929415408348387, + "acc_norm": 0.23383084577114427, + "acc_norm_stderr": 0.029929415408348387 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2254335260115607, + "acc_stderr": 0.031862098516411426, + "acc_norm": 0.2254335260115607, + "acc_norm_stderr": 0.031862098516411426 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2328042328042328, + "acc_stderr": 0.02176596167215454, + "acc_norm": 0.2328042328042328, + "acc_norm_stderr": 0.02176596167215454 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2708333333333333, + "acc_stderr": 0.03716177437566017, + "acc_norm": 0.2708333333333333, + "acc_norm_stderr": 0.03716177437566017 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.23410404624277456, + "acc_stderr": 0.022797110278071138, + "acc_norm": 0.23410404624277456, + "acc_norm_stderr": 0.022797110278071138 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.24539877300613497, + "acc_stderr": 0.03380939813943354, + "acc_norm": 0.24539877300613497, + "acc_norm_stderr": 0.03380939813943354 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2839506172839506, + "acc_stderr": 0.025089478523765134, + "acc_norm": 0.2839506172839506, + 
"acc_norm_stderr": 0.025089478523765134 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.20725388601036268, + "acc_stderr": 0.029252823291803627, + "acc_norm": 0.20725388601036268, + "acc_norm_stderr": 0.029252823291803627 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.04227054451232199, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.04227054451232199 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.20733944954128442, + "acc_stderr": 0.017381415563608674, + "acc_norm": 0.20733944954128442, + "acc_norm_stderr": 0.017381415563608674 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.21428571428571427, + "acc_stderr": 0.03670066451047182, + "acc_norm": 0.21428571428571427, + "acc_norm_stderr": 0.03670066451047182 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.23202614379084968, + "acc_stderr": 0.024170840879341005, + "acc_norm": 0.23202614379084968, + "acc_norm_stderr": 0.024170840879341005 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909284, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909284 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.3305785123966942, + "acc_stderr": 0.04294340845212094, + "acc_norm": 0.3305785123966942, + "acc_norm_stderr": 0.04294340845212094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.17763157894736842, + "acc_stderr": 0.03110318238312337, + "acc_norm": 0.17763157894736842, + "acc_norm_stderr": 0.03110318238312337 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.017848089574913226, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.017848089574913226 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2553191489361702, + "acc_stderr": 
0.02601199293090201, + "acc_norm": 0.2553191489361702, + "acc_norm_stderr": 0.02601199293090201 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3482142857142857, + "acc_stderr": 0.04521829902833585, + "acc_norm": 0.3482142857142857, + "acc_norm_stderr": 0.04521829902833585 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.23148148148148148, + "acc_stderr": 0.02876511171804693, + "acc_norm": 0.23148148148148148, + "acc_norm_stderr": 0.02876511171804693 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27150837988826815, + "acc_stderr": 0.014874252168095277, + "acc_norm": 0.27150837988826815, + "acc_norm_stderr": 0.014874252168095277 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.23, + "acc_stderr": 0.042295258468165065, + "acc_norm": 0.23, + "acc_norm_stderr": 0.042295258468165065 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.34191176470588236, + "acc_stderr": 0.028814722422254174, + "acc_norm": 0.34191176470588236, + "acc_norm_stderr": 0.028814722422254174 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.19591836734693877, + "acc_stderr": 0.025409301953225678, + "acc_norm": 0.19591836734693877, + "acc_norm_stderr": 0.025409301953225678 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.24472573839662448, + "acc_stderr": 0.02798569938703642, + "acc_norm": 0.24472573839662448, + "acc_norm_stderr": 0.02798569938703642 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2457627118644068, + "acc_stderr": 0.010996156635142692, + "acc_norm": 0.2457627118644068, + "acc_norm_stderr": 0.010996156635142692 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.03256685484460389, + "acc_norm": 0.3137254901960784, + "acc_norm_stderr": 0.03256685484460389 + }, + 
"harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.19393939393939394, + "acc_stderr": 0.0308741451365621, + "acc_norm": 0.19393939393939394, + "acc_norm_stderr": 0.0308741451365621 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2729498164014688, + "mc1_stderr": 0.015594753632006518, + "mc2": 0.4163135604722655, + "mc2_stderr": 0.015044751734204925 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2597402597402597, + "acc_stderr": 0.015075666411230305, + "acc_norm": 0.3707201889020071, + "acc_norm_stderr": 0.016605801289212595 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + 
"harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "DooDooHyun/AIFT-42dot_LLM-PLM-1.3B-ao-instruct-all-v0.55", + "model_sha": "511690a94f6192d8b56dc822c6278000d32af054", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/DooDooHyun/AIFT-42dot_LLM-PLM-1.3B-v1.51/result_2024-03-06 07:37:57.json b/DooDooHyun/AIFT-42dot_LLM-PLM-1.3B-v1.51/result_2024-03-06 07:37:57.json new file mode 100644 index 0000000000000000000000000000000000000000..5a5b5edfbe7e4a23a3356afd4bb84a5ac97d6c9f --- /dev/null +++ b/DooDooHyun/AIFT-42dot_LLM-PLM-1.3B-v1.51/result_2024-03-06 07:37:57.json @@ -0,0 +1,444 @@ +{ 
+ "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2713310580204778, + "acc_stderr": 0.012993807727545792, + "acc_norm": 0.32337883959044367, + "acc_norm_stderr": 0.013669421630012123 + }, + "harness|ko_hellaswag|10": { + "acc": 0.35222067317267475, + "acc_stderr": 0.0047668609071715405, + "acc_norm": 0.4458275243975304, + "acc_norm_stderr": 0.004960408362133239 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.2573099415204678, + "acc_stderr": 0.03352799844161865, + "acc_norm": 0.2573099415204678, + "acc_norm_stderr": 0.03352799844161865 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.1553398058252427, + "acc_stderr": 0.03586594738573975, + "acc_norm": 0.1553398058252427, + "acc_norm_stderr": 0.03586594738573975 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.24393358876117496, + "acc_stderr": 0.015357212665829479, + "acc_norm": 0.24393358876117496, + "acc_norm_stderr": 0.015357212665829479 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2074074074074074, + "acc_stderr": 0.03502553170678318, + "acc_norm": 0.2074074074074074, + "acc_norm_stderr": 0.03502553170678318 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.31063829787234043, + "acc_stderr": 0.03025123757921317, + "acc_norm": 0.31063829787234043, + "acc_norm_stderr": 0.03025123757921317 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3132530120481928, + "acc_stderr": 0.03610805018031023, + "acc_norm": 0.3132530120481928, + "acc_norm_stderr": 0.03610805018031023 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2540192926045016, + "acc_stderr": 0.024723861504771686, + "acc_norm": 0.2540192926045016, + "acc_norm_stderr": 0.024723861504771686 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3542600896860987, + "acc_stderr": 0.032100621541349864, + "acc_norm": 0.3542600896860987, + "acc_norm_stderr": 
0.032100621541349864 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.26717557251908397, + "acc_stderr": 0.038808483010823944, + "acc_norm": 0.26717557251908397, + "acc_norm_stderr": 0.038808483010823944 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.16161616161616163, + "acc_stderr": 0.026225919863629293, + "acc_norm": 0.16161616161616163, + "acc_norm_stderr": 0.026225919863629293 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.27586206896551724, + "acc_stderr": 0.03724563619774634, + "acc_norm": 0.27586206896551724, + "acc_norm_stderr": 0.03724563619774634 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.04440521906179326, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.04440521906179326 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.24369747899159663, + "acc_stderr": 0.02788682807838055, + "acc_norm": 0.24369747899159663, + "acc_norm_stderr": 0.02788682807838055 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2230769230769231, + "acc_stderr": 0.021107730127244, + "acc_norm": 0.2230769230769231, + "acc_norm_stderr": 0.021107730127244 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.28703703703703703, + "acc_stderr": 0.043733130409147614, + "acc_norm": 0.28703703703703703, + "acc_norm_stderr": 0.043733130409147614 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.1724137931034483, + "acc_stderr": 0.026577672183036576, + "acc_norm": 
0.1724137931034483, + "acc_norm_stderr": 0.026577672183036576 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.22903225806451613, + "acc_stderr": 0.023904914311782655, + "acc_norm": 0.22903225806451613, + "acc_norm_stderr": 0.023904914311782655 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.26495726495726496, + "acc_stderr": 0.028911208802749475, + "acc_norm": 0.26495726495726496, + "acc_norm_stderr": 0.028911208802749475 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2188679245283019, + "acc_stderr": 0.025447863825108614, + "acc_norm": 0.2188679245283019, + "acc_norm_stderr": 0.025447863825108614 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2636363636363636, + "acc_stderr": 0.04220224692971987, + "acc_norm": 0.2636363636363636, + "acc_norm_stderr": 0.04220224692971987 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.27037037037037037, + "acc_stderr": 0.027080372815145665, + "acc_norm": 0.27037037037037037, + "acc_norm_stderr": 0.027080372815145665 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.26490066225165565, + "acc_stderr": 0.03603038545360384, + "acc_norm": 0.26490066225165565, + "acc_norm_stderr": 0.03603038545360384 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.24875621890547264, + "acc_stderr": 0.030567675938916707, + "acc_norm": 0.24875621890547264, + "acc_norm_stderr": 0.030567675938916707 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.18497109826589594, + "acc_stderr": 0.0296056239817712, + "acc_norm": 0.18497109826589594, + "acc_norm_stderr": 0.0296056239817712 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.24867724867724866, + "acc_stderr": 0.022261817692400175, + "acc_norm": 0.24867724867724866, + "acc_norm_stderr": 0.022261817692400175 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.22916666666666666, + "acc_stderr": 0.03514697467862388, + "acc_norm": 0.22916666666666666, + "acc_norm_stderr": 0.03514697467862388 + }, + 
"harness|ko_mmlu_college_chemistry|5": { + "acc": 0.22, + "acc_stderr": 0.0416333199893227, + "acc_norm": 0.22, + "acc_norm_stderr": 0.0416333199893227 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.3, + "acc_stderr": 0.04605661864718381, + "acc_norm": 0.3, + "acc_norm_stderr": 0.04605661864718381 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2254335260115607, + "acc_stderr": 0.022497230190967554, + "acc_norm": 0.2254335260115607, + "acc_norm_stderr": 0.022497230190967554 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.24539877300613497, + "acc_stderr": 0.03380939813943354, + "acc_norm": 0.24539877300613497, + "acc_norm_stderr": 0.03380939813943354 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2716049382716049, + "acc_stderr": 0.02474862449053737, + "acc_norm": 0.2716049382716049, + "acc_norm_stderr": 0.02474862449053737 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.17098445595854922, + "acc_stderr": 0.02717121368316455, + "acc_norm": 0.17098445595854922, + "acc_norm_stderr": 0.02717121368316455 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.21929824561403508, + "acc_stderr": 0.03892431106518754, + "acc_norm": 0.21929824561403508, + "acc_norm_stderr": 0.03892431106518754 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.20550458715596331, + "acc_stderr": 0.01732435232501601, + "acc_norm": 0.20550458715596331, + "acc_norm_stderr": 0.01732435232501601 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.25396825396825395, + "acc_stderr": 0.038932596106046734, + "acc_norm": 0.25396825396825395, + "acc_norm_stderr": 0.038932596106046734 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.023152722439402307, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.023152722439402307 
+ }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.18, + "acc_stderr": 0.03861229196653697, + "acc_norm": 0.18, + "acc_norm_stderr": 0.03861229196653697 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.35537190082644626, + "acc_stderr": 0.04369236326573981, + "acc_norm": 0.35537190082644626, + "acc_norm_stderr": 0.04369236326573981 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.18421052631578946, + "acc_stderr": 0.0315469804508223, + "acc_norm": 0.18421052631578946, + "acc_norm_stderr": 0.0315469804508223 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.017401816711427657, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.017401816711427657 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2872340425531915, + "acc_stderr": 0.026992199173064356, + "acc_norm": 0.2872340425531915, + "acc_norm_stderr": 0.026992199173064356 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25892857142857145, + "acc_stderr": 0.04157751539865629, + "acc_norm": 0.25892857142857145, + "acc_norm_stderr": 0.04157751539865629 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.33796296296296297, + "acc_stderr": 0.03225941352631295, + "acc_norm": 0.33796296296296297, + "acc_norm_stderr": 0.03225941352631295 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27262569832402234, + "acc_stderr": 0.014893391735249608, + "acc_norm": 0.27262569832402234, + "acc_norm_stderr": 0.014893391735249608 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4375, + "acc_stderr": 0.030134614954403924, + "acc_norm": 0.4375, + "acc_norm_stderr": 
0.030134614954403924 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.20816326530612245, + "acc_stderr": 0.0259911176728133, + "acc_norm": 0.20816326530612245, + "acc_norm_stderr": 0.0259911176728133 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.29535864978902954, + "acc_stderr": 0.029696338713422896, + "acc_norm": 0.29535864978902954, + "acc_norm_stderr": 0.029696338713422896 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.22620599739243807, + "acc_stderr": 0.010685470750077785, + "acc_norm": 0.22620599739243807, + "acc_norm_stderr": 0.010685470750077785 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.25, + "acc_stderr": 0.03039153369274154, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03039153369274154 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.23030303030303031, + "acc_stderr": 0.03287666758603488, + "acc_norm": 0.23030303030303031, + "acc_norm_stderr": 0.03287666758603488 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2962056303549572, + "mc1_stderr": 0.01598359510181139, + "mc2": 0.43784783579631964, + "mc2_stderr": 0.014867064946462295 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.25737898465171194, + "acc_stderr": 0.015030899730346759, + "acc_norm": 0.4025974025974026, + "acc_norm_stderr": 0.01686102048640778 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 
1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + 
"harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "DooDooHyun/AIFT-42dot_LLM-PLM-1.3B-v1.51", + "model_sha": "e0f88ee83e1c09208ce5c48b5c20eb4efdd41119", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/DooDooHyun/AIFT-Yi-Ko-6B-ao-instruct-all-v0.54/result_2024-01-22 14:13:57.json b/DooDooHyun/AIFT-Yi-Ko-6B-ao-instruct-all-v0.54/result_2024-01-22 14:13:57.json new file mode 100644 index 0000000000000000000000000000000000000000..9d59d65d46f170bbd1e2affaece975eddb60e2a2 --- /dev/null +++ b/DooDooHyun/AIFT-Yi-Ko-6B-ao-instruct-all-v0.54/result_2024-01-22 14:13:57.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3447098976109215, + "acc_stderr": 0.01388881628678211, + "acc_norm": 0.41552901023890787, + "acc_norm_stderr": 0.01440136664121639 + }, + "harness|ko_hellaswag|10": { + "acc": 0.40300736904999, + "acc_stderr": 0.004894997736719054, + "acc_norm": 0.5375423222465644, + "acc_norm_stderr": 0.00497569607624085 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5029239766081871, + "acc_stderr": 0.03834759370936839, + "acc_norm": 0.5029239766081871, + "acc_norm_stderr": 0.03834759370936839 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6019417475728155, + "acc_stderr": 0.048467482539772386, + "acc_norm": 0.6019417475728155, + "acc_norm_stderr": 0.048467482539772386 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5542784163473818, + "acc_stderr": 0.01777429728247951, + "acc_norm": 0.5542784163473818, + "acc_norm_stderr": 0.01777429728247951 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4740740740740741, + "acc_stderr": 0.04313531696750573, + "acc_norm": 0.4740740740740741, + "acc_norm_stderr": 0.04313531696750573 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.26, 
+ "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39148936170212767, + "acc_stderr": 0.03190701242326812, + "acc_norm": 0.39148936170212767, + "acc_norm_stderr": 0.03190701242326812 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3855421686746988, + "acc_stderr": 0.037891344246115496, + "acc_norm": 0.3855421686746988, + "acc_norm_stderr": 0.037891344246115496 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5337620578778135, + "acc_stderr": 0.0283332771095628, + "acc_norm": 0.5337620578778135, + "acc_norm_stderr": 0.0283332771095628 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4663677130044843, + "acc_stderr": 0.033481800170603065, + "acc_norm": 0.4663677130044843, + "acc_norm_stderr": 0.033481800170603065 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5572519083969466, + "acc_stderr": 0.04356447202665069, + "acc_norm": 0.5572519083969466, + "acc_norm_stderr": 0.04356447202665069 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.034273086529999365, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.034273086529999365 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5103448275862069, + "acc_stderr": 0.04165774775728763, + "acc_norm": 0.5103448275862069, + "acc_norm_stderr": 0.04165774775728763 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.19607843137254902, + "acc_stderr": 0.03950581861179964, + "acc_norm": 0.19607843137254902, + "acc_norm_stderr": 0.03950581861179964 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4579831932773109, + "acc_stderr": 0.032363611119519416, + "acc_norm": 0.4579831932773109, + "acc_norm_stderr": 0.032363611119519416 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4564102564102564, + 
"acc_stderr": 0.02525448542479961, + "acc_norm": 0.4564102564102564, + "acc_norm_stderr": 0.02525448542479961 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.68, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.68, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.04605661864718381, + "acc_norm": 0.3, + "acc_norm_stderr": 0.04605661864718381 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3842364532019704, + "acc_stderr": 0.0342239856565755, + "acc_norm": 0.3842364532019704, + "acc_norm_stderr": 0.0342239856565755 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.43548387096774194, + "acc_stderr": 0.028206225591502734, + "acc_norm": 0.43548387096774194, + "acc_norm_stderr": 0.028206225591502734 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.717948717948718, + "acc_stderr": 0.029480360549541194, + "acc_norm": 0.717948717948718, + "acc_norm_stderr": 0.029480360549541194 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4528301886792453, + "acc_stderr": 0.030635627957961816, + "acc_norm": 0.4528301886792453, + "acc_norm_stderr": 0.030635627957961816 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4727272727272727, + "acc_stderr": 0.04782001791380063, + "acc_norm": 0.4727272727272727, + "acc_norm_stderr": 0.04782001791380063 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3074074074074074, + "acc_stderr": 0.02813325257881564, + "acc_norm": 0.3074074074074074, + "acc_norm_stderr": 0.02813325257881564 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.03757949922943342, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.03757949922943342 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 
0.582089552238806, + "acc_stderr": 0.034875586404620636, + "acc_norm": 0.582089552238806, + "acc_norm_stderr": 0.034875586404620636 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4046242774566474, + "acc_stderr": 0.0374246119388725, + "acc_norm": 0.4046242774566474, + "acc_norm_stderr": 0.0374246119388725 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3201058201058201, + "acc_stderr": 0.024026846392873502, + "acc_norm": 0.3201058201058201, + "acc_norm_stderr": 0.024026846392873502 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4513888888888889, + "acc_stderr": 0.04161402398403279, + "acc_norm": 0.4513888888888889, + "acc_norm_stderr": 0.04161402398403279 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5028901734104047, + "acc_stderr": 0.026918645383239015, + "acc_norm": 0.5028901734104047, + "acc_norm_stderr": 0.026918645383239015 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4662576687116564, + "acc_stderr": 0.03919415545048409, + "acc_norm": 0.4662576687116564, + "acc_norm_stderr": 0.03919415545048409 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.49382716049382713, + "acc_stderr": 0.027818623962583302, + "acc_norm": 0.49382716049382713, + "acc_norm_stderr": 0.027818623962583302 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6010362694300518, + "acc_stderr": 0.035339990940656964, + "acc_norm": 0.6010362694300518, + "acc_norm_stderr": 0.035339990940656964 + }, + 
"harness|ko_mmlu_econometrics|5": { + "acc": 0.2894736842105263, + "acc_stderr": 0.04266339443159394, + "acc_norm": 0.2894736842105263, + "acc_norm_stderr": 0.04266339443159394 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6, + "acc_stderr": 0.021004201260420078, + "acc_norm": 0.6, + "acc_norm_stderr": 0.021004201260420078 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.0393253768039287, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.0393253768039287 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4542483660130719, + "acc_stderr": 0.028509807802626567, + "acc_norm": 0.4542483660130719, + "acc_norm_stderr": 0.028509807802626567 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.628099173553719, + "acc_stderr": 0.04412015806624504, + "acc_norm": 0.628099173553719, + "acc_norm_stderr": 0.04412015806624504 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4407894736842105, + "acc_stderr": 0.040403110624904356, + "acc_norm": 0.4407894736842105, + "acc_norm_stderr": 0.040403110624904356 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.39705882352941174, + "acc_stderr": 0.019794488900024103, + "acc_norm": 0.39705882352941174, + "acc_norm_stderr": 0.019794488900024103 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32978723404255317, + "acc_stderr": 0.028045946942042398, + "acc_norm": 0.32978723404255317, + "acc_norm_stderr": 0.028045946942042398 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.33035714285714285, + "acc_stderr": 0.044642857142857116, + "acc_norm": 0.33035714285714285, + "acc_norm_stderr": 0.044642857142857116 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.029886910547626985, + "acc_norm": 0.25925925925925924, + 
"acc_norm_stderr": 0.029886910547626985 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2212290502793296, + "acc_stderr": 0.013882164598887265, + "acc_norm": 0.2212290502793296, + "acc_norm_stderr": 0.013882164598887265 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3786764705882353, + "acc_stderr": 0.029465133639776125, + "acc_norm": 0.3786764705882353, + "acc_norm_stderr": 0.029465133639776125 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.37551020408163266, + "acc_stderr": 0.031001209039894836, + "acc_norm": 0.37551020408163266, + "acc_norm_stderr": 0.031001209039894836 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5738396624472574, + "acc_stderr": 0.03219035703131775, + "acc_norm": 0.5738396624472574, + "acc_norm_stderr": 0.03219035703131775 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.32073011734028684, + "acc_stderr": 0.011921199991782622, + "acc_norm": 0.32073011734028684, + "acc_norm_stderr": 0.011921199991782622 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5, + "acc_stderr": 0.03509312031717982, + "acc_norm": 0.5, + "acc_norm_stderr": 0.03509312031717982 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5151515151515151, + "acc_stderr": 0.03902551007374448, + "acc_norm": 0.5151515151515151, + "acc_norm_stderr": 0.03902551007374448 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2802937576499388, + "mc1_stderr": 0.015723139524608746, + "mc2": 0.4370086270534907, + "mc2_stderr": 0.01529592374430609 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.35064935064935066, + "acc_stderr": 0.0164055569038933, + "acc_norm": 
0.39315230224321135, + "acc_norm_stderr": 0.01679326280128708 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + 
"harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "DooDooHyun/AIFT-Yi-Ko-6B-ao-instruct-all-v0.54", + "model_sha": "b5c70d1bb01aa3b9b6f756c301d908c95d80900b", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/DopeorNope/COKA-DPO-test-v1/result_2023-11-08 09:27:44.json b/DopeorNope/COKA-DPO-test-v1/result_2023-11-08 09:27:44.json new file mode 100644 index 0000000000000000000000000000000000000000..b869087ecd73a1b44765bd3e0095ac5cd97b8ed3 --- /dev/null +++ b/DopeorNope/COKA-DPO-test-v1/result_2023-11-08 09:27:44.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4121160409556314, + "acc_stderr": 0.014383915302225396, + "acc_norm": 0.46501706484641636, + "acc_norm_stderr": 0.014575583922019662 + }, + "harness|ko_hellaswag|10": { + "acc": 0.475502887870942, + "acc_stderr": 0.0049837889926811945, + "acc_norm": 0.5488946425014938, + "acc_norm_stderr": 0.004965866098318169 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4152046783625731, + "acc_stderr": 0.03779275945503201, + "acc_norm": 0.4152046783625731, + 
"acc_norm_stderr": 0.03779275945503201 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2912621359223301, + "acc_stderr": 0.044986763205729224, + "acc_norm": 0.2912621359223301, + "acc_norm_stderr": 0.044986763205729224 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.36015325670498083, + "acc_stderr": 0.017166362471369302, + "acc_norm": 0.36015325670498083, + "acc_norm_stderr": 0.017166362471369302 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2518518518518518, + "acc_stderr": 0.037498507091740206, + "acc_norm": 0.2518518518518518, + "acc_norm_stderr": 0.037498507091740206 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932268, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932268 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2765957446808511, + "acc_stderr": 0.029241883869628837, + "acc_norm": 0.2765957446808511, + "acc_norm_stderr": 0.029241883869628837 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.2891566265060241, + "acc_stderr": 0.03529486801511116, + "acc_norm": 0.2891566265060241, + "acc_norm_stderr": 0.03529486801511116 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3086816720257235, + "acc_stderr": 0.026236965881153262, + "acc_norm": 0.3086816720257235, + "acc_norm_stderr": 0.026236965881153262 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3542600896860987, + "acc_stderr": 0.03210062154134986, + "acc_norm": 0.3542600896860987, + "acc_norm_stderr": 0.03210062154134986 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2824427480916031, + "acc_stderr": 0.03948406125768361, + "acc_norm": 0.2824427480916031, + "acc_norm_stderr": 0.03948406125768361 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.31, + "acc_stderr": 0.046482319871173156, + "acc_norm": 0.31, + "acc_norm_stderr": 0.046482319871173156 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.2828282828282828, + "acc_stderr": 0.0320877955878675, + "acc_norm": 0.2828282828282828, + 
"acc_norm_stderr": 0.0320877955878675 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2689655172413793, + "acc_stderr": 0.03695183311650232, + "acc_norm": 0.2689655172413793, + "acc_norm_stderr": 0.03695183311650232 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237654, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237654 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3025210084033613, + "acc_stderr": 0.02983796238829193, + "acc_norm": 0.3025210084033613, + "acc_norm_stderr": 0.02983796238829193 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.28205128205128205, + "acc_stderr": 0.022815813098896597, + "acc_norm": 0.28205128205128205, + "acc_norm_stderr": 0.022815813098896597 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036845, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036845 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.32407407407407407, + "acc_stderr": 0.04524596007030049, + "acc_norm": 0.32407407407407407, + "acc_norm_stderr": 0.04524596007030049 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.17733990147783252, + "acc_stderr": 0.026874337276808345, + "acc_norm": 0.17733990147783252, + "acc_norm_stderr": 0.026874337276808345 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.24838709677419354, + "acc_stderr": 0.024580028921481003, + "acc_norm": 0.24838709677419354, + "acc_norm_stderr": 0.024580028921481003 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.47435897435897434, + "acc_stderr": 0.03271298896811159, + "acc_norm": 0.47435897435897434, + "acc_norm_stderr": 0.03271298896811159 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.24150943396226415, + "acc_stderr": 
0.026341480371118352, + "acc_norm": 0.24150943396226415, + "acc_norm_stderr": 0.026341480371118352 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.3181818181818182, + "acc_stderr": 0.04461272175910507, + "acc_norm": 0.3181818181818182, + "acc_norm_stderr": 0.04461272175910507 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.21481481481481482, + "acc_stderr": 0.025040443877000683, + "acc_norm": 0.21481481481481482, + "acc_norm_stderr": 0.025040443877000683 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.1986754966887417, + "acc_stderr": 0.032578473844367746, + "acc_norm": 0.1986754966887417, + "acc_norm_stderr": 0.032578473844367746 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.38308457711442784, + "acc_stderr": 0.0343751933733825, + "acc_norm": 0.38308457711442784, + "acc_norm_stderr": 0.0343751933733825 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.23121387283236994, + "acc_stderr": 0.0321473730202947, + "acc_norm": 0.23121387283236994, + "acc_norm_stderr": 0.0321473730202947 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.23544973544973544, + "acc_stderr": 0.021851509822031722, + "acc_norm": 0.23544973544973544, + "acc_norm_stderr": 0.021851509822031722 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.03745554791462457, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.03745554791462457 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.22, + "acc_stderr": 0.041633319989322695, + "acc_norm": 0.22, + "acc_norm_stderr": 0.041633319989322695 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2832369942196532, + "acc_stderr": 0.024257901705323374, + "acc_norm": 0.2832369942196532, + "acc_norm_stderr": 0.024257901705323374 + }, + "harness|ko_mmlu_logical_fallacies|5": { + 
"acc": 0.27607361963190186, + "acc_stderr": 0.0351238528370505, + "acc_norm": 0.27607361963190186, + "acc_norm_stderr": 0.0351238528370505 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.024922001168886345, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.024922001168886345 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816505, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816505 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.31088082901554404, + "acc_stderr": 0.03340361906276586, + "acc_norm": 0.31088082901554404, + "acc_norm_stderr": 0.03340361906276586 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.21929824561403508, + "acc_stderr": 0.03892431106518753, + "acc_norm": 0.21929824561403508, + "acc_norm_stderr": 0.03892431106518753 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.28073394495412846, + "acc_stderr": 0.019266055045871616, + "acc_norm": 0.28073394495412846, + "acc_norm_stderr": 0.019266055045871616 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.04134913018303316, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.04134913018303316 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.28104575163398693, + "acc_stderr": 0.025738854797818726, + "acc_norm": 0.28104575163398693, + "acc_norm_stderr": 0.025738854797818726 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.34710743801652894, + "acc_stderr": 0.043457245702925335, + "acc_norm": 0.34710743801652894, + "acc_norm_stderr": 0.043457245702925335 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.21710526315789475, + "acc_stderr": 0.033550453048829226, + "acc_norm": 0.21710526315789475, + "acc_norm_stderr": 0.033550453048829226 + }, + 
"harness|ko_mmlu_professional_psychology|5": { + "acc": 0.27941176470588236, + "acc_stderr": 0.018152871051538816, + "acc_norm": 0.27941176470588236, + "acc_norm_stderr": 0.018152871051538816 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.22695035460992907, + "acc_stderr": 0.024987106365642966, + "acc_norm": 0.22695035460992907, + "acc_norm_stderr": 0.024987106365642966 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.33035714285714285, + "acc_stderr": 0.04464285714285714, + "acc_norm": 0.33035714285714285, + "acc_norm_stderr": 0.04464285714285714 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.18055555555555555, + "acc_stderr": 0.02623287897149166, + "acc_norm": 0.18055555555555555, + "acc_norm_stderr": 0.02623287897149166 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23798882681564246, + "acc_stderr": 0.014242630070574892, + "acc_norm": 0.23798882681564246, + "acc_norm_stderr": 0.014242630070574892 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542129, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542129 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.19117647058823528, + "acc_stderr": 0.023886881922440355, + "acc_norm": 0.19117647058823528, + "acc_norm_stderr": 0.023886881922440355 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.22857142857142856, + "acc_stderr": 0.02688214492230774, + "acc_norm": 0.22857142857142856, + "acc_norm_stderr": 0.02688214492230774 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.35443037974683544, + "acc_stderr": 0.0311373042971858, + "acc_norm": 0.35443037974683544, + "acc_norm_stderr": 0.0311373042971858 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2653194263363755, + "acc_stderr": 
0.011276198843958855, + "acc_norm": 0.2653194263363755, + "acc_norm_stderr": 0.011276198843958855 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.031321798030832904, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.031321798030832904 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.24848484848484848, + "acc_stderr": 0.03374402644139404, + "acc_norm": 0.24848484848484848, + "acc_norm_stderr": 0.03374402644139404 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.23990208078335373, + "mc1_stderr": 0.01494881267906214, + "mc2": 0.38215532822863674, + "mc2_stderr": 0.016573212306306796 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.28689492325855964, + "acc_stderr": 0.015550809966781775, + "acc_norm": 0.3955135773317591, + "acc_norm_stderr": 0.01681081590220604 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + 
"harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "DopeorNope/COKA-DPO-test-v1", + "model_sha": "3cb9d8b6049bcf966d2bf418661c5b7228795949", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/DopeorNope/COKAL-13b-v3/result_2023-10-28 20:08:45.json 
b/DopeorNope/COKAL-13b-v3/result_2023-10-28 20:08:45.json new file mode 100644 index 0000000000000000000000000000000000000000..8abd948ebfe1fec79e781da4981952adc0daa4b0 --- /dev/null +++ b/DopeorNope/COKAL-13b-v3/result_2023-10-28 20:08:45.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3575085324232082, + "acc_stderr": 0.014005494275916576, + "acc_norm": 0.4189419795221843, + "acc_norm_stderr": 0.014418106953639011 + }, + "harness|ko_hellaswag|10": { + "acc": 0.40300736904999, + "acc_stderr": 0.004894997736719058, + "acc_norm": 0.5230033857797252, + "acc_norm_stderr": 0.004984497871025246 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.543859649122807, + "acc_stderr": 0.03820042586602966, + "acc_norm": 0.543859649122807, + "acc_norm_stderr": 0.03820042586602966 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5145631067961165, + "acc_stderr": 0.04948637324026637, + "acc_norm": 0.5145631067961165, + "acc_norm_stderr": 0.04948637324026637 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5031928480204342, + "acc_stderr": 0.017879598945933085, + "acc_norm": 0.5031928480204342, + "acc_norm_stderr": 0.017879598945933085 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.43703703703703706, + "acc_stderr": 0.04284958639753399, + "acc_norm": 0.43703703703703706, + "acc_norm_stderr": 0.04284958639753399 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.35319148936170214, + "acc_stderr": 0.03124532520276193, + "acc_norm": 0.35319148936170214, + "acc_norm_stderr": 0.03124532520276193 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3855421686746988, + "acc_stderr": 0.037891344246115496, + "acc_norm": 0.3855421686746988, + "acc_norm_stderr": 0.037891344246115496 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4565916398713826, + "acc_stderr": 
0.028290869054197598, + "acc_norm": 0.4565916398713826, + "acc_norm_stderr": 0.028290869054197598 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4170403587443946, + "acc_stderr": 0.03309266936071721, + "acc_norm": 0.4170403587443946, + "acc_norm_stderr": 0.03309266936071721 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4732824427480916, + "acc_stderr": 0.04379024936553894, + "acc_norm": 0.4732824427480916, + "acc_norm_stderr": 0.04379024936553894 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.51010101010101, + "acc_stderr": 0.035616254886737454, + "acc_norm": 0.51010101010101, + "acc_norm_stderr": 0.035616254886737454 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.04104269211806232, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.04104269211806232 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.044405219061793275, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.044405219061793275 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.42016806722689076, + "acc_stderr": 0.032061837832361516, + "acc_norm": 0.42016806722689076, + "acc_norm_stderr": 0.032061837832361516 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4230769230769231, + "acc_stderr": 0.025049197876042328, + "acc_norm": 0.4230769230769231, + "acc_norm_stderr": 0.025049197876042328 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.47, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.47, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5555555555555556, + 
"acc_stderr": 0.04803752235190193, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.04803752235190193 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3842364532019704, + "acc_stderr": 0.03422398565657551, + "acc_norm": 0.3842364532019704, + "acc_norm_stderr": 0.03422398565657551 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4290322580645161, + "acc_stderr": 0.02815603653823321, + "acc_norm": 0.4290322580645161, + "acc_norm_stderr": 0.02815603653823321 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6153846153846154, + "acc_stderr": 0.03187195347942466, + "acc_norm": 0.6153846153846154, + "acc_norm_stderr": 0.03187195347942466 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4226415094339623, + "acc_stderr": 0.030402331445769537, + "acc_norm": 0.4226415094339623, + "acc_norm_stderr": 0.030402331445769537 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.04769300568972745, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.04769300568972745 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25555555555555554, + "acc_stderr": 0.026593939101844072, + "acc_norm": 0.25555555555555554, + "acc_norm_stderr": 0.026593939101844072 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.32450331125827814, + "acc_stderr": 0.038227469376587525, + "acc_norm": 0.32450331125827814, + "acc_norm_stderr": 0.038227469376587525 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6119402985074627, + "acc_stderr": 0.03445789964362749, + "acc_norm": 0.6119402985074627, + "acc_norm_stderr": 0.03445789964362749 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3699421965317919, + "acc_stderr": 0.03681229633394319, + "acc_norm": 0.3699421965317919, + "acc_norm_stderr": 0.03681229633394319 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2830687830687831, + "acc_stderr": 0.023201392938194974, + "acc_norm": 0.2830687830687831, + "acc_norm_stderr": 
0.023201392938194974 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3680555555555556, + "acc_stderr": 0.040329990539607195, + "acc_norm": 0.3680555555555556, + "acc_norm_stderr": 0.040329990539607195 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.63, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.63, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5057803468208093, + "acc_stderr": 0.026917296179149123, + "acc_norm": 0.5057803468208093, + "acc_norm_stderr": 0.026917296179149123 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4233128834355828, + "acc_stderr": 0.03881891213334383, + "acc_norm": 0.4233128834355828, + "acc_norm_stderr": 0.03881891213334383 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.027648477877413324, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.027648477877413324 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5233160621761658, + "acc_stderr": 0.03604513672442202, + "acc_norm": 0.5233160621761658, + "acc_norm_stderr": 0.03604513672442202 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.22807017543859648, + "acc_stderr": 0.03947152782669415, + "acc_norm": 0.22807017543859648, + "acc_norm_stderr": 0.03947152782669415 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5302752293577981, + "acc_stderr": 0.021397988604936965, + "acc_norm": 0.5302752293577981, + "acc_norm_stderr": 0.021397988604936965 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.31746031746031744, + "acc_stderr": 0.04163453031302859, + "acc_norm": 0.31746031746031744, + 
"acc_norm_stderr": 0.04163453031302859 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.41830065359477125, + "acc_stderr": 0.028245134024387292, + "acc_norm": 0.41830065359477125, + "acc_norm_stderr": 0.028245134024387292 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6198347107438017, + "acc_stderr": 0.04431324501968432, + "acc_norm": 0.6198347107438017, + "acc_norm_stderr": 0.04431324501968432 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4342105263157895, + "acc_stderr": 0.04033565667848319, + "acc_norm": 0.4342105263157895, + "acc_norm_stderr": 0.04033565667848319 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3660130718954248, + "acc_stderr": 0.019488025745529675, + "acc_norm": 0.3660130718954248, + "acc_norm_stderr": 0.019488025745529675 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3404255319148936, + "acc_stderr": 0.02826765748265014, + "acc_norm": 0.3404255319148936, + "acc_norm_stderr": 0.02826765748265014 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.23214285714285715, + "acc_stderr": 0.04007341809755806, + "acc_norm": 0.23214285714285715, + "acc_norm_stderr": 0.04007341809755806 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3287037037037037, + "acc_stderr": 0.032036140846700596, + "acc_norm": 0.3287037037037037, + "acc_norm_stderr": 0.032036140846700596 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24916201117318434, + "acc_stderr": 0.014465893829859923, + "acc_norm": 0.24916201117318434, + "acc_norm_stderr": 0.014465893829859923 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + 
"acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.40808823529411764, + "acc_stderr": 0.029855261393483924, + "acc_norm": 0.40808823529411764, + "acc_norm_stderr": 0.029855261393483924 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.46122448979591835, + "acc_stderr": 0.03191282052669277, + "acc_norm": 0.46122448979591835, + "acc_norm_stderr": 0.03191282052669277 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5949367088607594, + "acc_stderr": 0.03195514741370672, + "acc_norm": 0.5949367088607594, + "acc_norm_stderr": 0.03195514741370672 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.36897001303780963, + "acc_stderr": 0.012323936650174857, + "acc_norm": 0.36897001303780963, + "acc_norm_stderr": 0.012323936650174857 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4215686274509804, + "acc_stderr": 0.03465868196380757, + "acc_norm": 0.4215686274509804, + "acc_norm_stderr": 0.03465868196380757 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.48484848484848486, + "acc_stderr": 0.03902551007374448, + "acc_norm": 0.48484848484848486, + "acc_norm_stderr": 0.03902551007374448 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2607099143206854, + "mc1_stderr": 0.015368841620766368, + "mc2": 0.4258256217247513, + "mc2_stderr": 0.01532948017384573 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3707201889020071, + "acc_stderr": 0.016605801289212605, + "acc_norm": 0.42621015348288077, + "acc_norm_stderr": 0.01700212260948926 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + 
"harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + 
"harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "DopeorNope/COKAL-13b-v3", + "model_sha": "fb5391a7f4dbc9677819d9e5fd98f1685173a0e8", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/DopeorNope/COKAL-DPO_test-v2-13b/result_2024-01-10 23:53:09.json b/DopeorNope/COKAL-DPO_test-v2-13b/result_2024-01-10 23:53:09.json new file mode 100644 index 0000000000000000000000000000000000000000..51a5d38add077a91c95c572d9c7639f822ab0c4a --- /dev/null +++ b/DopeorNope/COKAL-DPO_test-v2-13b/result_2024-01-10 23:53:09.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.507679180887372, + "acc_stderr": 0.01460966744089257, + "acc_norm": 0.5563139931740614, + "acc_norm_stderr": 0.014518421825670435 + }, + "harness|ko_hellaswag|10": { + "acc": 0.5033857797251543, + "acc_stderr": 0.004989667009372646, + "acc_norm": 0.6352320254929297, + "acc_norm_stderr": 0.004803812631994954 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.52046783625731, + "acc_stderr": 0.0383161053282193, + "acc_norm": 0.52046783625731, + "acc_norm_stderr": 0.0383161053282193 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.49514563106796117, + "acc_stderr": 0.049505043821289195, + "acc_norm": 0.49514563106796117, + "acc_norm_stderr": 0.049505043821289195 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.565772669220945, + "acc_stderr": 0.01772458938967779, + "acc_norm": 0.565772669220945, + "acc_norm_stderr": 0.01772458938967779 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4740740740740741, + "acc_stderr": 
0.04313531696750573, + "acc_norm": 0.4740740740740741, + "acc_norm_stderr": 0.04313531696750573 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909284, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909284 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39574468085106385, + "acc_stderr": 0.031967586978353627, + "acc_norm": 0.39574468085106385, + "acc_norm_stderr": 0.031967586978353627 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39759036144578314, + "acc_stderr": 0.038099730845402184, + "acc_norm": 0.39759036144578314, + "acc_norm_stderr": 0.038099730845402184 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5016077170418006, + "acc_stderr": 0.02839794490780661, + "acc_norm": 0.5016077170418006, + "acc_norm_stderr": 0.02839794490780661 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5112107623318386, + "acc_stderr": 0.033549366530984746, + "acc_norm": 0.5112107623318386, + "acc_norm_stderr": 0.033549366530984746 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.46564885496183206, + "acc_stderr": 0.043749285605997376, + "acc_norm": 0.46564885496183206, + "acc_norm_stderr": 0.043749285605997376 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5909090909090909, + "acc_stderr": 0.03502975799413008, + "acc_norm": 0.5909090909090909, + "acc_norm_stderr": 0.03502975799413008 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.42758620689655175, + "acc_stderr": 0.041227371113703316, + "acc_norm": 0.42758620689655175, + "acc_norm_stderr": 0.041227371113703316 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.04220773659171453, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.04220773659171453 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 
0.5, + "acc_stderr": 0.032478490123081544, + "acc_norm": 0.5, + "acc_norm_stderr": 0.032478490123081544 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4461538461538462, + "acc_stderr": 0.025203571773028337, + "acc_norm": 0.4461538461538462, + "acc_norm_stderr": 0.025203571773028337 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.53, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.53, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4537037037037037, + "acc_stderr": 0.04812917324536823, + "acc_norm": 0.4537037037037037, + "acc_norm_stderr": 0.04812917324536823 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.35960591133004927, + "acc_stderr": 0.03376458246509567, + "acc_norm": 0.35960591133004927, + "acc_norm_stderr": 0.03376458246509567 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.44193548387096776, + "acc_stderr": 0.02825155790684973, + "acc_norm": 0.44193548387096776, + "acc_norm_stderr": 0.02825155790684973 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6538461538461539, + "acc_stderr": 0.031166957367235903, + "acc_norm": 0.6538461538461539, + "acc_norm_stderr": 0.031166957367235903 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.42641509433962266, + "acc_stderr": 0.03043779434298305, + "acc_norm": 0.42641509433962266, + "acc_norm_stderr": 0.03043779434298305 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5363636363636364, + "acc_stderr": 0.04776449162396197, + "acc_norm": 0.5363636363636364, + "acc_norm_stderr": 0.04776449162396197 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.02684205787383371, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.02684205787383371 + }, + 
"harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + "acc_stderr": 0.037345356767871984, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.037345356767871984 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5671641791044776, + "acc_stderr": 0.03503490923673282, + "acc_norm": 0.5671641791044776, + "acc_norm_stderr": 0.03503490923673282 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4161849710982659, + "acc_stderr": 0.03758517775404947, + "acc_norm": 0.4161849710982659, + "acc_norm_stderr": 0.03758517775404947 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30158730158730157, + "acc_stderr": 0.023636975996101813, + "acc_norm": 0.30158730158730157, + "acc_norm_stderr": 0.023636975996101813 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4375, + "acc_stderr": 0.04148415739394154, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.04148415739394154 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.62, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.62, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.48554913294797686, + "acc_stderr": 0.02690784985628254, + "acc_norm": 0.48554913294797686, + "acc_norm_stderr": 0.02690784985628254 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5276073619631901, + "acc_stderr": 0.03922378290610991, + "acc_norm": 0.5276073619631901, + "acc_norm_stderr": 0.03922378290610991 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4876543209876543, + "acc_stderr": 0.027812262269327235, + "acc_norm": 0.4876543209876543, + "acc_norm_stderr": 0.027812262269327235 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + 
"harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5906735751295337, + "acc_stderr": 0.03548608168860806, + "acc_norm": 0.5906735751295337, + "acc_norm_stderr": 0.03548608168860806 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.0409698513984367, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.0409698513984367 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6128440366972477, + "acc_stderr": 0.02088423199264345, + "acc_norm": 0.6128440366972477, + "acc_norm_stderr": 0.02088423199264345 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.31746031746031744, + "acc_stderr": 0.04163453031302859, + "acc_norm": 0.31746031746031744, + "acc_norm_stderr": 0.04163453031302859 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4117647058823529, + "acc_stderr": 0.028180596328259287, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.028180596328259287 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.043913262867240704, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.043913262867240704 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.34210526315789475, + "acc_stderr": 0.03860731599316092, + "acc_norm": 0.34210526315789475, + "acc_norm_stderr": 0.03860731599316092 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.40032679738562094, + "acc_stderr": 0.019821843688271775, + "acc_norm": 0.40032679738562094, + "acc_norm_stderr": 0.019821843688271775 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32978723404255317, + "acc_stderr": 0.028045946942042398, + "acc_norm": 0.32978723404255317, + "acc_norm_stderr": 0.028045946942042398 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2767857142857143, + "acc_stderr": 0.04246624336697624, + 
"acc_norm": 0.2767857142857143, + "acc_norm_stderr": 0.04246624336697624 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.33796296296296297, + "acc_stderr": 0.03225941352631295, + "acc_norm": 0.33796296296296297, + "acc_norm_stderr": 0.03225941352631295 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.25251396648044694, + "acc_stderr": 0.014530330201468633, + "acc_norm": 0.25251396648044694, + "acc_norm_stderr": 0.014530330201468633 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4007352941176471, + "acc_stderr": 0.029768263528933102, + "acc_norm": 0.4007352941176471, + "acc_norm_stderr": 0.029768263528933102 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4163265306122449, + "acc_stderr": 0.03155782816556164, + "acc_norm": 0.4163265306122449, + "acc_norm_stderr": 0.03155782816556164 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6244725738396625, + "acc_stderr": 0.03152256243091156, + "acc_norm": 0.6244725738396625, + "acc_norm_stderr": 0.03152256243091156 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.36114732724902215, + "acc_stderr": 0.01226793547751903, + "acc_norm": 0.36114732724902215, + "acc_norm_stderr": 0.01226793547751903 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5294117647058824, + "acc_stderr": 0.03503235296367993, + "acc_norm": 0.5294117647058824, + "acc_norm_stderr": 0.03503235296367993 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5818181818181818, + "acc_stderr": 0.03851716319398393, + "acc_norm": 0.5818181818181818, + "acc_norm_stderr": 0.03851716319398393 + }, + "harness|ko_truthfulqa_mc|0": 
{ + "mc1": 0.3684210526315789, + "mc1_stderr": 0.016886551261046046, + "mc2": 0.515036547042135, + "mc2_stderr": 0.01642149706217717 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4734356552538371, + "acc_stderr": 0.017166075717577747, + "acc_norm": 0.4923258559622196, + "acc_norm_stderr": 0.017188329219654276 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "DopeorNope/COKAL-DPO_test-v2-13b", + "model_sha": "f0547cda863e13012fcbcf468e8e8381ee5d2ecb", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/DopeorNope/COKAL-DPO_test-v2/result_2023-11-10 08:12:00.json b/DopeorNope/COKAL-DPO_test-v2/result_2023-11-10 08:12:00.json new file mode 100644 index 0000000000000000000000000000000000000000..74bfc21694534b3a81ebdd41f626d55745138651 --- /dev/null +++ b/DopeorNope/COKAL-DPO_test-v2/result_2023-11-10 08:12:00.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.507679180887372, + "acc_stderr": 0.01460966744089257, + "acc_norm": 0.5563139931740614, + "acc_norm_stderr": 0.014518421825670435 + }, + "harness|ko_hellaswag|10": { + "acc": 0.5033857797251543, + "acc_stderr": 
0.004989667009372646, + "acc_norm": 0.6350328619796853, + "acc_norm_stderr": 0.004804370563856224 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.52046783625731, + "acc_stderr": 0.0383161053282193, + "acc_norm": 0.52046783625731, + "acc_norm_stderr": 0.0383161053282193 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.49514563106796117, + "acc_stderr": 0.049505043821289195, + "acc_norm": 0.49514563106796117, + "acc_norm_stderr": 0.049505043821289195 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.565772669220945, + "acc_stderr": 0.01772458938967779, + "acc_norm": 0.565772669220945, + "acc_norm_stderr": 0.01772458938967779 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4740740740740741, + "acc_stderr": 0.04313531696750573, + "acc_norm": 0.4740740740740741, + "acc_norm_stderr": 0.04313531696750573 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909284, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909284 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39574468085106385, + "acc_stderr": 0.031967586978353627, + "acc_norm": 0.39574468085106385, + "acc_norm_stderr": 0.031967586978353627 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39759036144578314, + "acc_stderr": 0.038099730845402184, + "acc_norm": 0.39759036144578314, + "acc_norm_stderr": 0.038099730845402184 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5016077170418006, + "acc_stderr": 0.02839794490780661, + "acc_norm": 0.5016077170418006, + "acc_norm_stderr": 0.02839794490780661 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5112107623318386, + "acc_stderr": 0.033549366530984746, + "acc_norm": 0.5112107623318386, + "acc_norm_stderr": 0.033549366530984746 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.46564885496183206, + "acc_stderr": 0.043749285605997376, + "acc_norm": 0.46564885496183206, + "acc_norm_stderr": 0.043749285605997376 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.38, + 
"acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5909090909090909, + "acc_stderr": 0.03502975799413008, + "acc_norm": 0.5909090909090909, + "acc_norm_stderr": 0.03502975799413008 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.42758620689655175, + "acc_stderr": 0.041227371113703316, + "acc_norm": 0.42758620689655175, + "acc_norm_stderr": 0.041227371113703316 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.04220773659171453, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.04220773659171453 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5, + "acc_stderr": 0.032478490123081544, + "acc_norm": 0.5, + "acc_norm_stderr": 0.032478490123081544 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4461538461538462, + "acc_stderr": 0.025203571773028337, + "acc_norm": 0.4461538461538462, + "acc_norm_stderr": 0.025203571773028337 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.53, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.53, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4537037037037037, + "acc_stderr": 0.04812917324536823, + "acc_norm": 0.4537037037037037, + "acc_norm_stderr": 0.04812917324536823 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.35960591133004927, + "acc_stderr": 0.03376458246509567, + "acc_norm": 0.35960591133004927, + "acc_norm_stderr": 0.03376458246509567 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.44193548387096776, + "acc_stderr": 0.02825155790684973, + "acc_norm": 0.44193548387096776, + "acc_norm_stderr": 0.02825155790684973 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 
0.6538461538461539, + "acc_stderr": 0.031166957367235903, + "acc_norm": 0.6538461538461539, + "acc_norm_stderr": 0.031166957367235903 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.42641509433962266, + "acc_stderr": 0.03043779434298305, + "acc_norm": 0.42641509433962266, + "acc_norm_stderr": 0.03043779434298305 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5363636363636364, + "acc_stderr": 0.04776449162396197, + "acc_norm": 0.5363636363636364, + "acc_norm_stderr": 0.04776449162396197 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.02684205787383371, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.02684205787383371 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + "acc_stderr": 0.037345356767871984, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.037345356767871984 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5671641791044776, + "acc_stderr": 0.03503490923673282, + "acc_norm": 0.5671641791044776, + "acc_norm_stderr": 0.03503490923673282 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4161849710982659, + "acc_stderr": 0.03758517775404947, + "acc_norm": 0.4161849710982659, + "acc_norm_stderr": 0.03758517775404947 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30158730158730157, + "acc_stderr": 0.023636975996101813, + "acc_norm": 0.30158730158730157, + "acc_norm_stderr": 0.023636975996101813 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4375, + "acc_stderr": 0.04148415739394154, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.04148415739394154 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.62, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.62, + "acc_norm_stderr": 0.04878317312145633 + }, + 
"harness|ko_mmlu_moral_disputes|5": { + "acc": 0.48554913294797686, + "acc_stderr": 0.02690784985628254, + "acc_norm": 0.48554913294797686, + "acc_norm_stderr": 0.02690784985628254 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5276073619631901, + "acc_stderr": 0.03922378290610991, + "acc_norm": 0.5276073619631901, + "acc_norm_stderr": 0.03922378290610991 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4876543209876543, + "acc_stderr": 0.027812262269327235, + "acc_norm": 0.4876543209876543, + "acc_norm_stderr": 0.027812262269327235 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5906735751295337, + "acc_stderr": 0.03548608168860806, + "acc_norm": 0.5906735751295337, + "acc_norm_stderr": 0.03548608168860806 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.0409698513984367, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.0409698513984367 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6128440366972477, + "acc_stderr": 0.02088423199264345, + "acc_norm": 0.6128440366972477, + "acc_norm_stderr": 0.02088423199264345 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.31746031746031744, + "acc_stderr": 0.04163453031302859, + "acc_norm": 0.31746031746031744, + "acc_norm_stderr": 0.04163453031302859 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4117647058823529, + "acc_stderr": 0.028180596328259287, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.028180596328259287 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.043913262867240704, + "acc_norm": 0.6363636363636364, + 
"acc_norm_stderr": 0.043913262867240704 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.34210526315789475, + "acc_stderr": 0.03860731599316092, + "acc_norm": 0.34210526315789475, + "acc_norm_stderr": 0.03860731599316092 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.40032679738562094, + "acc_stderr": 0.019821843688271775, + "acc_norm": 0.40032679738562094, + "acc_norm_stderr": 0.019821843688271775 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32978723404255317, + "acc_stderr": 0.028045946942042398, + "acc_norm": 0.32978723404255317, + "acc_norm_stderr": 0.028045946942042398 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2767857142857143, + "acc_stderr": 0.04246624336697624, + "acc_norm": 0.2767857142857143, + "acc_norm_stderr": 0.04246624336697624 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.33796296296296297, + "acc_stderr": 0.03225941352631295, + "acc_norm": 0.33796296296296297, + "acc_norm_stderr": 0.03225941352631295 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.25251396648044694, + "acc_stderr": 0.014530330201468633, + "acc_norm": 0.25251396648044694, + "acc_norm_stderr": 0.014530330201468633 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4007352941176471, + "acc_stderr": 0.029768263528933102, + "acc_norm": 0.4007352941176471, + "acc_norm_stderr": 0.029768263528933102 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4163265306122449, + "acc_stderr": 0.03155782816556164, + "acc_norm": 0.4163265306122449, + "acc_norm_stderr": 0.03155782816556164 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 
0.6244725738396625, + "acc_stderr": 0.03152256243091156, + "acc_norm": 0.6244725738396625, + "acc_norm_stderr": 0.03152256243091156 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.36114732724902215, + "acc_stderr": 0.01226793547751903, + "acc_norm": 0.36114732724902215, + "acc_norm_stderr": 0.01226793547751903 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5294117647058824, + "acc_stderr": 0.03503235296367993, + "acc_norm": 0.5294117647058824, + "acc_norm_stderr": 0.03503235296367993 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5818181818181818, + "acc_stderr": 0.03851716319398393, + "acc_norm": 0.5818181818181818, + "acc_norm_stderr": 0.03851716319398393 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3684210526315789, + "mc1_stderr": 0.016886551261046046, + "mc2": 0.5150391905534241, + "mc2_stderr": 0.016421569953399714 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4734356552538371, + "acc_stderr": 0.017166075717577747, + "acc_norm": 0.4923258559622196, + "acc_norm_stderr": 0.017188329219654276 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + 
"harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "DopeorNope/COKAL-DPO_test-v2", + "model_sha": "effd7432a8cca9d0bb7b30cf9d093d004ed39616", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 
0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/DopeorNope/COKALD-13B-v2/result_2023-11-05 16:40:51.json b/DopeorNope/COKALD-13B-v2/result_2023-11-05 16:40:51.json new file mode 100644 index 0000000000000000000000000000000000000000..02876c89813aee65cf3c4f0438a10b080987e063 --- /dev/null +++ b/DopeorNope/COKALD-13B-v2/result_2023-11-05 16:40:51.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.40784982935153585, + "acc_stderr": 0.014361097288449708, + "acc_norm": 0.4778156996587031, + "acc_norm_stderr": 0.014597001927076133 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4287990440151364, + "acc_stderr": 0.004938930143234453, + "acc_norm": 0.574088826926907, + "acc_norm_stderr": 0.004934698012050241 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5146198830409356, + "acc_stderr": 0.038331852752130254, + "acc_norm": 0.5146198830409356, + "acc_norm_stderr": 0.038331852752130254 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5533980582524272, + "acc_stderr": 0.04922424153458933, + "acc_norm": 0.5533980582524272, + "acc_norm_stderr": 0.04922424153458933 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5632183908045977, + "acc_stderr": 0.017736470837800694, + "acc_norm": 0.5632183908045977, + "acc_norm_stderr": 0.017736470837800694 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4148148148148148, + "acc_stderr": 0.04256193767901407, + "acc_norm": 0.4148148148148148, + "acc_norm_stderr": 0.04256193767901407 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206824, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206824 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.425531914893617, + "acc_stderr": 0.03232146916224469, + "acc_norm": 0.425531914893617, + "acc_norm_stderr": 0.03232146916224469 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.41566265060240964, + "acc_stderr": 
0.03836722176598053, + "acc_norm": 0.41566265060240964, + "acc_norm_stderr": 0.03836722176598053 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5016077170418006, + "acc_stderr": 0.02839794490780661, + "acc_norm": 0.5016077170418006, + "acc_norm_stderr": 0.02839794490780661 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5291479820627802, + "acc_stderr": 0.03350073248773404, + "acc_norm": 0.5291479820627802, + "acc_norm_stderr": 0.03350073248773404 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48091603053435117, + "acc_stderr": 0.04382094705550988, + "acc_norm": 0.48091603053435117, + "acc_norm_stderr": 0.04382094705550988 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.35, + "acc_stderr": 0.04793724854411021, + "acc_norm": 0.35, + "acc_norm_stderr": 0.04793724854411021 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6060606060606061, + "acc_stderr": 0.034812853382329645, + "acc_norm": 0.6060606060606061, + "acc_norm_stderr": 0.034812853382329645 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3931034482758621, + "acc_stderr": 0.040703290137070705, + "acc_norm": 0.3931034482758621, + "acc_norm_stderr": 0.040703290137070705 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.04220773659171452, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.04220773659171452 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4831932773109244, + "acc_stderr": 0.03246013680375308, + "acc_norm": 0.4831932773109244, + "acc_norm_stderr": 0.03246013680375308 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4641025641025641, + "acc_stderr": 0.025285585990017834, + "acc_norm": 0.4641025641025641, + "acc_norm_stderr": 0.025285585990017834 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_global_facts|5": { + 
"acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39408866995073893, + "acc_stderr": 0.03438157967036545, + "acc_norm": 0.39408866995073893, + "acc_norm_stderr": 0.03438157967036545 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.46774193548387094, + "acc_stderr": 0.02838474778881333, + "acc_norm": 0.46774193548387094, + "acc_norm_stderr": 0.02838474778881333 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6495726495726496, + "acc_stderr": 0.0312561082442188, + "acc_norm": 0.6495726495726496, + "acc_norm_stderr": 0.0312561082442188 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4377358490566038, + "acc_stderr": 0.030533338430467516, + "acc_norm": 0.4377358490566038, + "acc_norm_stderr": 0.030533338430467516 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5909090909090909, + "acc_stderr": 0.04709306978661895, + "acc_norm": 0.5909090909090909, + "acc_norm_stderr": 0.04709306978661895 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.02684205787383371, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.02684205787383371 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.03757949922943343, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.03757949922943343 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6069651741293532, + "acc_stderr": 0.0345368246603156, + "acc_norm": 0.6069651741293532, + "acc_norm_stderr": 0.0345368246603156 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4161849710982659, + "acc_stderr": 0.037585177754049466, + "acc_norm": 0.4161849710982659, + "acc_norm_stderr": 
0.037585177754049466 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.29365079365079366, + "acc_stderr": 0.023456037383982026, + "acc_norm": 0.29365079365079366, + "acc_norm_stderr": 0.023456037383982026 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4027777777777778, + "acc_stderr": 0.04101405519842425, + "acc_norm": 0.4027777777777778, + "acc_norm_stderr": 0.04101405519842425 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.63, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.63, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5, + "acc_stderr": 0.026919095102908273, + "acc_norm": 0.5, + "acc_norm_stderr": 0.026919095102908273 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5030674846625767, + "acc_stderr": 0.03928297078179663, + "acc_norm": 0.5030674846625767, + "acc_norm_stderr": 0.03928297078179663 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.027801656212323667, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.027801656212323667 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5854922279792746, + "acc_stderr": 0.035553003195576686, + "acc_norm": 0.5854922279792746, + "acc_norm_stderr": 0.035553003195576686 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.0409698513984367, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.0409698513984367 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.618348623853211, + "acc_stderr": 0.02082814851702261, + "acc_norm": 0.618348623853211, + "acc_norm_stderr": 
0.02082814851702261 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.042163702135578345, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.042163702135578345 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.42483660130718953, + "acc_stderr": 0.02830457667314111, + "acc_norm": 0.42483660130718953, + "acc_norm_stderr": 0.02830457667314111 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.42, + "acc_stderr": 0.04960449637488584, + "acc_norm": 0.42, + "acc_norm_stderr": 0.04960449637488584 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.628099173553719, + "acc_stderr": 0.044120158066245044, + "acc_norm": 0.628099173553719, + "acc_norm_stderr": 0.044120158066245044 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3684210526315789, + "acc_stderr": 0.03925523381052932, + "acc_norm": 0.3684210526315789, + "acc_norm_stderr": 0.03925523381052932 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.39705882352941174, + "acc_stderr": 0.019794488900024106, + "acc_norm": 0.39705882352941174, + "acc_norm_stderr": 0.019794488900024106 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3475177304964539, + "acc_stderr": 0.028406627809590954, + "acc_norm": 0.3475177304964539, + "acc_norm_stderr": 0.028406627809590954 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.22321428571428573, + "acc_stderr": 0.039523019677025116, + "acc_norm": 0.22321428571428573, + "acc_norm_stderr": 0.039523019677025116 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.35648148148148145, + "acc_stderr": 0.032664783315272714, + "acc_norm": 0.35648148148148145, + "acc_norm_stderr": 0.032664783315272714 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + 
"acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4264705882352941, + "acc_stderr": 0.030042615832714854, + "acc_norm": 0.4264705882352941, + "acc_norm_stderr": 0.030042615832714854 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.49795918367346936, + "acc_stderr": 0.0320089533497105, + "acc_norm": 0.49795918367346936, + "acc_norm_stderr": 0.0320089533497105 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6413502109704642, + "acc_stderr": 0.031219569445301854, + "acc_norm": 0.6413502109704642, + "acc_norm_stderr": 0.031219569445301854 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.35853976531942633, + "acc_stderr": 0.012248487319682751, + "acc_norm": 0.35853976531942633, + "acc_norm_stderr": 0.012248487319682751 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4950980392156863, + "acc_stderr": 0.03509143375606786, + "acc_norm": 0.4950980392156863, + "acc_norm_stderr": 0.03509143375606786 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5151515151515151, + "acc_stderr": 0.03902551007374448, + "acc_norm": 0.5151515151515151, + "acc_norm_stderr": 0.03902551007374448 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3108935128518972, + "mc1_stderr": 0.016203316673559696, + "mc2": 0.4712352722064192, + "mc2_stderr": 0.015376328355595536 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.47461629279811096, + "acc_stderr": 0.017168187201429253, + "acc_norm": 0.5466351829988194, + "acc_norm_stderr": 0.01711541822522687 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + 
"harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + 
"harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "DopeorNope/COKALD-13B-v2", + "model_sha": "3e6e8e2882890e69078d236891f9212a5b9d7a50", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/DopeorNope/COKALL-13B-v1/result_2023-11-01 07:29:19.json b/DopeorNope/COKALL-13B-v1/result_2023-11-01 07:29:19.json new file mode 100644 index 0000000000000000000000000000000000000000..2f74e7d84287d4cced3ad197e10240eaa22e9a05 --- /dev/null +++ b/DopeorNope/COKALL-13B-v1/result_2023-11-01 07:29:19.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4052901023890785, + "acc_stderr": 0.014346869060229327, + "acc_norm": 0.47013651877133106, + "acc_norm_stderr": 0.014585305840007105 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4251145190201155, + "acc_stderr": 0.004933500261683595, + "acc_norm": 0.5697072296355308, + "acc_norm_stderr": 0.004941051795214796 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5087719298245614, + "acc_stderr": 0.038342347441649924, + "acc_norm": 0.5087719298245614, + "acc_norm_stderr": 0.038342347441649924 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5339805825242718, + "acc_stderr": 0.0493929144727348, + "acc_norm": 0.5339805825242718, + "acc_norm_stderr": 0.0493929144727348 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.565772669220945, + "acc_stderr": 
0.017724589389677785, + "acc_norm": 0.565772669220945, + "acc_norm_stderr": 0.017724589389677785 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.04309732901036354, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.04309732901036354 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206824, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206824 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.43829787234042555, + "acc_stderr": 0.03243618636108101, + "acc_norm": 0.43829787234042555, + "acc_norm_stderr": 0.03243618636108101 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.41566265060240964, + "acc_stderr": 0.03836722176598053, + "acc_norm": 0.41566265060240964, + "acc_norm_stderr": 0.03836722176598053 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4887459807073955, + "acc_stderr": 0.028390897396863526, + "acc_norm": 0.4887459807073955, + "acc_norm_stderr": 0.028390897396863526 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.547085201793722, + "acc_stderr": 0.03340867501923324, + "acc_norm": 0.547085201793722, + "acc_norm_stderr": 0.03340867501923324 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48854961832061067, + "acc_stderr": 0.04384140024078016, + "acc_norm": 0.48854961832061067, + "acc_norm_stderr": 0.04384140024078016 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5808080808080808, + "acc_stderr": 0.035155207286704175, + "acc_norm": 0.5808080808080808, + "acc_norm_stderr": 0.035155207286704175 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4068965517241379, + "acc_stderr": 0.04093793981266237, + "acc_norm": 0.4068965517241379, + "acc_norm_stderr": 0.04093793981266237 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2647058823529412, + 
"acc_stderr": 0.04389869956808778, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.04389869956808778 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.47478991596638653, + "acc_stderr": 0.03243718055137411, + "acc_norm": 0.47478991596638653, + "acc_norm_stderr": 0.03243718055137411 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.45384615384615384, + "acc_stderr": 0.025242770987126167, + "acc_norm": 0.45384615384615384, + "acc_norm_stderr": 0.025242770987126167 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5277777777777778, + "acc_stderr": 0.04826217294139894, + "acc_norm": 0.5277777777777778, + "acc_norm_stderr": 0.04826217294139894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.43842364532019706, + "acc_stderr": 0.03491207857486518, + "acc_norm": 0.43842364532019706, + "acc_norm_stderr": 0.03491207857486518 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4806451612903226, + "acc_stderr": 0.0284226874043121, + "acc_norm": 0.4806451612903226, + "acc_norm_stderr": 0.0284226874043121 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6282051282051282, + "acc_stderr": 0.031660988918880785, + "acc_norm": 0.6282051282051282, + "acc_norm_stderr": 0.031660988918880785 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.45660377358490567, + "acc_stderr": 0.03065674869673943, + "acc_norm": 0.45660377358490567, + "acc_norm_stderr": 0.03065674869673943 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.04769300568972744, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.04769300568972744 + }, + 
"harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25555555555555554, + "acc_stderr": 0.02659393910184407, + "acc_norm": 0.25555555555555554, + "acc_norm_stderr": 0.02659393910184407 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + "acc_stderr": 0.03684881521389024, + "acc_norm": 0.2847682119205298, + "acc_norm_stderr": 0.03684881521389024 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6119402985074627, + "acc_stderr": 0.0344578996436275, + "acc_norm": 0.6119402985074627, + "acc_norm_stderr": 0.0344578996436275 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.42196531791907516, + "acc_stderr": 0.0376574669386515, + "acc_norm": 0.42196531791907516, + "acc_norm_stderr": 0.0376574669386515 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2751322751322751, + "acc_stderr": 0.02300008685906864, + "acc_norm": 0.2751322751322751, + "acc_norm_stderr": 0.02300008685906864 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.375, + "acc_stderr": 0.04048439222695598, + "acc_norm": 0.375, + "acc_norm_stderr": 0.04048439222695598 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.62, + "acc_stderr": 0.04878317312145634, + "acc_norm": 0.62, + "acc_norm_stderr": 0.04878317312145634 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5086705202312138, + "acc_stderr": 0.026915047355369804, + "acc_norm": 0.5086705202312138, + "acc_norm_stderr": 0.026915047355369804 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5398773006134969, + "acc_stderr": 0.03915857291436972, + "acc_norm": 0.5398773006134969, + "acc_norm_stderr": 0.03915857291436972 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4783950617283951, + "acc_stderr": 0.027794760105008736, + "acc_norm": 0.4783950617283951, + "acc_norm_stderr": 0.027794760105008736 + }, + 
"harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5595854922279793, + "acc_stderr": 0.035827245300360945, + "acc_norm": 0.5595854922279793, + "acc_norm_stderr": 0.035827245300360945 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2982456140350877, + "acc_stderr": 0.04303684033537317, + "acc_norm": 0.2982456140350877, + "acc_norm_stderr": 0.04303684033537317 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5889908256880734, + "acc_stderr": 0.021095050687277656, + "acc_norm": 0.5889908256880734, + "acc_norm_stderr": 0.021095050687277656 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.31746031746031744, + "acc_stderr": 0.04163453031302859, + "acc_norm": 0.31746031746031744, + "acc_norm_stderr": 0.04163453031302859 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.40522875816993464, + "acc_stderr": 0.02811092849280908, + "acc_norm": 0.40522875816993464, + "acc_norm_stderr": 0.02811092849280908 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6446280991735537, + "acc_stderr": 0.0436923632657398, + "acc_norm": 0.6446280991735537, + "acc_norm_stderr": 0.0436923632657398 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3815789473684211, + "acc_stderr": 0.03953173377749194, + "acc_norm": 0.3815789473684211, + "acc_norm_stderr": 0.03953173377749194 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.39052287581699346, + "acc_stderr": 0.019737008998094607, + "acc_norm": 0.39052287581699346, + "acc_norm_stderr": 0.019737008998094607 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3262411347517731, + "acc_stderr": 0.027968453043563168, + "acc_norm": 0.3262411347517731, + 
"acc_norm_stderr": 0.027968453043563168 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.1875, + "acc_stderr": 0.0370468111477387, + "acc_norm": 0.1875, + "acc_norm_stderr": 0.0370468111477387 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.03256850570293647, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.03256850570293647 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.44485294117647056, + "acc_stderr": 0.03018753206032938, + "acc_norm": 0.44485294117647056, + "acc_norm_stderr": 0.03018753206032938 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5020408163265306, + "acc_stderr": 0.0320089533497105, + "acc_norm": 0.5020408163265306, + "acc_norm_stderr": 0.0320089533497105 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6286919831223629, + "acc_stderr": 0.03145068600744858, + "acc_norm": 0.6286919831223629, + "acc_norm_stderr": 0.03145068600744858 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.35071707953063885, + "acc_stderr": 0.012187773370741518, + "acc_norm": 0.35071707953063885, + "acc_norm_stderr": 0.012187773370741518 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5049019607843137, + "acc_stderr": 0.03509143375606786, + "acc_norm": 0.5049019607843137, + "acc_norm_stderr": 0.03509143375606786 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5393939393939394, + "acc_stderr": 
0.03892207016552012, + "acc_norm": 0.5393939393939394, + "acc_norm_stderr": 0.03892207016552012 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.26193390452876375, + "mc1_stderr": 0.01539211880501501, + "mc2": 0.42109130378367604, + "mc2_stderr": 0.014773304652161631 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.48406139315230223, + "acc_stderr": 0.017181617837190195, + "acc_norm": 0.5667060212514758, + "acc_norm_stderr": 0.017036683641893098 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + 
"harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "DopeorNope/COKALL-13B-v1", + "model_sha": "a18441a1da10e5c23877d12fb89bde30d4c108c9", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/DopeorNope/COKALL-13B-v2/result_2023-11-01 15:07:31.json b/DopeorNope/COKALL-13B-v2/result_2023-11-01 15:07:31.json new file mode 100644 index 0000000000000000000000000000000000000000..06547c3701c4621eec2fa43100c186f32a2bb16d --- /dev/null +++ b/DopeorNope/COKALL-13B-v2/result_2023-11-01 15:07:31.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4069965870307167, + "acc_stderr": 0.01435639941800912, + "acc_norm": 0.4684300341296928, + 
"acc_norm_stderr": 0.014582236460866977 + }, + "harness|ko_hellaswag|10": { + "acc": 0.42471619199362676, + "acc_stderr": 0.004932896472460567, + "acc_norm": 0.569308902609042, + "acc_norm_stderr": 0.004941609820763586 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5146198830409356, + "acc_stderr": 0.038331852752130254, + "acc_norm": 0.5146198830409356, + "acc_norm_stderr": 0.038331852752130254 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5533980582524272, + "acc_stderr": 0.04922424153458933, + "acc_norm": 0.5533980582524272, + "acc_norm_stderr": 0.04922424153458933 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5632183908045977, + "acc_stderr": 0.017736470837800698, + "acc_norm": 0.5632183908045977, + "acc_norm_stderr": 0.017736470837800698 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45925925925925926, + "acc_stderr": 0.04304979692464244, + "acc_norm": 0.45925925925925926, + "acc_norm_stderr": 0.04304979692464244 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4297872340425532, + "acc_stderr": 0.03236214467715564, + "acc_norm": 0.4297872340425532, + "acc_norm_stderr": 0.03236214467715564 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42771084337349397, + "acc_stderr": 0.038515976837185335, + "acc_norm": 0.42771084337349397, + "acc_norm_stderr": 0.038515976837185335 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5176848874598071, + "acc_stderr": 0.02838032284907713, + "acc_norm": 0.5176848874598071, + "acc_norm_stderr": 0.02838032284907713 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5515695067264574, + "acc_stderr": 0.033378837362550984, + "acc_norm": 0.5515695067264574, + "acc_norm_stderr": 0.033378837362550984 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48091603053435117, + "acc_stderr": 0.04382094705550989, + "acc_norm": 
0.48091603053435117, + "acc_norm_stderr": 0.04382094705550989 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5808080808080808, + "acc_stderr": 0.035155207286704175, + "acc_norm": 0.5808080808080808, + "acc_norm_stderr": 0.035155207286704175 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.43448275862068964, + "acc_stderr": 0.041307408795554966, + "acc_norm": 0.43448275862068964, + "acc_norm_stderr": 0.041307408795554966 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.041583075330832865, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.041583075330832865 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.49159663865546216, + "acc_stderr": 0.0324739027656967, + "acc_norm": 0.49159663865546216, + "acc_norm_stderr": 0.0324739027656967 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4564102564102564, + "acc_stderr": 0.02525448542479961, + "acc_norm": 0.4564102564102564, + "acc_norm_stderr": 0.02525448542479961 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4236453201970443, + "acc_stderr": 0.03476725747649038, + "acc_norm": 0.4236453201970443, + "acc_norm_stderr": 0.03476725747649038 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.47419354838709676, + "acc_stderr": 
0.02840609505765332, + "acc_norm": 0.47419354838709676, + "acc_norm_stderr": 0.02840609505765332 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6410256410256411, + "acc_stderr": 0.03142616993791924, + "acc_norm": 0.6410256410256411, + "acc_norm_stderr": 0.03142616993791924 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4641509433962264, + "acc_stderr": 0.030693675018458006, + "acc_norm": 0.4641509433962264, + "acc_norm_stderr": 0.030693675018458006 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.04769300568972744, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.04769300568972744 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24814814814814815, + "acc_stderr": 0.0263357394040558, + "acc_norm": 0.24814814814814815, + "acc_norm_stderr": 0.0263357394040558 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + "acc_stderr": 0.037345356767871984, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.037345356767871984 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6069651741293532, + "acc_stderr": 0.0345368246603156, + "acc_norm": 0.6069651741293532, + "acc_norm_stderr": 0.0345368246603156 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.43352601156069365, + "acc_stderr": 0.03778621079092055, + "acc_norm": 0.43352601156069365, + "acc_norm_stderr": 0.03778621079092055 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.023068188848261117, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.023068188848261117 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3680555555555556, + "acc_stderr": 0.040329990539607195, + "acc_norm": 0.3680555555555556, + "acc_norm_stderr": 0.040329990539607195 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + 
"harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.65, + "acc_stderr": 0.04793724854411019, + "acc_norm": 0.65, + "acc_norm_stderr": 0.04793724854411019 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5028901734104047, + "acc_stderr": 0.02691864538323901, + "acc_norm": 0.5028901734104047, + "acc_norm_stderr": 0.02691864538323901 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5153374233128835, + "acc_stderr": 0.03926522378708843, + "acc_norm": 0.5153374233128835, + "acc_norm_stderr": 0.03926522378708843 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5030864197530864, + "acc_stderr": 0.027820214158594377, + "acc_norm": 0.5030864197530864, + "acc_norm_stderr": 0.027820214158594377 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5751295336787565, + "acc_stderr": 0.0356747133521254, + "acc_norm": 0.5751295336787565, + "acc_norm_stderr": 0.0356747133521254 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2894736842105263, + "acc_stderr": 0.04266339443159395, + "acc_norm": 0.2894736842105263, + "acc_norm_stderr": 0.04266339443159395 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6036697247706422, + "acc_stderr": 0.020971469947900525, + "acc_norm": 0.6036697247706422, + "acc_norm_stderr": 0.020971469947900525 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.042163702135578345, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.042163702135578345 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4150326797385621, + "acc_stderr": 0.028213504177824093, + "acc_norm": 0.4150326797385621, + "acc_norm_stderr": 0.028213504177824093 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + 
"harness|ko_mmlu_international_law|5": { + "acc": 0.628099173553719, + "acc_stderr": 0.04412015806624504, + "acc_norm": 0.628099173553719, + "acc_norm_stderr": 0.04412015806624504 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40789473684210525, + "acc_stderr": 0.03999309712777471, + "acc_norm": 0.40789473684210525, + "acc_norm_stderr": 0.03999309712777471 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.37745098039215685, + "acc_stderr": 0.019610851474880286, + "acc_norm": 0.37745098039215685, + "acc_norm_stderr": 0.019610851474880286 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.02812163604063989, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.02812163604063989 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.20535714285714285, + "acc_stderr": 0.038342410214190714, + "acc_norm": 0.20535714285714285, + "acc_norm_stderr": 0.038342410214190714 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.032568505702936464, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.032568505702936464 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4522058823529412, + "acc_stderr": 0.030233758551596445, + "acc_norm": 0.4522058823529412, + "acc_norm_stderr": 0.030233758551596445 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.49795918367346936, + "acc_stderr": 0.0320089533497105, + "acc_norm": 
0.49795918367346936, + "acc_norm_stderr": 0.0320089533497105 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6413502109704642, + "acc_stderr": 0.031219569445301857, + "acc_norm": 0.6413502109704642, + "acc_norm_stderr": 0.031219569445301857 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.35528031290743156, + "acc_stderr": 0.012223623364044046, + "acc_norm": 0.35528031290743156, + "acc_norm_stderr": 0.012223623364044046 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4950980392156863, + "acc_stderr": 0.03509143375606786, + "acc_norm": 0.4950980392156863, + "acc_norm_stderr": 0.03509143375606786 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5151515151515151, + "acc_stderr": 0.03902551007374448, + "acc_norm": 0.5151515151515151, + "acc_norm_stderr": 0.03902551007374448 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.26805385556915545, + "mc1_stderr": 0.015506204722834557, + "mc2": 0.4229553020954532, + "mc2_stderr": 0.01482225107189349 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4805194805194805, + "acc_stderr": 0.01717730199234255, + "acc_norm": 0.5608028335301063, + "acc_norm_stderr": 0.017062775744780705 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + 
"harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "DopeorNope/COKALL-13B-v2", + "model_sha": 
"5aa12e623e32ecb5d455cc2b6ce9c1f2b597c19f", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/DopeorNope/COKALL-13B-v3/result_2023-11-02 03:38:01.json b/DopeorNope/COKALL-13B-v3/result_2023-11-02 03:38:01.json new file mode 100644 index 0000000000000000000000000000000000000000..6d2d2d209480ec1d8acf68bc8cda287e1fe8dea7 --- /dev/null +++ b/DopeorNope/COKALL-13B-v3/result_2023-11-02 03:38:01.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4061433447098976, + "acc_stderr": 0.014351656690097858, + "acc_norm": 0.47013651877133106, + "acc_norm_stderr": 0.014585305840007105 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4255128460466043, + "acc_stderr": 0.004934100774481221, + "acc_norm": 0.5705038836885082, + "acc_norm_stderr": 0.004939925958728879 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5146198830409356, + "acc_stderr": 0.038331852752130254, + "acc_norm": 0.5146198830409356, + "acc_norm_stderr": 0.038331852752130254 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5533980582524272, + "acc_stderr": 0.04922424153458933, + "acc_norm": 0.5533980582524272, + "acc_norm_stderr": 0.04922424153458933 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5644955300127714, + "acc_stderr": 0.01773058992792658, + "acc_norm": 0.5644955300127714, + "acc_norm_stderr": 0.01773058992792658 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4740740740740741, + "acc_stderr": 0.04313531696750574, + "acc_norm": 0.4740740740740741, + "acc_norm_stderr": 0.04313531696750574 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.425531914893617, + "acc_stderr": 0.032321469162244695, + "acc_norm": 0.425531914893617, + 
"acc_norm_stderr": 0.032321469162244695 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42771084337349397, + "acc_stderr": 0.038515976837185335, + "acc_norm": 0.42771084337349397, + "acc_norm_stderr": 0.038515976837185335 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5048231511254019, + "acc_stderr": 0.028396770444111298, + "acc_norm": 0.5048231511254019, + "acc_norm_stderr": 0.028396770444111298 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5381165919282511, + "acc_stderr": 0.033460150119732274, + "acc_norm": 0.5381165919282511, + "acc_norm_stderr": 0.033460150119732274 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4961832061068702, + "acc_stderr": 0.043851623256015534, + "acc_norm": 0.4961832061068702, + "acc_norm_stderr": 0.043851623256015534 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5808080808080808, + "acc_stderr": 0.035155207286704175, + "acc_norm": 0.5808080808080808, + "acc_norm_stderr": 0.035155207286704175 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.04104269211806232, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.04104269211806232 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.04220773659171452, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.04220773659171452 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4957983193277311, + "acc_stderr": 0.03247734334448111, + "acc_norm": 0.4957983193277311, + "acc_norm_stderr": 0.03247734334448111 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.46153846153846156, + "acc_stderr": 0.025275892070240634, + "acc_norm": 0.46153846153846156, + "acc_norm_stderr": 0.025275892070240634 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.5, + "acc_stderr": 
0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5185185185185185, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.5185185185185185, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.03481904844438803, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.03481904844438803 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4645161290322581, + "acc_stderr": 0.028372287797962956, + "acc_norm": 0.4645161290322581, + "acc_norm_stderr": 0.028372287797962956 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6581196581196581, + "acc_stderr": 0.03107502852650776, + "acc_norm": 0.6581196581196581, + "acc_norm_stderr": 0.03107502852650776 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4528301886792453, + "acc_stderr": 0.03063562795796182, + "acc_norm": 0.4528301886792453, + "acc_norm_stderr": 0.03063562795796182 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.04769300568972744, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.04769300568972744 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2518518518518518, + "acc_stderr": 0.026466117538959912, + "acc_norm": 0.2518518518518518, + "acc_norm_stderr": 0.026466117538959912 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + "acc_stderr": 0.037345356767871984, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.037345356767871984 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6069651741293532, + "acc_stderr": 0.0345368246603156, + "acc_norm": 0.6069651741293532, + "acc_norm_stderr": 0.0345368246603156 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 
0.4277456647398844, + "acc_stderr": 0.037724468575180276, + "acc_norm": 0.4277456647398844, + "acc_norm_stderr": 0.037724468575180276 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2804232804232804, + "acc_stderr": 0.02313528797432563, + "acc_norm": 0.2804232804232804, + "acc_norm_stderr": 0.02313528797432563 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.375, + "acc_stderr": 0.04048439222695598, + "acc_norm": 0.375, + "acc_norm_stderr": 0.04048439222695598 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.66, + "acc_stderr": 0.04760952285695238, + "acc_norm": 0.66, + "acc_norm_stderr": 0.04760952285695238 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.48554913294797686, + "acc_stderr": 0.02690784985628254, + "acc_norm": 0.48554913294797686, + "acc_norm_stderr": 0.02690784985628254 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5276073619631901, + "acc_stderr": 0.0392237829061099, + "acc_norm": 0.5276073619631901, + "acc_norm_stderr": 0.0392237829061099 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5, + "acc_stderr": 0.02782074420373286, + "acc_norm": 0.5, + "acc_norm_stderr": 0.02782074420373286 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5751295336787565, + "acc_stderr": 0.0356747133521254, + "acc_norm": 0.5751295336787565, + "acc_norm_stderr": 0.0356747133521254 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.041857744240220575, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.041857744240220575 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6036697247706422, + "acc_stderr": 
0.020971469947900525, + "acc_norm": 0.6036697247706422, + "acc_norm_stderr": 0.020971469947900525 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3253968253968254, + "acc_stderr": 0.041905964388711366, + "acc_norm": 0.3253968253968254, + "acc_norm_stderr": 0.041905964388711366 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4019607843137255, + "acc_stderr": 0.02807415894760065, + "acc_norm": 0.4019607843137255, + "acc_norm_stderr": 0.02807415894760065 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.42, + "acc_stderr": 0.04960449637488584, + "acc_norm": 0.42, + "acc_norm_stderr": 0.04960449637488584 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.628099173553719, + "acc_stderr": 0.04412015806624504, + "acc_norm": 0.628099173553719, + "acc_norm_stderr": 0.04412015806624504 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4276315789473684, + "acc_stderr": 0.04026097083296559, + "acc_norm": 0.4276315789473684, + "acc_norm_stderr": 0.04026097083296559 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.39052287581699346, + "acc_stderr": 0.01973700899809461, + "acc_norm": 0.39052287581699346, + "acc_norm_stderr": 0.01973700899809461 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32978723404255317, + "acc_stderr": 0.028045946942042405, + "acc_norm": 0.32978723404255317, + "acc_norm_stderr": 0.028045946942042405 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.20535714285714285, + "acc_stderr": 0.038342410214190714, + "acc_norm": 0.20535714285714285, + "acc_norm_stderr": 0.038342410214190714 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.032568505702936464, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.032568505702936464 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + 
"harness|ko_mmlu_college_computer_science|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4375, + "acc_stderr": 0.030134614954403924, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.030134614954403924 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4897959183673469, + "acc_stderr": 0.03200255347893782, + "acc_norm": 0.4897959183673469, + "acc_norm_stderr": 0.03200255347893782 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6286919831223629, + "acc_stderr": 0.03145068600744858, + "acc_norm": 0.6286919831223629, + "acc_norm_stderr": 0.03145068600744858 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.34224250325945244, + "acc_stderr": 0.012117939998705878, + "acc_norm": 0.34224250325945244, + "acc_norm_stderr": 0.012117939998705878 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5294117647058824, + "acc_stderr": 0.03503235296367992, + "acc_norm": 0.5294117647058824, + "acc_norm_stderr": 0.03503235296367992 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5393939393939394, + "acc_stderr": 0.03892207016552012, + "acc_norm": 0.5393939393939394, + "acc_norm_stderr": 0.03892207016552012 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2668298653610771, + "mc1_stderr": 0.015483691939237265, + "mc2": 0.423972388627194, + "mc2_stderr": 0.014812434868238748 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.500590318772137, + "acc_stderr": 0.017190342123448586, + "acc_norm": 0.5749704840613932, + "acc_norm_stderr": 0.016996016308362887 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + 
"harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + 
"harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "DopeorNope/COKALL-13B-v3", + "model_sha": "f894f6dfec6757a7fca3876b4a67e7112a136427", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/DopeorNope/COKALL-13B-v4/result_2023-11-02 05:53:28.json b/DopeorNope/COKALL-13B-v4/result_2023-11-02 05:53:28.json new file mode 100644 index 0000000000000000000000000000000000000000..47166bdba4b6ae66b3a460568d738e8ab5d59b06 --- /dev/null +++ b/DopeorNope/COKALL-13B-v4/result_2023-11-02 05:53:28.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4061433447098976, + "acc_stderr": 0.014351656690097858, + "acc_norm": 0.47013651877133106, + "acc_norm_stderr": 0.014585305840007105 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4251145190201155, + "acc_stderr": 0.004933500261683595, + "acc_norm": 0.569806811392153, + "acc_norm_stderr": 0.0049409117792733786 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.49707602339181284, + "acc_stderr": 0.03834759370936839, + "acc_norm": 0.49707602339181284, + "acc_norm_stderr": 0.03834759370936839 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5339805825242718, + "acc_stderr": 0.0493929144727348, + "acc_norm": 0.5339805825242718, + "acc_norm_stderr": 0.0493929144727348 + }, + 
"harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5644955300127714, + "acc_stderr": 0.017730589927926584, + "acc_norm": 0.5644955300127714, + "acc_norm_stderr": 0.017730589927926584 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.04309732901036354, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.04309732901036354 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206824, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206824 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.43829787234042555, + "acc_stderr": 0.03243618636108101, + "acc_norm": 0.43829787234042555, + "acc_norm_stderr": 0.03243618636108101 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42168674698795183, + "acc_stderr": 0.038444531817709175, + "acc_norm": 0.42168674698795183, + "acc_norm_stderr": 0.038444531817709175 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.48231511254019294, + "acc_stderr": 0.02838032284907713, + "acc_norm": 0.48231511254019294, + "acc_norm_stderr": 0.02838032284907713 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5515695067264574, + "acc_stderr": 0.03337883736255098, + "acc_norm": 0.5515695067264574, + "acc_norm_stderr": 0.03337883736255098 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48854961832061067, + "acc_stderr": 0.04384140024078016, + "acc_norm": 0.48854961832061067, + "acc_norm_stderr": 0.04384140024078016 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5808080808080808, + "acc_stderr": 0.035155207286704175, + "acc_norm": 0.5808080808080808, + "acc_norm_stderr": 0.035155207286704175 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.42758620689655175, + "acc_stderr": 0.041227371113703316, + "acc_norm": 0.42758620689655175, + "acc_norm_stderr": 
0.041227371113703316 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.04389869956808778, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.04389869956808778 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.47478991596638653, + "acc_stderr": 0.03243718055137411, + "acc_norm": 0.47478991596638653, + "acc_norm_stderr": 0.03243718055137411 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.45384615384615384, + "acc_stderr": 0.025242770987126167, + "acc_norm": 0.45384615384615384, + "acc_norm_stderr": 0.025242770987126167 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5185185185185185, + "acc_stderr": 0.0483036602463533, + "acc_norm": 0.5185185185185185, + "acc_norm_stderr": 0.0483036602463533 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.43842364532019706, + "acc_stderr": 0.03491207857486518, + "acc_norm": 0.43842364532019706, + "acc_norm_stderr": 0.03491207857486518 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4838709677419355, + "acc_stderr": 0.028429203176724555, + "acc_norm": 0.4838709677419355, + "acc_norm_stderr": 0.028429203176724555 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6282051282051282, + "acc_stderr": 0.031660988918880785, + "acc_norm": 0.6282051282051282, + "acc_norm_stderr": 0.031660988918880785 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.45660377358490567, + "acc_stderr": 0.03065674869673943, + "acc_norm": 0.45660377358490567, + "acc_norm_stderr": 0.03065674869673943 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.04769300568972744, + "acc_norm": 
0.5454545454545454, + "acc_norm_stderr": 0.04769300568972744 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25555555555555554, + "acc_stderr": 0.02659393910184407, + "acc_norm": 0.25555555555555554, + "acc_norm_stderr": 0.02659393910184407 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2781456953642384, + "acc_stderr": 0.036586032627637426, + "acc_norm": 0.2781456953642384, + "acc_norm_stderr": 0.036586032627637426 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6069651741293532, + "acc_stderr": 0.0345368246603156, + "acc_norm": 0.6069651741293532, + "acc_norm_stderr": 0.0345368246603156 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4277456647398844, + "acc_stderr": 0.037724468575180276, + "acc_norm": 0.4277456647398844, + "acc_norm_stderr": 0.037724468575180276 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2724867724867725, + "acc_stderr": 0.02293097307163335, + "acc_norm": 0.2724867724867725, + "acc_norm_stderr": 0.02293097307163335 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3680555555555556, + "acc_stderr": 0.040329990539607195, + "acc_norm": 0.3680555555555556, + "acc_norm_stderr": 0.040329990539607195 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.62, + "acc_stderr": 0.04878317312145634, + "acc_norm": 0.62, + "acc_norm_stderr": 0.04878317312145634 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5115606936416185, + "acc_stderr": 0.02691189868637792, + "acc_norm": 0.5115606936416185, + "acc_norm_stderr": 0.02691189868637792 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5214723926380368, + "acc_stderr": 0.03924746876751129, + "acc_norm": 0.5214723926380368, + "acc_norm_stderr": 0.03924746876751129 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4783950617283951, + "acc_stderr": 
0.027794760105008736, + "acc_norm": 0.4783950617283951, + "acc_norm_stderr": 0.027794760105008736 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5595854922279793, + "acc_stderr": 0.035827245300360945, + "acc_norm": 0.5595854922279793, + "acc_norm_stderr": 0.035827245300360945 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2982456140350877, + "acc_stderr": 0.04303684033537317, + "acc_norm": 0.2982456140350877, + "acc_norm_stderr": 0.04303684033537317 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5871559633027523, + "acc_stderr": 0.02110912813341391, + "acc_norm": 0.5871559633027523, + "acc_norm_stderr": 0.02110912813341391 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3253968253968254, + "acc_stderr": 0.041905964388711366, + "acc_norm": 0.3253968253968254, + "acc_norm_stderr": 0.041905964388711366 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4084967320261438, + "acc_stderr": 0.028146405993096358, + "acc_norm": 0.4084967320261438, + "acc_norm_stderr": 0.028146405993096358 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.628099173553719, + "acc_stderr": 0.04412015806624504, + "acc_norm": 0.628099173553719, + "acc_norm_stderr": 0.04412015806624504 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.375, + "acc_stderr": 0.039397364351956274, + "acc_norm": 0.375, + "acc_norm_stderr": 0.039397364351956274 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.39215686274509803, + "acc_stderr": 0.019751726508762626, + "acc_norm": 0.39215686274509803, + "acc_norm_stderr": 0.019751726508762626 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3262411347517731, 
+ "acc_stderr": 0.027968453043563168, + "acc_norm": 0.3262411347517731, + "acc_norm_stderr": 0.027968453043563168 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.1875, + "acc_stderr": 0.0370468111477387, + "acc_norm": 0.1875, + "acc_norm_stderr": 0.0370468111477387 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.03256850570293647, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.03256850570293647 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4375, + "acc_stderr": 0.030134614954403924, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.030134614954403924 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.49795918367346936, + "acc_stderr": 0.0320089533497105, + "acc_norm": 0.49795918367346936, + "acc_norm_stderr": 0.0320089533497105 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6286919831223629, + "acc_stderr": 0.03145068600744858, + "acc_norm": 0.6286919831223629, + "acc_norm_stderr": 0.03145068600744858 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.35267275097783574, + "acc_stderr": 0.012203286846053887, + "acc_norm": 0.35267275097783574, + "acc_norm_stderr": 0.012203286846053887 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5049019607843137, + "acc_stderr": 0.03509143375606786, + "acc_norm": 0.5049019607843137, + "acc_norm_stderr": 0.03509143375606786 + }, + "harness|ko_mmlu_high_school_european_history|5": 
{ + "acc": 0.5333333333333333, + "acc_stderr": 0.03895658065271847, + "acc_norm": 0.5333333333333333, + "acc_norm_stderr": 0.03895658065271847 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.26193390452876375, + "mc1_stderr": 0.015392118805015008, + "mc2": 0.42045074844260966, + "mc2_stderr": 0.014759134992850444 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4852420306965762, + "acc_stderr": 0.017182864434998564, + "acc_norm": 0.5690672963400236, + "acc_norm_stderr": 0.01702555819604314 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + 
"harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "DopeorNope/COKALL-13B-v4", + "model_sha": "ace3123d6b76b6794d5b796d2ef84a3f1f88ff94", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/DopeorNope/COKAL_merged_test-v1-13B/result_2023-12-19 08:25:29.json b/DopeorNope/COKAL_merged_test-v1-13B/result_2023-12-19 08:25:29.json new file mode 100644 index 0000000000000000000000000000000000000000..1e01e2a5ca4052611530e2c2c66a91bfba123d5f --- /dev/null +++ b/DopeorNope/COKAL_merged_test-v1-13B/result_2023-12-19 08:25:29.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.46075085324232085, + 
"acc_stderr": 0.014566303676636581, + "acc_norm": 0.514505119453925, + "acc_norm_stderr": 0.014605241081370053 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4541923919537941, + "acc_stderr": 0.004968796800410414, + "acc_norm": 0.6054570802628958, + "acc_norm_stderr": 0.004877534215987093 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5614035087719298, + "acc_stderr": 0.038057975055904594, + "acc_norm": 0.5614035087719298, + "acc_norm_stderr": 0.038057975055904594 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5533980582524272, + "acc_stderr": 0.04922424153458934, + "acc_norm": 0.5533980582524272, + "acc_norm_stderr": 0.04922424153458934 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5670498084291188, + "acc_stderr": 0.017718469101513985, + "acc_norm": 0.5670498084291188, + "acc_norm_stderr": 0.017718469101513985 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4740740740740741, + "acc_stderr": 0.04313531696750574, + "acc_norm": 0.4740740740740741, + "acc_norm_stderr": 0.04313531696750574 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720683, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720683 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4127659574468085, + "acc_stderr": 0.03218471141400351, + "acc_norm": 0.4127659574468085, + "acc_norm_stderr": 0.03218471141400351 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4397590361445783, + "acc_stderr": 0.03864139923699122, + "acc_norm": 0.4397590361445783, + "acc_norm_stderr": 0.03864139923699122 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5337620578778135, + "acc_stderr": 0.028333277109562804, + "acc_norm": 0.5337620578778135, + "acc_norm_stderr": 0.028333277109562804 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5605381165919282, + "acc_stderr": 0.03331092511038179, + "acc_norm": 0.5605381165919282, + "acc_norm_stderr": 0.03331092511038179 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4580152671755725, 
+ "acc_stderr": 0.04369802690578756, + "acc_norm": 0.4580152671755725, + "acc_norm_stderr": 0.04369802690578756 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5909090909090909, + "acc_stderr": 0.03502975799413007, + "acc_norm": 0.5909090909090909, + "acc_norm_stderr": 0.03502975799413007 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.04104269211806232, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.04104269211806232 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.041583075330832865, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.041583075330832865 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5, + "acc_stderr": 0.032478490123081544, + "acc_norm": 0.5, + "acc_norm_stderr": 0.032478490123081544 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4641025641025641, + "acc_stderr": 0.02528558599001783, + "acc_norm": 0.4641025641025641, + "acc_norm_stderr": 0.02528558599001783 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.03465304488406796, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.03465304488406796 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4870967741935484, + "acc_stderr": 
0.028434533152681848, + "acc_norm": 0.4870967741935484, + "acc_norm_stderr": 0.028434533152681848 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6410256410256411, + "acc_stderr": 0.03142616993791924, + "acc_norm": 0.6410256410256411, + "acc_norm_stderr": 0.03142616993791924 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.44528301886792454, + "acc_stderr": 0.030588052974270655, + "acc_norm": 0.44528301886792454, + "acc_norm_stderr": 0.030588052974270655 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5545454545454546, + "acc_stderr": 0.047605488214603246, + "acc_norm": 0.5545454545454546, + "acc_norm_stderr": 0.047605488214603246 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.02684205787383371, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.02684205787383371 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2781456953642384, + "acc_stderr": 0.03658603262763743, + "acc_norm": 0.2781456953642384, + "acc_norm_stderr": 0.03658603262763743 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6069651741293532, + "acc_stderr": 0.0345368246603156, + "acc_norm": 0.6069651741293532, + "acc_norm_stderr": 0.0345368246603156 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3872832369942196, + "acc_stderr": 0.03714325906302064, + "acc_norm": 0.3872832369942196, + "acc_norm_stderr": 0.03714325906302064 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.02380952380952385, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.02380952380952385 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3680555555555556, + "acc_stderr": 0.040329990539607195, + "acc_norm": 0.3680555555555556, + "acc_norm_stderr": 0.040329990539607195 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + 
"harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.67, + "acc_stderr": 0.047258156262526066, + "acc_norm": 0.67, + "acc_norm_stderr": 0.047258156262526066 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5491329479768786, + "acc_stderr": 0.026788811931562767, + "acc_norm": 0.5491329479768786, + "acc_norm_stderr": 0.026788811931562767 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5460122699386503, + "acc_stderr": 0.0391170190467718, + "acc_norm": 0.5460122699386503, + "acc_norm_stderr": 0.0391170190467718 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4845679012345679, + "acc_stderr": 0.0278074900442762, + "acc_norm": 0.4845679012345679, + "acc_norm_stderr": 0.0278074900442762 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5803108808290155, + "acc_stderr": 0.035615873276858834, + "acc_norm": 0.5803108808290155, + "acc_norm_stderr": 0.035615873276858834 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.04185774424022058, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.04185774424022058 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5834862385321101, + "acc_stderr": 0.021136376504030874, + "acc_norm": 0.5834862385321101, + "acc_norm_stderr": 0.021136376504030874 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.24603174603174602, + "acc_stderr": 0.03852273364924318, + "acc_norm": 0.24603174603174602, + "acc_norm_stderr": 0.03852273364924318 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.027914055510467998, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.027914055510467998 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562428 + }, + 
"harness|ko_mmlu_international_law|5": { + "acc": 0.6611570247933884, + "acc_stderr": 0.04320767807536669, + "acc_norm": 0.6611570247933884, + "acc_norm_stderr": 0.04320767807536669 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3815789473684211, + "acc_stderr": 0.03953173377749194, + "acc_norm": 0.3815789473684211, + "acc_norm_stderr": 0.03953173377749194 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.39705882352941174, + "acc_stderr": 0.019794488900024106, + "acc_norm": 0.39705882352941174, + "acc_norm_stderr": 0.019794488900024106 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3404255319148936, + "acc_stderr": 0.02826765748265014, + "acc_norm": 0.3404255319148936, + "acc_norm_stderr": 0.02826765748265014 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.26785714285714285, + "acc_stderr": 0.04203277291467762, + "acc_norm": 0.26785714285714285, + "acc_norm_stderr": 0.04203277291467762 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.33796296296296297, + "acc_stderr": 0.03225941352631295, + "acc_norm": 0.33796296296296297, + "acc_norm_stderr": 0.03225941352631295 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4264705882352941, + "acc_stderr": 0.030042615832714847, + "acc_norm": 0.4264705882352941, + "acc_norm_stderr": 0.030042615832714847 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4857142857142857, + "acc_stderr": 0.03199615232806287, + "acc_norm": 0.4857142857142857, + 
"acc_norm_stderr": 0.03199615232806287 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6497890295358649, + "acc_stderr": 0.03105239193758435, + "acc_norm": 0.6497890295358649, + "acc_norm_stderr": 0.03105239193758435 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.35528031290743156, + "acc_stderr": 0.012223623364044043, + "acc_norm": 0.35528031290743156, + "acc_norm_stderr": 0.012223623364044043 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5490196078431373, + "acc_stderr": 0.034924061041636124, + "acc_norm": 0.5490196078431373, + "acc_norm_stderr": 0.034924061041636124 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6, + "acc_stderr": 0.038254602783800266, + "acc_norm": 0.6, + "acc_norm_stderr": 0.038254602783800266 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3292533659730722, + "mc1_stderr": 0.01645126444006824, + "mc2": 0.4904973367131087, + "mc2_stderr": 0.015682971125946653 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5407319952774499, + "acc_stderr": 0.01713321827653767, + "acc_norm": 0.577331759149941, + "acc_norm_stderr": 0.016983506079577604 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + 
"harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "DopeorNope/COKAL_merged_test-v1-13B", + "model_sha": 
"4164e460dbf37491becf4f987dedaa0628cadbdd", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/DopeorNope/COKAL_pre_DPO_Test_v1-13b/result_2023-11-10 04:48:39.json b/DopeorNope/COKAL_pre_DPO_Test_v1-13b/result_2023-11-10 04:48:39.json new file mode 100644 index 0000000000000000000000000000000000000000..e27bcd456846e9f8be4496f4f048843533736ce2 --- /dev/null +++ b/DopeorNope/COKAL_pre_DPO_Test_v1-13b/result_2023-11-10 04:48:39.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4206484641638225, + "acc_stderr": 0.014426211252508406, + "acc_norm": 0.4786689419795222, + "acc_norm_stderr": 0.014598087973127104 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4311890061740689, + "acc_stderr": 0.004942302768002103, + "acc_norm": 0.5746863174666401, + "acc_norm_stderr": 0.004933800927560538 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5087719298245614, + "acc_stderr": 0.038342347441649924, + "acc_norm": 0.5087719298245614, + "acc_norm_stderr": 0.038342347441649924 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5728155339805825, + "acc_stderr": 0.04897957737781168, + "acc_norm": 0.5728155339805825, + "acc_norm_stderr": 0.04897957737781168 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.558109833971903, + "acc_stderr": 0.01775880053421441, + "acc_norm": 0.558109833971903, + "acc_norm_stderr": 0.01775880053421441 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45185185185185184, + "acc_stderr": 0.042992689054808624, + "acc_norm": 0.45185185185185184, + "acc_norm_stderr": 0.042992689054808624 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421255, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421255 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4127659574468085, + "acc_stderr": 0.03218471141400351, 
+ "acc_norm": 0.4127659574468085, + "acc_norm_stderr": 0.03218471141400351 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.41566265060240964, + "acc_stderr": 0.03836722176598053, + "acc_norm": 0.41566265060240964, + "acc_norm_stderr": 0.03836722176598053 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5176848874598071, + "acc_stderr": 0.02838032284907713, + "acc_norm": 0.5176848874598071, + "acc_norm_stderr": 0.02838032284907713 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4977578475336323, + "acc_stderr": 0.033557465352232634, + "acc_norm": 0.4977578475336323, + "acc_norm_stderr": 0.033557465352232634 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4961832061068702, + "acc_stderr": 0.043851623256015534, + "acc_norm": 0.4961832061068702, + "acc_norm_stderr": 0.043851623256015534 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.35, + "acc_stderr": 0.04793724854411022, + "acc_norm": 0.35, + "acc_norm_stderr": 0.04793724854411022 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6060606060606061, + "acc_stderr": 0.03481285338232964, + "acc_norm": 0.6060606060606061, + "acc_norm_stderr": 0.03481285338232964 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4206896551724138, + "acc_stderr": 0.0411391498118926, + "acc_norm": 0.4206896551724138, + "acc_norm_stderr": 0.0411391498118926 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.042801058373643966, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.042801058373643966 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4789915966386555, + "acc_stderr": 0.03244980849990029, + "acc_norm": 0.4789915966386555, + "acc_norm_stderr": 0.03244980849990029 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.47692307692307695, + "acc_stderr": 0.025323990861736125, + "acc_norm": 0.47692307692307695, + "acc_norm_stderr": 0.025323990861736125 + }, + "harness|ko_mmlu_computer_security|5": { + 
"acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4088669950738916, + "acc_stderr": 0.034590588158832314, + "acc_norm": 0.4088669950738916, + "acc_norm_stderr": 0.034590588158832314 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.47419354838709676, + "acc_stderr": 0.028406095057653315, + "acc_norm": 0.47419354838709676, + "acc_norm_stderr": 0.028406095057653315 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.03088273697413866, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.03088273697413866 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.45660377358490567, + "acc_stderr": 0.03065674869673943, + "acc_norm": 0.45660377358490567, + "acc_norm_stderr": 0.03065674869673943 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.04769300568972744, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.04769300568972744 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.27037037037037037, + "acc_stderr": 0.027080372815145658, + "acc_norm": 0.27037037037037037, + "acc_norm_stderr": 0.027080372815145658 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.037101857261199946, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.037101857261199946 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5870646766169154, + "acc_stderr": 0.03481520803367348, + "acc_norm": 0.5870646766169154, + "acc_norm_stderr": 0.03481520803367348 + }, + "harness|ko_mmlu_college_medicine|5": { + 
"acc": 0.4277456647398844, + "acc_stderr": 0.037724468575180276, + "acc_norm": 0.4277456647398844, + "acc_norm_stderr": 0.037724468575180276 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.29365079365079366, + "acc_stderr": 0.023456037383982026, + "acc_norm": 0.29365079365079366, + "acc_norm_stderr": 0.023456037383982026 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4097222222222222, + "acc_stderr": 0.04112490974670787, + "acc_norm": 0.4097222222222222, + "acc_norm_stderr": 0.04112490974670787 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.62, + "acc_stderr": 0.04878317312145634, + "acc_norm": 0.62, + "acc_norm_stderr": 0.04878317312145634 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.49710982658959535, + "acc_stderr": 0.02691864538323901, + "acc_norm": 0.49710982658959535, + "acc_norm_stderr": 0.02691864538323901 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5214723926380368, + "acc_stderr": 0.03924746876751129, + "acc_norm": 0.5214723926380368, + "acc_norm_stderr": 0.03924746876751129 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4691358024691358, + "acc_stderr": 0.027767689606833935, + "acc_norm": 0.4691358024691358, + "acc_norm_stderr": 0.027767689606833935 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6113989637305699, + "acc_stderr": 0.03517739796373132, + "acc_norm": 0.6113989637305699, + "acc_norm_stderr": 0.03517739796373132 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.042270544512321984, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.042270544512321984 + }, + 
"harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6311926605504588, + "acc_stderr": 0.020686227560729534, + "acc_norm": 0.6311926605504588, + "acc_norm_stderr": 0.020686227560729534 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04216370213557835, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04216370213557835 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.42810457516339867, + "acc_stderr": 0.028332397483664274, + "acc_norm": 0.42810457516339867, + "acc_norm_stderr": 0.028332397483664274 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.628099173553719, + "acc_stderr": 0.04412015806624504, + "acc_norm": 0.628099173553719, + "acc_norm_stderr": 0.04412015806624504 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3618421052631579, + "acc_stderr": 0.039105257528497264, + "acc_norm": 0.3618421052631579, + "acc_norm_stderr": 0.039105257528497264 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.369281045751634, + "acc_stderr": 0.019524316744866342, + "acc_norm": 0.369281045751634, + "acc_norm_stderr": 0.019524316744866342 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.36524822695035464, + "acc_stderr": 0.028723863853281278, + "acc_norm": 0.36524822695035464, + "acc_norm_stderr": 0.028723863853281278 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.22321428571428573, + "acc_stderr": 0.039523019677025116, + "acc_norm": 0.22321428571428573, + "acc_norm_stderr": 0.039523019677025116 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.37962962962962965, + "acc_stderr": 0.03309682581119035, + "acc_norm": 0.37962962962962965, + "acc_norm_stderr": 0.03309682581119035 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 
0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4852941176470588, + "acc_stderr": 0.03035969707904612, + "acc_norm": 0.4852941176470588, + "acc_norm_stderr": 0.03035969707904612 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.49387755102040815, + "acc_stderr": 0.032006820201639086, + "acc_norm": 0.49387755102040815, + "acc_norm_stderr": 0.032006820201639086 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6455696202531646, + "acc_stderr": 0.031137304297185805, + "acc_norm": 0.6455696202531646, + "acc_norm_stderr": 0.031137304297185805 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3533246414602347, + "acc_stderr": 0.012208408211082425, + "acc_norm": 0.3533246414602347, + "acc_norm_stderr": 0.012208408211082425 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5147058823529411, + "acc_stderr": 0.03507793834791324, + "acc_norm": 0.5147058823529411, + "acc_norm_stderr": 0.03507793834791324 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5333333333333333, + "acc_stderr": 0.03895658065271846, + "acc_norm": 0.5333333333333333, + "acc_norm_stderr": 0.03895658065271846 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3047735618115055, + "mc1_stderr": 0.016114124156882462, + "mc2": 0.4661645299206862, + "mc2_stderr": 0.015362128030709755 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.47107438016528924, + "acc_stderr": 0.017161563949916345, + "acc_norm": 0.5230224321133412, + "acc_norm_stderr": 0.017172121546727634 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + 
"harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + 
"harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "DopeorNope/COKAL_pre_DPO_Test_v1-13b", + "model_sha": "bb7af9fcf945355418b9457538f30f0fd41b5aac", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/DopeorNope/COKAL_pre_DPO_Test_v2-13b/result_2023-11-11 06:26:11.json b/DopeorNope/COKAL_pre_DPO_Test_v2-13b/result_2023-11-11 06:26:11.json new file mode 100644 index 0000000000000000000000000000000000000000..e6584845da85dd0faf2b1cb1373964286d58e03f --- /dev/null +++ b/DopeorNope/COKAL_pre_DPO_Test_v2-13b/result_2023-11-11 06:26:11.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.41638225255972694, + "acc_stderr": 0.01440561827943618, + "acc_norm": 0.4803754266211604, + "acc_norm_stderr": 0.014600132075947096 + }, + "harness|ko_hellaswag|10": { + "acc": 0.43158733320055764, + "acc_stderr": 0.004942853459371549, + "acc_norm": 0.5758812985461064, + "acc_norm_stderr": 0.004931984642695335 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5029239766081871, + "acc_stderr": 0.03834759370936839, + "acc_norm": 0.5029239766081871, + "acc_norm_stderr": 0.03834759370936839 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5728155339805825, + 
"acc_stderr": 0.04897957737781168, + "acc_norm": 0.5728155339805825, + "acc_norm_stderr": 0.04897957737781168 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.565772669220945, + "acc_stderr": 0.01772458938967779, + "acc_norm": 0.565772669220945, + "acc_norm_stderr": 0.01772458938967779 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.04309732901036354, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.04309732901036354 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542124, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542124 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4340425531914894, + "acc_stderr": 0.03240038086792747, + "acc_norm": 0.4340425531914894, + "acc_norm_stderr": 0.03240038086792747 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.40963855421686746, + "acc_stderr": 0.038284011150790206, + "acc_norm": 0.40963855421686746, + "acc_norm_stderr": 0.038284011150790206 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5048231511254019, + "acc_stderr": 0.028396770444111298, + "acc_norm": 0.5048231511254019, + "acc_norm_stderr": 0.028396770444111298 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5291479820627802, + "acc_stderr": 0.03350073248773404, + "acc_norm": 0.5291479820627802, + "acc_norm_stderr": 0.03350073248773404 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.46564885496183206, + "acc_stderr": 0.043749285605997376, + "acc_norm": 0.46564885496183206, + "acc_norm_stderr": 0.043749285605997376 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.601010101010101, + "acc_stderr": 0.0348890161685273, + "acc_norm": 0.601010101010101, + "acc_norm_stderr": 0.0348890161685273 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4206896551724138, + 
"acc_stderr": 0.0411391498118926, + "acc_norm": 0.4206896551724138, + "acc_norm_stderr": 0.0411391498118926 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.042801058373643966, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.042801058373643966 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4789915966386555, + "acc_stderr": 0.03244980849990029, + "acc_norm": 0.4789915966386555, + "acc_norm_stderr": 0.03244980849990029 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.025294608023986483, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.025294608023986483 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4088669950738916, + "acc_stderr": 0.03459058815883231, + "acc_norm": 0.4088669950738916, + "acc_norm_stderr": 0.03459058815883231 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4806451612903226, + "acc_stderr": 0.0284226874043121, + "acc_norm": 0.4806451612903226, + "acc_norm_stderr": 0.0284226874043121 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6538461538461539, + "acc_stderr": 0.031166957367235903, + "acc_norm": 0.6538461538461539, + "acc_norm_stderr": 0.031166957367235903 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4490566037735849, + "acc_stderr": 0.030612730713641092, + "acc_norm": 0.4490566037735849, + "acc_norm_stderr": 0.030612730713641092 + }, + 
"harness|ko_mmlu_public_relations|5": { + "acc": 0.5363636363636364, + "acc_stderr": 0.04776449162396197, + "acc_norm": 0.5363636363636364, + "acc_norm_stderr": 0.04776449162396197 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25555555555555554, + "acc_stderr": 0.026593939101844058, + "acc_norm": 0.25555555555555554, + "acc_norm_stderr": 0.026593939101844058 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + "acc_stderr": 0.03684881521389024, + "acc_norm": 0.2847682119205298, + "acc_norm_stderr": 0.03684881521389024 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6218905472636815, + "acc_stderr": 0.03428867848778658, + "acc_norm": 0.6218905472636815, + "acc_norm_stderr": 0.03428867848778658 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.41040462427745666, + "acc_stderr": 0.03750757044895538, + "acc_norm": 0.41040462427745666, + "acc_norm_stderr": 0.03750757044895538 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.29365079365079366, + "acc_stderr": 0.023456037383982026, + "acc_norm": 0.29365079365079366, + "acc_norm_stderr": 0.023456037383982026 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4097222222222222, + "acc_stderr": 0.04112490974670787, + "acc_norm": 0.4097222222222222, + "acc_norm_stderr": 0.04112490974670787 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.64, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.64, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5115606936416185, + "acc_stderr": 0.026911898686377913, + "acc_norm": 0.5115606936416185, + "acc_norm_stderr": 0.026911898686377913 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5030674846625767, + "acc_stderr": 0.03928297078179663, + "acc_norm": 0.5030674846625767, + 
"acc_norm_stderr": 0.03928297078179663 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4845679012345679, + "acc_stderr": 0.027807490044276198, + "acc_norm": 0.4845679012345679, + "acc_norm_stderr": 0.027807490044276198 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6062176165803109, + "acc_stderr": 0.035260770955482405, + "acc_norm": 0.6062176165803109, + "acc_norm_stderr": 0.035260770955482405 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2982456140350877, + "acc_stderr": 0.04303684033537317, + "acc_norm": 0.2982456140350877, + "acc_norm_stderr": 0.04303684033537317 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6165137614678899, + "acc_stderr": 0.020847156641915984, + "acc_norm": 0.6165137614678899, + "acc_norm_stderr": 0.020847156641915984 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3412698412698413, + "acc_stderr": 0.04240799327574924, + "acc_norm": 0.3412698412698413, + "acc_norm_stderr": 0.04240799327574924 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.434640522875817, + "acc_stderr": 0.02838425670488304, + "acc_norm": 0.434640522875817, + "acc_norm_stderr": 0.02838425670488304 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.628099173553719, + "acc_stderr": 0.04412015806624504, + "acc_norm": 0.628099173553719, + "acc_norm_stderr": 0.04412015806624504 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3815789473684211, + "acc_stderr": 0.03953173377749194, + "acc_norm": 0.3815789473684211, + "acc_norm_stderr": 0.03953173377749194 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.37745098039215685, + "acc_stderr": 0.019610851474880286, + 
"acc_norm": 0.37745098039215685, + "acc_norm_stderr": 0.019610851474880286 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.35815602836879434, + "acc_stderr": 0.02860208586275942, + "acc_norm": 0.35815602836879434, + "acc_norm_stderr": 0.02860208586275942 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.23214285714285715, + "acc_stderr": 0.04007341809755805, + "acc_norm": 0.23214285714285715, + "acc_norm_stderr": 0.04007341809755805 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.36574074074074076, + "acc_stderr": 0.03284738857647206, + "acc_norm": 0.36574074074074076, + "acc_norm_stderr": 0.03284738857647206 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5036764705882353, + "acc_stderr": 0.0303720158854282, + "acc_norm": 0.5036764705882353, + "acc_norm_stderr": 0.0303720158854282 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.47346938775510206, + "acc_stderr": 0.03196412734523272, + "acc_norm": 0.47346938775510206, + "acc_norm_stderr": 0.03196412734523272 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6582278481012658, + "acc_stderr": 0.030874537537553617, + "acc_norm": 0.6582278481012658, + "acc_norm_stderr": 0.030874537537553617 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.35658409387222945, + "acc_stderr": 0.012233642989273888, + "acc_norm": 0.35658409387222945, + "acc_norm_stderr": 0.012233642989273888 + }, + 
"harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5147058823529411, + "acc_stderr": 0.03507793834791324, + "acc_norm": 0.5147058823529411, + "acc_norm_stderr": 0.03507793834791324 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5393939393939394, + "acc_stderr": 0.03892207016552012, + "acc_norm": 0.5393939393939394, + "acc_norm_stderr": 0.03892207016552012 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3011015911872705, + "mc1_stderr": 0.01605899902610062, + "mc2": 0.46120733649464474, + "mc2_stderr": 0.015282924396450131 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4510035419126328, + "acc_stderr": 0.01710761885954935, + "acc_norm": 0.5242030696576151, + "acc_norm_stderr": 0.017170202466520748 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + 
"harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "DopeorNope/COKAL_pre_DPO_Test_v2-13b", + "model_sha": "e2fb97d3ef746540a5900ad1e19250dd74dc429f", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/DopeorNope/COLA3-7B/result_2023-10-03 08:35:59.json b/DopeorNope/COLA3-7B/result_2023-10-03 08:35:59.json new file mode 100644 index 
0000000000000000000000000000000000000000..50bd11017fdd1d0b7bb08b75a748f636ce3c91b8 --- /dev/null +++ b/DopeorNope/COLA3-7B/result_2023-10-03 08:35:59.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3310580204778157, + "acc_stderr": 0.013752062419817832, + "acc_norm": 0.3916382252559727, + "acc_norm_stderr": 0.014264122124938215 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3873730332603067, + "acc_stderr": 0.004861544478451855, + "acc_norm": 0.5097590121489743, + "acc_norm_stderr": 0.004988830884131634 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.03811079669833531, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.03811079669833531 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.3300970873786408, + "acc_stderr": 0.0465614711001235, + "acc_norm": 0.3300970873786408, + "acc_norm_stderr": 0.0465614711001235 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3895274584929757, + "acc_stderr": 0.017438082556264594, + "acc_norm": 0.3895274584929757, + "acc_norm_stderr": 0.017438082556264594 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4, + "acc_stderr": 0.04232073695151589, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04232073695151589 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.28936170212765955, + "acc_stderr": 0.029644006577009618, + "acc_norm": 0.28936170212765955, + "acc_norm_stderr": 0.029644006577009618 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3433734939759036, + "acc_stderr": 0.03696584317010601, + "acc_norm": 0.3433734939759036, + "acc_norm_stderr": 0.03696584317010601 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.40192926045016075, + "acc_stderr": 0.027846476005930477, + "acc_norm": 0.40192926045016075, + "acc_norm_stderr": 0.027846476005930477 + }, + 
"harness|ko_mmlu_human_aging|5": { + "acc": 0.39461883408071746, + "acc_stderr": 0.03280400504755291, + "acc_norm": 0.39461883408071746, + "acc_norm_stderr": 0.03280400504755291 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48091603053435117, + "acc_stderr": 0.04382094705550988, + "acc_norm": 0.48091603053435117, + "acc_norm_stderr": 0.04382094705550988 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.3, + "acc_stderr": 0.04605661864718381, + "acc_norm": 0.3, + "acc_norm_stderr": 0.04605661864718381 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.43434343434343436, + "acc_stderr": 0.035315058793591834, + "acc_norm": 0.43434343434343436, + "acc_norm_stderr": 0.035315058793591834 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3931034482758621, + "acc_stderr": 0.040703290137070705, + "acc_norm": 0.3931034482758621, + "acc_norm_stderr": 0.040703290137070705 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.19607843137254902, + "acc_stderr": 0.03950581861179962, + "acc_norm": 0.19607843137254902, + "acc_norm_stderr": 0.03950581861179962 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3739495798319328, + "acc_stderr": 0.031429466378837076, + "acc_norm": 0.3739495798319328, + "acc_norm_stderr": 0.031429466378837076 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2948717948717949, + "acc_stderr": 0.02311936275823229, + "acc_norm": 0.2948717948717949, + "acc_norm_stderr": 0.02311936275823229 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.39814814814814814, + "acc_stderr": 0.047323326159788126, + "acc_norm": 0.39814814814814814, + "acc_norm_stderr": 
0.047323326159788126 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2561576354679803, + "acc_stderr": 0.0307127300709826, + "acc_norm": 0.2561576354679803, + "acc_norm_stderr": 0.0307127300709826 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.34516129032258064, + "acc_stderr": 0.027045746573534327, + "acc_norm": 0.34516129032258064, + "acc_norm_stderr": 0.027045746573534327 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.48717948717948717, + "acc_stderr": 0.032745319388423504, + "acc_norm": 0.48717948717948717, + "acc_norm_stderr": 0.032745319388423504 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.33584905660377357, + "acc_stderr": 0.029067220146644823, + "acc_norm": 0.33584905660377357, + "acc_norm_stderr": 0.029067220146644823 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.41818181818181815, + "acc_stderr": 0.0472457740573157, + "acc_norm": 0.41818181818181815, + "acc_norm_stderr": 0.0472457740573157 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25555555555555554, + "acc_stderr": 0.02659393910184408, + "acc_norm": 0.25555555555555554, + "acc_norm_stderr": 0.02659393910184408 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2781456953642384, + "acc_stderr": 0.03658603262763743, + "acc_norm": 0.2781456953642384, + "acc_norm_stderr": 0.03658603262763743 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.46766169154228854, + "acc_stderr": 0.035281314729336065, + "acc_norm": 0.46766169154228854, + "acc_norm_stderr": 0.035281314729336065 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.32947976878612717, + "acc_stderr": 0.03583901754736411, + "acc_norm": 0.32947976878612717, + "acc_norm_stderr": 0.03583901754736411 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.25396825396825395, + "acc_stderr": 0.022418042891113942, + "acc_norm": 0.25396825396825395, + "acc_norm_stderr": 0.022418042891113942 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 
0.3333333333333333, + "acc_stderr": 0.039420826399272135, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.039420826399272135 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.407514450867052, + "acc_stderr": 0.026454578146931505, + "acc_norm": 0.407514450867052, + "acc_norm_stderr": 0.026454578146931505 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.32515337423312884, + "acc_stderr": 0.03680350371286461, + "acc_norm": 0.32515337423312884, + "acc_norm_stderr": 0.03680350371286461 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.39197530864197533, + "acc_stderr": 0.02716368603827123, + "acc_norm": 0.39197530864197533, + "acc_norm_stderr": 0.02716368603827123 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.41450777202072536, + "acc_stderr": 0.03555300319557672, + "acc_norm": 0.41450777202072536, + "acc_norm_stderr": 0.03555300319557672 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.04142439719489362, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.04142439719489362 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.42018348623853213, + "acc_stderr": 0.021162420048273508, + "acc_norm": 0.42018348623853213, + "acc_norm_stderr": 0.021162420048273508 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.24603174603174602, + "acc_stderr": 0.03852273364924315, + "acc_norm": 0.24603174603174602, + "acc_norm_stderr": 0.03852273364924315 + }, + "harness|ko_mmlu_nutrition|5": { + 
"acc": 0.41830065359477125, + "acc_stderr": 0.02824513402438729, + "acc_norm": 0.41830065359477125, + "acc_norm_stderr": 0.02824513402438729 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5289256198347108, + "acc_stderr": 0.04556710331269498, + "acc_norm": 0.5289256198347108, + "acc_norm_stderr": 0.04556710331269498 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3026315789473684, + "acc_stderr": 0.03738520676119667, + "acc_norm": 0.3026315789473684, + "acc_norm_stderr": 0.03738520676119667 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.31699346405228757, + "acc_stderr": 0.018824219512706214, + "acc_norm": 0.31699346405228757, + "acc_norm_stderr": 0.018824219512706214 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2872340425531915, + "acc_stderr": 0.026992199173064356, + "acc_norm": 0.2872340425531915, + "acc_norm_stderr": 0.026992199173064356 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25892857142857145, + "acc_stderr": 0.04157751539865629, + "acc_norm": 0.25892857142857145, + "acc_norm_stderr": 0.04157751539865629 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3287037037037037, + "acc_stderr": 0.03203614084670058, + "acc_norm": 0.3287037037037037, + "acc_norm_stderr": 0.03203614084670058 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.25139664804469275, + "acc_stderr": 0.014508979453553977, + "acc_norm": 0.25139664804469275, + "acc_norm_stderr": 0.014508979453553977 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + 
"harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4227941176470588, + "acc_stderr": 0.030008562845003476, + "acc_norm": 0.4227941176470588, + "acc_norm_stderr": 0.030008562845003476 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3877551020408163, + "acc_stderr": 0.031192230726795656, + "acc_norm": 0.3877551020408163, + "acc_norm_stderr": 0.031192230726795656 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5147679324894515, + "acc_stderr": 0.032533028078777386, + "acc_norm": 0.5147679324894515, + "acc_norm_stderr": 0.032533028078777386 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3305084745762712, + "acc_stderr": 0.01201414210184297, + "acc_norm": 0.3305084745762712, + "acc_norm_stderr": 0.01201414210184297 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.3627450980392157, + "acc_stderr": 0.033744993563193555, + "acc_norm": 0.3627450980392157, + "acc_norm_stderr": 0.033744993563193555 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.37575757575757573, + "acc_stderr": 0.03781887353205982, + "acc_norm": 0.37575757575757573, + "acc_norm_stderr": 0.03781887353205982 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.23990208078335373, + "mc1_stderr": 0.01494881267906214, + "mc2": 0.3781293727977648, + "mc2_stderr": 0.014917319628125631 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.21133412042502953, + "acc_stderr": 0.01403609034293031, + "acc_norm": 0.3022432113341204, + "acc_norm_stderr": 0.01578865486302237 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + 
"harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + 
"harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "DopeorNope/COLA3-7B", + "model_sha": "90a961edc95e63c6b777402191b76fbfa3ed3a8d", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/DopeorNope/COLA3_13B/result_2023-10-05 10:17:21.json b/DopeorNope/COLA3_13B/result_2023-10-05 10:17:21.json new file mode 100644 index 0000000000000000000000000000000000000000..a9e122a6ed3818585ed3a3421f5d97640e497e90 --- /dev/null +++ b/DopeorNope/COLA3_13B/result_2023-10-05 10:17:21.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.36860068259385664, + "acc_stderr": 0.014097810678042192, + "acc_norm": 0.42235494880546076, + "acc_norm_stderr": 0.014434138713379981 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4082852021509659, + "acc_stderr": 0.004905119039849461, + "acc_norm": 0.5435172276438957, + "acc_norm_stderr": 0.004970846697552308 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.03811079669833531, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.03811079669833531 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4563106796116505, + "acc_stderr": 0.049318019942204146, + "acc_norm": 0.4563106796116505, + "acc_norm_stderr": 0.049318019942204146 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5057471264367817, + "acc_stderr": 0.017878782326129224, + "acc_norm": 0.5057471264367817, + "acc_norm_stderr": 0.017878782326129224 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4148148148148148, + "acc_stderr": 0.04256193767901407, + "acc_norm": 0.4148148148148148, + 
"acc_norm_stderr": 0.04256193767901407 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206824, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206824 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.33191489361702126, + "acc_stderr": 0.03078373675774564, + "acc_norm": 0.33191489361702126, + "acc_norm_stderr": 0.03078373675774564 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4397590361445783, + "acc_stderr": 0.03864139923699122, + "acc_norm": 0.4397590361445783, + "acc_norm_stderr": 0.03864139923699122 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5080385852090032, + "acc_stderr": 0.028394421370984538, + "acc_norm": 0.5080385852090032, + "acc_norm_stderr": 0.028394421370984538 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.37668161434977576, + "acc_stderr": 0.03252113489929187, + "acc_norm": 0.37668161434977576, + "acc_norm_stderr": 0.03252113489929187 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48854961832061067, + "acc_stderr": 0.04384140024078016, + "acc_norm": 0.48854961832061067, + "acc_norm_stderr": 0.04384140024078016 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5505050505050505, + "acc_stderr": 0.035441324919479704, + "acc_norm": 0.5505050505050505, + "acc_norm_stderr": 0.035441324919479704 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.45517241379310347, + "acc_stderr": 0.04149886942192117, + "acc_norm": 0.45517241379310347, + "acc_norm_stderr": 0.04149886942192117 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.19607843137254902, + "acc_stderr": 0.03950581861179961, + "acc_norm": 0.19607843137254902, + "acc_norm_stderr": 0.03950581861179961 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4369747899159664, + "acc_stderr": 0.03221943636566197, + 
"acc_norm": 0.4369747899159664, + "acc_norm_stderr": 0.03221943636566197 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3923076923076923, + "acc_stderr": 0.02475600038213094, + "acc_norm": 0.3923076923076923, + "acc_norm_stderr": 0.02475600038213094 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.04826217294139894, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.04826217294139894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4187192118226601, + "acc_stderr": 0.03471192860518468, + "acc_norm": 0.4187192118226601, + "acc_norm_stderr": 0.03471192860518468 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.432258064516129, + "acc_stderr": 0.028181739720019416, + "acc_norm": 0.432258064516129, + "acc_norm_stderr": 0.028181739720019416 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6495726495726496, + "acc_stderr": 0.0312561082442188, + "acc_norm": 0.6495726495726496, + "acc_norm_stderr": 0.0312561082442188 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4679245283018868, + "acc_stderr": 0.030709486992556545, + "acc_norm": 0.4679245283018868, + "acc_norm_stderr": 0.030709486992556545 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.44545454545454544, + "acc_stderr": 0.04760548821460325, + "acc_norm": 0.44545454545454544, + "acc_norm_stderr": 0.04760548821460325 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2518518518518518, + "acc_stderr": 0.026466117538959916, + "acc_norm": 0.2518518518518518, + "acc_norm_stderr": 0.026466117538959916 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2781456953642384, + 
"acc_stderr": 0.03658603262763743, + "acc_norm": 0.2781456953642384, + "acc_norm_stderr": 0.03658603262763743 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5572139303482587, + "acc_stderr": 0.03512310964123937, + "acc_norm": 0.5572139303482587, + "acc_norm_stderr": 0.03512310964123937 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3699421965317919, + "acc_stderr": 0.03681229633394319, + "acc_norm": 0.3699421965317919, + "acc_norm_stderr": 0.03681229633394319 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2671957671957672, + "acc_stderr": 0.022789673145776575, + "acc_norm": 0.2671957671957672, + "acc_norm_stderr": 0.022789673145776575 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3472222222222222, + "acc_stderr": 0.039812405437178615, + "acc_norm": 0.3472222222222222, + "acc_norm_stderr": 0.039812405437178615 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.57, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.57, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4508670520231214, + "acc_stderr": 0.026788811931562757, + "acc_norm": 0.4508670520231214, + "acc_norm_stderr": 0.026788811931562757 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4110429447852761, + "acc_stderr": 0.038656978537853624, + "acc_norm": 0.4110429447852761, + "acc_norm_stderr": 0.038656978537853624 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.02774431344337654, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.02774431344337654 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 
0.43005181347150256, + "acc_stderr": 0.03572954333144807, + "acc_norm": 0.43005181347150256, + "acc_norm_stderr": 0.03572954333144807 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.04142439719489361, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.04142439719489361 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.48256880733944957, + "acc_stderr": 0.021424291871853147, + "acc_norm": 0.48256880733944957, + "acc_norm_stderr": 0.021424291871853147 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.25396825396825395, + "acc_stderr": 0.03893259610604674, + "acc_norm": 0.25396825396825395, + "acc_norm_stderr": 0.03893259610604674 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.42483660130718953, + "acc_stderr": 0.02830457667314112, + "acc_norm": 0.42483660130718953, + "acc_norm_stderr": 0.02830457667314112 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939098, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939098 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5619834710743802, + "acc_stderr": 0.045291468044357915, + "acc_norm": 0.5619834710743802, + "acc_norm_stderr": 0.045291468044357915 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40789473684210525, + "acc_stderr": 0.039993097127774734, + "acc_norm": 0.40789473684210525, + "acc_norm_stderr": 0.039993097127774734 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.32189542483660133, + "acc_stderr": 0.018901015322093085, + "acc_norm": 0.32189542483660133, + "acc_norm_stderr": 0.018901015322093085 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3120567375886525, + "acc_stderr": 0.02764012054516994, + "acc_norm": 0.3120567375886525, + "acc_norm_stderr": 0.02764012054516994 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.21428571428571427, + "acc_stderr": 0.038946411200447915, + "acc_norm": 0.21428571428571427, + "acc_norm_stderr": 
0.038946411200447915 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3194444444444444, + "acc_stderr": 0.03179876342176851, + "acc_norm": 0.3194444444444444, + "acc_norm_stderr": 0.03179876342176851 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.2426470588235294, + "acc_stderr": 0.026040662474201285, + "acc_norm": 0.2426470588235294, + "acc_norm_stderr": 0.026040662474201285 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.46938775510204084, + "acc_stderr": 0.031949171367580624, + "acc_norm": 0.46938775510204084, + "acc_norm_stderr": 0.031949171367580624 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5063291139240507, + "acc_stderr": 0.03254462010767859, + "acc_norm": 0.5063291139240507, + "acc_norm_stderr": 0.03254462010767859 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.30247718383311606, + "acc_stderr": 0.0117315242341657, + "acc_norm": 0.30247718383311606, + "acc_norm_stderr": 0.0117315242341657 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.03410785338904719, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.03410785338904719 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4727272727272727, + "acc_stderr": 0.03898531605579419, + "acc_norm": 0.4727272727272727, + "acc_norm_stderr": 0.03898531605579419 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2533659730722154, + "mc1_stderr": 
0.015225899340826824, + "mc2": 0.40933802446057865, + "mc2_stderr": 0.014937193336867839 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4557260920897285, + "acc_stderr": 0.017122829143292648, + "acc_norm": 0.5147579693034239, + "acc_norm_stderr": 0.01718286443499856 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + 
"harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "DopeorNope/COLA3_13B", + "model_sha": "7725e7a1c6f8f022c7c4ec0286dd9f7fada126bd", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/DopeorNope/COLA_LO-7B/result_2023-10-03 17:04:14.json b/DopeorNope/COLA_LO-7B/result_2023-10-03 17:04:14.json new file mode 100644 index 0000000000000000000000000000000000000000..549d48ae36153fef33571db99b3abc62ef3f68ac --- /dev/null +++ b/DopeorNope/COLA_LO-7B/result_2023-10-03 17:04:14.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3319112627986348, + "acc_stderr": 0.013760988200880533, + "acc_norm": 0.38993174061433444, + "acc_norm_stderr": 0.014252959848892884 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3857797251543517, + "acc_stderr": 0.004857840934549158, + "acc_norm": 0.5046803425612428, + "acc_norm_stderr": 
0.004989562798280523 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.38011695906432746, + "acc_stderr": 0.037229657413855394, + "acc_norm": 0.38011695906432746, + "acc_norm_stderr": 0.037229657413855394 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.33980582524271846, + "acc_stderr": 0.04689765937278134, + "acc_norm": 0.33980582524271846, + "acc_norm_stderr": 0.04689765937278134 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.41762452107279696, + "acc_stderr": 0.01763563732695152, + "acc_norm": 0.41762452107279696, + "acc_norm_stderr": 0.01763563732695152 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.04171654161354544, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.04171654161354544 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2936170212765957, + "acc_stderr": 0.02977164271249123, + "acc_norm": 0.2936170212765957, + "acc_norm_stderr": 0.02977164271249123 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.35542168674698793, + "acc_stderr": 0.03726214354322415, + "acc_norm": 0.35542168674698793, + "acc_norm_stderr": 0.03726214354322415 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.43086816720257237, + "acc_stderr": 0.028125340983972714, + "acc_norm": 0.43086816720257237, + "acc_norm_stderr": 0.028125340983972714 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.42152466367713004, + "acc_stderr": 0.03314190222110658, + "acc_norm": 0.42152466367713004, + "acc_norm_stderr": 0.03314190222110658 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.45038167938931295, + "acc_stderr": 0.04363643698524779, + "acc_norm": 0.45038167938931295, + "acc_norm_stderr": 0.04363643698524779 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 
0.04852365870939099 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.41414141414141414, + "acc_stderr": 0.03509438348879629, + "acc_norm": 0.41414141414141414, + "acc_norm_stderr": 0.03509438348879629 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3724137931034483, + "acc_stderr": 0.0402873153294756, + "acc_norm": 0.3724137931034483, + "acc_norm_stderr": 0.0402873153294756 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237655, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237655 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3697478991596639, + "acc_stderr": 0.031357095996135904, + "acc_norm": 0.3697478991596639, + "acc_norm_stderr": 0.031357095996135904 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.26153846153846155, + "acc_stderr": 0.022282141204204433, + "acc_norm": 0.26153846153846155, + "acc_norm_stderr": 0.022282141204204433 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.41, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.41, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.37962962962962965, + "acc_stderr": 0.04691521224077742, + "acc_norm": 0.37962962962962965, + "acc_norm_stderr": 0.04691521224077742 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2315270935960591, + "acc_stderr": 0.02967833314144444, + "acc_norm": 0.2315270935960591, + "acc_norm_stderr": 0.02967833314144444 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.36774193548387096, + "acc_stderr": 0.02743086657997347, + "acc_norm": 0.36774193548387096, + "acc_norm_stderr": 0.02743086657997347 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5042735042735043, + "acc_stderr": 0.03275489264382132, + 
"acc_norm": 0.5042735042735043, + "acc_norm_stderr": 0.03275489264382132 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.33962264150943394, + "acc_stderr": 0.029146904747798352, + "acc_norm": 0.33962264150943394, + "acc_norm_stderr": 0.029146904747798352 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.37272727272727274, + "acc_stderr": 0.046313813194254635, + "acc_norm": 0.37272727272727274, + "acc_norm_stderr": 0.046313813194254635 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24814814814814815, + "acc_stderr": 0.0263357394040558, + "acc_norm": 0.24814814814814815, + "acc_norm_stderr": 0.0263357394040558 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.23178807947019867, + "acc_stderr": 0.03445406271987054, + "acc_norm": 0.23178807947019867, + "acc_norm_stderr": 0.03445406271987054 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.43283582089552236, + "acc_stderr": 0.0350349092367328, + "acc_norm": 0.43283582089552236, + "acc_norm_stderr": 0.0350349092367328 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.30057803468208094, + "acc_stderr": 0.03496101481191181, + "acc_norm": 0.30057803468208094, + "acc_norm_stderr": 0.03496101481191181 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2328042328042328, + "acc_stderr": 0.021765961672154537, + "acc_norm": 0.2328042328042328, + "acc_norm_stderr": 0.021765961672154537 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.039420826399272135, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.039420826399272135 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 
0.45664739884393063, + "acc_stderr": 0.02681771813034892, + "acc_norm": 0.45664739884393063, + "acc_norm_stderr": 0.02681771813034892 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3312883435582822, + "acc_stderr": 0.03697983910025588, + "acc_norm": 0.3312883435582822, + "acc_norm_stderr": 0.03697983910025588 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.39197530864197533, + "acc_stderr": 0.02716368603827123, + "acc_norm": 0.39197530864197533, + "acc_norm_stderr": 0.02716368603827123 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.44041450777202074, + "acc_stderr": 0.035827245300360945, + "acc_norm": 0.44041450777202074, + "acc_norm_stderr": 0.035827245300360945 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.04096985139843671, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.04096985139843671 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3798165137614679, + "acc_stderr": 0.020808825617866244, + "acc_norm": 0.3798165137614679, + "acc_norm_stderr": 0.020808825617866244 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.24603174603174602, + "acc_stderr": 0.03852273364924318, + "acc_norm": 0.24603174603174602, + "acc_norm_stderr": 0.03852273364924318 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4117647058823529, + "acc_stderr": 0.02818059632825929, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.02818059632825929 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.48760330578512395, + "acc_stderr": 0.04562951548180765, + "acc_norm": 0.48760330578512395, + "acc_norm_stderr": 0.04562951548180765 + }, + 
"harness|ko_mmlu_astronomy|5": { + "acc": 0.3026315789473684, + "acc_stderr": 0.03738520676119667, + "acc_norm": 0.3026315789473684, + "acc_norm_stderr": 0.03738520676119667 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.32189542483660133, + "acc_stderr": 0.018901015322093095, + "acc_norm": 0.32189542483660133, + "acc_norm_stderr": 0.018901015322093095 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3191489361702128, + "acc_stderr": 0.027807990141320203, + "acc_norm": 0.3191489361702128, + "acc_norm_stderr": 0.027807990141320203 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2767857142857143, + "acc_stderr": 0.042466243366976256, + "acc_norm": 0.2767857142857143, + "acc_norm_stderr": 0.042466243366976256 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.032568505702936464, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.032568505702936464 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24134078212290502, + "acc_stderr": 0.014310999547961443, + "acc_norm": 0.24134078212290502, + "acc_norm_stderr": 0.014310999547961443 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4264705882352941, + "acc_stderr": 0.03004261583271486, + "acc_norm": 0.4264705882352941, + "acc_norm_stderr": 0.03004261583271486 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.39591836734693875, + "acc_stderr": 0.03130802899065686, + "acc_norm": 0.39591836734693875, + "acc_norm_stderr": 0.03130802899065686 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.42616033755274263, + "acc_stderr": 0.03219035703131774, + 
"acc_norm": 0.42616033755274263, + "acc_norm_stderr": 0.03219035703131774 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.33376792698826596, + "acc_stderr": 0.012043812655846146, + "acc_norm": 0.33376792698826596, + "acc_norm_stderr": 0.012043812655846146 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.3627450980392157, + "acc_stderr": 0.033744993563193555, + "acc_norm": 0.3627450980392157, + "acc_norm_stderr": 0.033744993563193555 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3575757575757576, + "acc_stderr": 0.03742597043806587, + "acc_norm": 0.3575757575757576, + "acc_norm_stderr": 0.03742597043806587 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2484700122399021, + "mc1_stderr": 0.015127427096520688, + "mc2": 0.3821911392219441, + "mc2_stderr": 0.014928316371274168 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.21959858323494688, + "acc_stderr": 0.014232743085580275, + "acc_norm": 0.29634002361275086, + "acc_norm_stderr": 0.015699701628594232 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + 
"harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "DopeorNope/COLA_LO-7B", + "model_sha": "4cccb5249ea36f58588c32fe58c6f104f89f0487", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 
1, + "max_samples": null + } +} \ No newline at end of file diff --git a/DopeorNope/Dear_My_best_Friend-SFT-v2-13B/result_2023-11-25 04:59:10.json b/DopeorNope/Dear_My_best_Friend-SFT-v2-13B/result_2023-11-25 04:59:10.json new file mode 100644 index 0000000000000000000000000000000000000000..729986bb788de366337e28504f4e0e6bb63c2f1f --- /dev/null +++ b/DopeorNope/Dear_My_best_Friend-SFT-v2-13B/result_2023-11-25 04:59:10.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4138225255972696, + "acc_stderr": 0.014392730009221009, + "acc_norm": 0.4803754266211604, + "acc_norm_stderr": 0.014600132075947098 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4291973710416252, + "acc_stderr": 0.004939500404882179, + "acc_norm": 0.5743875721967735, + "acc_norm_stderr": 0.004934250390879774 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5087719298245614, + "acc_stderr": 0.038342347441649924, + "acc_norm": 0.5087719298245614, + "acc_norm_stderr": 0.038342347441649924 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5533980582524272, + "acc_stderr": 0.04922424153458933, + "acc_norm": 0.5533980582524272, + "acc_norm_stderr": 0.04922424153458933 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5530012771392082, + "acc_stderr": 0.017779225233394223, + "acc_norm": 0.5530012771392082, + "acc_norm_stderr": 0.017779225233394223 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.42962962962962964, + "acc_stderr": 0.04276349494376598, + "acc_norm": 0.42962962962962964, + "acc_norm_stderr": 0.04276349494376598 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206824, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206824 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4127659574468085, + "acc_stderr": 0.03218471141400351, + "acc_norm": 0.4127659574468085, + "acc_norm_stderr": 0.03218471141400351 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3855421686746988, + "acc_stderr": 
0.03789134424611548, + "acc_norm": 0.3855421686746988, + "acc_norm_stderr": 0.03789134424611548 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5112540192926045, + "acc_stderr": 0.028390897396863533, + "acc_norm": 0.5112540192926045, + "acc_norm_stderr": 0.028390897396863533 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5291479820627802, + "acc_stderr": 0.03350073248773404, + "acc_norm": 0.5291479820627802, + "acc_norm_stderr": 0.03350073248773404 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.45038167938931295, + "acc_stderr": 0.04363643698524779, + "acc_norm": 0.45038167938931295, + "acc_norm_stderr": 0.04363643698524779 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5808080808080808, + "acc_stderr": 0.035155207286704175, + "acc_norm": 0.5808080808080808, + "acc_norm_stderr": 0.035155207286704175 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3724137931034483, + "acc_stderr": 0.0402873153294756, + "acc_norm": 0.3724137931034483, + "acc_norm_stderr": 0.0402873153294756 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.041583075330832865, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.041583075330832865 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.48739495798319327, + "acc_stderr": 0.03246816765752174, + "acc_norm": 0.48739495798319327, + "acc_norm_stderr": 0.03246816765752174 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.44358974358974357, + "acc_stderr": 0.02518914989476419, + "acc_norm": 0.44358974358974357, + "acc_norm_stderr": 0.02518914989476419 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_global_facts|5": { + 
"acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4039408866995074, + "acc_stderr": 0.03452453903822039, + "acc_norm": 0.4039408866995074, + "acc_norm_stderr": 0.03452453903822039 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4774193548387097, + "acc_stderr": 0.028414985019707868, + "acc_norm": 0.4774193548387097, + "acc_norm_stderr": 0.028414985019707868 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6623931623931624, + "acc_stderr": 0.030980296992618558, + "acc_norm": 0.6623931623931624, + "acc_norm_stderr": 0.030980296992618558 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4490566037735849, + "acc_stderr": 0.030612730713641092, + "acc_norm": 0.4490566037735849, + "acc_norm_stderr": 0.030612730713641092 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5545454545454546, + "acc_stderr": 0.047605488214603246, + "acc_norm": 0.5545454545454546, + "acc_norm_stderr": 0.047605488214603246 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.27037037037037037, + "acc_stderr": 0.027080372815145658, + "acc_norm": 0.27037037037037037, + "acc_norm_stderr": 0.027080372815145658 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.03780445850526733, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.03780445850526733 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.572139303482587, + "acc_stderr": 0.03498541988407795, + "acc_norm": 0.572139303482587, + "acc_norm_stderr": 0.03498541988407795 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.42196531791907516, + "acc_stderr": 0.0376574669386515, + "acc_norm": 0.42196531791907516, + "acc_norm_stderr": 0.0376574669386515 + }, + 
"harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.29894179894179895, + "acc_stderr": 0.023577604791655816, + "acc_norm": 0.29894179894179895, + "acc_norm_stderr": 0.023577604791655816 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.375, + "acc_stderr": 0.04048439222695598, + "acc_norm": 0.375, + "acc_norm_stderr": 0.04048439222695598 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.61, + "acc_stderr": 0.049020713000019756, + "acc_norm": 0.61, + "acc_norm_stderr": 0.049020713000019756 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5086705202312138, + "acc_stderr": 0.0269150473553698, + "acc_norm": 0.5086705202312138, + "acc_norm_stderr": 0.0269150473553698 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5398773006134969, + "acc_stderr": 0.03915857291436972, + "acc_norm": 0.5398773006134969, + "acc_norm_stderr": 0.03915857291436972 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.45987654320987653, + "acc_stderr": 0.02773102275353928, + "acc_norm": 0.45987654320987653, + "acc_norm_stderr": 0.02773102275353928 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5906735751295337, + "acc_stderr": 0.03548608168860806, + "acc_norm": 0.5906735751295337, + "acc_norm_stderr": 0.03548608168860806 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.32456140350877194, + "acc_stderr": 0.04404556157374768, + "acc_norm": 0.32456140350877194, + "acc_norm_stderr": 0.04404556157374768 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6256880733944954, + "acc_stderr": 0.020748959408988334, + "acc_norm": 0.6256880733944954, + "acc_norm_stderr": 0.020748959408988334 + }, + 
"harness|ko_mmlu_formal_logic|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.042163702135578345, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.042163702135578345 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.42810457516339867, + "acc_stderr": 0.028332397483664274, + "acc_norm": 0.42810457516339867, + "acc_norm_stderr": 0.028332397483664274 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.41, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.41, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6198347107438017, + "acc_stderr": 0.04431324501968431, + "acc_norm": 0.6198347107438017, + "acc_norm_stderr": 0.04431324501968431 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3684210526315789, + "acc_stderr": 0.03925523381052932, + "acc_norm": 0.3684210526315789, + "acc_norm_stderr": 0.03925523381052932 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.37254901960784315, + "acc_stderr": 0.019559646809215923, + "acc_norm": 0.37254901960784315, + "acc_norm_stderr": 0.019559646809215923 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3723404255319149, + "acc_stderr": 0.028838921471251458, + "acc_norm": 0.3723404255319149, + "acc_norm_stderr": 0.028838921471251458 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.21428571428571427, + "acc_stderr": 0.038946411200447915, + "acc_norm": 0.21428571428571427, + "acc_norm_stderr": 0.038946411200447915 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.375, + "acc_stderr": 0.033016908987210894, + "acc_norm": 0.375, + "acc_norm_stderr": 0.033016908987210894 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 
0.04824181513244218 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.47058823529411764, + "acc_stderr": 0.030320243265004137, + "acc_norm": 0.47058823529411764, + "acc_norm_stderr": 0.030320243265004137 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4448979591836735, + "acc_stderr": 0.031814251181977865, + "acc_norm": 0.4448979591836735, + "acc_norm_stderr": 0.031814251181977865 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6244725738396625, + "acc_stderr": 0.03152256243091157, + "acc_norm": 0.6244725738396625, + "acc_norm_stderr": 0.03152256243091157 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.34876140808344197, + "acc_stderr": 0.012172035157127115, + "acc_norm": 0.34876140808344197, + "acc_norm_stderr": 0.012172035157127115 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5343137254901961, + "acc_stderr": 0.03501038327635897, + "acc_norm": 0.5343137254901961, + "acc_norm_stderr": 0.03501038327635897 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5333333333333333, + "acc_stderr": 0.03895658065271846, + "acc_norm": 0.5333333333333333, + "acc_norm_stderr": 0.03895658065271846 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2876376988984088, + "mc1_stderr": 0.015846315101394816, + "mc2": 0.46238471252084135, + "mc2_stderr": 0.015296846959143042 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.46635182998819363, + "acc_stderr": 0.017151384117131865, + "acc_norm": 0.5324675324675324, + "acc_norm_stderr": 0.017154073716682868 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + 
"harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + 
"harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "DopeorNope/Dear_My_best_Friend-SFT-v2-13B", + "model_sha": "ef7f609ba5694a3740f8a95e1c7699a1d42abb1f", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/DopeorNope/Dear_My_best_Friends-13B/result_2023-11-12 16:14:26.json b/DopeorNope/Dear_My_best_Friends-13B/result_2023-11-12 16:14:26.json new file mode 100644 index 0000000000000000000000000000000000000000..cb4e8806a2c21d90ea31d8404855b6c0ef7e553d --- /dev/null +++ b/DopeorNope/Dear_My_best_Friends-13B/result_2023-11-12 16:14:26.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.47440273037542663, + "acc_stderr": 0.014592230885298964, + "acc_norm": 0.5170648464163823, + "acc_norm_stderr": 0.014602878388536595 + }, + "harness|ko_hellaswag|10": { + "acc": 0.45956980681139215, + "acc_stderr": 0.004973442060741618, + "acc_norm": 0.5844453296156145, + "acc_norm_stderr": 0.0049181021687179334 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.47368421052631576, + "acc_stderr": 0.038295098689947286, + "acc_norm": 0.47368421052631576, + "acc_norm_stderr": 0.038295098689947286 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5728155339805825, + "acc_stderr": 0.04897957737781168, + "acc_norm": 0.5728155339805825, + "acc_norm_stderr": 0.04897957737781168 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4929757343550447, + 
"acc_stderr": 0.017878199003432214, + "acc_norm": 0.4929757343550447, + "acc_norm_stderr": 0.017878199003432214 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45185185185185184, + "acc_stderr": 0.04299268905480864, + "acc_norm": 0.45185185185185184, + "acc_norm_stderr": 0.04299268905480864 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.33191489361702126, + "acc_stderr": 0.03078373675774565, + "acc_norm": 0.33191489361702126, + "acc_norm_stderr": 0.03078373675774565 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4578313253012048, + "acc_stderr": 0.03878626771002361, + "acc_norm": 0.4578313253012048, + "acc_norm_stderr": 0.03878626771002361 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4662379421221865, + "acc_stderr": 0.028333277109562807, + "acc_norm": 0.4662379421221865, + "acc_norm_stderr": 0.028333277109562807 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.43946188340807174, + "acc_stderr": 0.03331092511038179, + "acc_norm": 0.43946188340807174, + "acc_norm_stderr": 0.03331092511038179 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48091603053435117, + "acc_stderr": 0.043820947055509867, + "acc_norm": 0.48091603053435117, + "acc_norm_stderr": 0.043820947055509867 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5252525252525253, + "acc_stderr": 0.03557806245087314, + "acc_norm": 0.5252525252525253, + "acc_norm_stderr": 0.03557806245087314 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3586206896551724, + "acc_stderr": 0.039966295748767186, + "acc_norm": 0.3586206896551724, + "acc_norm_stderr": 0.039966295748767186 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 
0.18627450980392157, + "acc_stderr": 0.03873958714149352, + "acc_norm": 0.18627450980392157, + "acc_norm_stderr": 0.03873958714149352 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.40336134453781514, + "acc_stderr": 0.03186608121408831, + "acc_norm": 0.40336134453781514, + "acc_norm_stderr": 0.03186608121408831 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.46923076923076923, + "acc_stderr": 0.025302958890850154, + "acc_norm": 0.46923076923076923, + "acc_norm_stderr": 0.025302958890850154 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.53, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.53, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3251231527093596, + "acc_stderr": 0.032957975663112704, + "acc_norm": 0.3251231527093596, + "acc_norm_stderr": 0.032957975663112704 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.45483870967741935, + "acc_stderr": 0.028327743091561063, + "acc_norm": 0.45483870967741935, + "acc_norm_stderr": 0.028327743091561063 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6581196581196581, + "acc_stderr": 0.03107502852650775, + "acc_norm": 0.6581196581196581, + "acc_norm_stderr": 0.03107502852650775 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.42641509433962266, + "acc_stderr": 0.03043779434298305, + "acc_norm": 0.42641509433962266, + "acc_norm_stderr": 0.03043779434298305 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5, + "acc_stderr": 0.04789131426105757, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04789131426105757 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 
0.3074074074074074, + "acc_stderr": 0.02813325257881565, + "acc_norm": 0.3074074074074074, + "acc_norm_stderr": 0.02813325257881565 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.271523178807947, + "acc_stderr": 0.03631329803969653, + "acc_norm": 0.271523178807947, + "acc_norm_stderr": 0.03631329803969653 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5024875621890548, + "acc_stderr": 0.03535490150137289, + "acc_norm": 0.5024875621890548, + "acc_norm_stderr": 0.03535490150137289 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3872832369942196, + "acc_stderr": 0.03714325906302065, + "acc_norm": 0.3872832369942196, + "acc_norm_stderr": 0.03714325906302065 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.28835978835978837, + "acc_stderr": 0.023330654054535886, + "acc_norm": 0.28835978835978837, + "acc_norm_stderr": 0.023330654054535886 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3680555555555556, + "acc_stderr": 0.04032999053960718, + "acc_norm": 0.3680555555555556, + "acc_norm_stderr": 0.04032999053960718 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.64, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.64, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4624277456647399, + "acc_stderr": 0.026842985519615375, + "acc_norm": 0.4624277456647399, + "acc_norm_stderr": 0.026842985519615375 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4662576687116564, + "acc_stderr": 0.039194155450484096, + "acc_norm": 0.4662576687116564, + "acc_norm_stderr": 0.039194155450484096 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4228395061728395, + "acc_stderr": 0.027487472980871598, + "acc_norm": 0.4228395061728395, + "acc_norm_stderr": 0.027487472980871598 + }, + 
"harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5025906735751295, + "acc_stderr": 0.03608390745384487, + "acc_norm": 0.5025906735751295, + "acc_norm_stderr": 0.03608390745384487 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.040969851398436716, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.040969851398436716 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.48440366972477067, + "acc_stderr": 0.02142689153920805, + "acc_norm": 0.48440366972477067, + "acc_norm_stderr": 0.02142689153920805 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.29365079365079366, + "acc_stderr": 0.04073524322147124, + "acc_norm": 0.29365079365079366, + "acc_norm_stderr": 0.04073524322147124 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4117647058823529, + "acc_stderr": 0.028180596328259283, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.028180596328259283 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5867768595041323, + "acc_stderr": 0.04495087843548408, + "acc_norm": 0.5867768595041323, + "acc_norm_stderr": 0.04495087843548408 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4144736842105263, + "acc_stderr": 0.04008973785779206, + "acc_norm": 0.4144736842105263, + "acc_norm_stderr": 0.04008973785779206 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3562091503267974, + "acc_stderr": 0.0193733324207245, + "acc_norm": 0.3562091503267974, + "acc_norm_stderr": 0.0193733324207245 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3262411347517731, + "acc_stderr": 0.02796845304356317, + "acc_norm": 0.3262411347517731, + 
"acc_norm_stderr": 0.02796845304356317 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25, + "acc_stderr": 0.04109974682633932, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04109974682633932 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.03167468706828979, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.03167468706828979 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2446927374301676, + "acc_stderr": 0.014378169884098424, + "acc_norm": 0.2446927374301676, + "acc_norm_stderr": 0.014378169884098424 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3272058823529412, + "acc_stderr": 0.028501452860396587, + "acc_norm": 0.3272058823529412, + "acc_norm_stderr": 0.028501452860396587 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.45714285714285713, + "acc_stderr": 0.031891418324213966, + "acc_norm": 0.45714285714285713, + "acc_norm_stderr": 0.031891418324213966 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5569620253164557, + "acc_stderr": 0.03233532777533484, + "acc_norm": 0.5569620253164557, + "acc_norm_stderr": 0.03233532777533484 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3050847457627119, + "acc_stderr": 0.011759939618085455, + "acc_norm": 0.3050847457627119, + "acc_norm_stderr": 0.011759939618085455 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.43137254901960786, + "acc_stderr": 0.03476099060501637, + "acc_norm": 0.43137254901960786, + "acc_norm_stderr": 0.03476099060501637 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.509090909090909, + "acc_stderr": 
0.03903698647748441, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.03903698647748441 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.25458996328029376, + "mc1_stderr": 0.015250117079156474, + "mc2": 0.38803930344769905, + "mc2_stderr": 0.016051378097776924 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5088547815820543, + "acc_stderr": 0.01718765819933674, + "acc_norm": 0.5395513577331759, + "acc_norm_stderr": 0.01713648762604985 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + 
"harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "DopeorNope/Dear_My_best_Friends-13B", + "model_sha": "6daa83ba6ec2d97df05703f1cb6665eb68b11e0e", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/DopeorNope/Dear_My_best_Friends-v2-13B/result_2023-11-25 04:58:26.json b/DopeorNope/Dear_My_best_Friends-v2-13B/result_2023-11-25 04:58:26.json new file mode 100644 index 0000000000000000000000000000000000000000..00e2119ea358ca7d24a9ee3d34d9508e45c5ceff --- /dev/null +++ b/DopeorNope/Dear_My_best_Friends-v2-13B/result_2023-11-25 04:58:26.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4761092150170648, + "acc_stderr": 
0.014594701798071654, + "acc_norm": 0.5443686006825939, + "acc_norm_stderr": 0.01455374993930687 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4691296554471221, + "acc_stderr": 0.004980262025472487, + "acc_norm": 0.617307309300936, + "acc_norm_stderr": 0.004850508945116094 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.543859649122807, + "acc_stderr": 0.03820042586602966, + "acc_norm": 0.543859649122807, + "acc_norm_stderr": 0.03820042586602966 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5242718446601942, + "acc_stderr": 0.049449010929737795, + "acc_norm": 0.5242718446601942, + "acc_norm_stderr": 0.049449010929737795 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.558109833971903, + "acc_stderr": 0.017758800534214407, + "acc_norm": 0.558109833971903, + "acc_norm_stderr": 0.017758800534214407 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4888888888888889, + "acc_stderr": 0.04318275491977976, + "acc_norm": 0.4888888888888889, + "acc_norm_stderr": 0.04318275491977976 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847415, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847415 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3872340425531915, + "acc_stderr": 0.03184389265339525, + "acc_norm": 0.3872340425531915, + "acc_norm_stderr": 0.03184389265339525 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3674698795180723, + "acc_stderr": 0.03753267402120574, + "acc_norm": 0.3674698795180723, + "acc_norm_stderr": 0.03753267402120574 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4790996784565916, + "acc_stderr": 0.028373270961069414, + "acc_norm": 0.4790996784565916, + "acc_norm_stderr": 0.028373270961069414 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5246636771300448, + "acc_stderr": 0.033516951676526276, + "acc_norm": 0.5246636771300448, + "acc_norm_stderr": 0.033516951676526276 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4122137404580153, + "acc_stderr": 
0.04317171194870254, + "acc_norm": 0.4122137404580153, + "acc_norm_stderr": 0.04317171194870254 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5757575757575758, + "acc_stderr": 0.03521224908841587, + "acc_norm": 0.5757575757575758, + "acc_norm_stderr": 0.03521224908841587 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.36551724137931035, + "acc_stderr": 0.04013124195424386, + "acc_norm": 0.36551724137931035, + "acc_norm_stderr": 0.04013124195424386 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.041583075330832865, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.041583075330832865 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4957983193277311, + "acc_stderr": 0.0324773433444811, + "acc_norm": 0.4957983193277311, + "acc_norm_stderr": 0.0324773433444811 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.45897435897435895, + "acc_stderr": 0.025265525491284295, + "acc_norm": 0.45897435897435895, + "acc_norm_stderr": 0.025265525491284295 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.53, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.53, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5092592592592593, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.5092592592592593, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.03465304488406796, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.03465304488406796 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 
0.47096774193548385, + "acc_stderr": 0.028396016402761005, + "acc_norm": 0.47096774193548385, + "acc_norm_stderr": 0.028396016402761005 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6623931623931624, + "acc_stderr": 0.030980296992618558, + "acc_norm": 0.6623931623931624, + "acc_norm_stderr": 0.030980296992618558 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.44528301886792454, + "acc_stderr": 0.030588052974270655, + "acc_norm": 0.44528301886792454, + "acc_norm_stderr": 0.030588052974270655 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5272727272727272, + "acc_stderr": 0.04782001791380061, + "acc_norm": 0.5272727272727272, + "acc_norm_stderr": 0.04782001791380061 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.02684205787383371, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.02684205787383371 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + "acc_stderr": 0.037345356767871984, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.037345356767871984 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5572139303482587, + "acc_stderr": 0.03512310964123935, + "acc_norm": 0.5572139303482587, + "acc_norm_stderr": 0.03512310964123935 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3815028901734104, + "acc_stderr": 0.03703851193099521, + "acc_norm": 0.3815028901734104, + "acc_norm_stderr": 0.03703851193099521 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.023919984164047736, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.023919984164047736 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3819444444444444, + "acc_stderr": 0.040629907841466674, + "acc_norm": 0.3819444444444444, + "acc_norm_stderr": 0.040629907841466674 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 
0.04351941398892446 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4797687861271676, + "acc_stderr": 0.026897049996382868, + "acc_norm": 0.4797687861271676, + "acc_norm_stderr": 0.026897049996382868 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5398773006134969, + "acc_stderr": 0.039158572914369714, + "acc_norm": 0.5398773006134969, + "acc_norm_stderr": 0.039158572914369714 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.47530864197530864, + "acc_stderr": 0.02778680093142745, + "acc_norm": 0.47530864197530864, + "acc_norm_stderr": 0.02778680093142745 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5803108808290155, + "acc_stderr": 0.035615873276858834, + "acc_norm": 0.5803108808290155, + "acc_norm_stderr": 0.035615873276858834 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.042270544512321984, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.042270544512321984 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6018348623853211, + "acc_stderr": 0.020987989422654254, + "acc_norm": 0.6018348623853211, + "acc_norm_stderr": 0.020987989422654254 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.31746031746031744, + "acc_stderr": 0.04163453031302859, + "acc_norm": 0.31746031746031744, + "acc_norm_stderr": 0.04163453031302859 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4150326797385621, + "acc_stderr": 0.028213504177824093, + "acc_norm": 0.4150326797385621, + "acc_norm_stderr": 0.028213504177824093 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + 
"acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.628099173553719, + "acc_stderr": 0.04412015806624504, + "acc_norm": 0.628099173553719, + "acc_norm_stderr": 0.04412015806624504 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.35526315789473684, + "acc_stderr": 0.03894734487013317, + "acc_norm": 0.35526315789473684, + "acc_norm_stderr": 0.03894734487013317 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.37745098039215685, + "acc_stderr": 0.019610851474880286, + "acc_norm": 0.37745098039215685, + "acc_norm_stderr": 0.019610851474880286 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.34397163120567376, + "acc_stderr": 0.02833801742861132, + "acc_norm": 0.34397163120567376, + "acc_norm_stderr": 0.02833801742861132 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25, + "acc_stderr": 0.04109974682633932, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04109974682633932 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.35648148148148145, + "acc_stderr": 0.032664783315272714, + "acc_norm": 0.35648148148148145, + "acc_norm_stderr": 0.032664783315272714 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24804469273743016, + "acc_stderr": 0.014444157808261445, + "acc_norm": 0.24804469273743016, + "acc_norm_stderr": 0.014444157808261445 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.43014705882352944, + "acc_stderr": 0.030074971917302875, + "acc_norm": 0.43014705882352944, + "acc_norm_stderr": 0.030074971917302875 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.40408163265306124, + "acc_stderr": 
0.03141470802586589, + "acc_norm": 0.40408163265306124, + "acc_norm_stderr": 0.03141470802586589 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6244725738396625, + "acc_stderr": 0.03152256243091156, + "acc_norm": 0.6244725738396625, + "acc_norm_stderr": 0.03152256243091156 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3604954367666232, + "acc_stderr": 0.012263110237299233, + "acc_norm": 0.3604954367666232, + "acc_norm_stderr": 0.012263110237299233 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5392156862745098, + "acc_stderr": 0.03498501649369527, + "acc_norm": 0.5392156862745098, + "acc_norm_stderr": 0.03498501649369527 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.593939393939394, + "acc_stderr": 0.038348163554011806, + "acc_norm": 0.593939393939394, + "acc_norm_stderr": 0.038348163554011806 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3317013463892289, + "mc1_stderr": 0.016482148810241463, + "mc2": 0.47986111470028925, + "mc2_stderr": 0.01605545610064073 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4852420306965762, + "acc_stderr": 0.017182864434998564, + "acc_norm": 0.5171192443919717, + "acc_norm_stderr": 0.01718027524608563 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + 
"harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + 
"model_name": "DopeorNope/Dear_My_best_Friends-v2-13B", + "model_sha": "89fadbe4d9c022448dd86b2405043887561cf791", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/DopeorNope/KOAT-5.8b/result_2023-10-01 15:52:29.json b/DopeorNope/KOAT-5.8b/result_2023-10-01 15:52:29.json new file mode 100644 index 0000000000000000000000000000000000000000..8c9f1092e2ad00e70deff8ceb575581573e5a840 --- /dev/null +++ b/DopeorNope/KOAT-5.8b/result_2023-10-01 15:52:29.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.25597269624573377, + "acc_stderr": 0.012753013241244513, + "acc_norm": 0.30716723549488056, + "acc_norm_stderr": 0.013481034054980945 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3408683529177455, + "acc_stderr": 0.00473032455662415, + "acc_norm": 0.4153555068711412, + "acc_norm_stderr": 0.004917761181740164 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.25146198830409355, + "acc_stderr": 0.033275044238468436, + "acc_norm": 0.25146198830409355, + "acc_norm_stderr": 0.033275044238468436 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.23300970873786409, + "acc_stderr": 0.041858325989283136, + "acc_norm": 0.23300970873786409, + "acc_norm_stderr": 0.041858325989283136 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.21966794380587484, + "acc_stderr": 0.01480538447837116, + "acc_norm": 0.21966794380587484, + "acc_norm_stderr": 0.01480538447837116 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.24444444444444444, + "acc_stderr": 0.03712537833614866, + "acc_norm": 0.24444444444444444, + "acc_norm_stderr": 0.03712537833614866 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.22127659574468084, + 
"acc_stderr": 0.027136349602424063, + "acc_norm": 0.22127659574468084, + "acc_norm_stderr": 0.027136349602424063 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.26506024096385544, + "acc_stderr": 0.03436024037944968, + "acc_norm": 0.26506024096385544, + "acc_norm_stderr": 0.03436024037944968 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.24758842443729903, + "acc_stderr": 0.024513879973621967, + "acc_norm": 0.24758842443729903, + "acc_norm_stderr": 0.024513879973621967 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.25112107623318386, + "acc_stderr": 0.029105220833224605, + "acc_norm": 0.25112107623318386, + "acc_norm_stderr": 0.029105220833224605 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.22900763358778625, + "acc_stderr": 0.036853466317118506, + "acc_norm": 0.22900763358778625, + "acc_norm_stderr": 0.036853466317118506 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909281, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909281 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.23737373737373738, + "acc_stderr": 0.03031371053819888, + "acc_norm": 0.23737373737373738, + "acc_norm_stderr": 0.03031371053819888 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2206896551724138, + "acc_stderr": 0.034559302019248124, + "acc_norm": 0.2206896551724138, + "acc_norm_stderr": 0.034559302019248124 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.13725490196078433, + "acc_stderr": 0.03424084669891521, + "acc_norm": 0.13725490196078433, + "acc_norm_stderr": 0.03424084669891521 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.31512605042016806, + "acc_stderr": 0.030176808288974337, + "acc_norm": 0.31512605042016806, + "acc_norm_stderr": 0.030176808288974337 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2794871794871795, + "acc_stderr": 0.022752388839776826, + "acc_norm": 0.2794871794871795, + "acc_norm_stderr": 
0.022752388839776826 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.13, + "acc_stderr": 0.033799766898963086, + "acc_norm": 0.13, + "acc_norm_stderr": 0.033799766898963086 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.18518518518518517, + "acc_stderr": 0.03755265865037181, + "acc_norm": 0.18518518518518517, + "acc_norm_stderr": 0.03755265865037181 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.26108374384236455, + "acc_stderr": 0.030903796952114485, + "acc_norm": 0.26108374384236455, + "acc_norm_stderr": 0.030903796952114485 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3161290322580645, + "acc_stderr": 0.026450874489042767, + "acc_norm": 0.3161290322580645, + "acc_norm_stderr": 0.026450874489042767 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2564102564102564, + "acc_stderr": 0.028605953702004264, + "acc_norm": 0.2564102564102564, + "acc_norm_stderr": 0.028605953702004264 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.21132075471698114, + "acc_stderr": 0.02512576648482784, + "acc_norm": 0.21132075471698114, + "acc_norm_stderr": 0.02512576648482784 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.32727272727272727, + "acc_stderr": 0.0449429086625209, + "acc_norm": 0.32727272727272727, + "acc_norm_stderr": 0.0449429086625209 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.026719240783712166, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.026719240783712166 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.26490066225165565, + "acc_stderr": 0.03603038545360384, + "acc_norm": 0.26490066225165565, + "acc_norm_stderr": 0.03603038545360384 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.2537313432835821, + "acc_stderr": 0.030769444967296014, + "acc_norm": 
0.2537313432835821, + "acc_norm_stderr": 0.030769444967296014 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.26011560693641617, + "acc_stderr": 0.033450369167889904, + "acc_norm": 0.26011560693641617, + "acc_norm_stderr": 0.033450369167889904 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.24338624338624337, + "acc_stderr": 0.02210112878741543, + "acc_norm": 0.24338624338624337, + "acc_norm_stderr": 0.02210112878741543 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2361111111111111, + "acc_stderr": 0.03551446610810826, + "acc_norm": 0.2361111111111111, + "acc_norm_stderr": 0.03551446610810826 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.24566473988439305, + "acc_stderr": 0.02317629820399201, + "acc_norm": 0.24566473988439305, + "acc_norm_stderr": 0.02317629820399201 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.25153374233128833, + "acc_stderr": 0.03408997886857529, + "acc_norm": 0.25153374233128833, + "acc_norm_stderr": 0.03408997886857529 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.27469135802469136, + "acc_stderr": 0.024836057868294677, + "acc_norm": 0.27469135802469136, + "acc_norm_stderr": 0.024836057868294677 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.27461139896373055, + "acc_stderr": 0.03221024508041154, + "acc_norm": 0.27461139896373055, + "acc_norm_stderr": 0.03221024508041154 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.30701754385964913, + "acc_stderr": 
0.043391383225798594, + "acc_norm": 0.30701754385964913, + "acc_norm_stderr": 0.043391383225798594 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.23486238532110093, + "acc_stderr": 0.018175110510343602, + "acc_norm": 0.23486238532110093, + "acc_norm_stderr": 0.018175110510343602 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30158730158730157, + "acc_stderr": 0.04104947269903394, + "acc_norm": 0.30158730158730157, + "acc_norm_stderr": 0.04104947269903394 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.025646863097137904, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.025646863097137904 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2066115702479339, + "acc_stderr": 0.03695980128098824, + "acc_norm": 0.2066115702479339, + "acc_norm_stderr": 0.03695980128098824 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.25, + "acc_stderr": 0.03523807393012047, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03523807393012047 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.21895424836601307, + "acc_stderr": 0.01672993756553755, + "acc_norm": 0.21895424836601307, + "acc_norm_stderr": 0.01672993756553755 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2553191489361702, + "acc_stderr": 0.026011992930902002, + "acc_norm": 0.2553191489361702, + "acc_norm_stderr": 0.026011992930902002 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.15178571428571427, + "acc_stderr": 0.034057028381856924, + "acc_norm": 0.15178571428571427, + "acc_norm_stderr": 0.034057028381856924 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.27314814814814814, + "acc_stderr": 0.03038805130167812, + "acc_norm": 0.27314814814814814, + "acc_norm_stderr": 0.03038805130167812 + }, + "harness|ko_mmlu_moral_scenarios|5": { + 
"acc": 0.25027932960893856, + "acc_stderr": 0.014487500852850417, + "acc_norm": 0.25027932960893856, + "acc_norm_stderr": 0.014487500852850417 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.21, + "acc_stderr": 0.04093601807403326, + "acc_norm": 0.21, + "acc_norm_stderr": 0.04093601807403326 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421296, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421296 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.026799562024887685, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.026799562024887685 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.22448979591836735, + "acc_stderr": 0.02671143055553839, + "acc_norm": 0.22448979591836735, + "acc_norm_stderr": 0.02671143055553839 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.23628691983122363, + "acc_stderr": 0.027652153144159267, + "acc_norm": 0.23628691983122363, + "acc_norm_stderr": 0.027652153144159267 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2653194263363755, + "acc_stderr": 0.011276198843958873, + "acc_norm": 0.2653194263363755, + "acc_norm_stderr": 0.011276198843958873 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.25980392156862747, + "acc_stderr": 0.030778554678693268, + "acc_norm": 0.25980392156862747, + "acc_norm_stderr": 0.030778554678693268 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.03453131801885415, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.03453131801885415 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24479804161566707, + "mc1_stderr": 0.01505186948671501, + "mc2": 0.41023662722679205, + "mc2_stderr": 0.016160843398647234 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.30460448642266824, + "acc_stderr": 0.01582336727312939, + "acc_norm": 0.34710743801652894, + "acc_norm_stderr": 
0.016366945603281273 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + 
"harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "DopeorNope/KOAT-5.8b", + "model_sha": "768c40d2ffbddbc8aa15eed33234eef248eb43e7", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/DopeorNope/Ko-Mixtral-MoE-7Bx2/result_2024-01-20 07:42:16.json b/DopeorNope/Ko-Mixtral-MoE-7Bx2/result_2024-01-20 07:42:16.json new file mode 100644 index 0000000000000000000000000000000000000000..7f7cb46d551e2555bd0a2e41b9fe98e7c526b43c --- /dev/null +++ b/DopeorNope/Ko-Mixtral-MoE-7Bx2/result_2024-01-20 07:42:16.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.32593856655290104, + "acc_stderr": 0.013697432466693239, + "acc_norm": 0.3703071672354949, + "acc_norm_stderr": 0.01411129875167495 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3575980880302729, + "acc_stderr": 0.004783133725599501, + "acc_norm": 0.45907189802828124, + "acc_norm_stderr": 0.0049730364538637176 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.3684210526315789, + "acc_stderr": 0.036996580176568775, + "acc_norm": 0.3684210526315789, + "acc_norm_stderr": 0.036996580176568775 + }, + 
"harness|ko_mmlu_management|5": { + "acc": 0.5339805825242718, + "acc_stderr": 0.04939291447273481, + "acc_norm": 0.5339805825242718, + "acc_norm_stderr": 0.04939291447273481 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.017570705239256534, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.017570705239256534 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04072314811876837, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04072314811876837 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3617021276595745, + "acc_stderr": 0.0314108219759624, + "acc_norm": 0.3617021276595745, + "acc_norm_stderr": 0.0314108219759624 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.2891566265060241, + "acc_stderr": 0.03529486801511115, + "acc_norm": 0.2891566265060241, + "acc_norm_stderr": 0.03529486801511115 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.38263665594855306, + "acc_stderr": 0.027604689028581986, + "acc_norm": 0.38263665594855306, + "acc_norm_stderr": 0.027604689028581986 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3991031390134529, + "acc_stderr": 0.03286745312567961, + "acc_norm": 0.3991031390134529, + "acc_norm_stderr": 0.03286745312567961 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3893129770992366, + "acc_stderr": 0.04276486542814591, + "acc_norm": 0.3893129770992366, + "acc_norm_stderr": 0.04276486542814591 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.45454545454545453, + "acc_stderr": 0.03547601494006937, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.03547601494006937 + }, + 
"harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4068965517241379, + "acc_stderr": 0.040937939812662374, + "acc_norm": 0.4068965517241379, + "acc_norm_stderr": 0.040937939812662374 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04690650298201943, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04690650298201943 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.46638655462184875, + "acc_stderr": 0.03240501447690071, + "acc_norm": 0.46638655462184875, + "acc_norm_stderr": 0.03240501447690071 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.39487179487179486, + "acc_stderr": 0.024784316942156367, + "acc_norm": 0.39487179487179486, + "acc_norm_stderr": 0.024784316942156367 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3497536945812808, + "acc_stderr": 0.033554009049695646, + "acc_norm": 0.3497536945812808, + "acc_norm_stderr": 0.033554009049695646 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.38387096774193546, + "acc_stderr": 0.02766618207553963, + "acc_norm": 0.38387096774193546, + "acc_norm_stderr": 0.02766618207553963 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.688034188034188, + "acc_stderr": 0.03035152732334494, + "acc_norm": 0.688034188034188, + "acc_norm_stderr": 0.03035152732334494 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.39245283018867927, + "acc_stderr": 0.030052580579557838, + "acc_norm": 
0.39245283018867927, + "acc_norm_stderr": 0.030052580579557838 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4818181818181818, + "acc_stderr": 0.04785964010794916, + "acc_norm": 0.4818181818181818, + "acc_norm_stderr": 0.04785964010794916 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.32222222222222224, + "acc_stderr": 0.028493465091028593, + "acc_norm": 0.32222222222222224, + "acc_norm_stderr": 0.028493465091028593 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.03780445850526732, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.03780445850526732 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5074626865671642, + "acc_stderr": 0.03535140084276719, + "acc_norm": 0.5074626865671642, + "acc_norm_stderr": 0.03535140084276719 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3872832369942196, + "acc_stderr": 0.03714325906302065, + "acc_norm": 0.3872832369942196, + "acc_norm_stderr": 0.03714325906302065 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.36507936507936506, + "acc_stderr": 0.02479606060269994, + "acc_norm": 0.36507936507936506, + "acc_norm_stderr": 0.02479606060269994 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3402777777777778, + "acc_stderr": 0.03962135573486219, + "acc_norm": 0.3402777777777778, + "acc_norm_stderr": 0.03962135573486219 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.43641618497109824, + "acc_stderr": 0.026700545424943677, + "acc_norm": 0.43641618497109824, + "acc_norm_stderr": 0.026700545424943677 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4110429447852761, + 
"acc_stderr": 0.038656978537853624, + "acc_norm": 0.4110429447852761, + "acc_norm_stderr": 0.038656978537853624 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3765432098765432, + "acc_stderr": 0.026959344518747784, + "acc_norm": 0.3765432098765432, + "acc_norm_stderr": 0.026959344518747784 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.47668393782383417, + "acc_stderr": 0.03604513672442205, + "acc_norm": 0.47668393782383417, + "acc_norm_stderr": 0.03604513672442205 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.04227054451232199, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.04227054451232199 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.46422018348623856, + "acc_stderr": 0.021382364775701906, + "acc_norm": 0.46422018348623856, + "acc_norm_stderr": 0.021382364775701906 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3253968253968254, + "acc_stderr": 0.041905964388711366, + "acc_norm": 0.3253968253968254, + "acc_norm_stderr": 0.041905964388711366 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.40522875816993464, + "acc_stderr": 0.028110928492809082, + "acc_norm": 0.40522875816993464, + "acc_norm_stderr": 0.028110928492809082 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5702479338842975, + "acc_stderr": 0.04519082021319772, + "acc_norm": 0.5702479338842975, + "acc_norm_stderr": 0.04519082021319772 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3223684210526316, + "acc_stderr": 0.03803510248351585, + "acc_norm": 0.3223684210526316, + "acc_norm_stderr": 0.03803510248351585 + }, + 
"harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3366013071895425, + "acc_stderr": 0.019117213911495165, + "acc_norm": 0.3366013071895425, + "acc_norm_stderr": 0.019117213911495165 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.33687943262411346, + "acc_stderr": 0.02819553487396673, + "acc_norm": 0.33687943262411346, + "acc_norm_stderr": 0.02819553487396673 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4017857142857143, + "acc_stderr": 0.04653333146973646, + "acc_norm": 0.4017857142857143, + "acc_norm_stderr": 0.04653333146973646 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.33796296296296297, + "acc_stderr": 0.03225941352631295, + "acc_norm": 0.33796296296296297, + "acc_norm_stderr": 0.03225941352631295 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24581005586592178, + "acc_stderr": 0.014400296429225601, + "acc_norm": 0.24581005586592178, + "acc_norm_stderr": 0.014400296429225601 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3088235294117647, + "acc_stderr": 0.02806499816704009, + "acc_norm": 0.3088235294117647, + "acc_norm_stderr": 0.02806499816704009 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4326530612244898, + "acc_stderr": 0.03171752824062664, + "acc_norm": 0.4326530612244898, + "acc_norm_stderr": 0.03171752824062664 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.569620253164557, + "acc_stderr": 0.03223017195937599, + "acc_norm": 0.569620253164557, + "acc_norm_stderr": 0.03223017195937599 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.31681877444589307, + "acc_stderr": 0.011882349954723, + 
"acc_norm": 0.31681877444589307, + "acc_norm_stderr": 0.011882349954723 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.39705882352941174, + "acc_stderr": 0.03434131164719129, + "acc_norm": 0.39705882352941174, + "acc_norm_stderr": 0.03434131164719129 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.45454545454545453, + "acc_stderr": 0.038881769216741004, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.038881769216741004 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2974296205630355, + "mc1_stderr": 0.016002651487361005, + "mc2": 0.47967789733728444, + "mc2_stderr": 0.015628805381493933 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3955135773317591, + "acc_stderr": 0.016810815902206042, + "acc_norm": 0.4462809917355372, + "acc_norm_stderr": 0.017090852631668336 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 
1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "DopeorNope/Ko-Mixtral-MoE-7Bx2", + "model_sha": "af30206f35cca42c24b11722c944cfea46e42208", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/DopeorNope/Ko-Mixtral-v1.3-MoE-7Bx2/result_2024-01-25 00:55:33.json b/DopeorNope/Ko-Mixtral-v1.3-MoE-7Bx2/result_2024-01-25 
00:55:33.json new file mode 100644 index 0000000000000000000000000000000000000000..7481d8fbd50229a34c30649e3b144d35351b2621 --- /dev/null +++ b/DopeorNope/Ko-Mixtral-v1.3-MoE-7Bx2/result_2024-01-25 00:55:33.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.28071672354948807, + "acc_stderr": 0.013131238126975588, + "acc_norm": 0.31399317406143346, + "acc_norm_stderr": 0.013562691224726295 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3169687313284206, + "acc_stderr": 0.004643441945489851, + "acc_norm": 0.37572196773551086, + "acc_norm_stderr": 0.004833189651626803 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.03446296217088426, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.03446296217088426 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4563106796116505, + "acc_stderr": 0.04931801994220414, + "acc_norm": 0.4563106796116505, + "acc_norm_stderr": 0.04931801994220414 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3895274584929757, + "acc_stderr": 0.017438082556264594, + "acc_norm": 0.3895274584929757, + "acc_norm_stderr": 0.017438082556264594 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34814814814814815, + "acc_stderr": 0.041153246103369526, + "acc_norm": 0.34814814814814815, + "acc_norm_stderr": 0.041153246103369526 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3446808510638298, + "acc_stderr": 0.03106898596312215, + "acc_norm": 0.3446808510638298, + "acc_norm_stderr": 0.03106898596312215 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3192771084337349, + "acc_stderr": 0.0362933532994786, + "acc_norm": 0.3192771084337349, + "acc_norm_stderr": 0.0362933532994786 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.40514469453376206, + "acc_stderr": 0.02788238379132595, + "acc_norm": 
0.40514469453376206, + "acc_norm_stderr": 0.02788238379132595 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.34080717488789236, + "acc_stderr": 0.03181149747055359, + "acc_norm": 0.34080717488789236, + "acc_norm_stderr": 0.03181149747055359 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3969465648854962, + "acc_stderr": 0.04291135671009225, + "acc_norm": 0.3969465648854962, + "acc_norm_stderr": 0.04291135671009225 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3838383838383838, + "acc_stderr": 0.03464881675016337, + "acc_norm": 0.3838383838383838, + "acc_norm_stderr": 0.03464881675016337 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4068965517241379, + "acc_stderr": 0.04093793981266237, + "acc_norm": 0.4068965517241379, + "acc_norm_stderr": 0.04093793981266237 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.04023382273617747, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.04023382273617747 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.42436974789915966, + "acc_stderr": 0.03210479051015776, + "acc_norm": 0.42436974789915966, + "acc_norm_stderr": 0.03210479051015776 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3974358974358974, + "acc_stderr": 0.024811920017903836, + "acc_norm": 0.3974358974358974, + "acc_norm_stderr": 0.024811920017903836 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 
0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.35960591133004927, + "acc_stderr": 0.03376458246509567, + "acc_norm": 0.35960591133004927, + "acc_norm_stderr": 0.03376458246509567 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3870967741935484, + "acc_stderr": 0.02770935967503249, + "acc_norm": 0.3870967741935484, + "acc_norm_stderr": 0.02770935967503249 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6111111111111112, + "acc_stderr": 0.03193705726200293, + "acc_norm": 0.6111111111111112, + "acc_norm_stderr": 0.03193705726200293 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.37358490566037733, + "acc_stderr": 0.029773082713319878, + "acc_norm": 0.37358490566037733, + "acc_norm_stderr": 0.029773082713319878 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.45454545454545453, + "acc_stderr": 0.04769300568972743, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.04769300568972743 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.027940457136228416, + "acc_norm": 0.3, + "acc_norm_stderr": 0.027940457136228416 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.271523178807947, + "acc_stderr": 0.036313298039696525, + "acc_norm": 0.271523178807947, + "acc_norm_stderr": 0.036313298039696525 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5074626865671642, + "acc_stderr": 0.03535140084276719, + "acc_norm": 0.5074626865671642, + "acc_norm_stderr": 0.03535140084276719 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3872832369942196, + "acc_stderr": 0.03714325906302065, + "acc_norm": 0.3872832369942196, + "acc_norm_stderr": 0.03714325906302065 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3386243386243386, + "acc_stderr": 0.024373197867983053, + "acc_norm": 0.3386243386243386, + "acc_norm_stderr": 0.024373197867983053 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3194444444444444, + "acc_stderr": 
0.03899073687357335, + "acc_norm": 0.3194444444444444, + "acc_norm_stderr": 0.03899073687357335 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956913, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956913 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.38439306358381503, + "acc_stderr": 0.026189666966272035, + "acc_norm": 0.38439306358381503, + "acc_norm_stderr": 0.026189666966272035 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3803680981595092, + "acc_stderr": 0.03814269893261837, + "acc_norm": 0.3803680981595092, + "acc_norm_stderr": 0.03814269893261837 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.35802469135802467, + "acc_stderr": 0.026675611926037093, + "acc_norm": 0.35802469135802467, + "acc_norm_stderr": 0.026675611926037093 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.3160621761658031, + "acc_stderr": 0.033553973696861736, + "acc_norm": 0.3160621761658031, + "acc_norm_stderr": 0.033553973696861736 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.32456140350877194, + "acc_stderr": 0.04404556157374768, + "acc_norm": 0.32456140350877194, + "acc_norm_stderr": 0.04404556157374768 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3889908256880734, + "acc_stderr": 0.020902300887392866, + "acc_norm": 0.3889908256880734, + "acc_norm_stderr": 0.020902300887392866 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2698412698412698, + "acc_stderr": 0.03970158273235172, + "acc_norm": 0.2698412698412698, + "acc_norm_stderr": 0.03970158273235172 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4019607843137255, + 
"acc_stderr": 0.028074158947600663, + "acc_norm": 0.4019607843137255, + "acc_norm_stderr": 0.028074158947600663 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5785123966942148, + "acc_stderr": 0.045077322787750874, + "acc_norm": 0.5785123966942148, + "acc_norm_stderr": 0.045077322787750874 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.2565789473684211, + "acc_stderr": 0.0355418036802569, + "acc_norm": 0.2565789473684211, + "acc_norm_stderr": 0.0355418036802569 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.33169934640522875, + "acc_stderr": 0.019047485239360385, + "acc_norm": 0.33169934640522875, + "acc_norm_stderr": 0.019047485239360385 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.30141843971631205, + "acc_stderr": 0.02737412888263115, + "acc_norm": 0.30141843971631205, + "acc_norm_stderr": 0.02737412888263115 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.32142857142857145, + "acc_stderr": 0.04432804055291519, + "acc_norm": 0.32142857142857145, + "acc_norm_stderr": 0.04432804055291519 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.28703703703703703, + "acc_stderr": 0.030851992993257013, + "acc_norm": 0.28703703703703703, + "acc_norm_stderr": 0.030851992993257013 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2569832402234637, + "acc_stderr": 0.014614465821966344, + "acc_norm": 0.2569832402234637, + "acc_norm_stderr": 0.014614465821966344 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + 
"harness|ko_mmlu_professional_medicine|5": { + "acc": 0.27941176470588236, + "acc_stderr": 0.027257202606114948, + "acc_norm": 0.27941176470588236, + "acc_norm_stderr": 0.027257202606114948 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3306122448979592, + "acc_stderr": 0.030116426296540603, + "acc_norm": 0.3306122448979592, + "acc_norm_stderr": 0.030116426296540603 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.4092827004219409, + "acc_stderr": 0.03200704183359592, + "acc_norm": 0.4092827004219409, + "acc_norm_stderr": 0.03200704183359592 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2953063885267275, + "acc_stderr": 0.011651061936208828, + "acc_norm": 0.2953063885267275, + "acc_norm_stderr": 0.011651061936208828 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.35294117647058826, + "acc_stderr": 0.03354092437591519, + "acc_norm": 0.35294117647058826, + "acc_norm_stderr": 0.03354092437591519 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.44242424242424244, + "acc_stderr": 0.03878372113711274, + "acc_norm": 0.44242424242424244, + "acc_norm_stderr": 0.03878372113711274 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.29253365973072215, + "mc1_stderr": 0.015925597445286165, + "mc2": 0.47192954180885394, + "mc2_stderr": 0.015632043165156227 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3919716646989374, + "acc_stderr": 0.01678433211942408, + "acc_norm": 0.4734356552538371, + "acc_norm_stderr": 0.017166075717577747 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + 
"harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + 
"harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "DopeorNope/Ko-Mixtral-v1.3-MoE-7Bx2", + "model_sha": "8915c4e51561aa3c26da0ae802319eda218341d7", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/DopeorNope/Mistralopithecus-v0.1-10.7B/result_2024-01-20 03:02:30.json b/DopeorNope/Mistralopithecus-v0.1-10.7B/result_2024-01-20 03:02:30.json new file mode 100644 index 0000000000000000000000000000000000000000..6a4388df9d0d5a731282166a5e430c455daa5ba3 --- /dev/null +++ b/DopeorNope/Mistralopithecus-v0.1-10.7B/result_2024-01-20 03:02:30.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.1945392491467577, + "acc_stderr": 0.011567709174648727, + "acc_norm": 0.22525597269624573, + "acc_norm_stderr": 0.012207839995407309 + }, + "harness|ko_hellaswag|10": { + "acc": 0.26419040031866164, + "acc_stderr": 0.004400000822742062, + "acc_norm": 0.28201553475403307, + "acc_norm_stderr": 0.00449061224533522 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.3216374269005848, + "acc_stderr": 0.03582529442573122, + "acc_norm": 0.3216374269005848, + "acc_norm_stderr": 0.03582529442573122 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2524271844660194, + "acc_stderr": 0.0430125039969088, + "acc_norm": 0.2524271844660194, + "acc_norm_stderr": 0.0430125039969088 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.28735632183908044, + "acc_stderr": 0.0161824107306827, + "acc_norm": 0.28735632183908044, + "acc_norm_stderr": 0.0161824107306827 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3037037037037037, + "acc_stderr": 
0.03972552884785137, + "acc_norm": 0.3037037037037037, + "acc_norm_stderr": 0.03972552884785137 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2553191489361702, + "acc_stderr": 0.028504856470514196, + "acc_norm": 0.2553191489361702, + "acc_norm_stderr": 0.028504856470514196 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.26506024096385544, + "acc_stderr": 0.03436024037944966, + "acc_norm": 0.26506024096385544, + "acc_norm_stderr": 0.03436024037944966 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2958199356913183, + "acc_stderr": 0.025922371788818784, + "acc_norm": 0.2958199356913183, + "acc_norm_stderr": 0.025922371788818784 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.23766816143497757, + "acc_stderr": 0.028568079464714274, + "acc_norm": 0.23766816143497757, + "acc_norm_stderr": 0.028568079464714274 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2595419847328244, + "acc_stderr": 0.03844876139785271, + "acc_norm": 0.2595419847328244, + "acc_norm_stderr": 0.03844876139785271 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.26262626262626265, + "acc_stderr": 0.031353050095330855, + "acc_norm": 0.26262626262626265, + "acc_norm_stderr": 0.031353050095330855 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3793103448275862, + "acc_stderr": 0.04043461861916747, + "acc_norm": 0.3793103448275862, + "acc_norm_stderr": 0.04043461861916747 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.04389869956808778, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.04389869956808778 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 
0.3487394957983193, + "acc_stderr": 0.030956636328566548, + "acc_norm": 0.3487394957983193, + "acc_norm_stderr": 0.030956636328566548 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.23333333333333334, + "acc_stderr": 0.021444547301560476, + "acc_norm": 0.23333333333333334, + "acc_norm_stderr": 0.021444547301560476 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04557239513497752, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04557239513497752 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2561576354679803, + "acc_stderr": 0.030712730070982592, + "acc_norm": 0.2561576354679803, + "acc_norm_stderr": 0.030712730070982592 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3258064516129032, + "acc_stderr": 0.026662010578567104, + "acc_norm": 0.3258064516129032, + "acc_norm_stderr": 0.026662010578567104 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.3247863247863248, + "acc_stderr": 0.03067902276549883, + "acc_norm": 0.3247863247863248, + "acc_norm_stderr": 0.03067902276549883 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.27169811320754716, + "acc_stderr": 0.027377706624670713, + "acc_norm": 0.27169811320754716, + "acc_norm_stderr": 0.027377706624670713 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.32727272727272727, + "acc_stderr": 0.04494290866252088, + "acc_norm": 0.32727272727272727, + "acc_norm_stderr": 0.04494290866252088 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.026719240783712163, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.026719240783712163 + }, + 
"harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.038020397601079024, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.038020397601079024 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.27860696517412936, + "acc_stderr": 0.031700561834973086, + "acc_norm": 0.27860696517412936, + "acc_norm_stderr": 0.031700561834973086 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2832369942196532, + "acc_stderr": 0.034355680560478746, + "acc_norm": 0.2832369942196532, + "acc_norm_stderr": 0.034355680560478746 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30687830687830686, + "acc_stderr": 0.023752928712112133, + "acc_norm": 0.30687830687830686, + "acc_norm_stderr": 0.023752928712112133 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.19444444444444445, + "acc_stderr": 0.03309615177059006, + "acc_norm": 0.19444444444444445, + "acc_norm_stderr": 0.03309615177059006 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.30057803468208094, + "acc_stderr": 0.02468531686725781, + "acc_norm": 0.30057803468208094, + "acc_norm_stderr": 0.02468531686725781 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3374233128834356, + "acc_stderr": 0.03714908409935573, + "acc_norm": 0.3374233128834356, + "acc_norm_stderr": 0.03714908409935573 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.30864197530864196, + "acc_stderr": 0.025702640260603767, + "acc_norm": 0.30864197530864196, + "acc_norm_stderr": 0.025702640260603767 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + 
}, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.23834196891191708, + "acc_stderr": 0.030748905363909895, + "acc_norm": 0.23834196891191708, + "acc_norm_stderr": 0.030748905363909895 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.04142439719489362, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.04142439719489362 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.23486238532110093, + "acc_stderr": 0.01817511051034358, + "acc_norm": 0.23486238532110093, + "acc_norm_stderr": 0.01817511051034358 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.20634920634920634, + "acc_stderr": 0.036196045241242494, + "acc_norm": 0.20634920634920634, + "acc_norm_stderr": 0.036196045241242494 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3235294117647059, + "acc_stderr": 0.026787453111906532, + "acc_norm": 0.3235294117647059, + "acc_norm_stderr": 0.026787453111906532 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932267, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932267 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.4214876033057851, + "acc_stderr": 0.045077322787750944, + "acc_norm": 0.4214876033057851, + "acc_norm_stderr": 0.045077322787750944 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.29605263157894735, + "acc_stderr": 0.03715062154998904, + "acc_norm": 0.29605263157894735, + "acc_norm_stderr": 0.03715062154998904 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2696078431372549, + "acc_stderr": 0.017952449196987866, + "acc_norm": 0.2696078431372549, + "acc_norm_stderr": 0.017952449196987866 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2730496453900709, + "acc_stderr": 0.02657786094330785, + "acc_norm": 0.2730496453900709, + "acc_norm_stderr": 0.02657786094330785 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25, + "acc_stderr": 0.04109974682633932, + "acc_norm": 
0.25, + "acc_norm_stderr": 0.04109974682633932 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2638888888888889, + "acc_stderr": 0.030058202704309846, + "acc_norm": 0.2638888888888889, + "acc_norm_stderr": 0.030058202704309846 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.26145251396648045, + "acc_stderr": 0.014696599650364545, + "acc_norm": 0.26145251396648045, + "acc_norm_stderr": 0.014696599650364545 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.25, + "acc_stderr": 0.026303648393696036, + "acc_norm": 0.25, + "acc_norm_stderr": 0.026303648393696036 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.27346938775510204, + "acc_stderr": 0.02853556033712844, + "acc_norm": 0.27346938775510204, + "acc_norm_stderr": 0.02853556033712844 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2742616033755274, + "acc_stderr": 0.029041333510598035, + "acc_norm": 0.2742616033755274, + "acc_norm_stderr": 0.029041333510598035 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.26727509778357234, + "acc_stderr": 0.011302607515637525, + "acc_norm": 0.26727509778357234, + "acc_norm_stderr": 0.011302607515637525 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.28921568627450983, + "acc_stderr": 0.03182231867647553, + "acc_norm": 0.28921568627450983, + "acc_norm_stderr": 0.03182231867647553 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2787878787878788, + "acc_stderr": 0.035014387062967806, + "acc_norm": 0.2787878787878788, + "acc_norm_stderr": 0.035014387062967806 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.26438188494492043, + 
"mc1_stderr": 0.015438211119522503, + "mc2": 0.4735407532438836, + "mc2_stderr": 0.016105064055149304 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.24439197166469895, + "acc_stderr": 0.01477428824694931, + "acc_norm": 0.33293978748524206, + "acc_norm_stderr": 0.016202431208373794 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + 
"harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "DopeorNope/Mistralopithecus-v0.1-10.7B", + "model_sha": "f079b2f8a58e65c68bb606977ed13648aa954cc1", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/DopeorNope/SOLAR_C-v1-10.7B/result_2023-12-28 05:34:17.json b/DopeorNope/SOLAR_C-v1-10.7B/result_2023-12-28 05:34:17.json new file mode 100644 index 0000000000000000000000000000000000000000..7cc948bf80c33a049f89376eeeeebd75c11e6e47 --- /dev/null +++ b/DopeorNope/SOLAR_C-v1-10.7B/result_2023-12-28 05:34:17.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4121160409556314, + "acc_stderr": 0.0143839153022254, + "acc_norm": 0.47525597269624575, + "acc_norm_stderr": 0.014593487694937736 + }, + "harness|ko_hellaswag|10": { + "acc": 0.2984465245966939, + "acc_stderr": 0.004566412808642454, + "acc_norm": 0.34475204142601074, + 
"acc_norm_stderr": 0.004743160034271155 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.38011695906432746, + "acc_stderr": 0.037229657413855394, + "acc_norm": 0.38011695906432746, + "acc_norm_stderr": 0.037229657413855394 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5631067961165048, + "acc_stderr": 0.04911147107365777, + "acc_norm": 0.5631067961165048, + "acc_norm_stderr": 0.04911147107365777 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5134099616858238, + "acc_stderr": 0.017873531736510365, + "acc_norm": 0.5134099616858238, + "acc_norm_stderr": 0.017873531736510365 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.42962962962962964, + "acc_stderr": 0.04276349494376599, + "acc_norm": 0.42962962962962964, + "acc_norm_stderr": 0.04276349494376599 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720683, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720683 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39574468085106385, + "acc_stderr": 0.03196758697835363, + "acc_norm": 0.39574468085106385, + "acc_norm_stderr": 0.03196758697835363 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42168674698795183, + "acc_stderr": 0.03844453181770917, + "acc_norm": 0.42168674698795183, + "acc_norm_stderr": 0.03844453181770917 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5144694533762058, + "acc_stderr": 0.028386198084177673, + "acc_norm": 0.5144694533762058, + "acc_norm_stderr": 0.028386198084177673 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.47533632286995514, + "acc_stderr": 0.033516951676526276, + "acc_norm": 0.47533632286995514, + "acc_norm_stderr": 0.033516951676526276 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5114503816793893, + "acc_stderr": 0.04384140024078016, + "acc_norm": 0.5114503816793893, + "acc_norm_stderr": 0.04384140024078016 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + 
"acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6111111111111112, + "acc_stderr": 0.0347327959083696, + "acc_norm": 0.6111111111111112, + "acc_norm_stderr": 0.0347327959083696 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.42758620689655175, + "acc_stderr": 0.04122737111370332, + "acc_norm": 0.42758620689655175, + "acc_norm_stderr": 0.04122737111370332 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.04488482852329017, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.04488482852329017 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5084033613445378, + "acc_stderr": 0.0324739027656967, + "acc_norm": 0.5084033613445378, + "acc_norm_stderr": 0.0324739027656967 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4794871794871795, + "acc_stderr": 0.025329663163489943, + "acc_norm": 0.4794871794871795, + "acc_norm_stderr": 0.025329663163489943 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.58, + "acc_stderr": 0.04960449637488583, + "acc_norm": 0.58, + "acc_norm_stderr": 0.04960449637488583 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.04605661864718381, + "acc_norm": 0.3, + "acc_norm_stderr": 0.04605661864718381 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5462962962962963, + "acc_stderr": 0.04812917324536823, + "acc_norm": 0.5462962962962963, + "acc_norm_stderr": 0.04812917324536823 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3793103448275862, + "acc_stderr": 0.03413963805906235, + "acc_norm": 0.3793103448275862, + "acc_norm_stderr": 0.03413963805906235 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.532258064516129, + "acc_stderr": 0.028384747788813332, + "acc_norm": 0.532258064516129, + "acc_norm_stderr": 0.028384747788813332 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7606837606837606, + "acc_stderr": 
0.027951826808924333, + "acc_norm": 0.7606837606837606, + "acc_norm_stderr": 0.027951826808924333 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.45660377358490567, + "acc_stderr": 0.03065674869673943, + "acc_norm": 0.45660377358490567, + "acc_norm_stderr": 0.03065674869673943 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5818181818181818, + "acc_stderr": 0.04724577405731572, + "acc_norm": 0.5818181818181818, + "acc_norm_stderr": 0.04724577405731572 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.34074074074074073, + "acc_stderr": 0.028897748741131143, + "acc_norm": 0.34074074074074073, + "acc_norm_stderr": 0.028897748741131143 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.32450331125827814, + "acc_stderr": 0.038227469376587525, + "acc_norm": 0.32450331125827814, + "acc_norm_stderr": 0.038227469376587525 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6268656716417911, + "acc_stderr": 0.034198326081760065, + "acc_norm": 0.6268656716417911, + "acc_norm_stderr": 0.034198326081760065 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3872832369942196, + "acc_stderr": 0.03714325906302065, + "acc_norm": 0.3872832369942196, + "acc_norm_stderr": 0.03714325906302065 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3994708994708995, + "acc_stderr": 0.025225450284067877, + "acc_norm": 0.3994708994708995, + "acc_norm_stderr": 0.025225450284067877 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4236111111111111, + "acc_stderr": 0.04132125019723369, + "acc_norm": 0.4236111111111111, + "acc_norm_stderr": 0.04132125019723369 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.68, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.68, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_moral_disputes|5": { + 
"acc": 0.5317919075144508, + "acc_stderr": 0.026864624366756656, + "acc_norm": 0.5317919075144508, + "acc_norm_stderr": 0.026864624366756656 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.49079754601226994, + "acc_stderr": 0.039277056007874414, + "acc_norm": 0.49079754601226994, + "acc_norm_stderr": 0.039277056007874414 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5092592592592593, + "acc_stderr": 0.027815973433878014, + "acc_norm": 0.5092592592592593, + "acc_norm_stderr": 0.027815973433878014 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.34196891191709844, + "acc_stderr": 0.03423465100104282, + "acc_norm": 0.34196891191709844, + "acc_norm_stderr": 0.03423465100104282 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.34210526315789475, + "acc_stderr": 0.04462917535336937, + "acc_norm": 0.34210526315789475, + "acc_norm_stderr": 0.04462917535336937 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.563302752293578, + "acc_stderr": 0.02126482015871421, + "acc_norm": 0.563302752293578, + "acc_norm_stderr": 0.02126482015871421 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.04285714285714281, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.04285714285714281 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4738562091503268, + "acc_stderr": 0.028590752958852394, + "acc_norm": 0.4738562091503268, + "acc_norm_stderr": 0.028590752958852394 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.55, + "acc_stderr": 0.05, + "acc_norm": 0.55, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6694214876033058, + "acc_stderr": 0.042943408452120926, + "acc_norm": 0.6694214876033058, + "acc_norm_stderr": 0.042943408452120926 + }, + "harness|ko_mmlu_astronomy|5": { + 
"acc": 0.5328947368421053, + "acc_stderr": 0.040601270352363966, + "acc_norm": 0.5328947368421053, + "acc_norm_stderr": 0.040601270352363966 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4199346405228758, + "acc_stderr": 0.019966811178256483, + "acc_norm": 0.4199346405228758, + "acc_norm_stderr": 0.019966811178256483 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3900709219858156, + "acc_stderr": 0.029097675599463926, + "acc_norm": 0.3900709219858156, + "acc_norm_stderr": 0.029097675599463926 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3125, + "acc_stderr": 0.043994650575715215, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.043994650575715215 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.375, + "acc_stderr": 0.033016908987210894, + "acc_norm": 0.375, + "acc_norm_stderr": 0.033016908987210894 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.293854748603352, + "acc_stderr": 0.015235075776719603, + "acc_norm": 0.293854748603352, + "acc_norm_stderr": 0.015235075776719603 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.65, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.65, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3602941176470588, + "acc_stderr": 0.029163128570670733, + "acc_norm": 0.3602941176470588, + "acc_norm_stderr": 0.029163128570670733 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5224489795918368, + "acc_stderr": 0.03197694118713672, + "acc_norm": 0.5224489795918368, + "acc_norm_stderr": 0.03197694118713672 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7130801687763713, + "acc_stderr": 0.029443773022594693, + "acc_norm": 0.7130801687763713, + "acc_norm_stderr": 0.029443773022594693 + }, + 
"harness|ko_mmlu_professional_law|5": { + "acc": 0.378748370273794, + "acc_stderr": 0.01238905210500373, + "acc_norm": 0.378748370273794, + "acc_norm_stderr": 0.01238905210500373 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5049019607843137, + "acc_stderr": 0.03509143375606786, + "acc_norm": 0.5049019607843137, + "acc_norm_stderr": 0.03509143375606786 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5333333333333333, + "acc_stderr": 0.03895658065271846, + "acc_norm": 0.5333333333333333, + "acc_norm_stderr": 0.03895658065271846 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3353733170134639, + "mc1_stderr": 0.016527534039668987, + "mc2": 0.4883439191552012, + "mc2_stderr": 0.01576336696184338 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.10153482880755609, + "acc_stderr": 0.010384198041619998, + "acc_norm": 0.30814639905548996, + "acc_norm_stderr": 0.01587451515629839 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + 
"harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "DopeorNope/SOLAR_C-v1-10.7B", + "model_sha": "9521d07028323f3055664fe03904caeac51b6141", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git 
a/DopeorNope/SOLAR_C-v2-10.7B/result_2023-12-31 03:36:54.json b/DopeorNope/SOLAR_C-v2-10.7B/result_2023-12-31 03:36:54.json new file mode 100644 index 0000000000000000000000000000000000000000..33652780923e83b9aaebd2519c4cf39239ee78b2 --- /dev/null +++ b/DopeorNope/SOLAR_C-v2-10.7B/result_2023-12-31 03:36:54.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4377133105802048, + "acc_stderr": 0.014497573881108282, + "acc_norm": 0.4726962457337884, + "acc_norm_stderr": 0.014589589101985994 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4387572196773551, + "acc_stderr": 0.004952209831856589, + "acc_norm": 0.5931089424417447, + "acc_norm_stderr": 0.004902502514738597 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.47368421052631576, + "acc_stderr": 0.038295098689947286, + "acc_norm": 0.47368421052631576, + "acc_norm_stderr": 0.038295098689947286 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6407766990291263, + "acc_stderr": 0.04750458399041697, + "acc_norm": 0.6407766990291263, + "acc_norm_stderr": 0.04750458399041697 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5977011494252874, + "acc_stderr": 0.017535294529068945, + "acc_norm": 0.5977011494252874, + "acc_norm_stderr": 0.017535294529068945 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.04292596718256981, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.04292596718256981 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.5319148936170213, + "acc_stderr": 0.03261936918467382, + "acc_norm": 0.5319148936170213, + "acc_norm_stderr": 0.03261936918467382 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42168674698795183, + "acc_stderr": 0.03844453181770917, + "acc_norm": 0.42168674698795183, + "acc_norm_stderr": 0.03844453181770917 + }, + 
"harness|ko_mmlu_philosophy|5": { + "acc": 0.5691318327974276, + "acc_stderr": 0.02812534098397271, + "acc_norm": 0.5691318327974276, + "acc_norm_stderr": 0.02812534098397271 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5201793721973094, + "acc_stderr": 0.033530461674123, + "acc_norm": 0.5201793721973094, + "acc_norm_stderr": 0.033530461674123 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5038167938931297, + "acc_stderr": 0.043851623256015534, + "acc_norm": 0.5038167938931297, + "acc_norm_stderr": 0.043851623256015534 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.03358618145732523, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.03358618145732523 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5172413793103449, + "acc_stderr": 0.04164188720169375, + "acc_norm": 0.5172413793103449, + "acc_norm_stderr": 0.04164188720169375 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.37254901960784315, + "acc_stderr": 0.04810840148082636, + "acc_norm": 0.37254901960784315, + "acc_norm_stderr": 0.04810840148082636 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5966386554621849, + "acc_stderr": 0.031866081214088314, + "acc_norm": 0.5966386554621849, + "acc_norm_stderr": 0.031866081214088314 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.541025641025641, + "acc_stderr": 0.025265525491284295, + "acc_norm": 0.541025641025641, + "acc_norm_stderr": 0.025265525491284295 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.62, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.62, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + 
}, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6203703703703703, + "acc_stderr": 0.04691521224077742, + "acc_norm": 0.6203703703703703, + "acc_norm_stderr": 0.04691521224077742 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4039408866995074, + "acc_stderr": 0.0345245390382204, + "acc_norm": 0.4039408866995074, + "acc_norm_stderr": 0.0345245390382204 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5516129032258065, + "acc_stderr": 0.02829205683011273, + "acc_norm": 0.5516129032258065, + "acc_norm_stderr": 0.02829205683011273 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7948717948717948, + "acc_stderr": 0.02645350805404034, + "acc_norm": 0.7948717948717948, + "acc_norm_stderr": 0.02645350805404034 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5018867924528302, + "acc_stderr": 0.030772653642075664, + "acc_norm": 0.5018867924528302, + "acc_norm_stderr": 0.030772653642075664 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.04769300568972744, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.04769300568972744 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.029723278961476664, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.029723278961476664 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3841059602649007, + "acc_stderr": 0.03971301814719197, + "acc_norm": 0.3841059602649007, + "acc_norm_stderr": 0.03971301814719197 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6318407960199005, + "acc_stderr": 0.03410410565495302, + "acc_norm": 0.6318407960199005, + "acc_norm_stderr": 0.03410410565495302 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4682080924855491, + "acc_stderr": 0.03804749744364764, + "acc_norm": 0.4682080924855491, + "acc_norm_stderr": 0.03804749744364764 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.41534391534391535, + "acc_stderr": 
0.025379524910778408, + "acc_norm": 0.41534391534391535, + "acc_norm_stderr": 0.025379524910778408 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4652777777777778, + "acc_stderr": 0.04171115858181618, + "acc_norm": 0.4652777777777778, + "acc_norm_stderr": 0.04171115858181618 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.67, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.67, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.546242774566474, + "acc_stderr": 0.026803720583206177, + "acc_norm": 0.546242774566474, + "acc_norm_stderr": 0.026803720583206177 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4785276073619632, + "acc_stderr": 0.0392474687675113, + "acc_norm": 0.4785276073619632, + "acc_norm_stderr": 0.0392474687675113 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5432098765432098, + "acc_stderr": 0.027716661650194038, + "acc_norm": 0.5432098765432098, + "acc_norm_stderr": 0.027716661650194038 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6994818652849741, + "acc_stderr": 0.0330881859441575, + "acc_norm": 0.6994818652849741, + "acc_norm_stderr": 0.0330881859441575 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.4473684210526316, + "acc_stderr": 0.04677473004491199, + "acc_norm": 0.4473684210526316, + "acc_norm_stderr": 0.04677473004491199 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6678899082568808, + "acc_stderr": 0.020192682985423344, + "acc_norm": 0.6678899082568808, + "acc_norm_stderr": 0.020192682985423344 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.42063492063492064, + 
"acc_stderr": 0.04415438226743744, + "acc_norm": 0.42063492063492064, + "acc_norm_stderr": 0.04415438226743744 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.49019607843137253, + "acc_stderr": 0.028624412550167965, + "acc_norm": 0.49019607843137253, + "acc_norm_stderr": 0.028624412550167965 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.63, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.63, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6528925619834711, + "acc_stderr": 0.04345724570292534, + "acc_norm": 0.6528925619834711, + "acc_norm_stderr": 0.04345724570292534 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5328947368421053, + "acc_stderr": 0.040601270352363966, + "acc_norm": 0.5328947368421053, + "acc_norm_stderr": 0.040601270352363966 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4934640522875817, + "acc_stderr": 0.020226106567657807, + "acc_norm": 0.4934640522875817, + "acc_norm_stderr": 0.020226106567657807 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3900709219858156, + "acc_stderr": 0.02909767559946393, + "acc_norm": 0.3900709219858156, + "acc_norm_stderr": 0.02909767559946393 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.39285714285714285, + "acc_stderr": 0.04635550135609976, + "acc_norm": 0.39285714285714285, + "acc_norm_stderr": 0.04635550135609976 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4675925925925926, + "acc_stderr": 0.03402801581358966, + "acc_norm": 0.4675925925925926, + "acc_norm_stderr": 0.03402801581358966 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.3396648044692737, + "acc_stderr": 0.015839400406212494, + "acc_norm": 0.3396648044692737, + "acc_norm_stderr": 0.015839400406212494 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + 
"harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.66, + "acc_stderr": 0.04760952285695237, + "acc_norm": 0.66, + "acc_norm_stderr": 0.04760952285695237 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4632352941176471, + "acc_stderr": 0.03029061918048569, + "acc_norm": 0.4632352941176471, + "acc_norm_stderr": 0.03029061918048569 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5142857142857142, + "acc_stderr": 0.03199615232806286, + "acc_norm": 0.5142857142857142, + "acc_norm_stderr": 0.03199615232806286 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7257383966244726, + "acc_stderr": 0.029041333510598025, + "acc_norm": 0.7257383966244726, + "acc_norm_stderr": 0.029041333510598025 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.39308996088657105, + "acc_stderr": 0.012474899613873958, + "acc_norm": 0.39308996088657105, + "acc_norm_stderr": 0.012474899613873958 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6421568627450981, + "acc_stderr": 0.03364487286088299, + "acc_norm": 0.6421568627450981, + "acc_norm_stderr": 0.03364487286088299 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6484848484848484, + "acc_stderr": 0.037282069986826503, + "acc_norm": 0.6484848484848484, + "acc_norm_stderr": 0.037282069986826503 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3072215422276622, + "mc1_stderr": 0.016150201321323002, + "mc2": 0.4665292926452916, + "mc2_stderr": 0.015401236485664634 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4793388429752066, + "acc_stderr": 0.017175671279836446, + "acc_norm": 0.5360094451003542, + "acc_norm_stderr": 0.01714571536548667 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + 
"harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + 
"harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "DopeorNope/SOLAR_C-v2-10.7B", + "model_sha": "cadae29eebed0be5db4f227ee918e9e988c9715b", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/DopeorNope/SOLAR_D-v2-10.7B/result_2023-12-31 13:40:31.json b/DopeorNope/SOLAR_D-v2-10.7B/result_2023-12-31 13:40:31.json new file mode 100644 index 0000000000000000000000000000000000000000..cfb7265d9ee9cbb85364efca6a34e1e357347ef8 --- /dev/null +++ b/DopeorNope/SOLAR_D-v2-10.7B/result_2023-12-31 13:40:31.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.44197952218430037, + "acc_stderr": 0.014512682523128342, + "acc_norm": 0.47525597269624575, + "acc_norm_stderr": 0.014593487694937738 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4400517825134435, + "acc_stderr": 0.004953787146510924, + "acc_norm": 0.5938060147381, + "acc_norm_stderr": 0.0049011789179008464 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4678362573099415, + "acc_stderr": 0.03826882417660368, + "acc_norm": 0.4678362573099415, + "acc_norm_stderr": 0.03826882417660368 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6504854368932039, + "acc_stderr": 0.047211885060971716, + "acc_norm": 0.6504854368932039, + "acc_norm_stderr": 0.047211885060971716 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5964240102171137, + "acc_stderr": 0.01754433223792644, + "acc_norm": 0.5964240102171137, + "acc_norm_stderr": 
0.01754433223792644 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.04292596718256981, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.04292596718256981 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.5276595744680851, + "acc_stderr": 0.03263597118409769, + "acc_norm": 0.5276595744680851, + "acc_norm_stderr": 0.03263597118409769 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42168674698795183, + "acc_stderr": 0.03844453181770917, + "acc_norm": 0.42168674698795183, + "acc_norm_stderr": 0.03844453181770917 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5691318327974276, + "acc_stderr": 0.02812534098397271, + "acc_norm": 0.5691318327974276, + "acc_norm_stderr": 0.02812534098397271 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5246636771300448, + "acc_stderr": 0.03351695167652628, + "acc_norm": 0.5246636771300448, + "acc_norm_stderr": 0.03351695167652628 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5038167938931297, + "acc_stderr": 0.043851623256015534, + "acc_norm": 0.5038167938931297, + "acc_norm_stderr": 0.043851623256015534 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.42, + "acc_stderr": 0.04960449637488583, + "acc_norm": 0.42, + "acc_norm_stderr": 0.04960449637488583 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6717171717171717, + "acc_stderr": 0.03345678422756776, + "acc_norm": 0.6717171717171717, + "acc_norm_stderr": 0.03345678422756776 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5241379310344828, + "acc_stderr": 0.041618085035015295, + "acc_norm": 0.5241379310344828, + "acc_norm_stderr": 0.041618085035015295 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.35294117647058826, + "acc_stderr": 0.047551296160629475, + "acc_norm": 0.35294117647058826, + 
"acc_norm_stderr": 0.047551296160629475 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.592436974789916, + "acc_stderr": 0.03191863374478466, + "acc_norm": 0.592436974789916, + "acc_norm_stderr": 0.03191863374478466 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5358974358974359, + "acc_stderr": 0.02528558599001786, + "acc_norm": 0.5358974358974359, + "acc_norm_stderr": 0.02528558599001786 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.62, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.62, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6203703703703703, + "acc_stderr": 0.04691521224077742, + "acc_norm": 0.6203703703703703, + "acc_norm_stderr": 0.04691521224077742 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39901477832512317, + "acc_stderr": 0.03445487686264716, + "acc_norm": 0.39901477832512317, + "acc_norm_stderr": 0.03445487686264716 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5516129032258065, + "acc_stderr": 0.02829205683011273, + "acc_norm": 0.5516129032258065, + "acc_norm_stderr": 0.02829205683011273 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7905982905982906, + "acc_stderr": 0.026655699653922737, + "acc_norm": 0.7905982905982906, + "acc_norm_stderr": 0.026655699653922737 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5094339622641509, + "acc_stderr": 0.0307673947078081, + "acc_norm": 0.5094339622641509, + "acc_norm_stderr": 0.0307673947078081 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.04769300568972744, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.04769300568972744 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3814814814814815, + "acc_stderr": 
0.02961671892749759, + "acc_norm": 0.3814814814814815, + "acc_norm_stderr": 0.02961671892749759 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.37748344370860926, + "acc_stderr": 0.0395802723112157, + "acc_norm": 0.37748344370860926, + "acc_norm_stderr": 0.0395802723112157 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6318407960199005, + "acc_stderr": 0.03410410565495302, + "acc_norm": 0.6318407960199005, + "acc_norm_stderr": 0.03410410565495302 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4624277456647399, + "acc_stderr": 0.0380168510452446, + "acc_norm": 0.4624277456647399, + "acc_norm_stderr": 0.0380168510452446 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.41798941798941797, + "acc_stderr": 0.02540255550326091, + "acc_norm": 0.41798941798941797, + "acc_norm_stderr": 0.02540255550326091 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4583333333333333, + "acc_stderr": 0.04166666666666665, + "acc_norm": 0.4583333333333333, + "acc_norm_stderr": 0.04166666666666665 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.67, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.67, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5491329479768786, + "acc_stderr": 0.02678881193156276, + "acc_norm": 0.5491329479768786, + "acc_norm_stderr": 0.02678881193156276 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.48466257668711654, + "acc_stderr": 0.039265223787088424, + "acc_norm": 0.48466257668711654, + "acc_norm_stderr": 0.039265223787088424 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5462962962962963, + "acc_stderr": 0.027701228468542595, + "acc_norm": 0.5462962962962963, + "acc_norm_stderr": 0.027701228468542595 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + 
"acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6994818652849741, + "acc_stderr": 0.0330881859441575, + "acc_norm": 0.6994818652849741, + "acc_norm_stderr": 0.0330881859441575 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.4473684210526316, + "acc_stderr": 0.04677473004491199, + "acc_norm": 0.4473684210526316, + "acc_norm_stderr": 0.04677473004491199 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6623853211009174, + "acc_stderr": 0.020275265986638907, + "acc_norm": 0.6623853211009174, + "acc_norm_stderr": 0.020275265986638907 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4126984126984127, + "acc_stderr": 0.04403438954768177, + "acc_norm": 0.4126984126984127, + "acc_norm_stderr": 0.04403438954768177 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4934640522875817, + "acc_stderr": 0.028627470550556047, + "acc_norm": 0.4934640522875817, + "acc_norm_stderr": 0.028627470550556047 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.63, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.63, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6611570247933884, + "acc_stderr": 0.04320767807536671, + "acc_norm": 0.6611570247933884, + "acc_norm_stderr": 0.04320767807536671 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5394736842105263, + "acc_stderr": 0.04056242252249034, + "acc_norm": 0.5394736842105263, + "acc_norm_stderr": 0.04056242252249034 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.48856209150326796, + "acc_stderr": 0.02022254151561087, + "acc_norm": 0.48856209150326796, + "acc_norm_stderr": 0.02022254151561087 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3900709219858156, + "acc_stderr": 0.02909767559946393, + "acc_norm": 0.3900709219858156, + "acc_norm_stderr": 0.02909767559946393 + }, + 
"harness|ko_mmlu_machine_learning|5": { + "acc": 0.39285714285714285, + "acc_stderr": 0.04635550135609976, + "acc_norm": 0.39285714285714285, + "acc_norm_stderr": 0.04635550135609976 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.03400603625538272, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.03400603625538272 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.329608938547486, + "acc_stderr": 0.01572153107518388, + "acc_norm": 0.329608938547486, + "acc_norm_stderr": 0.01572153107518388 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.66, + "acc_stderr": 0.04760952285695237, + "acc_norm": 0.66, + "acc_norm_stderr": 0.04760952285695237 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.45955882352941174, + "acc_stderr": 0.030273325077345755, + "acc_norm": 0.45955882352941174, + "acc_norm_stderr": 0.030273325077345755 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5142857142857142, + "acc_stderr": 0.03199615232806286, + "acc_norm": 0.5142857142857142, + "acc_norm_stderr": 0.03199615232806286 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7257383966244726, + "acc_stderr": 0.029041333510598025, + "acc_norm": 0.7257383966244726, + "acc_norm_stderr": 0.029041333510598025 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3898305084745763, + "acc_stderr": 0.012456386619082598, + "acc_norm": 0.3898305084745763, + "acc_norm_stderr": 0.012456386619082598 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6421568627450981, + "acc_stderr": 0.03364487286088299, + "acc_norm": 0.6421568627450981, + "acc_norm_stderr": 0.03364487286088299 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6424242424242425, + "acc_stderr": 0.03742597043806587, + 
"acc_norm": 0.6424242424242425, + "acc_norm_stderr": 0.03742597043806587 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.30599755201958384, + "mc1_stderr": 0.016132229728155055, + "mc2": 0.4675763272729592, + "mc2_stderr": 0.01542512830327087 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4781582054309327, + "acc_stderr": 0.017173944474294375, + "acc_norm": 0.5312868949232585, + "acc_norm_stderr": 0.017156666859785483 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + 
"harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "DopeorNope/SOLAR_D-v2-10.7B", + "model_sha": "e6ee5ef0a6f5c8c4497f8a439b347d2a540a2b34", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/DopeorNope/Yi_lee-v1-6B/result_2023-12-05 15:04:18.json b/DopeorNope/Yi_lee-v1-6B/result_2023-12-05 15:04:18.json new file mode 100644 index 0000000000000000000000000000000000000000..ad01e028b508e9450f624b281361e92547feab38 --- /dev/null +++ b/DopeorNope/Yi_lee-v1-6B/result_2023-12-05 15:04:18.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3532423208191126, + "acc_stderr": 0.013967822714840055, + "acc_norm": 0.4129692832764505, + "acc_norm_stderr": 0.014388344935398324 + }, + 
"harness|ko_hellaswag|10": { + "acc": 0.3986257717586138, + "acc_stderr": 0.004886147907627406, + "acc_norm": 0.5336586337382991, + "acc_norm_stderr": 0.004978462690966918 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5087719298245614, + "acc_stderr": 0.038342347441649924, + "acc_norm": 0.5087719298245614, + "acc_norm_stderr": 0.038342347441649924 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6019417475728155, + "acc_stderr": 0.048467482539772386, + "acc_norm": 0.6019417475728155, + "acc_norm_stderr": 0.048467482539772386 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.545338441890166, + "acc_stderr": 0.0178063045850526, + "acc_norm": 0.545338441890166, + "acc_norm_stderr": 0.0178063045850526 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45925925925925926, + "acc_stderr": 0.04304979692464244, + "acc_norm": 0.45925925925925926, + "acc_norm_stderr": 0.04304979692464244 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.425531914893617, + "acc_stderr": 0.032321469162244695, + "acc_norm": 0.425531914893617, + "acc_norm_stderr": 0.032321469162244695 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.40963855421686746, + "acc_stderr": 0.03828401115079022, + "acc_norm": 0.40963855421686746, + "acc_norm_stderr": 0.03828401115079022 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5401929260450161, + "acc_stderr": 0.028306190403305693, + "acc_norm": 0.5401929260450161, + "acc_norm_stderr": 0.028306190403305693 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.47533632286995514, + "acc_stderr": 0.03351695167652628, + "acc_norm": 0.47533632286995514, + "acc_norm_stderr": 0.03351695167652628 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5267175572519084, + "acc_stderr": 0.04379024936553894, + "acc_norm": 0.5267175572519084, + "acc_norm_stderr": 0.04379024936553894 + }, 
+ "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.03427308652999936, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.03427308652999936 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5310344827586206, + "acc_stderr": 0.04158632762097828, + "acc_norm": 0.5310344827586206, + "acc_norm_stderr": 0.04158632762097828 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.04617034827006717, + "acc_norm": 0.3137254901960784, + "acc_norm_stderr": 0.04617034827006717 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5294117647058824, + "acc_stderr": 0.03242225027115006, + "acc_norm": 0.5294117647058824, + "acc_norm_stderr": 0.03242225027115006 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4794871794871795, + "acc_stderr": 0.025329663163489943, + "acc_norm": 0.4794871794871795, + "acc_norm_stderr": 0.025329663163489943 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5092592592592593, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.5092592592592593, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4088669950738916, + "acc_stderr": 0.034590588158832314, + "acc_norm": 0.4088669950738916, + "acc_norm_stderr": 0.034590588158832314 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5064516129032258, + "acc_stderr": 0.02844163823354051, + "acc_norm": 0.5064516129032258, + "acc_norm_stderr": 
0.02844163823354051 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7350427350427351, + "acc_stderr": 0.028911208802749472, + "acc_norm": 0.7350427350427351, + "acc_norm_stderr": 0.028911208802749472 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.49433962264150944, + "acc_stderr": 0.030770900763851316, + "acc_norm": 0.49433962264150944, + "acc_norm_stderr": 0.030770900763851316 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.32222222222222224, + "acc_stderr": 0.028493465091028597, + "acc_norm": 0.32222222222222224, + "acc_norm_stderr": 0.028493465091028597 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3576158940397351, + "acc_stderr": 0.03913453431177258, + "acc_norm": 0.3576158940397351, + "acc_norm_stderr": 0.03913453431177258 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5970149253731343, + "acc_stderr": 0.034683432951111266, + "acc_norm": 0.5970149253731343, + "acc_norm_stderr": 0.034683432951111266 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.45664739884393063, + "acc_stderr": 0.03798106566014498, + "acc_norm": 0.45664739884393063, + "acc_norm_stderr": 0.03798106566014498 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.34656084656084657, + "acc_stderr": 0.024508777521028424, + "acc_norm": 0.34656084656084657, + "acc_norm_stderr": 0.024508777521028424 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.041553199555931467, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.041553199555931467 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.3, + "acc_stderr": 0.04605661864718381, + "acc_norm": 0.3, + "acc_norm_stderr": 0.04605661864718381 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + 
"acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5028901734104047, + "acc_stderr": 0.02691864538323901, + "acc_norm": 0.5028901734104047, + "acc_norm_stderr": 0.02691864538323901 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4785276073619632, + "acc_stderr": 0.03924746876751129, + "acc_norm": 0.4785276073619632, + "acc_norm_stderr": 0.03924746876751129 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5123456790123457, + "acc_stderr": 0.027812262269327242, + "acc_norm": 0.5123456790123457, + "acc_norm_stderr": 0.027812262269327242 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5854922279792746, + "acc_stderr": 0.035553003195576686, + "acc_norm": 0.5854922279792746, + "acc_norm_stderr": 0.035553003195576686 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.34210526315789475, + "acc_stderr": 0.044629175353369376, + "acc_norm": 0.34210526315789475, + "acc_norm_stderr": 0.044629175353369376 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6440366972477064, + "acc_stderr": 0.020528559278244218, + "acc_norm": 0.6440366972477064, + "acc_norm_stderr": 0.020528559278244218 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2698412698412698, + "acc_stderr": 0.039701582732351734, + "acc_norm": 0.2698412698412698, + "acc_norm_stderr": 0.039701582732351734 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.49673202614379086, + "acc_stderr": 0.02862930519400354, + "acc_norm": 0.49673202614379086, + "acc_norm_stderr": 0.02862930519400354 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6446280991735537, + "acc_stderr": 
0.0436923632657398, + "acc_norm": 0.6446280991735537, + "acc_norm_stderr": 0.0436923632657398 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4934210526315789, + "acc_stderr": 0.040685900502249704, + "acc_norm": 0.4934210526315789, + "acc_norm_stderr": 0.040685900502249704 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4133986928104575, + "acc_stderr": 0.019922115682786685, + "acc_norm": 0.4133986928104575, + "acc_norm_stderr": 0.019922115682786685 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3617021276595745, + "acc_stderr": 0.028663820147199502, + "acc_norm": 0.3617021276595745, + "acc_norm_stderr": 0.028663820147199502 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.33035714285714285, + "acc_stderr": 0.04464285714285712, + "acc_norm": 0.33035714285714285, + "acc_norm_stderr": 0.04464285714285712 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3472222222222222, + "acc_stderr": 0.03246887243637649, + "acc_norm": 0.3472222222222222, + "acc_norm_stderr": 0.03246887243637649 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24022346368715083, + "acc_stderr": 0.014288343803925295, + "acc_norm": 0.24022346368715083, + "acc_norm_stderr": 0.014288343803925295 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.53, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.53, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4485294117647059, + "acc_stderr": 0.030211479609121603, + "acc_norm": 0.4485294117647059, + "acc_norm_stderr": 0.030211479609121603 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4530612244897959, + "acc_stderr": 0.03186785930004129, + "acc_norm": 0.4530612244897959, + "acc_norm_stderr": 0.03186785930004129 + }, + 
"harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6455696202531646, + "acc_stderr": 0.0311373042971858, + "acc_norm": 0.6455696202531646, + "acc_norm_stderr": 0.0311373042971858 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3272490221642764, + "acc_stderr": 0.011983819806464747, + "acc_norm": 0.3272490221642764, + "acc_norm_stderr": 0.011983819806464747 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5392156862745098, + "acc_stderr": 0.03498501649369527, + "acc_norm": 0.5392156862745098, + "acc_norm_stderr": 0.03498501649369527 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5818181818181818, + "acc_stderr": 0.038517163193983926, + "acc_norm": 0.5818181818181818, + "acc_norm_stderr": 0.038517163193983926 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2607099143206854, + "mc1_stderr": 0.015368841620766372, + "mc2": 0.4054851425091592, + "mc2_stderr": 0.014739428749798467 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.538370720188902, + "acc_stderr": 0.017139660221845553, + "acc_norm": 0.5714285714285714, + "acc_norm_stderr": 0.017014038119297498 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + 
"harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "DopeorNope/Yi_lee-v1-6B", + "model_sha": "74357eee5f2ba34e74129c7955b9cf228e68d857", + "model_dtype": "torch.float16", + 
"lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/DopeorNope/Yi_lee-v1-DPO-6B/result_2023-12-06 09:37:46.json b/DopeorNope/Yi_lee-v1-DPO-6B/result_2023-12-06 09:37:46.json new file mode 100644 index 0000000000000000000000000000000000000000..dcc6df1d6b8a940386d2e6dd309d1f8d341ea7db --- /dev/null +++ b/DopeorNope/Yi_lee-v1-DPO-6B/result_2023-12-06 09:37:46.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3532423208191126, + "acc_stderr": 0.013967822714840055, + "acc_norm": 0.4129692832764505, + "acc_norm_stderr": 0.014388344935398324 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3988249352718582, + "acc_stderr": 0.004886559008754986, + "acc_norm": 0.5338577972515435, + "acc_norm_stderr": 0.004978328190775524 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5087719298245614, + "acc_stderr": 0.038342347441649924, + "acc_norm": 0.5087719298245614, + "acc_norm_stderr": 0.038342347441649924 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6019417475728155, + "acc_stderr": 0.048467482539772386, + "acc_norm": 0.6019417475728155, + "acc_norm_stderr": 0.048467482539772386 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5440613026819924, + "acc_stderr": 0.01781040392543535, + "acc_norm": 0.5440613026819924, + "acc_norm_stderr": 0.01781040392543535 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4740740740740741, + "acc_stderr": 0.04313531696750574, + "acc_norm": 0.4740740740740741, + "acc_norm_stderr": 0.04313531696750574 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.42127659574468085, + "acc_stderr": 0.03227834510146267, + "acc_norm": 0.42127659574468085, + "acc_norm_stderr": 0.03227834510146267 + }, + 
"harness|ko_mmlu_virology|5": { + "acc": 0.40963855421686746, + "acc_stderr": 0.03828401115079022, + "acc_norm": 0.40963855421686746, + "acc_norm_stderr": 0.03828401115079022 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5401929260450161, + "acc_stderr": 0.028306190403305693, + "acc_norm": 0.5401929260450161, + "acc_norm_stderr": 0.028306190403305693 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.47085201793721976, + "acc_stderr": 0.03350073248773404, + "acc_norm": 0.47085201793721976, + "acc_norm_stderr": 0.03350073248773404 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5267175572519084, + "acc_stderr": 0.04379024936553894, + "acc_norm": 0.5267175572519084, + "acc_norm_stderr": 0.04379024936553894 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.03427308652999936, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.03427308652999936 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5310344827586206, + "acc_stderr": 0.04158632762097828, + "acc_norm": 0.5310344827586206, + "acc_norm_stderr": 0.04158632762097828 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.04617034827006717, + "acc_norm": 0.3137254901960784, + "acc_norm_stderr": 0.04617034827006717 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5252100840336135, + "acc_stderr": 0.03243718055137411, + "acc_norm": 0.5252100840336135, + "acc_norm_stderr": 0.03243718055137411 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4846153846153846, + "acc_stderr": 0.025339003010106522, + "acc_norm": 0.4846153846153846, + "acc_norm_stderr": 0.025339003010106522 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + 
"acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5092592592592593, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.5092592592592593, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4088669950738916, + "acc_stderr": 0.034590588158832314, + "acc_norm": 0.4088669950738916, + "acc_norm_stderr": 0.034590588158832314 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5096774193548387, + "acc_stderr": 0.02843867799890955, + "acc_norm": 0.5096774193548387, + "acc_norm_stderr": 0.02843867799890955 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7350427350427351, + "acc_stderr": 0.028911208802749472, + "acc_norm": 0.7350427350427351, + "acc_norm_stderr": 0.028911208802749472 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.49433962264150944, + "acc_stderr": 0.030770900763851316, + "acc_norm": 0.49433962264150944, + "acc_norm_stderr": 0.030770900763851316 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.32222222222222224, + "acc_stderr": 0.028493465091028597, + "acc_norm": 0.32222222222222224, + "acc_norm_stderr": 0.028493465091028597 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3576158940397351, + "acc_stderr": 0.03913453431177258, + "acc_norm": 0.3576158940397351, + "acc_norm_stderr": 0.03913453431177258 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5970149253731343, + "acc_stderr": 0.034683432951111266, + "acc_norm": 0.5970149253731343, + "acc_norm_stderr": 0.034683432951111266 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4508670520231214, + "acc_stderr": 
0.037940126746970296, + "acc_norm": 0.4508670520231214, + "acc_norm_stderr": 0.037940126746970296 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.34656084656084657, + "acc_stderr": 0.024508777521028424, + "acc_norm": 0.34656084656084657, + "acc_norm_stderr": 0.024508777521028424 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4305555555555556, + "acc_stderr": 0.04140685639111503, + "acc_norm": 0.4305555555555556, + "acc_norm_stderr": 0.04140685639111503 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.3, + "acc_stderr": 0.04605661864718381, + "acc_norm": 0.3, + "acc_norm_stderr": 0.04605661864718381 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5028901734104047, + "acc_stderr": 0.02691864538323901, + "acc_norm": 0.5028901734104047, + "acc_norm_stderr": 0.02691864538323901 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4723926380368098, + "acc_stderr": 0.039223782906109894, + "acc_norm": 0.4723926380368098, + "acc_norm_stderr": 0.039223782906109894 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5123456790123457, + "acc_stderr": 0.027812262269327242, + "acc_norm": 0.5123456790123457, + "acc_norm_stderr": 0.027812262269327242 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5854922279792746, + "acc_stderr": 0.035553003195576686, + "acc_norm": 0.5854922279792746, + "acc_norm_stderr": 0.035553003195576686 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04434600701584925, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04434600701584925 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 
0.6385321100917432, + "acc_stderr": 0.02059808200993736, + "acc_norm": 0.6385321100917432, + "acc_norm_stderr": 0.02059808200993736 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.040061680838488774, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.040061680838488774 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.49673202614379086, + "acc_stderr": 0.02862930519400354, + "acc_norm": 0.49673202614379086, + "acc_norm_stderr": 0.02862930519400354 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6446280991735537, + "acc_stderr": 0.0436923632657398, + "acc_norm": 0.6446280991735537, + "acc_norm_stderr": 0.0436923632657398 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4934210526315789, + "acc_stderr": 0.040685900502249704, + "acc_norm": 0.4934210526315789, + "acc_norm_stderr": 0.040685900502249704 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4133986928104575, + "acc_stderr": 0.019922115682786685, + "acc_norm": 0.4133986928104575, + "acc_norm_stderr": 0.019922115682786685 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3617021276595745, + "acc_stderr": 0.028663820147199502, + "acc_norm": 0.3617021276595745, + "acc_norm_stderr": 0.028663820147199502 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.33035714285714285, + "acc_stderr": 0.04464285714285712, + "acc_norm": 0.33035714285714285, + "acc_norm_stderr": 0.04464285714285712 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3425925925925926, + "acc_stderr": 0.03236585252602157, + "acc_norm": 0.3425925925925926, + "acc_norm_stderr": 0.03236585252602157 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, 
+ "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.53, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.53, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.44485294117647056, + "acc_stderr": 0.030187532060329387, + "acc_norm": 0.44485294117647056, + "acc_norm_stderr": 0.030187532060329387 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4489795918367347, + "acc_stderr": 0.03184213866687579, + "acc_norm": 0.4489795918367347, + "acc_norm_stderr": 0.03184213866687579 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6497890295358649, + "acc_stderr": 0.03105239193758435, + "acc_norm": 0.6497890295358649, + "acc_norm_stderr": 0.03105239193758435 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3259452411994785, + "acc_stderr": 0.011971507294982777, + "acc_norm": 0.3259452411994785, + "acc_norm_stderr": 0.011971507294982777 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5392156862745098, + "acc_stderr": 0.03498501649369527, + "acc_norm": 0.5392156862745098, + "acc_norm_stderr": 0.03498501649369527 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5757575757575758, + "acc_stderr": 0.03859268142070264, + "acc_norm": 0.5757575757575758, + "acc_norm_stderr": 0.03859268142070264 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2607099143206854, + "mc1_stderr": 0.015368841620766372, + "mc2": 0.4052668487406766, + "mc2_stderr": 0.014737077394415871 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5348288075560803, + "acc_stderr": 0.017148598015747425, + "acc_norm": 0.5714285714285714, + "acc_norm_stderr": 0.017014038119297498 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 
1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + 
"harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "DopeorNope/Yi_lee-v1-DPO-6B", + "model_sha": "fc6c12a061bb2f785c762593f50a2da372b1c5e8", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/DopeorNope/Yi_lee-v2-DPO-6B/result_2023-12-12 07:57:04.json b/DopeorNope/Yi_lee-v2-DPO-6B/result_2023-12-12 07:57:04.json new file mode 100644 index 0000000000000000000000000000000000000000..c73ff3e0e98ca56f04e813850208c4a628b4a451 --- /dev/null +++ b/DopeorNope/Yi_lee-v2-DPO-6B/result_2023-12-12 07:57:04.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3506825938566553, + "acc_stderr": 0.01394463593072609, + "acc_norm": 0.40784982935153585, + "acc_norm_stderr": 0.014361097288449696 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3966341366261701, + "acc_stderr": 0.004881990487628913, + "acc_norm": 0.527185819557857, + "acc_norm_stderr": 0.004982400368939667 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.49122807017543857, + "acc_stderr": 0.038342347441649924, + "acc_norm": 0.49122807017543857, + "acc_norm_stderr": 0.038342347441649924 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6504854368932039, + "acc_stderr": 0.047211885060971716, + "acc_norm": 0.6504854368932039, + "acc_norm_stderr": 0.047211885060971716 + }, + 
"harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5593869731800766, + "acc_stderr": 0.01775339697390848, + "acc_norm": 0.5593869731800766, + "acc_norm_stderr": 0.01775339697390848 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4740740740740741, + "acc_stderr": 0.04313531696750574, + "acc_norm": 0.4740740740740741, + "acc_norm_stderr": 0.04313531696750574 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720683, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720683 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.41702127659574467, + "acc_stderr": 0.03223276266711712, + "acc_norm": 0.41702127659574467, + "acc_norm_stderr": 0.03223276266711712 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3855421686746988, + "acc_stderr": 0.0378913442461155, + "acc_norm": 0.3855421686746988, + "acc_norm_stderr": 0.0378913442461155 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5209003215434084, + "acc_stderr": 0.02837327096106942, + "acc_norm": 0.5209003215434084, + "acc_norm_stderr": 0.02837327096106942 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4349775784753363, + "acc_stderr": 0.033272833702713445, + "acc_norm": 0.4349775784753363, + "acc_norm_stderr": 0.033272833702713445 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5572519083969466, + "acc_stderr": 0.04356447202665069, + "acc_norm": 0.5572519083969466, + "acc_norm_stderr": 0.04356447202665069 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6212121212121212, + "acc_stderr": 0.03456088731993747, + "acc_norm": 0.6212121212121212, + "acc_norm_stderr": 0.03456088731993747 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.43448275862068964, + "acc_stderr": 0.04130740879555498, + "acc_norm": 0.43448275862068964, + "acc_norm_stderr": 0.04130740879555498 + }, + 
"harness|ko_mmlu_college_physics|5": { + "acc": 0.19607843137254902, + "acc_stderr": 0.03950581861179964, + "acc_norm": 0.19607843137254902, + "acc_norm_stderr": 0.03950581861179964 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5042016806722689, + "acc_stderr": 0.03247734334448111, + "acc_norm": 0.5042016806722689, + "acc_norm_stderr": 0.03247734334448111 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4717948717948718, + "acc_stderr": 0.0253106392549339, + "acc_norm": 0.4717948717948718, + "acc_norm_stderr": 0.0253106392549339 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.59, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.59, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.04826217294139894, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.04826217294139894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.37438423645320196, + "acc_stderr": 0.03405155380561952, + "acc_norm": 0.37438423645320196, + "acc_norm_stderr": 0.03405155380561952 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.46774193548387094, + "acc_stderr": 0.02838474778881333, + "acc_norm": 0.46774193548387094, + "acc_norm_stderr": 0.02838474778881333 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7094017094017094, + "acc_stderr": 0.029745048572674078, + "acc_norm": 0.7094017094017094, + "acc_norm_stderr": 0.029745048572674078 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4867924528301887, + "acc_stderr": 0.030762134874500482, + "acc_norm": 0.4867924528301887, + "acc_norm_stderr": 0.030762134874500482 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4727272727272727, + "acc_stderr": 0.04782001791380063, + "acc_norm": 0.4727272727272727, + 
"acc_norm_stderr": 0.04782001791380063 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.027309140588230182, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.027309140588230182 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.32450331125827814, + "acc_stderr": 0.038227469376587525, + "acc_norm": 0.32450331125827814, + "acc_norm_stderr": 0.038227469376587525 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5373134328358209, + "acc_stderr": 0.035256751674679745, + "acc_norm": 0.5373134328358209, + "acc_norm_stderr": 0.035256751674679745 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4393063583815029, + "acc_stderr": 0.03784271932887467, + "acc_norm": 0.4393063583815029, + "acc_norm_stderr": 0.03784271932887467 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3386243386243386, + "acc_stderr": 0.02437319786798305, + "acc_norm": 0.3386243386243386, + "acc_norm_stderr": 0.02437319786798305 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.041227287076512825, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.041227287076512825 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.56, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.56, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5317919075144508, + "acc_stderr": 0.026864624366756653, + "acc_norm": 0.5317919075144508, + "acc_norm_stderr": 0.026864624366756653 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.50920245398773, + "acc_stderr": 0.03927705600787443, + "acc_norm": 0.50920245398773, + "acc_norm_stderr": 0.03927705600787443 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4506172839506173, + "acc_stderr": 0.027684721415656203, + 
"acc_norm": 0.4506172839506173, + "acc_norm_stderr": 0.027684721415656203 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5699481865284974, + "acc_stderr": 0.03572954333144808, + "acc_norm": 0.5699481865284974, + "acc_norm_stderr": 0.03572954333144808 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3157894736842105, + "acc_stderr": 0.04372748290278008, + "acc_norm": 0.3157894736842105, + "acc_norm_stderr": 0.04372748290278008 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6275229357798165, + "acc_stderr": 0.0207283684576385, + "acc_norm": 0.6275229357798165, + "acc_norm_stderr": 0.0207283684576385 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.04134913018303316, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.04134913018303316 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4117647058823529, + "acc_stderr": 0.028180596328259293, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.028180596328259293 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7024793388429752, + "acc_stderr": 0.04173349148083499, + "acc_norm": 0.7024793388429752, + "acc_norm_stderr": 0.04173349148083499 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40789473684210525, + "acc_stderr": 0.0399930971277747, + "acc_norm": 0.40789473684210525, + "acc_norm_stderr": 0.0399930971277747 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.39215686274509803, + "acc_stderr": 0.019751726508762626, + "acc_norm": 0.39215686274509803, + "acc_norm_stderr": 0.019751726508762626 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 
0.31560283687943264, + "acc_stderr": 0.02772498944950931, + "acc_norm": 0.31560283687943264, + "acc_norm_stderr": 0.02772498944950931 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3482142857142857, + "acc_stderr": 0.04521829902833585, + "acc_norm": 0.3482142857142857, + "acc_norm_stderr": 0.04521829902833585 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.031415546294025425, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.031415546294025425 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.25139664804469275, + "acc_stderr": 0.01450897945355398, + "acc_norm": 0.25139664804469275, + "acc_norm_stderr": 0.01450897945355398 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.41, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3602941176470588, + "acc_stderr": 0.029163128570670733, + "acc_norm": 0.3602941176470588, + "acc_norm_stderr": 0.029163128570670733 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.031680911612338825, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.031680911612338825 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5864978902953587, + "acc_stderr": 0.03205649904851859, + "acc_norm": 0.5864978902953587, + "acc_norm_stderr": 0.03205649904851859 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3259452411994785, + "acc_stderr": 0.01197150729498278, + "acc_norm": 0.3259452411994785, + "acc_norm_stderr": 0.01197150729498278 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5343137254901961, + "acc_stderr": 0.03501038327635897, + "acc_norm": 0.5343137254901961, + "acc_norm_stderr": 
0.03501038327635897 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5757575757575758, + "acc_stderr": 0.038592681420702636, + "acc_norm": 0.5757575757575758, + "acc_norm_stderr": 0.038592681420702636 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.29253365973072215, + "mc1_stderr": 0.015925597445286165, + "mc2": 0.4452362200636334, + "mc2_stderr": 0.01509557301201901 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5301062573789846, + "acc_stderr": 0.017159163590170223, + "acc_norm": 0.5867768595041323, + "acc_norm_stderr": 0.016929480234495226 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + 
"harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "DopeorNope/Yi_lee-v2-DPO-6B", + "model_sha": "3c1d2d605a5c621cfa2351b4b9061519fde23730", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/DopeorNope/ZeroCoka-7B/result_2023-10-11 12:06:32.json b/DopeorNope/ZeroCoka-7B/result_2023-10-11 12:06:32.json new file mode 100644 index 0000000000000000000000000000000000000000..60906afd8437b4b94874dfb43e59640d427cbdf0 --- /dev/null +++ b/DopeorNope/ZeroCoka-7B/result_2023-10-11 12:06:32.json @@ -0,0 +1,444 @@ +{ + "results": { + 
"harness|ko_arc_challenge|25": { + "acc": 0.27986348122866894, + "acc_stderr": 0.013119040897725923, + "acc_norm": 0.3455631399317406, + "acc_norm_stderr": 0.013896938461145687 + }, + "harness|ko_hellaswag|10": { + "acc": 0.36566421031667, + "acc_stderr": 0.0048063163427093936, + "acc_norm": 0.48466440948018324, + "acc_norm_stderr": 0.004987433862274562 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.39766081871345027, + "acc_stderr": 0.0375363895576169, + "acc_norm": 0.39766081871345027, + "acc_norm_stderr": 0.0375363895576169 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.32038834951456313, + "acc_stderr": 0.0462028408228004, + "acc_norm": 0.32038834951456313, + "acc_norm_stderr": 0.0462028408228004 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.41890166028097064, + "acc_stderr": 0.017643205052377185, + "acc_norm": 0.41890166028097064, + "acc_norm_stderr": 0.017643205052377185 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.362962962962963, + "acc_stderr": 0.041539484047424004, + "acc_norm": 0.362962962962963, + "acc_norm_stderr": 0.041539484047424004 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.28936170212765955, + "acc_stderr": 0.029644006577009618, + "acc_norm": 0.28936170212765955, + "acc_norm_stderr": 0.029644006577009618 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3132530120481928, + "acc_stderr": 0.036108050180310235, + "acc_norm": 0.3132530120481928, + "acc_norm_stderr": 0.036108050180310235 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3858520900321543, + "acc_stderr": 0.027648149599751464, + "acc_norm": 0.3858520900321543, + "acc_norm_stderr": 0.027648149599751464 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3721973094170404, + "acc_stderr": 0.03244305283008731, + "acc_norm": 0.3721973094170404, + "acc_norm_stderr": 0.03244305283008731 + 
}, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.45038167938931295, + "acc_stderr": 0.04363643698524779, + "acc_norm": 0.45038167938931295, + "acc_norm_stderr": 0.04363643698524779 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3838383838383838, + "acc_stderr": 0.03464881675016339, + "acc_norm": 0.3838383838383838, + "acc_norm_stderr": 0.03464881675016339 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4206896551724138, + "acc_stderr": 0.0411391498118926, + "acc_norm": 0.4206896551724138, + "acc_norm_stderr": 0.0411391498118926 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.04023382273617746, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.04023382273617746 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3865546218487395, + "acc_stderr": 0.0316314580755238, + "acc_norm": 0.3865546218487395, + "acc_norm_stderr": 0.0316314580755238 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3076923076923077, + "acc_stderr": 0.02340092891831049, + "acc_norm": 0.3076923076923077, + "acc_norm_stderr": 0.02340092891831049 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.04820403072760628, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.04820403072760628 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2561576354679803, + "acc_stderr": 0.0307127300709826, + "acc_norm": 0.2561576354679803, + "acc_norm_stderr": 
0.0307127300709826 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3709677419354839, + "acc_stderr": 0.02748054188795359, + "acc_norm": 0.3709677419354839, + "acc_norm_stderr": 0.02748054188795359 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.4829059829059829, + "acc_stderr": 0.032736940493481824, + "acc_norm": 0.4829059829059829, + "acc_norm_stderr": 0.032736940493481824 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3471698113207547, + "acc_stderr": 0.02930010170554965, + "acc_norm": 0.3471698113207547, + "acc_norm_stderr": 0.02930010170554965 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.33636363636363636, + "acc_stderr": 0.04525393596302506, + "acc_norm": 0.33636363636363636, + "acc_norm_stderr": 0.04525393596302506 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.23333333333333334, + "acc_stderr": 0.02578787422095932, + "acc_norm": 0.23333333333333334, + "acc_norm_stderr": 0.02578787422095932 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.03710185726119996, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.03710185726119996 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.4228855721393035, + "acc_stderr": 0.03493231777421282, + "acc_norm": 0.4228855721393035, + "acc_norm_stderr": 0.03493231777421282 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3063583815028902, + "acc_stderr": 0.03514942551267437, + "acc_norm": 0.3063583815028902, + "acc_norm_stderr": 0.03514942551267437 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.24074074074074073, + "acc_stderr": 0.0220190800122179, + "acc_norm": 0.24074074074074073, + "acc_norm_stderr": 0.0220190800122179 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3125, + "acc_stderr": 0.038760854559127644, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.038760854559127644 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + 
"acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.38439306358381503, + "acc_stderr": 0.026189666966272035, + "acc_norm": 0.38439306358381503, + "acc_norm_stderr": 0.026189666966272035 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3067484662576687, + "acc_stderr": 0.03623089915724145, + "acc_norm": 0.3067484662576687, + "acc_norm_stderr": 0.03623089915724145 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.39197530864197533, + "acc_stderr": 0.027163686038271226, + "acc_norm": 0.39197530864197533, + "acc_norm_stderr": 0.027163686038271226 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.38860103626943004, + "acc_stderr": 0.03517739796373132, + "acc_norm": 0.38860103626943004, + "acc_norm_stderr": 0.03517739796373132 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.042270544512322, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.042270544512322 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3779816513761468, + "acc_stderr": 0.02078918706672811, + "acc_norm": 0.3779816513761468, + "acc_norm_stderr": 0.02078918706672811 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.25396825396825395, + "acc_stderr": 0.03893259610604672, + "acc_norm": 0.25396825396825395, + "acc_norm_stderr": 0.03893259610604672 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.45098039215686275, + "acc_stderr": 0.028491993586171566, + "acc_norm": 0.45098039215686275, + "acc_norm_stderr": 0.028491993586171566 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.34, + "acc_stderr": 
0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5041322314049587, + "acc_stderr": 0.045641987674327526, + "acc_norm": 0.5041322314049587, + "acc_norm_stderr": 0.045641987674327526 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.32894736842105265, + "acc_stderr": 0.03823428969926605, + "acc_norm": 0.32894736842105265, + "acc_norm_stderr": 0.03823428969926605 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3104575163398693, + "acc_stderr": 0.018718067052623216, + "acc_norm": 0.3104575163398693, + "acc_norm_stderr": 0.018718067052623216 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32269503546099293, + "acc_stderr": 0.027889139300534778, + "acc_norm": 0.32269503546099293, + "acc_norm_stderr": 0.027889139300534778 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25, + "acc_stderr": 0.04109974682633932, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04109974682633932 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3425925925925926, + "acc_stderr": 0.03236585252602157, + "acc_norm": 0.3425925925925926, + "acc_norm_stderr": 0.03236585252602157 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24804469273743016, + "acc_stderr": 0.014444157808261446, + "acc_norm": 0.24804469273743016, + "acc_norm_stderr": 0.014444157808261446 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4117647058823529, + "acc_stderr": 0.029896163033125474, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.029896163033125474 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 
0.3142857142857143, + "acc_stderr": 0.029719329422417468, + "acc_norm": 0.3142857142857143, + "acc_norm_stderr": 0.029719329422417468 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.45147679324894513, + "acc_stderr": 0.0323936001739747, + "acc_norm": 0.45147679324894513, + "acc_norm_stderr": 0.0323936001739747 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3135593220338983, + "acc_stderr": 0.01184923429145932, + "acc_norm": 0.3135593220338983, + "acc_norm_stderr": 0.01184923429145932 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.36764705882352944, + "acc_stderr": 0.03384132045674118, + "acc_norm": 0.36764705882352944, + "acc_norm_stderr": 0.03384132045674118 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3878787878787879, + "acc_stderr": 0.038049136539710114, + "acc_norm": 0.3878787878787879, + "acc_norm_stderr": 0.038049136539710114 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2386780905752754, + "mc1_stderr": 0.014922629695456411, + "mc2": 0.3826229918315052, + "mc2_stderr": 0.015120737226444851 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.21251475796930341, + "acc_stderr": 0.014064703386174934, + "acc_norm": 0.29988193624557263, + "acc_norm_stderr": 0.015753447615429458 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + 
"harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + 
"harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "DopeorNope/ZeroCoka-7B", + "model_sha": "3025135b08f7d052531fcd8f6a4a5a97e4e25c76", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/DopeorNope/Zero_COKE_K-13B/result_2023-10-08 06:50:15.json b/DopeorNope/Zero_COKE_K-13B/result_2023-10-08 06:50:15.json new file mode 100644 index 0000000000000000000000000000000000000000..883f9a68bf67e13282896527af6324985db9d323 --- /dev/null +++ b/DopeorNope/Zero_COKE_K-13B/result_2023-10-08 06:50:15.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.35238907849829354, + "acc_stderr": 0.01396014260059869, + "acc_norm": 0.3984641638225256, + "acc_norm_stderr": 0.014306946052735569 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3679545907189803, + "acc_stderr": 0.0048126332800782715, + "acc_norm": 0.46932881896036643, + "acc_norm_stderr": 0.004980384575535391 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.47368421052631576, + "acc_stderr": 0.038295098689947286, + "acc_norm": 0.47368421052631576, + "acc_norm_stderr": 0.038295098689947286 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5728155339805825, + "acc_stderr": 0.04897957737781168, + "acc_norm": 0.5728155339805825, + "acc_norm_stderr": 0.04897957737781168 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.46871008939974457, + "acc_stderr": 0.01784491809046854, + "acc_norm": 0.46871008939974457, + "acc_norm_stderr": 0.01784491809046854 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34814814814814815, + "acc_stderr": 0.041153246103369526, + "acc_norm": 0.34814814814814815, + "acc_norm_stderr": 0.041153246103369526 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206824, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206824 + }, + 
"harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.40425531914893614, + "acc_stderr": 0.03208115750788684, + "acc_norm": 0.40425531914893614, + "acc_norm_stderr": 0.03208115750788684 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3855421686746988, + "acc_stderr": 0.037891344246115496, + "acc_norm": 0.3855421686746988, + "acc_norm_stderr": 0.037891344246115496 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.44694533762057875, + "acc_stderr": 0.028237769422085335, + "acc_norm": 0.44694533762057875, + "acc_norm_stderr": 0.028237769422085335 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.42152466367713004, + "acc_stderr": 0.03314190222110656, + "acc_norm": 0.42152466367713004, + "acc_norm_stderr": 0.03314190222110656 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4732824427480916, + "acc_stderr": 0.04379024936553894, + "acc_norm": 0.4732824427480916, + "acc_norm_stderr": 0.04379024936553894 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.41, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.41, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5404040404040404, + "acc_stderr": 0.035507024651313425, + "acc_norm": 0.5404040404040404, + "acc_norm_stderr": 0.035507024651313425 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3931034482758621, + "acc_stderr": 0.0407032901370707, + "acc_norm": 0.3931034482758621, + "acc_norm_stderr": 0.0407032901370707 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237655, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237655 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.0322529423239964, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.0322529423239964 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.45897435897435895, + "acc_stderr": 0.025265525491284295, + "acc_norm": 
0.45897435897435895, + "acc_norm_stderr": 0.025265525491284295 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.45, + "acc_stderr": 0.049999999999999996, + "acc_norm": 0.45, + "acc_norm_stderr": 0.049999999999999996 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.31527093596059114, + "acc_stderr": 0.03269080871970186, + "acc_norm": 0.31527093596059114, + "acc_norm_stderr": 0.03269080871970186 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4483870967741935, + "acc_stderr": 0.02829205683011273, + "acc_norm": 0.4483870967741935, + "acc_norm_stderr": 0.02829205683011273 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6025641025641025, + "acc_stderr": 0.03205953453789293, + "acc_norm": 0.6025641025641025, + "acc_norm_stderr": 0.03205953453789293 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4528301886792453, + "acc_stderr": 0.030635627957961823, + "acc_norm": 0.4528301886792453, + "acc_norm_stderr": 0.030635627957961823 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4909090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.4909090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3296296296296296, + "acc_stderr": 0.028661201116524586, + "acc_norm": 0.3296296296296296, + "acc_norm_stderr": 0.028661201116524586 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3443708609271523, + "acc_stderr": 0.03879687024073327, + "acc_norm": 0.3443708609271523, + "acc_norm_stderr": 0.03879687024073327 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5870646766169154, + "acc_stderr": 
0.03481520803367348, + "acc_norm": 0.5870646766169154, + "acc_norm_stderr": 0.03481520803367348 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.34104046242774566, + "acc_stderr": 0.03614665424180826, + "acc_norm": 0.34104046242774566, + "acc_norm_stderr": 0.03614665424180826 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.32275132275132273, + "acc_stderr": 0.024078943243597016, + "acc_norm": 0.32275132275132273, + "acc_norm_stderr": 0.024078943243597016 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3958333333333333, + "acc_stderr": 0.04089465449325582, + "acc_norm": 0.3958333333333333, + "acc_norm_stderr": 0.04089465449325582 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5173410404624278, + "acc_stderr": 0.026902900458666647, + "acc_norm": 0.5173410404624278, + "acc_norm_stderr": 0.026902900458666647 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.38650306748466257, + "acc_stderr": 0.03825825548848607, + "acc_norm": 0.38650306748466257, + "acc_norm_stderr": 0.03825825548848607 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4382716049382716, + "acc_stderr": 0.027607914087400473, + "acc_norm": 0.4382716049382716, + "acc_norm_stderr": 0.027607914087400473 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939098, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939098 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5025906735751295, + "acc_stderr": 0.03608390745384487, + "acc_norm": 0.5025906735751295, + "acc_norm_stderr": 0.03608390745384487 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.20175438596491227, 
+ "acc_stderr": 0.03775205013583639, + "acc_norm": 0.20175438596491227, + "acc_norm_stderr": 0.03775205013583639 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.46972477064220186, + "acc_stderr": 0.021397988604936965, + "acc_norm": 0.46972477064220186, + "acc_norm_stderr": 0.021397988604936965 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.40476190476190477, + "acc_stderr": 0.04390259265377563, + "acc_norm": 0.40476190476190477, + "acc_norm_stderr": 0.04390259265377563 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.46405228758169936, + "acc_stderr": 0.028555827516528777, + "acc_norm": 0.46405228758169936, + "acc_norm_stderr": 0.028555827516528777 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5289256198347108, + "acc_stderr": 0.04556710331269498, + "acc_norm": 0.5289256198347108, + "acc_norm_stderr": 0.04556710331269498 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.35526315789473684, + "acc_stderr": 0.03894734487013317, + "acc_norm": 0.35526315789473684, + "acc_norm_stderr": 0.03894734487013317 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3284313725490196, + "acc_stderr": 0.018999707383162666, + "acc_norm": 0.3284313725490196, + "acc_norm_stderr": 0.018999707383162666 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3404255319148936, + "acc_stderr": 0.028267657482650147, + "acc_norm": 0.3404255319148936, + "acc_norm_stderr": 0.028267657482650147 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.24107142857142858, + "acc_stderr": 0.04059867246952687, + "acc_norm": 0.24107142857142858, + "acc_norm_stderr": 0.04059867246952687 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.03256850570293648, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.03256850570293648 + }, + 
"harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2927374301675978, + "acc_stderr": 0.015218109544410182, + "acc_norm": 0.2927374301675978, + "acc_norm_stderr": 0.015218109544410182 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.39705882352941174, + "acc_stderr": 0.029722152099280065, + "acc_norm": 0.39705882352941174, + "acc_norm_stderr": 0.029722152099280065 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5142857142857142, + "acc_stderr": 0.03199615232806287, + "acc_norm": 0.5142857142857142, + "acc_norm_stderr": 0.03199615232806287 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5569620253164557, + "acc_stderr": 0.03233532777533485, + "acc_norm": 0.5569620253164557, + "acc_norm_stderr": 0.03233532777533485 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3239895697522816, + "acc_stderr": 0.011952840809646563, + "acc_norm": 0.3239895697522816, + "acc_norm_stderr": 0.011952840809646563 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.03484941514429231, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.03484941514429231 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4909090909090909, + "acc_stderr": 0.03903698647748441, + "acc_norm": 0.4909090909090909, + "acc_norm_stderr": 0.03903698647748441 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.32068543451652387, + "mc1_stderr": 0.0163391703732809, + "mc2": 0.498111749136946, + "mc2_stderr": 0.015897921630313217 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4203069657615112, + "acc_stderr": 0.016970598281177703, + "acc_norm": 0.4344746162927981, + 
"acc_norm_stderr": 0.01704209862082494 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, 
+ "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "DopeorNope/Zero_COKE_K-13B", + "model_sha": "fda4838dd7feb06c1289ae143810c67a59a72961", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/DopeorNope/love_but_hate-10.7B/result_2024-01-03 17:22:25.json b/DopeorNope/love_but_hate-10.7B/result_2024-01-03 17:22:25.json new file mode 100644 index 0000000000000000000000000000000000000000..ff55e7d8291d6e6b7a567db49d6b20c8d6666315 --- /dev/null +++ b/DopeorNope/love_but_hate-10.7B/result_2024-01-03 17:22:25.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4325938566552901, + "acc_stderr": 0.014478005694182526, + "acc_norm": 0.49829351535836175, + "acc_norm_stderr": 0.01461130570505699 + }, + "harness|ko_hellaswag|10": { + "acc": 0.44891455885281817, + "acc_stderr": 0.004963669199433393, + "acc_norm": 0.6058554072893846, + "acc_norm_stderr": 0.004876674814874702 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.631578947368421, + "acc_stderr": 0.036996580176568775, + "acc_norm": 0.631578947368421, + "acc_norm_stderr": 0.036996580176568775 + }, + 
"harness|ko_mmlu_management|5": { + "acc": 0.6310679611650486, + "acc_stderr": 0.0477761518115674, + "acc_norm": 0.6310679611650486, + "acc_norm_stderr": 0.0477761518115674 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6309067688378033, + "acc_stderr": 0.017256283109124627, + "acc_norm": 0.6309067688378033, + "acc_norm_stderr": 0.017256283109124627 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.04292596718256981, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.04292596718256981 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4595744680851064, + "acc_stderr": 0.032579014820998356, + "acc_norm": 0.4595744680851064, + "acc_norm_stderr": 0.032579014820998356 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4457831325301205, + "acc_stderr": 0.03869543323472101, + "acc_norm": 0.4457831325301205, + "acc_norm_stderr": 0.03869543323472101 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.617363344051447, + "acc_stderr": 0.027604689028581986, + "acc_norm": 0.617363344051447, + "acc_norm_stderr": 0.027604689028581986 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5515695067264574, + "acc_stderr": 0.033378837362550984, + "acc_norm": 0.5515695067264574, + "acc_norm_stderr": 0.033378837362550984 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5725190839694656, + "acc_stderr": 0.04338920305792401, + "acc_norm": 0.5725190839694656, + "acc_norm_stderr": 0.04338920305792401 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6616161616161617, + "acc_stderr": 0.03371124142626305, + "acc_norm": 0.6616161616161617, + "acc_norm_stderr": 0.03371124142626305 + }, + 
"harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5172413793103449, + "acc_stderr": 0.04164188720169375, + "acc_norm": 0.5172413793103449, + "acc_norm_stderr": 0.04164188720169375 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.04440521906179328, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.04440521906179328 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5546218487394958, + "acc_stderr": 0.0322841062671639, + "acc_norm": 0.5546218487394958, + "acc_norm_stderr": 0.0322841062671639 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5230769230769231, + "acc_stderr": 0.025323990861736253, + "acc_norm": 0.5230769230769231, + "acc_norm_stderr": 0.025323990861736253 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5833333333333334, + "acc_stderr": 0.04766075165356461, + "acc_norm": 0.5833333333333334, + "acc_norm_stderr": 0.04766075165356461 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.03465304488406795, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.03465304488406795 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5451612903225806, + "acc_stderr": 0.02832774309156106, + "acc_norm": 0.5451612903225806, + "acc_norm_stderr": 0.02832774309156106 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.8247863247863247, + "acc_stderr": 0.024904439098918225, + "acc_norm": 0.8247863247863247, + "acc_norm_stderr": 0.024904439098918225 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5283018867924528, + "acc_stderr": 0.0307235352490061, + "acc_norm": 0.5283018867924528, + 
"acc_norm_stderr": 0.0307235352490061 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5272727272727272, + "acc_stderr": 0.04782001791380061, + "acc_norm": 0.5272727272727272, + "acc_norm_stderr": 0.04782001791380061 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.32222222222222224, + "acc_stderr": 0.0284934650910286, + "acc_norm": 0.32222222222222224, + "acc_norm_stderr": 0.0284934650910286 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3509933774834437, + "acc_stderr": 0.03896981964257374, + "acc_norm": 0.3509933774834437, + "acc_norm_stderr": 0.03896981964257374 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7412935323383084, + "acc_stderr": 0.03096590312357303, + "acc_norm": 0.7412935323383084, + "acc_norm_stderr": 0.03096590312357303 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3872832369942196, + "acc_stderr": 0.03714325906302065, + "acc_norm": 0.3872832369942196, + "acc_norm_stderr": 0.03714325906302065 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.36772486772486773, + "acc_stderr": 0.02483383982556242, + "acc_norm": 0.36772486772486773, + "acc_norm_stderr": 0.02483383982556242 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4861111111111111, + "acc_stderr": 0.041795966175810016, + "acc_norm": 0.4861111111111111, + "acc_norm_stderr": 0.041795966175810016 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.73, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.73, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5549132947976878, + "acc_stderr": 0.02675625512966377, + "acc_norm": 0.5549132947976878, + "acc_norm_stderr": 0.02675625512966377 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5153374233128835, + "acc_stderr": 0.03926522378708843, + "acc_norm": 
0.5153374233128835, + "acc_norm_stderr": 0.03926522378708843 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5679012345679012, + "acc_stderr": 0.027563010971606672, + "acc_norm": 0.5679012345679012, + "acc_norm_stderr": 0.027563010971606672 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6735751295336787, + "acc_stderr": 0.033840286211432945, + "acc_norm": 0.6735751295336787, + "acc_norm_stderr": 0.033840286211432945 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.35964912280701755, + "acc_stderr": 0.04514496132873633, + "acc_norm": 0.35964912280701755, + "acc_norm_stderr": 0.04514496132873633 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6440366972477064, + "acc_stderr": 0.020528559278244218, + "acc_norm": 0.6440366972477064, + "acc_norm_stderr": 0.020528559278244218 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.373015873015873, + "acc_stderr": 0.04325506042017086, + "acc_norm": 0.373015873015873, + "acc_norm_stderr": 0.04325506042017086 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.02845263998508801, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.02845263998508801 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.58, + "acc_stderr": 0.04960449637488583, + "acc_norm": 0.58, + "acc_norm_stderr": 0.04960449637488583 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6942148760330579, + "acc_stderr": 0.04205953933884122, + "acc_norm": 0.6942148760330579, + "acc_norm_stderr": 0.04205953933884122 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5, + "acc_stderr": 0.04068942293855797, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04068942293855797 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.46568627450980393, + "acc_stderr": 0.02018014484330729, + "acc_norm": 
0.46568627450980393, + "acc_norm_stderr": 0.02018014484330729 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3617021276595745, + "acc_stderr": 0.028663820147199506, + "acc_norm": 0.3617021276595745, + "acc_norm_stderr": 0.028663820147199506 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.375, + "acc_stderr": 0.04595091388086298, + "acc_norm": 0.375, + "acc_norm_stderr": 0.04595091388086298 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4305555555555556, + "acc_stderr": 0.03376922151252335, + "acc_norm": 0.4305555555555556, + "acc_norm_stderr": 0.03376922151252335 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23910614525139665, + "acc_stderr": 0.014265554192331149, + "acc_norm": 0.23910614525139665, + "acc_norm_stderr": 0.014265554192331149 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.65, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.65, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4522058823529412, + "acc_stderr": 0.030233758551596445, + "acc_norm": 0.4522058823529412, + "acc_norm_stderr": 0.030233758551596445 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5755102040816327, + "acc_stderr": 0.03164209487942942, + "acc_norm": 0.5755102040816327, + "acc_norm_stderr": 0.03164209487942942 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7046413502109705, + "acc_stderr": 0.029696338713422882, + "acc_norm": 0.7046413502109705, + "acc_norm_stderr": 0.029696338713422882 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.38265971316818775, + "acc_stderr": 0.012413595882893263, + "acc_norm": 0.38265971316818775, + "acc_norm_stderr": 0.012413595882893263 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 
0.5980392156862745, + "acc_stderr": 0.03441190023482465, + "acc_norm": 0.5980392156862745, + "acc_norm_stderr": 0.03441190023482465 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6060606060606061, + "acc_stderr": 0.038154943086889305, + "acc_norm": 0.6060606060606061, + "acc_norm_stderr": 0.038154943086889305 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.39167686658506734, + "mc1_stderr": 0.017087795881769615, + "mc2": 0.5513156843136133, + "mc2_stderr": 0.015893218978109937 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3872491145218418, + "acc_stderr": 0.01674757799164279, + "acc_norm": 0.4002361275088548, + "acc_norm_stderr": 0.016844693510505052 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + 
"harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "DopeorNope/love_but_hate-10.7B", + "model_sha": "43b58aed54ef7f6d71eec46253b394cfbf129afb", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/DopeorNope/mistralopithecus-v1-SFT/result_2023-11-26 08:04:14.json b/DopeorNope/mistralopithecus-v1-SFT/result_2023-11-26 08:04:14.json new file mode 100644 index 0000000000000000000000000000000000000000..cdbafcb5e800c4d0e2bad66f3477b81561ff05ad --- /dev/null +++ 
b/DopeorNope/mistralopithecus-v1-SFT/result_2023-11-26 08:04:14.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4283276450511945, + "acc_stderr": 0.014460496367599008, + "acc_norm": 0.47696245733788395, + "acc_norm_stderr": 0.014595873205358269 + }, + "harness|ko_hellaswag|10": { + "acc": 0.40689105755825533, + "acc_stderr": 0.004902502514738604, + "acc_norm": 0.5343557060346544, + "acc_norm_stderr": 0.00497798845250264 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5146198830409356, + "acc_stderr": 0.03833185275213025, + "acc_norm": 0.5146198830409356, + "acc_norm_stderr": 0.03833185275213025 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6019417475728155, + "acc_stderr": 0.048467482539772386, + "acc_norm": 0.6019417475728155, + "acc_norm_stderr": 0.048467482539772386 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5504469987228607, + "acc_stderr": 0.017788725283507337, + "acc_norm": 0.5504469987228607, + "acc_norm_stderr": 0.017788725283507337 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.362962962962963, + "acc_stderr": 0.041539484047424, + "acc_norm": 0.362962962962963, + "acc_norm_stderr": 0.041539484047424 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.425531914893617, + "acc_stderr": 0.03232146916224468, + "acc_norm": 0.425531914893617, + "acc_norm_stderr": 0.03232146916224468 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.41566265060240964, + "acc_stderr": 0.038367221765980515, + "acc_norm": 0.41566265060240964, + "acc_norm_stderr": 0.038367221765980515 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4533762057877814, + "acc_stderr": 0.02827435985489424, + "acc_norm": 0.4533762057877814, + "acc_norm_stderr": 0.02827435985489424 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4618834080717489, + "acc_stderr": 
0.033460150119732274, + "acc_norm": 0.4618834080717489, + "acc_norm_stderr": 0.033460150119732274 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5038167938931297, + "acc_stderr": 0.043851623256015534, + "acc_norm": 0.5038167938931297, + "acc_norm_stderr": 0.043851623256015534 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6111111111111112, + "acc_stderr": 0.0347327959083696, + "acc_norm": 0.6111111111111112, + "acc_norm_stderr": 0.0347327959083696 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4689655172413793, + "acc_stderr": 0.04158632762097828, + "acc_norm": 0.4689655172413793, + "acc_norm_stderr": 0.04158632762097828 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.04488482852329017, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.04488482852329017 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5294117647058824, + "acc_stderr": 0.03242225027115006, + "acc_norm": 0.5294117647058824, + "acc_norm_stderr": 0.03242225027115006 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4897435897435897, + "acc_stderr": 0.025345672221942374, + "acc_norm": 0.4897435897435897, + "acc_norm_stderr": 0.025345672221942374 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.64, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.64, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5092592592592593, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.5092592592592593, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 
0.3891625615763547, + "acc_stderr": 0.034304624161038716, + "acc_norm": 0.3891625615763547, + "acc_norm_stderr": 0.034304624161038716 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4870967741935484, + "acc_stderr": 0.028434533152681848, + "acc_norm": 0.4870967741935484, + "acc_norm_stderr": 0.028434533152681848 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7222222222222222, + "acc_stderr": 0.02934311479809447, + "acc_norm": 0.7222222222222222, + "acc_norm_stderr": 0.02934311479809447 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4981132075471698, + "acc_stderr": 0.030772653642075664, + "acc_norm": 0.4981132075471698, + "acc_norm_stderr": 0.030772653642075664 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3111111111111111, + "acc_stderr": 0.02822644674968352, + "acc_norm": 0.3111111111111111, + "acc_norm_stderr": 0.02822644674968352 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3443708609271523, + "acc_stderr": 0.038796870240733264, + "acc_norm": 0.3443708609271523, + "acc_norm_stderr": 0.038796870240733264 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6069651741293532, + "acc_stderr": 0.0345368246603156, + "acc_norm": 0.6069651741293532, + "acc_norm_stderr": 0.0345368246603156 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.41040462427745666, + "acc_stderr": 0.03750757044895538, + "acc_norm": 0.41040462427745666, + "acc_norm_stderr": 0.03750757044895538 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.36772486772486773, + "acc_stderr": 0.02483383982556242, + "acc_norm": 0.36772486772486773, + "acc_norm_stderr": 0.02483383982556242 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.040166600304512336, + "acc_norm": 0.3611111111111111, + 
"acc_norm_stderr": 0.040166600304512336 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.64, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.64, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.546242774566474, + "acc_stderr": 0.026803720583206174, + "acc_norm": 0.546242774566474, + "acc_norm_stderr": 0.026803720583206174 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.50920245398773, + "acc_stderr": 0.03927705600787443, + "acc_norm": 0.50920245398773, + "acc_norm_stderr": 0.03927705600787443 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4691358024691358, + "acc_stderr": 0.027767689606833942, + "acc_norm": 0.4691358024691358, + "acc_norm_stderr": 0.027767689606833942 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5595854922279793, + "acc_stderr": 0.035827245300360945, + "acc_norm": 0.5595854922279793, + "acc_norm_stderr": 0.035827245300360945 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.34210526315789475, + "acc_stderr": 0.04462917535336938, + "acc_norm": 0.34210526315789475, + "acc_norm_stderr": 0.04462917535336938 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5541284403669725, + "acc_stderr": 0.021311335009708582, + "acc_norm": 0.5541284403669725, + "acc_norm_stderr": 0.021311335009708582 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.40476190476190477, + "acc_stderr": 0.043902592653775614, + "acc_norm": 0.40476190476190477, + "acc_norm_stderr": 0.043902592653775614 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.46078431372549017, + "acc_stderr": 0.028541722692618874, + "acc_norm": 
0.46078431372549017, + "acc_norm_stderr": 0.028541722692618874 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6776859504132231, + "acc_stderr": 0.042664163633521664, + "acc_norm": 0.6776859504132231, + "acc_norm_stderr": 0.042664163633521664 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4342105263157895, + "acc_stderr": 0.04033565667848319, + "acc_norm": 0.4342105263157895, + "acc_norm_stderr": 0.04033565667848319 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.39869281045751637, + "acc_stderr": 0.019808281317449848, + "acc_norm": 0.39869281045751637, + "acc_norm_stderr": 0.019808281317449848 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3049645390070922, + "acc_stderr": 0.02746470844202214, + "acc_norm": 0.3049645390070922, + "acc_norm_stderr": 0.02746470844202214 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4375, + "acc_stderr": 0.04708567521880525, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.04708567521880525 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.032757734861009996, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.032757734861009996 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.18659217877094972, + "acc_stderr": 0.013029631416358349, + "acc_norm": 0.18659217877094972, + "acc_norm_stderr": 0.013029631416358349 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.0498887651569859, + "acc_norm": 0.44, + "acc_norm_stderr": 0.0498887651569859 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3639705882352941, + "acc_stderr": 0.02922719246003202, + 
"acc_norm": 0.3639705882352941, + "acc_norm_stderr": 0.02922719246003202 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5795918367346938, + "acc_stderr": 0.031601069934496004, + "acc_norm": 0.5795918367346938, + "acc_norm_stderr": 0.031601069934496004 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6286919831223629, + "acc_stderr": 0.03145068600744859, + "acc_norm": 0.6286919831223629, + "acc_norm_stderr": 0.03145068600744859 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3318122555410691, + "acc_stderr": 0.012026088259897634, + "acc_norm": 0.3318122555410691, + "acc_norm_stderr": 0.012026088259897634 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4803921568627451, + "acc_stderr": 0.03506612560524866, + "acc_norm": 0.4803921568627451, + "acc_norm_stderr": 0.03506612560524866 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.47878787878787876, + "acc_stderr": 0.03900828913737301, + "acc_norm": 0.47878787878787876, + "acc_norm_stderr": 0.03900828913737301 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2839657282741738, + "mc1_stderr": 0.015785370858396708, + "mc2": 0.4537563569615343, + "mc2_stderr": 0.015481816857869497 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.44391971664698937, + "acc_stderr": 0.017081884623542543, + "acc_norm": 0.47461629279811096, + "acc_norm_stderr": 0.017168187201429253 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + 
"harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + 
"harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "DopeorNope/mistralopithecus-v1-SFT", + "model_sha": "d287f71c14d2bfbcaa053dcaa6b9b22dd5bc0f1a", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/DopeorNope/mistralopithecus-v1-dpo-7b/result_2023-11-26 08:11:29.json b/DopeorNope/mistralopithecus-v1-dpo-7b/result_2023-11-26 08:11:29.json new file mode 100644 index 0000000000000000000000000000000000000000..b9b490954d6695aff4afab90ac2f9d1287a59229 --- /dev/null +++ b/DopeorNope/mistralopithecus-v1-dpo-7b/result_2023-11-26 08:11:29.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4872013651877133, + "acc_stderr": 0.014606603181012546, + "acc_norm": 0.5273037542662116, + "acc_norm_stderr": 0.014589589101985994 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4403505277833101, + "acc_stderr": 0.004954146286513353, + "acc_norm": 0.55646285600478, + "acc_norm_stderr": 0.004957863944093124 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5321637426900585, + "acc_stderr": 0.03826882417660368, + "acc_norm": 0.5321637426900585, + "acc_norm_stderr": 0.03826882417660368 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6116504854368932, + "acc_stderr": 0.04825729337356389, + "acc_norm": 0.6116504854368932, + "acc_norm_stderr": 0.04825729337356389 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4929757343550447, + "acc_stderr": 0.01787819900343221, + "acc_norm": 0.4929757343550447, + "acc_norm_stderr": 0.01787819900343221 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34074074074074073, + "acc_stderr": 0.040943762699967946, + "acc_norm": 0.34074074074074073, + "acc_norm_stderr": 0.040943762699967946 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 
0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.42127659574468085, + "acc_stderr": 0.03227834510146268, + "acc_norm": 0.42127659574468085, + "acc_norm_stderr": 0.03227834510146268 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3855421686746988, + "acc_stderr": 0.037891344246115496, + "acc_norm": 0.3855421686746988, + "acc_norm_stderr": 0.037891344246115496 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4437299035369775, + "acc_stderr": 0.02821768355665231, + "acc_norm": 0.4437299035369775, + "acc_norm_stderr": 0.02821768355665231 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.43946188340807174, + "acc_stderr": 0.03331092511038179, + "acc_norm": 0.43946188340807174, + "acc_norm_stderr": 0.03331092511038179 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.40458015267175573, + "acc_stderr": 0.043046937953806645, + "acc_norm": 0.40458015267175573, + "acc_norm_stderr": 0.043046937953806645 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5353535353535354, + "acc_stderr": 0.03553436368828063, + "acc_norm": 0.5353535353535354, + "acc_norm_stderr": 0.03553436368828063 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3793103448275862, + "acc_stderr": 0.04043461861916747, + "acc_norm": 0.3793103448275862, + "acc_norm_stderr": 0.04043461861916747 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.04220773659171453, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.04220773659171453 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.46218487394957986, + "acc_stderr": 0.032385469487589795, + "acc_norm": 0.46218487394957986, + "acc_norm_stderr": 0.032385469487589795 + }, + 
"harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3974358974358974, + "acc_stderr": 0.024811920017903836, + "acc_norm": 0.3974358974358974, + "acc_norm_stderr": 0.024811920017903836 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.53, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.53, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.04803752235190193, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.04803752235190193 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.35960591133004927, + "acc_stderr": 0.03376458246509567, + "acc_norm": 0.35960591133004927, + "acc_norm_stderr": 0.03376458246509567 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.44193548387096776, + "acc_stderr": 0.02825155790684974, + "acc_norm": 0.44193548387096776, + "acc_norm_stderr": 0.02825155790684974 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6837606837606838, + "acc_stderr": 0.03046365674734025, + "acc_norm": 0.6837606837606838, + "acc_norm_stderr": 0.03046365674734025 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4188679245283019, + "acc_stderr": 0.03036505082911522, + "acc_norm": 0.4188679245283019, + "acc_norm_stderr": 0.03036505082911522 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4727272727272727, + "acc_stderr": 0.04782001791380063, + "acc_norm": 0.4727272727272727, + "acc_norm_stderr": 0.04782001791380063 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2851851851851852, + "acc_stderr": 0.027528599210340492, + "acc_norm": 0.2851851851851852, + "acc_norm_stderr": 0.027528599210340492 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.03757949922943343, + "acc_norm": 0.304635761589404, + 
"acc_norm_stderr": 0.03757949922943343 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6069651741293532, + "acc_stderr": 0.0345368246603156, + "acc_norm": 0.6069651741293532, + "acc_norm_stderr": 0.0345368246603156 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3468208092485549, + "acc_stderr": 0.036291466701596636, + "acc_norm": 0.3468208092485549, + "acc_norm_stderr": 0.036291466701596636 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.4021164021164021, + "acc_stderr": 0.02525303255499768, + "acc_norm": 0.4021164021164021, + "acc_norm_stderr": 0.02525303255499768 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.375, + "acc_stderr": 0.04048439222695598, + "acc_norm": 0.375, + "acc_norm_stderr": 0.04048439222695598 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4624277456647399, + "acc_stderr": 0.026842985519615375, + "acc_norm": 0.4624277456647399, + "acc_norm_stderr": 0.026842985519615375 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.48466257668711654, + "acc_stderr": 0.03926522378708843, + "acc_norm": 0.48466257668711654, + "acc_norm_stderr": 0.03926522378708843 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.43209876543209874, + "acc_stderr": 0.027563010971606676, + "acc_norm": 0.43209876543209874, + "acc_norm_stderr": 0.027563010971606676 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5077720207253886, + "acc_stderr": 0.03608003225569654, + "acc_norm": 0.5077720207253886, + "acc_norm_stderr": 
0.03608003225569654 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3157894736842105, + "acc_stderr": 0.04372748290278007, + "acc_norm": 0.3157894736842105, + "acc_norm_stderr": 0.04372748290278007 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5045871559633027, + "acc_stderr": 0.02143642095552942, + "acc_norm": 0.5045871559633027, + "acc_norm_stderr": 0.02143642095552942 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.36507936507936506, + "acc_stderr": 0.043062412591271526, + "acc_norm": 0.36507936507936506, + "acc_norm_stderr": 0.043062412591271526 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.43790849673202614, + "acc_stderr": 0.02840830202033269, + "acc_norm": 0.43790849673202614, + "acc_norm_stderr": 0.02840830202033269 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.47, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.47, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5289256198347108, + "acc_stderr": 0.04556710331269498, + "acc_norm": 0.5289256198347108, + "acc_norm_stderr": 0.04556710331269498 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3684210526315789, + "acc_stderr": 0.03925523381052932, + "acc_norm": 0.3684210526315789, + "acc_norm_stderr": 0.03925523381052932 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.01965992249362334, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.01965992249362334 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32269503546099293, + "acc_stderr": 0.027889139300534795, + "acc_norm": 0.32269503546099293, + "acc_norm_stderr": 0.027889139300534795 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.39285714285714285, + "acc_stderr": 0.04635550135609976, + "acc_norm": 0.39285714285714285, + "acc_norm_stderr": 0.04635550135609976 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2916666666666667, + "acc_stderr": 
0.03099866630456053, + "acc_norm": 0.2916666666666667, + "acc_norm_stderr": 0.03099866630456053 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.20335195530726258, + "acc_stderr": 0.013461351487507506, + "acc_norm": 0.20335195530726258, + "acc_norm_stderr": 0.013461351487507506 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3492647058823529, + "acc_stderr": 0.02895975519682487, + "acc_norm": 0.3492647058823529, + "acc_norm_stderr": 0.02895975519682487 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4897959183673469, + "acc_stderr": 0.03200255347893782, + "acc_norm": 0.4897959183673469, + "acc_norm_stderr": 0.03200255347893782 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5569620253164557, + "acc_stderr": 0.03233532777533484, + "acc_norm": 0.5569620253164557, + "acc_norm_stderr": 0.03233532777533484 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3109517601043025, + "acc_stderr": 0.0118222529177992, + "acc_norm": 0.3109517601043025, + "acc_norm_stderr": 0.0118222529177992 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4068627450980392, + "acc_stderr": 0.03447891136353382, + "acc_norm": 0.4068627450980392, + "acc_norm_stderr": 0.03447891136353382 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4303030303030303, + "acc_stderr": 0.03866225962879077, + "acc_norm": 0.4303030303030303, + "acc_norm_stderr": 0.03866225962879077 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2974296205630355, + "mc1_stderr": 0.016002651487361016, + "mc2": 0.4499453306291458, + "mc2_stderr": 0.016369397422184195 + }, + "harness|ko_commongen_v2|2": { + 
"acc": 0.3187721369539551, + "acc_stderr": 0.016021427055309578, + "acc_norm": 0.3270365997638725, + "acc_norm_stderr": 0.016129047485457022 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + 
"harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "DopeorNope/mistralopithecus-v1-dpo-7b", + "model_sha": "ec7557bb2a4fbbb775d057f98f98ae6b4430c8d5", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/DopeorNope/mistralopithecus-v1-dpo/result_2023-11-26 08:04:46.json b/DopeorNope/mistralopithecus-v1-dpo/result_2023-11-26 08:04:46.json new file mode 100644 index 0000000000000000000000000000000000000000..7c3ee66112659101e7efd30db17a59614ef0962c --- /dev/null +++ b/DopeorNope/mistralopithecus-v1-dpo/result_2023-11-26 08:04:46.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4872013651877133, + "acc_stderr": 0.014606603181012546, + "acc_norm": 0.5273037542662116, + "acc_norm_stderr": 0.014589589101985994 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4404501095399323, + "acc_stderr": 0.004954265595373461, + "acc_norm": 0.5565624377614021, + "acc_norm_stderr": 0.004957750897152936 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 
0.5321637426900585, + "acc_stderr": 0.03826882417660368, + "acc_norm": 0.5321637426900585, + "acc_norm_stderr": 0.03826882417660368 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6116504854368932, + "acc_stderr": 0.04825729337356389, + "acc_norm": 0.6116504854368932, + "acc_norm_stderr": 0.04825729337356389 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4929757343550447, + "acc_stderr": 0.01787819900343221, + "acc_norm": 0.4929757343550447, + "acc_norm_stderr": 0.01787819900343221 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34074074074074073, + "acc_stderr": 0.040943762699967946, + "acc_norm": 0.34074074074074073, + "acc_norm_stderr": 0.040943762699967946 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.42127659574468085, + "acc_stderr": 0.03227834510146268, + "acc_norm": 0.42127659574468085, + "acc_norm_stderr": 0.03227834510146268 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3855421686746988, + "acc_stderr": 0.037891344246115496, + "acc_norm": 0.3855421686746988, + "acc_norm_stderr": 0.037891344246115496 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4437299035369775, + "acc_stderr": 0.02821768355665231, + "acc_norm": 0.4437299035369775, + "acc_norm_stderr": 0.02821768355665231 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.43946188340807174, + "acc_stderr": 0.03331092511038179, + "acc_norm": 0.43946188340807174, + "acc_norm_stderr": 0.03331092511038179 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.40458015267175573, + "acc_stderr": 0.043046937953806645, + "acc_norm": 0.40458015267175573, + "acc_norm_stderr": 0.043046937953806645 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 
0.5353535353535354, + "acc_stderr": 0.03553436368828063, + "acc_norm": 0.5353535353535354, + "acc_norm_stderr": 0.03553436368828063 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3793103448275862, + "acc_stderr": 0.04043461861916747, + "acc_norm": 0.3793103448275862, + "acc_norm_stderr": 0.04043461861916747 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.04220773659171453, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.04220773659171453 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.46218487394957986, + "acc_stderr": 0.032385469487589795, + "acc_norm": 0.46218487394957986, + "acc_norm_stderr": 0.032385469487589795 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3974358974358974, + "acc_stderr": 0.024811920017903836, + "acc_norm": 0.3974358974358974, + "acc_norm_stderr": 0.024811920017903836 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.53, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.53, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.04803752235190193, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.04803752235190193 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.35960591133004927, + "acc_stderr": 0.03376458246509567, + "acc_norm": 0.35960591133004927, + "acc_norm_stderr": 0.03376458246509567 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.44193548387096776, + "acc_stderr": 0.02825155790684974, + "acc_norm": 0.44193548387096776, + "acc_norm_stderr": 0.02825155790684974 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6837606837606838, + "acc_stderr": 0.03046365674734025, + "acc_norm": 0.6837606837606838, + "acc_norm_stderr": 0.03046365674734025 + }, + 
"harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4188679245283019, + "acc_stderr": 0.03036505082911522, + "acc_norm": 0.4188679245283019, + "acc_norm_stderr": 0.03036505082911522 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4727272727272727, + "acc_stderr": 0.04782001791380063, + "acc_norm": 0.4727272727272727, + "acc_norm_stderr": 0.04782001791380063 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2851851851851852, + "acc_stderr": 0.027528599210340492, + "acc_norm": 0.2851851851851852, + "acc_norm_stderr": 0.027528599210340492 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.03757949922943343, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.03757949922943343 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6069651741293532, + "acc_stderr": 0.0345368246603156, + "acc_norm": 0.6069651741293532, + "acc_norm_stderr": 0.0345368246603156 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3468208092485549, + "acc_stderr": 0.036291466701596636, + "acc_norm": 0.3468208092485549, + "acc_norm_stderr": 0.036291466701596636 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.4021164021164021, + "acc_stderr": 0.02525303255499768, + "acc_norm": 0.4021164021164021, + "acc_norm_stderr": 0.02525303255499768 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.375, + "acc_stderr": 0.04048439222695598, + "acc_norm": 0.375, + "acc_norm_stderr": 0.04048439222695598 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4624277456647399, + "acc_stderr": 0.026842985519615375, + "acc_norm": 0.4624277456647399, + "acc_norm_stderr": 0.026842985519615375 + }, + 
"harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.48466257668711654, + "acc_stderr": 0.03926522378708843, + "acc_norm": 0.48466257668711654, + "acc_norm_stderr": 0.03926522378708843 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.43209876543209874, + "acc_stderr": 0.027563010971606676, + "acc_norm": 0.43209876543209874, + "acc_norm_stderr": 0.027563010971606676 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5077720207253886, + "acc_stderr": 0.03608003225569654, + "acc_norm": 0.5077720207253886, + "acc_norm_stderr": 0.03608003225569654 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3157894736842105, + "acc_stderr": 0.04372748290278007, + "acc_norm": 0.3157894736842105, + "acc_norm_stderr": 0.04372748290278007 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5045871559633027, + "acc_stderr": 0.02143642095552942, + "acc_norm": 0.5045871559633027, + "acc_norm_stderr": 0.02143642095552942 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.36507936507936506, + "acc_stderr": 0.043062412591271526, + "acc_norm": 0.36507936507936506, + "acc_norm_stderr": 0.043062412591271526 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.43790849673202614, + "acc_stderr": 0.02840830202033269, + "acc_norm": 0.43790849673202614, + "acc_norm_stderr": 0.02840830202033269 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.47, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.47, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5289256198347108, + "acc_stderr": 0.04556710331269498, + "acc_norm": 0.5289256198347108, + "acc_norm_stderr": 0.04556710331269498 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3684210526315789, + "acc_stderr": 0.03925523381052932, + "acc_norm": 0.3684210526315789, + "acc_norm_stderr": 
0.03925523381052932 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.01965992249362334, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.01965992249362334 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32269503546099293, + "acc_stderr": 0.027889139300534795, + "acc_norm": 0.32269503546099293, + "acc_norm_stderr": 0.027889139300534795 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.39285714285714285, + "acc_stderr": 0.04635550135609976, + "acc_norm": 0.39285714285714285, + "acc_norm_stderr": 0.04635550135609976 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2916666666666667, + "acc_stderr": 0.03099866630456053, + "acc_norm": 0.2916666666666667, + "acc_norm_stderr": 0.03099866630456053 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.20335195530726258, + "acc_stderr": 0.013461351487507506, + "acc_norm": 0.20335195530726258, + "acc_norm_stderr": 0.013461351487507506 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3492647058823529, + "acc_stderr": 0.02895975519682487, + "acc_norm": 0.3492647058823529, + "acc_norm_stderr": 0.02895975519682487 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4897959183673469, + "acc_stderr": 0.03200255347893782, + "acc_norm": 0.4897959183673469, + "acc_norm_stderr": 0.03200255347893782 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5569620253164557, + "acc_stderr": 0.03233532777533484, + "acc_norm": 0.5569620253164557, + "acc_norm_stderr": 0.03233532777533484 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3109517601043025, + 
"acc_stderr": 0.0118222529177992, + "acc_norm": 0.3109517601043025, + "acc_norm_stderr": 0.0118222529177992 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4068627450980392, + "acc_stderr": 0.03447891136353382, + "acc_norm": 0.4068627450980392, + "acc_norm_stderr": 0.03447891136353382 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4303030303030303, + "acc_stderr": 0.03866225962879077, + "acc_norm": 0.4303030303030303, + "acc_norm_stderr": 0.03866225962879077 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2974296205630355, + "mc1_stderr": 0.016002651487361016, + "mc2": 0.44995826505029746, + "mc2_stderr": 0.016369720959182137 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3187721369539551, + "acc_stderr": 0.016021427055309578, + "acc_norm": 0.3270365997638725, + "acc_norm_stderr": 0.016129047485457022 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + 
"harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "DopeorNope/mistralopithecus-v1-dpo", + "model_sha": "ec7557bb2a4fbbb775d057f98f98ae6b4430c8d5", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/DopeorNope/mistralopithecus-v2-dpo-7b/result_2023-11-26 09:15:23.json 
b/DopeorNope/mistralopithecus-v2-dpo-7b/result_2023-11-26 09:15:23.json new file mode 100644 index 0000000000000000000000000000000000000000..5449cfa7dc3410d7847b6fee9bca5c91149a1d68 --- /dev/null +++ b/DopeorNope/mistralopithecus-v2-dpo-7b/result_2023-11-26 09:15:23.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.17235494880546076, + "acc_stderr": 0.011037113093461295, + "acc_norm": 0.2525597269624573, + "acc_norm_stderr": 0.012696728980207706 + }, + "harness|ko_hellaswag|10": { + "acc": 0.25423222465644296, + "acc_stderr": 0.00434538861452003, + "acc_norm": 0.24576777534355707, + "acc_norm_stderr": 0.0042966158627866305 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.29239766081871343, + "acc_stderr": 0.034886477134579236, + "acc_norm": 0.29239766081871343, + "acc_norm_stderr": 0.034886477134579236 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.1941747572815534, + "acc_stderr": 0.03916667762822584, + "acc_norm": 0.1941747572815534, + "acc_norm_stderr": 0.03916667762822584 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2707535121328225, + "acc_stderr": 0.015889888362560486, + "acc_norm": 0.2707535121328225, + "acc_norm_stderr": 0.015889888362560486 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04072314811876837, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04072314811876837 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.20425531914893616, + "acc_stderr": 0.02635515841334941, + "acc_norm": 0.20425531914893616, + "acc_norm_stderr": 0.02635515841334941 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.20481927710843373, + "acc_stderr": 0.03141784291663926, + "acc_norm": 0.20481927710843373, + "acc_norm_stderr": 0.03141784291663926 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 
0.2990353697749196, + "acc_stderr": 0.02600330111788514, + "acc_norm": 0.2990353697749196, + "acc_norm_stderr": 0.02600330111788514 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.20179372197309417, + "acc_stderr": 0.02693611191280227, + "acc_norm": 0.20179372197309417, + "acc_norm_stderr": 0.02693611191280227 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.22900763358778625, + "acc_stderr": 0.036853466317118506, + "acc_norm": 0.22900763358778625, + "acc_norm_stderr": 0.036853466317118506 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036845, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036845 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.25252525252525254, + "acc_stderr": 0.030954055470365907, + "acc_norm": 0.25252525252525254, + "acc_norm_stderr": 0.030954055470365907 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.296551724137931, + "acc_stderr": 0.03806142687309994, + "acc_norm": 0.296551724137931, + "acc_norm_stderr": 0.03806142687309994 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237657, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237657 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.21008403361344538, + "acc_stderr": 0.026461398717471874, + "acc_norm": 0.21008403361344538, + "acc_norm_stderr": 0.026461398717471874 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2128205128205128, + "acc_stderr": 0.020752423722128002, + "acc_norm": 0.2128205128205128, + "acc_norm_stderr": 0.020752423722128002 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + 
"harness|ko_mmlu_jurisprudence|5": { + "acc": 0.23148148148148148, + "acc_stderr": 0.04077494709252627, + "acc_norm": 0.23148148148148148, + "acc_norm_stderr": 0.04077494709252627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2955665024630542, + "acc_stderr": 0.032104944337514575, + "acc_norm": 0.2955665024630542, + "acc_norm_stderr": 0.032104944337514575 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.25161290322580643, + "acc_stderr": 0.024685979286239956, + "acc_norm": 0.25161290322580643, + "acc_norm_stderr": 0.024685979286239956 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2564102564102564, + "acc_stderr": 0.028605953702004253, + "acc_norm": 0.2564102564102564, + "acc_norm_stderr": 0.028605953702004253 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2188679245283019, + "acc_stderr": 0.025447863825108597, + "acc_norm": 0.2188679245283019, + "acc_norm_stderr": 0.025447863825108597 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.20909090909090908, + "acc_stderr": 0.03895091015724135, + "acc_norm": 0.20909090909090908, + "acc_norm_stderr": 0.03895091015724135 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.026842057873833706, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.026842057873833706 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.271523178807947, + "acc_stderr": 0.03631329803969653, + "acc_norm": 0.271523178807947, + "acc_norm_stderr": 0.03631329803969653 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.24875621890547264, + "acc_stderr": 0.030567675938916714, + "acc_norm": 0.24875621890547264, + "acc_norm_stderr": 0.030567675938916714 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.24855491329479767, + "acc_stderr": 0.03295304696818317, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 0.03295304696818317 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2671957671957672, + "acc_stderr": 
0.022789673145776575, + "acc_norm": 0.2671957671957672, + "acc_norm_stderr": 0.022789673145776575 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2569444444444444, + "acc_stderr": 0.03653946969442099, + "acc_norm": 0.2569444444444444, + "acc_norm_stderr": 0.03653946969442099 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.18, + "acc_stderr": 0.03861229196653695, + "acc_norm": 0.18, + "acc_norm_stderr": 0.03861229196653695 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.29190751445086704, + "acc_stderr": 0.02447699407624734, + "acc_norm": 0.29190751445086704, + "acc_norm_stderr": 0.02447699407624734 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3006134969325153, + "acc_stderr": 0.03602511318806771, + "acc_norm": 0.3006134969325153, + "acc_norm_stderr": 0.03602511318806771 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2932098765432099, + "acc_stderr": 0.025329888171900926, + "acc_norm": 0.2932098765432099, + "acc_norm_stderr": 0.025329888171900926 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.22797927461139897, + "acc_stderr": 0.030276909945178256, + "acc_norm": 0.22797927461139897, + "acc_norm_stderr": 0.030276909945178256 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.04049339297748141, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.04049339297748141 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.22201834862385322, + "acc_stderr": 0.017818849564796617, + "acc_norm": 0.22201834862385322, + "acc_norm_stderr": 0.017818849564796617 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.15079365079365079, 
+ "acc_stderr": 0.03200686497287392, + "acc_norm": 0.15079365079365079, + "acc_norm_stderr": 0.03200686497287392 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.25163398692810457, + "acc_stderr": 0.0248480182638752, + "acc_norm": 0.25163398692810457, + "acc_norm_stderr": 0.0248480182638752 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.371900826446281, + "acc_stderr": 0.04412015806624503, + "acc_norm": 0.371900826446281, + "acc_norm_stderr": 0.04412015806624503 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3026315789473684, + "acc_stderr": 0.03738520676119668, + "acc_norm": 0.3026315789473684, + "acc_norm_stderr": 0.03738520676119668 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2761437908496732, + "acc_stderr": 0.018087276935663133, + "acc_norm": 0.2761437908496732, + "acc_norm_stderr": 0.018087276935663133 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2695035460992908, + "acc_stderr": 0.026469036818590634, + "acc_norm": 0.2695035460992908, + "acc_norm_stderr": 0.026469036818590634 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.24107142857142858, + "acc_stderr": 0.04059867246952689, + "acc_norm": 0.24107142857142858, + "acc_norm_stderr": 0.04059867246952689 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.21296296296296297, + "acc_stderr": 0.027920963147993662, + "acc_norm": 0.21296296296296297, + "acc_norm_stderr": 0.027920963147993662 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24692737430167597, + "acc_stderr": 0.014422292204808852, + "acc_norm": 0.24692737430167597, + "acc_norm_stderr": 0.014422292204808852 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + 
"harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.16544117647058823, + "acc_stderr": 0.02257177102549475, + "acc_norm": 0.16544117647058823, + "acc_norm_stderr": 0.02257177102549475 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.24081632653061225, + "acc_stderr": 0.027372942201788167, + "acc_norm": 0.24081632653061225, + "acc_norm_stderr": 0.027372942201788167 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.26582278481012656, + "acc_stderr": 0.028756799629658332, + "acc_norm": 0.26582278481012656, + "acc_norm_stderr": 0.028756799629658332 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.27053455019556716, + "acc_stderr": 0.011345996743539265, + "acc_norm": 0.27053455019556716, + "acc_norm_stderr": 0.011345996743539265 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.25980392156862747, + "acc_stderr": 0.030778554678693264, + "acc_norm": 0.25980392156862747, + "acc_norm_stderr": 0.030778554678693264 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.28484848484848485, + "acc_stderr": 0.035243908445117836, + "acc_norm": 0.28484848484848485, + "acc_norm_stderr": 0.035243908445117836 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24479804161566707, + "mc1_stderr": 0.015051869486715014, + "mc2": 0.496495319717773, + "mc2_stderr": 0.016950807749782918 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.08382526564344746, + "acc_stderr": 0.009527773913592165, + "acc_norm": 0.38961038961038963, + "acc_norm_stderr": 0.016766161671893494 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 
1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + 
"harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "DopeorNope/mistralopithecus-v2-dpo-7b", + "model_sha": "4481f2a07c5b4c31f650c94b558bec12ff8cddff", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/ENERGY-DRINK-LOVE/DataVortexS_dpov3/result_2024-03-15 02:41:08.json b/ENERGY-DRINK-LOVE/DataVortexS_dpov3/result_2024-03-15 02:41:08.json new file mode 100644 index 0000000000000000000000000000000000000000..749c0d20d1df32d8c728fb2d937f26d9ad150809 --- /dev/null +++ b/ENERGY-DRINK-LOVE/DataVortexS_dpov3/result_2024-03-15 02:41:08.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.5051194539249146, + "acc_stderr": 0.014610624890309154, + "acc_norm": 0.5622866894197952, + "acc_norm_stderr": 0.014497573881108294 + }, + "harness|ko_hellaswag|10": { + "acc": 0.5048795060744872, + "acc_stderr": 0.004989543796593296, + "acc_norm": 0.6914957179844653, + "acc_norm_stderr": 0.0046093200248939 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5847953216374269, + "acc_stderr": 0.03779275945503201, + "acc_norm": 0.5847953216374269, + "acc_norm_stderr": 0.03779275945503201 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6601941747572816, + "acc_stderr": 0.04689765937278133, + "acc_norm": 0.6601941747572816, + "acc_norm_stderr": 0.04689765937278133 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6564495530012772, + "acc_stderr": 0.01698214563265247, + "acc_norm": 
0.6564495530012772, + "acc_norm_stderr": 0.01698214563265247 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.042925967182569816, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.042925967182569816 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.451063829787234, + "acc_stderr": 0.032529096196131965, + "acc_norm": 0.451063829787234, + "acc_norm_stderr": 0.032529096196131965 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.40963855421686746, + "acc_stderr": 0.03828401115079023, + "acc_norm": 0.40963855421686746, + "acc_norm_stderr": 0.03828401115079023 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.617363344051447, + "acc_stderr": 0.027604689028582, + "acc_norm": 0.617363344051447, + "acc_norm_stderr": 0.027604689028582 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5829596412556054, + "acc_stderr": 0.03309266936071721, + "acc_norm": 0.5829596412556054, + "acc_norm_stderr": 0.03309266936071721 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6183206106870229, + "acc_stderr": 0.0426073515764456, + "acc_norm": 0.6183206106870229, + "acc_norm_stderr": 0.0426073515764456 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.48, + "acc_stderr": 0.05021167315686779, + "acc_norm": 0.48, + "acc_norm_stderr": 0.05021167315686779 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7121212121212122, + "acc_stderr": 0.03225883512300993, + "acc_norm": 0.7121212121212122, + "acc_norm_stderr": 0.03225883512300993 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3931034482758621, + "acc_stderr": 0.040703290137070705, + "acc_norm": 0.3931034482758621, + "acc_norm_stderr": 0.040703290137070705 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.39215686274509803, + "acc_stderr": 0.04858083574266346, + "acc_norm": 
0.39215686274509803, + "acc_norm_stderr": 0.04858083574266346 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6176470588235294, + "acc_stderr": 0.03156663099215416, + "acc_norm": 0.6176470588235294, + "acc_norm_stderr": 0.03156663099215416 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5358974358974359, + "acc_stderr": 0.025285585990017866, + "acc_norm": 0.5358974358974359, + "acc_norm_stderr": 0.025285585990017866 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.41, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.41, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.04557239513497751, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.04557239513497751 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4039408866995074, + "acc_stderr": 0.034524539038220385, + "acc_norm": 0.4039408866995074, + "acc_norm_stderr": 0.034524539038220385 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5774193548387097, + "acc_stderr": 0.02810096472427264, + "acc_norm": 0.5774193548387097, + "acc_norm_stderr": 0.02810096472427264 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7649572649572649, + "acc_stderr": 0.02777883590493543, + "acc_norm": 0.7649572649572649, + "acc_norm_stderr": 0.02777883590493543 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5358490566037736, + "acc_stderr": 0.030693675018458006, + "acc_norm": 0.5358490566037736, + "acc_norm_stderr": 0.030693675018458006 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5818181818181818, + "acc_stderr": 0.04724577405731571, + "acc_norm": 0.5818181818181818, + "acc_norm_stderr": 0.04724577405731571 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.35185185185185186, + 
"acc_stderr": 0.02911661760608301, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.02911661760608301 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.03780445850526733, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.03780445850526733 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6467661691542289, + "acc_stderr": 0.03379790611796777, + "acc_norm": 0.6467661691542289, + "acc_norm_stderr": 0.03379790611796777 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4624277456647399, + "acc_stderr": 0.0380168510452446, + "acc_norm": 0.4624277456647399, + "acc_norm_stderr": 0.0380168510452446 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3968253968253968, + "acc_stderr": 0.025197101074246483, + "acc_norm": 0.3968253968253968, + "acc_norm_stderr": 0.025197101074246483 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5416666666666666, + "acc_stderr": 0.04166666666666665, + "acc_norm": 0.5416666666666666, + "acc_norm_stderr": 0.04166666666666665 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.7, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.7, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5260115606936416, + "acc_stderr": 0.02688264343402289, + "acc_norm": 0.5260115606936416, + "acc_norm_stderr": 0.02688264343402289 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5214723926380368, + "acc_stderr": 0.03924746876751129, + "acc_norm": 0.5214723926380368, + "acc_norm_stderr": 0.03924746876751129 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5925925925925926, + "acc_stderr": 0.027339546640662737, + "acc_norm": 0.5925925925925926, + "acc_norm_stderr": 0.027339546640662737 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, 
+ "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7150259067357513, + "acc_stderr": 0.032577140777096614, + "acc_norm": 0.7150259067357513, + "acc_norm_stderr": 0.032577140777096614 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.49122807017543857, + "acc_stderr": 0.04702880432049615, + "acc_norm": 0.49122807017543857, + "acc_norm_stderr": 0.04702880432049615 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6862385321100918, + "acc_stderr": 0.01989472334146913, + "acc_norm": 0.6862385321100918, + "acc_norm_stderr": 0.01989472334146913 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.042857142857142816, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.042857142857142816 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5784313725490197, + "acc_stderr": 0.02827549015679145, + "acc_norm": 0.5784313725490197, + "acc_norm_stderr": 0.02827549015679145 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7024793388429752, + "acc_stderr": 0.041733491480835, + "acc_norm": 0.7024793388429752, + "acc_norm_stderr": 0.041733491480835 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.6052631578947368, + "acc_stderr": 0.039777499346220734, + "acc_norm": 0.6052631578947368, + "acc_norm_stderr": 0.039777499346220734 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5098039215686274, + "acc_stderr": 0.02022394600507431, + "acc_norm": 0.5098039215686274, + "acc_norm_stderr": 0.02022394600507431 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3617021276595745, + "acc_stderr": 0.028663820147199495, + "acc_norm": 0.3617021276595745, + "acc_norm_stderr": 0.028663820147199495 + }, + 
"harness|ko_mmlu_machine_learning|5": { + "acc": 0.38392857142857145, + "acc_stderr": 0.046161430750285455, + "acc_norm": 0.38392857142857145, + "acc_norm_stderr": 0.046161430750285455 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4861111111111111, + "acc_stderr": 0.03408655867977749, + "acc_norm": 0.4861111111111111, + "acc_norm_stderr": 0.03408655867977749 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.39553072625698327, + "acc_stderr": 0.01635341541007578, + "acc_norm": 0.39553072625698327, + "acc_norm_stderr": 0.01635341541007578 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.57, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.57, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.47794117647058826, + "acc_stderr": 0.030343264224213528, + "acc_norm": 0.47794117647058826, + "acc_norm_stderr": 0.030343264224213528 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5755102040816327, + "acc_stderr": 0.03164209487942942, + "acc_norm": 0.5755102040816327, + "acc_norm_stderr": 0.03164209487942942 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7721518987341772, + "acc_stderr": 0.02730348459906941, + "acc_norm": 0.7721518987341772, + "acc_norm_stderr": 0.02730348459906941 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.4380704041720991, + "acc_stderr": 0.012671902782567638, + "acc_norm": 0.4380704041720991, + "acc_norm_stderr": 0.012671902782567638 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6421568627450981, + "acc_stderr": 0.03364487286088297, + "acc_norm": 0.6421568627450981, + "acc_norm_stderr": 0.03364487286088297 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.03681050869161549, + "acc_norm": 
0.6666666666666666, + "acc_norm_stderr": 0.03681050869161549 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.48714810281517745, + "mc1_stderr": 0.017497717944299825, + "mc2": 0.6787477378614019, + "mc2_stderr": 0.01581366676635949 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5371900826446281, + "acc_stderr": 0.0171427361176433, + "acc_norm": 0.5489964580873672, + "acc_norm_stderr": 0.017107618859549357 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + 
"harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "ENERGY-DRINK-LOVE/DataVortexS_dpov3", + "model_sha": "91f4236822f715e7ac8297c8a083ca5f5f87f9c9", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/ENERGY-DRINK-LOVE/SOLAR_merge_DPOv3/result_2024-03-07 01:59:27.json b/ENERGY-DRINK-LOVE/SOLAR_merge_DPOv3/result_2024-03-07 01:59:27.json new file mode 100644 index 0000000000000000000000000000000000000000..d0db64b1edf5fcd25af52d5f5a0b6585a7b284e6 --- /dev/null +++ b/ENERGY-DRINK-LOVE/SOLAR_merge_DPOv3/result_2024-03-07 01:59:27.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4522184300341297, + "acc_stderr": 0.014544519880633815, + "acc_norm": 0.5119453924914675, + 
"acc_norm_stderr": 0.014607220340597164 + }, + "harness|ko_hellaswag|10": { + "acc": 0.44831706831308504, + "acc_stderr": 0.004963053161193614, + "acc_norm": 0.6139215295757817, + "acc_norm_stderr": 0.004858539527872465 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.038110796698335316, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.038110796698335316 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6407766990291263, + "acc_stderr": 0.04750458399041697, + "acc_norm": 0.6407766990291263, + "acc_norm_stderr": 0.04750458399041697 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6028097062579821, + "acc_stderr": 0.01749790503715935, + "acc_norm": 0.6028097062579821, + "acc_norm_stderr": 0.01749790503715935 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.04244633238353229, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.04244633238353229 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036844, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036844 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.5234042553191489, + "acc_stderr": 0.0326501947503358, + "acc_norm": 0.5234042553191489, + "acc_norm_stderr": 0.0326501947503358 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4457831325301205, + "acc_stderr": 0.03869543323472101, + "acc_norm": 0.4457831325301205, + "acc_norm_stderr": 0.03869543323472101 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5659163987138264, + "acc_stderr": 0.028150232244535604, + "acc_norm": 0.5659163987138264, + "acc_norm_stderr": 0.028150232244535604 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.600896860986547, + "acc_stderr": 0.03286745312567961, + "acc_norm": 0.600896860986547, + "acc_norm_stderr": 0.03286745312567961 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5419847328244275, + "acc_stderr": 0.04369802690578756, + "acc_norm": 0.5419847328244275, + 
"acc_norm_stderr": 0.04369802690578756 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7121212121212122, + "acc_stderr": 0.03225883512300993, + "acc_norm": 0.7121212121212122, + "acc_norm_stderr": 0.03225883512300993 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.04104269211806232, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.04104269211806232 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04690650298201942, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04690650298201942 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6176470588235294, + "acc_stderr": 0.031566630992154156, + "acc_norm": 0.6176470588235294, + "acc_norm_stderr": 0.031566630992154156 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5384615384615384, + "acc_stderr": 0.025275892070240648, + "acc_norm": 0.5384615384615384, + "acc_norm_stderr": 0.025275892070240648 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.62, + "acc_stderr": 0.04878317312145631, + "acc_norm": 0.62, + "acc_norm_stderr": 0.04878317312145631 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6388888888888888, + "acc_stderr": 0.04643454608906275, + "acc_norm": 0.6388888888888888, + "acc_norm_stderr": 0.04643454608906275 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.33497536945812806, + "acc_stderr": 0.033208527423483104, + "acc_norm": 0.33497536945812806, + "acc_norm_stderr": 0.033208527423483104 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5838709677419355, + "acc_stderr": 0.02804098138076153, + 
"acc_norm": 0.5838709677419355, + "acc_norm_stderr": 0.02804098138076153 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7478632478632479, + "acc_stderr": 0.02844796547623102, + "acc_norm": 0.7478632478632479, + "acc_norm_stderr": 0.02844796547623102 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5169811320754717, + "acc_stderr": 0.030755120364119905, + "acc_norm": 0.5169811320754717, + "acc_norm_stderr": 0.030755120364119905 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5545454545454546, + "acc_stderr": 0.047605488214603246, + "acc_norm": 0.5545454545454546, + "acc_norm_stderr": 0.047605488214603246 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.02944316932303154, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.02944316932303154 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.36423841059602646, + "acc_stderr": 0.03929111781242741, + "acc_norm": 0.36423841059602646, + "acc_norm_stderr": 0.03929111781242741 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6169154228855721, + "acc_stderr": 0.034375193373382504, + "acc_norm": 0.6169154228855721, + "acc_norm_stderr": 0.034375193373382504 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5144508670520231, + "acc_stderr": 0.03810871630454764, + "acc_norm": 0.5144508670520231, + "acc_norm_stderr": 0.03810871630454764 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.4470899470899471, + "acc_stderr": 0.025606723995777025, + "acc_norm": 0.4470899470899471, + "acc_norm_stderr": 0.025606723995777025 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4652777777777778, + "acc_stderr": 0.04171115858181618, + "acc_norm": 0.4652777777777778, + "acc_norm_stderr": 0.04171115858181618 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 
0.71, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.71, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5809248554913294, + "acc_stderr": 0.02656417811142263, + "acc_norm": 0.5809248554913294, + "acc_norm_stderr": 0.02656417811142263 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.49079754601226994, + "acc_stderr": 0.03927705600787443, + "acc_norm": 0.49079754601226994, + "acc_norm_stderr": 0.03927705600787443 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5771604938271605, + "acc_stderr": 0.027487472980871598, + "acc_norm": 0.5771604938271605, + "acc_norm_stderr": 0.027487472980871598 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7357512953367875, + "acc_stderr": 0.03182155050916648, + "acc_norm": 0.7357512953367875, + "acc_norm_stderr": 0.03182155050916648 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.4649122807017544, + "acc_stderr": 0.046920083813689104, + "acc_norm": 0.4649122807017544, + "acc_norm_stderr": 0.046920083813689104 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6568807339449542, + "acc_stderr": 0.02035477773608604, + "acc_norm": 0.6568807339449542, + "acc_norm_stderr": 0.02035477773608604 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.044444444444444495, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.044444444444444495 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5686274509803921, + "acc_stderr": 0.028358956313423545, + "acc_norm": 0.5686274509803921, + "acc_norm_stderr": 0.028358956313423545 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 
0.6942148760330579, + "acc_stderr": 0.04205953933884124, + "acc_norm": 0.6942148760330579, + "acc_norm_stderr": 0.04205953933884124 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5657894736842105, + "acc_stderr": 0.040335656678483205, + "acc_norm": 0.5657894736842105, + "acc_norm_stderr": 0.040335656678483205 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4869281045751634, + "acc_stderr": 0.020220920829626916, + "acc_norm": 0.4869281045751634, + "acc_norm_stderr": 0.020220920829626916 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.39361702127659576, + "acc_stderr": 0.029144544781596147, + "acc_norm": 0.39361702127659576, + "acc_norm_stderr": 0.029144544781596147 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.41964285714285715, + "acc_stderr": 0.04684099321077106, + "acc_norm": 0.41964285714285715, + "acc_norm_stderr": 0.04684099321077106 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5, + "acc_stderr": 0.034099716973523674, + "acc_norm": 0.5, + "acc_norm_stderr": 0.034099716973523674 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2681564245810056, + "acc_stderr": 0.014816119635317012, + "acc_norm": 0.2681564245810056, + "acc_norm_stderr": 0.014816119635317012 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.73, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.73, + "acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5073529411764706, + "acc_stderr": 0.030369552523902173, + "acc_norm": 0.5073529411764706, + "acc_norm_stderr": 0.030369552523902173 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6163265306122448, + "acc_stderr": 0.03113088039623593, + "acc_norm": 0.6163265306122448, + "acc_norm_stderr": 0.03113088039623593 + }, + 
"harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7215189873417721, + "acc_stderr": 0.02917868230484253, + "acc_norm": 0.7215189873417721, + "acc_norm_stderr": 0.02917868230484253 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.40547588005215124, + "acc_stderr": 0.012539960672377214, + "acc_norm": 0.40547588005215124, + "acc_norm_stderr": 0.012539960672377214 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6519607843137255, + "acc_stderr": 0.03343311240488418, + "acc_norm": 0.6519607843137255, + "acc_norm_stderr": 0.03343311240488418 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6484848484848484, + "acc_stderr": 0.037282069986826503, + "acc_norm": 0.6484848484848484, + "acc_norm_stderr": 0.037282069986826503 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.4112607099143207, + "mc1_stderr": 0.017225627083660853, + "mc2": 0.5811015486950803, + "mc2_stderr": 0.015747385952450874 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4793388429752066, + "acc_stderr": 0.017175671279836446, + "acc_norm": 0.48760330578512395, + "acc_norm_stderr": 0.017185069732676528 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 
1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "ENERGY-DRINK-LOVE/SOLAR_merge_DPOv3", + "model_sha": "8881819d4eb5c58d069fcf4a151ba4167d65d70b", + "model_dtype": 
"torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/ENERGY-DRINK-LOVE/TQ3_leaderboard_inst_v2/result_2024-03-26 09:17:13.json b/ENERGY-DRINK-LOVE/TQ3_leaderboard_inst_v2/result_2024-03-26 09:17:13.json new file mode 100644 index 0000000000000000000000000000000000000000..430f2afb0712ac36ab353e3459a0ccf542571a77 --- /dev/null +++ b/ENERGY-DRINK-LOVE/TQ3_leaderboard_inst_v2/result_2024-03-26 09:17:13.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.6493174061433447, + "acc_stderr": 0.013944635930726094, + "acc_norm": 0.6988054607508533, + "acc_norm_stderr": 0.013406741767847626 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4402509460266879, + "acc_stderr": 0.004954026775425766, + "acc_norm": 0.5941047600079665, + "acc_norm_stderr": 0.004900608529778611 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5263157894736842, + "acc_stderr": 0.03829509868994727, + "acc_norm": 0.5263157894736842, + "acc_norm_stderr": 0.03829509868994727 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.42718446601941745, + "acc_stderr": 0.04897957737781168, + "acc_norm": 0.42718446601941745, + "acc_norm_stderr": 0.04897957737781168 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5504469987228607, + "acc_stderr": 0.017788725283507337, + "acc_norm": 0.5504469987228607, + "acc_norm_stderr": 0.017788725283507337 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4, + "acc_stderr": 0.04232073695151589, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04232073695151589 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.30638297872340425, + "acc_stderr": 0.030135906478517566, + "acc_norm": 0.30638297872340425, + "acc_norm_stderr": 
0.030135906478517566 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4036144578313253, + "acc_stderr": 0.03819486140758398, + "acc_norm": 0.4036144578313253, + "acc_norm_stderr": 0.03819486140758398 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.43729903536977494, + "acc_stderr": 0.02817391776176287, + "acc_norm": 0.43729903536977494, + "acc_norm_stderr": 0.02817391776176287 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.47085201793721976, + "acc_stderr": 0.03350073248773404, + "acc_norm": 0.47085201793721976, + "acc_norm_stderr": 0.03350073248773404 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3816793893129771, + "acc_stderr": 0.042607351576445594, + "acc_norm": 0.3816793893129771, + "acc_norm_stderr": 0.042607351576445594 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.42, + "acc_stderr": 0.04960449637488583, + "acc_norm": 0.42, + "acc_norm_stderr": 0.04960449637488583 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.4393939393939394, + "acc_stderr": 0.0353608594752948, + "acc_norm": 0.4393939393939394, + "acc_norm_stderr": 0.0353608594752948 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.31724137931034485, + "acc_stderr": 0.038783523721386215, + "acc_norm": 0.31724137931034485, + "acc_norm_stderr": 0.038783523721386215 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.04220773659171453, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.04220773659171453 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.36554621848739494, + "acc_stderr": 0.0312821770636846, + "acc_norm": 0.36554621848739494, + "acc_norm_stderr": 0.0312821770636846 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.358974358974359, + "acc_stderr": 0.024321738484602357, + "acc_norm": 0.358974358974359, + "acc_norm_stderr": 0.024321738484602357 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + 
"acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.19, + "acc_stderr": 0.039427724440366234, + "acc_norm": 0.19, + "acc_norm_stderr": 0.039427724440366234 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.28703703703703703, + "acc_stderr": 0.043733130409147614, + "acc_norm": 0.28703703703703703, + "acc_norm_stderr": 0.043733130409147614 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.29064039408866993, + "acc_stderr": 0.031947400722655395, + "acc_norm": 0.29064039408866993, + "acc_norm_stderr": 0.031947400722655395 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3903225806451613, + "acc_stderr": 0.027751256636969576, + "acc_norm": 0.3903225806451613, + "acc_norm_stderr": 0.027751256636969576 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5598290598290598, + "acc_stderr": 0.032520741720630506, + "acc_norm": 0.5598290598290598, + "acc_norm_stderr": 0.032520741720630506 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.27169811320754716, + "acc_stderr": 0.027377706624670713, + "acc_norm": 0.27169811320754716, + "acc_norm_stderr": 0.027377706624670713 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.34545454545454546, + "acc_stderr": 0.04554619617541054, + "acc_norm": 0.34545454545454546, + "acc_norm_stderr": 0.04554619617541054 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.02684205787383371, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.02684205787383371 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + "acc_stderr": 0.03684881521389024, + "acc_norm": 0.2847682119205298, + "acc_norm_stderr": 0.03684881521389024 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.48756218905472637, + "acc_stderr": 0.0353443984853958, + "acc_norm": 0.48756218905472637, + "acc_norm_stderr": 0.0353443984853958 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 
0.26011560693641617, + "acc_stderr": 0.03345036916788991, + "acc_norm": 0.26011560693641617, + "acc_norm_stderr": 0.03345036916788991 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2751322751322751, + "acc_stderr": 0.023000086859068642, + "acc_norm": 0.2751322751322751, + "acc_norm_stderr": 0.023000086859068642 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.039420826399272135, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.039420826399272135 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816508, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816508 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2658959537572254, + "acc_stderr": 0.023786203255508283, + "acc_norm": 0.2658959537572254, + "acc_norm_stderr": 0.023786203255508283 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3374233128834356, + "acc_stderr": 0.03714908409935574, + "acc_norm": 0.3374233128834356, + "acc_norm_stderr": 0.03714908409935574 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.026571483480719974, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.026571483480719974 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.47668393782383417, + "acc_stderr": 0.03604513672442205, + "acc_norm": 0.47668393782383417, + "acc_norm_stderr": 0.03604513672442205 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2894736842105263, + "acc_stderr": 0.04266339443159394, + "acc_norm": 0.2894736842105263, + "acc_norm_stderr": 0.04266339443159394 + }, + 
"harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.45871559633027525, + "acc_stderr": 0.021364122533881695, + "acc_norm": 0.45871559633027525, + "acc_norm_stderr": 0.021364122533881695 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.1984126984126984, + "acc_stderr": 0.03567016675276862, + "acc_norm": 0.1984126984126984, + "acc_norm_stderr": 0.03567016675276862 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.35294117647058826, + "acc_stderr": 0.027363593284684944, + "acc_norm": 0.35294117647058826, + "acc_norm_stderr": 0.027363593284684944 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.36363636363636365, + "acc_stderr": 0.04391326286724071, + "acc_norm": 0.36363636363636365, + "acc_norm_stderr": 0.04391326286724071 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.23026315789473684, + "acc_stderr": 0.03426059424403165, + "acc_norm": 0.23026315789473684, + "acc_norm_stderr": 0.03426059424403165 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.32516339869281047, + "acc_stderr": 0.018950886770806297, + "acc_norm": 0.32516339869281047, + "acc_norm_stderr": 0.018950886770806297 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2375886524822695, + "acc_stderr": 0.025389512552729906, + "acc_norm": 0.2375886524822695, + "acc_norm_stderr": 0.025389512552729906 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3392857142857143, + "acc_stderr": 0.04493949068613539, + "acc_norm": 0.3392857142857143, + "acc_norm_stderr": 0.04493949068613539 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4212962962962963, + "acc_stderr": 0.033674621388960775, + "acc_norm": 0.4212962962962963, + "acc_norm_stderr": 0.033674621388960775 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24581005586592178, + "acc_stderr": 0.014400296429225601, + 
"acc_norm": 0.24581005586592178, + "acc_norm_stderr": 0.014400296429225601 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.18, + "acc_stderr": 0.03861229196653694, + "acc_norm": 0.18, + "acc_norm_stderr": 0.03861229196653694 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.04943110704237103, + "acc_norm": 0.41, + "acc_norm_stderr": 0.04943110704237103 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.39338235294117646, + "acc_stderr": 0.02967428828131117, + "acc_norm": 0.39338235294117646, + "acc_norm_stderr": 0.02967428828131117 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.33877551020408164, + "acc_stderr": 0.03029950656215418, + "acc_norm": 0.33877551020408164, + "acc_norm_stderr": 0.03029950656215418 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.48945147679324896, + "acc_stderr": 0.032539983791662855, + "acc_norm": 0.48945147679324896, + "acc_norm_stderr": 0.032539983791662855 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2522816166883963, + "acc_stderr": 0.01109278905687522, + "acc_norm": 0.2522816166883963, + "acc_norm_stderr": 0.01109278905687522 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.46568627450980393, + "acc_stderr": 0.03501038327635897, + "acc_norm": 0.46568627450980393, + "acc_norm_stderr": 0.03501038327635897 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3212121212121212, + "acc_stderr": 0.03646204963253812, + "acc_norm": 0.3212121212121212, + "acc_norm_stderr": 0.03646204963253812 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.4430844553243574, + "mc1_stderr": 0.017389730346877103, + "mc2": 0.5646747011741157, + "mc2_stderr": 0.01513052312933085 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.31759149940968123, + "acc_stderr": 0.016005581876229313, + "acc_norm": 0.36363636363636365, + "acc_norm_stderr": 0.016538691603327712 + } + }, + "versions": { + "all": 0, + 
"harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + 
"harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "ENERGY-DRINK-LOVE/TQ3_leaderboard_inst_v2", + "model_sha": "bc5ee05438d6d41b25e86b9d0bff1d1417ff0475", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/ENERGY-DRINK-LOVE/deepnoid_DPOv3/result_2024-03-18 22:34:22.json b/ENERGY-DRINK-LOVE/deepnoid_DPOv3/result_2024-03-18 22:34:22.json new file mode 100644 index 0000000000000000000000000000000000000000..ef745ff9287cac106b0a0bb3094141078f818970 --- /dev/null +++ b/ENERGY-DRINK-LOVE/deepnoid_DPOv3/result_2024-03-18 22:34:22.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.6501706484641638, + "acc_stderr": 0.013936809212158287, + "acc_norm": 0.7073378839590444, + "acc_norm_stderr": 0.013295916103619418 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4049990041824338, + "acc_stderr": 0.004898886080687925, + "acc_norm": 0.5402310296753635, + "acc_norm_stderr": 0.004973602904247793 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6140350877192983, + "acc_stderr": 0.03733756969066164, + "acc_norm": 0.6140350877192983, + "acc_norm_stderr": 0.03733756969066164 + }, + "harness|ko_mmlu_management|5": 
{ + "acc": 0.6116504854368932, + "acc_stderr": 0.048257293373563895, + "acc_norm": 0.6116504854368932, + "acc_norm_stderr": 0.048257293373563895 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6628352490421456, + "acc_stderr": 0.016905207420803547, + "acc_norm": 0.6628352490421456, + "acc_norm_stderr": 0.016905207420803547 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.42962962962962964, + "acc_stderr": 0.04276349494376599, + "acc_norm": 0.42962962962962964, + "acc_norm_stderr": 0.04276349494376599 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.39, + "acc_stderr": 0.049020713000019756, + "acc_norm": 0.39, + "acc_norm_stderr": 0.049020713000019756 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.44680851063829785, + "acc_stderr": 0.0325005368436584, + "acc_norm": 0.44680851063829785, + "acc_norm_stderr": 0.0325005368436584 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.5120481927710844, + "acc_stderr": 0.03891364495835817, + "acc_norm": 0.5120481927710844, + "acc_norm_stderr": 0.03891364495835817 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.617363344051447, + "acc_stderr": 0.027604689028581975, + "acc_norm": 0.617363344051447, + "acc_norm_stderr": 0.027604689028581975 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5964125560538116, + "acc_stderr": 0.032928028193303135, + "acc_norm": 0.5964125560538116, + "acc_norm_stderr": 0.032928028193303135 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5572519083969466, + "acc_stderr": 0.04356447202665069, + "acc_norm": 0.5572519083969466, + "acc_norm_stderr": 0.04356447202665069 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.45, + "acc_stderr": 0.04999999999999999, + "acc_norm": 0.45, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6717171717171717, + "acc_stderr": 0.033456784227567773, + "acc_norm": 0.6717171717171717, + "acc_norm_stderr": 0.033456784227567773 + }, + 
"harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.47586206896551725, + "acc_stderr": 0.041618085035015295, + "acc_norm": 0.47586206896551725, + "acc_norm_stderr": 0.041618085035015295 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.35294117647058826, + "acc_stderr": 0.04755129616062948, + "acc_norm": 0.35294117647058826, + "acc_norm_stderr": 0.04755129616062948 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.542016806722689, + "acc_stderr": 0.03236361111951941, + "acc_norm": 0.542016806722689, + "acc_norm_stderr": 0.03236361111951941 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5794871794871795, + "acc_stderr": 0.02502861027671087, + "acc_norm": 0.5794871794871795, + "acc_norm_stderr": 0.02502861027671087 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5185185185185185, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.5185185185185185, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4630541871921182, + "acc_stderr": 0.035083705204426656, + "acc_norm": 0.4630541871921182, + "acc_norm_stderr": 0.035083705204426656 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.603225806451613, + "acc_stderr": 0.027831231605767944, + "acc_norm": 0.603225806451613, + "acc_norm_stderr": 0.027831231605767944 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.782051282051282, + "acc_stderr": 0.027046857630716688, + "acc_norm": 0.782051282051282, + "acc_norm_stderr": 0.027046857630716688 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.539622641509434, + "acc_stderr": 0.03067609659938918, + "acc_norm": 0.539622641509434, + 
"acc_norm_stderr": 0.03067609659938918 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6, + "acc_stderr": 0.0469237132203465, + "acc_norm": 0.6, + "acc_norm_stderr": 0.0469237132203465 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.32592592592592595, + "acc_stderr": 0.02857834836547307, + "acc_norm": 0.32592592592592595, + "acc_norm_stderr": 0.02857834836547307 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3509933774834437, + "acc_stderr": 0.03896981964257374, + "acc_norm": 0.3509933774834437, + "acc_norm_stderr": 0.03896981964257374 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6865671641791045, + "acc_stderr": 0.03280188205348642, + "acc_norm": 0.6865671641791045, + "acc_norm_stderr": 0.03280188205348642 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5086705202312138, + "acc_stderr": 0.038118909889404126, + "acc_norm": 0.5086705202312138, + "acc_norm_stderr": 0.038118909889404126 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3862433862433862, + "acc_stderr": 0.025075981767601688, + "acc_norm": 0.3862433862433862, + "acc_norm_stderr": 0.025075981767601688 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4861111111111111, + "acc_stderr": 0.041795966175810016, + "acc_norm": 0.4861111111111111, + "acc_norm_stderr": 0.041795966175810016 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.79, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.79, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5549132947976878, + "acc_stderr": 0.02675625512966377, + "acc_norm": 0.5549132947976878, + "acc_norm_stderr": 0.02675625512966377 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5644171779141104, + "acc_stderr": 0.03895632464138937, + "acc_norm": 0.5644171779141104, 
+ "acc_norm_stderr": 0.03895632464138937 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5740740740740741, + "acc_stderr": 0.02751374728437943, + "acc_norm": 0.5740740740740741, + "acc_norm_stderr": 0.02751374728437943 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6683937823834197, + "acc_stderr": 0.03397636541089118, + "acc_norm": 0.6683937823834197, + "acc_norm_stderr": 0.03397636541089118 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3508771929824561, + "acc_stderr": 0.044895393502706986, + "acc_norm": 0.3508771929824561, + "acc_norm_stderr": 0.044895393502706986 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.7211009174311926, + "acc_stderr": 0.019227468876463524, + "acc_norm": 0.7211009174311926, + "acc_norm_stderr": 0.019227468876463524 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.36507936507936506, + "acc_stderr": 0.04306241259127153, + "acc_norm": 0.36507936507936506, + "acc_norm_stderr": 0.04306241259127153 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.545751633986928, + "acc_stderr": 0.028509807802626592, + "acc_norm": 0.545751633986928, + "acc_norm_stderr": 0.028509807802626592 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6776859504132231, + "acc_stderr": 0.042664163633521685, + "acc_norm": 0.6776859504132231, + "acc_norm_stderr": 0.042664163633521685 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5789473684210527, + "acc_stderr": 0.04017901275981749, + "acc_norm": 0.5789473684210527, + "acc_norm_stderr": 0.04017901275981749 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4869281045751634, + "acc_stderr": 0.020220920829626916, + 
"acc_norm": 0.4869281045751634, + "acc_norm_stderr": 0.020220920829626916 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.39361702127659576, + "acc_stderr": 0.02914454478159615, + "acc_norm": 0.39361702127659576, + "acc_norm_stderr": 0.02914454478159615 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.04547960999764376, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.04547960999764376 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.44907407407407407, + "acc_stderr": 0.03392238405321617, + "acc_norm": 0.44907407407407407, + "acc_norm_stderr": 0.03392238405321617 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2681564245810056, + "acc_stderr": 0.014816119635316998, + "acc_norm": 0.2681564245810056, + "acc_norm_stderr": 0.014816119635316998 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.57, + "acc_stderr": 0.0497569851956243, + "acc_norm": 0.57, + "acc_norm_stderr": 0.0497569851956243 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4522058823529412, + "acc_stderr": 0.030233758551596455, + "acc_norm": 0.4522058823529412, + "acc_norm_stderr": 0.030233758551596455 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5959183673469388, + "acc_stderr": 0.03141470802586587, + "acc_norm": 0.5959183673469388, + "acc_norm_stderr": 0.03141470802586587 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6286919831223629, + "acc_stderr": 0.03145068600744859, + "acc_norm": 0.6286919831223629, + "acc_norm_stderr": 0.03145068600744859 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.4217731421121252, + "acc_stderr": 0.012612974369390986, + "acc_norm": 0.4217731421121252, + "acc_norm_stderr": 0.012612974369390986 + }, + 
"harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5637254901960784, + "acc_stderr": 0.03480693138457038, + "acc_norm": 0.5637254901960784, + "acc_norm_stderr": 0.03480693138457038 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5393939393939394, + "acc_stderr": 0.03892207016552012, + "acc_norm": 0.5393939393939394, + "acc_norm_stderr": 0.03892207016552012 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.5556915544675642, + "mc1_stderr": 0.017394586250743173, + "mc2": 0.667792255703621, + "mc2_stderr": 0.014654040790786606 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4923258559622196, + "acc_stderr": 0.01718832921965428, + "acc_norm": 0.5407319952774499, + "acc_norm_stderr": 0.01713321827653767 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + 
"harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "ENERGY-DRINK-LOVE/deepnoid_DPOv3", + "model_sha": "eaba91ba1e62b5c131fadd4f746e0ff10993dd04", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/ENERGY-DRINK-LOVE/eeve_dpo-v3/result_2024-02-28 00:04:00.json b/ENERGY-DRINK-LOVE/eeve_dpo-v3/result_2024-02-28 00:04:00.json new file mode 100644 index 
0000000000000000000000000000000000000000..1aa550d98ff28234de150c637e4bb86c111c4c6b --- /dev/null +++ b/ENERGY-DRINK-LOVE/eeve_dpo-v3/result_2024-02-28 00:04:00.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.523037542662116, + "acc_stderr": 0.014595873205358269, + "acc_norm": 0.5750853242320819, + "acc_norm_stderr": 0.01444569896852077 + }, + "harness|ko_hellaswag|10": { + "acc": 0.48884684325831507, + "acc_stderr": 0.004988539870174626, + "acc_norm": 0.6700856403106951, + "acc_norm_stderr": 0.004692208279690599 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.7017543859649122, + "acc_stderr": 0.03508771929824565, + "acc_norm": 0.7017543859649122, + "acc_norm_stderr": 0.03508771929824565 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6893203883495146, + "acc_stderr": 0.0458212416016155, + "acc_norm": 0.6893203883495146, + "acc_norm_stderr": 0.0458212416016155 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.7292464878671775, + "acc_stderr": 0.01588988836256049, + "acc_norm": 0.7292464878671775, + "acc_norm_stderr": 0.01588988836256049 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.5185185185185185, + "acc_stderr": 0.043163785995113245, + "acc_norm": 0.5185185185185185, + "acc_norm_stderr": 0.043163785995113245 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.49361702127659574, + "acc_stderr": 0.032683358999363386, + "acc_norm": 0.49361702127659574, + "acc_norm_stderr": 0.032683358999363386 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.5060240963855421, + "acc_stderr": 0.03892212195333045, + "acc_norm": 0.5060240963855421, + "acc_norm_stderr": 0.03892212195333045 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6237942122186495, + "acc_stderr": 0.02751392568354943, + "acc_norm": 0.6237942122186495, + "acc_norm_stderr": 
0.02751392568354943 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.6278026905829597, + "acc_stderr": 0.032443052830087304, + "acc_norm": 0.6278026905829597, + "acc_norm_stderr": 0.032443052830087304 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6106870229007634, + "acc_stderr": 0.042764865428145914, + "acc_norm": 0.6106870229007634, + "acc_norm_stderr": 0.042764865428145914 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7626262626262627, + "acc_stderr": 0.0303137105381989, + "acc_norm": 0.7626262626262627, + "acc_norm_stderr": 0.0303137105381989 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.47586206896551725, + "acc_stderr": 0.0416180850350153, + "acc_norm": 0.47586206896551725, + "acc_norm_stderr": 0.0416180850350153 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04690650298201942, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04690650298201942 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.634453781512605, + "acc_stderr": 0.03128217706368462, + "acc_norm": 0.634453781512605, + "acc_norm_stderr": 0.03128217706368462 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.558974358974359, + "acc_stderr": 0.025174048384000725, + "acc_norm": 0.558974358974359, + "acc_norm_stderr": 0.025174048384000725 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.64, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.64, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6203703703703703, + "acc_stderr": 0.04691521224077742, + "acc_norm": 0.6203703703703703, + 
"acc_norm_stderr": 0.04691521224077742 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39408866995073893, + "acc_stderr": 0.034381579670365425, + "acc_norm": 0.39408866995073893, + "acc_norm_stderr": 0.034381579670365425 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6451612903225806, + "acc_stderr": 0.027218889773308753, + "acc_norm": 0.6451612903225806, + "acc_norm_stderr": 0.027218889773308753 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.8290598290598291, + "acc_stderr": 0.024662496845209804, + "acc_norm": 0.8290598290598291, + "acc_norm_stderr": 0.024662496845209804 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.6075471698113207, + "acc_stderr": 0.03005258057955785, + "acc_norm": 0.6075471698113207, + "acc_norm_stderr": 0.03005258057955785 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5909090909090909, + "acc_stderr": 0.04709306978661895, + "acc_norm": 0.5909090909090909, + "acc_norm_stderr": 0.04709306978661895 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.34074074074074073, + "acc_stderr": 0.028897748741131143, + "acc_norm": 0.34074074074074073, + "acc_norm_stderr": 0.028897748741131143 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.37748344370860926, + "acc_stderr": 0.0395802723112157, + "acc_norm": 0.37748344370860926, + "acc_norm_stderr": 0.0395802723112157 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.746268656716418, + "acc_stderr": 0.03076944496729602, + "acc_norm": 0.746268656716418, + "acc_norm_stderr": 0.03076944496729602 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5028901734104047, + "acc_stderr": 0.038124005659748335, + "acc_norm": 0.5028901734104047, + "acc_norm_stderr": 0.038124005659748335 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.43915343915343913, + "acc_stderr": 0.02555992055053101, + "acc_norm": 0.43915343915343913, + "acc_norm_stderr": 0.02555992055053101 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 
0.5763888888888888, + "acc_stderr": 0.04132125019723369, + "acc_norm": 0.5763888888888888, + "acc_norm_stderr": 0.04132125019723369 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.76, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.76, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.6416184971098265, + "acc_stderr": 0.0258167567915842, + "acc_norm": 0.6416184971098265, + "acc_norm_stderr": 0.0258167567915842 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5644171779141104, + "acc_stderr": 0.03895632464138938, + "acc_norm": 0.5644171779141104, + "acc_norm_stderr": 0.03895632464138938 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6388888888888888, + "acc_stderr": 0.026725868809100793, + "acc_norm": 0.6388888888888888, + "acc_norm_stderr": 0.026725868809100793 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.772020725388601, + "acc_stderr": 0.03027690994517826, + "acc_norm": 0.772020725388601, + "acc_norm_stderr": 0.03027690994517826 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.49122807017543857, + "acc_stderr": 0.04702880432049615, + "acc_norm": 0.49122807017543857, + "acc_norm_stderr": 0.04702880432049615 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.7394495412844037, + "acc_stderr": 0.018819182034850068, + "acc_norm": 0.7394495412844037, + "acc_norm_stderr": 0.018819182034850068 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.38095238095238093, + "acc_stderr": 0.043435254289490965, + "acc_norm": 0.38095238095238093, + "acc_norm_stderr": 0.043435254289490965 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 
0.6274509803921569, + "acc_stderr": 0.02768418188330289, + "acc_norm": 0.6274509803921569, + "acc_norm_stderr": 0.02768418188330289 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.743801652892562, + "acc_stderr": 0.03984979653302872, + "acc_norm": 0.743801652892562, + "acc_norm_stderr": 0.03984979653302872 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.631578947368421, + "acc_stderr": 0.03925523381052932, + "acc_norm": 0.631578947368421, + "acc_norm_stderr": 0.03925523381052932 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.545751633986928, + "acc_stderr": 0.020142974553795205, + "acc_norm": 0.545751633986928, + "acc_norm_stderr": 0.020142974553795205 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.4432624113475177, + "acc_stderr": 0.029634838473766002, + "acc_norm": 0.4432624113475177, + "acc_norm_stderr": 0.029634838473766002 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.44642857142857145, + "acc_stderr": 0.047184714852195886, + "acc_norm": 0.44642857142857145, + "acc_norm_stderr": 0.047184714852195886 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.03400603625538272, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.03400603625538272 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.22346368715083798, + "acc_stderr": 0.013932068638579759, + "acc_norm": 0.22346368715083798, + "acc_norm_stderr": 0.013932068638579759 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.7, + "acc_stderr": 0.04605661864718381, + "acc_norm": 0.7, + "acc_norm_stderr": 0.04605661864718381 + }, + 
"harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5257352941176471, + "acc_stderr": 0.030332578094555026, + "acc_norm": 0.5257352941176471, + "acc_norm_stderr": 0.030332578094555026 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.7020408163265306, + "acc_stderr": 0.029279567411065674, + "acc_norm": 0.7020408163265306, + "acc_norm_stderr": 0.029279567411065674 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7932489451476793, + "acc_stderr": 0.0263616516683891, + "acc_norm": 0.7932489451476793, + "acc_norm_stderr": 0.0263616516683891 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.4165580182529335, + "acc_stderr": 0.01259115324505739, + "acc_norm": 0.4165580182529335, + "acc_norm_stderr": 0.01259115324505739 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.7647058823529411, + "acc_stderr": 0.02977177522814562, + "acc_norm": 0.7647058823529411, + "acc_norm_stderr": 0.02977177522814562 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.7212121212121212, + "acc_stderr": 0.0350143870629678, + "acc_norm": 0.7212121212121212, + "acc_norm_stderr": 0.0350143870629678 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3929008567931457, + "mc1_stderr": 0.017097248285233065, + "mc2": 0.5485844702331015, + "mc2_stderr": 0.01573653357077097 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5371900826446281, + "acc_stderr": 0.0171427361176433, + "acc_norm": 0.5419126328217237, + "acc_norm_stderr": 0.017129852117911147 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 
1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + 
"harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "ENERGY-DRINK-LOVE/eeve_dpo-v3", + "model_sha": "d9361567b4ee0a81312e0dc2e6c94ca37cb61e3c", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/ENERGY-DRINK-LOVE/eeve_leaderboard_inst_v1.5/result_2024-02-28 00:02:56.json b/ENERGY-DRINK-LOVE/eeve_leaderboard_inst_v1.5/result_2024-02-28 00:02:56.json new file mode 100644 index 0000000000000000000000000000000000000000..5b39342a6113b12eeeb306627f9eeac19cb1a92c --- /dev/null +++ b/ENERGY-DRINK-LOVE/eeve_leaderboard_inst_v1.5/result_2024-02-28 00:02:56.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.44368600682593856, + "acc_stderr": 0.014518421825670447, + "acc_norm": 0.4906143344709898, + "acc_norm_stderr": 0.014608816322065 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4576777534355706, + "acc_stderr": 0.0049718741597776965, + "acc_norm": 0.6170085640310695, + "acc_norm_stderr": 0.0048512275270708935 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6900584795321637, + "acc_stderr": 0.035469769593931624, + "acc_norm": 0.6900584795321637, + "acc_norm_stderr": 0.035469769593931624 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6990291262135923, + "acc_stderr": 0.045416094465039476, + "acc_norm": 0.6990291262135923, + "acc_norm_stderr": 0.045416094465039476 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.70242656449553, + "acc_stderr": 0.016349111912909435, + "acc_norm": 0.70242656449553, + "acc_norm_stderr": 0.016349111912909435 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.042925967182569816, + "acc_norm": 
0.4444444444444444, + "acc_norm_stderr": 0.042925967182569816 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4553191489361702, + "acc_stderr": 0.03255525359340355, + "acc_norm": 0.4553191489361702, + "acc_norm_stderr": 0.03255525359340355 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.5120481927710844, + "acc_stderr": 0.038913644958358175, + "acc_norm": 0.5120481927710844, + "acc_norm_stderr": 0.038913644958358175 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6012861736334405, + "acc_stderr": 0.0278093225857745, + "acc_norm": 0.6012861736334405, + "acc_norm_stderr": 0.0278093225857745 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5964125560538116, + "acc_stderr": 0.032928028193303135, + "acc_norm": 0.5964125560538116, + "acc_norm_stderr": 0.032928028193303135 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5419847328244275, + "acc_stderr": 0.04369802690578757, + "acc_norm": 0.5419847328244275, + "acc_norm_stderr": 0.04369802690578757 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956913, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956913 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.696969696969697, + "acc_stderr": 0.032742879140268674, + "acc_norm": 0.696969696969697, + "acc_norm_stderr": 0.032742879140268674 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.47586206896551725, + "acc_stderr": 0.041618085035015295, + "acc_norm": 0.47586206896551725, + "acc_norm_stderr": 0.041618085035015295 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.04488482852329017, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.04488482852329017 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5756302521008403, + "acc_stderr": 
0.03210479051015776, + "acc_norm": 0.5756302521008403, + "acc_norm_stderr": 0.03210479051015776 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5384615384615384, + "acc_stderr": 0.025275892070240648, + "acc_norm": 0.5384615384615384, + "acc_norm_stderr": 0.025275892070240648 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.58, + "acc_stderr": 0.04960449637488583, + "acc_norm": 0.58, + "acc_norm_stderr": 0.04960449637488583 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6944444444444444, + "acc_stderr": 0.044531975073749834, + "acc_norm": 0.6944444444444444, + "acc_norm_stderr": 0.044531975073749834 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4088669950738916, + "acc_stderr": 0.034590588158832314, + "acc_norm": 0.4088669950738916, + "acc_norm_stderr": 0.034590588158832314 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6064516129032258, + "acc_stderr": 0.027791878753132267, + "acc_norm": 0.6064516129032258, + "acc_norm_stderr": 0.027791878753132267 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7692307692307693, + "acc_stderr": 0.02760192138141762, + "acc_norm": 0.7692307692307693, + "acc_norm_stderr": 0.02760192138141762 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5547169811320755, + "acc_stderr": 0.030588052974270658, + "acc_norm": 0.5547169811320755, + "acc_norm_stderr": 0.030588052974270658 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5272727272727272, + "acc_stderr": 0.04782001791380061, + "acc_norm": 0.5272727272727272, + "acc_norm_stderr": 0.04782001791380061 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.32592592592592595, + "acc_stderr": 0.028578348365473072, + "acc_norm": 0.32592592592592595, + "acc_norm_stderr": 0.028578348365473072 + }, + "harness|ko_mmlu_high_school_physics|5": { + 
"acc": 0.2980132450331126, + "acc_stderr": 0.03734535676787198, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.03734535676787198 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6567164179104478, + "acc_stderr": 0.03357379665433431, + "acc_norm": 0.6567164179104478, + "acc_norm_stderr": 0.03357379665433431 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.48554913294797686, + "acc_stderr": 0.03810871630454764, + "acc_norm": 0.48554913294797686, + "acc_norm_stderr": 0.03810871630454764 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.024278568024307695, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.024278568024307695 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4583333333333333, + "acc_stderr": 0.04166666666666665, + "acc_norm": 0.4583333333333333, + "acc_norm_stderr": 0.04166666666666665 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.81, + "acc_stderr": 0.03942772444036623, + "acc_norm": 0.81, + "acc_norm_stderr": 0.03942772444036623 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5838150289017341, + "acc_stderr": 0.026538189104705477, + "acc_norm": 0.5838150289017341, + "acc_norm_stderr": 0.026538189104705477 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5337423312883436, + "acc_stderr": 0.039194155450484096, + "acc_norm": 0.5337423312883436, + "acc_norm_stderr": 0.039194155450484096 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5895061728395061, + "acc_stderr": 0.027371350925124768, + "acc_norm": 0.5895061728395061, + "acc_norm_stderr": 0.027371350925124768 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + 
"harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6994818652849741, + "acc_stderr": 0.0330881859441575, + "acc_norm": 0.6994818652849741, + "acc_norm_stderr": 0.0330881859441575 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.32456140350877194, + "acc_stderr": 0.04404556157374768, + "acc_norm": 0.32456140350877194, + "acc_norm_stderr": 0.04404556157374768 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6256880733944954, + "acc_stderr": 0.020748959408988327, + "acc_norm": 0.6256880733944954, + "acc_norm_stderr": 0.020748959408988327 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.31746031746031744, + "acc_stderr": 0.04163453031302859, + "acc_norm": 0.31746031746031744, + "acc_norm_stderr": 0.04163453031302859 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5849673202614379, + "acc_stderr": 0.028213504177824093, + "acc_norm": 0.5849673202614379, + "acc_norm_stderr": 0.028213504177824093 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6446280991735537, + "acc_stderr": 0.04369236326573981, + "acc_norm": 0.6446280991735537, + "acc_norm_stderr": 0.04369236326573981 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.6052631578947368, + "acc_stderr": 0.039777499346220734, + "acc_norm": 0.6052631578947368, + "acc_norm_stderr": 0.039777499346220734 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4852941176470588, + "acc_stderr": 0.020219083895133924, + "acc_norm": 0.4852941176470588, + "acc_norm_stderr": 0.020219083895133924 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.36879432624113473, + "acc_stderr": 0.028782227561347247, + "acc_norm": 0.36879432624113473, + "acc_norm_stderr": 0.028782227561347247 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4017857142857143, + "acc_stderr": 0.04653333146973646, + 
"acc_norm": 0.4017857142857143, + "acc_norm_stderr": 0.04653333146973646 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4583333333333333, + "acc_stderr": 0.03398110890294636, + "acc_norm": 0.4583333333333333, + "acc_norm_stderr": 0.03398110890294636 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23798882681564246, + "acc_stderr": 0.014242630070574894, + "acc_norm": 0.23798882681564246, + "acc_norm_stderr": 0.014242630070574894 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.65, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.65, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4889705882352941, + "acc_stderr": 0.030365446477275675, + "acc_norm": 0.4889705882352941, + "acc_norm_stderr": 0.030365446477275675 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6448979591836734, + "acc_stderr": 0.030635655150387634, + "acc_norm": 0.6448979591836734, + "acc_norm_stderr": 0.030635655150387634 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.70042194092827, + "acc_stderr": 0.0298180247497531, + "acc_norm": 0.70042194092827, + "acc_norm_stderr": 0.0298180247497531 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3650586701434159, + "acc_stderr": 0.012296373743443475, + "acc_norm": 0.3650586701434159, + "acc_norm_stderr": 0.012296373743443475 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6715686274509803, + "acc_stderr": 0.03296245110172229, + "acc_norm": 0.6715686274509803, + "acc_norm_stderr": 0.03296245110172229 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6909090909090909, + "acc_stderr": 0.036085410115739666, + "acc_norm": 0.6909090909090909, + "acc_norm_stderr": 0.036085410115739666 + }, + "harness|ko_truthfulqa_mc|0": { 
+ "mc1": 0.2607099143206854, + "mc1_stderr": 0.015368841620766372, + "mc2": 0.4167656798454404, + "mc2_stderr": 0.015096503185058233 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.48760330578512395, + "acc_stderr": 0.01718506973267653, + "acc_norm": 0.526564344746163, + "acc_norm_stderr": 0.017166075717577747 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "ENERGY-DRINK-LOVE/eeve_leaderboard_inst_v1.5", + "model_sha": "78505257163f6e45ed327b68f7bddb7c9ff509fd", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/ENERGY-DRINK-LOVE/komt_DPOv3/result_2024-03-15 02:42:33.json b/ENERGY-DRINK-LOVE/komt_DPOv3/result_2024-03-15 02:42:33.json new file mode 100644 index 0000000000000000000000000000000000000000..b63a2fed650a516d0a8918a6a6f40a59649274c2 --- /dev/null +++ b/ENERGY-DRINK-LOVE/komt_DPOv3/result_2024-03-15 02:42:33.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.514505119453925, + "acc_stderr": 0.014605241081370053, + "acc_norm": 0.5750853242320819, + "acc_norm_stderr": 0.014445698968520776 + }, + "harness|ko_hellaswag|10": { + "acc": 0.5127464648476399, + 
"acc_stderr": 0.004988159744742496, + "acc_norm": 0.7033459470225055, + "acc_norm_stderr": 0.004558491550673699 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5614035087719298, + "acc_stderr": 0.038057975055904594, + "acc_norm": 0.5614035087719298, + "acc_norm_stderr": 0.038057975055904594 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6796116504854369, + "acc_stderr": 0.04620284082280042, + "acc_norm": 0.6796116504854369, + "acc_norm_stderr": 0.04620284082280042 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6500638569604087, + "acc_stderr": 0.017055679797150426, + "acc_norm": 0.6500638569604087, + "acc_norm_stderr": 0.017055679797150426 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.043097329010363554, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.043097329010363554 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542126, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542126 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.43829787234042555, + "acc_stderr": 0.032436186361081025, + "acc_norm": 0.43829787234042555, + "acc_norm_stderr": 0.032436186361081025 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4578313253012048, + "acc_stderr": 0.03878626771002361, + "acc_norm": 0.4578313253012048, + "acc_norm_stderr": 0.03878626771002361 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6270096463022508, + "acc_stderr": 0.027466610213140116, + "acc_norm": 0.6270096463022508, + "acc_norm_stderr": 0.027466610213140116 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5874439461883408, + "acc_stderr": 0.03304062175449297, + "acc_norm": 0.5874439461883408, + "acc_norm_stderr": 0.03304062175449297 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.648854961832061, + "acc_stderr": 0.0418644516301375, + "acc_norm": 0.648854961832061, + "acc_norm_stderr": 0.0418644516301375 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.49, + 
"acc_stderr": 0.05024183937956913, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956913 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.702020202020202, + "acc_stderr": 0.03258630383836556, + "acc_norm": 0.702020202020202, + "acc_norm_stderr": 0.03258630383836556 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4413793103448276, + "acc_stderr": 0.04137931034482758, + "acc_norm": 0.4413793103448276, + "acc_norm_stderr": 0.04137931034482758 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.4019607843137255, + "acc_stderr": 0.048786087144669955, + "acc_norm": 0.4019607843137255, + "acc_norm_stderr": 0.048786087144669955 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6134453781512605, + "acc_stderr": 0.0316314580755238, + "acc_norm": 0.6134453781512605, + "acc_norm_stderr": 0.0316314580755238 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5564102564102564, + "acc_stderr": 0.025189149894764215, + "acc_norm": 0.5564102564102564, + "acc_norm_stderr": 0.025189149894764215 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.62, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.62, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6388888888888888, + "acc_stderr": 0.04643454608906276, + "acc_norm": 0.6388888888888888, + "acc_norm_stderr": 0.04643454608906276 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3891625615763547, + "acc_stderr": 0.03430462416103872, + "acc_norm": 0.3891625615763547, + "acc_norm_stderr": 0.03430462416103872 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5870967741935483, + "acc_stderr": 0.02800913812540038, + "acc_norm": 0.5870967741935483, + "acc_norm_stderr": 0.02800913812540038 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 
0.782051282051282, + "acc_stderr": 0.027046857630716663, + "acc_norm": 0.782051282051282, + "acc_norm_stderr": 0.027046857630716663 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5169811320754717, + "acc_stderr": 0.030755120364119898, + "acc_norm": 0.5169811320754717, + "acc_norm_stderr": 0.030755120364119898 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5636363636363636, + "acc_stderr": 0.04750185058907296, + "acc_norm": 0.5636363636363636, + "acc_norm_stderr": 0.04750185058907296 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.02944316932303154, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.02944316932303154 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33774834437086093, + "acc_stderr": 0.038615575462551684, + "acc_norm": 0.33774834437086093, + "acc_norm_stderr": 0.038615575462551684 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6716417910447762, + "acc_stderr": 0.033206858897443244, + "acc_norm": 0.6716417910447762, + "acc_norm_stderr": 0.033206858897443244 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4797687861271676, + "acc_stderr": 0.03809342081273958, + "acc_norm": 0.4797687861271676, + "acc_norm_stderr": 0.03809342081273958 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.41534391534391535, + "acc_stderr": 0.025379524910778405, + "acc_norm": 0.41534391534391535, + "acc_norm_stderr": 0.025379524910778405 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5486111111111112, + "acc_stderr": 0.04161402398403279, + "acc_norm": 0.5486111111111112, + "acc_norm_stderr": 0.04161402398403279 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.7, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.7, + "acc_norm_stderr": 0.046056618647183814 + }, + 
"harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5086705202312138, + "acc_stderr": 0.026915047355369804, + "acc_norm": 0.5086705202312138, + "acc_norm_stderr": 0.026915047355369804 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5030674846625767, + "acc_stderr": 0.03928297078179663, + "acc_norm": 0.5030674846625767, + "acc_norm_stderr": 0.03928297078179663 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.595679012345679, + "acc_stderr": 0.027306625297327677, + "acc_norm": 0.595679012345679, + "acc_norm_stderr": 0.027306625297327677 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7305699481865285, + "acc_stderr": 0.03201867122877794, + "acc_norm": 0.7305699481865285, + "acc_norm_stderr": 0.03201867122877794 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.4649122807017544, + "acc_stderr": 0.046920083813689104, + "acc_norm": 0.4649122807017544, + "acc_norm_stderr": 0.046920083813689104 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6770642201834862, + "acc_stderr": 0.020048115923415325, + "acc_norm": 0.6770642201834862, + "acc_norm_stderr": 0.020048115923415325 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.36507936507936506, + "acc_stderr": 0.043062412591271526, + "acc_norm": 0.36507936507936506, + "acc_norm_stderr": 0.043062412591271526 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.6143790849673203, + "acc_stderr": 0.02787074527829027, + "acc_norm": 0.6143790849673203, + "acc_norm_stderr": 0.02787074527829027 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.71900826446281, + "acc_stderr": 0.04103203830514511, + "acc_norm": 0.71900826446281, + "acc_norm_stderr": 
0.04103203830514511 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5789473684210527, + "acc_stderr": 0.040179012759817494, + "acc_norm": 0.5789473684210527, + "acc_norm_stderr": 0.040179012759817494 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5016339869281046, + "acc_stderr": 0.020227726838150124, + "acc_norm": 0.5016339869281046, + "acc_norm_stderr": 0.020227726838150124 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3617021276595745, + "acc_stderr": 0.028663820147199495, + "acc_norm": 0.3617021276595745, + "acc_norm_stderr": 0.028663820147199495 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4017857142857143, + "acc_stderr": 0.04653333146973647, + "acc_norm": 0.4017857142857143, + "acc_norm_stderr": 0.04653333146973647 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5092592592592593, + "acc_stderr": 0.034093869469927006, + "acc_norm": 0.5092592592592593, + "acc_norm_stderr": 0.034093869469927006 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.39106145251396646, + "acc_stderr": 0.016320763763808383, + "acc_norm": 0.39106145251396646, + "acc_norm_stderr": 0.016320763763808383 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.65, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.65, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.47058823529411764, + "acc_stderr": 0.030320243265004137, + "acc_norm": 0.47058823529411764, + "acc_norm_stderr": 0.030320243265004137 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6081632653061224, + "acc_stderr": 0.031251275910891656, + "acc_norm": 0.6081632653061224, + "acc_norm_stderr": 0.031251275910891656 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.759493670886076, + "acc_stderr": 
0.027820781981149678, + "acc_norm": 0.759493670886076, + "acc_norm_stderr": 0.027820781981149678 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.4380704041720991, + "acc_stderr": 0.01267190278256764, + "acc_norm": 0.4380704041720991, + "acc_norm_stderr": 0.01267190278256764 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6568627450980392, + "acc_stderr": 0.03332139944668086, + "acc_norm": 0.6568627450980392, + "acc_norm_stderr": 0.03332139944668086 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6727272727272727, + "acc_stderr": 0.03663974994391243, + "acc_norm": 0.6727272727272727, + "acc_norm_stderr": 0.03663974994391243 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.5079559363525091, + "mc1_stderr": 0.017501285074551825, + "mc2": 0.6849191804406951, + "mc2_stderr": 0.015690245522535593 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.551357733175915, + "acc_stderr": 0.017099430514725792, + "acc_norm": 0.5631641086186541, + "acc_norm_stderr": 0.017052633559856076 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + 
"harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "ENERGY-DRINK-LOVE/komt_DPOv3", + "model_sha": "5ae1a1246cf3533ace0dfb1e18319b3762432ea4", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + 
"override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/ENERGY-DRINK-LOVE/leaderboard_inst_v1.3_Open-Hermes_LDCC-SOLAR-10.7B_DPOv3/result_2024-03-08 00:52:09.json b/ENERGY-DRINK-LOVE/leaderboard_inst_v1.3_Open-Hermes_LDCC-SOLAR-10.7B_DPOv3/result_2024-03-08 00:52:09.json new file mode 100644 index 0000000000000000000000000000000000000000..746559a3cb51e5a5e92963539e6819aac99e35b2 --- /dev/null +++ b/ENERGY-DRINK-LOVE/leaderboard_inst_v1.3_Open-Hermes_LDCC-SOLAR-10.7B_DPOv3/result_2024-03-08 00:52:09.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4513651877133106, + "acc_stderr": 0.014542104569955265, + "acc_norm": 0.5085324232081911, + "acc_norm_stderr": 0.014609263165632175 + }, + "harness|ko_hellaswag|10": { + "acc": 0.46335391356303524, + "acc_stderr": 0.004976361454341332, + "acc_norm": 0.6296554471220872, + "acc_norm_stderr": 0.0048191004568678125 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6549707602339181, + "acc_stderr": 0.036459813773888065, + "acc_norm": 0.6549707602339181, + "acc_norm_stderr": 0.036459813773888065 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6213592233009708, + "acc_stderr": 0.04802694698258975, + "acc_norm": 0.6213592233009708, + "acc_norm_stderr": 0.04802694698258975 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.685823754789272, + "acc_stderr": 0.01659929173588492, + "acc_norm": 0.685823754789272, + "acc_norm_stderr": 0.01659929173588492 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.0424463323835323, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.0424463323835323 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720683, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720683 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4340425531914894, + "acc_stderr": 0.032400380867927465, + "acc_norm": 0.4340425531914894, + 
"acc_norm_stderr": 0.032400380867927465 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4819277108433735, + "acc_stderr": 0.03889951252827216, + "acc_norm": 0.4819277108433735, + "acc_norm_stderr": 0.03889951252827216 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5787781350482315, + "acc_stderr": 0.028043399858210628, + "acc_norm": 0.5787781350482315, + "acc_norm_stderr": 0.028043399858210628 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5650224215246636, + "acc_stderr": 0.03327283370271344, + "acc_norm": 0.5650224215246636, + "acc_norm_stderr": 0.03327283370271344 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5419847328244275, + "acc_stderr": 0.04369802690578756, + "acc_norm": 0.5419847328244275, + "acc_norm_stderr": 0.04369802690578756 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.45, + "acc_stderr": 0.04999999999999999, + "acc_norm": 0.45, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7373737373737373, + "acc_stderr": 0.03135305009533087, + "acc_norm": 0.7373737373737373, + "acc_norm_stderr": 0.03135305009533087 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4413793103448276, + "acc_stderr": 0.04137931034482757, + "acc_norm": 0.4413793103448276, + "acc_norm_stderr": 0.04137931034482757 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.04220773659171452, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.04220773659171452 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5546218487394958, + "acc_stderr": 0.03228410626716391, + "acc_norm": 0.5546218487394958, + "acc_norm_stderr": 0.03228410626716391 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5461538461538461, + "acc_stderr": 0.025242770987126194, + "acc_norm": 0.5461538461538461, + "acc_norm_stderr": 0.025242770987126194 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.55, + "acc_stderr": 
0.04999999999999999, + "acc_norm": 0.55, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6203703703703703, + "acc_stderr": 0.04691521224077742, + "acc_norm": 0.6203703703703703, + "acc_norm_stderr": 0.04691521224077742 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4236453201970443, + "acc_stderr": 0.034767257476490364, + "acc_norm": 0.4236453201970443, + "acc_norm_stderr": 0.034767257476490364 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5935483870967742, + "acc_stderr": 0.027941727346256308, + "acc_norm": 0.5935483870967742, + "acc_norm_stderr": 0.027941727346256308 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7777777777777778, + "acc_stderr": 0.027236013946196673, + "acc_norm": 0.7777777777777778, + "acc_norm_stderr": 0.027236013946196673 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5169811320754717, + "acc_stderr": 0.030755120364119898, + "acc_norm": 0.5169811320754717, + "acc_norm_stderr": 0.030755120364119898 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.04769300568972744, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.04769300568972744 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.34444444444444444, + "acc_stderr": 0.02897264888484427, + "acc_norm": 0.34444444444444444, + "acc_norm_stderr": 0.02897264888484427 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.36423841059602646, + "acc_stderr": 0.03929111781242742, + "acc_norm": 0.36423841059602646, + "acc_norm_stderr": 0.03929111781242742 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6965174129353234, + "acc_stderr": 0.03251006816458618, + "acc_norm": 0.6965174129353234, + "acc_norm_stderr": 0.03251006816458618 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 
0.4797687861271676, + "acc_stderr": 0.03809342081273957, + "acc_norm": 0.4797687861271676, + "acc_norm_stderr": 0.03809342081273957 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.373015873015873, + "acc_stderr": 0.02490699045899257, + "acc_norm": 0.373015873015873, + "acc_norm_stderr": 0.02490699045899257 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5, + "acc_stderr": 0.04181210050035455, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04181210050035455 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.77, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.77, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5751445086705202, + "acc_stderr": 0.026613350840261733, + "acc_norm": 0.5751445086705202, + "acc_norm_stderr": 0.026613350840261733 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.49079754601226994, + "acc_stderr": 0.039277056007874414, + "acc_norm": 0.49079754601226994, + "acc_norm_stderr": 0.039277056007874414 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6049382716049383, + "acc_stderr": 0.02720111766692565, + "acc_norm": 0.6049382716049383, + "acc_norm_stderr": 0.02720111766692565 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6787564766839378, + "acc_stderr": 0.033699508685490674, + "acc_norm": 0.6787564766839378, + "acc_norm_stderr": 0.033699508685490674 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.42105263157894735, + "acc_stderr": 0.046446020912223177, + "acc_norm": 0.42105263157894735, + "acc_norm_stderr": 0.046446020912223177 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 
0.6330275229357798, + "acc_stderr": 0.020664675659520536, + "acc_norm": 0.6330275229357798, + "acc_norm_stderr": 0.020664675659520536 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3253968253968254, + "acc_stderr": 0.04190596438871136, + "acc_norm": 0.3253968253968254, + "acc_norm_stderr": 0.04190596438871136 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.6045751633986928, + "acc_stderr": 0.027996723180631452, + "acc_norm": 0.6045751633986928, + "acc_norm_stderr": 0.027996723180631452 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.52, + "acc_stderr": 0.05021167315686779, + "acc_norm": 0.52, + "acc_norm_stderr": 0.05021167315686779 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7603305785123967, + "acc_stderr": 0.03896878985070417, + "acc_norm": 0.7603305785123967, + "acc_norm_stderr": 0.03896878985070417 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5723684210526315, + "acc_stderr": 0.04026097083296563, + "acc_norm": 0.5723684210526315, + "acc_norm_stderr": 0.04026097083296563 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4738562091503268, + "acc_stderr": 0.020200164564804588, + "acc_norm": 0.4738562091503268, + "acc_norm_stderr": 0.020200164564804588 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.36879432624113473, + "acc_stderr": 0.02878222756134724, + "acc_norm": 0.36879432624113473, + "acc_norm_stderr": 0.02878222756134724 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3482142857142857, + "acc_stderr": 0.04521829902833587, + "acc_norm": 0.3482142857142857, + "acc_norm_stderr": 0.04521829902833587 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.03400603625538271, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.03400603625538271 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2, + "acc_stderr": 0.013378001241813072, + "acc_norm": 0.2, + "acc_norm_stderr": 0.013378001241813072 + }, + 
"harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.68, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.68, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5036764705882353, + "acc_stderr": 0.0303720158854282, + "acc_norm": 0.5036764705882353, + "acc_norm_stderr": 0.0303720158854282 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6, + "acc_stderr": 0.031362502409358936, + "acc_norm": 0.6, + "acc_norm_stderr": 0.031362502409358936 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.70042194092827, + "acc_stderr": 0.02981802474975309, + "acc_norm": 0.70042194092827, + "acc_norm_stderr": 0.02981802474975309 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.378748370273794, + "acc_stderr": 0.012389052105003743, + "acc_norm": 0.378748370273794, + "acc_norm_stderr": 0.012389052105003743 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6617647058823529, + "acc_stderr": 0.03320574612945431, + "acc_norm": 0.6617647058823529, + "acc_norm_stderr": 0.03320574612945431 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.036810508691615486, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.036810508691615486 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.29008567931456547, + "mc1_stderr": 0.01588623687420952, + "mc2": 0.4275177008853309, + "mc2_stderr": 0.015353974714185103 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3990554899645809, + "acc_stderr": 0.0168363772928493, + "acc_norm": 0.43919716646989376, + "acc_norm_stderr": 0.0170627757447807 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + 
"harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 
1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "ENERGY-DRINK-LOVE/leaderboard_inst_v1.3_Open-Hermes_LDCC-SOLAR-10.7B_DPOv3", + "model_sha": "d4112540ee507d0a4fc61d60c954600c82984058", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/ENERGY-DRINK-LOVE/leaderboard_inst_v1.3_Open-Hermes_LDCC-SOLAR-10.7B_SFT/result_2024-03-04 12:34:24.json b/ENERGY-DRINK-LOVE/leaderboard_inst_v1.3_Open-Hermes_LDCC-SOLAR-10.7B_SFT/result_2024-03-04 12:34:24.json new file mode 100644 index 0000000000000000000000000000000000000000..a8f68f639644a0d777b13b0ddea988bd4f8c657e --- /dev/null +++ b/ENERGY-DRINK-LOVE/leaderboard_inst_v1.3_Open-Hermes_LDCC-SOLAR-10.7B_SFT/result_2024-03-04 12:34:24.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.42662116040955633, + "acc_stderr": 0.014453185592920293, + "acc_norm": 0.4761092150170648, + "acc_norm_stderr": 0.014594701798071654 + }, + "harness|ko_hellaswag|10": { + "acc": 0.44991037641904, + "acc_stderr": 0.0049646798459184365, + "acc_norm": 0.6059549890460068, + "acc_norm_stderr": 0.004876459434619797 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6257309941520468, + "acc_stderr": 0.037116011853894806, + "acc_norm": 0.6257309941520468, + "acc_norm_stderr": 0.037116011853894806 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6116504854368932, + 
"acc_stderr": 0.04825729337356389, + "acc_norm": 0.6116504854368932, + "acc_norm_stderr": 0.04825729337356389 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6717752234993615, + "acc_stderr": 0.01679168564019289, + "acc_norm": 0.6717752234993615, + "acc_norm_stderr": 0.01679168564019289 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.042446332383532306, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.042446332383532306 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768077, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768077 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.46382978723404256, + "acc_stderr": 0.032600385118357715, + "acc_norm": 0.46382978723404256, + "acc_norm_stderr": 0.032600385118357715 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.5, + "acc_stderr": 0.03892494720807614, + "acc_norm": 0.5, + "acc_norm_stderr": 0.03892494720807614 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5819935691318328, + "acc_stderr": 0.028013651891995072, + "acc_norm": 0.5819935691318328, + "acc_norm_stderr": 0.028013651891995072 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5739910313901345, + "acc_stderr": 0.033188332862172806, + "acc_norm": 0.5739910313901345, + "acc_norm_stderr": 0.033188332862172806 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5572519083969466, + "acc_stderr": 0.04356447202665069, + "acc_norm": 0.5572519083969466, + "acc_norm_stderr": 0.04356447202665069 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.702020202020202, + "acc_stderr": 0.032586303838365555, + "acc_norm": 0.702020202020202, + "acc_norm_stderr": 0.032586303838365555 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4482758620689655, + "acc_stderr": 
0.04144311810878151, + "acc_norm": 0.4482758620689655, + "acc_norm_stderr": 0.04144311810878151 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.043364327079931785, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.043364327079931785 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5588235294117647, + "acc_stderr": 0.03225294232399639, + "acc_norm": 0.5588235294117647, + "acc_norm_stderr": 0.03225294232399639 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5230769230769231, + "acc_stderr": 0.02532399086173626, + "acc_norm": 0.5230769230769231, + "acc_norm_stderr": 0.02532399086173626 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.58, + "acc_stderr": 0.04960449637488583, + "acc_norm": 0.58, + "acc_norm_stderr": 0.04960449637488583 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5925925925925926, + "acc_stderr": 0.047500773411999854, + "acc_norm": 0.5925925925925926, + "acc_norm_stderr": 0.047500773411999854 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.43349753694581283, + "acc_stderr": 0.03486731727419872, + "acc_norm": 0.43349753694581283, + "acc_norm_stderr": 0.03486731727419872 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5806451612903226, + "acc_stderr": 0.028071588901091828, + "acc_norm": 0.5806451612903226, + "acc_norm_stderr": 0.028071588901091828 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.8034188034188035, + "acc_stderr": 0.026035386098951292, + "acc_norm": 0.8034188034188035, + "acc_norm_stderr": 0.026035386098951292 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5056603773584906, + "acc_stderr": 0.030770900763851295, + "acc_norm": 0.5056603773584906, + "acc_norm_stderr": 0.030770900763851295 + }, + "harness|ko_mmlu_public_relations|5": { + 
"acc": 0.6454545454545455, + "acc_stderr": 0.04582004841505417, + "acc_norm": 0.6454545454545455, + "acc_norm_stderr": 0.04582004841505417 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.029116617606083032, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.029116617606083032 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33774834437086093, + "acc_stderr": 0.038615575462551684, + "acc_norm": 0.33774834437086093, + "acc_norm_stderr": 0.038615575462551684 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6865671641791045, + "acc_stderr": 0.03280188205348642, + "acc_norm": 0.6865671641791045, + "acc_norm_stderr": 0.03280188205348642 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.44508670520231214, + "acc_stderr": 0.03789401760283648, + "acc_norm": 0.44508670520231214, + "acc_norm_stderr": 0.03789401760283648 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.373015873015873, + "acc_stderr": 0.02490699045899257, + "acc_norm": 0.373015873015873, + "acc_norm_stderr": 0.02490699045899257 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.04174752578923185, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.04174752578923185 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.75, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.75, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5664739884393064, + "acc_stderr": 0.02668013476167922, + "acc_norm": 0.5664739884393064, + "acc_norm_stderr": 0.02668013476167922 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4723926380368098, + "acc_stderr": 0.0392237829061099, + "acc_norm": 0.4723926380368098, + "acc_norm_stderr": 0.0392237829061099 + }, + 
"harness|ko_mmlu_prehistory|5": { + "acc": 0.6018518518518519, + "acc_stderr": 0.027237415094592484, + "acc_norm": 0.6018518518518519, + "acc_norm_stderr": 0.027237415094592484 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6839378238341969, + "acc_stderr": 0.033553973696861736, + "acc_norm": 0.6839378238341969, + "acc_norm_stderr": 0.033553973696861736 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.4298245614035088, + "acc_stderr": 0.04657047260594964, + "acc_norm": 0.4298245614035088, + "acc_norm_stderr": 0.04657047260594964 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.618348623853211, + "acc_stderr": 0.0208281485170226, + "acc_norm": 0.618348623853211, + "acc_norm_stderr": 0.0208281485170226 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3253968253968254, + "acc_stderr": 0.041905964388711366, + "acc_norm": 0.3253968253968254, + "acc_norm_stderr": 0.041905964388711366 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.6111111111111112, + "acc_stderr": 0.027914055510468, + "acc_norm": 0.6111111111111112, + "acc_norm_stderr": 0.027914055510468 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956914, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956914 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7355371900826446, + "acc_stderr": 0.04026187527591207, + "acc_norm": 0.7355371900826446, + "acc_norm_stderr": 0.04026187527591207 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5723684210526315, + "acc_stderr": 0.04026097083296563, + "acc_norm": 0.5723684210526315, + "acc_norm_stderr": 0.04026097083296563 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4738562091503268, + "acc_stderr": 0.020200164564804588, + "acc_norm": 0.4738562091503268, + "acc_norm_stderr": 
0.020200164564804588 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.36879432624113473, + "acc_stderr": 0.028782227561347243, + "acc_norm": 0.36879432624113473, + "acc_norm_stderr": 0.028782227561347243 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4017857142857143, + "acc_stderr": 0.04653333146973646, + "acc_norm": 0.4017857142857143, + "acc_norm_stderr": 0.04653333146973646 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.03400603625538272, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.03400603625538272 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2022346368715084, + "acc_stderr": 0.013433729483320986, + "acc_norm": 0.2022346368715084, + "acc_norm_stderr": 0.013433729483320986 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.63, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.63, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5036764705882353, + "acc_stderr": 0.0303720158854282, + "acc_norm": 0.5036764705882353, + "acc_norm_stderr": 0.0303720158854282 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6, + "acc_stderr": 0.031362502409358936, + "acc_norm": 0.6, + "acc_norm_stderr": 0.031362502409358936 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6962025316455697, + "acc_stderr": 0.029936696387138594, + "acc_norm": 0.6962025316455697, + "acc_norm_stderr": 0.029936696387138594 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.37809647979139505, + "acc_stderr": 0.012384878406798095, + "acc_norm": 0.37809647979139505, + "acc_norm_stderr": 0.012384878406798095 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6029411764705882, + "acc_stderr": 0.03434131164719129, + 
"acc_norm": 0.6029411764705882, + "acc_norm_stderr": 0.03434131164719129 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6545454545454545, + "acc_stderr": 0.037131580674819135, + "acc_norm": 0.6545454545454545, + "acc_norm_stderr": 0.037131580674819135 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2692778457772338, + "mc1_stderr": 0.015528566637087298, + "mc2": 0.40370494061094203, + "mc2_stderr": 0.014912269705733993 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.39315230224321135, + "acc_stderr": 0.01679326280128708, + "acc_norm": 0.4498229043683589, + "acc_norm_stderr": 0.01710357334382571 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + 
"harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "ENERGY-DRINK-LOVE/leaderboard_inst_v1.3_Open-Hermes_LDCC-SOLAR-10.7B_SFT", + "model_sha": "8b0d7ec8189c550741754e5a0c6fb830f43e0335", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/ENERGY-DRINK-LOVE/leaderboard_inst_v1.3_deup_LDCC-SOLAR-10.7B_SFT/result_2024-03-07 22:11:58.json b/ENERGY-DRINK-LOVE/leaderboard_inst_v1.3_deup_LDCC-SOLAR-10.7B_SFT/result_2024-03-07 22:11:58.json new file mode 100644 index 
0000000000000000000000000000000000000000..705012e89b9b38617967dafce295241d86002c07 --- /dev/null +++ b/ENERGY-DRINK-LOVE/leaderboard_inst_v1.3_deup_LDCC-SOLAR-10.7B_SFT/result_2024-03-07 22:11:58.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.42150170648464164, + "acc_stderr": 0.01443019706932602, + "acc_norm": 0.4761092150170648, + "acc_norm_stderr": 0.014594701798071657 + }, + "harness|ko_hellaswag|10": { + "acc": 0.44682334196375223, + "acc_stderr": 0.00496148138002378, + "acc_norm": 0.6104361680940051, + "acc_norm_stderr": 0.004866547422355566 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6257309941520468, + "acc_stderr": 0.03711601185389481, + "acc_norm": 0.6257309941520468, + "acc_norm_stderr": 0.03711601185389481 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6310679611650486, + "acc_stderr": 0.0477761518115674, + "acc_norm": 0.6310679611650486, + "acc_norm_stderr": 0.0477761518115674 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.016857391247472545, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.016857391247472545 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.43703703703703706, + "acc_stderr": 0.04284958639753398, + "acc_norm": 0.43703703703703706, + "acc_norm_stderr": 0.04284958639753398 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206824, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206824 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.48936170212765956, + "acc_stderr": 0.03267862331014063, + "acc_norm": 0.48936170212765956, + "acc_norm_stderr": 0.03267862331014063 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4879518072289157, + "acc_stderr": 0.038913644958358196, + "acc_norm": 0.4879518072289157, + "acc_norm_stderr": 0.038913644958358196 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5916398713826366, + "acc_stderr": 0.02791705074848463, + "acc_norm": 
0.5916398713826366, + "acc_norm_stderr": 0.02791705074848463 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5964125560538116, + "acc_stderr": 0.032928028193303135, + "acc_norm": 0.5964125560538116, + "acc_norm_stderr": 0.032928028193303135 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5648854961832062, + "acc_stderr": 0.04348208051644858, + "acc_norm": 0.5648854961832062, + "acc_norm_stderr": 0.04348208051644858 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.44, + "acc_stderr": 0.0498887651569859, + "acc_norm": 0.44, + "acc_norm_stderr": 0.0498887651569859 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6919191919191919, + "acc_stderr": 0.03289477330098615, + "acc_norm": 0.6919191919191919, + "acc_norm_stderr": 0.03289477330098615 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.42758620689655175, + "acc_stderr": 0.041227371113703316, + "acc_norm": 0.42758620689655175, + "acc_norm_stderr": 0.041227371113703316 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.044405219061793275, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.044405219061793275 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5546218487394958, + "acc_stderr": 0.03228410626716391, + "acc_norm": 0.5546218487394958, + "acc_norm_stderr": 0.03228410626716391 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5205128205128206, + "acc_stderr": 0.02532966316348994, + "acc_norm": 0.5205128205128206, + "acc_norm_stderr": 0.02532966316348994 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.58, + "acc_stderr": 0.04960449637488583, + "acc_norm": 0.58, + "acc_norm_stderr": 0.04960449637488583 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6296296296296297, + "acc_stderr": 0.04668408033024931, + 
"acc_norm": 0.6296296296296297, + "acc_norm_stderr": 0.04668408033024931 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4039408866995074, + "acc_stderr": 0.0345245390382204, + "acc_norm": 0.4039408866995074, + "acc_norm_stderr": 0.0345245390382204 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5548387096774193, + "acc_stderr": 0.028272410186214906, + "acc_norm": 0.5548387096774193, + "acc_norm_stderr": 0.028272410186214906 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.8034188034188035, + "acc_stderr": 0.02603538609895129, + "acc_norm": 0.8034188034188035, + "acc_norm_stderr": 0.02603538609895129 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5547169811320755, + "acc_stderr": 0.03058805297427066, + "acc_norm": 0.5547169811320755, + "acc_norm_stderr": 0.03058805297427066 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5636363636363636, + "acc_stderr": 0.04750185058907296, + "acc_norm": 0.5636363636363636, + "acc_norm_stderr": 0.04750185058907296 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.31851851851851853, + "acc_stderr": 0.02840653309060846, + "acc_norm": 0.31851851851851853, + "acc_norm_stderr": 0.02840653309060846 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.038020397601079024, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.038020397601079024 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6915422885572139, + "acc_stderr": 0.032658195885126966, + "acc_norm": 0.6915422885572139, + "acc_norm_stderr": 0.032658195885126966 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4624277456647399, + "acc_stderr": 0.0380168510452446, + "acc_norm": 0.4624277456647399, + "acc_norm_stderr": 0.0380168510452446 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3968253968253968, + "acc_stderr": 0.02519710107424649, + "acc_norm": 0.3968253968253968, + "acc_norm_stderr": 0.02519710107424649 + }, + 
"harness|ko_mmlu_college_biology|5": { + "acc": 0.4305555555555556, + "acc_stderr": 0.04140685639111502, + "acc_norm": 0.4305555555555556, + "acc_norm_stderr": 0.04140685639111502 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.76, + "acc_stderr": 0.04292346959909284, + "acc_norm": 0.76, + "acc_norm_stderr": 0.04292346959909284 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5433526011560693, + "acc_stderr": 0.026817718130348923, + "acc_norm": 0.5433526011560693, + "acc_norm_stderr": 0.026817718130348923 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.49079754601226994, + "acc_stderr": 0.03927705600787443, + "acc_norm": 0.49079754601226994, + "acc_norm_stderr": 0.03927705600787443 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6172839506172839, + "acc_stderr": 0.027044538138402595, + "acc_norm": 0.6172839506172839, + "acc_norm_stderr": 0.027044538138402595 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6580310880829016, + "acc_stderr": 0.03423465100104284, + "acc_norm": 0.6580310880829016, + "acc_norm_stderr": 0.03423465100104284 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.43859649122807015, + "acc_stderr": 0.04668000738510455, + "acc_norm": 0.43859649122807015, + "acc_norm_stderr": 0.04668000738510455 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.636697247706422, + "acc_stderr": 0.020620603919625804, + "acc_norm": 0.636697247706422, + "acc_norm_stderr": 0.020620603919625804 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.31746031746031744, + "acc_stderr": 0.0416345303130286, + "acc_norm": 0.31746031746031744, + "acc_norm_stderr": 0.0416345303130286 + 
}, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5784313725490197, + "acc_stderr": 0.028275490156791455, + "acc_norm": 0.5784313725490197, + "acc_norm_stderr": 0.028275490156791455 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.53, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.53, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7272727272727273, + "acc_stderr": 0.04065578140908705, + "acc_norm": 0.7272727272727273, + "acc_norm_stderr": 0.04065578140908705 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5328947368421053, + "acc_stderr": 0.040601270352363966, + "acc_norm": 0.5328947368421053, + "acc_norm_stderr": 0.040601270352363966 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.46895424836601307, + "acc_stderr": 0.020188804456361887, + "acc_norm": 0.46895424836601307, + "acc_norm_stderr": 0.020188804456361887 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3617021276595745, + "acc_stderr": 0.028663820147199495, + "acc_norm": 0.3617021276595745, + "acc_norm_stderr": 0.028663820147199495 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.38392857142857145, + "acc_stderr": 0.04616143075028546, + "acc_norm": 0.38392857142857145, + "acc_norm_stderr": 0.04616143075028546 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4351851851851852, + "acc_stderr": 0.033812000056435254, + "acc_norm": 0.4351851851851852, + "acc_norm_stderr": 0.033812000056435254 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2212290502793296, + "acc_stderr": 0.013882164598887288, + "acc_norm": 0.2212290502793296, + "acc_norm_stderr": 0.013882164598887288 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.7, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.7, + 
"acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.48161764705882354, + "acc_stderr": 0.030352303395351964, + "acc_norm": 0.48161764705882354, + "acc_norm_stderr": 0.030352303395351964 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5346938775510204, + "acc_stderr": 0.03193207024425314, + "acc_norm": 0.5346938775510204, + "acc_norm_stderr": 0.03193207024425314 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6877637130801688, + "acc_stderr": 0.030165137867847018, + "acc_norm": 0.6877637130801688, + "acc_norm_stderr": 0.030165137867847018 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3741851368970013, + "acc_stderr": 0.012359335618172065, + "acc_norm": 0.3741851368970013, + "acc_norm_stderr": 0.012359335618172065 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5833333333333334, + "acc_stderr": 0.03460228327239171, + "acc_norm": 0.5833333333333334, + "acc_norm_stderr": 0.03460228327239171 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6121212121212121, + "acc_stderr": 0.0380491365397101, + "acc_norm": 0.6121212121212121, + "acc_norm_stderr": 0.0380491365397101 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.23745410036719705, + "mc1_stderr": 0.014896277441041852, + "mc2": 0.371641156065405, + "mc2_stderr": 0.014765639375053739 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4887839433293979, + "acc_stderr": 0.017186028469489283, + "acc_norm": 0.5336481700118064, + "acc_norm_stderr": 0.017151384117131865 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + 
"harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + 
"harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "ENERGY-DRINK-LOVE/leaderboard_inst_v1.3_deup_LDCC-SOLAR-10.7B_SFT", + "model_sha": "145c81e2b96a7a498dae1de112ec0062c812dfc8", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/ENERGY-DRINK-LOVE/leaderboard_inst_v1.5_LDCC-SOLAR-10.7B_SFT/result_2024-03-04 07:17:52.json b/ENERGY-DRINK-LOVE/leaderboard_inst_v1.5_LDCC-SOLAR-10.7B_SFT/result_2024-03-04 07:17:52.json new file mode 100644 index 0000000000000000000000000000000000000000..c31a938c932525e7dd6b3e67cb895cef602fbc83 --- /dev/null +++ b/ENERGY-DRINK-LOVE/leaderboard_inst_v1.5_LDCC-SOLAR-10.7B_SFT/result_2024-03-04 07:17:52.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.44795221843003413, + "acc_stderr": 0.014532011498211669, + "acc_norm": 0.4931740614334471, + "acc_norm_stderr": 0.014610029151379813 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4473212507468632, + "acc_stderr": 0.004962010338226348, + "acc_norm": 0.5994821748655647, + "acc_norm_stderr": 0.004890019356021089 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6023391812865497, + "acc_stderr": 0.03753638955761691, + "acc_norm": 0.6023391812865497, + "acc_norm_stderr": 0.03753638955761691 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6699029126213593, + "acc_stderr": 0.046561471100123514, + "acc_norm": 0.6699029126213593, + "acc_norm_stderr": 0.046561471100123514 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6628352490421456, + "acc_stderr": 0.016905207420803547, + "acc_norm": 
0.6628352490421456, + "acc_norm_stderr": 0.016905207420803547 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.43703703703703706, + "acc_stderr": 0.04284958639753399, + "acc_norm": 0.43703703703703706, + "acc_norm_stderr": 0.04284958639753399 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.502127659574468, + "acc_stderr": 0.03268572658667492, + "acc_norm": 0.502127659574468, + "acc_norm_stderr": 0.03268572658667492 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4397590361445783, + "acc_stderr": 0.03864139923699121, + "acc_norm": 0.4397590361445783, + "acc_norm_stderr": 0.03864139923699121 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5916398713826366, + "acc_stderr": 0.027917050748484627, + "acc_norm": 0.5916398713826366, + "acc_norm_stderr": 0.027917050748484627 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5022421524663677, + "acc_stderr": 0.033557465352232634, + "acc_norm": 0.5022421524663677, + "acc_norm_stderr": 0.033557465352232634 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6412213740458015, + "acc_stderr": 0.04206739313864908, + "acc_norm": 0.6412213740458015, + "acc_norm_stderr": 0.04206739313864908 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7070707070707071, + "acc_stderr": 0.03242497958178817, + "acc_norm": 0.7070707070707071, + "acc_norm_stderr": 0.03242497958178817 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4896551724137931, + "acc_stderr": 0.04165774775728762, + "acc_norm": 0.4896551724137931, + "acc_norm_stderr": 0.04165774775728762 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237655, + 
"acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237655 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5462184873949579, + "acc_stderr": 0.032339434681820885, + "acc_norm": 0.5462184873949579, + "acc_norm_stderr": 0.032339434681820885 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5333333333333333, + "acc_stderr": 0.025294608023986455, + "acc_norm": 0.5333333333333333, + "acc_norm_stderr": 0.025294608023986455 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.57, + "acc_stderr": 0.04975698519562429, + "acc_norm": 0.57, + "acc_norm_stderr": 0.04975698519562429 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6296296296296297, + "acc_stderr": 0.04668408033024931, + "acc_norm": 0.6296296296296297, + "acc_norm_stderr": 0.04668408033024931 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3497536945812808, + "acc_stderr": 0.03355400904969566, + "acc_norm": 0.3497536945812808, + "acc_norm_stderr": 0.03355400904969566 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5774193548387097, + "acc_stderr": 0.02810096472427264, + "acc_norm": 0.5774193548387097, + "acc_norm_stderr": 0.02810096472427264 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7521367521367521, + "acc_stderr": 0.028286324075564404, + "acc_norm": 0.7521367521367521, + "acc_norm_stderr": 0.028286324075564404 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5735849056603773, + "acc_stderr": 0.030437794342983042, + "acc_norm": 0.5735849056603773, + "acc_norm_stderr": 0.030437794342983042 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5181818181818182, + "acc_stderr": 0.04785964010794916, + "acc_norm": 0.5181818181818182, + "acc_norm_stderr": 0.04785964010794916 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 
0.31851851851851853, + "acc_stderr": 0.02840653309060846, + "acc_norm": 0.31851851851851853, + "acc_norm_stderr": 0.02840653309060846 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + "acc_stderr": 0.037345356767871984, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.037345356767871984 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.681592039800995, + "acc_stderr": 0.032941184790540944, + "acc_norm": 0.681592039800995, + "acc_norm_stderr": 0.032941184790540944 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4682080924855491, + "acc_stderr": 0.03804749744364764, + "acc_norm": 0.4682080924855491, + "acc_norm_stderr": 0.03804749744364764 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.024677862841332786, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.024677862841332786 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4027777777777778, + "acc_stderr": 0.04101405519842425, + "acc_norm": 0.4027777777777778, + "acc_norm_stderr": 0.04101405519842425 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.75, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.75, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5664739884393064, + "acc_stderr": 0.026680134761679217, + "acc_norm": 0.5664739884393064, + "acc_norm_stderr": 0.026680134761679217 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.44785276073619634, + "acc_stderr": 0.03906947479456601, + "acc_norm": 0.44785276073619634, + "acc_norm_stderr": 0.03906947479456601 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5462962962962963, + "acc_stderr": 0.0277012284685426, + "acc_norm": 0.5462962962962963, + "acc_norm_stderr": 0.0277012284685426 + }, + 
"harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6010362694300518, + "acc_stderr": 0.03533999094065696, + "acc_norm": 0.6010362694300518, + "acc_norm_stderr": 0.03533999094065696 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04434600701584925, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04434600701584925 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6330275229357798, + "acc_stderr": 0.020664675659520536, + "acc_norm": 0.6330275229357798, + "acc_norm_stderr": 0.020664675659520536 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.037184890068181146, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.037184890068181146 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5849673202614379, + "acc_stderr": 0.028213504177824093, + "acc_norm": 0.5849673202614379, + "acc_norm_stderr": 0.028213504177824093 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.52, + "acc_stderr": 0.05021167315686779, + "acc_norm": 0.52, + "acc_norm_stderr": 0.05021167315686779 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6694214876033058, + "acc_stderr": 0.04294340845212094, + "acc_norm": 0.6694214876033058, + "acc_norm_stderr": 0.04294340845212094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5263157894736842, + "acc_stderr": 0.040633027314866725, + "acc_norm": 0.5263157894736842, + "acc_norm_stderr": 0.040633027314866725 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.02008736207670285, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.02008736207670285 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32269503546099293, + "acc_stderr": 0.02788913930053479, + "acc_norm": 0.32269503546099293, + 
"acc_norm_stderr": 0.02788913930053479 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.39285714285714285, + "acc_stderr": 0.04635550135609976, + "acc_norm": 0.39285714285714285, + "acc_norm_stderr": 0.04635550135609976 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4212962962962963, + "acc_stderr": 0.03367462138896078, + "acc_norm": 0.4212962962962963, + "acc_norm_stderr": 0.03367462138896078 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.19106145251396647, + "acc_stderr": 0.013148479802450798, + "acc_norm": 0.19106145251396647, + "acc_norm_stderr": 0.013148479802450798 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5367647058823529, + "acc_stderr": 0.030290619180485694, + "acc_norm": 0.5367647058823529, + "acc_norm_stderr": 0.030290619180485694 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5918367346938775, + "acc_stderr": 0.03146465712827424, + "acc_norm": 0.5918367346938775, + "acc_norm_stderr": 0.03146465712827424 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6835443037974683, + "acc_stderr": 0.030274974880218974, + "acc_norm": 0.6835443037974683, + "acc_norm_stderr": 0.030274974880218974 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.35984354628422427, + "acc_stderr": 0.012258260483689805, + "acc_norm": 0.35984354628422427, + "acc_norm_stderr": 0.012258260483689805 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6421568627450981, + "acc_stderr": 0.03364487286088299, + "acc_norm": 0.6421568627450981, + "acc_norm_stderr": 0.03364487286088299 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 
0.6303030303030303, + "acc_stderr": 0.03769430314512568, + "acc_norm": 0.6303030303030303, + "acc_norm_stderr": 0.03769430314512568 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2558139534883721, + "mc1_stderr": 0.015274176219283344, + "mc2": 0.3960929779706412, + "mc2_stderr": 0.014964127725897232 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3140495867768595, + "acc_stderr": 0.01595733243429507, + "acc_norm": 0.3612750885478158, + "acc_norm_stderr": 0.016515463022412007 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + 
"harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "ENERGY-DRINK-LOVE/leaderboard_inst_v1.5_LDCC-SOLAR-10.7B_SFT", + "model_sha": "a209a3297068a834c50c3141d8dc56cd78754280", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/ENERGY-DRINK-LOVE/nox_DPOv3/result_2024-03-26 01:58:14.json b/ENERGY-DRINK-LOVE/nox_DPOv3/result_2024-03-26 01:58:14.json new file mode 100644 index 0000000000000000000000000000000000000000..4487b3b9a08c12eb3f98a73947e137cc211537e9 --- /dev/null +++ b/ENERGY-DRINK-LOVE/nox_DPOv3/result_2024-03-26 01:58:14.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.7073378839590444, 
+ "acc_stderr": 0.013295916103619425, + "acc_norm": 0.7457337883959044, + "acc_norm_stderr": 0.01272499994515774 + }, + "harness|ko_hellaswag|10": { + "acc": 0.5908185620394344, + "acc_stderr": 0.004906779523192669, + "acc_norm": 0.7459669388568014, + "acc_norm_stderr": 0.004344266179634919 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.7309941520467836, + "acc_stderr": 0.0340105262010409, + "acc_norm": 0.7309941520467836, + "acc_norm_stderr": 0.0340105262010409 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.7281553398058253, + "acc_stderr": 0.044052680241409216, + "acc_norm": 0.7281553398058253, + "acc_norm_stderr": 0.044052680241409216 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.698595146871009, + "acc_stderr": 0.016409091097268787, + "acc_norm": 0.698595146871009, + "acc_norm_stderr": 0.016409091097268787 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.043163785995113245, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.043163785995113245 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4978723404255319, + "acc_stderr": 0.03268572658667491, + "acc_norm": 0.4978723404255319, + "acc_norm_stderr": 0.03268572658667491 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4879518072289157, + "acc_stderr": 0.0389136449583582, + "acc_norm": 0.4879518072289157, + "acc_norm_stderr": 0.0389136449583582 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6495176848874598, + "acc_stderr": 0.027098652621301747, + "acc_norm": 0.6495176848874598, + "acc_norm_stderr": 0.027098652621301747 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.6457399103139013, + "acc_stderr": 0.032100621541349864, + "acc_norm": 0.6457399103139013, + "acc_norm_stderr": 0.032100621541349864 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 
0.6030534351145038, + "acc_stderr": 0.04291135671009224, + "acc_norm": 0.6030534351145038, + "acc_norm_stderr": 0.04291135671009224 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7878787878787878, + "acc_stderr": 0.029126522834586815, + "acc_norm": 0.7878787878787878, + "acc_norm_stderr": 0.029126522834586815 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5724137931034483, + "acc_stderr": 0.04122737111370332, + "acc_norm": 0.5724137931034483, + "acc_norm_stderr": 0.04122737111370332 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.04617034827006717, + "acc_norm": 0.3137254901960784, + "acc_norm_stderr": 0.04617034827006717 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6302521008403361, + "acc_stderr": 0.03135709599613591, + "acc_norm": 0.6302521008403361, + "acc_norm_stderr": 0.03135709599613591 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.6410256410256411, + "acc_stderr": 0.024321738484602354, + "acc_norm": 0.6410256410256411, + "acc_norm_stderr": 0.024321738484602354 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.68, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.68, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6574074074074074, + "acc_stderr": 0.04587904741301812, + "acc_norm": 0.6574074074074074, + "acc_norm_stderr": 0.04587904741301812 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4729064039408867, + "acc_stderr": 0.03512819077876106, + "acc_norm": 0.4729064039408867, + "acc_norm_stderr": 0.03512819077876106 + }, + 
"harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6161290322580645, + "acc_stderr": 0.027666182075539638, + "acc_norm": 0.6161290322580645, + "acc_norm_stderr": 0.027666182075539638 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.8418803418803419, + "acc_stderr": 0.023902325549560417, + "acc_norm": 0.8418803418803419, + "acc_norm_stderr": 0.023902325549560417 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5660377358490566, + "acc_stderr": 0.030503292013342596, + "acc_norm": 0.5660377358490566, + "acc_norm_stderr": 0.030503292013342596 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6272727272727273, + "acc_stderr": 0.04631381319425465, + "acc_norm": 0.6272727272727273, + "acc_norm_stderr": 0.04631381319425465 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3851851851851852, + "acc_stderr": 0.029670906124630882, + "acc_norm": 0.3851851851851852, + "acc_norm_stderr": 0.029670906124630882 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.37748344370860926, + "acc_stderr": 0.03958027231121569, + "acc_norm": 0.37748344370860926, + "acc_norm_stderr": 0.03958027231121569 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7611940298507462, + "acc_stderr": 0.030147775935409217, + "acc_norm": 0.7611940298507462, + "acc_norm_stderr": 0.030147775935409217 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5317919075144508, + "acc_stderr": 0.03804749744364764, + "acc_norm": 0.5317919075144508, + "acc_norm_stderr": 0.03804749744364764 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.4576719576719577, + "acc_stderr": 0.02565886886205832, + "acc_norm": 0.4576719576719577, + "acc_norm_stderr": 0.02565886886205832 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.625, + "acc_stderr": 0.04048439222695598, + "acc_norm": 0.625, + "acc_norm_stderr": 0.04048439222695598 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + 
"acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.78, + "acc_stderr": 0.04163331998932263, + "acc_norm": 0.78, + "acc_norm_stderr": 0.04163331998932263 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5982658959537572, + "acc_stderr": 0.026394104177643634, + "acc_norm": 0.5982658959537572, + "acc_norm_stderr": 0.026394104177643634 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.6196319018404908, + "acc_stderr": 0.038142698932618374, + "acc_norm": 0.6196319018404908, + "acc_norm_stderr": 0.038142698932618374 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6759259259259259, + "acc_stderr": 0.02604176620271716, + "acc_norm": 0.6759259259259259, + "acc_norm_stderr": 0.02604176620271716 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7564766839378239, + "acc_stderr": 0.030975436386845426, + "acc_norm": 0.7564766839378239, + "acc_norm_stderr": 0.030975436386845426 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.4649122807017544, + "acc_stderr": 0.046920083813689104, + "acc_norm": 0.4649122807017544, + "acc_norm_stderr": 0.046920083813689104 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.744954128440367, + "acc_stderr": 0.018688500856535863, + "acc_norm": 0.744954128440367, + "acc_norm_stderr": 0.018688500856535863 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4603174603174603, + "acc_stderr": 0.04458029125470973, + "acc_norm": 0.4603174603174603, + "acc_norm_stderr": 0.04458029125470973 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.6209150326797386, + "acc_stderr": 0.027780141207023344, + "acc_norm": 0.6209150326797386, + "acc_norm_stderr": 0.027780141207023344 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.67, + "acc_stderr": 0.04725815626252607, + "acc_norm": 0.67, 
+ "acc_norm_stderr": 0.04725815626252607 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7520661157024794, + "acc_stderr": 0.039418975265163025, + "acc_norm": 0.7520661157024794, + "acc_norm_stderr": 0.039418975265163025 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.6644736842105263, + "acc_stderr": 0.038424985593952674, + "acc_norm": 0.6644736842105263, + "acc_norm_stderr": 0.038424985593952674 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5931372549019608, + "acc_stderr": 0.019873802005061173, + "acc_norm": 0.5931372549019608, + "acc_norm_stderr": 0.019873802005061173 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.42907801418439717, + "acc_stderr": 0.029525914302558562, + "acc_norm": 0.42907801418439717, + "acc_norm_stderr": 0.029525914302558562 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.04697113923010213, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.04697113923010213 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5, + "acc_stderr": 0.034099716973523674, + "acc_norm": 0.5, + "acc_norm_stderr": 0.034099716973523674 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.37206703910614525, + "acc_stderr": 0.016165847583563292, + "acc_norm": 0.37206703910614525, + "acc_norm_stderr": 0.016165847583563292 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.66, + "acc_stderr": 0.04760952285695238, + "acc_norm": 0.66, + "acc_norm_stderr": 0.04760952285695238 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5367647058823529, + "acc_stderr": 0.03029061918048569, + "acc_norm": 0.5367647058823529, + "acc_norm_stderr": 0.03029061918048569 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.710204081632653, + "acc_stderr": 
0.02904308868330434, + "acc_norm": 0.710204081632653, + "acc_norm_stderr": 0.02904308868330434 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7383966244725738, + "acc_stderr": 0.028609516716994934, + "acc_norm": 0.7383966244725738, + "acc_norm_stderr": 0.028609516716994934 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.45045632333767927, + "acc_stderr": 0.012707390438502348, + "acc_norm": 0.45045632333767927, + "acc_norm_stderr": 0.012707390438502348 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6813725490196079, + "acc_stderr": 0.03270287181482082, + "acc_norm": 0.6813725490196079, + "acc_norm_stderr": 0.03270287181482082 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6181818181818182, + "acc_stderr": 0.03793713171165634, + "acc_norm": 0.6181818181818182, + "acc_norm_stderr": 0.03793713171165634 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.7172582619339045, + "mc1_stderr": 0.015764770836777298, + "mc2": 0.8192245102794692, + "mc2_stderr": 0.012628467709646754 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5147579693034239, + "acc_stderr": 0.017182864434998564, + "acc_norm": 0.5289256198347108, + "acc_norm_stderr": 0.017161563949916348 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + 
"harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + 
"model_name": "ENERGY-DRINK-LOVE/nox_DPOv3", + "model_sha": "b83a769f764d3060a516c3ecbedad554a1922e46", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Easy-Systems/easy-ko-Llama3-8b-Instruct-v1/result_2024-05-16 23:53:42.json b/Easy-Systems/easy-ko-Llama3-8b-Instruct-v1/result_2024-05-16 23:53:42.json new file mode 100644 index 0000000000000000000000000000000000000000..8b717c5df4d476aab5fb234fb7ae7d5dc1210562 --- /dev/null +++ b/Easy-Systems/easy-ko-Llama3-8b-Instruct-v1/result_2024-05-16 23:53:42.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.40955631399317405, + "acc_stderr": 0.014370358632472432, + "acc_norm": 0.4522184300341297, + "acc_norm_stderr": 0.014544519880633825 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3794064927305318, + "acc_stderr": 0.004842476363739972, + "acc_norm": 0.49790878311093406, + "acc_norm_stderr": 0.004989737768749952 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5730994152046783, + "acc_stderr": 0.03793620616529916, + "acc_norm": 0.5730994152046783, + "acc_norm_stderr": 0.03793620616529916 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6213592233009708, + "acc_stderr": 0.048026946982589726, + "acc_norm": 0.6213592233009708, + "acc_norm_stderr": 0.048026946982589726 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5057471264367817, + "acc_stderr": 0.01787878232612923, + "acc_norm": 0.5057471264367817, + "acc_norm_stderr": 0.01787878232612923 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45925925925925926, + "acc_stderr": 0.04304979692464243, + "acc_norm": 0.45925925925925926, + "acc_norm_stderr": 0.04304979692464243 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + 
"harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4595744680851064, + "acc_stderr": 0.032579014820998356, + "acc_norm": 0.4595744680851064, + "acc_norm_stderr": 0.032579014820998356 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.40963855421686746, + "acc_stderr": 0.03828401115079022, + "acc_norm": 0.40963855421686746, + "acc_norm_stderr": 0.03828401115079022 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4983922829581994, + "acc_stderr": 0.02839794490780661, + "acc_norm": 0.4983922829581994, + "acc_norm_stderr": 0.02839794490780661 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5022421524663677, + "acc_stderr": 0.033557465352232634, + "acc_norm": 0.5022421524663677, + "acc_norm_stderr": 0.033557465352232634 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5190839694656488, + "acc_stderr": 0.04382094705550988, + "acc_norm": 0.5190839694656488, + "acc_norm_stderr": 0.04382094705550988 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.48, + "acc_stderr": 0.05021167315686779, + "acc_norm": 0.48, + "acc_norm_stderr": 0.05021167315686779 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5050505050505051, + "acc_stderr": 0.035621707606254015, + "acc_norm": 0.5050505050505051, + "acc_norm_stderr": 0.035621707606254015 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4827586206896552, + "acc_stderr": 0.04164188720169377, + "acc_norm": 0.4827586206896552, + "acc_norm_stderr": 0.04164188720169377 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.043364327079931785, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.043364327079931785 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5210084033613446, + "acc_stderr": 0.032449808499900284, + "acc_norm": 0.5210084033613446, + "acc_norm_stderr": 0.032449808499900284 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.47692307692307695, + "acc_stderr": 0.025323990861736128, + "acc_norm": 
0.47692307692307695, + "acc_norm_stderr": 0.025323990861736128 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.04803752235190192, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.04803752235190192 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4039408866995074, + "acc_stderr": 0.0345245390382204, + "acc_norm": 0.4039408866995074, + "acc_norm_stderr": 0.0345245390382204 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5129032258064516, + "acc_stderr": 0.02843453315268187, + "acc_norm": 0.5129032258064516, + "acc_norm_stderr": 0.02843453315268187 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7735042735042735, + "acc_stderr": 0.027421007295392923, + "acc_norm": 0.7735042735042735, + "acc_norm_stderr": 0.027421007295392923 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.49056603773584906, + "acc_stderr": 0.030767394707808093, + "acc_norm": 0.49056603773584906, + "acc_norm_stderr": 0.030767394707808093 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.04769300568972744, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.04769300568972744 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.34444444444444444, + "acc_stderr": 0.028972648884844267, + "acc_norm": 0.34444444444444444, + "acc_norm_stderr": 0.028972648884844267 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.03710185726119995, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.03710185726119995 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.681592039800995, + "acc_stderr": 
0.03294118479054095, + "acc_norm": 0.681592039800995, + "acc_norm_stderr": 0.03294118479054095 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4393063583815029, + "acc_stderr": 0.037842719328874674, + "acc_norm": 0.4393063583815029, + "acc_norm_stderr": 0.037842719328874674 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.36772486772486773, + "acc_stderr": 0.02483383982556242, + "acc_norm": 0.36772486772486773, + "acc_norm_stderr": 0.02483383982556242 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4097222222222222, + "acc_stderr": 0.04112490974670787, + "acc_norm": 0.4097222222222222, + "acc_norm_stderr": 0.04112490974670787 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.7, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.7, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5722543352601156, + "acc_stderr": 0.02663653974111608, + "acc_norm": 0.5722543352601156, + "acc_norm_stderr": 0.02663653974111608 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4539877300613497, + "acc_stderr": 0.0391170190467718, + "acc_norm": 0.4539877300613497, + "acc_norm_stderr": 0.0391170190467718 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5370370370370371, + "acc_stderr": 0.027744313443376536, + "acc_norm": 0.5370370370370371, + "acc_norm_stderr": 0.027744313443376536 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5544041450777202, + "acc_stderr": 0.03587014986075659, + "acc_norm": 0.5544041450777202, + "acc_norm_stderr": 0.03587014986075659 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.34210526315789475, + 
"acc_stderr": 0.044629175353369376, + "acc_norm": 0.34210526315789475, + "acc_norm_stderr": 0.044629175353369376 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5761467889908257, + "acc_stderr": 0.021187263209087523, + "acc_norm": 0.5761467889908257, + "acc_norm_stderr": 0.021187263209087523 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.38095238095238093, + "acc_stderr": 0.043435254289490965, + "acc_norm": 0.38095238095238093, + "acc_norm_stderr": 0.043435254289490965 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5163398692810458, + "acc_stderr": 0.028614624752805434, + "acc_norm": 0.5163398692810458, + "acc_norm_stderr": 0.028614624752805434 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.57, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.57, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6611570247933884, + "acc_stderr": 0.04320767807536669, + "acc_norm": 0.6611570247933884, + "acc_norm_stderr": 0.04320767807536669 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5131578947368421, + "acc_stderr": 0.04067533136309172, + "acc_norm": 0.5131578947368421, + "acc_norm_stderr": 0.04067533136309172 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.44281045751633985, + "acc_stderr": 0.020095083154577347, + "acc_norm": 0.44281045751633985, + "acc_norm_stderr": 0.020095083154577347 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3404255319148936, + "acc_stderr": 0.02826765748265015, + "acc_norm": 0.3404255319148936, + "acc_norm_stderr": 0.02826765748265015 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4107142857142857, + "acc_stderr": 0.04669510663875193, + "acc_norm": 0.4107142857142857, + "acc_norm_stderr": 0.04669510663875193 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.36574074074074076, + "acc_stderr": 0.03284738857647206, + "acc_norm": 0.36574074074074076, + "acc_norm_stderr": 0.03284738857647206 + }, + 
"harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2837988826815642, + "acc_stderr": 0.015078358970751757, + "acc_norm": 0.2837988826815642, + "acc_norm_stderr": 0.015078358970751757 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.34191176470588236, + "acc_stderr": 0.02881472242225418, + "acc_norm": 0.34191176470588236, + "acc_norm_stderr": 0.02881472242225418 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5714285714285714, + "acc_stderr": 0.03168091161233882, + "acc_norm": 0.5714285714285714, + "acc_norm_stderr": 0.03168091161233882 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6582278481012658, + "acc_stderr": 0.030874537537553617, + "acc_norm": 0.6582278481012658, + "acc_norm_stderr": 0.030874537537553617 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.36766623207301175, + "acc_stderr": 0.012314845910071705, + "acc_norm": 0.36766623207301175, + "acc_norm_stderr": 0.012314845910071705 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5196078431372549, + "acc_stderr": 0.03506612560524866, + "acc_norm": 0.5196078431372549, + "acc_norm_stderr": 0.03506612560524866 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6060606060606061, + "acc_stderr": 0.038154943086889305, + "acc_norm": 0.6060606060606061, + "acc_norm_stderr": 0.038154943086889305 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.29865361077111385, + "mc1_stderr": 0.016021570613768545, + "mc2": 0.46980069864120316, + "mc2_stderr": 0.01587190224210581 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4911452184179457, + "acc_stderr": 0.01718765819933674, + "acc_norm": 
0.512396694214876, + "acc_norm_stderr": 0.017185069732676538 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + 
"harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Easy-Systems/easy-ko-Llama3-8b-Instruct-v1", + "model_sha": "b557406b69518b2ffa38a9eed3963b57ae0294bf", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Edentns/DataVortexM-7B-Instruct-v0.1/result_2024-01-04 00:35:22.json b/Edentns/DataVortexM-7B-Instruct-v0.1/result_2024-01-04 00:35:22.json new file mode 100644 index 0000000000000000000000000000000000000000..edc367007ad39c4ffa828e85956be951e93fe29f --- /dev/null +++ b/Edentns/DataVortexM-7B-Instruct-v0.1/result_2024-01-04 00:35:22.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2901023890784983, + "acc_stderr": 0.013261573677520773, + "acc_norm": 0.3412969283276451, + "acc_norm_stderr": 0.01385583128749772 + }, + "harness|ko_hellaswag|10": { + "acc": 0.33827922724556864, + "acc_stderr": 0.004721571443354458, + "acc_norm": 0.4235212109141605, + "acc_norm_stderr": 0.0049310654341736815 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.39766081871345027, + "acc_stderr": 0.0375363895576169, + "acc_norm": 
0.39766081871345027, + "acc_norm_stderr": 0.0375363895576169 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5048543689320388, + "acc_stderr": 0.04950504382128921, + "acc_norm": 0.5048543689320388, + "acc_norm_stderr": 0.04950504382128921 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.41890166028097064, + "acc_stderr": 0.017643205052377167, + "acc_norm": 0.41890166028097064, + "acc_norm_stderr": 0.017643205052377167 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04072314811876837, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04072314811876837 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.42127659574468085, + "acc_stderr": 0.03227834510146267, + "acc_norm": 0.42127659574468085, + "acc_norm_stderr": 0.03227834510146267 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3795180722891566, + "acc_stderr": 0.037777988227480165, + "acc_norm": 0.3795180722891566, + "acc_norm_stderr": 0.037777988227480165 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.38263665594855306, + "acc_stderr": 0.02760468902858198, + "acc_norm": 0.38263665594855306, + "acc_norm_stderr": 0.02760468902858198 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3901345291479821, + "acc_stderr": 0.03273766725459157, + "acc_norm": 0.3901345291479821, + "acc_norm_stderr": 0.03273766725459157 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.40458015267175573, + "acc_stderr": 0.043046937953806645, + "acc_norm": 0.40458015267175573, + "acc_norm_stderr": 0.043046937953806645 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.035402943770953675, + "acc_norm": 
0.4444444444444444, + "acc_norm_stderr": 0.035402943770953675 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.45517241379310347, + "acc_stderr": 0.04149886942192117, + "acc_norm": 0.45517241379310347, + "acc_norm_stderr": 0.04149886942192117 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.042801058373643966, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.042801058373643966 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.44537815126050423, + "acc_stderr": 0.032284106267163895, + "acc_norm": 0.44537815126050423, + "acc_norm_stderr": 0.032284106267163895 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.36153846153846153, + "acc_stderr": 0.02435958146539697, + "acc_norm": 0.36153846153846153, + "acc_norm_stderr": 0.02435958146539697 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.53, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.53, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.33004926108374383, + "acc_stderr": 0.03308530426228258, + "acc_norm": 0.33004926108374383, + "acc_norm_stderr": 0.03308530426228258 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.36451612903225805, + "acc_stderr": 0.027379871229943252, + "acc_norm": 0.36451612903225805, + "acc_norm_stderr": 0.027379871229943252 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6196581196581197, + "acc_stderr": 0.031804252043840985, + "acc_norm": 0.6196581196581197, + "acc_norm_stderr": 0.031804252043840985 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 
0.37358490566037733, + "acc_stderr": 0.02977308271331988, + "acc_norm": 0.37358490566037733, + "acc_norm_stderr": 0.02977308271331988 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.45454545454545453, + "acc_stderr": 0.04769300568972743, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.04769300568972743 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3074074074074074, + "acc_stderr": 0.028133252578815646, + "acc_norm": 0.3074074074074074, + "acc_norm_stderr": 0.028133252578815646 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33112582781456956, + "acc_stderr": 0.038425817186598696, + "acc_norm": 0.33112582781456956, + "acc_norm_stderr": 0.038425817186598696 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5522388059701493, + "acc_stderr": 0.03516184772952167, + "acc_norm": 0.5522388059701493, + "acc_norm_stderr": 0.03516184772952167 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.32947976878612717, + "acc_stderr": 0.03583901754736413, + "acc_norm": 0.32947976878612717, + "acc_norm_stderr": 0.03583901754736413 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.024278568024307695, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.024278568024307695 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2708333333333333, + "acc_stderr": 0.03716177437566018, + "acc_norm": 0.2708333333333333, + "acc_norm_stderr": 0.03716177437566018 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.62, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.62, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.43352601156069365, + "acc_stderr": 0.026680134761679214, + "acc_norm": 0.43352601156069365, + "acc_norm_stderr": 0.026680134761679214 + }, + 
"harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4294478527607362, + "acc_stderr": 0.038890666191127216, + "acc_norm": 0.4294478527607362, + "acc_norm_stderr": 0.038890666191127216 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.41975308641975306, + "acc_stderr": 0.027460099557005135, + "acc_norm": 0.41975308641975306, + "acc_norm_stderr": 0.027460099557005135 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.43005181347150256, + "acc_stderr": 0.03572954333144808, + "acc_norm": 0.43005181347150256, + "acc_norm_stderr": 0.03572954333144808 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.04142439719489362, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.04142439719489362 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.4018348623853211, + "acc_stderr": 0.02102010617299701, + "acc_norm": 0.4018348623853211, + "acc_norm_stderr": 0.02102010617299701 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04216370213557835, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04216370213557835 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.40522875816993464, + "acc_stderr": 0.028110928492809075, + "acc_norm": 0.40522875816993464, + "acc_norm_stderr": 0.028110928492809075 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.045454545454545456, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.045454545454545456 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.32894736842105265, + "acc_stderr": 0.03823428969926606, + "acc_norm": 0.32894736842105265, + 
"acc_norm_stderr": 0.03823428969926606 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.32189542483660133, + "acc_stderr": 0.018901015322093095, + "acc_norm": 0.32189542483660133, + "acc_norm_stderr": 0.018901015322093095 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.36524822695035464, + "acc_stderr": 0.02872386385328128, + "acc_norm": 0.36524822695035464, + "acc_norm_stderr": 0.02872386385328128 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.36607142857142855, + "acc_stderr": 0.0457237235873743, + "acc_norm": 0.36607142857142855, + "acc_norm_stderr": 0.0457237235873743 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3287037037037037, + "acc_stderr": 0.03203614084670058, + "acc_norm": 0.3287037037037037, + "acc_norm_stderr": 0.03203614084670058 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.25921787709497207, + "acc_stderr": 0.014655780837497722, + "acc_norm": 0.25921787709497207, + "acc_norm_stderr": 0.014655780837497722 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.52, + "acc_stderr": 0.05021167315686779, + "acc_norm": 0.52, + "acc_norm_stderr": 0.05021167315686779 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.35661764705882354, + "acc_stderr": 0.02909720956841196, + "acc_norm": 0.35661764705882354, + "acc_norm_stderr": 0.02909720956841196 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.46530612244897956, + "acc_stderr": 0.03193207024425314, + "acc_norm": 0.46530612244897956, + "acc_norm_stderr": 0.03193207024425314 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5063291139240507, + "acc_stderr": 0.0325446201076786, + "acc_norm": 0.5063291139240507, + "acc_norm_stderr": 0.0325446201076786 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 
0.2940026075619296, + "acc_stderr": 0.011636062953698602, + "acc_norm": 0.2940026075619296, + "acc_norm_stderr": 0.011636062953698602 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.23039215686274508, + "acc_stderr": 0.02955429260569506, + "acc_norm": 0.23039215686274508, + "acc_norm_stderr": 0.02955429260569506 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2606060606060606, + "acc_stderr": 0.03427743175816524, + "acc_norm": 0.2606060606060606, + "acc_norm_stderr": 0.03427743175816524 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2876376988984088, + "mc1_stderr": 0.01584631510139481, + "mc2": 0.45464957639603176, + "mc2_stderr": 0.015510382231630185 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.31995277449822906, + "acc_stderr": 0.01603715384028053, + "acc_norm": 0.3837072018890201, + "acc_norm_stderr": 0.01671892463723183 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + 
"harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Edentns/DataVortexM-7B-Instruct-v0.1", + "model_sha": "8a43dc63c63ee8e03297327e2fc3e040c3997c0c", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git 
a/Edentns/DataVortexS-10.7B-dpo-v0.1/result_2024-01-10 04:37:18.json b/Edentns/DataVortexS-10.7B-dpo-v0.1/result_2024-01-10 04:37:18.json new file mode 100644 index 0000000000000000000000000000000000000000..a2006f5a165e20a2df38dacaa54e84f9bb9f7760 --- /dev/null +++ b/Edentns/DataVortexS-10.7B-dpo-v0.1/result_2024-01-10 04:37:18.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.36006825938566556, + "acc_stderr": 0.01402751681458519, + "acc_norm": 0.4786689419795222, + "acc_norm_stderr": 0.014598087973127108 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3886675960963951, + "acc_stderr": 0.004864513262194307, + "acc_norm": 0.5717984465245967, + "acc_norm_stderr": 0.004938068627349492 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6491228070175439, + "acc_stderr": 0.03660298834049163, + "acc_norm": 0.6491228070175439, + "acc_norm_stderr": 0.03660298834049163 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6116504854368932, + "acc_stderr": 0.04825729337356389, + "acc_norm": 0.6116504854368932, + "acc_norm_stderr": 0.04825729337356389 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6704980842911877, + "acc_stderr": 0.016808322261740442, + "acc_norm": 0.6704980842911877, + "acc_norm_stderr": 0.016808322261740442 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4, + "acc_stderr": 0.04232073695151589, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04232073695151589 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.15, + "acc_stderr": 0.03588702812826372, + "acc_norm": 0.15, + "acc_norm_stderr": 0.03588702812826372 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.5361702127659574, + "acc_stderr": 0.03260038511835771, + "acc_norm": 0.5361702127659574, + "acc_norm_stderr": 0.03260038511835771 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4939759036144578, + "acc_stderr": 0.03892212195333047, + "acc_norm": 0.4939759036144578, + "acc_norm_stderr": 0.03892212195333047 + }, + 
"harness|ko_mmlu_philosophy|5": { + "acc": 0.6430868167202572, + "acc_stderr": 0.02721042037593402, + "acc_norm": 0.6430868167202572, + "acc_norm_stderr": 0.02721042037593402 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5739910313901345, + "acc_stderr": 0.03318833286217281, + "acc_norm": 0.5739910313901345, + "acc_norm_stderr": 0.03318833286217281 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6641221374045801, + "acc_stderr": 0.04142313771996664, + "acc_norm": 0.6641221374045801, + "acc_norm_stderr": 0.04142313771996664 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.53, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.53, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7070707070707071, + "acc_stderr": 0.032424979581788166, + "acc_norm": 0.7070707070707071, + "acc_norm_stderr": 0.032424979581788166 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5448275862068965, + "acc_stderr": 0.04149886942192118, + "acc_norm": 0.5448275862068965, + "acc_norm_stderr": 0.04149886942192118 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3431372549019608, + "acc_stderr": 0.04724007352383888, + "acc_norm": 0.3431372549019608, + "acc_norm_stderr": 0.04724007352383888 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6302521008403361, + "acc_stderr": 0.03135709599613591, + "acc_norm": 0.6302521008403361, + "acc_norm_stderr": 0.03135709599613591 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5666666666666667, + "acc_stderr": 0.0251246535258851, + "acc_norm": 0.5666666666666667, + "acc_norm_stderr": 0.0251246535258851 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.58, + "acc_stderr": 0.04960449637488583, + "acc_norm": 0.58, + "acc_norm_stderr": 0.04960449637488583 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + 
}, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6851851851851852, + "acc_stderr": 0.04489931073591312, + "acc_norm": 0.6851851851851852, + "acc_norm_stderr": 0.04489931073591312 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3793103448275862, + "acc_stderr": 0.034139638059062345, + "acc_norm": 0.3793103448275862, + "acc_norm_stderr": 0.034139638059062345 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6064516129032258, + "acc_stderr": 0.02779187875313226, + "acc_norm": 0.6064516129032258, + "acc_norm_stderr": 0.02779187875313226 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.8162393162393162, + "acc_stderr": 0.025372139671722933, + "acc_norm": 0.8162393162393162, + "acc_norm_stderr": 0.025372139671722933 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5660377358490566, + "acc_stderr": 0.03050329201334259, + "acc_norm": 0.5660377358490566, + "acc_norm_stderr": 0.03050329201334259 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6272727272727273, + "acc_stderr": 0.04631381319425465, + "acc_norm": 0.6272727272727273, + "acc_norm_stderr": 0.04631381319425465 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3074074074074074, + "acc_stderr": 0.028133252578815635, + "acc_norm": 0.3074074074074074, + "acc_norm_stderr": 0.028133252578815635 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.37748344370860926, + "acc_stderr": 0.039580272311215706, + "acc_norm": 0.37748344370860926, + "acc_norm_stderr": 0.039580272311215706 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7014925373134329, + "acc_stderr": 0.032357437893550445, + "acc_norm": 0.7014925373134329, + "acc_norm_stderr": 0.032357437893550445 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5375722543352601, + "acc_stderr": 0.0380168510452446, + "acc_norm": 0.5375722543352601, + "acc_norm_stderr": 0.0380168510452446 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.4312169312169312, + "acc_stderr": 
0.025506481698138215, + "acc_norm": 0.4312169312169312, + "acc_norm_stderr": 0.025506481698138215 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5138888888888888, + "acc_stderr": 0.041795966175810016, + "acc_norm": 0.5138888888888888, + "acc_norm_stderr": 0.041795966175810016 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.7, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.7, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.6242774566473989, + "acc_stderr": 0.02607431485165708, + "acc_norm": 0.6242774566473989, + "acc_norm_stderr": 0.02607431485165708 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5276073619631901, + "acc_stderr": 0.0392237829061099, + "acc_norm": 0.5276073619631901, + "acc_norm_stderr": 0.0392237829061099 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5895061728395061, + "acc_stderr": 0.027371350925124764, + "acc_norm": 0.5895061728395061, + "acc_norm_stderr": 0.027371350925124764 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7046632124352331, + "acc_stderr": 0.032922966391551414, + "acc_norm": 0.7046632124352331, + "acc_norm_stderr": 0.032922966391551414 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.47368421052631576, + "acc_stderr": 0.046970851366478626, + "acc_norm": 0.47368421052631576, + "acc_norm_stderr": 0.046970851366478626 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6880733944954128, + "acc_stderr": 0.019862967976707245, + "acc_norm": 0.6880733944954128, + "acc_norm_stderr": 0.019862967976707245 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4444444444444444, + 
"acc_stderr": 0.044444444444444495, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.044444444444444495 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5980392156862745, + "acc_stderr": 0.02807415894760066, + "acc_norm": 0.5980392156862745, + "acc_norm_stderr": 0.02807415894760066 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.57, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.57, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7603305785123967, + "acc_stderr": 0.03896878985070417, + "acc_norm": 0.7603305785123967, + "acc_norm_stderr": 0.03896878985070417 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5723684210526315, + "acc_stderr": 0.04026097083296563, + "acc_norm": 0.5723684210526315, + "acc_norm_stderr": 0.04026097083296563 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5098039215686274, + "acc_stderr": 0.02022394600507432, + "acc_norm": 0.5098039215686274, + "acc_norm_stderr": 0.02022394600507432 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.4148936170212766, + "acc_stderr": 0.029392236584612493, + "acc_norm": 0.4148936170212766, + "acc_norm_stderr": 0.029392236584612493 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.41964285714285715, + "acc_stderr": 0.04684099321077106, + "acc_norm": 0.41964285714285715, + "acc_norm_stderr": 0.04684099321077106 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5694444444444444, + "acc_stderr": 0.03376922151252336, + "acc_norm": 0.5694444444444444, + "acc_norm_stderr": 0.03376922151252336 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2871508379888268, + "acc_stderr": 0.01513160884996376, + "acc_norm": 0.2871508379888268, + "acc_norm_stderr": 0.01513160884996376 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956912 + }, + 
"harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.71, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.71, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5036764705882353, + "acc_stderr": 0.0303720158854282, + "acc_norm": 0.5036764705882353, + "acc_norm_stderr": 0.0303720158854282 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6040816326530613, + "acc_stderr": 0.03130802899065686, + "acc_norm": 0.6040816326530613, + "acc_norm_stderr": 0.03130802899065686 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7510548523206751, + "acc_stderr": 0.028146970599422644, + "acc_norm": 0.7510548523206751, + "acc_norm_stderr": 0.028146970599422644 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.42046936114732725, + "acc_stderr": 0.012607654553832705, + "acc_norm": 0.42046936114732725, + "acc_norm_stderr": 0.012607654553832705 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.696078431372549, + "acc_stderr": 0.03228210387037891, + "acc_norm": 0.696078431372549, + "acc_norm_stderr": 0.03228210387037891 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.703030303030303, + "acc_stderr": 0.035679697722680474, + "acc_norm": 0.703030303030303, + "acc_norm_stderr": 0.035679697722680474 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3488372093023256, + "mc1_stderr": 0.01668441985998688, + "mc2": 0.5363520506790478, + "mc2_stderr": 0.01620621230829838 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.48642266824085006, + "acc_stderr": 0.01718401506040145, + "acc_norm": 0.525383707201889, + "acc_norm_stderr": 0.017168187201429246 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + 
"harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + 
"harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Edentns/DataVortexS-10.7B-dpo-v0.1", + "model_sha": "76fafe2757d64b083d4f20b46798d9592827aacf", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Edentns/DataVortexS-10.7B-dpo-v1.0/result_2024-01-19 01:31:49.json b/Edentns/DataVortexS-10.7B-dpo-v1.0/result_2024-01-19 01:31:49.json new file mode 100644 index 0000000000000000000000000000000000000000..723fdd9180907c3f62ad19d688c491497ddee376 --- /dev/null +++ b/Edentns/DataVortexS-10.7B-dpo-v1.0/result_2024-01-19 01:31:49.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.5093856655290102, + "acc_stderr": 0.014608816322065, + "acc_norm": 0.5691126279863481, + "acc_norm_stderr": 0.014471133392642471 + }, + "harness|ko_hellaswag|10": { + "acc": 0.48526190001991637, + "acc_stderr": 0.004987613263678177, + "acc_norm": 0.6581358295160327, + "acc_norm_stderr": 0.004733649274814523 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.672514619883041, + "acc_stderr": 0.035993357714560276, + "acc_norm": 0.672514619883041, + "acc_norm_stderr": 0.035993357714560276 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6601941747572816, + "acc_stderr": 0.046897659372781335, + "acc_norm": 0.6601941747572816, + "acc_norm_stderr": 0.046897659372781335 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6475095785440613, + "acc_stderr": 0.017084150244081376, + "acc_norm": 
0.6475095785440613, + "acc_norm_stderr": 0.017084150244081376 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.43703703703703706, + "acc_stderr": 0.04284958639753398, + "acc_norm": 0.43703703703703706, + "acc_norm_stderr": 0.04284958639753398 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4553191489361702, + "acc_stderr": 0.03255525359340354, + "acc_norm": 0.4553191489361702, + "acc_norm_stderr": 0.03255525359340354 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4759036144578313, + "acc_stderr": 0.03887971849597264, + "acc_norm": 0.4759036144578313, + "acc_norm_stderr": 0.03887971849597264 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5916398713826366, + "acc_stderr": 0.02791705074848463, + "acc_norm": 0.5916398713826366, + "acc_norm_stderr": 0.02791705074848463 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5695067264573991, + "acc_stderr": 0.033231973029429394, + "acc_norm": 0.5695067264573991, + "acc_norm_stderr": 0.033231973029429394 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5419847328244275, + "acc_stderr": 0.04369802690578757, + "acc_norm": 0.5419847328244275, + "acc_norm_stderr": 0.04369802690578757 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.44, + "acc_stderr": 0.049888765156985905, + "acc_norm": 0.44, + "acc_norm_stderr": 0.049888765156985905 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7575757575757576, + "acc_stderr": 0.030532892233932036, + "acc_norm": 0.7575757575757576, + "acc_norm_stderr": 0.030532892233932036 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4689655172413793, + "acc_stderr": 0.04158632762097828, + "acc_norm": 0.4689655172413793, + "acc_norm_stderr": 0.04158632762097828 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04690650298201942, + 
"acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04690650298201942 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6722689075630253, + "acc_stderr": 0.03048991141767323, + "acc_norm": 0.6722689075630253, + "acc_norm_stderr": 0.03048991141767323 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5923076923076923, + "acc_stderr": 0.02491524398598785, + "acc_norm": 0.5923076923076923, + "acc_norm_stderr": 0.02491524398598785 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.59, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.59, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6759259259259259, + "acc_stderr": 0.045245960070300476, + "acc_norm": 0.6759259259259259, + "acc_norm_stderr": 0.045245960070300476 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.43349753694581283, + "acc_stderr": 0.03486731727419872, + "acc_norm": 0.43349753694581283, + "acc_norm_stderr": 0.03486731727419872 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5838709677419355, + "acc_stderr": 0.02804098138076153, + "acc_norm": 0.5838709677419355, + "acc_norm_stderr": 0.02804098138076153 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7777777777777778, + "acc_stderr": 0.027236013946196673, + "acc_norm": 0.7777777777777778, + "acc_norm_stderr": 0.027236013946196673 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5547169811320755, + "acc_stderr": 0.030588052974270658, + "acc_norm": 0.5547169811320755, + "acc_norm_stderr": 0.030588052974270658 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.04607582090719976, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.04607582090719976 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 
0.35185185185185186, + "acc_stderr": 0.02911661760608301, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.02911661760608301 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33112582781456956, + "acc_stderr": 0.038425817186598696, + "acc_norm": 0.33112582781456956, + "acc_norm_stderr": 0.038425817186598696 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6417910447761194, + "acc_stderr": 0.03390393042268815, + "acc_norm": 0.6417910447761194, + "acc_norm_stderr": 0.03390393042268815 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.45664739884393063, + "acc_stderr": 0.03798106566014498, + "acc_norm": 0.45664739884393063, + "acc_norm_stderr": 0.03798106566014498 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.4576719576719577, + "acc_stderr": 0.025658868862058325, + "acc_norm": 0.4576719576719577, + "acc_norm_stderr": 0.025658868862058325 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.041553199555931467, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.041553199555931467 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939098, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939098 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.7, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.7, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5953757225433526, + "acc_stderr": 0.026424816594009852, + "acc_norm": 0.5953757225433526, + "acc_norm_stderr": 0.026424816594009852 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4662576687116564, + "acc_stderr": 0.039194155450484096, + "acc_norm": 0.4662576687116564, + "acc_norm_stderr": 0.039194155450484096 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6111111111111112, + "acc_stderr": 0.02712511551316685, + "acc_norm": 0.6111111111111112, + "acc_norm_stderr": 0.02712511551316685 + }, + 
"harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7202072538860104, + "acc_stderr": 0.03239637046735704, + "acc_norm": 0.7202072538860104, + "acc_norm_stderr": 0.03239637046735704 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.4649122807017544, + "acc_stderr": 0.046920083813689104, + "acc_norm": 0.4649122807017544, + "acc_norm_stderr": 0.046920083813689104 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6770642201834862, + "acc_stderr": 0.020048115923415332, + "acc_norm": 0.6770642201834862, + "acc_norm_stderr": 0.020048115923415332 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.46825396825396826, + "acc_stderr": 0.04463112720677172, + "acc_norm": 0.46825396825396826, + "acc_norm_stderr": 0.04463112720677172 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5718954248366013, + "acc_stderr": 0.028332397483664274, + "acc_norm": 0.5718954248366013, + "acc_norm_stderr": 0.028332397483664274 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.62, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.62, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6694214876033058, + "acc_stderr": 0.04294340845212094, + "acc_norm": 0.6694214876033058, + "acc_norm_stderr": 0.04294340845212094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5986842105263158, + "acc_stderr": 0.039889037033362836, + "acc_norm": 0.5986842105263158, + "acc_norm_stderr": 0.039889037033362836 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.49836601307189543, + "acc_stderr": 0.020227726838150124, + "acc_norm": 0.49836601307189543, + "acc_norm_stderr": 0.020227726838150124 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.40425531914893614, + "acc_stderr": 0.029275532159704725, + "acc_norm": 
0.40425531914893614, + "acc_norm_stderr": 0.029275532159704725 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.39285714285714285, + "acc_stderr": 0.04635550135609976, + "acc_norm": 0.39285714285714285, + "acc_norm_stderr": 0.04635550135609976 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5462962962962963, + "acc_stderr": 0.03395322726375797, + "acc_norm": 0.5462962962962963, + "acc_norm_stderr": 0.03395322726375797 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.3039106145251397, + "acc_stderr": 0.015382845587584517, + "acc_norm": 0.3039106145251397, + "acc_norm_stderr": 0.015382845587584517 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.69, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.69, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5257352941176471, + "acc_stderr": 0.030332578094555033, + "acc_norm": 0.5257352941176471, + "acc_norm_stderr": 0.030332578094555033 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5265306122448979, + "acc_stderr": 0.03196412734523272, + "acc_norm": 0.5265306122448979, + "acc_norm_stderr": 0.03196412734523272 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.729957805907173, + "acc_stderr": 0.028900721906293426, + "acc_norm": 0.729957805907173, + "acc_norm_stderr": 0.028900721906293426 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.4028683181225554, + "acc_stderr": 0.012526955577118007, + "acc_norm": 0.4028683181225554, + "acc_norm_stderr": 0.012526955577118007 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6862745098039216, + "acc_stderr": 0.03256685484460388, + "acc_norm": 0.6862745098039216, + "acc_norm_stderr": 0.03256685484460388 + }, + "harness|ko_mmlu_high_school_european_history|5": { 
+ "acc": 0.7090909090909091, + "acc_stderr": 0.03546563019624336, + "acc_norm": 0.7090909090909091, + "acc_norm_stderr": 0.03546563019624336 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.41982864137086906, + "mc1_stderr": 0.017277030301775766, + "mc2": 0.5876649986857919, + "mc2_stderr": 0.016374992952232537 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5348288075560803, + "acc_stderr": 0.017148598015747422, + "acc_norm": 0.5430932703659976, + "acc_norm_stderr": 0.017126389093086777 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + 
"harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Edentns/DataVortexS-10.7B-dpo-v1.0", + "model_sha": "d42328a3af59fe42ef67d45ddf256538fa5d5d0c", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Edentns/DataVortexS-10.7B-dpo-v1.1/result_2024-01-21 11:53:17.json b/Edentns/DataVortexS-10.7B-dpo-v1.1/result_2024-01-21 11:53:17.json new file mode 100644 index 0000000000000000000000000000000000000000..b3a29aeeefcd80cbaed7b17fa5ab9625690d60a5 --- /dev/null +++ b/Edentns/DataVortexS-10.7B-dpo-v1.1/result_2024-01-21 11:53:17.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4880546075085324, + 
"acc_stderr": 0.014607220340597171, + "acc_norm": 0.5435153583617748, + "acc_norm_stderr": 0.01455594976049644 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4584744074885481, + "acc_stderr": 0.0049725431277678695, + "acc_norm": 0.6344353714399522, + "acc_norm_stderr": 0.0048060390390089434 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.695906432748538, + "acc_stderr": 0.03528211258245233, + "acc_norm": 0.695906432748538, + "acc_norm_stderr": 0.03528211258245233 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6990291262135923, + "acc_stderr": 0.045416094465039476, + "acc_norm": 0.6990291262135923, + "acc_norm_stderr": 0.045416094465039476 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6934865900383141, + "acc_stderr": 0.01648695289304152, + "acc_norm": 0.6934865900383141, + "acc_norm_stderr": 0.01648695289304152 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45925925925925926, + "acc_stderr": 0.04304979692464244, + "acc_norm": 0.45925925925925926, + "acc_norm_stderr": 0.04304979692464244 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4595744680851064, + "acc_stderr": 0.032579014820998356, + "acc_norm": 0.4595744680851064, + "acc_norm_stderr": 0.032579014820998356 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.463855421686747, + "acc_stderr": 0.03882310850890594, + "acc_norm": 0.463855421686747, + "acc_norm_stderr": 0.03882310850890594 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5691318327974276, + "acc_stderr": 0.028125340983972714, + "acc_norm": 0.5691318327974276, + "acc_norm_stderr": 0.028125340983972714 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5560538116591929, + "acc_stderr": 0.03334625674242728, + "acc_norm": 0.5560538116591929, + "acc_norm_stderr": 0.03334625674242728 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5648854961832062, 
+ "acc_stderr": 0.04348208051644858, + "acc_norm": 0.5648854961832062, + "acc_norm_stderr": 0.04348208051644858 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956913, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956913 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6919191919191919, + "acc_stderr": 0.03289477330098615, + "acc_norm": 0.6919191919191919, + "acc_norm_stderr": 0.03289477330098615 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.45517241379310347, + "acc_stderr": 0.04149886942192117, + "acc_norm": 0.45517241379310347, + "acc_norm_stderr": 0.04149886942192117 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.04440521906179327, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.04440521906179327 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4579831932773109, + "acc_stderr": 0.032363611119519416, + "acc_norm": 0.4579831932773109, + "acc_norm_stderr": 0.032363611119519416 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.558974358974359, + "acc_stderr": 0.02517404838400073, + "acc_norm": 0.558974358974359, + "acc_norm_stderr": 0.02517404838400073 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.59, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.59, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6203703703703703, + "acc_stderr": 0.04691521224077742, + "acc_norm": 0.6203703703703703, + "acc_norm_stderr": 0.04691521224077742 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4187192118226601, + "acc_stderr": 0.03471192860518468, + "acc_norm": 0.4187192118226601, + "acc_norm_stderr": 0.03471192860518468 + }, + "harness|ko_mmlu_high_school_biology|5": { + 
"acc": 0.5935483870967742, + "acc_stderr": 0.027941727346256304, + "acc_norm": 0.5935483870967742, + "acc_norm_stderr": 0.027941727346256304 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7777777777777778, + "acc_stderr": 0.027236013946196697, + "acc_norm": 0.7777777777777778, + "acc_norm_stderr": 0.027236013946196697 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5169811320754717, + "acc_stderr": 0.030755120364119898, + "acc_norm": 0.5169811320754717, + "acc_norm_stderr": 0.030755120364119898 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6, + "acc_stderr": 0.0469237132203465, + "acc_norm": 0.6, + "acc_norm_stderr": 0.0469237132203465 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.02944316932303154, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.02944316932303154 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3708609271523179, + "acc_stderr": 0.03943966699183629, + "acc_norm": 0.3708609271523179, + "acc_norm_stderr": 0.03943966699183629 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6616915422885572, + "acc_stderr": 0.033455630703391914, + "acc_norm": 0.6616915422885572, + "acc_norm_stderr": 0.033455630703391914 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.45664739884393063, + "acc_stderr": 0.03798106566014498, + "acc_norm": 0.45664739884393063, + "acc_norm_stderr": 0.03798106566014498 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3439153439153439, + "acc_stderr": 0.024464426625596433, + "acc_norm": 0.3439153439153439, + "acc_norm_stderr": 0.024464426625596433 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5069444444444444, + "acc_stderr": 0.04180806750294938, + "acc_norm": 0.5069444444444444, + "acc_norm_stderr": 0.04180806750294938 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + 
"harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.79, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.79, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5404624277456648, + "acc_stderr": 0.026830805998952233, + "acc_norm": 0.5404624277456648, + "acc_norm_stderr": 0.026830805998952233 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5276073619631901, + "acc_stderr": 0.039223782906109894, + "acc_norm": 0.5276073619631901, + "acc_norm_stderr": 0.039223782906109894 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5370370370370371, + "acc_stderr": 0.027744313443376536, + "acc_norm": 0.5370370370370371, + "acc_norm_stderr": 0.027744313443376536 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6632124352331606, + "acc_stderr": 0.03410780251836184, + "acc_norm": 0.6632124352331606, + "acc_norm_stderr": 0.03410780251836184 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.35964912280701755, + "acc_stderr": 0.04514496132873633, + "acc_norm": 0.35964912280701755, + "acc_norm_stderr": 0.04514496132873633 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6385321100917432, + "acc_stderr": 0.02059808200993737, + "acc_norm": 0.6385321100917432, + "acc_norm_stderr": 0.02059808200993737 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3253968253968254, + "acc_stderr": 0.04190596438871136, + "acc_norm": 0.3253968253968254, + "acc_norm_stderr": 0.04190596438871136 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5, + "acc_stderr": 0.028629916715693413, + "acc_norm": 0.5, + "acc_norm_stderr": 0.028629916715693413 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + 
"harness|ko_mmlu_international_law|5": { + "acc": 0.6694214876033058, + "acc_stderr": 0.04294340845212093, + "acc_norm": 0.6694214876033058, + "acc_norm_stderr": 0.04294340845212093 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5723684210526315, + "acc_stderr": 0.040260970832965634, + "acc_norm": 0.5723684210526315, + "acc_norm_stderr": 0.040260970832965634 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.49019607843137253, + "acc_stderr": 0.02022394600507429, + "acc_norm": 0.49019607843137253, + "acc_norm_stderr": 0.02022394600507429 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.34397163120567376, + "acc_stderr": 0.02833801742861132, + "acc_norm": 0.34397163120567376, + "acc_norm_stderr": 0.02833801742861132 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.29464285714285715, + "acc_stderr": 0.0432704093257873, + "acc_norm": 0.29464285714285715, + "acc_norm_stderr": 0.0432704093257873 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.03256850570293646, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.03256850570293646 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2223463687150838, + "acc_stderr": 0.013907189208156881, + "acc_norm": 0.2223463687150838, + "acc_norm_stderr": 0.013907189208156881 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.65, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.65, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.40441176470588236, + "acc_stderr": 0.029812630701569746, + "acc_norm": 0.40441176470588236, + "acc_norm_stderr": 0.029812630701569746 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5714285714285714, + "acc_stderr": 0.03168091161233882, + "acc_norm": 
0.5714285714285714, + "acc_norm_stderr": 0.03168091161233882 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7215189873417721, + "acc_stderr": 0.029178682304842548, + "acc_norm": 0.7215189873417721, + "acc_norm_stderr": 0.029178682304842548 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3617992177314211, + "acc_stderr": 0.012272736233262936, + "acc_norm": 0.3617992177314211, + "acc_norm_stderr": 0.012272736233262936 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6421568627450981, + "acc_stderr": 0.03364487286088298, + "acc_norm": 0.6421568627450981, + "acc_norm_stderr": 0.03364487286088298 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6060606060606061, + "acc_stderr": 0.0381549430868893, + "acc_norm": 0.6060606060606061, + "acc_norm_stderr": 0.0381549430868893 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.34761321909424725, + "mc1_stderr": 0.016670769188897306, + "mc2": 0.5385013086039373, + "mc2_stderr": 0.015618614265978098 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.6103896103896104, + "acc_stderr": 0.016766161671893525, + "acc_norm": 0.6115702479338843, + "acc_norm_stderr": 0.01675692157106943 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + 
"harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Edentns/DataVortexS-10.7B-dpo-v1.1", + 
"model_sha": "0c1209f805eebdc65d8c8c71c398bb156f6f8d86", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Edentns/DataVortexS-10.7B-dpo-v1.10/result_2024-02-02 00:04:50.json b/Edentns/DataVortexS-10.7B-dpo-v1.10/result_2024-02-02 00:04:50.json new file mode 100644 index 0000000000000000000000000000000000000000..8ae1006ec3a94e4817baf0b93195776092f4d407 --- /dev/null +++ b/Edentns/DataVortexS-10.7B-dpo-v1.10/result_2024-02-02 00:04:50.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.48208191126279865, + "acc_stderr": 0.014602005585490978, + "acc_norm": 0.5426621160409556, + "acc_norm_stderr": 0.014558106543924075 + }, + "harness|ko_hellaswag|10": { + "acc": 0.45817566221868156, + "acc_stderr": 0.004972293764978729, + "acc_norm": 0.6316470822545309, + "acc_norm_stderr": 0.004813719952829978 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6608187134502924, + "acc_stderr": 0.03631053496488905, + "acc_norm": 0.6608187134502924, + "acc_norm_stderr": 0.03631053496488905 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6893203883495146, + "acc_stderr": 0.0458212416016155, + "acc_norm": 0.6893203883495146, + "acc_norm_stderr": 0.0458212416016155 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6794380587484036, + "acc_stderr": 0.016688893310803764, + "acc_norm": 0.6794380587484036, + "acc_norm_stderr": 0.016688893310803764 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.04292596718256981, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.04292596718256981 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4425531914893617, + "acc_stderr": 
0.03246956919789958, + "acc_norm": 0.4425531914893617, + "acc_norm_stderr": 0.03246956919789958 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39156626506024095, + "acc_stderr": 0.03799857454479636, + "acc_norm": 0.39156626506024095, + "acc_norm_stderr": 0.03799857454479636 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5980707395498392, + "acc_stderr": 0.027846476005930477, + "acc_norm": 0.5980707395498392, + "acc_norm_stderr": 0.027846476005930477 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5560538116591929, + "acc_stderr": 0.03334625674242728, + "acc_norm": 0.5560538116591929, + "acc_norm_stderr": 0.03334625674242728 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5267175572519084, + "acc_stderr": 0.04379024936553894, + "acc_norm": 0.5267175572519084, + "acc_norm_stderr": 0.04379024936553894 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6565656565656566, + "acc_stderr": 0.03383201223244442, + "acc_norm": 0.6565656565656566, + "acc_norm_stderr": 0.03383201223244442 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.04104269211806232, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.04104269211806232 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.04220773659171453, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.04220773659171453 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.48739495798319327, + "acc_stderr": 0.03246816765752173, + "acc_norm": 0.48739495798319327, + "acc_norm_stderr": 0.03246816765752173 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5358974358974359, + "acc_stderr": 0.025285585990017862, + "acc_norm": 0.5358974358974359, + "acc_norm_stderr": 0.025285585990017862 + }, + 
"harness|ko_mmlu_computer_security|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5925925925925926, + "acc_stderr": 0.04750077341199984, + "acc_norm": 0.5925925925925926, + "acc_norm_stderr": 0.04750077341199984 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3842364532019704, + "acc_stderr": 0.0342239856565755, + "acc_norm": 0.3842364532019704, + "acc_norm_stderr": 0.0342239856565755 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.567741935483871, + "acc_stderr": 0.028181739720019416, + "acc_norm": 0.567741935483871, + "acc_norm_stderr": 0.028181739720019416 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.8034188034188035, + "acc_stderr": 0.02603538609895129, + "acc_norm": 0.8034188034188035, + "acc_norm_stderr": 0.02603538609895129 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5509433962264151, + "acc_stderr": 0.030612730713641092, + "acc_norm": 0.5509433962264151, + "acc_norm_stderr": 0.030612730713641092 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5909090909090909, + "acc_stderr": 0.04709306978661895, + "acc_norm": 0.5909090909090909, + "acc_norm_stderr": 0.04709306978661895 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3592592592592593, + "acc_stderr": 0.029252905927251983, + "acc_norm": 0.3592592592592593, + "acc_norm_stderr": 0.029252905927251983 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3841059602649007, + "acc_stderr": 0.03971301814719197, + "acc_norm": 0.3841059602649007, + "acc_norm_stderr": 0.03971301814719197 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6368159203980099, + "acc_stderr": 0.03400598505599015, + "acc_norm": 0.6368159203980099, + "acc_norm_stderr": 
0.03400598505599015 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4161849710982659, + "acc_stderr": 0.03758517775404947, + "acc_norm": 0.4161849710982659, + "acc_norm_stderr": 0.03758517775404947 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.36772486772486773, + "acc_stderr": 0.02483383982556242, + "acc_norm": 0.36772486772486773, + "acc_norm_stderr": 0.02483383982556242 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5277777777777778, + "acc_stderr": 0.04174752578923183, + "acc_norm": 0.5277777777777778, + "acc_norm_stderr": 0.04174752578923183 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.71, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.71, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5260115606936416, + "acc_stderr": 0.02688264343402289, + "acc_norm": 0.5260115606936416, + "acc_norm_stderr": 0.02688264343402289 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5460122699386503, + "acc_stderr": 0.0391170190467718, + "acc_norm": 0.5460122699386503, + "acc_norm_stderr": 0.0391170190467718 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.027815973433878014, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.027815973433878014 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6476683937823834, + "acc_stderr": 0.034474782864143565, + "acc_norm": 0.6476683937823834, + "acc_norm_stderr": 0.034474782864143565 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.30701754385964913, + "acc_stderr": 0.043391383225798594, + "acc_norm": 0.30701754385964913, + 
"acc_norm_stderr": 0.043391383225798594 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6385321100917432, + "acc_stderr": 0.020598082009937378, + "acc_norm": 0.6385321100917432, + "acc_norm_stderr": 0.020598082009937378 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.04360314860077459, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.04360314860077459 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5196078431372549, + "acc_stderr": 0.028607893699576066, + "acc_norm": 0.5196078431372549, + "acc_norm_stderr": 0.028607893699576066 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5950413223140496, + "acc_stderr": 0.04481137755942469, + "acc_norm": 0.5950413223140496, + "acc_norm_stderr": 0.04481137755942469 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5263157894736842, + "acc_stderr": 0.040633027314866725, + "acc_norm": 0.5263157894736842, + "acc_norm_stderr": 0.040633027314866725 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.46568627450980393, + "acc_stderr": 0.02018014484330729, + "acc_norm": 0.46568627450980393, + "acc_norm_stderr": 0.02018014484330729 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.33687943262411346, + "acc_stderr": 0.028195534873966727, + "acc_norm": 0.33687943262411346, + "acc_norm_stderr": 0.028195534873966727 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.26785714285714285, + "acc_stderr": 0.04203277291467762, + "acc_norm": 0.26785714285714285, + "acc_norm_stderr": 0.04203277291467762 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3472222222222222, + "acc_stderr": 0.032468872436376486, + "acc_norm": 0.3472222222222222, + "acc_norm_stderr": 0.032468872436376486 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2569832402234637, + 
"acc_stderr": 0.014614465821966342, + "acc_norm": 0.2569832402234637, + "acc_norm_stderr": 0.014614465821966342 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.64, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.64, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4375, + "acc_stderr": 0.030134614954403924, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.030134614954403924 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5346938775510204, + "acc_stderr": 0.03193207024425314, + "acc_norm": 0.5346938775510204, + "acc_norm_stderr": 0.03193207024425314 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7046413502109705, + "acc_stderr": 0.02969633871342288, + "acc_norm": 0.7046413502109705, + "acc_norm_stderr": 0.02969633871342288 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.35528031290743156, + "acc_stderr": 0.01222362336404404, + "acc_norm": 0.35528031290743156, + "acc_norm_stderr": 0.01222362336404404 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6421568627450981, + "acc_stderr": 0.033644872860882996, + "acc_norm": 0.6421568627450981, + "acc_norm_stderr": 0.033644872860882996 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6060606060606061, + "acc_stderr": 0.0381549430868893, + "acc_norm": 0.6060606060606061, + "acc_norm_stderr": 0.0381549430868893 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.36474908200734396, + "mc1_stderr": 0.016850961061720113, + "mc2": 0.5508439175836903, + "mc2_stderr": 0.015795400382869848 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.58913813459268, + "acc_stderr": 0.01691497276784105, + "acc_norm": 0.5914994096812278, + "acc_norm_stderr": 0.016900062879427122 + } + }, + "versions": { + "all": 0, + 
"harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + 
"harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Edentns/DataVortexS-10.7B-dpo-v1.10", + "model_sha": "e02900b2f1209ccb6b04d8428a467677d99e0550", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Edentns/DataVortexS-10.7B-dpo-v1.11/result_2024-02-02 00:07:20.json b/Edentns/DataVortexS-10.7B-dpo-v1.11/result_2024-02-02 00:07:20.json new file mode 100644 index 0000000000000000000000000000000000000000..59a6cd3a32f3085bf9670e3d1c9f82dbf596d620 --- /dev/null +++ b/Edentns/DataVortexS-10.7B-dpo-v1.11/result_2024-02-02 00:07:20.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.5, + "acc_stderr": 0.014611390804670088, + "acc_norm": 0.5597269624573379, + "acc_norm_stderr": 0.014506769524804251 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4976100378410675, + "acc_stderr": 0.004989724408664502, + "acc_norm": 0.6868153754232225, + "acc_norm_stderr": 0.004628409084218777 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6023391812865497, + "acc_stderr": 0.0375363895576169, + "acc_norm": 0.6023391812865497, + "acc_norm_stderr": 0.0375363895576169 + }, + "harness|ko_mmlu_management|5": { + "acc": 
0.6504854368932039, + "acc_stderr": 0.047211885060971716, + "acc_norm": 0.6504854368932039, + "acc_norm_stderr": 0.047211885060971716 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6538952745849298, + "acc_stderr": 0.017011965266412077, + "acc_norm": 0.6538952745849298, + "acc_norm_stderr": 0.017011965266412077 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.43703703703703706, + "acc_stderr": 0.04284958639753399, + "acc_norm": 0.43703703703703706, + "acc_norm_stderr": 0.04284958639753399 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4595744680851064, + "acc_stderr": 0.032579014820998356, + "acc_norm": 0.4595744680851064, + "acc_norm_stderr": 0.032579014820998356 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.43373493975903615, + "acc_stderr": 0.03858158940685515, + "acc_norm": 0.43373493975903615, + "acc_norm_stderr": 0.03858158940685515 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6109324758842444, + "acc_stderr": 0.027690337536485376, + "acc_norm": 0.6109324758842444, + "acc_norm_stderr": 0.027690337536485376 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5695067264573991, + "acc_stderr": 0.033231973029429394, + "acc_norm": 0.5695067264573991, + "acc_norm_stderr": 0.033231973029429394 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6335877862595419, + "acc_stderr": 0.04225875451969638, + "acc_norm": 0.6335877862595419, + "acc_norm_stderr": 0.04225875451969638 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7070707070707071, + "acc_stderr": 0.032424979581788166, + "acc_norm": 0.7070707070707071, + "acc_norm_stderr": 0.032424979581788166 + }, + "harness|ko_mmlu_electrical_engineering|5": { 
+ "acc": 0.42758620689655175, + "acc_stderr": 0.041227371113703316, + "acc_norm": 0.42758620689655175, + "acc_norm_stderr": 0.041227371113703316 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.39215686274509803, + "acc_stderr": 0.048580835742663454, + "acc_norm": 0.39215686274509803, + "acc_norm_stderr": 0.048580835742663454 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6260504201680672, + "acc_stderr": 0.03142946637883708, + "acc_norm": 0.6260504201680672, + "acc_norm_stderr": 0.03142946637883708 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5666666666666667, + "acc_stderr": 0.0251246535258851, + "acc_norm": 0.5666666666666667, + "acc_norm_stderr": 0.0251246535258851 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6018518518518519, + "acc_stderr": 0.04732332615978814, + "acc_norm": 0.6018518518518519, + "acc_norm_stderr": 0.04732332615978814 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4039408866995074, + "acc_stderr": 0.03452453903822039, + "acc_norm": 0.4039408866995074, + "acc_norm_stderr": 0.03452453903822039 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5870967741935483, + "acc_stderr": 0.028009138125400384, + "acc_norm": 0.5870967741935483, + "acc_norm_stderr": 0.028009138125400384 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7521367521367521, + "acc_stderr": 0.028286324075564424, + "acc_norm": 0.7521367521367521, + "acc_norm_stderr": 0.028286324075564424 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5169811320754717, + "acc_stderr": 0.030755120364119898, + "acc_norm": 0.5169811320754717, + "acc_norm_stderr": 0.030755120364119898 + }, + 
"harness|ko_mmlu_public_relations|5": { + "acc": 0.5727272727272728, + "acc_stderr": 0.04738198703545483, + "acc_norm": 0.5727272727272728, + "acc_norm_stderr": 0.04738198703545483 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.028742040903948482, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.028742040903948482 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + "acc_stderr": 0.037345356767871984, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.037345356767871984 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6716417910447762, + "acc_stderr": 0.033206858897443244, + "acc_norm": 0.6716417910447762, + "acc_norm_stderr": 0.033206858897443244 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.48554913294797686, + "acc_stderr": 0.03810871630454764, + "acc_norm": 0.48554913294797686, + "acc_norm_stderr": 0.03810871630454764 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.42063492063492064, + "acc_stderr": 0.025424835086924003, + "acc_norm": 0.42063492063492064, + "acc_norm_stderr": 0.025424835086924003 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5277777777777778, + "acc_stderr": 0.04174752578923185, + "acc_norm": 0.5277777777777778, + "acc_norm_stderr": 0.04174752578923185 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.66, + "acc_stderr": 0.04760952285695237, + "acc_norm": 0.66, + "acc_norm_stderr": 0.04760952285695237 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.523121387283237, + "acc_stderr": 0.026890297881303125, + "acc_norm": 0.523121387283237, + "acc_norm_stderr": 0.026890297881303125 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5214723926380368, + "acc_stderr": 0.03924746876751129, + "acc_norm": 0.5214723926380368, + 
"acc_norm_stderr": 0.03924746876751129 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5864197530864198, + "acc_stderr": 0.027402042040269966, + "acc_norm": 0.5864197530864198, + "acc_norm_stderr": 0.027402042040269966 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720683, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720683 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7046632124352331, + "acc_stderr": 0.032922966391551414, + "acc_norm": 0.7046632124352331, + "acc_norm_stderr": 0.032922966391551414 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.47368421052631576, + "acc_stderr": 0.046970851366478626, + "acc_norm": 0.47368421052631576, + "acc_norm_stderr": 0.046970851366478626 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6770642201834862, + "acc_stderr": 0.02004811592341532, + "acc_norm": 0.6770642201834862, + "acc_norm_stderr": 0.02004811592341532 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.38095238095238093, + "acc_stderr": 0.043435254289490965, + "acc_norm": 0.38095238095238093, + "acc_norm_stderr": 0.043435254289490965 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5718954248366013, + "acc_stderr": 0.028332397483664278, + "acc_norm": 0.5718954248366013, + "acc_norm_stderr": 0.028332397483664278 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7024793388429752, + "acc_stderr": 0.04173349148083499, + "acc_norm": 0.7024793388429752, + "acc_norm_stderr": 0.04173349148083499 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5855263157894737, + "acc_stderr": 0.040089737857792046, + "acc_norm": 0.5855263157894737, + "acc_norm_stderr": 0.040089737857792046 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5098039215686274, + "acc_stderr": 
0.020223946005074312, + "acc_norm": 0.5098039215686274, + "acc_norm_stderr": 0.020223946005074312 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3546099290780142, + "acc_stderr": 0.02853865002887864, + "acc_norm": 0.3546099290780142, + "acc_norm_stderr": 0.02853865002887864 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.39285714285714285, + "acc_stderr": 0.04635550135609976, + "acc_norm": 0.39285714285714285, + "acc_norm_stderr": 0.04635550135609976 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5092592592592593, + "acc_stderr": 0.03409386946992699, + "acc_norm": 0.5092592592592593, + "acc_norm_stderr": 0.03409386946992699 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.4, + "acc_stderr": 0.016384638410380823, + "acc_norm": 0.4, + "acc_norm_stderr": 0.016384638410380823 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4889705882352941, + "acc_stderr": 0.030365446477275668, + "acc_norm": 0.4889705882352941, + "acc_norm_stderr": 0.030365446477275668 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5918367346938775, + "acc_stderr": 0.03146465712827424, + "acc_norm": 0.5918367346938775, + "acc_norm_stderr": 0.03146465712827424 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7426160337552743, + "acc_stderr": 0.02845882099146031, + "acc_norm": 0.7426160337552743, + "acc_norm_stderr": 0.02845882099146031 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.43741851368970014, + "acc_stderr": 0.012669813464935719, + "acc_norm": 0.43741851368970014, + "acc_norm_stderr": 0.012669813464935719 + }, + 
"harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6274509803921569, + "acc_stderr": 0.03393388584958406, + "acc_norm": 0.6274509803921569, + "acc_norm_stderr": 0.03393388584958406 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.03681050869161549, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.03681050869161549 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.47980416156670747, + "mc1_stderr": 0.017489216849737043, + "mc2": 0.6674146470053609, + "mc2_stderr": 0.01573182499922601 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5312868949232585, + "acc_stderr": 0.017156666859785456, + "acc_norm": 0.5371900826446281, + "acc_norm_stderr": 0.0171427361176433 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + 
"harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Edentns/DataVortexS-10.7B-dpo-v1.11", + "model_sha": "7cde644083fce16dcd1666c536a4dd1438a3c0a3", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Edentns/DataVortexS-10.7B-dpo-v1.12/result_2024-02-02 00:04:10.json b/Edentns/DataVortexS-10.7B-dpo-v1.12/result_2024-02-02 00:04:10.json new file mode 100644 index 
0000000000000000000000000000000000000000..80ecd26d14a3357b3991a8b3ede1e6409f11e412 --- /dev/null +++ b/Edentns/DataVortexS-10.7B-dpo-v1.12/result_2024-02-02 00:04:10.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.5042662116040956, + "acc_stderr": 0.014610858923956959, + "acc_norm": 0.5443686006825939, + "acc_norm_stderr": 0.01455374993930687 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4971121290579566, + "acc_stderr": 0.004989698183207843, + "acc_norm": 0.6720772754431388, + "acc_norm_stderr": 0.00468497069690295 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6257309941520468, + "acc_stderr": 0.03711601185389482, + "acc_norm": 0.6257309941520468, + "acc_norm_stderr": 0.03711601185389482 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6796116504854369, + "acc_stderr": 0.04620284082280042, + "acc_norm": 0.6796116504854369, + "acc_norm_stderr": 0.04620284082280042 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6641123882503193, + "acc_stderr": 0.016889407235171683, + "acc_norm": 0.6641123882503193, + "acc_norm_stderr": 0.016889407235171683 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4148148148148148, + "acc_stderr": 0.04256193767901407, + "acc_norm": 0.4148148148148148, + "acc_norm_stderr": 0.04256193767901407 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4595744680851064, + "acc_stderr": 0.03257901482099836, + "acc_norm": 0.4595744680851064, + "acc_norm_stderr": 0.03257901482099836 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4939759036144578, + "acc_stderr": 0.03892212195333045, + "acc_norm": 0.4939759036144578, + "acc_norm_stderr": 0.03892212195333045 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5852090032154341, + "acc_stderr": 0.02798268045975956, + "acc_norm": 0.5852090032154341, + "acc_norm_stderr": 
0.02798268045975956 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5964125560538116, + "acc_stderr": 0.032928028193303135, + "acc_norm": 0.5964125560538116, + "acc_norm_stderr": 0.032928028193303135 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5954198473282443, + "acc_stderr": 0.043046937953806645, + "acc_norm": 0.5954198473282443, + "acc_norm_stderr": 0.043046937953806645 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.45, + "acc_stderr": 0.04999999999999999, + "acc_norm": 0.45, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7121212121212122, + "acc_stderr": 0.03225883512300992, + "acc_norm": 0.7121212121212122, + "acc_norm_stderr": 0.03225883512300992 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.04104269211806232, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.04104269211806232 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.35294117647058826, + "acc_stderr": 0.047551296160629475, + "acc_norm": 0.35294117647058826, + "acc_norm_stderr": 0.047551296160629475 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6302521008403361, + "acc_stderr": 0.03135709599613591, + "acc_norm": 0.6302521008403361, + "acc_norm_stderr": 0.03135709599613591 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5820512820512821, + "acc_stderr": 0.025007329882461207, + "acc_norm": 0.5820512820512821, + "acc_norm_stderr": 0.025007329882461207 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.64, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.64, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.04557239513497751, + "acc_norm": 0.6666666666666666, + 
"acc_norm_stderr": 0.04557239513497751 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.03481904844438803, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.03481904844438803 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.567741935483871, + "acc_stderr": 0.0281817397200194, + "acc_norm": 0.567741935483871, + "acc_norm_stderr": 0.0281817397200194 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7863247863247863, + "acc_stderr": 0.026853450377009144, + "acc_norm": 0.7863247863247863, + "acc_norm_stderr": 0.026853450377009144 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5584905660377358, + "acc_stderr": 0.030561590426731837, + "acc_norm": 0.5584905660377358, + "acc_norm_stderr": 0.030561590426731837 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5818181818181818, + "acc_stderr": 0.04724577405731573, + "acc_norm": 0.5818181818181818, + "acc_norm_stderr": 0.04724577405731573 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.35555555555555557, + "acc_stderr": 0.02918571494985741, + "acc_norm": 0.35555555555555557, + "acc_norm_stderr": 0.02918571494985741 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3443708609271523, + "acc_stderr": 0.038796870240733264, + "acc_norm": 0.3443708609271523, + "acc_norm_stderr": 0.038796870240733264 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6716417910447762, + "acc_stderr": 0.033206858897443244, + "acc_norm": 0.6716417910447762, + "acc_norm_stderr": 0.033206858897443244 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4277456647398844, + "acc_stderr": 0.03772446857518026, + "acc_norm": 0.4277456647398844, + "acc_norm_stderr": 0.03772446857518026 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.47619047619047616, + "acc_stderr": 0.02572209706438851, + "acc_norm": 0.47619047619047616, + "acc_norm_stderr": 0.02572209706438851 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 
0.5555555555555556, + "acc_stderr": 0.04155319955593146, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.04155319955593146 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.72, + "acc_stderr": 0.04512608598542126, + "acc_norm": 0.72, + "acc_norm_stderr": 0.04512608598542126 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5606936416184971, + "acc_stderr": 0.026720034380514998, + "acc_norm": 0.5606936416184971, + "acc_norm_stderr": 0.026720034380514998 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.48466257668711654, + "acc_stderr": 0.03926522378708843, + "acc_norm": 0.48466257668711654, + "acc_norm_stderr": 0.03926522378708843 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6234567901234568, + "acc_stderr": 0.026959344518747784, + "acc_norm": 0.6234567901234568, + "acc_norm_stderr": 0.026959344518747784 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6994818652849741, + "acc_stderr": 0.03308818594415749, + "acc_norm": 0.6994818652849741, + "acc_norm_stderr": 0.03308818594415749 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.43859649122807015, + "acc_stderr": 0.04668000738510455, + "acc_norm": 0.43859649122807015, + "acc_norm_stderr": 0.04668000738510455 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6880733944954128, + "acc_stderr": 0.019862967976707245, + "acc_norm": 0.6880733944954128, + "acc_norm_stderr": 0.019862967976707245 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.48412698412698413, + "acc_stderr": 0.04469881854072606, + "acc_norm": 0.48412698412698413, + "acc_norm_stderr": 0.04469881854072606 + }, + "harness|ko_mmlu_nutrition|5": { + 
"acc": 0.5882352941176471, + "acc_stderr": 0.028180596328259287, + "acc_norm": 0.5882352941176471, + "acc_norm_stderr": 0.028180596328259287 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.58, + "acc_stderr": 0.04960449637488583, + "acc_norm": 0.58, + "acc_norm_stderr": 0.04960449637488583 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7272727272727273, + "acc_stderr": 0.040655781409087044, + "acc_norm": 0.7272727272727273, + "acc_norm_stderr": 0.040655781409087044 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.625, + "acc_stderr": 0.039397364351956274, + "acc_norm": 0.625, + "acc_norm_stderr": 0.039397364351956274 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5032679738562091, + "acc_stderr": 0.020227402794434864, + "acc_norm": 0.5032679738562091, + "acc_norm_stderr": 0.020227402794434864 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.41134751773049644, + "acc_stderr": 0.029354911159940975, + "acc_norm": 0.41134751773049644, + "acc_norm_stderr": 0.029354911159940975 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4107142857142857, + "acc_stderr": 0.046695106638751926, + "acc_norm": 0.4107142857142857, + "acc_norm_stderr": 0.046695106638751926 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5, + "acc_stderr": 0.034099716973523674, + "acc_norm": 0.5, + "acc_norm_stderr": 0.034099716973523674 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.32513966480446926, + "acc_stderr": 0.015666542785053566, + "acc_norm": 0.32513966480446926, + "acc_norm_stderr": 0.015666542785053566 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.53, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.53, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.71, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.71, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 
0.4742647058823529, + "acc_stderr": 0.03033257809455504, + "acc_norm": 0.4742647058823529, + "acc_norm_stderr": 0.03033257809455504 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5387755102040817, + "acc_stderr": 0.031912820526692774, + "acc_norm": 0.5387755102040817, + "acc_norm_stderr": 0.031912820526692774 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.8059071729957806, + "acc_stderr": 0.02574490253229094, + "acc_norm": 0.8059071729957806, + "acc_norm_stderr": 0.02574490253229094 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.4028683181225554, + "acc_stderr": 0.012526955577118007, + "acc_norm": 0.4028683181225554, + "acc_norm_stderr": 0.012526955577118007 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6862745098039216, + "acc_stderr": 0.03256685484460387, + "acc_norm": 0.6862745098039216, + "acc_norm_stderr": 0.03256685484460387 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.7090909090909091, + "acc_stderr": 0.03546563019624336, + "acc_norm": 0.7090909090909091, + "acc_norm_stderr": 0.03546563019624336 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.4394124847001224, + "mc1_stderr": 0.01737452048251371, + "mc2": 0.6187514885795848, + "mc2_stderr": 0.01610052121364447 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4982290436835891, + "acc_stderr": 0.017190246276231863, + "acc_norm": 0.5041322314049587, + "acc_norm_stderr": 0.017189767032130817 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + 
"harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + 
"harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Edentns/DataVortexS-10.7B-dpo-v1.12", + "model_sha": "31e67196838657abed64641859c0a95db65bba95", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Edentns/DataVortexS-10.7B-dpo-v1.2/result_2024-01-25 03:32:56.json b/Edentns/DataVortexS-10.7B-dpo-v1.2/result_2024-01-25 03:32:56.json new file mode 100644 index 0000000000000000000000000000000000000000..ad099a39481a0a1bbb5fe2ffad6e475ed283f7cb --- /dev/null +++ b/Edentns/DataVortexS-10.7B-dpo-v1.2/result_2024-01-25 03:32:56.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4786689419795222, + "acc_stderr": 0.014598087973127108, + "acc_norm": 0.5273037542662116, + "acc_norm_stderr": 0.014589589101986001 + }, + "harness|ko_hellaswag|10": { + "acc": 0.47739494124676357, + "acc_stderr": 0.004984679359375627, + "acc_norm": 0.6482772356104362, + "acc_norm_stderr": 0.0047653207849021086 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.036155076303109344, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.036155076303109344 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6407766990291263, + "acc_stderr": 0.04750458399041696, + "acc_norm": 0.6407766990291263, + "acc_norm_stderr": 0.04750458399041696 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6206896551724138, + "acc_stderr": 0.01735126811754445, + "acc_norm": 0.6206896551724138, + "acc_norm_stderr": 0.01735126811754445 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4222222222222222, + "acc_stderr": 0.04266763404099582, + "acc_norm": 0.4222222222222222, + "acc_norm_stderr": 0.04266763404099582 + }, + 
"harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4127659574468085, + "acc_stderr": 0.03218471141400351, + "acc_norm": 0.4127659574468085, + "acc_norm_stderr": 0.03218471141400351 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4397590361445783, + "acc_stderr": 0.03864139923699121, + "acc_norm": 0.4397590361445783, + "acc_norm_stderr": 0.03864139923699121 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.572347266881029, + "acc_stderr": 0.02809924077580957, + "acc_norm": 0.572347266881029, + "acc_norm_stderr": 0.02809924077580957 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.547085201793722, + "acc_stderr": 0.03340867501923324, + "acc_norm": 0.547085201793722, + "acc_norm_stderr": 0.03340867501923324 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.549618320610687, + "acc_stderr": 0.04363643698524779, + "acc_norm": 0.549618320610687, + "acc_norm_stderr": 0.04363643698524779 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.45, + "acc_stderr": 0.04999999999999999, + "acc_norm": 0.45, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7323232323232324, + "acc_stderr": 0.03154449888270286, + "acc_norm": 0.7323232323232324, + "acc_norm_stderr": 0.03154449888270286 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.42758620689655175, + "acc_stderr": 0.041227371113703316, + "acc_norm": 0.42758620689655175, + "acc_norm_stderr": 0.041227371113703316 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3235294117647059, + "acc_stderr": 0.04655010411319616, + "acc_norm": 0.3235294117647059, + "acc_norm_stderr": 0.04655010411319616 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6680672268907563, + "acc_stderr": 0.03058869701378364, + "acc_norm": 0.6680672268907563, + "acc_norm_stderr": 
0.03058869701378364 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5871794871794872, + "acc_stderr": 0.024962683564331793, + "acc_norm": 0.5871794871794872, + "acc_norm_stderr": 0.024962683564331793 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.59, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.59, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6296296296296297, + "acc_stderr": 0.04668408033024931, + "acc_norm": 0.6296296296296297, + "acc_norm_stderr": 0.04668408033024931 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3793103448275862, + "acc_stderr": 0.034139638059062345, + "acc_norm": 0.3793103448275862, + "acc_norm_stderr": 0.034139638059062345 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5645161290322581, + "acc_stderr": 0.028206225591502734, + "acc_norm": 0.5645161290322581, + "acc_norm_stderr": 0.028206225591502734 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.782051282051282, + "acc_stderr": 0.027046857630716663, + "acc_norm": 0.782051282051282, + "acc_norm_stderr": 0.027046857630716663 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5320754716981132, + "acc_stderr": 0.030709486992556545, + "acc_norm": 0.5320754716981132, + "acc_norm_stderr": 0.030709486992556545 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6, + "acc_stderr": 0.0469237132203465, + "acc_norm": 0.6, + "acc_norm_stderr": 0.0469237132203465 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.029723278961476664, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.029723278961476664 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.36423841059602646, + "acc_stderr": 0.03929111781242742, + "acc_norm": 0.36423841059602646, + 
"acc_norm_stderr": 0.03929111781242742 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6069651741293532, + "acc_stderr": 0.0345368246603156, + "acc_norm": 0.6069651741293532, + "acc_norm_stderr": 0.0345368246603156 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4797687861271676, + "acc_stderr": 0.03809342081273958, + "acc_norm": 0.4797687861271676, + "acc_norm_stderr": 0.03809342081273958 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.4523809523809524, + "acc_stderr": 0.02563425811555496, + "acc_norm": 0.4523809523809524, + "acc_norm_stderr": 0.02563425811555496 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.04155319955593146, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.04155319955593146 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.71, + "acc_stderr": 0.04560480215720683, + "acc_norm": 0.71, + "acc_norm_stderr": 0.04560480215720683 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5809248554913294, + "acc_stderr": 0.02656417811142263, + "acc_norm": 0.5809248554913294, + "acc_norm_stderr": 0.02656417811142263 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4723926380368098, + "acc_stderr": 0.0392237829061099, + "acc_norm": 0.4723926380368098, + "acc_norm_stderr": 0.0392237829061099 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6172839506172839, + "acc_stderr": 0.027044538138402602, + "acc_norm": 0.6172839506172839, + "acc_norm_stderr": 0.027044538138402602 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7409326424870466, + "acc_stderr": 0.03161877917935411, + "acc_norm": 0.7409326424870466, 
+ "acc_norm_stderr": 0.03161877917935411 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.41228070175438597, + "acc_stderr": 0.046306532033665956, + "acc_norm": 0.41228070175438597, + "acc_norm_stderr": 0.046306532033665956 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.671559633027523, + "acc_stderr": 0.02013590279729841, + "acc_norm": 0.671559633027523, + "acc_norm_stderr": 0.02013590279729841 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.48412698412698413, + "acc_stderr": 0.04469881854072606, + "acc_norm": 0.48412698412698413, + "acc_norm_stderr": 0.04469881854072606 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5686274509803921, + "acc_stderr": 0.02835895631342355, + "acc_norm": 0.5686274509803921, + "acc_norm_stderr": 0.02835895631342355 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.59, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.59, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6611570247933884, + "acc_stderr": 0.0432076780753667, + "acc_norm": 0.6611570247933884, + "acc_norm_stderr": 0.0432076780753667 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5986842105263158, + "acc_stderr": 0.039889037033362836, + "acc_norm": 0.5986842105263158, + "acc_norm_stderr": 0.039889037033362836 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.49836601307189543, + "acc_stderr": 0.020227726838150117, + "acc_norm": 0.49836601307189543, + "acc_norm_stderr": 0.020227726838150117 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3829787234042553, + "acc_stderr": 0.02899908090480619, + "acc_norm": 0.3829787234042553, + "acc_norm_stderr": 0.02899908090480619 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.375, + "acc_stderr": 0.04595091388086298, + "acc_norm": 0.375, + "acc_norm_stderr": 0.04595091388086298 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5277777777777778, + "acc_stderr": 0.0340470532865388, + 
"acc_norm": 0.5277777777777778, + "acc_norm_stderr": 0.0340470532865388 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.25921787709497207, + "acc_stderr": 0.014655780837497717, + "acc_norm": 0.25921787709497207, + "acc_norm_stderr": 0.014655780837497717 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.67, + "acc_stderr": 0.04725815626252607, + "acc_norm": 0.67, + "acc_norm_stderr": 0.04725815626252607 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5147058823529411, + "acc_stderr": 0.03035969707904611, + "acc_norm": 0.5147058823529411, + "acc_norm_stderr": 0.03035969707904611 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5102040816326531, + "acc_stderr": 0.03200255347893782, + "acc_norm": 0.5102040816326531, + "acc_norm_stderr": 0.03200255347893782 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.759493670886076, + "acc_stderr": 0.02782078198114968, + "acc_norm": 0.759493670886076, + "acc_norm_stderr": 0.02782078198114968 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.42046936114732725, + "acc_stderr": 0.012607654553832707, + "acc_norm": 0.42046936114732725, + "acc_norm_stderr": 0.012607654553832707 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6862745098039216, + "acc_stderr": 0.03256685484460388, + "acc_norm": 0.6862745098039216, + "acc_norm_stderr": 0.03256685484460388 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.696969696969697, + "acc_stderr": 0.035886248000917095, + "acc_norm": 0.696969696969697, + "acc_norm_stderr": 0.035886248000917095 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.41003671970624234, + "mc1_stderr": 0.017217844717449318, + "mc2": 0.583646136539335, + "mc2_stderr": 0.016204158336690234 + }, + "harness|ko_commongen_v2|2": { + "acc": 
0.5242030696576151, + "acc_stderr": 0.017170202466520748, + "acc_norm": 0.5371900826446281, + "acc_norm_stderr": 0.017142736117643304 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + 
"harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Edentns/DataVortexS-10.7B-dpo-v1.2", + "model_sha": "4620f8491ea4cc4b2d47f41ee7a6f32a6b1bfa01", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Edentns/DataVortexS-10.7B-dpo-v1.3/result_2024-01-26 00:10:47.json b/Edentns/DataVortexS-10.7B-dpo-v1.3/result_2024-01-26 00:10:47.json new file mode 100644 index 0000000000000000000000000000000000000000..ecf051444644ade83cfce67bcf9438b9d2774b5a --- /dev/null +++ b/Edentns/DataVortexS-10.7B-dpo-v1.3/result_2024-01-26 00:10:47.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.48378839590443684, + "acc_stderr": 0.014603708567414948, + "acc_norm": 0.5298634812286689, + "acc_norm_stderr": 0.014585305840007102 + }, + "harness|ko_hellaswag|10": { + "acc": 0.47161919936267677, + "acc_stderr": 0.004981736689518744, + "acc_norm": 0.6479784903405696, + "acc_norm_stderr": 0.00476624553960663 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 
0.6374269005847953, + "acc_stderr": 0.0368713061556206, + "acc_norm": 0.6374269005847953, + "acc_norm_stderr": 0.0368713061556206 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6116504854368932, + "acc_stderr": 0.0482572933735639, + "acc_norm": 0.6116504854368932, + "acc_norm_stderr": 0.0482572933735639 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6641123882503193, + "acc_stderr": 0.016889407235171683, + "acc_norm": 0.6641123882503193, + "acc_norm_stderr": 0.016889407235171683 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.043097329010363554, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.043097329010363554 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.502127659574468, + "acc_stderr": 0.03268572658667493, + "acc_norm": 0.502127659574468, + "acc_norm_stderr": 0.03268572658667493 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4819277108433735, + "acc_stderr": 0.03889951252827216, + "acc_norm": 0.4819277108433735, + "acc_norm_stderr": 0.03889951252827216 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6495176848874598, + "acc_stderr": 0.027098652621301747, + "acc_norm": 0.6495176848874598, + "acc_norm_stderr": 0.027098652621301747 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.6412556053811659, + "acc_stderr": 0.032190792004199956, + "acc_norm": 0.6412556053811659, + "acc_norm_stderr": 0.032190792004199956 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6793893129770993, + "acc_stderr": 0.040933292298342784, + "acc_norm": 0.6793893129770993, + "acc_norm_stderr": 0.040933292298342784 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 
0.7323232323232324, + "acc_stderr": 0.03154449888270286, + "acc_norm": 0.7323232323232324, + "acc_norm_stderr": 0.03154449888270286 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5379310344827586, + "acc_stderr": 0.04154659671707548, + "acc_norm": 0.5379310344827586, + "acc_norm_stderr": 0.04154659671707548 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.043364327079931785, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.043364327079931785 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5882352941176471, + "acc_stderr": 0.031968769891957786, + "acc_norm": 0.5882352941176471, + "acc_norm_stderr": 0.031968769891957786 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5282051282051282, + "acc_stderr": 0.025310639254933855, + "acc_norm": 0.5282051282051282, + "acc_norm_stderr": 0.025310639254933855 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.64, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.64, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6481481481481481, + "acc_stderr": 0.04616631111801713, + "acc_norm": 0.6481481481481481, + "acc_norm_stderr": 0.04616631111801713 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4039408866995074, + "acc_stderr": 0.0345245390382204, + "acc_norm": 0.4039408866995074, + "acc_norm_stderr": 0.0345245390382204 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5709677419354838, + "acc_stderr": 0.028156036538233193, + "acc_norm": 0.5709677419354838, + "acc_norm_stderr": 0.028156036538233193 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7905982905982906, + "acc_stderr": 0.02665569965392276, + "acc_norm": 0.7905982905982906, + "acc_norm_stderr": 0.02665569965392276 + }, + 
"harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5509433962264151, + "acc_stderr": 0.030612730713641092, + "acc_norm": 0.5509433962264151, + "acc_norm_stderr": 0.030612730713641092 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6454545454545455, + "acc_stderr": 0.04582004841505418, + "acc_norm": 0.6454545454545455, + "acc_norm_stderr": 0.04582004841505418 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.337037037037037, + "acc_stderr": 0.028820884666253255, + "acc_norm": 0.337037037037037, + "acc_norm_stderr": 0.028820884666253255 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3509933774834437, + "acc_stderr": 0.03896981964257375, + "acc_norm": 0.3509933774834437, + "acc_norm_stderr": 0.03896981964257375 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.736318407960199, + "acc_stderr": 0.03115715086935557, + "acc_norm": 0.736318407960199, + "acc_norm_stderr": 0.03115715086935557 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4913294797687861, + "acc_stderr": 0.03811890988940412, + "acc_norm": 0.4913294797687861, + "acc_norm_stderr": 0.03811890988940412 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.38095238095238093, + "acc_stderr": 0.025010749116137602, + "acc_norm": 0.38095238095238093, + "acc_norm_stderr": 0.025010749116137602 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.04155319955593146, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.04155319955593146 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.78, + "acc_stderr": 0.04163331998932262, + "acc_norm": 0.78, + "acc_norm_stderr": 0.04163331998932262 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.630057803468208, + "acc_stderr": 0.02599247202930639, + "acc_norm": 0.630057803468208, + "acc_norm_stderr": 
0.02599247202930639 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5337423312883436, + "acc_stderr": 0.039194155450484096, + "acc_norm": 0.5337423312883436, + "acc_norm_stderr": 0.039194155450484096 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6203703703703703, + "acc_stderr": 0.02700252103451647, + "acc_norm": 0.6203703703703703, + "acc_norm_stderr": 0.02700252103451647 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.694300518134715, + "acc_stderr": 0.033248379397581594, + "acc_norm": 0.694300518134715, + "acc_norm_stderr": 0.033248379397581594 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.4473684210526316, + "acc_stderr": 0.04677473004491199, + "acc_norm": 0.4473684210526316, + "acc_norm_stderr": 0.04677473004491199 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.671559633027523, + "acc_stderr": 0.020135902797298395, + "acc_norm": 0.671559633027523, + "acc_norm_stderr": 0.020135902797298395 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.0442626668137991, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.0442626668137991 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.6013071895424836, + "acc_stderr": 0.028036092273891765, + "acc_norm": 0.6013071895424836, + "acc_norm_stderr": 0.028036092273891765 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.58, + "acc_stderr": 0.04960449637488583, + "acc_norm": 0.58, + "acc_norm_stderr": 0.04960449637488583 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.71900826446281, + "acc_stderr": 0.04103203830514511, + "acc_norm": 0.71900826446281, + "acc_norm_stderr": 0.04103203830514511 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5986842105263158, + "acc_stderr": 0.03988903703336284, + "acc_norm": 0.5986842105263158, + 
"acc_norm_stderr": 0.03988903703336284 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5375816993464052, + "acc_stderr": 0.02017061497496977, + "acc_norm": 0.5375816993464052, + "acc_norm_stderr": 0.02017061497496977 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.40425531914893614, + "acc_stderr": 0.029275532159704732, + "acc_norm": 0.40425531914893614, + "acc_norm_stderr": 0.029275532159704732 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.38392857142857145, + "acc_stderr": 0.046161430750285455, + "acc_norm": 0.38392857142857145, + "acc_norm_stderr": 0.046161430750285455 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4675925925925926, + "acc_stderr": 0.03402801581358966, + "acc_norm": 0.4675925925925926, + "acc_norm_stderr": 0.03402801581358966 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2435754189944134, + "acc_stderr": 0.014355911964767864, + "acc_norm": 0.2435754189944134, + "acc_norm_stderr": 0.014355911964767864 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.72, + "acc_stderr": 0.045126085985421296, + "acc_norm": 0.72, + "acc_norm_stderr": 0.045126085985421296 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4632352941176471, + "acc_stderr": 0.030290619180485687, + "acc_norm": 0.4632352941176471, + "acc_norm_stderr": 0.030290619180485687 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.7142857142857143, + "acc_stderr": 0.028920583220675568, + "acc_norm": 0.7142857142857143, + "acc_norm_stderr": 0.028920583220675568 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7552742616033755, + "acc_stderr": 0.027985699387036423, + "acc_norm": 0.7552742616033755, + "acc_norm_stderr": 0.027985699387036423 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 
0.3956975228161669, + "acc_stderr": 0.012489290735449018, + "acc_norm": 0.3956975228161669, + "acc_norm_stderr": 0.012489290735449018 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.7352941176470589, + "acc_stderr": 0.03096451792692341, + "acc_norm": 0.7352941176470589, + "acc_norm_stderr": 0.03096451792692341 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.7151515151515152, + "acc_stderr": 0.0352439084451178, + "acc_norm": 0.7151515151515152, + "acc_norm_stderr": 0.0352439084451178 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.379436964504284, + "mc1_stderr": 0.016987039266142995, + "mc2": 0.5387447145181132, + "mc2_stderr": 0.01572553347000412 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.6044864226682408, + "acc_stderr": 0.016810815902206042, + "acc_norm": 0.6174734356552538, + "acc_norm_stderr": 0.01670916538722883 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 
1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Edentns/DataVortexS-10.7B-dpo-v1.3", + "model_sha": "d09ba5c290b169397b8b4939fed230605c17d38b", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Edentns/DataVortexS-10.7B-dpo-v1.4/result_2024-01-26 00:59:33.json 
b/Edentns/DataVortexS-10.7B-dpo-v1.4/result_2024-01-26 00:59:33.json new file mode 100644 index 0000000000000000000000000000000000000000..e22f4d0b3cc61cf1e167ddd9085de66892e2599e --- /dev/null +++ b/Edentns/DataVortexS-10.7B-dpo-v1.4/result_2024-01-26 00:59:33.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4598976109215017, + "acc_stderr": 0.01456431885692485, + "acc_norm": 0.5204778156996587, + "acc_norm_stderr": 0.014599131353035004 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4569806811392153, + "acc_stderr": 0.004971278309204199, + "acc_norm": 0.6292571200955985, + "acc_norm_stderr": 0.004820166002253078 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6374269005847953, + "acc_stderr": 0.0368713061556206, + "acc_norm": 0.6374269005847953, + "acc_norm_stderr": 0.0368713061556206 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6213592233009708, + "acc_stderr": 0.04802694698258973, + "acc_norm": 0.6213592233009708, + "acc_norm_stderr": 0.04802694698258973 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.665389527458493, + "acc_stderr": 0.016873468641592157, + "acc_norm": 0.665389527458493, + "acc_norm_stderr": 0.016873468641592157 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4888888888888889, + "acc_stderr": 0.04318275491977976, + "acc_norm": 0.4888888888888889, + "acc_norm_stderr": 0.04318275491977976 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206824, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206824 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.48936170212765956, + "acc_stderr": 0.03267862331014063, + "acc_norm": 0.48936170212765956, + "acc_norm_stderr": 0.03267862331014063 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4578313253012048, + "acc_stderr": 0.03878626771002361, + "acc_norm": 0.4578313253012048, + "acc_norm_stderr": 0.03878626771002361 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6109324758842444, + 
"acc_stderr": 0.027690337536485376, + "acc_norm": 0.6109324758842444, + "acc_norm_stderr": 0.027690337536485376 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.6367713004484304, + "acc_stderr": 0.032277904428505, + "acc_norm": 0.6367713004484304, + "acc_norm_stderr": 0.032277904428505 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6564885496183206, + "acc_stderr": 0.041649760719448786, + "acc_norm": 0.6564885496183206, + "acc_norm_stderr": 0.041649760719448786 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956914, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956914 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7323232323232324, + "acc_stderr": 0.03154449888270285, + "acc_norm": 0.7323232323232324, + "acc_norm_stderr": 0.03154449888270285 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5103448275862069, + "acc_stderr": 0.04165774775728763, + "acc_norm": 0.5103448275862069, + "acc_norm_stderr": 0.04165774775728763 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.04488482852329017, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.04488482852329017 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5672268907563025, + "acc_stderr": 0.032183581077426124, + "acc_norm": 0.5672268907563025, + "acc_norm_stderr": 0.032183581077426124 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5, + "acc_stderr": 0.02535100632816969, + "acc_norm": 0.5, + "acc_norm_stderr": 0.02535100632816969 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.64, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.64, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6296296296296297, + "acc_stderr": 
0.04668408033024931, + "acc_norm": 0.6296296296296297, + "acc_norm_stderr": 0.04668408033024931 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4039408866995074, + "acc_stderr": 0.0345245390382204, + "acc_norm": 0.4039408866995074, + "acc_norm_stderr": 0.0345245390382204 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5548387096774193, + "acc_stderr": 0.028272410186214906, + "acc_norm": 0.5548387096774193, + "acc_norm_stderr": 0.028272410186214906 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.8034188034188035, + "acc_stderr": 0.026035386098951292, + "acc_norm": 0.8034188034188035, + "acc_norm_stderr": 0.026035386098951292 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5433962264150943, + "acc_stderr": 0.030656748696739435, + "acc_norm": 0.5433962264150943, + "acc_norm_stderr": 0.030656748696739435 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5818181818181818, + "acc_stderr": 0.04724577405731572, + "acc_norm": 0.5818181818181818, + "acc_norm_stderr": 0.04724577405731572 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.028317533496066482, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.028317533496066482 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3443708609271523, + "acc_stderr": 0.038796870240733264, + "acc_norm": 0.3443708609271523, + "acc_norm_stderr": 0.038796870240733264 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7412935323383084, + "acc_stderr": 0.03096590312357303, + "acc_norm": 0.7412935323383084, + "acc_norm_stderr": 0.03096590312357303 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4624277456647399, + "acc_stderr": 0.0380168510452446, + "acc_norm": 0.4624277456647399, + "acc_norm_stderr": 0.0380168510452446 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3862433862433862, + "acc_stderr": 0.025075981767601688, + "acc_norm": 0.3862433862433862, + "acc_norm_stderr": 0.025075981767601688 + 
}, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5416666666666666, + "acc_stderr": 0.04166666666666666, + "acc_norm": 0.5416666666666666, + "acc_norm_stderr": 0.04166666666666666 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.74, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.74, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.6242774566473989, + "acc_stderr": 0.02607431485165708, + "acc_norm": 0.6242774566473989, + "acc_norm_stderr": 0.02607431485165708 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5644171779141104, + "acc_stderr": 0.03895632464138937, + "acc_norm": 0.5644171779141104, + "acc_norm_stderr": 0.03895632464138937 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6327160493827161, + "acc_stderr": 0.026822801759507898, + "acc_norm": 0.6327160493827161, + "acc_norm_stderr": 0.026822801759507898 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.694300518134715, + "acc_stderr": 0.03324837939758159, + "acc_norm": 0.694300518134715, + "acc_norm_stderr": 0.03324837939758159 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.40350877192982454, + "acc_stderr": 0.04615186962583704, + "acc_norm": 0.40350877192982454, + "acc_norm_stderr": 0.04615186962583704 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6623853211009174, + "acc_stderr": 0.020275265986638903, + "acc_norm": 0.6623853211009174, + "acc_norm_stderr": 0.020275265986638903 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.04360314860077459, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 
0.04360314860077459 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5784313725490197, + "acc_stderr": 0.02827549015679145, + "acc_norm": 0.5784313725490197, + "acc_norm_stderr": 0.02827549015679145 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.62, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.62, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7355371900826446, + "acc_stderr": 0.04026187527591207, + "acc_norm": 0.7355371900826446, + "acc_norm_stderr": 0.04026187527591207 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5592105263157895, + "acc_stderr": 0.04040311062490436, + "acc_norm": 0.5592105263157895, + "acc_norm_stderr": 0.04040311062490436 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5261437908496732, + "acc_stderr": 0.020200164564804588, + "acc_norm": 0.5261437908496732, + "acc_norm_stderr": 0.020200164564804588 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.38652482269503546, + "acc_stderr": 0.02904919034254345, + "acc_norm": 0.38652482269503546, + "acc_norm_stderr": 0.02904919034254345 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.375, + "acc_stderr": 0.04595091388086298, + "acc_norm": 0.375, + "acc_norm_stderr": 0.04595091388086298 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4398148148148148, + "acc_stderr": 0.033851779760448106, + "acc_norm": 0.4398148148148148, + "acc_norm_stderr": 0.033851779760448106 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.21340782122905028, + "acc_stderr": 0.013702859932196094, + "acc_norm": 0.21340782122905028, + "acc_norm_stderr": 0.013702859932196094 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.69, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.69, + "acc_norm_stderr": 
0.04648231987117316 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4485294117647059, + "acc_stderr": 0.030211479609121596, + "acc_norm": 0.4485294117647059, + "acc_norm_stderr": 0.030211479609121596 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6408163265306123, + "acc_stderr": 0.03071356045510849, + "acc_norm": 0.6408163265306123, + "acc_norm_stderr": 0.03071356045510849 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7468354430379747, + "acc_stderr": 0.028304657943035303, + "acc_norm": 0.7468354430379747, + "acc_norm_stderr": 0.028304657943035303 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.38657105606258146, + "acc_stderr": 0.012437288868088727, + "acc_norm": 0.38657105606258146, + "acc_norm_stderr": 0.012437288868088727 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6911764705882353, + "acc_stderr": 0.03242661719827218, + "acc_norm": 0.6911764705882353, + "acc_norm_stderr": 0.03242661719827218 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.703030303030303, + "acc_stderr": 0.03567969772268047, + "acc_norm": 0.703030303030303, + "acc_norm_stderr": 0.03567969772268047 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3488372093023256, + "mc1_stderr": 0.016684419859986865, + "mc2": 0.504220057699691, + "mc2_stderr": 0.015580354868697804 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4887839433293979, + "acc_stderr": 0.017186028469489287, + "acc_norm": 0.500590318772137, + "acc_norm_stderr": 0.017190342123448586 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + 
"harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + 
"harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Edentns/DataVortexS-10.7B-dpo-v1.4", + "model_sha": "5e53c971e2dd66918e597b865d6d64bbd8bd9d35", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Edentns/DataVortexS-10.7B-dpo-v1.5/result_2024-01-28 09:25:57.json b/Edentns/DataVortexS-10.7B-dpo-v1.5/result_2024-01-28 09:25:57.json new file mode 100644 index 0000000000000000000000000000000000000000..236683c55384054d4c32f9a7314ae5d6f82b8bf3 --- /dev/null +++ b/Edentns/DataVortexS-10.7B-dpo-v1.5/result_2024-01-28 09:25:57.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.46757679180887374, + "acc_stderr": 0.014580637569995421, + "acc_norm": 0.5213310580204779, + "acc_norm_stderr": 0.014598087973127104 + }, + "harness|ko_hellaswag|10": { + "acc": 0.44692292372037445, + "acc_stderr": 0.004961587574275634, + "acc_norm": 0.6127265484963155, + "acc_norm_stderr": 0.0048613146132868365 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.631578947368421, + "acc_stderr": 0.036996580176568775, + "acc_norm": 0.631578947368421, + "acc_norm_stderr": 0.036996580176568775 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6601941747572816, + "acc_stderr": 0.046897659372781335, + "acc_norm": 0.6601941747572816, + "acc_norm_stderr": 0.046897659372781335 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.665389527458493, + "acc_stderr": 0.01687346864159216, + "acc_norm": 0.665389527458493, + "acc_norm_stderr": 0.01687346864159216 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3851851851851852, + "acc_stderr": 
0.042039210401562783, + "acc_norm": 0.3851851851851852, + "acc_norm_stderr": 0.042039210401562783 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.5106382978723404, + "acc_stderr": 0.03267862331014063, + "acc_norm": 0.5106382978723404, + "acc_norm_stderr": 0.03267862331014063 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4879518072289157, + "acc_stderr": 0.0389136449583582, + "acc_norm": 0.4879518072289157, + "acc_norm_stderr": 0.0389136449583582 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5884244372990354, + "acc_stderr": 0.02795048149440127, + "acc_norm": 0.5884244372990354, + "acc_norm_stderr": 0.02795048149440127 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.6053811659192825, + "acc_stderr": 0.03280400504755291, + "acc_norm": 0.6053811659192825, + "acc_norm_stderr": 0.03280400504755291 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5877862595419847, + "acc_stderr": 0.043171711948702556, + "acc_norm": 0.5877862595419847, + "acc_norm_stderr": 0.043171711948702556 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7171717171717171, + "acc_stderr": 0.03208779558786752, + "acc_norm": 0.7171717171717171, + "acc_norm_stderr": 0.03208779558786752 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.47586206896551725, + "acc_stderr": 0.041618085035015295, + "acc_norm": 0.47586206896551725, + "acc_norm_stderr": 0.041618085035015295 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.04835503696107223, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.04835503696107223 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 
0.6554621848739496, + "acc_stderr": 0.030868682604121622, + "acc_norm": 0.6554621848739496, + "acc_norm_stderr": 0.030868682604121622 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5717948717948718, + "acc_stderr": 0.025088301454694827, + "acc_norm": 0.5717948717948718, + "acc_norm_stderr": 0.025088301454694827 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6851851851851852, + "acc_stderr": 0.04489931073591312, + "acc_norm": 0.6851851851851852, + "acc_norm_stderr": 0.04489931073591312 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39901477832512317, + "acc_stderr": 0.034454876862647144, + "acc_norm": 0.39901477832512317, + "acc_norm_stderr": 0.034454876862647144 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5967741935483871, + "acc_stderr": 0.027906150826041143, + "acc_norm": 0.5967741935483871, + "acc_norm_stderr": 0.027906150826041143 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.811965811965812, + "acc_stderr": 0.025598193686652247, + "acc_norm": 0.811965811965812, + "acc_norm_stderr": 0.025598193686652247 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.569811320754717, + "acc_stderr": 0.030471445867183235, + "acc_norm": 0.569811320754717, + "acc_norm_stderr": 0.030471445867183235 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6, + "acc_stderr": 0.0469237132203465, + "acc_norm": 0.6, + "acc_norm_stderr": 0.0469237132203465 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.029116617606083018, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.029116617606083018 + }, + 
"harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3509933774834437, + "acc_stderr": 0.03896981964257374, + "acc_norm": 0.3509933774834437, + "acc_norm_stderr": 0.03896981964257374 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6268656716417911, + "acc_stderr": 0.034198326081760065, + "acc_norm": 0.6268656716417911, + "acc_norm_stderr": 0.034198326081760065 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4797687861271676, + "acc_stderr": 0.03809342081273958, + "acc_norm": 0.4797687861271676, + "acc_norm_stderr": 0.03809342081273958 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.4523809523809524, + "acc_stderr": 0.025634258115554965, + "acc_norm": 0.4523809523809524, + "acc_norm_stderr": 0.025634258115554965 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5069444444444444, + "acc_stderr": 0.04180806750294938, + "acc_norm": 0.5069444444444444, + "acc_norm_stderr": 0.04180806750294938 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.72, + "acc_stderr": 0.04512608598542126, + "acc_norm": 0.72, + "acc_norm_stderr": 0.04512608598542126 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5780346820809249, + "acc_stderr": 0.02658923114217426, + "acc_norm": 0.5780346820809249, + "acc_norm_stderr": 0.02658923114217426 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.49079754601226994, + "acc_stderr": 0.039277056007874414, + "acc_norm": 0.49079754601226994, + "acc_norm_stderr": 0.039277056007874414 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6327160493827161, + "acc_stderr": 0.026822801759507894, + "acc_norm": 0.6327160493827161, + "acc_norm_stderr": 0.026822801759507894 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + 
"harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7357512953367875, + "acc_stderr": 0.03182155050916647, + "acc_norm": 0.7357512953367875, + "acc_norm_stderr": 0.03182155050916647 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.5175438596491229, + "acc_stderr": 0.04700708033551038, + "acc_norm": 0.5175438596491229, + "acc_norm_stderr": 0.04700708033551038 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6678899082568808, + "acc_stderr": 0.02019268298542334, + "acc_norm": 0.6678899082568808, + "acc_norm_stderr": 0.02019268298542334 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.42063492063492064, + "acc_stderr": 0.04415438226743744, + "acc_norm": 0.42063492063492064, + "acc_norm_stderr": 0.04415438226743744 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5588235294117647, + "acc_stderr": 0.028431095444176643, + "acc_norm": 0.5588235294117647, + "acc_norm_stderr": 0.028431095444176643 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.59, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.59, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7272727272727273, + "acc_stderr": 0.04065578140908705, + "acc_norm": 0.7272727272727273, + "acc_norm_stderr": 0.04065578140908705 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.6052631578947368, + "acc_stderr": 0.039777499346220734, + "acc_norm": 0.6052631578947368, + "acc_norm_stderr": 0.039777499346220734 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4869281045751634, + "acc_stderr": 0.020220920829626923, + "acc_norm": 0.4869281045751634, + "acc_norm_stderr": 0.020220920829626923 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.40070921985815605, + "acc_stderr": 0.02923346574557309, + "acc_norm": 0.40070921985815605, + "acc_norm_stderr": 0.02923346574557309 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4107142857142857, + "acc_stderr": 0.046695106638751926, + 
"acc_norm": 0.4107142857142857, + "acc_norm_stderr": 0.046695106638751926 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5, + "acc_stderr": 0.034099716973523674, + "acc_norm": 0.5, + "acc_norm_stderr": 0.034099716973523674 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.21340782122905028, + "acc_stderr": 0.013702859932196098, + "acc_norm": 0.21340782122905028, + "acc_norm_stderr": 0.013702859932196098 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.47, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.47, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.73, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.73, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4742647058823529, + "acc_stderr": 0.03033257809455504, + "acc_norm": 0.4742647058823529, + "acc_norm_stderr": 0.03033257809455504 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5591836734693878, + "acc_stderr": 0.03178419114175363, + "acc_norm": 0.5591836734693878, + "acc_norm_stderr": 0.03178419114175363 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7552742616033755, + "acc_stderr": 0.027985699387036416, + "acc_norm": 0.7552742616033755, + "acc_norm_stderr": 0.027985699387036416 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.41590612777053454, + "acc_stderr": 0.012588323850313604, + "acc_norm": 0.41590612777053454, + "acc_norm_stderr": 0.012588323850313604 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.7009803921568627, + "acc_stderr": 0.03213325717373618, + "acc_norm": 0.7009803921568627, + "acc_norm_stderr": 0.03213325717373618 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6848484848484848, + "acc_stderr": 0.0362773057502241, + "acc_norm": 0.6848484848484848, + "acc_norm_stderr": 0.0362773057502241 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 
0.3268053855569155, + "mc1_stderr": 0.016419874731135042, + "mc2": 0.4970578062305813, + "mc2_stderr": 0.015424652603467051 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5702479338842975, + "acc_stderr": 0.017019847535972205, + "acc_norm": 0.5950413223140496, + "acc_norm_stderr": 0.016876941165045616 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Edentns/DataVortexS-10.7B-dpo-v1.5", + "model_sha": "2dc899f4cad7392c95519aedb56395197399fc12", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Edentns/DataVortexS-10.7B-dpo-v1.6/result_2024-01-28 09:26:01.json b/Edentns/DataVortexS-10.7B-dpo-v1.6/result_2024-01-28 09:26:01.json new file mode 100644 index 0000000000000000000000000000000000000000..c36da29fd281ded1959cf3057c5cacbcc315c25f --- /dev/null +++ b/Edentns/DataVortexS-10.7B-dpo-v1.6/result_2024-01-28 09:26:01.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.48464163822525597, + "acc_stderr": 0.014604496129394908, + "acc_norm": 0.53839590443686, + "acc_norm_stderr": 0.014568245550296358 + }, + "harness|ko_hellaswag|10": { + "acc": 
0.4939255128460466, + "acc_stderr": 0.004989413158034799, + "acc_norm": 0.6790479984066919, + "acc_norm_stderr": 0.0046588829290995165 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.038110796698335316, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.038110796698335316 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6310679611650486, + "acc_stderr": 0.0477761518115674, + "acc_norm": 0.6310679611650486, + "acc_norm_stderr": 0.0477761518115674 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6781609195402298, + "acc_stderr": 0.016706381415057904, + "acc_norm": 0.6781609195402298, + "acc_norm_stderr": 0.016706381415057904 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37777777777777777, + "acc_stderr": 0.04188307537595853, + "acc_norm": 0.37777777777777777, + "acc_norm_stderr": 0.04188307537595853 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.451063829787234, + "acc_stderr": 0.032529096196131965, + "acc_norm": 0.451063829787234, + "acc_norm_stderr": 0.032529096196131965 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42168674698795183, + "acc_stderr": 0.03844453181770917, + "acc_norm": 0.42168674698795183, + "acc_norm_stderr": 0.03844453181770917 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6077170418006431, + "acc_stderr": 0.027731258647011994, + "acc_norm": 0.6077170418006431, + "acc_norm_stderr": 0.027731258647011994 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5964125560538116, + "acc_stderr": 0.03292802819330314, + "acc_norm": 0.5964125560538116, + "acc_norm_stderr": 0.03292802819330314 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6106870229007634, + "acc_stderr": 0.04276486542814591, + "acc_norm": 0.6106870229007634, + "acc_norm_stderr": 0.04276486542814591 + }, + 
"harness|ko_mmlu_medical_genetics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956914, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956914 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7121212121212122, + "acc_stderr": 0.03225883512300993, + "acc_norm": 0.7121212121212122, + "acc_norm_stderr": 0.03225883512300993 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4206896551724138, + "acc_stderr": 0.0411391498118926, + "acc_norm": 0.4206896551724138, + "acc_norm_stderr": 0.0411391498118926 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.4117647058823529, + "acc_stderr": 0.048971049527263666, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.048971049527263666 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6218487394957983, + "acc_stderr": 0.031499305777849054, + "acc_norm": 0.6218487394957983, + "acc_norm_stderr": 0.031499305777849054 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5666666666666667, + "acc_stderr": 0.0251246535258851, + "acc_norm": 0.5666666666666667, + "acc_norm_stderr": 0.0251246535258851 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5740740740740741, + "acc_stderr": 0.0478034362693679, + "acc_norm": 0.5740740740740741, + "acc_norm_stderr": 0.0478034362693679 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3891625615763547, + "acc_stderr": 0.03430462416103872, + "acc_norm": 0.3891625615763547, + "acc_norm_stderr": 0.03430462416103872 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5741935483870968, + "acc_stderr": 0.028129112709165908, + "acc_norm": 0.5741935483870968, + "acc_norm_stderr": 
0.028129112709165908 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7222222222222222, + "acc_stderr": 0.02934311479809446, + "acc_norm": 0.7222222222222222, + "acc_norm_stderr": 0.02934311479809446 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5547169811320755, + "acc_stderr": 0.030588052974270658, + "acc_norm": 0.5547169811320755, + "acc_norm_stderr": 0.030588052974270658 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6090909090909091, + "acc_stderr": 0.046737523336702384, + "acc_norm": 0.6090909090909091, + "acc_norm_stderr": 0.046737523336702384 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.35555555555555557, + "acc_stderr": 0.02918571494985741, + "acc_norm": 0.35555555555555557, + "acc_norm_stderr": 0.02918571494985741 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33774834437086093, + "acc_stderr": 0.038615575462551684, + "acc_norm": 0.33774834437086093, + "acc_norm_stderr": 0.038615575462551684 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6517412935323383, + "acc_stderr": 0.033687874661154596, + "acc_norm": 0.6517412935323383, + "acc_norm_stderr": 0.033687874661154596 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5317919075144508, + "acc_stderr": 0.03804749744364764, + "acc_norm": 0.5317919075144508, + "acc_norm_stderr": 0.03804749744364764 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.4021164021164021, + "acc_stderr": 0.02525303255499769, + "acc_norm": 0.4021164021164021, + "acc_norm_stderr": 0.02525303255499769 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5, + "acc_stderr": 0.04181210050035455, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04181210050035455 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.41, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.41, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.69, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.69, + "acc_norm_stderr": 
0.04648231987117316 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5317919075144508, + "acc_stderr": 0.026864624366756656, + "acc_norm": 0.5317919075144508, + "acc_norm_stderr": 0.026864624366756656 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5337423312883436, + "acc_stderr": 0.039194155450484096, + "acc_norm": 0.5337423312883436, + "acc_norm_stderr": 0.039194155450484096 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5617283950617284, + "acc_stderr": 0.027607914087400473, + "acc_norm": 0.5617283950617284, + "acc_norm_stderr": 0.027607914087400473 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7150259067357513, + "acc_stderr": 0.032577140777096614, + "acc_norm": 0.7150259067357513, + "acc_norm_stderr": 0.032577140777096614 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.4473684210526316, + "acc_stderr": 0.046774730044912, + "acc_norm": 0.4473684210526316, + "acc_norm_stderr": 0.046774730044912 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.673394495412844, + "acc_stderr": 0.020106990889937306, + "acc_norm": 0.673394495412844, + "acc_norm_stderr": 0.020106990889937306 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.38095238095238093, + "acc_stderr": 0.043435254289490965, + "acc_norm": 0.38095238095238093, + "acc_norm_stderr": 0.043435254289490965 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.565359477124183, + "acc_stderr": 0.028384256704883034, + "acc_norm": 0.565359477124183, + "acc_norm_stderr": 0.028384256704883034 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6694214876033058, + "acc_stderr": 0.042943408452120926, + "acc_norm": 
0.6694214876033058, + "acc_norm_stderr": 0.042943408452120926 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5263157894736842, + "acc_stderr": 0.04063302731486671, + "acc_norm": 0.5263157894736842, + "acc_norm_stderr": 0.04063302731486671 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.49673202614379086, + "acc_stderr": 0.020227402794434867, + "acc_norm": 0.49673202614379086, + "acc_norm_stderr": 0.020227402794434867 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3900709219858156, + "acc_stderr": 0.029097675599463926, + "acc_norm": 0.3900709219858156, + "acc_norm_stderr": 0.029097675599463926 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.375, + "acc_stderr": 0.04595091388086298, + "acc_norm": 0.375, + "acc_norm_stderr": 0.04595091388086298 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5277777777777778, + "acc_stderr": 0.0340470532865388, + "acc_norm": 0.5277777777777778, + "acc_norm_stderr": 0.0340470532865388 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.37988826815642457, + "acc_stderr": 0.016232826818678502, + "acc_norm": 0.37988826815642457, + "acc_norm_stderr": 0.016232826818678502 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.049020713000019756, + "acc_norm": 0.39, + "acc_norm_stderr": 0.049020713000019756 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5367647058823529, + "acc_stderr": 0.030290619180485694, + "acc_norm": 0.5367647058823529, + "acc_norm_stderr": 0.030290619180485694 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5510204081632653, + "acc_stderr": 0.03184213866687579, + "acc_norm": 0.5510204081632653, + "acc_norm_stderr": 0.03184213866687579 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7468354430379747, + 
"acc_stderr": 0.028304657943035307, + "acc_norm": 0.7468354430379747, + "acc_norm_stderr": 0.028304657943035307 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.40547588005215124, + "acc_stderr": 0.012539960672377209, + "acc_norm": 0.40547588005215124, + "acc_norm_stderr": 0.012539960672377209 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6225490196078431, + "acc_stderr": 0.03402272044340703, + "acc_norm": 0.6225490196078431, + "acc_norm_stderr": 0.03402272044340703 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.696969696969697, + "acc_stderr": 0.03588624800091708, + "acc_norm": 0.696969696969697, + "acc_norm_stderr": 0.03588624800091708 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.47368421052631576, + "mc1_stderr": 0.017479241161975526, + "mc2": 0.6460211319664573, + "mc2_stderr": 0.016128307064999306 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5430932703659976, + "acc_stderr": 0.01712638909308678, + "acc_norm": 0.5737898465171193, + "acc_norm_stderr": 0.017002122609489252 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + 
"harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Edentns/DataVortexS-10.7B-dpo-v1.6", + "model_sha": "c6e33d98405845bd570a434319e443c3f6677a0f", + "model_dtype": "torch.float16", + "lighteval_sha": "", + 
"num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Edentns/DataVortexS-10.7B-dpo-v1.7/result_2024-01-29 04:15:16.json b/Edentns/DataVortexS-10.7B-dpo-v1.7/result_2024-01-29 04:15:16.json new file mode 100644 index 0000000000000000000000000000000000000000..59e602d86398b83a2555010da6b60f4661d049b4 --- /dev/null +++ b/Edentns/DataVortexS-10.7B-dpo-v1.7/result_2024-01-29 04:15:16.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4974402730375427, + "acc_stderr": 0.014611199329843784, + "acc_norm": 0.5554607508532423, + "acc_norm_stderr": 0.01452122640562708 + }, + "harness|ko_hellaswag|10": { + "acc": 0.45937064329814775, + "acc_stderr": 0.004973280417705513, + "acc_norm": 0.6339374626568413, + "acc_norm_stderr": 0.004807423343224586 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6374269005847953, + "acc_stderr": 0.0368713061556206, + "acc_norm": 0.6374269005847953, + "acc_norm_stderr": 0.0368713061556206 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6601941747572816, + "acc_stderr": 0.04689765937278135, + "acc_norm": 0.6601941747572816, + "acc_norm_stderr": 0.04689765937278135 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.70242656449553, + "acc_stderr": 0.01634911191290943, + "acc_norm": 0.70242656449553, + "acc_norm_stderr": 0.01634911191290943 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4740740740740741, + "acc_stderr": 0.04313531696750573, + "acc_norm": 0.4740740740740741, + "acc_norm_stderr": 0.04313531696750573 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4765957446808511, + "acc_stderr": 0.03265019475033583, + "acc_norm": 0.4765957446808511, + "acc_norm_stderr": 0.03265019475033583 + }, + "harness|ko_mmlu_virology|5": { + 
"acc": 0.4457831325301205, + "acc_stderr": 0.03869543323472101, + "acc_norm": 0.4457831325301205, + "acc_norm_stderr": 0.03869543323472101 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5691318327974276, + "acc_stderr": 0.028125340983972714, + "acc_norm": 0.5691318327974276, + "acc_norm_stderr": 0.028125340983972714 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5515695067264574, + "acc_stderr": 0.03337883736255098, + "acc_norm": 0.5515695067264574, + "acc_norm_stderr": 0.03337883736255098 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.549618320610687, + "acc_stderr": 0.04363643698524779, + "acc_norm": 0.549618320610687, + "acc_norm_stderr": 0.04363643698524779 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.48, + "acc_stderr": 0.05021167315686779, + "acc_norm": 0.48, + "acc_norm_stderr": 0.05021167315686779 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6767676767676768, + "acc_stderr": 0.033322999210706444, + "acc_norm": 0.6767676767676768, + "acc_norm_stderr": 0.033322999210706444 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.496551724137931, + "acc_stderr": 0.041665675771015785, + "acc_norm": 0.496551724137931, + "acc_norm_stderr": 0.041665675771015785 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.04389869956808778, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.04389869956808778 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5, + "acc_stderr": 0.032478490123081544, + "acc_norm": 0.5, + "acc_norm_stderr": 0.032478490123081544 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5538461538461539, + "acc_stderr": 0.025203571773028323, + "acc_norm": 0.5538461538461539, + "acc_norm_stderr": 0.025203571773028323 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_global_facts|5": 
{ + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5833333333333334, + "acc_stderr": 0.04766075165356461, + "acc_norm": 0.5833333333333334, + "acc_norm_stderr": 0.04766075165356461 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39901477832512317, + "acc_stderr": 0.03445487686264715, + "acc_norm": 0.39901477832512317, + "acc_norm_stderr": 0.03445487686264715 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6129032258064516, + "acc_stderr": 0.027709359675032495, + "acc_norm": 0.6129032258064516, + "acc_norm_stderr": 0.027709359675032495 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.8034188034188035, + "acc_stderr": 0.02603538609895129, + "acc_norm": 0.8034188034188035, + "acc_norm_stderr": 0.02603538609895129 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5320754716981132, + "acc_stderr": 0.03070948699255654, + "acc_norm": 0.5320754716981132, + "acc_norm_stderr": 0.03070948699255654 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5545454545454546, + "acc_stderr": 0.047605488214603246, + "acc_norm": 0.5545454545454546, + "acc_norm_stderr": 0.047605488214603246 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3814814814814815, + "acc_stderr": 0.029616718927497596, + "acc_norm": 0.3814814814814815, + "acc_norm_stderr": 0.029616718927497596 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.4105960264900662, + "acc_stderr": 0.04016689594849929, + "acc_norm": 0.4105960264900662, + "acc_norm_stderr": 0.04016689594849929 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.681592039800995, + "acc_stderr": 0.03294118479054096, + "acc_norm": 0.681592039800995, + "acc_norm_stderr": 0.03294118479054096 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4508670520231214, + "acc_stderr": 0.037940126746970296, + "acc_norm": 0.4508670520231214, + "acc_norm_stderr": 
0.037940126746970296 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.36243386243386244, + "acc_stderr": 0.02475747390275206, + "acc_norm": 0.36243386243386244, + "acc_norm_stderr": 0.02475747390275206 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5069444444444444, + "acc_stderr": 0.04180806750294938, + "acc_norm": 0.5069444444444444, + "acc_norm_stderr": 0.04180806750294938 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.76, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.76, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5549132947976878, + "acc_stderr": 0.026756255129663765, + "acc_norm": 0.5549132947976878, + "acc_norm_stderr": 0.026756255129663765 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5337423312883436, + "acc_stderr": 0.039194155450484096, + "acc_norm": 0.5337423312883436, + "acc_norm_stderr": 0.039194155450484096 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5370370370370371, + "acc_stderr": 0.027744313443376536, + "acc_norm": 0.5370370370370371, + "acc_norm_stderr": 0.027744313443376536 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6580310880829016, + "acc_stderr": 0.03423465100104283, + "acc_norm": 0.6580310880829016, + "acc_norm_stderr": 0.03423465100104283 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.37719298245614036, + "acc_stderr": 0.04559522141958215, + "acc_norm": 0.37719298245614036, + "acc_norm_stderr": 0.04559522141958215 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.636697247706422, + "acc_stderr": 0.020620603919625807, + "acc_norm": 0.636697247706422, + 
"acc_norm_stderr": 0.020620603919625807 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.373015873015873, + "acc_stderr": 0.04325506042017086, + "acc_norm": 0.373015873015873, + "acc_norm_stderr": 0.04325506042017086 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5294117647058824, + "acc_stderr": 0.02858034106513829, + "acc_norm": 0.5294117647058824, + "acc_norm_stderr": 0.02858034106513829 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7024793388429752, + "acc_stderr": 0.04173349148083499, + "acc_norm": 0.7024793388429752, + "acc_norm_stderr": 0.04173349148083499 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5789473684210527, + "acc_stderr": 0.040179012759817494, + "acc_norm": 0.5789473684210527, + "acc_norm_stderr": 0.040179012759817494 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.46895424836601307, + "acc_stderr": 0.020188804456361887, + "acc_norm": 0.46895424836601307, + "acc_norm_stderr": 0.020188804456361887 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32978723404255317, + "acc_stderr": 0.0280459469420424, + "acc_norm": 0.32978723404255317, + "acc_norm_stderr": 0.0280459469420424 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.04547960999764376, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.04547960999764376 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.03350991604696044, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.03350991604696044 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2346368715083799, + "acc_stderr": 0.014173044098303675, + "acc_norm": 0.2346368715083799, + "acc_norm_stderr": 0.014173044098303675 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.52, + "acc_stderr": 
0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.62, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.62, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4227941176470588, + "acc_stderr": 0.030008562845003466, + "acc_norm": 0.4227941176470588, + "acc_norm_stderr": 0.030008562845003466 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5877551020408164, + "acc_stderr": 0.031512360446742695, + "acc_norm": 0.5877551020408164, + "acc_norm_stderr": 0.031512360446742695 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7046413502109705, + "acc_stderr": 0.029696338713422882, + "acc_norm": 0.7046413502109705, + "acc_norm_stderr": 0.029696338713422882 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.35658409387222945, + "acc_stderr": 0.01223364298927389, + "acc_norm": 0.35658409387222945, + "acc_norm_stderr": 0.01223364298927389 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6470588235294118, + "acc_stderr": 0.03354092437591519, + "acc_norm": 0.6470588235294118, + "acc_norm_stderr": 0.03354092437591519 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6181818181818182, + "acc_stderr": 0.03793713171165633, + "acc_norm": 0.6181818181818182, + "acc_norm_stderr": 0.03793713171165633 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.31701346389228885, + "mc1_stderr": 0.016289203374403392, + "mc2": 0.48234441684091955, + "mc2_stderr": 0.015365663323313775 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5938606847697757, + "acc_stderr": 0.016884749503191396, + "acc_norm": 0.6033057851239669, + "acc_norm_stderr": 0.016819438642971404 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + 
"harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 
1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Edentns/DataVortexS-10.7B-dpo-v1.7", + "model_sha": "85af13a7e6002cee79c1b0be9cd0c93fd18d723e", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Edentns/DataVortexS-10.7B-dpo-v1.8/result_2024-02-02 00:11:00.json b/Edentns/DataVortexS-10.7B-dpo-v1.8/result_2024-02-02 00:11:00.json new file mode 100644 index 0000000000000000000000000000000000000000..e8363e0d949999e6a2a8ce2e413a92db9a0e2f1a --- /dev/null +++ b/Edentns/DataVortexS-10.7B-dpo-v1.8/result_2024-02-02 00:11:00.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.48378839590443684, + "acc_stderr": 0.014603708567414947, + "acc_norm": 0.5255972696245734, + "acc_norm_stderr": 0.014592230885298962 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4849631547500498, + "acc_stderr": 0.004987524454849698, + "acc_norm": 0.6667994423421629, + "acc_norm_stderr": 0.004703942346762255 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6081871345029239, + "acc_stderr": 0.037439798259263996, + "acc_norm": 0.6081871345029239, + "acc_norm_stderr": 0.037439798259263996 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.7087378640776699, + "acc_stderr": 0.044986763205729224, + "acc_norm": 0.7087378640776699, + "acc_norm_stderr": 0.044986763205729224 + }, + 
"harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6309067688378033, + "acc_stderr": 0.01725628310912463, + "acc_norm": 0.6309067688378033, + "acc_norm_stderr": 0.01725628310912463 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.362962962962963, + "acc_stderr": 0.04153948404742401, + "acc_norm": 0.362962962962963, + "acc_norm_stderr": 0.04153948404742401 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036846, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036846 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.43829787234042555, + "acc_stderr": 0.032436186361081025, + "acc_norm": 0.43829787234042555, + "acc_norm_stderr": 0.032436186361081025 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42168674698795183, + "acc_stderr": 0.03844453181770917, + "acc_norm": 0.42168674698795183, + "acc_norm_stderr": 0.03844453181770917 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5691318327974276, + "acc_stderr": 0.028125340983972714, + "acc_norm": 0.5691318327974276, + "acc_norm_stderr": 0.028125340983972714 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.515695067264574, + "acc_stderr": 0.0335412657542081, + "acc_norm": 0.515695067264574, + "acc_norm_stderr": 0.0335412657542081 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.549618320610687, + "acc_stderr": 0.04363643698524779, + "acc_norm": 0.549618320610687, + "acc_norm_stderr": 0.04363643698524779 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.44, + "acc_stderr": 0.0498887651569859, + "acc_norm": 0.44, + "acc_norm_stderr": 0.0498887651569859 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6868686868686869, + "acc_stderr": 0.033042050878136525, + "acc_norm": 0.6868686868686869, + "acc_norm_stderr": 0.033042050878136525 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.47586206896551725, + "acc_stderr": 0.0416180850350153, + "acc_norm": 0.47586206896551725, + "acc_norm_stderr": 0.0416180850350153 + }, + 
"harness|ko_mmlu_college_physics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04690650298201943, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04690650298201943 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6134453781512605, + "acc_stderr": 0.03163145807552378, + "acc_norm": 0.6134453781512605, + "acc_norm_stderr": 0.03163145807552378 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5282051282051282, + "acc_stderr": 0.02531063925493386, + "acc_norm": 0.5282051282051282, + "acc_norm_stderr": 0.02531063925493386 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.59, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.59, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6018518518518519, + "acc_stderr": 0.047323326159788126, + "acc_norm": 0.6018518518518519, + "acc_norm_stderr": 0.047323326159788126 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.43842364532019706, + "acc_stderr": 0.03491207857486519, + "acc_norm": 0.43842364532019706, + "acc_norm_stderr": 0.03491207857486519 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.567741935483871, + "acc_stderr": 0.028181739720019406, + "acc_norm": 0.567741935483871, + "acc_norm_stderr": 0.028181739720019406 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7350427350427351, + "acc_stderr": 0.028911208802749472, + "acc_norm": 0.7350427350427351, + "acc_norm_stderr": 0.028911208802749472 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5207547169811321, + "acc_stderr": 0.03074634997572347, + "acc_norm": 0.5207547169811321, + "acc_norm_stderr": 0.03074634997572347 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5636363636363636, + "acc_stderr": 0.04750185058907296, + "acc_norm": 0.5636363636363636, + 
"acc_norm_stderr": 0.04750185058907296 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.35555555555555557, + "acc_stderr": 0.029185714949857406, + "acc_norm": 0.35555555555555557, + "acc_norm_stderr": 0.029185714949857406 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3443708609271523, + "acc_stderr": 0.038796870240733264, + "acc_norm": 0.3443708609271523, + "acc_norm_stderr": 0.038796870240733264 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6169154228855721, + "acc_stderr": 0.0343751933733825, + "acc_norm": 0.6169154228855721, + "acc_norm_stderr": 0.0343751933733825 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.45664739884393063, + "acc_stderr": 0.03798106566014498, + "acc_norm": 0.45664739884393063, + "acc_norm_stderr": 0.03798106566014498 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.4523809523809524, + "acc_stderr": 0.025634258115554965, + "acc_norm": 0.4523809523809524, + "acc_norm_stderr": 0.025634258115554965 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5138888888888888, + "acc_stderr": 0.041795966175810016, + "acc_norm": 0.5138888888888888, + "acc_norm_stderr": 0.041795966175810016 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.71, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.71, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.546242774566474, + "acc_stderr": 0.026803720583206184, + "acc_norm": 0.546242774566474, + "acc_norm_stderr": 0.026803720583206184 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.44785276073619634, + "acc_stderr": 0.03906947479456601, + "acc_norm": 0.44785276073619634, + "acc_norm_stderr": 0.03906947479456601 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5617283950617284, + "acc_stderr": 0.027607914087400477, + 
"acc_norm": 0.5617283950617284, + "acc_norm_stderr": 0.027607914087400477 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6839378238341969, + "acc_stderr": 0.033553973696861736, + "acc_norm": 0.6839378238341969, + "acc_norm_stderr": 0.033553973696861736 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.5263157894736842, + "acc_stderr": 0.046970851366478626, + "acc_norm": 0.5263157894736842, + "acc_norm_stderr": 0.046970851366478626 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6293577981651376, + "acc_stderr": 0.02070745816435298, + "acc_norm": 0.6293577981651376, + "acc_norm_stderr": 0.02070745816435298 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4126984126984127, + "acc_stderr": 0.04403438954768176, + "acc_norm": 0.4126984126984127, + "acc_norm_stderr": 0.04403438954768176 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5326797385620915, + "acc_stderr": 0.02856869975222587, + "acc_norm": 0.5326797385620915, + "acc_norm_stderr": 0.02856869975222587 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.52, + "acc_stderr": 0.05021167315686779, + "acc_norm": 0.52, + "acc_norm_stderr": 0.05021167315686779 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.628099173553719, + "acc_stderr": 0.044120158066245044, + "acc_norm": 0.628099173553719, + "acc_norm_stderr": 0.044120158066245044 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5855263157894737, + "acc_stderr": 0.04008973785779206, + "acc_norm": 0.5855263157894737, + "acc_norm_stderr": 0.04008973785779206 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.48366013071895425, + "acc_stderr": 0.020217030653186453, + "acc_norm": 0.48366013071895425, + "acc_norm_stderr": 0.020217030653186453 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 
0.375886524822695, + "acc_stderr": 0.028893955412115886, + "acc_norm": 0.375886524822695, + "acc_norm_stderr": 0.028893955412115886 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3482142857142857, + "acc_stderr": 0.04521829902833585, + "acc_norm": 0.3482142857142857, + "acc_norm_stderr": 0.04521829902833585 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4861111111111111, + "acc_stderr": 0.03408655867977749, + "acc_norm": 0.4861111111111111, + "acc_norm_stderr": 0.03408655867977749 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2871508379888268, + "acc_stderr": 0.01513160884996376, + "acc_norm": 0.2871508379888268, + "acc_norm_stderr": 0.01513160884996376 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.63, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.63, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.49264705882352944, + "acc_stderr": 0.030369552523902173, + "acc_norm": 0.49264705882352944, + "acc_norm_stderr": 0.030369552523902173 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5265306122448979, + "acc_stderr": 0.03196412734523272, + "acc_norm": 0.5265306122448979, + "acc_norm_stderr": 0.03196412734523272 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7552742616033755, + "acc_stderr": 0.027985699387036416, + "acc_norm": 0.7552742616033755, + "acc_norm_stderr": 0.027985699387036416 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.4061277705345502, + "acc_stderr": 0.012543154588412927, + "acc_norm": 0.4061277705345502, + "acc_norm_stderr": 0.012543154588412927 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6323529411764706, + "acc_stderr": 0.03384132045674118, + "acc_norm": 0.6323529411764706, + "acc_norm_stderr": 
0.03384132045674118 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6787878787878788, + "acc_stderr": 0.03646204963253812, + "acc_norm": 0.6787878787878788, + "acc_norm_stderr": 0.03646204963253812 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.4112607099143207, + "mc1_stderr": 0.017225627083660856, + "mc2": 0.5927070170654897, + "mc2_stderr": 0.0162035946763131 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5962219598583235, + "acc_stderr": 0.016869031540298625, + "acc_norm": 0.6103896103896104, + "acc_norm_stderr": 0.0167661616718935 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + 
"harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Edentns/DataVortexS-10.7B-dpo-v1.8", + "model_sha": "91c4b182d2c6d514b5f0205001e7ca4e37cfbe60", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Edentns/DataVortexS-10.7B-dpo-v1.9/result_2024-02-02 00:11:03.json b/Edentns/DataVortexS-10.7B-dpo-v1.9/result_2024-02-02 00:11:03.json new file mode 100644 index 0000000000000000000000000000000000000000..2c397c3eda20e656d0f493e408ffef01878b4f4f --- /dev/null +++ b/Edentns/DataVortexS-10.7B-dpo-v1.9/result_2024-02-02 00:11:03.json @@ -0,0 +1,444 @@ 
+{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4718430034129693, + "acc_stderr": 0.0145882041051022, + "acc_norm": 0.5332764505119454, + "acc_norm_stderr": 0.01457899585960581 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4615614419438359, + "acc_stderr": 0.004975014529648634, + "acc_norm": 0.6256721768571998, + "acc_norm_stderr": 0.004829598101635782 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6608187134502924, + "acc_stderr": 0.03631053496488905, + "acc_norm": 0.6608187134502924, + "acc_norm_stderr": 0.03631053496488905 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6796116504854369, + "acc_stderr": 0.04620284082280041, + "acc_norm": 0.6796116504854369, + "acc_norm_stderr": 0.04620284082280041 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.7011494252873564, + "acc_stderr": 0.016369256815093113, + "acc_norm": 0.7011494252873564, + "acc_norm_stderr": 0.016369256815093113 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.042925967182569816, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.042925967182569816 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4553191489361702, + "acc_stderr": 0.03255525359340355, + "acc_norm": 0.4553191489361702, + "acc_norm_stderr": 0.03255525359340355 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42168674698795183, + "acc_stderr": 0.03844453181770917, + "acc_norm": 0.42168674698795183, + "acc_norm_stderr": 0.03844453181770917 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5627009646302251, + "acc_stderr": 0.028173917761762906, + "acc_norm": 0.5627009646302251, + "acc_norm_stderr": 0.028173917761762906 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5291479820627802, + "acc_stderr": 0.03350073248773404, + "acc_norm": 0.5291479820627802, + "acc_norm_stderr": 
0.03350073248773404 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6030534351145038, + "acc_stderr": 0.04291135671009224, + "acc_norm": 0.6030534351145038, + "acc_norm_stderr": 0.04291135671009224 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.48, + "acc_stderr": 0.05021167315686779, + "acc_norm": 0.48, + "acc_norm_stderr": 0.05021167315686779 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.03358618145732524, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.03358618145732524 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4896551724137931, + "acc_stderr": 0.041657747757287644, + "acc_norm": 0.4896551724137931, + "acc_norm_stderr": 0.041657747757287644 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.043364327079931785, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.043364327079931785 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.49159663865546216, + "acc_stderr": 0.0324739027656967, + "acc_norm": 0.49159663865546216, + "acc_norm_stderr": 0.0324739027656967 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5205128205128206, + "acc_stderr": 0.02532966316348994, + "acc_norm": 0.5205128205128206, + "acc_norm_stderr": 0.02532966316348994 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.62, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.62, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6018518518518519, + "acc_stderr": 0.047323326159788126, + "acc_norm": 0.6018518518518519, + "acc_norm_stderr": 0.047323326159788126 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3694581280788177, + "acc_stderr": 0.03395970381998574, + "acc_norm": 
0.3694581280788177, + "acc_norm_stderr": 0.03395970381998574 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5870967741935483, + "acc_stderr": 0.028009138125400387, + "acc_norm": 0.5870967741935483, + "acc_norm_stderr": 0.028009138125400387 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7478632478632479, + "acc_stderr": 0.028447965476231022, + "acc_norm": 0.7478632478632479, + "acc_norm_stderr": 0.028447965476231022 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5433962264150943, + "acc_stderr": 0.030656748696739438, + "acc_norm": 0.5433962264150943, + "acc_norm_stderr": 0.030656748696739438 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5636363636363636, + "acc_stderr": 0.04750185058907296, + "acc_norm": 0.5636363636363636, + "acc_norm_stderr": 0.04750185058907296 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.34444444444444444, + "acc_stderr": 0.02897264888484427, + "acc_norm": 0.34444444444444444, + "acc_norm_stderr": 0.02897264888484427 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3443708609271523, + "acc_stderr": 0.038796870240733264, + "acc_norm": 0.3443708609271523, + "acc_norm_stderr": 0.038796870240733264 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6218905472636815, + "acc_stderr": 0.03428867848778658, + "acc_norm": 0.6218905472636815, + "acc_norm_stderr": 0.03428867848778658 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.43352601156069365, + "acc_stderr": 0.03778621079092055, + "acc_norm": 0.43352601156069365, + "acc_norm_stderr": 0.03778621079092055 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.35978835978835977, + "acc_stderr": 0.024718075944129277, + "acc_norm": 0.35978835978835977, + "acc_norm_stderr": 0.024718075944129277 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4652777777777778, + "acc_stderr": 0.041711158581816184, + "acc_norm": 0.4652777777777778, + "acc_norm_stderr": 0.041711158581816184 + }, + 
"harness|ko_mmlu_college_chemistry|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.78, + "acc_stderr": 0.04163331998932262, + "acc_norm": 0.78, + "acc_norm_stderr": 0.04163331998932262 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.476878612716763, + "acc_stderr": 0.026890297881303125, + "acc_norm": 0.476878612716763, + "acc_norm_stderr": 0.026890297881303125 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.49079754601226994, + "acc_stderr": 0.039277056007874414, + "acc_norm": 0.49079754601226994, + "acc_norm_stderr": 0.039277056007874414 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5401234567901234, + "acc_stderr": 0.02773102275353928, + "acc_norm": 0.5401234567901234, + "acc_norm_stderr": 0.02773102275353928 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6424870466321243, + "acc_stderr": 0.03458816042181012, + "acc_norm": 0.6424870466321243, + "acc_norm_stderr": 0.03458816042181012 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.35964912280701755, + "acc_stderr": 0.04514496132873633, + "acc_norm": 0.35964912280701755, + "acc_norm_stderr": 0.04514496132873633 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6293577981651376, + "acc_stderr": 0.020707458164352984, + "acc_norm": 0.6293577981651376, + "acc_norm_stderr": 0.020707458164352984 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3253968253968254, + "acc_stderr": 0.041905964388711366, + "acc_norm": 0.3253968253968254, + "acc_norm_stderr": 0.041905964388711366 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5326797385620915, + "acc_stderr": 0.02856869975222588, + "acc_norm": 0.5326797385620915, + "acc_norm_stderr": 0.02856869975222588 + }, 
+ "harness|ko_mmlu_business_ethics|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6859504132231405, + "acc_stderr": 0.0423696475304102, + "acc_norm": 0.6859504132231405, + "acc_norm_stderr": 0.0423696475304102 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5526315789473685, + "acc_stderr": 0.0404633688397825, + "acc_norm": 0.5526315789473685, + "acc_norm_stderr": 0.0404633688397825 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4591503267973856, + "acc_stderr": 0.020160213617222516, + "acc_norm": 0.4591503267973856, + "acc_norm_stderr": 0.020160213617222516 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3475177304964539, + "acc_stderr": 0.02840662780959095, + "acc_norm": 0.3475177304964539, + "acc_norm_stderr": 0.02840662780959095 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.30357142857142855, + "acc_stderr": 0.04364226155841044, + "acc_norm": 0.30357142857142855, + "acc_norm_stderr": 0.04364226155841044 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.03293377139415191, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.03293377139415191 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.22793296089385476, + "acc_stderr": 0.014030149950805097, + "acc_norm": 0.22793296089385476, + "acc_norm_stderr": 0.014030149950805097 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.40441176470588236, + "acc_stderr": 0.029812630701569746, + "acc_norm": 0.40441176470588236, + 
"acc_norm_stderr": 0.029812630701569746 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5714285714285714, + "acc_stderr": 0.03168091161233881, + "acc_norm": 0.5714285714285714, + "acc_norm_stderr": 0.03168091161233881 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6624472573839663, + "acc_stderr": 0.030781549102026212, + "acc_norm": 0.6624472573839663, + "acc_norm_stderr": 0.030781549102026212 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3513689700130378, + "acc_stderr": 0.012192969457484035, + "acc_norm": 0.3513689700130378, + "acc_norm_stderr": 0.012192969457484035 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6274509803921569, + "acc_stderr": 0.033933885849584046, + "acc_norm": 0.6274509803921569, + "acc_norm_stderr": 0.033933885849584046 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5757575757575758, + "acc_stderr": 0.03859268142070262, + "acc_norm": 0.5757575757575758, + "acc_norm_stderr": 0.03859268142070262 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3108935128518972, + "mc1_stderr": 0.016203316673559696, + "mc2": 0.4900739935604153, + "mc2_stderr": 0.01572393085880814 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.602125147579693, + "acc_stderr": 0.016827959054733395, + "acc_norm": 0.615112160566706, + "acc_norm_stderr": 0.016728579701498644 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 
1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + 
"harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Edentns/DataVortexS-10.7B-dpo-v1.9", + "model_sha": "b9a06af3584f2e115f613e7086549ef6e67b5471", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Edentns/DataVortexS-10.7B-v0.1/result_2024-01-10 12:11:10.json b/Edentns/DataVortexS-10.7B-v0.1/result_2024-01-10 12:11:10.json new file mode 100644 index 0000000000000000000000000000000000000000..1f1354f8d563693fbcba1eb92662f035c9e4dca6 --- /dev/null +++ b/Edentns/DataVortexS-10.7B-v0.1/result_2024-01-10 12:11:10.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.24061433447098976, + "acc_stderr": 0.012491468532390576, + "acc_norm": 0.2883959044368601, + "acc_norm_stderr": 0.013238394422428171 + }, + "harness|ko_hellaswag|10": { + "acc": 0.30720971917944634, + "acc_stderr": 0.00460394243986156, + "acc_norm": 0.3979286994622585, + "acc_norm_stderr": 0.004884702412456093 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.40350877192982454, + "acc_stderr": 0.03762738699917055, + "acc_norm": 0.40350877192982454, + "acc_norm_stderr": 0.03762738699917055 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.33980582524271846, + "acc_stderr": 0.046897659372781335, + "acc_norm": 0.33980582524271846, + "acc_norm_stderr": 0.046897659372781335 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4163473818646232, + "acc_stderr": 0.017627948030430298, + "acc_norm": 0.4163473818646232, + "acc_norm_stderr": 0.017627948030430298 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.04171654161354544, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.04171654161354544 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + 
"acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.31063829787234043, + "acc_stderr": 0.03025123757921317, + "acc_norm": 0.31063829787234043, + "acc_norm_stderr": 0.03025123757921317 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3493975903614458, + "acc_stderr": 0.03711725190740753, + "acc_norm": 0.3493975903614458, + "acc_norm_stderr": 0.03711725190740753 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4437299035369775, + "acc_stderr": 0.02821768355665231, + "acc_norm": 0.4437299035369775, + "acc_norm_stderr": 0.02821768355665231 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.35874439461883406, + "acc_stderr": 0.032190792004199956, + "acc_norm": 0.35874439461883406, + "acc_norm_stderr": 0.032190792004199956 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3816793893129771, + "acc_stderr": 0.042607351576445594, + "acc_norm": 0.3816793893129771, + "acc_norm_stderr": 0.042607351576445594 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.4696969696969697, + "acc_stderr": 0.03555804051763929, + "acc_norm": 0.4696969696969697, + "acc_norm_stderr": 0.03555804051763929 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3448275862068966, + "acc_stderr": 0.03960933549451207, + "acc_norm": 0.3448275862068966, + "acc_norm_stderr": 0.03960933549451207 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.18627450980392157, + "acc_stderr": 0.038739587141493524, + "acc_norm": 0.18627450980392157, + "acc_norm_stderr": 0.038739587141493524 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.35294117647058826, + "acc_stderr": 0.031041941304059274, + "acc_norm": 0.35294117647058826, + "acc_norm_stderr": 0.031041941304059274 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3769230769230769, + 
"acc_stderr": 0.024570975364225995, + "acc_norm": 0.3769230769230769, + "acc_norm_stderr": 0.024570975364225995 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.47, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.47, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.046166311118017125, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.046166311118017125 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3103448275862069, + "acc_stderr": 0.03255086769970103, + "acc_norm": 0.3103448275862069, + "acc_norm_stderr": 0.03255086769970103 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3935483870967742, + "acc_stderr": 0.027791878753132274, + "acc_norm": 0.3935483870967742, + "acc_norm_stderr": 0.027791878753132274 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6196581196581197, + "acc_stderr": 0.03180425204384099, + "acc_norm": 0.6196581196581197, + "acc_norm_stderr": 0.03180425204384099 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.37358490566037733, + "acc_stderr": 0.029773082713319878, + "acc_norm": 0.37358490566037733, + "acc_norm_stderr": 0.029773082713319878 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4090909090909091, + "acc_stderr": 0.047093069786618966, + "acc_norm": 0.4090909090909091, + "acc_norm_stderr": 0.047093069786618966 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.026719240783712163, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.026719240783712163 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33112582781456956, + "acc_stderr": 0.038425817186598696, + "acc_norm": 0.33112582781456956, + "acc_norm_stderr": 0.038425817186598696 + }, + 
"harness|ko_mmlu_sociology|5": { + "acc": 0.48258706467661694, + "acc_stderr": 0.03533389234739245, + "acc_norm": 0.48258706467661694, + "acc_norm_stderr": 0.03533389234739245 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3988439306358382, + "acc_stderr": 0.03733626655383509, + "acc_norm": 0.3988439306358382, + "acc_norm_stderr": 0.03733626655383509 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.023266512213730564, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.023266512213730564 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2847222222222222, + "acc_stderr": 0.03773809990686935, + "acc_norm": 0.2847222222222222, + "acc_norm_stderr": 0.03773809990686935 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.430635838150289, + "acc_stderr": 0.026658800273672387, + "acc_norm": 0.430635838150289, + "acc_norm_stderr": 0.026658800273672387 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2822085889570552, + "acc_stderr": 0.03536117886664743, + "acc_norm": 0.2822085889570552, + "acc_norm_stderr": 0.03536117886664743 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.026869490744815247, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.026869490744815247 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.35233160621761656, + "acc_stderr": 0.03447478286414357, + "acc_norm": 0.35233160621761656, + "acc_norm_stderr": 0.03447478286414357 
+ }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2982456140350877, + "acc_stderr": 0.04303684033537316, + "acc_norm": 0.2982456140350877, + "acc_norm_stderr": 0.04303684033537316 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.42018348623853213, + "acc_stderr": 0.021162420048273522, + "acc_norm": 0.42018348623853213, + "acc_norm_stderr": 0.021162420048273522 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.23015873015873015, + "acc_stderr": 0.037649508797906024, + "acc_norm": 0.23015873015873015, + "acc_norm_stderr": 0.037649508797906024 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4117647058823529, + "acc_stderr": 0.02818059632825929, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.02818059632825929 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5537190082644629, + "acc_stderr": 0.0453793517794788, + "acc_norm": 0.5537190082644629, + "acc_norm_stderr": 0.0453793517794788 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3026315789473684, + "acc_stderr": 0.037385206761196686, + "acc_norm": 0.3026315789473684, + "acc_norm_stderr": 0.037385206761196686 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3088235294117647, + "acc_stderr": 0.018690850273595284, + "acc_norm": 0.3088235294117647, + "acc_norm_stderr": 0.018690850273595284 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.33687943262411346, + "acc_stderr": 0.028195534873966727, + "acc_norm": 0.33687943262411346, + "acc_norm_stderr": 0.028195534873966727 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3125, + "acc_stderr": 0.043994650575715215, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.043994650575715215 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.03167468706828977, + "acc_norm": 0.3148148148148148, + 
"acc_norm_stderr": 0.03167468706828977 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2346368715083799, + "acc_stderr": 0.014173044098303673, + "acc_norm": 0.2346368715083799, + "acc_norm_stderr": 0.014173044098303673 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.39338235294117646, + "acc_stderr": 0.029674288281311183, + "acc_norm": 0.39338235294117646, + "acc_norm_stderr": 0.029674288281311183 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.34285714285714286, + "acc_stderr": 0.03038726291954774, + "acc_norm": 0.34285714285714286, + "acc_norm_stderr": 0.03038726291954774 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.4641350210970464, + "acc_stderr": 0.03246338898055659, + "acc_norm": 0.4641350210970464, + "acc_norm_stderr": 0.03246338898055659 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2894393741851369, + "acc_stderr": 0.011582659702210233, + "acc_norm": 0.2894393741851369, + "acc_norm_stderr": 0.011582659702210233 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.3235294117647059, + "acc_stderr": 0.03283472056108567, + "acc_norm": 0.3235294117647059, + "acc_norm_stderr": 0.03283472056108567 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2606060606060606, + "acc_stderr": 0.034277431758165236, + "acc_norm": 0.2606060606060606, + "acc_norm_stderr": 0.034277431758165236 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.25091799265605874, + "mc1_stderr": 0.015176985027707687, + "mc2": 0.44719810330395326, + "mc2_stderr": 0.0165562423178332 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.17119244391971664, + "acc_stderr": 
0.012950423337299044, + "acc_norm": 0.2762691853600944, + "acc_norm_stderr": 0.015373387500464464 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + 
"harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Edentns/DataVortexS-10.7B-v0.1", + "model_sha": "9160dba1ce26ebcecd1f8ebca001375dc1f41b1f", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Edentns/DataVortexS-10.7B-v0.2/result_2024-01-04 15:44:32.json b/Edentns/DataVortexS-10.7B-v0.2/result_2024-01-04 15:44:32.json new file mode 100644 index 0000000000000000000000000000000000000000..e88c85b814748a8a6a75578d2d27ae46eb1ef853 --- /dev/null +++ b/Edentns/DataVortexS-10.7B-v0.2/result_2024-01-04 15:44:32.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3216723549488055, + "acc_stderr": 0.013650488084494164, + "acc_norm": 0.3873720136518771, + "acc_norm_stderr": 0.014235872487909876 + }, + "harness|ko_hellaswag|10": { + "acc": 0.38856801433977295, + "acc_stderr": 0.00486428617673183, + "acc_norm": 0.5073690499900418, + "acc_norm_stderr": 0.004989239462835226 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.49122807017543857, + "acc_stderr": 0.03834234744164993, + 
"acc_norm": 0.49122807017543857, + "acc_norm_stderr": 0.03834234744164993 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5048543689320388, + "acc_stderr": 0.049505043821289195, + "acc_norm": 0.5048543689320388, + "acc_norm_stderr": 0.049505043821289195 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5044699872286079, + "acc_stderr": 0.01787924897058436, + "acc_norm": 0.5044699872286079, + "acc_norm_stderr": 0.01787924897058436 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.35555555555555557, + "acc_stderr": 0.04135176749720386, + "acc_norm": 0.35555555555555557, + "acc_norm_stderr": 0.04135176749720386 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3659574468085106, + "acc_stderr": 0.031489558297455304, + "acc_norm": 0.3659574468085106, + "acc_norm_stderr": 0.031489558297455304 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.45180722891566266, + "acc_stderr": 0.03874371556587953, + "acc_norm": 0.45180722891566266, + "acc_norm_stderr": 0.03874371556587953 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4919614147909968, + "acc_stderr": 0.028394421370984545, + "acc_norm": 0.4919614147909968, + "acc_norm_stderr": 0.028394421370984545 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4260089686098655, + "acc_stderr": 0.03318833286217281, + "acc_norm": 0.4260089686098655, + "acc_norm_stderr": 0.03318833286217281 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3969465648854962, + "acc_stderr": 0.04291135671009225, + "acc_norm": 0.3969465648854962, + "acc_norm_stderr": 0.04291135671009225 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.035402943770953675, + 
"acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.035402943770953675 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4, + "acc_stderr": 0.04082482904638628, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04082482904638628 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.17647058823529413, + "acc_stderr": 0.037932811853078084, + "acc_norm": 0.17647058823529413, + "acc_norm_stderr": 0.037932811853078084 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.40336134453781514, + "acc_stderr": 0.031866081214088314, + "acc_norm": 0.40336134453781514, + "acc_norm_stderr": 0.031866081214088314 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.35128205128205126, + "acc_stderr": 0.024203665177902792, + "acc_norm": 0.35128205128205126, + "acc_norm_stderr": 0.024203665177902792 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.48, + "acc_stderr": 0.05021167315686781, + "acc_norm": 0.48, + "acc_norm_stderr": 0.05021167315686781 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.35960591133004927, + "acc_stderr": 0.03376458246509566, + "acc_norm": 0.35960591133004927, + "acc_norm_stderr": 0.03376458246509566 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4290322580645161, + "acc_stderr": 0.02815603653823321, + "acc_norm": 0.4290322580645161, + "acc_norm_stderr": 0.02815603653823321 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6410256410256411, + "acc_stderr": 0.03142616993791923, + "acc_norm": 0.6410256410256411, + "acc_norm_stderr": 0.03142616993791923 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.39245283018867927, + 
"acc_stderr": 0.03005258057955785, + "acc_norm": 0.39245283018867927, + "acc_norm_stderr": 0.03005258057955785 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.43636363636363634, + "acc_stderr": 0.04750185058907297, + "acc_norm": 0.43636363636363634, + "acc_norm_stderr": 0.04750185058907297 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.027840811495871923, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.027840811495871923 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.038020397601079024, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.038020397601079024 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5373134328358209, + "acc_stderr": 0.03525675167467973, + "acc_norm": 0.5373134328358209, + "acc_norm_stderr": 0.03525675167467973 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.36416184971098264, + "acc_stderr": 0.03669072477416907, + "acc_norm": 0.36416184971098264, + "acc_norm_stderr": 0.03669072477416907 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.29894179894179895, + "acc_stderr": 0.023577604791655816, + "acc_norm": 0.29894179894179895, + "acc_norm_stderr": 0.023577604791655816 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2986111111111111, + "acc_stderr": 0.03827052357950756, + "acc_norm": 0.2986111111111111, + "acc_norm_stderr": 0.03827052357950756 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.57, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.57, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.43641618497109824, + "acc_stderr": 0.026700545424943687, + "acc_norm": 0.43641618497109824, + "acc_norm_stderr": 0.026700545424943687 + }, + 
"harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.34355828220858897, + "acc_stderr": 0.037311335196738925, + "acc_norm": 0.34355828220858897, + "acc_norm_stderr": 0.037311335196738925 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4691358024691358, + "acc_stderr": 0.027767689606833935, + "acc_norm": 0.4691358024691358, + "acc_norm_stderr": 0.027767689606833935 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.047609522856952344, + "acc_norm": 0.34, + "acc_norm_stderr": 0.047609522856952344 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.39896373056994816, + "acc_stderr": 0.03533999094065696, + "acc_norm": 0.39896373056994816, + "acc_norm_stderr": 0.03533999094065696 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.042270544512321984, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.042270544512321984 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.41651376146788993, + "acc_stderr": 0.02113637650403088, + "acc_norm": 0.41651376146788993, + "acc_norm_stderr": 0.02113637650403088 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.23015873015873015, + "acc_stderr": 0.03764950879790606, + "acc_norm": 0.23015873015873015, + "acc_norm_stderr": 0.03764950879790606 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.027826109307283693, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.027826109307283693 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.48760330578512395, + "acc_stderr": 0.04562951548180765, + "acc_norm": 0.48760330578512395, + "acc_norm_stderr": 0.04562951548180765 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3223684210526316, + "acc_stderr": 0.038035102483515854, + "acc_norm": 0.3223684210526316, + 
"acc_norm_stderr": 0.038035102483515854 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.33169934640522875, + "acc_stderr": 0.01904748523936038, + "acc_norm": 0.33169934640522875, + "acc_norm_stderr": 0.01904748523936038 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.30141843971631205, + "acc_stderr": 0.027374128882631153, + "acc_norm": 0.30141843971631205, + "acc_norm_stderr": 0.027374128882631153 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.042878587513404565, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.042878587513404565 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.39814814814814814, + "acc_stderr": 0.033384734032074016, + "acc_norm": 0.39814814814814814, + "acc_norm_stderr": 0.033384734032074016 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23910614525139665, + "acc_stderr": 0.014265554192331149, + "acc_norm": 0.23910614525139665, + "acc_norm_stderr": 0.014265554192331149 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.46691176470588236, + "acc_stderr": 0.030306257722468304, + "acc_norm": 0.46691176470588236, + "acc_norm_stderr": 0.030306257722468304 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4489795918367347, + "acc_stderr": 0.0318421386668758, + "acc_norm": 0.4489795918367347, + "acc_norm_stderr": 0.0318421386668758 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.45569620253164556, + "acc_stderr": 0.032419206846933335, + "acc_norm": 0.45569620253164556, + "acc_norm_stderr": 0.032419206846933335 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 
0.27249022164276404, + "acc_stderr": 0.01137165829431153, + "acc_norm": 0.27249022164276404, + "acc_norm_stderr": 0.01137165829431153 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.034849415144292316, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.034849415144292316 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.38181818181818183, + "acc_stderr": 0.037937131711656344, + "acc_norm": 0.38181818181818183, + "acc_norm_stderr": 0.037937131711656344 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2839657282741738, + "mc1_stderr": 0.015785370858396704, + "mc2": 0.4469903503185055, + "mc2_stderr": 0.01535653314791522 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3541912632821724, + "acc_stderr": 0.016443175749214757, + "acc_norm": 0.448642266824085, + "acc_norm_stderr": 0.017099430514725792 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + 
"harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Edentns/DataVortexS-10.7B-v0.2", + "model_sha": "abf0a66a518d3cffae7059bc166ba427a612a360", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git 
a/Edentns/DataVortexS-10.7B-v0.3/result_2024-01-06 03:14:29.json b/Edentns/DataVortexS-10.7B-v0.3/result_2024-01-06 03:14:29.json new file mode 100644 index 0000000000000000000000000000000000000000..dff338baf977087c0761c2c1894e754e9d40a13f --- /dev/null +++ b/Edentns/DataVortexS-10.7B-v0.3/result_2024-01-06 03:14:29.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.26791808873720135, + "acc_stderr": 0.012942030195136423, + "acc_norm": 0.3387372013651877, + "acc_norm_stderr": 0.01383056892797433 + }, + "harness|ko_hellaswag|10": { + "acc": 0.33947420832503483, + "acc_stderr": 0.0047256309115203165, + "acc_norm": 0.42471619199362676, + "acc_norm_stderr": 0.004932896472460571 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.32748538011695905, + "acc_stderr": 0.035993357714560276, + "acc_norm": 0.32748538011695905, + "acc_norm_stderr": 0.035993357714560276 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.17475728155339806, + "acc_stderr": 0.037601780060266196, + "acc_norm": 0.17475728155339806, + "acc_norm_stderr": 0.037601780060266196 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3243933588761175, + "acc_stderr": 0.016740929047162716, + "acc_norm": 0.3243933588761175, + "acc_norm_stderr": 0.016740929047162716 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.038201699145179055, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.038201699145179055 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.22, + "acc_stderr": 0.0416333199893227, + "acc_norm": 0.22, + "acc_norm_stderr": 0.0416333199893227 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2553191489361702, + "acc_stderr": 0.028504856470514185, + "acc_norm": 0.2553191489361702, + "acc_norm_stderr": 0.028504856470514185 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3192771084337349, + "acc_stderr": 0.0362933532994786, + "acc_norm": 0.3192771084337349, + "acc_norm_stderr": 0.0362933532994786 + }, 
+ "harness|ko_mmlu_philosophy|5": { + "acc": 0.3408360128617363, + "acc_stderr": 0.02692084126077616, + "acc_norm": 0.3408360128617363, + "acc_norm_stderr": 0.02692084126077616 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.43946188340807174, + "acc_stderr": 0.03331092511038179, + "acc_norm": 0.43946188340807174, + "acc_norm_stderr": 0.03331092511038179 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2748091603053435, + "acc_stderr": 0.039153454088478354, + "acc_norm": 0.2748091603053435, + "acc_norm_stderr": 0.039153454088478354 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.03191178226713547, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.03191178226713547 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3103448275862069, + "acc_stderr": 0.03855289616378949, + "acc_norm": 0.3103448275862069, + "acc_norm_stderr": 0.03855289616378949 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.14705882352941177, + "acc_stderr": 0.03524068951567446, + "acc_norm": 0.14705882352941177, + "acc_norm_stderr": 0.03524068951567446 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.028657491285071973, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.028657491285071973 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3435897435897436, + "acc_stderr": 0.024078696580635467, + "acc_norm": 0.3435897435897436, + "acc_norm_stderr": 0.024078696580635467 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 
0.04408440022768079 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.25, + "acc_stderr": 0.04186091791394607, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04186091791394607 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.19704433497536947, + "acc_stderr": 0.02798672466673622, + "acc_norm": 0.19704433497536947, + "acc_norm_stderr": 0.02798672466673622 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.29354838709677417, + "acc_stderr": 0.02590608702131929, + "acc_norm": 0.29354838709677417, + "acc_norm_stderr": 0.02590608702131929 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.02934311479809447, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.02934311479809447 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2528301886792453, + "acc_stderr": 0.026749899771241238, + "acc_norm": 0.2528301886792453, + "acc_norm_stderr": 0.026749899771241238 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2909090909090909, + "acc_stderr": 0.04350271442923243, + "acc_norm": 0.2909090909090909, + "acc_norm_stderr": 0.04350271442923243 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25555555555555554, + "acc_stderr": 0.02659393910184407, + "acc_norm": 0.25555555555555554, + "acc_norm_stderr": 0.02659393910184407 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + "acc_stderr": 0.03684881521389024, + "acc_norm": 0.2847682119205298, + "acc_norm_stderr": 0.03684881521389024 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.35323383084577115, + "acc_stderr": 0.03379790611796776, + "acc_norm": 0.35323383084577115, + "acc_norm_stderr": 0.03379790611796776 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3236994219653179, + "acc_stderr": 0.0356760379963917, + "acc_norm": 0.3236994219653179, + "acc_norm_stderr": 0.0356760379963917 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.24867724867724866, + "acc_stderr": 
0.022261817692400175, + "acc_norm": 0.24867724867724866, + "acc_norm_stderr": 0.022261817692400175 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.24305555555555555, + "acc_stderr": 0.035868792800803406, + "acc_norm": 0.24305555555555555, + "acc_norm_stderr": 0.035868792800803406 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.26011560693641617, + "acc_stderr": 0.023618678310069363, + "acc_norm": 0.26011560693641617, + "acc_norm_stderr": 0.023618678310069363 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.24539877300613497, + "acc_stderr": 0.03380939813943354, + "acc_norm": 0.24539877300613497, + "acc_norm_stderr": 0.03380939813943354 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.25308641975308643, + "acc_stderr": 0.024191808600713, + "acc_norm": 0.25308641975308643, + "acc_norm_stderr": 0.024191808600713 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.35233160621761656, + "acc_stderr": 0.03447478286414358, + "acc_norm": 0.35233160621761656, + "acc_norm_stderr": 0.03447478286414358 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.040969851398436716, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.040969851398436716 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.28256880733944956, + "acc_stderr": 0.019304243497707152, + "acc_norm": 0.28256880733944956, + "acc_norm_stderr": 0.019304243497707152 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 
0.2619047619047619, + "acc_stderr": 0.039325376803928704, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.039325376803928704 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.27124183006535946, + "acc_stderr": 0.025457756696667867, + "acc_norm": 0.27124183006535946, + "acc_norm_stderr": 0.025457756696667867 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.24793388429752067, + "acc_stderr": 0.039418975265163025, + "acc_norm": 0.24793388429752067, + "acc_norm_stderr": 0.039418975265163025 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.19078947368421054, + "acc_stderr": 0.031975658210325, + "acc_norm": 0.19078947368421054, + "acc_norm_stderr": 0.031975658210325 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.25163398692810457, + "acc_stderr": 0.017555818091322277, + "acc_norm": 0.25163398692810457, + "acc_norm_stderr": 0.017555818091322277 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2730496453900709, + "acc_stderr": 0.026577860943307857, + "acc_norm": 0.2730496453900709, + "acc_norm_stderr": 0.026577860943307857 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.04287858751340456, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.04287858751340456 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.39351851851851855, + "acc_stderr": 0.03331747876370312, + "acc_norm": 0.39351851851851855, + "acc_norm_stderr": 0.03331747876370312 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.22346368715083798, + "acc_stderr": 0.01393206863857977, + "acc_norm": 0.22346368715083798, + "acc_norm_stderr": 0.01393206863857977 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + 
"harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3860294117647059, + "acc_stderr": 0.029573269134411124, + "acc_norm": 0.3860294117647059, + "acc_norm_stderr": 0.029573269134411124 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.39591836734693875, + "acc_stderr": 0.03130802899065685, + "acc_norm": 0.39591836734693875, + "acc_norm_stderr": 0.03130802899065685 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2911392405063291, + "acc_stderr": 0.02957160106575337, + "acc_norm": 0.2911392405063291, + "acc_norm_stderr": 0.02957160106575337 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.23728813559322035, + "acc_stderr": 0.010865436690780262, + "acc_norm": 0.23728813559322035, + "acc_norm_stderr": 0.010865436690780262 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.03166009679399812, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.03166009679399812 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03225078108306289, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03225078108306289 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2802937576499388, + "mc1_stderr": 0.01572313952460874, + "mc2": 0.4609143085702979, + "mc2_stderr": 0.015478893728638463 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2939787485242031, + "acc_stderr": 0.015663242569091122, + "acc_norm": 0.371900826446281, + "acc_norm_stderr": 0.016616612843224944 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + 
"harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + 
"harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Edentns/DataVortexS-10.7B-v0.3", + "model_sha": "00c0018dd81707ea37ce355e68f5e6d241a63261", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Edentns/DataVortexS-10.7B-v0.4/result_2024-01-11 00:44:15.json b/Edentns/DataVortexS-10.7B-v0.4/result_2024-01-11 00:44:15.json new file mode 100644 index 0000000000000000000000000000000000000000..d55a9eff217791c15da49dc54a0474cb284e534e --- /dev/null +++ b/Edentns/DataVortexS-10.7B-v0.4/result_2024-01-11 00:44:15.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.41723549488054607, + "acc_stderr": 0.01440982551840308, + "acc_norm": 0.49402730375426623, + "acc_norm_stderr": 0.014610348300255802 + }, + "harness|ko_hellaswag|10": { + "acc": 0.43537143995220073, + "acc_stderr": 0.0049479226926888355, + "acc_norm": 0.59699263095001, + "acc_norm_stderr": 0.004894997736719047 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6549707602339181, + "acc_stderr": 0.036459813773888065, + "acc_norm": 0.6549707602339181, + "acc_norm_stderr": 0.036459813773888065 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6310679611650486, + "acc_stderr": 0.0477761518115674, + "acc_norm": 0.6310679611650486, + "acc_norm_stderr": 0.0477761518115674 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6781609195402298, + "acc_stderr": 0.01670638141505791, + "acc_norm": 0.6781609195402298, + 
"acc_norm_stderr": 0.01670638141505791 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45925925925925926, + "acc_stderr": 0.04304979692464244, + "acc_norm": 0.45925925925925926, + "acc_norm_stderr": 0.04304979692464244 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.5106382978723404, + "acc_stderr": 0.03267862331014063, + "acc_norm": 0.5106382978723404, + "acc_norm_stderr": 0.03267862331014063 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.5060240963855421, + "acc_stderr": 0.038922121953330446, + "acc_norm": 0.5060240963855421, + "acc_norm_stderr": 0.038922121953330446 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6141479099678456, + "acc_stderr": 0.027648149599751468, + "acc_norm": 0.6141479099678456, + "acc_norm_stderr": 0.027648149599751468 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5560538116591929, + "acc_stderr": 0.03334625674242728, + "acc_norm": 0.5560538116591929, + "acc_norm_stderr": 0.03334625674242728 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6641221374045801, + "acc_stderr": 0.04142313771996665, + "acc_norm": 0.6641221374045801, + "acc_norm_stderr": 0.04142313771996665 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956914, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956914 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7272727272727273, + "acc_stderr": 0.031730712390717244, + "acc_norm": 0.7272727272727273, + "acc_norm_stderr": 0.031730712390717244 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5655172413793104, + "acc_stderr": 0.04130740879555497, + "acc_norm": 0.5655172413793104, + "acc_norm_stderr": 0.04130740879555497 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.04617034827006718, + "acc_norm": 
0.3137254901960784, + "acc_norm_stderr": 0.04617034827006718 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6386554621848739, + "acc_stderr": 0.031204691225150016, + "acc_norm": 0.6386554621848739, + "acc_norm_stderr": 0.031204691225150016 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5307692307692308, + "acc_stderr": 0.025302958890850154, + "acc_norm": 0.5307692307692308, + "acc_norm_stderr": 0.025302958890850154 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.59, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.59, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6574074074074074, + "acc_stderr": 0.045879047413018105, + "acc_norm": 0.6574074074074074, + "acc_norm_stderr": 0.045879047413018105 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4039408866995074, + "acc_stderr": 0.03452453903822039, + "acc_norm": 0.4039408866995074, + "acc_norm_stderr": 0.03452453903822039 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6580645161290323, + "acc_stderr": 0.026985289576552732, + "acc_norm": 0.6580645161290323, + "acc_norm_stderr": 0.026985289576552732 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7948717948717948, + "acc_stderr": 0.02645350805404036, + "acc_norm": 0.7948717948717948, + "acc_norm_stderr": 0.02645350805404036 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5584905660377358, + "acc_stderr": 0.030561590426731837, + "acc_norm": 0.5584905660377358, + "acc_norm_stderr": 0.030561590426731837 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5909090909090909, + "acc_stderr": 0.04709306978661895, + "acc_norm": 0.5909090909090909, + "acc_norm_stderr": 0.04709306978661895 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3333333333333333, 
+ "acc_stderr": 0.028742040903948485, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.028742040903948485 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3443708609271523, + "acc_stderr": 0.038796870240733264, + "acc_norm": 0.3443708609271523, + "acc_norm_stderr": 0.038796870240733264 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.03333333333333333, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.03333333333333333 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5664739884393064, + "acc_stderr": 0.03778621079092056, + "acc_norm": 0.5664739884393064, + "acc_norm_stderr": 0.03778621079092056 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.42063492063492064, + "acc_stderr": 0.025424835086923996, + "acc_norm": 0.42063492063492064, + "acc_norm_stderr": 0.025424835086923996 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5486111111111112, + "acc_stderr": 0.041614023984032786, + "acc_norm": 0.5486111111111112, + "acc_norm_stderr": 0.041614023984032786 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.72, + "acc_stderr": 0.04512608598542126, + "acc_norm": 0.72, + "acc_norm_stderr": 0.04512608598542126 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.6069364161849711, + "acc_stderr": 0.026296227915613677, + "acc_norm": 0.6069364161849711, + "acc_norm_stderr": 0.026296227915613677 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5276073619631901, + "acc_stderr": 0.039223782906109894, + "acc_norm": 0.5276073619631901, + "acc_norm_stderr": 0.039223782906109894 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6234567901234568, + "acc_stderr": 0.026959344518747787, + "acc_norm": 0.6234567901234568, + "acc_norm_stderr": 0.026959344518747787 + }, + "harness|ko_mmlu_college_mathematics|5": { 
+ "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7046632124352331, + "acc_stderr": 0.032922966391551414, + "acc_norm": 0.7046632124352331, + "acc_norm_stderr": 0.032922966391551414 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.47368421052631576, + "acc_stderr": 0.046970851366478626, + "acc_norm": 0.47368421052631576, + "acc_norm_stderr": 0.046970851366478626 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.7064220183486238, + "acc_stderr": 0.019525151122639667, + "acc_norm": 0.7064220183486238, + "acc_norm_stderr": 0.019525151122639667 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4365079365079365, + "acc_stderr": 0.04435932892851466, + "acc_norm": 0.4365079365079365, + "acc_norm_stderr": 0.04435932892851466 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.6111111111111112, + "acc_stderr": 0.027914055510468008, + "acc_norm": 0.6111111111111112, + "acc_norm_stderr": 0.027914055510468008 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.55, + "acc_stderr": 0.049999999999999996, + "acc_norm": 0.55, + "acc_norm_stderr": 0.049999999999999996 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7272727272727273, + "acc_stderr": 0.04065578140908705, + "acc_norm": 0.7272727272727273, + "acc_norm_stderr": 0.04065578140908705 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5921052631578947, + "acc_stderr": 0.039993097127774734, + "acc_norm": 0.5921052631578947, + "acc_norm_stderr": 0.039993097127774734 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5098039215686274, + "acc_stderr": 0.02022394600507432, + "acc_norm": 0.5098039215686274, + "acc_norm_stderr": 0.02022394600507432 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.38652482269503546, + "acc_stderr": 0.029049190342543454, + "acc_norm": 0.38652482269503546, + "acc_norm_stderr": 
0.029049190342543454 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4107142857142857, + "acc_stderr": 0.046695106638751926, + "acc_norm": 0.4107142857142857, + "acc_norm_stderr": 0.046695106638751926 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5462962962962963, + "acc_stderr": 0.03395322726375797, + "acc_norm": 0.5462962962962963, + "acc_norm_stderr": 0.03395322726375797 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.21564245810055865, + "acc_stderr": 0.013754835975482351, + "acc_norm": 0.21564245810055865, + "acc_norm_stderr": 0.013754835975482351 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.72, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.72, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.47794117647058826, + "acc_stderr": 0.03034326422421352, + "acc_norm": 0.47794117647058826, + "acc_norm_stderr": 0.03034326422421352 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6040816326530613, + "acc_stderr": 0.03130802899065686, + "acc_norm": 0.6040816326530613, + "acc_norm_stderr": 0.03130802899065686 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.729957805907173, + "acc_stderr": 0.028900721906293426, + "acc_norm": 0.729957805907173, + "acc_norm_stderr": 0.028900721906293426 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.4132985658409387, + "acc_stderr": 0.012576779494860083, + "acc_norm": 0.4132985658409387, + "acc_norm_stderr": 0.012576779494860083 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6470588235294118, + "acc_stderr": 0.03354092437591519, + "acc_norm": 0.6470588235294118, + "acc_norm_stderr": 0.03354092437591519 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6848484848484848, + 
"acc_stderr": 0.0362773057502241, + "acc_norm": 0.6848484848484848, + "acc_norm_stderr": 0.0362773057502241 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3182374541003672, + "mc1_stderr": 0.016305988648920612, + "mc2": 0.47503231724175854, + "mc2_stderr": 0.015521283734648058 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5678866587957497, + "acc_stderr": 0.017031170198851746, + "acc_norm": 0.5950413223140496, + "acc_norm_stderr": 0.016876941165045616 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + 
"harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Edentns/DataVortexS-10.7B-v0.4", + "model_sha": "463b40db961f2d4fa6901268e04bd323528391f9", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Edentns/DataVortexS-10.7B-v1.0/result_2024-01-15 00:19:52.json b/Edentns/DataVortexS-10.7B-v1.0/result_2024-01-15 00:19:52.json new file mode 100644 index 0000000000000000000000000000000000000000..b04aef6a56eecc34ff259b794853c465d7cd793a --- /dev/null +++ b/Edentns/DataVortexS-10.7B-v1.0/result_2024-01-15 00:19:52.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.44283276450511944, + "acc_stderr": 0.0145155738733489, + "acc_norm": 
0.4906143344709898, + "acc_norm_stderr": 0.014608816322065003 + }, + "harness|ko_hellaswag|10": { + "acc": 0.2562238597888867, + "acc_stderr": 0.0043565471858470406, + "acc_norm": 0.2566221868153754, + "acc_norm_stderr": 0.00435876459640104 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6491228070175439, + "acc_stderr": 0.03660298834049163, + "acc_norm": 0.6491228070175439, + "acc_norm_stderr": 0.03660298834049163 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6407766990291263, + "acc_stderr": 0.04750458399041696, + "acc_norm": 0.6407766990291263, + "acc_norm_stderr": 0.04750458399041696 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6602809706257982, + "acc_stderr": 0.01693639411430163, + "acc_norm": 0.6602809706257982, + "acc_norm_stderr": 0.01693639411430163 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45925925925925926, + "acc_stderr": 0.04304979692464244, + "acc_norm": 0.45925925925925926, + "acc_norm_stderr": 0.04304979692464244 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.5106382978723404, + "acc_stderr": 0.03267862331014063, + "acc_norm": 0.5106382978723404, + "acc_norm_stderr": 0.03267862331014063 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4939759036144578, + "acc_stderr": 0.03892212195333047, + "acc_norm": 0.4939759036144578, + "acc_norm_stderr": 0.03892212195333047 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5884244372990354, + "acc_stderr": 0.027950481494401273, + "acc_norm": 0.5884244372990354, + "acc_norm_stderr": 0.027950481494401273 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.6098654708520179, + "acc_stderr": 0.03273766725459156, + "acc_norm": 0.6098654708520179, + "acc_norm_stderr": 0.03273766725459156 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5877862595419847, + "acc_stderr": 0.043171711948702556, + "acc_norm": 
0.5877862595419847, + "acc_norm_stderr": 0.043171711948702556 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.44, + "acc_stderr": 0.049888765156985905, + "acc_norm": 0.44, + "acc_norm_stderr": 0.049888765156985905 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7626262626262627, + "acc_stderr": 0.030313710538198906, + "acc_norm": 0.7626262626262627, + "acc_norm_stderr": 0.030313710538198906 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5379310344827586, + "acc_stderr": 0.04154659671707548, + "acc_norm": 0.5379310344827586, + "acc_norm_stderr": 0.04154659671707548 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.30392156862745096, + "acc_stderr": 0.045766654032077615, + "acc_norm": 0.30392156862745096, + "acc_norm_stderr": 0.045766654032077615 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6302521008403361, + "acc_stderr": 0.03135709599613591, + "acc_norm": 0.6302521008403361, + "acc_norm_stderr": 0.03135709599613591 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5435897435897435, + "acc_stderr": 0.025254485424799595, + "acc_norm": 0.5435897435897435, + "acc_norm_stderr": 0.025254485424799595 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.64, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.64, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6388888888888888, + "acc_stderr": 0.04643454608906275, + "acc_norm": 0.6388888888888888, + "acc_norm_stderr": 0.04643454608906275 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4236453201970443, + "acc_stderr": 0.03476725747649037, + "acc_norm": 0.4236453201970443, + "acc_norm_stderr": 0.03476725747649037 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6161290322580645, + "acc_stderr": 
0.02766618207553965, + "acc_norm": 0.6161290322580645, + "acc_norm_stderr": 0.02766618207553965 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7948717948717948, + "acc_stderr": 0.026453508054040346, + "acc_norm": 0.7948717948717948, + "acc_norm_stderr": 0.026453508054040346 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5471698113207547, + "acc_stderr": 0.030635627957961816, + "acc_norm": 0.5471698113207547, + "acc_norm_stderr": 0.030635627957961816 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5909090909090909, + "acc_stderr": 0.047093069786618945, + "acc_norm": 0.5909090909090909, + "acc_norm_stderr": 0.047093069786618945 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3592592592592593, + "acc_stderr": 0.029252905927251976, + "acc_norm": 0.3592592592592593, + "acc_norm_stderr": 0.029252905927251976 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3841059602649007, + "acc_stderr": 0.03971301814719198, + "acc_norm": 0.3841059602649007, + "acc_norm_stderr": 0.03971301814719198 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7064676616915423, + "acc_stderr": 0.032200241045342054, + "acc_norm": 0.7064676616915423, + "acc_norm_stderr": 0.032200241045342054 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.48554913294797686, + "acc_stderr": 0.03810871630454764, + "acc_norm": 0.48554913294797686, + "acc_norm_stderr": 0.03810871630454764 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3941798941798942, + "acc_stderr": 0.025167982333894143, + "acc_norm": 0.3941798941798942, + "acc_norm_stderr": 0.025167982333894143 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5069444444444444, + "acc_stderr": 0.04180806750294938, + "acc_norm": 0.5069444444444444, + "acc_norm_stderr": 0.04180806750294938 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + 
"harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.76, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.76, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.653179190751445, + "acc_stderr": 0.025624723994030457, + "acc_norm": 0.653179190751445, + "acc_norm_stderr": 0.025624723994030457 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5276073619631901, + "acc_stderr": 0.03922378290610991, + "acc_norm": 0.5276073619631901, + "acc_norm_stderr": 0.03922378290610991 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6111111111111112, + "acc_stderr": 0.02712511551316686, + "acc_norm": 0.6111111111111112, + "acc_norm_stderr": 0.02712511551316686 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7357512953367875, + "acc_stderr": 0.03182155050916647, + "acc_norm": 0.7357512953367875, + "acc_norm_stderr": 0.03182155050916647 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.41228070175438597, + "acc_stderr": 0.046306532033665956, + "acc_norm": 0.41228070175438597, + "acc_norm_stderr": 0.046306532033665956 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6880733944954128, + "acc_stderr": 0.019862967976707245, + "acc_norm": 0.6880733944954128, + "acc_norm_stderr": 0.019862967976707245 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.40476190476190477, + "acc_stderr": 0.04390259265377561, + "acc_norm": 0.40476190476190477, + "acc_norm_stderr": 0.04390259265377561 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5686274509803921, + "acc_stderr": 0.028358956313423552, + "acc_norm": 0.5686274509803921, + "acc_norm_stderr": 0.028358956313423552 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + 
"harness|ko_mmlu_international_law|5": { + "acc": 0.743801652892562, + "acc_stderr": 0.03984979653302871, + "acc_norm": 0.743801652892562, + "acc_norm_stderr": 0.03984979653302871 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5657894736842105, + "acc_stderr": 0.040335656678483205, + "acc_norm": 0.5657894736842105, + "acc_norm_stderr": 0.040335656678483205 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5065359477124183, + "acc_stderr": 0.020226106567657807, + "acc_norm": 0.5065359477124183, + "acc_norm_stderr": 0.020226106567657807 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3829787234042553, + "acc_stderr": 0.02899908090480618, + "acc_norm": 0.3829787234042553, + "acc_norm_stderr": 0.02899908090480618 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3482142857142857, + "acc_stderr": 0.045218299028335865, + "acc_norm": 0.3482142857142857, + "acc_norm_stderr": 0.045218299028335865 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5787037037037037, + "acc_stderr": 0.03367462138896078, + "acc_norm": 0.5787037037037037, + "acc_norm_stderr": 0.03367462138896078 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2100558659217877, + "acc_stderr": 0.013623755371333528, + "acc_norm": 0.2100558659217877, + "acc_norm_stderr": 0.013623755371333528 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.63, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.63, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5220588235294118, + "acc_stderr": 0.03034326422421352, + "acc_norm": 0.5220588235294118, + "acc_norm_stderr": 0.03034326422421352 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6081632653061224, + "acc_stderr": 0.031251275910891656, + "acc_norm": 0.6081632653061224, + "acc_norm_stderr": 
0.031251275910891656 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7552742616033755, + "acc_stderr": 0.027985699387036423, + "acc_norm": 0.7552742616033755, + "acc_norm_stderr": 0.027985699387036423 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3878748370273794, + "acc_stderr": 0.012444998309675631, + "acc_norm": 0.3878748370273794, + "acc_norm_stderr": 0.012444998309675631 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5392156862745098, + "acc_stderr": 0.03498501649369527, + "acc_norm": 0.5392156862745098, + "acc_norm_stderr": 0.03498501649369527 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5818181818181818, + "acc_stderr": 0.03851716319398394, + "acc_norm": 0.5818181818181818, + "acc_norm_stderr": 0.03851716319398394 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2998776009791922, + "mc1_stderr": 0.016040352966713613, + "mc2": 0.4576126744740946, + "mc2_stderr": 0.015112274979113303 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.08382526564344746, + "acc_stderr": 0.009527773913592174, + "acc_norm": 0.29634002361275086, + "acc_norm_stderr": 0.015699701628594232 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + 
"harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Edentns/DataVortexS-10.7B-v1.0", + "model_sha": "888a73a4281e4cb1b64696e5d4c8a1a7b59b3024", + 
"model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Edentns/DataVortexTL-1.1B-v0.1/result_2024-01-09 00:17:54.json b/Edentns/DataVortexTL-1.1B-v0.1/result_2024-01-09 00:17:54.json new file mode 100644 index 0000000000000000000000000000000000000000..422554d086defd6593f020e16d41e9d18da080b0 --- /dev/null +++ b/Edentns/DataVortexTL-1.1B-v0.1/result_2024-01-09 00:17:54.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.23037542662116042, + "acc_stderr": 0.012304928418747611, + "acc_norm": 0.2525597269624573, + "acc_norm_stderr": 0.012696728980207706 + }, + "harness|ko_hellaswag|10": { + "acc": 0.29934276040629354, + "acc_stderr": 0.004570342034463235, + "acc_norm": 0.335291774546903, + "acc_norm_stderr": 0.004711275408138422 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.32748538011695905, + "acc_stderr": 0.03599335771456027, + "acc_norm": 0.32748538011695905, + "acc_norm_stderr": 0.03599335771456027 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.1650485436893204, + "acc_stderr": 0.03675668832233188, + "acc_norm": 0.1650485436893204, + "acc_norm_stderr": 0.03675668832233188 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.27586206896551724, + "acc_stderr": 0.01598281477469563, + "acc_norm": 0.27586206896551724, + "acc_norm_stderr": 0.01598281477469563 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.24444444444444444, + "acc_stderr": 0.03712537833614866, + "acc_norm": 0.24444444444444444, + "acc_norm_stderr": 0.03712537833614866 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.28936170212765955, + "acc_stderr": 0.02964400657700962, + "acc_norm": 0.28936170212765955, + "acc_norm_stderr": 
0.02964400657700962 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.30120481927710846, + "acc_stderr": 0.035716092300534796, + "acc_norm": 0.30120481927710846, + "acc_norm_stderr": 0.035716092300534796 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.28938906752411575, + "acc_stderr": 0.025755865922632917, + "acc_norm": 0.28938906752411575, + "acc_norm_stderr": 0.025755865922632917 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3542600896860987, + "acc_stderr": 0.03210062154134987, + "acc_norm": 0.3542600896860987, + "acc_norm_stderr": 0.03210062154134987 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2595419847328244, + "acc_stderr": 0.03844876139785271, + "acc_norm": 0.2595419847328244, + "acc_norm_stderr": 0.03844876139785271 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.18181818181818182, + "acc_stderr": 0.0274796030105388, + "acc_norm": 0.18181818181818182, + "acc_norm_stderr": 0.0274796030105388 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2413793103448276, + "acc_stderr": 0.03565998174135302, + "acc_norm": 0.2413793103448276, + "acc_norm_stderr": 0.03565998174135302 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.04023382273617747, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.04023382273617747 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.226890756302521, + "acc_stderr": 0.02720537153827948, + "acc_norm": 0.226890756302521, + "acc_norm_stderr": 0.02720537153827948 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2205128205128205, + "acc_stderr": 0.02102067268082791, + "acc_norm": 0.2205128205128205, + "acc_norm_stderr": 0.02102067268082791 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768081, + 
"acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768081 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.16, + "acc_stderr": 0.03684529491774709, + "acc_norm": 0.16, + "acc_norm_stderr": 0.03684529491774709 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.04236511258094633, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.04236511258094633 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.1724137931034483, + "acc_stderr": 0.026577672183036576, + "acc_norm": 0.1724137931034483, + "acc_norm_stderr": 0.026577672183036576 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2064516129032258, + "acc_stderr": 0.02302589961718871, + "acc_norm": 0.2064516129032258, + "acc_norm_stderr": 0.02302589961718871 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.31196581196581197, + "acc_stderr": 0.030351527323344948, + "acc_norm": 0.31196581196581197, + "acc_norm_stderr": 0.030351527323344948 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.22641509433962265, + "acc_stderr": 0.025757559893106734, + "acc_norm": 0.22641509433962265, + "acc_norm_stderr": 0.025757559893106734 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03955932861795833, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03955932861795833 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.025348097468097845, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.025348097468097845 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.17218543046357615, + "acc_stderr": 0.030826136961962406, + "acc_norm": 0.17218543046357615, + "acc_norm_stderr": 0.030826136961962406 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.22885572139303484, + "acc_stderr": 0.029705284056772436, + "acc_norm": 0.22885572139303484, + "acc_norm_stderr": 0.029705284056772436 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 
0.21965317919075145, + "acc_stderr": 0.031568093627031744, + "acc_norm": 0.21965317919075145, + "acc_norm_stderr": 0.031568093627031744 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.21164021164021163, + "acc_stderr": 0.021037331505262886, + "acc_norm": 0.21164021164021163, + "acc_norm_stderr": 0.021037331505262886 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2361111111111111, + "acc_stderr": 0.03551446610810826, + "acc_norm": 0.2361111111111111, + "acc_norm_stderr": 0.03551446610810826 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816505, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816505 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.26011560693641617, + "acc_stderr": 0.023618678310069367, + "acc_norm": 0.26011560693641617, + "acc_norm_stderr": 0.023618678310069367 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.22085889570552147, + "acc_stderr": 0.03259177392742178, + "acc_norm": 0.22085889570552147, + "acc_norm_stderr": 0.03259177392742178 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.21604938271604937, + "acc_stderr": 0.022899162918445806, + "acc_norm": 0.21604938271604937, + "acc_norm_stderr": 0.022899162918445806 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.20207253886010362, + "acc_stderr": 0.02897908979429673, + "acc_norm": 0.20207253886010362, + "acc_norm_stderr": 0.02897908979429673 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.04096985139843671, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.04096985139843671 + }, + 
"harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.21284403669724772, + "acc_stderr": 0.017549376389313694, + "acc_norm": 0.21284403669724772, + "acc_norm_stderr": 0.017549376389313694 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.16666666666666666, + "acc_stderr": 0.03333333333333336, + "acc_norm": 0.16666666666666666, + "acc_norm_stderr": 0.03333333333333336 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.02380518652488815, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.02380518652488815 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2727272727272727, + "acc_stderr": 0.04065578140908705, + "acc_norm": 0.2727272727272727, + "acc_norm_stderr": 0.04065578140908705 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.17763157894736842, + "acc_stderr": 0.031103182383123384, + "acc_norm": 0.17763157894736842, + "acc_norm_stderr": 0.031103182383123384 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2630718954248366, + "acc_stderr": 0.017812676542320657, + "acc_norm": 0.2630718954248366, + "acc_norm_stderr": 0.017812676542320657 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.25177304964539005, + "acc_stderr": 0.0258921511567094, + "acc_norm": 0.25177304964539005, + "acc_norm_stderr": 0.0258921511567094 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.04547960999764376, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.04547960999764376 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.33796296296296297, + "acc_stderr": 0.03225941352631295, + "acc_norm": 0.33796296296296297, + "acc_norm_stderr": 0.03225941352631295 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2536312849162011, + "acc_stderr": 0.014551553659369923, + "acc_norm": 
0.2536312849162011, + "acc_norm_stderr": 0.014551553659369923 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.22, + "acc_stderr": 0.0416333199893227, + "acc_norm": 0.22, + "acc_norm_stderr": 0.0416333199893227 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.025767252010855942, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.025767252010855942 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.20408163265306123, + "acc_stderr": 0.025801283475090496, + "acc_norm": 0.20408163265306123, + "acc_norm_stderr": 0.025801283475090496 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.270042194092827, + "acc_stderr": 0.028900721906293426, + "acc_norm": 0.270042194092827, + "acc_norm_stderr": 0.028900721906293426 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2438070404172099, + "acc_stderr": 0.010966507972178473, + "acc_norm": 0.2438070404172099, + "acc_norm_stderr": 0.010966507972178473 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.029331162294251728, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.029331162294251728 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2545454545454545, + "acc_stderr": 0.0340150671524904, + "acc_norm": 0.2545454545454545, + "acc_norm_stderr": 0.0340150671524904 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.23623011015911874, + "mc1_stderr": 0.014869755015871084, + "mc2": 0.4333851261848709, + "mc2_stderr": 0.015570846471622345 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.24675324675324675, + "acc_stderr": 0.014822275820015272, + "acc_norm": 0.30814639905548996, + "acc_norm_stderr": 0.015874515156298393 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 
0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + 
"harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Edentns/DataVortexTL-1.1B-v0.1", + "model_sha": "d6984dd6056dac75b1aabdb7237488f7f41327d2", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Edentns/Worktro-Small-v0.1/result_2024-07-15 04:56:13.json b/Edentns/Worktro-Small-v0.1/result_2024-07-15 04:56:13.json new file mode 100644 index 0000000000000000000000000000000000000000..cc7b94748d0d9b2b755f4483e028e41e139dad49 --- /dev/null +++ b/Edentns/Worktro-Small-v0.1/result_2024-07-15 04:56:13.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.34897610921501704, + "acc_stderr": 0.0139289334613825, + "acc_norm": 0.4129692832764505, + "acc_norm_stderr": 0.014388344935398324 + }, + "harness|ko_hellaswag|10": { + "acc": 0.2504481179047998, + "acc_stderr": 0.004323856300539177, + "acc_norm": 0.2504481179047998, + "acc_norm_stderr": 0.004323856300539177 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6198830409356725, + "acc_stderr": 0.03722965741385539, + "acc_norm": 0.6198830409356725, + "acc_norm_stderr": 0.03722965741385539 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6699029126213593, + "acc_stderr": 0.0465614711001235, + "acc_norm": 
0.6699029126213593, + "acc_norm_stderr": 0.0465614711001235 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5530012771392082, + "acc_stderr": 0.017779225233394216, + "acc_norm": 0.5530012771392082, + "acc_norm_stderr": 0.017779225233394216 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4222222222222222, + "acc_stderr": 0.042667634040995814, + "acc_norm": 0.4222222222222222, + "acc_norm_stderr": 0.042667634040995814 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.44680851063829785, + "acc_stderr": 0.032500536843658404, + "acc_norm": 0.44680851063829785, + "acc_norm_stderr": 0.032500536843658404 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3855421686746988, + "acc_stderr": 0.03789134424611549, + "acc_norm": 0.3855421686746988, + "acc_norm_stderr": 0.03789134424611549 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5369774919614148, + "acc_stderr": 0.028320325830105915, + "acc_norm": 0.5369774919614148, + "acc_norm_stderr": 0.028320325830105915 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5112107623318386, + "acc_stderr": 0.033549366530984746, + "acc_norm": 0.5112107623318386, + "acc_norm_stderr": 0.033549366530984746 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5267175572519084, + "acc_stderr": 0.04379024936553893, + "acc_norm": 0.5267175572519084, + "acc_norm_stderr": 0.04379024936553893 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6616161616161617, + "acc_stderr": 0.033711241426263014, + "acc_norm": 0.6616161616161617, + "acc_norm_stderr": 0.033711241426263014 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5172413793103449, + "acc_stderr": 0.04164188720169375, + 
"acc_norm": 0.5172413793103449, + "acc_norm_stderr": 0.04164188720169375 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.04617034827006718, + "acc_norm": 0.3137254901960784, + "acc_norm_stderr": 0.04617034827006718 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5882352941176471, + "acc_stderr": 0.031968769891957786, + "acc_norm": 0.5882352941176471, + "acc_norm_stderr": 0.031968769891957786 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.517948717948718, + "acc_stderr": 0.02533466708095489, + "acc_norm": 0.517948717948718, + "acc_norm_stderr": 0.02533466708095489 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.58, + "acc_stderr": 0.04960449637488583, + "acc_norm": 0.58, + "acc_norm_stderr": 0.04960449637488583 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6388888888888888, + "acc_stderr": 0.04643454608906275, + "acc_norm": 0.6388888888888888, + "acc_norm_stderr": 0.04643454608906275 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4039408866995074, + "acc_stderr": 0.0345245390382204, + "acc_norm": 0.4039408866995074, + "acc_norm_stderr": 0.0345245390382204 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5161290322580645, + "acc_stderr": 0.02842920317672455, + "acc_norm": 0.5161290322580645, + "acc_norm_stderr": 0.02842920317672455 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7649572649572649, + "acc_stderr": 0.027778835904935434, + "acc_norm": 0.7649572649572649, + "acc_norm_stderr": 0.027778835904935434 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4867924528301887, + "acc_stderr": 0.030762134874500482, + "acc_norm": 0.4867924528301887, + "acc_norm_stderr": 0.030762134874500482 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.509090909090909, + 
"acc_stderr": 0.04788339768702861, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3962962962962963, + "acc_stderr": 0.029822619458533997, + "acc_norm": 0.3962962962962963, + "acc_norm_stderr": 0.029822619458533997 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3443708609271523, + "acc_stderr": 0.038796870240733264, + "acc_norm": 0.3443708609271523, + "acc_norm_stderr": 0.038796870240733264 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6318407960199005, + "acc_stderr": 0.03410410565495302, + "acc_norm": 0.6318407960199005, + "acc_norm_stderr": 0.03410410565495302 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.43352601156069365, + "acc_stderr": 0.03778621079092055, + "acc_norm": 0.43352601156069365, + "acc_norm_stderr": 0.03778621079092055 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.4365079365079365, + "acc_stderr": 0.02554284681740051, + "acc_norm": 0.4365079365079365, + "acc_norm_stderr": 0.02554284681740051 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4652777777777778, + "acc_stderr": 0.04171115858181618, + "acc_norm": 0.4652777777777778, + "acc_norm_stderr": 0.04171115858181618 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.56, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.56, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.49710982658959535, + "acc_stderr": 0.02691864538323901, + "acc_norm": 0.49710982658959535, + "acc_norm_stderr": 0.02691864538323901 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5214723926380368, + "acc_stderr": 0.03924746876751129, + "acc_norm": 0.5214723926380368, + "acc_norm_stderr": 0.03924746876751129 + }, + "harness|ko_mmlu_prehistory|5": { + 
"acc": 0.49691358024691357, + "acc_stderr": 0.027820214158594377, + "acc_norm": 0.49691358024691357, + "acc_norm_stderr": 0.027820214158594377 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5647668393782384, + "acc_stderr": 0.03578038165008586, + "acc_norm": 0.5647668393782384, + "acc_norm_stderr": 0.03578038165008586 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.38596491228070173, + "acc_stderr": 0.04579639422070434, + "acc_norm": 0.38596491228070173, + "acc_norm_stderr": 0.04579639422070434 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6018348623853211, + "acc_stderr": 0.020987989422654264, + "acc_norm": 0.6018348623853211, + "acc_norm_stderr": 0.020987989422654264 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04216370213557835, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04216370213557835 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5098039215686274, + "acc_stderr": 0.02862441255016795, + "acc_norm": 0.5098039215686274, + "acc_norm_stderr": 0.02862441255016795 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.65, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.65, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6446280991735537, + "acc_stderr": 0.0436923632657398, + "acc_norm": 0.6446280991735537, + "acc_norm_stderr": 0.0436923632657398 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5526315789473685, + "acc_stderr": 0.040463368839782486, + "acc_norm": 0.5526315789473685, + "acc_norm_stderr": 0.040463368839782486 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.02008736207670285, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.02008736207670285 + }, + 
"harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3723404255319149, + "acc_stderr": 0.02883892147125146, + "acc_norm": 0.3723404255319149, + "acc_norm_stderr": 0.02883892147125146 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.04547960999764376, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.04547960999764376 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.38425925925925924, + "acc_stderr": 0.03317354514310742, + "acc_norm": 0.38425925925925924, + "acc_norm_stderr": 0.03317354514310742 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2569832402234637, + "acc_stderr": 0.014614465821966337, + "acc_norm": 0.2569832402234637, + "acc_norm_stderr": 0.014614465821966337 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.47, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.47, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4117647058823529, + "acc_stderr": 0.02989616303312547, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.02989616303312547 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.46938775510204084, + "acc_stderr": 0.031949171367580624, + "acc_norm": 0.46938775510204084, + "acc_norm_stderr": 0.031949171367580624 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5864978902953587, + "acc_stderr": 0.03205649904851858, + "acc_norm": 0.5864978902953587, + "acc_norm_stderr": 0.03205649904851858 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3644067796610169, + "acc_stderr": 0.012291694983056477, + "acc_norm": 0.3644067796610169, + "acc_norm_stderr": 0.012291694983056477 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5441176470588235, + "acc_stderr": 0.03495624522015477, + 
"acc_norm": 0.5441176470588235, + "acc_norm_stderr": 0.03495624522015477 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5272727272727272, + "acc_stderr": 0.03898531605579418, + "acc_norm": 0.5272727272727272, + "acc_norm_stderr": 0.03898531605579418 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2962056303549572, + "mc1_stderr": 0.015983595101811392, + "mc2": 0.4689813000124781, + "mc2_stderr": 0.015471857359723505 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4002361275088548, + "acc_stderr": 0.016844693510505045, + "acc_norm": 0.4817001180637544, + "acc_norm_stderr": 0.01717883663917776 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + 
"harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Edentns/Worktro-Small-v0.1", + "model_sha": "881ea618f197432245c6be4f7cf7758031f1648c", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Edentns/Worktro-Small-v0.2/result_2024-07-17 04:26:44.json b/Edentns/Worktro-Small-v0.2/result_2024-07-17 04:26:44.json new file mode 100644 index 0000000000000000000000000000000000000000..6dbc7af038bd31adaa1eaeca09d199b9780a2347 --- /dev/null +++ b/Edentns/Worktro-Small-v0.2/result_2024-07-17 04:26:44.json @@ -0,0 
+1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.19197952218430034, + "acc_stderr": 0.01150959890659811, + "acc_norm": 0.21843003412969283, + "acc_norm_stderr": 0.012074291605700978 + }, + "harness|ko_hellaswag|10": { + "acc": 0.2504481179047998, + "acc_stderr": 0.004323856300539177, + "acc_norm": 0.2504481179047998, + "acc_norm_stderr": 0.004323856300539177 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.18128654970760233, + "acc_stderr": 0.029547741687640024, + "acc_norm": 0.18128654970760233, + "acc_norm_stderr": 0.029547741687640024 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.20388349514563106, + "acc_stderr": 0.03989139859531769, + "acc_norm": 0.20388349514563106, + "acc_norm_stderr": 0.03989139859531769 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2669220945083014, + "acc_stderr": 0.015818450894777555, + "acc_norm": 0.2669220945083014, + "acc_norm_stderr": 0.015818450894777555 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.03591444084196969, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.03591444084196969 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909284, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909284 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.19148936170212766, + "acc_stderr": 0.025722149992637795, + "acc_norm": 0.19148936170212766, + "acc_norm_stderr": 0.025722149992637795 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.18674698795180722, + "acc_stderr": 0.03033874914450058, + "acc_norm": 0.18674698795180722, + "acc_norm_stderr": 0.03033874914450058 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2733118971061093, + "acc_stderr": 0.02531176597542612, + "acc_norm": 0.2733118971061093, + "acc_norm_stderr": 0.02531176597542612 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.18385650224215247, + "acc_stderr": 0.02599837909235651, + "acc_norm": 0.18385650224215247, + 
"acc_norm_stderr": 0.02599837909235651 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.22900763358778625, + "acc_stderr": 0.036853466317118506, + "acc_norm": 0.22900763358778625, + "acc_norm_stderr": 0.036853466317118506 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.2878787878787879, + "acc_stderr": 0.03225883512300992, + "acc_norm": 0.2878787878787879, + "acc_norm_stderr": 0.03225883512300992 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2827586206896552, + "acc_stderr": 0.037528339580033376, + "acc_norm": 0.2827586206896552, + "acc_norm_stderr": 0.037528339580033376 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.04023382273617746, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.04023382273617746 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3067226890756303, + "acc_stderr": 0.029953823891887024, + "acc_norm": 0.3067226890756303, + "acc_norm_stderr": 0.029953823891887024 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3641025641025641, + "acc_stderr": 0.024396672985094778, + "acc_norm": 0.3641025641025641, + "acc_norm_stderr": 0.024396672985094778 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.21296296296296297, + "acc_stderr": 0.03957835471980981, + "acc_norm": 0.21296296296296297, + "acc_norm_stderr": 0.03957835471980981 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.270935960591133, + "acc_stderr": 0.031270907132977, + 
"acc_norm": 0.270935960591133, + "acc_norm_stderr": 0.031270907132977 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3193548387096774, + "acc_stderr": 0.02652270967466777, + "acc_norm": 0.3193548387096774, + "acc_norm_stderr": 0.02652270967466777 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.19658119658119658, + "acc_stderr": 0.02603538609895129, + "acc_norm": 0.19658119658119658, + "acc_norm_stderr": 0.02603538609895129 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2830188679245283, + "acc_stderr": 0.027724236492700904, + "acc_norm": 0.2830188679245283, + "acc_norm_stderr": 0.027724236492700904 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2545454545454545, + "acc_stderr": 0.041723430387053825, + "acc_norm": 0.2545454545454545, + "acc_norm_stderr": 0.041723430387053825 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2814814814814815, + "acc_stderr": 0.02742001935094528, + "acc_norm": 0.2814814814814815, + "acc_norm_stderr": 0.02742001935094528 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.32450331125827814, + "acc_stderr": 0.03822746937658754, + "acc_norm": 0.32450331125827814, + "acc_norm_stderr": 0.03822746937658754 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.2736318407960199, + "acc_stderr": 0.03152439186555404, + "acc_norm": 0.2736318407960199, + "acc_norm_stderr": 0.03152439186555404 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3352601156069364, + "acc_stderr": 0.03599586301247078, + "acc_norm": 0.3352601156069364, + "acc_norm_stderr": 0.03599586301247078 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.24603174603174602, + "acc_stderr": 0.022182037202948368, + "acc_norm": 0.24603174603174602, + "acc_norm_stderr": 0.022182037202948368 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2638888888888889, + "acc_stderr": 0.03685651095897532, + "acc_norm": 0.2638888888888889, + "acc_norm_stderr": 0.03685651095897532 + }, + 
"harness|ko_mmlu_college_chemistry|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.20520231213872833, + "acc_stderr": 0.021742519835276294, + "acc_norm": 0.20520231213872833, + "acc_norm_stderr": 0.021742519835276294 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2331288343558282, + "acc_stderr": 0.0332201579577674, + "acc_norm": 0.2331288343558282, + "acc_norm_stderr": 0.0332201579577674 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.24074074074074073, + "acc_stderr": 0.023788583551658533, + "acc_norm": 0.24074074074074073, + "acc_norm_stderr": 0.023788583551658533 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.35233160621761656, + "acc_stderr": 0.03447478286414358, + "acc_norm": 0.35233160621761656, + "acc_norm_stderr": 0.03447478286414358 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.03999423879281336, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.03999423879281336 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.28623853211009176, + "acc_stderr": 0.019379436628919975, + "acc_norm": 0.28623853211009176, + "acc_norm_stderr": 0.019379436628919975 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.23015873015873015, + "acc_stderr": 0.03764950879790607, + "acc_norm": 0.23015873015873015, + "acc_norm_stderr": 0.03764950879790607 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.23202614379084968, + "acc_stderr": 0.02417084087934099, + "acc_norm": 0.23202614379084968, + "acc_norm_stderr": 
0.02417084087934099 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2066115702479339, + "acc_stderr": 0.03695980128098825, + "acc_norm": 0.2066115702479339, + "acc_norm_stderr": 0.03695980128098825 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.2236842105263158, + "acc_stderr": 0.033911609343436025, + "acc_norm": 0.2236842105263158, + "acc_norm_stderr": 0.033911609343436025 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2173202614379085, + "acc_stderr": 0.016684820929148598, + "acc_norm": 0.2173202614379085, + "acc_norm_stderr": 0.016684820929148598 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.22340425531914893, + "acc_stderr": 0.024847921358063962, + "acc_norm": 0.22340425531914893, + "acc_norm_stderr": 0.024847921358063962 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.23214285714285715, + "acc_stderr": 0.04007341809755807, + "acc_norm": 0.23214285714285715, + "acc_norm_stderr": 0.04007341809755807 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.0340470532865388, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.0340470532865388 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27150837988826815, + "acc_stderr": 0.014874252168095278, + "acc_norm": 0.27150837988826815, + "acc_norm_stderr": 0.014874252168095278 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.19, + "acc_stderr": 0.03942772444036624, + "acc_norm": 0.19, + "acc_norm_stderr": 0.03942772444036624 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4485294117647059, + "acc_stderr": 0.030211479609121593, + "acc_norm": 
0.4485294117647059, + "acc_norm_stderr": 0.030211479609121593 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3877551020408163, + "acc_stderr": 0.03119223072679566, + "acc_norm": 0.3877551020408163, + "acc_norm_stderr": 0.03119223072679566 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.21940928270042195, + "acc_stderr": 0.026939106581553945, + "acc_norm": 0.21940928270042195, + "acc_norm_stderr": 0.026939106581553945 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.24445893089960888, + "acc_stderr": 0.0109764250131139, + "acc_norm": 0.24445893089960888, + "acc_norm_stderr": 0.0109764250131139 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.03132179803083292, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.03132179803083292 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.20606060606060606, + "acc_stderr": 0.03158415324047709, + "acc_norm": 0.20606060606060606, + "acc_norm_stderr": 0.03158415324047709 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.26805385556915545, + "mc1_stderr": 0.015506204722834564, + "mc2": 0.4877359344668719, + "mc2_stderr": 0.01670237434131383 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.17473435655253838, + "acc_stderr": 0.013055720791340993, + "acc_norm": 0.44037780401416765, + "acc_norm_stderr": 0.017067699774312984 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + 
"harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + 
"harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Edentns/Worktro-Small-v0.2", + "model_sha": "6b274adce0791092ab89e1d53704bdba7f83bcec", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/EleutherAI/polyglot-ko-1.3b/result_2023-09-24 15:21:38.json b/EleutherAI/polyglot-ko-1.3b/result_2023-09-24 15:21:38.json new file mode 100644 index 0000000000000000000000000000000000000000..1b7095328882e4fa2330ae228ca1c8942721fe6c --- /dev/null +++ b/EleutherAI/polyglot-ko-1.3b/result_2023-09-24 15:21:38.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2235494880546075, + "acc_stderr": 0.012174896631202605, + "acc_norm": 0.2815699658703072, + "acc_norm_stderr": 0.013143376735009015 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3345947022505477, + "acc_stderr": 0.004708842600177431, + "acc_norm": 0.4135630352519418, + "acc_norm_stderr": 0.0049146550633294974 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.27485380116959063, + "acc_stderr": 0.03424042924691585, + "acc_norm": 0.27485380116959063, + "acc_norm_stderr": 0.03424042924691585 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.27184466019417475, + "acc_stderr": 0.044052680241409216, + "acc_norm": 0.27184466019417475, + "acc_norm_stderr": 0.044052680241409216 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.26947637292464877, + "acc_stderr": 0.015866243073215065, + "acc_norm": 0.26947637292464877, + "acc_norm_stderr": 0.015866243073215065 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.038201699145179055, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.038201699145179055 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 
0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2127659574468085, + "acc_stderr": 0.026754391348039783, + "acc_norm": 0.2127659574468085, + "acc_norm_stderr": 0.026754391348039783 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.24096385542168675, + "acc_stderr": 0.033293941190735296, + "acc_norm": 0.24096385542168675, + "acc_norm_stderr": 0.033293941190735296 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2379421221864952, + "acc_stderr": 0.024185150647818707, + "acc_norm": 0.2379421221864952, + "acc_norm_stderr": 0.024185150647818707 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.2825112107623318, + "acc_stderr": 0.030216831011508766, + "acc_norm": 0.2825112107623318, + "acc_norm_stderr": 0.030216831011508766 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.21374045801526717, + "acc_stderr": 0.0359546161177469, + "acc_norm": 0.21374045801526717, + "acc_norm_stderr": 0.0359546161177469 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.2474747474747475, + "acc_stderr": 0.03074630074212451, + "acc_norm": 0.2474747474747475, + "acc_norm_stderr": 0.03074630074212451 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.22758620689655173, + "acc_stderr": 0.03493950380131184, + "acc_norm": 0.22758620689655173, + "acc_norm_stderr": 0.03493950380131184 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.041583075330832865, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.041583075330832865 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.31512605042016806, + "acc_stderr": 0.030176808288974337, + "acc_norm": 0.31512605042016806, + "acc_norm_stderr": 0.030176808288974337 + }, + 
"harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2205128205128205, + "acc_stderr": 0.02102067268082791, + "acc_norm": 0.2205128205128205, + "acc_norm_stderr": 0.02102067268082791 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.18, + "acc_stderr": 0.038612291966536955, + "acc_norm": 0.18, + "acc_norm_stderr": 0.038612291966536955 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.25, + "acc_stderr": 0.04186091791394607, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04186091791394607 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2660098522167488, + "acc_stderr": 0.03108982600293752, + "acc_norm": 0.2660098522167488, + "acc_norm_stderr": 0.03108982600293752 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3, + "acc_stderr": 0.02606936229533513, + "acc_norm": 0.3, + "acc_norm_stderr": 0.02606936229533513 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.23076923076923078, + "acc_stderr": 0.027601921381417607, + "acc_norm": 0.23076923076923078, + "acc_norm_stderr": 0.027601921381417607 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.25660377358490566, + "acc_stderr": 0.026880647889051968, + "acc_norm": 0.25660377358490566, + "acc_norm_stderr": 0.026880647889051968 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2545454545454545, + "acc_stderr": 0.04172343038705383, + "acc_norm": 0.2545454545454545, + "acc_norm_stderr": 0.04172343038705383 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.02784081149587194, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.02784081149587194 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.03757949922943342, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.03757949922943342 + }, + 
"harness|ko_mmlu_sociology|5": { + "acc": 0.25870646766169153, + "acc_stderr": 0.03096590312357303, + "acc_norm": 0.25870646766169153, + "acc_norm_stderr": 0.03096590312357303 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2254335260115607, + "acc_stderr": 0.03186209851641144, + "acc_norm": 0.2254335260115607, + "acc_norm_stderr": 0.03186209851641144 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2566137566137566, + "acc_stderr": 0.022494510767503154, + "acc_norm": 0.2566137566137566, + "acc_norm_stderr": 0.022494510767503154 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2638888888888889, + "acc_stderr": 0.03685651095897532, + "acc_norm": 0.2638888888888889, + "acc_norm_stderr": 0.03685651095897532 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816505, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816505 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932269, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932269 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.24855491329479767, + "acc_stderr": 0.023267528432100174, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 0.023267528432100174 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.31901840490797545, + "acc_stderr": 0.03661997551073836, + "acc_norm": 0.31901840490797545, + "acc_norm_stderr": 0.03661997551073836 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2623456790123457, + "acc_stderr": 0.024477222856135114, + "acc_norm": 0.2623456790123457, + "acc_norm_stderr": 0.024477222856135114 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.33678756476683935, + "acc_stderr": 0.03410780251836184, + "acc_norm": 0.33678756476683935, + "acc_norm_stderr": 
0.03410780251836184 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.20175438596491227, + "acc_stderr": 0.037752050135836386, + "acc_norm": 0.20175438596491227, + "acc_norm_stderr": 0.037752050135836386 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.24220183486238533, + "acc_stderr": 0.01836817630659862, + "acc_norm": 0.24220183486238533, + "acc_norm_stderr": 0.01836817630659862 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.23015873015873015, + "acc_stderr": 0.03764950879790606, + "acc_norm": 0.23015873015873015, + "acc_norm_stderr": 0.03764950879790606 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.024288619466046102, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.024288619466046102 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.18, + "acc_stderr": 0.03861229196653695, + "acc_norm": 0.18, + "acc_norm_stderr": 0.03861229196653695 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.256198347107438, + "acc_stderr": 0.039849796533028704, + "acc_norm": 0.256198347107438, + "acc_norm_stderr": 0.039849796533028704 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.21710526315789475, + "acc_stderr": 0.033550453048829226, + "acc_norm": 0.21710526315789475, + "acc_norm_stderr": 0.033550453048829226 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.24019607843137256, + "acc_stderr": 0.01728276069516743, + "acc_norm": 0.24019607843137256, + "acc_norm_stderr": 0.01728276069516743 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2553191489361702, + "acc_stderr": 0.02601199293090201, + "acc_norm": 0.2553191489361702, + "acc_norm_stderr": 0.02601199293090201 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.21428571428571427, + "acc_stderr": 0.03894641120044793, + "acc_norm": 0.21428571428571427, + "acc_norm_stderr": 0.03894641120044793 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.46296296296296297, + "acc_stderr": 
0.03400603625538272, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.03400603625538272 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24692737430167597, + "acc_stderr": 0.014422292204808852, + "acc_norm": 0.24692737430167597, + "acc_norm_stderr": 0.014422292204808852 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.030161911930767102, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.030161911930767102 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3795918367346939, + "acc_stderr": 0.03106721126287249, + "acc_norm": 0.3795918367346939, + "acc_norm_stderr": 0.03106721126287249 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2109704641350211, + "acc_stderr": 0.02655837250266192, + "acc_norm": 0.2109704641350211, + "acc_norm_stderr": 0.02655837250266192 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.23468057366362452, + "acc_stderr": 0.010824026872449344, + "acc_norm": 0.23468057366362452, + "acc_norm_stderr": 0.010824026872449344 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.25, + "acc_stderr": 0.03039153369274154, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03039153369274154 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.22424242424242424, + "acc_stderr": 0.03256866661681102, + "acc_norm": 0.22424242424242424, + "acc_norm_stderr": 0.03256866661681102 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.25091799265605874, + "mc1_stderr": 0.015176985027707682, + "mc2": 0.4116568832959107, + "mc2_stderr": 0.015044504977529799 + }, + "harness|ko_commongen_v2|2": { + "acc": 
0.27744982290436837, + "acc_stderr": 0.015393630236605975, + "acc_norm": 0.3400236127508855, + "acc_norm_stderr": 0.016286717220737674 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + 
"harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "EleutherAI/polyglot-ko-1.3b", + "model_sha": "557e162cf6e944fdbae05bab2e45d066a125eacb", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/EleutherAI/polyglot-ko-12.8b/result_2023-09-26 09:55:07.json b/EleutherAI/polyglot-ko-12.8b/result_2023-09-26 09:55:07.json new file mode 100644 index 0000000000000000000000000000000000000000..d1a30257ad7684dfa258c0524fe16e2e6f6dec7c --- /dev/null +++ b/EleutherAI/polyglot-ko-12.8b/result_2023-09-26 09:55:07.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2858361774744027, + "acc_stderr": 0.013203196088537365, + "acc_norm": 0.33532423208191126, + "acc_norm_stderr": 0.013796182947785562 + }, + "harness|ko_hellaswag|10": { + "acc": 0.385381398127863, + "acc_stderr": 0.004856906473719383, + "acc_norm": 0.5027882891854212, + "acc_norm_stderr": 0.004989703824167094 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.30994152046783624, + "acc_stderr": 
0.03546976959393161, + "acc_norm": 0.30994152046783624, + "acc_norm_stderr": 0.03546976959393161 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.18446601941747573, + "acc_stderr": 0.03840423627288276, + "acc_norm": 0.18446601941747573, + "acc_norm_stderr": 0.03840423627288276 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.015671006009339572, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.015671006009339572 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.22962962962962963, + "acc_stderr": 0.036333844140734636, + "acc_norm": 0.22962962962962963, + "acc_norm_stderr": 0.036333844140734636 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2127659574468085, + "acc_stderr": 0.026754391348039787, + "acc_norm": 0.2127659574468085, + "acc_norm_stderr": 0.026754391348039787 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.21686746987951808, + "acc_stderr": 0.03208284450356365, + "acc_norm": 0.21686746987951808, + "acc_norm_stderr": 0.03208284450356365 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.31189710610932475, + "acc_stderr": 0.02631185807185416, + "acc_norm": 0.31189710610932475, + "acc_norm_stderr": 0.02631185807185416 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.20179372197309417, + "acc_stderr": 0.02693611191280227, + "acc_norm": 0.20179372197309417, + "acc_norm_stderr": 0.02693611191280227 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.22900763358778625, + "acc_stderr": 0.036853466317118506, + "acc_norm": 0.22900763358778625, + "acc_norm_stderr": 0.036853466317118506 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.24242424242424243, + 
"acc_stderr": 0.03053289223393203, + "acc_norm": 0.24242424242424243, + "acc_norm_stderr": 0.03053289223393203 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2896551724137931, + "acc_stderr": 0.03780019230438014, + "acc_norm": 0.2896551724137931, + "acc_norm_stderr": 0.03780019230438014 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.04617034827006716, + "acc_norm": 0.3137254901960784, + "acc_norm_stderr": 0.04617034827006716 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.23109243697478993, + "acc_stderr": 0.027381406927868963, + "acc_norm": 0.23109243697478993, + "acc_norm_stderr": 0.027381406927868963 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.21025641025641026, + "acc_stderr": 0.020660597485026928, + "acc_norm": 0.21025641025641026, + "acc_norm_stderr": 0.020660597485026928 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.25, + "acc_stderr": 0.04186091791394607, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04186091791394607 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.26108374384236455, + "acc_stderr": 0.0309037969521145, + "acc_norm": 0.26108374384236455, + "acc_norm_stderr": 0.0309037969521145 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.25161290322580643, + "acc_stderr": 0.024685979286239963, + "acc_norm": 0.25161290322580643, + "acc_norm_stderr": 0.024685979286239963 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.23076923076923078, + "acc_stderr": 0.027601921381417604, + "acc_norm": 0.23076923076923078, + "acc_norm_stderr": 0.027601921381417604 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + 
"acc": 0.23773584905660378, + "acc_stderr": 0.026199808807561932, + "acc_norm": 0.23773584905660378, + "acc_norm_stderr": 0.026199808807561932 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03955932861795833, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03955932861795833 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.02684205787383371, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.02684205787383371 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + "acc_stderr": 0.03684881521389024, + "acc_norm": 0.2847682119205298, + "acc_norm_stderr": 0.03684881521389024 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.263681592039801, + "acc_stderr": 0.03115715086935554, + "acc_norm": 0.263681592039801, + "acc_norm_stderr": 0.03115715086935554 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.24855491329479767, + "acc_stderr": 0.03295304696818317, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 0.03295304696818317 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2671957671957672, + "acc_stderr": 0.022789673145776578, + "acc_norm": 0.2671957671957672, + "acc_norm_stderr": 0.022789673145776578 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2569444444444444, + "acc_stderr": 0.036539469694421, + "acc_norm": 0.2569444444444444, + "acc_norm_stderr": 0.036539469694421 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036846, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036846 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.23121387283236994, + "acc_stderr": 0.022698657167855716, + "acc_norm": 0.23121387283236994, + "acc_norm_stderr": 0.022698657167855716 + }, + 
"harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2883435582822086, + "acc_stderr": 0.035590395316173425, + "acc_norm": 0.2883435582822086, + "acc_norm_stderr": 0.035590395316173425 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.024922001168886338, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.024922001168886338 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.2694300518134715, + "acc_stderr": 0.03201867122877794, + "acc_norm": 0.2694300518134715, + "acc_norm_stderr": 0.03201867122877794 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.03999423879281336, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.03999423879281336 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.26422018348623855, + "acc_stderr": 0.0189041641715102, + "acc_norm": 0.26422018348623855, + "acc_norm_stderr": 0.0189041641715102 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.19047619047619047, + "acc_stderr": 0.035122074123020534, + "acc_norm": 0.19047619047619047, + "acc_norm_stderr": 0.035122074123020534 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2581699346405229, + "acc_stderr": 0.02505850331695815, + "acc_norm": 0.2581699346405229, + "acc_norm_stderr": 0.02505850331695815 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.22, + "acc_stderr": 0.041633319989322674, + "acc_norm": 0.22, + "acc_norm_stderr": 0.041633319989322674 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.36363636363636365, + "acc_stderr": 0.043913262867240704, + "acc_norm": 0.36363636363636365, + "acc_norm_stderr": 0.043913262867240704 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.28289473684210525, + "acc_stderr": 0.03665349695640767, + "acc_norm": 0.28289473684210525, + 
"acc_norm_stderr": 0.03665349695640767 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.20098039215686275, + "acc_stderr": 0.016211938889655574, + "acc_norm": 0.20098039215686275, + "acc_norm_stderr": 0.016211938889655574 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.22695035460992907, + "acc_stderr": 0.02498710636564298, + "acc_norm": 0.22695035460992907, + "acc_norm_stderr": 0.02498710636564298 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25, + "acc_stderr": 0.04109974682633932, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04109974682633932 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.03275773486100999, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.03275773486100999 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24692737430167597, + "acc_stderr": 0.014422292204808852, + "acc_norm": 0.24692737430167597, + "acc_norm_stderr": 0.014422292204808852 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.44485294117647056, + "acc_stderr": 0.030187532060329383, + "acc_norm": 0.44485294117647056, + "acc_norm_stderr": 0.030187532060329383 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.22857142857142856, + "acc_stderr": 0.026882144922307748, + "acc_norm": 0.22857142857142856, + "acc_norm_stderr": 0.026882144922307748 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.32489451476793246, + "acc_stderr": 0.030486039389105303, + "acc_norm": 0.32489451476793246, + "acc_norm_stderr": 0.030486039389105303 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.25684485006518903, + 
"acc_stderr": 0.011158455853098857, + "acc_norm": 0.25684485006518903, + "acc_norm_stderr": 0.011158455853098857 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.27941176470588236, + "acc_stderr": 0.031493281045079556, + "acc_norm": 0.27941176470588236, + "acc_norm_stderr": 0.031493281045079556 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.23030303030303031, + "acc_stderr": 0.032876667586034886, + "acc_norm": 0.23030303030303031, + "acc_norm_stderr": 0.032876667586034886 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2350061199510404, + "mc1_stderr": 0.014843061507731613, + "mc2": 0.390667104295536, + "mc2_stderr": 0.014736649975849761 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.30932703659976385, + "acc_stderr": 0.01589132050552089, + "acc_norm": 0.3990554899645809, + "acc_norm_stderr": 0.0168363772928493 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + 
"harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "EleutherAI/polyglot-ko-12.8b", + "model_sha": "09dfc839067bf44e7f52976eca8adbc17f04e1b0", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/EleutherAI/polyglot-ko-3.8b/result_2023-09-26 09:54:58.json 
b/EleutherAI/polyglot-ko-3.8b/result_2023-09-26 09:54:58.json new file mode 100644 index 0000000000000000000000000000000000000000..abb21b3618c51a6aab88db50984e75adfdd941cc --- /dev/null +++ b/EleutherAI/polyglot-ko-3.8b/result_2023-09-26 09:54:58.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2525597269624573, + "acc_stderr": 0.01269672898020771, + "acc_norm": 0.3046075085324232, + "acc_norm_stderr": 0.013449522109932494 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3511252738498307, + "acc_stderr": 0.004763465139038552, + "acc_norm": 0.4420434176458873, + "acc_norm_stderr": 0.004956147046108961 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.03188578017686398, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.03188578017686398 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.30097087378640774, + "acc_stderr": 0.045416094465039476, + "acc_norm": 0.30097087378640774, + "acc_norm_stderr": 0.045416094465039476 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.23627075351213284, + "acc_stderr": 0.015190473717037497, + "acc_norm": 0.23627075351213284, + "acc_norm_stderr": 0.015190473717037497 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.23703703703703705, + "acc_stderr": 0.03673731683969506, + "acc_norm": 0.23703703703703705, + "acc_norm_stderr": 0.03673731683969506 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2, + "acc_stderr": 0.026148818018424502, + "acc_norm": 0.2, + "acc_norm_stderr": 0.026148818018424502 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.25301204819277107, + "acc_stderr": 0.03384429155233135, + "acc_norm": 0.25301204819277107, + "acc_norm_stderr": 0.03384429155233135 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.24437299035369775, + "acc_stderr": 0.024406162094668886, 
+ "acc_norm": 0.24437299035369775, + "acc_norm_stderr": 0.024406162094668886 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.21524663677130046, + "acc_stderr": 0.027584066602208263, + "acc_norm": 0.21524663677130046, + "acc_norm_stderr": 0.027584066602208263 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.19083969465648856, + "acc_stderr": 0.034465133507525954, + "acc_norm": 0.19083969465648856, + "acc_norm_stderr": 0.034465133507525954 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.31313131313131315, + "acc_stderr": 0.033042050878136525, + "acc_norm": 0.31313131313131315, + "acc_norm_stderr": 0.033042050878136525 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2413793103448276, + "acc_stderr": 0.03565998174135302, + "acc_norm": 0.2413793103448276, + "acc_norm_stderr": 0.03565998174135302 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.19607843137254902, + "acc_stderr": 0.03950581861179962, + "acc_norm": 0.19607843137254902, + "acc_norm_stderr": 0.03950581861179962 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3487394957983193, + "acc_stderr": 0.030956636328566548, + "acc_norm": 0.3487394957983193, + "acc_norm_stderr": 0.030956636328566548 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3564102564102564, + "acc_stderr": 0.024283140529467295, + "acc_norm": 0.3564102564102564, + "acc_norm_stderr": 0.024283140529467295 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.16, + "acc_stderr": 0.03684529491774709, + "acc_norm": 0.16, + "acc_norm_stderr": 0.03684529491774709 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.21296296296296297, + 
"acc_stderr": 0.03957835471980981, + "acc_norm": 0.21296296296296297, + "acc_norm_stderr": 0.03957835471980981 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.29064039408866993, + "acc_stderr": 0.03194740072265541, + "acc_norm": 0.29064039408866993, + "acc_norm_stderr": 0.03194740072265541 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3032258064516129, + "acc_stderr": 0.026148685930671742, + "acc_norm": 0.3032258064516129, + "acc_norm_stderr": 0.026148685930671742 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.19658119658119658, + "acc_stderr": 0.02603538609895129, + "acc_norm": 0.19658119658119658, + "acc_norm_stderr": 0.02603538609895129 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3283018867924528, + "acc_stderr": 0.02890159361241178, + "acc_norm": 0.3283018867924528, + "acc_norm_stderr": 0.02890159361241178 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.23636363636363636, + "acc_stderr": 0.04069306319721376, + "acc_norm": 0.23636363636363636, + "acc_norm_stderr": 0.04069306319721376 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.02671924078371216, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.02671924078371216 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.32450331125827814, + "acc_stderr": 0.03822746937658753, + "acc_norm": 0.32450331125827814, + "acc_norm_stderr": 0.03822746937658753 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.26865671641791045, + "acc_stderr": 0.03134328358208954, + "acc_norm": 0.26865671641791045, + "acc_norm_stderr": 0.03134328358208954 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.32947976878612717, + "acc_stderr": 0.03583901754736412, + "acc_norm": 0.32947976878612717, + "acc_norm_stderr": 0.03583901754736412 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.24603174603174602, + "acc_stderr": 0.022182037202948368, + "acc_norm": 0.24603174603174602, + 
"acc_norm_stderr": 0.022182037202948368 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2569444444444444, + "acc_stderr": 0.03653946969442099, + "acc_norm": 0.2569444444444444, + "acc_norm_stderr": 0.03653946969442099 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2398843930635838, + "acc_stderr": 0.022989592543123567, + "acc_norm": 0.2398843930635838, + "acc_norm_stderr": 0.022989592543123567 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3312883435582822, + "acc_stderr": 0.03697983910025588, + "acc_norm": 0.3312883435582822, + "acc_norm_stderr": 0.03697983910025588 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.26851851851851855, + "acc_stderr": 0.024659685185967277, + "acc_norm": 0.26851851851851855, + "acc_norm_stderr": 0.024659685185967277 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.3626943005181347, + "acc_stderr": 0.034697137917043715, + "acc_norm": 0.3626943005181347, + "acc_norm_stderr": 0.034697137917043715 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.22807017543859648, + "acc_stderr": 0.03947152782669415, + "acc_norm": 0.22807017543859648, + "acc_norm_stderr": 0.03947152782669415 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.25688073394495414, + "acc_stderr": 0.018732492928342448, + "acc_norm": 0.25688073394495414, + "acc_norm_stderr": 0.018732492928342448 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3492063492063492, + "acc_stderr": 0.04263906892795132, + "acc_norm": 
0.3492063492063492, + "acc_norm_stderr": 0.04263906892795132 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.024954184324879905, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.024954184324879905 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.1652892561983471, + "acc_stderr": 0.03390780612972776, + "acc_norm": 0.1652892561983471, + "acc_norm_stderr": 0.03390780612972776 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.29605263157894735, + "acc_stderr": 0.03715062154998904, + "acc_norm": 0.29605263157894735, + "acc_norm_stderr": 0.03715062154998904 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.21895424836601307, + "acc_stderr": 0.016729937565537537, + "acc_norm": 0.21895424836601307, + "acc_norm_stderr": 0.016729937565537537 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.24822695035460993, + "acc_stderr": 0.025770015644290396, + "acc_norm": 0.24822695035460993, + "acc_norm_stderr": 0.025770015644290396 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.15178571428571427, + "acc_stderr": 0.03405702838185692, + "acc_norm": 0.15178571428571427, + "acc_norm_stderr": 0.03405702838185692 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.0340470532865388, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.0340470532865388 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.26145251396648045, + "acc_stderr": 0.014696599650364546, + "acc_norm": 0.26145251396648045, + "acc_norm_stderr": 0.014696599650364546 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.22, + 
"acc_stderr": 0.04163331998932269, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932269 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.44485294117647056, + "acc_stderr": 0.030187532060329383, + "acc_norm": 0.44485294117647056, + "acc_norm_stderr": 0.030187532060329383 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.39591836734693875, + "acc_stderr": 0.03130802899065685, + "acc_norm": 0.39591836734693875, + "acc_norm_stderr": 0.03130802899065685 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.23628691983122363, + "acc_stderr": 0.02765215314415926, + "acc_norm": 0.23628691983122363, + "acc_norm_stderr": 0.02765215314415926 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.24445893089960888, + "acc_stderr": 0.010976425013113912, + "acc_norm": 0.24445893089960888, + "acc_norm_stderr": 0.010976425013113912 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.03096451792692341, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.03096451792692341 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.296969696969697, + "acc_stderr": 0.03567969772268046, + "acc_norm": 0.296969696969697, + "acc_norm_stderr": 0.03567969772268046 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24969400244798043, + "mc1_stderr": 0.015152286907148125, + "mc2": 0.40454723614569765, + "mc2_stderr": 0.014981033793701278 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.282172373081464, + "acc_stderr": 0.015473271583988433, + "acc_norm": 0.3707201889020071, + "acc_norm_stderr": 0.016605801289212605 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 
1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + 
"harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "EleutherAI/polyglot-ko-3.8b", + "model_sha": "3c696a71c16b4a4622b7cabf6c5da4ba5a73b548", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/EleutherAI/polyglot-ko-5.8b/result_2023-09-24 15:21:38.json b/EleutherAI/polyglot-ko-5.8b/result_2023-09-24 15:21:38.json new file mode 100644 index 0000000000000000000000000000000000000000..7b3baeeb1120cc18456c3c4dc216ad5740f8b04a --- /dev/null +++ b/EleutherAI/polyglot-ko-5.8b/result_2023-09-24 15:21:38.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2687713310580205, + "acc_stderr": 0.012955065963710675, + "acc_norm": 0.32764505119453924, + "acc_norm_stderr": 0.013715847940719339 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3690499900418243, + "acc_stderr": 0.004815613144385398, + "acc_norm": 0.4814777932682733, + "acc_norm_stderr": 0.004986356526063965 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.17543859649122806, + "acc_stderr": 0.029170885500727665, + "acc_norm": 0.17543859649122806, + "acc_norm_stderr": 0.029170885500727665 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.3592233009708738, + "acc_stderr": 0.04750458399041693, + "acc_norm": 0.3592233009708738, + "acc_norm_stderr": 0.04750458399041693 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.20178799489144317, + "acc_stderr": 0.014351702181636861, + "acc_norm": 0.20178799489144317, + "acc_norm_stderr": 0.014351702181636861 + }, + 
"harness|ko_mmlu_anatomy|5": { + "acc": 0.22962962962962963, + "acc_stderr": 0.036333844140734636, + "acc_norm": 0.22962962962962963, + "acc_norm_stderr": 0.036333844140734636 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.22, + "acc_stderr": 0.0416333199893227, + "acc_norm": 0.22, + "acc_norm_stderr": 0.0416333199893227 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.17446808510638298, + "acc_stderr": 0.02480944233550398, + "acc_norm": 0.17446808510638298, + "acc_norm_stderr": 0.02480944233550398 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.1927710843373494, + "acc_stderr": 0.030709824050565264, + "acc_norm": 0.1927710843373494, + "acc_norm_stderr": 0.030709824050565264 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.24115755627009647, + "acc_stderr": 0.024296594034763426, + "acc_norm": 0.24115755627009647, + "acc_norm_stderr": 0.024296594034763426 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.11659192825112108, + "acc_stderr": 0.021539639816244467, + "acc_norm": 0.11659192825112108, + "acc_norm_stderr": 0.021539639816244467 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2748091603053435, + "acc_stderr": 0.03915345408847835, + "acc_norm": 0.2748091603053435, + "acc_norm_stderr": 0.03915345408847835 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909284, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909284 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.35353535353535354, + "acc_stderr": 0.03406086723547153, + "acc_norm": 0.35353535353535354, + "acc_norm_stderr": 0.03406086723547153 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2413793103448276, + "acc_stderr": 0.03565998174135302, + "acc_norm": 0.2413793103448276, + "acc_norm_stderr": 0.03565998174135302 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.37254901960784315, + "acc_stderr": 0.048108401480826346, + "acc_norm": 0.37254901960784315, + "acc_norm_stderr": 
0.048108401480826346 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3487394957983193, + "acc_stderr": 0.030956636328566548, + "acc_norm": 0.3487394957983193, + "acc_norm_stderr": 0.030956636328566548 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3641025641025641, + "acc_stderr": 0.024396672985094778, + "acc_norm": 0.3641025641025641, + "acc_norm_stderr": 0.024396672985094778 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.18, + "acc_stderr": 0.03861229196653694, + "acc_norm": 0.18, + "acc_norm_stderr": 0.03861229196653694 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.16, + "acc_stderr": 0.03684529491774709, + "acc_norm": 0.16, + "acc_norm_stderr": 0.03684529491774709 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.21296296296296297, + "acc_stderr": 0.03957835471980981, + "acc_norm": 0.21296296296296297, + "acc_norm_stderr": 0.03957835471980981 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.28078817733990147, + "acc_stderr": 0.0316185633535861, + "acc_norm": 0.28078817733990147, + "acc_norm_stderr": 0.0316185633535861 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3161290322580645, + "acc_stderr": 0.026450874489042764, + "acc_norm": 0.3161290322580645, + "acc_norm_stderr": 0.026450874489042764 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.19658119658119658, + "acc_stderr": 0.02603538609895129, + "acc_norm": 0.19658119658119658, + "acc_norm_stderr": 0.02603538609895129 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2981132075471698, + "acc_stderr": 0.028152837942493864, + "acc_norm": 0.2981132075471698, + "acc_norm_stderr": 0.028152837942493864 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.22727272727272727, + "acc_stderr": 0.040139645540727735, + "acc_norm": 0.22727272727272727, + "acc_norm_stderr": 0.040139645540727735 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.02684205787383371, 
+ "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.02684205787383371 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33112582781456956, + "acc_stderr": 0.038425817186598696, + "acc_norm": 0.33112582781456956, + "acc_norm_stderr": 0.038425817186598696 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.26865671641791045, + "acc_stderr": 0.03134328358208954, + "acc_norm": 0.26865671641791045, + "acc_norm_stderr": 0.03134328358208954 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3352601156069364, + "acc_stderr": 0.03599586301247078, + "acc_norm": 0.3352601156069364, + "acc_norm_stderr": 0.03599586301247078 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2698412698412698, + "acc_stderr": 0.022860838309232072, + "acc_norm": 0.2698412698412698, + "acc_norm_stderr": 0.022860838309232072 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2638888888888889, + "acc_stderr": 0.03685651095897532, + "acc_norm": 0.2638888888888889, + "acc_norm_stderr": 0.03685651095897532 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2138728323699422, + "acc_stderr": 0.022075709251757173, + "acc_norm": 0.2138728323699422, + "acc_norm_stderr": 0.022075709251757173 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2331288343558282, + "acc_stderr": 0.0332201579577674, + "acc_norm": 0.2331288343558282, + "acc_norm_stderr": 0.0332201579577674 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.22530864197530864, + "acc_stderr": 0.02324620264781975, + "acc_norm": 0.22530864197530864, + "acc_norm_stderr": 0.02324620264781975 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 
0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.36787564766839376, + "acc_stderr": 0.034801756684660366, + "acc_norm": 0.36787564766839376, + "acc_norm_stderr": 0.034801756684660366 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.039994238792813365, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.039994238792813365 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3504587155963303, + "acc_stderr": 0.02045607759982446, + "acc_norm": 0.3504587155963303, + "acc_norm_stderr": 0.02045607759982446 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.36507936507936506, + "acc_stderr": 0.04306241259127153, + "acc_norm": 0.36507936507936506, + "acc_norm_stderr": 0.04306241259127153 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.026090162504279053, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.026090162504279053 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.14049586776859505, + "acc_stderr": 0.031722334260021585, + "acc_norm": 0.14049586776859505, + "acc_norm_stderr": 0.031722334260021585 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3355263157894737, + "acc_stderr": 0.038424985593952694, + "acc_norm": 0.3355263157894737, + "acc_norm_stderr": 0.038424985593952694 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2173202614379085, + "acc_stderr": 0.016684820929148598, + "acc_norm": 0.2173202614379085, + "acc_norm_stderr": 0.016684820929148598 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.24113475177304963, + "acc_stderr": 0.02551873104953776, + "acc_norm": 0.24113475177304963, + "acc_norm_stderr": 0.02551873104953776 + }, + 
"harness|ko_mmlu_machine_learning|5": { + "acc": 0.16964285714285715, + "acc_stderr": 0.03562367850095391, + "acc_norm": 0.16964285714285715, + "acc_norm_stderr": 0.03562367850095391 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.0340470532865388, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.0340470532865388 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27262569832402234, + "acc_stderr": 0.014893391735249608, + "acc_norm": 0.27262569832402234, + "acc_norm_stderr": 0.014893391735249608 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.18, + "acc_stderr": 0.038612291966536934, + "acc_norm": 0.18, + "acc_norm_stderr": 0.038612291966536934 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4485294117647059, + "acc_stderr": 0.030211479609121593, + "acc_norm": 0.4485294117647059, + "acc_norm_stderr": 0.030211479609121593 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.37142857142857144, + "acc_stderr": 0.030932858792789834, + "acc_norm": 0.37142857142857144, + "acc_norm_stderr": 0.030932858792789834 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.20253164556962025, + "acc_stderr": 0.026160568246601464, + "acc_norm": 0.20253164556962025, + "acc_norm_stderr": 0.026160568246601464 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2438070404172099, + "acc_stderr": 0.010966507972178475, + "acc_norm": 0.2438070404172099, + "acc_norm_stderr": 0.010966507972178475 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.25980392156862747, + "acc_stderr": 0.030778554678693257, + "acc_norm": 0.25980392156862747, + "acc_norm_stderr": 0.030778554678693257 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2727272727272727, + "acc_stderr": 
0.03477691162163659, + "acc_norm": 0.2727272727272727, + "acc_norm_stderr": 0.03477691162163659 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2533659730722154, + "mc1_stderr": 0.01522589934082683, + "mc2": 0.3923103125697379, + "mc2_stderr": 0.014648106435610566 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2857142857142857, + "acc_stderr": 0.01553162078698674, + "acc_norm": 0.3565525383707202, + "acc_norm_stderr": 0.016467706981527448 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + 
"harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "EleutherAI/polyglot-ko-5.8b", + "model_sha": "581a4c3eebfac23536b3c9676bcfb05c6a97baa2", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/EnumaInc/llama-8b-ko-slimorca-45000/result_2024-04-25 14:59:26.json b/EnumaInc/llama-8b-ko-slimorca-45000/result_2024-04-25 14:59:26.json new file mode 100644 index 0000000000000000000000000000000000000000..edb8c3e8873559cfa73bd45d027704feb2d4b7d3 --- /dev/null +++ b/EnumaInc/llama-8b-ko-slimorca-45000/result_2024-04-25 14:59:26.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3703071672354949, + "acc_stderr": 0.01411129875167495, + "acc_norm": 
0.431740614334471, + "acc_norm_stderr": 0.014474591427196202 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3882692690699064, + "acc_stderr": 0.004863603638367454, + "acc_norm": 0.5113523202549293, + "acc_norm_stderr": 0.004988495127747284 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6198830409356725, + "acc_stderr": 0.037229657413855394, + "acc_norm": 0.6198830409356725, + "acc_norm_stderr": 0.037229657413855394 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6019417475728155, + "acc_stderr": 0.048467482539772386, + "acc_norm": 0.6019417475728155, + "acc_norm_stderr": 0.048467482539772386 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.524904214559387, + "acc_stderr": 0.017857770704901035, + "acc_norm": 0.524904214559387, + "acc_norm_stderr": 0.017857770704901035 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4888888888888889, + "acc_stderr": 0.04318275491977976, + "acc_norm": 0.4888888888888889, + "acc_norm_stderr": 0.04318275491977976 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.44680851063829785, + "acc_stderr": 0.0325005368436584, + "acc_norm": 0.44680851063829785, + "acc_norm_stderr": 0.0325005368436584 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4036144578313253, + "acc_stderr": 0.03819486140758397, + "acc_norm": 0.4036144578313253, + "acc_norm_stderr": 0.03819486140758397 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5144694533762058, + "acc_stderr": 0.02838619808417768, + "acc_norm": 0.5144694533762058, + "acc_norm_stderr": 0.02838619808417768 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5022421524663677, + "acc_stderr": 0.033557465352232634, + "acc_norm": 0.5022421524663677, + "acc_norm_stderr": 0.033557465352232634 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.549618320610687, + "acc_stderr": 0.04363643698524779, + 
"acc_norm": 0.549618320610687, + "acc_norm_stderr": 0.04363643698524779 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6262626262626263, + "acc_stderr": 0.034468977386593325, + "acc_norm": 0.6262626262626263, + "acc_norm_stderr": 0.034468977386593325 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5724137931034483, + "acc_stderr": 0.04122737111370332, + "acc_norm": 0.5724137931034483, + "acc_norm_stderr": 0.04122737111370332 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.04533838195929775, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.04533838195929775 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5168067226890757, + "acc_stderr": 0.03246013680375308, + "acc_norm": 0.5168067226890757, + "acc_norm_stderr": 0.03246013680375308 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.46923076923076923, + "acc_stderr": 0.025302958890850154, + "acc_norm": 0.46923076923076923, + "acc_norm_stderr": 0.025302958890850154 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.55, + "acc_stderr": 0.04999999999999999, + "acc_norm": 0.55, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5370370370370371, + "acc_stderr": 0.04820403072760627, + "acc_norm": 0.5370370370370371, + "acc_norm_stderr": 0.04820403072760627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4236453201970443, + "acc_stderr": 0.034767257476490364, + "acc_norm": 0.4236453201970443, + "acc_norm_stderr": 0.034767257476490364 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5064516129032258, + 
"acc_stderr": 0.02844163823354051, + "acc_norm": 0.5064516129032258, + "acc_norm_stderr": 0.02844163823354051 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6837606837606838, + "acc_stderr": 0.03046365674734026, + "acc_norm": 0.6837606837606838, + "acc_norm_stderr": 0.03046365674734026 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5169811320754717, + "acc_stderr": 0.030755120364119898, + "acc_norm": 0.5169811320754717, + "acc_norm_stderr": 0.030755120364119898 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5272727272727272, + "acc_stderr": 0.04782001791380061, + "acc_norm": 0.5272727272727272, + "acc_norm_stderr": 0.04782001791380061 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.029116617606083018, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.029116617606083018 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.038020397601079024, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.038020397601079024 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6716417910447762, + "acc_stderr": 0.033206858897443244, + "acc_norm": 0.6716417910447762, + "acc_norm_stderr": 0.033206858897443244 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4393063583815029, + "acc_stderr": 0.037842719328874674, + "acc_norm": 0.4393063583815029, + "acc_norm_stderr": 0.037842719328874674 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3439153439153439, + "acc_stderr": 0.024464426625596433, + "acc_norm": 0.3439153439153439, + "acc_norm_stderr": 0.024464426625596433 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3680555555555556, + "acc_stderr": 0.04032999053960719, + "acc_norm": 0.3680555555555556, + "acc_norm_stderr": 0.04032999053960719 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + 
"harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.69, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.69, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5289017341040463, + "acc_stderr": 0.02687408588351835, + "acc_norm": 0.5289017341040463, + "acc_norm_stderr": 0.02687408588351835 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.43558282208588955, + "acc_stderr": 0.03895632464138937, + "acc_norm": 0.43558282208588955, + "acc_norm_stderr": 0.03895632464138937 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4537037037037037, + "acc_stderr": 0.027701228468542595, + "acc_norm": 0.4537037037037037, + "acc_norm_stderr": 0.027701228468542595 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5803108808290155, + "acc_stderr": 0.035615873276858834, + "acc_norm": 0.5803108808290155, + "acc_norm_stderr": 0.035615873276858834 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2982456140350877, + "acc_stderr": 0.04303684033537316, + "acc_norm": 0.2982456140350877, + "acc_norm_stderr": 0.04303684033537316 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5577981651376147, + "acc_stderr": 0.021293613207520202, + "acc_norm": 0.5577981651376147, + "acc_norm_stderr": 0.021293613207520202 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.042163702135578345, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.042163702135578345 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.46078431372549017, + "acc_stderr": 0.028541722692618874, + "acc_norm": 0.46078431372549017, + "acc_norm_stderr": 0.028541722692618874 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956911 + 
}, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6694214876033058, + "acc_stderr": 0.04294340845212094, + "acc_norm": 0.6694214876033058, + "acc_norm_stderr": 0.04294340845212094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4934210526315789, + "acc_stderr": 0.040685900502249704, + "acc_norm": 0.4934210526315789, + "acc_norm_stderr": 0.040685900502249704 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3758169934640523, + "acc_stderr": 0.019594021136577447, + "acc_norm": 0.3758169934640523, + "acc_norm_stderr": 0.019594021136577447 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3404255319148936, + "acc_stderr": 0.028267657482650144, + "acc_norm": 0.3404255319148936, + "acc_norm_stderr": 0.028267657482650144 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.375, + "acc_stderr": 0.04595091388086298, + "acc_norm": 0.375, + "acc_norm_stderr": 0.04595091388086298 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3194444444444444, + "acc_stderr": 0.0317987634217685, + "acc_norm": 0.3194444444444444, + "acc_norm_stderr": 0.0317987634217685 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27262569832402234, + "acc_stderr": 0.01489339173524962, + "acc_norm": 0.27262569832402234, + "acc_norm_stderr": 0.01489339173524962 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.375, + "acc_stderr": 0.029408372932278746, + "acc_norm": 0.375, + "acc_norm_stderr": 0.029408372932278746 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.33877551020408164, + "acc_stderr": 0.03029950656215418, + "acc_norm": 0.33877551020408164, + "acc_norm_stderr": 
0.03029950656215418 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.42616033755274263, + "acc_stderr": 0.032190357031317736, + "acc_norm": 0.42616033755274263, + "acc_norm_stderr": 0.032190357031317736 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.28748370273794005, + "acc_stderr": 0.011559337355708502, + "acc_norm": 0.28748370273794005, + "acc_norm_stderr": 0.011559337355708502 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.3480392156862745, + "acc_stderr": 0.03343311240488418, + "acc_norm": 0.3480392156862745, + "acc_norm_stderr": 0.03343311240488418 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.48484848484848486, + "acc_stderr": 0.03902551007374448, + "acc_norm": 0.48484848484848486, + "acc_norm_stderr": 0.03902551007374448 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.28886168910648713, + "mc1_stderr": 0.0158663464013843, + "mc2": 0.46502937106374664, + "mc2_stderr": 0.015372195450409798 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4852420306965762, + "acc_stderr": 0.017182864434998564, + "acc_norm": 0.5796930342384888, + "acc_norm_stderr": 0.01697059828117771 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + 
"harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "EnumaInc/llama-8b-ko-slimorca-45000", + "model_sha": 
"eab90d20ed140a8eabe19d00d3c4af2ed9cffd08", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/F24/F23-llama2-13B-x1/result_2023-11-24 10:19:15.json b/F24/F23-llama2-13B-x1/result_2023-11-24 10:19:15.json new file mode 100644 index 0000000000000000000000000000000000000000..bb70229e31cae1233fe568c3c4b6244335b3465a --- /dev/null +++ b/F24/F23-llama2-13B-x1/result_2023-11-24 10:19:15.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.34897610921501704, + "acc_stderr": 0.013928933461382504, + "acc_norm": 0.4129692832764505, + "acc_norm_stderr": 0.014388344935398326 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4160525791674965, + "acc_stderr": 0.004918951019183889, + "acc_norm": 0.5650268870742879, + "acc_norm_stderr": 0.004947402907996247 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4853801169590643, + "acc_stderr": 0.038331852752130205, + "acc_norm": 0.4853801169590643, + "acc_norm_stderr": 0.038331852752130205 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4368932038834951, + "acc_stderr": 0.04911147107365777, + "acc_norm": 0.4368932038834951, + "acc_norm_stderr": 0.04911147107365777 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5172413793103449, + "acc_stderr": 0.01786933015400371, + "acc_norm": 0.5172413793103449, + "acc_norm_stderr": 0.01786933015400371 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.5185185185185185, + "acc_stderr": 0.043163785995113245, + "acc_norm": 0.5185185185185185, + "acc_norm_stderr": 0.043163785995113245 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.40425531914893614, + "acc_stderr": 0.03208115750788683, + "acc_norm": 0.40425531914893614, + 
"acc_norm_stderr": 0.03208115750788683 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4036144578313253, + "acc_stderr": 0.03819486140758397, + "acc_norm": 0.4036144578313253, + "acc_norm_stderr": 0.03819486140758397 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4565916398713826, + "acc_stderr": 0.028290869054197608, + "acc_norm": 0.4565916398713826, + "acc_norm_stderr": 0.028290869054197608 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4977578475336323, + "acc_stderr": 0.03355746535223264, + "acc_norm": 0.4977578475336323, + "acc_norm_stderr": 0.03355746535223264 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.37404580152671757, + "acc_stderr": 0.042438692422305246, + "acc_norm": 0.37404580152671757, + "acc_norm_stderr": 0.042438692422305246 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.4696969696969697, + "acc_stderr": 0.03555804051763929, + "acc_norm": 0.4696969696969697, + "acc_norm_stderr": 0.03555804051763929 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3724137931034483, + "acc_stderr": 0.0402873153294756, + "acc_norm": 0.3724137931034483, + "acc_norm_stderr": 0.0402873153294756 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.19607843137254902, + "acc_stderr": 0.03950581861179961, + "acc_norm": 0.19607843137254902, + "acc_norm_stderr": 0.03950581861179961 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3865546218487395, + "acc_stderr": 0.0316314580755238, + "acc_norm": 0.3865546218487395, + "acc_norm_stderr": 0.0316314580755238 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.382051282051282, + "acc_stderr": 0.024635549163908223, + "acc_norm": 0.382051282051282, + "acc_norm_stderr": 0.024635549163908223 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.52, + "acc_stderr": 
0.05021167315686779, + "acc_norm": 0.52, + "acc_norm_stderr": 0.05021167315686779 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.04826217294139894, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.04826217294139894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3399014778325123, + "acc_stderr": 0.03332769068410789, + "acc_norm": 0.3399014778325123, + "acc_norm_stderr": 0.03332769068410789 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4096774193548387, + "acc_stderr": 0.02797605491534736, + "acc_norm": 0.4096774193548387, + "acc_norm_stderr": 0.02797605491534736 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6196581196581197, + "acc_stderr": 0.03180425204384099, + "acc_norm": 0.6196581196581197, + "acc_norm_stderr": 0.03180425204384099 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4, + "acc_stderr": 0.030151134457776285, + "acc_norm": 0.4, + "acc_norm_stderr": 0.030151134457776285 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5, + "acc_stderr": 0.04789131426105757, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04789131426105757 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.23333333333333334, + "acc_stderr": 0.025787874220959316, + "acc_norm": 0.23333333333333334, + "acc_norm_stderr": 0.025787874220959316 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2781456953642384, + "acc_stderr": 0.03658603262763743, + "acc_norm": 0.2781456953642384, + "acc_norm_stderr": 0.03658603262763743 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5223880597014925, + "acc_stderr": 0.03531987930208731, + "acc_norm": 0.5223880597014925, + "acc_norm_stderr": 0.03531987930208731 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.35260115606936415, + "acc_stderr": 0.036430371689585496, + 
"acc_norm": 0.35260115606936415, + "acc_norm_stderr": 0.036430371689585496 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.023068188848261114, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.023068188848261114 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3472222222222222, + "acc_stderr": 0.039812405437178615, + "acc_norm": 0.3472222222222222, + "acc_norm_stderr": 0.039812405437178615 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.45375722543352603, + "acc_stderr": 0.026803720583206167, + "acc_norm": 0.45375722543352603, + "acc_norm_stderr": 0.026803720583206167 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4110429447852761, + "acc_stderr": 0.038656978537853624, + "acc_norm": 0.4110429447852761, + "acc_norm_stderr": 0.038656978537853624 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.43209876543209874, + "acc_stderr": 0.02756301097160668, + "acc_norm": 0.43209876543209874, + "acc_norm_stderr": 0.02756301097160668 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.44559585492227977, + "acc_stderr": 0.035870149860756595, + "acc_norm": 0.44559585492227977, + "acc_norm_stderr": 0.035870149860756595 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2894736842105263, + "acc_stderr": 0.04266339443159394, + "acc_norm": 0.2894736842105263, + "acc_norm_stderr": 0.04266339443159394 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.48990825688073397, + 
"acc_stderr": 0.021432956203453306, + "acc_norm": 0.48990825688073397, + "acc_norm_stderr": 0.021432956203453306 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.20634920634920634, + "acc_stderr": 0.0361960452412425, + "acc_norm": 0.20634920634920634, + "acc_norm_stderr": 0.0361960452412425 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.39215686274509803, + "acc_stderr": 0.02795604616542452, + "acc_norm": 0.39215686274509803, + "acc_norm_stderr": 0.02795604616542452 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6033057851239669, + "acc_stderr": 0.044658697805310094, + "acc_norm": 0.6033057851239669, + "acc_norm_stderr": 0.044658697805310094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3223684210526316, + "acc_stderr": 0.038035102483515854, + "acc_norm": 0.3223684210526316, + "acc_norm_stderr": 0.038035102483515854 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.35784313725490197, + "acc_stderr": 0.019393058402355442, + "acc_norm": 0.35784313725490197, + "acc_norm_stderr": 0.019393058402355442 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2978723404255319, + "acc_stderr": 0.027281608344469414, + "acc_norm": 0.2978723404255319, + "acc_norm_stderr": 0.027281608344469414 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.04287858751340456, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.04287858751340456 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.28703703703703703, + "acc_stderr": 0.030851992993257017, + "acc_norm": 0.28703703703703703, + "acc_norm_stderr": 0.030851992993257017 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + 
"harness|ko_mmlu_college_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.027678468642144696, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.027678468642144696 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3510204081632653, + "acc_stderr": 0.03055531675557364, + "acc_norm": 0.3510204081632653, + "acc_norm_stderr": 0.03055531675557364 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5654008438818565, + "acc_stderr": 0.03226759995510145, + "acc_norm": 0.5654008438818565, + "acc_norm_stderr": 0.03226759995510145 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.29726205997392435, + "acc_stderr": 0.011673346173086045, + "acc_norm": 0.29726205997392435, + "acc_norm_stderr": 0.011673346173086045 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4068627450980392, + "acc_stderr": 0.03447891136353382, + "acc_norm": 0.4068627450980392, + "acc_norm_stderr": 0.03447891136353382 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4909090909090909, + "acc_stderr": 0.0390369864774844, + "acc_norm": 0.4909090909090909, + "acc_norm_stderr": 0.0390369864774844 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2741738066095471, + "mc1_stderr": 0.015616518497219374, + "mc2": 0.43479566764760613, + "mc2_stderr": 0.014958184938646393 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4604486422668241, + "acc_stderr": 0.017136487626049846, + "acc_norm": 0.5678866587957497, + "acc_norm_stderr": 0.017031170198851742 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + 
"harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + 
"harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "F24/F23-llama2-13B-x1", + "model_sha": "90b8a06c768a8981c6368bcbd0294a9e0f92aa79", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/F24/llama-2-koen-13b-slimOrca/result_2023-12-03 09:10:05.json b/F24/llama-2-koen-13b-slimOrca/result_2023-12-03 09:10:05.json new file mode 100644 index 0000000000000000000000000000000000000000..a5187609220be56ef46a433614e4186f667daaa3 --- /dev/null +++ b/F24/llama-2-koen-13b-slimOrca/result_2023-12-03 09:10:05.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4061433447098976, + "acc_stderr": 0.014351656690097862, + "acc_norm": 0.46245733788395904, + "acc_norm_stderr": 0.014570144495075581 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4186417048396734, + "acc_stderr": 0.0049232818418285165, + "acc_norm": 0.5636327424815774, + "acc_norm_stderr": 0.004949207947265917 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5730994152046783, + "acc_stderr": 0.03793620616529917, + "acc_norm": 0.5730994152046783, + "acc_norm_stderr": 0.03793620616529917 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6213592233009708, + "acc_stderr": 0.04802694698258974, + "acc_norm": 0.6213592233009708, + "acc_norm_stderr": 0.04802694698258974 + }, + 
"harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5466155810983397, + "acc_stderr": 0.01780208713585031, + "acc_norm": 0.5466155810983397, + "acc_norm_stderr": 0.01780208713585031 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.043097329010363554, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.043097329010363554 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206824, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206824 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.44680851063829785, + "acc_stderr": 0.0325005368436584, + "acc_norm": 0.44680851063829785, + "acc_norm_stderr": 0.0325005368436584 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4457831325301205, + "acc_stderr": 0.038695433234721015, + "acc_norm": 0.4457831325301205, + "acc_norm_stderr": 0.038695433234721015 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5209003215434084, + "acc_stderr": 0.028373270961069414, + "acc_norm": 0.5209003215434084, + "acc_norm_stderr": 0.028373270961069414 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4798206278026906, + "acc_stderr": 0.033530461674123, + "acc_norm": 0.4798206278026906, + "acc_norm_stderr": 0.033530461674123 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5267175572519084, + "acc_stderr": 0.04379024936553893, + "acc_norm": 0.5267175572519084, + "acc_norm_stderr": 0.04379024936553893 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5858585858585859, + "acc_stderr": 0.03509438348879629, + "acc_norm": 0.5858585858585859, + "acc_norm_stderr": 0.03509438348879629 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.42758620689655175, + "acc_stderr": 0.04122737111370332, + "acc_norm": 0.42758620689655175, + "acc_norm_stderr": 0.04122737111370332 + }, + 
"harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.04220773659171453, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.04220773659171453 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4789915966386555, + "acc_stderr": 0.0324498084999003, + "acc_norm": 0.4789915966386555, + "acc_norm_stderr": 0.0324498084999003 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.47435897435897434, + "acc_stderr": 0.02531764972644865, + "acc_norm": 0.47435897435897434, + "acc_norm_stderr": 0.02531764972644865 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.53, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.53, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5648148148148148, + "acc_stderr": 0.04792898170907062, + "acc_norm": 0.5648148148148148, + "acc_norm_stderr": 0.04792898170907062 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39408866995073893, + "acc_stderr": 0.03438157967036544, + "acc_norm": 0.39408866995073893, + "acc_norm_stderr": 0.03438157967036544 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.45806451612903226, + "acc_stderr": 0.028343787250540636, + "acc_norm": 0.45806451612903226, + "acc_norm_stderr": 0.028343787250540636 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7008547008547008, + "acc_stderr": 0.029996951858349476, + "acc_norm": 0.7008547008547008, + "acc_norm_stderr": 0.029996951858349476 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.45660377358490567, + "acc_stderr": 0.030656748696739435, + "acc_norm": 0.45660377358490567, + "acc_norm_stderr": 0.030656748696739435 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5636363636363636, + "acc_stderr": 0.04750185058907296, + "acc_norm": 0.5636363636363636, + 
"acc_norm_stderr": 0.04750185058907296 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3037037037037037, + "acc_stderr": 0.028037929969114982, + "acc_norm": 0.3037037037037037, + "acc_norm_stderr": 0.028037929969114982 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.03780445850526733, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.03780445850526733 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6119402985074627, + "acc_stderr": 0.03445789964362749, + "acc_norm": 0.6119402985074627, + "acc_norm_stderr": 0.03445789964362749 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4393063583815029, + "acc_stderr": 0.037842719328874674, + "acc_norm": 0.4393063583815029, + "acc_norm_stderr": 0.037842719328874674 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30158730158730157, + "acc_stderr": 0.023636975996101803, + "acc_norm": 0.30158730158730157, + "acc_norm_stderr": 0.023636975996101803 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4236111111111111, + "acc_stderr": 0.041321250197233685, + "acc_norm": 0.4236111111111111, + "acc_norm_stderr": 0.041321250197233685 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.64, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.64, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5, + "acc_stderr": 0.026919095102908273, + "acc_norm": 0.5, + "acc_norm_stderr": 0.026919095102908273 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5030674846625767, + "acc_stderr": 0.03928297078179663, + "acc_norm": 0.5030674846625767, + "acc_norm_stderr": 0.03928297078179663 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.49382716049382713, + "acc_stderr": 0.027818623962583302, + "acc_norm": 
0.49382716049382713, + "acc_norm_stderr": 0.027818623962583302 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5647668393782384, + "acc_stderr": 0.03578038165008585, + "acc_norm": 0.5647668393782384, + "acc_norm_stderr": 0.03578038165008585 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.04049339297748142, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.04049339297748142 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6, + "acc_stderr": 0.021004201260420078, + "acc_norm": 0.6, + "acc_norm_stderr": 0.021004201260420078 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.36507936507936506, + "acc_stderr": 0.043062412591271526, + "acc_norm": 0.36507936507936506, + "acc_norm_stderr": 0.043062412591271526 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4477124183006536, + "acc_stderr": 0.028472938478033526, + "acc_norm": 0.4477124183006536, + "acc_norm_stderr": 0.028472938478033526 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.44, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.44, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5867768595041323, + "acc_stderr": 0.04495087843548408, + "acc_norm": 0.5867768595041323, + "acc_norm_stderr": 0.04495087843548408 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4407894736842105, + "acc_stderr": 0.040403110624904356, + "acc_norm": 0.4407894736842105, + "acc_norm_stderr": 0.040403110624904356 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.39215686274509803, + "acc_stderr": 0.019751726508762626, + "acc_norm": 0.39215686274509803, + "acc_norm_stderr": 0.019751726508762626 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3262411347517731, + "acc_stderr": 
0.02796845304356317, + "acc_norm": 0.3262411347517731, + "acc_norm_stderr": 0.02796845304356317 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.26785714285714285, + "acc_stderr": 0.04203277291467762, + "acc_norm": 0.26785714285714285, + "acc_norm_stderr": 0.04203277291467762 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.03114144782353604, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.03114144782353604 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2670391061452514, + "acc_stderr": 0.014796502622562548, + "acc_norm": 0.2670391061452514, + "acc_norm_stderr": 0.014796502622562548 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.39705882352941174, + "acc_stderr": 0.029722152099280055, + "acc_norm": 0.39705882352941174, + "acc_norm_stderr": 0.029722152099280055 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5306122448979592, + "acc_stderr": 0.031949171367580624, + "acc_norm": 0.5306122448979592, + "acc_norm_stderr": 0.031949171367580624 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5780590717299579, + "acc_stderr": 0.032148146302403695, + "acc_norm": 0.5780590717299579, + "acc_norm_stderr": 0.032148146302403695 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.318122555410691, + "acc_stderr": 0.011895407281104097, + "acc_norm": 0.318122555410691, + "acc_norm_stderr": 0.011895407281104097 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5, + "acc_stderr": 0.03509312031717982, + "acc_norm": 0.5, + "acc_norm_stderr": 0.03509312031717982 + }, + 
"harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5878787878787879, + "acc_stderr": 0.03843566993588717, + "acc_norm": 0.5878787878787879, + "acc_norm_stderr": 0.03843566993588717 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2778457772337821, + "mc1_stderr": 0.015680929364024633, + "mc2": 0.44254172455320107, + "mc2_stderr": 0.015186819172805456 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.44510035419126326, + "acc_stderr": 0.017086417431005474, + "acc_norm": 0.4805194805194805, + "acc_norm_stderr": 0.01717730199234255 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + 
"harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "F24/llama-2-koen-13b-slimOrca", + "model_sha": "74138e08e67f4d1b710286b70399e75a4c03a511", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/F24/llama-2-koen-orca-mini-platypus2-math-13b/result_2023-12-03 09:38:38.json b/F24/llama-2-koen-orca-mini-platypus2-math-13b/result_2023-12-03 09:38:38.json new file mode 100644 index 0000000000000000000000000000000000000000..7e8255f1a21ca871cc7b8c78b69d6e574117ade6 --- /dev/null +++ b/F24/llama-2-koen-orca-mini-platypus2-math-13b/result_2023-12-03 09:38:38.json @@ -0,0 +1,444 @@ +{ + "results": 
{ + "harness|ko_arc_challenge|25": { + "acc": 0.22696245733788395, + "acc_stderr": 0.01224049153613286, + "acc_norm": 0.22696245733788395, + "acc_norm_stderr": 0.01224049153613286 + }, + "harness|ko_hellaswag|10": { + "acc": 0.2504481179047998, + "acc_stderr": 0.004323856300539177, + "acc_norm": 0.2504481179047998, + "acc_norm_stderr": 0.004323856300539177 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.3216374269005848, + "acc_stderr": 0.03582529442573122, + "acc_norm": 0.3216374269005848, + "acc_norm_stderr": 0.03582529442573122 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.17475728155339806, + "acc_stderr": 0.037601780060266196, + "acc_norm": 0.17475728155339806, + "acc_norm_stderr": 0.037601780060266196 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.23754789272030652, + "acc_stderr": 0.015218733046150191, + "acc_norm": 0.23754789272030652, + "acc_norm_stderr": 0.015218733046150191 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.18518518518518517, + "acc_stderr": 0.03355677216313142, + "acc_norm": 0.18518518518518517, + "acc_norm_stderr": 0.03355677216313142 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932268, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932268 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.26382978723404255, + "acc_stderr": 0.028809989854102987, + "acc_norm": 0.26382978723404255, + "acc_norm_stderr": 0.028809989854102987 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.28313253012048195, + "acc_stderr": 0.03507295431370518, + "acc_norm": 0.28313253012048195, + "acc_norm_stderr": 0.03507295431370518 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.1864951768488746, + "acc_stderr": 0.022122439772480757, + "acc_norm": 0.1864951768488746, + "acc_norm_stderr": 0.022122439772480757 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.31390134529147984, + "acc_stderr": 0.031146796482972465, + "acc_norm": 0.31390134529147984, + "acc_norm_stderr": 
0.031146796482972465 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2595419847328244, + "acc_stderr": 0.03844876139785271, + "acc_norm": 0.2595419847328244, + "acc_norm_stderr": 0.03844876139785271 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.17676767676767677, + "acc_stderr": 0.027178752639044915, + "acc_norm": 0.17676767676767677, + "acc_norm_stderr": 0.027178752639044915 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2413793103448276, + "acc_stderr": 0.03565998174135302, + "acc_norm": 0.2413793103448276, + "acc_norm_stderr": 0.03565998174135302 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237654, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237654 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.21008403361344538, + "acc_stderr": 0.026461398717471874, + "acc_norm": 0.21008403361344538, + "acc_norm_stderr": 0.026461398717471874 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.20256410256410257, + "acc_stderr": 0.020377660970371393, + "acc_norm": 0.20256410256410257, + "acc_norm_stderr": 0.020377660970371393 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.18, + "acc_stderr": 0.038612291966536955, + "acc_norm": 0.18, + "acc_norm_stderr": 0.038612291966536955 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.04236511258094633, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.04236511258094633 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.15270935960591134, + "acc_stderr": 0.025308904539380627, + "acc_norm": 
0.15270935960591134, + "acc_norm_stderr": 0.025308904539380627 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.1774193548387097, + "acc_stderr": 0.021732540689329276, + "acc_norm": 0.1774193548387097, + "acc_norm_stderr": 0.021732540689329276 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2905982905982906, + "acc_stderr": 0.029745048572674057, + "acc_norm": 0.2905982905982906, + "acc_norm_stderr": 0.029745048572674057 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.21509433962264152, + "acc_stderr": 0.025288394502891377, + "acc_norm": 0.21509433962264152, + "acc_norm_stderr": 0.025288394502891377 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03955932861795833, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03955932861795833 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2111111111111111, + "acc_stderr": 0.024882116857655075, + "acc_norm": 0.2111111111111111, + "acc_norm_stderr": 0.024882116857655075 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.1986754966887417, + "acc_stderr": 0.03257847384436775, + "acc_norm": 0.1986754966887417, + "acc_norm_stderr": 0.03257847384436775 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.24378109452736318, + "acc_stderr": 0.030360490154014645, + "acc_norm": 0.24378109452736318, + "acc_norm_stderr": 0.030360490154014645 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.20809248554913296, + "acc_stderr": 0.030952890217749884, + "acc_norm": 0.20809248554913296, + "acc_norm_stderr": 0.030952890217749884 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.20899470899470898, + "acc_stderr": 0.020940481565334835, + "acc_norm": 0.20899470899470898, + "acc_norm_stderr": 0.020940481565334835 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2569444444444444, + "acc_stderr": 0.03653946969442099, + "acc_norm": 0.2569444444444444, + "acc_norm_stderr": 0.03653946969442099 + }, + 
"harness|ko_mmlu_college_chemistry|5": { + "acc": 0.2, + "acc_stderr": 0.040201512610368445, + "acc_norm": 0.2, + "acc_norm_stderr": 0.040201512610368445 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542129, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542129 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.24855491329479767, + "acc_stderr": 0.023267528432100174, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 0.023267528432100174 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.22085889570552147, + "acc_stderr": 0.03259177392742178, + "acc_norm": 0.22085889570552147, + "acc_norm_stderr": 0.03259177392742178 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.21604938271604937, + "acc_stderr": 0.022899162918445796, + "acc_norm": 0.21604938271604937, + "acc_norm_stderr": 0.022899162918445796 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.19689119170984457, + "acc_stderr": 0.028697873971860677, + "acc_norm": 0.19689119170984457, + "acc_norm_stderr": 0.028697873971860677 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.039994238792813365, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.039994238792813365 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.1926605504587156, + "acc_stderr": 0.016909276884936097, + "acc_norm": 0.1926605504587156, + "acc_norm_stderr": 0.016909276884936097 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.04040610178208841, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.04040610178208841 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.02392915551735129, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 
0.02392915551735129 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2396694214876033, + "acc_stderr": 0.038968789850704164, + "acc_norm": 0.2396694214876033, + "acc_norm_stderr": 0.038968789850704164 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.17763157894736842, + "acc_stderr": 0.03110318238312338, + "acc_norm": 0.17763157894736842, + "acc_norm_stderr": 0.03110318238312338 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.25, + "acc_stderr": 0.01751781884501444, + "acc_norm": 0.25, + "acc_norm_stderr": 0.01751781884501444 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.23404255319148937, + "acc_stderr": 0.025257861359432414, + "acc_norm": 0.23404255319148937, + "acc_norm_stderr": 0.025257861359432414 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3125, + "acc_stderr": 0.043994650575715215, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.043994650575715215 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.1527777777777778, + "acc_stderr": 0.024536326026134238, + "acc_norm": 0.1527777777777778, + "acc_norm_stderr": 0.024536326026134238 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23798882681564246, + "acc_stderr": 0.014242630070574892, + "acc_norm": 0.23798882681564246, + "acc_norm_stderr": 0.014242630070574892 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.18382352941176472, + "acc_stderr": 0.02352924218519311, + "acc_norm": 0.18382352941176472, + "acc_norm_stderr": 
0.02352924218519311 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.18775510204081633, + "acc_stderr": 0.025000256039546198, + "acc_norm": 0.18775510204081633, + "acc_norm_stderr": 0.025000256039546198 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.270042194092827, + "acc_stderr": 0.028900721906293426, + "acc_norm": 0.270042194092827, + "acc_norm_stderr": 0.028900721906293426 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2457627118644068, + "acc_stderr": 0.010996156635142692, + "acc_norm": 0.2457627118644068, + "acc_norm_stderr": 0.010996156635142692 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.25, + "acc_stderr": 0.03039153369274154, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03039153369274154 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03225078108306289, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03225078108306289 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 1.0, + "mc1_stderr": 0.0, + "mc2": NaN, + "mc2_stderr": NaN + }, + "harness|ko_commongen_v2|2": { + "acc": 0.24793388429752067, + "acc_stderr": 0.014846044968252247, + "acc_norm": 0.24793388429752067, + "acc_norm_stderr": 0.014846044968252247 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + 
"harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + 
"model_name": "F24/llama-2-koen-orca-mini-platypus2-math-13b", + "model_sha": "b5ee3b5b459be0a3fd99d5050ed0d38653404690", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/FINDA-FIT/llama-2-ko-plain/result_2023-09-30 03:54:00.json b/FINDA-FIT/llama-2-ko-plain/result_2023-09-30 03:54:00.json new file mode 100644 index 0000000000000000000000000000000000000000..90045181537868b66022a7e29b971dde97f688e4 --- /dev/null +++ b/FINDA-FIT/llama-2-ko-plain/result_2023-09-30 03:54:00.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.19539249146757678, + "acc_stderr": 0.011586907189952911, + "acc_norm": 0.2636518771331058, + "acc_norm_stderr": 0.012875929151297047 + }, + "harness|ko_hellaswag|10": { + "acc": 0.2660824536944832, + "acc_stderr": 0.004410047530835032, + "acc_norm": 0.2788289185421231, + "acc_norm_stderr": 0.004475067344626752 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.21052631578947367, + "acc_stderr": 0.0312678171466318, + "acc_norm": 0.21052631578947367, + "acc_norm_stderr": 0.0312678171466318 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.30097087378640774, + "acc_stderr": 0.04541609446503949, + "acc_norm": 0.30097087378640774, + "acc_norm_stderr": 0.04541609446503949 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2720306513409962, + "acc_stderr": 0.015913367447500524, + "acc_norm": 0.2720306513409962, + "acc_norm_stderr": 0.015913367447500524 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2, + "acc_stderr": 0.034554737023254366, + "acc_norm": 0.2, + "acc_norm_stderr": 0.034554737023254366 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2680851063829787, + "acc_stderr": 
0.028957342788342347, + "acc_norm": 0.2680851063829787, + "acc_norm_stderr": 0.028957342788342347 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.25903614457831325, + "acc_stderr": 0.034106466140718564, + "acc_norm": 0.25903614457831325, + "acc_norm_stderr": 0.034106466140718564 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.28938906752411575, + "acc_stderr": 0.025755865922632945, + "acc_norm": 0.28938906752411575, + "acc_norm_stderr": 0.025755865922632945 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.2914798206278027, + "acc_stderr": 0.030500283176545902, + "acc_norm": 0.2914798206278027, + "acc_norm_stderr": 0.030500283176545902 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.21374045801526717, + "acc_stderr": 0.0359546161177469, + "acc_norm": 0.21374045801526717, + "acc_norm_stderr": 0.0359546161177469 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.2474747474747475, + "acc_stderr": 0.030746300742124505, + "acc_norm": 0.2474747474747475, + "acc_norm_stderr": 0.030746300742124505 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2689655172413793, + "acc_stderr": 0.036951833116502325, + "acc_norm": 0.2689655172413793, + "acc_norm_stderr": 0.036951833116502325 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.04023382273617747, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.04023382273617747 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3403361344537815, + "acc_stderr": 0.030778057422931666, + "acc_norm": 0.3403361344537815, + "acc_norm_stderr": 0.030778057422931666 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.31025641025641026, + "acc_stderr": 0.023454674889404285, + "acc_norm": 0.31025641025641026, + "acc_norm_stderr": 0.023454674889404285 + }, + 
"harness|ko_mmlu_computer_security|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.18, + "acc_stderr": 0.03861229196653694, + "acc_norm": 0.18, + "acc_norm_stderr": 0.03861229196653694 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.040191074725573483, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.040191074725573483 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.31527093596059114, + "acc_stderr": 0.03269080871970186, + "acc_norm": 0.31527093596059114, + "acc_norm_stderr": 0.03269080871970186 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3064516129032258, + "acc_stderr": 0.026226485652553873, + "acc_norm": 0.3064516129032258, + "acc_norm_stderr": 0.026226485652553873 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.19658119658119658, + "acc_stderr": 0.02603538609895129, + "acc_norm": 0.19658119658119658, + "acc_norm_stderr": 0.02603538609895129 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.23773584905660378, + "acc_stderr": 0.02619980880756193, + "acc_norm": 0.23773584905660378, + "acc_norm_stderr": 0.02619980880756193 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2545454545454545, + "acc_stderr": 0.04172343038705383, + "acc_norm": 0.2545454545454545, + "acc_norm_stderr": 0.04172343038705383 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.027840811495871937, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.027840811495871937 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.038020397601079024, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.038020397601079024 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.20398009950248755, + "acc_stderr": 0.02849317624532609, + "acc_norm": 0.20398009950248755, + 
"acc_norm_stderr": 0.02849317624532609 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2138728323699422, + "acc_stderr": 0.03126511206173044, + "acc_norm": 0.2138728323699422, + "acc_norm_stderr": 0.03126511206173044 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2566137566137566, + "acc_stderr": 0.022494510767503154, + "acc_norm": 0.2566137566137566, + "acc_norm_stderr": 0.022494510767503154 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2361111111111111, + "acc_stderr": 0.03551446610810826, + "acc_norm": 0.2361111111111111, + "acc_norm_stderr": 0.03551446610810826 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.23699421965317918, + "acc_stderr": 0.022894082489925992, + "acc_norm": 0.23699421965317918, + "acc_norm_stderr": 0.022894082489925992 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.27607361963190186, + "acc_stderr": 0.0351238528370505, + "acc_norm": 0.27607361963190186, + "acc_norm_stderr": 0.0351238528370505 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2654320987654321, + "acc_stderr": 0.02456922360046085, + "acc_norm": 0.2654320987654321, + "acc_norm_stderr": 0.02456922360046085 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.30569948186528495, + "acc_stderr": 0.03324837939758159, + "acc_norm": 0.30569948186528495, + "acc_norm_stderr": 0.03324837939758159 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.041857744240220575, + "acc_norm": 
0.2719298245614035, + "acc_norm_stderr": 0.041857744240220575 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.24220183486238533, + "acc_stderr": 0.01836817630659862, + "acc_norm": 0.24220183486238533, + "acc_norm_stderr": 0.01836817630659862 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.18253968253968253, + "acc_stderr": 0.03455071019102148, + "acc_norm": 0.18253968253968253, + "acc_norm_stderr": 0.03455071019102148 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.27124183006535946, + "acc_stderr": 0.025457756696667874, + "acc_norm": 0.27124183006535946, + "acc_norm_stderr": 0.025457756696667874 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768077, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768077 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.3305785123966942, + "acc_stderr": 0.04294340845212095, + "acc_norm": 0.3305785123966942, + "acc_norm_stderr": 0.04294340845212095 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3092105263157895, + "acc_stderr": 0.037610708698674805, + "acc_norm": 0.3092105263157895, + "acc_norm_stderr": 0.037610708698674805 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2173202614379085, + "acc_stderr": 0.016684820929148594, + "acc_norm": 0.2173202614379085, + "acc_norm_stderr": 0.016684820929148594 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.25177304964539005, + "acc_stderr": 0.0258921511567094, + "acc_norm": 0.25177304964539005, + "acc_norm_stderr": 0.0258921511567094 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.17857142857142858, + "acc_stderr": 0.036352091215778065, + "acc_norm": 0.17857142857142858, + "acc_norm_stderr": 0.036352091215778065 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4398148148148148, + "acc_stderr": 0.033851779760448106, + "acc_norm": 0.4398148148148148, + "acc_norm_stderr": 0.033851779760448106 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 
0.24134078212290502, + "acc_stderr": 0.01431099954796145, + "acc_norm": 0.24134078212290502, + "acc_norm_stderr": 0.01431099954796145 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4338235294117647, + "acc_stderr": 0.03010563657001664, + "acc_norm": 0.4338235294117647, + "acc_norm_stderr": 0.03010563657001664 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2612244897959184, + "acc_stderr": 0.028123429335142804, + "acc_norm": 0.2612244897959184, + "acc_norm_stderr": 0.028123429335142804 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2320675105485232, + "acc_stderr": 0.02747974455080851, + "acc_norm": 0.2320675105485232, + "acc_norm_stderr": 0.02747974455080851 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.23663624511082137, + "acc_stderr": 0.010855137351572742, + "acc_norm": 0.23663624511082137, + "acc_norm_stderr": 0.010855137351572742 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.24019607843137256, + "acc_stderr": 0.02998373305591362, + "acc_norm": 0.24019607843137256, + "acc_norm_stderr": 0.02998373305591362 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.23636363636363636, + "acc_stderr": 0.03317505930009179, + "acc_norm": 0.23636363636363636, + "acc_norm_stderr": 0.03317505930009179 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.27050183598531213, + "mc1_stderr": 0.015550778332842892, + "mc2": 0.5367542106571858, + "mc2_stderr": 0.01635449255335969 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.1487603305785124, + "acc_stderr": 0.012234446131035063, + "acc_norm": 0.3860684769775679, + "acc_norm_stderr": 0.01673813076032174 + } 
+ }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + 
"harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "FINDA-FIT/llama-2-ko-plain", + "model_sha": "091fe3550bfa49baaebda838c10825484580f89d", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/FINDA-FIT/llama-ko-7b/result_2023-09-29 16:26:20.json b/FINDA-FIT/llama-ko-7b/result_2023-09-29 16:26:20.json new file mode 100644 index 0000000000000000000000000000000000000000..b7551097ac9913a99b9f740e98885dc86abb33b2 --- /dev/null +++ b/FINDA-FIT/llama-ko-7b/result_2023-09-29 16:26:20.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.19795221843003413, + "acc_stderr": 0.011643990971573401, + "acc_norm": 0.26535836177474403, + "acc_norm_stderr": 0.012902554762313962 + }, + "harness|ko_hellaswag|10": { + "acc": 0.2633937462656841, + "acc_stderr": 0.004395739495688583, + "acc_norm": 0.27823142800239, + "acc_norm_stderr": 0.004472121485161932 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.22807017543859648, + "acc_stderr": 0.03218093795602357, + "acc_norm": 0.22807017543859648, + "acc_norm_stderr": 0.03218093795602357 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.20388349514563106, + 
"acc_stderr": 0.03989139859531771, + "acc_norm": 0.20388349514563106, + "acc_norm_stderr": 0.03989139859531771 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2669220945083014, + "acc_stderr": 0.015818450894777552, + "acc_norm": 0.2669220945083014, + "acc_norm_stderr": 0.015818450894777552 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.22962962962962963, + "acc_stderr": 0.03633384414073463, + "acc_norm": 0.22962962962962963, + "acc_norm_stderr": 0.03633384414073463 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.24680851063829787, + "acc_stderr": 0.028185441301234102, + "acc_norm": 0.24680851063829787, + "acc_norm_stderr": 0.028185441301234102 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.29518072289156627, + "acc_stderr": 0.0355092018568963, + "acc_norm": 0.29518072289156627, + "acc_norm_stderr": 0.0355092018568963 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.26688102893890675, + "acc_stderr": 0.025122637608816657, + "acc_norm": 0.26688102893890675, + "acc_norm_stderr": 0.025122637608816657 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.2556053811659193, + "acc_stderr": 0.029275891003969923, + "acc_norm": 0.2556053811659193, + "acc_norm_stderr": 0.029275891003969923 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2366412213740458, + "acc_stderr": 0.037276735755969195, + "acc_norm": 0.2366412213740458, + "acc_norm_stderr": 0.037276735755969195 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.2474747474747475, + "acc_stderr": 0.03074630074212451, + "acc_norm": 0.2474747474747475, + "acc_norm_stderr": 0.03074630074212451 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 
0.2620689655172414, + "acc_stderr": 0.036646663372252565, + "acc_norm": 0.2620689655172414, + "acc_norm_stderr": 0.036646663372252565 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.17647058823529413, + "acc_stderr": 0.03793281185307811, + "acc_norm": 0.17647058823529413, + "acc_norm_stderr": 0.03793281185307811 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3403361344537815, + "acc_stderr": 0.030778057422931666, + "acc_norm": 0.3403361344537815, + "acc_norm_stderr": 0.030778057422931666 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.30512820512820515, + "acc_stderr": 0.023346335293325887, + "acc_norm": 0.30512820512820515, + "acc_norm_stderr": 0.023346335293325887 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.17, + "acc_stderr": 0.03775251680686371, + "acc_norm": 0.17, + "acc_norm_stderr": 0.03775251680686371 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.15, + "acc_stderr": 0.03588702812826371, + "acc_norm": 0.15, + "acc_norm_stderr": 0.03588702812826371 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.040191074725573483, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.040191074725573483 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.32019704433497537, + "acc_stderr": 0.032826493853041504, + "acc_norm": 0.32019704433497537, + "acc_norm_stderr": 0.032826493853041504 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.31290322580645163, + "acc_stderr": 0.026377567028645858, + "acc_norm": 0.31290322580645163, + "acc_norm_stderr": 0.026377567028645858 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.19230769230769232, + "acc_stderr": 0.025819233256483727, + "acc_norm": 0.19230769230769232, + "acc_norm_stderr": 0.025819233256483727 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.22641509433962265, + "acc_stderr": 0.025757559893106727, + "acc_norm": 0.22641509433962265, + "acc_norm_stderr": 0.025757559893106727 
+ }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.23636363636363636, + "acc_stderr": 0.04069306319721376, + "acc_norm": 0.23636363636363636, + "acc_norm_stderr": 0.04069306319721376 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25555555555555554, + "acc_stderr": 0.02659393910184408, + "acc_norm": 0.25555555555555554, + "acc_norm_stderr": 0.02659393910184408 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33112582781456956, + "acc_stderr": 0.038425817186598696, + "acc_norm": 0.33112582781456956, + "acc_norm_stderr": 0.038425817186598696 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.23880597014925373, + "acc_stderr": 0.030147775935409224, + "acc_norm": 0.23880597014925373, + "acc_norm_stderr": 0.030147775935409224 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.21965317919075145, + "acc_stderr": 0.031568093627031744, + "acc_norm": 0.21965317919075145, + "acc_norm_stderr": 0.031568093627031744 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.25396825396825395, + "acc_stderr": 0.022418042891113946, + "acc_norm": 0.25396825396825395, + "acc_norm_stderr": 0.022418042891113946 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2638888888888889, + "acc_stderr": 0.03685651095897532, + "acc_norm": 0.2638888888888889, + "acc_norm_stderr": 0.03685651095897532 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.23699421965317918, + "acc_stderr": 0.022894082489925992, + "acc_norm": 0.23699421965317918, + "acc_norm_stderr": 0.022894082489925992 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.27607361963190186, + "acc_stderr": 0.0351238528370505, + "acc_norm": 
0.27607361963190186, + "acc_norm_stderr": 0.0351238528370505 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2654320987654321, + "acc_stderr": 0.02456922360046085, + "acc_norm": 0.2654320987654321, + "acc_norm_stderr": 0.02456922360046085 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.30569948186528495, + "acc_stderr": 0.03324837939758159, + "acc_norm": 0.30569948186528495, + "acc_norm_stderr": 0.03324837939758159 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.0414243971948936, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.0414243971948936 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.23302752293577983, + "acc_stderr": 0.018125669180861514, + "acc_norm": 0.23302752293577983, + "acc_norm_stderr": 0.018125669180861514 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.16666666666666666, + "acc_stderr": 0.03333333333333337, + "acc_norm": 0.16666666666666666, + "acc_norm_stderr": 0.03333333333333337 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.24183006535947713, + "acc_stderr": 0.024518195641879334, + "acc_norm": 0.24183006535947713, + "acc_norm_stderr": 0.024518195641879334 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2975206611570248, + "acc_stderr": 0.04173349148083497, + "acc_norm": 0.2975206611570248, + "acc_norm_stderr": 0.04173349148083497 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.23026315789473684, + "acc_stderr": 0.034260594244031654, + "acc_norm": 0.23026315789473684, + "acc_norm_stderr": 0.034260594244031654 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.21895424836601307, + "acc_stderr": 
0.016729937565537537, + "acc_norm": 0.21895424836601307, + "acc_norm_stderr": 0.016729937565537537 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.24822695035460993, + "acc_stderr": 0.025770015644290396, + "acc_norm": 0.24822695035460993, + "acc_norm_stderr": 0.025770015644290396 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.04287858751340456, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.04287858751340456 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4398148148148148, + "acc_stderr": 0.033851779760448106, + "acc_norm": 0.4398148148148148, + "acc_norm_stderr": 0.033851779760448106 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24581005586592178, + "acc_stderr": 0.01440029642922561, + "acc_norm": 0.24581005586592178, + "acc_norm_stderr": 0.01440029642922561 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.44485294117647056, + "acc_stderr": 0.03018753206032938, + "acc_norm": 0.44485294117647056, + "acc_norm_stderr": 0.03018753206032938 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.23265306122448978, + "acc_stderr": 0.02704925791589618, + "acc_norm": 0.23265306122448978, + "acc_norm_stderr": 0.02704925791589618 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2320675105485232, + "acc_stderr": 0.02747974455080851, + "acc_norm": 0.2320675105485232, + "acc_norm_stderr": 0.02747974455080851 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2301173402868318, + "acc_stderr": 0.010750183177375553, + "acc_norm": 0.2301173402868318, + "acc_norm_stderr": 0.010750183177375553 + }, + 
"harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.25, + "acc_stderr": 0.03039153369274154, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03039153369274154 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.22424242424242424, + "acc_stderr": 0.03256866661681102, + "acc_norm": 0.22424242424242424, + "acc_norm_stderr": 0.03256866661681102 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2741738066095471, + "mc1_stderr": 0.015616518497219381, + "mc2": 0.538620436654127, + "mc2_stderr": 0.016366108934105512 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.15230224321133412, + "acc_stderr": 0.01235345636132145, + "acc_norm": 0.3742621015348288, + "acc_norm_stderr": 0.016637917789798735 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + 
"harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "FINDA-FIT/llama-ko-7b", + "model_sha": "c1f0b9f20d38c9494e1607bd30ce43da570d9d52", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/FINDA-FIT/llama-m/result_2023-09-30 08:24:55.json b/FINDA-FIT/llama-m/result_2023-09-30 08:24:55.json new file mode 100644 index 0000000000000000000000000000000000000000..5f04ec60250cdeebfb3bb1dfa186a44c9cc3b38c --- /dev/null +++ 
b/FINDA-FIT/llama-m/result_2023-09-30 08:24:55.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.19539249146757678, + "acc_stderr": 0.01158690718995291, + "acc_norm": 0.2619453924914676, + "acc_norm_stderr": 0.012849054826858112 + }, + "harness|ko_hellaswag|10": { + "acc": 0.2642899820752838, + "acc_stderr": 0.00440053218855021, + "acc_norm": 0.27763393746265685, + "acc_norm_stderr": 0.00446916572860033 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.21052631578947367, + "acc_stderr": 0.0312678171466318, + "acc_norm": 0.21052631578947367, + "acc_norm_stderr": 0.0312678171466318 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.3106796116504854, + "acc_stderr": 0.04582124160161549, + "acc_norm": 0.3106796116504854, + "acc_norm_stderr": 0.04582124160161549 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2681992337164751, + "acc_stderr": 0.015842430835269438, + "acc_norm": 0.2681992337164751, + "acc_norm_stderr": 0.015842430835269438 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2074074074074074, + "acc_stderr": 0.03502553170678316, + "acc_norm": 0.2074074074074074, + "acc_norm_stderr": 0.03502553170678316 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2553191489361702, + "acc_stderr": 0.028504856470514203, + "acc_norm": 0.2553191489361702, + "acc_norm_stderr": 0.028504856470514203 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.30120481927710846, + "acc_stderr": 0.0357160923005348, + "acc_norm": 0.30120481927710846, + "acc_norm_stderr": 0.0357160923005348 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2797427652733119, + "acc_stderr": 0.02549425935069491, + "acc_norm": 0.2797427652733119, + "acc_norm_stderr": 0.02549425935069491 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.23318385650224216, + "acc_stderr": 
0.028380391147094716, + "acc_norm": 0.23318385650224216, + "acc_norm_stderr": 0.028380391147094716 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2366412213740458, + "acc_stderr": 0.037276735755969195, + "acc_norm": 0.2366412213740458, + "acc_norm_stderr": 0.037276735755969195 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.24242424242424243, + "acc_stderr": 0.030532892233932032, + "acc_norm": 0.24242424242424243, + "acc_norm_stderr": 0.030532892233932032 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2413793103448276, + "acc_stderr": 0.03565998174135302, + "acc_norm": 0.2413793103448276, + "acc_norm_stderr": 0.03565998174135302 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.17647058823529413, + "acc_stderr": 0.03793281185307811, + "acc_norm": 0.17647058823529413, + "acc_norm_stderr": 0.03793281185307811 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3487394957983193, + "acc_stderr": 0.030956636328566545, + "acc_norm": 0.3487394957983193, + "acc_norm_stderr": 0.030956636328566545 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3282051282051282, + "acc_stderr": 0.02380763319865727, + "acc_norm": 0.3282051282051282, + "acc_norm_stderr": 0.02380763319865727 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.22, + "acc_stderr": 0.0416333199893227, + "acc_norm": 0.22, + "acc_norm_stderr": 0.0416333199893227 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.15, + "acc_stderr": 0.03588702812826371, + "acc_norm": 0.15, + "acc_norm_stderr": 0.03588702812826371 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.040191074725573483, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.040191074725573483 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 
0.3103448275862069, + "acc_stderr": 0.03255086769970103, + "acc_norm": 0.3103448275862069, + "acc_norm_stderr": 0.03255086769970103 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3064516129032258, + "acc_stderr": 0.026226485652553873, + "acc_norm": 0.3064516129032258, + "acc_norm_stderr": 0.026226485652553873 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.19658119658119658, + "acc_stderr": 0.02603538609895129, + "acc_norm": 0.19658119658119658, + "acc_norm_stderr": 0.02603538609895129 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.23018867924528302, + "acc_stderr": 0.025907897122408173, + "acc_norm": 0.23018867924528302, + "acc_norm_stderr": 0.025907897122408173 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.20909090909090908, + "acc_stderr": 0.03895091015724138, + "acc_norm": 0.20909090909090908, + "acc_norm_stderr": 0.03895091015724138 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.02671924078371217, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.02671924078371217 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.32450331125827814, + "acc_stderr": 0.03822746937658754, + "acc_norm": 0.32450331125827814, + "acc_norm_stderr": 0.03822746937658754 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.22388059701492538, + "acc_stderr": 0.0294752502360172, + "acc_norm": 0.22388059701492538, + "acc_norm_stderr": 0.0294752502360172 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2138728323699422, + "acc_stderr": 0.03126511206173043, + "acc_norm": 0.2138728323699422, + "acc_norm_stderr": 0.03126511206173043 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2566137566137566, + "acc_stderr": 0.022494510767503154, + "acc_norm": 0.2566137566137566, + "acc_norm_stderr": 0.022494510767503154 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.22916666666666666, + "acc_stderr": 0.03514697467862388, + "acc_norm": 0.22916666666666666, + 
"acc_norm_stderr": 0.03514697467862388 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.23699421965317918, + "acc_stderr": 0.022894082489925992, + "acc_norm": 0.23699421965317918, + "acc_norm_stderr": 0.022894082489925992 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.27607361963190186, + "acc_stderr": 0.0351238528370505, + "acc_norm": 0.27607361963190186, + "acc_norm_stderr": 0.0351238528370505 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.02438366553103545, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.02438366553103545 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.27979274611398963, + "acc_stderr": 0.032396370467357015, + "acc_norm": 0.27979274611398963, + "acc_norm_stderr": 0.032396370467357015 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.04142439719489362, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.04142439719489362 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.24036697247706423, + "acc_stderr": 0.01832060732096407, + "acc_norm": 0.24036697247706423, + "acc_norm_stderr": 0.01832060732096407 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.15873015873015872, + "acc_stderr": 0.032684540130117436, + "acc_norm": 0.15873015873015872, + "acc_norm_stderr": 0.032684540130117436 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.25163398692810457, + "acc_stderr": 0.0248480182638752, + "acc_norm": 
0.25163398692810457, + "acc_norm_stderr": 0.0248480182638752 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816508, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816508 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2892561983471074, + "acc_stderr": 0.04139112727635464, + "acc_norm": 0.2892561983471074, + "acc_norm_stderr": 0.04139112727635464 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.25, + "acc_stderr": 0.03523807393012047, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03523807393012047 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2238562091503268, + "acc_stderr": 0.016863008585416617, + "acc_norm": 0.2238562091503268, + "acc_norm_stderr": 0.016863008585416617 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.24822695035460993, + "acc_stderr": 0.025770015644290396, + "acc_norm": 0.24822695035460993, + "acc_norm_stderr": 0.025770015644290396 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25, + "acc_stderr": 0.04109974682633932, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04109974682633932 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.03362277436608043, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.03362277436608043 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.25139664804469275, + "acc_stderr": 0.014508979453553983, + "acc_norm": 0.25139664804469275, + "acc_norm_stderr": 0.014508979453553983 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.030161911930767102, + "acc_norm": 0.4411764705882353, + 
"acc_norm_stderr": 0.030161911930767102 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2612244897959184, + "acc_stderr": 0.02812342933514279, + "acc_norm": 0.2612244897959184, + "acc_norm_stderr": 0.02812342933514279 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.22362869198312235, + "acc_stderr": 0.027123298205229972, + "acc_norm": 0.22362869198312235, + "acc_norm_stderr": 0.027123298205229972 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2333767926988266, + "acc_stderr": 0.010803108481179088, + "acc_norm": 0.2333767926988266, + "acc_norm_stderr": 0.010803108481179088 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.030587591351604243, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.030587591351604243 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.23030303030303031, + "acc_stderr": 0.03287666758603489, + "acc_norm": 0.23030303030303031, + "acc_norm_stderr": 0.03287666758603489 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2741738066095471, + "mc1_stderr": 0.015616518497219385, + "mc2": 0.5382255654218452, + "mc2_stderr": 0.01636582464762524 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.1487603305785124, + "acc_stderr": 0.012234446131035059, + "acc_norm": 0.3789846517119244, + "acc_norm_stderr": 0.016679260684229286 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + 
"harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + 
"harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "FINDA-FIT/llama-m", + "model_sha": "7c06c7acb6bd18e1cf52846483e430def93686f2", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/FINDA-FIT/llama-p/result_2023-09-30 17:05:38.json b/FINDA-FIT/llama-p/result_2023-09-30 17:05:38.json new file mode 100644 index 0000000000000000000000000000000000000000..41a758a2ce06da144ca0d3d59eb72ea527ebe4ea --- /dev/null +++ b/FINDA-FIT/llama-p/result_2023-09-30 17:05:38.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3395904436860068, + "acc_stderr": 0.013839039762820169, + "acc_norm": 0.39590443686006827, + "acc_norm_stderr": 0.014291228393536588 + }, + "harness|ko_hellaswag|10": { + "acc": 0.38856801433977295, + "acc_stderr": 0.004864286176731823, + "acc_norm": 0.5073690499900418, + "acc_norm_stderr": 0.004989239462835233 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.391812865497076, + "acc_stderr": 0.037439798259263996, + "acc_norm": 0.391812865497076, + "acc_norm_stderr": 0.037439798259263996 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.27184466019417475, + "acc_stderr": 0.044052680241409216, + "acc_norm": 0.27184466019417475, + "acc_norm_stderr": 0.044052680241409216 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3946360153256705, + "acc_stderr": 0.017478464305911545, + "acc_norm": 0.3946360153256705, + "acc_norm_stderr": 0.017478464305911545 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.35555555555555557, + "acc_stderr": 0.04135176749720386, + "acc_norm": 0.35555555555555557, + "acc_norm_stderr": 0.04135176749720386 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + 
"acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.26382978723404255, + "acc_stderr": 0.028809989854102956, + "acc_norm": 0.26382978723404255, + "acc_norm_stderr": 0.028809989854102956 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.30120481927710846, + "acc_stderr": 0.03571609230053481, + "acc_norm": 0.30120481927710846, + "acc_norm_stderr": 0.03571609230053481 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4115755627009646, + "acc_stderr": 0.027950481494401266, + "acc_norm": 0.4115755627009646, + "acc_norm_stderr": 0.027950481494401266 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3632286995515695, + "acc_stderr": 0.032277904428505, + "acc_norm": 0.3632286995515695, + "acc_norm_stderr": 0.032277904428505 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.42748091603053434, + "acc_stderr": 0.043389203057924, + "acc_norm": 0.42748091603053434, + "acc_norm_stderr": 0.043389203057924 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.35353535353535354, + "acc_stderr": 0.03406086723547153, + "acc_norm": 0.35353535353535354, + "acc_norm_stderr": 0.03406086723547153 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.31724137931034485, + "acc_stderr": 0.03878352372138621, + "acc_norm": 0.31724137931034485, + "acc_norm_stderr": 0.03878352372138621 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.13725490196078433, + "acc_stderr": 0.03424084669891523, + "acc_norm": 0.13725490196078433, + "acc_norm_stderr": 0.03424084669891523 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.33613445378151263, + "acc_stderr": 0.030684737115135367, + "acc_norm": 0.33613445378151263, + "acc_norm_stderr": 0.030684737115135367 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.258974358974359, + 
"acc_stderr": 0.02221110681006167, + "acc_norm": 0.258974358974359, + "acc_norm_stderr": 0.02221110681006167 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.39814814814814814, + "acc_stderr": 0.04732332615978814, + "acc_norm": 0.39814814814814814, + "acc_norm_stderr": 0.04732332615978814 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2561576354679803, + "acc_stderr": 0.0307127300709826, + "acc_norm": 0.2561576354679803, + "acc_norm_stderr": 0.0307127300709826 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3258064516129032, + "acc_stderr": 0.026662010578567104, + "acc_norm": 0.3258064516129032, + "acc_norm_stderr": 0.026662010578567104 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5512820512820513, + "acc_stderr": 0.032583346493868806, + "acc_norm": 0.5512820512820513, + "acc_norm_stderr": 0.032583346493868806 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.35094339622641507, + "acc_stderr": 0.029373646253234686, + "acc_norm": 0.35094339622641507, + "acc_norm_stderr": 0.029373646253234686 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.39090909090909093, + "acc_stderr": 0.046737523336702384, + "acc_norm": 0.39090909090909093, + "acc_norm_stderr": 0.046737523336702384 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.026962424325073828, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.026962424325073828 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.1986754966887417, + "acc_stderr": 0.032578473844367746, + "acc_norm": 0.1986754966887417, + "acc_norm_stderr": 0.032578473844367746 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.4427860696517413, + 
"acc_stderr": 0.03512310964123936, + "acc_norm": 0.4427860696517413, + "acc_norm_stderr": 0.03512310964123936 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3236994219653179, + "acc_stderr": 0.0356760379963917, + "acc_norm": 0.3236994219653179, + "acc_norm_stderr": 0.0356760379963917 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2698412698412698, + "acc_stderr": 0.022860838309232072, + "acc_norm": 0.2698412698412698, + "acc_norm_stderr": 0.022860838309232072 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2361111111111111, + "acc_stderr": 0.03551446610810826, + "acc_norm": 0.2361111111111111, + "acc_norm_stderr": 0.03551446610810826 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.16, + "acc_stderr": 0.03684529491774709, + "acc_norm": 0.16, + "acc_norm_stderr": 0.03684529491774709 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.42, + "acc_stderr": 0.04960449637488583, + "acc_norm": 0.42, + "acc_norm_stderr": 0.04960449637488583 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.3901734104046243, + "acc_stderr": 0.026261677607806642, + "acc_norm": 0.3901734104046243, + "acc_norm_stderr": 0.026261677607806642 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.34355828220858897, + "acc_stderr": 0.03731133519673893, + "acc_norm": 0.34355828220858897, + "acc_norm_stderr": 0.03731133519673893 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.39197530864197533, + "acc_stderr": 0.02716368603827123, + "acc_norm": 0.39197530864197533, + "acc_norm_stderr": 0.02716368603827123 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.32642487046632124, + "acc_stderr": 0.033840286211432945, + "acc_norm": 0.32642487046632124, + "acc_norm_stderr": 0.033840286211432945 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 
0.2631578947368421, + "acc_stderr": 0.04142439719489361, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.04142439719489361 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3743119266055046, + "acc_stderr": 0.02074895940898831, + "acc_norm": 0.3743119266055046, + "acc_norm_stderr": 0.02074895940898831 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.23015873015873015, + "acc_stderr": 0.03764950879790604, + "acc_norm": 0.23015873015873015, + "acc_norm_stderr": 0.03764950879790604 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4215686274509804, + "acc_stderr": 0.028275490156791434, + "acc_norm": 0.4215686274509804, + "acc_norm_stderr": 0.028275490156791434 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5785123966942148, + "acc_stderr": 0.045077322787750874, + "acc_norm": 0.5785123966942148, + "acc_norm_stderr": 0.045077322787750874 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40131578947368424, + "acc_stderr": 0.039889037033362836, + "acc_norm": 0.40131578947368424, + "acc_norm_stderr": 0.039889037033362836 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.369281045751634, + "acc_stderr": 0.019524316744866346, + "acc_norm": 0.369281045751634, + "acc_norm_stderr": 0.019524316744866346 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.30141843971631205, + "acc_stderr": 0.02737412888263115, + "acc_norm": 0.30141843971631205, + "acc_norm_stderr": 0.02737412888263115 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2767857142857143, + "acc_stderr": 0.04246624336697624, + "acc_norm": 0.2767857142857143, + "acc_norm_stderr": 0.04246624336697624 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.27314814814814814, + "acc_stderr": 0.030388051301678116, + "acc_norm": 0.27314814814814814, + "acc_norm_stderr": 
0.030388051301678116 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2446927374301676, + "acc_stderr": 0.014378169884098424, + "acc_norm": 0.2446927374301676, + "acc_norm_stderr": 0.014378169884098424 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.31985294117647056, + "acc_stderr": 0.02833295951403124, + "acc_norm": 0.31985294117647056, + "acc_norm_stderr": 0.02833295951403124 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.37551020408163266, + "acc_stderr": 0.03100120903989484, + "acc_norm": 0.37551020408163266, + "acc_norm_stderr": 0.03100120903989484 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5232067510548524, + "acc_stderr": 0.032512152011410174, + "acc_norm": 0.5232067510548524, + "acc_norm_stderr": 0.032512152011410174 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2985658409387223, + "acc_stderr": 0.011688060141794208, + "acc_norm": 0.2985658409387223, + "acc_norm_stderr": 0.011688060141794208 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.03410785338904719, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.03410785338904719 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3939393939393939, + "acc_stderr": 0.0381549430868893, + "acc_norm": 0.3939393939393939, + "acc_norm_stderr": 0.0381549430868893 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24969400244798043, + "mc1_stderr": 0.015152286907148125, + "mc2": 0.38092210327853554, + "mc2_stderr": 0.014881931344043989 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.24675324675324675, + "acc_stderr": 0.014822275820015236, + 
"acc_norm": 0.31286894923258557, + "acc_norm_stderr": 0.015941010118302658 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + 
"harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "FINDA-FIT/llama-p", + "model_sha": "e54c345988c60cdafe797a2f15e916801ee4ab7b", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/FINDA-FIT/llama-r/result_2023-09-30 09:12:26.json b/FINDA-FIT/llama-r/result_2023-09-30 09:12:26.json new file mode 100644 index 0000000000000000000000000000000000000000..32ef0163e5f33b0723deeea151c8dc11afdb0874 --- /dev/null +++ b/FINDA-FIT/llama-r/result_2023-09-30 09:12:26.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.20136518771331058, + "acc_stderr": 0.011718927477444262, + "acc_norm": 0.2636518771331058, + "acc_norm_stderr": 0.01287592915129705 + }, + "harness|ko_hellaswag|10": { + "acc": 0.2665803624775941, + "acc_stderr": 0.004412674170976469, + "acc_norm": 0.27922724556861184, + "acc_norm_stderr": 0.004477025762200596 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.2046783625730994, + "acc_stderr": 0.03094445977853321, + "acc_norm": 0.2046783625730994, + "acc_norm_stderr": 0.03094445977853321 + }, + 
"harness|ko_mmlu_management|5": { + "acc": 0.30097087378640774, + "acc_stderr": 0.04541609446503949, + "acc_norm": 0.30097087378640774, + "acc_norm_stderr": 0.04541609446503949 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2656449553001277, + "acc_stderr": 0.01579430248788873, + "acc_norm": 0.2656449553001277, + "acc_norm_stderr": 0.01579430248788873 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2074074074074074, + "acc_stderr": 0.03502553170678316, + "acc_norm": 0.2074074074074074, + "acc_norm_stderr": 0.03502553170678316 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.24680851063829787, + "acc_stderr": 0.028185441301234113, + "acc_norm": 0.24680851063829787, + "acc_norm_stderr": 0.028185441301234113 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.2710843373493976, + "acc_stderr": 0.034605799075530255, + "acc_norm": 0.2710843373493976, + "acc_norm_stderr": 0.034605799075530255 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2958199356913183, + "acc_stderr": 0.02592237178881877, + "acc_norm": 0.2958199356913183, + "acc_norm_stderr": 0.02592237178881877 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.23766816143497757, + "acc_stderr": 0.028568079464714267, + "acc_norm": 0.23766816143497757, + "acc_norm_stderr": 0.028568079464714267 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.24427480916030533, + "acc_stderr": 0.037683359597287434, + "acc_norm": 0.24427480916030533, + "acc_norm_stderr": 0.037683359597287434 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384739, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384739 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.24242424242424243, + "acc_stderr": 0.030532892233932032, + "acc_norm": 0.24242424242424243, + "acc_norm_stderr": 0.030532892233932032 + }, 
+ "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2482758620689655, + "acc_stderr": 0.03600105692727771, + "acc_norm": 0.2482758620689655, + "acc_norm_stderr": 0.03600105692727771 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.16666666666666666, + "acc_stderr": 0.03708284662416542, + "acc_norm": 0.16666666666666666, + "acc_norm_stderr": 0.03708284662416542 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.33613445378151263, + "acc_stderr": 0.03068473711513536, + "acc_norm": 0.33613445378151263, + "acc_norm_stderr": 0.03068473711513536 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3153846153846154, + "acc_stderr": 0.02355964698318994, + "acc_norm": 0.3153846153846154, + "acc_norm_stderr": 0.02355964698318994 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909284, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909284 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.16, + "acc_stderr": 0.03684529491774708, + "acc_norm": 0.16, + "acc_norm_stderr": 0.03684529491774708 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.040191074725573483, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.040191074725573483 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3103448275862069, + "acc_stderr": 0.03255086769970103, + "acc_norm": 0.3103448275862069, + "acc_norm_stderr": 0.03255086769970103 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.31290322580645163, + "acc_stderr": 0.026377567028645858, + "acc_norm": 0.31290322580645163, + "acc_norm_stderr": 0.026377567028645858 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.19230769230769232, + "acc_stderr": 0.025819233256483727, + "acc_norm": 0.19230769230769232, + "acc_norm_stderr": 0.025819233256483727 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2339622641509434, + "acc_stderr": 0.02605529690115292, + "acc_norm": 
0.2339622641509434, + "acc_norm_stderr": 0.02605529690115292 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.20909090909090908, + "acc_stderr": 0.03895091015724138, + "acc_norm": 0.20909090909090908, + "acc_norm_stderr": 0.03895091015724138 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.026719240783712177, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.026719240783712177 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33112582781456956, + "acc_stderr": 0.038425817186598696, + "acc_norm": 0.33112582781456956, + "acc_norm_stderr": 0.038425817186598696 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.19402985074626866, + "acc_stderr": 0.027962677604768893, + "acc_norm": 0.19402985074626866, + "acc_norm_stderr": 0.027962677604768893 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.24277456647398843, + "acc_stderr": 0.0326926380614177, + "acc_norm": 0.24277456647398843, + "acc_norm_stderr": 0.0326926380614177 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2566137566137566, + "acc_stderr": 0.022494510767503154, + "acc_norm": 0.2566137566137566, + "acc_norm_stderr": 0.022494510767503154 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2708333333333333, + "acc_stderr": 0.03716177437566018, + "acc_norm": 0.2708333333333333, + "acc_norm_stderr": 0.03716177437566018 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.23410404624277456, + "acc_stderr": 0.022797110278071134, + "acc_norm": 0.23410404624277456, + "acc_norm_stderr": 0.022797110278071134 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2822085889570552, + 
"acc_stderr": 0.03536117886664743, + "acc_norm": 0.2822085889570552, + "acc_norm_stderr": 0.03536117886664743 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.02438366553103545, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.02438366553103545 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.04605661864718381, + "acc_norm": 0.3, + "acc_norm_stderr": 0.04605661864718381 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.2849740932642487, + "acc_stderr": 0.0325771407770966, + "acc_norm": 0.2849740932642487, + "acc_norm_stderr": 0.0325771407770966 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.04185774424022056, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.04185774424022056 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.23119266055045873, + "acc_stderr": 0.018075750241633163, + "acc_norm": 0.23119266055045873, + "acc_norm_stderr": 0.018075750241633163 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.16666666666666666, + "acc_stderr": 0.03333333333333337, + "acc_norm": 0.16666666666666666, + "acc_norm_stderr": 0.03333333333333337 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.024954184324879912, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.024954184324879912 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.34710743801652894, + "acc_stderr": 0.04345724570292534, + "acc_norm": 0.34710743801652894, + "acc_norm_stderr": 0.04345724570292534 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.26973684210526316, + "acc_stderr": 0.036117805602848975, + "acc_norm": 0.26973684210526316, + "acc_norm_stderr": 0.036117805602848975 + }, + "harness|ko_mmlu_professional_psychology|5": { + 
"acc": 0.22712418300653595, + "acc_stderr": 0.016949853279212373, + "acc_norm": 0.22712418300653595, + "acc_norm_stderr": 0.016949853279212373 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.24113475177304963, + "acc_stderr": 0.02551873104953777, + "acc_norm": 0.24113475177304963, + "acc_norm_stderr": 0.02551873104953777 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.23214285714285715, + "acc_stderr": 0.04007341809755806, + "acc_norm": 0.23214285714285715, + "acc_norm_stderr": 0.04007341809755806 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4351851851851852, + "acc_stderr": 0.033812000056435254, + "acc_norm": 0.4351851851851852, + "acc_norm_stderr": 0.033812000056435254 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24916201117318434, + "acc_stderr": 0.01446589382985993, + "acc_norm": 0.24916201117318434, + "acc_norm_stderr": 0.01446589382985993 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.44485294117647056, + "acc_stderr": 0.030187532060329383, + "acc_norm": 0.44485294117647056, + "acc_norm_stderr": 0.030187532060329383 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.23265306122448978, + "acc_stderr": 0.02704925791589618, + "acc_norm": 0.23265306122448978, + "acc_norm_stderr": 0.02704925791589618 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.22784810126582278, + "acc_stderr": 0.02730348459906942, + "acc_norm": 0.22784810126582278, + "acc_norm_stderr": 0.02730348459906942 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.23728813559322035, + "acc_stderr": 0.010865436690780272, + "acc_norm": 0.23728813559322035, + 
"acc_norm_stderr": 0.010865436690780272 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.25, + "acc_stderr": 0.03039153369274154, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03039153369274154 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.24848484848484848, + "acc_stderr": 0.03374402644139404, + "acc_norm": 0.24848484848484848, + "acc_norm_stderr": 0.03374402644139404 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2741738066095471, + "mc1_stderr": 0.01561651849721938, + "mc2": 0.5406294687690661, + "mc2_stderr": 0.016334114258114155 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.14994096812278632, + "acc_stderr": 0.012274378656217328, + "acc_norm": 0.3872491145218418, + "acc_norm_stderr": 0.016747577991642792 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + 
"harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "FINDA-FIT/llama-r", + "model_sha": "6bdde9a227da60c2db803024d5b2e3a53a41cf0b", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/FINGU-AI/FinguAI-Chat-v1/result_2024-04-01 04:08:18.json b/FINGU-AI/FinguAI-Chat-v1/result_2024-04-01 04:08:18.json new file mode 100644 index 
0000000000000000000000000000000000000000..7a9cbb311fce314f75f2a48f8f003bc1e968e0ae --- /dev/null +++ b/FINGU-AI/FinguAI-Chat-v1/result_2024-04-01 04:08:18.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.21245733788395904, + "acc_stderr": 0.011953482906582947, + "acc_norm": 0.25, + "acc_norm_stderr": 0.012653835621466646 + }, + "harness|ko_hellaswag|10": { + "acc": 0.28291177056363276, + "acc_stderr": 0.00449493402546234, + "acc_norm": 0.3088030272854013, + "acc_norm_stderr": 0.004610554974411238 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.19298245614035087, + "acc_stderr": 0.030267457554898465, + "acc_norm": 0.19298245614035087, + "acc_norm_stderr": 0.030267457554898465 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.39805825242718446, + "acc_stderr": 0.04846748253977239, + "acc_norm": 0.39805825242718446, + "acc_norm_stderr": 0.04846748253977239 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.210727969348659, + "acc_stderr": 0.014583812465862546, + "acc_norm": 0.210727969348659, + "acc_norm_stderr": 0.014583812465862546 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2074074074074074, + "acc_stderr": 0.035025531706783165, + "acc_norm": 0.2074074074074074, + "acc_norm_stderr": 0.035025531706783165 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2297872340425532, + "acc_stderr": 0.02750175294441242, + "acc_norm": 0.2297872340425532, + "acc_norm_stderr": 0.02750175294441242 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.19879518072289157, + "acc_stderr": 0.031069390260789406, + "acc_norm": 0.19879518072289157, + "acc_norm_stderr": 0.031069390260789406 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2347266881028939, + "acc_stderr": 0.024071805887677048, + "acc_norm": 0.2347266881028939, + "acc_norm_stderr": 
0.024071805887677048 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.13004484304932734, + "acc_stderr": 0.02257451942417487, + "acc_norm": 0.13004484304932734, + "acc_norm_stderr": 0.02257451942417487 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.25190839694656486, + "acc_stderr": 0.03807387116306086, + "acc_norm": 0.25190839694656486, + "acc_norm_stderr": 0.03807387116306086 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3484848484848485, + "acc_stderr": 0.033948539651564025, + "acc_norm": 0.3484848484848485, + "acc_norm_stderr": 0.033948539651564025 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2482758620689655, + "acc_stderr": 0.036001056927277716, + "acc_norm": 0.2482758620689655, + "acc_norm_stderr": 0.036001056927277716 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.35294117647058826, + "acc_stderr": 0.04755129616062947, + "acc_norm": 0.35294117647058826, + "acc_norm_stderr": 0.04755129616062947 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.31092436974789917, + "acc_stderr": 0.030066761582977924, + "acc_norm": 0.31092436974789917, + "acc_norm_stderr": 0.030066761582977924 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.32051282051282054, + "acc_stderr": 0.023661296393964273, + "acc_norm": 0.32051282051282054, + "acc_norm_stderr": 0.023661296393964273 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816507, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816507 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.19, + "acc_stderr": 0.03942772444036623, + "acc_norm": 0.19, + "acc_norm_stderr": 0.03942772444036623 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.28703703703703703, + "acc_stderr": 0.043733130409147614, + "acc_norm": 
0.28703703703703703, + "acc_norm_stderr": 0.043733130409147614 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.031785297106427496, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.031785297106427496 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3193548387096774, + "acc_stderr": 0.026522709674667765, + "acc_norm": 0.3193548387096774, + "acc_norm_stderr": 0.026522709674667765 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.3418803418803419, + "acc_stderr": 0.031075028526507762, + "acc_norm": 0.3418803418803419, + "acc_norm_stderr": 0.031075028526507762 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3169811320754717, + "acc_stderr": 0.028637235639800935, + "acc_norm": 0.3169811320754717, + "acc_norm_stderr": 0.028637235639800935 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.20909090909090908, + "acc_stderr": 0.03895091015724137, + "acc_norm": 0.20909090909090908, + "acc_norm_stderr": 0.03895091015724137 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.02684205787383371, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.02684205787383371 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3509933774834437, + "acc_stderr": 0.03896981964257374, + "acc_norm": 0.3509933774834437, + "acc_norm_stderr": 0.03896981964257374 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.26865671641791045, + "acc_stderr": 0.03134328358208954, + "acc_norm": 0.26865671641791045, + "acc_norm_stderr": 0.03134328358208954 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.26011560693641617, + "acc_stderr": 0.033450369167889925, + "acc_norm": 0.26011560693641617, + "acc_norm_stderr": 0.033450369167889925 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.23809523809523808, + "acc_stderr": 0.02193587808118476, + "acc_norm": 0.23809523809523808, + "acc_norm_stderr": 0.02193587808118476 + }, + 
"harness|ko_mmlu_college_biology|5": { + "acc": 0.2916666666666667, + "acc_stderr": 0.038009680605548574, + "acc_norm": 0.2916666666666667, + "acc_norm_stderr": 0.038009680605548574 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.26011560693641617, + "acc_stderr": 0.023618678310069374, + "acc_norm": 0.26011560693641617, + "acc_norm_stderr": 0.023618678310069374 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.25153374233128833, + "acc_stderr": 0.034089978868575295, + "acc_norm": 0.25153374233128833, + "acc_norm_stderr": 0.034089978868575295 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.22530864197530864, + "acc_stderr": 0.02324620264781975, + "acc_norm": 0.22530864197530864, + "acc_norm_stderr": 0.02324620264781975 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.26424870466321243, + "acc_stderr": 0.03182155050916647, + "acc_norm": 0.26424870466321243, + "acc_norm_stderr": 0.03182155050916647 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.040493392977481404, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.040493392977481404 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3119266055045872, + "acc_stderr": 0.01986296797670724, + "acc_norm": 0.3119266055045872, + "acc_norm_stderr": 0.01986296797670724 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.36507936507936506, + "acc_stderr": 0.04306241259127153, + "acc_norm": 0.36507936507936506, + "acc_norm_stderr": 
0.04306241259127153 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2973856209150327, + "acc_stderr": 0.026173908506718576, + "acc_norm": 0.2973856209150327, + "acc_norm_stderr": 0.026173908506718576 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.24793388429752067, + "acc_stderr": 0.039418975265163046, + "acc_norm": 0.24793388429752067, + "acc_norm_stderr": 0.039418975265163046 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3157894736842105, + "acc_stderr": 0.037827289808654706, + "acc_norm": 0.3157894736842105, + "acc_norm_stderr": 0.037827289808654706 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2565359477124183, + "acc_stderr": 0.017667841612378995, + "acc_norm": 0.2565359477124183, + "acc_norm_stderr": 0.017667841612378995 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.25177304964539005, + "acc_stderr": 0.025892151156709405, + "acc_norm": 0.25177304964539005, + "acc_norm_stderr": 0.025892151156709405 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.16071428571428573, + "acc_stderr": 0.034859460964757394, + "acc_norm": 0.16071428571428573, + "acc_norm_stderr": 0.034859460964757394 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.03400603625538272, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.03400603625538272 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27262569832402234, + "acc_stderr": 0.014893391735249608, + "acc_norm": 0.27262569832402234, + "acc_norm_stderr": 0.014893391735249608 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932267, + 
"acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932267 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.45588235294117646, + "acc_stderr": 0.03025437257397669, + "acc_norm": 0.45588235294117646, + "acc_norm_stderr": 0.03025437257397669 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.20408163265306123, + "acc_stderr": 0.025801283475090496, + "acc_norm": 0.20408163265306123, + "acc_norm_stderr": 0.025801283475090496 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2911392405063291, + "acc_stderr": 0.029571601065753374, + "acc_norm": 0.2911392405063291, + "acc_norm_stderr": 0.029571601065753374 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.25097783572359844, + "acc_stderr": 0.011073730299187217, + "acc_norm": 0.25097783572359844, + "acc_norm_stderr": 0.011073730299187217 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.030964517926923413, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.030964517926923413 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2909090909090909, + "acc_stderr": 0.03546563019624336, + "acc_norm": 0.2909090909090909, + "acc_norm_stderr": 0.03546563019624336 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.27906976744186046, + "mc1_stderr": 0.01570210709062789, + "mc2": 0.4684529912207382, + "mc2_stderr": 0.016283298202620658 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2255017709563164, + "acc_stderr": 0.014368122149532174, + "acc_norm": 0.2939787485242031, + "acc_norm_stderr": 0.015663242569091115 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + 
"harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 
1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "FINGU-AI/FinguAI-Chat-v1", + "model_sha": "3557829049749742bdb0bfaf23de2d07ecf928f2", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/GAI-LLM/KoSOLAR-10.7B-dpo-v1/result_2024-01-09 05:52:01.json b/GAI-LLM/KoSOLAR-10.7B-dpo-v1/result_2024-01-09 05:52:01.json new file mode 100644 index 0000000000000000000000000000000000000000..cb4efda45d5f0a573ce7d1531be606e8c9ba2d4b --- /dev/null +++ b/GAI-LLM/KoSOLAR-10.7B-dpo-v1/result_2024-01-09 05:52:01.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.34044368600682595, + "acc_stderr": 0.01384746051889298, + "acc_norm": 0.36945392491467577, + "acc_norm_stderr": 0.0141045783664919 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3231428002389962, + "acc_stderr": 0.004667209383690232, + "acc_norm": 0.3798048197570205, + "acc_norm_stderr": 0.004843462545943493 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5321637426900585, + "acc_stderr": 0.038268824176603704, + "acc_norm": 0.5321637426900585, + "acc_norm_stderr": 0.038268824176603704 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6116504854368932, + "acc_stderr": 0.04825729337356389, + "acc_norm": 0.6116504854368932, + "acc_norm_stderr": 0.04825729337356389 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5938697318007663, + "acc_stderr": 0.017562037406478923, + "acc_norm": 0.5938697318007663, + "acc_norm_stderr": 0.017562037406478923 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4222222222222222, + 
"acc_stderr": 0.04266763404099582, + "acc_norm": 0.4222222222222222, + "acc_norm_stderr": 0.04266763404099582 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816508, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816508 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4340425531914894, + "acc_stderr": 0.032400380867927465, + "acc_norm": 0.4340425531914894, + "acc_norm_stderr": 0.032400380867927465 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.40963855421686746, + "acc_stderr": 0.03828401115079022, + "acc_norm": 0.40963855421686746, + "acc_norm_stderr": 0.03828401115079022 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5337620578778135, + "acc_stderr": 0.02833327710956279, + "acc_norm": 0.5337620578778135, + "acc_norm_stderr": 0.02833327710956279 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.47085201793721976, + "acc_stderr": 0.03350073248773404, + "acc_norm": 0.47085201793721976, + "acc_norm_stderr": 0.03350073248773404 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.549618320610687, + "acc_stderr": 0.04363643698524779, + "acc_norm": 0.549618320610687, + "acc_norm_stderr": 0.04363643698524779 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5707070707070707, + "acc_stderr": 0.03526552724601199, + "acc_norm": 0.5707070707070707, + "acc_norm_stderr": 0.03526552724601199 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4482758620689655, + "acc_stderr": 0.04144311810878151, + "acc_norm": 0.4482758620689655, + "acc_norm_stderr": 0.04144311810878151 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.042801058373643966, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.042801058373643966 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 
0.5504201680672269, + "acc_stderr": 0.03231293497137707, + "acc_norm": 0.5504201680672269, + "acc_norm_stderr": 0.03231293497137707 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.48205128205128206, + "acc_stderr": 0.025334667080954963, + "acc_norm": 0.48205128205128206, + "acc_norm_stderr": 0.025334667080954963 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.57, + "acc_stderr": 0.0497569851956243, + "acc_norm": 0.57, + "acc_norm_stderr": 0.0497569851956243 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5833333333333334, + "acc_stderr": 0.04766075165356461, + "acc_norm": 0.5833333333333334, + "acc_norm_stderr": 0.04766075165356461 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4187192118226601, + "acc_stderr": 0.03471192860518468, + "acc_norm": 0.4187192118226601, + "acc_norm_stderr": 0.03471192860518468 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5451612903225806, + "acc_stderr": 0.028327743091561067, + "acc_norm": 0.5451612903225806, + "acc_norm_stderr": 0.028327743091561067 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6923076923076923, + "acc_stderr": 0.030236389942173092, + "acc_norm": 0.6923076923076923, + "acc_norm_stderr": 0.030236389942173092 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4981132075471698, + "acc_stderr": 0.03077265364207565, + "acc_norm": 0.4981132075471698, + "acc_norm_stderr": 0.03077265364207565 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5181818181818182, + "acc_stderr": 0.04785964010794915, + "acc_norm": 0.5181818181818182, + "acc_norm_stderr": 0.04785964010794915 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2851851851851852, + "acc_stderr": 0.027528599210340496, + "acc_norm": 0.2851851851851852, + "acc_norm_stderr": 0.027528599210340496 + }, + 
"harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3509933774834437, + "acc_stderr": 0.03896981964257374, + "acc_norm": 0.3509933774834437, + "acc_norm_stderr": 0.03896981964257374 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6119402985074627, + "acc_stderr": 0.03445789964362749, + "acc_norm": 0.6119402985074627, + "acc_norm_stderr": 0.03445789964362749 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4046242774566474, + "acc_stderr": 0.03742461193887248, + "acc_norm": 0.4046242774566474, + "acc_norm_stderr": 0.03742461193887248 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.38095238095238093, + "acc_stderr": 0.025010749116137602, + "acc_norm": 0.38095238095238093, + "acc_norm_stderr": 0.025010749116137602 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4513888888888889, + "acc_stderr": 0.04161402398403279, + "acc_norm": 0.4513888888888889, + "acc_norm_stderr": 0.04161402398403279 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.62, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.62, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.47398843930635837, + "acc_stderr": 0.02688264343402289, + "acc_norm": 0.47398843930635837, + "acc_norm_stderr": 0.02688264343402289 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4785276073619632, + "acc_stderr": 0.03924746876751129, + "acc_norm": 0.4785276073619632, + "acc_norm_stderr": 0.03924746876751129 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5709876543209876, + "acc_stderr": 0.027538925613470863, + "acc_norm": 0.5709876543209876, + "acc_norm_stderr": 0.027538925613470863 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + 
"harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6476683937823834, + "acc_stderr": 0.034474782864143565, + "acc_norm": 0.6476683937823834, + "acc_norm_stderr": 0.034474782864143565 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.34210526315789475, + "acc_stderr": 0.04462917535336938, + "acc_norm": 0.34210526315789475, + "acc_norm_stderr": 0.04462917535336938 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5834862385321101, + "acc_stderr": 0.02113637650403087, + "acc_norm": 0.5834862385321101, + "acc_norm_stderr": 0.02113637650403087 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3412698412698413, + "acc_stderr": 0.042407993275749255, + "acc_norm": 0.3412698412698413, + "acc_norm_stderr": 0.042407993275749255 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5196078431372549, + "acc_stderr": 0.028607893699576073, + "acc_norm": 0.5196078431372549, + "acc_norm_stderr": 0.028607893699576073 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6611570247933884, + "acc_stderr": 0.0432076780753667, + "acc_norm": 0.6611570247933884, + "acc_norm_stderr": 0.0432076780753667 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5131578947368421, + "acc_stderr": 0.04067533136309173, + "acc_norm": 0.5131578947368421, + "acc_norm_stderr": 0.04067533136309173 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4035947712418301, + "acc_stderr": 0.019848280168401164, + "acc_norm": 0.4035947712418301, + "acc_norm_stderr": 0.019848280168401164 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.4078014184397163, + "acc_stderr": 0.02931601177634356, + "acc_norm": 0.4078014184397163, + "acc_norm_stderr": 0.02931601177634356 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.32142857142857145, + "acc_stderr": 0.04432804055291519, + "acc_norm": 
0.32142857142857145, + "acc_norm_stderr": 0.04432804055291519 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4537037037037037, + "acc_stderr": 0.033953227263757976, + "acc_norm": 0.4537037037037037, + "acc_norm_stderr": 0.033953227263757976 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.34301675977653634, + "acc_stderr": 0.015876912673057738, + "acc_norm": 0.34301675977653634, + "acc_norm_stderr": 0.015876912673057738 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.48, + "acc_stderr": 0.05021167315686779, + "acc_norm": 0.48, + "acc_norm_stderr": 0.05021167315686779 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.62, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.62, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.47058823529411764, + "acc_stderr": 0.03032024326500413, + "acc_norm": 0.47058823529411764, + "acc_norm_stderr": 0.03032024326500413 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.47346938775510206, + "acc_stderr": 0.03196412734523272, + "acc_norm": 0.47346938775510206, + "acc_norm_stderr": 0.03196412734523272 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6751054852320675, + "acc_stderr": 0.030486039389105307, + "acc_norm": 0.6751054852320675, + "acc_norm_stderr": 0.030486039389105307 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3578878748370274, + "acc_stderr": 0.012243563850490325, + "acc_norm": 0.3578878748370274, + "acc_norm_stderr": 0.012243563850490325 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5833333333333334, + "acc_stderr": 0.034602283272391725, + "acc_norm": 0.5833333333333334, + "acc_norm_stderr": 0.034602283272391725 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5818181818181818, + "acc_stderr": 0.03851716319398395, + "acc_norm": 0.5818181818181818, + "acc_norm_stderr": 0.03851716319398395 + }, + "harness|ko_truthfulqa_mc|0": { + 
"mc1": 0.2729498164014688, + "mc1_stderr": 0.015594753632006516, + "mc2": 0.5157849052136222, + "mc2_stderr": 0.01681907310324583 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.24793388429752067, + "acc_stderr": 0.014846044968252252, + "acc_norm": 0.2939787485242031, + "acc_norm_stderr": 0.015663242569091122 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "GAI-LLM/KoSOLAR-10.7B-dpo-v1", + "model_sha": "2e96d7a3669d63376c7a49d5793d69bdcce52f72", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/GAI-LLM/KoSOLAR-10.7B-mixed-v13/result_2024-01-09 01:19:12.json b/GAI-LLM/KoSOLAR-10.7B-mixed-v13/result_2024-01-09 01:19:12.json new file mode 100644 index 0000000000000000000000000000000000000000..ef8ca21d636a207569427e08239131c01dffb18a --- /dev/null +++ b/GAI-LLM/KoSOLAR-10.7B-mixed-v13/result_2024-01-09 01:19:12.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3430034129692833, + "acc_stderr": 0.013872423223718169, + "acc_norm": 0.35494880546075086, + "acc_norm_stderr": 0.013983036904094095 + }, + "harness|ko_hellaswag|10": { + "acc": 0.317167894841665, + 
"acc_stderr": 0.004644223294727725, + "acc_norm": 0.37402907787293366, + "acc_norm_stderr": 0.0048288229209152295 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5730994152046783, + "acc_stderr": 0.03793620616529916, + "acc_norm": 0.5730994152046783, + "acc_norm_stderr": 0.03793620616529916 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6019417475728155, + "acc_stderr": 0.048467482539772386, + "acc_norm": 0.6019417475728155, + "acc_norm_stderr": 0.048467482539772386 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6270753512132823, + "acc_stderr": 0.01729286826945393, + "acc_norm": 0.6270753512132823, + "acc_norm_stderr": 0.01729286826945393 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45185185185185184, + "acc_stderr": 0.04299268905480863, + "acc_norm": 0.45185185185185184, + "acc_norm_stderr": 0.04299268905480863 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.5063829787234042, + "acc_stderr": 0.03268335899936336, + "acc_norm": 0.5063829787234042, + "acc_norm_stderr": 0.03268335899936336 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42168674698795183, + "acc_stderr": 0.03844453181770917, + "acc_norm": 0.42168674698795183, + "acc_norm_stderr": 0.03844453181770917 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5337620578778135, + "acc_stderr": 0.028333277109562783, + "acc_norm": 0.5337620578778135, + "acc_norm_stderr": 0.028333277109562783 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5067264573991032, + "acc_stderr": 0.03355476596234354, + "acc_norm": 0.5067264573991032, + "acc_norm_stderr": 0.03355476596234354 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.648854961832061, + "acc_stderr": 0.04186445163013751, + "acc_norm": 0.648854961832061, + "acc_norm_stderr": 0.04186445163013751 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.45, 
+ "acc_stderr": 0.04999999999999999, + "acc_norm": 0.45, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.03427308652999936, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.03427308652999936 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.47586206896551725, + "acc_stderr": 0.0416180850350153, + "acc_norm": 0.47586206896551725, + "acc_norm_stderr": 0.0416180850350153 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.04488482852329017, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.04488482852329017 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5630252100840336, + "acc_stderr": 0.03221943636566196, + "acc_norm": 0.5630252100840336, + "acc_norm_stderr": 0.03221943636566196 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5128205128205128, + "acc_stderr": 0.02534267129380724, + "acc_norm": 0.5128205128205128, + "acc_norm_stderr": 0.02534267129380724 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.55, + "acc_stderr": 0.04999999999999999, + "acc_norm": 0.55, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.04803752235190192, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.04803752235190192 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.03465304488406796, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.03465304488406796 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5774193548387097, + "acc_stderr": 0.02810096472427264, + "acc_norm": 0.5774193548387097, + "acc_norm_stderr": 0.02810096472427264 + }, + "harness|ko_mmlu_marketing|5": { + 
"acc": 0.7606837606837606, + "acc_stderr": 0.027951826808924333, + "acc_norm": 0.7606837606837606, + "acc_norm_stderr": 0.027951826808924333 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.47924528301886793, + "acc_stderr": 0.030746349975723463, + "acc_norm": 0.47924528301886793, + "acc_norm_stderr": 0.030746349975723463 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5545454545454546, + "acc_stderr": 0.047605488214603246, + "acc_norm": 0.5545454545454546, + "acc_norm_stderr": 0.047605488214603246 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3111111111111111, + "acc_stderr": 0.028226446749683515, + "acc_norm": 0.3111111111111111, + "acc_norm_stderr": 0.028226446749683515 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33774834437086093, + "acc_stderr": 0.038615575462551684, + "acc_norm": 0.33774834437086093, + "acc_norm_stderr": 0.038615575462551684 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6517412935323383, + "acc_stderr": 0.033687874661154596, + "acc_norm": 0.6517412935323383, + "acc_norm_stderr": 0.033687874661154596 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5028901734104047, + "acc_stderr": 0.038124005659748335, + "acc_norm": 0.5028901734104047, + "acc_norm_stderr": 0.038124005659748335 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.4021164021164021, + "acc_stderr": 0.025253032554997692, + "acc_norm": 0.4021164021164021, + "acc_norm_stderr": 0.025253032554997692 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4583333333333333, + "acc_stderr": 0.04166666666666665, + "acc_norm": 0.4583333333333333, + "acc_norm_stderr": 0.04166666666666665 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.66, + "acc_stderr": 0.04760952285695238, + "acc_norm": 0.66, + "acc_norm_stderr": 0.04760952285695238 + 
}, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5375722543352601, + "acc_stderr": 0.026842985519615375, + "acc_norm": 0.5375722543352601, + "acc_norm_stderr": 0.026842985519615375 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4539877300613497, + "acc_stderr": 0.0391170190467718, + "acc_norm": 0.4539877300613497, + "acc_norm_stderr": 0.0391170190467718 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5833333333333334, + "acc_stderr": 0.02743162372241501, + "acc_norm": 0.5833333333333334, + "acc_norm_stderr": 0.02743162372241501 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6580310880829016, + "acc_stderr": 0.03423465100104284, + "acc_norm": 0.6580310880829016, + "acc_norm_stderr": 0.03423465100104284 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.34210526315789475, + "acc_stderr": 0.04462917535336937, + "acc_norm": 0.34210526315789475, + "acc_norm_stderr": 0.04462917535336937 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6568807339449542, + "acc_stderr": 0.020354777736086037, + "acc_norm": 0.6568807339449542, + "acc_norm_stderr": 0.020354777736086037 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.04360314860077459, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.04360314860077459 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5424836601307189, + "acc_stderr": 0.028526383452142638, + "acc_norm": 0.5424836601307189, + "acc_norm_stderr": 0.028526383452142638 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7107438016528925, + "acc_stderr": 0.041391127276354626, + "acc_norm": 0.7107438016528925, + 
"acc_norm_stderr": 0.041391127276354626 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5, + "acc_stderr": 0.04068942293855797, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04068942293855797 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.42483660130718953, + "acc_stderr": 0.01999797303545833, + "acc_norm": 0.42483660130718953, + "acc_norm_stderr": 0.01999797303545833 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.40070921985815605, + "acc_stderr": 0.02923346574557309, + "acc_norm": 0.40070921985815605, + "acc_norm_stderr": 0.02923346574557309 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.33035714285714285, + "acc_stderr": 0.04464285714285712, + "acc_norm": 0.33035714285714285, + "acc_norm_stderr": 0.04464285714285712 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4861111111111111, + "acc_stderr": 0.034086558679777494, + "acc_norm": 0.4861111111111111, + "acc_norm_stderr": 0.034086558679777494 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24134078212290502, + "acc_stderr": 0.014310999547961454, + "acc_norm": 0.24134078212290502, + "acc_norm_stderr": 0.014310999547961454 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.04999999999999999, + "acc_norm": 0.45, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.63, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.63, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.48161764705882354, + "acc_stderr": 0.030352303395351964, + "acc_norm": 0.48161764705882354, + "acc_norm_stderr": 0.030352303395351964 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5673469387755102, + "acc_stderr": 0.031717528240626645, + "acc_norm": 0.5673469387755102, + "acc_norm_stderr": 0.031717528240626645 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7088607594936709, + "acc_stderr": 
0.029571601065753374, + "acc_norm": 0.7088607594936709, + "acc_norm_stderr": 0.029571601065753374 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.36114732724902215, + "acc_stderr": 0.012267935477519046, + "acc_norm": 0.36114732724902215, + "acc_norm_stderr": 0.012267935477519046 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6029411764705882, + "acc_stderr": 0.034341311647191286, + "acc_norm": 0.6029411764705882, + "acc_norm_stderr": 0.034341311647191286 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6181818181818182, + "acc_stderr": 0.03793713171165633, + "acc_norm": 0.6181818181818182, + "acc_norm_stderr": 0.03793713171165633 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2252141982864137, + "mc1_stderr": 0.01462324076802348, + "mc2": 0.42495215761771127, + "mc2_stderr": 0.016033708445870685 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.23376623376623376, + "acc_stderr": 0.014550782587103121, + "acc_norm": 0.2727272727272727, + "acc_norm_stderr": 0.015311853110300354 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + 
"harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "GAI-LLM/KoSOLAR-10.7B-mixed-v13", + "model_sha": "b71be1d2f6ce126c7cf7b2857fd2411be02368b5", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + 
"override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/GAI-LLM/OPEN-SOLAR-KO-10.7B-dpo-dedup/result_2024-01-31 09:23:30.json b/GAI-LLM/OPEN-SOLAR-KO-10.7B-dpo-dedup/result_2024-01-31 09:23:30.json new file mode 100644 index 0000000000000000000000000000000000000000..904706a3133a4bf6e192f513378af02c8ab1e42b --- /dev/null +++ b/GAI-LLM/OPEN-SOLAR-KO-10.7B-dpo-dedup/result_2024-01-31 09:23:30.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.35580204778157, + "acc_stderr": 0.01399057113791876, + "acc_norm": 0.40102389078498296, + "acc_norm_stderr": 0.014322255790719865 + }, + "harness|ko_hellaswag|10": { + "acc": 0.40659231228838877, + "acc_stderr": 0.004901936511546131, + "acc_norm": 0.5401314479187412, + "acc_norm_stderr": 0.004973683026202176 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5146198830409356, + "acc_stderr": 0.038331852752130254, + "acc_norm": 0.5146198830409356, + "acc_norm_stderr": 0.038331852752130254 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5825242718446602, + "acc_stderr": 0.048828405482122375, + "acc_norm": 0.5825242718446602, + "acc_norm_stderr": 0.048828405482122375 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5402298850574713, + "acc_stderr": 0.017821994096933535, + "acc_norm": 0.5402298850574713, + "acc_norm_stderr": 0.017821994096933535 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.04309732901036354, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.04309732901036354 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4425531914893617, + "acc_stderr": 0.03246956919789958, + "acc_norm": 0.4425531914893617, + "acc_norm_stderr": 0.03246956919789958 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3674698795180723, + 
"acc_stderr": 0.03753267402120574, + "acc_norm": 0.3674698795180723, + "acc_norm_stderr": 0.03753267402120574 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5048231511254019, + "acc_stderr": 0.028396770444111298, + "acc_norm": 0.5048231511254019, + "acc_norm_stderr": 0.028396770444111298 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5022421524663677, + "acc_stderr": 0.03355746535223263, + "acc_norm": 0.5022421524663677, + "acc_norm_stderr": 0.03355746535223263 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4961832061068702, + "acc_stderr": 0.043851623256015534, + "acc_norm": 0.4961832061068702, + "acc_norm_stderr": 0.043851623256015534 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5707070707070707, + "acc_stderr": 0.03526552724601198, + "acc_norm": 0.5707070707070707, + "acc_norm_stderr": 0.03526552724601198 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5103448275862069, + "acc_stderr": 0.04165774775728763, + "acc_norm": 0.5103448275862069, + "acc_norm_stderr": 0.04165774775728763 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.04280105837364395, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.04280105837364395 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.46218487394957986, + "acc_stderr": 0.032385469487589795, + "acc_norm": 0.46218487394957986, + "acc_norm_stderr": 0.032385469487589795 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4641025641025641, + "acc_stderr": 0.025285585990017834, + "acc_norm": 0.4641025641025641, + "acc_norm_stderr": 0.025285585990017834 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + 
"harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.04820403072760628, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.04820403072760628 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3891625615763547, + "acc_stderr": 0.034304624161038716, + "acc_norm": 0.3891625615763547, + "acc_norm_stderr": 0.034304624161038716 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.46774193548387094, + "acc_stderr": 0.02838474778881333, + "acc_norm": 0.46774193548387094, + "acc_norm_stderr": 0.02838474778881333 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6965811965811965, + "acc_stderr": 0.030118210106942638, + "acc_norm": 0.6965811965811965, + "acc_norm_stderr": 0.030118210106942638 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4490566037735849, + "acc_stderr": 0.030612730713641092, + "acc_norm": 0.4490566037735849, + "acc_norm_stderr": 0.030612730713641092 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.41818181818181815, + "acc_stderr": 0.047245774057315705, + "acc_norm": 0.41818181818181815, + "acc_norm_stderr": 0.047245774057315705 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.337037037037037, + "acc_stderr": 0.028820884666253255, + "acc_norm": 0.337037037037037, + "acc_norm_stderr": 0.028820884666253255 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.038020397601079024, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.038020397601079024 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5422885572139303, + "acc_stderr": 0.035228658640995975, + "acc_norm": 0.5422885572139303, + "acc_norm_stderr": 0.035228658640995975 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3815028901734104, + "acc_stderr": 0.03703851193099521, + "acc_norm": 
0.3815028901734104, + "acc_norm_stderr": 0.03703851193099521 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.023919984164047732, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.023919984164047732 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4236111111111111, + "acc_stderr": 0.04132125019723368, + "acc_norm": 0.4236111111111111, + "acc_norm_stderr": 0.04132125019723368 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.49710982658959535, + "acc_stderr": 0.026918645383239004, + "acc_norm": 0.49710982658959535, + "acc_norm_stderr": 0.026918645383239004 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4539877300613497, + "acc_stderr": 0.0391170190467718, + "acc_norm": 0.4539877300613497, + "acc_norm_stderr": 0.0391170190467718 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5, + "acc_stderr": 0.02782074420373286, + "acc_norm": 0.5, + "acc_norm_stderr": 0.02782074420373286 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5647668393782384, + "acc_stderr": 0.03578038165008586, + "acc_norm": 0.5647668393782384, + "acc_norm_stderr": 0.03578038165008586 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2982456140350877, + "acc_stderr": 0.043036840335373173, + "acc_norm": 0.2982456140350877, + "acc_norm_stderr": 0.043036840335373173 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6036697247706422, + "acc_stderr": 0.020971469947900525, + "acc_norm": 
0.6036697247706422, + "acc_norm_stderr": 0.020971469947900525 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.040061680838488774, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.040061680838488774 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4673202614379085, + "acc_stderr": 0.02856869975222588, + "acc_norm": 0.4673202614379085, + "acc_norm_stderr": 0.02856869975222588 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5785123966942148, + "acc_stderr": 0.04507732278775086, + "acc_norm": 0.5785123966942148, + "acc_norm_stderr": 0.04507732278775086 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.46710526315789475, + "acc_stderr": 0.04060127035236395, + "acc_norm": 0.46710526315789475, + "acc_norm_stderr": 0.04060127035236395 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4199346405228758, + "acc_stderr": 0.01996681117825648, + "acc_norm": 0.4199346405228758, + "acc_norm_stderr": 0.01996681117825648 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.02812163604063988, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.02812163604063988 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.33035714285714285, + "acc_stderr": 0.044642857142857116, + "acc_norm": 0.33035714285714285, + "acc_norm_stderr": 0.044642857142857116 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3472222222222222, + "acc_stderr": 0.032468872436376486, + "acc_norm": 0.3472222222222222, + "acc_norm_stderr": 0.032468872436376486 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24804469273743016, + "acc_stderr": 0.01444415780826145, + "acc_norm": 0.24804469273743016, + "acc_norm_stderr": 0.01444415780826145 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.29, + 
"acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4117647058823529, + "acc_stderr": 0.029896163033125474, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.029896163033125474 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.42448979591836733, + "acc_stderr": 0.031642094879429414, + "acc_norm": 0.42448979591836733, + "acc_norm_stderr": 0.031642094879429414 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6075949367088608, + "acc_stderr": 0.03178471874564729, + "acc_norm": 0.6075949367088608, + "acc_norm_stderr": 0.03178471874564729 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3246414602346806, + "acc_stderr": 0.011959089388530023, + "acc_norm": 0.3246414602346806, + "acc_norm_stderr": 0.011959089388530023 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5196078431372549, + "acc_stderr": 0.03506612560524866, + "acc_norm": 0.5196078431372549, + "acc_norm_stderr": 0.03506612560524866 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5636363636363636, + "acc_stderr": 0.03872592983524754, + "acc_norm": 0.5636363636363636, + "acc_norm_stderr": 0.03872592983524754 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.30354957160342716, + "mc1_stderr": 0.01609588415538684, + "mc2": 0.46287390999405587, + "mc2_stderr": 0.01568531535775204 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.551357733175915, + "acc_stderr": 0.01709943051472577, + "acc_norm": 0.5796930342384888, + "acc_norm_stderr": 0.01697059828117771 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + 
"harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + 
"harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "GAI-LLM/OPEN-SOLAR-KO-10.7B-dpo-dedup", + "model_sha": "b2521334feca96155d3399a5a261f4eb80380512", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/GAI-LLM/OPEN-SOLAR-KO-10.7B-dpo-v1/result_2024-01-25 04:22:42.json b/GAI-LLM/OPEN-SOLAR-KO-10.7B-dpo-v1/result_2024-01-25 04:22:42.json new file mode 100644 index 0000000000000000000000000000000000000000..b8d783351d36dd6e9ab7cba282a6980fe313de5a --- /dev/null +++ b/GAI-LLM/OPEN-SOLAR-KO-10.7B-dpo-v1/result_2024-01-25 04:22:42.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4658703071672355, + "acc_stderr": 0.014577311315231097, + "acc_norm": 0.5247440273037542, + "acc_norm_stderr": 0.01459348769493774 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4598685520812587, + "acc_stderr": 0.00497368302620218, + "acc_norm": 0.6088428599880502, + "acc_norm_stderr": 0.004870121051762735 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6198830409356725, + "acc_stderr": 0.03722965741385539, + "acc_norm": 0.6198830409356725, + "acc_norm_stderr": 0.03722965741385539 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6601941747572816, + "acc_stderr": 0.04689765937278135, + "acc_norm": 0.6601941747572816, + "acc_norm_stderr": 0.04689765937278135 + }, + "harness|ko_mmlu_miscellaneous|5": { + 
"acc": 0.6500638569604087, + "acc_stderr": 0.01705567979715042, + "acc_norm": 0.6500638569604087, + "acc_norm_stderr": 0.01705567979715042 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.5185185185185185, + "acc_stderr": 0.043163785995113245, + "acc_norm": 0.5185185185185185, + "acc_norm_stderr": 0.043163785995113245 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.425531914893617, + "acc_stderr": 0.03232146916224467, + "acc_norm": 0.425531914893617, + "acc_norm_stderr": 0.03232146916224467 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39156626506024095, + "acc_stderr": 0.03799857454479636, + "acc_norm": 0.39156626506024095, + "acc_norm_stderr": 0.03799857454479636 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5466237942122186, + "acc_stderr": 0.02827435985489424, + "acc_norm": 0.5466237942122186, + "acc_norm_stderr": 0.02827435985489424 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5426008968609866, + "acc_stderr": 0.033435777055830646, + "acc_norm": 0.5426008968609866, + "acc_norm_stderr": 0.033435777055830646 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48091603053435117, + "acc_stderr": 0.04382094705550988, + "acc_norm": 0.48091603053435117, + "acc_norm_stderr": 0.04382094705550988 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6414141414141414, + "acc_stderr": 0.03416903640391521, + "acc_norm": 0.6414141414141414, + "acc_norm_stderr": 0.03416903640391521 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.46206896551724136, + "acc_stderr": 0.041546596717075474, + "acc_norm": 0.46206896551724136, + "acc_norm_stderr": 0.041546596717075474 + }, + 
"harness|ko_mmlu_college_physics|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.04440521906179327, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.04440521906179327 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4831932773109244, + "acc_stderr": 0.03246013680375308, + "acc_norm": 0.4831932773109244, + "acc_norm_stderr": 0.03246013680375308 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.45384615384615384, + "acc_stderr": 0.025242770987126163, + "acc_norm": 0.45384615384615384, + "acc_norm_stderr": 0.025242770987126163 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6296296296296297, + "acc_stderr": 0.04668408033024931, + "acc_norm": 0.6296296296296297, + "acc_norm_stderr": 0.04668408033024931 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.37438423645320196, + "acc_stderr": 0.03405155380561952, + "acc_norm": 0.37438423645320196, + "acc_norm_stderr": 0.03405155380561952 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5096774193548387, + "acc_stderr": 0.02843867799890955, + "acc_norm": 0.5096774193548387, + "acc_norm_stderr": 0.02843867799890955 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7521367521367521, + "acc_stderr": 0.028286324075564397, + "acc_norm": 0.7521367521367521, + "acc_norm_stderr": 0.028286324075564397 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4679245283018868, + "acc_stderr": 0.030709486992556555, + "acc_norm": 0.4679245283018868, + "acc_norm_stderr": 0.030709486992556555 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5363636363636364, + "acc_stderr": 0.04776449162396197, + "acc_norm": 0.5363636363636364, + 
"acc_norm_stderr": 0.04776449162396197 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.29259259259259257, + "acc_stderr": 0.027738969632176095, + "acc_norm": 0.29259259259259257, + "acc_norm_stderr": 0.027738969632176095 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.32450331125827814, + "acc_stderr": 0.038227469376587525, + "acc_norm": 0.32450331125827814, + "acc_norm_stderr": 0.038227469376587525 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.03333333333333334, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.03333333333333334 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.41040462427745666, + "acc_stderr": 0.03750757044895538, + "acc_norm": 0.41040462427745666, + "acc_norm_stderr": 0.03750757044895538 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.335978835978836, + "acc_stderr": 0.024326310529149138, + "acc_norm": 0.335978835978836, + "acc_norm_stderr": 0.024326310529149138 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4513888888888889, + "acc_stderr": 0.04161402398403279, + "acc_norm": 0.4513888888888889, + "acc_norm_stderr": 0.04161402398403279 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.65, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.65, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5173410404624278, + "acc_stderr": 0.02690290045866664, + "acc_norm": 0.5173410404624278, + "acc_norm_stderr": 0.02690290045866664 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.37423312883435583, + "acc_stderr": 0.03802068102899615, + "acc_norm": 0.37423312883435583, + "acc_norm_stderr": 0.03802068102899615 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5432098765432098, + "acc_stderr": 0.02771666165019404, + 
"acc_norm": 0.5432098765432098, + "acc_norm_stderr": 0.02771666165019404 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6113989637305699, + "acc_stderr": 0.03517739796373131, + "acc_norm": 0.6113989637305699, + "acc_norm_stderr": 0.03517739796373131 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.32456140350877194, + "acc_stderr": 0.04404556157374767, + "acc_norm": 0.32456140350877194, + "acc_norm_stderr": 0.04404556157374767 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5853211009174312, + "acc_stderr": 0.02112290320860259, + "acc_norm": 0.5853211009174312, + "acc_norm_stderr": 0.02112290320860259 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.04285714285714281, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.04285714285714281 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4934640522875817, + "acc_stderr": 0.028627470550556047, + "acc_norm": 0.4934640522875817, + "acc_norm_stderr": 0.028627470550556047 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.55, + "acc_stderr": 0.04999999999999999, + "acc_norm": 0.55, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5950413223140496, + "acc_stderr": 0.04481137755942469, + "acc_norm": 0.5950413223140496, + "acc_norm_stderr": 0.04481137755942469 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5394736842105263, + "acc_stderr": 0.04056242252249034, + "acc_norm": 0.5394736842105263, + "acc_norm_stderr": 0.04056242252249034 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.44607843137254904, + "acc_stderr": 0.020109864547181357, + "acc_norm": 0.44607843137254904, + "acc_norm_stderr": 0.020109864547181357 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 
0.3617021276595745, + "acc_stderr": 0.028663820147199495, + "acc_norm": 0.3617021276595745, + "acc_norm_stderr": 0.028663820147199495 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3125, + "acc_stderr": 0.043994650575715215, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.043994650575715215 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3425925925925926, + "acc_stderr": 0.032365852526021574, + "acc_norm": 0.3425925925925926, + "acc_norm_stderr": 0.032365852526021574 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.29608938547486036, + "acc_stderr": 0.0152686773176023, + "acc_norm": 0.29608938547486036, + "acc_norm_stderr": 0.0152686773176023 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3860294117647059, + "acc_stderr": 0.029573269134411124, + "acc_norm": 0.3860294117647059, + "acc_norm_stderr": 0.029573269134411124 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.46530612244897956, + "acc_stderr": 0.03193207024425314, + "acc_norm": 0.46530612244897956, + "acc_norm_stderr": 0.03193207024425314 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6286919831223629, + "acc_stderr": 0.0314506860074486, + "acc_norm": 0.6286919831223629, + "acc_norm_stderr": 0.0314506860074486 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3318122555410691, + "acc_stderr": 0.012026088259897637, + "acc_norm": 0.3318122555410691, + "acc_norm_stderr": 0.012026088259897637 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5588235294117647, + "acc_stderr": 0.034849415144292316, + "acc_norm": 0.5588235294117647, + "acc_norm_stderr": 0.034849415144292316 + }, + 
"harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5515151515151515, + "acc_stderr": 0.038835659779569286, + "acc_norm": 0.5515151515151515, + "acc_norm_stderr": 0.038835659779569286 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.37576499388004897, + "mc1_stderr": 0.016954584060214307, + "mc2": 0.5536468577288562, + "mc2_stderr": 0.016032830714481056 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5903187721369539, + "acc_stderr": 0.016907568192219478, + "acc_norm": 0.6092089728453365, + "acc_norm_stderr": 0.01677529846510826 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + 
"harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "GAI-LLM/OPEN-SOLAR-KO-10.7B-dpo-v1", + "model_sha": "66f56a656d085c813ad58226a21a3b951df8543c", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/GAI-LLM/OPEN-SOLAR-KO-10.7B-mixed-v15-dedup/result_2024-01-29 04:21:17.json b/GAI-LLM/OPEN-SOLAR-KO-10.7B-mixed-v15-dedup/result_2024-01-29 04:21:17.json new file mode 100644 index 0000000000000000000000000000000000000000..e248d03d6e9b623b76730df176d9c02960d5cf58 --- /dev/null +++ b/GAI-LLM/OPEN-SOLAR-KO-10.7B-mixed-v15-dedup/result_2024-01-29 04:21:17.json @@ -0,0 +1,444 @@ +{ + "results": 
{ + "harness|ko_arc_challenge|25": { + "acc": 0.35238907849829354, + "acc_stderr": 0.013960142600598684, + "acc_norm": 0.4121160409556314, + "acc_norm_stderr": 0.0143839153022254 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4012148974307907, + "acc_stderr": 0.004891426533390626, + "acc_norm": 0.5355506871141207, + "acc_norm_stderr": 0.0049771527464786015 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5029239766081871, + "acc_stderr": 0.03834759370936839, + "acc_norm": 0.5029239766081871, + "acc_norm_stderr": 0.03834759370936839 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5825242718446602, + "acc_stderr": 0.048828405482122375, + "acc_norm": 0.5825242718446602, + "acc_norm_stderr": 0.048828405482122375 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5427841634738186, + "acc_stderr": 0.017814385238534444, + "acc_norm": 0.5427841634738186, + "acc_norm_stderr": 0.017814385238534444 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45925925925925926, + "acc_stderr": 0.04304979692464244, + "acc_norm": 0.45925925925925926, + "acc_norm_stderr": 0.04304979692464244 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.40425531914893614, + "acc_stderr": 0.03208115750788684, + "acc_norm": 0.40425531914893614, + "acc_norm_stderr": 0.03208115750788684 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.40963855421686746, + "acc_stderr": 0.03828401115079022, + "acc_norm": 0.40963855421686746, + "acc_norm_stderr": 0.03828401115079022 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5176848874598071, + "acc_stderr": 0.028380322849077138, + "acc_norm": 0.5176848874598071, + "acc_norm_stderr": 0.028380322849077138 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.48878923766816146, + "acc_stderr": 0.033549366530984746, + "acc_norm": 0.48878923766816146, + "acc_norm_stderr": 
0.033549366530984746 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5114503816793893, + "acc_stderr": 0.043841400240780176, + "acc_norm": 0.5114503816793893, + "acc_norm_stderr": 0.043841400240780176 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6262626262626263, + "acc_stderr": 0.03446897738659332, + "acc_norm": 0.6262626262626263, + "acc_norm_stderr": 0.03446897738659332 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5172413793103449, + "acc_stderr": 0.04164188720169375, + "acc_norm": 0.5172413793103449, + "acc_norm_stderr": 0.04164188720169375 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.04617034827006717, + "acc_norm": 0.3137254901960784, + "acc_norm_stderr": 0.04617034827006717 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5042016806722689, + "acc_stderr": 0.03247734334448111, + "acc_norm": 0.5042016806722689, + "acc_norm_stderr": 0.03247734334448111 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4794871794871795, + "acc_stderr": 0.025329663163489943, + "acc_norm": 0.4794871794871795, + "acc_norm_stderr": 0.025329663163489943 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4236453201970443, + "acc_stderr": 0.034767257476490385, + "acc_norm": 0.4236453201970443, + "acc_norm_stderr": 
0.034767257476490385 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5258064516129032, + "acc_stderr": 0.028406095057653333, + "acc_norm": 0.5258064516129032, + "acc_norm_stderr": 0.028406095057653333 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7094017094017094, + "acc_stderr": 0.029745048572674064, + "acc_norm": 0.7094017094017094, + "acc_norm_stderr": 0.029745048572674064 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5094339622641509, + "acc_stderr": 0.0307673947078081, + "acc_norm": 0.5094339622641509, + "acc_norm_stderr": 0.0307673947078081 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5, + "acc_stderr": 0.04789131426105757, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04789131426105757 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3296296296296296, + "acc_stderr": 0.028661201116524586, + "acc_norm": 0.3296296296296296, + "acc_norm_stderr": 0.028661201116524586 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3708609271523179, + "acc_stderr": 0.03943966699183629, + "acc_norm": 0.3708609271523179, + "acc_norm_stderr": 0.03943966699183629 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6467661691542289, + "acc_stderr": 0.03379790611796777, + "acc_norm": 0.6467661691542289, + "acc_norm_stderr": 0.03379790611796777 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4508670520231214, + "acc_stderr": 0.0379401267469703, + "acc_norm": 0.4508670520231214, + "acc_norm_stderr": 0.0379401267469703 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.34656084656084657, + "acc_stderr": 0.02450877752102842, + "acc_norm": 0.34656084656084657, + "acc_norm_stderr": 0.02450877752102842 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.041553199555931467, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.041553199555931467 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.35, + "acc_stderr": 0.04793724854411021, + "acc_norm": 
0.35, + "acc_norm_stderr": 0.04793724854411021 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.64, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.64, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5028901734104047, + "acc_stderr": 0.02691864538323901, + "acc_norm": 0.5028901734104047, + "acc_norm_stderr": 0.02691864538323901 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4785276073619632, + "acc_stderr": 0.0392474687675113, + "acc_norm": 0.4785276073619632, + "acc_norm_stderr": 0.0392474687675113 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4783950617283951, + "acc_stderr": 0.027794760105008746, + "acc_norm": 0.4783950617283951, + "acc_norm_stderr": 0.027794760105008746 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6062176165803109, + "acc_stderr": 0.035260770955482405, + "acc_norm": 0.6062176165803109, + "acc_norm_stderr": 0.035260770955482405 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.34210526315789475, + "acc_stderr": 0.044629175353369376, + "acc_norm": 0.34210526315789475, + "acc_norm_stderr": 0.044629175353369376 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6293577981651376, + "acc_stderr": 0.02070745816435298, + "acc_norm": 0.6293577981651376, + "acc_norm_stderr": 0.02070745816435298 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30158730158730157, + "acc_stderr": 0.04104947269903394, + "acc_norm": 0.30158730158730157, + "acc_norm_stderr": 0.04104947269903394 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5294117647058824, + "acc_stderr": 0.028580341065138293, + "acc_norm": 0.5294117647058824, + "acc_norm_stderr": 0.028580341065138293 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + 
"acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.628099173553719, + "acc_stderr": 0.044120158066245044, + "acc_norm": 0.628099173553719, + "acc_norm_stderr": 0.044120158066245044 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4868421052631579, + "acc_stderr": 0.04067533136309173, + "acc_norm": 0.4868421052631579, + "acc_norm_stderr": 0.04067533136309173 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.019944914136873583, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.019944914136873583 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3404255319148936, + "acc_stderr": 0.028267657482650147, + "acc_norm": 0.3404255319148936, + "acc_norm_stderr": 0.028267657482650147 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2767857142857143, + "acc_stderr": 0.04246624336697623, + "acc_norm": 0.2767857142857143, + "acc_norm_stderr": 0.04246624336697623 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.36574074074074076, + "acc_stderr": 0.032847388576472056, + "acc_norm": 0.36574074074074076, + "acc_norm_stderr": 0.032847388576472056 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24134078212290502, + "acc_stderr": 0.014310999547961443, + "acc_norm": 0.24134078212290502, + "acc_norm_stderr": 0.014310999547961443 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956913, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956913 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4522058823529412, + "acc_stderr": 0.030233758551596452, + "acc_norm": 0.4522058823529412, + "acc_norm_stderr": 0.030233758551596452 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 
0.5102040816326531, + "acc_stderr": 0.03200255347893783, + "acc_norm": 0.5102040816326531, + "acc_norm_stderr": 0.03200255347893783 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6329113924050633, + "acc_stderr": 0.031376240725616185, + "acc_norm": 0.6329113924050633, + "acc_norm_stderr": 0.031376240725616185 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.33572359843546284, + "acc_stderr": 0.012061304157664618, + "acc_norm": 0.33572359843546284, + "acc_norm_stderr": 0.012061304157664618 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5343137254901961, + "acc_stderr": 0.03501038327635897, + "acc_norm": 0.5343137254901961, + "acc_norm_stderr": 0.03501038327635897 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5757575757575758, + "acc_stderr": 0.03859268142070264, + "acc_norm": 0.5757575757575758, + "acc_norm_stderr": 0.03859268142070264 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2558139534883721, + "mc1_stderr": 0.015274176219283338, + "mc2": 0.40795074977785445, + "mc2_stderr": 0.014747748547063478 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5608028335301063, + "acc_stderr": 0.017062775744780705, + "acc_norm": 0.6210153482880756, + "acc_norm_stderr": 0.016679260684229293 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + 
"harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + 
"harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "GAI-LLM/OPEN-SOLAR-KO-10.7B-mixed-v15-dedup", + "model_sha": "108053f88e4632dabb22ba6313f0441deff840d0", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/GAI-LLM/OPEN-SOLAR-KO-10.7B-mixed-v15/result_2024-01-23 07:48:01.json b/GAI-LLM/OPEN-SOLAR-KO-10.7B-mixed-v15/result_2024-01-23 07:48:01.json new file mode 100644 index 0000000000000000000000000000000000000000..df8de18d4d29f0d9adbb9a4b85e7e1efad611f76 --- /dev/null +++ b/GAI-LLM/OPEN-SOLAR-KO-10.7B-mixed-v15/result_2024-01-23 07:48:01.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4539249146757679, + "acc_stderr": 0.014549221105171865, + "acc_norm": 0.5085324232081911, + "acc_norm_stderr": 0.014609263165632182 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4373630750846445, + "acc_stderr": 0.004950472918523313, + "acc_norm": 0.5981876120294762, + "acc_norm_stderr": 0.004892624490937216 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6549707602339181, + "acc_stderr": 0.036459813773888065, + "acc_norm": 0.6549707602339181, + "acc_norm_stderr": 0.036459813773888065 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6601941747572816, + "acc_stderr": 0.046897659372781335, + "acc_norm": 0.6601941747572816, + "acc_norm_stderr": 0.046897659372781335 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6819923371647509, + "acc_stderr": 0.01665348627561539, + "acc_norm": 0.6819923371647509, + "acc_norm_stderr": 0.01665348627561539 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.5185185185185185, + "acc_stderr": 0.043163785995113245, + "acc_norm": 0.5185185185185185, + "acc_norm_stderr": 0.043163785995113245 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + 
"acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.46808510638297873, + "acc_stderr": 0.03261936918467381, + "acc_norm": 0.46808510638297873, + "acc_norm_stderr": 0.03261936918467381 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42771084337349397, + "acc_stderr": 0.038515976837185335, + "acc_norm": 0.42771084337349397, + "acc_norm_stderr": 0.038515976837185335 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5691318327974276, + "acc_stderr": 0.028125340983972714, + "acc_norm": 0.5691318327974276, + "acc_norm_stderr": 0.028125340983972714 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5381165919282511, + "acc_stderr": 0.03346015011973228, + "acc_norm": 0.5381165919282511, + "acc_norm_stderr": 0.03346015011973228 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5954198473282443, + "acc_stderr": 0.043046937953806645, + "acc_norm": 0.5954198473282443, + "acc_norm_stderr": 0.043046937953806645 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.53, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.53, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6767676767676768, + "acc_stderr": 0.03332299921070645, + "acc_norm": 0.6767676767676768, + "acc_norm_stderr": 0.03332299921070645 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4689655172413793, + "acc_stderr": 0.04158632762097828, + "acc_norm": 0.4689655172413793, + "acc_norm_stderr": 0.04158632762097828 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.04220773659171453, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.04220773659171453 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.48739495798319327, + "acc_stderr": 0.03246816765752173, + "acc_norm": 0.48739495798319327, + "acc_norm_stderr": 0.03246816765752173 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5102564102564102, + 
"acc_stderr": 0.025345672221942374, + "acc_norm": 0.5102564102564102, + "acc_norm_stderr": 0.025345672221942374 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.62, + "acc_stderr": 0.04878317312145631, + "acc_norm": 0.62, + "acc_norm_stderr": 0.04878317312145631 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6111111111111112, + "acc_stderr": 0.04712821257426769, + "acc_norm": 0.6111111111111112, + "acc_norm_stderr": 0.04712821257426769 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3793103448275862, + "acc_stderr": 0.034139638059062345, + "acc_norm": 0.3793103448275862, + "acc_norm_stderr": 0.034139638059062345 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5741935483870968, + "acc_stderr": 0.02812911270916591, + "acc_norm": 0.5741935483870968, + "acc_norm_stderr": 0.02812911270916591 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7905982905982906, + "acc_stderr": 0.026655699653922754, + "acc_norm": 0.7905982905982906, + "acc_norm_stderr": 0.026655699653922754 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5207547169811321, + "acc_stderr": 0.03074634997572347, + "acc_norm": 0.5207547169811321, + "acc_norm_stderr": 0.03074634997572347 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5727272727272728, + "acc_stderr": 0.04738198703545483, + "acc_norm": 0.5727272727272728, + "acc_norm_stderr": 0.04738198703545483 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.362962962962963, + "acc_stderr": 0.029318203645206865, + "acc_norm": 0.362962962962963, + "acc_norm_stderr": 0.029318203645206865 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.03780445850526732, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.03780445850526732 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 
0.7114427860696517, + "acc_stderr": 0.03203841040213321, + "acc_norm": 0.7114427860696517, + "acc_norm_stderr": 0.03203841040213321 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4393063583815029, + "acc_stderr": 0.03784271932887467, + "acc_norm": 0.4393063583815029, + "acc_norm_stderr": 0.03784271932887467 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3306878306878307, + "acc_stderr": 0.02422996529842507, + "acc_norm": 0.3306878306878307, + "acc_norm_stderr": 0.02422996529842507 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4861111111111111, + "acc_stderr": 0.04179596617581, + "acc_norm": 0.4861111111111111, + "acc_norm_stderr": 0.04179596617581 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.76, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.76, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.523121387283237, + "acc_stderr": 0.026890297881303125, + "acc_norm": 0.523121387283237, + "acc_norm_stderr": 0.026890297881303125 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5153374233128835, + "acc_stderr": 0.039265223787088424, + "acc_norm": 0.5153374233128835, + "acc_norm_stderr": 0.039265223787088424 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5679012345679012, + "acc_stderr": 0.027563010971606676, + "acc_norm": 0.5679012345679012, + "acc_norm_stderr": 0.027563010971606676 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.689119170984456, + "acc_stderr": 0.03340361906276586, + "acc_norm": 0.689119170984456, + "acc_norm_stderr": 0.03340361906276586 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 
0.35964912280701755, + "acc_stderr": 0.04514496132873633, + "acc_norm": 0.35964912280701755, + "acc_norm_stderr": 0.04514496132873633 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6293577981651376, + "acc_stderr": 0.020707458164352984, + "acc_norm": 0.6293577981651376, + "acc_norm_stderr": 0.020707458164352984 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.04134913018303316, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.04134913018303316 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5196078431372549, + "acc_stderr": 0.028607893699576063, + "acc_norm": 0.5196078431372549, + "acc_norm_stderr": 0.028607893699576063 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.57, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.57, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.71900826446281, + "acc_stderr": 0.04103203830514511, + "acc_norm": 0.71900826446281, + "acc_norm_stderr": 0.04103203830514511 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5263157894736842, + "acc_stderr": 0.04063302731486671, + "acc_norm": 0.5263157894736842, + "acc_norm_stderr": 0.04063302731486671 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.45751633986928103, + "acc_stderr": 0.0201546857125909, + "acc_norm": 0.45751633986928103, + "acc_norm_stderr": 0.0201546857125909 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.34397163120567376, + "acc_stderr": 0.02833801742861132, + "acc_norm": 0.34397163120567376, + "acc_norm_stderr": 0.02833801742861132 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25892857142857145, + "acc_stderr": 0.041577515398656284, + "acc_norm": 0.25892857142857145, + "acc_norm_stderr": 0.041577515398656284 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.38425925925925924, + "acc_stderr": 0.03317354514310742, + "acc_norm": 0.38425925925925924, + "acc_norm_stderr": 
0.03317354514310742 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.28044692737430166, + "acc_stderr": 0.015024083883322872, + "acc_norm": 0.28044692737430166, + "acc_norm_stderr": 0.015024083883322872 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.63, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.63, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.41544117647058826, + "acc_stderr": 0.029935342707877753, + "acc_norm": 0.41544117647058826, + "acc_norm_stderr": 0.029935342707877753 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6122448979591837, + "acc_stderr": 0.031192230726795656, + "acc_norm": 0.6122448979591837, + "acc_norm_stderr": 0.031192230726795656 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6962025316455697, + "acc_stderr": 0.02993669638713861, + "acc_norm": 0.6962025316455697, + "acc_norm_stderr": 0.02993669638713861 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.34159061277705344, + "acc_stderr": 0.012112391320842854, + "acc_norm": 0.34159061277705344, + "acc_norm_stderr": 0.012112391320842854 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6274509803921569, + "acc_stderr": 0.03393388584958404, + "acc_norm": 0.6274509803921569, + "acc_norm_stderr": 0.03393388584958404 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6181818181818182, + "acc_stderr": 0.03793713171165634, + "acc_norm": 0.6181818181818182, + "acc_norm_stderr": 0.03793713171165634 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2937576499388005, + "mc1_stderr": 0.015945068581236614, + "mc2": 0.46102353979455557, + "mc2_stderr": 0.01511169348606206 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5974025974025974, + "acc_stderr": 0.016861020486407773, 
+ "acc_norm": 0.6375442739079102, + "acc_norm_stderr": 0.016527131240453692 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + 
"harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "GAI-LLM/OPEN-SOLAR-KO-10.7B-mixed-v15", + "model_sha": "da2d5692181ad63e0cd1ae2eb4bcd3860e183a45", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/GAI-LLM/Yi-Ko-6B-dpo-v3/result_2023-12-19 14:26:44.json b/GAI-LLM/Yi-Ko-6B-dpo-v3/result_2023-12-19 14:26:44.json new file mode 100644 index 0000000000000000000000000000000000000000..8ee76ff7ee2bd1d93a44b06aa94a4551f72ac4d8 --- /dev/null +++ b/GAI-LLM/Yi-Ko-6B-dpo-v3/result_2023-12-19 14:26:44.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3677474402730375, + "acc_stderr": 0.014090995618168485, + "acc_norm": 0.4351535836177474, + "acc_norm_stderr": 0.014487986197186052 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3979286994622585, + "acc_stderr": 0.004884702412456099, + "acc_norm": 0.5377414857598088, + "acc_norm_stderr": 0.004975546018950673 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5087719298245614, + "acc_stderr": 0.03834234744164993, + "acc_norm": 0.5087719298245614, + "acc_norm_stderr": 
0.03834234744164993 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6019417475728155, + "acc_stderr": 0.048467482539772386, + "acc_norm": 0.6019417475728155, + "acc_norm_stderr": 0.048467482539772386 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5466155810983397, + "acc_stderr": 0.0178020871358503, + "acc_norm": 0.5466155810983397, + "acc_norm_stderr": 0.0178020871358503 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4740740740740741, + "acc_stderr": 0.04313531696750574, + "acc_norm": 0.4740740740740741, + "acc_norm_stderr": 0.04313531696750574 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206824, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206824 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39574468085106385, + "acc_stderr": 0.03196758697835362, + "acc_norm": 0.39574468085106385, + "acc_norm_stderr": 0.03196758697835362 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39156626506024095, + "acc_stderr": 0.037998574544796354, + "acc_norm": 0.39156626506024095, + "acc_norm_stderr": 0.037998574544796354 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5016077170418006, + "acc_stderr": 0.02839794490780661, + "acc_norm": 0.5016077170418006, + "acc_norm_stderr": 0.02839794490780661 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.484304932735426, + "acc_stderr": 0.0335412657542081, + "acc_norm": 0.484304932735426, + "acc_norm_stderr": 0.0335412657542081 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5190839694656488, + "acc_stderr": 0.043820947055509867, + "acc_norm": 0.5190839694656488, + "acc_norm_stderr": 0.043820947055509867 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6414141414141414, + "acc_stderr": 0.03416903640391521, + "acc_norm": 0.6414141414141414, + "acc_norm_stderr": 
0.03416903640391521 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.503448275862069, + "acc_stderr": 0.04166567577101579, + "acc_norm": 0.503448275862069, + "acc_norm_stderr": 0.04166567577101579 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.04533838195929775, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.04533838195929775 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5, + "acc_stderr": 0.032478490123081544, + "acc_norm": 0.5, + "acc_norm_stderr": 0.032478490123081544 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4717948717948718, + "acc_stderr": 0.025310639254933914, + "acc_norm": 0.4717948717948718, + "acc_norm_stderr": 0.025310639254933914 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.58, + "acc_stderr": 0.04960449637488583, + "acc_norm": 0.58, + "acc_norm_stderr": 0.04960449637488583 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5648148148148148, + "acc_stderr": 0.04792898170907061, + "acc_norm": 0.5648148148148148, + "acc_norm_stderr": 0.04792898170907061 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4187192118226601, + "acc_stderr": 0.03471192860518468, + "acc_norm": 0.4187192118226601, + "acc_norm_stderr": 0.03471192860518468 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5193548387096775, + "acc_stderr": 0.02842268740431211, + "acc_norm": 0.5193548387096775, + "acc_norm_stderr": 0.02842268740431211 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7136752136752137, + "acc_stderr": 0.029614323690456648, + "acc_norm": 0.7136752136752137, + "acc_norm_stderr": 0.029614323690456648 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4641509433962264, + "acc_stderr": 0.030693675018458003, + "acc_norm": 0.4641509433962264, + 
"acc_norm_stderr": 0.030693675018458003 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5272727272727272, + "acc_stderr": 0.04782001791380061, + "acc_norm": 0.5272727272727272, + "acc_norm_stderr": 0.04782001791380061 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2851851851851852, + "acc_stderr": 0.0275285992103405, + "acc_norm": 0.2851851851851852, + "acc_norm_stderr": 0.0275285992103405 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33774834437086093, + "acc_stderr": 0.03861557546255168, + "acc_norm": 0.33774834437086093, + "acc_norm_stderr": 0.03861557546255168 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6268656716417911, + "acc_stderr": 0.034198326081760065, + "acc_norm": 0.6268656716417911, + "acc_norm_stderr": 0.034198326081760065 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4046242774566474, + "acc_stderr": 0.0374246119388725, + "acc_norm": 0.4046242774566474, + "acc_norm_stderr": 0.0374246119388725 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.023809523809523867, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.023809523809523867 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4375, + "acc_stderr": 0.04148415739394154, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.04148415739394154 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.48265895953757226, + "acc_stderr": 0.026902900458666647, + "acc_norm": 0.48265895953757226, + "acc_norm_stderr": 0.026902900458666647 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.36809815950920244, + "acc_stderr": 0.03789213935838396, + "acc_norm": 
0.36809815950920244, + "acc_norm_stderr": 0.03789213935838396 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.47530864197530864, + "acc_stderr": 0.02778680093142745, + "acc_norm": 0.47530864197530864, + "acc_norm_stderr": 0.02778680093142745 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5958549222797928, + "acc_stderr": 0.0354150857888402, + "acc_norm": 0.5958549222797928, + "acc_norm_stderr": 0.0354150857888402 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.30701754385964913, + "acc_stderr": 0.0433913832257986, + "acc_norm": 0.30701754385964913, + "acc_norm_stderr": 0.0433913832257986 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5963302752293578, + "acc_stderr": 0.02103570485657497, + "acc_norm": 0.5963302752293578, + "acc_norm_stderr": 0.02103570485657497 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04216370213557835, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04216370213557835 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5032679738562091, + "acc_stderr": 0.02862930519400355, + "acc_norm": 0.5032679738562091, + "acc_norm_stderr": 0.02862930519400355 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6694214876033058, + "acc_stderr": 0.04294340845212094, + "acc_norm": 0.6694214876033058, + "acc_norm_stderr": 0.04294340845212094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4868421052631579, + "acc_stderr": 0.04067533136309173, + "acc_norm": 0.4868421052631579, + "acc_norm_stderr": 0.04067533136309173 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3872549019607843, + "acc_stderr": 
0.019706875804085627, + "acc_norm": 0.3872549019607843, + "acc_norm_stderr": 0.019706875804085627 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3404255319148936, + "acc_stderr": 0.028267657482650147, + "acc_norm": 0.3404255319148936, + "acc_norm_stderr": 0.028267657482650147 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.32142857142857145, + "acc_stderr": 0.04432804055291519, + "acc_norm": 0.32142857142857145, + "acc_norm_stderr": 0.04432804055291519 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.27314814814814814, + "acc_stderr": 0.030388051301678116, + "acc_norm": 0.27314814814814814, + "acc_norm_stderr": 0.030388051301678116 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.28938547486033517, + "acc_stderr": 0.015166544550490305, + "acc_norm": 0.28938547486033517, + "acc_norm_stderr": 0.015166544550490305 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4264705882352941, + "acc_stderr": 0.030042615832714854, + "acc_norm": 0.4264705882352941, + "acc_norm_stderr": 0.030042615832714854 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.39591836734693875, + "acc_stderr": 0.03130802899065685, + "acc_norm": 0.39591836734693875, + "acc_norm_stderr": 0.03130802899065685 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6286919831223629, + "acc_stderr": 0.03145068600744859, + "acc_norm": 0.6286919831223629, + "acc_norm_stderr": 0.03145068600744859 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3259452411994785, + "acc_stderr": 0.011971507294982775, + "acc_norm": 0.3259452411994785, + "acc_norm_stderr": 0.011971507294982775 + }, + 
"harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5441176470588235, + "acc_stderr": 0.034956245220154766, + "acc_norm": 0.5441176470588235, + "acc_norm_stderr": 0.034956245220154766 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5212121212121212, + "acc_stderr": 0.03900828913737302, + "acc_norm": 0.5212121212121212, + "acc_norm_stderr": 0.03900828913737302 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2913096695226438, + "mc1_stderr": 0.01590598704818483, + "mc2": 0.45135776407373196, + "mc2_stderr": 0.015142660341000317 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5655253837072018, + "acc_stderr": 0.017042098620824925, + "acc_norm": 0.6257378984651711, + "acc_norm_stderr": 0.016637917789798742 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 
1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "GAI-LLM/Yi-Ko-6B-dpo-v3", + "model_sha": "9abe61ce6ef0fcdc77e2b2d87bf85b9c83dda19d", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/GAI-LLM/Yi-Ko-6B-dpo-v4/result_2023-12-22 00:26:06.json b/GAI-LLM/Yi-Ko-6B-dpo-v4/result_2023-12-22 00:26:06.json new file mode 100644 index 
0000000000000000000000000000000000000000..c41d72824b9fe717e2a6bac44a65e78bb497bb83 --- /dev/null +++ b/GAI-LLM/Yi-Ko-6B-dpo-v4/result_2023-12-22 00:26:06.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.37542662116040953, + "acc_stderr": 0.01415063143511173, + "acc_norm": 0.4351535836177474, + "acc_norm_stderr": 0.014487986197186052 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4004182433778132, + "acc_stderr": 0.0048898174897396935, + "acc_norm": 0.5360485958972316, + "acc_norm_stderr": 0.004976796060456437 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5087719298245614, + "acc_stderr": 0.038342347441649924, + "acc_norm": 0.5087719298245614, + "acc_norm_stderr": 0.038342347441649924 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6213592233009708, + "acc_stderr": 0.04802694698258974, + "acc_norm": 0.6213592233009708, + "acc_norm_stderr": 0.04802694698258974 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5568326947637292, + "acc_stderr": 0.01776408503534841, + "acc_norm": 0.5568326947637292, + "acc_norm_stderr": 0.01776408503534841 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.43703703703703706, + "acc_stderr": 0.04284958639753399, + "acc_norm": 0.43703703703703706, + "acc_norm_stderr": 0.04284958639753399 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4, + "acc_stderr": 0.03202563076101737, + "acc_norm": 0.4, + "acc_norm_stderr": 0.03202563076101737 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.40963855421686746, + "acc_stderr": 0.03828401115079023, + "acc_norm": 0.40963855421686746, + "acc_norm_stderr": 0.03828401115079023 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5176848874598071, + "acc_stderr": 0.028380322849077138, + "acc_norm": 0.5176848874598071, + "acc_norm_stderr": 0.028380322849077138 + }, + 
"harness|ko_mmlu_human_aging|5": { + "acc": 0.48878923766816146, + "acc_stderr": 0.033549366530984746, + "acc_norm": 0.48878923766816146, + "acc_norm_stderr": 0.033549366530984746 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4961832061068702, + "acc_stderr": 0.043851623256015534, + "acc_norm": 0.4961832061068702, + "acc_norm_stderr": 0.043851623256015534 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6515151515151515, + "acc_stderr": 0.033948539651564025, + "acc_norm": 0.6515151515151515, + "acc_norm_stderr": 0.033948539651564025 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5310344827586206, + "acc_stderr": 0.04158632762097828, + "acc_norm": 0.5310344827586206, + "acc_norm_stderr": 0.04158632762097828 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.04488482852329017, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.04488482852329017 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4789915966386555, + "acc_stderr": 0.03244980849990029, + "acc_norm": 0.4789915966386555, + "acc_norm_stderr": 0.03244980849990029 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4897435897435897, + "acc_stderr": 0.025345672221942374, + "acc_norm": 0.4897435897435897, + "acc_norm_stderr": 0.025345672221942374 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.62, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.62, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5740740740740741, + "acc_stderr": 0.0478034362693679, + "acc_norm": 0.5740740740740741, + "acc_norm_stderr": 
0.0478034362693679 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39408866995073893, + "acc_stderr": 0.03438157967036545, + "acc_norm": 0.39408866995073893, + "acc_norm_stderr": 0.03438157967036545 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.49032258064516127, + "acc_stderr": 0.028438677998909558, + "acc_norm": 0.49032258064516127, + "acc_norm_stderr": 0.028438677998909558 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7264957264957265, + "acc_stderr": 0.029202540153431183, + "acc_norm": 0.7264957264957265, + "acc_norm_stderr": 0.029202540153431183 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4867924528301887, + "acc_stderr": 0.030762134874500476, + "acc_norm": 0.4867924528301887, + "acc_norm_stderr": 0.030762134874500476 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5181818181818182, + "acc_stderr": 0.04785964010794916, + "acc_norm": 0.5181818181818182, + "acc_norm_stderr": 0.04785964010794916 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3296296296296296, + "acc_stderr": 0.028661201116524582, + "acc_norm": 0.3296296296296296, + "acc_norm_stderr": 0.028661201116524582 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.36423841059602646, + "acc_stderr": 0.03929111781242741, + "acc_norm": 0.36423841059602646, + "acc_norm_stderr": 0.03929111781242741 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6417910447761194, + "acc_stderr": 0.03390393042268815, + "acc_norm": 0.6417910447761194, + "acc_norm_stderr": 0.03390393042268815 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4393063583815029, + "acc_stderr": 0.03784271932887467, + "acc_norm": 0.4393063583815029, + "acc_norm_stderr": 0.03784271932887467 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.373015873015873, + "acc_stderr": 0.02490699045899257, + "acc_norm": 0.373015873015873, + "acc_norm_stderr": 0.02490699045899257 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4722222222222222, + 
"acc_stderr": 0.04174752578923185, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.04174752578923185 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5057803468208093, + "acc_stderr": 0.026917296179149116, + "acc_norm": 0.5057803468208093, + "acc_norm_stderr": 0.026917296179149116 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4539877300613497, + "acc_stderr": 0.0391170190467718, + "acc_norm": 0.4539877300613497, + "acc_norm_stderr": 0.0391170190467718 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5123456790123457, + "acc_stderr": 0.027812262269327235, + "acc_norm": 0.5123456790123457, + "acc_norm_stderr": 0.027812262269327235 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.616580310880829, + "acc_stderr": 0.03508984236295342, + "acc_norm": 0.616580310880829, + "acc_norm_stderr": 0.03508984236295342 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3157894736842105, + "acc_stderr": 0.04372748290278008, + "acc_norm": 0.3157894736842105, + "acc_norm_stderr": 0.04372748290278008 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6201834862385321, + "acc_stderr": 0.020808825617866244, + "acc_norm": 0.6201834862385321, + "acc_norm_stderr": 0.020808825617866244 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.31746031746031744, + "acc_stderr": 0.04163453031302859, + "acc_norm": 0.31746031746031744, + "acc_norm_stderr": 0.04163453031302859 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5032679738562091, + 
"acc_stderr": 0.02862930519400355, + "acc_norm": 0.5032679738562091, + "acc_norm_stderr": 0.02862930519400355 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7024793388429752, + "acc_stderr": 0.04173349148083499, + "acc_norm": 0.7024793388429752, + "acc_norm_stderr": 0.04173349148083499 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4868421052631579, + "acc_stderr": 0.04067533136309173, + "acc_norm": 0.4868421052631579, + "acc_norm_stderr": 0.04067533136309173 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.39705882352941174, + "acc_stderr": 0.01979448890002411, + "acc_norm": 0.39705882352941174, + "acc_norm_stderr": 0.01979448890002411 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.34397163120567376, + "acc_stderr": 0.028338017428611324, + "acc_norm": 0.34397163120567376, + "acc_norm_stderr": 0.028338017428611324 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2767857142857143, + "acc_stderr": 0.042466243366976235, + "acc_norm": 0.2767857142857143, + "acc_norm_stderr": 0.042466243366976235 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.03214952147802747, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.03214952147802747 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2905027932960894, + "acc_stderr": 0.01518384430720616, + "acc_norm": 0.2905027932960894, + "acc_norm_stderr": 0.01518384430720616 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_professional_medicine|5": 
{ + "acc": 0.4411764705882353, + "acc_stderr": 0.0301619119307671, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.0301619119307671 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.47346938775510206, + "acc_stderr": 0.03196412734523272, + "acc_norm": 0.47346938775510206, + "acc_norm_stderr": 0.03196412734523272 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6413502109704642, + "acc_stderr": 0.031219569445301843, + "acc_norm": 0.6413502109704642, + "acc_norm_stderr": 0.031219569445301843 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.31877444589308995, + "acc_stderr": 0.011901895635786084, + "acc_norm": 0.31877444589308995, + "acc_norm_stderr": 0.011901895635786084 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4950980392156863, + "acc_stderr": 0.03509143375606786, + "acc_norm": 0.4950980392156863, + "acc_norm_stderr": 0.03509143375606786 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.03888176921674101, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.03888176921674101 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.28518971848225216, + "mc1_stderr": 0.015805827874454895, + "mc2": 0.4453706082481084, + "mc2_stderr": 0.015180178951498797 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5926800472255017, + "acc_stderr": 0.01689245669519127, + "acc_norm": 0.6304604486422668, + "acc_norm_stderr": 0.016594883405685438 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + 
"harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + 
"harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "GAI-LLM/Yi-Ko-6B-dpo-v4", + "model_sha": "88d9402eaf923e49b72f09ecefca91705e3e3d01", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/GAI-LLM/Yi-Ko-6B-dpo-v5/result_2024-01-03 06:28:06.json b/GAI-LLM/Yi-Ko-6B-dpo-v5/result_2024-01-03 06:28:06.json new file mode 100644 index 0000000000000000000000000000000000000000..7cbc9a7320be4e22ca9da5ad62cc0a745edea381 --- /dev/null +++ b/GAI-LLM/Yi-Ko-6B-dpo-v5/result_2024-01-03 06:28:06.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.363481228668942, + "acc_stderr": 0.014056207319068285, + "acc_norm": 0.4249146757679181, + "acc_norm_stderr": 0.014445698968520769 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4086835291774547, + "acc_stderr": 0.004905859114942305, + "acc_norm": 0.5401314479187412, + "acc_norm_stderr": 0.004973683026202173 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5087719298245614, + "acc_stderr": 0.038342347441649924, + "acc_norm": 0.5087719298245614, + "acc_norm_stderr": 0.038342347441649924 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5922330097087378, + "acc_stderr": 0.04865777570410769, + "acc_norm": 0.5922330097087378, + "acc_norm_stderr": 0.04865777570410769 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5427841634738186, + "acc_stderr": 0.017814385238534444, + "acc_norm": 0.5427841634738186, + "acc_norm_stderr": 0.017814385238534444 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.042925967182569816, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.042925967182569816 + }, + 
"harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542126, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542126 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39148936170212767, + "acc_stderr": 0.03190701242326812, + "acc_norm": 0.39148936170212767, + "acc_norm_stderr": 0.03190701242326812 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3614457831325301, + "acc_stderr": 0.0374005938202932, + "acc_norm": 0.3614457831325301, + "acc_norm_stderr": 0.0374005938202932 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5048231511254019, + "acc_stderr": 0.028396770444111305, + "acc_norm": 0.5048231511254019, + "acc_norm_stderr": 0.028396770444111305 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4484304932735426, + "acc_stderr": 0.033378837362550984, + "acc_norm": 0.4484304932735426, + "acc_norm_stderr": 0.033378837362550984 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5190839694656488, + "acc_stderr": 0.04382094705550988, + "acc_norm": 0.5190839694656488, + "acc_norm_stderr": 0.04382094705550988 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6313131313131313, + "acc_stderr": 0.034373055019806184, + "acc_norm": 0.6313131313131313, + "acc_norm_stderr": 0.034373055019806184 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4896551724137931, + "acc_stderr": 0.041657747757287644, + "acc_norm": 0.4896551724137931, + "acc_norm_stderr": 0.041657747757287644 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.043364327079931785, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.043364327079931785 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5, + "acc_stderr": 0.032478490123081544, + "acc_norm": 0.5, + "acc_norm_stderr": 0.032478490123081544 + }, + 
"harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.48717948717948717, + "acc_stderr": 0.025342671293807264, + "acc_norm": 0.48717948717948717, + "acc_norm_stderr": 0.025342671293807264 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.52, + "acc_stderr": 0.05021167315686779, + "acc_norm": 0.52, + "acc_norm_stderr": 0.05021167315686779 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5092592592592593, + "acc_stderr": 0.04832853553437056, + "acc_norm": 0.5092592592592593, + "acc_norm_stderr": 0.04832853553437056 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39901477832512317, + "acc_stderr": 0.03445487686264715, + "acc_norm": 0.39901477832512317, + "acc_norm_stderr": 0.03445487686264715 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.47419354838709676, + "acc_stderr": 0.028406095057653315, + "acc_norm": 0.47419354838709676, + "acc_norm_stderr": 0.028406095057653315 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6923076923076923, + "acc_stderr": 0.030236389942173092, + "acc_norm": 0.6923076923076923, + "acc_norm_stderr": 0.030236389942173092 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5094339622641509, + "acc_stderr": 0.030767394707808093, + "acc_norm": 0.5094339622641509, + "acc_norm_stderr": 0.030767394707808093 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4909090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.4909090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2851851851851852, + "acc_stderr": 0.027528599210340492, + "acc_norm": 0.2851851851851852, + "acc_norm_stderr": 0.027528599210340492 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3708609271523179, + "acc_stderr": 0.03943966699183629, + "acc_norm": 0.3708609271523179, + 
"acc_norm_stderr": 0.03943966699183629 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5771144278606966, + "acc_stderr": 0.034932317774212816, + "acc_norm": 0.5771144278606966, + "acc_norm_stderr": 0.034932317774212816 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4393063583815029, + "acc_stderr": 0.03784271932887467, + "acc_norm": 0.4393063583815029, + "acc_norm_stderr": 0.03784271932887467 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3201058201058201, + "acc_stderr": 0.024026846392873502, + "acc_norm": 0.3201058201058201, + "acc_norm_stderr": 0.024026846392873502 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4513888888888889, + "acc_stderr": 0.04161402398403279, + "acc_norm": 0.4513888888888889, + "acc_norm_stderr": 0.04161402398403279 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.53, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.53, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4624277456647399, + "acc_stderr": 0.026842985519615375, + "acc_norm": 0.4624277456647399, + "acc_norm_stderr": 0.026842985519615375 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3619631901840491, + "acc_stderr": 0.037757007291414416, + "acc_norm": 0.3619631901840491, + "acc_norm_stderr": 0.037757007291414416 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.02780165621232366, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.02780165621232366 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5751295336787565, + "acc_stderr": 0.0356747133521254, + "acc_norm": 
0.5751295336787565, + "acc_norm_stderr": 0.0356747133521254 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.04227054451232199, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.04227054451232199 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5963302752293578, + "acc_stderr": 0.02103570485657497, + "acc_norm": 0.5963302752293578, + "acc_norm_stderr": 0.02103570485657497 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30158730158730157, + "acc_stderr": 0.04104947269903394, + "acc_norm": 0.30158730158730157, + "acc_norm_stderr": 0.04104947269903394 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4673202614379085, + "acc_stderr": 0.028568699752225864, + "acc_norm": 0.4673202614379085, + "acc_norm_stderr": 0.028568699752225864 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237101, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237101 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6198347107438017, + "acc_stderr": 0.04431324501968431, + "acc_norm": 0.6198347107438017, + "acc_norm_stderr": 0.04431324501968431 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.506578947368421, + "acc_stderr": 0.040685900502249704, + "acc_norm": 0.506578947368421, + "acc_norm_stderr": 0.040685900502249704 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4150326797385621, + "acc_stderr": 0.01993362777685742, + "acc_norm": 0.4150326797385621, + "acc_norm_stderr": 0.01993362777685742 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.02812163604063988, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.02812163604063988 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.26785714285714285, + "acc_stderr": 0.04203277291467761, + "acc_norm": 0.26785714285714285, + "acc_norm_stderr": 0.04203277291467761 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3472222222222222, + 
"acc_stderr": 0.03246887243637649, + "acc_norm": 0.3472222222222222, + "acc_norm_stderr": 0.03246887243637649 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2547486033519553, + "acc_stderr": 0.014572650383409167, + "acc_norm": 0.2547486033519553, + "acc_norm_stderr": 0.014572650383409167 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.56, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.56, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4007352941176471, + "acc_stderr": 0.029768263528933105, + "acc_norm": 0.4007352941176471, + "acc_norm_stderr": 0.029768263528933105 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.03168091161233882, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.03168091161233882 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6033755274261603, + "acc_stderr": 0.03184399873811225, + "acc_norm": 0.6033755274261603, + "acc_norm_stderr": 0.03184399873811225 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3011734028683181, + "acc_stderr": 0.011717148751648422, + "acc_norm": 0.3011734028683181, + "acc_norm_stderr": 0.011717148751648422 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5049019607843137, + "acc_stderr": 0.03509143375606786, + "acc_norm": 0.5049019607843137, + "acc_norm_stderr": 0.03509143375606786 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5333333333333333, + "acc_stderr": 0.03895658065271846, + "acc_norm": 0.5333333333333333, + "acc_norm_stderr": 0.03895658065271846 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3598531211750306, + "mc1_stderr": 0.016801860466677133, + "mc2": 0.5367536588831076, + "mc2_stderr": 0.015755121127564543 + }, + 
"harness|ko_commongen_v2|2": { + "acc": 0.5041322314049587, + "acc_stderr": 0.01718976703213081, + "acc_norm": 0.5218417945690673, + "acc_norm_stderr": 0.017173944474294392 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + 
"harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "GAI-LLM/Yi-Ko-6B-dpo-v5", + "model_sha": "116ebe4d8f2dffe6ce28f1a230033e5882d92217", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/GAI-LLM/Yi-Ko-6B-dpo-v6/result_2024-01-22 04:31:26.json b/GAI-LLM/Yi-Ko-6B-dpo-v6/result_2024-01-22 04:31:26.json new file mode 100644 index 0000000000000000000000000000000000000000..caf10c87a3f00404f64eaf655748157ece6b5ecb --- /dev/null +++ b/GAI-LLM/Yi-Ko-6B-dpo-v6/result_2024-01-22 04:31:26.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3924914675767918, + "acc_stderr": 0.014269634635670736, + "acc_norm": 0.4522184300341297, + "acc_norm_stderr": 0.014544519880633825 + }, + "harness|ko_hellaswag|10": { + "acc": 0.42929695279824737, + "acc_stderr": 0.004939642460172577, + "acc_norm": 0.5702051384186417, + "acc_norm_stderr": 0.004940349676769334 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 
0.4444444444444444, + "acc_stderr": 0.03811079669833531, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.03811079669833531 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.42718446601941745, + "acc_stderr": 0.04897957737781168, + "acc_norm": 0.42718446601941745, + "acc_norm_stderr": 0.04897957737781168 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5300127713920817, + "acc_stderr": 0.01784772308664908, + "acc_norm": 0.5300127713920817, + "acc_norm_stderr": 0.01784772308664908 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.04244633238353229, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.04244633238353229 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847415, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847415 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4, + "acc_stderr": 0.03202563076101735, + "acc_norm": 0.4, + "acc_norm_stderr": 0.03202563076101735 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4457831325301205, + "acc_stderr": 0.038695433234721015, + "acc_norm": 0.4457831325301205, + "acc_norm_stderr": 0.038695433234721015 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.48231511254019294, + "acc_stderr": 0.02838032284907713, + "acc_norm": 0.48231511254019294, + "acc_norm_stderr": 0.02838032284907713 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.48878923766816146, + "acc_stderr": 0.033549366530984746, + "acc_norm": 0.48878923766816146, + "acc_norm_stderr": 0.033549366530984746 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48091603053435117, + "acc_stderr": 0.04382094705550989, + "acc_norm": 0.48091603053435117, + "acc_norm_stderr": 0.04382094705550989 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5606060606060606, + "acc_stderr": 
0.035360859475294805, + "acc_norm": 0.5606060606060606, + "acc_norm_stderr": 0.035360859475294805 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3586206896551724, + "acc_stderr": 0.039966295748767186, + "acc_norm": 0.3586206896551724, + "acc_norm_stderr": 0.039966295748767186 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.04280105837364395, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.04280105837364395 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.46218487394957986, + "acc_stderr": 0.032385469487589795, + "acc_norm": 0.46218487394957986, + "acc_norm_stderr": 0.032385469487589795 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4153846153846154, + "acc_stderr": 0.024985354923102308, + "acc_norm": 0.4153846153846154, + "acc_norm_stderr": 0.024985354923102308 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.47, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.47, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.03481904844438804, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.03481904844438804 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.45161290322580644, + "acc_stderr": 0.02831050034856839, + "acc_norm": 0.45161290322580644, + "acc_norm_stderr": 0.02831050034856839 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6367521367521367, + "acc_stderr": 0.03150712523091264, + "acc_norm": 0.6367521367521367, + "acc_norm_stderr": 0.03150712523091264 + }, + 
"harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.42641509433962266, + "acc_stderr": 0.030437794342983052, + "acc_norm": 0.42641509433962266, + "acc_norm_stderr": 0.030437794342983052 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.0478833976870286, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.0478833976870286 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.026962424325073824, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.026962424325073824 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + "acc_stderr": 0.03684881521389023, + "acc_norm": 0.2847682119205298, + "acc_norm_stderr": 0.03684881521389023 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5223880597014925, + "acc_stderr": 0.035319879302087305, + "acc_norm": 0.5223880597014925, + "acc_norm_stderr": 0.035319879302087305 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3872832369942196, + "acc_stderr": 0.03714325906302065, + "acc_norm": 0.3872832369942196, + "acc_norm_stderr": 0.03714325906302065 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.023068188848261117, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.023068188848261117 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.375, + "acc_stderr": 0.04048439222695598, + "acc_norm": 0.375, + "acc_norm_stderr": 0.04048439222695598 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4595375722543353, + "acc_stderr": 0.026830805998952236, + "acc_norm": 0.4595375722543353, + "acc_norm_stderr": 0.026830805998952236 + 
}, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4785276073619632, + "acc_stderr": 0.03924746876751129, + "acc_norm": 0.4785276073619632, + "acc_norm_stderr": 0.03924746876751129 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.44135802469135804, + "acc_stderr": 0.027628737155668767, + "acc_norm": 0.44135802469135804, + "acc_norm_stderr": 0.027628737155668767 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5129533678756477, + "acc_stderr": 0.03607228061047749, + "acc_norm": 0.5129533678756477, + "acc_norm_stderr": 0.03607228061047749 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2894736842105263, + "acc_stderr": 0.042663394431593955, + "acc_norm": 0.2894736842105263, + "acc_norm_stderr": 0.042663394431593955 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.544954128440367, + "acc_stderr": 0.021350503090925163, + "acc_norm": 0.544954128440367, + "acc_norm_stderr": 0.021350503090925163 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.04134913018303316, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.04134913018303316 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4150326797385621, + "acc_stderr": 0.028213504177824093, + "acc_norm": 0.4150326797385621, + "acc_norm_stderr": 0.028213504177824093 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5702479338842975, + "acc_stderr": 0.045190820213197716, + "acc_norm": 0.5702479338842975, + "acc_norm_stderr": 0.045190820213197716 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3618421052631579, + "acc_stderr": 0.03910525752849725, + "acc_norm": 0.3618421052631579, + 
"acc_norm_stderr": 0.03910525752849725 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.369281045751634, + "acc_stderr": 0.019524316744866342, + "acc_norm": 0.369281045751634, + "acc_norm_stderr": 0.019524316744866342 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32978723404255317, + "acc_stderr": 0.028045946942042408, + "acc_norm": 0.32978723404255317, + "acc_norm_stderr": 0.028045946942042408 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.26785714285714285, + "acc_stderr": 0.04203277291467762, + "acc_norm": 0.26785714285714285, + "acc_norm_stderr": 0.04203277291467762 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.32407407407407407, + "acc_stderr": 0.03191923445686185, + "acc_norm": 0.32407407407407407, + "acc_norm_stderr": 0.03191923445686185 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.41, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4264705882352941, + "acc_stderr": 0.030042615832714854, + "acc_norm": 0.4264705882352941, + "acc_norm_stderr": 0.030042615832714854 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4204081632653061, + "acc_stderr": 0.03160106993449604, + "acc_norm": 0.4204081632653061, + "acc_norm_stderr": 0.03160106993449604 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6286919831223629, + "acc_stderr": 0.03145068600744859, + "acc_norm": 0.6286919831223629, + "acc_norm_stderr": 0.03145068600744859 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 
0.3272490221642764, + "acc_stderr": 0.011983819806464761, + "acc_norm": 0.3272490221642764, + "acc_norm_stderr": 0.011983819806464761 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4950980392156863, + "acc_stderr": 0.03509143375606787, + "acc_norm": 0.4950980392156863, + "acc_norm_stderr": 0.03509143375606787 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4727272727272727, + "acc_stderr": 0.03898531605579419, + "acc_norm": 0.4727272727272727, + "acc_norm_stderr": 0.03898531605579419 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.4173806609547124, + "mc1_stderr": 0.01726289106327218, + "mc2": 0.5982804891371111, + "mc2_stderr": 0.0156129830819337 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4852420306965762, + "acc_stderr": 0.01718286443499856, + "acc_norm": 0.5572609208972845, + "acc_norm_stderr": 0.017077254131556224 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + 
"harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "GAI-LLM/Yi-Ko-6B-dpo-v6", + "model_sha": "9217c8bae09e5d789c15d96073a19d97b038ae4d", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/GAI-LLM/Yi-Ko-6B-mixed-v11/result_2023-12-21 
05:30:07.json b/GAI-LLM/Yi-Ko-6B-mixed-v11/result_2023-12-21 05:30:07.json new file mode 100644 index 0000000000000000000000000000000000000000..e95f945c3f4019d95b6f4b565d1d4e4a965d5fdf --- /dev/null +++ b/GAI-LLM/Yi-Ko-6B-mixed-v11/result_2023-12-21 05:30:07.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3575085324232082, + "acc_stderr": 0.014005494275916573, + "acc_norm": 0.4206484641638225, + "acc_norm_stderr": 0.014426211252508401 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3994224258115913, + "acc_stderr": 0.004887787255353492, + "acc_norm": 0.5336586337382991, + "acc_norm_stderr": 0.004978462690966916 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.52046783625731, + "acc_stderr": 0.0383161053282193, + "acc_norm": 0.52046783625731, + "acc_norm_stderr": 0.0383161053282193 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5922330097087378, + "acc_stderr": 0.04865777570410769, + "acc_norm": 0.5922330097087378, + "acc_norm_stderr": 0.04865777570410769 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5504469987228607, + "acc_stderr": 0.017788725283507337, + "acc_norm": 0.5504469987228607, + "acc_norm_stderr": 0.017788725283507337 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45185185185185184, + "acc_stderr": 0.042992689054808624, + "acc_norm": 0.45185185185185184, + "acc_norm_stderr": 0.042992689054808624 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.35, + "acc_stderr": 0.04793724854411019, + "acc_norm": 0.35, + "acc_norm_stderr": 0.04793724854411019 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4127659574468085, + "acc_stderr": 0.03218471141400351, + "acc_norm": 0.4127659574468085, + "acc_norm_stderr": 0.03218471141400351 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42168674698795183, + "acc_stderr": 0.03844453181770917, + "acc_norm": 0.42168674698795183, + "acc_norm_stderr": 0.03844453181770917 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5112540192926045, + 
"acc_stderr": 0.028390897396863526, + "acc_norm": 0.5112540192926045, + "acc_norm_stderr": 0.028390897396863526 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.484304932735426, + "acc_stderr": 0.0335412657542081, + "acc_norm": 0.484304932735426, + "acc_norm_stderr": 0.0335412657542081 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5419847328244275, + "acc_stderr": 0.04369802690578757, + "acc_norm": 0.5419847328244275, + "acc_norm_stderr": 0.04369802690578757 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.44, + "acc_stderr": 0.0498887651569859, + "acc_norm": 0.44, + "acc_norm_stderr": 0.0498887651569859 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6515151515151515, + "acc_stderr": 0.033948539651564025, + "acc_norm": 0.6515151515151515, + "acc_norm_stderr": 0.033948539651564025 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5379310344827586, + "acc_stderr": 0.04154659671707548, + "acc_norm": 0.5379310344827586, + "acc_norm_stderr": 0.04154659671707548 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3235294117647059, + "acc_stderr": 0.046550104113196177, + "acc_norm": 0.3235294117647059, + "acc_norm_stderr": 0.046550104113196177 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5084033613445378, + "acc_stderr": 0.0324739027656967, + "acc_norm": 0.5084033613445378, + "acc_norm_stderr": 0.0324739027656967 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5025641025641026, + "acc_stderr": 0.025350672979412188, + "acc_norm": 0.5025641025641026, + "acc_norm_stderr": 0.025350672979412188 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 
0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.03481904844438803, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.03481904844438803 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5096774193548387, + "acc_stderr": 0.02843867799890955, + "acc_norm": 0.5096774193548387, + "acc_norm_stderr": 0.02843867799890955 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7136752136752137, + "acc_stderr": 0.029614323690456648, + "acc_norm": 0.7136752136752137, + "acc_norm_stderr": 0.029614323690456648 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4867924528301887, + "acc_stderr": 0.030762134874500482, + "acc_norm": 0.4867924528301887, + "acc_norm_stderr": 0.030762134874500482 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4727272727272727, + "acc_stderr": 0.04782001791380063, + "acc_norm": 0.4727272727272727, + "acc_norm_stderr": 0.04782001791380063 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.028742040903948496, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.028742040903948496 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.36423841059602646, + "acc_stderr": 0.03929111781242742, + "acc_norm": 0.36423841059602646, + "acc_norm_stderr": 0.03929111781242742 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6318407960199005, + "acc_stderr": 0.03410410565495302, + "acc_norm": 0.6318407960199005, + "acc_norm_stderr": 0.03410410565495302 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.44508670520231214, + "acc_stderr": 0.03789401760283648, + "acc_norm": 0.44508670520231214, + "acc_norm_stderr": 0.03789401760283648 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.024594975128920938, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.024594975128920938 + }, + 
"harness|ko_mmlu_college_biology|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.041553199555931467, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.041553199555931467 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5144508670520231, + "acc_stderr": 0.02690784985628254, + "acc_norm": 0.5144508670520231, + "acc_norm_stderr": 0.02690784985628254 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.49693251533742333, + "acc_stderr": 0.03928297078179662, + "acc_norm": 0.49693251533742333, + "acc_norm_stderr": 0.03928297078179662 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5092592592592593, + "acc_stderr": 0.027815973433878014, + "acc_norm": 0.5092592592592593, + "acc_norm_stderr": 0.027815973433878014 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5958549222797928, + "acc_stderr": 0.0354150857888402, + "acc_norm": 0.5958549222797928, + "acc_norm_stderr": 0.0354150857888402 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.32456140350877194, + "acc_stderr": 0.044045561573747685, + "acc_norm": 0.32456140350877194, + "acc_norm_stderr": 0.044045561573747685 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.634862385321101, + "acc_stderr": 0.020642801454383998, + "acc_norm": 0.634862385321101, + "acc_norm_stderr": 0.020642801454383998 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.29365079365079366, + "acc_stderr": 0.04073524322147125, + "acc_norm": 0.29365079365079366, + "acc_norm_stderr": 
0.04073524322147125 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5098039215686274, + "acc_stderr": 0.02862441255016795, + "acc_norm": 0.5098039215686274, + "acc_norm_stderr": 0.02862441255016795 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6611570247933884, + "acc_stderr": 0.043207678075366705, + "acc_norm": 0.6611570247933884, + "acc_norm_stderr": 0.043207678075366705 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5131578947368421, + "acc_stderr": 0.04067533136309173, + "acc_norm": 0.5131578947368421, + "acc_norm_stderr": 0.04067533136309173 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.41830065359477125, + "acc_stderr": 0.019955975145835542, + "acc_norm": 0.41830065359477125, + "acc_norm_stderr": 0.019955975145835542 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3404255319148936, + "acc_stderr": 0.028267657482650144, + "acc_norm": 0.3404255319148936, + "acc_norm_stderr": 0.028267657482650144 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.29464285714285715, + "acc_stderr": 0.043270409325787317, + "acc_norm": 0.29464285714285715, + "acc_norm_stderr": 0.043270409325787317 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.032568505702936464, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.032568505702936464 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24134078212290502, + "acc_stderr": 0.014310999547961443, + "acc_norm": 0.24134078212290502, + "acc_norm_stderr": 0.014310999547961443 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + 
"acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4227941176470588, + "acc_stderr": 0.030008562845003476, + "acc_norm": 0.4227941176470588, + "acc_norm_stderr": 0.030008562845003476 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4816326530612245, + "acc_stderr": 0.03198761546763126, + "acc_norm": 0.4816326530612245, + "acc_norm_stderr": 0.03198761546763126 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6413502109704642, + "acc_stderr": 0.03121956944530185, + "acc_norm": 0.6413502109704642, + "acc_norm_stderr": 0.03121956944530185 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.32529335071707954, + "acc_stderr": 0.01196531153657153, + "acc_norm": 0.32529335071707954, + "acc_norm_stderr": 0.01196531153657153 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5392156862745098, + "acc_stderr": 0.03498501649369527, + "acc_norm": 0.5392156862745098, + "acc_norm_stderr": 0.03498501649369527 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5575757575757576, + "acc_stderr": 0.038783721137112745, + "acc_norm": 0.5575757575757576, + "acc_norm_stderr": 0.038783721137112745 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2594859241126071, + "mc1_stderr": 0.015345409485557956, + "mc2": 0.4121524749596002, + "mc2_stderr": 0.014822327944942062 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5785123966942148, + "acc_stderr": 0.016977101932601518, + "acc_norm": 0.6269185360094451, + "acc_norm_stderr": 0.016627318275137443 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + 
"harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + 
"harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "GAI-LLM/Yi-Ko-6B-mixed-v11", + "model_sha": "ca827b1389e67a8b8d8581d8f6f3accd4866ba11", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/GAI-LLM/Yi-Ko-6B-mixed-v13/result_2024-01-02 07:08:09.json b/GAI-LLM/Yi-Ko-6B-mixed-v13/result_2024-01-02 07:08:09.json new file mode 100644 index 0000000000000000000000000000000000000000..10c1e93ed84f6261aa3c685384e2bfb44566b4eb --- /dev/null +++ b/GAI-LLM/Yi-Ko-6B-mixed-v13/result_2024-01-02 07:08:09.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.35665529010238906, + "acc_stderr": 0.013998056902620192, + "acc_norm": 0.42918088737201365, + "acc_norm_stderr": 0.014464085894870655 + }, + "harness|ko_hellaswag|10": { + "acc": 0.39892451702848036, + "acc_stderr": 0.004886764243204055, + "acc_norm": 0.5349531965743876, + "acc_norm_stderr": 0.004977574188421318 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5087719298245614, + "acc_stderr": 0.038342347441649924, + "acc_norm": 0.5087719298245614, + "acc_norm_stderr": 0.038342347441649924 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5922330097087378, + "acc_stderr": 0.04865777570410769, + "acc_norm": 0.5922330097087378, + "acc_norm_stderr": 0.04865777570410769 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.558109833971903, + "acc_stderr": 0.017758800534214414, + "acc_norm": 0.558109833971903, + "acc_norm_stderr": 0.017758800534214414 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45185185185185184, + "acc_stderr": 
0.04299268905480863, + "acc_norm": 0.45185185185185184, + "acc_norm_stderr": 0.04299268905480863 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4127659574468085, + "acc_stderr": 0.03218471141400351, + "acc_norm": 0.4127659574468085, + "acc_norm_stderr": 0.03218471141400351 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.40963855421686746, + "acc_stderr": 0.03828401115079021, + "acc_norm": 0.40963855421686746, + "acc_norm_stderr": 0.03828401115079021 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5273311897106109, + "acc_stderr": 0.028355633568328188, + "acc_norm": 0.5273311897106109, + "acc_norm_stderr": 0.028355633568328188 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4663677130044843, + "acc_stderr": 0.033481800170603065, + "acc_norm": 0.4663677130044843, + "acc_norm_stderr": 0.033481800170603065 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5343511450381679, + "acc_stderr": 0.04374928560599738, + "acc_norm": 0.5343511450381679, + "acc_norm_stderr": 0.04374928560599738 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.034273086529999365, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.034273086529999365 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.503448275862069, + "acc_stderr": 0.04166567577101579, + "acc_norm": 0.503448275862069, + "acc_norm_stderr": 0.04166567577101579 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.04617034827006717, + "acc_norm": 0.3137254901960784, + "acc_norm_stderr": 0.04617034827006717 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 
0.5336134453781513, + "acc_stderr": 0.03240501447690071, + "acc_norm": 0.5336134453781513, + "acc_norm_stderr": 0.03240501447690071 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5025641025641026, + "acc_stderr": 0.025350672979412188, + "acc_norm": 0.5025641025641026, + "acc_norm_stderr": 0.025350672979412188 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.43349753694581283, + "acc_stderr": 0.034867317274198714, + "acc_norm": 0.43349753694581283, + "acc_norm_stderr": 0.034867317274198714 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5225806451612903, + "acc_stderr": 0.02841498501970786, + "acc_norm": 0.5225806451612903, + "acc_norm_stderr": 0.02841498501970786 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7435897435897436, + "acc_stderr": 0.028605953702004253, + "acc_norm": 0.7435897435897436, + "acc_norm_stderr": 0.028605953702004253 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4867924528301887, + "acc_stderr": 0.030762134874500476, + "acc_norm": 0.4867924528301887, + "acc_norm_stderr": 0.030762134874500476 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.02831753349606648, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.02831753349606648 + }, + "harness|ko_mmlu_high_school_physics|5": { + 
"acc": 0.37748344370860926, + "acc_stderr": 0.039580272311215706, + "acc_norm": 0.37748344370860926, + "acc_norm_stderr": 0.039580272311215706 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6318407960199005, + "acc_stderr": 0.03410410565495301, + "acc_norm": 0.6318407960199005, + "acc_norm_stderr": 0.03410410565495301 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4393063583815029, + "acc_stderr": 0.037842719328874674, + "acc_norm": 0.4393063583815029, + "acc_norm_stderr": 0.037842719328874674 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3386243386243386, + "acc_stderr": 0.024373197867983056, + "acc_norm": 0.3386243386243386, + "acc_norm_stderr": 0.024373197867983056 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.04174752578923185, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.04174752578923185 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542129, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542129 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.49421965317919075, + "acc_stderr": 0.026917296179149116, + "acc_norm": 0.49421965317919075, + "acc_norm_stderr": 0.026917296179149116 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4723926380368098, + "acc_stderr": 0.039223782906109894, + "acc_norm": 0.4723926380368098, + "acc_norm_stderr": 0.039223782906109894 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5277777777777778, + "acc_stderr": 0.027777777777777797, + "acc_norm": 0.5277777777777778, + "acc_norm_stderr": 0.027777777777777797 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + 
"harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5906735751295337, + "acc_stderr": 0.03548608168860807, + "acc_norm": 0.5906735751295337, + "acc_norm_stderr": 0.03548608168860807 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3157894736842105, + "acc_stderr": 0.04372748290278008, + "acc_norm": 0.3157894736842105, + "acc_norm_stderr": 0.04372748290278008 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6146788990825688, + "acc_stderr": 0.020865850852794104, + "acc_norm": 0.6146788990825688, + "acc_norm_stderr": 0.020865850852794104 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.039325376803928704, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.039325376803928704 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5130718954248366, + "acc_stderr": 0.028620130800700246, + "acc_norm": 0.5130718954248366, + "acc_norm_stderr": 0.028620130800700246 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6528925619834711, + "acc_stderr": 0.04345724570292534, + "acc_norm": 0.6528925619834711, + "acc_norm_stderr": 0.04345724570292534 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5197368421052632, + "acc_stderr": 0.040657710025626036, + "acc_norm": 0.5197368421052632, + "acc_norm_stderr": 0.040657710025626036 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4068627450980392, + "acc_stderr": 0.019873802005061173, + "acc_norm": 0.4068627450980392, + "acc_norm_stderr": 0.019873802005061173 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.35106382978723405, + "acc_stderr": 0.028473501272963768, + "acc_norm": 0.35106382978723405, + "acc_norm_stderr": 0.028473501272963768 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.29464285714285715, + "acc_stderr": 0.043270409325787317, + 
"acc_norm": 0.29464285714285715, + "acc_norm_stderr": 0.043270409325787317 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.33796296296296297, + "acc_stderr": 0.03225941352631295, + "acc_norm": 0.33796296296296297, + "acc_norm_stderr": 0.03225941352631295 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24134078212290502, + "acc_stderr": 0.014310999547961443, + "acc_norm": 0.24134078212290502, + "acc_norm_stderr": 0.014310999547961443 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.53, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.53, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.41544117647058826, + "acc_stderr": 0.029935342707877746, + "acc_norm": 0.41544117647058826, + "acc_norm_stderr": 0.029935342707877746 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.46122448979591835, + "acc_stderr": 0.03191282052669277, + "acc_norm": 0.46122448979591835, + "acc_norm_stderr": 0.03191282052669277 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6497890295358649, + "acc_stderr": 0.031052391937584346, + "acc_norm": 0.6497890295358649, + "acc_norm_stderr": 0.031052391937584346 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.33572359843546284, + "acc_stderr": 0.012061304157664618, + "acc_norm": 0.33572359843546284, + "acc_norm_stderr": 0.012061304157664618 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5343137254901961, + "acc_stderr": 0.03501038327635897, + "acc_norm": 0.5343137254901961, + "acc_norm_stderr": 0.03501038327635897 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5757575757575758, + "acc_stderr": 0.038592681420702636, + "acc_norm": 0.5757575757575758, + "acc_norm_stderr": 0.038592681420702636 + }, + 
"harness|ko_truthfulqa_mc|0": { + "mc1": 0.26805385556915545, + "mc1_stderr": 0.015506204722834555, + "mc2": 0.417505165464333, + "mc2_stderr": 0.014838211675706857 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5655253837072018, + "acc_stderr": 0.017042098620824928, + "acc_norm": 0.6056670602125147, + "acc_norm_stderr": 0.01680209067489322 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "GAI-LLM/Yi-Ko-6B-mixed-v13", + "model_sha": "226493d5e98b136d519ed5d3052e8aff77ceef1e", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/GAI-LLM/Yi-Ko-6B-mixed-v15/result_2024-01-18 12:26:36.json b/GAI-LLM/Yi-Ko-6B-mixed-v15/result_2024-01-18 12:26:36.json new file mode 100644 index 0000000000000000000000000000000000000000..3cbeb1142a9dfdb44b2c9bfe7cb8972a3199d79f --- /dev/null +++ b/GAI-LLM/Yi-Ko-6B-mixed-v15/result_2024-01-18 12:26:36.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3515358361774744, + "acc_stderr": 0.01395241369960094, + "acc_norm": 0.4300341296928328, + "acc_norm_stderr": 0.014467631559137994 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3988249352718582, + "acc_stderr": 
0.004886559008754987, + "acc_norm": 0.5341565425214101, + "acc_norm_stderr": 0.0049781249457598545 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5263157894736842, + "acc_stderr": 0.03829509868994727, + "acc_norm": 0.5263157894736842, + "acc_norm_stderr": 0.03829509868994727 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5825242718446602, + "acc_stderr": 0.04882840548212238, + "acc_norm": 0.5825242718446602, + "acc_norm_stderr": 0.04882840548212238 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5593869731800766, + "acc_stderr": 0.01775339697390849, + "acc_norm": 0.5593869731800766, + "acc_norm_stderr": 0.01775339697390849 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4962962962962963, + "acc_stderr": 0.04319223625811331, + "acc_norm": 0.4962962962962963, + "acc_norm_stderr": 0.04319223625811331 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.425531914893617, + "acc_stderr": 0.03232146916224469, + "acc_norm": 0.425531914893617, + "acc_norm_stderr": 0.03232146916224469 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3855421686746988, + "acc_stderr": 0.03789134424611549, + "acc_norm": 0.3855421686746988, + "acc_norm_stderr": 0.03789134424611549 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5144694533762058, + "acc_stderr": 0.028386198084177687, + "acc_norm": 0.5144694533762058, + "acc_norm_stderr": 0.028386198084177687 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4798206278026906, + "acc_stderr": 0.033530461674123, + "acc_norm": 0.4798206278026906, + "acc_norm_stderr": 0.033530461674123 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4961832061068702, + "acc_stderr": 0.043851623256015534, + "acc_norm": 0.4961832061068702, + "acc_norm_stderr": 0.043851623256015534 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.45, + "acc_stderr": 
0.04999999999999999, + "acc_norm": 0.45, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6262626262626263, + "acc_stderr": 0.03446897738659332, + "acc_norm": 0.6262626262626263, + "acc_norm_stderr": 0.03446897738659332 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.496551724137931, + "acc_stderr": 0.041665675771015785, + "acc_norm": 0.496551724137931, + "acc_norm_stderr": 0.041665675771015785 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.30392156862745096, + "acc_stderr": 0.045766654032077615, + "acc_norm": 0.30392156862745096, + "acc_norm_stderr": 0.045766654032077615 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5336134453781513, + "acc_stderr": 0.03240501447690071, + "acc_norm": 0.5336134453781513, + "acc_norm_stderr": 0.03240501447690071 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4948717948717949, + "acc_stderr": 0.025349672906838667, + "acc_norm": 0.4948717948717949, + "acc_norm_stderr": 0.025349672906838667 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.57, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.57, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.04803752235190192, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.04803752235190192 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.03465304488406795, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.03465304488406795 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5032258064516129, + "acc_stderr": 0.02844341422643831, + "acc_norm": 0.5032258064516129, + "acc_norm_stderr": 0.02844341422643831 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 
0.7136752136752137, + "acc_stderr": 0.02961432369045665, + "acc_norm": 0.7136752136752137, + "acc_norm_stderr": 0.02961432369045665 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.49433962264150944, + "acc_stderr": 0.030770900763851316, + "acc_norm": 0.49433962264150944, + "acc_norm_stderr": 0.030770900763851316 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4909090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.4909090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.028317533496066475, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.028317533496066475 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3708609271523179, + "acc_stderr": 0.03943966699183629, + "acc_norm": 0.3708609271523179, + "acc_norm_stderr": 0.03943966699183629 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6417910447761194, + "acc_stderr": 0.03390393042268814, + "acc_norm": 0.6417910447761194, + "acc_norm_stderr": 0.03390393042268814 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4161849710982659, + "acc_stderr": 0.037585177754049466, + "acc_norm": 0.4161849710982659, + "acc_norm_stderr": 0.037585177754049466 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.29894179894179895, + "acc_stderr": 0.023577604791655812, + "acc_norm": 0.29894179894179895, + "acc_norm_stderr": 0.023577604791655812 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4305555555555556, + "acc_stderr": 0.04140685639111503, + "acc_norm": 0.4305555555555556, + "acc_norm_stderr": 0.04140685639111503 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.31, + "acc_stderr": 0.046482319871173156, + "acc_norm": 0.31, + "acc_norm_stderr": 0.046482319871173156 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + 
"harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4682080924855491, + "acc_stderr": 0.026864624366756653, + "acc_norm": 0.4682080924855491, + "acc_norm_stderr": 0.026864624366756653 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4785276073619632, + "acc_stderr": 0.0392474687675113, + "acc_norm": 0.4785276073619632, + "acc_norm_stderr": 0.0392474687675113 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5339506172839507, + "acc_stderr": 0.027756535257347666, + "acc_norm": 0.5339506172839507, + "acc_norm_stderr": 0.027756535257347666 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5854922279792746, + "acc_stderr": 0.035553003195576686, + "acc_norm": 0.5854922279792746, + "acc_norm_stderr": 0.035553003195576686 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04434600701584925, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04434600701584925 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6110091743119266, + "acc_stderr": 0.020902300887392863, + "acc_norm": 0.6110091743119266, + "acc_norm_stderr": 0.020902300887392863 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.0404061017820884, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.0404061017820884 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.49673202614379086, + "acc_stderr": 0.02862930519400354, + "acc_norm": 0.49673202614379086, + "acc_norm_stderr": 0.02862930519400354 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6611570247933884, + "acc_stderr": 0.043207678075366705, + "acc_norm": 0.6611570247933884, + "acc_norm_stderr": 
0.043207678075366705 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5131578947368421, + "acc_stderr": 0.04067533136309173, + "acc_norm": 0.5131578947368421, + "acc_norm_stderr": 0.04067533136309173 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.39869281045751637, + "acc_stderr": 0.019808281317449848, + "acc_norm": 0.39869281045751637, + "acc_norm_stderr": 0.019808281317449848 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32978723404255317, + "acc_stderr": 0.028045946942042398, + "acc_norm": 0.32978723404255317, + "acc_norm_stderr": 0.028045946942042398 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2767857142857143, + "acc_stderr": 0.042466243366976235, + "acc_norm": 0.2767857142857143, + "acc_norm_stderr": 0.042466243366976235 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3287037037037037, + "acc_stderr": 0.032036140846700596, + "acc_norm": 0.3287037037037037, + "acc_norm_stderr": 0.032036140846700596 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24022346368715083, + "acc_stderr": 0.014288343803925295, + "acc_norm": 0.24022346368715083, + "acc_norm_stderr": 0.014288343803925295 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.41911764705882354, + "acc_stderr": 0.029972807170464626, + "acc_norm": 0.41911764705882354, + "acc_norm_stderr": 0.029972807170464626 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4122448979591837, + "acc_stderr": 0.03151236044674281, + "acc_norm": 0.4122448979591837, + "acc_norm_stderr": 0.03151236044674281 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6329113924050633, + 
"acc_stderr": 0.031376240725616185, + "acc_norm": 0.6329113924050633, + "acc_norm_stderr": 0.031376240725616185 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.32790091264667537, + "acc_stderr": 0.011989936640666535, + "acc_norm": 0.32790091264667537, + "acc_norm_stderr": 0.011989936640666535 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5245098039215687, + "acc_stderr": 0.03505093194348798, + "acc_norm": 0.5245098039215687, + "acc_norm_stderr": 0.03505093194348798 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5636363636363636, + "acc_stderr": 0.03872592983524754, + "acc_norm": 0.5636363636363636, + "acc_norm_stderr": 0.03872592983524754 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2778457772337821, + "mc1_stderr": 0.015680929364024633, + "mc2": 0.42627781984164365, + "mc2_stderr": 0.014874668181902183 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.551357733175915, + "acc_stderr": 0.017099430514725775, + "acc_norm": 0.5926800472255017, + "acc_norm_stderr": 0.01689245669519127 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + 
"harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "GAI-LLM/Yi-Ko-6B-mixed-v15", + "model_sha": "f0ea374c8583608a9cb1e9c08e3b6951a64b14b6", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 
0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/GAI-LLM/Yi-Ko-6B-smash-dpo/result_2023-12-29 06:56:37.json b/GAI-LLM/Yi-Ko-6B-smash-dpo/result_2023-12-29 06:56:37.json new file mode 100644 index 0000000000000000000000000000000000000000..c06377ce0e9b6f8ad87a837604e7af83b27a2f7a --- /dev/null +++ b/GAI-LLM/Yi-Ko-6B-smash-dpo/result_2023-12-29 06:56:37.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.36689419795221845, + "acc_stderr": 0.014084133118104296, + "acc_norm": 0.41638225255972694, + "acc_norm_stderr": 0.014405618279436172 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4124676359290978, + "acc_stderr": 0.004912723848944785, + "acc_norm": 0.5473013343955387, + "acc_norm_stderr": 0.004967402792744853 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5146198830409356, + "acc_stderr": 0.038331852752130254, + "acc_norm": 0.5146198830409356, + "acc_norm_stderr": 0.038331852752130254 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5533980582524272, + "acc_stderr": 0.04922424153458933, + "acc_norm": 0.5533980582524272, + "acc_norm_stderr": 0.04922424153458933 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5376756066411239, + "acc_stderr": 0.017829131764287187, + "acc_norm": 0.5376756066411239, + "acc_norm_stderr": 0.017829131764287187 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45185185185185184, + "acc_stderr": 0.04299268905480864, + "acc_norm": 0.45185185185185184, + "acc_norm_stderr": 0.04299268905480864 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816503, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816503 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.43829787234042555, + "acc_stderr": 0.03243618636108101, + "acc_norm": 0.43829787234042555, + "acc_norm_stderr": 0.03243618636108101 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3373493975903614, + 
"acc_stderr": 0.036807836907275814, + "acc_norm": 0.3373493975903614, + "acc_norm_stderr": 0.036807836907275814 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5273311897106109, + "acc_stderr": 0.028355633568328188, + "acc_norm": 0.5273311897106109, + "acc_norm_stderr": 0.028355633568328188 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.49327354260089684, + "acc_stderr": 0.033554765962343545, + "acc_norm": 0.49327354260089684, + "acc_norm_stderr": 0.033554765962343545 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5190839694656488, + "acc_stderr": 0.043820947055509867, + "acc_norm": 0.5190839694656488, + "acc_norm_stderr": 0.043820947055509867 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6161616161616161, + "acc_stderr": 0.034648816750163375, + "acc_norm": 0.6161616161616161, + "acc_norm_stderr": 0.034648816750163375 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.42758620689655175, + "acc_stderr": 0.04122737111370331, + "acc_norm": 0.42758620689655175, + "acc_norm_stderr": 0.04122737111370331 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.04617034827006716, + "acc_norm": 0.3137254901960784, + "acc_norm_stderr": 0.04617034827006716 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4327731092436975, + "acc_stderr": 0.03218358107742613, + "acc_norm": 0.4327731092436975, + "acc_norm_stderr": 0.03218358107742613 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.43846153846153846, + "acc_stderr": 0.025158266016868547, + "acc_norm": 0.43846153846153846, + "acc_norm_stderr": 0.025158266016868547 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + 
"harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3891625615763547, + "acc_stderr": 0.034304624161038716, + "acc_norm": 0.3891625615763547, + "acc_norm_stderr": 0.034304624161038716 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4806451612903226, + "acc_stderr": 0.0284226874043121, + "acc_norm": 0.4806451612903226, + "acc_norm_stderr": 0.0284226874043121 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7136752136752137, + "acc_stderr": 0.029614323690456648, + "acc_norm": 0.7136752136752137, + "acc_norm_stderr": 0.029614323690456648 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5094339622641509, + "acc_stderr": 0.030767394707808093, + "acc_norm": 0.5094339622641509, + "acc_norm_stderr": 0.030767394707808093 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.43636363636363634, + "acc_stderr": 0.04750185058907297, + "acc_norm": 0.43636363636363634, + "acc_norm_stderr": 0.04750185058907297 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3037037037037037, + "acc_stderr": 0.028037929969114982, + "acc_norm": 0.3037037037037037, + "acc_norm_stderr": 0.028037929969114982 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3443708609271523, + "acc_stderr": 0.038796870240733264, + "acc_norm": 0.3443708609271523, + "acc_norm_stderr": 0.038796870240733264 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5970149253731343, + "acc_stderr": 0.03468343295111126, + "acc_norm": 0.5970149253731343, + "acc_norm_stderr": 0.03468343295111126 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.44508670520231214, + "acc_stderr": 0.03789401760283649, + "acc_norm": 0.44508670520231214, + "acc_norm_stderr": 
0.03789401760283649 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3492063492063492, + "acc_stderr": 0.024552292209342658, + "acc_norm": 0.3492063492063492, + "acc_norm_stderr": 0.024552292209342658 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4375, + "acc_stderr": 0.04148415739394154, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.04148415739394154 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237101, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237101 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4884393063583815, + "acc_stderr": 0.02691189868637793, + "acc_norm": 0.4884393063583815, + "acc_norm_stderr": 0.02691189868637793 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4294478527607362, + "acc_stderr": 0.03889066619112722, + "acc_norm": 0.4294478527607362, + "acc_norm_stderr": 0.03889066619112722 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5061728395061729, + "acc_stderr": 0.027818623962583295, + "acc_norm": 0.5061728395061729, + "acc_norm_stderr": 0.027818623962583295 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6010362694300518, + "acc_stderr": 0.035339990940656964, + "acc_norm": 0.6010362694300518, + "acc_norm_stderr": 0.035339990940656964 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2982456140350877, + "acc_stderr": 0.043036840335373173, + "acc_norm": 0.2982456140350877, + "acc_norm_stderr": 0.043036840335373173 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6018348623853211, + "acc_stderr": 0.020987989422654257, + "acc_norm": 0.6018348623853211, + "acc_norm_stderr": 
0.020987989422654257 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.04134913018303316, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.04134913018303316 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.47058823529411764, + "acc_stderr": 0.02858034106513829, + "acc_norm": 0.47058823529411764, + "acc_norm_stderr": 0.02858034106513829 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5867768595041323, + "acc_stderr": 0.04495087843548408, + "acc_norm": 0.5867768595041323, + "acc_norm_stderr": 0.04495087843548408 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4473684210526316, + "acc_stderr": 0.040463368839782514, + "acc_norm": 0.4473684210526316, + "acc_norm_stderr": 0.040463368839782514 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.39705882352941174, + "acc_stderr": 0.019794488900024113, + "acc_norm": 0.39705882352941174, + "acc_norm_stderr": 0.019794488900024113 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3191489361702128, + "acc_stderr": 0.027807990141320196, + "acc_norm": 0.3191489361702128, + "acc_norm_stderr": 0.027807990141320196 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.23214285714285715, + "acc_stderr": 0.040073418097558065, + "acc_norm": 0.23214285714285715, + "acc_norm_stderr": 0.040073418097558065 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.032149521478027486, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.032149521478027486 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2547486033519553, + "acc_stderr": 0.014572650383409167, + "acc_norm": 0.2547486033519553, + "acc_norm_stderr": 0.014572650383409167 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.31, + "acc_stderr": 
0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.40808823529411764, + "acc_stderr": 0.029855261393483924, + "acc_norm": 0.40808823529411764, + "acc_norm_stderr": 0.029855261393483924 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.031680911612338825, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.031680911612338825 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6329113924050633, + "acc_stderr": 0.031376240725616185, + "acc_norm": 0.6329113924050633, + "acc_norm_stderr": 0.031376240725616185 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.31029986962190353, + "acc_stderr": 0.011815439293469832, + "acc_norm": 0.31029986962190353, + "acc_norm_stderr": 0.011815439293469832 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.49019607843137253, + "acc_stderr": 0.03508637358630572, + "acc_norm": 0.49019607843137253, + "acc_norm_stderr": 0.03508637358630572 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5515151515151515, + "acc_stderr": 0.038835659779569286, + "acc_norm": 0.5515151515151515, + "acc_norm_stderr": 0.038835659779569286 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.35495716034271724, + "mc1_stderr": 0.0167508623813759, + "mc2": 0.5208407477265208, + "mc2_stderr": 0.015859200141974766 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5017709563164109, + "acc_stderr": 0.017190246276231853, + "acc_norm": 0.5206611570247934, + "acc_norm_stderr": 0.017175671279836446 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + 
"harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 
1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "GAI-LLM/Yi-Ko-6B-smash-dpo", + "model_sha": "a28f3cc3bcb5b30bdfbac46949399f3119a3ffd0", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/GAI-LLM/Yi-Ko-6B-smash/result_2023-12-28 04:54:01.json b/GAI-LLM/Yi-Ko-6B-smash/result_2023-12-28 04:54:01.json new file mode 100644 index 0000000000000000000000000000000000000000..d19bf0d63bc74d22247ab327cfa07f6ba69d36b9 --- /dev/null +++ b/GAI-LLM/Yi-Ko-6B-smash/result_2023-12-28 04:54:01.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.34812286689419797, + "acc_stderr": 0.013921008595179344, + "acc_norm": 0.4138225255972696, + "acc_norm_stderr": 0.014392730009221009 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3966341366261701, + "acc_stderr": 0.004881990487628915, + "acc_norm": 0.534654451304521, + "acc_norm_stderr": 0.004977782217582457 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4853801169590643, + "acc_stderr": 0.038331852752130205, + "acc_norm": 0.4853801169590643, + "acc_norm_stderr": 0.038331852752130205 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5533980582524272, + "acc_stderr": 0.04922424153458933, + "acc_norm": 0.5533980582524272, + "acc_norm_stderr": 0.04922424153458933 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5261813537675607, + 
"acc_stderr": 0.017855434554041986, + "acc_norm": 0.5261813537675607, + "acc_norm_stderr": 0.017855434554041986 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45185185185185184, + "acc_stderr": 0.042992689054808624, + "acc_norm": 0.45185185185185184, + "acc_norm_stderr": 0.042992689054808624 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421255, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421255 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4127659574468085, + "acc_stderr": 0.03218471141400351, + "acc_norm": 0.4127659574468085, + "acc_norm_stderr": 0.03218471141400351 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.40963855421686746, + "acc_stderr": 0.03828401115079022, + "acc_norm": 0.40963855421686746, + "acc_norm_stderr": 0.03828401115079022 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5048231511254019, + "acc_stderr": 0.028396770444111298, + "acc_norm": 0.5048231511254019, + "acc_norm_stderr": 0.028396770444111298 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5201793721973094, + "acc_stderr": 0.033530461674123, + "acc_norm": 0.5201793721973094, + "acc_norm_stderr": 0.033530461674123 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5114503816793893, + "acc_stderr": 0.043841400240780176, + "acc_norm": 0.5114503816793893, + "acc_norm_stderr": 0.043841400240780176 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.034273086529999365, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.034273086529999365 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5103448275862069, + "acc_stderr": 0.04165774775728762, + "acc_norm": 0.5103448275862069, + "acc_norm_stderr": 0.04165774775728762 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 
0.3431372549019608, + "acc_stderr": 0.04724007352383888, + "acc_norm": 0.3431372549019608, + "acc_norm_stderr": 0.04724007352383888 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5210084033613446, + "acc_stderr": 0.032449808499900284, + "acc_norm": 0.5210084033613446, + "acc_norm_stderr": 0.032449808499900284 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4794871794871795, + "acc_stderr": 0.025329663163489943, + "acc_norm": 0.4794871794871795, + "acc_norm_stderr": 0.025329663163489943 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4236453201970443, + "acc_stderr": 0.03476725747649038, + "acc_norm": 0.4236453201970443, + "acc_norm_stderr": 0.03476725747649038 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.532258064516129, + "acc_stderr": 0.02838474778881334, + "acc_norm": 0.532258064516129, + "acc_norm_stderr": 0.02838474778881334 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7264957264957265, + "acc_stderr": 0.02920254015343118, + "acc_norm": 0.7264957264957265, + "acc_norm_stderr": 0.02920254015343118 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5169811320754717, + "acc_stderr": 0.030755120364119905, + "acc_norm": 0.5169811320754717, + "acc_norm_stderr": 0.030755120364119905 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4818181818181818, + "acc_stderr": 0.04785964010794916, + "acc_norm": 0.4818181818181818, + "acc_norm_stderr": 0.04785964010794916 + }, + 
"harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3037037037037037, + "acc_stderr": 0.02803792996911498, + "acc_norm": 0.3037037037037037, + "acc_norm_stderr": 0.02803792996911498 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.36423841059602646, + "acc_stderr": 0.03929111781242741, + "acc_norm": 0.36423841059602646, + "acc_norm_stderr": 0.03929111781242741 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6318407960199005, + "acc_stderr": 0.03410410565495301, + "acc_norm": 0.6318407960199005, + "acc_norm_stderr": 0.03410410565495301 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.44508670520231214, + "acc_stderr": 0.03789401760283648, + "acc_norm": 0.44508670520231214, + "acc_norm_stderr": 0.03789401760283648 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3412698412698413, + "acc_stderr": 0.024419234966819064, + "acc_norm": 0.3412698412698413, + "acc_norm_stderr": 0.024419234966819064 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4236111111111111, + "acc_stderr": 0.04132125019723368, + "acc_norm": 0.4236111111111111, + "acc_norm_stderr": 0.04132125019723368 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.66, + "acc_stderr": 0.04760952285695238, + "acc_norm": 0.66, + "acc_norm_stderr": 0.04760952285695238 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5057803468208093, + "acc_stderr": 0.026917296179149116, + "acc_norm": 0.5057803468208093, + "acc_norm_stderr": 0.026917296179149116 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4723926380368098, + "acc_stderr": 0.039223782906109894, + "acc_norm": 0.4723926380368098, + "acc_norm_stderr": 0.039223782906109894 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.027801656212323667, + "acc_norm": 0.48148148148148145, + 
"acc_norm_stderr": 0.027801656212323667 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5854922279792746, + "acc_stderr": 0.035553003195576686, + "acc_norm": 0.5854922279792746, + "acc_norm_stderr": 0.035553003195576686 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.35964912280701755, + "acc_stderr": 0.04514496132873632, + "acc_norm": 0.35964912280701755, + "acc_norm_stderr": 0.04514496132873632 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6275229357798165, + "acc_stderr": 0.020728368457638494, + "acc_norm": 0.6275229357798165, + "acc_norm_stderr": 0.020728368457638494 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2698412698412698, + "acc_stderr": 0.03970158273235173, + "acc_norm": 0.2698412698412698, + "acc_norm_stderr": 0.03970158273235173 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5196078431372549, + "acc_stderr": 0.028607893699576073, + "acc_norm": 0.5196078431372549, + "acc_norm_stderr": 0.028607893699576073 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.55, + "acc_stderr": 0.05, + "acc_norm": 0.55, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5785123966942148, + "acc_stderr": 0.04507732278775087, + "acc_norm": 0.5785123966942148, + "acc_norm_stderr": 0.04507732278775087 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4934210526315789, + "acc_stderr": 0.040685900502249704, + "acc_norm": 0.4934210526315789, + "acc_norm_stderr": 0.040685900502249704 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.019944914136873583, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.019944914136873583 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3617021276595745, + "acc_stderr": 0.0286638201471995, + "acc_norm": 
0.3617021276595745, + "acc_norm_stderr": 0.0286638201471995 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25892857142857145, + "acc_stderr": 0.041577515398656284, + "acc_norm": 0.25892857142857145, + "acc_norm_stderr": 0.041577515398656284 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.33796296296296297, + "acc_stderr": 0.03225941352631295, + "acc_norm": 0.33796296296296297, + "acc_norm_stderr": 0.03225941352631295 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24134078212290502, + "acc_stderr": 0.014310999547961441, + "acc_norm": 0.24134078212290502, + "acc_norm_stderr": 0.014310999547961441 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4485294117647059, + "acc_stderr": 0.0302114796091216, + "acc_norm": 0.4485294117647059, + "acc_norm_stderr": 0.0302114796091216 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4816326530612245, + "acc_stderr": 0.031987615467631264, + "acc_norm": 0.4816326530612245, + "acc_norm_stderr": 0.031987615467631264 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.620253164556962, + "acc_stderr": 0.0315918875296585, + "acc_norm": 0.620253164556962, + "acc_norm_stderr": 0.0315918875296585 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3305084745762712, + "acc_stderr": 0.012014142101842974, + "acc_norm": 0.3305084745762712, + "acc_norm_stderr": 0.012014142101842974 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5343137254901961, + "acc_stderr": 0.03501038327635897, + "acc_norm": 0.5343137254901961, + "acc_norm_stderr": 0.03501038327635897 + }, + "harness|ko_mmlu_high_school_european_history|5": { + 
"acc": 0.5515151515151515, + "acc_stderr": 0.038835659779569286, + "acc_norm": 0.5515151515151515, + "acc_norm_stderr": 0.038835659779569286 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2594859241126071, + "mc1_stderr": 0.015345409485557956, + "mc2": 0.41345739770630174, + "mc2_stderr": 0.014785029688685922 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5714285714285714, + "acc_stderr": 0.01701403811929749, + "acc_norm": 0.6186540731995277, + "acc_norm_stderr": 0.016699301768828084 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + 
"harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "GAI-LLM/Yi-Ko-6B-smash", + "model_sha": "8b0f29ce0c792414e986f2c8b1fe59d68cb874cd", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/GAI-LLM/Yi-Ko-6B_mixed_v10/result_2023-12-19 13:50:11.json b/GAI-LLM/Yi-Ko-6B_mixed_v10/result_2023-12-19 13:50:11.json new file mode 100644 index 0000000000000000000000000000000000000000..de2069446a9bf3f77f7f20adf473f0f9fad47ec7 --- /dev/null +++ b/GAI-LLM/Yi-Ko-6B_mixed_v10/result_2023-12-19 13:50:11.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.35494880546075086, + "acc_stderr": 0.013983036904094089, + 
"acc_norm": 0.4308873720136519, + "acc_norm_stderr": 0.014471133392642475 + }, + "harness|ko_hellaswag|10": { + "acc": 0.39852619000199163, + "acc_stderr": 0.004885942040894558, + "acc_norm": 0.5380402310296754, + "acc_norm_stderr": 0.004975319435777093 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5087719298245614, + "acc_stderr": 0.038342347441649924, + "acc_norm": 0.5087719298245614, + "acc_norm_stderr": 0.038342347441649924 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5631067961165048, + "acc_stderr": 0.04911147107365777, + "acc_norm": 0.5631067961165048, + "acc_norm_stderr": 0.04911147107365777 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.558109833971903, + "acc_stderr": 0.017758800534214414, + "acc_norm": 0.558109833971903, + "acc_norm_stderr": 0.017758800534214414 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4962962962962963, + "acc_stderr": 0.043192236258113303, + "acc_norm": 0.4962962962962963, + "acc_norm_stderr": 0.043192236258113303 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4340425531914894, + "acc_stderr": 0.03240038086792747, + "acc_norm": 0.4340425531914894, + "acc_norm_stderr": 0.03240038086792747 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39156626506024095, + "acc_stderr": 0.037998574544796354, + "acc_norm": 0.39156626506024095, + "acc_norm_stderr": 0.037998574544796354 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5016077170418006, + "acc_stderr": 0.02839794490780661, + "acc_norm": 0.5016077170418006, + "acc_norm_stderr": 0.02839794490780661 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.484304932735426, + "acc_stderr": 0.0335412657542081, + "acc_norm": 0.484304932735426, + "acc_norm_stderr": 0.0335412657542081 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5190839694656488, + "acc_stderr": 0.043820947055509867, 
+ "acc_norm": 0.5190839694656488, + "acc_norm_stderr": 0.043820947055509867 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6313131313131313, + "acc_stderr": 0.034373055019806184, + "acc_norm": 0.6313131313131313, + "acc_norm_stderr": 0.034373055019806184 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5310344827586206, + "acc_stderr": 0.04158632762097828, + "acc_norm": 0.5310344827586206, + "acc_norm_stderr": 0.04158632762097828 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.30392156862745096, + "acc_stderr": 0.04576665403207762, + "acc_norm": 0.30392156862745096, + "acc_norm_stderr": 0.04576665403207762 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5294117647058824, + "acc_stderr": 0.03242225027115006, + "acc_norm": 0.5294117647058824, + "acc_norm_stderr": 0.03242225027115006 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.49743589743589745, + "acc_stderr": 0.025350672979412202, + "acc_norm": 0.49743589743589745, + "acc_norm_stderr": 0.025350672979412202 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.59, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.59, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5185185185185185, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.5185185185185185, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.43349753694581283, + "acc_stderr": 0.03486731727419872, + "acc_norm": 0.43349753694581283, + "acc_norm_stderr": 0.03486731727419872 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.532258064516129, + 
"acc_stderr": 0.028384747788813336, + "acc_norm": 0.532258064516129, + "acc_norm_stderr": 0.028384747788813336 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7435897435897436, + "acc_stderr": 0.02860595370200425, + "acc_norm": 0.7435897435897436, + "acc_norm_stderr": 0.02860595370200425 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4830188679245283, + "acc_stderr": 0.030755120364119905, + "acc_norm": 0.4830188679245283, + "acc_norm_stderr": 0.030755120364119905 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4909090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.4909090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3074074074074074, + "acc_stderr": 0.028133252578815635, + "acc_norm": 0.3074074074074074, + "acc_norm_stderr": 0.028133252578815635 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.36423841059602646, + "acc_stderr": 0.03929111781242741, + "acc_norm": 0.36423841059602646, + "acc_norm_stderr": 0.03929111781242741 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6417910447761194, + "acc_stderr": 0.03390393042268814, + "acc_norm": 0.6417910447761194, + "acc_norm_stderr": 0.03390393042268814 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4393063583815029, + "acc_stderr": 0.03784271932887467, + "acc_norm": 0.4393063583815029, + "acc_norm_stderr": 0.03784271932887467 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3306878306878307, + "acc_stderr": 0.024229965298425082, + "acc_norm": 0.3306878306878307, + "acc_norm_stderr": 0.024229965298425082 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4236111111111111, + "acc_stderr": 0.041321250197233685, + "acc_norm": 0.4236111111111111, + "acc_norm_stderr": 0.041321250197233685 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + 
"harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.47109826589595377, + "acc_stderr": 0.026874085883518348, + "acc_norm": 0.47109826589595377, + "acc_norm_stderr": 0.026874085883518348 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4723926380368098, + "acc_stderr": 0.039223782906109894, + "acc_norm": 0.4723926380368098, + "acc_norm_stderr": 0.039223782906109894 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5246913580246914, + "acc_stderr": 0.027786800931427443, + "acc_norm": 0.5246913580246914, + "acc_norm_stderr": 0.027786800931427443 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5958549222797928, + "acc_stderr": 0.0354150857888402, + "acc_norm": 0.5958549222797928, + "acc_norm_stderr": 0.0354150857888402 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04434600701584925, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04434600701584925 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.618348623853211, + "acc_stderr": 0.020828148517022603, + "acc_norm": 0.618348623853211, + "acc_norm_stderr": 0.020828148517022603 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.04134913018303316, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.04134913018303316 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5228758169934641, + "acc_stderr": 0.028599936776089775, + "acc_norm": 0.5228758169934641, + "acc_norm_stderr": 0.028599936776089775 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + 
"harness|ko_mmlu_international_law|5": { + "acc": 0.6611570247933884, + "acc_stderr": 0.043207678075366705, + "acc_norm": 0.6611570247933884, + "acc_norm_stderr": 0.043207678075366705 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4934210526315789, + "acc_stderr": 0.040685900502249704, + "acc_norm": 0.4934210526315789, + "acc_norm_stderr": 0.040685900502249704 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4019607843137255, + "acc_stderr": 0.019835176484375383, + "acc_norm": 0.4019607843137255, + "acc_norm_stderr": 0.019835176484375383 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.33687943262411346, + "acc_stderr": 0.02819553487396673, + "acc_norm": 0.33687943262411346, + "acc_norm_stderr": 0.02819553487396673 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.26785714285714285, + "acc_stderr": 0.04203277291467762, + "acc_norm": 0.26785714285714285, + "acc_norm_stderr": 0.04203277291467762 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.03141554629402543, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.03141554629402543 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24134078212290502, + "acc_stderr": 0.014310999547961441, + "acc_norm": 0.24134078212290502, + "acc_norm_stderr": 0.014310999547961441 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.41911764705882354, + "acc_stderr": 0.029972807170464626, + "acc_norm": 0.41911764705882354, + "acc_norm_stderr": 0.029972807170464626 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.031680911612338825, + 
"acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.031680911612338825 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6413502109704642, + "acc_stderr": 0.031219569445301847, + "acc_norm": 0.6413502109704642, + "acc_norm_stderr": 0.031219569445301847 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.32659713168187743, + "acc_stderr": 0.011977676704715997, + "acc_norm": 0.32659713168187743, + "acc_norm_stderr": 0.011977676704715997 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.553921568627451, + "acc_stderr": 0.034888454513049734, + "acc_norm": 0.553921568627451, + "acc_norm_stderr": 0.034888454513049734 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5636363636363636, + "acc_stderr": 0.03872592983524754, + "acc_norm": 0.5636363636363636, + "acc_norm_stderr": 0.03872592983524754 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2692778457772338, + "mc1_stderr": 0.015528566637087298, + "mc2": 0.41566543261161853, + "mc2_stderr": 0.014814793696578963 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5631641086186541, + "acc_stderr": 0.01705263355985607, + "acc_norm": 0.5914994096812278, + "acc_norm_stderr": 0.016900062879427122 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, 
+ "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "GAI-LLM/Yi-Ko-6B_mixed_v10", + "model_sha": 
"5914204925b61986dfc048dcb78ccf5dc88d013e", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/GAI-LLM/ko-en-llama2-13b-mixed-v10/result_2023-11-27 05:26:03.json b/GAI-LLM/ko-en-llama2-13b-mixed-v10/result_2023-11-27 05:26:03.json new file mode 100644 index 0000000000000000000000000000000000000000..26b4f2ff5eeaf41588d269cb2e52b1f49365b8d1 --- /dev/null +++ b/GAI-LLM/ko-en-llama2-13b-mixed-v10/result_2023-11-27 05:26:03.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.386518771331058, + "acc_stderr": 0.014230084761910467, + "acc_norm": 0.45051194539249145, + "acc_norm_stderr": 0.014539646098471627 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4208325034853615, + "acc_stderr": 0.004926837572202161, + "acc_norm": 0.5668193586934873, + "acc_norm_stderr": 0.0049450236570322765 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5146198830409356, + "acc_stderr": 0.038331852752130254, + "acc_norm": 0.5146198830409356, + "acc_norm_stderr": 0.038331852752130254 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4368932038834951, + "acc_stderr": 0.049111471073657764, + "acc_norm": 0.4368932038834951, + "acc_norm_stderr": 0.049111471073657764 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5223499361430396, + "acc_stderr": 0.01786209177850786, + "acc_norm": 0.5223499361430396, + "acc_norm_stderr": 0.01786209177850786 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.5037037037037037, + "acc_stderr": 0.043192236258113324, + "acc_norm": 0.5037037037037037, + "acc_norm_stderr": 0.043192236258113324 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3872340425531915, + "acc_stderr": 0.03184389265339526, + 
"acc_norm": 0.3872340425531915, + "acc_norm_stderr": 0.03184389265339526 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4036144578313253, + "acc_stderr": 0.03819486140758398, + "acc_norm": 0.4036144578313253, + "acc_norm_stderr": 0.03819486140758398 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4983922829581994, + "acc_stderr": 0.02839794490780661, + "acc_norm": 0.4983922829581994, + "acc_norm_stderr": 0.02839794490780661 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5022421524663677, + "acc_stderr": 0.03355746535223263, + "acc_norm": 0.5022421524663677, + "acc_norm_stderr": 0.03355746535223263 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5038167938931297, + "acc_stderr": 0.043851623256015534, + "acc_norm": 0.5038167938931297, + "acc_norm_stderr": 0.043851623256015534 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.51010101010101, + "acc_stderr": 0.035616254886737454, + "acc_norm": 0.51010101010101, + "acc_norm_stderr": 0.035616254886737454 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.38620689655172413, + "acc_stderr": 0.04057324734419035, + "acc_norm": 0.38620689655172413, + "acc_norm_stderr": 0.04057324734419035 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.04023382273617747, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.04023382273617747 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4327731092436975, + "acc_stderr": 0.03218358107742613, + "acc_norm": 0.4327731092436975, + "acc_norm_stderr": 0.03218358107742613 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.40512820512820513, + "acc_stderr": 0.024890471769938152, + "acc_norm": 0.40512820512820513, + "acc_norm_stderr": 0.024890471769938152 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 
0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5092592592592593, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.5092592592592593, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39901477832512317, + "acc_stderr": 0.03445487686264716, + "acc_norm": 0.39901477832512317, + "acc_norm_stderr": 0.03445487686264716 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.43548387096774194, + "acc_stderr": 0.028206225591502734, + "acc_norm": 0.43548387096774194, + "acc_norm_stderr": 0.028206225591502734 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6196581196581197, + "acc_stderr": 0.03180425204384099, + "acc_norm": 0.6196581196581197, + "acc_norm_stderr": 0.03180425204384099 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.42641509433962266, + "acc_stderr": 0.03043779434298305, + "acc_norm": 0.42641509433962266, + "acc_norm_stderr": 0.03043779434298305 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.45454545454545453, + "acc_stderr": 0.04769300568972743, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.04769300568972743 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.02696242432507382, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.02696242432507382 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33112582781456956, + "acc_stderr": 0.038425817186598696, + "acc_norm": 0.33112582781456956, + "acc_norm_stderr": 0.038425817186598696 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5970149253731343, + "acc_stderr": 0.034683432951111266, + "acc_norm": 0.5970149253731343, + "acc_norm_stderr": 0.034683432951111266 + }, + 
"harness|ko_mmlu_college_medicine|5": { + "acc": 0.32947976878612717, + "acc_stderr": 0.03583901754736413, + "acc_norm": 0.32947976878612717, + "acc_norm_stderr": 0.03583901754736413 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2724867724867725, + "acc_stderr": 0.02293097307163335, + "acc_norm": 0.2724867724867725, + "acc_norm_stderr": 0.02293097307163335 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3541666666666667, + "acc_stderr": 0.039994111357535424, + "acc_norm": 0.3541666666666667, + "acc_norm_stderr": 0.039994111357535424 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.64, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.64, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4884393063583815, + "acc_stderr": 0.02691189868637792, + "acc_norm": 0.4884393063583815, + "acc_norm_stderr": 0.02691189868637792 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4171779141104294, + "acc_stderr": 0.0387410285981808, + "acc_norm": 0.4171779141104294, + "acc_norm_stderr": 0.0387410285981808 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.44753086419753085, + "acc_stderr": 0.027667138569422704, + "acc_norm": 0.44753086419753085, + "acc_norm_stderr": 0.027667138569422704 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.44041450777202074, + "acc_stderr": 0.03582724530036094, + "acc_norm": 0.44041450777202074, + "acc_norm_stderr": 0.03582724530036094 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.041424397194893596, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 
0.041424397194893596 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5211009174311927, + "acc_stderr": 0.02141822475426464, + "acc_norm": 0.5211009174311927, + "acc_norm_stderr": 0.02141822475426464 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.24603174603174602, + "acc_stderr": 0.03852273364924315, + "acc_norm": 0.24603174603174602, + "acc_norm_stderr": 0.03852273364924315 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4084967320261438, + "acc_stderr": 0.028146405993096358, + "acc_norm": 0.4084967320261438, + "acc_norm_stderr": 0.028146405993096358 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.628099173553719, + "acc_stderr": 0.04412015806624504, + "acc_norm": 0.628099173553719, + "acc_norm_stderr": 0.04412015806624504 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.39473684210526316, + "acc_stderr": 0.039777499346220734, + "acc_norm": 0.39473684210526316, + "acc_norm_stderr": 0.039777499346220734 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3790849673202614, + "acc_stderr": 0.019627444748412232, + "acc_norm": 0.3790849673202614, + "acc_norm_stderr": 0.019627444748412232 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.28368794326241137, + "acc_stderr": 0.026891709428343957, + "acc_norm": 0.28368794326241137, + "acc_norm_stderr": 0.026891709428343957 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25892857142857145, + "acc_stderr": 0.04157751539865629, + "acc_norm": 0.25892857142857145, + "acc_norm_stderr": 0.04157751539865629 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2361111111111111, + "acc_stderr": 0.02896370257079105, + "acc_norm": 0.2361111111111111, + "acc_norm_stderr": 0.02896370257079105 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 
0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3897058823529412, + "acc_stderr": 0.02962466358115969, + "acc_norm": 0.3897058823529412, + "acc_norm_stderr": 0.02962466358115969 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.45714285714285713, + "acc_stderr": 0.03189141832421397, + "acc_norm": 0.45714285714285713, + "acc_norm_stderr": 0.03189141832421397 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5780590717299579, + "acc_stderr": 0.032148146302403695, + "acc_norm": 0.5780590717299579, + "acc_norm_stderr": 0.032148146302403695 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2940026075619296, + "acc_stderr": 0.011636062953698605, + "acc_norm": 0.2940026075619296, + "acc_norm_stderr": 0.011636062953698605 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4019607843137255, + "acc_stderr": 0.034411900234824655, + "acc_norm": 0.4019607843137255, + "acc_norm_stderr": 0.034411900234824655 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5393939393939394, + "acc_stderr": 0.03892207016552012, + "acc_norm": 0.5393939393939394, + "acc_norm_stderr": 0.03892207016552012 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2717258261933905, + "mc1_stderr": 0.01557284045287583, + "mc2": 0.4226153874486002, + "mc2_stderr": 0.014807061474258669 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5324675324675324, + "acc_stderr": 0.017154073716682868, + "acc_norm": 0.6174734356552538, + "acc_norm_stderr": 0.016709165387228834 + } + }, + "versions": { + "all": 0, + 
"harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + 
"harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "GAI-LLM/ko-en-llama2-13b-mixed-v10", + "model_sha": "e7ea3b47dd5fd7b9ce6573b18f9f5801f772017d", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/GAI-LLM/ko-en-llama2-13b-mixed-v3/result_2023-10-23 00:18:31.json b/GAI-LLM/ko-en-llama2-13b-mixed-v3/result_2023-10-23 00:18:31.json new file mode 100644 index 0000000000000000000000000000000000000000..ad4b7ab1471e58276331b390c2b7148ea94dbe05 --- /dev/null +++ b/GAI-LLM/ko-en-llama2-13b-mixed-v3/result_2023-10-23 00:18:31.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3728668941979522, + "acc_stderr": 0.01413117676013117, + "acc_norm": 0.42406143344709896, + "acc_norm_stderr": 0.014441889627464394 + }, + "harness|ko_hellaswag|10": { + "acc": 0.40689105755825533, + "acc_stderr": 0.004902502514738606, + "acc_norm": 0.5433180641306513, + "acc_norm_stderr": 0.004971019942726589 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5146198830409356, + "acc_stderr": 0.038331852752130254, + "acc_norm": 0.5146198830409356, + "acc_norm_stderr": 0.038331852752130254 + }, + "harness|ko_mmlu_management|5": { 
+ "acc": 0.44660194174757284, + "acc_stderr": 0.04922424153458933, + "acc_norm": 0.44660194174757284, + "acc_norm_stderr": 0.04922424153458933 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4878671775223499, + "acc_stderr": 0.01787469866749135, + "acc_norm": 0.4878671775223499, + "acc_norm_stderr": 0.01787469866749135 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.362962962962963, + "acc_stderr": 0.041539484047424, + "acc_norm": 0.362962962962963, + "acc_norm_stderr": 0.041539484047424 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206824, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206824 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3148936170212766, + "acc_stderr": 0.030363582197238167, + "acc_norm": 0.3148936170212766, + "acc_norm_stderr": 0.030363582197238167 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4397590361445783, + "acc_stderr": 0.03864139923699122, + "acc_norm": 0.4397590361445783, + "acc_norm_stderr": 0.03864139923699122 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4694533762057878, + "acc_stderr": 0.028345045864840678, + "acc_norm": 0.4694533762057878, + "acc_norm_stderr": 0.028345045864840678 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3721973094170404, + "acc_stderr": 0.03244305283008731, + "acc_norm": 0.3721973094170404, + "acc_norm_stderr": 0.03244305283008731 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48091603053435117, + "acc_stderr": 0.04382094705550989, + "acc_norm": 0.48091603053435117, + "acc_norm_stderr": 0.04382094705550989 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720683, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720683 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5151515151515151, + "acc_stderr": 0.0356071651653106, + "acc_norm": 0.5151515151515151, + "acc_norm_stderr": 0.0356071651653106 + }, + "harness|ko_mmlu_electrical_engineering|5": { + 
"acc": 0.3724137931034483, + "acc_stderr": 0.0402873153294756, + "acc_norm": 0.3724137931034483, + "acc_norm_stderr": 0.0402873153294756 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237655, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237655 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3697478991596639, + "acc_stderr": 0.03135709599613591, + "acc_norm": 0.3697478991596639, + "acc_norm_stderr": 0.03135709599613591 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.358974358974359, + "acc_stderr": 0.02432173848460237, + "acc_norm": 0.358974358974359, + "acc_norm_stderr": 0.02432173848460237 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.04766075165356461, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.04766075165356461 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.35467980295566504, + "acc_stderr": 0.03366124489051449, + "acc_norm": 0.35467980295566504, + "acc_norm_stderr": 0.03366124489051449 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.45483870967741935, + "acc_stderr": 0.028327743091561053, + "acc_norm": 0.45483870967741935, + "acc_norm_stderr": 0.028327743091561053 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5683760683760684, + "acc_stderr": 0.0324483553531149, + "acc_norm": 0.5683760683760684, + "acc_norm_stderr": 0.0324483553531149 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4528301886792453, + "acc_stderr": 0.03063562795796182, + "acc_norm": 0.4528301886792453, + "acc_norm_stderr": 0.03063562795796182 + }, + 
"harness|ko_mmlu_public_relations|5": { + "acc": 0.4727272727272727, + "acc_stderr": 0.04782001791380063, + "acc_norm": 0.4727272727272727, + "acc_norm_stderr": 0.04782001791380063 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.026962424325073828, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.026962424325073828 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + "acc_stderr": 0.03684881521389023, + "acc_norm": 0.2847682119205298, + "acc_norm_stderr": 0.03684881521389023 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5472636815920398, + "acc_stderr": 0.03519702717576915, + "acc_norm": 0.5472636815920398, + "acc_norm_stderr": 0.03519702717576915 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3583815028901734, + "acc_stderr": 0.03656343653353159, + "acc_norm": 0.3583815028901734, + "acc_norm_stderr": 0.03656343653353159 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30423280423280424, + "acc_stderr": 0.023695415009463084, + "acc_norm": 0.30423280423280424, + "acc_norm_stderr": 0.023695415009463084 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.03852084696008534, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.03852084696008534 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4190751445086705, + "acc_stderr": 0.02656417811142262, + "acc_norm": 0.4190751445086705, + "acc_norm_stderr": 0.02656417811142262 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.34355828220858897, + "acc_stderr": 0.03731133519673893, + "acc_norm": 0.34355828220858897, + 
"acc_norm_stderr": 0.03731133519673893 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4660493827160494, + "acc_stderr": 0.027756535257347666, + "acc_norm": 0.4660493827160494, + "acc_norm_stderr": 0.027756535257347666 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.40932642487046633, + "acc_stderr": 0.03548608168860806, + "acc_norm": 0.40932642487046633, + "acc_norm_stderr": 0.03548608168860806 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.040969851398436716, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.040969851398436716 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.44954128440366975, + "acc_stderr": 0.021327881417823363, + "acc_norm": 0.44954128440366975, + "acc_norm_stderr": 0.021327881417823363 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.25396825396825395, + "acc_stderr": 0.03893259610604675, + "acc_norm": 0.25396825396825395, + "acc_norm_stderr": 0.03893259610604675 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.027826109307283693, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.027826109307283693 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5206611570247934, + "acc_stderr": 0.04560456086387235, + "acc_norm": 0.5206611570247934, + "acc_norm_stderr": 0.04560456086387235 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40131578947368424, + "acc_stderr": 0.03988903703336284, + "acc_norm": 0.40131578947368424, + "acc_norm_stderr": 0.03988903703336284 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.018771683893528176, + 
"acc_norm": 0.3137254901960784, + "acc_norm_stderr": 0.018771683893528176 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2907801418439716, + "acc_stderr": 0.027090664368353178, + "acc_norm": 0.2907801418439716, + "acc_norm_stderr": 0.027090664368353178 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.22321428571428573, + "acc_stderr": 0.039523019677025116, + "acc_norm": 0.22321428571428573, + "acc_norm_stderr": 0.039523019677025116 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2916666666666667, + "acc_stderr": 0.03099866630456053, + "acc_norm": 0.2916666666666667, + "acc_norm_stderr": 0.03099866630456053 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.20220588235294118, + "acc_stderr": 0.02439819298665492, + "acc_norm": 0.20220588235294118, + "acc_norm_stderr": 0.02439819298665492 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.43673469387755104, + "acc_stderr": 0.031751952375833226, + "acc_norm": 0.43673469387755104, + "acc_norm_stderr": 0.031751952375833226 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.4810126582278481, + "acc_stderr": 0.03252375148090448, + "acc_norm": 0.4810126582278481, + "acc_norm_stderr": 0.03252375148090448 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.29726205997392435, + "acc_stderr": 0.011673346173086034, + "acc_norm": 0.29726205997392435, + "acc_norm_stderr": 0.011673346173086034 + }, + 
"harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.36764705882352944, + "acc_stderr": 0.03384132045674118, + "acc_norm": 0.36764705882352944, + "acc_norm_stderr": 0.03384132045674118 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.46060606060606063, + "acc_stderr": 0.03892207016552013, + "acc_norm": 0.46060606060606063, + "acc_norm_stderr": 0.03892207016552013 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2558139534883721, + "mc1_stderr": 0.015274176219283347, + "mc2": 0.41687077666896594, + "mc2_stderr": 0.014804732810744745 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4510035419126328, + "acc_stderr": 0.017107618859549353, + "acc_norm": 0.538370720188902, + "acc_norm_stderr": 0.01713966022184555 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 
1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "GAI-LLM/ko-en-llama2-13b-mixed-v3", + "model_sha": "c3e43fecfbbd3adc1ea335de10e23b90452cf081", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/GAI-LLM/ko-en-llama2-13b-mixed-v4/result_2023-10-26 04:39:06.json b/GAI-LLM/ko-en-llama2-13b-mixed-v4/result_2023-10-26 04:39:06.json new file mode 100644 index 
0000000000000000000000000000000000000000..ce04ab8ad7d5ee28a818114c36f569b880c92bb9 --- /dev/null +++ b/GAI-LLM/ko-en-llama2-13b-mixed-v4/result_2023-10-26 04:39:06.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.37627986348122866, + "acc_stderr": 0.014157022555407166, + "acc_norm": 0.4300341296928328, + "acc_norm_stderr": 0.014467631559137998 + }, + "harness|ko_hellaswag|10": { + "acc": 0.407787293367855, + "acc_stderr": 0.004904189257891276, + "acc_norm": 0.5450109539932284, + "acc_norm_stderr": 0.004969521827957934 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5321637426900585, + "acc_stderr": 0.03826882417660368, + "acc_norm": 0.5321637426900585, + "acc_norm_stderr": 0.03826882417660368 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4563106796116505, + "acc_stderr": 0.049318019942204146, + "acc_norm": 0.4563106796116505, + "acc_norm_stderr": 0.049318019942204146 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5019157088122606, + "acc_stderr": 0.017879832259026677, + "acc_norm": 0.5019157088122606, + "acc_norm_stderr": 0.017879832259026677 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.35555555555555557, + "acc_stderr": 0.04135176749720386, + "acc_norm": 0.35555555555555557, + "acc_norm_stderr": 0.04135176749720386 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3574468085106383, + "acc_stderr": 0.03132941789476425, + "acc_norm": 0.3574468085106383, + "acc_norm_stderr": 0.03132941789476425 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.43373493975903615, + "acc_stderr": 0.03858158940685515, + "acc_norm": 0.43373493975903615, + "acc_norm_stderr": 0.03858158940685515 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.47266881028938906, + "acc_stderr": 0.02835563356832818, + "acc_norm": 0.47266881028938906, + "acc_norm_stderr": 
0.02835563356832818 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3542600896860987, + "acc_stderr": 0.03210062154134986, + "acc_norm": 0.3542600896860987, + "acc_norm_stderr": 0.03210062154134986 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48854961832061067, + "acc_stderr": 0.043841400240780176, + "acc_norm": 0.48854961832061067, + "acc_norm_stderr": 0.043841400240780176 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5252525252525253, + "acc_stderr": 0.03557806245087314, + "acc_norm": 0.5252525252525253, + "acc_norm_stderr": 0.03557806245087314 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4, + "acc_stderr": 0.04082482904638628, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04082482904638628 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.04440521906179327, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.04440521906179327 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.37815126050420167, + "acc_stderr": 0.031499305777849054, + "acc_norm": 0.37815126050420167, + "acc_norm_stderr": 0.031499305777849054 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.36666666666666664, + "acc_stderr": 0.024433016466052445, + "acc_norm": 0.36666666666666664, + "acc_norm_stderr": 0.024433016466052445 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.04750077341199984, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 
0.04750077341199984 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3891625615763547, + "acc_stderr": 0.03430462416103872, + "acc_norm": 0.3891625615763547, + "acc_norm_stderr": 0.03430462416103872 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.45483870967741935, + "acc_stderr": 0.028327743091561056, + "acc_norm": 0.45483870967741935, + "acc_norm_stderr": 0.028327743091561056 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5811965811965812, + "acc_stderr": 0.03232128912157792, + "acc_norm": 0.5811965811965812, + "acc_norm_stderr": 0.03232128912157792 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.44528301886792454, + "acc_stderr": 0.03058805297427065, + "acc_norm": 0.44528301886792454, + "acc_norm_stderr": 0.03058805297427065 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4727272727272727, + "acc_stderr": 0.04782001791380063, + "acc_norm": 0.4727272727272727, + "acc_norm_stderr": 0.04782001791380063 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25555555555555554, + "acc_stderr": 0.026593939101844072, + "acc_norm": 0.25555555555555554, + "acc_norm_stderr": 0.026593939101844072 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.037101857261199946, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.037101857261199946 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5771144278606966, + "acc_stderr": 0.034932317774212816, + "acc_norm": 0.5771144278606966, + "acc_norm_stderr": 0.034932317774212816 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3468208092485549, + "acc_stderr": 0.03629146670159664, + "acc_norm": 0.3468208092485549, + "acc_norm_stderr": 0.03629146670159664 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.29894179894179895, + "acc_stderr": 0.023577604791655795, + "acc_norm": 0.29894179894179895, + "acc_norm_stderr": 0.023577604791655795 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 
0.3263888888888889, + "acc_stderr": 0.03921067198982266, + "acc_norm": 0.3263888888888889, + "acc_norm_stderr": 0.03921067198982266 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932269, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932269 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4190751445086705, + "acc_stderr": 0.026564178111422622, + "acc_norm": 0.4190751445086705, + "acc_norm_stderr": 0.026564178111422622 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3496932515337423, + "acc_stderr": 0.037466683254700206, + "acc_norm": 0.3496932515337423, + "acc_norm_stderr": 0.037466683254700206 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.44753086419753085, + "acc_stderr": 0.02766713856942271, + "acc_norm": 0.44753086419753085, + "acc_norm_stderr": 0.02766713856942271 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.42487046632124353, + "acc_stderr": 0.0356747133521254, + "acc_norm": 0.42487046632124353, + "acc_norm_stderr": 0.0356747133521254 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.042270544512321984, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.042270544512321984 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.46055045871559636, + "acc_stderr": 0.021370494609995096, + "acc_norm": 0.46055045871559636, + "acc_norm_stderr": 0.021370494609995096 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2698412698412698, + "acc_stderr": 0.03970158273235173, + "acc_norm": 0.2698412698412698, + "acc_norm_stderr": 0.03970158273235173 + }, + "harness|ko_mmlu_nutrition|5": { + 
"acc": 0.39869281045751637, + "acc_stderr": 0.02803609227389177, + "acc_norm": 0.39869281045751637, + "acc_norm_stderr": 0.02803609227389177 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5289256198347108, + "acc_stderr": 0.04556710331269498, + "acc_norm": 0.5289256198347108, + "acc_norm_stderr": 0.04556710331269498 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.39473684210526316, + "acc_stderr": 0.039777499346220734, + "acc_norm": 0.39473684210526316, + "acc_norm_stderr": 0.039777499346220734 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.315359477124183, + "acc_stderr": 0.018798086284886887, + "acc_norm": 0.315359477124183, + "acc_norm_stderr": 0.018798086284886887 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.30141843971631205, + "acc_stderr": 0.027374128882631157, + "acc_norm": 0.30141843971631205, + "acc_norm_stderr": 0.027374128882631157 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.21428571428571427, + "acc_stderr": 0.038946411200447915, + "acc_norm": 0.21428571428571427, + "acc_norm_stderr": 0.038946411200447915 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2916666666666667, + "acc_stderr": 0.030998666304560534, + "acc_norm": 0.2916666666666667, + "acc_norm_stderr": 0.030998666304560534 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562428 + }, + 
"harness|ko_mmlu_professional_medicine|5": { + "acc": 0.22058823529411764, + "acc_stderr": 0.025187786660227276, + "acc_norm": 0.22058823529411764, + "acc_norm_stderr": 0.025187786660227276 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.44081632653061226, + "acc_stderr": 0.03178419114175363, + "acc_norm": 0.44081632653061226, + "acc_norm_stderr": 0.03178419114175363 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5063291139240507, + "acc_stderr": 0.03254462010767859, + "acc_norm": 0.5063291139240507, + "acc_norm_stderr": 0.03254462010767859 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2920469361147327, + "acc_stderr": 0.011613349136271824, + "acc_norm": 0.2920469361147327, + "acc_norm_stderr": 0.011613349136271824 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4019607843137255, + "acc_stderr": 0.03441190023482465, + "acc_norm": 0.4019607843137255, + "acc_norm_stderr": 0.03441190023482465 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.03895658065271846, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.03895658065271846 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.25703794369645044, + "mc1_stderr": 0.01529807750948508, + "mc2": 0.42398241596571024, + "mc2_stderr": 0.014807345195706319 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4793388429752066, + "acc_stderr": 0.017175671279836442, + "acc_norm": 0.5619834710743802, + "acc_norm_stderr": 0.01705775370216029 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + 
"harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + 
"harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "GAI-LLM/ko-en-llama2-13b-mixed-v4", + "model_sha": "2d3a564cd23d0e97bb0f3354a148ef57e313661a", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/GAI-LLM/ko-en-llama2-13b-mixed-v5/result_2023-10-28 07:24:05.json b/GAI-LLM/ko-en-llama2-13b-mixed-v5/result_2023-10-28 07:24:05.json new file mode 100644 index 0000000000000000000000000000000000000000..99ec9e0086391dbc59d9412fa2d1c2820d3c4126 --- /dev/null +++ b/GAI-LLM/ko-en-llama2-13b-mixed-v5/result_2023-10-28 07:24:05.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.37542662116040953, + "acc_stderr": 0.014150631435111726, + "acc_norm": 0.431740614334471, + "acc_norm_stderr": 0.014474591427196202 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4082852021509659, + "acc_stderr": 0.00490511903984946, + "acc_norm": 0.5455088627763394, + "acc_norm_stderr": 0.004969070188763755 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5087719298245614, + "acc_stderr": 0.038342347441649924, + "acc_norm": 0.5087719298245614, + "acc_norm_stderr": 0.038342347441649924 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.47572815533980584, + "acc_stderr": 0.049449010929737795, + "acc_norm": 0.47572815533980584, + "acc_norm_stderr": 0.049449010929737795 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5019157088122606, + "acc_stderr": 0.017879832259026677, + "acc_norm": 0.5019157088122606, + "acc_norm_stderr": 0.017879832259026677 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37777777777777777, + "acc_stderr": 
0.04188307537595853, + "acc_norm": 0.37777777777777777, + "acc_norm_stderr": 0.04188307537595853 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3617021276595745, + "acc_stderr": 0.0314108219759624, + "acc_norm": 0.3617021276595745, + "acc_norm_stderr": 0.0314108219759624 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4397590361445783, + "acc_stderr": 0.03864139923699122, + "acc_norm": 0.4397590361445783, + "acc_norm_stderr": 0.03864139923699122 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4630225080385852, + "acc_stderr": 0.02832032583010591, + "acc_norm": 0.4630225080385852, + "acc_norm_stderr": 0.02832032583010591 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3542600896860987, + "acc_stderr": 0.03210062154134986, + "acc_norm": 0.3542600896860987, + "acc_norm_stderr": 0.03210062154134986 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.46564885496183206, + "acc_stderr": 0.043749285605997376, + "acc_norm": 0.46564885496183206, + "acc_norm_stderr": 0.043749285605997376 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5303030303030303, + "acc_stderr": 0.03555804051763929, + "acc_norm": 0.5303030303030303, + "acc_norm_stderr": 0.03555804051763929 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4, + "acc_stderr": 0.04082482904638628, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04082482904638628 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.04533838195929775, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.04533838195929775 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.39915966386554624, + "acc_stderr": 
0.03181110032413925, + "acc_norm": 0.39915966386554624, + "acc_norm_stderr": 0.03181110032413925 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3717948717948718, + "acc_stderr": 0.024503472557110956, + "acc_norm": 0.3717948717948718, + "acc_norm_stderr": 0.024503472557110956 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.42592592592592593, + "acc_stderr": 0.0478034362693679, + "acc_norm": 0.42592592592592593, + "acc_norm_stderr": 0.0478034362693679 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3891625615763547, + "acc_stderr": 0.03430462416103872, + "acc_norm": 0.3891625615763547, + "acc_norm_stderr": 0.03430462416103872 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.45806451612903226, + "acc_stderr": 0.02834378725054064, + "acc_norm": 0.45806451612903226, + "acc_norm_stderr": 0.02834378725054064 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5726495726495726, + "acc_stderr": 0.032408473935163266, + "acc_norm": 0.5726495726495726, + "acc_norm_stderr": 0.032408473935163266 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.44528301886792454, + "acc_stderr": 0.030588052974270655, + "acc_norm": 0.44528301886792454, + "acc_norm_stderr": 0.030588052974270655 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5, + "acc_stderr": 0.04789131426105757, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04789131426105757 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2518518518518518, + "acc_stderr": 0.026466117538959916, + "acc_norm": 0.2518518518518518, + "acc_norm_stderr": 0.026466117538959916 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + 
"acc_stderr": 0.03710185726119995, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.03710185726119995 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5621890547263682, + "acc_stderr": 0.0350808011219984, + "acc_norm": 0.5621890547263682, + "acc_norm_stderr": 0.0350808011219984 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3583815028901734, + "acc_stderr": 0.0365634365335316, + "acc_norm": 0.3583815028901734, + "acc_norm_stderr": 0.0365634365335316 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30158730158730157, + "acc_stderr": 0.023636975996101806, + "acc_norm": 0.30158730158730157, + "acc_norm_stderr": 0.023636975996101806 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.039420826399272135, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.039420826399272135 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.57, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.57, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.430635838150289, + "acc_stderr": 0.026658800273672376, + "acc_norm": 0.430635838150289, + "acc_norm_stderr": 0.026658800273672376 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.34355828220858897, + "acc_stderr": 0.03731133519673893, + "acc_norm": 0.34355828220858897, + "acc_norm_stderr": 0.03731133519673893 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4506172839506173, + "acc_stderr": 0.027684721415656206, + "acc_norm": 0.4506172839506173, + "acc_norm_stderr": 0.027684721415656206 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 
0.40932642487046633, + "acc_stderr": 0.03548608168860806, + "acc_norm": 0.40932642487046633, + "acc_norm_stderr": 0.03548608168860806 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.042270544512321984, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.042270544512321984 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.44587155963302755, + "acc_stderr": 0.02131133500970858, + "acc_norm": 0.44587155963302755, + "acc_norm_stderr": 0.02131133500970858 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.0404061017820884, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.0404061017820884 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4019607843137255, + "acc_stderr": 0.02807415894760066, + "acc_norm": 0.4019607843137255, + "acc_norm_stderr": 0.02807415894760066 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.512396694214876, + "acc_stderr": 0.04562951548180765, + "acc_norm": 0.512396694214876, + "acc_norm_stderr": 0.04562951548180765 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40131578947368424, + "acc_stderr": 0.03988903703336284, + "acc_norm": 0.40131578947368424, + "acc_norm_stderr": 0.03988903703336284 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.31699346405228757, + "acc_stderr": 0.018824219512706207, + "acc_norm": 0.31699346405228757, + "acc_norm_stderr": 0.018824219512706207 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3120567375886525, + "acc_stderr": 0.02764012054516994, + "acc_norm": 0.3120567375886525, + "acc_norm_stderr": 0.02764012054516994 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.21428571428571427, + "acc_stderr": 0.03894641120044792, + "acc_norm": 0.21428571428571427, + "acc_norm_stderr": 0.03894641120044792 + }, + 
"harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.03114144782353605, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.03114144782353605 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.41, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.21691176470588236, + "acc_stderr": 0.025035845227711247, + "acc_norm": 0.21691176470588236, + "acc_norm_stderr": 0.025035845227711247 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4448979591836735, + "acc_stderr": 0.031814251181977865, + "acc_norm": 0.4448979591836735, + "acc_norm_stderr": 0.031814251181977865 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.510548523206751, + "acc_stderr": 0.032539983791662855, + "acc_norm": 0.510548523206751, + "acc_norm_stderr": 0.032539983791662855 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2920469361147327, + "acc_stderr": 0.011613349136271824, + "acc_norm": 0.2920469361147327, + "acc_norm_stderr": 0.011613349136271824 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4019607843137255, + "acc_stderr": 0.03441190023482465, + "acc_norm": 0.4019607843137255, + "acc_norm_stderr": 0.03441190023482465 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.03895658065271846, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.03895658065271846 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.25458996328029376, + "mc1_stderr": 0.015250117079156463, + "mc2": 
0.4218804524380662, + "mc2_stderr": 0.01476995927431319 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.46989374262101535, + "acc_stderr": 0.017159163590170216, + "acc_norm": 0.5584415584415584, + "acc_norm_stderr": 0.017072525875563106 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + 
"harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "GAI-LLM/ko-en-llama2-13b-mixed-v5", + "model_sha": "6e6de7e1907464bd5dc9c9c9fd312983a0611cfb", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/GAI-LLM/llama-2-koen-13b-dpo-v1/result_2023-11-20 09:28:18.json b/GAI-LLM/llama-2-koen-13b-dpo-v1/result_2023-11-20 09:28:18.json new file mode 100644 index 0000000000000000000000000000000000000000..6d3d9904a2a25aa833af91019d3e7616776120aa --- /dev/null +++ b/GAI-LLM/llama-2-koen-13b-dpo-v1/result_2023-11-20 09:28:18.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.38139931740614336, + "acc_stderr": 0.014194389086685247, + "acc_norm": 0.4496587030716723, + "acc_norm_stderr": 0.014537144444284745 + }, + "harness|ko_hellaswag|10": { + "acc": 0.41744672376020714, + "acc_stderr": 0.004921300331285571, + "acc_norm": 0.5662218681537542, + "acc_norm_stderr": 
0.004945824056501814 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5087719298245614, + "acc_stderr": 0.038342347441649924, + "acc_norm": 0.5087719298245614, + "acc_norm_stderr": 0.038342347441649924 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4368932038834951, + "acc_stderr": 0.04911147107365776, + "acc_norm": 0.4368932038834951, + "acc_norm_stderr": 0.04911147107365776 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5223499361430396, + "acc_stderr": 0.01786209177850786, + "acc_norm": 0.5223499361430396, + "acc_norm_stderr": 0.01786209177850786 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.043163785995113245, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.043163785995113245 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39574468085106385, + "acc_stderr": 0.031967586978353627, + "acc_norm": 0.39574468085106385, + "acc_norm_stderr": 0.031967586978353627 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.40963855421686746, + "acc_stderr": 0.038284011150790206, + "acc_norm": 0.40963855421686746, + "acc_norm_stderr": 0.038284011150790206 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4630225080385852, + "acc_stderr": 0.02832032583010591, + "acc_norm": 0.4630225080385852, + "acc_norm_stderr": 0.02832032583010591 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5022421524663677, + "acc_stderr": 0.033557465352232634, + "acc_norm": 0.5022421524663677, + "acc_norm_stderr": 0.033557465352232634 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4732824427480916, + "acc_stderr": 0.04379024936553894, + "acc_norm": 0.4732824427480916, + "acc_norm_stderr": 0.04379024936553894 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 
0.04824181513244218 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5303030303030303, + "acc_stderr": 0.03555804051763929, + "acc_norm": 0.5303030303030303, + "acc_norm_stderr": 0.03555804051763929 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.38620689655172413, + "acc_stderr": 0.04057324734419035, + "acc_norm": 0.38620689655172413, + "acc_norm_stderr": 0.04057324734419035 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.17647058823529413, + "acc_stderr": 0.0379328118530781, + "acc_norm": 0.17647058823529413, + "acc_norm_stderr": 0.0379328118530781 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4327731092436975, + "acc_stderr": 0.03218358107742613, + "acc_norm": 0.4327731092436975, + "acc_norm_stderr": 0.03218358107742613 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4128205128205128, + "acc_stderr": 0.02496268356433182, + "acc_norm": 0.4128205128205128, + "acc_norm_stderr": 0.02496268356433182 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5277777777777778, + "acc_stderr": 0.04826217294139894, + "acc_norm": 0.5277777777777778, + "acc_norm_stderr": 0.04826217294139894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3793103448275862, + "acc_stderr": 0.034139638059062345, + "acc_norm": 0.3793103448275862, + "acc_norm_stderr": 0.034139638059062345 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.44193548387096776, + "acc_stderr": 0.028251557906849734, + "acc_norm": 0.44193548387096776, + "acc_norm_stderr": 0.028251557906849734 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6324786324786325, + "acc_stderr": 0.031585391577456365, + 
"acc_norm": 0.6324786324786325, + "acc_norm_stderr": 0.031585391577456365 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4226415094339623, + "acc_stderr": 0.030402331445769537, + "acc_norm": 0.4226415094339623, + "acc_norm_stderr": 0.030402331445769537 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5, + "acc_stderr": 0.04789131426105757, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04789131426105757 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.27037037037037037, + "acc_stderr": 0.027080372815145654, + "acc_norm": 0.27037037037037037, + "acc_norm_stderr": 0.027080372815145654 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.037101857261199946, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.037101857261199946 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5771144278606966, + "acc_stderr": 0.034932317774212816, + "acc_norm": 0.5771144278606966, + "acc_norm_stderr": 0.034932317774212816 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3583815028901734, + "acc_stderr": 0.03656343653353159, + "acc_norm": 0.3583815028901734, + "acc_norm_stderr": 0.03656343653353159 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2751322751322751, + "acc_stderr": 0.02300008685906864, + "acc_norm": 0.2751322751322751, + "acc_norm_stderr": 0.02300008685906864 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3263888888888889, + "acc_stderr": 0.03921067198982266, + "acc_norm": 0.3263888888888889, + "acc_norm_stderr": 0.03921067198982266 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4595375722543353, + "acc_stderr": 
0.02683080599895224, + "acc_norm": 0.4595375722543353, + "acc_norm_stderr": 0.02683080599895224 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.36809815950920244, + "acc_stderr": 0.03789213935838396, + "acc_norm": 0.36809815950920244, + "acc_norm_stderr": 0.03789213935838396 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4660493827160494, + "acc_stderr": 0.027756535257347663, + "acc_norm": 0.4660493827160494, + "acc_norm_stderr": 0.027756535257347663 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.43523316062176165, + "acc_stderr": 0.03578038165008586, + "acc_norm": 0.43523316062176165, + "acc_norm_stderr": 0.03578038165008586 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.040969851398436695, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.040969851398436695 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5100917431192661, + "acc_stderr": 0.02143295620345332, + "acc_norm": 0.5100917431192661, + "acc_norm_stderr": 0.02143295620345332 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.29365079365079366, + "acc_stderr": 0.04073524322147124, + "acc_norm": 0.29365079365079366, + "acc_norm_stderr": 0.04073524322147124 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4150326797385621, + "acc_stderr": 0.028213504177824093, + "acc_norm": 0.4150326797385621, + "acc_norm_stderr": 0.028213504177824093 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6115702479338843, + "acc_stderr": 0.04449270350068382, + "acc_norm": 0.6115702479338843, + "acc_norm_stderr": 0.04449270350068382 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 
0.4144736842105263, + "acc_stderr": 0.04008973785779206, + "acc_norm": 0.4144736842105263, + "acc_norm_stderr": 0.04008973785779206 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3627450980392157, + "acc_stderr": 0.019450768432505528, + "acc_norm": 0.3627450980392157, + "acc_norm_stderr": 0.019450768432505528 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3049645390070922, + "acc_stderr": 0.027464708442022135, + "acc_norm": 0.3049645390070922, + "acc_norm_stderr": 0.027464708442022135 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.23214285714285715, + "acc_stderr": 0.04007341809755806, + "acc_norm": 0.23214285714285715, + "acc_norm_stderr": 0.04007341809755806 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3194444444444444, + "acc_stderr": 0.0317987634217685, + "acc_norm": 0.3194444444444444, + "acc_norm_stderr": 0.0317987634217685 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.47058823529411764, + "acc_stderr": 0.03032024326500413, + "acc_norm": 0.47058823529411764, + "acc_norm_stderr": 0.03032024326500413 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4775510204081633, + "acc_stderr": 0.03197694118713673, + "acc_norm": 0.4775510204081633, + "acc_norm_stderr": 0.03197694118713673 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.540084388185654, + "acc_stderr": 0.03244246810187913, + "acc_norm": 0.540084388185654, + "acc_norm_stderr": 
0.03244246810187913 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2835723598435463, + "acc_stderr": 0.011511900775968332, + "acc_norm": 0.2835723598435463, + "acc_norm_stderr": 0.011511900775968332 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.39215686274509803, + "acc_stderr": 0.03426712349247271, + "acc_norm": 0.39215686274509803, + "acc_norm_stderr": 0.03426712349247271 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.503030303030303, + "acc_stderr": 0.03904272341431857, + "acc_norm": 0.503030303030303, + "acc_norm_stderr": 0.03904272341431857 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.26193390452876375, + "mc1_stderr": 0.015392118805015016, + "mc2": 0.4243531644307249, + "mc2_stderr": 0.014820029237903914 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5076741440377804, + "acc_stderr": 0.017188329219654273, + "acc_norm": 0.6115702479338843, + "acc_norm_stderr": 0.016756921571069425 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + 
"harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "GAI-LLM/llama-2-koen-13b-dpo-v1", + "model_sha": "c5b1d6d2c7e6d6d943453c1ea6e255ecf03a0fe4", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git 
a/GAI-LLM/llama-2-koen-13b-dpo-v2/result_2023-11-30 02:23:53.json b/GAI-LLM/llama-2-koen-13b-dpo-v2/result_2023-11-30 02:23:53.json new file mode 100644 index 0000000000000000000000000000000000000000..4994c6f4eb19345d18461bc334209ed09080c4ef --- /dev/null +++ b/GAI-LLM/llama-2-koen-13b-dpo-v2/result_2023-11-30 02:23:53.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.386518771331058, + "acc_stderr": 0.014230084761910467, + "acc_norm": 0.45051194539249145, + "acc_norm_stderr": 0.014539646098471627 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4208325034853615, + "acc_stderr": 0.004926837572202161, + "acc_norm": 0.5668193586934873, + "acc_norm_stderr": 0.0049450236570322765 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5146198830409356, + "acc_stderr": 0.038331852752130254, + "acc_norm": 0.5146198830409356, + "acc_norm_stderr": 0.038331852752130254 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4368932038834951, + "acc_stderr": 0.049111471073657764, + "acc_norm": 0.4368932038834951, + "acc_norm_stderr": 0.049111471073657764 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5223499361430396, + "acc_stderr": 0.01786209177850786, + "acc_norm": 0.5223499361430396, + "acc_norm_stderr": 0.01786209177850786 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.5037037037037037, + "acc_stderr": 0.043192236258113324, + "acc_norm": 0.5037037037037037, + "acc_norm_stderr": 0.043192236258113324 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3872340425531915, + "acc_stderr": 0.03184389265339526, + "acc_norm": 0.3872340425531915, + "acc_norm_stderr": 0.03184389265339526 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4036144578313253, + "acc_stderr": 0.03819486140758398, + "acc_norm": 0.4036144578313253, + "acc_norm_stderr": 0.03819486140758398 + }, + 
"harness|ko_mmlu_philosophy|5": { + "acc": 0.4983922829581994, + "acc_stderr": 0.02839794490780661, + "acc_norm": 0.4983922829581994, + "acc_norm_stderr": 0.02839794490780661 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5022421524663677, + "acc_stderr": 0.03355746535223263, + "acc_norm": 0.5022421524663677, + "acc_norm_stderr": 0.03355746535223263 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5038167938931297, + "acc_stderr": 0.043851623256015534, + "acc_norm": 0.5038167938931297, + "acc_norm_stderr": 0.043851623256015534 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.51010101010101, + "acc_stderr": 0.035616254886737454, + "acc_norm": 0.51010101010101, + "acc_norm_stderr": 0.035616254886737454 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.38620689655172413, + "acc_stderr": 0.04057324734419035, + "acc_norm": 0.38620689655172413, + "acc_norm_stderr": 0.04057324734419035 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.04023382273617747, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.04023382273617747 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4327731092436975, + "acc_stderr": 0.03218358107742613, + "acc_norm": 0.4327731092436975, + "acc_norm_stderr": 0.03218358107742613 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.40512820512820513, + "acc_stderr": 0.024890471769938152, + "acc_norm": 0.40512820512820513, + "acc_norm_stderr": 0.024890471769938152 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 
+ }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5092592592592593, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.5092592592592593, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39901477832512317, + "acc_stderr": 0.03445487686264716, + "acc_norm": 0.39901477832512317, + "acc_norm_stderr": 0.03445487686264716 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.43548387096774194, + "acc_stderr": 0.028206225591502734, + "acc_norm": 0.43548387096774194, + "acc_norm_stderr": 0.028206225591502734 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6196581196581197, + "acc_stderr": 0.03180425204384099, + "acc_norm": 0.6196581196581197, + "acc_norm_stderr": 0.03180425204384099 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.42641509433962266, + "acc_stderr": 0.03043779434298305, + "acc_norm": 0.42641509433962266, + "acc_norm_stderr": 0.03043779434298305 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.45454545454545453, + "acc_stderr": 0.04769300568972743, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.04769300568972743 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.02696242432507382, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.02696242432507382 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33112582781456956, + "acc_stderr": 0.038425817186598696, + "acc_norm": 0.33112582781456956, + "acc_norm_stderr": 0.038425817186598696 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5970149253731343, + "acc_stderr": 0.034683432951111266, + "acc_norm": 0.5970149253731343, + "acc_norm_stderr": 0.034683432951111266 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.32947976878612717, + "acc_stderr": 0.03583901754736413, + "acc_norm": 0.32947976878612717, + "acc_norm_stderr": 0.03583901754736413 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2724867724867725, + 
"acc_stderr": 0.02293097307163335, + "acc_norm": 0.2724867724867725, + "acc_norm_stderr": 0.02293097307163335 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3541666666666667, + "acc_stderr": 0.039994111357535424, + "acc_norm": 0.3541666666666667, + "acc_norm_stderr": 0.039994111357535424 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.64, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.64, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4884393063583815, + "acc_stderr": 0.02691189868637792, + "acc_norm": 0.4884393063583815, + "acc_norm_stderr": 0.02691189868637792 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4171779141104294, + "acc_stderr": 0.0387410285981808, + "acc_norm": 0.4171779141104294, + "acc_norm_stderr": 0.0387410285981808 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.44753086419753085, + "acc_stderr": 0.027667138569422704, + "acc_norm": 0.44753086419753085, + "acc_norm_stderr": 0.027667138569422704 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.44041450777202074, + "acc_stderr": 0.03582724530036094, + "acc_norm": 0.44041450777202074, + "acc_norm_stderr": 0.03582724530036094 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.041424397194893596, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.041424397194893596 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5211009174311927, + "acc_stderr": 0.02141822475426464, + "acc_norm": 0.5211009174311927, + "acc_norm_stderr": 0.02141822475426464 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 
0.24603174603174602, + "acc_stderr": 0.03852273364924315, + "acc_norm": 0.24603174603174602, + "acc_norm_stderr": 0.03852273364924315 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4084967320261438, + "acc_stderr": 0.028146405993096358, + "acc_norm": 0.4084967320261438, + "acc_norm_stderr": 0.028146405993096358 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.628099173553719, + "acc_stderr": 0.04412015806624504, + "acc_norm": 0.628099173553719, + "acc_norm_stderr": 0.04412015806624504 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.39473684210526316, + "acc_stderr": 0.039777499346220734, + "acc_norm": 0.39473684210526316, + "acc_norm_stderr": 0.039777499346220734 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3790849673202614, + "acc_stderr": 0.019627444748412232, + "acc_norm": 0.3790849673202614, + "acc_norm_stderr": 0.019627444748412232 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.28368794326241137, + "acc_stderr": 0.026891709428343957, + "acc_norm": 0.28368794326241137, + "acc_norm_stderr": 0.026891709428343957 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25892857142857145, + "acc_stderr": 0.04157751539865629, + "acc_norm": 0.25892857142857145, + "acc_norm_stderr": 0.04157751539865629 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2361111111111111, + "acc_stderr": 0.02896370257079105, + "acc_norm": 0.2361111111111111, + "acc_norm_stderr": 0.02896370257079105 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + 
"harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3897058823529412, + "acc_stderr": 0.02962466358115969, + "acc_norm": 0.3897058823529412, + "acc_norm_stderr": 0.02962466358115969 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.45714285714285713, + "acc_stderr": 0.03189141832421397, + "acc_norm": 0.45714285714285713, + "acc_norm_stderr": 0.03189141832421397 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5780590717299579, + "acc_stderr": 0.032148146302403695, + "acc_norm": 0.5780590717299579, + "acc_norm_stderr": 0.032148146302403695 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2940026075619296, + "acc_stderr": 0.011636062953698605, + "acc_norm": 0.2940026075619296, + "acc_norm_stderr": 0.011636062953698605 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4019607843137255, + "acc_stderr": 0.034411900234824655, + "acc_norm": 0.4019607843137255, + "acc_norm_stderr": 0.034411900234824655 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5393939393939394, + "acc_stderr": 0.03892207016552012, + "acc_norm": 0.5393939393939394, + "acc_norm_stderr": 0.03892207016552012 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2717258261933905, + "mc1_stderr": 0.01557284045287583, + "mc2": 0.4226153874486002, + "mc2_stderr": 0.014807061474258669 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5324675324675324, + "acc_stderr": 0.017154073716682868, + "acc_norm": 0.6174734356552538, + "acc_norm_stderr": 0.016709165387228834 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + 
"harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + 
"harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "GAI-LLM/llama-2-koen-13b-dpo-v2", + "model_sha": "34736dec8153702c2c07e0265b702a29ca65178c", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/GAI-LLM/llama-2-koen-13b-dpo-v3/result_2023-12-05 00:42:00.json b/GAI-LLM/llama-2-koen-13b-dpo-v3/result_2023-12-05 00:42:00.json new file mode 100644 index 0000000000000000000000000000000000000000..bfab5930f4c10ecf440111f9410ad916e9796655 --- /dev/null +++ b/GAI-LLM/llama-2-koen-13b-dpo-v3/result_2023-12-05 00:42:00.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.386518771331058, + "acc_stderr": 0.014230084761910467, + "acc_norm": 0.45051194539249145, + "acc_norm_stderr": 0.014539646098471627 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4208325034853615, + "acc_stderr": 0.004926837572202161, + "acc_norm": 0.5668193586934873, + "acc_norm_stderr": 0.0049450236570322765 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5146198830409356, + "acc_stderr": 0.038331852752130254, + "acc_norm": 0.5146198830409356, + "acc_norm_stderr": 0.038331852752130254 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4368932038834951, + "acc_stderr": 0.049111471073657764, + "acc_norm": 0.4368932038834951, + "acc_norm_stderr": 0.049111471073657764 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5223499361430396, + "acc_stderr": 0.01786209177850786, + "acc_norm": 0.5223499361430396, + 
"acc_norm_stderr": 0.01786209177850786 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.5037037037037037, + "acc_stderr": 0.043192236258113324, + "acc_norm": 0.5037037037037037, + "acc_norm_stderr": 0.043192236258113324 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3872340425531915, + "acc_stderr": 0.03184389265339526, + "acc_norm": 0.3872340425531915, + "acc_norm_stderr": 0.03184389265339526 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4036144578313253, + "acc_stderr": 0.03819486140758398, + "acc_norm": 0.4036144578313253, + "acc_norm_stderr": 0.03819486140758398 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4983922829581994, + "acc_stderr": 0.02839794490780661, + "acc_norm": 0.4983922829581994, + "acc_norm_stderr": 0.02839794490780661 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5022421524663677, + "acc_stderr": 0.03355746535223263, + "acc_norm": 0.5022421524663677, + "acc_norm_stderr": 0.03355746535223263 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5038167938931297, + "acc_stderr": 0.043851623256015534, + "acc_norm": 0.5038167938931297, + "acc_norm_stderr": 0.043851623256015534 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.51010101010101, + "acc_stderr": 0.035616254886737454, + "acc_norm": 0.51010101010101, + "acc_norm_stderr": 0.035616254886737454 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.38620689655172413, + "acc_stderr": 0.04057324734419035, + "acc_norm": 0.38620689655172413, + "acc_norm_stderr": 0.04057324734419035 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.04023382273617747, + "acc_norm": 0.20588235294117646, + 
"acc_norm_stderr": 0.04023382273617747 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4327731092436975, + "acc_stderr": 0.03218358107742613, + "acc_norm": 0.4327731092436975, + "acc_norm_stderr": 0.03218358107742613 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.40512820512820513, + "acc_stderr": 0.024890471769938152, + "acc_norm": 0.40512820512820513, + "acc_norm_stderr": 0.024890471769938152 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5092592592592593, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.5092592592592593, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39901477832512317, + "acc_stderr": 0.03445487686264716, + "acc_norm": 0.39901477832512317, + "acc_norm_stderr": 0.03445487686264716 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.43548387096774194, + "acc_stderr": 0.028206225591502734, + "acc_norm": 0.43548387096774194, + "acc_norm_stderr": 0.028206225591502734 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6196581196581197, + "acc_stderr": 0.03180425204384099, + "acc_norm": 0.6196581196581197, + "acc_norm_stderr": 0.03180425204384099 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.42641509433962266, + "acc_stderr": 0.03043779434298305, + "acc_norm": 0.42641509433962266, + "acc_norm_stderr": 0.03043779434298305 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.45454545454545453, + "acc_stderr": 0.04769300568972743, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.04769300568972743 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26666666666666666, + "acc_stderr": 
0.02696242432507382, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.02696242432507382 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33112582781456956, + "acc_stderr": 0.038425817186598696, + "acc_norm": 0.33112582781456956, + "acc_norm_stderr": 0.038425817186598696 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5970149253731343, + "acc_stderr": 0.034683432951111266, + "acc_norm": 0.5970149253731343, + "acc_norm_stderr": 0.034683432951111266 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.32947976878612717, + "acc_stderr": 0.03583901754736413, + "acc_norm": 0.32947976878612717, + "acc_norm_stderr": 0.03583901754736413 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2724867724867725, + "acc_stderr": 0.02293097307163335, + "acc_norm": 0.2724867724867725, + "acc_norm_stderr": 0.02293097307163335 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3541666666666667, + "acc_stderr": 0.039994111357535424, + "acc_norm": 0.3541666666666667, + "acc_norm_stderr": 0.039994111357535424 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.64, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.64, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4884393063583815, + "acc_stderr": 0.02691189868637792, + "acc_norm": 0.4884393063583815, + "acc_norm_stderr": 0.02691189868637792 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4171779141104294, + "acc_stderr": 0.0387410285981808, + "acc_norm": 0.4171779141104294, + "acc_norm_stderr": 0.0387410285981808 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.44753086419753085, + "acc_stderr": 0.027667138569422704, + "acc_norm": 0.44753086419753085, + "acc_norm_stderr": 0.027667138569422704 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + 
"acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.44041450777202074, + "acc_stderr": 0.03582724530036094, + "acc_norm": 0.44041450777202074, + "acc_norm_stderr": 0.03582724530036094 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.041424397194893596, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.041424397194893596 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5211009174311927, + "acc_stderr": 0.02141822475426464, + "acc_norm": 0.5211009174311927, + "acc_norm_stderr": 0.02141822475426464 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.24603174603174602, + "acc_stderr": 0.03852273364924315, + "acc_norm": 0.24603174603174602, + "acc_norm_stderr": 0.03852273364924315 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4084967320261438, + "acc_stderr": 0.028146405993096358, + "acc_norm": 0.4084967320261438, + "acc_norm_stderr": 0.028146405993096358 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.628099173553719, + "acc_stderr": 0.04412015806624504, + "acc_norm": 0.628099173553719, + "acc_norm_stderr": 0.04412015806624504 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.39473684210526316, + "acc_stderr": 0.039777499346220734, + "acc_norm": 0.39473684210526316, + "acc_norm_stderr": 0.039777499346220734 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3790849673202614, + "acc_stderr": 0.019627444748412232, + "acc_norm": 0.3790849673202614, + "acc_norm_stderr": 0.019627444748412232 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.28368794326241137, + "acc_stderr": 0.026891709428343957, + "acc_norm": 0.28368794326241137, + "acc_norm_stderr": 0.026891709428343957 + }, + 
"harness|ko_mmlu_machine_learning|5": { + "acc": 0.25892857142857145, + "acc_stderr": 0.04157751539865629, + "acc_norm": 0.25892857142857145, + "acc_norm_stderr": 0.04157751539865629 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2361111111111111, + "acc_stderr": 0.02896370257079105, + "acc_norm": 0.2361111111111111, + "acc_norm_stderr": 0.02896370257079105 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3897058823529412, + "acc_stderr": 0.02962466358115969, + "acc_norm": 0.3897058823529412, + "acc_norm_stderr": 0.02962466358115969 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.45714285714285713, + "acc_stderr": 0.03189141832421397, + "acc_norm": 0.45714285714285713, + "acc_norm_stderr": 0.03189141832421397 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5780590717299579, + "acc_stderr": 0.032148146302403695, + "acc_norm": 0.5780590717299579, + "acc_norm_stderr": 0.032148146302403695 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2940026075619296, + "acc_stderr": 0.011636062953698605, + "acc_norm": 0.2940026075619296, + "acc_norm_stderr": 0.011636062953698605 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4019607843137255, + "acc_stderr": 0.034411900234824655, + "acc_norm": 0.4019607843137255, + "acc_norm_stderr": 0.034411900234824655 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5393939393939394, + "acc_stderr": 0.03892207016552012, 
+ "acc_norm": 0.5393939393939394, + "acc_norm_stderr": 0.03892207016552012 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2717258261933905, + "mc1_stderr": 0.01557284045287583, + "mc2": 0.4226153874486002, + "mc2_stderr": 0.014807061474258669 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5324675324675324, + "acc_stderr": 0.017154073716682868, + "acc_norm": 0.6174734356552538, + "acc_norm_stderr": 0.016709165387228834 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + 
"harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "GAI-LLM/llama-2-koen-13b-dpo-v3", + "model_sha": "ab13dffe5b9091d09383c3f57cff37e0503a7dc5", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/GAI-LLM/llama-2-koen-13b-dpo-v3_2/result_2023-12-12 05:55:00.json b/GAI-LLM/llama-2-koen-13b-dpo-v3_2/result_2023-12-12 05:55:00.json new file mode 100644 index 0000000000000000000000000000000000000000..bba24e7be7db796d3cf9f934a64c1844652234c4 --- /dev/null +++ b/GAI-LLM/llama-2-koen-13b-dpo-v3_2/result_2023-12-12 05:55:00.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3916382252559727, + "acc_stderr": 0.014264122124938215, + "acc_norm": 0.45563139931740615, + "acc_norm_stderr": 
0.014553749939306864 + }, + "harness|ko_hellaswag|10": { + "acc": 0.41963752240589525, + "acc_stderr": 0.00492491043310636, + "acc_norm": 0.5681139215295757, + "acc_norm_stderr": 0.0049432643398686525 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5263157894736842, + "acc_stderr": 0.03829509868994727, + "acc_norm": 0.5263157894736842, + "acc_norm_stderr": 0.03829509868994727 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.44660194174757284, + "acc_stderr": 0.04922424153458935, + "acc_norm": 0.44660194174757284, + "acc_norm_stderr": 0.04922424153458935 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5287356321839081, + "acc_stderr": 0.01785041079438017, + "acc_norm": 0.5287356321839081, + "acc_norm_stderr": 0.01785041079438017 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.043097329010363554, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.043097329010363554 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847415, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847415 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3829787234042553, + "acc_stderr": 0.03177821250236922, + "acc_norm": 0.3829787234042553, + "acc_norm_stderr": 0.03177821250236922 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42771084337349397, + "acc_stderr": 0.03851597683718533, + "acc_norm": 0.42771084337349397, + "acc_norm_stderr": 0.03851597683718533 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4694533762057878, + "acc_stderr": 0.028345045864840684, + "acc_norm": 0.4694533762057878, + "acc_norm_stderr": 0.028345045864840684 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5246636771300448, + "acc_stderr": 0.033516951676526276, + "acc_norm": 0.5246636771300448, + "acc_norm_stderr": 0.033516951676526276 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5114503816793893, + "acc_stderr": 0.043841400240780176, + "acc_norm": 0.5114503816793893, + 
"acc_norm_stderr": 0.043841400240780176 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5404040404040404, + "acc_stderr": 0.035507024651313425, + "acc_norm": 0.5404040404040404, + "acc_norm_stderr": 0.035507024651313425 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3724137931034483, + "acc_stderr": 0.0402873153294756, + "acc_norm": 0.3724137931034483, + "acc_norm_stderr": 0.0402873153294756 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.19607843137254902, + "acc_stderr": 0.03950581861179963, + "acc_norm": 0.19607843137254902, + "acc_norm_stderr": 0.03950581861179963 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.42436974789915966, + "acc_stderr": 0.032104790510157764, + "acc_norm": 0.42436974789915966, + "acc_norm_stderr": 0.032104790510157764 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.38461538461538464, + "acc_stderr": 0.024666744915187232, + "acc_norm": 0.38461538461538464, + "acc_norm_stderr": 0.024666744915187232 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4039408866995074, + "acc_stderr": 0.0345245390382204, + "acc_norm": 0.4039408866995074, + "acc_norm_stderr": 0.0345245390382204 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4096774193548387, + "acc_stderr": 0.027976054915347357, + 
"acc_norm": 0.4096774193548387, + "acc_norm_stderr": 0.027976054915347357 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6367521367521367, + "acc_stderr": 0.03150712523091264, + "acc_norm": 0.6367521367521367, + "acc_norm_stderr": 0.03150712523091264 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4226415094339623, + "acc_stderr": 0.030402331445769537, + "acc_norm": 0.4226415094339623, + "acc_norm_stderr": 0.030402331445769537 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.45454545454545453, + "acc_stderr": 0.04769300568972743, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.04769300568972743 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24444444444444444, + "acc_stderr": 0.026202766534652148, + "acc_norm": 0.24444444444444444, + "acc_norm_stderr": 0.026202766534652148 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.03780445850526733, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.03780445850526733 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5920398009950248, + "acc_stderr": 0.03475116365194092, + "acc_norm": 0.5920398009950248, + "acc_norm_stderr": 0.03475116365194092 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3583815028901734, + "acc_stderr": 0.03656343653353159, + "acc_norm": 0.3583815028901734, + "acc_norm_stderr": 0.03656343653353159 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.023068188848261107, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.023068188848261107 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3541666666666667, + "acc_stderr": 0.039994111357535424, + "acc_norm": 0.3541666666666667, + "acc_norm_stderr": 0.039994111357535424 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.31, + "acc_stderr": 0.046482319871173156, + "acc_norm": 0.31, + "acc_norm_stderr": 0.046482319871173156 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + 
"acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4884393063583815, + "acc_stderr": 0.026911898686377927, + "acc_norm": 0.4884393063583815, + "acc_norm_stderr": 0.026911898686377927 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4171779141104294, + "acc_stderr": 0.0387410285981808, + "acc_norm": 0.4171779141104294, + "acc_norm_stderr": 0.0387410285981808 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4506172839506173, + "acc_stderr": 0.027684721415656203, + "acc_norm": 0.4506172839506173, + "acc_norm_stderr": 0.027684721415656203 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.41450777202072536, + "acc_stderr": 0.03555300319557672, + "acc_norm": 0.41450777202072536, + "acc_norm_stderr": 0.03555300319557672 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.0414243971948936, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.0414243971948936 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5266055045871559, + "acc_stderr": 0.021406952688151588, + "acc_norm": 0.5266055045871559, + "acc_norm_stderr": 0.021406952688151588 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.03932537680392868, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.03932537680392868 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.40522875816993464, + "acc_stderr": 0.028110928492809075, + "acc_norm": 0.40522875816993464, + "acc_norm_stderr": 0.028110928492809075 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_international_law|5": { + 
"acc": 0.6198347107438017, + "acc_stderr": 0.04431324501968431, + "acc_norm": 0.6198347107438017, + "acc_norm_stderr": 0.04431324501968431 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3815789473684211, + "acc_stderr": 0.03953173377749194, + "acc_norm": 0.3815789473684211, + "acc_norm_stderr": 0.03953173377749194 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.380718954248366, + "acc_stderr": 0.019643801557924803, + "acc_norm": 0.380718954248366, + "acc_norm_stderr": 0.019643801557924803 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.29432624113475175, + "acc_stderr": 0.027187127011503807, + "acc_norm": 0.29432624113475175, + "acc_norm_stderr": 0.027187127011503807 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.24107142857142858, + "acc_stderr": 0.04059867246952687, + "acc_norm": 0.24107142857142858, + "acc_norm_stderr": 0.04059867246952687 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2361111111111111, + "acc_stderr": 0.028963702570791044, + "acc_norm": 0.2361111111111111, + "acc_norm_stderr": 0.028963702570791044 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3860294117647059, + "acc_stderr": 0.029573269134411124, + "acc_norm": 0.3860294117647059, + "acc_norm_stderr": 0.029573269134411124 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4448979591836735, + "acc_stderr": 0.031814251181977865, + "acc_norm": 0.4448979591836735, + "acc_norm_stderr": 
0.031814251181977865 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5780590717299579, + "acc_stderr": 0.032148146302403695, + "acc_norm": 0.5780590717299579, + "acc_norm_stderr": 0.032148146302403695 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.28748370273794005, + "acc_stderr": 0.01155933735570851, + "acc_norm": 0.28748370273794005, + "acc_norm_stderr": 0.01155933735570851 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.39215686274509803, + "acc_stderr": 0.034267123492472705, + "acc_norm": 0.39215686274509803, + "acc_norm_stderr": 0.034267123492472705 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5272727272727272, + "acc_stderr": 0.03898531605579418, + "acc_norm": 0.5272727272727272, + "acc_norm_stderr": 0.03898531605579418 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2827417380660955, + "mc1_stderr": 0.015764770836777305, + "mc2": 0.4588099760274036, + "mc2_stderr": 0.015066495237883525 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5572609208972845, + "acc_stderr": 0.01707725413155622, + "acc_norm": 0.6469893742621016, + "acc_norm_stderr": 0.01643074598242715 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + 
"harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "GAI-LLM/llama-2-koen-13b-dpo-v3_2", + "model_sha": 
"56d6c983276c785d9af25d028abc39d04fbd31d2", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/GAI-LLM/llama-2-koen-13b-mixed-v11/result_2023-12-06 04:09:10.json b/GAI-LLM/llama-2-koen-13b-mixed-v11/result_2023-12-06 04:09:10.json new file mode 100644 index 0000000000000000000000000000000000000000..a6bd0f6907256a6ea5a8c123a4a161c59e175d0b --- /dev/null +++ b/GAI-LLM/llama-2-koen-13b-mixed-v11/result_2023-12-06 04:09:10.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3822525597269625, + "acc_stderr": 0.014200454049979274, + "acc_norm": 0.4496587030716723, + "acc_norm_stderr": 0.014537144444284746 + }, + "harness|ko_hellaswag|10": { + "acc": 0.418442541326429, + "acc_stderr": 0.004922953651577685, + "acc_norm": 0.5646285600477993, + "acc_norm_stderr": 0.004947922692688838 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5087719298245614, + "acc_stderr": 0.038342347441649924, + "acc_norm": 0.5087719298245614, + "acc_norm_stderr": 0.038342347441649924 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4174757281553398, + "acc_stderr": 0.04882840548212238, + "acc_norm": 0.4174757281553398, + "acc_norm_stderr": 0.04882840548212238 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5210727969348659, + "acc_stderr": 0.017864076786212907, + "acc_norm": 0.5210727969348659, + "acc_norm_stderr": 0.017864076786212907 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.5111111111111111, + "acc_stderr": 0.04318275491977976, + "acc_norm": 0.5111111111111111, + "acc_norm_stderr": 0.04318275491977976 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421255, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421255 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3872340425531915, + "acc_stderr": 0.03184389265339526, + 
"acc_norm": 0.3872340425531915, + "acc_norm_stderr": 0.03184389265339526 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42771084337349397, + "acc_stderr": 0.03851597683718533, + "acc_norm": 0.42771084337349397, + "acc_norm_stderr": 0.03851597683718533 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5016077170418006, + "acc_stderr": 0.02839794490780661, + "acc_norm": 0.5016077170418006, + "acc_norm_stderr": 0.02839794490780661 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4977578475336323, + "acc_stderr": 0.033557465352232634, + "acc_norm": 0.4977578475336323, + "acc_norm_stderr": 0.033557465352232634 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48854961832061067, + "acc_stderr": 0.043841400240780176, + "acc_norm": 0.48854961832061067, + "acc_norm_stderr": 0.043841400240780176 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5252525252525253, + "acc_stderr": 0.03557806245087314, + "acc_norm": 0.5252525252525253, + "acc_norm_stderr": 0.03557806245087314 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3793103448275862, + "acc_stderr": 0.04043461861916747, + "acc_norm": 0.3793103448275862, + "acc_norm_stderr": 0.04043461861916747 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.04220773659171453, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.04220773659171453 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4369747899159664, + "acc_stderr": 0.032219436365661956, + "acc_norm": 0.4369747899159664, + "acc_norm_stderr": 0.032219436365661956 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4076923076923077, + "acc_stderr": 0.024915243985987844, + "acc_norm": 0.4076923076923077, + "acc_norm_stderr": 0.024915243985987844 + }, + "harness|ko_mmlu_computer_security|5": { + 
"acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39901477832512317, + "acc_stderr": 0.03445487686264715, + "acc_norm": 0.39901477832512317, + "acc_norm_stderr": 0.03445487686264715 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.432258064516129, + "acc_stderr": 0.028181739720019416, + "acc_norm": 0.432258064516129, + "acc_norm_stderr": 0.028181739720019416 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6153846153846154, + "acc_stderr": 0.03187195347942466, + "acc_norm": 0.6153846153846154, + "acc_norm_stderr": 0.03187195347942466 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.41509433962264153, + "acc_stderr": 0.030325945789286105, + "acc_norm": 0.41509433962264153, + "acc_norm_stderr": 0.030325945789286105 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4818181818181818, + "acc_stderr": 0.04785964010794916, + "acc_norm": 0.4818181818181818, + "acc_norm_stderr": 0.04785964010794916 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.026719240783712163, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.026719240783712163 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33774834437086093, + "acc_stderr": 0.038615575462551684, + "acc_norm": 0.33774834437086093, + "acc_norm_stderr": 0.038615575462551684 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5970149253731343, + "acc_stderr": 0.034683432951111266, + "acc_norm": 0.5970149253731343, + "acc_norm_stderr": 0.034683432951111266 + }, + "harness|ko_mmlu_college_medicine|5": { + 
"acc": 0.34104046242774566, + "acc_stderr": 0.03614665424180826, + "acc_norm": 0.34104046242774566, + "acc_norm_stderr": 0.03614665424180826 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2751322751322751, + "acc_stderr": 0.02300008685906864, + "acc_norm": 0.2751322751322751, + "acc_norm_stderr": 0.02300008685906864 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.040166600304512336, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.040166600304512336 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.62, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.62, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4884393063583815, + "acc_stderr": 0.02691189868637792, + "acc_norm": 0.4884393063583815, + "acc_norm_stderr": 0.02691189868637792 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3987730061349693, + "acc_stderr": 0.03847021420456023, + "acc_norm": 0.3987730061349693, + "acc_norm_stderr": 0.03847021420456023 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.02774431344337654, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.02774431344337654 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.45077720207253885, + "acc_stderr": 0.03590910952235524, + "acc_norm": 0.45077720207253885, + "acc_norm_stderr": 0.03590910952235524 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.042270544512321984, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.042270544512321984 + }, + 
"harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5211009174311927, + "acc_stderr": 0.02141822475426464, + "acc_norm": 0.5211009174311927, + "acc_norm_stderr": 0.02141822475426464 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.03932537680392868, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.03932537680392868 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4215686274509804, + "acc_stderr": 0.02827549015679143, + "acc_norm": 0.4215686274509804, + "acc_norm_stderr": 0.02827549015679143 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.043913262867240704, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.043913262867240704 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3881578947368421, + "acc_stderr": 0.03965842097512744, + "acc_norm": 0.3881578947368421, + "acc_norm_stderr": 0.03965842097512744 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3758169934640523, + "acc_stderr": 0.019594021136577447, + "acc_norm": 0.3758169934640523, + "acc_norm_stderr": 0.019594021136577447 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2801418439716312, + "acc_stderr": 0.02678917235114024, + "acc_norm": 0.2801418439716312, + "acc_norm_stderr": 0.02678917235114024 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.26785714285714285, + "acc_stderr": 0.042032772914677614, + "acc_norm": 0.26785714285714285, + "acc_norm_stderr": 0.042032772914677614 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.26851851851851855, + "acc_stderr": 0.030225226160012386, + "acc_norm": 0.26851851851851855, + "acc_norm_stderr": 0.030225226160012386 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 
0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3897058823529412, + "acc_stderr": 0.02962466358115969, + "acc_norm": 0.3897058823529412, + "acc_norm_stderr": 0.02962466358115969 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.46530612244897956, + "acc_stderr": 0.03193207024425314, + "acc_norm": 0.46530612244897956, + "acc_norm_stderr": 0.03193207024425314 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5654008438818565, + "acc_stderr": 0.03226759995510145, + "acc_norm": 0.5654008438818565, + "acc_norm_stderr": 0.03226759995510145 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.29335071707953064, + "acc_stderr": 0.011628520449582076, + "acc_norm": 0.29335071707953064, + "acc_norm_stderr": 0.011628520449582076 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4068627450980392, + "acc_stderr": 0.03447891136353382, + "acc_norm": 0.4068627450980392, + "acc_norm_stderr": 0.03447891136353382 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5636363636363636, + "acc_stderr": 0.03872592983524754, + "acc_norm": 0.5636363636363636, + "acc_norm_stderr": 0.03872592983524754 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2692778457772338, + "mc1_stderr": 0.015528566637087295, + "mc2": 0.4223073081878361, + "mc2_stderr": 0.01480298314658298 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5230224321133412, + "acc_stderr": 0.01717212154672763, + "acc_norm": 0.5914994096812278, + "acc_norm_stderr": 0.016900062879427122 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + 
"harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + 
"harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "GAI-LLM/llama-2-koen-13b-mixed-v11", + "model_sha": "d309fbc21d29f4cfb41d2506c406244cb11e78f1", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/GAI-LLM/llama-2-koen-13b-mixed-v11_2/result_2023-12-11 01:53:50.json b/GAI-LLM/llama-2-koen-13b-mixed-v11_2/result_2023-12-11 01:53:50.json new file mode 100644 index 0000000000000000000000000000000000000000..a587b2f51da018096277ec50c700fb9760546cf5 --- /dev/null +++ b/GAI-LLM/llama-2-koen-13b-mixed-v11_2/result_2023-12-11 01:53:50.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.38139931740614336, + "acc_stderr": 0.014194389086685247, + "acc_norm": 0.45051194539249145, + "acc_norm_stderr": 0.014539646098471627 + }, + "harness|ko_hellaswag|10": { + "acc": 0.41963752240589525, + "acc_stderr": 0.004924910433106359, + "acc_norm": 0.566122286397132, + "acc_norm_stderr": 0.004945956744943813 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5146198830409356, + "acc_stderr": 0.038331852752130254, + "acc_norm": 0.5146198830409356, + "acc_norm_stderr": 0.038331852752130254 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4368932038834951, + 
"acc_stderr": 0.049111471073657764, + "acc_norm": 0.4368932038834951, + "acc_norm_stderr": 0.049111471073657764 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5351213282247765, + "acc_stderr": 0.017835798806290642, + "acc_norm": 0.5351213282247765, + "acc_norm_stderr": 0.017835798806290642 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.5037037037037037, + "acc_stderr": 0.043192236258113324, + "acc_norm": 0.5037037037037037, + "acc_norm_stderr": 0.043192236258113324 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3872340425531915, + "acc_stderr": 0.03184389265339526, + "acc_norm": 0.3872340425531915, + "acc_norm_stderr": 0.03184389265339526 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.41566265060240964, + "acc_stderr": 0.03836722176598052, + "acc_norm": 0.41566265060240964, + "acc_norm_stderr": 0.03836722176598052 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4983922829581994, + "acc_stderr": 0.02839794490780661, + "acc_norm": 0.4983922829581994, + "acc_norm_stderr": 0.02839794490780661 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5022421524663677, + "acc_stderr": 0.033557465352232634, + "acc_norm": 0.5022421524663677, + "acc_norm_stderr": 0.033557465352232634 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5114503816793893, + "acc_stderr": 0.043841400240780176, + "acc_norm": 0.5114503816793893, + "acc_norm_stderr": 0.043841400240780176 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.51010101010101, + "acc_stderr": 0.035616254886737454, + "acc_norm": 0.51010101010101, + "acc_norm_stderr": 0.035616254886737454 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3724137931034483, 
+ "acc_stderr": 0.0402873153294756, + "acc_norm": 0.3724137931034483, + "acc_norm_stderr": 0.0402873153294756 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.18627450980392157, + "acc_stderr": 0.038739587141493524, + "acc_norm": 0.18627450980392157, + "acc_norm_stderr": 0.038739587141493524 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4327731092436975, + "acc_stderr": 0.03218358107742613, + "acc_norm": 0.4327731092436975, + "acc_norm_stderr": 0.03218358107742613 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4, + "acc_stderr": 0.024838811988033158, + "acc_norm": 0.4, + "acc_norm_stderr": 0.024838811988033158 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.55, + "acc_stderr": 0.049999999999999996, + "acc_norm": 0.55, + "acc_norm_stderr": 0.049999999999999996 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.41, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.41, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4088669950738916, + "acc_stderr": 0.034590588158832314, + "acc_norm": 0.4088669950738916, + "acc_norm_stderr": 0.034590588158832314 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.43870967741935485, + "acc_stderr": 0.028229497320317216, + "acc_norm": 0.43870967741935485, + "acc_norm_stderr": 0.028229497320317216 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6153846153846154, + "acc_stderr": 0.03187195347942466, + "acc_norm": 0.6153846153846154, + "acc_norm_stderr": 0.03187195347942466 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.42641509433962266, + "acc_stderr": 0.03043779434298305, + "acc_norm": 0.42641509433962266, + "acc_norm_stderr": 0.03043779434298305 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4636363636363636, + "acc_stderr": 
0.047764491623961985, + "acc_norm": 0.4636363636363636, + "acc_norm_stderr": 0.047764491623961985 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.02696242432507382, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.02696242432507382 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.03802039760107903, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.03802039760107903 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6019900497512438, + "acc_stderr": 0.034611994290400135, + "acc_norm": 0.6019900497512438, + "acc_norm_stderr": 0.034611994290400135 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.36416184971098264, + "acc_stderr": 0.03669072477416906, + "acc_norm": 0.36416184971098264, + "acc_norm_stderr": 0.03669072477416906 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2751322751322751, + "acc_stderr": 0.02300008685906864, + "acc_norm": 0.2751322751322751, + "acc_norm_stderr": 0.02300008685906864 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3472222222222222, + "acc_stderr": 0.03981240543717862, + "acc_norm": 0.3472222222222222, + "acc_norm_stderr": 0.03981240543717862 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.64, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.64, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.48554913294797686, + "acc_stderr": 0.02690784985628254, + "acc_norm": 0.48554913294797686, + "acc_norm_stderr": 0.02690784985628254 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.38650306748466257, + "acc_stderr": 0.038258255488486076, + "acc_norm": 0.38650306748466257, + "acc_norm_stderr": 0.038258255488486076 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 
0.4537037037037037, + "acc_stderr": 0.027701228468542602, + "acc_norm": 0.4537037037037037, + "acc_norm_stderr": 0.027701228468542602 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.43523316062176165, + "acc_stderr": 0.03578038165008586, + "acc_norm": 0.43523316062176165, + "acc_norm_stderr": 0.03578038165008586 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.04185774424022058, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.04185774424022058 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5339449541284403, + "acc_stderr": 0.02138786335035399, + "acc_norm": 0.5339449541284403, + "acc_norm_stderr": 0.02138786335035399 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.040061680838488795, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.040061680838488795 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4215686274509804, + "acc_stderr": 0.02827549015679143, + "acc_norm": 0.4215686274509804, + "acc_norm_stderr": 0.02827549015679143 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6115702479338843, + "acc_stderr": 0.04449270350068382, + "acc_norm": 0.6115702479338843, + "acc_norm_stderr": 0.04449270350068382 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.39473684210526316, + "acc_stderr": 0.039777499346220734, + "acc_norm": 0.39473684210526316, + "acc_norm_stderr": 0.039777499346220734 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3839869281045752, + "acc_stderr": 0.019675808135281508, + "acc_norm": 0.3839869281045752, + "acc_norm_stderr": 0.019675808135281508 + }, + 
"harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2978723404255319, + "acc_stderr": 0.027281608344469414, + "acc_norm": 0.2978723404255319, + "acc_norm_stderr": 0.027281608344469414 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.21428571428571427, + "acc_stderr": 0.03894641120044791, + "acc_norm": 0.21428571428571427, + "acc_norm_stderr": 0.03894641120044791 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2638888888888889, + "acc_stderr": 0.030058202704309846, + "acc_norm": 0.2638888888888889, + "acc_norm_stderr": 0.030058202704309846 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.41911764705882354, + "acc_stderr": 0.029972807170464622, + "acc_norm": 0.41911764705882354, + "acc_norm_stderr": 0.029972807170464622 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.47346938775510206, + "acc_stderr": 0.03196412734523272, + "acc_norm": 0.47346938775510206, + "acc_norm_stderr": 0.03196412734523272 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5485232067510548, + "acc_stderr": 0.032393600173974704, + "acc_norm": 0.5485232067510548, + "acc_norm_stderr": 0.032393600173974704 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.29595827900912647, + "acc_stderr": 0.011658518525277054, + "acc_norm": 0.29595827900912647, + "acc_norm_stderr": 0.011658518525277054 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4068627450980392, + "acc_stderr": 
0.03447891136353382, + "acc_norm": 0.4068627450980392, + "acc_norm_stderr": 0.03447891136353382 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5272727272727272, + "acc_stderr": 0.03898531605579418, + "acc_norm": 0.5272727272727272, + "acc_norm_stderr": 0.03898531605579418 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.27050183598531213, + "mc1_stderr": 0.015550778332842885, + "mc2": 0.42557508687226114, + "mc2_stderr": 0.014810504388914819 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.525383707201889, + "acc_stderr": 0.017168187201429253, + "acc_norm": 0.6092089728453365, + "acc_norm_stderr": 0.016775298465108265 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 
1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "GAI-LLM/llama-2-koen-13b-mixed-v11_2", + "model_sha": "da7c55c72f9f911022709d710972972beef327a4", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/GAI-LLM/llama-2-koen-13b-mixed-v7/result_2023-11-03 01:38:04.json b/GAI-LLM/llama-2-koen-13b-mixed-v7/result_2023-11-03 01:38:04.json new file mode 100644 index 0000000000000000000000000000000000000000..b28d107e4df9eecbd6b2444c601558baf28e068a --- /dev/null +++ 
b/GAI-LLM/llama-2-koen-13b-mixed-v7/result_2023-11-03 01:38:04.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3796928327645051, + "acc_stderr": 0.014182119866974874, + "acc_norm": 0.44795221843003413, + "acc_norm_stderr": 0.014532011498211669 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4171479784903406, + "acc_stderr": 0.004920800313232745, + "acc_norm": 0.5664210316669986, + "acc_norm_stderr": 0.004945558069852528 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5087719298245614, + "acc_stderr": 0.038342347441649924, + "acc_norm": 0.5087719298245614, + "acc_norm_stderr": 0.038342347441649924 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4368932038834951, + "acc_stderr": 0.04911147107365776, + "acc_norm": 0.4368932038834951, + "acc_norm_stderr": 0.04911147107365776 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5223499361430396, + "acc_stderr": 0.01786209177850786, + "acc_norm": 0.5223499361430396, + "acc_norm_stderr": 0.01786209177850786 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4740740740740741, + "acc_stderr": 0.04313531696750574, + "acc_norm": 0.4740740740740741, + "acc_norm_stderr": 0.04313531696750574 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39574468085106385, + "acc_stderr": 0.031967586978353627, + "acc_norm": 0.39574468085106385, + "acc_norm_stderr": 0.031967586978353627 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4036144578313253, + "acc_stderr": 0.03819486140758397, + "acc_norm": 0.4036144578313253, + "acc_norm_stderr": 0.03819486140758397 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4662379421221865, + "acc_stderr": 0.02833327710956279, + "acc_norm": 0.4662379421221865, + "acc_norm_stderr": 0.02833327710956279 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5067264573991032, + 
"acc_stderr": 0.033554765962343545, + "acc_norm": 0.5067264573991032, + "acc_norm_stderr": 0.033554765962343545 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4732824427480916, + "acc_stderr": 0.04379024936553894, + "acc_norm": 0.4732824427480916, + "acc_norm_stderr": 0.04379024936553894 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5303030303030303, + "acc_stderr": 0.03555804051763929, + "acc_norm": 0.5303030303030303, + "acc_norm_stderr": 0.03555804051763929 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.38620689655172413, + "acc_stderr": 0.04057324734419035, + "acc_norm": 0.38620689655172413, + "acc_norm_stderr": 0.04057324734419035 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.17647058823529413, + "acc_stderr": 0.0379328118530781, + "acc_norm": 0.17647058823529413, + "acc_norm_stderr": 0.0379328118530781 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4327731092436975, + "acc_stderr": 0.03218358107742613, + "acc_norm": 0.4327731092436975, + "acc_norm_stderr": 0.03218358107742613 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4153846153846154, + "acc_stderr": 0.024985354923102315, + "acc_norm": 0.4153846153846154, + "acc_norm_stderr": 0.024985354923102315 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.53, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.53, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5277777777777778, + "acc_stderr": 0.04826217294139894, + "acc_norm": 0.5277777777777778, + "acc_norm_stderr": 0.04826217294139894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 
0.3842364532019704, + "acc_stderr": 0.0342239856565755, + "acc_norm": 0.3842364532019704, + "acc_norm_stderr": 0.0342239856565755 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.43870967741935485, + "acc_stderr": 0.02822949732031722, + "acc_norm": 0.43870967741935485, + "acc_norm_stderr": 0.02822949732031722 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6324786324786325, + "acc_stderr": 0.031585391577456365, + "acc_norm": 0.6324786324786325, + "acc_norm_stderr": 0.031585391577456365 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4226415094339623, + "acc_stderr": 0.030402331445769537, + "acc_norm": 0.4226415094339623, + "acc_norm_stderr": 0.030402331445769537 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5, + "acc_stderr": 0.04789131426105757, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04789131426105757 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.026842057873833713, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.026842057873833713 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.037101857261199946, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.037101857261199946 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5771144278606966, + "acc_stderr": 0.034932317774212816, + "acc_norm": 0.5771144278606966, + "acc_norm_stderr": 0.034932317774212816 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3583815028901734, + "acc_stderr": 0.03656343653353159, + "acc_norm": 0.3583815028901734, + "acc_norm_stderr": 0.03656343653353159 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2751322751322751, + "acc_stderr": 0.02300008685906864, + "acc_norm": 0.2751322751322751, + "acc_norm_stderr": 0.02300008685906864 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3263888888888889, + "acc_stderr": 0.03921067198982266, + "acc_norm": 0.3263888888888889, + "acc_norm_stderr": 0.03921067198982266 
+ }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4595375722543353, + "acc_stderr": 0.02683080599895224, + "acc_norm": 0.4595375722543353, + "acc_norm_stderr": 0.02683080599895224 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3803680981595092, + "acc_stderr": 0.038142698932618374, + "acc_norm": 0.3803680981595092, + "acc_norm_stderr": 0.038142698932618374 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4660493827160494, + "acc_stderr": 0.027756535257347663, + "acc_norm": 0.4660493827160494, + "acc_norm_stderr": 0.027756535257347663 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.43523316062176165, + "acc_stderr": 0.03578038165008586, + "acc_norm": 0.43523316062176165, + "acc_norm_stderr": 0.03578038165008586 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.040969851398436695, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.040969851398436695 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5100917431192661, + "acc_stderr": 0.02143295620345332, + "acc_norm": 0.5100917431192661, + "acc_norm_stderr": 0.02143295620345332 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.040406101782088394, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.040406101782088394 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4150326797385621, + "acc_stderr": 0.028213504177824093, + "acc_norm": 0.4150326797385621, + "acc_norm_stderr": 
0.028213504177824093 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6115702479338843, + "acc_stderr": 0.04449270350068382, + "acc_norm": 0.6115702479338843, + "acc_norm_stderr": 0.04449270350068382 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4144736842105263, + "acc_stderr": 0.04008973785779206, + "acc_norm": 0.4144736842105263, + "acc_norm_stderr": 0.04008973785779206 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3627450980392157, + "acc_stderr": 0.019450768432505528, + "acc_norm": 0.3627450980392157, + "acc_norm_stderr": 0.019450768432505528 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3049645390070922, + "acc_stderr": 0.027464708442022135, + "acc_norm": 0.3049645390070922, + "acc_norm_stderr": 0.027464708442022135 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.23214285714285715, + "acc_stderr": 0.04007341809755806, + "acc_norm": 0.23214285714285715, + "acc_norm_stderr": 0.04007341809755806 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3194444444444444, + "acc_stderr": 0.0317987634217685, + "acc_norm": 0.3194444444444444, + "acc_norm_stderr": 0.0317987634217685 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.46691176470588236, + "acc_stderr": 0.030306257722468314, + "acc_norm": 
0.46691176470588236, + "acc_norm_stderr": 0.030306257722468314 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4775510204081633, + "acc_stderr": 0.03197694118713673, + "acc_norm": 0.4775510204081633, + "acc_norm_stderr": 0.03197694118713673 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5443037974683544, + "acc_stderr": 0.03241920684693334, + "acc_norm": 0.5443037974683544, + "acc_norm_stderr": 0.03241920684693334 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.28683181225554105, + "acc_stderr": 0.011551504781176935, + "acc_norm": 0.28683181225554105, + "acc_norm_stderr": 0.011551504781176935 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.39215686274509803, + "acc_stderr": 0.03426712349247271, + "acc_norm": 0.39215686274509803, + "acc_norm_stderr": 0.03426712349247271 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.503030303030303, + "acc_stderr": 0.03904272341431857, + "acc_norm": 0.503030303030303, + "acc_norm_stderr": 0.03904272341431857 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.26193390452876375, + "mc1_stderr": 0.015392118805015016, + "mc2": 0.42428555727008455, + "mc2_stderr": 0.014819025436428698 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5112160566706021, + "acc_stderr": 0.01718602846948929, + "acc_norm": 0.6139315230224321, + "acc_norm_stderr": 0.016738130760321757 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + 
"harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + 
"harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "GAI-LLM/llama-2-koen-13b-mixed-v7", + "model_sha": "8584de207645e9fed63c76e8e4718fb46e393c3c", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/GAI-LLM/llama-2-koen-13b-mixed-v8/result_2023-11-08 10:03:55.json b/GAI-LLM/llama-2-koen-13b-mixed-v8/result_2023-11-08 10:03:55.json new file mode 100644 index 0000000000000000000000000000000000000000..dffefdf38678b85a6811d48a699bf24e404f9ed6 --- /dev/null +++ b/GAI-LLM/llama-2-koen-13b-mixed-v8/result_2023-11-08 10:03:55.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.38139931740614336, + "acc_stderr": 0.014194389086685251, + "acc_norm": 0.439419795221843, + "acc_norm_stderr": 0.014503747823580123 + }, + "harness|ko_hellaswag|10": { + "acc": 0.41326428998207526, + "acc_stderr": 0.004914130855431776, + "acc_norm": 0.5622385978888668, + "acc_norm_stderr": 0.0049509732311887366 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.52046783625731, + "acc_stderr": 0.038316105328219316, + "acc_norm": 0.52046783625731, + "acc_norm_stderr": 0.038316105328219316 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4174757281553398, + "acc_stderr": 0.048828405482122375, + "acc_norm": 0.4174757281553398, + "acc_norm_stderr": 0.048828405482122375 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5159642401021711, + "acc_stderr": 0.017870847506081734, + "acc_norm": 0.5159642401021711, + "acc_norm_stderr": 0.017870847506081734 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.043097329010363554, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.043097329010363554 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + 
"acc_stderr": 0.04461960433384741, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4127659574468085, + "acc_stderr": 0.03218471141400351, + "acc_norm": 0.4127659574468085, + "acc_norm_stderr": 0.03218471141400351 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4457831325301205, + "acc_stderr": 0.03869543323472101, + "acc_norm": 0.4457831325301205, + "acc_norm_stderr": 0.03869543323472101 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5016077170418006, + "acc_stderr": 0.02839794490780661, + "acc_norm": 0.5016077170418006, + "acc_norm_stderr": 0.02839794490780661 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.515695067264574, + "acc_stderr": 0.0335412657542081, + "acc_norm": 0.515695067264574, + "acc_norm_stderr": 0.0335412657542081 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4580152671755725, + "acc_stderr": 0.04369802690578757, + "acc_norm": 0.4580152671755725, + "acc_norm_stderr": 0.04369802690578757 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.035402943770953675, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.035402943770953675 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4068965517241379, + "acc_stderr": 0.040937939812662374, + "acc_norm": 0.4068965517241379, + "acc_norm_stderr": 0.040937939812662374 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.041583075330832865, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.041583075330832865 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.032252942323996406, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.032252942323996406 + }, + 
"harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.441025641025641, + "acc_stderr": 0.025174048384000777, + "acc_norm": 0.441025641025641, + "acc_norm_stderr": 0.025174048384000777 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.048262172941398944, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.048262172941398944 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4187192118226601, + "acc_stderr": 0.03471192860518468, + "acc_norm": 0.4187192118226601, + "acc_norm_stderr": 0.03471192860518468 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.45483870967741935, + "acc_stderr": 0.028327743091561056, + "acc_norm": 0.45483870967741935, + "acc_norm_stderr": 0.028327743091561056 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6410256410256411, + "acc_stderr": 0.031426169937919246, + "acc_norm": 0.6410256410256411, + "acc_norm_stderr": 0.031426169937919246 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4377358490566038, + "acc_stderr": 0.030533338430467512, + "acc_norm": 0.4377358490566038, + "acc_norm_stderr": 0.030533338430467512 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4636363636363636, + "acc_stderr": 0.047764491623961985, + "acc_norm": 0.4636363636363636, + "acc_norm_stderr": 0.047764491623961985 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2740740740740741, + "acc_stderr": 0.027195934804085626, + "acc_norm": 0.2740740740740741, + "acc_norm_stderr": 0.027195934804085626 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.03780445850526733, + "acc_norm": 0.31125827814569534, 
+ "acc_norm_stderr": 0.03780445850526733 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.582089552238806, + "acc_stderr": 0.034875586404620636, + "acc_norm": 0.582089552238806, + "acc_norm_stderr": 0.034875586404620636 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.35260115606936415, + "acc_stderr": 0.036430371689585496, + "acc_norm": 0.35260115606936415, + "acc_norm_stderr": 0.036430371689585496 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2698412698412698, + "acc_stderr": 0.022860838309232072, + "acc_norm": 0.2698412698412698, + "acc_norm_stderr": 0.022860838309232072 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.040166600304512336, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.040166600304512336 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.63, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.63, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5028901734104047, + "acc_stderr": 0.02691864538323901, + "acc_norm": 0.5028901734104047, + "acc_norm_stderr": 0.02691864538323901 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4049079754601227, + "acc_stderr": 0.03856672163548913, + "acc_norm": 0.4049079754601227, + "acc_norm_stderr": 0.03856672163548913 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.47530864197530864, + "acc_stderr": 0.02778680093142745, + "acc_norm": 0.47530864197530864, + "acc_norm_stderr": 0.02778680093142745 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.48704663212435234, + "acc_stderr": 0.03607228061047749, + "acc_norm": 
0.48704663212435234, + "acc_norm_stderr": 0.03607228061047749 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.04142439719489358, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.04142439719489358 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5192660550458715, + "acc_stderr": 0.02142140298254888, + "acc_norm": 0.5192660550458715, + "acc_norm_stderr": 0.02142140298254888 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.040406101782088394, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.040406101782088394 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4215686274509804, + "acc_stderr": 0.028275490156791434, + "acc_norm": 0.4215686274509804, + "acc_norm_stderr": 0.028275490156791434 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5950413223140496, + "acc_stderr": 0.04481137755942469, + "acc_norm": 0.5950413223140496, + "acc_norm_stderr": 0.04481137755942469 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.39473684210526316, + "acc_stderr": 0.039777499346220734, + "acc_norm": 0.39473684210526316, + "acc_norm_stderr": 0.039777499346220734 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.36764705882352944, + "acc_stderr": 0.019506291693954854, + "acc_norm": 0.36764705882352944, + "acc_norm_stderr": 0.019506291693954854 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3262411347517731, + "acc_stderr": 0.027968453043563168, + "acc_norm": 0.3262411347517731, + "acc_norm_stderr": 0.027968453043563168 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25, + "acc_stderr": 0.04109974682633932, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04109974682633932 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.375, + "acc_stderr": 0.033016908987210894, + "acc_norm": 0.375, + 
"acc_norm_stderr": 0.033016908987210894 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.030161911930767102, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.030161911930767102 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.49795918367346936, + "acc_stderr": 0.0320089533497105, + "acc_norm": 0.49795918367346936, + "acc_norm_stderr": 0.0320089533497105 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6033755274261603, + "acc_stderr": 0.03184399873811225, + "acc_norm": 0.6033755274261603, + "acc_norm_stderr": 0.03184399873811225 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3011734028683181, + "acc_stderr": 0.011717148751648436, + "acc_norm": 0.3011734028683181, + "acc_norm_stderr": 0.011717148751648436 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.45098039215686275, + "acc_stderr": 0.03492406104163614, + "acc_norm": 0.45098039215686275, + "acc_norm_stderr": 0.03492406104163614 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5151515151515151, + "acc_stderr": 0.03902551007374448, + "acc_norm": 0.5151515151515151, + "acc_norm_stderr": 0.03902551007374448 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.25458996328029376, + "mc1_stderr": 0.015250117079156474, + "mc2": 0.40988199072836734, + "mc2_stderr": 0.014730658051782728 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4592680047225502, + "acc_stderr": 
0.01713321827653767, + "acc_norm": 0.5466351829988194, + "acc_norm_stderr": 0.017115418225226872 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + 
"harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "GAI-LLM/llama-2-koen-13b-mixed-v8", + "model_sha": "bc460419cfb0d80c3078ebedf761d8fa69e09eeb", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/GAI-LLM/llama-2-koen-13b-mixed-v9/result_2023-11-16 04:14:01.json b/GAI-LLM/llama-2-koen-13b-mixed-v9/result_2023-11-16 04:14:01.json new file mode 100644 index 0000000000000000000000000000000000000000..c3585a04358b79fea2a40235153280defb181d9b --- /dev/null +++ b/GAI-LLM/llama-2-koen-13b-mixed-v9/result_2023-11-16 04:14:01.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.378839590443686, + "acc_stderr": 0.014175915490000326, + "acc_norm": 0.447098976109215, + "acc_norm_stderr": 0.014529380160526842 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4170483967337184, + "acc_stderr": 0.004920633227844466, + "acc_norm": 0.5650268870742879, + "acc_norm_stderr": 0.004947402907996248 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.52046783625731, + "acc_stderr": 0.0383161053282193, + 
"acc_norm": 0.52046783625731, + "acc_norm_stderr": 0.0383161053282193 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4368932038834951, + "acc_stderr": 0.04911147107365776, + "acc_norm": 0.4368932038834951, + "acc_norm_stderr": 0.04911147107365776 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5236270753512133, + "acc_stderr": 0.017859989765176453, + "acc_norm": 0.5236270753512133, + "acc_norm_stderr": 0.017859989765176453 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4888888888888889, + "acc_stderr": 0.04318275491977976, + "acc_norm": 0.4888888888888889, + "acc_norm_stderr": 0.04318275491977976 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3872340425531915, + "acc_stderr": 0.03184389265339526, + "acc_norm": 0.3872340425531915, + "acc_norm_stderr": 0.03184389265339526 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.40963855421686746, + "acc_stderr": 0.0382840111507902, + "acc_norm": 0.40963855421686746, + "acc_norm_stderr": 0.0382840111507902 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.47266881028938906, + "acc_stderr": 0.028355633568328174, + "acc_norm": 0.47266881028938906, + "acc_norm_stderr": 0.028355633568328174 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.49327354260089684, + "acc_stderr": 0.033554765962343545, + "acc_norm": 0.49327354260089684, + "acc_norm_stderr": 0.033554765962343545 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4580152671755725, + "acc_stderr": 0.04369802690578757, + "acc_norm": 0.4580152671755725, + "acc_norm_stderr": 0.04369802690578757 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5252525252525253, + "acc_stderr": 0.03557806245087314, + "acc_norm": 
0.5252525252525253, + "acc_norm_stderr": 0.03557806245087314 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3793103448275862, + "acc_stderr": 0.04043461861916747, + "acc_norm": 0.3793103448275862, + "acc_norm_stderr": 0.04043461861916747 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.04023382273617747, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.04023382273617747 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.44537815126050423, + "acc_stderr": 0.032284106267163895, + "acc_norm": 0.44537815126050423, + "acc_norm_stderr": 0.032284106267163895 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.41794871794871796, + "acc_stderr": 0.025007329882461224, + "acc_norm": 0.41794871794871796, + "acc_norm_stderr": 0.025007329882461224 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.41, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.41, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5185185185185185, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.5185185185185185, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39901477832512317, + "acc_stderr": 0.034454876862647144, + "acc_norm": 0.39901477832512317, + "acc_norm_stderr": 0.034454876862647144 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4290322580645161, + "acc_stderr": 0.02815603653823321, + "acc_norm": 0.4290322580645161, + "acc_norm_stderr": 0.02815603653823321 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6239316239316239, + "acc_stderr": 0.03173393632969482, + "acc_norm": 0.6239316239316239, + "acc_norm_stderr": 0.03173393632969482 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.42641509433962266, + 
"acc_stderr": 0.03043779434298305, + "acc_norm": 0.42641509433962266, + "acc_norm_stderr": 0.03043779434298305 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5, + "acc_stderr": 0.04789131426105757, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04789131426105757 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.27037037037037037, + "acc_stderr": 0.027080372815145654, + "acc_norm": 0.27037037037037037, + "acc_norm_stderr": 0.027080372815145654 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.037579499229433426, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.037579499229433426 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5920398009950248, + "acc_stderr": 0.03475116365194092, + "acc_norm": 0.5920398009950248, + "acc_norm_stderr": 0.03475116365194092 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.34104046242774566, + "acc_stderr": 0.03614665424180826, + "acc_norm": 0.34104046242774566, + "acc_norm_stderr": 0.03614665424180826 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2671957671957672, + "acc_stderr": 0.02278967314577657, + "acc_norm": 0.2671957671957672, + "acc_norm_stderr": 0.02278967314577657 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3541666666666667, + "acc_stderr": 0.039994111357535424, + "acc_norm": 0.3541666666666667, + "acc_norm_stderr": 0.039994111357535424 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237101, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237101 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.476878612716763, + "acc_stderr": 0.026890297881303125, + "acc_norm": 0.476878612716763, + "acc_norm_stderr": 0.026890297881303125 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.39263803680981596, 
+ "acc_stderr": 0.03836740907831027, + "acc_norm": 0.39263803680981596, + "acc_norm_stderr": 0.03836740907831027 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4537037037037037, + "acc_stderr": 0.027701228468542602, + "acc_norm": 0.4537037037037037, + "acc_norm_stderr": 0.027701228468542602 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.44041450777202074, + "acc_stderr": 0.03582724530036094, + "acc_norm": 0.44041450777202074, + "acc_norm_stderr": 0.03582724530036094 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.04185774424022058, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.04185774424022058 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5174311926605505, + "acc_stderr": 0.02142429187185315, + "acc_norm": 0.5174311926605505, + "acc_norm_stderr": 0.02142429187185315 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2698412698412698, + "acc_stderr": 0.03970158273235173, + "acc_norm": 0.2698412698412698, + "acc_norm_stderr": 0.03970158273235173 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4150326797385621, + "acc_stderr": 0.028213504177824096, + "acc_norm": 0.4150326797385621, + "acc_norm_stderr": 0.028213504177824096 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6033057851239669, + "acc_stderr": 0.044658697805310094, + "acc_norm": 0.6033057851239669, + "acc_norm_stderr": 0.044658697805310094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40789473684210525, + "acc_stderr": 0.03999309712777472, + "acc_norm": 0.40789473684210525, + "acc_norm_stderr": 0.03999309712777472 + }, + "harness|ko_mmlu_professional_psychology|5": 
{ + "acc": 0.37254901960784315, + "acc_stderr": 0.01955964680921592, + "acc_norm": 0.37254901960784315, + "acc_norm_stderr": 0.01955964680921592 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3049645390070922, + "acc_stderr": 0.027464708442022128, + "acc_norm": 0.3049645390070922, + "acc_norm_stderr": 0.027464708442022128 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.23214285714285715, + "acc_stderr": 0.04007341809755806, + "acc_norm": 0.23214285714285715, + "acc_norm_stderr": 0.04007341809755806 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.30092592592592593, + "acc_stderr": 0.031280390843298804, + "acc_norm": 0.30092592592592593, + "acc_norm_stderr": 0.031280390843298804 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.47058823529411764, + "acc_stderr": 0.03032024326500413, + "acc_norm": 0.47058823529411764, + "acc_norm_stderr": 0.03032024326500413 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4775510204081633, + "acc_stderr": 0.03197694118713673, + "acc_norm": 0.4775510204081633, + "acc_norm_stderr": 0.03197694118713673 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5569620253164557, + "acc_stderr": 0.032335327775334835, + "acc_norm": 0.5569620253164557, + "acc_norm_stderr": 0.032335327775334835 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2953063885267275, + "acc_stderr": 0.011651061936208828, + "acc_norm": 0.2953063885267275, + 
"acc_norm_stderr": 0.011651061936208828 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.39705882352941174, + "acc_stderr": 0.03434131164719128, + "acc_norm": 0.39705882352941174, + "acc_norm_stderr": 0.03434131164719128 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.49696969696969695, + "acc_stderr": 0.03904272341431856, + "acc_norm": 0.49696969696969695, + "acc_norm_stderr": 0.03904272341431856 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2631578947368421, + "mc1_stderr": 0.015415241740237024, + "mc2": 0.42083991681473415, + "mc2_stderr": 0.014797986734621882 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5064935064935064, + "acc_stderr": 0.017188904359077304, + "acc_norm": 0.6068476977567887, + "acc_norm_stderr": 0.01679326280128707 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "GAI-LLM/llama-2-koen-13b-mixed-v9", + "model_sha": "0d3fe7df627660f041bd73a62362898e05b67196", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/GAI-LLM/polyglot-12.8b-mixed-v3/result_2023-10-26 01:40:33.json b/GAI-LLM/polyglot-12.8b-mixed-v3/result_2023-10-26 01:40:33.json new 
file mode 100644 index 0000000000000000000000000000000000000000..49ae4676386049263d858c18e9af37e6f14a2c5b --- /dev/null +++ b/GAI-LLM/polyglot-12.8b-mixed-v3/result_2023-10-26 01:40:33.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2858361774744027, + "acc_stderr": 0.013203196088537365, + "acc_norm": 0.33447098976109213, + "acc_norm_stderr": 0.013787460322441374 + }, + "harness|ko_hellaswag|10": { + "acc": 0.38548097988448515, + "acc_stderr": 0.004857140410776749, + "acc_norm": 0.5028878709420435, + "acc_norm_stderr": 0.004989698183207817 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.30994152046783624, + "acc_stderr": 0.03546976959393161, + "acc_norm": 0.30994152046783624, + "acc_norm_stderr": 0.03546976959393161 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.18446601941747573, + "acc_stderr": 0.03840423627288276, + "acc_norm": 0.18446601941747573, + "acc_norm_stderr": 0.03840423627288276 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.015671006009339572, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.015671006009339572 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.22962962962962963, + "acc_stderr": 0.036333844140734636, + "acc_norm": 0.22962962962962963, + "acc_norm_stderr": 0.036333844140734636 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2127659574468085, + "acc_stderr": 0.026754391348039787, + "acc_norm": 0.2127659574468085, + "acc_norm_stderr": 0.026754391348039787 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.21686746987951808, + "acc_stderr": 0.03208284450356365, + "acc_norm": 0.21686746987951808, + "acc_norm_stderr": 0.03208284450356365 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.31189710610932475, + "acc_stderr": 0.02631185807185416, + "acc_norm": 
0.31189710610932475, + "acc_norm_stderr": 0.02631185807185416 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.20179372197309417, + "acc_stderr": 0.02693611191280227, + "acc_norm": 0.20179372197309417, + "acc_norm_stderr": 0.02693611191280227 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.22900763358778625, + "acc_stderr": 0.036853466317118506, + "acc_norm": 0.22900763358778625, + "acc_norm_stderr": 0.036853466317118506 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.24242424242424243, + "acc_stderr": 0.03053289223393203, + "acc_norm": 0.24242424242424243, + "acc_norm_stderr": 0.03053289223393203 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.296551724137931, + "acc_stderr": 0.038061426873099935, + "acc_norm": 0.296551724137931, + "acc_norm_stderr": 0.038061426873099935 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.04617034827006716, + "acc_norm": 0.3137254901960784, + "acc_norm_stderr": 0.04617034827006716 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.23109243697478993, + "acc_stderr": 0.027381406927868963, + "acc_norm": 0.23109243697478993, + "acc_norm_stderr": 0.027381406927868963 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.21025641025641026, + "acc_stderr": 0.020660597485026928, + "acc_norm": 0.21025641025641026, + "acc_norm_stderr": 0.020660597485026928 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.25, + "acc_stderr": 0.04186091791394607, + 
"acc_norm": 0.25, + "acc_norm_stderr": 0.04186091791394607 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.26108374384236455, + "acc_stderr": 0.0309037969521145, + "acc_norm": 0.26108374384236455, + "acc_norm_stderr": 0.0309037969521145 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.25161290322580643, + "acc_stderr": 0.024685979286239963, + "acc_norm": 0.25161290322580643, + "acc_norm_stderr": 0.024685979286239963 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.23076923076923078, + "acc_stderr": 0.027601921381417604, + "acc_norm": 0.23076923076923078, + "acc_norm_stderr": 0.027601921381417604 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.23773584905660378, + "acc_stderr": 0.026199808807561932, + "acc_norm": 0.23773584905660378, + "acc_norm_stderr": 0.026199808807561932 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03955932861795833, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03955932861795833 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.02684205787383371, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.02684205787383371 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + "acc_stderr": 0.03684881521389024, + "acc_norm": 0.2847682119205298, + "acc_norm_stderr": 0.03684881521389024 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.263681592039801, + "acc_stderr": 0.03115715086935554, + "acc_norm": 0.263681592039801, + "acc_norm_stderr": 0.03115715086935554 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.24855491329479767, + "acc_stderr": 0.03295304696818317, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 0.03295304696818317 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2671957671957672, + "acc_stderr": 0.022789673145776578, + "acc_norm": 0.2671957671957672, + "acc_norm_stderr": 0.022789673145776578 + }, + 
"harness|ko_mmlu_college_biology|5": { + "acc": 0.2569444444444444, + "acc_stderr": 0.036539469694421, + "acc_norm": 0.2569444444444444, + "acc_norm_stderr": 0.036539469694421 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036846, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036846 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.23121387283236994, + "acc_stderr": 0.022698657167855716, + "acc_norm": 0.23121387283236994, + "acc_norm_stderr": 0.022698657167855716 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2883435582822086, + "acc_stderr": 0.035590395316173425, + "acc_norm": 0.2883435582822086, + "acc_norm_stderr": 0.035590395316173425 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.024922001168886338, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.024922001168886338 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.2694300518134715, + "acc_stderr": 0.03201867122877794, + "acc_norm": 0.2694300518134715, + "acc_norm_stderr": 0.03201867122877794 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.03999423879281336, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.03999423879281336 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.26422018348623855, + "acc_stderr": 0.0189041641715102, + "acc_norm": 0.26422018348623855, + "acc_norm_stderr": 0.0189041641715102 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.19047619047619047, + "acc_stderr": 0.035122074123020534, + "acc_norm": 0.19047619047619047, + "acc_norm_stderr": 
0.035122074123020534 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2581699346405229, + "acc_stderr": 0.02505850331695815, + "acc_norm": 0.2581699346405229, + "acc_norm_stderr": 0.02505850331695815 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.22, + "acc_stderr": 0.041633319989322674, + "acc_norm": 0.22, + "acc_norm_stderr": 0.041633319989322674 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.36363636363636365, + "acc_stderr": 0.043913262867240704, + "acc_norm": 0.36363636363636365, + "acc_norm_stderr": 0.043913262867240704 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.28289473684210525, + "acc_stderr": 0.03665349695640767, + "acc_norm": 0.28289473684210525, + "acc_norm_stderr": 0.03665349695640767 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.20098039215686275, + "acc_stderr": 0.016211938889655574, + "acc_norm": 0.20098039215686275, + "acc_norm_stderr": 0.016211938889655574 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.22695035460992907, + "acc_stderr": 0.02498710636564298, + "acc_norm": 0.22695035460992907, + "acc_norm_stderr": 0.02498710636564298 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25, + "acc_stderr": 0.04109974682633932, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04109974682633932 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.03275773486100999, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.03275773486100999 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24692737430167597, + "acc_stderr": 0.014422292204808852, + "acc_norm": 0.24692737430167597, + "acc_norm_stderr": 0.014422292204808852 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + 
"acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.44485294117647056, + "acc_stderr": 0.030187532060329383, + "acc_norm": 0.44485294117647056, + "acc_norm_stderr": 0.030187532060329383 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.22857142857142856, + "acc_stderr": 0.026882144922307748, + "acc_norm": 0.22857142857142856, + "acc_norm_stderr": 0.026882144922307748 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.32489451476793246, + "acc_stderr": 0.030486039389105303, + "acc_norm": 0.32489451476793246, + "acc_norm_stderr": 0.030486039389105303 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.25749674054758803, + "acc_stderr": 0.011167706014904138, + "acc_norm": 0.25749674054758803, + "acc_norm_stderr": 0.011167706014904138 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.27941176470588236, + "acc_stderr": 0.031493281045079556, + "acc_norm": 0.27941176470588236, + "acc_norm_stderr": 0.031493281045079556 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.23030303030303031, + "acc_stderr": 0.032876667586034886, + "acc_norm": 0.23030303030303031, + "acc_norm_stderr": 0.032876667586034886 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2350061199510404, + "mc1_stderr": 0.014843061507731613, + "mc2": 0.39066594086735945, + "mc2_stderr": 0.014736605286215685 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.30932703659976385, + "acc_stderr": 0.01589132050552089, + "acc_norm": 0.3990554899645809, + "acc_norm_stderr": 0.0168363772928493 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + 
"harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + 
"harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "GAI-LLM/polyglot-12.8b-mixed-v3", + "model_sha": "ab3c7b46c35cebb556b448db959d885a99f31220", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/GUSSSSSSSSSSS/polyglot-ko-12.8b-instruction/result_2023-11-06 04:46:28.json b/GUSSSSSSSSSSS/polyglot-ko-12.8b-instruction/result_2023-11-06 04:46:28.json new file mode 100644 index 0000000000000000000000000000000000000000..705573b694e9d93486430966533a58f3cf1388b5 --- /dev/null +++ b/GUSSSSSSSSSSS/polyglot-ko-12.8b-instruction/result_2023-11-06 04:46:28.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.24829351535836178, + "acc_stderr": 0.012624912868089764, + "acc_norm": 0.2858361774744027, + "acc_norm_stderr": 0.013203196088537369 + }, + "harness|ko_hellaswag|10": { + "acc": 0.35371439952200756, + "acc_stderr": 0.004771447244095125, + "acc_norm": 0.4420434176458873, + "acc_norm_stderr": 0.004956147046108963 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.28654970760233917, + "acc_stderr": 0.034678266857038245, + "acc_norm": 0.28654970760233917, + "acc_norm_stderr": 0.034678266857038245 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.21359223300970873, + "acc_stderr": 0.040580420156460344, + "acc_norm": 0.21359223300970873, + "acc_norm_stderr": 0.040580420156460344 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2681992337164751, + "acc_stderr": 0.015842430835269445, + "acc_norm": 0.2681992337164751, + "acc_norm_stderr": 0.015842430835269445 + }, + 
"harness|ko_mmlu_anatomy|5": { + "acc": 0.2074074074074074, + "acc_stderr": 0.03502553170678317, + "acc_norm": 0.2074074074074074, + "acc_norm_stderr": 0.03502553170678317 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.23404255319148937, + "acc_stderr": 0.027678452578212373, + "acc_norm": 0.23404255319148937, + "acc_norm_stderr": 0.027678452578212373 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.2469879518072289, + "acc_stderr": 0.03357351982064536, + "acc_norm": 0.2469879518072289, + "acc_norm_stderr": 0.03357351982064536 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.26366559485530544, + "acc_stderr": 0.02502553850053234, + "acc_norm": 0.26366559485530544, + "acc_norm_stderr": 0.02502553850053234 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.18385650224215247, + "acc_stderr": 0.025998379092356513, + "acc_norm": 0.18385650224215247, + "acc_norm_stderr": 0.025998379092356513 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.21374045801526717, + "acc_stderr": 0.0359546161177469, + "acc_norm": 0.21374045801526717, + "acc_norm_stderr": 0.0359546161177469 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.25757575757575757, + "acc_stderr": 0.03115626951964684, + "acc_norm": 0.25757575757575757, + "acc_norm_stderr": 0.03115626951964684 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.27586206896551724, + "acc_stderr": 0.03724563619774632, + "acc_norm": 0.27586206896551724, + "acc_norm_stderr": 0.03724563619774632 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.10784313725490197, + "acc_stderr": 0.030864282122060136, + "acc_norm": 0.10784313725490197, + "acc_norm_stderr": 
0.030864282122060136 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.24369747899159663, + "acc_stderr": 0.02788682807838056, + "acc_norm": 0.24369747899159663, + "acc_norm_stderr": 0.02788682807838056 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.24102564102564103, + "acc_stderr": 0.0216855466653332, + "acc_norm": 0.24102564102564103, + "acc_norm_stderr": 0.0216855466653332 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.2037037037037037, + "acc_stderr": 0.03893542518824847, + "acc_norm": 0.2037037037037037, + "acc_norm_stderr": 0.03893542518824847 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.21182266009852216, + "acc_stderr": 0.028748983689941072, + "acc_norm": 0.21182266009852216, + "acc_norm_stderr": 0.028748983689941072 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2645161290322581, + "acc_stderr": 0.025091892378859275, + "acc_norm": 0.2645161290322581, + "acc_norm_stderr": 0.025091892378859275 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2606837606837607, + "acc_stderr": 0.028760348956523414, + "acc_norm": 0.2606837606837607, + "acc_norm_stderr": 0.028760348956523414 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.22641509433962265, + "acc_stderr": 0.025757559893106727, + "acc_norm": 0.22641509433962265, + "acc_norm_stderr": 0.025757559893106727 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.22727272727272727, + "acc_stderr": 0.04013964554072775, + "acc_norm": 0.22727272727272727, + "acc_norm_stderr": 0.04013964554072775 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.026842057873833713, + 
"acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.026842057873833713 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2582781456953642, + "acc_stderr": 0.035737053147634576, + "acc_norm": 0.2582781456953642, + "acc_norm_stderr": 0.035737053147634576 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.31343283582089554, + "acc_stderr": 0.03280188205348642, + "acc_norm": 0.31343283582089554, + "acc_norm_stderr": 0.03280188205348642 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.23699421965317918, + "acc_stderr": 0.03242414757483098, + "acc_norm": 0.23699421965317918, + "acc_norm_stderr": 0.03242414757483098 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2671957671957672, + "acc_stderr": 0.022789673145776575, + "acc_norm": 0.2671957671957672, + "acc_norm_stderr": 0.022789673145776575 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2638888888888889, + "acc_stderr": 0.03685651095897532, + "acc_norm": 0.2638888888888889, + "acc_norm_stderr": 0.03685651095897532 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.19, + "acc_stderr": 0.03942772444036622, + "acc_norm": 0.19, + "acc_norm_stderr": 0.03942772444036622 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.26011560693641617, + "acc_stderr": 0.023618678310069363, + "acc_norm": 0.26011560693641617, + "acc_norm_stderr": 0.023618678310069363 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3128834355828221, + "acc_stderr": 0.03642914578292404, + "acc_norm": 0.3128834355828221, + "acc_norm_stderr": 0.03642914578292404 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.025842248700902168, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.025842248700902168 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.25, + "acc_stderr": 
0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.24352331606217617, + "acc_stderr": 0.030975436386845436, + "acc_norm": 0.24352331606217617, + "acc_norm_stderr": 0.030975436386845436 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.19298245614035087, + "acc_stderr": 0.037124548537213684, + "acc_norm": 0.19298245614035087, + "acc_norm_stderr": 0.037124548537213684 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.21100917431192662, + "acc_stderr": 0.01749392240411265, + "acc_norm": 0.21100917431192662, + "acc_norm_stderr": 0.01749392240411265 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.16666666666666666, + "acc_stderr": 0.03333333333333338, + "acc_norm": 0.16666666666666666, + "acc_norm_stderr": 0.03333333333333338 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.024630048979824765, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.024630048979824765 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.371900826446281, + "acc_stderr": 0.04412015806624503, + "acc_norm": 0.371900826446281, + "acc_norm_stderr": 0.04412015806624503 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.20394736842105263, + "acc_stderr": 0.032790004063100515, + "acc_norm": 0.20394736842105263, + "acc_norm_stderr": 0.032790004063100515 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.25326797385620914, + "acc_stderr": 0.01759348689536683, + "acc_norm": 0.25326797385620914, + "acc_norm_stderr": 0.01759348689536683 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2375886524822695, + "acc_stderr": 0.025389512552729903, + "acc_norm": 0.2375886524822695, + "acc_norm_stderr": 0.025389512552729903 + }, + 
"harness|ko_mmlu_machine_learning|5": { + "acc": 0.24107142857142858, + "acc_stderr": 0.04059867246952689, + "acc_norm": 0.24107142857142858, + "acc_norm_stderr": 0.04059867246952689 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2361111111111111, + "acc_stderr": 0.02896370257079103, + "acc_norm": 0.2361111111111111, + "acc_norm_stderr": 0.02896370257079103 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.28156424581005585, + "acc_stderr": 0.015042290171866132, + "acc_norm": 0.28156424581005585, + "acc_norm_stderr": 0.015042290171866132 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.34191176470588236, + "acc_stderr": 0.02881472242225418, + "acc_norm": 0.34191176470588236, + "acc_norm_stderr": 0.02881472242225418 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.18775510204081633, + "acc_stderr": 0.02500025603954621, + "acc_norm": 0.18775510204081633, + "acc_norm_stderr": 0.02500025603954621 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.270042194092827, + "acc_stderr": 0.028900721906293426, + "acc_norm": 0.270042194092827, + "acc_norm_stderr": 0.028900721906293426 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.24185136897001303, + "acc_stderr": 0.010936550813827054, + "acc_norm": 0.24185136897001303, + "acc_norm_stderr": 0.010936550813827054 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.03019028245350195, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.03019028245350195 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2606060606060606, + "acc_stderr": 
0.03427743175816525, + "acc_norm": 0.2606060606060606, + "acc_norm_stderr": 0.03427743175816525 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.22399020807833536, + "mc1_stderr": 0.014594964329474203, + "mc2": 0.4106638009419967, + "mc2_stderr": 0.015724386722290755 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3187721369539551, + "acc_stderr": 0.01602142705530959, + "acc_norm": 0.38488783943329397, + "acc_norm_stderr": 0.01672857970149866 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + 
"harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "GUSSSSSSSSSSS/polyglot-ko-12.8b-instruction", + "model_sha": "5dd983e0688b676b814f4b9a02810de2d31dafb3", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/GritLM/GritLM-7B-KTO/result_2024-06-17 02:09:20.json b/GritLM/GritLM-7B-KTO/result_2024-06-17 02:09:20.json new file mode 100644 index 0000000000000000000000000000000000000000..a6458d7bf59d0deed079921a0443f4fec77fe6a4 --- /dev/null +++ b/GritLM/GritLM-7B-KTO/result_2024-06-17 02:09:20.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3242320819112628, + "acc_stderr": 0.01367881039951882, + "acc_norm": 0.37372013651877134, + 
"acc_norm_stderr": 0.014137708601759077 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3664608643696475, + "acc_stderr": 0.004808526802718588, + "acc_norm": 0.47122087233618803, + "acc_norm_stderr": 0.004981509099276354 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4327485380116959, + "acc_stderr": 0.03799978644370607, + "acc_norm": 0.4327485380116959, + "acc_norm_stderr": 0.03799978644370607 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5728155339805825, + "acc_stderr": 0.04897957737781168, + "acc_norm": 0.5728155339805825, + "acc_norm_stderr": 0.04897957737781168 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.45977011494252873, + "acc_stderr": 0.01782199409693354, + "acc_norm": 0.45977011494252873, + "acc_norm_stderr": 0.01782199409693354 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34074074074074073, + "acc_stderr": 0.04094376269996794, + "acc_norm": 0.34074074074074073, + "acc_norm_stderr": 0.04094376269996794 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.37872340425531914, + "acc_stderr": 0.03170995606040655, + "acc_norm": 0.37872340425531914, + "acc_norm_stderr": 0.03170995606040655 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39156626506024095, + "acc_stderr": 0.03799857454479636, + "acc_norm": 0.39156626506024095, + "acc_norm_stderr": 0.03799857454479636 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4919614147909968, + "acc_stderr": 0.028394421370984545, + "acc_norm": 0.4919614147909968, + "acc_norm_stderr": 0.028394421370984545 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.48878923766816146, + "acc_stderr": 0.033549366530984746, + "acc_norm": 0.48878923766816146, + "acc_norm_stderr": 0.033549366530984746 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.37404580152671757, + "acc_stderr": 0.042438692422305246, + "acc_norm": 
0.37404580152671757, + "acc_norm_stderr": 0.042438692422305246 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5202020202020202, + "acc_stderr": 0.03559443565563918, + "acc_norm": 0.5202020202020202, + "acc_norm_stderr": 0.03559443565563918 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.43448275862068964, + "acc_stderr": 0.041307408795554966, + "acc_norm": 0.43448275862068964, + "acc_norm_stderr": 0.041307408795554966 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.04488482852329017, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.04488482852329017 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4789915966386555, + "acc_stderr": 0.032449808499900284, + "acc_norm": 0.4789915966386555, + "acc_norm_stderr": 0.032449808499900284 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.43846153846153846, + "acc_stderr": 0.025158266016868557, + "acc_norm": 0.43846153846153846, + "acc_norm_stderr": 0.025158266016868557 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.04803752235190193, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.04803752235190193 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3891625615763547, + "acc_stderr": 0.03430462416103872, + "acc_norm": 0.3891625615763547, + "acc_norm_stderr": 0.03430462416103872 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.45483870967741935, + "acc_stderr": 
0.028327743091561063, + "acc_norm": 0.45483870967741935, + "acc_norm_stderr": 0.028327743091561063 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.030882736974138663, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.030882736974138663 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.45660377358490567, + "acc_stderr": 0.03065674869673943, + "acc_norm": 0.45660377358490567, + "acc_norm_stderr": 0.03065674869673943 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5, + "acc_stderr": 0.04789131426105757, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04789131426105757 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.32222222222222224, + "acc_stderr": 0.0284934650910286, + "acc_norm": 0.32222222222222224, + "acc_norm_stderr": 0.0284934650910286 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2582781456953642, + "acc_stderr": 0.035737053147634576, + "acc_norm": 0.2582781456953642, + "acc_norm_stderr": 0.035737053147634576 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5920398009950248, + "acc_stderr": 0.03475116365194092, + "acc_norm": 0.5920398009950248, + "acc_norm_stderr": 0.03475116365194092 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3699421965317919, + "acc_stderr": 0.036812296333943194, + "acc_norm": 0.3699421965317919, + "acc_norm_stderr": 0.036812296333943194 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3386243386243386, + "acc_stderr": 0.024373197867983056, + "acc_norm": 0.3386243386243386, + "acc_norm_stderr": 0.024373197867983056 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.040166600304512336, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.040166600304512336 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.54, 
+ "acc_stderr": 0.05009082659620333, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.48265895953757226, + "acc_stderr": 0.026902900458666647, + "acc_norm": 0.48265895953757226, + "acc_norm_stderr": 0.026902900458666647 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.44785276073619634, + "acc_stderr": 0.03906947479456602, + "acc_norm": 0.44785276073619634, + "acc_norm_stderr": 0.03906947479456602 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.42592592592592593, + "acc_stderr": 0.02751374728437942, + "acc_norm": 0.42592592592592593, + "acc_norm_stderr": 0.02751374728437942 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.47668393782383417, + "acc_stderr": 0.03604513672442206, + "acc_norm": 0.47668393782383417, + "acc_norm_stderr": 0.03604513672442206 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.30701754385964913, + "acc_stderr": 0.0433913832257986, + "acc_norm": 0.30701754385964913, + "acc_norm_stderr": 0.0433913832257986 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.47155963302752296, + "acc_stderr": 0.021402615697348044, + "acc_norm": 0.47155963302752296, + "acc_norm_stderr": 0.021402615697348044 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.0442626668137991, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.0442626668137991 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.477124183006536, + "acc_stderr": 0.028599936776089782, + "acc_norm": 0.477124183006536, + "acc_norm_stderr": 0.028599936776089782 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.44, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.44, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_international_law|5": { + 
"acc": 0.6363636363636364, + "acc_stderr": 0.043913262867240704, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.043913262867240704 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40131578947368424, + "acc_stderr": 0.03988903703336285, + "acc_norm": 0.40131578947368424, + "acc_norm_stderr": 0.03988903703336285 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.40032679738562094, + "acc_stderr": 0.019821843688271775, + "acc_norm": 0.40032679738562094, + "acc_norm_stderr": 0.019821843688271775 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32269503546099293, + "acc_stderr": 0.02788913930053479, + "acc_norm": 0.32269503546099293, + "acc_norm_stderr": 0.02788913930053479 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.44642857142857145, + "acc_stderr": 0.04718471485219588, + "acc_norm": 0.44642857142857145, + "acc_norm_stderr": 0.04718471485219588 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.375, + "acc_stderr": 0.033016908987210894, + "acc_norm": 0.375, + "acc_norm_stderr": 0.033016908987210894 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.288268156424581, + "acc_stderr": 0.015149132860209422, + "acc_norm": 0.288268156424581, + "acc_norm_stderr": 0.015149132860209422 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3161764705882353, + "acc_stderr": 0.028245687391462916, + "acc_norm": 0.3161764705882353, + "acc_norm_stderr": 0.028245687391462916 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.49387755102040815, + "acc_stderr": 0.03200682020163909, + "acc_norm": 0.49387755102040815, + "acc_norm_stderr": 0.03200682020163909 + }, + 
"harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5822784810126582, + "acc_stderr": 0.032103530322412685, + "acc_norm": 0.5822784810126582, + "acc_norm_stderr": 0.032103530322412685 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3161668839634941, + "acc_stderr": 0.011875780894386578, + "acc_norm": 0.3161668839634941, + "acc_norm_stderr": 0.011875780894386578 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4068627450980392, + "acc_stderr": 0.03447891136353382, + "acc_norm": 0.4068627450980392, + "acc_norm_stderr": 0.03447891136353382 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.41818181818181815, + "acc_stderr": 0.03851716319398396, + "acc_norm": 0.41818181818181815, + "acc_norm_stderr": 0.03851716319398396 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3023255813953488, + "mc1_stderr": 0.016077509266133036, + "mc2": 0.4868855375869235, + "mc2_stderr": 0.015699074117298418 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3825265643447462, + "acc_stderr": 0.016709165387228817, + "acc_norm": 0.4025974025974026, + "acc_norm_stderr": 0.01686102048640778 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, 
+ "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "GritLM/GritLM-7B-KTO", + "model_sha": "b5c48669508c1de18c698460c187f64e90e7df44", + "model_dtype": "torch.float16", + 
"lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/GritLM/GritLM-7B/result_2024-06-05 14:51:05.json b/GritLM/GritLM-7B/result_2024-06-05 14:51:05.json new file mode 100644 index 0000000000000000000000000000000000000000..19073610b0e426d384d73acfd72bc91310141060 --- /dev/null +++ b/GritLM/GritLM-7B/result_2024-06-05 14:51:05.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3003412969283277, + "acc_stderr": 0.013395909309957005, + "acc_norm": 0.35665529010238906, + "acc_norm_stderr": 0.013998056902620199 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3573989245170285, + "acc_stderr": 0.004782542754102084, + "acc_norm": 0.4565823541127266, + "acc_norm_stderr": 0.004970933420231928 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.03811079669833531, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.03811079669833531 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5339805825242718, + "acc_stderr": 0.04939291447273481, + "acc_norm": 0.5339805825242718, + "acc_norm_stderr": 0.04939291447273481 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.43039591315453385, + "acc_stderr": 0.017705868776292374, + "acc_norm": 0.43039591315453385, + "acc_norm_stderr": 0.017705868776292374 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34074074074074073, + "acc_stderr": 0.040943762699967926, + "acc_norm": 0.34074074074074073, + "acc_norm_stderr": 0.040943762699967926 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.23, + "acc_stderr": 0.042295258468165065, + "acc_norm": 0.23, + "acc_norm_stderr": 0.042295258468165065 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.37872340425531914, + "acc_stderr": 0.03170995606040655, + "acc_norm": 0.37872340425531914, + "acc_norm_stderr": 0.03170995606040655 + }, + "harness|ko_mmlu_virology|5": { + "acc": 
0.4397590361445783, + "acc_stderr": 0.03864139923699121, + "acc_norm": 0.4397590361445783, + "acc_norm_stderr": 0.03864139923699121 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.45980707395498394, + "acc_stderr": 0.028306190403305696, + "acc_norm": 0.45980707395498394, + "acc_norm_stderr": 0.028306190403305696 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.47533632286995514, + "acc_stderr": 0.033516951676526276, + "acc_norm": 0.47533632286995514, + "acc_norm_stderr": 0.033516951676526276 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3816793893129771, + "acc_stderr": 0.04260735157644561, + "acc_norm": 0.3816793893129771, + "acc_norm_stderr": 0.04260735157644561 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.42, + "acc_stderr": 0.04960449637488583, + "acc_norm": 0.42, + "acc_norm_stderr": 0.04960449637488583 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5151515151515151, + "acc_stderr": 0.03560716516531061, + "acc_norm": 0.5151515151515151, + "acc_norm_stderr": 0.03560716516531061 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.38620689655172413, + "acc_stderr": 0.04057324734419034, + "acc_norm": 0.38620689655172413, + "acc_norm_stderr": 0.04057324734419034 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.19607843137254902, + "acc_stderr": 0.03950581861179964, + "acc_norm": 0.19607843137254902, + "acc_norm_stderr": 0.03950581861179964 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5294117647058824, + "acc_stderr": 0.03242225027115006, + "acc_norm": 0.5294117647058824, + "acc_norm_stderr": 0.03242225027115006 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.441025641025641, + "acc_stderr": 0.02517404838400077, + "acc_norm": 0.441025641025641, + "acc_norm_stderr": 0.02517404838400077 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.62, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.62, + "acc_norm_stderr": 0.04878317312145632 + }, + 
"harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5277777777777778, + "acc_stderr": 0.048262172941398944, + "acc_norm": 0.5277777777777778, + "acc_norm_stderr": 0.048262172941398944 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.35960591133004927, + "acc_stderr": 0.03376458246509568, + "acc_norm": 0.35960591133004927, + "acc_norm_stderr": 0.03376458246509568 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.44193548387096776, + "acc_stderr": 0.028251557906849738, + "acc_norm": 0.44193548387096776, + "acc_norm_stderr": 0.028251557906849738 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6410256410256411, + "acc_stderr": 0.03142616993791923, + "acc_norm": 0.6410256410256411, + "acc_norm_stderr": 0.03142616993791923 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.41132075471698115, + "acc_stderr": 0.030285009259009812, + "acc_norm": 0.41132075471698115, + "acc_norm_stderr": 0.030285009259009812 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5272727272727272, + "acc_stderr": 0.04782001791380061, + "acc_norm": 0.5272727272727272, + "acc_norm_stderr": 0.04782001791380061 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3296296296296296, + "acc_stderr": 0.028661201116524575, + "acc_norm": 0.3296296296296296, + "acc_norm_stderr": 0.028661201116524575 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2582781456953642, + "acc_stderr": 0.035737053147634576, + "acc_norm": 0.2582781456953642, + "acc_norm_stderr": 0.035737053147634576 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5870646766169154, + "acc_stderr": 0.034815208033673474, + "acc_norm": 0.5870646766169154, + "acc_norm_stderr": 0.034815208033673474 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.35260115606936415, + "acc_stderr": 0.03643037168958549, + "acc_norm": 
0.35260115606936415, + "acc_norm_stderr": 0.03643037168958549 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3412698412698413, + "acc_stderr": 0.02441923496681907, + "acc_norm": 0.3412698412698413, + "acc_norm_stderr": 0.02441923496681907 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3541666666666667, + "acc_stderr": 0.039994111357535424, + "acc_norm": 0.3541666666666667, + "acc_norm_stderr": 0.039994111357535424 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5115606936416185, + "acc_stderr": 0.02691189868637793, + "acc_norm": 0.5115606936416185, + "acc_norm_stderr": 0.02691189868637793 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4785276073619632, + "acc_stderr": 0.0392474687675113, + "acc_norm": 0.4785276073619632, + "acc_norm_stderr": 0.0392474687675113 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4228395061728395, + "acc_stderr": 0.027487472980871598, + "acc_norm": 0.4228395061728395, + "acc_norm_stderr": 0.027487472980871598 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.47668393782383417, + "acc_stderr": 0.03604513672442206, + "acc_norm": 0.47668393782383417, + "acc_norm_stderr": 0.03604513672442206 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2982456140350877, + "acc_stderr": 0.043036840335373173, + "acc_norm": 0.2982456140350877, + "acc_norm_stderr": 0.043036840335373173 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.47155963302752296, + "acc_stderr": 0.021402615697348047, 
+ "acc_norm": 0.47155963302752296, + "acc_norm_stderr": 0.021402615697348047 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4523809523809524, + "acc_stderr": 0.044518079590553275, + "acc_norm": 0.4523809523809524, + "acc_norm_stderr": 0.044518079590553275 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.46405228758169936, + "acc_stderr": 0.02855582751652878, + "acc_norm": 0.46405228758169936, + "acc_norm_stderr": 0.02855582751652878 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.41, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.41, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6859504132231405, + "acc_stderr": 0.042369647530410184, + "acc_norm": 0.6859504132231405, + "acc_norm_stderr": 0.042369647530410184 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.34210526315789475, + "acc_stderr": 0.03860731599316092, + "acc_norm": 0.34210526315789475, + "acc_norm_stderr": 0.03860731599316092 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3937908496732026, + "acc_stderr": 0.01976621199107307, + "acc_norm": 0.3937908496732026, + "acc_norm_stderr": 0.01976621199107307 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3617021276595745, + "acc_stderr": 0.028663820147199492, + "acc_norm": 0.3617021276595745, + "acc_norm_stderr": 0.028663820147199492 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.44642857142857145, + "acc_stderr": 0.04718471485219588, + "acc_norm": 0.44642857142857145, + "acc_norm_stderr": 0.04718471485219588 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.03293377139415191, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.03293377139415191 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2446927374301676, + "acc_stderr": 0.014378169884098426, + "acc_norm": 0.2446927374301676, + "acc_norm_stderr": 0.014378169884098426 + }, + "harness|ko_mmlu_college_computer_science|5": 
{ + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.55, + "acc_stderr": 0.04999999999999999, + "acc_norm": 0.55, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3272058823529412, + "acc_stderr": 0.028501452860396567, + "acc_norm": 0.3272058823529412, + "acc_norm_stderr": 0.028501452860396567 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5224489795918368, + "acc_stderr": 0.03197694118713672, + "acc_norm": 0.5224489795918368, + "acc_norm_stderr": 0.03197694118713672 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5780590717299579, + "acc_stderr": 0.032148146302403695, + "acc_norm": 0.5780590717299579, + "acc_norm_stderr": 0.032148146302403695 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.32073011734028684, + "acc_stderr": 0.011921199991782622, + "acc_norm": 0.32073011734028684, + "acc_norm_stderr": 0.011921199991782622 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.0346022832723917, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.0346022832723917 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.38181818181818183, + "acc_stderr": 0.037937131711656344, + "acc_norm": 0.38181818181818183, + "acc_norm_stderr": 0.037937131711656344 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2778457772337821, + "mc1_stderr": 0.015680929364024626, + "mc2": 0.4510026253728979, + "mc2_stderr": 0.015432182201955667 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3730814639905549, + "acc_stderr": 0.016627318275137443, + "acc_norm": 0.43919716646989376, + "acc_norm_stderr": 0.0170627757447807 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + 
"harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 
1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "GritLM/GritLM-7B", + "model_sha": "13f00a0e36500c80ce12870ea513846a066004af", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/HAERAE-HUB/hae-tae_v0.1.1/result_2023-09-30 11:46:43.json b/HAERAE-HUB/hae-tae_v0.1.1/result_2023-09-30 11:46:43.json new file mode 100644 index 0000000000000000000000000000000000000000..f108fe44a38bc3752e38bae2396c9767a3ab3508 --- /dev/null +++ b/HAERAE-HUB/hae-tae_v0.1.1/result_2023-09-30 11:46:43.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.28242320819112626, + "acc_stderr": 0.01315545688409722, + "acc_norm": 0.3302047781569966, + "acc_norm_stderr": 0.013743085603760422 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3719378609838678, + "acc_stderr": 0.004823341569605419, + "acc_norm": 0.4821748655646286, + "acc_norm_stderr": 0.0049866095427490405 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.17543859649122806, + "acc_stderr": 0.029170885500727665, + "acc_norm": 0.17543859649122806, + "acc_norm_stderr": 0.029170885500727665 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.3592233009708738, + "acc_stderr": 0.04750458399041693, + "acc_norm": 0.3592233009708738, + "acc_norm_stderr": 0.04750458399041693 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.20051085568326948, + 
"acc_stderr": 0.014317653708594209, + "acc_norm": 0.20051085568326948, + "acc_norm_stderr": 0.014317653708594209 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.22962962962962963, + "acc_stderr": 0.036333844140734636, + "acc_norm": 0.22962962962962963, + "acc_norm_stderr": 0.036333844140734636 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816508, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816508 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.16170212765957448, + "acc_stderr": 0.02406850528969531, + "acc_norm": 0.16170212765957448, + "acc_norm_stderr": 0.02406850528969531 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.1927710843373494, + "acc_stderr": 0.030709824050565264, + "acc_norm": 0.1927710843373494, + "acc_norm_stderr": 0.030709824050565264 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.24115755627009647, + "acc_stderr": 0.024296594034763426, + "acc_norm": 0.24115755627009647, + "acc_norm_stderr": 0.024296594034763426 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.11659192825112108, + "acc_stderr": 0.021539639816244467, + "acc_norm": 0.11659192825112108, + "acc_norm_stderr": 0.021539639816244467 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2595419847328244, + "acc_stderr": 0.03844876139785271, + "acc_norm": 0.2595419847328244, + "acc_norm_stderr": 0.03844876139785271 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.35353535353535354, + "acc_stderr": 0.03406086723547153, + "acc_norm": 0.35353535353535354, + "acc_norm_stderr": 0.03406086723547153 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2413793103448276, + "acc_stderr": 0.03565998174135302, + "acc_norm": 0.2413793103448276, + "acc_norm_stderr": 0.03565998174135302 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 
0.3627450980392157, + "acc_stderr": 0.04784060704105653, + "acc_norm": 0.3627450980392157, + "acc_norm_stderr": 0.04784060704105653 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3487394957983193, + "acc_stderr": 0.030956636328566548, + "acc_norm": 0.3487394957983193, + "acc_norm_stderr": 0.030956636328566548 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.36153846153846153, + "acc_stderr": 0.024359581465396983, + "acc_norm": 0.36153846153846153, + "acc_norm_stderr": 0.024359581465396983 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.19, + "acc_stderr": 0.03942772444036624, + "acc_norm": 0.19, + "acc_norm_stderr": 0.03942772444036624 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.16, + "acc_stderr": 0.0368452949177471, + "acc_norm": 0.16, + "acc_norm_stderr": 0.0368452949177471 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.21296296296296297, + "acc_stderr": 0.03957835471980981, + "acc_norm": 0.21296296296296297, + "acc_norm_stderr": 0.03957835471980981 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.28078817733990147, + "acc_stderr": 0.0316185633535861, + "acc_norm": 0.28078817733990147, + "acc_norm_stderr": 0.0316185633535861 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3161290322580645, + "acc_stderr": 0.026450874489042764, + "acc_norm": 0.3161290322580645, + "acc_norm_stderr": 0.026450874489042764 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.19658119658119658, + "acc_stderr": 0.02603538609895129, + "acc_norm": 0.19658119658119658, + "acc_norm_stderr": 0.02603538609895129 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3018867924528302, + "acc_stderr": 0.02825420034443866, + "acc_norm": 0.3018867924528302, + "acc_norm_stderr": 0.02825420034443866 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.22727272727272727, + "acc_stderr": 0.040139645540727735, + "acc_norm": 0.22727272727272727, + "acc_norm_stderr": 0.040139645540727735 + }, + 
"harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.02684205787383371, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.02684205787383371 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33112582781456956, + "acc_stderr": 0.038425817186598696, + "acc_norm": 0.33112582781456956, + "acc_norm_stderr": 0.038425817186598696 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.23880597014925373, + "acc_stderr": 0.030147775935409217, + "acc_norm": 0.23880597014925373, + "acc_norm_stderr": 0.030147775935409217 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3352601156069364, + "acc_stderr": 0.03599586301247078, + "acc_norm": 0.3352601156069364, + "acc_norm_stderr": 0.03599586301247078 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2724867724867725, + "acc_stderr": 0.02293097307163334, + "acc_norm": 0.2724867724867725, + "acc_norm_stderr": 0.02293097307163334 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2569444444444444, + "acc_stderr": 0.03653946969442099, + "acc_norm": 0.2569444444444444, + "acc_norm_stderr": 0.03653946969442099 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2138728323699422, + "acc_stderr": 0.022075709251757173, + "acc_norm": 0.2138728323699422, + "acc_norm_stderr": 0.022075709251757173 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2331288343558282, + "acc_stderr": 0.0332201579577674, + "acc_norm": 0.2331288343558282, + "acc_norm_stderr": 0.0332201579577674 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.22530864197530864, + "acc_stderr": 0.02324620264781975, + "acc_norm": 0.22530864197530864, + 
"acc_norm_stderr": 0.02324620264781975 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.36787564766839376, + "acc_stderr": 0.034801756684660366, + "acc_norm": 0.36787564766839376, + "acc_norm_stderr": 0.034801756684660366 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.03999423879281336, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.03999423879281336 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.344954128440367, + "acc_stderr": 0.02038060540506697, + "acc_norm": 0.344954128440367, + "acc_norm_stderr": 0.02038060540506697 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.36507936507936506, + "acc_stderr": 0.04306241259127153, + "acc_norm": 0.36507936507936506, + "acc_norm_stderr": 0.04306241259127153 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.02609016250427905, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.02609016250427905 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.15702479338842976, + "acc_stderr": 0.03321244842547128, + "acc_norm": 0.15702479338842976, + "acc_norm_stderr": 0.03321244842547128 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3355263157894737, + "acc_stderr": 0.038424985593952694, + "acc_norm": 0.3355263157894737, + "acc_norm_stderr": 0.038424985593952694 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2173202614379085, + "acc_stderr": 0.016684820929148598, + "acc_norm": 0.2173202614379085, + "acc_norm_stderr": 0.016684820929148598 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.24113475177304963, + "acc_stderr": 
0.02551873104953776, + "acc_norm": 0.24113475177304963, + "acc_norm_stderr": 0.02551873104953776 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.14285714285714285, + "acc_stderr": 0.033213611069662696, + "acc_norm": 0.14285714285714285, + "acc_norm_stderr": 0.033213611069662696 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.0340470532865388, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.0340470532865388 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27262569832402234, + "acc_stderr": 0.014893391735249608, + "acc_norm": 0.27262569832402234, + "acc_norm_stderr": 0.014893391735249608 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036847, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036847 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4485294117647059, + "acc_stderr": 0.030211479609121593, + "acc_norm": 0.4485294117647059, + "acc_norm_stderr": 0.030211479609121593 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3183673469387755, + "acc_stderr": 0.02982253379398209, + "acc_norm": 0.3183673469387755, + "acc_norm_stderr": 0.02982253379398209 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.20675105485232068, + "acc_stderr": 0.026361651668389087, + "acc_norm": 0.20675105485232068, + "acc_norm_stderr": 0.026361651668389087 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2470664928292047, + "acc_stderr": 0.011015752255279329, + "acc_norm": 0.2470664928292047, + "acc_norm_stderr": 0.011015752255279329 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.030587591351604246, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.030587591351604246 + }, + 
"harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2727272727272727, + "acc_stderr": 0.03477691162163659, + "acc_norm": 0.2727272727272727, + "acc_norm_stderr": 0.03477691162163659 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.25458996328029376, + "mc1_stderr": 0.015250117079156475, + "mc2": 0.3974526680083883, + "mc2_stderr": 0.01475058288914894 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.29279811097992914, + "acc_stderr": 0.015644823205401334, + "acc_norm": 0.3565525383707202, + "acc_norm_stderr": 0.016467706981527445 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + 
"harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "HAERAE-HUB/hae-tae_v0.1.1", + "model_sha": "4ae77d9659bb11f158180f4b8b243d1e9ddb51f4", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/HAERAE-HUB/hae-tae_v0.1.2/result_2023-09-30 11:46:34.json b/HAERAE-HUB/hae-tae_v0.1.2/result_2023-09-30 11:46:34.json new file mode 100644 index 0000000000000000000000000000000000000000..ded436dde002f88280335e6dd939b6cdfc13cb89 --- /dev/null +++ b/HAERAE-HUB/hae-tae_v0.1.2/result_2023-09-30 11:46:34.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 
0.2909556313993174, + "acc_stderr": 0.01327307786590758, + "acc_norm": 0.3302047781569966, + "acc_norm_stderr": 0.013743085603760427 + }, + "harness|ko_hellaswag|10": { + "acc": 0.37442740489942244, + "acc_stderr": 0.004829856058603579, + "acc_norm": 0.481876120294762, + "acc_norm_stderr": 0.00498650229693118 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.17543859649122806, + "acc_stderr": 0.029170885500727665, + "acc_norm": 0.17543859649122806, + "acc_norm_stderr": 0.029170885500727665 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.3786407766990291, + "acc_stderr": 0.04802694698258973, + "acc_norm": 0.3786407766990291, + "acc_norm_stderr": 0.04802694698258973 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.20434227330779056, + "acc_stderr": 0.014419123980931906, + "acc_norm": 0.20434227330779056, + "acc_norm_stderr": 0.014419123980931906 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.21481481481481482, + "acc_stderr": 0.03547854198560826, + "acc_norm": 0.21481481481481482, + "acc_norm_stderr": 0.03547854198560826 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.15, + "acc_stderr": 0.035887028128263714, + "acc_norm": 0.15, + "acc_norm_stderr": 0.035887028128263714 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.20851063829787234, + "acc_stderr": 0.026556982117838746, + "acc_norm": 0.20851063829787234, + "acc_norm_stderr": 0.026556982117838746 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.1927710843373494, + "acc_stderr": 0.030709824050565264, + "acc_norm": 0.1927710843373494, + "acc_norm_stderr": 0.030709824050565264 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.24115755627009647, + "acc_stderr": 0.024296594034763426, + "acc_norm": 0.24115755627009647, + "acc_norm_stderr": 0.024296594034763426 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.1031390134529148, + "acc_stderr": 0.020412564289839272, + "acc_norm": 0.1031390134529148, + "acc_norm_stderr": 0.020412564289839272 + }, + 
"harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2900763358778626, + "acc_stderr": 0.03980066246467765, + "acc_norm": 0.2900763358778626, + "acc_norm_stderr": 0.03980066246467765 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909284, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909284 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3434343434343434, + "acc_stderr": 0.03383201223244441, + "acc_norm": 0.3434343434343434, + "acc_norm_stderr": 0.03383201223244441 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2413793103448276, + "acc_stderr": 0.03565998174135302, + "acc_norm": 0.2413793103448276, + "acc_norm_stderr": 0.03565998174135302 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.37254901960784315, + "acc_stderr": 0.048108401480826346, + "acc_norm": 0.37254901960784315, + "acc_norm_stderr": 0.048108401480826346 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3487394957983193, + "acc_stderr": 0.030956636328566548, + "acc_norm": 0.3487394957983193, + "acc_norm_stderr": 0.030956636328566548 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3641025641025641, + "acc_stderr": 0.024396672985094778, + "acc_norm": 0.3641025641025641, + "acc_norm_stderr": 0.024396672985094778 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036845, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036845 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036845, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036845 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.21296296296296297, + "acc_stderr": 0.03957835471980981, + "acc_norm": 0.21296296296296297, + "acc_norm_stderr": 0.03957835471980981 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.28078817733990147, + "acc_stderr": 0.0316185633535861, + "acc_norm": 0.28078817733990147, + "acc_norm_stderr": 
0.0316185633535861 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3161290322580645, + "acc_stderr": 0.026450874489042764, + "acc_norm": 0.3161290322580645, + "acc_norm_stderr": 0.026450874489042764 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.19230769230769232, + "acc_stderr": 0.025819233256483724, + "acc_norm": 0.19230769230769232, + "acc_norm_stderr": 0.025819233256483724 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2943396226415094, + "acc_stderr": 0.028049186315695248, + "acc_norm": 0.2943396226415094, + "acc_norm_stderr": 0.028049186315695248 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.22727272727272727, + "acc_stderr": 0.040139645540727735, + "acc_norm": 0.22727272727272727, + "acc_norm_stderr": 0.040139645540727735 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2518518518518518, + "acc_stderr": 0.026466117538959905, + "acc_norm": 0.2518518518518518, + "acc_norm_stderr": 0.026466117538959905 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33774834437086093, + "acc_stderr": 0.038615575462551684, + "acc_norm": 0.33774834437086093, + "acc_norm_stderr": 0.038615575462551684 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.26865671641791045, + "acc_stderr": 0.03134328358208954, + "acc_norm": 0.26865671641791045, + "acc_norm_stderr": 0.03134328358208954 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3352601156069364, + "acc_stderr": 0.03599586301247078, + "acc_norm": 0.3352601156069364, + "acc_norm_stderr": 0.03599586301247078 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.022644212615525214, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.022644212615525214 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2708333333333333, + "acc_stderr": 0.03716177437566017, + "acc_norm": 0.2708333333333333, + "acc_norm_stderr": 0.03716177437566017 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.37, + 
"acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2138728323699422, + "acc_stderr": 0.022075709251757173, + "acc_norm": 0.2138728323699422, + "acc_norm_stderr": 0.022075709251757173 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2331288343558282, + "acc_stderr": 0.0332201579577674, + "acc_norm": 0.2331288343558282, + "acc_norm_stderr": 0.0332201579577674 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.22530864197530864, + "acc_stderr": 0.02324620264781975, + "acc_norm": 0.22530864197530864, + "acc_norm_stderr": 0.02324620264781975 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.36787564766839376, + "acc_stderr": 0.034801756684660366, + "acc_norm": 0.36787564766839376, + "acc_norm_stderr": 0.034801756684660366 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.039994238792813365, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.039994238792813365 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.344954128440367, + "acc_stderr": 0.02038060540506697, + "acc_norm": 0.344954128440367, + "acc_norm_stderr": 0.02038060540506697 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3253968253968254, + "acc_stderr": 0.041905964388711366, + "acc_norm": 0.3253968253968254, + "acc_norm_stderr": 0.041905964388711366 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.026090162504279053, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.026090162504279053 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 
0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.14049586776859505, + "acc_stderr": 0.031722334260021585, + "acc_norm": 0.14049586776859505, + "acc_norm_stderr": 0.031722334260021585 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3355263157894737, + "acc_stderr": 0.038424985593952694, + "acc_norm": 0.3355263157894737, + "acc_norm_stderr": 0.038424985593952694 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.016819028375736386, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.016819028375736386 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.24468085106382978, + "acc_stderr": 0.02564555362226673, + "acc_norm": 0.24468085106382978, + "acc_norm_stderr": 0.02564555362226673 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.16071428571428573, + "acc_stderr": 0.034859460964757394, + "acc_norm": 0.16071428571428573, + "acc_norm_stderr": 0.034859460964757394 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4675925925925926, + "acc_stderr": 0.03402801581358966, + "acc_norm": 0.4675925925925926, + "acc_norm_stderr": 0.03402801581358966 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27262569832402234, + "acc_stderr": 0.014893391735249608, + "acc_norm": 0.27262569832402234, + "acc_norm_stderr": 0.014893391735249608 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4485294117647059, + "acc_stderr": 0.030211479609121593, + "acc_norm": 0.4485294117647059, + "acc_norm_stderr": 0.030211479609121593 + }, + 
"harness|ko_mmlu_security_studies|5": { + "acc": 0.3346938775510204, + "acc_stderr": 0.030209235226242314, + "acc_norm": 0.3346938775510204, + "acc_norm_stderr": 0.030209235226242314 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.19831223628691982, + "acc_stderr": 0.02595502084162111, + "acc_norm": 0.19831223628691982, + "acc_norm_stderr": 0.02595502084162111 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.24902216427640156, + "acc_stderr": 0.01104489226404077, + "acc_norm": 0.24902216427640156, + "acc_norm_stderr": 0.01104489226404077 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.030587591351604246, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.030587591351604246 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2545454545454545, + "acc_stderr": 0.03401506715249039, + "acc_norm": 0.2545454545454545, + "acc_norm_stderr": 0.03401506715249039 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.27050183598531213, + "mc1_stderr": 0.015550778332842885, + "mc2": 0.420854027075679, + "mc2_stderr": 0.014933313137954875 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3577331759149941, + "acc_stderr": 0.016479808935749976, + "acc_norm": 0.45808736717827625, + "acc_norm_stderr": 0.017129852117911147 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 
1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + 
"harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "HAERAE-HUB/hae-tae_v0.1.2", + "model_sha": "fd9094c0e91bcb07ecf2b89b36a16480e27a93dc", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/HY-KDPARK/llama-2-koen-13b-dpo-v0.4/result_2023-12-16 09:24:56.json b/HY-KDPARK/llama-2-koen-13b-dpo-v0.4/result_2023-12-16 09:24:56.json new file mode 100644 index 0000000000000000000000000000000000000000..446d504281cc63153bedc511a7ee84650127cb76 --- /dev/null +++ b/HY-KDPARK/llama-2-koen-13b-dpo-v0.4/result_2023-12-16 09:24:56.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3703071672354949, + "acc_stderr": 0.01411129875167495, + "acc_norm": 0.4308873720136519, + "acc_norm_stderr": 0.01447113339264247 + }, + "harness|ko_hellaswag|10": { + "acc": 0.41455885281816374, + "acc_stderr": 0.004916388962142332, + "acc_norm": 0.5623381796454889, + "acc_norm_stderr": 0.004950848456984546 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4853801169590643, + "acc_stderr": 0.038331852752130205, + "acc_norm": 0.4853801169590643, + "acc_norm_stderr": 0.038331852752130205 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.3786407766990291, + "acc_stderr": 0.04802694698258975, + "acc_norm": 0.3786407766990291, + "acc_norm_stderr": 0.04802694698258975 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4878671775223499, + "acc_stderr": 0.017874698667491345, + "acc_norm": 0.4878671775223499, + "acc_norm_stderr": 0.017874698667491345 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.42962962962962964, + "acc_stderr": 0.04276349494376599, + "acc_norm": 0.42962962962962964, + "acc_norm_stderr": 0.04276349494376599 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206824, + "acc_norm": 0.29, + "acc_norm_stderr": 
0.045604802157206824 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3829787234042553, + "acc_stderr": 0.03177821250236922, + "acc_norm": 0.3829787234042553, + "acc_norm_stderr": 0.03177821250236922 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42771084337349397, + "acc_stderr": 0.038515976837185335, + "acc_norm": 0.42771084337349397, + "acc_norm_stderr": 0.038515976837185335 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4662379421221865, + "acc_stderr": 0.02833327710956278, + "acc_norm": 0.4662379421221865, + "acc_norm_stderr": 0.02833327710956278 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.49327354260089684, + "acc_stderr": 0.033554765962343545, + "acc_norm": 0.49327354260089684, + "acc_norm_stderr": 0.033554765962343545 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.40458015267175573, + "acc_stderr": 0.043046937953806645, + "acc_norm": 0.40458015267175573, + "acc_norm_stderr": 0.043046937953806645 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.4898989898989899, + "acc_stderr": 0.035616254886737454, + "acc_norm": 0.4898989898989899, + "acc_norm_stderr": 0.035616254886737454 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3724137931034483, + "acc_stderr": 0.0402873153294756, + "acc_norm": 0.3724137931034483, + "acc_norm_stderr": 0.0402873153294756 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.041583075330832865, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.041583075330832865 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3865546218487395, + "acc_stderr": 0.03163145807552379, + "acc_norm": 0.3865546218487395, + "acc_norm_stderr": 0.03163145807552379 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3974358974358974, + "acc_stderr": 
0.024811920017903836, + "acc_norm": 0.3974358974358974, + "acc_norm_stderr": 0.024811920017903836 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3793103448275862, + "acc_stderr": 0.034139638059062345, + "acc_norm": 0.3793103448275862, + "acc_norm_stderr": 0.034139638059062345 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4290322580645161, + "acc_stderr": 0.028156036538233217, + "acc_norm": 0.4290322580645161, + "acc_norm_stderr": 0.028156036538233217 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6153846153846154, + "acc_stderr": 0.03187195347942466, + "acc_norm": 0.6153846153846154, + "acc_norm_stderr": 0.03187195347942466 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4226415094339623, + "acc_stderr": 0.03040233144576954, + "acc_norm": 0.4226415094339623, + "acc_norm_stderr": 0.03040233144576954 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.43636363636363634, + "acc_stderr": 0.04750185058907297, + "acc_norm": 0.43636363636363634, + "acc_norm_stderr": 0.04750185058907297 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25555555555555554, + "acc_stderr": 0.026593939101844058, + "acc_norm": 0.25555555555555554, + "acc_norm_stderr": 0.026593939101844058 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.271523178807947, + "acc_stderr": 0.03631329803969653, + "acc_norm": 0.271523178807947, + "acc_norm_stderr": 0.03631329803969653 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 
0.4975124378109453, + "acc_stderr": 0.03535490150137289, + "acc_norm": 0.4975124378109453, + "acc_norm_stderr": 0.03535490150137289 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3468208092485549, + "acc_stderr": 0.036291466701596636, + "acc_norm": 0.3468208092485549, + "acc_norm_stderr": 0.036291466701596636 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2804232804232804, + "acc_stderr": 0.02313528797432563, + "acc_norm": 0.2804232804232804, + "acc_norm_stderr": 0.02313528797432563 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3819444444444444, + "acc_stderr": 0.040629907841466674, + "acc_norm": 0.3819444444444444, + "acc_norm_stderr": 0.040629907841466674 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237101, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237101 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.44508670520231214, + "acc_stderr": 0.02675625512966377, + "acc_norm": 0.44508670520231214, + "acc_norm_stderr": 0.02675625512966377 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.39263803680981596, + "acc_stderr": 0.03836740907831027, + "acc_norm": 0.39263803680981596, + "acc_norm_stderr": 0.03836740907831027 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.44135802469135804, + "acc_stderr": 0.02762873715566877, + "acc_norm": 0.44135802469135804, + "acc_norm_stderr": 0.02762873715566877 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.46113989637305697, + "acc_stderr": 0.035975244117345775, + "acc_norm": 0.46113989637305697, + "acc_norm_stderr": 0.035975244117345775 + }, + 
"harness|ko_mmlu_econometrics|5": { + "acc": 0.22807017543859648, + "acc_stderr": 0.03947152782669415, + "acc_norm": 0.22807017543859648, + "acc_norm_stderr": 0.03947152782669415 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.48440366972477067, + "acc_stderr": 0.02142689153920805, + "acc_norm": 0.48440366972477067, + "acc_norm_stderr": 0.02142689153920805 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.039325376803928704, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.039325376803928704 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3954248366013072, + "acc_stderr": 0.027996723180631455, + "acc_norm": 0.3954248366013072, + "acc_norm_stderr": 0.027996723180631455 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5537190082644629, + "acc_stderr": 0.0453793517794788, + "acc_norm": 0.5537190082644629, + "acc_norm_stderr": 0.0453793517794788 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.375, + "acc_stderr": 0.039397364351956274, + "acc_norm": 0.375, + "acc_norm_stderr": 0.039397364351956274 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3464052287581699, + "acc_stderr": 0.019249785691717217, + "acc_norm": 0.3464052287581699, + "acc_norm_stderr": 0.019249785691717217 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2765957446808511, + "acc_stderr": 0.026684564340460997, + "acc_norm": 0.2765957446808511, + "acc_norm_stderr": 0.026684564340460997 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25892857142857145, + "acc_stderr": 0.04157751539865629, + "acc_norm": 0.25892857142857145, + "acc_norm_stderr": 0.04157751539865629 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.30092592592592593, + "acc_stderr": 0.031280390843298825, + "acc_norm": 0.30092592592592593, + 
"acc_norm_stderr": 0.031280390843298825 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3639705882352941, + "acc_stderr": 0.029227192460032025, + "acc_norm": 0.3639705882352941, + "acc_norm_stderr": 0.029227192460032025 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.45714285714285713, + "acc_stderr": 0.03189141832421397, + "acc_norm": 0.45714285714285713, + "acc_norm_stderr": 0.03189141832421397 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5316455696202531, + "acc_stderr": 0.032481974005110756, + "acc_norm": 0.5316455696202531, + "acc_norm_stderr": 0.032481974005110756 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3011734028683181, + "acc_stderr": 0.011717148751648431, + "acc_norm": 0.3011734028683181, + "acc_norm_stderr": 0.011717148751648431 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.39705882352941174, + "acc_stderr": 0.03434131164719128, + "acc_norm": 0.39705882352941174, + "acc_norm_stderr": 0.03434131164719128 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.03895658065271846, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.03895658065271846 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.27906976744186046, + "mc1_stderr": 0.015702107090627884, + "mc2": 0.43136545246089486, + "mc2_stderr": 0.014881985381415318 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4935064935064935, + "acc_stderr": 
0.01718890435907731, + "acc_norm": 0.5678866587957497, + "acc_norm_stderr": 0.017031170198851742 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + 
"harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "HY-KDPARK/llama-2-koen-13b-dpo-v0.4", + "model_sha": "a3cd8b7790f43c87f36f7e7289a1a210102dd26f", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/HY-KDPARK/llama-2-koen-13b-sft-v0.1/result_2023-11-28 04:36:29.json b/HY-KDPARK/llama-2-koen-13b-sft-v0.1/result_2023-11-28 04:36:29.json new file mode 100644 index 0000000000000000000000000000000000000000..ce390c21623e6c096b2b59ccb665e48db02d2128 --- /dev/null +++ b/HY-KDPARK/llama-2-koen-13b-sft-v0.1/result_2023-11-28 04:36:29.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3165529010238908, + "acc_stderr": 0.01359243151906808, + "acc_norm": 0.363481228668942, + "acc_norm_stderr": 0.014056207319068287 + }, + "harness|ko_hellaswag|10": { + "acc": 0.36974706233817967, + "acc_stderr": 0.004817495546789546, + "acc_norm": 0.47450707030472017, + "acc_norm_stderr": 0.00498329157828904 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4152046783625731, + "acc_stderr": 
0.03779275945503201, + "acc_norm": 0.4152046783625731, + "acc_norm_stderr": 0.03779275945503201 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.49514563106796117, + "acc_stderr": 0.049505043821289195, + "acc_norm": 0.49514563106796117, + "acc_norm_stderr": 0.049505043821289195 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4623243933588761, + "acc_stderr": 0.017829131764287187, + "acc_norm": 0.4623243933588761, + "acc_norm_stderr": 0.017829131764287187 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4222222222222222, + "acc_stderr": 0.04266763404099582, + "acc_norm": 0.4222222222222222, + "acc_norm_stderr": 0.04266763404099582 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.35319148936170214, + "acc_stderr": 0.031245325202761926, + "acc_norm": 0.35319148936170214, + "acc_norm_stderr": 0.031245325202761926 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.43373493975903615, + "acc_stderr": 0.03858158940685515, + "acc_norm": 0.43373493975903615, + "acc_norm_stderr": 0.03858158940685515 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.44694533762057875, + "acc_stderr": 0.02823776942208533, + "acc_norm": 0.44694533762057875, + "acc_norm_stderr": 0.02823776942208533 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4304932735426009, + "acc_stderr": 0.0332319730294294, + "acc_norm": 0.4304932735426009, + "acc_norm_stderr": 0.0332319730294294 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4351145038167939, + "acc_stderr": 0.04348208051644858, + "acc_norm": 0.4351145038167939, + "acc_norm_stderr": 0.04348208051644858 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5151515151515151, + "acc_stderr": 
0.0356071651653106, + "acc_norm": 0.5151515151515151, + "acc_norm_stderr": 0.0356071651653106 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4068965517241379, + "acc_stderr": 0.040937939812662374, + "acc_norm": 0.4068965517241379, + "acc_norm_stderr": 0.040937939812662374 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.16666666666666666, + "acc_stderr": 0.03708284662416542, + "acc_norm": 0.16666666666666666, + "acc_norm_stderr": 0.03708284662416542 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3907563025210084, + "acc_stderr": 0.031693802357129965, + "acc_norm": 0.3907563025210084, + "acc_norm_stderr": 0.031693802357129965 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3564102564102564, + "acc_stderr": 0.024283140529467295, + "acc_norm": 0.3564102564102564, + "acc_norm_stderr": 0.024283140529467295 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.047500773411999854, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.047500773411999854 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.31527093596059114, + "acc_stderr": 0.03269080871970186, + "acc_norm": 0.31527093596059114, + "acc_norm_stderr": 0.03269080871970186 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3903225806451613, + "acc_stderr": 0.02775125663696958, + "acc_norm": 0.3903225806451613, + "acc_norm_stderr": 0.02775125663696958 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5641025641025641, + "acc_stderr": 0.03248577511578401, + "acc_norm": 0.5641025641025641, + "acc_norm_stderr": 0.03248577511578401 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 
0.37735849056603776, + "acc_stderr": 0.02983280811479601, + "acc_norm": 0.37735849056603776, + "acc_norm_stderr": 0.02983280811479601 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.45454545454545453, + "acc_stderr": 0.04769300568972744, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.04769300568972744 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.22592592592592592, + "acc_stderr": 0.02549753263960955, + "acc_norm": 0.22592592592592592, + "acc_norm_stderr": 0.02549753263960955 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2582781456953642, + "acc_stderr": 0.035737053147634576, + "acc_norm": 0.2582781456953642, + "acc_norm_stderr": 0.035737053147634576 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.417910447761194, + "acc_stderr": 0.034875586404620636, + "acc_norm": 0.417910447761194, + "acc_norm_stderr": 0.034875586404620636 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.31213872832369943, + "acc_stderr": 0.035331333893236574, + "acc_norm": 0.31213872832369943, + "acc_norm_stderr": 0.035331333893236574 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.02306818884826111, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.02306818884826111 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3472222222222222, + "acc_stderr": 0.039812405437178615, + "acc_norm": 0.3472222222222222, + "acc_norm_stderr": 0.039812405437178615 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.3786127167630058, + "acc_stderr": 0.02611374936131034, + "acc_norm": 0.3786127167630058, + "acc_norm_stderr": 0.02611374936131034 + }, + 
"harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.37423312883435583, + "acc_stderr": 0.03802068102899615, + "acc_norm": 0.37423312883435583, + "acc_norm_stderr": 0.03802068102899615 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3950617283950617, + "acc_stderr": 0.027201117666925647, + "acc_norm": 0.3950617283950617, + "acc_norm_stderr": 0.027201117666925647 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.42487046632124353, + "acc_stderr": 0.0356747133521254, + "acc_norm": 0.42487046632124353, + "acc_norm_stderr": 0.0356747133521254 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2894736842105263, + "acc_stderr": 0.04266339443159394, + "acc_norm": 0.2894736842105263, + "acc_norm_stderr": 0.04266339443159394 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.43302752293577984, + "acc_stderr": 0.021244146569074338, + "acc_norm": 0.43302752293577984, + "acc_norm_stderr": 0.021244146569074338 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.20634920634920634, + "acc_stderr": 0.0361960452412425, + "acc_norm": 0.20634920634920634, + "acc_norm_stderr": 0.0361960452412425 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3627450980392157, + "acc_stderr": 0.0275300784471103, + "acc_norm": 0.3627450980392157, + "acc_norm_stderr": 0.0275300784471103 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5371900826446281, + "acc_stderr": 0.04551711196104218, + "acc_norm": 0.5371900826446281, + "acc_norm_stderr": 0.04551711196104218 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3026315789473684, + "acc_stderr": 0.037385206761196686, + "acc_norm": 0.3026315789473684, + "acc_norm_stderr": 
0.037385206761196686 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3758169934640523, + "acc_stderr": 0.019594021136577443, + "acc_norm": 0.3758169934640523, + "acc_norm_stderr": 0.019594021136577443 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3262411347517731, + "acc_stderr": 0.027968453043563168, + "acc_norm": 0.3262411347517731, + "acc_norm_stderr": 0.027968453043563168 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.22321428571428573, + "acc_stderr": 0.039523019677025116, + "acc_norm": 0.22321428571428573, + "acc_norm_stderr": 0.039523019677025116 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2175925925925926, + "acc_stderr": 0.02813968944485967, + "acc_norm": 0.2175925925925926, + "acc_norm_stderr": 0.02813968944485967 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24581005586592178, + "acc_stderr": 0.014400296429225606, + "acc_norm": 0.24581005586592178, + "acc_norm_stderr": 0.014400296429225606 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.35, + "acc_stderr": 0.04793724854411019, + "acc_norm": 0.35, + "acc_norm_stderr": 0.04793724854411019 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3602941176470588, + "acc_stderr": 0.029163128570670736, + "acc_norm": 0.3602941176470588, + "acc_norm_stderr": 0.029163128570670736 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.32653061224489793, + "acc_stderr": 0.03002105623844031, + "acc_norm": 0.32653061224489793, + "acc_norm_stderr": 0.03002105623844031 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.4978902953586498, + "acc_stderr": 0.032546938018020076, + "acc_norm": 0.4978902953586498, + "acc_norm_stderr": 0.032546938018020076 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.28292046936114734, + 
"acc_stderr": 0.011503891323188978, + "acc_norm": 0.28292046936114734, + "acc_norm_stderr": 0.011503891323188978 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.3872549019607843, + "acc_stderr": 0.03418931233833344, + "acc_norm": 0.3872549019607843, + "acc_norm_stderr": 0.03418931233833344 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.46060606060606063, + "acc_stderr": 0.03892207016552013, + "acc_norm": 0.46060606060606063, + "acc_norm_stderr": 0.03892207016552013 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2974296205630355, + "mc1_stderr": 0.016002651487361005, + "mc2": 0.4736196468171595, + "mc2_stderr": 0.016592688559874832 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.36363636363636365, + "acc_stderr": 0.016538691603327715, + "acc_norm": 0.4049586776859504, + "acc_norm_stderr": 0.01687694116504561 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + 
"harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "HY-KDPARK/llama-2-koen-13b-sft-v0.1", + "model_sha": "3b75cac58ea131920b39541547815d14caaa7082", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/HY-KDPARK/llama-2-koen-13b-sft-v0.3/result_2023-12-10 07:14:00.json 
b/HY-KDPARK/llama-2-koen-13b-sft-v0.3/result_2023-12-10 07:14:00.json new file mode 100644 index 0000000000000000000000000000000000000000..94190194dfb07b706cd068158f7697bdb476d0b9 --- /dev/null +++ b/HY-KDPARK/llama-2-koen-13b-sft-v0.3/result_2023-12-10 07:14:00.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4044368600682594, + "acc_stderr": 0.014342036483436175, + "acc_norm": 0.4726962457337884, + "acc_norm_stderr": 0.014589589101985998 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4191396136227843, + "acc_stderr": 0.004924098711864585, + "acc_norm": 0.5668193586934873, + "acc_norm_stderr": 0.0049450236570322765 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5964912280701754, + "acc_stderr": 0.03762738699917057, + "acc_norm": 0.5964912280701754, + "acc_norm_stderr": 0.03762738699917057 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5922330097087378, + "acc_stderr": 0.04865777570410769, + "acc_norm": 0.5922330097087378, + "acc_norm_stderr": 0.04865777570410769 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5683269476372924, + "acc_stderr": 0.017712228939299798, + "acc_norm": 0.5683269476372924, + "acc_norm_stderr": 0.017712228939299798 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45925925925925926, + "acc_stderr": 0.04304979692464245, + "acc_norm": 0.45925925925925926, + "acc_norm_stderr": 0.04304979692464245 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4595744680851064, + "acc_stderr": 0.032579014820998356, + "acc_norm": 0.4595744680851064, + "acc_norm_stderr": 0.032579014820998356 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42168674698795183, + "acc_stderr": 0.038444531817709175, + "acc_norm": 0.42168674698795183, + "acc_norm_stderr": 0.038444531817709175 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 
0.4983922829581994, + "acc_stderr": 0.02839794490780661, + "acc_norm": 0.4983922829581994, + "acc_norm_stderr": 0.02839794490780661 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4798206278026906, + "acc_stderr": 0.033530461674123, + "acc_norm": 0.4798206278026906, + "acc_norm_stderr": 0.033530461674123 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5267175572519084, + "acc_stderr": 0.04379024936553893, + "acc_norm": 0.5267175572519084, + "acc_norm_stderr": 0.04379024936553893 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5909090909090909, + "acc_stderr": 0.03502975799413007, + "acc_norm": 0.5909090909090909, + "acc_norm_stderr": 0.03502975799413007 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4068965517241379, + "acc_stderr": 0.04093793981266237, + "acc_norm": 0.4068965517241379, + "acc_norm_stderr": 0.04093793981266237 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.04389869956808778, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.04389869956808778 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.453781512605042, + "acc_stderr": 0.03233943468182088, + "acc_norm": 0.453781512605042, + "acc_norm_stderr": 0.03233943468182088 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.47435897435897434, + "acc_stderr": 0.02531764972644865, + "acc_norm": 0.47435897435897434, + "acc_norm_stderr": 0.02531764972644865 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.53, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.53, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 
0.5555555555555556, + "acc_stderr": 0.04803752235190192, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.04803752235190192 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39408866995073893, + "acc_stderr": 0.03438157967036543, + "acc_norm": 0.39408866995073893, + "acc_norm_stderr": 0.03438157967036543 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.45806451612903226, + "acc_stderr": 0.028343787250540636, + "acc_norm": 0.45806451612903226, + "acc_norm_stderr": 0.028343787250540636 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7521367521367521, + "acc_stderr": 0.0282863240755644, + "acc_norm": 0.7521367521367521, + "acc_norm_stderr": 0.0282863240755644 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.43018867924528303, + "acc_stderr": 0.030471445867183238, + "acc_norm": 0.43018867924528303, + "acc_norm_stderr": 0.030471445867183238 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5636363636363636, + "acc_stderr": 0.04750185058907296, + "acc_norm": 0.5636363636363636, + "acc_norm_stderr": 0.04750185058907296 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.27037037037037037, + "acc_stderr": 0.027080372815145658, + "acc_norm": 0.27037037037037037, + "acc_norm_stderr": 0.027080372815145658 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.03757949922943342, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.03757949922943342 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5870646766169154, + "acc_stderr": 0.03481520803367348, + "acc_norm": 0.5870646766169154, + "acc_norm_stderr": 0.03481520803367348 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4277456647398844, + "acc_stderr": 0.03772446857518027, + "acc_norm": 0.4277456647398844, + "acc_norm_stderr": 0.03772446857518027 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.023517294335963286, + "acc_norm": 0.2962962962962963, + 
"acc_norm_stderr": 0.023517294335963286 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.04122728707651282, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.04122728707651282 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.65, + "acc_stderr": 0.04793724854411018, + "acc_norm": 0.65, + "acc_norm_stderr": 0.04793724854411018 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.49710982658959535, + "acc_stderr": 0.02691864538323901, + "acc_norm": 0.49710982658959535, + "acc_norm_stderr": 0.02691864538323901 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.49079754601226994, + "acc_stderr": 0.039277056007874414, + "acc_norm": 0.49079754601226994, + "acc_norm_stderr": 0.039277056007874414 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.49691358024691357, + "acc_stderr": 0.027820214158594384, + "acc_norm": 0.49691358024691357, + "acc_norm_stderr": 0.027820214158594384 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5544041450777202, + "acc_stderr": 0.03587014986075659, + "acc_norm": 0.5544041450777202, + "acc_norm_stderr": 0.03587014986075659 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.0414243971948936, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.0414243971948936 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5908256880733945, + "acc_stderr": 0.021080670264433738, + "acc_norm": 0.5908256880733945, + "acc_norm_stderr": 0.021080670264433738 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.36507936507936506, + "acc_stderr": 0.043062412591271526, + "acc_norm": 
0.36507936507936506, + "acc_norm_stderr": 0.043062412591271526 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.02845263998508801, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.02845263998508801 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5702479338842975, + "acc_stderr": 0.04519082021319773, + "acc_norm": 0.5702479338842975, + "acc_norm_stderr": 0.04519082021319773 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4407894736842105, + "acc_stderr": 0.04040311062490436, + "acc_norm": 0.4407894736842105, + "acc_norm_stderr": 0.04040311062490436 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.019722058939618068, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.019722058939618068 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.028121636040639882, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.028121636040639882 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.26785714285714285, + "acc_stderr": 0.04203277291467762, + "acc_norm": 0.26785714285714285, + "acc_norm_stderr": 0.04203277291467762 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.032757734861009996, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.032757734861009996 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.26033519553072626, + "acc_stderr": 0.014676252009319464, + "acc_norm": 0.26033519553072626, + "acc_norm_stderr": 0.014676252009319464 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.53, + 
"acc_stderr": 0.050161355804659205, + "acc_norm": 0.53, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4117647058823529, + "acc_stderr": 0.029896163033125474, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.029896163033125474 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5469387755102041, + "acc_stderr": 0.031867859300041275, + "acc_norm": 0.5469387755102041, + "acc_norm_stderr": 0.031867859300041275 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6244725738396625, + "acc_stderr": 0.03152256243091157, + "acc_norm": 0.6244725738396625, + "acc_norm_stderr": 0.03152256243091157 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3226857887874837, + "acc_stderr": 0.011940264193195986, + "acc_norm": 0.3226857887874837, + "acc_norm_stderr": 0.011940264193195986 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5147058823529411, + "acc_stderr": 0.035077938347913236, + "acc_norm": 0.5147058823529411, + "acc_norm_stderr": 0.035077938347913236 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5818181818181818, + "acc_stderr": 0.03851716319398393, + "acc_norm": 0.5818181818181818, + "acc_norm_stderr": 0.03851716319398393 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2827417380660955, + "mc1_stderr": 0.015764770836777305, + "mc2": 0.442704104876821, + "mc2_stderr": 0.015215337318397937 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4462809917355372, + "acc_stderr": 0.017090852631668332, + "acc_norm": 0.4887839433293979, + "acc_norm_stderr": 0.017186028469489287 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, 
+ "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 
1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "HY-KDPARK/llama-2-koen-13b-sft-v0.3", + "model_sha": "5130b6ccb175caaddd0812cfc2f8b1fd3bfe4ae4", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/HanaGroup/Mini_Orca_16_32/result_2023-11-20 12:28:29.json b/HanaGroup/Mini_Orca_16_32/result_2023-11-20 12:28:29.json new file mode 100644 index 0000000000000000000000000000000000000000..35408fd8db04c16af3563e1b2ec0594976ab5895 --- /dev/null +++ b/HanaGroup/Mini_Orca_16_32/result_2023-11-20 12:28:29.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.439419795221843, + "acc_stderr": 0.014503747823580127, + "acc_norm": 0.4778156996587031, + "acc_norm_stderr": 0.014597001927076136 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4076877116112328, + "acc_stderr": 0.00490400267618433, + "acc_norm": 0.5386377215694085, + "acc_norm_stderr": 0.0049748608784644325 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5789473684210527, + "acc_stderr": 0.03786720706234214, + "acc_norm": 0.5789473684210527, + "acc_norm_stderr": 0.03786720706234214 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6213592233009708, + "acc_stderr": 0.04802694698258975, + "acc_norm": 0.6213592233009708, + "acc_norm_stderr": 0.04802694698258975 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5721583652618135, + "acc_stderr": 0.017692787927803724, + "acc_norm": 0.5721583652618135, + "acc_norm_stderr": 0.017692787927803724 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4074074074074074, + 
"acc_stderr": 0.04244633238353229, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.04244633238353229 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.42127659574468085, + "acc_stderr": 0.03227834510146268, + "acc_norm": 0.42127659574468085, + "acc_norm_stderr": 0.03227834510146268 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4036144578313253, + "acc_stderr": 0.03819486140758397, + "acc_norm": 0.4036144578313253, + "acc_norm_stderr": 0.03819486140758397 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4983922829581994, + "acc_stderr": 0.02839794490780661, + "acc_norm": 0.4983922829581994, + "acc_norm_stderr": 0.02839794490780661 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.515695067264574, + "acc_stderr": 0.0335412657542081, + "acc_norm": 0.515695067264574, + "acc_norm_stderr": 0.0335412657542081 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48854961832061067, + "acc_stderr": 0.043841400240780176, + "acc_norm": 0.48854961832061067, + "acc_norm_stderr": 0.043841400240780176 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5959595959595959, + "acc_stderr": 0.03496130972056127, + "acc_norm": 0.5959595959595959, + "acc_norm_stderr": 0.03496130972056127 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.43448275862068964, + "acc_stderr": 0.041307408795554966, + "acc_norm": 0.43448275862068964, + "acc_norm_stderr": 0.041307408795554966 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.042801058373643966, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.042801058373643966 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + 
"acc": 0.5042016806722689, + "acc_stderr": 0.03247734334448111, + "acc_norm": 0.5042016806722689, + "acc_norm_stderr": 0.03247734334448111 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.44358974358974357, + "acc_stderr": 0.025189149894764194, + "acc_norm": 0.44358974358974357, + "acc_norm_stderr": 0.025189149894764194 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3694581280788177, + "acc_stderr": 0.03395970381998574, + "acc_norm": 0.3694581280788177, + "acc_norm_stderr": 0.03395970381998574 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5, + "acc_stderr": 0.028444006199428714, + "acc_norm": 0.5, + "acc_norm_stderr": 0.028444006199428714 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7393162393162394, + "acc_stderr": 0.02876034895652341, + "acc_norm": 0.7393162393162394, + "acc_norm_stderr": 0.02876034895652341 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4716981132075472, + "acc_stderr": 0.030723535249006114, + "acc_norm": 0.4716981132075472, + "acc_norm_stderr": 0.030723535249006114 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5181818181818182, + "acc_stderr": 0.04785964010794915, + "acc_norm": 0.5181818181818182, + "acc_norm_stderr": 0.04785964010794915 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2851851851851852, + "acc_stderr": 0.027528599210340492, + "acc_norm": 0.2851851851851852, + "acc_norm_stderr": 0.027528599210340492 + }, + 
"harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.03780445850526732, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.03780445850526732 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6218905472636815, + "acc_stderr": 0.03428867848778658, + "acc_norm": 0.6218905472636815, + "acc_norm_stderr": 0.03428867848778658 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.41040462427745666, + "acc_stderr": 0.03750757044895538, + "acc_norm": 0.41040462427745666, + "acc_norm_stderr": 0.03750757044895538 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.4126984126984127, + "acc_stderr": 0.02535574126305527, + "acc_norm": 0.4126984126984127, + "acc_norm_stderr": 0.02535574126305527 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.375, + "acc_stderr": 0.04048439222695598, + "acc_norm": 0.375, + "acc_norm_stderr": 0.04048439222695598 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.63, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.63, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5260115606936416, + "acc_stderr": 0.026882643434022895, + "acc_norm": 0.5260115606936416, + "acc_norm_stderr": 0.026882643434022895 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5214723926380368, + "acc_stderr": 0.03924746876751129, + "acc_norm": 0.5214723926380368, + "acc_norm_stderr": 0.03924746876751129 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.02780165621232366, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.02780165621232366 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + 
"harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5492227979274611, + "acc_stderr": 0.035909109522355244, + "acc_norm": 0.5492227979274611, + "acc_norm_stderr": 0.035909109522355244 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.30701754385964913, + "acc_stderr": 0.0433913832257986, + "acc_norm": 0.30701754385964913, + "acc_norm_stderr": 0.0433913832257986 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5339449541284403, + "acc_stderr": 0.021387863350353996, + "acc_norm": 0.5339449541284403, + "acc_norm_stderr": 0.021387863350353996 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3968253968253968, + "acc_stderr": 0.04375888492727062, + "acc_norm": 0.3968253968253968, + "acc_norm_stderr": 0.04375888492727062 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.477124183006536, + "acc_stderr": 0.028599936776089786, + "acc_norm": 0.477124183006536, + "acc_norm_stderr": 0.028599936776089786 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6859504132231405, + "acc_stderr": 0.04236964753041018, + "acc_norm": 0.6859504132231405, + "acc_norm_stderr": 0.04236964753041018 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.46710526315789475, + "acc_stderr": 0.040601270352363966, + "acc_norm": 0.46710526315789475, + "acc_norm_stderr": 0.040601270352363966 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.39869281045751637, + "acc_stderr": 0.019808281317449848, + "acc_norm": 0.39869281045751637, + "acc_norm_stderr": 0.019808281317449848 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.30851063829787234, + "acc_stderr": 0.027553366165101362, + "acc_norm": 0.30851063829787234, + "acc_norm_stderr": 0.027553366165101362 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.04697113923010213, + 
"acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.04697113923010213 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.03293377139415191, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.03293377139415191 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2122905027932961, + "acc_stderr": 0.013676644685831725, + "acc_norm": 0.2122905027932961, + "acc_norm_stderr": 0.013676644685831725 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3492647058823529, + "acc_stderr": 0.028959755196824873, + "acc_norm": 0.3492647058823529, + "acc_norm_stderr": 0.028959755196824873 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5306122448979592, + "acc_stderr": 0.031949171367580624, + "acc_norm": 0.5306122448979592, + "acc_norm_stderr": 0.031949171367580624 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6708860759493671, + "acc_stderr": 0.030587326294702368, + "acc_norm": 0.6708860759493671, + "acc_norm_stderr": 0.030587326294702368 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3520208604954368, + "acc_stderr": 0.012198140605353592, + "acc_norm": 0.3520208604954368, + "acc_norm_stderr": 0.012198140605353592 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4803921568627451, + "acc_stderr": 0.03506612560524866, + "acc_norm": 0.4803921568627451, + "acc_norm_stderr": 0.03506612560524866 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.03888176921674101, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.03888176921674101 + }, + 
"harness|ko_truthfulqa_mc|0": { + "mc1": 0.2998776009791922, + "mc1_stderr": 0.016040352966713606, + "mc2": 0.4699398119482503, + "mc2_stderr": 0.015489346893307833 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4793388429752066, + "acc_stderr": 0.01717567127983645, + "acc_norm": 0.5218417945690673, + "acc_norm_stderr": 0.017173944474294378 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "HanaGroup/Mini_Orca_16_32", + "model_sha": "1356bee33d15e26ae9738a179058f993134f6141", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/HanaGroup/Mini_Test_orca01/result_2023-11-11 01:14:39.json b/HanaGroup/Mini_Test_orca01/result_2023-11-11 01:14:39.json new file mode 100644 index 0000000000000000000000000000000000000000..3c3283f13972057a1db883418227686dd283c7c6 --- /dev/null +++ b/HanaGroup/Mini_Test_orca01/result_2023-11-11 01:14:39.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.378839590443686, + "acc_stderr": 0.014175915490000322, + "acc_norm": 0.4325938566552901, + "acc_norm_stderr": 0.014478005694182533 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3935471021708823, + "acc_stderr": 
0.004875379352079816, + "acc_norm": 0.5049790878311093, + "acc_norm_stderr": 0.0049895339988203545 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.49707602339181284, + "acc_stderr": 0.03834759370936839, + "acc_norm": 0.49707602339181284, + "acc_norm_stderr": 0.03834759370936839 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4854368932038835, + "acc_stderr": 0.04948637324026637, + "acc_norm": 0.4854368932038835, + "acc_norm_stderr": 0.04948637324026637 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4840357598978289, + "acc_stderr": 0.01787084750608172, + "acc_norm": 0.4840357598978289, + "acc_norm_stderr": 0.01787084750608172 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37777777777777777, + "acc_stderr": 0.04188307537595853, + "acc_norm": 0.37777777777777777, + "acc_norm_stderr": 0.04188307537595853 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4127659574468085, + "acc_stderr": 0.03218471141400351, + "acc_norm": 0.4127659574468085, + "acc_norm_stderr": 0.03218471141400351 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39156626506024095, + "acc_stderr": 0.03799857454479636, + "acc_norm": 0.39156626506024095, + "acc_norm_stderr": 0.03799857454479636 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5209003215434084, + "acc_stderr": 0.028373270961069414, + "acc_norm": 0.5209003215434084, + "acc_norm_stderr": 0.028373270961069414 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.45739910313901344, + "acc_stderr": 0.033435777055830646, + "acc_norm": 0.45739910313901344, + "acc_norm_stderr": 0.033435777055830646 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4122137404580153, + "acc_stderr": 0.04317171194870255, + "acc_norm": 0.4122137404580153, + "acc_norm_stderr": 0.04317171194870255 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.37, + 
"acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5606060606060606, + "acc_stderr": 0.035360859475294805, + "acc_norm": 0.5606060606060606, + "acc_norm_stderr": 0.035360859475294805 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4689655172413793, + "acc_stderr": 0.04158632762097828, + "acc_norm": 0.4689655172413793, + "acc_norm_stderr": 0.04158632762097828 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.17647058823529413, + "acc_stderr": 0.0379328118530781, + "acc_norm": 0.17647058823529413, + "acc_norm_stderr": 0.0379328118530781 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.453781512605042, + "acc_stderr": 0.032339434681820885, + "acc_norm": 0.453781512605042, + "acc_norm_stderr": 0.032339434681820885 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4205128205128205, + "acc_stderr": 0.025028610276710855, + "acc_norm": 0.4205128205128205, + "acc_norm_stderr": 0.025028610276710855 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.048262172941398944, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.048262172941398944 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3694581280788177, + "acc_stderr": 0.03395970381998574, + "acc_norm": 0.3694581280788177, + "acc_norm_stderr": 0.03395970381998574 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.47419354838709676, + "acc_stderr": 0.028406095057653326, + "acc_norm": 0.47419354838709676, + "acc_norm_stderr": 0.028406095057653326 + }, + 
"harness|ko_mmlu_marketing|5": { + "acc": 0.6581196581196581, + "acc_stderr": 0.031075028526507755, + "acc_norm": 0.6581196581196581, + "acc_norm_stderr": 0.031075028526507755 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.44150943396226416, + "acc_stderr": 0.03056159042673183, + "acc_norm": 0.44150943396226416, + "acc_norm_stderr": 0.03056159042673183 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5363636363636364, + "acc_stderr": 0.04776449162396197, + "acc_norm": 0.5363636363636364, + "acc_norm_stderr": 0.04776449162396197 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.31851851851851853, + "acc_stderr": 0.028406533090608463, + "acc_norm": 0.31851851851851853, + "acc_norm_stderr": 0.028406533090608463 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.32450331125827814, + "acc_stderr": 0.038227469376587525, + "acc_norm": 0.32450331125827814, + "acc_norm_stderr": 0.038227469376587525 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.582089552238806, + "acc_stderr": 0.034875586404620636, + "acc_norm": 0.582089552238806, + "acc_norm_stderr": 0.034875586404620636 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3583815028901734, + "acc_stderr": 0.036563436533531585, + "acc_norm": 0.3583815028901734, + "acc_norm_stderr": 0.036563436533531585 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3835978835978836, + "acc_stderr": 0.025043757318520196, + "acc_norm": 0.3835978835978836, + "acc_norm_stderr": 0.025043757318520196 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3194444444444444, + "acc_stderr": 0.038990736873573344, + "acc_norm": 0.3194444444444444, + "acc_norm_stderr": 0.038990736873573344 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.64, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.64, + 
"acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.523121387283237, + "acc_stderr": 0.026890297881303125, + "acc_norm": 0.523121387283237, + "acc_norm_stderr": 0.026890297881303125 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.48466257668711654, + "acc_stderr": 0.03926522378708843, + "acc_norm": 0.48466257668711654, + "acc_norm_stderr": 0.03926522378708843 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.44135802469135804, + "acc_stderr": 0.027628737155668767, + "acc_norm": 0.44135802469135804, + "acc_norm_stderr": 0.027628737155668767 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5181347150259067, + "acc_stderr": 0.036060650018329185, + "acc_norm": 0.5181347150259067, + "acc_norm_stderr": 0.036060650018329185 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3157894736842105, + "acc_stderr": 0.04372748290278007, + "acc_norm": 0.3157894736842105, + "acc_norm_stderr": 0.04372748290278007 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.4917431192660551, + "acc_stderr": 0.021434399918214334, + "acc_norm": 0.4917431192660551, + "acc_norm_stderr": 0.021434399918214334 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.31746031746031744, + "acc_stderr": 0.0416345303130286, + "acc_norm": 0.31746031746031744, + "acc_norm_stderr": 0.0416345303130286 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5196078431372549, + "acc_stderr": 0.028607893699576073, + "acc_norm": 0.5196078431372549, + "acc_norm_stderr": 0.028607893699576073 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.47, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.47, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6694214876033058, + "acc_stderr": 0.04294340845212094, + 
"acc_norm": 0.6694214876033058, + "acc_norm_stderr": 0.04294340845212094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4473684210526316, + "acc_stderr": 0.04046336883978251, + "acc_norm": 0.4473684210526316, + "acc_norm_stderr": 0.04046336883978251 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3758169934640523, + "acc_stderr": 0.019594021136577457, + "acc_norm": 0.3758169934640523, + "acc_norm_stderr": 0.019594021136577457 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.028121636040639875, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.028121636040639875 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.33035714285714285, + "acc_stderr": 0.04464285714285712, + "acc_norm": 0.33035714285714285, + "acc_norm_stderr": 0.04464285714285712 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3472222222222222, + "acc_stderr": 0.032468872436376486, + "acc_norm": 0.3472222222222222, + "acc_norm_stderr": 0.032468872436376486 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27932960893854747, + "acc_stderr": 0.01500576244678616, + "acc_norm": 0.27932960893854747, + "acc_norm_stderr": 0.01500576244678616 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.34558823529411764, + "acc_stderr": 0.02888819310398865, + "acc_norm": 0.34558823529411764, + "acc_norm_stderr": 0.02888819310398865 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.49795918367346936, + "acc_stderr": 0.0320089533497105, + "acc_norm": 0.49795918367346936, + "acc_norm_stderr": 0.0320089533497105 + }, + "harness|ko_mmlu_high_school_world_history|5": { 
+ "acc": 0.5907172995780591, + "acc_stderr": 0.03200704183359591, + "acc_norm": 0.5907172995780591, + "acc_norm_stderr": 0.03200704183359591 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3409387222946545, + "acc_stderr": 0.01210681720306721, + "acc_norm": 0.3409387222946545, + "acc_norm_stderr": 0.01210681720306721 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.44607843137254904, + "acc_stderr": 0.034888454513049734, + "acc_norm": 0.44607843137254904, + "acc_norm_stderr": 0.034888454513049734 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.03895658065271846, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.03895658065271846 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2864137086903305, + "mc1_stderr": 0.015826142439502342, + "mc2": 0.4404103175289405, + "mc2_stderr": 0.015432051294700285 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3754427390791027, + "acc_stderr": 0.01664841158951109, + "acc_norm": 0.4344746162927981, + "acc_norm_stderr": 0.017042098620824942 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + 
"harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "HanaGroup/Mini_Test_orca01", + "model_sha": "c85ec5844cab07c96f6b54292f26bea5e252f3c8", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 
0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/HanaGroup/Mini_category/result_2023-11-08 23:58:45.json b/HanaGroup/Mini_category/result_2023-11-08 23:58:45.json new file mode 100644 index 0000000000000000000000000000000000000000..8975c6d74b49087c105df144014b1502e5cf02a7 --- /dev/null +++ b/HanaGroup/Mini_category/result_2023-11-08 23:58:45.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3387372013651877, + "acc_stderr": 0.013830568927974332, + "acc_norm": 0.3890784982935154, + "acc_norm_stderr": 0.014247309976045607 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3805018920533758, + "acc_stderr": 0.0048451800342716265, + "acc_norm": 0.48297151961760604, + "acc_norm_stderr": 0.004986886806565639 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4853801169590643, + "acc_stderr": 0.038331852752130205, + "acc_norm": 0.4853801169590643, + "acc_norm_stderr": 0.038331852752130205 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5728155339805825, + "acc_stderr": 0.04897957737781168, + "acc_norm": 0.5728155339805825, + "acc_norm_stderr": 0.04897957737781168 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.454661558109834, + "acc_stderr": 0.017806304585052606, + "acc_norm": 0.454661558109834, + "acc_norm_stderr": 0.017806304585052606 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4148148148148148, + "acc_stderr": 0.04256193767901407, + "acc_norm": 0.4148148148148148, + "acc_norm_stderr": 0.04256193767901407 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.43829787234042555, + "acc_stderr": 0.03243618636108101, + "acc_norm": 0.43829787234042555, + "acc_norm_stderr": 0.03243618636108101 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4036144578313253, + "acc_stderr": 
0.03819486140758397, + "acc_norm": 0.4036144578313253, + "acc_norm_stderr": 0.03819486140758397 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4758842443729904, + "acc_stderr": 0.028365041542564577, + "acc_norm": 0.4758842443729904, + "acc_norm_stderr": 0.028365041542564577 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4349775784753363, + "acc_stderr": 0.03327283370271344, + "acc_norm": 0.4349775784753363, + "acc_norm_stderr": 0.03327283370271344 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4580152671755725, + "acc_stderr": 0.04369802690578757, + "acc_norm": 0.4580152671755725, + "acc_norm_stderr": 0.04369802690578757 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5202020202020202, + "acc_stderr": 0.03559443565563918, + "acc_norm": 0.5202020202020202, + "acc_norm_stderr": 0.03559443565563918 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.47586206896551725, + "acc_stderr": 0.041618085035015295, + "acc_norm": 0.47586206896551725, + "acc_norm_stderr": 0.041618085035015295 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237655, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237655 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5294117647058824, + "acc_stderr": 0.03242225027115006, + "acc_norm": 0.5294117647058824, + "acc_norm_stderr": 0.03242225027115006 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.49230769230769234, + "acc_stderr": 0.0253480060315348, + "acc_norm": 0.49230769230769234, + "acc_norm_stderr": 0.0253480060315348 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.53, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.53, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 
0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5277777777777778, + "acc_stderr": 0.048262172941398944, + "acc_norm": 0.5277777777777778, + "acc_norm_stderr": 0.048262172941398944 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4236453201970443, + "acc_stderr": 0.034767257476490385, + "acc_norm": 0.4236453201970443, + "acc_norm_stderr": 0.034767257476490385 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.44516129032258067, + "acc_stderr": 0.028272410186214906, + "acc_norm": 0.44516129032258067, + "acc_norm_stderr": 0.028272410186214906 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7478632478632479, + "acc_stderr": 0.02844796547623102, + "acc_norm": 0.7478632478632479, + "acc_norm_stderr": 0.02844796547623102 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4490566037735849, + "acc_stderr": 0.030612730713641095, + "acc_norm": 0.4490566037735849, + "acc_norm_stderr": 0.030612730713641095 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4727272727272727, + "acc_stderr": 0.04782001791380063, + "acc_norm": 0.4727272727272727, + "acc_norm_stderr": 0.04782001791380063 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3037037037037037, + "acc_stderr": 0.028037929969114986, + "acc_norm": 0.3037037037037037, + "acc_norm_stderr": 0.028037929969114986 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + "acc_stderr": 0.03684881521389023, + "acc_norm": 0.2847682119205298, + "acc_norm_stderr": 0.03684881521389023 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6019900497512438, + "acc_stderr": 0.034611994290400135, + "acc_norm": 0.6019900497512438, + "acc_norm_stderr": 0.034611994290400135 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3815028901734104, + "acc_stderr": 0.03703851193099521, + "acc_norm": 0.3815028901734104, + "acc_norm_stderr": 
0.03703851193099521 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.36507936507936506, + "acc_stderr": 0.02479606060269995, + "acc_norm": 0.36507936507936506, + "acc_norm_stderr": 0.02479606060269995 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3194444444444444, + "acc_stderr": 0.03899073687357335, + "acc_norm": 0.3194444444444444, + "acc_norm_stderr": 0.03899073687357335 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4913294797687861, + "acc_stderr": 0.026915047355369804, + "acc_norm": 0.4913294797687861, + "acc_norm_stderr": 0.026915047355369804 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.49693251533742333, + "acc_stderr": 0.03928297078179662, + "acc_norm": 0.49693251533742333, + "acc_norm_stderr": 0.03928297078179662 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4691358024691358, + "acc_stderr": 0.027767689606833942, + "acc_norm": 0.4691358024691358, + "acc_norm_stderr": 0.027767689606833942 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.47668393782383417, + "acc_stderr": 0.03604513672442206, + "acc_norm": 0.47668393782383417, + "acc_norm_stderr": 0.03604513672442206 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3684210526315789, + "acc_stderr": 0.04537815354939391, + "acc_norm": 0.3684210526315789, + "acc_norm_stderr": 0.04537815354939391 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.45504587155963305, + "acc_stderr": 0.021350503090925167, + "acc_norm": 
0.45504587155963305, + "acc_norm_stderr": 0.021350503090925167 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3253968253968254, + "acc_stderr": 0.04190596438871136, + "acc_norm": 0.3253968253968254, + "acc_norm_stderr": 0.04190596438871136 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.49019607843137253, + "acc_stderr": 0.028624412550167958, + "acc_norm": 0.49019607843137253, + "acc_norm_stderr": 0.028624412550167958 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6776859504132231, + "acc_stderr": 0.04266416363352168, + "acc_norm": 0.6776859504132231, + "acc_norm_stderr": 0.04266416363352168 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.34210526315789475, + "acc_stderr": 0.038607315993160904, + "acc_norm": 0.34210526315789475, + "acc_norm_stderr": 0.038607315993160904 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3954248366013072, + "acc_stderr": 0.019780465954777508, + "acc_norm": 0.3954248366013072, + "acc_norm_stderr": 0.019780465954777508 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.34397163120567376, + "acc_stderr": 0.02833801742861131, + "acc_norm": 0.34397163120567376, + "acc_norm_stderr": 0.02833801742861131 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.04697113923010213, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.04697113923010213 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.375, + "acc_stderr": 0.033016908987210894, + "acc_norm": 0.375, + "acc_norm_stderr": 0.033016908987210894 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.293854748603352, + "acc_stderr": 0.015235075776719608, + "acc_norm": 0.293854748603352, + "acc_norm_stderr": 0.015235075776719608 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.45, + "acc_stderr": 
0.049999999999999996, + "acc_norm": 0.45, + "acc_norm_stderr": 0.049999999999999996 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.35661764705882354, + "acc_stderr": 0.029097209568411962, + "acc_norm": 0.35661764705882354, + "acc_norm_stderr": 0.029097209568411962 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5061224489795918, + "acc_stderr": 0.032006820201639065, + "acc_norm": 0.5061224489795918, + "acc_norm_stderr": 0.032006820201639065 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5316455696202531, + "acc_stderr": 0.032481974005110756, + "acc_norm": 0.5316455696202531, + "acc_norm_stderr": 0.032481974005110756 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.32073011734028684, + "acc_stderr": 0.011921199991782613, + "acc_norm": 0.32073011734028684, + "acc_norm_stderr": 0.011921199991782613 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.36764705882352944, + "acc_stderr": 0.03384132045674118, + "acc_norm": 0.36764705882352944, + "acc_norm_stderr": 0.03384132045674118 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.47878787878787876, + "acc_stderr": 0.03900828913737302, + "acc_norm": 0.47878787878787876, + "acc_norm_stderr": 0.03900828913737302 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.29498164014687883, + "mc1_stderr": 0.01596440096558967, + "mc2": 0.4614845426101113, + "mc2_stderr": 0.015908282639721598 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.47461629279811096, + "acc_stderr": 0.017168187201429246, + "acc_norm": 0.4970484061393152, + "acc_norm_stderr": 0.017190054580194694 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + 
"harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 
1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "HanaGroup/Mini_category", + "model_sha": "15482113d5d33f4a677f49741dce3c2a53810c4b", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/HanaGroup/Mini_orca_test01/result_2023-11-15 22:28:14.json b/HanaGroup/Mini_orca_test01/result_2023-11-15 22:28:14.json new file mode 100644 index 0000000000000000000000000000000000000000..5dcfd30a72d2548e72759c2d7fbb0ef55d3b9331 --- /dev/null +++ b/HanaGroup/Mini_orca_test01/result_2023-11-15 22:28:14.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.39078498293515357, + "acc_stderr": 0.014258563880513782, + "acc_norm": 0.439419795221843, + "acc_norm_stderr": 0.014503747823580123 + }, + "harness|ko_hellaswag|10": { + "acc": 0.39314877514439356, + "acc_stderr": 0.004874511466836798, + "acc_norm": 0.50318661621191, + "acc_norm_stderr": 0.004989680072717476 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.47953216374269003, + "acc_stderr": 0.038316105328219316, + "acc_norm": 0.47953216374269003, + "acc_norm_stderr": 0.038316105328219316 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.47572815533980584, + "acc_stderr": 0.04944901092973781, + "acc_norm": 0.47572815533980584, + "acc_norm_stderr": 0.04944901092973781 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4482758620689655, + 
"acc_stderr": 0.017784034534992457, + "acc_norm": 0.4482758620689655, + "acc_norm_stderr": 0.017784034534992457 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3851851851851852, + "acc_stderr": 0.042039210401562783, + "acc_norm": 0.3851851851851852, + "acc_norm_stderr": 0.042039210401562783 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.37446808510638296, + "acc_stderr": 0.031639106653672915, + "acc_norm": 0.37446808510638296, + "acc_norm_stderr": 0.031639106653672915 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3855421686746988, + "acc_stderr": 0.03789134424611548, + "acc_norm": 0.3855421686746988, + "acc_norm_stderr": 0.03789134424611548 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.47266881028938906, + "acc_stderr": 0.02835563356832818, + "acc_norm": 0.47266881028938906, + "acc_norm_stderr": 0.02835563356832818 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.43946188340807174, + "acc_stderr": 0.03331092511038179, + "acc_norm": 0.43946188340807174, + "acc_norm_stderr": 0.03331092511038179 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4351145038167939, + "acc_stderr": 0.04348208051644858, + "acc_norm": 0.4351145038167939, + "acc_norm_stderr": 0.04348208051644858 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.4898989898989899, + "acc_stderr": 0.035616254886737454, + "acc_norm": 0.4898989898989899, + "acc_norm_stderr": 0.035616254886737454 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.42758620689655175, + "acc_stderr": 0.04122737111370334, + "acc_norm": 0.42758620689655175, + "acc_norm_stderr": 0.04122737111370334 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 
0.23529411764705882, + "acc_stderr": 0.04220773659171452, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.04220773659171452 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.0322529423239964, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.0322529423239964 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.40512820512820513, + "acc_stderr": 0.024890471769938152, + "acc_norm": 0.40512820512820513, + "acc_norm_stderr": 0.024890471769938152 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.35960591133004927, + "acc_stderr": 0.03376458246509567, + "acc_norm": 0.35960591133004927, + "acc_norm_stderr": 0.03376458246509567 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3774193548387097, + "acc_stderr": 0.02757596072327824, + "acc_norm": 0.3774193548387097, + "acc_norm_stderr": 0.02757596072327824 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6965811965811965, + "acc_stderr": 0.030118210106942652, + "acc_norm": 0.6965811965811965, + "acc_norm_stderr": 0.030118210106942652 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.47547169811320755, + "acc_stderr": 0.030735822206205615, + "acc_norm": 0.47547169811320755, + "acc_norm_stderr": 0.030735822206205615 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4727272727272727, + "acc_stderr": 0.04782001791380063, + "acc_norm": 0.4727272727272727, + "acc_norm_stderr": 0.04782001791380063 + }, + 
"harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3296296296296296, + "acc_stderr": 0.02866120111652459, + "acc_norm": 0.3296296296296296, + "acc_norm_stderr": 0.02866120111652459 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.037101857261199946, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.037101857261199946 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.582089552238806, + "acc_stderr": 0.034875586404620636, + "acc_norm": 0.582089552238806, + "acc_norm_stderr": 0.034875586404620636 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3699421965317919, + "acc_stderr": 0.03681229633394319, + "acc_norm": 0.3699421965317919, + "acc_norm_stderr": 0.03681229633394319 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3544973544973545, + "acc_stderr": 0.024636830602841997, + "acc_norm": 0.3544973544973545, + "acc_norm_stderr": 0.024636830602841997 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.039420826399272135, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.039420826399272135 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.56, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.56, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.476878612716763, + "acc_stderr": 0.026890297881303125, + "acc_norm": 0.476878612716763, + "acc_norm_stderr": 0.026890297881303125 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4110429447852761, + "acc_stderr": 0.038656978537853624, + "acc_norm": 0.4110429447852761, + "acc_norm_stderr": 0.038656978537853624 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.44135802469135804, + "acc_stderr": 0.027628737155668777, + "acc_norm": 0.44135802469135804, + 
"acc_norm_stderr": 0.027628737155668777 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5129533678756477, + "acc_stderr": 0.03607228061047749, + "acc_norm": 0.5129533678756477, + "acc_norm_stderr": 0.03607228061047749 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.04185774424022056, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.04185774424022056 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.46788990825688076, + "acc_stderr": 0.021393071222680804, + "acc_norm": 0.46788990825688076, + "acc_norm_stderr": 0.021393071222680804 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.04285714285714281, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.04285714285714281 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4477124183006536, + "acc_stderr": 0.028472938478033526, + "acc_norm": 0.4477124183006536, + "acc_norm_stderr": 0.028472938478033526 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.45, + "acc_stderr": 0.04999999999999999, + "acc_norm": 0.45, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5619834710743802, + "acc_stderr": 0.045291468044357915, + "acc_norm": 0.5619834710743802, + "acc_norm_stderr": 0.045291468044357915 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3618421052631579, + "acc_stderr": 0.03910525752849725, + "acc_norm": 0.3618421052631579, + "acc_norm_stderr": 0.03910525752849725 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3627450980392157, + "acc_stderr": 0.019450768432505514, + "acc_norm": 0.3627450980392157, + "acc_norm_stderr": 0.019450768432505514 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.30141843971631205, + "acc_stderr": 
0.02737412888263115, + "acc_norm": 0.30141843971631205, + "acc_norm_stderr": 0.02737412888263115 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2767857142857143, + "acc_stderr": 0.042466243366976256, + "acc_norm": 0.2767857142857143, + "acc_norm_stderr": 0.042466243366976256 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.26851851851851855, + "acc_stderr": 0.030225226160012372, + "acc_norm": 0.26851851851851855, + "acc_norm_stderr": 0.030225226160012372 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24022346368715083, + "acc_stderr": 0.014288343803925296, + "acc_norm": 0.24022346368715083, + "acc_norm_stderr": 0.014288343803925296 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3235294117647059, + "acc_stderr": 0.02841820861940679, + "acc_norm": 0.3235294117647059, + "acc_norm_stderr": 0.02841820861940679 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.46530612244897956, + "acc_stderr": 0.03193207024425314, + "acc_norm": 0.46530612244897956, + "acc_norm_stderr": 0.03193207024425314 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5654008438818565, + "acc_stderr": 0.03226759995510144, + "acc_norm": 0.5654008438818565, + "acc_norm_stderr": 0.03226759995510144 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.30638852672750977, + "acc_stderr": 0.01177398032938071, + "acc_norm": 0.30638852672750977, + "acc_norm_stderr": 0.01177398032938071 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.47549019607843135, + "acc_stderr": 0.035050931943487976, + "acc_norm": 0.47549019607843135, + "acc_norm_stderr": 0.035050931943487976 + }, + 
"harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4909090909090909, + "acc_stderr": 0.03903698647748441, + "acc_norm": 0.4909090909090909, + "acc_norm_stderr": 0.03903698647748441 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2913096695226438, + "mc1_stderr": 0.01590598704818483, + "mc2": 0.4549121253328978, + "mc2_stderr": 0.015789028871035962 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4167650531286895, + "acc_stderr": 0.01695048914610883, + "acc_norm": 0.4899645808736718, + "acc_norm_stderr": 0.017186891286894043 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + 
"harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "HanaGroup/Mini_orca_test01", + "model_sha": "c5d64eb31a3158983e2f8567d90c51981a424cd6", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/HanaGroup/Mini_orca_test02/result_2023-11-17 15:50:00.json b/HanaGroup/Mini_orca_test02/result_2023-11-17 15:50:00.json new file mode 100644 index 0000000000000000000000000000000000000000..eea347f61f7ddf74cb22bf9db0c4864139991571 --- /dev/null +++ b/HanaGroup/Mini_orca_test02/result_2023-11-17 15:50:00.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 
0.35494880546075086, + "acc_stderr": 0.013983036904094092, + "acc_norm": 0.42235494880546076, + "acc_norm_stderr": 0.014434138713379988 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3633738299143597, + "acc_stderr": 0.004799882248494814, + "acc_norm": 0.45817566221868156, + "acc_norm_stderr": 0.00497229376497873 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.45614035087719296, + "acc_stderr": 0.03820042586602967, + "acc_norm": 0.45614035087719296, + "acc_norm_stderr": 0.03820042586602967 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.49514563106796117, + "acc_stderr": 0.04950504382128919, + "acc_norm": 0.49514563106796117, + "acc_norm_stderr": 0.04950504382128919 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4278416347381865, + "acc_stderr": 0.017692787927803735, + "acc_norm": 0.4278416347381865, + "acc_norm_stderr": 0.017692787927803735 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.04171654161354544, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.04171654161354544 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3829787234042553, + "acc_stderr": 0.03177821250236922, + "acc_norm": 0.3829787234042553, + "acc_norm_stderr": 0.03177821250236922 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3855421686746988, + "acc_stderr": 0.03789134424611548, + "acc_norm": 0.3855421686746988, + "acc_norm_stderr": 0.03789134424611548 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4405144694533762, + "acc_stderr": 0.028196400574197422, + "acc_norm": 0.4405144694533762, + "acc_norm_stderr": 0.028196400574197422 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.39461883408071746, + "acc_stderr": 0.03280400504755292, + "acc_norm": 0.39461883408071746, + "acc_norm_stderr": 0.03280400504755292 + }, + "harness|ko_mmlu_human_sexuality|5": 
{ + "acc": 0.3893129770992366, + "acc_stderr": 0.04276486542814591, + "acc_norm": 0.3893129770992366, + "acc_norm_stderr": 0.04276486542814591 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5050505050505051, + "acc_stderr": 0.035621707606254015, + "acc_norm": 0.5050505050505051, + "acc_norm_stderr": 0.035621707606254015 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.503448275862069, + "acc_stderr": 0.04166567577101579, + "acc_norm": 0.503448275862069, + "acc_norm_stderr": 0.04166567577101579 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237656, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237656 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.032145368597886394, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.032145368597886394 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.40512820512820513, + "acc_stderr": 0.02489047176993815, + "acc_norm": 0.40512820512820513, + "acc_norm_stderr": 0.02489047176993815 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.53, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.53, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5092592592592593, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.5092592592592593, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.35467980295566504, + "acc_stderr": 0.03366124489051448, + "acc_norm": 0.35467980295566504, + "acc_norm_stderr": 0.03366124489051448 + }, + 
"harness|ko_mmlu_high_school_biology|5": { + "acc": 0.42258064516129035, + "acc_stderr": 0.02810096472427264, + "acc_norm": 0.42258064516129035, + "acc_norm_stderr": 0.02810096472427264 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6538461538461539, + "acc_stderr": 0.031166957367235897, + "acc_norm": 0.6538461538461539, + "acc_norm_stderr": 0.031166957367235897 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4188679245283019, + "acc_stderr": 0.03036505082911521, + "acc_norm": 0.4188679245283019, + "acc_norm_stderr": 0.03036505082911521 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4636363636363636, + "acc_stderr": 0.047764491623961985, + "acc_norm": 0.4636363636363636, + "acc_norm_stderr": 0.047764491623961985 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.29259259259259257, + "acc_stderr": 0.02773896963217609, + "acc_norm": 0.29259259259259257, + "acc_norm_stderr": 0.02773896963217609 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.25165562913907286, + "acc_stderr": 0.03543304234389985, + "acc_norm": 0.25165562913907286, + "acc_norm_stderr": 0.03543304234389985 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.582089552238806, + "acc_stderr": 0.034875586404620636, + "acc_norm": 0.582089552238806, + "acc_norm_stderr": 0.034875586404620636 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.35260115606936415, + "acc_stderr": 0.03643037168958548, + "acc_norm": 0.35260115606936415, + "acc_norm_stderr": 0.03643037168958548 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3306878306878307, + "acc_stderr": 0.02422996529842509, + "acc_norm": 0.3306878306878307, + "acc_norm_stderr": 0.02422996529842509 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3402777777777778, + "acc_stderr": 0.03962135573486219, + "acc_norm": 0.3402777777777778, + "acc_norm_stderr": 0.03962135573486219 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + 
"acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4653179190751445, + "acc_stderr": 0.026854257928258893, + "acc_norm": 0.4653179190751445, + "acc_norm_stderr": 0.026854257928258893 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4601226993865031, + "acc_stderr": 0.03915857291436971, + "acc_norm": 0.4601226993865031, + "acc_norm_stderr": 0.03915857291436971 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.42901234567901236, + "acc_stderr": 0.027538925613470863, + "acc_norm": 0.42901234567901236, + "acc_norm_stderr": 0.027538925613470863 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5077720207253886, + "acc_stderr": 0.03608003225569653, + "acc_norm": 0.5077720207253886, + "acc_norm_stderr": 0.03608003225569653 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.30701754385964913, + "acc_stderr": 0.0433913832257986, + "acc_norm": 0.30701754385964913, + "acc_norm_stderr": 0.0433913832257986 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.47522935779816516, + "acc_stderr": 0.021410999753635918, + "acc_norm": 0.47522935779816516, + "acc_norm_stderr": 0.021410999753635918 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.40476190476190477, + "acc_stderr": 0.04390259265377562, + "acc_norm": 0.40476190476190477, + "acc_norm_stderr": 0.04390259265377562 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.028452639985088006, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.028452639985088006 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.51, + "acc_stderr": 
0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.043913262867240704, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.043913262867240704 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3881578947368421, + "acc_stderr": 0.03965842097512744, + "acc_norm": 0.3881578947368421, + "acc_norm_stderr": 0.03965842097512744 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.35294117647058826, + "acc_stderr": 0.019333142020797056, + "acc_norm": 0.35294117647058826, + "acc_norm_stderr": 0.019333142020797056 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3120567375886525, + "acc_stderr": 0.02764012054516993, + "acc_norm": 0.3120567375886525, + "acc_norm_stderr": 0.02764012054516993 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4642857142857143, + "acc_stderr": 0.04733667890053756, + "acc_norm": 0.4642857142857143, + "acc_norm_stderr": 0.04733667890053756 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.36574074074074076, + "acc_stderr": 0.032847388576472056, + "acc_norm": 0.36574074074074076, + "acc_norm_stderr": 0.032847388576472056 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24134078212290502, + "acc_stderr": 0.01431099954796146, + "acc_norm": 0.24134078212290502, + "acc_norm_stderr": 0.01431099954796146 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.53, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.53, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.2757352941176471, + "acc_stderr": 0.027146271936625166, + "acc_norm": 0.2757352941176471, + "acc_norm_stderr": 0.027146271936625166 + }, + 
"harness|ko_mmlu_security_studies|5": { + "acc": 0.5020408163265306, + "acc_stderr": 0.0320089533497105, + "acc_norm": 0.5020408163265306, + "acc_norm_stderr": 0.0320089533497105 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5443037974683544, + "acc_stderr": 0.03241920684693334, + "acc_norm": 0.5443037974683544, + "acc_norm_stderr": 0.03241920684693334 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3116036505867014, + "acc_stderr": 0.011829039182849645, + "acc_norm": 0.3116036505867014, + "acc_norm_stderr": 0.011829039182849645 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4852941176470588, + "acc_stderr": 0.035077938347913236, + "acc_norm": 0.4852941176470588, + "acc_norm_stderr": 0.035077938347913236 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.43636363636363634, + "acc_stderr": 0.03872592983524753, + "acc_norm": 0.43636363636363634, + "acc_norm_stderr": 0.03872592983524753 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2741738066095471, + "mc1_stderr": 0.015616518497219364, + "mc2": 0.4409558641757515, + "mc2_stderr": 0.015583794269994873 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3317591499409681, + "acc_stderr": 0.01618798464215732, + "acc_norm": 0.3860684769775679, + "acc_norm_stderr": 0.016738130760321747 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + 
"harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + 
"harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "HanaGroup/Mini_orca_test02", + "model_sha": "ab2d8616b556f22995feac76cfae8e1d9537c1e5", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/HanaGroup/Mini_orca_test03/result_2023-11-17 23:30:15.json b/HanaGroup/Mini_orca_test03/result_2023-11-17 23:30:15.json new file mode 100644 index 0000000000000000000000000000000000000000..1a78c4393402b88e7f453d651c49e4c0d2f432c6 --- /dev/null +++ b/HanaGroup/Mini_orca_test03/result_2023-11-17 23:30:15.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3651877133105802, + "acc_stderr": 0.0140702655192688, + "acc_norm": 0.4189419795221843, + "acc_norm_stderr": 0.014418106953639011 + }, + "harness|ko_hellaswag|10": { + "acc": 0.38697470623381797, + "acc_stderr": 0.0048606237334611275, + "acc_norm": 0.49830711013742285, + "acc_norm_stderr": 0.0049897528111734115 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4502923976608187, + "acc_stderr": 0.03815827365913235, + "acc_norm": 0.4502923976608187, + "acc_norm_stderr": 0.03815827365913235 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5145631067961165, + "acc_stderr": 0.049486373240266356, + "acc_norm": 0.5145631067961165, + "acc_norm_stderr": 0.049486373240266356 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.45721583652618136, + "acc_stderr": 0.01781438523853443, + "acc_norm": 0.45721583652618136, + "acc_norm_stderr": 0.01781438523853443 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.04292596718256981, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.04292596718256981 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + 
"harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.33617021276595743, + "acc_stderr": 0.030881618520676942, + "acc_norm": 0.33617021276595743, + "acc_norm_stderr": 0.030881618520676942 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3674698795180723, + "acc_stderr": 0.03753267402120575, + "acc_norm": 0.3674698795180723, + "acc_norm_stderr": 0.03753267402120575 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4790996784565916, + "acc_stderr": 0.028373270961069414, + "acc_norm": 0.4790996784565916, + "acc_norm_stderr": 0.028373270961069414 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4349775784753363, + "acc_stderr": 0.033272833702713445, + "acc_norm": 0.4349775784753363, + "acc_norm_stderr": 0.033272833702713445 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.44274809160305345, + "acc_stderr": 0.043564472026650695, + "acc_norm": 0.44274809160305345, + "acc_norm_stderr": 0.043564472026650695 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.51010101010101, + "acc_stderr": 0.035616254886737454, + "acc_norm": 0.51010101010101, + "acc_norm_stderr": 0.035616254886737454 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4482758620689655, + "acc_stderr": 0.04144311810878152, + "acc_norm": 0.4482758620689655, + "acc_norm_stderr": 0.04144311810878152 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.0438986995680878, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.0438986995680878 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.46218487394957986, + "acc_stderr": 0.0323854694875898, + "acc_norm": 0.46218487394957986, + "acc_norm_stderr": 0.0323854694875898 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3974358974358974, + "acc_stderr": 0.024811920017903836, + "acc_norm": 
0.3974358974358974, + "acc_norm_stderr": 0.024811920017903836 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.59, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.59, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3251231527093596, + "acc_stderr": 0.032957975663112704, + "acc_norm": 0.3251231527093596, + "acc_norm_stderr": 0.032957975663112704 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.41935483870967744, + "acc_stderr": 0.028071588901091852, + "acc_norm": 0.41935483870967744, + "acc_norm_stderr": 0.028071588901091852 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.030882736974138656, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.030882736974138656 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.43018867924528303, + "acc_stderr": 0.030471445867183235, + "acc_norm": 0.43018867924528303, + "acc_norm_stderr": 0.030471445867183235 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.44545454545454544, + "acc_stderr": 0.047605488214603246, + "acc_norm": 0.44545454545454544, + "acc_norm_stderr": 0.047605488214603246 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.29259259259259257, + "acc_stderr": 0.027738969632176088, + "acc_norm": 0.29259259259259257, + "acc_norm_stderr": 0.027738969632176088 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33774834437086093, + "acc_stderr": 0.038615575462551684, + "acc_norm": 0.33774834437086093, + "acc_norm_stderr": 0.038615575462551684 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5671641791044776, + 
"acc_stderr": 0.03503490923673282, + "acc_norm": 0.5671641791044776, + "acc_norm_stderr": 0.03503490923673282 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3583815028901734, + "acc_stderr": 0.03656343653353159, + "acc_norm": 0.3583815028901734, + "acc_norm_stderr": 0.03656343653353159 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.024677862841332783, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.024677862841332783 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3125, + "acc_stderr": 0.038760854559127644, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.038760854559127644 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.57, + "acc_stderr": 0.04975698519562426, + "acc_norm": 0.57, + "acc_norm_stderr": 0.04975698519562426 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.49421965317919075, + "acc_stderr": 0.026917296179149116, + "acc_norm": 0.49421965317919075, + "acc_norm_stderr": 0.026917296179149116 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.48466257668711654, + "acc_stderr": 0.039265223787088445, + "acc_norm": 0.48466257668711654, + "acc_norm_stderr": 0.039265223787088445 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4506172839506173, + "acc_stderr": 0.027684721415656203, + "acc_norm": 0.4506172839506173, + "acc_norm_stderr": 0.027684721415656203 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.48186528497409326, + "acc_stderr": 0.036060650018329185, + "acc_norm": 0.48186528497409326, + "acc_norm_stderr": 0.036060650018329185 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3333333333333333, + 
"acc_stderr": 0.04434600701584925, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04434600701584925 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.45871559633027525, + "acc_stderr": 0.02136412253388169, + "acc_norm": 0.45871559633027525, + "acc_norm_stderr": 0.02136412253388169 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.38095238095238093, + "acc_stderr": 0.043435254289490965, + "acc_norm": 0.38095238095238093, + "acc_norm_stderr": 0.043435254289490965 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4934640522875817, + "acc_stderr": 0.028627470550556047, + "acc_norm": 0.4934640522875817, + "acc_norm_stderr": 0.028627470550556047 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.45, + "acc_stderr": 0.049999999999999996, + "acc_norm": 0.45, + "acc_norm_stderr": 0.049999999999999996 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5785123966942148, + "acc_stderr": 0.04507732278775088, + "acc_norm": 0.5785123966942148, + "acc_norm_stderr": 0.04507732278775088 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.42105263157894735, + "acc_stderr": 0.04017901275981747, + "acc_norm": 0.42105263157894735, + "acc_norm_stderr": 0.04017901275981747 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.019431775677037313, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.019431775677037313 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3262411347517731, + "acc_stderr": 0.027968453043563168, + "acc_norm": 0.3262411347517731, + "acc_norm_stderr": 0.027968453043563168 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3482142857142857, + "acc_stderr": 0.045218299028335865, + "acc_norm": 0.3482142857142857, + "acc_norm_stderr": 0.045218299028335865 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.375, + "acc_stderr": 0.033016908987210894, + "acc_norm": 0.375, + "acc_norm_stderr": 0.033016908987210894 + }, + 
"harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24134078212290502, + "acc_stderr": 0.014310999547961462, + "acc_norm": 0.24134078212290502, + "acc_norm_stderr": 0.014310999547961462 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3382352941176471, + "acc_stderr": 0.02873932851398358, + "acc_norm": 0.3382352941176471, + "acc_norm_stderr": 0.02873932851398358 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5142857142857142, + "acc_stderr": 0.031996152328062855, + "acc_norm": 0.5142857142857142, + "acc_norm_stderr": 0.031996152328062855 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5569620253164557, + "acc_stderr": 0.032335327775334835, + "acc_norm": 0.5569620253164557, + "acc_norm_stderr": 0.032335327775334835 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3259452411994785, + "acc_stderr": 0.01197150729498278, + "acc_norm": 0.3259452411994785, + "acc_norm_stderr": 0.01197150729498278 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.47058823529411764, + "acc_stderr": 0.03503235296367992, + "acc_norm": 0.47058823529411764, + "acc_norm_stderr": 0.03503235296367992 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.03895658065271846, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.03895658065271846 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3023255813953488, + "mc1_stderr": 0.016077509266133026, + "mc2": 0.47048205310170765, + "mc2_stderr": 0.01584867112784759 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4214876033057851, + "acc_stderr": 0.016977101932601518, + "acc_norm": 
0.4722550177095632, + "acc_norm_stderr": 0.017163867979456016 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + 
"harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "HanaGroup/Mini_orca_test03", + "model_sha": "0d24157a9820d50b64bf30683143e0805ab589ef", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/HanaGroup/Mini_orca_test04/result_2023-11-18 00:02:47.json b/HanaGroup/Mini_orca_test04/result_2023-11-18 00:02:47.json new file mode 100644 index 0000000000000000000000000000000000000000..6ce14950862b1f9e07eef89528fd66aad221f2b3 --- /dev/null +++ b/HanaGroup/Mini_orca_test04/result_2023-11-18 00:02:47.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3651877133105802, + "acc_stderr": 0.0140702655192688, + "acc_norm": 0.4189419795221843, + "acc_norm_stderr": 0.014418106953639011 + }, + "harness|ko_hellaswag|10": { + "acc": 0.38657637920732923, + "acc_stderr": 0.00485969956245146, + "acc_norm": 0.49810794662417845, + "acc_norm_stderr": 0.0049897456858204285 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4502923976608187, + "acc_stderr": 0.03815827365913235, + "acc_norm": 0.4502923976608187, + "acc_norm_stderr": 
0.03815827365913235 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5145631067961165, + "acc_stderr": 0.049486373240266356, + "acc_norm": 0.5145631067961165, + "acc_norm_stderr": 0.049486373240266356 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.45849297573435505, + "acc_stderr": 0.017818248603465554, + "acc_norm": 0.45849297573435505, + "acc_norm_stderr": 0.017818248603465554 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.04292596718256981, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.04292596718256981 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.33617021276595743, + "acc_stderr": 0.030881618520676942, + "acc_norm": 0.33617021276595743, + "acc_norm_stderr": 0.030881618520676942 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3674698795180723, + "acc_stderr": 0.03753267402120575, + "acc_norm": 0.3674698795180723, + "acc_norm_stderr": 0.03753267402120575 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4790996784565916, + "acc_stderr": 0.028373270961069414, + "acc_norm": 0.4790996784565916, + "acc_norm_stderr": 0.028373270961069414 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4260089686098655, + "acc_stderr": 0.0331883328621728, + "acc_norm": 0.4260089686098655, + "acc_norm_stderr": 0.0331883328621728 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.44274809160305345, + "acc_stderr": 0.043564472026650695, + "acc_norm": 0.44274809160305345, + "acc_norm_stderr": 0.043564472026650695 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5151515151515151, + "acc_stderr": 0.0356071651653106, + "acc_norm": 0.5151515151515151, + "acc_norm_stderr": 
0.0356071651653106 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4482758620689655, + "acc_stderr": 0.04144311810878152, + "acc_norm": 0.4482758620689655, + "acc_norm_stderr": 0.04144311810878152 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.0438986995680878, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.0438986995680878 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.46218487394957986, + "acc_stderr": 0.0323854694875898, + "acc_norm": 0.46218487394957986, + "acc_norm_stderr": 0.0323854694875898 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3974358974358974, + "acc_stderr": 0.024811920017903836, + "acc_norm": 0.3974358974358974, + "acc_norm_stderr": 0.024811920017903836 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.59, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.59, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3251231527093596, + "acc_stderr": 0.032957975663112704, + "acc_norm": 0.3251231527093596, + "acc_norm_stderr": 0.032957975663112704 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.42258064516129035, + "acc_stderr": 0.02810096472427264, + "acc_norm": 0.42258064516129035, + "acc_norm_stderr": 0.02810096472427264 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.030882736974138656, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.030882736974138656 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.43018867924528303, + "acc_stderr": 0.030471445867183235, + 
"acc_norm": 0.43018867924528303, + "acc_norm_stderr": 0.030471445867183235 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.44545454545454544, + "acc_stderr": 0.047605488214603246, + "acc_norm": 0.44545454545454544, + "acc_norm_stderr": 0.047605488214603246 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.027840811495871937, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.027840811495871937 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33112582781456956, + "acc_stderr": 0.038425817186598696, + "acc_norm": 0.33112582781456956, + "acc_norm_stderr": 0.038425817186598696 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5671641791044776, + "acc_stderr": 0.03503490923673282, + "acc_norm": 0.5671641791044776, + "acc_norm_stderr": 0.03503490923673282 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3583815028901734, + "acc_stderr": 0.03656343653353159, + "acc_norm": 0.3583815028901734, + "acc_norm_stderr": 0.03656343653353159 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.36243386243386244, + "acc_stderr": 0.024757473902752052, + "acc_norm": 0.36243386243386244, + "acc_norm_stderr": 0.024757473902752052 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3194444444444444, + "acc_stderr": 0.03899073687357335, + "acc_norm": 0.3194444444444444, + "acc_norm_stderr": 0.03899073687357335 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.56, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.56, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4913294797687861, + "acc_stderr": 0.02691504735536981, + "acc_norm": 0.4913294797687861, + "acc_norm_stderr": 0.02691504735536981 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.48466257668711654, + 
"acc_stderr": 0.039265223787088445, + "acc_norm": 0.48466257668711654, + "acc_norm_stderr": 0.039265223787088445 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.44753086419753085, + "acc_stderr": 0.02766713856942271, + "acc_norm": 0.44753086419753085, + "acc_norm_stderr": 0.02766713856942271 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.48186528497409326, + "acc_stderr": 0.036060650018329185, + "acc_norm": 0.48186528497409326, + "acc_norm_stderr": 0.036060650018329185 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04434600701584925, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04434600701584925 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.45871559633027525, + "acc_stderr": 0.02136412253388169, + "acc_norm": 0.45871559633027525, + "acc_norm_stderr": 0.02136412253388169 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.373015873015873, + "acc_stderr": 0.04325506042017086, + "acc_norm": 0.373015873015873, + "acc_norm_stderr": 0.04325506042017086 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.49673202614379086, + "acc_stderr": 0.02862930519400354, + "acc_norm": 0.49673202614379086, + "acc_norm_stderr": 0.02862930519400354 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5785123966942148, + "acc_stderr": 0.04507732278775088, + "acc_norm": 0.5785123966942148, + "acc_norm_stderr": 0.04507732278775088 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.42105263157894735, + "acc_stderr": 0.04017901275981747, + "acc_norm": 0.42105263157894735, + "acc_norm_stderr": 0.04017901275981747 + }, + "harness|ko_mmlu_professional_psychology|5": 
{ + "acc": 0.3627450980392157, + "acc_stderr": 0.01945076843250551, + "acc_norm": 0.3627450980392157, + "acc_norm_stderr": 0.01945076843250551 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32269503546099293, + "acc_stderr": 0.02788913930053478, + "acc_norm": 0.32269503546099293, + "acc_norm_stderr": 0.02788913930053478 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3482142857142857, + "acc_stderr": 0.045218299028335865, + "acc_norm": 0.3482142857142857, + "acc_norm_stderr": 0.045218299028335865 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.375, + "acc_stderr": 0.033016908987210894, + "acc_norm": 0.375, + "acc_norm_stderr": 0.033016908987210894 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24134078212290502, + "acc_stderr": 0.014310999547961462, + "acc_norm": 0.24134078212290502, + "acc_norm_stderr": 0.014310999547961462 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.34191176470588236, + "acc_stderr": 0.02881472242225417, + "acc_norm": 0.34191176470588236, + "acc_norm_stderr": 0.02881472242225417 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5142857142857142, + "acc_stderr": 0.031996152328062855, + "acc_norm": 0.5142857142857142, + "acc_norm_stderr": 0.031996152328062855 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5527426160337553, + "acc_stderr": 0.03236564251614193, + "acc_norm": 0.5527426160337553, + "acc_norm_stderr": 0.03236564251614193 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3246414602346806, + "acc_stderr": 0.01195908938853003, + "acc_norm": 0.3246414602346806, + "acc_norm_stderr": 
0.01195908938853003 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.47058823529411764, + "acc_stderr": 0.03503235296367992, + "acc_norm": 0.47058823529411764, + "acc_norm_stderr": 0.03503235296367992 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.46060606060606063, + "acc_stderr": 0.03892207016552013, + "acc_norm": 0.46060606060606063, + "acc_norm_stderr": 0.03892207016552013 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3023255813953488, + "mc1_stderr": 0.016077509266133026, + "mc2": 0.4703996827708238, + "mc2_stderr": 0.01584673681912838 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4167650531286895, + "acc_stderr": 0.01695048914610882, + "acc_norm": 0.46871310507674147, + "acc_norm_stderr": 0.017156666859785466 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + 
"harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "HanaGroup/Mini_orca_test04", + "model_sha": "3623bf497344d78bfb3745f33b1b6e72ff86191c", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Heoni/Aguie-chat-proto/result_2024-04-04 12:14:57.json b/Heoni/Aguie-chat-proto/result_2024-04-04 12:14:57.json new file mode 100644 index 
0000000000000000000000000000000000000000..c0edf4accce762b0aa6c39b111042e1d8858c444 --- /dev/null +++ b/Heoni/Aguie-chat-proto/result_2024-04-04 12:14:57.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.39505119453924914, + "acc_stderr": 0.014285898292938174, + "acc_norm": 0.4513651877133106, + "acc_norm_stderr": 0.014542104569955264 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4250149372634933, + "acc_stderr": 0.004933349621589329, + "acc_norm": 0.5692093208524198, + "acc_norm_stderr": 0.004941748817682299 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5087719298245614, + "acc_stderr": 0.03834234744164993, + "acc_norm": 0.5087719298245614, + "acc_norm_stderr": 0.03834234744164993 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.46601941747572817, + "acc_stderr": 0.04939291447273481, + "acc_norm": 0.46601941747572817, + "acc_norm_stderr": 0.04939291447273481 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5325670498084292, + "acc_stderr": 0.017841995750520874, + "acc_norm": 0.5325670498084292, + "acc_norm_stderr": 0.017841995750520874 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45925925925925926, + "acc_stderr": 0.04304979692464244, + "acc_norm": 0.45925925925925926, + "acc_norm_stderr": 0.04304979692464244 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3276595744680851, + "acc_stderr": 0.030683020843231015, + "acc_norm": 0.3276595744680851, + "acc_norm_stderr": 0.030683020843231015 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3855421686746988, + "acc_stderr": 0.03789134424611548, + "acc_norm": 0.3855421686746988, + "acc_norm_stderr": 0.03789134424611548 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.47266881028938906, + "acc_stderr": 0.028355633568328174, + "acc_norm": 0.47266881028938906, + "acc_norm_stderr": 
0.028355633568328174 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.49327354260089684, + "acc_stderr": 0.033554765962343545, + "acc_norm": 0.49327354260089684, + "acc_norm_stderr": 0.033554765962343545 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5038167938931297, + "acc_stderr": 0.043851623256015534, + "acc_norm": 0.5038167938931297, + "acc_norm_stderr": 0.043851623256015534 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5202020202020202, + "acc_stderr": 0.035594435655639196, + "acc_norm": 0.5202020202020202, + "acc_norm_stderr": 0.035594435655639196 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.041042692118062316, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.041042692118062316 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.17647058823529413, + "acc_stderr": 0.0379328118530781, + "acc_norm": 0.17647058823529413, + "acc_norm_stderr": 0.0379328118530781 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.42436974789915966, + "acc_stderr": 0.032104790510157764, + "acc_norm": 0.42436974789915966, + "acc_norm_stderr": 0.032104790510157764 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4205128205128205, + "acc_stderr": 0.025028610276710855, + "acc_norm": 0.4205128205128205, + "acc_norm_stderr": 0.025028610276710855 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.42592592592592593, + "acc_stderr": 0.0478034362693679, + "acc_norm": 
0.42592592592592593, + "acc_norm_stderr": 0.0478034362693679 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3448275862068966, + "acc_stderr": 0.033442837442804574, + "acc_norm": 0.3448275862068966, + "acc_norm_stderr": 0.033442837442804574 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4483870967741935, + "acc_stderr": 0.028292056830112728, + "acc_norm": 0.4483870967741935, + "acc_norm_stderr": 0.028292056830112728 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6581196581196581, + "acc_stderr": 0.031075028526507762, + "acc_norm": 0.6581196581196581, + "acc_norm_stderr": 0.031075028526507762 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.44150943396226416, + "acc_stderr": 0.030561590426731837, + "acc_norm": 0.44150943396226416, + "acc_norm_stderr": 0.030561590426731837 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4818181818181818, + "acc_stderr": 0.04785964010794917, + "acc_norm": 0.4818181818181818, + "acc_norm_stderr": 0.04785964010794917 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2814814814814815, + "acc_stderr": 0.027420019350945277, + "acc_norm": 0.2814814814814815, + "acc_norm_stderr": 0.027420019350945277 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.36423841059602646, + "acc_stderr": 0.03929111781242742, + "acc_norm": 0.36423841059602646, + "acc_norm_stderr": 0.03929111781242742 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5174129353233831, + "acc_stderr": 0.03533389234739245, + "acc_norm": 0.5174129353233831, + "acc_norm_stderr": 0.03533389234739245 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4161849710982659, + "acc_stderr": 0.037585177754049466, + "acc_norm": 0.4161849710982659, + "acc_norm_stderr": 0.037585177754049466 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.31216931216931215, + "acc_stderr": 0.02386520683697258, + "acc_norm": 0.31216931216931215, + "acc_norm_stderr": 0.02386520683697258 + }, + 
"harness|ko_mmlu_college_biology|5": { + "acc": 0.3958333333333333, + "acc_stderr": 0.04089465449325583, + "acc_norm": 0.3958333333333333, + "acc_norm_stderr": 0.04089465449325583 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237101, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237101 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4884393063583815, + "acc_stderr": 0.026911898686377913, + "acc_norm": 0.4884393063583815, + "acc_norm_stderr": 0.026911898686377913 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.48466257668711654, + "acc_stderr": 0.03926522378708843, + "acc_norm": 0.48466257668711654, + "acc_norm_stderr": 0.03926522378708843 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4660493827160494, + "acc_stderr": 0.02775653525734767, + "acc_norm": 0.4660493827160494, + "acc_norm_stderr": 0.02775653525734767 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5181347150259067, + "acc_stderr": 0.036060650018329185, + "acc_norm": 0.5181347150259067, + "acc_norm_stderr": 0.036060650018329185 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.040969851398436695, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.040969851398436695 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.4935779816513762, + "acc_stderr": 0.02143555482001308, + "acc_norm": 0.4935779816513762, + "acc_norm_stderr": 0.02143555482001308 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.29365079365079366, + "acc_stderr": 0.04073524322147126, + "acc_norm": 0.29365079365079366, + "acc_norm_stderr": 
0.04073524322147126 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5032679738562091, + "acc_stderr": 0.02862930519400354, + "acc_norm": 0.5032679738562091, + "acc_norm_stderr": 0.02862930519400354 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.47, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.47, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6198347107438017, + "acc_stderr": 0.04431324501968431, + "acc_norm": 0.6198347107438017, + "acc_norm_stderr": 0.04431324501968431 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.375, + "acc_stderr": 0.039397364351956274, + "acc_norm": 0.375, + "acc_norm_stderr": 0.039397364351956274 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3790849673202614, + "acc_stderr": 0.01962744474841225, + "acc_norm": 0.3790849673202614, + "acc_norm_stderr": 0.01962744474841225 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.29432624113475175, + "acc_stderr": 0.027187127011503807, + "acc_norm": 0.29432624113475175, + "acc_norm_stderr": 0.027187127011503807 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.22321428571428573, + "acc_stderr": 0.039523019677025116, + "acc_norm": 0.22321428571428573, + "acc_norm_stderr": 0.039523019677025116 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3287037037037037, + "acc_stderr": 0.03203614084670058, + "acc_norm": 0.3287037037037037, + "acc_norm_stderr": 0.03203614084670058 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2581005586592179, + "acc_stderr": 0.01463518561652784, + "acc_norm": 0.2581005586592179, + "acc_norm_stderr": 0.01463518561652784 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 
0.049431107042371025 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.34558823529411764, + "acc_stderr": 0.028888193103988647, + "acc_norm": 0.34558823529411764, + "acc_norm_stderr": 0.028888193103988647 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4857142857142857, + "acc_stderr": 0.03199615232806287, + "acc_norm": 0.4857142857142857, + "acc_norm_stderr": 0.03199615232806287 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5569620253164557, + "acc_stderr": 0.03233532777533484, + "acc_norm": 0.5569620253164557, + "acc_norm_stderr": 0.03233532777533484 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3350717079530639, + "acc_stderr": 0.012055499471330366, + "acc_norm": 0.3350717079530639, + "acc_norm_stderr": 0.012055499471330366 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.46078431372549017, + "acc_stderr": 0.03498501649369527, + "acc_norm": 0.46078431372549017, + "acc_norm_stderr": 0.03498501649369527 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.46060606060606063, + "acc_stderr": 0.03892207016552013, + "acc_norm": 0.46060606060606063, + "acc_norm_stderr": 0.03892207016552013 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2607099143206854, + "mc1_stderr": 0.015368841620766375, + "mc2": 0.4145722465010712, + "mc2_stderr": 0.01499504951664785 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.43565525383707204, + "acc_stderr": 0.017047415229476323, + "acc_norm": 0.4817001180637544, + "acc_norm_stderr": 0.017178836639177752 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, 
+ "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + 
"harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Heoni/Aguie-chat-proto", + "model_sha": "24ccc46f33bf5d41538929ed82269d48c7d28a70", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Heoni/Aguie-chat_v0.1/result_2024-04-04 12:18:11.json b/Heoni/Aguie-chat_v0.1/result_2024-04-04 12:18:11.json new file mode 100644 index 0000000000000000000000000000000000000000..1eae9e10f7d84bb691d3fecd59c40ccc56d38202 --- /dev/null +++ b/Heoni/Aguie-chat_v0.1/result_2024-04-04 12:18:11.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3839590443686007, + "acc_stderr": 0.01421244498065189, + "acc_norm": 0.4300341296928328, + "acc_norm_stderr": 0.014467631559137986 + }, + "harness|ko_hellaswag|10": { + "acc": 0.42013543118900615, + "acc_stderr": 0.004925717008099708, + "acc_norm": 0.5617406891057558, + "acc_norm_stderr": 0.004951594063272051 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4093567251461988, + "acc_stderr": 0.03771283107626545, + "acc_norm": 0.4093567251461988, + "acc_norm_stderr": 0.03771283107626545 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4563106796116505, + "acc_stderr": 0.04931801994220416, + "acc_norm": 0.4563106796116505, + "acc_norm_stderr": 0.04931801994220416 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5108556832694764, + "acc_stderr": 0.01787574884024241, + "acc_norm": 0.5108556832694764, + "acc_norm_stderr": 0.01787574884024241 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.042925967182569816, + "acc_norm": 0.4444444444444444, + 
"acc_norm_stderr": 0.042925967182569816 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621502, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621502 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.35319148936170214, + "acc_stderr": 0.031245325202761926, + "acc_norm": 0.35319148936170214, + "acc_norm_stderr": 0.031245325202761926 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4036144578313253, + "acc_stderr": 0.03819486140758397, + "acc_norm": 0.4036144578313253, + "acc_norm_stderr": 0.03819486140758397 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4340836012861736, + "acc_stderr": 0.028150232244535594, + "acc_norm": 0.4340836012861736, + "acc_norm_stderr": 0.028150232244535594 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4484304932735426, + "acc_stderr": 0.033378837362550984, + "acc_norm": 0.4484304932735426, + "acc_norm_stderr": 0.033378837362550984 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4580152671755725, + "acc_stderr": 0.04369802690578756, + "acc_norm": 0.4580152671755725, + "acc_norm_stderr": 0.04369802690578756 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5151515151515151, + "acc_stderr": 0.0356071651653106, + "acc_norm": 0.5151515151515151, + "acc_norm_stderr": 0.0356071651653106 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.36551724137931035, + "acc_stderr": 0.04013124195424385, + "acc_norm": 0.36551724137931035, + "acc_norm_stderr": 0.04013124195424385 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.19607843137254902, + "acc_stderr": 0.03950581861179962, + "acc_norm": 0.19607843137254902, + "acc_norm_stderr": 0.03950581861179962 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.031124619309328177, + 
"acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.031124619309328177 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.382051282051282, + "acc_stderr": 0.024635549163908223, + "acc_norm": 0.382051282051282, + "acc_norm_stderr": 0.024635549163908223 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.55, + "acc_stderr": 0.049999999999999996, + "acc_norm": 0.55, + "acc_norm_stderr": 0.049999999999999996 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.048262172941398944, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.048262172941398944 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.33497536945812806, + "acc_stderr": 0.0332085274234831, + "acc_norm": 0.33497536945812806, + "acc_norm_stderr": 0.0332085274234831 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.45806451612903226, + "acc_stderr": 0.028343787250540625, + "acc_norm": 0.45806451612903226, + "acc_norm_stderr": 0.028343787250540625 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6282051282051282, + "acc_stderr": 0.031660988918880785, + "acc_norm": 0.6282051282051282, + "acc_norm_stderr": 0.031660988918880785 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.41509433962264153, + "acc_stderr": 0.03032594578928611, + "acc_norm": 0.41509433962264153, + "acc_norm_stderr": 0.03032594578928611 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4818181818181818, + "acc_stderr": 0.04785964010794916, + "acc_norm": 0.4818181818181818, + "acc_norm_stderr": 0.04785964010794916 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2814814814814815, + "acc_stderr": 0.02742001935094528, + "acc_norm": 0.2814814814814815, + "acc_norm_stderr": 0.02742001935094528 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 
0.2913907284768212, + "acc_stderr": 0.03710185726119995, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.03710185726119995 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.4626865671641791, + "acc_stderr": 0.03525675167467974, + "acc_norm": 0.4626865671641791, + "acc_norm_stderr": 0.03525675167467974 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.34104046242774566, + "acc_stderr": 0.03614665424180826, + "acc_norm": 0.34104046242774566, + "acc_norm_stderr": 0.03614665424180826 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30687830687830686, + "acc_stderr": 0.023752928712112154, + "acc_norm": 0.30687830687830686, + "acc_norm_stderr": 0.023752928712112154 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3194444444444444, + "acc_stderr": 0.03899073687357335, + "acc_norm": 0.3194444444444444, + "acc_norm_stderr": 0.03899073687357335 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.407514450867052, + "acc_stderr": 0.0264545781469315, + "acc_norm": 0.407514450867052, + "acc_norm_stderr": 0.0264545781469315 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4110429447852761, + "acc_stderr": 0.038656978537853624, + "acc_norm": 0.4110429447852761, + "acc_norm_stderr": 0.038656978537853624 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.02733954664066273, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.02733954664066273 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + 
"acc": 0.47150259067357514, + "acc_stderr": 0.036025735712884414, + "acc_norm": 0.47150259067357514, + "acc_norm_stderr": 0.036025735712884414 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.040969851398436716, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.040969851398436716 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.45321100917431195, + "acc_stderr": 0.02134325516554603, + "acc_norm": 0.45321100917431195, + "acc_norm_stderr": 0.02134325516554603 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.25396825396825395, + "acc_stderr": 0.03893259610604672, + "acc_norm": 0.25396825396825395, + "acc_norm_stderr": 0.03893259610604672 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4215686274509804, + "acc_stderr": 0.028275490156791438, + "acc_norm": 0.4215686274509804, + "acc_norm_stderr": 0.028275490156791438 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.42, + "acc_stderr": 0.04960449637488584, + "acc_norm": 0.42, + "acc_norm_stderr": 0.04960449637488584 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5619834710743802, + "acc_stderr": 0.045291468044357915, + "acc_norm": 0.5619834710743802, + "acc_norm_stderr": 0.045291468044357915 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.34868421052631576, + "acc_stderr": 0.0387813988879761, + "acc_norm": 0.34868421052631576, + "acc_norm_stderr": 0.0387813988879761 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3709150326797386, + "acc_stderr": 0.019542101564854114, + "acc_norm": 0.3709150326797386, + "acc_norm_stderr": 0.019542101564854114 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2872340425531915, + "acc_stderr": 0.026992199173064356, + "acc_norm": 0.2872340425531915, + "acc_norm_stderr": 0.026992199173064356 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3482142857142857, + "acc_stderr": 0.045218299028335865, + "acc_norm": 0.3482142857142857, + "acc_norm_stderr": 
0.045218299028335865 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.031674687068289804, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.031674687068289804 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24581005586592178, + "acc_stderr": 0.014400296429225608, + "acc_norm": 0.24581005586592178, + "acc_norm_stderr": 0.014400296429225608 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.0498887651569859, + "acc_norm": 0.44, + "acc_norm_stderr": 0.0498887651569859 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3235294117647059, + "acc_stderr": 0.028418208619406794, + "acc_norm": 0.3235294117647059, + "acc_norm_stderr": 0.028418208619406794 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3836734693877551, + "acc_stderr": 0.031130880396235926, + "acc_norm": 0.3836734693877551, + "acc_norm_stderr": 0.031130880396235926 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5316455696202531, + "acc_stderr": 0.03248197400511076, + "acc_norm": 0.5316455696202531, + "acc_norm_stderr": 0.03248197400511076 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3109517601043025, + "acc_stderr": 0.011822252917799205, + "acc_norm": 0.3109517601043025, + "acc_norm_stderr": 0.011822252917799205 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4117647058823529, + "acc_stderr": 0.034542365853806094, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.034542365853806094 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4909090909090909, + "acc_stderr": 0.03903698647748441, + "acc_norm": 0.4909090909090909, + "acc_norm_stderr": 0.03903698647748441 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.26193390452876375, + 
"mc1_stderr": 0.015392118805015008, + "mc2": 0.4334964063670436, + "mc2_stderr": 0.015359014612689134 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.345926800472255, + "acc_stderr": 0.016353853414347582, + "acc_norm": 0.4332939787485242, + "acc_norm_stderr": 0.0170366836418931 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + 
"harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Heoni/Aguie-chat_v0.1", + "model_sha": "1f54743fd1c6476377a4e668561931b040b28f07", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Heoni/Aguie_v0.1/result_2024-04-04 12:16:41.json b/Heoni/Aguie_v0.1/result_2024-04-04 12:16:41.json new file mode 100644 index 0000000000000000000000000000000000000000..a6bb60e71a4749b63c177263ac34c75cd703eb64 --- /dev/null +++ b/Heoni/Aguie_v0.1/result_2024-04-04 12:16:41.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.36006825938566556, + "acc_stderr": 0.014027516814585188, + "acc_norm": 0.42662116040955633, + "acc_norm_stderr": 0.014453185592920293 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4139613622784306, + "acc_stderr": 0.004915351107318756, + "acc_norm": 0.5642302330213105, + "acc_norm_stderr": 0.004948439229523918 + }, + 
"harness|ko_mmlu_world_religions|5": { + "acc": 0.52046783625731, + "acc_stderr": 0.038316105328219316, + "acc_norm": 0.52046783625731, + "acc_norm_stderr": 0.038316105328219316 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.46601941747572817, + "acc_stderr": 0.04939291447273481, + "acc_norm": 0.46601941747572817, + "acc_norm_stderr": 0.04939291447273481 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5146871008939975, + "acc_stderr": 0.017872248024429122, + "acc_norm": 0.5146871008939975, + "acc_norm_stderr": 0.017872248024429122 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4148148148148148, + "acc_stderr": 0.04256193767901407, + "acc_norm": 0.4148148148148148, + "acc_norm_stderr": 0.04256193767901407 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3829787234042553, + "acc_stderr": 0.03177821250236923, + "acc_norm": 0.3829787234042553, + "acc_norm_stderr": 0.03177821250236923 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4036144578313253, + "acc_stderr": 0.03819486140758398, + "acc_norm": 0.4036144578313253, + "acc_norm_stderr": 0.03819486140758398 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5080385852090032, + "acc_stderr": 0.028394421370984538, + "acc_norm": 0.5080385852090032, + "acc_norm_stderr": 0.028394421370984538 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.47085201793721976, + "acc_stderr": 0.03350073248773404, + "acc_norm": 0.47085201793721976, + "acc_norm_stderr": 0.03350073248773404 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4580152671755725, + "acc_stderr": 0.04369802690578757, + "acc_norm": 0.4580152671755725, + "acc_norm_stderr": 0.04369802690578757 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + 
"harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5151515151515151, + "acc_stderr": 0.0356071651653106, + "acc_norm": 0.5151515151515151, + "acc_norm_stderr": 0.0356071651653106 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4482758620689655, + "acc_stderr": 0.04144311810878151, + "acc_norm": 0.4482758620689655, + "acc_norm_stderr": 0.04144311810878151 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.17647058823529413, + "acc_stderr": 0.0379328118530781, + "acc_norm": 0.17647058823529413, + "acc_norm_stderr": 0.0379328118530781 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4579831932773109, + "acc_stderr": 0.03236361111951941, + "acc_norm": 0.4579831932773109, + "acc_norm_stderr": 0.03236361111951941 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4205128205128205, + "acc_stderr": 0.025028610276710855, + "acc_norm": 0.4205128205128205, + "acc_norm_stderr": 0.025028610276710855 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.04832853553437056, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.04832853553437056 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4088669950738916, + "acc_stderr": 0.034590588158832314, + "acc_norm": 0.4088669950738916, + "acc_norm_stderr": 0.034590588158832314 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.45161290322580644, + "acc_stderr": 0.028310500348568392, + "acc_norm": 0.45161290322580644, + "acc_norm_stderr": 0.028310500348568392 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6452991452991453, + "acc_stderr": 0.031342504862454025, + "acc_norm": 0.6452991452991453, + 
"acc_norm_stderr": 0.031342504862454025 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3886792452830189, + "acc_stderr": 0.030000485448675986, + "acc_norm": 0.3886792452830189, + "acc_norm_stderr": 0.030000485448675986 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.42727272727272725, + "acc_stderr": 0.04738198703545483, + "acc_norm": 0.42727272727272725, + "acc_norm_stderr": 0.04738198703545483 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.02794045713622842, + "acc_norm": 0.3, + "acc_norm_stderr": 0.02794045713622842 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33112582781456956, + "acc_stderr": 0.038425817186598696, + "acc_norm": 0.33112582781456956, + "acc_norm_stderr": 0.038425817186598696 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5870646766169154, + "acc_stderr": 0.03481520803367348, + "acc_norm": 0.5870646766169154, + "acc_norm_stderr": 0.03481520803367348 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3236994219653179, + "acc_stderr": 0.03567603799639172, + "acc_norm": 0.3236994219653179, + "acc_norm_stderr": 0.03567603799639172 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2724867724867725, + "acc_stderr": 0.022930973071633342, + "acc_norm": 0.2724867724867725, + "acc_norm_stderr": 0.022930973071633342 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3819444444444444, + "acc_stderr": 0.040629907841466674, + "acc_norm": 0.3819444444444444, + "acc_norm_stderr": 0.040629907841466674 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.35, + "acc_stderr": 0.04793724854411021, + "acc_norm": 0.35, + "acc_norm_stderr": 0.04793724854411021 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.476878612716763, + "acc_stderr": 0.026890297881303125, + "acc_norm": 
0.476878612716763, + "acc_norm_stderr": 0.026890297881303125 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4171779141104294, + "acc_stderr": 0.038741028598180814, + "acc_norm": 0.4171779141104294, + "acc_norm_stderr": 0.038741028598180814 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4876543209876543, + "acc_stderr": 0.027812262269327242, + "acc_norm": 0.4876543209876543, + "acc_norm_stderr": 0.027812262269327242 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5129533678756477, + "acc_stderr": 0.03607228061047749, + "acc_norm": 0.5129533678756477, + "acc_norm_stderr": 0.03607228061047749 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.039994238792813365, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.039994238792813365 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5211009174311927, + "acc_stderr": 0.021418224754264643, + "acc_norm": 0.5211009174311927, + "acc_norm_stderr": 0.021418224754264643 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.29365079365079366, + "acc_stderr": 0.04073524322147126, + "acc_norm": 0.29365079365079366, + "acc_norm_stderr": 0.04073524322147126 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.43790849673202614, + "acc_stderr": 0.02840830202033269, + "acc_norm": 0.43790849673202614, + "acc_norm_stderr": 0.02840830202033269 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5785123966942148, + "acc_stderr": 0.04507732278775088, + "acc_norm": 0.5785123966942148, + "acc_norm_stderr": 0.04507732278775088 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3684210526315789, + "acc_stderr": 
0.03925523381052932, + "acc_norm": 0.3684210526315789, + "acc_norm_stderr": 0.03925523381052932 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.37254901960784315, + "acc_stderr": 0.019559646809215923, + "acc_norm": 0.37254901960784315, + "acc_norm_stderr": 0.019559646809215923 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32269503546099293, + "acc_stderr": 0.02788913930053478, + "acc_norm": 0.32269503546099293, + "acc_norm_stderr": 0.02788913930053478 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.30357142857142855, + "acc_stderr": 0.043642261558410445, + "acc_norm": 0.30357142857142855, + "acc_norm_stderr": 0.043642261558410445 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.03167468706828978, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.03167468706828978 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.35, + "acc_stderr": 0.04793724854411019, + "acc_norm": 0.35, + "acc_norm_stderr": 0.04793724854411019 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.029520095697687754, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.029520095697687754 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4897959183673469, + "acc_stderr": 0.03200255347893782, + "acc_norm": 0.4897959183673469, + "acc_norm_stderr": 0.03200255347893782 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5780590717299579, + "acc_stderr": 0.032148146302403695, + "acc_norm": 0.5780590717299579, + "acc_norm_stderr": 0.032148146302403695 + }, + 
"harness|ko_mmlu_professional_law|5": { + "acc": 0.3220338983050847, + "acc_stderr": 0.011933936071891096, + "acc_norm": 0.3220338983050847, + "acc_norm_stderr": 0.011933936071891096 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.45588235294117646, + "acc_stderr": 0.03495624522015474, + "acc_norm": 0.45588235294117646, + "acc_norm_stderr": 0.03495624522015474 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5333333333333333, + "acc_stderr": 0.03895658065271846, + "acc_norm": 0.5333333333333333, + "acc_norm_stderr": 0.03895658065271846 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2423500611995104, + "mc1_stderr": 0.015000674373570338, + "mc2": 0.3891445540344024, + "mc2_stderr": 0.014596577327660069 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.40613931523022434, + "acc_stderr": 0.016884749503191396, + "acc_norm": 0.5029515938606848, + "acc_norm_stderr": 0.017190054580194694 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + 
"harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Heoni/Aguie_v0.1", + "model_sha": "3c948a869a2940a690c2a784120c847821b827d2", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git 
a/Herry443/LLaMA2-ko-7B-KNUT-v0.1/result_2023-11-13 06:58:41.json b/Herry443/LLaMA2-ko-7B-KNUT-v0.1/result_2023-11-13 06:58:41.json new file mode 100644 index 0000000000000000000000000000000000000000..89543b210291bdab2ce7c80f9df7e8bcbff12956 --- /dev/null +++ b/Herry443/LLaMA2-ko-7B-KNUT-v0.1/result_2023-11-13 06:58:41.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.30119453924914674, + "acc_stderr": 0.013406741767847626, + "acc_norm": 0.3583617747440273, + "acc_norm_stderr": 0.014012883334859859 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3458474407488548, + "acc_stderr": 0.004746716805735756, + "acc_norm": 0.42939653455486954, + "acc_norm_stderr": 0.004939784311448984 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.03446296217088426, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.03446296217088426 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.1941747572815534, + "acc_stderr": 0.03916667762822584, + "acc_norm": 0.1941747572815534, + "acc_norm_stderr": 0.03916667762822584 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.29757343550446996, + "acc_stderr": 0.016349111912909418, + "acc_norm": 0.29757343550446996, + "acc_norm_stderr": 0.016349111912909418 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2740740740740741, + "acc_stderr": 0.03853254836552003, + "acc_norm": 0.2740740740740741, + "acc_norm_stderr": 0.03853254836552003 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2936170212765957, + "acc_stderr": 0.029771642712491227, + "acc_norm": 0.2936170212765957, + "acc_norm_stderr": 0.029771642712491227 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.26506024096385544, + "acc_stderr": 0.03436024037944967, + "acc_norm": 0.26506024096385544, + "acc_norm_stderr": 0.03436024037944967 + }, + 
"harness|ko_mmlu_philosophy|5": { + "acc": 0.2733118971061093, + "acc_stderr": 0.02531176597542612, + "acc_norm": 0.2733118971061093, + "acc_norm_stderr": 0.02531176597542612 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.23318385650224216, + "acc_stderr": 0.02838039114709472, + "acc_norm": 0.23318385650224216, + "acc_norm_stderr": 0.02838039114709472 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.24427480916030533, + "acc_stderr": 0.03768335959728742, + "acc_norm": 0.24427480916030533, + "acc_norm_stderr": 0.03768335959728742 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036845, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036845 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.25757575757575757, + "acc_stderr": 0.03115626951964684, + "acc_norm": 0.25757575757575757, + "acc_norm_stderr": 0.03115626951964684 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2896551724137931, + "acc_stderr": 0.03780019230438015, + "acc_norm": 0.2896551724137931, + "acc_norm_stderr": 0.03780019230438015 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.12745098039215685, + "acc_stderr": 0.03318224921942077, + "acc_norm": 0.12745098039215685, + "acc_norm_stderr": 0.03318224921942077 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.2605042016806723, + "acc_stderr": 0.028510251512341933, + "acc_norm": 0.2605042016806723, + "acc_norm_stderr": 0.028510251512341933 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2205128205128205, + "acc_stderr": 0.021020672680827912, + "acc_norm": 0.2205128205128205, + "acc_norm_stderr": 0.021020672680827912 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 
0.04512608598542127 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.26851851851851855, + "acc_stderr": 0.04284467968052191, + "acc_norm": 0.26851851851851855, + "acc_norm_stderr": 0.04284467968052191 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2561576354679803, + "acc_stderr": 0.0307127300709826, + "acc_norm": 0.2561576354679803, + "acc_norm_stderr": 0.0307127300709826 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.29354838709677417, + "acc_stderr": 0.025906087021319295, + "acc_norm": 0.29354838709677417, + "acc_norm_stderr": 0.025906087021319295 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.26495726495726496, + "acc_stderr": 0.028911208802749465, + "acc_norm": 0.26495726495726496, + "acc_norm_stderr": 0.028911208802749465 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.23773584905660378, + "acc_stderr": 0.026199808807561932, + "acc_norm": 0.23773584905660378, + "acc_norm_stderr": 0.026199808807561932 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.32727272727272727, + "acc_stderr": 0.04494290866252089, + "acc_norm": 0.32727272727272727, + "acc_norm_stderr": 0.04494290866252089 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.27037037037037037, + "acc_stderr": 0.027080372815145658, + "acc_norm": 0.27037037037037037, + "acc_norm_stderr": 0.027080372815145658 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.23841059602649006, + "acc_stderr": 0.03479185572599659, + "acc_norm": 0.23841059602649006, + "acc_norm_stderr": 0.03479185572599659 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.29850746268656714, + "acc_stderr": 0.032357437893550424, + "acc_norm": 0.29850746268656714, + "acc_norm_stderr": 0.032357437893550424 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2254335260115607, + "acc_stderr": 0.031862098516411454, + "acc_norm": 0.2254335260115607, + "acc_norm_stderr": 0.031862098516411454 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 
0.21428571428571427, + "acc_stderr": 0.021132859182754444, + "acc_norm": 0.21428571428571427, + "acc_norm_stderr": 0.021132859182754444 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.19444444444444445, + "acc_stderr": 0.03309615177059007, + "acc_norm": 0.19444444444444445, + "acc_norm_stderr": 0.03309615177059007 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.16, + "acc_stderr": 0.0368452949177471, + "acc_norm": 0.16, + "acc_norm_stderr": 0.0368452949177471 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720685, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720685 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2774566473988439, + "acc_stderr": 0.024105712607754307, + "acc_norm": 0.2774566473988439, + "acc_norm_stderr": 0.024105712607754307 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.24539877300613497, + "acc_stderr": 0.03380939813943354, + "acc_norm": 0.24539877300613497, + "acc_norm_stderr": 0.03380939813943354 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3117283950617284, + "acc_stderr": 0.02577311116963045, + "acc_norm": 0.3117283950617284, + "acc_norm_stderr": 0.02577311116963045 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542126, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542126 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.21761658031088082, + "acc_stderr": 0.02977866303775295, + "acc_norm": 0.21761658031088082, + "acc_norm_stderr": 0.02977866303775295 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.04049339297748141, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.04049339297748141 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.23486238532110093, + "acc_stderr": 0.018175110510343585, + "acc_norm": 0.23486238532110093, + "acc_norm_stderr": 0.018175110510343585 + }, + 
"harness|ko_mmlu_formal_logic|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.03932537680392871, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.03932537680392871 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.02656892101545715, + "acc_norm": 0.3137254901960784, + "acc_norm_stderr": 0.02656892101545715 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.33884297520661155, + "acc_stderr": 0.04320767807536669, + "acc_norm": 0.33884297520661155, + "acc_norm_stderr": 0.04320767807536669 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.29605263157894735, + "acc_stderr": 0.03715062154998905, + "acc_norm": 0.29605263157894735, + "acc_norm_stderr": 0.03715062154998905 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.0184334276494019, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.0184334276494019 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2872340425531915, + "acc_stderr": 0.026992199173064356, + "acc_norm": 0.2872340425531915, + "acc_norm_stderr": 0.026992199173064356 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2767857142857143, + "acc_stderr": 0.042466243366976256, + "acc_norm": 0.2767857142857143, + "acc_norm_stderr": 0.042466243366976256 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.20833333333333334, + "acc_stderr": 0.027696910713093936, + "acc_norm": 0.20833333333333334, + "acc_norm_stderr": 0.027696910713093936 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2435754189944134, + "acc_stderr": 0.01435591196476786, + "acc_norm": 0.2435754189944134, + "acc_norm_stderr": 0.01435591196476786 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + 
"acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.18382352941176472, + "acc_stderr": 0.023529242185193106, + "acc_norm": 0.18382352941176472, + "acc_norm_stderr": 0.023529242185193106 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.24081632653061225, + "acc_stderr": 0.027372942201788163, + "acc_norm": 0.24081632653061225, + "acc_norm_stderr": 0.027372942201788163 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2911392405063291, + "acc_stderr": 0.02957160106575337, + "acc_norm": 0.2911392405063291, + "acc_norm_stderr": 0.02957160106575337 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.27640156453715775, + "acc_stderr": 0.01142215319455358, + "acc_norm": 0.27640156453715775, + "acc_norm_stderr": 0.01142215319455358 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.030190282453501943, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.030190282453501943 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.03453131801885416, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.03453131801885416 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.22031823745410037, + "mc1_stderr": 0.014509045171487283, + "mc2": 0.3876715630562864, + "mc2_stderr": 0.014780799577275159 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2668240850059032, + "acc_stderr": 0.015206575684565892, + "acc_norm": 0.40731995277449823, + "acc_norm_stderr": 0.01689245669519127 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, 
+ "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + 
"harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Herry443/LLaMA2-ko-7B-KNUT-v0.1", + "model_sha": "823d2fece402a057d1a68be83c80985d57a37471", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Herry443/Mistral-7B-KNUT-ref-ALL/result_2024-02-05 06:09:30.json b/Herry443/Mistral-7B-KNUT-ref-ALL/result_2024-02-05 06:09:30.json new file mode 100644 index 0000000000000000000000000000000000000000..d63a1e46ba3247ff59e713a2fec50697b2ecc65b --- /dev/null +++ b/Herry443/Mistral-7B-KNUT-ref-ALL/result_2024-02-05 06:09:30.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.19027303754266212, + "acc_stderr": 0.011470424179225702, + "acc_norm": 0.23976109215017063, + "acc_norm_stderr": 0.012476304127453956 + }, + "harness|ko_hellaswag|10": { + "acc": 0.2848038239394543, + "acc_stderr": 0.004503985839041984, + "acc_norm": 0.31607249551882094, + "acc_norm_stderr": 0.0046399137096159344 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.1286549707602339, + "acc_stderr": 0.02567934272327694, + "acc_norm": 0.1286549707602339, + "acc_norm_stderr": 0.02567934272327694 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.3592233009708738, + "acc_stderr": 0.04750458399041694, + "acc_norm": 0.3592233009708738, + "acc_norm_stderr": 0.04750458399041694 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2720306513409962, + "acc_stderr": 
0.015913367447500524, + "acc_norm": 0.2720306513409962, + "acc_norm_stderr": 0.015913367447500524 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.22962962962962963, + "acc_stderr": 0.036333844140734636, + "acc_norm": 0.22962962962962963, + "acc_norm_stderr": 0.036333844140734636 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.12, + "acc_stderr": 0.03265986323710905, + "acc_norm": 0.12, + "acc_norm_stderr": 0.03265986323710905 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.225531914893617, + "acc_stderr": 0.02732107841738753, + "acc_norm": 0.225531914893617, + "acc_norm_stderr": 0.02732107841738753 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.28313253012048195, + "acc_stderr": 0.03507295431370518, + "acc_norm": 0.28313253012048195, + "acc_norm_stderr": 0.03507295431370518 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3054662379421222, + "acc_stderr": 0.026160584450140474, + "acc_norm": 0.3054662379421222, + "acc_norm_stderr": 0.026160584450140474 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.2600896860986547, + "acc_stderr": 0.029442495585857476, + "acc_norm": 0.2600896860986547, + "acc_norm_stderr": 0.029442495585857476 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2366412213740458, + "acc_stderr": 0.037276735755969195, + "acc_norm": 0.2366412213740458, + "acc_norm_stderr": 0.037276735755969195 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.18181818181818182, + "acc_stderr": 0.0274796030105388, + "acc_norm": 0.18181818181818182, + "acc_norm_stderr": 0.0274796030105388 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.19310344827586207, + "acc_stderr": 0.03289445522127402, + "acc_norm": 0.19310344827586207, + "acc_norm_stderr": 0.03289445522127402 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3627450980392157, + 
"acc_stderr": 0.04784060704105653, + "acc_norm": 0.3627450980392157, + "acc_norm_stderr": 0.04784060704105653 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3487394957983193, + "acc_stderr": 0.030956636328566548, + "acc_norm": 0.3487394957983193, + "acc_norm_stderr": 0.030956636328566548 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.37435897435897436, + "acc_stderr": 0.024537591572830517, + "acc_norm": 0.37435897435897436, + "acc_norm_stderr": 0.024537591572830517 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.18, + "acc_stderr": 0.03861229196653694, + "acc_norm": 0.18, + "acc_norm_stderr": 0.03861229196653694 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.042365112580946315, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.042365112580946315 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2955665024630542, + "acc_stderr": 0.032104944337514575, + "acc_norm": 0.2955665024630542, + "acc_norm_stderr": 0.032104944337514575 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3193548387096774, + "acc_stderr": 0.026522709674667768, + "acc_norm": 0.3193548387096774, + "acc_norm_stderr": 0.026522709674667768 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.19658119658119658, + "acc_stderr": 0.02603538609895129, + "acc_norm": 0.19658119658119658, + "acc_norm_stderr": 0.02603538609895129 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.25660377358490566, + "acc_stderr": 0.02688064788905199, + "acc_norm": 0.25660377358490566, + "acc_norm_stderr": 0.02688064788905199 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.19090909090909092, + "acc_stderr": 0.03764425585984925, + "acc_norm": 0.19090909090909092, + "acc_norm_stderr": 0.03764425585984925 + }, + 
"harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.02784081149587192, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.02784081149587192 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33112582781456956, + "acc_stderr": 0.038425817186598696, + "acc_norm": 0.33112582781456956, + "acc_norm_stderr": 0.038425817186598696 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.23880597014925373, + "acc_stderr": 0.030147775935409224, + "acc_norm": 0.23880597014925373, + "acc_norm_stderr": 0.030147775935409224 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2947976878612717, + "acc_stderr": 0.034765996075164785, + "acc_norm": 0.2947976878612717, + "acc_norm_stderr": 0.034765996075164785 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.24603174603174602, + "acc_stderr": 0.022182037202948368, + "acc_norm": 0.24603174603174602, + "acc_norm_stderr": 0.022182037202948368 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2708333333333333, + "acc_stderr": 0.03716177437566017, + "acc_norm": 0.2708333333333333, + "acc_norm_stderr": 0.03716177437566017 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.24277456647398843, + "acc_stderr": 0.0230836585869842, + "acc_norm": 0.24277456647398843, + "acc_norm_stderr": 0.0230836585869842 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.26380368098159507, + "acc_stderr": 0.034624199316156234, + "acc_norm": 0.26380368098159507, + "acc_norm_stderr": 0.034624199316156234 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2191358024691358, + "acc_stderr": 0.023016705640262185, + "acc_norm": 0.2191358024691358, + 
"acc_norm_stderr": 0.023016705640262185 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.36787564766839376, + "acc_stderr": 0.034801756684660366, + "acc_norm": 0.36787564766839376, + "acc_norm_stderr": 0.034801756684660366 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.03999423879281336, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.03999423879281336 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.1963302752293578, + "acc_stderr": 0.017030719339154368, + "acc_norm": 0.1963302752293578, + "acc_norm_stderr": 0.017030719339154368 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.04134913018303316, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.04134913018303316 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.02495418432487991, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.02495418432487991 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384739, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384739 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.23140495867768596, + "acc_stderr": 0.03849856098794088, + "acc_norm": 0.23140495867768596, + "acc_norm_stderr": 0.03849856098794088 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.17763157894736842, + "acc_stderr": 0.03110318238312338, + "acc_norm": 0.17763157894736842, + "acc_norm_stderr": 0.03110318238312338 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.238562091503268, + "acc_stderr": 0.017242385828779606, + "acc_norm": 0.238562091503268, + "acc_norm_stderr": 0.017242385828779606 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2765957446808511, + "acc_stderr": 
0.026684564340460987, + "acc_norm": 0.2765957446808511, + "acc_norm_stderr": 0.026684564340460987 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.16964285714285715, + "acc_stderr": 0.03562367850095391, + "acc_norm": 0.16964285714285715, + "acc_norm_stderr": 0.03562367850095391 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4305555555555556, + "acc_stderr": 0.03376922151252335, + "acc_norm": 0.4305555555555556, + "acc_norm_stderr": 0.03376922151252335 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23798882681564246, + "acc_stderr": 0.014242630070574892, + "acc_norm": 0.23798882681564246, + "acc_norm_stderr": 0.014242630070574892 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4485294117647059, + "acc_stderr": 0.030211479609121593, + "acc_norm": 0.4485294117647059, + "acc_norm_stderr": 0.030211479609121593 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3836734693877551, + "acc_stderr": 0.031130880396235922, + "acc_norm": 0.3836734693877551, + "acc_norm_stderr": 0.031130880396235922 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2742616033755274, + "acc_stderr": 0.029041333510598018, + "acc_norm": 0.2742616033755274, + "acc_norm_stderr": 0.029041333510598018 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2503259452411995, + "acc_stderr": 0.011064151027165438, + "acc_norm": 0.2503259452411995, + "acc_norm_stderr": 0.011064151027165438 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.3088235294117647, + "acc_stderr": 0.03242661719827218, + "acc_norm": 0.3088235294117647, + "acc_norm_stderr": 0.03242661719827218 + }, + 
"harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.20606060606060606, + "acc_stderr": 0.03158415324047709, + "acc_norm": 0.20606060606060606, + "acc_norm_stderr": 0.03158415324047709 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.26193390452876375, + "mc1_stderr": 0.01539211880501501, + "mc2": 0.4275229431547429, + "mc2_stderr": 0.015942592796773743 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.19008264462809918, + "acc_stderr": 0.013489827742736773, + "acc_norm": 0.2502951593860685, + "acc_norm_stderr": 0.014893137573316869 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + 
"harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Herry443/Mistral-7B-KNUT-ref-ALL", + "model_sha": "95f28cdf865867be553670e9665149f0ca0f78c9", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Herry443/Mistral-7B-KNUT-ref/result_2024-02-06 03:44:34.json b/Herry443/Mistral-7B-KNUT-ref/result_2024-02-06 03:44:34.json new file mode 100644 index 0000000000000000000000000000000000000000..1f6c13ad3b8c497419e00ed6dfc4c91652d2c74c --- /dev/null +++ b/Herry443/Mistral-7B-KNUT-ref/result_2024-02-06 03:44:34.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 
0.3037542662116041, + "acc_stderr": 0.013438909184778755, + "acc_norm": 0.3575085324232082, + "acc_norm_stderr": 0.014005494275916573 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3509261103365863, + "acc_stderr": 0.00476284477090985, + "acc_norm": 0.44921330412268473, + "acc_norm_stderr": 0.004963974504003033 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4619883040935672, + "acc_stderr": 0.03823727092882307, + "acc_norm": 0.4619883040935672, + "acc_norm_stderr": 0.03823727092882307 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5339805825242718, + "acc_stderr": 0.04939291447273481, + "acc_norm": 0.5339805825242718, + "acc_norm_stderr": 0.04939291447273481 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4674329501915709, + "acc_stderr": 0.01784199575052086, + "acc_norm": 0.4674329501915709, + "acc_norm_stderr": 0.01784199575052086 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34074074074074073, + "acc_stderr": 0.040943762699967946, + "acc_norm": 0.34074074074074073, + "acc_norm_stderr": 0.040943762699967946 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3574468085106383, + "acc_stderr": 0.03132941789476425, + "acc_norm": 0.3574468085106383, + "acc_norm_stderr": 0.03132941789476425 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3373493975903614, + "acc_stderr": 0.03680783690727581, + "acc_norm": 0.3373493975903614, + "acc_norm_stderr": 0.03680783690727581 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.43729903536977494, + "acc_stderr": 0.02817391776176287, + "acc_norm": 0.43729903536977494, + "acc_norm_stderr": 0.02817391776176287 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4170403587443946, + "acc_stderr": 0.03309266936071721, + "acc_norm": 0.4170403587443946, + "acc_norm_stderr": 0.03309266936071721 + }, + "harness|ko_mmlu_human_sexuality|5": { + 
"acc": 0.4122137404580153, + "acc_stderr": 0.04317171194870255, + "acc_norm": 0.4122137404580153, + "acc_norm_stderr": 0.04317171194870255 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.48484848484848486, + "acc_stderr": 0.03560716516531061, + "acc_norm": 0.48484848484848486, + "acc_norm_stderr": 0.03560716516531061 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.36551724137931035, + "acc_stderr": 0.04013124195424385, + "acc_norm": 0.36551724137931035, + "acc_norm_stderr": 0.04013124195424385 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.044405219061793254, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.044405219061793254 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5210084033613446, + "acc_stderr": 0.032449808499900284, + "acc_norm": 0.5210084033613446, + "acc_norm_stderr": 0.032449808499900284 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.38974358974358975, + "acc_stderr": 0.024726967886647074, + "acc_norm": 0.38974358974358975, + "acc_norm_stderr": 0.024726967886647074 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.53, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.53, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5092592592592593, + "acc_stderr": 0.04832853553437056, + "acc_norm": 0.5092592592592593, + "acc_norm_stderr": 0.04832853553437056 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.37438423645320196, + "acc_stderr": 0.03405155380561952, + "acc_norm": 0.37438423645320196, + "acc_norm_stderr": 0.03405155380561952 + }, + 
"harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4129032258064516, + "acc_stderr": 0.028009138125400387, + "acc_norm": 0.4129032258064516, + "acc_norm_stderr": 0.028009138125400387 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6794871794871795, + "acc_stderr": 0.030572811310299607, + "acc_norm": 0.6794871794871795, + "acc_norm_stderr": 0.030572811310299607 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4528301886792453, + "acc_stderr": 0.030635627957961823, + "acc_norm": 0.4528301886792453, + "acc_norm_stderr": 0.030635627957961823 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4636363636363636, + "acc_stderr": 0.04776449162396197, + "acc_norm": 0.4636363636363636, + "acc_norm_stderr": 0.04776449162396197 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.02794045713622841, + "acc_norm": 0.3, + "acc_norm_stderr": 0.02794045713622841 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33774834437086093, + "acc_stderr": 0.038615575462551684, + "acc_norm": 0.33774834437086093, + "acc_norm_stderr": 0.038615575462551684 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5223880597014925, + "acc_stderr": 0.035319879302087305, + "acc_norm": 0.5223880597014925, + "acc_norm_stderr": 0.035319879302087305 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.37572254335260113, + "acc_stderr": 0.036928207672648664, + "acc_norm": 0.37572254335260113, + "acc_norm_stderr": 0.036928207672648664 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.335978835978836, + "acc_stderr": 0.024326310529149135, + "acc_norm": 0.335978835978836, + "acc_norm_stderr": 0.024326310529149135 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3263888888888889, + "acc_stderr": 0.03921067198982266, + "acc_norm": 0.3263888888888889, + "acc_norm_stderr": 0.03921067198982266 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + 
"acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.53, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.53, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4653179190751445, + "acc_stderr": 0.026854257928258882, + "acc_norm": 0.4653179190751445, + "acc_norm_stderr": 0.026854257928258882 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.39263803680981596, + "acc_stderr": 0.03836740907831029, + "acc_norm": 0.39263803680981596, + "acc_norm_stderr": 0.03836740907831029 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.027431623722415012, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.027431623722415012 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.46632124352331605, + "acc_stderr": 0.03600244069867178, + "acc_norm": 0.46632124352331605, + "acc_norm_stderr": 0.03600244069867178 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.041424397194893596, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.041424397194893596 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.45871559633027525, + "acc_stderr": 0.02136412253388169, + "acc_norm": 0.45871559633027525, + "acc_norm_stderr": 0.02136412253388169 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.31746031746031744, + "acc_stderr": 0.04163453031302859, + "acc_norm": 0.31746031746031744, + "acc_norm_stderr": 0.04163453031302859 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4477124183006536, + "acc_stderr": 0.028472938478033526, + "acc_norm": 0.4477124183006536, + "acc_norm_stderr": 0.028472938478033526 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.47, + "acc_stderr": 0.050161355804659205, + 
"acc_norm": 0.47, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.043913262867240704, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.043913262867240704 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3881578947368421, + "acc_stderr": 0.03965842097512744, + "acc_norm": 0.3881578947368421, + "acc_norm_stderr": 0.03965842097512744 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3431372549019608, + "acc_stderr": 0.019206606848825365, + "acc_norm": 0.3431372549019608, + "acc_norm_stderr": 0.019206606848825365 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3262411347517731, + "acc_stderr": 0.027968453043563168, + "acc_norm": 0.3262411347517731, + "acc_norm_stderr": 0.027968453043563168 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.04547960999764376, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.04547960999764376 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.03350991604696043, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.03350991604696043 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2994413407821229, + "acc_stderr": 0.015318257745976708, + "acc_norm": 0.2994413407821229, + "acc_norm_stderr": 0.015318257745976708 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4227941176470588, + "acc_stderr": 0.030008562845003476, + "acc_norm": 0.4227941176470588, + "acc_norm_stderr": 0.030008562845003476 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 
0.4163265306122449, + "acc_stderr": 0.031557828165561644, + "acc_norm": 0.4163265306122449, + "acc_norm_stderr": 0.031557828165561644 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5907172995780591, + "acc_stderr": 0.032007041833595914, + "acc_norm": 0.5907172995780591, + "acc_norm_stderr": 0.032007041833595914 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3318122555410691, + "acc_stderr": 0.012026088259897625, + "acc_norm": 0.3318122555410691, + "acc_norm_stderr": 0.012026088259897625 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.49019607843137253, + "acc_stderr": 0.03508637358630573, + "acc_norm": 0.49019607843137253, + "acc_norm_stderr": 0.03508637358630573 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.45454545454545453, + "acc_stderr": 0.03888176921674099, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.03888176921674099 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2839657282741738, + "mc1_stderr": 0.01578537085839671, + "mc2": 0.47210945803385745, + "mc2_stderr": 0.015515090553210324 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4757969303423849, + "acc_stderr": 0.017170202466520748, + "acc_norm": 0.5726092089728453, + "acc_norm_stderr": 0.017008129844823153 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + 
"harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + 
"harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Herry443/Mistral-7B-KNUT-ref", + "model_sha": "90de3478ca2a99cda0999bcb915fffb695359898", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Herry443/Mistral-7B-KNUT-v0.1/result_2023-10-26 05:52:58.json b/Herry443/Mistral-7B-KNUT-v0.1/result_2023-10-26 05:52:58.json new file mode 100644 index 0000000000000000000000000000000000000000..bb80f183bd26b849e9a2c3c4bb8753a6915a2219 --- /dev/null +++ b/Herry443/Mistral-7B-KNUT-v0.1/result_2023-10-26 05:52:58.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2687713310580205, + "acc_stderr": 0.012955065963710682, + "acc_norm": 0.3225255972696246, + "acc_norm_stderr": 0.013659980894277366 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3323043218482374, + "acc_stderr": 0.004700767741735566, + "acc_norm": 0.4056960764787891, + "acc_norm_stderr": 0.004900227226433385 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4327485380116959, + "acc_stderr": 0.037999786443706066, + "acc_norm": 0.4327485380116959, + "acc_norm_stderr": 0.037999786443706066 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2912621359223301, + "acc_stderr": 0.044986763205729224, + "acc_norm": 0.2912621359223301, + "acc_norm_stderr": 0.044986763205729224 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3371647509578544, + "acc_stderr": 0.01690520742080355, + "acc_norm": 0.3371647509578544, + "acc_norm_stderr": 0.01690520742080355 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.03944624162501117, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.03944624162501117 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + 
"harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3659574468085106, + "acc_stderr": 0.031489558297455304, + "acc_norm": 0.3659574468085106, + "acc_norm_stderr": 0.031489558297455304 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3253012048192771, + "acc_stderr": 0.03647168523683227, + "acc_norm": 0.3253012048192771, + "acc_norm_stderr": 0.03647168523683227 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.28938906752411575, + "acc_stderr": 0.025755865922632945, + "acc_norm": 0.28938906752411575, + "acc_norm_stderr": 0.025755865922632945 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3542600896860987, + "acc_stderr": 0.03210062154134987, + "acc_norm": 0.3542600896860987, + "acc_norm_stderr": 0.03210062154134987 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.29770992366412213, + "acc_stderr": 0.04010358942462203, + "acc_norm": 0.29770992366412213, + "acc_norm_stderr": 0.04010358942462203 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.3, + "acc_stderr": 0.04605661864718381, + "acc_norm": 0.3, + "acc_norm_stderr": 0.04605661864718381 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.29292929292929293, + "acc_stderr": 0.032424979581788166, + "acc_norm": 0.29292929292929293, + "acc_norm_stderr": 0.032424979581788166 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3310344827586207, + "acc_stderr": 0.039215453124671215, + "acc_norm": 0.3310344827586207, + "acc_norm_stderr": 0.039215453124671215 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.16666666666666666, + "acc_stderr": 0.03708284662416544, + "acc_norm": 0.16666666666666666, + "acc_norm_stderr": 0.03708284662416544 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.27310924369747897, + "acc_stderr": 0.028942004040998167, + "acc_norm": 0.27310924369747897, + "acc_norm_stderr": 0.028942004040998167 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.28717948717948716, + "acc_stderr": 0.022939925418530616, + 
"acc_norm": 0.28717948717948716, + "acc_norm_stderr": 0.022939925418530616 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.32407407407407407, + "acc_stderr": 0.04524596007030049, + "acc_norm": 0.32407407407407407, + "acc_norm_stderr": 0.04524596007030049 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.28078817733990147, + "acc_stderr": 0.0316185633535861, + "acc_norm": 0.28078817733990147, + "acc_norm_stderr": 0.0316185633535861 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.25483870967741934, + "acc_stderr": 0.024790118459332208, + "acc_norm": 0.25483870967741934, + "acc_norm_stderr": 0.024790118459332208 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.452991452991453, + "acc_stderr": 0.03261099873098619, + "acc_norm": 0.452991452991453, + "acc_norm_stderr": 0.03261099873098619 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2339622641509434, + "acc_stderr": 0.026055296901152922, + "acc_norm": 0.2339622641509434, + "acc_norm_stderr": 0.026055296901152922 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4090909090909091, + "acc_stderr": 0.04709306978661896, + "acc_norm": 0.4090909090909091, + "acc_norm_stderr": 0.04709306978661896 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3296296296296296, + "acc_stderr": 0.028661201116524575, + "acc_norm": 0.3296296296296296, + "acc_norm_stderr": 0.028661201116524575 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.26490066225165565, + "acc_stderr": 0.036030385453603854, + "acc_norm": 0.26490066225165565, + "acc_norm_stderr": 0.036030385453603854 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.39303482587064675, + 
"acc_stderr": 0.0345368246603156, + "acc_norm": 0.39303482587064675, + "acc_norm_stderr": 0.0345368246603156 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2947976878612717, + "acc_stderr": 0.03476599607516478, + "acc_norm": 0.2947976878612717, + "acc_norm_stderr": 0.03476599607516478 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2698412698412698, + "acc_stderr": 0.022860838309232072, + "acc_norm": 0.2698412698412698, + "acc_norm_stderr": 0.022860838309232072 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2916666666666667, + "acc_stderr": 0.03800968060554858, + "acc_norm": 0.2916666666666667, + "acc_norm_stderr": 0.03800968060554858 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.3092485549132948, + "acc_stderr": 0.02488314057007176, + "acc_norm": 0.3092485549132948, + "acc_norm_stderr": 0.02488314057007176 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.25766871165644173, + "acc_stderr": 0.03436150827846917, + "acc_norm": 0.25766871165644173, + "acc_norm_stderr": 0.03436150827846917 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.32407407407407407, + "acc_stderr": 0.026041766202717163, + "acc_norm": 0.32407407407407407, + "acc_norm_stderr": 0.026041766202717163 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.23834196891191708, + "acc_stderr": 0.030748905363909895, + "acc_norm": 0.23834196891191708, + "acc_norm_stderr": 0.030748905363909895 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 
0.2543859649122807, + "acc_stderr": 0.0409698513984367, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.0409698513984367 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.26788990825688075, + "acc_stderr": 0.018987462257978652, + "acc_norm": 0.26788990825688075, + "acc_norm_stderr": 0.018987462257978652 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.04040610178208841, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.04040610178208841 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.30718954248366015, + "acc_stderr": 0.026415601914389002, + "acc_norm": 0.30718954248366015, + "acc_norm_stderr": 0.026415601914389002 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.36363636363636365, + "acc_stderr": 0.04391326286724071, + "acc_norm": 0.36363636363636365, + "acc_norm_stderr": 0.04391326286724071 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.2565789473684211, + "acc_stderr": 0.03554180368025689, + "acc_norm": 0.2565789473684211, + "acc_norm_stderr": 0.03554180368025689 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.29901960784313725, + "acc_stderr": 0.01852175621542303, + "acc_norm": 0.29901960784313725, + "acc_norm_stderr": 0.01852175621542303 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.30851063829787234, + "acc_stderr": 0.027553366165101373, + "acc_norm": 0.30851063829787234, + "acc_norm_stderr": 0.027553366165101373 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.39285714285714285, + "acc_stderr": 0.04635550135609976, + "acc_norm": 0.39285714285714285, + "acc_norm_stderr": 0.04635550135609976 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.18981481481481483, + "acc_stderr": 0.026744714834691943, + "acc_norm": 0.18981481481481483, + "acc_norm_stderr": 
0.026744714834691943 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27039106145251396, + "acc_stderr": 0.014854993938010083, + "acc_norm": 0.27039106145251396, + "acc_norm_stderr": 0.014854993938010083 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.2426470588235294, + "acc_stderr": 0.026040662474201278, + "acc_norm": 0.2426470588235294, + "acc_norm_stderr": 0.026040662474201278 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.24897959183673468, + "acc_stderr": 0.027682979522960244, + "acc_norm": 0.24897959183673468, + "acc_norm_stderr": 0.027682979522960244 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.35443037974683544, + "acc_stderr": 0.0311373042971858, + "acc_norm": 0.35443037974683544, + "acc_norm_stderr": 0.0311373042971858 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.28292046936114734, + "acc_stderr": 0.011503891323188976, + "acc_norm": 0.28292046936114734, + "acc_norm_stderr": 0.011503891323188976 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.30392156862745096, + "acc_stderr": 0.032282103870378935, + "acc_norm": 0.30392156862745096, + "acc_norm_stderr": 0.032282103870378935 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2606060606060606, + "acc_stderr": 0.03427743175816524, + "acc_norm": 0.2606060606060606, + "acc_norm_stderr": 0.03427743175816524 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2631578947368421, + "mc1_stderr": 0.01541524174023704, + "mc2": 0.4418547715713716, + "mc2_stderr": 0.01568020575059561 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3010625737898465, + "acc_stderr": 
0.015771113299945454, + "acc_norm": 0.448642266824085, + "acc_norm_stderr": 0.017099430514725792 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + 
"harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Herry443/Mistral-7B-KNUT-v0.1", + "model_sha": "b90832d18d355d77c2e25181f59075070d946978", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Herry443/Mistral-7B-KNUT-v0.2/result_2023-11-28 02:55:57.json b/Herry443/Mistral-7B-KNUT-v0.2/result_2023-11-28 02:55:57.json new file mode 100644 index 0000000000000000000000000000000000000000..bece12d4a9ec556eca09300e60046bf485f761cd --- /dev/null +++ b/Herry443/Mistral-7B-KNUT-v0.2/result_2023-11-28 02:55:57.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.26109215017064846, + "acc_stderr": 0.012835523909473847, + "acc_norm": 0.30204778156996587, + "acc_norm_stderr": 0.013417519144716417 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3200557657837084, + "acc_stderr": 0.0046554427665994646, + "acc_norm": 0.38856801433977295, + "acc_norm_stderr": 0.004864286176731832 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.32748538011695905, + "acc_stderr": 0.03599335771456027, + 
"acc_norm": 0.32748538011695905, + "acc_norm_stderr": 0.03599335771456027 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4563106796116505, + "acc_stderr": 0.04931801994220414, + "acc_norm": 0.4563106796116505, + "acc_norm_stderr": 0.04931801994220414 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3486590038314176, + "acc_stderr": 0.01704124314349093, + "acc_norm": 0.3486590038314176, + "acc_norm_stderr": 0.01704124314349093 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3037037037037037, + "acc_stderr": 0.03972552884785138, + "acc_norm": 0.3037037037037037, + "acc_norm_stderr": 0.03972552884785138 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421255, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421255 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4085106382978723, + "acc_stderr": 0.03213418026701576, + "acc_norm": 0.4085106382978723, + "acc_norm_stderr": 0.03213418026701576 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.35542168674698793, + "acc_stderr": 0.03726214354322415, + "acc_norm": 0.35542168674698793, + "acc_norm_stderr": 0.03726214354322415 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.34726688102893893, + "acc_stderr": 0.027040745502307336, + "acc_norm": 0.34726688102893893, + "acc_norm_stderr": 0.027040745502307336 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.28699551569506726, + "acc_stderr": 0.030360379710291964, + "acc_norm": 0.28699551569506726, + "acc_norm_stderr": 0.030360379710291964 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4198473282442748, + "acc_stderr": 0.043285772152629715, + "acc_norm": 0.4198473282442748, + "acc_norm_stderr": 0.043285772152629715 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939098, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939098 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.41414141414141414, + "acc_stderr": 0.03509438348879628, + 
"acc_norm": 0.41414141414141414, + "acc_norm_stderr": 0.03509438348879628 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3793103448275862, + "acc_stderr": 0.04043461861916747, + "acc_norm": 0.3793103448275862, + "acc_norm_stderr": 0.04043461861916747 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.042207736591714534, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.042207736591714534 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3697478991596639, + "acc_stderr": 0.031357095996135904, + "acc_norm": 0.3697478991596639, + "acc_norm_stderr": 0.031357095996135904 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.36666666666666664, + "acc_stderr": 0.024433016466052452, + "acc_norm": 0.36666666666666664, + "acc_norm_stderr": 0.024433016466052452 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.41, + "acc_stderr": 0.04943110704237101, + "acc_norm": 0.41, + "acc_norm_stderr": 0.04943110704237101 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.04453197507374983, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.04453197507374983 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.35467980295566504, + "acc_stderr": 0.03366124489051449, + "acc_norm": 0.35467980295566504, + "acc_norm_stderr": 0.03366124489051449 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3870967741935484, + "acc_stderr": 0.02770935967503249, + "acc_norm": 0.3870967741935484, + "acc_norm_stderr": 0.02770935967503249 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5085470085470085, + "acc_stderr": 0.0327513030009703, + "acc_norm": 0.5085470085470085, + "acc_norm_stderr": 0.0327513030009703 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 
0.3886792452830189, + "acc_stderr": 0.030000485448675986, + "acc_norm": 0.3886792452830189, + "acc_norm_stderr": 0.030000485448675986 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.33636363636363636, + "acc_stderr": 0.04525393596302506, + "acc_norm": 0.33636363636363636, + "acc_norm_stderr": 0.04525393596302506 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2851851851851852, + "acc_stderr": 0.027528599210340492, + "acc_norm": 0.2851851851851852, + "acc_norm_stderr": 0.027528599210340492 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.03780445850526733, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.03780445850526733 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.39800995024875624, + "acc_stderr": 0.034611994290400135, + "acc_norm": 0.39800995024875624, + "acc_norm_stderr": 0.034611994290400135 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3699421965317919, + "acc_stderr": 0.03681229633394319, + "acc_norm": 0.3699421965317919, + "acc_norm_stderr": 0.03681229633394319 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.02326651221373057, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.02326651221373057 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2916666666666667, + "acc_stderr": 0.038009680605548574, + "acc_norm": 0.2916666666666667, + "acc_norm_stderr": 0.038009680605548574 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.4, + "acc_stderr": 0.0492365963917331, + "acc_norm": 0.4, + "acc_norm_stderr": 0.0492365963917331 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.38439306358381503, + "acc_stderr": 0.026189666966272035, + "acc_norm": 0.38439306358381503, + "acc_norm_stderr": 0.026189666966272035 + }, + 
"harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.26993865030674846, + "acc_stderr": 0.034878251684978906, + "acc_norm": 0.26993865030674846, + "acc_norm_stderr": 0.034878251684978906 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.32098765432098764, + "acc_stderr": 0.025976566010862737, + "acc_norm": 0.32098765432098764, + "acc_norm_stderr": 0.025976566010862737 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.41968911917098445, + "acc_stderr": 0.035615873276858834, + "acc_norm": 0.41968911917098445, + "acc_norm_stderr": 0.035615873276858834 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.22807017543859648, + "acc_stderr": 0.03947152782669415, + "acc_norm": 0.22807017543859648, + "acc_norm_stderr": 0.03947152782669415 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.41100917431192663, + "acc_stderr": 0.021095050687277638, + "acc_norm": 0.41100917431192663, + "acc_norm_stderr": 0.021095050687277638 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.04285714285714281, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.04285714285714281 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.43790849673202614, + "acc_stderr": 0.02840830202033269, + "acc_norm": 0.43790849673202614, + "acc_norm_stderr": 0.02840830202033269 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5371900826446281, + "acc_stderr": 0.04551711196104218, + "acc_norm": 0.5371900826446281, + "acc_norm_stderr": 0.04551711196104218 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.34210526315789475, + "acc_stderr": 0.038607315993160904, + "acc_norm": 0.34210526315789475, + 
"acc_norm_stderr": 0.038607315993160904 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2826797385620915, + "acc_stderr": 0.01821726955205342, + "acc_norm": 0.2826797385620915, + "acc_norm_stderr": 0.01821726955205342 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.23404255319148937, + "acc_stderr": 0.025257861359432403, + "acc_norm": 0.23404255319148937, + "acc_norm_stderr": 0.025257861359432403 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2767857142857143, + "acc_stderr": 0.042466243366976256, + "acc_norm": 0.2767857142857143, + "acc_norm_stderr": 0.042466243366976256 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4027777777777778, + "acc_stderr": 0.03344887382997865, + "acc_norm": 0.4027777777777778, + "acc_norm_stderr": 0.03344887382997865 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2536312849162011, + "acc_stderr": 0.01455155365936992, + "acc_norm": 0.2536312849162011, + "acc_norm_stderr": 0.01455155365936992 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720683, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720683 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4338235294117647, + "acc_stderr": 0.030105636570016636, + "acc_norm": 0.4338235294117647, + "acc_norm_stderr": 0.030105636570016636 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3795918367346939, + "acc_stderr": 0.03106721126287249, + "acc_norm": 0.3795918367346939, + "acc_norm_stderr": 0.03106721126287249 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.38396624472573837, + "acc_stderr": 0.031658678064106674, + "acc_norm": 0.38396624472573837, + "acc_norm_stderr": 0.031658678064106674 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 
0.2522816166883963, + "acc_stderr": 0.011092789056875248, + "acc_norm": 0.2522816166883963, + "acc_norm_stderr": 0.011092789056875248 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.03198001660115072, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.03198001660115072 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2909090909090909, + "acc_stderr": 0.03546563019624337, + "acc_norm": 0.2909090909090909, + "acc_norm_stderr": 0.03546563019624337 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2521419828641371, + "mc1_stderr": 0.015201522246299948, + "mc2": 0.42165436242518467, + "mc2_stderr": 0.015410741976473186 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2845336481700118, + "acc_stderr": 0.015512301654971767, + "acc_norm": 0.40613931523022434, + "acc_norm_stderr": 0.016884749503191392 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + 
"harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Herry443/Mistral-7B-KNUT-v0.2", + "model_sha": "ba21d9b13304dcef6c9d0f0f24d2e7893d569a5c", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git 
a/Herry443/Mistral-7B-KNUT-v0.3/result_2023-12-09 08:52:37.json b/Herry443/Mistral-7B-KNUT-v0.3/result_2023-12-09 08:52:37.json new file mode 100644 index 0000000000000000000000000000000000000000..3f2d446a9be02ff09b142825d2e8fb64b2c745e8 --- /dev/null +++ b/Herry443/Mistral-7B-KNUT-v0.3/result_2023-12-09 08:52:37.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2738907849829352, + "acc_stderr": 0.013032004972989503, + "acc_norm": 0.3054607508532423, + "acc_norm_stderr": 0.013460080478002505 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3260306711810396, + "acc_stderr": 0.004678006403691725, + "acc_norm": 0.40021907986456884, + "acc_norm_stderr": 0.004889413126208774 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.32748538011695905, + "acc_stderr": 0.035993357714560276, + "acc_norm": 0.32748538011695905, + "acc_norm_stderr": 0.035993357714560276 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.32038834951456313, + "acc_stderr": 0.0462028408228004, + "acc_norm": 0.32038834951456313, + "acc_norm_stderr": 0.0462028408228004 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3282247765006386, + "acc_stderr": 0.01679168564019289, + "acc_norm": 0.3282247765006386, + "acc_norm_stderr": 0.01679168564019289 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.038201699145179055, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.038201699145179055 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.33617021276595743, + "acc_stderr": 0.030881618520676942, + "acc_norm": 0.33617021276595743, + "acc_norm_stderr": 0.030881618520676942 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3433734939759036, + "acc_stderr": 0.03696584317010601, + "acc_norm": 0.3433734939759036, + "acc_norm_stderr": 0.03696584317010601 + }, + 
"harness|ko_mmlu_philosophy|5": { + "acc": 0.3440514469453376, + "acc_stderr": 0.026981478043648026, + "acc_norm": 0.3440514469453376, + "acc_norm_stderr": 0.026981478043648026 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3632286995515695, + "acc_stderr": 0.03227790442850499, + "acc_norm": 0.3632286995515695, + "acc_norm_stderr": 0.03227790442850499 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.35877862595419846, + "acc_stderr": 0.04206739313864908, + "acc_norm": 0.35877862595419846, + "acc_norm_stderr": 0.04206739313864908 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3434343434343434, + "acc_stderr": 0.03383201223244442, + "acc_norm": 0.3434343434343434, + "acc_norm_stderr": 0.03383201223244442 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.33793103448275863, + "acc_stderr": 0.03941707632064889, + "acc_norm": 0.33793103448275863, + "acc_norm_stderr": 0.03941707632064889 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.04158307533083286, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.04158307533083286 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3487394957983193, + "acc_stderr": 0.030956636328566548, + "acc_norm": 0.3487394957983193, + "acc_norm_stderr": 0.030956636328566548 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.31025641025641026, + "acc_stderr": 0.0234546748894043, + "acc_norm": 0.31025641025641026, + "acc_norm_stderr": 0.0234546748894043 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932268, + "acc_norm": 0.22, + "acc_norm_stderr": 
0.04163331998932268 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.044143436668549335, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.044143436668549335 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.28078817733990147, + "acc_stderr": 0.03161856335358611, + "acc_norm": 0.28078817733990147, + "acc_norm_stderr": 0.03161856335358611 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3709677419354839, + "acc_stderr": 0.027480541887953593, + "acc_norm": 0.3709677419354839, + "acc_norm_stderr": 0.027480541887953593 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5170940170940171, + "acc_stderr": 0.032736940493481824, + "acc_norm": 0.5170940170940171, + "acc_norm_stderr": 0.032736940493481824 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3018867924528302, + "acc_stderr": 0.02825420034443867, + "acc_norm": 0.3018867924528302, + "acc_norm_stderr": 0.02825420034443867 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.32727272727272727, + "acc_stderr": 0.044942908662520875, + "acc_norm": 0.32727272727272727, + "acc_norm_stderr": 0.044942908662520875 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.27037037037037037, + "acc_stderr": 0.02708037281514566, + "acc_norm": 0.27037037037037037, + "acc_norm_stderr": 0.02708037281514566 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.03757949922943343, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.03757949922943343 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.48258706467661694, + "acc_stderr": 0.03533389234739245, + "acc_norm": 0.48258706467661694, + "acc_norm_stderr": 0.03533389234739245 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.30057803468208094, + "acc_stderr": 0.03496101481191181, + "acc_norm": 0.30057803468208094, + "acc_norm_stderr": 0.03496101481191181 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 
0.25925925925925924, + "acc_stderr": 0.02256989707491841, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.02256989707491841 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2916666666666667, + "acc_stderr": 0.03800968060554859, + "acc_norm": 0.2916666666666667, + "acc_norm_stderr": 0.03800968060554859 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.3179190751445087, + "acc_stderr": 0.025070713719153172, + "acc_norm": 0.3179190751445087, + "acc_norm_stderr": 0.025070713719153172 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.34355828220858897, + "acc_stderr": 0.037311335196738925, + "acc_norm": 0.34355828220858897, + "acc_norm_stderr": 0.037311335196738925 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.02622964917882116, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.02622964917882116 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.33678756476683935, + "acc_stderr": 0.034107802518361846, + "acc_norm": 0.33678756476683935, + "acc_norm_stderr": 0.034107802518361846 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.30701754385964913, + "acc_stderr": 0.0433913832257986, + "acc_norm": 0.30701754385964913, + "acc_norm_stderr": 0.0433913832257986 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3211009174311927, + "acc_stderr": 0.020018149772733747, + "acc_norm": 0.3211009174311927, + "acc_norm_stderr": 0.020018149772733747 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 
0.31746031746031744, + "acc_stderr": 0.041634530313028585, + "acc_norm": 0.31746031746031744, + "acc_norm_stderr": 0.041634530313028585 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3758169934640523, + "acc_stderr": 0.027732834353363947, + "acc_norm": 0.3758169934640523, + "acc_norm_stderr": 0.027732834353363947 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.44, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.44, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.4049586776859504, + "acc_stderr": 0.04481137755942469, + "acc_norm": 0.4049586776859504, + "acc_norm_stderr": 0.04481137755942469 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.2565789473684211, + "acc_stderr": 0.0355418036802569, + "acc_norm": 0.2565789473684211, + "acc_norm_stderr": 0.0355418036802569 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.30392156862745096, + "acc_stderr": 0.018607552131279834, + "acc_norm": 0.30392156862745096, + "acc_norm_stderr": 0.018607552131279834 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2765957446808511, + "acc_stderr": 0.026684564340460994, + "acc_norm": 0.2765957446808511, + "acc_norm_stderr": 0.026684564340460994 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.26785714285714285, + "acc_stderr": 0.04203277291467763, + "acc_norm": 0.26785714285714285, + "acc_norm_stderr": 0.04203277291467763 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.41203703703703703, + "acc_stderr": 0.03356787758160834, + "acc_norm": 0.41203703703703703, + "acc_norm_stderr": 0.03356787758160834 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23798882681564246, + "acc_stderr": 0.014242630070574892, + "acc_norm": 0.23798882681564246, + "acc_norm_stderr": 0.014242630070574892 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + 
"harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.33088235294117646, + "acc_stderr": 0.02858270975389844, + "acc_norm": 0.33088235294117646, + "acc_norm_stderr": 0.02858270975389844 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2938775510204082, + "acc_stderr": 0.02916273841024978, + "acc_norm": 0.2938775510204082, + "acc_norm_stderr": 0.02916273841024978 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.4177215189873418, + "acc_stderr": 0.032103530322412685, + "acc_norm": 0.4177215189873418, + "acc_norm_stderr": 0.032103530322412685 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.26401564537157757, + "acc_stderr": 0.011258435537723816, + "acc_norm": 0.26401564537157757, + "acc_norm_stderr": 0.011258435537723816 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.03166009679399812, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.03166009679399812 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.23636363636363636, + "acc_stderr": 0.033175059300091805, + "acc_norm": 0.23636363636363636, + "acc_norm_stderr": 0.033175059300091805 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.25458996328029376, + "mc1_stderr": 0.015250117079156467, + "mc2": 0.4346601144729828, + "mc2_stderr": 0.015485642516678326 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2727272727272727, + "acc_stderr": 0.01531185311030035, + "acc_norm": 0.34946871310507677, + "acc_norm_stderr": 0.016392797085769843 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + 
"harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + 
"harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Herry443/Mistral-7B-KNUT-v0.3", + "model_sha": "089a962c7ef124af537742bd25034c601f264fae", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Herry443/Mistral-7B-KNUT-v0.4/result_2023-12-19 10:08:44.json b/Herry443/Mistral-7B-KNUT-v0.4/result_2023-12-19 10:08:44.json new file mode 100644 index 0000000000000000000000000000000000000000..f8a1b48c47ef817513f8c892ac03287828e243d9 --- /dev/null +++ b/Herry443/Mistral-7B-KNUT-v0.4/result_2023-12-19 10:08:44.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.26791808873720135, + "acc_stderr": 0.012942030195136428, + "acc_norm": 0.31313993174061433, + "acc_norm_stderr": 0.013552671543623496 + }, + "harness|ko_hellaswag|10": { + "acc": 0.32732523401712804, + "acc_stderr": 0.00468278079050834, + "acc_norm": 0.40380402310296754, + "acc_norm_stderr": 0.004896563126116813 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.34502923976608185, + "acc_stderr": 0.036459813773888065, + "acc_norm": 0.34502923976608185, + "acc_norm_stderr": 0.036459813773888065 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.3786407766990291, + "acc_stderr": 0.048026946982589726, + "acc_norm": 0.3786407766990291, + "acc_norm_stderr": 0.048026946982589726 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3116219667943806, + "acc_stderr": 0.016562433867284176, + "acc_norm": 0.3116219667943806, + 
"acc_norm_stderr": 0.016562433867284176 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2740740740740741, + "acc_stderr": 0.03853254836552003, + "acc_norm": 0.2740740740740741, + "acc_norm_stderr": 0.03853254836552003 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621503, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621503 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2978723404255319, + "acc_stderr": 0.02989614568209546, + "acc_norm": 0.2978723404255319, + "acc_norm_stderr": 0.02989614568209546 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3072289156626506, + "acc_stderr": 0.035915667978246635, + "acc_norm": 0.3072289156626506, + "acc_norm_stderr": 0.035915667978246635 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3504823151125402, + "acc_stderr": 0.02709865262130175, + "acc_norm": 0.3504823151125402, + "acc_norm_stderr": 0.02709865262130175 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.37668161434977576, + "acc_stderr": 0.032521134899291884, + "acc_norm": 0.37668161434977576, + "acc_norm_stderr": 0.032521134899291884 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3282442748091603, + "acc_stderr": 0.041184385658062976, + "acc_norm": 0.3282442748091603, + "acc_norm_stderr": 0.041184385658062976 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.03358618145732524, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.03358618145732524 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3586206896551724, + "acc_stderr": 0.03996629574876718, + "acc_norm": 0.3586206896551724, + "acc_norm_stderr": 0.03996629574876718 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.19607843137254902, + "acc_stderr": 0.03950581861179964, + "acc_norm": 
0.19607843137254902, + "acc_norm_stderr": 0.03950581861179964 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3319327731092437, + "acc_stderr": 0.030588697013783663, + "acc_norm": 0.3319327731092437, + "acc_norm_stderr": 0.030588697013783663 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.30256410256410254, + "acc_stderr": 0.02329088805377272, + "acc_norm": 0.30256410256410254, + "acc_norm_stderr": 0.02329088805377272 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04557239513497752, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04557239513497752 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.26108374384236455, + "acc_stderr": 0.030903796952114475, + "acc_norm": 0.26108374384236455, + "acc_norm_stderr": 0.030903796952114475 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3580645161290323, + "acc_stderr": 0.027273890594300642, + "acc_norm": 0.3580645161290323, + "acc_norm_stderr": 0.027273890594300642 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5341880341880342, + "acc_stderr": 0.03267942734081227, + "acc_norm": 0.5341880341880342, + "acc_norm_stderr": 0.03267942734081227 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3132075471698113, + "acc_stderr": 0.028544793319055333, + "acc_norm": 0.3132075471698113, + "acc_norm_stderr": 0.028544793319055333 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2727272727272727, + "acc_stderr": 0.04265792110940588, + "acc_norm": 0.2727272727272727, + "acc_norm_stderr": 0.04265792110940588 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3037037037037037, 
+ "acc_stderr": 0.028037929969114993, + "acc_norm": 0.3037037037037037, + "acc_norm_stderr": 0.028037929969114993 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.037101857261199946, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.037101857261199946 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.39800995024875624, + "acc_stderr": 0.034611994290400135, + "acc_norm": 0.39800995024875624, + "acc_norm_stderr": 0.034611994290400135 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2658959537572254, + "acc_stderr": 0.033687629322594316, + "acc_norm": 0.2658959537572254, + "acc_norm_stderr": 0.033687629322594316 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.25132275132275134, + "acc_stderr": 0.022340482339643895, + "acc_norm": 0.25132275132275134, + "acc_norm_stderr": 0.022340482339643895 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3194444444444444, + "acc_stderr": 0.038990736873573344, + "acc_norm": 0.3194444444444444, + "acc_norm_stderr": 0.038990736873573344 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909282, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909282 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.32947976878612717, + "acc_stderr": 0.025305258131879723, + "acc_norm": 0.32947976878612717, + "acc_norm_stderr": 0.025305258131879723 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3128834355828221, + "acc_stderr": 0.036429145782924055, + "acc_norm": 0.3128834355828221, + "acc_norm_stderr": 0.036429145782924055 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2716049382716049, + "acc_stderr": 0.024748624490537375, + "acc_norm": 0.2716049382716049, + "acc_norm_stderr": 0.024748624490537375 + }, + 
"harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.24870466321243523, + "acc_stderr": 0.031195840877700304, + "acc_norm": 0.24870466321243523, + "acc_norm_stderr": 0.031195840877700304 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2894736842105263, + "acc_stderr": 0.04266339443159394, + "acc_norm": 0.2894736842105263, + "acc_norm_stderr": 0.04266339443159394 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3100917431192661, + "acc_stderr": 0.019830849684439756, + "acc_norm": 0.3100917431192661, + "acc_norm_stderr": 0.019830849684439756 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.039325376803928704, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.039325376803928704 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3464052287581699, + "acc_stderr": 0.027245613047215355, + "acc_norm": 0.3464052287581699, + "acc_norm_stderr": 0.027245613047215355 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939098, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939098 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.4049586776859504, + "acc_stderr": 0.04481137755942469, + "acc_norm": 0.4049586776859504, + "acc_norm_stderr": 0.04481137755942469 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.2565789473684211, + "acc_stderr": 0.035541803680256896, + "acc_norm": 0.2565789473684211, + "acc_norm_stderr": 0.035541803680256896 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.28921568627450983, + "acc_stderr": 0.01834252984527591, + "acc_norm": 0.28921568627450983, + "acc_norm_stderr": 0.01834252984527591 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2872340425531915, + "acc_stderr": 0.026992199173064356, + "acc_norm": 
0.2872340425531915, + "acc_norm_stderr": 0.026992199173064356 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2767857142857143, + "acc_stderr": 0.04246624336697624, + "acc_norm": 0.2767857142857143, + "acc_norm_stderr": 0.04246624336697624 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.33796296296296297, + "acc_stderr": 0.03225941352631296, + "acc_norm": 0.33796296296296297, + "acc_norm_stderr": 0.03225941352631296 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.25027932960893856, + "acc_stderr": 0.014487500852850409, + "acc_norm": 0.25027932960893856, + "acc_norm_stderr": 0.014487500852850409 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3014705882352941, + "acc_stderr": 0.027875982114273168, + "acc_norm": 0.3014705882352941, + "acc_norm_stderr": 0.027875982114273168 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3020408163265306, + "acc_stderr": 0.029393609319879815, + "acc_norm": 0.3020408163265306, + "acc_norm_stderr": 0.029393609319879815 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.379746835443038, + "acc_stderr": 0.031591887529658504, + "acc_norm": 0.379746835443038, + "acc_norm_stderr": 0.031591887529658504 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2646675358539765, + "acc_stderr": 0.011267332992845531, + "acc_norm": 0.2646675358539765, + "acc_norm_stderr": 0.011267332992845531 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.031321798030832904, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.031321798030832904 + }, + 
"harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.24848484848484848, + "acc_stderr": 0.03374402644139404, + "acc_norm": 0.24848484848484848, + "acc_norm_stderr": 0.03374402644139404 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2558139534883721, + "mc1_stderr": 0.015274176219283349, + "mc2": 0.42277041139901306, + "mc2_stderr": 0.015451140013408284 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.27744982290436837, + "acc_stderr": 0.015393630236605975, + "acc_norm": 0.35537190082644626, + "acc_norm_stderr": 0.01645549600031454 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + 
"harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Herry443/Mistral-7B-KNUT-v0.4", + "model_sha": "ed7abbc15e628a6832b00b24aad888e015e2a65b", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/HuggingFaceH4/zephyr-7b-beta/result_2023-11-01 04:21:47.json b/HuggingFaceH4/zephyr-7b-beta/result_2023-11-01 04:21:47.json new file mode 100644 index 0000000000000000000000000000000000000000..939b5a31e8c4805c413e7161e3c8d251331d34ad --- /dev/null +++ b/HuggingFaceH4/zephyr-7b-beta/result_2023-11-01 04:21:47.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 
0.33532423208191126, + "acc_stderr": 0.01379618294778556, + "acc_norm": 0.3848122866894198, + "acc_norm_stderr": 0.014218371065251112 + }, + "harness|ko_hellaswag|10": { + "acc": 0.35480979884485164, + "acc_stderr": 0.004774778180345192, + "acc_norm": 0.44911372236606256, + "acc_norm_stderr": 0.00496387293685794 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.45614035087719296, + "acc_stderr": 0.03820042586602966, + "acc_norm": 0.45614035087719296, + "acc_norm_stderr": 0.03820042586602966 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6019417475728155, + "acc_stderr": 0.04846748253977238, + "acc_norm": 0.6019417475728155, + "acc_norm_stderr": 0.04846748253977238 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.41762452107279696, + "acc_stderr": 0.017635637326951534, + "acc_norm": 0.41762452107279696, + "acc_norm_stderr": 0.017635637326951534 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34074074074074073, + "acc_stderr": 0.040943762699967946, + "acc_norm": 0.34074074074074073, + "acc_norm_stderr": 0.040943762699967946 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.19, + "acc_stderr": 0.03942772444036623, + "acc_norm": 0.19, + "acc_norm_stderr": 0.03942772444036623 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2978723404255319, + "acc_stderr": 0.029896145682095462, + "acc_norm": 0.2978723404255319, + "acc_norm_stderr": 0.029896145682095462 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3614457831325301, + "acc_stderr": 0.0374005938202932, + "acc_norm": 0.3614457831325301, + "acc_norm_stderr": 0.0374005938202932 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4758842443729904, + "acc_stderr": 0.028365041542564584, + "acc_norm": 0.4758842443729904, + "acc_norm_stderr": 0.028365041542564584 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3811659192825112, + "acc_stderr": 0.032596251184168284, + "acc_norm": 0.3811659192825112, + "acc_norm_stderr": 0.032596251184168284 + }, + "harness|ko_mmlu_human_sexuality|5": 
{ + "acc": 0.3511450381679389, + "acc_stderr": 0.04186445163013751, + "acc_norm": 0.3511450381679389, + "acc_norm_stderr": 0.04186445163013751 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.494949494949495, + "acc_stderr": 0.035621707606254015, + "acc_norm": 0.494949494949495, + "acc_norm_stderr": 0.035621707606254015 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4, + "acc_stderr": 0.04082482904638628, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04082482904638628 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.04617034827006717, + "acc_norm": 0.3137254901960784, + "acc_norm_stderr": 0.04617034827006717 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4957983193277311, + "acc_stderr": 0.0324773433444811, + "acc_norm": 0.4957983193277311, + "acc_norm_stderr": 0.0324773433444811 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4256410256410256, + "acc_stderr": 0.025069094387296546, + "acc_norm": 0.4256410256410256, + "acc_norm_stderr": 0.025069094387296546 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.59, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.59, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4537037037037037, + "acc_stderr": 0.04812917324536821, + "acc_norm": 0.4537037037037037, + "acc_norm_stderr": 0.04812917324536821 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.35467980295566504, + "acc_stderr": 0.03366124489051449, + "acc_norm": 0.35467980295566504, + "acc_norm_stderr": 0.03366124489051449 + }, + "harness|ko_mmlu_high_school_biology|5": { + 
"acc": 0.4290322580645161, + "acc_stderr": 0.02815603653823321, + "acc_norm": 0.4290322580645161, + "acc_norm_stderr": 0.02815603653823321 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.03088273697413865, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.03088273697413865 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4188679245283019, + "acc_stderr": 0.03036505082911521, + "acc_norm": 0.4188679245283019, + "acc_norm_stderr": 0.03036505082911521 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.42727272727272725, + "acc_stderr": 0.04738198703545483, + "acc_norm": 0.42727272727272725, + "acc_norm_stderr": 0.04738198703545483 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.34814814814814815, + "acc_stderr": 0.029045600290616258, + "acc_norm": 0.34814814814814815, + "acc_norm_stderr": 0.029045600290616258 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.037101857261199946, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.037101857261199946 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5174129353233831, + "acc_stderr": 0.03533389234739245, + "acc_norm": 0.5174129353233831, + "acc_norm_stderr": 0.03533389234739245 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.37572254335260113, + "acc_stderr": 0.03692820767264867, + "acc_norm": 0.37572254335260113, + "acc_norm_stderr": 0.03692820767264867 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3492063492063492, + "acc_stderr": 0.024552292209342658, + "acc_norm": 0.3492063492063492, + "acc_norm_stderr": 0.024552292209342658 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.039420826399272135, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.039420826399272135 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.35, + "acc_stderr": 0.04793724854411019, + "acc_norm": 0.35, + "acc_norm_stderr": 
0.04793724854411019 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956913, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956913 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.47398843930635837, + "acc_stderr": 0.026882643434022885, + "acc_norm": 0.47398843930635837, + "acc_norm_stderr": 0.026882643434022885 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.44171779141104295, + "acc_stderr": 0.039015918258361836, + "acc_norm": 0.44171779141104295, + "acc_norm_stderr": 0.039015918258361836 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.42592592592592593, + "acc_stderr": 0.027513747284379424, + "acc_norm": 0.42592592592592593, + "acc_norm_stderr": 0.027513747284379424 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5129533678756477, + "acc_stderr": 0.0360722806104775, + "acc_norm": 0.5129533678756477, + "acc_norm_stderr": 0.0360722806104775 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.0404933929774814, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.0404933929774814 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.47155963302752296, + "acc_stderr": 0.02140261569734804, + "acc_norm": 0.47155963302752296, + "acc_norm_stderr": 0.02140261569734804 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.36507936507936506, + "acc_stderr": 0.04306241259127152, + "acc_norm": 0.36507936507936506, + "acc_norm_stderr": 0.04306241259127152 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4117647058823529, + "acc_stderr": 0.028180596328259297, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.028180596328259297 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + 
"acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5867768595041323, + "acc_stderr": 0.04495087843548408, + "acc_norm": 0.5867768595041323, + "acc_norm_stderr": 0.04495087843548408 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40131578947368424, + "acc_stderr": 0.03988903703336284, + "acc_norm": 0.40131578947368424, + "acc_norm_stderr": 0.03988903703336284 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.32679738562091504, + "acc_stderr": 0.018975427920507215, + "acc_norm": 0.32679738562091504, + "acc_norm_stderr": 0.018975427920507215 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.02812163604063988, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.02812163604063988 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3392857142857143, + "acc_stderr": 0.04493949068613539, + "acc_norm": 0.3392857142857143, + "acc_norm_stderr": 0.04493949068613539 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.41203703703703703, + "acc_stderr": 0.03356787758160835, + "acc_norm": 0.41203703703703703, + "acc_norm_stderr": 0.03356787758160835 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.329608938547486, + "acc_stderr": 0.015721531075183884, + "acc_norm": 0.329608938547486, + "acc_norm_stderr": 0.015721531075183884 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.375, + "acc_stderr": 0.029408372932278746, + "acc_norm": 0.375, + "acc_norm_stderr": 0.029408372932278746 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.43673469387755104, + "acc_stderr": 0.03175195237583322, 
+ "acc_norm": 0.43673469387755104, + "acc_norm_stderr": 0.03175195237583322 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.4810126582278481, + "acc_stderr": 0.03252375148090448, + "acc_norm": 0.4810126582278481, + "acc_norm_stderr": 0.03252375148090448 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.29791395045632335, + "acc_stderr": 0.011680717340400059, + "acc_norm": 0.29791395045632335, + "acc_norm_stderr": 0.011680717340400059 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.03198001660115072, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.03198001660115072 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.30303030303030304, + "acc_stderr": 0.03588624800091707, + "acc_norm": 0.30303030303030304, + "acc_norm_stderr": 0.03588624800091707 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3317013463892289, + "mc1_stderr": 0.01648214881024147, + "mc2": 0.5171680571717291, + "mc2_stderr": 0.01606077987901482 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.39787485242030696, + "acc_stderr": 0.01682795905473339, + "acc_norm": 0.4014167650531287, + "acc_norm_stderr": 0.01685290785872906 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, 
+ "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "HuggingFaceH4/zephyr-7b-beta", + "model_sha": 
"3bac358730f8806e5c3dc7c7e19eb36e045bf720", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/HumanF-MarkrAI/COKAL-DPO-13b-v2/result_2023-11-11 08:29:02.json b/HumanF-MarkrAI/COKAL-DPO-13b-v2/result_2023-11-11 08:29:02.json new file mode 100644 index 0000000000000000000000000000000000000000..4b59dbe1b1b7d5a3a16f7f8fdf10c5d11464019e --- /dev/null +++ b/HumanF-MarkrAI/COKAL-DPO-13b-v2/result_2023-11-11 08:29:02.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.5059726962457338, + "acc_stderr": 0.014610348300255793, + "acc_norm": 0.5494880546075085, + "acc_norm_stderr": 0.014539646098471627 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4880501892053376, + "acc_stderr": 0.00498835614649901, + "acc_norm": 0.6301533559051982, + "acc_norm_stderr": 0.00481776358141023 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5263157894736842, + "acc_stderr": 0.03829509868994727, + "acc_norm": 0.5263157894736842, + "acc_norm_stderr": 0.03829509868994727 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5048543689320388, + "acc_stderr": 0.049505043821289195, + "acc_norm": 0.5048543689320388, + "acc_norm_stderr": 0.049505043821289195 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5683269476372924, + "acc_stderr": 0.017712228939299798, + "acc_norm": 0.5683269476372924, + "acc_norm_stderr": 0.017712228939299798 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4888888888888889, + "acc_stderr": 0.04318275491977976, + "acc_norm": 0.4888888888888889, + "acc_norm_stderr": 0.04318275491977976 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847415, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847415 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.40425531914893614, + "acc_stderr": 0.03208115750788684, + "acc_norm": 
0.40425531914893614, + "acc_norm_stderr": 0.03208115750788684 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4036144578313253, + "acc_stderr": 0.038194861407583984, + "acc_norm": 0.4036144578313253, + "acc_norm_stderr": 0.038194861407583984 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5016077170418006, + "acc_stderr": 0.02839794490780661, + "acc_norm": 0.5016077170418006, + "acc_norm_stderr": 0.02839794490780661 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5246636771300448, + "acc_stderr": 0.033516951676526276, + "acc_norm": 0.5246636771300448, + "acc_norm_stderr": 0.033516951676526276 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.45038167938931295, + "acc_stderr": 0.04363643698524779, + "acc_norm": 0.45038167938931295, + "acc_norm_stderr": 0.04363643698524779 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6161616161616161, + "acc_stderr": 0.03464881675016336, + "acc_norm": 0.6161616161616161, + "acc_norm_stderr": 0.03464881675016336 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4068965517241379, + "acc_stderr": 0.04093793981266237, + "acc_norm": 0.4068965517241379, + "acc_norm_stderr": 0.04093793981266237 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.043364327079931785, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.043364327079931785 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.48739495798319327, + "acc_stderr": 0.03246816765752174, + "acc_norm": 0.48739495798319327, + "acc_norm_stderr": 0.03246816765752174 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4461538461538462, + "acc_stderr": 0.025203571773028337, + "acc_norm": 0.4461538461538462, + "acc_norm_stderr": 0.025203571773028337 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.51, 
+ "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.04826217294139894, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.04826217294139894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3891625615763547, + "acc_stderr": 0.034304624161038716, + "acc_norm": 0.3891625615763547, + "acc_norm_stderr": 0.034304624161038716 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.45161290322580644, + "acc_stderr": 0.02831050034856839, + "acc_norm": 0.45161290322580644, + "acc_norm_stderr": 0.02831050034856839 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6538461538461539, + "acc_stderr": 0.031166957367235907, + "acc_norm": 0.6538461538461539, + "acc_norm_stderr": 0.031166957367235907 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.45660377358490567, + "acc_stderr": 0.03065674869673943, + "acc_norm": 0.45660377358490567, + "acc_norm_stderr": 0.03065674869673943 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5272727272727272, + "acc_stderr": 0.04782001791380061, + "acc_norm": 0.5272727272727272, + "acc_norm_stderr": 0.04782001791380061 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.27037037037037037, + "acc_stderr": 0.02708037281514566, + "acc_norm": 0.27037037037037037, + "acc_norm_stderr": 0.02708037281514566 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2781456953642384, + "acc_stderr": 0.03658603262763743, + "acc_norm": 0.2781456953642384, + "acc_norm_stderr": 0.03658603262763743 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.572139303482587, + "acc_stderr": 0.03498541988407795, + "acc_norm": 0.572139303482587, + "acc_norm_stderr": 0.03498541988407795 + }, + 
"harness|ko_mmlu_college_medicine|5": { + "acc": 0.3815028901734104, + "acc_stderr": 0.037038511930995215, + "acc_norm": 0.3815028901734104, + "acc_norm_stderr": 0.037038511930995215 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.31216931216931215, + "acc_stderr": 0.023865206836972613, + "acc_norm": 0.31216931216931215, + "acc_norm_stderr": 0.023865206836972613 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4027777777777778, + "acc_stderr": 0.04101405519842424, + "acc_norm": 0.4027777777777778, + "acc_norm_stderr": 0.04101405519842424 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.64, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.64, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4884393063583815, + "acc_stderr": 0.026911898686377927, + "acc_norm": 0.4884393063583815, + "acc_norm_stderr": 0.026911898686377927 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5460122699386503, + "acc_stderr": 0.0391170190467718, + "acc_norm": 0.5460122699386503, + "acc_norm_stderr": 0.0391170190467718 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.027815973433878014, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.027815973433878014 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5699481865284974, + "acc_stderr": 0.03572954333144808, + "acc_norm": 0.5699481865284974, + "acc_norm_stderr": 0.03572954333144808 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.04142439719489361, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 
0.04142439719489361 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6091743119266055, + "acc_stderr": 0.020920058346111076, + "acc_norm": 0.6091743119266055, + "acc_norm_stderr": 0.020920058346111076 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30158730158730157, + "acc_stderr": 0.04104947269903394, + "acc_norm": 0.30158730158730157, + "acc_norm_stderr": 0.04104947269903394 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.41830065359477125, + "acc_stderr": 0.0282451340243873, + "acc_norm": 0.41830065359477125, + "acc_norm_stderr": 0.0282451340243873 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6446280991735537, + "acc_stderr": 0.0436923632657398, + "acc_norm": 0.6446280991735537, + "acc_norm_stderr": 0.0436923632657398 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3815789473684211, + "acc_stderr": 0.03953173377749194, + "acc_norm": 0.3815789473684211, + "acc_norm_stderr": 0.03953173377749194 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.39215686274509803, + "acc_stderr": 0.019751726508762626, + "acc_norm": 0.39215686274509803, + "acc_norm_stderr": 0.019751726508762626 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3404255319148936, + "acc_stderr": 0.02826765748265014, + "acc_norm": 0.3404255319148936, + "acc_norm_stderr": 0.02826765748265014 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.30357142857142855, + "acc_stderr": 0.04364226155841044, + "acc_norm": 0.30357142857142855, + "acc_norm_stderr": 0.04364226155841044 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3287037037037037, + "acc_stderr": 0.03203614084670058, + "acc_norm": 0.3287037037037037, + "acc_norm_stderr": 0.03203614084670058 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24692737430167597, + "acc_stderr": 
0.01442229220480885, + "acc_norm": 0.24692737430167597, + "acc_norm_stderr": 0.01442229220480885 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.41911764705882354, + "acc_stderr": 0.029972807170464626, + "acc_norm": 0.41911764705882354, + "acc_norm_stderr": 0.029972807170464626 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.42448979591836733, + "acc_stderr": 0.031642094879429414, + "acc_norm": 0.42448979591836733, + "acc_norm_stderr": 0.031642094879429414 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6286919831223629, + "acc_stderr": 0.03145068600744859, + "acc_norm": 0.6286919831223629, + "acc_norm_stderr": 0.03145068600744859 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.35853976531942633, + "acc_stderr": 0.012248487319682746, + "acc_norm": 0.35853976531942633, + "acc_norm_stderr": 0.012248487319682746 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5, + "acc_stderr": 0.03509312031717982, + "acc_norm": 0.5, + "acc_norm_stderr": 0.03509312031717982 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6, + "acc_stderr": 0.03825460278380026, + "acc_norm": 0.6, + "acc_norm_stderr": 0.03825460278380026 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3659730722154223, + "mc1_stderr": 0.01686294168408836, + "mc2": 0.5166857407308614, + "mc2_stderr": 0.01622317540419704 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4675324675324675, + "acc_stderr": 0.017154073716682865, + "acc_norm": 0.4982290436835891, + "acc_norm_stderr": 0.017190246276231863 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + 
"harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + 
"harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "HumanF-MarkrAI/COKAL-DPO-13b-v2", + "model_sha": "f90b0c3f6f91a58616aef3a19bdd1dc3c242028a", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/HumanF-MarkrAI/COKAL-DPO-13b-v3/result_2023-11-26 12:58:31.json b/HumanF-MarkrAI/COKAL-DPO-13b-v3/result_2023-11-26 12:58:31.json new file mode 100644 index 0000000000000000000000000000000000000000..e407497267259c7b3f1ffed3a5b7cbb743e75334 --- /dev/null +++ b/HumanF-MarkrAI/COKAL-DPO-13b-v3/result_2023-11-26 12:58:31.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.47696245733788395, + "acc_stderr": 0.014595873205358269, + "acc_norm": 0.5324232081911263, + "acc_norm_stderr": 0.014580637569995426 + }, + "harness|ko_hellaswag|10": { + "acc": 0.46773551085441145, + "acc_stderr": 0.004979381876712608, + "acc_norm": 0.6227843059151563, + "acc_norm_stderr": 0.004836990373261561 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5263157894736842, + "acc_stderr": 0.03829509868994727, + "acc_norm": 0.5263157894736842, + "acc_norm_stderr": 0.03829509868994727 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.47572815533980584, + "acc_stderr": 
0.049449010929737795, + "acc_norm": 0.47572815533980584, + "acc_norm_stderr": 0.049449010929737795 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5632183908045977, + "acc_stderr": 0.017736470837800694, + "acc_norm": 0.5632183908045977, + "acc_norm_stderr": 0.017736470837800694 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4740740740740741, + "acc_stderr": 0.04313531696750574, + "acc_norm": 0.4740740740740741, + "acc_norm_stderr": 0.04313531696750574 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768077, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768077 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.40425531914893614, + "acc_stderr": 0.03208115750788684, + "acc_norm": 0.40425531914893614, + "acc_norm_stderr": 0.03208115750788684 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3855421686746988, + "acc_stderr": 0.03789134424611548, + "acc_norm": 0.3855421686746988, + "acc_norm_stderr": 0.03789134424611548 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5176848874598071, + "acc_stderr": 0.02838032284907713, + "acc_norm": 0.5176848874598071, + "acc_norm_stderr": 0.02838032284907713 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5291479820627802, + "acc_stderr": 0.03350073248773404, + "acc_norm": 0.5291479820627802, + "acc_norm_stderr": 0.03350073248773404 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.44274809160305345, + "acc_stderr": 0.043564472026650695, + "acc_norm": 0.44274809160305345, + "acc_norm_stderr": 0.043564472026650695 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5959595959595959, + "acc_stderr": 0.03496130972056126, + "acc_norm": 0.5959595959595959, + "acc_norm_stderr": 0.03496130972056126 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4206896551724138, + 
"acc_stderr": 0.0411391498118926, + "acc_norm": 0.4206896551724138, + "acc_norm_stderr": 0.0411391498118926 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.04220773659171453, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.04220773659171453 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5084033613445378, + "acc_stderr": 0.0324739027656967, + "acc_norm": 0.5084033613445378, + "acc_norm_stderr": 0.0324739027656967 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4564102564102564, + "acc_stderr": 0.02525448542479961, + "acc_norm": 0.4564102564102564, + "acc_norm_stderr": 0.02525448542479961 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3842364532019704, + "acc_stderr": 0.034223985656575515, + "acc_norm": 0.3842364532019704, + "acc_norm_stderr": 0.034223985656575515 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4612903225806452, + "acc_stderr": 0.028358634859836925, + "acc_norm": 0.4612903225806452, + "acc_norm_stderr": 0.028358634859836925 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6452991452991453, + "acc_stderr": 0.031342504862454025, + "acc_norm": 0.6452991452991453, + "acc_norm_stderr": 0.031342504862454025 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.46037735849056605, + "acc_stderr": 0.03067609659938918, + "acc_norm": 0.46037735849056605, + "acc_norm_stderr": 0.03067609659938918 + }, + 
"harness|ko_mmlu_public_relations|5": { + "acc": 0.5818181818181818, + "acc_stderr": 0.04724577405731572, + "acc_norm": 0.5818181818181818, + "acc_norm_stderr": 0.04724577405731572 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24814814814814815, + "acc_stderr": 0.0263357394040558, + "acc_norm": 0.24814814814814815, + "acc_norm_stderr": 0.0263357394040558 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2781456953642384, + "acc_stderr": 0.036586032627637426, + "acc_norm": 0.2781456953642384, + "acc_norm_stderr": 0.036586032627637426 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5920398009950248, + "acc_stderr": 0.03475116365194092, + "acc_norm": 0.5920398009950248, + "acc_norm_stderr": 0.03475116365194092 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.42196531791907516, + "acc_stderr": 0.0376574669386515, + "acc_norm": 0.42196531791907516, + "acc_norm_stderr": 0.0376574669386515 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30687830687830686, + "acc_stderr": 0.023752928712112147, + "acc_norm": 0.30687830687830686, + "acc_norm_stderr": 0.023752928712112147 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3958333333333333, + "acc_stderr": 0.04089465449325583, + "acc_norm": 0.3958333333333333, + "acc_norm_stderr": 0.04089465449325583 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5144508670520231, + "acc_stderr": 0.026907849856282542, + "acc_norm": 0.5144508670520231, + "acc_norm_stderr": 0.026907849856282542 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5398773006134969, + "acc_stderr": 0.03915857291436972, + "acc_norm": 0.5398773006134969, + "acc_norm_stderr": 
0.03915857291436972 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.027815973433878014, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.027815973433878014 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6062176165803109, + "acc_stderr": 0.035260770955482405, + "acc_norm": 0.6062176165803109, + "acc_norm_stderr": 0.035260770955482405 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.21929824561403508, + "acc_stderr": 0.03892431106518754, + "acc_norm": 0.21929824561403508, + "acc_norm_stderr": 0.03892431106518754 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6055045871559633, + "acc_stderr": 0.020954642108587506, + "acc_norm": 0.6055045871559633, + "acc_norm_stderr": 0.020954642108587506 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.31746031746031744, + "acc_stderr": 0.04163453031302859, + "acc_norm": 0.31746031746031744, + "acc_norm_stderr": 0.04163453031302859 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4150326797385621, + "acc_stderr": 0.028213504177824093, + "acc_norm": 0.4150326797385621, + "acc_norm_stderr": 0.028213504177824093 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6198347107438017, + "acc_stderr": 0.04431324501968431, + "acc_norm": 0.6198347107438017, + "acc_norm_stderr": 0.04431324501968431 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3881578947368421, + "acc_stderr": 0.03965842097512744, + "acc_norm": 0.3881578947368421, + "acc_norm_stderr": 0.03965842097512744 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.41013071895424835, + "acc_stderr": 0.01989841271763589, + "acc_norm": 
0.41013071895424835, + "acc_norm_stderr": 0.01989841271763589 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32269503546099293, + "acc_stderr": 0.02788913930053478, + "acc_norm": 0.32269503546099293, + "acc_norm_stderr": 0.02788913930053478 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2767857142857143, + "acc_stderr": 0.042466243366976235, + "acc_norm": 0.2767857142857143, + "acc_norm_stderr": 0.042466243366976235 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3287037037037037, + "acc_stderr": 0.03203614084670058, + "acc_norm": 0.3287037037037037, + "acc_norm_stderr": 0.03203614084670058 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.26927374301675977, + "acc_stderr": 0.014835616582882618, + "acc_norm": 0.26927374301675977, + "acc_norm_stderr": 0.014835616582882618 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.39338235294117646, + "acc_stderr": 0.02967428828131117, + "acc_norm": 0.39338235294117646, + "acc_norm_stderr": 0.02967428828131117 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.46938775510204084, + "acc_stderr": 0.031949171367580624, + "acc_norm": 0.46938775510204084, + "acc_norm_stderr": 0.031949171367580624 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6455696202531646, + "acc_stderr": 0.0311373042971858, + "acc_norm": 0.6455696202531646, + "acc_norm_stderr": 0.0311373042971858 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.37222946544980445, + "acc_stderr": 0.012346241297204366, + "acc_norm": 0.37222946544980445, + "acc_norm_stderr": 0.012346241297204366 + }, + "harness|ko_mmlu_high_school_us_history|5": 
{ + "acc": 0.5196078431372549, + "acc_stderr": 0.03506612560524866, + "acc_norm": 0.5196078431372549, + "acc_norm_stderr": 0.03506612560524866 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5515151515151515, + "acc_stderr": 0.038835659779569286, + "acc_norm": 0.5515151515151515, + "acc_norm_stderr": 0.038835659779569286 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3292533659730722, + "mc1_stderr": 0.01645126444006824, + "mc2": 0.4865420269226251, + "mc2_stderr": 0.016014497778680654 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4675324675324675, + "acc_stderr": 0.01715407371668286, + "acc_norm": 0.5029515938606848, + "acc_norm_stderr": 0.017190054580194694 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + 
"harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "HumanF-MarkrAI/COKAL-DPO-13b-v3", + "model_sha": "64a95028cd730b0453dba44259b776a455f86049", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/HumanF-MarkrAI/Dear_My_best_Friends-v4-13B/result_2023-11-28 17:54:12.json b/HumanF-MarkrAI/Dear_My_best_Friends-v4-13B/result_2023-11-28 17:54:12.json new file mode 100644 index 0000000000000000000000000000000000000000..00ea1e398f5a18abe027913ab25e0c29cc2bea5c 
--- /dev/null +++ b/HumanF-MarkrAI/Dear_My_best_Friends-v4-13B/result_2023-11-28 17:54:12.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4206484641638225, + "acc_stderr": 0.014426211252508406, + "acc_norm": 0.4786689419795222, + "acc_norm_stderr": 0.014598087973127104 + }, + "harness|ko_hellaswag|10": { + "acc": 0.43108942441744674, + "acc_stderr": 0.0049421645859914695, + "acc_norm": 0.5748854809798845, + "acc_norm_stderr": 0.004933500261683597 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5146198830409356, + "acc_stderr": 0.038331852752130254, + "acc_norm": 0.5146198830409356, + "acc_norm_stderr": 0.038331852752130254 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5728155339805825, + "acc_stderr": 0.04897957737781168, + "acc_norm": 0.5728155339805825, + "acc_norm_stderr": 0.04897957737781168 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5593869731800766, + "acc_stderr": 0.017753396973908493, + "acc_norm": 0.5593869731800766, + "acc_norm_stderr": 0.017753396973908493 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45185185185185184, + "acc_stderr": 0.042992689054808624, + "acc_norm": 0.45185185185185184, + "acc_norm_stderr": 0.042992689054808624 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421255, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421255 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.41702127659574467, + "acc_stderr": 0.03223276266711712, + "acc_norm": 0.41702127659574467, + "acc_norm_stderr": 0.03223276266711712 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.41566265060240964, + "acc_stderr": 0.03836722176598053, + "acc_norm": 0.41566265060240964, + "acc_norm_stderr": 0.03836722176598053 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5176848874598071, + "acc_stderr": 0.02838032284907713, + "acc_norm": 0.5176848874598071, + "acc_norm_stderr": 0.02838032284907713 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 
0.4977578475336323, + "acc_stderr": 0.033557465352232634, + "acc_norm": 0.4977578475336323, + "acc_norm_stderr": 0.033557465352232634 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4961832061068702, + "acc_stderr": 0.043851623256015534, + "acc_norm": 0.4961832061068702, + "acc_norm_stderr": 0.043851623256015534 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.35, + "acc_stderr": 0.04793724854411022, + "acc_norm": 0.35, + "acc_norm_stderr": 0.04793724854411022 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6060606060606061, + "acc_stderr": 0.03481285338232964, + "acc_norm": 0.6060606060606061, + "acc_norm_stderr": 0.03481285338232964 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.04104269211806232, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.04104269211806232 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.042801058373643966, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.042801058373643966 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4789915966386555, + "acc_stderr": 0.03244980849990029, + "acc_norm": 0.4789915966386555, + "acc_norm_stderr": 0.03244980849990029 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.47692307692307695, + "acc_stderr": 0.025323990861736125, + "acc_norm": 0.47692307692307695, + "acc_norm_stderr": 0.025323990861736125 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 
0.4088669950738916, + "acc_stderr": 0.034590588158832314, + "acc_norm": 0.4088669950738916, + "acc_norm_stderr": 0.034590588158832314 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4774193548387097, + "acc_stderr": 0.028414985019707868, + "acc_norm": 0.4774193548387097, + "acc_norm_stderr": 0.028414985019707868 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.03088273697413866, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.03088273697413866 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.45660377358490567, + "acc_stderr": 0.03065674869673943, + "acc_norm": 0.45660377358490567, + "acc_norm_stderr": 0.03065674869673943 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.04769300568972744, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.04769300568972744 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.026962424325073828, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.026962424325073828 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.037101857261199946, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.037101857261199946 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5870646766169154, + "acc_stderr": 0.03481520803367348, + "acc_norm": 0.5870646766169154, + "acc_norm_stderr": 0.03481520803367348 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4277456647398844, + "acc_stderr": 0.037724468575180276, + "acc_norm": 0.4277456647398844, + "acc_norm_stderr": 0.037724468575180276 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.29365079365079366, + "acc_stderr": 0.023456037383982026, + "acc_norm": 0.29365079365079366, + "acc_norm_stderr": 0.023456037383982026 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4097222222222222, + "acc_stderr": 0.04112490974670787, + "acc_norm": 0.4097222222222222, + 
"acc_norm_stderr": 0.04112490974670787 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.62, + "acc_stderr": 0.04878317312145634, + "acc_norm": 0.62, + "acc_norm_stderr": 0.04878317312145634 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.49710982658959535, + "acc_stderr": 0.02691864538323901, + "acc_norm": 0.49710982658959535, + "acc_norm_stderr": 0.02691864538323901 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5214723926380368, + "acc_stderr": 0.03924746876751129, + "acc_norm": 0.5214723926380368, + "acc_norm_stderr": 0.03924746876751129 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4691358024691358, + "acc_stderr": 0.027767689606833935, + "acc_norm": 0.4691358024691358, + "acc_norm_stderr": 0.027767689606833935 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6113989637305699, + "acc_stderr": 0.03517739796373132, + "acc_norm": 0.6113989637305699, + "acc_norm_stderr": 0.03517739796373132 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.042270544512321984, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.042270544512321984 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6311926605504588, + "acc_stderr": 0.02068622756072953, + "acc_norm": 0.6311926605504588, + "acc_norm_stderr": 0.02068622756072953 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04216370213557835, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04216370213557835 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.42810457516339867, + "acc_stderr": 0.028332397483664274, + "acc_norm": 0.42810457516339867, 
+ "acc_norm_stderr": 0.028332397483664274 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.043913262867240704, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.043913262867240704 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.35526315789473684, + "acc_stderr": 0.03894734487013316, + "acc_norm": 0.35526315789473684, + "acc_norm_stderr": 0.03894734487013316 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3709150326797386, + "acc_stderr": 0.019542101564854114, + "acc_norm": 0.3709150326797386, + "acc_norm_stderr": 0.019542101564854114 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.36524822695035464, + "acc_stderr": 0.028723863853281278, + "acc_norm": 0.36524822695035464, + "acc_norm_stderr": 0.028723863853281278 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.22321428571428573, + "acc_stderr": 0.039523019677025116, + "acc_norm": 0.22321428571428573, + "acc_norm_stderr": 0.039523019677025116 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.37962962962962965, + "acc_stderr": 0.03309682581119035, + "acc_norm": 0.37962962962962965, + "acc_norm_stderr": 0.03309682581119035 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4852941176470588, + "acc_stderr": 
0.03035969707904612, + "acc_norm": 0.4852941176470588, + "acc_norm_stderr": 0.03035969707904612 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.49387755102040815, + "acc_stderr": 0.032006820201639086, + "acc_norm": 0.49387755102040815, + "acc_norm_stderr": 0.032006820201639086 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6455696202531646, + "acc_stderr": 0.031137304297185805, + "acc_norm": 0.6455696202531646, + "acc_norm_stderr": 0.031137304297185805 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3539765319426336, + "acc_stderr": 0.012213504731731637, + "acc_norm": 0.3539765319426336, + "acc_norm_stderr": 0.012213504731731637 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5147058823529411, + "acc_stderr": 0.03507793834791324, + "acc_norm": 0.5147058823529411, + "acc_norm_stderr": 0.03507793834791324 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5333333333333333, + "acc_stderr": 0.03895658065271846, + "acc_norm": 0.5333333333333333, + "acc_norm_stderr": 0.03895658065271846 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3047735618115055, + "mc1_stderr": 0.016114124156882462, + "mc2": 0.4658908168793715, + "mc2_stderr": 0.01536090399308638 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.46871310507674147, + "acc_stderr": 0.017156666859785466, + "acc_norm": 0.5242030696576151, + "acc_norm_stderr": 0.017170202466520748 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + 
"harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + 
"harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "HumanF-MarkrAI/Dear_My_best_Friends-v4-13B", + "model_sha": "9939860a1167f1fdb90b3a206eadf07e8873c7e6", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/HumanF-MarkrAI/Yi_lee-SFT-v2-6B/result_2023-12-12 07:30:13.json b/HumanF-MarkrAI/Yi_lee-SFT-v2-6B/result_2023-12-12 07:30:13.json new file mode 100644 index 0000000000000000000000000000000000000000..8e24b5619b65879228e773d9b8828ac4625cc4be --- /dev/null +++ b/HumanF-MarkrAI/Yi_lee-SFT-v2-6B/result_2023-12-12 07:30:13.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3506825938566553, + "acc_stderr": 0.01394463593072609, + "acc_norm": 0.40784982935153585, + "acc_norm_stderr": 0.014361097288449696 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3963353913563035, + "acc_stderr": 0.004881359589148996, + "acc_norm": 0.5270862378012349, + "acc_norm_stderr": 0.004982454383162067 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.49122807017543857, + "acc_stderr": 0.038342347441649924, + "acc_norm": 0.49122807017543857, + "acc_norm_stderr": 0.038342347441649924 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6504854368932039, + "acc_stderr": 0.047211885060971716, + "acc_norm": 0.6504854368932039, + "acc_norm_stderr": 0.047211885060971716 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5606641123882503, + "acc_stderr": 0.0177478742456836, + "acc_norm": 0.5606641123882503, + "acc_norm_stderr": 0.0177478742456836 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.04309732901036354, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.04309732901036354 + }, + 
"harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720683, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720683 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4085106382978723, + "acc_stderr": 0.03213418026701576, + "acc_norm": 0.4085106382978723, + "acc_norm_stderr": 0.03213418026701576 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3855421686746988, + "acc_stderr": 0.0378913442461155, + "acc_norm": 0.3855421686746988, + "acc_norm_stderr": 0.0378913442461155 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5209003215434084, + "acc_stderr": 0.02837327096106942, + "acc_norm": 0.5209003215434084, + "acc_norm_stderr": 0.02837327096106942 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4349775784753363, + "acc_stderr": 0.033272833702713445, + "acc_norm": 0.4349775784753363, + "acc_norm_stderr": 0.033272833702713445 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5572519083969466, + "acc_stderr": 0.04356447202665069, + "acc_norm": 0.5572519083969466, + "acc_norm_stderr": 0.04356447202665069 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6161616161616161, + "acc_stderr": 0.034648816750163375, + "acc_norm": 0.6161616161616161, + "acc_norm_stderr": 0.034648816750163375 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4413793103448276, + "acc_stderr": 0.04137931034482757, + "acc_norm": 0.4413793103448276, + "acc_norm_stderr": 0.04137931034482757 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.19607843137254902, + "acc_stderr": 0.03950581861179964, + "acc_norm": 0.19607843137254902, + "acc_norm_stderr": 0.03950581861179964 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5, + "acc_stderr": 0.032478490123081544, + "acc_norm": 0.5, + "acc_norm_stderr": 0.032478490123081544 + }, + 
"harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4717948717948718, + "acc_stderr": 0.0253106392549339, + "acc_norm": 0.4717948717948718, + "acc_norm_stderr": 0.0253106392549339 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.04826217294139894, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.04826217294139894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.37438423645320196, + "acc_stderr": 0.03405155380561952, + "acc_norm": 0.37438423645320196, + "acc_norm_stderr": 0.03405155380561952 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.46774193548387094, + "acc_stderr": 0.02838474778881333, + "acc_norm": 0.46774193548387094, + "acc_norm_stderr": 0.02838474778881333 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7094017094017094, + "acc_stderr": 0.029745048572674078, + "acc_norm": 0.7094017094017094, + "acc_norm_stderr": 0.029745048572674078 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4867924528301887, + "acc_stderr": 0.030762134874500482, + "acc_norm": 0.4867924528301887, + "acc_norm_stderr": 0.030762134874500482 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4818181818181818, + "acc_stderr": 0.04785964010794917, + "acc_norm": 0.4818181818181818, + "acc_norm_stderr": 0.04785964010794917 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2740740740740741, + "acc_stderr": 0.027195934804085622, + "acc_norm": 0.2740740740740741, + "acc_norm_stderr": 0.027195934804085622 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.32450331125827814, + "acc_stderr": 0.038227469376587525, + "acc_norm": 0.32450331125827814, + 
"acc_norm_stderr": 0.038227469376587525 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5373134328358209, + "acc_stderr": 0.035256751674679745, + "acc_norm": 0.5373134328358209, + "acc_norm_stderr": 0.035256751674679745 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4393063583815029, + "acc_stderr": 0.03784271932887467, + "acc_norm": 0.4393063583815029, + "acc_norm_stderr": 0.03784271932887467 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.335978835978836, + "acc_stderr": 0.024326310529149145, + "acc_norm": 0.335978835978836, + "acc_norm_stderr": 0.024326310529149145 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.041227287076512825, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.041227287076512825 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.56, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.56, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5289017341040463, + "acc_stderr": 0.026874085883518348, + "acc_norm": 0.5289017341040463, + "acc_norm_stderr": 0.026874085883518348 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.50920245398773, + "acc_stderr": 0.03927705600787443, + "acc_norm": 0.50920245398773, + "acc_norm_stderr": 0.03927705600787443 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.44753086419753085, + "acc_stderr": 0.02766713856942271, + "acc_norm": 0.44753086419753085, + "acc_norm_stderr": 0.02766713856942271 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5699481865284974, + "acc_stderr": 0.03572954333144808, + "acc_norm": 
0.5699481865284974, + "acc_norm_stderr": 0.03572954333144808 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.30701754385964913, + "acc_stderr": 0.043391383225798594, + "acc_norm": 0.30701754385964913, + "acc_norm_stderr": 0.043391383225798594 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6238532110091743, + "acc_stderr": 0.020769231968205074, + "acc_norm": 0.6238532110091743, + "acc_norm_stderr": 0.020769231968205074 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.31746031746031744, + "acc_stderr": 0.04163453031302859, + "acc_norm": 0.31746031746031744, + "acc_norm_stderr": 0.04163453031302859 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4117647058823529, + "acc_stderr": 0.028180596328259293, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.028180596328259293 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7024793388429752, + "acc_stderr": 0.04173349148083499, + "acc_norm": 0.7024793388429752, + "acc_norm_stderr": 0.04173349148083499 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40789473684210525, + "acc_stderr": 0.0399930971277747, + "acc_norm": 0.40789473684210525, + "acc_norm_stderr": 0.0399930971277747 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.39215686274509803, + "acc_stderr": 0.019751726508762626, + "acc_norm": 0.39215686274509803, + "acc_norm_stderr": 0.019751726508762626 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3191489361702128, + "acc_stderr": 0.0278079901413202, + "acc_norm": 0.3191489361702128, + "acc_norm_stderr": 0.0278079901413202 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3482142857142857, + "acc_stderr": 0.04521829902833585, + "acc_norm": 0.3482142857142857, + "acc_norm_stderr": 0.04521829902833585 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 
0.3055555555555556, + "acc_stderr": 0.031415546294025425, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.031415546294025425 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.25139664804469275, + "acc_stderr": 0.01450897945355398, + "acc_norm": 0.25139664804469275, + "acc_norm_stderr": 0.01450897945355398 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.41, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.35661764705882354, + "acc_stderr": 0.029097209568411952, + "acc_norm": 0.35661764705882354, + "acc_norm_stderr": 0.029097209568411952 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.42448979591836733, + "acc_stderr": 0.031642094879429414, + "acc_norm": 0.42448979591836733, + "acc_norm_stderr": 0.031642094879429414 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5822784810126582, + "acc_stderr": 0.032103530322412685, + "acc_norm": 0.5822784810126582, + "acc_norm_stderr": 0.032103530322412685 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.32659713168187743, + "acc_stderr": 0.011977676704716, + "acc_norm": 0.32659713168187743, + "acc_norm_stderr": 0.011977676704716 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5392156862745098, + "acc_stderr": 0.03498501649369527, + "acc_norm": 0.5392156862745098, + "acc_norm_stderr": 0.03498501649369527 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5757575757575758, + "acc_stderr": 0.038592681420702636, + "acc_norm": 0.5757575757575758, + "acc_norm_stderr": 0.038592681420702636 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.29253365973072215, + "mc1_stderr": 0.015925597445286165, + "mc2": 0.4453783861858108, + "mc2_stderr": 
0.015094573783194452 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5312868949232585, + "acc_stderr": 0.017156666859785473, + "acc_norm": 0.58913813459268, + "acc_norm_stderr": 0.016914972767841045 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, 
+ "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "HumanF-MarkrAI/Yi_lee-SFT-v2-6B", + "model_sha": "17959d8351fad03a56f0d8f4607ebe23ae764f34", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/HumanF-MarkrAI/mistralopithecus-v3-dpo-7b/result_2023-11-26 11:42:24.json b/HumanF-MarkrAI/mistralopithecus-v3-dpo-7b/result_2023-11-26 11:42:24.json new file mode 100644 index 0000000000000000000000000000000000000000..ab7f212410ed867c64fdefd48386ac837df27485 --- /dev/null +++ b/HumanF-MarkrAI/mistralopithecus-v3-dpo-7b/result_2023-11-26 11:42:24.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.46331058020477817, + "acc_stderr": 0.01457200052775699, + "acc_norm": 0.507679180887372, + "acc_norm_stderr": 0.014609667440892574 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4320852419836686, + "acc_stderr": 0.00494353724234442, + "acc_norm": 0.5420235012945628, + "acc_norm_stderr": 0.00497212652303194 + }, + 
"harness|ko_mmlu_world_religions|5": { + "acc": 0.4619883040935672, + "acc_stderr": 0.03823727092882307, + "acc_norm": 0.4619883040935672, + "acc_norm_stderr": 0.03823727092882307 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5631067961165048, + "acc_stderr": 0.04911147107365777, + "acc_norm": 0.5631067961165048, + "acc_norm_stderr": 0.04911147107365777 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.49936143039591313, + "acc_stderr": 0.017879948914431665, + "acc_norm": 0.49936143039591313, + "acc_norm_stderr": 0.017879948914431665 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4222222222222222, + "acc_stderr": 0.042667634040995814, + "acc_norm": 0.4222222222222222, + "acc_norm_stderr": 0.042667634040995814 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39148936170212767, + "acc_stderr": 0.03190701242326812, + "acc_norm": 0.39148936170212767, + "acc_norm_stderr": 0.03190701242326812 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4036144578313253, + "acc_stderr": 0.03819486140758398, + "acc_norm": 0.4036144578313253, + "acc_norm_stderr": 0.03819486140758398 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4405144694533762, + "acc_stderr": 0.028196400574197422, + "acc_norm": 0.4405144694533762, + "acc_norm_stderr": 0.028196400574197422 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4170403587443946, + "acc_stderr": 0.03309266936071721, + "acc_norm": 0.4170403587443946, + "acc_norm_stderr": 0.03309266936071721 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3969465648854962, + "acc_stderr": 0.04291135671009224, + "acc_norm": 0.3969465648854962, + "acc_norm_stderr": 0.04291135671009224 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + 
"harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5505050505050505, + "acc_stderr": 0.0354413249194797, + "acc_norm": 0.5505050505050505, + "acc_norm_stderr": 0.0354413249194797 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.42758620689655175, + "acc_stderr": 0.04122737111370331, + "acc_norm": 0.42758620689655175, + "acc_norm_stderr": 0.04122737111370331 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.041583075330832865, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.041583075330832865 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.41596638655462187, + "acc_stderr": 0.03201650100739615, + "acc_norm": 0.41596638655462187, + "acc_norm_stderr": 0.03201650100739615 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4358974358974359, + "acc_stderr": 0.025141801511177495, + "acc_norm": 0.4358974358974359, + "acc_norm_stderr": 0.025141801511177495 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.048262172941398944, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.048262172941398944 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39408866995073893, + "acc_stderr": 0.03438157967036543, + "acc_norm": 0.39408866995073893, + "acc_norm_stderr": 0.03438157967036543 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4258064516129032, + "acc_stderr": 0.028129112709165897, + "acc_norm": 0.4258064516129032, + "acc_norm_stderr": 0.028129112709165897 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.030882736974138656, + "acc_norm": 
0.6666666666666666, + "acc_norm_stderr": 0.030882736974138656 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4226415094339623, + "acc_stderr": 0.03040233144576954, + "acc_norm": 0.4226415094339623, + "acc_norm_stderr": 0.03040233144576954 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.45454545454545453, + "acc_stderr": 0.04769300568972742, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.04769300568972742 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2851851851851852, + "acc_stderr": 0.027528599210340492, + "acc_norm": 0.2851851851851852, + "acc_norm_stderr": 0.027528599210340492 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.32450331125827814, + "acc_stderr": 0.038227469376587525, + "acc_norm": 0.32450331125827814, + "acc_norm_stderr": 0.038227469376587525 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6069651741293532, + "acc_stderr": 0.0345368246603156, + "acc_norm": 0.6069651741293532, + "acc_norm_stderr": 0.0345368246603156 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.36416184971098264, + "acc_stderr": 0.03669072477416907, + "acc_norm": 0.36416184971098264, + "acc_norm_stderr": 0.03669072477416907 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3862433862433862, + "acc_stderr": 0.02507598176760168, + "acc_norm": 0.3862433862433862, + "acc_norm_stderr": 0.02507598176760168 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3472222222222222, + "acc_stderr": 0.039812405437178615, + "acc_norm": 0.3472222222222222, + "acc_norm_stderr": 0.039812405437178615 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4884393063583815, + "acc_stderr": 
0.02691189868637793, + "acc_norm": 0.4884393063583815, + "acc_norm_stderr": 0.02691189868637793 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.49079754601226994, + "acc_stderr": 0.039277056007874414, + "acc_norm": 0.49079754601226994, + "acc_norm_stderr": 0.039277056007874414 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4351851851851852, + "acc_stderr": 0.027586006221607718, + "acc_norm": 0.4351851851851852, + "acc_norm_stderr": 0.027586006221607718 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5025906735751295, + "acc_stderr": 0.03608390745384487, + "acc_norm": 0.5025906735751295, + "acc_norm_stderr": 0.03608390745384487 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.35964912280701755, + "acc_stderr": 0.04514496132873633, + "acc_norm": 0.35964912280701755, + "acc_norm_stderr": 0.04514496132873633 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.46972477064220186, + "acc_stderr": 0.021397988604936965, + "acc_norm": 0.46972477064220186, + "acc_norm_stderr": 0.021397988604936965 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3412698412698413, + "acc_stderr": 0.042407993275749255, + "acc_norm": 0.3412698412698413, + "acc_norm_stderr": 0.042407993275749255 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.45751633986928103, + "acc_stderr": 0.028526383452142635, + "acc_norm": 0.45751633986928103, + "acc_norm_stderr": 0.028526383452142635 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5702479338842975, + "acc_stderr": 0.045190820213197716, + "acc_norm": 0.5702479338842975, + "acc_norm_stderr": 0.045190820213197716 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 
0.3618421052631579, + "acc_stderr": 0.03910525752849724, + "acc_norm": 0.3618421052631579, + "acc_norm_stderr": 0.03910525752849724 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.35784313725490197, + "acc_stderr": 0.01939305840235545, + "acc_norm": 0.35784313725490197, + "acc_norm_stderr": 0.01939305840235545 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32269503546099293, + "acc_stderr": 0.027889139300534785, + "acc_norm": 0.32269503546099293, + "acc_norm_stderr": 0.027889139300534785 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3392857142857143, + "acc_stderr": 0.04493949068613539, + "acc_norm": 0.3392857142857143, + "acc_norm_stderr": 0.04493949068613539 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.25462962962962965, + "acc_stderr": 0.02971127586000534, + "acc_norm": 0.25462962962962965, + "acc_norm_stderr": 0.02971127586000534 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2837988826815642, + "acc_stderr": 0.015078358970751772, + "acc_norm": 0.2837988826815642, + "acc_norm_stderr": 0.015078358970751772 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.05000000000000001, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05000000000000001 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.31985294117647056, + "acc_stderr": 0.02833295951403122, + "acc_norm": 0.31985294117647056, + "acc_norm_stderr": 0.02833295951403122 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4897959183673469, + "acc_stderr": 0.03200255347893782, + "acc_norm": 0.4897959183673469, + "acc_norm_stderr": 0.03200255347893782 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5569620253164557, + "acc_stderr": 0.03233532777533484, + "acc_norm": 0.5569620253164557, + 
"acc_norm_stderr": 0.03233532777533484 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.31290743155149936, + "acc_stderr": 0.011842529823063004, + "acc_norm": 0.31290743155149936, + "acc_norm_stderr": 0.011842529823063004 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.36764705882352944, + "acc_stderr": 0.03384132045674119, + "acc_norm": 0.36764705882352944, + "acc_norm_stderr": 0.03384132045674119 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4484848484848485, + "acc_stderr": 0.038835659779569286, + "acc_norm": 0.4484848484848485, + "acc_norm_stderr": 0.038835659779569286 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.26560587515299877, + "mc1_stderr": 0.015461027627253595, + "mc2": 0.4169766746281562, + "mc2_stderr": 0.016347162773038867 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.31759149940968123, + "acc_stderr": 0.01600558187622931, + "acc_norm": 0.33884297520661155, + "acc_norm_stderr": 0.01627295299701912 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + 
"harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "HumanF-MarkrAI/mistralopithecus-v3-dpo-7b", + "model_sha": "d7759639c8b879a011233f9ca5af1481b844e22f", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + 
} +} \ No newline at end of file diff --git a/HumanF-MarkrAI/pub-llama-13B-v3/result_2023-10-24 18:02:37.json b/HumanF-MarkrAI/pub-llama-13B-v3/result_2023-10-24 18:02:37.json new file mode 100644 index 0000000000000000000000000000000000000000..0f22bb83c01d882f93df58c6f128e3b2ddb4c587 --- /dev/null +++ b/HumanF-MarkrAI/pub-llama-13B-v3/result_2023-10-24 18:02:37.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.36945392491467577, + "acc_stderr": 0.014104578366491888, + "acc_norm": 0.42150170648464164, + "acc_norm_stderr": 0.014430197069326028 + }, + "harness|ko_hellaswag|10": { + "acc": 0.40450109539932283, + "acc_stderr": 0.004897921845492105, + "acc_norm": 0.5392352121091416, + "acc_norm_stderr": 0.004974395131539592 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4853801169590643, + "acc_stderr": 0.038331852752130205, + "acc_norm": 0.4853801169590643, + "acc_norm_stderr": 0.038331852752130205 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5533980582524272, + "acc_stderr": 0.04922424153458934, + "acc_norm": 0.5533980582524272, + "acc_norm_stderr": 0.04922424153458934 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5070242656449553, + "acc_stderr": 0.017878199003432214, + "acc_norm": 0.5070242656449553, + "acc_norm_stderr": 0.017878199003432214 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.43703703703703706, + "acc_stderr": 0.04284958639753399, + "acc_norm": 0.43703703703703706, + "acc_norm_stderr": 0.04284958639753399 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3404255319148936, + "acc_stderr": 0.03097669299853443, + "acc_norm": 0.3404255319148936, + "acc_norm_stderr": 0.03097669299853443 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3614457831325301, + "acc_stderr": 0.037400593820293204, + "acc_norm": 0.3614457831325301, + 
"acc_norm_stderr": 0.037400593820293204 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4983922829581994, + "acc_stderr": 0.02839794490780661, + "acc_norm": 0.4983922829581994, + "acc_norm_stderr": 0.02839794490780661 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4977578475336323, + "acc_stderr": 0.03355746535223263, + "acc_norm": 0.4977578475336323, + "acc_norm_stderr": 0.03355746535223263 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.44274809160305345, + "acc_stderr": 0.0435644720266507, + "acc_norm": 0.44274809160305345, + "acc_norm_stderr": 0.0435644720266507 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5505050505050505, + "acc_stderr": 0.0354413249194797, + "acc_norm": 0.5505050505050505, + "acc_norm_stderr": 0.0354413249194797 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4482758620689655, + "acc_stderr": 0.04144311810878151, + "acc_norm": 0.4482758620689655, + "acc_norm_stderr": 0.04144311810878151 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.04280105837364396, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.04280105837364396 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.453781512605042, + "acc_stderr": 0.032339434681820885, + "acc_norm": 0.453781512605042, + "acc_norm_stderr": 0.032339434681820885 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4230769230769231, + "acc_stderr": 0.025049197876042335, + "acc_norm": 0.4230769230769231, + "acc_norm_stderr": 0.025049197876042335 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.41, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.41, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 
0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.04820403072760626, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.04820403072760626 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.37438423645320196, + "acc_stderr": 0.034051553805619514, + "acc_norm": 0.37438423645320196, + "acc_norm_stderr": 0.034051553805619514 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.43548387096774194, + "acc_stderr": 0.028206225591502744, + "acc_norm": 0.43548387096774194, + "acc_norm_stderr": 0.028206225591502744 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6452991452991453, + "acc_stderr": 0.03134250486245402, + "acc_norm": 0.6452991452991453, + "acc_norm_stderr": 0.03134250486245402 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4226415094339623, + "acc_stderr": 0.03040233144576954, + "acc_norm": 0.4226415094339623, + "acc_norm_stderr": 0.03040233144576954 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.028317533496066475, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.028317533496066475 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.03757949922943343, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.03757949922943343 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5522388059701493, + "acc_stderr": 0.035161847729521675, + "acc_norm": 0.5522388059701493, + "acc_norm_stderr": 0.035161847729521675 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3930635838150289, + "acc_stderr": 0.0372424959581773, + "acc_norm": 0.3930635838150289, + "acc_norm_stderr": 0.0372424959581773 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + 
"acc": 0.37037037037037035, + "acc_stderr": 0.02487081525105709, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.02487081525105709 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.04016660030451233, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.04016660030451233 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4595375722543353, + "acc_stderr": 0.02683080599895224, + "acc_norm": 0.4595375722543353, + "acc_norm_stderr": 0.02683080599895224 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4539877300613497, + "acc_stderr": 0.0391170190467718, + "acc_norm": 0.4539877300613497, + "acc_norm_stderr": 0.0391170190467718 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.027777777777777797, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.027777777777777797 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.48704663212435234, + "acc_stderr": 0.03607228061047749, + "acc_norm": 0.48704663212435234, + "acc_norm_stderr": 0.03607228061047749 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.21052631578947367, + "acc_stderr": 0.0383515395439942, + "acc_norm": 0.21052631578947367, + "acc_norm_stderr": 0.0383515395439942 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5412844036697247, + "acc_stderr": 0.021364122533881695, + "acc_norm": 0.5412844036697247, + "acc_norm_stderr": 0.021364122533881695 + }, + "harness|ko_mmlu_formal_logic|5": { 
+ "acc": 0.42063492063492064, + "acc_stderr": 0.04415438226743744, + "acc_norm": 0.42063492063492064, + "acc_norm_stderr": 0.04415438226743744 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.42810457516339867, + "acc_stderr": 0.028332397483664278, + "acc_norm": 0.42810457516339867, + "acc_norm_stderr": 0.028332397483664278 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5950413223140496, + "acc_stderr": 0.04481137755942469, + "acc_norm": 0.5950413223140496, + "acc_norm_stderr": 0.04481137755942469 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4407894736842105, + "acc_stderr": 0.04040311062490436, + "acc_norm": 0.4407894736842105, + "acc_norm_stderr": 0.04040311062490436 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3660130718954248, + "acc_stderr": 0.019488025745529675, + "acc_norm": 0.3660130718954248, + "acc_norm_stderr": 0.019488025745529675 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.33687943262411346, + "acc_stderr": 0.02819553487396673, + "acc_norm": 0.33687943262411346, + "acc_norm_stderr": 0.02819553487396673 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.042878587513404544, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.042878587513404544 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.375, + "acc_stderr": 0.033016908987210894, + "acc_norm": 0.375, + "acc_norm_stderr": 0.033016908987210894 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24804469273743016, + "acc_stderr": 0.014444157808261453, + "acc_norm": 0.24804469273743016, + "acc_norm_stderr": 0.014444157808261453 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + 
"harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.33455882352941174, + "acc_stderr": 0.02866199620233531, + "acc_norm": 0.33455882352941174, + "acc_norm_stderr": 0.02866199620233531 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4775510204081633, + "acc_stderr": 0.03197694118713673, + "acc_norm": 0.4775510204081633, + "acc_norm_stderr": 0.03197694118713673 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5654008438818565, + "acc_stderr": 0.03226759995510145, + "acc_norm": 0.5654008438818565, + "acc_norm_stderr": 0.03226759995510145 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3559322033898305, + "acc_stderr": 0.012228645537277573, + "acc_norm": 0.3559322033898305, + "acc_norm_stderr": 0.012228645537277573 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.45588235294117646, + "acc_stderr": 0.03495624522015474, + "acc_norm": 0.45588235294117646, + "acc_norm_stderr": 0.03495624522015474 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.503030303030303, + "acc_stderr": 0.03904272341431855, + "acc_norm": 0.503030303030303, + "acc_norm_stderr": 0.03904272341431855 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.27539779681762544, + "mc1_stderr": 0.015638135667775527, + "mc2": 0.4355517094226067, + "mc2_stderr": 0.015309009273280678 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4510035419126328, + "acc_stderr": 0.017107618859549346, + "acc_norm": 0.5053128689492326, + "acc_norm_stderr": 0.01718938362722971 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + 
"harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + 
"harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "HumanF-MarkrAI/pub-llama-13B-v3", + "model_sha": "a077b211925e00e7bd8e3f6bdf29476c59b81d6d", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/HumanF-MarkrAI/pub-llama-13B-v4/result_2023-11-03 03:39:52.json b/HumanF-MarkrAI/pub-llama-13B-v4/result_2023-11-03 03:39:52.json new file mode 100644 index 0000000000000000000000000000000000000000..fa83d02f099d109b665d07119f4dbbaaa4927e8d --- /dev/null +++ b/HumanF-MarkrAI/pub-llama-13B-v4/result_2023-11-03 03:39:52.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.39590443686006827, + "acc_stderr": 0.014291228393536588, + "acc_norm": 0.454778156996587, + "acc_norm_stderr": 0.014551507060836355 + }, + "harness|ko_hellaswag|10": { + "acc": 0.420035849432384, + "acc_stderr": 0.004925556104679414, + "acc_norm": 0.5587532364070902, + "acc_norm_stderr": 0.004955212787832385 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5087719298245614, + "acc_stderr": 0.038342347441649924, + "acc_norm": 0.5087719298245614, + "acc_norm_stderr": 0.038342347441649924 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.47572815533980584, + "acc_stderr": 0.049449010929737795, + "acc_norm": 0.47572815533980584, + "acc_norm_stderr": 0.049449010929737795 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5312899106002554, + "acc_stderr": 0.01784491809046855, + "acc_norm": 0.5312899106002554, + 
"acc_norm_stderr": 0.01784491809046855 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45185185185185184, + "acc_stderr": 0.04299268905480863, + "acc_norm": 0.45185185185185184, + "acc_norm_stderr": 0.04299268905480863 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39148936170212767, + "acc_stderr": 0.03190701242326812, + "acc_norm": 0.39148936170212767, + "acc_norm_stderr": 0.03190701242326812 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3614457831325301, + "acc_stderr": 0.03740059382029321, + "acc_norm": 0.3614457831325301, + "acc_norm_stderr": 0.03740059382029321 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4694533762057878, + "acc_stderr": 0.02834504586484068, + "acc_norm": 0.4694533762057878, + "acc_norm_stderr": 0.02834504586484068 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4977578475336323, + "acc_stderr": 0.033557465352232634, + "acc_norm": 0.4977578475336323, + "acc_norm_stderr": 0.033557465352232634 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.45038167938931295, + "acc_stderr": 0.04363643698524779, + "acc_norm": 0.45038167938931295, + "acc_norm_stderr": 0.04363643698524779 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.494949494949495, + "acc_stderr": 0.035621707606254015, + "acc_norm": 0.494949494949495, + "acc_norm_stderr": 0.035621707606254015 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3793103448275862, + "acc_stderr": 0.04043461861916747, + "acc_norm": 0.3793103448275862, + "acc_norm_stderr": 0.04043461861916747 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.04220773659171452, + "acc_norm": 
0.23529411764705882, + "acc_norm_stderr": 0.04220773659171452 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.032145368597886394, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.032145368597886394 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4076923076923077, + "acc_stderr": 0.02491524398598784, + "acc_norm": 0.4076923076923077, + "acc_norm_stderr": 0.02491524398598784 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.04820403072760628, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.04820403072760628 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4088669950738916, + "acc_stderr": 0.034590588158832314, + "acc_norm": 0.4088669950738916, + "acc_norm_stderr": 0.034590588158832314 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.45806451612903226, + "acc_stderr": 0.028343787250540632, + "acc_norm": 0.45806451612903226, + "acc_norm_stderr": 0.028343787250540632 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6239316239316239, + "acc_stderr": 0.03173393632969481, + "acc_norm": 0.6239316239316239, + "acc_norm_stderr": 0.03173393632969481 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4490566037735849, + "acc_stderr": 0.030612730713641095, + "acc_norm": 0.4490566037735849, + "acc_norm_stderr": 0.030612730713641095 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5363636363636364, + "acc_stderr": 0.04776449162396197, + "acc_norm": 0.5363636363636364, + "acc_norm_stderr": 0.04776449162396197 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 
0.25925925925925924, + "acc_stderr": 0.026719240783712163, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.026719240783712163 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + "acc_stderr": 0.03734535676787198, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.03734535676787198 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.527363184079602, + "acc_stderr": 0.035302355173346824, + "acc_norm": 0.527363184079602, + "acc_norm_stderr": 0.035302355173346824 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.35260115606936415, + "acc_stderr": 0.036430371689585496, + "acc_norm": 0.35260115606936415, + "acc_norm_stderr": 0.036430371689585496 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2566137566137566, + "acc_stderr": 0.022494510767503154, + "acc_norm": 0.2566137566137566, + "acc_norm_stderr": 0.022494510767503154 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3680555555555556, + "acc_stderr": 0.040329990539607195, + "acc_norm": 0.3680555555555556, + "acc_norm_stderr": 0.040329990539607195 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.53, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.53, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.45664739884393063, + "acc_stderr": 0.026817718130348923, + "acc_norm": 0.45664739884393063, + "acc_norm_stderr": 0.026817718130348923 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.49693251533742333, + "acc_stderr": 0.03928297078179662, + "acc_norm": 0.49693251533742333, + "acc_norm_stderr": 0.03928297078179662 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.45987654320987653, + "acc_stderr": 0.02773102275353927, + "acc_norm": 0.45987654320987653, + "acc_norm_stderr": 0.02773102275353927 + }, + 
"harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5025906735751295, + "acc_stderr": 0.03608390745384487, + "acc_norm": 0.5025906735751295, + "acc_norm_stderr": 0.03608390745384487 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.04185774424022056, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.04185774424022056 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5394495412844037, + "acc_stderr": 0.0213704946099951, + "acc_norm": 0.5394495412844037, + "acc_norm_stderr": 0.0213704946099951 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.04134913018303316, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.04134913018303316 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.02782610930728369, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.02782610930728369 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6033057851239669, + "acc_stderr": 0.044658697805310094, + "acc_norm": 0.6033057851239669, + "acc_norm_stderr": 0.044658697805310094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3684210526315789, + "acc_stderr": 0.03925523381052932, + "acc_norm": 0.3684210526315789, + "acc_norm_stderr": 0.03925523381052932 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.37254901960784315, + "acc_stderr": 0.019559646809215927, + "acc_norm": 0.37254901960784315, + "acc_norm_stderr": 0.019559646809215927 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3049645390070922, + "acc_stderr": 0.027464708442022135, + "acc_norm": 0.3049645390070922, + 
"acc_norm_stderr": 0.027464708442022135 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.22321428571428573, + "acc_stderr": 0.039523019677025116, + "acc_norm": 0.22321428571428573, + "acc_norm_stderr": 0.039523019677025116 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2916666666666667, + "acc_stderr": 0.03099866630456053, + "acc_norm": 0.2916666666666667, + "acc_norm_stderr": 0.03099866630456053 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.40441176470588236, + "acc_stderr": 0.029812630701569746, + "acc_norm": 0.40441176470588236, + "acc_norm_stderr": 0.029812630701569746 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4204081632653061, + "acc_stderr": 0.03160106993449604, + "acc_norm": 0.4204081632653061, + "acc_norm_stderr": 0.03160106993449604 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5738396624472574, + "acc_stderr": 0.03219035703131774, + "acc_norm": 0.5738396624472574, + "acc_norm_stderr": 0.03219035703131774 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3272490221642764, + "acc_stderr": 0.011983819806464752, + "acc_norm": 0.3272490221642764, + "acc_norm_stderr": 0.011983819806464752 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4852941176470588, + "acc_stderr": 0.03507793834791324, + "acc_norm": 0.4852941176470588, + "acc_norm_stderr": 0.03507793834791324 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 
0.5575757575757576, + "acc_stderr": 0.03878372113711275, + "acc_norm": 0.5575757575757576, + "acc_norm_stderr": 0.03878372113711275 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.31211750305997554, + "mc1_stderr": 0.01622075676952091, + "mc2": 0.4796342874579499, + "mc2_stderr": 0.015443652481064269 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.39669421487603307, + "acc_stderr": 0.016819438642971408, + "acc_norm": 0.42266824085005905, + "acc_norm_stderr": 0.0169835060795776 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + 
"harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "HumanF-MarkrAI/pub-llama-13B-v4", + "model_sha": "8f327f994717ac7f2959674cc066cc11434626b1", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/HumanF-MarkrAI/pub-llama-13B-v5/result_2023-11-02 17:54:22.json b/HumanF-MarkrAI/pub-llama-13B-v5/result_2023-11-02 17:54:22.json new file mode 100644 index 0000000000000000000000000000000000000000..3ef8d945628dc86e524e95d31b86aafe4850110d --- /dev/null +++ b/HumanF-MarkrAI/pub-llama-13B-v5/result_2023-11-02 17:54:22.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4061433447098976, + "acc_stderr": 
0.014351656690097858, + "acc_norm": 0.46757679180887374, + "acc_norm_stderr": 0.014580637569995421 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4268074088826927, + "acc_stderr": 0.004936029827672038, + "acc_norm": 0.5713005377414858, + "acc_norm_stderr": 0.004938787067611805 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.52046783625731, + "acc_stderr": 0.038316105328219316, + "acc_norm": 0.52046783625731, + "acc_norm_stderr": 0.038316105328219316 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5728155339805825, + "acc_stderr": 0.04897957737781168, + "acc_norm": 0.5728155339805825, + "acc_norm_stderr": 0.04897957737781168 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5542784163473818, + "acc_stderr": 0.017774297282479506, + "acc_norm": 0.5542784163473818, + "acc_norm_stderr": 0.017774297282479506 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4888888888888889, + "acc_stderr": 0.04318275491977976, + "acc_norm": 0.4888888888888889, + "acc_norm_stderr": 0.04318275491977976 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.425531914893617, + "acc_stderr": 0.03232146916224469, + "acc_norm": 0.425531914893617, + "acc_norm_stderr": 0.03232146916224469 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42771084337349397, + "acc_stderr": 0.038515976837185335, + "acc_norm": 0.42771084337349397, + "acc_norm_stderr": 0.038515976837185335 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4919614147909968, + "acc_stderr": 0.028394421370984545, + "acc_norm": 0.4919614147909968, + "acc_norm_stderr": 0.028394421370984545 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5336322869955157, + "acc_stderr": 0.033481800170603065, + "acc_norm": 0.5336322869955157, + "acc_norm_stderr": 0.033481800170603065 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4732824427480916, + 
"acc_stderr": 0.04379024936553894, + "acc_norm": 0.4732824427480916, + "acc_norm_stderr": 0.04379024936553894 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145631, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145631 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5909090909090909, + "acc_stderr": 0.03502975799413008, + "acc_norm": 0.5909090909090909, + "acc_norm_stderr": 0.03502975799413008 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.43448275862068964, + "acc_stderr": 0.041307408795554966, + "acc_norm": 0.43448275862068964, + "acc_norm_stderr": 0.041307408795554966 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.04280105837364395, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.04280105837364395 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.49159663865546216, + "acc_stderr": 0.0324739027656967, + "acc_norm": 0.49159663865546216, + "acc_norm_stderr": 0.0324739027656967 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4717948717948718, + "acc_stderr": 0.025310639254933917, + "acc_norm": 0.4717948717948718, + "acc_norm_stderr": 0.025310639254933917 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5092592592592593, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.5092592592592593, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.03465304488406796, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.03465304488406796 + }, + "harness|ko_mmlu_high_school_biology|5": { + 
"acc": 0.46774193548387094, + "acc_stderr": 0.02838474778881333, + "acc_norm": 0.46774193548387094, + "acc_norm_stderr": 0.02838474778881333 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6495726495726496, + "acc_stderr": 0.0312561082442188, + "acc_norm": 0.6495726495726496, + "acc_norm_stderr": 0.0312561082442188 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.44528301886792454, + "acc_stderr": 0.030588052974270655, + "acc_norm": 0.44528301886792454, + "acc_norm_stderr": 0.030588052974270655 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.23333333333333334, + "acc_stderr": 0.025787874220959316, + "acc_norm": 0.23333333333333334, + "acc_norm_stderr": 0.025787874220959316 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + "acc_stderr": 0.03684881521389024, + "acc_norm": 0.2847682119205298, + "acc_norm_stderr": 0.03684881521389024 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5870646766169154, + "acc_stderr": 0.03481520803367348, + "acc_norm": 0.5870646766169154, + "acc_norm_stderr": 0.03481520803367348 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.42196531791907516, + "acc_stderr": 0.0376574669386515, + "acc_norm": 0.42196531791907516, + "acc_norm_stderr": 0.0376574669386515 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.291005291005291, + "acc_stderr": 0.023393826500484875, + "acc_norm": 0.291005291005291, + "acc_norm_stderr": 0.023393826500484875 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.375, + "acc_stderr": 0.04048439222695598, + "acc_norm": 0.375, + "acc_norm_stderr": 0.04048439222695598 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + 
"harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.65, + "acc_stderr": 0.047937248544110175, + "acc_norm": 0.65, + "acc_norm_stderr": 0.047937248544110175 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.48265895953757226, + "acc_stderr": 0.026902900458666647, + "acc_norm": 0.48265895953757226, + "acc_norm_stderr": 0.026902900458666647 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5276073619631901, + "acc_stderr": 0.0392237829061099, + "acc_norm": 0.5276073619631901, + "acc_norm_stderr": 0.0392237829061099 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5123456790123457, + "acc_stderr": 0.027812262269327235, + "acc_norm": 0.5123456790123457, + "acc_norm_stderr": 0.027812262269327235 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5699481865284974, + "acc_stderr": 0.03572954333144808, + "acc_norm": 0.5699481865284974, + "acc_norm_stderr": 0.03572954333144808 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.0409698513984367, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.0409698513984367 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6128440366972477, + "acc_stderr": 0.02088423199264345, + "acc_norm": 0.6128440366972477, + "acc_norm_stderr": 0.02088423199264345 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.31746031746031744, + "acc_stderr": 0.04163453031302859, + "acc_norm": 0.31746031746031744, + "acc_norm_stderr": 0.04163453031302859 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3954248366013072, + "acc_stderr": 0.02799672318063146, + "acc_norm": 0.3954248366013072, + "acc_norm_stderr": 0.02799672318063146 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.42, + "acc_stderr": 0.04960449637488584, + "acc_norm": 0.42, + "acc_norm_stderr": 0.04960449637488584 + }, + 
"harness|ko_mmlu_international_law|5": { + "acc": 0.628099173553719, + "acc_stderr": 0.04412015806624504, + "acc_norm": 0.628099173553719, + "acc_norm_stderr": 0.04412015806624504 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.42105263157894735, + "acc_stderr": 0.040179012759817494, + "acc_norm": 0.42105263157894735, + "acc_norm_stderr": 0.040179012759817494 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.01972205893961807, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.01972205893961807 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3262411347517731, + "acc_stderr": 0.027968453043563168, + "acc_norm": 0.3262411347517731, + "acc_norm_stderr": 0.027968453043563168 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.22321428571428573, + "acc_stderr": 0.039523019677025116, + "acc_norm": 0.22321428571428573, + "acc_norm_stderr": 0.039523019677025116 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.03275773486100999, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.03275773486100999 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4632352941176471, + "acc_stderr": 0.030290619180485687, + "acc_norm": 0.4632352941176471, + "acc_norm_stderr": 0.030290619180485687 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4857142857142857, + "acc_stderr": 0.03199615232806287, + "acc_norm": 
0.4857142857142857, + "acc_norm_stderr": 0.03199615232806287 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6371308016877637, + "acc_stderr": 0.03129920825530213, + "acc_norm": 0.6371308016877637, + "acc_norm_stderr": 0.03129920825530213 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.34876140808344197, + "acc_stderr": 0.012172035157127113, + "acc_norm": 0.34876140808344197, + "acc_norm_stderr": 0.012172035157127113 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5245098039215687, + "acc_stderr": 0.035050931943487976, + "acc_norm": 0.5245098039215687, + "acc_norm_stderr": 0.035050931943487976 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5393939393939394, + "acc_stderr": 0.03892207016552012, + "acc_norm": 0.5393939393939394, + "acc_norm_stderr": 0.03892207016552012 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.26805385556915545, + "mc1_stderr": 0.015506204722834559, + "mc2": 0.4231914841328799, + "mc2_stderr": 0.014828818370873126 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4946871310507674, + "acc_stderr": 0.01718938362722969, + "acc_norm": 0.5667060212514758, + "acc_norm_stderr": 0.017036683641893098 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + 
"harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "HumanF-MarkrAI/pub-llama-13B-v5", + "model_sha": 
"1f872cab411ce3259a7fc23816b8bce1ca67f4b7", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/HumanF-MarkrAI/pub-llama-13B-v6/result_2023-11-04 16:02:32.json b/HumanF-MarkrAI/pub-llama-13B-v6/result_2023-11-04 16:02:32.json new file mode 100644 index 0000000000000000000000000000000000000000..de4ed80cacebe91ae87a1b0efb30f4266dea61a0 --- /dev/null +++ b/HumanF-MarkrAI/pub-llama-13B-v6/result_2023-11-04 16:02:32.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4104095563139932, + "acc_stderr": 0.014374922192642662, + "acc_norm": 0.47013651877133106, + "acc_norm_stderr": 0.014585305840007105 + }, + "harness|ko_hellaswag|10": { + "acc": 0.427504481179048, + "acc_stderr": 0.004937054233711568, + "acc_norm": 0.5733917546305517, + "acc_norm_stderr": 0.004935735300348869 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.49707602339181284, + "acc_stderr": 0.03834759370936839, + "acc_norm": 0.49707602339181284, + "acc_norm_stderr": 0.03834759370936839 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5242718446601942, + "acc_stderr": 0.049449010929737795, + "acc_norm": 0.5242718446601942, + "acc_norm_stderr": 0.049449010929737795 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.017769250583533246, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.017769250583533246 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45185185185185184, + "acc_stderr": 0.042992689054808624, + "acc_norm": 0.45185185185185184, + "acc_norm_stderr": 0.042992689054808624 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206824, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206824 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.42127659574468085, + "acc_stderr": 0.03227834510146267, + 
"acc_norm": 0.42127659574468085, + "acc_norm_stderr": 0.03227834510146267 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.40963855421686746, + "acc_stderr": 0.038284011150790206, + "acc_norm": 0.40963855421686746, + "acc_norm_stderr": 0.038284011150790206 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.49517684887459806, + "acc_stderr": 0.02839677044411129, + "acc_norm": 0.49517684887459806, + "acc_norm_stderr": 0.02839677044411129 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5515695067264574, + "acc_stderr": 0.033378837362550984, + "acc_norm": 0.5515695067264574, + "acc_norm_stderr": 0.033378837362550984 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.46564885496183206, + "acc_stderr": 0.043749285605997376, + "acc_norm": 0.46564885496183206, + "acc_norm_stderr": 0.043749285605997376 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5808080808080808, + "acc_stderr": 0.035155207286704175, + "acc_norm": 0.5808080808080808, + "acc_norm_stderr": 0.035155207286704175 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.38620689655172413, + "acc_stderr": 0.04057324734419034, + "acc_norm": 0.38620689655172413, + "acc_norm_stderr": 0.04057324734419034 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.04220773659171452, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.04220773659171452 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4789915966386555, + "acc_stderr": 0.03244980849990029, + "acc_norm": 0.4789915966386555, + "acc_norm_stderr": 0.03244980849990029 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.45384615384615384, + "acc_stderr": 0.02524277098712617, + "acc_norm": 0.45384615384615384, + "acc_norm_stderr": 0.02524277098712617 + }, + 
"harness|ko_mmlu_computer_security|5": { + "acc": 0.53, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.53, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5092592592592593, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.5092592592592593, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.03465304488406796, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.03465304488406796 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.46774193548387094, + "acc_stderr": 0.028384747788813326, + "acc_norm": 0.46774193548387094, + "acc_norm_stderr": 0.028384747788813326 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6538461538461539, + "acc_stderr": 0.031166957367235903, + "acc_norm": 0.6538461538461539, + "acc_norm_stderr": 0.031166957367235903 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.44150943396226416, + "acc_stderr": 0.030561590426731833, + "acc_norm": 0.44150943396226416, + "acc_norm_stderr": 0.030561590426731833 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5818181818181818, + "acc_stderr": 0.04724577405731572, + "acc_norm": 0.5818181818181818, + "acc_norm_stderr": 0.04724577405731572 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.026962424325073824, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.026962424325073824 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.03710185726119994, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.03710185726119994 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6069651741293532, + "acc_stderr": 0.0345368246603156, + "acc_norm": 0.6069651741293532, + "acc_norm_stderr": 
0.0345368246603156 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4161849710982659, + "acc_stderr": 0.037585177754049466, + "acc_norm": 0.4161849710982659, + "acc_norm_stderr": 0.037585177754049466 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.291005291005291, + "acc_stderr": 0.023393826500484875, + "acc_norm": 0.291005291005291, + "acc_norm_stderr": 0.023393826500484875 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.375, + "acc_stderr": 0.04048439222695598, + "acc_norm": 0.375, + "acc_norm_stderr": 0.04048439222695598 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.63, + "acc_stderr": 0.04852365870939101, + "acc_norm": 0.63, + "acc_norm_stderr": 0.04852365870939101 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5, + "acc_stderr": 0.026919095102908273, + "acc_norm": 0.5, + "acc_norm_stderr": 0.026919095102908273 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.50920245398773, + "acc_stderr": 0.03927705600787443, + "acc_norm": 0.50920245398773, + "acc_norm_stderr": 0.03927705600787443 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.027777777777777797, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.027777777777777797 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5647668393782384, + "acc_stderr": 0.03578038165008586, + "acc_norm": 0.5647668393782384, + "acc_norm_stderr": 0.03578038165008586 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2894736842105263, + "acc_stderr": 0.04266339443159394, + "acc_norm": 0.2894736842105263, + "acc_norm_stderr": 0.04266339443159394 + }, + 
"harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6220183486238532, + "acc_stderr": 0.02078918706672812, + "acc_norm": 0.6220183486238532, + "acc_norm_stderr": 0.02078918706672812 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3253968253968254, + "acc_stderr": 0.041905964388711366, + "acc_norm": 0.3253968253968254, + "acc_norm_stderr": 0.041905964388711366 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.42483660130718953, + "acc_stderr": 0.028304576673141114, + "acc_norm": 0.42483660130718953, + "acc_norm_stderr": 0.028304576673141114 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.41, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.41, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6446280991735537, + "acc_stderr": 0.0436923632657398, + "acc_norm": 0.6446280991735537, + "acc_norm_stderr": 0.0436923632657398 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40789473684210525, + "acc_stderr": 0.03999309712777471, + "acc_norm": 0.40789473684210525, + "acc_norm_stderr": 0.03999309712777471 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3954248366013072, + "acc_stderr": 0.01978046595477752, + "acc_norm": 0.3954248366013072, + "acc_norm_stderr": 0.01978046595477752 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3617021276595745, + "acc_stderr": 0.028663820147199492, + "acc_norm": 0.3617021276595745, + "acc_norm_stderr": 0.028663820147199492 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.19642857142857142, + "acc_stderr": 0.037709700493470166, + "acc_norm": 0.19642857142857142, + "acc_norm_stderr": 0.037709700493470166 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3425925925925926, + "acc_stderr": 0.032365852526021574, + "acc_norm": 0.3425925925925926, + "acc_norm_stderr": 0.032365852526021574 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 
0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.45588235294117646, + "acc_stderr": 0.030254372573976698, + "acc_norm": 0.45588235294117646, + "acc_norm_stderr": 0.030254372573976698 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.49795918367346936, + "acc_stderr": 0.0320089533497105, + "acc_norm": 0.49795918367346936, + "acc_norm_stderr": 0.0320089533497105 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6455696202531646, + "acc_stderr": 0.0311373042971858, + "acc_norm": 0.6455696202531646, + "acc_norm_stderr": 0.0311373042971858 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3644067796610169, + "acc_stderr": 0.012291694983056474, + "acc_norm": 0.3644067796610169, + "acc_norm_stderr": 0.012291694983056474 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5245098039215687, + "acc_stderr": 0.035050931943487976, + "acc_norm": 0.5245098039215687, + "acc_norm_stderr": 0.035050931943487976 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5393939393939394, + "acc_stderr": 0.03892207016552012, + "acc_norm": 0.5393939393939394, + "acc_norm_stderr": 0.03892207016552012 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.31211750305997554, + "mc1_stderr": 0.01622075676952091, + "mc2": 0.4713446696460188, + "mc2_stderr": 0.01534716845498683 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4935064935064935, + "acc_stderr": 0.01718890435907731, + "acc_norm": 0.5667060212514758, + "acc_norm_stderr": 0.017036683641893098 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + 
"harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + 
"harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "HumanF-MarkrAI/pub-llama-13B-v6", + "model_sha": "af533b0f41590d5d8c5ced917b19a82ac98bb201", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/HumanF-MarkrAI/pub-llama-13b-v1/result_2023-10-19 18:44:30.json b/HumanF-MarkrAI/pub-llama-13b-v1/result_2023-10-19 18:44:30.json new file mode 100644 index 0000000000000000000000000000000000000000..0ebf33ec25a761caec265aaec21f178de036c605 --- /dev/null +++ b/HumanF-MarkrAI/pub-llama-13b-v1/result_2023-10-19 18:44:30.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3677474402730375, + "acc_stderr": 0.01409099561816849, + "acc_norm": 0.41552901023890787, + "acc_norm_stderr": 0.01440136664121639 + }, + "harness|ko_hellaswag|10": { + "acc": 0.40579565823541125, + "acc_stderr": 0.004900417982582061, + "acc_norm": 0.5321649073889664, + "acc_norm_stderr": 0.004979446038824757 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4678362573099415, + "acc_stderr": 0.03826882417660369, + "acc_norm": 0.4678362573099415, + "acc_norm_stderr": 0.03826882417660369 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5825242718446602, + "acc_stderr": 
0.04882840548212238, + "acc_norm": 0.5825242718446602, + "acc_norm_stderr": 0.04882840548212238 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5006385696040868, + "acc_stderr": 0.01787994891443168, + "acc_norm": 0.5006385696040868, + "acc_norm_stderr": 0.01787994891443168 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.43703703703703706, + "acc_stderr": 0.04284958639753399, + "acc_norm": 0.43703703703703706, + "acc_norm_stderr": 0.04284958639753399 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847415, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847415 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.34893617021276596, + "acc_stderr": 0.031158522131357787, + "acc_norm": 0.34893617021276596, + "acc_norm_stderr": 0.031158522131357787 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3795180722891566, + "acc_stderr": 0.037777988227480165, + "acc_norm": 0.3795180722891566, + "acc_norm_stderr": 0.037777988227480165 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5080385852090032, + "acc_stderr": 0.02839442137098453, + "acc_norm": 0.5080385852090032, + "acc_norm_stderr": 0.02839442137098453 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.47085201793721976, + "acc_stderr": 0.03350073248773404, + "acc_norm": 0.47085201793721976, + "acc_norm_stderr": 0.03350073248773404 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4351145038167939, + "acc_stderr": 0.04348208051644858, + "acc_norm": 0.4351145038167939, + "acc_norm_stderr": 0.04348208051644858 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.03547601494006938, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.03547601494006938 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4482758620689655, + 
"acc_stderr": 0.04144311810878151, + "acc_norm": 0.4482758620689655, + "acc_norm_stderr": 0.04144311810878151 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.04220773659171453, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.04220773659171453 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4789915966386555, + "acc_stderr": 0.03244980849990028, + "acc_norm": 0.4789915966386555, + "acc_norm_stderr": 0.03244980849990028 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4153846153846154, + "acc_stderr": 0.024985354923102318, + "acc_norm": 0.4153846153846154, + "acc_norm_stderr": 0.024985354923102318 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4351851851851852, + "acc_stderr": 0.04792898170907062, + "acc_norm": 0.4351851851851852, + "acc_norm_stderr": 0.04792898170907062 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3694581280788177, + "acc_stderr": 0.03395970381998576, + "acc_norm": 0.3694581280788177, + "acc_norm_stderr": 0.03395970381998576 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4483870967741935, + "acc_stderr": 0.02829205683011273, + "acc_norm": 0.4483870967741935, + "acc_norm_stderr": 0.02829205683011273 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6538461538461539, + "acc_stderr": 0.0311669573672359, + "acc_norm": 0.6538461538461539, + "acc_norm_stderr": 0.0311669573672359 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4339622641509434, + "acc_stderr": 0.030503292013342592, + "acc_norm": 0.4339622641509434, + "acc_norm_stderr": 0.030503292013342592 + }, + "harness|ko_mmlu_public_relations|5": 
{ + "acc": 0.5272727272727272, + "acc_stderr": 0.04782001791380061, + "acc_norm": 0.5272727272727272, + "acc_norm_stderr": 0.04782001791380061 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.028317533496066482, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.028317533496066482 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.037579499229433426, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.037579499229433426 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5572139303482587, + "acc_stderr": 0.03512310964123935, + "acc_norm": 0.5572139303482587, + "acc_norm_stderr": 0.03512310964123935 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4046242774566474, + "acc_stderr": 0.03742461193887248, + "acc_norm": 0.4046242774566474, + "acc_norm_stderr": 0.03742461193887248 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.35978835978835977, + "acc_stderr": 0.024718075944129277, + "acc_norm": 0.35978835978835977, + "acc_norm_stderr": 0.024718075944129277 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.04016660030451233, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.04016660030451233 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.44508670520231214, + "acc_stderr": 0.026756255129663765, + "acc_norm": 0.44508670520231214, + "acc_norm_stderr": 0.026756255129663765 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4171779141104294, + "acc_stderr": 0.038741028598180814, + "acc_norm": 0.4171779141104294, + "acc_norm_stderr": 0.038741028598180814 + }, + 
"harness|ko_mmlu_prehistory|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.027744313443376536, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.027744313443376536 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.47150259067357514, + "acc_stderr": 0.036025735712884414, + "acc_norm": 0.47150259067357514, + "acc_norm_stderr": 0.036025735712884414 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.20175438596491227, + "acc_stderr": 0.03775205013583639, + "acc_norm": 0.20175438596491227, + "acc_norm_stderr": 0.03775205013583639 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5174311926605505, + "acc_stderr": 0.02142429187185315, + "acc_norm": 0.5174311926605505, + "acc_norm_stderr": 0.02142429187185315 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4126984126984127, + "acc_stderr": 0.04403438954768177, + "acc_norm": 0.4126984126984127, + "acc_norm_stderr": 0.04403438954768177 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4150326797385621, + "acc_stderr": 0.028213504177824093, + "acc_norm": 0.4150326797385621, + "acc_norm_stderr": 0.028213504177824093 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.45, + "acc_stderr": 0.049999999999999996, + "acc_norm": 0.45, + "acc_norm_stderr": 0.049999999999999996 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5867768595041323, + "acc_stderr": 0.04495087843548408, + "acc_norm": 0.5867768595041323, + "acc_norm_stderr": 0.04495087843548408 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.45394736842105265, + "acc_stderr": 0.04051646342874142, + "acc_norm": 0.45394736842105265, + "acc_norm_stderr": 0.04051646342874142 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.019431775677037313, + "acc_norm": 0.3611111111111111, + 
"acc_norm_stderr": 0.019431775677037313 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.02812163604063989, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.02812163604063989 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25, + "acc_stderr": 0.04109974682633932, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04109974682633932 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.03324708911809117, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.03324708911809117 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24916201117318434, + "acc_stderr": 0.01446589382985992, + "acc_norm": 0.24916201117318434, + "acc_norm_stderr": 0.01446589382985992 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.41, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.34191176470588236, + "acc_stderr": 0.02881472242225417, + "acc_norm": 0.34191176470588236, + "acc_norm_stderr": 0.02881472242225417 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4775510204081633, + "acc_stderr": 0.03197694118713673, + "acc_norm": 0.4775510204081633, + "acc_norm_stderr": 0.03197694118713673 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5780590717299579, + "acc_stderr": 0.032148146302403695, + "acc_norm": 0.5780590717299579, + "acc_norm_stderr": 0.032148146302403695 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3624511082138201, + "acc_stderr": 0.012277512533252495, + "acc_norm": 0.3624511082138201, + "acc_norm_stderr": 0.012277512533252495 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.45098039215686275, + "acc_stderr": 
0.03492406104163614, + "acc_norm": 0.45098039215686275, + "acc_norm_stderr": 0.03492406104163614 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4909090909090909, + "acc_stderr": 0.03903698647748441, + "acc_norm": 0.4909090909090909, + "acc_norm_stderr": 0.03903698647748441 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2864137086903305, + "mc1_stderr": 0.015826142439502342, + "mc2": 0.4364091486561351, + "mc2_stderr": 0.015369734802451228 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3919716646989374, + "acc_stderr": 0.016784332119424077, + "acc_norm": 0.43565525383707204, + "acc_norm_stderr": 0.017047415229476334 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 
1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "HumanF-MarkrAI/pub-llama-13b-v1", + "model_sha": "4aa21e41dfcb82ff842306b3b5eadd2b258bfc80", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/HumanF-MarkrAI/pub-llama-13b-v2/result_2023-10-22 16:02:46.json b/HumanF-MarkrAI/pub-llama-13b-v2/result_2023-10-22 16:02:46.json new file mode 100644 index 0000000000000000000000000000000000000000..7d6e7f824011eef0be37e0eb0a9eb0c103636d19 --- /dev/null +++ 
b/HumanF-MarkrAI/pub-llama-13b-v2/result_2023-10-22 16:02:46.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.371160409556314, + "acc_stderr": 0.014117971901142824, + "acc_norm": 0.4197952218430034, + "acc_norm_stderr": 0.014422181226303026 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4048994224258116, + "acc_stderr": 0.004898693652043317, + "acc_norm": 0.5401314479187412, + "acc_norm_stderr": 0.0049736830262021746 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.47953216374269003, + "acc_stderr": 0.0383161053282193, + "acc_norm": 0.47953216374269003, + "acc_norm_stderr": 0.0383161053282193 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5631067961165048, + "acc_stderr": 0.049111471073657764, + "acc_norm": 0.5631067961165048, + "acc_norm_stderr": 0.049111471073657764 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5019157088122606, + "acc_stderr": 0.017879832259026677, + "acc_norm": 0.5019157088122606, + "acc_norm_stderr": 0.017879832259026677 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.042925967182569816, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.042925967182569816 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3276595744680851, + "acc_stderr": 0.030683020843231008, + "acc_norm": 0.3276595744680851, + "acc_norm_stderr": 0.030683020843231008 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3493975903614458, + "acc_stderr": 0.0371172519074075, + "acc_norm": 0.3493975903614458, + "acc_norm_stderr": 0.0371172519074075 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5048231511254019, + "acc_stderr": 0.028396770444111298, + "acc_norm": 0.5048231511254019, + "acc_norm_stderr": 0.028396770444111298 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.48878923766816146, + 
"acc_stderr": 0.033549366530984746, + "acc_norm": 0.48878923766816146, + "acc_norm_stderr": 0.033549366530984746 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.44274809160305345, + "acc_stderr": 0.0435644720266507, + "acc_norm": 0.44274809160305345, + "acc_norm_stderr": 0.0435644720266507 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5353535353535354, + "acc_stderr": 0.03553436368828063, + "acc_norm": 0.5353535353535354, + "acc_norm_stderr": 0.03553436368828063 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.45517241379310347, + "acc_stderr": 0.04149886942192117, + "acc_norm": 0.45517241379310347, + "acc_norm_stderr": 0.04149886942192117 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.04280105837364395, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.04280105837364395 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4495798319327731, + "acc_stderr": 0.03231293497137707, + "acc_norm": 0.4495798319327731, + "acc_norm_stderr": 0.03231293497137707 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4256410256410256, + "acc_stderr": 0.02506909438729654, + "acc_norm": 0.4256410256410256, + "acc_norm_stderr": 0.02506909438729654 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.41, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.41, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4537037037037037, + "acc_stderr": 0.04812917324536821, + "acc_norm": 0.4537037037037037, + "acc_norm_stderr": 0.04812917324536821 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + 
"acc": 0.3793103448275862, + "acc_stderr": 0.03413963805906234, + "acc_norm": 0.3793103448275862, + "acc_norm_stderr": 0.03413963805906234 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.43548387096774194, + "acc_stderr": 0.02820622559150274, + "acc_norm": 0.43548387096774194, + "acc_norm_stderr": 0.02820622559150274 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6452991452991453, + "acc_stderr": 0.03134250486245402, + "acc_norm": 0.6452991452991453, + "acc_norm_stderr": 0.03134250486245402 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4339622641509434, + "acc_stderr": 0.030503292013342592, + "acc_norm": 0.4339622641509434, + "acc_norm_stderr": 0.030503292013342592 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5272727272727272, + "acc_stderr": 0.04782001791380061, + "acc_norm": 0.5272727272727272, + "acc_norm_stderr": 0.04782001791380061 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.31851851851851853, + "acc_stderr": 0.02840653309060846, + "acc_norm": 0.31851851851851853, + "acc_norm_stderr": 0.02840653309060846 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.03780445850526733, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.03780445850526733 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5522388059701493, + "acc_stderr": 0.035161847729521675, + "acc_norm": 0.5522388059701493, + "acc_norm_stderr": 0.035161847729521675 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3930635838150289, + "acc_stderr": 0.0372424959581773, + "acc_norm": 0.3930635838150289, + "acc_norm_stderr": 0.0372424959581773 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.36772486772486773, + "acc_stderr": 0.024833839825562424, + "acc_norm": 0.36772486772486773, + "acc_norm_stderr": 0.024833839825562424 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3680555555555556, + "acc_stderr": 0.040329990539607195, + "acc_norm": 0.3680555555555556, + 
"acc_norm_stderr": 0.040329990539607195 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.65, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.65, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4479768786127168, + "acc_stderr": 0.02677299065336182, + "acc_norm": 0.4479768786127168, + "acc_norm_stderr": 0.02677299065336182 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.44785276073619634, + "acc_stderr": 0.03906947479456601, + "acc_norm": 0.44785276073619634, + "acc_norm_stderr": 0.03906947479456601 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.47530864197530864, + "acc_stderr": 0.02778680093142745, + "acc_norm": 0.47530864197530864, + "acc_norm_stderr": 0.02778680093142745 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.48186528497409326, + "acc_stderr": 0.036060650018329185, + "acc_norm": 0.48186528497409326, + "acc_norm_stderr": 0.036060650018329185 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.22807017543859648, + "acc_stderr": 0.03947152782669415, + "acc_norm": 0.22807017543859648, + "acc_norm_stderr": 0.03947152782669415 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5376146788990825, + "acc_stderr": 0.021376575274397576, + "acc_norm": 0.5376146788990825, + "acc_norm_stderr": 0.021376575274397576 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.40476190476190477, + "acc_stderr": 0.043902592653775614, + "acc_norm": 0.40476190476190477, + "acc_norm_stderr": 0.043902592653775614 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4215686274509804, + "acc_stderr": 0.028275490156791434, + "acc_norm": 
0.4215686274509804, + "acc_norm_stderr": 0.028275490156791434 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5950413223140496, + "acc_stderr": 0.04481137755942469, + "acc_norm": 0.5950413223140496, + "acc_norm_stderr": 0.04481137755942469 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4473684210526316, + "acc_stderr": 0.04046336883978251, + "acc_norm": 0.4473684210526316, + "acc_norm_stderr": 0.04046336883978251 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.36764705882352944, + "acc_stderr": 0.019506291693954854, + "acc_norm": 0.36764705882352944, + "acc_norm_stderr": 0.019506291693954854 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.34397163120567376, + "acc_stderr": 0.02833801742861132, + "acc_norm": 0.34397163120567376, + "acc_norm_stderr": 0.02833801742861132 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25892857142857145, + "acc_stderr": 0.041577515398656284, + "acc_norm": 0.25892857142857145, + "acc_norm_stderr": 0.041577515398656284 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.03324708911809117, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.03324708911809117 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24804469273743016, + "acc_stderr": 0.014444157808261453, + "acc_norm": 0.24804469273743016, + "acc_norm_stderr": 0.014444157808261453 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.34558823529411764, + 
"acc_stderr": 0.02888819310398865, + "acc_norm": 0.34558823529411764, + "acc_norm_stderr": 0.02888819310398865 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.46530612244897956, + "acc_stderr": 0.03193207024425314, + "acc_norm": 0.46530612244897956, + "acc_norm_stderr": 0.03193207024425314 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.569620253164557, + "acc_stderr": 0.03223017195937599, + "acc_norm": 0.569620253164557, + "acc_norm_stderr": 0.03223017195937599 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.35723598435462844, + "acc_stderr": 0.012238615750316506, + "acc_norm": 0.35723598435462844, + "acc_norm_stderr": 0.012238615750316506 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.45098039215686275, + "acc_stderr": 0.03492406104163614, + "acc_norm": 0.45098039215686275, + "acc_norm_stderr": 0.03492406104163614 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4909090909090909, + "acc_stderr": 0.03903698647748441, + "acc_norm": 0.4909090909090909, + "acc_norm_stderr": 0.03903698647748441 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2802937576499388, + "mc1_stderr": 0.015723139524608742, + "mc2": 0.43609767583849846, + "mc2_stderr": 0.015308496603243212 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4214876033057851, + "acc_stderr": 0.016977101932601518, + "acc_norm": 0.4757969303423849, + "acc_norm_stderr": 0.017170202466520748 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + 
"harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + 
"harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "HumanF-MarkrAI/pub-llama-13b-v2", + "model_sha": "d59387039c395781b62f514db7bf4fb32d254522", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/HwiyeolJo/TeamJaeCorpo-v0.2/result_2024-01-15 12:10:51.json b/HwiyeolJo/TeamJaeCorpo-v0.2/result_2024-01-15 12:10:51.json new file mode 100644 index 0000000000000000000000000000000000000000..fb71accb24f65f399c9f114bfbada0a63284c8b6 --- /dev/null +++ b/HwiyeolJo/TeamJaeCorpo-v0.2/result_2024-01-15 12:10:51.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3532423208191126, + "acc_stderr": 0.013967822714840053, + "acc_norm": 0.3984641638225256, + "acc_norm_stderr": 0.014306946052735565 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3676558454491137, + "acc_stderr": 0.004811815959388833, + "acc_norm": 0.47410874327823144, + "acc_norm_stderr": 0.004983087049281746 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4502923976608187, + "acc_stderr": 0.038158273659132366, + "acc_norm": 0.4502923976608187, + "acc_norm_stderr": 0.038158273659132366 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5922330097087378, + "acc_stderr": 0.048657775704107696, + "acc_norm": 0.5922330097087378, + "acc_norm_stderr": 0.048657775704107696 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5108556832694764, + "acc_stderr": 0.017875748840242407, + "acc_norm": 0.5108556832694764, + "acc_norm_stderr": 0.017875748840242407 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3925925925925926, + "acc_stderr": 0.04218506215368879, + "acc_norm": 0.3925925925925926, + "acc_norm_stderr": 0.04218506215368879 + }, + 
"harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621503, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621503 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3659574468085106, + "acc_stderr": 0.0314895582974553, + "acc_norm": 0.3659574468085106, + "acc_norm_stderr": 0.0314895582974553 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.35542168674698793, + "acc_stderr": 0.03726214354322415, + "acc_norm": 0.35542168674698793, + "acc_norm_stderr": 0.03726214354322415 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4662379421221865, + "acc_stderr": 0.02833327710956279, + "acc_norm": 0.4662379421221865, + "acc_norm_stderr": 0.02833327710956279 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.45739910313901344, + "acc_stderr": 0.033435777055830646, + "acc_norm": 0.45739910313901344, + "acc_norm_stderr": 0.033435777055830646 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48091603053435117, + "acc_stderr": 0.04382094705550989, + "acc_norm": 0.48091603053435117, + "acc_norm_stderr": 0.04382094705550989 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5202020202020202, + "acc_stderr": 0.03559443565563919, + "acc_norm": 0.5202020202020202, + "acc_norm_stderr": 0.03559443565563919 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.04104269211806232, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.04104269211806232 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.18627450980392157, + "acc_stderr": 0.03873958714149352, + "acc_norm": 0.18627450980392157, + "acc_norm_stderr": 0.03873958714149352 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.44537815126050423, + "acc_stderr": 0.0322841062671639, + "acc_norm": 0.44537815126050423, + "acc_norm_stderr": 
0.0322841062671639 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4358974358974359, + "acc_stderr": 0.025141801511177495, + "acc_norm": 0.4358974358974359, + "acc_norm_stderr": 0.025141801511177495 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5277777777777778, + "acc_stderr": 0.04826217294139894, + "acc_norm": 0.5277777777777778, + "acc_norm_stderr": 0.04826217294139894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.33497536945812806, + "acc_stderr": 0.033208527423483104, + "acc_norm": 0.33497536945812806, + "acc_norm_stderr": 0.033208527423483104 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4645161290322581, + "acc_stderr": 0.02837228779796295, + "acc_norm": 0.4645161290322581, + "acc_norm_stderr": 0.02837228779796295 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6495726495726496, + "acc_stderr": 0.0312561082442188, + "acc_norm": 0.6495726495726496, + "acc_norm_stderr": 0.0312561082442188 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.44150943396226416, + "acc_stderr": 0.030561590426731833, + "acc_norm": 0.44150943396226416, + "acc_norm_stderr": 0.030561590426731833 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4727272727272727, + "acc_stderr": 0.04782001791380063, + "acc_norm": 0.4727272727272727, + "acc_norm_stderr": 0.04782001791380063 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.027840811495871913, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.027840811495871913 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2781456953642384, + "acc_stderr": 0.03658603262763743, + "acc_norm": 
0.2781456953642384, + "acc_norm_stderr": 0.03658603262763743 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.572139303482587, + "acc_stderr": 0.03498541988407795, + "acc_norm": 0.572139303482587, + "acc_norm_stderr": 0.03498541988407795 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3930635838150289, + "acc_stderr": 0.03724249595817731, + "acc_norm": 0.3930635838150289, + "acc_norm_stderr": 0.03724249595817731 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.02326651221373057, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.02326651221373057 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3541666666666667, + "acc_stderr": 0.039994111357535424, + "acc_norm": 0.3541666666666667, + "acc_norm_stderr": 0.039994111357535424 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5202312138728323, + "acc_stderr": 0.02689704999638286, + "acc_norm": 0.5202312138728323, + "acc_norm_stderr": 0.02689704999638286 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4049079754601227, + "acc_stderr": 0.03856672163548913, + "acc_norm": 0.4049079754601227, + "acc_norm_stderr": 0.03856672163548913 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.027648477877413324, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.027648477877413324 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5129533678756477, + "acc_stderr": 0.03607228061047749, + 
"acc_norm": 0.5129533678756477, + "acc_norm_stderr": 0.03607228061047749 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.03999423879281338, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.03999423879281338 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.47706422018348627, + "acc_stderr": 0.0214147570581755, + "acc_norm": 0.47706422018348627, + "acc_norm_stderr": 0.0214147570581755 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.36507936507936506, + "acc_stderr": 0.04306241259127153, + "acc_norm": 0.36507936507936506, + "acc_norm_stderr": 0.04306241259127153 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.434640522875817, + "acc_stderr": 0.028384256704883037, + "acc_norm": 0.434640522875817, + "acc_norm_stderr": 0.028384256704883037 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6611570247933884, + "acc_stderr": 0.043207678075366705, + "acc_norm": 0.6611570247933884, + "acc_norm_stderr": 0.043207678075366705 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4144736842105263, + "acc_stderr": 0.04008973785779206, + "acc_norm": 0.4144736842105263, + "acc_norm_stderr": 0.04008973785779206 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3562091503267974, + "acc_stderr": 0.0193733324207245, + "acc_norm": 0.3562091503267974, + "acc_norm_stderr": 0.0193733324207245 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.02812163604063989, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.02812163604063989 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.29464285714285715, + "acc_stderr": 0.043270409325787296, + "acc_norm": 0.29464285714285715, + "acc_norm_stderr": 0.043270409325787296 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 
0.32407407407407407, + "acc_stderr": 0.03191923445686186, + "acc_norm": 0.32407407407407407, + "acc_norm_stderr": 0.03191923445686186 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2581005586592179, + "acc_stderr": 0.01463518561652782, + "acc_norm": 0.2581005586592179, + "acc_norm_stderr": 0.01463518561652782 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.34558823529411764, + "acc_stderr": 0.028888193103988644, + "acc_norm": 0.34558823529411764, + "acc_norm_stderr": 0.028888193103988644 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5387755102040817, + "acc_stderr": 0.03191282052669278, + "acc_norm": 0.5387755102040817, + "acc_norm_stderr": 0.03191282052669278 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5569620253164557, + "acc_stderr": 0.03233532777533484, + "acc_norm": 0.5569620253164557, + "acc_norm_stderr": 0.03233532777533484 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3494132985658409, + "acc_stderr": 0.012177306252786702, + "acc_norm": 0.3494132985658409, + "acc_norm_stderr": 0.012177306252786702 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4852941176470588, + "acc_stderr": 0.03507793834791324, + "acc_norm": 0.4852941176470588, + "acc_norm_stderr": 0.03507793834791324 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.503030303030303, + "acc_stderr": 0.03904272341431856, + "acc_norm": 0.503030303030303, + "acc_norm_stderr": 0.03904272341431856 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2962056303549572, + "mc1_stderr": 0.015983595101811392, + "mc2": 0.4656302357866705, + "mc2_stderr": 0.01570298909368207 + }, + 
"harness|ko_commongen_v2|2": { + "acc": 0.43683589138134593, + "acc_stderr": 0.017052633559856076, + "acc_norm": 0.45808736717827625, + "acc_norm_stderr": 0.017129852117911144 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + 
"harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "HwiyeolJo/TeamJaeCorpo-v0.2", + "model_sha": "f4a798a366446c6d954343de89d5c086c68eaedc", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/HwiyeolJo/TeamJaeCorpo/result_2024-01-06 01:12:56.json b/HwiyeolJo/TeamJaeCorpo/result_2024-01-06 01:12:56.json new file mode 100644 index 0000000000000000000000000000000000000000..3993edbe97fec78a28f727947b251893fd7e3ef8 --- /dev/null +++ b/HwiyeolJo/TeamJaeCorpo/result_2024-01-06 01:12:56.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3532423208191126, + "acc_stderr": 0.013967822714840053, + "acc_norm": 0.3984641638225256, + "acc_norm_stderr": 0.014306946052735565 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3676558454491137, + "acc_stderr": 0.004811815959388833, + "acc_norm": 0.47410874327823144, + "acc_norm_stderr": 0.004983087049281746 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 
0.4502923976608187, + "acc_stderr": 0.038158273659132366, + "acc_norm": 0.4502923976608187, + "acc_norm_stderr": 0.038158273659132366 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5922330097087378, + "acc_stderr": 0.048657775704107696, + "acc_norm": 0.5922330097087378, + "acc_norm_stderr": 0.048657775704107696 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5108556832694764, + "acc_stderr": 0.017875748840242407, + "acc_norm": 0.5108556832694764, + "acc_norm_stderr": 0.017875748840242407 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3925925925925926, + "acc_stderr": 0.04218506215368879, + "acc_norm": 0.3925925925925926, + "acc_norm_stderr": 0.04218506215368879 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621503, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621503 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3659574468085106, + "acc_stderr": 0.0314895582974553, + "acc_norm": 0.3659574468085106, + "acc_norm_stderr": 0.0314895582974553 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.35542168674698793, + "acc_stderr": 0.03726214354322415, + "acc_norm": 0.35542168674698793, + "acc_norm_stderr": 0.03726214354322415 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4662379421221865, + "acc_stderr": 0.02833327710956279, + "acc_norm": 0.4662379421221865, + "acc_norm_stderr": 0.02833327710956279 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.45739910313901344, + "acc_stderr": 0.033435777055830646, + "acc_norm": 0.45739910313901344, + "acc_norm_stderr": 0.033435777055830646 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48091603053435117, + "acc_stderr": 0.04382094705550989, + "acc_norm": 0.48091603053435117, + "acc_norm_stderr": 0.04382094705550989 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 
0.5202020202020202, + "acc_stderr": 0.03559443565563919, + "acc_norm": 0.5202020202020202, + "acc_norm_stderr": 0.03559443565563919 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.04104269211806232, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.04104269211806232 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.18627450980392157, + "acc_stderr": 0.03873958714149352, + "acc_norm": 0.18627450980392157, + "acc_norm_stderr": 0.03873958714149352 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.44537815126050423, + "acc_stderr": 0.0322841062671639, + "acc_norm": 0.44537815126050423, + "acc_norm_stderr": 0.0322841062671639 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4358974358974359, + "acc_stderr": 0.025141801511177495, + "acc_norm": 0.4358974358974359, + "acc_norm_stderr": 0.025141801511177495 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5277777777777778, + "acc_stderr": 0.04826217294139894, + "acc_norm": 0.5277777777777778, + "acc_norm_stderr": 0.04826217294139894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.33497536945812806, + "acc_stderr": 0.033208527423483104, + "acc_norm": 0.33497536945812806, + "acc_norm_stderr": 0.033208527423483104 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4645161290322581, + "acc_stderr": 0.02837228779796295, + "acc_norm": 0.4645161290322581, + "acc_norm_stderr": 0.02837228779796295 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6495726495726496, + "acc_stderr": 0.0312561082442188, + "acc_norm": 0.6495726495726496, + "acc_norm_stderr": 0.0312561082442188 + }, + 
"harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.44150943396226416, + "acc_stderr": 0.030561590426731833, + "acc_norm": 0.44150943396226416, + "acc_norm_stderr": 0.030561590426731833 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4727272727272727, + "acc_stderr": 0.04782001791380063, + "acc_norm": 0.4727272727272727, + "acc_norm_stderr": 0.04782001791380063 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.027840811495871913, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.027840811495871913 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2781456953642384, + "acc_stderr": 0.03658603262763743, + "acc_norm": 0.2781456953642384, + "acc_norm_stderr": 0.03658603262763743 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.572139303482587, + "acc_stderr": 0.03498541988407795, + "acc_norm": 0.572139303482587, + "acc_norm_stderr": 0.03498541988407795 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3930635838150289, + "acc_stderr": 0.03724249595817731, + "acc_norm": 0.3930635838150289, + "acc_norm_stderr": 0.03724249595817731 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.02326651221373057, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.02326651221373057 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3541666666666667, + "acc_stderr": 0.039994111357535424, + "acc_norm": 0.3541666666666667, + "acc_norm_stderr": 0.039994111357535424 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5202312138728323, + "acc_stderr": 0.02689704999638286, + "acc_norm": 0.5202312138728323, + 
"acc_norm_stderr": 0.02689704999638286 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4049079754601227, + "acc_stderr": 0.03856672163548913, + "acc_norm": 0.4049079754601227, + "acc_norm_stderr": 0.03856672163548913 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.027648477877413324, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.027648477877413324 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5129533678756477, + "acc_stderr": 0.03607228061047749, + "acc_norm": 0.5129533678756477, + "acc_norm_stderr": 0.03607228061047749 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.03999423879281338, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.03999423879281338 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.47706422018348627, + "acc_stderr": 0.0214147570581755, + "acc_norm": 0.47706422018348627, + "acc_norm_stderr": 0.0214147570581755 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.36507936507936506, + "acc_stderr": 0.04306241259127153, + "acc_norm": 0.36507936507936506, + "acc_norm_stderr": 0.04306241259127153 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.434640522875817, + "acc_stderr": 0.028384256704883037, + "acc_norm": 0.434640522875817, + "acc_norm_stderr": 0.028384256704883037 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6611570247933884, + "acc_stderr": 0.043207678075366705, + "acc_norm": 0.6611570247933884, + "acc_norm_stderr": 0.043207678075366705 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4144736842105263, + "acc_stderr": 0.04008973785779206, + 
"acc_norm": 0.4144736842105263, + "acc_norm_stderr": 0.04008973785779206 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3562091503267974, + "acc_stderr": 0.0193733324207245, + "acc_norm": 0.3562091503267974, + "acc_norm_stderr": 0.0193733324207245 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.02812163604063989, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.02812163604063989 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.29464285714285715, + "acc_stderr": 0.043270409325787296, + "acc_norm": 0.29464285714285715, + "acc_norm_stderr": 0.043270409325787296 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.32407407407407407, + "acc_stderr": 0.03191923445686186, + "acc_norm": 0.32407407407407407, + "acc_norm_stderr": 0.03191923445686186 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2581005586592179, + "acc_stderr": 0.01463518561652782, + "acc_norm": 0.2581005586592179, + "acc_norm_stderr": 0.01463518561652782 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.34558823529411764, + "acc_stderr": 0.028888193103988644, + "acc_norm": 0.34558823529411764, + "acc_norm_stderr": 0.028888193103988644 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5387755102040817, + "acc_stderr": 0.03191282052669278, + "acc_norm": 0.5387755102040817, + "acc_norm_stderr": 0.03191282052669278 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5569620253164557, + "acc_stderr": 0.03233532777533484, + "acc_norm": 0.5569620253164557, + "acc_norm_stderr": 0.03233532777533484 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 
0.3494132985658409, + "acc_stderr": 0.012177306252786702, + "acc_norm": 0.3494132985658409, + "acc_norm_stderr": 0.012177306252786702 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4852941176470588, + "acc_stderr": 0.03507793834791324, + "acc_norm": 0.4852941176470588, + "acc_norm_stderr": 0.03507793834791324 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.503030303030303, + "acc_stderr": 0.03904272341431856, + "acc_norm": 0.503030303030303, + "acc_norm_stderr": 0.03904272341431856 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2962056303549572, + "mc1_stderr": 0.015983595101811392, + "mc2": 0.4656302357866705, + "mc2_stderr": 0.01570298909368207 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.43683589138134593, + "acc_stderr": 0.017052633559856076, + "acc_norm": 0.45808736717827625, + "acc_norm_stderr": 0.017129852117911144 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + 
"harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "HwiyeolJo/TeamJaeCorpo", + "model_sha": "32816adb88f45d573f744311867caa7ec94b8e14", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/HwiyeolJo/testtt/result_2024-01-03 16:44:36.json 
b/HwiyeolJo/testtt/result_2024-01-03 16:44:36.json new file mode 100644 index 0000000000000000000000000000000000000000..21e67456d5239c32ab034e0cc199f9a2ffcb03a9 --- /dev/null +++ b/HwiyeolJo/testtt/result_2024-01-03 16:44:36.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3532423208191126, + "acc_stderr": 0.013967822714840053, + "acc_norm": 0.3984641638225256, + "acc_norm_stderr": 0.014306946052735565 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3676558454491137, + "acc_stderr": 0.004811815959388833, + "acc_norm": 0.47410874327823144, + "acc_norm_stderr": 0.004983087049281746 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4502923976608187, + "acc_stderr": 0.038158273659132366, + "acc_norm": 0.4502923976608187, + "acc_norm_stderr": 0.038158273659132366 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5922330097087378, + "acc_stderr": 0.048657775704107696, + "acc_norm": 0.5922330097087378, + "acc_norm_stderr": 0.048657775704107696 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5108556832694764, + "acc_stderr": 0.017875748840242407, + "acc_norm": 0.5108556832694764, + "acc_norm_stderr": 0.017875748840242407 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3925925925925926, + "acc_stderr": 0.04218506215368879, + "acc_norm": 0.3925925925925926, + "acc_norm_stderr": 0.04218506215368879 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621503, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621503 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3659574468085106, + "acc_stderr": 0.0314895582974553, + "acc_norm": 0.3659574468085106, + "acc_norm_stderr": 0.0314895582974553 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.35542168674698793, + "acc_stderr": 0.03726214354322415, + "acc_norm": 0.35542168674698793, + "acc_norm_stderr": 0.03726214354322415 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4662379421221865, + "acc_stderr": 0.02833327710956279, 
+ "acc_norm": 0.4662379421221865, + "acc_norm_stderr": 0.02833327710956279 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.45739910313901344, + "acc_stderr": 0.033435777055830646, + "acc_norm": 0.45739910313901344, + "acc_norm_stderr": 0.033435777055830646 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48091603053435117, + "acc_stderr": 0.04382094705550989, + "acc_norm": 0.48091603053435117, + "acc_norm_stderr": 0.04382094705550989 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5202020202020202, + "acc_stderr": 0.03559443565563919, + "acc_norm": 0.5202020202020202, + "acc_norm_stderr": 0.03559443565563919 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.04104269211806232, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.04104269211806232 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.18627450980392157, + "acc_stderr": 0.03873958714149352, + "acc_norm": 0.18627450980392157, + "acc_norm_stderr": 0.03873958714149352 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.44537815126050423, + "acc_stderr": 0.0322841062671639, + "acc_norm": 0.44537815126050423, + "acc_norm_stderr": 0.0322841062671639 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4358974358974359, + "acc_stderr": 0.025141801511177495, + "acc_norm": 0.4358974358974359, + "acc_norm_stderr": 0.025141801511177495 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5277777777777778, + "acc_stderr": 
0.04826217294139894, + "acc_norm": 0.5277777777777778, + "acc_norm_stderr": 0.04826217294139894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.33497536945812806, + "acc_stderr": 0.033208527423483104, + "acc_norm": 0.33497536945812806, + "acc_norm_stderr": 0.033208527423483104 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4645161290322581, + "acc_stderr": 0.02837228779796295, + "acc_norm": 0.4645161290322581, + "acc_norm_stderr": 0.02837228779796295 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6495726495726496, + "acc_stderr": 0.0312561082442188, + "acc_norm": 0.6495726495726496, + "acc_norm_stderr": 0.0312561082442188 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.44150943396226416, + "acc_stderr": 0.030561590426731833, + "acc_norm": 0.44150943396226416, + "acc_norm_stderr": 0.030561590426731833 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4727272727272727, + "acc_stderr": 0.04782001791380063, + "acc_norm": 0.4727272727272727, + "acc_norm_stderr": 0.04782001791380063 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.027840811495871913, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.027840811495871913 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2781456953642384, + "acc_stderr": 0.03658603262763743, + "acc_norm": 0.2781456953642384, + "acc_norm_stderr": 0.03658603262763743 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.572139303482587, + "acc_stderr": 0.03498541988407795, + "acc_norm": 0.572139303482587, + "acc_norm_stderr": 0.03498541988407795 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3930635838150289, + "acc_stderr": 0.03724249595817731, + "acc_norm": 0.3930635838150289, + "acc_norm_stderr": 0.03724249595817731 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.02326651221373057, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.02326651221373057 + }, 
+ "harness|ko_mmlu_college_biology|5": { + "acc": 0.3541666666666667, + "acc_stderr": 0.039994111357535424, + "acc_norm": 0.3541666666666667, + "acc_norm_stderr": 0.039994111357535424 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5202312138728323, + "acc_stderr": 0.02689704999638286, + "acc_norm": 0.5202312138728323, + "acc_norm_stderr": 0.02689704999638286 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4049079754601227, + "acc_stderr": 0.03856672163548913, + "acc_norm": 0.4049079754601227, + "acc_norm_stderr": 0.03856672163548913 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.027648477877413324, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.027648477877413324 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5129533678756477, + "acc_stderr": 0.03607228061047749, + "acc_norm": 0.5129533678756477, + "acc_norm_stderr": 0.03607228061047749 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.03999423879281338, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.03999423879281338 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.47706422018348627, + "acc_stderr": 0.0214147570581755, + "acc_norm": 0.47706422018348627, + "acc_norm_stderr": 0.0214147570581755 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.36507936507936506, + "acc_stderr": 0.04306241259127153, + "acc_norm": 0.36507936507936506, + "acc_norm_stderr": 
0.04306241259127153 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.434640522875817, + "acc_stderr": 0.028384256704883037, + "acc_norm": 0.434640522875817, + "acc_norm_stderr": 0.028384256704883037 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6611570247933884, + "acc_stderr": 0.043207678075366705, + "acc_norm": 0.6611570247933884, + "acc_norm_stderr": 0.043207678075366705 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4144736842105263, + "acc_stderr": 0.04008973785779206, + "acc_norm": 0.4144736842105263, + "acc_norm_stderr": 0.04008973785779206 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3562091503267974, + "acc_stderr": 0.0193733324207245, + "acc_norm": 0.3562091503267974, + "acc_norm_stderr": 0.0193733324207245 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.02812163604063989, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.02812163604063989 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.29464285714285715, + "acc_stderr": 0.043270409325787296, + "acc_norm": 0.29464285714285715, + "acc_norm_stderr": 0.043270409325787296 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.32407407407407407, + "acc_stderr": 0.03191923445686186, + "acc_norm": 0.32407407407407407, + "acc_norm_stderr": 0.03191923445686186 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2581005586592179, + "acc_stderr": 0.01463518561652782, + "acc_norm": 0.2581005586592179, + "acc_norm_stderr": 0.01463518561652782 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + 
"acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.34558823529411764, + "acc_stderr": 0.028888193103988644, + "acc_norm": 0.34558823529411764, + "acc_norm_stderr": 0.028888193103988644 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5387755102040817, + "acc_stderr": 0.03191282052669278, + "acc_norm": 0.5387755102040817, + "acc_norm_stderr": 0.03191282052669278 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5569620253164557, + "acc_stderr": 0.03233532777533484, + "acc_norm": 0.5569620253164557, + "acc_norm_stderr": 0.03233532777533484 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3494132985658409, + "acc_stderr": 0.012177306252786702, + "acc_norm": 0.3494132985658409, + "acc_norm_stderr": 0.012177306252786702 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4852941176470588, + "acc_stderr": 0.03507793834791324, + "acc_norm": 0.4852941176470588, + "acc_norm_stderr": 0.03507793834791324 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.503030303030303, + "acc_stderr": 0.03904272341431856, + "acc_norm": 0.503030303030303, + "acc_norm_stderr": 0.03904272341431856 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2962056303549572, + "mc1_stderr": 0.015983595101811392, + "mc2": 0.4656302357866705, + "mc2_stderr": 0.01570298909368207 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.43683589138134593, + "acc_stderr": 0.017052633559856076, + "acc_norm": 0.45808736717827625, + "acc_norm_stderr": 0.017129852117911144 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + 
"harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + 
"harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "HwiyeolJo/testtt", + "model_sha": "5daa3c3eb6261513476e7bcfa8b96f2d5cd6001c", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/HwiyeolJo/testttt/result_2024-01-03 18:00:51.json b/HwiyeolJo/testttt/result_2024-01-03 18:00:51.json new file mode 100644 index 0000000000000000000000000000000000000000..d098594095df876040b7cf467f35df13144287cb --- /dev/null +++ b/HwiyeolJo/testttt/result_2024-01-03 18:00:51.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3532423208191126, + "acc_stderr": 0.013967822714840053, + "acc_norm": 0.3984641638225256, + "acc_norm_stderr": 0.014306946052735565 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3676558454491137, + "acc_stderr": 0.004811815959388833, + "acc_norm": 0.47410874327823144, + "acc_norm_stderr": 0.004983087049281746 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4502923976608187, + "acc_stderr": 0.038158273659132366, + "acc_norm": 0.4502923976608187, + "acc_norm_stderr": 0.038158273659132366 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5922330097087378, + "acc_stderr": 0.048657775704107696, + "acc_norm": 0.5922330097087378, + "acc_norm_stderr": 0.048657775704107696 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5108556832694764, + "acc_stderr": 0.017875748840242407, + "acc_norm": 0.5108556832694764, + "acc_norm_stderr": 0.017875748840242407 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3925925925925926, + "acc_stderr": 0.04218506215368879, + "acc_norm": 0.3925925925925926, + "acc_norm_stderr": 
0.04218506215368879 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621503, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621503 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3659574468085106, + "acc_stderr": 0.0314895582974553, + "acc_norm": 0.3659574468085106, + "acc_norm_stderr": 0.0314895582974553 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.35542168674698793, + "acc_stderr": 0.03726214354322415, + "acc_norm": 0.35542168674698793, + "acc_norm_stderr": 0.03726214354322415 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4662379421221865, + "acc_stderr": 0.02833327710956279, + "acc_norm": 0.4662379421221865, + "acc_norm_stderr": 0.02833327710956279 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.45739910313901344, + "acc_stderr": 0.033435777055830646, + "acc_norm": 0.45739910313901344, + "acc_norm_stderr": 0.033435777055830646 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48091603053435117, + "acc_stderr": 0.04382094705550989, + "acc_norm": 0.48091603053435117, + "acc_norm_stderr": 0.04382094705550989 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5202020202020202, + "acc_stderr": 0.03559443565563919, + "acc_norm": 0.5202020202020202, + "acc_norm_stderr": 0.03559443565563919 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.04104269211806232, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.04104269211806232 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.18627450980392157, + "acc_stderr": 0.03873958714149352, + "acc_norm": 0.18627450980392157, + "acc_norm_stderr": 0.03873958714149352 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.44537815126050423, + "acc_stderr": 0.0322841062671639, + "acc_norm": 
0.44537815126050423, + "acc_norm_stderr": 0.0322841062671639 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4358974358974359, + "acc_stderr": 0.025141801511177495, + "acc_norm": 0.4358974358974359, + "acc_norm_stderr": 0.025141801511177495 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5277777777777778, + "acc_stderr": 0.04826217294139894, + "acc_norm": 0.5277777777777778, + "acc_norm_stderr": 0.04826217294139894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.33497536945812806, + "acc_stderr": 0.033208527423483104, + "acc_norm": 0.33497536945812806, + "acc_norm_stderr": 0.033208527423483104 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4645161290322581, + "acc_stderr": 0.02837228779796295, + "acc_norm": 0.4645161290322581, + "acc_norm_stderr": 0.02837228779796295 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6495726495726496, + "acc_stderr": 0.0312561082442188, + "acc_norm": 0.6495726495726496, + "acc_norm_stderr": 0.0312561082442188 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.44150943396226416, + "acc_stderr": 0.030561590426731833, + "acc_norm": 0.44150943396226416, + "acc_norm_stderr": 0.030561590426731833 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4727272727272727, + "acc_stderr": 0.04782001791380063, + "acc_norm": 0.4727272727272727, + "acc_norm_stderr": 0.04782001791380063 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.027840811495871913, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.027840811495871913 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2781456953642384, + 
"acc_stderr": 0.03658603262763743, + "acc_norm": 0.2781456953642384, + "acc_norm_stderr": 0.03658603262763743 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.572139303482587, + "acc_stderr": 0.03498541988407795, + "acc_norm": 0.572139303482587, + "acc_norm_stderr": 0.03498541988407795 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3930635838150289, + "acc_stderr": 0.03724249595817731, + "acc_norm": 0.3930635838150289, + "acc_norm_stderr": 0.03724249595817731 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.02326651221373057, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.02326651221373057 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3541666666666667, + "acc_stderr": 0.039994111357535424, + "acc_norm": 0.3541666666666667, + "acc_norm_stderr": 0.039994111357535424 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5202312138728323, + "acc_stderr": 0.02689704999638286, + "acc_norm": 0.5202312138728323, + "acc_norm_stderr": 0.02689704999638286 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4049079754601227, + "acc_stderr": 0.03856672163548913, + "acc_norm": 0.4049079754601227, + "acc_norm_stderr": 0.03856672163548913 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.027648477877413324, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.027648477877413324 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5129533678756477, 
+ "acc_stderr": 0.03607228061047749, + "acc_norm": 0.5129533678756477, + "acc_norm_stderr": 0.03607228061047749 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.03999423879281338, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.03999423879281338 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.47706422018348627, + "acc_stderr": 0.0214147570581755, + "acc_norm": 0.47706422018348627, + "acc_norm_stderr": 0.0214147570581755 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.36507936507936506, + "acc_stderr": 0.04306241259127153, + "acc_norm": 0.36507936507936506, + "acc_norm_stderr": 0.04306241259127153 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.434640522875817, + "acc_stderr": 0.028384256704883037, + "acc_norm": 0.434640522875817, + "acc_norm_stderr": 0.028384256704883037 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6611570247933884, + "acc_stderr": 0.043207678075366705, + "acc_norm": 0.6611570247933884, + "acc_norm_stderr": 0.043207678075366705 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4144736842105263, + "acc_stderr": 0.04008973785779206, + "acc_norm": 0.4144736842105263, + "acc_norm_stderr": 0.04008973785779206 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3562091503267974, + "acc_stderr": 0.0193733324207245, + "acc_norm": 0.3562091503267974, + "acc_norm_stderr": 0.0193733324207245 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.02812163604063989, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.02812163604063989 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.29464285714285715, + "acc_stderr": 0.043270409325787296, + "acc_norm": 0.29464285714285715, + "acc_norm_stderr": 0.043270409325787296 + }, + 
"harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.32407407407407407, + "acc_stderr": 0.03191923445686186, + "acc_norm": 0.32407407407407407, + "acc_norm_stderr": 0.03191923445686186 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2581005586592179, + "acc_stderr": 0.01463518561652782, + "acc_norm": 0.2581005586592179, + "acc_norm_stderr": 0.01463518561652782 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.34558823529411764, + "acc_stderr": 0.028888193103988644, + "acc_norm": 0.34558823529411764, + "acc_norm_stderr": 0.028888193103988644 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5387755102040817, + "acc_stderr": 0.03191282052669278, + "acc_norm": 0.5387755102040817, + "acc_norm_stderr": 0.03191282052669278 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5569620253164557, + "acc_stderr": 0.03233532777533484, + "acc_norm": 0.5569620253164557, + "acc_norm_stderr": 0.03233532777533484 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3494132985658409, + "acc_stderr": 0.012177306252786702, + "acc_norm": 0.3494132985658409, + "acc_norm_stderr": 0.012177306252786702 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4852941176470588, + "acc_stderr": 0.03507793834791324, + "acc_norm": 0.4852941176470588, + "acc_norm_stderr": 0.03507793834791324 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.503030303030303, + "acc_stderr": 0.03904272341431856, + "acc_norm": 0.503030303030303, + "acc_norm_stderr": 0.03904272341431856 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2962056303549572, + "mc1_stderr": 0.015983595101811392, + "mc2": 0.4656302357866705, + 
"mc2_stderr": 0.01570298909368207 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.43683589138134593, + "acc_stderr": 0.017052633559856076, + "acc_norm": 0.45808736717827625, + "acc_norm_stderr": 0.017129852117911144 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + 
"harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "HwiyeolJo/testttt", + "model_sha": "7409deaa6c866195b31dc8482414b4d64a40c372", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/I-BRICKS/Cerebro_BM_solar_v01/result_2024-07-22 06:56:27.json b/I-BRICKS/Cerebro_BM_solar_v01/result_2024-07-22 06:56:27.json new file mode 100644 index 0000000000000000000000000000000000000000..2035e5d03397fd8ccb5192b1c438eba9f1eb10df --- /dev/null +++ b/I-BRICKS/Cerebro_BM_solar_v01/result_2024-07-22 06:56:27.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.757679180887372, + "acc_stderr": 0.012521593295800118, + "acc_norm": 0.7901023890784983, + "acc_norm_stderr": 0.011900548748047449 + }, + "harness|ko_hellaswag|10": { + "acc": 0.7101175064728141, + "acc_stderr": 0.004527804016253785, + "acc_norm": 0.811790479984067, + "acc_norm_stderr": 0.0039008054167367014 + }, + 
"harness|ko_mmlu_world_religions|5": { + "acc": 0.6783625730994152, + "acc_stderr": 0.03582529442573122, + "acc_norm": 0.6783625730994152, + "acc_norm_stderr": 0.03582529442573122 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.7961165048543689, + "acc_stderr": 0.0398913985953177, + "acc_norm": 0.7961165048543689, + "acc_norm_stderr": 0.0398913985953177 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6743295019157088, + "acc_stderr": 0.016757989458549682, + "acc_norm": 0.6743295019157088, + "acc_norm_stderr": 0.016757989458549682 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.043097329010363554, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.043097329010363554 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4808510638297872, + "acc_stderr": 0.032662042990646775, + "acc_norm": 0.4808510638297872, + "acc_norm_stderr": 0.032662042990646775 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4819277108433735, + "acc_stderr": 0.038899512528272166, + "acc_norm": 0.4819277108433735, + "acc_norm_stderr": 0.038899512528272166 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6302250803858521, + "acc_stderr": 0.027417996705630998, + "acc_norm": 0.6302250803858521, + "acc_norm_stderr": 0.027417996705630998 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.6457399103139013, + "acc_stderr": 0.032100621541349864, + "acc_norm": 0.6457399103139013, + "acc_norm_stderr": 0.032100621541349864 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.549618320610687, + "acc_stderr": 0.04363643698524779, + "acc_norm": 0.549618320610687, + "acc_norm_stderr": 0.04363643698524779 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.53, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.53, + "acc_norm_stderr": 0.050161355804659205 + }, + 
"harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7575757575757576, + "acc_stderr": 0.030532892233932036, + "acc_norm": 0.7575757575757576, + "acc_norm_stderr": 0.030532892233932036 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5103448275862069, + "acc_stderr": 0.04165774775728763, + "acc_norm": 0.5103448275862069, + "acc_norm_stderr": 0.04165774775728763 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3235294117647059, + "acc_stderr": 0.046550104113196177, + "acc_norm": 0.3235294117647059, + "acc_norm_stderr": 0.046550104113196177 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6302521008403361, + "acc_stderr": 0.03135709599613591, + "acc_norm": 0.6302521008403361, + "acc_norm_stderr": 0.03135709599613591 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.6128205128205129, + "acc_stderr": 0.02469721693087893, + "acc_norm": 0.6128205128205129, + "acc_norm_stderr": 0.02469721693087893 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.7, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.7, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6759259259259259, + "acc_stderr": 0.04524596007030048, + "acc_norm": 0.6759259259259259, + "acc_norm_stderr": 0.04524596007030048 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.034819048444388045, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.034819048444388045 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6, + "acc_stderr": 0.027869320571664625, + "acc_norm": 0.6, + "acc_norm_stderr": 0.027869320571664625 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.8205128205128205, + "acc_stderr": 0.02514093595033543, + "acc_norm": 0.8205128205128205, + "acc_norm_stderr": 
0.02514093595033543 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5886792452830188, + "acc_stderr": 0.03028500925900979, + "acc_norm": 0.5886792452830188, + "acc_norm_stderr": 0.03028500925900979 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.04607582090719976, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.04607582090719976 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.32592592592592595, + "acc_stderr": 0.028578348365473072, + "acc_norm": 0.32592592592592595, + "acc_norm_stderr": 0.028578348365473072 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3443708609271523, + "acc_stderr": 0.038796870240733264, + "acc_norm": 0.3443708609271523, + "acc_norm_stderr": 0.038796870240733264 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7313432835820896, + "acc_stderr": 0.03134328358208954, + "acc_norm": 0.7313432835820896, + "acc_norm_stderr": 0.03134328358208954 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5606936416184971, + "acc_stderr": 0.03784271932887467, + "acc_norm": 0.5606936416184971, + "acc_norm_stderr": 0.03784271932887467 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.4523809523809524, + "acc_stderr": 0.02563425811555496, + "acc_norm": 0.4523809523809524, + "acc_norm_stderr": 0.02563425811555496 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.6111111111111112, + "acc_stderr": 0.04076663253918567, + "acc_norm": 0.6111111111111112, + "acc_norm_stderr": 0.04076663253918567 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.72, + "acc_stderr": 0.04512608598542126, + "acc_norm": 0.72, + "acc_norm_stderr": 0.04512608598542126 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5809248554913294, + "acc_stderr": 0.02656417811142262, + "acc_norm": 
0.5809248554913294, + "acc_norm_stderr": 0.02656417811142262 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.6073619631901841, + "acc_stderr": 0.0383674090783103, + "acc_norm": 0.6073619631901841, + "acc_norm_stderr": 0.0383674090783103 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6450617283950617, + "acc_stderr": 0.026624152478845853, + "acc_norm": 0.6450617283950617, + "acc_norm_stderr": 0.026624152478845853 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7564766839378239, + "acc_stderr": 0.030975436386845426, + "acc_norm": 0.7564766839378239, + "acc_norm_stderr": 0.030975436386845426 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.4649122807017544, + "acc_stderr": 0.046920083813689104, + "acc_norm": 0.4649122807017544, + "acc_norm_stderr": 0.046920083813689104 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.7302752293577982, + "acc_stderr": 0.01902848671111545, + "acc_norm": 0.7302752293577982, + "acc_norm_stderr": 0.01902848671111545 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.044444444444444495, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.044444444444444495 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5947712418300654, + "acc_stderr": 0.028110928492809075, + "acc_norm": 0.5947712418300654, + "acc_norm_stderr": 0.028110928492809075 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.72, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.72, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.71900826446281, + "acc_stderr": 0.04103203830514512, + "acc_norm": 0.71900826446281, + "acc_norm_stderr": 0.04103203830514512 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.6381578947368421, + "acc_stderr": 
0.03910525752849725, + "acc_norm": 0.6381578947368421, + "acc_norm_stderr": 0.03910525752849725 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5751633986928104, + "acc_stderr": 0.019997973035458333, + "acc_norm": 0.5751633986928104, + "acc_norm_stderr": 0.019997973035458333 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.450354609929078, + "acc_stderr": 0.02968010556502904, + "acc_norm": 0.450354609929078, + "acc_norm_stderr": 0.02968010556502904 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.41964285714285715, + "acc_stderr": 0.04684099321077106, + "acc_norm": 0.41964285714285715, + "acc_norm_stderr": 0.04684099321077106 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.0340763209385405, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.0340763209385405 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.3486033519553073, + "acc_stderr": 0.015937484656687022, + "acc_norm": 0.3486033519553073, + "acc_norm_stderr": 0.015937484656687022 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.69, + "acc_stderr": 0.046482319871173156, + "acc_norm": 0.69, + "acc_norm_stderr": 0.046482319871173156 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5036764705882353, + "acc_stderr": 0.030372015885428195, + "acc_norm": 0.5036764705882353, + "acc_norm_stderr": 0.030372015885428195 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6571428571428571, + "acc_stderr": 0.03038726291954772, + "acc_norm": 0.6571428571428571, + "acc_norm_stderr": 0.03038726291954772 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7257383966244726, + "acc_stderr": 0.029041333510598035, + "acc_norm": 0.7257383966244726, + "acc_norm_stderr": 0.029041333510598035 + }, + 
"harness|ko_mmlu_professional_law|5": { + "acc": 0.44654498044328556, + "acc_stderr": 0.012697046024399663, + "acc_norm": 0.44654498044328556, + "acc_norm_stderr": 0.012697046024399663 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6568627450980392, + "acc_stderr": 0.03332139944668086, + "acc_norm": 0.6568627450980392, + "acc_norm_stderr": 0.03332139944668086 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6424242424242425, + "acc_stderr": 0.03742597043806587, + "acc_norm": 0.6424242424242425, + "acc_norm_stderr": 0.03742597043806587 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.7980416156670747, + "mc1_stderr": 0.014053957441512352, + "mc2": 0.8601950601950346, + "mc2_stderr": 0.011710216294067244 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5017709563164109, + "acc_stderr": 0.017190246276231863, + "acc_norm": 0.526564344746163, + "acc_norm_stderr": 0.017166075717577747 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + 
"harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "I-BRICKS/Cerebro_BM_solar_v01", + "model_sha": "31bbd0564cdb8917c6a7825274bee9245ff8d9c8", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git 
a/ITT-AF/ITT-42dot_LLM-PLM-1.3B-dpo-v1.0/result_2024-02-03 13:38:33.json b/ITT-AF/ITT-42dot_LLM-PLM-1.3B-dpo-v1.0/result_2024-02-03 13:38:33.json new file mode 100644 index 0000000000000000000000000000000000000000..f7e5d27892c37ad89a654ae38c79347a9bbeb7df --- /dev/null +++ b/ITT-AF/ITT-42dot_LLM-PLM-1.3B-dpo-v1.0/result_2024-02-03 13:38:33.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.27559726962457337, + "acc_stderr": 0.01305716965576184, + "acc_norm": 0.3216723549488055, + "acc_norm_stderr": 0.013650488084494164 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3536148177653854, + "acc_stderr": 0.004771143074426132, + "acc_norm": 0.45120493925512845, + "acc_norm_stderr": 0.004965963647210318 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.34502923976608185, + "acc_stderr": 0.036459813773888065, + "acc_norm": 0.34502923976608185, + "acc_norm_stderr": 0.036459813773888065 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.1553398058252427, + "acc_stderr": 0.03586594738573974, + "acc_norm": 0.1553398058252427, + "acc_norm_stderr": 0.03586594738573974 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2388250319284802, + "acc_stderr": 0.015246803197398698, + "acc_norm": 0.2388250319284802, + "acc_norm_stderr": 0.015246803197398698 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.03785714465066654, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.03785714465066654 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3021276595744681, + "acc_stderr": 0.030017554471880554, + "acc_norm": 0.3021276595744681, + "acc_norm_stderr": 0.030017554471880554 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.30120481927710846, + "acc_stderr": 0.035716092300534796, + "acc_norm": 0.30120481927710846, + "acc_norm_stderr": 
0.035716092300534796 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.21864951768488747, + "acc_stderr": 0.023475581417861106, + "acc_norm": 0.21864951768488747, + "acc_norm_stderr": 0.023475581417861106 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.28699551569506726, + "acc_stderr": 0.03036037971029196, + "acc_norm": 0.28699551569506726, + "acc_norm_stderr": 0.03036037971029196 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.26717557251908397, + "acc_stderr": 0.03880848301082396, + "acc_norm": 0.26717557251908397, + "acc_norm_stderr": 0.03880848301082396 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.24242424242424243, + "acc_stderr": 0.03053289223393202, + "acc_norm": 0.24242424242424243, + "acc_norm_stderr": 0.03053289223393202 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.23448275862068965, + "acc_stderr": 0.035306258743465914, + "acc_norm": 0.23448275862068965, + "acc_norm_stderr": 0.035306258743465914 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237653, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237653 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.23949579831932774, + "acc_stderr": 0.027722065493361255, + "acc_norm": 0.23949579831932774, + "acc_norm_stderr": 0.027722065493361255 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.23076923076923078, + "acc_stderr": 0.021362027725222717, + "acc_norm": 0.23076923076923078, + "acc_norm_stderr": 0.021362027725222717 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 
0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.040191074725573483, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.040191074725573483 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.21182266009852216, + "acc_stderr": 0.028748983689941065, + "acc_norm": 0.21182266009852216, + "acc_norm_stderr": 0.028748983689941065 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.25483870967741934, + "acc_stderr": 0.024790118459332208, + "acc_norm": 0.25483870967741934, + "acc_norm_stderr": 0.024790118459332208 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2905982905982906, + "acc_stderr": 0.029745048572674054, + "acc_norm": 0.2905982905982906, + "acc_norm_stderr": 0.029745048572674054 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.22641509433962265, + "acc_stderr": 0.025757559893106727, + "acc_norm": 0.22641509433962265, + "acc_norm_stderr": 0.025757559893106727 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2818181818181818, + "acc_stderr": 0.04309118709946459, + "acc_norm": 0.2818181818181818, + "acc_norm_stderr": 0.04309118709946459 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.27037037037037037, + "acc_stderr": 0.02708037281514566, + "acc_norm": 0.27037037037037037, + "acc_norm_stderr": 0.02708037281514566 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33112582781456956, + "acc_stderr": 0.038425817186598696, + "acc_norm": 0.33112582781456956, + "acc_norm_stderr": 0.038425817186598696 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.24378109452736318, + "acc_stderr": 0.030360490154014645, + "acc_norm": 0.24378109452736318, + "acc_norm_stderr": 0.030360490154014645 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.21965317919075145, + "acc_stderr": 0.031568093627031744, + "acc_norm": 0.21965317919075145, + "acc_norm_stderr": 0.031568093627031744 + }, + 
"harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.20634920634920634, + "acc_stderr": 0.020842290930114662, + "acc_norm": 0.20634920634920634, + "acc_norm_stderr": 0.020842290930114662 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2847222222222222, + "acc_stderr": 0.037738099906869334, + "acc_norm": 0.2847222222222222, + "acc_norm_stderr": 0.037738099906869334 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816503, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816503 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.28034682080924855, + "acc_stderr": 0.024182427496577615, + "acc_norm": 0.28034682080924855, + "acc_norm_stderr": 0.024182427496577615 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3067484662576687, + "acc_stderr": 0.036230899157241474, + "acc_norm": 0.3067484662576687, + "acc_norm_stderr": 0.036230899157241474 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.25617283950617287, + "acc_stderr": 0.024288533637726095, + "acc_norm": 0.25617283950617287, + "acc_norm_stderr": 0.024288533637726095 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542129, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542129 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.23316062176165803, + "acc_stderr": 0.030516111371476005, + "acc_norm": 0.23316062176165803, + "acc_norm_stderr": 0.030516111371476005 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.040969851398436716, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.040969851398436716 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.23486238532110093, + "acc_stderr": 0.018175110510343585, + "acc_norm": 0.23486238532110093, + 
"acc_norm_stderr": 0.018175110510343585 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.20634920634920634, + "acc_stderr": 0.036196045241242494, + "acc_norm": 0.20634920634920634, + "acc_norm_stderr": 0.036196045241242494 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.28104575163398693, + "acc_stderr": 0.025738854797818716, + "acc_norm": 0.28104575163398693, + "acc_norm_stderr": 0.025738854797818716 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.34710743801652894, + "acc_stderr": 0.043457245702925335, + "acc_norm": 0.34710743801652894, + "acc_norm_stderr": 0.043457245702925335 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.19078947368421054, + "acc_stderr": 0.031975658210325, + "acc_norm": 0.19078947368421054, + "acc_norm_stderr": 0.031975658210325 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.27124183006535946, + "acc_stderr": 0.01798661530403032, + "acc_norm": 0.27124183006535946, + "acc_norm_stderr": 0.01798661530403032 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.24113475177304963, + "acc_stderr": 0.025518731049537755, + "acc_norm": 0.24113475177304963, + "acc_norm_stderr": 0.025518731049537755 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.23214285714285715, + "acc_stderr": 0.04007341809755808, + "acc_norm": 0.23214285714285715, + "acc_norm_stderr": 0.04007341809755808 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.03275773486100999, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.03275773486100999 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27150837988826815, + "acc_stderr": 0.014874252168095278, + "acc_norm": 0.27150837988826815, + "acc_norm_stderr": 0.014874252168095278 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.27, + 
"acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.43014705882352944, + "acc_stderr": 0.030074971917302875, + "acc_norm": 0.43014705882352944, + "acc_norm_stderr": 0.030074971917302875 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.24081632653061225, + "acc_stderr": 0.027372942201788153, + "acc_norm": 0.24081632653061225, + "acc_norm_stderr": 0.027372942201788153 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2616033755274262, + "acc_stderr": 0.028609516716994934, + "acc_norm": 0.2616033755274262, + "acc_norm_stderr": 0.028609516716994934 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2692307692307692, + "acc_stderr": 0.011328734403140313, + "acc_norm": 0.2692307692307692, + "acc_norm_stderr": 0.011328734403140313 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.29901960784313725, + "acc_stderr": 0.03213325717373617, + "acc_norm": 0.29901960784313725, + "acc_norm_stderr": 0.03213325717373617 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.23636363636363636, + "acc_stderr": 0.03317505930009179, + "acc_norm": 0.23636363636363636, + "acc_norm_stderr": 0.03317505930009179 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.29253365973072215, + "mc1_stderr": 0.015925597445286165, + "mc2": 0.43509454295381983, + "mc2_stderr": 0.0148878635424864 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.23730814639905548, + "acc_stderr": 0.014626677837186226, + "acc_norm": 0.35064935064935066, + "acc_norm_stderr": 0.016405556903893295 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + 
"harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 
1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "ITT-AF/ITT-42dot_LLM-PLM-1.3B-dpo-v1.0", + "model_sha": "c92f2bb95b9a55c6535146eec918cfd4ee1247c8", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/ITT-AF/ITT-42dot_LLM-PLM-1.3B-v1.0/result_2024-01-25 18:20:04.json b/ITT-AF/ITT-42dot_LLM-PLM-1.3B-v1.0/result_2024-01-25 18:20:04.json new file mode 100644 index 0000000000000000000000000000000000000000..24c90db41155e28237144054c9c28e0994820d19 --- /dev/null +++ b/ITT-AF/ITT-42dot_LLM-PLM-1.3B-v1.0/result_2024-01-25 18:20:04.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.27303754266211605, + "acc_stderr": 0.013019332762635734, + "acc_norm": 0.32081911262798635, + "acc_norm_stderr": 0.013640943091946522 + }, + "harness|ko_hellaswag|10": { + "acc": 0.35461063533160725, + "acc_stderr": 0.004774174590205146, + "acc_norm": 0.4547898824935272, + "acc_norm_stderr": 0.004969341773423515 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.30409356725146197, + "acc_stderr": 0.03528211258245232, + "acc_norm": 0.30409356725146197, + "acc_norm_stderr": 0.03528211258245232 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.17475728155339806, + "acc_stderr": 0.03760178006026621, + "acc_norm": 0.17475728155339806, + "acc_norm_stderr": 0.03760178006026621 + }, + 
"harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2541507024265645, + "acc_stderr": 0.015569254692045785, + "acc_norm": 0.2541507024265645, + "acc_norm_stderr": 0.015569254692045785 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.03820169914517905, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.03820169914517905 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3148936170212766, + "acc_stderr": 0.03036358219723817, + "acc_norm": 0.3148936170212766, + "acc_norm_stderr": 0.03036358219723817 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3132530120481928, + "acc_stderr": 0.03610805018031023, + "acc_norm": 0.3132530120481928, + "acc_norm_stderr": 0.03610805018031023 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2733118971061093, + "acc_stderr": 0.02531176597542612, + "acc_norm": 0.2733118971061093, + "acc_norm_stderr": 0.02531176597542612 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3004484304932735, + "acc_stderr": 0.030769352008229136, + "acc_norm": 0.3004484304932735, + "acc_norm_stderr": 0.030769352008229136 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.26717557251908397, + "acc_stderr": 0.038808483010823965, + "acc_norm": 0.26717557251908397, + "acc_norm_stderr": 0.038808483010823965 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.22727272727272727, + "acc_stderr": 0.029857515673386414, + "acc_norm": 0.22727272727272727, + "acc_norm_stderr": 0.029857515673386414 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2413793103448276, + "acc_stderr": 0.03565998174135302, + "acc_norm": 0.2413793103448276, + "acc_norm_stderr": 
0.03565998174135302 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.040925639582376536, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.040925639582376536 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.21428571428571427, + "acc_stderr": 0.026653531596715494, + "acc_norm": 0.21428571428571427, + "acc_norm_stderr": 0.026653531596715494 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.23076923076923078, + "acc_stderr": 0.02136202772522271, + "acc_norm": 0.23076923076923078, + "acc_norm_stderr": 0.02136202772522271 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932269, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932269 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.042365112580946315, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.042365112580946315 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.20689655172413793, + "acc_stderr": 0.02850137816789395, + "acc_norm": 0.20689655172413793, + "acc_norm_stderr": 0.02850137816789395 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.24516129032258063, + "acc_stderr": 0.024472243840895514, + "acc_norm": 0.24516129032258063, + "acc_norm_stderr": 0.024472243840895514 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.3076923076923077, + "acc_stderr": 0.030236389942173106, + "acc_norm": 0.3076923076923077, + "acc_norm_stderr": 0.030236389942173106 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.20754716981132076, + "acc_stderr": 0.024959918028911274, + "acc_norm": 0.20754716981132076, + "acc_norm_stderr": 0.024959918028911274 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.22727272727272727, + "acc_stderr": 0.04013964554072775, 
+ "acc_norm": 0.22727272727272727, + "acc_norm_stderr": 0.04013964554072775 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.027309140588230175, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.027309140588230175 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.32450331125827814, + "acc_stderr": 0.03822746937658753, + "acc_norm": 0.32450331125827814, + "acc_norm_stderr": 0.03822746937658753 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.24378109452736318, + "acc_stderr": 0.030360490154014624, + "acc_norm": 0.24378109452736318, + "acc_norm_stderr": 0.030360490154014624 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.1907514450867052, + "acc_stderr": 0.029957851329869337, + "acc_norm": 0.1907514450867052, + "acc_norm_stderr": 0.029957851329869337 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.22486772486772486, + "acc_stderr": 0.02150209607822914, + "acc_norm": 0.22486772486772486, + "acc_norm_stderr": 0.02150209607822914 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.20833333333333334, + "acc_stderr": 0.03396116205845333, + "acc_norm": 0.20833333333333334, + "acc_norm_stderr": 0.03396116205845333 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.24566473988439305, + "acc_stderr": 0.02317629820399201, + "acc_norm": 0.24566473988439305, + "acc_norm_stderr": 0.02317629820399201 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2883435582822086, + "acc_stderr": 0.035590395316173425, + "acc_norm": 0.2883435582822086, + "acc_norm_stderr": 0.035590395316173425 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2839506172839506, + 
"acc_stderr": 0.025089478523765127, + "acc_norm": 0.2839506172839506, + "acc_norm_stderr": 0.025089478523765127 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.24870466321243523, + "acc_stderr": 0.031195840877700307, + "acc_norm": 0.24870466321243523, + "acc_norm_stderr": 0.031195840877700307 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.22807017543859648, + "acc_stderr": 0.03947152782669415, + "acc_norm": 0.22807017543859648, + "acc_norm_stderr": 0.03947152782669415 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.25504587155963304, + "acc_stderr": 0.018688500856535856, + "acc_norm": 0.25504587155963304, + "acc_norm_stderr": 0.018688500856535856 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.1984126984126984, + "acc_stderr": 0.03567016675276863, + "acc_norm": 0.1984126984126984, + "acc_norm_stderr": 0.03567016675276863 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.0252616912197295, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.0252616912197295 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.35537190082644626, + "acc_stderr": 0.04369236326573981, + "acc_norm": 0.35537190082644626, + "acc_norm_stderr": 0.04369236326573981 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.18421052631578946, + "acc_stderr": 0.0315469804508223, + "acc_norm": 0.18421052631578946, + "acc_norm_stderr": 0.0315469804508223 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.018054027458815198, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.018054027458815198 + }, + 
"harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2730496453900709, + "acc_stderr": 0.026577860943307857, + "acc_norm": 0.2730496453900709, + "acc_norm_stderr": 0.026577860943307857 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.20535714285714285, + "acc_stderr": 0.03834241021419074, + "acc_norm": 0.20535714285714285, + "acc_norm_stderr": 0.03834241021419074 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2916666666666667, + "acc_stderr": 0.030998666304560534, + "acc_norm": 0.2916666666666667, + "acc_norm_stderr": 0.030998666304560534 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27262569832402234, + "acc_stderr": 0.014893391735249608, + "acc_norm": 0.27262569832402234, + "acc_norm_stderr": 0.014893391735249608 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.40441176470588236, + "acc_stderr": 0.029812630701569743, + "acc_norm": 0.40441176470588236, + "acc_norm_stderr": 0.029812630701569743 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.23673469387755103, + "acc_stderr": 0.02721283588407315, + "acc_norm": 0.23673469387755103, + "acc_norm_stderr": 0.02721283588407315 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.27848101265822783, + "acc_stderr": 0.02917868230484256, + "acc_norm": 0.27848101265822783, + "acc_norm_stderr": 0.02917868230484256 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2646675358539765, + "acc_stderr": 0.011267332992845516, + "acc_norm": 0.2646675358539765, + "acc_norm_stderr": 0.011267332992845516 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.3088235294117647, + "acc_stderr": 
0.03242661719827218, + "acc_norm": 0.3088235294117647, + "acc_norm_stderr": 0.03242661719827218 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2909090909090909, + "acc_stderr": 0.03546563019624336, + "acc_norm": 0.2909090909090909, + "acc_norm_stderr": 0.03546563019624336 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.26560587515299877, + "mc1_stderr": 0.015461027627253595, + "mc2": 0.41796426846893153, + "mc2_stderr": 0.014850155003426721 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.24557260920897284, + "acc_stderr": 0.014798357154972804, + "acc_norm": 0.33293978748524206, + "acc_norm_stderr": 0.01620243120837379 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 
1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "ITT-AF/ITT-42dot_LLM-PLM-1.3B-v1.0", + "model_sha": "6c8dac3a43480d8231306dc1ed7ca5f6a2b9b90f", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/ITT-AF/ITT-42dot_LLM-PLM-1.3B-v2.0/result_2024-01-26 16:19:48.json b/ITT-AF/ITT-42dot_LLM-PLM-1.3B-v2.0/result_2024-01-26 16:19:48.json new file mode 100644 index 0000000000000000000000000000000000000000..dca4c99bcc8e0d84b3ab5e454e99eeb5ce9d7c9e --- /dev/null +++ 
b/ITT-AF/ITT-42dot_LLM-PLM-1.3B-v2.0/result_2024-01-26 16:19:48.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2773037542662116, + "acc_stderr": 0.013082095839059374, + "acc_norm": 0.3267918088737201, + "acc_norm_stderr": 0.01370666597558734 + }, + "harness|ko_hellaswag|10": { + "acc": 0.35550687114120694, + "acc_stderr": 0.004776883632722611, + "acc_norm": 0.45210117506472813, + "acc_norm_stderr": 0.00496683255324504 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.2573099415204678, + "acc_stderr": 0.03352799844161865, + "acc_norm": 0.2573099415204678, + "acc_norm_stderr": 0.03352799844161865 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.23300970873786409, + "acc_stderr": 0.04185832598928315, + "acc_norm": 0.23300970873786409, + "acc_norm_stderr": 0.04185832598928315 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.24904214559386972, + "acc_stderr": 0.015464676163395983, + "acc_norm": 0.24904214559386972, + "acc_norm_stderr": 0.015464676163395983 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2074074074074074, + "acc_stderr": 0.035025531706783165, + "acc_norm": 0.2074074074074074, + "acc_norm_stderr": 0.035025531706783165 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.32340425531914896, + "acc_stderr": 0.030579442773610344, + "acc_norm": 0.32340425531914896, + "acc_norm_stderr": 0.030579442773610344 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.30120481927710846, + "acc_stderr": 0.035716092300534796, + "acc_norm": 0.30120481927710846, + "acc_norm_stderr": 0.035716092300534796 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2733118971061093, + "acc_stderr": 0.02531176597542612, + "acc_norm": 0.2733118971061093, + "acc_norm_stderr": 0.02531176597542612 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.36771300448430494, + 
"acc_stderr": 0.032361983509282745, + "acc_norm": 0.36771300448430494, + "acc_norm_stderr": 0.032361983509282745 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2748091603053435, + "acc_stderr": 0.03915345408847833, + "acc_norm": 0.2748091603053435, + "acc_norm_stderr": 0.03915345408847833 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.20707070707070707, + "acc_stderr": 0.028869778460267045, + "acc_norm": 0.20707070707070707, + "acc_norm_stderr": 0.028869778460267045 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.22758620689655173, + "acc_stderr": 0.03493950380131184, + "acc_norm": 0.22758620689655173, + "acc_norm_stderr": 0.03493950380131184 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.04023382273617747, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.04023382273617747 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.25630252100840334, + "acc_stderr": 0.028359620870533946, + "acc_norm": 0.25630252100840334, + "acc_norm_stderr": 0.028359620870533946 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2282051282051282, + "acc_stderr": 0.02127839386358628, + "acc_norm": 0.2282051282051282, + "acc_norm_stderr": 0.02127839386358628 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.23, + "acc_stderr": 0.042295258468165065, + "acc_norm": 0.23, + "acc_norm_stderr": 0.042295258468165065 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.23148148148148148, + "acc_stderr": 0.04077494709252626, + "acc_norm": 0.23148148148148148, + "acc_norm_stderr": 0.04077494709252626 + }, + "harness|ko_mmlu_high_school_chemistry|5": 
{ + "acc": 0.24630541871921183, + "acc_stderr": 0.03031509928561773, + "acc_norm": 0.24630541871921183, + "acc_norm_stderr": 0.03031509928561773 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.24838709677419354, + "acc_stderr": 0.02458002892148101, + "acc_norm": 0.24838709677419354, + "acc_norm_stderr": 0.02458002892148101 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.3034188034188034, + "acc_stderr": 0.030118210106942662, + "acc_norm": 0.3034188034188034, + "acc_norm_stderr": 0.030118210106942662 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.24150943396226415, + "acc_stderr": 0.026341480371118355, + "acc_norm": 0.24150943396226415, + "acc_norm_stderr": 0.026341480371118355 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2636363636363636, + "acc_stderr": 0.04220224692971987, + "acc_norm": 0.2636363636363636, + "acc_norm_stderr": 0.04220224692971987 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2814814814814815, + "acc_stderr": 0.027420019350945277, + "acc_norm": 0.2814814814814815, + "acc_norm_stderr": 0.027420019350945277 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3576158940397351, + "acc_stderr": 0.03913453431177258, + "acc_norm": 0.3576158940397351, + "acc_norm_stderr": 0.03913453431177258 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.26865671641791045, + "acc_stderr": 0.031343283582089536, + "acc_norm": 0.26865671641791045, + "acc_norm_stderr": 0.031343283582089536 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2254335260115607, + "acc_stderr": 0.03186209851641143, + "acc_norm": 0.2254335260115607, + "acc_norm_stderr": 0.03186209851641143 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.022644212615525218, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.022644212615525218 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2638888888888889, + "acc_stderr": 0.03685651095897532, + "acc_norm": 
0.2638888888888889, + "acc_norm_stderr": 0.03685651095897532 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.26878612716763006, + "acc_stderr": 0.02386800326250011, + "acc_norm": 0.26878612716763006, + "acc_norm_stderr": 0.02386800326250011 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3006134969325153, + "acc_stderr": 0.03602511318806771, + "acc_norm": 0.3006134969325153, + "acc_norm_stderr": 0.03602511318806771 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.29012345679012347, + "acc_stderr": 0.025251173936495022, + "acc_norm": 0.29012345679012347, + "acc_norm_stderr": 0.025251173936495022 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.27979274611398963, + "acc_stderr": 0.03239637046735703, + "acc_norm": 0.27979274611398963, + "acc_norm_stderr": 0.03239637046735703 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.22807017543859648, + "acc_stderr": 0.03947152782669415, + "acc_norm": 0.22807017543859648, + "acc_norm_stderr": 0.03947152782669415 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.23853211009174313, + "acc_stderr": 0.01827257581023186, + "acc_norm": 0.23853211009174313, + "acc_norm_stderr": 0.01827257581023186 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.24603174603174602, + "acc_stderr": 0.038522733649243156, + "acc_norm": 0.24603174603174602, + "acc_norm_stderr": 0.038522733649243156 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.27124183006535946, + "acc_stderr": 0.02545775669666786, + 
"acc_norm": 0.27124183006535946, + "acc_norm_stderr": 0.02545775669666786 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.38016528925619836, + "acc_stderr": 0.04431324501968432, + "acc_norm": 0.38016528925619836, + "acc_norm_stderr": 0.04431324501968432 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.17105263157894737, + "acc_stderr": 0.030643607071677098, + "acc_norm": 0.17105263157894737, + "acc_norm_stderr": 0.030643607071677098 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.26633986928104575, + "acc_stderr": 0.0178831881346672, + "acc_norm": 0.26633986928104575, + "acc_norm_stderr": 0.0178831881346672 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2801418439716312, + "acc_stderr": 0.026789172351140245, + "acc_norm": 0.2801418439716312, + "acc_norm_stderr": 0.026789172351140245 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2767857142857143, + "acc_stderr": 0.04246624336697622, + "acc_norm": 0.2767857142857143, + "acc_norm_stderr": 0.04246624336697622 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.37962962962962965, + "acc_stderr": 0.03309682581119035, + "acc_norm": 0.37962962962962965, + "acc_norm_stderr": 0.03309682581119035 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27262569832402234, + "acc_stderr": 0.014893391735249608, + "acc_norm": 0.27262569832402234, + "acc_norm_stderr": 0.014893391735249608 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816508, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816508 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4227941176470588, + 
"acc_stderr": 0.030008562845003476, + "acc_norm": 0.4227941176470588, + "acc_norm_stderr": 0.030008562845003476 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.27346938775510204, + "acc_stderr": 0.028535560337128462, + "acc_norm": 0.27346938775510204, + "acc_norm_stderr": 0.028535560337128462 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.24050632911392406, + "acc_stderr": 0.027820781981149675, + "acc_norm": 0.24050632911392406, + "acc_norm_stderr": 0.027820781981149675 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.25749674054758803, + "acc_stderr": 0.011167706014904154, + "acc_norm": 0.25749674054758803, + "acc_norm_stderr": 0.011167706014904154 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.030587591351604246, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.030587591351604246 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2787878787878788, + "acc_stderr": 0.035014387062967806, + "acc_norm": 0.2787878787878788, + "acc_norm_stderr": 0.035014387062967806 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.26805385556915545, + "mc1_stderr": 0.015506204722834559, + "mc2": 0.4174223008245678, + "mc2_stderr": 0.014842652351856885 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2585596221959858, + "acc_stderr": 0.01505335443896399, + "acc_norm": 0.34710743801652894, + "acc_norm_stderr": 0.016366945603281276 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + 
"harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + 
"harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "ITT-AF/ITT-42dot_LLM-PLM-1.3B-v2.0", + "model_sha": "2d8c315f4b602cb8c1f677de62adf07bdc6192f0", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/ITT-AF/ITT-42dot_LLM-PLM-1.3B-v3.0/result_2024-01-30 15:57:37.json b/ITT-AF/ITT-42dot_LLM-PLM-1.3B-v3.0/result_2024-01-30 15:57:37.json new file mode 100644 index 0000000000000000000000000000000000000000..59f87f1aa1ed5402788e07e52eeeb734cb94c56a --- /dev/null +++ b/ITT-AF/ITT-42dot_LLM-PLM-1.3B-v3.0/result_2024-01-30 15:57:37.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.27559726962457337, + "acc_stderr": 0.01305716965576184, + "acc_norm": 0.3216723549488055, + "acc_norm_stderr": 0.013650488084494164 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3536148177653854, + "acc_stderr": 0.004771143074426132, + "acc_norm": 0.45120493925512845, + "acc_norm_stderr": 0.004965963647210318 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.34502923976608185, + "acc_stderr": 0.036459813773888065, + "acc_norm": 0.34502923976608185, + "acc_norm_stderr": 0.036459813773888065 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.1553398058252427, + "acc_stderr": 0.03586594738573974, + "acc_norm": 0.1553398058252427, + "acc_norm_stderr": 0.03586594738573974 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2388250319284802, + "acc_stderr": 0.015246803197398698, + "acc_norm": 0.2388250319284802, + "acc_norm_stderr": 0.015246803197398698 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.03785714465066654, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.03785714465066654 + }, + 
"harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3021276595744681, + "acc_stderr": 0.030017554471880554, + "acc_norm": 0.3021276595744681, + "acc_norm_stderr": 0.030017554471880554 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.30120481927710846, + "acc_stderr": 0.035716092300534796, + "acc_norm": 0.30120481927710846, + "acc_norm_stderr": 0.035716092300534796 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.21864951768488747, + "acc_stderr": 0.023475581417861106, + "acc_norm": 0.21864951768488747, + "acc_norm_stderr": 0.023475581417861106 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.28699551569506726, + "acc_stderr": 0.03036037971029196, + "acc_norm": 0.28699551569506726, + "acc_norm_stderr": 0.03036037971029196 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.26717557251908397, + "acc_stderr": 0.03880848301082396, + "acc_norm": 0.26717557251908397, + "acc_norm_stderr": 0.03880848301082396 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.24242424242424243, + "acc_stderr": 0.03053289223393202, + "acc_norm": 0.24242424242424243, + "acc_norm_stderr": 0.03053289223393202 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.23448275862068965, + "acc_stderr": 0.035306258743465914, + "acc_norm": 0.23448275862068965, + "acc_norm_stderr": 0.035306258743465914 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237653, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237653 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.23949579831932774, + "acc_stderr": 0.027722065493361255, + "acc_norm": 0.23949579831932774, + 
"acc_norm_stderr": 0.027722065493361255 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.23076923076923078, + "acc_stderr": 0.021362027725222717, + "acc_norm": 0.23076923076923078, + "acc_norm_stderr": 0.021362027725222717 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.040191074725573483, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.040191074725573483 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.21182266009852216, + "acc_stderr": 0.028748983689941065, + "acc_norm": 0.21182266009852216, + "acc_norm_stderr": 0.028748983689941065 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.25483870967741934, + "acc_stderr": 0.024790118459332208, + "acc_norm": 0.25483870967741934, + "acc_norm_stderr": 0.024790118459332208 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2905982905982906, + "acc_stderr": 0.029745048572674054, + "acc_norm": 0.2905982905982906, + "acc_norm_stderr": 0.029745048572674054 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.22641509433962265, + "acc_stderr": 0.025757559893106727, + "acc_norm": 0.22641509433962265, + "acc_norm_stderr": 0.025757559893106727 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2818181818181818, + "acc_stderr": 0.04309118709946459, + "acc_norm": 0.2818181818181818, + "acc_norm_stderr": 0.04309118709946459 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.27037037037037037, + "acc_stderr": 0.02708037281514566, + "acc_norm": 0.27037037037037037, + "acc_norm_stderr": 0.02708037281514566 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33112582781456956, + "acc_stderr": 
0.038425817186598696, + "acc_norm": 0.33112582781456956, + "acc_norm_stderr": 0.038425817186598696 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.24378109452736318, + "acc_stderr": 0.030360490154014645, + "acc_norm": 0.24378109452736318, + "acc_norm_stderr": 0.030360490154014645 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.21965317919075145, + "acc_stderr": 0.031568093627031744, + "acc_norm": 0.21965317919075145, + "acc_norm_stderr": 0.031568093627031744 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.20634920634920634, + "acc_stderr": 0.020842290930114662, + "acc_norm": 0.20634920634920634, + "acc_norm_stderr": 0.020842290930114662 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2847222222222222, + "acc_stderr": 0.037738099906869334, + "acc_norm": 0.2847222222222222, + "acc_norm_stderr": 0.037738099906869334 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816503, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816503 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.28034682080924855, + "acc_stderr": 0.024182427496577615, + "acc_norm": 0.28034682080924855, + "acc_norm_stderr": 0.024182427496577615 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3067484662576687, + "acc_stderr": 0.036230899157241474, + "acc_norm": 0.3067484662576687, + "acc_norm_stderr": 0.036230899157241474 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.25617283950617287, + "acc_stderr": 0.024288533637726095, + "acc_norm": 0.25617283950617287, + "acc_norm_stderr": 0.024288533637726095 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542129, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542129 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 
0.23316062176165803, + "acc_stderr": 0.030516111371476005, + "acc_norm": 0.23316062176165803, + "acc_norm_stderr": 0.030516111371476005 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.040969851398436716, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.040969851398436716 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.23486238532110093, + "acc_stderr": 0.018175110510343585, + "acc_norm": 0.23486238532110093, + "acc_norm_stderr": 0.018175110510343585 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.20634920634920634, + "acc_stderr": 0.036196045241242494, + "acc_norm": 0.20634920634920634, + "acc_norm_stderr": 0.036196045241242494 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.28104575163398693, + "acc_stderr": 0.025738854797818716, + "acc_norm": 0.28104575163398693, + "acc_norm_stderr": 0.025738854797818716 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.34710743801652894, + "acc_stderr": 0.043457245702925335, + "acc_norm": 0.34710743801652894, + "acc_norm_stderr": 0.043457245702925335 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.19078947368421054, + "acc_stderr": 0.031975658210325, + "acc_norm": 0.19078947368421054, + "acc_norm_stderr": 0.031975658210325 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.27124183006535946, + "acc_stderr": 0.01798661530403032, + "acc_norm": 0.27124183006535946, + "acc_norm_stderr": 0.01798661530403032 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.24113475177304963, + "acc_stderr": 0.025518731049537755, + "acc_norm": 0.24113475177304963, + "acc_norm_stderr": 0.025518731049537755 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.23214285714285715, + "acc_stderr": 0.04007341809755808, + "acc_norm": 0.23214285714285715, + "acc_norm_stderr": 
0.04007341809755808 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.03275773486100999, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.03275773486100999 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27150837988826815, + "acc_stderr": 0.014874252168095278, + "acc_norm": 0.27150837988826815, + "acc_norm_stderr": 0.014874252168095278 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.43014705882352944, + "acc_stderr": 0.030074971917302875, + "acc_norm": 0.43014705882352944, + "acc_norm_stderr": 0.030074971917302875 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.24081632653061225, + "acc_stderr": 0.027372942201788153, + "acc_norm": 0.24081632653061225, + "acc_norm_stderr": 0.027372942201788153 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2616033755274262, + "acc_stderr": 0.028609516716994934, + "acc_norm": 0.2616033755274262, + "acc_norm_stderr": 0.028609516716994934 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2692307692307692, + "acc_stderr": 0.011328734403140313, + "acc_norm": 0.2692307692307692, + "acc_norm_stderr": 0.011328734403140313 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.29901960784313725, + "acc_stderr": 0.03213325717373617, + "acc_norm": 0.29901960784313725, + "acc_norm_stderr": 0.03213325717373617 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.23636363636363636, + "acc_stderr": 0.03317505930009179, + "acc_norm": 0.23636363636363636, + "acc_norm_stderr": 0.03317505930009179 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.29253365973072215, + 
"mc1_stderr": 0.015925597445286165, + "mc2": 0.43509454295381983, + "mc2_stderr": 0.0148878635424864 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.23730814639905548, + "acc_stderr": 0.014626677837186226, + "acc_norm": 0.35064935064935066, + "acc_norm_stderr": 0.016405556903893295 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + 
"harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "ITT-AF/ITT-42dot_LLM-PLM-1.3B-v3.0", + "model_sha": "ccdfc183000dc328c5f64d6d9f70e2086de71cbf", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/ITT-AF/ITT-42dot_LLM-PLM-1.3B-v4.0/result_2024-02-20 18:08:06.json b/ITT-AF/ITT-42dot_LLM-PLM-1.3B-v4.0/result_2024-02-20 18:08:06.json new file mode 100644 index 0000000000000000000000000000000000000000..c795ab6b7f8d7e4a3e13061e25baf2c13cebb389 --- /dev/null +++ b/ITT-AF/ITT-42dot_LLM-PLM-1.3B-v4.0/result_2024-02-20 18:08:06.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.28498293515358364, + "acc_stderr": 0.013191348179838792, + "acc_norm": 0.3302047781569966, + "acc_norm_stderr": 0.013743085603760427 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3630750846444931, + "acc_stderr": 0.004799034356969394, + "acc_norm": 
0.46285600477992433, + "acc_norm_stderr": 0.004975993795562018 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.30994152046783624, + "acc_stderr": 0.03546976959393163, + "acc_norm": 0.30994152046783624, + "acc_norm_stderr": 0.03546976959393163 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.1553398058252427, + "acc_stderr": 0.03586594738573975, + "acc_norm": 0.1553398058252427, + "acc_norm_stderr": 0.03586594738573975 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2796934865900383, + "acc_stderr": 0.01605079214803654, + "acc_norm": 0.2796934865900383, + "acc_norm_stderr": 0.01605079214803654 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.28888888888888886, + "acc_stderr": 0.0391545063041425, + "acc_norm": 0.28888888888888886, + "acc_norm_stderr": 0.0391545063041425 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.28085106382978725, + "acc_stderr": 0.029379170464124818, + "acc_norm": 0.28085106382978725, + "acc_norm_stderr": 0.029379170464124818 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3313253012048193, + "acc_stderr": 0.03664314777288085, + "acc_norm": 0.3313253012048193, + "acc_norm_stderr": 0.03664314777288085 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.33762057877813506, + "acc_stderr": 0.026858825879488554, + "acc_norm": 0.33762057877813506, + "acc_norm_stderr": 0.026858825879488554 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.24663677130044842, + "acc_stderr": 0.028930413120910867, + "acc_norm": 0.24663677130044842, + "acc_norm_stderr": 0.028930413120910867 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.32061068702290074, + "acc_stderr": 0.040933292298342784, + "acc_norm": 0.32061068702290074, + "acc_norm_stderr": 0.040933292298342784 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.23, + "acc_stderr": 0.042295258468165044, + 
"acc_norm": 0.23, + "acc_norm_stderr": 0.042295258468165044 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.21717171717171718, + "acc_stderr": 0.029376616484945616, + "acc_norm": 0.21717171717171718, + "acc_norm_stderr": 0.029376616484945616 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.296551724137931, + "acc_stderr": 0.03806142687309993, + "acc_norm": 0.296551724137931, + "acc_norm_stderr": 0.03806142687309993 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.040925639582376536, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.040925639582376536 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.24369747899159663, + "acc_stderr": 0.02788682807838055, + "acc_norm": 0.24369747899159663, + "acc_norm_stderr": 0.02788682807838055 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2230769230769231, + "acc_stderr": 0.02110773012724399, + "acc_norm": 0.2230769230769231, + "acc_norm_stderr": 0.02110773012724399 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.044531975073749834, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.044531975073749834 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.22167487684729065, + "acc_stderr": 0.029225575892489614, + "acc_norm": 0.22167487684729065, + "acc_norm_stderr": 0.029225575892489614 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3096774193548387, + "acc_stderr": 0.026302774983517414, + "acc_norm": 0.3096774193548387, + "acc_norm_stderr": 0.026302774983517414 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 
0.2863247863247863, + "acc_stderr": 0.02961432369045665, + "acc_norm": 0.2863247863247863, + "acc_norm_stderr": 0.02961432369045665 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.23773584905660378, + "acc_stderr": 0.026199808807561936, + "acc_norm": 0.23773584905660378, + "acc_norm_stderr": 0.026199808807561936 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.32727272727272727, + "acc_stderr": 0.04494290866252088, + "acc_norm": 0.32727272727272727, + "acc_norm_stderr": 0.04494290866252088 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2740740740740741, + "acc_stderr": 0.027195934804085622, + "acc_norm": 0.2740740740740741, + "acc_norm_stderr": 0.027195934804085622 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.271523178807947, + "acc_stderr": 0.03631329803969653, + "acc_norm": 0.271523178807947, + "acc_norm_stderr": 0.03631329803969653 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.2935323383084577, + "acc_stderr": 0.03220024104534205, + "acc_norm": 0.2935323383084577, + "acc_norm_stderr": 0.03220024104534205 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2254335260115607, + "acc_stderr": 0.03186209851641144, + "acc_norm": 0.2254335260115607, + "acc_norm_stderr": 0.03186209851641144 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.23809523809523808, + "acc_stderr": 0.021935878081184756, + "acc_norm": 0.23809523809523808, + "acc_norm_stderr": 0.021935878081184756 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.19444444444444445, + "acc_stderr": 0.03309615177059005, + "acc_norm": 0.19444444444444445, + "acc_norm_stderr": 0.03309615177059005 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036845, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036845 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + 
"harness|ko_mmlu_moral_disputes|5": { + "acc": 0.24277456647398843, + "acc_stderr": 0.023083658586984204, + "acc_norm": 0.24277456647398843, + "acc_norm_stderr": 0.023083658586984204 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3312883435582822, + "acc_stderr": 0.03697983910025588, + "acc_norm": 0.3312883435582822, + "acc_norm_stderr": 0.03697983910025588 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2654320987654321, + "acc_stderr": 0.02456922360046085, + "acc_norm": 0.2654320987654321, + "acc_norm_stderr": 0.02456922360046085 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.26424870466321243, + "acc_stderr": 0.03182155050916647, + "acc_norm": 0.26424870466321243, + "acc_norm_stderr": 0.03182155050916647 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.03999423879281336, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.03999423879281336 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.30091743119266057, + "acc_stderr": 0.019664751366802114, + "acc_norm": 0.30091743119266057, + "acc_norm_stderr": 0.019664751366802114 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.041349130183033156, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.041349130183033156 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.02495418432487991, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.02495418432487991 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.3884297520661157, + "acc_stderr": 0.04449270350068382, + "acc_norm": 0.3884297520661157, + 
"acc_norm_stderr": 0.04449270350068382 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.19736842105263158, + "acc_stderr": 0.03238981601699397, + "acc_norm": 0.19736842105263158, + "acc_norm_stderr": 0.03238981601699397 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2581699346405229, + "acc_stderr": 0.01770453165325007, + "acc_norm": 0.2581699346405229, + "acc_norm_stderr": 0.01770453165325007 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2801418439716312, + "acc_stderr": 0.026789172351140235, + "acc_norm": 0.2801418439716312, + "acc_norm_stderr": 0.026789172351140235 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.22321428571428573, + "acc_stderr": 0.039523019677025116, + "acc_norm": 0.22321428571428573, + "acc_norm_stderr": 0.039523019677025116 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.20833333333333334, + "acc_stderr": 0.02769691071309395, + "acc_norm": 0.20833333333333334, + "acc_norm_stderr": 0.02769691071309395 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27262569832402234, + "acc_stderr": 0.014893391735249608, + "acc_norm": 0.27262569832402234, + "acc_norm_stderr": 0.014893391735249608 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.40441176470588236, + "acc_stderr": 0.02981263070156974, + "acc_norm": 0.40441176470588236, + "acc_norm_stderr": 0.02981263070156974 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.20816326530612245, + "acc_stderr": 0.025991117672813296, + "acc_norm": 0.20816326530612245, + "acc_norm_stderr": 0.025991117672813296 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 
0.3291139240506329, + "acc_stderr": 0.030587326294702358, + "acc_norm": 0.3291139240506329, + "acc_norm_stderr": 0.030587326294702358 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.25097783572359844, + "acc_stderr": 0.011073730299187226, + "acc_norm": 0.25097783572359844, + "acc_norm_stderr": 0.011073730299187226 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.27941176470588236, + "acc_stderr": 0.031493281045079556, + "acc_norm": 0.27941176470588236, + "acc_norm_stderr": 0.031493281045079556 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3151515151515151, + "acc_stderr": 0.0362773057502241, + "acc_norm": 0.3151515151515151, + "acc_norm_stderr": 0.0362773057502241 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2521419828641371, + "mc1_stderr": 0.015201522246299953, + "mc2": 0.4099653189995733, + "mc2_stderr": 0.014725570270046994 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2857142857142857, + "acc_stderr": 0.015531620786986732, + "acc_norm": 0.34946871310507677, + "acc_norm_stderr": 0.01639279708576984 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + 
"harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "ITT-AF/ITT-42dot_LLM-PLM-1.3B-v4.0", + "model_sha": "498a506ef58ee8018caa360731cbfeb61ddd2ef5", + "model_dtype": "torch.float16", + "lighteval_sha": "", + 
"num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/ITT-AF/ITT-42dot_LLM-PLM-1.3B-v5.0/result_2024-03-05 02:04:33.json b/ITT-AF/ITT-42dot_LLM-PLM-1.3B-v5.0/result_2024-03-05 02:04:33.json new file mode 100644 index 0000000000000000000000000000000000000000..a40d95321d477d3e81961364a754fb234727375a --- /dev/null +++ b/ITT-AF/ITT-42dot_LLM-PLM-1.3B-v5.0/result_2024-03-05 02:04:33.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2815699658703072, + "acc_stderr": 0.013143376735009015, + "acc_norm": 0.33532423208191126, + "acc_norm_stderr": 0.013796182947785566 + }, + "harness|ko_hellaswag|10": { + "acc": 0.360884285998805, + "acc_stderr": 0.0047927552358235275, + "acc_norm": 0.45956980681139215, + "acc_norm_stderr": 0.004973442060741621 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.22807017543859648, + "acc_stderr": 0.03218093795602357, + "acc_norm": 0.22807017543859648, + "acc_norm_stderr": 0.03218093795602357 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.17475728155339806, + "acc_stderr": 0.0376017800602662, + "acc_norm": 0.17475728155339806, + "acc_norm_stderr": 0.0376017800602662 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.25798212005108556, + "acc_stderr": 0.01564583018834895, + "acc_norm": 0.25798212005108556, + "acc_norm_stderr": 0.01564583018834895 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3037037037037037, + "acc_stderr": 0.03972552884785137, + "acc_norm": 0.3037037037037037, + "acc_norm_stderr": 0.03972552884785137 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.28085106382978725, + "acc_stderr": 0.029379170464124818, + "acc_norm": 0.28085106382978725, + "acc_norm_stderr": 0.029379170464124818 + }, + 
"harness|ko_mmlu_virology|5": { + "acc": 0.3373493975903614, + "acc_stderr": 0.0368078369072758, + "acc_norm": 0.3373493975903614, + "acc_norm_stderr": 0.0368078369072758 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2765273311897106, + "acc_stderr": 0.02540383297817961, + "acc_norm": 0.2765273311897106, + "acc_norm_stderr": 0.02540383297817961 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.25112107623318386, + "acc_stderr": 0.0291052208332246, + "acc_norm": 0.25112107623318386, + "acc_norm_stderr": 0.0291052208332246 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.25190839694656486, + "acc_stderr": 0.03807387116306086, + "acc_norm": 0.25190839694656486, + "acc_norm_stderr": 0.03807387116306086 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.18686868686868688, + "acc_stderr": 0.027772533334218974, + "acc_norm": 0.18686868686868688, + "acc_norm_stderr": 0.027772533334218974 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.296551724137931, + "acc_stderr": 0.038061426873099935, + "acc_norm": 0.296551724137931, + "acc_norm_stderr": 0.038061426873099935 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.0438986995680878, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.0438986995680878 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.25630252100840334, + "acc_stderr": 0.028359620870533946, + "acc_norm": 0.25630252100840334, + "acc_norm_stderr": 0.028359620870533946 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2358974358974359, + "acc_stderr": 0.021525965407408726, + "acc_norm": 0.2358974358974359, + "acc_norm_stderr": 0.021525965407408726 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + 
"acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.04414343666854932, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.04414343666854932 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.17733990147783252, + "acc_stderr": 0.026874337276808342, + "acc_norm": 0.17733990147783252, + "acc_norm_stderr": 0.026874337276808342 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2645161290322581, + "acc_stderr": 0.025091892378859275, + "acc_norm": 0.2645161290322581, + "acc_norm_stderr": 0.025091892378859275 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.24358974358974358, + "acc_stderr": 0.028120966503914404, + "acc_norm": 0.24358974358974358, + "acc_norm_stderr": 0.028120966503914404 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.24150943396226415, + "acc_stderr": 0.026341480371118352, + "acc_norm": 0.24150943396226415, + "acc_norm_stderr": 0.026341480371118352 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2909090909090909, + "acc_stderr": 0.04350271442923243, + "acc_norm": 0.2909090909090909, + "acc_norm_stderr": 0.04350271442923243 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.23333333333333334, + "acc_stderr": 0.025787874220959336, + "acc_norm": 0.23333333333333334, + "acc_norm_stderr": 0.025787874220959336 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.038020397601079024, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.038020397601079024 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.25870646766169153, + "acc_stderr": 0.03096590312357304, + "acc_norm": 0.25870646766169153, + "acc_norm_stderr": 0.03096590312357304 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.19653179190751446, + 
"acc_stderr": 0.030299574664788137, + "acc_norm": 0.19653179190751446, + "acc_norm_stderr": 0.030299574664788137 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.24338624338624337, + "acc_stderr": 0.022101128787415415, + "acc_norm": 0.24338624338624337, + "acc_norm_stderr": 0.022101128787415415 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.25, + "acc_stderr": 0.03621034121889507, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03621034121889507 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.22, + "acc_stderr": 0.0416333199893227, + "acc_norm": 0.22, + "acc_norm_stderr": 0.0416333199893227 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.21965317919075145, + "acc_stderr": 0.022289638852617904, + "acc_norm": 0.21965317919075145, + "acc_norm_stderr": 0.022289638852617904 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2331288343558282, + "acc_stderr": 0.0332201579577674, + "acc_norm": 0.2331288343558282, + "acc_norm_stderr": 0.0332201579577674 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2716049382716049, + "acc_stderr": 0.02474862449053737, + "acc_norm": 0.2716049382716049, + "acc_norm_stderr": 0.02474862449053737 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.26424870466321243, + "acc_stderr": 0.03182155050916646, + "acc_norm": 0.26424870466321243, + "acc_norm_stderr": 0.03182155050916646 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2982456140350877, + "acc_stderr": 0.04303684033537316, + "acc_norm": 0.2982456140350877, + "acc_norm_stderr": 0.04303684033537316 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.24036697247706423, + 
"acc_stderr": 0.01832060732096407, + "acc_norm": 0.24036697247706423, + "acc_norm_stderr": 0.01832060732096407 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.040061680838488774, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.040061680838488774 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.02564686309713791, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.02564686309713791 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.36363636363636365, + "acc_stderr": 0.04391326286724071, + "acc_norm": 0.36363636363636365, + "acc_norm_stderr": 0.04391326286724071 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.19736842105263158, + "acc_stderr": 0.03238981601699397, + "acc_norm": 0.19736842105263158, + "acc_norm_stderr": 0.03238981601699397 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.24019607843137256, + "acc_stderr": 0.017282760695167432, + "acc_norm": 0.24019607843137256, + "acc_norm_stderr": 0.017282760695167432 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.26595744680851063, + "acc_stderr": 0.02635806569888059, + "acc_norm": 0.26595744680851063, + "acc_norm_stderr": 0.02635806569888059 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.24107142857142858, + "acc_stderr": 0.04059867246952687, + "acc_norm": 0.24107142857142858, + "acc_norm_stderr": 0.04059867246952687 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3287037037037037, + "acc_stderr": 0.03203614084670058, + "acc_norm": 0.3287037037037037, + "acc_norm_stderr": 0.03203614084670058 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27262569832402234, + "acc_stderr": 0.014893391735249608, + "acc_norm": 0.27262569832402234, + "acc_norm_stderr": 0.014893391735249608 + }, + 
"harness|ko_mmlu_college_computer_science|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4338235294117647, + "acc_stderr": 0.03010563657001664, + "acc_norm": 0.4338235294117647, + "acc_norm_stderr": 0.03010563657001664 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2, + "acc_stderr": 0.025607375986579164, + "acc_norm": 0.2, + "acc_norm_stderr": 0.025607375986579164 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.26582278481012656, + "acc_stderr": 0.028756799629658342, + "acc_norm": 0.26582278481012656, + "acc_norm_stderr": 0.028756799629658342 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.24185136897001303, + "acc_stderr": 0.010936550813827065, + "acc_norm": 0.24185136897001303, + "acc_norm_stderr": 0.010936550813827065 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.24019607843137256, + "acc_stderr": 0.02998373305591362, + "acc_norm": 0.24019607843137256, + "acc_norm_stderr": 0.02998373305591362 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2606060606060606, + "acc_stderr": 0.03427743175816524, + "acc_norm": 0.2606060606060606, + "acc_norm_stderr": 0.03427743175816524 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3072215422276622, + "mc1_stderr": 0.01615020132132301, + "mc2": 0.4406215488693316, + "mc2_stderr": 0.014875277546161027 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.27036599763872493, + "acc_stderr": 0.015270152942068405, + "acc_norm": 0.33293978748524206, + "acc_norm_stderr": 0.016202431208373797 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + 
"harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + 
"harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "ITT-AF/ITT-42dot_LLM-PLM-1.3B-v5.0", + "model_sha": "3ac945231c72cd9d5edda7cf0121907ad11198e9", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/ITT-AF/ITT-42dot_LLM-PLM-1.3B-v6.0/result_2024-03-06 07:40:29.json b/ITT-AF/ITT-42dot_LLM-PLM-1.3B-v6.0/result_2024-03-06 07:40:29.json new file mode 100644 index 0000000000000000000000000000000000000000..227cca3cae80bcdb06d099b79454a7f9e354a54a --- /dev/null +++ b/ITT-AF/ITT-42dot_LLM-PLM-1.3B-v6.0/result_2024-03-06 07:40:29.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.27559726962457337, + "acc_stderr": 0.01305716965576184, + "acc_norm": 0.33447098976109213, + "acc_norm_stderr": 0.013787460322441379 + }, + "harness|ko_hellaswag|10": { + "acc": 0.36138219478191597, + "acc_stderr": 0.004794191785967943, + "acc_norm": 0.45976897032463654, + "acc_norm_stderr": 0.004973602904247795 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.30409356725146197, + "acc_stderr": 0.03528211258245231, + "acc_norm": 0.30409356725146197, + "acc_norm_stderr": 0.03528211258245231 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.13592233009708737, + "acc_stderr": 0.03393295729761012, + "acc_norm": 0.13592233009708737, + "acc_norm_stderr": 
0.03393295729761012 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.22988505747126436, + "acc_stderr": 0.01504630184669182, + "acc_norm": 0.22988505747126436, + "acc_norm_stderr": 0.01504630184669182 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2518518518518518, + "acc_stderr": 0.03749850709174021, + "acc_norm": 0.2518518518518518, + "acc_norm_stderr": 0.03749850709174021 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2978723404255319, + "acc_stderr": 0.02989614568209546, + "acc_norm": 0.2978723404255319, + "acc_norm_stderr": 0.02989614568209546 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.2891566265060241, + "acc_stderr": 0.03529486801511115, + "acc_norm": 0.2891566265060241, + "acc_norm_stderr": 0.03529486801511115 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.26688102893890675, + "acc_stderr": 0.025122637608816646, + "acc_norm": 0.26688102893890675, + "acc_norm_stderr": 0.025122637608816646 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.23318385650224216, + "acc_stderr": 0.02838039114709472, + "acc_norm": 0.23318385650224216, + "acc_norm_stderr": 0.02838039114709472 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.25190839694656486, + "acc_stderr": 0.03807387116306086, + "acc_norm": 0.25190839694656486, + "acc_norm_stderr": 0.03807387116306086 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.20202020202020202, + "acc_stderr": 0.028606204289229872, + "acc_norm": 0.20202020202020202, + "acc_norm_stderr": 0.028606204289229872 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2620689655172414, + "acc_stderr": 0.03664666337225256, + "acc_norm": 0.2620689655172414, + 
"acc_norm_stderr": 0.03664666337225256 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.04220773659171453, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.04220773659171453 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.2815126050420168, + "acc_stderr": 0.029213549414372174, + "acc_norm": 0.2815126050420168, + "acc_norm_stderr": 0.029213549414372174 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.24358974358974358, + "acc_stderr": 0.021763733684173916, + "acc_norm": 0.24358974358974358, + "acc_norm_stderr": 0.021763733684173916 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542129, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542129 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.28703703703703703, + "acc_stderr": 0.043733130409147614, + "acc_norm": 0.28703703703703703, + "acc_norm_stderr": 0.043733130409147614 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.22660098522167488, + "acc_stderr": 0.029454863835292975, + "acc_norm": 0.22660098522167488, + "acc_norm_stderr": 0.029454863835292975 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3, + "acc_stderr": 0.02606936229533513, + "acc_norm": 0.3, + "acc_norm_stderr": 0.02606936229533513 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.26495726495726496, + "acc_stderr": 0.028911208802749475, + "acc_norm": 0.26495726495726496, + "acc_norm_stderr": 0.028911208802749475 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.24150943396226415, + "acc_stderr": 0.026341480371118352, + "acc_norm": 0.24150943396226415, + "acc_norm_stderr": 0.026341480371118352 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2636363636363636, + "acc_stderr": 0.04220224692971987, + "acc_norm": 
0.2636363636363636, + "acc_norm_stderr": 0.04220224692971987 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.27037037037037037, + "acc_stderr": 0.027080372815145668, + "acc_norm": 0.27037037037037037, + "acc_norm_stderr": 0.027080372815145668 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3973509933774834, + "acc_stderr": 0.03995524007681681, + "acc_norm": 0.3973509933774834, + "acc_norm_stderr": 0.03995524007681681 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.29850746268656714, + "acc_stderr": 0.03235743789355044, + "acc_norm": 0.29850746268656714, + "acc_norm_stderr": 0.03235743789355044 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.21965317919075145, + "acc_stderr": 0.031568093627031744, + "acc_norm": 0.21965317919075145, + "acc_norm_stderr": 0.031568093627031744 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.023068188848261128, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.023068188848261128 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.03476590104304134, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.03476590104304134 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909284, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909284 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.24277456647398843, + "acc_stderr": 0.0230836585869842, + "acc_norm": 0.24277456647398843, + "acc_norm_stderr": 0.0230836585869842 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.24539877300613497, + "acc_stderr": 0.03380939813943354, + "acc_norm": 0.24539877300613497, + "acc_norm_stderr": 0.03380939813943354 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3055555555555556, + "acc_stderr": 
0.025630824975621348, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.025630824975621348 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.2849740932642487, + "acc_stderr": 0.032577140777096614, + "acc_norm": 0.2849740932642487, + "acc_norm_stderr": 0.032577140777096614 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.042270544512322, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.042270544512322 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.26605504587155965, + "acc_stderr": 0.018946022322225586, + "acc_norm": 0.26605504587155965, + "acc_norm_stderr": 0.018946022322225586 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04216370213557835, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04216370213557835 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2875816993464052, + "acc_stderr": 0.02591780611714716, + "acc_norm": 0.2875816993464052, + "acc_norm_stderr": 0.02591780611714716 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.34710743801652894, + "acc_stderr": 0.04345724570292534, + "acc_norm": 0.34710743801652894, + "acc_norm_stderr": 0.04345724570292534 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.20394736842105263, + "acc_stderr": 0.032790004063100495, + "acc_norm": 0.20394736842105263, + "acc_norm_stderr": 0.032790004063100495 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2565359477124183, + "acc_stderr": 0.017667841612379002, + "acc_norm": 0.2565359477124183, + "acc_norm_stderr": 0.017667841612379002 + }, + "harness|ko_mmlu_professional_accounting|5": 
{ + "acc": 0.2695035460992908, + "acc_stderr": 0.02646903681859063, + "acc_norm": 0.2695035460992908, + "acc_norm_stderr": 0.02646903681859063 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.21428571428571427, + "acc_stderr": 0.03894641120044792, + "acc_norm": 0.21428571428571427, + "acc_norm_stderr": 0.03894641120044792 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.030546745264953202, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.030546745264953202 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27262569832402234, + "acc_stderr": 0.014893391735249608, + "acc_norm": 0.27262569832402234, + "acc_norm_stderr": 0.014893391735249608 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.43014705882352944, + "acc_stderr": 0.030074971917302875, + "acc_norm": 0.43014705882352944, + "acc_norm_stderr": 0.030074971917302875 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2816326530612245, + "acc_stderr": 0.028795185574291293, + "acc_norm": 0.2816326530612245, + "acc_norm_stderr": 0.028795185574291293 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.24472573839662448, + "acc_stderr": 0.027985699387036416, + "acc_norm": 0.24472573839662448, + "acc_norm_stderr": 0.027985699387036416 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.24967405475880053, + "acc_stderr": 0.011054538377832318, + "acc_norm": 0.24967405475880053, + "acc_norm_stderr": 0.011054538377832318 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.028867431449849313, + "acc_norm": 0.21568627450980393, + 
"acc_norm_stderr": 0.028867431449849313 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.23636363636363636, + "acc_stderr": 0.03317505930009179, + "acc_norm": 0.23636363636363636, + "acc_norm_stderr": 0.03317505930009179 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2962056303549572, + "mc1_stderr": 0.015983595101811392, + "mc2": 0.44106273502355514, + "mc2_stderr": 0.01484142550203185 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2550177095631641, + "acc_stderr": 0.01498555953342857, + "acc_norm": 0.33884297520661155, + "acc_norm_stderr": 0.016272952997019124 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + 
"harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "ITT-AF/ITT-42dot_LLM-PLM-1.3B-v6.0", + "model_sha": "cade76911ad628d7812682ec2bb5a8caac484c1a", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/ITT-AF/ITT-42dot_LLM-SFT-1.3B-v1.0/result_2024-01-30 03:32:48.json b/ITT-AF/ITT-42dot_LLM-SFT-1.3B-v1.0/result_2024-01-30 03:32:48.json new file mode 100644 index 0000000000000000000000000000000000000000..fbca9153f05c277b77cc3d0d3b9bdab5a6bde083 --- /dev/null +++ b/ITT-AF/ITT-42dot_LLM-SFT-1.3B-v1.0/result_2024-01-30 03:32:48.json @@ -0,0 +1,444 @@ 
+{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2687713310580205, + "acc_stderr": 0.01295506596371068, + "acc_norm": 0.3361774744027304, + "acc_norm_stderr": 0.01380485502620576 + }, + "harness|ko_hellaswag|10": { + "acc": 0.35700059749053975, + "acc_stderr": 0.004781358113341954, + "acc_norm": 0.45359490141406095, + "acc_norm_stderr": 0.004968244611429385 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.21052631578947367, + "acc_stderr": 0.0312678171466318, + "acc_norm": 0.21052631578947367, + "acc_norm_stderr": 0.0312678171466318 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2621359223300971, + "acc_stderr": 0.043546310772605956, + "acc_norm": 0.2621359223300971, + "acc_norm_stderr": 0.043546310772605956 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2541507024265645, + "acc_stderr": 0.015569254692045774, + "acc_norm": 0.2541507024265645, + "acc_norm_stderr": 0.015569254692045774 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.03591444084196969, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.03591444084196969 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3191489361702128, + "acc_stderr": 0.03047297336338005, + "acc_norm": 0.3191489361702128, + "acc_norm_stderr": 0.03047297336338005 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3373493975903614, + "acc_stderr": 0.03680783690727581, + "acc_norm": 0.3373493975903614, + "acc_norm_stderr": 0.03680783690727581 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.29260450160771706, + "acc_stderr": 0.02583989833487798, + "acc_norm": 0.29260450160771706, + "acc_norm_stderr": 0.02583989833487798 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.27802690582959644, + "acc_stderr": 0.030069584874494047, + "acc_norm": 0.27802690582959644, + "acc_norm_stderr": 
0.030069584874494047 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.31297709923664124, + "acc_stderr": 0.04066962905677698, + "acc_norm": 0.31297709923664124, + "acc_norm_stderr": 0.04066962905677698 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.2878787878787879, + "acc_stderr": 0.03225883512300993, + "acc_norm": 0.2878787878787879, + "acc_norm_stderr": 0.03225883512300993 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.23448275862068965, + "acc_stderr": 0.035306258743465914, + "acc_norm": 0.23448275862068965, + "acc_norm_stderr": 0.035306258743465914 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237655, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237655 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.2773109243697479, + "acc_stderr": 0.029079374539480007, + "acc_norm": 0.2773109243697479, + "acc_norm_stderr": 0.029079374539480007 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.28974358974358977, + "acc_stderr": 0.02300062824368795, + "acc_norm": 0.28974358974358977, + "acc_norm_stderr": 0.02300062824368795 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.32407407407407407, + "acc_stderr": 0.04524596007030048, + "acc_norm": 0.32407407407407407, + "acc_norm_stderr": 0.04524596007030048 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.270935960591133, + "acc_stderr": 0.031270907132976984, + "acc_norm": 
0.270935960591133, + "acc_norm_stderr": 0.031270907132976984 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3, + "acc_stderr": 0.026069362295335134, + "acc_norm": 0.3, + "acc_norm_stderr": 0.026069362295335134 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.32051282051282054, + "acc_stderr": 0.030572811310299604, + "acc_norm": 0.32051282051282054, + "acc_norm_stderr": 0.030572811310299604 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2792452830188679, + "acc_stderr": 0.02761116340239972, + "acc_norm": 0.2792452830188679, + "acc_norm_stderr": 0.02761116340239972 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.32727272727272727, + "acc_stderr": 0.04494290866252088, + "acc_norm": 0.32727272727272727, + "acc_norm_stderr": 0.04494290866252088 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24814814814814815, + "acc_stderr": 0.0263357394040558, + "acc_norm": 0.24814814814814815, + "acc_norm_stderr": 0.0263357394040558 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + "acc_stderr": 0.037345356767871984, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.037345356767871984 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.34328358208955223, + "acc_stderr": 0.03357379665433431, + "acc_norm": 0.34328358208955223, + "acc_norm_stderr": 0.03357379665433431 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2023121387283237, + "acc_stderr": 0.030631145539198823, + "acc_norm": 0.2023121387283237, + "acc_norm_stderr": 0.030631145539198823 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2671957671957672, + "acc_stderr": 0.02278967314577657, + "acc_norm": 0.2671957671957672, + "acc_norm_stderr": 0.02278967314577657 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2152777777777778, + "acc_stderr": 0.03437079344106135, + "acc_norm": 0.2152777777777778, + "acc_norm_stderr": 0.03437079344106135 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.18, + 
"acc_stderr": 0.038612291966536955, + "acc_norm": 0.18, + "acc_norm_stderr": 0.038612291966536955 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2514450867052023, + "acc_stderr": 0.02335736578587403, + "acc_norm": 0.2514450867052023, + "acc_norm_stderr": 0.02335736578587403 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.26380368098159507, + "acc_stderr": 0.03462419931615623, + "acc_norm": 0.26380368098159507, + "acc_norm_stderr": 0.03462419931615623 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2716049382716049, + "acc_stderr": 0.02474862449053737, + "acc_norm": 0.2716049382716049, + "acc_norm_stderr": 0.02474862449053737 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.23834196891191708, + "acc_stderr": 0.030748905363909895, + "acc_norm": 0.23834196891191708, + "acc_norm_stderr": 0.030748905363909895 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.04049339297748142, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.04049339297748142 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.30275229357798167, + "acc_stderr": 0.01969871143475635, + "acc_norm": 0.30275229357798167, + "acc_norm_stderr": 0.01969871143475635 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.29365079365079366, + "acc_stderr": 0.040735243221471255, + "acc_norm": 0.29365079365079366, + "acc_norm_stderr": 0.040735243221471255 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.24183006535947713, + "acc_stderr": 0.024518195641879334, + "acc_norm": 0.24183006535947713, + "acc_norm_stderr": 0.024518195641879334 + }, + "harness|ko_mmlu_business_ethics|5": { + 
"acc": 0.26, + "acc_stderr": 0.04408440022768077, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768077 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.3140495867768595, + "acc_stderr": 0.04236964753041018, + "acc_norm": 0.3140495867768595, + "acc_norm_stderr": 0.04236964753041018 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.21052631578947367, + "acc_stderr": 0.03317672787533157, + "acc_norm": 0.21052631578947367, + "acc_norm_stderr": 0.03317672787533157 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.26633986928104575, + "acc_stderr": 0.01788318813466719, + "acc_norm": 0.26633986928104575, + "acc_norm_stderr": 0.01788318813466719 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.26595744680851063, + "acc_stderr": 0.026358065698880585, + "acc_norm": 0.26595744680851063, + "acc_norm_stderr": 0.026358065698880585 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25, + "acc_stderr": 0.04109974682633932, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04109974682633932 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2824074074074074, + "acc_stderr": 0.03070137211151094, + "acc_norm": 0.2824074074074074, + "acc_norm_stderr": 0.03070137211151094 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2737430167597765, + "acc_stderr": 0.014912413096372432, + "acc_norm": 0.2737430167597765, + "acc_norm_stderr": 0.014912413096372432 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.18, + "acc_stderr": 0.038612291966536955, + "acc_norm": 0.18, + "acc_norm_stderr": 0.038612291966536955 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.35661764705882354, + "acc_stderr": 0.029097209568411945, + "acc_norm": 0.35661764705882354, + "acc_norm_stderr": 0.029097209568411945 + }, + 
"harness|ko_mmlu_security_studies|5": { + "acc": 0.20816326530612245, + "acc_stderr": 0.0259911176728133, + "acc_norm": 0.20816326530612245, + "acc_norm_stderr": 0.0259911176728133 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.3291139240506329, + "acc_stderr": 0.03058732629470236, + "acc_norm": 0.3291139240506329, + "acc_norm_stderr": 0.03058732629470236 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2542372881355932, + "acc_stderr": 0.01112112900784068, + "acc_norm": 0.2542372881355932, + "acc_norm_stderr": 0.01112112900784068 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.30392156862745096, + "acc_stderr": 0.03228210387037893, + "acc_norm": 0.30392156862745096, + "acc_norm_stderr": 0.03228210387037893 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.03453131801885415, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.03453131801885415 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.26805385556915545, + "mc1_stderr": 0.015506204722834555, + "mc2": 0.4211786149339985, + "mc2_stderr": 0.014990275190279451 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2727272727272727, + "acc_stderr": 0.015311853110300352, + "acc_norm": 0.3447461629279811, + "acc_norm_stderr": 0.01634064990541869 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + 
"harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + 
"harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "ITT-AF/ITT-42dot_LLM-SFT-1.3B-v1.0", + "model_sha": "30f319b19f45fcc54811ed51b1008d1b1cea1571", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/ITT-AF/ITT-42dot_LLM-SFT-1.3B-v2.0/result_2024-02-08 12:04:27.json b/ITT-AF/ITT-42dot_LLM-SFT-1.3B-v2.0/result_2024-02-08 12:04:27.json new file mode 100644 index 0000000000000000000000000000000000000000..9a0b27e0f11eb5d2f96f4ce9885427f81dfe9738 --- /dev/null +++ b/ITT-AF/ITT-42dot_LLM-SFT-1.3B-v2.0/result_2024-02-08 12:04:27.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.28498293515358364, + "acc_stderr": 0.013191348179838792, + "acc_norm": 0.34982935153583616, + "acc_norm_stderr": 0.013936809212158287 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3681537542322247, + "acc_stderr": 0.004813177057496272, + "acc_norm": 0.4675363473411671, + "acc_norm_stderr": 0.004979252954977322 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.03615507630310935, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.03615507630310935 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.22330097087378642, + "acc_stderr": 0.04123553189891431, + "acc_norm": 0.22330097087378642, + "acc_norm_stderr": 0.04123553189891431 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2413793103448276, + "acc_stderr": 0.015302380123542087, + "acc_norm": 0.2413793103448276, + "acc_norm_stderr": 0.015302380123542087 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2, + "acc_stderr": 0.03455473702325438, + "acc_norm": 0.2, + "acc_norm_stderr": 0.03455473702325438 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + 
"harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2553191489361702, + "acc_stderr": 0.02850485647051421, + "acc_norm": 0.2553191489361702, + "acc_norm_stderr": 0.02850485647051421 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3072289156626506, + "acc_stderr": 0.03591566797824665, + "acc_norm": 0.3072289156626506, + "acc_norm_stderr": 0.03591566797824665 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.22508038585209003, + "acc_stderr": 0.023720088516179027, + "acc_norm": 0.22508038585209003, + "acc_norm_stderr": 0.023720088516179027 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.2600896860986547, + "acc_stderr": 0.02944249558585747, + "acc_norm": 0.2600896860986547, + "acc_norm_stderr": 0.02944249558585747 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.24427480916030533, + "acc_stderr": 0.03768335959728745, + "acc_norm": 0.24427480916030533, + "acc_norm_stderr": 0.03768335959728745 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.1919191919191919, + "acc_stderr": 0.028057791672989017, + "acc_norm": 0.1919191919191919, + "acc_norm_stderr": 0.028057791672989017 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2413793103448276, + "acc_stderr": 0.03565998174135302, + "acc_norm": 0.2413793103448276, + "acc_norm_stderr": 0.03565998174135302 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.04158307533083286, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.04158307533083286 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.2184873949579832, + "acc_stderr": 0.02684151432295894, + "acc_norm": 0.2184873949579832, + "acc_norm_stderr": 0.02684151432295894 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2153846153846154, + "acc_stderr": 0.020843034557462874, + "acc_norm": 
0.2153846153846154, + "acc_norm_stderr": 0.020843034557462874 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.17, + "acc_stderr": 0.03775251680686371, + "acc_norm": 0.17, + "acc_norm_stderr": 0.03775251680686371 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.04236511258094633, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.04236511258094633 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.17733990147783252, + "acc_stderr": 0.026874337276808342, + "acc_norm": 0.17733990147783252, + "acc_norm_stderr": 0.026874337276808342 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2129032258064516, + "acc_stderr": 0.023287665127268545, + "acc_norm": 0.2129032258064516, + "acc_norm_stderr": 0.023287665127268545 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2863247863247863, + "acc_stderr": 0.02961432369045665, + "acc_norm": 0.2863247863247863, + "acc_norm_stderr": 0.02961432369045665 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.22264150943396227, + "acc_stderr": 0.02560423347089909, + "acc_norm": 0.22264150943396227, + "acc_norm_stderr": 0.02560423347089909 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.23636363636363636, + "acc_stderr": 0.04069306319721375, + "acc_norm": 0.23636363636363636, + "acc_norm_stderr": 0.04069306319721375 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.22962962962962963, + "acc_stderr": 0.025644108639267624, + "acc_norm": 0.22962962962962963, + "acc_norm_stderr": 0.025644108639267624 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2582781456953642, + "acc_stderr": 0.035737053147634576, + "acc_norm": 0.2582781456953642, + "acc_norm_stderr": 0.035737053147634576 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.2736318407960199, + "acc_stderr": 
0.03152439186555404, + "acc_norm": 0.2736318407960199, + "acc_norm_stderr": 0.03152439186555404 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2138728323699422, + "acc_stderr": 0.031265112061730424, + "acc_norm": 0.2138728323699422, + "acc_norm_stderr": 0.031265112061730424 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.02141168439369418, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.02141168439369418 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2569444444444444, + "acc_stderr": 0.03653946969442099, + "acc_norm": 0.2569444444444444, + "acc_norm_stderr": 0.03653946969442099 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.23410404624277456, + "acc_stderr": 0.022797110278071134, + "acc_norm": 0.23410404624277456, + "acc_norm_stderr": 0.022797110278071134 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.19631901840490798, + "acc_stderr": 0.031207970394709225, + "acc_norm": 0.19631901840490798, + "acc_norm_stderr": 0.031207970394709225 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2623456790123457, + "acc_stderr": 0.02447722285613511, + "acc_norm": 0.2623456790123457, + "acc_norm_stderr": 0.02447722285613511 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421296, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421296 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.20725388601036268, + "acc_stderr": 0.029252823291803617, + "acc_norm": 0.20725388601036268, + "acc_norm_stderr": 0.029252823291803617 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 
0.21929824561403508, + "acc_stderr": 0.03892431106518752, + "acc_norm": 0.21929824561403508, + "acc_norm_stderr": 0.03892431106518752 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.21834862385321102, + "acc_stderr": 0.01771260052872273, + "acc_norm": 0.21834862385321102, + "acc_norm_stderr": 0.01771260052872273 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3412698412698413, + "acc_stderr": 0.042407993275749255, + "acc_norm": 0.3412698412698413, + "acc_norm_stderr": 0.042407993275749255 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.22875816993464052, + "acc_stderr": 0.02405102973991226, + "acc_norm": 0.22875816993464052, + "acc_norm_stderr": 0.02405102973991226 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.24793388429752067, + "acc_stderr": 0.03941897526516304, + "acc_norm": 0.24793388429752067, + "acc_norm_stderr": 0.03941897526516304 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.21052631578947367, + "acc_stderr": 0.03317672787533157, + "acc_norm": 0.21052631578947367, + "acc_norm_stderr": 0.03317672787533157 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.25163398692810457, + "acc_stderr": 0.01755581809132227, + "acc_norm": 0.25163398692810457, + "acc_norm_stderr": 0.01755581809132227 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.23404255319148937, + "acc_stderr": 0.02525786135943241, + "acc_norm": 0.23404255319148937, + "acc_norm_stderr": 0.02525786135943241 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25892857142857145, + "acc_stderr": 0.04157751539865629, + "acc_norm": 0.25892857142857145, + "acc_norm_stderr": 0.04157751539865629 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3472222222222222, + "acc_stderr": 0.032468872436376486, + "acc_norm": 0.3472222222222222, + "acc_norm_stderr": 
0.032468872436376486 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2737430167597765, + "acc_stderr": 0.014912413096372432, + "acc_norm": 0.2737430167597765, + "acc_norm_stderr": 0.014912413096372432 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542126, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542126 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3382352941176471, + "acc_stderr": 0.02873932851398357, + "acc_norm": 0.3382352941176471, + "acc_norm_stderr": 0.02873932851398357 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2163265306122449, + "acc_stderr": 0.026358916334904007, + "acc_norm": 0.2163265306122449, + "acc_norm_stderr": 0.026358916334904007 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2869198312236287, + "acc_stderr": 0.029443773022594693, + "acc_norm": 0.2869198312236287, + "acc_norm_stderr": 0.029443773022594693 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2438070404172099, + "acc_stderr": 0.010966507972178479, + "acc_norm": 0.2438070404172099, + "acc_norm_stderr": 0.010966507972178479 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.24019607843137256, + "acc_stderr": 0.02998373305591361, + "acc_norm": 0.24019607843137256, + "acc_norm_stderr": 0.02998373305591361 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.21212121212121213, + "acc_stderr": 0.03192271569548299, + "acc_norm": 0.21212121212121213, + "acc_norm_stderr": 0.03192271569548299 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2484700122399021, + "mc1_stderr": 0.015127427096520691, + "mc2": 0.4168577725013622, + "mc2_stderr": 0.015076547753296358 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.26092089728453366, + "acc_stderr": 0.015097836279964204, 
+ "acc_norm": 0.31286894923258557, + "acc_norm_stderr": 0.015941010118302658 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + 
"harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "ITT-AF/ITT-42dot_LLM-SFT-1.3B-v2.0", + "model_sha": "5fecf0d23c52b71ac2d33525956308a73648c7ac", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/ITT-AF/ITT-42dot_LLM-SFT-1.3B-v3.0/result_2024-02-19 10:37:25.json b/ITT-AF/ITT-42dot_LLM-SFT-1.3B-v3.0/result_2024-02-19 10:37:25.json new file mode 100644 index 0000000000000000000000000000000000000000..8d81f941be122a70d6c22bdfbbe7af6bdf63d877 --- /dev/null +++ b/ITT-AF/ITT-42dot_LLM-SFT-1.3B-v3.0/result_2024-02-19 10:37:25.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2645051194539249, + "acc_stderr": 0.012889272949313368, + "acc_norm": 0.3395904436860068, + "acc_norm_stderr": 0.013839039762820166 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3659629555865365, + "acc_stderr": 0.004807146925162057, + "acc_norm": 0.46265684126667994, + "acc_norm_stderr": 0.004975845335086623 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.28654970760233917, + "acc_stderr": 0.034678266857038266, + "acc_norm": 
0.28654970760233917, + "acc_norm_stderr": 0.034678266857038266 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.1941747572815534, + "acc_stderr": 0.03916667762822584, + "acc_norm": 0.1941747572815534, + "acc_norm_stderr": 0.03916667762822584 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.24265644955300128, + "acc_stderr": 0.015329888940899873, + "acc_norm": 0.24265644955300128, + "acc_norm_stderr": 0.015329888940899873 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.22962962962962963, + "acc_stderr": 0.036333844140734636, + "acc_norm": 0.22962962962962963, + "acc_norm_stderr": 0.036333844140734636 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.28085106382978725, + "acc_stderr": 0.02937917046412482, + "acc_norm": 0.28085106382978725, + "acc_norm_stderr": 0.02937917046412482 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3192771084337349, + "acc_stderr": 0.03629335329947859, + "acc_norm": 0.3192771084337349, + "acc_norm_stderr": 0.03629335329947859 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.24437299035369775, + "acc_stderr": 0.024406162094668893, + "acc_norm": 0.24437299035369775, + "acc_norm_stderr": 0.024406162094668893 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3004484304932735, + "acc_stderr": 0.030769352008229136, + "acc_norm": 0.3004484304932735, + "acc_norm_stderr": 0.030769352008229136 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.26717557251908397, + "acc_stderr": 0.03880848301082395, + "acc_norm": 0.26717557251908397, + "acc_norm_stderr": 0.03880848301082395 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.1919191919191919, + "acc_stderr": 0.028057791672989017, + 
"acc_norm": 0.1919191919191919, + "acc_norm_stderr": 0.028057791672989017 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.25517241379310346, + "acc_stderr": 0.03632984052707842, + "acc_norm": 0.25517241379310346, + "acc_norm_stderr": 0.03632984052707842 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.042207736591714534, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.042207736591714534 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.23109243697478993, + "acc_stderr": 0.027381406927868966, + "acc_norm": 0.23109243697478993, + "acc_norm_stderr": 0.027381406927868966 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.25384615384615383, + "acc_stderr": 0.022066054378726257, + "acc_norm": 0.25384615384615383, + "acc_norm_stderr": 0.022066054378726257 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.18, + "acc_stderr": 0.03861229196653695, + "acc_norm": 0.18, + "acc_norm_stderr": 0.03861229196653695 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.25, + "acc_stderr": 0.04186091791394607, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04186091791394607 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.15763546798029557, + "acc_stderr": 0.025639014131172408, + "acc_norm": 0.15763546798029557, + "acc_norm_stderr": 0.025639014131172408 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.25806451612903225, + "acc_stderr": 0.024892469172462836, + "acc_norm": 0.25806451612903225, + "acc_norm_stderr": 0.024892469172462836 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2692307692307692, + "acc_stderr": 0.029058588303748845, + "acc_norm": 0.2692307692307692, + "acc_norm_stderr": 0.029058588303748845 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.23773584905660378, + 
"acc_stderr": 0.026199808807561925, + "acc_norm": 0.23773584905660378, + "acc_norm_stderr": 0.026199808807561925 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2545454545454545, + "acc_stderr": 0.04172343038705383, + "acc_norm": 0.2545454545454545, + "acc_norm_stderr": 0.04172343038705383 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.22592592592592592, + "acc_stderr": 0.025497532639609542, + "acc_norm": 0.22592592592592592, + "acc_norm_stderr": 0.025497532639609542 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.25165562913907286, + "acc_stderr": 0.03543304234389985, + "acc_norm": 0.25165562913907286, + "acc_norm_stderr": 0.03543304234389985 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.31343283582089554, + "acc_stderr": 0.032801882053486414, + "acc_norm": 0.31343283582089554, + "acc_norm_stderr": 0.032801882053486414 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.21965317919075145, + "acc_stderr": 0.03156809362703174, + "acc_norm": 0.21965317919075145, + "acc_norm_stderr": 0.03156809362703174 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.23015873015873015, + "acc_stderr": 0.021679219663693145, + "acc_norm": 0.23015873015873015, + "acc_norm_stderr": 0.021679219663693145 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.24305555555555555, + "acc_stderr": 0.03586879280080341, + "acc_norm": 0.24305555555555555, + "acc_norm_stderr": 0.03586879280080341 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.23699421965317918, + "acc_stderr": 0.02289408248992599, + "acc_norm": 0.23699421965317918, + "acc_norm_stderr": 0.02289408248992599 + }, + 
"harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.22085889570552147, + "acc_stderr": 0.03259177392742178, + "acc_norm": 0.22085889570552147, + "acc_norm_stderr": 0.03259177392742178 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.25308641975308643, + "acc_stderr": 0.024191808600713002, + "acc_norm": 0.25308641975308643, + "acc_norm_stderr": 0.024191808600713002 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.19689119170984457, + "acc_stderr": 0.028697873971860667, + "acc_norm": 0.19689119170984457, + "acc_norm_stderr": 0.028697873971860667 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.04049339297748142, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.04049339297748142 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.21834862385321102, + "acc_stderr": 0.017712600528722724, + "acc_norm": 0.21834862385321102, + "acc_norm_stderr": 0.017712600528722724 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.04006168083848878, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.04006168083848878 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.25163398692810457, + "acc_stderr": 0.024848018263875195, + "acc_norm": 0.25163398692810457, + "acc_norm_stderr": 0.024848018263875195 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.24793388429752067, + "acc_stderr": 0.03941897526516304, + "acc_norm": 0.24793388429752067, + "acc_norm_stderr": 0.03941897526516304 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.17763157894736842, + "acc_stderr": 0.03110318238312338, + "acc_norm": 0.17763157894736842, + 
"acc_norm_stderr": 0.03110318238312338 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.018054027458815194, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.018054027458815194 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2553191489361702, + "acc_stderr": 0.02601199293090201, + "acc_norm": 0.2553191489361702, + "acc_norm_stderr": 0.02601199293090201 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.26785714285714285, + "acc_stderr": 0.04203277291467762, + "acc_norm": 0.26785714285714285, + "acc_norm_stderr": 0.04203277291467762 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3101851851851852, + "acc_stderr": 0.031546962856566295, + "acc_norm": 0.3101851851851852, + "acc_norm_stderr": 0.031546962856566295 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27262569832402234, + "acc_stderr": 0.014893391735249608, + "acc_norm": 0.27262569832402234, + "acc_norm_stderr": 0.014893391735249608 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.34191176470588236, + "acc_stderr": 0.02881472242225418, + "acc_norm": 0.34191176470588236, + "acc_norm_stderr": 0.02881472242225418 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.20408163265306123, + "acc_stderr": 0.025801283475090492, + "acc_norm": 0.20408163265306123, + "acc_norm_stderr": 0.025801283475090492 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2869198312236287, + "acc_stderr": 0.029443773022594693, + "acc_norm": 0.2869198312236287, + "acc_norm_stderr": 0.029443773022594693 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 
0.2516297262059974, + "acc_stderr": 0.011083276280441905, + "acc_norm": 0.2516297262059974, + "acc_norm_stderr": 0.011083276280441905 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.23039215686274508, + "acc_stderr": 0.02955429260569507, + "acc_norm": 0.23039215686274508, + "acc_norm_stderr": 0.02955429260569507 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.23636363636363636, + "acc_stderr": 0.033175059300091805, + "acc_norm": 0.23636363636363636, + "acc_norm_stderr": 0.033175059300091805 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24357405140758873, + "mc1_stderr": 0.01502635482491078, + "mc2": 0.4107155702536168, + "mc2_stderr": 0.014821026437600502 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2526564344746163, + "acc_stderr": 0.014939640598798433, + "acc_norm": 0.2987012987012987, + "acc_norm_stderr": 0.01573565739143827 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + 
"harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "ITT-AF/ITT-42dot_LLM-SFT-1.3B-v3.0", + "model_sha": "7bb3f97ba2d9f3ed567364e2b73253fc8efdf37c", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git 
a/ITT-AF/ITT-AF-PLM-1.4B_v0.1/result_2024-07-01 13:23:14.json b/ITT-AF/ITT-AF-PLM-1.4B_v0.1/result_2024-07-01 13:23:14.json new file mode 100644 index 0000000000000000000000000000000000000000..50b5fbb87242aa7fe68c8c46df3e5054832aad24 --- /dev/null +++ b/ITT-AF/ITT-AF-PLM-1.4B_v0.1/result_2024-07-01 13:23:14.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2354948805460751, + "acc_stderr": 0.012399451855004748, + "acc_norm": 0.2909556313993174, + "acc_norm_stderr": 0.013273077865907588 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3286197968532165, + "acc_stderr": 0.004687514708345323, + "acc_norm": 0.4056960764787891, + "acc_norm_stderr": 0.004900227226433394 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.21637426900584794, + "acc_stderr": 0.03158149539338734, + "acc_norm": 0.21637426900584794, + "acc_norm_stderr": 0.03158149539338734 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2524271844660194, + "acc_stderr": 0.04301250399690879, + "acc_norm": 0.2524271844660194, + "acc_norm_stderr": 0.04301250399690879 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2669220945083014, + "acc_stderr": 0.015818450894777562, + "acc_norm": 0.2669220945083014, + "acc_norm_stderr": 0.015818450894777562 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3851851851851852, + "acc_stderr": 0.042039210401562783, + "acc_norm": 0.3851851851851852, + "acc_norm_stderr": 0.042039210401562783 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.22127659574468084, + "acc_stderr": 0.027136349602424045, + "acc_norm": 0.22127659574468084, + "acc_norm_stderr": 0.027136349602424045 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3132530120481928, + "acc_stderr": 0.036108050180310235, + "acc_norm": 0.3132530120481928, + "acc_norm_stderr": 0.036108050180310235 + }, + 
"harness|ko_mmlu_philosophy|5": { + "acc": 0.2990353697749196, + "acc_stderr": 0.02600330111788514, + "acc_norm": 0.2990353697749196, + "acc_norm_stderr": 0.02600330111788514 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.2645739910313901, + "acc_stderr": 0.029605103217038332, + "acc_norm": 0.2645739910313901, + "acc_norm_stderr": 0.029605103217038332 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.21374045801526717, + "acc_stderr": 0.0359546161177469, + "acc_norm": 0.21374045801526717, + "acc_norm_stderr": 0.0359546161177469 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.2474747474747475, + "acc_stderr": 0.030746300742124505, + "acc_norm": 0.2474747474747475, + "acc_norm_stderr": 0.030746300742124505 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.27586206896551724, + "acc_stderr": 0.037245636197746325, + "acc_norm": 0.27586206896551724, + "acc_norm_stderr": 0.037245636197746325 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.18627450980392157, + "acc_stderr": 0.03873958714149351, + "acc_norm": 0.18627450980392157, + "acc_norm_stderr": 0.03873958714149351 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.027553614467863804, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.027553614467863804 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.25384615384615383, + "acc_stderr": 0.022066054378726257, + "acc_norm": 0.25384615384615383, + "acc_norm_stderr": 0.022066054378726257 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.18, + "acc_stderr": 0.038612291966536955, + "acc_norm": 0.18, + "acc_norm_stderr": 
0.038612291966536955 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.2037037037037037, + "acc_stderr": 0.038935425188248475, + "acc_norm": 0.2037037037037037, + "acc_norm_stderr": 0.038935425188248475 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.27586206896551724, + "acc_stderr": 0.03144712581678242, + "acc_norm": 0.27586206896551724, + "acc_norm_stderr": 0.03144712581678242 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2806451612903226, + "acc_stderr": 0.025560604721022884, + "acc_norm": 0.2806451612903226, + "acc_norm_stderr": 0.025560604721022884 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.02723601394619669, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.02723601394619669 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.22264150943396227, + "acc_stderr": 0.025604233470899105, + "acc_norm": 0.22264150943396227, + "acc_norm_stderr": 0.025604233470899105 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.23636363636363636, + "acc_stderr": 0.040693063197213775, + "acc_norm": 0.23636363636363636, + "acc_norm_stderr": 0.040693063197213775 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2851851851851852, + "acc_stderr": 0.027528599210340492, + "acc_norm": 0.2851851851851852, + "acc_norm_stderr": 0.027528599210340492 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2185430463576159, + "acc_stderr": 0.03374235550425694, + "acc_norm": 0.2185430463576159, + "acc_norm_stderr": 0.03374235550425694 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.24875621890547264, + "acc_stderr": 0.030567675938916714, + "acc_norm": 0.24875621890547264, + "acc_norm_stderr": 0.030567675938916714 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.24277456647398843, + "acc_stderr": 0.0326926380614177, + "acc_norm": 0.24277456647398843, + "acc_norm_stderr": 0.0326926380614177 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 
0.21693121693121692, + "acc_stderr": 0.021227082449445055, + "acc_norm": 0.21693121693121692, + "acc_norm_stderr": 0.021227082449445055 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2361111111111111, + "acc_stderr": 0.03551446610810826, + "acc_norm": 0.2361111111111111, + "acc_norm_stderr": 0.03551446610810826 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.16, + "acc_stderr": 0.0368452949177471, + "acc_norm": 0.16, + "acc_norm_stderr": 0.0368452949177471 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720683, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720683 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.25722543352601157, + "acc_stderr": 0.023532925431044287, + "acc_norm": 0.25722543352601157, + "acc_norm_stderr": 0.023532925431044287 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3128834355828221, + "acc_stderr": 0.036429145782924034, + "acc_norm": 0.3128834355828221, + "acc_norm_stderr": 0.036429145782924034 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2623456790123457, + "acc_stderr": 0.024477222856135114, + "acc_norm": 0.2623456790123457, + "acc_norm_stderr": 0.024477222856135114 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.3005181347150259, + "acc_stderr": 0.03308818594415751, + "acc_norm": 0.3005181347150259, + "acc_norm_stderr": 0.03308818594415751 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.040969851398436716, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.040969851398436716 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.22385321100917432, + "acc_stderr": 0.017871217767790222, + "acc_norm": 0.22385321100917432, + "acc_norm_stderr": 0.017871217767790222 + }, + 
"harness|ko_mmlu_formal_logic|5": { + "acc": 0.12698412698412698, + "acc_stderr": 0.02978041752268843, + "acc_norm": 0.12698412698412698, + "acc_norm_stderr": 0.02978041752268843 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.22875816993464052, + "acc_stderr": 0.024051029739912258, + "acc_norm": 0.22875816993464052, + "acc_norm_stderr": 0.024051029739912258 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.36363636363636365, + "acc_stderr": 0.043913262867240704, + "acc_norm": 0.36363636363636365, + "acc_norm_stderr": 0.043913262867240704 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.21052631578947367, + "acc_stderr": 0.03317672787533157, + "acc_norm": 0.21052631578947367, + "acc_norm_stderr": 0.03317672787533157 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.22712418300653595, + "acc_stderr": 0.01694985327921237, + "acc_norm": 0.22712418300653595, + "acc_norm_stderr": 0.01694985327921237 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2765957446808511, + "acc_stderr": 0.026684564340461, + "acc_norm": 0.2765957446808511, + "acc_norm_stderr": 0.026684564340461 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.21428571428571427, + "acc_stderr": 0.038946411200447915, + "acc_norm": 0.21428571428571427, + "acc_norm_stderr": 0.038946411200447915 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.034076320938540516, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.034076320938540516 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24134078212290502, + "acc_stderr": 0.014310999547961441, + "acc_norm": 0.24134078212290502, + "acc_norm_stderr": 0.014310999547961441 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036846, + "acc_norm": 0.2, + 
"acc_norm_stderr": 0.04020151261036846 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4522058823529412, + "acc_stderr": 0.030233758551596452, + "acc_norm": 0.4522058823529412, + "acc_norm_stderr": 0.030233758551596452 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.23673469387755103, + "acc_stderr": 0.027212835884073167, + "acc_norm": 0.23673469387755103, + "acc_norm_stderr": 0.027212835884073167 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.25738396624472576, + "acc_stderr": 0.0284588209914603, + "acc_norm": 0.25738396624472576, + "acc_norm_stderr": 0.0284588209914603 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.24119947848761408, + "acc_stderr": 0.010926496102034956, + "acc_norm": 0.24119947848761408, + "acc_norm_stderr": 0.010926496102034956 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.23039215686274508, + "acc_stderr": 0.029554292605695066, + "acc_norm": 0.23039215686274508, + "acc_norm_stderr": 0.029554292605695066 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.23636363636363636, + "acc_stderr": 0.033175059300091805, + "acc_norm": 0.23636363636363636, + "acc_norm_stderr": 0.033175059300091805 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2594859241126071, + "mc1_stderr": 0.015345409485557958, + "mc2": 0.4152719205828202, + "mc2_stderr": 0.015006529677082818 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.26210153482880755, + "acc_stderr": 0.015119864670254151, + "acc_norm": 0.33293978748524206, + "acc_norm_stderr": 0.01620243120837378 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 
1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + 
"harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "ITT-AF/ITT-AF-PLM-1.4B_v0.1", + "model_sha": "d7078baba2546a6a3064d4e98558d8c9426993a3", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/ITT-AF/ITT-AF-PLM-1.4B_v0.2/result_2024-07-03 02:09:35.json b/ITT-AF/ITT-AF-PLM-1.4B_v0.2/result_2024-07-03 02:09:35.json new file mode 100644 index 0000000000000000000000000000000000000000..fad6ab06cd428d16ad63aa83e3832a2c1efabc6a --- /dev/null +++ b/ITT-AF/ITT-AF-PLM-1.4B_v0.2/result_2024-07-03 02:09:35.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2354948805460751, + "acc_stderr": 0.012399451855004752, + "acc_norm": 0.2901023890784983, + "acc_norm_stderr": 0.013261573677520764 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3286197968532165, + "acc_stderr": 0.004687514708345323, + "acc_norm": 0.40529774945230035, + "acc_norm_stderr": 0.0048994621118323345 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.21637426900584794, + "acc_stderr": 0.03158149539338734, + "acc_norm": 0.21637426900584794, + "acc_norm_stderr": 0.03158149539338734 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2524271844660194, + "acc_stderr": 0.04301250399690879, + "acc_norm": 0.2524271844660194, + "acc_norm_stderr": 0.04301250399690879 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2681992337164751, + "acc_stderr": 0.015842430835269435, + 
"acc_norm": 0.2681992337164751, + "acc_norm_stderr": 0.015842430835269435 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3851851851851852, + "acc_stderr": 0.042039210401562783, + "acc_norm": 0.3851851851851852, + "acc_norm_stderr": 0.042039210401562783 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.22127659574468084, + "acc_stderr": 0.027136349602424045, + "acc_norm": 0.22127659574468084, + "acc_norm_stderr": 0.027136349602424045 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3132530120481928, + "acc_stderr": 0.036108050180310235, + "acc_norm": 0.3132530120481928, + "acc_norm_stderr": 0.036108050180310235 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2990353697749196, + "acc_stderr": 0.02600330111788514, + "acc_norm": 0.2990353697749196, + "acc_norm_stderr": 0.02600330111788514 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.2645739910313901, + "acc_stderr": 0.029605103217038332, + "acc_norm": 0.2645739910313901, + "acc_norm_stderr": 0.029605103217038332 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.21374045801526717, + "acc_stderr": 0.0359546161177469, + "acc_norm": 0.21374045801526717, + "acc_norm_stderr": 0.0359546161177469 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.2474747474747475, + "acc_stderr": 0.030746300742124505, + "acc_norm": 0.2474747474747475, + "acc_norm_stderr": 0.030746300742124505 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.27586206896551724, + "acc_stderr": 0.037245636197746325, + "acc_norm": 0.27586206896551724, + "acc_norm_stderr": 0.037245636197746325 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.18627450980392157, + "acc_stderr": 
0.03873958714149351, + "acc_norm": 0.18627450980392157, + "acc_norm_stderr": 0.03873958714149351 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.027553614467863804, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.027553614467863804 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2564102564102564, + "acc_stderr": 0.02213908110397153, + "acc_norm": 0.2564102564102564, + "acc_norm_stderr": 0.02213908110397153 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.18, + "acc_stderr": 0.038612291966536955, + "acc_norm": 0.18, + "acc_norm_stderr": 0.038612291966536955 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.19444444444444445, + "acc_stderr": 0.038260763248848646, + "acc_norm": 0.19444444444444445, + "acc_norm_stderr": 0.038260763248848646 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.27586206896551724, + "acc_stderr": 0.03144712581678242, + "acc_norm": 0.27586206896551724, + "acc_norm_stderr": 0.03144712581678242 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2838709677419355, + "acc_stderr": 0.025649381063029254, + "acc_norm": 0.2838709677419355, + "acc_norm_stderr": 0.025649381063029254 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.027236013946196697, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.027236013946196697 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.22264150943396227, + "acc_stderr": 0.025604233470899105, + "acc_norm": 0.22264150943396227, + "acc_norm_stderr": 0.025604233470899105 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.23636363636363636, + "acc_stderr": 0.040693063197213775, + "acc_norm": 0.23636363636363636, + "acc_norm_stderr": 0.040693063197213775 + }, + 
"harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.28888888888888886, + "acc_stderr": 0.027634907264178544, + "acc_norm": 0.28888888888888886, + "acc_norm_stderr": 0.027634907264178544 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2185430463576159, + "acc_stderr": 0.03374235550425694, + "acc_norm": 0.2185430463576159, + "acc_norm_stderr": 0.03374235550425694 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.24875621890547264, + "acc_stderr": 0.030567675938916714, + "acc_norm": 0.24875621890547264, + "acc_norm_stderr": 0.030567675938916714 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.24277456647398843, + "acc_stderr": 0.0326926380614177, + "acc_norm": 0.24277456647398843, + "acc_norm_stderr": 0.0326926380614177 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.21693121693121692, + "acc_stderr": 0.021227082449445055, + "acc_norm": 0.21693121693121692, + "acc_norm_stderr": 0.021227082449445055 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.22916666666666666, + "acc_stderr": 0.03514697467862388, + "acc_norm": 0.22916666666666666, + "acc_norm_stderr": 0.03514697467862388 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.16, + "acc_stderr": 0.0368452949177471, + "acc_norm": 0.16, + "acc_norm_stderr": 0.0368452949177471 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720683, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720683 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.25722543352601157, + "acc_stderr": 0.023532925431044287, + "acc_norm": 0.25722543352601157, + "acc_norm_stderr": 0.023532925431044287 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3128834355828221, + "acc_stderr": 0.036429145782924034, + "acc_norm": 0.3128834355828221, + "acc_norm_stderr": 0.036429145782924034 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.02438366553103545, + "acc_norm": 0.25925925925925924, + 
"acc_norm_stderr": 0.02438366553103545 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.3005181347150259, + "acc_stderr": 0.03308818594415751, + "acc_norm": 0.3005181347150259, + "acc_norm_stderr": 0.03308818594415751 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.040969851398436716, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.040969851398436716 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.22385321100917432, + "acc_stderr": 0.017871217767790222, + "acc_norm": 0.22385321100917432, + "acc_norm_stderr": 0.017871217767790222 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.12698412698412698, + "acc_stderr": 0.02978041752268843, + "acc_norm": 0.12698412698412698, + "acc_norm_stderr": 0.02978041752268843 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.23202614379084968, + "acc_stderr": 0.024170840879340995, + "acc_norm": 0.23202614379084968, + "acc_norm_stderr": 0.024170840879340995 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.36363636363636365, + "acc_stderr": 0.043913262867240704, + "acc_norm": 0.36363636363636365, + "acc_norm_stderr": 0.043913262867240704 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.21710526315789475, + "acc_stderr": 0.03355045304882923, + "acc_norm": 0.21710526315789475, + "acc_norm_stderr": 0.03355045304882923 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.22712418300653595, + "acc_stderr": 0.01694985327921237, + "acc_norm": 0.22712418300653595, + "acc_norm_stderr": 0.01694985327921237 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2765957446808511, + "acc_stderr": 
0.026684564340461, + "acc_norm": 0.2765957446808511, + "acc_norm_stderr": 0.026684564340461 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.21428571428571427, + "acc_stderr": 0.038946411200447915, + "acc_norm": 0.21428571428571427, + "acc_norm_stderr": 0.038946411200447915 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.034076320938540516, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.034076320938540516 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24134078212290502, + "acc_stderr": 0.014310999547961441, + "acc_norm": 0.24134078212290502, + "acc_norm_stderr": 0.014310999547961441 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036846, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036846 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4522058823529412, + "acc_stderr": 0.030233758551596452, + "acc_norm": 0.4522058823529412, + "acc_norm_stderr": 0.030233758551596452 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.23673469387755103, + "acc_stderr": 0.027212835884073167, + "acc_norm": 0.23673469387755103, + "acc_norm_stderr": 0.027212835884073167 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.25738396624472576, + "acc_stderr": 0.0284588209914603, + "acc_norm": 0.25738396624472576, + "acc_norm_stderr": 0.0284588209914603 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.24119947848761408, + "acc_stderr": 0.010926496102034956, + "acc_norm": 0.24119947848761408, + "acc_norm_stderr": 0.010926496102034956 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.23039215686274508, + "acc_stderr": 0.029554292605695066, + "acc_norm": 0.23039215686274508, + "acc_norm_stderr": 0.029554292605695066 + }, + 
"harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.23636363636363636, + "acc_stderr": 0.033175059300091805, + "acc_norm": 0.23636363636363636, + "acc_norm_stderr": 0.033175059300091805 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2594859241126071, + "mc1_stderr": 0.015345409485557958, + "mc2": 0.4152659984195771, + "mc2_stderr": 0.015006611480241957 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2585596221959858, + "acc_stderr": 0.015053354438963997, + "acc_norm": 0.33293978748524206, + "acc_norm_stderr": 0.01620243120837378 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + 
"harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "ITT-AF/ITT-AF-PLM-1.4B_v0.2", + "model_sha": "e75e6487e1e12570174792f606ba3e4d3b92990a", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/ITT-AF/ITT-AF-PLM-1.4B_v0.3/result_2024-07-08 12:24:56.json b/ITT-AF/ITT-AF-PLM-1.4B_v0.3/result_2024-07-08 12:24:56.json new file mode 100644 index 0000000000000000000000000000000000000000..c59b52beb2a216be2f4b169cab13059c65a34ffe --- /dev/null +++ b/ITT-AF/ITT-AF-PLM-1.4B_v0.3/result_2024-07-08 12:24:56.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 
0.22781569965870307, + "acc_stderr": 0.012256708602326914, + "acc_norm": 0.27047781569965873, + "acc_norm_stderr": 0.012980954547659554 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3148775144393547, + "acc_stderr": 0.0046351783711100405, + "acc_norm": 0.38169687313284206, + "acc_norm_stderr": 0.004848099661619696 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.3216374269005848, + "acc_stderr": 0.03582529442573122, + "acc_norm": 0.3216374269005848, + "acc_norm_stderr": 0.03582529442573122 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.18446601941747573, + "acc_stderr": 0.03840423627288276, + "acc_norm": 0.18446601941747573, + "acc_norm_stderr": 0.03840423627288276 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.280970625798212, + "acc_stderr": 0.016073127851221253, + "acc_norm": 0.280970625798212, + "acc_norm_stderr": 0.016073127851221253 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.23703703703703705, + "acc_stderr": 0.03673731683969506, + "acc_norm": 0.23703703703703705, + "acc_norm_stderr": 0.03673731683969506 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.20425531914893616, + "acc_stderr": 0.02635515841334941, + "acc_norm": 0.20425531914893616, + "acc_norm_stderr": 0.02635515841334941 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.2891566265060241, + "acc_stderr": 0.035294868015111155, + "acc_norm": 0.2891566265060241, + "acc_norm_stderr": 0.035294868015111155 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2990353697749196, + "acc_stderr": 0.02600330111788514, + "acc_norm": 0.2990353697749196, + "acc_norm_stderr": 0.02600330111788514 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3721973094170404, + "acc_stderr": 0.0324430528300873, + "acc_norm": 0.3721973094170404, + "acc_norm_stderr": 0.0324430528300873 + }, + "harness|ko_mmlu_human_sexuality|5": { + 
"acc": 0.2366412213740458, + "acc_stderr": 0.03727673575596918, + "acc_norm": 0.2366412213740458, + "acc_norm_stderr": 0.03727673575596918 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.029620227874790458, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.029620227874790458 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.30344827586206896, + "acc_stderr": 0.03831226048850333, + "acc_norm": 0.30344827586206896, + "acc_norm_stderr": 0.03831226048850333 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.04158307533083286, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.04158307533083286 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.2605042016806723, + "acc_stderr": 0.028510251512341933, + "acc_norm": 0.2605042016806723, + "acc_norm_stderr": 0.028510251512341933 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3641025641025641, + "acc_stderr": 0.024396672985094778, + "acc_norm": 0.3641025641025641, + "acc_norm_stderr": 0.024396672985094778 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.22, + "acc_stderr": 0.0416333199893227, + "acc_norm": 0.22, + "acc_norm_stderr": 0.0416333199893227 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.21296296296296297, + "acc_stderr": 0.03957835471980981, + "acc_norm": 0.21296296296296297, + "acc_norm_stderr": 0.03957835471980981 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2019704433497537, + "acc_stderr": 0.02824735012218027, + "acc_norm": 0.2019704433497537, + "acc_norm_stderr": 0.02824735012218027 + }, + 
"harness|ko_mmlu_high_school_biology|5": { + "acc": 0.31290322580645163, + "acc_stderr": 0.026377567028645858, + "acc_norm": 0.31290322580645163, + "acc_norm_stderr": 0.026377567028645858 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.19658119658119658, + "acc_stderr": 0.02603538609895129, + "acc_norm": 0.19658119658119658, + "acc_norm_stderr": 0.02603538609895129 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2641509433962264, + "acc_stderr": 0.02713429162874172, + "acc_norm": 0.2641509433962264, + "acc_norm_stderr": 0.02713429162874172 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03955932861795833, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03955932861795833 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.026842057873833706, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.026842057873833706 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + "acc_stderr": 0.03734535676787198, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.03734535676787198 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.24875621890547264, + "acc_stderr": 0.030567675938916714, + "acc_norm": 0.24875621890547264, + "acc_norm_stderr": 0.030567675938916714 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.32947976878612717, + "acc_stderr": 0.035839017547364134, + "acc_norm": 0.32947976878612717, + "acc_norm_stderr": 0.035839017547364134 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.022569897074918417, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.022569897074918417 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2638888888888889, + "acc_stderr": 0.03685651095897532, + "acc_norm": 0.2638888888888889, + "acc_norm_stderr": 0.03685651095897532 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.34, + "acc_stderr": 
0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909282, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909282 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.24566473988439305, + "acc_stderr": 0.02317629820399201, + "acc_norm": 0.24566473988439305, + "acc_norm_stderr": 0.02317629820399201 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2331288343558282, + "acc_stderr": 0.0332201579577674, + "acc_norm": 0.2331288343558282, + "acc_norm_stderr": 0.0332201579577674 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.023132376234543346, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.023132376234543346 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.36787564766839376, + "acc_stderr": 0.034801756684660366, + "acc_norm": 0.36787564766839376, + "acc_norm_stderr": 0.034801756684660366 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.22807017543859648, + "acc_stderr": 0.03947152782669415, + "acc_norm": 0.22807017543859648, + "acc_norm_stderr": 0.03947152782669415 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.23853211009174313, + "acc_stderr": 0.018272575810231867, + "acc_norm": 0.23853211009174313, + "acc_norm_stderr": 0.018272575810231867 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.20634920634920634, + "acc_stderr": 0.03619604524124249, + "acc_norm": 0.20634920634920634, + "acc_norm_stderr": 0.03619604524124249 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.023929155517351287, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.023929155517351287 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.32, + 
"acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.24793388429752067, + "acc_stderr": 0.039418975265163025, + "acc_norm": 0.24793388429752067, + "acc_norm_stderr": 0.039418975265163025 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.17763157894736842, + "acc_stderr": 0.03110318238312338, + "acc_norm": 0.17763157894736842, + "acc_norm_stderr": 0.03110318238312338 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2173202614379085, + "acc_stderr": 0.016684820929148598, + "acc_norm": 0.2173202614379085, + "acc_norm_stderr": 0.016684820929148598 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.22340425531914893, + "acc_stderr": 0.024847921358063962, + "acc_norm": 0.22340425531914893, + "acc_norm_stderr": 0.024847921358063962 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25, + "acc_stderr": 0.04109974682633932, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04109974682633932 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.0340470532865388, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.0340470532865388 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24692737430167597, + "acc_stderr": 0.014422292204808852, + "acc_norm": 0.24692737430167597, + "acc_norm_stderr": 0.014422292204808852 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4375, + "acc_stderr": 0.030134614954403924, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.030134614954403924 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.39591836734693875, 
+ "acc_stderr": 0.03130802899065685, + "acc_norm": 0.39591836734693875, + "acc_norm_stderr": 0.03130802899065685 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.26582278481012656, + "acc_stderr": 0.02875679962965834, + "acc_norm": 0.26582278481012656, + "acc_norm_stderr": 0.02875679962965834 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.24967405475880053, + "acc_stderr": 0.011054538377832338, + "acc_norm": 0.24967405475880053, + "acc_norm_stderr": 0.011054538377832338 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.25, + "acc_stderr": 0.03039153369274154, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03039153369274154 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2606060606060606, + "acc_stderr": 0.03427743175816524, + "acc_norm": 0.2606060606060606, + "acc_norm_stderr": 0.03427743175816524 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.25458996328029376, + "mc1_stderr": 0.015250117079156472, + "mc2": 0.42272288074346875, + "mc2_stderr": 0.015262128830746851 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2833530106257379, + "acc_stderr": 0.015492852084597235, + "acc_norm": 0.345926800472255, + "acc_norm_stderr": 0.016353853414347575 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + 
"harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + 
"model_name": "ITT-AF/ITT-AF-PLM-1.4B_v0.3", + "model_sha": "c1457bcdfac89c34acdf512fb930ced479ad8848", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/ITT-AF/ITT-AF-PLM-2.2B_v0.3/result_2024-07-05 16:53:49.json b/ITT-AF/ITT-AF-PLM-2.2B_v0.3/result_2024-07-05 16:53:49.json new file mode 100644 index 0000000000000000000000000000000000000000..3b4769dd3fae5bcd923d333fa1744ea842c67f40 --- /dev/null +++ b/ITT-AF/ITT-AF-PLM-2.2B_v0.3/result_2024-07-05 16:53:49.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2363481228668942, + "acc_stderr": 0.012414960524301823, + "acc_norm": 0.2986348122866894, + "acc_norm_stderr": 0.013374078615068752 + }, + "harness|ko_hellaswag|10": { + "acc": 0.34226249751045607, + "acc_stderr": 0.00473497266829962, + "acc_norm": 0.42929695279824737, + "acc_norm_stderr": 0.004939642460172576 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.033773102522091945, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.033773102522091945 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.22330097087378642, + "acc_stderr": 0.04123553189891431, + "acc_norm": 0.22330097087378642, + "acc_norm_stderr": 0.04123553189891431 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2720306513409962, + "acc_stderr": 0.015913367447500517, + "acc_norm": 0.2720306513409962, + "acc_norm_stderr": 0.015913367447500517 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3111111111111111, + "acc_stderr": 0.03999262876617723, + "acc_norm": 0.3111111111111111, + "acc_norm_stderr": 0.03999262876617723 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.22, + "acc_stderr": 0.041633319989322695, + "acc_norm": 0.22, + "acc_norm_stderr": 0.041633319989322695 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.23404255319148937, 
+ "acc_stderr": 0.02767845257821238, + "acc_norm": 0.23404255319148937, + "acc_norm_stderr": 0.02767845257821238 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.29518072289156627, + "acc_stderr": 0.0355092018568963, + "acc_norm": 0.29518072289156627, + "acc_norm_stderr": 0.0355092018568963 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.28938906752411575, + "acc_stderr": 0.02575586592263294, + "acc_norm": 0.28938906752411575, + "acc_norm_stderr": 0.02575586592263294 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.2600896860986547, + "acc_stderr": 0.02944249558585747, + "acc_norm": 0.2600896860986547, + "acc_norm_stderr": 0.02944249558585747 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.1984732824427481, + "acc_stderr": 0.034981493854624714, + "acc_norm": 0.1984732824427481, + "acc_norm_stderr": 0.034981493854624714 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.2676767676767677, + "acc_stderr": 0.031544498882702866, + "acc_norm": 0.2676767676767677, + "acc_norm_stderr": 0.031544498882702866 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.30344827586206896, + "acc_stderr": 0.038312260488503336, + "acc_norm": 0.30344827586206896, + "acc_norm_stderr": 0.038312260488503336 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.04023382273617749, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.04023382273617749 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.22268907563025211, + "acc_stderr": 0.027025433498882385, + "acc_norm": 0.22268907563025211, + "acc_norm_stderr": 0.027025433498882385 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2923076923076923, + "acc_stderr": 0.023060438380857747, + "acc_norm": 0.2923076923076923, + "acc_norm_stderr": 0.023060438380857747 + }, + 
"harness|ko_mmlu_computer_security|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.04414343666854933, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.04414343666854933 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.32019704433497537, + "acc_stderr": 0.03282649385304151, + "acc_norm": 0.32019704433497537, + "acc_norm_stderr": 0.03282649385304151 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.24838709677419354, + "acc_stderr": 0.024580028921481003, + "acc_norm": 0.24838709677419354, + "acc_norm_stderr": 0.024580028921481003 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.20512820512820512, + "acc_stderr": 0.02645350805404032, + "acc_norm": 0.20512820512820512, + "acc_norm_stderr": 0.02645350805404032 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.23773584905660378, + "acc_stderr": 0.02619980880756192, + "acc_norm": 0.23773584905660378, + "acc_norm_stderr": 0.02619980880756192 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03955932861795833, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03955932861795833 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2740740740740741, + "acc_stderr": 0.027195934804085626, + "acc_norm": 0.2740740740740741, + "acc_norm_stderr": 0.027195934804085626 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.25165562913907286, + "acc_stderr": 0.03543304234389985, + "acc_norm": 0.25165562913907286, + "acc_norm_stderr": 0.03543304234389985 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.25870646766169153, + "acc_stderr": 0.030965903123573037, + "acc_norm": 0.25870646766169153, + 
"acc_norm_stderr": 0.030965903123573037 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.21965317919075145, + "acc_stderr": 0.031568093627031744, + "acc_norm": 0.21965317919075145, + "acc_norm_stderr": 0.031568093627031744 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.022569897074918417, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.022569897074918417 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2569444444444444, + "acc_stderr": 0.03653946969442099, + "acc_norm": 0.2569444444444444, + "acc_norm_stderr": 0.03653946969442099 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.18, + "acc_stderr": 0.03861229196653695, + "acc_norm": 0.18, + "acc_norm_stderr": 0.03861229196653695 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.27167630057803466, + "acc_stderr": 0.023948512905468348, + "acc_norm": 0.27167630057803466, + "acc_norm_stderr": 0.023948512905468348 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.27607361963190186, + "acc_stderr": 0.0351238528370505, + "acc_norm": 0.27607361963190186, + "acc_norm_stderr": 0.0351238528370505 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.28703703703703703, + "acc_stderr": 0.02517104191530968, + "acc_norm": 0.28703703703703703, + "acc_norm_stderr": 0.02517104191530968 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.29533678756476683, + "acc_stderr": 0.0329229663915514, + "acc_norm": 0.29533678756476683, + "acc_norm_stderr": 0.0329229663915514 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.039994238792813344, + "acc_norm": 
0.23684210526315788, + "acc_norm_stderr": 0.039994238792813344 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.23853211009174313, + "acc_stderr": 0.018272575810231863, + "acc_norm": 0.23853211009174313, + "acc_norm_stderr": 0.018272575810231863 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.16666666666666666, + "acc_stderr": 0.03333333333333337, + "acc_norm": 0.16666666666666666, + "acc_norm_stderr": 0.03333333333333337 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.26143790849673204, + "acc_stderr": 0.025160998214292456, + "acc_norm": 0.26143790849673204, + "acc_norm_stderr": 0.025160998214292456 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.17, + "acc_stderr": 0.03775251680686371, + "acc_norm": 0.17, + "acc_norm_stderr": 0.03775251680686371 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.24793388429752067, + "acc_stderr": 0.03941897526516304, + "acc_norm": 0.24793388429752067, + "acc_norm_stderr": 0.03941897526516304 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.19736842105263158, + "acc_stderr": 0.03238981601699397, + "acc_norm": 0.19736842105263158, + "acc_norm_stderr": 0.03238981601699397 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2434640522875817, + "acc_stderr": 0.017362473762146634, + "acc_norm": 0.2434640522875817, + "acc_norm_stderr": 0.017362473762146634 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2553191489361702, + "acc_stderr": 0.02601199293090201, + "acc_norm": 0.2553191489361702, + "acc_norm_stderr": 0.02601199293090201 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.23214285714285715, + "acc_stderr": 0.04007341809755808, + "acc_norm": 0.23214285714285715, + "acc_norm_stderr": 0.04007341809755808 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.35648148148148145, + "acc_stderr": 0.03266478331527272, + "acc_norm": 0.35648148148148145, + "acc_norm_stderr": 0.03266478331527272 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 
0.24581005586592178, + "acc_stderr": 0.014400296429225603, + "acc_norm": 0.24581005586592178, + "acc_norm_stderr": 0.014400296429225603 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4227941176470588, + "acc_stderr": 0.030008562845003476, + "acc_norm": 0.4227941176470588, + "acc_norm_stderr": 0.030008562845003476 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.19591836734693877, + "acc_stderr": 0.025409301953225678, + "acc_norm": 0.19591836734693877, + "acc_norm_stderr": 0.025409301953225678 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.25316455696202533, + "acc_stderr": 0.028304657943035303, + "acc_norm": 0.25316455696202533, + "acc_norm_stderr": 0.028304657943035303 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.24185136897001303, + "acc_stderr": 0.010936550813827061, + "acc_norm": 0.24185136897001303, + "acc_norm_stderr": 0.010936550813827061 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.28921568627450983, + "acc_stderr": 0.03182231867647553, + "acc_norm": 0.28921568627450983, + "acc_norm_stderr": 0.03182231867647553 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.18787878787878787, + "acc_stderr": 0.03050193405942914, + "acc_norm": 0.18787878787878787, + "acc_norm_stderr": 0.03050193405942914 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2484700122399021, + "mc1_stderr": 0.015127427096520707, + "mc2": 0.40896407174805094, + "mc2_stderr": 0.015330862256670044 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.27744982290436837, + "acc_stderr": 0.015393630236605971, + "acc_norm": 0.35064935064935066, + "acc_norm_stderr": 
0.016405556903893295 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + 
"harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "ITT-AF/ITT-AF-PLM-2.2B_v0.3", + "model_sha": "a6937643393b71111c32b04572db62197cc31534", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/ITT-AF/ITT-AF-PLM-2.2B_v0.4/result_2024-07-16 11:12:50.json b/ITT-AF/ITT-AF-PLM-2.2B_v0.4/result_2024-07-16 11:12:50.json new file mode 100644 index 0000000000000000000000000000000000000000..de78dadb28003e80f3dbb549553100bec18c70aa --- /dev/null +++ b/ITT-AF/ITT-AF-PLM-2.2B_v0.4/result_2024-07-16 11:12:50.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2568259385665529, + "acc_stderr": 0.012766923794116801, + "acc_norm": 0.3216723549488055, + "acc_norm_stderr": 0.013650488084494166 + }, + "harness|ko_hellaswag|10": { + "acc": 0.35441147181836286, + "acc_stderr": 0.004773570096185056, + "acc_norm": 0.4525990838478391, + "acc_norm_stderr": 0.004967308254425757 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.22807017543859648, + "acc_stderr": 0.03218093795602357, + "acc_norm": 0.22807017543859648, + "acc_norm_stderr": 0.03218093795602357 + }, + 
"harness|ko_mmlu_management|5": { + "acc": 0.2621359223300971, + "acc_stderr": 0.04354631077260595, + "acc_norm": 0.2621359223300971, + "acc_norm_stderr": 0.04354631077260595 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.26947637292464877, + "acc_stderr": 0.015866243073215058, + "acc_norm": 0.26947637292464877, + "acc_norm_stderr": 0.015866243073215058 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2814814814814815, + "acc_stderr": 0.03885004245800255, + "acc_norm": 0.2814814814814815, + "acc_norm_stderr": 0.03885004245800255 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816508, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816508 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.22127659574468084, + "acc_stderr": 0.02713634960242405, + "acc_norm": 0.22127659574468084, + "acc_norm_stderr": 0.02713634960242405 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.2891566265060241, + "acc_stderr": 0.035294868015111155, + "acc_norm": 0.2891566265060241, + "acc_norm_stderr": 0.035294868015111155 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2733118971061093, + "acc_stderr": 0.02531176597542612, + "acc_norm": 0.2733118971061093, + "acc_norm_stderr": 0.02531176597542612 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.27802690582959644, + "acc_stderr": 0.03006958487449405, + "acc_norm": 0.27802690582959644, + "acc_norm_stderr": 0.03006958487449405 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.1984732824427481, + "acc_stderr": 0.03498149385462471, + "acc_norm": 0.1984732824427481, + "acc_norm_stderr": 0.03498149385462471 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.2828282828282828, + "acc_stderr": 0.03208779558786752, + "acc_norm": 0.2828282828282828, + "acc_norm_stderr": 0.03208779558786752 + }, + 
"harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2482758620689655, + "acc_stderr": 0.03600105692727772, + "acc_norm": 0.2482758620689655, + "acc_norm_stderr": 0.03600105692727772 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.044405219061793254, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.044405219061793254 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.25210084033613445, + "acc_stderr": 0.028205545033277723, + "acc_norm": 0.25210084033613445, + "acc_norm_stderr": 0.028205545033277723 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2948717948717949, + "acc_stderr": 0.023119362758232263, + "acc_norm": 0.2948717948717949, + "acc_norm_stderr": 0.023119362758232263 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.040191074725573483, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.040191074725573483 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.26108374384236455, + "acc_stderr": 0.03090379695211449, + "acc_norm": 0.26108374384236455, + "acc_norm_stderr": 0.03090379695211449 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3, + "acc_stderr": 0.02606936229533513, + "acc_norm": 0.3, + "acc_norm_stderr": 0.02606936229533513 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.23076923076923078, + "acc_stderr": 0.027601921381417583, + "acc_norm": 0.23076923076923078, + "acc_norm_stderr": 0.027601921381417583 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.24150943396226415, + "acc_stderr": 0.026341480371118338, + "acc_norm": 0.24150943396226415, + "acc_norm_stderr": 
0.026341480371118338 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2545454545454545, + "acc_stderr": 0.04172343038705383, + "acc_norm": 0.2545454545454545, + "acc_norm_stderr": 0.04172343038705383 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24444444444444444, + "acc_stderr": 0.02620276653465215, + "acc_norm": 0.24444444444444444, + "acc_norm_stderr": 0.02620276653465215 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.03710185726119995, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.03710185726119995 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.20398009950248755, + "acc_stderr": 0.02849317624532607, + "acc_norm": 0.20398009950248755, + "acc_norm_stderr": 0.02849317624532607 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.18497109826589594, + "acc_stderr": 0.029605623981771186, + "acc_norm": 0.18497109826589594, + "acc_norm_stderr": 0.029605623981771186 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.24603174603174602, + "acc_stderr": 0.022182037202948368, + "acc_norm": 0.24603174603174602, + "acc_norm_stderr": 0.022182037202948368 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2847222222222222, + "acc_stderr": 0.03773809990686935, + "acc_norm": 0.2847222222222222, + "acc_norm_stderr": 0.03773809990686935 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.24855491329479767, + "acc_stderr": 0.023267528432100174, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 0.023267528432100174 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2822085889570552, + "acc_stderr": 0.03536117886664743, + 
"acc_norm": 0.2822085889570552, + "acc_norm_stderr": 0.03536117886664743 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2654320987654321, + "acc_stderr": 0.02456922360046085, + "acc_norm": 0.2654320987654321, + "acc_norm_stderr": 0.02456922360046085 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816505, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816505 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.2694300518134715, + "acc_stderr": 0.03201867122877794, + "acc_norm": 0.2694300518134715, + "acc_norm_stderr": 0.03201867122877794 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2894736842105263, + "acc_stderr": 0.04266339443159394, + "acc_norm": 0.2894736842105263, + "acc_norm_stderr": 0.04266339443159394 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.25137614678899084, + "acc_stderr": 0.018599206360287415, + "acc_norm": 0.25137614678899084, + "acc_norm_stderr": 0.018599206360287415 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.11904761904761904, + "acc_stderr": 0.028965535858562964, + "acc_norm": 0.11904761904761904, + "acc_norm_stderr": 0.028965535858562964 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.024288619466046105, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.024288619466046105 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2396694214876033, + "acc_stderr": 0.03896878985070417, + "acc_norm": 0.2396694214876033, + "acc_norm_stderr": 0.03896878985070417 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.23026315789473684, + "acc_stderr": 0.03426059424403165, + "acc_norm": 0.23026315789473684, + "acc_norm_stderr": 0.03426059424403165 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.27124183006535946, + 
"acc_stderr": 0.01798661530403031, + "acc_norm": 0.27124183006535946, + "acc_norm_stderr": 0.01798661530403031 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.25886524822695034, + "acc_stderr": 0.026129572527180848, + "acc_norm": 0.25886524822695034, + "acc_norm_stderr": 0.026129572527180848 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.22321428571428573, + "acc_stderr": 0.03952301967702511, + "acc_norm": 0.22321428571428573, + "acc_norm_stderr": 0.03952301967702511 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3101851851851852, + "acc_stderr": 0.0315469628565663, + "acc_norm": 0.3101851851851852, + "acc_norm_stderr": 0.0315469628565663 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.39338235294117646, + "acc_stderr": 0.02967428828131118, + "acc_norm": 0.39338235294117646, + "acc_norm_stderr": 0.02967428828131118 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2530612244897959, + "acc_stderr": 0.027833023871399677, + "acc_norm": 0.2530612244897959, + "acc_norm_stderr": 0.027833023871399677 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.3037974683544304, + "acc_stderr": 0.029936696387138598, + "acc_norm": 0.3037974683544304, + "acc_norm_stderr": 0.029936696387138598 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2392438070404172, + "acc_stderr": 0.010896123652676653, + "acc_norm": 0.2392438070404172, + "acc_norm_stderr": 0.010896123652676653 + }, + 
"harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.030587591351604243, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.030587591351604243 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2787878787878788, + "acc_stderr": 0.035014387062967806, + "acc_norm": 0.2787878787878788, + "acc_norm_stderr": 0.035014387062967806 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2631578947368421, + "mc1_stderr": 0.015415241740237035, + "mc2": 0.418037238564343, + "mc2_stderr": 0.0152910277865786 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.282172373081464, + "acc_stderr": 0.01547327158398843, + "acc_norm": 0.358913813459268, + "acc_norm_stderr": 0.01649180210299904 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + 
"harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "ITT-AF/ITT-AF-PLM-2.2B_v0.4", + "model_sha": "3b5d953b7ea0d262cda03c69edb141faa9686ab3", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/ITT-AF/ITT-Yi-Ko-6B-v1.0/result_2024-01-25 08:20:13.json b/ITT-AF/ITT-Yi-Ko-6B-v1.0/result_2024-01-25 08:20:13.json new file mode 100644 index 
0000000000000000000000000000000000000000..eaf740b58d43b8bb7b9849825a6b572480ac9294 --- /dev/null +++ b/ITT-AF/ITT-Yi-Ko-6B-v1.0/result_2024-01-25 08:20:13.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.36860068259385664, + "acc_stderr": 0.014097810678042187, + "acc_norm": 0.4249146757679181, + "acc_norm_stderr": 0.014445698968520769 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3986257717586138, + "acc_stderr": 0.004886147907627406, + "acc_norm": 0.538338976299542, + "acc_norm_stderr": 0.004975091055697193 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5321637426900585, + "acc_stderr": 0.038268824176603704, + "acc_norm": 0.5321637426900585, + "acc_norm_stderr": 0.038268824176603704 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5436893203883495, + "acc_stderr": 0.049318019942204146, + "acc_norm": 0.5436893203883495, + "acc_norm_stderr": 0.049318019942204146 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.561941251596424, + "acc_stderr": 0.017742232238257244, + "acc_norm": 0.561941251596424, + "acc_norm_stderr": 0.017742232238257244 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4740740740740741, + "acc_stderr": 0.04313531696750573, + "acc_norm": 0.4740740740740741, + "acc_norm_stderr": 0.04313531696750573 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39148936170212767, + "acc_stderr": 0.03190701242326812, + "acc_norm": 0.39148936170212767, + "acc_norm_stderr": 0.03190701242326812 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3614457831325301, + "acc_stderr": 0.0374005938202932, + "acc_norm": 0.3614457831325301, + "acc_norm_stderr": 0.0374005938202932 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4533762057877814, + "acc_stderr": 0.028274359854894255, + "acc_norm": 0.4533762057877814, + "acc_norm_stderr": 
0.028274359854894255 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4977578475336323, + "acc_stderr": 0.033557465352232634, + "acc_norm": 0.4977578475336323, + "acc_norm_stderr": 0.033557465352232634 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5572519083969466, + "acc_stderr": 0.04356447202665069, + "acc_norm": 0.5572519083969466, + "acc_norm_stderr": 0.04356447202665069 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.601010101010101, + "acc_stderr": 0.034889016168527305, + "acc_norm": 0.601010101010101, + "acc_norm_stderr": 0.034889016168527305 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.47586206896551725, + "acc_stderr": 0.041618085035015295, + "acc_norm": 0.47586206896551725, + "acc_norm_stderr": 0.041618085035015295 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.041583075330832865, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.041583075330832865 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4789915966386555, + "acc_stderr": 0.03244980849990029, + "acc_norm": 0.4789915966386555, + "acc_norm_stderr": 0.03244980849990029 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4282051282051282, + "acc_stderr": 0.025088301454694838, + "acc_norm": 0.4282051282051282, + "acc_norm_stderr": 0.025088301454694838 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.63, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.63, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.04820403072760627, + "acc_norm": 0.46296296296296297, + 
"acc_norm_stderr": 0.04820403072760627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.35960591133004927, + "acc_stderr": 0.03376458246509567, + "acc_norm": 0.35960591133004927, + "acc_norm_stderr": 0.03376458246509567 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.43870967741935485, + "acc_stderr": 0.028229497320317213, + "acc_norm": 0.43870967741935485, + "acc_norm_stderr": 0.028229497320317213 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7051282051282052, + "acc_stderr": 0.02987257770889118, + "acc_norm": 0.7051282051282052, + "acc_norm_stderr": 0.02987257770889118 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4528301886792453, + "acc_stderr": 0.030635627957961816, + "acc_norm": 0.4528301886792453, + "acc_norm_stderr": 0.030635627957961816 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5, + "acc_stderr": 0.04789131426105757, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04789131426105757 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3111111111111111, + "acc_stderr": 0.02822644674968352, + "acc_norm": 0.3111111111111111, + "acc_norm_stderr": 0.02822644674968352 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.271523178807947, + "acc_stderr": 0.036313298039696545, + "acc_norm": 0.271523178807947, + "acc_norm_stderr": 0.036313298039696545 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5671641791044776, + "acc_stderr": 0.03503490923673282, + "acc_norm": 0.5671641791044776, + "acc_norm_stderr": 0.03503490923673282 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3930635838150289, + "acc_stderr": 0.03724249595817729, + "acc_norm": 0.3930635838150289, + "acc_norm_stderr": 0.03724249595817729 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.0242785680243077, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.0242785680243077 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4097222222222222, + "acc_stderr": 
0.04112490974670788, + "acc_norm": 0.4097222222222222, + "acc_norm_stderr": 0.04112490974670788 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.55, + "acc_stderr": 0.05, + "acc_norm": 0.55, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4913294797687861, + "acc_stderr": 0.026915047355369804, + "acc_norm": 0.4913294797687861, + "acc_norm_stderr": 0.026915047355369804 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4723926380368098, + "acc_stderr": 0.039223782906109894, + "acc_norm": 0.4723926380368098, + "acc_norm_stderr": 0.039223782906109894 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.02774431344337654, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.02774431344337654 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5647668393782384, + "acc_stderr": 0.03578038165008586, + "acc_norm": 0.5647668393782384, + "acc_norm_stderr": 0.03578038165008586 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.04227054451232199, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.04227054451232199 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.581651376146789, + "acc_stderr": 0.021149548596443874, + "acc_norm": 0.581651376146789, + "acc_norm_stderr": 0.021149548596443874 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.040406101782088394, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.040406101782088394 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.42483660130718953, + "acc_stderr": 0.028304576673141107, + 
"acc_norm": 0.42483660130718953, + "acc_norm_stderr": 0.028304576673141107 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6859504132231405, + "acc_stderr": 0.04236964753041018, + "acc_norm": 0.6859504132231405, + "acc_norm_stderr": 0.04236964753041018 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4605263157894737, + "acc_stderr": 0.04056242252249033, + "acc_norm": 0.4605263157894737, + "acc_norm_stderr": 0.04056242252249033 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.39705882352941174, + "acc_stderr": 0.019794488900024106, + "acc_norm": 0.39705882352941174, + "acc_norm_stderr": 0.019794488900024106 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3475177304964539, + "acc_stderr": 0.02840662780959095, + "acc_norm": 0.3475177304964539, + "acc_norm_stderr": 0.02840662780959095 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3392857142857143, + "acc_stderr": 0.04493949068613539, + "acc_norm": 0.3392857142857143, + "acc_norm_stderr": 0.04493949068613539 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2638888888888889, + "acc_stderr": 0.03005820270430985, + "acc_norm": 0.2638888888888889, + "acc_norm_stderr": 0.03005820270430985 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2212290502793296, + "acc_stderr": 0.01388216459888727, + "acc_norm": 0.2212290502793296, + "acc_norm_stderr": 0.01388216459888727 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3713235294117647, + 
"acc_stderr": 0.02934980313976587, + "acc_norm": 0.3713235294117647, + "acc_norm_stderr": 0.02934980313976587 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.40408163265306124, + "acc_stderr": 0.03141470802586589, + "acc_norm": 0.40408163265306124, + "acc_norm_stderr": 0.03141470802586589 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6160337552742616, + "acc_stderr": 0.031658678064106674, + "acc_norm": 0.6160337552742616, + "acc_norm_stderr": 0.031658678064106674 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3259452411994785, + "acc_stderr": 0.01197150729498278, + "acc_norm": 0.3259452411994785, + "acc_norm_stderr": 0.01197150729498278 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5098039215686274, + "acc_stderr": 0.03508637358630572, + "acc_norm": 0.5098039215686274, + "acc_norm_stderr": 0.03508637358630572 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5636363636363636, + "acc_stderr": 0.03872592983524754, + "acc_norm": 0.5636363636363636, + "acc_norm_stderr": 0.03872592983524754 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2802937576499388, + "mc1_stderr": 0.015723139524608746, + "mc2": 0.43117439069714647, + "mc2_stderr": 0.014988526622853661 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3825265643447462, + "acc_stderr": 0.016709165387228817, + "acc_norm": 0.4309327036599764, + "acc_norm_stderr": 0.017025558196043136 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + 
"harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + 
"harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "ITT-AF/ITT-Yi-Ko-6B-v1.0", + "model_sha": "138b6cebc9ef970542a57f56701a4507dc5d12f7", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/ITT-AF/ITT-Yi-Ko-6B-v2.0/result_2024-01-27 04:14:20.json b/ITT-AF/ITT-Yi-Ko-6B-v2.0/result_2024-01-27 04:14:20.json new file mode 100644 index 0000000000000000000000000000000000000000..a2566c8d360b0051d4fe6427f26336f7f469ffc2 --- /dev/null +++ b/ITT-AF/ITT-Yi-Ko-6B-v2.0/result_2024-01-27 04:14:20.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3677474402730375, + "acc_stderr": 0.014090995618168489, + "acc_norm": 0.4283276450511945, + "acc_norm_stderr": 0.014460496367599026 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3997211710814579, + "acc_stderr": 0.004888398535520493, + "acc_norm": 0.5373431587333201, + "acc_norm_stderr": 0.004975845335086618 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5087719298245614, + "acc_stderr": 0.038342347441649924, + "acc_norm": 0.5087719298245614, + "acc_norm_stderr": 0.038342347441649924 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5825242718446602, + "acc_stderr": 0.048828405482122375, + "acc_norm": 0.5825242718446602, + "acc_norm_stderr": 0.048828405482122375 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5734355044699873, + "acc_stderr": 0.01768606697567565, + "acc_norm": 0.5734355044699873, + "acc_norm_stderr": 0.01768606697567565 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4888888888888889, + "acc_stderr": 0.04318275491977978, + "acc_norm": 0.4888888888888889, + "acc_norm_stderr": 0.04318275491977978 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 
0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39574468085106385, + "acc_stderr": 0.03196758697835363, + "acc_norm": 0.39574468085106385, + "acc_norm_stderr": 0.03196758697835363 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3674698795180723, + "acc_stderr": 0.03753267402120574, + "acc_norm": 0.3674698795180723, + "acc_norm_stderr": 0.03753267402120574 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4919614147909968, + "acc_stderr": 0.02839442137098453, + "acc_norm": 0.4919614147909968, + "acc_norm_stderr": 0.02839442137098453 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5112107623318386, + "acc_stderr": 0.033549366530984746, + "acc_norm": 0.5112107623318386, + "acc_norm_stderr": 0.033549366530984746 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5038167938931297, + "acc_stderr": 0.04385162325601553, + "acc_norm": 0.5038167938931297, + "acc_norm_stderr": 0.04385162325601553 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.601010101010101, + "acc_stderr": 0.0348890161685273, + "acc_norm": 0.601010101010101, + "acc_norm_stderr": 0.0348890161685273 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4827586206896552, + "acc_stderr": 0.041641887201693754, + "acc_norm": 0.4827586206896552, + "acc_norm_stderr": 0.041641887201693754 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.04158307533083286, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.04158307533083286 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4789915966386555, + "acc_stderr": 0.03244980849990029, + "acc_norm": 0.4789915966386555, + "acc_norm_stderr": 0.03244980849990029 + }, + 
"harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4282051282051282, + "acc_stderr": 0.025088301454694834, + "acc_norm": 0.4282051282051282, + "acc_norm_stderr": 0.025088301454694834 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.63, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.63, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.37438423645320196, + "acc_stderr": 0.03405155380561952, + "acc_norm": 0.37438423645320196, + "acc_norm_stderr": 0.03405155380561952 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.43870967741935485, + "acc_stderr": 0.028229497320317216, + "acc_norm": 0.43870967741935485, + "acc_norm_stderr": 0.028229497320317216 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7136752136752137, + "acc_stderr": 0.029614323690456648, + "acc_norm": 0.7136752136752137, + "acc_norm_stderr": 0.029614323690456648 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.47547169811320755, + "acc_stderr": 0.030735822206205608, + "acc_norm": 0.47547169811320755, + "acc_norm_stderr": 0.030735822206205608 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.04769300568972744, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.04769300568972744 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.32222222222222224, + "acc_stderr": 0.028493465091028597, + "acc_norm": 0.32222222222222224, + "acc_norm_stderr": 0.028493465091028597 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.037579499229433426, + "acc_norm": 0.304635761589404, 
+ "acc_norm_stderr": 0.037579499229433426 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5522388059701493, + "acc_stderr": 0.03516184772952167, + "acc_norm": 0.5522388059701493, + "acc_norm_stderr": 0.03516184772952167 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3583815028901734, + "acc_stderr": 0.036563436533531585, + "acc_norm": 0.3583815028901734, + "acc_norm_stderr": 0.036563436533531585 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.023919984164047732, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.023919984164047732 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4097222222222222, + "acc_stderr": 0.04112490974670787, + "acc_norm": 0.4097222222222222, + "acc_norm_stderr": 0.04112490974670787 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.49710982658959535, + "acc_stderr": 0.026918645383239004, + "acc_norm": 0.49710982658959535, + "acc_norm_stderr": 0.026918645383239004 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4785276073619632, + "acc_stderr": 0.03924746876751129, + "acc_norm": 0.4785276073619632, + "acc_norm_stderr": 0.03924746876751129 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.45987654320987653, + "acc_stderr": 0.02773102275353928, + "acc_norm": 0.45987654320987653, + "acc_norm_stderr": 0.02773102275353928 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5699481865284974, + "acc_stderr": 0.03572954333144808, + "acc_norm": 
0.5699481865284974, + "acc_norm_stderr": 0.03572954333144808 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3157894736842105, + "acc_stderr": 0.043727482902780085, + "acc_norm": 0.3157894736842105, + "acc_norm_stderr": 0.043727482902780085 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5706422018348624, + "acc_stderr": 0.021222286397236504, + "acc_norm": 0.5706422018348624, + "acc_norm_stderr": 0.021222286397236504 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2698412698412698, + "acc_stderr": 0.03970158273235173, + "acc_norm": 0.2698412698412698, + "acc_norm_stderr": 0.03970158273235173 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.43790849673202614, + "acc_stderr": 0.02840830202033269, + "acc_norm": 0.43790849673202614, + "acc_norm_stderr": 0.02840830202033269 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6694214876033058, + "acc_stderr": 0.04294340845212094, + "acc_norm": 0.6694214876033058, + "acc_norm_stderr": 0.04294340845212094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4473684210526316, + "acc_stderr": 0.04046336883978252, + "acc_norm": 0.4473684210526316, + "acc_norm_stderr": 0.04046336883978252 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.41013071895424835, + "acc_stderr": 0.01989841271763589, + "acc_norm": 0.41013071895424835, + "acc_norm_stderr": 0.01989841271763589 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3475177304964539, + "acc_stderr": 0.02840662780959095, + "acc_norm": 0.3475177304964539, + "acc_norm_stderr": 0.02840662780959095 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3392857142857143, + "acc_stderr": 0.0449394906861354, + "acc_norm": 0.3392857142857143, + "acc_norm_stderr": 0.0449394906861354 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.27314814814814814, 
+ "acc_stderr": 0.030388051301678116, + "acc_norm": 0.27314814814814814, + "acc_norm_stderr": 0.030388051301678116 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.21564245810055865, + "acc_stderr": 0.013754835975482337, + "acc_norm": 0.21564245810055865, + "acc_norm_stderr": 0.013754835975482337 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.375, + "acc_stderr": 0.029408372932278746, + "acc_norm": 0.375, + "acc_norm_stderr": 0.029408372932278746 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3836734693877551, + "acc_stderr": 0.031130880396235926, + "acc_norm": 0.3836734693877551, + "acc_norm_stderr": 0.031130880396235926 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5949367088607594, + "acc_stderr": 0.03195514741370672, + "acc_norm": 0.5949367088607594, + "acc_norm_stderr": 0.03195514741370672 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3305084745762712, + "acc_stderr": 0.012014142101842977, + "acc_norm": 0.3305084745762712, + "acc_norm_stderr": 0.012014142101842977 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5147058823529411, + "acc_stderr": 0.035077938347913236, + "acc_norm": 0.5147058823529411, + "acc_norm_stderr": 0.035077938347913236 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5393939393939394, + "acc_stderr": 0.03892207016552012, + "acc_norm": 0.5393939393939394, + "acc_norm_stderr": 0.03892207016552012 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.26193390452876375, + "mc1_stderr": 0.015392118805015006, + "mc2": 0.41956796147607484, + "mc2_stderr": 0.01500218367833407 + }, + 
"harness|ko_commongen_v2|2": { + "acc": 0.4085005903187721, + "acc_stderr": 0.01690006287942712, + "acc_norm": 0.43683589138134593, + "acc_norm_stderr": 0.017052633559856076 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + 
"harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "ITT-AF/ITT-Yi-Ko-6B-v2.0", + "model_sha": "89b900e75c8f20c494081b6d498de6ab63f97a19", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/ITT-AF/ITT-Yi-Ko-6B-v4.0/result_2024-02-14 09:52:59.json b/ITT-AF/ITT-Yi-Ko-6B-v4.0/result_2024-02-14 09:52:59.json new file mode 100644 index 0000000000000000000000000000000000000000..ffceaf1459c865cdab463ee256c3e1f1d5f115cc --- /dev/null +++ b/ITT-AF/ITT-Yi-Ko-6B-v4.0/result_2024-02-14 09:52:59.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3643344709897611, + "acc_stderr": 0.014063260279882417, + "acc_norm": 0.4377133105802048, + "acc_norm_stderr": 0.014497573881108285 + }, + "harness|ko_hellaswag|10": { + "acc": 0.41416052579167495, + "acc_stderr": 0.004915697886906119, + "acc_norm": 0.5499900418243377, + "acc_norm_stderr": 0.004964779805180654 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 
0.5497076023391813, + "acc_stderr": 0.038158273659132366, + "acc_norm": 0.5497076023391813, + "acc_norm_stderr": 0.038158273659132366 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5922330097087378, + "acc_stderr": 0.04865777570410769, + "acc_norm": 0.5922330097087378, + "acc_norm_stderr": 0.04865777570410769 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5734355044699873, + "acc_stderr": 0.017686066975675648, + "acc_norm": 0.5734355044699873, + "acc_norm_stderr": 0.017686066975675648 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4222222222222222, + "acc_stderr": 0.04266763404099582, + "acc_norm": 0.4222222222222222, + "acc_norm_stderr": 0.04266763404099582 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.42127659574468085, + "acc_stderr": 0.03227834510146267, + "acc_norm": 0.42127659574468085, + "acc_norm_stderr": 0.03227834510146267 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3614457831325301, + "acc_stderr": 0.03740059382029319, + "acc_norm": 0.3614457831325301, + "acc_norm_stderr": 0.03740059382029319 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5209003215434084, + "acc_stderr": 0.028373270961069414, + "acc_norm": 0.5209003215434084, + "acc_norm_stderr": 0.028373270961069414 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4439461883408072, + "acc_stderr": 0.03334625674242728, + "acc_norm": 0.4439461883408072, + "acc_norm_stderr": 0.03334625674242728 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5725190839694656, + "acc_stderr": 0.043389203057924014, + "acc_norm": 0.5725190839694656, + "acc_norm_stderr": 0.043389203057924014 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.45, + "acc_stderr": 0.04999999999999999, + "acc_norm": 0.45, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 
0.6414141414141414, + "acc_stderr": 0.03416903640391521, + "acc_norm": 0.6414141414141414, + "acc_norm_stderr": 0.03416903640391521 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5586206896551724, + "acc_stderr": 0.04137931034482757, + "acc_norm": 0.5586206896551724, + "acc_norm_stderr": 0.04137931034482757 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.042801058373643945, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.042801058373643945 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5084033613445378, + "acc_stderr": 0.03247390276569669, + "acc_norm": 0.5084033613445378, + "acc_norm_stderr": 0.03247390276569669 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4717948717948718, + "acc_stderr": 0.025310639254933903, + "acc_norm": 0.4717948717948718, + "acc_norm_stderr": 0.025310639254933903 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.65, + "acc_stderr": 0.04793724854411021, + "acc_norm": 0.65, + "acc_norm_stderr": 0.04793724854411021 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5185185185185185, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.5185185185185185, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39408866995073893, + "acc_stderr": 0.034381579670365446, + "acc_norm": 0.39408866995073893, + "acc_norm_stderr": 0.034381579670365446 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.49032258064516127, + "acc_stderr": 0.028438677998909558, + "acc_norm": 0.49032258064516127, + "acc_norm_stderr": 0.028438677998909558 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7008547008547008, + "acc_stderr": 0.029996951858349483, + "acc_norm": 0.7008547008547008, + "acc_norm_stderr": 0.029996951858349483 + }, + 
"harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4716981132075472, + "acc_stderr": 0.030723535249006107, + "acc_norm": 0.4716981132075472, + "acc_norm_stderr": 0.030723535249006107 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5181818181818182, + "acc_stderr": 0.04785964010794916, + "acc_norm": 0.5181818181818182, + "acc_norm_stderr": 0.04785964010794916 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.02831753349606648, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.02831753349606648 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3576158940397351, + "acc_stderr": 0.03913453431177258, + "acc_norm": 0.3576158940397351, + "acc_norm_stderr": 0.03913453431177258 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.572139303482587, + "acc_stderr": 0.03498541988407795, + "acc_norm": 0.572139303482587, + "acc_norm_stderr": 0.03498541988407795 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4161849710982659, + "acc_stderr": 0.037585177754049466, + "acc_norm": 0.4161849710982659, + "acc_norm_stderr": 0.037585177754049466 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.335978835978836, + "acc_stderr": 0.024326310529149152, + "acc_norm": 0.335978835978836, + "acc_norm_stderr": 0.024326310529149152 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4861111111111111, + "acc_stderr": 0.04179596617581, + "acc_norm": 0.4861111111111111, + "acc_norm_stderr": 0.04179596617581 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.56, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.56, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.476878612716763, + "acc_stderr": 0.026890297881303125, + "acc_norm": 0.476878612716763, + "acc_norm_stderr": 
0.026890297881303125 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5030674846625767, + "acc_stderr": 0.03928297078179663, + "acc_norm": 0.5030674846625767, + "acc_norm_stderr": 0.03928297078179663 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.027777777777777797, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.027777777777777797 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5854922279792746, + "acc_stderr": 0.03555300319557669, + "acc_norm": 0.5854922279792746, + "acc_norm_stderr": 0.03555300319557669 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.35964912280701755, + "acc_stderr": 0.04514496132873632, + "acc_norm": 0.35964912280701755, + "acc_norm_stderr": 0.04514496132873632 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6165137614678899, + "acc_stderr": 0.020847156641915984, + "acc_norm": 0.6165137614678899, + "acc_norm_stderr": 0.020847156641915984 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.31746031746031744, + "acc_stderr": 0.0416345303130286, + "acc_norm": 0.31746031746031744, + "acc_norm_stderr": 0.0416345303130286 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.46405228758169936, + "acc_stderr": 0.028555827516528787, + "acc_norm": 0.46405228758169936, + "acc_norm_stderr": 0.028555827516528787 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.53, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.53, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6446280991735537, + "acc_stderr": 0.0436923632657398, + "acc_norm": 0.6446280991735537, + "acc_norm_stderr": 0.0436923632657398 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4473684210526316, + "acc_stderr": 0.040463368839782514, + "acc_norm": 
0.4473684210526316, + "acc_norm_stderr": 0.040463368839782514 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4199346405228758, + "acc_stderr": 0.019966811178256477, + "acc_norm": 0.4199346405228758, + "acc_norm_stderr": 0.019966811178256477 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3404255319148936, + "acc_stderr": 0.02826765748265014, + "acc_norm": 0.3404255319148936, + "acc_norm_stderr": 0.02826765748265014 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.38392857142857145, + "acc_stderr": 0.04616143075028545, + "acc_norm": 0.38392857142857145, + "acc_norm_stderr": 0.04616143075028545 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.375, + "acc_stderr": 0.033016908987210894, + "acc_norm": 0.375, + "acc_norm_stderr": 0.033016908987210894 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2636871508379888, + "acc_stderr": 0.014736926383761994, + "acc_norm": 0.2636871508379888, + "acc_norm_stderr": 0.014736926383761994 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.41544117647058826, + "acc_stderr": 0.029935342707877746, + "acc_norm": 0.41544117647058826, + "acc_norm_stderr": 0.029935342707877746 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4448979591836735, + "acc_stderr": 0.031814251181977865, + "acc_norm": 0.4448979591836735, + "acc_norm_stderr": 0.031814251181977865 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5527426160337553, + "acc_stderr": 0.03236564251614192, + "acc_norm": 0.5527426160337553, + "acc_norm_stderr": 0.03236564251614192 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.32985658409387225, 
+ "acc_stderr": 0.012008129938540486, + "acc_norm": 0.32985658409387225, + "acc_norm_stderr": 0.012008129938540486 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5, + "acc_stderr": 0.03509312031717982, + "acc_norm": 0.5, + "acc_norm_stderr": 0.03509312031717982 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5636363636363636, + "acc_stderr": 0.03872592983524754, + "acc_norm": 0.5636363636363636, + "acc_norm_stderr": 0.03872592983524754 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2876376988984088, + "mc1_stderr": 0.015846315101394812, + "mc2": 0.441552259633933, + "mc2_stderr": 0.015274593381980957 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5525383707201889, + "acc_stderr": 0.01709519030150058, + "acc_norm": 0.5608028335301063, + "acc_norm_stderr": 0.017062775744780705 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + 
"harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "ITT-AF/ITT-Yi-Ko-6B-v4.0", + "model_sha": "55bafef5b47043503949a3b663903c58689d444f", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/ITT-AF/ITT-Yi-Ko-6B-v5.0/result_2024-02-29 09:49:15.json 
b/ITT-AF/ITT-Yi-Ko-6B-v5.0/result_2024-02-29 09:49:15.json new file mode 100644 index 0000000000000000000000000000000000000000..40deeb247035a299c9609e9f370cec34f1134ea2 --- /dev/null +++ b/ITT-AF/ITT-Yi-Ko-6B-v5.0/result_2024-02-29 09:49:15.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3703071672354949, + "acc_stderr": 0.01411129875167495, + "acc_norm": 0.4249146757679181, + "acc_norm_stderr": 0.014445698968520772 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4148575980880303, + "acc_stderr": 0.004916905095810844, + "acc_norm": 0.5548695478988249, + "acc_norm_stderr": 0.004959645263390244 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5321637426900585, + "acc_stderr": 0.03826882417660369, + "acc_norm": 0.5321637426900585, + "acc_norm_stderr": 0.03826882417660369 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5922330097087378, + "acc_stderr": 0.04865777570410769, + "acc_norm": 0.5922330097087378, + "acc_norm_stderr": 0.04865777570410769 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5670498084291188, + "acc_stderr": 0.017718469101513985, + "acc_norm": 0.5670498084291188, + "acc_norm_stderr": 0.017718469101513985 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4, + "acc_stderr": 0.04232073695151589, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04232073695151589 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4340425531914894, + "acc_stderr": 0.032400380867927465, + "acc_norm": 0.4340425531914894, + "acc_norm_stderr": 0.032400380867927465 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3795180722891566, + "acc_stderr": 0.037777988227480165, + "acc_norm": 0.3795180722891566, + "acc_norm_stderr": 0.037777988227480165 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5144694533762058, + "acc_stderr": 0.028386198084177687, + "acc_norm": 
0.5144694533762058, + "acc_norm_stderr": 0.028386198084177687 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4484304932735426, + "acc_stderr": 0.033378837362550984, + "acc_norm": 0.4484304932735426, + "acc_norm_stderr": 0.033378837362550984 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5572519083969466, + "acc_stderr": 0.04356447202665069, + "acc_norm": 0.5572519083969466, + "acc_norm_stderr": 0.04356447202665069 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.44, + "acc_stderr": 0.0498887651569859, + "acc_norm": 0.44, + "acc_norm_stderr": 0.0498887651569859 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6313131313131313, + "acc_stderr": 0.034373055019806184, + "acc_norm": 0.6313131313131313, + "acc_norm_stderr": 0.034373055019806184 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5241379310344828, + "acc_stderr": 0.04161808503501531, + "acc_norm": 0.5241379310344828, + "acc_norm_stderr": 0.04161808503501531 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.18627450980392157, + "acc_stderr": 0.038739587141493545, + "acc_norm": 0.18627450980392157, + "acc_norm_stderr": 0.038739587141493545 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4957983193277311, + "acc_stderr": 0.0324773433444811, + "acc_norm": 0.4957983193277311, + "acc_norm_stderr": 0.0324773433444811 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.49230769230769234, + "acc_stderr": 0.025348006031534795, + "acc_norm": 0.49230769230769234, + "acc_norm_stderr": 0.025348006031534795 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.62, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.62, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 
0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.37438423645320196, + "acc_stderr": 0.03405155380561952, + "acc_norm": 0.37438423645320196, + "acc_norm_stderr": 0.03405155380561952 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5096774193548387, + "acc_stderr": 0.02843867799890955, + "acc_norm": 0.5096774193548387, + "acc_norm_stderr": 0.02843867799890955 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6837606837606838, + "acc_stderr": 0.030463656747340254, + "acc_norm": 0.6837606837606838, + "acc_norm_stderr": 0.030463656747340254 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.45660377358490567, + "acc_stderr": 0.03065674869673943, + "acc_norm": 0.45660377358490567, + "acc_norm_stderr": 0.03065674869673943 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.04769300568972744, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.04769300568972744 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3111111111111111, + "acc_stderr": 0.028226446749683515, + "acc_norm": 0.3111111111111111, + "acc_norm_stderr": 0.028226446749683515 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3443708609271523, + "acc_stderr": 0.038796870240733264, + "acc_norm": 0.3443708609271523, + "acc_norm_stderr": 0.038796870240733264 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5920398009950248, + "acc_stderr": 0.03475116365194092, + "acc_norm": 0.5920398009950248, + "acc_norm_stderr": 0.03475116365194092 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3872832369942196, + "acc_stderr": 0.03714325906302065, + "acc_norm": 0.3872832369942196, + "acc_norm_stderr": 0.03714325906302065 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.35978835978835977, + "acc_stderr": 0.024718075944129277, + "acc_norm": 0.35978835978835977, + "acc_norm_stderr": 0.024718075944129277 + }, + "harness|ko_mmlu_college_biology|5": { + 
"acc": 0.4791666666666667, + "acc_stderr": 0.041775789507399935, + "acc_norm": 0.4791666666666667, + "acc_norm_stderr": 0.041775789507399935 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.3, + "acc_stderr": 0.04605661864718381, + "acc_norm": 0.3, + "acc_norm_stderr": 0.04605661864718381 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4624277456647399, + "acc_stderr": 0.026842985519615375, + "acc_norm": 0.4624277456647399, + "acc_norm_stderr": 0.026842985519615375 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.49079754601226994, + "acc_stderr": 0.039277056007874414, + "acc_norm": 0.49079754601226994, + "acc_norm_stderr": 0.039277056007874414 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4876543209876543, + "acc_stderr": 0.027812262269327242, + "acc_norm": 0.4876543209876543, + "acc_norm_stderr": 0.027812262269327242 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5284974093264249, + "acc_stderr": 0.03602573571288442, + "acc_norm": 0.5284974093264249, + "acc_norm_stderr": 0.03602573571288442 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3684210526315789, + "acc_stderr": 0.04537815354939391, + "acc_norm": 0.3684210526315789, + "acc_norm_stderr": 0.04537815354939391 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6073394495412844, + "acc_stderr": 0.020937505161201096, + "acc_norm": 0.6073394495412844, + "acc_norm_stderr": 0.020937505161201096 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30158730158730157, + "acc_stderr": 0.041049472699033945, + "acc_norm": 0.30158730158730157, + "acc_norm_stderr": 0.041049472699033945 + }, + 
"harness|ko_mmlu_nutrition|5": { + "acc": 0.4673202614379085, + "acc_stderr": 0.02856869975222586, + "acc_norm": 0.4673202614379085, + "acc_norm_stderr": 0.02856869975222586 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7024793388429752, + "acc_stderr": 0.04173349148083499, + "acc_norm": 0.7024793388429752, + "acc_norm_stderr": 0.04173349148083499 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4868421052631579, + "acc_stderr": 0.04067533136309174, + "acc_norm": 0.4868421052631579, + "acc_norm_stderr": 0.04067533136309174 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4019607843137255, + "acc_stderr": 0.01983517648437539, + "acc_norm": 0.4019607843137255, + "acc_norm_stderr": 0.01983517648437539 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3262411347517731, + "acc_stderr": 0.027968453043563168, + "acc_norm": 0.3262411347517731, + "acc_norm_stderr": 0.027968453043563168 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3392857142857143, + "acc_stderr": 0.0449394906861354, + "acc_norm": 0.3392857142857143, + "acc_norm_stderr": 0.0449394906861354 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.03256850570293647, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.03256850570293647 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24022346368715083, + "acc_stderr": 0.014288343803925284, + "acc_norm": 0.24022346368715083, + "acc_norm_stderr": 0.014288343803925284 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 
0.049756985195624284 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3897058823529412, + "acc_stderr": 0.02962466358115969, + "acc_norm": 0.3897058823529412, + "acc_norm_stderr": 0.02962466358115969 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.47346938775510206, + "acc_stderr": 0.03196412734523272, + "acc_norm": 0.47346938775510206, + "acc_norm_stderr": 0.03196412734523272 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.540084388185654, + "acc_stderr": 0.03244246810187913, + "acc_norm": 0.540084388185654, + "acc_norm_stderr": 0.03244246810187913 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.32985658409387225, + "acc_stderr": 0.012008129938540479, + "acc_norm": 0.32985658409387225, + "acc_norm_stderr": 0.012008129938540479 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5343137254901961, + "acc_stderr": 0.03501038327635896, + "acc_norm": 0.5343137254901961, + "acc_norm_stderr": 0.03501038327635896 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5212121212121212, + "acc_stderr": 0.03900828913737301, + "acc_norm": 0.5212121212121212, + "acc_norm_stderr": 0.03900828913737301 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3953488372093023, + "mc1_stderr": 0.017115815632418183, + "mc2": 0.5296928164048468, + "mc2_stderr": 0.015182640337382567 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.6009445100354192, + "acc_stderr": 0.016836377292849303, + "acc_norm": 0.6092089728453365, + "acc_norm_stderr": 0.016775298465108255 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + 
"harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + 
"harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "ITT-AF/ITT-Yi-Ko-6B-v5.0", + "model_sha": "a28907704cf3be5eacc96fa75a803bfbe021659d", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/ITT-AF/ITT-Yi-Ko-6B-v6.0/result_2024-03-07 03:47:37.json b/ITT-AF/ITT-Yi-Ko-6B-v6.0/result_2024-03-07 03:47:37.json new file mode 100644 index 0000000000000000000000000000000000000000..ef96e816f5d9a9be3641b1ce9728eb8bc8afff57 --- /dev/null +++ b/ITT-AF/ITT-Yi-Ko-6B-v6.0/result_2024-03-07 03:47:37.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.378839590443686, + "acc_stderr": 0.014175915490000328, + "acc_norm": 0.4351535836177474, + "acc_norm_stderr": 0.014487986197186045 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4155546703843856, + "acc_stderr": 0.004918102168717933, + "acc_norm": 0.5569607647878908, + "acc_norm_stderr": 0.004957296691391572 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5029239766081871, + "acc_stderr": 0.03834759370936839, + "acc_norm": 0.5029239766081871, + "acc_norm_stderr": 0.03834759370936839 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5631067961165048, + "acc_stderr": 0.049111471073657764, + "acc_norm": 0.5631067961165048, + "acc_norm_stderr": 0.049111471073657764 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.565772669220945, + "acc_stderr": 0.01772458938967779, + "acc_norm": 0.565772669220945, + "acc_norm_stderr": 0.01772458938967779 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3851851851851852, + "acc_stderr": 0.042039210401562783, + "acc_norm": 0.3851851851851852, + 
"acc_norm_stderr": 0.042039210401562783 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.425531914893617, + "acc_stderr": 0.03232146916224469, + "acc_norm": 0.425531914893617, + "acc_norm_stderr": 0.03232146916224469 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3855421686746988, + "acc_stderr": 0.03789134424611548, + "acc_norm": 0.3855421686746988, + "acc_norm_stderr": 0.03789134424611548 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.49517684887459806, + "acc_stderr": 0.02839677044411129, + "acc_norm": 0.49517684887459806, + "acc_norm_stderr": 0.02839677044411129 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4618834080717489, + "acc_stderr": 0.033460150119732274, + "acc_norm": 0.4618834080717489, + "acc_norm_stderr": 0.033460150119732274 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5114503816793893, + "acc_stderr": 0.043841400240780176, + "acc_norm": 0.5114503816793893, + "acc_norm_stderr": 0.043841400240780176 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6464646464646465, + "acc_stderr": 0.03406086723547155, + "acc_norm": 0.6464646464646465, + "acc_norm_stderr": 0.03406086723547155 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5379310344827586, + "acc_stderr": 0.04154659671707548, + "acc_norm": 0.5379310344827586, + "acc_norm_stderr": 0.04154659671707548 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.19607843137254902, + "acc_stderr": 0.039505818611799616, + "acc_norm": 0.19607843137254902, + "acc_norm_stderr": 0.039505818611799616 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5084033613445378, + "acc_stderr": 0.03247390276569669, + "acc_norm": 0.5084033613445378, + 
"acc_norm_stderr": 0.03247390276569669 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4641025641025641, + "acc_stderr": 0.025285585990017834, + "acc_norm": 0.4641025641025641, + "acc_norm_stderr": 0.025285585990017834 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.62, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.62, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.43349753694581283, + "acc_stderr": 0.03486731727419872, + "acc_norm": 0.43349753694581283, + "acc_norm_stderr": 0.03486731727419872 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4806451612903226, + "acc_stderr": 0.0284226874043121, + "acc_norm": 0.4806451612903226, + "acc_norm_stderr": 0.0284226874043121 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6965811965811965, + "acc_stderr": 0.030118210106942656, + "acc_norm": 0.6965811965811965, + "acc_norm_stderr": 0.030118210106942656 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4679245283018868, + "acc_stderr": 0.03070948699255655, + "acc_norm": 0.4679245283018868, + "acc_norm_stderr": 0.03070948699255655 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.027840811495871927, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.027840811495871927 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.32450331125827814, + "acc_stderr": 
0.03822746937658752, + "acc_norm": 0.32450331125827814, + "acc_norm_stderr": 0.03822746937658752 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5920398009950248, + "acc_stderr": 0.03475116365194092, + "acc_norm": 0.5920398009950248, + "acc_norm_stderr": 0.03475116365194092 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.41040462427745666, + "acc_stderr": 0.037507570448955384, + "acc_norm": 0.41040462427745666, + "acc_norm_stderr": 0.037507570448955384 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.32275132275132273, + "acc_stderr": 0.024078943243597016, + "acc_norm": 0.32275132275132273, + "acc_norm_stderr": 0.024078943243597016 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4652777777777778, + "acc_stderr": 0.041711158581816184, + "acc_norm": 0.4652777777777778, + "acc_norm_stderr": 0.041711158581816184 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.476878612716763, + "acc_stderr": 0.026890297881303118, + "acc_norm": 0.476878612716763, + "acc_norm_stderr": 0.026890297881303118 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4723926380368098, + "acc_stderr": 0.03922378290610988, + "acc_norm": 0.4723926380368098, + "acc_norm_stderr": 0.03922378290610988 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.49382716049382713, + "acc_stderr": 0.027818623962583302, + "acc_norm": 0.49382716049382713, + "acc_norm_stderr": 0.027818623962583302 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5751295336787565, 
+ "acc_stderr": 0.0356747133521254, + "acc_norm": 0.5751295336787565, + "acc_norm_stderr": 0.0356747133521254 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.34210526315789475, + "acc_stderr": 0.04462917535336937, + "acc_norm": 0.34210526315789475, + "acc_norm_stderr": 0.04462917535336937 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6165137614678899, + "acc_stderr": 0.02084715664191598, + "acc_norm": 0.6165137614678899, + "acc_norm_stderr": 0.02084715664191598 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.29365079365079366, + "acc_stderr": 0.04073524322147124, + "acc_norm": 0.29365079365079366, + "acc_norm_stderr": 0.04073524322147124 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4803921568627451, + "acc_stderr": 0.028607893699576063, + "acc_norm": 0.4803921568627451, + "acc_norm_stderr": 0.028607893699576063 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6033057851239669, + "acc_stderr": 0.044658697805310094, + "acc_norm": 0.6033057851239669, + "acc_norm_stderr": 0.044658697805310094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.46710526315789475, + "acc_stderr": 0.040601270352363966, + "acc_norm": 0.46710526315789475, + "acc_norm_stderr": 0.040601270352363966 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4297385620915033, + "acc_stderr": 0.020027122784928554, + "acc_norm": 0.4297385620915033, + "acc_norm_stderr": 0.020027122784928554 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.02812163604063988, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.02812163604063988 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3125, + "acc_stderr": 0.043994650575715215, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.043994650575715215 + }, + 
"harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.36574074074074076, + "acc_stderr": 0.03284738857647206, + "acc_norm": 0.36574074074074076, + "acc_norm_stderr": 0.03284738857647206 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2245810055865922, + "acc_stderr": 0.013956803666544641, + "acc_norm": 0.2245810055865922, + "acc_norm_stderr": 0.013956803666544641 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.40441176470588236, + "acc_stderr": 0.02981263070156974, + "acc_norm": 0.40441176470588236, + "acc_norm_stderr": 0.02981263070156974 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.49795918367346936, + "acc_stderr": 0.0320089533497105, + "acc_norm": 0.49795918367346936, + "acc_norm_stderr": 0.0320089533497105 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5949367088607594, + "acc_stderr": 0.03195514741370672, + "acc_norm": 0.5949367088607594, + "acc_norm_stderr": 0.03195514741370672 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3389830508474576, + "acc_stderr": 0.01208994185758447, + "acc_norm": 0.3389830508474576, + "acc_norm_stderr": 0.01208994185758447 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.47058823529411764, + "acc_stderr": 0.03503235296367992, + "acc_norm": 0.47058823529411764, + "acc_norm_stderr": 0.03503235296367992 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6, + "acc_stderr": 0.03825460278380026, + "acc_norm": 0.6, + "acc_norm_stderr": 0.03825460278380026 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.401468788249694, + "mc1_stderr": 0.01716027390169365, + "mc2": 0.5377364375024334, + 
"mc2_stderr": 0.015575357735285155 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5714285714285714, + "acc_stderr": 0.01701403811929749, + "acc_norm": 0.577331759149941, + "acc_norm_stderr": 0.016983506079577607 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + 
"harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "ITT-AF/ITT-Yi-Ko-6B-v6.0", + "model_sha": "24e9e905be8917263a9ea2a4e6ff193b5635800c", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Intel/neural-chat-7b-v3-1/result_2023-11-28 00:35:05.json b/Intel/neural-chat-7b-v3-1/result_2023-11-28 00:35:05.json new file mode 100644 index 0000000000000000000000000000000000000000..ee680139612ff97bebb80c8bce408247f12791ae --- /dev/null +++ b/Intel/neural-chat-7b-v3-1/result_2023-11-28 00:35:05.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3447098976109215, + "acc_stderr": 0.01388881628678211, + "acc_norm": 0.38993174061433444, + "acc_norm_stderr": 0.014252959848892896 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3745269866560446, + "acc_stderr": 0.004830113797327052, + "acc_norm": 0.47998406691894047, + "acc_norm_stderr": 0.00498578162046701 + }, + 
"harness|ko_mmlu_world_religions|5": { + "acc": 0.47368421052631576, + "acc_stderr": 0.038295098689947286, + "acc_norm": 0.47368421052631576, + "acc_norm_stderr": 0.038295098689947286 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5339805825242718, + "acc_stderr": 0.04939291447273482, + "acc_norm": 0.5339805825242718, + "acc_norm_stderr": 0.04939291447273482 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.46871008939974457, + "acc_stderr": 0.017844918090468537, + "acc_norm": 0.46871008939974457, + "acc_norm_stderr": 0.017844918090468537 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.04171654161354543, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.04171654161354543 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3829787234042553, + "acc_stderr": 0.03177821250236922, + "acc_norm": 0.3829787234042553, + "acc_norm_stderr": 0.03177821250236922 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4036144578313253, + "acc_stderr": 0.03819486140758397, + "acc_norm": 0.4036144578313253, + "acc_norm_stderr": 0.03819486140758397 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4405144694533762, + "acc_stderr": 0.028196400574197426, + "acc_norm": 0.4405144694533762, + "acc_norm_stderr": 0.028196400574197426 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4618834080717489, + "acc_stderr": 0.03346015011973228, + "acc_norm": 0.4618834080717489, + "acc_norm_stderr": 0.03346015011973228 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.44274809160305345, + "acc_stderr": 0.043564472026650695, + "acc_norm": 0.44274809160305345, + "acc_norm_stderr": 0.043564472026650695 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + 
"harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5252525252525253, + "acc_stderr": 0.03557806245087314, + "acc_norm": 0.5252525252525253, + "acc_norm_stderr": 0.03557806245087314 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.45517241379310347, + "acc_stderr": 0.04149886942192117, + "acc_norm": 0.45517241379310347, + "acc_norm_stderr": 0.04149886942192117 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.04023382273617746, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.04023382273617746 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.48739495798319327, + "acc_stderr": 0.03246816765752174, + "acc_norm": 0.48739495798319327, + "acc_norm_stderr": 0.03246816765752174 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4641025641025641, + "acc_stderr": 0.025285585990017834, + "acc_norm": 0.4641025641025641, + "acc_norm_stderr": 0.025285585990017834 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5370370370370371, + "acc_stderr": 0.04820403072760627, + "acc_norm": 0.5370370370370371, + "acc_norm_stderr": 0.04820403072760627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.35467980295566504, + "acc_stderr": 0.0336612448905145, + "acc_norm": 0.35467980295566504, + "acc_norm_stderr": 0.0336612448905145 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4096774193548387, + "acc_stderr": 0.027976054915347354, + "acc_norm": 0.4096774193548387, + "acc_norm_stderr": 0.027976054915347354 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7606837606837606, + "acc_stderr": 0.027951826808924336, + "acc_norm": 0.7606837606837606, 
+ "acc_norm_stderr": 0.027951826808924336 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4641509433962264, + "acc_stderr": 0.030693675018458003, + "acc_norm": 0.4641509433962264, + "acc_norm_stderr": 0.030693675018458003 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4636363636363636, + "acc_stderr": 0.04776449162396197, + "acc_norm": 0.4636363636363636, + "acc_norm_stderr": 0.04776449162396197 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.34814814814814815, + "acc_stderr": 0.029045600290616255, + "acc_norm": 0.34814814814814815, + "acc_norm_stderr": 0.029045600290616255 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.03757949922943342, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.03757949922943342 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6119402985074627, + "acc_stderr": 0.0344578996436275, + "acc_norm": 0.6119402985074627, + "acc_norm_stderr": 0.0344578996436275 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.41040462427745666, + "acc_stderr": 0.03750757044895537, + "acc_norm": 0.41040462427745666, + "acc_norm_stderr": 0.03750757044895537 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.335978835978836, + "acc_stderr": 0.024326310529149138, + "acc_norm": 0.335978835978836, + "acc_norm_stderr": 0.024326310529149138 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.04076663253918567, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.04076663253918567 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.45375722543352603, + "acc_stderr": 0.02680372058320619, + 
"acc_norm": 0.45375722543352603, + "acc_norm_stderr": 0.02680372058320619 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4539877300613497, + "acc_stderr": 0.0391170190467718, + "acc_norm": 0.4539877300613497, + "acc_norm_stderr": 0.0391170190467718 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4783950617283951, + "acc_stderr": 0.027794760105008746, + "acc_norm": 0.4783950617283951, + "acc_norm_stderr": 0.027794760105008746 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5129533678756477, + "acc_stderr": 0.0360722806104775, + "acc_norm": 0.5129533678756477, + "acc_norm_stderr": 0.0360722806104775 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.0404933929774814, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.0404933929774814 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.4917431192660551, + "acc_stderr": 0.021434399918214334, + "acc_norm": 0.4917431192660551, + "acc_norm_stderr": 0.021434399918214334 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4365079365079365, + "acc_stderr": 0.04435932892851466, + "acc_norm": 0.4365079365079365, + "acc_norm_stderr": 0.04435932892851466 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4477124183006536, + "acc_stderr": 0.028472938478033526, + "acc_norm": 0.4477124183006536, + "acc_norm_stderr": 0.028472938478033526 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.44, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.44, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6446280991735537, + "acc_stderr": 0.0436923632657398, + "acc_norm": 0.6446280991735537, + "acc_norm_stderr": 0.0436923632657398 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40131578947368424, + "acc_stderr": 
0.03988903703336285, + "acc_norm": 0.40131578947368424, + "acc_norm_stderr": 0.03988903703336285 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.36437908496732024, + "acc_stderr": 0.019469518221573695, + "acc_norm": 0.36437908496732024, + "acc_norm_stderr": 0.019469518221573695 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.35106382978723405, + "acc_stderr": 0.02847350127296376, + "acc_norm": 0.35106382978723405, + "acc_norm_stderr": 0.02847350127296376 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.44642857142857145, + "acc_stderr": 0.047184714852195886, + "acc_norm": 0.44642857142857145, + "acc_norm_stderr": 0.047184714852195886 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.03167468706828979, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.03167468706828979 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27932960893854747, + "acc_stderr": 0.015005762446786171, + "acc_norm": 0.27932960893854747, + "acc_norm_stderr": 0.015005762446786171 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3602941176470588, + "acc_stderr": 0.029163128570670733, + "acc_norm": 0.3602941176470588, + "acc_norm_stderr": 0.029163128570670733 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.49387755102040815, + "acc_stderr": 0.03200682020163909, + "acc_norm": 0.49387755102040815, + "acc_norm_stderr": 0.03200682020163909 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5949367088607594, + "acc_stderr": 0.031955147413706725, + "acc_norm": 0.5949367088607594, + "acc_norm_stderr": 0.031955147413706725 + }, 
+ "harness|ko_mmlu_professional_law|5": { + "acc": 0.3344198174706649, + "acc_stderr": 0.012049668983214938, + "acc_norm": 0.3344198174706649, + "acc_norm_stderr": 0.012049668983214938 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4068627450980392, + "acc_stderr": 0.03447891136353382, + "acc_norm": 0.4068627450980392, + "acc_norm_stderr": 0.03447891136353382 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3696969696969697, + "acc_stderr": 0.037694303145125674, + "acc_norm": 0.3696969696969697, + "acc_norm_stderr": 0.037694303145125674 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3708690330477356, + "mc1_stderr": 0.016909693580248807, + "mc2": 0.5496429922579386, + "mc2_stderr": 0.016045283495853307 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4167650531286895, + "acc_stderr": 0.016950489146108826, + "acc_norm": 0.4380165289256198, + "acc_norm_stderr": 0.017057753702160283 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + 
"harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Intel/neural-chat-7b-v3-1", + "model_sha": "c70aa428800d151f4eae2b6d4b6a08c773868987", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git 
a/JONGYUN/DPO_Test_2/result_2024-02-21 04:41:16.json b/JONGYUN/DPO_Test_2/result_2024-02-21 04:41:16.json new file mode 100644 index 0000000000000000000000000000000000000000..c47b7abd746b05c46eb6c79f0890688c9b18c240 --- /dev/null +++ b/JONGYUN/DPO_Test_2/result_2024-02-21 04:41:16.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.46928327645051193, + "acc_stderr": 0.014583792546304037, + "acc_norm": 0.5255972696245734, + "acc_norm_stderr": 0.014592230885298967 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4645488946425015, + "acc_stderr": 0.0049772234853420316, + "acc_norm": 0.6356303525194185, + "acc_norm_stderr": 0.00480269410620365 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6432748538011696, + "acc_stderr": 0.03674013002860954, + "acc_norm": 0.6432748538011696, + "acc_norm_stderr": 0.03674013002860954 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5922330097087378, + "acc_stderr": 0.048657775704107696, + "acc_norm": 0.5922330097087378, + "acc_norm_stderr": 0.048657775704107696 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.665389527458493, + "acc_stderr": 0.01687346864159216, + "acc_norm": 0.665389527458493, + "acc_norm_stderr": 0.01687346864159216 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45925925925925926, + "acc_stderr": 0.04304979692464245, + "acc_norm": 0.45925925925925926, + "acc_norm_stderr": 0.04304979692464245 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252603, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252603 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4978723404255319, + "acc_stderr": 0.032685726586674915, + "acc_norm": 0.4978723404255319, + "acc_norm_stderr": 0.032685726586674915 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4819277108433735, + "acc_stderr": 0.03889951252827216, + "acc_norm": 0.4819277108433735, + "acc_norm_stderr": 0.03889951252827216 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 
0.6463022508038585, + "acc_stderr": 0.02715520810320088, + "acc_norm": 0.6463022508038585, + "acc_norm_stderr": 0.02715520810320088 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.6233183856502242, + "acc_stderr": 0.032521134899291884, + "acc_norm": 0.6233183856502242, + "acc_norm_stderr": 0.032521134899291884 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6564885496183206, + "acc_stderr": 0.041649760719448786, + "acc_norm": 0.6564885496183206, + "acc_norm_stderr": 0.041649760719448786 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7171717171717171, + "acc_stderr": 0.03208779558786752, + "acc_norm": 0.7171717171717171, + "acc_norm_stderr": 0.03208779558786752 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5103448275862069, + "acc_stderr": 0.04165774775728763, + "acc_norm": 0.5103448275862069, + "acc_norm_stderr": 0.04165774775728763 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.30392156862745096, + "acc_stderr": 0.045766654032077615, + "acc_norm": 0.30392156862745096, + "acc_norm_stderr": 0.045766654032077615 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5882352941176471, + "acc_stderr": 0.031968769891957786, + "acc_norm": 0.5882352941176471, + "acc_norm_stderr": 0.031968769891957786 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5102564102564102, + "acc_stderr": 0.025345672221942374, + "acc_norm": 0.5102564102564102, + "acc_norm_stderr": 0.025345672221942374 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.64, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.64, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + 
"harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.04557239513497751, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.04557239513497751 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4433497536945813, + "acc_stderr": 0.03495334582162934, + "acc_norm": 0.4433497536945813, + "acc_norm_stderr": 0.03495334582162934 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5741935483870968, + "acc_stderr": 0.028129112709165904, + "acc_norm": 0.5741935483870968, + "acc_norm_stderr": 0.028129112709165904 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.8076923076923077, + "acc_stderr": 0.025819233256483706, + "acc_norm": 0.8076923076923077, + "acc_norm_stderr": 0.025819233256483706 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5283018867924528, + "acc_stderr": 0.030723535249006107, + "acc_norm": 0.5283018867924528, + "acc_norm_stderr": 0.030723535249006107 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6272727272727273, + "acc_stderr": 0.04631381319425465, + "acc_norm": 0.6272727272727273, + "acc_norm_stderr": 0.04631381319425465 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3037037037037037, + "acc_stderr": 0.028037929969114986, + "acc_norm": 0.3037037037037037, + "acc_norm_stderr": 0.028037929969114986 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33774834437086093, + "acc_stderr": 0.038615575462551684, + "acc_norm": 0.33774834437086093, + "acc_norm_stderr": 0.038615575462551684 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7263681592039801, + "acc_stderr": 0.03152439186555404, + "acc_norm": 0.7263681592039801, + "acc_norm_stderr": 0.03152439186555404 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5028901734104047, + "acc_stderr": 0.038124005659748335, + "acc_norm": 0.5028901734104047, + "acc_norm_stderr": 0.038124005659748335 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.41534391534391535, + "acc_stderr": 
0.025379524910778405, + "acc_norm": 0.41534391534391535, + "acc_norm_stderr": 0.025379524910778405 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5347222222222222, + "acc_stderr": 0.04171115858181618, + "acc_norm": 0.5347222222222222, + "acc_norm_stderr": 0.04171115858181618 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.75, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.75, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.615606936416185, + "acc_stderr": 0.026189666966272035, + "acc_norm": 0.615606936416185, + "acc_norm_stderr": 0.026189666966272035 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5398773006134969, + "acc_stderr": 0.03915857291436972, + "acc_norm": 0.5398773006134969, + "acc_norm_stderr": 0.03915857291436972 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6141975308641975, + "acc_stderr": 0.027085401226132143, + "acc_norm": 0.6141975308641975, + "acc_norm_stderr": 0.027085401226132143 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6994818652849741, + "acc_stderr": 0.0330881859441575, + "acc_norm": 0.6994818652849741, + "acc_norm_stderr": 0.0330881859441575 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.43859649122807015, + "acc_stderr": 0.04668000738510455, + "acc_norm": 0.43859649122807015, + "acc_norm_stderr": 0.04668000738510455 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6752293577981652, + "acc_stderr": 0.020077729109310324, + "acc_norm": 0.6752293577981652, + "acc_norm_stderr": 0.020077729109310324 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3888888888888889, + 
"acc_stderr": 0.04360314860077459, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.04360314860077459 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5980392156862745, + "acc_stderr": 0.02807415894760066, + "acc_norm": 0.5980392156862745, + "acc_norm_stderr": 0.02807415894760066 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.61, + "acc_stderr": 0.049020713000019756, + "acc_norm": 0.61, + "acc_norm_stderr": 0.049020713000019756 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7272727272727273, + "acc_stderr": 0.040655781409087044, + "acc_norm": 0.7272727272727273, + "acc_norm_stderr": 0.040655781409087044 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5855263157894737, + "acc_stderr": 0.04008973785779205, + "acc_norm": 0.5855263157894737, + "acc_norm_stderr": 0.04008973785779205 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5294117647058824, + "acc_stderr": 0.020192808271433788, + "acc_norm": 0.5294117647058824, + "acc_norm_stderr": 0.020192808271433788 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.39361702127659576, + "acc_stderr": 0.02914454478159614, + "acc_norm": 0.39361702127659576, + "acc_norm_stderr": 0.02914454478159614 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3482142857142857, + "acc_stderr": 0.04521829902833586, + "acc_norm": 0.3482142857142857, + "acc_norm_stderr": 0.04521829902833586 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4537037037037037, + "acc_stderr": 0.03395322726375797, + "acc_norm": 0.4537037037037037, + "acc_norm_stderr": 0.03395322726375797 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2245810055865922, + "acc_stderr": 0.013956803666544637, + "acc_norm": 0.2245810055865922, + "acc_norm_stderr": 0.013956803666544637 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + 
"harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.7, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.7, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4632352941176471, + "acc_stderr": 0.03029061918048569, + "acc_norm": 0.4632352941176471, + "acc_norm_stderr": 0.03029061918048569 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.689795918367347, + "acc_stderr": 0.02961345987248438, + "acc_norm": 0.689795918367347, + "acc_norm_stderr": 0.02961345987248438 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7679324894514767, + "acc_stderr": 0.027479744550808507, + "acc_norm": 0.7679324894514767, + "acc_norm_stderr": 0.027479744550808507 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.39765319426336376, + "acc_stderr": 0.012499840347460643, + "acc_norm": 0.39765319426336376, + "acc_norm_stderr": 0.012499840347460643 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.7303921568627451, + "acc_stderr": 0.03114557065948678, + "acc_norm": 0.7303921568627451, + "acc_norm_stderr": 0.03114557065948678 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.7151515151515152, + "acc_stderr": 0.03524390844511781, + "acc_norm": 0.7151515151515152, + "acc_norm_stderr": 0.03524390844511781 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3329253365973072, + "mc1_stderr": 0.016497402382012052, + "mc2": 0.4947134696523839, + "mc2_stderr": 0.01547234495228754 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5997638724911453, + "acc_stderr": 0.016844693510505035, + "acc_norm": 0.6186540731995277, + "acc_norm_stderr": 0.016699301768828077 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + 
"harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + 
"harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "JONGYUN/DPO_Test_2", + "model_sha": "d7cabe3ab37f15fe28f43bac6c63b94f4da561e8", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/JY623/KoSOLAR-10.7B-merge-v2.0/result_2024-04-03 09:00:33.json b/JY623/KoSOLAR-10.7B-merge-v2.0/result_2024-04-03 09:00:33.json new file mode 100644 index 0000000000000000000000000000000000000000..190054353791797c8ee0e5c3e583e851498f3272 --- /dev/null +++ b/JY623/KoSOLAR-10.7B-merge-v2.0/result_2024-04-03 09:00:33.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.697098976109215, + "acc_stderr": 0.013428241573185349, + "acc_norm": 0.7482935153583617, + "acc_norm_stderr": 0.012682496334042963 + }, + "harness|ko_hellaswag|10": { + "acc": 0.5962955586536547, + "acc_stderr": 0.004896368185765242, + "acc_norm": 0.7506472814180443, + "acc_norm_stderr": 0.004317541575275725 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.7251461988304093, + "acc_stderr": 0.034240429246915824, + "acc_norm": 0.7251461988304093, + "acc_norm_stderr": 0.034240429246915824 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.7766990291262136, + "acc_stderr": 0.04123553189891431, + "acc_norm": 0.7766990291262136, + "acc_norm_stderr": 0.04123553189891431 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.698595146871009, + "acc_stderr": 0.016409091097268794, + "acc_norm": 0.698595146871009, + "acc_norm_stderr": 
0.016409091097268794 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.043163785995113245, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.043163785995113245 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.5063829787234042, + "acc_stderr": 0.032683358999363345, + "acc_norm": 0.5063829787234042, + "acc_norm_stderr": 0.032683358999363345 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4939759036144578, + "acc_stderr": 0.03892212195333047, + "acc_norm": 0.4939759036144578, + "acc_norm_stderr": 0.03892212195333047 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6334405144694534, + "acc_stderr": 0.02736807824397164, + "acc_norm": 0.6334405144694534, + "acc_norm_stderr": 0.02736807824397164 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.6457399103139013, + "acc_stderr": 0.032100621541349864, + "acc_norm": 0.6457399103139013, + "acc_norm_stderr": 0.032100621541349864 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5801526717557252, + "acc_stderr": 0.04328577215262972, + "acc_norm": 0.5801526717557252, + "acc_norm_stderr": 0.04328577215262972 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7474747474747475, + "acc_stderr": 0.030954055470365907, + "acc_norm": 0.7474747474747475, + "acc_norm_stderr": 0.030954055470365907 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5448275862068965, + "acc_stderr": 0.04149886942192118, + "acc_norm": 0.5448275862068965, + "acc_norm_stderr": 0.04149886942192118 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3235294117647059, + "acc_stderr": 0.046550104113196177, + "acc_norm": 0.3235294117647059, + 
"acc_norm_stderr": 0.046550104113196177 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6134453781512605, + "acc_stderr": 0.03163145807552378, + "acc_norm": 0.6134453781512605, + "acc_norm_stderr": 0.03163145807552378 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.6384615384615384, + "acc_stderr": 0.024359581465397007, + "acc_norm": 0.6384615384615384, + "acc_norm_stderr": 0.024359581465397007 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.7, + "acc_stderr": 0.04605661864718381, + "acc_norm": 0.7, + "acc_norm_stderr": 0.04605661864718381 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6851851851851852, + "acc_stderr": 0.04489931073591312, + "acc_norm": 0.6851851851851852, + "acc_norm_stderr": 0.04489931073591312 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4433497536945813, + "acc_stderr": 0.03495334582162934, + "acc_norm": 0.4433497536945813, + "acc_norm_stderr": 0.03495334582162934 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.603225806451613, + "acc_stderr": 0.027831231605767948, + "acc_norm": 0.603225806451613, + "acc_norm_stderr": 0.027831231605767948 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.8376068376068376, + "acc_stderr": 0.024161618127987745, + "acc_norm": 0.8376068376068376, + "acc_norm_stderr": 0.024161618127987745 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5924528301886792, + "acc_stderr": 0.030242233800854494, + "acc_norm": 0.5924528301886792, + "acc_norm_stderr": 0.030242233800854494 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6272727272727273, + "acc_stderr": 0.04631381319425465, + "acc_norm": 0.6272727272727273, + "acc_norm_stderr": 0.04631381319425465 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3888888888888889, + "acc_stderr": 
0.029723278961476668, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.029723278961476668 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3443708609271523, + "acc_stderr": 0.038796870240733264, + "acc_norm": 0.3443708609271523, + "acc_norm_stderr": 0.038796870240733264 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7263681592039801, + "acc_stderr": 0.03152439186555402, + "acc_norm": 0.7263681592039801, + "acc_norm_stderr": 0.03152439186555402 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5375722543352601, + "acc_stderr": 0.03801685104524458, + "acc_norm": 0.5375722543352601, + "acc_norm_stderr": 0.03801685104524458 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.4523809523809524, + "acc_stderr": 0.025634258115554965, + "acc_norm": 0.4523809523809524, + "acc_norm_stderr": 0.025634258115554965 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.6111111111111112, + "acc_stderr": 0.04076663253918567, + "acc_norm": 0.6111111111111112, + "acc_norm_stderr": 0.04076663253918567 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.74, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.74, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.6011560693641619, + "acc_stderr": 0.02636243757454654, + "acc_norm": 0.6011560693641619, + "acc_norm_stderr": 0.02636243757454654 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5705521472392638, + "acc_stderr": 0.03889066619112722, + "acc_norm": 0.5705521472392638, + "acc_norm_stderr": 0.03889066619112722 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6697530864197531, + "acc_stderr": 0.026168298456732846, + "acc_norm": 0.6697530864197531, + "acc_norm_stderr": 0.026168298456732846 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.36, + 
"acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7461139896373057, + "acc_stderr": 0.03141024780565318, + "acc_norm": 0.7461139896373057, + "acc_norm_stderr": 0.03141024780565318 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.43859649122807015, + "acc_stderr": 0.04668000738510455, + "acc_norm": 0.43859649122807015, + "acc_norm_stderr": 0.04668000738510455 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.7321100917431193, + "acc_stderr": 0.018987462257978652, + "acc_norm": 0.7321100917431193, + "acc_norm_stderr": 0.018987462257978652 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4126984126984127, + "acc_stderr": 0.04403438954768177, + "acc_norm": 0.4126984126984127, + "acc_norm_stderr": 0.04403438954768177 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.6241830065359477, + "acc_stderr": 0.027732834353363947, + "acc_norm": 0.6241830065359477, + "acc_norm_stderr": 0.027732834353363947 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.7, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.7, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7355371900826446, + "acc_stderr": 0.04026187527591207, + "acc_norm": 0.7355371900826446, + "acc_norm_stderr": 0.04026187527591207 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.6513157894736842, + "acc_stderr": 0.03878139888797611, + "acc_norm": 0.6513157894736842, + "acc_norm_stderr": 0.03878139888797611 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5849673202614379, + "acc_stderr": 0.01993362777685742, + "acc_norm": 0.5849673202614379, + "acc_norm_stderr": 0.01993362777685742 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.4397163120567376, + "acc_stderr": 0.029609912075594113, + "acc_norm": 0.4397163120567376, + "acc_norm_stderr": 0.029609912075594113 + }, + 
"harness|ko_mmlu_machine_learning|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.04697113923010213, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.04697113923010213 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.0340763209385405, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.0340763209385405 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.4033519553072626, + "acc_stderr": 0.016407123032195246, + "acc_norm": 0.4033519553072626, + "acc_norm_stderr": 0.016407123032195246 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.68, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.68, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5257352941176471, + "acc_stderr": 0.030332578094555026, + "acc_norm": 0.5257352941176471, + "acc_norm_stderr": 0.030332578094555026 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.689795918367347, + "acc_stderr": 0.02961345987248438, + "acc_norm": 0.689795918367347, + "acc_norm_stderr": 0.02961345987248438 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.729957805907173, + "acc_stderr": 0.028900721906293426, + "acc_norm": 0.729957805907173, + "acc_norm_stderr": 0.028900721906293426 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.4517601043024772, + "acc_stderr": 0.012710662233660247, + "acc_norm": 0.4517601043024772, + "acc_norm_stderr": 0.012710662233660247 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6715686274509803, + "acc_stderr": 0.032962451101722294, + "acc_norm": 0.6715686274509803, + "acc_norm_stderr": 0.032962451101722294 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6121212121212121, + "acc_stderr": 0.0380491365397101, + 
"acc_norm": 0.6121212121212121, + "acc_norm_stderr": 0.0380491365397101 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.7221542227662179, + "mc1_stderr": 0.015680929364024664, + "mc2": 0.8117453553489173, + "mc2_stderr": 0.01295465373920051 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.564344746162928, + "acc_stderr": 0.017047415229476327, + "acc_norm": 0.5820543093270366, + "acc_norm_stderr": 0.01695729200527971 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + 
"harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "JY623/KoSOLAR-10.7B-merge-v2.0", + "model_sha": "2b54466381de31e8945204dfe3bd6c0642cf9ce5", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/JY623/KoSOLAR-10.7B-merge-v3.0/result_2024-04-15 09:49:45.json b/JY623/KoSOLAR-10.7B-merge-v3.0/result_2024-04-15 09:49:45.json new file mode 100644 index 0000000000000000000000000000000000000000..ca7d121d2d672a85822d7e953459c075473df711 --- /dev/null +++ b/JY623/KoSOLAR-10.7B-merge-v3.0/result_2024-04-15 09:49:45.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.48293515358361777, + "acc_stderr": 0.014602878388536598, + "acc_norm": 0.5460750853242321, + "acc_norm_stderr": 
0.014549221105171864 + }, + "harness|ko_hellaswag|10": { + "acc": 0.46644094801832303, + "acc_stderr": 0.004978529642140938, + "acc_norm": 0.6319458275243975, + "acc_norm_stderr": 0.00481290527906644 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6783625730994152, + "acc_stderr": 0.03582529442573122, + "acc_norm": 0.6783625730994152, + "acc_norm_stderr": 0.03582529442573122 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6893203883495146, + "acc_stderr": 0.04582124160161551, + "acc_norm": 0.6893203883495146, + "acc_norm_stderr": 0.04582124160161551 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.7151979565772669, + "acc_stderr": 0.016139174096522584, + "acc_norm": 0.7151979565772669, + "acc_norm_stderr": 0.016139174096522584 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4962962962962963, + "acc_stderr": 0.043192236258113303, + "acc_norm": 0.4962962962962963, + "acc_norm_stderr": 0.043192236258113303 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.574468085106383, + "acc_stderr": 0.032321469162244695, + "acc_norm": 0.574468085106383, + "acc_norm_stderr": 0.032321469162244695 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.5180722891566265, + "acc_stderr": 0.03889951252827216, + "acc_norm": 0.5180722891566265, + "acc_norm_stderr": 0.03889951252827216 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6430868167202572, + "acc_stderr": 0.027210420375934023, + "acc_norm": 0.6430868167202572, + "acc_norm_stderr": 0.027210420375934023 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.6278026905829597, + "acc_stderr": 0.03244305283008731, + "acc_norm": 0.6278026905829597, + "acc_norm_stderr": 0.03244305283008731 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6335877862595419, + "acc_stderr": 0.04225875451969639, + "acc_norm": 0.6335877862595419, + 
"acc_norm_stderr": 0.04225875451969639 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7777777777777778, + "acc_stderr": 0.029620227874790486, + "acc_norm": 0.7777777777777778, + "acc_norm_stderr": 0.029620227874790486 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.503448275862069, + "acc_stderr": 0.04166567577101579, + "acc_norm": 0.503448275862069, + "acc_norm_stderr": 0.04166567577101579 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.04617034827006716, + "acc_norm": 0.3137254901960784, + "acc_norm_stderr": 0.04617034827006716 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6428571428571429, + "acc_stderr": 0.031124619309328177, + "acc_norm": 0.6428571428571429, + "acc_norm_stderr": 0.031124619309328177 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5871794871794872, + "acc_stderr": 0.02496268356433178, + "acc_norm": 0.5871794871794872, + "acc_norm_stderr": 0.02496268356433178 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.58, + "acc_stderr": 0.04960449637488583, + "acc_norm": 0.58, + "acc_norm_stderr": 0.04960449637488583 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6388888888888888, + "acc_stderr": 0.04643454608906275, + "acc_norm": 0.6388888888888888, + "acc_norm_stderr": 0.04643454608906275 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.03465304488406795, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.03465304488406795 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.667741935483871, + "acc_stderr": 0.02679556084812279, + "acc_norm": 
0.667741935483871, + "acc_norm_stderr": 0.02679556084812279 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.8205128205128205, + "acc_stderr": 0.025140935950335435, + "acc_norm": 0.8205128205128205, + "acc_norm_stderr": 0.025140935950335435 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5773584905660377, + "acc_stderr": 0.03040233144576954, + "acc_norm": 0.5773584905660377, + "acc_norm_stderr": 0.03040233144576954 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.046075820907199756, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.046075820907199756 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.028317533496066485, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.028317533496066485 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.37748344370860926, + "acc_stderr": 0.0395802723112157, + "acc_norm": 0.37748344370860926, + "acc_norm_stderr": 0.0395802723112157 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7611940298507462, + "acc_stderr": 0.03014777593540922, + "acc_norm": 0.7611940298507462, + "acc_norm_stderr": 0.03014777593540922 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5433526011560693, + "acc_stderr": 0.03798106566014498, + "acc_norm": 0.5433526011560693, + "acc_norm_stderr": 0.03798106566014498 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.4126984126984127, + "acc_stderr": 0.02535574126305526, + "acc_norm": 0.4126984126984127, + "acc_norm_stderr": 0.02535574126305526 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5902777777777778, + "acc_stderr": 0.04112490974670787, + "acc_norm": 0.5902777777777778, + "acc_norm_stderr": 0.04112490974670787 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.75, + 
"acc_stderr": 0.04351941398892446, + "acc_norm": 0.75, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.6098265895953757, + "acc_stderr": 0.026261677607806646, + "acc_norm": 0.6098265895953757, + "acc_norm_stderr": 0.026261677607806646 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.6134969325153374, + "acc_stderr": 0.03825825548848607, + "acc_norm": 0.6134969325153374, + "acc_norm_stderr": 0.03825825548848607 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6604938271604939, + "acc_stderr": 0.026348564412011628, + "acc_norm": 0.6604938271604939, + "acc_norm_stderr": 0.026348564412011628 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.772020725388601, + "acc_stderr": 0.03027690994517826, + "acc_norm": 0.772020725388601, + "acc_norm_stderr": 0.03027690994517826 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.4824561403508772, + "acc_stderr": 0.04700708033551038, + "acc_norm": 0.4824561403508772, + "acc_norm_stderr": 0.04700708033551038 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.7412844036697248, + "acc_stderr": 0.018776052319619624, + "acc_norm": 0.7412844036697248, + "acc_norm_stderr": 0.018776052319619624 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4365079365079365, + "acc_stderr": 0.04435932892851466, + "acc_norm": 0.4365079365079365, + "acc_norm_stderr": 0.04435932892851466 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.6405228758169934, + "acc_stderr": 0.027475969910660952, + "acc_norm": 0.6405228758169934, + "acc_norm_stderr": 0.027475969910660952 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.64, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.64, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 
0.7107438016528925, + "acc_stderr": 0.041391127276354626, + "acc_norm": 0.7107438016528925, + "acc_norm_stderr": 0.041391127276354626 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.6118421052631579, + "acc_stderr": 0.03965842097512744, + "acc_norm": 0.6118421052631579, + "acc_norm_stderr": 0.03965842097512744 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5408496732026143, + "acc_stderr": 0.020160213617222516, + "acc_norm": 0.5408496732026143, + "acc_norm_stderr": 0.020160213617222516 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.41843971631205673, + "acc_stderr": 0.029427994039419994, + "acc_norm": 0.41843971631205673, + "acc_norm_stderr": 0.029427994039419994 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4107142857142857, + "acc_stderr": 0.04669510663875191, + "acc_norm": 0.4107142857142857, + "acc_norm_stderr": 0.04669510663875191 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5277777777777778, + "acc_stderr": 0.0340470532865388, + "acc_norm": 0.5277777777777778, + "acc_norm_stderr": 0.0340470532865388 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23575418994413408, + "acc_stderr": 0.014196375686290804, + "acc_norm": 0.23575418994413408, + "acc_norm_stderr": 0.014196375686290804 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.76, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.76, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5698529411764706, + "acc_stderr": 0.030074971917302875, + "acc_norm": 0.5698529411764706, + "acc_norm_stderr": 0.030074971917302875 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.7591836734693878, + "acc_stderr": 0.02737294220178816, + "acc_norm": 0.7591836734693878, + "acc_norm_stderr": 
0.02737294220178816 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.8016877637130801, + "acc_stderr": 0.0259550208416211, + "acc_norm": 0.8016877637130801, + "acc_norm_stderr": 0.0259550208416211 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.42503259452411996, + "acc_stderr": 0.012625879884891994, + "acc_norm": 0.42503259452411996, + "acc_norm_stderr": 0.012625879884891994 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.7745098039215687, + "acc_stderr": 0.029331162294251742, + "acc_norm": 0.7745098039215687, + "acc_norm_stderr": 0.029331162294251742 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.7696969696969697, + "acc_stderr": 0.03287666758603488, + "acc_norm": 0.7696969696969697, + "acc_norm_stderr": 0.03287666758603488 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.4173806609547124, + "mc1_stderr": 0.01726289106327218, + "mc2": 0.5720704055079059, + "mc2_stderr": 0.015554344775507314 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.577331759149941, + "acc_stderr": 0.016983506079577607, + "acc_norm": 0.5879574970484062, + "acc_norm_stderr": 0.01692227673852836 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + 
"harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "JY623/KoSOLAR-10.7B-merge-v3.0", + "model_sha": "b3d3709be14ca40db5d696a2b175f1e4d6c2ea6e", + 
"model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/JY623/KoSOLAR-10.7B-merge-v3.2/result_2024-04-16 07:12:38.json b/JY623/KoSOLAR-10.7B-merge-v3.2/result_2024-04-16 07:12:38.json new file mode 100644 index 0000000000000000000000000000000000000000..64b1cf681f11c88099c1931575438d9bd08e0fe8 --- /dev/null +++ b/JY623/KoSOLAR-10.7B-merge-v3.2/result_2024-04-16 07:12:38.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4667235494880546, + "acc_stderr": 0.014578995859605804, + "acc_norm": 0.5358361774744027, + "acc_norm_stderr": 0.014573813664735714 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4539932284405497, + "acc_stderr": 0.004968613539309247, + "acc_norm": 0.634833698466441, + "acc_norm_stderr": 0.00480492760877314 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6842105263157895, + "acc_stderr": 0.03565079670708311, + "acc_norm": 0.6842105263157895, + "acc_norm_stderr": 0.03565079670708311 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6893203883495146, + "acc_stderr": 0.04582124160161551, + "acc_norm": 0.6893203883495146, + "acc_norm_stderr": 0.04582124160161551 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.7151979565772669, + "acc_stderr": 0.016139174096522584, + "acc_norm": 0.7151979565772669, + "acc_norm_stderr": 0.016139174096522584 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.5037037037037037, + "acc_stderr": 0.043192236258113324, + "acc_norm": 0.5037037037037037, + "acc_norm_stderr": 0.043192236258113324 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.5787234042553191, + "acc_stderr": 0.032278345101462665, + "acc_norm": 0.5787234042553191, + "acc_norm_stderr": 
0.032278345101462665 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.5301204819277109, + "acc_stderr": 0.03885425420866767, + "acc_norm": 0.5301204819277109, + "acc_norm_stderr": 0.03885425420866767 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6430868167202572, + "acc_stderr": 0.027210420375934023, + "acc_norm": 0.6430868167202572, + "acc_norm_stderr": 0.027210420375934023 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.6188340807174888, + "acc_stderr": 0.03259625118416827, + "acc_norm": 0.6188340807174888, + "acc_norm_stderr": 0.03259625118416827 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6335877862595419, + "acc_stderr": 0.04225875451969639, + "acc_norm": 0.6335877862595419, + "acc_norm_stderr": 0.04225875451969639 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956913, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956913 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7929292929292929, + "acc_stderr": 0.02886977846026705, + "acc_norm": 0.7929292929292929, + "acc_norm_stderr": 0.02886977846026705 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5241379310344828, + "acc_stderr": 0.0416180850350153, + "acc_norm": 0.5241379310344828, + "acc_norm_stderr": 0.0416180850350153 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3431372549019608, + "acc_stderr": 0.04724007352383888, + "acc_norm": 0.3431372549019608, + "acc_norm_stderr": 0.04724007352383888 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6512605042016807, + "acc_stderr": 0.030956636328566548, + "acc_norm": 0.6512605042016807, + "acc_norm_stderr": 0.030956636328566548 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5666666666666667, + "acc_stderr": 0.0251246535258851, + "acc_norm": 0.5666666666666667, + "acc_norm_stderr": 0.0251246535258851 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 
0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6574074074074074, + "acc_stderr": 0.045879047413018105, + "acc_norm": 0.6574074074074074, + "acc_norm_stderr": 0.045879047413018105 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.43842364532019706, + "acc_stderr": 0.03491207857486518, + "acc_norm": 0.43842364532019706, + "acc_norm_stderr": 0.03491207857486518 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6741935483870968, + "acc_stderr": 0.0266620105785671, + "acc_norm": 0.6741935483870968, + "acc_norm_stderr": 0.0266620105785671 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.811965811965812, + "acc_stderr": 0.025598193686652258, + "acc_norm": 0.811965811965812, + "acc_norm_stderr": 0.025598193686652258 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5886792452830188, + "acc_stderr": 0.030285009259009798, + "acc_norm": 0.5886792452830188, + "acc_norm_stderr": 0.030285009259009798 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6454545454545455, + "acc_stderr": 0.045820048415054174, + "acc_norm": 0.6454545454545455, + "acc_norm_stderr": 0.045820048415054174 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3074074074074074, + "acc_stderr": 0.028133252578815632, + "acc_norm": 0.3074074074074074, + "acc_norm_stderr": 0.028133252578815632 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33112582781456956, + "acc_stderr": 0.038425817186598696, + "acc_norm": 0.33112582781456956, + "acc_norm_stderr": 0.038425817186598696 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7412935323383084, + "acc_stderr": 0.030965903123573033, + "acc_norm": 0.7412935323383084, + "acc_norm_stderr": 0.030965903123573033 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5433526011560693, + 
"acc_stderr": 0.03798106566014498, + "acc_norm": 0.5433526011560693, + "acc_norm_stderr": 0.03798106566014498 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.42328042328042326, + "acc_stderr": 0.025446365634406783, + "acc_norm": 0.42328042328042326, + "acc_norm_stderr": 0.025446365634406783 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5763888888888888, + "acc_stderr": 0.041321250197233685, + "acc_norm": 0.5763888888888888, + "acc_norm_stderr": 0.041321250197233685 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.75, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.75, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.6242774566473989, + "acc_stderr": 0.02607431485165708, + "acc_norm": 0.6242774566473989, + "acc_norm_stderr": 0.02607431485165708 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5705521472392638, + "acc_stderr": 0.038890666191127236, + "acc_norm": 0.5705521472392638, + "acc_norm_stderr": 0.038890666191127236 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6635802469135802, + "acc_stderr": 0.026289734945952926, + "acc_norm": 0.6635802469135802, + "acc_norm_stderr": 0.026289734945952926 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7668393782383419, + "acc_stderr": 0.03051611137147601, + "acc_norm": 0.7668393782383419, + "acc_norm_stderr": 0.03051611137147601 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.49122807017543857, + "acc_stderr": 0.047028804320496165, + "acc_norm": 0.49122807017543857, + "acc_norm_stderr": 0.047028804320496165 + }, + "harness|ko_mmlu_high_school_psychology|5": { + 
"acc": 0.7247706422018348, + "acc_stderr": 0.019149093743155196, + "acc_norm": 0.7247706422018348, + "acc_norm_stderr": 0.019149093743155196 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4365079365079365, + "acc_stderr": 0.04435932892851466, + "acc_norm": 0.4365079365079365, + "acc_norm_stderr": 0.04435932892851466 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.6470588235294118, + "acc_stderr": 0.027363593284684965, + "acc_norm": 0.6470588235294118, + "acc_norm_stderr": 0.027363593284684965 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.63, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.63, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7107438016528925, + "acc_stderr": 0.041391127276354626, + "acc_norm": 0.7107438016528925, + "acc_norm_stderr": 0.041391127276354626 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.618421052631579, + "acc_stderr": 0.03953173377749194, + "acc_norm": 0.618421052631579, + "acc_norm_stderr": 0.03953173377749194 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5473856209150327, + "acc_stderr": 0.020136790918492534, + "acc_norm": 0.5473856209150327, + "acc_norm_stderr": 0.020136790918492534 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.41843971631205673, + "acc_stderr": 0.029427994039419994, + "acc_norm": 0.41843971631205673, + "acc_norm_stderr": 0.029427994039419994 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.41964285714285715, + "acc_stderr": 0.04684099321077106, + "acc_norm": 0.41964285714285715, + "acc_norm_stderr": 0.04684099321077106 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5185185185185185, + "acc_stderr": 0.03407632093854053, + "acc_norm": 0.5185185185185185, + "acc_norm_stderr": 0.03407632093854053 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.014333522059217892, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 
0.014333522059217892 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.77, + "acc_stderr": 0.042295258468165044, + "acc_norm": 0.77, + "acc_norm_stderr": 0.042295258468165044 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5514705882352942, + "acc_stderr": 0.030211479609121596, + "acc_norm": 0.5514705882352942, + "acc_norm_stderr": 0.030211479609121596 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.7510204081632653, + "acc_stderr": 0.027682979522960234, + "acc_norm": 0.7510204081632653, + "acc_norm_stderr": 0.027682979522960234 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.8059071729957806, + "acc_stderr": 0.02574490253229094, + "acc_norm": 0.8059071729957806, + "acc_norm_stderr": 0.02574490253229094 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.42698826597131684, + "acc_stderr": 0.012633353557534418, + "acc_norm": 0.42698826597131684, + "acc_norm_stderr": 0.012633353557534418 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.7696078431372549, + "acc_stderr": 0.02955429260569508, + "acc_norm": 0.7696078431372549, + "acc_norm_stderr": 0.02955429260569508 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.7636363636363637, + "acc_stderr": 0.03317505930009179, + "acc_norm": 0.7636363636363637, + "acc_norm_stderr": 0.03317505930009179 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.379436964504284, + "mc1_stderr": 0.016987039266142992, + "mc2": 0.543917694141216, + "mc2_stderr": 0.015683013417932032 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5596221959858324, + "acc_stderr": 0.017067699774312967, + "acc_norm": 0.5761511216056671, + "acc_norm_stderr": 0.016989810834628253 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + 
"harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + 
"harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "JY623/KoSOLAR-10.7B-merge-v3.2", + "model_sha": "2d91b66ad4dca0a9238c756d67112808657c0782", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/JY623/KoSOLAR-10.7B-merge-v3.4/result_2024-04-16 09:05:08.json b/JY623/KoSOLAR-10.7B-merge-v3.4/result_2024-04-16 09:05:08.json new file mode 100644 index 0000000000000000000000000000000000000000..dcda094cf421b26763d1761c17de77bc385d6b45 --- /dev/null +++ b/JY623/KoSOLAR-10.7B-merge-v3.4/result_2024-04-16 09:05:08.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.47013651877133106, + "acc_stderr": 0.014585305840007102, + "acc_norm": 0.5392491467576792, + "acc_norm_stderr": 0.014566303676636586 + }, + "harness|ko_hellaswag|10": { + "acc": 0.45439155546703847, + "acc_stderr": 0.004968979259738328, + "acc_norm": 0.6374228241386178, + "acc_norm_stderr": 0.004797616754372309 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6783625730994152, + "acc_stderr": 0.03582529442573122, + "acc_norm": 0.6783625730994152, + "acc_norm_stderr": 0.03582529442573122 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6893203883495146, + "acc_stderr": 0.04582124160161551, + "acc_norm": 0.6893203883495146, + 
"acc_norm_stderr": 0.04582124160161551 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.7139208173690932, + "acc_stderr": 0.016160871405127515, + "acc_norm": 0.7139208173690932, + "acc_norm_stderr": 0.016160871405127515 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.5111111111111111, + "acc_stderr": 0.04318275491977976, + "acc_norm": 0.5111111111111111, + "acc_norm_stderr": 0.04318275491977976 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768076, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768076 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.574468085106383, + "acc_stderr": 0.0323214691622447, + "acc_norm": 0.574468085106383, + "acc_norm_stderr": 0.0323214691622447 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.5301204819277109, + "acc_stderr": 0.03885425420866767, + "acc_norm": 0.5301204819277109, + "acc_norm_stderr": 0.03885425420866767 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6366559485530546, + "acc_stderr": 0.027316847674192714, + "acc_norm": 0.6366559485530546, + "acc_norm_stderr": 0.027316847674192714 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.6278026905829597, + "acc_stderr": 0.03244305283008731, + "acc_norm": 0.6278026905829597, + "acc_norm_stderr": 0.03244305283008731 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6335877862595419, + "acc_stderr": 0.04225875451969639, + "acc_norm": 0.6335877862595419, + "acc_norm_stderr": 0.04225875451969639 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956913, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956913 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.797979797979798, + "acc_stderr": 0.028606204289229872, + "acc_norm": 0.797979797979798, + "acc_norm_stderr": 0.028606204289229872 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.496551724137931, + "acc_stderr": 0.041665675771015785, + "acc_norm": 0.496551724137931, + 
"acc_norm_stderr": 0.041665675771015785 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3431372549019608, + "acc_stderr": 0.04724007352383888, + "acc_norm": 0.3431372549019608, + "acc_norm_stderr": 0.04724007352383888 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6470588235294118, + "acc_stderr": 0.031041941304059288, + "acc_norm": 0.6470588235294118, + "acc_norm_stderr": 0.031041941304059288 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5666666666666667, + "acc_stderr": 0.0251246535258851, + "acc_norm": 0.5666666666666667, + "acc_norm_stderr": 0.0251246535258851 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6759259259259259, + "acc_stderr": 0.04524596007030048, + "acc_norm": 0.6759259259259259, + "acc_norm_stderr": 0.04524596007030048 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.43842364532019706, + "acc_stderr": 0.03491207857486518, + "acc_norm": 0.43842364532019706, + "acc_norm_stderr": 0.03491207857486518 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6580645161290323, + "acc_stderr": 0.02698528957655273, + "acc_norm": 0.6580645161290323, + "acc_norm_stderr": 0.02698528957655273 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.811965811965812, + "acc_stderr": 0.025598193686652258, + "acc_norm": 0.811965811965812, + "acc_norm_stderr": 0.025598193686652258 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5849056603773585, + "acc_stderr": 0.030325945789286105, + "acc_norm": 0.5849056603773585, + "acc_norm_stderr": 0.030325945789286105 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6454545454545455, + "acc_stderr": 0.045820048415054174, + 
"acc_norm": 0.6454545454545455, + "acc_norm_stderr": 0.045820048415054174 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.31851851851851853, + "acc_stderr": 0.02840653309060846, + "acc_norm": 0.31851851851851853, + "acc_norm_stderr": 0.02840653309060846 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.32450331125827814, + "acc_stderr": 0.038227469376587525, + "acc_norm": 0.32450331125827814, + "acc_norm_stderr": 0.038227469376587525 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.736318407960199, + "acc_stderr": 0.03115715086935557, + "acc_norm": 0.736318407960199, + "acc_norm_stderr": 0.03115715086935557 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5433526011560693, + "acc_stderr": 0.03798106566014498, + "acc_norm": 0.5433526011560693, + "acc_norm_stderr": 0.03798106566014498 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.025487187147859372, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.025487187147859372 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5833333333333334, + "acc_stderr": 0.04122728707651282, + "acc_norm": 0.5833333333333334, + "acc_norm_stderr": 0.04122728707651282 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.75, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.75, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.6242774566473989, + "acc_stderr": 0.02607431485165708, + "acc_norm": 0.6242774566473989, + "acc_norm_stderr": 0.02607431485165708 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5705521472392638, + "acc_stderr": 0.038890666191127236, + "acc_norm": 0.5705521472392638, + "acc_norm_stderr": 0.038890666191127236 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6604938271604939, + 
"acc_stderr": 0.026348564412011628, + "acc_norm": 0.6604938271604939, + "acc_norm_stderr": 0.026348564412011628 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7668393782383419, + "acc_stderr": 0.03051611137147601, + "acc_norm": 0.7668393782383419, + "acc_norm_stderr": 0.03051611137147601 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.49122807017543857, + "acc_stderr": 0.047028804320496165, + "acc_norm": 0.49122807017543857, + "acc_norm_stderr": 0.047028804320496165 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.726605504587156, + "acc_stderr": 0.01910929984609828, + "acc_norm": 0.726605504587156, + "acc_norm_stderr": 0.01910929984609828 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.044444444444444495, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.044444444444444495 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.6405228758169934, + "acc_stderr": 0.027475969910660952, + "acc_norm": 0.6405228758169934, + "acc_norm_stderr": 0.027475969910660952 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.63, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.63, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7107438016528925, + "acc_stderr": 0.041391127276354626, + "acc_norm": 0.7107438016528925, + "acc_norm_stderr": 0.041391127276354626 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.625, + "acc_stderr": 0.039397364351956274, + "acc_norm": 0.625, + "acc_norm_stderr": 0.039397364351956274 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.545751633986928, + "acc_stderr": 0.020142974553795205, + "acc_norm": 0.545751633986928, + "acc_norm_stderr": 0.020142974553795205 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 
0.4219858156028369, + "acc_stderr": 0.029462189233370576, + "acc_norm": 0.4219858156028369, + "acc_norm_stderr": 0.029462189233370576 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.41964285714285715, + "acc_stderr": 0.04684099321077106, + "acc_norm": 0.41964285714285715, + "acc_norm_stderr": 0.04684099321077106 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5092592592592593, + "acc_stderr": 0.034093869469927006, + "acc_norm": 0.5092592592592593, + "acc_norm_stderr": 0.034093869469927006 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24692737430167597, + "acc_stderr": 0.01442229220480885, + "acc_norm": 0.24692737430167597, + "acc_norm_stderr": 0.01442229220480885 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.76, + "acc_stderr": 0.04292346959909282, + "acc_norm": 0.76, + "acc_norm_stderr": 0.04292346959909282 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5404411764705882, + "acc_stderr": 0.030273325077345755, + "acc_norm": 0.5404411764705882, + "acc_norm_stderr": 0.030273325077345755 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.7551020408163265, + "acc_stderr": 0.02752963744017492, + "acc_norm": 0.7551020408163265, + "acc_norm_stderr": 0.02752963744017492 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7932489451476793, + "acc_stderr": 0.026361651668389094, + "acc_norm": 0.7932489451476793, + "acc_norm_stderr": 0.026361651668389094 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.4302477183833116, + "acc_stderr": 0.012645361435115226, + "acc_norm": 0.4302477183833116, + "acc_norm_stderr": 0.012645361435115226 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.7696078431372549, + "acc_stderr": 0.02955429260569508, + "acc_norm": 0.7696078431372549, + "acc_norm_stderr": 
0.02955429260569508 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.7636363636363637, + "acc_stderr": 0.03317505930009179, + "acc_norm": 0.7636363636363637, + "acc_norm_stderr": 0.03317505930009179 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3843329253365973, + "mc1_stderr": 0.0170287073012452, + "mc2": 0.5527618960115636, + "mc2_stderr": 0.01569377342067013 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5548996458087367, + "acc_stderr": 0.017086417431005464, + "acc_norm": 0.5737898465171193, + "acc_norm_stderr": 0.01700212260948925 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + 
"harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "JY623/KoSOLAR-10.7B-merge-v3.4", + "model_sha": "82e4b39d0ce8f31db2b61c9f79e4e5cc9eb3d74c", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/JY623/KoSOLAR-v2.0/result_2024-03-28 06:16:15.json b/JY623/KoSOLAR-v2.0/result_2024-03-28 06:16:15.json new file mode 100644 index 0000000000000000000000000000000000000000..b7f65884759b390aa5868c3eeaec3649802b4da7 --- /dev/null +++ b/JY623/KoSOLAR-v2.0/result_2024-03-28 06:16:15.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": 
{ + "acc": 0.6902730375426621, + "acc_stderr": 0.013512058415238363, + "acc_norm": 0.7431740614334471, + "acc_norm_stderr": 0.0127669237941168 + }, + "harness|ko_hellaswag|10": { + "acc": 0.47012547301334395, + "acc_stderr": 0.004980866814462744, + "acc_norm": 0.6261700856403107, + "acc_norm_stderr": 0.0048283050419044024 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.7192982456140351, + "acc_stderr": 0.034462962170884265, + "acc_norm": 0.7192982456140351, + "acc_norm_stderr": 0.034462962170884265 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.7087378640776699, + "acc_stderr": 0.044986763205729224, + "acc_norm": 0.7087378640776699, + "acc_norm_stderr": 0.044986763205729224 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.7522349936143039, + "acc_stderr": 0.015438083080568961, + "acc_norm": 0.7522349936143039, + "acc_norm_stderr": 0.015438083080568961 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4888888888888889, + "acc_stderr": 0.04318275491977976, + "acc_norm": 0.4888888888888889, + "acc_norm_stderr": 0.04318275491977976 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.5531914893617021, + "acc_stderr": 0.0325005368436584, + "acc_norm": 0.5531914893617021, + "acc_norm_stderr": 0.0325005368436584 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.5602409638554217, + "acc_stderr": 0.03864139923699121, + "acc_norm": 0.5602409638554217, + "acc_norm_stderr": 0.03864139923699121 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6495176848874598, + "acc_stderr": 0.027098652621301747, + "acc_norm": 0.6495176848874598, + "acc_norm_stderr": 0.027098652621301747 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.6143497757847534, + "acc_stderr": 0.03266842214289201, + "acc_norm": 0.6143497757847534, + "acc_norm_stderr": 0.03266842214289201 + }, + 
"harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6412213740458015, + "acc_stderr": 0.04206739313864908, + "acc_norm": 0.6412213740458015, + "acc_norm_stderr": 0.04206739313864908 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.797979797979798, + "acc_stderr": 0.028606204289229865, + "acc_norm": 0.797979797979798, + "acc_norm_stderr": 0.028606204289229865 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5310344827586206, + "acc_stderr": 0.04158632762097828, + "acc_norm": 0.5310344827586206, + "acc_norm_stderr": 0.04158632762097828 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.4019607843137255, + "acc_stderr": 0.04878608714466996, + "acc_norm": 0.4019607843137255, + "acc_norm_stderr": 0.04878608714466996 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.7100840336134454, + "acc_stderr": 0.0294724858331361, + "acc_norm": 0.7100840336134454, + "acc_norm_stderr": 0.0294724858331361 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.6384615384615384, + "acc_stderr": 0.024359581465397018, + "acc_norm": 0.6384615384615384, + "acc_norm_stderr": 0.024359581465397018 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.68, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.68, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.04557239513497751, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.04557239513497751 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4827586206896552, + "acc_stderr": 0.035158955511656986, + "acc_norm": 0.4827586206896552, + "acc_norm_stderr": 
0.035158955511656986 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6645161290322581, + "acc_stderr": 0.02686020644472436, + "acc_norm": 0.6645161290322581, + "acc_norm_stderr": 0.02686020644472436 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.8247863247863247, + "acc_stderr": 0.02490443909891822, + "acc_norm": 0.8247863247863247, + "acc_norm_stderr": 0.02490443909891822 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5962264150943396, + "acc_stderr": 0.03019761160019795, + "acc_norm": 0.5962264150943396, + "acc_norm_stderr": 0.03019761160019795 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6181818181818182, + "acc_stderr": 0.046534298079135075, + "acc_norm": 0.6181818181818182, + "acc_norm_stderr": 0.046534298079135075 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3851851851851852, + "acc_stderr": 0.029670906124630886, + "acc_norm": 0.3851851851851852, + "acc_norm_stderr": 0.029670906124630886 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3973509933774834, + "acc_stderr": 0.03995524007681681, + "acc_norm": 0.3973509933774834, + "acc_norm_stderr": 0.03995524007681681 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7213930348258707, + "acc_stderr": 0.031700561834973086, + "acc_norm": 0.7213930348258707, + "acc_norm_stderr": 0.031700561834973086 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5549132947976878, + "acc_stderr": 0.03789401760283646, + "acc_norm": 0.5549132947976878, + "acc_norm_stderr": 0.03789401760283646 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.4523809523809524, + "acc_stderr": 0.02563425811555496, + "acc_norm": 0.4523809523809524, + "acc_norm_stderr": 0.02563425811555496 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5972222222222222, + "acc_stderr": 0.04101405519842426, + "acc_norm": 0.5972222222222222, + "acc_norm_stderr": 0.04101405519842426 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.47, + "acc_stderr": 
0.050161355804659205, + "acc_norm": 0.47, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.79, + "acc_stderr": 0.04093601807403326, + "acc_norm": 0.79, + "acc_norm_stderr": 0.04093601807403326 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.638728323699422, + "acc_stderr": 0.02586220185227788, + "acc_norm": 0.638728323699422, + "acc_norm_stderr": 0.02586220185227788 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.6196319018404908, + "acc_stderr": 0.038142698932618374, + "acc_norm": 0.6196319018404908, + "acc_norm_stderr": 0.038142698932618374 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6759259259259259, + "acc_stderr": 0.026041766202717163, + "acc_norm": 0.6759259259259259, + "acc_norm_stderr": 0.026041766202717163 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7409326424870466, + "acc_stderr": 0.0316187791793541, + "acc_norm": 0.7409326424870466, + "acc_norm_stderr": 0.0316187791793541 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.4649122807017544, + "acc_stderr": 0.046920083813689104, + "acc_norm": 0.4649122807017544, + "acc_norm_stderr": 0.046920083813689104 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.7761467889908257, + "acc_stderr": 0.017871217767790198, + "acc_norm": 0.7761467889908257, + "acc_norm_stderr": 0.017871217767790198 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.044444444444444495, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.044444444444444495 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.6633986928104575, + "acc_stderr": 0.027057974624494382, + "acc_norm": 0.6633986928104575, + "acc_norm_stderr": 0.027057974624494382 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.64, + 
"acc_stderr": 0.04824181513244218, + "acc_norm": 0.64, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.743801652892562, + "acc_stderr": 0.03984979653302872, + "acc_norm": 0.743801652892562, + "acc_norm_stderr": 0.03984979653302872 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.6513157894736842, + "acc_stderr": 0.03878139888797611, + "acc_norm": 0.6513157894736842, + "acc_norm_stderr": 0.03878139888797611 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5604575163398693, + "acc_stderr": 0.020079420408087918, + "acc_norm": 0.5604575163398693, + "acc_norm_stderr": 0.020079420408087918 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.4432624113475177, + "acc_stderr": 0.029634838473766002, + "acc_norm": 0.4432624113475177, + "acc_norm_stderr": 0.029634838473766002 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.44642857142857145, + "acc_stderr": 0.047184714852195886, + "acc_norm": 0.44642857142857145, + "acc_norm_stderr": 0.047184714852195886 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5138888888888888, + "acc_stderr": 0.03408655867977747, + "acc_norm": 0.5138888888888888, + "acc_norm_stderr": 0.03408655867977747 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.43910614525139663, + "acc_stderr": 0.01659802212058043, + "acc_norm": 0.43910614525139663, + "acc_norm_stderr": 0.01659802212058043 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.72, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.72, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.6580882352941176, + "acc_stderr": 0.028814722422254177, + "acc_norm": 0.6580882352941176, + "acc_norm_stderr": 0.028814722422254177 + }, + 
"harness|ko_mmlu_security_studies|5": { + "acc": 0.6489795918367347, + "acc_stderr": 0.03055531675557364, + "acc_norm": 0.6489795918367347, + "acc_norm_stderr": 0.03055531675557364 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7721518987341772, + "acc_stderr": 0.02730348459906943, + "acc_norm": 0.7721518987341772, + "acc_norm_stderr": 0.02730348459906943 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.45632333767926986, + "acc_stderr": 0.01272142050146255, + "acc_norm": 0.45632333767926986, + "acc_norm_stderr": 0.01272142050146255 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6813725490196079, + "acc_stderr": 0.032702871814820816, + "acc_norm": 0.6813725490196079, + "acc_norm_stderr": 0.032702871814820816 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6424242424242425, + "acc_stderr": 0.03742597043806587, + "acc_norm": 0.6424242424242425, + "acc_norm_stderr": 0.03742597043806587 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.5899632802937577, + "mc1_stderr": 0.01721784471744932, + "mc2": 0.7013763236137057, + "mc2_stderr": 0.01435091264551528 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5466351829988194, + "acc_stderr": 0.017115418225226862, + "acc_norm": 0.5950413223140496, + "acc_norm_stderr": 0.016876941165045612 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + 
"harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + 
"harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "JY623/KoSOLAR-v2.0", + "model_sha": "81d08d30db75f5d1f3328b77034e59d42246119d", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/JY623/KoSOLAR-v2.1/result_2024-03-28 06:32:50.json b/JY623/KoSOLAR-v2.1/result_2024-03-28 06:32:50.json new file mode 100644 index 0000000000000000000000000000000000000000..549f2fda925dcc2684fc8d7c2b8fdfe6a2a35f2c --- /dev/null +++ b/JY623/KoSOLAR-v2.1/result_2024-03-28 06:32:50.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.7022184300341296, + "acc_stderr": 0.013363080107244485, + "acc_norm": 0.75, + "acc_norm_stderr": 0.012653835621466646 + }, + "harness|ko_hellaswag|10": { + "acc": 0.5191196972714599, + "acc_stderr": 0.004986131919673963, + "acc_norm": 0.6786496713802032, + "acc_norm_stderr": 0.004660405565338769 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.7192982456140351, + "acc_stderr": 0.034462962170884265, + "acc_norm": 0.7192982456140351, + "acc_norm_stderr": 0.034462962170884265 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.7281553398058253, + "acc_stderr": 0.044052680241409216, + "acc_norm": 0.7281553398058253, + "acc_norm_stderr": 0.044052680241409216 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.7381864623243933, + "acc_stderr": 0.01572083867844526, + "acc_norm": 0.7381864623243933, + "acc_norm_stderr": 0.01572083867844526 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.04309732901036354, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.04309732901036354 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.5404255319148936, 
+ "acc_stderr": 0.03257901482099834, + "acc_norm": 0.5404255319148936, + "acc_norm_stderr": 0.03257901482099834 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.5180722891566265, + "acc_stderr": 0.03889951252827216, + "acc_norm": 0.5180722891566265, + "acc_norm_stderr": 0.03889951252827216 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6495176848874598, + "acc_stderr": 0.027098652621301744, + "acc_norm": 0.6495176848874598, + "acc_norm_stderr": 0.027098652621301744 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.6681614349775785, + "acc_stderr": 0.031602951437766785, + "acc_norm": 0.6681614349775785, + "acc_norm_stderr": 0.031602951437766785 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6335877862595419, + "acc_stderr": 0.042258754519696386, + "acc_norm": 0.6335877862595419, + "acc_norm_stderr": 0.042258754519696386 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7727272727272727, + "acc_stderr": 0.029857515673386417, + "acc_norm": 0.7727272727272727, + "acc_norm_stderr": 0.029857515673386417 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5724137931034483, + "acc_stderr": 0.04122737111370332, + "acc_norm": 0.5724137931034483, + "acc_norm_stderr": 0.04122737111370332 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.35294117647058826, + "acc_stderr": 0.047551296160629475, + "acc_norm": 0.35294117647058826, + "acc_norm_stderr": 0.047551296160629475 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6512605042016807, + "acc_stderr": 0.030956636328566548, + "acc_norm": 0.6512605042016807, + "acc_norm_stderr": 0.030956636328566548 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.6461538461538462, + "acc_stderr": 0.02424378399406214, + "acc_norm": 0.6461538461538462, + "acc_norm_stderr": 0.02424378399406214 + }, + 
"harness|ko_mmlu_computer_security|5": { + "acc": 0.75, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.75, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.04557239513497751, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.04557239513497751 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.46798029556650245, + "acc_stderr": 0.03510766597959217, + "acc_norm": 0.46798029556650245, + "acc_norm_stderr": 0.03510766597959217 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6580645161290323, + "acc_stderr": 0.026985289576552732, + "acc_norm": 0.6580645161290323, + "acc_norm_stderr": 0.026985289576552732 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.8589743589743589, + "acc_stderr": 0.022801382534597528, + "acc_norm": 0.8589743589743589, + "acc_norm_stderr": 0.022801382534597528 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.6113207547169811, + "acc_stderr": 0.03000048544867599, + "acc_norm": 0.6113207547169811, + "acc_norm_stderr": 0.03000048544867599 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6727272727272727, + "acc_stderr": 0.0449429086625209, + "acc_norm": 0.6727272727272727, + "acc_norm_stderr": 0.0449429086625209 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.4, + "acc_stderr": 0.029869605095316904, + "acc_norm": 0.4, + "acc_norm_stderr": 0.029869605095316904 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3576158940397351, + "acc_stderr": 0.03913453431177258, + "acc_norm": 0.3576158940397351, + "acc_norm_stderr": 0.03913453431177258 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7512437810945274, + "acc_stderr": 0.030567675938916707, + "acc_norm": 0.7512437810945274, + "acc_norm_stderr": 0.030567675938916707 + }, + 
"harness|ko_mmlu_college_medicine|5": { + "acc": 0.5606936416184971, + "acc_stderr": 0.037842719328874674, + "acc_norm": 0.5606936416184971, + "acc_norm_stderr": 0.037842719328874674 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.46825396825396826, + "acc_stderr": 0.025699352832131792, + "acc_norm": 0.46825396825396826, + "acc_norm_stderr": 0.025699352832131792 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.6319444444444444, + "acc_stderr": 0.04032999053960719, + "acc_norm": 0.6319444444444444, + "acc_norm_stderr": 0.04032999053960719 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.79, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.79, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.630057803468208, + "acc_stderr": 0.02599247202930639, + "acc_norm": 0.630057803468208, + "acc_norm_stderr": 0.02599247202930639 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.656441717791411, + "acc_stderr": 0.03731133519673893, + "acc_norm": 0.656441717791411, + "acc_norm_stderr": 0.03731133519673893 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.7067901234567902, + "acc_stderr": 0.02532988817190092, + "acc_norm": 0.7067901234567902, + "acc_norm_stderr": 0.02532988817190092 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7668393782383419, + "acc_stderr": 0.030516111371476008, + "acc_norm": 0.7668393782383419, + "acc_norm_stderr": 0.030516111371476008 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.49122807017543857, + "acc_stderr": 0.04702880432049615, + "acc_norm": 0.49122807017543857, + "acc_norm_stderr": 
0.04702880432049615 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.7926605504587156, + "acc_stderr": 0.01738141556360866, + "acc_norm": 0.7926605504587156, + "acc_norm_stderr": 0.01738141556360866 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.42063492063492064, + "acc_stderr": 0.04415438226743744, + "acc_norm": 0.42063492063492064, + "acc_norm_stderr": 0.04415438226743744 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.6601307189542484, + "acc_stderr": 0.027121956071388852, + "acc_norm": 0.6601307189542484, + "acc_norm_stderr": 0.027121956071388852 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.72, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.72, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7603305785123967, + "acc_stderr": 0.03896878985070416, + "acc_norm": 0.7603305785123967, + "acc_norm_stderr": 0.03896878985070416 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.6842105263157895, + "acc_stderr": 0.0378272898086547, + "acc_norm": 0.6842105263157895, + "acc_norm_stderr": 0.0378272898086547 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.6078431372549019, + "acc_stderr": 0.01975172650876263, + "acc_norm": 0.6078431372549019, + "acc_norm_stderr": 0.01975172650876263 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.44680851063829785, + "acc_stderr": 0.029658235097666907, + "acc_norm": 0.44680851063829785, + "acc_norm_stderr": 0.029658235097666907 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4375, + "acc_stderr": 0.04708567521880525, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.04708567521880525 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5138888888888888, + "acc_stderr": 0.03408655867977747, + "acc_norm": 0.5138888888888888, + "acc_norm_stderr": 0.03408655867977747 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.4692737430167598, + "acc_stderr": 0.016690896161944385, + "acc_norm": 
0.4692737430167598, + "acc_norm_stderr": 0.016690896161944385 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.75, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.75, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5808823529411765, + "acc_stderr": 0.02997280717046462, + "acc_norm": 0.5808823529411765, + "acc_norm_stderr": 0.02997280717046462 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.710204081632653, + "acc_stderr": 0.029043088683304345, + "acc_norm": 0.710204081632653, + "acc_norm_stderr": 0.029043088683304345 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7679324894514767, + "acc_stderr": 0.027479744550808503, + "acc_norm": 0.7679324894514767, + "acc_norm_stderr": 0.027479744550808503 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.4634941329856584, + "acc_stderr": 0.012736153390214965, + "acc_norm": 0.4634941329856584, + "acc_norm_stderr": 0.012736153390214965 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.7107843137254902, + "acc_stderr": 0.031822318676475544, + "acc_norm": 0.7107843137254902, + "acc_norm_stderr": 0.031822318676475544 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.036810508691615486, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.036810508691615486 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.6646266829865362, + "mc1_stderr": 0.016527534039668987, + "mc2": 0.7684588284159719, + "mc2_stderr": 0.013616670960296498 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5029515938606848, + "acc_stderr": 0.017190054580194694, + "acc_norm": 0.5218417945690673, + "acc_norm_stderr": 0.017173944474294385 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + 
"harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + 
"harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "JY623/KoSOLAR-v2.1", + "model_sha": "8edd3ffafb7d436b458d4a1c1224ef4d87ddb558", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/JY623/KoSOLRA-10.7B-merge-v2.2/result_2024-04-04 16:35:30.json b/JY623/KoSOLRA-10.7B-merge-v2.2/result_2024-04-04 16:35:30.json new file mode 100644 index 0000000000000000000000000000000000000000..6e6f82078fd195dae53ac8d5ae5dd8109ac3c999 --- /dev/null +++ b/JY623/KoSOLRA-10.7B-merge-v2.2/result_2024-04-04 16:35:30.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.6988054607508533, + "acc_stderr": 0.013406741767847634, + "acc_norm": 0.7406143344709898, + "acc_norm_stderr": 0.0128082735739271 + }, + "harness|ko_hellaswag|10": { + "acc": 0.49960167297351127, + "acc_stderr": 0.004989779828043845, + "acc_norm": 0.6606253734315873, + "acc_norm_stderr": 0.004725293905228266 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.7076023391812866, + "acc_stderr": 0.034886477134579215, + "acc_norm": 0.7076023391812866, + "acc_norm_stderr": 0.034886477134579215 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.7378640776699029, + "acc_stderr": 0.043546310772605956, + 
"acc_norm": 0.7378640776699029, + "acc_norm_stderr": 0.043546310772605956 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.7203065134099617, + "acc_stderr": 0.016050792148036567, + "acc_norm": 0.7203065134099617, + "acc_norm_stderr": 0.016050792148036567 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.42962962962962964, + "acc_stderr": 0.04276349494376599, + "acc_norm": 0.42962962962962964, + "acc_norm_stderr": 0.04276349494376599 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.5531914893617021, + "acc_stderr": 0.0325005368436584, + "acc_norm": 0.5531914893617021, + "acc_norm_stderr": 0.0325005368436584 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.5120481927710844, + "acc_stderr": 0.03891364495835817, + "acc_norm": 0.5120481927710844, + "acc_norm_stderr": 0.03891364495835817 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6430868167202572, + "acc_stderr": 0.027210420375934026, + "acc_norm": 0.6430868167202572, + "acc_norm_stderr": 0.027210420375934026 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.6502242152466368, + "acc_stderr": 0.03200736719484503, + "acc_norm": 0.6502242152466368, + "acc_norm_stderr": 0.03200736719484503 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6030534351145038, + "acc_stderr": 0.04291135671009224, + "acc_norm": 0.6030534351145038, + "acc_norm_stderr": 0.04291135671009224 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.52, + "acc_stderr": 0.05021167315686781, + "acc_norm": 0.52, + "acc_norm_stderr": 0.05021167315686781 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7727272727272727, + "acc_stderr": 0.029857515673386417, + "acc_norm": 0.7727272727272727, + "acc_norm_stderr": 0.029857515673386417 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5517241379310345, + "acc_stderr": 0.041443118108781526, 
+ "acc_norm": 0.5517241379310345, + "acc_norm_stderr": 0.041443118108781526 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.04617034827006718, + "acc_norm": 0.3137254901960784, + "acc_norm_stderr": 0.04617034827006718 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.634453781512605, + "acc_stderr": 0.031282177063684614, + "acc_norm": 0.634453781512605, + "acc_norm_stderr": 0.031282177063684614 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.6384615384615384, + "acc_stderr": 0.024359581465397014, + "acc_norm": 0.6384615384615384, + "acc_norm_stderr": 0.024359581465397014 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.73, + "acc_stderr": 0.04461960433384739, + "acc_norm": 0.73, + "acc_norm_stderr": 0.04461960433384739 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6574074074074074, + "acc_stderr": 0.04587904741301811, + "acc_norm": 0.6574074074074074, + "acc_norm_stderr": 0.04587904741301811 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.45320197044334976, + "acc_stderr": 0.035025446508458714, + "acc_norm": 0.45320197044334976, + "acc_norm_stderr": 0.035025446508458714 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.632258064516129, + "acc_stderr": 0.027430866579973463, + "acc_norm": 0.632258064516129, + "acc_norm_stderr": 0.027430866579973463 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.8547008547008547, + "acc_stderr": 0.02308663508684141, + "acc_norm": 0.8547008547008547, + "acc_norm_stderr": 0.02308663508684141 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.6075471698113207, + "acc_stderr": 0.030052580579557845, + "acc_norm": 0.6075471698113207, + "acc_norm_stderr": 0.030052580579557845 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6272727272727273, 
+ "acc_stderr": 0.04631381319425465, + "acc_norm": 0.6272727272727273, + "acc_norm_stderr": 0.04631381319425465 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3814814814814815, + "acc_stderr": 0.029616718927497593, + "acc_norm": 0.3814814814814815, + "acc_norm_stderr": 0.029616718927497593 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3443708609271523, + "acc_stderr": 0.038796870240733264, + "acc_norm": 0.3443708609271523, + "acc_norm_stderr": 0.038796870240733264 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7412935323383084, + "acc_stderr": 0.030965903123573037, + "acc_norm": 0.7412935323383084, + "acc_norm_stderr": 0.030965903123573037 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5664739884393064, + "acc_stderr": 0.03778621079092056, + "acc_norm": 0.5664739884393064, + "acc_norm_stderr": 0.03778621079092056 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.4523809523809524, + "acc_stderr": 0.02563425811555496, + "acc_norm": 0.4523809523809524, + "acc_norm_stderr": 0.02563425811555496 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5902777777777778, + "acc_stderr": 0.04112490974670787, + "acc_norm": 0.5902777777777778, + "acc_norm_stderr": 0.04112490974670787 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.41, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.41, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.78, + "acc_stderr": 0.04163331998932264, + "acc_norm": 0.78, + "acc_norm_stderr": 0.04163331998932264 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.6184971098265896, + "acc_stderr": 0.0261521986197268, + "acc_norm": 0.6184971098265896, + "acc_norm_stderr": 0.0261521986197268 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.6012269938650306, + "acc_stderr": 0.03847021420456024, + "acc_norm": 0.6012269938650306, + "acc_norm_stderr": 0.03847021420456024 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 
0.6944444444444444, + "acc_stderr": 0.025630824975621344, + "acc_norm": 0.6944444444444444, + "acc_norm_stderr": 0.025630824975621344 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7772020725388601, + "acc_stderr": 0.03003114797764154, + "acc_norm": 0.7772020725388601, + "acc_norm_stderr": 0.03003114797764154 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.42105263157894735, + "acc_stderr": 0.046446020912223177, + "acc_norm": 0.42105263157894735, + "acc_norm_stderr": 0.046446020912223177 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.7596330275229358, + "acc_stderr": 0.01832060732096407, + "acc_norm": 0.7596330275229358, + "acc_norm_stderr": 0.01832060732096407 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.40476190476190477, + "acc_stderr": 0.04390259265377561, + "acc_norm": 0.40476190476190477, + "acc_norm_stderr": 0.04390259265377561 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.6601307189542484, + "acc_stderr": 0.027121956071388852, + "acc_norm": 0.6601307189542484, + "acc_norm_stderr": 0.027121956071388852 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.72, + "acc_stderr": 0.045126085985421296, + "acc_norm": 0.72, + "acc_norm_stderr": 0.045126085985421296 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.71900826446281, + "acc_stderr": 0.04103203830514512, + "acc_norm": 0.71900826446281, + "acc_norm_stderr": 0.04103203830514512 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.6513157894736842, + "acc_stderr": 0.03878139888797612, + "acc_norm": 0.6513157894736842, + "acc_norm_stderr": 0.03878139888797612 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.619281045751634, + "acc_stderr": 0.019643801557924806, + "acc_norm": 0.619281045751634, + "acc_norm_stderr": 0.019643801557924806 + }, + 
"harness|ko_mmlu_professional_accounting|5": { + "acc": 0.4397163120567376, + "acc_stderr": 0.02960991207559411, + "acc_norm": 0.4397163120567376, + "acc_norm_stderr": 0.02960991207559411 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.04697113923010213, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.04697113923010213 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.49537037037037035, + "acc_stderr": 0.03409825519163572, + "acc_norm": 0.49537037037037035, + "acc_norm_stderr": 0.03409825519163572 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.441340782122905, + "acc_stderr": 0.01660702178105088, + "acc_norm": 0.441340782122905, + "acc_norm_stderr": 0.01660702178105088 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.71, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.71, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5367647058823529, + "acc_stderr": 0.03029061918048569, + "acc_norm": 0.5367647058823529, + "acc_norm_stderr": 0.03029061918048569 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.7020408163265306, + "acc_stderr": 0.029279567411065667, + "acc_norm": 0.7020408163265306, + "acc_norm_stderr": 0.029279567411065667 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7468354430379747, + "acc_stderr": 0.028304657943035296, + "acc_norm": 0.7468354430379747, + "acc_norm_stderr": 0.028304657943035296 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.4471968709256845, + "acc_stderr": 0.012698825252435118, + "acc_norm": 0.4471968709256845, + "acc_norm_stderr": 0.012698825252435118 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6764705882352942, + "acc_stderr": 0.03283472056108561, + 
"acc_norm": 0.6764705882352942, + "acc_norm_stderr": 0.03283472056108561 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6303030303030303, + "acc_stderr": 0.03769430314512568, + "acc_norm": 0.6303030303030303, + "acc_norm_stderr": 0.03769430314512568 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.6376988984088128, + "mc1_stderr": 0.016826646897262258, + "mc2": 0.7421128686879495, + "mc2_stderr": 0.014033621620310223 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5501770956316411, + "acc_stderr": 0.017103573343825715, + "acc_norm": 0.5667060212514758, + "acc_norm_stderr": 0.017036683641893098 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + 
"harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "JY623/KoSOLRA-10.7B-merge-v2.2", + "model_sha": "a4b741a12d12b0d101c1659421fd78ffc0474cca", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/JY623/KoSOLRA-10.7B-merge-v2.3/result_2024-04-05 08:51:42.json b/JY623/KoSOLRA-10.7B-merge-v2.3/result_2024-04-05 08:51:42.json new file mode 100644 index 0000000000000000000000000000000000000000..10734f76a2581264303bef37a48fc2fb419a61ec --- /dev/null +++ b/JY623/KoSOLRA-10.7B-merge-v2.3/result_2024-04-05 
08:51:42.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.7056313993174061, + "acc_stderr": 0.013318528460539422, + "acc_norm": 0.7517064846416383, + "acc_norm_stderr": 0.012624912868089762 + }, + "harness|ko_hellaswag|10": { + "acc": 0.5329615614419438, + "acc_stderr": 0.00497892716479289, + "acc_norm": 0.6955785700059749, + "acc_norm_stderr": 0.004592215118295274 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.7309941520467836, + "acc_stderr": 0.03401052620104089, + "acc_norm": 0.7309941520467836, + "acc_norm_stderr": 0.03401052620104089 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.7669902912621359, + "acc_stderr": 0.04185832598928315, + "acc_norm": 0.7669902912621359, + "acc_norm_stderr": 0.04185832598928315 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.7369093231162197, + "acc_stderr": 0.015745497169049046, + "acc_norm": 0.7369093231162197, + "acc_norm_stderr": 0.015745497169049046 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45185185185185184, + "acc_stderr": 0.04299268905480863, + "acc_norm": 0.45185185185185184, + "acc_norm_stderr": 0.04299268905480863 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.5404255319148936, + "acc_stderr": 0.03257901482099834, + "acc_norm": 0.5404255319148936, + "acc_norm_stderr": 0.03257901482099834 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.5120481927710844, + "acc_stderr": 0.03891364495835817, + "acc_norm": 0.5120481927710844, + "acc_norm_stderr": 0.03891364495835817 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6591639871382636, + "acc_stderr": 0.026920841260776155, + "acc_norm": 0.6591639871382636, + "acc_norm_stderr": 0.026920841260776155 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.672645739910314, + "acc_stderr": 0.03149384670994131, + "acc_norm": 0.672645739910314, + 
"acc_norm_stderr": 0.03149384670994131 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6106870229007634, + "acc_stderr": 0.042764865428145914, + "acc_norm": 0.6106870229007634, + "acc_norm_stderr": 0.042764865428145914 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.51, + "acc_stderr": 0.050241839379569095, + "acc_norm": 0.51, + "acc_norm_stderr": 0.050241839379569095 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7626262626262627, + "acc_stderr": 0.030313710538198906, + "acc_norm": 0.7626262626262627, + "acc_norm_stderr": 0.030313710538198906 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5655172413793104, + "acc_stderr": 0.04130740879555497, + "acc_norm": 0.5655172413793104, + "acc_norm_stderr": 0.04130740879555497 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.35294117647058826, + "acc_stderr": 0.047551296160629475, + "acc_norm": 0.35294117647058826, + "acc_norm_stderr": 0.047551296160629475 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6554621848739496, + "acc_stderr": 0.030868682604121622, + "acc_norm": 0.6554621848739496, + "acc_norm_stderr": 0.030868682604121622 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.6487179487179487, + "acc_stderr": 0.024203665177902806, + "acc_norm": 0.6487179487179487, + "acc_norm_stderr": 0.024203665177902806 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.75, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.75, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6851851851851852, + "acc_stderr": 0.04489931073591312, + "acc_norm": 0.6851851851851852, + "acc_norm_stderr": 0.04489931073591312 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.46798029556650245, + "acc_stderr": 0.03510766597959217, + 
"acc_norm": 0.46798029556650245, + "acc_norm_stderr": 0.03510766597959217 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.635483870967742, + "acc_stderr": 0.02737987122994324, + "acc_norm": 0.635483870967742, + "acc_norm_stderr": 0.02737987122994324 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.8589743589743589, + "acc_stderr": 0.02280138253459753, + "acc_norm": 0.8589743589743589, + "acc_norm_stderr": 0.02280138253459753 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.6075471698113207, + "acc_stderr": 0.03005258057955785, + "acc_norm": 0.6075471698113207, + "acc_norm_stderr": 0.03005258057955785 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.04607582090719976, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.04607582090719976 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3925925925925926, + "acc_stderr": 0.029773847012532967, + "acc_norm": 0.3925925925925926, + "acc_norm_stderr": 0.029773847012532967 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3708609271523179, + "acc_stderr": 0.03943966699183629, + "acc_norm": 0.3708609271523179, + "acc_norm_stderr": 0.03943966699183629 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7562189054726368, + "acc_stderr": 0.03036049015401465, + "acc_norm": 0.7562189054726368, + "acc_norm_stderr": 0.03036049015401465 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5664739884393064, + "acc_stderr": 0.03778621079092055, + "acc_norm": 0.5664739884393064, + "acc_norm_stderr": 0.03778621079092055 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.4576719576719577, + "acc_stderr": 0.02565886886205832, + "acc_norm": 0.4576719576719577, + "acc_norm_stderr": 0.02565886886205832 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.6180555555555556, + "acc_stderr": 0.040629907841466674, + "acc_norm": 0.6180555555555556, + "acc_norm_stderr": 0.040629907841466674 + }, + 
"harness|ko_mmlu_college_chemistry|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.78, + "acc_stderr": 0.04163331998932264, + "acc_norm": 0.78, + "acc_norm_stderr": 0.04163331998932264 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.630057803468208, + "acc_stderr": 0.025992472029306393, + "acc_norm": 0.630057803468208, + "acc_norm_stderr": 0.025992472029306393 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.656441717791411, + "acc_stderr": 0.03731133519673893, + "acc_norm": 0.656441717791411, + "acc_norm_stderr": 0.03731133519673893 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6944444444444444, + "acc_stderr": 0.02563082497562135, + "acc_norm": 0.6944444444444444, + "acc_norm_stderr": 0.02563082497562135 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7512953367875648, + "acc_stderr": 0.031195840877700304, + "acc_norm": 0.7512953367875648, + "acc_norm_stderr": 0.031195840877700304 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.5, + "acc_stderr": 0.047036043419179864, + "acc_norm": 0.5, + "acc_norm_stderr": 0.047036043419179864 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.7834862385321101, + "acc_stderr": 0.01765871059444314, + "acc_norm": 0.7834862385321101, + "acc_norm_stderr": 0.01765871059444314 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4365079365079365, + "acc_stderr": 0.04435932892851466, + "acc_norm": 0.4365079365079365, + "acc_norm_stderr": 0.04435932892851466 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.6568627450980392, + "acc_stderr": 0.027184498909941616, + "acc_norm": 0.6568627450980392, + "acc_norm_stderr": 0.027184498909941616 + }, + 
"harness|ko_mmlu_business_ethics|5": { + "acc": 0.73, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.73, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7933884297520661, + "acc_stderr": 0.03695980128098824, + "acc_norm": 0.7933884297520661, + "acc_norm_stderr": 0.03695980128098824 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.6578947368421053, + "acc_stderr": 0.038607315993160904, + "acc_norm": 0.6578947368421053, + "acc_norm_stderr": 0.038607315993160904 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.6143790849673203, + "acc_stderr": 0.01969145905235404, + "acc_norm": 0.6143790849673203, + "acc_norm_stderr": 0.01969145905235404 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.450354609929078, + "acc_stderr": 0.02968010556502904, + "acc_norm": 0.450354609929078, + "acc_norm_stderr": 0.02968010556502904 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.45535714285714285, + "acc_stderr": 0.04726835553719099, + "acc_norm": 0.45535714285714285, + "acc_norm_stderr": 0.04726835553719099 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5231481481481481, + "acc_stderr": 0.034063153607115065, + "acc_norm": 0.5231481481481481, + "acc_norm_stderr": 0.034063153607115065 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.45027932960893857, + "acc_stderr": 0.016639615236845817, + "acc_norm": 0.45027932960893857, + "acc_norm_stderr": 0.016639615236845817 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.53, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.53, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.7, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.7, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5735294117647058, + "acc_stderr": 0.030042615832714867, + "acc_norm": 0.5735294117647058, + 
"acc_norm_stderr": 0.030042615832714867 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6857142857142857, + "acc_stderr": 0.029719329422417454, + "acc_norm": 0.6857142857142857, + "acc_norm_stderr": 0.029719329422417454 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.759493670886076, + "acc_stderr": 0.027820781981149678, + "acc_norm": 0.759493670886076, + "acc_norm_stderr": 0.027820781981149678 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.46284224250325945, + "acc_stderr": 0.01273492357953206, + "acc_norm": 0.46284224250325945, + "acc_norm_stderr": 0.01273492357953206 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.7107843137254902, + "acc_stderr": 0.031822318676475544, + "acc_norm": 0.7107843137254902, + "acc_norm_stderr": 0.031822318676475544 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6727272727272727, + "acc_stderr": 0.03663974994391242, + "acc_norm": 0.6727272727272727, + "acc_norm_stderr": 0.03663974994391242 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.6829865361077111, + "mc1_stderr": 0.01628920337440338, + "mc2": 0.7885502219855742, + "mc2_stderr": 0.013364293397046381 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5525383707201889, + "acc_stderr": 0.01709519030150058, + "acc_norm": 0.5702479338842975, + "acc_norm_stderr": 0.01701984753597221 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 
1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + 
"harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "JY623/KoSOLRA-10.7B-merge-v2.3", + "model_sha": "7fca6e88fa262cf88251537d1deff1aab6b37a73", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/JYKIM-AI/Mistral-7B-SFT-v0.1/result_2023-11-20 10:30:05.json b/JYKIM-AI/Mistral-7B-SFT-v0.1/result_2023-11-20 10:30:05.json new file mode 100644 index 0000000000000000000000000000000000000000..997dea0055a29b2c29387b316fbcd1a349512a5a --- /dev/null +++ b/JYKIM-AI/Mistral-7B-SFT-v0.1/result_2023-11-20 10:30:05.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.25426621160409557, + "acc_stderr": 0.012724999945157741, + "acc_norm": 0.31143344709897613, + "acc_norm_stderr": 0.013532472099850944 + }, + "harness|ko_hellaswag|10": { + "acc": 0.33100975901214896, + "acc_stderr": 0.00469614833957098, + "acc_norm": 0.40290778729336785, + "acc_norm_stderr": 0.004894801119898609 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.27485380116959063, + "acc_stderr": 0.03424042924691583, + "acc_norm": 0.27485380116959063, + "acc_norm_stderr": 0.03424042924691583 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2621359223300971, + "acc_stderr": 0.04354631077260597, + "acc_norm": 0.2621359223300971, + "acc_norm_stderr": 0.04354631077260597 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.32567049808429116, + "acc_stderr": 0.016757989458549675, + "acc_norm": 0.32567049808429116, + "acc_norm_stderr": 0.016757989458549675 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.03944624162501117, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.03944624162501117 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + 
"acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.33617021276595743, + "acc_stderr": 0.030881618520676942, + "acc_norm": 0.33617021276595743, + "acc_norm_stderr": 0.030881618520676942 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.2710843373493976, + "acc_stderr": 0.03460579907553026, + "acc_norm": 0.2710843373493976, + "acc_norm_stderr": 0.03460579907553026 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3022508038585209, + "acc_stderr": 0.026082700695399662, + "acc_norm": 0.3022508038585209, + "acc_norm_stderr": 0.026082700695399662 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.36771300448430494, + "acc_stderr": 0.03236198350928275, + "acc_norm": 0.36771300448430494, + "acc_norm_stderr": 0.03236198350928275 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3053435114503817, + "acc_stderr": 0.0403931497872456, + "acc_norm": 0.3053435114503817, + "acc_norm_stderr": 0.0403931497872456 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.03191178226713545, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.03191178226713545 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2482758620689655, + "acc_stderr": 0.03600105692727772, + "acc_norm": 0.2482758620689655, + "acc_norm_stderr": 0.03600105692727772 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.044405219061793275, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.044405219061793275 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3403361344537815, + "acc_stderr": 0.03077805742293167, + "acc_norm": 0.3403361344537815, + "acc_norm_stderr": 0.03077805742293167 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.36666666666666664, + 
"acc_stderr": 0.024433016466052455, + "acc_norm": 0.36666666666666664, + "acc_norm_stderr": 0.024433016466052455 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036845, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036845 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.04414343666854932, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.04414343666854932 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.18226600985221675, + "acc_stderr": 0.02716334085964515, + "acc_norm": 0.18226600985221675, + "acc_norm_stderr": 0.02716334085964515 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2838709677419355, + "acc_stderr": 0.025649381063029258, + "acc_norm": 0.2838709677419355, + "acc_norm_stderr": 0.025649381063029258 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.23931623931623933, + "acc_stderr": 0.027951826808924333, + "acc_norm": 0.23931623931623933, + "acc_norm_stderr": 0.027951826808924333 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.24528301886792453, + "acc_stderr": 0.02648035717989569, + "acc_norm": 0.24528301886792453, + "acc_norm_stderr": 0.02648035717989569 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.3090909090909091, + "acc_stderr": 0.044262946482000985, + "acc_norm": 0.3090909090909091, + "acc_norm_stderr": 0.044262946482000985 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24074074074074073, + "acc_stderr": 0.026067159222275798, + "acc_norm": 0.24074074074074073, + "acc_norm_stderr": 0.026067159222275798 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + "acc_stderr": 0.037345356767871984, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.037345356767871984 + }, + "harness|ko_mmlu_sociology|5": 
{ + "acc": 0.24875621890547264, + "acc_stderr": 0.030567675938916718, + "acc_norm": 0.24875621890547264, + "acc_norm_stderr": 0.030567675938916718 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2543352601156069, + "acc_stderr": 0.0332055644308557, + "acc_norm": 0.2543352601156069, + "acc_norm_stderr": 0.0332055644308557 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.022644212615525218, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.022644212615525218 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.24305555555555555, + "acc_stderr": 0.03586879280080341, + "acc_norm": 0.24305555555555555, + "acc_norm_stderr": 0.03586879280080341 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.2, + "acc_stderr": 0.040201512610368445, + "acc_norm": 0.2, + "acc_norm_stderr": 0.040201512610368445 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2398843930635838, + "acc_stderr": 0.022989592543123567, + "acc_norm": 0.2398843930635838, + "acc_norm_stderr": 0.022989592543123567 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2331288343558282, + "acc_stderr": 0.0332201579577674, + "acc_norm": 0.2331288343558282, + "acc_norm_stderr": 0.0332201579577674 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.30246913580246915, + "acc_stderr": 0.025557653981868055, + "acc_norm": 0.30246913580246915, + "acc_norm_stderr": 0.025557653981868055 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816505, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816505 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.33678756476683935, + "acc_stderr": 0.03410780251836183, + "acc_norm": 0.33678756476683935, + "acc_norm_stderr": 0.03410780251836183 + }, + 
"harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.04049339297748141, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.04049339297748141 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.28623853211009176, + "acc_stderr": 0.019379436628919982, + "acc_norm": 0.28623853211009176, + "acc_norm_stderr": 0.019379436628919982 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.15873015873015872, + "acc_stderr": 0.03268454013011744, + "acc_norm": 0.15873015873015872, + "acc_norm_stderr": 0.03268454013011744 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.28104575163398693, + "acc_stderr": 0.025738854797818737, + "acc_norm": 0.28104575163398693, + "acc_norm_stderr": 0.025738854797818737 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2727272727272727, + "acc_stderr": 0.04065578140908705, + "acc_norm": 0.2727272727272727, + "acc_norm_stderr": 0.04065578140908705 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.21052631578947367, + "acc_stderr": 0.03317672787533157, + "acc_norm": 0.21052631578947367, + "acc_norm_stderr": 0.03317672787533157 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.21895424836601307, + "acc_stderr": 0.016729937565537534, + "acc_norm": 0.21895424836601307, + "acc_norm_stderr": 0.016729937565537534 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2730496453900709, + "acc_stderr": 0.026577860943307857, + "acc_norm": 0.2730496453900709, + "acc_norm_stderr": 0.026577860943307857 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25, + "acc_stderr": 0.04109974682633932, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04109974682633932 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4675925925925926, + "acc_stderr": 0.03402801581358966, + "acc_norm": 0.4675925925925926, + 
"acc_norm_stderr": 0.03402801581358966 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27039106145251396, + "acc_stderr": 0.014854993938010088, + "acc_norm": 0.27039106145251396, + "acc_norm_stderr": 0.014854993938010088 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4485294117647059, + "acc_stderr": 0.030211479609121593, + "acc_norm": 0.4485294117647059, + "acc_norm_stderr": 0.030211479609121593 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4163265306122449, + "acc_stderr": 0.03155782816556164, + "acc_norm": 0.4163265306122449, + "acc_norm_stderr": 0.03155782816556164 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.26582278481012656, + "acc_stderr": 0.02875679962965834, + "acc_norm": 0.26582278481012656, + "acc_norm_stderr": 0.02875679962965834 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2516297262059974, + "acc_stderr": 0.011083276280441904, + "acc_norm": 0.2516297262059974, + "acc_norm_stderr": 0.011083276280441904 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.030964517926923393, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.030964517926923393 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03225078108306289, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03225078108306289 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.27050183598531213, + "mc1_stderr": 0.015550778332842892, + "mc2": 0.4372879851386349, + "mc2_stderr": 0.015410544751862798 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.22904368358913813, + "acc_stderr": 
0.01444737227725382, + "acc_norm": 0.2715466351829988, + "acc_norm_stderr": 0.015291071117310378 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + 
"harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "JYKIM-AI/Mistral-7B-SFT-v0.1", + "model_sha": "d950efa7c559ee0ef5e785429f1e354386cfedf0", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/JYKIM-AI/Mistral-7B-SFT/result_2023-11-20 11:05:49.json b/JYKIM-AI/Mistral-7B-SFT/result_2023-11-20 11:05:49.json new file mode 100644 index 0000000000000000000000000000000000000000..cfa92074349f1267ef56b5ea7ac2d165dd0a17e2 --- /dev/null +++ b/JYKIM-AI/Mistral-7B-SFT/result_2023-11-20 11:05:49.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.25426621160409557, + "acc_stderr": 0.012724999945157741, + "acc_norm": 0.31143344709897613, + "acc_norm_stderr": 0.013532472099850944 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3311093407687712, + "acc_stderr": 0.004696505101217403, + "acc_norm": 0.40290778729336785, + "acc_norm_stderr": 0.004894801119898608 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.27485380116959063, + "acc_stderr": 0.03424042924691583, + "acc_norm": 
0.27485380116959063, + "acc_norm_stderr": 0.03424042924691583 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2621359223300971, + "acc_stderr": 0.04354631077260597, + "acc_norm": 0.2621359223300971, + "acc_norm_stderr": 0.04354631077260597 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.32567049808429116, + "acc_stderr": 0.016757989458549675, + "acc_norm": 0.32567049808429116, + "acc_norm_stderr": 0.016757989458549675 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.03944624162501117, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.03944624162501117 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.33617021276595743, + "acc_stderr": 0.030881618520676942, + "acc_norm": 0.33617021276595743, + "acc_norm_stderr": 0.030881618520676942 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.2710843373493976, + "acc_stderr": 0.03460579907553026, + "acc_norm": 0.2710843373493976, + "acc_norm_stderr": 0.03460579907553026 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3022508038585209, + "acc_stderr": 0.026082700695399662, + "acc_norm": 0.3022508038585209, + "acc_norm_stderr": 0.026082700695399662 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.36771300448430494, + "acc_stderr": 0.03236198350928275, + "acc_norm": 0.36771300448430494, + "acc_norm_stderr": 0.03236198350928275 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3053435114503817, + "acc_stderr": 0.0403931497872456, + "acc_norm": 0.3053435114503817, + "acc_norm_stderr": 0.0403931497872456 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.03191178226713545, + "acc_norm": 
0.2777777777777778, + "acc_norm_stderr": 0.03191178226713545 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2482758620689655, + "acc_stderr": 0.03600105692727772, + "acc_norm": 0.2482758620689655, + "acc_norm_stderr": 0.03600105692727772 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.044405219061793275, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.044405219061793275 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3403361344537815, + "acc_stderr": 0.03077805742293167, + "acc_norm": 0.3403361344537815, + "acc_norm_stderr": 0.03077805742293167 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.36666666666666664, + "acc_stderr": 0.024433016466052455, + "acc_norm": 0.36666666666666664, + "acc_norm_stderr": 0.024433016466052455 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036845, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036845 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.04414343666854932, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.04414343666854932 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.18226600985221675, + "acc_stderr": 0.02716334085964515, + "acc_norm": 0.18226600985221675, + "acc_norm_stderr": 0.02716334085964515 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2838709677419355, + "acc_stderr": 0.025649381063029258, + "acc_norm": 0.2838709677419355, + "acc_norm_stderr": 0.025649381063029258 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.23931623931623933, + "acc_stderr": 0.027951826808924333, + "acc_norm": 0.23931623931623933, + "acc_norm_stderr": 0.027951826808924333 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.24528301886792453, 
+ "acc_stderr": 0.02648035717989569, + "acc_norm": 0.24528301886792453, + "acc_norm_stderr": 0.02648035717989569 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.3090909090909091, + "acc_stderr": 0.044262946482000985, + "acc_norm": 0.3090909090909091, + "acc_norm_stderr": 0.044262946482000985 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24074074074074073, + "acc_stderr": 0.026067159222275798, + "acc_norm": 0.24074074074074073, + "acc_norm_stderr": 0.026067159222275798 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + "acc_stderr": 0.037345356767871984, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.037345356767871984 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.24875621890547264, + "acc_stderr": 0.030567675938916718, + "acc_norm": 0.24875621890547264, + "acc_norm_stderr": 0.030567675938916718 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2543352601156069, + "acc_stderr": 0.0332055644308557, + "acc_norm": 0.2543352601156069, + "acc_norm_stderr": 0.0332055644308557 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.022644212615525218, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.022644212615525218 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.24305555555555555, + "acc_stderr": 0.03586879280080341, + "acc_norm": 0.24305555555555555, + "acc_norm_stderr": 0.03586879280080341 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.2, + "acc_stderr": 0.040201512610368445, + "acc_norm": 0.2, + "acc_norm_stderr": 0.040201512610368445 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2398843930635838, + "acc_stderr": 0.022989592543123567, + "acc_norm": 0.2398843930635838, + "acc_norm_stderr": 0.022989592543123567 + }, + 
"harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2331288343558282, + "acc_stderr": 0.0332201579577674, + "acc_norm": 0.2331288343558282, + "acc_norm_stderr": 0.0332201579577674 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.30246913580246915, + "acc_stderr": 0.025557653981868055, + "acc_norm": 0.30246913580246915, + "acc_norm_stderr": 0.025557653981868055 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816505, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816505 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.33678756476683935, + "acc_stderr": 0.03410780251836183, + "acc_norm": 0.33678756476683935, + "acc_norm_stderr": 0.03410780251836183 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.04049339297748141, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.04049339297748141 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.28623853211009176, + "acc_stderr": 0.019379436628919982, + "acc_norm": 0.28623853211009176, + "acc_norm_stderr": 0.019379436628919982 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.15873015873015872, + "acc_stderr": 0.03268454013011744, + "acc_norm": 0.15873015873015872, + "acc_norm_stderr": 0.03268454013011744 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.28104575163398693, + "acc_stderr": 0.025738854797818737, + "acc_norm": 0.28104575163398693, + "acc_norm_stderr": 0.025738854797818737 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2727272727272727, + "acc_stderr": 0.04065578140908705, + "acc_norm": 0.2727272727272727, + "acc_norm_stderr": 0.04065578140908705 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.21052631578947367, + "acc_stderr": 0.03317672787533157, + "acc_norm": 0.21052631578947367, + 
"acc_norm_stderr": 0.03317672787533157 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.21895424836601307, + "acc_stderr": 0.016729937565537534, + "acc_norm": 0.21895424836601307, + "acc_norm_stderr": 0.016729937565537534 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2730496453900709, + "acc_stderr": 0.026577860943307857, + "acc_norm": 0.2730496453900709, + "acc_norm_stderr": 0.026577860943307857 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25, + "acc_stderr": 0.04109974682633932, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04109974682633932 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4675925925925926, + "acc_stderr": 0.03402801581358966, + "acc_norm": 0.4675925925925926, + "acc_norm_stderr": 0.03402801581358966 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27039106145251396, + "acc_stderr": 0.014854993938010088, + "acc_norm": 0.27039106145251396, + "acc_norm_stderr": 0.014854993938010088 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4485294117647059, + "acc_stderr": 0.030211479609121593, + "acc_norm": 0.4485294117647059, + "acc_norm_stderr": 0.030211479609121593 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4163265306122449, + "acc_stderr": 0.03155782816556164, + "acc_norm": 0.4163265306122449, + "acc_norm_stderr": 0.03155782816556164 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.26582278481012656, + "acc_stderr": 0.02875679962965834, + "acc_norm": 0.26582278481012656, + "acc_norm_stderr": 0.02875679962965834 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2516297262059974, + "acc_stderr": 
0.011083276280441904, + "acc_norm": 0.2516297262059974, + "acc_norm_stderr": 0.011083276280441904 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.030964517926923393, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.030964517926923393 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03225078108306289, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03225078108306289 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.27050183598531213, + "mc1_stderr": 0.015550778332842892, + "mc2": 0.4372897231981029, + "mc2_stderr": 0.01541061408460767 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.22904368358913813, + "acc_stderr": 0.01444737227725382, + "acc_norm": 0.2715466351829988, + "acc_norm_stderr": 0.015291071117310378 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + 
"harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "JYKIM-AI/Mistral-7B-SFT", + "model_sha": "ea2e2395fcf295d293c236b0228ece970cca5aba", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Ja3ck/Mistral-instruct-DPO-Y24-v2/result_2023-12-06 23:00:18.json 
b/Ja3ck/Mistral-instruct-DPO-Y24-v2/result_2023-12-06 23:00:18.json new file mode 100644 index 0000000000000000000000000000000000000000..0e7621bf13fad345ca0f7d6cb23c1ac60a3a950b --- /dev/null +++ b/Ja3ck/Mistral-instruct-DPO-Y24-v2/result_2023-12-06 23:00:18.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3532423208191126, + "acc_stderr": 0.013967822714840053, + "acc_norm": 0.4197952218430034, + "acc_norm_stderr": 0.014422181226303028 + }, + "harness|ko_hellaswag|10": { + "acc": 0.38697470623381797, + "acc_stderr": 0.004860623733461132, + "acc_norm": 0.5070703047201752, + "acc_norm_stderr": 0.004989282516055394 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5730994152046783, + "acc_stderr": 0.03793620616529916, + "acc_norm": 0.5730994152046783, + "acc_norm_stderr": 0.03793620616529916 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5631067961165048, + "acc_stderr": 0.049111471073657764, + "acc_norm": 0.5631067961165048, + "acc_norm_stderr": 0.049111471073657764 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.49169859514687103, + "acc_stderr": 0.017877498991072008, + "acc_norm": 0.49169859514687103, + "acc_norm_stderr": 0.017877498991072008 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37777777777777777, + "acc_stderr": 0.04188307537595853, + "acc_norm": 0.37777777777777777, + "acc_norm_stderr": 0.04188307537595853 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932269, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932269 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3872340425531915, + "acc_stderr": 0.03184389265339526, + "acc_norm": 0.3872340425531915, + "acc_norm_stderr": 0.03184389265339526 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42771084337349397, + "acc_stderr": 0.03851597683718533, + "acc_norm": 0.42771084337349397, + "acc_norm_stderr": 0.03851597683718533 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 
0.4983922829581994, + "acc_stderr": 0.028397944907806612, + "acc_norm": 0.4983922829581994, + "acc_norm_stderr": 0.028397944907806612 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.45739910313901344, + "acc_stderr": 0.033435777055830646, + "acc_norm": 0.45739910313901344, + "acc_norm_stderr": 0.033435777055830646 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4122137404580153, + "acc_stderr": 0.04317171194870254, + "acc_norm": 0.4122137404580153, + "acc_norm_stderr": 0.04317171194870254 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5757575757575758, + "acc_stderr": 0.03521224908841585, + "acc_norm": 0.5757575757575758, + "acc_norm_stderr": 0.03521224908841585 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.47586206896551725, + "acc_stderr": 0.041618085035015295, + "acc_norm": 0.47586206896551725, + "acc_norm_stderr": 0.041618085035015295 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.04488482852329017, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.04488482852329017 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5042016806722689, + "acc_stderr": 0.03247734334448111, + "acc_norm": 0.5042016806722689, + "acc_norm_stderr": 0.03247734334448111 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.44871794871794873, + "acc_stderr": 0.025217315184846482, + "acc_norm": 0.44871794871794873, + "acc_norm_stderr": 0.025217315184846482 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.58, + "acc_stderr": 0.04960449637488583, + "acc_norm": 0.58, + "acc_norm_stderr": 0.04960449637488583 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + 
"harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5092592592592593, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.5092592592592593, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.46798029556650245, + "acc_stderr": 0.035107665979592154, + "acc_norm": 0.46798029556650245, + "acc_norm_stderr": 0.035107665979592154 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4774193548387097, + "acc_stderr": 0.028414985019707868, + "acc_norm": 0.4774193548387097, + "acc_norm_stderr": 0.028414985019707868 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7307692307692307, + "acc_stderr": 0.029058588303748845, + "acc_norm": 0.7307692307692307, + "acc_norm_stderr": 0.029058588303748845 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4716981132075472, + "acc_stderr": 0.030723535249006107, + "acc_norm": 0.4716981132075472, + "acc_norm_stderr": 0.030723535249006107 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4727272727272727, + "acc_stderr": 0.04782001791380063, + "acc_norm": 0.4727272727272727, + "acc_norm_stderr": 0.04782001791380063 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.02874204090394849, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.02874204090394849 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.03780445850526733, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.03780445850526733 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6218905472636815, + "acc_stderr": 0.03428867848778658, + "acc_norm": 0.6218905472636815, + "acc_norm_stderr": 0.03428867848778658 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4161849710982659, + "acc_stderr": 0.03758517775404947, + "acc_norm": 0.4161849710982659, + "acc_norm_stderr": 0.03758517775404947 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.36507936507936506, + "acc_stderr": 
0.024796060602699954, + "acc_norm": 0.36507936507936506, + "acc_norm_stderr": 0.024796060602699954 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3819444444444444, + "acc_stderr": 0.04062990784146667, + "acc_norm": 0.3819444444444444, + "acc_norm_stderr": 0.04062990784146667 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.63, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.63, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5289017341040463, + "acc_stderr": 0.02687408588351835, + "acc_norm": 0.5289017341040463, + "acc_norm_stderr": 0.02687408588351835 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5337423312883436, + "acc_stderr": 0.039194155450484096, + "acc_norm": 0.5337423312883436, + "acc_norm_stderr": 0.039194155450484096 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.49691358024691357, + "acc_stderr": 0.027820214158594384, + "acc_norm": 0.49691358024691357, + "acc_norm_stderr": 0.027820214158594384 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5595854922279793, + "acc_stderr": 0.035827245300360945, + "acc_norm": 0.5595854922279793, + "acc_norm_stderr": 0.035827245300360945 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04434600701584925, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04434600701584925 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.48440366972477067, + "acc_stderr": 0.02142689153920805, + "acc_norm": 0.48440366972477067, + "acc_norm_stderr": 0.02142689153920805 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3333333333333333, + 
"acc_stderr": 0.042163702135578345, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.042163702135578345 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5, + "acc_stderr": 0.028629916715693413, + "acc_norm": 0.5, + "acc_norm_stderr": 0.028629916715693413 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6694214876033058, + "acc_stderr": 0.04294340845212093, + "acc_norm": 0.6694214876033058, + "acc_norm_stderr": 0.04294340845212093 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4276315789473684, + "acc_stderr": 0.04026097083296558, + "acc_norm": 0.4276315789473684, + "acc_norm_stderr": 0.04026097083296558 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.39215686274509803, + "acc_stderr": 0.019751726508762633, + "acc_norm": 0.39215686274509803, + "acc_norm_stderr": 0.019751726508762633 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.36524822695035464, + "acc_stderr": 0.028723863853281278, + "acc_norm": 0.36524822695035464, + "acc_norm_stderr": 0.028723863853281278 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.04547960999764376, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.04547960999764376 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4027777777777778, + "acc_stderr": 0.03344887382997865, + "acc_norm": 0.4027777777777778, + "acc_norm_stderr": 0.03344887382997865 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + 
"acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.34191176470588236, + "acc_stderr": 0.028814722422254187, + "acc_norm": 0.34191176470588236, + "acc_norm_stderr": 0.028814722422254187 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4448979591836735, + "acc_stderr": 0.031814251181977865, + "acc_norm": 0.4448979591836735, + "acc_norm_stderr": 0.031814251181977865 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6075949367088608, + "acc_stderr": 0.0317847187456473, + "acc_norm": 0.6075949367088608, + "acc_norm_stderr": 0.0317847187456473 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3435462842242503, + "acc_stderr": 0.012128961174190161, + "acc_norm": 0.3435462842242503, + "acc_norm_stderr": 0.012128961174190161 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.44607843137254904, + "acc_stderr": 0.034888454513049734, + "acc_norm": 0.44607843137254904, + "acc_norm_stderr": 0.034888454513049734 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.503030303030303, + "acc_stderr": 0.03904272341431855, + "acc_norm": 0.503030303030303, + "acc_norm_stderr": 0.03904272341431855 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.28151774785801714, + "mc1_stderr": 0.01574402724825605, + "mc2": 0.46644130422803615, + "mc2_stderr": 0.015463171251968822 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.41912632821723733, + "acc_stderr": 0.016963995010862792, + "acc_norm": 0.4462809917355372, + "acc_norm_stderr": 0.017090852631668332 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + 
"harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + 
"harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Ja3ck/Mistral-instruct-DPO-Y24-v2", + "model_sha": "15acc5b8edbcbfda168710c8764a4d13fc98fd05", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Ja3ck/Mistral-instruct-IPO-Y24-v1/result_2023-12-11 06:51:39.json b/Ja3ck/Mistral-instruct-IPO-Y24-v1/result_2023-12-11 06:51:39.json new file mode 100644 index 0000000000000000000000000000000000000000..0d8d05bf2d1d75e70fb99d4110b1b93980342812 --- /dev/null +++ b/Ja3ck/Mistral-instruct-IPO-Y24-v1/result_2023-12-11 06:51:39.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.38310580204778155, + "acc_stderr": 0.014206472661672877, + "acc_norm": 0.4232081911262799, + "acc_norm_stderr": 0.014438036220848017 + }, + "harness|ko_hellaswag|10": { + "acc": 0.39454291973710415, + "acc_stderr": 0.004877534215987089, + "acc_norm": 0.5108544114718183, + "acc_norm_stderr": 0.0049886054982739 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5087719298245614, + "acc_stderr": 0.03834234744164993, + "acc_norm": 0.5087719298245614, + "acc_norm_stderr": 0.03834234744164993 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6019417475728155, + "acc_stderr": 0.04846748253977238, + "acc_norm": 0.6019417475728155, + "acc_norm_stderr": 0.04846748253977238 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.49936143039591313, + "acc_stderr": 0.01787994891443167, + "acc_norm": 0.49936143039591313, + "acc_norm_stderr": 0.01787994891443167 + }, + 
"harness|ko_mmlu_anatomy|5": { + "acc": 0.3851851851851852, + "acc_stderr": 0.042039210401562783, + "acc_norm": 0.3851851851851852, + "acc_norm_stderr": 0.042039210401562783 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4340425531914894, + "acc_stderr": 0.032400380867927465, + "acc_norm": 0.4340425531914894, + "acc_norm_stderr": 0.032400380867927465 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.41566265060240964, + "acc_stderr": 0.03836722176598052, + "acc_norm": 0.41566265060240964, + "acc_norm_stderr": 0.03836722176598052 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.47266881028938906, + "acc_stderr": 0.02835563356832818, + "acc_norm": 0.47266881028938906, + "acc_norm_stderr": 0.02835563356832818 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.48878923766816146, + "acc_stderr": 0.033549366530984746, + "acc_norm": 0.48878923766816146, + "acc_norm_stderr": 0.033549366530984746 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.46564885496183206, + "acc_stderr": 0.043749285605997376, + "acc_norm": 0.46564885496183206, + "acc_norm_stderr": 0.043749285605997376 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6060606060606061, + "acc_stderr": 0.03481285338232963, + "acc_norm": 0.6060606060606061, + "acc_norm_stderr": 0.03481285338232963 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.503448275862069, + "acc_stderr": 0.04166567577101579, + "acc_norm": 0.503448275862069, + "acc_norm_stderr": 0.04166567577101579 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.044405219061793275, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 
0.044405219061793275 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.47478991596638653, + "acc_stderr": 0.0324371805513741, + "acc_norm": 0.47478991596638653, + "acc_norm_stderr": 0.0324371805513741 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.46923076923076923, + "acc_stderr": 0.02530295889085015, + "acc_norm": 0.46923076923076923, + "acc_norm_stderr": 0.02530295889085015 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956914, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956914 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4187192118226601, + "acc_stderr": 0.03471192860518468, + "acc_norm": 0.4187192118226601, + "acc_norm_stderr": 0.03471192860518468 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4774193548387097, + "acc_stderr": 0.028414985019707868, + "acc_norm": 0.4774193548387097, + "acc_norm_stderr": 0.028414985019707868 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7393162393162394, + "acc_stderr": 0.02876034895652341, + "acc_norm": 0.7393162393162394, + "acc_norm_stderr": 0.02876034895652341 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.47924528301886793, + "acc_stderr": 0.030746349975723463, + "acc_norm": 0.47924528301886793, + "acc_norm_stderr": 0.030746349975723463 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5272727272727272, + "acc_stderr": 0.04782001791380061, + "acc_norm": 0.5272727272727272, + "acc_norm_stderr": 0.04782001791380061 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2814814814814815, + "acc_stderr": 0.02742001935094527, + 
"acc_norm": 0.2814814814814815, + "acc_norm_stderr": 0.02742001935094527 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3443708609271523, + "acc_stderr": 0.038796870240733264, + "acc_norm": 0.3443708609271523, + "acc_norm_stderr": 0.038796870240733264 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6169154228855721, + "acc_stderr": 0.034375193373382504, + "acc_norm": 0.6169154228855721, + "acc_norm_stderr": 0.034375193373382504 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.42196531791907516, + "acc_stderr": 0.0376574669386515, + "acc_norm": 0.42196531791907516, + "acc_norm_stderr": 0.0376574669386515 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3835978835978836, + "acc_stderr": 0.0250437573185202, + "acc_norm": 0.3835978835978836, + "acc_norm_stderr": 0.0250437573185202 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3819444444444444, + "acc_stderr": 0.040629907841466674, + "acc_norm": 0.3819444444444444, + "acc_norm_stderr": 0.040629907841466674 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.62, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.62, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5173410404624278, + "acc_stderr": 0.026902900458666647, + "acc_norm": 0.5173410404624278, + "acc_norm_stderr": 0.026902900458666647 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.44171779141104295, + "acc_stderr": 0.03901591825836184, + "acc_norm": 0.44171779141104295, + "acc_norm_stderr": 0.03901591825836184 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.44135802469135804, + "acc_stderr": 0.027628737155668773, + "acc_norm": 0.44135802469135804, + "acc_norm_stderr": 0.027628737155668773 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.37, + "acc_stderr": 
0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.49740932642487046, + "acc_stderr": 0.03608390745384487, + "acc_norm": 0.49740932642487046, + "acc_norm_stderr": 0.03608390745384487 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.04185774424022057, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.04185774424022057 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.48440366972477067, + "acc_stderr": 0.02142689153920805, + "acc_norm": 0.48440366972477067, + "acc_norm_stderr": 0.02142689153920805 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04216370213557835, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04216370213557835 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5228758169934641, + "acc_stderr": 0.028599936776089782, + "acc_norm": 0.5228758169934641, + "acc_norm_stderr": 0.028599936776089782 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6776859504132231, + "acc_stderr": 0.042664163633521664, + "acc_norm": 0.6776859504132231, + "acc_norm_stderr": 0.042664163633521664 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.506578947368421, + "acc_stderr": 0.040685900502249704, + "acc_norm": 0.506578947368421, + "acc_norm_stderr": 0.040685900502249704 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4084967320261438, + "acc_stderr": 0.019886221037501862, + "acc_norm": 0.4084967320261438, + "acc_norm_stderr": 0.019886221037501862 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.37943262411347517, + "acc_stderr": 0.028947338851614105, + "acc_norm": 0.37943262411347517, + "acc_norm_stderr": 0.028947338851614105 + }, + 
"harness|ko_mmlu_machine_learning|5": { + "acc": 0.39285714285714285, + "acc_stderr": 0.046355501356099754, + "acc_norm": 0.39285714285714285, + "acc_norm_stderr": 0.046355501356099754 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.033509916046960436, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.033509916046960436 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2860335195530726, + "acc_stderr": 0.015113972129062136, + "acc_norm": 0.2860335195530726, + "acc_norm_stderr": 0.015113972129062136 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.57, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.57, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4375, + "acc_stderr": 0.030134614954403924, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.030134614954403924 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4448979591836735, + "acc_stderr": 0.03181425118197787, + "acc_norm": 0.4448979591836735, + "acc_norm_stderr": 0.03181425118197787 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6160337552742616, + "acc_stderr": 0.031658678064106674, + "acc_norm": 0.6160337552742616, + "acc_norm_stderr": 0.031658678064106674 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3468057366362451, + "acc_stderr": 0.012156071332318705, + "acc_norm": 0.3468057366362451, + "acc_norm_stderr": 0.012156071332318705 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.45588235294117646, + "acc_stderr": 0.03495624522015474, + "acc_norm": 0.45588235294117646, + "acc_norm_stderr": 0.03495624522015474 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4121212121212121, + "acc_stderr": 0.03843566993588718, + "acc_norm": 0.4121212121212121, + 
"acc_norm_stderr": 0.03843566993588718 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2962056303549572, + "mc1_stderr": 0.015983595101811392, + "mc2": 0.46200402478414904, + "mc2_stderr": 0.015516827306627103 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.39433293978748524, + "acc_stderr": 0.016802090674893203, + "acc_norm": 0.4769775678866588, + "acc_norm_stderr": 0.017172121546727634 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + 
"harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Ja3ck/Mistral-instruct-IPO-Y24-v1", + "model_sha": "322906ac8b7dd81de714569db3848eda97d5d40f", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Ja3ck/Mistral-instruct-Y24-DPO/result_2023-11-28 01:13:47.json b/Ja3ck/Mistral-instruct-Y24-DPO/result_2023-11-28 01:13:47.json new file mode 100644 index 0000000000000000000000000000000000000000..b27a0dcc42c575ae97e04a4ad7d1d9d71433a936 --- /dev/null +++ b/Ja3ck/Mistral-instruct-Y24-DPO/result_2023-11-28 01:13:47.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3873720136518771, + "acc_stderr": 0.01423587248790987, + "acc_norm": 0.4274744027303754, + "acc_norm_stderr": 0.014456862944650647 + }, + 
"harness|ko_hellaswag|10": { + "acc": 0.392850029874527, + "acc_stderr": 0.0048738583238407945, + "acc_norm": 0.5120493925512846, + "acc_norm_stderr": 0.004988332289642083 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.52046783625731, + "acc_stderr": 0.038316105328219295, + "acc_norm": 0.52046783625731, + "acc_norm_stderr": 0.038316105328219295 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6116504854368932, + "acc_stderr": 0.04825729337356389, + "acc_norm": 0.6116504854368932, + "acc_norm_stderr": 0.04825729337356389 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5057471264367817, + "acc_stderr": 0.017878782326129224, + "acc_norm": 0.5057471264367817, + "acc_norm_stderr": 0.017878782326129224 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3851851851851852, + "acc_stderr": 0.042039210401562783, + "acc_norm": 0.3851851851851852, + "acc_norm_stderr": 0.042039210401562783 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.43829787234042555, + "acc_stderr": 0.03243618636108102, + "acc_norm": 0.43829787234042555, + "acc_norm_stderr": 0.03243618636108102 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.41566265060240964, + "acc_stderr": 0.03836722176598052, + "acc_norm": 0.41566265060240964, + "acc_norm_stderr": 0.03836722176598052 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.47266881028938906, + "acc_stderr": 0.02835563356832818, + "acc_norm": 0.47266881028938906, + "acc_norm_stderr": 0.02835563356832818 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.484304932735426, + "acc_stderr": 0.0335412657542081, + "acc_norm": 0.484304932735426, + "acc_norm_stderr": 0.0335412657542081 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4580152671755725, + "acc_stderr": 0.04369802690578756, + "acc_norm": 0.4580152671755725, + "acc_norm_stderr": 0.04369802690578756 + }, + 
"harness|ko_mmlu_medical_genetics|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.601010101010101, + "acc_stderr": 0.03488901616852731, + "acc_norm": 0.601010101010101, + "acc_norm_stderr": 0.03488901616852731 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.503448275862069, + "acc_stderr": 0.04166567577101579, + "acc_norm": 0.503448275862069, + "acc_norm_stderr": 0.04166567577101579 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.04488482852329017, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.04488482852329017 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.47478991596638653, + "acc_stderr": 0.0324371805513741, + "acc_norm": 0.47478991596638653, + "acc_norm_stderr": 0.0324371805513741 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.46923076923076923, + "acc_stderr": 0.025302958890850154, + "acc_norm": 0.46923076923076923, + "acc_norm_stderr": 0.025302958890850154 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.52, + "acc_stderr": 0.05021167315686779, + "acc_norm": 0.52, + "acc_norm_stderr": 0.05021167315686779 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.03481904844438803, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.03481904844438803 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4806451612903226, + "acc_stderr": 0.028422687404312107, + "acc_norm": 0.4806451612903226, + "acc_norm_stderr": 0.028422687404312107 + }, + 
"harness|ko_mmlu_marketing|5": { + "acc": 0.7478632478632479, + "acc_stderr": 0.028447965476231022, + "acc_norm": 0.7478632478632479, + "acc_norm_stderr": 0.028447965476231022 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4830188679245283, + "acc_stderr": 0.030755120364119905, + "acc_norm": 0.4830188679245283, + "acc_norm_stderr": 0.030755120364119905 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5272727272727272, + "acc_stderr": 0.04782001791380061, + "acc_norm": 0.5272727272727272, + "acc_norm_stderr": 0.04782001791380061 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2851851851851852, + "acc_stderr": 0.027528599210340492, + "acc_norm": 0.2851851851851852, + "acc_norm_stderr": 0.027528599210340492 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3509933774834437, + "acc_stderr": 0.03896981964257375, + "acc_norm": 0.3509933774834437, + "acc_norm_stderr": 0.03896981964257375 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6218905472636815, + "acc_stderr": 0.034288678487786564, + "acc_norm": 0.6218905472636815, + "acc_norm_stderr": 0.034288678487786564 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4393063583815029, + "acc_stderr": 0.03784271932887467, + "acc_norm": 0.4393063583815029, + "acc_norm_stderr": 0.03784271932887467 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.38095238095238093, + "acc_stderr": 0.025010749116137595, + "acc_norm": 0.38095238095238093, + "acc_norm_stderr": 0.025010749116137595 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.04076663253918567, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.04076663253918567 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.63, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.63, + "acc_norm_stderr": 
0.048523658709391 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5202312138728323, + "acc_stderr": 0.026897049996382868, + "acc_norm": 0.5202312138728323, + "acc_norm_stderr": 0.026897049996382868 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.44171779141104295, + "acc_stderr": 0.03901591825836184, + "acc_norm": 0.44171779141104295, + "acc_norm_stderr": 0.03901591825836184 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4382716049382716, + "acc_stderr": 0.027607914087400473, + "acc_norm": 0.4382716049382716, + "acc_norm_stderr": 0.027607914087400473 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.49740932642487046, + "acc_stderr": 0.03608390745384487, + "acc_norm": 0.49740932642487046, + "acc_norm_stderr": 0.03608390745384487 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.04142439719489358, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.04142439719489358 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.48440366972477067, + "acc_stderr": 0.02142689153920805, + "acc_norm": 0.48440366972477067, + "acc_norm_stderr": 0.02142689153920805 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3412698412698413, + "acc_stderr": 0.04240799327574924, + "acc_norm": 0.3412698412698413, + "acc_norm_stderr": 0.04240799327574924 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5294117647058824, + "acc_stderr": 0.028580341065138296, + "acc_norm": 0.5294117647058824, + "acc_norm_stderr": 0.028580341065138296 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6776859504132231, + "acc_stderr": 0.042664163633521664, + "acc_norm": 
0.6776859504132231, + "acc_norm_stderr": 0.042664163633521664 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.506578947368421, + "acc_stderr": 0.040685900502249704, + "acc_norm": 0.506578947368421, + "acc_norm_stderr": 0.040685900502249704 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4068627450980392, + "acc_stderr": 0.019873802005061177, + "acc_norm": 0.4068627450980392, + "acc_norm_stderr": 0.019873802005061177 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.375886524822695, + "acc_stderr": 0.028893955412115882, + "acc_norm": 0.375886524822695, + "acc_norm_stderr": 0.028893955412115882 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4107142857142857, + "acc_stderr": 0.04669510663875192, + "acc_norm": 0.4107142857142857, + "acc_norm_stderr": 0.04669510663875192 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.033622774366080424, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.033622774366080424 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27932960893854747, + "acc_stderr": 0.015005762446786173, + "acc_norm": 0.27932960893854747, + "acc_norm_stderr": 0.015005762446786173 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.57, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.57, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4338235294117647, + "acc_stderr": 0.030105636570016636, + "acc_norm": 0.4338235294117647, + "acc_norm_stderr": 0.030105636570016636 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.44081632653061226, + "acc_stderr": 0.03178419114175363, + "acc_norm": 0.44081632653061226, + "acc_norm_stderr": 0.03178419114175363 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6160337552742616, + 
"acc_stderr": 0.031658678064106674, + "acc_norm": 0.6160337552742616, + "acc_norm_stderr": 0.031658678064106674 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3428943937418514, + "acc_stderr": 0.012123463271585895, + "acc_norm": 0.3428943937418514, + "acc_norm_stderr": 0.012123463271585895 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.45588235294117646, + "acc_stderr": 0.03495624522015474, + "acc_norm": 0.45588235294117646, + "acc_norm_stderr": 0.03495624522015474 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4121212121212121, + "acc_stderr": 0.03843566993588718, + "acc_norm": 0.4121212121212121, + "acc_norm_stderr": 0.03843566993588718 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2937576499388005, + "mc1_stderr": 0.015945068581236614, + "mc2": 0.4623692353701492, + "mc2_stderr": 0.015502597273810991 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3860684769775679, + "acc_stderr": 0.016738130760321743, + "acc_norm": 0.4722550177095632, + "acc_norm_stderr": 0.017163867979456012 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + 
"harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Ja3ck/Mistral-instruct-Y24-DPO", + "model_sha": "5cadddfbeeac1dd2be25ea036d8623968e987f3c", + "model_dtype": "torch.float16", + "lighteval_sha": "", + 
"num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Ja3ck/Mistral-instruct-Y24-v5/result_2023-11-24 03:55:25.json b/Ja3ck/Mistral-instruct-Y24-v5/result_2023-11-24 03:55:25.json new file mode 100644 index 0000000000000000000000000000000000000000..2a53bc03e4bd24718b7022542c684c3bacdba510 --- /dev/null +++ b/Ja3ck/Mistral-instruct-Y24-v5/result_2023-11-24 03:55:25.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3839590443686007, + "acc_stderr": 0.01421244498065189, + "acc_norm": 0.4274744027303754, + "acc_norm_stderr": 0.014456862944650647 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3906592312288389, + "acc_stderr": 0.004869010152280755, + "acc_norm": 0.5073690499900418, + "acc_norm_stderr": 0.004989239462835229 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5263157894736842, + "acc_stderr": 0.03829509868994727, + "acc_norm": 0.5263157894736842, + "acc_norm_stderr": 0.03829509868994727 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6019417475728155, + "acc_stderr": 0.04846748253977238, + "acc_norm": 0.6019417475728155, + "acc_norm_stderr": 0.04846748253977238 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5044699872286079, + "acc_stderr": 0.01787924897058436, + "acc_norm": 0.5044699872286079, + "acc_norm_stderr": 0.01787924897058436 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37777777777777777, + "acc_stderr": 0.04188307537595853, + "acc_norm": 0.37777777777777777, + "acc_norm_stderr": 0.04188307537595853 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4340425531914894, + "acc_stderr": 0.032400380867927465, + "acc_norm": 0.4340425531914894, + "acc_norm_stderr": 0.032400380867927465 + }, + "harness|ko_mmlu_virology|5": { + "acc": 
0.41566265060240964, + "acc_stderr": 0.03836722176598052, + "acc_norm": 0.41566265060240964, + "acc_norm_stderr": 0.03836722176598052 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4694533762057878, + "acc_stderr": 0.028345045864840678, + "acc_norm": 0.4694533762057878, + "acc_norm_stderr": 0.028345045864840678 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.484304932735426, + "acc_stderr": 0.0335412657542081, + "acc_norm": 0.484304932735426, + "acc_norm_stderr": 0.0335412657542081 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4580152671755725, + "acc_stderr": 0.04369802690578756, + "acc_norm": 0.4580152671755725, + "acc_norm_stderr": 0.04369802690578756 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.601010101010101, + "acc_stderr": 0.03488901616852731, + "acc_norm": 0.601010101010101, + "acc_norm_stderr": 0.03488901616852731 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4896551724137931, + "acc_stderr": 0.041657747757287644, + "acc_norm": 0.4896551724137931, + "acc_norm_stderr": 0.041657747757287644 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.04488482852329017, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.04488482852329017 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.47478991596638653, + "acc_stderr": 0.0324371805513741, + "acc_norm": 0.47478991596638653, + "acc_norm_stderr": 0.0324371805513741 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.46923076923076923, + "acc_stderr": 0.02530295889085015, + "acc_norm": 0.46923076923076923, + "acc_norm_stderr": 0.02530295889085015 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956914, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956914 + }, + 
"harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.43349753694581283, + "acc_stderr": 0.034867317274198714, + "acc_norm": 0.43349753694581283, + "acc_norm_stderr": 0.034867317274198714 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4806451612903226, + "acc_stderr": 0.028422687404312107, + "acc_norm": 0.4806451612903226, + "acc_norm_stderr": 0.028422687404312107 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7435897435897436, + "acc_stderr": 0.028605953702004243, + "acc_norm": 0.7435897435897436, + "acc_norm_stderr": 0.028605953702004243 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4830188679245283, + "acc_stderr": 0.030755120364119905, + "acc_norm": 0.4830188679245283, + "acc_norm_stderr": 0.030755120364119905 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5272727272727272, + "acc_stderr": 0.04782001791380061, + "acc_norm": 0.5272727272727272, + "acc_norm_stderr": 0.04782001791380061 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.28888888888888886, + "acc_stderr": 0.027634907264178544, + "acc_norm": 0.28888888888888886, + "acc_norm_stderr": 0.027634907264178544 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3443708609271523, + "acc_stderr": 0.038796870240733264, + "acc_norm": 0.3443708609271523, + "acc_norm_stderr": 0.038796870240733264 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6268656716417911, + "acc_stderr": 0.034198326081760065, + "acc_norm": 0.6268656716417911, + "acc_norm_stderr": 0.034198326081760065 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.43352601156069365, + "acc_stderr": 0.03778621079092055, + "acc_norm": 
0.43352601156069365, + "acc_norm_stderr": 0.03778621079092055 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.38095238095238093, + "acc_stderr": 0.025010749116137595, + "acc_norm": 0.38095238095238093, + "acc_norm_stderr": 0.025010749116137595 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.04076663253918567, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.04076663253918567 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.63, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.63, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5057803468208093, + "acc_stderr": 0.026917296179149116, + "acc_norm": 0.5057803468208093, + "acc_norm_stderr": 0.026917296179149116 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.44785276073619634, + "acc_stderr": 0.03906947479456601, + "acc_norm": 0.44785276073619634, + "acc_norm_stderr": 0.03906947479456601 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4382716049382716, + "acc_stderr": 0.027607914087400477, + "acc_norm": 0.4382716049382716, + "acc_norm_stderr": 0.027607914087400477 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.49222797927461137, + "acc_stderr": 0.03608003225569654, + "acc_norm": 0.49222797927461137, + "acc_norm_stderr": 0.03608003225569654 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.04185774424022057, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.04185774424022057 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.47706422018348627, + "acc_stderr": 
0.021414757058175502, + "acc_norm": 0.47706422018348627, + "acc_norm_stderr": 0.021414757058175502 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.31746031746031744, + "acc_stderr": 0.0416345303130286, + "acc_norm": 0.31746031746031744, + "acc_norm_stderr": 0.0416345303130286 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5130718954248366, + "acc_stderr": 0.028620130800700246, + "acc_norm": 0.5130718954248366, + "acc_norm_stderr": 0.028620130800700246 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6694214876033058, + "acc_stderr": 0.04294340845212094, + "acc_norm": 0.6694214876033058, + "acc_norm_stderr": 0.04294340845212094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5, + "acc_stderr": 0.04068942293855797, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04068942293855797 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4117647058823529, + "acc_stderr": 0.01991037746310594, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.01991037746310594 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3723404255319149, + "acc_stderr": 0.028838921471251458, + "acc_norm": 0.3723404255319149, + "acc_norm_stderr": 0.028838921471251458 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.39285714285714285, + "acc_stderr": 0.046355501356099754, + "acc_norm": 0.39285714285714285, + "acc_norm_stderr": 0.046355501356099754 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.41203703703703703, + "acc_stderr": 0.03356787758160834, + "acc_norm": 0.41203703703703703, + "acc_norm_stderr": 0.03356787758160834 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.28044692737430166, + "acc_stderr": 0.015024083883322884, + "acc_norm": 0.28044692737430166, + "acc_norm_stderr": 0.015024083883322884 + }, + "harness|ko_mmlu_college_computer_science|5": { + 
"acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.55, + "acc_stderr": 0.049999999999999996, + "acc_norm": 0.55, + "acc_norm_stderr": 0.049999999999999996 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.44485294117647056, + "acc_stderr": 0.030187532060329387, + "acc_norm": 0.44485294117647056, + "acc_norm_stderr": 0.030187532060329387 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4489795918367347, + "acc_stderr": 0.0318421386668758, + "acc_norm": 0.4489795918367347, + "acc_norm_stderr": 0.0318421386668758 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.620253164556962, + "acc_stderr": 0.0315918875296585, + "acc_norm": 0.620253164556962, + "acc_norm_stderr": 0.0315918875296585 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3468057366362451, + "acc_stderr": 0.012156071332318705, + "acc_norm": 0.3468057366362451, + "acc_norm_stderr": 0.012156071332318705 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.45098039215686275, + "acc_stderr": 0.03492406104163613, + "acc_norm": 0.45098039215686275, + "acc_norm_stderr": 0.03492406104163613 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4303030303030303, + "acc_stderr": 0.03866225962879077, + "acc_norm": 0.4303030303030303, + "acc_norm_stderr": 0.03866225962879077 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2778457772337821, + "mc1_stderr": 0.015680929364024637, + "mc2": 0.44256276494088104, + "mc2_stderr": 0.015431425162220794 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.38488783943329397, + "acc_stderr": 0.016728579701498665, + "acc_norm": 0.4722550177095632, + "acc_norm_stderr": 0.01716386797945601 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + 
"harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + 
"harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Ja3ck/Mistral-instruct-Y24-v5", + "model_sha": "5d268f9f5c87c414661e40ffc464ae5686964586", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Ja3ck/Mistral-instruct-Y24-v6/result_2023-12-04 07:37:56.json b/Ja3ck/Mistral-instruct-Y24-v6/result_2023-12-04 07:37:56.json new file mode 100644 index 0000000000000000000000000000000000000000..99f666f940532ae19a5467126febc9baa32bcf98 --- /dev/null +++ b/Ja3ck/Mistral-instruct-Y24-v6/result_2023-12-04 07:37:56.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.34726962457337884, + "acc_stderr": 0.013913034529620442, + "acc_norm": 0.39505119453924914, + "acc_norm_stderr": 0.014285898292938175 + }, + "harness|ko_hellaswag|10": { + "acc": 0.38259310894244175, + "acc_stderr": 0.0048502689869033494, + "acc_norm": 0.49412467635929097, + "acc_norm_stderr": 0.004989436910754223 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5672514619883041, + "acc_stderr": 0.03799978644370608, + "acc_norm": 0.5672514619883041, + "acc_norm_stderr": 0.03799978644370608 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5533980582524272, + "acc_stderr": 0.04922424153458935, + "acc_norm": 0.5533980582524272, + "acc_norm_stderr": 0.04922424153458935 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 
0.49808429118773945, + "acc_stderr": 0.017879832259026677, + "acc_norm": 0.49808429118773945, + "acc_norm_stderr": 0.017879832259026677 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4, + "acc_stderr": 0.04232073695151589, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04232073695151589 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39148936170212767, + "acc_stderr": 0.03190701242326812, + "acc_norm": 0.39148936170212767, + "acc_norm_stderr": 0.03190701242326812 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39759036144578314, + "acc_stderr": 0.03809973084540219, + "acc_norm": 0.39759036144578314, + "acc_norm_stderr": 0.03809973084540219 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5016077170418006, + "acc_stderr": 0.02839794490780661, + "acc_norm": 0.5016077170418006, + "acc_norm_stderr": 0.02839794490780661 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.45739910313901344, + "acc_stderr": 0.033435777055830646, + "acc_norm": 0.45739910313901344, + "acc_norm_stderr": 0.033435777055830646 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.45038167938931295, + "acc_stderr": 0.04363643698524779, + "acc_norm": 0.45038167938931295, + "acc_norm_stderr": 0.04363643698524779 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.45, + "acc_stderr": 0.04999999999999999, + "acc_norm": 0.45, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5909090909090909, + "acc_stderr": 0.03502975799413007, + "acc_norm": 0.5909090909090909, + "acc_norm_stderr": 0.03502975799413007 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4689655172413793, + "acc_stderr": 0.04158632762097828, + "acc_norm": 0.4689655172413793, + "acc_norm_stderr": 0.04158632762097828 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.29411764705882354, 
+ "acc_stderr": 0.04533838195929777, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.04533838195929777 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4957983193277311, + "acc_stderr": 0.03247734334448111, + "acc_norm": 0.4957983193277311, + "acc_norm_stderr": 0.03247734334448111 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.43846153846153846, + "acc_stderr": 0.025158266016868554, + "acc_norm": 0.43846153846153846, + "acc_norm_stderr": 0.025158266016868554 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5462962962962963, + "acc_stderr": 0.04812917324536823, + "acc_norm": 0.5462962962962963, + "acc_norm_stderr": 0.04812917324536823 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.458128078817734, + "acc_stderr": 0.03505630140785741, + "acc_norm": 0.458128078817734, + "acc_norm_stderr": 0.03505630140785741 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4645161290322581, + "acc_stderr": 0.028372287797962952, + "acc_norm": 0.4645161290322581, + "acc_norm_stderr": 0.028372287797962952 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7307692307692307, + "acc_stderr": 0.029058588303748845, + "acc_norm": 0.7307692307692307, + "acc_norm_stderr": 0.029058588303748845 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4528301886792453, + "acc_stderr": 0.030635627957961823, + "acc_norm": 0.4528301886792453, + "acc_norm_stderr": 0.030635627957961823 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4909090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.4909090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + 
"harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.02874204090394849, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.02874204090394849 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.03710185726119994, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.03710185726119994 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6268656716417911, + "acc_stderr": 0.034198326081760065, + "acc_norm": 0.6268656716417911, + "acc_norm_stderr": 0.034198326081760065 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.42196531791907516, + "acc_stderr": 0.0376574669386515, + "acc_norm": 0.42196531791907516, + "acc_norm_stderr": 0.0376574669386515 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.36507936507936506, + "acc_stderr": 0.024796060602699954, + "acc_norm": 0.36507936507936506, + "acc_norm_stderr": 0.024796060602699954 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3958333333333333, + "acc_stderr": 0.04089465449325582, + "acc_norm": 0.3958333333333333, + "acc_norm_stderr": 0.04089465449325582 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.62, + "acc_stderr": 0.04878317312145634, + "acc_norm": 0.62, + "acc_norm_stderr": 0.04878317312145634 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5375722543352601, + "acc_stderr": 0.02684298551961537, + "acc_norm": 0.5375722543352601, + "acc_norm_stderr": 0.02684298551961537 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5214723926380368, + "acc_stderr": 0.03924746876751129, + "acc_norm": 0.5214723926380368, + "acc_norm_stderr": 0.03924746876751129 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.47530864197530864, + "acc_stderr": 0.027786800931427453, + "acc_norm": 0.47530864197530864, + "acc_norm_stderr": 
0.027786800931427453 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5595854922279793, + "acc_stderr": 0.035827245300360945, + "acc_norm": 0.5595854922279793, + "acc_norm_stderr": 0.035827245300360945 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.34210526315789475, + "acc_stderr": 0.044629175353369376, + "acc_norm": 0.34210526315789475, + "acc_norm_stderr": 0.044629175353369376 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.4935779816513762, + "acc_stderr": 0.021435554820013077, + "acc_norm": 0.4935779816513762, + "acc_norm_stderr": 0.021435554820013077 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3253968253968254, + "acc_stderr": 0.041905964388711366, + "acc_norm": 0.3253968253968254, + "acc_norm_stderr": 0.041905964388711366 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5065359477124183, + "acc_stderr": 0.028627470550556047, + "acc_norm": 0.5065359477124183, + "acc_norm_stderr": 0.028627470550556047 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6942148760330579, + "acc_stderr": 0.042059539338841226, + "acc_norm": 0.6942148760330579, + "acc_norm_stderr": 0.042059539338841226 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4407894736842105, + "acc_stderr": 0.040403110624904356, + "acc_norm": 0.4407894736842105, + "acc_norm_stderr": 0.040403110624904356 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3954248366013072, + "acc_stderr": 0.01978046595477752, + "acc_norm": 0.3954248366013072, + "acc_norm_stderr": 0.01978046595477752 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.35815602836879434, + "acc_stderr": 0.028602085862759415, + 
"acc_norm": 0.35815602836879434, + "acc_norm_stderr": 0.028602085862759415 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.04547960999764376, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.04547960999764376 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.42592592592592593, + "acc_stderr": 0.033723432716530624, + "acc_norm": 0.42592592592592593, + "acc_norm_stderr": 0.033723432716530624 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2435754189944134, + "acc_stderr": 0.01435591196476787, + "acc_norm": 0.2435754189944134, + "acc_norm_stderr": 0.01435591196476787 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.34191176470588236, + "acc_stderr": 0.028814722422254187, + "acc_norm": 0.34191176470588236, + "acc_norm_stderr": 0.028814722422254187 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4489795918367347, + "acc_stderr": 0.0318421386668758, + "acc_norm": 0.4489795918367347, + "acc_norm_stderr": 0.0318421386668758 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6244725738396625, + "acc_stderr": 0.03152256243091157, + "acc_norm": 0.6244725738396625, + "acc_norm_stderr": 0.03152256243091157 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3344198174706649, + "acc_stderr": 0.012049668983214941, + "acc_norm": 0.3344198174706649, + "acc_norm_stderr": 0.012049668983214941 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.45098039215686275, + "acc_stderr": 0.03492406104163613, + "acc_norm": 0.45098039215686275, + "acc_norm_stderr": 0.03492406104163613 + }, + 
"harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.48484848484848486, + "acc_stderr": 0.03902551007374449, + "acc_norm": 0.48484848484848486, + "acc_norm_stderr": 0.03902551007374449 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2668298653610771, + "mc1_stderr": 0.015483691939237272, + "mc2": 0.4278199183026475, + "mc2_stderr": 0.015157555430007909 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.42739079102715466, + "acc_stderr": 0.017008129844823156, + "acc_norm": 0.45218417945690675, + "acc_norm_stderr": 0.017111567130916785 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + 
"harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Ja3ck/Mistral-instruct-Y24-v6", + "model_sha": "8bb579459c344a1ea4abdc76b52532717ae456ce", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Ja3ck/llama-2-13b-DPO-Y24-v2/result_2023-11-29 06:49:31.json b/Ja3ck/llama-2-13b-DPO-Y24-v2/result_2023-11-29 06:49:31.json new file mode 100644 index 0000000000000000000000000000000000000000..5b39427551fb7d36ba9a4080f8e59e18e6431560 --- /dev/null +++ b/Ja3ck/llama-2-13b-DPO-Y24-v2/result_2023-11-29 06:49:31.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 
0.32764505119453924, + "acc_stderr": 0.013715847940719348, + "acc_norm": 0.3822525597269625, + "acc_norm_stderr": 0.014200454049979288 + }, + "harness|ko_hellaswag|10": { + "acc": 0.36427006572395937, + "acc_stderr": 0.004802413919932662, + "acc_norm": 0.4647480581557459, + "acc_norm_stderr": 0.00497736436479559 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.42105263157894735, + "acc_stderr": 0.037867207062342145, + "acc_norm": 0.42105263157894735, + "acc_norm_stderr": 0.037867207062342145 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.49514563106796117, + "acc_stderr": 0.049505043821289195, + "acc_norm": 0.49514563106796117, + "acc_norm_stderr": 0.049505043821289195 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4648786717752235, + "acc_stderr": 0.017835798806290642, + "acc_norm": 0.4648786717752235, + "acc_norm_stderr": 0.017835798806290642 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.04171654161354543, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.04171654161354543 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.37446808510638296, + "acc_stderr": 0.03163910665367291, + "acc_norm": 0.37446808510638296, + "acc_norm_stderr": 0.03163910665367291 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3795180722891566, + "acc_stderr": 0.037777988227480165, + "acc_norm": 0.3795180722891566, + "acc_norm_stderr": 0.037777988227480165 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4565916398713826, + "acc_stderr": 0.028290869054197598, + "acc_norm": 0.4565916398713826, + "acc_norm_stderr": 0.028290869054197598 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.43946188340807174, + "acc_stderr": 0.03331092511038179, + "acc_norm": 0.43946188340807174, + "acc_norm_stderr": 0.03331092511038179 + }, + 
"harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4351145038167939, + "acc_stderr": 0.04348208051644858, + "acc_norm": 0.4351145038167939, + "acc_norm_stderr": 0.04348208051644858 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5, + "acc_stderr": 0.035623524993954825, + "acc_norm": 0.5, + "acc_norm_stderr": 0.035623524993954825 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4068965517241379, + "acc_stderr": 0.04093793981266237, + "acc_norm": 0.4068965517241379, + "acc_norm_stderr": 0.04093793981266237 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.04158307533083286, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.04158307533083286 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.36554621848739494, + "acc_stderr": 0.03128217706368461, + "acc_norm": 0.36554621848739494, + "acc_norm_stderr": 0.03128217706368461 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3974358974358974, + "acc_stderr": 0.024811920017903836, + "acc_norm": 0.3974358974358974, + "acc_norm_stderr": 0.024811920017903836 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.47, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.47, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39408866995073893, + "acc_stderr": 0.034381579670365446, + "acc_norm": 0.39408866995073893, + "acc_norm_stderr": 0.034381579670365446 + }, + 
"harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4483870967741935, + "acc_stderr": 0.028292056830112735, + "acc_norm": 0.4483870967741935, + "acc_norm_stderr": 0.028292056830112735 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5982905982905983, + "acc_stderr": 0.03211693751051621, + "acc_norm": 0.5982905982905983, + "acc_norm_stderr": 0.03211693751051621 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.39245283018867927, + "acc_stderr": 0.030052580579557835, + "acc_norm": 0.39245283018867927, + "acc_norm_stderr": 0.030052580579557835 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4818181818181818, + "acc_stderr": 0.04785964010794916, + "acc_norm": 0.4818181818181818, + "acc_norm_stderr": 0.04785964010794916 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25555555555555554, + "acc_stderr": 0.02659393910184407, + "acc_norm": 0.25555555555555554, + "acc_norm_stderr": 0.02659393910184407 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2582781456953642, + "acc_stderr": 0.035737053147634576, + "acc_norm": 0.2582781456953642, + "acc_norm_stderr": 0.035737053147634576 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5174129353233831, + "acc_stderr": 0.035333892347392454, + "acc_norm": 0.5174129353233831, + "acc_norm_stderr": 0.035333892347392454 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3352601156069364, + "acc_stderr": 0.035995863012470784, + "acc_norm": 0.3352601156069364, + "acc_norm_stderr": 0.035995863012470784 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2804232804232804, + "acc_stderr": 0.023135287974325628, + "acc_norm": 0.2804232804232804, + "acc_norm_stderr": 0.023135287974325628 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.039420826399272135, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.039420826399272135 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + 
"acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.53, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.53, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.43352601156069365, + "acc_stderr": 0.026680134761679214, + "acc_norm": 0.43352601156069365, + "acc_norm_stderr": 0.026680134761679214 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4110429447852761, + "acc_stderr": 0.038656978537853624, + "acc_norm": 0.4110429447852761, + "acc_norm_stderr": 0.038656978537853624 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3950617283950617, + "acc_stderr": 0.027201117666925657, + "acc_norm": 0.3950617283950617, + "acc_norm_stderr": 0.027201117666925657 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542129, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542129 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.41450777202072536, + "acc_stderr": 0.03555300319557673, + "acc_norm": 0.41450777202072536, + "acc_norm_stderr": 0.03555300319557673 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.30701754385964913, + "acc_stderr": 0.04339138322579858, + "acc_norm": 0.30701754385964913, + "acc_norm_stderr": 0.04339138322579858 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.42385321100917434, + "acc_stderr": 0.021187263209087526, + "acc_norm": 0.42385321100917434, + "acc_norm_stderr": 0.021187263209087526 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2698412698412698, + "acc_stderr": 0.03970158273235172, + "acc_norm": 0.2698412698412698, + "acc_norm_stderr": 0.03970158273235172 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.43137254901960786, + "acc_stderr": 0.028358956313423545, + "acc_norm": 0.43137254901960786, + "acc_norm_stderr": 0.028358956313423545 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.36, + "acc_stderr": 
0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.628099173553719, + "acc_stderr": 0.04412015806624504, + "acc_norm": 0.628099173553719, + "acc_norm_stderr": 0.04412015806624504 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.45394736842105265, + "acc_stderr": 0.04051646342874142, + "acc_norm": 0.45394736842105265, + "acc_norm_stderr": 0.04051646342874142 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3415032679738562, + "acc_stderr": 0.019184639328092487, + "acc_norm": 0.3415032679738562, + "acc_norm_stderr": 0.019184639328092487 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2907801418439716, + "acc_stderr": 0.027090664368353178, + "acc_norm": 0.2907801418439716, + "acc_norm_stderr": 0.027090664368353178 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.22321428571428573, + "acc_stderr": 0.039523019677025116, + "acc_norm": 0.22321428571428573, + "acc_norm_stderr": 0.039523019677025116 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.02988691054762697, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.02988691054762697 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.34558823529411764, + "acc_stderr": 0.02888819310398866, + "acc_norm": 0.34558823529411764, + "acc_norm_stderr": 0.02888819310398866 + }, + 
"harness|ko_mmlu_security_studies|5": { + "acc": 0.363265306122449, + "acc_stderr": 0.030789051139030802, + "acc_norm": 0.363265306122449, + "acc_norm_stderr": 0.030789051139030802 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.45147679324894513, + "acc_stderr": 0.032393600173974704, + "acc_norm": 0.45147679324894513, + "acc_norm_stderr": 0.032393600173974704 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2940026075619296, + "acc_stderr": 0.011636062953698609, + "acc_norm": 0.2940026075619296, + "acc_norm_stderr": 0.011636062953698609 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.3872549019607843, + "acc_stderr": 0.03418931233833343, + "acc_norm": 0.3872549019607843, + "acc_norm_stderr": 0.03418931233833343 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.41818181818181815, + "acc_stderr": 0.03851716319398395, + "acc_norm": 0.41818181818181815, + "acc_norm_stderr": 0.03851716319398395 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24479804161566707, + "mc1_stderr": 0.015051869486715004, + "mc2": 0.41094521391654454, + "mc2_stderr": 0.015180355971022358 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.42857142857142855, + "acc_stderr": 0.01701403811929748, + "acc_norm": 0.5041322314049587, + "acc_norm_stderr": 0.017189767032130817 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 
1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + 
"harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Ja3ck/llama-2-13b-DPO-Y24-v2", + "model_sha": "2b9ef358d9d1cfdb22de7d0d865782bfd45f8ab9", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Ja3ck/llama-2-13b-instruct-Y24-v1/result_2023-11-29 06:12:47.json b/Ja3ck/llama-2-13b-instruct-Y24-v1/result_2023-11-29 06:12:47.json new file mode 100644 index 0000000000000000000000000000000000000000..1a2faf4ad55ffe14c87dbe212ecb75a30c4ed54e --- /dev/null +++ b/Ja3ck/llama-2-13b-instruct-Y24-v1/result_2023-11-29 06:12:47.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3267918088737201, + "acc_stderr": 0.013706665975587336, + "acc_norm": 0.38139931740614336, + "acc_norm_stderr": 0.01419438908668526 + }, + "harness|ko_hellaswag|10": { + "acc": 0.364070902210715, + "acc_stderr": 0.0048018528813297484, + "acc_norm": 0.46415056761601275, + "acc_norm_stderr": 0.004976939333240076 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.42105263157894735, + "acc_stderr": 0.037867207062342145, + "acc_norm": 0.42105263157894735, + "acc_norm_stderr": 0.037867207062342145 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.49514563106796117, + "acc_stderr": 0.049505043821289195, + "acc_norm": 0.49514563106796117, + "acc_norm_stderr": 0.049505043821289195 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4661558109833972, + "acc_stderr": 0.0178389560091368, + "acc_norm": 0.4661558109833972, + "acc_norm_stderr": 0.0178389560091368 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37777777777777777, + "acc_stderr": 0.04188307537595853, + "acc_norm": 0.37777777777777777, + "acc_norm_stderr": 0.04188307537595853 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + 
}, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.37446808510638296, + "acc_stderr": 0.03163910665367291, + "acc_norm": 0.37446808510638296, + "acc_norm_stderr": 0.03163910665367291 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3795180722891566, + "acc_stderr": 0.037777988227480165, + "acc_norm": 0.3795180722891566, + "acc_norm_stderr": 0.037777988227480165 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4565916398713826, + "acc_stderr": 0.028290869054197598, + "acc_norm": 0.4565916398713826, + "acc_norm_stderr": 0.028290869054197598 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4439461883408072, + "acc_stderr": 0.03334625674242728, + "acc_norm": 0.4439461883408072, + "acc_norm_stderr": 0.03334625674242728 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4351145038167939, + "acc_stderr": 0.04348208051644858, + "acc_norm": 0.4351145038167939, + "acc_norm_stderr": 0.04348208051644858 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5, + "acc_stderr": 0.035623524993954825, + "acc_norm": 0.5, + "acc_norm_stderr": 0.035623524993954825 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4068965517241379, + "acc_stderr": 0.04093793981266237, + "acc_norm": 0.4068965517241379, + "acc_norm_stderr": 0.04093793981266237 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.04158307533083286, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.04158307533083286 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.36554621848739494, + "acc_stderr": 0.03128217706368461, + "acc_norm": 0.36554621848739494, + "acc_norm_stderr": 0.03128217706368461 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3974358974358974, + "acc_stderr": 0.024811920017903836, + "acc_norm": 0.3974358974358974, + 
"acc_norm_stderr": 0.024811920017903836 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.47, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.47, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3891625615763547, + "acc_stderr": 0.03430462416103872, + "acc_norm": 0.3891625615763547, + "acc_norm_stderr": 0.03430462416103872 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.45161290322580644, + "acc_stderr": 0.02831050034856839, + "acc_norm": 0.45161290322580644, + "acc_norm_stderr": 0.02831050034856839 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6025641025641025, + "acc_stderr": 0.03205953453789293, + "acc_norm": 0.6025641025641025, + "acc_norm_stderr": 0.03205953453789293 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4, + "acc_stderr": 0.030151134457776296, + "acc_norm": 0.4, + "acc_norm_stderr": 0.030151134457776296 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4909090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.4909090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25555555555555554, + "acc_stderr": 0.02659393910184407, + "acc_norm": 0.25555555555555554, + "acc_norm_stderr": 0.02659393910184407 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2582781456953642, + "acc_stderr": 0.035737053147634576, + "acc_norm": 0.2582781456953642, + "acc_norm_stderr": 0.035737053147634576 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5223880597014925, + "acc_stderr": 0.03531987930208731, + "acc_norm": 0.5223880597014925, + "acc_norm_stderr": 
0.03531987930208731 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3352601156069364, + "acc_stderr": 0.035995863012470784, + "acc_norm": 0.3352601156069364, + "acc_norm_stderr": 0.035995863012470784 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2830687830687831, + "acc_stderr": 0.023201392938194978, + "acc_norm": 0.2830687830687831, + "acc_norm_stderr": 0.023201392938194978 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.039420826399272135, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.039420826399272135 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.53, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.53, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.43352601156069365, + "acc_stderr": 0.026680134761679214, + "acc_norm": 0.43352601156069365, + "acc_norm_stderr": 0.026680134761679214 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4110429447852761, + "acc_stderr": 0.038656978537853624, + "acc_norm": 0.4110429447852761, + "acc_norm_stderr": 0.038656978537853624 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3950617283950617, + "acc_stderr": 0.027201117666925657, + "acc_norm": 0.3950617283950617, + "acc_norm_stderr": 0.027201117666925657 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542129, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542129 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.41450777202072536, + "acc_stderr": 0.03555300319557673, + "acc_norm": 0.41450777202072536, + "acc_norm_stderr": 0.03555300319557673 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.30701754385964913, + "acc_stderr": 0.04339138322579858, + "acc_norm": 0.30701754385964913, 
+ "acc_norm_stderr": 0.04339138322579858 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.42201834862385323, + "acc_stderr": 0.02117499140776317, + "acc_norm": 0.42201834862385323, + "acc_norm_stderr": 0.02117499140776317 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.040061680838488774, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.040061680838488774 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.43137254901960786, + "acc_stderr": 0.028358956313423545, + "acc_norm": 0.43137254901960786, + "acc_norm_stderr": 0.028358956313423545 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.628099173553719, + "acc_stderr": 0.04412015806624504, + "acc_norm": 0.628099173553719, + "acc_norm_stderr": 0.04412015806624504 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4407894736842105, + "acc_stderr": 0.04040311062490436, + "acc_norm": 0.4407894736842105, + "acc_norm_stderr": 0.04040311062490436 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3415032679738562, + "acc_stderr": 0.019184639328092487, + "acc_norm": 0.3415032679738562, + "acc_norm_stderr": 0.019184639328092487 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.29432624113475175, + "acc_stderr": 0.02718712701150381, + "acc_norm": 0.29432624113475175, + "acc_norm_stderr": 0.02718712701150381 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.22321428571428573, + "acc_stderr": 0.039523019677025116, + "acc_norm": 0.22321428571428573, + "acc_norm_stderr": 0.039523019677025116 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.02988691054762697, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.02988691054762697 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + 
"acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.34558823529411764, + "acc_stderr": 0.02888819310398866, + "acc_norm": 0.34558823529411764, + "acc_norm_stderr": 0.02888819310398866 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.363265306122449, + "acc_stderr": 0.030789051139030802, + "acc_norm": 0.363265306122449, + "acc_norm_stderr": 0.030789051139030802 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.45147679324894513, + "acc_stderr": 0.032393600173974704, + "acc_norm": 0.45147679324894513, + "acc_norm_stderr": 0.032393600173974704 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.29335071707953064, + "acc_stderr": 0.011628520449582075, + "acc_norm": 0.29335071707953064, + "acc_norm_stderr": 0.011628520449582075 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.3872549019607843, + "acc_stderr": 0.03418931233833343, + "acc_norm": 0.3872549019607843, + "acc_norm_stderr": 0.03418931233833343 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.41818181818181815, + "acc_stderr": 0.03851716319398395, + "acc_norm": 0.41818181818181815, + "acc_norm_stderr": 0.03851716319398395 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24357405140758873, + "mc1_stderr": 0.015026354824910782, + "mc2": 0.408852370253922, + "mc2_stderr": 0.015158662984848508 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.43211334120425027, + "acc_stderr": 0.017031170198851746, + "acc_norm": 0.5041322314049587, + "acc_norm_stderr": 0.017189767032130817 + } + }, + "versions": 
{ + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + 
"harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Ja3ck/llama-2-13b-instruct-Y24-v1", + "model_sha": "edce003ff6a63c6a225564d7763a89ade6eaa15d", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Ja3ck/llama-2-13b-instruct-Y24-v2/result_2023-11-29 06:29:40.json b/Ja3ck/llama-2-13b-instruct-Y24-v2/result_2023-11-29 06:29:40.json new file mode 100644 index 0000000000000000000000000000000000000000..216d3fce1174d94b55c61a6d80106afdda1c40cb --- /dev/null +++ b/Ja3ck/llama-2-13b-instruct-Y24-v2/result_2023-11-29 06:29:40.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.32337883959044367, + "acc_stderr": 0.01366942163001212, + "acc_norm": 0.37542662116040953, + "acc_norm_stderr": 0.01415063143511173 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3632742481577375, + "acc_stderr": 0.00479959984039737, + "acc_norm": 0.4640509858593906, + "acc_norm_stderr": 0.0049768677965835615 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4152046783625731, + "acc_stderr": 0.03779275945503201, + "acc_norm": 0.4152046783625731, + "acc_norm_stderr": 0.03779275945503201 + }, + "harness|ko_mmlu_management|5": { + 
"acc": 0.4854368932038835, + "acc_stderr": 0.04948637324026637, + "acc_norm": 0.4854368932038835, + "acc_norm_stderr": 0.04948637324026637 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.46871008939974457, + "acc_stderr": 0.017844918090468547, + "acc_norm": 0.46871008939974457, + "acc_norm_stderr": 0.017844918090468547 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.04244633238353228, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.04244633238353228 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.37446808510638296, + "acc_stderr": 0.03163910665367291, + "acc_norm": 0.37446808510638296, + "acc_norm_stderr": 0.03163910665367291 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39156626506024095, + "acc_stderr": 0.03799857454479636, + "acc_norm": 0.39156626506024095, + "acc_norm_stderr": 0.03799857454479636 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.42765273311897106, + "acc_stderr": 0.028099240775809567, + "acc_norm": 0.42765273311897106, + "acc_norm_stderr": 0.028099240775809567 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4080717488789238, + "acc_stderr": 0.03298574607842822, + "acc_norm": 0.4080717488789238, + "acc_norm_stderr": 0.03298574607842822 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.40458015267175573, + "acc_stderr": 0.043046937953806645, + "acc_norm": 0.40458015267175573, + "acc_norm_stderr": 0.043046937953806645 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.51010101010101, + "acc_stderr": 0.035616254886737454, + "acc_norm": 0.51010101010101, + "acc_norm_stderr": 0.035616254886737454 + }, + 
"harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.04104269211806232, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.04104269211806232 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.04280105837364395, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.04280105837364395 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.36134453781512604, + "acc_stderr": 0.031204691225150013, + "acc_norm": 0.36134453781512604, + "acc_norm_stderr": 0.031204691225150013 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.382051282051282, + "acc_stderr": 0.024635549163908223, + "acc_norm": 0.382051282051282, + "acc_norm_stderr": 0.024635549163908223 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.047609522856952344, + "acc_norm": 0.34, + "acc_norm_stderr": 0.047609522856952344 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5370370370370371, + "acc_stderr": 0.04820403072760627, + "acc_norm": 0.5370370370370371, + "acc_norm_stderr": 0.04820403072760627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3891625615763547, + "acc_stderr": 0.03430462416103872, + "acc_norm": 0.3891625615763547, + "acc_norm_stderr": 0.03430462416103872 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.44516129032258067, + "acc_stderr": 0.028272410186214906, + "acc_norm": 0.44516129032258067, + "acc_norm_stderr": 0.028272410186214906 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6196581196581197, + "acc_stderr": 0.03180425204384099, + "acc_norm": 0.6196581196581197, + "acc_norm_stderr": 0.03180425204384099 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4075471698113208, + "acc_stderr": 0.0302422338008545, + "acc_norm": 
0.4075471698113208, + "acc_norm_stderr": 0.0302422338008545 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4909090909090909, + "acc_stderr": 0.0478833976870286, + "acc_norm": 0.4909090909090909, + "acc_norm_stderr": 0.0478833976870286 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2851851851851852, + "acc_stderr": 0.027528599210340496, + "acc_norm": 0.2851851851851852, + "acc_norm_stderr": 0.027528599210340496 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.271523178807947, + "acc_stderr": 0.03631329803969653, + "acc_norm": 0.271523178807947, + "acc_norm_stderr": 0.03631329803969653 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5174129353233831, + "acc_stderr": 0.03533389234739245, + "acc_norm": 0.5174129353233831, + "acc_norm_stderr": 0.03533389234739245 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3063583815028902, + "acc_stderr": 0.035149425512674394, + "acc_norm": 0.3063583815028902, + "acc_norm_stderr": 0.035149425512674394 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.02351729433596329, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.02351729433596329 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3194444444444444, + "acc_stderr": 0.03899073687357335, + "acc_norm": 0.3194444444444444, + "acc_norm_stderr": 0.03899073687357335 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.42196531791907516, + "acc_stderr": 0.026589231142174267, + "acc_norm": 0.42196531791907516, + "acc_norm_stderr": 0.026589231142174267 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3987730061349693, + "acc_stderr": 
0.03847021420456024, + "acc_norm": 0.3987730061349693, + "acc_norm_stderr": 0.03847021420456024 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.027339546640662734, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.027339546640662734 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.41968911917098445, + "acc_stderr": 0.035615873276858834, + "acc_norm": 0.41968911917098445, + "acc_norm_stderr": 0.035615873276858834 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.30701754385964913, + "acc_stderr": 0.04339138322579858, + "acc_norm": 0.30701754385964913, + "acc_norm_stderr": 0.04339138322579858 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.43853211009174314, + "acc_stderr": 0.021274713073954562, + "acc_norm": 0.43853211009174314, + "acc_norm_stderr": 0.021274713073954562 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.039325376803928704, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.039325376803928704 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4215686274509804, + "acc_stderr": 0.02827549015679143, + "acc_norm": 0.4215686274509804, + "acc_norm_stderr": 0.02827549015679143 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6446280991735537, + "acc_stderr": 0.0436923632657398, + "acc_norm": 0.6446280991735537, + "acc_norm_stderr": 0.0436923632657398 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4407894736842105, + "acc_stderr": 0.04040311062490436, + "acc_norm": 0.4407894736842105, + "acc_norm_stderr": 0.04040311062490436 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 
0.32679738562091504, + "acc_stderr": 0.018975427920507215, + "acc_norm": 0.32679738562091504, + "acc_norm_stderr": 0.018975427920507215 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.30141843971631205, + "acc_stderr": 0.02737412888263115, + "acc_norm": 0.30141843971631205, + "acc_norm_stderr": 0.02737412888263115 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.19642857142857142, + "acc_stderr": 0.03770970049347019, + "acc_norm": 0.19642857142857142, + "acc_norm_stderr": 0.03770970049347019 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.24537037037037038, + "acc_stderr": 0.029346665094372937, + "acc_norm": 0.24537037037037038, + "acc_norm_stderr": 0.029346665094372937 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3272058823529412, + "acc_stderr": 0.02850145286039659, + "acc_norm": 0.3272058823529412, + "acc_norm_stderr": 0.02850145286039659 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3346938775510204, + "acc_stderr": 0.030209235226242307, + "acc_norm": 0.3346938775510204, + "acc_norm_stderr": 0.030209235226242307 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.4050632911392405, + "acc_stderr": 0.03195514741370673, + "acc_norm": 0.4050632911392405, + "acc_norm_stderr": 0.03195514741370673 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2926988265971317, + "acc_stderr": 0.011620949195849523, + "acc_norm": 0.2926988265971317, + "acc_norm_stderr": 
0.011620949195849523 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.36764705882352944, + "acc_stderr": 0.03384132045674118, + "acc_norm": 0.36764705882352944, + "acc_norm_stderr": 0.03384132045674118 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.38181818181818183, + "acc_stderr": 0.037937131711656344, + "acc_norm": 0.38181818181818183, + "acc_norm_stderr": 0.037937131711656344 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2692778457772338, + "mc1_stderr": 0.015528566637087305, + "mc2": 0.43028202661607795, + "mc2_stderr": 0.015397184787151977 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4639905548996458, + "acc_stderr": 0.017145715365486664, + "acc_norm": 0.5501770956316411, + "acc_norm_stderr": 0.01710357334382571 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + 
"harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Ja3ck/llama-2-13b-instruct-Y24-v2", + "model_sha": "8cb60efc316f104368a65f4b1b68fc52af84f546", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Jaehyeon222/M-SOLAR-10.7B-v1.0-DPO/result_2024-01-05 04:59:11.json b/Jaehyeon222/M-SOLAR-10.7B-v1.0-DPO/result_2024-01-05 04:59:11.json new file mode 100644 index 
0000000000000000000000000000000000000000..989bc6a5734a64382f1f0eef7781a96ece612a78 --- /dev/null +++ b/Jaehyeon222/M-SOLAR-10.7B-v1.0-DPO/result_2024-01-05 04:59:11.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.47440273037542663, + "acc_stderr": 0.014592230885298964, + "acc_norm": 0.5119453924914675, + "acc_norm_stderr": 0.014607220340597167 + }, + "harness|ko_hellaswag|10": { + "acc": 0.45180242979486157, + "acc_stderr": 0.004966544724452223, + "acc_norm": 0.6199960167297351, + "acc_norm_stderr": 0.004843954338451447 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6198830409356725, + "acc_stderr": 0.03722965741385539, + "acc_norm": 0.6198830409356725, + "acc_norm_stderr": 0.03722965741385539 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6990291262135923, + "acc_stderr": 0.04541609446503948, + "acc_norm": 0.6990291262135923, + "acc_norm_stderr": 0.04541609446503948 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6564495530012772, + "acc_stderr": 0.016982145632652473, + "acc_norm": 0.6564495530012772, + "acc_norm_stderr": 0.016982145632652473 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4962962962962963, + "acc_stderr": 0.043192236258113303, + "acc_norm": 0.4962962962962963, + "acc_norm_stderr": 0.043192236258113303 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.5106382978723404, + "acc_stderr": 0.03267862331014063, + "acc_norm": 0.5106382978723404, + "acc_norm_stderr": 0.03267862331014063 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4939759036144578, + "acc_stderr": 0.03892212195333047, + "acc_norm": 0.4939759036144578, + "acc_norm_stderr": 0.03892212195333047 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6077170418006431, + "acc_stderr": 0.02773125864701199, + "acc_norm": 0.6077170418006431, + "acc_norm_stderr": 
0.02773125864701199 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5695067264573991, + "acc_stderr": 0.033231973029429394, + "acc_norm": 0.5695067264573991, + "acc_norm_stderr": 0.033231973029429394 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5801526717557252, + "acc_stderr": 0.04328577215262972, + "acc_norm": 0.5801526717557252, + "acc_norm_stderr": 0.04328577215262972 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465918, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465918 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7272727272727273, + "acc_stderr": 0.03173071239071724, + "acc_norm": 0.7272727272727273, + "acc_norm_stderr": 0.03173071239071724 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5586206896551724, + "acc_stderr": 0.04137931034482758, + "acc_norm": 0.5586206896551724, + "acc_norm_stderr": 0.04137931034482758 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.04835503696107224, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.04835503696107224 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6470588235294118, + "acc_stderr": 0.031041941304059288, + "acc_norm": 0.6470588235294118, + "acc_norm_stderr": 0.031041941304059288 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5666666666666667, + "acc_stderr": 0.0251246535258851, + "acc_norm": 0.5666666666666667, + "acc_norm_stderr": 0.0251246535258851 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6574074074074074, + "acc_stderr": 0.045879047413018105, + "acc_norm": 0.6574074074074074, + 
"acc_norm_stderr": 0.045879047413018105 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4039408866995074, + "acc_stderr": 0.0345245390382204, + "acc_norm": 0.4039408866995074, + "acc_norm_stderr": 0.0345245390382204 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5903225806451613, + "acc_stderr": 0.027976054915347368, + "acc_norm": 0.5903225806451613, + "acc_norm_stderr": 0.027976054915347368 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7948717948717948, + "acc_stderr": 0.026453508054040342, + "acc_norm": 0.7948717948717948, + "acc_norm_stderr": 0.026453508054040342 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5509433962264151, + "acc_stderr": 0.030612730713641092, + "acc_norm": 0.5509433962264151, + "acc_norm_stderr": 0.030612730713641092 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5909090909090909, + "acc_stderr": 0.04709306978661895, + "acc_norm": 0.5909090909090909, + "acc_norm_stderr": 0.04709306978661895 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3592592592592593, + "acc_stderr": 0.029252905927251976, + "acc_norm": 0.3592592592592593, + "acc_norm_stderr": 0.029252905927251976 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.4304635761589404, + "acc_stderr": 0.04042809961395634, + "acc_norm": 0.4304635761589404, + "acc_norm_stderr": 0.04042809961395634 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6517412935323383, + "acc_stderr": 0.033687874661154596, + "acc_norm": 0.6517412935323383, + "acc_norm_stderr": 0.033687874661154596 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5202312138728323, + "acc_stderr": 0.03809342081273956, + "acc_norm": 0.5202312138728323, + "acc_norm_stderr": 0.03809342081273956 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.02568056464005688, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.02568056464005688 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 
0.5416666666666666, + "acc_stderr": 0.04166666666666665, + "acc_norm": 0.5416666666666666, + "acc_norm_stderr": 0.04166666666666665 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.71, + "acc_stderr": 0.045604802157206824, + "acc_norm": 0.71, + "acc_norm_stderr": 0.045604802157206824 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.6127167630057804, + "acc_stderr": 0.026226158605124655, + "acc_norm": 0.6127167630057804, + "acc_norm_stderr": 0.026226158605124655 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.48466257668711654, + "acc_stderr": 0.03926522378708843, + "acc_norm": 0.48466257668711654, + "acc_norm_stderr": 0.03926522378708843 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6141975308641975, + "acc_stderr": 0.027085401226132143, + "acc_norm": 0.6141975308641975, + "acc_norm_stderr": 0.027085401226132143 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7357512953367875, + "acc_stderr": 0.03182155050916648, + "acc_norm": 0.7357512953367875, + "acc_norm_stderr": 0.03182155050916648 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.47368421052631576, + "acc_stderr": 0.046970851366478626, + "acc_norm": 0.47368421052631576, + "acc_norm_stderr": 0.046970851366478626 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.691743119266055, + "acc_stderr": 0.019798366698367254, + "acc_norm": 0.691743119266055, + "acc_norm_stderr": 0.019798366698367254 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.40476190476190477, + "acc_stderr": 0.0439025926537756, + "acc_norm": 0.40476190476190477, + "acc_norm_stderr": 0.0439025926537756 + }, + "harness|ko_mmlu_nutrition|5": { + 
"acc": 0.5686274509803921, + "acc_stderr": 0.028358956313423545, + "acc_norm": 0.5686274509803921, + "acc_norm_stderr": 0.028358956313423545 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.61, + "acc_stderr": 0.049020713000019756, + "acc_norm": 0.61, + "acc_norm_stderr": 0.049020713000019756 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7272727272727273, + "acc_stderr": 0.040655781409087044, + "acc_norm": 0.7272727272727273, + "acc_norm_stderr": 0.040655781409087044 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5986842105263158, + "acc_stderr": 0.039889037033362836, + "acc_norm": 0.5986842105263158, + "acc_norm_stderr": 0.039889037033362836 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5147058823529411, + "acc_stderr": 0.020219083895133924, + "acc_norm": 0.5147058823529411, + "acc_norm_stderr": 0.020219083895133924 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.40425531914893614, + "acc_stderr": 0.02927553215970473, + "acc_norm": 0.40425531914893614, + "acc_norm_stderr": 0.02927553215970473 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4375, + "acc_stderr": 0.04708567521880525, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.04708567521880525 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5138888888888888, + "acc_stderr": 0.03408655867977748, + "acc_norm": 0.5138888888888888, + "acc_norm_stderr": 0.03408655867977748 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23128491620111732, + "acc_stderr": 0.014102223623152594, + "acc_norm": 0.23128491620111732, + "acc_norm_stderr": 0.014102223623152594 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.48, + "acc_stderr": 0.05021167315686779, + "acc_norm": 0.48, + "acc_norm_stderr": 0.05021167315686779 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.71, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.71, + "acc_norm_stderr": 0.045604802157206845 + }, + 
"harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5220588235294118, + "acc_stderr": 0.03034326422421352, + "acc_norm": 0.5220588235294118, + "acc_norm_stderr": 0.03034326422421352 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5877551020408164, + "acc_stderr": 0.03151236044674269, + "acc_norm": 0.5877551020408164, + "acc_norm_stderr": 0.03151236044674269 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7721518987341772, + "acc_stderr": 0.02730348459906941, + "acc_norm": 0.7721518987341772, + "acc_norm_stderr": 0.02730348459906941 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.408735332464146, + "acc_stderr": 0.01255570134670339, + "acc_norm": 0.408735332464146, + "acc_norm_stderr": 0.01255570134670339 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6470588235294118, + "acc_stderr": 0.03354092437591519, + "acc_norm": 0.6470588235294118, + "acc_norm_stderr": 0.03354092437591519 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6787878787878788, + "acc_stderr": 0.036462049632538136, + "acc_norm": 0.6787878787878788, + "acc_norm_stderr": 0.036462049632538136 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.35495716034271724, + "mc1_stderr": 0.016750862381375905, + "mc2": 0.5241101036055598, + "mc2_stderr": 0.015594380871291398 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5537190082644629, + "acc_stderr": 0.017090852631668332, + "acc_norm": 0.6044864226682408, + "acc_norm_stderr": 0.01681081590220604 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + 
"harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + 
"harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Jaehyeon222/M-SOLAR-10.7B-v1.0-DPO", + "model_sha": "ab271d7e826dec3cbca538446d2c4f61af6fa30f", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Jaehyeon222/ME-MOE-7Bx2_test/result_2024-01-29 05:19:04.json b/Jaehyeon222/ME-MOE-7Bx2_test/result_2024-01-29 05:19:04.json new file mode 100644 index 0000000000000000000000000000000000000000..d845cb519861301afcca26486ea634081e441e7e --- /dev/null +++ b/Jaehyeon222/ME-MOE-7Bx2_test/result_2024-01-29 05:19:04.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.41723549488054607, + "acc_stderr": 0.014409825518403077, + "acc_norm": 0.47696245733788395, + "acc_norm_stderr": 0.014595873205358278 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4078868751244772, + "acc_stderr": 0.004904375631128866, + "acc_norm": 0.5408285202150966, + "acc_norm_stderr": 0.004973117975062487 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5380116959064327, + "acc_stderr": 0.03823727092882307, + "acc_norm": 0.5380116959064327, + "acc_norm_stderr": 0.03823727092882307 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6019417475728155, + "acc_stderr": 0.04846748253977238, + "acc_norm": 0.6019417475728155, + "acc_norm_stderr": 0.04846748253977238 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5363984674329502, + "acc_stderr": 0.01783252407959326, + "acc_norm": 0.5363984674329502, + "acc_norm_stderr": 0.01783252407959326 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4, + "acc_stderr": 0.04232073695151589, + "acc_norm": 0.4, + 
"acc_norm_stderr": 0.04232073695151589 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4765957446808511, + "acc_stderr": 0.03265019475033582, + "acc_norm": 0.4765957446808511, + "acc_norm_stderr": 0.03265019475033582 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.43373493975903615, + "acc_stderr": 0.03858158940685516, + "acc_norm": 0.43373493975903615, + "acc_norm_stderr": 0.03858158940685516 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4919614147909968, + "acc_stderr": 0.028394421370984545, + "acc_norm": 0.4919614147909968, + "acc_norm_stderr": 0.028394421370984545 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5112107623318386, + "acc_stderr": 0.033549366530984746, + "acc_norm": 0.5112107623318386, + "acc_norm_stderr": 0.033549366530984746 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5114503816793893, + "acc_stderr": 0.043841400240780176, + "acc_norm": 0.5114503816793893, + "acc_norm_stderr": 0.043841400240780176 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.03427308652999936, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.03427308652999936 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.496551724137931, + "acc_stderr": 0.041665675771015785, + "acc_norm": 0.496551724137931, + "acc_norm_stderr": 0.041665675771015785 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.30392156862745096, + "acc_stderr": 0.04576665403207762, + "acc_norm": 0.30392156862745096, + "acc_norm_stderr": 0.04576665403207762 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5840336134453782, + "acc_stderr": 0.03201650100739611, + 
"acc_norm": 0.5840336134453782, + "acc_norm_stderr": 0.03201650100739611 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5, + "acc_stderr": 0.02535100632816969, + "acc_norm": 0.5, + "acc_norm_stderr": 0.02535100632816969 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.63, + "acc_stderr": 0.04852365870939098, + "acc_norm": 0.63, + "acc_norm_stderr": 0.04852365870939098 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5740740740740741, + "acc_stderr": 0.0478034362693679, + "acc_norm": 0.5740740740740741, + "acc_norm_stderr": 0.0478034362693679 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.46798029556650245, + "acc_stderr": 0.035107665979592154, + "acc_norm": 0.46798029556650245, + "acc_norm_stderr": 0.035107665979592154 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5387096774193548, + "acc_stderr": 0.02835863485983695, + "acc_norm": 0.5387096774193548, + "acc_norm_stderr": 0.02835863485983695 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7393162393162394, + "acc_stderr": 0.02876034895652341, + "acc_norm": 0.7393162393162394, + "acc_norm_stderr": 0.02876034895652341 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5320754716981132, + "acc_stderr": 0.030709486992556545, + "acc_norm": 0.5320754716981132, + "acc_norm_stderr": 0.030709486992556545 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5363636363636364, + "acc_stderr": 0.04776449162396197, + "acc_norm": 0.5363636363636364, + "acc_norm_stderr": 0.04776449162396197 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.362962962962963, + "acc_stderr": 0.029318203645206858, + "acc_norm": 0.362962962962963, + "acc_norm_stderr": 0.029318203645206858 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3708609271523179, + "acc_stderr": 
0.03943966699183629, + "acc_norm": 0.3708609271523179, + "acc_norm_stderr": 0.03943966699183629 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.03333333333333333, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.03333333333333333 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.42196531791907516, + "acc_stderr": 0.0376574669386515, + "acc_norm": 0.42196531791907516, + "acc_norm_stderr": 0.0376574669386515 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.36772486772486773, + "acc_stderr": 0.02483383982556242, + "acc_norm": 0.36772486772486773, + "acc_norm_stderr": 0.02483383982556242 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4027777777777778, + "acc_stderr": 0.04101405519842426, + "acc_norm": 0.4027777777777778, + "acc_norm_stderr": 0.04101405519842426 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.546242774566474, + "acc_stderr": 0.026803720583206174, + "acc_norm": 0.546242774566474, + "acc_norm_stderr": 0.026803720583206174 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.49079754601226994, + "acc_stderr": 0.03927705600787443, + "acc_norm": 0.49079754601226994, + "acc_norm_stderr": 0.03927705600787443 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5246913580246914, + "acc_stderr": 0.027786800931427443, + "acc_norm": 0.5246913580246914, + "acc_norm_stderr": 0.027786800931427443 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.047609522856952344, + "acc_norm": 0.34, + "acc_norm_stderr": 0.047609522856952344 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6010362694300518, + 
"acc_stderr": 0.03533999094065696, + "acc_norm": 0.6010362694300518, + "acc_norm_stderr": 0.03533999094065696 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04434600701584925, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04434600701584925 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5651376146788991, + "acc_stderr": 0.021254631465609283, + "acc_norm": 0.5651376146788991, + "acc_norm_stderr": 0.021254631465609283 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.40476190476190477, + "acc_stderr": 0.043902592653775614, + "acc_norm": 0.40476190476190477, + "acc_norm_stderr": 0.043902592653775614 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5065359477124183, + "acc_stderr": 0.028627470550556047, + "acc_norm": 0.5065359477124183, + "acc_norm_stderr": 0.028627470550556047 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.53, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.53, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6776859504132231, + "acc_stderr": 0.04266416363352168, + "acc_norm": 0.6776859504132231, + "acc_norm_stderr": 0.04266416363352168 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5, + "acc_stderr": 0.04068942293855797, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04068942293855797 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4150326797385621, + "acc_stderr": 0.01993362777685741, + "acc_norm": 0.4150326797385621, + "acc_norm_stderr": 0.01993362777685741 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.39361702127659576, + "acc_stderr": 0.029144544781596147, + "acc_norm": 0.39361702127659576, + "acc_norm_stderr": 0.029144544781596147 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.38392857142857145, + "acc_stderr": 0.04616143075028546, + "acc_norm": 0.38392857142857145, + "acc_norm_stderr": 0.04616143075028546 + }, + "harness|ko_mmlu_high_school_statistics|5": { + 
"acc": 0.4722222222222222, + "acc_stderr": 0.0340470532865388, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.0340470532865388 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24581005586592178, + "acc_stderr": 0.014400296429225608, + "acc_norm": 0.24581005586592178, + "acc_norm_stderr": 0.014400296429225608 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.65, + "acc_stderr": 0.04793724854411021, + "acc_norm": 0.65, + "acc_norm_stderr": 0.04793724854411021 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.029520095697687765, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.029520095697687765 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6, + "acc_stderr": 0.03136250240935894, + "acc_norm": 0.6, + "acc_norm_stderr": 0.03136250240935894 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6877637130801688, + "acc_stderr": 0.03016513786784701, + "acc_norm": 0.6877637130801688, + "acc_norm_stderr": 0.03016513786784701 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3624511082138201, + "acc_stderr": 0.01227751253325248, + "acc_norm": 0.3624511082138201, + "acc_norm_stderr": 0.01227751253325248 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5343137254901961, + "acc_stderr": 0.03501038327635897, + "acc_norm": 0.5343137254901961, + "acc_norm_stderr": 0.03501038327635897 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5696969696969697, + "acc_stderr": 0.03866225962879077, + "acc_norm": 0.5696969696969697, + "acc_norm_stderr": 0.03866225962879077 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3390452876376989, + "mc1_stderr": 0.016571797910626632, + "mc2": 0.5072999736915416, + "mc2_stderr": 0.015639239247920277 + }, + 
"harness|ko_commongen_v2|2": { + "acc": 0.42384887839433294, + "acc_stderr": 0.01698981083462826, + "acc_norm": 0.44037780401416765, + "acc_norm_stderr": 0.01706769977431298 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + 
"harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Jaehyeon222/ME-MOE-7Bx2_test", + "model_sha": "e12b0107110febe4f1e6a555b0637743763d5161", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Jaewoo1/Foundation_Platypus_data/result_2023-10-18 09:16:14.json b/Jaewoo1/Foundation_Platypus_data/result_2023-10-18 09:16:14.json new file mode 100644 index 0000000000000000000000000000000000000000..6e3729e8a31e7e93ea1f229eae47620244c415e0 --- /dev/null +++ b/Jaewoo1/Foundation_Platypus_data/result_2023-10-18 09:16:14.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.30204778156996587, + "acc_stderr": 0.013417519144716417, + "acc_norm": 0.3174061433447099, + "acc_norm_stderr": 0.01360223908803817 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3450507866958773, + "acc_stderr": 0.004744132825391515, + "acc_norm": 0.41196972714598684, + "acc_norm_stderr": 0.00491183773058221 + }, + 
"harness|ko_mmlu_world_religions|5": { + "acc": 0.4619883040935672, + "acc_stderr": 0.03823727092882307, + "acc_norm": 0.4619883040935672, + "acc_norm_stderr": 0.03823727092882307 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.3592233009708738, + "acc_stderr": 0.04750458399041692, + "acc_norm": 0.3592233009708738, + "acc_norm_stderr": 0.04750458399041692 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.39719029374201786, + "acc_stderr": 0.017497905037159377, + "acc_norm": 0.39719029374201786, + "acc_norm_stderr": 0.017497905037159377 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.31851851851851853, + "acc_stderr": 0.040247784019771096, + "acc_norm": 0.31851851851851853, + "acc_norm_stderr": 0.040247784019771096 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3404255319148936, + "acc_stderr": 0.030976692998534422, + "acc_norm": 0.3404255319148936, + "acc_norm_stderr": 0.030976692998534422 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.2891566265060241, + "acc_stderr": 0.03529486801511114, + "acc_norm": 0.2891566265060241, + "acc_norm_stderr": 0.03529486801511114 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.37942122186495175, + "acc_stderr": 0.027559949802347817, + "acc_norm": 0.37942122186495175, + "acc_norm_stderr": 0.027559949802347817 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3901345291479821, + "acc_stderr": 0.03273766725459156, + "acc_norm": 0.3901345291479821, + "acc_norm_stderr": 0.03273766725459156 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.32061068702290074, + "acc_stderr": 0.040933292298342784, + "acc_norm": 0.32061068702290074, + "acc_norm_stderr": 0.040933292298342784 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + 
"harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3282828282828283, + "acc_stderr": 0.03345678422756777, + "acc_norm": 0.3282828282828283, + "acc_norm_stderr": 0.03345678422756777 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.38620689655172413, + "acc_stderr": 0.04057324734419034, + "acc_norm": 0.38620689655172413, + "acc_norm_stderr": 0.04057324734419034 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.19607843137254902, + "acc_stderr": 0.03950581861179964, + "acc_norm": 0.19607843137254902, + "acc_norm_stderr": 0.03950581861179964 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3487394957983193, + "acc_stderr": 0.030956636328566545, + "acc_norm": 0.3487394957983193, + "acc_norm_stderr": 0.030956636328566545 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3384615384615385, + "acc_stderr": 0.023991500500313036, + "acc_norm": 0.3384615384615385, + "acc_norm_stderr": 0.023991500500313036 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768077, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768077 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.04616631111801713, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.04616631111801713 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.31527093596059114, + "acc_stderr": 0.03269080871970186, + "acc_norm": 0.31527093596059114, + "acc_norm_stderr": 0.03269080871970186 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3419354838709677, + "acc_stderr": 0.026985289576552732, + "acc_norm": 0.3419354838709677, + "acc_norm_stderr": 0.026985289576552732 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5341880341880342, + "acc_stderr": 0.03267942734081228, + "acc_norm": 
0.5341880341880342, + "acc_norm_stderr": 0.03267942734081228 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.33962264150943394, + "acc_stderr": 0.029146904747798342, + "acc_norm": 0.33962264150943394, + "acc_norm_stderr": 0.029146904747798342 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4, + "acc_stderr": 0.0469237132203465, + "acc_norm": 0.4, + "acc_norm_stderr": 0.0469237132203465 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3296296296296296, + "acc_stderr": 0.02866120111652457, + "acc_norm": 0.3296296296296296, + "acc_norm_stderr": 0.02866120111652457 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + "acc_stderr": 0.037345356767871984, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.037345356767871984 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.4129353233830846, + "acc_stderr": 0.03481520803367348, + "acc_norm": 0.4129353233830846, + "acc_norm_stderr": 0.03481520803367348 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2947976878612717, + "acc_stderr": 0.03476599607516478, + "acc_norm": 0.2947976878612717, + "acc_norm_stderr": 0.03476599607516478 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30423280423280424, + "acc_stderr": 0.023695415009463087, + "acc_norm": 0.30423280423280424, + "acc_norm_stderr": 0.023695415009463087 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2708333333333333, + "acc_stderr": 0.037161774375660164, + "acc_norm": 0.2708333333333333, + "acc_norm_stderr": 0.037161774375660164 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.3930635838150289, + "acc_stderr": 0.026296227915613663, + 
"acc_norm": 0.3930635838150289, + "acc_norm_stderr": 0.026296227915613663 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3619631901840491, + "acc_stderr": 0.037757007291414416, + "acc_norm": 0.3619631901840491, + "acc_norm_stderr": 0.037757007291414416 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3487654320987654, + "acc_stderr": 0.02651759772446501, + "acc_norm": 0.3487654320987654, + "acc_norm_stderr": 0.02651759772446501 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.40932642487046633, + "acc_stderr": 0.03548608168860806, + "acc_norm": 0.40932642487046633, + "acc_norm_stderr": 0.03548608168860806 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.19298245614035087, + "acc_stderr": 0.037124548537213684, + "acc_norm": 0.19298245614035087, + "acc_norm_stderr": 0.037124548537213684 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3522935779816514, + "acc_stderr": 0.020480568843999004, + "acc_norm": 0.3522935779816514, + "acc_norm_stderr": 0.020480568843999004 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.0404061017820884, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.0404061017820884 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.39215686274509803, + "acc_stderr": 0.027956046165424516, + "acc_norm": 0.39215686274509803, + "acc_norm_stderr": 0.027956046165424516 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5206611570247934, + "acc_stderr": 0.045604560863872365, + "acc_norm": 0.5206611570247934, + "acc_norm_stderr": 0.045604560863872365 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3881578947368421, + 
"acc_stderr": 0.03965842097512744, + "acc_norm": 0.3881578947368421, + "acc_norm_stderr": 0.03965842097512744 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.32189542483660133, + "acc_stderr": 0.01890101532209309, + "acc_norm": 0.32189542483660133, + "acc_norm_stderr": 0.01890101532209309 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.30851063829787234, + "acc_stderr": 0.027553366165101373, + "acc_norm": 0.30851063829787234, + "acc_norm_stderr": 0.027553366165101373 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.24107142857142858, + "acc_stderr": 0.04059867246952688, + "acc_norm": 0.24107142857142858, + "acc_norm_stderr": 0.04059867246952688 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3472222222222222, + "acc_stderr": 0.032468872436376486, + "acc_norm": 0.3472222222222222, + "acc_norm_stderr": 0.032468872436376486 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23798882681564246, + "acc_stderr": 0.01424263007057489, + "acc_norm": 0.23798882681564246, + "acc_norm_stderr": 0.01424263007057489 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.26838235294117646, + "acc_stderr": 0.026917481224377246, + "acc_norm": 0.26838235294117646, + "acc_norm_stderr": 0.026917481224377246 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.31020408163265306, + "acc_stderr": 0.029613459872484375, + "acc_norm": 0.31020408163265306, + "acc_norm_stderr": 0.029613459872484375 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5274261603375527, + "acc_stderr": 0.03249822718301303, + "acc_norm": 0.5274261603375527, + "acc_norm_stderr": 
0.03249822718301303 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.303129074315515, + "acc_stderr": 0.0117386699512543, + "acc_norm": 0.303129074315515, + "acc_norm_stderr": 0.0117386699512543 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.03410785338904719, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.03410785338904719 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3878787878787879, + "acc_stderr": 0.038049136539710114, + "acc_norm": 0.3878787878787879, + "acc_norm_stderr": 0.038049136539710114 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2729498164014688, + "mc1_stderr": 0.015594753632006509, + "mc2": 0.4249328187172098, + "mc2_stderr": 0.016337088601279814 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2550177095631641, + "acc_stderr": 0.014985559533428554, + "acc_norm": 0.30932703659976385, + "acc_norm_stderr": 0.015891320505520893 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + 
"harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Jaewoo1/Foundation_Platypus_data", + "model_sha": "63fbecee8df6cc694880299e37b7cd8f8140942e", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git 
a/Jaewoo1/KoT-Platypus2_foundation/result_2023-10-16 07:12:51.json b/Jaewoo1/KoT-Platypus2_foundation/result_2023-10-16 07:12:51.json new file mode 100644 index 0000000000000000000000000000000000000000..7de61f7a15a36f9e0fa9621a6b744ee5b65345a1 --- /dev/null +++ b/Jaewoo1/KoT-Platypus2_foundation/result_2023-10-16 07:12:51.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.27047781569965873, + "acc_stderr": 0.012980954547659556, + "acc_norm": 0.3319112627986348, + "acc_norm_stderr": 0.013760988200880541 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3505277833100976, + "acc_stderr": 0.004761601303258889, + "acc_norm": 0.44722166899024096, + "acc_norm_stderr": 0.0049619049491713965 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.32748538011695905, + "acc_stderr": 0.03599335771456027, + "acc_norm": 0.32748538011695905, + "acc_norm_stderr": 0.03599335771456027 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.3106796116504854, + "acc_stderr": 0.04582124160161549, + "acc_norm": 0.3106796116504854, + "acc_norm_stderr": 0.04582124160161549 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.351213282247765, + "acc_stderr": 0.01706998205149943, + "acc_norm": 0.351213282247765, + "acc_norm_stderr": 0.01706998205149943 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.31851851851851853, + "acc_stderr": 0.040247784019771096, + "acc_norm": 0.31851851851851853, + "acc_norm_stderr": 0.040247784019771096 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3659574468085106, + "acc_stderr": 0.031489558297455304, + "acc_norm": 0.3659574468085106, + "acc_norm_stderr": 0.031489558297455304 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3493975903614458, + "acc_stderr": 0.0371172519074075, + "acc_norm": 0.3493975903614458, + "acc_norm_stderr": 0.0371172519074075 + }, 
+ "harness|ko_mmlu_philosophy|5": { + "acc": 0.31511254019292606, + "acc_stderr": 0.026385273703464496, + "acc_norm": 0.31511254019292606, + "acc_norm_stderr": 0.026385273703464496 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4125560538116592, + "acc_stderr": 0.03304062175449297, + "acc_norm": 0.4125560538116592, + "acc_norm_stderr": 0.03304062175449297 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.29770992366412213, + "acc_stderr": 0.040103589424622034, + "acc_norm": 0.29770992366412213, + "acc_norm_stderr": 0.040103589424622034 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.03358618145732524, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.03358618145732524 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3448275862068966, + "acc_stderr": 0.03960933549451207, + "acc_norm": 0.3448275862068966, + "acc_norm_stderr": 0.03960933549451207 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237655, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237655 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.27310924369747897, + "acc_stderr": 0.028942004040998167, + "acc_norm": 0.27310924369747897, + "acc_norm_stderr": 0.028942004040998167 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2717948717948718, + "acc_stderr": 0.02255655101013235, + "acc_norm": 0.2717948717948718, + "acc_norm_stderr": 0.02255655101013235 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 
0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.044531975073749834, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.044531975073749834 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.23645320197044334, + "acc_stderr": 0.029896114291733552, + "acc_norm": 0.23645320197044334, + "acc_norm_stderr": 0.029896114291733552 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3161290322580645, + "acc_stderr": 0.026450874489042767, + "acc_norm": 0.3161290322580645, + "acc_norm_stderr": 0.026450874489042767 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.4658119658119658, + "acc_stderr": 0.03267942734081228, + "acc_norm": 0.4658119658119658, + "acc_norm_stderr": 0.03267942734081228 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3320754716981132, + "acc_stderr": 0.028985455652334395, + "acc_norm": 0.3320754716981132, + "acc_norm_stderr": 0.028985455652334395 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.39090909090909093, + "acc_stderr": 0.04673752333670237, + "acc_norm": 0.39090909090909093, + "acc_norm_stderr": 0.04673752333670237 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.22962962962962963, + "acc_stderr": 0.025644108639267645, + "acc_norm": 0.22962962962962963, + "acc_norm_stderr": 0.025644108639267645 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33112582781456956, + "acc_stderr": 0.038425817186598696, + "acc_norm": 0.33112582781456956, + "acc_norm_stderr": 0.038425817186598696 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.03333333333333336, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.03333333333333336 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2658959537572254, + "acc_stderr": 0.033687629322594295, + "acc_norm": 0.2658959537572254, + "acc_norm_stderr": 0.033687629322594295 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 
0.25925925925925924, + "acc_stderr": 0.022569897074918428, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.022569897074918428 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.037455547914624576, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.037455547914624576 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.28901734104046245, + "acc_stderr": 0.02440517393578323, + "acc_norm": 0.28901734104046245, + "acc_norm_stderr": 0.02440517393578323 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2822085889570552, + "acc_stderr": 0.03536117886664743, + "acc_norm": 0.2822085889570552, + "acc_norm_stderr": 0.03536117886664743 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.31790123456790126, + "acc_stderr": 0.025910063528240865, + "acc_norm": 0.31790123456790126, + "acc_norm_stderr": 0.025910063528240865 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.2694300518134715, + "acc_stderr": 0.032018671228777947, + "acc_norm": 0.2694300518134715, + "acc_norm_stderr": 0.032018671228777947 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.04185774424022056, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.04185774424022056 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3155963302752294, + "acc_stderr": 0.019926117513869666, + "acc_norm": 0.3155963302752294, + "acc_norm_stderr": 0.019926117513869666 + }, + 
"harness|ko_mmlu_formal_logic|5": { + "acc": 0.20634920634920634, + "acc_stderr": 0.0361960452412425, + "acc_norm": 0.20634920634920634, + "acc_norm_stderr": 0.0361960452412425 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3006535947712418, + "acc_stderr": 0.026256053835718964, + "acc_norm": 0.3006535947712418, + "acc_norm_stderr": 0.026256053835718964 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.38016528925619836, + "acc_stderr": 0.04431324501968431, + "acc_norm": 0.38016528925619836, + "acc_norm_stderr": 0.04431324501968431 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.035834961763610625, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.035834961763610625 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2565359477124183, + "acc_stderr": 0.01766784161237899, + "acc_norm": 0.2565359477124183, + "acc_norm_stderr": 0.01766784161237899 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.24113475177304963, + "acc_stderr": 0.025518731049537773, + "acc_norm": 0.24113475177304963, + "acc_norm_stderr": 0.025518731049537773 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3392857142857143, + "acc_stderr": 0.044939490686135404, + "acc_norm": 0.3392857142857143, + "acc_norm_stderr": 0.044939490686135404 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.23148148148148148, + "acc_stderr": 0.028765111718046972, + "acc_norm": 0.23148148148148148, + "acc_norm_stderr": 0.028765111718046972 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.22793296089385476, + "acc_stderr": 0.014030149950805095, + "acc_norm": 0.22793296089385476, + "acc_norm_stderr": 0.014030149950805095 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + 
"acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3492647058823529, + "acc_stderr": 0.02895975519682486, + "acc_norm": 0.3492647058823529, + "acc_norm_stderr": 0.02895975519682486 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.19183673469387755, + "acc_stderr": 0.025206963154225395, + "acc_norm": 0.19183673469387755, + "acc_norm_stderr": 0.025206963154225395 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.3037974683544304, + "acc_stderr": 0.0299366963871386, + "acc_norm": 0.3037974683544304, + "acc_norm_stderr": 0.0299366963871386 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2666232073011734, + "acc_stderr": 0.01129383603161213, + "acc_norm": 0.2666232073011734, + "acc_norm_stderr": 0.01129383603161213 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.3235294117647059, + "acc_stderr": 0.03283472056108567, + "acc_norm": 0.3235294117647059, + "acc_norm_stderr": 0.03283472056108567 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.30303030303030304, + "acc_stderr": 0.035886248000917075, + "acc_norm": 0.30303030303030304, + "acc_norm_stderr": 0.035886248000917075 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.27539779681762544, + "mc1_stderr": 0.015638135667775523, + "mc2": 0.4297360873033464, + "mc2_stderr": 0.016304548005749996 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.23258559622195984, + "acc_stderr": 0.014525169182416493, + "acc_norm": 0.27508854781582054, + "acc_norm_stderr": 0.015353010757952649 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + 
"harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + 
"harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Jaewoo1/KoT-Platypus2_foundation", + "model_sha": "7e97a65b825f9aa4691fe2bebf14696d80ba831d", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Jaewoo1/Llama2-7B-Blend-3rd-dup-Active-LoRA/result_2023-10-04 03:17:08.json b/Jaewoo1/Llama2-7B-Blend-3rd-dup-Active-LoRA/result_2023-10-04 03:17:08.json new file mode 100644 index 0000000000000000000000000000000000000000..113f7460903568d5c1a03b126f52055cdbab6d56 --- /dev/null +++ b/Jaewoo1/Llama2-7B-Blend-3rd-dup-Active-LoRA/result_2023-10-04 03:17:08.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.257679180887372, + "acc_stderr": 0.0127807705627684, + "acc_norm": 0.3003412969283277, + "acc_norm_stderr": 0.01339590930995701 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3263294164509062, + "acc_stderr": 0.004679111783653908, + "acc_norm": 0.385381398127863, + "acc_norm_stderr": 0.00485690647371939 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.38596491228070173, + "acc_stderr": 0.03733756969066164, + "acc_norm": 0.38596491228070173, + "acc_norm_stderr": 0.03733756969066164 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.3786407766990291, + "acc_stderr": 0.048026946982589726, + "acc_norm": 0.3786407766990291, + "acc_norm_stderr": 0.048026946982589726 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 
0.3959131545338442, + "acc_stderr": 0.01748824700697927, + "acc_norm": 0.3959131545338442, + "acc_norm_stderr": 0.01748824700697927 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04072314811876837, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04072314811876837 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.31063829787234043, + "acc_stderr": 0.030251237579213167, + "acc_norm": 0.31063829787234043, + "acc_norm_stderr": 0.030251237579213167 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3493975903614458, + "acc_stderr": 0.0371172519074075, + "acc_norm": 0.3493975903614458, + "acc_norm_stderr": 0.0371172519074075 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3729903536977492, + "acc_stderr": 0.0274666102131401, + "acc_norm": 0.3729903536977492, + "acc_norm_stderr": 0.0274666102131401 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3811659192825112, + "acc_stderr": 0.032596251184168264, + "acc_norm": 0.3811659192825112, + "acc_norm_stderr": 0.032596251184168264 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.35877862595419846, + "acc_stderr": 0.04206739313864908, + "acc_norm": 0.35877862595419846, + "acc_norm_stderr": 0.04206739313864908 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.0347327959083696, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.0347327959083696 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4068965517241379, + "acc_stderr": 0.04093793981266237, + "acc_norm": 0.4068965517241379, + "acc_norm_stderr": 0.04093793981266237 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 
0.21568627450980393, + "acc_stderr": 0.04092563958237655, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237655 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3319327731092437, + "acc_stderr": 0.030588697013783667, + "acc_norm": 0.3319327731092437, + "acc_norm_stderr": 0.030588697013783667 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3282051282051282, + "acc_stderr": 0.023807633198657262, + "acc_norm": 0.3282051282051282, + "acc_norm_stderr": 0.023807633198657262 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.41, + "acc_stderr": 0.04943110704237101, + "acc_norm": 0.41, + "acc_norm_stderr": 0.04943110704237101 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.04766075165356461, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.04766075165356461 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3251231527093596, + "acc_stderr": 0.032957975663112704, + "acc_norm": 0.3251231527093596, + "acc_norm_stderr": 0.032957975663112704 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3387096774193548, + "acc_stderr": 0.02692344605930284, + "acc_norm": 0.3387096774193548, + "acc_norm_stderr": 0.02692344605930284 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5470085470085471, + "acc_stderr": 0.0326109987309862, + "acc_norm": 0.5470085470085471, + "acc_norm_stderr": 0.0326109987309862 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.35471698113207545, + "acc_stderr": 0.029445175328199593, + "acc_norm": 0.35471698113207545, + "acc_norm_stderr": 0.029445175328199593 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4, + "acc_stderr": 0.0469237132203465, + "acc_norm": 0.4, + "acc_norm_stderr": 0.0469237132203465 + }, + 
"harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2851851851851852, + "acc_stderr": 0.027528599210340496, + "acc_norm": 0.2851851851851852, + "acc_norm_stderr": 0.027528599210340496 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.24503311258278146, + "acc_stderr": 0.03511807571804724, + "acc_norm": 0.24503311258278146, + "acc_norm_stderr": 0.03511807571804724 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.44776119402985076, + "acc_stderr": 0.03516184772952166, + "acc_norm": 0.44776119402985076, + "acc_norm_stderr": 0.03516184772952166 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3063583815028902, + "acc_stderr": 0.03514942551267437, + "acc_norm": 0.3063583815028902, + "acc_norm_stderr": 0.03514942551267437 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2830687830687831, + "acc_stderr": 0.023201392938194978, + "acc_norm": 0.2830687830687831, + "acc_norm_stderr": 0.023201392938194978 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2986111111111111, + "acc_stderr": 0.03827052357950756, + "acc_norm": 0.2986111111111111, + "acc_norm_stderr": 0.03827052357950756 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.3699421965317919, + "acc_stderr": 0.025992472029306386, + "acc_norm": 0.3699421965317919, + "acc_norm_stderr": 0.025992472029306386 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3803680981595092, + "acc_stderr": 0.038142698932618374, + "acc_norm": 0.3803680981595092, + "acc_norm_stderr": 0.038142698932618374 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3395061728395062, + "acc_stderr": 0.026348564412011624, + "acc_norm": 0.3395061728395062, + "acc_norm_stderr": 
0.026348564412011624 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421296, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421296 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.37823834196891193, + "acc_stderr": 0.034998072761933396, + "acc_norm": 0.37823834196891193, + "acc_norm_stderr": 0.034998072761933396 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.04142439719489362, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.04142439719489362 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3339449541284404, + "acc_stderr": 0.020220554196736403, + "acc_norm": 0.3339449541284404, + "acc_norm_stderr": 0.020220554196736403 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.03932537680392871, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.03932537680392871 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3562091503267974, + "acc_stderr": 0.02742047766262925, + "acc_norm": 0.3562091503267974, + "acc_norm_stderr": 0.02742047766262925 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.512396694214876, + "acc_stderr": 0.04562951548180765, + "acc_norm": 0.512396694214876, + "acc_norm_stderr": 0.04562951548180765 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3223684210526316, + "acc_stderr": 0.03803510248351587, + "acc_norm": 0.3223684210526316, + "acc_norm_stderr": 0.03803510248351587 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.29248366013071897, + "acc_stderr": 0.018403415710109797, + "acc_norm": 0.29248366013071897, + "acc_norm_stderr": 0.018403415710109797 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2730496453900709, + "acc_stderr": 0.026577860943307857, + 
"acc_norm": 0.2730496453900709, + "acc_norm_stderr": 0.026577860943307857 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.30357142857142855, + "acc_stderr": 0.04364226155841044, + "acc_norm": 0.30357142857142855, + "acc_norm_stderr": 0.04364226155841044 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.031674687068289784, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.031674687068289784 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24581005586592178, + "acc_stderr": 0.014400296429225608, + "acc_norm": 0.24581005586592178, + "acc_norm_stderr": 0.014400296429225608 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.26838235294117646, + "acc_stderr": 0.026917481224377243, + "acc_norm": 0.26838235294117646, + "acc_norm_stderr": 0.026917481224377243 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3469387755102041, + "acc_stderr": 0.030472526026726492, + "acc_norm": 0.3469387755102041, + "acc_norm_stderr": 0.030472526026726492 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.3670886075949367, + "acc_stderr": 0.03137624072561618, + "acc_norm": 0.3670886075949367, + "acc_norm_stderr": 0.03137624072561618 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.28552803129074317, + "acc_stderr": 0.011535751586665673, + "acc_norm": 0.28552803129074317, + "acc_norm_stderr": 0.011535751586665673 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.35784313725490197, + "acc_stderr": 0.033644872860882996, + "acc_norm": 0.35784313725490197, + "acc_norm_stderr": 0.033644872860882996 + }, + 
"harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.42424242424242425, + "acc_stderr": 0.038592681420702615, + "acc_norm": 0.42424242424242425, + "acc_norm_stderr": 0.038592681420702615 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2582619339045288, + "mc1_stderr": 0.015321821688476196, + "mc2": 0.41968593595047643, + "mc2_stderr": 0.016254999867947123 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2384887839433294, + "acc_stderr": 0.014651663985271578, + "acc_norm": 0.29279811097992914, + "acc_norm_stderr": 0.015644823205401334 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + 
"harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Jaewoo1/Llama2-7B-Blend-3rd-dup-Active-LoRA", + "model_sha": "cbb72323bf2db6eb9ea591a4a882d02964d53eed", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Jaewoo1/Llama2-7B-ShareGPT-Wiki_noprompt-News_noprompt-CoT-blending-circulus/result_2023-10-04 09:05:17.json b/Jaewoo1/Llama2-7B-ShareGPT-Wiki_noprompt-News_noprompt-CoT-blending-circulus/result_2023-10-04 09:05:17.json new file mode 100644 index 0000000000000000000000000000000000000000..507d14683459004ae3b0f17d264e96971a437be9 --- /dev/null +++ 
b/Jaewoo1/Llama2-7B-ShareGPT-Wiki_noprompt-News_noprompt-CoT-blending-circulus/result_2023-10-04 09:05:17.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.29692832764505117, + "acc_stderr": 0.013352025976725222, + "acc_norm": 0.34812286689419797, + "acc_norm_stderr": 0.013921008595179342 + }, + "harness|ko_hellaswag|10": { + "acc": 0.35311690898227444, + "acc_stderr": 0.004769618829196517, + "acc_norm": 0.42939653455486954, + "acc_norm_stderr": 0.0049397843114489855 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4152046783625731, + "acc_stderr": 0.03779275945503201, + "acc_norm": 0.4152046783625731, + "acc_norm_stderr": 0.03779275945503201 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.42718446601941745, + "acc_stderr": 0.04897957737781169, + "acc_norm": 0.42718446601941745, + "acc_norm_stderr": 0.04897957737781169 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.41507024265644954, + "acc_stderr": 0.017620137003655265, + "acc_norm": 0.41507024265644954, + "acc_norm_stderr": 0.017620137003655265 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.04171654161354543, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.04171654161354543 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.32340425531914896, + "acc_stderr": 0.03057944277361034, + "acc_norm": 0.32340425531914896, + "acc_norm_stderr": 0.03057944277361034 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.35542168674698793, + "acc_stderr": 0.03726214354322415, + "acc_norm": 0.35542168674698793, + "acc_norm_stderr": 0.03726214354322415 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4405144694533762, + "acc_stderr": 0.028196400574197422, + "acc_norm": 0.4405144694533762, + "acc_norm_stderr": 0.028196400574197422 + }, + 
"harness|ko_mmlu_human_aging|5": { + "acc": 0.3632286995515695, + "acc_stderr": 0.032277904428505, + "acc_norm": 0.3632286995515695, + "acc_norm_stderr": 0.032277904428505 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4122137404580153, + "acc_stderr": 0.04317171194870254, + "acc_norm": 0.4122137404580153, + "acc_norm_stderr": 0.04317171194870254 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.43434343434343436, + "acc_stderr": 0.03531505879359183, + "acc_norm": 0.43434343434343436, + "acc_norm_stderr": 0.03531505879359183 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.36551724137931035, + "acc_stderr": 0.04013124195424386, + "acc_norm": 0.36551724137931035, + "acc_norm_stderr": 0.04013124195424386 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.18627450980392157, + "acc_stderr": 0.03873958714149351, + "acc_norm": 0.18627450980392157, + "acc_norm_stderr": 0.03873958714149351 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3739495798319328, + "acc_stderr": 0.031429466378837076, + "acc_norm": 0.3739495798319328, + "acc_norm_stderr": 0.031429466378837076 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.34615384615384615, + "acc_stderr": 0.024121125416941173, + "acc_norm": 0.34615384615384615, + "acc_norm_stderr": 0.024121125416941173 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4537037037037037, + "acc_stderr": 0.04812917324536823, + "acc_norm": 0.4537037037037037, + "acc_norm_stderr": 0.04812917324536823 + }, + 
"harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3251231527093596, + "acc_stderr": 0.032957975663112704, + "acc_norm": 0.3251231527093596, + "acc_norm_stderr": 0.032957975663112704 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3774193548387097, + "acc_stderr": 0.02757596072327824, + "acc_norm": 0.3774193548387097, + "acc_norm_stderr": 0.02757596072327824 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5726495726495726, + "acc_stderr": 0.03240847393516326, + "acc_norm": 0.5726495726495726, + "acc_norm_stderr": 0.03240847393516326 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.37358490566037733, + "acc_stderr": 0.02977308271331988, + "acc_norm": 0.37358490566037733, + "acc_norm_stderr": 0.02977308271331988 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4636363636363636, + "acc_stderr": 0.04776449162396197, + "acc_norm": 0.4636363636363636, + "acc_norm_stderr": 0.04776449162396197 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.28888888888888886, + "acc_stderr": 0.02763490726417854, + "acc_norm": 0.28888888888888886, + "acc_norm_stderr": 0.02763490726417854 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + "acc_stderr": 0.037345356767871984, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.037345356767871984 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.4925373134328358, + "acc_stderr": 0.03535140084276719, + "acc_norm": 0.4925373134328358, + "acc_norm_stderr": 0.03535140084276719 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.32947976878612717, + "acc_stderr": 0.03583901754736412, + "acc_norm": 0.32947976878612717, + "acc_norm_stderr": 0.03583901754736412 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.26455026455026454, + "acc_stderr": 0.022717467897708617, + "acc_norm": 0.26455026455026454, + "acc_norm_stderr": 0.022717467897708617 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3402777777777778, + "acc_stderr": 
0.03962135573486219, + "acc_norm": 0.3402777777777778, + "acc_norm_stderr": 0.03962135573486219 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4190751445086705, + "acc_stderr": 0.026564178111422622, + "acc_norm": 0.4190751445086705, + "acc_norm_stderr": 0.026564178111422622 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4049079754601227, + "acc_stderr": 0.03856672163548914, + "acc_norm": 0.4049079754601227, + "acc_norm_stderr": 0.03856672163548914 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.41358024691358025, + "acc_stderr": 0.027402042040269955, + "acc_norm": 0.41358024691358025, + "acc_norm_stderr": 0.027402042040269955 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421296, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421296 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.43005181347150256, + "acc_stderr": 0.03572954333144808, + "acc_norm": 0.43005181347150256, + "acc_norm_stderr": 0.03572954333144808 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.042270544512321984, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.042270544512321984 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3834862385321101, + "acc_stderr": 0.020847156641915984, + "acc_norm": 0.3834862385321101, + "acc_norm_stderr": 0.020847156641915984 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2698412698412698, + "acc_stderr": 0.03970158273235172, + "acc_norm": 0.2698412698412698, + "acc_norm_stderr": 0.03970158273235172 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3790849673202614, + 
"acc_stderr": 0.027780141207023337, + "acc_norm": 0.3790849673202614, + "acc_norm_stderr": 0.027780141207023337 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5785123966942148, + "acc_stderr": 0.04507732278775089, + "acc_norm": 0.5785123966942148, + "acc_norm_stderr": 0.04507732278775089 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3223684210526316, + "acc_stderr": 0.038035102483515854, + "acc_norm": 0.3223684210526316, + "acc_norm_stderr": 0.038035102483515854 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3284313725490196, + "acc_stderr": 0.01899970738316267, + "acc_norm": 0.3284313725490196, + "acc_norm_stderr": 0.01899970738316267 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.30851063829787234, + "acc_stderr": 0.027553366165101362, + "acc_norm": 0.30851063829787234, + "acc_norm_stderr": 0.027553366165101362 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.04547960999764376, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.04547960999764376 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3472222222222222, + "acc_stderr": 0.03246887243637648, + "acc_norm": 0.3472222222222222, + "acc_norm_stderr": 0.03246887243637648 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.264804469273743, + "acc_stderr": 0.014756906483260657, + "acc_norm": 0.264804469273743, + "acc_norm_stderr": 0.014756906483260657 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_professional_medicine|5": { 
+ "acc": 0.33455882352941174, + "acc_stderr": 0.028661996202335314, + "acc_norm": 0.33455882352941174, + "acc_norm_stderr": 0.028661996202335314 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.43673469387755104, + "acc_stderr": 0.031751952375833226, + "acc_norm": 0.43673469387755104, + "acc_norm_stderr": 0.031751952375833226 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.4219409282700422, + "acc_stderr": 0.032148146302403695, + "acc_norm": 0.4219409282700422, + "acc_norm_stderr": 0.032148146302403695 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2920469361147327, + "acc_stderr": 0.011613349136271817, + "acc_norm": 0.2920469361147327, + "acc_norm_stderr": 0.011613349136271817 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4068627450980392, + "acc_stderr": 0.03447891136353383, + "acc_norm": 0.4068627450980392, + "acc_norm_stderr": 0.03447891136353383 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4303030303030303, + "acc_stderr": 0.03866225962879077, + "acc_norm": 0.4303030303030303, + "acc_norm_stderr": 0.03866225962879077 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3023255813953488, + "mc1_stderr": 0.01607750926613303, + "mc2": 0.4750714543386988, + "mc2_stderr": 0.016159472828434183 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2727272727272727, + "acc_stderr": 0.015311853110300352, + "acc_norm": 0.34946871310507677, + "acc_norm_stderr": 0.01639279708576985 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + 
"harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + 
"harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Jaewoo1/Llama2-7B-ShareGPT-Wiki_noprompt-News_noprompt-CoT-blending-circulus", + "model_sha": "1c97acb58f2a740d7994d1ea7b0c02c234bbde3a", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Jaewoo1/Platypus7B_Follow_FT/result_2023-10-21 14:41:08.json b/Jaewoo1/Platypus7B_Follow_FT/result_2023-10-21 14:41:08.json new file mode 100644 index 0000000000000000000000000000000000000000..2a38e96f33d447a72d36aae0212ec045cf617ca5 --- /dev/null +++ b/Jaewoo1/Platypus7B_Follow_FT/result_2023-10-21 14:41:08.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.197098976109215, + "acc_stderr": 0.011625047669880612, + "acc_norm": 0.26535836177474403, + "acc_norm_stderr": 0.012902554762313964 + }, + "harness|ko_hellaswag|10": { + "acc": 0.29904401513642703, + "acc_stderr": 0.0045690346133326004, + "acc_norm": 0.36675960963951404, + "acc_norm_stderr": 0.0048093520750089385 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.22807017543859648, + "acc_stderr": 0.03218093795602357, + "acc_norm": 0.22807017543859648, + "acc_norm_stderr": 0.03218093795602357 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.20388349514563106, + "acc_stderr": 0.0398913985953177, + "acc_norm": 0.20388349514563106, + "acc_norm_stderr": 0.0398913985953177 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2796934865900383, + "acc_stderr": 0.016050792148036536, + "acc_norm": 0.2796934865900383, + "acc_norm_stderr": 0.016050792148036536 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.32592592592592595, + "acc_stderr": 0.040491220417025055, + "acc_norm": 
0.32592592592592595, + "acc_norm_stderr": 0.040491220417025055 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.23404255319148937, + "acc_stderr": 0.027678452578212387, + "acc_norm": 0.23404255319148937, + "acc_norm_stderr": 0.027678452578212387 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.21084337349397592, + "acc_stderr": 0.031755547866299194, + "acc_norm": 0.21084337349397592, + "acc_norm_stderr": 0.031755547866299194 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2765273311897106, + "acc_stderr": 0.02540383297817961, + "acc_norm": 0.2765273311897106, + "acc_norm_stderr": 0.02540383297817961 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.2825112107623318, + "acc_stderr": 0.030216831011508762, + "acc_norm": 0.2825112107623318, + "acc_norm_stderr": 0.030216831011508762 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.24427480916030533, + "acc_stderr": 0.03768335959728742, + "acc_norm": 0.24427480916030533, + "acc_norm_stderr": 0.03768335959728742 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.2676767676767677, + "acc_stderr": 0.03154449888270285, + "acc_norm": 0.2676767676767677, + "acc_norm_stderr": 0.03154449888270285 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.25517241379310346, + "acc_stderr": 0.03632984052707842, + "acc_norm": 0.25517241379310346, + "acc_norm_stderr": 0.03632984052707842 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.04158307533083286, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.04158307533083286 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 
0.027553614467863786, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.027553614467863786 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2641025641025641, + "acc_stderr": 0.022352193737453285, + "acc_norm": 0.2641025641025641, + "acc_norm_stderr": 0.022352193737453285 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.26851851851851855, + "acc_stderr": 0.04284467968052191, + "acc_norm": 0.26851851851851855, + "acc_norm_stderr": 0.04284467968052191 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2315270935960591, + "acc_stderr": 0.029678333141444444, + "acc_norm": 0.2315270935960591, + "acc_norm_stderr": 0.029678333141444444 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3096774193548387, + "acc_stderr": 0.026302774983517414, + "acc_norm": 0.3096774193548387, + "acc_norm_stderr": 0.026302774983517414 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.030882736974138663, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.030882736974138663 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2641509433962264, + "acc_stderr": 0.0271342916287417, + "acc_norm": 0.2641509433962264, + "acc_norm_stderr": 0.0271342916287417 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.20909090909090908, + "acc_stderr": 0.038950910157241364, + "acc_norm": 0.20909090909090908, + "acc_norm_stderr": 0.038950910157241364 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24814814814814815, + "acc_stderr": 0.0263357394040558, + "acc_norm": 0.24814814814814815, + "acc_norm_stderr": 0.0263357394040558 + }, + "harness|ko_mmlu_high_school_physics|5": 
{ + "acc": 0.2980132450331126, + "acc_stderr": 0.03734535676787198, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.03734535676787198 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.29850746268656714, + "acc_stderr": 0.032357437893550424, + "acc_norm": 0.29850746268656714, + "acc_norm_stderr": 0.032357437893550424 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.21965317919075145, + "acc_stderr": 0.031568093627031744, + "acc_norm": 0.21965317919075145, + "acc_norm_stderr": 0.031568093627031744 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.24603174603174602, + "acc_stderr": 0.022182037202948368, + "acc_norm": 0.24603174603174602, + "acc_norm_stderr": 0.022182037202948368 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.03476590104304134, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.03476590104304134 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909282, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909282 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.23410404624277456, + "acc_stderr": 0.022797110278071128, + "acc_norm": 0.23410404624277456, + "acc_norm_stderr": 0.022797110278071128 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.37423312883435583, + "acc_stderr": 0.03802068102899616, + "acc_norm": 0.37423312883435583, + "acc_norm_stderr": 0.03802068102899616 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2654320987654321, + "acc_stderr": 0.024569223600460845, + "acc_norm": 0.2654320987654321, + "acc_norm_stderr": 0.024569223600460845 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.23, + "acc_stderr": 0.042295258468165065, + "acc_norm": 0.23, + "acc_norm_stderr": 0.042295258468165065 + }, + 
"harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.2538860103626943, + "acc_stderr": 0.03141024780565318, + "acc_norm": 0.2538860103626943, + "acc_norm_stderr": 0.03141024780565318 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.039994238792813344, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.039994238792813344 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.28807339449541286, + "acc_stderr": 0.019416445892636015, + "acc_norm": 0.28807339449541286, + "acc_norm_stderr": 0.019416445892636015 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.20634920634920634, + "acc_stderr": 0.0361960452412425, + "acc_norm": 0.20634920634920634, + "acc_norm_stderr": 0.0361960452412425 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.025646863097137908, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.025646863097137908 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.38016528925619836, + "acc_stderr": 0.04431324501968431, + "acc_norm": 0.38016528925619836, + "acc_norm_stderr": 0.04431324501968431 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.2236842105263158, + "acc_stderr": 0.03391160934343602, + "acc_norm": 0.2236842105263158, + "acc_norm_stderr": 0.03391160934343602 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.01784808957491323, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.01784808957491323 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.25886524822695034, + "acc_stderr": 0.026129572527180848, + "acc_norm": 0.25886524822695034, + "acc_norm_stderr": 0.026129572527180848 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.30357142857142855, + "acc_stderr": 0.04364226155841044, + 
"acc_norm": 0.30357142857142855, + "acc_norm_stderr": 0.04364226155841044 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.26851851851851855, + "acc_stderr": 0.030225226160012386, + "acc_norm": 0.26851851851851855, + "acc_norm_stderr": 0.030225226160012386 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24581005586592178, + "acc_stderr": 0.014400296429225605, + "acc_norm": 0.24581005586592178, + "acc_norm_stderr": 0.014400296429225605 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.18, + "acc_stderr": 0.038612291966536955, + "acc_norm": 0.18, + "acc_norm_stderr": 0.038612291966536955 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.35661764705882354, + "acc_stderr": 0.029097209568411962, + "acc_norm": 0.35661764705882354, + "acc_norm_stderr": 0.029097209568411962 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.24489795918367346, + "acc_stderr": 0.027529637440174913, + "acc_norm": 0.24489795918367346, + "acc_norm_stderr": 0.027529637440174913 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.3291139240506329, + "acc_stderr": 0.03058732629470236, + "acc_norm": 0.3291139240506329, + "acc_norm_stderr": 0.03058732629470236 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2522816166883963, + "acc_stderr": 0.011092789056875248, + "acc_norm": 0.2522816166883963, + "acc_norm_stderr": 0.011092789056875248 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.031321798030832904, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.031321798030832904 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4, + "acc_stderr": 0.03825460278380026, + "acc_norm": 0.4, + "acc_norm_stderr": 0.03825460278380026 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 
0.2692778457772338, + "mc1_stderr": 0.015528566637087288, + "mc2": 0.4755864114164748, + "mc2_stderr": 0.016657423214439165 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.1912632821723731, + "acc_stderr": 0.013521790445859333, + "acc_norm": 0.3659976387249115, + "acc_norm_stderr": 0.016561489664895686 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Jaewoo1/Platypus7B_Follow_FT", + "model_sha": "ac5c77ab817d2d9b0a4f3fc7c609dce3770428d8", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Jaewoo1/Platypus7B_Follow_LoRA/result_2023-10-22 15:04:14.json b/Jaewoo1/Platypus7B_Follow_LoRA/result_2023-10-22 15:04:14.json new file mode 100644 index 0000000000000000000000000000000000000000..84ed866e2ad5042a51e9082891296c28c7bb199c --- /dev/null +++ b/Jaewoo1/Platypus7B_Follow_LoRA/result_2023-10-22 15:04:14.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.27986348122866894, + "acc_stderr": 0.013119040897725923, + "acc_norm": 0.3506825938566553, + "acc_norm_stderr": 0.013944635930726089 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3648675562636925, + 
"acc_stderr": 0.004804091708812553, + "acc_norm": 0.4856602270464051, + "acc_norm_stderr": 0.004987728900897601 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.40350877192982454, + "acc_stderr": 0.03762738699917055, + "acc_norm": 0.40350877192982454, + "acc_norm_stderr": 0.03762738699917055 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.32038834951456313, + "acc_stderr": 0.0462028408228004, + "acc_norm": 0.32038834951456313, + "acc_norm_stderr": 0.0462028408228004 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.017268607560005776, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.017268607560005776 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.35555555555555557, + "acc_stderr": 0.04135176749720386, + "acc_norm": 0.35555555555555557, + "acc_norm_stderr": 0.04135176749720386 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2765957446808511, + "acc_stderr": 0.029241883869628817, + "acc_norm": 0.2765957446808511, + "acc_norm_stderr": 0.029241883869628817 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.30120481927710846, + "acc_stderr": 0.03571609230053481, + "acc_norm": 0.30120481927710846, + "acc_norm_stderr": 0.03571609230053481 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3987138263665595, + "acc_stderr": 0.0278093225857745, + "acc_norm": 0.3987138263665595, + "acc_norm_stderr": 0.0278093225857745 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3811659192825112, + "acc_stderr": 0.03259625118416828, + "acc_norm": 0.3811659192825112, + "acc_norm_stderr": 0.03259625118416828 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.46564885496183206, + "acc_stderr": 0.043749285605997376, + "acc_norm": 0.46564885496183206, + "acc_norm_stderr": 0.043749285605997376 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 
0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3838383838383838, + "acc_stderr": 0.03464881675016339, + "acc_norm": 0.3838383838383838, + "acc_norm_stderr": 0.03464881675016339 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4, + "acc_stderr": 0.04082482904638628, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04082482904638628 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.04220773659171453, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.04220773659171453 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.03156663099215416, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.03156663099215416 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.28717948717948716, + "acc_stderr": 0.022939925418530613, + "acc_norm": 0.28717948717948716, + "acc_norm_stderr": 0.022939925418530613 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.41, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.41, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.22, + "acc_stderr": 0.041633319989322695, + "acc_norm": 0.22, + "acc_norm_stderr": 0.041633319989322695 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.04766075165356461, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.04766075165356461 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.23645320197044334, + "acc_stderr": 0.029896114291733555, + "acc_norm": 0.23645320197044334, + "acc_norm_stderr": 0.029896114291733555 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.38387096774193546, + "acc_stderr": 0.027666182075539652, + "acc_norm": 0.38387096774193546, + "acc_norm_stderr": 0.027666182075539652 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 
0.49572649572649574, + "acc_stderr": 0.032754892643821316, + "acc_norm": 0.49572649572649574, + "acc_norm_stderr": 0.032754892643821316 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3320754716981132, + "acc_stderr": 0.02898545565233439, + "acc_norm": 0.3320754716981132, + "acc_norm_stderr": 0.02898545565233439 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.39090909090909093, + "acc_stderr": 0.04673752333670237, + "acc_norm": 0.39090909090909093, + "acc_norm_stderr": 0.04673752333670237 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.22592592592592592, + "acc_stderr": 0.02549753263960955, + "acc_norm": 0.22592592592592592, + "acc_norm_stderr": 0.02549753263960955 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + "acc_stderr": 0.037345356767871984, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.037345356767871984 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.4527363184079602, + "acc_stderr": 0.035197027175769155, + "acc_norm": 0.4527363184079602, + "acc_norm_stderr": 0.035197027175769155 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2947976878612717, + "acc_stderr": 0.03476599607516478, + "acc_norm": 0.2947976878612717, + "acc_norm_stderr": 0.03476599607516478 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.24338624338624337, + "acc_stderr": 0.022101128787415412, + "acc_norm": 0.24338624338624337, + "acc_norm_stderr": 0.022101128787415412 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3402777777777778, + "acc_stderr": 0.039621355734862175, + "acc_norm": 0.3402777777777778, + "acc_norm_stderr": 0.039621355734862175 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932268, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932268 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.41, + "acc_stderr": 0.04943110704237103, + "acc_norm": 0.41, + "acc_norm_stderr": 0.04943110704237103 + }, + 
"harness|ko_mmlu_moral_disputes|5": { + "acc": 0.3699421965317919, + "acc_stderr": 0.025992472029306386, + "acc_norm": 0.3699421965317919, + "acc_norm_stderr": 0.025992472029306386 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3374233128834356, + "acc_stderr": 0.03714908409935575, + "acc_norm": 0.3374233128834356, + "acc_norm_stderr": 0.03714908409935575 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.38580246913580246, + "acc_stderr": 0.027085401226132143, + "acc_norm": 0.38580246913580246, + "acc_norm_stderr": 0.027085401226132143 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.38860103626943004, + "acc_stderr": 0.03517739796373132, + "acc_norm": 0.38860103626943004, + "acc_norm_stderr": 0.03517739796373132 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.04185774424022057, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.04185774424022057 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.4018348623853211, + "acc_stderr": 0.021020106172997013, + "acc_norm": 0.4018348623853211, + "acc_norm_stderr": 0.021020106172997013 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.039325376803928704, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.039325376803928704 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.43137254901960786, + "acc_stderr": 0.028358956313423556, + "acc_norm": 0.43137254901960786, + "acc_norm_stderr": 0.028358956313423556 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.49586776859504134, + "acc_stderr": 0.04564198767432754, + "acc_norm": 0.49586776859504134, + 
"acc_norm_stderr": 0.04564198767432754 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.27631578947368424, + "acc_stderr": 0.03639057569952925, + "acc_norm": 0.27631578947368424, + "acc_norm_stderr": 0.03639057569952925 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3202614379084967, + "acc_stderr": 0.01887568293806944, + "acc_norm": 0.3202614379084967, + "acc_norm_stderr": 0.01887568293806944 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.30141843971631205, + "acc_stderr": 0.02737412888263115, + "acc_norm": 0.30141843971631205, + "acc_norm_stderr": 0.02737412888263115 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.26785714285714285, + "acc_stderr": 0.04203277291467762, + "acc_norm": 0.26785714285714285, + "acc_norm_stderr": 0.04203277291467762 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.32407407407407407, + "acc_stderr": 0.03191923445686186, + "acc_norm": 0.32407407407407407, + "acc_norm_stderr": 0.03191923445686186 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24692737430167597, + "acc_stderr": 0.014422292204808852, + "acc_norm": 0.24692737430167597, + "acc_norm_stderr": 0.014422292204808852 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.41911764705882354, + "acc_stderr": 0.02997280717046463, + "acc_norm": 0.41911764705882354, + "acc_norm_stderr": 0.02997280717046463 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.33877551020408164, + "acc_stderr": 0.030299506562154185, + "acc_norm": 0.33877551020408164, + "acc_norm_stderr": 0.030299506562154185 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 
0.4810126582278481, + "acc_stderr": 0.03252375148090448, + "acc_norm": 0.4810126582278481, + "acc_norm_stderr": 0.03252375148090448 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3135593220338983, + "acc_stderr": 0.011849234291459324, + "acc_norm": 0.3135593220338983, + "acc_norm_stderr": 0.011849234291459324 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.39215686274509803, + "acc_stderr": 0.03426712349247272, + "acc_norm": 0.39215686274509803, + "acc_norm_stderr": 0.03426712349247272 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3696969696969697, + "acc_stderr": 0.03769430314512568, + "acc_norm": 0.3696969696969697, + "acc_norm_stderr": 0.03769430314512568 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24112607099143207, + "mc1_stderr": 0.014974827279752332, + "mc2": 0.3857319099407924, + "mc2_stderr": 0.015181937276962347 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2066115702479339, + "acc_stderr": 0.013919866463909341, + "acc_norm": 0.2987012987012987, + "acc_norm_stderr": 0.015735657391438278 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + 
"harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Jaewoo1/Platypus7B_Follow_LoRA", + "model_sha": "b963d09e5db0e791858e56e3fafac7e066328014", + "model_dtype": "torch.float16", + "lighteval_sha": "", + 
"num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/JaeyeonKang/CCK-v1.0.0-DPO/result_2024-01-04 09:09:53.json b/JaeyeonKang/CCK-v1.0.0-DPO/result_2024-01-04 09:09:53.json new file mode 100644 index 0000000000000000000000000000000000000000..f0746e82027e8322ded6d166541a24d1e6413a4d --- /dev/null +++ b/JaeyeonKang/CCK-v1.0.0-DPO/result_2024-01-04 09:09:53.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.46245733788395904, + "acc_stderr": 0.014570144495075574, + "acc_norm": 0.5247440273037542, + "acc_norm_stderr": 0.014593487694937743 + }, + "harness|ko_hellaswag|10": { + "acc": 0.45598486357299345, + "acc_stderr": 0.004970410081009441, + "acc_norm": 0.6325433180641307, + "acc_norm_stderr": 0.0048112699754506005 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5672514619883041, + "acc_stderr": 0.03799978644370607, + "acc_norm": 0.5672514619883041, + "acc_norm_stderr": 0.03799978644370607 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6213592233009708, + "acc_stderr": 0.04802694698258974, + "acc_norm": 0.6213592233009708, + "acc_norm_stderr": 0.04802694698258974 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6526181353767561, + "acc_stderr": 0.01702667174865573, + "acc_norm": 0.6526181353767561, + "acc_norm_stderr": 0.01702667174865573 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45925925925925926, + "acc_stderr": 0.04304979692464244, + "acc_norm": 0.45925925925925926, + "acc_norm_stderr": 0.04304979692464244 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.19, + "acc_stderr": 0.03942772444036623, + "acc_norm": 0.19, + "acc_norm_stderr": 0.03942772444036623 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4765957446808511, + "acc_stderr": 0.03265019475033583, + "acc_norm": 0.4765957446808511, + "acc_norm_stderr": 0.03265019475033583 + }, + "harness|ko_mmlu_virology|5": { + "acc": 
0.4578313253012048, + "acc_stderr": 0.03878626771002361, + "acc_norm": 0.4578313253012048, + "acc_norm_stderr": 0.03878626771002361 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6109324758842444, + "acc_stderr": 0.027690337536485376, + "acc_norm": 0.6109324758842444, + "acc_norm_stderr": 0.027690337536485376 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5650224215246636, + "acc_stderr": 0.033272833702713445, + "acc_norm": 0.5650224215246636, + "acc_norm_stderr": 0.033272833702713445 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6183206106870229, + "acc_stderr": 0.042607351576445594, + "acc_norm": 0.6183206106870229, + "acc_norm_stderr": 0.042607351576445594 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956909, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956909 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.702020202020202, + "acc_stderr": 0.032586303838365555, + "acc_norm": 0.702020202020202, + "acc_norm_stderr": 0.032586303838365555 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4413793103448276, + "acc_stderr": 0.04137931034482758, + "acc_norm": 0.4413793103448276, + "acc_norm_stderr": 0.04137931034482758 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.04617034827006716, + "acc_norm": 0.3137254901960784, + "acc_norm_stderr": 0.04617034827006716 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.592436974789916, + "acc_stderr": 0.03191863374478466, + "acc_norm": 0.592436974789916, + "acc_norm_stderr": 0.03191863374478466 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5820512820512821, + "acc_stderr": 0.025007329882461203, + "acc_norm": 0.5820512820512821, + "acc_norm_stderr": 0.025007329882461203 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + 
"harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6203703703703703, + "acc_stderr": 0.04691521224077742, + "acc_norm": 0.6203703703703703, + "acc_norm_stderr": 0.04691521224077742 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39408866995073893, + "acc_stderr": 0.03438157967036544, + "acc_norm": 0.39408866995073893, + "acc_norm_stderr": 0.03438157967036544 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5774193548387097, + "acc_stderr": 0.02810096472427264, + "acc_norm": 0.5774193548387097, + "acc_norm_stderr": 0.02810096472427264 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7777777777777778, + "acc_stderr": 0.027236013946196673, + "acc_norm": 0.7777777777777778, + "acc_norm_stderr": 0.027236013946196673 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5320754716981132, + "acc_stderr": 0.03070948699255655, + "acc_norm": 0.5320754716981132, + "acc_norm_stderr": 0.03070948699255655 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5909090909090909, + "acc_stderr": 0.04709306978661895, + "acc_norm": 0.5909090909090909, + "acc_norm_stderr": 0.04709306978661895 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.028317533496066482, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.028317533496066482 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.32450331125827814, + "acc_stderr": 0.03822746937658754, + "acc_norm": 0.32450331125827814, + "acc_norm_stderr": 0.03822746937658754 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.746268656716418, + "acc_stderr": 0.030769444967296014, + "acc_norm": 0.746268656716418, + "acc_norm_stderr": 0.030769444967296014 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4913294797687861, + "acc_stderr": 0.03811890988940412, + "acc_norm": 0.4913294797687861, 
+ "acc_norm_stderr": 0.03811890988940412 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.4312169312169312, + "acc_stderr": 0.025506481698138215, + "acc_norm": 0.4312169312169312, + "acc_norm_stderr": 0.025506481698138215 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4861111111111111, + "acc_stderr": 0.041795966175810016, + "acc_norm": 0.4861111111111111, + "acc_norm_stderr": 0.041795966175810016 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.63, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.63, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5838150289017341, + "acc_stderr": 0.026538189104705474, + "acc_norm": 0.5838150289017341, + "acc_norm_stderr": 0.026538189104705474 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5153374233128835, + "acc_stderr": 0.039265223787088445, + "acc_norm": 0.5153374233128835, + "acc_norm_stderr": 0.039265223787088445 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5895061728395061, + "acc_stderr": 0.027371350925124768, + "acc_norm": 0.5895061728395061, + "acc_norm_stderr": 0.027371350925124768 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7305699481865285, + "acc_stderr": 0.03201867122877794, + "acc_norm": 0.7305699481865285, + "acc_norm_stderr": 0.03201867122877794 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.45614035087719296, + "acc_stderr": 0.046854730419077895, + "acc_norm": 0.45614035087719296, + "acc_norm_stderr": 0.046854730419077895 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6623853211009174, + "acc_stderr": 0.020275265986638914, + "acc_norm": 
0.6623853211009174, + "acc_norm_stderr": 0.020275265986638914 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4126984126984127, + "acc_stderr": 0.04403438954768177, + "acc_norm": 0.4126984126984127, + "acc_norm_stderr": 0.04403438954768177 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5588235294117647, + "acc_stderr": 0.028431095444176643, + "acc_norm": 0.5588235294117647, + "acc_norm_stderr": 0.028431095444176643 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.61, + "acc_stderr": 0.049020713000019756, + "acc_norm": 0.61, + "acc_norm_stderr": 0.049020713000019756 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7272727272727273, + "acc_stderr": 0.04065578140908705, + "acc_norm": 0.7272727272727273, + "acc_norm_stderr": 0.04065578140908705 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.48026315789473684, + "acc_stderr": 0.04065771002562605, + "acc_norm": 0.48026315789473684, + "acc_norm_stderr": 0.04065771002562605 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4918300653594771, + "acc_stderr": 0.020225134343057265, + "acc_norm": 0.4918300653594771, + "acc_norm_stderr": 0.020225134343057265 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.41843971631205673, + "acc_stderr": 0.02942799403941999, + "acc_norm": 0.41843971631205673, + "acc_norm_stderr": 0.02942799403941999 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.39285714285714285, + "acc_stderr": 0.04635550135609976, + "acc_norm": 0.39285714285714285, + "acc_norm_stderr": 0.04635550135609976 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5185185185185185, + "acc_stderr": 0.03407632093854054, + "acc_norm": 0.5185185185185185, + "acc_norm_stderr": 0.03407632093854054 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.30502793296089387, + "acc_stderr": 0.015398723510916715, + "acc_norm": 0.30502793296089387, + "acc_norm_stderr": 0.015398723510916715 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 
0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.62, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.62, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5073529411764706, + "acc_stderr": 0.030369552523902173, + "acc_norm": 0.5073529411764706, + "acc_norm_stderr": 0.030369552523902173 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5836734693877551, + "acc_stderr": 0.03155782816556165, + "acc_norm": 0.5836734693877551, + "acc_norm_stderr": 0.03155782816556165 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7552742616033755, + "acc_stderr": 0.02798569938703641, + "acc_norm": 0.7552742616033755, + "acc_norm_stderr": 0.02798569938703641 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.4282920469361147, + "acc_stderr": 0.012638223880313175, + "acc_norm": 0.4282920469361147, + "acc_norm_stderr": 0.012638223880313175 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6715686274509803, + "acc_stderr": 0.03296245110172229, + "acc_norm": 0.6715686274509803, + "acc_norm_stderr": 0.03296245110172229 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.7212121212121212, + "acc_stderr": 0.03501438706296781, + "acc_norm": 0.7212121212121212, + "acc_norm_stderr": 0.03501438706296781 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.4749082007343941, + "mc1_stderr": 0.017481446804103996, + "mc2": 0.6332905645893946, + "mc2_stderr": 0.015824976924307316 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5938606847697757, + "acc_stderr": 0.0168847495031914, + "acc_norm": 0.6103896103896104, + "acc_norm_stderr": 0.01676616167189351 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + 
"harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 
1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "JaeyeonKang/CCK-v1.0.0-DPO", + "model_sha": "e33c9c9dc96d5fe2ffdb910640925e02c236dae8", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/JaeyeonKang/CCK-v1.0.0-DPOM/result_2024-01-04 09:58:29.json b/JaeyeonKang/CCK-v1.0.0-DPOM/result_2024-01-04 09:58:29.json new file mode 100644 index 0000000000000000000000000000000000000000..73fcfeae9bc480440e1ff2aa1e54af798b88e7f1 --- /dev/null +++ b/JaeyeonKang/CCK-v1.0.0-DPOM/result_2024-01-04 09:58:29.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4138225255972696, + "acc_stderr": 0.014392730009221007, + "acc_norm": 0.4863481228668942, + "acc_norm_stderr": 0.014605943429860957 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4230233021310496, + "acc_stderr": 0.004930293787545614, + "acc_norm": 0.5758812985461064, + "acc_norm_stderr": 0.004931984642695337 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5730994152046783, + "acc_stderr": 0.03793620616529916, + "acc_norm": 0.5730994152046783, + "acc_norm_stderr": 0.03793620616529916 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6407766990291263, + "acc_stderr": 0.04750458399041697, + "acc_norm": 0.6407766990291263, + "acc_norm_stderr": 0.04750458399041697 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.648786717752235, + 
"acc_stderr": 0.017069982051499427, + "acc_norm": 0.648786717752235, + "acc_norm_stderr": 0.017069982051499427 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4740740740740741, + "acc_stderr": 0.04313531696750573, + "acc_norm": 0.4740740740740741, + "acc_norm_stderr": 0.04313531696750573 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.19, + "acc_stderr": 0.03942772444036623, + "acc_norm": 0.19, + "acc_norm_stderr": 0.03942772444036623 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4723404255319149, + "acc_stderr": 0.03263597118409769, + "acc_norm": 0.4723404255319149, + "acc_norm_stderr": 0.03263597118409769 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4759036144578313, + "acc_stderr": 0.03887971849597264, + "acc_norm": 0.4759036144578313, + "acc_norm_stderr": 0.03887971849597264 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6205787781350482, + "acc_stderr": 0.02755994980234782, + "acc_norm": 0.6205787781350482, + "acc_norm_stderr": 0.02755994980234782 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5829596412556054, + "acc_stderr": 0.03309266936071721, + "acc_norm": 0.5829596412556054, + "acc_norm_stderr": 0.03309266936071721 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6030534351145038, + "acc_stderr": 0.04291135671009224, + "acc_norm": 0.6030534351145038, + "acc_norm_stderr": 0.04291135671009224 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.702020202020202, + "acc_stderr": 0.032586303838365555, + "acc_norm": 0.702020202020202, + "acc_norm_stderr": 0.032586303838365555 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4413793103448276, + "acc_stderr": 0.04137931034482758, + "acc_norm": 0.4413793103448276, + "acc_norm_stderr": 0.04137931034482758 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3431372549019608, + 
"acc_stderr": 0.047240073523838876, + "acc_norm": 0.3431372549019608, + "acc_norm_stderr": 0.047240073523838876 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6092436974789915, + "acc_stderr": 0.031693802357129965, + "acc_norm": 0.6092436974789915, + "acc_norm_stderr": 0.031693802357129965 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5487179487179488, + "acc_stderr": 0.02523038123893484, + "acc_norm": 0.5487179487179488, + "acc_norm_stderr": 0.02523038123893484 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6388888888888888, + "acc_stderr": 0.04643454608906275, + "acc_norm": 0.6388888888888888, + "acc_norm_stderr": 0.04643454608906275 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39901477832512317, + "acc_stderr": 0.03445487686264715, + "acc_norm": 0.39901477832512317, + "acc_norm_stderr": 0.03445487686264715 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5838709677419355, + "acc_stderr": 0.02804098138076153, + "acc_norm": 0.5838709677419355, + "acc_norm_stderr": 0.02804098138076153 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7649572649572649, + "acc_stderr": 0.027778835904935427, + "acc_norm": 0.7649572649572649, + "acc_norm_stderr": 0.027778835904935427 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5358490566037736, + "acc_stderr": 0.030693675018458006, + "acc_norm": 0.5358490566037736, + "acc_norm_stderr": 0.030693675018458006 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6, + "acc_stderr": 0.0469237132203465, + "acc_norm": 0.6, + "acc_norm_stderr": 0.0469237132203465 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 
0.3074074074074074, + "acc_stderr": 0.028133252578815642, + "acc_norm": 0.3074074074074074, + "acc_norm_stderr": 0.028133252578815642 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3973509933774834, + "acc_stderr": 0.03995524007681682, + "acc_norm": 0.3973509933774834, + "acc_norm_stderr": 0.03995524007681682 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7313432835820896, + "acc_stderr": 0.031343283582089536, + "acc_norm": 0.7313432835820896, + "acc_norm_stderr": 0.031343283582089536 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5028901734104047, + "acc_stderr": 0.038124005659748335, + "acc_norm": 0.5028901734104047, + "acc_norm_stderr": 0.038124005659748335 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.02559185776138219, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.02559185776138219 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4861111111111111, + "acc_stderr": 0.041795966175810016, + "acc_norm": 0.4861111111111111, + "acc_norm_stderr": 0.041795966175810016 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.63, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.63, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.569364161849711, + "acc_stderr": 0.02665880027367238, + "acc_norm": 0.569364161849711, + "acc_norm_stderr": 0.02665880027367238 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5153374233128835, + "acc_stderr": 0.039265223787088445, + "acc_norm": 0.5153374233128835, + "acc_norm_stderr": 0.039265223787088445 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6018518518518519, + "acc_stderr": 0.027237415094592488, + "acc_norm": 0.6018518518518519, + "acc_norm_stderr": 0.027237415094592488 + }, + 
"harness|ko_mmlu_college_mathematics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.694300518134715, + "acc_stderr": 0.033248379397581594, + "acc_norm": 0.694300518134715, + "acc_norm_stderr": 0.033248379397581594 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.4473684210526316, + "acc_stderr": 0.046774730044912005, + "acc_norm": 0.4473684210526316, + "acc_norm_stderr": 0.046774730044912005 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6605504587155964, + "acc_stderr": 0.02030210934266235, + "acc_norm": 0.6605504587155964, + "acc_norm_stderr": 0.02030210934266235 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4523809523809524, + "acc_stderr": 0.044518079590553275, + "acc_norm": 0.4523809523809524, + "acc_norm_stderr": 0.044518079590553275 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5686274509803921, + "acc_stderr": 0.02835895631342355, + "acc_norm": 0.5686274509803921, + "acc_norm_stderr": 0.02835895631342355 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.58, + "acc_stderr": 0.04960449637488583, + "acc_norm": 0.58, + "acc_norm_stderr": 0.04960449637488583 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7355371900826446, + "acc_stderr": 0.04026187527591206, + "acc_norm": 0.7355371900826446, + "acc_norm_stderr": 0.04026187527591206 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5328947368421053, + "acc_stderr": 0.040601270352363966, + "acc_norm": 0.5328947368421053, + "acc_norm_stderr": 0.040601270352363966 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5032679738562091, + "acc_stderr": 0.020227402794434864, + "acc_norm": 0.5032679738562091, + "acc_norm_stderr": 0.020227402794434864 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.41843971631205673, + "acc_stderr": 0.02942799403941999, + "acc_norm": 0.41843971631205673, + 
"acc_norm_stderr": 0.02942799403941999 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4017857142857143, + "acc_stderr": 0.04653333146973646, + "acc_norm": 0.4017857142857143, + "acc_norm_stderr": 0.04653333146973646 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5509259259259259, + "acc_stderr": 0.033922384053216154, + "acc_norm": 0.5509259259259259, + "acc_norm_stderr": 0.033922384053216154 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.3016759776536313, + "acc_stderr": 0.015350767572220285, + "acc_norm": 0.3016759776536313, + "acc_norm_stderr": 0.015350767572220285 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.65, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.65, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5183823529411765, + "acc_stderr": 0.03035230339535196, + "acc_norm": 0.5183823529411765, + "acc_norm_stderr": 0.03035230339535196 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5959183673469388, + "acc_stderr": 0.031414708025865885, + "acc_norm": 0.5959183673469388, + "acc_norm_stderr": 0.031414708025865885 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7510548523206751, + "acc_stderr": 0.028146970599422647, + "acc_norm": 0.7510548523206751, + "acc_norm_stderr": 0.028146970599422647 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.4198174706649283, + "acc_stderr": 0.01260496081608737, + "acc_norm": 0.4198174706649283, + "acc_norm_stderr": 0.01260496081608737 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6715686274509803, + "acc_stderr": 0.03296245110172229, + "acc_norm": 0.6715686274509803, + "acc_norm_stderr": 0.03296245110172229 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 
0.703030303030303, + "acc_stderr": 0.035679697722680474, + "acc_norm": 0.703030303030303, + "acc_norm_stderr": 0.035679697722680474 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.4222766217870257, + "mc1_stderr": 0.017290733254248167, + "mc2": 0.6005885460566108, + "mc2_stderr": 0.016071484830998732 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.6257378984651711, + "acc_stderr": 0.016637917789798746, + "acc_norm": 0.6375442739079102, + "acc_norm_stderr": 0.016527131240453696 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + 
"harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "JaeyeonKang/CCK-v1.0.0-DPOM", + "model_sha": "f9c1e428a37fd392a37abc4461fd286978aedc99", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/JaeyeonKang/CCK-v1.0.0-M/result_2024-01-04 09:08:53.json b/JaeyeonKang/CCK-v1.0.0-M/result_2024-01-04 09:08:53.json new file mode 100644 index 0000000000000000000000000000000000000000..003ffacf8cd49ab5dcc2896911a494ccc9f6f4f6 --- /dev/null +++ b/JaeyeonKang/CCK-v1.0.0-M/result_2024-01-04 09:08:53.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.46928327645051193, + "acc_stderr": 0.014583792546304037, + 
"acc_norm": 0.5307167235494881, + "acc_norm_stderr": 0.014583792546304037 + }, + "harness|ko_hellaswag|10": { + "acc": 0.474407488548098, + "acc_stderr": 0.004983240744101368, + "acc_norm": 0.6338378809002191, + "acc_norm_stderr": 0.004807699539973412 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5847953216374269, + "acc_stderr": 0.03779275945503201, + "acc_norm": 0.5847953216374269, + "acc_norm_stderr": 0.03779275945503201 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6796116504854369, + "acc_stderr": 0.04620284082280041, + "acc_norm": 0.6796116504854369, + "acc_norm_stderr": 0.04620284082280041 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6768837803320562, + "acc_stderr": 0.016723726512343044, + "acc_norm": 0.6768837803320562, + "acc_norm_stderr": 0.016723726512343044 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.043163785995113245, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.043163785995113245 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4765957446808511, + "acc_stderr": 0.03265019475033583, + "acc_norm": 0.4765957446808511, + "acc_norm_stderr": 0.03265019475033583 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.45180722891566266, + "acc_stderr": 0.03874371556587953, + "acc_norm": 0.45180722891566266, + "acc_norm_stderr": 0.03874371556587953 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6559485530546624, + "acc_stderr": 0.026981478043648036, + "acc_norm": 0.6559485530546624, + "acc_norm_stderr": 0.026981478043648036 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5874439461883408, + "acc_stderr": 0.03304062175449297, + "acc_norm": 0.5874439461883408, + "acc_norm_stderr": 0.03304062175449297 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.648854961832061, + "acc_stderr": 0.0418644516301375, 
+ "acc_norm": 0.648854961832061, + "acc_norm_stderr": 0.0418644516301375 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956914, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956914 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7222222222222222, + "acc_stderr": 0.031911782267135445, + "acc_norm": 0.7222222222222222, + "acc_norm_stderr": 0.031911782267135445 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.503448275862069, + "acc_stderr": 0.0416656757710158, + "acc_norm": 0.503448275862069, + "acc_norm_stderr": 0.0416656757710158 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.04440521906179328, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.04440521906179328 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6470588235294118, + "acc_stderr": 0.031041941304059285, + "acc_norm": 0.6470588235294118, + "acc_norm_stderr": 0.031041941304059285 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5666666666666667, + "acc_stderr": 0.0251246535258851, + "acc_norm": 0.5666666666666667, + "acc_norm_stderr": 0.0251246535258851 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.63, + "acc_stderr": 0.04852365870939098, + "acc_norm": 0.63, + "acc_norm_stderr": 0.04852365870939098 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6574074074074074, + "acc_stderr": 0.045879047413018105, + "acc_norm": 0.6574074074074074, + "acc_norm_stderr": 0.045879047413018105 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.43349753694581283, + "acc_stderr": 0.034867317274198714, + "acc_norm": 0.43349753694581283, + "acc_norm_stderr": 0.034867317274198714 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6161290322580645, + 
"acc_stderr": 0.027666182075539662, + "acc_norm": 0.6161290322580645, + "acc_norm_stderr": 0.027666182075539662 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.782051282051282, + "acc_stderr": 0.02704685763071666, + "acc_norm": 0.782051282051282, + "acc_norm_stderr": 0.02704685763071666 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5358490566037736, + "acc_stderr": 0.030693675018458006, + "acc_norm": 0.5358490566037736, + "acc_norm_stderr": 0.030693675018458006 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5909090909090909, + "acc_stderr": 0.04709306978661895, + "acc_norm": 0.5909090909090909, + "acc_norm_stderr": 0.04709306978661895 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.028317533496066485, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.028317533496066485 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3443708609271523, + "acc_stderr": 0.03879687024073327, + "acc_norm": 0.3443708609271523, + "acc_norm_stderr": 0.03879687024073327 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7164179104477612, + "acc_stderr": 0.03187187537919795, + "acc_norm": 0.7164179104477612, + "acc_norm_stderr": 0.03187187537919795 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5202312138728323, + "acc_stderr": 0.03809342081273957, + "acc_norm": 0.5202312138728323, + "acc_norm_stderr": 0.03809342081273957 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.4126984126984127, + "acc_stderr": 0.02535574126305526, + "acc_norm": 0.4126984126984127, + "acc_norm_stderr": 0.02535574126305526 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.04155319955593146, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.04155319955593146 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.35, + "acc_stderr": 0.04793724854411019, + "acc_norm": 0.35, + "acc_norm_stderr": 0.04793724854411019 + }, + 
"harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.71, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.71, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.6011560693641619, + "acc_stderr": 0.026362437574546545, + "acc_norm": 0.6011560693641619, + "acc_norm_stderr": 0.026362437574546545 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5828220858895705, + "acc_stderr": 0.038741028598180814, + "acc_norm": 0.5828220858895705, + "acc_norm_stderr": 0.038741028598180814 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5740740740740741, + "acc_stderr": 0.02751374728437942, + "acc_norm": 0.5740740740740741, + "acc_norm_stderr": 0.02751374728437942 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7772020725388601, + "acc_stderr": 0.03003114797764154, + "acc_norm": 0.7772020725388601, + "acc_norm_stderr": 0.03003114797764154 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.4649122807017544, + "acc_stderr": 0.046920083813689104, + "acc_norm": 0.4649122807017544, + "acc_norm_stderr": 0.046920083813689104 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6880733944954128, + "acc_stderr": 0.019862967976707245, + "acc_norm": 0.6880733944954128, + "acc_norm_stderr": 0.019862967976707245 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.04426266681379909, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.04426266681379909 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5947712418300654, + "acc_stderr": 0.02811092849280907, + "acc_norm": 0.5947712418300654, + "acc_norm_stderr": 0.02811092849280907 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + 
"harness|ko_mmlu_international_law|5": { + "acc": 0.7355371900826446, + "acc_stderr": 0.04026187527591206, + "acc_norm": 0.7355371900826446, + "acc_norm_stderr": 0.04026187527591206 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5986842105263158, + "acc_stderr": 0.039889037033362836, + "acc_norm": 0.5986842105263158, + "acc_norm_stderr": 0.039889037033362836 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5228758169934641, + "acc_stderr": 0.02020665318788479, + "acc_norm": 0.5228758169934641, + "acc_norm_stderr": 0.02020665318788479 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.41843971631205673, + "acc_stderr": 0.02942799403941999, + "acc_norm": 0.41843971631205673, + "acc_norm_stderr": 0.02942799403941999 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.41964285714285715, + "acc_stderr": 0.04684099321077106, + "acc_norm": 0.41964285714285715, + "acc_norm_stderr": 0.04684099321077106 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5324074074074074, + "acc_stderr": 0.034028015813589656, + "acc_norm": 0.5324074074074074, + "acc_norm_stderr": 0.034028015813589656 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2335195530726257, + "acc_stderr": 0.014149575348976267, + "acc_norm": 0.2335195530726257, + "acc_norm_stderr": 0.014149575348976267 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.65, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.65, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5183823529411765, + "acc_stderr": 0.03035230339535196, + "acc_norm": 0.5183823529411765, + "acc_norm_stderr": 0.03035230339535196 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5836734693877551, + "acc_stderr": 0.03155782816556165, + "acc_norm": 
0.5836734693877551, + "acc_norm_stderr": 0.03155782816556165 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7637130801687764, + "acc_stderr": 0.027652153144159267, + "acc_norm": 0.7637130801687764, + "acc_norm_stderr": 0.027652153144159267 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.4165580182529335, + "acc_stderr": 0.012591153245057388, + "acc_norm": 0.4165580182529335, + "acc_norm_stderr": 0.012591153245057388 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6764705882352942, + "acc_stderr": 0.032834720561085606, + "acc_norm": 0.6764705882352942, + "acc_norm_stderr": 0.032834720561085606 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6909090909090909, + "acc_stderr": 0.036085410115739666, + "acc_norm": 0.6909090909090909, + "acc_norm_stderr": 0.036085410115739666 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3537331701346389, + "mc1_stderr": 0.01673781435884615, + "mc2": 0.5232581175829188, + "mc2_stderr": 0.0157222979804249 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5537190082644629, + "acc_stderr": 0.017090852631668332, + "acc_norm": 0.5749704840613932, + "acc_norm_stderr": 0.016996016308362883 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + 
"harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "JaeyeonKang/CCK-v1.0.0-M", + "model_sha": 
"3bc3789502ffc176237e798ad3a4511cd1bc546b", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/JaeyeonKang/CCK-v1.0.0/result_2024-01-04 06:27:52.json b/JaeyeonKang/CCK-v1.0.0/result_2024-01-04 06:27:52.json new file mode 100644 index 0000000000000000000000000000000000000000..622b90c68390cc36372da46c0da439951ce5b1ba --- /dev/null +++ b/JaeyeonKang/CCK-v1.0.0/result_2024-01-04 06:27:52.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.46245733788395904, + "acc_stderr": 0.01457014449507558, + "acc_norm": 0.523037542662116, + "acc_norm_stderr": 0.014595873205358266 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4697271459868552, + "acc_stderr": 0.004980627287147588, + "acc_norm": 0.6201951802429795, + "acc_norm_stderr": 0.0048434625459435 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5789473684210527, + "acc_stderr": 0.03786720706234214, + "acc_norm": 0.5789473684210527, + "acc_norm_stderr": 0.03786720706234214 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6699029126213593, + "acc_stderr": 0.04656147110012352, + "acc_norm": 0.6699029126213593, + "acc_norm_stderr": 0.04656147110012352 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.016857391247472545, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.016857391247472545 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.043097329010363554, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.043097329010363554 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.451063829787234, + "acc_stderr": 0.032529096196131965, + "acc_norm": 0.451063829787234, + 
"acc_norm_stderr": 0.032529096196131965 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4457831325301205, + "acc_stderr": 0.03869543323472101, + "acc_norm": 0.4457831325301205, + "acc_norm_stderr": 0.03869543323472101 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6334405144694534, + "acc_stderr": 0.02736807824397162, + "acc_norm": 0.6334405144694534, + "acc_norm_stderr": 0.02736807824397162 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5829596412556054, + "acc_stderr": 0.03309266936071721, + "acc_norm": 0.5829596412556054, + "acc_norm_stderr": 0.03309266936071721 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6259541984732825, + "acc_stderr": 0.04243869242230523, + "acc_norm": 0.6259541984732825, + "acc_norm_stderr": 0.04243869242230523 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.53, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.53, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7171717171717171, + "acc_stderr": 0.03208779558786753, + "acc_norm": 0.7171717171717171, + "acc_norm_stderr": 0.03208779558786753 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4827586206896552, + "acc_stderr": 0.041641887201693775, + "acc_norm": 0.4827586206896552, + "acc_norm_stderr": 0.041641887201693775 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.04488482852329017, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.04488482852329017 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6176470588235294, + "acc_stderr": 0.031566630992154156, + "acc_norm": 0.6176470588235294, + "acc_norm_stderr": 0.031566630992154156 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.541025641025641, + "acc_stderr": 0.025265525491284295, + "acc_norm": 0.541025641025641, + "acc_norm_stderr": 0.025265525491284295 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.6, + "acc_stderr": 
0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6296296296296297, + "acc_stderr": 0.04668408033024931, + "acc_norm": 0.6296296296296297, + "acc_norm_stderr": 0.04668408033024931 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.45320197044334976, + "acc_stderr": 0.03502544650845872, + "acc_norm": 0.45320197044334976, + "acc_norm_stderr": 0.03502544650845872 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6, + "acc_stderr": 0.027869320571664625, + "acc_norm": 0.6, + "acc_norm_stderr": 0.027869320571664625 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7692307692307693, + "acc_stderr": 0.027601921381417607, + "acc_norm": 0.7692307692307693, + "acc_norm_stderr": 0.027601921381417607 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5132075471698113, + "acc_stderr": 0.030762134874500476, + "acc_norm": 0.5132075471698113, + "acc_norm_stderr": 0.030762134874500476 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5727272727272728, + "acc_stderr": 0.04738198703545483, + "acc_norm": 0.5727272727272728, + "acc_norm_stderr": 0.04738198703545483 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.028742040903948492, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.028742040903948492 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3576158940397351, + "acc_stderr": 0.03913453431177258, + "acc_norm": 0.3576158940397351, + "acc_norm_stderr": 0.03913453431177258 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6865671641791045, + "acc_stderr": 0.03280188205348642, + "acc_norm": 0.6865671641791045, + "acc_norm_stderr": 0.03280188205348642 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4913294797687861, + 
"acc_stderr": 0.03811890988940412, + "acc_norm": 0.4913294797687861, + "acc_norm_stderr": 0.03811890988940412 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3994708994708995, + "acc_stderr": 0.02522545028406788, + "acc_norm": 0.3994708994708995, + "acc_norm_stderr": 0.02522545028406788 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.04155319955593146, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.04155319955593146 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.73, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.73, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5924855491329479, + "acc_stderr": 0.0264545781469315, + "acc_norm": 0.5924855491329479, + "acc_norm_stderr": 0.0264545781469315 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5460122699386503, + "acc_stderr": 0.0391170190467718, + "acc_norm": 0.5460122699386503, + "acc_norm_stderr": 0.0391170190467718 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5709876543209876, + "acc_stderr": 0.027538925613470863, + "acc_norm": 0.5709876543209876, + "acc_norm_stderr": 0.027538925613470863 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7512953367875648, + "acc_stderr": 0.031195840877700293, + "acc_norm": 0.7512953367875648, + "acc_norm_stderr": 0.031195840877700293 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.4473684210526316, + "acc_stderr": 0.04677473004491199, + "acc_norm": 0.4473684210526316, + "acc_norm_stderr": 0.04677473004491199 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 
0.6788990825688074, + "acc_stderr": 0.020018149772733747, + "acc_norm": 0.6788990825688074, + "acc_norm_stderr": 0.020018149772733747 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.38095238095238093, + "acc_stderr": 0.043435254289490965, + "acc_norm": 0.38095238095238093, + "acc_norm_stderr": 0.043435254289490965 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5947712418300654, + "acc_stderr": 0.028110928492809075, + "acc_norm": 0.5947712418300654, + "acc_norm_stderr": 0.028110928492809075 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7024793388429752, + "acc_stderr": 0.04173349148083499, + "acc_norm": 0.7024793388429752, + "acc_norm_stderr": 0.04173349148083499 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.6052631578947368, + "acc_stderr": 0.039777499346220734, + "acc_norm": 0.6052631578947368, + "acc_norm_stderr": 0.039777499346220734 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5147058823529411, + "acc_stderr": 0.020219083895133917, + "acc_norm": 0.5147058823529411, + "acc_norm_stderr": 0.020219083895133917 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3900709219858156, + "acc_stderr": 0.02909767559946393, + "acc_norm": 0.3900709219858156, + "acc_norm_stderr": 0.02909767559946393 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.36607142857142855, + "acc_stderr": 0.04572372358737431, + "acc_norm": 0.36607142857142855, + "acc_norm_stderr": 0.04572372358737431 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.0340470532865388, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.0340470532865388 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2122905027932961, + "acc_stderr": 0.013676644685831733, + "acc_norm": 0.2122905027932961, + "acc_norm_stderr": 0.013676644685831733 
+ }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.64, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.64, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5, + "acc_stderr": 0.030372836961539352, + "acc_norm": 0.5, + "acc_norm_stderr": 0.030372836961539352 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5755102040816327, + "acc_stderr": 0.031642094879429414, + "acc_norm": 0.5755102040816327, + "acc_norm_stderr": 0.031642094879429414 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7468354430379747, + "acc_stderr": 0.0283046579430353, + "acc_norm": 0.7468354430379747, + "acc_norm_stderr": 0.0283046579430353 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3970013037809648, + "acc_stderr": 0.012496346982909554, + "acc_norm": 0.3970013037809648, + "acc_norm_stderr": 0.012496346982909554 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6764705882352942, + "acc_stderr": 0.032834720561085606, + "acc_norm": 0.6764705882352942, + "acc_norm_stderr": 0.032834720561085606 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6545454545454545, + "acc_stderr": 0.037131580674819135, + "acc_norm": 0.6545454545454545, + "acc_norm_stderr": 0.037131580674819135 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3157894736842105, + "mc1_stderr": 0.01627228795791693, + "mc2": 0.48289628695498954, + "mc2_stderr": 0.015622894442385828 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5289256198347108, + "acc_stderr": 0.01716156394991635, + "acc_norm": 0.5537190082644629, + "acc_norm_stderr": 0.017090852631668332 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + 
"harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 
1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "JaeyeonKang/CCK-v1.0.0", + "model_sha": "8de60cd20f19c5ccf8fabe223f41d13b159d5267", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/JaeyeonKang/CCK-v1.0.1-DPO/result_2024-01-04 21:14:48.json b/JaeyeonKang/CCK-v1.0.1-DPO/result_2024-01-04 21:14:48.json new file mode 100644 index 0000000000000000000000000000000000000000..27431c573f62a5ce2cf675efb6b37ce2517a34ac --- /dev/null +++ b/JaeyeonKang/CCK-v1.0.1-DPO/result_2024-01-04 21:14:48.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.47525597269624575, + "acc_stderr": 0.014593487694937736, + "acc_norm": 0.5401023890784983, + "acc_norm_stderr": 0.014564318856924848 + }, + "harness|ko_hellaswag|10": { + "acc": 0.46026687910774744, + "acc_stderr": 0.004974001515580958, + "acc_norm": 0.6369249153555069, + "acc_norm_stderr": 0.004799034356969394 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5380116959064327, + "acc_stderr": 0.03823727092882307, + "acc_norm": 0.5380116959064327, + "acc_norm_stderr": 0.03823727092882307 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6504854368932039, + "acc_stderr": 0.047211885060971716, + "acc_norm": 0.6504854368932039, + "acc_norm_stderr": 0.047211885060971716 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.644955300127714, + 
"acc_stderr": 0.017112085772772984, + "acc_norm": 0.644955300127714, + "acc_norm_stderr": 0.017112085772772984 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4888888888888889, + "acc_stderr": 0.04318275491977976, + "acc_norm": 0.4888888888888889, + "acc_norm_stderr": 0.04318275491977976 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.19, + "acc_stderr": 0.03942772444036623, + "acc_norm": 0.19, + "acc_norm_stderr": 0.03942772444036623 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4808510638297872, + "acc_stderr": 0.03266204299064677, + "acc_norm": 0.4808510638297872, + "acc_norm_stderr": 0.03266204299064677 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.45180722891566266, + "acc_stderr": 0.03874371556587953, + "acc_norm": 0.45180722891566266, + "acc_norm_stderr": 0.03874371556587953 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6141479099678456, + "acc_stderr": 0.027648149599751468, + "acc_norm": 0.6141479099678456, + "acc_norm_stderr": 0.027648149599751468 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5919282511210763, + "acc_stderr": 0.03298574607842822, + "acc_norm": 0.5919282511210763, + "acc_norm_stderr": 0.03298574607842822 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6259541984732825, + "acc_stderr": 0.04243869242230524, + "acc_norm": 0.6259541984732825, + "acc_norm_stderr": 0.04243869242230524 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7121212121212122, + "acc_stderr": 0.03225883512300993, + "acc_norm": 0.7121212121212122, + "acc_norm_stderr": 0.03225883512300993 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4413793103448276, + "acc_stderr": 0.04137931034482758, + "acc_norm": 0.4413793103448276, + "acc_norm_stderr": 0.04137931034482758 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.30392156862745096, 
+ "acc_stderr": 0.04576665403207762, + "acc_norm": 0.30392156862745096, + "acc_norm_stderr": 0.04576665403207762 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6134453781512605, + "acc_stderr": 0.03163145807552379, + "acc_norm": 0.6134453781512605, + "acc_norm_stderr": 0.03163145807552379 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5820512820512821, + "acc_stderr": 0.025007329882461203, + "acc_norm": 0.5820512820512821, + "acc_norm_stderr": 0.025007329882461203 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.62, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.62, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6388888888888888, + "acc_stderr": 0.04643454608906275, + "acc_norm": 0.6388888888888888, + "acc_norm_stderr": 0.04643454608906275 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3793103448275862, + "acc_stderr": 0.034139638059062345, + "acc_norm": 0.3793103448275862, + "acc_norm_stderr": 0.034139638059062345 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5709677419354838, + "acc_stderr": 0.028156036538233193, + "acc_norm": 0.5709677419354838, + "acc_norm_stderr": 0.028156036538233193 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.782051282051282, + "acc_stderr": 0.027046857630716657, + "acc_norm": 0.782051282051282, + "acc_norm_stderr": 0.027046857630716657 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.539622641509434, + "acc_stderr": 0.030676096599389184, + "acc_norm": 0.539622641509434, + "acc_norm_stderr": 0.030676096599389184 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5727272727272728, + "acc_stderr": 0.047381987035454834, + "acc_norm": 0.5727272727272728, + "acc_norm_stderr": 0.047381987035454834 + }, + 
"harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.028317533496066485, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.028317533496066485 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3443708609271523, + "acc_stderr": 0.03879687024073327, + "acc_norm": 0.3443708609271523, + "acc_norm_stderr": 0.03879687024073327 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7164179104477612, + "acc_stderr": 0.03187187537919795, + "acc_norm": 0.7164179104477612, + "acc_norm_stderr": 0.03187187537919795 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.49710982658959535, + "acc_stderr": 0.038124005659748335, + "acc_norm": 0.49710982658959535, + "acc_norm_stderr": 0.038124005659748335 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.43915343915343913, + "acc_stderr": 0.02555992055053101, + "acc_norm": 0.43915343915343913, + "acc_norm_stderr": 0.02555992055053101 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4791666666666667, + "acc_stderr": 0.041775789507399935, + "acc_norm": 0.4791666666666667, + "acc_norm_stderr": 0.041775789507399935 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.63, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.63, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5722543352601156, + "acc_stderr": 0.026636539741116093, + "acc_norm": 0.5722543352601156, + "acc_norm_stderr": 0.026636539741116093 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4723926380368098, + "acc_stderr": 0.03922378290610988, + "acc_norm": 0.4723926380368098, + "acc_norm_stderr": 0.03922378290610988 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5987654320987654, + "acc_stderr": 0.027272582849839796, + "acc_norm": 0.5987654320987654, + "acc_norm_stderr": 
0.027272582849839796 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6994818652849741, + "acc_stderr": 0.0330881859441575, + "acc_norm": 0.6994818652849741, + "acc_norm_stderr": 0.0330881859441575 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.43859649122807015, + "acc_stderr": 0.04668000738510455, + "acc_norm": 0.43859649122807015, + "acc_norm_stderr": 0.04668000738510455 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6807339449541284, + "acc_stderr": 0.01998782906975001, + "acc_norm": 0.6807339449541284, + "acc_norm_stderr": 0.01998782906975001 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4365079365079365, + "acc_stderr": 0.044359328928514664, + "acc_norm": 0.4365079365079365, + "acc_norm_stderr": 0.044359328928514664 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5751633986928104, + "acc_stderr": 0.028304576673141114, + "acc_norm": 0.5751633986928104, + "acc_norm_stderr": 0.028304576673141114 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.71900826446281, + "acc_stderr": 0.04103203830514511, + "acc_norm": 0.71900826446281, + "acc_norm_stderr": 0.04103203830514511 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.506578947368421, + "acc_stderr": 0.040685900502249704, + "acc_norm": 0.506578947368421, + "acc_norm_stderr": 0.040685900502249704 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.48366013071895425, + "acc_stderr": 0.020217030653186457, + "acc_norm": 0.48366013071895425, + "acc_norm_stderr": 0.020217030653186457 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.40425531914893614, + "acc_stderr": 0.029275532159704725, + "acc_norm": 
0.40425531914893614, + "acc_norm_stderr": 0.029275532159704725 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4375, + "acc_stderr": 0.04708567521880525, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.04708567521880525 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5370370370370371, + "acc_stderr": 0.03400603625538271, + "acc_norm": 0.5370370370370371, + "acc_norm_stderr": 0.03400603625538271 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.31843575418994413, + "acc_stderr": 0.015581008080360276, + "acc_norm": 0.31843575418994413, + "acc_norm_stderr": 0.015581008080360276 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.65, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.65, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5330882352941176, + "acc_stderr": 0.03030625772246831, + "acc_norm": 0.5330882352941176, + "acc_norm_stderr": 0.03030625772246831 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5714285714285714, + "acc_stderr": 0.031680911612338825, + "acc_norm": 0.5714285714285714, + "acc_norm_stderr": 0.031680911612338825 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7510548523206751, + "acc_stderr": 0.028146970599422647, + "acc_norm": 0.7510548523206751, + "acc_norm_stderr": 0.028146970599422647 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.4172099087353325, + "acc_stderr": 0.012593959992906426, + "acc_norm": 0.4172099087353325, + "acc_norm_stderr": 0.012593959992906426 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6617647058823529, + "acc_stderr": 0.0332057461294543, + "acc_norm": 0.6617647058823529, + "acc_norm_stderr": 0.0332057461294543 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 
0.7333333333333333, + "acc_stderr": 0.03453131801885415, + "acc_norm": 0.7333333333333333, + "acc_norm_stderr": 0.03453131801885415 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.4614443084455324, + "mc1_stderr": 0.017451384104637455, + "mc2": 0.6263434682289961, + "mc2_stderr": 0.015915686940283662 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5171192443919717, + "acc_stderr": 0.01718027524608563, + "acc_norm": 0.5348288075560803, + "acc_norm_stderr": 0.017148598015747422 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + 
"harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "JaeyeonKang/CCK-v1.0.1-DPO", + "model_sha": "e33c5d975e5ed484e3d98909ee29dc7930225946", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/JaeyeonKang/CCK-v1.1.0-DPO/result_2024-01-14 01:18:44.json b/JaeyeonKang/CCK-v1.1.0-DPO/result_2024-01-14 01:18:44.json new file mode 100644 index 0000000000000000000000000000000000000000..331ccf4ffa44e51e867b6d61ba12a1582b0f5b09 --- /dev/null +++ b/JaeyeonKang/CCK-v1.1.0-DPO/result_2024-01-14 01:18:44.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.5042662116040956, + "acc_stderr": 0.014610858923956952, 
+ "acc_norm": 0.552901023890785, + "acc_norm_stderr": 0.014529380160526848 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4878510256920932, + "acc_stderr": 0.00498830823468727, + "acc_norm": 0.650866361282613, + "acc_norm_stderr": 0.004757220449283696 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6198830409356725, + "acc_stderr": 0.037229657413855394, + "acc_norm": 0.6198830409356725, + "acc_norm_stderr": 0.037229657413855394 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6213592233009708, + "acc_stderr": 0.04802694698258974, + "acc_norm": 0.6213592233009708, + "acc_norm_stderr": 0.04802694698258974 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.016857391247472545, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.016857391247472545 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4740740740740741, + "acc_stderr": 0.04313531696750574, + "acc_norm": 0.4740740740740741, + "acc_norm_stderr": 0.04313531696750574 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768077, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768077 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.5063829787234042, + "acc_stderr": 0.03268335899936336, + "acc_norm": 0.5063829787234042, + "acc_norm_stderr": 0.03268335899936336 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4457831325301205, + "acc_stderr": 0.038695433234721015, + "acc_norm": 0.4457831325301205, + "acc_norm_stderr": 0.038695433234721015 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.617363344051447, + "acc_stderr": 0.02760468902858199, + "acc_norm": 0.617363344051447, + "acc_norm_stderr": 0.02760468902858199 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5829596412556054, + "acc_stderr": 0.03309266936071721, + "acc_norm": 0.5829596412556054, + "acc_norm_stderr": 0.03309266936071721 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6106870229007634, + "acc_stderr": 0.042764865428145914, + 
"acc_norm": 0.6106870229007634, + "acc_norm_stderr": 0.042764865428145914 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956914, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956914 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7323232323232324, + "acc_stderr": 0.031544498882702866, + "acc_norm": 0.7323232323232324, + "acc_norm_stderr": 0.031544498882702866 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.496551724137931, + "acc_stderr": 0.041665675771015785, + "acc_norm": 0.496551724137931, + "acc_norm_stderr": 0.041665675771015785 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.04533838195929776, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.04533838195929776 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6092436974789915, + "acc_stderr": 0.031693802357129965, + "acc_norm": 0.6092436974789915, + "acc_norm_stderr": 0.031693802357129965 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.558974358974359, + "acc_stderr": 0.025174048384000718, + "acc_norm": 0.558974358974359, + "acc_norm_stderr": 0.025174048384000718 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.57, + "acc_stderr": 0.04975698519562429, + "acc_norm": 0.57, + "acc_norm_stderr": 0.04975698519562429 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6296296296296297, + "acc_stderr": 0.04668408033024931, + "acc_norm": 0.6296296296296297, + "acc_norm_stderr": 0.04668408033024931 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39408866995073893, + "acc_stderr": 0.03438157967036545, + "acc_norm": 0.39408866995073893, + "acc_norm_stderr": 0.03438157967036545 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5741935483870968, + 
"acc_stderr": 0.0281291127091659, + "acc_norm": 0.5741935483870968, + "acc_norm_stderr": 0.0281291127091659 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.811965811965812, + "acc_stderr": 0.02559819368665225, + "acc_norm": 0.811965811965812, + "acc_norm_stderr": 0.02559819368665225 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5245283018867924, + "acc_stderr": 0.030735822206205615, + "acc_norm": 0.5245283018867924, + "acc_norm_stderr": 0.030735822206205615 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5636363636363636, + "acc_stderr": 0.04750185058907296, + "acc_norm": 0.5636363636363636, + "acc_norm_stderr": 0.04750185058907296 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.32222222222222224, + "acc_stderr": 0.028493465091028597, + "acc_norm": 0.32222222222222224, + "acc_norm_stderr": 0.028493465091028597 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.32450331125827814, + "acc_stderr": 0.038227469376587525, + "acc_norm": 0.32450331125827814, + "acc_norm_stderr": 0.038227469376587525 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.736318407960199, + "acc_stderr": 0.031157150869355582, + "acc_norm": 0.736318407960199, + "acc_norm_stderr": 0.031157150869355582 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5260115606936416, + "acc_stderr": 0.038073017265045125, + "acc_norm": 0.5260115606936416, + "acc_norm_stderr": 0.038073017265045125 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.38095238095238093, + "acc_stderr": 0.025010749116137605, + "acc_norm": 0.38095238095238093, + "acc_norm_stderr": 0.025010749116137605 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5138888888888888, + "acc_stderr": 0.04179596617581, + "acc_norm": 0.5138888888888888, + "acc_norm_stderr": 0.04179596617581 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.35, + "acc_stderr": 0.04793724854411019, + "acc_norm": 0.35, + "acc_norm_stderr": 0.04793724854411019 + }, + 
"harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.72, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.72, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5838150289017341, + "acc_stderr": 0.02653818910470547, + "acc_norm": 0.5838150289017341, + "acc_norm_stderr": 0.02653818910470547 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5644171779141104, + "acc_stderr": 0.03895632464138937, + "acc_norm": 0.5644171779141104, + "acc_norm_stderr": 0.03895632464138937 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6049382716049383, + "acc_stderr": 0.027201117666925657, + "acc_norm": 0.6049382716049383, + "acc_norm_stderr": 0.027201117666925657 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7512953367875648, + "acc_stderr": 0.031195840877700293, + "acc_norm": 0.7512953367875648, + "acc_norm_stderr": 0.031195840877700293 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.42105263157894735, + "acc_stderr": 0.046446020912223177, + "acc_norm": 0.42105263157894735, + "acc_norm_stderr": 0.046446020912223177 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6880733944954128, + "acc_stderr": 0.019862967976707245, + "acc_norm": 0.6880733944954128, + "acc_norm_stderr": 0.019862967976707245 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4126984126984127, + "acc_stderr": 0.04403438954768177, + "acc_norm": 0.4126984126984127, + "acc_norm_stderr": 0.04403438954768177 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5816993464052288, + "acc_stderr": 0.02824513402438729, + "acc_norm": 0.5816993464052288, + "acc_norm_stderr": 0.02824513402438729 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.64, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.64, + "acc_norm_stderr": 0.04824181513244218 + }, + 
"harness|ko_mmlu_international_law|5": { + "acc": 0.71900826446281, + "acc_stderr": 0.04103203830514511, + "acc_norm": 0.71900826446281, + "acc_norm_stderr": 0.04103203830514511 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5460526315789473, + "acc_stderr": 0.04051646342874143, + "acc_norm": 0.5460526315789473, + "acc_norm_stderr": 0.04051646342874143 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5179738562091504, + "acc_stderr": 0.020214761037872404, + "acc_norm": 0.5179738562091504, + "acc_norm_stderr": 0.020214761037872404 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3829787234042553, + "acc_stderr": 0.028999080904806178, + "acc_norm": 0.3829787234042553, + "acc_norm_stderr": 0.028999080904806178 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4107142857142857, + "acc_stderr": 0.046695106638751926, + "acc_norm": 0.4107142857142857, + "acc_norm_stderr": 0.046695106638751926 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.44907407407407407, + "acc_stderr": 0.03392238405321617, + "acc_norm": 0.44907407407407407, + "acc_norm_stderr": 0.03392238405321617 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2636871508379888, + "acc_stderr": 0.01473692638376197, + "acc_norm": 0.2636871508379888, + "acc_norm_stderr": 0.01473692638376197 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.68, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.68, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5257352941176471, + "acc_stderr": 0.030332578094555033, + "acc_norm": 0.5257352941176471, + "acc_norm_stderr": 0.030332578094555033 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5673469387755102, + "acc_stderr": 0.03171752824062664, + "acc_norm": 
0.5673469387755102, + "acc_norm_stderr": 0.03171752824062664 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7383966244725738, + "acc_stderr": 0.028609516716994934, + "acc_norm": 0.7383966244725738, + "acc_norm_stderr": 0.028609516716994934 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.40352020860495436, + "acc_stderr": 0.012530241301193195, + "acc_norm": 0.40352020860495436, + "acc_norm_stderr": 0.012530241301193195 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6862745098039216, + "acc_stderr": 0.032566854844603886, + "acc_norm": 0.6862745098039216, + "acc_norm_stderr": 0.032566854844603886 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6848484848484848, + "acc_stderr": 0.0362773057502241, + "acc_norm": 0.6848484848484848, + "acc_norm_stderr": 0.0362773057502241 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.42105263157894735, + "mc1_stderr": 0.017283936248136504, + "mc2": 0.575539142333608, + "mc2_stderr": 0.016146771321331445 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5289256198347108, + "acc_stderr": 0.017161563949916348, + "acc_norm": 0.5360094451003542, + "acc_norm_stderr": 0.017145715365486664 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + 
"harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "JaeyeonKang/CCK-v1.1.0-DPO", + "model_sha": 
"0965df1d44772b35487684c79cecf9401cb9e76e", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/JaeyeonKang/CCK-v1.1.0/result_2024-01-14 00:43:41.json b/JaeyeonKang/CCK-v1.1.0/result_2024-01-14 00:43:41.json new file mode 100644 index 0000000000000000000000000000000000000000..83a0eb1e0496a239a95012adf9b21a33031f81e7 --- /dev/null +++ b/JaeyeonKang/CCK-v1.1.0/result_2024-01-14 00:43:41.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4564846416382253, + "acc_stderr": 0.014555949760496439, + "acc_norm": 0.5068259385665529, + "acc_norm_stderr": 0.014610029151379813 + }, + "harness|ko_hellaswag|10": { + "acc": 0.46126269667396935, + "acc_stderr": 0.004974783753309692, + "acc_norm": 0.6171081457876917, + "acc_norm_stderr": 0.0048509882151675345 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6257309941520468, + "acc_stderr": 0.03711601185389481, + "acc_norm": 0.6257309941520468, + "acc_norm_stderr": 0.03711601185389481 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6019417475728155, + "acc_stderr": 0.04846748253977238, + "acc_norm": 0.6019417475728155, + "acc_norm_stderr": 0.04846748253977238 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6462324393358876, + "acc_stderr": 0.017098184708161903, + "acc_norm": 0.6462324393358876, + "acc_norm_stderr": 0.017098184708161903 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.5259259259259259, + "acc_stderr": 0.043135316967505756, + "acc_norm": 0.5259259259259259, + "acc_norm_stderr": 0.043135316967505756 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.48936170212765956, + "acc_stderr": 0.03267862331014063, + "acc_norm": 0.48936170212765956, + 
"acc_norm_stderr": 0.03267862331014063 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.463855421686747, + "acc_stderr": 0.03882310850890594, + "acc_norm": 0.463855421686747, + "acc_norm_stderr": 0.03882310850890594 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.617363344051447, + "acc_stderr": 0.027604689028581993, + "acc_norm": 0.617363344051447, + "acc_norm_stderr": 0.027604689028581993 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5695067264573991, + "acc_stderr": 0.033231973029429394, + "acc_norm": 0.5695067264573991, + "acc_norm_stderr": 0.033231973029429394 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.648854961832061, + "acc_stderr": 0.0418644516301375, + "acc_norm": 0.648854961832061, + "acc_norm_stderr": 0.0418644516301375 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.48, + "acc_stderr": 0.05021167315686779, + "acc_norm": 0.48, + "acc_norm_stderr": 0.05021167315686779 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7323232323232324, + "acc_stderr": 0.03154449888270286, + "acc_norm": 0.7323232323232324, + "acc_norm_stderr": 0.03154449888270286 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5310344827586206, + "acc_stderr": 0.04158632762097828, + "acc_norm": 0.5310344827586206, + "acc_norm_stderr": 0.04158632762097828 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.04158307533083286, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.04158307533083286 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6176470588235294, + "acc_stderr": 0.031566630992154156, + "acc_norm": 0.6176470588235294, + "acc_norm_stderr": 0.031566630992154156 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5282051282051282, + "acc_stderr": 0.025310639254933855, + "acc_norm": 0.5282051282051282, + "acc_norm_stderr": 0.025310639254933855 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.58, + "acc_stderr": 
0.04960449637488583, + "acc_norm": 0.58, + "acc_norm_stderr": 0.04960449637488583 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6574074074074074, + "acc_stderr": 0.045879047413018105, + "acc_norm": 0.6574074074074074, + "acc_norm_stderr": 0.045879047413018105 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.034819048444388045, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.034819048444388045 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5774193548387097, + "acc_stderr": 0.02810096472427264, + "acc_norm": 0.5774193548387097, + "acc_norm_stderr": 0.02810096472427264 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.811965811965812, + "acc_stderr": 0.025598193686652247, + "acc_norm": 0.811965811965812, + "acc_norm_stderr": 0.025598193686652247 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5207547169811321, + "acc_stderr": 0.030746349975723463, + "acc_norm": 0.5207547169811321, + "acc_norm_stderr": 0.030746349975723463 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5909090909090909, + "acc_stderr": 0.04709306978661895, + "acc_norm": 0.5909090909090909, + "acc_norm_stderr": 0.04709306978661895 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.362962962962963, + "acc_stderr": 0.029318203645206865, + "acc_norm": 0.362962962962963, + "acc_norm_stderr": 0.029318203645206865 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3509933774834437, + "acc_stderr": 0.03896981964257375, + "acc_norm": 0.3509933774834437, + "acc_norm_stderr": 0.03896981964257375 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.746268656716418, + "acc_stderr": 0.030769444967296007, + "acc_norm": 0.746268656716418, + "acc_norm_stderr": 0.030769444967296007 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 
0.49710982658959535, + "acc_stderr": 0.038124005659748335, + "acc_norm": 0.49710982658959535, + "acc_norm_stderr": 0.038124005659748335 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.373015873015873, + "acc_stderr": 0.02490699045899257, + "acc_norm": 0.373015873015873, + "acc_norm_stderr": 0.02490699045899257 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4791666666666667, + "acc_stderr": 0.041775789507399935, + "acc_norm": 0.4791666666666667, + "acc_norm_stderr": 0.041775789507399935 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.75, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.75, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.6098265895953757, + "acc_stderr": 0.026261677607806642, + "acc_norm": 0.6098265895953757, + "acc_norm_stderr": 0.026261677607806642 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.558282208588957, + "acc_stderr": 0.03901591825836184, + "acc_norm": 0.558282208588957, + "acc_norm_stderr": 0.03901591825836184 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6049382716049383, + "acc_stderr": 0.027201117666925654, + "acc_norm": 0.6049382716049383, + "acc_norm_stderr": 0.027201117666925654 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7461139896373057, + "acc_stderr": 0.03141024780565319, + "acc_norm": 0.7461139896373057, + "acc_norm_stderr": 0.03141024780565319 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.45614035087719296, + "acc_stderr": 0.046854730419077895, + "acc_norm": 0.45614035087719296, + "acc_norm_stderr": 0.046854730419077895 + }, + 
"harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6697247706422018, + "acc_stderr": 0.020164466336342977, + "acc_norm": 0.6697247706422018, + "acc_norm_stderr": 0.020164466336342977 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.0436031486007746, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.0436031486007746 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5947712418300654, + "acc_stderr": 0.028110928492809075, + "acc_norm": 0.5947712418300654, + "acc_norm_stderr": 0.028110928492809075 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.61, + "acc_stderr": 0.049020713000019756, + "acc_norm": 0.61, + "acc_norm_stderr": 0.049020713000019756 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7272727272727273, + "acc_stderr": 0.04065578140908705, + "acc_norm": 0.7272727272727273, + "acc_norm_stderr": 0.04065578140908705 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5855263157894737, + "acc_stderr": 0.04008973785779205, + "acc_norm": 0.5855263157894737, + "acc_norm_stderr": 0.04008973785779205 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5147058823529411, + "acc_stderr": 0.020219083895133917, + "acc_norm": 0.5147058823529411, + "acc_norm_stderr": 0.020219083895133917 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.37943262411347517, + "acc_stderr": 0.028947338851614105, + "acc_norm": 0.37943262411347517, + "acc_norm_stderr": 0.028947338851614105 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.375, + "acc_stderr": 0.04595091388086298, + "acc_norm": 0.375, + "acc_norm_stderr": 0.04595091388086298 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5092592592592593, + "acc_stderr": 0.034093869469927006, + "acc_norm": 0.5092592592592593, + "acc_norm_stderr": 0.034093869469927006 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.22793296089385476, + "acc_stderr": 0.014030149950805097, + "acc_norm": 0.22793296089385476, + 
"acc_norm_stderr": 0.014030149950805097 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.64, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.64, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5, + "acc_stderr": 0.030372836961539352, + "acc_norm": 0.5, + "acc_norm_stderr": 0.030372836961539352 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5755102040816327, + "acc_stderr": 0.03164209487942942, + "acc_norm": 0.5755102040816327, + "acc_norm_stderr": 0.03164209487942942 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7341772151898734, + "acc_stderr": 0.02875679962965834, + "acc_norm": 0.7341772151898734, + "acc_norm_stderr": 0.02875679962965834 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.4041720990873533, + "acc_stderr": 0.012533504046491367, + "acc_norm": 0.4041720990873533, + "acc_norm_stderr": 0.012533504046491367 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6764705882352942, + "acc_stderr": 0.0328347205610856, + "acc_norm": 0.6764705882352942, + "acc_norm_stderr": 0.0328347205610856 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6848484848484848, + "acc_stderr": 0.0362773057502241, + "acc_norm": 0.6848484848484848, + "acc_norm_stderr": 0.0362773057502241 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.31211750305997554, + "mc1_stderr": 0.01622075676952091, + "mc2": 0.47693449176935354, + "mc2_stderr": 0.015381480297496484 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5041322314049587, + "acc_stderr": 0.017189767032130817, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.017119172208061504 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + 
"harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + 
"harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "JaeyeonKang/CCK-v1.1.0", + "model_sha": "d0485d83b3f6d4738e239f9d15eb3edab9583770", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/JaeyeonKang/CCK-v1.2.0-DPO/result_2024-01-14 01:13:24.json b/JaeyeonKang/CCK-v1.2.0-DPO/result_2024-01-14 01:13:24.json new file mode 100644 index 0000000000000000000000000000000000000000..f4df787a5ac3b531acaa789f348fb35182f44005 --- /dev/null +++ b/JaeyeonKang/CCK-v1.2.0-DPO/result_2024-01-14 01:13:24.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4991467576791809, + "acc_stderr": 0.014611369529813279, + "acc_norm": 0.5460750853242321, + "acc_norm_stderr": 0.014549221105171865 + }, + "harness|ko_hellaswag|10": { + "acc": 0.477096195976897, + "acc_stderr": 0.004984543540932338, + "acc_norm": 0.6516630153355906, + "acc_norm_stderr": 0.0047546970133549565 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5730994152046783, + "acc_stderr": 0.03793620616529916, + "acc_norm": 0.5730994152046783, + "acc_norm_stderr": 0.03793620616529916 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6407766990291263, + "acc_stderr": 0.04750458399041697, + "acc_norm": 0.6407766990291263, + "acc_norm_stderr": 
0.04750458399041697 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6602809706257982, + "acc_stderr": 0.01693639411430163, + "acc_norm": 0.6602809706257982, + "acc_norm_stderr": 0.01693639411430163 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45925925925925926, + "acc_stderr": 0.04304979692464244, + "acc_norm": 0.45925925925925926, + "acc_norm_stderr": 0.04304979692464244 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4723404255319149, + "acc_stderr": 0.03263597118409769, + "acc_norm": 0.4723404255319149, + "acc_norm_stderr": 0.03263597118409769 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4578313253012048, + "acc_stderr": 0.03878626771002361, + "acc_norm": 0.4578313253012048, + "acc_norm_stderr": 0.03878626771002361 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6302250803858521, + "acc_stderr": 0.027417996705630998, + "acc_norm": 0.6302250803858521, + "acc_norm_stderr": 0.027417996705630998 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5874439461883408, + "acc_stderr": 0.03304062175449297, + "acc_norm": 0.5874439461883408, + "acc_norm_stderr": 0.03304062175449297 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6106870229007634, + "acc_stderr": 0.042764865428145914, + "acc_norm": 0.6106870229007634, + "acc_norm_stderr": 0.042764865428145914 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7474747474747475, + "acc_stderr": 0.03095405547036592, + "acc_norm": 0.7474747474747475, + "acc_norm_stderr": 0.03095405547036592 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.503448275862069, + "acc_stderr": 0.04166567577101579, + "acc_norm": 0.503448275862069, + "acc_norm_stderr": 
0.04166567577101579 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.04488482852329017, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.04488482852329017 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5756302521008403, + "acc_stderr": 0.03210479051015776, + "acc_norm": 0.5756302521008403, + "acc_norm_stderr": 0.03210479051015776 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5641025641025641, + "acc_stderr": 0.025141801511177488, + "acc_norm": 0.5641025641025641, + "acc_norm_stderr": 0.025141801511177488 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.62, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.62, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6574074074074074, + "acc_stderr": 0.045879047413018105, + "acc_norm": 0.6574074074074074, + "acc_norm_stderr": 0.045879047413018105 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4088669950738916, + "acc_stderr": 0.034590588158832314, + "acc_norm": 0.4088669950738916, + "acc_norm_stderr": 0.034590588158832314 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5806451612903226, + "acc_stderr": 0.028071588901091845, + "acc_norm": 0.5806451612903226, + "acc_norm_stderr": 0.028071588901091845 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7991452991452992, + "acc_stderr": 0.026246772946890467, + "acc_norm": 0.7991452991452992, + "acc_norm_stderr": 0.026246772946890467 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5094339622641509, + "acc_stderr": 0.030767394707808093, + "acc_norm": 0.5094339622641509, + "acc_norm_stderr": 0.030767394707808093 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5636363636363636, + "acc_stderr": 0.04750185058907296, + 
"acc_norm": 0.5636363636363636, + "acc_norm_stderr": 0.04750185058907296 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.34814814814814815, + "acc_stderr": 0.029045600290616258, + "acc_norm": 0.34814814814814815, + "acc_norm_stderr": 0.029045600290616258 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.037579499229433426, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.037579499229433426 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.736318407960199, + "acc_stderr": 0.03115715086935558, + "acc_norm": 0.736318407960199, + "acc_norm_stderr": 0.03115715086935558 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5317919075144508, + "acc_stderr": 0.03804749744364764, + "acc_norm": 0.5317919075144508, + "acc_norm_stderr": 0.03804749744364764 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3941798941798942, + "acc_stderr": 0.025167982333894143, + "acc_norm": 0.3941798941798942, + "acc_norm_stderr": 0.025167982333894143 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5277777777777778, + "acc_stderr": 0.04174752578923185, + "acc_norm": 0.5277777777777778, + "acc_norm_stderr": 0.04174752578923185 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.66, + "acc_stderr": 0.04760952285695238, + "acc_norm": 0.66, + "acc_norm_stderr": 0.04760952285695238 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5895953757225434, + "acc_stderr": 0.026483392042098177, + "acc_norm": 0.5895953757225434, + "acc_norm_stderr": 0.026483392042098177 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5644171779141104, + "acc_stderr": 0.03895632464138938, + "acc_norm": 0.5644171779141104, + "acc_norm_stderr": 0.03895632464138938 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6049382716049383, + "acc_stderr": 
0.027201117666925657, + "acc_norm": 0.6049382716049383, + "acc_norm_stderr": 0.027201117666925657 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7461139896373057, + "acc_stderr": 0.03141024780565319, + "acc_norm": 0.7461139896373057, + "acc_norm_stderr": 0.03141024780565319 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.4298245614035088, + "acc_stderr": 0.046570472605949646, + "acc_norm": 0.4298245614035088, + "acc_norm_stderr": 0.046570472605949646 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6807339449541284, + "acc_stderr": 0.019987829069750017, + "acc_norm": 0.6807339449541284, + "acc_norm_stderr": 0.019987829069750017 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.04360314860077459, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.04360314860077459 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5686274509803921, + "acc_stderr": 0.028358956313423556, + "acc_norm": 0.5686274509803921, + "acc_norm_stderr": 0.028358956313423556 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.71900826446281, + "acc_stderr": 0.04103203830514511, + "acc_norm": 0.71900826446281, + "acc_norm_stderr": 0.04103203830514511 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5394736842105263, + "acc_stderr": 0.04056242252249034, + "acc_norm": 0.5394736842105263, + "acc_norm_stderr": 0.04056242252249034 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5245098039215687, + "acc_stderr": 0.020203517280261436, + "acc_norm": 0.5245098039215687, + "acc_norm_stderr": 0.020203517280261436 + }, + "harness|ko_mmlu_professional_accounting|5": { + 
"acc": 0.425531914893617, + "acc_stderr": 0.02949482760014437, + "acc_norm": 0.425531914893617, + "acc_norm_stderr": 0.02949482760014437 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.04697113923010213, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.04697113923010213 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.0340470532865388, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.0340470532865388 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2927374301675978, + "acc_stderr": 0.015218109544410177, + "acc_norm": 0.2927374301675978, + "acc_norm_stderr": 0.015218109544410177 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.65, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.65, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5073529411764706, + "acc_stderr": 0.030369552523902173, + "acc_norm": 0.5073529411764706, + "acc_norm_stderr": 0.030369552523902173 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5469387755102041, + "acc_stderr": 0.03186785930004129, + "acc_norm": 0.5469387755102041, + "acc_norm_stderr": 0.03186785930004129 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7341772151898734, + "acc_stderr": 0.028756799629658335, + "acc_norm": 0.7341772151898734, + "acc_norm_stderr": 0.028756799629658335 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.41590612777053454, + "acc_stderr": 0.012588323850313613, + "acc_norm": 0.41590612777053454, + "acc_norm_stderr": 0.012588323850313613 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.7009803921568627, + "acc_stderr": 0.03213325717373617, + "acc_norm": 0.7009803921568627, + "acc_norm_stderr": 
0.03213325717373617 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6848484848484848, + "acc_stderr": 0.0362773057502241, + "acc_norm": 0.6848484848484848, + "acc_norm_stderr": 0.0362773057502241 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.41615667074663404, + "mc1_stderr": 0.017255657502903036, + "mc2": 0.5774162487065844, + "mc2_stderr": 0.01599641293358398 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5301062573789846, + "acc_stderr": 0.017159163590170223, + "acc_norm": 0.5407319952774499, + "acc_norm_stderr": 0.017133218276537666 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + 
"harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "JaeyeonKang/CCK-v1.2.0-DPO", + "model_sha": "e4f493f35de33804870bf0ede5b87022b18bbe6b", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/JaeyeonKang/CCK-v1.2.0/result_2024-01-14 01:15:00.json b/JaeyeonKang/CCK-v1.2.0/result_2024-01-14 01:15:00.json new file mode 100644 index 0000000000000000000000000000000000000000..efd3bd546cf38efbf5f8cbbea5f6b1541f73f105 --- /dev/null +++ b/JaeyeonKang/CCK-v1.2.0/result_2024-01-14 01:15:00.json @@ -0,0 +1,444 @@ +{ + "results": { + 
"harness|ko_arc_challenge|25": { + "acc": 0.4445392491467577, + "acc_stderr": 0.01452122640562708, + "acc_norm": 0.5093856655290102, + "acc_norm_stderr": 0.014608816322065003 + }, + "harness|ko_hellaswag|10": { + "acc": 0.45498904600677154, + "acc_stderr": 0.004969521827957953, + "acc_norm": 0.6136227843059151, + "acc_norm_stderr": 0.004859236191579798 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6257309941520468, + "acc_stderr": 0.03711601185389481, + "acc_norm": 0.6257309941520468, + "acc_norm_stderr": 0.03711601185389481 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6019417475728155, + "acc_stderr": 0.04846748253977238, + "acc_norm": 0.6019417475728155, + "acc_norm_stderr": 0.04846748253977238 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6436781609195402, + "acc_stderr": 0.0171258537627559, + "acc_norm": 0.6436781609195402, + "acc_norm_stderr": 0.0171258537627559 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.5111111111111111, + "acc_stderr": 0.04318275491977976, + "acc_norm": 0.5111111111111111, + "acc_norm_stderr": 0.04318275491977976 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421255, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421255 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.48936170212765956, + "acc_stderr": 0.03267862331014063, + "acc_norm": 0.48936170212765956, + "acc_norm_stderr": 0.03267862331014063 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.46987951807228917, + "acc_stderr": 0.03885425420866766, + "acc_norm": 0.46987951807228917, + "acc_norm_stderr": 0.03885425420866766 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6237942122186495, + "acc_stderr": 0.02751392568354943, + "acc_norm": 0.6237942122186495, + "acc_norm_stderr": 0.02751392568354943 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5829596412556054, + "acc_stderr": 0.03309266936071721, + "acc_norm": 0.5829596412556054, + "acc_norm_stderr": 0.03309266936071721 + }, + 
"harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6717557251908397, + "acc_stderr": 0.04118438565806298, + "acc_norm": 0.6717557251908397, + "acc_norm_stderr": 0.04118438565806298 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956914, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956914 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7272727272727273, + "acc_stderr": 0.03173071239071724, + "acc_norm": 0.7272727272727273, + "acc_norm_stderr": 0.03173071239071724 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5172413793103449, + "acc_stderr": 0.04164188720169375, + "acc_norm": 0.5172413793103449, + "acc_norm_stderr": 0.04164188720169375 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.04389869956808778, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.04389869956808778 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6176470588235294, + "acc_stderr": 0.031566630992154156, + "acc_norm": 0.6176470588235294, + "acc_norm_stderr": 0.031566630992154156 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5307692307692308, + "acc_stderr": 0.025302958890850158, + "acc_norm": 0.5307692307692308, + "acc_norm_stderr": 0.025302958890850158 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6481481481481481, + "acc_stderr": 0.04616631111801714, + "acc_norm": 0.6481481481481481, + "acc_norm_stderr": 0.04616631111801714 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.03465304488406795, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 
0.03465304488406795 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5935483870967742, + "acc_stderr": 0.027941727346256308, + "acc_norm": 0.5935483870967742, + "acc_norm_stderr": 0.027941727346256308 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7991452991452992, + "acc_stderr": 0.026246772946890474, + "acc_norm": 0.7991452991452992, + "acc_norm_stderr": 0.026246772946890474 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.49056603773584906, + "acc_stderr": 0.030767394707808093, + "acc_norm": 0.49056603773584906, + "acc_norm_stderr": 0.030767394707808093 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5545454545454546, + "acc_stderr": 0.047605488214603246, + "acc_norm": 0.5545454545454546, + "acc_norm_stderr": 0.047605488214603246 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.35555555555555557, + "acc_stderr": 0.029185714949857406, + "acc_norm": 0.35555555555555557, + "acc_norm_stderr": 0.029185714949857406 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33774834437086093, + "acc_stderr": 0.038615575462551684, + "acc_norm": 0.33774834437086093, + "acc_norm_stderr": 0.038615575462551684 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7412935323383084, + "acc_stderr": 0.030965903123573037, + "acc_norm": 0.7412935323383084, + "acc_norm_stderr": 0.030965903123573037 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.49710982658959535, + "acc_stderr": 0.038124005659748335, + "acc_norm": 0.49710982658959535, + "acc_norm_stderr": 0.038124005659748335 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.36772486772486773, + "acc_stderr": 0.024833839825562417, + "acc_norm": 0.36772486772486773, + "acc_norm_stderr": 0.024833839825562417 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5069444444444444, + "acc_stderr": 0.04180806750294938, + "acc_norm": 0.5069444444444444, + "acc_norm_stderr": 0.04180806750294938 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.34, + 
"acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.72, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.72, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.6213872832369942, + "acc_stderr": 0.02611374936131034, + "acc_norm": 0.6213872832369942, + "acc_norm_stderr": 0.02611374936131034 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.50920245398773, + "acc_stderr": 0.03927705600787443, + "acc_norm": 0.50920245398773, + "acc_norm_stderr": 0.03927705600787443 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5987654320987654, + "acc_stderr": 0.0272725828498398, + "acc_norm": 0.5987654320987654, + "acc_norm_stderr": 0.0272725828498398 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7202072538860104, + "acc_stderr": 0.03239637046735703, + "acc_norm": 0.7202072538860104, + "acc_norm_stderr": 0.03239637046735703 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.43859649122807015, + "acc_stderr": 0.04668000738510455, + "acc_norm": 0.43859649122807015, + "acc_norm_stderr": 0.04668000738510455 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6660550458715596, + "acc_stderr": 0.020220554196736407, + "acc_norm": 0.6660550458715596, + "acc_norm_stderr": 0.020220554196736407 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.36507936507936506, + "acc_stderr": 0.043062412591271526, + "acc_norm": 0.36507936507936506, + "acc_norm_stderr": 0.043062412591271526 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5784313725490197, + "acc_stderr": 0.02827549015679145, + "acc_norm": 0.5784313725490197, + "acc_norm_stderr": 0.02827549015679145 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.59, + 
"acc_stderr": 0.049431107042371025, + "acc_norm": 0.59, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6859504132231405, + "acc_stderr": 0.04236964753041019, + "acc_norm": 0.6859504132231405, + "acc_norm_stderr": 0.04236964753041019 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5657894736842105, + "acc_stderr": 0.04033565667848319, + "acc_norm": 0.5657894736842105, + "acc_norm_stderr": 0.04033565667848319 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.49019607843137253, + "acc_stderr": 0.020223946005074295, + "acc_norm": 0.49019607843137253, + "acc_norm_stderr": 0.020223946005074295 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3900709219858156, + "acc_stderr": 0.029097675599463926, + "acc_norm": 0.3900709219858156, + "acc_norm_stderr": 0.029097675599463926 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4017857142857143, + "acc_stderr": 0.04653333146973646, + "acc_norm": 0.4017857142857143, + "acc_norm_stderr": 0.04653333146973646 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5231481481481481, + "acc_stderr": 0.034063153607115065, + "acc_norm": 0.5231481481481481, + "acc_norm_stderr": 0.034063153607115065 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23128491620111732, + "acc_stderr": 0.014102223623152593, + "acc_norm": 0.23128491620111732, + "acc_norm_stderr": 0.014102223623152593 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.65, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.65, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5330882352941176, + "acc_stderr": 0.030306257722468314, + "acc_norm": 0.5330882352941176, + "acc_norm_stderr": 0.030306257722468314 + }, + 
"harness|ko_mmlu_security_studies|5": { + "acc": 0.5714285714285714, + "acc_stderr": 0.03168091161233882, + "acc_norm": 0.5714285714285714, + "acc_norm_stderr": 0.03168091161233882 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7426160337552743, + "acc_stderr": 0.028458820991460302, + "acc_norm": 0.7426160337552743, + "acc_norm_stderr": 0.028458820991460302 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.409387222946545, + "acc_stderr": 0.012558780895570752, + "acc_norm": 0.409387222946545, + "acc_norm_stderr": 0.012558780895570752 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6764705882352942, + "acc_stderr": 0.03283472056108561, + "acc_norm": 0.6764705882352942, + "acc_norm_stderr": 0.03283472056108561 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6909090909090909, + "acc_stderr": 0.036085410115739666, + "acc_norm": 0.6909090909090909, + "acc_norm_stderr": 0.036085410115739666 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.33047735618115054, + "mc1_stderr": 0.01646676961369829, + "mc2": 0.49260717941012344, + "mc2_stderr": 0.015517299276204722 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5218417945690673, + "acc_stderr": 0.017173944474294375, + "acc_norm": 0.5312868949232585, + "acc_norm_stderr": 0.017156666859785463 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + 
"harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + 
"harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "JaeyeonKang/CCK-v1.2.0", + "model_sha": "0c19d8b2fac4de75e83d611adeb6c08b31402f81", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/JaeyeonKang/CCK-v1.3.0-DPO/result_2024-01-14 01:09:49.json b/JaeyeonKang/CCK-v1.3.0-DPO/result_2024-01-14 01:09:49.json new file mode 100644 index 0000000000000000000000000000000000000000..d6b84307c769e4ebdda327b26e3be030d88775a5 --- /dev/null +++ b/JaeyeonKang/CCK-v1.3.0-DPO/result_2024-01-14 01:09:49.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.48378839590443684, + "acc_stderr": 0.014603708567414947, + "acc_norm": 0.5460750853242321, + "acc_norm_stderr": 0.014549221105171867 + }, + "harness|ko_hellaswag|10": { + "acc": 0.45956980681139215, + "acc_stderr": 0.00497344206074162, + "acc_norm": 0.6365265883290181, + "acc_norm_stderr": 0.004800164434233249 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5614035087719298, + "acc_stderr": 0.038057975055904594, + "acc_norm": 0.5614035087719298, + "acc_norm_stderr": 0.038057975055904594 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6310679611650486, + "acc_stderr": 0.0477761518115674, + "acc_norm": 0.6310679611650486, + "acc_norm_stderr": 0.0477761518115674 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6513409961685823, + "acc_stderr": 0.01704124314349098, + "acc_norm": 0.6513409961685823, + "acc_norm_stderr": 0.01704124314349098 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.043097329010363554, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.043097329010363554 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.19, + "acc_stderr": 0.03942772444036623, + "acc_norm": 0.19, + "acc_norm_stderr": 0.03942772444036623 + }, + 
"harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.46382978723404256, + "acc_stderr": 0.032600385118357715, + "acc_norm": 0.46382978723404256, + "acc_norm_stderr": 0.032600385118357715 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.46987951807228917, + "acc_stderr": 0.03885425420866766, + "acc_norm": 0.46987951807228917, + "acc_norm_stderr": 0.03885425420866766 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6237942122186495, + "acc_stderr": 0.027513925683549427, + "acc_norm": 0.6237942122186495, + "acc_norm_stderr": 0.027513925683549427 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5874439461883408, + "acc_stderr": 0.03304062175449297, + "acc_norm": 0.5874439461883408, + "acc_norm_stderr": 0.03304062175449297 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6183206106870229, + "acc_stderr": 0.0426073515764456, + "acc_norm": 0.6183206106870229, + "acc_norm_stderr": 0.0426073515764456 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956914, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956914 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6868686868686869, + "acc_stderr": 0.033042050878136525, + "acc_norm": 0.6868686868686869, + "acc_norm_stderr": 0.033042050878136525 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4482758620689655, + "acc_stderr": 0.04144311810878151, + "acc_norm": 0.4482758620689655, + "acc_norm_stderr": 0.04144311810878151 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.04533838195929777, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.04533838195929777 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6008403361344538, + "acc_stderr": 0.031811100324139245, + "acc_norm": 0.6008403361344538, + "acc_norm_stderr": 0.031811100324139245 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5794871794871795, + "acc_stderr": 0.025028610276710862, + "acc_norm": 
0.5794871794871795, + "acc_norm_stderr": 0.025028610276710862 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6296296296296297, + "acc_stderr": 0.04668408033024931, + "acc_norm": 0.6296296296296297, + "acc_norm_stderr": 0.04668408033024931 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3842364532019704, + "acc_stderr": 0.03422398565657551, + "acc_norm": 0.3842364532019704, + "acc_norm_stderr": 0.03422398565657551 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5774193548387097, + "acc_stderr": 0.02810096472427264, + "acc_norm": 0.5774193548387097, + "acc_norm_stderr": 0.02810096472427264 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7692307692307693, + "acc_stderr": 0.027601921381417618, + "acc_norm": 0.7692307692307693, + "acc_norm_stderr": 0.027601921381417618 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5584905660377358, + "acc_stderr": 0.03056159042673184, + "acc_norm": 0.5584905660377358, + "acc_norm_stderr": 0.03056159042673184 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6, + "acc_stderr": 0.0469237132203465, + "acc_norm": 0.6, + "acc_norm_stderr": 0.0469237132203465 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.028317533496066496, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.028317533496066496 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.32450331125827814, + "acc_stderr": 0.03822746937658754, + "acc_norm": 0.32450331125827814, + "acc_norm_stderr": 0.03822746937658754 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.736318407960199, + "acc_stderr": 0.03115715086935558, + "acc_norm": 
0.736318407960199, + "acc_norm_stderr": 0.03115715086935558 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5317919075144508, + "acc_stderr": 0.03804749744364764, + "acc_norm": 0.5317919075144508, + "acc_norm_stderr": 0.03804749744364764 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.41798941798941797, + "acc_stderr": 0.02540255550326091, + "acc_norm": 0.41798941798941797, + "acc_norm_stderr": 0.02540255550326091 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.04174752578923185, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.04174752578923185 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.67, + "acc_stderr": 0.047258156262526066, + "acc_norm": 0.67, + "acc_norm_stderr": 0.047258156262526066 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5780346820809249, + "acc_stderr": 0.02658923114217426, + "acc_norm": 0.5780346820809249, + "acc_norm_stderr": 0.02658923114217426 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5153374233128835, + "acc_stderr": 0.039265223787088445, + "acc_norm": 0.5153374233128835, + "acc_norm_stderr": 0.039265223787088445 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6049382716049383, + "acc_stderr": 0.02720111766692565, + "acc_norm": 0.6049382716049383, + "acc_norm_stderr": 0.02720111766692565 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7202072538860104, + "acc_stderr": 0.032396370467357036, + "acc_norm": 0.7202072538860104, + "acc_norm_stderr": 0.032396370467357036 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.4298245614035088, + "acc_stderr": 0.046570472605949646, + 
"acc_norm": 0.4298245614035088, + "acc_norm_stderr": 0.046570472605949646 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.673394495412844, + "acc_stderr": 0.020106990889937303, + "acc_norm": 0.673394495412844, + "acc_norm_stderr": 0.020106990889937303 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.40476190476190477, + "acc_stderr": 0.043902592653775614, + "acc_norm": 0.40476190476190477, + "acc_norm_stderr": 0.043902592653775614 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5588235294117647, + "acc_stderr": 0.028431095444176643, + "acc_norm": 0.5588235294117647, + "acc_norm_stderr": 0.028431095444176643 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7107438016528925, + "acc_stderr": 0.041391127276354626, + "acc_norm": 0.7107438016528925, + "acc_norm_stderr": 0.041391127276354626 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5197368421052632, + "acc_stderr": 0.040657710025626036, + "acc_norm": 0.5197368421052632, + "acc_norm_stderr": 0.040657710025626036 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5081699346405228, + "acc_stderr": 0.02022513434305727, + "acc_norm": 0.5081699346405228, + "acc_norm_stderr": 0.02022513434305727 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.4078014184397163, + "acc_stderr": 0.029316011776343555, + "acc_norm": 0.4078014184397163, + "acc_norm_stderr": 0.029316011776343555 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4017857142857143, + "acc_stderr": 0.04653333146973646, + "acc_norm": 0.4017857142857143, + "acc_norm_stderr": 0.04653333146973646 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5324074074074074, + "acc_stderr": 0.034028015813589656, + "acc_norm": 0.5324074074074074, + "acc_norm_stderr": 0.034028015813589656 + }, + "harness|ko_mmlu_moral_scenarios|5": { + 
"acc": 0.3329608938547486, + "acc_stderr": 0.015761716178397556, + "acc_norm": 0.3329608938547486, + "acc_norm_stderr": 0.015761716178397556 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.53, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.53, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.67, + "acc_stderr": 0.04725815626252607, + "acc_norm": 0.67, + "acc_norm_stderr": 0.04725815626252607 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5220588235294118, + "acc_stderr": 0.03034326422421352, + "acc_norm": 0.5220588235294118, + "acc_norm_stderr": 0.03034326422421352 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6040816326530613, + "acc_stderr": 0.03130802899065686, + "acc_norm": 0.6040816326530613, + "acc_norm_stderr": 0.03130802899065686 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7679324894514767, + "acc_stderr": 0.027479744550808524, + "acc_norm": 0.7679324894514767, + "acc_norm_stderr": 0.027479744550808524 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.4322033898305085, + "acc_stderr": 0.01265229777711497, + "acc_norm": 0.4322033898305085, + "acc_norm_stderr": 0.01265229777711497 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.03308611113236436, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.03308611113236436 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.7151515151515152, + "acc_stderr": 0.03524390844511781, + "acc_norm": 0.7151515151515152, + "acc_norm_stderr": 0.03524390844511781 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.46511627906976744, + "mc1_stderr": 0.017460849975873962, + "mc2": 0.6250875981333179, + "mc2_stderr": 0.015880152200294977 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5985832349468713, + "acc_stderr": 0.01685290785872906, + "acc_norm": 0.6127508854781583, + "acc_norm_stderr": 0.01674757799164278 + } 
+ }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + 
"harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "JaeyeonKang/CCK-v1.3.0-DPO", + "model_sha": "86818a7076320a0d25d0374b0b6ea096bf4d3404", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/JaeyeonKang/CCK-v1.4.0-DPO/result_2024-01-14 01:11:15.json b/JaeyeonKang/CCK-v1.4.0-DPO/result_2024-01-14 01:11:15.json new file mode 100644 index 0000000000000000000000000000000000000000..3f0d6a802a323a7021ad1f91e369b317e1eafd9f --- /dev/null +++ b/JaeyeonKang/CCK-v1.4.0-DPO/result_2024-01-14 01:11:15.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.46245733788395904, + "acc_stderr": 0.014570144495075578, + "acc_norm": 0.5204778156996587, + "acc_norm_stderr": 0.014599131353035004 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4457279426409082, + "acc_stderr": 0.004960299952519394, + "acc_norm": 0.6199960167297351, + "acc_norm_stderr": 0.004843954338451443 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5614035087719298, + "acc_stderr": 0.038057975055904594, + "acc_norm": 0.5614035087719298, + "acc_norm_stderr": 0.038057975055904594 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6504854368932039, 
+ "acc_stderr": 0.047211885060971716, + "acc_norm": 0.6504854368932039, + "acc_norm_stderr": 0.047211885060971716 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.648786717752235, + "acc_stderr": 0.017069982051499427, + "acc_norm": 0.648786717752235, + "acc_norm_stderr": 0.017069982051499427 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4740740740740741, + "acc_stderr": 0.04313531696750574, + "acc_norm": 0.4740740740740741, + "acc_norm_stderr": 0.04313531696750574 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.17, + "acc_stderr": 0.03775251680686371, + "acc_norm": 0.17, + "acc_norm_stderr": 0.03775251680686371 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4808510638297872, + "acc_stderr": 0.032662042990646775, + "acc_norm": 0.4808510638297872, + "acc_norm_stderr": 0.032662042990646775 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4759036144578313, + "acc_stderr": 0.03887971849597264, + "acc_norm": 0.4759036144578313, + "acc_norm_stderr": 0.03887971849597264 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6205787781350482, + "acc_stderr": 0.02755994980234782, + "acc_norm": 0.6205787781350482, + "acc_norm_stderr": 0.02755994980234782 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5650224215246636, + "acc_stderr": 0.033272833702713445, + "acc_norm": 0.5650224215246636, + "acc_norm_stderr": 0.033272833702713445 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6106870229007634, + "acc_stderr": 0.04276486542814591, + "acc_norm": 0.6106870229007634, + "acc_norm_stderr": 0.04276486542814591 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7171717171717171, + "acc_stderr": 0.03208779558786753, + "acc_norm": 0.7171717171717171, + "acc_norm_stderr": 0.03208779558786753 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.42758620689655175, 
+ "acc_stderr": 0.04122737111370332, + "acc_norm": 0.42758620689655175, + "acc_norm_stderr": 0.04122737111370332 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.30392156862745096, + "acc_stderr": 0.04576665403207763, + "acc_norm": 0.30392156862745096, + "acc_norm_stderr": 0.04576665403207763 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6176470588235294, + "acc_stderr": 0.031566630992154156, + "acc_norm": 0.6176470588235294, + "acc_norm_stderr": 0.031566630992154156 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5769230769230769, + "acc_stderr": 0.02504919787604236, + "acc_norm": 0.5769230769230769, + "acc_norm_stderr": 0.02504919787604236 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6388888888888888, + "acc_stderr": 0.04643454608906275, + "acc_norm": 0.6388888888888888, + "acc_norm_stderr": 0.04643454608906275 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3793103448275862, + "acc_stderr": 0.034139638059062345, + "acc_norm": 0.3793103448275862, + "acc_norm_stderr": 0.034139638059062345 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5870967741935483, + "acc_stderr": 0.028009138125400377, + "acc_norm": 0.5870967741935483, + "acc_norm_stderr": 0.028009138125400377 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7649572649572649, + "acc_stderr": 0.027778835904935427, + "acc_norm": 0.7649572649572649, + "acc_norm_stderr": 0.027778835904935427 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5283018867924528, + "acc_stderr": 0.0307235352490061, + "acc_norm": 0.5283018867924528, + "acc_norm_stderr": 0.0307235352490061 + }, + 
"harness|ko_mmlu_public_relations|5": { + "acc": 0.6, + "acc_stderr": 0.0469237132203465, + "acc_norm": 0.6, + "acc_norm_stderr": 0.0469237132203465 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3074074074074074, + "acc_stderr": 0.02813325257881563, + "acc_norm": 0.3074074074074074, + "acc_norm_stderr": 0.02813325257881563 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33112582781456956, + "acc_stderr": 0.038425817186598696, + "acc_norm": 0.33112582781456956, + "acc_norm_stderr": 0.038425817186598696 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7313432835820896, + "acc_stderr": 0.031343283582089536, + "acc_norm": 0.7313432835820896, + "acc_norm_stderr": 0.031343283582089536 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5144508670520231, + "acc_stderr": 0.03810871630454764, + "acc_norm": 0.5144508670520231, + "acc_norm_stderr": 0.03810871630454764 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.4365079365079365, + "acc_stderr": 0.025542846817400513, + "acc_norm": 0.4365079365079365, + "acc_norm_stderr": 0.025542846817400513 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.04174752578923185, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.04174752578923185 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.66, + "acc_stderr": 0.04760952285695238, + "acc_norm": 0.66, + "acc_norm_stderr": 0.04760952285695238 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5867052023121387, + "acc_stderr": 0.026511261369409244, + "acc_norm": 0.5867052023121387, + "acc_norm_stderr": 0.026511261369409244 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.50920245398773, + "acc_stderr": 0.03927705600787443, + "acc_norm": 0.50920245398773, + "acc_norm_stderr": 0.03927705600787443 + }, + 
"harness|ko_mmlu_prehistory|5": { + "acc": 0.595679012345679, + "acc_stderr": 0.027306625297327684, + "acc_norm": 0.595679012345679, + "acc_norm_stderr": 0.027306625297327684 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7098445595854922, + "acc_stderr": 0.03275264467791516, + "acc_norm": 0.7098445595854922, + "acc_norm_stderr": 0.03275264467791516 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.4473684210526316, + "acc_stderr": 0.046774730044912005, + "acc_norm": 0.4473684210526316, + "acc_norm_stderr": 0.046774730044912005 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6642201834862386, + "acc_stderr": 0.02024808139675293, + "acc_norm": 0.6642201834862386, + "acc_norm_stderr": 0.02024808139675293 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.0442626668137991, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.0442626668137991 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5816993464052288, + "acc_stderr": 0.028245134024387292, + "acc_norm": 0.5816993464052288, + "acc_norm_stderr": 0.028245134024387292 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.62, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.62, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7107438016528925, + "acc_stderr": 0.041391127276354626, + "acc_norm": 0.7107438016528925, + "acc_norm_stderr": 0.041391127276354626 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5197368421052632, + "acc_stderr": 0.040657710025626036, + "acc_norm": 0.5197368421052632, + "acc_norm_stderr": 0.040657710025626036 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.511437908496732, + "acc_stderr": 0.02022254151561086, + "acc_norm": 0.511437908496732, + "acc_norm_stderr": 
0.02022254151561086 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.4078014184397163, + "acc_stderr": 0.029316011776343555, + "acc_norm": 0.4078014184397163, + "acc_norm_stderr": 0.029316011776343555 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4107142857142857, + "acc_stderr": 0.04669510663875192, + "acc_norm": 0.4107142857142857, + "acc_norm_stderr": 0.04669510663875192 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5462962962962963, + "acc_stderr": 0.03395322726375797, + "acc_norm": 0.5462962962962963, + "acc_norm_stderr": 0.03395322726375797 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.30837988826815643, + "acc_stderr": 0.015445716910998879, + "acc_norm": 0.30837988826815643, + "acc_norm_stderr": 0.015445716910998879 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.63, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.63, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5073529411764706, + "acc_stderr": 0.030369552523902173, + "acc_norm": 0.5073529411764706, + "acc_norm_stderr": 0.030369552523902173 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6, + "acc_stderr": 0.031362502409358936, + "acc_norm": 0.6, + "acc_norm_stderr": 0.031362502409358936 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.759493670886076, + "acc_stderr": 0.027820781981149678, + "acc_norm": 0.759493670886076, + "acc_norm_stderr": 0.027820781981149678 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.4152542372881356, + "acc_stderr": 0.012585471793400664, + "acc_norm": 0.4152542372881356, + "acc_norm_stderr": 0.012585471793400664 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6764705882352942, + "acc_stderr": 0.03283472056108561, + 
"acc_norm": 0.6764705882352942, + "acc_norm_stderr": 0.03283472056108561 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.7090909090909091, + "acc_stderr": 0.03546563019624336, + "acc_norm": 0.7090909090909091, + "acc_norm_stderr": 0.03546563019624336 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.44063647490820074, + "mc1_stderr": 0.01737969755543745, + "mc2": 0.6120817623581549, + "mc2_stderr": 0.01578357530044301 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5985832349468713, + "acc_stderr": 0.01685290785872906, + "acc_norm": 0.6127508854781583, + "acc_norm_stderr": 0.01674757799164278 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + 
"harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "JaeyeonKang/CCK-v1.4.0-DPO", + "model_sha": "2ab21e24f4b3533531d46ee202bd176dfd004a78", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/JaeyeonKang/CCK-v2.0-DPO/result_2024-01-25 22:55:46.json b/JaeyeonKang/CCK-v2.0-DPO/result_2024-01-25 22:55:46.json new file mode 100644 index 0000000000000000000000000000000000000000..23d689190890ca7de38dc9a3c24f1c458c5a02a4 --- /dev/null +++ b/JaeyeonKang/CCK-v2.0-DPO/result_2024-01-25 22:55:46.json @@ -0,0 
+1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.45733788395904434, + "acc_stderr": 0.014558106543924063, + "acc_norm": 0.5179180887372014, + "acc_norm_stderr": 0.014602005585490976 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4643497311292571, + "acc_stderr": 0.004977081808179409, + "acc_norm": 0.6384186417048396, + "acc_norm_stderr": 0.004794764843685283 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5029239766081871, + "acc_stderr": 0.03834759370936839, + "acc_norm": 0.5029239766081871, + "acc_norm_stderr": 0.03834759370936839 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6019417475728155, + "acc_stderr": 0.048467482539772386, + "acc_norm": 0.6019417475728155, + "acc_norm_stderr": 0.048467482539772386 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.558109833971903, + "acc_stderr": 0.017758800534214407, + "acc_norm": 0.558109833971903, + "acc_norm_stderr": 0.017758800534214407 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3851851851851852, + "acc_stderr": 0.04203921040156278, + "acc_norm": 0.3851851851851852, + "acc_norm_stderr": 0.04203921040156278 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.40425531914893614, + "acc_stderr": 0.032081157507886836, + "acc_norm": 0.40425531914893614, + "acc_norm_stderr": 0.032081157507886836 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42771084337349397, + "acc_stderr": 0.03851597683718533, + "acc_norm": 0.42771084337349397, + "acc_norm_stderr": 0.03851597683718533 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5337620578778135, + "acc_stderr": 0.028333277109562797, + "acc_norm": 0.5337620578778135, + "acc_norm_stderr": 0.028333277109562797 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4663677130044843, + "acc_stderr": 0.033481800170603065, + "acc_norm": 0.4663677130044843, + 
"acc_norm_stderr": 0.033481800170603065 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5190839694656488, + "acc_stderr": 0.04382094705550988, + "acc_norm": 0.5190839694656488, + "acc_norm_stderr": 0.04382094705550988 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6818181818181818, + "acc_stderr": 0.03318477333845331, + "acc_norm": 0.6818181818181818, + "acc_norm_stderr": 0.03318477333845331 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4068965517241379, + "acc_stderr": 0.04093793981266237, + "acc_norm": 0.4068965517241379, + "acc_norm_stderr": 0.04093793981266237 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.37254901960784315, + "acc_stderr": 0.04810840148082635, + "acc_norm": 0.37254901960784315, + "acc_norm_stderr": 0.04810840148082635 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5798319327731093, + "acc_stderr": 0.03206183783236152, + "acc_norm": 0.5798319327731093, + "acc_norm_stderr": 0.03206183783236152 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.517948717948718, + "acc_stderr": 0.02533466708095489, + "acc_norm": 0.517948717948718, + "acc_norm_stderr": 0.02533466708095489 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.52, + "acc_stderr": 0.05021167315686779, + "acc_norm": 0.52, + "acc_norm_stderr": 0.05021167315686779 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5185185185185185, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.5185185185185185, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39408866995073893, + "acc_stderr": 0.034381579670365446, + "acc_norm": 
0.39408866995073893, + "acc_norm_stderr": 0.034381579670365446 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5161290322580645, + "acc_stderr": 0.028429203176724555, + "acc_norm": 0.5161290322580645, + "acc_norm_stderr": 0.028429203176724555 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7350427350427351, + "acc_stderr": 0.028911208802749472, + "acc_norm": 0.7350427350427351, + "acc_norm_stderr": 0.028911208802749472 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5169811320754717, + "acc_stderr": 0.030755120364119905, + "acc_norm": 0.5169811320754717, + "acc_norm_stderr": 0.030755120364119905 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5272727272727272, + "acc_stderr": 0.04782001791380061, + "acc_norm": 0.5272727272727272, + "acc_norm_stderr": 0.04782001791380061 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.028742040903948485, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.028742040903948485 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3509933774834437, + "acc_stderr": 0.03896981964257375, + "acc_norm": 0.3509933774834437, + "acc_norm_stderr": 0.03896981964257375 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6368159203980099, + "acc_stderr": 0.034005985055990146, + "acc_norm": 0.6368159203980099, + "acc_norm_stderr": 0.034005985055990146 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4682080924855491, + "acc_stderr": 0.03804749744364764, + "acc_norm": 0.4682080924855491, + "acc_norm_stderr": 0.03804749744364764 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.025107425481137285, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.025107425481137285 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4097222222222222, + "acc_stderr": 0.04112490974670787, + "acc_norm": 0.4097222222222222, + "acc_norm_stderr": 0.04112490974670787 + }, + 
"harness|ko_mmlu_college_chemistry|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.63, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.63, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4913294797687861, + "acc_stderr": 0.026915047355369804, + "acc_norm": 0.4913294797687861, + "acc_norm_stderr": 0.026915047355369804 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4723926380368098, + "acc_stderr": 0.039223782906109894, + "acc_norm": 0.4723926380368098, + "acc_norm_stderr": 0.039223782906109894 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5030864197530864, + "acc_stderr": 0.027820214158594363, + "acc_norm": 0.5030864197530864, + "acc_norm_stderr": 0.027820214158594363 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.04605661864718381, + "acc_norm": 0.3, + "acc_norm_stderr": 0.04605661864718381 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6010362694300518, + "acc_stderr": 0.035339990940656964, + "acc_norm": 0.6010362694300518, + "acc_norm_stderr": 0.035339990940656964 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.34210526315789475, + "acc_stderr": 0.04462917535336938, + "acc_norm": 0.34210526315789475, + "acc_norm_stderr": 0.04462917535336938 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6036697247706422, + "acc_stderr": 0.02097146994790053, + "acc_norm": 0.6036697247706422, + "acc_norm_stderr": 0.02097146994790053 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.36507936507936506, + "acc_stderr": 0.04306241259127153, + "acc_norm": 0.36507936507936506, + "acc_norm_stderr": 0.04306241259127153 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5163398692810458, + "acc_stderr": 0.02861462475280544, + "acc_norm": 0.5163398692810458, + "acc_norm_stderr": 0.02861462475280544 + }, + 
"harness|ko_mmlu_business_ethics|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6694214876033058, + "acc_stderr": 0.042943408452120926, + "acc_norm": 0.6694214876033058, + "acc_norm_stderr": 0.042943408452120926 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5, + "acc_stderr": 0.04068942293855797, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04068942293855797 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4395424836601307, + "acc_stderr": 0.020079420408087918, + "acc_norm": 0.4395424836601307, + "acc_norm_stderr": 0.020079420408087918 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.40070921985815605, + "acc_stderr": 0.02923346574557309, + "acc_norm": 0.40070921985815605, + "acc_norm_stderr": 0.02923346574557309 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4017857142857143, + "acc_stderr": 0.04653333146973647, + "acc_norm": 0.4017857142857143, + "acc_norm_stderr": 0.04653333146973647 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4351851851851852, + "acc_stderr": 0.03381200005643526, + "acc_norm": 0.4351851851851852, + "acc_norm_stderr": 0.03381200005643526 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.3340782122905028, + "acc_stderr": 0.015774911422381615, + "acc_norm": 0.3340782122905028, + "acc_norm_stderr": 0.015774911422381615 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.47794117647058826, + "acc_stderr": 0.030343264224213528, + "acc_norm": 0.47794117647058826, + "acc_norm_stderr": 0.030343264224213528 + }, 
+ "harness|ko_mmlu_security_studies|5": { + "acc": 0.49795918367346936, + "acc_stderr": 0.0320089533497105, + "acc_norm": 0.49795918367346936, + "acc_norm_stderr": 0.0320089533497105 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6582278481012658, + "acc_stderr": 0.030874537537553617, + "acc_norm": 0.6582278481012658, + "acc_norm_stderr": 0.030874537537553617 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3644067796610169, + "acc_stderr": 0.01229169498305648, + "acc_norm": 0.3644067796610169, + "acc_norm_stderr": 0.01229169498305648 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5637254901960784, + "acc_stderr": 0.03480693138457039, + "acc_norm": 0.5637254901960784, + "acc_norm_stderr": 0.03480693138457039 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6, + "acc_stderr": 0.03825460278380026, + "acc_norm": 0.6, + "acc_norm_stderr": 0.03825460278380026 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.4602203182374541, + "mc1_stderr": 0.017448017223960884, + "mc2": 0.6361581041542254, + "mc2_stderr": 0.015832326258647025 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5182998819362455, + "acc_stderr": 0.017178836639177745, + "acc_norm": 0.5336481700118064, + "acc_norm_stderr": 0.017151384117131876 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + 
"harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + 
"harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "JaeyeonKang/CCK-v2.0-DPO", + "model_sha": "84ceccea3e3cde7348a07f3e2bfb1f58b07b38ee", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/JaeyeonKang/CCK-v3/result_2024-01-05 05:28:58.json b/JaeyeonKang/CCK-v3/result_2024-01-05 05:28:58.json new file mode 100644 index 0000000000000000000000000000000000000000..5620a4e38c27308b02397bd715592a4a6c404b00 --- /dev/null +++ b/JaeyeonKang/CCK-v3/result_2024-01-05 05:28:58.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.35409556313993173, + "acc_stderr": 0.013975454122756557, + "acc_norm": 0.4249146757679181, + "acc_norm_stderr": 0.014445698968520767 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3908583947420832, + "acc_stderr": 0.004869455150933824, + "acc_norm": 0.563433578968333, + "acc_norm_stderr": 0.004949462563681344 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5847953216374269, + "acc_stderr": 0.037792759455032014, + "acc_norm": 0.5847953216374269, + "acc_norm_stderr": 0.037792759455032014 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6699029126213593, + "acc_stderr": 0.04656147110012352, + "acc_norm": 0.6699029126213593, + "acc_norm_stderr": 0.04656147110012352 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6283524904214559, + "acc_stderr": 0.017280802522133182, + "acc_norm": 0.6283524904214559, + "acc_norm_stderr": 0.017280802522133182 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.04316378599511326, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.04316378599511326 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_conceptual_physics|5": { + 
"acc": 0.4723404255319149, + "acc_stderr": 0.03263597118409769, + "acc_norm": 0.4723404255319149, + "acc_norm_stderr": 0.03263597118409769 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.45180722891566266, + "acc_stderr": 0.03874371556587953, + "acc_norm": 0.45180722891566266, + "acc_norm_stderr": 0.03874371556587953 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5852090032154341, + "acc_stderr": 0.02798268045975956, + "acc_norm": 0.5852090032154341, + "acc_norm_stderr": 0.02798268045975956 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5336322869955157, + "acc_stderr": 0.03348180017060306, + "acc_norm": 0.5336322869955157, + "acc_norm_stderr": 0.03348180017060306 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5877862595419847, + "acc_stderr": 0.04317171194870255, + "acc_norm": 0.5877862595419847, + "acc_norm_stderr": 0.04317171194870255 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.48, + "acc_stderr": 0.05021167315686779, + "acc_norm": 0.48, + "acc_norm_stderr": 0.05021167315686779 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.702020202020202, + "acc_stderr": 0.03258630383836556, + "acc_norm": 0.702020202020202, + "acc_norm_stderr": 0.03258630383836556 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4827586206896552, + "acc_stderr": 0.04164188720169377, + "acc_norm": 0.4827586206896552, + "acc_norm_stderr": 0.04164188720169377 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.04440521906179328, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.04440521906179328 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5630252100840336, + "acc_stderr": 0.03221943636566197, + "acc_norm": 0.5630252100840336, + "acc_norm_stderr": 0.03221943636566197 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5102564102564102, + "acc_stderr": 0.025345672221942374, + "acc_norm": 0.5102564102564102, + "acc_norm_stderr": 
0.025345672221942374 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.63, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.63, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5925925925925926, + "acc_stderr": 0.04750077341199985, + "acc_norm": 0.5925925925925926, + "acc_norm_stderr": 0.04750077341199985 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4039408866995074, + "acc_stderr": 0.03452453903822039, + "acc_norm": 0.4039408866995074, + "acc_norm_stderr": 0.03452453903822039 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.532258064516129, + "acc_stderr": 0.02838474778881334, + "acc_norm": 0.532258064516129, + "acc_norm_stderr": 0.02838474778881334 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7606837606837606, + "acc_stderr": 0.027951826808924336, + "acc_norm": 0.7606837606837606, + "acc_norm_stderr": 0.027951826808924336 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5433962264150943, + "acc_stderr": 0.030656748696739438, + "acc_norm": 0.5433962264150943, + "acc_norm_stderr": 0.030656748696739438 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5727272727272728, + "acc_stderr": 0.04738198703545483, + "acc_norm": 0.5727272727272728, + "acc_norm_stderr": 0.04738198703545483 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.32592592592592595, + "acc_stderr": 0.02857834836547308, + "acc_norm": 0.32592592592592595, + "acc_norm_stderr": 0.02857834836547308 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.32450331125827814, + "acc_stderr": 0.03822746937658753, + "acc_norm": 0.32450331125827814, + "acc_norm_stderr": 0.03822746937658753 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6865671641791045, + "acc_stderr": 0.03280188205348641, + "acc_norm": 0.6865671641791045, + 
"acc_norm_stderr": 0.03280188205348641 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4161849710982659, + "acc_stderr": 0.03758517775404947, + "acc_norm": 0.4161849710982659, + "acc_norm_stderr": 0.03758517775404947 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.41005291005291006, + "acc_stderr": 0.02533120243894443, + "acc_norm": 0.41005291005291006, + "acc_norm_stderr": 0.02533120243894443 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4652777777777778, + "acc_stderr": 0.04171115858181617, + "acc_norm": 0.4652777777777778, + "acc_norm_stderr": 0.04171115858181617 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.66, + "acc_stderr": 0.04760952285695238, + "acc_norm": 0.66, + "acc_norm_stderr": 0.04760952285695238 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5751445086705202, + "acc_stderr": 0.026613350840261736, + "acc_norm": 0.5751445086705202, + "acc_norm_stderr": 0.026613350840261736 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.48466257668711654, + "acc_stderr": 0.039265223787088424, + "acc_norm": 0.48466257668711654, + "acc_norm_stderr": 0.039265223787088424 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5771604938271605, + "acc_stderr": 0.027487472980871595, + "acc_norm": 0.5771604938271605, + "acc_norm_stderr": 0.027487472980871595 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6373056994818653, + "acc_stderr": 0.034697137917043715, + "acc_norm": 0.6373056994818653, + "acc_norm_stderr": 0.034697137917043715 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.42105263157894735, + "acc_stderr": 0.046446020912223177, + "acc_norm": 
0.42105263157894735, + "acc_norm_stderr": 0.046446020912223177 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6458715596330276, + "acc_stderr": 0.020504729013829114, + "acc_norm": 0.6458715596330276, + "acc_norm_stderr": 0.020504729013829114 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.042857142857142816, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.042857142857142816 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5620915032679739, + "acc_stderr": 0.02840830202033269, + "acc_norm": 0.5620915032679739, + "acc_norm_stderr": 0.02840830202033269 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6528925619834711, + "acc_stderr": 0.043457245702925335, + "acc_norm": 0.6528925619834711, + "acc_norm_stderr": 0.043457245702925335 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4868421052631579, + "acc_stderr": 0.04067533136309174, + "acc_norm": 0.4868421052631579, + "acc_norm_stderr": 0.04067533136309174 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4738562091503268, + "acc_stderr": 0.020200164564804588, + "acc_norm": 0.4738562091503268, + "acc_norm_stderr": 0.020200164564804588 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.38652482269503546, + "acc_stderr": 0.02904919034254346, + "acc_norm": 0.38652482269503546, + "acc_norm_stderr": 0.02904919034254346 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3392857142857143, + "acc_stderr": 0.044939490686135404, + "acc_norm": 0.3392857142857143, + "acc_norm_stderr": 0.044939490686135404 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4537037037037037, + "acc_stderr": 0.03395322726375798, + "acc_norm": 0.4537037037037037, + "acc_norm_stderr": 0.03395322726375798 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 
0.2201117318435754, + "acc_stderr": 0.013856994024227179, + "acc_norm": 0.2201117318435754, + "acc_norm_stderr": 0.013856994024227179 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.67, + "acc_stderr": 0.04725815626252607, + "acc_norm": 0.67, + "acc_norm_stderr": 0.04725815626252607 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.47794117647058826, + "acc_stderr": 0.030343264224213528, + "acc_norm": 0.47794117647058826, + "acc_norm_stderr": 0.030343264224213528 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5265306122448979, + "acc_stderr": 0.03196412734523272, + "acc_norm": 0.5265306122448979, + "acc_norm_stderr": 0.03196412734523272 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7172995780590717, + "acc_stderr": 0.029312814153955924, + "acc_norm": 0.7172995780590717, + "acc_norm_stderr": 0.029312814153955924 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.37614080834419816, + "acc_stderr": 0.0123722144305998, + "acc_norm": 0.37614080834419816, + "acc_norm_stderr": 0.0123722144305998 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6519607843137255, + "acc_stderr": 0.03343311240488418, + "acc_norm": 0.6519607843137255, + "acc_norm_stderr": 0.03343311240488418 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6606060606060606, + "acc_stderr": 0.036974422050315946, + "acc_norm": 0.6606060606060606, + "acc_norm_stderr": 0.036974422050315946 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2631578947368421, + "mc1_stderr": 0.015415241740237035, + "mc2": 0.46444473625227906, + "mc2_stderr": 0.016132820386457677 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4085005903187721, + "acc_stderr": 0.016900062879427122, + "acc_norm": 0.51357733175915, + "acc_norm_stderr": 0.01718401506040145 + } 
+ }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + 
"harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "JaeyeonKang/CCK-v3", + "model_sha": "2b1c13026f5b57ada825f937bbd9edd7142af375", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/JaeyeonKang/CCK_Gony_v3/result_2024-01-25 22:03:56.json b/JaeyeonKang/CCK_Gony_v3/result_2024-01-25 22:03:56.json new file mode 100644 index 0000000000000000000000000000000000000000..a94c43b599733718778723fbb6d3fbffec476581 --- /dev/null +++ b/JaeyeonKang/CCK_Gony_v3/result_2024-01-25 22:03:56.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4232081911262799, + "acc_stderr": 0.014438036220848034, + "acc_norm": 0.4854948805460751, + "acc_norm_stderr": 0.014605241081370053 + }, + "harness|ko_hellaswag|10": { + "acc": 0.43855805616411075, + "acc_stderr": 0.00495196413192131, + "acc_norm": 0.5791674965146385, + "acc_norm_stderr": 0.004926837572202163 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6140350877192983, + "acc_stderr": 0.03733756969066165, + "acc_norm": 0.6140350877192983, + "acc_norm_stderr": 0.03733756969066165 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6601941747572816, + "acc_stderr": 
0.04689765937278132, + "acc_norm": 0.6601941747572816, + "acc_norm_stderr": 0.04689765937278132 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5862068965517241, + "acc_stderr": 0.01761220408466376, + "acc_norm": 0.5862068965517241, + "acc_norm_stderr": 0.01761220408466376 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3851851851851852, + "acc_stderr": 0.042039210401562783, + "acc_norm": 0.3851851851851852, + "acc_norm_stderr": 0.042039210401562783 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4851063829787234, + "acc_stderr": 0.03267151848924777, + "acc_norm": 0.4851063829787234, + "acc_norm_stderr": 0.03267151848924777 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.43373493975903615, + "acc_stderr": 0.03858158940685515, + "acc_norm": 0.43373493975903615, + "acc_norm_stderr": 0.03858158940685515 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.594855305466238, + "acc_stderr": 0.02788238379132596, + "acc_norm": 0.594855305466238, + "acc_norm_stderr": 0.02788238379132596 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5515695067264574, + "acc_stderr": 0.03337883736255098, + "acc_norm": 0.5515695067264574, + "acc_norm_stderr": 0.03337883736255098 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5877862595419847, + "acc_stderr": 0.04317171194870254, + "acc_norm": 0.5877862595419847, + "acc_norm_stderr": 0.04317171194870254 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6717171717171717, + "acc_stderr": 0.033456784227567773, + "acc_norm": 0.6717171717171717, + "acc_norm_stderr": 0.033456784227567773 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5241379310344828, + "acc_stderr": 
0.041618085035015295, + "acc_norm": 0.5241379310344828, + "acc_norm_stderr": 0.041618085035015295 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3431372549019608, + "acc_stderr": 0.047240073523838876, + "acc_norm": 0.3431372549019608, + "acc_norm_stderr": 0.047240073523838876 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5630252100840336, + "acc_stderr": 0.03221943636566196, + "acc_norm": 0.5630252100840336, + "acc_norm_stderr": 0.03221943636566196 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5666666666666667, + "acc_stderr": 0.0251246535258851, + "acc_norm": 0.5666666666666667, + "acc_norm_stderr": 0.0251246535258851 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.63, + "acc_stderr": 0.04852365870939098, + "acc_norm": 0.63, + "acc_norm_stderr": 0.04852365870939098 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5648148148148148, + "acc_stderr": 0.04792898170907062, + "acc_norm": 0.5648148148148148, + "acc_norm_stderr": 0.04792898170907062 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4975369458128079, + "acc_stderr": 0.03517945038691063, + "acc_norm": 0.4975369458128079, + "acc_norm_stderr": 0.03517945038691063 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5612903225806452, + "acc_stderr": 0.02822949732031721, + "acc_norm": 0.5612903225806452, + "acc_norm_stderr": 0.02822949732031721 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.8205128205128205, + "acc_stderr": 0.02514093595033543, + "acc_norm": 0.8205128205128205, + "acc_norm_stderr": 0.02514093595033543 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5283018867924528, + "acc_stderr": 0.0307235352490061, + "acc_norm": 0.5283018867924528, + "acc_norm_stderr": 0.0307235352490061 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 
0.6090909090909091, + "acc_stderr": 0.04673752333670239, + "acc_norm": 0.6090909090909091, + "acc_norm_stderr": 0.04673752333670239 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.37407407407407406, + "acc_stderr": 0.02950286112895529, + "acc_norm": 0.37407407407407406, + "acc_norm_stderr": 0.02950286112895529 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3576158940397351, + "acc_stderr": 0.03913453431177258, + "acc_norm": 0.3576158940397351, + "acc_norm_stderr": 0.03913453431177258 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6567164179104478, + "acc_stderr": 0.03357379665433431, + "acc_norm": 0.6567164179104478, + "acc_norm_stderr": 0.03357379665433431 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4393063583815029, + "acc_stderr": 0.037842719328874674, + "acc_norm": 0.4393063583815029, + "acc_norm_stderr": 0.037842719328874674 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.46825396825396826, + "acc_stderr": 0.025699352832131796, + "acc_norm": 0.46825396825396826, + "acc_norm_stderr": 0.025699352832131796 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5, + "acc_stderr": 0.04181210050035455, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04181210050035455 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.67, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.67, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.6040462427745664, + "acc_stderr": 0.026329813341946243, + "acc_norm": 0.6040462427745664, + "acc_norm_stderr": 0.026329813341946243 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5153374233128835, + "acc_stderr": 0.03926522378708843, + "acc_norm": 0.5153374233128835, + "acc_norm_stderr": 0.03926522378708843 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 
0.5524691358024691, + "acc_stderr": 0.02766713856942271, + "acc_norm": 0.5524691358024691, + "acc_norm_stderr": 0.02766713856942271 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6476683937823834, + "acc_stderr": 0.034474782864143565, + "acc_norm": 0.6476683937823834, + "acc_norm_stderr": 0.034474782864143565 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.43859649122807015, + "acc_stderr": 0.04668000738510455, + "acc_norm": 0.43859649122807015, + "acc_norm_stderr": 0.04668000738510455 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6587155963302752, + "acc_stderr": 0.020328612816592446, + "acc_norm": 0.6587155963302752, + "acc_norm_stderr": 0.020328612816592446 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4523809523809524, + "acc_stderr": 0.044518079590553275, + "acc_norm": 0.4523809523809524, + "acc_norm_stderr": 0.044518079590553275 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5751633986928104, + "acc_stderr": 0.028304576673141114, + "acc_norm": 0.5751633986928104, + "acc_norm_stderr": 0.028304576673141114 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7520661157024794, + "acc_stderr": 0.03941897526516303, + "acc_norm": 0.7520661157024794, + "acc_norm_stderr": 0.03941897526516303 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.6118421052631579, + "acc_stderr": 0.03965842097512744, + "acc_norm": 0.6118421052631579, + "acc_norm_stderr": 0.03965842097512744 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.49836601307189543, + "acc_stderr": 0.020227726838150117, + "acc_norm": 0.49836601307189543, + "acc_norm_stderr": 0.020227726838150117 + }, + 
"harness|ko_mmlu_professional_accounting|5": { + "acc": 0.41843971631205673, + "acc_stderr": 0.029427994039419994, + "acc_norm": 0.41843971631205673, + "acc_norm_stderr": 0.029427994039419994 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.04697113923010213, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.04697113923010213 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.49537037037037035, + "acc_stderr": 0.03409825519163572, + "acc_norm": 0.49537037037037035, + "acc_norm_stderr": 0.03409825519163572 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.31731843575418994, + "acc_stderr": 0.015566392630057027, + "acc_norm": 0.31731843575418994, + "acc_norm_stderr": 0.015566392630057027 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.53, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.53, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.69, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.69, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.45955882352941174, + "acc_stderr": 0.03027332507734575, + "acc_norm": 0.45955882352941174, + "acc_norm_stderr": 0.03027332507734575 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.673469387755102, + "acc_stderr": 0.030021056238440313, + "acc_norm": 0.673469387755102, + "acc_norm_stderr": 0.030021056238440313 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6919831223628692, + "acc_stderr": 0.030052389335605688, + "acc_norm": 0.6919831223628692, + "acc_norm_stderr": 0.030052389335605688 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.41590612777053454, + "acc_stderr": 0.012588323850313613, + "acc_norm": 0.41590612777053454, + "acc_norm_stderr": 0.012588323850313613 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6421568627450981, + "acc_stderr": 
0.03364487286088299, + "acc_norm": 0.6421568627450981, + "acc_norm_stderr": 0.03364487286088299 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6242424242424243, + "acc_stderr": 0.03781887353205982, + "acc_norm": 0.6242424242424243, + "acc_norm_stderr": 0.03781887353205982 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.40514075887392903, + "mc1_stderr": 0.017185611727753368, + "mc2": 0.572169800530284, + "mc2_stderr": 0.01661482568785123 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5218417945690673, + "acc_stderr": 0.017173944474294375, + "acc_norm": 0.5289256198347108, + "acc_norm_stderr": 0.01716156394991635 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + 
"harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "JaeyeonKang/CCK_Gony_v3", + "model_sha": "73b5302f1efc7ba87e123cfed0c9c998e098c16a", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/JaeyeonKang/CCK_gony/result_2024-01-24 01:05:22.json b/JaeyeonKang/CCK_gony/result_2024-01-24 01:05:22.json new file mode 100644 index 0000000000000000000000000000000000000000..ee830e11ecaa79700502cc73c94668ff81951c52 --- /dev/null +++ b/JaeyeonKang/CCK_gony/result_2024-01-24 01:05:22.json @@ -0,0 +1,444 @@ +{ + 
"results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4052901023890785, + "acc_stderr": 0.014346869060229321, + "acc_norm": 0.48890784982935154, + "acc_norm_stderr": 0.014607794914013044 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4147580163314081, + "acc_stderr": 0.004916733258140295, + "acc_norm": 0.5487950607448715, + "acc_norm_stderr": 0.004965963647210315 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5730994152046783, + "acc_stderr": 0.03793620616529917, + "acc_norm": 0.5730994152046783, + "acc_norm_stderr": 0.03793620616529917 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6213592233009708, + "acc_stderr": 0.04802694698258974, + "acc_norm": 0.6213592233009708, + "acc_norm_stderr": 0.04802694698258974 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5440613026819924, + "acc_stderr": 0.017810403925435345, + "acc_norm": 0.5440613026819924, + "acc_norm_stderr": 0.017810403925435345 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45185185185185184, + "acc_stderr": 0.04299268905480864, + "acc_norm": 0.45185185185185184, + "acc_norm_stderr": 0.04299268905480864 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.43829787234042555, + "acc_stderr": 0.032436186361081025, + "acc_norm": 0.43829787234042555, + "acc_norm_stderr": 0.032436186361081025 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.41566265060240964, + "acc_stderr": 0.03836722176598053, + "acc_norm": 0.41566265060240964, + "acc_norm_stderr": 0.03836722176598053 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5498392282958199, + "acc_stderr": 0.02825666072336018, + "acc_norm": 0.5498392282958199, + "acc_norm_stderr": 0.02825666072336018 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.47533632286995514, + "acc_stderr": 0.033516951676526276, + "acc_norm": 0.47533632286995514, + "acc_norm_stderr": 
0.033516951676526276 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.45038167938931295, + "acc_stderr": 0.04363643698524779, + "acc_norm": 0.45038167938931295, + "acc_norm_stderr": 0.04363643698524779 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.53, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.53, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6262626262626263, + "acc_stderr": 0.034468977386593325, + "acc_norm": 0.6262626262626263, + "acc_norm_stderr": 0.034468977386593325 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5379310344827586, + "acc_stderr": 0.04154659671707548, + "acc_norm": 0.5379310344827586, + "acc_norm_stderr": 0.04154659671707548 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.04533838195929776, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.04533838195929776 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5042016806722689, + "acc_stderr": 0.03247734334448111, + "acc_norm": 0.5042016806722689, + "acc_norm_stderr": 0.03247734334448111 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5128205128205128, + "acc_stderr": 0.02534267129380725, + "acc_norm": 0.5128205128205128, + "acc_norm_stderr": 0.02534267129380725 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.65, + "acc_stderr": 0.047937248544110224, + "acc_norm": 0.65, + "acc_norm_stderr": 0.047937248544110224 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5740740740740741, + "acc_stderr": 0.0478034362693679, + "acc_norm": 0.5740740740740741, + "acc_norm_stderr": 0.0478034362693679 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.43842364532019706, + "acc_stderr": 0.03491207857486519, + "acc_norm": 
0.43842364532019706, + "acc_norm_stderr": 0.03491207857486519 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5129032258064516, + "acc_stderr": 0.02843453315268187, + "acc_norm": 0.5129032258064516, + "acc_norm_stderr": 0.02843453315268187 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.782051282051282, + "acc_stderr": 0.027046857630716667, + "acc_norm": 0.782051282051282, + "acc_norm_stderr": 0.027046857630716667 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.49056603773584906, + "acc_stderr": 0.030767394707808093, + "acc_norm": 0.49056603773584906, + "acc_norm_stderr": 0.030767394707808093 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.04769300568972744, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.04769300568972744 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.02944316932303154, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.02944316932303154 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3708609271523179, + "acc_stderr": 0.03943966699183629, + "acc_norm": 0.3708609271523179, + "acc_norm_stderr": 0.03943966699183629 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.03333333333333335, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.03333333333333335 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.44508670520231214, + "acc_stderr": 0.03789401760283647, + "acc_norm": 0.44508670520231214, + "acc_norm_stderr": 0.03789401760283647 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.4126984126984127, + "acc_stderr": 0.025355741263055266, + "acc_norm": 0.4126984126984127, + "acc_norm_stderr": 0.025355741263055266 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3263888888888889, + "acc_stderr": 0.03921067198982266, + "acc_norm": 0.3263888888888889, + "acc_norm_stderr": 0.03921067198982266 + }, + 
"harness|ko_mmlu_college_chemistry|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237101, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237101 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5404624277456648, + "acc_stderr": 0.026830805998952247, + "acc_norm": 0.5404624277456648, + "acc_norm_stderr": 0.026830805998952247 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5153374233128835, + "acc_stderr": 0.039265223787088445, + "acc_norm": 0.5153374233128835, + "acc_norm_stderr": 0.039265223787088445 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5216049382716049, + "acc_stderr": 0.027794760105008736, + "acc_norm": 0.5216049382716049, + "acc_norm_stderr": 0.027794760105008736 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5958549222797928, + "acc_stderr": 0.0354150857888402, + "acc_norm": 0.5958549222797928, + "acc_norm_stderr": 0.0354150857888402 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.40350877192982454, + "acc_stderr": 0.04615186962583703, + "acc_norm": 0.40350877192982454, + "acc_norm_stderr": 0.04615186962583703 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5779816513761468, + "acc_stderr": 0.02117499140776317, + "acc_norm": 0.5779816513761468, + "acc_norm_stderr": 0.02117499140776317 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.0442626668137991, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.0442626668137991 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5424836601307189, + "acc_stderr": 0.028526383452142638, + "acc_norm": 0.5424836601307189, + "acc_norm_stderr": 0.028526383452142638 + }, 
+ "harness|ko_mmlu_business_ethics|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.743801652892562, + "acc_stderr": 0.03984979653302872, + "acc_norm": 0.743801652892562, + "acc_norm_stderr": 0.03984979653302872 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5657894736842105, + "acc_stderr": 0.04033565667848319, + "acc_norm": 0.5657894736842105, + "acc_norm_stderr": 0.04033565667848319 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4395424836601307, + "acc_stderr": 0.020079420408087915, + "acc_norm": 0.4395424836601307, + "acc_norm_stderr": 0.020079420408087915 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3723404255319149, + "acc_stderr": 0.028838921471251458, + "acc_norm": 0.3723404255319149, + "acc_norm_stderr": 0.028838921471251458 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.36607142857142855, + "acc_stderr": 0.0457237235873743, + "acc_norm": 0.36607142857142855, + "acc_norm_stderr": 0.0457237235873743 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4537037037037037, + "acc_stderr": 0.033953227263757976, + "acc_norm": 0.4537037037037037, + "acc_norm_stderr": 0.033953227263757976 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.25921787709497207, + "acc_stderr": 0.014655780837497736, + "acc_norm": 0.25921787709497207, + "acc_norm_stderr": 0.014655780837497736 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.66, + "acc_stderr": 0.04760952285695237, + "acc_norm": 0.66, + "acc_norm_stderr": 0.04760952285695237 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.41911764705882354, + "acc_stderr": 0.029972807170464626, + "acc_norm": 0.41911764705882354, + 
"acc_norm_stderr": 0.029972807170464626 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6612244897959184, + "acc_stderr": 0.030299506562154185, + "acc_norm": 0.6612244897959184, + "acc_norm_stderr": 0.030299506562154185 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.030685820596610822, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.030685820596610822 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.36897001303780963, + "acc_stderr": 0.012323936650174868, + "acc_norm": 0.36897001303780963, + "acc_norm_stderr": 0.012323936650174868 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5833333333333334, + "acc_stderr": 0.03460228327239171, + "acc_norm": 0.5833333333333334, + "acc_norm_stderr": 0.03460228327239171 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5696969696969697, + "acc_stderr": 0.03866225962879077, + "acc_norm": 0.5696969696969697, + "acc_norm_stderr": 0.03866225962879077 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2864137086903305, + "mc1_stderr": 0.01582614243950234, + "mc2": 0.444630445301053, + "mc2_stderr": 0.015630530201548653 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.42857142857142855, + "acc_stderr": 0.017014038119297487, + "acc_norm": 0.4498229043683589, + "acc_norm_stderr": 0.01710357334382571 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + 
"harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + 
"harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "JaeyeonKang/CCK_gony", + "model_sha": "b4aabee75e463b0a4caead1ac27b7fa13e16ffdd", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Jenti-Kaeri/ko-llama2-13b-OrcaPlatypus/result_2023-11-06 07:34:14.json b/Jenti-Kaeri/ko-llama2-13b-OrcaPlatypus/result_2023-11-06 07:34:14.json new file mode 100644 index 0000000000000000000000000000000000000000..7a6cc48abb238803e71b1c452511f4e9446d4eea --- /dev/null +++ b/Jenti-Kaeri/ko-llama2-13b-OrcaPlatypus/result_2023-11-06 07:34:14.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.36860068259385664, + "acc_stderr": 0.014097810678042194, + "acc_norm": 0.4249146757679181, + "acc_norm_stderr": 0.014445698968520769 + }, + "harness|ko_hellaswag|10": { + "acc": 0.40659231228838877, + "acc_stderr": 0.00490193651154613, + "acc_norm": 0.5416251742680741, + "acc_norm_stderr": 0.004972460206842306 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5146198830409356, + "acc_stderr": 0.038331852752130254, + "acc_norm": 0.5146198830409356, + "acc_norm_stderr": 0.038331852752130254 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.49514563106796117, + "acc_stderr": 0.049505043821289195, + "acc_norm": 0.49514563106796117, + "acc_norm_stderr": 0.049505043821289195 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5070242656449553, + "acc_stderr": 0.017878199003432214, + "acc_norm": 0.5070242656449553, + "acc_norm_stderr": 0.017878199003432214 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3851851851851852, + "acc_stderr": 0.042039210401562783, + "acc_norm": 0.3851851851851852, + "acc_norm_stderr": 0.042039210401562783 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 
0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.33617021276595743, + "acc_stderr": 0.030881618520676942, + "acc_norm": 0.33617021276595743, + "acc_norm_stderr": 0.030881618520676942 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.40963855421686746, + "acc_stderr": 0.038284011150790206, + "acc_norm": 0.40963855421686746, + "acc_norm_stderr": 0.038284011150790206 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4565916398713826, + "acc_stderr": 0.028290869054197598, + "acc_norm": 0.4565916398713826, + "acc_norm_stderr": 0.028290869054197598 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.37668161434977576, + "acc_stderr": 0.032521134899291884, + "acc_norm": 0.37668161434977576, + "acc_norm_stderr": 0.032521134899291884 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5114503816793893, + "acc_stderr": 0.043841400240780176, + "acc_norm": 0.5114503816793893, + "acc_norm_stderr": 0.043841400240780176 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621503, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621503 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.4696969696969697, + "acc_stderr": 0.03555804051763929, + "acc_norm": 0.4696969696969697, + "acc_norm_stderr": 0.03555804051763929 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3793103448275862, + "acc_stderr": 0.04043461861916747, + "acc_norm": 0.3793103448275862, + "acc_norm_stderr": 0.04043461861916747 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.044405219061793275, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.044405219061793275 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3907563025210084, + "acc_stderr": 0.031693802357129965, + "acc_norm": 0.3907563025210084, + "acc_norm_stderr": 0.031693802357129965 + }, + 
"harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.37948717948717947, + "acc_stderr": 0.024603626924097413, + "acc_norm": 0.37948717948717947, + "acc_norm_stderr": 0.024603626924097413 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.42592592592592593, + "acc_stderr": 0.0478034362693679, + "acc_norm": 0.42592592592592593, + "acc_norm_stderr": 0.0478034362693679 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3645320197044335, + "acc_stderr": 0.0338640574606209, + "acc_norm": 0.3645320197044335, + "acc_norm_stderr": 0.0338640574606209 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4483870967741935, + "acc_stderr": 0.028292056830112735, + "acc_norm": 0.4483870967741935, + "acc_norm_stderr": 0.028292056830112735 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.594017094017094, + "acc_stderr": 0.03217180182641086, + "acc_norm": 0.594017094017094, + "acc_norm_stderr": 0.03217180182641086 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4339622641509434, + "acc_stderr": 0.0305032920133426, + "acc_norm": 0.4339622641509434, + "acc_norm_stderr": 0.0305032920133426 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.44545454545454544, + "acc_stderr": 0.047605488214603246, + "acc_norm": 0.44545454545454544, + "acc_norm_stderr": 0.047605488214603246 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.02794045713622841, + "acc_norm": 0.3, + "acc_norm_stderr": 0.02794045713622841 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.037101857261199946, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 
0.037101857261199946 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5522388059701493, + "acc_stderr": 0.03516184772952167, + "acc_norm": 0.5522388059701493, + "acc_norm_stderr": 0.03516184772952167 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3583815028901734, + "acc_stderr": 0.03656343653353159, + "acc_norm": 0.3583815028901734, + "acc_norm_stderr": 0.03656343653353159 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.31216931216931215, + "acc_stderr": 0.023865206836972592, + "acc_norm": 0.31216931216931215, + "acc_norm_stderr": 0.023865206836972592 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3194444444444444, + "acc_stderr": 0.03899073687357335, + "acc_norm": 0.3194444444444444, + "acc_norm_stderr": 0.03899073687357335 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4277456647398844, + "acc_stderr": 0.026636539741116072, + "acc_norm": 0.4277456647398844, + "acc_norm_stderr": 0.026636539741116072 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3619631901840491, + "acc_stderr": 0.037757007291414416, + "acc_norm": 0.3619631901840491, + "acc_norm_stderr": 0.037757007291414416 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4660493827160494, + "acc_stderr": 0.02775653525734767, + "acc_norm": 0.4660493827160494, + "acc_norm_stderr": 0.02775653525734767 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.45595854922279794, + "acc_stderr": 0.03594413711272436, + "acc_norm": 0.45595854922279794, + 
"acc_norm_stderr": 0.03594413711272436 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.22807017543859648, + "acc_stderr": 0.03947152782669415, + "acc_norm": 0.22807017543859648, + "acc_norm_stderr": 0.03947152782669415 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.46972477064220186, + "acc_stderr": 0.021397988604936965, + "acc_norm": 0.46972477064220186, + "acc_norm_stderr": 0.021397988604936965 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.040406101782088394, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.040406101782088394 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.434640522875817, + "acc_stderr": 0.028384256704883034, + "acc_norm": 0.434640522875817, + "acc_norm_stderr": 0.028384256704883034 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5371900826446281, + "acc_stderr": 0.04551711196104218, + "acc_norm": 0.5371900826446281, + "acc_norm_stderr": 0.04551711196104218 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.45394736842105265, + "acc_stderr": 0.040516463428741406, + "acc_norm": 0.45394736842105265, + "acc_norm_stderr": 0.040516463428741406 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.31209150326797386, + "acc_stderr": 0.018745011201277657, + "acc_norm": 0.31209150326797386, + "acc_norm_stderr": 0.018745011201277657 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3120567375886525, + "acc_stderr": 0.027640120545169945, + "acc_norm": 0.3120567375886525, + "acc_norm_stderr": 0.027640120545169945 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.1875, + "acc_stderr": 0.0370468111477387, + "acc_norm": 0.1875, + "acc_norm_stderr": 0.0370468111477387 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3287037037037037, + "acc_stderr": 
0.032036140846700596, + "acc_norm": 0.3287037037037037, + "acc_norm_stderr": 0.032036140846700596 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.22426470588235295, + "acc_stderr": 0.025336848563332338, + "acc_norm": 0.22426470588235295, + "acc_norm_stderr": 0.025336848563332338 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4897959183673469, + "acc_stderr": 0.03200255347893782, + "acc_norm": 0.4897959183673469, + "acc_norm_stderr": 0.03200255347893782 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.48945147679324896, + "acc_stderr": 0.032539983791662855, + "acc_norm": 0.48945147679324896, + "acc_norm_stderr": 0.032539983791662855 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.29726205997392435, + "acc_stderr": 0.011673346173086048, + "acc_norm": 0.29726205997392435, + "acc_norm_stderr": 0.011673346173086048 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.034602283272391704, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.034602283272391704 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.49696969696969695, + "acc_stderr": 0.03904272341431856, + "acc_norm": 0.49696969696969695, + "acc_norm_stderr": 0.03904272341431856 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.26805385556915545, + "mc1_stderr": 0.015506204722834559, + "mc2": 0.4313245637601363, + "mc2_stderr": 0.01494158153176466 + }, + 
"harness|ko_commongen_v2|2": { + "acc": 0.44510035419126326, + "acc_stderr": 0.017086417431005474, + "acc_norm": 0.5360094451003542, + "acc_norm_stderr": 0.017145715365486664 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + 
"harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Jenti-Kaeri/ko-llama2-13b-OrcaPlatypus", + "model_sha": "80952bf913ab217ee77ee0328f3c9e68cc1abf22", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Jenti-Kaeri/ko-llama2-13b-platypus/result_2023-11-06 11:48:16.json b/Jenti-Kaeri/ko-llama2-13b-platypus/result_2023-11-06 11:48:16.json new file mode 100644 index 0000000000000000000000000000000000000000..f7666ce48b813438ef3bba1ed27f43129ebb30f9 --- /dev/null +++ b/Jenti-Kaeri/ko-llama2-13b-platypus/result_2023-11-06 11:48:16.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3199658703071672, + "acc_stderr": 0.013631345807016195, + "acc_norm": 0.3779863481228669, + "acc_norm_stderr": 0.014169664520303101 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3586934873531169, + "acc_stderr": 0.004786368011500456, + "acc_norm": 0.4553873730332603, + "acc_norm_stderr": 0.0049698795328430865 + }, + 
"harness|ko_mmlu_world_religions|5": { + "acc": 0.39766081871345027, + "acc_stderr": 0.0375363895576169, + "acc_norm": 0.39766081871345027, + "acc_norm_stderr": 0.0375363895576169 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4368932038834951, + "acc_stderr": 0.04911147107365778, + "acc_norm": 0.4368932038834951, + "acc_norm_stderr": 0.04911147107365778 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.44189016602809705, + "acc_stderr": 0.01775880053421442, + "acc_norm": 0.44189016602809705, + "acc_norm_stderr": 0.01775880053421442 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.35555555555555557, + "acc_stderr": 0.04135176749720386, + "acc_norm": 0.35555555555555557, + "acc_norm_stderr": 0.04135176749720386 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.35319148936170214, + "acc_stderr": 0.03124532520276193, + "acc_norm": 0.35319148936170214, + "acc_norm_stderr": 0.03124532520276193 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3253012048192771, + "acc_stderr": 0.03647168523683227, + "acc_norm": 0.3253012048192771, + "acc_norm_stderr": 0.03647168523683227 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3890675241157556, + "acc_stderr": 0.027690337536485372, + "acc_norm": 0.3890675241157556, + "acc_norm_stderr": 0.027690337536485372 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.33183856502242154, + "acc_stderr": 0.031602951437766785, + "acc_norm": 0.33183856502242154, + "acc_norm_stderr": 0.031602951437766785 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5190839694656488, + "acc_stderr": 0.04382094705550988, + "acc_norm": 0.5190839694656488, + "acc_norm_stderr": 0.04382094705550988 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + 
"harness|ko_mmlu_high_school_geography|5": { + "acc": 0.4595959595959596, + "acc_stderr": 0.035507024651313425, + "acc_norm": 0.4595959595959596, + "acc_norm_stderr": 0.035507024651313425 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3724137931034483, + "acc_stderr": 0.0402873153294756, + "acc_norm": 0.3724137931034483, + "acc_norm_stderr": 0.0402873153294756 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237655, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237655 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4369747899159664, + "acc_stderr": 0.03221943636566196, + "acc_norm": 0.4369747899159664, + "acc_norm_stderr": 0.03221943636566196 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.39487179487179486, + "acc_stderr": 0.02478431694215637, + "acc_norm": 0.39487179487179486, + "acc_norm_stderr": 0.02478431694215637 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.048262172941398944, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.048262172941398944 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.30049261083743845, + "acc_stderr": 0.03225799476233485, + "acc_norm": 0.30049261083743845, + "acc_norm_stderr": 0.03225799476233485 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4064516129032258, + "acc_stderr": 0.027941727346256315, + "acc_norm": 0.4064516129032258, + "acc_norm_stderr": 0.027941727346256315 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5470085470085471, + "acc_stderr": 0.03261099873098619, + "acc_norm": 
0.5470085470085471, + "acc_norm_stderr": 0.03261099873098619 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.37735849056603776, + "acc_stderr": 0.029832808114796005, + "acc_norm": 0.37735849056603776, + "acc_norm_stderr": 0.029832808114796005 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4, + "acc_stderr": 0.0469237132203465, + "acc_norm": 0.4, + "acc_norm_stderr": 0.0469237132203465 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24814814814814815, + "acc_stderr": 0.0263357394040558, + "acc_norm": 0.24814814814814815, + "acc_norm_stderr": 0.0263357394040558 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.038020397601079024, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.038020397601079024 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5174129353233831, + "acc_stderr": 0.03533389234739245, + "acc_norm": 0.5174129353233831, + "acc_norm_stderr": 0.03533389234739245 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3930635838150289, + "acc_stderr": 0.03724249595817731, + "acc_norm": 0.3930635838150289, + "acc_norm_stderr": 0.03724249595817731 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.29894179894179895, + "acc_stderr": 0.02357760479165581, + "acc_norm": 0.29894179894179895, + "acc_norm_stderr": 0.02357760479165581 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.040166600304512336, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.040166600304512336 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.45375722543352603, + "acc_stderr": 0.026803720583206188, + 
"acc_norm": 0.45375722543352603, + "acc_norm_stderr": 0.026803720583206188 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.31901840490797545, + "acc_stderr": 0.03661997551073836, + "acc_norm": 0.31901840490797545, + "acc_norm_stderr": 0.03661997551073836 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.027125115513166865, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.027125115513166865 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.48704663212435234, + "acc_stderr": 0.0360722806104775, + "acc_norm": 0.48704663212435234, + "acc_norm_stderr": 0.0360722806104775 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.039994238792813365, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.039994238792813365 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.42752293577981654, + "acc_stderr": 0.02121091020430043, + "acc_norm": 0.42752293577981654, + "acc_norm_stderr": 0.02121091020430043 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3253968253968254, + "acc_stderr": 0.04190596438871136, + "acc_norm": 0.3253968253968254, + "acc_norm_stderr": 0.04190596438871136 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4215686274509804, + "acc_stderr": 0.028275490156791434, + "acc_norm": 0.4215686274509804, + "acc_norm_stderr": 0.028275490156791434 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5371900826446281, + "acc_stderr": 0.04551711196104218, + "acc_norm": 0.5371900826446281, + "acc_norm_stderr": 0.04551711196104218 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3815789473684211, + 
"acc_stderr": 0.03953173377749194, + "acc_norm": 0.3815789473684211, + "acc_norm_stderr": 0.03953173377749194 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.28921568627450983, + "acc_stderr": 0.018342529845275908, + "acc_norm": 0.28921568627450983, + "acc_norm_stderr": 0.018342529845275908 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2801418439716312, + "acc_stderr": 0.02678917235114023, + "acc_norm": 0.2801418439716312, + "acc_norm_stderr": 0.02678917235114023 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.19642857142857142, + "acc_stderr": 0.03770970049347019, + "acc_norm": 0.19642857142857142, + "acc_norm_stderr": 0.03770970049347019 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4027777777777778, + "acc_stderr": 0.03344887382997866, + "acc_norm": 0.4027777777777778, + "acc_norm_stderr": 0.03344887382997866 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.35, + "acc_stderr": 0.04793724854411019, + "acc_norm": 0.35, + "acc_norm_stderr": 0.04793724854411019 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.40441176470588236, + "acc_stderr": 0.029812630701569746, + "acc_norm": 0.40441176470588236, + "acc_norm_stderr": 0.029812630701569746 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.46530612244897956, + "acc_stderr": 0.03193207024425314, + "acc_norm": 0.46530612244897956, + "acc_norm_stderr": 0.03193207024425314 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.27848101265822783, + "acc_stderr": 0.029178682304842538, + "acc_norm": 0.27848101265822783, + "acc_norm_stderr": 
0.029178682304842538 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2790091264667536, + "acc_stderr": 0.011455208832803538, + "acc_norm": 0.2790091264667536, + "acc_norm_stderr": 0.011455208832803538 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.29901960784313725, + "acc_stderr": 0.03213325717373616, + "acc_norm": 0.29901960784313725, + "acc_norm_stderr": 0.03213325717373616 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3515151515151515, + "acc_stderr": 0.0372820699868265, + "acc_norm": 0.3515151515151515, + "acc_norm_stderr": 0.0372820699868265 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.29253365973072215, + "mc1_stderr": 0.015925597445286165, + "mc2": 0.46212361951327446, + "mc2_stderr": 0.015249362527618285 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3955135773317591, + "acc_stderr": 0.016810815902206046, + "acc_norm": 0.4757969303423849, + "acc_norm_stderr": 0.017170202466520748 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, 
+ "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Jenti-Kaeri/ko-llama2-13b-platypus", + "model_sha": "f20decdd9d1525560ce299352c7ee6421c5ec81d", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff 
--git a/Jsoo/Llama3-beomi-Open-Ko-8B-Instruct-preview-test6/result_2024-06-05 08:36:41.json b/Jsoo/Llama3-beomi-Open-Ko-8B-Instruct-preview-test6/result_2024-06-05 08:36:41.json new file mode 100644 index 0000000000000000000000000000000000000000..9b285376d16a91f4d5f74920f45a1db65013af65 --- /dev/null +++ b/Jsoo/Llama3-beomi-Open-Ko-8B-Instruct-preview-test6/result_2024-06-05 08:36:41.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.22610921501706485, + "acc_stderr": 0.012224202097063265, + "acc_norm": 0.2773037542662116, + "acc_norm_stderr": 0.013082095839059376 + }, + "harness|ko_hellaswag|10": { + "acc": 0.291575383389763, + "acc_stderr": 0.0045355897592026535, + "acc_norm": 0.33260306711810395, + "acc_norm_stderr": 0.004701828071992637 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.3391812865497076, + "acc_stderr": 0.03631053496488905, + "acc_norm": 0.3391812865497076, + "acc_norm_stderr": 0.03631053496488905 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.20388349514563106, + "acc_stderr": 0.0398913985953177, + "acc_norm": 0.20388349514563106, + "acc_norm_stderr": 0.0398913985953177 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.30779054916985954, + "acc_stderr": 0.016506045045155633, + "acc_norm": 0.30779054916985954, + "acc_norm_stderr": 0.016506045045155633 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3111111111111111, + "acc_stderr": 0.03999262876617723, + "acc_norm": 0.3111111111111111, + "acc_norm_stderr": 0.03999262876617723 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2680851063829787, + "acc_stderr": 0.028957342788342347, + "acc_norm": 0.2680851063829787, + "acc_norm_stderr": 0.028957342788342347 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.30120481927710846, + "acc_stderr": 0.0357160923005348, + "acc_norm": 
0.30120481927710846, + "acc_norm_stderr": 0.0357160923005348 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3086816720257235, + "acc_stderr": 0.026236965881153266, + "acc_norm": 0.3086816720257235, + "acc_norm_stderr": 0.026236965881153266 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.33183856502242154, + "acc_stderr": 0.03160295143776679, + "acc_norm": 0.33183856502242154, + "acc_norm_stderr": 0.03160295143776679 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2824427480916031, + "acc_stderr": 0.03948406125768361, + "acc_norm": 0.2824427480916031, + "acc_norm_stderr": 0.03948406125768361 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.1919191919191919, + "acc_stderr": 0.02805779167298901, + "acc_norm": 0.1919191919191919, + "acc_norm_stderr": 0.02805779167298901 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2620689655172414, + "acc_stderr": 0.036646663372252565, + "acc_norm": 0.2620689655172414, + "acc_norm_stderr": 0.036646663372252565 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237657, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237657 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.2605042016806723, + "acc_stderr": 0.028510251512341937, + "acc_norm": 0.2605042016806723, + "acc_norm_stderr": 0.028510251512341937 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2512820512820513, + "acc_stderr": 0.021992016662370547, + "acc_norm": 0.2512820512820513, + "acc_norm_stderr": 0.021992016662370547 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 
0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04557239513497752, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04557239513497752 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.29064039408866993, + "acc_stderr": 0.0319474007226554, + "acc_norm": 0.29064039408866993, + "acc_norm_stderr": 0.0319474007226554 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.24193548387096775, + "acc_stderr": 0.0243625996930311, + "acc_norm": 0.24193548387096775, + "acc_norm_stderr": 0.0243625996930311 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.4017094017094017, + "acc_stderr": 0.03211693751051622, + "acc_norm": 0.4017094017094017, + "acc_norm_stderr": 0.03211693751051622 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.23773584905660378, + "acc_stderr": 0.026199808807561925, + "acc_norm": 0.23773584905660378, + "acc_norm_stderr": 0.026199808807561925 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.23636363636363636, + "acc_stderr": 0.04069306319721375, + "acc_norm": 0.23636363636363636, + "acc_norm_stderr": 0.04069306319721375 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.23703703703703705, + "acc_stderr": 0.02592887613276612, + "acc_norm": 0.23703703703703705, + "acc_norm_stderr": 0.02592887613276612 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.03802039760107903, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.03802039760107903 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.2885572139303483, + "acc_stderr": 0.03203841040213321, + "acc_norm": 0.2885572139303483, + "acc_norm_stderr": 0.03203841040213321 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.24277456647398843, + "acc_stderr": 0.0326926380614177, + "acc_norm": 0.24277456647398843, + "acc_norm_stderr": 0.0326926380614177 + }, + 
"harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.23544973544973544, + "acc_stderr": 0.021851509822031708, + "acc_norm": 0.23544973544973544, + "acc_norm_stderr": 0.021851509822031708 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2569444444444444, + "acc_stderr": 0.03653946969442099, + "acc_norm": 0.2569444444444444, + "acc_norm_stderr": 0.03653946969442099 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.19, + "acc_stderr": 0.03942772444036623, + "acc_norm": 0.19, + "acc_norm_stderr": 0.03942772444036623 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2774566473988439, + "acc_stderr": 0.024105712607754307, + "acc_norm": 0.2774566473988439, + "acc_norm_stderr": 0.024105712607754307 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.24539877300613497, + "acc_stderr": 0.03380939813943354, + "acc_norm": 0.24539877300613497, + "acc_norm_stderr": 0.03380939813943354 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.024383665531035454, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.024383665531035454 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.23834196891191708, + "acc_stderr": 0.030748905363909902, + "acc_norm": 0.23834196891191708, + "acc_norm_stderr": 0.030748905363909902 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.041424397194893624, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.041424397194893624 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.2018348623853211, + "acc_stderr": 0.017208579357787572, + "acc_norm": 0.2018348623853211, + 
"acc_norm_stderr": 0.017208579357787572 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.1984126984126984, + "acc_stderr": 0.035670166752768635, + "acc_norm": 0.1984126984126984, + "acc_norm_stderr": 0.035670166752768635 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.238562091503268, + "acc_stderr": 0.02440439492808787, + "acc_norm": 0.238562091503268, + "acc_norm_stderr": 0.02440439492808787 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2975206611570248, + "acc_stderr": 0.041733491480835, + "acc_norm": 0.2975206611570248, + "acc_norm_stderr": 0.041733491480835 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.19736842105263158, + "acc_stderr": 0.03238981601699397, + "acc_norm": 0.19736842105263158, + "acc_norm_stderr": 0.03238981601699397 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2630718954248366, + "acc_stderr": 0.017812676542320653, + "acc_norm": 0.2630718954248366, + "acc_norm_stderr": 0.017812676542320653 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2695035460992908, + "acc_stderr": 0.026469036818590627, + "acc_norm": 0.2695035460992908, + "acc_norm_stderr": 0.026469036818590627 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.32142857142857145, + "acc_stderr": 0.044328040552915206, + "acc_norm": 0.32142857142857145, + "acc_norm_stderr": 0.044328040552915206 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2037037037037037, + "acc_stderr": 0.027467401804057993, + "acc_norm": 0.2037037037037037, + "acc_norm_stderr": 0.027467401804057993 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23798882681564246, + "acc_stderr": 0.014242630070574892, + "acc_norm": 0.23798882681564246, + "acc_norm_stderr": 0.014242630070574892 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.25, + "acc_stderr": 
0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.1948529411764706, + "acc_stderr": 0.024060599423487424, + "acc_norm": 0.1948529411764706, + "acc_norm_stderr": 0.024060599423487424 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.23265306122448978, + "acc_stderr": 0.02704925791589618, + "acc_norm": 0.23265306122448978, + "acc_norm_stderr": 0.02704925791589618 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.29957805907172996, + "acc_stderr": 0.029818024749753102, + "acc_norm": 0.29957805907172996, + "acc_norm_stderr": 0.029818024749753102 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.26401564537157757, + "acc_stderr": 0.011258435537723821, + "acc_norm": 0.26401564537157757, + "acc_norm_stderr": 0.011258435537723821 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.030190282453501943, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.030190282453501943 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2727272727272727, + "acc_stderr": 0.03477691162163659, + "acc_norm": 0.2727272727272727, + "acc_norm_stderr": 0.03477691162163659 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24724602203182375, + "mc1_stderr": 0.015102404797359649, + "mc2": 0.3974651112099813, + "mc2_stderr": 0.015664751550314805 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.27744982290436837, + "acc_stderr": 0.015393630236605971, + "acc_norm": 0.33884297520661155, + "acc_norm_stderr": 0.01627295299701913 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + 
"harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 
1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Jsoo/Llama3-beomi-Open-Ko-8B-Instruct-preview-test6", + "model_sha": "378d00041e2d36454baad215ed7066172a25d956", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Jsoo/Saltware-solar-10.7b-v1.0/result_2024-06-05 08:34:02.json b/Jsoo/Saltware-solar-10.7b-v1.0/result_2024-06-05 08:34:02.json new file mode 100644 index 0000000000000000000000000000000000000000..63e7758e18eea05738c6cd856639b1832d512efe --- /dev/null +++ b/Jsoo/Saltware-solar-10.7b-v1.0/result_2024-06-05 08:34:02.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.34726962457337884, + "acc_stderr": 0.013913034529620448, + "acc_norm": 0.40187713310580203, + "acc_norm_stderr": 0.014327268614578278 + }, + "harness|ko_hellaswag|10": { + "acc": 0.38966341366261703, + "acc_stderr": 0.004866772373029929, + "acc_norm": 0.4992033459470225, + "acc_norm_stderr": 0.004989775077835655 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5263157894736842, + "acc_stderr": 0.03829509868994727, + "acc_norm": 0.5263157894736842, + "acc_norm_stderr": 0.03829509868994727 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5728155339805825, + "acc_stderr": 0.04897957737781168, + "acc_norm": 0.5728155339805825, + "acc_norm_stderr": 0.04897957737781168 + }, + 
"harness|ko_mmlu_miscellaneous|5": { + "acc": 0.545338441890166, + "acc_stderr": 0.017806304585052595, + "acc_norm": 0.545338441890166, + "acc_norm_stderr": 0.017806304585052595 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34074074074074073, + "acc_stderr": 0.04094376269996793, + "acc_norm": 0.34074074074074073, + "acc_norm_stderr": 0.04094376269996793 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3829787234042553, + "acc_stderr": 0.03177821250236922, + "acc_norm": 0.3829787234042553, + "acc_norm_stderr": 0.03177821250236922 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4578313253012048, + "acc_stderr": 0.0387862677100236, + "acc_norm": 0.4578313253012048, + "acc_norm_stderr": 0.0387862677100236 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5273311897106109, + "acc_stderr": 0.028355633568328174, + "acc_norm": 0.5273311897106109, + "acc_norm_stderr": 0.028355633568328174 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.47085201793721976, + "acc_stderr": 0.03350073248773403, + "acc_norm": 0.47085201793721976, + "acc_norm_stderr": 0.03350073248773403 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48854961832061067, + "acc_stderr": 0.043841400240780176, + "acc_norm": 0.48854961832061067, + "acc_norm_stderr": 0.043841400240780176 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.45, + "acc_stderr": 0.04999999999999999, + "acc_norm": 0.45, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5858585858585859, + "acc_stderr": 0.03509438348879629, + "acc_norm": 0.5858585858585859, + "acc_norm_stderr": 0.03509438348879629 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4482758620689655, + "acc_stderr": 0.04144311810878151, + "acc_norm": 0.4482758620689655, + "acc_norm_stderr": 0.04144311810878151 + }, 
+ "harness|ko_mmlu_college_physics|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.04488482852329017, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.04488482852329017 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5378151260504201, + "acc_stderr": 0.0323854694875898, + "acc_norm": 0.5378151260504201, + "acc_norm_stderr": 0.0323854694875898 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.47435897435897434, + "acc_stderr": 0.02531764972644865, + "acc_norm": 0.47435897435897434, + "acc_norm_stderr": 0.02531764972644865 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542129, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542129 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6203703703703703, + "acc_stderr": 0.04691521224077742, + "acc_norm": 0.6203703703703703, + "acc_norm_stderr": 0.04691521224077742 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39408866995073893, + "acc_stderr": 0.034381579670365446, + "acc_norm": 0.39408866995073893, + "acc_norm_stderr": 0.034381579670365446 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5129032258064516, + "acc_stderr": 0.02843453315268187, + "acc_norm": 0.5129032258064516, + "acc_norm_stderr": 0.02843453315268187 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7222222222222222, + "acc_stderr": 0.02934311479809446, + "acc_norm": 0.7222222222222222, + "acc_norm_stderr": 0.02934311479809446 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4716981132075472, + "acc_stderr": 0.030723535249006114, + "acc_norm": 0.4716981132075472, + "acc_norm_stderr": 0.030723535249006114 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5272727272727272, + "acc_stderr": 0.0478200179138006, + "acc_norm": 0.5272727272727272, + 
"acc_norm_stderr": 0.0478200179138006 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.027840811495871934, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.027840811495871934 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.03710185726119996, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.03710185726119996 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6019900497512438, + "acc_stderr": 0.034611994290400135, + "acc_norm": 0.6019900497512438, + "acc_norm_stderr": 0.034611994290400135 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.42196531791907516, + "acc_stderr": 0.03765746693865152, + "acc_norm": 0.42196531791907516, + "acc_norm_stderr": 0.03765746693865152 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.34656084656084657, + "acc_stderr": 0.02450877752102842, + "acc_norm": 0.34656084656084657, + "acc_norm_stderr": 0.02450877752102842 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3958333333333333, + "acc_stderr": 0.04089465449325583, + "acc_norm": 0.3958333333333333, + "acc_norm_stderr": 0.04089465449325583 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4797687861271676, + "acc_stderr": 0.026897049996382875, + "acc_norm": 0.4797687861271676, + "acc_norm_stderr": 0.026897049996382875 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4539877300613497, + "acc_stderr": 0.0391170190467718, + "acc_norm": 0.4539877300613497, + "acc_norm_stderr": 0.0391170190467718 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5, + "acc_stderr": 0.02782074420373286, + "acc_norm": 0.5, + 
"acc_norm_stderr": 0.02782074420373286 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5958549222797928, + "acc_stderr": 0.0354150857888402, + "acc_norm": 0.5958549222797928, + "acc_norm_stderr": 0.0354150857888402 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3508771929824561, + "acc_stderr": 0.044895393502706986, + "acc_norm": 0.3508771929824561, + "acc_norm_stderr": 0.044895393502706986 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5724770642201835, + "acc_stderr": 0.021210910204300437, + "acc_norm": 0.5724770642201835, + "acc_norm_stderr": 0.021210910204300437 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.04426266681379909, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.04426266681379909 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.48366013071895425, + "acc_stderr": 0.028614624752805413, + "acc_norm": 0.48366013071895425, + "acc_norm_stderr": 0.028614624752805413 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6446280991735537, + "acc_stderr": 0.0436923632657398, + "acc_norm": 0.6446280991735537, + "acc_norm_stderr": 0.0436923632657398 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.46710526315789475, + "acc_stderr": 0.04060127035236395, + "acc_norm": 0.46710526315789475, + "acc_norm_stderr": 0.04060127035236395 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4362745098039216, + "acc_stderr": 0.020062874243539128, + "acc_norm": 0.4362745098039216, + "acc_norm_stderr": 0.020062874243539128 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3546099290780142, + "acc_stderr": 
0.02853865002887864, + "acc_norm": 0.3546099290780142, + "acc_norm_stderr": 0.02853865002887864 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.29464285714285715, + "acc_stderr": 0.0432704093257873, + "acc_norm": 0.29464285714285715, + "acc_norm_stderr": 0.0432704093257873 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4351851851851852, + "acc_stderr": 0.03381200005643527, + "acc_norm": 0.4351851851851852, + "acc_norm_stderr": 0.03381200005643527 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.26927374301675977, + "acc_stderr": 0.014835616582882594, + "acc_norm": 0.26927374301675977, + "acc_norm_stderr": 0.014835616582882594 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4007352941176471, + "acc_stderr": 0.029768263528933105, + "acc_norm": 0.4007352941176471, + "acc_norm_stderr": 0.029768263528933105 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5224489795918368, + "acc_stderr": 0.031976941187136725, + "acc_norm": 0.5224489795918368, + "acc_norm_stderr": 0.031976941187136725 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6371308016877637, + "acc_stderr": 0.03129920825530213, + "acc_norm": 0.6371308016877637, + "acc_norm_stderr": 0.03129920825530213 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.37027379400260757, + "acc_stderr": 0.012332930781256725, + "acc_norm": 0.37027379400260757, + "acc_norm_stderr": 0.012332930781256725 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5343137254901961, + "acc_stderr": 0.03501038327635897, + "acc_norm": 0.5343137254901961, + "acc_norm_stderr": 0.03501038327635897 + }, + 
"harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6060606060606061, + "acc_stderr": 0.03815494308688931, + "acc_norm": 0.6060606060606061, + "acc_norm_stderr": 0.03815494308688931 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.27906976744186046, + "mc1_stderr": 0.015702107090627887, + "mc2": 0.44433902487660376, + "mc2_stderr": 0.016217425444196683 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.6009445100354192, + "acc_stderr": 0.016836377292849303, + "acc_norm": 0.6056670602125147, + "acc_norm_stderr": 0.016802090674893216 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + 
"harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Jsoo/Saltware-solar-10.7b-v1.0", + "model_sha": "562f9338e738fb9e417553453dc90df684a138d7", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Jsoo/solar-ko-common-merged/result_2024-06-23 13:17:07.json b/Jsoo/solar-ko-common-merged/result_2024-06-23 13:17:07.json new file mode 100644 index 0000000000000000000000000000000000000000..11e785c4a7f168a5e1d7e008bd99367e4e18df83 --- /dev/null +++ b/Jsoo/solar-ko-common-merged/result_2024-06-23 13:17:07.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 
0.386518771331058, + "acc_stderr": 0.014230084761910464, + "acc_norm": 0.4513651877133106, + "acc_norm_stderr": 0.014542104569955269 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4004182433778132, + "acc_stderr": 0.0048898174897396935, + "acc_norm": 0.5145389364668392, + "acc_norm_stderr": 0.00498767147864094 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5497076023391813, + "acc_stderr": 0.038158273659132366, + "acc_norm": 0.5497076023391813, + "acc_norm_stderr": 0.038158273659132366 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5436893203883495, + "acc_stderr": 0.049318019942204146, + "acc_norm": 0.5436893203883495, + "acc_norm_stderr": 0.049318019942204146 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5683269476372924, + "acc_stderr": 0.0177122289392998, + "acc_norm": 0.5683269476372924, + "acc_norm_stderr": 0.0177122289392998 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3851851851851852, + "acc_stderr": 0.042039210401562783, + "acc_norm": 0.3851851851851852, + "acc_norm_stderr": 0.042039210401562783 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4, + "acc_stderr": 0.03202563076101735, + "acc_norm": 0.4, + "acc_norm_stderr": 0.03202563076101735 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4397590361445783, + "acc_stderr": 0.03864139923699121, + "acc_norm": 0.4397590361445783, + "acc_norm_stderr": 0.03864139923699121 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5337620578778135, + "acc_stderr": 0.028333277109562786, + "acc_norm": 0.5337620578778135, + "acc_norm_stderr": 0.028333277109562786 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.47085201793721976, + "acc_stderr": 0.03350073248773404, + "acc_norm": 0.47085201793721976, + "acc_norm_stderr": 0.03350073248773404 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48091603053435117, + 
"acc_stderr": 0.04382094705550988, + "acc_norm": 0.48091603053435117, + "acc_norm_stderr": 0.04382094705550988 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6111111111111112, + "acc_stderr": 0.0347327959083696, + "acc_norm": 0.6111111111111112, + "acc_norm_stderr": 0.0347327959083696 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.45517241379310347, + "acc_stderr": 0.04149886942192117, + "acc_norm": 0.45517241379310347, + "acc_norm_stderr": 0.04149886942192117 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.04488482852329017, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.04488482852329017 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5126050420168067, + "acc_stderr": 0.03246816765752174, + "acc_norm": 0.5126050420168067, + "acc_norm_stderr": 0.03246816765752174 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4717948717948718, + "acc_stderr": 0.02531063925493391, + "acc_norm": 0.4717948717948718, + "acc_norm_stderr": 0.02531063925493391 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5648148148148148, + "acc_stderr": 0.04792898170907061, + "acc_norm": 0.5648148148148148, + "acc_norm_stderr": 0.04792898170907061 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.35467980295566504, + "acc_stderr": 0.0336612448905145, + "acc_norm": 0.35467980295566504, + "acc_norm_stderr": 0.0336612448905145 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 
0.49032258064516127, + "acc_stderr": 0.02843867799890955, + "acc_norm": 0.49032258064516127, + "acc_norm_stderr": 0.02843867799890955 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7136752136752137, + "acc_stderr": 0.029614323690456645, + "acc_norm": 0.7136752136752137, + "acc_norm_stderr": 0.029614323690456645 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4528301886792453, + "acc_stderr": 0.030635627957961823, + "acc_norm": 0.4528301886792453, + "acc_norm_stderr": 0.030635627957961823 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3037037037037037, + "acc_stderr": 0.028037929969114993, + "acc_norm": 0.3037037037037037, + "acc_norm_stderr": 0.028037929969114993 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3509933774834437, + "acc_stderr": 0.03896981964257375, + "acc_norm": 0.3509933774834437, + "acc_norm_stderr": 0.03896981964257375 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6268656716417911, + "acc_stderr": 0.03419832608176007, + "acc_norm": 0.6268656716417911, + "acc_norm_stderr": 0.03419832608176007 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4508670520231214, + "acc_stderr": 0.037940126746970296, + "acc_norm": 0.4508670520231214, + "acc_norm_stderr": 0.037940126746970296 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3915343915343915, + "acc_stderr": 0.025138091388851088, + "acc_norm": 0.3915343915343915, + "acc_norm_stderr": 0.025138091388851088 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.04155319955593146, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.04155319955593146 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + 
}, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.64, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.64, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5289017341040463, + "acc_stderr": 0.026874085883518348, + "acc_norm": 0.5289017341040463, + "acc_norm_stderr": 0.026874085883518348 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5030674846625767, + "acc_stderr": 0.03928297078179663, + "acc_norm": 0.5030674846625767, + "acc_norm_stderr": 0.03928297078179663 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5185185185185185, + "acc_stderr": 0.027801656212323667, + "acc_norm": 0.5185185185185185, + "acc_norm_stderr": 0.027801656212323667 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6528497409326425, + "acc_stderr": 0.03435696168361355, + "acc_norm": 0.6528497409326425, + "acc_norm_stderr": 0.03435696168361355 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.37719298245614036, + "acc_stderr": 0.04559522141958215, + "acc_norm": 0.37719298245614036, + "acc_norm_stderr": 0.04559522141958215 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5651376146788991, + "acc_stderr": 0.021254631465609283, + "acc_norm": 0.5651376146788991, + "acc_norm_stderr": 0.021254631465609283 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.04360314860077459, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.04360314860077459 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4803921568627451, + "acc_stderr": 0.028607893699576063, + "acc_norm": 0.4803921568627451, + "acc_norm_stderr": 0.028607893699576063 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 
0.05009082659620332 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6528925619834711, + "acc_stderr": 0.04345724570292534, + "acc_norm": 0.6528925619834711, + "acc_norm_stderr": 0.04345724570292534 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5263157894736842, + "acc_stderr": 0.04063302731486671, + "acc_norm": 0.5263157894736842, + "acc_norm_stderr": 0.04063302731486671 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4493464052287582, + "acc_stderr": 0.02012376652802727, + "acc_norm": 0.4493464052287582, + "acc_norm_stderr": 0.02012376652802727 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.35815602836879434, + "acc_stderr": 0.028602085862759415, + "acc_norm": 0.35815602836879434, + "acc_norm_stderr": 0.028602085862759415 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3125, + "acc_stderr": 0.043994650575715215, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.043994650575715215 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.39351851851851855, + "acc_stderr": 0.03331747876370312, + "acc_norm": 0.39351851851851855, + "acc_norm_stderr": 0.03331747876370312 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2569832402234637, + "acc_stderr": 0.014614465821966342, + "acc_norm": 0.2569832402234637, + "acc_norm_stderr": 0.014614465821966342 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.7, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.7, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3897058823529412, + "acc_stderr": 0.029624663581159696, + "acc_norm": 0.3897058823529412, + "acc_norm_stderr": 0.029624663581159696 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.49795918367346936, + "acc_stderr": 0.0320089533497105, + "acc_norm": 
0.49795918367346936, + "acc_norm_stderr": 0.0320089533497105 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6160337552742616, + "acc_stderr": 0.03165867806410668, + "acc_norm": 0.6160337552742616, + "acc_norm_stderr": 0.03165867806410668 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3748370273794003, + "acc_stderr": 0.012363652467551917, + "acc_norm": 0.3748370273794003, + "acc_norm_stderr": 0.012363652467551917 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5686274509803921, + "acc_stderr": 0.034760990605016355, + "acc_norm": 0.5686274509803921, + "acc_norm_stderr": 0.034760990605016355 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5757575757575758, + "acc_stderr": 0.03859268142070262, + "acc_norm": 0.5757575757575758, + "acc_norm_stderr": 0.03859268142070262 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3219094247246022, + "mc1_stderr": 0.01635556761196039, + "mc2": 0.4696226239246584, + "mc2_stderr": 0.016555383011879664 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4510035419126328, + "acc_stderr": 0.017107618859549346, + "acc_norm": 0.45336481700118064, + "acc_norm_stderr": 0.01711541822522687 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + 
"harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Jsoo/solar-ko-common-merged", + "model_sha": 
"a19a4fbd6d8870bbeb1268ea6447c18979187ee7", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Jsoo/solar-ko-common-slerp/result_2024-06-25 23:28:07.json b/Jsoo/solar-ko-common-slerp/result_2024-06-25 23:28:07.json new file mode 100644 index 0000000000000000000000000000000000000000..15615aa30d62ad80ba77655b0d0afeaaffec6c46 --- /dev/null +++ b/Jsoo/solar-ko-common-slerp/result_2024-06-25 23:28:07.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3609215017064846, + "acc_stderr": 0.01403476138617545, + "acc_norm": 0.43686006825938567, + "acc_norm_stderr": 0.014494421584256527 + }, + "harness|ko_hellaswag|10": { + "acc": 0.38329018123879705, + "acc_stderr": 0.004851944170671265, + "acc_norm": 0.4963154750049791, + "acc_norm_stderr": 0.004989645929811445 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5847953216374269, + "acc_stderr": 0.037792759455032014, + "acc_norm": 0.5847953216374269, + "acc_norm_stderr": 0.037792759455032014 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6116504854368932, + "acc_stderr": 0.04825729337356389, + "acc_norm": 0.6116504854368932, + "acc_norm_stderr": 0.04825729337356389 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5836526181353767, + "acc_stderr": 0.017627948030430298, + "acc_norm": 0.5836526181353767, + "acc_norm_stderr": 0.017627948030430298 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3851851851851852, + "acc_stderr": 0.042039210401562783, + "acc_norm": 0.3851851851851852, + "acc_norm_stderr": 0.042039210401562783 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.44680851063829785, + "acc_stderr": 0.0325005368436584, + "acc_norm": 
0.44680851063829785, + "acc_norm_stderr": 0.0325005368436584 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.43373493975903615, + "acc_stderr": 0.03858158940685517, + "acc_norm": 0.43373493975903615, + "acc_norm_stderr": 0.03858158940685517 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5691318327974276, + "acc_stderr": 0.028125340983972714, + "acc_norm": 0.5691318327974276, + "acc_norm_stderr": 0.028125340983972714 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.515695067264574, + "acc_stderr": 0.0335412657542081, + "acc_norm": 0.515695067264574, + "acc_norm_stderr": 0.0335412657542081 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5648854961832062, + "acc_stderr": 0.04348208051644858, + "acc_norm": 0.5648854961832062, + "acc_norm_stderr": 0.04348208051644858 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6616161616161617, + "acc_stderr": 0.03371124142626303, + "acc_norm": 0.6616161616161617, + "acc_norm_stderr": 0.03371124142626303 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4827586206896552, + "acc_stderr": 0.041641887201693775, + "acc_norm": 0.4827586206896552, + "acc_norm_stderr": 0.041641887201693775 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04690650298201942, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04690650298201942 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5504201680672269, + "acc_stderr": 0.03231293497137707, + "acc_norm": 0.5504201680672269, + "acc_norm_stderr": 0.03231293497137707 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5128205128205128, + "acc_stderr": 0.025342671293807247, + "acc_norm": 0.5128205128205128, + "acc_norm_stderr": 0.025342671293807247 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.54, + 
"acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5925925925925926, + "acc_stderr": 0.04750077341199984, + "acc_norm": 0.5925925925925926, + "acc_norm_stderr": 0.04750077341199984 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.37438423645320196, + "acc_stderr": 0.03405155380561952, + "acc_norm": 0.37438423645320196, + "acc_norm_stderr": 0.03405155380561952 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5225806451612903, + "acc_stderr": 0.02841498501970786, + "acc_norm": 0.5225806451612903, + "acc_norm_stderr": 0.02841498501970786 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7222222222222222, + "acc_stderr": 0.02934311479809444, + "acc_norm": 0.7222222222222222, + "acc_norm_stderr": 0.02934311479809444 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.47924528301886793, + "acc_stderr": 0.030746349975723456, + "acc_norm": 0.47924528301886793, + "acc_norm_stderr": 0.030746349975723456 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5545454545454546, + "acc_stderr": 0.047605488214603246, + "acc_norm": 0.5545454545454546, + "acc_norm_stderr": 0.047605488214603246 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.027840811495871934, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.027840811495871934 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3443708609271523, + "acc_stderr": 0.038796870240733264, + "acc_norm": 0.3443708609271523, + "acc_norm_stderr": 0.038796870240733264 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6716417910447762, + "acc_stderr": 0.033206858897443244, + "acc_norm": 0.6716417910447762, + "acc_norm_stderr": 0.033206858897443244 + }, + 
"harness|ko_mmlu_college_medicine|5": { + "acc": 0.4913294797687861, + "acc_stderr": 0.038118909889404126, + "acc_norm": 0.4913294797687861, + "acc_norm_stderr": 0.038118909889404126 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.373015873015873, + "acc_stderr": 0.02490699045899257, + "acc_norm": 0.373015873015873, + "acc_norm_stderr": 0.02490699045899257 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5277777777777778, + "acc_stderr": 0.04174752578923185, + "acc_norm": 0.5277777777777778, + "acc_norm_stderr": 0.04174752578923185 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.68, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.68, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5780346820809249, + "acc_stderr": 0.02658923114217426, + "acc_norm": 0.5780346820809249, + "acc_norm_stderr": 0.02658923114217426 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5398773006134969, + "acc_stderr": 0.03915857291436972, + "acc_norm": 0.5398773006134969, + "acc_norm_stderr": 0.03915857291436972 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5679012345679012, + "acc_stderr": 0.027563010971606672, + "acc_norm": 0.5679012345679012, + "acc_norm_stderr": 0.027563010971606672 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.689119170984456, + "acc_stderr": 0.03340361906276585, + "acc_norm": 0.689119170984456, + "acc_norm_stderr": 0.03340361906276585 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.39473684210526316, + "acc_stderr": 0.04598188057816542, + "acc_norm": 0.39473684210526316, + "acc_norm_stderr": 0.04598188057816542 
+ }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6201834862385321, + "acc_stderr": 0.020808825617866244, + "acc_norm": 0.6201834862385321, + "acc_norm_stderr": 0.020808825617866244 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.04360314860077459, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.04360314860077459 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5228758169934641, + "acc_stderr": 0.028599936776089782, + "acc_norm": 0.5228758169934641, + "acc_norm_stderr": 0.028599936776089782 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7024793388429752, + "acc_stderr": 0.04173349148083499, + "acc_norm": 0.7024793388429752, + "acc_norm_stderr": 0.04173349148083499 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5394736842105263, + "acc_stderr": 0.04056242252249034, + "acc_norm": 0.5394736842105263, + "acc_norm_stderr": 0.04056242252249034 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.020196594933541194, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.020196594933541194 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.375886524822695, + "acc_stderr": 0.02889395541211589, + "acc_norm": 0.375886524822695, + "acc_norm_stderr": 0.02889395541211589 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.36607142857142855, + "acc_stderr": 0.04572372358737431, + "acc_norm": 0.36607142857142855, + "acc_norm_stderr": 0.04572372358737431 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.033888571185023246, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.033888571185023246 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.288268156424581, + "acc_stderr": 0.015149132860209436, + "acc_norm": 
0.288268156424581, + "acc_norm_stderr": 0.015149132860209436 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.71, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.71, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.47794117647058826, + "acc_stderr": 0.030343264224213528, + "acc_norm": 0.47794117647058826, + "acc_norm_stderr": 0.030343264224213528 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6326530612244898, + "acc_stderr": 0.030862144921087555, + "acc_norm": 0.6326530612244898, + "acc_norm_stderr": 0.030862144921087555 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.679324894514768, + "acc_stderr": 0.030381931949990403, + "acc_norm": 0.679324894514768, + "acc_norm_stderr": 0.030381931949990403 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.38852672750977835, + "acc_stderr": 0.012448817838292367, + "acc_norm": 0.38852672750977835, + "acc_norm_stderr": 0.012448817838292367 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5980392156862745, + "acc_stderr": 0.03441190023482465, + "acc_norm": 0.5980392156862745, + "acc_norm_stderr": 0.03441190023482465 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6242424242424243, + "acc_stderr": 0.03781887353205982, + "acc_norm": 0.6242424242424243, + "acc_norm_stderr": 0.03781887353205982 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.35495716034271724, + "mc1_stderr": 0.0167508623813759, + "mc2": 0.5235708947419819, + "mc2_stderr": 0.016760407206789008 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.46162927981109797, + "acc_stderr": 0.01713966022184555, + "acc_norm": 0.4899645808736718, + "acc_norm_stderr": 0.01718689128689406 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + 
"harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + 
"harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Jsoo/solar-ko-common-slerp", + "model_sha": "3e33fda95193c74da1d6fbc0e650c4c2fdff4430", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Junmai/KIT-5.8b/result_2023-11-01 12:58:40.json b/Junmai/KIT-5.8b/result_2023-11-01 12:58:40.json new file mode 100644 index 0000000000000000000000000000000000000000..149e8a751e209a16597d56ee46c43985c606c5c5 --- /dev/null +++ b/Junmai/KIT-5.8b/result_2023-11-01 12:58:40.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2593856655290102, + "acc_stderr": 0.01280827357392708, + "acc_norm": 0.2841296928327645, + "acc_norm_stderr": 0.013179442447653887 + }, + "harness|ko_hellaswag|10": { + "acc": 0.359788886675961, + "acc_stderr": 0.004789575163418654, + "acc_norm": 0.4523003385779725, + "acc_norm_stderr": 0.004967023435680013 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.19298245614035087, + "acc_stderr": 0.030267457554898465, + "acc_norm": 0.19298245614035087, + "acc_norm_stderr": 0.030267457554898465 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.30097087378640774, + "acc_stderr": 0.04541609446503948, + "acc_norm": 0.30097087378640774, + 
"acc_norm_stderr": 0.04541609446503948 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.20051085568326948, + "acc_stderr": 0.014317653708594209, + "acc_norm": 0.20051085568326948, + "acc_norm_stderr": 0.014317653708594209 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.037857144650666544, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.037857144650666544 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.20851063829787234, + "acc_stderr": 0.026556982117838742, + "acc_norm": 0.20851063829787234, + "acc_norm_stderr": 0.026556982117838742 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.23493975903614459, + "acc_stderr": 0.03300533186128922, + "acc_norm": 0.23493975903614459, + "acc_norm_stderr": 0.03300533186128922 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.24115755627009647, + "acc_stderr": 0.024296594034763426, + "acc_norm": 0.24115755627009647, + "acc_norm_stderr": 0.024296594034763426 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.1031390134529148, + "acc_stderr": 0.020412564289839272, + "acc_norm": 0.1031390134529148, + "acc_norm_stderr": 0.020412564289839272 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2900763358778626, + "acc_stderr": 0.03980066246467765, + "acc_norm": 0.2900763358778626, + "acc_norm_stderr": 0.03980066246467765 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.35353535353535354, + "acc_stderr": 0.03406086723547153, + "acc_norm": 0.35353535353535354, + "acc_norm_stderr": 0.03406086723547153 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2413793103448276, + "acc_stderr": 0.03565998174135302, + "acc_norm": 
0.2413793103448276, + "acc_norm_stderr": 0.03565998174135302 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3431372549019608, + "acc_stderr": 0.04724007352383888, + "acc_norm": 0.3431372549019608, + "acc_norm_stderr": 0.04724007352383888 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3487394957983193, + "acc_stderr": 0.030956636328566548, + "acc_norm": 0.3487394957983193, + "acc_norm_stderr": 0.030956636328566548 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3641025641025641, + "acc_stderr": 0.024396672985094778, + "acc_norm": 0.3641025641025641, + "acc_norm_stderr": 0.024396672985094778 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.17, + "acc_stderr": 0.03775251680686371, + "acc_norm": 0.17, + "acc_norm_stderr": 0.03775251680686371 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036845, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036845 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.21296296296296297, + "acc_stderr": 0.03957835471980981, + "acc_norm": 0.21296296296296297, + "acc_norm_stderr": 0.03957835471980981 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.28078817733990147, + "acc_stderr": 0.03161856335358611, + "acc_norm": 0.28078817733990147, + "acc_norm_stderr": 0.03161856335358611 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3161290322580645, + "acc_stderr": 0.026450874489042764, + "acc_norm": 0.3161290322580645, + "acc_norm_stderr": 0.026450874489042764 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.19230769230769232, + "acc_stderr": 0.025819233256483727, + "acc_norm": 0.19230769230769232, + "acc_norm_stderr": 0.025819233256483727 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3169811320754717, + "acc_stderr": 0.02863723563980091, + "acc_norm": 0.3169811320754717, + "acc_norm_stderr": 0.02863723563980091 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.22727272727272727, + 
"acc_stderr": 0.040139645540727735, + "acc_norm": 0.22727272727272727, + "acc_norm_stderr": 0.040139645540727735 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.026719240783712163, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.026719240783712163 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33774834437086093, + "acc_stderr": 0.038615575462551684, + "acc_norm": 0.33774834437086093, + "acc_norm_stderr": 0.038615575462551684 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.26865671641791045, + "acc_stderr": 0.03134328358208954, + "acc_norm": 0.26865671641791045, + "acc_norm_stderr": 0.03134328358208954 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3468208092485549, + "acc_stderr": 0.036291466701596636, + "acc_norm": 0.3468208092485549, + "acc_norm_stderr": 0.036291466701596636 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2328042328042328, + "acc_stderr": 0.02176596167215453, + "acc_norm": 0.2328042328042328, + "acc_norm_stderr": 0.02176596167215453 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2569444444444444, + "acc_stderr": 0.03653946969442099, + "acc_norm": 0.2569444444444444, + "acc_norm_stderr": 0.03653946969442099 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2138728323699422, + "acc_stderr": 0.022075709251757177, + "acc_norm": 0.2138728323699422, + "acc_norm_stderr": 0.022075709251757177 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2392638036809816, + "acc_stderr": 0.03351953879521272, + "acc_norm": 0.2392638036809816, + "acc_norm_stderr": 0.03351953879521272 + }, + 
"harness|ko_mmlu_prehistory|5": { + "acc": 0.22530864197530864, + "acc_stderr": 0.02324620264781975, + "acc_norm": 0.22530864197530864, + "acc_norm_stderr": 0.02324620264781975 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.36787564766839376, + "acc_stderr": 0.034801756684660366, + "acc_norm": 0.36787564766839376, + "acc_norm_stderr": 0.034801756684660366 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.039994238792813365, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.039994238792813365 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3486238532110092, + "acc_stderr": 0.020431254090714328, + "acc_norm": 0.3486238532110092, + "acc_norm_stderr": 0.020431254090714328 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.04285714285714281, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.04285714285714281 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.02582916327275748, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.02582916327275748 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.19834710743801653, + "acc_stderr": 0.03640118271990947, + "acc_norm": 0.19834710743801653, + "acc_norm_stderr": 0.03640118271990947 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3092105263157895, + "acc_stderr": 0.037610708698674805, + "acc_norm": 0.3092105263157895, + "acc_norm_stderr": 0.037610708698674805 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2173202614379085, + "acc_stderr": 0.016684820929148598, + "acc_norm": 0.2173202614379085, + 
"acc_norm_stderr": 0.016684820929148598 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.24113475177304963, + "acc_stderr": 0.02551873104953776, + "acc_norm": 0.24113475177304963, + "acc_norm_stderr": 0.02551873104953776 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.21428571428571427, + "acc_stderr": 0.038946411200447915, + "acc_norm": 0.21428571428571427, + "acc_norm_stderr": 0.038946411200447915 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.0340470532865388, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.0340470532865388 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27262569832402234, + "acc_stderr": 0.014893391735249608, + "acc_norm": 0.27262569832402234, + "acc_norm_stderr": 0.014893391735249608 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.19, + "acc_stderr": 0.03942772444036624, + "acc_norm": 0.19, + "acc_norm_stderr": 0.03942772444036624 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4485294117647059, + "acc_stderr": 0.030211479609121593, + "acc_norm": 0.4485294117647059, + "acc_norm_stderr": 0.030211479609121593 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3224489795918367, + "acc_stderr": 0.029923100563683903, + "acc_norm": 0.3224489795918367, + "acc_norm_stderr": 0.029923100563683903 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2109704641350211, + "acc_stderr": 0.02655837250266192, + "acc_norm": 0.2109704641350211, + "acc_norm_stderr": 0.02655837250266192 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.24315514993481094, + "acc_stderr": 0.010956556654417358, + "acc_norm": 0.24315514993481094, + "acc_norm_stderr": 0.010956556654417358 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 
0.2549019607843137, + "acc_stderr": 0.030587591351604246, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.030587591351604246 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2727272727272727, + "acc_stderr": 0.0347769116216366, + "acc_norm": 0.2727272727272727, + "acc_norm_stderr": 0.0347769116216366 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.25091799265605874, + "mc1_stderr": 0.015176985027707693, + "mc2": 0.40433266036479987, + "mc2_stderr": 0.014934839595516874 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.29279811097992914, + "acc_stderr": 0.015644823205401334, + "acc_norm": 0.3907910271546635, + "acc_norm_stderr": 0.01677529846510826 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + 
"harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Junmai/KIT-5.8b", + "model_sha": "92023e894134ae843de46164fb793a8641447785", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Junmai/KIT-7B-v1/result_2023-11-07 06:23:03.json b/Junmai/KIT-7B-v1/result_2023-11-07 06:23:03.json new file mode 100644 index 0000000000000000000000000000000000000000..5a17deb2872b6721a6022708ac936ff2448f9e19 --- /dev/null +++ b/Junmai/KIT-7B-v1/result_2023-11-07 06:23:03.json 
@@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.19112627986348124, + "acc_stderr": 0.0114900552927786, + "acc_norm": 0.23890784982935154, + "acc_norm_stderr": 0.012461071376316621 + }, + "harness|ko_hellaswag|10": { + "acc": 0.25253933479386576, + "acc_stderr": 0.0043358096144803055, + "acc_norm": 0.2394941246763593, + "acc_norm_stderr": 0.004259025448541511 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.3157894736842105, + "acc_stderr": 0.035650796707083106, + "acc_norm": 0.3157894736842105, + "acc_norm_stderr": 0.035650796707083106 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.17475728155339806, + "acc_stderr": 0.037601780060266196, + "acc_norm": 0.17475728155339806, + "acc_norm_stderr": 0.037601780060266196 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2541507024265645, + "acc_stderr": 0.01556925469204578, + "acc_norm": 0.2541507024265645, + "acc_norm_stderr": 0.01556925469204578 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.22962962962962963, + "acc_stderr": 0.036333844140734636, + "acc_norm": 0.22962962962962963, + "acc_norm_stderr": 0.036333844140734636 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2723404255319149, + "acc_stderr": 0.029101290698386684, + "acc_norm": 0.2723404255319149, + "acc_norm_stderr": 0.029101290698386684 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.24096385542168675, + "acc_stderr": 0.03329394119073528, + "acc_norm": 0.24096385542168675, + "acc_norm_stderr": 0.03329394119073528 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.18971061093247588, + "acc_stderr": 0.02226819625878322, + "acc_norm": 0.18971061093247588, + "acc_norm_stderr": 0.02226819625878322 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.32286995515695066, + "acc_stderr": 0.03138147637575498, + "acc_norm": 0.32286995515695066, 
+ "acc_norm_stderr": 0.03138147637575498 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2595419847328244, + "acc_stderr": 0.03844876139785271, + "acc_norm": 0.2595419847328244, + "acc_norm_stderr": 0.03844876139785271 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.2878787878787879, + "acc_stderr": 0.03225883512300992, + "acc_norm": 0.2878787878787879, + "acc_norm_stderr": 0.03225883512300992 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.23448275862068965, + "acc_stderr": 0.035306258743465914, + "acc_norm": 0.23448275862068965, + "acc_norm_stderr": 0.035306258743465914 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237654, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237654 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3403361344537815, + "acc_stderr": 0.030778057422931666, + "acc_norm": 0.3403361344537815, + "acc_norm_stderr": 0.030778057422931666 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3641025641025641, + "acc_stderr": 0.024396672985094778, + "acc_norm": 0.3641025641025641, + "acc_norm_stderr": 0.024396672985094778 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.18, + "acc_stderr": 0.038612291966536955, + "acc_norm": 0.18, + "acc_norm_stderr": 0.038612291966536955 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.04236511258094633, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.04236511258094633 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.21182266009852216, + "acc_stderr": 0.028748983689941048, + 
"acc_norm": 0.21182266009852216, + "acc_norm_stderr": 0.028748983689941048 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3, + "acc_stderr": 0.02606936229533513, + "acc_norm": 0.3, + "acc_norm_stderr": 0.02606936229533513 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.23931623931623933, + "acc_stderr": 0.027951826808924333, + "acc_norm": 0.23931623931623933, + "acc_norm_stderr": 0.027951826808924333 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.20754716981132076, + "acc_stderr": 0.024959918028911274, + "acc_norm": 0.20754716981132076, + "acc_norm_stderr": 0.024959918028911274 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2545454545454545, + "acc_stderr": 0.041723430387053825, + "acc_norm": 0.2545454545454545, + "acc_norm_stderr": 0.041723430387053825 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.02671924078371216, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.02671924078371216 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33112582781456956, + "acc_stderr": 0.038425817186598696, + "acc_norm": 0.33112582781456956, + "acc_norm_stderr": 0.038425817186598696 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.23880597014925373, + "acc_stderr": 0.030147775935409217, + "acc_norm": 0.23880597014925373, + "acc_norm_stderr": 0.030147775935409217 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.20809248554913296, + "acc_stderr": 0.030952890217749884, + "acc_norm": 0.20809248554913296, + "acc_norm_stderr": 0.030952890217749884 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.21957671957671956, + "acc_stderr": 0.021320018599770372, + "acc_norm": 0.21957671957671956, + "acc_norm_stderr": 0.021320018599770372 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.03745554791462457, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.03745554791462457 + }, + 
"harness|ko_mmlu_college_chemistry|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2514450867052023, + "acc_stderr": 0.02335736578587403, + "acc_norm": 0.2514450867052023, + "acc_norm_stderr": 0.02335736578587403 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.25153374233128833, + "acc_stderr": 0.034089978868575295, + "acc_norm": 0.25153374233128833, + "acc_norm_stderr": 0.034089978868575295 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2037037037037037, + "acc_stderr": 0.022409674547304175, + "acc_norm": 0.2037037037037037, + "acc_norm_stderr": 0.022409674547304175 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.2538860103626943, + "acc_stderr": 0.03141024780565319, + "acc_norm": 0.2538860103626943, + "acc_norm_stderr": 0.03141024780565319 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.22807017543859648, + "acc_stderr": 0.03947152782669415, + "acc_norm": 0.22807017543859648, + "acc_norm_stderr": 0.03947152782669415 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.26972477064220185, + "acc_stderr": 0.01902848671111545, + "acc_norm": 0.26972477064220185, + "acc_norm_stderr": 0.01902848671111545 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30158730158730157, + "acc_stderr": 0.04104947269903394, + "acc_norm": 0.30158730158730157, + "acc_norm_stderr": 0.04104947269903394 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.20261437908496732, + "acc_stderr": 0.023015446877985672, + "acc_norm": 0.20261437908496732, + "acc_norm_stderr": 
0.023015446877985672 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384739, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384739 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2396694214876033, + "acc_stderr": 0.038968789850704164, + "acc_norm": 0.2396694214876033, + "acc_norm_stderr": 0.038968789850704164 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.2894736842105263, + "acc_stderr": 0.03690677986137282, + "acc_norm": 0.2894736842105263, + "acc_norm_stderr": 0.03690677986137282 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.24836601307189543, + "acc_stderr": 0.017479487001364764, + "acc_norm": 0.24836601307189543, + "acc_norm_stderr": 0.017479487001364764 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.23049645390070922, + "acc_stderr": 0.025123739226872405, + "acc_norm": 0.23049645390070922, + "acc_norm_stderr": 0.025123739226872405 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.29464285714285715, + "acc_stderr": 0.04327040932578728, + "acc_norm": 0.29464285714285715, + "acc_norm_stderr": 0.04327040932578728 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.0340470532865388, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.0340470532865388 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27150837988826815, + "acc_stderr": 0.014874252168095278, + "acc_norm": 0.27150837988826815, + "acc_norm_stderr": 0.014874252168095278 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.17, + "acc_stderr": 0.03775251680686371, + "acc_norm": 0.17, + "acc_norm_stderr": 0.03775251680686371 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4485294117647059, + "acc_stderr": 0.030211479609121593, + "acc_norm": 
0.4485294117647059, + "acc_norm_stderr": 0.030211479609121593 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3306122448979592, + "acc_stderr": 0.03011642629654057, + "acc_norm": 0.3306122448979592, + "acc_norm_stderr": 0.03011642629654057 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2869198312236287, + "acc_stderr": 0.029443773022594703, + "acc_norm": 0.2869198312236287, + "acc_norm_stderr": 0.029443773022594703 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2503259452411995, + "acc_stderr": 0.011064151027165433, + "acc_norm": 0.2503259452411995, + "acc_norm_stderr": 0.011064151027165433 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.029331162294251735, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.029331162294251735 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2, + "acc_stderr": 0.03123475237772118, + "acc_norm": 0.2, + "acc_norm_stderr": 0.03123475237772118 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2802937576499388, + "mc1_stderr": 0.01572313952460875, + "mc2": 0.4606951019662925, + "mc2_stderr": 0.015545587074280528 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.06729634002361275, + "acc_stderr": 0.00861355401775773, + "acc_norm": 0.27390791027154665, + "acc_norm_stderr": 0.01533249947479102 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + 
"harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + 
"harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Junmai/KIT-7B-v1", + "model_sha": "fc0e83e20d93b8bfb763205022dee78f36e01e60", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Junmai/KIT-7B-v2/result_2023-11-07 06:54:57.json b/Junmai/KIT-7B-v2/result_2023-11-07 06:54:57.json new file mode 100644 index 0000000000000000000000000000000000000000..26a4104ace84311415d0137804bb730001971313 --- /dev/null +++ b/Junmai/KIT-7B-v2/result_2023-11-07 06:54:57.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.17406143344709898, + "acc_stderr": 0.011080177129482213, + "acc_norm": 0.23464163822525597, + "acc_norm_stderr": 0.01238387356076867 + }, + "harness|ko_hellaswag|10": { + "acc": 0.2528380800637323, + "acc_stderr": 0.004337506344899915, + "acc_norm": 0.23889663413662618, + "acc_norm_stderr": 0.004255380050015134 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.3216374269005848, + "acc_stderr": 0.03582529442573122, + "acc_norm": 0.3216374269005848, + "acc_norm_stderr": 0.03582529442573122 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.17475728155339806, + "acc_stderr": 0.037601780060266196, + "acc_norm": 0.17475728155339806, + "acc_norm_stderr": 0.037601780060266196 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2669220945083014, + "acc_stderr": 0.015818450894777576, + "acc_norm": 0.2669220945083014, + "acc_norm_stderr": 0.015818450894777576 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2, + "acc_stderr": 0.03455473702325436, + "acc_norm": 0.2, + "acc_norm_stderr": 0.03455473702325436 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.22, + "acc_stderr": 0.041633319989322695, + "acc_norm": 0.22, + "acc_norm_stderr": 0.041633319989322695 + }, + "harness|ko_mmlu_conceptual_physics|5": { + 
"acc": 0.26382978723404255, + "acc_stderr": 0.028809989854102987, + "acc_norm": 0.26382978723404255, + "acc_norm_stderr": 0.028809989854102987 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.2710843373493976, + "acc_stderr": 0.03460579907553027, + "acc_norm": 0.2710843373493976, + "acc_norm_stderr": 0.03460579907553027 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.20257234726688103, + "acc_stderr": 0.022827317491059682, + "acc_norm": 0.20257234726688103, + "acc_norm_stderr": 0.022827317491059682 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3542600896860987, + "acc_stderr": 0.03210062154134988, + "acc_norm": 0.3542600896860987, + "acc_norm_stderr": 0.03210062154134988 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.26717557251908397, + "acc_stderr": 0.03880848301082395, + "acc_norm": 0.26717557251908397, + "acc_norm_stderr": 0.03880848301082395 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.20707070707070707, + "acc_stderr": 0.028869778460267045, + "acc_norm": 0.20707070707070707, + "acc_norm_stderr": 0.028869778460267045 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2482758620689655, + "acc_stderr": 0.036001056927277716, + "acc_norm": 0.2482758620689655, + "acc_norm_stderr": 0.036001056927277716 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237654, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237654 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.031124619309328177, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.031124619309328177 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3641025641025641, + "acc_stderr": 0.024396672985094778, + "acc_norm": 0.3641025641025641, + 
"acc_norm_stderr": 0.024396672985094778 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.19, + "acc_stderr": 0.039427724440366234, + "acc_norm": 0.19, + "acc_norm_stderr": 0.039427724440366234 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.24074074074074073, + "acc_stderr": 0.041331194402438376, + "acc_norm": 0.24074074074074073, + "acc_norm_stderr": 0.041331194402438376 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.18719211822660098, + "acc_stderr": 0.027444924966882618, + "acc_norm": 0.18719211822660098, + "acc_norm_stderr": 0.027444924966882618 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.26129032258064516, + "acc_stderr": 0.024993053397764805, + "acc_norm": 0.26129032258064516, + "acc_norm_stderr": 0.024993053397764805 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.28205128205128205, + "acc_stderr": 0.029480360549541194, + "acc_norm": 0.28205128205128205, + "acc_norm_stderr": 0.029480360549541194 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2188679245283019, + "acc_stderr": 0.02544786382510862, + "acc_norm": 0.2188679245283019, + "acc_norm_stderr": 0.02544786382510862 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.23636363636363636, + "acc_stderr": 0.04069306319721375, + "acc_norm": 0.23636363636363636, + "acc_norm_stderr": 0.04069306319721375 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.02671924078371217, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.02671924078371217 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.1986754966887417, + "acc_stderr": 0.032578473844367746, + "acc_norm": 0.1986754966887417, + "acc_norm_stderr": 0.032578473844367746 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.22388059701492538, + "acc_stderr": 
0.029475250236017197, + "acc_norm": 0.22388059701492538, + "acc_norm_stderr": 0.029475250236017197 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2023121387283237, + "acc_stderr": 0.030631145539198816, + "acc_norm": 0.2023121387283237, + "acc_norm_stderr": 0.030631145539198816 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.25132275132275134, + "acc_stderr": 0.022340482339643898, + "acc_norm": 0.25132275132275134, + "acc_norm_stderr": 0.022340482339643898 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.25, + "acc_stderr": 0.03621034121889507, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03621034121889507 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.2, + "acc_stderr": 0.040201512610368445, + "acc_norm": 0.2, + "acc_norm_stderr": 0.040201512610368445 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.24566473988439305, + "acc_stderr": 0.02317629820399201, + "acc_norm": 0.24566473988439305, + "acc_norm_stderr": 0.02317629820399201 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.22699386503067484, + "acc_stderr": 0.0329109957861577, + "acc_norm": 0.22699386503067484, + "acc_norm_stderr": 0.0329109957861577 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2345679012345679, + "acc_stderr": 0.02357688174400571, + "acc_norm": 0.2345679012345679, + "acc_norm_stderr": 0.02357688174400571 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.23316062176165803, + "acc_stderr": 0.030516111371476008, + "acc_norm": 0.23316062176165803, + "acc_norm_stderr": 0.030516111371476008 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 
0.039994238792813365, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.039994238792813365 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.22568807339449543, + "acc_stderr": 0.017923087667803053, + "acc_norm": 0.22568807339449543, + "acc_norm_stderr": 0.017923087667803053 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.23015873015873015, + "acc_stderr": 0.03764950879790607, + "acc_norm": 0.23015873015873015, + "acc_norm_stderr": 0.03764950879790607 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.023805186524888142, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.023805186524888142 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2975206611570248, + "acc_stderr": 0.04173349148083499, + "acc_norm": 0.2975206611570248, + "acc_norm_stderr": 0.04173349148083499 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.32894736842105265, + "acc_stderr": 0.03823428969926605, + "acc_norm": 0.32894736842105265, + "acc_norm_stderr": 0.03823428969926605 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.22875816993464052, + "acc_stderr": 0.016992723465466226, + "acc_norm": 0.22875816993464052, + "acc_norm_stderr": 0.016992723465466226 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2375886524822695, + "acc_stderr": 0.025389512552729906, + "acc_norm": 0.2375886524822695, + "acc_norm_stderr": 0.025389512552729906 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.32142857142857145, + "acc_stderr": 0.0443280405529152, + "acc_norm": 0.32142857142857145, + "acc_norm_stderr": 0.0443280405529152 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.0340470532865388, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.0340470532865388 + }, + 
"harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23128491620111732, + "acc_stderr": 0.014102223623152593, + "acc_norm": 0.23128491620111732, + "acc_norm_stderr": 0.014102223623152593 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4485294117647059, + "acc_stderr": 0.030211479609121593, + "acc_norm": 0.4485294117647059, + "acc_norm_stderr": 0.030211479609121593 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3183673469387755, + "acc_stderr": 0.02982253379398209, + "acc_norm": 0.3183673469387755, + "acc_norm_stderr": 0.02982253379398209 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.25738396624472576, + "acc_stderr": 0.028458820991460302, + "acc_norm": 0.25738396624472576, + "acc_norm_stderr": 0.028458820991460302 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.24902216427640156, + "acc_stderr": 0.01104489226404077, + "acc_norm": 0.24902216427640156, + "acc_norm_stderr": 0.01104489226404077 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.029331162294251735, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.029331162294251735 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.20606060606060606, + "acc_stderr": 0.0315841532404771, + "acc_norm": 0.20606060606060606, + "acc_norm_stderr": 0.0315841532404771 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2692778457772338, + "mc1_stderr": 0.015528566637087304, + "mc2": 0.4632137219137931, + "mc2_stderr": 0.015329765440912904 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.07319952774498228, + "acc_stderr": 0.008954927647725423, + "acc_norm": 
0.28689492325855964, + "acc_norm_stderr": 0.015550809966781778 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + 
"harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Junmai/KIT-7B-v2", + "model_sha": "20725892ef31e719453654c06747f48934ccdd57", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Junmai/KIT-7B-v3/result_2023-11-09 02:14:48.json b/Junmai/KIT-7B-v3/result_2023-11-09 02:14:48.json new file mode 100644 index 0000000000000000000000000000000000000000..700975749a3237fa6e87084a494bbae04013a689 --- /dev/null +++ b/Junmai/KIT-7B-v3/result_2023-11-09 02:14:48.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.21416382252559726, + "acc_stderr": 0.011988383205966494, + "acc_norm": 0.26535836177474403, + "acc_norm_stderr": 0.012902554762313964 + }, + "harness|ko_hellaswag|10": { + "acc": 0.253734315873332, + "acc_stderr": 0.004342580277662732, + "acc_norm": 0.2401911969727146, + "acc_norm_stderr": 0.004263263933601555 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.03377310252209194, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.03377310252209194 + }, + 
"harness|ko_mmlu_management|5": { + "acc": 0.3300970873786408, + "acc_stderr": 0.046561471100123514, + "acc_norm": 0.3300970873786408, + "acc_norm_stderr": 0.046561471100123514 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.227330779054917, + "acc_stderr": 0.014987270640946015, + "acc_norm": 0.227330779054917, + "acc_norm_stderr": 0.014987270640946015 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2518518518518518, + "acc_stderr": 0.03749850709174023, + "acc_norm": 0.2518518518518518, + "acc_norm_stderr": 0.03749850709174023 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.1829787234042553, + "acc_stderr": 0.02527604100044997, + "acc_norm": 0.1829787234042553, + "acc_norm_stderr": 0.02527604100044997 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.15060240963855423, + "acc_stderr": 0.02784386378726433, + "acc_norm": 0.15060240963855423, + "acc_norm_stderr": 0.02784386378726433 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2508038585209003, + "acc_stderr": 0.024619771956697165, + "acc_norm": 0.2508038585209003, + "acc_norm_stderr": 0.024619771956697165 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.16591928251121077, + "acc_stderr": 0.02496755319654716, + "acc_norm": 0.16591928251121077, + "acc_norm_stderr": 0.02496755319654716 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2595419847328244, + "acc_stderr": 0.0384487613978527, + "acc_norm": 0.2595419847328244, + "acc_norm_stderr": 0.0384487613978527 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036843, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036843 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3434343434343434, + "acc_stderr": 0.03383201223244442, + "acc_norm": 0.3434343434343434, + "acc_norm_stderr": 0.03383201223244442 + }, + 
"harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2206896551724138, + "acc_stderr": 0.03455930201924812, + "acc_norm": 0.2206896551724138, + "acc_norm_stderr": 0.03455930201924812 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.04488482852329017, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.04488482852329017 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.2815126050420168, + "acc_stderr": 0.029213549414372177, + "acc_norm": 0.2815126050420168, + "acc_norm_stderr": 0.029213549414372177 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2948717948717949, + "acc_stderr": 0.0231193627582323, + "acc_norm": 0.2948717948717949, + "acc_norm_stderr": 0.0231193627582323 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036845, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036845 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036846, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036846 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.24074074074074073, + "acc_stderr": 0.041331194402438376, + "acc_norm": 0.24074074074074073, + "acc_norm_stderr": 0.041331194402438376 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.28078817733990147, + "acc_stderr": 0.031618563353586086, + "acc_norm": 0.28078817733990147, + "acc_norm_stderr": 0.031618563353586086 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2967741935483871, + "acc_stderr": 0.025988500792411898, + "acc_norm": 0.2967741935483871, + "acc_norm_stderr": 0.025988500792411898 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.20512820512820512, + "acc_stderr": 0.026453508054040335, + "acc_norm": 0.20512820512820512, + "acc_norm_stderr": 0.026453508054040335 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.27547169811320754, + "acc_stderr": 0.027495663683724064, + "acc_norm": 
0.27547169811320754, + "acc_norm_stderr": 0.027495663683724064 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.24545454545454545, + "acc_stderr": 0.04122066502878284, + "acc_norm": 0.24545454545454545, + "acc_norm_stderr": 0.04122066502878284 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.27037037037037037, + "acc_stderr": 0.027080372815145654, + "acc_norm": 0.27037037037037037, + "acc_norm_stderr": 0.027080372815145654 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33112582781456956, + "acc_stderr": 0.038425817186598696, + "acc_norm": 0.33112582781456956, + "acc_norm_stderr": 0.038425817186598696 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.19900497512437812, + "acc_stderr": 0.02823136509275841, + "acc_norm": 0.19900497512437812, + "acc_norm_stderr": 0.02823136509275841 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.31213872832369943, + "acc_stderr": 0.035331333893236574, + "acc_norm": 0.31213872832369943, + "acc_norm_stderr": 0.035331333893236574 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2671957671957672, + "acc_stderr": 0.022789673145776575, + "acc_norm": 0.2671957671957672, + "acc_norm_stderr": 0.022789673145776575 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2569444444444444, + "acc_stderr": 0.03653946969442099, + "acc_norm": 0.2569444444444444, + "acc_norm_stderr": 0.03653946969442099 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847415, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847415 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2514450867052023, + "acc_stderr": 0.02335736578587404, + "acc_norm": 0.2514450867052023, + "acc_norm_stderr": 0.02335736578587404 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.26380368098159507, + 
"acc_stderr": 0.03462419931615624, + "acc_norm": 0.26380368098159507, + "acc_norm_stderr": 0.03462419931615624 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.24074074074074073, + "acc_stderr": 0.02378858355165854, + "acc_norm": 0.24074074074074073, + "acc_norm_stderr": 0.02378858355165854 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.36787564766839376, + "acc_stderr": 0.034801756684660366, + "acc_norm": 0.36787564766839376, + "acc_norm_stderr": 0.034801756684660366 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.03999423879281336, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.03999423879281336 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.30458715596330277, + "acc_stderr": 0.019732299420354038, + "acc_norm": 0.30458715596330277, + "acc_norm_stderr": 0.019732299420354038 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.04134913018303316, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.04134913018303316 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2679738562091503, + "acc_stderr": 0.025360603796242557, + "acc_norm": 0.2679738562091503, + "acc_norm_stderr": 0.025360603796242557 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.17, + "acc_stderr": 0.03775251680686371, + "acc_norm": 0.17, + "acc_norm_stderr": 0.03775251680686371 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.19008264462809918, + "acc_stderr": 0.035817969517092825, + "acc_norm": 0.19008264462809918, + "acc_norm_stderr": 0.035817969517092825 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3223684210526316, + "acc_stderr": 0.038035102483515854, + "acc_norm": 0.3223684210526316, + "acc_norm_stderr": 0.038035102483515854 + }, + 
"harness|ko_mmlu_professional_psychology|5": { + "acc": 0.24836601307189543, + "acc_stderr": 0.01747948700136476, + "acc_norm": 0.24836601307189543, + "acc_norm_stderr": 0.01747948700136476 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2765957446808511, + "acc_stderr": 0.026684564340461004, + "acc_norm": 0.2765957446808511, + "acc_norm_stderr": 0.026684564340461004 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.15178571428571427, + "acc_stderr": 0.03405702838185693, + "acc_norm": 0.15178571428571427, + "acc_norm_stderr": 0.03405702838185693 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.33796296296296297, + "acc_stderr": 0.03225941352631296, + "acc_norm": 0.33796296296296297, + "acc_norm_stderr": 0.03225941352631296 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2681564245810056, + "acc_stderr": 0.014816119635317003, + "acc_norm": 0.2681564245810056, + "acc_norm_stderr": 0.014816119635317003 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.22, + "acc_stderr": 0.041633319989322674, + "acc_norm": 0.22, + "acc_norm_stderr": 0.041633319989322674 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.26838235294117646, + "acc_stderr": 0.02691748122437722, + "acc_norm": 0.26838235294117646, + "acc_norm_stderr": 0.02691748122437722 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2897959183673469, + "acc_stderr": 0.029043088683304328, + "acc_norm": 0.2897959183673469, + "acc_norm_stderr": 0.029043088683304328 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2489451476793249, + "acc_stderr": 0.028146970599422644, + "acc_norm": 0.2489451476793249, + "acc_norm_stderr": 0.028146970599422644 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.24902216427640156, + "acc_stderr": 
0.01104489226404077, + "acc_norm": 0.24902216427640156, + "acc_norm_stderr": 0.01104489226404077 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.29901960784313725, + "acc_stderr": 0.03213325717373617, + "acc_norm": 0.29901960784313725, + "acc_norm_stderr": 0.03213325717373617 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2909090909090909, + "acc_stderr": 0.035465630196243346, + "acc_norm": 0.2909090909090909, + "acc_norm_stderr": 0.035465630196243346 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.23745410036719705, + "mc1_stderr": 0.01489627744104183, + "mc2": NaN, + "mc2_stderr": NaN + }, + "harness|ko_commongen_v2|2": { + "acc": 0.0885478158205431, + "acc_stderr": 0.00976721370275642, + "acc_norm": 0.19952774498229045, + "acc_norm_stderr": 0.013740090947621325 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Junmai/KIT-7B-v3", + "model_sha": "17167805a31f62fa72d3a5c4dc2abf7201a3395d", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Junmai/KIT-7b-v2/result_2023-11-07 06:26:48.json b/Junmai/KIT-7b-v2/result_2023-11-07 06:26:48.json new file mode 100644 index 
0000000000000000000000000000000000000000..10802510a8094f8a333be058bbea26a5725f48a2 --- /dev/null +++ b/Junmai/KIT-7b-v2/result_2023-11-07 06:26:48.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.17406143344709898, + "acc_stderr": 0.011080177129482213, + "acc_norm": 0.23464163822525597, + "acc_norm_stderr": 0.01238387356076867 + }, + "harness|ko_hellaswag|10": { + "acc": 0.2528380800637323, + "acc_stderr": 0.004337506344899915, + "acc_norm": 0.23889663413662618, + "acc_norm_stderr": 0.004255380050015134 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.3216374269005848, + "acc_stderr": 0.03582529442573122, + "acc_norm": 0.3216374269005848, + "acc_norm_stderr": 0.03582529442573122 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.17475728155339806, + "acc_stderr": 0.037601780060266196, + "acc_norm": 0.17475728155339806, + "acc_norm_stderr": 0.037601780060266196 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2669220945083014, + "acc_stderr": 0.015818450894777576, + "acc_norm": 0.2669220945083014, + "acc_norm_stderr": 0.015818450894777576 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2, + "acc_stderr": 0.03455473702325436, + "acc_norm": 0.2, + "acc_norm_stderr": 0.03455473702325436 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.22, + "acc_stderr": 0.041633319989322695, + "acc_norm": 0.22, + "acc_norm_stderr": 0.041633319989322695 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.26382978723404255, + "acc_stderr": 0.028809989854102987, + "acc_norm": 0.26382978723404255, + "acc_norm_stderr": 0.028809989854102987 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.2710843373493976, + "acc_stderr": 0.03460579907553027, + "acc_norm": 0.2710843373493976, + "acc_norm_stderr": 0.03460579907553027 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.20257234726688103, + "acc_stderr": 0.022827317491059682, + "acc_norm": 0.20257234726688103, + "acc_norm_stderr": 0.022827317491059682 + }, + 
"harness|ko_mmlu_human_aging|5": { + "acc": 0.3542600896860987, + "acc_stderr": 0.03210062154134988, + "acc_norm": 0.3542600896860987, + "acc_norm_stderr": 0.03210062154134988 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.26717557251908397, + "acc_stderr": 0.03880848301082395, + "acc_norm": 0.26717557251908397, + "acc_norm_stderr": 0.03880848301082395 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.20707070707070707, + "acc_stderr": 0.028869778460267045, + "acc_norm": 0.20707070707070707, + "acc_norm_stderr": 0.028869778460267045 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2482758620689655, + "acc_stderr": 0.036001056927277716, + "acc_norm": 0.2482758620689655, + "acc_norm_stderr": 0.036001056927277716 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237654, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237654 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.031124619309328177, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.031124619309328177 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3641025641025641, + "acc_stderr": 0.024396672985094778, + "acc_norm": 0.3641025641025641, + "acc_norm_stderr": 0.024396672985094778 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.19, + "acc_stderr": 0.039427724440366234, + "acc_norm": 0.19, + "acc_norm_stderr": 0.039427724440366234 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.24074074074074073, + "acc_stderr": 0.041331194402438376, + "acc_norm": 0.24074074074074073, + "acc_norm_stderr": 
0.041331194402438376 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.18719211822660098, + "acc_stderr": 0.027444924966882618, + "acc_norm": 0.18719211822660098, + "acc_norm_stderr": 0.027444924966882618 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.26129032258064516, + "acc_stderr": 0.024993053397764805, + "acc_norm": 0.26129032258064516, + "acc_norm_stderr": 0.024993053397764805 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.28205128205128205, + "acc_stderr": 0.029480360549541194, + "acc_norm": 0.28205128205128205, + "acc_norm_stderr": 0.029480360549541194 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2188679245283019, + "acc_stderr": 0.02544786382510862, + "acc_norm": 0.2188679245283019, + "acc_norm_stderr": 0.02544786382510862 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.23636363636363636, + "acc_stderr": 0.04069306319721375, + "acc_norm": 0.23636363636363636, + "acc_norm_stderr": 0.04069306319721375 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.02671924078371217, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.02671924078371217 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.1986754966887417, + "acc_stderr": 0.032578473844367746, + "acc_norm": 0.1986754966887417, + "acc_norm_stderr": 0.032578473844367746 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.22388059701492538, + "acc_stderr": 0.029475250236017197, + "acc_norm": 0.22388059701492538, + "acc_norm_stderr": 0.029475250236017197 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2023121387283237, + "acc_stderr": 0.030631145539198816, + "acc_norm": 0.2023121387283237, + "acc_norm_stderr": 0.030631145539198816 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.25132275132275134, + "acc_stderr": 0.022340482339643898, + "acc_norm": 0.25132275132275134, + "acc_norm_stderr": 0.022340482339643898 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 
0.25, + "acc_stderr": 0.03621034121889507, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03621034121889507 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.2, + "acc_stderr": 0.040201512610368445, + "acc_norm": 0.2, + "acc_norm_stderr": 0.040201512610368445 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.24566473988439305, + "acc_stderr": 0.02317629820399201, + "acc_norm": 0.24566473988439305, + "acc_norm_stderr": 0.02317629820399201 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.22699386503067484, + "acc_stderr": 0.0329109957861577, + "acc_norm": 0.22699386503067484, + "acc_norm_stderr": 0.0329109957861577 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2345679012345679, + "acc_stderr": 0.02357688174400571, + "acc_norm": 0.2345679012345679, + "acc_norm_stderr": 0.02357688174400571 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.23316062176165803, + "acc_stderr": 0.030516111371476008, + "acc_norm": 0.23316062176165803, + "acc_norm_stderr": 0.030516111371476008 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.039994238792813365, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.039994238792813365 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.22568807339449543, + "acc_stderr": 0.017923087667803053, + "acc_norm": 0.22568807339449543, + "acc_norm_stderr": 0.017923087667803053 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.23015873015873015, + "acc_stderr": 0.03764950879790607, + "acc_norm": 0.23015873015873015, + "acc_norm_stderr": 0.03764950879790607 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2222222222222222, + 
"acc_stderr": 0.023805186524888142, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.023805186524888142 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2975206611570248, + "acc_stderr": 0.04173349148083499, + "acc_norm": 0.2975206611570248, + "acc_norm_stderr": 0.04173349148083499 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.32894736842105265, + "acc_stderr": 0.03823428969926605, + "acc_norm": 0.32894736842105265, + "acc_norm_stderr": 0.03823428969926605 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.22875816993464052, + "acc_stderr": 0.016992723465466226, + "acc_norm": 0.22875816993464052, + "acc_norm_stderr": 0.016992723465466226 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2375886524822695, + "acc_stderr": 0.025389512552729906, + "acc_norm": 0.2375886524822695, + "acc_norm_stderr": 0.025389512552729906 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.32142857142857145, + "acc_stderr": 0.0443280405529152, + "acc_norm": 0.32142857142857145, + "acc_norm_stderr": 0.0443280405529152 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.0340470532865388, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.0340470532865388 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23128491620111732, + "acc_stderr": 0.014102223623152593, + "acc_norm": 0.23128491620111732, + "acc_norm_stderr": 0.014102223623152593 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_professional_medicine|5": 
{ + "acc": 0.4485294117647059, + "acc_stderr": 0.030211479609121593, + "acc_norm": 0.4485294117647059, + "acc_norm_stderr": 0.030211479609121593 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3183673469387755, + "acc_stderr": 0.02982253379398209, + "acc_norm": 0.3183673469387755, + "acc_norm_stderr": 0.02982253379398209 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.25738396624472576, + "acc_stderr": 0.028458820991460302, + "acc_norm": 0.25738396624472576, + "acc_norm_stderr": 0.028458820991460302 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.24902216427640156, + "acc_stderr": 0.01104489226404077, + "acc_norm": 0.24902216427640156, + "acc_norm_stderr": 0.01104489226404077 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.029331162294251735, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.029331162294251735 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.20606060606060606, + "acc_stderr": 0.0315841532404771, + "acc_norm": 0.20606060606060606, + "acc_norm_stderr": 0.0315841532404771 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2692778457772338, + "mc1_stderr": 0.015528566637087304, + "mc2": 0.463216438419055, + "mc2_stderr": 0.015329707584198729 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.07319952774498228, + "acc_stderr": 0.008954927647725423, + "acc_norm": 0.28689492325855964, + "acc_norm_stderr": 0.015550809966781778 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + 
"harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + 
"harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Junmai/KIT-7b-v2", + "model_sha": "20725892ef31e719453654c06747f48934ccdd57", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/KRAFTON/KORani-v1-13B/result_2023-10-17 13:34:31.json b/KRAFTON/KORani-v1-13B/result_2023-10-17 13:34:31.json new file mode 100644 index 0000000000000000000000000000000000000000..229475938b92c3acce42d7b313d93d56abe180e4 --- /dev/null +++ b/KRAFTON/KORani-v1-13B/result_2023-10-17 13:34:31.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.30802047781569963, + "acc_stderr": 0.01349142951729204, + "acc_norm": 0.3515358361774744, + "acc_norm_stderr": 0.013952413699600938 + }, + "harness|ko_hellaswag|10": { + "acc": 0.39533957379008167, + "acc_stderr": 0.004879242848473461, + "acc_norm": 0.5114519020115514, + "acc_norm_stderr": 0.0049884724594180165 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.3216374269005848, + "acc_stderr": 0.03582529442573122, + "acc_norm": 0.3216374269005848, + "acc_norm_stderr": 0.03582529442573122 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.20388349514563106, + "acc_stderr": 0.0398913985953177, + "acc_norm": 0.20388349514563106, + "acc_norm_stderr": 0.0398913985953177 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2567049808429119, + "acc_stderr": 0.015620480263064533, + "acc_norm": 0.2567049808429119, + "acc_norm_stderr": 0.015620480263064533 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.22962962962962963, + "acc_stderr": 0.03633384414073465, + "acc_norm": 0.22962962962962963, + "acc_norm_stderr": 0.03633384414073465 + }, + 
"harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932268, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932268 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2680851063829787, + "acc_stderr": 0.02895734278834235, + "acc_norm": 0.2680851063829787, + "acc_norm_stderr": 0.02895734278834235 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.2710843373493976, + "acc_stderr": 0.034605799075530276, + "acc_norm": 0.2710843373493976, + "acc_norm_stderr": 0.034605799075530276 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.24437299035369775, + "acc_stderr": 0.0244061620946689, + "acc_norm": 0.24437299035369775, + "acc_norm_stderr": 0.0244061620946689 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.22869955156950672, + "acc_stderr": 0.028188240046929196, + "acc_norm": 0.22869955156950672, + "acc_norm_stderr": 0.028188240046929196 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2824427480916031, + "acc_stderr": 0.03948406125768361, + "acc_norm": 0.2824427480916031, + "acc_norm_stderr": 0.03948406125768361 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.18686868686868688, + "acc_stderr": 0.02777253333421899, + "acc_norm": 0.18686868686868688, + "acc_norm_stderr": 0.02777253333421899 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2620689655172414, + "acc_stderr": 0.03664666337225256, + "acc_norm": 0.2620689655172414, + "acc_norm_stderr": 0.03664666337225256 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.14705882352941177, + "acc_stderr": 0.035240689515674495, + "acc_norm": 0.14705882352941177, + "acc_norm_stderr": 0.035240689515674495 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.026265024608275882, + "acc_norm": 0.20588235294117646, + 
"acc_norm_stderr": 0.026265024608275882 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2076923076923077, + "acc_stderr": 0.020567539567246787, + "acc_norm": 0.2076923076923077, + "acc_norm_stderr": 0.020567539567246787 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768077, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768077 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.19, + "acc_stderr": 0.03942772444036623, + "acc_norm": 0.19, + "acc_norm_stderr": 0.03942772444036623 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.23148148148148148, + "acc_stderr": 0.04077494709252627, + "acc_norm": 0.23148148148148148, + "acc_norm_stderr": 0.04077494709252627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.1724137931034483, + "acc_stderr": 0.026577672183036572, + "acc_norm": 0.1724137931034483, + "acc_norm_stderr": 0.026577672183036572 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.20967741935483872, + "acc_stderr": 0.02315787934908353, + "acc_norm": 0.20967741935483872, + "acc_norm_stderr": 0.02315787934908353 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2863247863247863, + "acc_stderr": 0.02961432369045665, + "acc_norm": 0.2863247863247863, + "acc_norm_stderr": 0.02961432369045665 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.21132075471698114, + "acc_stderr": 0.025125766484827845, + "acc_norm": 0.21132075471698114, + "acc_norm_stderr": 0.025125766484827845 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.22727272727272727, + "acc_stderr": 0.04013964554072776, + "acc_norm": 0.22727272727272727, + "acc_norm_stderr": 0.04013964554072776 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.22962962962962963, + "acc_stderr": 0.025644108639267645, + "acc_norm": 0.22962962962962963, + "acc_norm_stderr": 0.025644108639267645 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2251655629139073, + "acc_stderr": 
0.03410435282008936, + "acc_norm": 0.2251655629139073, + "acc_norm_stderr": 0.03410435282008936 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.23383084577114427, + "acc_stderr": 0.029929415408348377, + "acc_norm": 0.23383084577114427, + "acc_norm_stderr": 0.029929415408348377 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.21965317919075145, + "acc_stderr": 0.031568093627031744, + "acc_norm": 0.21965317919075145, + "acc_norm_stderr": 0.031568093627031744 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.21693121693121692, + "acc_stderr": 0.02122708244944504, + "acc_norm": 0.21693121693121692, + "acc_norm_stderr": 0.02122708244944504 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.25, + "acc_stderr": 0.03621034121889507, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03621034121889507 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.17, + "acc_stderr": 0.03775251680686371, + "acc_norm": 0.17, + "acc_norm_stderr": 0.03775251680686371 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2658959537572254, + "acc_stderr": 0.02378620325550829, + "acc_norm": 0.2658959537572254, + "acc_norm_stderr": 0.02378620325550829 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2331288343558282, + "acc_stderr": 0.033220157957767414, + "acc_norm": 0.2331288343558282, + "acc_norm_stderr": 0.033220157957767414 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.25308641975308643, + "acc_stderr": 0.024191808600712992, + "acc_norm": 0.25308641975308643, + "acc_norm_stderr": 0.024191808600712992 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932269, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932269 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.21243523316062177, + "acc_stderr": 
0.029519282616817247, + "acc_norm": 0.21243523316062177, + "acc_norm_stderr": 0.029519282616817247 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.039994238792813344, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.039994238792813344 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.21100917431192662, + "acc_stderr": 0.017493922404112648, + "acc_norm": 0.21100917431192662, + "acc_norm_stderr": 0.017493922404112648 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.037184890068181146, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.037184890068181146 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.024954184324879905, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.024954184324879905 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.24793388429752067, + "acc_stderr": 0.03941897526516304, + "acc_norm": 0.24793388429752067, + "acc_norm_stderr": 0.03941897526516304 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.19736842105263158, + "acc_stderr": 0.03238981601699397, + "acc_norm": 0.19736842105263158, + "acc_norm_stderr": 0.03238981601699397 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.27124183006535946, + "acc_stderr": 0.017986615304030312, + "acc_norm": 0.27124183006535946, + "acc_norm_stderr": 0.017986615304030312 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.23404255319148937, + "acc_stderr": 0.025257861359432407, + "acc_norm": 0.23404255319148937, + "acc_norm_stderr": 0.025257861359432407 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.38392857142857145, + "acc_stderr": 0.04616143075028547, + "acc_norm": 0.38392857142857145, + "acc_norm_stderr": 0.04616143075028547 + }, + 
"harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.16666666666666666, + "acc_stderr": 0.025416428388767474, + "acc_norm": 0.16666666666666666, + "acc_norm_stderr": 0.025416428388767474 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24134078212290502, + "acc_stderr": 0.014310999547961459, + "acc_norm": 0.24134078212290502, + "acc_norm_stderr": 0.014310999547961459 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909282, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909282 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206824, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206824 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.20220588235294118, + "acc_stderr": 0.024398192986654924, + "acc_norm": 0.20220588235294118, + "acc_norm_stderr": 0.024398192986654924 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.22040816326530613, + "acc_stderr": 0.0265370453121453, + "acc_norm": 0.22040816326530613, + "acc_norm_stderr": 0.0265370453121453 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2489451476793249, + "acc_stderr": 0.028146970599422644, + "acc_norm": 0.2489451476793249, + "acc_norm_stderr": 0.028146970599422644 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.24641460234680573, + "acc_stderr": 0.011005971399927234, + "acc_norm": 0.24641460234680573, + "acc_norm_stderr": 0.011005971399927234 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.029331162294251735, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.029331162294251735 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.19393939393939394, + "acc_stderr": 0.030874145136562097, + "acc_norm": 0.19393939393939394, + "acc_norm_stderr": 0.030874145136562097 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2484700122399021, + "mc1_stderr": 
0.0151274270965207, + "mc2": 0.40538205465914606, + "mc2_stderr": 0.01537488137847706 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3789846517119244, + "acc_stderr": 0.01667926068422928, + "acc_norm": 0.4734356552538371, + "acc_norm_stderr": 0.017166075717577747 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + 
"harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "KRAFTON/KORani-v1-13B", + "model_sha": "a699d0cebc4815f33854bc83065a03fc9008473c", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/KRAFTON/KORani-v2-13B/result_2023-10-17 13:34:06.json b/KRAFTON/KORani-v2-13B/result_2023-10-17 13:34:06.json new file mode 100644 index 0000000000000000000000000000000000000000..a1f8e76463faca8844cda24f4d5dc7629b05dd18 --- /dev/null +++ b/KRAFTON/KORani-v2-13B/result_2023-10-17 13:34:06.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2960750853242321, + "acc_stderr": 0.013340916085246263, + "acc_norm": 0.3370307167235495, + "acc_norm_stderr": 0.013813476652902265 + }, + "harness|ko_hellaswag|10": { + "acc": 0.35241983668591914, + "acc_stderr": 0.004767475366689779, + "acc_norm": 0.42252539334793865, + "acc_norm_stderr": 
0.004929517011508216 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4093567251461988, + "acc_stderr": 0.037712831076265434, + "acc_norm": 0.4093567251461988, + "acc_norm_stderr": 0.037712831076265434 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.32038834951456313, + "acc_stderr": 0.0462028408228004, + "acc_norm": 0.32038834951456313, + "acc_norm_stderr": 0.0462028408228004 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.37547892720306514, + "acc_stderr": 0.01731661319718279, + "acc_norm": 0.37547892720306514, + "acc_norm_stderr": 0.01731661319718279 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2740740740740741, + "acc_stderr": 0.03853254836552003, + "acc_norm": 0.2740740740740741, + "acc_norm_stderr": 0.03853254836552003 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3446808510638298, + "acc_stderr": 0.031068985963122145, + "acc_norm": 0.3446808510638298, + "acc_norm_stderr": 0.031068985963122145 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3192771084337349, + "acc_stderr": 0.0362933532994786, + "acc_norm": 0.3192771084337349, + "acc_norm_stderr": 0.0362933532994786 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4115755627009646, + "acc_stderr": 0.027950481494401266, + "acc_norm": 0.4115755627009646, + "acc_norm_stderr": 0.027950481494401266 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.37668161434977576, + "acc_stderr": 0.03252113489929188, + "acc_norm": 0.37668161434977576, + "acc_norm_stderr": 0.03252113489929188 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3893129770992366, + "acc_stderr": 0.04276486542814591, + "acc_norm": 0.3893129770992366, + "acc_norm_stderr": 0.04276486542814591 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 
0.04824181513244218 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.03358618145732524, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.03358618145732524 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3586206896551724, + "acc_stderr": 0.039966295748767186, + "acc_norm": 0.3586206896551724, + "acc_norm_stderr": 0.039966295748767186 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.19607843137254902, + "acc_stderr": 0.03950581861179962, + "acc_norm": 0.19607843137254902, + "acc_norm_stderr": 0.03950581861179962 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3403361344537815, + "acc_stderr": 0.03077805742293167, + "acc_norm": 0.3403361344537815, + "acc_norm_stderr": 0.03077805742293167 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.33589743589743587, + "acc_stderr": 0.02394672474156397, + "acc_norm": 0.33589743589743587, + "acc_norm_stderr": 0.02394672474156397 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4351851851851852, + "acc_stderr": 0.04792898170907062, + "acc_norm": 0.4351851851851852, + "acc_norm_stderr": 0.04792898170907062 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.31527093596059114, + "acc_stderr": 0.03269080871970187, + "acc_norm": 0.31527093596059114, + "acc_norm_stderr": 0.03269080871970187 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3258064516129032, + "acc_stderr": 0.026662010578567107, + "acc_norm": 0.3258064516129032, + "acc_norm_stderr": 0.026662010578567107 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5683760683760684, + "acc_stderr": 0.0324483553531149, + 
"acc_norm": 0.5683760683760684, + "acc_norm_stderr": 0.0324483553531149 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.37358490566037733, + "acc_stderr": 0.029773082713319878, + "acc_norm": 0.37358490566037733, + "acc_norm_stderr": 0.029773082713319878 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.45454545454545453, + "acc_stderr": 0.04769300568972743, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.04769300568972743 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24814814814814815, + "acc_stderr": 0.0263357394040558, + "acc_norm": 0.24814814814814815, + "acc_norm_stderr": 0.0263357394040558 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.037101857261199946, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.037101857261199946 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.42786069651741293, + "acc_stderr": 0.03498541988407795, + "acc_norm": 0.42786069651741293, + "acc_norm_stderr": 0.03498541988407795 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3236994219653179, + "acc_stderr": 0.035676037996391685, + "acc_norm": 0.3236994219653179, + "acc_norm_stderr": 0.035676037996391685 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.02141168439369418, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.02141168439369418 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3263888888888889, + "acc_stderr": 0.03921067198982266, + "acc_norm": 0.3263888888888889, + "acc_norm_stderr": 0.03921067198982266 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.44, + "acc_stderr": 0.0498887651569859, + "acc_norm": 0.44, + "acc_norm_stderr": 0.0498887651569859 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.40173410404624277, + 
"acc_stderr": 0.026394104177643634, + "acc_norm": 0.40173410404624277, + "acc_norm_stderr": 0.026394104177643634 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3128834355828221, + "acc_stderr": 0.03642914578292404, + "acc_norm": 0.3128834355828221, + "acc_norm_stderr": 0.03642914578292404 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3425925925925926, + "acc_stderr": 0.026406145973625658, + "acc_norm": 0.3425925925925926, + "acc_norm_stderr": 0.026406145973625658 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.40932642487046633, + "acc_stderr": 0.03548608168860806, + "acc_norm": 0.40932642487046633, + "acc_norm_stderr": 0.03548608168860806 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.04049339297748142, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.04049339297748142 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.30642201834862387, + "acc_stderr": 0.019765517220458523, + "acc_norm": 0.30642201834862387, + "acc_norm_stderr": 0.019765517220458523 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.04134913018303316, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.04134913018303316 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4215686274509804, + "acc_stderr": 0.028275490156791438, + "acc_norm": 0.4215686274509804, + "acc_norm_stderr": 0.028275490156791438 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5041322314049587, + "acc_stderr": 0.045641987674327526, + "acc_norm": 0.5041322314049587, + "acc_norm_stderr": 0.045641987674327526 + }, + 
"harness|ko_mmlu_astronomy|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.034597776068105365, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.034597776068105365 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3300653594771242, + "acc_stderr": 0.019023726160724553, + "acc_norm": 0.3300653594771242, + "acc_norm_stderr": 0.019023726160724553 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2765957446808511, + "acc_stderr": 0.026684564340460994, + "acc_norm": 0.2765957446808511, + "acc_norm_stderr": 0.026684564340460994 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3482142857142857, + "acc_stderr": 0.045218299028335865, + "acc_norm": 0.3482142857142857, + "acc_norm_stderr": 0.045218299028335865 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.24074074074074073, + "acc_stderr": 0.029157522184605617, + "acc_norm": 0.24074074074074073, + "acc_norm_stderr": 0.029157522184605617 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24134078212290502, + "acc_stderr": 0.014310999547961443, + "acc_norm": 0.24134078212290502, + "acc_norm_stderr": 0.014310999547961443 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.2610294117647059, + "acc_stderr": 0.02667925227010312, + "acc_norm": 0.2610294117647059, + "acc_norm_stderr": 0.02667925227010312 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.40816326530612246, + "acc_stderr": 0.03146465712827424, + "acc_norm": 0.40816326530612246, + "acc_norm_stderr": 0.03146465712827424 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.39662447257383965, + "acc_stderr": 0.03184399873811225, + 
"acc_norm": 0.39662447257383965, + "acc_norm_stderr": 0.03184399873811225 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.27640156453715775, + "acc_stderr": 0.011422153194553567, + "acc_norm": 0.27640156453715775, + "acc_norm_stderr": 0.011422153194553567 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.3431372549019608, + "acc_stderr": 0.033321399446680854, + "acc_norm": 0.3431372549019608, + "acc_norm_stderr": 0.033321399446680854 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3515151515151515, + "acc_stderr": 0.037282069986826503, + "acc_norm": 0.3515151515151515, + "acc_norm_stderr": 0.037282069986826503 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2692778457772338, + "mc1_stderr": 0.015528566637087305, + "mc2": 0.44326975161880294, + "mc2_stderr": 0.015781962014868475 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.31995277449822906, + "acc_stderr": 0.016037153840280538, + "acc_norm": 0.3955135773317591, + "acc_norm_stderr": 0.016810815902206042 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + 
"harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "KRAFTON/KORani-v2-13B", + "model_sha": "12dbb4046d3fabb3b64c3eab2ecc91faec1af9e9", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 
1, + "max_samples": null + } +} \ No newline at end of file diff --git a/KRAFTON/KORani-v3-13B/result_2023-10-17 13:33:45.json b/KRAFTON/KORani-v3-13B/result_2023-10-17 13:33:45.json new file mode 100644 index 0000000000000000000000000000000000000000..01fee2eaf1fd7baf8f0b8fa8fd2f7034bfe7a7a7 --- /dev/null +++ b/KRAFTON/KORani-v3-13B/result_2023-10-17 13:33:45.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3046075085324232, + "acc_stderr": 0.01344952210993249, + "acc_norm": 0.34726962457337884, + "acc_norm_stderr": 0.013913034529620442 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3494323839872535, + "acc_stderr": 0.004758162967997396, + "acc_norm": 0.4313881696873133, + "acc_norm_stderr": 0.004942578520987348 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.38011695906432746, + "acc_stderr": 0.03722965741385539, + "acc_norm": 0.38011695906432746, + "acc_norm_stderr": 0.03722965741385539 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.39805825242718446, + "acc_stderr": 0.04846748253977239, + "acc_norm": 0.39805825242718446, + "acc_norm_stderr": 0.04846748253977239 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3665389527458493, + "acc_stderr": 0.01723124462679705, + "acc_norm": 0.3665389527458493, + "acc_norm_stderr": 0.01723124462679705 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.03944624162501117, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.03944624162501117 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.32340425531914896, + "acc_stderr": 0.030579442773610334, + "acc_norm": 0.32340425531914896, + "acc_norm_stderr": 0.030579442773610334 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3253012048192771, + "acc_stderr": 0.03647168523683227, + "acc_norm": 0.3253012048192771, + 
"acc_norm_stderr": 0.03647168523683227 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.37942122186495175, + "acc_stderr": 0.027559949802347817, + "acc_norm": 0.37942122186495175, + "acc_norm_stderr": 0.027559949802347817 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3811659192825112, + "acc_stderr": 0.03259625118416827, + "acc_norm": 0.3811659192825112, + "acc_norm_stderr": 0.03259625118416827 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3816793893129771, + "acc_stderr": 0.0426073515764456, + "acc_norm": 0.3816793893129771, + "acc_norm_stderr": 0.0426073515764456 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3282828282828283, + "acc_stderr": 0.03345678422756777, + "acc_norm": 0.3282828282828283, + "acc_norm_stderr": 0.03345678422756777 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.32413793103448274, + "acc_stderr": 0.03900432069185553, + "acc_norm": 0.32413793103448274, + "acc_norm_stderr": 0.03900432069185553 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.041583075330832865, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.041583075330832865 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3865546218487395, + "acc_stderr": 0.03163145807552378, + "acc_norm": 0.3865546218487395, + "acc_norm_stderr": 0.03163145807552378 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.34102564102564104, + "acc_stderr": 0.024035489676335044, + "acc_norm": 0.34102564102564104, + "acc_norm_stderr": 0.024035489676335044 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384739, + 
"acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384739 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.04803752235190192, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.04803752235190192 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2955665024630542, + "acc_stderr": 0.032104944337514575, + "acc_norm": 0.2955665024630542, + "acc_norm_stderr": 0.032104944337514575 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3870967741935484, + "acc_stderr": 0.02770935967503249, + "acc_norm": 0.3870967741935484, + "acc_norm_stderr": 0.02770935967503249 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5811965811965812, + "acc_stderr": 0.03232128912157792, + "acc_norm": 0.5811965811965812, + "acc_norm_stderr": 0.03232128912157792 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.33584905660377357, + "acc_stderr": 0.029067220146644826, + "acc_norm": 0.33584905660377357, + "acc_norm_stderr": 0.029067220146644826 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.42727272727272725, + "acc_stderr": 0.04738198703545483, + "acc_norm": 0.42727272727272725, + "acc_norm_stderr": 0.04738198703545483 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.23703703703703705, + "acc_stderr": 0.02592887613276611, + "acc_norm": 0.23703703703703705, + "acc_norm_stderr": 0.02592887613276611 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.03780445850526733, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.03780445850526733 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.43781094527363185, + "acc_stderr": 0.0350808011219984, + "acc_norm": 0.43781094527363185, + "acc_norm_stderr": 0.0350808011219984 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.27167630057803466, + "acc_stderr": 0.0339175032232166, + "acc_norm": 0.27167630057803466, + "acc_norm_stderr": 0.0339175032232166 + }, + 
"harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.25396825396825395, + "acc_stderr": 0.022418042891113935, + "acc_norm": 0.25396825396825395, + "acc_norm_stderr": 0.022418042891113935 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2708333333333333, + "acc_stderr": 0.03716177437566017, + "acc_norm": 0.2708333333333333, + "acc_norm_stderr": 0.03716177437566017 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932269, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932269 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.55, + "acc_stderr": 0.05, + "acc_norm": 0.55, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.3872832369942196, + "acc_stderr": 0.026226158605124655, + "acc_norm": 0.3872832369942196, + "acc_norm_stderr": 0.026226158605124655 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3987730061349693, + "acc_stderr": 0.03847021420456023, + "acc_norm": 0.3987730061349693, + "acc_norm_stderr": 0.03847021420456023 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.38271604938271603, + "acc_stderr": 0.027044538138402616, + "acc_norm": 0.38271604938271603, + "acc_norm_stderr": 0.027044538138402616 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.41968911917098445, + "acc_stderr": 0.035615873276858834, + "acc_norm": 0.41968911917098445, + "acc_norm_stderr": 0.035615873276858834 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.22807017543859648, + "acc_stderr": 0.03947152782669415, + "acc_norm": 0.22807017543859648, + "acc_norm_stderr": 0.03947152782669415 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3174311926605505, + "acc_stderr": 0.0199571521984605, + "acc_norm": 0.3174311926605505, + "acc_norm_stderr": 0.0199571521984605 + }, + 
"harness|ko_mmlu_formal_logic|5": { + "acc": 0.31746031746031744, + "acc_stderr": 0.04163453031302859, + "acc_norm": 0.31746031746031744, + "acc_norm_stderr": 0.04163453031302859 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4084967320261438, + "acc_stderr": 0.028146405993096358, + "acc_norm": 0.4084967320261438, + "acc_norm_stderr": 0.028146405993096358 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5702479338842975, + "acc_stderr": 0.04519082021319773, + "acc_norm": 0.5702479338842975, + "acc_norm_stderr": 0.04519082021319773 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.23026315789473684, + "acc_stderr": 0.03426059424403165, + "acc_norm": 0.23026315789473684, + "acc_norm_stderr": 0.03426059424403165 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.01877168389352817, + "acc_norm": 0.3137254901960784, + "acc_norm_stderr": 0.01877168389352817 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3049645390070922, + "acc_stderr": 0.02746470844202213, + "acc_norm": 0.3049645390070922, + "acc_norm_stderr": 0.02746470844202213 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.36607142857142855, + "acc_stderr": 0.04572372358737431, + "acc_norm": 0.36607142857142855, + "acc_norm_stderr": 0.04572372358737431 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.24074074074074073, + "acc_stderr": 0.029157522184605607, + "acc_norm": 0.24074074074074073, + "acc_norm_stderr": 0.029157522184605607 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24916201117318434, + "acc_stderr": 0.014465893829859933, + "acc_norm": 0.24916201117318434, + "acc_norm_stderr": 0.014465893829859933 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + 
"acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.21691176470588236, + "acc_stderr": 0.025035845227711254, + "acc_norm": 0.21691176470588236, + "acc_norm_stderr": 0.025035845227711254 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4163265306122449, + "acc_stderr": 0.03155782816556164, + "acc_norm": 0.4163265306122449, + "acc_norm_stderr": 0.03155782816556164 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.3924050632911392, + "acc_stderr": 0.03178471874564729, + "acc_norm": 0.3924050632911392, + "acc_norm_stderr": 0.03178471874564729 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3050847457627119, + "acc_stderr": 0.011759939618085451, + "acc_norm": 0.3050847457627119, + "acc_norm_stderr": 0.011759939618085451 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.3627450980392157, + "acc_stderr": 0.03374499356319355, + "acc_norm": 0.3627450980392157, + "acc_norm_stderr": 0.03374499356319355 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4, + "acc_stderr": 0.03825460278380026, + "acc_norm": 0.4, + "acc_norm_stderr": 0.03825460278380026 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.26805385556915545, + "mc1_stderr": 0.015506204722834562, + "mc2": 0.44032476462099357, + "mc2_stderr": 0.015871156864559203 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.29515938606847697, + "acc_stderr": 0.015681535229192186, + "acc_norm": 0.371900826446281, + "acc_norm_stderr": 0.01661661284322494 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 
1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + 
"harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "KRAFTON/KORani-v3-13B", + "model_sha": "d6479f9de126caf02a770e5e8db4524a0ccb4db7", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/KT-AI/midm-bitext-S-7B-inst-v1/result_2023-10-30 15:41:34.json b/KT-AI/midm-bitext-S-7B-inst-v1/result_2023-10-30 15:41:34.json new file mode 100644 index 0000000000000000000000000000000000000000..b4f328e8b65821b1f50e57fd044887ad7f5c0868 --- /dev/null +++ b/KT-AI/midm-bitext-S-7B-inst-v1/result_2023-10-30 15:41:34.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.34044368600682595, + "acc_stderr": 0.013847460518892981, + "acc_norm": 0.4249146757679181, + "acc_norm_stderr": 0.014445698968520769 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4137621987651862, + "acc_stderr": 0.004915003499517832, + "acc_norm": 0.5510854411471818, + "acc_norm_stderr": 0.004963669199433383 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5029239766081871, + "acc_stderr": 0.03834759370936839, + "acc_norm": 0.5029239766081871, + "acc_norm_stderr": 0.03834759370936839 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.47572815533980584, + "acc_stderr": 0.04944901092973781, + "acc_norm": 0.47572815533980584, + "acc_norm_stderr": 0.04944901092973781 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5593869731800766, + "acc_stderr": 0.01775339697390848, + "acc_norm": 0.5593869731800766, + "acc_norm_stderr": 
0.01775339697390848 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.0424463323835323, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.0424463323835323 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3574468085106383, + "acc_stderr": 0.03132941789476425, + "acc_norm": 0.3574468085106383, + "acc_norm_stderr": 0.03132941789476425 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3373493975903614, + "acc_stderr": 0.03680783690727581, + "acc_norm": 0.3373493975903614, + "acc_norm_stderr": 0.03680783690727581 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4662379421221865, + "acc_stderr": 0.02833327710956281, + "acc_norm": 0.4662379421221865, + "acc_norm_stderr": 0.02833327710956281 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.48878923766816146, + "acc_stderr": 0.033549366530984746, + "acc_norm": 0.48878923766816146, + "acc_norm_stderr": 0.033549366530984746 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5190839694656488, + "acc_stderr": 0.04382094705550989, + "acc_norm": 0.5190839694656488, + "acc_norm_stderr": 0.04382094705550989 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.45, + "acc_stderr": 0.04999999999999999, + "acc_norm": 0.45, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5858585858585859, + "acc_stderr": 0.035094383488796295, + "acc_norm": 0.5858585858585859, + "acc_norm_stderr": 0.035094383488796295 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4068965517241379, + "acc_stderr": 0.04093793981266237, + "acc_norm": 0.4068965517241379, + "acc_norm_stderr": 0.04093793981266237 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.04158307533083286, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 
0.04158307533083286 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.46218487394957986, + "acc_stderr": 0.032385469487589795, + "acc_norm": 0.46218487394957986, + "acc_norm_stderr": 0.032385469487589795 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.44871794871794873, + "acc_stderr": 0.025217315184846482, + "acc_norm": 0.44871794871794873, + "acc_norm_stderr": 0.025217315184846482 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956913, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956913 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3054187192118227, + "acc_stderr": 0.03240661565868408, + "acc_norm": 0.3054187192118227, + "acc_norm_stderr": 0.03240661565868408 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.44516129032258067, + "acc_stderr": 0.028272410186214906, + "acc_norm": 0.44516129032258067, + "acc_norm_stderr": 0.028272410186214906 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6538461538461539, + "acc_stderr": 0.0311669573672359, + "acc_norm": 0.6538461538461539, + "acc_norm_stderr": 0.0311669573672359 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.46037735849056605, + "acc_stderr": 0.030676096599389184, + "acc_norm": 0.46037735849056605, + "acc_norm_stderr": 0.030676096599389184 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5363636363636364, + "acc_stderr": 0.04776449162396197, + "acc_norm": 0.5363636363636364, + "acc_norm_stderr": 0.04776449162396197 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.1962962962962963, + "acc_stderr": 0.024217421327417155, + "acc_norm": 0.1962962962962963, 
+ "acc_norm_stderr": 0.024217421327417155 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2582781456953642, + "acc_stderr": 0.035737053147634576, + "acc_norm": 0.2582781456953642, + "acc_norm_stderr": 0.035737053147634576 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.582089552238806, + "acc_stderr": 0.034875586404620636, + "acc_norm": 0.582089552238806, + "acc_norm_stderr": 0.034875586404620636 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4508670520231214, + "acc_stderr": 0.0379401267469703, + "acc_norm": 0.4508670520231214, + "acc_norm_stderr": 0.0379401267469703 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30158730158730157, + "acc_stderr": 0.0236369759961018, + "acc_norm": 0.30158730158730157, + "acc_norm_stderr": 0.0236369759961018 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.040166600304512336, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.040166600304512336 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.43352601156069365, + "acc_stderr": 0.02668013476167922, + "acc_norm": 0.43352601156069365, + "acc_norm_stderr": 0.02668013476167922 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4662576687116564, + "acc_stderr": 0.039194155450484096, + "acc_norm": 0.4662576687116564, + "acc_norm_stderr": 0.039194155450484096 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4382716049382716, + "acc_stderr": 0.027607914087400473, + "acc_norm": 0.4382716049382716, + "acc_norm_stderr": 0.027607914087400473 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + 
"acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5544041450777202, + "acc_stderr": 0.03587014986075659, + "acc_norm": 0.5544041450777202, + "acc_norm_stderr": 0.03587014986075659 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.04049339297748141, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.04049339297748141 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5541284403669725, + "acc_stderr": 0.02131133500970858, + "acc_norm": 0.5541284403669725, + "acc_norm_stderr": 0.02131133500970858 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.36507936507936506, + "acc_stderr": 0.04306241259127155, + "acc_norm": 0.36507936507936506, + "acc_norm_stderr": 0.04306241259127155 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4477124183006536, + "acc_stderr": 0.028472938478033526, + "acc_norm": 0.4477124183006536, + "acc_norm_stderr": 0.028472938478033526 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5289256198347108, + "acc_stderr": 0.04556710331269498, + "acc_norm": 0.5289256198347108, + "acc_norm_stderr": 0.04556710331269498 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4473684210526316, + "acc_stderr": 0.04046336883978252, + "acc_norm": 0.4473684210526316, + "acc_norm_stderr": 0.04046336883978252 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.019431775677037313, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.019431775677037313 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3475177304964539, + "acc_stderr": 0.028406627809590947, + "acc_norm": 0.3475177304964539, + "acc_norm_stderr": 0.028406627809590947 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.32142857142857145, + 
"acc_stderr": 0.04432804055291519, + "acc_norm": 0.32142857142857145, + "acc_norm_stderr": 0.04432804055291519 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3101851851851852, + "acc_stderr": 0.03154696285656629, + "acc_norm": 0.3101851851851852, + "acc_norm_stderr": 0.03154696285656629 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.29608938547486036, + "acc_stderr": 0.015268677317602286, + "acc_norm": 0.29608938547486036, + "acc_norm_stderr": 0.015268677317602286 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3860294117647059, + "acc_stderr": 0.029573269134411124, + "acc_norm": 0.3860294117647059, + "acc_norm_stderr": 0.029573269134411124 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3469387755102041, + "acc_stderr": 0.0304725260267265, + "acc_norm": 0.3469387755102041, + "acc_norm_stderr": 0.0304725260267265 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5864978902953587, + "acc_stderr": 0.03205649904851858, + "acc_norm": 0.5864978902953587, + "acc_norm_stderr": 0.03205649904851858 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.31681877444589307, + "acc_stderr": 0.011882349954722997, + "acc_norm": 0.31681877444589307, + "acc_norm_stderr": 0.011882349954722997 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.03019028245350194, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.03019028245350194 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.18787878787878787, + "acc_stderr": 0.030501934059429144, + "acc_norm": 0.18787878787878787, + "acc_norm_stderr": 
0.030501934059429144 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.29498164014687883, + "mc1_stderr": 0.015964400965589657, + "mc2": 0.4574707149506456, + "mc2_stderr": 0.015369860749341643 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5844155844155844, + "acc_stderr": 0.016943586313076575, + "acc_norm": 0.5997638724911453, + "acc_norm_stderr": 0.016844693510505052 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + 
"harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "KT-AI/midm-bitext-S-7B-inst-v1", + "model_sha": "88545caeab1463c83a15c23f5282cd8ea781dd0b", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/KT-AI/midm-bitext-S-7B-inst-v2/result_2024-01-15 07:08:32Z.json b/KT-AI/midm-bitext-S-7B-inst-v2/result_2024-01-15 07:08:32Z.json new file mode 100644 index 0000000000000000000000000000000000000000..8893a189a9e6eb546fbc6203fda38f75eb976fef --- /dev/null +++ b/KT-AI/midm-bitext-S-7B-inst-v2/result_2024-01-15 07:08:32Z.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.378839590443686, + "acc_stderr": 0.014175915490000322, + "acc_norm": 0.4513651877133106, + "acc_norm_stderr": 0.014542104569955264 + }, + 
"harness|ko_hellaswag|10": { + "acc": 0.433877713602868, + "acc_stderr": 0.004945956744943811, + "acc_norm": 0.5880302728540131, + "acc_norm_stderr": 0.004911837730582209 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6608187134502924, + "acc_stderr": 0.03631053496488904, + "acc_norm": 0.6608187134502924, + "acc_norm_stderr": 0.03631053496488904 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6310679611650486, + "acc_stderr": 0.0477761518115674, + "acc_norm": 0.6310679611650486, + "acc_norm_stderr": 0.0477761518115674 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6526181353767561, + "acc_stderr": 0.01702667174865574, + "acc_norm": 0.6526181353767561, + "acc_norm_stderr": 0.01702667174865574 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4148148148148148, + "acc_stderr": 0.04256193767901407, + "acc_norm": 0.4148148148148148, + "acc_norm_stderr": 0.04256193767901407 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.41702127659574467, + "acc_stderr": 0.03223276266711712, + "acc_norm": 0.41702127659574467, + "acc_norm_stderr": 0.03223276266711712 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.40963855421686746, + "acc_stderr": 0.03828401115079022, + "acc_norm": 0.40963855421686746, + "acc_norm_stderr": 0.03828401115079022 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4790996784565916, + "acc_stderr": 0.028373270961069414, + "acc_norm": 0.4790996784565916, + "acc_norm_stderr": 0.028373270961069414 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5426008968609866, + "acc_stderr": 0.033435777055830646, + "acc_norm": 0.5426008968609866, + "acc_norm_stderr": 0.033435777055830646 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5343511450381679, + "acc_stderr": 0.04374928560599738, + "acc_norm": 0.5343511450381679, + "acc_norm_stderr": 0.04374928560599738 + }, + 
"harness|ko_mmlu_medical_genetics|5": { + "acc": 0.55, + "acc_stderr": 0.05, + "acc_norm": 0.55, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5656565656565656, + "acc_stderr": 0.03531505879359182, + "acc_norm": 0.5656565656565656, + "acc_norm_stderr": 0.03531505879359182 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.47586206896551725, + "acc_stderr": 0.041618085035015295, + "acc_norm": 0.47586206896551725, + "acc_norm_stderr": 0.041618085035015295 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237655, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237655 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.46218487394957986, + "acc_stderr": 0.032385469487589795, + "acc_norm": 0.46218487394957986, + "acc_norm_stderr": 0.032385469487589795 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4641025641025641, + "acc_stderr": 0.025285585990017834, + "acc_norm": 0.4641025641025641, + "acc_norm_stderr": 0.025285585990017834 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.57, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.57, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5370370370370371, + "acc_stderr": 0.04820403072760627, + "acc_norm": 0.5370370370370371, + "acc_norm_stderr": 0.04820403072760627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.33497536945812806, + "acc_stderr": 0.0332085274234831, + "acc_norm": 0.33497536945812806, + "acc_norm_stderr": 0.0332085274234831 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5387096774193548, + "acc_stderr": 0.028358634859836935, + "acc_norm": 0.5387096774193548, + "acc_norm_stderr": 0.028358634859836935 + }, + 
"harness|ko_mmlu_marketing|5": { + "acc": 0.7222222222222222, + "acc_stderr": 0.029343114798094462, + "acc_norm": 0.7222222222222222, + "acc_norm_stderr": 0.029343114798094462 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4867924528301887, + "acc_stderr": 0.030762134874500476, + "acc_norm": 0.4867924528301887, + "acc_norm_stderr": 0.030762134874500476 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5272727272727272, + "acc_stderr": 0.04782001791380061, + "acc_norm": 0.5272727272727272, + "acc_norm_stderr": 0.04782001791380061 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2851851851851852, + "acc_stderr": 0.027528599210340492, + "acc_norm": 0.2851851851851852, + "acc_norm_stderr": 0.027528599210340492 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.26490066225165565, + "acc_stderr": 0.03603038545360384, + "acc_norm": 0.26490066225165565, + "acc_norm_stderr": 0.03603038545360384 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6318407960199005, + "acc_stderr": 0.03410410565495302, + "acc_norm": 0.6318407960199005, + "acc_norm_stderr": 0.03410410565495302 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.47398843930635837, + "acc_stderr": 0.03807301726504513, + "acc_norm": 0.47398843930635837, + "acc_norm_stderr": 0.03807301726504513 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.328042328042328, + "acc_stderr": 0.02418049716437691, + "acc_norm": 0.328042328042328, + "acc_norm_stderr": 0.02418049716437691 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4097222222222222, + "acc_stderr": 0.04112490974670788, + "acc_norm": 0.4097222222222222, + "acc_norm_stderr": 0.04112490974670788 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.7, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.7, + "acc_norm_stderr": 
0.046056618647183814 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4797687861271676, + "acc_stderr": 0.026897049996382868, + "acc_norm": 0.4797687861271676, + "acc_norm_stderr": 0.026897049996382868 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5276073619631901, + "acc_stderr": 0.0392237829061099, + "acc_norm": 0.5276073619631901, + "acc_norm_stderr": 0.0392237829061099 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5, + "acc_stderr": 0.02782074420373286, + "acc_norm": 0.5, + "acc_norm_stderr": 0.02782074420373286 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6217616580310881, + "acc_stderr": 0.03499807276193339, + "acc_norm": 0.6217616580310881, + "acc_norm_stderr": 0.03499807276193339 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.04185774424022057, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.04185774424022057 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6238532110091743, + "acc_stderr": 0.02076923196820508, + "acc_norm": 0.6238532110091743, + "acc_norm_stderr": 0.02076923196820508 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3492063492063492, + "acc_stderr": 0.04263906892795133, + "acc_norm": 0.3492063492063492, + "acc_norm_stderr": 0.04263906892795133 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4869281045751634, + "acc_stderr": 0.028620130800700246, + "acc_norm": 0.4869281045751634, + "acc_norm_stderr": 0.028620130800700246 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6528925619834711, + "acc_stderr": 0.043457245702925335, + "acc_norm": 0.6528925619834711, + "acc_norm_stderr": 
0.043457245702925335 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5, + "acc_stderr": 0.04068942293855797, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04068942293855797 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.019944914136873583, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.019944914136873583 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.02812163604063988, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.02812163604063988 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3482142857142857, + "acc_stderr": 0.04521829902833586, + "acc_norm": 0.3482142857142857, + "acc_norm_stderr": 0.04521829902833586 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.03114144782353604, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.03114144782353604 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.33519553072625696, + "acc_stderr": 0.01578800719018589, + "acc_norm": 0.33519553072625696, + "acc_norm_stderr": 0.01578800719018589 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.030161911930767105, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.030161911930767105 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.46122448979591835, + "acc_stderr": 0.03191282052669277, + "acc_norm": 0.46122448979591835, + "acc_norm_stderr": 0.03191282052669277 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6624472573839663, + "acc_stderr": 0.030781549102026223, + "acc_norm": 0.6624472573839663, + 
"acc_norm_stderr": 0.030781549102026223 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.31681877444589307, + "acc_stderr": 0.011882349954722997, + "acc_norm": 0.31681877444589307, + "acc_norm_stderr": 0.011882349954722997 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.24019607843137256, + "acc_stderr": 0.02998373305591361, + "acc_norm": 0.24019607843137256, + "acc_norm_stderr": 0.02998373305591361 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2545454545454545, + "acc_stderr": 0.03401506715249039, + "acc_norm": 0.2545454545454545, + "acc_norm_stderr": 0.03401506715249039 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2839657282741738, + "mc1_stderr": 0.01578537085839672, + "mc2": 0.4359830362647055, + "mc2_stderr": 0.015231569782976262 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5785123966942148, + "acc_stderr": 0.016977101932601518, + "acc_norm": 0.5855962219598583, + "acc_norm_stderr": 0.016936583383943615 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + 
"harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "KT-AI/midm-bitext-S-7B-inst-v2", + "model_sha": "a1d976b7852265bdac94f05a9ce34a48add84236", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No 
newline at end of file diff --git a/Kaeri-Jenti/LDCC-with-korca/result_2023-11-06 01:29:42.json b/Kaeri-Jenti/LDCC-with-korca/result_2023-11-06 01:29:42.json new file mode 100644 index 0000000000000000000000000000000000000000..1a35cc825d9cd29e265edc23c2dcf0a09c0e20bc --- /dev/null +++ b/Kaeri-Jenti/LDCC-with-korca/result_2023-11-06 01:29:42.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3993174061433447, + "acc_stderr": 0.014312094557946704, + "acc_norm": 0.4496587030716723, + "acc_norm_stderr": 0.014537144444284738 + }, + "harness|ko_hellaswag|10": { + "acc": 0.42202748456482775, + "acc_stderr": 0.004928735103635845, + "acc_norm": 0.5664210316669986, + "acc_norm_stderr": 0.004945558069852528 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4678362573099415, + "acc_stderr": 0.03826882417660369, + "acc_norm": 0.4678362573099415, + "acc_norm_stderr": 0.03826882417660369 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.49514563106796117, + "acc_stderr": 0.049505043821289195, + "acc_norm": 0.49514563106796117, + "acc_norm_stderr": 0.049505043821289195 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5466155810983397, + "acc_stderr": 0.01780208713585031, + "acc_norm": 0.5466155810983397, + "acc_norm_stderr": 0.01780208713585031 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.43703703703703706, + "acc_stderr": 0.04284958639753398, + "acc_norm": 0.43703703703703706, + "acc_norm_stderr": 0.04284958639753398 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4, + "acc_stderr": 0.03202563076101735, + "acc_norm": 0.4, + "acc_norm_stderr": 0.03202563076101735 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.45180722891566266, + "acc_stderr": 0.03874371556587952, + "acc_norm": 0.45180722891566266, + "acc_norm_stderr": 0.03874371556587952 + }, + 
"harness|ko_mmlu_philosophy|5": { + "acc": 0.5048231511254019, + "acc_stderr": 0.028396770444111298, + "acc_norm": 0.5048231511254019, + "acc_norm_stderr": 0.028396770444111298 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4977578475336323, + "acc_stderr": 0.033557465352232634, + "acc_norm": 0.4977578475336323, + "acc_norm_stderr": 0.033557465352232634 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48091603053435117, + "acc_stderr": 0.04382094705550989, + "acc_norm": 0.48091603053435117, + "acc_norm_stderr": 0.04382094705550989 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5606060606060606, + "acc_stderr": 0.03536085947529481, + "acc_norm": 0.5606060606060606, + "acc_norm_stderr": 0.03536085947529481 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.43448275862068964, + "acc_stderr": 0.041307408795554966, + "acc_norm": 0.43448275862068964, + "acc_norm_stderr": 0.041307408795554966 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.043364327079931785, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.043364327079931785 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.44537815126050423, + "acc_stderr": 0.0322841062671639, + "acc_norm": 0.44537815126050423, + "acc_norm_stderr": 0.0322841062671639 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.45384615384615384, + "acc_stderr": 0.025242770987126177, + "acc_norm": 0.45384615384615384, + "acc_norm_stderr": 0.025242770987126177 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.55, + "acc_stderr": 0.04999999999999999, + "acc_norm": 0.55, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.047609522856952344, + "acc_norm": 0.34, + "acc_norm_stderr": 
0.047609522856952344 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.04826217294139894, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.04826217294139894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3842364532019704, + "acc_stderr": 0.0342239856565755, + "acc_norm": 0.3842364532019704, + "acc_norm_stderr": 0.0342239856565755 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.45161290322580644, + "acc_stderr": 0.02831050034856839, + "acc_norm": 0.45161290322580644, + "acc_norm_stderr": 0.02831050034856839 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6239316239316239, + "acc_stderr": 0.03173393632969481, + "acc_norm": 0.6239316239316239, + "acc_norm_stderr": 0.03173393632969481 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.43018867924528303, + "acc_stderr": 0.03047144586718324, + "acc_norm": 0.43018867924528303, + "acc_norm_stderr": 0.03047144586718324 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5636363636363636, + "acc_stderr": 0.04750185058907296, + "acc_norm": 0.5636363636363636, + "acc_norm_stderr": 0.04750185058907296 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.27037037037037037, + "acc_stderr": 0.02708037281514565, + "acc_norm": 0.27037037037037037, + "acc_norm_stderr": 0.02708037281514565 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.038020397601079024, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.038020397601079024 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6268656716417911, + "acc_stderr": 0.03419832608176007, + "acc_norm": 0.6268656716417911, + "acc_norm_stderr": 0.03419832608176007 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.37572254335260113, + "acc_stderr": 0.03692820767264867, + "acc_norm": 0.37572254335260113, + "acc_norm_stderr": 0.03692820767264867 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3148148148148148, + 
"acc_stderr": 0.02391998416404774, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.02391998416404774 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.04076663253918567, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.04076663253918567 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.65, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.65, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.49421965317919075, + "acc_stderr": 0.02691729617914911, + "acc_norm": 0.49421965317919075, + "acc_norm_stderr": 0.02691729617914911 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.49693251533742333, + "acc_stderr": 0.03928297078179662, + "acc_norm": 0.49693251533742333, + "acc_norm_stderr": 0.03928297078179662 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.027815973433878014, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.027815973433878014 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5440414507772021, + "acc_stderr": 0.035944137112724366, + "acc_norm": 0.5440414507772021, + "acc_norm_stderr": 0.035944137112724366 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.040493392977481404, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.040493392977481404 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5669724770642202, + "acc_stderr": 0.021244146569074345, + "acc_norm": 0.5669724770642202, + "acc_norm_stderr": 0.021244146569074345 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 
0.3492063492063492, + "acc_stderr": 0.04263906892795133, + "acc_norm": 0.3492063492063492, + "acc_norm_stderr": 0.04263906892795133 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4084967320261438, + "acc_stderr": 0.028146405993096358, + "acc_norm": 0.4084967320261438, + "acc_norm_stderr": 0.028146405993096358 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6694214876033058, + "acc_stderr": 0.04294340845212095, + "acc_norm": 0.6694214876033058, + "acc_norm_stderr": 0.04294340845212095 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40789473684210525, + "acc_stderr": 0.03999309712777471, + "acc_norm": 0.40789473684210525, + "acc_norm_stderr": 0.03999309712777471 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4035947712418301, + "acc_stderr": 0.01984828016840116, + "acc_norm": 0.4035947712418301, + "acc_norm_stderr": 0.01984828016840116 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3723404255319149, + "acc_stderr": 0.028838921471251458, + "acc_norm": 0.3723404255319149, + "acc_norm_stderr": 0.028838921471251458 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25, + "acc_stderr": 0.04109974682633932, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04109974682633932 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.03214952147802749, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.03214952147802749 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2446927374301676, + "acc_stderr": 0.014378169884098424, + "acc_norm": 0.2446927374301676, + "acc_norm_stderr": 0.014378169884098424 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + 
"harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.41, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3602941176470588, + "acc_stderr": 0.029163128570670736, + "acc_norm": 0.3602941176470588, + "acc_norm_stderr": 0.029163128570670736 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.46122448979591835, + "acc_stderr": 0.03191282052669277, + "acc_norm": 0.46122448979591835, + "acc_norm_stderr": 0.03191282052669277 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6455696202531646, + "acc_stderr": 0.0311373042971858, + "acc_norm": 0.6455696202531646, + "acc_norm_stderr": 0.0311373042971858 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.35723598435462844, + "acc_stderr": 0.012238615750316498, + "acc_norm": 0.35723598435462844, + "acc_norm_stderr": 0.012238615750316498 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.47058823529411764, + "acc_stderr": 0.03503235296367992, + "acc_norm": 0.47058823529411764, + "acc_norm_stderr": 0.03503235296367992 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5272727272727272, + "acc_stderr": 0.03898531605579418, + "acc_norm": 0.5272727272727272, + "acc_norm_stderr": 0.03898531605579418 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.28151774785801714, + "mc1_stderr": 0.01574402724825605, + "mc2": 0.4486998920807941, + "mc2_stderr": 0.015146223309438359 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.46989374262101535, + "acc_stderr": 0.017159163590170213, + "acc_norm": 0.6127508854781583, + "acc_norm_stderr": 0.016747577991642792 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + 
"harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + 
"harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Kaeri-Jenti/LDCC-with-korca", + "model_sha": "50bca191d06902b5359abb3b1007b8106eff41f6", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Kaeri-Jenti/LDCC-with-openorca-and-korca/result_2023-11-06 11:07:08.json b/Kaeri-Jenti/LDCC-with-openorca-and-korca/result_2023-11-06 11:07:08.json new file mode 100644 index 0000000000000000000000000000000000000000..3d4cf313ced5cde007c2356b78329a12989d6351 --- /dev/null +++ b/Kaeri-Jenti/LDCC-with-openorca-and-korca/result_2023-11-06 11:07:08.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4035836177474403, + "acc_stderr": 0.014337158914268436, + "acc_norm": 0.45563139931740615, + "acc_norm_stderr": 0.014553749939306864 + }, + "harness|ko_hellaswag|10": { + "acc": 0.42162915753833896, + "acc_stderr": 0.004928105880776079, + "acc_norm": 0.566122286397132, + "acc_norm_stderr": 0.004945956744943813 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5029239766081871, + "acc_stderr": 0.03834759370936839, + "acc_norm": 0.5029239766081871, + "acc_norm_stderr": 0.03834759370936839 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4854368932038835, + "acc_stderr": 0.04948637324026637, + "acc_norm": 0.4854368932038835, + "acc_norm_stderr": 0.04948637324026637 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5440613026819924, + "acc_stderr": 0.017810403925435342, + "acc_norm": 
0.5440613026819924, + "acc_norm_stderr": 0.017810403925435342 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.04309732901036354, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.04309732901036354 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39574468085106385, + "acc_stderr": 0.031967586978353627, + "acc_norm": 0.39574468085106385, + "acc_norm_stderr": 0.031967586978353627 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42771084337349397, + "acc_stderr": 0.038515976837185335, + "acc_norm": 0.42771084337349397, + "acc_norm_stderr": 0.038515976837185335 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.49517684887459806, + "acc_stderr": 0.028396770444111298, + "acc_norm": 0.49517684887459806, + "acc_norm_stderr": 0.028396770444111298 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.484304932735426, + "acc_stderr": 0.0335412657542081, + "acc_norm": 0.484304932735426, + "acc_norm_stderr": 0.0335412657542081 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4732824427480916, + "acc_stderr": 0.04379024936553894, + "acc_norm": 0.4732824427480916, + "acc_norm_stderr": 0.04379024936553894 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5303030303030303, + "acc_stderr": 0.03555804051763929, + "acc_norm": 0.5303030303030303, + "acc_norm_stderr": 0.03555804051763929 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.041042692118062316, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.041042692118062316 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.0433643270799318, + 
"acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.0433643270799318 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.032145368597886394, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.032145368597886394 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.44358974358974357, + "acc_stderr": 0.025189149894764194, + "acc_norm": 0.44358974358974357, + "acc_norm_stderr": 0.025189149894764194 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.048262172941398944, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.048262172941398944 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3891625615763547, + "acc_stderr": 0.034304624161038716, + "acc_norm": 0.3891625615763547, + "acc_norm_stderr": 0.034304624161038716 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4483870967741935, + "acc_stderr": 0.028292056830112735, + "acc_norm": 0.4483870967741935, + "acc_norm_stderr": 0.028292056830112735 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6196581196581197, + "acc_stderr": 0.031804252043840985, + "acc_norm": 0.6196581196581197, + "acc_norm_stderr": 0.031804252043840985 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4, + "acc_stderr": 0.030151134457776285, + "acc_norm": 0.4, + "acc_norm_stderr": 0.030151134457776285 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5727272727272728, + "acc_stderr": 0.047381987035454834, + "acc_norm": 0.5727272727272728, + "acc_norm_stderr": 0.047381987035454834 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25925925925925924, + 
"acc_stderr": 0.026719240783712166, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.026719240783712166 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.038020397601079024, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.038020397601079024 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5970149253731343, + "acc_stderr": 0.034683432951111266, + "acc_norm": 0.5970149253731343, + "acc_norm_stderr": 0.034683432951111266 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3930635838150289, + "acc_stderr": 0.0372424959581773, + "acc_norm": 0.3930635838150289, + "acc_norm_stderr": 0.0372424959581773 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.29365079365079366, + "acc_stderr": 0.023456037383982022, + "acc_norm": 0.29365079365079366, + "acc_norm_stderr": 0.023456037383982022 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3680555555555556, + "acc_stderr": 0.04032999053960718, + "acc_norm": 0.3680555555555556, + "acc_norm_stderr": 0.04032999053960718 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.69, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.69, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5144508670520231, + "acc_stderr": 0.026907849856282542, + "acc_norm": 0.5144508670520231, + "acc_norm_stderr": 0.026907849856282542 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5214723926380368, + "acc_stderr": 0.03924746876751129, + "acc_norm": 0.5214723926380368, + "acc_norm_stderr": 0.03924746876751129 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.027815973433878014, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.027815973433878014 + }, + "harness|ko_mmlu_college_mathematics|5": { + 
"acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5284974093264249, + "acc_stderr": 0.03602573571288442, + "acc_norm": 0.5284974093264249, + "acc_norm_stderr": 0.03602573571288442 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2894736842105263, + "acc_stderr": 0.04266339443159394, + "acc_norm": 0.2894736842105263, + "acc_norm_stderr": 0.04266339443159394 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.563302752293578, + "acc_stderr": 0.021264820158714205, + "acc_norm": 0.563302752293578, + "acc_norm_stderr": 0.021264820158714205 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.04134913018303316, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.04134913018303316 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.39869281045751637, + "acc_stderr": 0.028036092273891765, + "acc_norm": 0.39869281045751637, + "acc_norm_stderr": 0.028036092273891765 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6859504132231405, + "acc_stderr": 0.04236964753041018, + "acc_norm": 0.6859504132231405, + "acc_norm_stderr": 0.04236964753041018 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3881578947368421, + "acc_stderr": 0.03965842097512744, + "acc_norm": 0.3881578947368421, + "acc_norm_stderr": 0.03965842097512744 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.41013071895424835, + "acc_stderr": 0.019898412717635892, + "acc_norm": 0.41013071895424835, + "acc_norm_stderr": 0.019898412717635892 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3617021276595745, + "acc_stderr": 0.028663820147199492, + "acc_norm": 0.3617021276595745, + "acc_norm_stderr": 0.028663820147199492 + }, + 
"harness|ko_mmlu_machine_learning|5": { + "acc": 0.25892857142857145, + "acc_stderr": 0.04157751539865629, + "acc_norm": 0.25892857142857145, + "acc_norm_stderr": 0.04157751539865629 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.031141447823536048, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.031141447823536048 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2446927374301676, + "acc_stderr": 0.014378169884098424, + "acc_norm": 0.2446927374301676, + "acc_norm_stderr": 0.014378169884098424 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.049999999999999996, + "acc_norm": 0.45, + "acc_norm_stderr": 0.049999999999999996 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.33455882352941174, + "acc_stderr": 0.028661996202335314, + "acc_norm": 0.33455882352941174, + "acc_norm_stderr": 0.028661996202335314 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4857142857142857, + "acc_stderr": 0.031996152328062875, + "acc_norm": 0.4857142857142857, + "acc_norm_stderr": 0.031996152328062875 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6540084388185654, + "acc_stderr": 0.03096481058878671, + "acc_norm": 0.6540084388185654, + "acc_norm_stderr": 0.03096481058878671 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3683181225554107, + "acc_stderr": 0.012319403369564642, + "acc_norm": 0.3683181225554107, + "acc_norm_stderr": 0.012319403369564642 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4950980392156863, + "acc_stderr": 0.03509143375606786, + "acc_norm": 0.4950980392156863, + "acc_norm_stderr": 0.03509143375606786 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5272727272727272, + "acc_stderr": 
0.03898531605579418, + "acc_norm": 0.5272727272727272, + "acc_norm_stderr": 0.03898531605579418 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.29008567931456547, + "mc1_stderr": 0.01588623687420952, + "mc2": 0.448398942069094, + "mc2_stderr": 0.015159190515111855 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4923258559622196, + "acc_stderr": 0.01718832921965428, + "acc_norm": 0.6221959858323495, + "acc_norm_stderr": 0.01666908284069498 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + 
"harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Kaeri-Jenti/LDCC-with-openorca-and-korca", + "model_sha": "7f845005dc24e13e4fe380e32aa1b0f649b85743", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Kaeri-Jenti/LDCC-with-openorca/result_2023-11-05 09:08:43.json b/Kaeri-Jenti/LDCC-with-openorca/result_2023-11-05 09:08:43.json new file mode 100644 index 0000000000000000000000000000000000000000..47c5106c0172f0215bc1da826db315361bdfa3bd --- /dev/null +++ b/Kaeri-Jenti/LDCC-with-openorca/result_2023-11-05 09:08:43.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4044368600682594, + "acc_stderr": 0.014342036483436177, + "acc_norm": 
0.46331058020477817, + "acc_norm_stderr": 0.014572000527756994 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4217287392949612, + "acc_stderr": 0.0049282634946167326, + "acc_norm": 0.5668193586934873, + "acc_norm_stderr": 0.0049450236570322765 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.49122807017543857, + "acc_stderr": 0.038342347441649924, + "acc_norm": 0.49122807017543857, + "acc_norm_stderr": 0.038342347441649924 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4563106796116505, + "acc_stderr": 0.049318019942204146, + "acc_norm": 0.4563106796116505, + "acc_norm_stderr": 0.049318019942204146 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5389527458492975, + "acc_stderr": 0.017825621793239002, + "acc_norm": 0.5389527458492975, + "acc_norm_stderr": 0.017825621793239002 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4888888888888889, + "acc_stderr": 0.04318275491977976, + "acc_norm": 0.4888888888888889, + "acc_norm_stderr": 0.04318275491977976 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4085106382978723, + "acc_stderr": 0.03213418026701576, + "acc_norm": 0.4085106382978723, + "acc_norm_stderr": 0.03213418026701576 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42168674698795183, + "acc_stderr": 0.038444531817709175, + "acc_norm": 0.42168674698795183, + "acc_norm_stderr": 0.038444531817709175 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5080385852090032, + "acc_stderr": 0.028394421370984538, + "acc_norm": 0.5080385852090032, + "acc_norm_stderr": 0.028394421370984538 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.484304932735426, + "acc_stderr": 0.0335412657542081, + "acc_norm": 0.484304932735426, + "acc_norm_stderr": 0.0335412657542081 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48854961832061067, + "acc_stderr": 0.04384140024078016, + 
"acc_norm": 0.48854961832061067, + "acc_norm_stderr": 0.04384140024078016 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5353535353535354, + "acc_stderr": 0.03553436368828064, + "acc_norm": 0.5353535353535354, + "acc_norm_stderr": 0.03553436368828064 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.43448275862068964, + "acc_stderr": 0.041307408795554966, + "acc_norm": 0.43448275862068964, + "acc_norm_stderr": 0.041307408795554966 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.04488482852329017, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.04488482852329017 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.42436974789915966, + "acc_stderr": 0.032104790510157764, + "acc_norm": 0.42436974789915966, + "acc_norm_stderr": 0.032104790510157764 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4461538461538462, + "acc_stderr": 0.025203571773028333, + "acc_norm": 0.4461538461538462, + "acc_norm_stderr": 0.025203571773028333 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4187192118226601, + "acc_stderr": 0.03471192860518468, + "acc_norm": 0.4187192118226601, + "acc_norm_stderr": 0.03471192860518468 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.43870967741935485, + 
"acc_stderr": 0.02822949732031722, + "acc_norm": 0.43870967741935485, + "acc_norm_stderr": 0.02822949732031722 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6068376068376068, + "acc_stderr": 0.03199957924651048, + "acc_norm": 0.6068376068376068, + "acc_norm_stderr": 0.03199957924651048 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.41132075471698115, + "acc_stderr": 0.030285009259009794, + "acc_norm": 0.41132075471698115, + "acc_norm_stderr": 0.030285009259009794 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.04769300568972743, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.04769300568972743 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25555555555555554, + "acc_stderr": 0.02659393910184407, + "acc_norm": 0.25555555555555554, + "acc_norm_stderr": 0.02659393910184407 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.03757949922943343, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.03757949922943343 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6019900497512438, + "acc_stderr": 0.034611994290400135, + "acc_norm": 0.6019900497512438, + "acc_norm_stderr": 0.034611994290400135 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4161849710982659, + "acc_stderr": 0.03758517775404947, + "acc_norm": 0.4161849710982659, + "acc_norm_stderr": 0.03758517775404947 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.02326651221373057, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.02326651221373057 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.040166600304512336, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.040166600304512336 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + 
"harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.71, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.71, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.49710982658959535, + "acc_stderr": 0.02691864538323901, + "acc_norm": 0.49710982658959535, + "acc_norm_stderr": 0.02691864538323901 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5276073619631901, + "acc_stderr": 0.0392237829061099, + "acc_norm": 0.5276073619631901, + "acc_norm_stderr": 0.0392237829061099 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5, + "acc_stderr": 0.02782074420373286, + "acc_norm": 0.5, + "acc_norm_stderr": 0.02782074420373286 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.533678756476684, + "acc_stderr": 0.03600244069867178, + "acc_norm": 0.533678756476684, + "acc_norm_stderr": 0.03600244069867178 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.04227054451232199, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.04227054451232199 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5853211009174312, + "acc_stderr": 0.021122903208602592, + "acc_norm": 0.5853211009174312, + "acc_norm_stderr": 0.021122903208602592 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.04134913018303316, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.04134913018303316 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.39869281045751637, + "acc_stderr": 0.02803609227389177, + "acc_norm": 0.39869281045751637, + "acc_norm_stderr": 0.02803609227389177 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + 
"harness|ko_mmlu_international_law|5": { + "acc": 0.6694214876033058, + "acc_stderr": 0.04294340845212094, + "acc_norm": 0.6694214876033058, + "acc_norm_stderr": 0.04294340845212094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40789473684210525, + "acc_stderr": 0.03999309712777472, + "acc_norm": 0.40789473684210525, + "acc_norm_stderr": 0.03999309712777472 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4019607843137255, + "acc_stderr": 0.01983517648437538, + "acc_norm": 0.4019607843137255, + "acc_norm_stderr": 0.01983517648437538 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.36879432624113473, + "acc_stderr": 0.02878222756134724, + "acc_norm": 0.36879432624113473, + "acc_norm_stderr": 0.02878222756134724 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2767857142857143, + "acc_stderr": 0.04246624336697624, + "acc_norm": 0.2767857142857143, + "acc_norm_stderr": 0.04246624336697624 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3472222222222222, + "acc_stderr": 0.032468872436376486, + "acc_norm": 0.3472222222222222, + "acc_norm_stderr": 0.032468872436376486 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.25139664804469275, + "acc_stderr": 0.014508979453553974, + "acc_norm": 0.25139664804469275, + "acc_norm_stderr": 0.014508979453553974 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3713235294117647, + "acc_stderr": 0.02934980313976587, + "acc_norm": 0.3713235294117647, + "acc_norm_stderr": 0.02934980313976587 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4816326530612245, + "acc_stderr": 0.03198761546763125, + "acc_norm": 
0.4816326530612245, + "acc_norm_stderr": 0.03198761546763125 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6455696202531646, + "acc_stderr": 0.0311373042971858, + "acc_norm": 0.6455696202531646, + "acc_norm_stderr": 0.0311373042971858 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.36114732724902215, + "acc_stderr": 0.012267935477519032, + "acc_norm": 0.36114732724902215, + "acc_norm_stderr": 0.012267935477519032 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5, + "acc_stderr": 0.03509312031717982, + "acc_norm": 0.5, + "acc_norm_stderr": 0.03509312031717982 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5212121212121212, + "acc_stderr": 0.03900828913737301, + "acc_norm": 0.5212121212121212, + "acc_norm_stderr": 0.03900828913737301 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.29008567931456547, + "mc1_stderr": 0.01588623687420952, + "mc2": 0.44749695382551585, + "mc2_stderr": 0.015166538817685267 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4852420306965762, + "acc_stderr": 0.017182864434998567, + "acc_norm": 0.6162927981109799, + "acc_norm_stderr": 0.016718924637231826 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + 
"harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Kaeri-Jenti/LDCC-with-openorca", + "model_sha": "f2d0734b7c42df6a4c4cd53aed9f5950b28a7546", + 
"model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Kaeri-Jenti/LDCC-with-openorca2/result_2023-11-07 14:53:29.json b/Kaeri-Jenti/LDCC-with-openorca2/result_2023-11-07 14:53:29.json new file mode 100644 index 0000000000000000000000000000000000000000..8c84374cfd35fbc04951c9ae13067f2361f46f12 --- /dev/null +++ b/Kaeri-Jenti/LDCC-with-openorca2/result_2023-11-07 14:53:29.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4044368600682594, + "acc_stderr": 0.014342036483436177, + "acc_norm": 0.46245733788395904, + "acc_norm_stderr": 0.014570144495075576 + }, + "harness|ko_hellaswag|10": { + "acc": 0.42182832105158335, + "acc_stderr": 0.004928420903026553, + "acc_norm": 0.5668193586934873, + "acc_norm_stderr": 0.0049450236570322765 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.49707602339181284, + "acc_stderr": 0.03834759370936839, + "acc_norm": 0.49707602339181284, + "acc_norm_stderr": 0.03834759370936839 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4854368932038835, + "acc_stderr": 0.04948637324026637, + "acc_norm": 0.4854368932038835, + "acc_norm_stderr": 0.04948637324026637 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.541507024265645, + "acc_stderr": 0.017818248603465585, + "acc_norm": 0.541507024265645, + "acc_norm_stderr": 0.017818248603465585 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4888888888888889, + "acc_stderr": 0.04318275491977976, + "acc_norm": 0.4888888888888889, + "acc_norm_stderr": 0.04318275491977976 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.40425531914893614, + "acc_stderr": 0.032081157507886836, + "acc_norm": 0.40425531914893614, + "acc_norm_stderr": 
0.032081157507886836 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42168674698795183, + "acc_stderr": 0.038444531817709175, + "acc_norm": 0.42168674698795183, + "acc_norm_stderr": 0.038444531817709175 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5048231511254019, + "acc_stderr": 0.02839677044411129, + "acc_norm": 0.5048231511254019, + "acc_norm_stderr": 0.02839677044411129 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.484304932735426, + "acc_stderr": 0.0335412657542081, + "acc_norm": 0.484304932735426, + "acc_norm_stderr": 0.0335412657542081 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48854961832061067, + "acc_stderr": 0.04384140024078016, + "acc_norm": 0.48854961832061067, + "acc_norm_stderr": 0.04384140024078016 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.44, + "acc_stderr": 0.0498887651569859, + "acc_norm": 0.44, + "acc_norm_stderr": 0.0498887651569859 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5404040404040404, + "acc_stderr": 0.035507024651313425, + "acc_norm": 0.5404040404040404, + "acc_norm_stderr": 0.035507024651313425 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.43448275862068964, + "acc_stderr": 0.041307408795554966, + "acc_norm": 0.43448275862068964, + "acc_norm_stderr": 0.041307408795554966 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.04488482852329017, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.04488482852329017 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.42436974789915966, + "acc_stderr": 0.032104790510157764, + "acc_norm": 0.42436974789915966, + "acc_norm_stderr": 0.032104790510157764 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.44871794871794873, + "acc_stderr": 0.025217315184846482, + "acc_norm": 0.44871794871794873, + "acc_norm_stderr": 0.025217315184846482 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, 
+ "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4187192118226601, + "acc_stderr": 0.03471192860518468, + "acc_norm": 0.4187192118226601, + "acc_norm_stderr": 0.03471192860518468 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.432258064516129, + "acc_stderr": 0.028181739720019416, + "acc_norm": 0.432258064516129, + "acc_norm_stderr": 0.028181739720019416 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6111111111111112, + "acc_stderr": 0.03193705726200293, + "acc_norm": 0.6111111111111112, + "acc_norm_stderr": 0.03193705726200293 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.41132075471698115, + "acc_stderr": 0.03028500925900979, + "acc_norm": 0.41132075471698115, + "acc_norm_stderr": 0.03028500925900979 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5363636363636364, + "acc_stderr": 0.04776449162396197, + "acc_norm": 0.5363636363636364, + "acc_norm_stderr": 0.04776449162396197 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.026719240783712163, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.026719240783712163 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.03757949922943343, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.03757949922943343 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6019900497512438, + "acc_stderr": 0.034611994290400135, + "acc_norm": 0.6019900497512438, + "acc_norm_stderr": 0.034611994290400135 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4277456647398844, + 
"acc_stderr": 0.03772446857518026, + "acc_norm": 0.4277456647398844, + "acc_norm_stderr": 0.03772446857518026 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.291005291005291, + "acc_stderr": 0.02339382650048487, + "acc_norm": 0.291005291005291, + "acc_norm_stderr": 0.02339382650048487 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3541666666666667, + "acc_stderr": 0.039994111357535424, + "acc_norm": 0.3541666666666667, + "acc_norm_stderr": 0.039994111357535424 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.71, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.71, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.49710982658959535, + "acc_stderr": 0.02691864538323901, + "acc_norm": 0.49710982658959535, + "acc_norm_stderr": 0.02691864538323901 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5337423312883436, + "acc_stderr": 0.039194155450484096, + "acc_norm": 0.5337423312883436, + "acc_norm_stderr": 0.039194155450484096 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4876543209876543, + "acc_stderr": 0.027812262269327235, + "acc_norm": 0.4876543209876543, + "acc_norm_stderr": 0.027812262269327235 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.538860103626943, + "acc_stderr": 0.03597524411734578, + "acc_norm": 0.538860103626943, + "acc_norm_stderr": 0.03597524411734578 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.04227054451232199, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.04227054451232199 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 
0.5853211009174312, + "acc_stderr": 0.021122903208602592, + "acc_norm": 0.5853211009174312, + "acc_norm_stderr": 0.021122903208602592 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.04134913018303316, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.04134913018303316 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4150326797385621, + "acc_stderr": 0.028213504177824093, + "acc_norm": 0.4150326797385621, + "acc_norm_stderr": 0.028213504177824093 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6776859504132231, + "acc_stderr": 0.04266416363352167, + "acc_norm": 0.6776859504132231, + "acc_norm_stderr": 0.04266416363352167 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40131578947368424, + "acc_stderr": 0.03988903703336285, + "acc_norm": 0.40131578947368424, + "acc_norm_stderr": 0.03988903703336285 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.39869281045751637, + "acc_stderr": 0.019808281317449848, + "acc_norm": 0.39869281045751637, + "acc_norm_stderr": 0.019808281317449848 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.36524822695035464, + "acc_stderr": 0.028723863853281278, + "acc_norm": 0.36524822695035464, + "acc_norm_stderr": 0.028723863853281278 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2767857142857143, + "acc_stderr": 0.04246624336697624, + "acc_norm": 0.2767857142857143, + "acc_norm_stderr": 0.04246624336697624 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.33796296296296297, + "acc_stderr": 0.03225941352631295, + "acc_norm": 0.33796296296296297, + "acc_norm_stderr": 0.03225941352631295 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.25139664804469275, + "acc_stderr": 0.014508979453553974, + "acc_norm": 0.25139664804469275, + "acc_norm_stderr": 
0.014508979453553974 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.35294117647058826, + "acc_stderr": 0.029029422815681417, + "acc_norm": 0.35294117647058826, + "acc_norm_stderr": 0.029029422815681417 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4897959183673469, + "acc_stderr": 0.03200255347893782, + "acc_norm": 0.4897959183673469, + "acc_norm_stderr": 0.03200255347893782 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6413502109704642, + "acc_stderr": 0.031219569445301847, + "acc_norm": 0.6413502109704642, + "acc_norm_stderr": 0.031219569445301847 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.36114732724902215, + "acc_stderr": 0.01226793547751903, + "acc_norm": 0.36114732724902215, + "acc_norm_stderr": 0.01226793547751903 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5049019607843137, + "acc_stderr": 0.035091433756067866, + "acc_norm": 0.5049019607843137, + "acc_norm_stderr": 0.035091433756067866 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5212121212121212, + "acc_stderr": 0.03900828913737301, + "acc_norm": 0.5212121212121212, + "acc_norm_stderr": 0.03900828913737301 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.29008567931456547, + "mc1_stderr": 0.01588623687420952, + "mc2": 0.4478974507988722, + "mc2_stderr": 0.015169839199333743 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4817001180637544, + "acc_stderr": 0.017178836639177745, + "acc_norm": 0.6139315230224321, + "acc_norm_stderr": 0.016738130760321757 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + 
"harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + 
"harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Kaeri-Jenti/LDCC-with-openorca2", + "model_sha": "e0f7d47f657361c2fffd4a67428b5ab523b84261", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Kaeri-Jenti/Llama-2-kor-13B/result_2023-11-24 00:19:01.json b/Kaeri-Jenti/Llama-2-kor-13B/result_2023-11-24 00:19:01.json new file mode 100644 index 0000000000000000000000000000000000000000..7e3013c8878c12ffe672f09f2aec6a0daabd3d34 --- /dev/null +++ b/Kaeri-Jenti/Llama-2-kor-13B/result_2023-11-24 00:19:01.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3984641638225256, + "acc_stderr": 0.014306946052735565, + "acc_norm": 0.45819112627986347, + "acc_norm_stderr": 0.0145602203087147 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4231228838876718, + "acc_stderr": 0.004930448527146665, + "acc_norm": 0.5640310695080661, + "acc_norm_stderr": 0.004948696280312416 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4853801169590643, + "acc_stderr": 0.038331852752130205, + "acc_norm": 0.4853801169590643, + "acc_norm_stderr": 0.038331852752130205 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5339805825242718, + "acc_stderr": 0.0493929144727348, + "acc_norm": 0.5339805825242718, + "acc_norm_stderr": 
0.0493929144727348 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5542784163473818, + "acc_stderr": 0.017774297282479503, + "acc_norm": 0.5542784163473818, + "acc_norm_stderr": 0.017774297282479503 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45185185185185184, + "acc_stderr": 0.04299268905480863, + "acc_norm": 0.45185185185185184, + "acc_norm_stderr": 0.04299268905480863 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.40425531914893614, + "acc_stderr": 0.03208115750788684, + "acc_norm": 0.40425531914893614, + "acc_norm_stderr": 0.03208115750788684 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.43373493975903615, + "acc_stderr": 0.03858158940685515, + "acc_norm": 0.43373493975903615, + "acc_norm_stderr": 0.03858158940685515 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5112540192926045, + "acc_stderr": 0.028390897396863533, + "acc_norm": 0.5112540192926045, + "acc_norm_stderr": 0.028390897396863533 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.515695067264574, + "acc_stderr": 0.0335412657542081, + "acc_norm": 0.515695067264574, + "acc_norm_stderr": 0.0335412657542081 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4580152671755725, + "acc_stderr": 0.04369802690578757, + "acc_norm": 0.4580152671755725, + "acc_norm_stderr": 0.04369802690578757 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.45, + "acc_stderr": 0.04999999999999999, + "acc_norm": 0.45, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5707070707070707, + "acc_stderr": 0.03526552724601198, + "acc_norm": 0.5707070707070707, + "acc_norm_stderr": 0.03526552724601198 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3931034482758621, + "acc_stderr": 0.040703290137070705, + "acc_norm": 0.3931034482758621, + "acc_norm_stderr": 
0.040703290137070705 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.04280105837364395, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.04280105837364395 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.46218487394957986, + "acc_stderr": 0.032385469487589795, + "acc_norm": 0.46218487394957986, + "acc_norm_stderr": 0.032385469487589795 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4358974358974359, + "acc_stderr": 0.0251418015111775, + "acc_norm": 0.4358974358974359, + "acc_norm_stderr": 0.0251418015111775 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.55, + "acc_stderr": 0.04999999999999999, + "acc_norm": 0.55, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4187192118226601, + "acc_stderr": 0.03471192860518468, + "acc_norm": 0.4187192118226601, + "acc_norm_stderr": 0.03471192860518468 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4870967741935484, + "acc_stderr": 0.028434533152681848, + "acc_norm": 0.4870967741935484, + "acc_norm_stderr": 0.028434533152681848 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.03088273697413866, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.03088273697413866 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.47547169811320755, + "acc_stderr": 0.030735822206205608, + "acc_norm": 0.47547169811320755, + "acc_norm_stderr": 0.030735822206205608 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5545454545454546, + "acc_stderr": 0.047605488214603246, + 
"acc_norm": 0.5545454545454546, + "acc_norm_stderr": 0.047605488214603246 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25555555555555554, + "acc_stderr": 0.026593939101844065, + "acc_norm": 0.25555555555555554, + "acc_norm_stderr": 0.026593939101844065 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + "acc_stderr": 0.03684881521389024, + "acc_norm": 0.2847682119205298, + "acc_norm_stderr": 0.03684881521389024 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5870646766169154, + "acc_stderr": 0.03481520803367348, + "acc_norm": 0.5870646766169154, + "acc_norm_stderr": 0.03481520803367348 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4046242774566474, + "acc_stderr": 0.0374246119388725, + "acc_norm": 0.4046242774566474, + "acc_norm_stderr": 0.0374246119388725 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2724867724867725, + "acc_stderr": 0.02293097307163335, + "acc_norm": 0.2724867724867725, + "acc_norm_stderr": 0.02293097307163335 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3263888888888889, + "acc_stderr": 0.03921067198982266, + "acc_norm": 0.3263888888888889, + "acc_norm_stderr": 0.03921067198982266 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5144508670520231, + "acc_stderr": 0.026907849856282532, + "acc_norm": 0.5144508670520231, + "acc_norm_stderr": 0.026907849856282532 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.50920245398773, + "acc_stderr": 0.03927705600787443, + "acc_norm": 0.50920245398773, + "acc_norm_stderr": 0.03927705600787443 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.49382716049382713, + "acc_stderr": 
0.02781862396258329, + "acc_norm": 0.49382716049382713, + "acc_norm_stderr": 0.02781862396258329 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5854922279792746, + "acc_stderr": 0.035553003195576686, + "acc_norm": 0.5854922279792746, + "acc_norm_stderr": 0.035553003195576686 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.040969851398436716, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.040969851398436716 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5706422018348624, + "acc_stderr": 0.021222286397236508, + "acc_norm": 0.5706422018348624, + "acc_norm_stderr": 0.021222286397236508 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.29365079365079366, + "acc_stderr": 0.04073524322147127, + "acc_norm": 0.29365079365079366, + "acc_norm_stderr": 0.04073524322147127 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3954248366013072, + "acc_stderr": 0.027996723180631466, + "acc_norm": 0.3954248366013072, + "acc_norm_stderr": 0.027996723180631466 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.42, + "acc_stderr": 0.04960449637488584, + "acc_norm": 0.42, + "acc_norm_stderr": 0.04960449637488584 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6528925619834711, + "acc_stderr": 0.043457245702925335, + "acc_norm": 0.6528925619834711, + "acc_norm_stderr": 0.043457245702925335 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40131578947368424, + "acc_stderr": 0.03988903703336284, + "acc_norm": 0.40131578947368424, + "acc_norm_stderr": 0.03988903703336284 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.37745098039215685, + "acc_stderr": 0.019610851474880283, + "acc_norm": 0.37745098039215685, + "acc_norm_stderr": 0.019610851474880283 + }, + 
"harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3617021276595745, + "acc_stderr": 0.028663820147199492, + "acc_norm": 0.3617021276595745, + "acc_norm_stderr": 0.028663820147199492 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.24107142857142858, + "acc_stderr": 0.04059867246952687, + "acc_norm": 0.24107142857142858, + "acc_norm_stderr": 0.04059867246952687 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.03167468706828979, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.03167468706828979 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4007352941176471, + "acc_stderr": 0.029768263528933105, + "acc_norm": 0.4007352941176471, + "acc_norm_stderr": 0.029768263528933105 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.49795918367346936, + "acc_stderr": 0.0320089533497105, + "acc_norm": 0.49795918367346936, + "acc_norm_stderr": 0.0320089533497105 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5991561181434599, + "acc_stderr": 0.031900803894732356, + "acc_norm": 0.5991561181434599, + "acc_norm_stderr": 0.031900803894732356 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.34028683181225555, + "acc_stderr": 0.012101217610223798, + "acc_norm": 0.34028683181225555, + "acc_norm_stderr": 0.012101217610223798 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5245098039215687, + "acc_stderr": 0.035050931943487976, + 
"acc_norm": 0.5245098039215687, + "acc_norm_stderr": 0.035050931943487976 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.49696969696969695, + "acc_stderr": 0.03904272341431856, + "acc_norm": 0.49696969696969695, + "acc_norm_stderr": 0.03904272341431856 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3011015911872705, + "mc1_stderr": 0.01605899902610062, + "mc2": 0.45367177115043, + "mc2_stderr": 0.015134250403335572 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.43919716646989376, + "acc_stderr": 0.0170627757447807, + "acc_norm": 0.5466351829988194, + "acc_norm_stderr": 0.01711541822522687 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + 
"harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Kaeri-Jenti/Llama-2-kor-13B", + "model_sha": "de4f458a28b96221babb7655c994221ea3d27c6f", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Kaeri-Jenti/llama-2-koen-13b-v1.2/result_2023-11-09 02:00:27.json b/Kaeri-Jenti/llama-2-koen-13b-v1.2/result_2023-11-09 02:00:27.json new file mode 100644 index 0000000000000000000000000000000000000000..19fbb2367aa42f1539eeca24b1c2795266ec661f --- /dev/null +++ b/Kaeri-Jenti/llama-2-koen-13b-v1.2/result_2023-11-09 
02:00:27.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3890784982935154, + "acc_stderr": 0.014247309976045607, + "acc_norm": 0.45819112627986347, + "acc_norm_stderr": 0.014560220308714702 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4206333399721171, + "acc_stderr": 0.004926518439372259, + "acc_norm": 0.5676160127464649, + "acc_norm_stderr": 0.004943945069611462 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.49122807017543857, + "acc_stderr": 0.038342347441649924, + "acc_norm": 0.49122807017543857, + "acc_norm_stderr": 0.038342347441649924 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4563106796116505, + "acc_stderr": 0.049318019942204146, + "acc_norm": 0.4563106796116505, + "acc_norm_stderr": 0.049318019942204146 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5287356321839081, + "acc_stderr": 0.017850410794380173, + "acc_norm": 0.5287356321839081, + "acc_norm_stderr": 0.017850410794380173 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.5111111111111111, + "acc_stderr": 0.043182754919779756, + "acc_norm": 0.5111111111111111, + "acc_norm_stderr": 0.043182754919779756 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421255, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421255 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.41702127659574467, + "acc_stderr": 0.03223276266711712, + "acc_norm": 0.41702127659574467, + "acc_norm_stderr": 0.03223276266711712 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.40963855421686746, + "acc_stderr": 0.038284011150790206, + "acc_norm": 0.40963855421686746, + "acc_norm_stderr": 0.038284011150790206 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4790996784565916, + "acc_stderr": 0.028373270961069414, + "acc_norm": 0.4790996784565916, + "acc_norm_stderr": 0.028373270961069414 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5022421524663677, + "acc_stderr": 0.03355746535223263, + "acc_norm": 
0.5022421524663677, + "acc_norm_stderr": 0.03355746535223263 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4580152671755725, + "acc_stderr": 0.04369802690578757, + "acc_norm": 0.4580152671755725, + "acc_norm_stderr": 0.04369802690578757 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.494949494949495, + "acc_stderr": 0.035621707606254015, + "acc_norm": 0.494949494949495, + "acc_norm_stderr": 0.035621707606254015 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3931034482758621, + "acc_stderr": 0.040703290137070705, + "acc_norm": 0.3931034482758621, + "acc_norm_stderr": 0.040703290137070705 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.18627450980392157, + "acc_stderr": 0.03873958714149352, + "acc_norm": 0.18627450980392157, + "acc_norm_stderr": 0.03873958714149352 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.032145368597886394, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.032145368597886394 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4076923076923077, + "acc_stderr": 0.02491524398598784, + "acc_norm": 0.4076923076923077, + "acc_norm_stderr": 0.02491524398598784 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.04826217294139894, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.04826217294139894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3448275862068966, + "acc_stderr": 
0.03344283744280458, + "acc_norm": 0.3448275862068966, + "acc_norm_stderr": 0.03344283744280458 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.47096774193548385, + "acc_stderr": 0.028396016402761005, + "acc_norm": 0.47096774193548385, + "acc_norm_stderr": 0.028396016402761005 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5811965811965812, + "acc_stderr": 0.03232128912157792, + "acc_norm": 0.5811965811965812, + "acc_norm_stderr": 0.03232128912157792 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4490566037735849, + "acc_stderr": 0.030612730713641095, + "acc_norm": 0.4490566037735849, + "acc_norm_stderr": 0.030612730713641095 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.43636363636363634, + "acc_stderr": 0.04750185058907297, + "acc_norm": 0.43636363636363634, + "acc_norm_stderr": 0.04750185058907297 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2740740740740741, + "acc_stderr": 0.027195934804085626, + "acc_norm": 0.2740740740740741, + "acc_norm_stderr": 0.027195934804085626 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + "acc_stderr": 0.03684881521389023, + "acc_norm": 0.2847682119205298, + "acc_norm_stderr": 0.03684881521389023 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5771144278606966, + "acc_stderr": 0.034932317774212816, + "acc_norm": 0.5771144278606966, + "acc_norm_stderr": 0.034932317774212816 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3872832369942196, + "acc_stderr": 0.03714325906302064, + "acc_norm": 0.3872832369942196, + "acc_norm_stderr": 0.03714325906302064 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.023517294335963286, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.023517294335963286 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3472222222222222, + "acc_stderr": 0.039812405437178615, + "acc_norm": 0.3472222222222222, + "acc_norm_stderr": 0.039812405437178615 + 
}, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.47109826589595377, + "acc_stderr": 0.026874085883518348, + "acc_norm": 0.47109826589595377, + "acc_norm_stderr": 0.026874085883518348 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.43558282208588955, + "acc_stderr": 0.03895632464138938, + "acc_norm": 0.43558282208588955, + "acc_norm_stderr": 0.03895632464138938 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4537037037037037, + "acc_stderr": 0.0277012284685426, + "acc_norm": 0.4537037037037037, + "acc_norm_stderr": 0.0277012284685426 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.45595854922279794, + "acc_stderr": 0.035944137112724366, + "acc_norm": 0.45595854922279794, + "acc_norm_stderr": 0.035944137112724366 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.040493392977481404, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.040493392977481404 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5045871559633027, + "acc_stderr": 0.021436420955529424, + "acc_norm": 0.5045871559633027, + "acc_norm_stderr": 0.021436420955529424 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2698412698412698, + "acc_stderr": 0.03970158273235173, + "acc_norm": 0.2698412698412698, + "acc_norm_stderr": 0.03970158273235173 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4150326797385621, + "acc_stderr": 0.028213504177824093, + "acc_norm": 0.4150326797385621, + "acc_norm_stderr": 
0.028213504177824093 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6198347107438017, + "acc_stderr": 0.04431324501968432, + "acc_norm": 0.6198347107438017, + "acc_norm_stderr": 0.04431324501968432 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40131578947368424, + "acc_stderr": 0.039889037033362836, + "acc_norm": 0.40131578947368424, + "acc_norm_stderr": 0.039889037033362836 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.01943177567703731, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.01943177567703731 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.28368794326241137, + "acc_stderr": 0.026891709428343957, + "acc_norm": 0.28368794326241137, + "acc_norm_stderr": 0.026891709428343957 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25892857142857145, + "acc_stderr": 0.041577515398656284, + "acc_norm": 0.25892857142857145, + "acc_norm_stderr": 0.041577515398656284 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2175925925925926, + "acc_stderr": 0.028139689444859672, + "acc_norm": 0.2175925925925926, + "acc_norm_stderr": 0.028139689444859672 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3860294117647059, + "acc_stderr": 0.029573269134411124, + "acc_norm": 
0.3860294117647059, + "acc_norm_stderr": 0.029573269134411124 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.40408163265306124, + "acc_stderr": 0.0314147080258659, + "acc_norm": 0.40408163265306124, + "acc_norm_stderr": 0.0314147080258659 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6075949367088608, + "acc_stderr": 0.0317847187456473, + "acc_norm": 0.6075949367088608, + "acc_norm_stderr": 0.0317847187456473 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3050847457627119, + "acc_stderr": 0.011759939618085455, + "acc_norm": 0.3050847457627119, + "acc_norm_stderr": 0.011759939618085455 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.43137254901960786, + "acc_stderr": 0.03476099060501637, + "acc_norm": 0.43137254901960786, + "acc_norm_stderr": 0.03476099060501637 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5575757575757576, + "acc_stderr": 0.03878372113711275, + "acc_norm": 0.5575757575757576, + "acc_norm_stderr": 0.03878372113711275 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2607099143206854, + "mc1_stderr": 0.015368841620766368, + "mc2": 0.4100851120970672, + "mc2_stderr": 0.014797143070922393 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5029515938606848, + "acc_stderr": 0.017190054580194694, + "acc_norm": 0.5914994096812278, + "acc_norm_stderr": 0.016900062879427125 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + 
"harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + 
"harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Kaeri-Jenti/llama-2-koen-13b-v1.2", + "model_sha": "cb9e8ff37d427ab588d666b5c6994498a10084de", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Kaeri-Jenti/llama-2-koen-13b-v1.3/result_2023-11-27 00:37:09.json b/Kaeri-Jenti/llama-2-koen-13b-v1.3/result_2023-11-27 00:37:09.json new file mode 100644 index 0000000000000000000000000000000000000000..494045527def032eb4a8f4d6f221357de8e506cc --- /dev/null +++ b/Kaeri-Jenti/llama-2-koen-13b-v1.3/result_2023-11-27 00:37:09.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.378839590443686, + "acc_stderr": 0.014175915490000326, + "acc_norm": 0.4522184300341297, + "acc_norm_stderr": 0.014544519880633832 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4208325034853615, + "acc_stderr": 0.004926837572202162, + "acc_norm": 0.563433578968333, + "acc_norm_stderr": 0.00494946256368134 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5029239766081871, + "acc_stderr": 0.03834759370936839, + "acc_norm": 0.5029239766081871, + "acc_norm_stderr": 0.03834759370936839 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4854368932038835, + "acc_stderr": 0.04948637324026637, + "acc_norm": 0.4854368932038835, + "acc_norm_stderr": 0.04948637324026637 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5261813537675607, + "acc_stderr": 0.017855434554041993, + "acc_norm": 0.5261813537675607, + "acc_norm_stderr": 0.017855434554041993 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4740740740740741, + "acc_stderr": 0.04313531696750574, + "acc_norm": 0.4740740740740741, + "acc_norm_stderr": 0.04313531696750574 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + 
"acc_stderr": 0.045604802157206824, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206824 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39148936170212767, + "acc_stderr": 0.03190701242326812, + "acc_norm": 0.39148936170212767, + "acc_norm_stderr": 0.03190701242326812 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.45180722891566266, + "acc_stderr": 0.03874371556587953, + "acc_norm": 0.45180722891566266, + "acc_norm_stderr": 0.03874371556587953 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5048231511254019, + "acc_stderr": 0.028396770444111298, + "acc_norm": 0.5048231511254019, + "acc_norm_stderr": 0.028396770444111298 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4977578475336323, + "acc_stderr": 0.033557465352232634, + "acc_norm": 0.4977578475336323, + "acc_norm_stderr": 0.033557465352232634 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4732824427480916, + "acc_stderr": 0.04379024936553894, + "acc_norm": 0.4732824427480916, + "acc_norm_stderr": 0.04379024936553894 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5505050505050505, + "acc_stderr": 0.0354413249194797, + "acc_norm": 0.5505050505050505, + "acc_norm_stderr": 0.0354413249194797 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.04104269211806232, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.04104269211806232 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.042207736591714534, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.042207736591714534 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.453781512605042, + "acc_stderr": 0.03233943468182088, + "acc_norm": 0.453781512605042, + "acc_norm_stderr": 0.03233943468182088 + }, + 
"harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.441025641025641, + "acc_stderr": 0.02517404838400078, + "acc_norm": 0.441025641025641, + "acc_norm_stderr": 0.02517404838400078 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4537037037037037, + "acc_stderr": 0.04812917324536823, + "acc_norm": 0.4537037037037037, + "acc_norm_stderr": 0.04812917324536823 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3842364532019704, + "acc_stderr": 0.0342239856565755, + "acc_norm": 0.3842364532019704, + "acc_norm_stderr": 0.0342239856565755 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4935483870967742, + "acc_stderr": 0.02844163823354051, + "acc_norm": 0.4935483870967742, + "acc_norm_stderr": 0.02844163823354051 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6410256410256411, + "acc_stderr": 0.03142616993791924, + "acc_norm": 0.6410256410256411, + "acc_norm_stderr": 0.03142616993791924 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4339622641509434, + "acc_stderr": 0.030503292013342596, + "acc_norm": 0.4339622641509434, + "acc_norm_stderr": 0.030503292013342596 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.026842057873833706, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.026842057873833706 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.03780445850526733, + "acc_norm": 0.31125827814569534, + 
"acc_norm_stderr": 0.03780445850526733 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5671641791044776, + "acc_stderr": 0.03503490923673281, + "acc_norm": 0.5671641791044776, + "acc_norm_stderr": 0.03503490923673281 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.42196531791907516, + "acc_stderr": 0.0376574669386515, + "acc_norm": 0.42196531791907516, + "acc_norm_stderr": 0.0376574669386515 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2804232804232804, + "acc_stderr": 0.02313528797432563, + "acc_norm": 0.2804232804232804, + "acc_norm_stderr": 0.02313528797432563 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3680555555555556, + "acc_stderr": 0.04032999053960718, + "acc_norm": 0.3680555555555556, + "acc_norm_stderr": 0.04032999053960718 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.49421965317919075, + "acc_stderr": 0.026917296179149116, + "acc_norm": 0.49421965317919075, + "acc_norm_stderr": 0.026917296179149116 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4785276073619632, + "acc_stderr": 0.03924746876751129, + "acc_norm": 0.4785276073619632, + "acc_norm_stderr": 0.03924746876751129 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.44753086419753085, + "acc_stderr": 0.027667138569422704, + "acc_norm": 0.44753086419753085, + "acc_norm_stderr": 0.027667138569422704 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5233160621761658, + "acc_stderr": 0.03604513672442202, + "acc_norm": 
0.5233160621761658, + "acc_norm_stderr": 0.03604513672442202 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.039994238792813365, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.039994238792813365 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5431192660550459, + "acc_stderr": 0.02135745878522621, + "acc_norm": 0.5431192660550459, + "acc_norm_stderr": 0.02135745878522621 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.03932537680392868, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.03932537680392868 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4542483660130719, + "acc_stderr": 0.028509807802626567, + "acc_norm": 0.4542483660130719, + "acc_norm_stderr": 0.028509807802626567 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.043913262867240704, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.043913262867240704 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.35526315789473684, + "acc_stderr": 0.03894734487013316, + "acc_norm": 0.35526315789473684, + "acc_norm_stderr": 0.03894734487013316 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3464052287581699, + "acc_stderr": 0.019249785691717217, + "acc_norm": 0.3464052287581699, + "acc_norm_stderr": 0.019249785691717217 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2907801418439716, + "acc_stderr": 0.027090664368353178, + "acc_norm": 0.2907801418439716, + "acc_norm_stderr": 0.027090664368353178 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.24107142857142858, + "acc_stderr": 0.04059867246952687, + "acc_norm": 0.24107142857142858, + "acc_norm_stderr": 0.04059867246952687 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 
0.3148148148148148, + "acc_stderr": 0.031674687068289784, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.031674687068289784 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4375, + "acc_stderr": 0.030134614954403924, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.030134614954403924 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.45714285714285713, + "acc_stderr": 0.03189141832421397, + "acc_norm": 0.45714285714285713, + "acc_norm_stderr": 0.03189141832421397 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5232067510548524, + "acc_stderr": 0.032512152011410174, + "acc_norm": 0.5232067510548524, + "acc_norm_stderr": 0.032512152011410174 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3050847457627119, + "acc_stderr": 0.011759939618085455, + "acc_norm": 0.3050847457627119, + "acc_norm_stderr": 0.011759939618085455 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.03460228327239171, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.03460228327239171 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4484848484848485, + "acc_stderr": 0.038835659779569286, + "acc_norm": 0.4484848484848485, + "acc_norm_stderr": 0.038835659779569286 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.27050183598531213, + "mc1_stderr": 0.015550778332842883, + "mc2": 0.4216743604441881, + "mc2_stderr": 0.014868064514296196 + }, + 
"harness|ko_commongen_v2|2": { + "acc": 0.45454545454545453, + "acc_stderr": 0.017119172208061504, + "acc_norm": 0.5395513577331759, + "acc_norm_stderr": 0.017136487626049846 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + 
"harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Kaeri-Jenti/llama-2-koen-13b-v1.3", + "model_sha": "a926510aca20383788b1d49fc2a16edac5919f2c", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Kaeri-Jenti/llama-2-koen-13b-with-ko-wiki/result_2023-11-08 11:54:25.json b/Kaeri-Jenti/llama-2-koen-13b-with-ko-wiki/result_2023-11-08 11:54:25.json new file mode 100644 index 0000000000000000000000000000000000000000..882ada039b0ae5be244fcb0af16f5494750bb6f6 --- /dev/null +++ b/Kaeri-Jenti/llama-2-koen-13b-with-ko-wiki/result_2023-11-08 11:54:25.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.363481228668942, + "acc_stderr": 0.014056207319068285, + "acc_norm": 0.439419795221843, + "acc_norm_stderr": 0.014503747823580122 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3769169488149771, + "acc_stderr": 0.004836234143655416, + "acc_norm": 0.498406691894045, + "acc_norm_stderr": 0.004989756076956349 + }, + 
"harness|ko_mmlu_world_religions|5": { + "acc": 0.4619883040935672, + "acc_stderr": 0.03823727092882307, + "acc_norm": 0.4619883040935672, + "acc_norm_stderr": 0.03823727092882307 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4854368932038835, + "acc_stderr": 0.04948637324026637, + "acc_norm": 0.4854368932038835, + "acc_norm_stderr": 0.04948637324026637 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.49936143039591313, + "acc_stderr": 0.017879948914431672, + "acc_norm": 0.49936143039591313, + "acc_norm_stderr": 0.017879948914431672 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45185185185185184, + "acc_stderr": 0.04299268905480863, + "acc_norm": 0.45185185185185184, + "acc_norm_stderr": 0.04299268905480863 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.35, + "acc_stderr": 0.04793724854411018, + "acc_norm": 0.35, + "acc_norm_stderr": 0.04793724854411018 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.35319148936170214, + "acc_stderr": 0.031245325202761926, + "acc_norm": 0.35319148936170214, + "acc_norm_stderr": 0.031245325202761926 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39156626506024095, + "acc_stderr": 0.03799857454479636, + "acc_norm": 0.39156626506024095, + "acc_norm_stderr": 0.03799857454479636 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4437299035369775, + "acc_stderr": 0.02821768355665231, + "acc_norm": 0.4437299035369775, + "acc_norm_stderr": 0.02821768355665231 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4663677130044843, + "acc_stderr": 0.033481800170603065, + "acc_norm": 0.4663677130044843, + "acc_norm_stderr": 0.033481800170603065 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.40458015267175573, + "acc_stderr": 0.043046937953806645, + "acc_norm": 0.40458015267175573, + "acc_norm_stderr": 0.043046937953806645 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + 
"harness|ko_mmlu_high_school_geography|5": { + "acc": 0.398989898989899, + "acc_stderr": 0.03488901616852731, + "acc_norm": 0.398989898989899, + "acc_norm_stderr": 0.03488901616852731 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4068965517241379, + "acc_stderr": 0.04093793981266237, + "acc_norm": 0.4068965517241379, + "acc_norm_stderr": 0.04093793981266237 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.18627450980392157, + "acc_stderr": 0.038739587141493524, + "acc_norm": 0.18627450980392157, + "acc_norm_stderr": 0.038739587141493524 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3865546218487395, + "acc_stderr": 0.0316314580755238, + "acc_norm": 0.3865546218487395, + "acc_norm_stderr": 0.0316314580755238 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.40512820512820513, + "acc_stderr": 0.024890471769938152, + "acc_norm": 0.40512820512820513, + "acc_norm_stderr": 0.024890471769938152 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.47, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.47, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.04803752235190192, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.04803752235190192 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.37438423645320196, + "acc_stderr": 0.03405155380561952, + "acc_norm": 0.37438423645320196, + "acc_norm_stderr": 0.03405155380561952 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3935483870967742, + "acc_stderr": 0.02779187875313227, + "acc_norm": 0.3935483870967742, + "acc_norm_stderr": 0.02779187875313227 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5982905982905983, + "acc_stderr": 0.03211693751051621, + "acc_norm": 0.5982905982905983, + 
"acc_norm_stderr": 0.03211693751051621 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.37735849056603776, + "acc_stderr": 0.02983280811479601, + "acc_norm": 0.37735849056603776, + "acc_norm_stderr": 0.02983280811479601 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.39090909090909093, + "acc_stderr": 0.046737523336702384, + "acc_norm": 0.39090909090909093, + "acc_norm_stderr": 0.046737523336702384 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.02684205787383371, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.02684205787383371 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + "acc_stderr": 0.03734535676787198, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.03734535676787198 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.4975124378109453, + "acc_stderr": 0.03535490150137289, + "acc_norm": 0.4975124378109453, + "acc_norm_stderr": 0.03535490150137289 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3872832369942196, + "acc_stderr": 0.037143259063020635, + "acc_norm": 0.3872832369942196, + "acc_norm_stderr": 0.037143259063020635 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2804232804232804, + "acc_stderr": 0.02313528797432563, + "acc_norm": 0.2804232804232804, + "acc_norm_stderr": 0.02313528797432563 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.039420826399272135, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.039420826399272135 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.3, + "acc_stderr": 0.04605661864718381, + "acc_norm": 0.3, + "acc_norm_stderr": 0.04605661864718381 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.45664739884393063, + "acc_stderr": 0.026817718130348913, + 
"acc_norm": 0.45664739884393063, + "acc_norm_stderr": 0.026817718130348913 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4049079754601227, + "acc_stderr": 0.03856672163548913, + "acc_norm": 0.4049079754601227, + "acc_norm_stderr": 0.03856672163548913 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4351851851851852, + "acc_stderr": 0.027586006221607718, + "acc_norm": 0.4351851851851852, + "acc_norm_stderr": 0.027586006221607718 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.37823834196891193, + "acc_stderr": 0.03499807276193338, + "acc_norm": 0.37823834196891193, + "acc_norm_stderr": 0.03499807276193338 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3157894736842105, + "acc_stderr": 0.04372748290278008, + "acc_norm": 0.3157894736842105, + "acc_norm_stderr": 0.04372748290278008 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.41651376146788993, + "acc_stderr": 0.021136376504030874, + "acc_norm": 0.41651376146788993, + "acc_norm_stderr": 0.021136376504030874 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.04006168083848878, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.04006168083848878 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.41830065359477125, + "acc_stderr": 0.02824513402438729, + "acc_norm": 0.41830065359477125, + "acc_norm_stderr": 0.02824513402438729 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6198347107438017, + "acc_stderr": 0.04431324501968431, + "acc_norm": 0.6198347107438017, + "acc_norm_stderr": 0.04431324501968431 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.375, + "acc_stderr": 
0.039397364351956274, + "acc_norm": 0.375, + "acc_norm_stderr": 0.039397364351956274 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.35130718954248363, + "acc_stderr": 0.019312676065786554, + "acc_norm": 0.35130718954248363, + "acc_norm_stderr": 0.019312676065786554 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.30851063829787234, + "acc_stderr": 0.027553366165101366, + "acc_norm": 0.30851063829787234, + "acc_norm_stderr": 0.027553366165101366 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25, + "acc_stderr": 0.04109974682633932, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04109974682633932 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2824074074074074, + "acc_stderr": 0.030701372111510934, + "acc_norm": 0.2824074074074074, + "acc_norm_stderr": 0.030701372111510934 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.33455882352941174, + "acc_stderr": 0.02866199620233531, + "acc_norm": 0.33455882352941174, + "acc_norm_stderr": 0.02866199620233531 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4448979591836735, + "acc_stderr": 0.03181425118197787, + "acc_norm": 0.4448979591836735, + "acc_norm_stderr": 0.03181425118197787 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5063291139240507, + "acc_stderr": 0.03254462010767859, + "acc_norm": 0.5063291139240507, + "acc_norm_stderr": 0.03254462010767859 + }, + "harness|ko_mmlu_professional_law|5": { + 
"acc": 0.3005215123859192, + "acc_stderr": 0.011709918883039124, + "acc_norm": 0.3005215123859192, + "acc_norm_stderr": 0.011709918883039124 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.033086111132364336, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.033086111132364336 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.47878787878787876, + "acc_stderr": 0.03900828913737302, + "acc_norm": 0.47878787878787876, + "acc_norm_stderr": 0.03900828913737302 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.22888616891064872, + "mc1_stderr": 0.014706994909055027, + "mc2": 0.3806541455843524, + "mc2_stderr": 0.014913115418195339 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3990554899645809, + "acc_stderr": 0.016836377292849296, + "acc_norm": 0.4793388429752066, + "acc_norm_stderr": 0.01717567127983645 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + 
"harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Kaeri-Jenti/llama-2-koen-13b-with-ko-wiki", + "model_sha": "c04aefa73af3678c5fd2df2750199589dbec1216", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git 
a/Korabbit/llama-2-ko-7b-bilingual/result_2023-10-26 04:30:26.json b/Korabbit/llama-2-ko-7b-bilingual/result_2023-10-26 04:30:26.json new file mode 100644 index 0000000000000000000000000000000000000000..22add5fa0e2bf69dcc18090af66bc81bf8f2fa2e --- /dev/null +++ b/Korabbit/llama-2-ko-7b-bilingual/result_2023-10-26 04:30:26.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3387372013651877, + "acc_stderr": 0.013830568927974332, + "acc_norm": 0.40017064846416384, + "acc_norm_stderr": 0.014317197787809174 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4023102967536347, + "acc_stderr": 0.004893617014975314, + "acc_norm": 0.5194184425413264, + "acc_norm_stderr": 0.004986016938678532 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.43859649122807015, + "acc_stderr": 0.038057975055904594, + "acc_norm": 0.43859649122807015, + "acc_norm_stderr": 0.038057975055904594 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.34951456310679613, + "acc_stderr": 0.047211885060971716, + "acc_norm": 0.34951456310679613, + "acc_norm_stderr": 0.047211885060971716 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.421455938697318, + "acc_stderr": 0.017657976412654857, + "acc_norm": 0.421455938697318, + "acc_norm_stderr": 0.017657976412654857 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3851851851851852, + "acc_stderr": 0.042039210401562783, + "acc_norm": 0.3851851851851852, + "acc_norm_stderr": 0.042039210401562783 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3276595744680851, + "acc_stderr": 0.030683020843231004, + "acc_norm": 0.3276595744680851, + "acc_norm_stderr": 0.030683020843231004 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3373493975903614, + "acc_stderr": 0.03680783690727581, + "acc_norm": 0.3373493975903614, + "acc_norm_stderr": 0.03680783690727581 + 
}, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3987138263665595, + "acc_stderr": 0.0278093225857745, + "acc_norm": 0.3987138263665595, + "acc_norm_stderr": 0.0278093225857745 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.37668161434977576, + "acc_stderr": 0.032521134899291884, + "acc_norm": 0.37668161434977576, + "acc_norm_stderr": 0.032521134899291884 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4122137404580153, + "acc_stderr": 0.043171711948702535, + "acc_norm": 0.4122137404580153, + "acc_norm_stderr": 0.043171711948702535 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3787878787878788, + "acc_stderr": 0.03456088731993747, + "acc_norm": 0.3787878787878788, + "acc_norm_stderr": 0.03456088731993747 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3586206896551724, + "acc_stderr": 0.039966295748767186, + "acc_norm": 0.3586206896551724, + "acc_norm_stderr": 0.039966295748767186 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.18627450980392157, + "acc_stderr": 0.038739587141493524, + "acc_norm": 0.18627450980392157, + "acc_norm_stderr": 0.038739587141493524 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.31512605042016806, + "acc_stderr": 0.03017680828897434, + "acc_norm": 0.31512605042016806, + "acc_norm_stderr": 0.03017680828897434 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.30256410256410254, + "acc_stderr": 0.023290888053772725, + "acc_norm": 0.30256410256410254, + "acc_norm_stderr": 0.023290888053772725 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 
0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.0471282125742677, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.0471282125742677 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.30049261083743845, + "acc_stderr": 0.03225799476233484, + "acc_norm": 0.30049261083743845, + "acc_norm_stderr": 0.03225799476233484 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.34516129032258064, + "acc_stderr": 0.027045746573534327, + "acc_norm": 0.34516129032258064, + "acc_norm_stderr": 0.027045746573534327 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5427350427350427, + "acc_stderr": 0.03263622596380688, + "acc_norm": 0.5427350427350427, + "acc_norm_stderr": 0.03263622596380688 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.35471698113207545, + "acc_stderr": 0.02944517532819959, + "acc_norm": 0.35471698113207545, + "acc_norm_stderr": 0.02944517532819959 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.34545454545454546, + "acc_stderr": 0.04554619617541054, + "acc_norm": 0.34545454545454546, + "acc_norm_stderr": 0.04554619617541054 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24444444444444444, + "acc_stderr": 0.02620276653465215, + "acc_norm": 0.24444444444444444, + "acc_norm_stderr": 0.02620276653465215 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.26490066225165565, + "acc_stderr": 0.03603038545360383, + "acc_norm": 0.26490066225165565, + "acc_norm_stderr": 0.03603038545360383 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.3383084577114428, + "acc_stderr": 0.03345563070339193, + "acc_norm": 0.3383084577114428, + "acc_norm_stderr": 0.03345563070339193 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2774566473988439, + "acc_stderr": 0.034140140070440354, + "acc_norm": 0.2774566473988439, + "acc_norm_stderr": 0.034140140070440354 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 
0.28835978835978837, + "acc_stderr": 0.0233306540545359, + "acc_norm": 0.28835978835978837, + "acc_norm_stderr": 0.0233306540545359 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2569444444444444, + "acc_stderr": 0.03653946969442099, + "acc_norm": 0.2569444444444444, + "acc_norm_stderr": 0.03653946969442099 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.3439306358381503, + "acc_stderr": 0.025574123786546672, + "acc_norm": 0.3439306358381503, + "acc_norm_stderr": 0.025574123786546672 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3374233128834356, + "acc_stderr": 0.03714908409935575, + "acc_norm": 0.3374233128834356, + "acc_norm_stderr": 0.03714908409935575 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3395061728395062, + "acc_stderr": 0.026348564412011628, + "acc_norm": 0.3395061728395062, + "acc_norm_stderr": 0.026348564412011628 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.37305699481865284, + "acc_stderr": 0.03490205592048574, + "acc_norm": 0.37305699481865284, + "acc_norm_stderr": 0.03490205592048574 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.04142439719489362, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.04142439719489362 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3706422018348624, + "acc_stderr": 0.02070745816435298, + "acc_norm": 0.3706422018348624, + "acc_norm_stderr": 0.02070745816435298 + }, + "harness|ko_mmlu_formal_logic|5": { + 
"acc": 0.2222222222222222, + "acc_stderr": 0.037184890068181146, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.037184890068181146 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3464052287581699, + "acc_stderr": 0.027245613047215355, + "acc_norm": 0.3464052287581699, + "acc_norm_stderr": 0.027245613047215355 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5041322314049587, + "acc_stderr": 0.04564198767432754, + "acc_norm": 0.5041322314049587, + "acc_norm_stderr": 0.04564198767432754 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.28289473684210525, + "acc_stderr": 0.03665349695640767, + "acc_norm": 0.28289473684210525, + "acc_norm_stderr": 0.03665349695640767 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.32679738562091504, + "acc_stderr": 0.018975427920507222, + "acc_norm": 0.32679738562091504, + "acc_norm_stderr": 0.018975427920507222 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.33687943262411346, + "acc_stderr": 0.028195534873966727, + "acc_norm": 0.33687943262411346, + "acc_norm_stderr": 0.028195534873966727 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3125, + "acc_stderr": 0.043994650575715215, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.043994650575715215 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.26851851851851855, + "acc_stderr": 0.030225226160012393, + "acc_norm": 0.26851851851851855, + "acc_norm_stderr": 0.030225226160012393 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24134078212290502, + "acc_stderr": 0.014310999547961443, + "acc_norm": 0.24134078212290502, + "acc_norm_stderr": 0.014310999547961443 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + 
"harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.026799562024887678, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.026799562024887678 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2897959183673469, + "acc_stderr": 0.02904308868330433, + "acc_norm": 0.2897959183673469, + "acc_norm_stderr": 0.02904308868330433 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.45147679324894513, + "acc_stderr": 0.032393600173974704, + "acc_norm": 0.45147679324894513, + "acc_norm_stderr": 0.032393600173974704 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2940026075619296, + "acc_stderr": 0.011636062953698602, + "acc_norm": 0.2940026075619296, + "acc_norm_stderr": 0.011636062953698602 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.3382352941176471, + "acc_stderr": 0.03320574612945431, + "acc_norm": 0.3382352941176471, + "acc_norm_stderr": 0.03320574612945431 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.37575757575757573, + "acc_stderr": 0.03781887353205982, + "acc_norm": 0.37575757575757573, + "acc_norm_stderr": 0.03781887353205982 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.25091799265605874, + "mc1_stderr": 0.01517698502770769, + "mc2": 0.41272169126715796, + "mc2_stderr": 0.015689006867142138 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4332939787485242, + "acc_stderr": 0.0170366836418931, + "acc_norm": 0.5112160566706021, + "acc_norm_stderr": 0.017186028469489294 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + 
"harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + 
"harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Korabbit/llama-2-ko-7b-bilingual", + "model_sha": "3182f7a997a431a53f9157384c6fb742619f8fbc", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Korabbit/llama-2-ko-7b-pru/result_2023-11-05 04:27:01.json b/Korabbit/llama-2-ko-7b-pru/result_2023-11-05 04:27:01.json new file mode 100644 index 0000000000000000000000000000000000000000..80245eb4b25442e46f683e1b6e7dfd096e64eb6e --- /dev/null +++ b/Korabbit/llama-2-ko-7b-pru/result_2023-11-05 04:27:01.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.22098976109215018, + "acc_stderr": 0.01212492920681826, + "acc_norm": 0.2790102389078498, + "acc_norm_stderr": 0.013106784883601346 + }, + "harness|ko_hellaswag|10": { + "acc": 0.27703644692292373, + "acc_stderr": 0.004466200055292544, + "acc_norm": 0.3209520015933081, + "acc_norm_stderr": 0.004658882929099516 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.18128654970760233, + "acc_stderr": 0.029547741687640024, + "acc_norm": 0.18128654970760233, + "acc_norm_stderr": 0.029547741687640024 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.3786407766990291, + "acc_stderr": 0.04802694698258973, + "acc_norm": 0.3786407766990291, + "acc_norm_stderr": 0.04802694698258973 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.20945083014048532, + "acc_stderr": 0.014551310568143698, + "acc_norm": 0.20945083014048532, + 
"acc_norm_stderr": 0.014551310568143698 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.22962962962962963, + "acc_stderr": 0.036333844140734636, + "acc_norm": 0.22962962962962963, + "acc_norm_stderr": 0.036333844140734636 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.23404255319148937, + "acc_stderr": 0.027678452578212377, + "acc_norm": 0.23404255319148937, + "acc_norm_stderr": 0.027678452578212377 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.19879518072289157, + "acc_stderr": 0.031069390260789396, + "acc_norm": 0.19879518072289157, + "acc_norm_stderr": 0.031069390260789396 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.24115755627009647, + "acc_stderr": 0.024296594034763426, + "acc_norm": 0.24115755627009647, + "acc_norm_stderr": 0.024296594034763426 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.11210762331838565, + "acc_stderr": 0.0211748942063461, + "acc_norm": 0.11210762331838565, + "acc_norm_stderr": 0.0211748942063461 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2748091603053435, + "acc_stderr": 0.03915345408847836, + "acc_norm": 0.2748091603053435, + "acc_norm_stderr": 0.03915345408847836 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.35353535353535354, + "acc_stderr": 0.03406086723547153, + "acc_norm": 0.35353535353535354, + "acc_norm_stderr": 0.03406086723547153 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2413793103448276, + "acc_stderr": 0.03565998174135302, + "acc_norm": 0.2413793103448276, + "acc_norm_stderr": 0.03565998174135302 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.37254901960784315, + "acc_stderr": 0.048108401480826346, + "acc_norm": 
0.37254901960784315, + "acc_norm_stderr": 0.048108401480826346 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3445378151260504, + "acc_stderr": 0.030868682604121622, + "acc_norm": 0.3445378151260504, + "acc_norm_stderr": 0.030868682604121622 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.36923076923076925, + "acc_stderr": 0.024468615241478912, + "acc_norm": 0.36923076923076925, + "acc_norm_stderr": 0.024468615241478912 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.22, + "acc_stderr": 0.0416333199893227, + "acc_norm": 0.22, + "acc_norm_stderr": 0.0416333199893227 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.19, + "acc_stderr": 0.039427724440366234, + "acc_norm": 0.19, + "acc_norm_stderr": 0.039427724440366234 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.040191074725573483, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.040191074725573483 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.28078817733990147, + "acc_stderr": 0.0316185633535861, + "acc_norm": 0.28078817733990147, + "acc_norm_stderr": 0.0316185633535861 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3096774193548387, + "acc_stderr": 0.026302774983517418, + "acc_norm": 0.3096774193548387, + "acc_norm_stderr": 0.026302774983517418 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.20512820512820512, + "acc_stderr": 0.02645350805404033, + "acc_norm": 0.20512820512820512, + "acc_norm_stderr": 0.02645350805404033 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3169811320754717, + "acc_stderr": 0.02863723563980092, + "acc_norm": 0.3169811320754717, + "acc_norm_stderr": 0.02863723563980092 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.22727272727272727, + "acc_stderr": 0.040139645540727735, + "acc_norm": 0.22727272727272727, + "acc_norm_stderr": 0.040139645540727735 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 
0.26296296296296295, + "acc_stderr": 0.02684205787383371, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.02684205787383371 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.32450331125827814, + "acc_stderr": 0.03822746937658754, + "acc_norm": 0.32450331125827814, + "acc_norm_stderr": 0.03822746937658754 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.26865671641791045, + "acc_stderr": 0.03134328358208954, + "acc_norm": 0.26865671641791045, + "acc_norm_stderr": 0.03134328358208954 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.28901734104046245, + "acc_stderr": 0.034564257450869995, + "acc_norm": 0.28901734104046245, + "acc_norm_stderr": 0.034564257450869995 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2724867724867725, + "acc_stderr": 0.022930973071633335, + "acc_norm": 0.2724867724867725, + "acc_norm_stderr": 0.022930973071633335 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2569444444444444, + "acc_stderr": 0.03653946969442099, + "acc_norm": 0.2569444444444444, + "acc_norm_stderr": 0.03653946969442099 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2745664739884393, + "acc_stderr": 0.02402774515526501, + "acc_norm": 0.2745664739884393, + "acc_norm_stderr": 0.02402774515526501 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3006134969325153, + "acc_stderr": 0.03602511318806771, + "acc_norm": 0.3006134969325153, + "acc_norm_stderr": 0.03602511318806771 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2345679012345679, + "acc_stderr": 0.02357688174400572, + "acc_norm": 0.2345679012345679, + "acc_norm_stderr": 0.02357688174400572 + }, + 
"harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.37823834196891193, + "acc_stderr": 0.03499807276193338, + "acc_norm": 0.37823834196891193, + "acc_norm_stderr": 0.03499807276193338 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.039994238792813365, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.039994238792813365 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3486238532110092, + "acc_stderr": 0.020431254090714328, + "acc_norm": 0.3486238532110092, + "acc_norm_stderr": 0.020431254090714328 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.373015873015873, + "acc_stderr": 0.04325506042017086, + "acc_norm": 0.373015873015873, + "acc_norm_stderr": 0.04325506042017086 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3006535947712418, + "acc_stderr": 0.02625605383571896, + "acc_norm": 0.3006535947712418, + "acc_norm_stderr": 0.02625605383571896 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.22, + "acc_stderr": 0.0416333199893227, + "acc_norm": 0.22, + "acc_norm_stderr": 0.0416333199893227 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.18181818181818182, + "acc_stderr": 0.03520893951097653, + "acc_norm": 0.18181818181818182, + "acc_norm_stderr": 0.03520893951097653 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.32894736842105265, + "acc_stderr": 0.03823428969926605, + "acc_norm": 0.32894736842105265, + "acc_norm_stderr": 0.03823428969926605 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.22058823529411764, + "acc_stderr": 0.016774672365468517, + "acc_norm": 0.22058823529411764, + "acc_norm_stderr": 0.016774672365468517 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.24113475177304963, + "acc_stderr": 0.02551873104953776, + "acc_norm": 0.24113475177304963, + 
"acc_norm_stderr": 0.02551873104953776 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.16071428571428573, + "acc_stderr": 0.034859460964757394, + "acc_norm": 0.16071428571428573, + "acc_norm_stderr": 0.034859460964757394 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.0340470532865388, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.0340470532865388 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27262569832402234, + "acc_stderr": 0.014893391735249608, + "acc_norm": 0.27262569832402234, + "acc_norm_stderr": 0.014893391735249608 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036846, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036846 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4485294117647059, + "acc_stderr": 0.030211479609121593, + "acc_norm": 0.4485294117647059, + "acc_norm_stderr": 0.030211479609121593 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.40408163265306124, + "acc_stderr": 0.0314147080258659, + "acc_norm": 0.40408163265306124, + "acc_norm_stderr": 0.0314147080258659 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.20253164556962025, + "acc_stderr": 0.026160568246601464, + "acc_norm": 0.20253164556962025, + "acc_norm_stderr": 0.026160568246601464 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2457627118644068, + "acc_stderr": 0.01099615663514269, + "acc_norm": 0.2457627118644068, + "acc_norm_stderr": 0.01099615663514269 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.23039215686274508, + "acc_stderr": 0.029554292605695053, + "acc_norm": 0.23039215686274508, + "acc_norm_stderr": 0.029554292605695053 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 
0.2545454545454545, + "acc_stderr": 0.03401506715249039, + "acc_norm": 0.2545454545454545, + "acc_norm_stderr": 0.03401506715249039 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2594859241126071, + "mc1_stderr": 0.015345409485557961, + "mc2": 0.45977172073584577, + "mc2_stderr": 0.01635863808501988 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.23494687131050768, + "acc_stderr": 0.014576237948550175, + "acc_norm": 0.32585596221959856, + "acc_norm_stderr": 0.016114023894800326 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + 
"harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Korabbit/llama-2-ko-7b-pru", + "model_sha": "cc3233e29e5358863df06a27bc23e1f07ed994e5", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Korabbit/llama-2-ko-7b/result_2023-11-05 04:27:18.json b/Korabbit/llama-2-ko-7b/result_2023-11-05 04:27:18.json new file mode 100644 index 0000000000000000000000000000000000000000..671268eaf4a045a8c8b73bd7c4b7d76418306c8d --- /dev/null +++ b/Korabbit/llama-2-ko-7b/result_2023-11-05 04:27:18.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2354948805460751, + "acc_stderr": 0.012399451855004748, + 
"acc_norm": 0.2713310580204778, + "acc_norm_stderr": 0.012993807727545792 + }, + "harness|ko_hellaswag|10": { + "acc": 0.28649671380203146, + "acc_stderr": 0.004512002459757949, + "acc_norm": 0.3359888468432583, + "acc_norm_stderr": 0.004713696694131676 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4502923976608187, + "acc_stderr": 0.038158273659132366, + "acc_norm": 0.4502923976608187, + "acc_norm_stderr": 0.038158273659132366 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.27184466019417475, + "acc_stderr": 0.044052680241409216, + "acc_norm": 0.27184466019417475, + "acc_norm_stderr": 0.044052680241409216 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3269476372924649, + "acc_stderr": 0.016774908180131463, + "acc_norm": 0.3269476372924649, + "acc_norm_stderr": 0.016774908180131463 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.362962962962963, + "acc_stderr": 0.041539484047424, + "acc_norm": 0.362962962962963, + "acc_norm_stderr": 0.041539484047424 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2851063829787234, + "acc_stderr": 0.02951319662553935, + "acc_norm": 0.2851063829787234, + "acc_norm_stderr": 0.02951319662553935 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3192771084337349, + "acc_stderr": 0.03629335329947859, + "acc_norm": 0.3192771084337349, + "acc_norm_stderr": 0.03629335329947859 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3311897106109325, + "acc_stderr": 0.026730620728004917, + "acc_norm": 0.3311897106109325, + "acc_norm_stderr": 0.026730620728004917 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3452914798206278, + "acc_stderr": 0.03191100192835794, + "acc_norm": 0.3452914798206278, + "acc_norm_stderr": 0.03191100192835794 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.24427480916030533, + "acc_stderr": 0.03768335959728743, 
+ "acc_norm": 0.24427480916030533, + "acc_norm_stderr": 0.03768335959728743 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.29292929292929293, + "acc_stderr": 0.032424979581788166, + "acc_norm": 0.29292929292929293, + "acc_norm_stderr": 0.032424979581788166 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3724137931034483, + "acc_stderr": 0.0402873153294756, + "acc_norm": 0.3724137931034483, + "acc_norm_stderr": 0.0402873153294756 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.16666666666666666, + "acc_stderr": 0.03708284662416543, + "acc_norm": 0.16666666666666666, + "acc_norm_stderr": 0.03708284662416543 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.28991596638655465, + "acc_stderr": 0.029472485833136084, + "acc_norm": 0.28991596638655465, + "acc_norm_stderr": 0.029472485833136084 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2743589743589744, + "acc_stderr": 0.022622765767493214, + "acc_norm": 0.2743589743589744, + "acc_norm_stderr": 0.022622765767493214 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.41, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.41, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.04793724854411019, + "acc_norm": 0.35, + "acc_norm_stderr": 0.04793724854411019 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.04766075165356461, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.04766075165356461 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3448275862068966, + "acc_stderr": 0.03344283744280458, + "acc_norm": 0.3448275862068966, + "acc_norm_stderr": 0.03344283744280458 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3709677419354839, + 
"acc_stderr": 0.02748054188795359, + "acc_norm": 0.3709677419354839, + "acc_norm_stderr": 0.02748054188795359 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.41452991452991456, + "acc_stderr": 0.03227396567623778, + "acc_norm": 0.41452991452991456, + "acc_norm_stderr": 0.03227396567623778 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3132075471698113, + "acc_stderr": 0.02854479331905533, + "acc_norm": 0.3132075471698113, + "acc_norm_stderr": 0.02854479331905533 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.34545454545454546, + "acc_stderr": 0.04554619617541053, + "acc_norm": 0.34545454545454546, + "acc_norm_stderr": 0.04554619617541053 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2814814814814815, + "acc_stderr": 0.027420019350945287, + "acc_norm": 0.2814814814814815, + "acc_norm_stderr": 0.027420019350945287 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2781456953642384, + "acc_stderr": 0.03658603262763743, + "acc_norm": 0.2781456953642384, + "acc_norm_stderr": 0.03658603262763743 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.3482587064676617, + "acc_stderr": 0.033687874661154596, + "acc_norm": 0.3482587064676617, + "acc_norm_stderr": 0.033687874661154596 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.24277456647398843, + "acc_stderr": 0.0326926380614177, + "acc_norm": 0.24277456647398843, + "acc_norm_stderr": 0.0326926380614177 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.26455026455026454, + "acc_stderr": 0.022717467897708607, + "acc_norm": 0.26455026455026454, + "acc_norm_stderr": 0.022717467897708607 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.037455547914624576, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.037455547914624576 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036845, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036845 + }, + 
"harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.48, + "acc_stderr": 0.05021167315686779, + "acc_norm": 0.48, + "acc_norm_stderr": 0.05021167315686779 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.37572254335260113, + "acc_stderr": 0.02607431485165708, + "acc_norm": 0.37572254335260113, + "acc_norm_stderr": 0.02607431485165708 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3128834355828221, + "acc_stderr": 0.036429145782924034, + "acc_norm": 0.3128834355828221, + "acc_norm_stderr": 0.036429145782924034 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3117283950617284, + "acc_stderr": 0.02577311116963045, + "acc_norm": 0.3117283950617284, + "acc_norm_stderr": 0.02577311116963045 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.30569948186528495, + "acc_stderr": 0.033248379397581594, + "acc_norm": 0.30569948186528495, + "acc_norm_stderr": 0.033248379397581594 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.039994238792813365, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.039994238792813365 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.24587155963302754, + "acc_stderr": 0.018461940968708457, + "acc_norm": 0.24587155963302754, + "acc_norm_stderr": 0.018461940968708457 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.1984126984126984, + "acc_stderr": 0.03567016675276863, + "acc_norm": 0.1984126984126984, + "acc_norm_stderr": 0.03567016675276863 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.30392156862745096, + "acc_stderr": 0.026336613469046637, + "acc_norm": 0.30392156862745096, + "acc_norm_stderr": 0.026336613469046637 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 
0.046882617226215034 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5041322314049587, + "acc_stderr": 0.04564198767432754, + "acc_norm": 0.5041322314049587, + "acc_norm_stderr": 0.04564198767432754 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3026315789473684, + "acc_stderr": 0.03738520676119668, + "acc_norm": 0.3026315789473684, + "acc_norm_stderr": 0.03738520676119668 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.018433427649401903, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.018433427649401903 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.29432624113475175, + "acc_stderr": 0.0271871270115038, + "acc_norm": 0.29432624113475175, + "acc_norm_stderr": 0.0271871270115038 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.04547960999764376, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.04547960999764376 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.03256850570293649, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.03256850570293649 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.18382352941176472, + "acc_stderr": 0.023529242185193106, + "acc_norm": 0.18382352941176472, + "acc_norm_stderr": 0.023529242185193106 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2693877551020408, + "acc_stderr": 
0.02840125202902294, + "acc_norm": 0.2693877551020408, + "acc_norm_stderr": 0.02840125202902294 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2911392405063291, + "acc_stderr": 0.029571601065753374, + "acc_norm": 0.2911392405063291, + "acc_norm_stderr": 0.029571601065753374 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.26597131681877445, + "acc_stderr": 0.011285033165551286, + "acc_norm": 0.26597131681877445, + "acc_norm_stderr": 0.011285033165551286 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.03019028245350195, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.03019028245350195 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3212121212121212, + "acc_stderr": 0.0364620496325381, + "acc_norm": 0.3212121212121212, + "acc_norm_stderr": 0.0364620496325381 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2631578947368421, + "mc1_stderr": 0.015415241740237038, + "mc2": 0.4626002465688359, + "mc2_stderr": 0.016132004385948653 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.23376623376623376, + "acc_stderr": 0.01455078258710312, + "acc_norm": 0.3234946871310508, + "acc_norm_stderr": 0.016083627290483675 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + 
"harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + 
"model_name": "Korabbit/llama-2-ko-7b", + "model_sha": "781e10378a374e3d2ecc7eaa71ffad5d912cc040", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Korabbit/my_model/result_2023-10-24 09:04:19.json b/Korabbit/my_model/result_2023-10-24 09:04:19.json new file mode 100644 index 0000000000000000000000000000000000000000..632575a8526a9fc49f3670d055e32dafd46c7097 --- /dev/null +++ b/Korabbit/my_model/result_2023-10-24 09:04:19.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.32081911262798635, + "acc_stderr": 0.013640943091946524, + "acc_norm": 0.37372013651877134, + "acc_norm_stderr": 0.014137708601759095 + }, + "harness|ko_hellaswag|10": { + "acc": 0.38767177853017326, + "acc_stderr": 0.004862232790041553, + "acc_norm": 0.5120493925512846, + "acc_norm_stderr": 0.004988332289642081 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.40350877192982454, + "acc_stderr": 0.03762738699917055, + "acc_norm": 0.40350877192982454, + "acc_norm_stderr": 0.03762738699917055 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.3106796116504854, + "acc_stderr": 0.04582124160161549, + "acc_norm": 0.3106796116504854, + "acc_norm_stderr": 0.04582124160161549 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.42528735632183906, + "acc_stderr": 0.017679225489431447, + "acc_norm": 0.42528735632183906, + "acc_norm_stderr": 0.017679225489431447 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3925925925925926, + "acc_stderr": 0.042185062153688786, + "acc_norm": 0.3925925925925926, + "acc_norm_stderr": 0.042185062153688786 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2425531914893617, + "acc_stderr": 
0.02802022627120022, + "acc_norm": 0.2425531914893617, + "acc_norm_stderr": 0.02802022627120022 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3192771084337349, + "acc_stderr": 0.0362933532994786, + "acc_norm": 0.3192771084337349, + "acc_norm_stderr": 0.0362933532994786 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.37942122186495175, + "acc_stderr": 0.02755994980234781, + "acc_norm": 0.37942122186495175, + "acc_norm_stderr": 0.02755994980234781 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3811659192825112, + "acc_stderr": 0.032596251184168264, + "acc_norm": 0.3811659192825112, + "acc_norm_stderr": 0.032596251184168264 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.366412213740458, + "acc_stderr": 0.042258754519696386, + "acc_norm": 0.366412213740458, + "acc_norm_stderr": 0.042258754519696386 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3434343434343434, + "acc_stderr": 0.03383201223244443, + "acc_norm": 0.3434343434343434, + "acc_norm_stderr": 0.03383201223244443 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.33793103448275863, + "acc_stderr": 0.039417076320648906, + "acc_norm": 0.33793103448275863, + "acc_norm_stderr": 0.039417076320648906 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.17647058823529413, + "acc_stderr": 0.037932811853078084, + "acc_norm": 0.17647058823529413, + "acc_norm_stderr": 0.037932811853078084 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.31512605042016806, + "acc_stderr": 0.03017680828897434, + "acc_norm": 0.31512605042016806, + "acc_norm_stderr": 0.03017680828897434 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.26153846153846155, + "acc_stderr": 0.022282141204204426, + "acc_norm": 0.26153846153846155, + "acc_norm_stderr": 0.022282141204204426 + }, + 
"harness|ko_mmlu_computer_security|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.04750077341199985, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.04750077341199985 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2561576354679803, + "acc_stderr": 0.030712730070982592, + "acc_norm": 0.2561576354679803, + "acc_norm_stderr": 0.030712730070982592 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.32903225806451614, + "acc_stderr": 0.026729499068349972, + "acc_norm": 0.32903225806451614, + "acc_norm_stderr": 0.026729499068349972 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5341880341880342, + "acc_stderr": 0.03267942734081228, + "acc_norm": 0.5341880341880342, + "acc_norm_stderr": 0.03267942734081228 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.37735849056603776, + "acc_stderr": 0.02983280811479601, + "acc_norm": 0.37735849056603776, + "acc_norm_stderr": 0.02983280811479601 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.3, + "acc_stderr": 0.04389311454644286, + "acc_norm": 0.3, + "acc_norm_stderr": 0.04389311454644286 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.02684205787383371, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.02684205787383371 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2582781456953642, + "acc_stderr": 0.035737053147634576, + "acc_norm": 0.2582781456953642, + "acc_norm_stderr": 0.035737053147634576 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.417910447761194, + "acc_stderr": 0.034875586404620636, + "acc_norm": 0.417910447761194, + "acc_norm_stderr": 0.034875586404620636 + }, + 
"harness|ko_mmlu_college_medicine|5": { + "acc": 0.28901734104046245, + "acc_stderr": 0.034564257450869995, + "acc_norm": 0.28901734104046245, + "acc_norm_stderr": 0.034564257450869995 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.25132275132275134, + "acc_stderr": 0.022340482339643898, + "acc_norm": 0.25132275132275134, + "acc_norm_stderr": 0.022340482339643898 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.03745554791462457, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.03745554791462457 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.18, + "acc_stderr": 0.038612291966536934, + "acc_norm": 0.18, + "acc_norm_stderr": 0.038612291966536934 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.53, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.53, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.36416184971098264, + "acc_stderr": 0.02590663263101613, + "acc_norm": 0.36416184971098264, + "acc_norm_stderr": 0.02590663263101613 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.38650306748466257, + "acc_stderr": 0.038258255488486076, + "acc_norm": 0.38650306748466257, + "acc_norm_stderr": 0.038258255488486076 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.02622964917882116, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.02622964917882116 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.3316062176165803, + "acc_stderr": 0.03397636541089116, + "acc_norm": 0.3316062176165803, + "acc_norm_stderr": 0.03397636541089116 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.19298245614035087, + "acc_stderr": 0.03712454853721368, + "acc_norm": 0.19298245614035087, + "acc_norm_stderr": 
0.03712454853721368 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3376146788990826, + "acc_stderr": 0.020275265986638914, + "acc_norm": 0.3376146788990826, + "acc_norm_stderr": 0.020275265986638914 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.1984126984126984, + "acc_stderr": 0.03567016675276862, + "acc_norm": 0.1984126984126984, + "acc_norm_stderr": 0.03567016675276862 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3627450980392157, + "acc_stderr": 0.0275300784471103, + "acc_norm": 0.3627450980392157, + "acc_norm_stderr": 0.0275300784471103 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5206611570247934, + "acc_stderr": 0.045604560863872344, + "acc_norm": 0.5206611570247934, + "acc_norm_stderr": 0.045604560863872344 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3223684210526316, + "acc_stderr": 0.03803510248351586, + "acc_norm": 0.3223684210526316, + "acc_norm_stderr": 0.03803510248351586 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.35784313725490197, + "acc_stderr": 0.019393058402355445, + "acc_norm": 0.35784313725490197, + "acc_norm_stderr": 0.019393058402355445 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.30851063829787234, + "acc_stderr": 0.027553366165101362, + "acc_norm": 0.30851063829787234, + "acc_norm_stderr": 0.027553366165101362 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25892857142857145, + "acc_stderr": 0.041577515398656284, + "acc_norm": 0.25892857142857145, + "acc_norm_stderr": 0.041577515398656284 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.25462962962962965, + "acc_stderr": 0.029711275860005344, + "acc_norm": 0.25462962962962965, + "acc_norm_stderr": 0.029711275860005344 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.264804469273743, + "acc_stderr": 
0.014756906483260664, + "acc_norm": 0.264804469273743, + "acc_norm_stderr": 0.014756906483260664 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.04960449637488584, + "acc_norm": 0.42, + "acc_norm_stderr": 0.04960449637488584 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.2867647058823529, + "acc_stderr": 0.02747227447323382, + "acc_norm": 0.2867647058823529, + "acc_norm_stderr": 0.02747227447323382 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3551020408163265, + "acc_stderr": 0.030635655150387638, + "acc_norm": 0.3551020408163265, + "acc_norm_stderr": 0.030635655150387638 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5147679324894515, + "acc_stderr": 0.032533028078777386, + "acc_norm": 0.5147679324894515, + "acc_norm_stderr": 0.032533028078777386 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.29139504563233376, + "acc_stderr": 0.011605720214257605, + "acc_norm": 0.29139504563233376, + "acc_norm_stderr": 0.011605720214257605 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.3431372549019608, + "acc_stderr": 0.033321399446680854, + "acc_norm": 0.3431372549019608, + "acc_norm_stderr": 0.033321399446680854 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.43636363636363634, + "acc_stderr": 0.03872592983524754, + "acc_norm": 0.43636363636363634, + "acc_norm_stderr": 0.03872592983524754 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24479804161566707, + "mc1_stderr": 0.015051869486715008, + "mc2": 0.396242471455397, + "mc2_stderr": 0.01500796953934626 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3825265643447462, + "acc_stderr": 0.016709165387228817, + "acc_norm": 0.49586776859504134, + "acc_norm_stderr": 0.017189767032130824 + } + }, + "versions": { + "all": 0, 
+ "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + 
"harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Korabbit/my_model", + "model_sha": "4e31f162c656d46d38fb785707b02628c5ef5965", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Kquant03/CognitiveFusion2-4x7B-BF16/result_2024-05-15 17:43:05.json b/Kquant03/CognitiveFusion2-4x7B-BF16/result_2024-05-15 17:43:05.json new file mode 100644 index 0000000000000000000000000000000000000000..eec6ab4019d3ef98eadf526f0b1e995d35214fd1 --- /dev/null +++ b/Kquant03/CognitiveFusion2-4x7B-BF16/result_2024-05-15 17:43:05.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3967576791808874, + "acc_stderr": 0.01429651302018063, + "acc_norm": 0.454778156996587, + "acc_norm_stderr": 0.014551507060836357 + }, + "harness|ko_hellaswag|10": { + "acc": 0.39583748257319257, + "acc_stderr": 0.004880303863138504, + "acc_norm": 0.527185819557857, + "acc_norm_stderr": 0.0049824003689396745 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4853801169590643, + "acc_stderr": 0.038331852752130205, + "acc_norm": 0.4853801169590643, + "acc_norm_stderr": 0.038331852752130205 + }, + "harness|ko_mmlu_management|5": { + "acc": 
0.6213592233009708, + "acc_stderr": 0.04802694698258975, + "acc_norm": 0.6213592233009708, + "acc_norm_stderr": 0.04802694698258975 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4559386973180077, + "acc_stderr": 0.01781040392543537, + "acc_norm": 0.4559386973180077, + "acc_norm_stderr": 0.01781040392543537 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37777777777777777, + "acc_stderr": 0.04188307537595853, + "acc_norm": 0.37777777777777777, + "acc_norm_stderr": 0.04188307537595853 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.40425531914893614, + "acc_stderr": 0.032081157507886836, + "acc_norm": 0.40425531914893614, + "acc_norm_stderr": 0.032081157507886836 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3614457831325301, + "acc_stderr": 0.03740059382029319, + "acc_norm": 0.3614457831325301, + "acc_norm_stderr": 0.03740059382029319 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.42443729903536975, + "acc_stderr": 0.028071928247946205, + "acc_norm": 0.42443729903536975, + "acc_norm_stderr": 0.028071928247946205 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4304932735426009, + "acc_stderr": 0.033231973029429394, + "acc_norm": 0.4304932735426009, + "acc_norm_stderr": 0.033231973029429394 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.44274809160305345, + "acc_stderr": 0.043564472026650695, + "acc_norm": 0.44274809160305345, + "acc_norm_stderr": 0.043564472026650695 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.45, + "acc_stderr": 0.04999999999999999, + "acc_norm": 0.45, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5606060606060606, + "acc_stderr": 0.0353608594752948, + "acc_norm": 0.5606060606060606, + "acc_norm_stderr": 0.0353608594752948 + }, + "harness|ko_mmlu_electrical_engineering|5": { + 
"acc": 0.46206896551724136, + "acc_stderr": 0.041546596717075474, + "acc_norm": 0.46206896551724136, + "acc_norm_stderr": 0.041546596717075474 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.04158307533083286, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.04158307533083286 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5210084033613446, + "acc_stderr": 0.03244980849990029, + "acc_norm": 0.5210084033613446, + "acc_norm_stderr": 0.03244980849990029 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4846153846153846, + "acc_stderr": 0.02533900301010653, + "acc_norm": 0.4846153846153846, + "acc_norm_stderr": 0.02533900301010653 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.65, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.65, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5277777777777778, + "acc_stderr": 0.04826217294139894, + "acc_norm": 0.5277777777777778, + "acc_norm_stderr": 0.04826217294139894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3694581280788177, + "acc_stderr": 0.03395970381998575, + "acc_norm": 0.3694581280788177, + "acc_norm_stderr": 0.03395970381998575 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.44193548387096776, + "acc_stderr": 0.028251557906849738, + "acc_norm": 0.44193548387096776, + "acc_norm_stderr": 0.028251557906849738 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7435897435897436, + "acc_stderr": 0.028605953702004243, + "acc_norm": 0.7435897435897436, + "acc_norm_stderr": 0.028605953702004243 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.46037735849056605, + "acc_stderr": 0.030676096599389167, + "acc_norm": 0.46037735849056605, + "acc_norm_stderr": 0.030676096599389167 + }, + 
"harness|ko_mmlu_public_relations|5": { + "acc": 0.4818181818181818, + "acc_stderr": 0.04785964010794917, + "acc_norm": 0.4818181818181818, + "acc_norm_stderr": 0.04785964010794917 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.34814814814814815, + "acc_stderr": 0.029045600290616255, + "acc_norm": 0.34814814814814815, + "acc_norm_stderr": 0.029045600290616255 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.26490066225165565, + "acc_stderr": 0.03603038545360384, + "acc_norm": 0.26490066225165565, + "acc_norm_stderr": 0.03603038545360384 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5572139303482587, + "acc_stderr": 0.03512310964123937, + "acc_norm": 0.5572139303482587, + "acc_norm_stderr": 0.03512310964123937 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3988439306358382, + "acc_stderr": 0.03733626655383509, + "acc_norm": 0.3988439306358382, + "acc_norm_stderr": 0.03733626655383509 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.37566137566137564, + "acc_stderr": 0.024942368931159784, + "acc_norm": 0.37566137566137564, + "acc_norm_stderr": 0.024942368931159784 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3541666666666667, + "acc_stderr": 0.039994111357535424, + "acc_norm": 0.3541666666666667, + "acc_norm_stderr": 0.039994111357535424 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.56, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.56, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4913294797687861, + "acc_stderr": 0.02691504735536981, + "acc_norm": 0.4913294797687861, + "acc_norm_stderr": 0.02691504735536981 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4662576687116564, + "acc_stderr": 0.039194155450484096, + "acc_norm": 0.4662576687116564, + 
"acc_norm_stderr": 0.039194155450484096 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.45987654320987653, + "acc_stderr": 0.027731022753539274, + "acc_norm": 0.45987654320987653, + "acc_norm_stderr": 0.027731022753539274 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.48186528497409326, + "acc_stderr": 0.03606065001832917, + "acc_norm": 0.48186528497409326, + "acc_norm_stderr": 0.03606065001832917 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3157894736842105, + "acc_stderr": 0.04372748290278007, + "acc_norm": 0.3157894736842105, + "acc_norm_stderr": 0.04372748290278007 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.4990825688073395, + "acc_stderr": 0.021437287056051215, + "acc_norm": 0.4990825688073395, + "acc_norm_stderr": 0.021437287056051215 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.0442626668137991, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.0442626668137991 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.46078431372549017, + "acc_stderr": 0.028541722692618874, + "acc_norm": 0.46078431372549017, + "acc_norm_stderr": 0.028541722692618874 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5950413223140496, + "acc_stderr": 0.044811377559424694, + "acc_norm": 0.5950413223140496, + "acc_norm_stderr": 0.044811377559424694 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40789473684210525, + "acc_stderr": 0.03999309712777472, + "acc_norm": 0.40789473684210525, + "acc_norm_stderr": 0.03999309712777472 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.39052287581699346, + "acc_stderr": 
0.019737008998094597, + "acc_norm": 0.39052287581699346, + "acc_norm_stderr": 0.019737008998094597 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.36524822695035464, + "acc_stderr": 0.028723863853281278, + "acc_norm": 0.36524822695035464, + "acc_norm_stderr": 0.028723863853281278 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.375, + "acc_stderr": 0.04595091388086298, + "acc_norm": 0.375, + "acc_norm_stderr": 0.04595091388086298 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.38425925925925924, + "acc_stderr": 0.03317354514310742, + "acc_norm": 0.38425925925925924, + "acc_norm_stderr": 0.03317354514310742 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27932960893854747, + "acc_stderr": 0.015005762446786171, + "acc_norm": 0.27932960893854747, + "acc_norm_stderr": 0.015005762446786171 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.67, + "acc_stderr": 0.047258156262526094, + "acc_norm": 0.67, + "acc_norm_stderr": 0.047258156262526094 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.33088235294117646, + "acc_stderr": 0.028582709753898438, + "acc_norm": 0.33088235294117646, + "acc_norm_stderr": 0.028582709753898438 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5346938775510204, + "acc_stderr": 0.03193207024425314, + "acc_norm": 0.5346938775510204, + "acc_norm_stderr": 0.03193207024425314 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5864978902953587, + "acc_stderr": 0.03205649904851858, + "acc_norm": 0.5864978902953587, + "acc_norm_stderr": 0.03205649904851858 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.33833116036505867, + "acc_stderr": 0.012084265626344213, + "acc_norm": 0.33833116036505867, + "acc_norm_stderr": 0.012084265626344213 + }, + 
"harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4950980392156863, + "acc_stderr": 0.03509143375606786, + "acc_norm": 0.4950980392156863, + "acc_norm_stderr": 0.03509143375606786 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4727272727272727, + "acc_stderr": 0.03898531605579419, + "acc_norm": 0.4727272727272727, + "acc_norm_stderr": 0.03898531605579419 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.4283965728274174, + "mc1_stderr": 0.017323088597314764, + "mc2": 0.6065501450950637, + "mc2_stderr": 0.016264199884541353 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4179456906729634, + "acc_stderr": 0.016957292005279713, + "acc_norm": 0.42266824085005905, + "acc_norm_stderr": 0.016983506079577604 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, 
+ "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Kquant03/CognitiveFusion2-4x7B-BF16", + "model_sha": "db45b86c462bb93db7ba4f2c3fe3517582c859a1", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Kukedlc/NeuralExperiment-7b-MagicCoder-v7.5/result_2024-07-27 03:40:26.json b/Kukedlc/NeuralExperiment-7b-MagicCoder-v7.5/result_2024-07-27 03:40:26.json new file mode 100644 index 
0000000000000000000000000000000000000000..1934f9e9c0c9bc56421c23ade755990e06decf69 --- /dev/null +++ b/Kukedlc/NeuralExperiment-7b-MagicCoder-v7.5/result_2024-07-27 03:40:26.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3856655290102389, + "acc_stderr": 0.014224250973257177, + "acc_norm": 0.43430034129692835, + "acc_norm_stderr": 0.014484703048857355 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3913563035251942, + "acc_stderr": 0.004870563921220623, + "acc_norm": 0.5158334993029277, + "acc_norm_stderr": 0.004987278910505112 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5146198830409356, + "acc_stderr": 0.038331852752130254, + "acc_norm": 0.5146198830409356, + "acc_norm_stderr": 0.038331852752130254 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5825242718446602, + "acc_stderr": 0.048828405482122375, + "acc_norm": 0.5825242718446602, + "acc_norm_stderr": 0.048828405482122375 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4508301404853129, + "acc_stderr": 0.017793297572699058, + "acc_norm": 0.4508301404853129, + "acc_norm_stderr": 0.017793297572699058 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3851851851851852, + "acc_stderr": 0.042039210401562783, + "acc_norm": 0.3851851851851852, + "acc_norm_stderr": 0.042039210401562783 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.425531914893617, + "acc_stderr": 0.03232146916224468, + "acc_norm": 0.425531914893617, + "acc_norm_stderr": 0.03232146916224468 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.37349397590361444, + "acc_stderr": 0.03765845117168863, + "acc_norm": 0.37349397590361444, + "acc_norm_stderr": 0.03765845117168863 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4405144694533762, + "acc_stderr": 0.028196400574197422, + "acc_norm": 0.4405144694533762, + 
"acc_norm_stderr": 0.028196400574197422 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4484304932735426, + "acc_stderr": 0.033378837362550984, + "acc_norm": 0.4484304932735426, + "acc_norm_stderr": 0.033378837362550984 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.42748091603053434, + "acc_stderr": 0.04338920305792401, + "acc_norm": 0.42748091603053434, + "acc_norm_stderr": 0.04338920305792401 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5303030303030303, + "acc_stderr": 0.0355580405176393, + "acc_norm": 0.5303030303030303, + "acc_norm_stderr": 0.0355580405176393 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.45517241379310347, + "acc_stderr": 0.04149886942192117, + "acc_norm": 0.45517241379310347, + "acc_norm_stderr": 0.04149886942192117 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.044405219061793275, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.044405219061793275 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5126050420168067, + "acc_stderr": 0.03246816765752174, + "acc_norm": 0.5126050420168067, + "acc_norm_stderr": 0.03246816765752174 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.025294608023986486, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.025294608023986486 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.64, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.64, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5370370370370371, + "acc_stderr": 0.04820403072760627, + "acc_norm": 
0.5370370370370371, + "acc_norm_stderr": 0.04820403072760627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.35960591133004927, + "acc_stderr": 0.03376458246509567, + "acc_norm": 0.35960591133004927, + "acc_norm_stderr": 0.03376458246509567 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.43548387096774194, + "acc_stderr": 0.02820622559150274, + "acc_norm": 0.43548387096774194, + "acc_norm_stderr": 0.02820622559150274 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7521367521367521, + "acc_stderr": 0.0282863240755644, + "acc_norm": 0.7521367521367521, + "acc_norm_stderr": 0.0282863240755644 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.45660377358490567, + "acc_stderr": 0.03065674869673943, + "acc_norm": 0.45660377358490567, + "acc_norm_stderr": 0.03065674869673943 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4818181818181818, + "acc_stderr": 0.04785964010794916, + "acc_norm": 0.4818181818181818, + "acc_norm_stderr": 0.04785964010794916 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3296296296296296, + "acc_stderr": 0.028661201116524575, + "acc_norm": 0.3296296296296296, + "acc_norm_stderr": 0.028661201116524575 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + "acc_stderr": 0.037345356767871984, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.037345356767871984 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6069651741293532, + "acc_stderr": 0.0345368246603156, + "acc_norm": 0.6069651741293532, + "acc_norm_stderr": 0.0345368246603156 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3815028901734104, + "acc_stderr": 0.0370385119309952, + "acc_norm": 0.3815028901734104, + "acc_norm_stderr": 0.0370385119309952 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.36507936507936506, + "acc_stderr": 0.02479606060269995, + "acc_norm": 0.36507936507936506, + "acc_norm_stderr": 0.02479606060269995 + }, + 
"harness|ko_mmlu_college_biology|5": { + "acc": 0.3958333333333333, + "acc_stderr": 0.04089465449325583, + "acc_norm": 0.3958333333333333, + "acc_norm_stderr": 0.04089465449325583 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.55, + "acc_stderr": 0.05, + "acc_norm": 0.55, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5086705202312138, + "acc_stderr": 0.026915047355369804, + "acc_norm": 0.5086705202312138, + "acc_norm_stderr": 0.026915047355369804 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.49693251533742333, + "acc_stderr": 0.03928297078179663, + "acc_norm": 0.49693251533742333, + "acc_norm_stderr": 0.03928297078179663 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.02774431344337654, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.02774431344337654 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542126, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542126 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.49222797927461137, + "acc_stderr": 0.03608003225569654, + "acc_norm": 0.49222797927461137, + "acc_norm_stderr": 0.03608003225569654 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3157894736842105, + "acc_stderr": 0.04372748290278008, + "acc_norm": 0.3157894736842105, + "acc_norm_stderr": 0.04372748290278008 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.4972477064220184, + "acc_stderr": 0.02143699835976532, + "acc_norm": 0.4972477064220184, + "acc_norm_stderr": 0.02143699835976532 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.40476190476190477, + "acc_stderr": 0.04390259265377561, + "acc_norm": 0.40476190476190477, + "acc_norm_stderr": 0.04390259265377561 + }, + 
"harness|ko_mmlu_nutrition|5": { + "acc": 0.477124183006536, + "acc_stderr": 0.028599936776089786, + "acc_norm": 0.477124183006536, + "acc_norm_stderr": 0.028599936776089786 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6033057851239669, + "acc_stderr": 0.044658697805310094, + "acc_norm": 0.6033057851239669, + "acc_norm_stderr": 0.044658697805310094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3881578947368421, + "acc_stderr": 0.03965842097512744, + "acc_norm": 0.3881578947368421, + "acc_norm_stderr": 0.03965842097512744 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.39215686274509803, + "acc_stderr": 0.019751726508762633, + "acc_norm": 0.39215686274509803, + "acc_norm_stderr": 0.019751726508762633 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.375886524822695, + "acc_stderr": 0.028893955412115886, + "acc_norm": 0.375886524822695, + "acc_norm_stderr": 0.028893955412115886 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.39285714285714285, + "acc_stderr": 0.04635550135609976, + "acc_norm": 0.39285714285714285, + "acc_norm_stderr": 0.04635550135609976 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.033622774366080424, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.033622774366080424 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2659217877094972, + "acc_stderr": 0.014776765066438888, + "acc_norm": 0.2659217877094972, + "acc_norm_stderr": 0.014776765066438888 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.65, + "acc_stderr": 0.04793724854411021, + "acc_norm": 0.65, + "acc_norm_stderr": 
0.04793724854411021 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3382352941176471, + "acc_stderr": 0.028739328513983572, + "acc_norm": 0.3382352941176471, + "acc_norm_stderr": 0.028739328513983572 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5510204081632653, + "acc_stderr": 0.03184213866687578, + "acc_norm": 0.5510204081632653, + "acc_norm_stderr": 0.03184213866687578 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5780590717299579, + "acc_stderr": 0.032148146302403695, + "acc_norm": 0.5780590717299579, + "acc_norm_stderr": 0.032148146302403695 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.34419817470664926, + "acc_stderr": 0.012134433741002574, + "acc_norm": 0.34419817470664926, + "acc_norm_stderr": 0.012134433741002574 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.0346022832723917, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.0346022832723917 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4, + "acc_stderr": 0.03825460278380026, + "acc_norm": 0.4, + "acc_norm_stderr": 0.03825460278380026 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.41370869033047736, + "mc1_stderr": 0.017240861812099804, + "mc2": 0.5683214760148058, + "mc2_stderr": 0.016219347188948595 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.42739079102715466, + "acc_stderr": 0.017008129844823156, + "acc_norm": 0.44155844155844154, + "acc_norm_stderr": 0.017072525875563103 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + 
"harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + 
"harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Kukedlc/NeuralExperiment-7b-MagicCoder-v7.5", + "model_sha": "43ea8d27d652dc15e4d27f665c5d636a5937780b", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Kukedlc/NeuralLLaMa-3-8b-DT-v0.1/result_2024-05-17 15:17:23.json b/Kukedlc/NeuralLLaMa-3-8b-DT-v0.1/result_2024-05-17 15:17:23.json new file mode 100644 index 0000000000000000000000000000000000000000..28da19ff029f103909dbf99dc05c8587c8d80336 --- /dev/null +++ b/Kukedlc/NeuralLLaMa-3-8b-DT-v0.1/result_2024-05-17 15:17:23.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4138225255972696, + "acc_stderr": 0.014392730009221007, + "acc_norm": 0.4803754266211604, + "acc_norm_stderr": 0.014600132075947094 + }, + "harness|ko_hellaswag|10": { + "acc": 0.37950607448715395, + "acc_stderr": 0.004842723234022032, + "acc_norm": 0.5035849432383988, + "acc_norm_stderr": 0.004989653154272497 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6023391812865497, + "acc_stderr": 0.0375363895576169, + "acc_norm": 0.6023391812865497, + "acc_norm_stderr": 0.0375363895576169 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6407766990291263, + "acc_stderr": 0.04750458399041695, + "acc_norm": 0.6407766990291263, + "acc_norm_stderr": 0.04750458399041695 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4661558109833972, + "acc_stderr": 0.017838956009136802, + "acc_norm": 0.4661558109833972, + "acc_norm_stderr": 0.017838956009136802 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37037037037037035, + "acc_stderr": 
0.04171654161354544, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.04171654161354544 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4851063829787234, + "acc_stderr": 0.032671518489247764, + "acc_norm": 0.4851063829787234, + "acc_norm_stderr": 0.032671518489247764 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3674698795180723, + "acc_stderr": 0.03753267402120574, + "acc_norm": 0.3674698795180723, + "acc_norm_stderr": 0.03753267402120574 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5691318327974276, + "acc_stderr": 0.02812534098397271, + "acc_norm": 0.5691318327974276, + "acc_norm_stderr": 0.02812534098397271 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5291479820627802, + "acc_stderr": 0.03350073248773404, + "acc_norm": 0.5291479820627802, + "acc_norm_stderr": 0.03350073248773404 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4961832061068702, + "acc_stderr": 0.043851623256015534, + "acc_norm": 0.4961832061068702, + "acc_norm_stderr": 0.043851623256015534 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5202020202020202, + "acc_stderr": 0.035594435655639196, + "acc_norm": 0.5202020202020202, + "acc_norm_stderr": 0.035594435655639196 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5793103448275863, + "acc_stderr": 0.0411391498118926, + "acc_norm": 0.5793103448275863, + "acc_norm_stderr": 0.0411391498118926 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.4019607843137255, + "acc_stderr": 0.04878608714466996, + "acc_norm": 0.4019607843137255, + "acc_norm_stderr": 0.04878608714466996 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 
0.5462184873949579, + "acc_stderr": 0.032339434681820885, + "acc_norm": 0.5462184873949579, + "acc_norm_stderr": 0.032339434681820885 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5153846153846153, + "acc_stderr": 0.025339003010106505, + "acc_norm": 0.5153846153846153, + "acc_norm_stderr": 0.025339003010106505 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.63, + "acc_stderr": 0.04852365870939098, + "acc_norm": 0.63, + "acc_norm_stderr": 0.04852365870939098 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6296296296296297, + "acc_stderr": 0.04668408033024931, + "acc_norm": 0.6296296296296297, + "acc_norm_stderr": 0.04668408033024931 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4630541871921182, + "acc_stderr": 0.035083705204426656, + "acc_norm": 0.4630541871921182, + "acc_norm_stderr": 0.035083705204426656 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5483870967741935, + "acc_stderr": 0.02831050034856839, + "acc_norm": 0.5483870967741935, + "acc_norm_stderr": 0.02831050034856839 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7649572649572649, + "acc_stderr": 0.027778835904935427, + "acc_norm": 0.7649572649572649, + "acc_norm_stderr": 0.027778835904935427 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.539622641509434, + "acc_stderr": 0.030676096599389184, + "acc_norm": 0.539622641509434, + "acc_norm_stderr": 0.030676096599389184 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5363636363636364, + "acc_stderr": 0.04776449162396197, + "acc_norm": 0.5363636363636364, + "acc_norm_stderr": 0.04776449162396197 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.4, + "acc_stderr": 0.029869605095316904, + "acc_norm": 0.4, + "acc_norm_stderr": 0.029869605095316904 + }, + "harness|ko_mmlu_high_school_physics|5": { 
+ "acc": 0.37748344370860926, + "acc_stderr": 0.039580272311215706, + "acc_norm": 0.37748344370860926, + "acc_norm_stderr": 0.039580272311215706 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6965174129353234, + "acc_stderr": 0.03251006816458618, + "acc_norm": 0.6965174129353234, + "acc_norm_stderr": 0.03251006816458618 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.47398843930635837, + "acc_stderr": 0.038073017265045105, + "acc_norm": 0.47398843930635837, + "acc_norm_stderr": 0.038073017265045105 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3941798941798942, + "acc_stderr": 0.025167982333894143, + "acc_norm": 0.3941798941798942, + "acc_norm_stderr": 0.025167982333894143 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5138888888888888, + "acc_stderr": 0.041795966175810016, + "acc_norm": 0.5138888888888888, + "acc_norm_stderr": 0.041795966175810016 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.71, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.71, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5635838150289018, + "acc_stderr": 0.026700545424943687, + "acc_norm": 0.5635838150289018, + "acc_norm_stderr": 0.026700545424943687 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4662576687116564, + "acc_stderr": 0.039194155450484096, + "acc_norm": 0.4662576687116564, + "acc_norm_stderr": 0.039194155450484096 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5432098765432098, + "acc_stderr": 0.027716661650194038, + "acc_norm": 0.5432098765432098, + "acc_norm_stderr": 0.027716661650194038 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709390974, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709390974 + }, + 
"harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5751295336787565, + "acc_stderr": 0.035674713352125395, + "acc_norm": 0.5751295336787565, + "acc_norm_stderr": 0.035674713352125395 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.37719298245614036, + "acc_stderr": 0.04559522141958216, + "acc_norm": 0.37719298245614036, + "acc_norm_stderr": 0.04559522141958216 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6055045871559633, + "acc_stderr": 0.02095464210858748, + "acc_norm": 0.6055045871559633, + "acc_norm_stderr": 0.02095464210858748 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.0442626668137991, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.0442626668137991 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5588235294117647, + "acc_stderr": 0.028431095444176643, + "acc_norm": 0.5588235294117647, + "acc_norm_stderr": 0.028431095444176643 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.65, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.65, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7024793388429752, + "acc_stderr": 0.04173349148083498, + "acc_norm": 0.7024793388429752, + "acc_norm_stderr": 0.04173349148083498 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5197368421052632, + "acc_stderr": 0.04065771002562603, + "acc_norm": 0.5197368421052632, + "acc_norm_stderr": 0.04065771002562603 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4477124183006536, + "acc_stderr": 0.02011692534742242, + "acc_norm": 0.4477124183006536, + "acc_norm_stderr": 0.02011692534742242 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.35815602836879434, + "acc_stderr": 0.028602085862759422, + "acc_norm": 0.35815602836879434, + "acc_norm_stderr": 0.028602085862759422 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.44642857142857145, + "acc_stderr": 0.04718471485219588, + 
"acc_norm": 0.44642857142857145, + "acc_norm_stderr": 0.04718471485219588 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.033888571185023246, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.033888571185023246 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.3005586592178771, + "acc_stderr": 0.015334566806251155, + "acc_norm": 0.3005586592178771, + "acc_norm_stderr": 0.015334566806251155 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.049999999999999996, + "acc_norm": 0.45, + "acc_norm_stderr": 0.049999999999999996 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.68, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.68, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.39705882352941174, + "acc_stderr": 0.02972215209928006, + "acc_norm": 0.39705882352941174, + "acc_norm_stderr": 0.02972215209928006 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6448979591836734, + "acc_stderr": 0.030635655150387634, + "acc_norm": 0.6448979591836734, + "acc_norm_stderr": 0.030635655150387634 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6624472573839663, + "acc_stderr": 0.03078154910202621, + "acc_norm": 0.6624472573839663, + "acc_norm_stderr": 0.03078154910202621 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.38722294654498046, + "acc_stderr": 0.012441155326854927, + "acc_norm": 0.38722294654498046, + "acc_norm_stderr": 0.012441155326854927 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5686274509803921, + "acc_stderr": 0.03476099060501635, + "acc_norm": 0.5686274509803921, + "acc_norm_stderr": 0.03476099060501635 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6727272727272727, + "acc_stderr": 0.03663974994391242, + "acc_norm": 0.6727272727272727, + "acc_norm_stderr": 0.03663974994391242 + }, + 
"harness|ko_truthfulqa_mc|0": { + "mc1": 0.3598531211750306, + "mc1_stderr": 0.016801860466677136, + "mc2": 0.5403860863126048, + "mc2_stderr": 0.015985130462394043 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5017709563164109, + "acc_stderr": 0.017190246276231863, + "acc_norm": 0.525383707201889, + "acc_norm_stderr": 0.01716818720142925 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Kukedlc/NeuralLLaMa-3-8b-DT-v0.1", + "model_sha": "0d97a0b66d4c11915c100cde67f8889ca1821bdb", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Kukedlc/NeuralLLaMa-3-8b-ORPO-v0.3/result_2024-05-28 07:13:11.json b/Kukedlc/NeuralLLaMa-3-8b-ORPO-v0.3/result_2024-05-28 07:13:11.json new file mode 100644 index 0000000000000000000000000000000000000000..3b442d59d3c429707d9e2cf72c74bdf2294b01be --- /dev/null +++ b/Kukedlc/NeuralLLaMa-3-8b-ORPO-v0.3/result_2024-05-28 07:13:11.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.41638225255972694, + "acc_stderr": 0.014405618279436178, + "acc_norm": 0.4803754266211604, + "acc_norm_stderr": 0.014600132075947096 + }, + "harness|ko_hellaswag|10": { + "acc": 
0.38329018123879705, + "acc_stderr": 0.004851944170671257, + "acc_norm": 0.5059749053973313, + "acc_norm_stderr": 0.0049894251333779055 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6140350877192983, + "acc_stderr": 0.03733756969066164, + "acc_norm": 0.6140350877192983, + "acc_norm_stderr": 0.03733756969066164 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6407766990291263, + "acc_stderr": 0.047504583990416946, + "acc_norm": 0.6407766990291263, + "acc_norm_stderr": 0.047504583990416946 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4878671775223499, + "acc_stderr": 0.01787469866749135, + "acc_norm": 0.4878671775223499, + "acc_norm_stderr": 0.01787469866749135 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37777777777777777, + "acc_stderr": 0.04188307537595853, + "acc_norm": 0.37777777777777777, + "acc_norm_stderr": 0.04188307537595853 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.49361702127659574, + "acc_stderr": 0.03268335899936337, + "acc_norm": 0.49361702127659574, + "acc_norm_stderr": 0.03268335899936337 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3855421686746988, + "acc_stderr": 0.03789134424611548, + "acc_norm": 0.3855421686746988, + "acc_norm_stderr": 0.03789134424611548 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5659163987138264, + "acc_stderr": 0.028150232244535608, + "acc_norm": 0.5659163987138264, + "acc_norm_stderr": 0.028150232244535608 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5201793721973094, + "acc_stderr": 0.033530461674123, + "acc_norm": 0.5201793721973094, + "acc_norm_stderr": 0.033530461674123 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4961832061068702, + "acc_stderr": 0.043851623256015534, + "acc_norm": 0.4961832061068702, + "acc_norm_stderr": 0.043851623256015534 + }, + 
"harness|ko_mmlu_medical_genetics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.035476014940069384, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.035476014940069384 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.593103448275862, + "acc_stderr": 0.04093793981266237, + "acc_norm": 0.593103448275862, + "acc_norm_stderr": 0.04093793981266237 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.4117647058823529, + "acc_stderr": 0.04897104952726366, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.04897104952726366 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5546218487394958, + "acc_stderr": 0.03228410626716391, + "acc_norm": 0.5546218487394958, + "acc_norm_stderr": 0.03228410626716391 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5282051282051282, + "acc_stderr": 0.02531063925493387, + "acc_norm": 0.5282051282051282, + "acc_norm_stderr": 0.02531063925493387 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.63, + "acc_stderr": 0.048523658709390974, + "acc_norm": 0.63, + "acc_norm_stderr": 0.048523658709390974 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6574074074074074, + "acc_stderr": 0.045879047413018105, + "acc_norm": 0.6574074074074074, + "acc_norm_stderr": 0.045879047413018105 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4433497536945813, + "acc_stderr": 0.03495334582162934, + "acc_norm": 0.4433497536945813, + "acc_norm_stderr": 0.03495334582162934 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5483870967741935, + "acc_stderr": 0.02831050034856839, + "acc_norm": 0.5483870967741935, + "acc_norm_stderr": 
0.02831050034856839 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7649572649572649, + "acc_stderr": 0.027778835904935423, + "acc_norm": 0.7649572649572649, + "acc_norm_stderr": 0.027778835904935423 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5320754716981132, + "acc_stderr": 0.03070948699255655, + "acc_norm": 0.5320754716981132, + "acc_norm_stderr": 0.03070948699255655 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5363636363636364, + "acc_stderr": 0.04776449162396197, + "acc_norm": 0.5363636363636364, + "acc_norm_stderr": 0.04776449162396197 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3925925925925926, + "acc_stderr": 0.029773847012532967, + "acc_norm": 0.3925925925925926, + "acc_norm_stderr": 0.029773847012532967 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3973509933774834, + "acc_stderr": 0.03995524007681681, + "acc_norm": 0.3973509933774834, + "acc_norm_stderr": 0.03995524007681681 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6965174129353234, + "acc_stderr": 0.03251006816458619, + "acc_norm": 0.6965174129353234, + "acc_norm_stderr": 0.03251006816458619 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4624277456647399, + "acc_stderr": 0.0380168510452446, + "acc_norm": 0.4624277456647399, + "acc_norm_stderr": 0.0380168510452446 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.02510742548113729, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.02510742548113729 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4861111111111111, + "acc_stderr": 0.04179596617581, + "acc_norm": 0.4861111111111111, + "acc_norm_stderr": 0.04179596617581 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.72, + "acc_stderr": 0.04512608598542126, + "acc_norm": 0.72, + 
"acc_norm_stderr": 0.04512608598542126 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5549132947976878, + "acc_stderr": 0.026756255129663772, + "acc_norm": 0.5549132947976878, + "acc_norm_stderr": 0.026756255129663772 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.44785276073619634, + "acc_stderr": 0.03906947479456601, + "acc_norm": 0.44785276073619634, + "acc_norm_stderr": 0.03906947479456601 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5339506172839507, + "acc_stderr": 0.027756535257347663, + "acc_norm": 0.5339506172839507, + "acc_norm_stderr": 0.027756535257347663 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6010362694300518, + "acc_stderr": 0.03533999094065696, + "acc_norm": 0.6010362694300518, + "acc_norm_stderr": 0.03533999094065696 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.32456140350877194, + "acc_stderr": 0.04404556157374768, + "acc_norm": 0.32456140350877194, + "acc_norm_stderr": 0.04404556157374768 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6, + "acc_stderr": 0.021004201260420075, + "acc_norm": 0.6, + "acc_norm_stderr": 0.021004201260420075 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4365079365079365, + "acc_stderr": 0.04435932892851466, + "acc_norm": 0.4365079365079365, + "acc_norm_stderr": 0.04435932892851466 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.02845263998508801, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.02845263998508801 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.62, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.62, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6942148760330579, + "acc_stderr": 0.04205953933884123, + "acc_norm": 0.6942148760330579, + 
"acc_norm_stderr": 0.04205953933884123 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.506578947368421, + "acc_stderr": 0.04068590050224971, + "acc_norm": 0.506578947368421, + "acc_norm_stderr": 0.04068590050224971 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4526143790849673, + "acc_stderr": 0.020136790918492544, + "acc_norm": 0.4526143790849673, + "acc_norm_stderr": 0.020136790918492544 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3723404255319149, + "acc_stderr": 0.02883892147125145, + "acc_norm": 0.3723404255319149, + "acc_norm_stderr": 0.02883892147125145 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.48214285714285715, + "acc_stderr": 0.047427623612430116, + "acc_norm": 0.48214285714285715, + "acc_norm_stderr": 0.047427623612430116 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.033888571185023246, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.033888571185023246 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.3039106145251397, + "acc_stderr": 0.015382845587584524, + "acc_norm": 0.3039106145251397, + "acc_norm_stderr": 0.015382845587584524 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.69, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.69, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.40441176470588236, + "acc_stderr": 0.029812630701569743, + "acc_norm": 0.40441176470588236, + "acc_norm_stderr": 0.029812630701569743 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6571428571428571, + "acc_stderr": 0.03038726291954772, + "acc_norm": 0.6571428571428571, + "acc_norm_stderr": 0.03038726291954772 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6708860759493671, 
+ "acc_stderr": 0.030587326294702368, + "acc_norm": 0.6708860759493671, + "acc_norm_stderr": 0.030587326294702368 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3833116036505867, + "acc_stderr": 0.012417603662901188, + "acc_norm": 0.3833116036505867, + "acc_norm_stderr": 0.012417603662901188 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5833333333333334, + "acc_stderr": 0.03460228327239172, + "acc_norm": 0.5833333333333334, + "acc_norm_stderr": 0.03460228327239172 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6727272727272727, + "acc_stderr": 0.03663974994391242, + "acc_norm": 0.6727272727272727, + "acc_norm_stderr": 0.03663974994391242 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3598531211750306, + "mc1_stderr": 0.016801860466677133, + "mc2": 0.5323892196869342, + "mc2_stderr": 0.01588909765492462 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4781582054309327, + "acc_stderr": 0.017173944474294378, + "acc_norm": 0.5053128689492326, + "acc_norm_stderr": 0.01718938362722971 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + 
"harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Kukedlc/NeuralLLaMa-3-8b-ORPO-v0.3", + "model_sha": "aa176c0db7791a1c09039135791145b0704a5f46", + "model_dtype": "torch.float16", + "lighteval_sha": "", + 
"num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Kukedlc/NeuralLLaMa-3-8b-ORPO-v0.4/result_2024-06-15 15:34:24.json b/Kukedlc/NeuralLLaMa-3-8b-ORPO-v0.4/result_2024-06-15 15:34:24.json new file mode 100644 index 0000000000000000000000000000000000000000..f7abb4788311201a336c8ad9e929b47c9159b50d --- /dev/null +++ b/Kukedlc/NeuralLLaMa-3-8b-ORPO-v0.4/result_2024-06-15 15:34:24.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3984641638225256, + "acc_stderr": 0.014306946052735567, + "acc_norm": 0.45051194539249145, + "acc_norm_stderr": 0.014539646098471627 + }, + "harness|ko_hellaswag|10": { + "acc": 0.37661820354511055, + "acc_stderr": 0.004835475957610932, + "acc_norm": 0.5002987452698665, + "acc_norm_stderr": 0.004989780520782245 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6023391812865497, + "acc_stderr": 0.03753638955761691, + "acc_norm": 0.6023391812865497, + "acc_norm_stderr": 0.03753638955761691 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6990291262135923, + "acc_stderr": 0.045416094465039476, + "acc_norm": 0.6990291262135923, + "acc_norm_stderr": 0.045416094465039476 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5108556832694764, + "acc_stderr": 0.017875748840242407, + "acc_norm": 0.5108556832694764, + "acc_norm_stderr": 0.017875748840242407 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4148148148148148, + "acc_stderr": 0.04256193767901407, + "acc_norm": 0.4148148148148148, + "acc_norm_stderr": 0.04256193767901407 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542126, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542126 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.5148936170212766, + "acc_stderr": 0.032671518489247764, + "acc_norm": 0.5148936170212766, + "acc_norm_stderr": 0.032671518489247764 + }, + 
"harness|ko_mmlu_virology|5": { + "acc": 0.41566265060240964, + "acc_stderr": 0.03836722176598053, + "acc_norm": 0.41566265060240964, + "acc_norm_stderr": 0.03836722176598053 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5498392282958199, + "acc_stderr": 0.028256660723360177, + "acc_norm": 0.5498392282958199, + "acc_norm_stderr": 0.028256660723360177 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5246636771300448, + "acc_stderr": 0.03351695167652628, + "acc_norm": 0.5246636771300448, + "acc_norm_stderr": 0.03351695167652628 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5572519083969466, + "acc_stderr": 0.043564472026650695, + "acc_norm": 0.5572519083969466, + "acc_norm_stderr": 0.043564472026650695 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.48, + "acc_stderr": 0.05021167315686779, + "acc_norm": 0.48, + "acc_norm_stderr": 0.05021167315686779 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5656565656565656, + "acc_stderr": 0.03531505879359182, + "acc_norm": 0.5656565656565656, + "acc_norm_stderr": 0.03531505879359182 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5586206896551724, + "acc_stderr": 0.04137931034482757, + "acc_norm": 0.5586206896551724, + "acc_norm_stderr": 0.04137931034482757 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.04835503696107223, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.04835503696107223 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5546218487394958, + "acc_stderr": 0.03228410626716391, + "acc_norm": 0.5546218487394958, + "acc_norm_stderr": 0.03228410626716391 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.517948717948718, + "acc_stderr": 0.02533466708095489, + "acc_norm": 0.517948717948718, + "acc_norm_stderr": 0.02533466708095489 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + 
"acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6296296296296297, + "acc_stderr": 0.04668408033024931, + "acc_norm": 0.6296296296296297, + "acc_norm_stderr": 0.04668408033024931 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.46798029556650245, + "acc_stderr": 0.03510766597959217, + "acc_norm": 0.46798029556650245, + "acc_norm_stderr": 0.03510766597959217 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5258064516129032, + "acc_stderr": 0.028406095057653326, + "acc_norm": 0.5258064516129032, + "acc_norm_stderr": 0.028406095057653326 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.782051282051282, + "acc_stderr": 0.02704685763071667, + "acc_norm": 0.782051282051282, + "acc_norm_stderr": 0.02704685763071667 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.539622641509434, + "acc_stderr": 0.030676096599389188, + "acc_norm": 0.539622641509434, + "acc_norm_stderr": 0.030676096599389188 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5363636363636364, + "acc_stderr": 0.04776449162396197, + "acc_norm": 0.5363636363636364, + "acc_norm_stderr": 0.04776449162396197 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.4111111111111111, + "acc_stderr": 0.029999923508706675, + "acc_norm": 0.4111111111111111, + "acc_norm_stderr": 0.029999923508706675 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3841059602649007, + "acc_stderr": 0.03971301814719198, + "acc_norm": 0.3841059602649007, + "acc_norm_stderr": 0.03971301814719198 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7114427860696517, + "acc_stderr": 0.03203841040213321, + "acc_norm": 0.7114427860696517, + "acc_norm_stderr": 0.03203841040213321 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.44508670520231214, + "acc_stderr": 
0.03789401760283647, + "acc_norm": 0.44508670520231214, + "acc_norm_stderr": 0.03789401760283647 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.40476190476190477, + "acc_stderr": 0.025279850397404907, + "acc_norm": 0.40476190476190477, + "acc_norm_stderr": 0.025279850397404907 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.04174752578923185, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.04174752578923185 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.7, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.7, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5722543352601156, + "acc_stderr": 0.02663653974111608, + "acc_norm": 0.5722543352601156, + "acc_norm_stderr": 0.02663653974111608 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4785276073619632, + "acc_stderr": 0.03924746876751129, + "acc_norm": 0.4785276073619632, + "acc_norm_stderr": 0.03924746876751129 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5339506172839507, + "acc_stderr": 0.027756535257347663, + "acc_norm": 0.5339506172839507, + "acc_norm_stderr": 0.027756535257347663 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5906735751295337, + "acc_stderr": 0.03548608168860806, + "acc_norm": 0.5906735751295337, + "acc_norm_stderr": 0.03548608168860806 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.35964912280701755, + "acc_stderr": 0.04514496132873633, + "acc_norm": 0.35964912280701755, + "acc_norm_stderr": 0.04514496132873633 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 
0.5779816513761468, + "acc_stderr": 0.02117499140776317, + "acc_norm": 0.5779816513761468, + "acc_norm_stderr": 0.02117499140776317 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.0442626668137991, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.0442626668137991 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5424836601307189, + "acc_stderr": 0.028526383452142638, + "acc_norm": 0.5424836601307189, + "acc_norm_stderr": 0.028526383452142638 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6694214876033058, + "acc_stderr": 0.04294340845212095, + "acc_norm": 0.6694214876033058, + "acc_norm_stderr": 0.04294340845212095 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4934210526315789, + "acc_stderr": 0.040685900502249704, + "acc_norm": 0.4934210526315789, + "acc_norm_stderr": 0.040685900502249704 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4542483660130719, + "acc_stderr": 0.020142974553795205, + "acc_norm": 0.4542483660130719, + "acc_norm_stderr": 0.020142974553795205 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.35815602836879434, + "acc_stderr": 0.02860208586275942, + "acc_norm": 0.35815602836879434, + "acc_norm_stderr": 0.02860208586275942 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4642857142857143, + "acc_stderr": 0.04733667890053757, + "acc_norm": 0.4642857142857143, + "acc_norm_stderr": 0.04733667890053757 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4212962962962963, + "acc_stderr": 0.033674621388960775, + "acc_norm": 0.4212962962962963, + "acc_norm_stderr": 0.033674621388960775 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2670391061452514, + "acc_stderr": 0.014796502622562551, + "acc_norm": 0.2670391061452514, + "acc_norm_stderr": 0.014796502622562551 + 
}, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.66, + "acc_stderr": 0.04760952285695237, + "acc_norm": 0.66, + "acc_norm_stderr": 0.04760952285695237 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3860294117647059, + "acc_stderr": 0.029573269134411124, + "acc_norm": 0.3860294117647059, + "acc_norm_stderr": 0.029573269134411124 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6530612244897959, + "acc_stderr": 0.030472526026726496, + "acc_norm": 0.6530612244897959, + "acc_norm_stderr": 0.030472526026726496 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6624472573839663, + "acc_stderr": 0.030781549102026216, + "acc_norm": 0.6624472573839663, + "acc_norm_stderr": 0.030781549102026216 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3852672750977836, + "acc_stderr": 0.012429485434955189, + "acc_norm": 0.3852672750977836, + "acc_norm_stderr": 0.012429485434955189 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5833333333333334, + "acc_stderr": 0.03460228327239172, + "acc_norm": 0.5833333333333334, + "acc_norm_stderr": 0.03460228327239172 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6727272727272727, + "acc_stderr": 0.03663974994391242, + "acc_norm": 0.6727272727272727, + "acc_norm_stderr": 0.03663974994391242 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3268053855569155, + "mc1_stderr": 0.016419874731135046, + "mc2": 0.5048647715204716, + "mc2_stderr": 0.01568641542304603 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.42621015348288077, + "acc_stderr": 0.01700212260948926, + "acc_norm": 0.5041322314049587, + "acc_norm_stderr": 0.01718976703213082 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + 
"harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + 
"harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Kukedlc/NeuralLLaMa-3-8b-ORPO-v0.4", + "model_sha": "3f82b5ec749ed3c2b14c55b9c7017018364387c7", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Kukedlc/NeuralSynthesis-7B-v0.1/result_2024-06-12 21:00:10.json b/Kukedlc/NeuralSynthesis-7B-v0.1/result_2024-06-12 21:00:10.json new file mode 100644 index 0000000000000000000000000000000000000000..8815674601154281b60f2792fcd70e8194784cd2 --- /dev/null +++ b/Kukedlc/NeuralSynthesis-7B-v0.1/result_2024-06-12 21:00:10.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.39505119453924914, + "acc_stderr": 0.014285898292938175, + "acc_norm": 0.45563139931740615, + "acc_norm_stderr": 0.01455374993930686 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3950408285202151, + "acc_stderr": 0.004878603699686037, + "acc_norm": 0.5270862378012349, + "acc_norm_stderr": 0.004982454383162071 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.47953216374269003, + "acc_stderr": 0.0383161053282193, + "acc_norm": 0.47953216374269003, + "acc_norm_stderr": 0.0383161053282193 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6213592233009708, + "acc_stderr": 0.04802694698258975, + "acc_norm": 0.6213592233009708, + 
"acc_norm_stderr": 0.04802694698258975 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4648786717752235, + "acc_stderr": 0.01783579880629064, + "acc_norm": 0.4648786717752235, + "acc_norm_stderr": 0.01783579880629064 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37777777777777777, + "acc_stderr": 0.04188307537595853, + "acc_norm": 0.37777777777777777, + "acc_norm_stderr": 0.04188307537595853 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39148936170212767, + "acc_stderr": 0.03190701242326812, + "acc_norm": 0.39148936170212767, + "acc_norm_stderr": 0.03190701242326812 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.35542168674698793, + "acc_stderr": 0.03726214354322415, + "acc_norm": 0.35542168674698793, + "acc_norm_stderr": 0.03726214354322415 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.41479099678456594, + "acc_stderr": 0.02798268045975956, + "acc_norm": 0.41479099678456594, + "acc_norm_stderr": 0.02798268045975956 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4349775784753363, + "acc_stderr": 0.03327283370271344, + "acc_norm": 0.4349775784753363, + "acc_norm_stderr": 0.03327283370271344 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4351145038167939, + "acc_stderr": 0.04348208051644858, + "acc_norm": 0.4351145038167939, + "acc_norm_stderr": 0.04348208051644858 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5606060606060606, + "acc_stderr": 0.0353608594752948, + "acc_norm": 0.5606060606060606, + "acc_norm_stderr": 0.0353608594752948 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.46206896551724136, + "acc_stderr": 0.041546596717075474, + "acc_norm": 0.46206896551724136, + 
"acc_norm_stderr": 0.041546596717075474 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.04158307533083286, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.04158307533083286 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5042016806722689, + "acc_stderr": 0.0324773433444811, + "acc_norm": 0.5042016806722689, + "acc_norm_stderr": 0.0324773433444811 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4794871794871795, + "acc_stderr": 0.025329663163489943, + "acc_norm": 0.4794871794871795, + "acc_norm_stderr": 0.025329663163489943 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.65, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.65, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5185185185185185, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.5185185185185185, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3793103448275862, + "acc_stderr": 0.03413963805906235, + "acc_norm": 0.3793103448275862, + "acc_norm_stderr": 0.03413963805906235 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.43870967741935485, + "acc_stderr": 0.028229497320317216, + "acc_norm": 0.43870967741935485, + "acc_norm_stderr": 0.028229497320317216 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7393162393162394, + "acc_stderr": 0.02876034895652341, + "acc_norm": 0.7393162393162394, + "acc_norm_stderr": 0.02876034895652341 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4641509433962264, + "acc_stderr": 0.030693675018458003, + "acc_norm": 0.4641509433962264, + "acc_norm_stderr": 0.030693675018458003 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4818181818181818, + "acc_stderr": 0.04785964010794917, 
+ "acc_norm": 0.4818181818181818, + "acc_norm_stderr": 0.04785964010794917 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.34814814814814815, + "acc_stderr": 0.029045600290616255, + "acc_norm": 0.34814814814814815, + "acc_norm_stderr": 0.029045600290616255 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2582781456953642, + "acc_stderr": 0.035737053147634576, + "acc_norm": 0.2582781456953642, + "acc_norm_stderr": 0.035737053147634576 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5671641791044776, + "acc_stderr": 0.0350349092367328, + "acc_norm": 0.5671641791044776, + "acc_norm_stderr": 0.0350349092367328 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.41040462427745666, + "acc_stderr": 0.03750757044895537, + "acc_norm": 0.41040462427745666, + "acc_norm_stderr": 0.03750757044895537 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.025107425481137285, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.025107425481137285 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3541666666666667, + "acc_stderr": 0.039994111357535424, + "acc_norm": 0.3541666666666667, + "acc_norm_stderr": 0.039994111357535424 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.55, + "acc_stderr": 0.05, + "acc_norm": 0.55, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.49710982658959535, + "acc_stderr": 0.02691864538323901, + "acc_norm": 0.49710982658959535, + "acc_norm_stderr": 0.02691864538323901 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4723926380368098, + "acc_stderr": 0.039223782906109894, + "acc_norm": 0.4723926380368098, + "acc_norm_stderr": 0.039223782906109894 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4660493827160494, + "acc_stderr": 0.027756535257347663, 
+ "acc_norm": 0.4660493827160494, + "acc_norm_stderr": 0.027756535257347663 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.48704663212435234, + "acc_stderr": 0.03607228061047749, + "acc_norm": 0.48704663212435234, + "acc_norm_stderr": 0.03607228061047749 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.32456140350877194, + "acc_stderr": 0.04404556157374768, + "acc_norm": 0.32456140350877194, + "acc_norm_stderr": 0.04404556157374768 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5027522935779817, + "acc_stderr": 0.021436998359765317, + "acc_norm": 0.5027522935779817, + "acc_norm_stderr": 0.021436998359765317 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4365079365079365, + "acc_stderr": 0.044359328928514664, + "acc_norm": 0.4365079365079365, + "acc_norm_stderr": 0.044359328928514664 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.46078431372549017, + "acc_stderr": 0.028541722692618874, + "acc_norm": 0.46078431372549017, + "acc_norm_stderr": 0.028541722692618874 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5950413223140496, + "acc_stderr": 0.044811377559424694, + "acc_norm": 0.5950413223140496, + "acc_norm_stderr": 0.044811377559424694 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.39473684210526316, + "acc_stderr": 0.039777499346220734, + "acc_norm": 0.39473684210526316, + "acc_norm_stderr": 0.039777499346220734 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3937908496732026, + "acc_stderr": 0.019766211991073066, + "acc_norm": 0.3937908496732026, + "acc_norm_stderr": 0.019766211991073066 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 
0.35815602836879434, + "acc_stderr": 0.028602085862759415, + "acc_norm": 0.35815602836879434, + "acc_norm_stderr": 0.028602085862759415 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.38392857142857145, + "acc_stderr": 0.04616143075028547, + "acc_norm": 0.38392857142857145, + "acc_norm_stderr": 0.04616143075028547 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.033247089118091176, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.033247089118091176 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.28156424581005585, + "acc_stderr": 0.01504229017186611, + "acc_norm": 0.28156424581005585, + "acc_norm_stderr": 0.01504229017186611 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.67, + "acc_stderr": 0.047258156262526094, + "acc_norm": 0.67, + "acc_norm_stderr": 0.047258156262526094 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3272058823529412, + "acc_stderr": 0.028501452860396563, + "acc_norm": 0.3272058823529412, + "acc_norm_stderr": 0.028501452860396563 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5306122448979592, + "acc_stderr": 0.031949171367580624, + "acc_norm": 0.5306122448979592, + "acc_norm_stderr": 0.031949171367580624 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5907172995780591, + "acc_stderr": 0.03200704183359591, + "acc_norm": 0.5907172995780591, + "acc_norm_stderr": 0.03200704183359591 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.34224250325945244, + "acc_stderr": 0.01211793999870587, + "acc_norm": 0.34224250325945244, + "acc_norm_stderr": 0.01211793999870587 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4215686274509804, + "acc_stderr": 0.03465868196380757, + "acc_norm": 0.4215686274509804, + "acc_norm_stderr": 
0.03465868196380757 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4303030303030303, + "acc_stderr": 0.03866225962879077, + "acc_norm": 0.4303030303030303, + "acc_norm_stderr": 0.03866225962879077 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.423500611995104, + "mc1_stderr": 0.01729742144853472, + "mc2": 0.6006281373017482, + "mc2_stderr": 0.016271469703386037 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.42502951593860683, + "acc_stderr": 0.01699601630836289, + "acc_norm": 0.4297520661157025, + "acc_norm_stderr": 0.01701984753597221 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + 
"harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Kukedlc/NeuralSynthesis-7B-v0.1", + "model_sha": "547a5dc8963e127a9638256bb80eb3a36da1cc5d", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Kukedlc/NeuralSynthesis-7B-v0.3/result_2024-07-29 21:34:16.json b/Kukedlc/NeuralSynthesis-7B-v0.3/result_2024-07-29 21:34:16.json new file mode 100644 index 0000000000000000000000000000000000000000..fab2c159de2f70119c25d4c50779ab33e18f6b29 --- /dev/null +++ b/Kukedlc/NeuralSynthesis-7B-v0.3/result_2024-07-29 21:34:16.json @@ -0,0 +1,444 @@ +{ + 
"results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3924914675767918, + "acc_stderr": 0.01426963463567071, + "acc_norm": 0.4590443686006826, + "acc_norm_stderr": 0.014562291073601227 + }, + "harness|ko_hellaswag|10": { + "acc": 0.39583748257319257, + "acc_stderr": 0.004880303863138504, + "acc_norm": 0.5265883290181239, + "acc_norm_stderr": 0.004982721472407342 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.47368421052631576, + "acc_stderr": 0.038295098689947286, + "acc_norm": 0.47368421052631576, + "acc_norm_stderr": 0.038295098689947286 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6213592233009708, + "acc_stderr": 0.04802694698258975, + "acc_norm": 0.6213592233009708, + "acc_norm_stderr": 0.04802694698258975 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.46360153256704983, + "acc_stderr": 0.01783252407959326, + "acc_norm": 0.46360153256704983, + "acc_norm_stderr": 0.01783252407959326 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37777777777777777, + "acc_stderr": 0.04188307537595853, + "acc_norm": 0.37777777777777777, + "acc_norm_stderr": 0.04188307537595853 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.40425531914893614, + "acc_stderr": 0.032081157507886836, + "acc_norm": 0.40425531914893614, + "acc_norm_stderr": 0.032081157507886836 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.35542168674698793, + "acc_stderr": 0.03726214354322415, + "acc_norm": 0.35542168674698793, + "acc_norm_stderr": 0.03726214354322415 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.41479099678456594, + "acc_stderr": 0.02798268045975956, + "acc_norm": 0.41479099678456594, + "acc_norm_stderr": 0.02798268045975956 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4260089686098655, + "acc_stderr": 0.0331883328621728, + "acc_norm": 0.4260089686098655, + "acc_norm_stderr": 
0.0331883328621728 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4351145038167939, + "acc_stderr": 0.04348208051644858, + "acc_norm": 0.4351145038167939, + "acc_norm_stderr": 0.04348208051644858 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.45, + "acc_stderr": 0.04999999999999999, + "acc_norm": 0.45, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5505050505050505, + "acc_stderr": 0.035441324919479704, + "acc_norm": 0.5505050505050505, + "acc_norm_stderr": 0.035441324919479704 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.45517241379310347, + "acc_stderr": 0.04149886942192117, + "acc_norm": 0.45517241379310347, + "acc_norm_stderr": 0.04149886942192117 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.04158307533083286, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.04158307533083286 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5042016806722689, + "acc_stderr": 0.0324773433444811, + "acc_norm": 0.5042016806722689, + "acc_norm_stderr": 0.0324773433444811 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.48717948717948717, + "acc_stderr": 0.025342671293807267, + "acc_norm": 0.48717948717948717, + "acc_norm_stderr": 0.025342671293807267 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.65, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.65, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5185185185185185, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.5185185185185185, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.37438423645320196, + "acc_stderr": 0.03405155380561952, + "acc_norm": 0.37438423645320196, + 
"acc_norm_stderr": 0.03405155380561952 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.43548387096774194, + "acc_stderr": 0.028206225591502737, + "acc_norm": 0.43548387096774194, + "acc_norm_stderr": 0.028206225591502737 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7393162393162394, + "acc_stderr": 0.02876034895652341, + "acc_norm": 0.7393162393162394, + "acc_norm_stderr": 0.02876034895652341 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4528301886792453, + "acc_stderr": 0.03063562795796183, + "acc_norm": 0.4528301886792453, + "acc_norm_stderr": 0.03063562795796183 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4818181818181818, + "acc_stderr": 0.04785964010794917, + "acc_norm": 0.4818181818181818, + "acc_norm_stderr": 0.04785964010794917 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.34444444444444444, + "acc_stderr": 0.028972648884844267, + "acc_norm": 0.34444444444444444, + "acc_norm_stderr": 0.028972648884844267 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.25165562913907286, + "acc_stderr": 0.03543304234389985, + "acc_norm": 0.25165562913907286, + "acc_norm_stderr": 0.03543304234389985 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.572139303482587, + "acc_stderr": 0.03498541988407795, + "acc_norm": 0.572139303482587, + "acc_norm_stderr": 0.03498541988407795 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4161849710982659, + "acc_stderr": 0.03758517775404947, + "acc_norm": 0.4161849710982659, + "acc_norm_stderr": 0.03758517775404947 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3835978835978836, + "acc_stderr": 0.025043757318520196, + "acc_norm": 0.3835978835978836, + "acc_norm_stderr": 0.025043757318520196 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3472222222222222, + "acc_stderr": 0.039812405437178615, + "acc_norm": 0.3472222222222222, + "acc_norm_stderr": 0.039812405437178615 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.34, 
+ "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.56, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.56, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5028901734104047, + "acc_stderr": 0.02691864538323901, + "acc_norm": 0.5028901734104047, + "acc_norm_stderr": 0.02691864538323901 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4723926380368098, + "acc_stderr": 0.039223782906109894, + "acc_norm": 0.4723926380368098, + "acc_norm_stderr": 0.039223782906109894 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4660493827160494, + "acc_stderr": 0.027756535257347663, + "acc_norm": 0.4660493827160494, + "acc_norm_stderr": 0.027756535257347663 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.48704663212435234, + "acc_stderr": 0.03607228061047749, + "acc_norm": 0.48704663212435234, + "acc_norm_stderr": 0.03607228061047749 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.32456140350877194, + "acc_stderr": 0.04404556157374768, + "acc_norm": 0.32456140350877194, + "acc_norm_stderr": 0.04404556157374768 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5009174311926605, + "acc_stderr": 0.021437287056051208, + "acc_norm": 0.5009174311926605, + "acc_norm_stderr": 0.021437287056051208 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4365079365079365, + "acc_stderr": 0.044359328928514664, + "acc_norm": 0.4365079365079365, + "acc_norm_stderr": 0.044359328928514664 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4673202614379085, + "acc_stderr": 0.028568699752225882, + "acc_norm": 0.4673202614379085, + "acc_norm_stderr": 0.028568699752225882 + }, + "harness|ko_mmlu_business_ethics|5": { + 
"acc": 0.49, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6033057851239669, + "acc_stderr": 0.044658697805310094, + "acc_norm": 0.6033057851239669, + "acc_norm_stderr": 0.044658697805310094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4144736842105263, + "acc_stderr": 0.04008973785779206, + "acc_norm": 0.4144736842105263, + "acc_norm_stderr": 0.04008973785779206 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.39215686274509803, + "acc_stderr": 0.019751726508762637, + "acc_norm": 0.39215686274509803, + "acc_norm_stderr": 0.019751726508762637 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3617021276595745, + "acc_stderr": 0.028663820147199492, + "acc_norm": 0.3617021276595745, + "acc_norm_stderr": 0.028663820147199492 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.39285714285714285, + "acc_stderr": 0.04635550135609976, + "acc_norm": 0.39285714285714285, + "acc_norm_stderr": 0.04635550135609976 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.033247089118091176, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.033247089118091176 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.28156424581005585, + "acc_stderr": 0.01504229017186611, + "acc_norm": 0.28156424581005585, + "acc_norm_stderr": 0.01504229017186611 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.67, + "acc_stderr": 0.047258156262526094, + "acc_norm": 0.67, + "acc_norm_stderr": 0.047258156262526094 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3235294117647059, + "acc_stderr": 0.028418208619406794, + "acc_norm": 0.3235294117647059, + "acc_norm_stderr": 0.028418208619406794 + }, + 
"harness|ko_mmlu_security_studies|5": { + "acc": 0.5224489795918368, + "acc_stderr": 0.031976941187136725, + "acc_norm": 0.5224489795918368, + "acc_norm_stderr": 0.031976941187136725 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5864978902953587, + "acc_stderr": 0.03205649904851858, + "acc_norm": 0.5864978902953587, + "acc_norm_stderr": 0.03205649904851858 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3428943937418514, + "acc_stderr": 0.012123463271585892, + "acc_norm": 0.3428943937418514, + "acc_norm_stderr": 0.012123463271585892 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4215686274509804, + "acc_stderr": 0.03465868196380757, + "acc_norm": 0.4215686274509804, + "acc_norm_stderr": 0.03465868196380757 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.45454545454545453, + "acc_stderr": 0.03888176921674098, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.03888176921674098 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.4222766217870257, + "mc1_stderr": 0.017290733254248167, + "mc2": 0.5988874871363755, + "mc2_stderr": 0.016282516281031466 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.42384887839433294, + "acc_stderr": 0.01698981083462825, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.01701403811929748 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, 
+ "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + 
"harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Kukedlc/NeuralSynthesis-7B-v0.3", + "model_sha": "090fab29146f8e55066bce2f5f5859ab2d6027f4", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Kukedlc/NeuralSynthesis-7b-v0.4-slerp/result_2024-05-30 09:33:22.json b/Kukedlc/NeuralSynthesis-7b-v0.4-slerp/result_2024-05-30 09:33:22.json new file mode 100644 index 0000000000000000000000000000000000000000..2df8bfc8e77f700673e639831d14cdaa64373ea0 --- /dev/null +++ b/Kukedlc/NeuralSynthesis-7b-v0.4-slerp/result_2024-05-30 09:33:22.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.39419795221843, + "acc_stderr": 0.014280522667467325, + "acc_norm": 0.45563139931740615, + "acc_norm_stderr": 0.014553749939306861 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3950408285202151, + "acc_stderr": 0.004878603699686037, + "acc_norm": 0.526090420235013, + "acc_norm_stderr": 0.004982983592459188 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.47953216374269003, + "acc_stderr": 0.0383161053282193, + "acc_norm": 0.47953216374269003, + "acc_norm_stderr": 0.0383161053282193 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6213592233009708, + "acc_stderr": 0.04802694698258975, + "acc_norm": 0.6213592233009708, + "acc_norm_stderr": 0.04802694698258975 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.45849297573435505, + "acc_stderr": 0.017818248603465557, + "acc_norm": 0.45849297573435505, + "acc_norm_stderr": 0.017818248603465557 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37777777777777777, + "acc_stderr": 0.04188307537595853, + "acc_norm": 0.37777777777777777, + "acc_norm_stderr": 0.04188307537595853 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 
0.0446196043338474 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4, + "acc_stderr": 0.03202563076101736, + "acc_norm": 0.4, + "acc_norm_stderr": 0.03202563076101736 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.35542168674698793, + "acc_stderr": 0.03726214354322415, + "acc_norm": 0.35542168674698793, + "acc_norm_stderr": 0.03726214354322415 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4180064308681672, + "acc_stderr": 0.02801365189199507, + "acc_norm": 0.4180064308681672, + "acc_norm_stderr": 0.02801365189199507 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4304932735426009, + "acc_stderr": 0.033231973029429394, + "acc_norm": 0.4304932735426009, + "acc_norm_stderr": 0.033231973029429394 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4351145038167939, + "acc_stderr": 0.04348208051644858, + "acc_norm": 0.4351145038167939, + "acc_norm_stderr": 0.04348208051644858 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.035402943770953675, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.035402943770953675 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.45517241379310347, + "acc_stderr": 0.04149886942192117, + "acc_norm": 0.45517241379310347, + "acc_norm_stderr": 0.04149886942192117 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237655, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237655 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5084033613445378, + "acc_stderr": 0.0324739027656967, + "acc_norm": 0.5084033613445378, + "acc_norm_stderr": 0.0324739027656967 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4794871794871795, + "acc_stderr": 0.025329663163489943, + "acc_norm": 
0.4794871794871795, + "acc_norm_stderr": 0.025329663163489943 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.64, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.64, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5185185185185185, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.5185185185185185, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3793103448275862, + "acc_stderr": 0.03413963805906235, + "acc_norm": 0.3793103448275862, + "acc_norm_stderr": 0.03413963805906235 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.43548387096774194, + "acc_stderr": 0.028206225591502737, + "acc_norm": 0.43548387096774194, + "acc_norm_stderr": 0.028206225591502737 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7350427350427351, + "acc_stderr": 0.028911208802749465, + "acc_norm": 0.7350427350427351, + "acc_norm_stderr": 0.028911208802749465 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4641509433962264, + "acc_stderr": 0.030693675018458003, + "acc_norm": 0.4641509433962264, + "acc_norm_stderr": 0.030693675018458003 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4818181818181818, + "acc_stderr": 0.04785964010794917, + "acc_norm": 0.4818181818181818, + "acc_norm_stderr": 0.04785964010794917 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.34444444444444444, + "acc_stderr": 0.028972648884844267, + "acc_norm": 0.34444444444444444, + "acc_norm_stderr": 0.028972648884844267 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.26490066225165565, + "acc_stderr": 0.036030385453603826, + "acc_norm": 0.26490066225165565, + "acc_norm_stderr": 0.036030385453603826 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.572139303482587, + "acc_stderr": 
0.03498541988407795, + "acc_norm": 0.572139303482587, + "acc_norm_stderr": 0.03498541988407795 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.41040462427745666, + "acc_stderr": 0.03750757044895537, + "acc_norm": 0.41040462427745666, + "acc_norm_stderr": 0.03750757044895537 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3862433862433862, + "acc_stderr": 0.02507598176760168, + "acc_norm": 0.3862433862433862, + "acc_norm_stderr": 0.02507598176760168 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3541666666666667, + "acc_stderr": 0.039994111357535424, + "acc_norm": 0.3541666666666667, + "acc_norm_stderr": 0.039994111357535424 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5, + "acc_stderr": 0.026919095102908273, + "acc_norm": 0.5, + "acc_norm_stderr": 0.026919095102908273 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4723926380368098, + "acc_stderr": 0.039223782906109894, + "acc_norm": 0.4723926380368098, + "acc_norm_stderr": 0.039223782906109894 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.45987654320987653, + "acc_stderr": 0.027731022753539277, + "acc_norm": 0.45987654320987653, + "acc_norm_stderr": 0.027731022753539277 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.49222797927461137, + "acc_stderr": 0.036080032255696545, + "acc_norm": 0.49222797927461137, + "acc_norm_stderr": 0.036080032255696545 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3157894736842105, + "acc_stderr": 
0.04372748290278007, + "acc_norm": 0.3157894736842105, + "acc_norm_stderr": 0.04372748290278007 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5045871559633027, + "acc_stderr": 0.02143642095552942, + "acc_norm": 0.5045871559633027, + "acc_norm_stderr": 0.02143642095552942 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.044444444444444495, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.044444444444444495 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.45751633986928103, + "acc_stderr": 0.028526383452142635, + "acc_norm": 0.45751633986928103, + "acc_norm_stderr": 0.028526383452142635 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5950413223140496, + "acc_stderr": 0.044811377559424694, + "acc_norm": 0.5950413223140496, + "acc_norm_stderr": 0.044811377559424694 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4144736842105263, + "acc_stderr": 0.04008973785779206, + "acc_norm": 0.4144736842105263, + "acc_norm_stderr": 0.04008973785779206 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.39052287581699346, + "acc_stderr": 0.0197370089980946, + "acc_norm": 0.39052287581699346, + "acc_norm_stderr": 0.0197370089980946 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3617021276595745, + "acc_stderr": 0.028663820147199492, + "acc_norm": 0.3617021276595745, + "acc_norm_stderr": 0.028663820147199492 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.38392857142857145, + "acc_stderr": 0.04616143075028547, + "acc_norm": 0.38392857142857145, + "acc_norm_stderr": 0.04616143075028547 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.033247089118091176, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.033247089118091176 + }, + 
"harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.28268156424581004, + "acc_stderr": 0.015060381730018104, + "acc_norm": 0.28268156424581004, + "acc_norm_stderr": 0.015060381730018104 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.67, + "acc_stderr": 0.047258156262526094, + "acc_norm": 0.67, + "acc_norm_stderr": 0.047258156262526094 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3235294117647059, + "acc_stderr": 0.028418208619406794, + "acc_norm": 0.3235294117647059, + "acc_norm_stderr": 0.028418208619406794 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5224489795918368, + "acc_stderr": 0.031976941187136725, + "acc_norm": 0.5224489795918368, + "acc_norm_stderr": 0.031976941187136725 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5907172995780591, + "acc_stderr": 0.03200704183359591, + "acc_norm": 0.5907172995780591, + "acc_norm_stderr": 0.03200704183359591 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.34224250325945244, + "acc_stderr": 0.01211793999870587, + "acc_norm": 0.34224250325945244, + "acc_norm_stderr": 0.01211793999870587 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.0346022832723917, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.0346022832723917 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.43636363636363634, + "acc_stderr": 0.03872592983524753, + "acc_norm": 0.43636363636363634, + "acc_norm_stderr": 0.03872592983524753 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.423500611995104, + "mc1_stderr": 0.017297421448534724, + "mc2": 0.5985025112028013, + "mc2_stderr": 0.016283467022800552 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.42266824085005905, + "acc_stderr": 0.016983506079577604, + "acc_norm": 
0.4297520661157025, + "acc_norm_stderr": 0.01701984753597221 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + 
"harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Kukedlc/NeuralSynthesis-7b-v0.4-slerp", + "model_sha": "bb3bd36fce162f472668dbd91960cd1525b45f30", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Kukedlc/Triunvirato-7b/result_2024-02-16 21:31:08.json b/Kukedlc/Triunvirato-7b/result_2024-02-16 21:31:08.json new file mode 100644 index 0000000000000000000000000000000000000000..2d39648197ad5c4494dabc38abed356a82e2de5d --- /dev/null +++ b/Kukedlc/Triunvirato-7b/result_2024-02-16 21:31:08.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.378839590443686, + "acc_stderr": 0.014175915490000322, + "acc_norm": 0.42918088737201365, + "acc_norm_stderr": 0.014464085894870655 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3957379008165704, + "acc_stderr": 0.004880092083408038, + "acc_norm": 0.518621788488349, + "acc_norm_stderr": 0.004986319587524962 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4619883040935672, + "acc_stderr": 0.03823727092882307, + "acc_norm": 0.4619883040935672, + "acc_norm_stderr": 
0.03823727092882307 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6601941747572816, + "acc_stderr": 0.04689765937278132, + "acc_norm": 0.6601941747572816, + "acc_norm_stderr": 0.04689765937278132 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.46998722860791825, + "acc_stderr": 0.017847723086649114, + "acc_norm": 0.46998722860791825, + "acc_norm_stderr": 0.017847723086649114 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3925925925925926, + "acc_stderr": 0.04218506215368879, + "acc_norm": 0.3925925925925926, + "acc_norm_stderr": 0.04218506215368879 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39148936170212767, + "acc_stderr": 0.03190701242326812, + "acc_norm": 0.39148936170212767, + "acc_norm_stderr": 0.03190701242326812 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3855421686746988, + "acc_stderr": 0.03789134424611549, + "acc_norm": 0.3855421686746988, + "acc_norm_stderr": 0.03789134424611549 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.47266881028938906, + "acc_stderr": 0.02835563356832818, + "acc_norm": 0.47266881028938906, + "acc_norm_stderr": 0.02835563356832818 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.452914798206278, + "acc_stderr": 0.03340867501923325, + "acc_norm": 0.452914798206278, + "acc_norm_stderr": 0.03340867501923325 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.44274809160305345, + "acc_stderr": 0.043564472026650695, + "acc_norm": 0.44274809160305345, + "acc_norm_stderr": 0.043564472026650695 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939098, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939098 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5858585858585859, + "acc_stderr": 0.03509438348879629, + "acc_norm": 0.5858585858585859, + "acc_norm_stderr": 
0.03509438348879629 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4689655172413793, + "acc_stderr": 0.04158632762097828, + "acc_norm": 0.4689655172413793, + "acc_norm_stderr": 0.04158632762097828 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.04220773659171452, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.04220773659171452 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5168067226890757, + "acc_stderr": 0.03246013680375308, + "acc_norm": 0.5168067226890757, + "acc_norm_stderr": 0.03246013680375308 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5, + "acc_stderr": 0.02535100632816969, + "acc_norm": 0.5, + "acc_norm_stderr": 0.02535100632816969 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5370370370370371, + "acc_stderr": 0.04820403072760627, + "acc_norm": 0.5370370370370371, + "acc_norm_stderr": 0.04820403072760627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3793103448275862, + "acc_stderr": 0.034139638059062345, + "acc_norm": 0.3793103448275862, + "acc_norm_stderr": 0.034139638059062345 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4290322580645161, + "acc_stderr": 0.028156036538233217, + "acc_norm": 0.4290322580645161, + "acc_norm_stderr": 0.028156036538233217 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7606837606837606, + "acc_stderr": 0.027951826808924333, + "acc_norm": 0.7606837606837606, + "acc_norm_stderr": 0.027951826808924333 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.47547169811320755, + "acc_stderr": 0.030735822206205615, + "acc_norm": 0.47547169811320755, + 
"acc_norm_stderr": 0.030735822206205615 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5181818181818182, + "acc_stderr": 0.04785964010794915, + "acc_norm": 0.5181818181818182, + "acc_norm_stderr": 0.04785964010794915 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.31851851851851853, + "acc_stderr": 0.02840653309060846, + "acc_norm": 0.31851851851851853, + "acc_norm_stderr": 0.02840653309060846 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + "acc_stderr": 0.03684881521389023, + "acc_norm": 0.2847682119205298, + "acc_norm_stderr": 0.03684881521389023 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.572139303482587, + "acc_stderr": 0.03498541988407795, + "acc_norm": 0.572139303482587, + "acc_norm_stderr": 0.03498541988407795 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.41040462427745666, + "acc_stderr": 0.03750757044895538, + "acc_norm": 0.41040462427745666, + "acc_norm_stderr": 0.03750757044895538 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.37566137566137564, + "acc_stderr": 0.024942368931159795, + "acc_norm": 0.37566137566137564, + "acc_norm_stderr": 0.024942368931159795 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3263888888888889, + "acc_stderr": 0.03921067198982266, + "acc_norm": 0.3263888888888889, + "acc_norm_stderr": 0.03921067198982266 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.52, + "acc_stderr": 0.05021167315686781, + "acc_norm": 0.52, + "acc_norm_stderr": 0.05021167315686781 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5115606936416185, + "acc_stderr": 0.026911898686377927, + "acc_norm": 0.5115606936416185, + "acc_norm_stderr": 0.026911898686377927 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.49079754601226994, + "acc_stderr": 0.03927705600787443, + 
"acc_norm": 0.49079754601226994, + "acc_norm_stderr": 0.03927705600787443 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.02780165621232366, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.02780165621232366 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5077720207253886, + "acc_stderr": 0.03608003225569653, + "acc_norm": 0.5077720207253886, + "acc_norm_stderr": 0.03608003225569653 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04434600701584925, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04434600701584925 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5192660550458715, + "acc_stderr": 0.02142140298254889, + "acc_norm": 0.5192660550458715, + "acc_norm_stderr": 0.02142140298254889 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4126984126984127, + "acc_stderr": 0.04403438954768177, + "acc_norm": 0.4126984126984127, + "acc_norm_stderr": 0.04403438954768177 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4869281045751634, + "acc_stderr": 0.028620130800700246, + "acc_norm": 0.4869281045751634, + "acc_norm_stderr": 0.028620130800700246 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5785123966942148, + "acc_stderr": 0.04507732278775088, + "acc_norm": 0.5785123966942148, + "acc_norm_stderr": 0.04507732278775088 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40131578947368424, + "acc_stderr": 0.03988903703336285, + "acc_norm": 0.40131578947368424, + "acc_norm_stderr": 0.03988903703336285 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4150326797385621, + 
"acc_stderr": 0.01993362777685742, + "acc_norm": 0.4150326797385621, + "acc_norm_stderr": 0.01993362777685742 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.41134751773049644, + "acc_stderr": 0.02935491115994097, + "acc_norm": 0.41134751773049644, + "acc_norm_stderr": 0.02935491115994097 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4017857142857143, + "acc_stderr": 0.04653333146973646, + "acc_norm": 0.4017857142857143, + "acc_norm_stderr": 0.04653333146973646 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.03350991604696044, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.03350991604696044 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27932960893854747, + "acc_stderr": 0.015005762446786164, + "acc_norm": 0.27932960893854747, + "acc_norm_stderr": 0.015005762446786164 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.65, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.65, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3492647058823529, + "acc_stderr": 0.028959755196824866, + "acc_norm": 0.3492647058823529, + "acc_norm_stderr": 0.028959755196824866 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5714285714285714, + "acc_stderr": 0.03168091161233882, + "acc_norm": 0.5714285714285714, + "acc_norm_stderr": 0.03168091161233882 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6329113924050633, + "acc_stderr": 0.0313762407256162, + "acc_norm": 0.6329113924050633, + "acc_norm_stderr": 0.0313762407256162 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.34419817470664926, + "acc_stderr": 0.012134433741002574, + "acc_norm": 0.34419817470664926, + "acc_norm_stderr": 0.012134433741002574 + }, + 
"harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.44607843137254904, + "acc_stderr": 0.03488845451304974, + "acc_norm": 0.44607843137254904, + "acc_norm_stderr": 0.03488845451304974 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.47878787878787876, + "acc_stderr": 0.03900828913737301, + "acc_norm": 0.47878787878787876, + "acc_norm_stderr": 0.03900828913737301 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.39412484700122397, + "mc1_stderr": 0.017106588140700325, + "mc2": 0.5611074316819452, + "mc2_stderr": 0.01612052625366567 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.45690672963400236, + "acc_stderr": 0.017126389093086784, + "acc_norm": 0.4651711924439197, + "acc_norm_stderr": 0.017148598015747425 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + 
"harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Kukedlc/Triunvirato-7b", + "model_sha": "b0ad1f15bef4dac9645a2815143ee55e56809a8d", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/LDCC/LDCC-Instruct-Llama-2-ko-13B-v1.0/result_2023-10-29 22:16:00.json b/LDCC/LDCC-Instruct-Llama-2-ko-13B-v1.0/result_2023-10-29 22:16:00.json new file mode 100644 index 
0000000000000000000000000000000000000000..b8215aafe7269fe4986aa3958b599fea74422df3 --- /dev/null +++ b/LDCC/LDCC-Instruct-Llama-2-ko-13B-v1.0/result_2023-10-29 22:16:00.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.386518771331058, + "acc_stderr": 0.014230084761910471, + "acc_norm": 0.44880546075085326, + "acc_norm_stderr": 0.014534599585097667 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4251145190201155, + "acc_stderr": 0.0049335002616835944, + "acc_norm": 0.5650268870742879, + "acc_norm_stderr": 0.004947402907996247 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5029239766081871, + "acc_stderr": 0.03834759370936839, + "acc_norm": 0.5029239766081871, + "acc_norm_stderr": 0.03834759370936839 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5825242718446602, + "acc_stderr": 0.048828405482122375, + "acc_norm": 0.5825242718446602, + "acc_norm_stderr": 0.048828405482122375 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5606641123882503, + "acc_stderr": 0.0177478742456836, + "acc_norm": 0.5606641123882503, + "acc_norm_stderr": 0.0177478742456836 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45925925925925926, + "acc_stderr": 0.04304979692464243, + "acc_norm": 0.45925925925925926, + "acc_norm_stderr": 0.04304979692464243 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847415, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847415 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39148936170212767, + "acc_stderr": 0.03190701242326812, + "acc_norm": 0.39148936170212767, + "acc_norm_stderr": 0.03190701242326812 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42771084337349397, + "acc_stderr": 0.03851597683718533, + "acc_norm": 0.42771084337349397, + "acc_norm_stderr": 0.03851597683718533 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5209003215434084, + "acc_stderr": 0.028373270961069414, + "acc_norm": 0.5209003215434084, + 
"acc_norm_stderr": 0.028373270961069414 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5022421524663677, + "acc_stderr": 0.033557465352232634, + "acc_norm": 0.5022421524663677, + "acc_norm_stderr": 0.033557465352232634 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5114503816793893, + "acc_stderr": 0.043841400240780176, + "acc_norm": 0.5114503816793893, + "acc_norm_stderr": 0.043841400240780176 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5757575757575758, + "acc_stderr": 0.03521224908841586, + "acc_norm": 0.5757575757575758, + "acc_norm_stderr": 0.03521224908841586 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.43448275862068964, + "acc_stderr": 0.04130740879555497, + "acc_norm": 0.43448275862068964, + "acc_norm_stderr": 0.04130740879555497 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.043364327079931785, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.043364327079931785 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5126050420168067, + "acc_stderr": 0.032468167657521745, + "acc_norm": 0.5126050420168067, + "acc_norm_stderr": 0.032468167657521745 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4717948717948718, + "acc_stderr": 0.025310639254933914, + "acc_norm": 0.4717948717948718, + "acc_norm_stderr": 0.025310639254933914 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + 
"acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4482758620689655, + "acc_stderr": 0.034991131376767445, + "acc_norm": 0.4482758620689655, + "acc_norm_stderr": 0.034991131376767445 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4806451612903226, + "acc_stderr": 0.0284226874043121, + "acc_norm": 0.4806451612903226, + "acc_norm_stderr": 0.0284226874043121 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6239316239316239, + "acc_stderr": 0.03173393632969481, + "acc_norm": 0.6239316239316239, + "acc_norm_stderr": 0.03173393632969481 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.45660377358490567, + "acc_stderr": 0.03065674869673943, + "acc_norm": 0.45660377358490567, + "acc_norm_stderr": 0.03065674869673943 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.04769300568972744, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.04769300568972744 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2851851851851852, + "acc_stderr": 0.027528599210340492, + "acc_norm": 0.2851851851851852, + "acc_norm_stderr": 0.027528599210340492 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.03757949922943343, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.03757949922943343 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5771144278606966, + "acc_stderr": 0.034932317774212816, + "acc_norm": 0.5771144278606966, + "acc_norm_stderr": 0.034932317774212816 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4393063583815029, + "acc_stderr": 0.037842719328874674, + "acc_norm": 0.4393063583815029, + "acc_norm_stderr": 0.037842719328874674 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3253968253968254, + "acc_stderr": 0.024130158299762613, + "acc_norm": 0.3253968253968254, + "acc_norm_stderr": 0.024130158299762613 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 
0.4652777777777778, + "acc_stderr": 0.04171115858181618, + "acc_norm": 0.4652777777777778, + "acc_norm_stderr": 0.04171115858181618 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.64, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.64, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5057803468208093, + "acc_stderr": 0.026917296179149116, + "acc_norm": 0.5057803468208093, + "acc_norm_stderr": 0.026917296179149116 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.50920245398773, + "acc_stderr": 0.03927705600787443, + "acc_norm": 0.50920245398773, + "acc_norm_stderr": 0.03927705600787443 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.49691358024691357, + "acc_stderr": 0.027820214158594377, + "acc_norm": 0.49691358024691357, + "acc_norm_stderr": 0.027820214158594377 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5854922279792746, + "acc_stderr": 0.035553003195576686, + "acc_norm": 0.5854922279792746, + "acc_norm_stderr": 0.035553003195576686 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.22807017543859648, + "acc_stderr": 0.03947152782669415, + "acc_norm": 0.22807017543859648, + "acc_norm_stderr": 0.03947152782669415 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6018348623853211, + "acc_stderr": 0.02098798942265426, + "acc_norm": 0.6018348623853211, + "acc_norm_stderr": 0.02098798942265426 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3492063492063492, + "acc_stderr": 0.04263906892795132, + "acc_norm": 0.3492063492063492, + "acc_norm_stderr": 0.04263906892795132 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 
0.45098039215686275, + "acc_stderr": 0.028491993586171566, + "acc_norm": 0.45098039215686275, + "acc_norm_stderr": 0.028491993586171566 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6198347107438017, + "acc_stderr": 0.04431324501968431, + "acc_norm": 0.6198347107438017, + "acc_norm_stderr": 0.04431324501968431 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4276315789473684, + "acc_stderr": 0.04026097083296559, + "acc_norm": 0.4276315789473684, + "acc_norm_stderr": 0.04026097083296559 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.38562091503267976, + "acc_stderr": 0.019691459052354164, + "acc_norm": 0.38562091503267976, + "acc_norm_stderr": 0.019691459052354164 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3475177304964539, + "acc_stderr": 0.02840662780959095, + "acc_norm": 0.3475177304964539, + "acc_norm_stderr": 0.02840662780959095 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.22321428571428573, + "acc_stderr": 0.039523019677025116, + "acc_norm": 0.22321428571428573, + "acc_norm_stderr": 0.039523019677025116 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4351851851851852, + "acc_stderr": 0.033812000056435254, + "acc_norm": 0.4351851851851852, + "acc_norm_stderr": 0.033812000056435254 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.41, + "acc_norm_stderr": 0.04943110704237102 + }, + 
"harness|ko_mmlu_professional_medicine|5": { + "acc": 0.44485294117647056, + "acc_stderr": 0.03018753206032938, + "acc_norm": 0.44485294117647056, + "acc_norm_stderr": 0.03018753206032938 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5183673469387755, + "acc_stderr": 0.03198761546763127, + "acc_norm": 0.5183673469387755, + "acc_norm_stderr": 0.03198761546763127 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6244725738396625, + "acc_stderr": 0.03152256243091156, + "acc_norm": 0.6244725738396625, + "acc_norm_stderr": 0.03152256243091156 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.35723598435462844, + "acc_stderr": 0.012238615750316503, + "acc_norm": 0.35723598435462844, + "acc_norm_stderr": 0.012238615750316503 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5049019607843137, + "acc_stderr": 0.035091433756067866, + "acc_norm": 0.5049019607843137, + "acc_norm_stderr": 0.035091433756067866 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5212121212121212, + "acc_stderr": 0.03900828913737301, + "acc_norm": 0.5212121212121212, + "acc_norm_stderr": 0.03900828913737301 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2974296205630355, + "mc1_stderr": 0.01600265148736101, + "mc2": 0.45982516329816536, + "mc2_stderr": 0.014857750246900359 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.46871310507674147, + "acc_stderr": 0.017156666859785463, + "acc_norm": 0.5442739079102715, + "acc_norm_stderr": 0.017122829143292658 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + 
"harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + 
"harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "LDCC/LDCC-Instruct-Llama-2-ko-13B-v1.0", + "model_sha": "9d704abe2ee490446c4bea6a94692841bdb92ddb", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/LDCC/LDCC-Instruct-Llama-2-ko-13B-v1.1/result_2023-10-30 14:08:29.json b/LDCC/LDCC-Instruct-Llama-2-ko-13B-v1.1/result_2023-10-30 14:08:29.json new file mode 100644 index 0000000000000000000000000000000000000000..de7646b43ee622919257a2307e0bc94874f49f21 --- /dev/null +++ b/LDCC/LDCC-Instruct-Llama-2-ko-13B-v1.1/result_2023-10-30 14:08:29.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3993174061433447, + "acc_stderr": 0.014312094557946704, + "acc_norm": 0.4658703071672355, + "acc_norm_stderr": 0.014577311315231099 + }, + "harness|ko_hellaswag|10": { + "acc": 0.42561242780322645, + "acc_stderr": 0.00493425039087978, + "acc_norm": 0.5683130850428202, + "acc_norm_stderr": 0.004942990623131125 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5087719298245614, + "acc_stderr": 0.038342347441649924, + "acc_norm": 0.5087719298245614, + "acc_norm_stderr": 0.038342347441649924 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5436893203883495, + "acc_stderr": 0.049318019942204146, + "acc_norm": 0.5436893203883495, + "acc_norm_stderr": 0.049318019942204146 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5568326947637292, + "acc_stderr": 0.017764085035348418, + "acc_norm": 0.5568326947637292, + "acc_norm_stderr": 0.017764085035348418 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4666666666666667, + "acc_stderr": 
0.04309732901036354, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.04309732901036354 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4127659574468085, + "acc_stderr": 0.03218471141400351, + "acc_norm": 0.4127659574468085, + "acc_norm_stderr": 0.03218471141400351 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42771084337349397, + "acc_stderr": 0.038515976837185335, + "acc_norm": 0.42771084337349397, + "acc_norm_stderr": 0.038515976837185335 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5080385852090032, + "acc_stderr": 0.028394421370984545, + "acc_norm": 0.5080385852090032, + "acc_norm_stderr": 0.028394421370984545 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.515695067264574, + "acc_stderr": 0.0335412657542081, + "acc_norm": 0.515695067264574, + "acc_norm_stderr": 0.0335412657542081 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48854961832061067, + "acc_stderr": 0.04384140024078016, + "acc_norm": 0.48854961832061067, + "acc_norm_stderr": 0.04384140024078016 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5606060606060606, + "acc_stderr": 0.03536085947529482, + "acc_norm": 0.5606060606060606, + "acc_norm_stderr": 0.03536085947529482 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3793103448275862, + "acc_stderr": 0.04043461861916747, + "acc_norm": 0.3793103448275862, + "acc_norm_stderr": 0.04043461861916747 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.041583075330832865, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.041583075330832865 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 
0.4831932773109244, + "acc_stderr": 0.03246013680375308, + "acc_norm": 0.4831932773109244, + "acc_norm_stderr": 0.03246013680375308 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4358974358974359, + "acc_stderr": 0.025141801511177498, + "acc_norm": 0.4358974358974359, + "acc_norm_stderr": 0.025141801511177498 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.43349753694581283, + "acc_stderr": 0.034867317274198714, + "acc_norm": 0.43349753694581283, + "acc_norm_stderr": 0.034867317274198714 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4645161290322581, + "acc_stderr": 0.028372287797962952, + "acc_norm": 0.4645161290322581, + "acc_norm_stderr": 0.028372287797962952 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6025641025641025, + "acc_stderr": 0.03205953453789293, + "acc_norm": 0.6025641025641025, + "acc_norm_stderr": 0.03205953453789293 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4490566037735849, + "acc_stderr": 0.030612730713641092, + "acc_norm": 0.4490566037735849, + "acc_norm_stderr": 0.030612730713641092 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.04769300568972744, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.04769300568972744 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.026962424325073835, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.026962424325073835 + }, + 
"harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.03780445850526733, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.03780445850526733 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5671641791044776, + "acc_stderr": 0.03503490923673282, + "acc_norm": 0.5671641791044776, + "acc_norm_stderr": 0.03503490923673282 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4277456647398844, + "acc_stderr": 0.03772446857518027, + "acc_norm": 0.4277456647398844, + "acc_norm_stderr": 0.03772446857518027 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2804232804232804, + "acc_stderr": 0.02313528797432563, + "acc_norm": 0.2804232804232804, + "acc_norm_stderr": 0.02313528797432563 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.040166600304512336, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.040166600304512336 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.62, + "acc_stderr": 0.04878317312145634, + "acc_norm": 0.62, + "acc_norm_stderr": 0.04878317312145634 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4884393063583815, + "acc_stderr": 0.02691189868637792, + "acc_norm": 0.4884393063583815, + "acc_norm_stderr": 0.02691189868637792 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.49693251533742333, + "acc_stderr": 0.03928297078179662, + "acc_norm": 0.49693251533742333, + "acc_norm_stderr": 0.03928297078179662 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5030864197530864, + "acc_stderr": 0.02782021415859437, + "acc_norm": 0.5030864197530864, + "acc_norm_stderr": 0.02782021415859437 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + 
"harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5595854922279793, + "acc_stderr": 0.03582724530036094, + "acc_norm": 0.5595854922279793, + "acc_norm_stderr": 0.03582724530036094 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.040969851398436695, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.040969851398436695 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5963302752293578, + "acc_stderr": 0.02103570485657497, + "acc_norm": 0.5963302752293578, + "acc_norm_stderr": 0.02103570485657497 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3412698412698413, + "acc_stderr": 0.04240799327574924, + "acc_norm": 0.3412698412698413, + "acc_norm_stderr": 0.04240799327574924 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.42483660130718953, + "acc_stderr": 0.028304576673141107, + "acc_norm": 0.42483660130718953, + "acc_norm_stderr": 0.028304576673141107 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6198347107438017, + "acc_stderr": 0.04431324501968431, + "acc_norm": 0.6198347107438017, + "acc_norm_stderr": 0.04431324501968431 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.375, + "acc_stderr": 0.039397364351956274, + "acc_norm": 0.375, + "acc_norm_stderr": 0.039397364351956274 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.019722058939618068, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.019722058939618068 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.33687943262411346, + "acc_stderr": 0.02819553487396673, + "acc_norm": 0.33687943262411346, + "acc_norm_stderr": 0.02819553487396673 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.22321428571428573, + "acc_stderr": 0.039523019677025116, + "acc_norm": 0.22321428571428573, + 
"acc_norm_stderr": 0.039523019677025116 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.36574074074074076, + "acc_stderr": 0.03284738857647206, + "acc_norm": 0.36574074074074076, + "acc_norm_stderr": 0.03284738857647206 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4227941176470588, + "acc_stderr": 0.030008562845003472, + "acc_norm": 0.4227941176470588, + "acc_norm_stderr": 0.030008562845003472 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5102040816326531, + "acc_stderr": 0.03200255347893783, + "acc_norm": 0.5102040816326531, + "acc_norm_stderr": 0.03200255347893783 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6329113924050633, + "acc_stderr": 0.03137624072561619, + "acc_norm": 0.6329113924050633, + "acc_norm_stderr": 0.03137624072561619 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.35919165580182527, + "acc_stderr": 0.01225338618758425, + "acc_norm": 0.35919165580182527, + "acc_norm_stderr": 0.01225338618758425 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5, + "acc_stderr": 0.03509312031717982, + "acc_norm": 0.5, + "acc_norm_stderr": 0.03509312031717982 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5393939393939394, + "acc_stderr": 0.03892207016552012, + "acc_norm": 0.5393939393939394, + "acc_norm_stderr": 0.03892207016552012 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2631578947368421, + "mc1_stderr": 
0.015415241740237033, + "mc2": 0.42117238466385504, + "mc2_stderr": 0.01460128908268072 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4769775678866588, + "acc_stderr": 0.017172121546727637, + "acc_norm": 0.5489964580873672, + "acc_norm_stderr": 0.017107618859549346 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + 
"harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "LDCC/LDCC-Instruct-Llama-2-ko-13B-v1.1", + "model_sha": "48bfd4b2fa3fbb12ba5cf4a7b07195f65c998aa7", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/LDCC/LDCC-Instruct-Llama-2-ko-13B-v1.2/result_2023-10-31 01:25:10.json b/LDCC/LDCC-Instruct-Llama-2-ko-13B-v1.2/result_2023-10-31 01:25:10.json new file mode 100644 index 0000000000000000000000000000000000000000..fa09deb23d91d236cefd1aa6ea4f7bc29801aca8 --- /dev/null +++ b/LDCC/LDCC-Instruct-Llama-2-ko-13B-v1.2/result_2023-10-31 01:25:10.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.40017064846416384, + "acc_stderr": 0.014317197787809169, + "acc_norm": 0.45307167235494883, + "acc_norm_stderr": 0.014546892052005628 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4222266480780721, + "acc_stderr": 0.0049290484827604515, 
+ "acc_norm": 0.566620195180243, + "acc_norm_stderr": 0.004945291270072434 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.49122807017543857, + "acc_stderr": 0.038342347441649924, + "acc_norm": 0.49122807017543857, + "acc_norm_stderr": 0.038342347441649924 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.44660194174757284, + "acc_stderr": 0.04922424153458933, + "acc_norm": 0.44660194174757284, + "acc_norm_stderr": 0.04922424153458933 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5389527458492975, + "acc_stderr": 0.01782562179323901, + "acc_norm": 0.5389527458492975, + "acc_norm_stderr": 0.01782562179323901 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45185185185185184, + "acc_stderr": 0.042992689054808624, + "acc_norm": 0.45185185185185184, + "acc_norm_stderr": 0.042992689054808624 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3276595744680851, + "acc_stderr": 0.03068302084323101, + "acc_norm": 0.3276595744680851, + "acc_norm_stderr": 0.03068302084323101 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42168674698795183, + "acc_stderr": 0.03844453181770917, + "acc_norm": 0.42168674698795183, + "acc_norm_stderr": 0.03844453181770917 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4887459807073955, + "acc_stderr": 0.028390897396863526, + "acc_norm": 0.4887459807073955, + "acc_norm_stderr": 0.028390897396863526 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4260089686098655, + "acc_stderr": 0.033188332862172806, + "acc_norm": 0.4260089686098655, + "acc_norm_stderr": 0.033188332862172806 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5114503816793893, + "acc_stderr": 0.043841400240780176, + "acc_norm": 0.5114503816793893, + "acc_norm_stderr": 0.043841400240780176 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.38, + "acc_stderr": 
0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5202020202020202, + "acc_stderr": 0.035594435655639196, + "acc_norm": 0.5202020202020202, + "acc_norm_stderr": 0.035594435655639196 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4413793103448276, + "acc_stderr": 0.04137931034482758, + "acc_norm": 0.4413793103448276, + "acc_norm_stderr": 0.04137931034482758 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.043364327079931785, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.043364327079931785 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.453781512605042, + "acc_stderr": 0.03233943468182088, + "acc_norm": 0.453781512605042, + "acc_norm_stderr": 0.03233943468182088 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4282051282051282, + "acc_stderr": 0.025088301454694834, + "acc_norm": 0.4282051282051282, + "acc_norm_stderr": 0.025088301454694834 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.53, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.53, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39901477832512317, + "acc_stderr": 0.03445487686264715, + "acc_norm": 0.39901477832512317, + "acc_norm_stderr": 0.03445487686264715 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.44516129032258067, + "acc_stderr": 0.028272410186214906, + "acc_norm": 0.44516129032258067, + "acc_norm_stderr": 0.028272410186214906 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5854700854700855, + 
"acc_stderr": 0.03227396567623779, + "acc_norm": 0.5854700854700855, + "acc_norm_stderr": 0.03227396567623779 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4377358490566038, + "acc_stderr": 0.030533338430467512, + "acc_norm": 0.4377358490566038, + "acc_norm_stderr": 0.030533338430467512 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.026962424325073838, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.026962424325073838 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3576158940397351, + "acc_stderr": 0.03913453431177258, + "acc_norm": 0.3576158940397351, + "acc_norm_stderr": 0.03913453431177258 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5870646766169154, + "acc_stderr": 0.03481520803367348, + "acc_norm": 0.5870646766169154, + "acc_norm_stderr": 0.03481520803367348 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3930635838150289, + "acc_stderr": 0.0372424959581773, + "acc_norm": 0.3930635838150289, + "acc_norm_stderr": 0.0372424959581773 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.328042328042328, + "acc_stderr": 0.024180497164376896, + "acc_norm": 0.328042328042328, + "acc_norm_stderr": 0.024180497164376896 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4097222222222222, + "acc_stderr": 0.04112490974670788, + "acc_norm": 0.4097222222222222, + "acc_norm_stderr": 0.04112490974670788 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.66, + "acc_stderr": 0.04760952285695237, + "acc_norm": 0.66, + "acc_norm_stderr": 0.04760952285695237 + }, + "harness|ko_mmlu_moral_disputes|5": { + 
"acc": 0.4884393063583815, + "acc_stderr": 0.026911898686377906, + "acc_norm": 0.4884393063583815, + "acc_norm_stderr": 0.026911898686377906 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5276073619631901, + "acc_stderr": 0.0392237829061099, + "acc_norm": 0.5276073619631901, + "acc_norm_stderr": 0.0392237829061099 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.027777777777777804, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.027777777777777804 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.49222797927461137, + "acc_stderr": 0.036080032255696545, + "acc_norm": 0.49222797927461137, + "acc_norm_stderr": 0.036080032255696545 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.041857744240220575, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.041857744240220575 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5798165137614679, + "acc_stderr": 0.0211624200482735, + "acc_norm": 0.5798165137614679, + "acc_norm_stderr": 0.0211624200482735 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04216370213557835, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04216370213557835 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.41830065359477125, + "acc_stderr": 0.02824513402438729, + "acc_norm": 0.41830065359477125, + "acc_norm_stderr": 0.02824513402438729 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.628099173553719, + "acc_stderr": 0.044120158066245044, + "acc_norm": 0.628099173553719, + "acc_norm_stderr": 0.044120158066245044 + }, + 
"harness|ko_mmlu_astronomy|5": { + "acc": 0.4144736842105263, + "acc_stderr": 0.04008973785779206, + "acc_norm": 0.4144736842105263, + "acc_norm_stderr": 0.04008973785779206 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.369281045751634, + "acc_stderr": 0.01952431674486635, + "acc_norm": 0.369281045751634, + "acc_norm_stderr": 0.01952431674486635 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.36524822695035464, + "acc_stderr": 0.02872386385328128, + "acc_norm": 0.36524822695035464, + "acc_norm_stderr": 0.02872386385328128 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.22321428571428573, + "acc_stderr": 0.03952301967702511, + "acc_norm": 0.22321428571428573, + "acc_norm_stderr": 0.03952301967702511 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.37962962962962965, + "acc_stderr": 0.03309682581119035, + "acc_norm": 0.37962962962962965, + "acc_norm_stderr": 0.03309682581119035 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.44, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.45955882352941174, + "acc_stderr": 0.03027332507734575, + "acc_norm": 0.45955882352941174, + "acc_norm_stderr": 0.03027332507734575 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4489795918367347, + "acc_stderr": 0.03184213866687579, + "acc_norm": 0.4489795918367347, + "acc_norm_stderr": 0.03184213866687579 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5991561181434599, + "acc_stderr": 0.03190080389473236, + "acc_norm": 
0.5991561181434599, + "acc_norm_stderr": 0.03190080389473236 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3670143415906128, + "acc_stderr": 0.01231026424484213, + "acc_norm": 0.3670143415906128, + "acc_norm_stderr": 0.01231026424484213 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4950980392156863, + "acc_stderr": 0.03509143375606785, + "acc_norm": 0.4950980392156863, + "acc_norm_stderr": 0.03509143375606785 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5151515151515151, + "acc_stderr": 0.03902551007374448, + "acc_norm": 0.5151515151515151, + "acc_norm_stderr": 0.03902551007374448 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2631578947368421, + "mc1_stderr": 0.01541524174023703, + "mc2": 0.41898060116595187, + "mc2_stderr": 0.014731537822096375 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4935064935064935, + "acc_stderr": 0.01718890435907731, + "acc_norm": 0.6198347107438017, + "acc_norm_stderr": 0.016689333596980094 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + 
"harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "LDCC/LDCC-Instruct-Llama-2-ko-13B-v1.2", + "model_sha": "01fd24c5633c041fc150a92d285b67a58aa42d1b", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } 
+} \ No newline at end of file diff --git a/LDCC/LDCC-Instruct-Llama-2-ko-13B-v1.3/result_2023-10-31 15:28:25.json b/LDCC/LDCC-Instruct-Llama-2-ko-13B-v1.3/result_2023-10-31 15:28:25.json new file mode 100644 index 0000000000000000000000000000000000000000..639da7d8276778a1ac22c3f512ecc31d80265dcf --- /dev/null +++ b/LDCC/LDCC-Instruct-Llama-2-ko-13B-v1.3/result_2023-10-31 15:28:25.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4121160409556314, + "acc_stderr": 0.014383915302225398, + "acc_norm": 0.46245733788395904, + "acc_norm_stderr": 0.014570144495075574 + }, + "harness|ko_hellaswag|10": { + "acc": 0.42780322644891455, + "acc_stderr": 0.004937490199489467, + "acc_norm": 0.5748854809798845, + "acc_norm_stderr": 0.004933500261683598 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.47953216374269003, + "acc_stderr": 0.0383161053282193, + "acc_norm": 0.47953216374269003, + "acc_norm_stderr": 0.0383161053282193 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.44660194174757284, + "acc_stderr": 0.04922424153458933, + "acc_norm": 0.44660194174757284, + "acc_norm_stderr": 0.04922424153458933 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5517241379310345, + "acc_stderr": 0.01778403453499242, + "acc_norm": 0.5517241379310345, + "acc_norm_stderr": 0.01778403453499242 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.042925967182569816, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.042925967182569816 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.37446808510638296, + "acc_stderr": 0.031639106653672915, + "acc_norm": 0.37446808510638296, + "acc_norm_stderr": 0.031639106653672915 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42168674698795183, + "acc_stderr": 0.038444531817709175, + "acc_norm": 
0.42168674698795183, + "acc_norm_stderr": 0.038444531817709175 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.49517684887459806, + "acc_stderr": 0.02839677044411129, + "acc_norm": 0.49517684887459806, + "acc_norm_stderr": 0.02839677044411129 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4977578475336323, + "acc_stderr": 0.033557465352232634, + "acc_norm": 0.4977578475336323, + "acc_norm_stderr": 0.033557465352232634 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4732824427480916, + "acc_stderr": 0.04379024936553894, + "acc_norm": 0.4732824427480916, + "acc_norm_stderr": 0.04379024936553894 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.035402943770953675, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.035402943770953675 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.43448275862068964, + "acc_stderr": 0.041307408795554966, + "acc_norm": 0.43448275862068964, + "acc_norm_stderr": 0.041307408795554966 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.04389869956808777, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.04389869956808777 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.46218487394957986, + "acc_stderr": 0.032385469487589795, + "acc_norm": 0.46218487394957986, + "acc_norm_stderr": 0.032385469487589795 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.45897435897435895, + "acc_stderr": 0.025265525491284295, + "acc_norm": 0.45897435897435895, + "acc_norm_stderr": 0.025265525491284295 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + 
"acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.35960591133004927, + "acc_stderr": 0.033764582465095665, + "acc_norm": 0.35960591133004927, + "acc_norm_stderr": 0.033764582465095665 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.43870967741935485, + "acc_stderr": 0.028229497320317216, + "acc_norm": 0.43870967741935485, + "acc_norm_stderr": 0.028229497320317216 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5982905982905983, + "acc_stderr": 0.03211693751051621, + "acc_norm": 0.5982905982905983, + "acc_norm_stderr": 0.03211693751051621 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4188679245283019, + "acc_stderr": 0.03036505082911521, + "acc_norm": 0.4188679245283019, + "acc_norm_stderr": 0.03036505082911521 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.0478833976870286, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.0478833976870286 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.026962424325073838, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.026962424325073838 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3509933774834437, + "acc_stderr": 0.03896981964257375, + "acc_norm": 0.3509933774834437, + "acc_norm_stderr": 0.03896981964257375 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6169154228855721, + "acc_stderr": 0.034375193373382504, + "acc_norm": 0.6169154228855721, + "acc_norm_stderr": 0.034375193373382504 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.36416184971098264, + "acc_stderr": 0.03669072477416906, + "acc_norm": 0.36416184971098264, + "acc_norm_stderr": 0.03669072477416906 + }, + 
"harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30158730158730157, + "acc_stderr": 0.023636975996101806, + "acc_norm": 0.30158730158730157, + "acc_norm_stderr": 0.023636975996101806 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.04122728707651282, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.04122728707651282 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.67, + "acc_stderr": 0.04725815626252607, + "acc_norm": 0.67, + "acc_norm_stderr": 0.04725815626252607 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4913294797687861, + "acc_stderr": 0.026915047355369804, + "acc_norm": 0.4913294797687861, + "acc_norm_stderr": 0.026915047355369804 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4723926380368098, + "acc_stderr": 0.039223782906109894, + "acc_norm": 0.4723926380368098, + "acc_norm_stderr": 0.039223782906109894 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.49691358024691357, + "acc_stderr": 0.027820214158594377, + "acc_norm": 0.49691358024691357, + "acc_norm_stderr": 0.027820214158594377 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5025906735751295, + "acc_stderr": 0.03608390745384487, + "acc_norm": 0.5025906735751295, + "acc_norm_stderr": 0.03608390745384487 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.040969851398436695, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.040969851398436695 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5559633027522936, + "acc_stderr": 0.021302621211654518, + "acc_norm": 0.5559633027522936, + "acc_norm_stderr": 
0.021302621211654518 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.04134913018303316, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.04134913018303316 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.41830065359477125, + "acc_stderr": 0.028245134024387292, + "acc_norm": 0.41830065359477125, + "acc_norm_stderr": 0.028245134024387292 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6611570247933884, + "acc_stderr": 0.0432076780753667, + "acc_norm": 0.6611570247933884, + "acc_norm_stderr": 0.0432076780753667 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4342105263157895, + "acc_stderr": 0.04033565667848319, + "acc_norm": 0.4342105263157895, + "acc_norm_stderr": 0.04033565667848319 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4019607843137255, + "acc_stderr": 0.01983517648437538, + "acc_norm": 0.4019607843137255, + "acc_norm_stderr": 0.01983517648437538 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.36524822695035464, + "acc_stderr": 0.028723863853281278, + "acc_norm": 0.36524822695035464, + "acc_norm_stderr": 0.028723863853281278 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25892857142857145, + "acc_stderr": 0.041577515398656284, + "acc_norm": 0.25892857142857145, + "acc_norm_stderr": 0.041577515398656284 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.35648148148148145, + "acc_stderr": 0.03266478331527272, + "acc_norm": 0.35648148148148145, + "acc_norm_stderr": 0.03266478331527272 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2435754189944134, + "acc_stderr": 0.014355911964767857, + "acc_norm": 0.2435754189944134, + "acc_norm_stderr": 0.014355911964767857 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + 
"acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.40441176470588236, + "acc_stderr": 0.029812630701569746, + "acc_norm": 0.40441176470588236, + "acc_norm_stderr": 0.029812630701569746 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.47346938775510206, + "acc_stderr": 0.03196412734523272, + "acc_norm": 0.47346938775510206, + "acc_norm_stderr": 0.03196412734523272 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6118143459915611, + "acc_stderr": 0.031722950043323296, + "acc_norm": 0.6118143459915611, + "acc_norm_stderr": 0.031722950043323296 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3604954367666232, + "acc_stderr": 0.012263110237299235, + "acc_norm": 0.3604954367666232, + "acc_norm_stderr": 0.012263110237299235 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4852941176470588, + "acc_stderr": 0.035077938347913236, + "acc_norm": 0.4852941176470588, + "acc_norm_stderr": 0.035077938347913236 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5333333333333333, + "acc_stderr": 0.03895658065271847, + "acc_norm": 0.5333333333333333, + "acc_norm_stderr": 0.03895658065271847 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2729498164014688, + "mc1_stderr": 0.015594753632006514, + "mc2": 0.43540892594005776, + "mc2_stderr": 0.014927422551655146 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.45454545454545453, + "acc_stderr": 0.0171191722080615, + "acc_norm": 0.5985832349468713, + "acc_norm_stderr": 0.01685290785872906 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + 
"harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + 
"harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "LDCC/LDCC-Instruct-Llama-2-ko-13B-v1.3", + "model_sha": "3eabc4d02efc859940fd78f95895c10376edfbae", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/LDCC/LDCC-Instruct-Llama-2-ko-13B-v1.4/result_2023-11-01 02:12:17.json b/LDCC/LDCC-Instruct-Llama-2-ko-13B-v1.4/result_2023-11-01 02:12:17.json new file mode 100644 index 0000000000000000000000000000000000000000..d738434070d3e7d4388c9eeb42911a943c53232c --- /dev/null +++ b/LDCC/LDCC-Instruct-Llama-2-ko-13B-v1.4/result_2023-11-01 02:12:17.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.43600682593856654, + "acc_stderr": 0.014491225699230914, + "acc_norm": 0.4778156996587031, + "acc_norm_stderr": 0.014597001927076133 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4395538737303326, + "acc_stderr": 0.004953184534223989, + "acc_norm": 0.5835490938060147, + "acc_norm_stderr": 0.0049196263806455115 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.49122807017543857, + "acc_stderr": 0.038342347441649924, + "acc_norm": 0.49122807017543857, + "acc_norm_stderr": 0.038342347441649924 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5145631067961165, + "acc_stderr": 0.04948637324026637, + "acc_norm": 0.5145631067961165, + "acc_norm_stderr": 0.04948637324026637 + }, + 
"harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5478927203065134, + "acc_stderr": 0.017797751493865636, + "acc_norm": 0.5478927203065134, + "acc_norm_stderr": 0.017797751493865636 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.043163785995113245, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.043163785995113245 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421255, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421255 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39574468085106385, + "acc_stderr": 0.03196758697835362, + "acc_norm": 0.39574468085106385, + "acc_norm_stderr": 0.03196758697835362 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4036144578313253, + "acc_stderr": 0.03819486140758398, + "acc_norm": 0.4036144578313253, + "acc_norm_stderr": 0.03819486140758398 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5176848874598071, + "acc_stderr": 0.02838032284907713, + "acc_norm": 0.5176848874598071, + "acc_norm_stderr": 0.02838032284907713 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5336322869955157, + "acc_stderr": 0.033481800170603065, + "acc_norm": 0.5336322869955157, + "acc_norm_stderr": 0.033481800170603065 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4580152671755725, + "acc_stderr": 0.04369802690578756, + "acc_norm": 0.4580152671755725, + "acc_norm_stderr": 0.04369802690578756 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5959595959595959, + "acc_stderr": 0.03496130972056126, + "acc_norm": 0.5959595959595959, + "acc_norm_stderr": 0.03496130972056126 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3793103448275862, + "acc_stderr": 0.04043461861916747, + "acc_norm": 0.3793103448275862, + "acc_norm_stderr": 0.04043461861916747 + 
}, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237655, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237655 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4789915966386555, + "acc_stderr": 0.03244980849990029, + "acc_norm": 0.4789915966386555, + "acc_norm_stderr": 0.03244980849990029 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4461538461538462, + "acc_stderr": 0.02520357177302834, + "acc_norm": 0.4461538461538462, + "acc_norm_stderr": 0.02520357177302834 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5462962962962963, + "acc_stderr": 0.048129173245368216, + "acc_norm": 0.5462962962962963, + "acc_norm_stderr": 0.048129173245368216 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3891625615763547, + "acc_stderr": 0.034304624161038716, + "acc_norm": 0.3891625615763547, + "acc_norm_stderr": 0.034304624161038716 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4612903225806452, + "acc_stderr": 0.028358634859836928, + "acc_norm": 0.4612903225806452, + "acc_norm_stderr": 0.028358634859836928 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6367521367521367, + "acc_stderr": 0.03150712523091265, + "acc_norm": 0.6367521367521367, + "acc_norm_stderr": 0.03150712523091265 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4490566037735849, + "acc_stderr": 0.030612730713641092, + "acc_norm": 0.4490566037735849, + "acc_norm_stderr": 0.030612730713641092 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5636363636363636, + "acc_stderr": 0.04750185058907296, + "acc_norm": 0.5636363636363636, + 
"acc_norm_stderr": 0.04750185058907296 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2518518518518518, + "acc_stderr": 0.026466117538959916, + "acc_norm": 0.2518518518518518, + "acc_norm_stderr": 0.026466117538959916 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2781456953642384, + "acc_stderr": 0.036586032627637426, + "acc_norm": 0.2781456953642384, + "acc_norm_stderr": 0.036586032627637426 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5870646766169154, + "acc_stderr": 0.03481520803367348, + "acc_norm": 0.5870646766169154, + "acc_norm_stderr": 0.03481520803367348 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4046242774566474, + "acc_stderr": 0.0374246119388725, + "acc_norm": 0.4046242774566474, + "acc_norm_stderr": 0.0374246119388725 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2751322751322751, + "acc_stderr": 0.02300008685906864, + "acc_norm": 0.2751322751322751, + "acc_norm_stderr": 0.02300008685906864 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.040166600304512336, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.040166600304512336 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5144508670520231, + "acc_stderr": 0.026907849856282532, + "acc_norm": 0.5144508670520231, + "acc_norm_stderr": 0.026907849856282532 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5214723926380368, + "acc_stderr": 0.03924746876751129, + "acc_norm": 0.5214723926380368, + "acc_norm_stderr": 0.03924746876751129 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.49691358024691357, + "acc_stderr": 0.02782021415859437, + "acc_norm": 
0.49691358024691357, + "acc_norm_stderr": 0.02782021415859437 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5647668393782384, + "acc_stderr": 0.03578038165008586, + "acc_norm": 0.5647668393782384, + "acc_norm_stderr": 0.03578038165008586 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.04049339297748141, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.04049339297748141 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5908256880733945, + "acc_stderr": 0.021080670264433738, + "acc_norm": 0.5908256880733945, + "acc_norm_stderr": 0.021080670264433738 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30158730158730157, + "acc_stderr": 0.04104947269903394, + "acc_norm": 0.30158730158730157, + "acc_norm_stderr": 0.04104947269903394 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.39215686274509803, + "acc_stderr": 0.027956046165424513, + "acc_norm": 0.39215686274509803, + "acc_norm_stderr": 0.027956046165424513 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6446280991735537, + "acc_stderr": 0.0436923632657398, + "acc_norm": 0.6446280991735537, + "acc_norm_stderr": 0.0436923632657398 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3881578947368421, + "acc_stderr": 0.03965842097512744, + "acc_norm": 0.3881578947368421, + "acc_norm_stderr": 0.03965842097512744 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4019607843137255, + "acc_stderr": 0.01983517648437538, + "acc_norm": 0.4019607843137255, + "acc_norm_stderr": 0.01983517648437538 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3475177304964539, + 
"acc_stderr": 0.02840662780959095, + "acc_norm": 0.3475177304964539, + "acc_norm_stderr": 0.02840662780959095 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25892857142857145, + "acc_stderr": 0.041577515398656284, + "acc_norm": 0.25892857142857145, + "acc_norm_stderr": 0.041577515398656284 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.031674687068289784, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.031674687068289784 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4338235294117647, + "acc_stderr": 0.03010563657001664, + "acc_norm": 0.4338235294117647, + "acc_norm_stderr": 0.03010563657001664 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.39183673469387753, + "acc_stderr": 0.03125127591089165, + "acc_norm": 0.39183673469387753, + "acc_norm_stderr": 0.03125127591089165 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6540084388185654, + "acc_stderr": 0.03096481058878671, + "acc_norm": 0.6540084388185654, + "acc_norm_stderr": 0.03096481058878671 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.35528031290743156, + "acc_stderr": 0.012223623364044043, + "acc_norm": 0.35528031290743156, + "acc_norm_stderr": 0.012223623364044043 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5147058823529411, + "acc_stderr": 0.035077938347913236, + "acc_norm": 0.5147058823529411, + "acc_norm_stderr": 0.035077938347913236 + }, + 
"harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5757575757575758, + "acc_stderr": 0.038592681420702636, + "acc_norm": 0.5757575757575758, + "acc_norm_stderr": 0.038592681420702636 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.36964504283965727, + "mc1_stderr": 0.0168981807069739, + "mc2": 0.5205477409426235, + "mc2_stderr": 0.01592635844376339 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4817001180637544, + "acc_stderr": 0.017178836639177752, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.017119172208061504 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + 
"harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "LDCC/LDCC-Instruct-Llama-2-ko-13B-v1.4", + "model_sha": "784a5488ff350bcd9fde9d7aff59a0b9988acc2a", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/LDCC/LDCC-Instruct-Llama-2-ko-13B-v1.5/result_2023-11-13 07:24:03.json b/LDCC/LDCC-Instruct-Llama-2-ko-13B-v1.5/result_2023-11-13 07:24:03.json new file mode 100644 index 0000000000000000000000000000000000000000..e0037af29c65dfe14a16d6cf13be19100ad41749 --- /dev/null +++ b/LDCC/LDCC-Instruct-Llama-2-ko-13B-v1.5/result_2023-11-13 07:24:03.json @@ -0,0 +1,444 @@ +{ + "results": { + 
"harness|ko_arc_challenge|25": { + "acc": 0.4274744027303754, + "acc_stderr": 0.01445686294465065, + "acc_norm": 0.4803754266211604, + "acc_norm_stderr": 0.014600132075947096 + }, + "harness|ko_hellaswag|10": { + "acc": 0.42949611631149176, + "acc_stderr": 0.004939925958728876, + "acc_norm": 0.5841465843457478, + "acc_norm_stderr": 0.004918612098944032 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5146198830409356, + "acc_stderr": 0.038331852752130254, + "acc_norm": 0.5146198830409356, + "acc_norm_stderr": 0.038331852752130254 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5242718446601942, + "acc_stderr": 0.049449010929737795, + "acc_norm": 0.5242718446601942, + "acc_norm_stderr": 0.049449010929737795 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5542784163473818, + "acc_stderr": 0.017774297282479503, + "acc_norm": 0.5542784163473818, + "acc_norm_stderr": 0.017774297282479503 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.043097329010363554, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.043097329010363554 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206824, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206824 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3872340425531915, + "acc_stderr": 0.03184389265339526, + "acc_norm": 0.3872340425531915, + "acc_norm_stderr": 0.03184389265339526 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.41566265060240964, + "acc_stderr": 0.03836722176598053, + "acc_norm": 0.41566265060240964, + "acc_norm_stderr": 0.03836722176598053 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5337620578778135, + "acc_stderr": 0.0283332771095628, + "acc_norm": 0.5337620578778135, + "acc_norm_stderr": 0.0283332771095628 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.515695067264574, + "acc_stderr": 0.0335412657542081, + "acc_norm": 0.515695067264574, + "acc_norm_stderr": 0.0335412657542081 + }, + 
"harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4732824427480916, + "acc_stderr": 0.04379024936553894, + "acc_norm": 0.4732824427480916, + "acc_norm_stderr": 0.04379024936553894 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5909090909090909, + "acc_stderr": 0.03502975799413008, + "acc_norm": 0.5909090909090909, + "acc_norm_stderr": 0.03502975799413008 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.04104269211806232, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.04104269211806232 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.041583075330832865, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.041583075330832865 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.46638655462184875, + "acc_stderr": 0.03240501447690071, + "acc_norm": 0.46638655462184875, + "acc_norm_stderr": 0.03240501447690071 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.43846153846153846, + "acc_stderr": 0.025158266016868543, + "acc_norm": 0.43846153846153846, + "acc_norm_stderr": 0.025158266016868543 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.04803752235190192, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.04803752235190192 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39901477832512317, + "acc_stderr": 0.034454876862647144, + "acc_norm": 0.39901477832512317, + 
"acc_norm_stderr": 0.034454876862647144 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.45806451612903226, + "acc_stderr": 0.028343787250540636, + "acc_norm": 0.45806451612903226, + "acc_norm_stderr": 0.028343787250540636 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6410256410256411, + "acc_stderr": 0.03142616993791924, + "acc_norm": 0.6410256410256411, + "acc_norm_stderr": 0.03142616993791924 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4377358490566038, + "acc_stderr": 0.030533338430467516, + "acc_norm": 0.4377358490566038, + "acc_norm_stderr": 0.030533338430467516 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5272727272727272, + "acc_stderr": 0.0478200179138006, + "acc_norm": 0.5272727272727272, + "acc_norm_stderr": 0.0478200179138006 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25555555555555554, + "acc_stderr": 0.02659393910184407, + "acc_norm": 0.25555555555555554, + "acc_norm_stderr": 0.02659393910184407 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + "acc_stderr": 0.03734535676787198, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.03734535676787198 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6019900497512438, + "acc_stderr": 0.034611994290400135, + "acc_norm": 0.6019900497512438, + "acc_norm_stderr": 0.034611994290400135 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4046242774566474, + "acc_stderr": 0.0374246119388725, + "acc_norm": 0.4046242774566474, + "acc_norm_stderr": 0.0374246119388725 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2830687830687831, + "acc_stderr": 0.023201392938194978, + "acc_norm": 0.2830687830687831, + "acc_norm_stderr": 0.023201392938194978 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3680555555555556, + "acc_stderr": 0.04032999053960719, + "acc_norm": 0.3680555555555556, + "acc_norm_stderr": 0.04032999053960719 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + 
"acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.63, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.63, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5289017341040463, + "acc_stderr": 0.026874085883518348, + "acc_norm": 0.5289017341040463, + "acc_norm_stderr": 0.026874085883518348 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.49079754601226994, + "acc_stderr": 0.039277056007874414, + "acc_norm": 0.49079754601226994, + "acc_norm_stderr": 0.039277056007874414 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.47530864197530864, + "acc_stderr": 0.02778680093142745, + "acc_norm": 0.47530864197530864, + "acc_norm_stderr": 0.02778680093142745 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5544041450777202, + "acc_stderr": 0.035870149860756595, + "acc_norm": 0.5544041450777202, + "acc_norm_stderr": 0.035870149860756595 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.04096985139843671, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.04096985139843671 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5834862385321101, + "acc_stderr": 0.021136376504030874, + "acc_norm": 0.5834862385321101, + "acc_norm_stderr": 0.021136376504030874 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.039325376803928724, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.039325376803928724 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.40522875816993464, + "acc_stderr": 0.028110928492809075, + "acc_norm": 0.40522875816993464, + "acc_norm_stderr": 0.028110928492809075 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 
0.41, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.41, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.043913262867240704, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.043913262867240704 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3815789473684211, + "acc_stderr": 0.03953173377749194, + "acc_norm": 0.3815789473684211, + "acc_norm_stderr": 0.03953173377749194 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.380718954248366, + "acc_stderr": 0.019643801557924806, + "acc_norm": 0.380718954248366, + "acc_norm_stderr": 0.019643801557924806 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3617021276595745, + "acc_stderr": 0.028663820147199495, + "acc_norm": 0.3617021276595745, + "acc_norm_stderr": 0.028663820147199495 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25, + "acc_stderr": 0.04109974682633932, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04109974682633932 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.0321495214780275, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.0321495214780275 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4375, + "acc_stderr": 0.030134614954403924, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.030134614954403924 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4489795918367347, + 
"acc_stderr": 0.03184213866687579, + "acc_norm": 0.4489795918367347, + "acc_norm_stderr": 0.03184213866687579 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6455696202531646, + "acc_stderr": 0.0311373042971858, + "acc_norm": 0.6455696202531646, + "acc_norm_stderr": 0.0311373042971858 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.34485006518904826, + "acc_stderr": 0.012139881006287058, + "acc_norm": 0.34485006518904826, + "acc_norm_stderr": 0.012139881006287058 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5, + "acc_stderr": 0.03509312031717982, + "acc_norm": 0.5, + "acc_norm_stderr": 0.03509312031717982 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.038881769216741, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.038881769216741 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.31456548347613217, + "mc1_stderr": 0.01625524199317919, + "mc2": 0.46079664403216586, + "mc2_stderr": 0.01602078431352231 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.38016528925619836, + "acc_stderr": 0.016689333596980112, + "acc_norm": 0.4510035419126328, + "acc_norm_stderr": 0.017107618859549357 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + 
"harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "LDCC/LDCC-Instruct-Llama-2-ko-13B-v1.5", + 
"model_sha": "0f783e7c9985f2de65016f414ad026ca7da56ad4", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/LDCC/LDCC-Instruct-Llama-2-ko-13B-v1.6/result_2023-11-13 07:25:43.json b/LDCC/LDCC-Instruct-Llama-2-ko-13B-v1.6/result_2023-11-13 07:25:43.json new file mode 100644 index 0000000000000000000000000000000000000000..c7e09c62e8737b3bcf4df84008b39ec563ff8d2a --- /dev/null +++ b/LDCC/LDCC-Instruct-Llama-2-ko-13B-v1.6/result_2023-11-13 07:25:43.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4308873720136519, + "acc_stderr": 0.01447113339264246, + "acc_norm": 0.4803754266211604, + "acc_norm_stderr": 0.0146001320759471 + }, + "harness|ko_hellaswag|10": { + "acc": 0.43328022306313485, + "acc_stderr": 0.004945157565218188, + "acc_norm": 0.5933081059549891, + "acc_norm_stderr": 0.004902125388002201 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5146198830409356, + "acc_stderr": 0.038331852752130254, + "acc_norm": 0.5146198830409356, + "acc_norm_stderr": 0.038331852752130254 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5339805825242718, + "acc_stderr": 0.0493929144727348, + "acc_norm": 0.5339805825242718, + "acc_norm_stderr": 0.0493929144727348 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5593869731800766, + "acc_stderr": 0.017753396973908493, + "acc_norm": 0.5593869731800766, + "acc_norm_stderr": 0.017753396973908493 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45925925925925926, + "acc_stderr": 0.04304979692464244, + "acc_norm": 0.45925925925925926, + "acc_norm_stderr": 0.04304979692464244 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3872340425531915, + "acc_stderr": 
0.03184389265339526, + "acc_norm": 0.3872340425531915, + "acc_norm_stderr": 0.03184389265339526 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4036144578313253, + "acc_stderr": 0.03819486140758398, + "acc_norm": 0.4036144578313253, + "acc_norm_stderr": 0.03819486140758398 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5080385852090032, + "acc_stderr": 0.028394421370984538, + "acc_norm": 0.5080385852090032, + "acc_norm_stderr": 0.028394421370984538 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5291479820627802, + "acc_stderr": 0.03350073248773404, + "acc_norm": 0.5291479820627802, + "acc_norm_stderr": 0.03350073248773404 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48091603053435117, + "acc_stderr": 0.04382094705550989, + "acc_norm": 0.48091603053435117, + "acc_norm_stderr": 0.04382094705550989 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562429, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562429 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5505050505050505, + "acc_stderr": 0.03544132491947969, + "acc_norm": 0.5505050505050505, + "acc_norm_stderr": 0.03544132491947969 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4413793103448276, + "acc_stderr": 0.04137931034482757, + "acc_norm": 0.4413793103448276, + "acc_norm_stderr": 0.04137931034482757 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237655, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237655 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4579831932773109, + "acc_stderr": 0.032363611119519416, + "acc_norm": 0.4579831932773109, + "acc_norm_stderr": 0.032363611119519416 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.43846153846153846, + "acc_stderr": 0.025158266016868547, + "acc_norm": 0.43846153846153846, + "acc_norm_stderr": 0.025158266016868547 + }, + 
"harness|ko_mmlu_computer_security|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5092592592592593, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.5092592592592593, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39901477832512317, + "acc_stderr": 0.03445487686264715, + "acc_norm": 0.39901477832512317, + "acc_norm_stderr": 0.03445487686264715 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.47419354838709676, + "acc_stderr": 0.02840609505765332, + "acc_norm": 0.47419354838709676, + "acc_norm_stderr": 0.02840609505765332 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6324786324786325, + "acc_stderr": 0.031585391577456365, + "acc_norm": 0.6324786324786325, + "acc_norm_stderr": 0.031585391577456365 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4716981132075472, + "acc_stderr": 0.030723535249006107, + "acc_norm": 0.4716981132075472, + "acc_norm_stderr": 0.030723535249006107 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5, + "acc_stderr": 0.04789131426105757, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04789131426105757 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.02684205787383371, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.02684205787383371 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.037101857261199946, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.037101857261199946 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6169154228855721, + "acc_stderr": 0.034375193373382504, + "acc_norm": 0.6169154228855721, + "acc_norm_stderr": 0.034375193373382504 + }, + 
"harness|ko_mmlu_college_medicine|5": { + "acc": 0.43352601156069365, + "acc_stderr": 0.03778621079092055, + "acc_norm": 0.43352601156069365, + "acc_norm_stderr": 0.03778621079092055 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2830687830687831, + "acc_stderr": 0.023201392938194978, + "acc_norm": 0.2830687830687831, + "acc_norm_stderr": 0.023201392938194978 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.040166600304512336, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.040166600304512336 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.63, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.63, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5173410404624278, + "acc_stderr": 0.026902900458666647, + "acc_norm": 0.5173410404624278, + "acc_norm_stderr": 0.026902900458666647 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.48466257668711654, + "acc_stderr": 0.03926522378708843, + "acc_norm": 0.48466257668711654, + "acc_norm_stderr": 0.03926522378708843 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.027801656212323667, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.027801656212323667 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5544041450777202, + "acc_stderr": 0.035870149860756595, + "acc_norm": 0.5544041450777202, + "acc_norm_stderr": 0.035870149860756595 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.04096985139843671, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 
0.04096985139843671 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5944954128440367, + "acc_stderr": 0.02105099799189684, + "acc_norm": 0.5944954128440367, + "acc_norm_stderr": 0.02105099799189684 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.04134913018303316, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.04134913018303316 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.43790849673202614, + "acc_stderr": 0.02840830202033269, + "acc_norm": 0.43790849673202614, + "acc_norm_stderr": 0.02840830202033269 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6446280991735537, + "acc_stderr": 0.0436923632657398, + "acc_norm": 0.6446280991735537, + "acc_norm_stderr": 0.0436923632657398 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3881578947368421, + "acc_stderr": 0.03965842097512744, + "acc_norm": 0.3881578947368421, + "acc_norm_stderr": 0.03965842097512744 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.01965992249362333, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.01965992249362333 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.35815602836879434, + "acc_stderr": 0.02860208586275942, + "acc_norm": 0.35815602836879434, + "acc_norm_stderr": 0.02860208586275942 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.22321428571428573, + "acc_stderr": 0.039523019677025116, + "acc_norm": 0.22321428571428573, + "acc_norm_stderr": 0.039523019677025116 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.33796296296296297, + "acc_stderr": 0.032259413526312945, + "acc_norm": 0.33796296296296297, + "acc_norm_stderr": 0.032259413526312945 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 
0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4485294117647059, + "acc_stderr": 0.0302114796091216, + "acc_norm": 0.4485294117647059, + "acc_norm_stderr": 0.0302114796091216 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5061224489795918, + "acc_stderr": 0.03200682020163907, + "acc_norm": 0.5061224489795918, + "acc_norm_stderr": 0.03200682020163907 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6329113924050633, + "acc_stderr": 0.03137624072561619, + "acc_norm": 0.6329113924050633, + "acc_norm_stderr": 0.03137624072561619 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3435462842242503, + "acc_stderr": 0.012128961174190154, + "acc_norm": 0.3435462842242503, + "acc_norm_stderr": 0.012128961174190154 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4950980392156863, + "acc_stderr": 0.035091433756067866, + "acc_norm": 0.4950980392156863, + "acc_norm_stderr": 0.035091433756067866 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5212121212121212, + "acc_stderr": 0.03900828913737301, + "acc_norm": 0.5212121212121212, + "acc_norm_stderr": 0.03900828913737301 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.32802937576499386, + "mc1_stderr": 0.01643563293281504, + "mc2": 0.46940366768411657, + "mc2_stderr": 0.016167620517601608 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.41912632821723733, + "acc_stderr": 0.016963995010862792, + "acc_norm": 0.4805194805194805, + "acc_norm_stderr": 0.01717730199234256 + } + }, + "versions": { + "all": 0, + 
"harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + 
"harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "LDCC/LDCC-Instruct-Llama-2-ko-13B-v1.6", + "model_sha": "8ca05731176451a126cf07e06a97f08e735e21b4", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/LDCC/LDCC-Instruct-Llama-2-ko-13B-v1.7/result_2023-12-11 07:43:12.json b/LDCC/LDCC-Instruct-Llama-2-ko-13B-v1.7/result_2023-12-11 07:43:12.json new file mode 100644 index 0000000000000000000000000000000000000000..373637bb7b42b89901d13984e81443cba535e498 --- /dev/null +++ b/LDCC/LDCC-Instruct-Llama-2-ko-13B-v1.7/result_2023-12-11 07:43:12.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.41638225255972694, + "acc_stderr": 0.014405618279436178, + "acc_norm": 0.48378839590443684, + "acc_norm_stderr": 0.014603708567414936 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4486158135829516, + "acc_stderr": 0.004963362085275563, + "acc_norm": 0.6018721370244972, + "acc_norm_stderr": 0.00488511646555028 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5672514619883041, + "acc_stderr": 0.03799978644370607, + "acc_norm": 0.5672514619883041, + "acc_norm_stderr": 0.03799978644370607 + }, + 
"harness|ko_mmlu_management|5": { + "acc": 0.5145631067961165, + "acc_stderr": 0.04948637324026637, + "acc_norm": 0.5145631067961165, + "acc_norm_stderr": 0.04948637324026637 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5568326947637292, + "acc_stderr": 0.01776408503534842, + "acc_norm": 0.5568326947637292, + "acc_norm_stderr": 0.01776408503534842 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.5185185185185185, + "acc_stderr": 0.043163785995113245, + "acc_norm": 0.5185185185185185, + "acc_norm_stderr": 0.043163785995113245 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39148936170212767, + "acc_stderr": 0.03190701242326812, + "acc_norm": 0.39148936170212767, + "acc_norm_stderr": 0.03190701242326812 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.43373493975903615, + "acc_stderr": 0.03858158940685515, + "acc_norm": 0.43373493975903615, + "acc_norm_stderr": 0.03858158940685515 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5305466237942122, + "acc_stderr": 0.028345045864840622, + "acc_norm": 0.5305466237942122, + "acc_norm_stderr": 0.028345045864840622 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5067264573991032, + "acc_stderr": 0.033554765962343545, + "acc_norm": 0.5067264573991032, + "acc_norm_stderr": 0.033554765962343545 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.46564885496183206, + "acc_stderr": 0.04374928560599738, + "acc_norm": 0.46564885496183206, + "acc_norm_stderr": 0.04374928560599738 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.45, + "acc_stderr": 0.04999999999999999, + "acc_norm": 0.45, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5353535353535354, + "acc_stderr": 0.03553436368828064, + "acc_norm": 0.5353535353535354, + "acc_norm_stderr": 0.03553436368828064 + }, + 
"harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.45517241379310347, + "acc_stderr": 0.04149886942192117, + "acc_norm": 0.45517241379310347, + "acc_norm_stderr": 0.04149886942192117 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.16666666666666666, + "acc_stderr": 0.03708284662416544, + "acc_norm": 0.16666666666666666, + "acc_norm_stderr": 0.03708284662416544 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.48739495798319327, + "acc_stderr": 0.032468167657521745, + "acc_norm": 0.48739495798319327, + "acc_norm_stderr": 0.032468167657521745 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4512820512820513, + "acc_stderr": 0.025230381238934833, + "acc_norm": 0.4512820512820513, + "acc_norm_stderr": 0.025230381238934833 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.53, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.53, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3891625615763547, + "acc_stderr": 0.034304624161038716, + "acc_norm": 0.3891625615763547, + "acc_norm_stderr": 0.034304624161038716 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.44516129032258067, + "acc_stderr": 0.02827241018621491, + "acc_norm": 0.44516129032258067, + "acc_norm_stderr": 0.02827241018621491 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6709401709401709, + "acc_stderr": 0.030782321577688166, + "acc_norm": 0.6709401709401709, + "acc_norm_stderr": 0.030782321577688166 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4226415094339623, + "acc_stderr": 0.03040233144576954, + "acc_norm": 0.4226415094339623, + "acc_norm_stderr": 
0.03040233144576954 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5181818181818182, + "acc_stderr": 0.04785964010794916, + "acc_norm": 0.5181818181818182, + "acc_norm_stderr": 0.04785964010794916 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.27037037037037037, + "acc_stderr": 0.027080372815145658, + "acc_norm": 0.27037037037037037, + "acc_norm_stderr": 0.027080372815145658 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.03802039760107903, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.03802039760107903 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5920398009950248, + "acc_stderr": 0.03475116365194092, + "acc_norm": 0.5920398009950248, + "acc_norm_stderr": 0.03475116365194092 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.42196531791907516, + "acc_stderr": 0.037657466938651504, + "acc_norm": 0.42196531791907516, + "acc_norm_stderr": 0.037657466938651504 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.023919984164047732, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.023919984164047732 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3819444444444444, + "acc_stderr": 0.040629907841466674, + "acc_norm": 0.3819444444444444, + "acc_norm_stderr": 0.040629907841466674 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.67, + "acc_stderr": 0.047258156262526066, + "acc_norm": 0.67, + "acc_norm_stderr": 0.047258156262526066 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.546242774566474, + "acc_stderr": 0.026803720583206184, + "acc_norm": 0.546242774566474, + "acc_norm_stderr": 0.026803720583206184 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5521472392638037, + "acc_stderr": 0.03906947479456607, + "acc_norm": 
0.5521472392638037, + "acc_norm_stderr": 0.03906947479456607 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5092592592592593, + "acc_stderr": 0.027815973433878014, + "acc_norm": 0.5092592592592593, + "acc_norm_stderr": 0.027815973433878014 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5803108808290155, + "acc_stderr": 0.03561587327685884, + "acc_norm": 0.5803108808290155, + "acc_norm_stderr": 0.03561587327685884 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.03999423879281336, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.03999423879281336 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5834862385321101, + "acc_stderr": 0.021136376504030868, + "acc_norm": 0.5834862385321101, + "acc_norm_stderr": 0.021136376504030868 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30158730158730157, + "acc_stderr": 0.04104947269903394, + "acc_norm": 0.30158730158730157, + "acc_norm_stderr": 0.04104947269903394 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.43790849673202614, + "acc_stderr": 0.02840830202033269, + "acc_norm": 0.43790849673202614, + "acc_norm_stderr": 0.02840830202033269 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.41, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.41, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6776859504132231, + "acc_stderr": 0.042664163633521685, + "acc_norm": 0.6776859504132231, + "acc_norm_stderr": 0.042664163633521685 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.39473684210526316, + "acc_stderr": 0.039777499346220734, + "acc_norm": 0.39473684210526316, + "acc_norm_stderr": 0.039777499346220734 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4150326797385621, + 
"acc_stderr": 0.01993362777685741, + "acc_norm": 0.4150326797385621, + "acc_norm_stderr": 0.01993362777685741 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3546099290780142, + "acc_stderr": 0.028538650028878645, + "acc_norm": 0.3546099290780142, + "acc_norm_stderr": 0.028538650028878645 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3125, + "acc_stderr": 0.043994650575715215, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.043994650575715215 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3194444444444444, + "acc_stderr": 0.03179876342176851, + "acc_norm": 0.3194444444444444, + "acc_norm_stderr": 0.03179876342176851 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24916201117318434, + "acc_stderr": 0.01446589382985994, + "acc_norm": 0.24916201117318434, + "acc_norm_stderr": 0.01446589382985994 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.47, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.47, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.375, + "acc_stderr": 0.029408372932278746, + "acc_norm": 0.375, + "acc_norm_stderr": 0.029408372932278746 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5183673469387755, + "acc_stderr": 0.03198761546763127, + "acc_norm": 0.5183673469387755, + "acc_norm_stderr": 0.03198761546763127 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6751054852320675, + "acc_stderr": 0.03048603938910531, + "acc_norm": 0.6751054852320675, + "acc_norm_stderr": 0.03048603938910531 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3520208604954368, + "acc_stderr": 0.012198140605353593, + "acc_norm": 0.3520208604954368, + "acc_norm_stderr": 0.012198140605353593 + }, + "harness|ko_mmlu_high_school_us_history|5": { + 
"acc": 0.5294117647058824, + "acc_stderr": 0.03503235296367993, + "acc_norm": 0.5294117647058824, + "acc_norm_stderr": 0.03503235296367993 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6060606060606061, + "acc_stderr": 0.038154943086889305, + "acc_norm": 0.6060606060606061, + "acc_norm_stderr": 0.038154943086889305 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.33047735618115054, + "mc1_stderr": 0.016466769613698296, + "mc2": 0.5033109991126061, + "mc2_stderr": 0.015408807692069393 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.500590318772137, + "acc_stderr": 0.01719034212344859, + "acc_norm": 0.5560802833530106, + "acc_norm_stderr": 0.017081884623542543 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + 
"harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "LDCC/LDCC-Instruct-Llama-2-ko-13B-v1.7", + "model_sha": "da2fe170b8fa2c32b922b10cc1f21e74e7fb2395", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/LDCC/LDCC-Instruct-Llama-2-ko-16B/result_2023-12-31 10:15:24.json b/LDCC/LDCC-Instruct-Llama-2-ko-16B/result_2023-12-31 10:15:24.json new file mode 100644 index 0000000000000000000000000000000000000000..1954fa4c4011369adcf850526adb646ece2e5923 --- 
/dev/null +++ b/LDCC/LDCC-Instruct-Llama-2-ko-16B/result_2023-12-31 10:15:24.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.41552901023890787, + "acc_stderr": 0.014401366641216386, + "acc_norm": 0.492320819112628, + "acc_norm_stderr": 0.014609667440892567 + }, + "harness|ko_hellaswag|10": { + "acc": 0.44214299940250945, + "acc_stderr": 0.004956262919324406, + "acc_norm": 0.6040629356701852, + "acc_norm_stderr": 0.0048805154313231605 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5497076023391813, + "acc_stderr": 0.038158273659132366, + "acc_norm": 0.5497076023391813, + "acc_norm_stderr": 0.038158273659132366 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5436893203883495, + "acc_stderr": 0.049318019942204146, + "acc_norm": 0.5436893203883495, + "acc_norm_stderr": 0.049318019942204146 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5683269476372924, + "acc_stderr": 0.017712228939299798, + "acc_norm": 0.5683269476372924, + "acc_norm_stderr": 0.017712228939299798 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.043097329010363554, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.043097329010363554 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206824, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206824 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.37872340425531914, + "acc_stderr": 0.03170995606040655, + "acc_norm": 0.37872340425531914, + "acc_norm_stderr": 0.03170995606040655 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39156626506024095, + "acc_stderr": 0.03799857454479637, + "acc_norm": 0.39156626506024095, + "acc_norm_stderr": 0.03799857454479637 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5080385852090032, + "acc_stderr": 0.028394421370984538, + "acc_norm": 0.5080385852090032, + "acc_norm_stderr": 0.028394421370984538 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 
0.4977578475336323, + "acc_stderr": 0.03355746535223263, + "acc_norm": 0.4977578475336323, + "acc_norm_stderr": 0.03355746535223263 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48091603053435117, + "acc_stderr": 0.04382094705550988, + "acc_norm": 0.48091603053435117, + "acc_norm_stderr": 0.04382094705550988 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.44, + "acc_stderr": 0.0498887651569859, + "acc_norm": 0.44, + "acc_norm_stderr": 0.0498887651569859 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5757575757575758, + "acc_stderr": 0.03521224908841586, + "acc_norm": 0.5757575757575758, + "acc_norm_stderr": 0.03521224908841586 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4413793103448276, + "acc_stderr": 0.04137931034482758, + "acc_norm": 0.4413793103448276, + "acc_norm_stderr": 0.04137931034482758 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.040925639582376556, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.040925639582376556 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.48739495798319327, + "acc_stderr": 0.032468167657521745, + "acc_norm": 0.48739495798319327, + "acc_norm_stderr": 0.032468167657521745 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4307692307692308, + "acc_stderr": 0.02510682066053975, + "acc_norm": 0.4307692307692308, + "acc_norm_stderr": 0.02510682066053975 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 
0.4088669950738916, + "acc_stderr": 0.03459058815883233, + "acc_norm": 0.4088669950738916, + "acc_norm_stderr": 0.03459058815883233 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.43870967741935485, + "acc_stderr": 0.02822949732031722, + "acc_norm": 0.43870967741935485, + "acc_norm_stderr": 0.02822949732031722 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6410256410256411, + "acc_stderr": 0.031426169937919246, + "acc_norm": 0.6410256410256411, + "acc_norm_stderr": 0.031426169937919246 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4339622641509434, + "acc_stderr": 0.030503292013342596, + "acc_norm": 0.4339622641509434, + "acc_norm_stderr": 0.030503292013342596 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5181818181818182, + "acc_stderr": 0.04785964010794916, + "acc_norm": 0.5181818181818182, + "acc_norm_stderr": 0.04785964010794916 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25555555555555554, + "acc_stderr": 0.026593939101844065, + "acc_norm": 0.25555555555555554, + "acc_norm_stderr": 0.026593939101844065 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.03757949922943342, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.03757949922943342 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5920398009950248, + "acc_stderr": 0.03475116365194092, + "acc_norm": 0.5920398009950248, + "acc_norm_stderr": 0.03475116365194092 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.43352601156069365, + "acc_stderr": 0.03778621079092055, + "acc_norm": 0.43352601156069365, + "acc_norm_stderr": 0.03778621079092055 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.28835978835978837, + "acc_stderr": 0.0233306540545359, + "acc_norm": 0.28835978835978837, + "acc_norm_stderr": 0.0233306540545359 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3472222222222222, + "acc_stderr": 0.039812405437178615, + "acc_norm": 0.3472222222222222, + 
"acc_norm_stderr": 0.039812405437178615 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.63, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.63, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5317919075144508, + "acc_stderr": 0.02686462436675665, + "acc_norm": 0.5317919075144508, + "acc_norm_stderr": 0.02686462436675665 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5644171779141104, + "acc_stderr": 0.03895632464138937, + "acc_norm": 0.5644171779141104, + "acc_norm_stderr": 0.03895632464138937 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5216049382716049, + "acc_stderr": 0.02779476010500873, + "acc_norm": 0.5216049382716049, + "acc_norm_stderr": 0.02779476010500873 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6269430051813472, + "acc_stderr": 0.03490205592048573, + "acc_norm": 0.6269430051813472, + "acc_norm_stderr": 0.03490205592048573 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.0409698513984367, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.0409698513984367 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5688073394495413, + "acc_stderr": 0.021233365030319567, + "acc_norm": 0.5688073394495413, + "acc_norm_stderr": 0.021233365030319567 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30158730158730157, + "acc_stderr": 0.04104947269903394, + "acc_norm": 0.30158730158730157, + "acc_norm_stderr": 0.04104947269903394 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.42810457516339867, + "acc_stderr": 0.028332397483664274, + "acc_norm": 0.42810457516339867, + 
"acc_norm_stderr": 0.028332397483664274 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6776859504132231, + "acc_stderr": 0.042664163633521685, + "acc_norm": 0.6776859504132231, + "acc_norm_stderr": 0.042664163633521685 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4144736842105263, + "acc_stderr": 0.04008973785779206, + "acc_norm": 0.4144736842105263, + "acc_norm_stderr": 0.04008973785779206 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4264705882352941, + "acc_stderr": 0.02000791273935935, + "acc_norm": 0.4264705882352941, + "acc_norm_stderr": 0.02000791273935935 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3475177304964539, + "acc_stderr": 0.02840662780959095, + "acc_norm": 0.3475177304964539, + "acc_norm_stderr": 0.02840662780959095 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.33035714285714285, + "acc_stderr": 0.044642857142857116, + "acc_norm": 0.33035714285714285, + "acc_norm_stderr": 0.044642857142857116 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.33796296296296297, + "acc_stderr": 0.03225941352631295, + "acc_norm": 0.33796296296296297, + "acc_norm_stderr": 0.03225941352631295 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.264804469273743, + "acc_stderr": 0.014756906483260659, + "acc_norm": 0.264804469273743, + "acc_norm_stderr": 0.014756906483260659 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.47, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.47, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.35661764705882354, + "acc_stderr": 0.02909720956841197, 
+ "acc_norm": 0.35661764705882354, + "acc_norm_stderr": 0.02909720956841197 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4857142857142857, + "acc_stderr": 0.03199615232806287, + "acc_norm": 0.4857142857142857, + "acc_norm_stderr": 0.03199615232806287 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.03068582059661081, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.03068582059661081 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3520208604954368, + "acc_stderr": 0.01219814060535359, + "acc_norm": 0.3520208604954368, + "acc_norm_stderr": 0.01219814060535359 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5343137254901961, + "acc_stderr": 0.03501038327635897, + "acc_norm": 0.5343137254901961, + "acc_norm_stderr": 0.03501038327635897 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6121212121212121, + "acc_stderr": 0.0380491365397101, + "acc_norm": 0.6121212121212121, + "acc_norm_stderr": 0.0380491365397101 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.34761321909424725, + "mc1_stderr": 0.016670769188897306, + "mc2": 0.5104539931249092, + "mc2_stderr": 0.01608799028808744 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.46989374262101535, + "acc_stderr": 0.017159163590170216, + "acc_norm": 0.5159386068476978, + "acc_norm_stderr": 0.017181617837190192 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + 
"harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + 
"harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "LDCC/LDCC-Instruct-Llama-2-ko-16B", + "model_sha": "f7a3f41bb36b1e9b9d894512aa266fd30d4b5298", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/LDCC/LDCC-SOLAR-10.7B/result_2024-01-03 05:50:23.json b/LDCC/LDCC-SOLAR-10.7B/result_2024-01-03 05:50:23.json new file mode 100644 index 0000000000000000000000000000000000000000..33e572c31a374373b1890e50b00621f1083aebb1 --- /dev/null +++ b/LDCC/LDCC-SOLAR-10.7B/result_2024-01-03 05:50:23.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4069965870307167, + "acc_stderr": 0.014356399418009128, + "acc_norm": 0.47440273037542663, + "acc_norm_stderr": 0.014592230885298957 + }, + "harness|ko_hellaswag|10": { + "acc": 0.42053375821549493, + "acc_stderr": 0.004926358564494573, + "acc_norm": 0.5816570404301932, + "acc_norm_stderr": 0.004922789247319876 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6432748538011696, + "acc_stderr": 0.03674013002860954, + "acc_norm": 0.6432748538011696, + "acc_norm_stderr": 0.03674013002860954 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6310679611650486, + "acc_stderr": 0.0477761518115674, + "acc_norm": 0.6310679611650486, + "acc_norm_stderr": 0.0477761518115674 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6206896551724138, + "acc_stderr": 0.01735126811754445, + "acc_norm": 0.6206896551724138, + "acc_norm_stderr": 0.01735126811754445 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4148148148148148, + "acc_stderr": 0.04256193767901407, + "acc_norm": 0.4148148148148148, + "acc_norm_stderr": 0.04256193767901407 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036844, + 
"acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036844 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.42127659574468085, + "acc_stderr": 0.032278345101462685, + "acc_norm": 0.42127659574468085, + "acc_norm_stderr": 0.032278345101462685 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.45180722891566266, + "acc_stderr": 0.03874371556587953, + "acc_norm": 0.45180722891566266, + "acc_norm_stderr": 0.03874371556587953 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5401929260450161, + "acc_stderr": 0.028306190403305696, + "acc_norm": 0.5401929260450161, + "acc_norm_stderr": 0.028306190403305696 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5201793721973094, + "acc_stderr": 0.033530461674123, + "acc_norm": 0.5201793721973094, + "acc_norm_stderr": 0.033530461674123 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5267175572519084, + "acc_stderr": 0.04379024936553894, + "acc_norm": 0.5267175572519084, + "acc_norm_stderr": 0.04379024936553894 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6212121212121212, + "acc_stderr": 0.03456088731993747, + "acc_norm": 0.6212121212121212, + "acc_norm_stderr": 0.03456088731993747 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.04104269211806232, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.04104269211806232 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.04488482852329017, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.04488482852329017 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5294117647058824, + "acc_stderr": 0.03242225027115006, + "acc_norm": 0.5294117647058824, + "acc_norm_stderr": 0.03242225027115006 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 
0.48205128205128206, + "acc_stderr": 0.025334667080954953, + "acc_norm": 0.48205128205128206, + "acc_norm_stderr": 0.025334667080954953 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.59, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.59, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.33497536945812806, + "acc_stderr": 0.033208527423483104, + "acc_norm": 0.33497536945812806, + "acc_norm_stderr": 0.033208527423483104 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5419354838709678, + "acc_stderr": 0.028343787250540618, + "acc_norm": 0.5419354838709678, + "acc_norm_stderr": 0.028343787250540618 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7692307692307693, + "acc_stderr": 0.027601921381417604, + "acc_norm": 0.7692307692307693, + "acc_norm_stderr": 0.027601921381417604 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5132075471698113, + "acc_stderr": 0.030762134874500476, + "acc_norm": 0.5132075471698113, + "acc_norm_stderr": 0.030762134874500476 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5363636363636364, + "acc_stderr": 0.04776449162396197, + "acc_norm": 0.5363636363636364, + "acc_norm_stderr": 0.04776449162396197 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2740740740740741, + "acc_stderr": 0.027195934804085622, + "acc_norm": 0.2740740740740741, + "acc_norm_stderr": 0.027195934804085622 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + "acc_stderr": 0.03684881521389023, + "acc_norm": 0.2847682119205298, + "acc_norm_stderr": 0.03684881521389023 + }, + "harness|ko_mmlu_sociology|5": { + 
"acc": 0.6318407960199005, + "acc_stderr": 0.03410410565495302, + "acc_norm": 0.6318407960199005, + "acc_norm_stderr": 0.03410410565495302 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.44508670520231214, + "acc_stderr": 0.03789401760283648, + "acc_norm": 0.44508670520231214, + "acc_norm_stderr": 0.03789401760283648 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.36772486772486773, + "acc_stderr": 0.024833839825562417, + "acc_norm": 0.36772486772486773, + "acc_norm_stderr": 0.024833839825562417 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4236111111111111, + "acc_stderr": 0.04132125019723369, + "acc_norm": 0.4236111111111111, + "acc_norm_stderr": 0.04132125019723369 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237101, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237101 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5173410404624278, + "acc_stderr": 0.026902900458666647, + "acc_norm": 0.5173410404624278, + "acc_norm_stderr": 0.026902900458666647 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4601226993865031, + "acc_stderr": 0.03915857291436971, + "acc_norm": 0.4601226993865031, + "acc_norm_stderr": 0.03915857291436971 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5185185185185185, + "acc_stderr": 0.027801656212323667, + "acc_norm": 0.5185185185185185, + "acc_norm_stderr": 0.027801656212323667 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6424870466321243, + "acc_stderr": 0.034588160421810114, + "acc_norm": 0.6424870466321243, + "acc_norm_stderr": 0.034588160421810114 + }, + 
"harness|ko_mmlu_econometrics|5": { + "acc": 0.37719298245614036, + "acc_stderr": 0.04559522141958216, + "acc_norm": 0.37719298245614036, + "acc_norm_stderr": 0.04559522141958216 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5743119266055046, + "acc_stderr": 0.0211992359724708, + "acc_norm": 0.5743119266055046, + "acc_norm_stderr": 0.0211992359724708 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.04285714285714281, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.04285714285714281 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5163398692810458, + "acc_stderr": 0.028614624752805427, + "acc_norm": 0.5163398692810458, + "acc_norm_stderr": 0.028614624752805427 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6859504132231405, + "acc_stderr": 0.04236964753041018, + "acc_norm": 0.6859504132231405, + "acc_norm_stderr": 0.04236964753041018 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4868421052631579, + "acc_stderr": 0.04067533136309174, + "acc_norm": 0.4868421052631579, + "acc_norm_stderr": 0.04067533136309174 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.42810457516339867, + "acc_stderr": 0.0200176292142131, + "acc_norm": 0.42810457516339867, + "acc_norm_stderr": 0.0200176292142131 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.36879432624113473, + "acc_stderr": 0.028782227561347243, + "acc_norm": 0.36879432624113473, + "acc_norm_stderr": 0.028782227561347243 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.38392857142857145, + "acc_stderr": 0.04616143075028546, + "acc_norm": 0.38392857142857145, + "acc_norm_stderr": 0.04616143075028546 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4305555555555556, + "acc_stderr": 0.03376922151252335, + "acc_norm": 
0.4305555555555556, + "acc_norm_stderr": 0.03376922151252335 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.22793296089385476, + "acc_stderr": 0.014030149950805097, + "acc_norm": 0.22793296089385476, + "acc_norm_stderr": 0.014030149950805097 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.66, + "acc_stderr": 0.04760952285695237, + "acc_norm": 0.66, + "acc_norm_stderr": 0.04760952285695237 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.43014705882352944, + "acc_stderr": 0.030074971917302875, + "acc_norm": 0.43014705882352944, + "acc_norm_stderr": 0.030074971917302875 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4, + "acc_stderr": 0.03136250240935893, + "acc_norm": 0.4, + "acc_norm_stderr": 0.03136250240935893 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7426160337552743, + "acc_stderr": 0.02845882099146031, + "acc_norm": 0.7426160337552743, + "acc_norm_stderr": 0.02845882099146031 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3683181225554107, + "acc_stderr": 0.012319403369564646, + "acc_norm": 0.3683181225554107, + "acc_norm_stderr": 0.012319403369564646 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5441176470588235, + "acc_stderr": 0.03495624522015477, + "acc_norm": 0.5441176470588235, + "acc_norm_stderr": 0.03495624522015477 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5575757575757576, + "acc_stderr": 0.03878372113711275, + "acc_norm": 0.5575757575757576, + "acc_norm_stderr": 0.03878372113711275 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.4173806609547124, + "mc1_stderr": 0.017262891063272185, + "mc2": 0.5957318937868689, + "mc2_stderr": 0.01607589491221927 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5159386068476978, + "acc_stderr": 
0.017181617837190195, + "acc_norm": 0.5903187721369539, + "acc_norm_stderr": 0.01690756819221948 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + 
"harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "LDCC/LDCC-SOLAR-10.7B", + "model_sha": "43af13fd87ce9041d0a60489f7b0d357febf14de", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/LDCC/LDCC-SOLAR-10.7B/result_2024-01-04 06:09:06.json b/LDCC/LDCC-SOLAR-10.7B/result_2024-01-04 06:09:06.json new file mode 100644 index 0000000000000000000000000000000000000000..be54a561cb3873cc231ec6417fcab99cf1114490 --- /dev/null +++ b/LDCC/LDCC-SOLAR-10.7B/result_2024-01-04 06:09:06.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.44880546075085326, + "acc_stderr": 0.014534599585097667, + "acc_norm": 0.5059726962457338, + "acc_norm_stderr": 0.014610348300255793 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4371639115714001, + "acc_stderr": 0.004950221546187574, + "acc_norm": 0.6050587532364071, + "acc_norm_stderr": 0.0048783902265917105 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5730994152046783, + "acc_stderr": 0.03793620616529916, + "acc_norm": 0.5730994152046783, + 
"acc_norm_stderr": 0.03793620616529916 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6504854368932039, + "acc_stderr": 0.047211885060971716, + "acc_norm": 0.6504854368932039, + "acc_norm_stderr": 0.047211885060971716 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6372924648786717, + "acc_stderr": 0.017192708674602302, + "acc_norm": 0.6372924648786717, + "acc_norm_stderr": 0.017192708674602302 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4740740740740741, + "acc_stderr": 0.04313531696750573, + "acc_norm": 0.4740740740740741, + "acc_norm_stderr": 0.04313531696750573 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.18, + "acc_stderr": 0.038612291966536955, + "acc_norm": 0.18, + "acc_norm_stderr": 0.038612291966536955 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4595744680851064, + "acc_stderr": 0.03257901482099836, + "acc_norm": 0.4595744680851064, + "acc_norm_stderr": 0.03257901482099836 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4879518072289157, + "acc_stderr": 0.0389136449583582, + "acc_norm": 0.4879518072289157, + "acc_norm_stderr": 0.0389136449583582 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6077170418006431, + "acc_stderr": 0.027731258647011994, + "acc_norm": 0.6077170418006431, + "acc_norm_stderr": 0.027731258647011994 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5739910313901345, + "acc_stderr": 0.0331883328621728, + "acc_norm": 0.5739910313901345, + "acc_norm_stderr": 0.0331883328621728 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6030534351145038, + "acc_stderr": 0.04291135671009224, + "acc_norm": 0.6030534351145038, + "acc_norm_stderr": 0.04291135671009224 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.48, + "acc_stderr": 0.05021167315686779, + "acc_norm": 0.48, + "acc_norm_stderr": 0.05021167315686779 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7222222222222222, + "acc_stderr": 0.03191178226713545, + "acc_norm": 0.7222222222222222, + 
"acc_norm_stderr": 0.03191178226713545 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.46206896551724136, + "acc_stderr": 0.04154659671707548, + "acc_norm": 0.46206896551724136, + "acc_norm_stderr": 0.04154659671707548 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.04617034827006718, + "acc_norm": 0.3137254901960784, + "acc_norm_stderr": 0.04617034827006718 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6134453781512605, + "acc_stderr": 0.03163145807552378, + "acc_norm": 0.6134453781512605, + "acc_norm_stderr": 0.03163145807552378 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.558974358974359, + "acc_stderr": 0.025174048384000718, + "acc_norm": 0.558974358974359, + "acc_norm_stderr": 0.025174048384000718 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6388888888888888, + "acc_stderr": 0.04643454608906275, + "acc_norm": 0.6388888888888888, + "acc_norm_stderr": 0.04643454608906275 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3793103448275862, + "acc_stderr": 0.034139638059062345, + "acc_norm": 0.3793103448275862, + "acc_norm_stderr": 0.034139638059062345 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5870967741935483, + "acc_stderr": 0.028009138125400374, + "acc_norm": 0.5870967741935483, + "acc_norm_stderr": 0.028009138125400374 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7735042735042735, + "acc_stderr": 0.027421007295392926, + "acc_norm": 0.7735042735042735, + "acc_norm_stderr": 0.027421007295392926 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5245283018867924, + "acc_stderr": 
0.030735822206205615, + "acc_norm": 0.5245283018867924, + "acc_norm_stderr": 0.030735822206205615 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5909090909090909, + "acc_stderr": 0.04709306978661895, + "acc_norm": 0.5909090909090909, + "acc_norm_stderr": 0.04709306978661895 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.027940457136228402, + "acc_norm": 0.3, + "acc_norm_stderr": 0.027940457136228402 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.03802039760107903, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.03802039760107903 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7263681592039801, + "acc_stderr": 0.031524391865554044, + "acc_norm": 0.7263681592039801, + "acc_norm_stderr": 0.031524391865554044 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.48554913294797686, + "acc_stderr": 0.03810871630454764, + "acc_norm": 0.48554913294797686, + "acc_norm_stderr": 0.03810871630454764 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.4417989417989418, + "acc_stderr": 0.02557625706125383, + "acc_norm": 0.4417989417989418, + "acc_norm_stderr": 0.02557625706125383 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4791666666666667, + "acc_stderr": 0.04177578950739993, + "acc_norm": 0.4791666666666667, + "acc_norm_stderr": 0.04177578950739993 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.65, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.65, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5809248554913294, + "acc_stderr": 0.026564178111422622, + "acc_norm": 0.5809248554913294, + "acc_norm_stderr": 0.026564178111422622 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5153374233128835, + 
"acc_stderr": 0.039265223787088445, + "acc_norm": 0.5153374233128835, + "acc_norm_stderr": 0.039265223787088445 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5864197530864198, + "acc_stderr": 0.02740204204026996, + "acc_norm": 0.5864197530864198, + "acc_norm_stderr": 0.02740204204026996 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7046632124352331, + "acc_stderr": 0.032922966391551414, + "acc_norm": 0.7046632124352331, + "acc_norm_stderr": 0.032922966391551414 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.4473684210526316, + "acc_stderr": 0.046774730044912, + "acc_norm": 0.4473684210526316, + "acc_norm_stderr": 0.046774730044912 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6623853211009174, + "acc_stderr": 0.020275265986638917, + "acc_norm": 0.6623853211009174, + "acc_norm_stderr": 0.020275265986638917 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4126984126984127, + "acc_stderr": 0.04403438954768177, + "acc_norm": 0.4126984126984127, + "acc_norm_stderr": 0.04403438954768177 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5882352941176471, + "acc_stderr": 0.02818059632825929, + "acc_norm": 0.5882352941176471, + "acc_norm_stderr": 0.02818059632825929 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.61, + "acc_stderr": 0.049020713000019756, + "acc_norm": 0.61, + "acc_norm_stderr": 0.049020713000019756 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.71900826446281, + "acc_stderr": 0.04103203830514511, + "acc_norm": 0.71900826446281, + "acc_norm_stderr": 0.04103203830514511 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5263157894736842, + "acc_stderr": 0.04063302731486671, + "acc_norm": 0.5263157894736842, + "acc_norm_stderr": 0.04063302731486671 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 
0.5, + "acc_stderr": 0.020227834851568375, + "acc_norm": 0.5, + "acc_norm_stderr": 0.020227834851568375 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.40425531914893614, + "acc_stderr": 0.02927553215970473, + "acc_norm": 0.40425531914893614, + "acc_norm_stderr": 0.02927553215970473 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4107142857142857, + "acc_stderr": 0.04669510663875192, + "acc_norm": 0.4107142857142857, + "acc_norm_stderr": 0.04669510663875192 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5324074074074074, + "acc_stderr": 0.034028015813589656, + "acc_norm": 0.5324074074074074, + "acc_norm_stderr": 0.034028015813589656 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2737430167597765, + "acc_stderr": 0.014912413096372432, + "acc_norm": 0.2737430167597765, + "acc_norm_stderr": 0.014912413096372432 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.65, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.65, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.49264705882352944, + "acc_stderr": 0.030369552523902173, + "acc_norm": 0.49264705882352944, + "acc_norm_stderr": 0.030369552523902173 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6040816326530613, + "acc_stderr": 0.03130802899065686, + "acc_norm": 0.6040816326530613, + "acc_norm_stderr": 0.03130802899065686 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.759493670886076, + "acc_stderr": 0.027820781981149678, + "acc_norm": 0.759493670886076, + "acc_norm_stderr": 0.027820781981149678 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.41264667535853977, + "acc_stderr": 0.012573836633799022, + "acc_norm": 0.41264667535853977, + "acc_norm_stderr": 0.012573836633799022 + }, + 
"harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6617647058823529, + "acc_stderr": 0.0332057461294543, + "acc_norm": 0.6617647058823529, + "acc_norm_stderr": 0.0332057461294543 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.7090909090909091, + "acc_stderr": 0.03546563019624335, + "acc_norm": 0.7090909090909091, + "acc_norm_stderr": 0.03546563019624335 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.423500611995104, + "mc1_stderr": 0.01729742144853472, + "mc2": 0.5984445406996183, + "mc2_stderr": 0.015659729754718226 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.6174734356552538, + "acc_stderr": 0.01670916538722883, + "acc_norm": 0.6340023612750886, + "acc_norm_stderr": 0.016561489664895714 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + 
"harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "LDCC/LDCC-SOLAR-10.7B", + "model_sha": "43af13fd87ce9041d0a60489f7b0d357febf14de", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/LDCC/LDCC-SOLAR-10.7B/result_2024-01-12 13:01:27.json b/LDCC/LDCC-SOLAR-10.7B/result_2024-01-12 13:01:27.json new file mode 100644 index 0000000000000000000000000000000000000000..7853326b48d4da52d92b483595e692f06dd79758 --- 
/dev/null +++ b/LDCC/LDCC-SOLAR-10.7B/result_2024-01-12 13:01:27.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.49573378839590443, + "acc_stderr": 0.014610858923956959, + "acc_norm": 0.5537542662116041, + "acc_norm_stderr": 0.014526705548539987 + }, + "harness|ko_hellaswag|10": { + "acc": 0.47679745070703045, + "acc_stderr": 0.004984405935541095, + "acc_norm": 0.655646285600478, + "acc_norm_stderr": 0.004741859753178425 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5672514619883041, + "acc_stderr": 0.03799978644370608, + "acc_norm": 0.5672514619883041, + "acc_norm_stderr": 0.03799978644370608 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6699029126213593, + "acc_stderr": 0.04656147110012351, + "acc_norm": 0.6699029126213593, + "acc_norm_stderr": 0.04656147110012351 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6564495530012772, + "acc_stderr": 0.01698214563265247, + "acc_norm": 0.6564495530012772, + "acc_norm_stderr": 0.01698214563265247 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.043163785995113245, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.043163785995113245 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4765957446808511, + "acc_stderr": 0.03265019475033583, + "acc_norm": 0.4765957446808511, + "acc_norm_stderr": 0.03265019475033583 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.43373493975903615, + "acc_stderr": 0.03858158940685515, + "acc_norm": 0.43373493975903615, + "acc_norm_stderr": 0.03858158940685515 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6205787781350482, + "acc_stderr": 0.027559949802347817, + "acc_norm": 0.6205787781350482, + "acc_norm_stderr": 0.027559949802347817 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5695067264573991, + 
"acc_stderr": 0.033231973029429394, + "acc_norm": 0.5695067264573991, + "acc_norm_stderr": 0.033231973029429394 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6259541984732825, + "acc_stderr": 0.042438692422305246, + "acc_norm": 0.6259541984732825, + "acc_norm_stderr": 0.042438692422305246 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7171717171717171, + "acc_stderr": 0.03208779558786753, + "acc_norm": 0.7171717171717171, + "acc_norm_stderr": 0.03208779558786753 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4206896551724138, + "acc_stderr": 0.0411391498118926, + "acc_norm": 0.4206896551724138, + "acc_norm_stderr": 0.0411391498118926 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3235294117647059, + "acc_stderr": 0.046550104113196156, + "acc_norm": 0.3235294117647059, + "acc_norm_stderr": 0.046550104113196156 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6134453781512605, + "acc_stderr": 0.03163145807552378, + "acc_norm": 0.6134453781512605, + "acc_norm_stderr": 0.03163145807552378 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5692307692307692, + "acc_stderr": 0.02510682066053976, + "acc_norm": 0.5692307692307692, + "acc_norm_stderr": 0.02510682066053976 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6481481481481481, + "acc_stderr": 0.04616631111801713, + "acc_norm": 0.6481481481481481, + "acc_norm_stderr": 0.04616631111801713 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 
0.37438423645320196, + "acc_stderr": 0.03405155380561952, + "acc_norm": 0.37438423645320196, + "acc_norm_stderr": 0.03405155380561952 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5709677419354838, + "acc_stderr": 0.028156036538233193, + "acc_norm": 0.5709677419354838, + "acc_norm_stderr": 0.028156036538233193 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7564102564102564, + "acc_stderr": 0.02812096650391438, + "acc_norm": 0.7564102564102564, + "acc_norm_stderr": 0.02812096650391438 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5320754716981132, + "acc_stderr": 0.030709486992556545, + "acc_norm": 0.5320754716981132, + "acc_norm_stderr": 0.030709486992556545 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5909090909090909, + "acc_stderr": 0.04709306978661895, + "acc_norm": 0.5909090909090909, + "acc_norm_stderr": 0.04709306978661895 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.31851851851851853, + "acc_stderr": 0.02840653309060846, + "acc_norm": 0.31851851851851853, + "acc_norm_stderr": 0.02840653309060846 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.03710185726119995, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.03710185726119995 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7164179104477612, + "acc_stderr": 0.03187187537919795, + "acc_norm": 0.7164179104477612, + "acc_norm_stderr": 0.03187187537919795 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4913294797687861, + "acc_stderr": 0.03811890988940412, + "acc_norm": 0.4913294797687861, + "acc_norm_stderr": 0.03811890988940412 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.41798941798941797, + "acc_stderr": 0.02540255550326091, + "acc_norm": 0.41798941798941797, + "acc_norm_stderr": 0.02540255550326091 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5416666666666666, + "acc_stderr": 0.04166666666666665, + "acc_norm": 0.5416666666666666, + 
"acc_norm_stderr": 0.04166666666666665 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.65, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.65, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5924855491329479, + "acc_stderr": 0.026454578146931505, + "acc_norm": 0.5924855491329479, + "acc_norm_stderr": 0.026454578146931505 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5030674846625767, + "acc_stderr": 0.03928297078179663, + "acc_norm": 0.5030674846625767, + "acc_norm_stderr": 0.03928297078179663 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.595679012345679, + "acc_stderr": 0.027306625297327677, + "acc_norm": 0.595679012345679, + "acc_norm_stderr": 0.027306625297327677 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7098445595854922, + "acc_stderr": 0.03275264467791516, + "acc_norm": 0.7098445595854922, + "acc_norm_stderr": 0.03275264467791516 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.47368421052631576, + "acc_stderr": 0.04697085136647861, + "acc_norm": 0.47368421052631576, + "acc_norm_stderr": 0.04697085136647861 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.673394495412844, + "acc_stderr": 0.020106990889937303, + "acc_norm": 0.673394495412844, + "acc_norm_stderr": 0.020106990889937303 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.42063492063492064, + "acc_stderr": 0.04415438226743743, + "acc_norm": 0.42063492063492064, + "acc_norm_stderr": 0.04415438226743743 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.565359477124183, + "acc_stderr": 0.028384256704883037, + "acc_norm": 0.565359477124183, + 
"acc_norm_stderr": 0.028384256704883037 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.65, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.65, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7107438016528925, + "acc_stderr": 0.041391127276354626, + "acc_norm": 0.7107438016528925, + "acc_norm_stderr": 0.041391127276354626 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5197368421052632, + "acc_stderr": 0.04065771002562603, + "acc_norm": 0.5197368421052632, + "acc_norm_stderr": 0.04065771002562603 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5147058823529411, + "acc_stderr": 0.020219083895133917, + "acc_norm": 0.5147058823529411, + "acc_norm_stderr": 0.020219083895133917 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.39361702127659576, + "acc_stderr": 0.029144544781596157, + "acc_norm": 0.39361702127659576, + "acc_norm_stderr": 0.029144544781596157 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.39285714285714285, + "acc_stderr": 0.04635550135609976, + "acc_norm": 0.39285714285714285, + "acc_norm_stderr": 0.04635550135609976 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5046296296296297, + "acc_stderr": 0.03409825519163572, + "acc_norm": 0.5046296296296297, + "acc_norm_stderr": 0.03409825519163572 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.35977653631284917, + "acc_stderr": 0.016051419760310263, + "acc_norm": 0.35977653631284917, + "acc_norm_stderr": 0.016051419760310263 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.64, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.64, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5220588235294118, + "acc_stderr": 
0.03034326422421352, + "acc_norm": 0.5220588235294118, + "acc_norm_stderr": 0.03034326422421352 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5755102040816327, + "acc_stderr": 0.031642094879429414, + "acc_norm": 0.5755102040816327, + "acc_norm_stderr": 0.031642094879429414 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7679324894514767, + "acc_stderr": 0.02747974455080852, + "acc_norm": 0.7679324894514767, + "acc_norm_stderr": 0.02747974455080852 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.4276401564537158, + "acc_stderr": 0.012635799922765853, + "acc_norm": 0.4276401564537158, + "acc_norm_stderr": 0.012635799922765853 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.03308611113236436, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.03308611113236436 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6909090909090909, + "acc_stderr": 0.036085410115739666, + "acc_norm": 0.6909090909090909, + "acc_norm_stderr": 0.036085410115739666 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.4883720930232558, + "mc1_stderr": 0.017498767175740098, + "mc2": 0.6438820598507264, + "mc2_stderr": 0.015890057743773906 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5702479338842975, + "acc_stderr": 0.017019847535972205, + "acc_norm": 0.5796930342384888, + "acc_norm_stderr": 0.01697059828117771 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + 
"harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + 
"harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "LDCC/LDCC-SOLAR-10.7B", + "model_sha": "c8741ec6f4f24324a96041efaf2f627a99d946e6", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/LI-ST/Mistral-7B-ko-v0.001/result_2024-01-09 07:37:11.json b/LI-ST/Mistral-7B-ko-v0.001/result_2024-01-09 07:37:11.json new file mode 100644 index 0000000000000000000000000000000000000000..455f028240b7757144e01332df0bd5aa21af2628 --- /dev/null +++ b/LI-ST/Mistral-7B-ko-v0.001/result_2024-01-09 07:37:11.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3302047781569966, + "acc_stderr": 0.013743085603760424, + "acc_norm": 0.37627986348122866, + "acc_norm_stderr": 0.014157022555407175 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3580959968133838, + "acc_stderr": 0.004784607222774628, + "acc_norm": 0.448814977096196, + "acc_norm_stderr": 0.004963567029129058 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.03811079669833531, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.03811079669833531 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5048543689320388, + "acc_stderr": 0.04950504382128921, + "acc_norm": 0.5048543689320388, + "acc_norm_stderr": 0.04950504382128921 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4521072796934866, + "acc_stderr": 0.017797751493865626, + "acc_norm": 0.4521072796934866, + "acc_norm_stderr": 0.017797751493865626 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.32592592592592595, + "acc_stderr": 0.040491220417025055, + "acc_norm": 0.32592592592592595, + "acc_norm_stderr": 0.040491220417025055 + }, + "harness|ko_mmlu_abstract_algebra|5": { + 
"acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.34893617021276596, + "acc_stderr": 0.031158522131357766, + "acc_norm": 0.34893617021276596, + "acc_norm_stderr": 0.031158522131357766 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3855421686746988, + "acc_stderr": 0.037891344246115496, + "acc_norm": 0.3855421686746988, + "acc_norm_stderr": 0.037891344246115496 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3890675241157556, + "acc_stderr": 0.027690337536485376, + "acc_norm": 0.3890675241157556, + "acc_norm_stderr": 0.027690337536485376 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4170403587443946, + "acc_stderr": 0.03309266936071721, + "acc_norm": 0.4170403587443946, + "acc_norm_stderr": 0.03309266936071721 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.37404580152671757, + "acc_stderr": 0.04243869242230524, + "acc_norm": 0.37404580152671757, + "acc_norm_stderr": 0.04243869242230524 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939098, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939098 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.4494949494949495, + "acc_stderr": 0.03544132491947969, + "acc_norm": 0.4494949494949495, + "acc_norm_stderr": 0.03544132491947969 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4413793103448276, + "acc_stderr": 0.04137931034482758, + "acc_norm": 0.4413793103448276, + "acc_norm_stderr": 0.04137931034482758 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.04023382273617749, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.04023382273617749 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.37815126050420167, + "acc_stderr": 0.031499305777849054, + "acc_norm": 0.37815126050420167, + "acc_norm_stderr": 0.031499305777849054 + }, + 
"harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.40512820512820513, + "acc_stderr": 0.024890471769938152, + "acc_norm": 0.40512820512820513, + "acc_norm_stderr": 0.024890471769938152 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.52, + "acc_stderr": 0.05021167315686779, + "acc_norm": 0.52, + "acc_norm_stderr": 0.05021167315686779 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816505, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816505 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.42592592592592593, + "acc_stderr": 0.0478034362693679, + "acc_norm": 0.42592592592592593, + "acc_norm_stderr": 0.0478034362693679 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3497536945812808, + "acc_stderr": 0.033554009049695646, + "acc_norm": 0.3497536945812808, + "acc_norm_stderr": 0.033554009049695646 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4064516129032258, + "acc_stderr": 0.027941727346256315, + "acc_norm": 0.4064516129032258, + "acc_norm_stderr": 0.027941727346256315 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6581196581196581, + "acc_stderr": 0.031075028526507748, + "acc_norm": 0.6581196581196581, + "acc_norm_stderr": 0.031075028526507748 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3886792452830189, + "acc_stderr": 0.030000485448675986, + "acc_norm": 0.3886792452830189, + "acc_norm_stderr": 0.030000485448675986 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4090909090909091, + "acc_stderr": 0.04709306978661896, + "acc_norm": 0.4090909090909091, + "acc_norm_stderr": 0.04709306978661896 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3111111111111111, + "acc_stderr": 0.028226446749683515, + "acc_norm": 0.3111111111111111, + "acc_norm_stderr": 0.028226446749683515 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33112582781456956, + "acc_stderr": 0.038425817186598696, + "acc_norm": 0.33112582781456956, + 
"acc_norm_stderr": 0.038425817186598696 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.572139303482587, + "acc_stderr": 0.03498541988407795, + "acc_norm": 0.572139303482587, + "acc_norm_stderr": 0.03498541988407795 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3872832369942196, + "acc_stderr": 0.03714325906302065, + "acc_norm": 0.3872832369942196, + "acc_norm_stderr": 0.03714325906302065 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.025107425481137285, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.025107425481137285 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3263888888888889, + "acc_stderr": 0.03921067198982266, + "acc_norm": 0.3263888888888889, + "acc_norm_stderr": 0.03921067198982266 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.62, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.62, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.3930635838150289, + "acc_stderr": 0.02629622791561367, + "acc_norm": 0.3930635838150289, + "acc_norm_stderr": 0.02629622791561367 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4110429447852761, + "acc_stderr": 0.038656978537853624, + "acc_norm": 0.4110429447852761, + "acc_norm_stderr": 0.038656978537853624 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.39814814814814814, + "acc_stderr": 0.027237415094592474, + "acc_norm": 0.39814814814814814, + "acc_norm_stderr": 0.027237415094592474 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.04793724854411021, + "acc_norm": 0.35, + "acc_norm_stderr": 0.04793724854411021 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.44041450777202074, + "acc_stderr": 0.03582724530036094, + "acc_norm": 
0.44041450777202074, + "acc_norm_stderr": 0.03582724530036094 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2894736842105263, + "acc_stderr": 0.04266339443159394, + "acc_norm": 0.2894736842105263, + "acc_norm_stderr": 0.04266339443159394 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.42018348623853213, + "acc_stderr": 0.021162420048273515, + "acc_norm": 0.42018348623853213, + "acc_norm_stderr": 0.021162420048273515 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3492063492063492, + "acc_stderr": 0.04263906892795133, + "acc_norm": 0.3492063492063492, + "acc_norm_stderr": 0.04263906892795133 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3790849673202614, + "acc_stderr": 0.027780141207023344, + "acc_norm": 0.3790849673202614, + "acc_norm_stderr": 0.027780141207023344 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5950413223140496, + "acc_stderr": 0.04481137755942469, + "acc_norm": 0.5950413223140496, + "acc_norm_stderr": 0.04481137755942469 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3815789473684211, + "acc_stderr": 0.03953173377749194, + "acc_norm": 0.3815789473684211, + "acc_norm_stderr": 0.03953173377749194 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.01943177567703731, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.01943177567703731 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3404255319148936, + "acc_stderr": 0.02826765748265016, + "acc_norm": 0.3404255319148936, + "acc_norm_stderr": 0.02826765748265016 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3392857142857143, + "acc_stderr": 0.0449394906861354, + "acc_norm": 0.3392857142857143, + "acc_norm_stderr": 0.0449394906861354 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3101851851851852, 
+ "acc_stderr": 0.03154696285656627, + "acc_norm": 0.3101851851851852, + "acc_norm_stderr": 0.03154696285656627 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23687150837988827, + "acc_stderr": 0.014219570788103986, + "acc_norm": 0.23687150837988827, + "acc_norm_stderr": 0.014219570788103986 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3272058823529412, + "acc_stderr": 0.028501452860396573, + "acc_norm": 0.3272058823529412, + "acc_norm_stderr": 0.028501452860396573 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.47346938775510206, + "acc_stderr": 0.03196412734523272, + "acc_norm": 0.47346938775510206, + "acc_norm_stderr": 0.03196412734523272 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5569620253164557, + "acc_stderr": 0.03233532777533484, + "acc_norm": 0.5569620253164557, + "acc_norm_stderr": 0.03233532777533484 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2920469361147327, + "acc_stderr": 0.011613349136271803, + "acc_norm": 0.2920469361147327, + "acc_norm_stderr": 0.011613349136271803 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.3872549019607843, + "acc_stderr": 0.03418931233833342, + "acc_norm": 0.3872549019607843, + "acc_norm_stderr": 0.03418931233833342 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.036810508691615486, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.036810508691615486 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2729498164014688, + "mc1_stderr": 0.015594753632006506, + "mc2": 0.45470265644306807, + "mc2_stderr": 0.01566955975434091 + }, + 
"harness|ko_commongen_v2|2": { + "acc": 0.3600944510035419, + "acc_stderr": 0.016503686720440065, + "acc_norm": 0.4604486422668241, + "acc_norm_stderr": 0.017136487626049846 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + 
"harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "LI-ST/Mistral-7B-ko-v0.001", + "model_sha": "7fa29dd55c6d480bd1dd023d04bbc351d9c465c2", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/LI-ST/Mistral-7B-ko-v0.002/result_2024-01-09 07:37:19.json b/LI-ST/Mistral-7B-ko-v0.002/result_2024-01-09 07:37:19.json new file mode 100644 index 0000000000000000000000000000000000000000..b60fd8fdebf52bbe930508abef32e556ec6156b6 --- /dev/null +++ b/LI-ST/Mistral-7B-ko-v0.002/result_2024-01-09 07:37:19.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3438566552901024, + "acc_stderr": 0.013880644570156211, + "acc_norm": 0.3890784982935154, + "acc_norm_stderr": 0.014247309976045607 + }, + "harness|ko_hellaswag|10": { + "acc": 0.35331607249551883, + "acc_stderr": 0.004770229206838895, + "acc_norm": 0.4455287791276638, + "acc_norm_stderr": 0.00496008252885244 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 
0.42105263157894735, + "acc_stderr": 0.037867207062342145, + "acc_norm": 0.42105263157894735, + "acc_norm_stderr": 0.037867207062342145 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5436893203883495, + "acc_stderr": 0.049318019942204146, + "acc_norm": 0.5436893203883495, + "acc_norm_stderr": 0.049318019942204146 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4278416347381865, + "acc_stderr": 0.01769278792780373, + "acc_norm": 0.4278416347381865, + "acc_norm_stderr": 0.01769278792780373 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3925925925925926, + "acc_stderr": 0.04218506215368879, + "acc_norm": 0.3925925925925926, + "acc_norm_stderr": 0.04218506215368879 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.30638297872340425, + "acc_stderr": 0.030135906478517563, + "acc_norm": 0.30638297872340425, + "acc_norm_stderr": 0.030135906478517563 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3433734939759036, + "acc_stderr": 0.03696584317010601, + "acc_norm": 0.3433734939759036, + "acc_norm_stderr": 0.03696584317010601 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.42765273311897106, + "acc_stderr": 0.028099240775809553, + "acc_norm": 0.42765273311897106, + "acc_norm_stderr": 0.028099240775809553 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3811659192825112, + "acc_stderr": 0.03259625118416828, + "acc_norm": 0.3811659192825112, + "acc_norm_stderr": 0.03259625118416828 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4351145038167939, + "acc_stderr": 0.04348208051644858, + "acc_norm": 0.4351145038167939, + "acc_norm_stderr": 0.04348208051644858 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 
0.4797979797979798, + "acc_stderr": 0.03559443565563919, + "acc_norm": 0.4797979797979798, + "acc_norm_stderr": 0.03559443565563919 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4689655172413793, + "acc_stderr": 0.04158632762097828, + "acc_norm": 0.4689655172413793, + "acc_norm_stderr": 0.04158632762097828 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.044405219061793254, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.044405219061793254 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.03156663099215416, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.03156663099215416 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.34102564102564104, + "acc_stderr": 0.024035489676335047, + "acc_norm": 0.34102564102564104, + "acc_norm_stderr": 0.024035489676335047 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.55, + "acc_stderr": 0.04999999999999999, + "acc_norm": 0.55, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421296, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421296 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.04820403072760627, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.04820403072760627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3645320197044335, + "acc_stderr": 0.0338640574606209, + "acc_norm": 0.3645320197044335, + "acc_norm_stderr": 0.0338640574606209 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.43870967741935485, + "acc_stderr": 0.02822949732031722, + "acc_norm": 0.43870967741935485, + "acc_norm_stderr": 0.02822949732031722 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6324786324786325, + "acc_stderr": 0.031585391577456365, + "acc_norm": 0.6324786324786325, + "acc_norm_stderr": 0.031585391577456365 + }, + 
"harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4, + "acc_stderr": 0.030151134457776296, + "acc_norm": 0.4, + "acc_norm_stderr": 0.030151134457776296 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4818181818181818, + "acc_stderr": 0.04785964010794916, + "acc_norm": 0.4818181818181818, + "acc_norm_stderr": 0.04785964010794916 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2851851851851852, + "acc_stderr": 0.027528599210340492, + "acc_norm": 0.2851851851851852, + "acc_norm_stderr": 0.027528599210340492 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.03780445850526733, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.03780445850526733 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5472636815920398, + "acc_stderr": 0.03519702717576915, + "acc_norm": 0.5472636815920398, + "acc_norm_stderr": 0.03519702717576915 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.37572254335260113, + "acc_stderr": 0.036928207672648664, + "acc_norm": 0.37572254335260113, + "acc_norm_stderr": 0.036928207672648664 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3439153439153439, + "acc_stderr": 0.024464426625596426, + "acc_norm": 0.3439153439153439, + "acc_norm_stderr": 0.024464426625596426 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3402777777777778, + "acc_stderr": 0.039621355734862175, + "acc_norm": 0.3402777777777778, + "acc_norm_stderr": 0.039621355734862175 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.55, + "acc_stderr": 0.05, + "acc_norm": 0.55, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.42196531791907516, + "acc_stderr": 0.02658923114217426, + "acc_norm": 0.42196531791907516, + "acc_norm_stderr": 0.02658923114217426 + }, + 
"harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.38650306748466257, + "acc_stderr": 0.038258255488486076, + "acc_norm": 0.38650306748466257, + "acc_norm_stderr": 0.038258255488486076 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4506172839506173, + "acc_stderr": 0.0276847214156562, + "acc_norm": 0.4506172839506173, + "acc_norm_stderr": 0.0276847214156562 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.42487046632124353, + "acc_stderr": 0.035674713352125395, + "acc_norm": 0.42487046632124353, + "acc_norm_stderr": 0.035674713352125395 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3157894736842105, + "acc_stderr": 0.04372748290278008, + "acc_norm": 0.3157894736842105, + "acc_norm_stderr": 0.04372748290278008 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.46422018348623856, + "acc_stderr": 0.0213823647757019, + "acc_norm": 0.46422018348623856, + "acc_norm_stderr": 0.0213823647757019 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3253968253968254, + "acc_stderr": 0.041905964388711366, + "acc_norm": 0.3253968253968254, + "acc_norm_stderr": 0.041905964388711366 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4019607843137255, + "acc_stderr": 0.028074158947600653, + "acc_norm": 0.4019607843137255, + "acc_norm_stderr": 0.028074158947600653 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5950413223140496, + "acc_stderr": 0.04481137755942469, + "acc_norm": 0.5950413223140496, + "acc_norm_stderr": 0.04481137755942469 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3355263157894737, + "acc_stderr": 0.038424985593952674, + "acc_norm": 0.3355263157894737, + "acc_norm_stderr": 
0.038424985593952674 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.34477124183006536, + "acc_stderr": 0.019228322018696644, + "acc_norm": 0.34477124183006536, + "acc_norm_stderr": 0.019228322018696644 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.02812163604063989, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.02812163604063989 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.29464285714285715, + "acc_stderr": 0.0432704093257873, + "acc_norm": 0.29464285714285715, + "acc_norm_stderr": 0.0432704093257873 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.032757734861009996, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.032757734861009996 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23687150837988827, + "acc_stderr": 0.014219570788103986, + "acc_norm": 0.23687150837988827, + "acc_norm_stderr": 0.014219570788103986 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.375, + "acc_stderr": 0.029408372932278746, + "acc_norm": 0.375, + "acc_norm_stderr": 0.029408372932278746 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.363265306122449, + "acc_stderr": 0.030789051139030806, + "acc_norm": 0.363265306122449, + "acc_norm_stderr": 0.030789051139030806 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.48523206751054854, + "acc_stderr": 0.032533028078777386, + "acc_norm": 0.48523206751054854, + "acc_norm_stderr": 0.032533028078777386 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2907431551499348, + "acc_stderr": 
0.011598062372851981, + "acc_norm": 0.2907431551499348, + "acc_norm_stderr": 0.011598062372851981 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.3872549019607843, + "acc_stderr": 0.03418931233833342, + "acc_norm": 0.3872549019607843, + "acc_norm_stderr": 0.03418931233833342 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.296969696969697, + "acc_stderr": 0.03567969772268049, + "acc_norm": 0.296969696969697, + "acc_norm_stderr": 0.03567969772268049 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.29865361077111385, + "mc1_stderr": 0.016021570613768542, + "mc2": 0.4863566236457324, + "mc2_stderr": 0.01602375681264972 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.38016528925619836, + "acc_stderr": 0.01668933359698011, + "acc_norm": 0.4675324675324675, + "acc_norm_stderr": 0.01715407371668286 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + 
"harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "LI-ST/Mistral-7B-ko-v0.002", + "model_sha": "0865e8e478e51f8872f96340d22109e6f86025aa", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/LI-ST/Mistral-7B-ko-v0.003/result_2024-01-09 07:37:24.json 
b/LI-ST/Mistral-7B-ko-v0.003/result_2024-01-09 07:37:24.json new file mode 100644 index 0000000000000000000000000000000000000000..9999aae21a071ce29d67fb70ea40848e8657b307 --- /dev/null +++ b/LI-ST/Mistral-7B-ko-v0.003/result_2024-01-09 07:37:24.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3447098976109215, + "acc_stderr": 0.013888816286782112, + "acc_norm": 0.4069965870307167, + "acc_norm_stderr": 0.014356399418009135 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3681537542322247, + "acc_stderr": 0.004813177057496271, + "acc_norm": 0.47082254530969925, + "acc_norm_stderr": 0.00498127832642802 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.47953216374269003, + "acc_stderr": 0.038316105328219316, + "acc_norm": 0.47953216374269003, + "acc_norm_stderr": 0.038316105328219316 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5242718446601942, + "acc_stderr": 0.049449010929737795, + "acc_norm": 0.5242718446601942, + "acc_norm_stderr": 0.049449010929737795 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.45849297573435505, + "acc_stderr": 0.017818248603465554, + "acc_norm": 0.45849297573435505, + "acc_norm_stderr": 0.017818248603465554 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.31851851851851853, + "acc_stderr": 0.0402477840197711, + "acc_norm": 0.31851851851851853, + "acc_norm_stderr": 0.0402477840197711 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.37872340425531914, + "acc_stderr": 0.03170995606040655, + "acc_norm": 0.37872340425531914, + "acc_norm_stderr": 0.03170995606040655 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39759036144578314, + "acc_stderr": 0.038099730845402184, + "acc_norm": 0.39759036144578314, + "acc_norm_stderr": 0.038099730845402184 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4340836012861736, + 
"acc_stderr": 0.0281502322445356, + "acc_norm": 0.4340836012861736, + "acc_norm_stderr": 0.0281502322445356 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.40358744394618834, + "acc_stderr": 0.03292802819330315, + "acc_norm": 0.40358744394618834, + "acc_norm_stderr": 0.03292802819330315 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48854961832061067, + "acc_stderr": 0.043841400240780176, + "acc_norm": 0.48854961832061067, + "acc_norm_stderr": 0.043841400240780176 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.45, + "acc_stderr": 0.04999999999999999, + "acc_norm": 0.45, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5909090909090909, + "acc_stderr": 0.03502975799413008, + "acc_norm": 0.5909090909090909, + "acc_norm_stderr": 0.03502975799413008 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4068965517241379, + "acc_stderr": 0.040937939812662374, + "acc_norm": 0.4068965517241379, + "acc_norm_stderr": 0.040937939812662374 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.04835503696107223, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.04835503696107223 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4789915966386555, + "acc_stderr": 0.03244980849990029, + "acc_norm": 0.4789915966386555, + "acc_norm_stderr": 0.03244980849990029 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.43846153846153846, + "acc_stderr": 0.02515826601686856, + "acc_norm": 0.43846153846153846, + "acc_norm_stderr": 0.02515826601686856 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 
0.4444444444444444, + "acc_stderr": 0.04803752235190192, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.04803752235190192 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3645320197044335, + "acc_stderr": 0.0338640574606209, + "acc_norm": 0.3645320197044335, + "acc_norm_stderr": 0.0338640574606209 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.432258064516129, + "acc_stderr": 0.028181739720019413, + "acc_norm": 0.432258064516129, + "acc_norm_stderr": 0.028181739720019413 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6965811965811965, + "acc_stderr": 0.03011821010694263, + "acc_norm": 0.6965811965811965, + "acc_norm_stderr": 0.03011821010694263 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.41132075471698115, + "acc_stderr": 0.030285009259009812, + "acc_norm": 0.41132075471698115, + "acc_norm_stderr": 0.030285009259009812 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.43636363636363634, + "acc_stderr": 0.04750185058907297, + "acc_norm": 0.43636363636363634, + "acc_norm_stderr": 0.04750185058907297 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.027940457136228402, + "acc_norm": 0.3, + "acc_norm_stderr": 0.027940457136228402 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + "acc_stderr": 0.037345356767871984, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.037345356767871984 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5671641791044776, + "acc_stderr": 0.03503490923673282, + "acc_norm": 0.5671641791044776, + "acc_norm_stderr": 0.03503490923673282 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3988439306358382, + "acc_stderr": 0.03733626655383509, + "acc_norm": 0.3988439306358382, + "acc_norm_stderr": 0.03733626655383509 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.31746031746031744, + "acc_stderr": 0.02397386199899207, + "acc_norm": 0.31746031746031744, + "acc_norm_stderr": 
0.02397386199899207 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3263888888888889, + "acc_stderr": 0.03921067198982266, + "acc_norm": 0.3263888888888889, + "acc_norm_stderr": 0.03921067198982266 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.45375722543352603, + "acc_stderr": 0.02680372058320619, + "acc_norm": 0.45375722543352603, + "acc_norm_stderr": 0.02680372058320619 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3803680981595092, + "acc_stderr": 0.03814269893261837, + "acc_norm": 0.3803680981595092, + "acc_norm_stderr": 0.03814269893261837 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.02743162372241502, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.02743162372241502 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.43005181347150256, + "acc_stderr": 0.03572954333144809, + "acc_norm": 0.43005181347150256, + "acc_norm_stderr": 0.03572954333144809 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2894736842105263, + "acc_stderr": 0.04266339443159394, + "acc_norm": 0.2894736842105263, + "acc_norm_stderr": 0.04266339443159394 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.46605504587155966, + "acc_stderr": 0.021387863350353996, + "acc_norm": 0.46605504587155966, + "acc_norm_stderr": 0.021387863350353996 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4126984126984127, + "acc_stderr": 0.04403438954768177, + "acc_norm": 0.4126984126984127, + 
"acc_norm_stderr": 0.04403438954768177 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.45751633986928103, + "acc_stderr": 0.028526383452142638, + "acc_norm": 0.45751633986928103, + "acc_norm_stderr": 0.028526383452142638 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.47, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.47, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6115702479338843, + "acc_stderr": 0.044492703500683815, + "acc_norm": 0.6115702479338843, + "acc_norm_stderr": 0.044492703500683815 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.39473684210526316, + "acc_stderr": 0.03977749934622074, + "acc_norm": 0.39473684210526316, + "acc_norm_stderr": 0.03977749934622074 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.36437908496732024, + "acc_stderr": 0.019469518221573702, + "acc_norm": 0.36437908496732024, + "acc_norm_stderr": 0.019469518221573702 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.28368794326241137, + "acc_stderr": 0.02689170942834396, + "acc_norm": 0.28368794326241137, + "acc_norm_stderr": 0.02689170942834396 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.38392857142857145, + "acc_stderr": 0.04616143075028546, + "acc_norm": 0.38392857142857145, + "acc_norm_stderr": 0.04616143075028546 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.44907407407407407, + "acc_stderr": 0.03392238405321617, + "acc_norm": 0.44907407407407407, + "acc_norm_stderr": 0.03392238405321617 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24692737430167597, + "acc_stderr": 0.014422292204808852, + "acc_norm": 0.24692737430167597, + "acc_norm_stderr": 0.014422292204808852 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.04960449637488584, + "acc_norm": 0.42, + "acc_norm_stderr": 0.04960449637488584 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.48, + "acc_stderr": 
0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4338235294117647, + "acc_stderr": 0.030105636570016647, + "acc_norm": 0.4338235294117647, + "acc_norm_stderr": 0.030105636570016647 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4897959183673469, + "acc_stderr": 0.032002553478937816, + "acc_norm": 0.4897959183673469, + "acc_norm_stderr": 0.032002553478937816 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5316455696202531, + "acc_stderr": 0.032481974005110756, + "acc_norm": 0.5316455696202531, + "acc_norm_stderr": 0.032481974005110756 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.30638852672750977, + "acc_stderr": 0.011773980329380694, + "acc_norm": 0.30638852672750977, + "acc_norm_stderr": 0.011773980329380694 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4068627450980392, + "acc_stderr": 0.03447891136353382, + "acc_norm": 0.4068627450980392, + "acc_norm_stderr": 0.03447891136353382 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.38181818181818183, + "acc_stderr": 0.03793713171165634, + "acc_norm": 0.38181818181818183, + "acc_norm_stderr": 0.03793713171165634 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2827417380660955, + "mc1_stderr": 0.015764770836777308, + "mc2": 0.4510506680374379, + "mc2_stderr": 0.015547368837733567 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.42502951593860683, + "acc_stderr": 0.016996016308362887, + "acc_norm": 0.5360094451003542, + "acc_norm_stderr": 0.017145715365486664 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + 
"harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 
1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "LI-ST/Mistral-7B-ko-v0.003", + "model_sha": "adc7c6aed876f04edaed3bbeba7fa4fdb993091c", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/LI-ST/Mistral-7B-ko-v0.004/result_2024-01-09 07:37:28.json b/LI-ST/Mistral-7B-ko-v0.004/result_2024-01-09 07:37:28.json new file mode 100644 index 0000000000000000000000000000000000000000..d2227d7ce3bba891768c6ee85f9c66256053d255 --- /dev/null +++ b/LI-ST/Mistral-7B-ko-v0.004/result_2024-01-09 07:37:28.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3310580204778157, + "acc_stderr": 0.013752062419817836, + "acc_norm": 0.3848122866894198, + "acc_norm_stderr": 0.014218371065251117 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3650667197769369, + "acc_stderr": 0.0048046491971637005, + "acc_norm": 0.4547898824935272, + "acc_norm_stderr": 0.004969341773423513 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.03811079669833531, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.03811079669833531 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.49514563106796117, + "acc_stderr": 0.049505043821289195, + "acc_norm": 0.49514563106796117, + "acc_norm_stderr": 0.049505043821289195 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4342273307790549, + "acc_stderr": 0.017724589389677785, + "acc_norm": 0.4342273307790549, + "acc_norm_stderr": 0.017724589389677785 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3111111111111111, + 
"acc_stderr": 0.039992628766177214, + "acc_norm": 0.3111111111111111, + "acc_norm_stderr": 0.039992628766177214 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.35319148936170214, + "acc_stderr": 0.03124532520276193, + "acc_norm": 0.35319148936170214, + "acc_norm_stderr": 0.03124532520276193 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3614457831325301, + "acc_stderr": 0.0374005938202932, + "acc_norm": 0.3614457831325301, + "acc_norm_stderr": 0.0374005938202932 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.38263665594855306, + "acc_stderr": 0.027604689028581993, + "acc_norm": 0.38263665594855306, + "acc_norm_stderr": 0.027604689028581993 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.42152466367713004, + "acc_stderr": 0.033141902221106564, + "acc_norm": 0.42152466367713004, + "acc_norm_stderr": 0.033141902221106564 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.37404580152671757, + "acc_stderr": 0.042438692422305246, + "acc_norm": 0.37404580152671757, + "acc_norm_stderr": 0.042438692422305246 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.41919191919191917, + "acc_stderr": 0.035155207286704175, + "acc_norm": 0.41919191919191917, + "acc_norm_stderr": 0.035155207286704175 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.35172413793103446, + "acc_stderr": 0.0397923663749741, + "acc_norm": 0.35172413793103446, + "acc_norm_stderr": 0.0397923663749741 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.19607843137254902, + "acc_stderr": 0.03950581861179961, + "acc_norm": 0.19607843137254902, + "acc_norm_stderr": 0.03950581861179961 + }, + 
"harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.39915966386554624, + "acc_stderr": 0.03181110032413926, + "acc_norm": 0.39915966386554624, + "acc_norm_stderr": 0.03181110032413926 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.40512820512820513, + "acc_stderr": 0.02489047176993815, + "acc_norm": 0.40512820512820513, + "acc_norm_stderr": 0.02489047176993815 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.04826217294139894, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.04826217294139894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.35960591133004927, + "acc_stderr": 0.03376458246509568, + "acc_norm": 0.35960591133004927, + "acc_norm_stderr": 0.03376458246509568 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.42258064516129035, + "acc_stderr": 0.02810096472427264, + "acc_norm": 0.42258064516129035, + "acc_norm_stderr": 0.02810096472427264 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5897435897435898, + "acc_stderr": 0.03222414045241107, + "acc_norm": 0.5897435897435898, + "acc_norm_stderr": 0.03222414045241107 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4679245283018868, + "acc_stderr": 0.030709486992556545, + "acc_norm": 0.4679245283018868, + "acc_norm_stderr": 0.030709486992556545 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.39090909090909093, + "acc_stderr": 0.04673752333670237, + "acc_norm": 0.39090909090909093, + "acc_norm_stderr": 0.04673752333670237 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.026719240783712166, + "acc_norm": 
0.25925925925925924, + "acc_norm_stderr": 0.026719240783712166 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.23841059602649006, + "acc_stderr": 0.03479185572599661, + "acc_norm": 0.23841059602649006, + "acc_norm_stderr": 0.03479185572599661 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.4626865671641791, + "acc_stderr": 0.03525675167467974, + "acc_norm": 0.4626865671641791, + "acc_norm_stderr": 0.03525675167467974 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.36416184971098264, + "acc_stderr": 0.03669072477416906, + "acc_norm": 0.36416184971098264, + "acc_norm_stderr": 0.03669072477416906 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30158730158730157, + "acc_stderr": 0.023636975996101806, + "acc_norm": 0.30158730158730157, + "acc_norm_stderr": 0.023636975996101806 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2847222222222222, + "acc_stderr": 0.037738099906869334, + "acc_norm": 0.2847222222222222, + "acc_norm_stderr": 0.037738099906869334 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.53, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.53, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4161849710982659, + "acc_stderr": 0.026538189104705477, + "acc_norm": 0.4161849710982659, + "acc_norm_stderr": 0.026538189104705477 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4110429447852761, + "acc_stderr": 0.038656978537853624, + "acc_norm": 0.4110429447852761, + "acc_norm_stderr": 0.038656978537853624 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3549382716049383, + "acc_stderr": 0.02662415247884585, + "acc_norm": 0.3549382716049383, + "acc_norm_stderr": 0.02662415247884585 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 
0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.44559585492227977, + "acc_stderr": 0.03587014986075659, + "acc_norm": 0.44559585492227977, + "acc_norm_stderr": 0.03587014986075659 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.04096985139843671, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.04096985139843671 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.45688073394495415, + "acc_stderr": 0.021357458785226203, + "acc_norm": 0.45688073394495415, + "acc_norm_stderr": 0.021357458785226203 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.0404061017820884, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.0404061017820884 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3660130718954248, + "acc_stderr": 0.0275828114151596, + "acc_norm": 0.3660130718954248, + "acc_norm_stderr": 0.0275828114151596 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5371900826446281, + "acc_stderr": 0.04551711196104218, + "acc_norm": 0.5371900826446281, + "acc_norm_stderr": 0.04551711196104218 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3157894736842105, + "acc_stderr": 0.0378272898086547, + "acc_norm": 0.3157894736842105, + "acc_norm_stderr": 0.0378272898086547 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.31862745098039214, + "acc_stderr": 0.018850084696468705, + "acc_norm": 0.31862745098039214, + "acc_norm_stderr": 0.018850084696468705 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2907801418439716, + "acc_stderr": 0.027090664368353178, + "acc_norm": 0.2907801418439716, + "acc_norm_stderr": 0.027090664368353178 + }, + 
"harness|ko_mmlu_machine_learning|5": { + "acc": 0.38392857142857145, + "acc_stderr": 0.04616143075028547, + "acc_norm": 0.38392857142857145, + "acc_norm_stderr": 0.04616143075028547 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.35648148148148145, + "acc_stderr": 0.032664783315272714, + "acc_norm": 0.35648148148148145, + "acc_norm_stderr": 0.032664783315272714 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24134078212290502, + "acc_stderr": 0.014310999547961459, + "acc_norm": 0.24134078212290502, + "acc_norm_stderr": 0.014310999547961459 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3860294117647059, + "acc_stderr": 0.029573269134411124, + "acc_norm": 0.3860294117647059, + "acc_norm_stderr": 0.029573269134411124 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.37142857142857144, + "acc_stderr": 0.03093285879278985, + "acc_norm": 0.37142857142857144, + "acc_norm_stderr": 0.03093285879278985 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5316455696202531, + "acc_stderr": 0.032481974005110756, + "acc_norm": 0.5316455696202531, + "acc_norm_stderr": 0.032481974005110756 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3116036505867014, + "acc_stderr": 0.011829039182849648, + "acc_norm": 0.3116036505867014, + "acc_norm_stderr": 0.011829039182849648 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.39705882352941174, + "acc_stderr": 0.03434131164719128, + "acc_norm": 0.39705882352941174, + "acc_norm_stderr": 0.03434131164719128 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3575757575757576, + "acc_stderr": 
0.037425970438065864, + "acc_norm": 0.3575757575757576, + "acc_norm_stderr": 0.037425970438065864 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2998776009791922, + "mc1_stderr": 0.016040352966713606, + "mc2": 0.4749868563072917, + "mc2_stderr": 0.015742730178250185 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3789846517119244, + "acc_stderr": 0.016679260684229286, + "acc_norm": 0.48406139315230223, + "acc_norm_stderr": 0.017181617837190195 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + 
"harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "LI-ST/Mistral-7B-ko-v0.004", + "model_sha": "7f80f3cf2ad264fe73a1934824845e9aa7aa2451", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/LI-ST/Mistral-7B-ko-v0.005/result_2024-01-09 07:37:32.json b/LI-ST/Mistral-7B-ko-v0.005/result_2024-01-09 07:37:32.json new file mode 100644 index 0000000000000000000000000000000000000000..02f88f7a76d50bcdcf84f01f6a8953cb9ab8de6f --- /dev/null +++ b/LI-ST/Mistral-7B-ko-v0.005/result_2024-01-09 07:37:32.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.35238907849829354, + "acc_stderr": 0.013960142600598678, + "acc_norm": 0.3993174061433447, + 
"acc_norm_stderr": 0.014312094557946707 + }, + "harness|ko_hellaswag|10": { + "acc": 0.36914957179844654, + "acc_stderr": 0.004815882719278385, + "acc_norm": 0.469627564230233, + "acc_norm_stderr": 0.004980566907790455 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.47368421052631576, + "acc_stderr": 0.038295098689947286, + "acc_norm": 0.47368421052631576, + "acc_norm_stderr": 0.038295098689947286 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5048543689320388, + "acc_stderr": 0.04950504382128921, + "acc_norm": 0.5048543689320388, + "acc_norm_stderr": 0.04950504382128921 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.438058748403576, + "acc_stderr": 0.017742232238257223, + "acc_norm": 0.438058748403576, + "acc_norm_stderr": 0.017742232238257223 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4, + "acc_stderr": 0.042320736951515885, + "acc_norm": 0.4, + "acc_norm_stderr": 0.042320736951515885 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3446808510638298, + "acc_stderr": 0.031068985963122155, + "acc_norm": 0.3446808510638298, + "acc_norm_stderr": 0.031068985963122155 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3855421686746988, + "acc_stderr": 0.03789134424611548, + "acc_norm": 0.3855421686746988, + "acc_norm_stderr": 0.03789134424611548 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4758842443729904, + "acc_stderr": 0.028365041542564563, + "acc_norm": 0.4758842443729904, + "acc_norm_stderr": 0.028365041542564563 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.47085201793721976, + "acc_stderr": 0.03350073248773404, + "acc_norm": 0.47085201793721976, + "acc_norm_stderr": 0.03350073248773404 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4732824427480916, + "acc_stderr": 0.04379024936553894, + "acc_norm": 0.4732824427480916, + "acc_norm_stderr": 
0.04379024936553894 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5353535353535354, + "acc_stderr": 0.03553436368828064, + "acc_norm": 0.5353535353535354, + "acc_norm_stderr": 0.03553436368828064 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.47586206896551725, + "acc_stderr": 0.041618085035015295, + "acc_norm": 0.47586206896551725, + "acc_norm_stderr": 0.041618085035015295 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.04336432707993177, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.04336432707993177 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.46218487394957986, + "acc_stderr": 0.032385469487589795, + "acc_norm": 0.46218487394957986, + "acc_norm_stderr": 0.032385469487589795 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4230769230769231, + "acc_stderr": 0.025049197876042335, + "acc_norm": 0.4230769230769231, + "acc_norm_stderr": 0.025049197876042335 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.55, + "acc_stderr": 0.049999999999999996, + "acc_norm": 0.55, + "acc_norm_stderr": 0.049999999999999996 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.04803752235190192, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.04803752235190192 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.37438423645320196, + "acc_stderr": 0.03405155380561952, + "acc_norm": 0.37438423645320196, + "acc_norm_stderr": 0.03405155380561952 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.44193548387096776, + "acc_stderr": 0.02825155790684974, + "acc_norm": 
0.44193548387096776, + "acc_norm_stderr": 0.02825155790684974 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6837606837606838, + "acc_stderr": 0.03046365674734027, + "acc_norm": 0.6837606837606838, + "acc_norm_stderr": 0.03046365674734027 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.44150943396226416, + "acc_stderr": 0.030561590426731833, + "acc_norm": 0.44150943396226416, + "acc_norm_stderr": 0.030561590426731833 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.45454545454545453, + "acc_stderr": 0.04769300568972743, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.04769300568972743 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2740740740740741, + "acc_stderr": 0.027195934804085622, + "acc_norm": 0.2740740740740741, + "acc_norm_stderr": 0.027195934804085622 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.038020397601079024, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.038020397601079024 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5572139303482587, + "acc_stderr": 0.03512310964123936, + "acc_norm": 0.5572139303482587, + "acc_norm_stderr": 0.03512310964123936 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3930635838150289, + "acc_stderr": 0.0372424959581773, + "acc_norm": 0.3930635838150289, + "acc_norm_stderr": 0.0372424959581773 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.024594975128920938, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.024594975128920938 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3402777777777778, + "acc_stderr": 0.03962135573486219, + "acc_norm": 0.3402777777777778, + "acc_norm_stderr": 0.03962135573486219 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.67, + 
"acc_stderr": 0.047258156262526066, + "acc_norm": 0.67, + "acc_norm_stderr": 0.047258156262526066 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.45375722543352603, + "acc_stderr": 0.026803720583206188, + "acc_norm": 0.45375722543352603, + "acc_norm_stderr": 0.026803720583206188 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3987730061349693, + "acc_stderr": 0.038470214204560246, + "acc_norm": 0.3987730061349693, + "acc_norm_stderr": 0.038470214204560246 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.027431623722415015, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.027431623722415015 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5025906735751295, + "acc_stderr": 0.03608390745384487, + "acc_norm": 0.5025906735751295, + "acc_norm_stderr": 0.03608390745384487 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.04227054451232199, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.04227054451232199 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.45871559633027525, + "acc_stderr": 0.02136412253388169, + "acc_norm": 0.45871559633027525, + "acc_norm_stderr": 0.02136412253388169 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.36507936507936506, + "acc_stderr": 0.043062412591271526, + "acc_norm": 0.36507936507936506, + "acc_norm_stderr": 0.043062412591271526 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5032679738562091, + "acc_stderr": 0.028629305194003543, + "acc_norm": 0.5032679738562091, + "acc_norm_stderr": 0.028629305194003543 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_international_law|5": { + 
"acc": 0.5785123966942148, + "acc_stderr": 0.04507732278775089, + "acc_norm": 0.5785123966942148, + "acc_norm_stderr": 0.04507732278775089 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4407894736842105, + "acc_stderr": 0.04040311062490436, + "acc_norm": 0.4407894736842105, + "acc_norm_stderr": 0.04040311062490436 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.37254901960784315, + "acc_stderr": 0.019559646809215937, + "acc_norm": 0.37254901960784315, + "acc_norm_stderr": 0.019559646809215937 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3120567375886525, + "acc_stderr": 0.027640120545169927, + "acc_norm": 0.3120567375886525, + "acc_norm_stderr": 0.027640120545169927 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4017857142857143, + "acc_stderr": 0.04653333146973646, + "acc_norm": 0.4017857142857143, + "acc_norm_stderr": 0.04653333146973646 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4305555555555556, + "acc_stderr": 0.03376922151252335, + "acc_norm": 0.4305555555555556, + "acc_norm_stderr": 0.03376922151252335 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27262569832402234, + "acc_stderr": 0.014893391735249603, + "acc_norm": 0.27262569832402234, + "acc_norm_stderr": 0.014893391735249603 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.53, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.53, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.41911764705882354, + "acc_stderr": 0.029972807170464626, + "acc_norm": 0.41911764705882354, + "acc_norm_stderr": 0.029972807170464626 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4897959183673469, + "acc_stderr": 0.032002553478937816, + "acc_norm": 0.4897959183673469, + "acc_norm_stderr": 
0.032002553478937816 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5738396624472574, + "acc_stderr": 0.03219035703131774, + "acc_norm": 0.5738396624472574, + "acc_norm_stderr": 0.03219035703131774 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3109517601043025, + "acc_stderr": 0.011822252917799201, + "acc_norm": 0.3109517601043025, + "acc_norm_stderr": 0.011822252917799201 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.43137254901960786, + "acc_stderr": 0.03476099060501636, + "acc_norm": 0.43137254901960786, + "acc_norm_stderr": 0.03476099060501636 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3575757575757576, + "acc_stderr": 0.03742597043806587, + "acc_norm": 0.3575757575757576, + "acc_norm_stderr": 0.03742597043806587 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3047735618115055, + "mc1_stderr": 0.01611412415688246, + "mc2": 0.4639508584743867, + "mc2_stderr": 0.015654961139140635 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3778040141676505, + "acc_stderr": 0.016669082840694967, + "acc_norm": 0.5017709563164109, + "acc_norm_stderr": 0.01719024627623186 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + 
"harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "LI-ST/Mistral-7B-ko-v0.005", + "model_sha": "bfa2a68f96a69f45defb220e2bf4ffc31e6633f1", + 
"model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/LI-ST/Mistral-7B-ko-v0.006/result_2024-01-09 07:37:36.json b/LI-ST/Mistral-7B-ko-v0.006/result_2024-01-09 07:37:36.json new file mode 100644 index 0000000000000000000000000000000000000000..ec869e4c1bf3a7c9ad3afdc8084ea5260c919820 --- /dev/null +++ b/LI-ST/Mistral-7B-ko-v0.006/result_2024-01-09 07:37:36.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3293515358361775, + "acc_stderr": 0.013734057652635474, + "acc_norm": 0.3890784982935154, + "acc_norm_stderr": 0.014247309976045605 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3681537542322247, + "acc_stderr": 0.004813177057496272, + "acc_norm": 0.4651463851822346, + "acc_norm_stderr": 0.004977643730848598 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4269005847953216, + "acc_stderr": 0.03793620616529917, + "acc_norm": 0.4269005847953216, + "acc_norm_stderr": 0.03793620616529917 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.47572815533980584, + "acc_stderr": 0.049449010929737795, + "acc_norm": 0.47572815533980584, + "acc_norm_stderr": 0.049449010929737795 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4355044699872286, + "acc_stderr": 0.01773058992792661, + "acc_norm": 0.4355044699872286, + "acc_norm_stderr": 0.01773058992792661 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4, + "acc_stderr": 0.042320736951515885, + "acc_norm": 0.4, + "acc_norm_stderr": 0.042320736951515885 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.35319148936170214, + "acc_stderr": 0.031245325202761926, + "acc_norm": 0.35319148936170214, + "acc_norm_stderr": 0.031245325202761926 + }, + 
"harness|ko_mmlu_virology|5": { + "acc": 0.35542168674698793, + "acc_stderr": 0.03726214354322415, + "acc_norm": 0.35542168674698793, + "acc_norm_stderr": 0.03726214354322415 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.39228295819935693, + "acc_stderr": 0.027731258647011994, + "acc_norm": 0.39228295819935693, + "acc_norm_stderr": 0.027731258647011994 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.37668161434977576, + "acc_stderr": 0.03252113489929188, + "acc_norm": 0.37668161434977576, + "acc_norm_stderr": 0.03252113489929188 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.37404580152671757, + "acc_stderr": 0.042438692422305246, + "acc_norm": 0.37404580152671757, + "acc_norm_stderr": 0.042438692422305246 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5303030303030303, + "acc_stderr": 0.03555804051763929, + "acc_norm": 0.5303030303030303, + "acc_norm_stderr": 0.03555804051763929 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.04104269211806232, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.04104269211806232 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.17647058823529413, + "acc_stderr": 0.0379328118530781, + "acc_norm": 0.17647058823529413, + "acc_norm_stderr": 0.0379328118530781 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.03214536859788639, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.03214536859788639 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.37435897435897436, + "acc_stderr": 0.024537591572830517, + "acc_norm": 0.37435897435897436, + "acc_norm_stderr": 0.024537591572830517 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.52, + "acc_stderr": 0.05021167315686779, + "acc_norm": 0.52, + 
"acc_norm_stderr": 0.05021167315686779 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3497536945812808, + "acc_stderr": 0.03355400904969566, + "acc_norm": 0.3497536945812808, + "acc_norm_stderr": 0.03355400904969566 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4064516129032258, + "acc_stderr": 0.027941727346256308, + "acc_norm": 0.4064516129032258, + "acc_norm_stderr": 0.027941727346256308 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6239316239316239, + "acc_stderr": 0.03173393632969481, + "acc_norm": 0.6239316239316239, + "acc_norm_stderr": 0.03173393632969481 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.37735849056603776, + "acc_stderr": 0.029832808114796005, + "acc_norm": 0.37735849056603776, + "acc_norm_stderr": 0.029832808114796005 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.43636363636363634, + "acc_stderr": 0.04750185058907296, + "acc_norm": 0.43636363636363634, + "acc_norm_stderr": 0.04750185058907296 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.027309140588230186, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.027309140588230186 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.25165562913907286, + "acc_stderr": 0.03543304234389985, + "acc_norm": 0.25165562913907286, + "acc_norm_stderr": 0.03543304234389985 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5174129353233831, + "acc_stderr": 0.03533389234739245, + "acc_norm": 0.5174129353233831, + "acc_norm_stderr": 0.03533389234739245 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3583815028901734, + "acc_stderr": 0.03656343653353159, + "acc_norm": 
0.3583815028901734, + "acc_norm_stderr": 0.03656343653353159 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30423280423280424, + "acc_stderr": 0.02369541500946308, + "acc_norm": 0.30423280423280424, + "acc_norm_stderr": 0.02369541500946308 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3194444444444444, + "acc_stderr": 0.03899073687357335, + "acc_norm": 0.3194444444444444, + "acc_norm_stderr": 0.03899073687357335 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4277456647398844, + "acc_stderr": 0.026636539741116082, + "acc_norm": 0.4277456647398844, + "acc_norm_stderr": 0.026636539741116082 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.38650306748466257, + "acc_stderr": 0.038258255488486076, + "acc_norm": 0.38650306748466257, + "acc_norm_stderr": 0.038258255488486076 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3734567901234568, + "acc_stderr": 0.02691500301138015, + "acc_norm": 0.3734567901234568, + "acc_norm_stderr": 0.02691500301138015 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.46113989637305697, + "acc_stderr": 0.035975244117345775, + "acc_norm": 0.46113989637305697, + "acc_norm_stderr": 0.035975244117345775 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.0409698513984367, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.0409698513984367 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.42752293577981654, + "acc_stderr": 
0.02121091020430043, + "acc_norm": 0.42752293577981654, + "acc_norm_stderr": 0.02121091020430043 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.29365079365079366, + "acc_stderr": 0.04073524322147125, + "acc_norm": 0.29365079365079366, + "acc_norm_stderr": 0.04073524322147125 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4117647058823529, + "acc_stderr": 0.02818059632825929, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.02818059632825929 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6115702479338843, + "acc_stderr": 0.044492703500683815, + "acc_norm": 0.6115702479338843, + "acc_norm_stderr": 0.044492703500683815 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.375, + "acc_stderr": 0.039397364351956274, + "acc_norm": 0.375, + "acc_norm_stderr": 0.039397364351956274 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.35784313725490197, + "acc_stderr": 0.019393058402355442, + "acc_norm": 0.35784313725490197, + "acc_norm_stderr": 0.019393058402355442 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.31560283687943264, + "acc_stderr": 0.027724989449509314, + "acc_norm": 0.31560283687943264, + "acc_norm_stderr": 0.027724989449509314 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.04287858751340455, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.04287858751340455 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.37962962962962965, + "acc_stderr": 0.03309682581119035, + "acc_norm": 0.37962962962962965, + "acc_norm_stderr": 0.03309682581119035 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.26033519553072626, + "acc_stderr": 0.014676252009319475, + "acc_norm": 0.26033519553072626, + "acc_norm_stderr": 0.014676252009319475 + }, + "harness|ko_mmlu_college_computer_science|5": 
{ + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.34558823529411764, + "acc_stderr": 0.028888193103988647, + "acc_norm": 0.34558823529411764, + "acc_norm_stderr": 0.028888193103988647 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3795918367346939, + "acc_stderr": 0.031067211262872485, + "acc_norm": 0.3795918367346939, + "acc_norm_stderr": 0.031067211262872485 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.48945147679324896, + "acc_stderr": 0.032539983791662855, + "acc_norm": 0.48945147679324896, + "acc_norm_stderr": 0.032539983791662855 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3076923076923077, + "acc_stderr": 0.011787910251664589, + "acc_norm": 0.3076923076923077, + "acc_norm_stderr": 0.011787910251664589 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.3284313725490196, + "acc_stderr": 0.032962451101722294, + "acc_norm": 0.3284313725490196, + "acc_norm_stderr": 0.032962451101722294 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.036810508691615486, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.036810508691615486 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.25703794369645044, + "mc1_stderr": 0.015298077509485083, + "mc2": 0.4338489222841401, + "mc2_stderr": 0.01562611722529729 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3659976387249115, + "acc_stderr": 0.016561489664895696, + "acc_norm": 0.4592680047225502, + "acc_norm_stderr": 0.017133218276537673 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 
1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + 
"harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "LI-ST/Mistral-7B-ko-v0.006", + "model_sha": "85711e2b40b627828aec4ad76955aa66062e9c97", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/LI-ST/Mistral-7B-ko-v0.1/result_2023-11-13 12:27:55.json b/LI-ST/Mistral-7B-ko-v0.1/result_2023-11-13 12:27:55.json new file mode 100644 index 0000000000000000000000000000000000000000..3654de047fbe80d249f4268c0708703e91d11697 --- /dev/null +++ b/LI-ST/Mistral-7B-ko-v0.1/result_2023-11-13 12:27:55.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3660409556313993, + "acc_stderr": 0.014077223108470137, + "acc_norm": 0.4087030716723549, + "acc_norm_stderr": 0.014365750345427001 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3648675562636925, + "acc_stderr": 0.004804091708812546, + "acc_norm": 0.4743079067914758, + "acc_norm_stderr": 0.0049831897112085155 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.43859649122807015, + "acc_stderr": 0.038057975055904594, + "acc_norm": 0.43859649122807015, + "acc_norm_stderr": 0.038057975055904594 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5242718446601942, + "acc_stderr": 0.049449010929737795, + "acc_norm": 0.5242718446601942, + "acc_norm_stderr": 0.049449010929737795 + }, + 
"harness|ko_mmlu_miscellaneous|5": { + "acc": 0.45977011494252873, + "acc_stderr": 0.01782199409693354, + "acc_norm": 0.45977011494252873, + "acc_norm_stderr": 0.01782199409693354 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3925925925925926, + "acc_stderr": 0.042185062153688786, + "acc_norm": 0.3925925925925926, + "acc_norm_stderr": 0.042185062153688786 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.33617021276595743, + "acc_stderr": 0.030881618520676942, + "acc_norm": 0.33617021276595743, + "acc_norm_stderr": 0.030881618520676942 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4397590361445783, + "acc_stderr": 0.03864139923699121, + "acc_norm": 0.4397590361445783, + "acc_norm_stderr": 0.03864139923699121 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4630225080385852, + "acc_stderr": 0.02832032583010591, + "acc_norm": 0.4630225080385852, + "acc_norm_stderr": 0.02832032583010591 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.45739910313901344, + "acc_stderr": 0.033435777055830646, + "acc_norm": 0.45739910313901344, + "acc_norm_stderr": 0.033435777055830646 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48854961832061067, + "acc_stderr": 0.043841400240780176, + "acc_norm": 0.48854961832061067, + "acc_norm_stderr": 0.043841400240780176 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939098, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939098 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5202020202020202, + "acc_stderr": 0.03559443565563919, + "acc_norm": 0.5202020202020202, + "acc_norm_stderr": 0.03559443565563919 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.45517241379310347, + "acc_stderr": 0.04149886942192117, + "acc_norm": 0.45517241379310347, + "acc_norm_stderr": 
0.04149886942192117 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.043898699568087785, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.043898699568087785 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.40756302521008403, + "acc_stderr": 0.03191863374478465, + "acc_norm": 0.40756302521008403, + "acc_norm_stderr": 0.03191863374478465 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.38461538461538464, + "acc_stderr": 0.024666744915187232, + "acc_norm": 0.38461538461538464, + "acc_norm_stderr": 0.024666744915187232 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.55, + "acc_stderr": 0.049999999999999996, + "acc_norm": 0.55, + "acc_norm_stderr": 0.049999999999999996 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4537037037037037, + "acc_stderr": 0.04812917324536823, + "acc_norm": 0.4537037037037037, + "acc_norm_stderr": 0.04812917324536823 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3793103448275862, + "acc_stderr": 0.034139638059062345, + "acc_norm": 0.3793103448275862, + "acc_norm_stderr": 0.034139638059062345 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.44193548387096776, + "acc_stderr": 0.028251557906849738, + "acc_norm": 0.44193548387096776, + "acc_norm_stderr": 0.028251557906849738 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6923076923076923, + "acc_stderr": 0.030236389942173092, + "acc_norm": 0.6923076923076923, + "acc_norm_stderr": 0.030236389942173092 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4188679245283019, + "acc_stderr": 0.03036505082911521, + "acc_norm": 0.4188679245283019, + "acc_norm_stderr": 0.03036505082911521 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4818181818181818, + "acc_stderr": 0.04785964010794916, + 
"acc_norm": 0.4818181818181818, + "acc_norm_stderr": 0.04785964010794916 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.02784081149587192, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.02784081149587192 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3576158940397351, + "acc_stderr": 0.03913453431177258, + "acc_norm": 0.3576158940397351, + "acc_norm_stderr": 0.03913453431177258 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6218905472636815, + "acc_stderr": 0.034288678487786564, + "acc_norm": 0.6218905472636815, + "acc_norm_stderr": 0.034288678487786564 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4046242774566474, + "acc_stderr": 0.03742461193887248, + "acc_norm": 0.4046242774566474, + "acc_norm_stderr": 0.03742461193887248 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.36243386243386244, + "acc_stderr": 0.024757473902752052, + "acc_norm": 0.36243386243386244, + "acc_norm_stderr": 0.024757473902752052 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3402777777777778, + "acc_stderr": 0.03962135573486219, + "acc_norm": 0.3402777777777778, + "acc_norm_stderr": 0.03962135573486219 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.64, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.64, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.476878612716763, + "acc_stderr": 0.026890297881303125, + "acc_norm": 0.476878612716763, + "acc_norm_stderr": 0.026890297881303125 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3987730061349693, + "acc_stderr": 0.038470214204560246, + "acc_norm": 0.3987730061349693, + "acc_norm_stderr": 0.038470214204560246 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.41358024691358025, + 
"acc_stderr": 0.027402042040269955, + "acc_norm": 0.41358024691358025, + "acc_norm_stderr": 0.027402042040269955 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.44041450777202074, + "acc_stderr": 0.035827245300360945, + "acc_norm": 0.44041450777202074, + "acc_norm_stderr": 0.035827245300360945 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.32456140350877194, + "acc_stderr": 0.04404556157374768, + "acc_norm": 0.32456140350877194, + "acc_norm_stderr": 0.04404556157374768 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.46055045871559636, + "acc_stderr": 0.021370494609995096, + "acc_norm": 0.46055045871559636, + "acc_norm_stderr": 0.021370494609995096 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3412698412698413, + "acc_stderr": 0.04240799327574924, + "acc_norm": 0.3412698412698413, + "acc_norm_stderr": 0.04240799327574924 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.46405228758169936, + "acc_stderr": 0.02855582751652879, + "acc_norm": 0.46405228758169936, + "acc_norm_stderr": 0.02855582751652879 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6776859504132231, + "acc_stderr": 0.04266416363352167, + "acc_norm": 0.6776859504132231, + "acc_norm_stderr": 0.04266416363352167 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3881578947368421, + "acc_stderr": 0.03965842097512744, + "acc_norm": 0.3881578947368421, + "acc_norm_stderr": 0.03965842097512744 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.36764705882352944, + "acc_stderr": 0.019506291693954854, + "acc_norm": 0.36764705882352944, + "acc_norm_stderr": 0.019506291693954854 + }, + 
"harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3120567375886525, + "acc_stderr": 0.02764012054516993, + "acc_norm": 0.3120567375886525, + "acc_norm_stderr": 0.02764012054516993 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4375, + "acc_stderr": 0.04708567521880525, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.04708567521880525 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.39351851851851855, + "acc_stderr": 0.03331747876370312, + "acc_norm": 0.39351851851851855, + "acc_norm_stderr": 0.03331747876370312 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2759776536312849, + "acc_stderr": 0.014950103002475363, + "acc_norm": 0.2759776536312849, + "acc_norm_stderr": 0.014950103002475363 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.34558823529411764, + "acc_stderr": 0.02888819310398864, + "acc_norm": 0.34558823529411764, + "acc_norm_stderr": 0.02888819310398864 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4897959183673469, + "acc_stderr": 0.032002553478937816, + "acc_norm": 0.4897959183673469, + "acc_norm_stderr": 0.032002553478937816 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5907172995780591, + "acc_stderr": 0.03200704183359591, + "acc_norm": 0.5907172995780591, + "acc_norm_stderr": 0.03200704183359591 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3116036505867014, + "acc_stderr": 0.011829039182849648, + "acc_norm": 0.3116036505867014, + "acc_norm_stderr": 0.011829039182849648 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.03460228327239171, + "acc_norm": 
0.4166666666666667, + "acc_norm_stderr": 0.03460228327239171 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.37575757575757573, + "acc_stderr": 0.03781887353205982, + "acc_norm": 0.37575757575757573, + "acc_norm_stderr": 0.03781887353205982 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.30599755201958384, + "mc1_stderr": 0.016132229728155062, + "mc2": 0.465162397472841, + "mc2_stderr": 0.015592055613780503 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3919716646989374, + "acc_stderr": 0.016784332119424084, + "acc_norm": 0.4781582054309327, + "acc_norm_stderr": 0.017173944474294375 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + 
"harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "LI-ST/Mistral-7B-ko-v0.1", + "model_sha": "6a56df13013ea478d88ef7b77fde53f594bf1e8d", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/LI-ST/Mistral-7B-ko-v0.10/result_2023-12-15 01:40:06.json b/LI-ST/Mistral-7B-ko-v0.10/result_2023-12-15 01:40:06.json new file mode 100644 index 0000000000000000000000000000000000000000..c44fe30c83739a74a67796caa690e7b61e9912c0 --- /dev/null +++ b/LI-ST/Mistral-7B-ko-v0.10/result_2023-12-15 01:40:06.json @@ -0,0 
+1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.31399317406143346, + "acc_stderr": 0.013562691224726291, + "acc_norm": 0.35409556313993173, + "acc_norm_stderr": 0.013975454122756553 + }, + "harness|ko_hellaswag|10": { + "acc": 0.36068512248556067, + "acc_stderr": 0.004792179052583444, + "acc_norm": 0.45140410276837284, + "acc_norm_stderr": 0.004966158142645413 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.49707602339181284, + "acc_stderr": 0.03834759370936839, + "acc_norm": 0.49707602339181284, + "acc_norm_stderr": 0.03834759370936839 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5339805825242718, + "acc_stderr": 0.04939291447273481, + "acc_norm": 0.5339805825242718, + "acc_norm_stderr": 0.04939291447273481 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4342273307790549, + "acc_stderr": 0.017724589389677785, + "acc_norm": 0.4342273307790549, + "acc_norm_stderr": 0.017724589389677785 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34074074074074073, + "acc_stderr": 0.040943762699967946, + "acc_norm": 0.34074074074074073, + "acc_norm_stderr": 0.040943762699967946 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3446808510638298, + "acc_stderr": 0.03106898596312215, + "acc_norm": 0.3446808510638298, + "acc_norm_stderr": 0.03106898596312215 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42168674698795183, + "acc_stderr": 0.03844453181770917, + "acc_norm": 0.42168674698795183, + "acc_norm_stderr": 0.03844453181770917 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.44694533762057875, + "acc_stderr": 0.02823776942208534, + "acc_norm": 0.44694533762057875, + "acc_norm_stderr": 0.02823776942208534 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.35874439461883406, + "acc_stderr": 0.03219079200419994, + "acc_norm": 0.35874439461883406, + 
"acc_norm_stderr": 0.03219079200419994 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3816793893129771, + "acc_stderr": 0.0426073515764456, + "acc_norm": 0.3816793893129771, + "acc_norm_stderr": 0.0426073515764456 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.4696969696969697, + "acc_stderr": 0.03555804051763929, + "acc_norm": 0.4696969696969697, + "acc_norm_stderr": 0.03555804051763929 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4689655172413793, + "acc_stderr": 0.04158632762097828, + "acc_norm": 0.4689655172413793, + "acc_norm_stderr": 0.04158632762097828 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.04220773659171453, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.04220773659171453 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.032145368597886394, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.032145368597886394 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4, + "acc_stderr": 0.024838811988033158, + "acc_norm": 0.4, + "acc_norm_stderr": 0.024838811988033158 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.55, + "acc_stderr": 0.049999999999999996, + "acc_norm": 0.55, + "acc_norm_stderr": 0.049999999999999996 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.04766075165356461, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.04766075165356461 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4039408866995074, + "acc_stderr": 0.034524539038220385, + "acc_norm": 0.4039408866995074, + 
"acc_norm_stderr": 0.034524539038220385 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4064516129032258, + "acc_stderr": 0.027941727346256308, + "acc_norm": 0.4064516129032258, + "acc_norm_stderr": 0.027941727346256308 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6452991452991453, + "acc_stderr": 0.03134250486245402, + "acc_norm": 0.6452991452991453, + "acc_norm_stderr": 0.03134250486245402 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.35094339622641507, + "acc_stderr": 0.029373646253234686, + "acc_norm": 0.35094339622641507, + "acc_norm_stderr": 0.029373646253234686 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5272727272727272, + "acc_stderr": 0.04782001791380061, + "acc_norm": 0.5272727272727272, + "acc_norm_stderr": 0.04782001791380061 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.34074074074074073, + "acc_stderr": 0.028897748741131147, + "acc_norm": 0.34074074074074073, + "acc_norm_stderr": 0.028897748741131147 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.271523178807947, + "acc_stderr": 0.03631329803969653, + "acc_norm": 0.271523178807947, + "acc_norm_stderr": 0.03631329803969653 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6069651741293532, + "acc_stderr": 0.0345368246603156, + "acc_norm": 0.6069651741293532, + "acc_norm_stderr": 0.0345368246603156 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3179190751445087, + "acc_stderr": 0.0355068398916558, + "acc_norm": 0.3179190751445087, + "acc_norm_stderr": 0.0355068398916558 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.36772486772486773, + "acc_stderr": 0.02483383982556242, + "acc_norm": 0.36772486772486773, + "acc_norm_stderr": 0.02483383982556242 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.037455547914624555, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.037455547914624555 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.36, + 
"acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.57, + "acc_stderr": 0.04975698519562426, + "acc_norm": 0.57, + "acc_norm_stderr": 0.04975698519562426 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.476878612716763, + "acc_stderr": 0.026890297881303125, + "acc_norm": 0.476878612716763, + "acc_norm_stderr": 0.026890297881303125 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3987730061349693, + "acc_stderr": 0.038470214204560246, + "acc_norm": 0.3987730061349693, + "acc_norm_stderr": 0.038470214204560246 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.44135802469135804, + "acc_stderr": 0.027628737155668784, + "acc_norm": 0.44135802469135804, + "acc_norm_stderr": 0.027628737155668784 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.48186528497409326, + "acc_stderr": 0.036060650018329185, + "acc_norm": 0.48186528497409326, + "acc_norm_stderr": 0.036060650018329185 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.04185774424022056, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.04185774424022056 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.42201834862385323, + "acc_stderr": 0.02117499140776317, + "acc_norm": 0.42201834862385323, + "acc_norm_stderr": 0.02117499140776317 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04216370213557835, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04216370213557835 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4117647058823529, + "acc_stderr": 0.02818059632825929, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.02818059632825929 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.45, 
+ "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.043913262867240704, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.043913262867240704 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.39473684210526316, + "acc_stderr": 0.039777499346220734, + "acc_norm": 0.39473684210526316, + "acc_norm_stderr": 0.039777499346220734 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.36437908496732024, + "acc_stderr": 0.019469518221573702, + "acc_norm": 0.36437908496732024, + "acc_norm_stderr": 0.019469518221573702 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.36879432624113473, + "acc_stderr": 0.028782227561347247, + "acc_norm": 0.36879432624113473, + "acc_norm_stderr": 0.028782227561347247 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.04547960999764376, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.04547960999764376 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.375, + "acc_stderr": 0.033016908987210894, + "acc_norm": 0.375, + "acc_norm_stderr": 0.033016908987210894 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23910614525139665, + "acc_stderr": 0.014265554192331149, + "acc_norm": 0.23910614525139665, + "acc_norm_stderr": 0.014265554192331149 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.35661764705882354, + "acc_stderr": 0.02909720956841195, + "acc_norm": 0.35661764705882354, + "acc_norm_stderr": 0.02909720956841195 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 
0.39591836734693875, + "acc_stderr": 0.03130802899065685, + "acc_norm": 0.39591836734693875, + "acc_norm_stderr": 0.03130802899065685 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5358649789029536, + "acc_stderr": 0.03246338898055659, + "acc_norm": 0.5358649789029536, + "acc_norm_stderr": 0.03246338898055659 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.32920469361147325, + "acc_stderr": 0.012002091666902305, + "acc_norm": 0.32920469361147325, + "acc_norm_stderr": 0.012002091666902305 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.39705882352941174, + "acc_stderr": 0.034341311647191286, + "acc_norm": 0.39705882352941174, + "acc_norm_stderr": 0.034341311647191286 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.37575757575757573, + "acc_stderr": 0.037818873532059816, + "acc_norm": 0.37575757575757573, + "acc_norm_stderr": 0.037818873532059816 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2558139534883721, + "mc1_stderr": 0.015274176219283331, + "mc2": 0.4259316971970392, + "mc2_stderr": 0.015462913136325425 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.38488783943329397, + "acc_stderr": 0.016728579701498665, + "acc_norm": 0.46635182998819363, + "acc_norm_stderr": 0.017151384117131876 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + 
"harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + 
"harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "LI-ST/Mistral-7B-ko-v0.10", + "model_sha": "b2feae16837ddfa9402366e848700bd25c88b330", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/LI-ST/Mistral-7B-ko-v0.2/result_2023-11-15 02:42:51.json b/LI-ST/Mistral-7B-ko-v0.2/result_2023-11-15 02:42:51.json new file mode 100644 index 0000000000000000000000000000000000000000..6d42bca4a3cebca979c119c099e944d23cf307f1 --- /dev/null +++ b/LI-ST/Mistral-7B-ko-v0.2/result_2023-11-15 02:42:51.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3660409556313993, + "acc_stderr": 0.014077223108470137, + "acc_norm": 0.4087030716723549, + "acc_norm_stderr": 0.014365750345427001 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3648675562636925, + "acc_stderr": 0.004804091708812546, + "acc_norm": 0.4743079067914758, + "acc_norm_stderr": 0.0049831897112085155 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.43859649122807015, + "acc_stderr": 0.038057975055904594, + "acc_norm": 0.43859649122807015, + "acc_norm_stderr": 0.038057975055904594 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5242718446601942, + "acc_stderr": 0.049449010929737795, + "acc_norm": 0.5242718446601942, + "acc_norm_stderr": 0.049449010929737795 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.45977011494252873, + "acc_stderr": 0.01782199409693354, + "acc_norm": 0.45977011494252873, + "acc_norm_stderr": 0.01782199409693354 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3925925925925926, + "acc_stderr": 0.042185062153688786, + "acc_norm": 0.3925925925925926, + "acc_norm_stderr": 0.042185062153688786 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + 
"harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.33617021276595743, + "acc_stderr": 0.030881618520676942, + "acc_norm": 0.33617021276595743, + "acc_norm_stderr": 0.030881618520676942 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4397590361445783, + "acc_stderr": 0.03864139923699121, + "acc_norm": 0.4397590361445783, + "acc_norm_stderr": 0.03864139923699121 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4630225080385852, + "acc_stderr": 0.02832032583010591, + "acc_norm": 0.4630225080385852, + "acc_norm_stderr": 0.02832032583010591 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.45739910313901344, + "acc_stderr": 0.033435777055830646, + "acc_norm": 0.45739910313901344, + "acc_norm_stderr": 0.033435777055830646 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48854961832061067, + "acc_stderr": 0.043841400240780176, + "acc_norm": 0.48854961832061067, + "acc_norm_stderr": 0.043841400240780176 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939098, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939098 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5202020202020202, + "acc_stderr": 0.03559443565563919, + "acc_norm": 0.5202020202020202, + "acc_norm_stderr": 0.03559443565563919 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.45517241379310347, + "acc_stderr": 0.04149886942192117, + "acc_norm": 0.45517241379310347, + "acc_norm_stderr": 0.04149886942192117 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.043898699568087785, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.043898699568087785 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.40756302521008403, + "acc_stderr": 0.03191863374478465, + "acc_norm": 0.40756302521008403, + "acc_norm_stderr": 0.03191863374478465 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.38461538461538464, + "acc_stderr": 0.024666744915187232, + 
"acc_norm": 0.38461538461538464, + "acc_norm_stderr": 0.024666744915187232 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.55, + "acc_stderr": 0.049999999999999996, + "acc_norm": 0.55, + "acc_norm_stderr": 0.049999999999999996 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4537037037037037, + "acc_stderr": 0.04812917324536823, + "acc_norm": 0.4537037037037037, + "acc_norm_stderr": 0.04812917324536823 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3793103448275862, + "acc_stderr": 0.034139638059062345, + "acc_norm": 0.3793103448275862, + "acc_norm_stderr": 0.034139638059062345 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.44193548387096776, + "acc_stderr": 0.028251557906849738, + "acc_norm": 0.44193548387096776, + "acc_norm_stderr": 0.028251557906849738 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6923076923076923, + "acc_stderr": 0.030236389942173092, + "acc_norm": 0.6923076923076923, + "acc_norm_stderr": 0.030236389942173092 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4188679245283019, + "acc_stderr": 0.03036505082911521, + "acc_norm": 0.4188679245283019, + "acc_norm_stderr": 0.03036505082911521 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4818181818181818, + "acc_stderr": 0.04785964010794916, + "acc_norm": 0.4818181818181818, + "acc_norm_stderr": 0.04785964010794916 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.02784081149587192, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.02784081149587192 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3576158940397351, + "acc_stderr": 0.03913453431177258, + "acc_norm": 0.3576158940397351, + "acc_norm_stderr": 0.03913453431177258 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6218905472636815, + 
"acc_stderr": 0.034288678487786564, + "acc_norm": 0.6218905472636815, + "acc_norm_stderr": 0.034288678487786564 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4046242774566474, + "acc_stderr": 0.03742461193887248, + "acc_norm": 0.4046242774566474, + "acc_norm_stderr": 0.03742461193887248 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.36243386243386244, + "acc_stderr": 0.024757473902752052, + "acc_norm": 0.36243386243386244, + "acc_norm_stderr": 0.024757473902752052 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3402777777777778, + "acc_stderr": 0.03962135573486219, + "acc_norm": 0.3402777777777778, + "acc_norm_stderr": 0.03962135573486219 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.64, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.64, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.476878612716763, + "acc_stderr": 0.026890297881303125, + "acc_norm": 0.476878612716763, + "acc_norm_stderr": 0.026890297881303125 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3987730061349693, + "acc_stderr": 0.038470214204560246, + "acc_norm": 0.3987730061349693, + "acc_norm_stderr": 0.038470214204560246 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.41358024691358025, + "acc_stderr": 0.027402042040269955, + "acc_norm": 0.41358024691358025, + "acc_norm_stderr": 0.027402042040269955 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.44041450777202074, + "acc_stderr": 0.035827245300360945, + "acc_norm": 0.44041450777202074, + "acc_norm_stderr": 0.035827245300360945 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 
0.32456140350877194, + "acc_stderr": 0.04404556157374768, + "acc_norm": 0.32456140350877194, + "acc_norm_stderr": 0.04404556157374768 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.46055045871559636, + "acc_stderr": 0.021370494609995096, + "acc_norm": 0.46055045871559636, + "acc_norm_stderr": 0.021370494609995096 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3412698412698413, + "acc_stderr": 0.04240799327574924, + "acc_norm": 0.3412698412698413, + "acc_norm_stderr": 0.04240799327574924 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.46405228758169936, + "acc_stderr": 0.02855582751652879, + "acc_norm": 0.46405228758169936, + "acc_norm_stderr": 0.02855582751652879 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6776859504132231, + "acc_stderr": 0.04266416363352167, + "acc_norm": 0.6776859504132231, + "acc_norm_stderr": 0.04266416363352167 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3881578947368421, + "acc_stderr": 0.03965842097512744, + "acc_norm": 0.3881578947368421, + "acc_norm_stderr": 0.03965842097512744 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.36764705882352944, + "acc_stderr": 0.019506291693954854, + "acc_norm": 0.36764705882352944, + "acc_norm_stderr": 0.019506291693954854 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3120567375886525, + "acc_stderr": 0.02764012054516993, + "acc_norm": 0.3120567375886525, + "acc_norm_stderr": 0.02764012054516993 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4375, + "acc_stderr": 0.04708567521880525, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.04708567521880525 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.39351851851851855, + "acc_stderr": 0.03331747876370312, + "acc_norm": 0.39351851851851855, + "acc_norm_stderr": 0.03331747876370312 + }, + 
"harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2759776536312849, + "acc_stderr": 0.014950103002475363, + "acc_norm": 0.2759776536312849, + "acc_norm_stderr": 0.014950103002475363 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.34558823529411764, + "acc_stderr": 0.02888819310398864, + "acc_norm": 0.34558823529411764, + "acc_norm_stderr": 0.02888819310398864 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4897959183673469, + "acc_stderr": 0.032002553478937816, + "acc_norm": 0.4897959183673469, + "acc_norm_stderr": 0.032002553478937816 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5907172995780591, + "acc_stderr": 0.03200704183359591, + "acc_norm": 0.5907172995780591, + "acc_norm_stderr": 0.03200704183359591 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3116036505867014, + "acc_stderr": 0.011829039182849648, + "acc_norm": 0.3116036505867014, + "acc_norm_stderr": 0.011829039182849648 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.03460228327239171, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.03460228327239171 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.37575757575757573, + "acc_stderr": 0.03781887353205982, + "acc_norm": 0.37575757575757573, + "acc_norm_stderr": 0.03781887353205982 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.30599755201958384, + "mc1_stderr": 0.016132229728155062, + "mc2": 0.465162397472841, + "mc2_stderr": 0.015592055613780503 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3919716646989374, + "acc_stderr": 0.016784332119424084, + "acc_norm": 
0.4781582054309327, + "acc_norm_stderr": 0.017173944474294375 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + 
"harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "LI-ST/Mistral-7B-ko-v0.2", + "model_sha": "0a2ba8844a3b7518c29314c775d81937a5c7b4e6", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/LI-ST/Mistral-7B-ko-v0.3/result_2023-11-27 02:32:56.json b/LI-ST/Mistral-7B-ko-v0.3/result_2023-11-27 02:32:56.json new file mode 100644 index 0000000000000000000000000000000000000000..17259f138753bf1d27b46c4ddafb3be05adf2e36 --- /dev/null +++ b/LI-ST/Mistral-7B-ko-v0.3/result_2023-11-27 02:32:56.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.32593856655290104, + "acc_stderr": 0.013697432466693246, + "acc_norm": 0.3848122866894198, + "acc_norm_stderr": 0.01421837106525111 + }, + "harness|ko_hellaswag|10": { + "acc": 0.36735710017924716, + "acc_stderr": 0.004810996652324741, + "acc_norm": 0.469627564230233, + "acc_norm_stderr": 0.004980566907790453 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4619883040935672, + "acc_stderr": 0.03823727092882307, + "acc_norm": 0.4619883040935672, + "acc_norm_stderr": 0.03823727092882307 
+ }, + "harness|ko_mmlu_management|5": { + "acc": 0.5339805825242718, + "acc_stderr": 0.04939291447273481, + "acc_norm": 0.5339805825242718, + "acc_norm_stderr": 0.04939291447273481 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4495530012771392, + "acc_stderr": 0.017788725283507337, + "acc_norm": 0.4495530012771392, + "acc_norm_stderr": 0.017788725283507337 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.362962962962963, + "acc_stderr": 0.041539484047424, + "acc_norm": 0.362962962962963, + "acc_norm_stderr": 0.041539484047424 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.37872340425531914, + "acc_stderr": 0.031709956060406545, + "acc_norm": 0.37872340425531914, + "acc_norm_stderr": 0.031709956060406545 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3674698795180723, + "acc_stderr": 0.03753267402120574, + "acc_norm": 0.3674698795180723, + "acc_norm_stderr": 0.03753267402120574 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.44694533762057875, + "acc_stderr": 0.02823776942208533, + "acc_norm": 0.44694533762057875, + "acc_norm_stderr": 0.02823776942208533 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3542600896860987, + "acc_stderr": 0.03210062154134986, + "acc_norm": 0.3542600896860987, + "acc_norm_stderr": 0.03210062154134986 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.40458015267175573, + "acc_stderr": 0.043046937953806645, + "acc_norm": 0.40458015267175573, + "acc_norm_stderr": 0.043046937953806645 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.44, + "acc_stderr": 0.0498887651569859, + "acc_norm": 0.44, + "acc_norm_stderr": 0.0498887651569859 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5252525252525253, + "acc_stderr": 0.03557806245087314, + "acc_norm": 0.5252525252525253, + "acc_norm_stderr": 0.03557806245087314 + }, + 
"harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4482758620689655, + "acc_stderr": 0.04144311810878151, + "acc_norm": 0.4482758620689655, + "acc_norm_stderr": 0.04144311810878151 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.04533838195929775, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.04533838195929775 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4369747899159664, + "acc_stderr": 0.03221943636566196, + "acc_norm": 0.4369747899159664, + "acc_norm_stderr": 0.03221943636566196 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3769230769230769, + "acc_stderr": 0.024570975364225995, + "acc_norm": 0.3769230769230769, + "acc_norm_stderr": 0.024570975364225995 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4351851851851852, + "acc_stderr": 0.04792898170907062, + "acc_norm": 0.4351851851851852, + "acc_norm_stderr": 0.04792898170907062 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3891625615763547, + "acc_stderr": 0.03430462416103872, + "acc_norm": 0.3891625615763547, + "acc_norm_stderr": 0.03430462416103872 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4258064516129032, + "acc_stderr": 0.028129112709165897, + "acc_norm": 0.4258064516129032, + "acc_norm_stderr": 0.028129112709165897 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6923076923076923, + "acc_stderr": 0.030236389942173078, + "acc_norm": 0.6923076923076923, + "acc_norm_stderr": 0.030236389942173078 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.41132075471698115, + "acc_stderr": 0.030285009259009805, + "acc_norm": 0.41132075471698115, 
+ "acc_norm_stderr": 0.030285009259009805 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.44545454545454544, + "acc_stderr": 0.047605488214603246, + "acc_norm": 0.44545454545454544, + "acc_norm_stderr": 0.047605488214603246 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3111111111111111, + "acc_stderr": 0.02822644674968352, + "acc_norm": 0.3111111111111111, + "acc_norm_stderr": 0.02822644674968352 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.32450331125827814, + "acc_stderr": 0.038227469376587525, + "acc_norm": 0.32450331125827814, + "acc_norm_stderr": 0.038227469376587525 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5373134328358209, + "acc_stderr": 0.035256751674679745, + "acc_norm": 0.5373134328358209, + "acc_norm_stderr": 0.035256751674679745 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3815028901734104, + "acc_stderr": 0.03703851193099521, + "acc_norm": 0.3815028901734104, + "acc_norm_stderr": 0.03703851193099521 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3492063492063492, + "acc_stderr": 0.02455229220934266, + "acc_norm": 0.3492063492063492, + "acc_norm_stderr": 0.02455229220934266 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4583333333333333, + "acc_stderr": 0.04166666666666665, + "acc_norm": 0.4583333333333333, + "acc_norm_stderr": 0.04166666666666665 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939098, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939098 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.55, + "acc_stderr": 0.05000000000000001, + "acc_norm": 0.55, + "acc_norm_stderr": 0.05000000000000001 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5086705202312138, + "acc_stderr": 0.026915047355369804, + "acc_norm": 0.5086705202312138, + "acc_norm_stderr": 0.026915047355369804 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.44785276073619634, + "acc_stderr": 
0.03906947479456601, + "acc_norm": 0.44785276073619634, + "acc_norm_stderr": 0.03906947479456601 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.44135802469135804, + "acc_stderr": 0.027628737155668777, + "acc_norm": 0.44135802469135804, + "acc_norm_stderr": 0.027628737155668777 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.46632124352331605, + "acc_stderr": 0.036002440698671784, + "acc_norm": 0.46632124352331605, + "acc_norm_stderr": 0.036002440698671784 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3508771929824561, + "acc_stderr": 0.044895393502706986, + "acc_norm": 0.3508771929824561, + "acc_norm_stderr": 0.044895393502706986 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.46055045871559636, + "acc_stderr": 0.021370494609995096, + "acc_norm": 0.46055045871559636, + "acc_norm_stderr": 0.021370494609995096 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.042857142857142816, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.042857142857142816 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.434640522875817, + "acc_stderr": 0.028384256704883037, + "acc_norm": 0.434640522875817, + "acc_norm_stderr": 0.028384256704883037 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6115702479338843, + "acc_stderr": 0.04449270350068382, + "acc_norm": 0.6115702479338843, + "acc_norm_stderr": 0.04449270350068382 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40131578947368424, + "acc_stderr": 0.03988903703336285, + "acc_norm": 0.40131578947368424, + "acc_norm_stderr": 0.03988903703336285 + }, + "harness|ko_mmlu_professional_psychology|5": { + 
"acc": 0.3202614379084967, + "acc_stderr": 0.018875682938069443, + "acc_norm": 0.3202614379084967, + "acc_norm_stderr": 0.018875682938069443 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32269503546099293, + "acc_stderr": 0.027889139300534785, + "acc_norm": 0.32269503546099293, + "acc_norm_stderr": 0.027889139300534785 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.30357142857142855, + "acc_stderr": 0.04364226155841044, + "acc_norm": 0.30357142857142855, + "acc_norm_stderr": 0.04364226155841044 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.03167468706828979, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.03167468706828979 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24581005586592178, + "acc_stderr": 0.014400296429225606, + "acc_norm": 0.24581005586592178, + "acc_norm_stderr": 0.014400296429225606 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.04960449637488583, + "acc_norm": 0.42, + "acc_norm_stderr": 0.04960449637488583 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.36764705882352944, + "acc_stderr": 0.029289413409403192, + "acc_norm": 0.36764705882352944, + "acc_norm_stderr": 0.029289413409403192 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.35918367346938773, + "acc_stderr": 0.030713560455108493, + "acc_norm": 0.35918367346938773, + "acc_norm_stderr": 0.030713560455108493 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5021097046413502, + "acc_stderr": 0.032546938018020076, + "acc_norm": 0.5021097046413502, + "acc_norm_stderr": 0.032546938018020076 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3226857887874837, + "acc_stderr": 0.011940264193195983, + "acc_norm": 0.3226857887874837, + 
"acc_norm_stderr": 0.011940264193195983 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.37745098039215685, + "acc_stderr": 0.03402272044340705, + "acc_norm": 0.37745098039215685, + "acc_norm_stderr": 0.03402272044340705 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3393939393939394, + "acc_stderr": 0.03697442205031595, + "acc_norm": 0.3393939393939394, + "acc_norm_stderr": 0.03697442205031595 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.27539779681762544, + "mc1_stderr": 0.015638135667775523, + "mc2": 0.4518070307573933, + "mc2_stderr": 0.015652737933513572 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4155844155844156, + "acc_stderr": 0.016943586313076575, + "acc_norm": 0.525383707201889, + "acc_norm_stderr": 0.01716818720142926 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "LI-ST/Mistral-7B-ko-v0.3", + "model_sha": "79cf208351d82dbfb05791f76dfdb7b03c5b8abe", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/LI-ST/Mistral-7B-ko-v0.4/result_2023-11-27 02:45:47.json b/LI-ST/Mistral-7B-ko-v0.4/result_2023-11-27 02:45:47.json new file mode 100644 index 
0000000000000000000000000000000000000000..51c31f547415f2d79fec9514a0a9700905a98362 --- /dev/null +++ b/LI-ST/Mistral-7B-ko-v0.4/result_2023-11-27 02:45:47.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.30802047781569963, + "acc_stderr": 0.01349142951729204, + "acc_norm": 0.3703071672354949, + "acc_norm_stderr": 0.01411129875167495 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3505277833100976, + "acc_stderr": 0.004761601303258895, + "acc_norm": 0.4447321250746863, + "acc_norm_stderr": 0.0049592047730462 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4152046783625731, + "acc_stderr": 0.03779275945503201, + "acc_norm": 0.4152046783625731, + "acc_norm_stderr": 0.03779275945503201 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.33980582524271846, + "acc_stderr": 0.04689765937278135, + "acc_norm": 0.33980582524271846, + "acc_norm_stderr": 0.04689765937278135 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.44189016602809705, + "acc_stderr": 0.01775880053421442, + "acc_norm": 0.44189016602809705, + "acc_norm_stderr": 0.01775880053421442 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.042446332383532286, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.042446332383532286 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3446808510638298, + "acc_stderr": 0.03106898596312215, + "acc_norm": 0.3446808510638298, + "acc_norm_stderr": 0.03106898596312215 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39156626506024095, + "acc_stderr": 0.037998574544796354, + "acc_norm": 0.39156626506024095, + "acc_norm_stderr": 0.037998574544796354 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4405144694533762, + "acc_stderr": 0.028196400574197422, + "acc_norm": 0.4405144694533762, + "acc_norm_stderr": 
0.028196400574197422 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4484304932735426, + "acc_stderr": 0.03337883736255098, + "acc_norm": 0.4484304932735426, + "acc_norm_stderr": 0.03337883736255098 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4122137404580153, + "acc_stderr": 0.04317171194870254, + "acc_norm": 0.4122137404580153, + "acc_norm_stderr": 0.04317171194870254 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.0347327959083696, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.0347327959083696 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.04104269211806232, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.04104269211806232 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.17647058823529413, + "acc_stderr": 0.03793281185307808, + "acc_norm": 0.17647058823529413, + "acc_norm_stderr": 0.03793281185307808 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.453781512605042, + "acc_stderr": 0.03233943468182088, + "acc_norm": 0.453781512605042, + "acc_norm_stderr": 0.03233943468182088 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.36666666666666664, + "acc_stderr": 0.02443301646605245, + "acc_norm": 0.36666666666666664, + "acc_norm_stderr": 0.02443301646605245 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.04803752235190193, + "acc_norm": 0.4444444444444444, + 
"acc_norm_stderr": 0.04803752235190193 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.35960591133004927, + "acc_stderr": 0.03376458246509568, + "acc_norm": 0.35960591133004927, + "acc_norm_stderr": 0.03376458246509568 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3967741935483871, + "acc_stderr": 0.02783123160576796, + "acc_norm": 0.3967741935483871, + "acc_norm_stderr": 0.02783123160576796 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6196581196581197, + "acc_stderr": 0.03180425204384099, + "acc_norm": 0.6196581196581197, + "acc_norm_stderr": 0.03180425204384099 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3584905660377358, + "acc_stderr": 0.029514703583981765, + "acc_norm": 0.3584905660377358, + "acc_norm_stderr": 0.029514703583981765 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.35454545454545455, + "acc_stderr": 0.04582004841505417, + "acc_norm": 0.35454545454545455, + "acc_norm_stderr": 0.04582004841505417 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.029116617606083025, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.029116617606083025 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + "acc_stderr": 0.03734535676787198, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.03734535676787198 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.4925373134328358, + "acc_stderr": 0.035351400842767194, + "acc_norm": 0.4925373134328358, + "acc_norm_stderr": 0.035351400842767194 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3988439306358382, + "acc_stderr": 0.03733626655383509, + "acc_norm": 0.3988439306358382, + "acc_norm_stderr": 0.03733626655383509 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3492063492063492, + "acc_stderr": 0.024552292209342665, + "acc_norm": 0.3492063492063492, + "acc_norm_stderr": 0.024552292209342665 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 
0.3263888888888889, + "acc_stderr": 0.03921067198982266, + "acc_norm": 0.3263888888888889, + "acc_norm_stderr": 0.03921067198982266 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.42485549132947975, + "acc_stderr": 0.02661335084026174, + "acc_norm": 0.42485549132947975, + "acc_norm_stderr": 0.02661335084026174 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3558282208588957, + "acc_stderr": 0.03761521380046734, + "acc_norm": 0.3558282208588957, + "acc_norm_stderr": 0.03761521380046734 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.404320987654321, + "acc_stderr": 0.027306625297327695, + "acc_norm": 0.404320987654321, + "acc_norm_stderr": 0.027306625297327695 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.41450777202072536, + "acc_stderr": 0.03555300319557672, + "acc_norm": 0.41450777202072536, + "acc_norm_stderr": 0.03555300319557672 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.32456140350877194, + "acc_stderr": 0.04404556157374768, + "acc_norm": 0.32456140350877194, + "acc_norm_stderr": 0.04404556157374768 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3798165137614679, + "acc_stderr": 0.020808825617866244, + "acc_norm": 0.3798165137614679, + "acc_norm_stderr": 0.020808825617866244 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.04006168083848878, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.04006168083848878 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 
0.39869281045751637, + "acc_stderr": 0.02803609227389176, + "acc_norm": 0.39869281045751637, + "acc_norm_stderr": 0.02803609227389176 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5206611570247934, + "acc_stderr": 0.04560456086387235, + "acc_norm": 0.5206611570247934, + "acc_norm_stderr": 0.04560456086387235 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3157894736842105, + "acc_stderr": 0.03782728980865471, + "acc_norm": 0.3157894736842105, + "acc_norm_stderr": 0.03782728980865471 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.019070985589687495, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.019070985589687495 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3404255319148936, + "acc_stderr": 0.02826765748265015, + "acc_norm": 0.3404255319148936, + "acc_norm_stderr": 0.02826765748265015 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3392857142857143, + "acc_stderr": 0.04493949068613539, + "acc_norm": 0.3392857142857143, + "acc_norm_stderr": 0.04493949068613539 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.32407407407407407, + "acc_stderr": 0.03191923445686185, + "acc_norm": 0.32407407407407407, + "acc_norm_stderr": 0.03191923445686185 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23128491620111732, + "acc_stderr": 0.014102223623152573, + "acc_norm": 0.23128491620111732, + "acc_norm_stderr": 0.014102223623152573 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620333 + }, + 
"harness|ko_mmlu_professional_medicine|5": { + "acc": 0.40441176470588236, + "acc_stderr": 0.029812630701569743, + "acc_norm": 0.40441176470588236, + "acc_norm_stderr": 0.029812630701569743 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4204081632653061, + "acc_stderr": 0.03160106993449604, + "acc_norm": 0.4204081632653061, + "acc_norm_stderr": 0.03160106993449604 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5147679324894515, + "acc_stderr": 0.032533028078777386, + "acc_norm": 0.5147679324894515, + "acc_norm_stderr": 0.032533028078777386 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3135593220338983, + "acc_stderr": 0.011849234291459333, + "acc_norm": 0.3135593220338983, + "acc_norm_stderr": 0.011849234291459333 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.3382352941176471, + "acc_stderr": 0.03320574612945431, + "acc_norm": 0.3382352941176471, + "acc_norm_stderr": 0.03320574612945431 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3090909090909091, + "acc_stderr": 0.03608541011573967, + "acc_norm": 0.3090909090909091, + "acc_norm_stderr": 0.03608541011573967 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.27050183598531213, + "mc1_stderr": 0.015550778332842883, + "mc2": 0.42266472087084006, + "mc2_stderr": 0.015345191543063135 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3270365997638725, + "acc_stderr": 0.016129047485457022, + "acc_norm": 0.40613931523022434, + "acc_norm_stderr": 0.016884749503191392 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + 
"harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + 
"harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "LI-ST/Mistral-7B-ko-v0.4", + "model_sha": "e19ae536336aadacd842ce5af2542617301421e3", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/LI-ST/Mistral-7B-ko-v0.5/result_2023-11-27 02:45:55.json b/LI-ST/Mistral-7B-ko-v0.5/result_2023-11-27 02:45:55.json new file mode 100644 index 0000000000000000000000000000000000000000..f6f5ab3e06788007ac465a05e44911dec317dd18 --- /dev/null +++ b/LI-ST/Mistral-7B-ko-v0.5/result_2023-11-27 02:45:55.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3165529010238908, + "acc_stderr": 0.013592431519068082, + "acc_norm": 0.3856655290102389, + "acc_norm_stderr": 0.014224250973257174 + }, + "harness|ko_hellaswag|10": { + "acc": 0.36456881099382593, + "acc_stderr": 0.004803253812881047, + "acc_norm": 0.4691296554471221, + "acc_norm_stderr": 0.004980262025472489 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.43859649122807015, + "acc_stderr": 0.038057975055904594, + "acc_norm": 0.43859649122807015, + "acc_norm_stderr": 0.038057975055904594 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5145631067961165, + "acc_stderr": 0.04948637324026637, + "acc_norm": 0.5145631067961165, + "acc_norm_stderr": 0.04948637324026637 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.44316730523627074, + "acc_stderr": 0.01776408503534839, + "acc_norm": 0.44316730523627074, + "acc_norm_stderr": 0.01776408503534839 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34074074074074073, + "acc_stderr": 0.04094376269996794, + "acc_norm": 
0.34074074074074073, + "acc_norm_stderr": 0.04094376269996794 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421255, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421255 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39574468085106385, + "acc_stderr": 0.03196758697835363, + "acc_norm": 0.39574468085106385, + "acc_norm_stderr": 0.03196758697835363 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.43373493975903615, + "acc_stderr": 0.03858158940685515, + "acc_norm": 0.43373493975903615, + "acc_norm_stderr": 0.03858158940685515 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3890675241157556, + "acc_stderr": 0.027690337536485376, + "acc_norm": 0.3890675241157556, + "acc_norm_stderr": 0.027690337536485376 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4260089686098655, + "acc_stderr": 0.0331883328621728, + "acc_norm": 0.4260089686098655, + "acc_norm_stderr": 0.0331883328621728 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.42748091603053434, + "acc_stderr": 0.04338920305792401, + "acc_norm": 0.42748091603053434, + "acc_norm_stderr": 0.04338920305792401 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.48484848484848486, + "acc_stderr": 0.03560716516531061, + "acc_norm": 0.48484848484848486, + "acc_norm_stderr": 0.03560716516531061 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.36551724137931035, + "acc_stderr": 0.04013124195424386, + "acc_norm": 0.36551724137931035, + "acc_norm_stderr": 0.04013124195424386 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237656, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237656 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3949579831932773, + "acc_stderr": 
0.031753678460966245, + "acc_norm": 0.3949579831932773, + "acc_norm_stderr": 0.031753678460966245 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4025641025641026, + "acc_stderr": 0.024864995159767766, + "acc_norm": 0.4025641025641026, + "acc_norm_stderr": 0.024864995159767766 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.53, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.53, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3891625615763547, + "acc_stderr": 0.03430462416103872, + "acc_norm": 0.3891625615763547, + "acc_norm_stderr": 0.03430462416103872 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.43548387096774194, + "acc_stderr": 0.02820622559150274, + "acc_norm": 0.43548387096774194, + "acc_norm_stderr": 0.02820622559150274 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6367521367521367, + "acc_stderr": 0.03150712523091265, + "acc_norm": 0.6367521367521367, + "acc_norm_stderr": 0.03150712523091265 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.39245283018867927, + "acc_stderr": 0.03005258057955784, + "acc_norm": 0.39245283018867927, + "acc_norm_stderr": 0.03005258057955784 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4636363636363636, + "acc_stderr": 0.047764491623961985, + "acc_norm": 0.4636363636363636, + "acc_norm_stderr": 0.047764491623961985 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.02831753349606648, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.02831753349606648 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + 
"acc_stderr": 0.037101857261199946, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.037101857261199946 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5074626865671642, + "acc_stderr": 0.03535140084276719, + "acc_norm": 0.5074626865671642, + "acc_norm_stderr": 0.03535140084276719 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3179190751445087, + "acc_stderr": 0.03550683989165582, + "acc_norm": 0.3179190751445087, + "acc_norm_stderr": 0.03550683989165582 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.31746031746031744, + "acc_stderr": 0.02397386199899207, + "acc_norm": 0.31746031746031744, + "acc_norm_stderr": 0.02397386199899207 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2847222222222222, + "acc_stderr": 0.03773809990686934, + "acc_norm": 0.2847222222222222, + "acc_norm_stderr": 0.03773809990686934 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.56, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.56, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.42485549132947975, + "acc_stderr": 0.026613350840261736, + "acc_norm": 0.42485549132947975, + "acc_norm_stderr": 0.026613350840261736 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4110429447852761, + "acc_stderr": 0.038656978537853624, + "acc_norm": 0.4110429447852761, + "acc_norm_stderr": 0.038656978537853624 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.42901234567901236, + "acc_stderr": 0.027538925613470863, + "acc_norm": 0.42901234567901236, + "acc_norm_stderr": 0.027538925613470863 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 
0.43005181347150256, + "acc_stderr": 0.03572954333144808, + "acc_norm": 0.43005181347150256, + "acc_norm_stderr": 0.03572954333144808 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.30701754385964913, + "acc_stderr": 0.043391383225798594, + "acc_norm": 0.30701754385964913, + "acc_norm_stderr": 0.043391383225798594 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.44770642201834865, + "acc_stderr": 0.021319754962425462, + "acc_norm": 0.44770642201834865, + "acc_norm_stderr": 0.021319754962425462 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.29365079365079366, + "acc_stderr": 0.04073524322147126, + "acc_norm": 0.29365079365079366, + "acc_norm_stderr": 0.04073524322147126 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.43137254901960786, + "acc_stderr": 0.02835895631342355, + "acc_norm": 0.43137254901960786, + "acc_norm_stderr": 0.02835895631342355 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6776859504132231, + "acc_stderr": 0.042664163633521685, + "acc_norm": 0.6776859504132231, + "acc_norm_stderr": 0.042664163633521685 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40131578947368424, + "acc_stderr": 0.03988903703336285, + "acc_norm": 0.40131578947368424, + "acc_norm_stderr": 0.03988903703336285 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.31699346405228757, + "acc_stderr": 0.018824219512706214, + "acc_norm": 0.31699346405228757, + "acc_norm_stderr": 0.018824219512706214 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32978723404255317, + "acc_stderr": 0.0280459469420424, + "acc_norm": 0.32978723404255317, + "acc_norm_stderr": 0.0280459469420424 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.33035714285714285, + "acc_stderr": 0.04464285714285714, + "acc_norm": 0.33035714285714285, + "acc_norm_stderr": 
0.04464285714285714 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.03293377139415191, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.03293377139415191 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.014333522059217892, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.014333522059217892 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.44, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3897058823529412, + "acc_stderr": 0.0296246635811597, + "acc_norm": 0.3897058823529412, + "acc_norm_stderr": 0.0296246635811597 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.43673469387755104, + "acc_stderr": 0.03175195237583322, + "acc_norm": 0.43673469387755104, + "acc_norm_stderr": 0.03175195237583322 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.4641350210970464, + "acc_stderr": 0.03246338898055659, + "acc_norm": 0.4641350210970464, + "acc_norm_stderr": 0.03246338898055659 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3005215123859192, + "acc_stderr": 0.011709918883039131, + "acc_norm": 0.3005215123859192, + "acc_norm_stderr": 0.011709918883039131 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.03308611113236434, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.03308611113236434 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.32727272727272727, + "acc_stderr": 0.03663974994391243, + "acc_norm": 0.32727272727272727, + "acc_norm_stderr": 0.03663974994391243 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2607099143206854, + "mc1_stderr": 
0.015368841620766379, + "mc2": 0.44720320938084884, + "mc2_stderr": 0.015529246019817096 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.40613931523022434, + "acc_stderr": 0.016884749503191392, + "acc_norm": 0.4639905548996458, + "acc_norm_stderr": 0.017145715365486657 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + 
"harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "LI-ST/Mistral-7B-ko-v0.5", + "model_sha": "b20a0853eaf043c7271df8b634b0fc5983b70b72", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/LI-ST/Mistral-7B-ko-v0.6/result_2023-11-28 02:26:55.json b/LI-ST/Mistral-7B-ko-v0.6/result_2023-11-28 02:26:55.json new file mode 100644 index 0000000000000000000000000000000000000000..9008c24d3db5e4f1026e5a161cdae67fdd6af7ab --- /dev/null +++ b/LI-ST/Mistral-7B-ko-v0.6/result_2023-11-28 02:26:55.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3293515358361775, + "acc_stderr": 0.013734057652635474, + "acc_norm": 0.37372013651877134, + "acc_norm_stderr": 0.014137708601759086 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3659629555865365, + "acc_stderr": 0.004807146925162055, + "acc_norm": 0.4735112527384983, + "acc_norm_stderr": 
0.004982774293927773 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.0381107966983353, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.0381107966983353 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5242718446601942, + "acc_stderr": 0.049449010929737795, + "acc_norm": 0.5242718446601942, + "acc_norm_stderr": 0.049449010929737795 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.44189016602809705, + "acc_stderr": 0.017758800534214417, + "acc_norm": 0.44189016602809705, + "acc_norm_stderr": 0.017758800534214417 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.042446332383532286, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.042446332383532286 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3404255319148936, + "acc_stderr": 0.030976692998534446, + "acc_norm": 0.3404255319148936, + "acc_norm_stderr": 0.030976692998534446 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4397590361445783, + "acc_stderr": 0.03864139923699122, + "acc_norm": 0.4397590361445783, + "acc_norm_stderr": 0.03864139923699122 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4180064308681672, + "acc_stderr": 0.02801365189199507, + "acc_norm": 0.4180064308681672, + "acc_norm_stderr": 0.02801365189199507 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.43946188340807174, + "acc_stderr": 0.03331092511038179, + "acc_norm": 0.43946188340807174, + "acc_norm_stderr": 0.03331092511038179 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.44274809160305345, + "acc_stderr": 0.043564472026650695, + "acc_norm": 0.44274809160305345, + "acc_norm_stderr": 0.043564472026650695 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 
0.04902071300001974 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.494949494949495, + "acc_stderr": 0.035621707606254015, + "acc_norm": 0.494949494949495, + "acc_norm_stderr": 0.035621707606254015 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.503448275862069, + "acc_stderr": 0.04166567577101579, + "acc_norm": 0.503448275862069, + "acc_norm_stderr": 0.04166567577101579 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.17647058823529413, + "acc_stderr": 0.0379328118530781, + "acc_norm": 0.17647058823529413, + "acc_norm_stderr": 0.0379328118530781 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.0322529423239964, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.0322529423239964 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.37948717948717947, + "acc_stderr": 0.024603626924097417, + "acc_norm": 0.37948717948717947, + "acc_norm_stderr": 0.024603626924097417 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.59, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.59, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4351851851851852, + "acc_stderr": 0.04792898170907062, + "acc_norm": 0.4351851851851852, + "acc_norm_stderr": 0.04792898170907062 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39901477832512317, + "acc_stderr": 0.03445487686264716, + "acc_norm": 0.39901477832512317, + "acc_norm_stderr": 0.03445487686264716 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4290322580645161, + "acc_stderr": 0.02815603653823321, + "acc_norm": 0.4290322580645161, + "acc_norm_stderr": 0.02815603653823321 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6452991452991453, + "acc_stderr": 0.03134250486245402, + "acc_norm": 
0.6452991452991453, + "acc_norm_stderr": 0.03134250486245402 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3471698113207547, + "acc_stderr": 0.029300101705549652, + "acc_norm": 0.3471698113207547, + "acc_norm_stderr": 0.029300101705549652 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4090909090909091, + "acc_stderr": 0.04709306978661895, + "acc_norm": 0.4090909090909091, + "acc_norm_stderr": 0.04709306978661895 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2814814814814815, + "acc_stderr": 0.027420019350945284, + "acc_norm": 0.2814814814814815, + "acc_norm_stderr": 0.027420019350945284 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + "acc_stderr": 0.03684881521389023, + "acc_norm": 0.2847682119205298, + "acc_norm_stderr": 0.03684881521389023 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6119402985074627, + "acc_stderr": 0.03445789964362749, + "acc_norm": 0.6119402985074627, + "acc_norm_stderr": 0.03445789964362749 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3352601156069364, + "acc_stderr": 0.035995863012470784, + "acc_norm": 0.3352601156069364, + "acc_norm_stderr": 0.035995863012470784 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3386243386243386, + "acc_stderr": 0.024373197867983056, + "acc_norm": 0.3386243386243386, + "acc_norm_stderr": 0.024373197867983056 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.03852084696008534, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.03852084696008534 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5, + "acc_stderr": 
0.026919095102908273, + "acc_norm": 0.5, + "acc_norm_stderr": 0.026919095102908273 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3987730061349693, + "acc_stderr": 0.03847021420456023, + "acc_norm": 0.3987730061349693, + "acc_norm_stderr": 0.03847021420456023 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4506172839506173, + "acc_stderr": 0.0276847214156562, + "acc_norm": 0.4506172839506173, + "acc_norm_stderr": 0.0276847214156562 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384739, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384739 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.47668393782383417, + "acc_stderr": 0.03604513672442205, + "acc_norm": 0.47668393782383417, + "acc_norm_stderr": 0.03604513672442205 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.042270544512322, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.042270544512322 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.41467889908256883, + "acc_stderr": 0.0211229032086026, + "acc_norm": 0.41467889908256883, + "acc_norm_stderr": 0.0211229032086026 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.039325376803928704, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.039325376803928704 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4215686274509804, + "acc_stderr": 0.02827549015679143, + "acc_norm": 0.4215686274509804, + "acc_norm_stderr": 0.02827549015679143 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6198347107438017, + "acc_stderr": 0.044313245019684304, + "acc_norm": 0.6198347107438017, + "acc_norm_stderr": 0.044313245019684304 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4276315789473684, + 
"acc_stderr": 0.04026097083296558, + "acc_norm": 0.4276315789473684, + "acc_norm_stderr": 0.04026097083296558 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3480392156862745, + "acc_stderr": 0.019270998708223977, + "acc_norm": 0.3480392156862745, + "acc_norm_stderr": 0.019270998708223977 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2978723404255319, + "acc_stderr": 0.027281608344469414, + "acc_norm": 0.2978723404255319, + "acc_norm_stderr": 0.027281608344469414 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.39285714285714285, + "acc_stderr": 0.04635550135609976, + "acc_norm": 0.39285714285714285, + "acc_norm_stderr": 0.04635550135609976 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.36574074074074076, + "acc_stderr": 0.03284738857647207, + "acc_norm": 0.36574074074074076, + "acc_norm_stderr": 0.03284738857647207 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.21675977653631284, + "acc_stderr": 0.013780598486443356, + "acc_norm": 0.21675977653631284, + "acc_norm_stderr": 0.013780598486443356 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.31985294117647056, + "acc_stderr": 0.028332959514031232, + "acc_norm": 0.31985294117647056, + "acc_norm_stderr": 0.028332959514031232 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.03168091161233883, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.03168091161233883 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5527426160337553, + "acc_stderr": 0.03236564251614192, + "acc_norm": 0.5527426160337553, + "acc_norm_stderr": 
0.03236564251614192 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3396349413298566, + "acc_stderr": 0.01209559250693197, + "acc_norm": 0.3396349413298566, + "acc_norm_stderr": 0.01209559250693197 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.3627450980392157, + "acc_stderr": 0.033744993563193555, + "acc_norm": 0.3627450980392157, + "acc_norm_stderr": 0.033744993563193555 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3696969696969697, + "acc_stderr": 0.03769430314512567, + "acc_norm": 0.3696969696969697, + "acc_norm_stderr": 0.03769430314512567 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2778457772337821, + "mc1_stderr": 0.015680929364024633, + "mc2": 0.4702791160430879, + "mc2_stderr": 0.015650617016562784 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.39433293978748524, + "acc_stderr": 0.01680209067489321, + "acc_norm": 0.4757969303423849, + "acc_norm_stderr": 0.017170202466520748 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + 
"harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "LI-ST/Mistral-7B-ko-v0.6", + "model_sha": "bd48b7c993d858d5e9dcf571e72247d303c5497d", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git 
a/LI-ST/Mistral-7B-ko-v0.7/result_2023-12-04 02:04:39.json b/LI-ST/Mistral-7B-ko-v0.7/result_2023-12-04 02:04:39.json new file mode 100644 index 0000000000000000000000000000000000000000..ce3599a32e4606ef081523d74cb22c28bc02c667 --- /dev/null +++ b/LI-ST/Mistral-7B-ko-v0.7/result_2023-12-04 02:04:39.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.35665529010238906, + "acc_stderr": 0.013998056902620199, + "acc_norm": 0.4044368600682594, + "acc_norm_stderr": 0.01434203648343617 + }, + "harness|ko_hellaswag|10": { + "acc": 0.36984664409480184, + "acc_stderr": 0.004817763581410233, + "acc_norm": 0.477096195976897, + "acc_norm_stderr": 0.004984543540932337 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.03811079669833531, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.03811079669833531 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.47572815533980584, + "acc_stderr": 0.04944901092973781, + "acc_norm": 0.47572815533980584, + "acc_norm_stderr": 0.04944901092973781 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4342273307790549, + "acc_stderr": 0.017724589389677785, + "acc_norm": 0.4342273307790549, + "acc_norm_stderr": 0.017724589389677785 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34074074074074073, + "acc_stderr": 0.040943762699967946, + "acc_norm": 0.34074074074074073, + "acc_norm_stderr": 0.040943762699967946 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.37872340425531914, + "acc_stderr": 0.03170995606040655, + "acc_norm": 0.37872340425531914, + "acc_norm_stderr": 0.03170995606040655 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3614457831325301, + "acc_stderr": 0.0374005938202932, + "acc_norm": 0.3614457831325301, + "acc_norm_stderr": 0.0374005938202932 + }, + 
"harness|ko_mmlu_philosophy|5": { + "acc": 0.42765273311897106, + "acc_stderr": 0.02809924077580956, + "acc_norm": 0.42765273311897106, + "acc_norm_stderr": 0.02809924077580956 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.38565022421524664, + "acc_stderr": 0.03266842214289201, + "acc_norm": 0.38565022421524664, + "acc_norm_stderr": 0.03266842214289201 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4961832061068702, + "acc_stderr": 0.043851623256015534, + "acc_norm": 0.4961832061068702, + "acc_norm_stderr": 0.043851623256015534 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5, + "acc_stderr": 0.035623524993954825, + "acc_norm": 0.5, + "acc_norm_stderr": 0.035623524993954825 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.43448275862068964, + "acc_stderr": 0.041307408795554966, + "acc_norm": 0.43448275862068964, + "acc_norm_stderr": 0.041307408795554966 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.04488482852329017, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.04488482852329017 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.032252942323996406, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.032252942323996406 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.40512820512820513, + "acc_stderr": 0.02489047176993815, + "acc_norm": 0.40512820512820513, + "acc_norm_stderr": 0.02489047176993815 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + 
"harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5277777777777778, + "acc_stderr": 0.04826217294139894, + "acc_norm": 0.5277777777777778, + "acc_norm_stderr": 0.04826217294139894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3448275862068966, + "acc_stderr": 0.03344283744280457, + "acc_norm": 0.3448275862068966, + "acc_norm_stderr": 0.03344283744280457 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.432258064516129, + "acc_stderr": 0.028181739720019416, + "acc_norm": 0.432258064516129, + "acc_norm_stderr": 0.028181739720019416 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6709401709401709, + "acc_stderr": 0.03078232157768817, + "acc_norm": 0.6709401709401709, + "acc_norm_stderr": 0.03078232157768817 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.43018867924528303, + "acc_stderr": 0.03047144586718323, + "acc_norm": 0.43018867924528303, + "acc_norm_stderr": 0.03047144586718323 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4818181818181818, + "acc_stderr": 0.04785964010794916, + "acc_norm": 0.4818181818181818, + "acc_norm_stderr": 0.04785964010794916 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.02784081149587193, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.02784081149587193 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + "acc_stderr": 0.037345356767871984, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.037345356767871984 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5373134328358209, + "acc_stderr": 0.035256751674679745, + "acc_norm": 0.5373134328358209, + "acc_norm_stderr": 0.035256751674679745 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3988439306358382, + "acc_stderr": 0.037336266553835096, + "acc_norm": 0.3988439306358382, + "acc_norm_stderr": 0.037336266553835096 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3148148148148148, + "acc_stderr": 
0.023919984164047732, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.023919984164047732 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3194444444444444, + "acc_stderr": 0.03899073687357335, + "acc_norm": 0.3194444444444444, + "acc_norm_stderr": 0.03899073687357335 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.3988439306358382, + "acc_stderr": 0.026362437574546545, + "acc_norm": 0.3988439306358382, + "acc_norm_stderr": 0.026362437574546545 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.43558282208588955, + "acc_stderr": 0.03895632464138937, + "acc_norm": 0.43558282208588955, + "acc_norm_stderr": 0.03895632464138937 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4104938271604938, + "acc_stderr": 0.027371350925124768, + "acc_norm": 0.4104938271604938, + "acc_norm_stderr": 0.027371350925124768 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5025906735751295, + "acc_stderr": 0.03608390745384487, + "acc_norm": 0.5025906735751295, + "acc_norm_stderr": 0.03608390745384487 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.04227054451232199, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.04227054451232199 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.46972477064220186, + "acc_stderr": 0.021397988604936965, + "acc_norm": 0.46972477064220186, + "acc_norm_stderr": 0.021397988604936965 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.373015873015873, + 
"acc_stderr": 0.04325506042017086, + "acc_norm": 0.373015873015873, + "acc_norm_stderr": 0.04325506042017086 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4869281045751634, + "acc_stderr": 0.028620130800700246, + "acc_norm": 0.4869281045751634, + "acc_norm_stderr": 0.028620130800700246 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5950413223140496, + "acc_stderr": 0.04481137755942469, + "acc_norm": 0.5950413223140496, + "acc_norm_stderr": 0.04481137755942469 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40789473684210525, + "acc_stderr": 0.03999309712777471, + "acc_norm": 0.40789473684210525, + "acc_norm_stderr": 0.03999309712777471 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.34967320261437906, + "acc_stderr": 0.01929196189506638, + "acc_norm": 0.34967320261437906, + "acc_norm_stderr": 0.01929196189506638 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.33687943262411346, + "acc_stderr": 0.02819553487396673, + "acc_norm": 0.33687943262411346, + "acc_norm_stderr": 0.02819553487396673 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.04547960999764376, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.04547960999764376 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.0340470532865388, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.0340470532865388 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.014333522059217892, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.014333522059217892 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + 
"harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.53, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.53, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.45588235294117646, + "acc_stderr": 0.030254372573976694, + "acc_norm": 0.45588235294117646, + "acc_norm_stderr": 0.030254372573976694 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5469387755102041, + "acc_stderr": 0.03186785930004128, + "acc_norm": 0.5469387755102041, + "acc_norm_stderr": 0.03186785930004128 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.45569620253164556, + "acc_stderr": 0.03241920684693335, + "acc_norm": 0.45569620253164556, + "acc_norm_stderr": 0.03241920684693335 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2796610169491525, + "acc_stderr": 0.011463397393861959, + "acc_norm": 0.2796610169491525, + "acc_norm_stderr": 0.011463397393861959 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.35294117647058826, + "acc_stderr": 0.033540924375915195, + "acc_norm": 0.35294117647058826, + "acc_norm_stderr": 0.033540924375915195 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3878787878787879, + "acc_stderr": 0.038049136539710114, + "acc_norm": 0.3878787878787879, + "acc_norm_stderr": 0.038049136539710114 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2717258261933905, + "mc1_stderr": 0.015572840452875837, + "mc2": 0.4469620706076587, + "mc2_stderr": 0.015389336522397358 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3990554899645809, + "acc_stderr": 0.016836377292849296, + "acc_norm": 0.4675324675324675, + "acc_norm_stderr": 0.01715407371668286 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + 
"harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + 
"harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "LI-ST/Mistral-7B-ko-v0.7", + "model_sha": "2c55047921103b0bebd83ffe967a97f94aa60e02", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/LI-ST/Mistral-7B-ko-v0.8/result_2023-12-15 01:39:49.json b/LI-ST/Mistral-7B-ko-v0.8/result_2023-12-15 01:39:49.json new file mode 100644 index 0000000000000000000000000000000000000000..550a39f091eac7832015a061d9d3438ae36315a1 --- /dev/null +++ b/LI-ST/Mistral-7B-ko-v0.8/result_2023-12-15 01:39:49.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3097269624573379, + "acc_stderr": 0.013512058415238361, + "acc_norm": 0.3626279863481229, + "acc_norm_stderr": 0.01404910656495502 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3571001792471619, + "acc_stderr": 0.004781654610857131, + "acc_norm": 0.4563831905994822, + "acc_norm_stderr": 0.004970759774676884 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4678362573099415, + "acc_stderr": 0.038268824176603676, + "acc_norm": 0.4678362573099415, + "acc_norm_stderr": 0.038268824176603676 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5922330097087378, + "acc_stderr": 0.048657775704107696, + "acc_norm": 0.5922330097087378, + "acc_norm_stderr": 0.048657775704107696 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.45721583652618136, + "acc_stderr": 0.01781438523853444, + "acc_norm": 0.45721583652618136, + "acc_norm_stderr": 
0.01781438523853444 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3851851851851852, + "acc_stderr": 0.042039210401562783, + "acc_norm": 0.3851851851851852, + "acc_norm_stderr": 0.042039210401562783 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.33617021276595743, + "acc_stderr": 0.030881618520676942, + "acc_norm": 0.33617021276595743, + "acc_norm_stderr": 0.030881618520676942 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39759036144578314, + "acc_stderr": 0.038099730845402184, + "acc_norm": 0.39759036144578314, + "acc_norm_stderr": 0.038099730845402184 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4340836012861736, + "acc_stderr": 0.02815023224453559, + "acc_norm": 0.4340836012861736, + "acc_norm_stderr": 0.02815023224453559 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.452914798206278, + "acc_stderr": 0.033408675019233246, + "acc_norm": 0.452914798206278, + "acc_norm_stderr": 0.033408675019233246 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.42748091603053434, + "acc_stderr": 0.043389203057924, + "acc_norm": 0.42748091603053434, + "acc_norm_stderr": 0.043389203057924 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.40404040404040403, + "acc_stderr": 0.03496130972056127, + "acc_norm": 0.40404040404040403, + "acc_norm_stderr": 0.03496130972056127 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4482758620689655, + "acc_stderr": 0.04144311810878151, + "acc_norm": 0.4482758620689655, + "acc_norm_stderr": 0.04144311810878151 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.04023382273617747, + "acc_norm": 0.20588235294117646, + 
"acc_norm_stderr": 0.04023382273617747 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3865546218487395, + "acc_stderr": 0.0316314580755238, + "acc_norm": 0.3865546218487395, + "acc_norm_stderr": 0.0316314580755238 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.382051282051282, + "acc_stderr": 0.024635549163908223, + "acc_norm": 0.382051282051282, + "acc_norm_stderr": 0.024635549163908223 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.59, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.59, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4537037037037037, + "acc_stderr": 0.04812917324536823, + "acc_norm": 0.4537037037037037, + "acc_norm_stderr": 0.04812917324536823 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.35960591133004927, + "acc_stderr": 0.03376458246509568, + "acc_norm": 0.35960591133004927, + "acc_norm_stderr": 0.03376458246509568 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4, + "acc_stderr": 0.027869320571664635, + "acc_norm": 0.4, + "acc_norm_stderr": 0.027869320571664635 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6623931623931624, + "acc_stderr": 0.030980296992618554, + "acc_norm": 0.6623931623931624, + "acc_norm_stderr": 0.030980296992618554 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4037735849056604, + "acc_stderr": 0.03019761160019795, + "acc_norm": 0.4037735849056604, + "acc_norm_stderr": 0.03019761160019795 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.45454545454545453, + "acc_stderr": 0.04769300568972742, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.04769300568972742 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3074074074074074, + "acc_stderr": 0.02813325257881564, + "acc_norm": 
0.3074074074074074, + "acc_norm_stderr": 0.02813325257881564 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.26490066225165565, + "acc_stderr": 0.03603038545360384, + "acc_norm": 0.26490066225165565, + "acc_norm_stderr": 0.03603038545360384 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5174129353233831, + "acc_stderr": 0.03533389234739245, + "acc_norm": 0.5174129353233831, + "acc_norm_stderr": 0.03533389234739245 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3352601156069364, + "acc_stderr": 0.03599586301247077, + "acc_norm": 0.3352601156069364, + "acc_norm_stderr": 0.03599586301247077 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.37566137566137564, + "acc_stderr": 0.02494236893115978, + "acc_norm": 0.37566137566137564, + "acc_norm_stderr": 0.02494236893115978 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3194444444444444, + "acc_stderr": 0.038990736873573344, + "acc_norm": 0.3194444444444444, + "acc_norm_stderr": 0.038990736873573344 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.56, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.56, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.49421965317919075, + "acc_stderr": 0.02691729617914911, + "acc_norm": 0.49421965317919075, + "acc_norm_stderr": 0.02691729617914911 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3987730061349693, + "acc_stderr": 0.038470214204560246, + "acc_norm": 0.3987730061349693, + "acc_norm_stderr": 0.038470214204560246 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4567901234567901, + "acc_stderr": 0.02771666165019404, + "acc_norm": 0.4567901234567901, + "acc_norm_stderr": 0.02771666165019404 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + 
"acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.42487046632124353, + "acc_stderr": 0.0356747133521254, + "acc_norm": 0.42487046632124353, + "acc_norm_stderr": 0.0356747133521254 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.21929824561403508, + "acc_stderr": 0.03892431106518753, + "acc_norm": 0.21929824561403508, + "acc_norm_stderr": 0.03892431106518753 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.4055045871559633, + "acc_stderr": 0.02105099799189684, + "acc_norm": 0.4055045871559633, + "acc_norm_stderr": 0.02105099799189684 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.042163702135578345, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.042163702135578345 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.37254901960784315, + "acc_stderr": 0.027684181883302884, + "acc_norm": 0.37254901960784315, + "acc_norm_stderr": 0.027684181883302884 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.628099173553719, + "acc_stderr": 0.04412015806624504, + "acc_norm": 0.628099173553719, + "acc_norm_stderr": 0.04412015806624504 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3684210526315789, + "acc_stderr": 0.03925523381052932, + "acc_norm": 0.3684210526315789, + "acc_norm_stderr": 0.03925523381052932 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.369281045751634, + "acc_stderr": 0.01952431674486635, + "acc_norm": 0.369281045751634, + "acc_norm_stderr": 0.01952431674486635 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32269503546099293, + "acc_stderr": 0.02788913930053478, + "acc_norm": 0.32269503546099293, + "acc_norm_stderr": 0.02788913930053478 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 
0.4017857142857143, + "acc_stderr": 0.04653333146973646, + "acc_norm": 0.4017857142857143, + "acc_norm_stderr": 0.04653333146973646 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.33796296296296297, + "acc_stderr": 0.03225941352631295, + "acc_norm": 0.33796296296296297, + "acc_norm_stderr": 0.03225941352631295 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23798882681564246, + "acc_stderr": 0.014242630070574892, + "acc_norm": 0.23798882681564246, + "acc_norm_stderr": 0.014242630070574892 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.52, + "acc_stderr": 0.05021167315686779, + "acc_norm": 0.52, + "acc_norm_stderr": 0.05021167315686779 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3161764705882353, + "acc_stderr": 0.02824568739146292, + "acc_norm": 0.3161764705882353, + "acc_norm_stderr": 0.02824568739146292 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2979591836734694, + "acc_stderr": 0.029279567411065677, + "acc_norm": 0.2979591836734694, + "acc_norm_stderr": 0.029279567411065677 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5527426160337553, + "acc_stderr": 0.03236564251614192, + "acc_norm": 0.5527426160337553, + "acc_norm_stderr": 0.03236564251614192 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.33833116036505867, + "acc_stderr": 0.012084265626344202, + "acc_norm": 0.33833116036505867, + "acc_norm_stderr": 0.012084265626344202 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.37254901960784315, + "acc_stderr": 0.03393388584958403, + "acc_norm": 0.37254901960784315, + "acc_norm_stderr": 0.03393388584958403 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3151515151515151, + "acc_stderr": 0.0362773057502241, + "acc_norm": 0.3151515151515151, + 
"acc_norm_stderr": 0.0362773057502241 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3072215422276622, + "mc1_stderr": 0.016150201321323002, + "mc2": 0.47435317492542983, + "mc2_stderr": 0.015496855268461061 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3907910271546635, + "acc_stderr": 0.01677529846510825, + "acc_norm": 0.45690672963400236, + "acc_norm_stderr": 0.017126389093086777 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + 
"harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "LI-ST/Mistral-7B-ko-v0.8", + "model_sha": "49bb6983b858b53fcd9bcb996bc33feeffc4d8a1", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/LI-ST/Mistral-7B-ko-v0.9/result_2023-12-15 01:39:58.json b/LI-ST/Mistral-7B-ko-v0.9/result_2023-12-15 01:39:58.json new file mode 100644 index 0000000000000000000000000000000000000000..c91cab077f69cbf15c68ef105e405b4d72d6e0fa --- /dev/null +++ b/LI-ST/Mistral-7B-ko-v0.9/result_2023-12-15 01:39:58.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.30119453924914674, + "acc_stderr": 0.013406741767847636, + "acc_norm": 0.3464163822525597, + "acc_norm_stderr": 0.013905011180063244 + }, + "harness|ko_hellaswag|10": { + "acc": 
0.35132443736307506, + "acc_stderr": 0.0047640845971769034, + "acc_norm": 0.4510057757418841, + "acc_norm_stderr": 0.004965768348628059 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4502923976608187, + "acc_stderr": 0.03815827365913236, + "acc_norm": 0.4502923976608187, + "acc_norm_stderr": 0.03815827365913236 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6019417475728155, + "acc_stderr": 0.04846748253977238, + "acc_norm": 0.6019417475728155, + "acc_norm_stderr": 0.04846748253977238 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4495530012771392, + "acc_stderr": 0.017788725283507337, + "acc_norm": 0.4495530012771392, + "acc_norm_stderr": 0.017788725283507337 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3851851851851852, + "acc_stderr": 0.042039210401562783, + "acc_norm": 0.3851851851851852, + "acc_norm_stderr": 0.042039210401562783 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3446808510638298, + "acc_stderr": 0.031068985963122155, + "acc_norm": 0.3446808510638298, + "acc_norm_stderr": 0.031068985963122155 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42771084337349397, + "acc_stderr": 0.03851597683718533, + "acc_norm": 0.42771084337349397, + "acc_norm_stderr": 0.03851597683718533 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4790996784565916, + "acc_stderr": 0.028373270961069414, + "acc_norm": 0.4790996784565916, + "acc_norm_stderr": 0.028373270961069414 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.452914798206278, + "acc_stderr": 0.03340867501923325, + "acc_norm": 0.452914798206278, + "acc_norm_stderr": 0.03340867501923325 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.40458015267175573, + "acc_stderr": 0.043046937953806645, + "acc_norm": 0.40458015267175573, + "acc_norm_stderr": 0.043046937953806645 + }, + 
"harness|ko_mmlu_medical_genetics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.42424242424242425, + "acc_stderr": 0.03521224908841583, + "acc_norm": 0.42424242424242425, + "acc_norm_stderr": 0.03521224908841583 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4827586206896552, + "acc_stderr": 0.04164188720169377, + "acc_norm": 0.4827586206896552, + "acc_norm_stderr": 0.04164188720169377 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.04389869956808777, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.04389869956808777 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.44537815126050423, + "acc_stderr": 0.0322841062671639, + "acc_norm": 0.44537815126050423, + "acc_norm_stderr": 0.0322841062671639 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.40512820512820513, + "acc_stderr": 0.024890471769938152, + "acc_norm": 0.40512820512820513, + "acc_norm_stderr": 0.024890471769938152 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.55, + "acc_stderr": 0.04999999999999999, + "acc_norm": 0.55, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.0471282125742677, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.0471282125742677 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39408866995073893, + "acc_stderr": 0.034381579670365446, + "acc_norm": 0.39408866995073893, + "acc_norm_stderr": 0.034381579670365446 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.36451612903225805, + "acc_stderr": 0.02737987122994325, + "acc_norm": 0.36451612903225805, + 
"acc_norm_stderr": 0.02737987122994325 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6452991452991453, + "acc_stderr": 0.03134250486245402, + "acc_norm": 0.6452991452991453, + "acc_norm_stderr": 0.03134250486245402 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.35471698113207545, + "acc_stderr": 0.029445175328199596, + "acc_norm": 0.35471698113207545, + "acc_norm_stderr": 0.029445175328199596 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.42727272727272725, + "acc_stderr": 0.047381987035454834, + "acc_norm": 0.42727272727272725, + "acc_norm_stderr": 0.047381987035454834 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3074074074074074, + "acc_stderr": 0.028133252578815632, + "acc_norm": 0.3074074074074074, + "acc_norm_stderr": 0.028133252578815632 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3443708609271523, + "acc_stderr": 0.038796870240733264, + "acc_norm": 0.3443708609271523, + "acc_norm_stderr": 0.038796870240733264 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5771144278606966, + "acc_stderr": 0.034932317774212816, + "acc_norm": 0.5771144278606966, + "acc_norm_stderr": 0.034932317774212816 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3699421965317919, + "acc_stderr": 0.036812296333943194, + "acc_norm": 0.3699421965317919, + "acc_norm_stderr": 0.036812296333943194 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.37566137566137564, + "acc_stderr": 0.02494236893115978, + "acc_norm": 0.37566137566137564, + "acc_norm_stderr": 0.02494236893115978 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3263888888888889, + "acc_stderr": 0.03921067198982266, + "acc_norm": 0.3263888888888889, + "acc_norm_stderr": 0.03921067198982266 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.53, + "acc_stderr": 
0.050161355804659205, + "acc_norm": 0.53, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.44508670520231214, + "acc_stderr": 0.026756255129663765, + "acc_norm": 0.44508670520231214, + "acc_norm_stderr": 0.026756255129663765 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3803680981595092, + "acc_stderr": 0.03814269893261837, + "acc_norm": 0.3803680981595092, + "acc_norm_stderr": 0.03814269893261837 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.42592592592592593, + "acc_stderr": 0.027513747284379417, + "acc_norm": 0.42592592592592593, + "acc_norm_stderr": 0.027513747284379417 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.40932642487046633, + "acc_stderr": 0.03548608168860806, + "acc_norm": 0.40932642487046633, + "acc_norm_stderr": 0.03548608168860806 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.21052631578947367, + "acc_stderr": 0.0383515395439942, + "acc_norm": 0.21052631578947367, + "acc_norm_stderr": 0.0383515395439942 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.44587155963302755, + "acc_stderr": 0.02131133500970858, + "acc_norm": 0.44587155963302755, + "acc_norm_stderr": 0.02131133500970858 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.04134913018303316, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.04134913018303316 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4150326797385621, + "acc_stderr": 0.0282135041778241, + "acc_norm": 0.4150326797385621, + "acc_norm_stderr": 0.0282135041778241 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 
0.6115702479338843, + "acc_stderr": 0.04449270350068383, + "acc_norm": 0.6115702479338843, + "acc_norm_stderr": 0.04449270350068383 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4144736842105263, + "acc_stderr": 0.04008973785779205, + "acc_norm": 0.4144736842105263, + "acc_norm_stderr": 0.04008973785779205 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.019722058939618068, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.019722058939618068 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3475177304964539, + "acc_stderr": 0.02840662780959095, + "acc_norm": 0.3475177304964539, + "acc_norm_stderr": 0.02840662780959095 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.29464285714285715, + "acc_stderr": 0.0432704093257873, + "acc_norm": 0.29464285714285715, + "acc_norm_stderr": 0.0432704093257873 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2916666666666667, + "acc_stderr": 0.03099866630456053, + "acc_norm": 0.2916666666666667, + "acc_norm_stderr": 0.03099866630456053 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23910614525139665, + "acc_stderr": 0.014265554192331149, + "acc_norm": 0.23910614525139665, + "acc_norm_stderr": 0.014265554192331149 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3161764705882353, + "acc_stderr": 0.028245687391462913, + "acc_norm": 0.3161764705882353, + "acc_norm_stderr": 0.028245687391462913 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.363265306122449, + "acc_stderr": 0.030789051139030806, + "acc_norm": 0.363265306122449, + "acc_norm_stderr": 
0.030789051139030806 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5443037974683544, + "acc_stderr": 0.032419206846933335, + "acc_norm": 0.5443037974683544, + "acc_norm_stderr": 0.032419206846933335 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3226857887874837, + "acc_stderr": 0.011940264193195974, + "acc_norm": 0.3226857887874837, + "acc_norm_stderr": 0.011940264193195974 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.3382352941176471, + "acc_stderr": 0.03320574612945432, + "acc_norm": 0.3382352941176471, + "acc_norm_stderr": 0.03320574612945432 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3151515151515151, + "acc_stderr": 0.0362773057502241, + "acc_norm": 0.3151515151515151, + "acc_norm_stderr": 0.0362773057502241 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2839657282741738, + "mc1_stderr": 0.015785370858396715, + "mc2": 0.4518577671193954, + "mc2_stderr": 0.015379505911432577 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.42266824085005905, + "acc_stderr": 0.016983506079577604, + "acc_norm": 0.5171192443919717, + "acc_norm_stderr": 0.01718027524608563 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + 
"harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "LI-ST/Mistral-7B-ko-v0.9", + "model_sha": "c2ede85533e0895505871be87fc34c1906433304", + 
"model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/LostCow/cowshed-base/result_2024-07-19 00:27:28.json b/LostCow/cowshed-base/result_2024-07-19 00:27:28.json new file mode 100644 index 0000000000000000000000000000000000000000..9e762a88f55d0cf5a9627fa4f6203cfc2a242989 --- /dev/null +++ b/LostCow/cowshed-base/result_2024-07-19 00:27:28.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.20392491467576793, + "acc_stderr": 0.011774262478702252, + "acc_norm": 0.26109215017064846, + "acc_norm_stderr": 0.012835523909473855 + }, + "harness|ko_hellaswag|10": { + "acc": 0.2901812387970524, + "acc_stderr": 0.004529183522542082, + "acc_norm": 0.3320055765783708, + "acc_norm_stderr": 0.00469970528097658 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.19883040935672514, + "acc_stderr": 0.030611116557432528, + "acc_norm": 0.19883040935672514, + "acc_norm_stderr": 0.030611116557432528 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.1650485436893204, + "acc_stderr": 0.036756688322331886, + "acc_norm": 0.1650485436893204, + "acc_norm_stderr": 0.036756688322331886 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.26309067688378035, + "acc_stderr": 0.015745497169049057, + "acc_norm": 0.26309067688378035, + "acc_norm_stderr": 0.015745497169049057 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.03591444084196969, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.03591444084196969 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.20851063829787234, + "acc_stderr": 0.02655698211783875, + "acc_norm": 0.20851063829787234, + "acc_norm_stderr": 0.02655698211783875 + }, + 
"harness|ko_mmlu_virology|5": { + "acc": 0.23493975903614459, + "acc_stderr": 0.03300533186128922, + "acc_norm": 0.23493975903614459, + "acc_norm_stderr": 0.03300533186128922 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2990353697749196, + "acc_stderr": 0.02600330111788514, + "acc_norm": 0.2990353697749196, + "acc_norm_stderr": 0.02600330111788514 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3721973094170404, + "acc_stderr": 0.0324430528300873, + "acc_norm": 0.3721973094170404, + "acc_norm_stderr": 0.0324430528300873 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.25190839694656486, + "acc_stderr": 0.038073871163060866, + "acc_norm": 0.25190839694656486, + "acc_norm_stderr": 0.038073871163060866 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.029620227874790465, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.029620227874790465 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.27586206896551724, + "acc_stderr": 0.03724563619774631, + "acc_norm": 0.27586206896551724, + "acc_norm_stderr": 0.03724563619774631 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.04158307533083286, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.04158307533083286 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3445378151260504, + "acc_stderr": 0.030868682604121622, + "acc_norm": 0.3445378151260504, + "acc_norm_stderr": 0.030868682604121622 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.35384615384615387, + "acc_stderr": 0.02424378399406217, + "acc_norm": 0.35384615384615387, + "acc_norm_stderr": 0.02424378399406217 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816505, + "acc_norm": 0.23, + 
"acc_norm_stderr": 0.04229525846816505 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.21296296296296297, + "acc_stderr": 0.03957835471980981, + "acc_norm": 0.21296296296296297, + "acc_norm_stderr": 0.03957835471980981 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.27586206896551724, + "acc_stderr": 0.03144712581678242, + "acc_norm": 0.27586206896551724, + "acc_norm_stderr": 0.03144712581678242 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2806451612903226, + "acc_stderr": 0.0255606047210229, + "acc_norm": 0.2806451612903226, + "acc_norm_stderr": 0.0255606047210229 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.19658119658119658, + "acc_stderr": 0.02603538609895129, + "acc_norm": 0.19658119658119658, + "acc_norm_stderr": 0.02603538609895129 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.22264150943396227, + "acc_stderr": 0.025604233470899105, + "acc_norm": 0.22264150943396227, + "acc_norm_stderr": 0.025604233470899105 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.24545454545454545, + "acc_stderr": 0.041220665028782855, + "acc_norm": 0.24545454545454545, + "acc_norm_stderr": 0.041220665028782855 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25555555555555554, + "acc_stderr": 0.026593939101844082, + "acc_norm": 0.25555555555555554, + "acc_norm_stderr": 0.026593939101844082 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2582781456953642, + "acc_stderr": 0.035737053147634576, + "acc_norm": 0.2582781456953642, + "acc_norm_stderr": 0.035737053147634576 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.23880597014925373, + "acc_stderr": 0.030147775935409224, + "acc_norm": 0.23880597014925373, + "acc_norm_stderr": 0.030147775935409224 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.32947976878612717, + 
"acc_stderr": 0.035839017547364134, + "acc_norm": 0.32947976878612717, + "acc_norm_stderr": 0.035839017547364134 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.25396825396825395, + "acc_stderr": 0.022418042891113946, + "acc_norm": 0.25396825396825395, + "acc_norm_stderr": 0.022418042891113946 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.25, + "acc_stderr": 0.03621034121889507, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03621034121889507 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2138728323699422, + "acc_stderr": 0.022075709251757173, + "acc_norm": 0.2138728323699422, + "acc_norm_stderr": 0.022075709251757173 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2331288343558282, + "acc_stderr": 0.0332201579577674, + "acc_norm": 0.2331288343558282, + "acc_norm_stderr": 0.0332201579577674 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.24691358024691357, + "acc_stderr": 0.02399350170904211, + "acc_norm": 0.24691358024691357, + "acc_norm_stderr": 0.02399350170904211 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.37305699481865284, + "acc_stderr": 0.03490205592048573, + "acc_norm": 0.37305699481865284, + "acc_norm_stderr": 0.03490205592048573 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2982456140350877, + "acc_stderr": 0.043036840335373173, + "acc_norm": 0.2982456140350877, + "acc_norm_stderr": 0.043036840335373173 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3119266055045872, + 
"acc_stderr": 0.019862967976707245, + "acc_norm": 0.3119266055045872, + "acc_norm_stderr": 0.019862967976707245 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.037184890068181146, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.037184890068181146 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.02495418432487991, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.02495418432487991 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.21, + "acc_stderr": 0.04093601807403326, + "acc_norm": 0.21, + "acc_norm_stderr": 0.04093601807403326 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.3305785123966942, + "acc_stderr": 0.042943408452120954, + "acc_norm": 0.3305785123966942, + "acc_norm_stderr": 0.042943408452120954 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.2894736842105263, + "acc_stderr": 0.036906779861372814, + "acc_norm": 0.2894736842105263, + "acc_norm_stderr": 0.036906779861372814 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.21895424836601307, + "acc_stderr": 0.016729937565537537, + "acc_norm": 0.21895424836601307, + "acc_norm_stderr": 0.016729937565537537 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.26595744680851063, + "acc_stderr": 0.026358065698880592, + "acc_norm": 0.26595744680851063, + "acc_norm_stderr": 0.026358065698880592 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2767857142857143, + "acc_stderr": 0.042466243366976256, + "acc_norm": 0.2767857142857143, + "acc_norm_stderr": 0.042466243366976256 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.0340470532865388, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.0340470532865388 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2435754189944134, + "acc_stderr": 0.014355911964767864, + "acc_norm": 0.2435754189944134, + "acc_norm_stderr": 0.014355911964767864 + }, + 
"harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4485294117647059, + "acc_stderr": 0.030211479609121593, + "acc_norm": 0.4485294117647059, + "acc_norm_stderr": 0.030211479609121593 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3836734693877551, + "acc_stderr": 0.031130880396235922, + "acc_norm": 0.3836734693877551, + "acc_norm_stderr": 0.031130880396235922 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2616033755274262, + "acc_stderr": 0.028609516716994934, + "acc_norm": 0.2616033755274262, + "acc_norm_stderr": 0.028609516716994934 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.24185136897001303, + "acc_stderr": 0.010936550813827066, + "acc_norm": 0.24185136897001303, + "acc_norm_stderr": 0.010936550813827066 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.030587591351604246, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.030587591351604246 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.03453131801885415, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.03453131801885415 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2386780905752754, + "mc1_stderr": 0.014922629695456418, + "mc2": 0.4146991343526123, + "mc2_stderr": 0.015256749671974849 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.26210153482880755, + "acc_stderr": 0.015119864670254158, + "acc_norm": 0.345926800472255, + "acc_norm_stderr": 0.01635385341434757 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + 
"harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + 
"harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "LostCow/cowshed-base", + "model_sha": "295ee88b9083c12e9df98e377995436a678faa85", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/LostCow/cowshed-mini/result_2024-07-13 05:59:39.json b/LostCow/cowshed-mini/result_2024-07-13 05:59:39.json new file mode 100644 index 0000000000000000000000000000000000000000..b00934836d0c1331b8173b1d0961746f824d1afe --- /dev/null +++ b/LostCow/cowshed-mini/result_2024-07-13 05:59:39.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2022184300341297, + "acc_stderr": 0.011737454431872104, + "acc_norm": 0.24744027303754265, + "acc_norm_stderr": 0.01261035266329267 + }, + "harness|ko_hellaswag|10": { + "acc": 0.27912766381198967, + "acc_stderr": 0.004476536569056585, + "acc_norm": 0.30462059350726944, + "acc_norm_stderr": 0.00459305936767621 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.17543859649122806, + "acc_stderr": 0.02917088550072766, + "acc_norm": 0.17543859649122806, + "acc_norm_stderr": 0.02917088550072766 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2524271844660194, + "acc_stderr": 0.04301250399690878, + "acc_norm": 0.2524271844660194, + "acc_norm_stderr": 0.04301250399690878 + }, + 
"harness|ko_mmlu_miscellaneous|5": { + "acc": 0.23243933588761176, + "acc_stderr": 0.015104550008905706, + "acc_norm": 0.23243933588761176, + "acc_norm_stderr": 0.015104550008905706 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.22962962962962963, + "acc_stderr": 0.036333844140734636, + "acc_norm": 0.22962962962962963, + "acc_norm_stderr": 0.036333844140734636 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.20425531914893616, + "acc_stderr": 0.02635515841334943, + "acc_norm": 0.20425531914893616, + "acc_norm_stderr": 0.02635515841334943 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.25903614457831325, + "acc_stderr": 0.03410646614071854, + "acc_norm": 0.25903614457831325, + "acc_norm_stderr": 0.03410646614071854 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2379421221864952, + "acc_stderr": 0.024185150647818707, + "acc_norm": 0.2379421221864952, + "acc_norm_stderr": 0.024185150647818707 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.10762331838565023, + "acc_stderr": 0.02079940008288, + "acc_norm": 0.10762331838565023, + "acc_norm_stderr": 0.02079940008288 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2824427480916031, + "acc_stderr": 0.03948406125768361, + "acc_norm": 0.2824427480916031, + "acc_norm_stderr": 0.03948406125768361 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.35858585858585856, + "acc_stderr": 0.03416903640391521, + "acc_norm": 0.35858585858585856, + "acc_norm_stderr": 0.03416903640391521 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2689655172413793, + "acc_stderr": 0.036951833116502325, + "acc_norm": 0.2689655172413793, + "acc_norm_stderr": 0.036951833116502325 
+ }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.18627450980392157, + "acc_stderr": 0.038739587141493524, + "acc_norm": 0.18627450980392157, + "acc_norm_stderr": 0.038739587141493524 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3445378151260504, + "acc_stderr": 0.030868682604121622, + "acc_norm": 0.3445378151260504, + "acc_norm_stderr": 0.030868682604121622 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.358974358974359, + "acc_stderr": 0.024321738484602364, + "acc_norm": 0.358974358974359, + "acc_norm_stderr": 0.024321738484602364 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.18, + "acc_stderr": 0.03861229196653694, + "acc_norm": 0.18, + "acc_norm_stderr": 0.03861229196653694 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.18, + "acc_stderr": 0.03861229196653697, + "acc_norm": 0.18, + "acc_norm_stderr": 0.03861229196653697 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.21296296296296297, + "acc_stderr": 0.03957835471980981, + "acc_norm": 0.21296296296296297, + "acc_norm_stderr": 0.03957835471980981 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.30049261083743845, + "acc_stderr": 0.03225799476233485, + "acc_norm": 0.30049261083743845, + "acc_norm_stderr": 0.03225799476233485 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2838709677419355, + "acc_stderr": 0.02564938106302926, + "acc_norm": 0.2838709677419355, + "acc_norm_stderr": 0.02564938106302926 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.19658119658119658, + "acc_stderr": 0.02603538609895129, + "acc_norm": 0.19658119658119658, + "acc_norm_stderr": 0.02603538609895129 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.23018867924528302, + "acc_stderr": 0.025907897122408173, + "acc_norm": 0.23018867924528302, + "acc_norm_stderr": 0.025907897122408173 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.24545454545454545, + "acc_stderr": 0.04122066502878284, + "acc_norm": 
0.24545454545454545, + "acc_norm_stderr": 0.04122066502878284 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.02684205787383371, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.02684205787383371 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33774834437086093, + "acc_stderr": 0.03861557546255168, + "acc_norm": 0.33774834437086093, + "acc_norm_stderr": 0.03861557546255168 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.27860696517412936, + "acc_stderr": 0.031700561834973086, + "acc_norm": 0.27860696517412936, + "acc_norm_stderr": 0.031700561834973086 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.35260115606936415, + "acc_stderr": 0.03643037168958548, + "acc_norm": 0.35260115606936415, + "acc_norm_stderr": 0.03643037168958548 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2566137566137566, + "acc_stderr": 0.022494510767503154, + "acc_norm": 0.2566137566137566, + "acc_norm_stderr": 0.022494510767503154 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.25, + "acc_stderr": 0.03621034121889507, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03621034121889507 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.20809248554913296, + "acc_stderr": 0.021855255263421806, + "acc_norm": 0.20809248554913296, + "acc_norm_stderr": 0.021855255263421806 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.24539877300613497, + "acc_stderr": 0.03380939813943354, + "acc_norm": 0.24539877300613497, + "acc_norm_stderr": 0.03380939813943354 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.22530864197530864, + "acc_stderr": 0.02324620264781975, + 
"acc_norm": 0.22530864197530864, + "acc_norm_stderr": 0.02324620264781975 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.37305699481865284, + "acc_stderr": 0.03490205592048573, + "acc_norm": 0.37305699481865284, + "acc_norm_stderr": 0.03490205592048573 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.042270544512322, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.042270544512322 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3504587155963303, + "acc_stderr": 0.02045607759982446, + "acc_norm": 0.3504587155963303, + "acc_norm_stderr": 0.02045607759982446 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.23015873015873015, + "acc_stderr": 0.03764950879790606, + "acc_norm": 0.23015873015873015, + "acc_norm_stderr": 0.03764950879790606 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.20915032679738563, + "acc_stderr": 0.023287685312334806, + "acc_norm": 0.20915032679738563, + "acc_norm_stderr": 0.023287685312334806 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036844, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036844 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.19008264462809918, + "acc_stderr": 0.035817969517092825, + "acc_norm": 0.19008264462809918, + "acc_norm_stderr": 0.035817969517092825 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.27631578947368424, + "acc_stderr": 0.03639057569952925, + "acc_norm": 0.27631578947368424, + "acc_norm_stderr": 0.03639057569952925 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.22058823529411764, + "acc_stderr": 0.016774672365468517, + "acc_norm": 0.22058823529411764, + "acc_norm_stderr": 0.016774672365468517 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 
0.24113475177304963, + "acc_stderr": 0.02551873104953776, + "acc_norm": 0.24113475177304963, + "acc_norm_stderr": 0.02551873104953776 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.16964285714285715, + "acc_stderr": 0.0356236785009539, + "acc_norm": 0.16964285714285715, + "acc_norm_stderr": 0.0356236785009539 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.0340470532865388, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.0340470532865388 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27150837988826815, + "acc_stderr": 0.014874252168095278, + "acc_norm": 0.27150837988826815, + "acc_norm_stderr": 0.014874252168095278 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4485294117647059, + "acc_stderr": 0.030211479609121593, + "acc_norm": 0.4485294117647059, + "acc_norm_stderr": 0.030211479609121593 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4, + "acc_stderr": 0.03136250240935892, + "acc_norm": 0.4, + "acc_norm_stderr": 0.03136250240935892 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.19831223628691982, + "acc_stderr": 0.02595502084162111, + "acc_norm": 0.19831223628691982, + "acc_norm_stderr": 0.02595502084162111 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.24511082138200782, + "acc_stderr": 0.010986307870045524, + "acc_norm": 0.24511082138200782, + "acc_norm_stderr": 0.010986307870045524 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.030587591351604246, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.030587591351604246 + }, + 
"harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.22424242424242424, + "acc_stderr": 0.032568666616811015, + "acc_norm": 0.22424242424242424, + "acc_norm_stderr": 0.032568666616811015 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24479804161566707, + "mc1_stderr": 0.015051869486715014, + "mc2": 0.42553741239544696, + "mc2_stderr": 0.015535376745852906 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2762691853600944, + "acc_stderr": 0.015373387500464478, + "acc_norm": 0.43565525383707204, + "acc_norm_stderr": 0.01704741522947634 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + 
"harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "LostCow/cowshed-mini", + "model_sha": "14c02a0fb081215334d0932b43391002c40226ed", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Loyola/Mistral-7b-ITmodel/result_2024-01-17 02:24:12.json b/Loyola/Mistral-7b-ITmodel/result_2024-01-17 02:24:12.json new file mode 100644 index 0000000000000000000000000000000000000000..1768f4a3478b2f3c60e53ad17670b64077447e0e --- /dev/null +++ b/Loyola/Mistral-7b-ITmodel/result_2024-01-17 02:24:12.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3447098976109215, + 
"acc_stderr": 0.01388881628678211, + "acc_norm": 0.3984641638225256, + "acc_norm_stderr": 0.014306946052735565 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3717386974706234, + "acc_stderr": 0.004822814501358899, + "acc_norm": 0.4774945230033858, + "acc_norm_stderr": 0.004984724235115118 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5321637426900585, + "acc_stderr": 0.038268824176603704, + "acc_norm": 0.5321637426900585, + "acc_norm_stderr": 0.038268824176603704 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5048543689320388, + "acc_stderr": 0.04950504382128921, + "acc_norm": 0.5048543689320388, + "acc_norm_stderr": 0.04950504382128921 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4559386973180077, + "acc_stderr": 0.017810403925435366, + "acc_norm": 0.4559386973180077, + "acc_norm_stderr": 0.017810403925435366 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.31851851851851853, + "acc_stderr": 0.04024778401977111, + "acc_norm": 0.31851851851851853, + "acc_norm_stderr": 0.04024778401977111 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39574468085106385, + "acc_stderr": 0.031967586978353627, + "acc_norm": 0.39574468085106385, + "acc_norm_stderr": 0.031967586978353627 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.41566265060240964, + "acc_stderr": 0.038367221765980515, + "acc_norm": 0.41566265060240964, + "acc_norm_stderr": 0.038367221765980515 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.47266881028938906, + "acc_stderr": 0.02835563356832818, + "acc_norm": 0.47266881028938906, + "acc_norm_stderr": 0.02835563356832818 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.47533632286995514, + "acc_stderr": 0.033516951676526276, + "acc_norm": 0.47533632286995514, + "acc_norm_stderr": 0.033516951676526276 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 
0.48854961832061067, + "acc_stderr": 0.043841400240780176, + "acc_norm": 0.48854961832061067, + "acc_norm_stderr": 0.043841400240780176 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.44, + "acc_stderr": 0.049888765156985905, + "acc_norm": 0.44, + "acc_norm_stderr": 0.049888765156985905 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5757575757575758, + "acc_stderr": 0.035212249088415866, + "acc_norm": 0.5757575757575758, + "acc_norm_stderr": 0.035212249088415866 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4482758620689655, + "acc_stderr": 0.04144311810878151, + "acc_norm": 0.4482758620689655, + "acc_norm_stderr": 0.04144311810878151 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.17647058823529413, + "acc_stderr": 0.03793281185307809, + "acc_norm": 0.17647058823529413, + "acc_norm_stderr": 0.03793281185307809 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.47478991596638653, + "acc_stderr": 0.03243718055137411, + "acc_norm": 0.47478991596638653, + "acc_norm_stderr": 0.03243718055137411 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4307692307692308, + "acc_stderr": 0.025106820660539746, + "acc_norm": 0.4307692307692308, + "acc_norm_stderr": 0.025106820660539746 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.59, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.59, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.21, + "acc_stderr": 0.04093601807403325, + "acc_norm": 0.21, + "acc_norm_stderr": 0.04093601807403325 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5462962962962963, + "acc_stderr": 0.04812917324536823, + "acc_norm": 0.5462962962962963, + "acc_norm_stderr": 0.04812917324536823 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4039408866995074, + "acc_stderr": 0.0345245390382204, + "acc_norm": 0.4039408866995074, + "acc_norm_stderr": 0.0345245390382204 + }, + 
"harness|ko_mmlu_high_school_biology|5": { + "acc": 0.47096774193548385, + "acc_stderr": 0.028396016402761005, + "acc_norm": 0.47096774193548385, + "acc_norm_stderr": 0.028396016402761005 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7051282051282052, + "acc_stderr": 0.02987257770889118, + "acc_norm": 0.7051282051282052, + "acc_norm_stderr": 0.02987257770889118 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4830188679245283, + "acc_stderr": 0.030755120364119898, + "acc_norm": 0.4830188679245283, + "acc_norm_stderr": 0.030755120364119898 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.0478833976870286, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.0478833976870286 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3296296296296296, + "acc_stderr": 0.02866120111652459, + "acc_norm": 0.3296296296296296, + "acc_norm_stderr": 0.02866120111652459 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.03780445850526732, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.03780445850526732 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6417910447761194, + "acc_stderr": 0.03390393042268814, + "acc_norm": 0.6417910447761194, + "acc_norm_stderr": 0.03390393042268814 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.42196531791907516, + "acc_stderr": 0.0376574669386515, + "acc_norm": 0.42196531791907516, + "acc_norm_stderr": 0.0376574669386515 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3544973544973545, + "acc_stderr": 0.024636830602842, + "acc_norm": 0.3544973544973545, + "acc_norm_stderr": 0.024636830602842 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3125, + "acc_stderr": 0.038760854559127644, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.038760854559127644 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 
0.04824181513244218 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.62, + "acc_stderr": 0.048783173121456344, + "acc_norm": 0.62, + "acc_norm_stderr": 0.048783173121456344 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.476878612716763, + "acc_stderr": 0.026890297881303128, + "acc_norm": 0.476878612716763, + "acc_norm_stderr": 0.026890297881303128 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4785276073619632, + "acc_stderr": 0.03924746876751129, + "acc_norm": 0.4785276073619632, + "acc_norm_stderr": 0.03924746876751129 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.41358024691358025, + "acc_stderr": 0.027402042040269952, + "acc_norm": 0.41358024691358025, + "acc_norm_stderr": 0.027402042040269952 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5181347150259067, + "acc_stderr": 0.036060650018329185, + "acc_norm": 0.5181347150259067, + "acc_norm_stderr": 0.036060650018329185 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3157894736842105, + "acc_stderr": 0.04372748290278007, + "acc_norm": 0.3157894736842105, + "acc_norm_stderr": 0.04372748290278007 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.4935779816513762, + "acc_stderr": 0.021435554820013077, + "acc_norm": 0.4935779816513762, + "acc_norm_stderr": 0.021435554820013077 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.04285714285714281, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.04285714285714281 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4803921568627451, + "acc_stderr": 0.028607893699576066, + "acc_norm": 0.4803921568627451, + "acc_norm_stderr": 0.028607893699576066 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.46, + 
"acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6611570247933884, + "acc_stderr": 0.04320767807536669, + "acc_norm": 0.6611570247933884, + "acc_norm_stderr": 0.04320767807536669 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.39473684210526316, + "acc_stderr": 0.03977749934622074, + "acc_norm": 0.39473684210526316, + "acc_norm_stderr": 0.03977749934622074 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3660130718954248, + "acc_stderr": 0.01948802574552966, + "acc_norm": 0.3660130718954248, + "acc_norm_stderr": 0.01948802574552966 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32269503546099293, + "acc_stderr": 0.02788913930053479, + "acc_norm": 0.32269503546099293, + "acc_norm_stderr": 0.02788913930053479 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4017857142857143, + "acc_stderr": 0.04653333146973646, + "acc_norm": 0.4017857142857143, + "acc_norm_stderr": 0.04653333146973646 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.033509916046960436, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.033509916046960436 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.32625698324022345, + "acc_stderr": 0.01568044151888918, + "acc_norm": 0.32625698324022345, + "acc_norm_stderr": 0.01568044151888918 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.40808823529411764, + "acc_stderr": 0.029855261393483927, + "acc_norm": 0.40808823529411764, + "acc_norm_stderr": 0.029855261393483927 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5183673469387755, + 
"acc_stderr": 0.031987615467631264, + "acc_norm": 0.5183673469387755, + "acc_norm_stderr": 0.031987615467631264 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6033755274261603, + "acc_stderr": 0.03184399873811224, + "acc_norm": 0.6033755274261603, + "acc_norm_stderr": 0.03184399873811224 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.35528031290743156, + "acc_stderr": 0.012223623364044043, + "acc_norm": 0.35528031290743156, + "acc_norm_stderr": 0.012223623364044043 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5147058823529411, + "acc_stderr": 0.035077938347913236, + "acc_norm": 0.5147058823529411, + "acc_norm_stderr": 0.035077938347913236 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4484848484848485, + "acc_stderr": 0.03883565977956929, + "acc_norm": 0.4484848484848485, + "acc_norm_stderr": 0.03883565977956929 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.32068543451652387, + "mc1_stderr": 0.016339170373280906, + "mc2": 0.4733613258729537, + "mc2_stderr": 0.015571052806018785 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3624557260920897, + "acc_stderr": 0.01652713124045372, + "acc_norm": 0.45336481700118064, + "acc_norm_stderr": 0.01711541822522686 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + 
"harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + 
"model_name": "Loyola/Mistral-7b-ITmodel", + "model_sha": "19d9919d9624af34763c8263e3ca64d3a038c596", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/MLP-KTLim/llama-3-Korean-Bllossom-8B/result_2024-07-04 08:22:05.json b/MLP-KTLim/llama-3-Korean-Bllossom-8B/result_2024-07-04 08:22:05.json new file mode 100644 index 0000000000000000000000000000000000000000..f50a1ffa169c7aea7b0481916749032e9aeae5d6 --- /dev/null +++ b/MLP-KTLim/llama-3-Korean-Bllossom-8B/result_2024-07-04 08:22:05.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.34982935153583616, + "acc_stderr": 0.013936809212158287, + "acc_norm": 0.4044368600682594, + "acc_norm_stderr": 0.014342036483436172 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3754232224656443, + "acc_stderr": 0.00483242363059319, + "acc_norm": 0.47699661422027484, + "acc_norm_stderr": 0.004984497871025246 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5263157894736842, + "acc_stderr": 0.03829509868994727, + "acc_norm": 0.5263157894736842, + "acc_norm_stderr": 0.03829509868994727 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6116504854368932, + "acc_stderr": 0.04825729337356389, + "acc_norm": 0.6116504854368932, + "acc_norm_stderr": 0.04825729337356389 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.45721583652618136, + "acc_stderr": 0.017814385238534427, + "acc_norm": 0.45721583652618136, + "acc_norm_stderr": 0.017814385238534427 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.28888888888888886, + "acc_stderr": 0.03915450630414251, + "acc_norm": 0.28888888888888886, + "acc_norm_stderr": 0.03915450630414251 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + 
"acc": 0.41702127659574467, + "acc_stderr": 0.03223276266711712, + "acc_norm": 0.41702127659574467, + "acc_norm_stderr": 0.03223276266711712 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3795180722891566, + "acc_stderr": 0.037777988227480165, + "acc_norm": 0.3795180722891566, + "acc_norm_stderr": 0.037777988227480165 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5305466237942122, + "acc_stderr": 0.028345045864840625, + "acc_norm": 0.5305466237942122, + "acc_norm_stderr": 0.028345045864840625 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.47533632286995514, + "acc_stderr": 0.033516951676526276, + "acc_norm": 0.47533632286995514, + "acc_norm_stderr": 0.033516951676526276 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5648854961832062, + "acc_stderr": 0.043482080516448585, + "acc_norm": 0.5648854961832062, + "acc_norm_stderr": 0.043482080516448585 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5202020202020202, + "acc_stderr": 0.035594435655639196, + "acc_norm": 0.5202020202020202, + "acc_norm_stderr": 0.035594435655639196 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4896551724137931, + "acc_stderr": 0.041657747757287644, + "acc_norm": 0.4896551724137931, + "acc_norm_stderr": 0.041657747757287644 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3235294117647059, + "acc_stderr": 0.04655010411319616, + "acc_norm": 0.3235294117647059, + "acc_norm_stderr": 0.04655010411319616 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5294117647058824, + "acc_stderr": 0.03242225027115006, + "acc_norm": 0.5294117647058824, + "acc_norm_stderr": 0.03242225027115006 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5, + "acc_stderr": 0.02535100632816969, + "acc_norm": 0.5, + "acc_norm_stderr": 0.02535100632816969 + }, + 
"harness|ko_mmlu_computer_security|5": { + "acc": 0.57, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.57, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5833333333333334, + "acc_stderr": 0.04766075165356461, + "acc_norm": 0.5833333333333334, + "acc_norm_stderr": 0.04766075165356461 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4482758620689655, + "acc_stderr": 0.03499113137676744, + "acc_norm": 0.4482758620689655, + "acc_norm_stderr": 0.03499113137676744 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4967741935483871, + "acc_stderr": 0.02844341422643833, + "acc_norm": 0.4967741935483871, + "acc_norm_stderr": 0.02844341422643833 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7222222222222222, + "acc_stderr": 0.02934311479809445, + "acc_norm": 0.7222222222222222, + "acc_norm_stderr": 0.02934311479809445 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.49433962264150944, + "acc_stderr": 0.030770900763851302, + "acc_norm": 0.49433962264150944, + "acc_norm_stderr": 0.030770900763851302 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4909090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.4909090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.34814814814814815, + "acc_stderr": 0.02904560029061626, + "acc_norm": 0.34814814814814815, + "acc_norm_stderr": 0.02904560029061626 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.36423841059602646, + "acc_stderr": 0.03929111781242742, + "acc_norm": 0.36423841059602646, + "acc_norm_stderr": 0.03929111781242742 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6069651741293532, + "acc_stderr": 0.0345368246603156, + "acc_norm": 0.6069651741293532, + "acc_norm_stderr": 
0.0345368246603156 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.44508670520231214, + "acc_stderr": 0.03789401760283647, + "acc_norm": 0.44508670520231214, + "acc_norm_stderr": 0.03789401760283647 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3862433862433862, + "acc_stderr": 0.025075981767601688, + "acc_norm": 0.3862433862433862, + "acc_norm_stderr": 0.025075981767601688 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4236111111111111, + "acc_stderr": 0.04132125019723369, + "acc_norm": 0.4236111111111111, + "acc_norm_stderr": 0.04132125019723369 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.7, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.7, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5404624277456648, + "acc_stderr": 0.026830805998952243, + "acc_norm": 0.5404624277456648, + "acc_norm_stderr": 0.026830805998952243 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.48466257668711654, + "acc_stderr": 0.039265223787088424, + "acc_norm": 0.48466257668711654, + "acc_norm_stderr": 0.039265223787088424 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4783950617283951, + "acc_stderr": 0.027794760105008746, + "acc_norm": 0.4783950617283951, + "acc_norm_stderr": 0.027794760105008746 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5181347150259067, + "acc_stderr": 0.036060650018329185, + "acc_norm": 0.5181347150259067, + "acc_norm_stderr": 0.036060650018329185 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.30701754385964913, + "acc_stderr": 0.043391383225798594, + "acc_norm": 
0.30701754385964913, + "acc_norm_stderr": 0.043391383225798594 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5412844036697247, + "acc_stderr": 0.021364122533881688, + "acc_norm": 0.5412844036697247, + "acc_norm_stderr": 0.021364122533881688 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3412698412698413, + "acc_stderr": 0.04240799327574925, + "acc_norm": 0.3412698412698413, + "acc_norm_stderr": 0.04240799327574925 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.565359477124183, + "acc_stderr": 0.028384256704883037, + "acc_norm": 0.565359477124183, + "acc_norm_stderr": 0.028384256704883037 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.55, + "acc_stderr": 0.04999999999999999, + "acc_norm": 0.55, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6776859504132231, + "acc_stderr": 0.04266416363352168, + "acc_norm": 0.6776859504132231, + "acc_norm_stderr": 0.04266416363352168 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.506578947368421, + "acc_stderr": 0.040685900502249704, + "acc_norm": 0.506578947368421, + "acc_norm_stderr": 0.040685900502249704 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.39705882352941174, + "acc_stderr": 0.01979448890002411, + "acc_norm": 0.39705882352941174, + "acc_norm_stderr": 0.01979448890002411 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3617021276595745, + "acc_stderr": 0.028663820147199495, + "acc_norm": 0.3617021276595745, + "acc_norm_stderr": 0.028663820147199495 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4107142857142857, + "acc_stderr": 0.04669510663875192, + "acc_norm": 0.4107142857142857, + "acc_norm_stderr": 0.04669510663875192 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4212962962962963, + "acc_stderr": 0.033674621388960775, + "acc_norm": 0.4212962962962963, + "acc_norm_stderr": 0.033674621388960775 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 
0.25027932960893856, + "acc_stderr": 0.014487500852850426, + "acc_norm": 0.25027932960893856, + "acc_norm_stderr": 0.014487500852850426 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.0498887651569859, + "acc_norm": 0.44, + "acc_norm_stderr": 0.0498887651569859 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.59, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.59, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.39338235294117646, + "acc_stderr": 0.02967428828131118, + "acc_norm": 0.39338235294117646, + "acc_norm_stderr": 0.02967428828131118 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6326530612244898, + "acc_stderr": 0.030862144921087565, + "acc_norm": 0.6326530612244898, + "acc_norm_stderr": 0.030862144921087565 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.620253164556962, + "acc_stderr": 0.0315918875296585, + "acc_norm": 0.620253164556962, + "acc_norm_stderr": 0.0315918875296585 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.36766623207301175, + "acc_stderr": 0.012314845910071705, + "acc_norm": 0.36766623207301175, + "acc_norm_stderr": 0.012314845910071705 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4852941176470588, + "acc_stderr": 0.03507793834791324, + "acc_norm": 0.4852941176470588, + "acc_norm_stderr": 0.03507793834791324 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6060606060606061, + "acc_stderr": 0.038154943086889305, + "acc_norm": 0.6060606060606061, + "acc_norm_stderr": 0.038154943086889305 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.29008567931456547, + "mc1_stderr": 0.01588623687420952, + "mc2": 0.45766451994678464, + "mc2_stderr": 0.015764513292821112 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.48288075560802834, + "acc_stderr": 0.017180275246085626, + "acc_norm": 0.5242030696576151, + "acc_norm_stderr": 0.017170202466520748 + 
} + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + 
"harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "MLP-KTLim/llama-3-Korean-Bllossom-8B", + "model_sha": "8a738f9f622ffc2b0a4a6b81dabbca80406248bf", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/MNC-Jihun/Mistral-11B-OP-u1k-ver0.7/result_2023-11-01 02:26:39.json b/MNC-Jihun/Mistral-11B-OP-u1k-ver0.7/result_2023-11-01 02:26:39.json new file mode 100644 index 0000000000000000000000000000000000000000..88abf96728e14a928278a47577ce14bc5403b820 --- /dev/null +++ b/MNC-Jihun/Mistral-11B-OP-u1k-ver0.7/result_2023-11-01 02:26:39.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.34726962457337884, + "acc_stderr": 0.013913034529620451, + "acc_norm": 0.39078498293515357, + "acc_norm_stderr": 0.01425856388051378 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3799044015136427, + "acc_stderr": 0.00484370855038653, + "acc_norm": 0.4960167297351125, + "acc_norm_stderr": 0.004989623068778803 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4853801169590643, + "acc_stderr": 0.038331852752130205, + "acc_norm": 0.4853801169590643, + "acc_norm_stderr": 0.038331852752130205 + }, + 
"harness|ko_mmlu_management|5": { + "acc": 0.5922330097087378, + "acc_stderr": 0.04865777570410768, + "acc_norm": 0.5922330097087378, + "acc_norm_stderr": 0.04865777570410768 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4674329501915709, + "acc_stderr": 0.017841995750520857, + "acc_norm": 0.4674329501915709, + "acc_norm_stderr": 0.017841995750520857 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.43703703703703706, + "acc_stderr": 0.04284958639753399, + "acc_norm": 0.43703703703703706, + "acc_norm_stderr": 0.04284958639753399 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3617021276595745, + "acc_stderr": 0.0314108219759624, + "acc_norm": 0.3617021276595745, + "acc_norm_stderr": 0.0314108219759624 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3795180722891566, + "acc_stderr": 0.037777988227480165, + "acc_norm": 0.3795180722891566, + "acc_norm_stderr": 0.037777988227480165 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.49517684887459806, + "acc_stderr": 0.02839677044411129, + "acc_norm": 0.49517684887459806, + "acc_norm_stderr": 0.02839677044411129 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.39461883408071746, + "acc_stderr": 0.03280400504755292, + "acc_norm": 0.39461883408071746, + "acc_norm_stderr": 0.03280400504755292 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4351145038167939, + "acc_stderr": 0.04348208051644858, + "acc_norm": 0.4351145038167939, + "acc_norm_stderr": 0.04348208051644858 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5656565656565656, + "acc_stderr": 0.03531505879359183, + "acc_norm": 0.5656565656565656, + "acc_norm_stderr": 0.03531505879359183 + }, + 
"harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4689655172413793, + "acc_stderr": 0.04158632762097828, + "acc_norm": 0.4689655172413793, + "acc_norm_stderr": 0.04158632762097828 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3627450980392157, + "acc_stderr": 0.04784060704105653, + "acc_norm": 0.3627450980392157, + "acc_norm_stderr": 0.04784060704105653 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5546218487394958, + "acc_stderr": 0.0322841062671639, + "acc_norm": 0.5546218487394958, + "acc_norm_stderr": 0.0322841062671639 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.45384615384615384, + "acc_stderr": 0.025242770987126177, + "acc_norm": 0.45384615384615384, + "acc_norm_stderr": 0.025242770987126177 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.55, + "acc_stderr": 0.04999999999999999, + "acc_norm": 0.55, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.04803752235190193, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.04803752235190193 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3694581280788177, + "acc_stderr": 0.033959703819985754, + "acc_norm": 0.3694581280788177, + "acc_norm_stderr": 0.033959703819985754 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4645161290322581, + "acc_stderr": 0.028372287797962952, + "acc_norm": 0.4645161290322581, + "acc_norm_stderr": 0.028372287797962952 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7435897435897436, + "acc_stderr": 0.028605953702004253, + "acc_norm": 0.7435897435897436, + "acc_norm_stderr": 0.028605953702004253 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.46037735849056605, + "acc_stderr": 0.030676096599389174, + "acc_norm": 0.46037735849056605, + 
"acc_norm_stderr": 0.030676096599389174 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.04769300568972745, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.04769300568972745 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.02730914058823018, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.02730914058823018 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.25165562913907286, + "acc_stderr": 0.03543304234389985, + "acc_norm": 0.25165562913907286, + "acc_norm_stderr": 0.03543304234389985 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5970149253731343, + "acc_stderr": 0.034683432951111266, + "acc_norm": 0.5970149253731343, + "acc_norm_stderr": 0.034683432951111266 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4277456647398844, + "acc_stderr": 0.037724468575180276, + "acc_norm": 0.4277456647398844, + "acc_norm_stderr": 0.037724468575180276 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.36772486772486773, + "acc_stderr": 0.02483383982556242, + "acc_norm": 0.36772486772486773, + "acc_norm_stderr": 0.02483383982556242 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3194444444444444, + "acc_stderr": 0.038990736873573344, + "acc_norm": 0.3194444444444444, + "acc_norm_stderr": 0.038990736873573344 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.49710982658959535, + "acc_stderr": 0.02691864538323901, + "acc_norm": 0.49710982658959535, + "acc_norm_stderr": 0.02691864538323901 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.49693251533742333, + "acc_stderr": 0.03928297078179663, 
+ "acc_norm": 0.49693251533742333, + "acc_norm_stderr": 0.03928297078179663 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.44753086419753085, + "acc_stderr": 0.027667138569422704, + "acc_norm": 0.44753086419753085, + "acc_norm_stderr": 0.027667138569422704 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5284974093264249, + "acc_stderr": 0.036025735712884414, + "acc_norm": 0.5284974093264249, + "acc_norm_stderr": 0.036025735712884414 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3157894736842105, + "acc_stderr": 0.043727482902780085, + "acc_norm": 0.3157894736842105, + "acc_norm_stderr": 0.043727482902780085 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5009174311926605, + "acc_stderr": 0.021437287056051215, + "acc_norm": 0.5009174311926605, + "acc_norm_stderr": 0.021437287056051215 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4126984126984127, + "acc_stderr": 0.04403438954768177, + "acc_norm": 0.4126984126984127, + "acc_norm_stderr": 0.04403438954768177 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5130718954248366, + "acc_stderr": 0.028620130800700246, + "acc_norm": 0.5130718954248366, + "acc_norm_stderr": 0.028620130800700246 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.043913262867240704, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.043913262867240704 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40131578947368424, + "acc_stderr": 0.03988903703336285, + "acc_norm": 0.40131578947368424, + "acc_norm_stderr": 0.03988903703336285 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.41013071895424835, 
+ "acc_stderr": 0.019898412717635903, + "acc_norm": 0.41013071895424835, + "acc_norm_stderr": 0.019898412717635903 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3049645390070922, + "acc_stderr": 0.02746470844202213, + "acc_norm": 0.3049645390070922, + "acc_norm_stderr": 0.02746470844202213 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3482142857142857, + "acc_stderr": 0.04521829902833587, + "acc_norm": 0.3482142857142857, + "acc_norm_stderr": 0.04521829902833587 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.03388857118502325, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.03388857118502325 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2569832402234637, + "acc_stderr": 0.014614465821966339, + "acc_norm": 0.2569832402234637, + "acc_norm_stderr": 0.014614465821966339 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.41544117647058826, + "acc_stderr": 0.029935342707877743, + "acc_norm": 0.41544117647058826, + "acc_norm_stderr": 0.029935342707877743 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5469387755102041, + "acc_stderr": 0.031867859300041275, + "acc_norm": 0.5469387755102041, + "acc_norm_stderr": 0.031867859300041275 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5527426160337553, + "acc_stderr": 0.03236564251614192, + "acc_norm": 0.5527426160337553, + "acc_norm_stderr": 0.03236564251614192 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.33116036505867014, + "acc_stderr": 0.012020128195985757, + "acc_norm": 0.33116036505867014, + "acc_norm_stderr": 0.012020128195985757 + }, + 
"harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.47549019607843135, + "acc_stderr": 0.03505093194348798, + "acc_norm": 0.47549019607843135, + "acc_norm_stderr": 0.03505093194348798 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.038956580652718446, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.038956580652718446 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3157894736842105, + "mc1_stderr": 0.01627228795791694, + "mc2": 0.49501961999008254, + "mc2_stderr": 0.01565387033555305 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5029515938606848, + "acc_stderr": 0.017190054580194694, + "acc_norm": 0.5348288075560803, + "acc_norm_stderr": 0.017148598015747422 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 
1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "MNC-Jihun/Mistral-11B-OP-u1k-ver0.7", + "model_sha": "c5549370a409724d0d5c4a4b071cb2b5aa85c184", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/MNC-Jihun/Mistral-11B-Omni-OP-u1k-ver0.5/result_2023-10-30 03:47:03.json b/MNC-Jihun/Mistral-11B-Omni-OP-u1k-ver0.5/result_2023-10-30 03:47:03.json new file mode 100644 index 
0000000000000000000000000000000000000000..0adc67fa897fac2e9bd44d6632ed858bb45de735 --- /dev/null +++ b/MNC-Jihun/Mistral-11B-Omni-OP-u1k-ver0.5/result_2023-10-30 03:47:03.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.34897610921501704, + "acc_stderr": 0.013928933461382494, + "acc_norm": 0.3984641638225256, + "acc_norm_stderr": 0.014306946052735567 + }, + "harness|ko_hellaswag|10": { + "acc": 0.37502489543915557, + "acc_stderr": 0.004831399218500244, + "acc_norm": 0.47849034056960765, + "acc_norm_stderr": 0.00498516207433611 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5146198830409356, + "acc_stderr": 0.038331852752130254, + "acc_norm": 0.5146198830409356, + "acc_norm_stderr": 0.038331852752130254 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5631067961165048, + "acc_stderr": 0.04911147107365777, + "acc_norm": 0.5631067961165048, + "acc_norm_stderr": 0.04911147107365777 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.45977011494252873, + "acc_stderr": 0.01782199409693353, + "acc_norm": 0.45977011494252873, + "acc_norm_stderr": 0.01782199409693353 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45185185185185184, + "acc_stderr": 0.042992689054808624, + "acc_norm": 0.45185185185185184, + "acc_norm_stderr": 0.042992689054808624 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39574468085106385, + "acc_stderr": 0.031967586978353627, + "acc_norm": 0.39574468085106385, + "acc_norm_stderr": 0.031967586978353627 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4578313253012048, + "acc_stderr": 0.03878626771002361, + "acc_norm": 0.4578313253012048, + "acc_norm_stderr": 0.03878626771002361 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.45016077170418006, + "acc_stderr": 0.028256660723360184, + "acc_norm": 0.45016077170418006, + 
"acc_norm_stderr": 0.028256660723360184 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3991031390134529, + "acc_stderr": 0.03286745312567961, + "acc_norm": 0.3991031390134529, + "acc_norm_stderr": 0.03286745312567961 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4732824427480916, + "acc_stderr": 0.04379024936553894, + "acc_norm": 0.4732824427480916, + "acc_norm_stderr": 0.04379024936553894 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5303030303030303, + "acc_stderr": 0.03555804051763929, + "acc_norm": 0.5303030303030303, + "acc_norm_stderr": 0.03555804051763929 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.46206896551724136, + "acc_stderr": 0.04154659671707546, + "acc_norm": 0.46206896551724136, + "acc_norm_stderr": 0.04154659671707546 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.04158307533083286, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.04158307533083286 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4579831932773109, + "acc_stderr": 0.03236361111951941, + "acc_norm": 0.4579831932773109, + "acc_norm_stderr": 0.03236361111951941 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4025641025641026, + "acc_stderr": 0.02486499515976776, + "acc_norm": 0.4025641025641026, + "acc_norm_stderr": 0.02486499515976776 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.57, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.57, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.04826217294139894, + "acc_norm": 
0.4722222222222222, + "acc_norm_stderr": 0.04826217294139894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.03465304488406796, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.03465304488406796 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.45806451612903226, + "acc_stderr": 0.028343787250540636, + "acc_norm": 0.45806451612903226, + "acc_norm_stderr": 0.028343787250540636 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7008547008547008, + "acc_stderr": 0.02999695185834948, + "acc_norm": 0.7008547008547008, + "acc_norm_stderr": 0.02999695185834948 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4490566037735849, + "acc_stderr": 0.030612730713641095, + "acc_norm": 0.4490566037735849, + "acc_norm_stderr": 0.030612730713641095 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5363636363636364, + "acc_stderr": 0.04776449162396197, + "acc_norm": 0.5363636363636364, + "acc_norm_stderr": 0.04776449162396197 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3111111111111111, + "acc_stderr": 0.028226446749683515, + "acc_norm": 0.3111111111111111, + "acc_norm_stderr": 0.028226446749683515 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.24503311258278146, + "acc_stderr": 0.03511807571804724, + "acc_norm": 0.24503311258278146, + "acc_norm_stderr": 0.03511807571804724 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5422885572139303, + "acc_stderr": 0.035228658640995975, + "acc_norm": 0.5422885572139303, + "acc_norm_stderr": 0.035228658640995975 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3699421965317919, + "acc_stderr": 0.03681229633394319, + "acc_norm": 0.3699421965317919, + "acc_norm_stderr": 0.03681229633394319 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.34656084656084657, + "acc_stderr": 0.024508777521028424, + "acc_norm": 0.34656084656084657, + "acc_norm_stderr": 0.024508777521028424 + }, + 
"harness|ko_mmlu_college_biology|5": { + "acc": 0.2638888888888889, + "acc_stderr": 0.03685651095897532, + "acc_norm": 0.2638888888888889, + "acc_norm_stderr": 0.03685651095897532 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.63, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.63, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.476878612716763, + "acc_stderr": 0.026890297881303125, + "acc_norm": 0.476878612716763, + "acc_norm_stderr": 0.026890297881303125 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4171779141104294, + "acc_stderr": 0.038741028598180814, + "acc_norm": 0.4171779141104294, + "acc_norm_stderr": 0.038741028598180814 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.44135802469135804, + "acc_stderr": 0.027628737155668777, + "acc_norm": 0.44135802469135804, + "acc_norm_stderr": 0.027628737155668777 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.04793724854411021, + "acc_norm": 0.35, + "acc_norm_stderr": 0.04793724854411021 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5025906735751295, + "acc_stderr": 0.03608390745384487, + "acc_norm": 0.5025906735751295, + "acc_norm_stderr": 0.03608390745384487 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2982456140350877, + "acc_stderr": 0.043036840335373173, + "acc_norm": 0.2982456140350877, + "acc_norm_stderr": 0.043036840335373173 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.46055045871559636, + "acc_stderr": 0.0213704946099951, + "acc_norm": 0.46055045871559636, + "acc_norm_stderr": 0.0213704946099951 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.04285714285714281, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.04285714285714281 + 
}, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4869281045751634, + "acc_stderr": 0.028620130800700246, + "acc_norm": 0.4869281045751634, + "acc_norm_stderr": 0.028620130800700246 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6198347107438017, + "acc_stderr": 0.044313245019684304, + "acc_norm": 0.6198347107438017, + "acc_norm_stderr": 0.044313245019684304 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3815789473684211, + "acc_stderr": 0.03953173377749194, + "acc_norm": 0.3815789473684211, + "acc_norm_stderr": 0.03953173377749194 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3660130718954248, + "acc_stderr": 0.01948802574552967, + "acc_norm": 0.3660130718954248, + "acc_norm_stderr": 0.01948802574552967 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.33687943262411346, + "acc_stderr": 0.02819553487396673, + "acc_norm": 0.33687943262411346, + "acc_norm_stderr": 0.02819553487396673 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.04547960999764376, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.04547960999764376 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.33796296296296297, + "acc_stderr": 0.03225941352631295, + "acc_norm": 0.33796296296296297, + "acc_norm_stderr": 0.03225941352631295 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2569832402234637, + "acc_stderr": 0.014614465821966346, + "acc_norm": 0.2569832402234637, + "acc_norm_stderr": 0.014614465821966346 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.57, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.57, + "acc_norm_stderr": 
0.049756985195624284 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.33455882352941174, + "acc_stderr": 0.028661996202335307, + "acc_norm": 0.33455882352941174, + "acc_norm_stderr": 0.028661996202335307 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5102040816326531, + "acc_stderr": 0.03200255347893783, + "acc_norm": 0.5102040816326531, + "acc_norm_stderr": 0.03200255347893783 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5316455696202531, + "acc_stderr": 0.032481974005110756, + "acc_norm": 0.5316455696202531, + "acc_norm_stderr": 0.032481974005110756 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.29921773142112124, + "acc_stderr": 0.011695374630696047, + "acc_norm": 0.29921773142112124, + "acc_norm_stderr": 0.011695374630696047 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.45588235294117646, + "acc_stderr": 0.03495624522015474, + "acc_norm": 0.45588235294117646, + "acc_norm_stderr": 0.03495624522015474 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4484848484848485, + "acc_stderr": 0.038835659779569286, + "acc_norm": 0.4484848484848485, + "acc_norm_stderr": 0.038835659779569286 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2802937576499388, + "mc1_stderr": 0.015723139524608742, + "mc2": 0.47047609010515296, + "mc2_stderr": 0.016013828931677482 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4025974025974026, + "acc_stderr": 0.016861020486407776, + "acc_norm": 0.42384887839433294, + "acc_norm_stderr": 0.016989810834628253 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + 
"harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + 
"harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "MNC-Jihun/Mistral-11B-Omni-OP-u1k-ver0.5", + "model_sha": "8c58d63d92483624ec8b73e6b3ba93338d1abf86", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/MNC-Jihun/Mistral-7B-A-u0.5-b2-ver0.4/result_2023-10-31 05:51:25.json b/MNC-Jihun/Mistral-7B-A-u0.5-b2-ver0.4/result_2023-10-31 05:51:25.json new file mode 100644 index 0000000000000000000000000000000000000000..2e92149702faf454b20f14dc8a4ddc7b8393ac71 --- /dev/null +++ b/MNC-Jihun/Mistral-7B-A-u0.5-b2-ver0.4/result_2023-10-31 05:51:25.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3242320819112628, + "acc_stderr": 0.013678810399518815, + "acc_norm": 0.3771331058020478, + "acc_norm_stderr": 0.01416336689619259 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3686516630153356, + "acc_stderr": 0.00481453264257465, + "acc_norm": 0.4765982871937861, + "acc_norm_stderr": 0.004984313205791442 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.47953216374269003, + "acc_stderr": 0.038316105328219316, + "acc_norm": 0.47953216374269003, + "acc_norm_stderr": 0.038316105328219316 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5728155339805825, + "acc_stderr": 0.04897957737781168, + "acc_norm": 0.5728155339805825, + "acc_norm_stderr": 0.04897957737781168 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.44572158365261816, + "acc_stderr": 0.0177742972824795, + "acc_norm": 0.44572158365261816, + "acc_norm_stderr": 0.0177742972824795 + }, + "harness|ko_mmlu_anatomy|5": { + 
"acc": 0.4222222222222222, + "acc_stderr": 0.04266763404099582, + "acc_norm": 0.4222222222222222, + "acc_norm_stderr": 0.04266763404099582 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.40425531914893614, + "acc_stderr": 0.03208115750788684, + "acc_norm": 0.40425531914893614, + "acc_norm_stderr": 0.03208115750788684 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.43373493975903615, + "acc_stderr": 0.03858158940685515, + "acc_norm": 0.43373493975903615, + "acc_norm_stderr": 0.03858158940685515 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4855305466237942, + "acc_stderr": 0.02838619808417768, + "acc_norm": 0.4855305466237942, + "acc_norm_stderr": 0.02838619808417768 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4349775784753363, + "acc_stderr": 0.03327283370271344, + "acc_norm": 0.4349775784753363, + "acc_norm_stderr": 0.03327283370271344 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4580152671755725, + "acc_stderr": 0.04369802690578756, + "acc_norm": 0.4580152671755725, + "acc_norm_stderr": 0.04369802690578756 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5606060606060606, + "acc_stderr": 0.0353608594752948, + "acc_norm": 0.5606060606060606, + "acc_norm_stderr": 0.0353608594752948 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.47586206896551725, + "acc_stderr": 0.04161808503501528, + "acc_norm": 0.47586206896551725, + "acc_norm_stderr": 0.04161808503501528 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.043364327079931785, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.043364327079931785 + }, + 
"harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5252100840336135, + "acc_stderr": 0.03243718055137411, + "acc_norm": 0.5252100840336135, + "acc_norm_stderr": 0.03243718055137411 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4897435897435897, + "acc_stderr": 0.025345672221942374, + "acc_norm": 0.4897435897435897, + "acc_norm_stderr": 0.025345672221942374 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.56, + "acc_stderr": 0.0498887651569859, + "acc_norm": 0.56, + "acc_norm_stderr": 0.0498887651569859 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5370370370370371, + "acc_stderr": 0.04820403072760627, + "acc_norm": 0.5370370370370371, + "acc_norm_stderr": 0.04820403072760627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.458128078817734, + "acc_stderr": 0.035056301407857426, + "acc_norm": 0.458128078817734, + "acc_norm_stderr": 0.035056301407857426 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4870967741935484, + "acc_stderr": 0.028434533152681855, + "acc_norm": 0.4870967741935484, + "acc_norm_stderr": 0.028434533152681855 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7435897435897436, + "acc_stderr": 0.028605953702004243, + "acc_norm": 0.7435897435897436, + "acc_norm_stderr": 0.028605953702004243 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.42641509433962266, + "acc_stderr": 0.030437794342983045, + "acc_norm": 0.42641509433962266, + "acc_norm_stderr": 0.030437794342983045 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5363636363636364, + "acc_stderr": 0.04776449162396197, + "acc_norm": 0.5363636363636364, + "acc_norm_stderr": 0.04776449162396197 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.35555555555555557, + "acc_stderr": 0.029185714949857403, + "acc_norm": 0.35555555555555557, 
+ "acc_norm_stderr": 0.029185714949857403 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2781456953642384, + "acc_stderr": 0.03658603262763743, + "acc_norm": 0.2781456953642384, + "acc_norm_stderr": 0.03658603262763743 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6019900497512438, + "acc_stderr": 0.034611994290400135, + "acc_norm": 0.6019900497512438, + "acc_norm_stderr": 0.034611994290400135 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3930635838150289, + "acc_stderr": 0.03724249595817729, + "acc_norm": 0.3930635838150289, + "acc_norm_stderr": 0.03724249595817729 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.373015873015873, + "acc_stderr": 0.02490699045899257, + "acc_norm": 0.373015873015873, + "acc_norm_stderr": 0.02490699045899257 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3194444444444444, + "acc_stderr": 0.038990736873573344, + "acc_norm": 0.3194444444444444, + "acc_norm_stderr": 0.038990736873573344 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237101, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237101 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5057803468208093, + "acc_stderr": 0.026917296179149123, + "acc_norm": 0.5057803468208093, + "acc_norm_stderr": 0.026917296179149123 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5214723926380368, + "acc_stderr": 0.0392474687675113, + "acc_norm": 0.5214723926380368, + "acc_norm_stderr": 0.0392474687675113 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4506172839506173, + "acc_stderr": 0.027684721415656206, + "acc_norm": 0.4506172839506173, + "acc_norm_stderr": 0.027684721415656206 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + 
"acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.48704663212435234, + "acc_stderr": 0.03607228061047749, + "acc_norm": 0.48704663212435234, + "acc_norm_stderr": 0.03607228061047749 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3508771929824561, + "acc_stderr": 0.04489539350270698, + "acc_norm": 0.3508771929824561, + "acc_norm_stderr": 0.04489539350270698 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.48807339449541287, + "acc_stderr": 0.021431223617362233, + "acc_norm": 0.48807339449541287, + "acc_norm_stderr": 0.021431223617362233 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.36507936507936506, + "acc_stderr": 0.043062412591271526, + "acc_norm": 0.36507936507936506, + "acc_norm_stderr": 0.043062412591271526 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4934640522875817, + "acc_stderr": 0.028627470550556047, + "acc_norm": 0.4934640522875817, + "acc_norm_stderr": 0.028627470550556047 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7107438016528925, + "acc_stderr": 0.04139112727635463, + "acc_norm": 0.7107438016528925, + "acc_norm_stderr": 0.04139112727635463 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4342105263157895, + "acc_stderr": 0.0403356566784832, + "acc_norm": 0.4342105263157895, + "acc_norm_stderr": 0.0403356566784832 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4035947712418301, + "acc_stderr": 0.019848280168401147, + "acc_norm": 0.4035947712418301, + "acc_norm_stderr": 0.019848280168401147 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3617021276595745, + "acc_stderr": 0.028663820147199502, + "acc_norm": 0.3617021276595745, + "acc_norm_stderr": 0.028663820147199502 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4107142857142857, 
+ "acc_stderr": 0.04669510663875191, + "acc_norm": 0.4107142857142857, + "acc_norm_stderr": 0.04669510663875191 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.42592592592592593, + "acc_stderr": 0.03372343271653063, + "acc_norm": 0.42592592592592593, + "acc_norm_stderr": 0.03372343271653063 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.3307262569832402, + "acc_stderr": 0.01573502625896612, + "acc_norm": 0.3307262569832402, + "acc_norm_stderr": 0.01573502625896612 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.41911764705882354, + "acc_stderr": 0.02997280717046463, + "acc_norm": 0.41911764705882354, + "acc_norm_stderr": 0.02997280717046463 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5469387755102041, + "acc_stderr": 0.031867859300041275, + "acc_norm": 0.5469387755102041, + "acc_norm_stderr": 0.031867859300041275 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5611814345991561, + "acc_stderr": 0.032302649315470375, + "acc_norm": 0.5611814345991561, + "acc_norm_stderr": 0.032302649315470375 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3318122555410691, + "acc_stderr": 0.012026088259897634, + "acc_norm": 0.3318122555410691, + "acc_norm_stderr": 0.012026088259897634 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.47058823529411764, + "acc_stderr": 0.03503235296367992, + "acc_norm": 0.47058823529411764, + "acc_norm_stderr": 0.03503235296367992 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4727272727272727, + "acc_stderr": 0.03898531605579419, + "acc_norm": 0.4727272727272727, + "acc_norm_stderr": 
0.03898531605579419 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.29865361077111385, + "mc1_stderr": 0.016021570613768545, + "mc2": 0.47266598912504365, + "mc2_stderr": 0.015392669159401157 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5041322314049587, + "acc_stderr": 0.01718976703213082, + "acc_norm": 0.5501770956316411, + "acc_norm_stderr": 0.01710357334382571 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + 
"harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "MNC-Jihun/Mistral-7B-A-u0.5-b2-ver0.4", + "model_sha": "2274c77af5e028132156c1737de2a39d39bbff01", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/MNC-Jihun/Mistral-7B-AO-u0.5-b2-ver0.4/result_2023-10-31 05:52:02.json b/MNC-Jihun/Mistral-7B-AO-u0.5-b2-ver0.4/result_2023-10-31 05:52:02.json new file mode 100644 index 0000000000000000000000000000000000000000..074081c4d6ae6f22b544060888c8541cd68d34e6 --- /dev/null +++ b/MNC-Jihun/Mistral-7B-AO-u0.5-b2-ver0.4/result_2023-10-31 05:52:02.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.34044368600682595, + "acc_stderr": 0.013847460518892978, + "acc_norm": 0.38993174061433444, + "acc_norm_stderr": 
0.01425295984889289 + }, + "harness|ko_hellaswag|10": { + "acc": 0.37621987651862177, + "acc_stderr": 0.004834461997944863, + "acc_norm": 0.4880501892053376, + "acc_norm_stderr": 0.004988356146499017 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.47368421052631576, + "acc_stderr": 0.038295098689947286, + "acc_norm": 0.47368421052631576, + "acc_norm_stderr": 0.038295098689947286 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5922330097087378, + "acc_stderr": 0.048657775704107675, + "acc_norm": 0.5922330097087378, + "acc_norm_stderr": 0.048657775704107675 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.46871008939974457, + "acc_stderr": 0.017844918090468544, + "acc_norm": 0.46871008939974457, + "acc_norm_stderr": 0.017844918090468544 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4222222222222222, + "acc_stderr": 0.04266763404099582, + "acc_norm": 0.4222222222222222, + "acc_norm_stderr": 0.04266763404099582 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.41702127659574467, + "acc_stderr": 0.03223276266711712, + "acc_norm": 0.41702127659574467, + "acc_norm_stderr": 0.03223276266711712 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42168674698795183, + "acc_stderr": 0.03844453181770917, + "acc_norm": 0.42168674698795183, + "acc_norm_stderr": 0.03844453181770917 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4855305466237942, + "acc_stderr": 0.02838619808417768, + "acc_norm": 0.4855305466237942, + "acc_norm_stderr": 0.02838619808417768 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4439461883408072, + "acc_stderr": 0.03334625674242728, + "acc_norm": 0.4439461883408072, + "acc_norm_stderr": 0.03334625674242728 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48854961832061067, + "acc_stderr": 0.043841400240780176, + "acc_norm": 0.48854961832061067, + 
"acc_norm_stderr": 0.043841400240780176 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.03547601494006938, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.03547601494006938 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4482758620689655, + "acc_stderr": 0.04144311810878151, + "acc_norm": 0.4482758620689655, + "acc_norm_stderr": 0.04144311810878151 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.04488482852329017, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.04488482852329017 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.542016806722689, + "acc_stderr": 0.03236361111951941, + "acc_norm": 0.542016806722689, + "acc_norm_stderr": 0.03236361111951941 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5, + "acc_stderr": 0.02535100632816969, + "acc_norm": 0.5, + "acc_norm_stderr": 0.02535100632816969 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.04793724854411018, + "acc_norm": 0.35, + "acc_norm_stderr": 0.04793724854411018 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5462962962962963, + "acc_stderr": 0.04812917324536823, + "acc_norm": 0.5462962962962963, + "acc_norm_stderr": 0.04812917324536823 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.43842364532019706, + "acc_stderr": 0.03491207857486519, + "acc_norm": 0.43842364532019706, + "acc_norm_stderr": 0.03491207857486519 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4967741935483871, + "acc_stderr": 0.02844341422643833, + "acc_norm": 0.4967741935483871, + 
"acc_norm_stderr": 0.02844341422643833 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7478632478632479, + "acc_stderr": 0.02844796547623102, + "acc_norm": 0.7478632478632479, + "acc_norm_stderr": 0.02844796547623102 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.43018867924528303, + "acc_stderr": 0.030471445867183235, + "acc_norm": 0.43018867924528303, + "acc_norm_stderr": 0.030471445867183235 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.0478833976870286, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.0478833976870286 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.35555555555555557, + "acc_stderr": 0.029185714949857403, + "acc_norm": 0.35555555555555557, + "acc_norm_stderr": 0.029185714949857403 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.24503311258278146, + "acc_stderr": 0.03511807571804724, + "acc_norm": 0.24503311258278146, + "acc_norm_stderr": 0.03511807571804724 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6019900497512438, + "acc_stderr": 0.034611994290400135, + "acc_norm": 0.6019900497512438, + "acc_norm_stderr": 0.034611994290400135 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.41040462427745666, + "acc_stderr": 0.03750757044895538, + "acc_norm": 0.41040462427745666, + "acc_norm_stderr": 0.03750757044895538 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3835978835978836, + "acc_stderr": 0.025043757318520193, + "acc_norm": 0.3835978835978836, + "acc_norm_stderr": 0.025043757318520193 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.039420826399272135, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.039420826399272135 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.6, + "acc_stderr": 
0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5057803468208093, + "acc_stderr": 0.026917296179149123, + "acc_norm": 0.5057803468208093, + "acc_norm_stderr": 0.026917296179149123 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5460122699386503, + "acc_stderr": 0.0391170190467718, + "acc_norm": 0.5460122699386503, + "acc_norm_stderr": 0.0391170190467718 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.45987654320987653, + "acc_stderr": 0.027731022753539277, + "acc_norm": 0.45987654320987653, + "acc_norm_stderr": 0.027731022753539277 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5440414507772021, + "acc_stderr": 0.035944137112724366, + "acc_norm": 0.5440414507772021, + "acc_norm_stderr": 0.035944137112724366 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3157894736842105, + "acc_stderr": 0.043727482902780085, + "acc_norm": 0.3157894736842105, + "acc_norm_stderr": 0.043727482902780085 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.48073394495412847, + "acc_stderr": 0.02142140298254888, + "acc_norm": 0.48073394495412847, + "acc_norm_stderr": 0.02142140298254888 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.04426266681379909, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.04426266681379909 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.48366013071895425, + "acc_stderr": 0.028614624752805407, + "acc_norm": 0.48366013071895425, + "acc_norm_stderr": 0.028614624752805407 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 
0.7107438016528925, + "acc_stderr": 0.041391127276354626, + "acc_norm": 0.7107438016528925, + "acc_norm_stderr": 0.041391127276354626 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4342105263157895, + "acc_stderr": 0.040335656678483205, + "acc_norm": 0.4342105263157895, + "acc_norm_stderr": 0.040335656678483205 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.42483660130718953, + "acc_stderr": 0.01999797303545833, + "acc_norm": 0.42483660130718953, + "acc_norm_stderr": 0.01999797303545833 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.35815602836879434, + "acc_stderr": 0.028602085862759412, + "acc_norm": 0.35815602836879434, + "acc_norm_stderr": 0.028602085862759412 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.41964285714285715, + "acc_stderr": 0.04684099321077106, + "acc_norm": 0.41964285714285715, + "acc_norm_stderr": 0.04684099321077106 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4398148148148148, + "acc_stderr": 0.03385177976044811, + "acc_norm": 0.4398148148148148, + "acc_norm_stderr": 0.03385177976044811 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.36089385474860336, + "acc_stderr": 0.016062290671110476, + "acc_norm": 0.36089385474860336, + "acc_norm_stderr": 0.016062290671110476 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.43014705882352944, + "acc_stderr": 0.030074971917302875, + "acc_norm": 0.43014705882352944, + "acc_norm_stderr": 0.030074971917302875 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.563265306122449, + "acc_stderr": 0.03175195237583323, + "acc_norm": 0.563265306122449, + "acc_norm_stderr": 0.03175195237583323 + }, + 
"harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5864978902953587, + "acc_stderr": 0.03205649904851858, + "acc_norm": 0.5864978902953587, + "acc_norm_stderr": 0.03205649904851858 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.33572359843546284, + "acc_stderr": 0.012061304157664626, + "acc_norm": 0.33572359843546284, + "acc_norm_stderr": 0.012061304157664626 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.47549019607843135, + "acc_stderr": 0.03505093194348798, + "acc_norm": 0.47549019607843135, + "acc_norm_stderr": 0.03505093194348798 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4727272727272727, + "acc_stderr": 0.03898531605579419, + "acc_norm": 0.4727272727272727, + "acc_norm_stderr": 0.03898531605579419 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3023255813953488, + "mc1_stderr": 0.016077509266133033, + "mc2": 0.4785073157857354, + "mc2_stderr": 0.015443979160746298 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.51357733175915, + "acc_stderr": 0.01718401506040146, + "acc_norm": 0.5761511216056671, + "acc_norm_stderr": 0.01698981083462825 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + 
"harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "MNC-Jihun/Mistral-7B-AO-u0.5-b2-ver0.4", + "model_sha": "de25cb8c3f247d1b0ce3189b9ee3595db7dbbe1f", + "model_dtype": 
"torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/MNC-Jihun/Mistral-7B-OP-u1k-ver0.6/result_2023-10-30 03:47:23.json b/MNC-Jihun/Mistral-7B-OP-u1k-ver0.6/result_2023-10-30 03:47:23.json new file mode 100644 index 0000000000000000000000000000000000000000..7272b31b42918f942c2a9990c80c870a549e3ca2 --- /dev/null +++ b/MNC-Jihun/Mistral-7B-OP-u1k-ver0.6/result_2023-10-30 03:47:23.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3455631399317406, + "acc_stderr": 0.013896938461145687, + "acc_norm": 0.38139931740614336, + "acc_norm_stderr": 0.014194389086685265 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3619796853216491, + "acc_stderr": 0.004795908282584555, + "acc_norm": 0.44761999601672975, + "acc_norm_stderr": 0.0049623252978409915 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.47368421052631576, + "acc_stderr": 0.03829509868994727, + "acc_norm": 0.47368421052631576, + "acc_norm_stderr": 0.03829509868994727 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5436893203883495, + "acc_stderr": 0.049318019942204146, + "acc_norm": 0.5436893203883495, + "acc_norm_stderr": 0.049318019942204146 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4725415070242657, + "acc_stderr": 0.01785298126663395, + "acc_norm": 0.4725415070242657, + "acc_norm_stderr": 0.01785298126663395 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37777777777777777, + "acc_stderr": 0.04188307537595853, + "acc_norm": 0.37777777777777777, + "acc_norm_stderr": 0.04188307537595853 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39148936170212767, + "acc_stderr": 0.03190701242326812, + "acc_norm": 0.39148936170212767, + "acc_norm_stderr": 
0.03190701242326812 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4397590361445783, + "acc_stderr": 0.03864139923699122, + "acc_norm": 0.4397590361445783, + "acc_norm_stderr": 0.03864139923699122 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4855305466237942, + "acc_stderr": 0.028386198084177673, + "acc_norm": 0.4855305466237942, + "acc_norm_stderr": 0.028386198084177673 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4170403587443946, + "acc_stderr": 0.03309266936071722, + "acc_norm": 0.4170403587443946, + "acc_norm_stderr": 0.03309266936071722 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4580152671755725, + "acc_stderr": 0.04369802690578756, + "acc_norm": 0.4580152671755725, + "acc_norm_stderr": 0.04369802690578756 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5151515151515151, + "acc_stderr": 0.03560716516531061, + "acc_norm": 0.5151515151515151, + "acc_norm_stderr": 0.03560716516531061 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4068965517241379, + "acc_stderr": 0.040937939812662374, + "acc_norm": 0.4068965517241379, + "acc_norm_stderr": 0.040937939812662374 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.04220773659171452, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.04220773659171452 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4579831932773109, + "acc_stderr": 0.03236361111951941, + "acc_norm": 0.4579831932773109, + "acc_norm_stderr": 0.03236361111951941 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.37435897435897436, + "acc_stderr": 0.024537591572830524, + "acc_norm": 0.37435897435897436, + "acc_norm_stderr": 0.024537591572830524 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.52, + "acc_stderr": 0.05021167315686779, + 
"acc_norm": 0.52, + "acc_norm_stderr": 0.05021167315686779 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.04803752235190193, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.04803752235190193 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.43842364532019706, + "acc_stderr": 0.03491207857486518, + "acc_norm": 0.43842364532019706, + "acc_norm_stderr": 0.03491207857486518 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.45161290322580644, + "acc_stderr": 0.028310500348568385, + "acc_norm": 0.45161290322580644, + "acc_norm_stderr": 0.028310500348568385 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6752136752136753, + "acc_stderr": 0.03067902276549883, + "acc_norm": 0.6752136752136753, + "acc_norm_stderr": 0.03067902276549883 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.43018867924528303, + "acc_stderr": 0.030471445867183235, + "acc_norm": 0.43018867924528303, + "acc_norm_stderr": 0.030471445867183235 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5, + "acc_stderr": 0.04789131426105757, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04789131426105757 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.32592592592592595, + "acc_stderr": 0.02857834836547308, + "acc_norm": 0.32592592592592595, + "acc_norm_stderr": 0.02857834836547308 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.23178807947019867, + "acc_stderr": 0.03445406271987053, + "acc_norm": 0.23178807947019867, + "acc_norm_stderr": 0.03445406271987053 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5522388059701493, + "acc_stderr": 0.03516184772952167, + "acc_norm": 0.5522388059701493, + "acc_norm_stderr": 0.03516184772952167 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3468208092485549, + "acc_stderr": 
0.03629146670159665, + "acc_norm": 0.3468208092485549, + "acc_norm_stderr": 0.03629146670159665 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.37566137566137564, + "acc_stderr": 0.02494236893115979, + "acc_norm": 0.37566137566137564, + "acc_norm_stderr": 0.02494236893115979 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3194444444444444, + "acc_stderr": 0.038990736873573344, + "acc_norm": 0.3194444444444444, + "acc_norm_stderr": 0.038990736873573344 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5, + "acc_stderr": 0.026919095102908273, + "acc_norm": 0.5, + "acc_norm_stderr": 0.026919095102908273 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4233128834355828, + "acc_stderr": 0.03881891213334383, + "acc_norm": 0.4233128834355828, + "acc_norm_stderr": 0.03881891213334383 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4228395061728395, + "acc_stderr": 0.02748747298087159, + "acc_norm": 0.4228395061728395, + "acc_norm_stderr": 0.02748747298087159 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.46113989637305697, + "acc_stderr": 0.03597524411734577, + "acc_norm": 0.46113989637305697, + "acc_norm_stderr": 0.03597524411734577 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.042270544512321984, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.042270544512321984 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.45871559633027525, + "acc_stderr": 
0.021364122533881695, + "acc_norm": 0.45871559633027525, + "acc_norm_stderr": 0.021364122533881695 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.0404061017820884, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.0404061017820884 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.46405228758169936, + "acc_stderr": 0.028555827516528787, + "acc_norm": 0.46405228758169936, + "acc_norm_stderr": 0.028555827516528787 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5785123966942148, + "acc_stderr": 0.04507732278775087, + "acc_norm": 0.5785123966942148, + "acc_norm_stderr": 0.04507732278775087 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40131578947368424, + "acc_stderr": 0.03988903703336285, + "acc_norm": 0.40131578947368424, + "acc_norm_stderr": 0.03988903703336285 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3627450980392157, + "acc_stderr": 0.01945076843250551, + "acc_norm": 0.3627450980392157, + "acc_norm_stderr": 0.01945076843250551 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3404255319148936, + "acc_stderr": 0.02826765748265014, + "acc_norm": 0.3404255319148936, + "acc_norm_stderr": 0.02826765748265014 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.04547960999764376, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.04547960999764376 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4027777777777778, + "acc_stderr": 0.033448873829978666, + "acc_norm": 0.4027777777777778, + "acc_norm_stderr": 0.033448873829978666 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2335195530726257, + "acc_stderr": 0.014149575348976276, + "acc_norm": 0.2335195530726257, + "acc_norm_stderr": 0.014149575348976276 + }, + 
"harness|ko_mmlu_college_computer_science|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4007352941176471, + "acc_stderr": 0.029768263528933105, + "acc_norm": 0.4007352941176471, + "acc_norm_stderr": 0.029768263528933105 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.49387755102040815, + "acc_stderr": 0.03200682020163908, + "acc_norm": 0.49387755102040815, + "acc_norm_stderr": 0.03200682020163908 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5443037974683544, + "acc_stderr": 0.032419206846933335, + "acc_norm": 0.5443037974683544, + "acc_norm_stderr": 0.032419206846933335 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3155149934810952, + "acc_stderr": 0.011869184843058642, + "acc_norm": 0.3155149934810952, + "acc_norm_stderr": 0.011869184843058642 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.44607843137254904, + "acc_stderr": 0.034888454513049734, + "acc_norm": 0.44607843137254904, + "acc_norm_stderr": 0.034888454513049734 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.41818181818181815, + "acc_stderr": 0.038517163193983954, + "acc_norm": 0.41818181818181815, + "acc_norm_stderr": 0.038517163193983954 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.29498164014687883, + "mc1_stderr": 0.015964400965589667, + "mc2": 0.46872875951621523, + "mc2_stderr": 0.01631020915826667 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3624557260920897, + "acc_stderr": 0.016527131240453716, + "acc_norm": 0.3825265643447462, + "acc_norm_stderr": 0.016709165387228817 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + 
"harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + 
"harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "MNC-Jihun/Mistral-7B-OP-u1k-ver0.6", + "model_sha": "23c7a5ec9de97c7c729fb2d9dc76bba8f6cb3406", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/MNC-Jihun/Mistral-7B-OP-u1k-ver0.7/result_2023-10-31 01:07:15.json b/MNC-Jihun/Mistral-7B-OP-u1k-ver0.7/result_2023-10-31 01:07:15.json new file mode 100644 index 0000000000000000000000000000000000000000..14feb06a76cfd90f391d4b2784ac48019d901b59 --- /dev/null +++ b/MNC-Jihun/Mistral-7B-OP-u1k-ver0.7/result_2023-10-31 01:07:15.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3506825938566553, + "acc_stderr": 0.013944635930726092, + "acc_norm": 0.40273037542662116, + "acc_norm_stderr": 0.014332236306790138 + }, + "harness|ko_hellaswag|10": { + "acc": 0.37880900219079866, + "acc_stderr": 0.004840990593494688, + "acc_norm": 0.49830711013742285, + "acc_norm_stderr": 0.004989752811173411 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.47953216374269003, + "acc_stderr": 0.038316105328219316, + "acc_norm": 0.47953216374269003, + "acc_norm_stderr": 0.038316105328219316 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5631067961165048, + "acc_stderr": 0.049111471073657764, + "acc_norm": 
0.5631067961165048, + "acc_norm_stderr": 0.049111471073657764 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4674329501915709, + "acc_stderr": 0.017841995750520857, + "acc_norm": 0.4674329501915709, + "acc_norm_stderr": 0.017841995750520857 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.43703703703703706, + "acc_stderr": 0.04284958639753399, + "acc_norm": 0.43703703703703706, + "acc_norm_stderr": 0.04284958639753399 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932269, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932269 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4297872340425532, + "acc_stderr": 0.03236214467715564, + "acc_norm": 0.4297872340425532, + "acc_norm_stderr": 0.03236214467715564 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42168674698795183, + "acc_stderr": 0.03844453181770917, + "acc_norm": 0.42168674698795183, + "acc_norm_stderr": 0.03844453181770917 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5048231511254019, + "acc_stderr": 0.028396770444111298, + "acc_norm": 0.5048231511254019, + "acc_norm_stderr": 0.028396770444111298 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4439461883408072, + "acc_stderr": 0.03334625674242728, + "acc_norm": 0.4439461883408072, + "acc_norm_stderr": 0.03334625674242728 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48091603053435117, + "acc_stderr": 0.04382094705550988, + "acc_norm": 0.48091603053435117, + "acc_norm_stderr": 0.04382094705550988 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5656565656565656, + "acc_stderr": 0.03531505879359183, + "acc_norm": 0.5656565656565656, + "acc_norm_stderr": 0.03531505879359183 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4413793103448276, + "acc_stderr": 0.04137931034482758, + 
"acc_norm": 0.4413793103448276, + "acc_norm_stderr": 0.04137931034482758 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.04280105837364396, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.04280105837364396 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5504201680672269, + "acc_stderr": 0.03231293497137707, + "acc_norm": 0.5504201680672269, + "acc_norm_stderr": 0.03231293497137707 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.49743589743589745, + "acc_stderr": 0.025350672979412202, + "acc_norm": 0.49743589743589745, + "acc_norm_stderr": 0.025350672979412202 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.58, + "acc_stderr": 0.04960449637488583, + "acc_norm": 0.58, + "acc_norm_stderr": 0.04960449637488583 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5277777777777778, + "acc_stderr": 0.04826217294139894, + "acc_norm": 0.5277777777777778, + "acc_norm_stderr": 0.04826217294139894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.03481904844438803, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.03481904844438803 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4870967741935484, + "acc_stderr": 0.028434533152681848, + "acc_norm": 0.4870967741935484, + "acc_norm_stderr": 0.028434533152681848 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7393162393162394, + "acc_stderr": 0.02876034895652341, + "acc_norm": 0.7393162393162394, + "acc_norm_stderr": 0.02876034895652341 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.43018867924528303, + "acc_stderr": 0.030471445867183235, + "acc_norm": 0.43018867924528303, + "acc_norm_stderr": 0.030471445867183235 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 
0.5272727272727272, + "acc_stderr": 0.04782001791380061, + "acc_norm": 0.5272727272727272, + "acc_norm_stderr": 0.04782001791380061 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.34444444444444444, + "acc_stderr": 0.02897264888484427, + "acc_norm": 0.34444444444444444, + "acc_norm_stderr": 0.02897264888484427 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.24503311258278146, + "acc_stderr": 0.03511807571804723, + "acc_norm": 0.24503311258278146, + "acc_norm_stderr": 0.03511807571804723 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6169154228855721, + "acc_stderr": 0.0343751933733825, + "acc_norm": 0.6169154228855721, + "acc_norm_stderr": 0.0343751933733825 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4046242774566474, + "acc_stderr": 0.03742461193887249, + "acc_norm": 0.4046242774566474, + "acc_norm_stderr": 0.03742461193887249 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.025107425481137282, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.025107425481137282 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3541666666666667, + "acc_stderr": 0.039994111357535424, + "acc_norm": 0.3541666666666667, + "acc_norm_stderr": 0.039994111357535424 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.58, + "acc_stderr": 0.04960449637488584, + "acc_norm": 0.58, + "acc_norm_stderr": 0.04960449637488584 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5317919075144508, + "acc_stderr": 0.026864624366756663, + "acc_norm": 0.5317919075144508, + "acc_norm_stderr": 0.026864624366756663 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5030674846625767, + "acc_stderr": 0.03928297078179663, + "acc_norm": 0.5030674846625767, + "acc_norm_stderr": 0.03928297078179663 + }, + 
"harness|ko_mmlu_prehistory|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.027777777777777804, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.027777777777777804 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5440414507772021, + "acc_stderr": 0.035944137112724366, + "acc_norm": 0.5440414507772021, + "acc_norm_stderr": 0.035944137112724366 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.34210526315789475, + "acc_stderr": 0.044629175353369376, + "acc_norm": 0.34210526315789475, + "acc_norm_stderr": 0.044629175353369376 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.47889908256880737, + "acc_stderr": 0.021418224754264643, + "acc_norm": 0.47889908256880737, + "acc_norm_stderr": 0.021418224754264643 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.40476190476190477, + "acc_stderr": 0.043902592653775614, + "acc_norm": 0.40476190476190477, + "acc_norm_stderr": 0.043902592653775614 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4934640522875817, + "acc_stderr": 0.028627470550556054, + "acc_norm": 0.4934640522875817, + "acc_norm_stderr": 0.028627470550556054 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.44, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.44, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6694214876033058, + "acc_stderr": 0.04294340845212094, + "acc_norm": 0.6694214876033058, + "acc_norm_stderr": 0.04294340845212094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.46710526315789475, + "acc_stderr": 0.040601270352363966, + "acc_norm": 0.46710526315789475, + "acc_norm_stderr": 0.040601270352363966 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.41013071895424835, + "acc_stderr": 0.019898412717635913, + "acc_norm": 0.41013071895424835, 
+ "acc_norm_stderr": 0.019898412717635913 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3546099290780142, + "acc_stderr": 0.02853865002887865, + "acc_norm": 0.3546099290780142, + "acc_norm_stderr": 0.02853865002887865 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.375, + "acc_stderr": 0.04595091388086298, + "acc_norm": 0.375, + "acc_norm_stderr": 0.04595091388086298 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.03388857118502326, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.03388857118502326 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.3486033519553073, + "acc_stderr": 0.015937484656687022, + "acc_norm": 0.3486033519553073, + "acc_norm_stderr": 0.015937484656687022 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.63, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.63, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.45955882352941174, + "acc_stderr": 0.030273325077345755, + "acc_norm": 0.45955882352941174, + "acc_norm_stderr": 0.030273325077345755 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.563265306122449, + "acc_stderr": 0.03175195237583324, + "acc_norm": 0.563265306122449, + "acc_norm_stderr": 0.03175195237583324 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6118143459915611, + "acc_stderr": 0.03172295004332331, + "acc_norm": 0.6118143459915611, + "acc_norm_stderr": 0.03172295004332331 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.32985658409387225, + "acc_stderr": 0.012008129938540479, + "acc_norm": 0.32985658409387225, + "acc_norm_stderr": 0.012008129938540479 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.45588235294117646, + "acc_stderr": 
0.03495624522015473, + "acc_norm": 0.45588235294117646, + "acc_norm_stderr": 0.03495624522015473 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4727272727272727, + "acc_stderr": 0.03898531605579419, + "acc_norm": 0.4727272727272727, + "acc_norm_stderr": 0.03898531605579419 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.28886168910648713, + "mc1_stderr": 0.0158663464013843, + "mc2": 0.48509410722375507, + "mc2_stderr": 0.015448476334612172 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5182998819362455, + "acc_stderr": 0.017178836639177755, + "acc_norm": 0.5631641086186541, + "acc_norm_stderr": 0.017052633559856065 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 
1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "MNC-Jihun/Mistral-7B-OP-u1k-ver0.7", + "model_sha": "d6e5e9f3245ff8beba92c77a0cedcfbb5eb8798f", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/MNC-LLM/Mistral-11B-Omni-OPA-u1k-ver0.7/result_2023-11-02 01:16:59.json b/MNC-LLM/Mistral-11B-Omni-OPA-u1k-ver0.7/result_2023-11-02 01:16:59.json new file mode 100644 index 0000000000000000000000000000000000000000..8770d38f30deeea5391b9e3100c38e2486734e4a --- /dev/null +++ 
b/MNC-LLM/Mistral-11B-Omni-OPA-u1k-ver0.7/result_2023-11-02 01:16:59.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3412969283276451, + "acc_stderr": 0.013855831287497724, + "acc_norm": 0.38993174061433444, + "acc_norm_stderr": 0.014252959848892893 + }, + "harness|ko_hellaswag|10": { + "acc": 0.37801234813782114, + "acc_stderr": 0.004838997427699758, + "acc_norm": 0.4923322047400916, + "acc_norm_stderr": 0.004989194627707854 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.47953216374269003, + "acc_stderr": 0.0383161053282193, + "acc_norm": 0.47953216374269003, + "acc_norm_stderr": 0.0383161053282193 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6019417475728155, + "acc_stderr": 0.04846748253977238, + "acc_norm": 0.6019417475728155, + "acc_norm_stderr": 0.04846748253977238 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4674329501915709, + "acc_stderr": 0.017841995750520857, + "acc_norm": 0.4674329501915709, + "acc_norm_stderr": 0.017841995750520857 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.04292596718256981, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.04292596718256981 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3659574468085106, + "acc_stderr": 0.0314895582974553, + "acc_norm": 0.3659574468085106, + "acc_norm_stderr": 0.0314895582974553 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39156626506024095, + "acc_stderr": 0.037998574544796354, + "acc_norm": 0.39156626506024095, + "acc_norm_stderr": 0.037998574544796354 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4919614147909968, + "acc_stderr": 0.028394421370984545, + "acc_norm": 0.4919614147909968, + "acc_norm_stderr": 0.028394421370984545 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4080717488789238, + 
"acc_stderr": 0.03298574607842821, + "acc_norm": 0.4080717488789238, + "acc_norm_stderr": 0.03298574607842821 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.44274809160305345, + "acc_stderr": 0.04356447202665069, + "acc_norm": 0.44274809160305345, + "acc_norm_stderr": 0.04356447202665069 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.035402943770953675, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.035402943770953675 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.46206896551724136, + "acc_stderr": 0.04154659671707546, + "acc_norm": 0.46206896551724136, + "acc_norm_stderr": 0.04154659671707546 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.37254901960784315, + "acc_stderr": 0.04810840148082635, + "acc_norm": 0.37254901960784315, + "acc_norm_stderr": 0.04810840148082635 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5672268907563025, + "acc_stderr": 0.032183581077426124, + "acc_norm": 0.5672268907563025, + "acc_norm_stderr": 0.032183581077426124 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4564102564102564, + "acc_stderr": 0.025254485424799605, + "acc_norm": 0.4564102564102564, + "acc_norm_stderr": 0.025254485424799605 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.55, + "acc_stderr": 0.04999999999999999, + "acc_norm": 0.55, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.04803752235190192, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.04803752235190192 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + 
"acc": 0.3694581280788177, + "acc_stderr": 0.033959703819985754, + "acc_norm": 0.3694581280788177, + "acc_norm_stderr": 0.033959703819985754 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4612903225806452, + "acc_stderr": 0.02835863485983692, + "acc_norm": 0.4612903225806452, + "acc_norm_stderr": 0.02835863485983692 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7435897435897436, + "acc_stderr": 0.028605953702004253, + "acc_norm": 0.7435897435897436, + "acc_norm_stderr": 0.028605953702004253 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4528301886792453, + "acc_stderr": 0.030635627957961827, + "acc_norm": 0.4528301886792453, + "acc_norm_stderr": 0.030635627957961827 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.04769300568972745, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.04769300568972745 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.02730914058823018, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.02730914058823018 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.25165562913907286, + "acc_stderr": 0.03543304234389985, + "acc_norm": 0.25165562913907286, + "acc_norm_stderr": 0.03543304234389985 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5920398009950248, + "acc_stderr": 0.03475116365194092, + "acc_norm": 0.5920398009950248, + "acc_norm_stderr": 0.03475116365194092 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.43352601156069365, + "acc_stderr": 0.03778621079092056, + "acc_norm": 0.43352601156069365, + "acc_norm_stderr": 0.03778621079092056 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3783068783068783, + "acc_stderr": 0.02497695405315525, + "acc_norm": 0.3783068783068783, + "acc_norm_stderr": 0.02497695405315525 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3263888888888889, + "acc_stderr": 0.03921067198982266, + "acc_norm": 0.3263888888888889, + 
"acc_norm_stderr": 0.03921067198982266 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237101, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237101 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.49421965317919075, + "acc_stderr": 0.026917296179149116, + "acc_norm": 0.49421965317919075, + "acc_norm_stderr": 0.026917296179149116 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5030674846625767, + "acc_stderr": 0.03928297078179663, + "acc_norm": 0.5030674846625767, + "acc_norm_stderr": 0.03928297078179663 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4537037037037037, + "acc_stderr": 0.0277012284685426, + "acc_norm": 0.4537037037037037, + "acc_norm_stderr": 0.0277012284685426 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5233160621761658, + "acc_stderr": 0.03604513672442203, + "acc_norm": 0.5233160621761658, + "acc_norm_stderr": 0.03604513672442203 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2982456140350877, + "acc_stderr": 0.04303684033537318, + "acc_norm": 0.2982456140350877, + "acc_norm_stderr": 0.04303684033537318 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.4990825688073395, + "acc_stderr": 0.021437287056051215, + "acc_norm": 0.4990825688073395, + "acc_norm_stderr": 0.021437287056051215 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4126984126984127, + "acc_stderr": 0.04403438954768177, + "acc_norm": 0.4126984126984127, + "acc_norm_stderr": 0.04403438954768177 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5032679738562091, + "acc_stderr": 0.02862930519400355, + "acc_norm": 0.5032679738562091, + 
"acc_norm_stderr": 0.02862930519400355 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.043913262867240704, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.043913262867240704 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4144736842105263, + "acc_stderr": 0.04008973785779206, + "acc_norm": 0.4144736842105263, + "acc_norm_stderr": 0.04008973785779206 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4084967320261438, + "acc_stderr": 0.01988622103750187, + "acc_norm": 0.4084967320261438, + "acc_norm_stderr": 0.01988622103750187 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.31560283687943264, + "acc_stderr": 0.027724989449509314, + "acc_norm": 0.31560283687943264, + "acc_norm_stderr": 0.027724989449509314 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.04547960999764376, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.04547960999764376 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4583333333333333, + "acc_stderr": 0.033981108902946366, + "acc_norm": 0.4583333333333333, + "acc_norm_stderr": 0.033981108902946366 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2569832402234637, + "acc_stderr": 0.01461446582196634, + "acc_norm": 0.2569832402234637, + "acc_norm_stderr": 0.01461446582196634 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.41911764705882354, + "acc_stderr": 0.02997280717046463, + 
"acc_norm": 0.41911764705882354, + "acc_norm_stderr": 0.02997280717046463 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5510204081632653, + "acc_stderr": 0.03184213866687578, + "acc_norm": 0.5510204081632653, + "acc_norm_stderr": 0.03184213866687578 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5443037974683544, + "acc_stderr": 0.032419206846933335, + "acc_norm": 0.5443037974683544, + "acc_norm_stderr": 0.032419206846933335 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.33116036505867014, + "acc_stderr": 0.01202012819598576, + "acc_norm": 0.33116036505867014, + "acc_norm_stderr": 0.01202012819598576 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.47058823529411764, + "acc_stderr": 0.03503235296367992, + "acc_norm": 0.47058823529411764, + "acc_norm_stderr": 0.03503235296367992 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.48484848484848486, + "acc_stderr": 0.03902551007374449, + "acc_norm": 0.48484848484848486, + "acc_norm_stderr": 0.03902551007374449 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3084455324357405, + "mc1_stderr": 0.01616803938315687, + "mc2": 0.4911572350172599, + "mc2_stderr": 0.015610028118935604 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5159386068476978, + "acc_stderr": 0.017181617837190192, + "acc_norm": 0.5430932703659976, + "acc_norm_stderr": 0.01712638909308678 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + 
"harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + 
"harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "MNC-LLM/Mistral-11B-Omni-OPA-u1k-ver0.7", + "model_sha": "b191a814d7f0ab540eaa36f8f6ca4c189e4d3a5f", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/MNC-LLM/Mistral-7B-O3k-Au1k-ver0.7/result_2023-11-01 05:18:23.json b/MNC-LLM/Mistral-7B-O3k-Au1k-ver0.7/result_2023-11-01 05:18:23.json new file mode 100644 index 0000000000000000000000000000000000000000..1ebc4971704dc45c9247a9ea12f8cd08ad5551e1 --- /dev/null +++ b/MNC-LLM/Mistral-7B-O3k-Au1k-ver0.7/result_2023-11-01 05:18:23.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3455631399317406, + "acc_stderr": 0.01389693846114568, + "acc_norm": 0.3890784982935154, + "acc_norm_stderr": 0.014247309976045605 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3761202947619996, + "acc_stderr": 0.004834207964061324, + "acc_norm": 0.48834893447520417, + "acc_norm_stderr": 0.004988426528513012 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5087719298245614, + "acc_stderr": 0.03834234744164993, + "acc_norm": 0.5087719298245614, + "acc_norm_stderr": 0.03834234744164993 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5922330097087378, + "acc_stderr": 0.048657775704107675, + "acc_norm": 0.5922330097087378, + "acc_norm_stderr": 0.048657775704107675 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.46871008939974457, + "acc_stderr": 0.017844918090468544, + "acc_norm": 0.46871008939974457, + "acc_norm_stderr": 0.017844918090468544 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4222222222222222, + "acc_stderr": 0.04266763404099582, + "acc_norm": 0.4222222222222222, + "acc_norm_stderr": 0.04266763404099582 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 
0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.42127659574468085, + "acc_stderr": 0.03227834510146267, + "acc_norm": 0.42127659574468085, + "acc_norm_stderr": 0.03227834510146267 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39759036144578314, + "acc_stderr": 0.03809973084540219, + "acc_norm": 0.39759036144578314, + "acc_norm_stderr": 0.03809973084540219 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4983922829581994, + "acc_stderr": 0.02839794490780661, + "acc_norm": 0.4983922829581994, + "acc_norm_stderr": 0.02839794490780661 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4618834080717489, + "acc_stderr": 0.033460150119732274, + "acc_norm": 0.4618834080717489, + "acc_norm_stderr": 0.033460150119732274 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.46564885496183206, + "acc_stderr": 0.04374928560599738, + "acc_norm": 0.46564885496183206, + "acc_norm_stderr": 0.04374928560599738 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5404040404040404, + "acc_stderr": 0.035507024651313425, + "acc_norm": 0.5404040404040404, + "acc_norm_stderr": 0.035507024651313425 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.45517241379310347, + "acc_stderr": 0.04149886942192117, + "acc_norm": 0.45517241379310347, + "acc_norm_stderr": 0.04149886942192117 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.04220773659171452, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.04220773659171452 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5462184873949579, + "acc_stderr": 0.03233943468182088, + "acc_norm": 0.5462184873949579, + "acc_norm_stderr": 0.03233943468182088 + }, + 
"harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4641025641025641, + "acc_stderr": 0.025285585990017834, + "acc_norm": 0.4641025641025641, + "acc_norm_stderr": 0.025285585990017834 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.58, + "acc_stderr": 0.04960449637488583, + "acc_norm": 0.58, + "acc_norm_stderr": 0.04960449637488583 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.43349753694581283, + "acc_stderr": 0.034867317274198714, + "acc_norm": 0.43349753694581283, + "acc_norm_stderr": 0.034867317274198714 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.49032258064516127, + "acc_stderr": 0.02843867799890955, + "acc_norm": 0.49032258064516127, + "acc_norm_stderr": 0.02843867799890955 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7393162393162394, + "acc_stderr": 0.028760348956523414, + "acc_norm": 0.7393162393162394, + "acc_norm_stderr": 0.028760348956523414 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.44150943396226416, + "acc_stderr": 0.030561590426731833, + "acc_norm": 0.44150943396226416, + "acc_norm_stderr": 0.030561590426731833 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5272727272727272, + "acc_stderr": 0.04782001791380061, + "acc_norm": 0.5272727272727272, + "acc_norm_stderr": 0.04782001791380061 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.337037037037037, + "acc_stderr": 0.028820884666253255, + "acc_norm": 0.337037037037037, + "acc_norm_stderr": 0.028820884666253255 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.271523178807947, + "acc_stderr": 0.03631329803969653, + "acc_norm": 0.271523178807947, + "acc_norm_stderr": 
0.03631329803969653 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5920398009950248, + "acc_stderr": 0.03475116365194092, + "acc_norm": 0.5920398009950248, + "acc_norm_stderr": 0.03475116365194092 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3988439306358382, + "acc_stderr": 0.03733626655383509, + "acc_norm": 0.3988439306358382, + "acc_norm_stderr": 0.03733626655383509 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3835978835978836, + "acc_stderr": 0.025043757318520193, + "acc_norm": 0.3835978835978836, + "acc_norm_stderr": 0.025043757318520193 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3541666666666667, + "acc_stderr": 0.039994111357535424, + "acc_norm": 0.3541666666666667, + "acc_norm_stderr": 0.039994111357535424 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.57, + "acc_stderr": 0.04975698519562426, + "acc_norm": 0.57, + "acc_norm_stderr": 0.04975698519562426 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5549132947976878, + "acc_stderr": 0.02675625512966377, + "acc_norm": 0.5549132947976878, + "acc_norm_stderr": 0.02675625512966377 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.49693251533742333, + "acc_stderr": 0.03928297078179662, + "acc_norm": 0.49693251533742333, + "acc_norm_stderr": 0.03928297078179662 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4567901234567901, + "acc_stderr": 0.027716661650194038, + "acc_norm": 0.4567901234567901, + "acc_norm_stderr": 0.027716661650194038 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5440414507772021, + "acc_stderr": 0.035944137112724366, + "acc_norm": 0.5440414507772021, + 
"acc_norm_stderr": 0.035944137112724366 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.30701754385964913, + "acc_stderr": 0.043391383225798594, + "acc_norm": 0.30701754385964913, + "acc_norm_stderr": 0.043391383225798594 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.4990825688073395, + "acc_stderr": 0.021437287056051215, + "acc_norm": 0.4990825688073395, + "acc_norm_stderr": 0.021437287056051215 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3968253968253968, + "acc_stderr": 0.04375888492727061, + "acc_norm": 0.3968253968253968, + "acc_norm_stderr": 0.04375888492727061 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.46078431372549017, + "acc_stderr": 0.028541722692618874, + "acc_norm": 0.46078431372549017, + "acc_norm_stderr": 0.028541722692618874 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.45, + "acc_stderr": 0.049999999999999996, + "acc_norm": 0.45, + "acc_norm_stderr": 0.049999999999999996 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6776859504132231, + "acc_stderr": 0.042664163633521685, + "acc_norm": 0.6776859504132231, + "acc_norm_stderr": 0.042664163633521685 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4276315789473684, + "acc_stderr": 0.040260970832965585, + "acc_norm": 0.4276315789473684, + "acc_norm_stderr": 0.040260970832965585 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.40522875816993464, + "acc_stderr": 0.019861155193829156, + "acc_norm": 0.40522875816993464, + "acc_norm_stderr": 0.019861155193829156 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.35106382978723405, + "acc_stderr": 0.028473501272963775, + "acc_norm": 0.35106382978723405, + "acc_norm_stderr": 0.028473501272963775 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.41964285714285715, + "acc_stderr": 0.04684099321077106, + "acc_norm": 0.41964285714285715, + "acc_norm_stderr": 0.04684099321077106 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.39814814814814814, 
+ "acc_stderr": 0.033384734032074016, + "acc_norm": 0.39814814814814814, + "acc_norm_stderr": 0.033384734032074016 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.34413407821229053, + "acc_stderr": 0.015889221313307094, + "acc_norm": 0.34413407821229053, + "acc_norm_stderr": 0.015889221313307094 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.030161911930767105, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.030161911930767105 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5551020408163265, + "acc_stderr": 0.031814251181977865, + "acc_norm": 0.5551020408163265, + "acc_norm_stderr": 0.031814251181977865 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5991561181434599, + "acc_stderr": 0.031900803894732356, + "acc_norm": 0.5991561181434599, + "acc_norm_stderr": 0.031900803894732356 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3363754889178618, + "acc_stderr": 0.012067083079452225, + "acc_norm": 0.3363754889178618, + "acc_norm_stderr": 0.012067083079452225 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4803921568627451, + "acc_stderr": 0.03506612560524866, + "acc_norm": 0.4803921568627451, + "acc_norm_stderr": 0.03506612560524866 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4909090909090909, + "acc_stderr": 0.0390369864774844, + "acc_norm": 0.4909090909090909, + "acc_norm_stderr": 0.0390369864774844 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2974296205630355, + "mc1_stderr": 0.01600265148736101, + "mc2": 0.47256825783555356, + "mc2_stderr": 0.015562189062650065 + }, + 
"harness|ko_commongen_v2|2": { + "acc": 0.5171192443919717, + "acc_stderr": 0.01718027524608563, + "acc_norm": 0.5525383707201889, + "acc_norm_stderr": 0.017095190301500578 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + 
"harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "MNC-LLM/Mistral-7B-O3k-Au1k-ver0.7", + "model_sha": "99abb58ee6efae9e5cdc9bc427c79bc4a7b6f1a2", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/MNCJ1hun/Dolphin-Mistral-7B-OP-u1k-ver0.1/result_2023-10-28 16:43:18.json b/MNCJ1hun/Dolphin-Mistral-7B-OP-u1k-ver0.1/result_2023-10-28 16:43:18.json new file mode 100644 index 0000000000000000000000000000000000000000..1b6b05a8ecf43f2c3e6629f7eb60b1e6947a27c6 --- /dev/null +++ b/MNCJ1hun/Dolphin-Mistral-7B-OP-u1k-ver0.1/result_2023-10-28 16:43:18.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.35409556313993173, + "acc_stderr": 0.01397545412275656, + "acc_norm": 0.40784982935153585, + "acc_norm_stderr": 0.014361097288449686 + }, + "harness|ko_hellaswag|10": { + "acc": 0.38189603664608646, + "acc_stderr": 0.0048485832436066904, + "acc_norm": 0.49661422027484564, + "acc_norm_stderr": 0.004989667009372637 + }, 
+ "harness|ko_mmlu_world_religions|5": { + "acc": 0.49707602339181284, + "acc_stderr": 0.03834759370936839, + "acc_norm": 0.49707602339181284, + "acc_norm_stderr": 0.03834759370936839 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5825242718446602, + "acc_stderr": 0.048828405482122375, + "acc_norm": 0.5825242718446602, + "acc_norm_stderr": 0.048828405482122375 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4648786717752235, + "acc_stderr": 0.01783579880629064, + "acc_norm": 0.4648786717752235, + "acc_norm_stderr": 0.01783579880629064 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.04171654161354543, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.04171654161354543 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3829787234042553, + "acc_stderr": 0.03177821250236922, + "acc_norm": 0.3829787234042553, + "acc_norm_stderr": 0.03177821250236922 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42771084337349397, + "acc_stderr": 0.038515976837185335, + "acc_norm": 0.42771084337349397, + "acc_norm_stderr": 0.038515976837185335 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.45016077170418006, + "acc_stderr": 0.02825666072336018, + "acc_norm": 0.45016077170418006, + "acc_norm_stderr": 0.02825666072336018 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4304932735426009, + "acc_stderr": 0.033231973029429394, + "acc_norm": 0.4304932735426009, + "acc_norm_stderr": 0.033231973029429394 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4351145038167939, + "acc_stderr": 0.04348208051644858, + "acc_norm": 0.4351145038167939, + "acc_norm_stderr": 0.04348208051644858 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + 
"harness|ko_mmlu_high_school_geography|5": { + "acc": 0.494949494949495, + "acc_stderr": 0.035621707606254015, + "acc_norm": 0.494949494949495, + "acc_norm_stderr": 0.035621707606254015 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.04104269211806232, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.04104269211806232 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.16666666666666666, + "acc_stderr": 0.03708284662416546, + "acc_norm": 0.16666666666666666, + "acc_norm_stderr": 0.03708284662416546 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5378151260504201, + "acc_stderr": 0.0323854694875898, + "acc_norm": 0.5378151260504201, + "acc_norm_stderr": 0.0323854694875898 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4641025641025641, + "acc_stderr": 0.025285585990017838, + "acc_norm": 0.4641025641025641, + "acc_norm_stderr": 0.025285585990017838 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5185185185185185, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.5185185185185185, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3793103448275862, + "acc_stderr": 0.034139638059062345, + "acc_norm": 0.3793103448275862, + "acc_norm_stderr": 0.034139638059062345 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.47096774193548385, + "acc_stderr": 0.028396016402761008, + "acc_norm": 0.47096774193548385, + "acc_norm_stderr": 0.028396016402761008 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.688034188034188, + "acc_stderr": 0.03035152732334494, + "acc_norm": 0.688034188034188, + 
"acc_norm_stderr": 0.03035152732334494 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4679245283018868, + "acc_stderr": 0.03070948699255654, + "acc_norm": 0.4679245283018868, + "acc_norm_stderr": 0.03070948699255654 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5363636363636364, + "acc_stderr": 0.04776449162396197, + "acc_norm": 0.5363636363636364, + "acc_norm_stderr": 0.04776449162396197 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.29259259259259257, + "acc_stderr": 0.02773896963217609, + "acc_norm": 0.29259259259259257, + "acc_norm_stderr": 0.02773896963217609 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.03757949922943343, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.03757949922943343 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6169154228855721, + "acc_stderr": 0.034375193373382504, + "acc_norm": 0.6169154228855721, + "acc_norm_stderr": 0.034375193373382504 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.34104046242774566, + "acc_stderr": 0.03614665424180826, + "acc_norm": 0.34104046242774566, + "acc_norm_stderr": 0.03614665424180826 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.36507936507936506, + "acc_stderr": 0.024796060602699954, + "acc_norm": 0.36507936507936506, + "acc_norm_stderr": 0.024796060602699954 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2847222222222222, + "acc_stderr": 0.03773809990686934, + "acc_norm": 0.2847222222222222, + "acc_norm_stderr": 0.03773809990686934 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.58, + "acc_stderr": 0.04960449637488584, + "acc_norm": 0.58, + "acc_norm_stderr": 0.04960449637488584 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4797687861271676, + "acc_stderr": 0.026897049996382875, + 
"acc_norm": 0.4797687861271676, + "acc_norm_stderr": 0.026897049996382875 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5030674846625767, + "acc_stderr": 0.03928297078179663, + "acc_norm": 0.5030674846625767, + "acc_norm_stderr": 0.03928297078179663 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4567901234567901, + "acc_stderr": 0.027716661650194038, + "acc_norm": 0.4567901234567901, + "acc_norm_stderr": 0.027716661650194038 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5077720207253886, + "acc_stderr": 0.03608003225569654, + "acc_norm": 0.5077720207253886, + "acc_norm_stderr": 0.03608003225569654 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.04227054451232199, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.04227054451232199 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.4935779816513762, + "acc_stderr": 0.021435554820013077, + "acc_norm": 0.4935779816513762, + "acc_norm_stderr": 0.021435554820013077 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3412698412698413, + "acc_stderr": 0.04240799327574924, + "acc_norm": 0.3412698412698413, + "acc_norm_stderr": 0.04240799327574924 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5392156862745098, + "acc_stderr": 0.028541722692618874, + "acc_norm": 0.5392156862745098, + "acc_norm_stderr": 0.028541722692618874 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.47, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.47, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5867768595041323, + "acc_stderr": 0.04495087843548408, + "acc_norm": 0.5867768595041323, + "acc_norm_stderr": 0.04495087843548408 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4407894736842105, + "acc_stderr": 
0.04040311062490435, + "acc_norm": 0.4407894736842105, + "acc_norm_stderr": 0.04040311062490435 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3660130718954248, + "acc_stderr": 0.01948802574552966, + "acc_norm": 0.3660130718954248, + "acc_norm_stderr": 0.01948802574552966 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.36524822695035464, + "acc_stderr": 0.028723863853281278, + "acc_norm": 0.36524822695035464, + "acc_norm_stderr": 0.028723863853281278 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4375, + "acc_stderr": 0.04708567521880525, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.04708567521880525 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4212962962962963, + "acc_stderr": 0.033674621388960775, + "acc_norm": 0.4212962962962963, + "acc_norm_stderr": 0.033674621388960775 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23128491620111732, + "acc_stderr": 0.0141022236231526, + "acc_norm": 0.23128491620111732, + "acc_norm_stderr": 0.0141022236231526 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.375, + "acc_stderr": 0.029408372932278746, + "acc_norm": 0.375, + "acc_norm_stderr": 0.029408372932278746 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5102040816326531, + "acc_stderr": 0.03200255347893783, + "acc_norm": 0.5102040816326531, + "acc_norm_stderr": 0.03200255347893783 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5907172995780591, + "acc_stderr": 0.03200704183359591, + "acc_norm": 0.5907172995780591, + "acc_norm_stderr": 0.03200704183359591 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 
0.30964797913950454, + "acc_stderr": 0.011808598262503318, + "acc_norm": 0.30964797913950454, + "acc_norm_stderr": 0.011808598262503318 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5049019607843137, + "acc_stderr": 0.03509143375606786, + "acc_norm": 0.5049019607843137, + "acc_norm_stderr": 0.03509143375606786 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.47878787878787876, + "acc_stderr": 0.03900828913737302, + "acc_norm": 0.47878787878787876, + "acc_norm_stderr": 0.03900828913737302 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3268053855569155, + "mc1_stderr": 0.016419874731135035, + "mc2": 0.4937623805683608, + "mc2_stderr": 0.015810468549274707 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4344746162927981, + "acc_stderr": 0.017042098620824928, + "acc_norm": 0.4793388429752066, + "acc_norm_stderr": 0.017175671279836446 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + 
"harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "MNCJ1hun/Dolphin-Mistral-7B-OP-u1k-ver0.1", + "model_sha": "4790deb15d0c30a0a8728d8f8419e1694c21eb1a", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git 
a/MNCJ1hun/MIstral-11B-Omni-OP-1k-2048-ver0.1/result_2023-10-29 00:22:07.json b/MNCJ1hun/MIstral-11B-Omni-OP-1k-2048-ver0.1/result_2023-10-29 00:22:07.json new file mode 100644 index 0000000000000000000000000000000000000000..cfa4747ccfd0968523fa804f074739c8df031c9f --- /dev/null +++ b/MNCJ1hun/MIstral-11B-Omni-OP-1k-2048-ver0.1/result_2023-10-29 00:22:07.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.36177474402730375, + "acc_stderr": 0.01404195794503808, + "acc_norm": 0.41552901023890787, + "acc_norm_stderr": 0.014401366641216395 + }, + "harness|ko_hellaswag|10": { + "acc": 0.38179645488946423, + "acc_stderr": 0.00484834156049215, + "acc_norm": 0.4947221668990241, + "acc_norm_stderr": 0.004989503417767287 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4853801169590643, + "acc_stderr": 0.038331852752130205, + "acc_norm": 0.4853801169590643, + "acc_norm_stderr": 0.038331852752130205 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5825242718446602, + "acc_stderr": 0.048828405482122375, + "acc_norm": 0.5825242718446602, + "acc_norm_stderr": 0.048828405482122375 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.45849297573435505, + "acc_stderr": 0.017818248603465554, + "acc_norm": 0.45849297573435505, + "acc_norm_stderr": 0.017818248603465554 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.362962962962963, + "acc_stderr": 0.041539484047424, + "acc_norm": 0.362962962962963, + "acc_norm_stderr": 0.041539484047424 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4553191489361702, + "acc_stderr": 0.03255525359340355, + "acc_norm": 0.4553191489361702, + "acc_norm_stderr": 0.03255525359340355 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.45180722891566266, + "acc_stderr": 0.03874371556587953, + "acc_norm": 0.45180722891566266, + 
"acc_norm_stderr": 0.03874371556587953 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.48231511254019294, + "acc_stderr": 0.02838032284907713, + "acc_norm": 0.48231511254019294, + "acc_norm_stderr": 0.02838032284907713 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4663677130044843, + "acc_stderr": 0.033481800170603065, + "acc_norm": 0.4663677130044843, + "acc_norm_stderr": 0.033481800170603065 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.46564885496183206, + "acc_stderr": 0.043749285605997376, + "acc_norm": 0.46564885496183206, + "acc_norm_stderr": 0.043749285605997376 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5, + "acc_stderr": 0.035623524993954825, + "acc_norm": 0.5, + "acc_norm_stderr": 0.035623524993954825 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4206896551724138, + "acc_stderr": 0.0411391498118926, + "acc_norm": 0.4206896551724138, + "acc_norm_stderr": 0.0411391498118926 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237654, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237654 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5084033613445378, + "acc_stderr": 0.0324739027656967, + "acc_norm": 0.5084033613445378, + "acc_norm_stderr": 0.0324739027656967 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.025294608023986483, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.025294608023986483 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.53, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.53, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.04605661864718381, + "acc_norm": 0.3, + "acc_norm_stderr": 
0.04605661864718381 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5462962962962963, + "acc_stderr": 0.04812917324536823, + "acc_norm": 0.5462962962962963, + "acc_norm_stderr": 0.04812917324536823 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.03481904844438804, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.03481904844438804 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5, + "acc_stderr": 0.028444006199428714, + "acc_norm": 0.5, + "acc_norm_stderr": 0.028444006199428714 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7307692307692307, + "acc_stderr": 0.029058588303748845, + "acc_norm": 0.7307692307692307, + "acc_norm_stderr": 0.029058588303748845 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.45660377358490567, + "acc_stderr": 0.03065674869673943, + "acc_norm": 0.45660377358490567, + "acc_norm_stderr": 0.03065674869673943 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4909090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.4909090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.0279404571362284, + "acc_norm": 0.3, + "acc_norm_stderr": 0.0279404571362284 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + "acc_stderr": 0.037345356767871984, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.037345356767871984 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6019900497512438, + "acc_stderr": 0.034611994290400135, + "acc_norm": 0.6019900497512438, + "acc_norm_stderr": 0.034611994290400135 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3699421965317919, + "acc_stderr": 0.03681229633394319, + "acc_norm": 0.3699421965317919, + "acc_norm_stderr": 0.03681229633394319 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.36243386243386244, + "acc_stderr": 0.024757473902752052, + "acc_norm": 
0.36243386243386244, + "acc_norm_stderr": 0.024757473902752052 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3194444444444444, + "acc_stderr": 0.038990736873573344, + "acc_norm": 0.3194444444444444, + "acc_norm_stderr": 0.038990736873573344 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5, + "acc_stderr": 0.026919095102908273, + "acc_norm": 0.5, + "acc_norm_stderr": 0.026919095102908273 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4662576687116564, + "acc_stderr": 0.039194155450484096, + "acc_norm": 0.4662576687116564, + "acc_norm_stderr": 0.039194155450484096 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4351851851851852, + "acc_stderr": 0.027586006221607715, + "acc_norm": 0.4351851851851852, + "acc_norm_stderr": 0.027586006221607715 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5025906735751295, + "acc_stderr": 0.03608390745384487, + "acc_norm": 0.5025906735751295, + "acc_norm_stderr": 0.03608390745384487 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.041857744240220575, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.041857744240220575 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.48807339449541287, + "acc_stderr": 0.021431223617362223, + "acc_norm": 0.48807339449541287, + "acc_norm_stderr": 0.021431223617362223 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.373015873015873, + "acc_stderr": 0.04325506042017087, + "acc_norm": 
0.373015873015873, + "acc_norm_stderr": 0.04325506042017087 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.46405228758169936, + "acc_stderr": 0.02855582751652879, + "acc_norm": 0.46405228758169936, + "acc_norm_stderr": 0.02855582751652879 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6198347107438017, + "acc_stderr": 0.04431324501968432, + "acc_norm": 0.6198347107438017, + "acc_norm_stderr": 0.04431324501968432 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4605263157894737, + "acc_stderr": 0.04056242252249033, + "acc_norm": 0.4605263157894737, + "acc_norm_stderr": 0.04056242252249033 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.380718954248366, + "acc_stderr": 0.019643801557924806, + "acc_norm": 0.380718954248366, + "acc_norm_stderr": 0.019643801557924806 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.35815602836879434, + "acc_stderr": 0.028602085862759426, + "acc_norm": 0.35815602836879434, + "acc_norm_stderr": 0.028602085862759426 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.375, + "acc_stderr": 0.04595091388086298, + "acc_norm": 0.375, + "acc_norm_stderr": 0.04595091388086298 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4212962962962963, + "acc_stderr": 0.033674621388960775, + "acc_norm": 0.4212962962962963, + "acc_norm_stderr": 0.033674621388960775 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.21675977653631284, + "acc_stderr": 0.013780598486443354, + "acc_norm": 0.21675977653631284, + "acc_norm_stderr": 0.013780598486443354 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + 
"acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.35661764705882354, + "acc_stderr": 0.029097209568411952, + "acc_norm": 0.35661764705882354, + "acc_norm_stderr": 0.029097209568411952 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5306122448979592, + "acc_stderr": 0.031949171367580624, + "acc_norm": 0.5306122448979592, + "acc_norm_stderr": 0.031949171367580624 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5949367088607594, + "acc_stderr": 0.031955147413706725, + "acc_norm": 0.5949367088607594, + "acc_norm_stderr": 0.031955147413706725 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3305084745762712, + "acc_stderr": 0.012014142101842977, + "acc_norm": 0.3305084745762712, + "acc_norm_stderr": 0.012014142101842977 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.47058823529411764, + "acc_stderr": 0.03503235296367993, + "acc_norm": 0.47058823529411764, + "acc_norm_stderr": 0.03503235296367993 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.46060606060606063, + "acc_stderr": 0.03892207016552013, + "acc_norm": 0.46060606060606063, + "acc_norm_stderr": 0.03892207016552013 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2839657282741738, + "mc1_stderr": 0.015785370858396708, + "mc2": 0.4663054587466787, + "mc2_stderr": 0.015613323568757127 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4805194805194805, + "acc_stderr": 0.017177301992342547, + "acc_norm": 0.51357733175915, + "acc_norm_stderr": 0.01718401506040145 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 
1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + 
"harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "MNCJ1hun/MIstral-11B-Omni-OP-1k-2048-ver0.1", + "model_sha": "a64bcca1371fa2285981fc40dbd8b879857f1e2e", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/MNCJ1hun/MIstral-11B-Omni-OP-u1k-ver0.1/result_2023-10-29 00:20:22.json b/MNCJ1hun/MIstral-11B-Omni-OP-u1k-ver0.1/result_2023-10-29 00:20:22.json new file mode 100644 index 0000000000000000000000000000000000000000..1d6ae20595f9f6b0e9daf55bb18f375326d8e991 --- /dev/null +++ b/MNCJ1hun/MIstral-11B-Omni-OP-u1k-ver0.1/result_2023-10-29 00:20:22.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3575085324232082, + "acc_stderr": 0.014005494275916576, + "acc_norm": 0.42150170648464164, + "acc_norm_stderr": 0.014430197069326028 + }, + "harness|ko_hellaswag|10": { + "acc": 0.38279227245568614, + "acc_stderr": 0.004850748687859933, + "acc_norm": 0.4874526986656045, + "acc_norm_stderr": 0.004988210033832016 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5029239766081871, + "acc_stderr": 0.03834759370936839, + "acc_norm": 0.5029239766081871, + "acc_norm_stderr": 0.03834759370936839 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5728155339805825, + "acc_stderr": 0.04897957737781168, + "acc_norm": 0.5728155339805825, + "acc_norm_stderr": 0.04897957737781168 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4623243933588761, + "acc_stderr": 0.017829131764287198, + "acc_norm": 0.4623243933588761, + "acc_norm_stderr": 0.017829131764287198 + }, + 
"harness|ko_mmlu_anatomy|5": { + "acc": 0.34814814814814815, + "acc_stderr": 0.041153246103369526, + "acc_norm": 0.34814814814814815, + "acc_norm_stderr": 0.041153246103369526 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4, + "acc_stderr": 0.032025630761017346, + "acc_norm": 0.4, + "acc_norm_stderr": 0.032025630761017346 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.41566265060240964, + "acc_stderr": 0.038367221765980515, + "acc_norm": 0.41566265060240964, + "acc_norm_stderr": 0.038367221765980515 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.47266881028938906, + "acc_stderr": 0.02835563356832818, + "acc_norm": 0.47266881028938906, + "acc_norm_stderr": 0.02835563356832818 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4618834080717489, + "acc_stderr": 0.033460150119732274, + "acc_norm": 0.4618834080717489, + "acc_norm_stderr": 0.033460150119732274 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.44274809160305345, + "acc_stderr": 0.04356447202665069, + "acc_norm": 0.44274809160305345, + "acc_norm_stderr": 0.04356447202665069 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5656565656565656, + "acc_stderr": 0.03531505879359182, + "acc_norm": 0.5656565656565656, + "acc_norm_stderr": 0.03531505879359182 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.45517241379310347, + "acc_stderr": 0.04149886942192117, + "acc_norm": 0.45517241379310347, + "acc_norm_stderr": 0.04149886942192117 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.17647058823529413, + "acc_stderr": 0.03793281185307807, + "acc_norm": 0.17647058823529413, + "acc_norm_stderr": 0.03793281185307807 + }, + 
"harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.46638655462184875, + "acc_stderr": 0.03240501447690071, + "acc_norm": 0.46638655462184875, + "acc_norm_stderr": 0.03240501447690071 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4282051282051282, + "acc_stderr": 0.02508830145469484, + "acc_norm": 0.4282051282051282, + "acc_norm_stderr": 0.02508830145469484 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.7, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.7, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3793103448275862, + "acc_stderr": 0.034139638059062345, + "acc_norm": 0.3793103448275862, + "acc_norm_stderr": 0.034139638059062345 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.432258064516129, + "acc_stderr": 0.028181739720019413, + "acc_norm": 0.432258064516129, + "acc_norm_stderr": 0.028181739720019413 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6923076923076923, + "acc_stderr": 0.030236389942173095, + "acc_norm": 0.6923076923076923, + "acc_norm_stderr": 0.030236389942173095 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4679245283018868, + "acc_stderr": 0.030709486992556538, + "acc_norm": 0.4679245283018868, + "acc_norm_stderr": 0.030709486992556538 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5363636363636364, + "acc_stderr": 0.04776449162396197, + "acc_norm": 0.5363636363636364, + "acc_norm_stderr": 0.04776449162396197 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2814814814814815, + "acc_stderr": 0.027420019350945277, + "acc_norm": 0.2814814814814815, 
+ "acc_norm_stderr": 0.027420019350945277 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2781456953642384, + "acc_stderr": 0.03658603262763744, + "acc_norm": 0.2781456953642384, + "acc_norm_stderr": 0.03658603262763744 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6019900497512438, + "acc_stderr": 0.03461199429040013, + "acc_norm": 0.6019900497512438, + "acc_norm_stderr": 0.03461199429040013 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3815028901734104, + "acc_stderr": 0.03703851193099521, + "acc_norm": 0.3815028901734104, + "acc_norm_stderr": 0.03703851193099521 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.0242785680243077, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.0242785680243077 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3263888888888889, + "acc_stderr": 0.03921067198982266, + "acc_norm": 0.3263888888888889, + "acc_norm_stderr": 0.03921067198982266 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4913294797687861, + "acc_stderr": 0.026915047355369818, + "acc_norm": 0.4913294797687861, + "acc_norm_stderr": 0.026915047355369818 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4601226993865031, + "acc_stderr": 0.03915857291436971, + "acc_norm": 0.4601226993865031, + "acc_norm_stderr": 0.03915857291436971 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.43209876543209874, + "acc_stderr": 0.02756301097160668, + "acc_norm": 0.43209876543209874, + "acc_norm_stderr": 0.02756301097160668 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + 
"acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5181347150259067, + "acc_stderr": 0.036060650018329185, + "acc_norm": 0.5181347150259067, + "acc_norm_stderr": 0.036060650018329185 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3157894736842105, + "acc_stderr": 0.043727482902780085, + "acc_norm": 0.3157894736842105, + "acc_norm_stderr": 0.043727482902780085 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.47522935779816516, + "acc_stderr": 0.02141099975363592, + "acc_norm": 0.47522935779816516, + "acc_norm_stderr": 0.02141099975363592 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.04285714285714281, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.04285714285714281 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.45098039215686275, + "acc_stderr": 0.028491993586171573, + "acc_norm": 0.45098039215686275, + "acc_norm_stderr": 0.028491993586171573 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6528925619834711, + "acc_stderr": 0.04345724570292534, + "acc_norm": 0.6528925619834711, + "acc_norm_stderr": 0.04345724570292534 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.39473684210526316, + "acc_stderr": 0.03977749934622074, + "acc_norm": 0.39473684210526316, + "acc_norm_stderr": 0.03977749934622074 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3872549019607843, + "acc_stderr": 0.019706875804085627, + "acc_norm": 0.3872549019607843, + "acc_norm_stderr": 0.019706875804085627 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.36524822695035464, + "acc_stderr": 0.028723863853281278, + "acc_norm": 0.36524822695035464, + "acc_norm_stderr": 0.028723863853281278 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 
0.39285714285714285, + "acc_stderr": 0.04635550135609976, + "acc_norm": 0.39285714285714285, + "acc_norm_stderr": 0.04635550135609976 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.39351851851851855, + "acc_stderr": 0.03331747876370312, + "acc_norm": 0.39351851851851855, + "acc_norm_stderr": 0.03331747876370312 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.21564245810055865, + "acc_stderr": 0.013754835975482355, + "acc_norm": 0.21564245810055865, + "acc_norm_stderr": 0.013754835975482355 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.36764705882352944, + "acc_stderr": 0.029289413409403192, + "acc_norm": 0.36764705882352944, + "acc_norm_stderr": 0.029289413409403192 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.43673469387755104, + "acc_stderr": 0.03175195237583322, + "acc_norm": 0.43673469387755104, + "acc_norm_stderr": 0.03175195237583322 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5907172995780591, + "acc_stderr": 0.03200704183359591, + "acc_norm": 0.5907172995780591, + "acc_norm_stderr": 0.03200704183359591 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.31747066492829207, + "acc_stderr": 0.01188889206880931, + "acc_norm": 0.31747066492829207, + "acc_norm_stderr": 0.01188889206880931 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5098039215686274, + "acc_stderr": 0.03508637358630572, + "acc_norm": 0.5098039215686274, + "acc_norm_stderr": 0.03508637358630572 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.03895658065271846, + "acc_norm": 0.4666666666666667, + 
"acc_norm_stderr": 0.03895658065271846 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2876376988984088, + "mc1_stderr": 0.015846315101394823, + "mc2": 0.47510378175366297, + "mc2_stderr": 0.015686785961170725 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4297520661157025, + "acc_stderr": 0.017019847535972205, + "acc_norm": 0.48642266824085006, + "acc_norm_stderr": 0.017184015060401448 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + 
"harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "MNCJ1hun/MIstral-11B-Omni-OP-u1k-ver0.1", + "model_sha": "3cf7eb4c014f181bec2a9b36897771b2710422d1", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/MNCJ1hun/Mistral-7B-OP-u1k-ver0.4/result_2023-10-29 12:13:24.json b/MNCJ1hun/Mistral-7B-OP-u1k-ver0.4/result_2023-10-29 12:13:24.json new file mode 100644 index 0000000000000000000000000000000000000000..c2b93f10f3ad33b439e1e8e611543aa5371d09e1 --- /dev/null +++ b/MNCJ1hun/Mistral-7B-OP-u1k-ver0.4/result_2023-10-29 12:13:24.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3455631399317406, + "acc_stderr": 0.013896938461145685, + "acc_norm": 0.39334470989761094, + "acc_norm_stderr": 0.014275101465693028 + }, + 
"harness|ko_hellaswag|10": { + "acc": 0.37860983867755427, + "acc_stderr": 0.004840493603166214, + "acc_norm": 0.49482174865564627, + "acc_norm_stderr": 0.004989513809408586 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4853801169590643, + "acc_stderr": 0.038331852752130205, + "acc_norm": 0.4853801169590643, + "acc_norm_stderr": 0.038331852752130205 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5533980582524272, + "acc_stderr": 0.04922424153458935, + "acc_norm": 0.5533980582524272, + "acc_norm_stderr": 0.04922424153458935 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.45977011494252873, + "acc_stderr": 0.01782199409693353, + "acc_norm": 0.45977011494252873, + "acc_norm_stderr": 0.01782199409693353 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.42962962962962964, + "acc_stderr": 0.04276349494376599, + "acc_norm": 0.42962962962962964, + "acc_norm_stderr": 0.04276349494376599 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.425531914893617, + "acc_stderr": 0.03232146916224469, + "acc_norm": 0.425531914893617, + "acc_norm_stderr": 0.03232146916224469 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42771084337349397, + "acc_stderr": 0.03851597683718533, + "acc_norm": 0.42771084337349397, + "acc_norm_stderr": 0.03851597683718533 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5048231511254019, + "acc_stderr": 0.028396770444111298, + "acc_norm": 0.5048231511254019, + "acc_norm_stderr": 0.028396770444111298 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.452914798206278, + "acc_stderr": 0.03340867501923325, + "acc_norm": 0.452914798206278, + "acc_norm_stderr": 0.03340867501923325 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4732824427480916, + "acc_stderr": 0.04379024936553893, + "acc_norm": 0.4732824427480916, + "acc_norm_stderr": 0.04379024936553893 + 
}, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5757575757575758, + "acc_stderr": 0.03521224908841585, + "acc_norm": 0.5757575757575758, + "acc_norm_stderr": 0.03521224908841585 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.43448275862068964, + "acc_stderr": 0.041307408795554966, + "acc_norm": 0.43448275862068964, + "acc_norm_stderr": 0.041307408795554966 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.04280105837364396, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.04280105837364396 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5336134453781513, + "acc_stderr": 0.03240501447690071, + "acc_norm": 0.5336134453781513, + "acc_norm_stderr": 0.03240501447690071 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4897435897435897, + "acc_stderr": 0.025345672221942374, + "acc_norm": 0.4897435897435897, + "acc_norm_stderr": 0.025345672221942374 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.57, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.57, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.047609522856952344, + "acc_norm": 0.34, + "acc_norm_stderr": 0.047609522856952344 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5277777777777778, + "acc_stderr": 0.04826217294139894, + "acc_norm": 0.5277777777777778, + "acc_norm_stderr": 0.04826217294139894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.43349753694581283, + "acc_stderr": 0.034867317274198714, + "acc_norm": 0.43349753694581283, + "acc_norm_stderr": 0.034867317274198714 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.49032258064516127, + "acc_stderr": 0.02843867799890955, + "acc_norm": 0.49032258064516127, + 
"acc_norm_stderr": 0.02843867799890955 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7435897435897436, + "acc_stderr": 0.02860595370200425, + "acc_norm": 0.7435897435897436, + "acc_norm_stderr": 0.02860595370200425 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.42641509433962266, + "acc_stderr": 0.030437794342983042, + "acc_norm": 0.42641509433962266, + "acc_norm_stderr": 0.030437794342983042 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.0478833976870286, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.0478833976870286 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.34444444444444444, + "acc_stderr": 0.02897264888484427, + "acc_norm": 0.34444444444444444, + "acc_norm_stderr": 0.02897264888484427 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.23178807947019867, + "acc_stderr": 0.034454062719870546, + "acc_norm": 0.23178807947019867, + "acc_norm_stderr": 0.034454062719870546 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6268656716417911, + "acc_stderr": 0.03419832608176007, + "acc_norm": 0.6268656716417911, + "acc_norm_stderr": 0.03419832608176007 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4161849710982659, + "acc_stderr": 0.03758517775404947, + "acc_norm": 0.4161849710982659, + "acc_norm_stderr": 0.03758517775404947 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.38095238095238093, + "acc_stderr": 0.0250107491161376, + "acc_norm": 0.38095238095238093, + "acc_norm_stderr": 0.0250107491161376 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3819444444444444, + "acc_stderr": 0.040629907841466674, + "acc_norm": 0.3819444444444444, + "acc_norm_stderr": 0.040629907841466674 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.57, + "acc_stderr": 
0.04975698519562427, + "acc_norm": 0.57, + "acc_norm_stderr": 0.04975698519562427 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5260115606936416, + "acc_stderr": 0.026882643434022902, + "acc_norm": 0.5260115606936416, + "acc_norm_stderr": 0.026882643434022902 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5214723926380368, + "acc_stderr": 0.03924746876751129, + "acc_norm": 0.5214723926380368, + "acc_norm_stderr": 0.03924746876751129 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.027801656212323667, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.027801656212323667 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5284974093264249, + "acc_stderr": 0.036025735712884414, + "acc_norm": 0.5284974093264249, + "acc_norm_stderr": 0.036025735712884414 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04434600701584925, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04434600701584925 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.47889908256880737, + "acc_stderr": 0.02141822475426464, + "acc_norm": 0.47889908256880737, + "acc_norm_stderr": 0.02141822475426464 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.42063492063492064, + "acc_stderr": 0.04415438226743744, + "acc_norm": 0.42063492063492064, + "acc_norm_stderr": 0.04415438226743744 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.49019607843137253, + "acc_stderr": 0.028624412550167965, + "acc_norm": 0.49019607843137253, + "acc_norm_stderr": 0.028624412550167965 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.45, + "acc_stderr": 0.049999999999999996, + "acc_norm": 0.45, + "acc_norm_stderr": 0.049999999999999996 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 
0.6611570247933884, + "acc_stderr": 0.0432076780753667, + "acc_norm": 0.6611570247933884, + "acc_norm_stderr": 0.0432076780753667 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4473684210526316, + "acc_stderr": 0.04046336883978252, + "acc_norm": 0.4473684210526316, + "acc_norm_stderr": 0.04046336883978252 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4084967320261438, + "acc_stderr": 0.01988622103750186, + "acc_norm": 0.4084967320261438, + "acc_norm_stderr": 0.01988622103750186 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.35815602836879434, + "acc_stderr": 0.028602085862759412, + "acc_norm": 0.35815602836879434, + "acc_norm_stderr": 0.028602085862759412 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.39285714285714285, + "acc_stderr": 0.04635550135609976, + "acc_norm": 0.39285714285714285, + "acc_norm_stderr": 0.04635550135609976 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.44907407407407407, + "acc_stderr": 0.03392238405321616, + "acc_norm": 0.44907407407407407, + "acc_norm_stderr": 0.03392238405321616 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.3396648044692737, + "acc_stderr": 0.01583940040621249, + "acc_norm": 0.3396648044692737, + "acc_norm_stderr": 0.01583940040621249 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.64, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.64, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.45588235294117646, + "acc_stderr": 0.03025437257397669, + "acc_norm": 0.45588235294117646, + "acc_norm_stderr": 0.03025437257397669 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5551020408163265, + "acc_stderr": 0.031814251181977865, + "acc_norm": 0.5551020408163265, + "acc_norm_stderr": 
0.031814251181977865 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5864978902953587, + "acc_stderr": 0.03205649904851858, + "acc_norm": 0.5864978902953587, + "acc_norm_stderr": 0.03205649904851858 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.32790091264667537, + "acc_stderr": 0.011989936640666544, + "acc_norm": 0.32790091264667537, + "acc_norm_stderr": 0.011989936640666544 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.45588235294117646, + "acc_stderr": 0.03495624522015473, + "acc_norm": 0.45588235294117646, + "acc_norm_stderr": 0.03495624522015473 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.48484848484848486, + "acc_stderr": 0.039025510073744496, + "acc_norm": 0.48484848484848486, + "acc_norm_stderr": 0.039025510073744496 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2962056303549572, + "mc1_stderr": 0.01598359510181139, + "mc2": 0.48626023725218265, + "mc2_stderr": 0.015456180399303063 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.51357733175915, + "acc_stderr": 0.01718401506040146, + "acc_norm": 0.5537190082644629, + "acc_norm_stderr": 0.017090852631668332 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + 
"harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "MNCJ1hun/Mistral-7B-OP-u1k-ver0.4", + "model_sha": 
"584915ea3f453b6771b188b11629e859473e7e9d", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/MNCJ1hun/Mistral-7B-OP-u1k-ver0.5/result_2023-10-29 12:13:11.json b/MNCJ1hun/Mistral-7B-OP-u1k-ver0.5/result_2023-10-29 12:13:11.json new file mode 100644 index 0000000000000000000000000000000000000000..f1fb50a17277b1039f7b0a6c6cdd9fc59284c986 --- /dev/null +++ b/MNCJ1hun/Mistral-7B-OP-u1k-ver0.5/result_2023-10-29 12:13:11.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3506825938566553, + "acc_stderr": 0.013944635930726085, + "acc_norm": 0.3993174061433447, + "acc_norm_stderr": 0.014312094557946704 + }, + "harness|ko_hellaswag|10": { + "acc": 0.38099980083648677, + "acc_stderr": 0.004846400325585238, + "acc_norm": 0.485062736506672, + "acc_norm_stderr": 0.004987554255981858 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4678362573099415, + "acc_stderr": 0.03826882417660368, + "acc_norm": 0.4678362573099415, + "acc_norm_stderr": 0.03826882417660368 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5339805825242718, + "acc_stderr": 0.04939291447273481, + "acc_norm": 0.5339805825242718, + "acc_norm_stderr": 0.04939291447273481 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.44316730523627074, + "acc_stderr": 0.01776408503534839, + "acc_norm": 0.44316730523627074, + "acc_norm_stderr": 0.01776408503534839 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.362962962962963, + "acc_stderr": 0.04153948404742399, + "acc_norm": 0.362962962962963, + "acc_norm_stderr": 0.04153948404742399 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4127659574468085, + "acc_stderr": 0.03218471141400351, + "acc_norm": 
0.4127659574468085, + "acc_norm_stderr": 0.03218471141400351 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4457831325301205, + "acc_stderr": 0.03869543323472101, + "acc_norm": 0.4457831325301205, + "acc_norm_stderr": 0.03869543323472101 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.45016077170418006, + "acc_stderr": 0.02825666072336018, + "acc_norm": 0.45016077170418006, + "acc_norm_stderr": 0.02825666072336018 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4125560538116592, + "acc_stderr": 0.03304062175449296, + "acc_norm": 0.4125560538116592, + "acc_norm_stderr": 0.03304062175449296 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.46564885496183206, + "acc_stderr": 0.043749285605997376, + "acc_norm": 0.46564885496183206, + "acc_norm_stderr": 0.043749285605997376 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.48484848484848486, + "acc_stderr": 0.03560716516531061, + "acc_norm": 0.48484848484848486, + "acc_norm_stderr": 0.03560716516531061 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4482758620689655, + "acc_stderr": 0.04144311810878151, + "acc_norm": 0.4482758620689655, + "acc_norm_stderr": 0.04144311810878151 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.0438986995680878, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.0438986995680878 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.032252942323996406, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.032252942323996406 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4230769230769231, + "acc_stderr": 0.025049197876042335, + "acc_norm": 0.4230769230769231, + "acc_norm_stderr": 0.025049197876042335 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.57, + 
"acc_stderr": 0.0497569851956243, + "acc_norm": 0.57, + "acc_norm_stderr": 0.0497569851956243 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3793103448275862, + "acc_stderr": 0.03413963805906235, + "acc_norm": 0.3793103448275862, + "acc_norm_stderr": 0.03413963805906235 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.46774193548387094, + "acc_stderr": 0.028384747788813326, + "acc_norm": 0.46774193548387094, + "acc_norm_stderr": 0.028384747788813326 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7435897435897436, + "acc_stderr": 0.028605953702004253, + "acc_norm": 0.7435897435897436, + "acc_norm_stderr": 0.028605953702004253 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.45660377358490567, + "acc_stderr": 0.030656748696739428, + "acc_norm": 0.45660377358490567, + "acc_norm_stderr": 0.030656748696739428 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4636363636363636, + "acc_stderr": 0.047764491623961985, + "acc_norm": 0.4636363636363636, + "acc_norm_stderr": 0.047764491623961985 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.028742040903948492, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.028742040903948492 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2582781456953642, + "acc_stderr": 0.035737053147634576, + "acc_norm": 0.2582781456953642, + "acc_norm_stderr": 0.035737053147634576 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5522388059701493, + "acc_stderr": 0.03516184772952167, + "acc_norm": 0.5522388059701493, + "acc_norm_stderr": 0.03516184772952167 + }, + 
"harness|ko_mmlu_college_medicine|5": { + "acc": 0.3468208092485549, + "acc_stderr": 0.03629146670159664, + "acc_norm": 0.3468208092485549, + "acc_norm_stderr": 0.03629146670159664 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.373015873015873, + "acc_stderr": 0.02490699045899257, + "acc_norm": 0.373015873015873, + "acc_norm_stderr": 0.02490699045899257 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3263888888888889, + "acc_stderr": 0.03921067198982266, + "acc_norm": 0.3263888888888889, + "acc_norm_stderr": 0.03921067198982266 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.56, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.56, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.48554913294797686, + "acc_stderr": 0.026907849856282542, + "acc_norm": 0.48554913294797686, + "acc_norm_stderr": 0.026907849856282542 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.43558282208588955, + "acc_stderr": 0.03895632464138937, + "acc_norm": 0.43558282208588955, + "acc_norm_stderr": 0.03895632464138937 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4537037037037037, + "acc_stderr": 0.027701228468542602, + "acc_norm": 0.4537037037037037, + "acc_norm_stderr": 0.027701228468542602 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709390974, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709390974 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.533678756476684, + "acc_stderr": 0.036002440698671784, + "acc_norm": 0.533678756476684, + "acc_norm_stderr": 0.036002440698671784 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.30701754385964913, + "acc_stderr": 0.0433913832257986, + "acc_norm": 0.30701754385964913, + "acc_norm_stderr": 
0.0433913832257986 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.45871559633027525, + "acc_stderr": 0.021364122533881695, + "acc_norm": 0.45871559633027525, + "acc_norm_stderr": 0.021364122533881695 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3253968253968254, + "acc_stderr": 0.041905964388711366, + "acc_norm": 0.3253968253968254, + "acc_norm_stderr": 0.041905964388711366 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4869281045751634, + "acc_stderr": 0.028620130800700246, + "acc_norm": 0.4869281045751634, + "acc_norm_stderr": 0.028620130800700246 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6115702479338843, + "acc_stderr": 0.04449270350068382, + "acc_norm": 0.6115702479338843, + "acc_norm_stderr": 0.04449270350068382 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40789473684210525, + "acc_stderr": 0.03999309712777471, + "acc_norm": 0.40789473684210525, + "acc_norm_stderr": 0.03999309712777471 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3709150326797386, + "acc_stderr": 0.01954210156485412, + "acc_norm": 0.3709150326797386, + "acc_norm_stderr": 0.01954210156485412 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.30851063829787234, + "acc_stderr": 0.027553366165101362, + "acc_norm": 0.30851063829787234, + "acc_norm_stderr": 0.027553366165101362 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.39285714285714285, + "acc_stderr": 0.046355501356099754, + "acc_norm": 0.39285714285714285, + "acc_norm_stderr": 0.046355501356099754 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.37962962962962965, + "acc_stderr": 0.03309682581119035, + "acc_norm": 0.37962962962962965, + "acc_norm_stderr": 0.03309682581119035 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23687150837988827, + "acc_stderr": 
0.014219570788103987, + "acc_norm": 0.23687150837988827, + "acc_norm_stderr": 0.014219570788103987 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.55, + "acc_stderr": 0.049999999999999996, + "acc_norm": 0.55, + "acc_norm_stderr": 0.049999999999999996 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.36764705882352944, + "acc_stderr": 0.029289413409403192, + "acc_norm": 0.36764705882352944, + "acc_norm_stderr": 0.029289413409403192 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5020408163265306, + "acc_stderr": 0.0320089533497105, + "acc_norm": 0.5020408163265306, + "acc_norm_stderr": 0.0320089533497105 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5189873417721519, + "acc_stderr": 0.03252375148090447, + "acc_norm": 0.5189873417721519, + "acc_norm_stderr": 0.03252375148090447 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3076923076923077, + "acc_stderr": 0.011787910251664594, + "acc_norm": 0.3076923076923077, + "acc_norm_stderr": 0.011787910251664594 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.45588235294117646, + "acc_stderr": 0.03495624522015474, + "acc_norm": 0.45588235294117646, + "acc_norm_stderr": 0.03495624522015474 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4121212121212121, + "acc_stderr": 0.03843566993588718, + "acc_norm": 0.4121212121212121, + "acc_norm_stderr": 0.03843566993588718 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.27539779681762544, + "mc1_stderr": 0.015638135667775523, + "mc2": 0.4662275406128657, + "mc2_stderr": 0.015931307436185087 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4132231404958678, + "acc_stderr": 0.016929480234495226, + "acc_norm": 0.4462809917355372, + "acc_norm_stderr": 0.017090852631668336 + } + }, + "versions": { + "all": 0, + 
"harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + 
"harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "MNCJ1hun/Mistral-7B-OP-u1k-ver0.5", + "model_sha": "3ccdca4afa332d805c50ffbaaa84cd8fa8b9ebe1", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/MNCJ1hun/Zephyr-7B-alpha-OP-u1k-ver0.1/result_2023-10-29 00:21:54.json b/MNCJ1hun/Zephyr-7B-alpha-OP-u1k-ver0.1/result_2023-10-29 00:21:54.json new file mode 100644 index 0000000000000000000000000000000000000000..af5c1ec1665b6861cdc88048169842cfbe73afa6 --- /dev/null +++ b/MNCJ1hun/Zephyr-7B-alpha-OP-u1k-ver0.1/result_2023-10-29 00:21:54.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3532423208191126, + "acc_stderr": 0.013967822714840055, + "acc_norm": 0.41723549488054607, + "acc_norm_stderr": 0.014409825518403082 + }, + "harness|ko_hellaswag|10": { + "acc": 0.38149770961959767, + "acc_stderr": 0.00484761521647345, + "acc_norm": 0.4923322047400916, + "acc_norm_stderr": 0.0049891946277078525 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5087719298245614, + "acc_stderr": 0.03834234744164993, + "acc_norm": 0.5087719298245614, + "acc_norm_stderr": 0.03834234744164993 + }, + 
"harness|ko_mmlu_management|5": { + "acc": 0.6116504854368932, + "acc_stderr": 0.04825729337356389, + "acc_norm": 0.6116504854368932, + "acc_norm_stderr": 0.04825729337356389 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.46871008939974457, + "acc_stderr": 0.017844918090468547, + "acc_norm": 0.46871008939974457, + "acc_norm_stderr": 0.017844918090468547 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34074074074074073, + "acc_stderr": 0.04094376269996794, + "acc_norm": 0.34074074074074073, + "acc_norm_stderr": 0.04094376269996794 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720683, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720683 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4553191489361702, + "acc_stderr": 0.03255525359340355, + "acc_norm": 0.4553191489361702, + "acc_norm_stderr": 0.03255525359340355 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3795180722891566, + "acc_stderr": 0.037777988227480165, + "acc_norm": 0.3795180722891566, + "acc_norm_stderr": 0.037777988227480165 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4855305466237942, + "acc_stderr": 0.02838619808417768, + "acc_norm": 0.4855305466237942, + "acc_norm_stderr": 0.02838619808417768 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4618834080717489, + "acc_stderr": 0.033460150119732274, + "acc_norm": 0.4618834080717489, + "acc_norm_stderr": 0.033460150119732274 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4122137404580153, + "acc_stderr": 0.04317171194870255, + "acc_norm": 0.4122137404580153, + "acc_norm_stderr": 0.04317171194870255 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5151515151515151, + "acc_stderr": 0.03560716516531061, + "acc_norm": 0.5151515151515151, + "acc_norm_stderr": 0.03560716516531061 + }, + 
"harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3586206896551724, + "acc_stderr": 0.039966295748767186, + "acc_norm": 0.3586206896551724, + "acc_norm_stderr": 0.039966295748767186 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.04158307533083286, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.04158307533083286 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.46218487394957986, + "acc_stderr": 0.032385469487589795, + "acc_norm": 0.46218487394957986, + "acc_norm_stderr": 0.032385469487589795 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.43333333333333335, + "acc_stderr": 0.02512465352588513, + "acc_norm": 0.43333333333333335, + "acc_norm_stderr": 0.02512465352588513 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.58, + "acc_stderr": 0.04960449637488583, + "acc_norm": 0.58, + "acc_norm_stderr": 0.04960449637488583 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.04832853553437056, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.04832853553437056 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3694581280788177, + "acc_stderr": 0.03395970381998573, + "acc_norm": 0.3694581280788177, + "acc_norm_stderr": 0.03395970381998573 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.45806451612903226, + "acc_stderr": 0.028343787250540636, + "acc_norm": 0.45806451612903226, + "acc_norm_stderr": 0.028343787250540636 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.688034188034188, + "acc_stderr": 0.03035152732334493, + "acc_norm": 0.688034188034188, + "acc_norm_stderr": 0.03035152732334493 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4641509433962264, + "acc_stderr": 0.030693675018458003, + "acc_norm": 
0.4641509433962264, + "acc_norm_stderr": 0.030693675018458003 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5272727272727272, + "acc_stderr": 0.04782001791380061, + "acc_norm": 0.5272727272727272, + "acc_norm_stderr": 0.04782001791380061 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3111111111111111, + "acc_stderr": 0.028226446749683515, + "acc_norm": 0.3111111111111111, + "acc_norm_stderr": 0.028226446749683515 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.25165562913907286, + "acc_stderr": 0.03543304234389985, + "acc_norm": 0.25165562913907286, + "acc_norm_stderr": 0.03543304234389985 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6169154228855721, + "acc_stderr": 0.034375193373382504, + "acc_norm": 0.6169154228855721, + "acc_norm_stderr": 0.034375193373382504 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3352601156069364, + "acc_stderr": 0.035995863012470784, + "acc_norm": 0.3352601156069364, + "acc_norm_stderr": 0.035995863012470784 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.335978835978836, + "acc_stderr": 0.02432631052914913, + "acc_norm": 0.335978835978836, + "acc_norm_stderr": 0.02432631052914913 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3263888888888889, + "acc_stderr": 0.03921067198982266, + "acc_norm": 0.3263888888888889, + "acc_norm_stderr": 0.03921067198982266 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5, + "acc_stderr": 0.026919095102908273, + "acc_norm": 0.5, + "acc_norm_stderr": 0.026919095102908273 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.49079754601226994, + "acc_stderr": 0.039277056007874414, + 
"acc_norm": 0.49079754601226994, + "acc_norm_stderr": 0.039277056007874414 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.36728395061728397, + "acc_stderr": 0.026822801759507894, + "acc_norm": 0.36728395061728397, + "acc_norm_stderr": 0.026822801759507894 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.538860103626943, + "acc_stderr": 0.035975244117345775, + "acc_norm": 0.538860103626943, + "acc_norm_stderr": 0.035975244117345775 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3157894736842105, + "acc_stderr": 0.04372748290278008, + "acc_norm": 0.3157894736842105, + "acc_norm_stderr": 0.04372748290278008 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5045871559633027, + "acc_stderr": 0.02143642095552942, + "acc_norm": 0.5045871559633027, + "acc_norm_stderr": 0.02143642095552942 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04216370213557835, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04216370213557835 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.49019607843137253, + "acc_stderr": 0.028624412550167965, + "acc_norm": 0.49019607843137253, + "acc_norm_stderr": 0.028624412550167965 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6198347107438017, + "acc_stderr": 0.04431324501968432, + "acc_norm": 0.6198347107438017, + "acc_norm_stderr": 0.04431324501968432 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.39473684210526316, + "acc_stderr": 0.03977749934622074, + "acc_norm": 0.39473684210526316, + "acc_norm_stderr": 0.03977749934622074 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3709150326797386, + 
"acc_stderr": 0.01954210156485412, + "acc_norm": 0.3709150326797386, + "acc_norm_stderr": 0.01954210156485412 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.028121636040639886, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.028121636040639886 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.32142857142857145, + "acc_stderr": 0.04432804055291521, + "acc_norm": 0.32142857142857145, + "acc_norm_stderr": 0.04432804055291521 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.39351851851851855, + "acc_stderr": 0.03331747876370312, + "acc_norm": 0.39351851851851855, + "acc_norm_stderr": 0.03331747876370312 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24134078212290502, + "acc_stderr": 0.014310999547961459, + "acc_norm": 0.24134078212290502, + "acc_norm_stderr": 0.014310999547961459 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4117647058823529, + "acc_stderr": 0.029896163033125468, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.029896163033125468 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.44081632653061226, + "acc_stderr": 0.03178419114175363, + "acc_norm": 0.44081632653061226, + "acc_norm_stderr": 0.03178419114175363 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5189873417721519, + "acc_stderr": 0.03252375148090447, + "acc_norm": 0.5189873417721519, + "acc_norm_stderr": 0.03252375148090447 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.29921773142112124, + "acc_stderr": 0.011695374630696052, + "acc_norm": 0.29921773142112124, + "acc_norm_stderr": 0.011695374630696052 + 
}, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5, + "acc_stderr": 0.03509312031717982, + "acc_norm": 0.5, + "acc_norm_stderr": 0.03509312031717982 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.46060606060606063, + "acc_stderr": 0.03892207016552013, + "acc_norm": 0.46060606060606063, + "acc_norm_stderr": 0.03892207016552013 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2741738066095471, + "mc1_stderr": 0.015616518497219371, + "mc2": 0.45601808163931185, + "mc2_stderr": 0.015622209231910858 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.40731995277449823, + "acc_stderr": 0.01689245669519127, + "acc_norm": 0.4604486422668241, + "acc_norm_stderr": 0.017136487626049846 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + 
"harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "MNCJ1hun/Zephyr-7B-alpha-OP-u1k-ver0.1", + "model_sha": "7692de676eb6a3561d10a21a64bcf45cc629665b", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/MNCJihun/Mistral-7B-OpenOrca-eng-kor-combined/result_2023-10-24 01:28:22.json b/MNCJihun/Mistral-7B-OpenOrca-eng-kor-combined/result_2023-10-24 01:28:22.json new file mode 100644 index 
0000000000000000000000000000000000000000..3bccf879ace5823d26407a014943723b2d3cdfc0 --- /dev/null +++ b/MNCJihun/Mistral-7B-OpenOrca-eng-kor-combined/result_2023-10-24 01:28:22.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2721843003412969, + "acc_stderr": 0.013006600406423709, + "acc_norm": 0.32849829351535836, + "acc_norm_stderr": 0.013724978465537377 + }, + "harness|ko_hellaswag|10": { + "acc": 0.345947022505477, + "acc_stderr": 0.00474703876817253, + "acc_norm": 0.42362079267078273, + "acc_norm_stderr": 0.004931219148182244 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4269005847953216, + "acc_stderr": 0.03793620616529916, + "acc_norm": 0.4269005847953216, + "acc_norm_stderr": 0.03793620616529916 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.3592233009708738, + "acc_stderr": 0.047504583990416925, + "acc_norm": 0.3592233009708738, + "acc_norm_stderr": 0.047504583990416925 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4163473818646232, + "acc_stderr": 0.017627948030430298, + "acc_norm": 0.4163473818646232, + "acc_norm_stderr": 0.017627948030430298 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2814814814814815, + "acc_stderr": 0.03885004245800254, + "acc_norm": 0.2814814814814815, + "acc_norm_stderr": 0.03885004245800254 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.31063829787234043, + "acc_stderr": 0.03025123757921317, + "acc_norm": 0.31063829787234043, + "acc_norm_stderr": 0.03025123757921317 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3253012048192771, + "acc_stderr": 0.03647168523683227, + "acc_norm": 0.3253012048192771, + "acc_norm_stderr": 0.03647168523683227 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3633440514469453, + "acc_stderr": 0.027316847674192717, + "acc_norm": 0.3633440514469453, + 
"acc_norm_stderr": 0.027316847674192717 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3542600896860987, + "acc_stderr": 0.032100621541349864, + "acc_norm": 0.3542600896860987, + "acc_norm_stderr": 0.032100621541349864 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3282442748091603, + "acc_stderr": 0.041184385658062976, + "acc_norm": 0.3282442748091603, + "acc_norm_stderr": 0.041184385658062976 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.033586181457325226, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.033586181457325226 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.32413793103448274, + "acc_stderr": 0.03900432069185553, + "acc_norm": 0.32413793103448274, + "acc_norm_stderr": 0.03900432069185553 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.18627450980392157, + "acc_stderr": 0.03873958714149352, + "acc_norm": 0.18627450980392157, + "acc_norm_stderr": 0.03873958714149352 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3403361344537815, + "acc_stderr": 0.030778057422931673, + "acc_norm": 0.3403361344537815, + "acc_norm_stderr": 0.030778057422931673 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.358974358974359, + "acc_stderr": 0.024321738484602357, + "acc_norm": 0.358974358974359, + "acc_norm_stderr": 0.024321738484602357 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.56, + "acc_stderr": 0.0498887651569859, + "acc_norm": 0.56, + "acc_norm_stderr": 0.0498887651569859 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.0471282125742677, + "acc_norm": 
0.3888888888888889, + "acc_norm_stderr": 0.0471282125742677 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3103448275862069, + "acc_stderr": 0.03255086769970103, + "acc_norm": 0.3103448275862069, + "acc_norm_stderr": 0.03255086769970103 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4, + "acc_stderr": 0.027869320571664632, + "acc_norm": 0.4, + "acc_norm_stderr": 0.027869320571664632 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5384615384615384, + "acc_stderr": 0.03265903381186195, + "acc_norm": 0.5384615384615384, + "acc_norm_stderr": 0.03265903381186195 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3622641509433962, + "acc_stderr": 0.0295822451283843, + "acc_norm": 0.3622641509433962, + "acc_norm_stderr": 0.0295822451283843 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.37272727272727274, + "acc_stderr": 0.04631381319425464, + "acc_norm": 0.37272727272727274, + "acc_norm_stderr": 0.04631381319425464 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.31851851851851853, + "acc_stderr": 0.02840653309060846, + "acc_norm": 0.31851851851851853, + "acc_norm_stderr": 0.02840653309060846 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.037579499229433426, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.037579499229433426 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.43781094527363185, + "acc_stderr": 0.0350808011219984, + "acc_norm": 0.43781094527363185, + "acc_norm_stderr": 0.0350808011219984 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.27167630057803466, + "acc_stderr": 0.03391750322321659, + "acc_norm": 0.27167630057803466, + "acc_norm_stderr": 0.03391750322321659 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.34656084656084657, + "acc_stderr": 0.024508777521028428, + "acc_norm": 0.34656084656084657, + "acc_norm_stderr": 0.024508777521028428 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 
0.2777777777777778, + "acc_stderr": 0.03745554791462457, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.03745554791462457 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.56, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.56, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.31213872832369943, + "acc_stderr": 0.02494679222527231, + "acc_norm": 0.31213872832369943, + "acc_norm_stderr": 0.02494679222527231 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.34355828220858897, + "acc_stderr": 0.037311335196738925, + "acc_norm": 0.34355828220858897, + "acc_norm_stderr": 0.037311335196738925 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.02712511551316687, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.02712511551316687 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.48704663212435234, + "acc_stderr": 0.03607228061047749, + "acc_norm": 0.48704663212435234, + "acc_norm_stderr": 0.03607228061047749 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.040493392977481404, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.040493392977481404 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3779816513761468, + "acc_stderr": 0.020789187066728113, + "acc_norm": 0.3779816513761468, + "acc_norm_stderr": 0.020789187066728113 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3253968253968254, + "acc_stderr": 0.04190596438871137, + "acc_norm": 0.3253968253968254, + "acc_norm_stderr": 0.04190596438871137 + }, + "harness|ko_mmlu_nutrition|5": { 
+ "acc": 0.3627450980392157, + "acc_stderr": 0.027530078447110317, + "acc_norm": 0.3627450980392157, + "acc_norm_stderr": 0.027530078447110317 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5289256198347108, + "acc_stderr": 0.04556710331269498, + "acc_norm": 0.5289256198347108, + "acc_norm_stderr": 0.04556710331269498 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.34868421052631576, + "acc_stderr": 0.0387813988879761, + "acc_norm": 0.34868421052631576, + "acc_norm_stderr": 0.0387813988879761 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3202614379084967, + "acc_stderr": 0.018875682938069443, + "acc_norm": 0.3202614379084967, + "acc_norm_stderr": 0.018875682938069443 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3262411347517731, + "acc_stderr": 0.02796845304356317, + "acc_norm": 0.3262411347517731, + "acc_norm_stderr": 0.02796845304356317 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.23214285714285715, + "acc_stderr": 0.04007341809755806, + "acc_norm": 0.23214285714285715, + "acc_norm_stderr": 0.04007341809755806 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.25462962962962965, + "acc_stderr": 0.02971127586000533, + "acc_norm": 0.25462962962962965, + "acc_norm_stderr": 0.02971127586000533 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2569832402234637, + "acc_stderr": 0.014614465821966342, + "acc_norm": 0.2569832402234637, + "acc_norm_stderr": 0.014614465821966342 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562428 + }, + 
"harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3161764705882353, + "acc_stderr": 0.02824568739146293, + "acc_norm": 0.3161764705882353, + "acc_norm_stderr": 0.02824568739146293 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.30612244897959184, + "acc_stderr": 0.029504896454595968, + "acc_norm": 0.30612244897959184, + "acc_norm_stderr": 0.029504896454595968 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.4388185654008439, + "acc_stderr": 0.032302649315470375, + "acc_norm": 0.4388185654008439, + "acc_norm_stderr": 0.032302649315470375 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.31421121251629724, + "acc_stderr": 0.011855911587048231, + "acc_norm": 0.31421121251629724, + "acc_norm_stderr": 0.011855911587048231 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.35784313725490197, + "acc_stderr": 0.03364487286088299, + "acc_norm": 0.35784313725490197, + "acc_norm_stderr": 0.03364487286088299 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3575757575757576, + "acc_stderr": 0.037425970438065864, + "acc_norm": 0.3575757575757576, + "acc_norm_stderr": 0.037425970438065864 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.29008567931456547, + "mc1_stderr": 0.01588623687420952, + "mc2": 0.47535947414675184, + "mc2_stderr": 0.015845184891705482 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.22195985832349469, + "acc_stderr": 0.014287394616821172, + "acc_norm": 0.2668240850059032, + "acc_norm_stderr": 0.015206575684565883 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + 
"harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + 
"harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "MNCJihun/Mistral-7B-OpenOrca-eng-kor-combined", + "model_sha": "5f5dac05ae42c508810fe2dc7d4eef1350c3a1b2", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/MNCJihun/Mistral-7B-SlimOrca-eng-kor-combined/result_2023-10-24 01:03:10.json b/MNCJihun/Mistral-7B-SlimOrca-eng-kor-combined/result_2023-10-24 01:03:10.json new file mode 100644 index 0000000000000000000000000000000000000000..beb317e4f22f8306d6ca044f2fd1ec97f622d238 --- /dev/null +++ b/MNCJihun/Mistral-7B-SlimOrca-eng-kor-combined/result_2023-10-24 01:03:10.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.295221843003413, + "acc_stderr": 0.013329750293382316, + "acc_norm": 0.3378839590443686, + "acc_norm_stderr": 0.013822047922283516 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3476399123680542, + "acc_stderr": 0.004752476997887829, + "acc_norm": 0.434973112925712, + "acc_norm_stderr": 0.004947402907996247 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.49707602339181284, + "acc_stderr": 0.03834759370936839, + "acc_norm": 0.49707602339181284, + "acc_norm_stderr": 0.03834759370936839 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.39805825242718446, + "acc_stderr": 0.0484674825397724, + "acc_norm": 0.39805825242718446, + "acc_norm_stderr": 0.0484674825397724 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4112388250319285, + "acc_stderr": 0.017595971908056573, + "acc_norm": 0.4112388250319285, + "acc_norm_stderr": 0.017595971908056573 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 
0.34814814814814815, + "acc_stderr": 0.041153246103369526, + "acc_norm": 0.34814814814814815, + "acc_norm_stderr": 0.041153246103369526 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3617021276595745, + "acc_stderr": 0.03141082197596241, + "acc_norm": 0.3617021276595745, + "acc_norm_stderr": 0.03141082197596241 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.30120481927710846, + "acc_stderr": 0.03571609230053481, + "acc_norm": 0.30120481927710846, + "acc_norm_stderr": 0.03571609230053481 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.44694533762057875, + "acc_stderr": 0.028237769422085335, + "acc_norm": 0.44694533762057875, + "acc_norm_stderr": 0.028237769422085335 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.40358744394618834, + "acc_stderr": 0.03292802819330313, + "acc_norm": 0.40358744394618834, + "acc_norm_stderr": 0.03292802819330313 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3282442748091603, + "acc_stderr": 0.041184385658062976, + "acc_norm": 0.3282442748091603, + "acc_norm_stderr": 0.041184385658062976 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3181818181818182, + "acc_stderr": 0.03318477333845331, + "acc_norm": 0.3181818181818182, + "acc_norm_stderr": 0.03318477333845331 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.35172413793103446, + "acc_stderr": 0.03979236637497411, + "acc_norm": 0.35172413793103446, + "acc_norm_stderr": 0.03979236637497411 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.041583075330832865, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.041583075330832865 + }, + 
"harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.36554621848739494, + "acc_stderr": 0.03128217706368461, + "acc_norm": 0.36554621848739494, + "acc_norm_stderr": 0.03128217706368461 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3564102564102564, + "acc_stderr": 0.0242831405294673, + "acc_norm": 0.3564102564102564, + "acc_norm_stderr": 0.0242831405294673 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.53, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.53, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.04643454608906275, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.04643454608906275 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3448275862068966, + "acc_stderr": 0.03344283744280457, + "acc_norm": 0.3448275862068966, + "acc_norm_stderr": 0.03344283744280457 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.36451612903225805, + "acc_stderr": 0.02737987122994325, + "acc_norm": 0.36451612903225805, + "acc_norm_stderr": 0.02737987122994325 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5854700854700855, + "acc_stderr": 0.0322739656762378, + "acc_norm": 0.5854700854700855, + "acc_norm_stderr": 0.0322739656762378 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.32452830188679244, + "acc_stderr": 0.028815615713432118, + "acc_norm": 0.32452830188679244, + "acc_norm_stderr": 0.028815615713432118 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4818181818181818, + "acc_stderr": 0.04785964010794916, + "acc_norm": 0.4818181818181818, + "acc_norm_stderr": 0.04785964010794916 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.02831753349606648, + "acc_norm": 0.3148148148148148, + 
"acc_norm_stderr": 0.02831753349606648 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.03757949922943343, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.03757949922943343 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.46766169154228854, + "acc_stderr": 0.035281314729336065, + "acc_norm": 0.46766169154228854, + "acc_norm_stderr": 0.035281314729336065 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.31213872832369943, + "acc_stderr": 0.035331333893236574, + "acc_norm": 0.31213872832369943, + "acc_norm_stderr": 0.035331333893236574 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3253968253968254, + "acc_stderr": 0.024130158299762606, + "acc_norm": 0.3253968253968254, + "acc_norm_stderr": 0.024130158299762606 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3125, + "acc_stderr": 0.038760854559127644, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.038760854559127644 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816505, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816505 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.41329479768786126, + "acc_stderr": 0.02651126136940924, + "acc_norm": 0.41329479768786126, + "acc_norm_stderr": 0.02651126136940924 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3496932515337423, + "acc_stderr": 0.03746668325470021, + "acc_norm": 0.3496932515337423, + "acc_norm_stderr": 0.03746668325470021 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.39197530864197533, + "acc_stderr": 0.027163686038271233, + "acc_norm": 0.39197530864197533, + "acc_norm_stderr": 0.027163686038271233 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 
0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.41968911917098445, + "acc_stderr": 0.035615873276858855, + "acc_norm": 0.41968911917098445, + "acc_norm_stderr": 0.035615873276858855 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.0409698513984367, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.0409698513984367 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3871559633027523, + "acc_stderr": 0.02088423199264345, + "acc_norm": 0.3871559633027523, + "acc_norm_stderr": 0.02088423199264345 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.042857142857142816, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.042857142857142816 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.027914055510468, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.027914055510468 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5702479338842975, + "acc_stderr": 0.04519082021319772, + "acc_norm": 0.5702479338842975, + "acc_norm_stderr": 0.04519082021319772 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.2565789473684211, + "acc_stderr": 0.0355418036802569, + "acc_norm": 0.2565789473684211, + "acc_norm_stderr": 0.0355418036802569 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.018771683893528183, + "acc_norm": 0.3137254901960784, + "acc_norm_stderr": 0.018771683893528183 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.31560283687943264, + "acc_stderr": 0.027724989449509314, + "acc_norm": 0.31560283687943264, + "acc_norm_stderr": 0.027724989449509314 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.35714285714285715, + "acc_stderr": 
0.04547960999764376, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.04547960999764376 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.031141447823536048, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.031141447823536048 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.22793296089385476, + "acc_stderr": 0.014030149950805097, + "acc_norm": 0.22793296089385476, + "acc_norm_stderr": 0.014030149950805097 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.28308823529411764, + "acc_stderr": 0.02736586113151381, + "acc_norm": 0.28308823529411764, + "acc_norm_stderr": 0.02736586113151381 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.24081632653061225, + "acc_stderr": 0.027372942201788153, + "acc_norm": 0.24081632653061225, + "acc_norm_stderr": 0.027372942201788153 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.48945147679324896, + "acc_stderr": 0.032539983791662855, + "acc_norm": 0.48945147679324896, + "acc_norm_stderr": 0.032539983791662855 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.31877444589308995, + "acc_stderr": 0.011901895635786088, + "acc_norm": 0.31877444589308995, + "acc_norm_stderr": 0.011901895635786088 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.31862745098039214, + "acc_stderr": 0.0327028718148208, + "acc_norm": 0.31862745098039214, + "acc_norm_stderr": 0.0327028718148208 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3939393939393939, + "acc_stderr": 0.0381549430868893, + "acc_norm": 0.3939393939393939, + "acc_norm_stderr": 0.0381549430868893 
+ }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2741738066095471, + "mc1_stderr": 0.015616518497219374, + "mc2": 0.4600089007139919, + "mc2_stderr": 0.015856276729730875 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.24321133412042503, + "acc_stderr": 0.014750068360453263, + "acc_norm": 0.2798110979929162, + "acc_norm_stderr": 0.015433715795427778 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 
1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "MNCJihun/Mistral-7B-SlimOrca-eng-kor-combined", + "model_sha": "a9340fcc369bba2e0200a3a378078fa14f4075b3", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/MNCJihun/Mistral-7B-SlimOrca-orca-platy-1k/result_2023-10-23 07:44:01.json b/MNCJihun/Mistral-7B-SlimOrca-orca-platy-1k/result_2023-10-23 07:44:01.json new file mode 100644 index 0000000000000000000000000000000000000000..d6ba16aac915bfe1b99cfbf54aa697633e0be650 --- /dev/null +++ b/MNCJihun/Mistral-7B-SlimOrca-orca-platy-1k/result_2023-10-23 07:44:01.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.28754266211604096, + "acc_stderr": 0.013226719056266127, + "acc_norm": 0.3395904436860068, + "acc_norm_stderr": 0.01383903976282016 + }, + 
"harness|ko_hellaswag|10": { + "acc": 0.36297550288787095, + "acc_stderr": 0.004798751281560832, + "acc_norm": 0.45558653654650466, + "acc_norm_stderr": 0.004970057183367319 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.38011695906432746, + "acc_stderr": 0.037229657413855394, + "acc_norm": 0.38011695906432746, + "acc_norm_stderr": 0.037229657413855394 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5048543689320388, + "acc_stderr": 0.049505043821289195, + "acc_norm": 0.5048543689320388, + "acc_norm_stderr": 0.049505043821289195 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.01757070523925654, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.01757070523925654 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.32592592592592595, + "acc_stderr": 0.040491220417025055, + "acc_norm": 0.32592592592592595, + "acc_norm_stderr": 0.040491220417025055 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3191489361702128, + "acc_stderr": 0.030472973363380056, + "acc_norm": 0.3191489361702128, + "acc_norm_stderr": 0.030472973363380056 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39759036144578314, + "acc_stderr": 0.038099730845402184, + "acc_norm": 0.39759036144578314, + "acc_norm_stderr": 0.038099730845402184 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3665594855305466, + "acc_stderr": 0.02736807824397163, + "acc_norm": 0.3665594855305466, + "acc_norm_stderr": 0.02736807824397163 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.2914798206278027, + "acc_stderr": 0.03050028317654591, + "acc_norm": 0.2914798206278027, + "acc_norm_stderr": 0.03050028317654591 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3893129770992366, + "acc_stderr": 0.04276486542814591, + "acc_norm": 0.3893129770992366, + "acc_norm_stderr": 
0.04276486542814591 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.4797979797979798, + "acc_stderr": 0.03559443565563918, + "acc_norm": 0.4797979797979798, + "acc_norm_stderr": 0.03559443565563918 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.43448275862068964, + "acc_stderr": 0.04130740879555497, + "acc_norm": 0.43448275862068964, + "acc_norm_stderr": 0.04130740879555497 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.04488482852329017, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.04488482852329017 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.032145368597886394, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.032145368597886394 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.43846153846153846, + "acc_stderr": 0.025158266016868568, + "acc_norm": 0.43846153846153846, + "acc_norm_stderr": 0.025158266016868568 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04557239513497751, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04557239513497751 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.35467980295566504, + "acc_stderr": 0.033661244890514495, + "acc_norm": 0.35467980295566504, + "acc_norm_stderr": 0.033661244890514495 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.35161290322580646, + "acc_stderr": 0.027162537826948458, + "acc_norm": 
0.35161290322580646, + "acc_norm_stderr": 0.027162537826948458 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5512820512820513, + "acc_stderr": 0.032583346493868806, + "acc_norm": 0.5512820512820513, + "acc_norm_stderr": 0.032583346493868806 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4377358490566038, + "acc_stderr": 0.030533338430467516, + "acc_norm": 0.4377358490566038, + "acc_norm_stderr": 0.030533338430467516 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.37272727272727274, + "acc_stderr": 0.04631381319425465, + "acc_norm": 0.37272727272727274, + "acc_norm_stderr": 0.04631381319425465 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.29259259259259257, + "acc_stderr": 0.02773896963217609, + "acc_norm": 0.29259259259259257, + "acc_norm_stderr": 0.02773896963217609 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.271523178807947, + "acc_stderr": 0.03631329803969653, + "acc_norm": 0.271523178807947, + "acc_norm_stderr": 0.03631329803969653 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.42786069651741293, + "acc_stderr": 0.03498541988407795, + "acc_norm": 0.42786069651741293, + "acc_norm_stderr": 0.03498541988407795 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3583815028901734, + "acc_stderr": 0.0365634365335316, + "acc_norm": 0.3583815028901734, + "acc_norm_stderr": 0.0365634365335316 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3386243386243386, + "acc_stderr": 0.024373197867983053, + "acc_norm": 0.3386243386243386, + "acc_norm_stderr": 0.024373197867983053 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3263888888888889, + "acc_stderr": 0.03921067198982266, + "acc_norm": 0.3263888888888889, + "acc_norm_stderr": 0.03921067198982266 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.47, + 
"acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.3208092485549133, + "acc_stderr": 0.025131000233647907, + "acc_norm": 0.3208092485549133, + "acc_norm_stderr": 0.025131000233647907 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3558282208588957, + "acc_stderr": 0.03761521380046734, + "acc_norm": 0.3558282208588957, + "acc_norm_stderr": 0.03761521380046734 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3765432098765432, + "acc_stderr": 0.02695934451874779, + "acc_norm": 0.3765432098765432, + "acc_norm_stderr": 0.02695934451874779 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.48186528497409326, + "acc_stderr": 0.03606065001832917, + "acc_norm": 0.48186528497409326, + "acc_norm_stderr": 0.03606065001832917 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.03999423879281336, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.03999423879281336 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.46055045871559636, + "acc_stderr": 0.021370494609995096, + "acc_norm": 0.46055045871559636, + "acc_norm_stderr": 0.021370494609995096 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4126984126984127, + "acc_stderr": 0.04403438954768176, + "acc_norm": 0.4126984126984127, + "acc_norm_stderr": 0.04403438954768176 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.41830065359477125, + "acc_stderr": 0.028245134024387292, + "acc_norm": 0.41830065359477125, + "acc_norm_stderr": 0.028245134024387292 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 
0.47107438016528924, + "acc_stderr": 0.04556710331269498, + "acc_norm": 0.47107438016528924, + "acc_norm_stderr": 0.04556710331269498 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3618421052631579, + "acc_stderr": 0.03910525752849727, + "acc_norm": 0.3618421052631579, + "acc_norm_stderr": 0.03910525752849727 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.26633986928104575, + "acc_stderr": 0.017883188134667192, + "acc_norm": 0.26633986928104575, + "acc_norm_stderr": 0.017883188134667192 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.26595744680851063, + "acc_stderr": 0.026358065698880592, + "acc_norm": 0.26595744680851063, + "acc_norm_stderr": 0.026358065698880592 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.24107142857142858, + "acc_stderr": 0.04059867246952685, + "acc_norm": 0.24107142857142858, + "acc_norm_stderr": 0.04059867246952685 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4583333333333333, + "acc_stderr": 0.03398110890294636, + "acc_norm": 0.4583333333333333, + "acc_norm_stderr": 0.03398110890294636 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2547486033519553, + "acc_stderr": 0.014572650383409167, + "acc_norm": 0.2547486033519553, + "acc_norm_stderr": 0.014572650383409167 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4227941176470588, + "acc_stderr": 0.030008562845003476, + "acc_norm": 0.4227941176470588, + "acc_norm_stderr": 0.030008562845003476 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4163265306122449, + "acc_stderr": 0.03155782816556163, + "acc_norm": 0.4163265306122449, + "acc_norm_stderr": 
0.03155782816556163 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.41350210970464135, + "acc_stderr": 0.03205649904851859, + "acc_norm": 0.41350210970464135, + "acc_norm_stderr": 0.03205649904851859 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.24902216427640156, + "acc_stderr": 0.01104489226404077, + "acc_norm": 0.24902216427640156, + "acc_norm_stderr": 0.01104489226404077 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.3235294117647059, + "acc_stderr": 0.03283472056108567, + "acc_norm": 0.3235294117647059, + "acc_norm_stderr": 0.03283472056108567 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3575757575757576, + "acc_stderr": 0.03742597043806586, + "acc_norm": 0.3575757575757576, + "acc_norm_stderr": 0.03742597043806586 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3023255813953488, + "mc1_stderr": 0.01607750926613303, + "mc2": 0.4664193395730685, + "mc2_stderr": 0.015885964841438872 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.345926800472255, + "acc_stderr": 0.016353853414347575, + "acc_norm": 0.4132231404958678, + "acc_norm_stderr": 0.016929480234495226 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + 
"harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "MNCJihun/Mistral-7B-SlimOrca-orca-platy-1k", + "model_sha": 
"96fceca38b3714b0ae8ec6dc120f13036eaeb69c", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/MNCJihun/Mistral-7B-eng-kor-cot-combined/result_2023-10-24 01:05:12.json b/MNCJihun/Mistral-7B-eng-kor-cot-combined/result_2023-10-24 01:05:12.json new file mode 100644 index 0000000000000000000000000000000000000000..03851aa9d089b62ce37b769ee184e26e5cc87f2a --- /dev/null +++ b/MNCJihun/Mistral-7B-eng-kor-cot-combined/result_2023-10-24 01:05:12.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.28668941979522183, + "acc_stderr": 0.01321498632927476, + "acc_norm": 0.33447098976109213, + "acc_norm_stderr": 0.013787460322441374 + }, + "harness|ko_hellaswag|10": { + "acc": 0.34096793467436765, + "acc_stderr": 0.004730658073041555, + "acc_norm": 0.4268074088826927, + "acc_norm_stderr": 0.004936029827672039 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.36257309941520466, + "acc_stderr": 0.0368713061556206, + "acc_norm": 0.36257309941520466, + "acc_norm_stderr": 0.0368713061556206 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.32038834951456313, + "acc_stderr": 0.046202840822800406, + "acc_norm": 0.32038834951456313, + "acc_norm_stderr": 0.046202840822800406 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3997445721583653, + "acc_stderr": 0.01751684790705327, + "acc_norm": 0.3997445721583653, + "acc_norm_stderr": 0.01751684790705327 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.31851851851851853, + "acc_stderr": 0.0402477840197711, + "acc_norm": 0.31851851851851853, + "acc_norm_stderr": 0.0402477840197711 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3191489361702128, + "acc_stderr": 
0.030472973363380045, + "acc_norm": 0.3191489361702128, + "acc_norm_stderr": 0.030472973363380045 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3313253012048193, + "acc_stderr": 0.03664314777288085, + "acc_norm": 0.3313253012048193, + "acc_norm_stderr": 0.03664314777288085 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3858520900321543, + "acc_stderr": 0.027648149599751464, + "acc_norm": 0.3858520900321543, + "acc_norm_stderr": 0.027648149599751464 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.34977578475336324, + "acc_stderr": 0.03200736719484504, + "acc_norm": 0.34977578475336324, + "acc_norm_stderr": 0.03200736719484504 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.25190839694656486, + "acc_stderr": 0.03807387116306086, + "acc_norm": 0.25190839694656486, + "acc_norm_stderr": 0.03807387116306086 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.35353535353535354, + "acc_stderr": 0.03406086723547153, + "acc_norm": 0.35353535353535354, + "acc_norm_stderr": 0.03406086723547153 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.31724137931034485, + "acc_stderr": 0.03878352372138622, + "acc_norm": 0.31724137931034485, + "acc_norm_stderr": 0.03878352372138622 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237653, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237653 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.31512605042016806, + "acc_stderr": 0.03017680828897434, + "acc_norm": 0.31512605042016806, + "acc_norm_stderr": 0.03017680828897434 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.023901157979402538, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.023901157979402538 + }, + 
"harness|ko_mmlu_computer_security|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.04616631111801714, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.04616631111801714 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2660098522167488, + "acc_stderr": 0.03108982600293753, + "acc_norm": 0.2660098522167488, + "acc_norm_stderr": 0.03108982600293753 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.32903225806451614, + "acc_stderr": 0.02672949906834996, + "acc_norm": 0.32903225806451614, + "acc_norm_stderr": 0.02672949906834996 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5042735042735043, + "acc_stderr": 0.03275489264382132, + "acc_norm": 0.5042735042735043, + "acc_norm_stderr": 0.03275489264382132 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2943396226415094, + "acc_stderr": 0.028049186315695248, + "acc_norm": 0.2943396226415094, + "acc_norm_stderr": 0.028049186315695248 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.39090909090909093, + "acc_stderr": 0.04673752333670237, + "acc_norm": 0.39090909090909093, + "acc_norm_stderr": 0.04673752333670237 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.027940457136228412, + "acc_norm": 0.3, + "acc_norm_stderr": 0.027940457136228412 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.25165562913907286, + "acc_stderr": 0.03543304234389985, + "acc_norm": 0.25165562913907286, + "acc_norm_stderr": 0.03543304234389985 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.43283582089552236, + "acc_stderr": 0.0350349092367328, + "acc_norm": 0.43283582089552236, + "acc_norm_stderr": 0.0350349092367328 + }, + 
"harness|ko_mmlu_college_medicine|5": { + "acc": 0.23121387283236994, + "acc_stderr": 0.0321473730202947, + "acc_norm": 0.23121387283236994, + "acc_norm_stderr": 0.0321473730202947 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3201058201058201, + "acc_stderr": 0.024026846392873502, + "acc_norm": 0.3201058201058201, + "acc_norm_stderr": 0.024026846392873502 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2916666666666667, + "acc_stderr": 0.038009680605548574, + "acc_norm": 0.2916666666666667, + "acc_norm_stderr": 0.038009680605548574 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.45, + "acc_stderr": 0.04999999999999998, + "acc_norm": 0.45, + "acc_norm_stderr": 0.04999999999999998 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.3439306358381503, + "acc_stderr": 0.025574123786546648, + "acc_norm": 0.3439306358381503, + "acc_norm_stderr": 0.025574123786546648 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.32515337423312884, + "acc_stderr": 0.03680350371286461, + "acc_norm": 0.32515337423312884, + "acc_norm_stderr": 0.03680350371286461 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.026869490744815247, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.026869490744815247 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.3626943005181347, + "acc_stderr": 0.034697137917043715, + "acc_norm": 0.3626943005181347, + "acc_norm_stderr": 0.034697137917043715 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2982456140350877, + "acc_stderr": 0.04303684033537315, + "acc_norm": 0.2982456140350877, + "acc_norm_stderr": 
0.04303684033537315 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3651376146788991, + "acc_stderr": 0.020642801454383995, + "acc_norm": 0.3651376146788991, + "acc_norm_stderr": 0.020642801454383995 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.0393253768039287, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.0393253768039287 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3235294117647059, + "acc_stderr": 0.026787453111906532, + "acc_norm": 0.3235294117647059, + "acc_norm_stderr": 0.026787453111906532 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.4462809917355372, + "acc_stderr": 0.0453793517794788, + "acc_norm": 0.4462809917355372, + "acc_norm_stderr": 0.0453793517794788 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.28289473684210525, + "acc_stderr": 0.03665349695640767, + "acc_norm": 0.28289473684210525, + "acc_norm_stderr": 0.03665349695640767 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.32189542483660133, + "acc_stderr": 0.018901015322093085, + "acc_norm": 0.32189542483660133, + "acc_norm_stderr": 0.018901015322093085 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2907801418439716, + "acc_stderr": 0.027090664368353178, + "acc_norm": 0.2907801418439716, + "acc_norm_stderr": 0.027090664368353178 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.24107142857142858, + "acc_stderr": 0.04059867246952688, + "acc_norm": 0.24107142857142858, + "acc_norm_stderr": 0.04059867246952688 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2916666666666667, + "acc_stderr": 0.03099866630456052, + "acc_norm": 0.2916666666666667, + "acc_norm_stderr": 0.03099866630456052 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.22793296089385476, + "acc_stderr": 
0.014030149950805097, + "acc_norm": 0.22793296089385476, + "acc_norm_stderr": 0.014030149950805097 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.34191176470588236, + "acc_stderr": 0.02881472242225418, + "acc_norm": 0.34191176470588236, + "acc_norm_stderr": 0.02881472242225418 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3469387755102041, + "acc_stderr": 0.030472526026726492, + "acc_norm": 0.3469387755102041, + "acc_norm_stderr": 0.030472526026726492 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.4219409282700422, + "acc_stderr": 0.032148146302403695, + "acc_norm": 0.4219409282700422, + "acc_norm_stderr": 0.032148146302403695 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.29986962190352023, + "acc_stderr": 0.011702660860193989, + "acc_norm": 0.29986962190352023, + "acc_norm_stderr": 0.011702660860193989 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.3431372549019608, + "acc_stderr": 0.03332139944668086, + "acc_norm": 0.3431372549019608, + "acc_norm_stderr": 0.03332139944668086 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3939393939393939, + "acc_stderr": 0.03815494308688929, + "acc_norm": 0.3939393939393939, + "acc_norm_stderr": 0.03815494308688929 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2864137086903305, + "mc1_stderr": 0.015826142439502342, + "mc2": 0.46556936650012803, + "mc2_stderr": 0.01608055615378503 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.20188902007083825, + "acc_stderr": 0.01380075389577743, + "acc_norm": 0.21959858323494688, + "acc_norm_stderr": 0.014232743085580275 + } + }, + "versions": { + "all": 
0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + 
"harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "MNCJihun/Mistral-7B-eng-kor-cot-combined", + "model_sha": "ad4d7c60244d0f1e0cc11d44be9b14c3354df448", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/MNCJihun/Mistral-7B-guanaco-1k-orca-platy-1k-ep4/result_2023-10-23 06:43:50.json b/MNCJihun/Mistral-7B-guanaco-1k-orca-platy-1k-ep4/result_2023-10-23 06:43:50.json new file mode 100644 index 0000000000000000000000000000000000000000..10b174279193daba664e736aec18b78fb1b15fbf --- /dev/null +++ b/MNCJihun/Mistral-7B-guanaco-1k-orca-platy-1k-ep4/result_2023-10-23 06:43:50.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2960750853242321, + "acc_stderr": 0.013340916085246263, + "acc_norm": 0.3319112627986348, + "acc_norm_stderr": 0.01376098820088054 + }, + "harness|ko_hellaswag|10": { + "acc": 0.36367257518422624, + "acc_stderr": 0.004800728138792386, + "acc_norm": 0.4591714797849034, + "acc_norm_stderr": 0.004973117975062484 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4152046783625731, + "acc_stderr": 0.03779275945503201, + "acc_norm": 0.4152046783625731, + "acc_norm_stderr": 
0.03779275945503201 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.42718446601941745, + "acc_stderr": 0.04897957737781169, + "acc_norm": 0.42718446601941745, + "acc_norm_stderr": 0.04897957737781169 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.45849297573435505, + "acc_stderr": 0.017818248603465568, + "acc_norm": 0.45849297573435505, + "acc_norm_stderr": 0.017818248603465568 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37777777777777777, + "acc_stderr": 0.04188307537595853, + "acc_norm": 0.37777777777777777, + "acc_norm_stderr": 0.04188307537595853 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.32340425531914896, + "acc_stderr": 0.030579442773610337, + "acc_norm": 0.32340425531914896, + "acc_norm_stderr": 0.030579442773610337 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3373493975903614, + "acc_stderr": 0.036807836907275814, + "acc_norm": 0.3373493975903614, + "acc_norm_stderr": 0.036807836907275814 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3890675241157556, + "acc_stderr": 0.027690337536485372, + "acc_norm": 0.3890675241157556, + "acc_norm_stderr": 0.027690337536485372 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.39461883408071746, + "acc_stderr": 0.03280400504755291, + "acc_norm": 0.39461883408071746, + "acc_norm_stderr": 0.03280400504755291 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3435114503816794, + "acc_stderr": 0.041649760719448786, + "acc_norm": 0.3435114503816794, + "acc_norm_stderr": 0.041649760719448786 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3939393939393939, + "acc_stderr": 0.03481285338232963, + "acc_norm": 0.3939393939393939, + "acc_norm_stderr": 
0.03481285338232963 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.36551724137931035, + "acc_stderr": 0.04013124195424386, + "acc_norm": 0.36551724137931035, + "acc_norm_stderr": 0.04013124195424386 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.041583075330832865, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.041583075330832865 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3445378151260504, + "acc_stderr": 0.030868682604121633, + "acc_norm": 0.3445378151260504, + "acc_norm_stderr": 0.030868682604121633 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3564102564102564, + "acc_stderr": 0.0242831405294673, + "acc_norm": 0.3564102564102564, + "acc_norm_stderr": 0.0242831405294673 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.04766075165356461, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.04766075165356461 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.35960591133004927, + "acc_stderr": 0.03376458246509567, + "acc_norm": 0.35960591133004927, + "acc_norm_stderr": 0.03376458246509567 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3935483870967742, + "acc_stderr": 0.027791878753132274, + "acc_norm": 0.3935483870967742, + "acc_norm_stderr": 0.027791878753132274 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6581196581196581, + "acc_stderr": 0.03107502852650775, + "acc_norm": 0.6581196581196581, + "acc_norm_stderr": 0.03107502852650775 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3622641509433962, + "acc_stderr": 0.029582245128384303, + 
"acc_norm": 0.3622641509433962, + "acc_norm_stderr": 0.029582245128384303 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3037037037037037, + "acc_stderr": 0.028037929969114986, + "acc_norm": 0.3037037037037037, + "acc_norm_stderr": 0.028037929969114986 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.03757949922943343, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.03757949922943343 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.4129353233830846, + "acc_stderr": 0.03481520803367348, + "acc_norm": 0.4129353233830846, + "acc_norm_stderr": 0.03481520803367348 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2774566473988439, + "acc_stderr": 0.034140140070440354, + "acc_norm": 0.2774566473988439, + "acc_norm_stderr": 0.034140140070440354 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.328042328042328, + "acc_stderr": 0.02418049716437691, + "acc_norm": 0.328042328042328, + "acc_norm_stderr": 0.02418049716437691 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2708333333333333, + "acc_stderr": 0.03716177437566017, + "acc_norm": 0.2708333333333333, + "acc_norm_stderr": 0.03716177437566017 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.3670520231213873, + "acc_stderr": 0.02595005433765408, + "acc_norm": 0.3670520231213873, + "acc_norm_stderr": 0.02595005433765408 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3619631901840491, + "acc_stderr": 
0.037757007291414416, + "acc_norm": 0.3619631901840491, + "acc_norm_stderr": 0.037757007291414416 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.38580246913580246, + "acc_stderr": 0.027085401226132143, + "acc_norm": 0.38580246913580246, + "acc_norm_stderr": 0.027085401226132143 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.40414507772020725, + "acc_stderr": 0.035415085788840193, + "acc_norm": 0.40414507772020725, + "acc_norm_stderr": 0.035415085788840193 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2982456140350877, + "acc_stderr": 0.043036840335373146, + "acc_norm": 0.2982456140350877, + "acc_norm_stderr": 0.043036840335373146 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3871559633027523, + "acc_stderr": 0.02088423199264345, + "acc_norm": 0.3871559633027523, + "acc_norm_stderr": 0.02088423199264345 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.21428571428571427, + "acc_stderr": 0.03670066451047181, + "acc_norm": 0.21428571428571427, + "acc_norm_stderr": 0.03670066451047181 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3790849673202614, + "acc_stderr": 0.027780141207023334, + "acc_norm": 0.3790849673202614, + "acc_norm_stderr": 0.027780141207023334 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6115702479338843, + "acc_stderr": 0.04449270350068382, + "acc_norm": 0.6115702479338843, + "acc_norm_stderr": 0.04449270350068382 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3223684210526316, + "acc_stderr": 0.03803510248351585, + "acc_norm": 0.3223684210526316, + "acc_norm_stderr": 0.03803510248351585 + }, + "harness|ko_mmlu_professional_psychology|5": { + 
"acc": 0.3480392156862745, + "acc_stderr": 0.019270998708223977, + "acc_norm": 0.3480392156862745, + "acc_norm_stderr": 0.019270998708223977 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3191489361702128, + "acc_stderr": 0.027807990141320196, + "acc_norm": 0.3191489361702128, + "acc_norm_stderr": 0.027807990141320196 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2767857142857143, + "acc_stderr": 0.04246624336697625, + "acc_norm": 0.2767857142857143, + "acc_norm_stderr": 0.04246624336697625 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.25, + "acc_stderr": 0.029531221160930918, + "acc_norm": 0.25, + "acc_norm_stderr": 0.029531221160930918 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23910614525139665, + "acc_stderr": 0.014265554192331149, + "acc_norm": 0.23910614525139665, + "acc_norm_stderr": 0.014265554192331149 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.2867647058823529, + "acc_stderr": 0.027472274473233818, + "acc_norm": 0.2867647058823529, + "acc_norm_stderr": 0.027472274473233818 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.32653061224489793, + "acc_stderr": 0.030021056238440327, + "acc_norm": 0.32653061224489793, + "acc_norm_stderr": 0.030021056238440327 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.47257383966244726, + "acc_stderr": 0.03249822718301303, + "acc_norm": 0.47257383966244726, + "acc_norm_stderr": 0.03249822718301303 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2940026075619296, + "acc_stderr": 0.011636062953698604, + "acc_norm": 0.2940026075619296, + "acc_norm_stderr": 0.011636062953698604 + }, + 
"harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.3872549019607843, + "acc_stderr": 0.03418931233833344, + "acc_norm": 0.3872549019607843, + "acc_norm_stderr": 0.03418931233833344 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.43636363636363634, + "acc_stderr": 0.03872592983524753, + "acc_norm": 0.43636363636363634, + "acc_norm_stderr": 0.03872592983524753 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.29865361077111385, + "mc1_stderr": 0.016021570613768545, + "mc2": 0.4747810026483803, + "mc2_stderr": 0.016087880887613513 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3010625737898465, + "acc_stderr": 0.015771113299945457, + "acc_norm": 0.3140495867768595, + "acc_norm_stderr": 0.015957332434295066 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 
1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "MNCJihun/Mistral-7B-guanaco-1k-orca-platy-1k-ep4", + "model_sha": "13e5692b7a084265617f75f81209dce34e414489", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/MNCJihun/Mistral-7B-orca-platy-2k/result_2023-10-23 06:43:37.json b/MNCJihun/Mistral-7B-orca-platy-2k/result_2023-10-23 06:43:37.json new file mode 100644 index 
0000000000000000000000000000000000000000..7c1a49fa48b43b81cbe4f0d9ac40db3b58e6cb21 --- /dev/null +++ b/MNCJihun/Mistral-7B-orca-platy-2k/result_2023-10-23 06:43:37.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2977815699658703, + "acc_stderr": 0.013363080107244489, + "acc_norm": 0.33361774744027306, + "acc_norm_stderr": 0.013778687054176541 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3686516630153356, + "acc_stderr": 0.004814532642574648, + "acc_norm": 0.46086436964748057, + "acc_norm_stderr": 0.004974473255391268 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4269005847953216, + "acc_stderr": 0.03793620616529918, + "acc_norm": 0.4269005847953216, + "acc_norm_stderr": 0.03793620616529918 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.39805825242718446, + "acc_stderr": 0.0484674825397724, + "acc_norm": 0.39805825242718446, + "acc_norm_stderr": 0.0484674825397724 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.41890166028097064, + "acc_stderr": 0.01764320505237717, + "acc_norm": 0.41890166028097064, + "acc_norm_stderr": 0.01764320505237717 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.31851851851851853, + "acc_stderr": 0.04024778401977109, + "acc_norm": 0.31851851851851853, + "acc_norm_stderr": 0.04024778401977109 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932269, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932269 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2723404255319149, + "acc_stderr": 0.029101290698386694, + "acc_norm": 0.2723404255319149, + "acc_norm_stderr": 0.029101290698386694 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3614457831325301, + "acc_stderr": 0.0374005938202932, + "acc_norm": 0.3614457831325301, + "acc_norm_stderr": 0.0374005938202932 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.44694533762057875, + "acc_stderr": 0.028237769422085342, + "acc_norm": 0.44694533762057875, + "acc_norm_stderr": 
0.028237769422085342 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4080717488789238, + "acc_stderr": 0.03298574607842821, + "acc_norm": 0.4080717488789238, + "acc_norm_stderr": 0.03298574607842821 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.33587786259541985, + "acc_stderr": 0.041423137719966634, + "acc_norm": 0.33587786259541985, + "acc_norm_stderr": 0.041423137719966634 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.36363636363636365, + "acc_stderr": 0.034273086529999344, + "acc_norm": 0.36363636363636365, + "acc_norm_stderr": 0.034273086529999344 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4206896551724138, + "acc_stderr": 0.0411391498118926, + "acc_norm": 0.4206896551724138, + "acc_norm_stderr": 0.0411391498118926 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.17647058823529413, + "acc_stderr": 0.03793281185307809, + "acc_norm": 0.17647058823529413, + "acc_norm_stderr": 0.03793281185307809 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3697478991596639, + "acc_stderr": 0.03135709599613591, + "acc_norm": 0.3697478991596639, + "acc_norm_stderr": 0.03135709599613591 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.41794871794871796, + "acc_stderr": 0.025007329882461224, + "acc_norm": 0.41794871794871796, + "acc_norm_stderr": 0.025007329882461224 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.59, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.59, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.047128212574267705, + "acc_norm": 0.3888888888888889, + 
"acc_norm_stderr": 0.047128212574267705 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3251231527093596, + "acc_stderr": 0.032957975663112704, + "acc_norm": 0.3251231527093596, + "acc_norm_stderr": 0.032957975663112704 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.38064516129032255, + "acc_stderr": 0.027621717832907036, + "acc_norm": 0.38064516129032255, + "acc_norm_stderr": 0.027621717832907036 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6025641025641025, + "acc_stderr": 0.03205953453789293, + "acc_norm": 0.6025641025641025, + "acc_norm_stderr": 0.03205953453789293 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3622641509433962, + "acc_stderr": 0.0295822451283843, + "acc_norm": 0.3622641509433962, + "acc_norm_stderr": 0.0295822451283843 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4, + "acc_stderr": 0.0469237132203465, + "acc_norm": 0.4, + "acc_norm_stderr": 0.0469237132203465 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.02784081149587194, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.02784081149587194 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33112582781456956, + "acc_stderr": 0.038425817186598696, + "acc_norm": 0.33112582781456956, + "acc_norm_stderr": 0.038425817186598696 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5223880597014925, + "acc_stderr": 0.03531987930208732, + "acc_norm": 0.5223880597014925, + "acc_norm_stderr": 0.03531987930208732 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3468208092485549, + "acc_stderr": 0.036291466701596636, + "acc_norm": 0.3468208092485549, + "acc_norm_stderr": 0.036291466701596636 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2830687830687831, + "acc_stderr": 0.023201392938194974, + "acc_norm": 0.2830687830687831, + "acc_norm_stderr": 0.023201392938194974 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3125, + "acc_stderr": 
0.038760854559127644, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.038760854559127644 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.3988439306358382, + "acc_stderr": 0.026362437574546545, + "acc_norm": 0.3988439306358382, + "acc_norm_stderr": 0.026362437574546545 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.37423312883435583, + "acc_stderr": 0.038020681028996146, + "acc_norm": 0.37423312883435583, + "acc_norm_stderr": 0.038020681028996146 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.41975308641975306, + "acc_stderr": 0.027460099557005138, + "acc_norm": 0.41975308641975306, + "acc_norm_stderr": 0.027460099557005138 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.42487046632124353, + "acc_stderr": 0.0356747133521254, + "acc_norm": 0.42487046632124353, + "acc_norm_stderr": 0.0356747133521254 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.30701754385964913, + "acc_stderr": 0.04339138322579861, + "acc_norm": 0.30701754385964913, + "acc_norm_stderr": 0.04339138322579861 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.43302752293577984, + "acc_stderr": 0.021244146569074345, + "acc_norm": 0.43302752293577984, + "acc_norm_stderr": 0.021244146569074345 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.36507936507936506, + "acc_stderr": 0.043062412591271526, + "acc_norm": 0.36507936507936506, + "acc_norm_stderr": 0.043062412591271526 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3300653594771242, + 
"acc_stderr": 0.02692565465361569, + "acc_norm": 0.3300653594771242, + "acc_norm_stderr": 0.02692565465361569 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5702479338842975, + "acc_stderr": 0.04519082021319773, + "acc_norm": 0.5702479338842975, + "acc_norm_stderr": 0.04519082021319773 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.25, + "acc_stderr": 0.03523807393012047, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03523807393012047 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.32679738562091504, + "acc_stderr": 0.018975427920507215, + "acc_norm": 0.32679738562091504, + "acc_norm_stderr": 0.018975427920507215 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.028121636040639882, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.028121636040639882 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.32142857142857145, + "acc_stderr": 0.044328040552915206, + "acc_norm": 0.32142857142857145, + "acc_norm_stderr": 0.044328040552915206 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3287037037037037, + "acc_stderr": 0.032036140846700596, + "acc_norm": 0.3287037037037037, + "acc_norm_stderr": 0.032036140846700596 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2435754189944134, + "acc_stderr": 0.01435591196476787, + "acc_norm": 0.2435754189944134, + "acc_norm_stderr": 0.01435591196476787 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 
0.38235294117647056, + "acc_stderr": 0.029520095697687765, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.029520095697687765 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2612244897959184, + "acc_stderr": 0.028123429335142804, + "acc_norm": 0.2612244897959184, + "acc_norm_stderr": 0.028123429335142804 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.48945147679324896, + "acc_stderr": 0.032539983791662855, + "acc_norm": 0.48945147679324896, + "acc_norm_stderr": 0.032539983791662855 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.33116036505867014, + "acc_stderr": 0.012020128195985759, + "acc_norm": 0.33116036505867014, + "acc_norm_stderr": 0.012020128195985759 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.034849415144292316, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.034849415144292316 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4303030303030303, + "acc_stderr": 0.03866225962879077, + "acc_norm": 0.4303030303030303, + "acc_norm_stderr": 0.03866225962879077 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2962056303549572, + "mc1_stderr": 0.01598359510181139, + "mc2": 0.4593881639963632, + "mc2_stderr": 0.01579718957910925 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.35182998819362454, + "acc_stderr": 0.016418206451218057, + "acc_norm": 0.39669421487603307, + "acc_norm_stderr": 0.016819438642971408 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + 
"harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + 
"harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "MNCJihun/Mistral-7B-orca-platy-2k", + "model_sha": "45eb0f68911f65b3a5ac83a851c716add059bf5a", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/MNCJihunKim/MIstral-7B-SlimOrca-OP-2k/result_2023-10-26 01:19:41.json b/MNCJihunKim/MIstral-7B-SlimOrca-OP-2k/result_2023-10-26 01:19:41.json new file mode 100644 index 0000000000000000000000000000000000000000..d5cdccf95db237450070f18c4b363c47bbd0eedf --- /dev/null +++ b/MNCJihunKim/MIstral-7B-SlimOrca-OP-2k/result_2023-10-26 01:19:41.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3174061433447099, + "acc_stderr": 0.01360223908803817, + "acc_norm": 0.35238907849829354, + "acc_norm_stderr": 0.01396014260059868 + }, + "harness|ko_hellaswag|10": { + "acc": 0.36536546504680345, + "acc_stderr": 0.004805483767055344, + "acc_norm": 0.45648277235610435, + "acc_norm_stderr": 0.004970846697552307 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.03811079669833531, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.03811079669833531 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4854368932038835, + "acc_stderr": 0.04948637324026637, + "acc_norm": 0.4854368932038835, + "acc_norm_stderr": 0.04948637324026637 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4355044699872286, + "acc_stderr": 0.01773058992792662, + "acc_norm": 0.4355044699872286, + "acc_norm_stderr": 0.01773058992792662 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.31851851851851853, + "acc_stderr": 0.0402477840197711, + "acc_norm": 0.31851851851851853, + 
"acc_norm_stderr": 0.0402477840197711 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421255, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421255 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3829787234042553, + "acc_stderr": 0.03177821250236922, + "acc_norm": 0.3829787234042553, + "acc_norm_stderr": 0.03177821250236922 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39759036144578314, + "acc_stderr": 0.038099730845402184, + "acc_norm": 0.39759036144578314, + "acc_norm_stderr": 0.038099730845402184 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.47266881028938906, + "acc_stderr": 0.02835563356832818, + "acc_norm": 0.47266881028938906, + "acc_norm_stderr": 0.02835563356832818 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.36771300448430494, + "acc_stderr": 0.03236198350928276, + "acc_norm": 0.36771300448430494, + "acc_norm_stderr": 0.03236198350928276 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3969465648854962, + "acc_stderr": 0.04291135671009224, + "acc_norm": 0.3969465648854962, + "acc_norm_stderr": 0.04291135671009224 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.48484848484848486, + "acc_stderr": 0.03560716516531061, + "acc_norm": 0.48484848484848486, + "acc_norm_stderr": 0.03560716516531061 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4413793103448276, + "acc_stderr": 0.04137931034482758, + "acc_norm": 0.4413793103448276, + "acc_norm_stderr": 0.04137931034482758 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.041583075330832865, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.041583075330832865 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4831932773109244, + "acc_stderr": 0.03246013680375308, + 
"acc_norm": 0.4831932773109244, + "acc_norm_stderr": 0.03246013680375308 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4128205128205128, + "acc_stderr": 0.024962683564331817, + "acc_norm": 0.4128205128205128, + "acc_norm_stderr": 0.024962683564331817 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5370370370370371, + "acc_stderr": 0.04820403072760628, + "acc_norm": 0.5370370370370371, + "acc_norm_stderr": 0.04820403072760628 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4088669950738916, + "acc_stderr": 0.03459058815883231, + "acc_norm": 0.4088669950738916, + "acc_norm_stderr": 0.03459058815883231 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.38387096774193546, + "acc_stderr": 0.02766618207553963, + "acc_norm": 0.38387096774193546, + "acc_norm_stderr": 0.02766618207553963 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6324786324786325, + "acc_stderr": 0.031585391577456365, + "acc_norm": 0.6324786324786325, + "acc_norm_stderr": 0.031585391577456365 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.41509433962264153, + "acc_stderr": 0.030325945789286102, + "acc_norm": 0.41509433962264153, + "acc_norm_stderr": 0.030325945789286102 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.44545454545454544, + "acc_stderr": 0.047605488214603246, + "acc_norm": 0.44545454545454544, + "acc_norm_stderr": 0.047605488214603246 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3074074074074074, + "acc_stderr": 0.028133252578815646, + "acc_norm": 0.3074074074074074, + "acc_norm_stderr": 0.028133252578815646 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 
0.271523178807947, + "acc_stderr": 0.036313298039696525, + "acc_norm": 0.271523178807947, + "acc_norm_stderr": 0.036313298039696525 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5920398009950248, + "acc_stderr": 0.03475116365194092, + "acc_norm": 0.5920398009950248, + "acc_norm_stderr": 0.03475116365194092 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3815028901734104, + "acc_stderr": 0.03703851193099521, + "acc_norm": 0.3815028901734104, + "acc_norm_stderr": 0.03703851193099521 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3835978835978836, + "acc_stderr": 0.025043757318520196, + "acc_norm": 0.3835978835978836, + "acc_norm_stderr": 0.025043757318520196 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.04016660030451233, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.04016660030451233 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.53, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.53, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.47109826589595377, + "acc_stderr": 0.026874085883518348, + "acc_norm": 0.47109826589595377, + "acc_norm_stderr": 0.026874085883518348 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4233128834355828, + "acc_stderr": 0.038818912133343826, + "acc_norm": 0.4233128834355828, + "acc_norm_stderr": 0.038818912133343826 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4228395061728395, + "acc_stderr": 0.027487472980871595, + "acc_norm": 0.4228395061728395, + "acc_norm_stderr": 0.027487472980871595 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": 
{ + "acc": 0.42487046632124353, + "acc_stderr": 0.035674713352125395, + "acc_norm": 0.42487046632124353, + "acc_norm_stderr": 0.035674713352125395 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.04227054451232199, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.04227054451232199 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.44036697247706424, + "acc_stderr": 0.021284310623761547, + "acc_norm": 0.44036697247706424, + "acc_norm_stderr": 0.021284310623761547 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.36507936507936506, + "acc_stderr": 0.043062412591271526, + "acc_norm": 0.36507936507936506, + "acc_norm_stderr": 0.043062412591271526 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4934640522875817, + "acc_stderr": 0.028627470550556054, + "acc_norm": 0.4934640522875817, + "acc_norm_stderr": 0.028627470550556054 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5867768595041323, + "acc_stderr": 0.04495087843548408, + "acc_norm": 0.5867768595041323, + "acc_norm_stderr": 0.04495087843548408 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.39473684210526316, + "acc_stderr": 0.03977749934622074, + "acc_norm": 0.39473684210526316, + "acc_norm_stderr": 0.03977749934622074 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3415032679738562, + "acc_stderr": 0.01918463932809249, + "acc_norm": 0.3415032679738562, + "acc_norm_stderr": 0.01918463932809249 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3120567375886525, + "acc_stderr": 0.02764012054516993, + "acc_norm": 0.3120567375886525, + "acc_norm_stderr": 0.02764012054516993 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.33035714285714285, + "acc_stderr": 0.04464285714285713, + "acc_norm": 0.33035714285714285, + "acc_norm_stderr": 
0.04464285714285713 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.36574074074074076, + "acc_stderr": 0.03284738857647207, + "acc_norm": 0.36574074074074076, + "acc_norm_stderr": 0.03284738857647207 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24581005586592178, + "acc_stderr": 0.014400296429225608, + "acc_norm": 0.24581005586592178, + "acc_norm_stderr": 0.014400296429225608 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.53, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.53, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.36764705882352944, + "acc_stderr": 0.029289413409403196, + "acc_norm": 0.36764705882352944, + "acc_norm_stderr": 0.029289413409403196 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4857142857142857, + "acc_stderr": 0.03199615232806287, + "acc_norm": 0.4857142857142857, + "acc_norm_stderr": 0.03199615232806287 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5358649789029536, + "acc_stderr": 0.03246338898055659, + "acc_norm": 0.5358649789029536, + "acc_norm_stderr": 0.03246338898055659 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.318122555410691, + "acc_stderr": 0.011895407281104097, + "acc_norm": 0.318122555410691, + "acc_norm_stderr": 0.011895407281104097 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.3872549019607843, + "acc_stderr": 0.03418931233833344, + "acc_norm": 0.3872549019607843, + "acc_norm_stderr": 0.03418931233833344 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.47878787878787876, + "acc_stderr": 0.03900828913737302, + "acc_norm": 0.47878787878787876, + "acc_norm_stderr": 0.03900828913737302 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.29865361077111385, + "mc1_stderr": 
0.016021570613768542, + "mc2": 0.4631702412075074, + "mc2_stderr": 0.01580874554216882 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3789846517119244, + "acc_stderr": 0.01667926068422929, + "acc_norm": 0.43683589138134593, + "acc_norm_stderr": 0.017052633559856076 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + 
"harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "MNCJihunKim/MIstral-7B-SlimOrca-OP-2k", + "model_sha": "339ce8fcda3879a2a6e0dbe0ffb06d1f0be9fd15", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/MNCJihunKim/Mistral-7B-OpenOrca-orca-platy-out1kover/result_2023-10-25 15:13:11.json b/MNCJihunKim/Mistral-7B-OpenOrca-orca-platy-out1kover/result_2023-10-25 15:13:11.json new file mode 100644 index 0000000000000000000000000000000000000000..289f0936786c4552f974239a33b7c4de4946ac00 --- /dev/null +++ b/MNCJihunKim/Mistral-7B-OpenOrca-orca-platy-out1kover/result_2023-10-25 15:13:11.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.35409556313993173, + "acc_stderr": 0.013975454122756557, + "acc_norm": 0.4129692832764505, + "acc_norm_stderr": 0.014388344935398322 + }, + "harness|ko_hellaswag|10": { + "acc": 
0.3813981278629755, + "acc_stderr": 0.004847372670134637, + "acc_norm": 0.48954391555467036, + "acc_norm_stderr": 0.00498869022950566 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5263157894736842, + "acc_stderr": 0.03829509868994727, + "acc_norm": 0.5263157894736842, + "acc_norm_stderr": 0.03829509868994727 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6116504854368932, + "acc_stderr": 0.04825729337356389, + "acc_norm": 0.6116504854368932, + "acc_norm_stderr": 0.04825729337356389 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4776500638569604, + "acc_stderr": 0.017862091778507876, + "acc_norm": 0.4776500638569604, + "acc_norm_stderr": 0.017862091778507876 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4148148148148148, + "acc_stderr": 0.042561937679014075, + "acc_norm": 0.4148148148148148, + "acc_norm_stderr": 0.042561937679014075 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.37872340425531914, + "acc_stderr": 0.03170995606040655, + "acc_norm": 0.37872340425531914, + "acc_norm_stderr": 0.03170995606040655 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.45180722891566266, + "acc_stderr": 0.03874371556587953, + "acc_norm": 0.45180722891566266, + "acc_norm_stderr": 0.03874371556587953 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4662379421221865, + "acc_stderr": 0.028333277109562786, + "acc_norm": 0.4662379421221865, + "acc_norm_stderr": 0.028333277109562786 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.45739910313901344, + "acc_stderr": 0.033435777055830646, + "acc_norm": 0.45739910313901344, + "acc_norm_stderr": 0.033435777055830646 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3969465648854962, + "acc_stderr": 0.04291135671009225, + "acc_norm": 0.3969465648854962, + "acc_norm_stderr": 0.04291135671009225 + }, + 
"harness|ko_mmlu_medical_genetics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5252525252525253, + "acc_stderr": 0.03557806245087314, + "acc_norm": 0.5252525252525253, + "acc_norm_stderr": 0.03557806245087314 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4482758620689655, + "acc_stderr": 0.04144311810878151, + "acc_norm": 0.4482758620689655, + "acc_norm_stderr": 0.04144311810878151 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.17647058823529413, + "acc_stderr": 0.03793281185307809, + "acc_norm": 0.17647058823529413, + "acc_norm_stderr": 0.03793281185307809 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4789915966386555, + "acc_stderr": 0.03244980849990029, + "acc_norm": 0.4789915966386555, + "acc_norm_stderr": 0.03244980849990029 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4307692307692308, + "acc_stderr": 0.02510682066053975, + "acc_norm": 0.4307692307692308, + "acc_norm_stderr": 0.02510682066053975 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3645320197044335, + "acc_stderr": 0.0338640574606209, + "acc_norm": 0.3645320197044335, + "acc_norm_stderr": 0.0338640574606209 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5032258064516129, + "acc_stderr": 0.028443414226438316, + "acc_norm": 0.5032258064516129, + "acc_norm_stderr": 0.028443414226438316 + }, + 
"harness|ko_mmlu_marketing|5": { + "acc": 0.6709401709401709, + "acc_stderr": 0.03078232157768818, + "acc_norm": 0.6709401709401709, + "acc_norm_stderr": 0.03078232157768818 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4490566037735849, + "acc_stderr": 0.030612730713641095, + "acc_norm": 0.4490566037735849, + "acc_norm_stderr": 0.030612730713641095 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4909090909090909, + "acc_stderr": 0.04788339768702862, + "acc_norm": 0.4909090909090909, + "acc_norm_stderr": 0.04788339768702862 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.028742040903948496, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.028742040903948496 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.037101857261199946, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.037101857261199946 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6218905472636815, + "acc_stderr": 0.03428867848778657, + "acc_norm": 0.6218905472636815, + "acc_norm_stderr": 0.03428867848778657 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.36416184971098264, + "acc_stderr": 0.03669072477416907, + "acc_norm": 0.36416184971098264, + "acc_norm_stderr": 0.03669072477416907 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.37566137566137564, + "acc_stderr": 0.024942368931159795, + "acc_norm": 0.37566137566137564, + "acc_norm_stderr": 0.024942368931159795 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2847222222222222, + "acc_stderr": 0.037738099906869334, + "acc_norm": 0.2847222222222222, + "acc_norm_stderr": 0.037738099906869334 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237101, + "acc_norm": 0.59, + 
"acc_norm_stderr": 0.04943110704237101 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.49421965317919075, + "acc_stderr": 0.026917296179149123, + "acc_norm": 0.49421965317919075, + "acc_norm_stderr": 0.026917296179149123 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4294478527607362, + "acc_stderr": 0.03889066619112722, + "acc_norm": 0.4294478527607362, + "acc_norm_stderr": 0.03889066619112722 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4351851851851852, + "acc_stderr": 0.027586006221607718, + "acc_norm": 0.4351851851851852, + "acc_norm_stderr": 0.027586006221607718 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.48704663212435234, + "acc_stderr": 0.03607228061047749, + "acc_norm": 0.48704663212435234, + "acc_norm_stderr": 0.03607228061047749 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.32456140350877194, + "acc_stderr": 0.044045561573747685, + "acc_norm": 0.32456140350877194, + "acc_norm_stderr": 0.044045561573747685 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5009174311926605, + "acc_stderr": 0.021437287056051215, + "acc_norm": 0.5009174311926605, + "acc_norm_stderr": 0.021437287056051215 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.04285714285714281, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.04285714285714281 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4869281045751634, + "acc_stderr": 0.028620130800700246, + "acc_norm": 0.4869281045751634, + "acc_norm_stderr": 0.028620130800700246 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5950413223140496, + "acc_stderr": 0.04481137755942469, 
+ "acc_norm": 0.5950413223140496, + "acc_norm_stderr": 0.04481137755942469 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.34868421052631576, + "acc_stderr": 0.03878139888797609, + "acc_norm": 0.34868421052631576, + "acc_norm_stderr": 0.03878139888797609 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3741830065359477, + "acc_stderr": 0.01957695312208884, + "acc_norm": 0.3741830065359477, + "acc_norm_stderr": 0.01957695312208884 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3617021276595745, + "acc_stderr": 0.028663820147199492, + "acc_norm": 0.3617021276595745, + "acc_norm_stderr": 0.028663820147199492 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.41964285714285715, + "acc_stderr": 0.046840993210771065, + "acc_norm": 0.41964285714285715, + "acc_norm_stderr": 0.046840993210771065 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.37962962962962965, + "acc_stderr": 0.03309682581119035, + "acc_norm": 0.37962962962962965, + "acc_norm_stderr": 0.03309682581119035 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2022346368715084, + "acc_stderr": 0.01343372948332099, + "acc_norm": 0.2022346368715084, + "acc_norm_stderr": 0.01343372948332099 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3492647058823529, + "acc_stderr": 0.02895975519682487, + "acc_norm": 0.3492647058823529, + "acc_norm_stderr": 0.02895975519682487 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.46122448979591835, + "acc_stderr": 0.03191282052669277, + "acc_norm": 0.46122448979591835, + "acc_norm_stderr": 0.03191282052669277 + }, + 
"harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5611814345991561, + "acc_stderr": 0.032302649315470375, + "acc_norm": 0.5611814345991561, + "acc_norm_stderr": 0.032302649315470375 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.31029986962190353, + "acc_stderr": 0.011815439293469829, + "acc_norm": 0.31029986962190353, + "acc_norm_stderr": 0.011815439293469829 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.49019607843137253, + "acc_stderr": 0.03508637358630572, + "acc_norm": 0.49019607843137253, + "acc_norm_stderr": 0.03508637358630572 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.45454545454545453, + "acc_stderr": 0.03888176921674099, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.03888176921674099 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3023255813953488, + "mc1_stderr": 0.016077509266133033, + "mc2": 0.4750791587895867, + "mc2_stderr": 0.015736885636484024 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.44037780401416765, + "acc_stderr": 0.01706769977431298, + "acc_norm": 0.5041322314049587, + "acc_norm_stderr": 0.01718976703213082 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 
1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "MNCJihunKim/Mistral-7B-OpenOrca-orca-platy-out1kover", + "model_sha": "6a36ede83f774993cca1e5193c0c702e4b998676", + 
"model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/MNCJihunKim/Mistral-7B-SlimOrca-OP-8k/result_2023-10-26 04:06:59.json b/MNCJihunKim/Mistral-7B-SlimOrca-OP-8k/result_2023-10-26 04:06:59.json new file mode 100644 index 0000000000000000000000000000000000000000..145a1d19b567aa342917393d1e553481eb8cf123 --- /dev/null +++ b/MNCJihunKim/Mistral-7B-SlimOrca-OP-8k/result_2023-10-26 04:06:59.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.30716723549488056, + "acc_stderr": 0.013481034054980945, + "acc_norm": 0.34215017064846415, + "acc_norm_stderr": 0.013864152159177278 + }, + "harness|ko_hellaswag|10": { + "acc": 0.36317466640111534, + "acc_stderr": 0.004799317209902018, + "acc_norm": 0.4519020115514838, + "acc_norm_stderr": 0.0049666408680838605 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.38596491228070173, + "acc_stderr": 0.03733756969066165, + "acc_norm": 0.38596491228070173, + "acc_norm_stderr": 0.03733756969066165 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5339805825242718, + "acc_stderr": 0.04939291447273481, + "acc_norm": 0.5339805825242718, + "acc_norm_stderr": 0.04939291447273481 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.438058748403576, + "acc_stderr": 0.01774223223825723, + "acc_norm": 0.438058748403576, + "acc_norm_stderr": 0.01774223223825723 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.32592592592592595, + "acc_stderr": 0.040491220417025055, + "acc_norm": 0.32592592592592595, + "acc_norm_stderr": 0.040491220417025055 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3191489361702128, + "acc_stderr": 0.030472973363380056, + "acc_norm": 0.3191489361702128, + 
"acc_norm_stderr": 0.030472973363380056 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.35542168674698793, + "acc_stderr": 0.03726214354322415, + "acc_norm": 0.35542168674698793, + "acc_norm_stderr": 0.03726214354322415 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3987138263665595, + "acc_stderr": 0.0278093225857745, + "acc_norm": 0.3987138263665595, + "acc_norm_stderr": 0.0278093225857745 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3721973094170404, + "acc_stderr": 0.0324430528300873, + "acc_norm": 0.3721973094170404, + "acc_norm_stderr": 0.0324430528300873 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2900763358778626, + "acc_stderr": 0.03980066246467766, + "acc_norm": 0.2900763358778626, + "acc_norm_stderr": 0.03980066246467766 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.4292929292929293, + "acc_stderr": 0.035265527246011986, + "acc_norm": 0.4292929292929293, + "acc_norm_stderr": 0.035265527246011986 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.296551724137931, + "acc_stderr": 0.03806142687309993, + "acc_norm": 0.296551724137931, + "acc_norm_stderr": 0.03806142687309993 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.042207736591714534, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.042207736591714534 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.37815126050420167, + "acc_stderr": 0.03149930577784906, + "acc_norm": 0.37815126050420167, + "acc_norm_stderr": 0.03149930577784906 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.37435897435897436, + "acc_stderr": 0.024537591572830517, + "acc_norm": 0.37435897435897436, + "acc_norm_stderr": 0.024537591572830517 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.5, + "acc_stderr": 
0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.42592592592592593, + "acc_stderr": 0.0478034362693679, + "acc_norm": 0.42592592592592593, + "acc_norm_stderr": 0.0478034362693679 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3497536945812808, + "acc_stderr": 0.03355400904969565, + "acc_norm": 0.3497536945812808, + "acc_norm_stderr": 0.03355400904969565 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4, + "acc_stderr": 0.02786932057166464, + "acc_norm": 0.4, + "acc_norm_stderr": 0.02786932057166464 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6324786324786325, + "acc_stderr": 0.031585391577456365, + "acc_norm": 0.6324786324786325, + "acc_norm_stderr": 0.031585391577456365 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.37735849056603776, + "acc_stderr": 0.029832808114796005, + "acc_norm": 0.37735849056603776, + "acc_norm_stderr": 0.029832808114796005 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4090909090909091, + "acc_stderr": 0.04709306978661896, + "acc_norm": 0.4090909090909091, + "acc_norm_stderr": 0.04709306978661896 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.31851851851851853, + "acc_stderr": 0.02840653309060846, + "acc_norm": 0.31851851851851853, + "acc_norm_stderr": 0.02840653309060846 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.03780445850526733, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.03780445850526733 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.527363184079602, + "acc_stderr": 0.035302355173346824, + "acc_norm": 0.527363184079602, + "acc_norm_stderr": 0.035302355173346824 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.32947976878612717, + 
"acc_stderr": 0.03583901754736411, + "acc_norm": 0.32947976878612717, + "acc_norm_stderr": 0.03583901754736411 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.024677862841332786, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.024677862841332786 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3263888888888889, + "acc_stderr": 0.03921067198982266, + "acc_norm": 0.3263888888888889, + "acc_norm_stderr": 0.03921067198982266 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.3583815028901734, + "acc_stderr": 0.02581675679158419, + "acc_norm": 0.3583815028901734, + "acc_norm_stderr": 0.02581675679158419 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.39263803680981596, + "acc_stderr": 0.03836740907831029, + "acc_norm": 0.39263803680981596, + "acc_norm_stderr": 0.03836740907831029 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.37962962962962965, + "acc_stderr": 0.027002521034516468, + "acc_norm": 0.37962962962962965, + "acc_norm_stderr": 0.027002521034516468 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.44041450777202074, + "acc_stderr": 0.03582724530036093, + "acc_norm": 0.44041450777202074, + "acc_norm_stderr": 0.03582724530036093 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.044346007015849245, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.044346007015849245 + }, + "harness|ko_mmlu_high_school_psychology|5": { + 
"acc": 0.43302752293577984, + "acc_stderr": 0.02124414656907434, + "acc_norm": 0.43302752293577984, + "acc_norm_stderr": 0.02124414656907434 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.29365079365079366, + "acc_stderr": 0.040735243221471255, + "acc_norm": 0.29365079365079366, + "acc_norm_stderr": 0.040735243221471255 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4215686274509804, + "acc_stderr": 0.028275490156791438, + "acc_norm": 0.4215686274509804, + "acc_norm_stderr": 0.028275490156791438 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5206611570247934, + "acc_stderr": 0.04560456086387235, + "acc_norm": 0.5206611570247934, + "acc_norm_stderr": 0.04560456086387235 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3355263157894737, + "acc_stderr": 0.03842498559395269, + "acc_norm": 0.3355263157894737, + "acc_norm_stderr": 0.03842498559395269 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3366013071895425, + "acc_stderr": 0.019117213911495155, + "acc_norm": 0.3366013071895425, + "acc_norm_stderr": 0.019117213911495155 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3262411347517731, + "acc_stderr": 0.02796845304356317, + "acc_norm": 0.3262411347517731, + "acc_norm_stderr": 0.02796845304356317 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.30357142857142855, + "acc_stderr": 0.043642261558410445, + "acc_norm": 0.30357142857142855, + "acc_norm_stderr": 0.043642261558410445 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.32407407407407407, + "acc_stderr": 0.03191923445686185, + "acc_norm": 0.32407407407407407, + "acc_norm_stderr": 0.03191923445686185 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24692737430167597, + "acc_stderr": 0.014422292204808847, + "acc_norm": 0.24692737430167597, + "acc_norm_stderr": 
0.014422292204808847 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.35294117647058826, + "acc_stderr": 0.0290294228156814, + "acc_norm": 0.35294117647058826, + "acc_norm_stderr": 0.0290294228156814 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.31020408163265306, + "acc_stderr": 0.029613459872484378, + "acc_norm": 0.31020408163265306, + "acc_norm_stderr": 0.029613459872484378 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.4641350210970464, + "acc_stderr": 0.03246338898055659, + "acc_norm": 0.4641350210970464, + "acc_norm_stderr": 0.03246338898055659 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3305084745762712, + "acc_stderr": 0.012014142101842982, + "acc_norm": 0.3305084745762712, + "acc_norm_stderr": 0.012014142101842982 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.39705882352941174, + "acc_stderr": 0.034341311647191286, + "acc_norm": 0.39705882352941174, + "acc_norm_stderr": 0.034341311647191286 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.38181818181818183, + "acc_stderr": 0.037937131711656344, + "acc_norm": 0.38181818181818183, + "acc_norm_stderr": 0.037937131711656344 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.28886168910648713, + "mc1_stderr": 0.015866346401384304, + "mc2": 0.4607470933289765, + "mc2_stderr": 0.015783351321862177 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3447461629279811, + "acc_stderr": 0.016340649905418697, + "acc_norm": 0.41086186540731995, + "acc_norm_stderr": 0.01691497276784107 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + 
"harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + 
"harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "MNCJihunKim/Mistral-7B-SlimOrca-OP-8k", + "model_sha": "70b643a9304f4b45ca6ae3b4ff6afbd8f8967145", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/MNCJihunKim/Mistral-7B-SlimOrca-orca-platy-out1kover/result_2023-10-25 15:12:52.json b/MNCJihunKim/Mistral-7B-SlimOrca-orca-platy-out1kover/result_2023-10-25 15:12:52.json new file mode 100644 index 0000000000000000000000000000000000000000..8ebafe0aae63a671395032bd5374543291de2de9 --- /dev/null +++ b/MNCJihunKim/Mistral-7B-SlimOrca-orca-platy-out1kover/result_2023-10-25 15:12:52.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.35921501706484643, + "acc_stderr": 0.014020224155839155, + "acc_norm": 0.41552901023890787, + "acc_norm_stderr": 0.014401366641216395 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3822943636725752, + "acc_stderr": 0.004849547819134474, + "acc_norm": 0.4878510256920932, + "acc_norm_stderr": 0.004988308234687271 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.49707602339181284, + "acc_stderr": 0.03834759370936839, + "acc_norm": 0.49707602339181284, + "acc_norm_stderr": 0.03834759370936839 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5631067961165048, + 
"acc_stderr": 0.04911147107365777, + "acc_norm": 0.5631067961165048, + "acc_norm_stderr": 0.04911147107365777 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.49936143039591313, + "acc_stderr": 0.017879948914431662, + "acc_norm": 0.49936143039591313, + "acc_norm_stderr": 0.017879948914431662 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3925925925925926, + "acc_stderr": 0.04218506215368879, + "acc_norm": 0.3925925925925926, + "acc_norm_stderr": 0.04218506215368879 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4127659574468085, + "acc_stderr": 0.03218471141400351, + "acc_norm": 0.4127659574468085, + "acc_norm_stderr": 0.03218471141400351 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.45180722891566266, + "acc_stderr": 0.03874371556587953, + "acc_norm": 0.45180722891566266, + "acc_norm_stderr": 0.03874371556587953 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.47266881028938906, + "acc_stderr": 0.02835563356832818, + "acc_norm": 0.47266881028938906, + "acc_norm_stderr": 0.02835563356832818 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4618834080717489, + "acc_stderr": 0.033460150119732274, + "acc_norm": 0.4618834080717489, + "acc_norm_stderr": 0.033460150119732274 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.37404580152671757, + "acc_stderr": 0.042438692422305246, + "acc_norm": 0.37404580152671757, + "acc_norm_stderr": 0.042438692422305246 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939098, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939098 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5404040404040404, + "acc_stderr": 0.035507024651313425, + "acc_norm": 0.5404040404040404, + "acc_norm_stderr": 0.035507024651313425 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 
0.41379310344827586, + "acc_stderr": 0.04104269211806232, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.04104269211806232 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.042801058373643966, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.042801058373643966 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4579831932773109, + "acc_stderr": 0.03236361111951941, + "acc_norm": 0.4579831932773109, + "acc_norm_stderr": 0.03236361111951941 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.43333333333333335, + "acc_stderr": 0.02512465352588513, + "acc_norm": 0.43333333333333335, + "acc_norm_stderr": 0.02512465352588513 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.62, + "acc_stderr": 0.04878317312145631, + "acc_norm": 0.62, + "acc_norm_stderr": 0.04878317312145631 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5185185185185185, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.5185185185185185, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3842364532019704, + "acc_stderr": 0.0342239856565755, + "acc_norm": 0.3842364532019704, + "acc_norm_stderr": 0.0342239856565755 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.45483870967741935, + "acc_stderr": 0.028327743091561056, + "acc_norm": 0.45483870967741935, + "acc_norm_stderr": 0.028327743091561056 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7136752136752137, + "acc_stderr": 0.02961432369045665, + "acc_norm": 0.7136752136752137, + "acc_norm_stderr": 0.02961432369045665 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4830188679245283, + "acc_stderr": 0.030755120364119898, + "acc_norm": 0.4830188679245283, + "acc_norm_stderr": 0.030755120364119898 + }, + 
"harness|ko_mmlu_public_relations|5": { + "acc": 0.4818181818181818, + "acc_stderr": 0.04785964010794917, + "acc_norm": 0.4818181818181818, + "acc_norm_stderr": 0.04785964010794917 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.028742040903948492, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.028742040903948492 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + "acc_stderr": 0.03684881521389023, + "acc_norm": 0.2847682119205298, + "acc_norm_stderr": 0.03684881521389023 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5472636815920398, + "acc_stderr": 0.03519702717576915, + "acc_norm": 0.5472636815920398, + "acc_norm_stderr": 0.03519702717576915 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3583815028901734, + "acc_stderr": 0.036563436533531585, + "acc_norm": 0.3583815028901734, + "acc_norm_stderr": 0.036563436533531585 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.373015873015873, + "acc_stderr": 0.02490699045899257, + "acc_norm": 0.373015873015873, + "acc_norm_stderr": 0.02490699045899257 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3125, + "acc_stderr": 0.038760854559127644, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.038760854559127644 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.57, + "acc_stderr": 0.04975698519562426, + "acc_norm": 0.57, + "acc_norm_stderr": 0.04975698519562426 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.49421965317919075, + "acc_stderr": 0.026917296179149126, + "acc_norm": 0.49421965317919075, + "acc_norm_stderr": 0.026917296179149126 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4110429447852761, + "acc_stderr": 0.038656978537853624, + "acc_norm": 0.4110429447852761, + "acc_norm_stderr": 0.038656978537853624 
+ }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.45987654320987653, + "acc_stderr": 0.027731022753539274, + "acc_norm": 0.45987654320987653, + "acc_norm_stderr": 0.027731022753539274 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.48186528497409326, + "acc_stderr": 0.036060650018329185, + "acc_norm": 0.48186528497409326, + "acc_norm_stderr": 0.036060650018329185 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3157894736842105, + "acc_stderr": 0.043727482902780085, + "acc_norm": 0.3157894736842105, + "acc_norm_stderr": 0.043727482902780085 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.48623853211009177, + "acc_stderr": 0.02142920208987408, + "acc_norm": 0.48623853211009177, + "acc_norm_stderr": 0.02142920208987408 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.0404061017820884, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.0404061017820884 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4084967320261438, + "acc_stderr": 0.028146405993096358, + "acc_norm": 0.4084967320261438, + "acc_norm_stderr": 0.028146405993096358 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6694214876033058, + "acc_stderr": 0.04294340845212094, + "acc_norm": 0.6694214876033058, + "acc_norm_stderr": 0.04294340845212094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.42105263157894735, + "acc_stderr": 0.04017901275981747, + "acc_norm": 0.42105263157894735, + "acc_norm_stderr": 0.04017901275981747 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.39869281045751637, + "acc_stderr": 0.019808281317449848, + "acc_norm": 0.39869281045751637, + 
"acc_norm_stderr": 0.019808281317449848 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3404255319148936, + "acc_stderr": 0.02826765748265015, + "acc_norm": 0.3404255319148936, + "acc_norm_stderr": 0.02826765748265015 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.45535714285714285, + "acc_stderr": 0.04726835553719099, + "acc_norm": 0.45535714285714285, + "acc_norm_stderr": 0.04726835553719099 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3472222222222222, + "acc_stderr": 0.032468872436376486, + "acc_norm": 0.3472222222222222, + "acc_norm_stderr": 0.032468872436376486 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2011173184357542, + "acc_stderr": 0.013405946402609045, + "acc_norm": 0.2011173184357542, + "acc_norm_stderr": 0.013405946402609045 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.57, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.57, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.35294117647058826, + "acc_stderr": 0.029029422815681404, + "acc_norm": 0.35294117647058826, + "acc_norm_stderr": 0.029029422815681404 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.46938775510204084, + "acc_stderr": 0.031949171367580624, + "acc_norm": 0.46938775510204084, + "acc_norm_stderr": 0.031949171367580624 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5274261603375527, + "acc_stderr": 0.03249822718301304, + "acc_norm": 0.5274261603375527, + "acc_norm_stderr": 0.03249822718301304 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.32073011734028684, + "acc_stderr": 0.011921199991782625, + "acc_norm": 0.32073011734028684, + "acc_norm_stderr": 0.011921199991782625 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 
0.44607843137254904, + "acc_stderr": 0.03488845451304974, + "acc_norm": 0.44607843137254904, + "acc_norm_stderr": 0.03488845451304974 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.44242424242424244, + "acc_stderr": 0.03878372113711274, + "acc_norm": 0.44242424242424244, + "acc_norm_stderr": 0.03878372113711274 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.28886168910648713, + "mc1_stderr": 0.015866346401384304, + "mc2": 0.4582449322023691, + "mc2_stderr": 0.015573281761179949 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.41912632821723733, + "acc_stderr": 0.016963995010862792, + "acc_norm": 0.4781582054309327, + "acc_norm_stderr": 0.017173944474294375 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + 
"harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "MNCJihunKim/Mistral-7B-SlimOrca-orca-platy-out1kover", + "model_sha": "fcc2973dac87df41de97b6972e0323fee599bcf3", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/MNCKim/Mistral-7B-OpenHermes/result_2023-10-26 08:29:28.json b/MNCKim/Mistral-7B-OpenHermes/result_2023-10-26 08:29:28.json new file mode 100644 index 0000000000000000000000000000000000000000..2790f061466368e66dd82e297b1da9e3164bbe87 --- 
/dev/null +++ b/MNCKim/Mistral-7B-OpenHermes/result_2023-10-26 08:29:28.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.34215017064846415, + "acc_stderr": 0.013864152159177278, + "acc_norm": 0.38822525597269625, + "acc_norm_stderr": 0.014241614207414044 + }, + "harness|ko_hellaswag|10": { + "acc": 0.38020314678350925, + "acc_stderr": 0.004844445265582655, + "acc_norm": 0.4870543716391157, + "acc_norm_stderr": 0.004988108663179765 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.47368421052631576, + "acc_stderr": 0.038295098689947286, + "acc_norm": 0.47368421052631576, + "acc_norm_stderr": 0.038295098689947286 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5533980582524272, + "acc_stderr": 0.04922424153458934, + "acc_norm": 0.5533980582524272, + "acc_norm_stderr": 0.04922424153458934 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4725415070242657, + "acc_stderr": 0.01785298126663396, + "acc_norm": 0.4725415070242657, + "acc_norm_stderr": 0.01785298126663396 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37777777777777777, + "acc_stderr": 0.04188307537595853, + "acc_norm": 0.37777777777777777, + "acc_norm_stderr": 0.04188307537595853 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3702127659574468, + "acc_stderr": 0.03156564682236785, + "acc_norm": 0.3702127659574468, + "acc_norm_stderr": 0.03156564682236785 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.41566265060240964, + "acc_stderr": 0.03836722176598052, + "acc_norm": 0.41566265060240964, + "acc_norm_stderr": 0.03836722176598052 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.45980707395498394, + "acc_stderr": 0.028306190403305696, + "acc_norm": 0.45980707395498394, + "acc_norm_stderr": 0.028306190403305696 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 
0.49327354260089684, + "acc_stderr": 0.03355476596234354, + "acc_norm": 0.49327354260089684, + "acc_norm_stderr": 0.03355476596234354 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4580152671755725, + "acc_stderr": 0.04369802690578756, + "acc_norm": 0.4580152671755725, + "acc_norm_stderr": 0.04369802690578756 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5909090909090909, + "acc_stderr": 0.03502975799413007, + "acc_norm": 0.5909090909090909, + "acc_norm_stderr": 0.03502975799413007 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.46206896551724136, + "acc_stderr": 0.041546596717075474, + "acc_norm": 0.46206896551724136, + "acc_norm_stderr": 0.041546596717075474 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.043898699568087785, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.043898699568087785 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.46638655462184875, + "acc_stderr": 0.03240501447690071, + "acc_norm": 0.46638655462184875, + "acc_norm_stderr": 0.03240501447690071 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4461538461538462, + "acc_stderr": 0.025203571773028333, + "acc_norm": 0.4461538461538462, + "acc_norm_stderr": 0.025203571773028333 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.65, + "acc_stderr": 0.04793724854411021, + "acc_norm": 0.65, + "acc_norm_stderr": 0.04793724854411021 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 
0.37438423645320196, + "acc_stderr": 0.03405155380561952, + "acc_norm": 0.37438423645320196, + "acc_norm_stderr": 0.03405155380561952 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4290322580645161, + "acc_stderr": 0.02815603653823321, + "acc_norm": 0.4290322580645161, + "acc_norm_stderr": 0.02815603653823321 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7350427350427351, + "acc_stderr": 0.028911208802749472, + "acc_norm": 0.7350427350427351, + "acc_norm_stderr": 0.028911208802749472 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4528301886792453, + "acc_stderr": 0.030635627957961827, + "acc_norm": 0.4528301886792453, + "acc_norm_stderr": 0.030635627957961827 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.04769300568972744, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.04769300568972744 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.337037037037037, + "acc_stderr": 0.028820884666253255, + "acc_norm": 0.337037037037037, + "acc_norm_stderr": 0.028820884666253255 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33774834437086093, + "acc_stderr": 0.038615575462551684, + "acc_norm": 0.33774834437086093, + "acc_norm_stderr": 0.038615575462551684 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6368159203980099, + "acc_stderr": 0.034005985055990146, + "acc_norm": 0.6368159203980099, + "acc_norm_stderr": 0.034005985055990146 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3352601156069364, + "acc_stderr": 0.035995863012470784, + "acc_norm": 0.3352601156069364, + "acc_norm_stderr": 0.035995863012470784 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3835978835978836, + "acc_stderr": 0.025043757318520196, + "acc_norm": 0.3835978835978836, + "acc_norm_stderr": 0.025043757318520196 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3958333333333333, + "acc_stderr": 0.040894654493255835, + "acc_norm": 0.3958333333333333, + 
"acc_norm_stderr": 0.040894654493255835 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.56, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.56, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5202312138728323, + "acc_stderr": 0.026897049996382868, + "acc_norm": 0.5202312138728323, + "acc_norm_stderr": 0.026897049996382868 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5521472392638037, + "acc_stderr": 0.03906947479456607, + "acc_norm": 0.5521472392638037, + "acc_norm_stderr": 0.03906947479456607 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5123456790123457, + "acc_stderr": 0.027812262269327235, + "acc_norm": 0.5123456790123457, + "acc_norm_stderr": 0.027812262269327235 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5025906735751295, + "acc_stderr": 0.03608390745384487, + "acc_norm": 0.5025906735751295, + "acc_norm_stderr": 0.03608390745384487 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.30701754385964913, + "acc_stderr": 0.04339138322579861, + "acc_norm": 0.30701754385964913, + "acc_norm_stderr": 0.04339138322579861 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5137614678899083, + "acc_stderr": 0.02142920208987408, + "acc_norm": 0.5137614678899083, + "acc_norm_stderr": 0.02142920208987408 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.38095238095238093, + "acc_stderr": 0.04343525428949097, + "acc_norm": 0.38095238095238093, + "acc_norm_stderr": 0.04343525428949097 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.49673202614379086, + "acc_stderr": 0.02862930519400354, + "acc_norm": 
0.49673202614379086, + "acc_norm_stderr": 0.02862930519400354 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6528925619834711, + "acc_stderr": 0.04345724570292534, + "acc_norm": 0.6528925619834711, + "acc_norm_stderr": 0.04345724570292534 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.39473684210526316, + "acc_stderr": 0.039777499346220734, + "acc_norm": 0.39473684210526316, + "acc_norm_stderr": 0.039777499346220734 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.01994491413687358, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.01994491413687358 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.375886524822695, + "acc_stderr": 0.028893955412115886, + "acc_norm": 0.375886524822695, + "acc_norm_stderr": 0.028893955412115886 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.45535714285714285, + "acc_stderr": 0.04726835553719099, + "acc_norm": 0.45535714285714285, + "acc_norm_stderr": 0.04726835553719099 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.032757734861009996, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.032757734861009996 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.20446927374301677, + "acc_stderr": 0.013488813404711909, + "acc_norm": 0.20446927374301677, + "acc_norm_stderr": 0.013488813404711909 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.64, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.64, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3492647058823529, + 
"acc_stderr": 0.028959755196824873, + "acc_norm": 0.3492647058823529, + "acc_norm_stderr": 0.028959755196824873 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5265306122448979, + "acc_stderr": 0.03196412734523272, + "acc_norm": 0.5265306122448979, + "acc_norm_stderr": 0.03196412734523272 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5780590717299579, + "acc_stderr": 0.032148146302403695, + "acc_norm": 0.5780590717299579, + "acc_norm_stderr": 0.032148146302403695 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3559322033898305, + "acc_stderr": 0.01222864553727757, + "acc_norm": 0.3559322033898305, + "acc_norm_stderr": 0.01222864553727757 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.47058823529411764, + "acc_stderr": 0.03503235296367993, + "acc_norm": 0.47058823529411764, + "acc_norm_stderr": 0.03503235296367993 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.49696969696969695, + "acc_stderr": 0.03904272341431857, + "acc_norm": 0.49696969696969695, + "acc_norm_stderr": 0.03904272341431857 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.31334149326805383, + "mc1_stderr": 0.016238065069059615, + "mc2": 0.49276821876862364, + "mc2_stderr": 0.015815875390844718 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.44037780401416765, + "acc_stderr": 0.01706769977431297, + "acc_norm": 0.4734356552538371, + "acc_norm_stderr": 0.017166075717577747 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + 
"harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + 
"harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "MNCKim/Mistral-7B-OpenHermes", + "model_sha": "847254b43b055cbe217b7aedf1219942457aa942", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/MNCKim/Mistral-7B-SlimOrca-OP-U2048-ran2k/result_2023-10-26 05:19:08.json b/MNCKim/Mistral-7B-SlimOrca-OP-U2048-ran2k/result_2023-10-26 05:19:08.json new file mode 100644 index 0000000000000000000000000000000000000000..5f474b55d206213beb022c98ab6b1d2c6801d929 --- /dev/null +++ b/MNCKim/Mistral-7B-SlimOrca-OP-U2048-ran2k/result_2023-10-26 05:19:08.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3037542662116041, + "acc_stderr": 0.013438909184778762, + "acc_norm": 0.34044368600682595, + "acc_norm_stderr": 0.013847460518892976 + }, + "harness|ko_hellaswag|10": { + "acc": 0.359788886675961, + "acc_stderr": 0.0047895751634186535, + "acc_norm": 0.45180242979486157, + "acc_norm_stderr": 0.004966544724452225 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.38011695906432746, + "acc_stderr": 0.037229657413855394, + "acc_norm": 0.38011695906432746, + "acc_norm_stderr": 0.037229657413855394 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4563106796116505, + "acc_stderr": 0.049318019942204146, + "acc_norm": 0.4563106796116505, + "acc_norm_stderr": 0.049318019942204146 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4099616858237548, + "acc_stderr": 0.017587672312336055, + "acc_norm": 0.4099616858237548, + "acc_norm_stderr": 0.017587672312336055 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.041716541613545426, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 
0.041716541613545426 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.31063829787234043, + "acc_stderr": 0.03025123757921317, + "acc_norm": 0.31063829787234043, + "acc_norm_stderr": 0.03025123757921317 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39759036144578314, + "acc_stderr": 0.038099730845402184, + "acc_norm": 0.39759036144578314, + "acc_norm_stderr": 0.038099730845402184 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3536977491961415, + "acc_stderr": 0.02715520810320086, + "acc_norm": 0.3536977491961415, + "acc_norm_stderr": 0.02715520810320086 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4349775784753363, + "acc_stderr": 0.03327283370271344, + "acc_norm": 0.4349775784753363, + "acc_norm_stderr": 0.03327283370271344 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3435114503816794, + "acc_stderr": 0.041649760719448786, + "acc_norm": 0.3435114503816794, + "acc_norm_stderr": 0.041649760719448786 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.37373737373737376, + "acc_stderr": 0.03446897738659332, + "acc_norm": 0.37373737373737376, + "acc_norm_stderr": 0.03446897738659332 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4, + "acc_stderr": 0.040824829046386284, + "acc_norm": 0.4, + "acc_norm_stderr": 0.040824829046386284 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.04220773659171453, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.04220773659171453 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3403361344537815, + "acc_stderr": 0.030778057422931673, + "acc_norm": 0.3403361344537815, + "acc_norm_stderr": 
0.030778057422931673 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3564102564102564, + "acc_stderr": 0.024283140529467298, + "acc_norm": 0.3564102564102564, + "acc_norm_stderr": 0.024283140529467298 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.57, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.57, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.39814814814814814, + "acc_stderr": 0.04732332615978814, + "acc_norm": 0.39814814814814814, + "acc_norm_stderr": 0.04732332615978814 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3399014778325123, + "acc_stderr": 0.0333276906841079, + "acc_norm": 0.3399014778325123, + "acc_norm_stderr": 0.0333276906841079 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3580645161290323, + "acc_stderr": 0.027273890594300642, + "acc_norm": 0.3580645161290323, + "acc_norm_stderr": 0.027273890594300642 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6153846153846154, + "acc_stderr": 0.03187195347942466, + "acc_norm": 0.6153846153846154, + "acc_norm_stderr": 0.03187195347942466 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.38113207547169814, + "acc_stderr": 0.029890609686286616, + "acc_norm": 0.38113207547169814, + "acc_norm_stderr": 0.029890609686286616 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.41818181818181815, + "acc_stderr": 0.04724577405731572, + "acc_norm": 0.41818181818181815, + "acc_norm_stderr": 0.04724577405731572 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2740740740740741, + "acc_stderr": 0.027195934804085626, + "acc_norm": 0.2740740740740741, + "acc_norm_stderr": 0.027195934804085626 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2185430463576159, + "acc_stderr": 0.03374235550425694, + 
"acc_norm": 0.2185430463576159, + "acc_norm_stderr": 0.03374235550425694 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.40298507462686567, + "acc_stderr": 0.034683432951111266, + "acc_norm": 0.40298507462686567, + "acc_norm_stderr": 0.034683432951111266 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.36416184971098264, + "acc_stderr": 0.03669072477416907, + "acc_norm": 0.36416184971098264, + "acc_norm_stderr": 0.03669072477416907 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30158730158730157, + "acc_stderr": 0.023636975996101817, + "acc_norm": 0.30158730158730157, + "acc_norm_stderr": 0.023636975996101817 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.039420826399272135, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.039420826399272135 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.19, + "acc_stderr": 0.039427724440366234, + "acc_norm": 0.19, + "acc_norm_stderr": 0.039427724440366234 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.3468208092485549, + "acc_stderr": 0.025624723994030454, + "acc_norm": 0.3468208092485549, + "acc_norm_stderr": 0.025624723994030454 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.37423312883435583, + "acc_stderr": 0.03802068102899615, + "acc_norm": 0.37423312883435583, + "acc_norm_stderr": 0.03802068102899615 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.41975308641975306, + "acc_stderr": 0.027460099557005138, + "acc_norm": 0.41975308641975306, + "acc_norm_stderr": 0.027460099557005138 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.37305699481865284, + 
"acc_stderr": 0.03490205592048574, + "acc_norm": 0.37305699481865284, + "acc_norm_stderr": 0.03490205592048574 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.30701754385964913, + "acc_stderr": 0.0433913832257986, + "acc_norm": 0.30701754385964913, + "acc_norm_stderr": 0.0433913832257986 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3522935779816514, + "acc_stderr": 0.020480568843998993, + "acc_norm": 0.3522935779816514, + "acc_norm_stderr": 0.020480568843998993 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.24603174603174602, + "acc_stderr": 0.03852273364924316, + "acc_norm": 0.24603174603174602, + "acc_norm_stderr": 0.03852273364924316 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.34967320261437906, + "acc_stderr": 0.0273053080762747, + "acc_norm": 0.34967320261437906, + "acc_norm_stderr": 0.0273053080762747 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6033057851239669, + "acc_stderr": 0.044658697805310094, + "acc_norm": 0.6033057851239669, + "acc_norm_stderr": 0.044658697805310094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.25, + "acc_stderr": 0.03523807393012047, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03523807393012047 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.32189542483660133, + "acc_stderr": 0.018901015322093092, + "acc_norm": 0.32189542483660133, + "acc_norm_stderr": 0.018901015322093092 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32269503546099293, + "acc_stderr": 0.02788913930053479, + "acc_norm": 0.32269503546099293, + "acc_norm_stderr": 0.02788913930053479 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25892857142857145, + "acc_stderr": 0.04157751539865629, + "acc_norm": 0.25892857142857145, + "acc_norm_stderr": 0.04157751539865629 + }, + "harness|ko_mmlu_high_school_statistics|5": { 
+ "acc": 0.2777777777777778, + "acc_stderr": 0.0305467452649532, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.0305467452649532 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.25921787709497207, + "acc_stderr": 0.014655780837497724, + "acc_norm": 0.25921787709497207, + "acc_norm_stderr": 0.014655780837497724 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.2536764705882353, + "acc_stderr": 0.026431329870789524, + "acc_norm": 0.2536764705882353, + "acc_norm_stderr": 0.026431329870789524 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2653061224489796, + "acc_stderr": 0.028263889943784617, + "acc_norm": 0.2653061224489796, + "acc_norm_stderr": 0.028263889943784617 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.42616033755274263, + "acc_stderr": 0.032190357031317736, + "acc_norm": 0.42616033755274263, + "acc_norm_stderr": 0.032190357031317736 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2848761408083442, + "acc_stderr": 0.011527830846369038, + "acc_norm": 0.2848761408083442, + "acc_norm_stderr": 0.011527830846369038 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.36764705882352944, + "acc_stderr": 0.03384132045674119, + "acc_norm": 0.36764705882352944, + "acc_norm_stderr": 0.03384132045674119 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.34545454545454546, + "acc_stderr": 0.037131580674819135, + "acc_norm": 0.34545454545454546, + "acc_norm_stderr": 0.037131580674819135 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.29253365973072215, + "mc1_stderr": 0.015925597445286165, + "mc2": 0.4567888942781546, + "mc2_stderr": 
0.015721003734360934 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3400236127508855, + "acc_stderr": 0.01628671722073768, + "acc_norm": 0.41912632821723733, + "acc_norm_stderr": 0.016963995010862792 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 
1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "MNCKim/Mistral-7B-SlimOrca-OP-U2048-ran2k", + "model_sha": "b7d5d28670cc0536eff52f462ec04de3712fd4e9", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/MNCKim/Mistral-7B-SlimOrca-OP-U2048-ran4k/result_2023-10-26 05:19:05.json b/MNCKim/Mistral-7B-SlimOrca-OP-U2048-ran4k/result_2023-10-26 05:19:05.json new file mode 100644 index 0000000000000000000000000000000000000000..8cb5674e3fdcf780de3a918c79ad092fc8fa4240 --- /dev/null +++ b/MNCKim/Mistral-7B-SlimOrca-OP-U2048-ran4k/result_2023-10-26 05:19:05.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2977815699658703, + "acc_stderr": 0.013363080107244492, + "acc_norm": 0.3395904436860068, + "acc_norm_stderr": 0.013839039762820164 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3595897231627166, + "acc_stderr": 0.004788994060654273, + "acc_norm": 0.4554869547898825, + "acc_norm_stderr": 
0.004969968458256169 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.29239766081871343, + "acc_stderr": 0.03488647713457921, + "acc_norm": 0.29239766081871343, + "acc_norm_stderr": 0.03488647713457921 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.3300970873786408, + "acc_stderr": 0.04656147110012351, + "acc_norm": 0.3300970873786408, + "acc_norm_stderr": 0.04656147110012351 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.34227330779054915, + "acc_stderr": 0.016967031766413624, + "acc_norm": 0.34227330779054915, + "acc_norm_stderr": 0.016967031766413624 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2740740740740741, + "acc_stderr": 0.03853254836552004, + "acc_norm": 0.2740740740740741, + "acc_norm_stderr": 0.03853254836552004 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421255, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421255 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3404255319148936, + "acc_stderr": 0.03097669299853443, + "acc_norm": 0.3404255319148936, + "acc_norm_stderr": 0.03097669299853443 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3072289156626506, + "acc_stderr": 0.035915667978246635, + "acc_norm": 0.3072289156626506, + "acc_norm_stderr": 0.035915667978246635 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2958199356913183, + "acc_stderr": 0.025922371788818788, + "acc_norm": 0.2958199356913183, + "acc_norm_stderr": 0.025922371788818788 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4080717488789238, + "acc_stderr": 0.03298574607842822, + "acc_norm": 0.4080717488789238, + "acc_norm_stderr": 0.03298574607842822 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2748091603053435, + "acc_stderr": 0.03915345408847836, + "acc_norm": 0.2748091603053435, + "acc_norm_stderr": 0.03915345408847836 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 
0.04760952285695235 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.25252525252525254, + "acc_stderr": 0.030954055470365907, + "acc_norm": 0.25252525252525254, + "acc_norm_stderr": 0.030954055470365907 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.23448275862068965, + "acc_stderr": 0.035306258743465914, + "acc_norm": 0.23448275862068965, + "acc_norm_stderr": 0.035306258743465914 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.041583075330832865, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.041583075330832865 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.24369747899159663, + "acc_stderr": 0.027886828078380565, + "acc_norm": 0.24369747899159663, + "acc_norm_stderr": 0.027886828078380565 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.24871794871794872, + "acc_stderr": 0.0219169577092138, + "acc_norm": 0.24871794871794872, + "acc_norm_stderr": 0.0219169577092138 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456344, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456344 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3425925925925926, + "acc_stderr": 0.045879047413018105, + "acc_norm": 0.3425925925925926, + "acc_norm_stderr": 0.045879047413018105 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.28078817733990147, + "acc_stderr": 0.031618563353586086, + "acc_norm": 0.28078817733990147, + "acc_norm_stderr": 0.031618563353586086 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.27741935483870966, + "acc_stderr": 0.025470196835900055, + "acc_norm": 0.27741935483870966, + "acc_norm_stderr": 0.025470196835900055 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.36752136752136755, + "acc_stderr": 
0.031585391577456365, + "acc_norm": 0.36752136752136755, + "acc_norm_stderr": 0.031585391577456365 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2981132075471698, + "acc_stderr": 0.028152837942493868, + "acc_norm": 0.2981132075471698, + "acc_norm_stderr": 0.028152837942493868 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.37272727272727274, + "acc_stderr": 0.046313813194254635, + "acc_norm": 0.37272727272727274, + "acc_norm_stderr": 0.046313813194254635 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.026719240783712156, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.026719240783712156 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2251655629139073, + "acc_stderr": 0.03410435282008937, + "acc_norm": 0.2251655629139073, + "acc_norm_stderr": 0.03410435282008937 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.30845771144278605, + "acc_stderr": 0.03265819588512697, + "acc_norm": 0.30845771144278605, + "acc_norm_stderr": 0.03265819588512697 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.24855491329479767, + "acc_stderr": 0.03295304696818318, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 0.03295304696818318 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2830687830687831, + "acc_stderr": 0.023201392938194974, + "acc_norm": 0.2830687830687831, + "acc_norm_stderr": 0.023201392938194974 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2361111111111111, + "acc_stderr": 0.03551446610810826, + "acc_norm": 0.2361111111111111, + "acc_norm_stderr": 0.03551446610810826 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816505, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816505 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_moral_disputes|5": { + 
"acc": 0.30057803468208094, + "acc_stderr": 0.024685316867257803, + "acc_norm": 0.30057803468208094, + "acc_norm_stderr": 0.024685316867257803 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.25153374233128833, + "acc_stderr": 0.03408997886857529, + "acc_norm": 0.25153374233128833, + "acc_norm_stderr": 0.03408997886857529 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.27469135802469136, + "acc_stderr": 0.024836057868294674, + "acc_norm": 0.27469135802469136, + "acc_norm_stderr": 0.024836057868294674 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.24352331606217617, + "acc_stderr": 0.030975436386845426, + "acc_norm": 0.24352331606217617, + "acc_norm_stderr": 0.030975436386845426 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.042270544512321984, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.042270544512321984 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.27706422018348625, + "acc_stderr": 0.019188482590169535, + "acc_norm": 0.27706422018348625, + "acc_norm_stderr": 0.019188482590169535 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.23809523809523808, + "acc_stderr": 0.03809523809523809, + "acc_norm": 0.23809523809523808, + "acc_norm_stderr": 0.03809523809523809 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.025553169991826514, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.025553169991826514 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.34710743801652894, + "acc_stderr": 0.043457245702925335, + "acc_norm": 0.34710743801652894, + "acc_norm_stderr": 
0.043457245702925335 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.21710526315789475, + "acc_stderr": 0.03355045304882925, + "acc_norm": 0.21710526315789475, + "acc_norm_stderr": 0.03355045304882925 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2957516339869281, + "acc_stderr": 0.01846315413263282, + "acc_norm": 0.2957516339869281, + "acc_norm_stderr": 0.01846315413263282 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2695035460992908, + "acc_stderr": 0.026469036818590634, + "acc_norm": 0.2695035460992908, + "acc_norm_stderr": 0.026469036818590634 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3392857142857143, + "acc_stderr": 0.04493949068613539, + "acc_norm": 0.3392857142857143, + "acc_norm_stderr": 0.04493949068613539 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.17592592592592593, + "acc_stderr": 0.02596742095825853, + "acc_norm": 0.17592592592592593, + "acc_norm_stderr": 0.02596742095825853 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24022346368715083, + "acc_stderr": 0.014288343803925293, + "acc_norm": 0.24022346368715083, + "acc_norm_stderr": 0.014288343803925293 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.17, + "acc_stderr": 0.0377525168068637, + "acc_norm": 0.17, + "acc_norm_stderr": 0.0377525168068637 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.21323529411764705, + "acc_stderr": 0.024880971512294264, + "acc_norm": 0.21323529411764705, + "acc_norm_stderr": 0.024880971512294264 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.17142857142857143, + "acc_stderr": 0.024127463462650146, + "acc_norm": 0.17142857142857143, + "acc_norm_stderr": 0.024127463462650146 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.3080168776371308, + "acc_stderr": 
0.0300523893356057, + "acc_norm": 0.3080168776371308, + "acc_norm_stderr": 0.0300523893356057 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.23859191655801826, + "acc_stderr": 0.010885929742002209, + "acc_norm": 0.23859191655801826, + "acc_norm_stderr": 0.010885929742002209 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.030190282453501954, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.030190282453501954 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.03453131801885415, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.03453131801885415 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2913096695226438, + "mc1_stderr": 0.01590598704818483, + "mc2": 0.45719878783289014, + "mc2_stderr": 0.01579045306232963 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3010625737898465, + "acc_stderr": 0.015771113299945457, + "acc_norm": 0.38488783943329397, + "acc_norm_stderr": 0.01672857970149866 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + 
"harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "MNCKim/Mistral-7B-SlimOrca-OP-U2048-ran4k", + "model_sha": "397f2df4c4563a7b94ab4c30493004f89edf5eec", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + 
"override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/MNCKim/Mistral-7B-SlimOrca-OP-U2048-top2k/result_2023-10-26 05:18:54.json b/MNCKim/Mistral-7B-SlimOrca-OP-U2048-top2k/result_2023-10-26 05:18:54.json new file mode 100644 index 0000000000000000000000000000000000000000..31822a22868abc1055aa6d1ef3358eaf987e6423 --- /dev/null +++ b/MNCKim/Mistral-7B-SlimOrca-OP-U2048-top2k/result_2023-10-26 05:18:54.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3037542662116041, + "acc_stderr": 0.013438909184778757, + "acc_norm": 0.35580204778157, + "acc_norm_stderr": 0.01399057113791876 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3489344752041426, + "acc_stderr": 0.004756590961576588, + "acc_norm": 0.4340768771161123, + "acc_norm_stderr": 0.0049462215121452826 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.03811079669833531, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.03811079669833531 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5436893203883495, + "acc_stderr": 0.049318019942204146, + "acc_norm": 0.5436893203883495, + "acc_norm_stderr": 0.049318019942204146 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.44061302681992337, + "acc_stderr": 0.017753396973908486, + "acc_norm": 0.44061302681992337, + "acc_norm_stderr": 0.017753396973908486 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3851851851851852, + "acc_stderr": 0.042039210401562783, + "acc_norm": 0.3851851851851852, + "acc_norm_stderr": 0.042039210401562783 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.35319148936170214, + "acc_stderr": 0.031245325202761926, + "acc_norm": 0.35319148936170214, + "acc_norm_stderr": 0.031245325202761926 + }, + "harness|ko_mmlu_virology|5": { + "acc": 
0.41566265060240964, + "acc_stderr": 0.038367221765980515, + "acc_norm": 0.41566265060240964, + "acc_norm_stderr": 0.038367221765980515 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4405144694533762, + "acc_stderr": 0.028196400574197422, + "acc_norm": 0.4405144694533762, + "acc_norm_stderr": 0.028196400574197422 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3452914798206278, + "acc_stderr": 0.03191100192835794, + "acc_norm": 0.3452914798206278, + "acc_norm_stderr": 0.03191100192835794 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4198473282442748, + "acc_stderr": 0.04328577215262972, + "acc_norm": 0.4198473282442748, + "acc_norm_stderr": 0.04328577215262972 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5, + "acc_stderr": 0.035623524993954825, + "acc_norm": 0.5, + "acc_norm_stderr": 0.035623524993954825 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4206896551724138, + "acc_stderr": 0.0411391498118926, + "acc_norm": 0.4206896551724138, + "acc_norm_stderr": 0.0411391498118926 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.044405219061793254, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.044405219061793254 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4369747899159664, + "acc_stderr": 0.03221943636566196, + "acc_norm": 0.4369747899159664, + "acc_norm_stderr": 0.03221943636566196 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4076923076923077, + "acc_stderr": 0.02491524398598784, + "acc_norm": 0.4076923076923077, + "acc_norm_stderr": 0.02491524398598784 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.62, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.62, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_global_facts|5": { 
+ "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5092592592592593, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.5092592592592593, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4039408866995074, + "acc_stderr": 0.0345245390382204, + "acc_norm": 0.4039408866995074, + "acc_norm_stderr": 0.0345245390382204 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4258064516129032, + "acc_stderr": 0.0281291127091659, + "acc_norm": 0.4258064516129032, + "acc_norm_stderr": 0.0281291127091659 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6623931623931624, + "acc_stderr": 0.03098029699261855, + "acc_norm": 0.6623931623931624, + "acc_norm_stderr": 0.03098029699261855 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4226415094339623, + "acc_stderr": 0.03040233144576954, + "acc_norm": 0.4226415094339623, + "acc_norm_stderr": 0.03040233144576954 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.44545454545454544, + "acc_stderr": 0.047605488214603246, + "acc_norm": 0.44545454545454544, + "acc_norm_stderr": 0.047605488214603246 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.32592592592592595, + "acc_stderr": 0.02857834836547308, + "acc_norm": 0.32592592592592595, + "acc_norm_stderr": 0.02857834836547308 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.038020397601079024, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.038020397601079024 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5422885572139303, + "acc_stderr": 0.035228658640995975, + "acc_norm": 0.5422885572139303, + "acc_norm_stderr": 0.035228658640995975 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.43352601156069365, + "acc_stderr": 0.03778621079092055, + "acc_norm": 0.43352601156069365, + "acc_norm_stderr": 
0.03778621079092055 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3492063492063492, + "acc_stderr": 0.024552292209342668, + "acc_norm": 0.3492063492063492, + "acc_norm_stderr": 0.024552292209342668 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3194444444444444, + "acc_stderr": 0.03899073687357335, + "acc_norm": 0.3194444444444444, + "acc_norm_stderr": 0.03899073687357335 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4479768786127168, + "acc_stderr": 0.026772990653361816, + "acc_norm": 0.4479768786127168, + "acc_norm_stderr": 0.026772990653361816 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4662576687116564, + "acc_stderr": 0.039194155450484096, + "acc_norm": 0.4662576687116564, + "acc_norm_stderr": 0.039194155450484096 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.43209876543209874, + "acc_stderr": 0.02756301097160668, + "acc_norm": 0.43209876543209874, + "acc_norm_stderr": 0.02756301097160668 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.45595854922279794, + "acc_stderr": 0.03594413711272437, + "acc_norm": 0.45595854922279794, + "acc_norm_stderr": 0.03594413711272437 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.30701754385964913, + "acc_stderr": 0.04339138322579859, + "acc_norm": 0.30701754385964913, + "acc_norm_stderr": 0.04339138322579859 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.46788990825688076, + "acc_stderr": 0.021393071222680814, + "acc_norm": 
0.46788990825688076, + "acc_norm_stderr": 0.021393071222680814 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3492063492063492, + "acc_stderr": 0.04263906892795133, + "acc_norm": 0.3492063492063492, + "acc_norm_stderr": 0.04263906892795133 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.46078431372549017, + "acc_stderr": 0.028541722692618874, + "acc_norm": 0.46078431372549017, + "acc_norm_stderr": 0.028541722692618874 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5785123966942148, + "acc_stderr": 0.04507732278775087, + "acc_norm": 0.5785123966942148, + "acc_norm_stderr": 0.04507732278775087 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3684210526315789, + "acc_stderr": 0.03925523381052932, + "acc_norm": 0.3684210526315789, + "acc_norm_stderr": 0.03925523381052932 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3366013071895425, + "acc_stderr": 0.019117213911495144, + "acc_norm": 0.3366013071895425, + "acc_norm_stderr": 0.019117213911495144 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.375886524822695, + "acc_stderr": 0.02889395541211589, + "acc_norm": 0.375886524822695, + "acc_norm_stderr": 0.02889395541211589 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.36607142857142855, + "acc_stderr": 0.045723723587374296, + "acc_norm": 0.36607142857142855, + "acc_norm_stderr": 0.045723723587374296 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.33796296296296297, + "acc_stderr": 0.03225941352631295, + "acc_norm": 0.33796296296296297, + "acc_norm_stderr": 0.03225941352631295 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.25027932960893856, + "acc_stderr": 0.01448750085285041, + "acc_norm": 0.25027932960893856, + "acc_norm_stderr": 0.01448750085285041 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.35, + 
"acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.34191176470588236, + "acc_stderr": 0.028814722422254177, + "acc_norm": 0.34191176470588236, + "acc_norm_stderr": 0.028814722422254177 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.49387755102040815, + "acc_stderr": 0.0320068202016391, + "acc_norm": 0.49387755102040815, + "acc_norm_stderr": 0.0320068202016391 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.46835443037974683, + "acc_stderr": 0.03248197400511075, + "acc_norm": 0.46835443037974683, + "acc_norm_stderr": 0.03248197400511075 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.30247718383311606, + "acc_stderr": 0.011731524234165704, + "acc_norm": 0.30247718383311606, + "acc_norm_stderr": 0.011731524234165704 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.37745098039215685, + "acc_stderr": 0.03402272044340703, + "acc_norm": 0.37745098039215685, + "acc_norm_stderr": 0.03402272044340703 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4303030303030303, + "acc_stderr": 0.03866225962879076, + "acc_norm": 0.4303030303030303, + "acc_norm_stderr": 0.03866225962879076 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.31456548347613217, + "mc1_stderr": 0.016255241993179195, + "mc2": 0.4866055692949919, + "mc2_stderr": 0.015740372637770925 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3234946871310508, + "acc_stderr": 0.016083627290483668, + "acc_norm": 0.36481700118063753, + "acc_norm_stderr": 0.016550144337046588 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + 
"harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 
1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "MNCKim/Mistral-7B-SlimOrca-OP-U2048-top2k", + "model_sha": "9ea446751434a20492fc12f4843c9cdc8d8084b8", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/MNCKim/Mistral-7B-SlimOrca-OP-U2048-top4k/result_2023-10-26 05:18:57.json b/MNCKim/Mistral-7B-SlimOrca-OP-U2048-top4k/result_2023-10-26 05:18:57.json new file mode 100644 index 0000000000000000000000000000000000000000..6ae7c519d2bfa2a131e8b5170dff1686db389f63 --- /dev/null +++ b/MNCKim/Mistral-7B-SlimOrca-OP-U2048-top4k/result_2023-10-26 05:18:57.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2986348122866894, + "acc_stderr": 0.013374078615068757, + "acc_norm": 0.34812286689419797, + "acc_norm_stderr": 0.013921008595179344 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3474407488548098, + "acc_stderr": 0.004751840646730853, + "acc_norm": 0.4311890061740689, + "acc_norm_stderr": 0.004942302768002104 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.45614035087719296, + "acc_stderr": 0.03820042586602966, + "acc_norm": 0.45614035087719296, + "acc_norm_stderr": 0.03820042586602966 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.49514563106796117, + "acc_stderr": 0.049505043821289195, + "acc_norm": 0.49514563106796117, + "acc_norm_stderr": 0.049505043821289195 + }, + 
"harness|ko_mmlu_miscellaneous|5": { + "acc": 0.41890166028097064, + "acc_stderr": 0.01764320505237717, + "acc_norm": 0.41890166028097064, + "acc_norm_stderr": 0.01764320505237717 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04072314811876837, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04072314811876837 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4127659574468085, + "acc_stderr": 0.03218471141400351, + "acc_norm": 0.4127659574468085, + "acc_norm_stderr": 0.03218471141400351 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3795180722891566, + "acc_stderr": 0.037777988227480165, + "acc_norm": 0.3795180722891566, + "acc_norm_stderr": 0.037777988227480165 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4212218649517685, + "acc_stderr": 0.02804339985821063, + "acc_norm": 0.4212218649517685, + "acc_norm_stderr": 0.02804339985821063 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4080717488789238, + "acc_stderr": 0.03298574607842821, + "acc_norm": 0.4080717488789238, + "acc_norm_stderr": 0.03298574607842821 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4351145038167939, + "acc_stderr": 0.04348208051644858, + "acc_norm": 0.4351145038167939, + "acc_norm_stderr": 0.04348208051644858 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.47474747474747475, + "acc_stderr": 0.03557806245087314, + "acc_norm": 0.47474747474747475, + "acc_norm_stderr": 0.03557806245087314 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.38620689655172413, + "acc_stderr": 0.04057324734419035, + "acc_norm": 0.38620689655172413, + "acc_norm_stderr": 0.04057324734419035 + }, 
+ "harness|ko_mmlu_college_physics|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.043898699568087785, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.043898699568087785 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.46638655462184875, + "acc_stderr": 0.03240501447690071, + "acc_norm": 0.46638655462184875, + "acc_norm_stderr": 0.03240501447690071 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.41025641025641024, + "acc_stderr": 0.024939313906940777, + "acc_norm": 0.41025641025641024, + "acc_norm_stderr": 0.024939313906940777 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.59, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.59, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3891625615763547, + "acc_stderr": 0.034304624161038716, + "acc_norm": 0.3891625615763547, + "acc_norm_stderr": 0.034304624161038716 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4064516129032258, + "acc_stderr": 0.027941727346256308, + "acc_norm": 0.4064516129032258, + "acc_norm_stderr": 0.027941727346256308 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6752136752136753, + "acc_stderr": 0.03067902276549883, + "acc_norm": 0.6752136752136753, + "acc_norm_stderr": 0.03067902276549883 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4339622641509434, + "acc_stderr": 0.030503292013342582, + "acc_norm": 0.4339622641509434, + "acc_norm_stderr": 0.030503292013342582 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.44545454545454544, + "acc_stderr": 0.047605488214603246, + "acc_norm": 
0.44545454545454544, + "acc_norm_stderr": 0.047605488214603246 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.32592592592592595, + "acc_stderr": 0.028578348365473072, + "acc_norm": 0.32592592592592595, + "acc_norm_stderr": 0.028578348365473072 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.26490066225165565, + "acc_stderr": 0.03603038545360384, + "acc_norm": 0.26490066225165565, + "acc_norm_stderr": 0.03603038545360384 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.527363184079602, + "acc_stderr": 0.035302355173346824, + "acc_norm": 0.527363184079602, + "acc_norm_stderr": 0.035302355173346824 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.35260115606936415, + "acc_stderr": 0.036430371689585496, + "acc_norm": 0.35260115606936415, + "acc_norm_stderr": 0.036430371689585496 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.34656084656084657, + "acc_stderr": 0.024508777521028428, + "acc_norm": 0.34656084656084657, + "acc_norm_stderr": 0.024508777521028428 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3472222222222222, + "acc_stderr": 0.039812405437178615, + "acc_norm": 0.3472222222222222, + "acc_norm_stderr": 0.039812405437178615 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.57, + "acc_stderr": 0.04975698519562426, + "acc_norm": 0.57, + "acc_norm_stderr": 0.04975698519562426 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.42485549132947975, + "acc_stderr": 0.026613350840261736, + "acc_norm": 0.42485549132947975, + "acc_norm_stderr": 0.026613350840261736 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.43558282208588955, + "acc_stderr": 0.03895632464138937, + "acc_norm": 0.43558282208588955, + "acc_norm_stderr": 0.03895632464138937 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.42592592592592593, + 
"acc_stderr": 0.027513747284379417, + "acc_norm": 0.42592592592592593, + "acc_norm_stderr": 0.027513747284379417 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.44041450777202074, + "acc_stderr": 0.035827245300360945, + "acc_norm": 0.44041450777202074, + "acc_norm_stderr": 0.035827245300360945 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.04185774424022057, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.04185774424022057 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.46055045871559636, + "acc_stderr": 0.021370494609995096, + "acc_norm": 0.46055045871559636, + "acc_norm_stderr": 0.021370494609995096 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.36507936507936506, + "acc_stderr": 0.043062412591271526, + "acc_norm": 0.36507936507936506, + "acc_norm_stderr": 0.043062412591271526 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.49019607843137253, + "acc_stderr": 0.028624412550167965, + "acc_norm": 0.49019607843137253, + "acc_norm_stderr": 0.028624412550167965 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5537190082644629, + "acc_stderr": 0.0453793517794788, + "acc_norm": 0.5537190082644629, + "acc_norm_stderr": 0.0453793517794788 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.39473684210526316, + "acc_stderr": 0.039777499346220734, + "acc_norm": 0.39473684210526316, + "acc_norm_stderr": 0.039777499346220734 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.35294117647058826, + "acc_stderr": 0.019333142020797056, + "acc_norm": 0.35294117647058826, + "acc_norm_stderr": 0.019333142020797056 + }, + 
"harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3191489361702128, + "acc_stderr": 0.027807990141320196, + "acc_norm": 0.3191489361702128, + "acc_norm_stderr": 0.027807990141320196 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.375, + "acc_stderr": 0.04595091388086298, + "acc_norm": 0.375, + "acc_norm_stderr": 0.04595091388086298 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.36574074074074076, + "acc_stderr": 0.03284738857647206, + "acc_norm": 0.36574074074074076, + "acc_norm_stderr": 0.03284738857647206 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23575418994413408, + "acc_stderr": 0.014196375686290804, + "acc_norm": 0.23575418994413408, + "acc_norm_stderr": 0.014196375686290804 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.049020713000019756, + "acc_norm": 0.39, + "acc_norm_stderr": 0.049020713000019756 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3602941176470588, + "acc_stderr": 0.029163128570670733, + "acc_norm": 0.3602941176470588, + "acc_norm_stderr": 0.029163128570670733 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.46122448979591835, + "acc_stderr": 0.03191282052669277, + "acc_norm": 0.46122448979591835, + "acc_norm_stderr": 0.03191282052669277 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.48523206751054854, + "acc_stderr": 0.032533028078777386, + "acc_norm": 0.48523206751054854, + "acc_norm_stderr": 0.032533028078777386 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.31877444589308995, + "acc_stderr": 0.011901895635786095, + "acc_norm": 0.31877444589308995, + "acc_norm_stderr": 0.011901895635786095 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.35784313725490197, + "acc_stderr": 0.033644872860882996, + "acc_norm": 
0.35784313725490197, + "acc_norm_stderr": 0.033644872860882996 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.43636363636363634, + "acc_stderr": 0.03872592983524754, + "acc_norm": 0.43636363636363634, + "acc_norm_stderr": 0.03872592983524754 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3072215422276622, + "mc1_stderr": 0.016150201321322992, + "mc2": 0.4763580752793618, + "mc2_stderr": 0.01591246406391595 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3435655253837072, + "acc_stderr": 0.016327334806429145, + "acc_norm": 0.40613931523022434, + "acc_norm_stderr": 0.016884749503191392 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + 
"harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "MNCKim/Mistral-7B-SlimOrca-OP-U2048-top4k", + "model_sha": "0df21efbb44a7aeac958f99c94d27887bdeb7e04", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/MNCLLM/Mistral-7B-KoCot-Platypus-4096/result_2023-10-24 10:48:31.json b/MNCLLM/Mistral-7B-KoCot-Platypus-4096/result_2023-10-24 10:48:31.json new file mode 100644 index 0000000000000000000000000000000000000000..0a9411d46dc2e719f84a7907695d6203fc01865a --- /dev/null +++ 
b/MNCLLM/Mistral-7B-KoCot-Platypus-4096/result_2023-10-24 10:48:31.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.28668941979522183, + "acc_stderr": 0.01321498632927477, + "acc_norm": 0.3387372013651877, + "acc_norm_stderr": 0.01383056892797433 + }, + "harness|ko_hellaswag|10": { + "acc": 0.344353714399522, + "acc_stderr": 0.004741859753178415, + "acc_norm": 0.4213304122684724, + "acc_norm_stderr": 0.004927631806477553 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.3157894736842105, + "acc_stderr": 0.03565079670708311, + "acc_norm": 0.3157894736842105, + "acc_norm_stderr": 0.03565079670708311 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4174757281553398, + "acc_stderr": 0.048828405482122375, + "acc_norm": 0.4174757281553398, + "acc_norm_stderr": 0.048828405482122375 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3946360153256705, + "acc_stderr": 0.017478464305911542, + "acc_norm": 0.3946360153256705, + "acc_norm_stderr": 0.017478464305911542 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3037037037037037, + "acc_stderr": 0.039725528847851375, + "acc_norm": 0.3037037037037037, + "acc_norm_stderr": 0.039725528847851375 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206824, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206824 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3872340425531915, + "acc_stderr": 0.03184389265339526, + "acc_norm": 0.3872340425531915, + "acc_norm_stderr": 0.03184389265339526 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3373493975903614, + "acc_stderr": 0.03680783690727581, + "acc_norm": 0.3373493975903614, + "acc_norm_stderr": 0.03680783690727581 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.40514469453376206, + "acc_stderr": 0.027882383791325946, + "acc_norm": 0.40514469453376206, + "acc_norm_stderr": 0.027882383791325946 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.38565022421524664, + 
"acc_stderr": 0.03266842214289202, + "acc_norm": 0.38565022421524664, + "acc_norm_stderr": 0.03266842214289202 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2824427480916031, + "acc_stderr": 0.03948406125768362, + "acc_norm": 0.2824427480916031, + "acc_norm_stderr": 0.03948406125768362 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.35353535353535354, + "acc_stderr": 0.03406086723547155, + "acc_norm": 0.35353535353535354, + "acc_norm_stderr": 0.03406086723547155 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.32413793103448274, + "acc_stderr": 0.03900432069185554, + "acc_norm": 0.32413793103448274, + "acc_norm_stderr": 0.03900432069185554 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.042207736591714534, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.042207736591714534 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3403361344537815, + "acc_stderr": 0.030778057422931673, + "acc_norm": 0.3403361344537815, + "acc_norm_stderr": 0.030778057422931673 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.34615384615384615, + "acc_stderr": 0.024121125416941183, + "acc_norm": 0.34615384615384615, + "acc_norm_stderr": 0.024121125416941183 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.0471282125742677, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.0471282125742677 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + 
"acc": 0.2561576354679803, + "acc_stderr": 0.0307127300709826, + "acc_norm": 0.2561576354679803, + "acc_norm_stderr": 0.0307127300709826 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.36774193548387096, + "acc_stderr": 0.027430866579973474, + "acc_norm": 0.36774193548387096, + "acc_norm_stderr": 0.027430866579973474 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5683760683760684, + "acc_stderr": 0.0324483553531149, + "acc_norm": 0.5683760683760684, + "acc_norm_stderr": 0.0324483553531149 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2981132075471698, + "acc_stderr": 0.028152837942493854, + "acc_norm": 0.2981132075471698, + "acc_norm_stderr": 0.028152837942493854 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4636363636363636, + "acc_stderr": 0.047764491623961985, + "acc_norm": 0.4636363636363636, + "acc_norm_stderr": 0.047764491623961985 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.027940457136228416, + "acc_norm": 0.3, + "acc_norm_stderr": 0.027940457136228416 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2251655629139073, + "acc_stderr": 0.03410435282008937, + "acc_norm": 0.2251655629139073, + "acc_norm_stderr": 0.03410435282008937 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.43283582089552236, + "acc_stderr": 0.0350349092367328, + "acc_norm": 0.43283582089552236, + "acc_norm_stderr": 0.0350349092367328 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2543352601156069, + "acc_stderr": 0.0332055644308557, + "acc_norm": 0.2543352601156069, + "acc_norm_stderr": 0.0332055644308557 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30158730158730157, + "acc_stderr": 0.0236369759961018, + "acc_norm": 0.30158730158730157, + "acc_norm_stderr": 0.0236369759961018 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2847222222222222, + "acc_stderr": 0.037738099906869334, + "acc_norm": 0.2847222222222222, + "acc_norm_stderr": 
0.037738099906869334 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.3786127167630058, + "acc_stderr": 0.026113749361310338, + "acc_norm": 0.3786127167630058, + "acc_norm_stderr": 0.026113749361310338 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.294478527607362, + "acc_stderr": 0.03581165790474082, + "acc_norm": 0.294478527607362, + "acc_norm_stderr": 0.03581165790474082 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.33641975308641975, + "acc_stderr": 0.026289734945952926, + "acc_norm": 0.33641975308641975, + "acc_norm_stderr": 0.026289734945952926 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.34196891191709844, + "acc_stderr": 0.034234651001042816, + "acc_norm": 0.34196891191709844, + "acc_norm_stderr": 0.034234651001042816 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.039994238792813365, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.039994238792813365 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3871559633027523, + "acc_stderr": 0.020884231992643453, + "acc_norm": 0.3871559633027523, + "acc_norm_stderr": 0.020884231992643453 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.21428571428571427, + "acc_stderr": 0.03670066451047182, + "acc_norm": 0.21428571428571427, + "acc_norm_stderr": 0.03670066451047182 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.35947712418300654, + "acc_stderr": 0.027475969910660952, + "acc_norm": 0.35947712418300654, + 
"acc_norm_stderr": 0.027475969910660952 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5041322314049587, + "acc_stderr": 0.04564198767432754, + "acc_norm": 0.5041322314049587, + "acc_norm_stderr": 0.04564198767432754 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.25, + "acc_stderr": 0.03523807393012047, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03523807393012047 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.32516339869281047, + "acc_stderr": 0.018950886770806308, + "acc_norm": 0.32516339869281047, + "acc_norm_stderr": 0.018950886770806308 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2553191489361702, + "acc_stderr": 0.02601199293090201, + "acc_norm": 0.2553191489361702, + "acc_norm_stderr": 0.02601199293090201 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25, + "acc_stderr": 0.04109974682633932, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04109974682633932 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.25, + "acc_stderr": 0.029531221160930918, + "acc_norm": 0.25, + "acc_norm_stderr": 0.029531221160930918 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2659217877094972, + "acc_stderr": 0.014776765066438888, + "acc_norm": 0.2659217877094972, + "acc_norm_stderr": 0.014776765066438888 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.2977941176470588, + "acc_stderr": 0.027778298701545436, + "acc_norm": 0.2977941176470588, + "acc_norm_stderr": 0.027778298701545436 + }, + 
"harness|ko_mmlu_security_studies|5": { + "acc": 0.3510204081632653, + "acc_stderr": 0.03055531675557364, + "acc_norm": 0.3510204081632653, + "acc_norm_stderr": 0.03055531675557364 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.4008438818565401, + "acc_stderr": 0.031900803894732356, + "acc_norm": 0.4008438818565401, + "acc_norm_stderr": 0.031900803894732356 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.32529335071707954, + "acc_stderr": 0.01196531153657153, + "acc_norm": 0.32529335071707954, + "acc_norm_stderr": 0.01196531153657153 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.35784313725490197, + "acc_stderr": 0.033644872860882996, + "acc_norm": 0.35784313725490197, + "acc_norm_stderr": 0.033644872860882996 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3393939393939394, + "acc_stderr": 0.03697442205031596, + "acc_norm": 0.3393939393939394, + "acc_norm_stderr": 0.03697442205031596 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2802937576499388, + "mc1_stderr": 0.01572313952460875, + "mc2": 0.44624551916312966, + "mc2_stderr": 0.015796983100879885 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.15820543093270367, + "acc_stderr": 0.012546672797728753, + "acc_norm": 0.179456906729634, + "acc_norm_stderr": 0.013193062031400433 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 
1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + 
"harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "MNCLLM/Mistral-7B-KoCot-Platypus-4096", + "model_sha": "bbb51b457200947001a0dc6e318a7d2d7e717197", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/MNCLLM/Mistral-7B-OP-over1k-grad0.3/result_2023-10-25 09:13:09.json b/MNCLLM/Mistral-7B-OP-over1k-grad0.3/result_2023-10-25 09:13:09.json new file mode 100644 index 0000000000000000000000000000000000000000..8442cf12fd8b00881c4e515eeac1338894a8c2b1 --- /dev/null +++ b/MNCLLM/Mistral-7B-OP-over1k-grad0.3/result_2023-10-25 09:13:09.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3651877133105802, + "acc_stderr": 0.014070265519268802, + "acc_norm": 0.4104095563139932, + "acc_norm_stderr": 0.014374922192642662 + }, + "harness|ko_hellaswag|10": { + "acc": 0.38309101772555265, + "acc_stderr": 0.004851466623601446, + "acc_norm": 0.4949213304122685, + "acc_norm_stderr": 0.0049895240030924425 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5029239766081871, + "acc_stderr": 0.03834759370936839, + "acc_norm": 0.5029239766081871, + "acc_norm_stderr": 0.03834759370936839 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5728155339805825, + "acc_stderr": 0.04897957737781168, + "acc_norm": 0.5728155339805825, + "acc_norm_stderr": 0.04897957737781168 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.47381864623243936, + "acc_stderr": 0.017855434554041982, + "acc_norm": 0.47381864623243936, + "acc_norm_stderr": 0.017855434554041982 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.32592592592592595, + "acc_stderr": 0.04049122041702506, + "acc_norm": 0.32592592592592595, + "acc_norm_stderr": 0.04049122041702506 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 
0.04229525846816506 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4765957446808511, + "acc_stderr": 0.03265019475033582, + "acc_norm": 0.4765957446808511, + "acc_norm_stderr": 0.03265019475033582 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4457831325301205, + "acc_stderr": 0.03869543323472101, + "acc_norm": 0.4457831325301205, + "acc_norm_stderr": 0.03869543323472101 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4790996784565916, + "acc_stderr": 0.028373270961069414, + "acc_norm": 0.4790996784565916, + "acc_norm_stderr": 0.028373270961069414 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.484304932735426, + "acc_stderr": 0.0335412657542081, + "acc_norm": 0.484304932735426, + "acc_norm_stderr": 0.0335412657542081 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4351145038167939, + "acc_stderr": 0.04348208051644858, + "acc_norm": 0.4351145038167939, + "acc_norm_stderr": 0.04348208051644858 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5404040404040404, + "acc_stderr": 0.035507024651313425, + "acc_norm": 0.5404040404040404, + "acc_norm_stderr": 0.035507024651313425 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.45517241379310347, + "acc_stderr": 0.04149886942192117, + "acc_norm": 0.45517241379310347, + "acc_norm_stderr": 0.04149886942192117 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.041583075330832865, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.041583075330832865 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.47478991596638653, + "acc_stderr": 0.0324371805513741, + "acc_norm": 0.47478991596638653, + "acc_norm_stderr": 0.0324371805513741 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4794871794871795, + "acc_stderr": 
0.025329663163489943, + "acc_norm": 0.4794871794871795, + "acc_norm_stderr": 0.025329663163489943 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.63, + "acc_stderr": 0.048523658709390974, + "acc_norm": 0.63, + "acc_norm_stderr": 0.048523658709390974 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5092592592592593, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.5092592592592593, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.03465304488406796, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.03465304488406796 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4645161290322581, + "acc_stderr": 0.028372287797962952, + "acc_norm": 0.4645161290322581, + "acc_norm_stderr": 0.028372287797962952 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6837606837606838, + "acc_stderr": 0.03046365674734026, + "acc_norm": 0.6837606837606838, + "acc_norm_stderr": 0.03046365674734026 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.45660377358490567, + "acc_stderr": 0.030656748696739428, + "acc_norm": 0.45660377358490567, + "acc_norm_stderr": 0.030656748696739428 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5272727272727272, + "acc_stderr": 0.04782001791380061, + "acc_norm": 0.5272727272727272, + "acc_norm_stderr": 0.04782001791380061 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.028317533496066475, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.028317533496066475 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.26490066225165565, + "acc_stderr": 0.03603038545360384, + "acc_norm": 0.26490066225165565, + "acc_norm_stderr": 0.03603038545360384 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 
0.6318407960199005, + "acc_stderr": 0.03410410565495302, + "acc_norm": 0.6318407960199005, + "acc_norm_stderr": 0.03410410565495302 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3583815028901734, + "acc_stderr": 0.036563436533531585, + "acc_norm": 0.3583815028901734, + "acc_norm_stderr": 0.036563436533531585 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.35978835978835977, + "acc_stderr": 0.024718075944129277, + "acc_norm": 0.35978835978835977, + "acc_norm_stderr": 0.024718075944129277 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3472222222222222, + "acc_stderr": 0.039812405437178615, + "acc_norm": 0.3472222222222222, + "acc_norm_stderr": 0.039812405437178615 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.62, + "acc_stderr": 0.048783173121456344, + "acc_norm": 0.62, + "acc_norm_stderr": 0.048783173121456344 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5317919075144508, + "acc_stderr": 0.026864624366756653, + "acc_norm": 0.5317919075144508, + "acc_norm_stderr": 0.026864624366756653 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.44171779141104295, + "acc_stderr": 0.039015918258361836, + "acc_norm": 0.44171779141104295, + "acc_norm_stderr": 0.039015918258361836 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4351851851851852, + "acc_stderr": 0.027586006221607718, + "acc_norm": 0.4351851851851852, + "acc_norm_stderr": 0.027586006221607718 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.48704663212435234, + "acc_stderr": 0.03607228061047749, + "acc_norm": 0.48704663212435234, + "acc_norm_stderr": 0.03607228061047749 + }, + 
"harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.04185774424022056, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.04185774424022056 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.46788990825688076, + "acc_stderr": 0.021393071222680814, + "acc_norm": 0.46788990825688076, + "acc_norm_stderr": 0.021393071222680814 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.373015873015873, + "acc_stderr": 0.04325506042017086, + "acc_norm": 0.373015873015873, + "acc_norm_stderr": 0.04325506042017086 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4477124183006536, + "acc_stderr": 0.028472938478033526, + "acc_norm": 0.4477124183006536, + "acc_norm_stderr": 0.028472938478033526 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.47, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.47, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6776859504132231, + "acc_stderr": 0.04266416363352168, + "acc_norm": 0.6776859504132231, + "acc_norm_stderr": 0.04266416363352168 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40789473684210525, + "acc_stderr": 0.03999309712777472, + "acc_norm": 0.40789473684210525, + "acc_norm_stderr": 0.03999309712777472 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4019607843137255, + "acc_stderr": 0.01983517648437538, + "acc_norm": 0.4019607843137255, + "acc_norm_stderr": 0.01983517648437538 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.37943262411347517, + "acc_stderr": 0.028947338851614105, + "acc_norm": 0.37943262411347517, + "acc_norm_stderr": 0.028947338851614105 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.04697113923010213, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.04697113923010213 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.03293377139415191, + "acc_norm": 
0.37037037037037035, + "acc_norm_stderr": 0.03293377139415191 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2100558659217877, + "acc_stderr": 0.013623755371333519, + "acc_norm": 0.2100558659217877, + "acc_norm_stderr": 0.013623755371333519 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.35294117647058826, + "acc_stderr": 0.0290294228156814, + "acc_norm": 0.35294117647058826, + "acc_norm_stderr": 0.0290294228156814 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4816326530612245, + "acc_stderr": 0.03198761546763126, + "acc_norm": 0.4816326530612245, + "acc_norm_stderr": 0.03198761546763126 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6118143459915611, + "acc_stderr": 0.03172295004332331, + "acc_norm": 0.6118143459915611, + "acc_norm_stderr": 0.03172295004332331 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.31877444589308995, + "acc_stderr": 0.011901895635786088, + "acc_norm": 0.31877444589308995, + "acc_norm_stderr": 0.011901895635786088 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.45098039215686275, + "acc_stderr": 0.03492406104163613, + "acc_norm": 0.45098039215686275, + "acc_norm_stderr": 0.03492406104163613 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.42424242424242425, + "acc_stderr": 0.038592681420702615, + "acc_norm": 0.42424242424242425, + "acc_norm_stderr": 0.038592681420702615 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2827417380660955, + "mc1_stderr": 0.015764770836777305, + "mc2": 0.4637619506541597, + "mc2_stderr": 0.015446438806039912 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.45808736717827625, 
+ "acc_stderr": 0.01712985211791114, + "acc_norm": 0.512396694214876, + "acc_norm_stderr": 0.017185069732676528 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + 
"harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "MNCLLM/Mistral-7B-OP-over1k-grad0.3", + "model_sha": "4053a441cc7724e204d047f88c2b1646a1d6aad2", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/MNCLLM/Mistral-7B-OP-over1k-grad1.0/result_2023-10-25 09:13:00.json b/MNCLLM/Mistral-7B-OP-over1k-grad1.0/result_2023-10-25 09:13:00.json new file mode 100644 index 0000000000000000000000000000000000000000..4dce8f6eca5f1cad8d8b042f6bad95a8d51298f1 --- /dev/null +++ b/MNCLLM/Mistral-7B-OP-over1k-grad1.0/result_2023-10-25 09:13:00.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.36177474402730375, + "acc_stderr": 0.014041957945038076, + "acc_norm": 0.41723549488054607, + "acc_norm_stderr": 0.014409825518403084 + }, + "harness|ko_hellaswag|10": { + "acc": 0.386476797450707, + "acc_stderr": 0.00485946798415526, + "acc_norm": 0.4965146385182235, + "acc_norm_stderr": 0.00498966018079217 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5146198830409356, + "acc_stderr": 
0.038331852752130254, + "acc_norm": 0.5146198830409356, + "acc_norm_stderr": 0.038331852752130254 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5242718446601942, + "acc_stderr": 0.049449010929737795, + "acc_norm": 0.5242718446601942, + "acc_norm_stderr": 0.049449010929737795 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4661558109833972, + "acc_stderr": 0.017838956009136802, + "acc_norm": 0.4661558109833972, + "acc_norm_stderr": 0.017838956009136802 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.362962962962963, + "acc_stderr": 0.04153948404742398, + "acc_norm": 0.362962962962963, + "acc_norm_stderr": 0.04153948404742398 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4425531914893617, + "acc_stderr": 0.03246956919789958, + "acc_norm": 0.4425531914893617, + "acc_norm_stderr": 0.03246956919789958 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42771084337349397, + "acc_stderr": 0.03851597683718533, + "acc_norm": 0.42771084337349397, + "acc_norm_stderr": 0.03851597683718533 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.45980707395498394, + "acc_stderr": 0.028306190403305696, + "acc_norm": 0.45980707395498394, + "acc_norm_stderr": 0.028306190403305696 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.45739910313901344, + "acc_stderr": 0.033435777055830646, + "acc_norm": 0.45739910313901344, + "acc_norm_stderr": 0.033435777055830646 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.46564885496183206, + "acc_stderr": 0.04374928560599738, + "acc_norm": 0.46564885496183206, + "acc_norm_stderr": 0.04374928560599738 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5151515151515151, + "acc_stderr": 
0.0356071651653106, + "acc_norm": 0.5151515151515151, + "acc_norm_stderr": 0.0356071651653106 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4689655172413793, + "acc_stderr": 0.04158632762097828, + "acc_norm": 0.4689655172413793, + "acc_norm_stderr": 0.04158632762097828 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.04336432707993177, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.04336432707993177 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.48739495798319327, + "acc_stderr": 0.03246816765752174, + "acc_norm": 0.48739495798319327, + "acc_norm_stderr": 0.03246816765752174 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.02529460802398648, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.02529460802398648 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.048262172941398944, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.048262172941398944 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.03465304488406796, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.03465304488406796 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.47096774193548385, + "acc_stderr": 0.028396016402761008, + "acc_norm": 0.47096774193548385, + "acc_norm_stderr": 0.028396016402761008 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7136752136752137, + "acc_stderr": 0.029614323690456648, + "acc_norm": 0.7136752136752137, + "acc_norm_stderr": 0.029614323690456648 + }, + "harness|ko_mmlu_clinical_knowledge|5": 
{ + "acc": 0.47924528301886793, + "acc_stderr": 0.030746349975723463, + "acc_norm": 0.47924528301886793, + "acc_norm_stderr": 0.030746349975723463 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4818181818181818, + "acc_stderr": 0.04785964010794917, + "acc_norm": 0.4818181818181818, + "acc_norm_stderr": 0.04785964010794917 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.34444444444444444, + "acc_stderr": 0.02897264888484427, + "acc_norm": 0.34444444444444444, + "acc_norm_stderr": 0.02897264888484427 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + "acc_stderr": 0.03684881521389023, + "acc_norm": 0.2847682119205298, + "acc_norm_stderr": 0.03684881521389023 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6169154228855721, + "acc_stderr": 0.034375193373382504, + "acc_norm": 0.6169154228855721, + "acc_norm_stderr": 0.034375193373382504 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.35260115606936415, + "acc_stderr": 0.036430371689585496, + "acc_norm": 0.35260115606936415, + "acc_norm_stderr": 0.036430371689585496 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.35978835978835977, + "acc_stderr": 0.024718075944129277, + "acc_norm": 0.35978835978835977, + "acc_norm_stderr": 0.024718075944129277 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3263888888888889, + "acc_stderr": 0.03921067198982266, + "acc_norm": 0.3263888888888889, + "acc_norm_stderr": 0.03921067198982266 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5086705202312138, + "acc_stderr": 0.0269150473553698, + "acc_norm": 0.5086705202312138, + "acc_norm_stderr": 0.0269150473553698 + }, + 
"harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.43558282208588955, + "acc_stderr": 0.03895632464138937, + "acc_norm": 0.43558282208588955, + "acc_norm_stderr": 0.03895632464138937 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4228395061728395, + "acc_stderr": 0.027487472980871598, + "acc_norm": 0.4228395061728395, + "acc_norm_stderr": 0.027487472980871598 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5233160621761658, + "acc_stderr": 0.036045136724422014, + "acc_norm": 0.5233160621761658, + "acc_norm_stderr": 0.036045136724422014 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.04185774424022056, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.04185774424022056 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.47706422018348627, + "acc_stderr": 0.021414757058175506, + "acc_norm": 0.47706422018348627, + "acc_norm_stderr": 0.021414757058175506 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4126984126984127, + "acc_stderr": 0.04403438954768177, + "acc_norm": 0.4126984126984127, + "acc_norm_stderr": 0.04403438954768177 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.46405228758169936, + "acc_stderr": 0.028555827516528787, + "acc_norm": 0.46405228758169936, + "acc_norm_stderr": 0.028555827516528787 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6694214876033058, + "acc_stderr": 0.04294340845212094, + "acc_norm": 0.6694214876033058, + "acc_norm_stderr": 0.04294340845212094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40789473684210525, + "acc_stderr": 0.03999309712777472, + "acc_norm": 0.40789473684210525, + 
"acc_norm_stderr": 0.03999309712777472 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.39052287581699346, + "acc_stderr": 0.019737008998094593, + "acc_norm": 0.39052287581699346, + "acc_norm_stderr": 0.019737008998094593 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3546099290780142, + "acc_stderr": 0.02853865002887864, + "acc_norm": 0.3546099290780142, + "acc_norm_stderr": 0.02853865002887864 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.04697113923010213, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.04697113923010213 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.033509916046960436, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.033509916046960436 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23575418994413408, + "acc_stderr": 0.014196375686290804, + "acc_norm": 0.23575418994413408, + "acc_norm_stderr": 0.014196375686290804 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.62, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.62, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3786764705882353, + "acc_stderr": 0.02946513363977613, + "acc_norm": 0.3786764705882353, + "acc_norm_stderr": 0.02946513363977613 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5020408163265306, + "acc_stderr": 0.0320089533497105, + "acc_norm": 0.5020408163265306, + "acc_norm_stderr": 0.0320089533497105 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6160337552742616, + "acc_stderr": 0.031658678064106674, + "acc_norm": 0.6160337552742616, + "acc_norm_stderr": 0.031658678064106674 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 
0.3116036505867014, + "acc_stderr": 0.011829039182849646, + "acc_norm": 0.3116036505867014, + "acc_norm_stderr": 0.011829039182849646 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.45588235294117646, + "acc_stderr": 0.03495624522015474, + "acc_norm": 0.45588235294117646, + "acc_norm_stderr": 0.03495624522015474 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.43636363636363634, + "acc_stderr": 0.03872592983524754, + "acc_norm": 0.43636363636363634, + "acc_norm_stderr": 0.03872592983524754 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.28886168910648713, + "mc1_stderr": 0.015866346401384308, + "mc2": 0.4667125764870672, + "mc2_stderr": 0.015432249803510123 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4722550177095632, + "acc_stderr": 0.01716386797945601, + "acc_norm": 0.5277449822904369, + "acc_norm_stderr": 0.017163867979456016 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + 
"harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "MNCLLM/Mistral-7B-OP-over1k-grad1.0", + "model_sha": "b03dd11e5e2e64d2c59bf37ab513947869606609", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git 
a/MNCLLM/Mistral-7B-OP-over500-grad1.0/result_2023-10-25 09:14:48.json b/MNCLLM/Mistral-7B-OP-over500-grad1.0/result_2023-10-25 09:14:48.json new file mode 100644 index 0000000000000000000000000000000000000000..6367399fd75b13c15ed399c2a4cdbabab27a6000 --- /dev/null +++ b/MNCLLM/Mistral-7B-OP-over500-grad1.0/result_2023-10-25 09:14:48.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.371160409556314, + "acc_stderr": 0.014117971901142817, + "acc_norm": 0.4283276450511945, + "acc_norm_stderr": 0.014460496367599022 + }, + "harness|ko_hellaswag|10": { + "acc": 0.37950607448715395, + "acc_stderr": 0.004842723234022034, + "acc_norm": 0.481876120294762, + "acc_norm_stderr": 0.00498650229693118 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4619883040935672, + "acc_stderr": 0.03823727092882307, + "acc_norm": 0.4619883040935672, + "acc_norm_stderr": 0.03823727092882307 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6019417475728155, + "acc_stderr": 0.048467482539772386, + "acc_norm": 0.6019417475728155, + "acc_norm_stderr": 0.048467482539772386 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4840357598978289, + "acc_stderr": 0.017870847506081717, + "acc_norm": 0.4840357598978289, + "acc_norm_stderr": 0.017870847506081717 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37777777777777777, + "acc_stderr": 0.04188307537595853, + "acc_norm": 0.37777777777777777, + "acc_norm_stderr": 0.04188307537595853 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4085106382978723, + "acc_stderr": 0.03213418026701576, + "acc_norm": 0.4085106382978723, + "acc_norm_stderr": 0.03213418026701576 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4457831325301205, + "acc_stderr": 0.038695433234721015, + "acc_norm": 0.4457831325301205, + "acc_norm_stderr": 
0.038695433234721015 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.41479099678456594, + "acc_stderr": 0.027982680459759556, + "acc_norm": 0.41479099678456594, + "acc_norm_stderr": 0.027982680459759556 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4663677130044843, + "acc_stderr": 0.033481800170603065, + "acc_norm": 0.4663677130044843, + "acc_norm_stderr": 0.033481800170603065 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.366412213740458, + "acc_stderr": 0.04225875451969638, + "acc_norm": 0.366412213740458, + "acc_norm_stderr": 0.04225875451969638 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5606060606060606, + "acc_stderr": 0.035360859475294805, + "acc_norm": 0.5606060606060606, + "acc_norm_stderr": 0.035360859475294805 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3793103448275862, + "acc_stderr": 0.04043461861916747, + "acc_norm": 0.3793103448275862, + "acc_norm_stderr": 0.04043461861916747 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.04280105837364396, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.04280105837364396 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.46218487394957986, + "acc_stderr": 0.032385469487589795, + "acc_norm": 0.46218487394957986, + "acc_norm_stderr": 0.032385469487589795 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4076923076923077, + "acc_stderr": 0.024915243985987837, + "acc_norm": 0.4076923076923077, + "acc_norm_stderr": 0.024915243985987837 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.63, + "acc_stderr": 0.048523658709390974, + "acc_norm": 0.63, + "acc_norm_stderr": 0.048523658709390974 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + 
"acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5092592592592593, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.5092592592592593, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.03481904844438803, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.03481904844438803 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.43548387096774194, + "acc_stderr": 0.02820622559150274, + "acc_norm": 0.43548387096774194, + "acc_norm_stderr": 0.02820622559150274 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.688034188034188, + "acc_stderr": 0.030351527323344944, + "acc_norm": 0.688034188034188, + "acc_norm_stderr": 0.030351527323344944 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.45660377358490567, + "acc_stderr": 0.030656748696739428, + "acc_norm": 0.45660377358490567, + "acc_norm_stderr": 0.030656748696739428 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5272727272727272, + "acc_stderr": 0.04782001791380061, + "acc_norm": 0.5272727272727272, + "acc_norm_stderr": 0.04782001791380061 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.34814814814814815, + "acc_stderr": 0.029045600290616255, + "acc_norm": 0.34814814814814815, + "acc_norm_stderr": 0.029045600290616255 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3708609271523179, + "acc_stderr": 0.03943966699183629, + "acc_norm": 0.3708609271523179, + "acc_norm_stderr": 0.03943966699183629 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5870646766169154, + "acc_stderr": 0.03481520803367348, + "acc_norm": 0.5870646766169154, + "acc_norm_stderr": 0.03481520803367348 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.36416184971098264, + "acc_stderr": 0.03669072477416908, + "acc_norm": 0.36416184971098264, + "acc_norm_stderr": 0.03669072477416908 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 
0.35978835978835977, + "acc_stderr": 0.024718075944129277, + "acc_norm": 0.35978835978835977, + "acc_norm_stderr": 0.024718075944129277 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3402777777777778, + "acc_stderr": 0.039621355734862175, + "acc_norm": 0.3402777777777778, + "acc_norm_stderr": 0.039621355734862175 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.57, + "acc_stderr": 0.04975698519562427, + "acc_norm": 0.57, + "acc_norm_stderr": 0.04975698519562427 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.48265895953757226, + "acc_stderr": 0.026902900458666633, + "acc_norm": 0.48265895953757226, + "acc_norm_stderr": 0.026902900458666633 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4233128834355828, + "acc_stderr": 0.03881891213334383, + "acc_norm": 0.4233128834355828, + "acc_norm_stderr": 0.03881891213334383 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4351851851851852, + "acc_stderr": 0.0275860062216077, + "acc_norm": 0.4351851851851852, + "acc_norm_stderr": 0.0275860062216077 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.46113989637305697, + "acc_stderr": 0.035975244117345775, + "acc_norm": 0.46113989637305697, + "acc_norm_stderr": 0.035975244117345775 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.04185774424022056, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.04185774424022056 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.45321100917431195, + "acc_stderr": 0.021343255165546034, + "acc_norm": 0.45321100917431195, + "acc_norm_stderr": 0.021343255165546034 + }, + 
"harness|ko_mmlu_formal_logic|5": { + "acc": 0.2698412698412698, + "acc_stderr": 0.03970158273235172, + "acc_norm": 0.2698412698412698, + "acc_norm_stderr": 0.03970158273235172 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.41830065359477125, + "acc_stderr": 0.02824513402438729, + "acc_norm": 0.41830065359477125, + "acc_norm_stderr": 0.02824513402438729 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6694214876033058, + "acc_stderr": 0.04294340845212094, + "acc_norm": 0.6694214876033058, + "acc_norm_stderr": 0.04294340845212094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40789473684210525, + "acc_stderr": 0.03999309712777472, + "acc_norm": 0.40789473684210525, + "acc_norm_stderr": 0.03999309712777472 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4084967320261438, + "acc_stderr": 0.019886221037501872, + "acc_norm": 0.4084967320261438, + "acc_norm_stderr": 0.019886221037501872 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3475177304964539, + "acc_stderr": 0.028406627809590954, + "acc_norm": 0.3475177304964539, + "acc_norm_stderr": 0.028406627809590954 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.38392857142857145, + "acc_stderr": 0.046161430750285455, + "acc_norm": 0.38392857142857145, + "acc_norm_stderr": 0.046161430750285455 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.03167468706828978, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.03167468706828978 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23575418994413408, + "acc_stderr": 0.014196375686290804, + "acc_norm": 0.23575418994413408, + "acc_norm_stderr": 0.014196375686290804 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + 
"acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.33088235294117646, + "acc_stderr": 0.028582709753898435, + "acc_norm": 0.33088235294117646, + "acc_norm_stderr": 0.028582709753898435 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3510204081632653, + "acc_stderr": 0.03055531675557364, + "acc_norm": 0.3510204081632653, + "acc_norm_stderr": 0.03055531675557364 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5569620253164557, + "acc_stderr": 0.03233532777533484, + "acc_norm": 0.5569620253164557, + "acc_norm_stderr": 0.03233532777533484 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.32073011734028684, + "acc_stderr": 0.011921199991782629, + "acc_norm": 0.32073011734028684, + "acc_norm_stderr": 0.011921199991782629 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.45098039215686275, + "acc_stderr": 0.03492406104163614, + "acc_norm": 0.45098039215686275, + "acc_norm_stderr": 0.03492406104163614 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4484848484848485, + "acc_stderr": 0.038835659779569286, + "acc_norm": 0.4484848484848485, + "acc_norm_stderr": 0.038835659779569286 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.27906976744186046, + "mc1_stderr": 0.01570210709062788, + "mc2": 0.46295306302174644, + "mc2_stderr": 0.015320970978421385 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4085005903187721, + "acc_stderr": 0.01690006287942712, + "acc_norm": 0.4793388429752066, + "acc_norm_stderr": 0.017175671279836442 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + 
"harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + 
"harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "MNCLLM/Mistral-7B-OP-over500-grad1.0", + "model_sha": "f7789c5af9b3b166070a886207090228deccf9d6", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/MNCLLM/Mistral-7B-orca-platy-over1k/result_2023-10-25 06:43:45.json b/MNCLLM/Mistral-7B-orca-platy-over1k/result_2023-10-25 06:43:45.json new file mode 100644 index 0000000000000000000000000000000000000000..55c19cd09090d3dddcb47da2f37cb10f8544237a --- /dev/null +++ b/MNCLLM/Mistral-7B-orca-platy-over1k/result_2023-10-25 06:43:45.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.36177474402730375, + "acc_stderr": 0.014041957945038076, + "acc_norm": 0.41723549488054607, + "acc_norm_stderr": 0.014409825518403084 + }, + "harness|ko_hellaswag|10": { + "acc": 0.386476797450707, + "acc_stderr": 0.00485946798415526, + "acc_norm": 0.4965146385182235, + "acc_norm_stderr": 0.00498966018079217 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5146198830409356, + "acc_stderr": 0.038331852752130254, + "acc_norm": 0.5146198830409356, + "acc_norm_stderr": 0.038331852752130254 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5242718446601942, + "acc_stderr": 0.049449010929737795, + "acc_norm": 0.5242718446601942, + "acc_norm_stderr": 0.049449010929737795 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4674329501915709, + 
"acc_stderr": 0.017841995750520857, + "acc_norm": 0.4674329501915709, + "acc_norm_stderr": 0.017841995750520857 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.362962962962963, + "acc_stderr": 0.04153948404742398, + "acc_norm": 0.362962962962963, + "acc_norm_stderr": 0.04153948404742398 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4425531914893617, + "acc_stderr": 0.03246956919789958, + "acc_norm": 0.4425531914893617, + "acc_norm_stderr": 0.03246956919789958 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42771084337349397, + "acc_stderr": 0.03851597683718533, + "acc_norm": 0.42771084337349397, + "acc_norm_stderr": 0.03851597683718533 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.45980707395498394, + "acc_stderr": 0.028306190403305696, + "acc_norm": 0.45980707395498394, + "acc_norm_stderr": 0.028306190403305696 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.45739910313901344, + "acc_stderr": 0.033435777055830646, + "acc_norm": 0.45739910313901344, + "acc_norm_stderr": 0.033435777055830646 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.46564885496183206, + "acc_stderr": 0.04374928560599738, + "acc_norm": 0.46564885496183206, + "acc_norm_stderr": 0.04374928560599738 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5151515151515151, + "acc_stderr": 0.0356071651653106, + "acc_norm": 0.5151515151515151, + "acc_norm_stderr": 0.0356071651653106 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4689655172413793, + "acc_stderr": 0.04158632762097828, + "acc_norm": 0.4689655172413793, + "acc_norm_stderr": 0.04158632762097828 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 
0.2549019607843137, + "acc_stderr": 0.04336432707993177, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.04336432707993177 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.48739495798319327, + "acc_stderr": 0.03246816765752174, + "acc_norm": 0.48739495798319327, + "acc_norm_stderr": 0.03246816765752174 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.02529460802398648, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.02529460802398648 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.048262172941398944, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.048262172941398944 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.03465304488406796, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.03465304488406796 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.47096774193548385, + "acc_stderr": 0.028396016402761008, + "acc_norm": 0.47096774193548385, + "acc_norm_stderr": 0.028396016402761008 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7136752136752137, + "acc_stderr": 0.029614323690456648, + "acc_norm": 0.7136752136752137, + "acc_norm_stderr": 0.029614323690456648 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.47924528301886793, + "acc_stderr": 0.030746349975723463, + "acc_norm": 0.47924528301886793, + "acc_norm_stderr": 0.030746349975723463 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4818181818181818, + "acc_stderr": 0.04785964010794917, + "acc_norm": 0.4818181818181818, + "acc_norm_stderr": 0.04785964010794917 + }, + 
"harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.34444444444444444, + "acc_stderr": 0.02897264888484427, + "acc_norm": 0.34444444444444444, + "acc_norm_stderr": 0.02897264888484427 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + "acc_stderr": 0.03684881521389023, + "acc_norm": 0.2847682119205298, + "acc_norm_stderr": 0.03684881521389023 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6169154228855721, + "acc_stderr": 0.034375193373382504, + "acc_norm": 0.6169154228855721, + "acc_norm_stderr": 0.034375193373382504 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.35260115606936415, + "acc_stderr": 0.036430371689585496, + "acc_norm": 0.35260115606936415, + "acc_norm_stderr": 0.036430371689585496 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.35978835978835977, + "acc_stderr": 0.024718075944129277, + "acc_norm": 0.35978835978835977, + "acc_norm_stderr": 0.024718075944129277 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3263888888888889, + "acc_stderr": 0.03921067198982266, + "acc_norm": 0.3263888888888889, + "acc_norm_stderr": 0.03921067198982266 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5086705202312138, + "acc_stderr": 0.0269150473553698, + "acc_norm": 0.5086705202312138, + "acc_norm_stderr": 0.0269150473553698 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.43558282208588955, + "acc_stderr": 0.03895632464138937, + "acc_norm": 0.43558282208588955, + "acc_norm_stderr": 0.03895632464138937 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4228395061728395, + "acc_stderr": 0.027487472980871598, + "acc_norm": 0.4228395061728395, + 
"acc_norm_stderr": 0.027487472980871598 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5233160621761658, + "acc_stderr": 0.036045136724422014, + "acc_norm": 0.5233160621761658, + "acc_norm_stderr": 0.036045136724422014 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.04185774424022056, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.04185774424022056 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.47706422018348627, + "acc_stderr": 0.021414757058175506, + "acc_norm": 0.47706422018348627, + "acc_norm_stderr": 0.021414757058175506 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4126984126984127, + "acc_stderr": 0.04403438954768177, + "acc_norm": 0.4126984126984127, + "acc_norm_stderr": 0.04403438954768177 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.46405228758169936, + "acc_stderr": 0.028555827516528787, + "acc_norm": 0.46405228758169936, + "acc_norm_stderr": 0.028555827516528787 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6694214876033058, + "acc_stderr": 0.04294340845212094, + "acc_norm": 0.6694214876033058, + "acc_norm_stderr": 0.04294340845212094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40789473684210525, + "acc_stderr": 0.03999309712777472, + "acc_norm": 0.40789473684210525, + "acc_norm_stderr": 0.03999309712777472 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.39052287581699346, + "acc_stderr": 0.019737008998094593, + "acc_norm": 0.39052287581699346, + "acc_norm_stderr": 0.019737008998094593 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3546099290780142, + "acc_stderr": 
0.02853865002887864, + "acc_norm": 0.3546099290780142, + "acc_norm_stderr": 0.02853865002887864 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.04697113923010213, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.04697113923010213 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.033509916046960436, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.033509916046960436 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23575418994413408, + "acc_stderr": 0.014196375686290804, + "acc_norm": 0.23575418994413408, + "acc_norm_stderr": 0.014196375686290804 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.62, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.62, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3786764705882353, + "acc_stderr": 0.02946513363977613, + "acc_norm": 0.3786764705882353, + "acc_norm_stderr": 0.02946513363977613 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5020408163265306, + "acc_stderr": 0.0320089533497105, + "acc_norm": 0.5020408163265306, + "acc_norm_stderr": 0.0320089533497105 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6160337552742616, + "acc_stderr": 0.031658678064106674, + "acc_norm": 0.6160337552742616, + "acc_norm_stderr": 0.031658678064106674 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3116036505867014, + "acc_stderr": 0.011829039182849646, + "acc_norm": 0.3116036505867014, + "acc_norm_stderr": 0.011829039182849646 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.45588235294117646, + "acc_stderr": 0.03495624522015474, + "acc_norm": 0.45588235294117646, + "acc_norm_stderr": 0.03495624522015474 + }, + 
"harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.43636363636363634, + "acc_stderr": 0.03872592983524754, + "acc_norm": 0.43636363636363634, + "acc_norm_stderr": 0.03872592983524754 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.28886168910648713, + "mc1_stderr": 0.015866346401384308, + "mc2": 0.4667008752277657, + "mc2_stderr": 0.015432114393165898 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4722550177095632, + "acc_stderr": 0.01716386797945601, + "acc_norm": 0.5277449822904369, + "acc_norm_stderr": 0.017163867979456016 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + 
"harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "MNCLLM/Mistral-7B-orca-platy-over1k", + "model_sha": "65fda49b7459f17a98b8d1c5136001698f647919", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/MRAIRR/MRAI_synatra_7B_v1/result_2023-11-24 16:23:24.json b/MRAIRR/MRAI_synatra_7B_v1/result_2023-11-24 16:23:24.json new file mode 100644 index 0000000000000000000000000000000000000000..e0f0e2e5c4709f13f564565fb92d37ad2c318e5b --- /dev/null +++ b/MRAIRR/MRAI_synatra_7B_v1/result_2023-11-24 16:23:24.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 
0.3839590443686007, + "acc_stderr": 0.01421244498065189, + "acc_norm": 0.4377133105802048, + "acc_norm_stderr": 0.014497573881108282 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3868751244771958, + "acc_stderr": 0.004860393011974675, + "acc_norm": 0.4931288587930691, + "acc_norm_stderr": 0.00498931022827612 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5087719298245614, + "acc_stderr": 0.03834234744164993, + "acc_norm": 0.5087719298245614, + "acc_norm_stderr": 0.03834234744164993 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5922330097087378, + "acc_stderr": 0.04865777570410769, + "acc_norm": 0.5922330097087378, + "acc_norm_stderr": 0.04865777570410769 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5312899106002554, + "acc_stderr": 0.01784491809046855, + "acc_norm": 0.5312899106002554, + "acc_norm_stderr": 0.01784491809046855 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37777777777777777, + "acc_stderr": 0.04188307537595853, + "acc_norm": 0.37777777777777777, + "acc_norm_stderr": 0.04188307537595853 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39148936170212767, + "acc_stderr": 0.03190701242326812, + "acc_norm": 0.39148936170212767, + "acc_norm_stderr": 0.03190701242326812 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4457831325301205, + "acc_stderr": 0.03869543323472101, + "acc_norm": 0.4457831325301205, + "acc_norm_stderr": 0.03869543323472101 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.47266881028938906, + "acc_stderr": 0.028355633568328174, + "acc_norm": 0.47266881028938906, + "acc_norm_stderr": 0.028355633568328174 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.48878923766816146, + "acc_stderr": 0.033549366530984746, + "acc_norm": 0.48878923766816146, + "acc_norm_stderr": 0.033549366530984746 + }, + "harness|ko_mmlu_human_sexuality|5": { + 
"acc": 0.46564885496183206, + "acc_stderr": 0.04374928560599738, + "acc_norm": 0.46564885496183206, + "acc_norm_stderr": 0.04374928560599738 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.035402943770953675, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.035402943770953675 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.46206896551724136, + "acc_stderr": 0.04154659671707546, + "acc_norm": 0.46206896551724136, + "acc_norm_stderr": 0.04154659671707546 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.043898699568087785, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.043898699568087785 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4369747899159664, + "acc_stderr": 0.03221943636566196, + "acc_norm": 0.4369747899159664, + "acc_norm_stderr": 0.03221943636566196 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.41025641025641024, + "acc_stderr": 0.02493931390694077, + "acc_norm": 0.41025641025641024, + "acc_norm_stderr": 0.02493931390694077 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.048262172941398944, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.048262172941398944 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3694581280788177, + "acc_stderr": 0.03395970381998574, + "acc_norm": 0.3694581280788177, + "acc_norm_stderr": 0.03395970381998574 + }, + 
"harness|ko_mmlu_high_school_biology|5": { + "acc": 0.432258064516129, + "acc_stderr": 0.02818173972001942, + "acc_norm": 0.432258064516129, + "acc_norm_stderr": 0.02818173972001942 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6709401709401709, + "acc_stderr": 0.030782321577688173, + "acc_norm": 0.6709401709401709, + "acc_norm_stderr": 0.030782321577688173 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4641509433962264, + "acc_stderr": 0.030693675018458003, + "acc_norm": 0.4641509433962264, + "acc_norm_stderr": 0.030693675018458003 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4909090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.4909090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3296296296296296, + "acc_stderr": 0.028661201116524582, + "acc_norm": 0.3296296296296296, + "acc_norm_stderr": 0.028661201116524582 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33112582781456956, + "acc_stderr": 0.038425817186598696, + "acc_norm": 0.33112582781456956, + "acc_norm_stderr": 0.038425817186598696 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5671641791044776, + "acc_stderr": 0.03503490923673282, + "acc_norm": 0.5671641791044776, + "acc_norm_stderr": 0.03503490923673282 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3872832369942196, + "acc_stderr": 0.03714325906302065, + "acc_norm": 0.3872832369942196, + "acc_norm_stderr": 0.03714325906302065 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.31746031746031744, + "acc_stderr": 0.023973861998992072, + "acc_norm": 0.31746031746031744, + "acc_norm_stderr": 0.023973861998992072 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2986111111111111, + "acc_stderr": 0.03827052357950756, + "acc_norm": 0.2986111111111111, + "acc_norm_stderr": 0.03827052357950756 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + 
"acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.62, + "acc_stderr": 0.04878317312145634, + "acc_norm": 0.62, + "acc_norm_stderr": 0.04878317312145634 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.45375722543352603, + "acc_stderr": 0.02680372058320619, + "acc_norm": 0.45375722543352603, + "acc_norm_stderr": 0.02680372058320619 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4785276073619632, + "acc_stderr": 0.03924746876751129, + "acc_norm": 0.4785276073619632, + "acc_norm_stderr": 0.03924746876751129 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.44753086419753085, + "acc_stderr": 0.027667138569422708, + "acc_norm": 0.44753086419753085, + "acc_norm_stderr": 0.027667138569422708 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.47668393782383417, + "acc_stderr": 0.03604513672442206, + "acc_norm": 0.47668393782383417, + "acc_norm_stderr": 0.03604513672442206 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3157894736842105, + "acc_stderr": 0.04372748290278008, + "acc_norm": 0.3157894736842105, + "acc_norm_stderr": 0.04372748290278008 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.44220183486238535, + "acc_stderr": 0.021293613207520205, + "acc_norm": 0.44220183486238535, + "acc_norm_stderr": 0.021293613207520205 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3968253968253968, + "acc_stderr": 0.04375888492727061, + "acc_norm": 0.3968253968253968, + "acc_norm_stderr": 0.04375888492727061 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.46405228758169936, + "acc_stderr": 0.028555827516528784, + "acc_norm": 0.46405228758169936, + "acc_norm_stderr": 0.028555827516528784 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.49, + "acc_stderr": 
0.05024183937956911, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6115702479338843, + "acc_stderr": 0.04449270350068382, + "acc_norm": 0.6115702479338843, + "acc_norm_stderr": 0.04449270350068382 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3618421052631579, + "acc_stderr": 0.03910525752849726, + "acc_norm": 0.3618421052631579, + "acc_norm_stderr": 0.03910525752849726 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.369281045751634, + "acc_stderr": 0.019524316744866342, + "acc_norm": 0.369281045751634, + "acc_norm_stderr": 0.019524316744866342 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.30851063829787234, + "acc_stderr": 0.02755336616510137, + "acc_norm": 0.30851063829787234, + "acc_norm_stderr": 0.02755336616510137 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4375, + "acc_stderr": 0.04708567521880525, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.04708567521880525 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.03350991604696043, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.03350991604696043 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.25139664804469275, + "acc_stderr": 0.014508979453553996, + "acc_norm": 0.25139664804469275, + "acc_norm_stderr": 0.014508979453553996 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.55, + "acc_stderr": 0.049999999999999996, + "acc_norm": 0.55, + "acc_norm_stderr": 0.049999999999999996 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.33455882352941174, + "acc_stderr": 0.02866199620233531, + "acc_norm": 0.33455882352941174, + "acc_norm_stderr": 0.02866199620233531 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 
0.5142857142857142, + "acc_stderr": 0.031996152328062855, + "acc_norm": 0.5142857142857142, + "acc_norm_stderr": 0.031996152328062855 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5822784810126582, + "acc_stderr": 0.03210353032241268, + "acc_norm": 0.5822784810126582, + "acc_norm_stderr": 0.03210353032241268 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3272490221642764, + "acc_stderr": 0.011983819806464754, + "acc_norm": 0.3272490221642764, + "acc_norm_stderr": 0.011983819806464754 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4264705882352941, + "acc_stderr": 0.034711579079534254, + "acc_norm": 0.4264705882352941, + "acc_norm_stderr": 0.034711579079534254 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.03903698647748441, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.03903698647748441 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2913096695226438, + "mc1_stderr": 0.01590598704818483, + "mc2": 0.45897203021636795, + "mc2_stderr": 0.015978279165358995 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.41440377804014167, + "acc_stderr": 0.016936583383943642, + "acc_norm": 0.44155844155844154, + "acc_norm_stderr": 0.0170725258755631 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 
1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + 
"model_name": "MRAIRR/MRAI_synatra_7B_v1", + "model_sha": "2232a0c5aaffdf526fffd3516ff28b7bf6679378", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/MRAIRR/Navistral/result_2023-11-06 10:51:25.json b/MRAIRR/Navistral/result_2023-11-06 10:51:25.json new file mode 100644 index 0000000000000000000000000000000000000000..0cb8e571b7ad478d73826fb64b17d2837b502bea --- /dev/null +++ b/MRAIRR/Navistral/result_2023-11-06 10:51:25.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2721843003412969, + "acc_stderr": 0.013006600406423707, + "acc_norm": 0.31143344709897613, + "acc_norm_stderr": 0.013532472099850942 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3149770961959769, + "acc_stderr": 0.004635574339176323, + "acc_norm": 0.382194781915953, + "acc_norm_stderr": 0.004849306998727776 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4502923976608187, + "acc_stderr": 0.038158273659132366, + "acc_norm": 0.4502923976608187, + "acc_norm_stderr": 0.038158273659132366 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.47572815533980584, + "acc_stderr": 0.04944901092973781, + "acc_norm": 0.47572815533980584, + "acc_norm_stderr": 0.04944901092973781 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4125159642401022, + "acc_stderr": 0.017604149108671936, + "acc_norm": 0.4125159642401022, + "acc_norm_stderr": 0.017604149108671936 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.362962962962963, + "acc_stderr": 0.041539484047424, + "acc_norm": 0.362962962962963, + "acc_norm_stderr": 0.041539484047424 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.35319148936170214, + "acc_stderr": 0.03124532520276193, + 
"acc_norm": 0.35319148936170214, + "acc_norm_stderr": 0.03124532520276193 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3614457831325301, + "acc_stderr": 0.03740059382029319, + "acc_norm": 0.3614457831325301, + "acc_norm_stderr": 0.03740059382029319 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.47266881028938906, + "acc_stderr": 0.028355633568328188, + "acc_norm": 0.47266881028938906, + "acc_norm_stderr": 0.028355633568328188 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3632286995515695, + "acc_stderr": 0.032277904428505, + "acc_norm": 0.3632286995515695, + "acc_norm_stderr": 0.032277904428505 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4198473282442748, + "acc_stderr": 0.04328577215262972, + "acc_norm": 0.4198473282442748, + "acc_norm_stderr": 0.04328577215262972 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.41, + "acc_stderr": 0.04943110704237103, + "acc_norm": 0.41, + "acc_norm_stderr": 0.04943110704237103 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.4898989898989899, + "acc_stderr": 0.035616254886737454, + "acc_norm": 0.4898989898989899, + "acc_norm_stderr": 0.035616254886737454 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4827586206896552, + "acc_stderr": 0.04164188720169377, + "acc_norm": 0.4827586206896552, + "acc_norm_stderr": 0.04164188720169377 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.04158307533083286, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.04158307533083286 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5, + "acc_stderr": 0.032478490123081544, + "acc_norm": 0.5, + "acc_norm_stderr": 0.032478490123081544 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.41794871794871796, + "acc_stderr": 0.025007329882461227, + "acc_norm": 0.41794871794871796, + "acc_norm_stderr": 0.025007329882461227 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.48, + "acc_stderr": 
0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.43349753694581283, + "acc_stderr": 0.03486731727419871, + "acc_norm": 0.43349753694581283, + "acc_norm_stderr": 0.03486731727419871 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.38064516129032255, + "acc_stderr": 0.02762171783290704, + "acc_norm": 0.38064516129032255, + "acc_norm_stderr": 0.02762171783290704 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6538461538461539, + "acc_stderr": 0.031166957367235903, + "acc_norm": 0.6538461538461539, + "acc_norm_stderr": 0.031166957367235903 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4339622641509434, + "acc_stderr": 0.03050329201334259, + "acc_norm": 0.4339622641509434, + "acc_norm_stderr": 0.03050329201334259 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.44545454545454544, + "acc_stderr": 0.047605488214603246, + "acc_norm": 0.44545454545454544, + "acc_norm_stderr": 0.047605488214603246 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.31851851851851853, + "acc_stderr": 0.02840653309060846, + "acc_norm": 0.31851851851851853, + "acc_norm_stderr": 0.02840653309060846 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2781456953642384, + "acc_stderr": 0.03658603262763743, + "acc_norm": 0.2781456953642384, + "acc_norm_stderr": 0.03658603262763743 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5472636815920398, + "acc_stderr": 0.035197027175769155, + "acc_norm": 0.5472636815920398, + "acc_norm_stderr": 0.035197027175769155 + }, + "harness|ko_mmlu_college_medicine|5": { 
+ "acc": 0.3699421965317919, + "acc_stderr": 0.03681229633394319, + "acc_norm": 0.3699421965317919, + "acc_norm_stderr": 0.03681229633394319 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3386243386243386, + "acc_stderr": 0.024373197867983056, + "acc_norm": 0.3386243386243386, + "acc_norm_stderr": 0.024373197867983056 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2638888888888889, + "acc_stderr": 0.03685651095897532, + "acc_norm": 0.2638888888888889, + "acc_norm_stderr": 0.03685651095897532 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4190751445086705, + "acc_stderr": 0.02656417811142263, + "acc_norm": 0.4190751445086705, + "acc_norm_stderr": 0.02656417811142263 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4785276073619632, + "acc_stderr": 0.03924746876751129, + "acc_norm": 0.4785276073619632, + "acc_norm_stderr": 0.03924746876751129 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.38580246913580246, + "acc_stderr": 0.027085401226132143, + "acc_norm": 0.38580246913580246, + "acc_norm_stderr": 0.027085401226132143 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.39378238341968913, + "acc_stderr": 0.03526077095548237, + "acc_norm": 0.39378238341968913, + "acc_norm_stderr": 0.03526077095548237 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.30701754385964913, + "acc_stderr": 0.043391383225798594, + "acc_norm": 0.30701754385964913, + "acc_norm_stderr": 0.043391383225798594 + }, + 
"harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3779816513761468, + "acc_stderr": 0.020789187066728117, + "acc_norm": 0.3779816513761468, + "acc_norm_stderr": 0.020789187066728117 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.36507936507936506, + "acc_stderr": 0.043062412591271526, + "acc_norm": 0.36507936507936506, + "acc_norm_stderr": 0.043062412591271526 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.46078431372549017, + "acc_stderr": 0.028541722692618874, + "acc_norm": 0.46078431372549017, + "acc_norm_stderr": 0.028541722692618874 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6446280991735537, + "acc_stderr": 0.0436923632657398, + "acc_norm": 0.6446280991735537, + "acc_norm_stderr": 0.0436923632657398 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3157894736842105, + "acc_stderr": 0.037827289808654706, + "acc_norm": 0.3157894736842105, + "acc_norm_stderr": 0.037827289808654706 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3300653594771242, + "acc_stderr": 0.019023726160724553, + "acc_norm": 0.3300653594771242, + "acc_norm_stderr": 0.019023726160724553 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.02812163604063988, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.02812163604063988 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.41964285714285715, + "acc_stderr": 0.04684099321077106, + "acc_norm": 0.41964285714285715, + "acc_norm_stderr": 0.04684099321077106 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.03350991604696043, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.03350991604696043 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2871508379888268, + "acc_stderr": 0.015131608849963745, + "acc_norm": 
0.2871508379888268, + "acc_norm_stderr": 0.015131608849963745 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4264705882352941, + "acc_stderr": 0.030042615832714864, + "acc_norm": 0.4264705882352941, + "acc_norm_stderr": 0.030042615832714864 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.46122448979591835, + "acc_stderr": 0.03191282052669276, + "acc_norm": 0.46122448979591835, + "acc_norm_stderr": 0.03191282052669276 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5189873417721519, + "acc_stderr": 0.03252375148090447, + "acc_norm": 0.5189873417721519, + "acc_norm_stderr": 0.03252375148090447 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.31681877444589307, + "acc_stderr": 0.011882349954723008, + "acc_norm": 0.31681877444589307, + "acc_norm_stderr": 0.011882349954723008 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.3480392156862745, + "acc_stderr": 0.03343311240488419, + "acc_norm": 0.3480392156862745, + "acc_norm_stderr": 0.03343311240488419 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3515151515151515, + "acc_stderr": 0.0372820699868265, + "acc_norm": 0.3515151515151515, + "acc_norm_stderr": 0.0372820699868265 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.29008567931456547, + "mc1_stderr": 0.01588623687420952, + "mc2": 0.4540203721938441, + "mc2_stderr": 0.015668476056429896 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3990554899645809, + "acc_stderr": 0.016836377292849296, + "acc_norm": 0.4427390791027155, + "acc_norm_stderr": 0.017077254131556217 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + 
"harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + 
"harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "MRAIRR/Navistral", + "model_sha": "591fda7ce94712932e454509cf3ea4c24d9dd619", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/MRAIRR/Nextstage/result_2023-11-01 03:26:01.json b/MRAIRR/Nextstage/result_2023-11-01 03:26:01.json new file mode 100644 index 0000000000000000000000000000000000000000..363f76a4c65156bd9da909fea2db74503f09ca57 --- /dev/null +++ b/MRAIRR/Nextstage/result_2023-11-01 03:26:01.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2773037542662116, + "acc_stderr": 0.013082095839059374, + "acc_norm": 0.32593856655290104, + "acc_norm_stderr": 0.013697432466693246 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3405696076478789, + "acc_stderr": 0.004729322613301549, + "acc_norm": 0.4224258115913165, + "acc_norm_stderr": 0.004929361040558251 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4093567251461988, + "acc_stderr": 0.03771283107626545, + "acc_norm": 0.4093567251461988, + "acc_norm_stderr": 0.03771283107626545 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5339805825242718, + "acc_stderr": 0.04939291447273481, + "acc_norm": 0.5339805825242718, + "acc_norm_stderr": 
0.04939291447273481 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.44316730523627074, + "acc_stderr": 0.017764085035348386, + "acc_norm": 0.44316730523627074, + "acc_norm_stderr": 0.017764085035348386 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3851851851851852, + "acc_stderr": 0.042039210401562783, + "acc_norm": 0.3851851851851852, + "acc_norm_stderr": 0.042039210401562783 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3617021276595745, + "acc_stderr": 0.03141082197596239, + "acc_norm": 0.3617021276595745, + "acc_norm_stderr": 0.03141082197596239 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3433734939759036, + "acc_stderr": 0.03696584317010602, + "acc_norm": 0.3433734939759036, + "acc_norm_stderr": 0.03696584317010602 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.43729903536977494, + "acc_stderr": 0.028173917761762875, + "acc_norm": 0.43729903536977494, + "acc_norm_stderr": 0.028173917761762875 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4484304932735426, + "acc_stderr": 0.03337883736255098, + "acc_norm": 0.4484304932735426, + "acc_norm_stderr": 0.03337883736255098 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.45038167938931295, + "acc_stderr": 0.04363643698524779, + "acc_norm": 0.45038167938931295, + "acc_norm_stderr": 0.04363643698524779 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.46464646464646464, + "acc_stderr": 0.035534363688280626, + "acc_norm": 0.46464646464646464, + "acc_norm_stderr": 0.035534363688280626 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.45517241379310347, + "acc_stderr": 0.04149886942192117, + "acc_norm": 0.45517241379310347, + 
"acc_norm_stderr": 0.04149886942192117 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.04023382273617746, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.04023382273617746 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4327731092436975, + "acc_stderr": 0.03218358107742613, + "acc_norm": 0.4327731092436975, + "acc_norm_stderr": 0.03218358107742613 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.441025641025641, + "acc_stderr": 0.025174048384000766, + "acc_norm": 0.441025641025641, + "acc_norm_stderr": 0.025174048384000766 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.56, + "acc_stderr": 0.0498887651569859, + "acc_norm": 0.56, + "acc_norm_stderr": 0.0498887651569859 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39901477832512317, + "acc_stderr": 0.034454876862647144, + "acc_norm": 0.39901477832512317, + "acc_norm_stderr": 0.034454876862647144 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4032258064516129, + "acc_stderr": 0.027906150826041143, + "acc_norm": 0.4032258064516129, + "acc_norm_stderr": 0.027906150826041143 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7008547008547008, + "acc_stderr": 0.029996951858349465, + "acc_norm": 0.7008547008547008, + "acc_norm_stderr": 0.029996951858349465 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3849056603773585, + "acc_stderr": 0.029946498567699948, + "acc_norm": 0.3849056603773585, + "acc_norm_stderr": 0.029946498567699948 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.39090909090909093, + "acc_stderr": 0.04673752333670238, + "acc_norm": 
0.39090909090909093, + "acc_norm_stderr": 0.04673752333670238 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.02831753349606647, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.02831753349606647 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + "acc_stderr": 0.03684881521389023, + "acc_norm": 0.2847682119205298, + "acc_norm_stderr": 0.03684881521389023 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5920398009950248, + "acc_stderr": 0.03475116365194092, + "acc_norm": 0.5920398009950248, + "acc_norm_stderr": 0.03475116365194092 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.35260115606936415, + "acc_stderr": 0.03643037168958548, + "acc_norm": 0.35260115606936415, + "acc_norm_stderr": 0.03643037168958548 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3544973544973545, + "acc_stderr": 0.024636830602842, + "acc_norm": 0.3544973544973545, + "acc_norm_stderr": 0.024636830602842 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3125, + "acc_stderr": 0.038760854559127644, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.038760854559127644 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.45375722543352603, + "acc_stderr": 0.026803720583206184, + "acc_norm": 0.45375722543352603, + "acc_norm_stderr": 0.026803720583206184 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.44171779141104295, + "acc_stderr": 0.039015918258361836, + "acc_norm": 0.44171779141104295, + "acc_norm_stderr": 0.039015918258361836 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.38580246913580246, + "acc_stderr": 0.027085401226132143, + 
"acc_norm": 0.38580246913580246, + "acc_norm_stderr": 0.027085401226132143 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.41968911917098445, + "acc_stderr": 0.03561587327685884, + "acc_norm": 0.41968911917098445, + "acc_norm_stderr": 0.03561587327685884 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.30701754385964913, + "acc_stderr": 0.043391383225798594, + "acc_norm": 0.30701754385964913, + "acc_norm_stderr": 0.043391383225798594 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.44403669724770645, + "acc_stderr": 0.021302621211654525, + "acc_norm": 0.44403669724770645, + "acc_norm_stderr": 0.021302621211654525 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04216370213557836, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04216370213557836 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4477124183006536, + "acc_stderr": 0.028472938478033522, + "acc_norm": 0.4477124183006536, + "acc_norm_stderr": 0.028472938478033522 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6033057851239669, + "acc_stderr": 0.044658697805310094, + "acc_norm": 0.6033057851239669, + "acc_norm_stderr": 0.044658697805310094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3355263157894737, + "acc_stderr": 0.038424985593952694, + "acc_norm": 0.3355263157894737, + "acc_norm_stderr": 0.038424985593952694 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3235294117647059, + "acc_stderr": 0.018926082916083393, + "acc_norm": 0.3235294117647059, + "acc_norm_stderr": 0.018926082916083393 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3475177304964539, + "acc_stderr": 
0.028406627809590954, + "acc_norm": 0.3475177304964539, + "acc_norm_stderr": 0.028406627809590954 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.04287858751340456, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.04287858751340456 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4305555555555556, + "acc_stderr": 0.03376922151252336, + "acc_norm": 0.4305555555555556, + "acc_norm_stderr": 0.03376922151252336 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.25251396648044694, + "acc_stderr": 0.01453033020146864, + "acc_norm": 0.25251396648044694, + "acc_norm_stderr": 0.01453033020146864 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4375, + "acc_stderr": 0.030134614954403924, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.030134614954403924 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5428571428571428, + "acc_stderr": 0.03189141832421396, + "acc_norm": 0.5428571428571428, + "acc_norm_stderr": 0.03189141832421396 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5021097046413502, + "acc_stderr": 0.03254693801802008, + "acc_norm": 0.5021097046413502, + "acc_norm_stderr": 0.03254693801802008 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3011734028683181, + "acc_stderr": 0.011717148751648431, + "acc_norm": 0.3011734028683181, + "acc_norm_stderr": 0.011717148751648431 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.3627450980392157, + "acc_stderr": 0.033744993563193555, + "acc_norm": 0.3627450980392157, + "acc_norm_stderr": 0.033744993563193555 + }, + 
"harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3939393939393939, + "acc_stderr": 0.03815494308688929, + "acc_norm": 0.3939393939393939, + "acc_norm_stderr": 0.03815494308688929 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.26805385556915545, + "mc1_stderr": 0.015506204722834562, + "mc2": 0.42637566603576926, + "mc2_stderr": 0.015537081390223764 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.35537190082644626, + "acc_stderr": 0.01645549600031453, + "acc_norm": 0.4002361275088548, + "acc_norm_stderr": 0.016844693510505052 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + 
"harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "MRAIRR/Nextstage", + "model_sha": "9457f0fd266dc20b3808e56fc81d9242d2a9486a", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/MRAIRR/minillama3_8b/result_2024-07-08 07:22:22.json b/MRAIRR/minillama3_8b/result_2024-07-08 07:22:22.json new file mode 100644 index 0000000000000000000000000000000000000000..c95800b9b4d259144190cc0f5d1a554f179f3843 --- /dev/null +++ b/MRAIRR/minillama3_8b/result_2024-07-08 07:22:22.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.34726962457337884, + "acc_stderr": 
0.013913034529620442, + "acc_norm": 0.39761092150170646, + "acc_norm_stderr": 0.014301752223279531 + }, + "harness|ko_hellaswag|10": { + "acc": 0.37094204341764586, + "acc_stderr": 0.004820697457420418, + "acc_norm": 0.4618601872137024, + "acc_norm_stderr": 0.004975243508752005 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.45614035087719296, + "acc_stderr": 0.03820042586602966, + "acc_norm": 0.45614035087719296, + "acc_norm_stderr": 0.03820042586602966 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4174757281553398, + "acc_stderr": 0.048828405482122375, + "acc_norm": 0.4174757281553398, + "acc_norm_stderr": 0.048828405482122375 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3831417624521073, + "acc_stderr": 0.017384774194885638, + "acc_norm": 0.3831417624521073, + "acc_norm_stderr": 0.017384774194885638 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.362962962962963, + "acc_stderr": 0.04153948404742401, + "acc_norm": 0.362962962962963, + "acc_norm_stderr": 0.04153948404742401 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.34893617021276596, + "acc_stderr": 0.03115852213135778, + "acc_norm": 0.34893617021276596, + "acc_norm_stderr": 0.03115852213135778 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3192771084337349, + "acc_stderr": 0.0362933532994786, + "acc_norm": 0.3192771084337349, + "acc_norm_stderr": 0.0362933532994786 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.42443729903536975, + "acc_stderr": 0.028071928247946205, + "acc_norm": 0.42443729903536975, + "acc_norm_stderr": 0.028071928247946205 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.32286995515695066, + "acc_stderr": 0.031381476375754995, + "acc_norm": 0.32286995515695066, + "acc_norm_stderr": 0.031381476375754995 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.44274809160305345, + 
"acc_stderr": 0.043564472026650695, + "acc_norm": 0.44274809160305345, + "acc_norm_stderr": 0.043564472026650695 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.51010101010101, + "acc_stderr": 0.035616254886737454, + "acc_norm": 0.51010101010101, + "acc_norm_stderr": 0.035616254886737454 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.46206896551724136, + "acc_stderr": 0.041546596717075474, + "acc_norm": 0.46206896551724136, + "acc_norm_stderr": 0.041546596717075474 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3235294117647059, + "acc_stderr": 0.046550104113196177, + "acc_norm": 0.3235294117647059, + "acc_norm_stderr": 0.046550104113196177 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.42016806722689076, + "acc_stderr": 0.03206183783236152, + "acc_norm": 0.42016806722689076, + "acc_norm_stderr": 0.03206183783236152 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4358974358974359, + "acc_stderr": 0.025141801511177498, + "acc_norm": 0.4358974358974359, + "acc_norm_stderr": 0.025141801511177498 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.53, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.53, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.39814814814814814, + "acc_stderr": 0.04732332615978813, + "acc_norm": 0.39814814814814814, + "acc_norm_stderr": 0.04732332615978813 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4433497536945813, + "acc_stderr": 0.03495334582162934, + "acc_norm": 0.4433497536945813, + "acc_norm_stderr": 0.03495334582162934 + }, + "harness|ko_mmlu_high_school_biology|5": { 
+ "acc": 0.4064516129032258, + "acc_stderr": 0.02794172734625631, + "acc_norm": 0.4064516129032258, + "acc_norm_stderr": 0.02794172734625631 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5299145299145299, + "acc_stderr": 0.032697411068124425, + "acc_norm": 0.5299145299145299, + "acc_norm_stderr": 0.032697411068124425 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.41509433962264153, + "acc_stderr": 0.030325945789286105, + "acc_norm": 0.41509433962264153, + "acc_norm_stderr": 0.030325945789286105 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.3, + "acc_stderr": 0.04389311454644286, + "acc_norm": 0.3, + "acc_norm_stderr": 0.04389311454644286 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2851851851851852, + "acc_stderr": 0.027528599210340492, + "acc_norm": 0.2851851851851852, + "acc_norm_stderr": 0.027528599210340492 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + "acc_stderr": 0.037345356767871984, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.037345356767871984 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.527363184079602, + "acc_stderr": 0.03530235517334682, + "acc_norm": 0.527363184079602, + "acc_norm_stderr": 0.03530235517334682 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4046242774566474, + "acc_stderr": 0.03742461193887248, + "acc_norm": 0.4046242774566474, + "acc_norm_stderr": 0.03742461193887248 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2804232804232804, + "acc_stderr": 0.023135287974325635, + "acc_norm": 0.2804232804232804, + "acc_norm_stderr": 0.023135287974325635 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3541666666666667, + "acc_stderr": 0.039994111357535424, + "acc_norm": 0.3541666666666667, + "acc_norm_stderr": 0.039994111357535424 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + 
"harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.37283236994219654, + "acc_stderr": 0.02603389061357628, + "acc_norm": 0.37283236994219654, + "acc_norm_stderr": 0.02603389061357628 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.37423312883435583, + "acc_stderr": 0.03802068102899616, + "acc_norm": 0.37423312883435583, + "acc_norm_stderr": 0.03802068102899616 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4567901234567901, + "acc_stderr": 0.027716661650194045, + "acc_norm": 0.4567901234567901, + "acc_norm_stderr": 0.027716661650194045 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.40414507772020725, + "acc_stderr": 0.0354150857888402, + "acc_norm": 0.40414507772020725, + "acc_norm_stderr": 0.0354150857888402 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2982456140350877, + "acc_stderr": 0.04303684033537317, + "acc_norm": 0.2982456140350877, + "acc_norm_stderr": 0.04303684033537317 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.46422018348623856, + "acc_stderr": 0.021382364775701906, + "acc_norm": 0.46422018348623856, + "acc_norm_stderr": 0.021382364775701906 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.23809523809523808, + "acc_stderr": 0.038095238095238126, + "acc_norm": 0.23809523809523808, + "acc_norm_stderr": 0.038095238095238126 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4215686274509804, + "acc_stderr": 0.028275490156791434, + "acc_norm": 0.4215686274509804, + "acc_norm_stderr": 0.028275490156791434 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 
+ }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5041322314049587, + "acc_stderr": 0.045641987674327526, + "acc_norm": 0.5041322314049587, + "acc_norm_stderr": 0.045641987674327526 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4868421052631579, + "acc_stderr": 0.04067533136309174, + "acc_norm": 0.4868421052631579, + "acc_norm_stderr": 0.04067533136309174 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.32189542483660133, + "acc_stderr": 0.018901015322093085, + "acc_norm": 0.32189542483660133, + "acc_norm_stderr": 0.018901015322093085 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.30141843971631205, + "acc_stderr": 0.027374128882631153, + "acc_norm": 0.30141843971631205, + "acc_norm_stderr": 0.027374128882631153 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.29464285714285715, + "acc_stderr": 0.04327040932578732, + "acc_norm": 0.29464285714285715, + "acc_norm_stderr": 0.04327040932578732 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.03293377139415191, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.03293377139415191 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.28044692737430166, + "acc_stderr": 0.015024083883322884, + "acc_norm": 0.28044692737430166, + "acc_norm_stderr": 0.015024083883322884 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.40441176470588236, + "acc_stderr": 0.02981263070156974, + "acc_norm": 0.40441176470588236, + "acc_norm_stderr": 0.02981263070156974 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.44081632653061226, + "acc_stderr": 0.03178419114175364, + 
"acc_norm": 0.44081632653061226, + "acc_norm_stderr": 0.03178419114175364 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.4767932489451477, + "acc_stderr": 0.03251215201141018, + "acc_norm": 0.4767932489451477, + "acc_norm_stderr": 0.03251215201141018 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2920469361147327, + "acc_stderr": 0.011613349136271827, + "acc_norm": 0.2920469361147327, + "acc_norm_stderr": 0.011613349136271827 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.35784313725490197, + "acc_stderr": 0.033644872860882996, + "acc_norm": 0.35784313725490197, + "acc_norm_stderr": 0.033644872860882996 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.45454545454545453, + "acc_stderr": 0.038881769216741, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.038881769216741 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2937576499388005, + "mc1_stderr": 0.015945068581236614, + "mc2": 0.4940603787439581, + "mc2_stderr": 0.01599824397692502 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3884297520661157, + "acc_stderr": 0.016756921571069415, + "acc_norm": 0.4309327036599764, + "acc_norm_stderr": 0.017025558196043136 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + 
"harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "MRAIRR/minillama3_8b", + "model_sha": 
"61b8f76174e6270f4b63b29dff15b76d5766c15e", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/MRAIRR/minillama3_8b_all/result_2024-07-10 06:55:48.json b/MRAIRR/minillama3_8b_all/result_2024-07-10 06:55:48.json new file mode 100644 index 0000000000000000000000000000000000000000..2503d81ee0d8f4891409e7cdd6ddcead9642f369 --- /dev/null +++ b/MRAIRR/minillama3_8b_all/result_2024-07-10 06:55:48.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3506825938566553, + "acc_stderr": 0.013944635930726092, + "acc_norm": 0.4052901023890785, + "acc_norm_stderr": 0.014346869060229321 + }, + "harness|ko_hellaswag|10": { + "acc": 0.37094204341764586, + "acc_stderr": 0.004820697457420416, + "acc_norm": 0.4684325831507668, + "acc_norm_stderr": 0.004979826829400768 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.47368421052631576, + "acc_stderr": 0.03829509868994727, + "acc_norm": 0.47368421052631576, + "acc_norm_stderr": 0.03829509868994727 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4077669902912621, + "acc_stderr": 0.0486577757041077, + "acc_norm": 0.4077669902912621, + "acc_norm_stderr": 0.0486577757041077 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3959131545338442, + "acc_stderr": 0.017488247006979284, + "acc_norm": 0.3959131545338442, + "acc_norm_stderr": 0.017488247006979284 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.362962962962963, + "acc_stderr": 0.04153948404742401, + "acc_norm": 0.362962962962963, + "acc_norm_stderr": 0.04153948404742401 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.35319148936170214, + "acc_stderr": 0.03124532520276193, + "acc_norm": 0.35319148936170214, + 
"acc_norm_stderr": 0.03124532520276193 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3253012048192771, + "acc_stderr": 0.03647168523683227, + "acc_norm": 0.3253012048192771, + "acc_norm_stderr": 0.03647168523683227 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.42765273311897106, + "acc_stderr": 0.028099240775809574, + "acc_norm": 0.42765273311897106, + "acc_norm_stderr": 0.028099240775809574 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3004484304932735, + "acc_stderr": 0.030769352008229143, + "acc_norm": 0.3004484304932735, + "acc_norm_stderr": 0.030769352008229143 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4580152671755725, + "acc_stderr": 0.04369802690578756, + "acc_norm": 0.4580152671755725, + "acc_norm_stderr": 0.04369802690578756 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5404040404040404, + "acc_stderr": 0.035507024651313425, + "acc_norm": 0.5404040404040404, + "acc_norm_stderr": 0.035507024651313425 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3931034482758621, + "acc_stderr": 0.040703290137070705, + "acc_norm": 0.3931034482758621, + "acc_norm_stderr": 0.040703290137070705 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.04533838195929776, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.04533838195929776 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4327731092436975, + "acc_stderr": 0.03218358107742613, + "acc_norm": 0.4327731092436975, + "acc_norm_stderr": 0.03218358107742613 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3974358974358974, + "acc_stderr": 0.024811920017903836, + "acc_norm": 0.3974358974358974, + "acc_norm_stderr": 0.024811920017903836 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.46, + "acc_stderr": 
0.05009082659620333, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.04643454608906275, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.04643454608906275 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4039408866995074, + "acc_stderr": 0.0345245390382204, + "acc_norm": 0.4039408866995074, + "acc_norm_stderr": 0.0345245390382204 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4129032258064516, + "acc_stderr": 0.028009138125400387, + "acc_norm": 0.4129032258064516, + "acc_norm_stderr": 0.028009138125400387 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5299145299145299, + "acc_stderr": 0.032697411068124425, + "acc_norm": 0.5299145299145299, + "acc_norm_stderr": 0.032697411068124425 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.44528301886792454, + "acc_stderr": 0.030588052974270655, + "acc_norm": 0.44528301886792454, + "acc_norm_stderr": 0.030588052974270655 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.35454545454545455, + "acc_stderr": 0.04582004841505416, + "acc_norm": 0.35454545454545455, + "acc_norm_stderr": 0.04582004841505416 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2814814814814815, + "acc_stderr": 0.027420019350945277, + "acc_norm": 0.2814814814814815, + "acc_norm_stderr": 0.027420019350945277 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3576158940397351, + "acc_stderr": 0.03913453431177258, + "acc_norm": 0.3576158940397351, + "acc_norm_stderr": 0.03913453431177258 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5621890547263682, + "acc_stderr": 0.0350808011219984, + "acc_norm": 0.5621890547263682, + "acc_norm_stderr": 0.0350808011219984 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 
0.42196531791907516, + "acc_stderr": 0.0376574669386515, + "acc_norm": 0.42196531791907516, + "acc_norm_stderr": 0.0376574669386515 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.36243386243386244, + "acc_stderr": 0.02475747390275206, + "acc_norm": 0.36243386243386244, + "acc_norm_stderr": 0.02475747390275206 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3541666666666667, + "acc_stderr": 0.039994111357535424, + "acc_norm": 0.3541666666666667, + "acc_norm_stderr": 0.039994111357535424 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.3872832369942196, + "acc_stderr": 0.026226158605124655, + "acc_norm": 0.3872832369942196, + "acc_norm_stderr": 0.026226158605124655 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.37423312883435583, + "acc_stderr": 0.03802068102899615, + "acc_norm": 0.37423312883435583, + "acc_norm_stderr": 0.03802068102899615 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.42901234567901236, + "acc_stderr": 0.027538925613470867, + "acc_norm": 0.42901234567901236, + "acc_norm_stderr": 0.027538925613470867 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.41450777202072536, + "acc_stderr": 0.03555300319557672, + "acc_norm": 0.41450777202072536, + "acc_norm_stderr": 0.03555300319557672 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2982456140350877, + "acc_stderr": 0.04303684033537315, + "acc_norm": 0.2982456140350877, + "acc_norm_stderr": 0.04303684033537315 + }, + 
"harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.48807339449541287, + "acc_stderr": 0.021431223617362227, + "acc_norm": 0.48807339449541287, + "acc_norm_stderr": 0.021431223617362227 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.25396825396825395, + "acc_stderr": 0.038932596106046755, + "acc_norm": 0.25396825396825395, + "acc_norm_stderr": 0.038932596106046755 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.02845263998508801, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.02845263998508801 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5041322314049587, + "acc_stderr": 0.045641987674327526, + "acc_norm": 0.5041322314049587, + "acc_norm_stderr": 0.045641987674327526 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5197368421052632, + "acc_stderr": 0.04065771002562603, + "acc_norm": 0.5197368421052632, + "acc_norm_stderr": 0.04065771002562603 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2973856209150327, + "acc_stderr": 0.018492596536396955, + "acc_norm": 0.2973856209150327, + "acc_norm_stderr": 0.018492596536396955 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.26595744680851063, + "acc_stderr": 0.026358065698880596, + "acc_norm": 0.26595744680851063, + "acc_norm_stderr": 0.026358065698880596 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.26785714285714285, + "acc_stderr": 0.042032772914677614, + "acc_norm": 0.26785714285714285, + "acc_norm_stderr": 0.042032772914677614 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.03362277436608044, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.03362277436608044 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.28156424581005585, + "acc_stderr": 0.015042290171866113, + 
"acc_norm": 0.28156424581005585, + "acc_norm_stderr": 0.015042290171866113 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.39338235294117646, + "acc_stderr": 0.02967428828131118, + "acc_norm": 0.39338235294117646, + "acc_norm_stderr": 0.02967428828131118 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.43673469387755104, + "acc_stderr": 0.03175195237583322, + "acc_norm": 0.43673469387755104, + "acc_norm_stderr": 0.03175195237583322 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.3628691983122363, + "acc_stderr": 0.031299208255302136, + "acc_norm": 0.3628691983122363, + "acc_norm_stderr": 0.031299208255302136 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.28552803129074317, + "acc_stderr": 0.011535751586665671, + "acc_norm": 0.28552803129074317, + "acc_norm_stderr": 0.011535751586665671 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.03410785338904719, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.03410785338904719 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4484848484848485, + "acc_stderr": 0.038835659779569286, + "acc_norm": 0.4484848484848485, + "acc_norm_stderr": 0.038835659779569286 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.31701346389228885, + "mc1_stderr": 0.016289203374403382, + "mc2": 0.5028209235842843, + "mc2_stderr": 0.016058600400881456 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4014167650531287, + "acc_stderr": 0.01685290785872906, + "acc_norm": 0.4344746162927981, + "acc_norm_stderr": 0.017042098620824935 + } + }, + "versions": { + "all": 0, + 
"harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + 
"harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "MRAIRR/minillama3_8b_all", + "model_sha": "418a1179bb2bf3cc9dcb52987abef3147c2bb7b5", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/MTSAIR/multi_verse_model/result_2024-05-25 05:36:59.json b/MTSAIR/multi_verse_model/result_2024-05-25 05:36:59.json new file mode 100644 index 0000000000000000000000000000000000000000..2dec886d09fe82b642470562feca98f437f8bad5 --- /dev/null +++ b/MTSAIR/multi_verse_model/result_2024-05-25 05:36:59.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3924914675767918, + "acc_stderr": 0.014269634635670709, + "acc_norm": 0.4564846416382253, + "acc_norm_stderr": 0.01455594976049644 + }, + "harness|ko_hellaswag|10": { + "acc": 0.39563831905994823, + "acc_stderr": 0.004879880092103962, + "acc_norm": 0.525592511451902, + "acc_norm_stderr": 0.004983240744101385 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4678362573099415, + "acc_stderr": 0.03826882417660369, + "acc_norm": 0.4678362573099415, + "acc_norm_stderr": 0.03826882417660369 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6213592233009708, + 
"acc_stderr": 0.04802694698258975, + "acc_norm": 0.6213592233009708, + "acc_norm_stderr": 0.04802694698258975 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.45977011494252873, + "acc_stderr": 0.01782199409693353, + "acc_norm": 0.45977011494252873, + "acc_norm_stderr": 0.01782199409693353 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3851851851851852, + "acc_stderr": 0.042039210401562783, + "acc_norm": 0.3851851851851852, + "acc_norm_stderr": 0.042039210401562783 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4, + "acc_stderr": 0.03202563076101735, + "acc_norm": 0.4, + "acc_norm_stderr": 0.03202563076101735 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.35542168674698793, + "acc_stderr": 0.03726214354322415, + "acc_norm": 0.35542168674698793, + "acc_norm_stderr": 0.03726214354322415 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.40836012861736337, + "acc_stderr": 0.027917050748484624, + "acc_norm": 0.40836012861736337, + "acc_norm_stderr": 0.027917050748484624 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4304932735426009, + "acc_stderr": 0.033231973029429394, + "acc_norm": 0.4304932735426009, + "acc_norm_stderr": 0.033231973029429394 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4351145038167939, + "acc_stderr": 0.04348208051644858, + "acc_norm": 0.4351145038167939, + "acc_norm_stderr": 0.04348208051644858 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.45, + "acc_stderr": 0.04999999999999999, + "acc_norm": 0.45, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.035402943770953675, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.035402943770953675 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.45517241379310347, + "acc_stderr": 
0.04149886942192117, + "acc_norm": 0.45517241379310347, + "acc_norm_stderr": 0.04149886942192117 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.040233822736177476, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.040233822736177476 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5, + "acc_stderr": 0.032478490123081544, + "acc_norm": 0.5, + "acc_norm_stderr": 0.032478490123081544 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.48205128205128206, + "acc_stderr": 0.025334667080954963, + "acc_norm": 0.48205128205128206, + "acc_norm_stderr": 0.025334667080954963 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.65, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.65, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5092592592592593, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.5092592592592593, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.37438423645320196, + "acc_stderr": 0.03405155380561952, + "acc_norm": 0.37438423645320196, + "acc_norm_stderr": 0.03405155380561952 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.43548387096774194, + "acc_stderr": 0.028206225591502737, + "acc_norm": 0.43548387096774194, + "acc_norm_stderr": 0.028206225591502737 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7393162393162394, + "acc_stderr": 0.02876034895652341, + "acc_norm": 0.7393162393162394, + "acc_norm_stderr": 0.02876034895652341 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.45660377358490567, + "acc_stderr": 0.03065674869673943, + "acc_norm": 0.45660377358490567, + "acc_norm_stderr": 0.03065674869673943 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4818181818181818, 
+ "acc_stderr": 0.04785964010794917, + "acc_norm": 0.4818181818181818, + "acc_norm_stderr": 0.04785964010794917 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.34444444444444444, + "acc_stderr": 0.028972648884844267, + "acc_norm": 0.34444444444444444, + "acc_norm_stderr": 0.028972648884844267 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.25165562913907286, + "acc_stderr": 0.03543304234389985, + "acc_norm": 0.25165562913907286, + "acc_norm_stderr": 0.03543304234389985 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.572139303482587, + "acc_stderr": 0.03498541988407795, + "acc_norm": 0.572139303482587, + "acc_norm_stderr": 0.03498541988407795 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4046242774566474, + "acc_stderr": 0.03742461193887248, + "acc_norm": 0.4046242774566474, + "acc_norm_stderr": 0.03742461193887248 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.38095238095238093, + "acc_stderr": 0.025010749116137595, + "acc_norm": 0.38095238095238093, + "acc_norm_stderr": 0.025010749116137595 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3472222222222222, + "acc_stderr": 0.039812405437178615, + "acc_norm": 0.3472222222222222, + "acc_norm_stderr": 0.039812405437178615 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.55, + "acc_stderr": 0.05000000000000001, + "acc_norm": 0.55, + "acc_norm_stderr": 0.05000000000000001 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5028901734104047, + "acc_stderr": 0.02691864538323901, + "acc_norm": 0.5028901734104047, + "acc_norm_stderr": 0.02691864538323901 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4723926380368098, + "acc_stderr": 0.039223782906109894, + "acc_norm": 0.4723926380368098, + "acc_norm_stderr": 0.039223782906109894 + }, + "harness|ko_mmlu_prehistory|5": { 
+ "acc": 0.4567901234567901, + "acc_stderr": 0.027716661650194038, + "acc_norm": 0.4567901234567901, + "acc_norm_stderr": 0.027716661650194038 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.49222797927461137, + "acc_stderr": 0.036080032255696545, + "acc_norm": 0.49222797927461137, + "acc_norm_stderr": 0.036080032255696545 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.044346007015849245, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.044346007015849245 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5064220183486239, + "acc_stderr": 0.021435554820013077, + "acc_norm": 0.5064220183486239, + "acc_norm_stderr": 0.021435554820013077 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4365079365079365, + "acc_stderr": 0.044359328928514664, + "acc_norm": 0.4365079365079365, + "acc_norm_stderr": 0.044359328928514664 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.46405228758169936, + "acc_stderr": 0.028555827516528784, + "acc_norm": 0.46405228758169936, + "acc_norm_stderr": 0.028555827516528784 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5950413223140496, + "acc_stderr": 0.044811377559424694, + "acc_norm": 0.5950413223140496, + "acc_norm_stderr": 0.044811377559424694 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40789473684210525, + "acc_stderr": 0.03999309712777472, + "acc_norm": 0.40789473684210525, + "acc_norm_stderr": 0.03999309712777472 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.39215686274509803, + "acc_stderr": 0.019751726508762637, + "acc_norm": 0.39215686274509803, + "acc_norm_stderr": 
0.019751726508762637 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.35815602836879434, + "acc_stderr": 0.028602085862759415, + "acc_norm": 0.35815602836879434, + "acc_norm_stderr": 0.028602085862759415 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.38392857142857145, + "acc_stderr": 0.04616143075028546, + "acc_norm": 0.38392857142857145, + "acc_norm_stderr": 0.04616143075028546 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.033247089118091176, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.033247089118091176 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.28044692737430166, + "acc_stderr": 0.01502408388332288, + "acc_norm": 0.28044692737430166, + "acc_norm_stderr": 0.01502408388332288 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.68, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.68, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3235294117647059, + "acc_stderr": 0.028418208619406794, + "acc_norm": 0.3235294117647059, + "acc_norm_stderr": 0.028418208619406794 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5224489795918368, + "acc_stderr": 0.031976941187136725, + "acc_norm": 0.5224489795918368, + "acc_norm_stderr": 0.031976941187136725 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5949367088607594, + "acc_stderr": 0.031955147413706725, + "acc_norm": 0.5949367088607594, + "acc_norm_stderr": 0.031955147413706725 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3428943937418514, + "acc_stderr": 0.012123463271585895, + "acc_norm": 0.3428943937418514, + "acc_norm_stderr": 0.012123463271585895 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4215686274509804, + 
"acc_stderr": 0.03465868196380757, + "acc_norm": 0.4215686274509804, + "acc_norm_stderr": 0.03465868196380757 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.45454545454545453, + "acc_stderr": 0.03888176921674099, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.03888176921674099 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.42472460220318237, + "mc1_stderr": 0.017304000957167484, + "mc2": 0.5960114176047056, + "mc2_stderr": 0.016280327491614625 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4214876033057851, + "acc_stderr": 0.01697710193260152, + "acc_norm": 0.42739079102715466, + "acc_norm_stderr": 0.017008129844823156 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + 
"harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "MTSAIR/multi_verse_model", + "model_sha": "a4ca706d1bbc263b95e223a80ad68b0f125840b3", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Magpie-Align/Llama-3-8B-Magpie-Align-SFT-v0.3/result_2024-08-06 18:04:58.json b/Magpie-Align/Llama-3-8B-Magpie-Align-SFT-v0.3/result_2024-08-06 18:04:58.json new file mode 100644 index 0000000000000000000000000000000000000000..cc4a33a7db8900a9abec5376bb0364e34f83609a 
--- /dev/null +++ b/Magpie-Align/Llama-3-8B-Magpie-Align-SFT-v0.3/result_2024-08-06 18:04:58.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.38993174061433444, + "acc_stderr": 0.014252959848892896, + "acc_norm": 0.44795221843003413, + "acc_norm_stderr": 0.01453201149821167 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3886675960963951, + "acc_stderr": 0.0048645132621943045, + "acc_norm": 0.5192192790280821, + "acc_norm_stderr": 0.004986093791041661 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6374269005847953, + "acc_stderr": 0.0368713061556206, + "acc_norm": 0.6374269005847953, + "acc_norm_stderr": 0.0368713061556206 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6796116504854369, + "acc_stderr": 0.04620284082280041, + "acc_norm": 0.6796116504854369, + "acc_norm_stderr": 0.04620284082280041 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5338441890166028, + "acc_stderr": 0.017838956009136805, + "acc_norm": 0.5338441890166028, + "acc_norm_stderr": 0.017838956009136805 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.04244633238353229, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.04244633238353229 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.46382978723404256, + "acc_stderr": 0.032600385118357715, + "acc_norm": 0.46382978723404256, + "acc_norm_stderr": 0.032600385118357715 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.41566265060240964, + "acc_stderr": 0.038367221765980515, + "acc_norm": 0.41566265060240964, + "acc_norm_stderr": 0.038367221765980515 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5273311897106109, + "acc_stderr": 0.028355633568328174, + "acc_norm": 0.5273311897106109, + "acc_norm_stderr": 0.028355633568328174 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 
0.49327354260089684, + "acc_stderr": 0.033554765962343545, + "acc_norm": 0.49327354260089684, + "acc_norm_stderr": 0.033554765962343545 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5572519083969466, + "acc_stderr": 0.043564472026650695, + "acc_norm": 0.5572519083969466, + "acc_norm_stderr": 0.043564472026650695 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6515151515151515, + "acc_stderr": 0.03394853965156402, + "acc_norm": 0.6515151515151515, + "acc_norm_stderr": 0.03394853965156402 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5586206896551724, + "acc_stderr": 0.04137931034482757, + "acc_norm": 0.5586206896551724, + "acc_norm_stderr": 0.04137931034482757 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.044405219061793275, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.044405219061793275 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5588235294117647, + "acc_stderr": 0.03225294232399639, + "acc_norm": 0.5588235294117647, + "acc_norm_stderr": 0.03225294232399639 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5256410256410257, + "acc_stderr": 0.025317649726448673, + "acc_norm": 0.5256410256410257, + "acc_norm_stderr": 0.025317649726448673 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5740740740740741, + "acc_stderr": 0.0478034362693679, + "acc_norm": 0.5740740740740741, + "acc_norm_stderr": 0.0478034362693679 + }, + 
"harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3891625615763547, + "acc_stderr": 0.0343046241610387, + "acc_norm": 0.3891625615763547, + "acc_norm_stderr": 0.0343046241610387 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5290322580645161, + "acc_stderr": 0.028396016402760998, + "acc_norm": 0.5290322580645161, + "acc_norm_stderr": 0.028396016402760998 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7478632478632479, + "acc_stderr": 0.02844796547623102, + "acc_norm": 0.7478632478632479, + "acc_norm_stderr": 0.02844796547623102 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4981132075471698, + "acc_stderr": 0.03077265364207567, + "acc_norm": 0.4981132075471698, + "acc_norm_stderr": 0.03077265364207567 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5727272727272728, + "acc_stderr": 0.04738198703545483, + "acc_norm": 0.5727272727272728, + "acc_norm_stderr": 0.04738198703545483 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.029958249250082114, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.029958249250082114 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.4105960264900662, + "acc_stderr": 0.04016689594849929, + "acc_norm": 0.4105960264900662, + "acc_norm_stderr": 0.04016689594849929 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.03333333333333333, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.03333333333333333 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.45664739884393063, + "acc_stderr": 0.03798106566014498, + "acc_norm": 0.45664739884393063, + "acc_norm_stderr": 0.03798106566014498 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.024677862841332783, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.024677862841332783 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5069444444444444, + "acc_stderr": 
0.04180806750294938, + "acc_norm": 0.5069444444444444, + "acc_norm_stderr": 0.04180806750294938 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.72, + "acc_stderr": 0.04512608598542125, + "acc_norm": 0.72, + "acc_norm_stderr": 0.04512608598542125 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5520231213872833, + "acc_stderr": 0.02677299065336182, + "acc_norm": 0.5520231213872833, + "acc_norm_stderr": 0.02677299065336182 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4785276073619632, + "acc_stderr": 0.03924746876751129, + "acc_norm": 0.4785276073619632, + "acc_norm_stderr": 0.03924746876751129 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5462962962962963, + "acc_stderr": 0.0277012284685426, + "acc_norm": 0.5462962962962963, + "acc_norm_stderr": 0.0277012284685426 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6321243523316062, + "acc_stderr": 0.034801756684660366, + "acc_norm": 0.6321243523316062, + "acc_norm_stderr": 0.034801756684660366 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.38596491228070173, + "acc_stderr": 0.04579639422070434, + "acc_norm": 0.38596491228070173, + "acc_norm_stderr": 0.04579639422070434 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5853211009174312, + "acc_stderr": 0.021122903208602585, + "acc_norm": 0.5853211009174312, + "acc_norm_stderr": 0.021122903208602585 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.42063492063492064, + "acc_stderr": 0.04415438226743744, + "acc_norm": 0.42063492063492064, + "acc_norm_stderr": 0.04415438226743744 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5359477124183006, + "acc_stderr": 
0.02855582751652878, + "acc_norm": 0.5359477124183006, + "acc_norm_stderr": 0.02855582751652878 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6611570247933884, + "acc_stderr": 0.04320767807536669, + "acc_norm": 0.6611570247933884, + "acc_norm_stderr": 0.04320767807536669 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4934210526315789, + "acc_stderr": 0.040685900502249704, + "acc_norm": 0.4934210526315789, + "acc_norm_stderr": 0.040685900502249704 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.42810457516339867, + "acc_stderr": 0.0200176292142131, + "acc_norm": 0.42810457516339867, + "acc_norm_stderr": 0.0200176292142131 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32269503546099293, + "acc_stderr": 0.02788913930053478, + "acc_norm": 0.32269503546099293, + "acc_norm_stderr": 0.02788913930053478 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.49107142857142855, + "acc_stderr": 0.04745033255489123, + "acc_norm": 0.49107142857142855, + "acc_norm_stderr": 0.04745033255489123 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.47685185185185186, + "acc_stderr": 0.03406315360711507, + "acc_norm": 0.47685185185185186, + "acc_norm_stderr": 0.03406315360711507 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.3206703910614525, + "acc_stderr": 0.01560992955934841, + "acc_norm": 0.3206703910614525, + "acc_norm_stderr": 0.01560992955934841 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562429, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562429 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.67, + "acc_stderr": 0.04725815626252609, + "acc_norm": 0.67, + "acc_norm_stderr": 0.04725815626252609 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 
0.3786764705882353, + "acc_stderr": 0.02946513363977613, + "acc_norm": 0.3786764705882353, + "acc_norm_stderr": 0.02946513363977613 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6571428571428571, + "acc_stderr": 0.03038726291954772, + "acc_norm": 0.6571428571428571, + "acc_norm_stderr": 0.03038726291954772 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6497890295358649, + "acc_stderr": 0.031052391937584346, + "acc_norm": 0.6497890295358649, + "acc_norm_stderr": 0.031052391937584346 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3650586701434159, + "acc_stderr": 0.012296373743443475, + "acc_norm": 0.3650586701434159, + "acc_norm_stderr": 0.012296373743443475 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5931372549019608, + "acc_stderr": 0.03447891136353382, + "acc_norm": 0.5931372549019608, + "acc_norm_stderr": 0.03447891136353382 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.037563357751878954, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.037563357751878954 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2974296205630355, + "mc1_stderr": 0.016002651487361005, + "mc2": 0.47580795839719275, + "mc2_stderr": 0.015384573264612632 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4604486422668241, + "acc_stderr": 0.017136487626049846, + "acc_norm": 0.5548996458087367, + "acc_norm_stderr": 0.017086417431005467 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, 
+ "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + 
"harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Magpie-Align/Llama-3-8B-Magpie-Align-SFT-v0.3", + "model_sha": "d2578eb754d1c20efe604749296580f680950917", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/MarkrAI/DopeorNope-Maestro-v1-13B/result_2023-11-29 14:33:57.json b/MarkrAI/DopeorNope-Maestro-v1-13B/result_2023-11-29 14:33:57.json new file mode 100644 index 0000000000000000000000000000000000000000..18bbad2da3f7f0add930b9d6845c05edbeaa863f --- /dev/null +++ b/MarkrAI/DopeorNope-Maestro-v1-13B/result_2023-11-29 14:33:57.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.42406143344709896, + "acc_stderr": 0.0144418896274644, + "acc_norm": 0.4761092150170648, + "acc_norm_stderr": 0.014594701798071655 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4298944433379805, + "acc_stderr": 0.00494049050824065, + "acc_norm": 0.5765783708424617, + "acc_norm_stderr": 0.00493091151508479 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5146198830409356, + "acc_stderr": 0.038331852752130254, + "acc_norm": 0.5146198830409356, + "acc_norm_stderr": 0.038331852752130254 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5533980582524272, + "acc_stderr": 0.04922424153458933, + "acc_norm": 0.5533980582524272, + "acc_norm_stderr": 0.04922424153458933 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.558109833971903, + "acc_stderr": 0.01775880053421441, + "acc_norm": 0.558109833971903, + "acc_norm_stderr": 0.01775880053421441 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45185185185185184, + "acc_stderr": 0.042992689054808624, + "acc_norm": 0.45185185185185184, + "acc_norm_stderr": 0.042992689054808624 + }, + 
"harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206824, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206824 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4425531914893617, + "acc_stderr": 0.03246956919789958, + "acc_norm": 0.4425531914893617, + "acc_norm_stderr": 0.03246956919789958 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42771084337349397, + "acc_stderr": 0.038515976837185335, + "acc_norm": 0.42771084337349397, + "acc_norm_stderr": 0.038515976837185335 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5209003215434084, + "acc_stderr": 0.028373270961069414, + "acc_norm": 0.5209003215434084, + "acc_norm_stderr": 0.028373270961069414 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5515695067264574, + "acc_stderr": 0.033378837362550984, + "acc_norm": 0.5515695067264574, + "acc_norm_stderr": 0.033378837362550984 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4732824427480916, + "acc_stderr": 0.04379024936553894, + "acc_norm": 0.4732824427480916, + "acc_norm_stderr": 0.04379024936553894 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5808080808080808, + "acc_stderr": 0.035155207286704175, + "acc_norm": 0.5808080808080808, + "acc_norm_stderr": 0.035155207286704175 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4206896551724138, + "acc_stderr": 0.0411391498118926, + "acc_norm": 0.4206896551724138, + "acc_norm_stderr": 0.0411391498118926 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.04220773659171452, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.04220773659171452 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.49159663865546216, + "acc_stderr": 0.0324739027656967, + "acc_norm": 0.49159663865546216, + "acc_norm_stderr": 
0.0324739027656967 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4641025641025641, + "acc_stderr": 0.025285585990017834, + "acc_norm": 0.4641025641025641, + "acc_norm_stderr": 0.025285585990017834 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5370370370370371, + "acc_stderr": 0.04820403072760626, + "acc_norm": 0.5370370370370371, + "acc_norm_stderr": 0.04820403072760626 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39901477832512317, + "acc_stderr": 0.034454876862647144, + "acc_norm": 0.39901477832512317, + "acc_norm_stderr": 0.034454876862647144 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4612903225806452, + "acc_stderr": 0.028358634859836918, + "acc_norm": 0.4612903225806452, + "acc_norm_stderr": 0.028358634859836918 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6452991452991453, + "acc_stderr": 0.03134250486245403, + "acc_norm": 0.6452991452991453, + "acc_norm_stderr": 0.03134250486245403 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.46037735849056605, + "acc_stderr": 0.030676096599389188, + "acc_norm": 0.46037735849056605, + "acc_norm_stderr": 0.030676096599389188 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5727272727272728, + "acc_stderr": 0.04738198703545483, + "acc_norm": 0.5727272727272728, + "acc_norm_stderr": 0.04738198703545483 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.027309140588230182, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.027309140588230182 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.037101857261199946, + "acc_norm": 
0.2913907284768212, + "acc_norm_stderr": 0.037101857261199946 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6019900497512438, + "acc_stderr": 0.034611994290400135, + "acc_norm": 0.6019900497512438, + "acc_norm_stderr": 0.034611994290400135 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.42196531791907516, + "acc_stderr": 0.0376574669386515, + "acc_norm": 0.42196531791907516, + "acc_norm_stderr": 0.0376574669386515 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.023809523809523846, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.023809523809523846 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.04076663253918567, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.04076663253918567 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.63, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.63, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5144508670520231, + "acc_stderr": 0.02690784985628254, + "acc_norm": 0.5144508670520231, + "acc_norm_stderr": 0.02690784985628254 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5153374233128835, + "acc_stderr": 0.039265223787088424, + "acc_norm": 0.5153374233128835, + "acc_norm_stderr": 0.039265223787088424 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.027815973433878014, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.027815973433878014 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5492227979274611, + "acc_stderr": 0.035909109522355244, + 
"acc_norm": 0.5492227979274611, + "acc_norm_stderr": 0.035909109522355244 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.040969851398436695, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.040969851398436695 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.618348623853211, + "acc_stderr": 0.02082814851702261, + "acc_norm": 0.618348623853211, + "acc_norm_stderr": 0.02082814851702261 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3412698412698413, + "acc_stderr": 0.04240799327574925, + "acc_norm": 0.3412698412698413, + "acc_norm_stderr": 0.04240799327574925 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4215686274509804, + "acc_stderr": 0.02827549015679143, + "acc_norm": 0.4215686274509804, + "acc_norm_stderr": 0.02827549015679143 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.44, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.44, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6694214876033058, + "acc_stderr": 0.04294340845212094, + "acc_norm": 0.6694214876033058, + "acc_norm_stderr": 0.04294340845212094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3815789473684211, + "acc_stderr": 0.03953173377749194, + "acc_norm": 0.3815789473684211, + "acc_norm_stderr": 0.03953173377749194 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.39705882352941174, + "acc_stderr": 0.019794488900024106, + "acc_norm": 0.39705882352941174, + "acc_norm_stderr": 0.019794488900024106 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3617021276595745, + "acc_stderr": 0.028663820147199495, + "acc_norm": 0.3617021276595745, + "acc_norm_stderr": 0.028663820147199495 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25, + "acc_stderr": 0.04109974682633932, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04109974682633932 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.37037037037037035, + 
"acc_stderr": 0.03293377139415191, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.03293377139415191 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4227941176470588, + "acc_stderr": 0.03000856284500347, + "acc_norm": 0.4227941176470588, + "acc_norm_stderr": 0.03000856284500347 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.49795918367346936, + "acc_stderr": 0.0320089533497105, + "acc_norm": 0.49795918367346936, + "acc_norm_stderr": 0.0320089533497105 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6540084388185654, + "acc_stderr": 0.03096481058878671, + "acc_norm": 0.6540084388185654, + "acc_norm_stderr": 0.03096481058878671 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3709256844850065, + "acc_stderr": 0.012337391684530312, + "acc_norm": 0.3709256844850065, + "acc_norm_stderr": 0.012337391684530312 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5245098039215687, + "acc_stderr": 0.03505093194348798, + "acc_norm": 0.5245098039215687, + "acc_norm_stderr": 0.03505093194348798 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5333333333333333, + "acc_stderr": 0.03895658065271847, + "acc_norm": 0.5333333333333333, + "acc_norm_stderr": 0.03895658065271847 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3047735618115055, + "mc1_stderr": 0.016114124156882466, + "mc2": 0.46630606087545945, + "mc2_stderr": 0.015285468253345829 + }, + 
"harness|ko_commongen_v2|2": { + "acc": 0.4651711924439197, + "acc_stderr": 0.017148598015747422, + "acc_norm": 0.5466351829988194, + "acc_norm_stderr": 0.01711541822522687 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + 
"harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "MarkrAI/DopeorNope-Maestro-v1-13B", + "model_sha": "c68224b35bf9eb9c2f55270489870f5a6451871f", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/MarkrAI/DopeorNope-maestro-v1-DPO-13b/result_2023-11-29 17:12:08.json b/MarkrAI/DopeorNope-maestro-v1-DPO-13b/result_2023-11-29 17:12:08.json new file mode 100644 index 0000000000000000000000000000000000000000..9c97b67b7da0d3e6b1f0994c8af5cd7d072f1c13 --- /dev/null +++ b/MarkrAI/DopeorNope-maestro-v1-DPO-13b/result_2023-11-29 17:12:08.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.42406143344709896, + "acc_stderr": 0.0144418896274644, + "acc_norm": 0.47696245733788395, + "acc_norm_stderr": 0.014595873205358273 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4298944433379805, + "acc_stderr": 0.00494049050824065, + "acc_norm": 0.5764787890858395, + "acc_norm_stderr": 0.0049310654341736876 + }, + 
"harness|ko_mmlu_world_religions|5": { + "acc": 0.5146198830409356, + "acc_stderr": 0.038331852752130254, + "acc_norm": 0.5146198830409356, + "acc_norm_stderr": 0.038331852752130254 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5533980582524272, + "acc_stderr": 0.04922424153458933, + "acc_norm": 0.5533980582524272, + "acc_norm_stderr": 0.04922424153458933 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.558109833971903, + "acc_stderr": 0.01775880053421441, + "acc_norm": 0.558109833971903, + "acc_norm_stderr": 0.01775880053421441 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45185185185185184, + "acc_stderr": 0.042992689054808624, + "acc_norm": 0.45185185185185184, + "acc_norm_stderr": 0.042992689054808624 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206824, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206824 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.43829787234042555, + "acc_stderr": 0.03243618636108101, + "acc_norm": 0.43829787234042555, + "acc_norm_stderr": 0.03243618636108101 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42771084337349397, + "acc_stderr": 0.038515976837185335, + "acc_norm": 0.42771084337349397, + "acc_norm_stderr": 0.038515976837185335 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5176848874598071, + "acc_stderr": 0.02838032284907713, + "acc_norm": 0.5176848874598071, + "acc_norm_stderr": 0.02838032284907713 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5515695067264574, + "acc_stderr": 0.033378837362550984, + "acc_norm": 0.5515695067264574, + "acc_norm_stderr": 0.033378837362550984 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4732824427480916, + "acc_stderr": 0.04379024936553894, + "acc_norm": 0.4732824427480916, + "acc_norm_stderr": 0.04379024936553894 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + 
"harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5808080808080808, + "acc_stderr": 0.035155207286704175, + "acc_norm": 0.5808080808080808, + "acc_norm_stderr": 0.035155207286704175 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4206896551724138, + "acc_stderr": 0.0411391498118926, + "acc_norm": 0.4206896551724138, + "acc_norm_stderr": 0.0411391498118926 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.042801058373643966, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.042801058373643966 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.49159663865546216, + "acc_stderr": 0.0324739027656967, + "acc_norm": 0.49159663865546216, + "acc_norm_stderr": 0.0324739027656967 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4641025641025641, + "acc_stderr": 0.025285585990017834, + "acc_norm": 0.4641025641025641, + "acc_norm_stderr": 0.025285585990017834 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5370370370370371, + "acc_stderr": 0.04820403072760626, + "acc_norm": 0.5370370370370371, + "acc_norm_stderr": 0.04820403072760626 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39901477832512317, + "acc_stderr": 0.034454876862647144, + "acc_norm": 0.39901477832512317, + "acc_norm_stderr": 0.034454876862647144 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.45806451612903226, + "acc_stderr": 0.02834378725054064, + "acc_norm": 0.45806451612903226, + "acc_norm_stderr": 0.02834378725054064 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6452991452991453, + "acc_stderr": 0.03134250486245403, + "acc_norm": 0.6452991452991453, 
+ "acc_norm_stderr": 0.03134250486245403 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.46037735849056605, + "acc_stderr": 0.030676096599389188, + "acc_norm": 0.46037735849056605, + "acc_norm_stderr": 0.030676096599389188 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5727272727272728, + "acc_stderr": 0.04738198703545483, + "acc_norm": 0.5727272727272728, + "acc_norm_stderr": 0.04738198703545483 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2814814814814815, + "acc_stderr": 0.027420019350945277, + "acc_norm": 0.2814814814814815, + "acc_norm_stderr": 0.027420019350945277 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.037101857261199946, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.037101857261199946 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6019900497512438, + "acc_stderr": 0.034611994290400135, + "acc_norm": 0.6019900497512438, + "acc_norm_stderr": 0.034611994290400135 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.42196531791907516, + "acc_stderr": 0.0376574669386515, + "acc_norm": 0.42196531791907516, + "acc_norm_stderr": 0.0376574669386515 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.023809523809523846, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.023809523809523846 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3958333333333333, + "acc_stderr": 0.04089465449325583, + "acc_norm": 0.3958333333333333, + "acc_norm_stderr": 0.04089465449325583 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.63, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.63, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5144508670520231, + "acc_stderr": 0.02690784985628254, 
+ "acc_norm": 0.5144508670520231, + "acc_norm_stderr": 0.02690784985628254 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5153374233128835, + "acc_stderr": 0.039265223787088424, + "acc_norm": 0.5153374233128835, + "acc_norm_stderr": 0.039265223787088424 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.027815973433878014, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.027815973433878014 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5492227979274611, + "acc_stderr": 0.035909109522355244, + "acc_norm": 0.5492227979274611, + "acc_norm_stderr": 0.035909109522355244 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.040969851398436695, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.040969851398436695 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.618348623853211, + "acc_stderr": 0.02082814851702261, + "acc_norm": 0.618348623853211, + "acc_norm_stderr": 0.02082814851702261 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3412698412698413, + "acc_stderr": 0.04240799327574925, + "acc_norm": 0.3412698412698413, + "acc_norm_stderr": 0.04240799327574925 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.41830065359477125, + "acc_stderr": 0.028245134024387292, + "acc_norm": 0.41830065359477125, + "acc_norm_stderr": 0.028245134024387292 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.44, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.44, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6694214876033058, + "acc_stderr": 0.04294340845212094, + "acc_norm": 0.6694214876033058, + "acc_norm_stderr": 0.04294340845212094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3815789473684211, + 
"acc_stderr": 0.03953173377749194, + "acc_norm": 0.3815789473684211, + "acc_norm_stderr": 0.03953173377749194 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.39705882352941174, + "acc_stderr": 0.019794488900024106, + "acc_norm": 0.39705882352941174, + "acc_norm_stderr": 0.019794488900024106 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3617021276595745, + "acc_stderr": 0.028663820147199495, + "acc_norm": 0.3617021276595745, + "acc_norm_stderr": 0.028663820147199495 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25, + "acc_stderr": 0.04109974682633932, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04109974682633932 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.36574074074074076, + "acc_stderr": 0.03284738857647207, + "acc_norm": 0.36574074074074076, + "acc_norm_stderr": 0.03284738857647207 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4227941176470588, + "acc_stderr": 0.03000856284500347, + "acc_norm": 0.4227941176470588, + "acc_norm_stderr": 0.03000856284500347 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.49387755102040815, + "acc_stderr": 0.032006820201639086, + "acc_norm": 0.49387755102040815, + "acc_norm_stderr": 0.032006820201639086 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6540084388185654, + "acc_stderr": 0.03096481058878671, + "acc_norm": 0.6540084388185654, + "acc_norm_stderr": 0.03096481058878671 + }, + 
"harness|ko_mmlu_professional_law|5": { + "acc": 0.37027379400260757, + "acc_stderr": 0.012332930781256723, + "acc_norm": 0.37027379400260757, + "acc_norm_stderr": 0.012332930781256723 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5245098039215687, + "acc_stderr": 0.03505093194348798, + "acc_norm": 0.5245098039215687, + "acc_norm_stderr": 0.03505093194348798 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5333333333333333, + "acc_stderr": 0.03895658065271847, + "acc_norm": 0.5333333333333333, + "acc_norm_stderr": 0.03895658065271847 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3047735618115055, + "mc1_stderr": 0.016114124156882466, + "mc2": 0.4663124402531116, + "mc2_stderr": 0.015286600923431525 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4651711924439197, + "acc_stderr": 0.017148598015747422, + "acc_norm": 0.5466351829988194, + "acc_norm_stderr": 0.01711541822522687 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + 
"harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "MarkrAI/DopeorNope-maestro-v1-DPO-13b", + "model_sha": "c7a7352cb9fa7e2f84385eef0d84de34a0417129", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff 
--git a/MarkrAI/DopeorNope-maestro-v1.1-DPO-13b/result_2023-11-29 22:34:39.json b/MarkrAI/DopeorNope-maestro-v1.1-DPO-13b/result_2023-11-29 22:34:39.json new file mode 100644 index 0000000000000000000000000000000000000000..8b0a1dce04f3489c631de6cb3c518bcab21abb75 --- /dev/null +++ b/MarkrAI/DopeorNope-maestro-v1.1-DPO-13b/result_2023-11-29 22:34:39.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.42406143344709896, + "acc_stderr": 0.0144418896274644, + "acc_norm": 0.4761092150170648, + "acc_norm_stderr": 0.014594701798071655 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4298944433379805, + "acc_stderr": 0.00494049050824065, + "acc_norm": 0.5763792073292173, + "acc_norm_stderr": 0.004931219148182245 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5146198830409356, + "acc_stderr": 0.038331852752130254, + "acc_norm": 0.5146198830409356, + "acc_norm_stderr": 0.038331852752130254 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5533980582524272, + "acc_stderr": 0.04922424153458933, + "acc_norm": 0.5533980582524272, + "acc_norm_stderr": 0.04922424153458933 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.558109833971903, + "acc_stderr": 0.01775880053421441, + "acc_norm": 0.558109833971903, + "acc_norm_stderr": 0.01775880053421441 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45185185185185184, + "acc_stderr": 0.042992689054808624, + "acc_norm": 0.45185185185185184, + "acc_norm_stderr": 0.042992689054808624 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206824, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206824 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.43829787234042555, + "acc_stderr": 0.03243618636108101, + "acc_norm": 0.43829787234042555, + "acc_norm_stderr": 0.03243618636108101 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42771084337349397, + "acc_stderr": 0.038515976837185335, + "acc_norm": 0.42771084337349397, + 
"acc_norm_stderr": 0.038515976837185335 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5176848874598071, + "acc_stderr": 0.02838032284907713, + "acc_norm": 0.5176848874598071, + "acc_norm_stderr": 0.02838032284907713 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5515695067264574, + "acc_stderr": 0.033378837362550984, + "acc_norm": 0.5515695067264574, + "acc_norm_stderr": 0.033378837362550984 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4732824427480916, + "acc_stderr": 0.04379024936553894, + "acc_norm": 0.4732824427480916, + "acc_norm_stderr": 0.04379024936553894 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5808080808080808, + "acc_stderr": 0.035155207286704175, + "acc_norm": 0.5808080808080808, + "acc_norm_stderr": 0.035155207286704175 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4206896551724138, + "acc_stderr": 0.0411391498118926, + "acc_norm": 0.4206896551724138, + "acc_norm_stderr": 0.0411391498118926 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.04220773659171452, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.04220773659171452 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.49159663865546216, + "acc_stderr": 0.0324739027656967, + "acc_norm": 0.49159663865546216, + "acc_norm_stderr": 0.0324739027656967 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.025294608023986483, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.025294608023986483 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 
0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5370370370370371, + "acc_stderr": 0.04820403072760626, + "acc_norm": 0.5370370370370371, + "acc_norm_stderr": 0.04820403072760626 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39901477832512317, + "acc_stderr": 0.034454876862647144, + "acc_norm": 0.39901477832512317, + "acc_norm_stderr": 0.034454876862647144 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4612903225806452, + "acc_stderr": 0.028358634859836918, + "acc_norm": 0.4612903225806452, + "acc_norm_stderr": 0.028358634859836918 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6452991452991453, + "acc_stderr": 0.03134250486245403, + "acc_norm": 0.6452991452991453, + "acc_norm_stderr": 0.03134250486245403 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.45660377358490567, + "acc_stderr": 0.03065674869673943, + "acc_norm": 0.45660377358490567, + "acc_norm_stderr": 0.03065674869673943 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5727272727272728, + "acc_stderr": 0.04738198703545483, + "acc_norm": 0.5727272727272728, + "acc_norm_stderr": 0.04738198703545483 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2814814814814815, + "acc_stderr": 0.027420019350945277, + "acc_norm": 0.2814814814814815, + "acc_norm_stderr": 0.027420019350945277 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.037101857261199946, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.037101857261199946 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6019900497512438, + "acc_stderr": 0.034611994290400135, + "acc_norm": 0.6019900497512438, + "acc_norm_stderr": 0.034611994290400135 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.42196531791907516, + "acc_stderr": 0.0376574669386515, + "acc_norm": 0.42196531791907516, + "acc_norm_stderr": 0.0376574669386515 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + 
"acc": 0.30952380952380953, + "acc_stderr": 0.023809523809523846, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.023809523809523846 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.04076663253918567, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.04076663253918567 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.63, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.63, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5144508670520231, + "acc_stderr": 0.02690784985628254, + "acc_norm": 0.5144508670520231, + "acc_norm_stderr": 0.02690784985628254 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5153374233128835, + "acc_stderr": 0.039265223787088424, + "acc_norm": 0.5153374233128835, + "acc_norm_stderr": 0.039265223787088424 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.027815973433878014, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.027815973433878014 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5492227979274611, + "acc_stderr": 0.035909109522355244, + "acc_norm": 0.5492227979274611, + "acc_norm_stderr": 0.035909109522355244 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.040969851398436695, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.040969851398436695 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.618348623853211, + "acc_stderr": 0.02082814851702261, + "acc_norm": 0.618348623853211, + "acc_norm_stderr": 0.02082814851702261 + }, + 
"harness|ko_mmlu_formal_logic|5": { + "acc": 0.3412698412698413, + "acc_stderr": 0.04240799327574925, + "acc_norm": 0.3412698412698413, + "acc_norm_stderr": 0.04240799327574925 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.41830065359477125, + "acc_stderr": 0.028245134024387292, + "acc_norm": 0.41830065359477125, + "acc_norm_stderr": 0.028245134024387292 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.44, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.44, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6694214876033058, + "acc_stderr": 0.04294340845212094, + "acc_norm": 0.6694214876033058, + "acc_norm_stderr": 0.04294340845212094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3815789473684211, + "acc_stderr": 0.03953173377749194, + "acc_norm": 0.3815789473684211, + "acc_norm_stderr": 0.03953173377749194 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.39705882352941174, + "acc_stderr": 0.019794488900024106, + "acc_norm": 0.39705882352941174, + "acc_norm_stderr": 0.019794488900024106 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3617021276595745, + "acc_stderr": 0.028663820147199495, + "acc_norm": 0.3617021276595745, + "acc_norm_stderr": 0.028663820147199495 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25, + "acc_stderr": 0.04109974682633932, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04109974682633932 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.36574074074074076, + "acc_stderr": 0.03284738857647207, + "acc_norm": 0.36574074074074076, + "acc_norm_stderr": 0.03284738857647207 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + 
}, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4227941176470588, + "acc_stderr": 0.03000856284500347, + "acc_norm": 0.4227941176470588, + "acc_norm_stderr": 0.03000856284500347 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.49387755102040815, + "acc_stderr": 0.032006820201639086, + "acc_norm": 0.49387755102040815, + "acc_norm_stderr": 0.032006820201639086 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6540084388185654, + "acc_stderr": 0.03096481058878671, + "acc_norm": 0.6540084388185654, + "acc_norm_stderr": 0.03096481058878671 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.36897001303780963, + "acc_stderr": 0.012323936650174859, + "acc_norm": 0.36897001303780963, + "acc_norm_stderr": 0.012323936650174859 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5245098039215687, + "acc_stderr": 0.03505093194348798, + "acc_norm": 0.5245098039215687, + "acc_norm_stderr": 0.03505093194348798 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5333333333333333, + "acc_stderr": 0.03895658065271847, + "acc_norm": 0.5333333333333333, + "acc_norm_stderr": 0.03895658065271847 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3047735618115055, + "mc1_stderr": 0.016114124156882466, + "mc2": 0.46627654282840275, + "mc2_stderr": 0.015286096744214328 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4651711924439197, + "acc_stderr": 0.017148598015747422, + "acc_norm": 0.5466351829988194, + "acc_norm_stderr": 0.01711541822522687 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + 
"harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + 
"harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "MarkrAI/DopeorNope-maestro-v1.1-DPO-13b", + "model_sha": "8dc70bf0ccd7914ca6ebbe2e661f783e69172b95", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/MarkrAI/DopeorNope-maestro-v2-DPO-13b/result_2023-11-30 07:24:14.json b/MarkrAI/DopeorNope-maestro-v2-DPO-13b/result_2023-11-30 07:24:14.json new file mode 100644 index 0000000000000000000000000000000000000000..b1113b6bee8734dab7c3a0f960c03bd48308f70b --- /dev/null +++ b/MarkrAI/DopeorNope-maestro-v2-DPO-13b/result_2023-11-30 07:24:14.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.386518771331058, + "acc_stderr": 0.014230084761910467, + "acc_norm": 0.4513651877133106, + "acc_norm_stderr": 0.014542104569955264 + }, + "harness|ko_hellaswag|10": { + "acc": 0.42103166699860584, + "acc_stderr": 0.00492715588259819, + "acc_norm": 0.5669189404501095, + "acc_norm_stderr": 0.004944889545497954 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5146198830409356, + "acc_stderr": 0.038331852752130254, + "acc_norm": 0.5146198830409356, + "acc_norm_stderr": 0.038331852752130254 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4368932038834951, + "acc_stderr": 0.049111471073657764, + "acc_norm": 0.4368932038834951, + "acc_norm_stderr": 0.049111471073657764 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5236270753512133, + "acc_stderr": 0.017859989765176453, + "acc_norm": 
0.5236270753512133, + "acc_norm_stderr": 0.017859989765176453 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4962962962962963, + "acc_stderr": 0.043192236258113303, + "acc_norm": 0.4962962962962963, + "acc_norm_stderr": 0.043192236258113303 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421255, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421255 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3829787234042553, + "acc_stderr": 0.03177821250236922, + "acc_norm": 0.3829787234042553, + "acc_norm_stderr": 0.03177821250236922 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4036144578313253, + "acc_stderr": 0.03819486140758398, + "acc_norm": 0.4036144578313253, + "acc_norm_stderr": 0.03819486140758398 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4919614147909968, + "acc_stderr": 0.028394421370984545, + "acc_norm": 0.4919614147909968, + "acc_norm_stderr": 0.028394421370984545 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5022421524663677, + "acc_stderr": 0.03355746535223263, + "acc_norm": 0.5022421524663677, + "acc_norm_stderr": 0.03355746535223263 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5038167938931297, + "acc_stderr": 0.043851623256015534, + "acc_norm": 0.5038167938931297, + "acc_norm_stderr": 0.043851623256015534 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5151515151515151, + "acc_stderr": 0.03560716516531061, + "acc_norm": 0.5151515151515151, + "acc_norm_stderr": 0.03560716516531061 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3931034482758621, + "acc_stderr": 0.0407032901370707, + "acc_norm": 0.3931034482758621, + "acc_norm_stderr": 0.0407032901370707 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.04023382273617747, + 
"acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.04023382273617747 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.032145368597886394, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.032145368597886394 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.40512820512820513, + "acc_stderr": 0.024890471769938152, + "acc_norm": 0.40512820512820513, + "acc_norm_stderr": 0.024890471769938152 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5092592592592593, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.5092592592592593, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39901477832512317, + "acc_stderr": 0.03445487686264716, + "acc_norm": 0.39901477832512317, + "acc_norm_stderr": 0.03445487686264716 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.43870967741935485, + "acc_stderr": 0.028229497320317216, + "acc_norm": 0.43870967741935485, + "acc_norm_stderr": 0.028229497320317216 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6196581196581197, + "acc_stderr": 0.03180425204384099, + "acc_norm": 0.6196581196581197, + "acc_norm_stderr": 0.03180425204384099 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.42641509433962266, + "acc_stderr": 0.03043779434298305, + "acc_norm": 0.42641509433962266, + "acc_norm_stderr": 0.03043779434298305 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.45454545454545453, + "acc_stderr": 0.04769300568972743, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.04769300568972743 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 
0.26666666666666666, + "acc_stderr": 0.02696242432507382, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.02696242432507382 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33112582781456956, + "acc_stderr": 0.038425817186598696, + "acc_norm": 0.33112582781456956, + "acc_norm_stderr": 0.038425817186598696 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5970149253731343, + "acc_stderr": 0.034683432951111266, + "acc_norm": 0.5970149253731343, + "acc_norm_stderr": 0.034683432951111266 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.32947976878612717, + "acc_stderr": 0.03583901754736413, + "acc_norm": 0.32947976878612717, + "acc_norm_stderr": 0.03583901754736413 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2698412698412698, + "acc_stderr": 0.02286083830923207, + "acc_norm": 0.2698412698412698, + "acc_norm_stderr": 0.02286083830923207 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3541666666666667, + "acc_stderr": 0.039994111357535424, + "acc_norm": 0.3541666666666667, + "acc_norm_stderr": 0.039994111357535424 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.64, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.64, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4884393063583815, + "acc_stderr": 0.02691189868637792, + "acc_norm": 0.4884393063583815, + "acc_norm_stderr": 0.02691189868637792 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4110429447852761, + "acc_stderr": 0.038656978537853624, + "acc_norm": 0.4110429447852761, + "acc_norm_stderr": 0.038656978537853624 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.44753086419753085, + "acc_stderr": 0.027667138569422704, + "acc_norm": 0.44753086419753085, + "acc_norm_stderr": 0.027667138569422704 + }, + 
"harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.44559585492227977, + "acc_stderr": 0.03587014986075659, + "acc_norm": 0.44559585492227977, + "acc_norm_stderr": 0.03587014986075659 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.041424397194893596, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.041424397194893596 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5211009174311927, + "acc_stderr": 0.02141822475426464, + "acc_norm": 0.5211009174311927, + "acc_norm_stderr": 0.02141822475426464 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.24603174603174602, + "acc_stderr": 0.03852273364924315, + "acc_norm": 0.24603174603174602, + "acc_norm_stderr": 0.03852273364924315 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4117647058823529, + "acc_stderr": 0.02818059632825929, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.02818059632825929 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.628099173553719, + "acc_stderr": 0.04412015806624504, + "acc_norm": 0.628099173553719, + "acc_norm_stderr": 0.04412015806624504 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.39473684210526316, + "acc_stderr": 0.039777499346220734, + "acc_norm": 0.39473684210526316, + "acc_norm_stderr": 0.039777499346220734 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.37745098039215685, + "acc_stderr": 0.01961085147488029, + "acc_norm": 0.37745098039215685, + "acc_norm_stderr": 0.01961085147488029 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.28368794326241137, + "acc_stderr": 0.026891709428343957, + "acc_norm": 0.28368794326241137, + 
"acc_norm_stderr": 0.026891709428343957 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25892857142857145, + "acc_stderr": 0.04157751539865629, + "acc_norm": 0.25892857142857145, + "acc_norm_stderr": 0.04157751539865629 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2361111111111111, + "acc_stderr": 0.02896370257079105, + "acc_norm": 0.2361111111111111, + "acc_norm_stderr": 0.02896370257079105 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3897058823529412, + "acc_stderr": 0.02962466358115969, + "acc_norm": 0.3897058823529412, + "acc_norm_stderr": 0.02962466358115969 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.45714285714285713, + "acc_stderr": 0.03189141832421397, + "acc_norm": 0.45714285714285713, + "acc_norm_stderr": 0.03189141832421397 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5780590717299579, + "acc_stderr": 0.032148146302403695, + "acc_norm": 0.5780590717299579, + "acc_norm_stderr": 0.032148146302403695 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2926988265971317, + "acc_stderr": 0.011620949195849531, + "acc_norm": 0.2926988265971317, + "acc_norm_stderr": 0.011620949195849531 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4019607843137255, + "acc_stderr": 0.034411900234824655, + "acc_norm": 0.4019607843137255, + "acc_norm_stderr": 0.034411900234824655 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 
0.5333333333333333, + "acc_stderr": 0.03895658065271846, + "acc_norm": 0.5333333333333333, + "acc_norm_stderr": 0.03895658065271846 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2717258261933905, + "mc1_stderr": 0.01557284045287583, + "mc2": 0.42260797250546406, + "mc2_stderr": 0.014810055872742214 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5324675324675324, + "acc_stderr": 0.017154073716682868, + "acc_norm": 0.6162927981109799, + "acc_norm_stderr": 0.016718924637231826 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + 
"harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "MarkrAI/DopeorNope-maestro-v2-DPO-13b", + "model_sha": "e6aeb2886265d8bdbfda9460926c63b991694ce9", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/MarkrAI/DopeorNope-maestro-v3-DPO-13b/result_2023-11-30 08:14:03.json b/MarkrAI/DopeorNope-maestro-v3-DPO-13b/result_2023-11-30 08:14:03.json new file mode 100644 index 0000000000000000000000000000000000000000..9717e0963a8d938d5ce4b24a436b894926947893 --- /dev/null +++ b/MarkrAI/DopeorNope-maestro-v3-DPO-13b/result_2023-11-30 08:14:03.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 
0.3873720136518771, + "acc_stderr": 0.014235872487909865, + "acc_norm": 0.44368600682593856, + "acc_norm_stderr": 0.014518421825670447 + }, + "harness|ko_hellaswag|10": { + "acc": 0.42093208524198367, + "acc_stderr": 0.004926996830194231, + "acc_norm": 0.5696076478789086, + "acc_norm_stderr": 0.0049411916073179105 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.49707602339181284, + "acc_stderr": 0.03834759370936839, + "acc_norm": 0.49707602339181284, + "acc_norm_stderr": 0.03834759370936839 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.49514563106796117, + "acc_stderr": 0.04950504382128921, + "acc_norm": 0.49514563106796117, + "acc_norm_stderr": 0.04950504382128921 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5440613026819924, + "acc_stderr": 0.01781040392543535, + "acc_norm": 0.5440613026819924, + "acc_norm_stderr": 0.01781040392543535 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4888888888888889, + "acc_stderr": 0.04318275491977978, + "acc_norm": 0.4888888888888889, + "acc_norm_stderr": 0.04318275491977978 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206824, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206824 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.41702127659574467, + "acc_stderr": 0.03223276266711712, + "acc_norm": 0.41702127659574467, + "acc_norm_stderr": 0.03223276266711712 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.46987951807228917, + "acc_stderr": 0.03885425420866767, + "acc_norm": 0.46987951807228917, + "acc_norm_stderr": 0.03885425420866767 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.49517684887459806, + "acc_stderr": 0.02839677044411129, + "acc_norm": 0.49517684887459806, + "acc_norm_stderr": 0.02839677044411129 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5022421524663677, + "acc_stderr": 0.033557465352232634, + "acc_norm": 0.5022421524663677, + "acc_norm_stderr": 0.033557465352232634 + }, + 
"harness|ko_mmlu_human_sexuality|5": { + "acc": 0.46564885496183206, + "acc_stderr": 0.04374928560599738, + "acc_norm": 0.46564885496183206, + "acc_norm_stderr": 0.04374928560599738 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5757575757575758, + "acc_stderr": 0.03521224908841586, + "acc_norm": 0.5757575757575758, + "acc_norm_stderr": 0.03521224908841586 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3931034482758621, + "acc_stderr": 0.040703290137070705, + "acc_norm": 0.3931034482758621, + "acc_norm_stderr": 0.040703290137070705 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.041583075330832865, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.041583075330832865 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4789915966386555, + "acc_stderr": 0.03244980849990029, + "acc_norm": 0.4789915966386555, + "acc_norm_stderr": 0.03244980849990029 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.43333333333333335, + "acc_stderr": 0.02512465352588513, + "acc_norm": 0.43333333333333335, + "acc_norm_stderr": 0.02512465352588513 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5092592592592593, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.5092592592592593, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4236453201970443, + "acc_stderr": 0.03476725747649037, + "acc_norm": 0.4236453201970443, + "acc_norm_stderr": 
0.03476725747649037 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.47419354838709676, + "acc_stderr": 0.028406095057653315, + "acc_norm": 0.47419354838709676, + "acc_norm_stderr": 0.028406095057653315 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6324786324786325, + "acc_stderr": 0.03158539157745637, + "acc_norm": 0.6324786324786325, + "acc_norm_stderr": 0.03158539157745637 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4339622641509434, + "acc_stderr": 0.0305032920133426, + "acc_norm": 0.4339622641509434, + "acc_norm_stderr": 0.0305032920133426 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2518518518518518, + "acc_stderr": 0.026466117538959916, + "acc_norm": 0.2518518518518518, + "acc_norm_stderr": 0.026466117538959916 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.03802039760107903, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.03802039760107903 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.582089552238806, + "acc_stderr": 0.034875586404620636, + "acc_norm": 0.582089552238806, + "acc_norm_stderr": 0.034875586404620636 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.41040462427745666, + "acc_stderr": 0.03750757044895538, + "acc_norm": 0.41040462427745666, + "acc_norm_stderr": 0.03750757044895538 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.29365079365079366, + "acc_stderr": 0.023456037383982026, + "acc_norm": 0.29365079365079366, + "acc_norm_stderr": 0.023456037383982026 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3958333333333333, + "acc_stderr": 0.040894654493255835, + "acc_norm": 0.3958333333333333, + "acc_norm_stderr": 0.040894654493255835 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 
0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.62, + "acc_stderr": 0.048783173121456344, + "acc_norm": 0.62, + "acc_norm_stderr": 0.048783173121456344 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5028901734104047, + "acc_stderr": 0.02691864538323901, + "acc_norm": 0.5028901734104047, + "acc_norm_stderr": 0.02691864538323901 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.49079754601226994, + "acc_stderr": 0.03927705600787443, + "acc_norm": 0.49079754601226994, + "acc_norm_stderr": 0.03927705600787443 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.027801656212323667, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.027801656212323667 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5129533678756477, + "acc_stderr": 0.03607228061047749, + "acc_norm": 0.5129533678756477, + "acc_norm_stderr": 0.03607228061047749 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.042270544512321984, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.042270544512321984 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5541284403669725, + "acc_stderr": 0.021311335009708575, + "acc_norm": 0.5541284403669725, + "acc_norm_stderr": 0.021311335009708575 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3253968253968254, + "acc_stderr": 0.04190596438871136, + "acc_norm": 0.3253968253968254, + "acc_norm_stderr": 0.04190596438871136 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4477124183006536, + "acc_stderr": 0.028472938478033526, + "acc_norm": 0.4477124183006536, + "acc_norm_stderr": 0.028472938478033526 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.42, + 
"acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6033057851239669, + "acc_stderr": 0.044658697805310094, + "acc_norm": 0.6033057851239669, + "acc_norm_stderr": 0.044658697805310094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40789473684210525, + "acc_stderr": 0.03999309712777472, + "acc_norm": 0.40789473684210525, + "acc_norm_stderr": 0.03999309712777472 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.38562091503267976, + "acc_stderr": 0.01969145905235415, + "acc_norm": 0.38562091503267976, + "acc_norm_stderr": 0.01969145905235415 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.29432624113475175, + "acc_stderr": 0.02718712701150381, + "acc_norm": 0.29432624113475175, + "acc_norm_stderr": 0.02718712701150381 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.24107142857142858, + "acc_stderr": 0.04059867246952687, + "acc_norm": 0.24107142857142858, + "acc_norm_stderr": 0.04059867246952687 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.33796296296296297, + "acc_stderr": 0.03225941352631295, + "acc_norm": 0.33796296296296297, + "acc_norm_stderr": 0.03225941352631295 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.44, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3897058823529412, + "acc_stderr": 0.029624663581159696, + "acc_norm": 0.3897058823529412, + "acc_norm_stderr": 0.029624663581159696 + }, + 
"harness|ko_mmlu_security_studies|5": { + "acc": 0.44081632653061226, + "acc_stderr": 0.03178419114175363, + "acc_norm": 0.44081632653061226, + "acc_norm_stderr": 0.03178419114175363 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6244725738396625, + "acc_stderr": 0.03152256243091156, + "acc_norm": 0.6244725738396625, + "acc_norm_stderr": 0.03152256243091156 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.333116036505867, + "acc_stderr": 0.012037930451512052, + "acc_norm": 0.333116036505867, + "acc_norm_stderr": 0.012037930451512052 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4215686274509804, + "acc_stderr": 0.03465868196380758, + "acc_norm": 0.4215686274509804, + "acc_norm_stderr": 0.03465868196380758 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5393939393939394, + "acc_stderr": 0.03892207016552012, + "acc_norm": 0.5393939393939394, + "acc_norm_stderr": 0.03892207016552012 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2631578947368421, + "mc1_stderr": 0.015415241740237024, + "mc2": 0.4173314540045968, + "mc2_stderr": 0.014766350516789333 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4769775678866588, + "acc_stderr": 0.017172121546727637, + "acc_norm": 0.5442739079102715, + "acc_norm_stderr": 0.017122829143292658 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + 
"harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + 
"harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "MarkrAI/DopeorNope-maestro-v3-DPO-13b", + "model_sha": "2b13d8118774db16fd5c520866865674899f3240", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/MarkrAI/DopeorNope-maestro-v4-DPO-13b/result_2023-11-30 12:53:00.json b/MarkrAI/DopeorNope-maestro-v4-DPO-13b/result_2023-11-30 12:53:00.json new file mode 100644 index 0000000000000000000000000000000000000000..b0a863d7da4647917440ad45337dbab29775a839 --- /dev/null +++ b/MarkrAI/DopeorNope-maestro-v4-DPO-13b/result_2023-11-30 12:53:00.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4035836177474403, + "acc_stderr": 0.014337158914268436, + "acc_norm": 0.47013651877133106, + "acc_norm_stderr": 0.014585305840007109 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4304919338777136, + "acc_stderr": 0.004941331215598551, + "acc_norm": 0.5734913363871739, + "acc_norm_stderr": 0.004935587729948866 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5087719298245614, + "acc_stderr": 0.038342347441649924, + "acc_norm": 0.5087719298245614, + "acc_norm_stderr": 0.038342347441649924 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5436893203883495, + "acc_stderr": 0.049318019942204146, + "acc_norm": 0.5436893203883495, + "acc_norm_stderr": 0.049318019942204146 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5287356321839081, + "acc_stderr": 0.017850410794380173, + "acc_norm": 0.5287356321839081, + "acc_norm_stderr": 0.017850410794380173 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.43703703703703706, + "acc_stderr": 0.04284958639753399, + "acc_norm": 0.43703703703703706, + "acc_norm_stderr": 0.04284958639753399 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421255, + "acc_norm": 0.28, + 
"acc_norm_stderr": 0.045126085985421255 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.42127659574468085, + "acc_stderr": 0.03227834510146267, + "acc_norm": 0.42127659574468085, + "acc_norm_stderr": 0.03227834510146267 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4397590361445783, + "acc_stderr": 0.03864139923699122, + "acc_norm": 0.4397590361445783, + "acc_norm_stderr": 0.03864139923699122 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5369774919614148, + "acc_stderr": 0.02832032583010592, + "acc_norm": 0.5369774919614148, + "acc_norm_stderr": 0.02832032583010592 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4798206278026906, + "acc_stderr": 0.033530461674123, + "acc_norm": 0.4798206278026906, + "acc_norm_stderr": 0.033530461674123 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4580152671755725, + "acc_stderr": 0.04369802690578756, + "acc_norm": 0.4580152671755725, + "acc_norm_stderr": 0.04369802690578756 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6111111111111112, + "acc_stderr": 0.0347327959083696, + "acc_norm": 0.6111111111111112, + "acc_norm_stderr": 0.0347327959083696 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.42758620689655175, + "acc_stderr": 0.04122737111370331, + "acc_norm": 0.42758620689655175, + "acc_norm_stderr": 0.04122737111370331 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237655, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237655 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4369747899159664, + "acc_stderr": 0.03221943636566196, + "acc_norm": 0.4369747899159664, + "acc_norm_stderr": 0.03221943636566196 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4256410256410256, + "acc_stderr": 
0.025069094387296546, + "acc_norm": 0.4256410256410256, + "acc_norm_stderr": 0.025069094387296546 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.37438423645320196, + "acc_stderr": 0.03405155380561953, + "acc_norm": 0.37438423645320196, + "acc_norm_stderr": 0.03405155380561953 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.45806451612903226, + "acc_stderr": 0.028343787250540636, + "acc_norm": 0.45806451612903226, + "acc_norm_stderr": 0.028343787250540636 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6623931623931624, + "acc_stderr": 0.030980296992618558, + "acc_norm": 0.6623931623931624, + "acc_norm_stderr": 0.030980296992618558 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.44528301886792454, + "acc_stderr": 0.03058805297427065, + "acc_norm": 0.44528301886792454, + "acc_norm_stderr": 0.03058805297427065 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5636363636363636, + "acc_stderr": 0.04750185058907296, + "acc_norm": 0.5636363636363636, + "acc_norm_stderr": 0.04750185058907296 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2740740740740741, + "acc_stderr": 0.027195934804085626, + "acc_norm": 0.2740740740740741, + "acc_norm_stderr": 0.027195934804085626 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.03780445850526732, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.03780445850526732 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5771144278606966, + "acc_stderr": 
0.034932317774212816, + "acc_norm": 0.5771144278606966, + "acc_norm_stderr": 0.034932317774212816 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3988439306358382, + "acc_stderr": 0.03733626655383509, + "acc_norm": 0.3988439306358382, + "acc_norm_stderr": 0.03733626655383509 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2804232804232804, + "acc_stderr": 0.02313528797432564, + "acc_norm": 0.2804232804232804, + "acc_norm_stderr": 0.02313528797432564 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3472222222222222, + "acc_stderr": 0.03981240543717861, + "acc_norm": 0.3472222222222222, + "acc_norm_stderr": 0.03981240543717861 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.66, + "acc_stderr": 0.04760952285695237, + "acc_norm": 0.66, + "acc_norm_stderr": 0.04760952285695237 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5202312138728323, + "acc_stderr": 0.026897049996382868, + "acc_norm": 0.5202312138728323, + "acc_norm_stderr": 0.026897049996382868 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.49079754601226994, + "acc_stderr": 0.039277056007874414, + "acc_norm": 0.49079754601226994, + "acc_norm_stderr": 0.039277056007874414 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4660493827160494, + "acc_stderr": 0.027756535257347666, + "acc_norm": 0.4660493827160494, + "acc_norm_stderr": 0.027756535257347666 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5699481865284974, + "acc_stderr": 0.03572954333144808, + "acc_norm": 0.5699481865284974, + "acc_norm_stderr": 0.03572954333144808 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, 
+ "acc_stderr": 0.040493392977481404, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.040493392977481404 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5431192660550459, + "acc_stderr": 0.02135745878522622, + "acc_norm": 0.5431192660550459, + "acc_norm_stderr": 0.02135745878522622 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2698412698412698, + "acc_stderr": 0.03970158273235173, + "acc_norm": 0.2698412698412698, + "acc_norm_stderr": 0.03970158273235173 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.027914055510468008, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.027914055510468008 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6942148760330579, + "acc_stderr": 0.04205953933884123, + "acc_norm": 0.6942148760330579, + "acc_norm_stderr": 0.04205953933884123 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3684210526315789, + "acc_stderr": 0.03925523381052932, + "acc_norm": 0.3684210526315789, + "acc_norm_stderr": 0.03925523381052932 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.37254901960784315, + "acc_stderr": 0.019559646809215923, + "acc_norm": 0.37254901960784315, + "acc_norm_stderr": 0.019559646809215923 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3404255319148936, + "acc_stderr": 0.028267657482650144, + "acc_norm": 0.3404255319148936, + "acc_norm_stderr": 0.028267657482650144 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.26785714285714285, + "acc_stderr": 0.04203277291467762, + "acc_norm": 0.26785714285714285, + "acc_norm_stderr": 0.04203277291467762 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3472222222222222, + "acc_stderr": 0.032468872436376486, + "acc_norm": 0.3472222222222222, + "acc_norm_stderr": 0.032468872436376486 + }, + 
"harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3860294117647059, + "acc_stderr": 0.029573269134411124, + "acc_norm": 0.3860294117647059, + "acc_norm_stderr": 0.029573269134411124 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4489795918367347, + "acc_stderr": 0.0318421386668758, + "acc_norm": 0.4489795918367347, + "acc_norm_stderr": 0.0318421386668758 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6286919831223629, + "acc_stderr": 0.03145068600744859, + "acc_norm": 0.6286919831223629, + "acc_norm_stderr": 0.03145068600744859 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3500651890482399, + "acc_stderr": 0.012182552313215179, + "acc_norm": 0.3500651890482399, + "acc_norm_stderr": 0.012182552313215179 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5392156862745098, + "acc_stderr": 0.03498501649369527, + "acc_norm": 0.5392156862745098, + "acc_norm_stderr": 0.03498501649369527 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5333333333333333, + "acc_stderr": 0.03895658065271847, + "acc_norm": 0.5333333333333333, + "acc_norm_stderr": 0.03895658065271847 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2937576499388005, + "mc1_stderr": 0.01594506858123661, + "mc2": 0.46546644792049385, + "mc2_stderr": 0.015216431408315606 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.512396694214876, + "acc_stderr": 0.01718506973267653, + "acc_norm": 0.58913813459268, + "acc_norm_stderr": 0.01691497276784105 
+ } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + 
"harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "MarkrAI/DopeorNope-maestro-v4-DPO-13b", + "model_sha": "3e95480ac846fcbcfa758b1a1f3d54230a810c47", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/MarkrAI/kyujin-CoTy-platypus-ko-12.8b/result_2023-10-03 18:47:45.json b/MarkrAI/kyujin-CoTy-platypus-ko-12.8b/result_2023-10-03 18:47:45.json new file mode 100644 index 0000000000000000000000000000000000000000..8825c9be8b614073fcfc50d046d956b63dec3468 --- /dev/null +++ b/MarkrAI/kyujin-CoTy-platypus-ko-12.8b/result_2023-10-03 18:47:45.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.28668941979522183, + "acc_stderr": 0.013214986329274755, + "acc_norm": 0.34982935153583616, + "acc_norm_stderr": 0.013936809212158287 + }, + "harness|ko_hellaswag|10": { + "acc": 0.383788090021908, + "acc_stderr": 0.004853134271547759, + "acc_norm": 0.4911372236606254, + "acc_norm_stderr": 0.004988997467134492 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.29239766081871343, + "acc_stderr": 0.034886477134579236, + "acc_norm": 0.29239766081871343, + "acc_norm_stderr": 0.034886477134579236 + }, + 
"harness|ko_mmlu_management|5": { + "acc": 0.1941747572815534, + "acc_stderr": 0.03916667762822584, + "acc_norm": 0.1941747572815534, + "acc_norm_stderr": 0.03916667762822584 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.26309067688378035, + "acc_stderr": 0.01574549716904906, + "acc_norm": 0.26309067688378035, + "acc_norm_stderr": 0.01574549716904906 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.32592592592592595, + "acc_stderr": 0.040491220417025055, + "acc_norm": 0.32592592592592595, + "acc_norm_stderr": 0.040491220417025055 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421296, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421296 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.19574468085106383, + "acc_stderr": 0.025937853139977148, + "acc_norm": 0.19574468085106383, + "acc_norm_stderr": 0.025937853139977148 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.19879518072289157, + "acc_stderr": 0.031069390260789413, + "acc_norm": 0.19879518072289157, + "acc_norm_stderr": 0.031069390260789413 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3086816720257235, + "acc_stderr": 0.026236965881153266, + "acc_norm": 0.3086816720257235, + "acc_norm_stderr": 0.026236965881153266 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.19730941704035873, + "acc_stderr": 0.02670985334496796, + "acc_norm": 0.19730941704035873, + "acc_norm_stderr": 0.02670985334496796 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2748091603053435, + "acc_stderr": 0.039153454088478354, + "acc_norm": 0.2748091603053435, + "acc_norm_stderr": 0.039153454088478354 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.25757575757575757, + "acc_stderr": 0.031156269519646836, + "acc_norm": 0.25757575757575757, + "acc_norm_stderr": 0.031156269519646836 
+ }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.296551724137931, + "acc_stderr": 0.03806142687309994, + "acc_norm": 0.296551724137931, + "acc_norm_stderr": 0.03806142687309994 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.19607843137254902, + "acc_stderr": 0.039505818611799616, + "acc_norm": 0.19607843137254902, + "acc_norm_stderr": 0.039505818611799616 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.21008403361344538, + "acc_stderr": 0.026461398717471874, + "acc_norm": 0.21008403361344538, + "acc_norm_stderr": 0.026461398717471874 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2128205128205128, + "acc_stderr": 0.020752423722128002, + "acc_norm": 0.2128205128205128, + "acc_norm_stderr": 0.020752423722128002 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.23148148148148148, + "acc_stderr": 0.04077494709252627, + "acc_norm": 0.23148148148148148, + "acc_norm_stderr": 0.04077494709252627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.27586206896551724, + "acc_stderr": 0.03144712581678242, + "acc_norm": 0.27586206896551724, + "acc_norm_stderr": 0.03144712581678242 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.25161290322580643, + "acc_stderr": 0.024685979286239952, + "acc_norm": 0.25161290322580643, + "acc_norm_stderr": 0.024685979286239952 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2564102564102564, + "acc_stderr": 0.028605953702004253, + "acc_norm": 0.2564102564102564, + "acc_norm_stderr": 0.028605953702004253 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2528301886792453, + "acc_stderr": 0.026749899771241238, + "acc_norm": 
0.2528301886792453, + "acc_norm_stderr": 0.026749899771241238 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.20909090909090908, + "acc_stderr": 0.03895091015724135, + "acc_norm": 0.20909090909090908, + "acc_norm_stderr": 0.03895091015724135 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.026842057873833706, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.026842057873833706 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + "acc_stderr": 0.03684881521389023, + "acc_norm": 0.2847682119205298, + "acc_norm_stderr": 0.03684881521389023 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.2537313432835821, + "acc_stderr": 0.03076944496729602, + "acc_norm": 0.2537313432835821, + "acc_norm_stderr": 0.03076944496729602 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2543352601156069, + "acc_stderr": 0.0332055644308557, + "acc_norm": 0.2543352601156069, + "acc_norm_stderr": 0.0332055644308557 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.26455026455026454, + "acc_stderr": 0.022717467897708607, + "acc_norm": 0.26455026455026454, + "acc_norm_stderr": 0.022717467897708607 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2569444444444444, + "acc_stderr": 0.03653946969442099, + "acc_norm": 0.2569444444444444, + "acc_norm_stderr": 0.03653946969442099 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.18, + "acc_stderr": 0.03861229196653694, + "acc_norm": 0.18, + "acc_norm_stderr": 0.03861229196653694 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.28034682080924855, + "acc_stderr": 0.024182427496577612, + "acc_norm": 0.28034682080924855, + "acc_norm_stderr": 0.024182427496577612 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2883435582822086, + "acc_stderr": 
0.035590395316173425, + "acc_norm": 0.2883435582822086, + "acc_norm_stderr": 0.035590395316173425 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2932098765432099, + "acc_stderr": 0.025329888171900926, + "acc_norm": 0.2932098765432099, + "acc_norm_stderr": 0.025329888171900926 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.22797927461139897, + "acc_stderr": 0.030276909945178256, + "acc_norm": 0.22797927461139897, + "acc_norm_stderr": 0.030276909945178256 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.03999423879281336, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.03999423879281336 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.25871559633027524, + "acc_stderr": 0.01877605231961962, + "acc_norm": 0.25871559633027524, + "acc_norm_stderr": 0.01877605231961962 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.1349206349206349, + "acc_stderr": 0.030557101589417508, + "acc_norm": 0.1349206349206349, + "acc_norm_stderr": 0.030557101589417508 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.025261691219729484, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.025261691219729484 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.36363636363636365, + "acc_stderr": 0.043913262867240704, + "acc_norm": 0.36363636363636365, + "acc_norm_stderr": 0.043913262867240704 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3092105263157895, + "acc_stderr": 0.03761070869867479, + "acc_norm": 0.3092105263157895, + "acc_norm_stderr": 0.03761070869867479 + }, + "harness|ko_mmlu_professional_psychology|5": { + 
"acc": 0.27941176470588236, + "acc_stderr": 0.018152871051538812, + "acc_norm": 0.27941176470588236, + "acc_norm_stderr": 0.018152871051538812 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2624113475177305, + "acc_stderr": 0.02624492034984301, + "acc_norm": 0.2624113475177305, + "acc_norm_stderr": 0.02624492034984301 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.24107142857142858, + "acc_stderr": 0.04059867246952689, + "acc_norm": 0.24107142857142858, + "acc_norm_stderr": 0.04059867246952689 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2175925925925926, + "acc_stderr": 0.028139689444859676, + "acc_norm": 0.2175925925925926, + "acc_norm_stderr": 0.028139689444859676 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24916201117318434, + "acc_stderr": 0.014465893829859924, + "acc_norm": 0.24916201117318434, + "acc_norm_stderr": 0.014465893829859924 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.23161764705882354, + "acc_stderr": 0.025626533803777562, + "acc_norm": 0.23161764705882354, + "acc_norm_stderr": 0.025626533803777562 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.24081632653061225, + "acc_stderr": 0.027372942201788167, + "acc_norm": 0.24081632653061225, + "acc_norm_stderr": 0.027372942201788167 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.270042194092827, + "acc_stderr": 0.028900721906293426, + "acc_norm": 0.270042194092827, + "acc_norm_stderr": 0.028900721906293426 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.26792698826597133, + "acc_stderr": 0.011311347690633872, + "acc_norm": 0.26792698826597133, + 
"acc_norm_stderr": 0.011311347690633872 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.25980392156862747, + "acc_stderr": 0.03077855467869326, + "acc_norm": 0.25980392156862747, + "acc_norm_stderr": 0.03077855467869326 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2909090909090909, + "acc_stderr": 0.03546563019624336, + "acc_norm": 0.2909090909090909, + "acc_norm_stderr": 0.03546563019624336 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2252141982864137, + "mc1_stderr": 0.014623240768023479, + "mc2": 0.3758708542635285, + "mc2_stderr": 0.014474804257008467 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2892561983471074, + "acc_stderr": 0.015588800386053557, + "acc_norm": 0.3152302243211334, + "acc_norm_stderr": 0.01597353492379448 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "MarkrAI/kyujin-CoTy-platypus-ko-12.8b", + "model_sha": "f37be07af5a262fa9c37c33dcbff3b8702eac9a4", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/MarkrAI/kyujin-Poly-platypus-ko-12.8b/result_2023-10-02 06:47:06.json b/MarkrAI/kyujin-Poly-platypus-ko-12.8b/result_2023-10-02 
06:47:06.json new file mode 100644 index 0000000000000000000000000000000000000000..15c303246ea168a0e80dd39d2bb16c65b0aea360 --- /dev/null +++ b/MarkrAI/kyujin-Poly-platypus-ko-12.8b/result_2023-10-02 06:47:06.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.30887372013651876, + "acc_stderr": 0.013501770929344003, + "acc_norm": 0.3515358361774744, + "acc_norm_stderr": 0.013952413699600933 + }, + "harness|ko_hellaswag|10": { + "acc": 0.391256721768572, + "acc_stderr": 0.004870342592915049, + "acc_norm": 0.5038836885082653, + "acc_norm_stderr": 0.004989630887066195 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.28654970760233917, + "acc_stderr": 0.034678266857038245, + "acc_norm": 0.28654970760233917, + "acc_norm_stderr": 0.034678266857038245 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.1941747572815534, + "acc_stderr": 0.03916667762822584, + "acc_norm": 0.1941747572815534, + "acc_norm_stderr": 0.03916667762822584 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.26309067688378035, + "acc_stderr": 0.015745497169049057, + "acc_norm": 0.26309067688378035, + "acc_norm_stderr": 0.015745497169049057 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04072314811876837, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04072314811876837 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.04605661864718381, + "acc_norm": 0.3, + "acc_norm_stderr": 0.04605661864718381 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.20851063829787234, + "acc_stderr": 0.026556982117838752, + "acc_norm": 0.20851063829787234, + "acc_norm_stderr": 0.026556982117838752 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.20481927710843373, + "acc_stderr": 0.03141784291663926, + "acc_norm": 0.20481927710843373, + "acc_norm_stderr": 0.03141784291663926 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2990353697749196, + "acc_stderr": 0.02600330111788514, + 
"acc_norm": 0.2990353697749196, + "acc_norm_stderr": 0.02600330111788514 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.2062780269058296, + "acc_stderr": 0.027157150479563824, + "acc_norm": 0.2062780269058296, + "acc_norm_stderr": 0.027157150479563824 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2366412213740458, + "acc_stderr": 0.03727673575596916, + "acc_norm": 0.2366412213740458, + "acc_norm_stderr": 0.03727673575596916 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036845, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036845 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.25252525252525254, + "acc_stderr": 0.030954055470365907, + "acc_norm": 0.25252525252525254, + "acc_norm_stderr": 0.030954055470365907 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.296551724137931, + "acc_stderr": 0.03806142687309994, + "acc_norm": 0.296551724137931, + "acc_norm_stderr": 0.03806142687309994 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237657, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237657 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.21008403361344538, + "acc_stderr": 0.026461398717471874, + "acc_norm": 0.21008403361344538, + "acc_norm_stderr": 0.026461398717471874 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.21025641025641026, + "acc_stderr": 0.020660597485026928, + "acc_norm": 0.21025641025641026, + "acc_norm_stderr": 0.020660597485026928 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.23148148148148148, + "acc_stderr": 
0.04077494709252627, + "acc_norm": 0.23148148148148148, + "acc_norm_stderr": 0.04077494709252627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.27586206896551724, + "acc_stderr": 0.03144712581678242, + "acc_norm": 0.27586206896551724, + "acc_norm_stderr": 0.03144712581678242 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.25161290322580643, + "acc_stderr": 0.024685979286239956, + "acc_norm": 0.25161290322580643, + "acc_norm_stderr": 0.024685979286239956 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2564102564102564, + "acc_stderr": 0.028605953702004253, + "acc_norm": 0.2564102564102564, + "acc_norm_stderr": 0.028605953702004253 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.22264150943396227, + "acc_stderr": 0.025604233470899098, + "acc_norm": 0.22264150943396227, + "acc_norm_stderr": 0.025604233470899098 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.20909090909090908, + "acc_stderr": 0.03895091015724135, + "acc_norm": 0.20909090909090908, + "acc_norm_stderr": 0.03895091015724135 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.026962424325073838, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.026962424325073838 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + "acc_stderr": 0.03684881521389023, + "acc_norm": 0.2847682119205298, + "acc_norm_stderr": 0.03684881521389023 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.25870646766169153, + "acc_stderr": 0.03096590312357301, + "acc_norm": 0.25870646766169153, + "acc_norm_stderr": 0.03096590312357301 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.23699421965317918, + "acc_stderr": 0.03242414757483098, + "acc_norm": 0.23699421965317918, + "acc_norm_stderr": 0.03242414757483098 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2751322751322751, + "acc_stderr": 0.02300008685906865, + "acc_norm": 0.2751322751322751, + "acc_norm_stderr": 
0.02300008685906865 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2569444444444444, + "acc_stderr": 0.03653946969442099, + "acc_norm": 0.2569444444444444, + "acc_norm_stderr": 0.03653946969442099 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036846, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036846 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2947976878612717, + "acc_stderr": 0.02454761779480383, + "acc_norm": 0.2947976878612717, + "acc_norm_stderr": 0.02454761779480383 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3006134969325153, + "acc_stderr": 0.03602511318806771, + "acc_norm": 0.3006134969325153, + "acc_norm_stderr": 0.03602511318806771 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2932098765432099, + "acc_stderr": 0.025329888171900926, + "acc_norm": 0.2932098765432099, + "acc_norm_stderr": 0.025329888171900926 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.22797927461139897, + "acc_stderr": 0.030276909945178256, + "acc_norm": 0.22797927461139897, + "acc_norm_stderr": 0.030276909945178256 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.04049339297748141, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.04049339297748141 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.22752293577981653, + "acc_stderr": 0.0179744635787765, + "acc_norm": 0.22752293577981653, + "acc_norm_stderr": 0.0179744635787765 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.1984126984126984, + "acc_stderr": 0.03567016675276863, + "acc_norm": 0.1984126984126984, + 
"acc_norm_stderr": 0.03567016675276863 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.025261691219729487, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.025261691219729487 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.371900826446281, + "acc_stderr": 0.04412015806624503, + "acc_norm": 0.371900826446281, + "acc_norm_stderr": 0.04412015806624503 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3092105263157895, + "acc_stderr": 0.03761070869867479, + "acc_norm": 0.3092105263157895, + "acc_norm_stderr": 0.03761070869867479 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2761437908496732, + "acc_stderr": 0.018087276935663133, + "acc_norm": 0.2761437908496732, + "acc_norm_stderr": 0.018087276935663133 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.25886524822695034, + "acc_stderr": 0.026129572527180848, + "acc_norm": 0.25886524822695034, + "acc_norm_stderr": 0.026129572527180848 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.24107142857142858, + "acc_stderr": 0.04059867246952689, + "acc_norm": 0.24107142857142858, + "acc_norm_stderr": 0.04059867246952689 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2175925925925926, + "acc_stderr": 0.028139689444859676, + "acc_norm": 0.2175925925925926, + "acc_norm_stderr": 0.028139689444859676 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24692737430167597, + "acc_stderr": 0.014422292204808852, + "acc_norm": 0.24692737430167597, + "acc_norm_stderr": 0.014422292204808852 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.32, + "acc_stderr": 
0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.16544117647058823, + "acc_stderr": 0.022571771025494757, + "acc_norm": 0.16544117647058823, + "acc_norm_stderr": 0.022571771025494757 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.23265306122448978, + "acc_stderr": 0.02704925791589618, + "acc_norm": 0.23265306122448978, + "acc_norm_stderr": 0.02704925791589618 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.29957805907172996, + "acc_stderr": 0.0298180247497531, + "acc_norm": 0.29957805907172996, + "acc_norm_stderr": 0.0298180247497531 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.25945241199478486, + "acc_stderr": 0.011195262076350309, + "acc_norm": 0.25945241199478486, + "acc_norm_stderr": 0.011195262076350309 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.031321798030832904, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.031321798030832904 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2909090909090909, + "acc_stderr": 0.03546563019624337, + "acc_norm": 0.2909090909090909, + "acc_norm_stderr": 0.03546563019624337 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.23011015911872704, + "mc1_stderr": 0.01473455795980776, + "mc2": 0.38739814063055383, + "mc2_stderr": 0.01474443864761987 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2939787485242031, + "acc_stderr": 0.015663242569091115, + "acc_norm": 0.33884297520661155, + "acc_norm_stderr": 0.016272952997019124 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + 
"harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 
1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "MarkrAI/kyujin-Poly-platypus-ko-12.8b", + "model_sha": "cc48d722e28e785ef32b05f4ef0246df177af942", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/MaziyarPanahi/Calme-4x7B-MoE-v0.1/result_2024-06-16 23:34:11.json b/MaziyarPanahi/Calme-4x7B-MoE-v0.1/result_2024-06-16 23:34:11.json new file mode 100644 index 0000000000000000000000000000000000000000..8b6edcca75ae4e05766b9d1d88dbe300192968fb --- /dev/null +++ b/MaziyarPanahi/Calme-4x7B-MoE-v0.1/result_2024-06-16 23:34:11.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3779863481228669, + "acc_stderr": 0.014169664520303101, + "acc_norm": 0.4377133105802048, + "acc_norm_stderr": 0.01449757388110828 + }, + "harness|ko_hellaswag|10": { + "acc": 0.39613622784305913, + "acc_stderr": 0.004880937933163285, + "acc_norm": 0.5210117506472814, + "acc_norm_stderr": 0.004985373550775107 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4853801169590643, + "acc_stderr": 0.038331852752130205, + "acc_norm": 0.4853801169590643, + "acc_norm_stderr": 0.038331852752130205 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5922330097087378, + "acc_stderr": 0.04865777570410769, + "acc_norm": 0.5922330097087378, + "acc_norm_stderr": 0.04865777570410769 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4559386973180077, + "acc_stderr": 0.01781040392543537, + "acc_norm": 0.4559386973180077, + "acc_norm_stderr": 0.01781040392543537 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 
0.34814814814814815, + "acc_stderr": 0.041153246103369526, + "acc_norm": 0.34814814814814815, + "acc_norm_stderr": 0.041153246103369526 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4127659574468085, + "acc_stderr": 0.03218471141400351, + "acc_norm": 0.4127659574468085, + "acc_norm_stderr": 0.03218471141400351 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.35542168674698793, + "acc_stderr": 0.03726214354322415, + "acc_norm": 0.35542168674698793, + "acc_norm_stderr": 0.03726214354322415 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4758842443729904, + "acc_stderr": 0.028365041542564577, + "acc_norm": 0.4758842443729904, + "acc_norm_stderr": 0.028365041542564577 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4304932735426009, + "acc_stderr": 0.033231973029429394, + "acc_norm": 0.4304932735426009, + "acc_norm_stderr": 0.033231973029429394 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4580152671755725, + "acc_stderr": 0.04369802690578756, + "acc_norm": 0.4580152671755725, + "acc_norm_stderr": 0.04369802690578756 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5606060606060606, + "acc_stderr": 0.0353608594752948, + "acc_norm": 0.5606060606060606, + "acc_norm_stderr": 0.0353608594752948 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.46206896551724136, + "acc_stderr": 0.041546596717075474, + "acc_norm": 0.46206896551724136, + "acc_norm_stderr": 0.041546596717075474 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.04220773659171452, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.04220773659171452 + }, + 
"harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5084033613445378, + "acc_stderr": 0.0324739027656967, + "acc_norm": 0.5084033613445378, + "acc_norm_stderr": 0.0324739027656967 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4564102564102564, + "acc_stderr": 0.025254485424799605, + "acc_norm": 0.4564102564102564, + "acc_norm_stderr": 0.025254485424799605 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.62, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.62, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3793103448275862, + "acc_stderr": 0.034139638059062345, + "acc_norm": 0.3793103448275862, + "acc_norm_stderr": 0.034139638059062345 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.43548387096774194, + "acc_stderr": 0.02820622559150274, + "acc_norm": 0.43548387096774194, + "acc_norm_stderr": 0.02820622559150274 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7393162393162394, + "acc_stderr": 0.02876034895652341, + "acc_norm": 0.7393162393162394, + "acc_norm_stderr": 0.02876034895652341 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.46037735849056605, + "acc_stderr": 0.03067609659938917, + "acc_norm": 0.46037735849056605, + "acc_norm_stderr": 0.03067609659938917 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4909090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.4909090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.32592592592592595, + "acc_stderr": 0.028578348365473072, + "acc_norm": 0.32592592592592595, + "acc_norm_stderr": 
0.028578348365473072 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.26490066225165565, + "acc_stderr": 0.036030385453603826, + "acc_norm": 0.26490066225165565, + "acc_norm_stderr": 0.036030385453603826 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5920398009950248, + "acc_stderr": 0.03475116365194092, + "acc_norm": 0.5920398009950248, + "acc_norm_stderr": 0.03475116365194092 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.42196531791907516, + "acc_stderr": 0.037657466938651504, + "acc_norm": 0.42196531791907516, + "acc_norm_stderr": 0.037657466938651504 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.024870815251057093, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.024870815251057093 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.04076663253918567, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.04076663253918567 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.57, + "acc_stderr": 0.04975698519562426, + "acc_norm": 0.57, + "acc_norm_stderr": 0.04975698519562426 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.48265895953757226, + "acc_stderr": 0.02690290045866664, + "acc_norm": 0.48265895953757226, + "acc_norm_stderr": 0.02690290045866664 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5337423312883436, + "acc_stderr": 0.039194155450484096, + "acc_norm": 0.5337423312883436, + "acc_norm_stderr": 0.039194155450484096 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.45987654320987653, + "acc_stderr": 0.027731022753539277, + "acc_norm": 0.45987654320987653, + "acc_norm_stderr": 0.027731022753539277 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + 
"acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5025906735751295, + "acc_stderr": 0.03608390745384487, + "acc_norm": 0.5025906735751295, + "acc_norm_stderr": 0.03608390745384487 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3157894736842105, + "acc_stderr": 0.04372748290278007, + "acc_norm": 0.3157894736842105, + "acc_norm_stderr": 0.04372748290278007 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5082568807339449, + "acc_stderr": 0.021434399918214338, + "acc_norm": 0.5082568807339449, + "acc_norm_stderr": 0.021434399918214338 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4523809523809524, + "acc_stderr": 0.044518079590553275, + "acc_norm": 0.4523809523809524, + "acc_norm_stderr": 0.044518079590553275 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.45751633986928103, + "acc_stderr": 0.028526383452142638, + "acc_norm": 0.45751633986928103, + "acc_norm_stderr": 0.028526383452142638 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6033057851239669, + "acc_stderr": 0.044658697805310094, + "acc_norm": 0.6033057851239669, + "acc_norm_stderr": 0.044658697805310094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3618421052631579, + "acc_stderr": 0.03910525752849726, + "acc_norm": 0.3618421052631579, + "acc_norm_stderr": 0.03910525752849726 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4084967320261438, + "acc_stderr": 0.019886221037501862, + "acc_norm": 0.4084967320261438, + "acc_norm_stderr": 0.019886221037501862 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.35815602836879434, + "acc_stderr": 0.028602085862759412, + "acc_norm": 0.35815602836879434, + "acc_norm_stderr": 0.028602085862759412 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 
0.38392857142857145, + "acc_stderr": 0.04616143075028546, + "acc_norm": 0.38392857142857145, + "acc_norm_stderr": 0.04616143075028546 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.375, + "acc_stderr": 0.033016908987210894, + "acc_norm": 0.375, + "acc_norm_stderr": 0.033016908987210894 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27150837988826815, + "acc_stderr": 0.014874252168095268, + "acc_norm": 0.27150837988826815, + "acc_norm_stderr": 0.014874252168095268 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.65, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.65, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.33088235294117646, + "acc_stderr": 0.02858270975389843, + "acc_norm": 0.33088235294117646, + "acc_norm_stderr": 0.02858270975389843 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5510204081632653, + "acc_stderr": 0.03184213866687578, + "acc_norm": 0.5510204081632653, + "acc_norm_stderr": 0.03184213866687578 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5949367088607594, + "acc_stderr": 0.031955147413706725, + "acc_norm": 0.5949367088607594, + "acc_norm_stderr": 0.031955147413706725 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3396349413298566, + "acc_stderr": 0.012095592506931969, + "acc_norm": 0.3396349413298566, + "acc_norm_stderr": 0.012095592506931969 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.47549019607843135, + "acc_stderr": 0.03505093194348798, + "acc_norm": 0.47549019607843135, + "acc_norm_stderr": 0.03505093194348798 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.503030303030303, + "acc_stderr": 0.03904272341431857, + "acc_norm": 0.503030303030303, + "acc_norm_stderr": 0.03904272341431857 
+ }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.40269277845777235, + "mc1_stderr": 0.017168830935187226, + "mc2": 0.56994352466602, + "mc2_stderr": 0.01638269401679135 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.41086186540731995, + "acc_stderr": 0.016914972767841062, + "acc_norm": 0.4214876033057851, + "acc_norm_stderr": 0.016977101932601525 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 
1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "MaziyarPanahi/Calme-4x7B-MoE-v0.1", + "model_sha": "e2fab90eef37977002947684043f139a1660f519", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/MaziyarPanahi/Calme-4x7B-MoE-v0.2/result_2024-07-17 20:17:34.json b/MaziyarPanahi/Calme-4x7B-MoE-v0.2/result_2024-07-17 20:17:34.json new file mode 100644 index 0000000000000000000000000000000000000000..00fc769a9d29ae2c86d264b7b3af9545560701e3 --- /dev/null +++ b/MaziyarPanahi/Calme-4x7B-MoE-v0.2/result_2024-07-17 20:17:34.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.378839590443686, + "acc_stderr": 0.014175915490000322, + "acc_norm": 0.4351535836177474, + "acc_norm_stderr": 0.014487986197186045 + }, + "harness|ko_hellaswag|10": { + "acc": 
0.3951404102768373, + "acc_stderr": 0.004878816961012046, + "acc_norm": 0.5198167695678152, + "acc_norm_stderr": 0.004985860853427639 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.49707602339181284, + "acc_stderr": 0.03834759370936839, + "acc_norm": 0.49707602339181284, + "acc_norm_stderr": 0.03834759370936839 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6019417475728155, + "acc_stderr": 0.04846748253977238, + "acc_norm": 0.6019417475728155, + "acc_norm_stderr": 0.04846748253977238 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.45849297573435505, + "acc_stderr": 0.017818248603465554, + "acc_norm": 0.45849297573435505, + "acc_norm_stderr": 0.017818248603465554 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34074074074074073, + "acc_stderr": 0.04094376269996793, + "acc_norm": 0.34074074074074073, + "acc_norm_stderr": 0.04094376269996793 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4085106382978723, + "acc_stderr": 0.03213418026701576, + "acc_norm": 0.4085106382978723, + "acc_norm_stderr": 0.03213418026701576 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.35542168674698793, + "acc_stderr": 0.03726214354322415, + "acc_norm": 0.35542168674698793, + "acc_norm_stderr": 0.03726214354322415 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4758842443729904, + "acc_stderr": 0.028365041542564577, + "acc_norm": 0.4758842443729904, + "acc_norm_stderr": 0.028365041542564577 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4260089686098655, + "acc_stderr": 0.0331883328621728, + "acc_norm": 0.4260089686098655, + "acc_norm_stderr": 0.0331883328621728 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.44274809160305345, + "acc_stderr": 0.043564472026650695, + "acc_norm": 0.44274809160305345, + "acc_norm_stderr": 0.043564472026650695 + }, + 
"harness|ko_mmlu_medical_genetics|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.035402943770953675, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.035402943770953675 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.46206896551724136, + "acc_stderr": 0.041546596717075474, + "acc_norm": 0.46206896551724136, + "acc_norm_stderr": 0.041546596717075474 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.044405219061793275, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.044405219061793275 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5042016806722689, + "acc_stderr": 0.03247734334448111, + "acc_norm": 0.5042016806722689, + "acc_norm_stderr": 0.03247734334448111 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.46153846153846156, + "acc_stderr": 0.02527589207024063, + "acc_norm": 0.46153846153846156, + "acc_norm_stderr": 0.02527589207024063 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.37438423645320196, + "acc_stderr": 0.03405155380561952, + "acc_norm": 0.37438423645320196, + "acc_norm_stderr": 0.03405155380561952 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4290322580645161, + "acc_stderr": 0.02815603653823321, + "acc_norm": 0.4290322580645161, + 
"acc_norm_stderr": 0.02815603653823321 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7435897435897436, + "acc_stderr": 0.02860595370200425, + "acc_norm": 0.7435897435897436, + "acc_norm_stderr": 0.02860595370200425 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4679245283018868, + "acc_stderr": 0.030709486992556538, + "acc_norm": 0.4679245283018868, + "acc_norm_stderr": 0.030709486992556538 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5, + "acc_stderr": 0.04789131426105757, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04789131426105757 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3296296296296296, + "acc_stderr": 0.028661201116524586, + "acc_norm": 0.3296296296296296, + "acc_norm_stderr": 0.028661201116524586 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.25165562913907286, + "acc_stderr": 0.03543304234389985, + "acc_norm": 0.25165562913907286, + "acc_norm_stderr": 0.03543304234389985 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5970149253731343, + "acc_stderr": 0.034683432951111266, + "acc_norm": 0.5970149253731343, + "acc_norm_stderr": 0.034683432951111266 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4161849710982659, + "acc_stderr": 0.03758517775404947, + "acc_norm": 0.4161849710982659, + "acc_norm_stderr": 0.03758517775404947 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.373015873015873, + "acc_stderr": 0.02490699045899257, + "acc_norm": 0.373015873015873, + "acc_norm_stderr": 0.02490699045899257 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.375, + "acc_stderr": 0.04048439222695598, + "acc_norm": 0.375, + "acc_norm_stderr": 0.04048439222695598 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.57, + "acc_stderr": 0.04975698519562426, + "acc_norm": 0.57, + "acc_norm_stderr": 
0.04975698519562426 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.48554913294797686, + "acc_stderr": 0.026907849856282532, + "acc_norm": 0.48554913294797686, + "acc_norm_stderr": 0.026907849856282532 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5337423312883436, + "acc_stderr": 0.039194155450484096, + "acc_norm": 0.5337423312883436, + "acc_norm_stderr": 0.039194155450484096 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.027744313443376543, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.027744313443376543 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.49222797927461137, + "acc_stderr": 0.03608003225569654, + "acc_norm": 0.49222797927461137, + "acc_norm_stderr": 0.03608003225569654 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.30701754385964913, + "acc_stderr": 0.0433913832257986, + "acc_norm": 0.30701754385964913, + "acc_norm_stderr": 0.0433913832257986 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5009174311926605, + "acc_stderr": 0.021437287056051215, + "acc_norm": 0.5009174311926605, + "acc_norm_stderr": 0.021437287056051215 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3968253968253968, + "acc_stderr": 0.04375888492727062, + "acc_norm": 0.3968253968253968, + "acc_norm_stderr": 0.04375888492727062 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.45751633986928103, + "acc_stderr": 0.028526383452142635, + "acc_norm": 0.45751633986928103, + "acc_norm_stderr": 0.028526383452142635 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5950413223140496, + "acc_stderr": 0.044811377559424694, + "acc_norm": 
0.5950413223140496, + "acc_norm_stderr": 0.044811377559424694 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.375, + "acc_stderr": 0.039397364351956274, + "acc_norm": 0.375, + "acc_norm_stderr": 0.039397364351956274 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4068627450980392, + "acc_stderr": 0.019873802005061177, + "acc_norm": 0.4068627450980392, + "acc_norm_stderr": 0.019873802005061177 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.36879432624113473, + "acc_stderr": 0.028782227561347247, + "acc_norm": 0.36879432624113473, + "acc_norm_stderr": 0.028782227561347247 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.375, + "acc_stderr": 0.04595091388086298, + "acc_norm": 0.375, + "acc_norm_stderr": 0.04595091388086298 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.032568505702936464, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.032568505702936464 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2748603351955307, + "acc_stderr": 0.014931316703220504, + "acc_norm": 0.2748603351955307, + "acc_norm_stderr": 0.014931316703220504 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.66, + "acc_stderr": 0.04760952285695237, + "acc_norm": 0.66, + "acc_norm_stderr": 0.04760952285695237 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3272058823529412, + "acc_stderr": 0.02850145286039657, + "acc_norm": 0.3272058823529412, + "acc_norm_stderr": 0.02850145286039657 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5510204081632653, + "acc_stderr": 0.03184213866687578, + "acc_norm": 0.5510204081632653, + "acc_norm_stderr": 0.03184213866687578 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5949367088607594, + "acc_stderr": 
0.031955147413706725, + "acc_norm": 0.5949367088607594, + "acc_norm_stderr": 0.031955147413706725 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.34485006518904826, + "acc_stderr": 0.012139881006287061, + "acc_norm": 0.34485006518904826, + "acc_norm_stderr": 0.012139881006287061 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.46078431372549017, + "acc_stderr": 0.03498501649369527, + "acc_norm": 0.46078431372549017, + "acc_norm_stderr": 0.03498501649369527 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5333333333333333, + "acc_stderr": 0.03895658065271846, + "acc_norm": 0.5333333333333333, + "acc_norm_stderr": 0.03895658065271846 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.41003671970624234, + "mc1_stderr": 0.017217844717449318, + "mc2": 0.5669240615729016, + "mc2_stderr": 0.016376341109227546 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.41204250295159384, + "acc_stderr": 0.01692227673852836, + "acc_norm": 0.42266824085005905, + "acc_norm_stderr": 0.0169835060795776 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + 
"harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "MaziyarPanahi/Calme-4x7B-MoE-v0.2", + "model_sha": "ffef41baf94b3f88b30cf0aeb3fd72d9e4187161", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + 
"override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/MaziyarPanahi/Llama-3-8B-Instruct-v0.10/result_2024-08-04 00:41:24.json b/MaziyarPanahi/Llama-3-8B-Instruct-v0.10/result_2024-08-04 00:41:24.json new file mode 100644 index 0000000000000000000000000000000000000000..ca1bbe6667191beeaf1beae776d11a6d78085616 --- /dev/null +++ b/MaziyarPanahi/Llama-3-8B-Instruct-v0.10/result_2024-08-04 00:41:24.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4462457337883959, + "acc_stderr": 0.014526705548539983, + "acc_norm": 0.5042662116040956, + "acc_norm_stderr": 0.014610858923956952 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3884684325831508, + "acc_stderr": 0.004864058877626285, + "acc_norm": 0.5145389364668392, + "acc_norm_stderr": 0.004987671478640939 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.03811079669833531, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.03811079669833531 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6310679611650486, + "acc_stderr": 0.0477761518115674, + "acc_norm": 0.6310679611650486, + "acc_norm_stderr": 0.0477761518115674 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.46871008939974457, + "acc_stderr": 0.017844918090468544, + "acc_norm": 0.46871008939974457, + "acc_norm_stderr": 0.017844918090468544 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34814814814814815, + "acc_stderr": 0.041153246103369526, + "acc_norm": 0.34814814814814815, + "acc_norm_stderr": 0.041153246103369526 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.46808510638297873, + "acc_stderr": 0.03261936918467381, + "acc_norm": 0.46808510638297873, + "acc_norm_stderr": 0.03261936918467381 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3493975903614458, + 
"acc_stderr": 0.0371172519074075, + "acc_norm": 0.3493975903614458, + "acc_norm_stderr": 0.0371172519074075 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5884244372990354, + "acc_stderr": 0.027950481494401273, + "acc_norm": 0.5884244372990354, + "acc_norm_stderr": 0.027950481494401273 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.515695067264574, + "acc_stderr": 0.0335412657542081, + "acc_norm": 0.515695067264574, + "acc_norm_stderr": 0.0335412657542081 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4961832061068702, + "acc_stderr": 0.043851623256015534, + "acc_norm": 0.4961832061068702, + "acc_norm_stderr": 0.043851623256015534 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5151515151515151, + "acc_stderr": 0.035607165165310595, + "acc_norm": 0.5151515151515151, + "acc_norm_stderr": 0.035607165165310595 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5586206896551724, + "acc_stderr": 0.04137931034482757, + "acc_norm": 0.5586206896551724, + "acc_norm_stderr": 0.04137931034482757 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.4117647058823529, + "acc_stderr": 0.048971049527263666, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.048971049527263666 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5504201680672269, + "acc_stderr": 0.03231293497137707, + "acc_norm": 0.5504201680672269, + "acc_norm_stderr": 0.03231293497137707 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5307692307692308, + "acc_stderr": 0.025302958890850154, + "acc_norm": 0.5307692307692308, + "acc_norm_stderr": 0.025302958890850154 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_global_facts|5": { 
+ "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6481481481481481, + "acc_stderr": 0.04616631111801713, + "acc_norm": 0.6481481481481481, + "acc_norm_stderr": 0.04616631111801713 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.458128078817734, + "acc_stderr": 0.03505630140785741, + "acc_norm": 0.458128078817734, + "acc_norm_stderr": 0.03505630140785741 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5451612903225806, + "acc_stderr": 0.028327743091561074, + "acc_norm": 0.5451612903225806, + "acc_norm_stderr": 0.028327743091561074 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7350427350427351, + "acc_stderr": 0.02891120880274947, + "acc_norm": 0.7350427350427351, + "acc_norm_stderr": 0.02891120880274947 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5283018867924528, + "acc_stderr": 0.0307235352490061, + "acc_norm": 0.5283018867924528, + "acc_norm_stderr": 0.0307235352490061 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5363636363636364, + "acc_stderr": 0.04776449162396197, + "acc_norm": 0.5363636363636364, + "acc_norm_stderr": 0.04776449162396197 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.37777777777777777, + "acc_stderr": 0.02956070739246571, + "acc_norm": 0.37777777777777777, + "acc_norm_stderr": 0.02956070739246571 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.37748344370860926, + "acc_stderr": 0.039580272311215706, + "acc_norm": 0.37748344370860926, + "acc_norm_stderr": 0.039580272311215706 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7114427860696517, + "acc_stderr": 0.0320384104021332, + "acc_norm": 0.7114427860696517, + "acc_norm_stderr": 0.0320384104021332 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.47398843930635837, + "acc_stderr": 0.038073017265045105, + "acc_norm": 0.47398843930635837, + "acc_norm_stderr": 
0.038073017265045105 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.41005291005291006, + "acc_stderr": 0.02533120243894443, + "acc_norm": 0.41005291005291006, + "acc_norm_stderr": 0.02533120243894443 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4861111111111111, + "acc_stderr": 0.04179596617581, + "acc_norm": 0.4861111111111111, + "acc_norm_stderr": 0.04179596617581 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.73, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.73, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5578034682080925, + "acc_stderr": 0.026738603643807403, + "acc_norm": 0.5578034682080925, + "acc_norm_stderr": 0.026738603643807403 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4723926380368098, + "acc_stderr": 0.039223782906109894, + "acc_norm": 0.4723926380368098, + "acc_norm_stderr": 0.039223782906109894 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5339506172839507, + "acc_stderr": 0.027756535257347663, + "acc_norm": 0.5339506172839507, + "acc_norm_stderr": 0.027756535257347663 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5751295336787565, + "acc_stderr": 0.035674713352125395, + "acc_norm": 0.5751295336787565, + "acc_norm_stderr": 0.035674713352125395 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.35964912280701755, + "acc_stderr": 0.04514496132873633, + "acc_norm": 0.35964912280701755, + "acc_norm_stderr": 0.04514496132873633 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6110091743119266, + "acc_stderr": 0.020902300887392866, + "acc_norm": 0.6110091743119266, 
+ "acc_norm_stderr": 0.020902300887392866 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4126984126984127, + "acc_stderr": 0.04403438954768177, + "acc_norm": 0.4126984126984127, + "acc_norm_stderr": 0.04403438954768177 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5588235294117647, + "acc_stderr": 0.028431095444176643, + "acc_norm": 0.5588235294117647, + "acc_norm_stderr": 0.028431095444176643 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.64, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.64, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7024793388429752, + "acc_stderr": 0.04173349148083498, + "acc_norm": 0.7024793388429752, + "acc_norm_stderr": 0.04173349148083498 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5197368421052632, + "acc_stderr": 0.04065771002562603, + "acc_norm": 0.5197368421052632, + "acc_norm_stderr": 0.04065771002562603 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.45098039215686275, + "acc_stderr": 0.02013038831290453, + "acc_norm": 0.45098039215686275, + "acc_norm_stderr": 0.02013038831290453 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.36524822695035464, + "acc_stderr": 0.028723863853281278, + "acc_norm": 0.36524822695035464, + "acc_norm_stderr": 0.028723863853281278 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4375, + "acc_stderr": 0.04708567521880525, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.04708567521880525 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.44907407407407407, + "acc_stderr": 0.033922384053216174, + "acc_norm": 0.44907407407407407, + "acc_norm_stderr": 0.033922384053216174 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2860335195530726, + "acc_stderr": 0.015113972129062127, + "acc_norm": 0.2860335195530726, + "acc_norm_stderr": 0.015113972129062127 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + 
"acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.7, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.7, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.40441176470588236, + "acc_stderr": 0.02981263070156974, + "acc_norm": 0.40441176470588236, + "acc_norm_stderr": 0.02981263070156974 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.636734693877551, + "acc_stderr": 0.030789051139030806, + "acc_norm": 0.636734693877551, + "acc_norm_stderr": 0.030789051139030806 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6708860759493671, + "acc_stderr": 0.030587326294702358, + "acc_norm": 0.6708860759493671, + "acc_norm_stderr": 0.030587326294702358 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.38461538461538464, + "acc_stderr": 0.01242554841630295, + "acc_norm": 0.38461538461538464, + "acc_norm_stderr": 0.01242554841630295 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5882352941176471, + "acc_stderr": 0.03454236585380608, + "acc_norm": 0.5882352941176471, + "acc_norm_stderr": 0.03454236585380608 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6909090909090909, + "acc_stderr": 0.036085410115739666, + "acc_norm": 0.6909090909090909, + "acc_norm_stderr": 0.036085410115739666 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3843329253365973, + "mc1_stderr": 0.01702870730124519, + "mc2": 0.5670517530117019, + "mc2_stderr": 0.01604743650386516 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5360094451003542, + "acc_stderr": 0.01714571536548666, + "acc_norm": 0.5572609208972845, + "acc_norm_stderr": 0.01707725413155622 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + 
"harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + 
"harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "MaziyarPanahi/Llama-3-8B-Instruct-v0.10", + "model_sha": "55a6fc03e04f1a68a5e2df16f3d0485d9ea357c8", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/MaziyarPanahi/Llama-3-8B-Instruct-v0.8/result_2024-05-29 09:26:20.json b/MaziyarPanahi/Llama-3-8B-Instruct-v0.8/result_2024-05-29 09:26:20.json new file mode 100644 index 0000000000000000000000000000000000000000..0620f78d3c414b62c4b92626e86541a23e97a62b --- /dev/null +++ b/MaziyarPanahi/Llama-3-8B-Instruct-v0.8/result_2024-05-29 09:26:20.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.431740614334471, + "acc_stderr": 0.014474591427196204, + "acc_norm": 0.492320819112628, + "acc_norm_stderr": 0.01460966744089257 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3824935271858196, + "acc_stderr": 0.004850028813189975, + "acc_norm": 0.5057757418840868, + "acc_norm_stderr": 0.004989448490164434 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5730994152046783, + "acc_stderr": 0.03793620616529916, + "acc_norm": 0.5730994152046783, + "acc_norm_stderr": 0.03793620616529916 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6310679611650486, + "acc_stderr": 0.0477761518115674, + "acc_norm": 0.6310679611650486, + "acc_norm_stderr": 0.0477761518115674 + }, + 
"harness|ko_mmlu_miscellaneous|5": { + "acc": 0.45721583652618136, + "acc_stderr": 0.01781438523853443, + "acc_norm": 0.45721583652618136, + "acc_norm_stderr": 0.01781438523853443 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37777777777777777, + "acc_stderr": 0.04188307537595853, + "acc_norm": 0.37777777777777777, + "acc_norm_stderr": 0.04188307537595853 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.46808510638297873, + "acc_stderr": 0.03261936918467382, + "acc_norm": 0.46808510638297873, + "acc_norm_stderr": 0.03261936918467382 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3433734939759036, + "acc_stderr": 0.03696584317010601, + "acc_norm": 0.3433734939759036, + "acc_norm_stderr": 0.03696584317010601 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5594855305466238, + "acc_stderr": 0.028196400574197422, + "acc_norm": 0.5594855305466238, + "acc_norm_stderr": 0.028196400574197422 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5067264573991032, + "acc_stderr": 0.033554765962343545, + "acc_norm": 0.5067264573991032, + "acc_norm_stderr": 0.033554765962343545 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5038167938931297, + "acc_stderr": 0.04385162325601553, + "acc_norm": 0.5038167938931297, + "acc_norm_stderr": 0.04385162325601553 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.51010101010101, + "acc_stderr": 0.035616254886737454, + "acc_norm": 0.51010101010101, + "acc_norm_stderr": 0.035616254886737454 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.593103448275862, + "acc_stderr": 0.04093793981266237, + "acc_norm": 0.593103448275862, + "acc_norm_stderr": 0.04093793981266237 + }, + 
"harness|ko_mmlu_college_physics|5": { + "acc": 0.39215686274509803, + "acc_stderr": 0.048580835742663454, + "acc_norm": 0.39215686274509803, + "acc_norm_stderr": 0.048580835742663454 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5630252100840336, + "acc_stderr": 0.03221943636566197, + "acc_norm": 0.5630252100840336, + "acc_norm_stderr": 0.03221943636566197 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5153846153846153, + "acc_stderr": 0.025339003010106505, + "acc_norm": 0.5153846153846153, + "acc_norm_stderr": 0.025339003010106505 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6296296296296297, + "acc_stderr": 0.04668408033024931, + "acc_norm": 0.6296296296296297, + "acc_norm_stderr": 0.04668408033024931 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.46798029556650245, + "acc_stderr": 0.03510766597959217, + "acc_norm": 0.46798029556650245, + "acc_norm_stderr": 0.03510766597959217 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5548387096774193, + "acc_stderr": 0.028272410186214906, + "acc_norm": 0.5548387096774193, + "acc_norm_stderr": 0.028272410186214906 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7435897435897436, + "acc_stderr": 0.028605953702004243, + "acc_norm": 0.7435897435897436, + "acc_norm_stderr": 0.028605953702004243 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5283018867924528, + "acc_stderr": 0.0307235352490061, + "acc_norm": 0.5283018867924528, + "acc_norm_stderr": 0.0307235352490061 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5363636363636364, + "acc_stderr": 0.04776449162396197, + "acc_norm": 0.5363636363636364, + 
"acc_norm_stderr": 0.04776449162396197 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.029723278961476664, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.029723278961476664 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3841059602649007, + "acc_stderr": 0.03971301814719198, + "acc_norm": 0.3841059602649007, + "acc_norm_stderr": 0.03971301814719198 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7014925373134329, + "acc_stderr": 0.03235743789355043, + "acc_norm": 0.7014925373134329, + "acc_norm_stderr": 0.03235743789355043 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.47398843930635837, + "acc_stderr": 0.038073017265045105, + "acc_norm": 0.47398843930635837, + "acc_norm_stderr": 0.038073017265045105 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3915343915343915, + "acc_stderr": 0.025138091388851105, + "acc_norm": 0.3915343915343915, + "acc_norm_stderr": 0.025138091388851105 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.04174752578923185, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.04174752578923185 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.74, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.74, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5549132947976878, + "acc_stderr": 0.02675625512966377, + "acc_norm": 0.5549132947976878, + "acc_norm_stderr": 0.02675625512966377 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4601226993865031, + "acc_stderr": 0.0391585729143697, + "acc_norm": 0.4601226993865031, + "acc_norm_stderr": 0.0391585729143697 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5432098765432098, + "acc_stderr": 0.027716661650194038, + 
"acc_norm": 0.5432098765432098, + "acc_norm_stderr": 0.027716661650194038 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.047609522856952344, + "acc_norm": 0.34, + "acc_norm_stderr": 0.047609522856952344 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5906735751295337, + "acc_stderr": 0.03548608168860806, + "acc_norm": 0.5906735751295337, + "acc_norm_stderr": 0.03548608168860806 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3508771929824561, + "acc_stderr": 0.04489539350270698, + "acc_norm": 0.3508771929824561, + "acc_norm_stderr": 0.04489539350270698 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6110091743119266, + "acc_stderr": 0.020902300887392866, + "acc_norm": 0.6110091743119266, + "acc_norm_stderr": 0.020902300887392866 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4126984126984127, + "acc_stderr": 0.04403438954768177, + "acc_norm": 0.4126984126984127, + "acc_norm_stderr": 0.04403438954768177 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5718954248366013, + "acc_stderr": 0.028332397483664274, + "acc_norm": 0.5718954248366013, + "acc_norm_stderr": 0.028332397483664274 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.65, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.65, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.71900826446281, + "acc_stderr": 0.04103203830514511, + "acc_norm": 0.71900826446281, + "acc_norm_stderr": 0.04103203830514511 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5328947368421053, + "acc_stderr": 0.04060127035236395, + "acc_norm": 0.5328947368421053, + "acc_norm_stderr": 0.04060127035236395 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4493464052287582, + "acc_stderr": 0.020123766528027262, + "acc_norm": 0.4493464052287582, + "acc_norm_stderr": 0.020123766528027262 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3475177304964539, 
+ "acc_stderr": 0.02840662780959095, + "acc_norm": 0.3475177304964539, + "acc_norm_stderr": 0.02840662780959095 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.45535714285714285, + "acc_stderr": 0.04726835553719099, + "acc_norm": 0.45535714285714285, + "acc_norm_stderr": 0.04726835553719099 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4537037037037037, + "acc_stderr": 0.03395322726375798, + "acc_norm": 0.4537037037037037, + "acc_norm_stderr": 0.03395322726375798 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.28938547486033517, + "acc_stderr": 0.015166544550490308, + "acc_norm": 0.28938547486033517, + "acc_norm_stderr": 0.015166544550490308 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.71, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.71, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.39338235294117646, + "acc_stderr": 0.02967428828131118, + "acc_norm": 0.39338235294117646, + "acc_norm_stderr": 0.02967428828131118 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.636734693877551, + "acc_stderr": 0.030789051139030806, + "acc_norm": 0.636734693877551, + "acc_norm_stderr": 0.030789051139030806 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6582278481012658, + "acc_stderr": 0.030874537537553617, + "acc_norm": 0.6582278481012658, + "acc_norm_stderr": 0.030874537537553617 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3820078226857888, + "acc_stderr": 0.012409564470235581, + "acc_norm": 0.3820078226857888, + "acc_norm_stderr": 0.012409564470235581 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5833333333333334, + "acc_stderr": 0.03460228327239172, + "acc_norm": 0.5833333333333334, + "acc_norm_stderr": 0.03460228327239172 + }, 
+ "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6787878787878788, + "acc_stderr": 0.036462049632538136, + "acc_norm": 0.6787878787878788, + "acc_norm_stderr": 0.036462049632538136 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.37576499388004897, + "mc1_stderr": 0.016954584060214307, + "mc2": 0.5529292583191235, + "mc2_stderr": 0.01605767505101566 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5242030696576151, + "acc_stderr": 0.017170202466520748, + "acc_norm": 0.5525383707201889, + "acc_norm_stderr": 0.01709519030150058 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + 
"harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "MaziyarPanahi/Llama-3-8B-Instruct-v0.8", + "model_sha": "94d222b8447b600b9836da4036df9490b59fe966", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/MaziyarPanahi/Mistral-7B-Instruct-v0.3/result_2024-05-29 07:48:29.json b/MaziyarPanahi/Mistral-7B-Instruct-v0.3/result_2024-05-29 07:48:29.json new file mode 100644 index 0000000000000000000000000000000000000000..4b2af1d3e05a78477b937987398de8b1db3beac6 --- /dev/null +++ b/MaziyarPanahi/Mistral-7B-Instruct-v0.3/result_2024-05-29 07:48:29.json @@ -0,0 +1,444 @@ +{ + "results": { + 
"harness|ko_arc_challenge|25": { + "acc": 0.3438566552901024, + "acc_stderr": 0.013880644570156213, + "acc_norm": 0.39590443686006827, + "acc_norm_stderr": 0.01429122839353659 + }, + "harness|ko_hellaswag|10": { + "acc": 0.37094204341764586, + "acc_stderr": 0.004820697457420414, + "acc_norm": 0.4682334196375224, + "acc_norm_stderr": 0.0049797006957479515 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.47953216374269003, + "acc_stderr": 0.038316105328219316, + "acc_norm": 0.47953216374269003, + "acc_norm_stderr": 0.038316105328219316 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5922330097087378, + "acc_stderr": 0.04865777570410768, + "acc_norm": 0.5922330097087378, + "acc_norm_stderr": 0.04865777570410768 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.017867695938429774, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.017867695938429774 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.35555555555555557, + "acc_stderr": 0.04135176749720386, + "acc_norm": 0.35555555555555557, + "acc_norm_stderr": 0.04135176749720386 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3659574468085106, + "acc_stderr": 0.031489558297455304, + "acc_norm": 0.3659574468085106, + "acc_norm_stderr": 0.031489558297455304 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3795180722891566, + "acc_stderr": 0.037777988227480165, + "acc_norm": 0.3795180722891566, + "acc_norm_stderr": 0.037777988227480165 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4565916398713826, + "acc_stderr": 0.028290869054197604, + "acc_norm": 0.4565916398713826, + "acc_norm_stderr": 0.028290869054197604 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.49327354260089684, + "acc_stderr": 0.03355476596234353, + "acc_norm": 0.49327354260089684, + "acc_norm_stderr": 
0.03355476596234353 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4122137404580153, + "acc_stderr": 0.04317171194870255, + "acc_norm": 0.4122137404580153, + "acc_norm_stderr": 0.04317171194870255 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.35, + "acc_stderr": 0.04793724854411021, + "acc_norm": 0.35, + "acc_norm_stderr": 0.04793724854411021 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.51010101010101, + "acc_stderr": 0.035616254886737454, + "acc_norm": 0.51010101010101, + "acc_norm_stderr": 0.035616254886737454 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.45517241379310347, + "acc_stderr": 0.04149886942192117, + "acc_norm": 0.45517241379310347, + "acc_norm_stderr": 0.04149886942192117 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.04440521906179325, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.04440521906179325 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5168067226890757, + "acc_stderr": 0.03246013680375308, + "acc_norm": 0.5168067226890757, + "acc_norm_stderr": 0.03246013680375308 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4282051282051282, + "acc_stderr": 0.02508830145469484, + "acc_norm": 0.4282051282051282, + "acc_norm_stderr": 0.02508830145469484 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.64, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.64, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5185185185185185, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.5185185185185185, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39408866995073893, + "acc_stderr": 0.03438157967036546, + "acc_norm": 0.39408866995073893, + 
"acc_norm_stderr": 0.03438157967036546 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4161290322580645, + "acc_stderr": 0.028040981380761547, + "acc_norm": 0.4161290322580645, + "acc_norm_stderr": 0.028040981380761547 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7350427350427351, + "acc_stderr": 0.028911208802749458, + "acc_norm": 0.7350427350427351, + "acc_norm_stderr": 0.028911208802749458 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4641509433962264, + "acc_stderr": 0.030693675018458003, + "acc_norm": 0.4641509433962264, + "acc_norm_stderr": 0.030693675018458003 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5, + "acc_stderr": 0.04789131426105757, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04789131426105757 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.32592592592592595, + "acc_stderr": 0.028578348365473072, + "acc_norm": 0.32592592592592595, + "acc_norm_stderr": 0.028578348365473072 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2582781456953642, + "acc_stderr": 0.035737053147634576, + "acc_norm": 0.2582781456953642, + "acc_norm_stderr": 0.035737053147634576 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6318407960199005, + "acc_stderr": 0.03410410565495302, + "acc_norm": 0.6318407960199005, + "acc_norm_stderr": 0.03410410565495302 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4161849710982659, + "acc_stderr": 0.037585177754049466, + "acc_norm": 0.4161849710982659, + "acc_norm_stderr": 0.037585177754049466 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3439153439153439, + "acc_stderr": 0.024464426625596437, + "acc_norm": 0.3439153439153439, + "acc_norm_stderr": 0.024464426625596437 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.040166600304512336, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.040166600304512336 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.35, + "acc_stderr": 
0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.58, + "acc_stderr": 0.04960449637488584, + "acc_norm": 0.58, + "acc_norm_stderr": 0.04960449637488584 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5289017341040463, + "acc_stderr": 0.026874085883518348, + "acc_norm": 0.5289017341040463, + "acc_norm_stderr": 0.026874085883518348 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.49079754601226994, + "acc_stderr": 0.039277056007874414, + "acc_norm": 0.49079754601226994, + "acc_norm_stderr": 0.039277056007874414 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.44135802469135804, + "acc_stderr": 0.027628737155668777, + "acc_norm": 0.44135802469135804, + "acc_norm_stderr": 0.027628737155668777 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5181347150259067, + "acc_stderr": 0.036060650018329185, + "acc_norm": 0.5181347150259067, + "acc_norm_stderr": 0.036060650018329185 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3157894736842105, + "acc_stderr": 0.04372748290278007, + "acc_norm": 0.3157894736842105, + "acc_norm_stderr": 0.04372748290278007 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.47339449541284406, + "acc_stderr": 0.02140695268815158, + "acc_norm": 0.47339449541284406, + "acc_norm_stderr": 0.02140695268815158 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.40476190476190477, + "acc_stderr": 0.04390259265377561, + "acc_norm": 0.40476190476190477, + "acc_norm_stderr": 0.04390259265377561 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4803921568627451, + "acc_stderr": 0.028607893699576066, + "acc_norm": 0.4803921568627451, + "acc_norm_stderr": 0.028607893699576066 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.46, + 
"acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6694214876033058, + "acc_stderr": 0.04294340845212094, + "acc_norm": 0.6694214876033058, + "acc_norm_stderr": 0.04294340845212094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4473684210526316, + "acc_stderr": 0.040463368839782514, + "acc_norm": 0.4473684210526316, + "acc_norm_stderr": 0.040463368839782514 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4117647058823529, + "acc_stderr": 0.019910377463105935, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.019910377463105935 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.028121636040639886, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.028121636040639886 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4375, + "acc_stderr": 0.04708567521880525, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.04708567521880525 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.032568505702936464, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.032568505702936464 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.31843575418994413, + "acc_stderr": 0.015581008080360276, + "acc_norm": 0.31843575418994413, + "acc_norm_stderr": 0.015581008080360276 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.65, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.65, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.33455882352941174, + "acc_stderr": 0.028661996202335307, + "acc_norm": 0.33455882352941174, + "acc_norm_stderr": 0.028661996202335307 + }, + 
"harness|ko_mmlu_security_studies|5": { + "acc": 0.5755102040816327, + "acc_stderr": 0.031642094879429414, + "acc_norm": 0.5755102040816327, + "acc_norm_stderr": 0.031642094879429414 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.620253164556962, + "acc_stderr": 0.0315918875296585, + "acc_norm": 0.620253164556962, + "acc_norm_stderr": 0.0315918875296585 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3670143415906128, + "acc_stderr": 0.01231026424484213, + "acc_norm": 0.3670143415906128, + "acc_norm_stderr": 0.01231026424484213 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5, + "acc_stderr": 0.03509312031717982, + "acc_norm": 0.5, + "acc_norm_stderr": 0.03509312031717982 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.038956580652718446, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.038956580652718446 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.34149326805385555, + "mc1_stderr": 0.016600688619950826, + "mc2": 0.5092416852183381, + "mc2_stderr": 0.015906547750317673 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3907910271546635, + "acc_stderr": 0.01677529846510825, + "acc_norm": 0.4037780401416765, + "acc_norm_stderr": 0.016869031540298632 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + 
"harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + 
"harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "MaziyarPanahi/Mistral-7B-Instruct-v0.3", + "model_sha": "a49cb4ae9497bb64b9c0270caf31a3d1c4f45d46", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/MaziyarPanahi/Mistral-7B-v0.3/result_2024-05-29 07:48:04.json b/MaziyarPanahi/Mistral-7B-v0.3/result_2024-05-29 07:48:04.json new file mode 100644 index 0000000000000000000000000000000000000000..6cdecfb34f5ede8d133a277c6d969f5c47ca0a6d --- /dev/null +++ b/MaziyarPanahi/Mistral-7B-v0.3/result_2024-05-29 07:48:04.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.32081911262798635, + "acc_stderr": 0.013640943091946526, + "acc_norm": 0.371160409556314, + "acc_norm_stderr": 0.01411797190114282 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3663612826130253, + "acc_stderr": 0.004808251269682431, + "acc_norm": 0.47341167098187614, + "acc_norm_stderr": 0.004982721472407335 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.47368421052631576, + "acc_stderr": 0.038295098689947286, + "acc_norm": 0.47368421052631576, + "acc_norm_stderr": 0.038295098689947286 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5631067961165048, + "acc_stderr": 0.04911147107365777, + "acc_norm": 0.5631067961165048, + "acc_norm_stderr": 0.04911147107365777 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4674329501915709, + "acc_stderr": 0.01784199575052086, + "acc_norm": 0.4674329501915709, + "acc_norm_stderr": 0.01784199575052086 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37777777777777777, + "acc_stderr": 0.04188307537595853, + "acc_norm": 0.37777777777777777, + "acc_norm_stderr": 0.04188307537595853 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, 
+ "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39574468085106385, + "acc_stderr": 0.03196758697835362, + "acc_norm": 0.39574468085106385, + "acc_norm_stderr": 0.03196758697835362 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42168674698795183, + "acc_stderr": 0.03844453181770917, + "acc_norm": 0.42168674698795183, + "acc_norm_stderr": 0.03844453181770917 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.47266881028938906, + "acc_stderr": 0.028355633568328188, + "acc_norm": 0.47266881028938906, + "acc_norm_stderr": 0.028355633568328188 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4170403587443946, + "acc_stderr": 0.03309266936071721, + "acc_norm": 0.4170403587443946, + "acc_norm_stderr": 0.03309266936071721 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.45038167938931295, + "acc_stderr": 0.04363643698524779, + "acc_norm": 0.45038167938931295, + "acc_norm_stderr": 0.04363643698524779 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5505050505050505, + "acc_stderr": 0.035441324919479704, + "acc_norm": 0.5505050505050505, + "acc_norm_stderr": 0.035441324919479704 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4482758620689655, + "acc_stderr": 0.04144311810878151, + "acc_norm": 0.4482758620689655, + "acc_norm_stderr": 0.04144311810878151 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.16666666666666666, + "acc_stderr": 0.03708284662416545, + "acc_norm": 0.16666666666666666, + "acc_norm_stderr": 0.03708284662416545 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5126050420168067, + "acc_stderr": 0.032468167657521745, + "acc_norm": 0.5126050420168067, + "acc_norm_stderr": 0.032468167657521745 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4564102564102564, + "acc_stderr": 0.02525448542479961, + 
"acc_norm": 0.4564102564102564, + "acc_norm_stderr": 0.02525448542479961 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.57, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.57, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.04803752235190193, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.04803752235190193 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.43842364532019706, + "acc_stderr": 0.03491207857486519, + "acc_norm": 0.43842364532019706, + "acc_norm_stderr": 0.03491207857486519 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4645161290322581, + "acc_stderr": 0.028372287797962952, + "acc_norm": 0.4645161290322581, + "acc_norm_stderr": 0.028372287797962952 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7264957264957265, + "acc_stderr": 0.029202540153431194, + "acc_norm": 0.7264957264957265, + "acc_norm_stderr": 0.029202540153431194 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.39622641509433965, + "acc_stderr": 0.03010279378179119, + "acc_norm": 0.39622641509433965, + "acc_norm_stderr": 0.03010279378179119 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.44545454545454544, + "acc_stderr": 0.047605488214603246, + "acc_norm": 0.44545454545454544, + "acc_norm_stderr": 0.047605488214603246 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.31851851851851853, + "acc_stderr": 0.02840653309060846, + "acc_norm": 0.31851851851851853, + "acc_norm_stderr": 0.02840653309060846 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33112582781456956, + "acc_stderr": 0.038425817186598696, + "acc_norm": 0.33112582781456956, + "acc_norm_stderr": 0.038425817186598696 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6318407960199005, + 
"acc_stderr": 0.03410410565495302, + "acc_norm": 0.6318407960199005, + "acc_norm_stderr": 0.03410410565495302 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3815028901734104, + "acc_stderr": 0.03703851193099521, + "acc_norm": 0.3815028901734104, + "acc_norm_stderr": 0.03703851193099521 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3835978835978836, + "acc_stderr": 0.025043757318520203, + "acc_norm": 0.3835978835978836, + "acc_norm_stderr": 0.025043757318520203 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.04016660030451233, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.04016660030451233 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.58, + "acc_stderr": 0.04960449637488584, + "acc_norm": 0.58, + "acc_norm_stderr": 0.04960449637488584 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5144508670520231, + "acc_stderr": 0.02690784985628254, + "acc_norm": 0.5144508670520231, + "acc_norm_stderr": 0.02690784985628254 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.43558282208588955, + "acc_stderr": 0.03895632464138937, + "acc_norm": 0.43558282208588955, + "acc_norm_stderr": 0.03895632464138937 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.42901234567901236, + "acc_stderr": 0.027538925613470867, + "acc_norm": 0.42901234567901236, + "acc_norm_stderr": 0.027538925613470867 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.47150259067357514, + "acc_stderr": 0.036025735712884414, + "acc_norm": 0.47150259067357514, + "acc_norm_stderr": 0.036025735712884414 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 
0.3684210526315789, + "acc_stderr": 0.04537815354939391, + "acc_norm": 0.3684210526315789, + "acc_norm_stderr": 0.04537815354939391 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.47339449541284406, + "acc_stderr": 0.02140695268815158, + "acc_norm": 0.47339449541284406, + "acc_norm_stderr": 0.02140695268815158 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.36507936507936506, + "acc_stderr": 0.04306241259127152, + "acc_norm": 0.36507936507936506, + "acc_norm_stderr": 0.04306241259127152 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.477124183006536, + "acc_stderr": 0.028599936776089786, + "acc_norm": 0.477124183006536, + "acc_norm_stderr": 0.028599936776089786 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.45, + "acc_stderr": 0.049999999999999996, + "acc_norm": 0.45, + "acc_norm_stderr": 0.049999999999999996 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6694214876033058, + "acc_stderr": 0.04294340845212094, + "acc_norm": 0.6694214876033058, + "acc_norm_stderr": 0.04294340845212094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4473684210526316, + "acc_stderr": 0.04046336883978251, + "acc_norm": 0.4473684210526316, + "acc_norm_stderr": 0.04046336883978251 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4084967320261438, + "acc_stderr": 0.01988622103750186, + "acc_norm": 0.4084967320261438, + "acc_norm_stderr": 0.01988622103750186 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3475177304964539, + "acc_stderr": 0.028406627809590943, + "acc_norm": 0.3475177304964539, + "acc_norm_stderr": 0.028406627809590943 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.375, + "acc_stderr": 0.04595091388086298, + "acc_norm": 0.375, + "acc_norm_stderr": 0.04595091388086298 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.36574074074074076, + "acc_stderr": 0.03284738857647206, + "acc_norm": 0.36574074074074076, + "acc_norm_stderr": 0.03284738857647206 + }, + 
"harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.3016759776536313, + "acc_stderr": 0.015350767572220286, + "acc_norm": 0.3016759776536313, + "acc_norm_stderr": 0.015350767572220286 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.64, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.64, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3786764705882353, + "acc_stderr": 0.029465133639776125, + "acc_norm": 0.3786764705882353, + "acc_norm_stderr": 0.029465133639776125 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5959183673469388, + "acc_stderr": 0.03141470802586588, + "acc_norm": 0.5959183673469388, + "acc_norm_stderr": 0.03141470802586588 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5907172995780591, + "acc_stderr": 0.03200704183359591, + "acc_norm": 0.5907172995780591, + "acc_norm_stderr": 0.03200704183359591 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3644067796610169, + "acc_stderr": 0.012291694983056477, + "acc_norm": 0.3644067796610169, + "acc_norm_stderr": 0.012291694983056477 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4852941176470588, + "acc_stderr": 0.03507793834791324, + "acc_norm": 0.4852941176470588, + "acc_norm_stderr": 0.03507793834791324 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.503030303030303, + "acc_stderr": 0.03904272341431856, + "acc_norm": 0.503030303030303, + "acc_norm_stderr": 0.03904272341431856 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.28886168910648713, + "mc1_stderr": 0.015866346401384304, + "mc2": 0.46018406335411455, + "mc2_stderr": 0.015440408515127919 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.525383707201889, + "acc_stderr": 0.017168187201429257, + "acc_norm": 0.5796930342384888, + 
"acc_norm_stderr": 0.01697059828117771 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, 
+ "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "MaziyarPanahi/Mistral-7B-v0.3", + "model_sha": "ee7f43d70b6cb6939c237341eb53fdaaa2d4f1bb", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/MaziyarPanahi/Topxtral-4x7B-v0.1/result_2024-06-10 09:33:18.json b/MaziyarPanahi/Topxtral-4x7B-v0.1/result_2024-06-10 09:33:18.json new file mode 100644 index 0000000000000000000000000000000000000000..fbceb5ddb48fb2c0b6b32c3ccad98f5eb5421e47 --- /dev/null +++ b/MaziyarPanahi/Topxtral-4x7B-v0.1/result_2024-06-10 09:33:18.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.38054607508532423, + "acc_stderr": 0.014188277712349815, + "acc_norm": 0.43430034129692835, + "acc_norm_stderr": 0.014484703048857357 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3952399920334595, + "acc_stderr": 0.004879030010598924, + "acc_norm": 0.5141406094403506, + "acc_norm_stderr": 0.004987785530475672 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4853801169590643, + "acc_stderr": 0.038331852752130205, + "acc_norm": 0.4853801169590643, + "acc_norm_stderr": 0.038331852752130205 + }, + 
"harness|ko_mmlu_management|5": { + "acc": 0.6019417475728155, + "acc_stderr": 0.04846748253977238, + "acc_norm": 0.6019417475728155, + "acc_norm_stderr": 0.04846748253977238 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.45721583652618136, + "acc_stderr": 0.017814385238534423, + "acc_norm": 0.45721583652618136, + "acc_norm_stderr": 0.017814385238534423 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.31851851851851853, + "acc_stderr": 0.040247784019771096, + "acc_norm": 0.31851851851851853, + "acc_norm_stderr": 0.040247784019771096 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39148936170212767, + "acc_stderr": 0.03190701242326812, + "acc_norm": 0.39148936170212767, + "acc_norm_stderr": 0.03190701242326812 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3674698795180723, + "acc_stderr": 0.03753267402120574, + "acc_norm": 0.3674698795180723, + "acc_norm_stderr": 0.03753267402120574 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4437299035369775, + "acc_stderr": 0.02821768355665231, + "acc_norm": 0.4437299035369775, + "acc_norm_stderr": 0.02821768355665231 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4349775784753363, + "acc_stderr": 0.03327283370271344, + "acc_norm": 0.4349775784753363, + "acc_norm_stderr": 0.03327283370271344 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.45038167938931295, + "acc_stderr": 0.04363643698524779, + "acc_norm": 0.45038167938931295, + "acc_norm_stderr": 0.04363643698524779 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.03547601494006937, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.03547601494006937 + }, + 
"harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.45517241379310347, + "acc_stderr": 0.04149886942192117, + "acc_norm": 0.45517241379310347, + "acc_norm_stderr": 0.04149886942192117 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.04158307533083286, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.04158307533083286 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5336134453781513, + "acc_stderr": 0.03240501447690071, + "acc_norm": 0.5336134453781513, + "acc_norm_stderr": 0.03240501447690071 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.48717948717948717, + "acc_stderr": 0.025342671293807267, + "acc_norm": 0.48717948717948717, + "acc_norm_stderr": 0.025342671293807267 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.62, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.62, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.22, + "acc_stderr": 0.041633319989322695, + "acc_norm": 0.22, + "acc_norm_stderr": 0.041633319989322695 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5185185185185185, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.5185185185185185, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3842364532019704, + "acc_stderr": 0.03422398565657551, + "acc_norm": 0.3842364532019704, + "acc_norm_stderr": 0.03422398565657551 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4483870967741935, + "acc_stderr": 0.02829205683011273, + "acc_norm": 0.4483870967741935, + "acc_norm_stderr": 0.02829205683011273 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7521367521367521, + "acc_stderr": 0.028286324075564404, + "acc_norm": 0.7521367521367521, + "acc_norm_stderr": 0.028286324075564404 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4377358490566038, + "acc_stderr": 0.03053333843046751, + "acc_norm": 0.4377358490566038, 
+ "acc_norm_stderr": 0.03053333843046751 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4909090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.4909090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.32592592592592595, + "acc_stderr": 0.02857834836547308, + "acc_norm": 0.32592592592592595, + "acc_norm_stderr": 0.02857834836547308 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.03757949922943343, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.03757949922943343 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5970149253731343, + "acc_stderr": 0.034683432951111266, + "acc_norm": 0.5970149253731343, + "acc_norm_stderr": 0.034683432951111266 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.41040462427745666, + "acc_stderr": 0.03750757044895537, + "acc_norm": 0.41040462427745666, + "acc_norm_stderr": 0.03750757044895537 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.37566137566137564, + "acc_stderr": 0.024942368931159788, + "acc_norm": 0.37566137566137564, + "acc_norm_stderr": 0.024942368931159788 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3680555555555556, + "acc_stderr": 0.04032999053960719, + "acc_norm": 0.3680555555555556, + "acc_norm_stderr": 0.04032999053960719 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.56, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.56, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.49421965317919075, + "acc_stderr": 0.026917296179149116, + "acc_norm": 0.49421965317919075, + "acc_norm_stderr": 0.026917296179149116 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4785276073619632, + "acc_stderr": 0.0392474687675113, 
+ "acc_norm": 0.4785276073619632, + "acc_norm_stderr": 0.0392474687675113 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.027744313443376543, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.027744313443376543 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.48704663212435234, + "acc_stderr": 0.03607228061047749, + "acc_norm": 0.48704663212435234, + "acc_norm_stderr": 0.03607228061047749 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.30701754385964913, + "acc_stderr": 0.0433913832257986, + "acc_norm": 0.30701754385964913, + "acc_norm_stderr": 0.0433913832257986 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.4972477064220184, + "acc_stderr": 0.02143699835976532, + "acc_norm": 0.4972477064220184, + "acc_norm_stderr": 0.02143699835976532 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.40476190476190477, + "acc_stderr": 0.04390259265377561, + "acc_norm": 0.40476190476190477, + "acc_norm_stderr": 0.04390259265377561 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.49019607843137253, + "acc_stderr": 0.028624412550167965, + "acc_norm": 0.49019607843137253, + "acc_norm_stderr": 0.028624412550167965 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6198347107438017, + "acc_stderr": 0.04431324501968432, + "acc_norm": 0.6198347107438017, + "acc_norm_stderr": 0.04431324501968432 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.39473684210526316, + "acc_stderr": 0.039777499346220734, + "acc_norm": 0.39473684210526316, + "acc_norm_stderr": 0.039777499346220734 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.39052287581699346, 
+ "acc_stderr": 0.019737008998094597, + "acc_norm": 0.39052287581699346, + "acc_norm_stderr": 0.019737008998094597 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3723404255319149, + "acc_stderr": 0.028838921471251458, + "acc_norm": 0.3723404255319149, + "acc_norm_stderr": 0.028838921471251458 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.04547960999764376, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.04547960999764376 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.03293377139415191, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.03293377139415191 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2871508379888268, + "acc_stderr": 0.01513160884996376, + "acc_norm": 0.2871508379888268, + "acc_norm_stderr": 0.01513160884996376 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.65, + "acc_stderr": 0.04793724854411021, + "acc_norm": 0.65, + "acc_norm_stderr": 0.04793724854411021 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3161764705882353, + "acc_stderr": 0.028245687391462913, + "acc_norm": 0.3161764705882353, + "acc_norm_stderr": 0.028245687391462913 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5469387755102041, + "acc_stderr": 0.03186785930004128, + "acc_norm": 0.5469387755102041, + "acc_norm_stderr": 0.03186785930004128 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6033755274261603, + "acc_stderr": 0.031843998738112236, + "acc_norm": 0.6033755274261603, + "acc_norm_stderr": 0.031843998738112236 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.34485006518904826, + "acc_stderr": 0.012139881006287061, + "acc_norm": 0.34485006518904826, + "acc_norm_stderr": 
0.012139881006287061 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.47549019607843135, + "acc_stderr": 0.03505093194348798, + "acc_norm": 0.47549019607843135, + "acc_norm_stderr": 0.03505093194348798 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.503030303030303, + "acc_stderr": 0.03904272341431857, + "acc_norm": 0.503030303030303, + "acc_norm_stderr": 0.03904272341431857 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.4149326805385557, + "mc1_stderr": 0.01724831446580598, + "mc2": 0.5771885682851433, + "mc2_stderr": 0.016292878490691633 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4002361275088548, + "acc_stderr": 0.016844693510505045, + "acc_norm": 0.41086186540731995, + "acc_norm_stderr": 0.016914972767841062 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + 
"harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "MaziyarPanahi/Topxtral-4x7B-v0.1", + "model_sha": "1a219935a01db03820ddabb2e29c199222a772e5", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Megastudy/M-SOLAR-10.7B-v1.1-beta/result_2023-12-29 00:13:06.json b/Megastudy/M-SOLAR-10.7B-v1.1-beta/result_2023-12-29 00:13:06.json new file mode 100644 index 
0000000000000000000000000000000000000000..6f36fe37f0dc2ffa7983f904302ef19807c51d22 --- /dev/null +++ b/Megastudy/M-SOLAR-10.7B-v1.1-beta/result_2023-12-29 00:13:06.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4590443686006826, + "acc_stderr": 0.014562291073601234, + "acc_norm": 0.5170648464163823, + "acc_norm_stderr": 0.014602878388536591 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4457279426409082, + "acc_stderr": 0.004960299952519394, + "acc_norm": 0.6086436964748058, + "acc_norm_stderr": 0.004870563921220625 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6491228070175439, + "acc_stderr": 0.03660298834049164, + "acc_norm": 0.6491228070175439, + "acc_norm_stderr": 0.03660298834049164 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6893203883495146, + "acc_stderr": 0.0458212416016155, + "acc_norm": 0.6893203883495146, + "acc_norm_stderr": 0.0458212416016155 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.669220945083014, + "acc_stderr": 0.01682481846256376, + "acc_norm": 0.669220945083014, + "acc_norm_stderr": 0.01682481846256376 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.42962962962962964, + "acc_stderr": 0.04276349494376599, + "acc_norm": 0.42962962962962964, + "acc_norm_stderr": 0.04276349494376599 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421255, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421255 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.48936170212765956, + "acc_stderr": 0.03267862331014063, + "acc_norm": 0.48936170212765956, + "acc_norm_stderr": 0.03267862331014063 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4759036144578313, + "acc_stderr": 0.03887971849597264, + "acc_norm": 0.4759036144578313, + "acc_norm_stderr": 0.03887971849597264 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5852090032154341, + "acc_stderr": 0.027982680459759567, + "acc_norm": 0.5852090032154341, + "acc_norm_stderr": 
0.027982680459759567 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5829596412556054, + "acc_stderr": 0.03309266936071721, + "acc_norm": 0.5829596412556054, + "acc_norm_stderr": 0.03309266936071721 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5801526717557252, + "acc_stderr": 0.04328577215262973, + "acc_norm": 0.5801526717557252, + "acc_norm_stderr": 0.04328577215262973 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956914, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956914 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7424242424242424, + "acc_stderr": 0.031156269519646847, + "acc_norm": 0.7424242424242424, + "acc_norm_stderr": 0.031156269519646847 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4827586206896552, + "acc_stderr": 0.04164188720169377, + "acc_norm": 0.4827586206896552, + "acc_norm_stderr": 0.04164188720169377 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3627450980392157, + "acc_stderr": 0.047840607041056527, + "acc_norm": 0.3627450980392157, + "acc_norm_stderr": 0.047840607041056527 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6512605042016807, + "acc_stderr": 0.030956636328566545, + "acc_norm": 0.6512605042016807, + "acc_norm_stderr": 0.030956636328566545 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5358974358974359, + "acc_stderr": 0.025285585990017862, + "acc_norm": 0.5358974358974359, + "acc_norm_stderr": 0.025285585990017862 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.59, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.59, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.04793724854411018, + "acc_norm": 0.35, + "acc_norm_stderr": 0.04793724854411018 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6388888888888888, + "acc_stderr": 0.04643454608906275, + "acc_norm": 0.6388888888888888, + 
"acc_norm_stderr": 0.04643454608906275 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39408866995073893, + "acc_stderr": 0.03438157967036543, + "acc_norm": 0.39408866995073893, + "acc_norm_stderr": 0.03438157967036543 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6258064516129033, + "acc_stderr": 0.027528904299845683, + "acc_norm": 0.6258064516129033, + "acc_norm_stderr": 0.027528904299845683 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.8034188034188035, + "acc_stderr": 0.02603538609895129, + "acc_norm": 0.8034188034188035, + "acc_norm_stderr": 0.02603538609895129 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5509433962264151, + "acc_stderr": 0.030612730713641092, + "acc_norm": 0.5509433962264151, + "acc_norm_stderr": 0.030612730713641092 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5727272727272728, + "acc_stderr": 0.047381987035454834, + "acc_norm": 0.5727272727272728, + "acc_norm_stderr": 0.047381987035454834 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3592592592592593, + "acc_stderr": 0.02925290592725198, + "acc_norm": 0.3592592592592593, + "acc_norm_stderr": 0.02925290592725198 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3443708609271523, + "acc_stderr": 0.03879687024073327, + "acc_norm": 0.3443708609271523, + "acc_norm_stderr": 0.03879687024073327 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7064676616915423, + "acc_stderr": 0.032200241045342054, + "acc_norm": 0.7064676616915423, + "acc_norm_stderr": 0.032200241045342054 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4682080924855491, + "acc_stderr": 0.03804749744364764, + "acc_norm": 0.4682080924855491, + "acc_norm_stderr": 0.03804749744364764 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.43915343915343913, + "acc_stderr": 0.02555992055053101, + "acc_norm": 0.43915343915343913, + "acc_norm_stderr": 0.02555992055053101 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 
0.5625, + "acc_stderr": 0.04148415739394154, + "acc_norm": 0.5625, + "acc_norm_stderr": 0.04148415739394154 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.75, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.75, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5895953757225434, + "acc_stderr": 0.02648339204209818, + "acc_norm": 0.5895953757225434, + "acc_norm_stderr": 0.02648339204209818 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.49079754601226994, + "acc_stderr": 0.039277056007874414, + "acc_norm": 0.49079754601226994, + "acc_norm_stderr": 0.039277056007874414 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6203703703703703, + "acc_stderr": 0.027002521034516468, + "acc_norm": 0.6203703703703703, + "acc_norm_stderr": 0.027002521034516468 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7202072538860104, + "acc_stderr": 0.03239637046735703, + "acc_norm": 0.7202072538860104, + "acc_norm_stderr": 0.03239637046735703 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.45614035087719296, + "acc_stderr": 0.046854730419077895, + "acc_norm": 0.45614035087719296, + "acc_norm_stderr": 0.046854730419077895 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.7119266055045872, + "acc_stderr": 0.019416445892636025, + "acc_norm": 0.7119266055045872, + "acc_norm_stderr": 0.019416445892636025 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.044444444444444495, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.044444444444444495 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5490196078431373, 
+ "acc_stderr": 0.02849199358617156, + "acc_norm": 0.5490196078431373, + "acc_norm_stderr": 0.02849199358617156 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.61, + "acc_stderr": 0.049020713000019756, + "acc_norm": 0.61, + "acc_norm_stderr": 0.049020713000019756 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.743801652892562, + "acc_stderr": 0.03984979653302871, + "acc_norm": 0.743801652892562, + "acc_norm_stderr": 0.03984979653302871 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5789473684210527, + "acc_stderr": 0.040179012759817494, + "acc_norm": 0.5789473684210527, + "acc_norm_stderr": 0.040179012759817494 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5098039215686274, + "acc_stderr": 0.02022394600507431, + "acc_norm": 0.5098039215686274, + "acc_norm_stderr": 0.02022394600507431 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.41843971631205673, + "acc_stderr": 0.02942799403941999, + "acc_norm": 0.41843971631205673, + "acc_norm_stderr": 0.02942799403941999 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.41964285714285715, + "acc_stderr": 0.04684099321077106, + "acc_norm": 0.41964285714285715, + "acc_norm_stderr": 0.04684099321077106 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5231481481481481, + "acc_stderr": 0.034063153607115065, + "acc_norm": 0.5231481481481481, + "acc_norm_stderr": 0.034063153607115065 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.1877094972067039, + "acc_stderr": 0.013059605303257065, + "acc_norm": 0.1877094972067039, + "acc_norm_stderr": 0.013059605303257065 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.69, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.69, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_professional_medicine|5": 
{ + "acc": 0.5183823529411765, + "acc_stderr": 0.03035230339535196, + "acc_norm": 0.5183823529411765, + "acc_norm_stderr": 0.03035230339535196 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5591836734693878, + "acc_stderr": 0.03178419114175363, + "acc_norm": 0.5591836734693878, + "acc_norm_stderr": 0.03178419114175363 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7679324894514767, + "acc_stderr": 0.02747974455080851, + "acc_norm": 0.7679324894514767, + "acc_norm_stderr": 0.02747974455080851 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.41851368970013036, + "acc_stderr": 0.01259950560833648, + "acc_norm": 0.41851368970013036, + "acc_norm_stderr": 0.01259950560833648 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6519607843137255, + "acc_stderr": 0.03343311240488418, + "acc_norm": 0.6519607843137255, + "acc_norm_stderr": 0.03343311240488418 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6848484848484848, + "acc_stderr": 0.0362773057502241, + "acc_norm": 0.6848484848484848, + "acc_norm_stderr": 0.0362773057502241 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.30966952264381886, + "mc1_stderr": 0.0161857443551449, + "mc2": 0.47119481512690015, + "mc2_stderr": 0.0153427666032473 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5749704840613932, + "acc_stderr": 0.016996016308362887, + "acc_norm": 0.6233766233766234, + "acc_norm_stderr": 0.016658799874051985 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 
1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + 
"harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Megastudy/M-SOLAR-10.7B-v1.1-beta", + "model_sha": "2bdd9a00217c3fdd9ec6f4d966e5383a529bd0f5", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Megastudy/M-SOLAR-10.7B-v1.2/result_2024-01-01 11:21:10.json b/Megastudy/M-SOLAR-10.7B-v1.2/result_2024-01-01 11:21:10.json new file mode 100644 index 0000000000000000000000000000000000000000..5da61482033b470e2cf586969f0e8445e0019961 --- /dev/null +++ b/Megastudy/M-SOLAR-10.7B-v1.2/result_2024-01-01 11:21:10.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4598976109215017, + "acc_stderr": 0.01456431885692485, + "acc_norm": 0.5170648464163823, + "acc_norm_stderr": 0.014602878388536591 + }, + "harness|ko_hellaswag|10": { + "acc": 0.45598486357299345, + "acc_stderr": 0.004970410081009441, + "acc_norm": 0.6200955984863573, + "acc_norm_stderr": 0.004843708550386534 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6549707602339181, + "acc_stderr": 0.036459813773888065, + "acc_norm": 0.6549707602339181, + "acc_norm_stderr": 0.036459813773888065 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6504854368932039, + "acc_stderr": 0.047211885060971716, + "acc_norm": 0.6504854368932039, + "acc_norm_stderr": 0.047211885060971716 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.669220945083014, + "acc_stderr": 0.016824818462563756, + "acc_norm": 0.669220945083014, + "acc_norm_stderr": 0.016824818462563756 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.43703703703703706, + "acc_stderr": 0.04284958639753398, + "acc_norm": 0.43703703703703706, + "acc_norm_stderr": 0.04284958639753398 + }, + 
"harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.502127659574468, + "acc_stderr": 0.03268572658667493, + "acc_norm": 0.502127659574468, + "acc_norm_stderr": 0.03268572658667493 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.5, + "acc_stderr": 0.03892494720807614, + "acc_norm": 0.5, + "acc_norm_stderr": 0.03892494720807614 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5980707395498392, + "acc_stderr": 0.027846476005930473, + "acc_norm": 0.5980707395498392, + "acc_norm_stderr": 0.027846476005930473 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5560538116591929, + "acc_stderr": 0.03334625674242728, + "acc_norm": 0.5560538116591929, + "acc_norm_stderr": 0.03334625674242728 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6641221374045801, + "acc_stderr": 0.04142313771996665, + "acc_norm": 0.6641221374045801, + "acc_norm_stderr": 0.04142313771996665 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7373737373737373, + "acc_stderr": 0.03135305009533087, + "acc_norm": 0.7373737373737373, + "acc_norm_stderr": 0.03135305009533087 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5379310344827586, + "acc_stderr": 0.041546596717075474, + "acc_norm": 0.5379310344827586, + "acc_norm_stderr": 0.041546596717075474 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.30392156862745096, + "acc_stderr": 0.045766654032077615, + "acc_norm": 0.30392156862745096, + "acc_norm_stderr": 0.045766654032077615 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6596638655462185, + "acc_stderr": 0.03077805742293167, + "acc_norm": 0.6596638655462185, + "acc_norm_stderr": 0.03077805742293167 + }, + 
"harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5564102564102564, + "acc_stderr": 0.025189149894764208, + "acc_norm": 0.5564102564102564, + "acc_norm_stderr": 0.025189149894764208 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.58, + "acc_stderr": 0.04960449637488583, + "acc_norm": 0.58, + "acc_norm_stderr": 0.04960449637488583 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6759259259259259, + "acc_stderr": 0.04524596007030048, + "acc_norm": 0.6759259259259259, + "acc_norm_stderr": 0.04524596007030048 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.03481904844438804, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.03481904844438804 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.603225806451613, + "acc_stderr": 0.027831231605767944, + "acc_norm": 0.603225806451613, + "acc_norm_stderr": 0.027831231605767944 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.811965811965812, + "acc_stderr": 0.025598193686652247, + "acc_norm": 0.811965811965812, + "acc_norm_stderr": 0.025598193686652247 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5283018867924528, + "acc_stderr": 0.030723535249006107, + "acc_norm": 0.5283018867924528, + "acc_norm_stderr": 0.030723535249006107 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6090909090909091, + "acc_stderr": 0.04673752333670239, + "acc_norm": 0.6090909090909091, + "acc_norm_stderr": 0.04673752333670239 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.02944316932303154, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.02944316932303154 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3841059602649007, + "acc_stderr": 0.03971301814719198, + "acc_norm": 0.3841059602649007, + 
"acc_norm_stderr": 0.03971301814719198 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7313432835820896, + "acc_stderr": 0.031343283582089536, + "acc_norm": 0.7313432835820896, + "acc_norm_stderr": 0.031343283582089536 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.49710982658959535, + "acc_stderr": 0.038124005659748335, + "acc_norm": 0.49710982658959535, + "acc_norm_stderr": 0.038124005659748335 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.43386243386243384, + "acc_stderr": 0.025525034382474884, + "acc_norm": 0.43386243386243384, + "acc_norm_stderr": 0.025525034382474884 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5277777777777778, + "acc_stderr": 0.04174752578923185, + "acc_norm": 0.5277777777777778, + "acc_norm_stderr": 0.04174752578923185 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.73, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.73, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.6069364161849711, + "acc_stderr": 0.02629622791561367, + "acc_norm": 0.6069364161849711, + "acc_norm_stderr": 0.02629622791561367 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4785276073619632, + "acc_stderr": 0.03924746876751129, + "acc_norm": 0.4785276073619632, + "acc_norm_stderr": 0.03924746876751129 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6234567901234568, + "acc_stderr": 0.026959344518747784, + "acc_norm": 0.6234567901234568, + "acc_norm_stderr": 0.026959344518747784 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7098445595854922, + "acc_stderr": 0.03275264467791515, + "acc_norm": 
0.7098445595854922, + "acc_norm_stderr": 0.03275264467791515 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.39473684210526316, + "acc_stderr": 0.04598188057816542, + "acc_norm": 0.39473684210526316, + "acc_norm_stderr": 0.04598188057816542 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.7229357798165138, + "acc_stderr": 0.019188482590169538, + "acc_norm": 0.7229357798165138, + "acc_norm_stderr": 0.019188482590169538 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.42063492063492064, + "acc_stderr": 0.04415438226743743, + "acc_norm": 0.42063492063492064, + "acc_norm_stderr": 0.04415438226743743 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5751633986928104, + "acc_stderr": 0.028304576673141114, + "acc_norm": 0.5751633986928104, + "acc_norm_stderr": 0.028304576673141114 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7272727272727273, + "acc_stderr": 0.04065578140908705, + "acc_norm": 0.7272727272727273, + "acc_norm_stderr": 0.04065578140908705 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5921052631578947, + "acc_stderr": 0.039993097127774734, + "acc_norm": 0.5921052631578947, + "acc_norm_stderr": 0.039993097127774734 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5065359477124183, + "acc_stderr": 0.020226106567657807, + "acc_norm": 0.5065359477124183, + "acc_norm_stderr": 0.020226106567657807 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.42907801418439717, + "acc_stderr": 0.029525914302558555, + "acc_norm": 0.42907801418439717, + "acc_norm_stderr": 0.029525914302558555 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.38392857142857145, + "acc_stderr": 0.04616143075028546, + "acc_norm": 0.38392857142857145, + "acc_norm_stderr": 0.04616143075028546 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 
0.5277777777777778, + "acc_stderr": 0.0340470532865388, + "acc_norm": 0.5277777777777778, + "acc_norm_stderr": 0.0340470532865388 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.18435754189944134, + "acc_stderr": 0.01296915281188346, + "acc_norm": 0.18435754189944134, + "acc_norm_stderr": 0.01296915281188346 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.71, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.71, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5147058823529411, + "acc_stderr": 0.03035969707904612, + "acc_norm": 0.5147058823529411, + "acc_norm_stderr": 0.03035969707904612 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6530612244897959, + "acc_stderr": 0.0304725260267265, + "acc_norm": 0.6530612244897959, + "acc_norm_stderr": 0.0304725260267265 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7426160337552743, + "acc_stderr": 0.028458820991460305, + "acc_norm": 0.7426160337552743, + "acc_norm_stderr": 0.028458820991460305 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.4002607561929596, + "acc_stderr": 0.012513582529136208, + "acc_norm": 0.4002607561929596, + "acc_norm_stderr": 0.012513582529136208 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6323529411764706, + "acc_stderr": 0.03384132045674118, + "acc_norm": 0.6323529411764706, + "acc_norm_stderr": 0.03384132045674118 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6545454545454545, + "acc_stderr": 0.037131580674819135, + "acc_norm": 0.6545454545454545, + "acc_norm_stderr": 0.037131580674819135 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2974296205630355, + "mc1_stderr": 0.016002651487361002, + "mc2": 0.45096541545871954, + "mc2_stderr": 0.015351270163058638 + 
}, + "harness|ko_commongen_v2|2": { + "acc": 0.5076741440377804, + "acc_stderr": 0.017188329219654273, + "acc_norm": 0.5938606847697757, + "acc_norm_stderr": 0.0168847495031914 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + 
"harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Megastudy/M-SOLAR-10.7B-v1.2", + "model_sha": "21d08bb6676a9ad729f4e048c3eaae9a4897149a", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Minirecord/Merge_test01/result_2023-12-08 04:54:05.json b/Minirecord/Merge_test01/result_2023-12-08 04:54:05.json new file mode 100644 index 0000000000000000000000000000000000000000..61a098b55484a91e4c681bef87d1b77f9fded6d4 --- /dev/null +++ b/Minirecord/Merge_test01/result_2023-12-08 04:54:05.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.42918088737201365, + "acc_stderr": 0.014464085894870651, + "acc_norm": 0.48208191126279865, + "acc_norm_stderr": 0.01460200558549098 + }, + "harness|ko_hellaswag|10": { + "acc": 0.41256721768571997, + "acc_stderr": 0.0049129004503708365, + "acc_norm": 0.5376419040031866, + "acc_norm_stderr": 0.004975621147406099 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 
0.5730994152046783, + "acc_stderr": 0.03793620616529916, + "acc_norm": 0.5730994152046783, + "acc_norm_stderr": 0.03793620616529916 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5922330097087378, + "acc_stderr": 0.04865777570410768, + "acc_norm": 0.5922330097087378, + "acc_norm_stderr": 0.04865777570410768 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5874840357598978, + "acc_stderr": 0.017604149108671925, + "acc_norm": 0.5874840357598978, + "acc_norm_stderr": 0.017604149108671925 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34814814814814815, + "acc_stderr": 0.041153246103369526, + "acc_norm": 0.34814814814814815, + "acc_norm_stderr": 0.041153246103369526 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4297872340425532, + "acc_stderr": 0.03236214467715564, + "acc_norm": 0.4297872340425532, + "acc_norm_stderr": 0.03236214467715564 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42771084337349397, + "acc_stderr": 0.03851597683718533, + "acc_norm": 0.42771084337349397, + "acc_norm_stderr": 0.03851597683718533 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5016077170418006, + "acc_stderr": 0.02839794490780661, + "acc_norm": 0.5016077170418006, + "acc_norm_stderr": 0.02839794490780661 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5381165919282511, + "acc_stderr": 0.03346015011973228, + "acc_norm": 0.5381165919282511, + "acc_norm_stderr": 0.03346015011973228 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4732824427480916, + "acc_stderr": 0.04379024936553894, + "acc_norm": 0.4732824427480916, + "acc_norm_stderr": 0.04379024936553894 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.45, + "acc_stderr": 0.04999999999999998, + "acc_norm": 0.45, + "acc_norm_stderr": 0.04999999999999998 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 
0.6060606060606061, + "acc_stderr": 0.03481285338232963, + "acc_norm": 0.6060606060606061, + "acc_norm_stderr": 0.03481285338232963 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4206896551724138, + "acc_stderr": 0.0411391498118926, + "acc_norm": 0.4206896551724138, + "acc_norm_stderr": 0.0411391498118926 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.043364327079931785, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.043364327079931785 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5168067226890757, + "acc_stderr": 0.03246013680375308, + "acc_norm": 0.5168067226890757, + "acc_norm_stderr": 0.03246013680375308 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5076923076923077, + "acc_stderr": 0.02534800603153475, + "acc_norm": 0.5076923076923077, + "acc_norm_stderr": 0.02534800603153475 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.62, + "acc_stderr": 0.0487831731214563, + "acc_norm": 0.62, + "acc_norm_stderr": 0.0487831731214563 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5092592592592593, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.5092592592592593, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3694581280788177, + "acc_stderr": 0.03395970381998574, + "acc_norm": 0.3694581280788177, + "acc_norm_stderr": 0.03395970381998574 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4838709677419355, + "acc_stderr": 0.028429203176724555, + "acc_norm": 0.4838709677419355, + "acc_norm_stderr": 0.028429203176724555 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7393162393162394, + "acc_stderr": 0.02876034895652341, + "acc_norm": 0.7393162393162394, + "acc_norm_stderr": 0.02876034895652341 + }, + 
"harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5207547169811321, + "acc_stderr": 0.030746349975723463, + "acc_norm": 0.5207547169811321, + "acc_norm_stderr": 0.030746349975723463 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.0478833976870286, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.0478833976870286 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.28888888888888886, + "acc_stderr": 0.027634907264178544, + "acc_norm": 0.28888888888888886, + "acc_norm_stderr": 0.027634907264178544 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.32450331125827814, + "acc_stderr": 0.038227469376587525, + "acc_norm": 0.32450331125827814, + "acc_norm_stderr": 0.038227469376587525 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6119402985074627, + "acc_stderr": 0.03445789964362749, + "acc_norm": 0.6119402985074627, + "acc_norm_stderr": 0.03445789964362749 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3988439306358382, + "acc_stderr": 0.03733626655383509, + "acc_norm": 0.3988439306358382, + "acc_norm_stderr": 0.03733626655383509 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3994708994708995, + "acc_stderr": 0.025225450284067877, + "acc_norm": 0.3994708994708995, + "acc_norm_stderr": 0.025225450284067877 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3680555555555556, + "acc_stderr": 0.04032999053960719, + "acc_norm": 0.3680555555555556, + "acc_norm_stderr": 0.04032999053960719 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.62, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.62, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.546242774566474, + "acc_stderr": 0.026803720583206184, + "acc_norm": 0.546242774566474, + 
"acc_norm_stderr": 0.026803720583206184 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5030674846625767, + "acc_stderr": 0.03928297078179663, + "acc_norm": 0.5030674846625767, + "acc_norm_stderr": 0.03928297078179663 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4537037037037037, + "acc_stderr": 0.027701228468542602, + "acc_norm": 0.4537037037037037, + "acc_norm_stderr": 0.027701228468542602 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5699481865284974, + "acc_stderr": 0.03572954333144808, + "acc_norm": 0.5699481865284974, + "acc_norm_stderr": 0.03572954333144808 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.32456140350877194, + "acc_stderr": 0.04404556157374768, + "acc_norm": 0.32456140350877194, + "acc_norm_stderr": 0.04404556157374768 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5596330275229358, + "acc_stderr": 0.021284310623761543, + "acc_norm": 0.5596330275229358, + "acc_norm_stderr": 0.021284310623761543 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.40476190476190477, + "acc_stderr": 0.0439025926537756, + "acc_norm": 0.40476190476190477, + "acc_norm_stderr": 0.0439025926537756 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4803921568627451, + "acc_stderr": 0.028607893699576063, + "acc_norm": 0.4803921568627451, + "acc_norm_stderr": 0.028607893699576063 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7024793388429752, + "acc_stderr": 0.041733491480834974, + "acc_norm": 0.7024793388429752, + "acc_norm_stderr": 0.041733491480834974 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.47368421052631576, + "acc_stderr": 0.040633027314866704, + 
"acc_norm": 0.47368421052631576, + "acc_norm_stderr": 0.040633027314866704 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4133986928104575, + "acc_stderr": 0.01992211568278668, + "acc_norm": 0.4133986928104575, + "acc_norm_stderr": 0.01992211568278668 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.02812163604063989, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.02812163604063989 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.44642857142857145, + "acc_stderr": 0.047184714852195886, + "acc_norm": 0.44642857142857145, + "acc_norm_stderr": 0.047184714852195886 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.39351851851851855, + "acc_stderr": 0.03331747876370312, + "acc_norm": 0.39351851851851855, + "acc_norm_stderr": 0.03331747876370312 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.20670391061452514, + "acc_stderr": 0.013543260867834457, + "acc_norm": 0.20670391061452514, + "acc_norm_stderr": 0.013543260867834457 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.66, + "acc_stderr": 0.04760952285695237, + "acc_norm": 0.66, + "acc_norm_stderr": 0.04760952285695237 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4117647058823529, + "acc_stderr": 0.029896163033125468, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.029896163033125468 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5428571428571428, + "acc_stderr": 0.03189141832421396, + "acc_norm": 0.5428571428571428, + "acc_norm_stderr": 0.03189141832421396 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6624472573839663, + "acc_stderr": 0.03078154910202621, + "acc_norm": 0.6624472573839663, + "acc_norm_stderr": 0.03078154910202621 + }, + 
"harness|ko_mmlu_professional_law|5": { + "acc": 0.35528031290743156, + "acc_stderr": 0.01222362336404404, + "acc_norm": 0.35528031290743156, + "acc_norm_stderr": 0.01222362336404404 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5, + "acc_stderr": 0.03509312031717982, + "acc_norm": 0.5, + "acc_norm_stderr": 0.03509312031717982 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5151515151515151, + "acc_stderr": 0.03902551007374448, + "acc_norm": 0.5151515151515151, + "acc_norm_stderr": 0.03902551007374448 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3537331701346389, + "mc1_stderr": 0.01673781435884615, + "mc2": 0.5166103125164593, + "mc2_stderr": 0.015886971566784123 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.46162927981109797, + "acc_stderr": 0.01713966022184556, + "acc_norm": 0.4793388429752066, + "acc_norm_stderr": 0.017175671279836446 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + 
"harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Minirecord/Merge_test01", + "model_sha": "b65c745bab164c7292374a9df9119c61117a382e", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Minirecord/Mini_DPO_test02/result_2023-11-30 
05:44:03.json b/Minirecord/Mini_DPO_test02/result_2023-11-30 05:44:03.json new file mode 100644 index 0000000000000000000000000000000000000000..ad61a762fa7777610f2e56de3b8c5d332424e488 --- /dev/null +++ b/Minirecord/Mini_DPO_test02/result_2023-11-30 05:44:03.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.38993174061433444, + "acc_stderr": 0.01425295984889288, + "acc_norm": 0.447098976109215, + "acc_norm_stderr": 0.014529380160526838 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4025094602668791, + "acc_stderr": 0.0048940125556426375, + "acc_norm": 0.5183230432184823, + "acc_norm_stderr": 0.004986429808146767 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.47368421052631576, + "acc_stderr": 0.038295098689947286, + "acc_norm": 0.47368421052631576, + "acc_norm_stderr": 0.038295098689947286 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5728155339805825, + "acc_stderr": 0.04897957737781168, + "acc_norm": 0.5728155339805825, + "acc_norm_stderr": 0.04897957737781168 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.47381864623243936, + "acc_stderr": 0.017855434554041982, + "acc_norm": 0.47381864623243936, + "acc_norm_stderr": 0.017855434554041982 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.42962962962962964, + "acc_stderr": 0.04276349494376599, + "acc_norm": 0.42962962962962964, + "acc_norm_stderr": 0.04276349494376599 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.43829787234042555, + "acc_stderr": 0.03243618636108102, + "acc_norm": 0.43829787234042555, + "acc_norm_stderr": 0.03243618636108102 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39156626506024095, + "acc_stderr": 0.037998574544796354, + "acc_norm": 0.39156626506024095, + "acc_norm_stderr": 0.037998574544796354 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 
0.48231511254019294, + "acc_stderr": 0.02838032284907713, + "acc_norm": 0.48231511254019294, + "acc_norm_stderr": 0.02838032284907713 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4618834080717489, + "acc_stderr": 0.033460150119732274, + "acc_norm": 0.4618834080717489, + "acc_norm_stderr": 0.033460150119732274 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.44274809160305345, + "acc_stderr": 0.04356447202665069, + "acc_norm": 0.44274809160305345, + "acc_norm_stderr": 0.04356447202665069 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.44, + "acc_stderr": 0.0498887651569859, + "acc_norm": 0.44, + "acc_norm_stderr": 0.0498887651569859 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5707070707070707, + "acc_stderr": 0.035265527246011986, + "acc_norm": 0.5707070707070707, + "acc_norm_stderr": 0.035265527246011986 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.45517241379310347, + "acc_stderr": 0.04149886942192117, + "acc_norm": 0.45517241379310347, + "acc_norm_stderr": 0.04149886942192117 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.14705882352941177, + "acc_stderr": 0.03524068951567449, + "acc_norm": 0.14705882352941177, + "acc_norm_stderr": 0.03524068951567449 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4831932773109244, + "acc_stderr": 0.03246013680375308, + "acc_norm": 0.4831932773109244, + "acc_norm_stderr": 0.03246013680375308 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.45897435897435895, + "acc_stderr": 0.025265525491284295, + "acc_norm": 0.45897435897435895, + "acc_norm_stderr": 0.025265525491284295 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + 
"harness|ko_mmlu_jurisprudence|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.04820403072760627, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.04820403072760627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.35467980295566504, + "acc_stderr": 0.03366124489051449, + "acc_norm": 0.35467980295566504, + "acc_norm_stderr": 0.03366124489051449 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.47096774193548385, + "acc_stderr": 0.028396016402761008, + "acc_norm": 0.47096774193548385, + "acc_norm_stderr": 0.028396016402761008 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7264957264957265, + "acc_stderr": 0.029202540153431183, + "acc_norm": 0.7264957264957265, + "acc_norm_stderr": 0.029202540153431183 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.49056603773584906, + "acc_stderr": 0.030767394707808086, + "acc_norm": 0.49056603773584906, + "acc_norm_stderr": 0.030767394707808086 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.028317533496066482, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.028317533496066482 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.037101857261199946, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.037101857261199946 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6268656716417911, + "acc_stderr": 0.034198326081760065, + "acc_norm": 0.6268656716417911, + "acc_norm_stderr": 0.034198326081760065 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.36416184971098264, + "acc_stderr": 0.03669072477416906, + "acc_norm": 0.36416184971098264, + "acc_norm_stderr": 0.03669072477416906 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.36772486772486773, + "acc_stderr": 
0.024833839825562417, + "acc_norm": 0.36772486772486773, + "acc_norm_stderr": 0.024833839825562417 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3194444444444444, + "acc_stderr": 0.038990736873573344, + "acc_norm": 0.3194444444444444, + "acc_norm_stderr": 0.038990736873573344 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.57, + "acc_stderr": 0.04975698519562426, + "acc_norm": 0.57, + "acc_norm_stderr": 0.04975698519562426 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5028901734104047, + "acc_stderr": 0.02691864538323901, + "acc_norm": 0.5028901734104047, + "acc_norm_stderr": 0.02691864538323901 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.50920245398773, + "acc_stderr": 0.03927705600787443, + "acc_norm": 0.50920245398773, + "acc_norm_stderr": 0.03927705600787443 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.02764847787741332, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.02764847787741332 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5284974093264249, + "acc_stderr": 0.036025735712884414, + "acc_norm": 0.5284974093264249, + "acc_norm_stderr": 0.036025735712884414 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.30701754385964913, + "acc_stderr": 0.0433913832257986, + "acc_norm": 0.30701754385964913, + "acc_norm_stderr": 0.0433913832257986 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.47706422018348627, + "acc_stderr": 0.0214147570581755, + "acc_norm": 0.47706422018348627, + "acc_norm_stderr": 0.0214147570581755 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30952380952380953, + 
"acc_stderr": 0.04134913018303316, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.04134913018303316 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5130718954248366, + "acc_stderr": 0.028620130800700246, + "acc_norm": 0.5130718954248366, + "acc_norm_stderr": 0.028620130800700246 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6528925619834711, + "acc_stderr": 0.043457245702925335, + "acc_norm": 0.6528925619834711, + "acc_norm_stderr": 0.043457245702925335 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40131578947368424, + "acc_stderr": 0.03988903703336284, + "acc_norm": 0.40131578947368424, + "acc_norm_stderr": 0.03988903703336284 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.38562091503267976, + "acc_stderr": 0.019691459052354147, + "acc_norm": 0.38562091503267976, + "acc_norm_stderr": 0.019691459052354147 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32269503546099293, + "acc_stderr": 0.027889139300534774, + "acc_norm": 0.32269503546099293, + "acc_norm_stderr": 0.027889139300534774 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.375, + "acc_stderr": 0.04595091388086298, + "acc_norm": 0.375, + "acc_norm_stderr": 0.04595091388086298 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.031415546294025445, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.031415546294025445 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2346368715083799, + "acc_stderr": 0.014173044098303672, + "acc_norm": 0.2346368715083799, + "acc_norm_stderr": 0.014173044098303672 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_high_school_computer_science|5": 
{ + "acc": 0.65, + "acc_stderr": 0.04793724854411021, + "acc_norm": 0.65, + "acc_norm_stderr": 0.04793724854411021 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.34191176470588236, + "acc_stderr": 0.028814722422254184, + "acc_norm": 0.34191176470588236, + "acc_norm_stderr": 0.028814722422254184 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4857142857142857, + "acc_stderr": 0.03199615232806287, + "acc_norm": 0.4857142857142857, + "acc_norm_stderr": 0.03199615232806287 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6075949367088608, + "acc_stderr": 0.0317847187456473, + "acc_norm": 0.6075949367088608, + "acc_norm_stderr": 0.0317847187456473 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3239895697522816, + "acc_stderr": 0.01195284080964657, + "acc_norm": 0.3239895697522816, + "acc_norm_stderr": 0.01195284080964657 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.47549019607843135, + "acc_stderr": 0.03505093194348798, + "acc_norm": 0.47549019607843135, + "acc_norm_stderr": 0.03505093194348798 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.038956580652718446, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.038956580652718446 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2729498164014688, + "mc1_stderr": 0.015594753632006518, + "mc2": 0.41568147235613934, + "mc2_stderr": 0.016165339412432606 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4852420306965762, + "acc_stderr": 0.017182864434998567, + "acc_norm": 0.5395513577331759, + "acc_norm_stderr": 0.017136487626049846 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + 
"harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + 
"harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Minirecord/Mini_DPO_test02", + "model_sha": "cd417467644c4178100083e342bad88a3f968be6", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Minirecord/Mini_DPO_test_01/result_2023-11-29 11:24:57.json b/Minirecord/Mini_DPO_test_01/result_2023-11-29 11:24:57.json new file mode 100644 index 0000000000000000000000000000000000000000..d079b7359502396642ecf95585c391eea0401ae1 --- /dev/null +++ b/Minirecord/Mini_DPO_test_01/result_2023-11-29 11:24:57.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.44197952218430037, + "acc_stderr": 0.014512682523128343, + "acc_norm": 0.48293515358361777, + "acc_norm_stderr": 0.014602878388536595 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4159529974108743, + "acc_stderr": 0.004918781662373929, + "acc_norm": 0.5468034256124278, + "acc_norm_stderr": 0.00496787247538328 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5497076023391813, + "acc_stderr": 0.03815827365913237, + "acc_norm": 0.5497076023391813, + "acc_norm_stderr": 0.03815827365913237 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6213592233009708, + "acc_stderr": 0.04802694698258974, + "acc_norm": 0.6213592233009708, + "acc_norm_stderr": 0.04802694698258974 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5759897828863346, + "acc_stderr": 0.01767226332908422, + "acc_norm": 0.5759897828863346, + "acc_norm_stderr": 0.01767226332908422 + }, + "harness|ko_mmlu_anatomy|5": { + 
"acc": 0.37777777777777777, + "acc_stderr": 0.04188307537595853, + "acc_norm": 0.37777777777777777, + "acc_norm_stderr": 0.04188307537595853 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.425531914893617, + "acc_stderr": 0.032321469162244675, + "acc_norm": 0.425531914893617, + "acc_norm_stderr": 0.032321469162244675 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4457831325301205, + "acc_stderr": 0.03869543323472101, + "acc_norm": 0.4457831325301205, + "acc_norm_stderr": 0.03869543323472101 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4983922829581994, + "acc_stderr": 0.02839794490780661, + "acc_norm": 0.4983922829581994, + "acc_norm_stderr": 0.02839794490780661 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.515695067264574, + "acc_stderr": 0.0335412657542081, + "acc_norm": 0.515695067264574, + "acc_norm_stderr": 0.0335412657542081 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5343511450381679, + "acc_stderr": 0.04374928560599738, + "acc_norm": 0.5343511450381679, + "acc_norm_stderr": 0.04374928560599738 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6212121212121212, + "acc_stderr": 0.03456088731993747, + "acc_norm": 0.6212121212121212, + "acc_norm_stderr": 0.03456088731993747 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4413793103448276, + "acc_stderr": 0.041379310344827586, + "acc_norm": 0.4413793103448276, + "acc_norm_stderr": 0.041379310344827586 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.04220773659171451, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.04220773659171451 + }, + 
"harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5, + "acc_stderr": 0.032478490123081544, + "acc_norm": 0.5, + "acc_norm_stderr": 0.032478490123081544 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.49230769230769234, + "acc_stderr": 0.025348006031534805, + "acc_norm": 0.49230769230769234, + "acc_norm_stderr": 0.025348006031534805 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.62, + "acc_stderr": 0.04878317312145631, + "acc_norm": 0.62, + "acc_norm_stderr": 0.04878317312145631 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4088669950738916, + "acc_stderr": 0.03459058815883233, + "acc_norm": 0.4088669950738916, + "acc_norm_stderr": 0.03459058815883233 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4935483870967742, + "acc_stderr": 0.02844163823354051, + "acc_norm": 0.4935483870967742, + "acc_norm_stderr": 0.02844163823354051 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7264957264957265, + "acc_stderr": 0.02920254015343118, + "acc_norm": 0.7264957264957265, + "acc_norm_stderr": 0.02920254015343118 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4981132075471698, + "acc_stderr": 0.030772653642075664, + "acc_norm": 0.4981132075471698, + "acc_norm_stderr": 0.030772653642075664 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5363636363636364, + "acc_stderr": 0.04776449162396197, + "acc_norm": 0.5363636363636364, + "acc_norm_stderr": 0.04776449162396197 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2814814814814815, + "acc_stderr": 0.027420019350945273, + "acc_norm": 0.2814814814814815, + "acc_norm_stderr": 0.027420019350945273 + }, + 
"harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.03780445850526732, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.03780445850526732 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6616915422885572, + "acc_stderr": 0.033455630703391914, + "acc_norm": 0.6616915422885572, + "acc_norm_stderr": 0.033455630703391914 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.37572254335260113, + "acc_stderr": 0.03692820767264867, + "acc_norm": 0.37572254335260113, + "acc_norm_stderr": 0.03692820767264867 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.025107425481137282, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.025107425481137282 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4097222222222222, + "acc_stderr": 0.04112490974670788, + "acc_norm": 0.4097222222222222, + "acc_norm_stderr": 0.04112490974670788 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.62, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.62, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5606936416184971, + "acc_stderr": 0.026720034380514998, + "acc_norm": 0.5606936416184971, + "acc_norm_stderr": 0.026720034380514998 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5153374233128835, + "acc_stderr": 0.039265223787088424, + "acc_norm": 0.5153374233128835, + "acc_norm_stderr": 0.039265223787088424 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5030864197530864, + "acc_stderr": 0.02782021415859437, + "acc_norm": 0.5030864197530864, + "acc_norm_stderr": 0.02782021415859437 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + 
"harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5544041450777202, + "acc_stderr": 0.03587014986075659, + "acc_norm": 0.5544041450777202, + "acc_norm_stderr": 0.03587014986075659 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.35964912280701755, + "acc_stderr": 0.04514496132873633, + "acc_norm": 0.35964912280701755, + "acc_norm_stderr": 0.04514496132873633 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5394495412844037, + "acc_stderr": 0.021370494609995103, + "acc_norm": 0.5394495412844037, + "acc_norm_stderr": 0.021370494609995103 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.38095238095238093, + "acc_stderr": 0.043435254289490965, + "acc_norm": 0.38095238095238093, + "acc_norm_stderr": 0.043435254289490965 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4934640522875817, + "acc_stderr": 0.028627470550556054, + "acc_norm": 0.4934640522875817, + "acc_norm_stderr": 0.028627470550556054 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6694214876033058, + "acc_stderr": 0.04294340845212095, + "acc_norm": 0.6694214876033058, + "acc_norm_stderr": 0.04294340845212095 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4868421052631579, + "acc_stderr": 0.040675331363091746, + "acc_norm": 0.4868421052631579, + "acc_norm_stderr": 0.040675331363091746 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4068627450980392, + "acc_stderr": 0.019873802005061177, + "acc_norm": 0.4068627450980392, + "acc_norm_stderr": 0.019873802005061177 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.29432624113475175, + "acc_stderr": 0.027187127011503803, + "acc_norm": 0.29432624113475175, + "acc_norm_stderr": 0.027187127011503803 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4107142857142857, + "acc_stderr": 0.04669510663875191, + 
"acc_norm": 0.4107142857142857, + "acc_norm_stderr": 0.04669510663875191 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.37962962962962965, + "acc_stderr": 0.03309682581119035, + "acc_norm": 0.37962962962962965, + "acc_norm_stderr": 0.03309682581119035 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.19664804469273742, + "acc_stderr": 0.01329318302745465, + "acc_norm": 0.19664804469273742, + "acc_norm_stderr": 0.01329318302745465 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3235294117647059, + "acc_stderr": 0.028418208619406794, + "acc_norm": 0.3235294117647059, + "acc_norm_stderr": 0.028418208619406794 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5918367346938775, + "acc_stderr": 0.03146465712827424, + "acc_norm": 0.5918367346938775, + "acc_norm_stderr": 0.03146465712827424 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.679324894514768, + "acc_stderr": 0.03038193194999042, + "acc_norm": 0.679324894514768, + "acc_norm_stderr": 0.03038193194999042 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3318122555410691, + "acc_stderr": 0.012026088259897632, + "acc_norm": 0.3318122555410691, + "acc_norm_stderr": 0.012026088259897632 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4950980392156863, + "acc_stderr": 0.03509143375606786, + "acc_norm": 0.4950980392156863, + "acc_norm_stderr": 0.03509143375606786 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5696969696969697, + "acc_stderr": 0.03866225962879077, + "acc_norm": 0.5696969696969697, + "acc_norm_stderr": 0.03866225962879077 + }, + "harness|ko_truthfulqa_mc|0": 
{ + "mc1": 0.30354957160342716, + "mc1_stderr": 0.016095884155386844, + "mc2": 0.47779817930728524, + "mc2_stderr": 0.015859968375893638 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5100354191263282, + "acc_stderr": 0.01718689128689406, + "acc_norm": 0.5489964580873672, + "acc_norm_stderr": 0.017107618859549346 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Minirecord/Mini_DPO_test_01", + "model_sha": "ca901b8afa7f11d4cd192b3086eeaed0960edcd6", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Minirecord/Mini_Orca_daekeun_llama13b/result_2023-11-30 09:21:03.json b/Minirecord/Mini_Orca_daekeun_llama13b/result_2023-11-30 09:21:03.json new file mode 100644 index 0000000000000000000000000000000000000000..3c2cfb4b89059250e4f4fc9ee9bc17abb0fcdea3 --- /dev/null +++ b/Minirecord/Mini_Orca_daekeun_llama13b/result_2023-11-30 09:21:03.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4035836177474403, + "acc_stderr": 0.014337158914268436, + "acc_norm": 0.47013651877133106, + "acc_norm_stderr": 0.014585305840007109 + }, + "harness|ko_hellaswag|10": { + "acc": 
0.43019318860784705, + "acc_stderr": 0.004940911779273365, + "acc_norm": 0.5732921728739295, + "acc_norm_stderr": 0.004935882666250484 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5087719298245614, + "acc_stderr": 0.038342347441649924, + "acc_norm": 0.5087719298245614, + "acc_norm_stderr": 0.038342347441649924 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5436893203883495, + "acc_stderr": 0.049318019942204146, + "acc_norm": 0.5436893203883495, + "acc_norm_stderr": 0.049318019942204146 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5274584929757343, + "acc_stderr": 0.017852981266633938, + "acc_norm": 0.5274584929757343, + "acc_norm_stderr": 0.017852981266633938 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.042925967182569816, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.042925967182569816 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421255, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421255 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.42127659574468085, + "acc_stderr": 0.03227834510146267, + "acc_norm": 0.42127659574468085, + "acc_norm_stderr": 0.03227834510146267 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4397590361445783, + "acc_stderr": 0.03864139923699122, + "acc_norm": 0.4397590361445783, + "acc_norm_stderr": 0.03864139923699122 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5369774919614148, + "acc_stderr": 0.02832032583010592, + "acc_norm": 0.5369774919614148, + "acc_norm_stderr": 0.02832032583010592 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4798206278026906, + "acc_stderr": 0.033530461674123, + "acc_norm": 0.4798206278026906, + "acc_norm_stderr": 0.033530461674123 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4580152671755725, + "acc_stderr": 0.04369802690578756, + "acc_norm": 0.4580152671755725, + "acc_norm_stderr": 0.04369802690578756 + }, + 
"harness|ko_mmlu_medical_genetics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6111111111111112, + "acc_stderr": 0.0347327959083696, + "acc_norm": 0.6111111111111112, + "acc_norm_stderr": 0.0347327959083696 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.42758620689655175, + "acc_stderr": 0.04122737111370331, + "acc_norm": 0.42758620689655175, + "acc_norm_stderr": 0.04122737111370331 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237655, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237655 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4369747899159664, + "acc_stderr": 0.03221943636566196, + "acc_norm": 0.4369747899159664, + "acc_norm_stderr": 0.03221943636566196 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4256410256410256, + "acc_stderr": 0.025069094387296546, + "acc_norm": 0.4256410256410256, + "acc_norm_stderr": 0.025069094387296546 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.37438423645320196, + "acc_stderr": 0.03405155380561953, + "acc_norm": 0.37438423645320196, + "acc_norm_stderr": 0.03405155380561953 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.45806451612903226, + "acc_stderr": 0.028343787250540636, + "acc_norm": 0.45806451612903226, + "acc_norm_stderr": 
0.028343787250540636 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6623931623931624, + "acc_stderr": 0.030980296992618558, + "acc_norm": 0.6623931623931624, + "acc_norm_stderr": 0.030980296992618558 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4490566037735849, + "acc_stderr": 0.030612730713641092, + "acc_norm": 0.4490566037735849, + "acc_norm_stderr": 0.030612730713641092 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5636363636363636, + "acc_stderr": 0.04750185058907296, + "acc_norm": 0.5636363636363636, + "acc_norm_stderr": 0.04750185058907296 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.27037037037037037, + "acc_stderr": 0.027080372815145654, + "acc_norm": 0.27037037037037037, + "acc_norm_stderr": 0.027080372815145654 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.03780445850526732, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.03780445850526732 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5771144278606966, + "acc_stderr": 0.034932317774212816, + "acc_norm": 0.5771144278606966, + "acc_norm_stderr": 0.034932317774212816 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3988439306358382, + "acc_stderr": 0.03733626655383509, + "acc_norm": 0.3988439306358382, + "acc_norm_stderr": 0.03733626655383509 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2804232804232804, + "acc_stderr": 0.02313528797432564, + "acc_norm": 0.2804232804232804, + "acc_norm_stderr": 0.02313528797432564 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3402777777777778, + "acc_stderr": 0.03962135573486219, + "acc_norm": 0.3402777777777778, + "acc_norm_stderr": 0.03962135573486219 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.66, + "acc_stderr": 0.04760952285695237, + 
"acc_norm": 0.66, + "acc_norm_stderr": 0.04760952285695237 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5202312138728323, + "acc_stderr": 0.026897049996382868, + "acc_norm": 0.5202312138728323, + "acc_norm_stderr": 0.026897049996382868 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.49693251533742333, + "acc_stderr": 0.03928297078179662, + "acc_norm": 0.49693251533742333, + "acc_norm_stderr": 0.03928297078179662 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4660493827160494, + "acc_stderr": 0.027756535257347666, + "acc_norm": 0.4660493827160494, + "acc_norm_stderr": 0.027756535257347666 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5699481865284974, + "acc_stderr": 0.03572954333144808, + "acc_norm": 0.5699481865284974, + "acc_norm_stderr": 0.03572954333144808 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.0409698513984367, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.0409698513984367 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5412844036697247, + "acc_stderr": 0.021364122533881695, + "acc_norm": 0.5412844036697247, + "acc_norm_stderr": 0.021364122533881695 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2698412698412698, + "acc_stderr": 0.03970158273235173, + "acc_norm": 0.2698412698412698, + "acc_norm_stderr": 0.03970158273235173 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.38562091503267976, + "acc_stderr": 0.027870745278290313, + "acc_norm": 0.38562091503267976, + "acc_norm_stderr": 0.027870745278290313 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6942148760330579, + "acc_stderr": 
0.04205953933884123, + "acc_norm": 0.6942148760330579, + "acc_norm_stderr": 0.04205953933884123 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3684210526315789, + "acc_stderr": 0.03925523381052932, + "acc_norm": 0.3684210526315789, + "acc_norm_stderr": 0.03925523381052932 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3709150326797386, + "acc_stderr": 0.019542101564854107, + "acc_norm": 0.3709150326797386, + "acc_norm_stderr": 0.019542101564854107 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.33687943262411346, + "acc_stderr": 0.02819553487396673, + "acc_norm": 0.33687943262411346, + "acc_norm_stderr": 0.02819553487396673 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.26785714285714285, + "acc_stderr": 0.04203277291467762, + "acc_norm": 0.26785714285714285, + "acc_norm_stderr": 0.04203277291467762 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3472222222222222, + "acc_stderr": 0.032468872436376486, + "acc_norm": 0.3472222222222222, + "acc_norm_stderr": 0.032468872436376486 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3860294117647059, + "acc_stderr": 0.029573269134411124, + "acc_norm": 0.3860294117647059, + "acc_norm_stderr": 0.029573269134411124 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.44081632653061226, + "acc_stderr": 0.03178419114175363, + "acc_norm": 0.44081632653061226, + "acc_norm_stderr": 0.03178419114175363 + }, + 
"harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6286919831223629, + "acc_stderr": 0.03145068600744859, + "acc_norm": 0.6286919831223629, + "acc_norm_stderr": 0.03145068600744859 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.35071707953063885, + "acc_stderr": 0.012187773370741518, + "acc_norm": 0.35071707953063885, + "acc_norm_stderr": 0.012187773370741518 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5343137254901961, + "acc_stderr": 0.03501038327635897, + "acc_norm": 0.5343137254901961, + "acc_norm_stderr": 0.03501038327635897 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5333333333333333, + "acc_stderr": 0.03895658065271847, + "acc_norm": 0.5333333333333333, + "acc_norm_stderr": 0.03895658065271847 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2937576499388005, + "mc1_stderr": 0.01594506858123661, + "mc2": 0.46568684481633393, + "mc2_stderr": 0.015215490629764046 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.512396694214876, + "acc_stderr": 0.01718506973267653, + "acc_norm": 0.5903187721369539, + "acc_norm_stderr": 0.01690756819221948 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + 
"harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Minirecord/Mini_Orca_daekeun_llama13b", + "model_sha": "6b20fd25f7afade5959ab4529b151eac8fba7798", + "model_dtype": "torch.float16", 
+ "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Minirecord/Mini_llama13b_test123/result_2023-12-12 09:38:52.json b/Minirecord/Mini_llama13b_test123/result_2023-12-12 09:38:52.json new file mode 100644 index 0000000000000000000000000000000000000000..a033c6fc45a160caef940060844d809e1a94a741 --- /dev/null +++ b/Minirecord/Mini_llama13b_test123/result_2023-12-12 09:38:52.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4121160409556314, + "acc_stderr": 0.014383915302225398, + "acc_norm": 0.48208191126279865, + "acc_norm_stderr": 0.014602005585490982 + }, + "harness|ko_hellaswag|10": { + "acc": 0.44064927305317664, + "acc_stderr": 0.004954503606471607, + "acc_norm": 0.5898227444732125, + "acc_norm_stderr": 0.004908604732082816 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5146198830409356, + "acc_stderr": 0.038331852752130254, + "acc_norm": 0.5146198830409356, + "acc_norm_stderr": 0.038331852752130254 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5631067961165048, + "acc_stderr": 0.04911147107365777, + "acc_norm": 0.5631067961165048, + "acc_norm_stderr": 0.04911147107365777 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.545338441890166, + "acc_stderr": 0.017806304585052595, + "acc_norm": 0.545338441890166, + "acc_norm_stderr": 0.017806304585052595 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4888888888888889, + "acc_stderr": 0.04318275491977976, + "acc_norm": 0.4888888888888889, + "acc_norm_stderr": 0.04318275491977976 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39574468085106385, + "acc_stderr": 0.03196758697835362, + "acc_norm": 0.39574468085106385, + "acc_norm_stderr": 0.03196758697835362 + }, + 
"harness|ko_mmlu_virology|5": { + "acc": 0.4457831325301205, + "acc_stderr": 0.03869543323472101, + "acc_norm": 0.4457831325301205, + "acc_norm_stderr": 0.03869543323472101 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5369774919614148, + "acc_stderr": 0.02832032583010592, + "acc_norm": 0.5369774919614148, + "acc_norm_stderr": 0.02832032583010592 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.49327354260089684, + "acc_stderr": 0.03355476596234354, + "acc_norm": 0.49327354260089684, + "acc_norm_stderr": 0.03355476596234354 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48854961832061067, + "acc_stderr": 0.04384140024078016, + "acc_norm": 0.48854961832061067, + "acc_norm_stderr": 0.04384140024078016 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5808080808080808, + "acc_stderr": 0.035155207286704175, + "acc_norm": 0.5808080808080808, + "acc_norm_stderr": 0.035155207286704175 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.42758620689655175, + "acc_stderr": 0.04122737111370331, + "acc_norm": 0.42758620689655175, + "acc_norm_stderr": 0.04122737111370331 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237657, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237657 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4495798319327731, + "acc_stderr": 0.03231293497137707, + "acc_norm": 0.4495798319327731, + "acc_norm_stderr": 0.03231293497137707 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4512820512820513, + "acc_stderr": 0.025230381238934833, + "acc_norm": 0.4512820512820513, + "acc_norm_stderr": 0.025230381238934833 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + 
"acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.42, + "acc_stderr": 0.04960449637488584, + "acc_norm": 0.42, + "acc_norm_stderr": 0.04960449637488584 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3891625615763547, + "acc_stderr": 0.034304624161038716, + "acc_norm": 0.3891625615763547, + "acc_norm_stderr": 0.034304624161038716 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.47096774193548385, + "acc_stderr": 0.028396016402761, + "acc_norm": 0.47096774193548385, + "acc_norm_stderr": 0.028396016402761 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6452991452991453, + "acc_stderr": 0.03134250486245402, + "acc_norm": 0.6452991452991453, + "acc_norm_stderr": 0.03134250486245402 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4377358490566038, + "acc_stderr": 0.030533338430467512, + "acc_norm": 0.4377358490566038, + "acc_norm_stderr": 0.030533338430467512 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5909090909090909, + "acc_stderr": 0.04709306978661896, + "acc_norm": 0.5909090909090909, + "acc_norm_stderr": 0.04709306978661896 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.027940457136228426, + "acc_norm": 0.3, + "acc_norm_stderr": 0.027940457136228426 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.037579499229433426, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.037579499229433426 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.582089552238806, + "acc_stderr": 0.034875586404620636, + "acc_norm": 0.582089552238806, + "acc_norm_stderr": 0.034875586404620636 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.41040462427745666, + "acc_stderr": 0.03750757044895538, + "acc_norm": 
0.41040462427745666, + "acc_norm_stderr": 0.03750757044895538 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.291005291005291, + "acc_stderr": 0.023393826500484875, + "acc_norm": 0.291005291005291, + "acc_norm_stderr": 0.023393826500484875 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3958333333333333, + "acc_stderr": 0.04089465449325583, + "acc_norm": 0.3958333333333333, + "acc_norm_stderr": 0.04089465449325583 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5115606936416185, + "acc_stderr": 0.02691189868637792, + "acc_norm": 0.5115606936416185, + "acc_norm_stderr": 0.02691189868637792 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5153374233128835, + "acc_stderr": 0.03926522378708843, + "acc_norm": 0.5153374233128835, + "acc_norm_stderr": 0.03926522378708843 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5123456790123457, + "acc_stderr": 0.027812262269327235, + "acc_norm": 0.5123456790123457, + "acc_norm_stderr": 0.027812262269327235 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5906735751295337, + "acc_stderr": 0.03548608168860806, + "acc_norm": 0.5906735751295337, + "acc_norm_stderr": 0.03548608168860806 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.0414243971948936, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.0414243971948936 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5541284403669725, + "acc_stderr": 0.021311335009708575, + 
"acc_norm": 0.5541284403669725, + "acc_norm_stderr": 0.021311335009708575 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.23809523809523808, + "acc_stderr": 0.03809523809523812, + "acc_norm": 0.23809523809523808, + "acc_norm_stderr": 0.03809523809523812 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4150326797385621, + "acc_stderr": 0.028213504177824093, + "acc_norm": 0.4150326797385621, + "acc_norm_stderr": 0.028213504177824093 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6528925619834711, + "acc_stderr": 0.04345724570292535, + "acc_norm": 0.6528925619834711, + "acc_norm_stderr": 0.04345724570292535 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4144736842105263, + "acc_stderr": 0.04008973785779206, + "acc_norm": 0.4144736842105263, + "acc_norm_stderr": 0.04008973785779206 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.019659922493623333, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.019659922493623333 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32978723404255317, + "acc_stderr": 0.028045946942042398, + "acc_norm": 0.32978723404255317, + "acc_norm_stderr": 0.028045946942042398 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.042878587513404565, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.042878587513404565 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.35648148148148145, + "acc_stderr": 0.032664783315272714, + "acc_norm": 0.35648148148148145, + "acc_norm_stderr": 0.032664783315272714 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23687150837988827, + "acc_stderr": 0.014219570788103982, + "acc_norm": 0.23687150837988827, + "acc_norm_stderr": 0.014219570788103982 + }, + 
"harness|ko_mmlu_college_computer_science|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.47, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.47, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.029520095697687754, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.029520095697687754 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.45714285714285713, + "acc_stderr": 0.03189141832421397, + "acc_norm": 0.45714285714285713, + "acc_norm_stderr": 0.03189141832421397 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6582278481012658, + "acc_stderr": 0.030874537537553617, + "acc_norm": 0.6582278481012658, + "acc_norm_stderr": 0.030874537537553617 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3350717079530639, + "acc_stderr": 0.012055499471330375, + "acc_norm": 0.3350717079530639, + "acc_norm_stderr": 0.012055499471330375 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5294117647058824, + "acc_stderr": 0.03503235296367993, + "acc_norm": 0.5294117647058824, + "acc_norm_stderr": 0.03503235296367993 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5636363636363636, + "acc_stderr": 0.03872592983524754, + "acc_norm": 0.5636363636363636, + "acc_norm_stderr": 0.03872592983524754 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2962056303549572, + "mc1_stderr": 0.0159835951018114, + "mc2": 0.4673869792275578, + "mc2_stderr": 0.01544648001501871 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5017709563164109, + "acc_stderr": 0.01719024627623186, + "acc_norm": 0.5608028335301063, + "acc_norm_stderr": 0.017062775744780705 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 
1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + 
"harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Minirecord/Mini_llama13b_test123", + "model_sha": "8793a7abc5806d5796463aeed91a4b2406d7b9f2", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Minirecord/Mini_synatra_7b_01/result_2023-11-21 10:22:04.json b/Minirecord/Mini_synatra_7b_01/result_2023-11-21 10:22:04.json new file mode 100644 index 0000000000000000000000000000000000000000..a7f14c5a3db0f69e6a3d6ad87dcd1e388091c421 --- /dev/null +++ b/Minirecord/Mini_synatra_7b_01/result_2023-11-21 10:22:04.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.439419795221843, + "acc_stderr": 0.014503747823580127, + "acc_norm": 0.4778156996587031, + "acc_norm_stderr": 0.014597001927076136 + }, + "harness|ko_hellaswag|10": { + "acc": 0.407787293367855, + "acc_stderr": 0.004904189257891273, + "acc_norm": 0.5388368850826528, + "acc_norm_stderr": 0.004974706428434281 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5789473684210527, + "acc_stderr": 0.03786720706234214, + "acc_norm": 0.5789473684210527, + "acc_norm_stderr": 0.03786720706234214 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6213592233009708, + "acc_stderr": 0.04802694698258975, + "acc_norm": 0.6213592233009708, + "acc_norm_stderr": 0.04802694698258975 + }, + 
"harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5721583652618135, + "acc_stderr": 0.017692787927803724, + "acc_norm": 0.5721583652618135, + "acc_norm_stderr": 0.017692787927803724 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.04244633238353229, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.04244633238353229 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.42127659574468085, + "acc_stderr": 0.03227834510146268, + "acc_norm": 0.42127659574468085, + "acc_norm_stderr": 0.03227834510146268 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4036144578313253, + "acc_stderr": 0.03819486140758397, + "acc_norm": 0.4036144578313253, + "acc_norm_stderr": 0.03819486140758397 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4983922829581994, + "acc_stderr": 0.02839794490780661, + "acc_norm": 0.4983922829581994, + "acc_norm_stderr": 0.02839794490780661 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.515695067264574, + "acc_stderr": 0.0335412657542081, + "acc_norm": 0.515695067264574, + "acc_norm_stderr": 0.0335412657542081 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48854961832061067, + "acc_stderr": 0.043841400240780176, + "acc_norm": 0.48854961832061067, + "acc_norm_stderr": 0.043841400240780176 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5959595959595959, + "acc_stderr": 0.03496130972056127, + "acc_norm": 0.5959595959595959, + "acc_norm_stderr": 0.03496130972056127 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.43448275862068964, + "acc_stderr": 0.041307408795554966, + "acc_norm": 0.43448275862068964, + "acc_norm_stderr": 0.041307408795554966 + 
}, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.042801058373643966, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.042801058373643966 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5042016806722689, + "acc_stderr": 0.03247734334448111, + "acc_norm": 0.5042016806722689, + "acc_norm_stderr": 0.03247734334448111 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.44358974358974357, + "acc_stderr": 0.025189149894764194, + "acc_norm": 0.44358974358974357, + "acc_norm_stderr": 0.025189149894764194 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3694581280788177, + "acc_stderr": 0.03395970381998574, + "acc_norm": 0.3694581280788177, + "acc_norm_stderr": 0.03395970381998574 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5, + "acc_stderr": 0.028444006199428714, + "acc_norm": 0.5, + "acc_norm_stderr": 0.028444006199428714 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7393162393162394, + "acc_stderr": 0.02876034895652341, + "acc_norm": 0.7393162393162394, + "acc_norm_stderr": 0.02876034895652341 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4716981132075472, + "acc_stderr": 0.030723535249006114, + "acc_norm": 0.4716981132075472, + "acc_norm_stderr": 0.030723535249006114 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5181818181818182, + "acc_stderr": 0.04785964010794915, + "acc_norm": 0.5181818181818182, + "acc_norm_stderr": 
0.04785964010794915 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2851851851851852, + "acc_stderr": 0.027528599210340492, + "acc_norm": 0.2851851851851852, + "acc_norm_stderr": 0.027528599210340492 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.038020397601079024, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.038020397601079024 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6218905472636815, + "acc_stderr": 0.03428867848778658, + "acc_norm": 0.6218905472636815, + "acc_norm_stderr": 0.03428867848778658 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.41040462427745666, + "acc_stderr": 0.03750757044895538, + "acc_norm": 0.41040462427745666, + "acc_norm_stderr": 0.03750757044895538 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.4126984126984127, + "acc_stderr": 0.02535574126305527, + "acc_norm": 0.4126984126984127, + "acc_norm_stderr": 0.02535574126305527 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.375, + "acc_stderr": 0.04048439222695598, + "acc_norm": 0.375, + "acc_norm_stderr": 0.04048439222695598 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.62, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.62, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5260115606936416, + "acc_stderr": 0.026882643434022895, + "acc_norm": 0.5260115606936416, + "acc_norm_stderr": 0.026882643434022895 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5214723926380368, + "acc_stderr": 0.03924746876751129, + "acc_norm": 0.5214723926380368, + "acc_norm_stderr": 0.03924746876751129 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.02780165621232366, + "acc_norm": 0.48148148148148145, + 
"acc_norm_stderr": 0.02780165621232366 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5440414507772021, + "acc_stderr": 0.035944137112724366, + "acc_norm": 0.5440414507772021, + "acc_norm_stderr": 0.035944137112724366 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.30701754385964913, + "acc_stderr": 0.0433913832257986, + "acc_norm": 0.30701754385964913, + "acc_norm_stderr": 0.0433913832257986 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5357798165137615, + "acc_stderr": 0.021382364775701906, + "acc_norm": 0.5357798165137615, + "acc_norm_stderr": 0.021382364775701906 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3968253968253968, + "acc_stderr": 0.04375888492727062, + "acc_norm": 0.3968253968253968, + "acc_norm_stderr": 0.04375888492727062 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.477124183006536, + "acc_stderr": 0.028599936776089786, + "acc_norm": 0.477124183006536, + "acc_norm_stderr": 0.028599936776089786 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6859504132231405, + "acc_stderr": 0.04236964753041018, + "acc_norm": 0.6859504132231405, + "acc_norm_stderr": 0.04236964753041018 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.46710526315789475, + "acc_stderr": 0.040601270352363966, + "acc_norm": 0.46710526315789475, + "acc_norm_stderr": 0.040601270352363966 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.39869281045751637, + "acc_stderr": 0.019808281317449848, + "acc_norm": 0.39869281045751637, + "acc_norm_stderr": 0.019808281317449848 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.30851063829787234, + "acc_stderr": 
0.027553366165101362, + "acc_norm": 0.30851063829787234, + "acc_norm_stderr": 0.027553366165101362 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.04697113923010213, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.04697113923010213 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.03293377139415191, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.03293377139415191 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.21340782122905028, + "acc_stderr": 0.0137028599321961, + "acc_norm": 0.21340782122905028, + "acc_norm_stderr": 0.0137028599321961 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3492647058823529, + "acc_stderr": 0.028959755196824873, + "acc_norm": 0.3492647058823529, + "acc_norm_stderr": 0.028959755196824873 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5306122448979592, + "acc_stderr": 0.031949171367580624, + "acc_norm": 0.5306122448979592, + "acc_norm_stderr": 0.031949171367580624 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6708860759493671, + "acc_stderr": 0.030587326294702368, + "acc_norm": 0.6708860759493671, + "acc_norm_stderr": 0.030587326294702368 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3520208604954368, + "acc_stderr": 0.012198140605353592, + "acc_norm": 0.3520208604954368, + "acc_norm_stderr": 0.012198140605353592 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4803921568627451, + "acc_stderr": 0.03506612560524866, + "acc_norm": 0.4803921568627451, + "acc_norm_stderr": 0.03506612560524866 + }, + 
"harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.03888176921674101, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.03888176921674101 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2998776009791922, + "mc1_stderr": 0.016040352966713606, + "mc2": 0.4699224627284617, + "mc2_stderr": 0.015489423242321306 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4793388429752066, + "acc_stderr": 0.01717567127983645, + "acc_norm": 0.5218417945690673, + "acc_norm_stderr": 0.017173944474294378 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + 
"harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Minirecord/Mini_synatra_7b_01", + "model_sha": "1851fd437f8956055d6dc61bb1e1628b9ffdbaed", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Minirecord/Mini_synatra_7b_02/result_2023-11-22 01:45:18.json b/Minirecord/Mini_synatra_7b_02/result_2023-11-22 01:45:18.json new file mode 100644 index 0000000000000000000000000000000000000000..69694ab224552f66b8bab4eea9c7b4b106d89fc3 --- /dev/null +++ b/Minirecord/Mini_synatra_7b_02/result_2023-11-22 01:45:18.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 
0.4351535836177474, + "acc_stderr": 0.014487986197186047, + "acc_norm": 0.48464163822525597, + "acc_norm_stderr": 0.01460449612939491 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4062935670185222, + "acc_stderr": 0.0049013686295334155, + "acc_norm": 0.5382393945429197, + "acc_norm_stderr": 0.004975167382061836 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5614035087719298, + "acc_stderr": 0.038057975055904594, + "acc_norm": 0.5614035087719298, + "acc_norm_stderr": 0.038057975055904594 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5922330097087378, + "acc_stderr": 0.04865777570410769, + "acc_norm": 0.5922330097087378, + "acc_norm_stderr": 0.04865777570410769 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5721583652618135, + "acc_stderr": 0.017692787927803724, + "acc_norm": 0.5721583652618135, + "acc_norm_stderr": 0.017692787927803724 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3925925925925926, + "acc_stderr": 0.042185062153688786, + "acc_norm": 0.3925925925925926, + "acc_norm_stderr": 0.042185062153688786 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.42127659574468085, + "acc_stderr": 0.032278345101462685, + "acc_norm": 0.42127659574468085, + "acc_norm_stderr": 0.032278345101462685 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.45180722891566266, + "acc_stderr": 0.038743715565879536, + "acc_norm": 0.45180722891566266, + "acc_norm_stderr": 0.038743715565879536 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4855305466237942, + "acc_stderr": 0.02838619808417768, + "acc_norm": 0.4855305466237942, + "acc_norm_stderr": 0.02838619808417768 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5201793721973094, + "acc_stderr": 0.033530461674123, + "acc_norm": 0.5201793721973094, + "acc_norm_stderr": 0.033530461674123 + }, + "harness|ko_mmlu_human_sexuality|5": { + 
"acc": 0.5343511450381679, + "acc_stderr": 0.04374928560599738, + "acc_norm": 0.5343511450381679, + "acc_norm_stderr": 0.04374928560599738 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6262626262626263, + "acc_stderr": 0.034468977386593325, + "acc_norm": 0.6262626262626263, + "acc_norm_stderr": 0.034468977386593325 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.46206896551724136, + "acc_stderr": 0.04154659671707546, + "acc_norm": 0.46206896551724136, + "acc_norm_stderr": 0.04154659671707546 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.042801058373643966, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.042801058373643966 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5042016806722689, + "acc_stderr": 0.03247734334448111, + "acc_norm": 0.5042016806722689, + "acc_norm_stderr": 0.03247734334448111 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.49743589743589745, + "acc_stderr": 0.025350672979412205, + "acc_norm": 0.49743589743589745, + "acc_norm_stderr": 0.025350672979412205 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5092592592592593, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.5092592592592593, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4187192118226601, + "acc_stderr": 0.034711928605184676, + "acc_norm": 0.4187192118226601, + "acc_norm_stderr": 0.034711928605184676 + }, + 
"harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5, + "acc_stderr": 0.028444006199428714, + "acc_norm": 0.5, + "acc_norm_stderr": 0.028444006199428714 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7222222222222222, + "acc_stderr": 0.029343114798094462, + "acc_norm": 0.7222222222222222, + "acc_norm_stderr": 0.029343114798094462 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4981132075471698, + "acc_stderr": 0.030772653642075664, + "acc_norm": 0.4981132075471698, + "acc_norm_stderr": 0.030772653642075664 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5272727272727272, + "acc_stderr": 0.04782001791380061, + "acc_norm": 0.5272727272727272, + "acc_norm_stderr": 0.04782001791380061 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3111111111111111, + "acc_stderr": 0.028226446749683515, + "acc_norm": 0.3111111111111111, + "acc_norm_stderr": 0.028226446749683515 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.32450331125827814, + "acc_stderr": 0.038227469376587525, + "acc_norm": 0.32450331125827814, + "acc_norm_stderr": 0.038227469376587525 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6567164179104478, + "acc_stderr": 0.03357379665433431, + "acc_norm": 0.6567164179104478, + "acc_norm_stderr": 0.03357379665433431 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.36416184971098264, + "acc_stderr": 0.03669072477416907, + "acc_norm": 0.36416184971098264, + "acc_norm_stderr": 0.03669072477416907 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3994708994708995, + "acc_stderr": 0.025225450284067877, + "acc_norm": 0.3994708994708995, + "acc_norm_stderr": 0.025225450284067877 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4236111111111111, + "acc_stderr": 0.041321250197233685, + "acc_norm": 0.4236111111111111, + "acc_norm_stderr": 0.041321250197233685 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939098, + "acc_norm": 0.37, + 
"acc_norm_stderr": 0.04852365870939098 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.64, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.64, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5520231213872833, + "acc_stderr": 0.026772990653361823, + "acc_norm": 0.5520231213872833, + "acc_norm_stderr": 0.026772990653361823 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.49693251533742333, + "acc_stderr": 0.03928297078179663, + "acc_norm": 0.49693251533742333, + "acc_norm_stderr": 0.03928297078179663 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4876543209876543, + "acc_stderr": 0.027812262269327242, + "acc_norm": 0.4876543209876543, + "acc_norm_stderr": 0.027812262269327242 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5595854922279793, + "acc_stderr": 0.035827245300360945, + "acc_norm": 0.5595854922279793, + "acc_norm_stderr": 0.035827245300360945 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.34210526315789475, + "acc_stderr": 0.04462917535336936, + "acc_norm": 0.34210526315789475, + "acc_norm_stderr": 0.04462917535336936 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5412844036697247, + "acc_stderr": 0.021364122533881695, + "acc_norm": 0.5412844036697247, + "acc_norm_stderr": 0.021364122533881695 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3492063492063492, + "acc_stderr": 0.04263906892795133, + "acc_norm": 0.3492063492063492, + "acc_norm_stderr": 0.04263906892795133 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5032679738562091, + "acc_stderr": 0.02862930519400355, + "acc_norm": 0.5032679738562091, + "acc_norm_stderr": 0.02862930519400355 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 
0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6694214876033058, + "acc_stderr": 0.04294340845212095, + "acc_norm": 0.6694214876033058, + "acc_norm_stderr": 0.04294340845212095 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.48026315789473684, + "acc_stderr": 0.040657710025626057, + "acc_norm": 0.48026315789473684, + "acc_norm_stderr": 0.040657710025626057 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.40522875816993464, + "acc_stderr": 0.019861155193829166, + "acc_norm": 0.40522875816993464, + "acc_norm_stderr": 0.019861155193829166 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.28368794326241137, + "acc_stderr": 0.026891709428343957, + "acc_norm": 0.28368794326241137, + "acc_norm_stderr": 0.026891709428343957 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.44642857142857145, + "acc_stderr": 0.04718471485219588, + "acc_norm": 0.44642857142857145, + "acc_norm_stderr": 0.04718471485219588 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.38425925925925924, + "acc_stderr": 0.03317354514310742, + "acc_norm": 0.38425925925925924, + "acc_norm_stderr": 0.03317354514310742 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.19553072625698323, + "acc_stderr": 0.013264579220945082, + "acc_norm": 0.19553072625698323, + "acc_norm_stderr": 0.013264579220945082 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.59, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.59, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3235294117647059, + "acc_stderr": 0.028418208619406794, + "acc_norm": 0.3235294117647059, + "acc_norm_stderr": 0.028418208619406794 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 
0.5836734693877551, + "acc_stderr": 0.03155782816556166, + "acc_norm": 0.5836734693877551, + "acc_norm_stderr": 0.03155782816556166 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.679324894514768, + "acc_stderr": 0.030381931949990424, + "acc_norm": 0.679324894514768, + "acc_norm_stderr": 0.030381931949990424 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3363754889178618, + "acc_stderr": 0.012067083079452224, + "acc_norm": 0.3363754889178618, + "acc_norm_stderr": 0.012067083079452224 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4950980392156863, + "acc_stderr": 0.03509143375606786, + "acc_norm": 0.4950980392156863, + "acc_norm_stderr": 0.03509143375606786 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5757575757575758, + "acc_stderr": 0.03859268142070262, + "acc_norm": 0.5757575757575758, + "acc_norm_stderr": 0.03859268142070262 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3011015911872705, + "mc1_stderr": 0.01605899902610063, + "mc2": 0.47052567317541716, + "mc2_stderr": 0.015507149282229802 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.47461629279811096, + "acc_stderr": 0.017168187201429253, + "acc_norm": 0.5324675324675324, + "acc_norm_stderr": 0.017154073716682865 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, 
+ "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + 
"model_name": "Minirecord/Mini_synatra_7b_02", + "model_sha": "cdea3a61219bec471946ed8330250119b6d45df0", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Minirecord/Mini_synatra_7b_03/result_2023-11-23 08:17:10.json b/Minirecord/Mini_synatra_7b_03/result_2023-11-23 08:17:10.json new file mode 100644 index 0000000000000000000000000000000000000000..861ba3792b7f9425b5f55a6bea87f063b431548d --- /dev/null +++ b/Minirecord/Mini_synatra_7b_03/result_2023-11-23 08:17:10.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4334470989761092, + "acc_stderr": 0.014481376224558896, + "acc_norm": 0.4684300341296928, + "acc_norm_stderr": 0.014582236460866977 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4069906393148775, + "acc_stderr": 0.004902690765066419, + "acc_norm": 0.5370444134634534, + "acc_norm_stderr": 0.004976067726432564 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5730994152046783, + "acc_stderr": 0.03793620616529916, + "acc_norm": 0.5730994152046783, + "acc_norm_stderr": 0.03793620616529916 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6407766990291263, + "acc_stderr": 0.04750458399041697, + "acc_norm": 0.6407766990291263, + "acc_norm_stderr": 0.04750458399041697 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.561941251596424, + "acc_stderr": 0.017742232238257254, + "acc_norm": 0.561941251596424, + "acc_norm_stderr": 0.017742232238257254 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4222222222222222, + "acc_stderr": 0.04266763404099582, + "acc_norm": 0.4222222222222222, + "acc_norm_stderr": 0.04266763404099582 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.46808510638297873, 
+ "acc_stderr": 0.03261936918467382, + "acc_norm": 0.46808510638297873, + "acc_norm_stderr": 0.03261936918467382 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42168674698795183, + "acc_stderr": 0.03844453181770917, + "acc_norm": 0.42168674698795183, + "acc_norm_stderr": 0.03844453181770917 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4790996784565916, + "acc_stderr": 0.028373270961069414, + "acc_norm": 0.4790996784565916, + "acc_norm_stderr": 0.028373270961069414 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.515695067264574, + "acc_stderr": 0.0335412657542081, + "acc_norm": 0.515695067264574, + "acc_norm_stderr": 0.0335412657542081 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48854961832061067, + "acc_stderr": 0.043841400240780176, + "acc_norm": 0.48854961832061067, + "acc_norm_stderr": 0.043841400240780176 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.48, + "acc_stderr": 0.05021167315686779, + "acc_norm": 0.48, + "acc_norm_stderr": 0.05021167315686779 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6262626262626263, + "acc_stderr": 0.03446897738659333, + "acc_norm": 0.6262626262626263, + "acc_norm_stderr": 0.03446897738659333 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4482758620689655, + "acc_stderr": 0.04144311810878151, + "acc_norm": 0.4482758620689655, + "acc_norm_stderr": 0.04144311810878151 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.04158307533083286, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.04158307533083286 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4579831932773109, + "acc_stderr": 0.03236361111951941, + "acc_norm": 0.4579831932773109, + "acc_norm_stderr": 0.03236361111951941 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.45897435897435895, + "acc_stderr": 0.025265525491284295, + "acc_norm": 0.45897435897435895, + "acc_norm_stderr": 0.025265525491284295 + }, + 
"harness|ko_mmlu_computer_security|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5277777777777778, + "acc_stderr": 0.04826217294139894, + "acc_norm": 0.5277777777777778, + "acc_norm_stderr": 0.04826217294139894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3694581280788177, + "acc_stderr": 0.03395970381998574, + "acc_norm": 0.3694581280788177, + "acc_norm_stderr": 0.03395970381998574 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.46774193548387094, + "acc_stderr": 0.02838474778881333, + "acc_norm": 0.46774193548387094, + "acc_norm_stderr": 0.02838474778881333 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7264957264957265, + "acc_stderr": 0.029202540153431187, + "acc_norm": 0.7264957264957265, + "acc_norm_stderr": 0.029202540153431187 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.47924528301886793, + "acc_stderr": 0.030746349975723463, + "acc_norm": 0.47924528301886793, + "acc_norm_stderr": 0.030746349975723463 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.0478833976870286, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.0478833976870286 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.02831753349606647, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.02831753349606647 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + "acc_stderr": 0.03684881521389023, + "acc_norm": 0.2847682119205298, + "acc_norm_stderr": 0.03684881521389023 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6318407960199005, + "acc_stderr": 0.03410410565495302, + "acc_norm": 0.6318407960199005, + "acc_norm_stderr": 
0.03410410565495302 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.36416184971098264, + "acc_stderr": 0.03669072477416907, + "acc_norm": 0.36416184971098264, + "acc_norm_stderr": 0.03669072477416907 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.02510742548113728, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.02510742548113728 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4027777777777778, + "acc_stderr": 0.04101405519842426, + "acc_norm": 0.4027777777777778, + "acc_norm_stderr": 0.04101405519842426 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.63, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.63, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5260115606936416, + "acc_stderr": 0.026882643434022895, + "acc_norm": 0.5260115606936416, + "acc_norm_stderr": 0.026882643434022895 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4601226993865031, + "acc_stderr": 0.0391585729143697, + "acc_norm": 0.4601226993865031, + "acc_norm_stderr": 0.0391585729143697 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.49382716049382713, + "acc_stderr": 0.027818623962583302, + "acc_norm": 0.49382716049382713, + "acc_norm_stderr": 0.027818623962583302 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5492227979274611, + "acc_stderr": 0.03590910952235524, + "acc_norm": 0.5492227979274611, + "acc_norm_stderr": 0.03590910952235524 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2894736842105263, + "acc_stderr": 0.04266339443159394, + "acc_norm": 0.2894736842105263, + 
"acc_norm_stderr": 0.04266339443159394 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5211009174311927, + "acc_stderr": 0.021418224754264643, + "acc_norm": 0.5211009174311927, + "acc_norm_stderr": 0.021418224754264643 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.042857142857142816, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.042857142857142816 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.48366013071895425, + "acc_stderr": 0.028614624752805407, + "acc_norm": 0.48366013071895425, + "acc_norm_stderr": 0.028614624752805407 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.47, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.47, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6776859504132231, + "acc_stderr": 0.04266416363352167, + "acc_norm": 0.6776859504132231, + "acc_norm_stderr": 0.04266416363352167 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.48026315789473684, + "acc_stderr": 0.040657710025626057, + "acc_norm": 0.48026315789473684, + "acc_norm_stderr": 0.040657710025626057 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.39215686274509803, + "acc_stderr": 0.019751726508762626, + "acc_norm": 0.39215686274509803, + "acc_norm_stderr": 0.019751726508762626 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2695035460992908, + "acc_stderr": 0.026469036818590634, + "acc_norm": 0.2695035460992908, + "acc_norm_stderr": 0.026469036818590634 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.04697113923010213, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.04697113923010213 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3425925925925926, + "acc_stderr": 0.032365852526021574, + "acc_norm": 0.3425925925925926, + "acc_norm_stderr": 0.032365852526021574 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 
0.18435754189944134, + "acc_stderr": 0.012969152811883447, + "acc_norm": 0.18435754189944134, + "acc_norm_stderr": 0.012969152811883447 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.59, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.59, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3860294117647059, + "acc_stderr": 0.029573269134411124, + "acc_norm": 0.3860294117647059, + "acc_norm_stderr": 0.029573269134411124 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5714285714285714, + "acc_stderr": 0.031680911612338825, + "acc_norm": 0.5714285714285714, + "acc_norm_stderr": 0.031680911612338825 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6582278481012658, + "acc_stderr": 0.030874537537553617, + "acc_norm": 0.6582278481012658, + "acc_norm_stderr": 0.030874537537553617 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3363754889178618, + "acc_stderr": 0.012067083079452225, + "acc_norm": 0.3363754889178618, + "acc_norm_stderr": 0.012067083079452225 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5098039215686274, + "acc_stderr": 0.035086373586305716, + "acc_norm": 0.5098039215686274, + "acc_norm_stderr": 0.035086373586305716 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5818181818181818, + "acc_stderr": 0.03851716319398393, + "acc_norm": 0.5818181818181818, + "acc_norm_stderr": 0.03851716319398393 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3047735618115055, + "mc1_stderr": 0.016114124156882473, + "mc2": 0.4780289931552814, + "mc2_stderr": 0.015543517557297408 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4757969303423849, + "acc_stderr": 0.017170202466520748, + "acc_norm": 0.5407319952774499, + "acc_norm_stderr": 0.01713321827653767 + } + }, + "versions": { + 
"all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + 
"harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Minirecord/Mini_synatra_7b_03", + "model_sha": "6cd5c432930e923b0d73453fb22f817726da99bc", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Minirecord/llama13b_2s_dpo/result_2023-12-15 08:29:12.json b/Minirecord/llama13b_2s_dpo/result_2023-12-15 08:29:12.json new file mode 100644 index 0000000000000000000000000000000000000000..127546cd80ce6b9b890fa22ec698c2d37dca4ece --- /dev/null +++ b/Minirecord/llama13b_2s_dpo/result_2023-12-15 08:29:12.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4232081911262799, + "acc_stderr": 0.014438036220848029, + "acc_norm": 0.4812286689419795, + "acc_norm_stderr": 0.014601090150633964 + }, + "harness|ko_hellaswag|10": { + "acc": 0.42850029874526985, + "acc_stderr": 0.004938500303990289, + "acc_norm": 0.5737900816570405, + "acc_norm_stderr": 0.004935143791573811 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.543859649122807, + "acc_stderr": 0.03820042586602967, + "acc_norm": 0.543859649122807, + "acc_norm_stderr": 0.03820042586602967 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5145631067961165, 
+ "acc_stderr": 0.04948637324026637, + "acc_norm": 0.5145631067961165, + "acc_norm_stderr": 0.04948637324026637 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5274584929757343, + "acc_stderr": 0.017852981266633938, + "acc_norm": 0.5274584929757343, + "acc_norm_stderr": 0.017852981266633938 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.42962962962962964, + "acc_stderr": 0.04276349494376599, + "acc_norm": 0.42962962962962964, + "acc_norm_stderr": 0.04276349494376599 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4085106382978723, + "acc_stderr": 0.03213418026701576, + "acc_norm": 0.4085106382978723, + "acc_norm_stderr": 0.03213418026701576 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3855421686746988, + "acc_stderr": 0.03789134424611548, + "acc_norm": 0.3855421686746988, + "acc_norm_stderr": 0.03789134424611548 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.47266881028938906, + "acc_stderr": 0.028355633568328167, + "acc_norm": 0.47266881028938906, + "acc_norm_stderr": 0.028355633568328167 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5022421524663677, + "acc_stderr": 0.033557465352232634, + "acc_norm": 0.5022421524663677, + "acc_norm_stderr": 0.033557465352232634 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5038167938931297, + "acc_stderr": 0.04385162325601553, + "acc_norm": 0.5038167938931297, + "acc_norm_stderr": 0.04385162325601553 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.44, + "acc_stderr": 0.0498887651569859, + "acc_norm": 0.44, + "acc_norm_stderr": 0.0498887651569859 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5606060606060606, + "acc_stderr": 0.035360859475294805, + "acc_norm": 0.5606060606060606, + "acc_norm_stderr": 0.035360859475294805 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 
0.46206896551724136, + "acc_stderr": 0.04154659671707546, + "acc_norm": 0.46206896551724136, + "acc_norm_stderr": 0.04154659671707546 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.17647058823529413, + "acc_stderr": 0.03793281185307811, + "acc_norm": 0.17647058823529413, + "acc_norm_stderr": 0.03793281185307811 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4957983193277311, + "acc_stderr": 0.03247734334448111, + "acc_norm": 0.4957983193277311, + "acc_norm_stderr": 0.03247734334448111 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.44871794871794873, + "acc_stderr": 0.02521731518484648, + "acc_norm": 0.44871794871794873, + "acc_norm_stderr": 0.02521731518484648 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39408866995073893, + "acc_stderr": 0.03438157967036544, + "acc_norm": 0.39408866995073893, + "acc_norm_stderr": 0.03438157967036544 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.44516129032258067, + "acc_stderr": 0.028272410186214906, + "acc_norm": 0.44516129032258067, + "acc_norm_stderr": 0.028272410186214906 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6239316239316239, + "acc_stderr": 0.03173393632969482, + "acc_norm": 0.6239316239316239, + "acc_norm_stderr": 0.03173393632969482 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4377358490566038, + "acc_stderr": 0.030533338430467512, + "acc_norm": 0.4377358490566038, + "acc_norm_stderr": 0.030533338430467512 + }, + 
"harness|ko_mmlu_public_relations|5": { + "acc": 0.5636363636363636, + "acc_stderr": 0.04750185058907296, + "acc_norm": 0.5636363636363636, + "acc_norm_stderr": 0.04750185058907296 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.027940457136228405, + "acc_norm": 0.3, + "acc_norm_stderr": 0.027940457136228405 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.03757949922943343, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.03757949922943343 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.572139303482587, + "acc_stderr": 0.03498541988407795, + "acc_norm": 0.572139303482587, + "acc_norm_stderr": 0.03498541988407795 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4161849710982659, + "acc_stderr": 0.037585177754049466, + "acc_norm": 0.4161849710982659, + "acc_norm_stderr": 0.037585177754049466 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.023068188848261114, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.023068188848261114 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4027777777777778, + "acc_stderr": 0.04101405519842425, + "acc_norm": 0.4027777777777778, + "acc_norm_stderr": 0.04101405519842425 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.67, + "acc_stderr": 0.04725815626252607, + "acc_norm": 0.67, + "acc_norm_stderr": 0.04725815626252607 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5028901734104047, + "acc_stderr": 0.02691864538323901, + "acc_norm": 0.5028901734104047, + "acc_norm_stderr": 0.02691864538323901 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.44171779141104295, + "acc_stderr": 0.03901591825836184, + "acc_norm": 0.44171779141104295, + "acc_norm_stderr": 0.03901591825836184 + }, + 
"harness|ko_mmlu_prehistory|5": { + "acc": 0.49382716049382713, + "acc_stderr": 0.027818623962583295, + "acc_norm": 0.49382716049382713, + "acc_norm_stderr": 0.027818623962583295 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5284974093264249, + "acc_stderr": 0.036025735712884414, + "acc_norm": 0.5284974093264249, + "acc_norm_stderr": 0.036025735712884414 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.039994238792813365, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.039994238792813365 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5688073394495413, + "acc_stderr": 0.021233365030319567, + "acc_norm": 0.5688073394495413, + "acc_norm_stderr": 0.021233365030319567 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.0404061017820884, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.0404061017820884 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.434640522875817, + "acc_stderr": 0.02838425670488304, + "acc_norm": 0.434640522875817, + "acc_norm_stderr": 0.02838425670488304 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.42, + "acc_stderr": 0.04960449637488583, + "acc_norm": 0.42, + "acc_norm_stderr": 0.04960449637488583 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5537190082644629, + "acc_stderr": 0.0453793517794788, + "acc_norm": 0.5537190082644629, + "acc_norm_stderr": 0.0453793517794788 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.39473684210526316, + "acc_stderr": 0.039777499346220734, + "acc_norm": 0.39473684210526316, + "acc_norm_stderr": 0.039777499346220734 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3660130718954248, + "acc_stderr": 0.019488025745529672, + "acc_norm": 0.3660130718954248, + 
"acc_norm_stderr": 0.019488025745529672 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3262411347517731, + "acc_stderr": 0.02796845304356317, + "acc_norm": 0.3262411347517731, + "acc_norm_stderr": 0.02796845304356317 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25, + "acc_stderr": 0.04109974682633932, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04109974682633932 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.37962962962962965, + "acc_stderr": 0.03309682581119035, + "acc_norm": 0.37962962962962965, + "acc_norm_stderr": 0.03309682581119035 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3786764705882353, + "acc_stderr": 0.029465133639776122, + "acc_norm": 0.3786764705882353, + "acc_norm_stderr": 0.029465133639776122 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5020408163265306, + "acc_stderr": 0.0320089533497105, + "acc_norm": 0.5020408163265306, + "acc_norm_stderr": 0.0320089533497105 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6497890295358649, + "acc_stderr": 0.03105239193758435, + "acc_norm": 0.6497890295358649, + "acc_norm_stderr": 0.03105239193758435 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.33572359843546284, + "acc_stderr": 0.012061304157664621, + "acc_norm": 0.33572359843546284, + "acc_norm_stderr": 0.012061304157664621 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5245098039215687, + "acc_stderr": 0.03505093194348798, + "acc_norm": 
0.5245098039215687, + "acc_norm_stderr": 0.03505093194348798 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5636363636363636, + "acc_stderr": 0.03872592983524754, + "acc_norm": 0.5636363636363636, + "acc_norm_stderr": 0.03872592983524754 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.31456548347613217, + "mc1_stderr": 0.016255241993179185, + "mc2": 0.48394875080657285, + "mc2_stderr": 0.015560465036731122 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5064935064935064, + "acc_stderr": 0.017188904359077307, + "acc_norm": 0.615112160566706, + "acc_norm_stderr": 0.01672857970149866 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + 
"harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Minirecord/llama13b_2s_dpo", + "model_sha": "feec8b40b923b8862a98a726d581770210316715", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Minirecord/llama13b_dpo_loss0_OTL/result_2023-12-07 02:09:10.json b/Minirecord/llama13b_dpo_loss0_OTL/result_2023-12-07 02:09:10.json new file mode 100644 index 0000000000000000000000000000000000000000..ad2066757d0f9d52e4b164aeb314150598ec9303 --- /dev/null +++ b/Minirecord/llama13b_dpo_loss0_OTL/result_2023-12-07 
02:09:10.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4274744027303754, + "acc_stderr": 0.014456862944650652, + "acc_norm": 0.47525597269624575, + "acc_norm_stderr": 0.01459348769493774 + }, + "harness|ko_hellaswag|10": { + "acc": 0.43527185819557856, + "acc_stderr": 0.004947793051042669, + "acc_norm": 0.5808603863772157, + "acc_norm_stderr": 0.004924098711864585 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5321637426900585, + "acc_stderr": 0.03826882417660369, + "acc_norm": 0.5321637426900585, + "acc_norm_stderr": 0.03826882417660369 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6019417475728155, + "acc_stderr": 0.04846748253977239, + "acc_norm": 0.6019417475728155, + "acc_norm_stderr": 0.04846748253977239 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5351213282247765, + "acc_stderr": 0.017835798806290645, + "acc_norm": 0.5351213282247765, + "acc_norm_stderr": 0.017835798806290645 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45185185185185184, + "acc_stderr": 0.04299268905480863, + "acc_norm": 0.45185185185185184, + "acc_norm_stderr": 0.04299268905480863 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421255, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421255 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.43829787234042555, + "acc_stderr": 0.03243618636108101, + "acc_norm": 0.43829787234042555, + "acc_norm_stderr": 0.03243618636108101 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.45180722891566266, + "acc_stderr": 0.03874371556587953, + "acc_norm": 0.45180722891566266, + "acc_norm_stderr": 0.03874371556587953 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5466237942122186, + "acc_stderr": 0.028274359854894245, + "acc_norm": 0.5466237942122186, + "acc_norm_stderr": 0.028274359854894245 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5246636771300448, + "acc_stderr": 0.033516951676526276, + "acc_norm": 
0.5246636771300448, + "acc_norm_stderr": 0.033516951676526276 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.45038167938931295, + "acc_stderr": 0.04363643698524779, + "acc_norm": 0.45038167938931295, + "acc_norm_stderr": 0.04363643698524779 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5808080808080808, + "acc_stderr": 0.035155207286704175, + "acc_norm": 0.5808080808080808, + "acc_norm_stderr": 0.035155207286704175 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4068965517241379, + "acc_stderr": 0.040937939812662374, + "acc_norm": 0.4068965517241379, + "acc_norm_stderr": 0.040937939812662374 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.04220773659171452, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.04220773659171452 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4789915966386555, + "acc_stderr": 0.032449808499900284, + "acc_norm": 0.4789915966386555, + "acc_norm_stderr": 0.032449808499900284 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4512820512820513, + "acc_stderr": 0.02523038123893483, + "acc_norm": 0.4512820512820513, + "acc_norm_stderr": 0.02523038123893483 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.53, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.53, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4088669950738916, + "acc_stderr": 
0.03459058815883232, + "acc_norm": 0.4088669950738916, + "acc_norm_stderr": 0.03459058815883232 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4645161290322581, + "acc_stderr": 0.028372287797962952, + "acc_norm": 0.4645161290322581, + "acc_norm_stderr": 0.028372287797962952 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6367521367521367, + "acc_stderr": 0.03150712523091264, + "acc_norm": 0.6367521367521367, + "acc_norm_stderr": 0.03150712523091264 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4641509433962264, + "acc_stderr": 0.030693675018458006, + "acc_norm": 0.4641509433962264, + "acc_norm_stderr": 0.030693675018458006 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5818181818181818, + "acc_stderr": 0.04724577405731572, + "acc_norm": 0.5818181818181818, + "acc_norm_stderr": 0.04724577405731572 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.02730914058823017, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.02730914058823017 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33774834437086093, + "acc_stderr": 0.03861557546255169, + "acc_norm": 0.33774834437086093, + "acc_norm_stderr": 0.03861557546255169 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5920398009950248, + "acc_stderr": 0.03475116365194092, + "acc_norm": 0.5920398009950248, + "acc_norm_stderr": 0.03475116365194092 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.42196531791907516, + "acc_stderr": 0.0376574669386515, + "acc_norm": 0.42196531791907516, + "acc_norm_stderr": 0.0376574669386515 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2830687830687831, + "acc_stderr": 0.023201392938194978, + "acc_norm": 0.2830687830687831, + "acc_norm_stderr": 0.023201392938194978 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3958333333333333, + "acc_stderr": 0.04089465449325583, + "acc_norm": 0.3958333333333333, + "acc_norm_stderr": 0.04089465449325583 + }, + 
"harness|ko_mmlu_college_chemistry|5": { + "acc": 0.34, + "acc_stderr": 0.047609522856952365, + "acc_norm": 0.34, + "acc_norm_stderr": 0.047609522856952365 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.64, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.64, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5202312138728323, + "acc_stderr": 0.026897049996382868, + "acc_norm": 0.5202312138728323, + "acc_norm_stderr": 0.026897049996382868 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.49079754601226994, + "acc_stderr": 0.039277056007874414, + "acc_norm": 0.49079754601226994, + "acc_norm_stderr": 0.039277056007874414 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.027801656212323667, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.027801656212323667 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5803108808290155, + "acc_stderr": 0.035615873276858834, + "acc_norm": 0.5803108808290155, + "acc_norm_stderr": 0.035615873276858834 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.0404933929774814, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.0404933929774814 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5688073394495413, + "acc_stderr": 0.021233365030319563, + "acc_norm": 0.5688073394495413, + "acc_norm_stderr": 0.021233365030319563 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.25396825396825395, + "acc_stderr": 0.03893259610604675, + "acc_norm": 0.25396825396825395, + "acc_norm_stderr": 0.03893259610604675 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.38562091503267976, + "acc_stderr": 0.027870745278290317, + "acc_norm": 0.38562091503267976, + "acc_norm_stderr": 
0.027870745278290317 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6611570247933884, + "acc_stderr": 0.043207678075366705, + "acc_norm": 0.6611570247933884, + "acc_norm_stderr": 0.043207678075366705 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3815789473684211, + "acc_stderr": 0.03953173377749194, + "acc_norm": 0.3815789473684211, + "acc_norm_stderr": 0.03953173377749194 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.369281045751634, + "acc_stderr": 0.019524316744866342, + "acc_norm": 0.369281045751634, + "acc_norm_stderr": 0.019524316744866342 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.02812163604063988, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.02812163604063988 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2767857142857143, + "acc_stderr": 0.04246624336697624, + "acc_norm": 0.2767857142857143, + "acc_norm_stderr": 0.04246624336697624 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.03362277436608044, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.03362277436608044 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4117647058823529, + "acc_stderr": 0.029896163033125478, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 
0.029896163033125478 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5102040816326531, + "acc_stderr": 0.03200255347893783, + "acc_norm": 0.5102040816326531, + "acc_norm_stderr": 0.03200255347893783 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6329113924050633, + "acc_stderr": 0.03137624072561619, + "acc_norm": 0.6329113924050633, + "acc_norm_stderr": 0.03137624072561619 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.35723598435462844, + "acc_stderr": 0.0122386157503165, + "acc_norm": 0.35723598435462844, + "acc_norm_stderr": 0.0122386157503165 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5294117647058824, + "acc_stderr": 0.03503235296367993, + "acc_norm": 0.5294117647058824, + "acc_norm_stderr": 0.03503235296367993 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5575757575757576, + "acc_stderr": 0.03878372113711276, + "acc_norm": 0.5575757575757576, + "acc_norm_stderr": 0.03878372113711276 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2692778457772338, + "mc1_stderr": 0.015528566637087295, + "mc2": 0.43857065442012383, + "mc2_stderr": 0.015188413191278269 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5525383707201889, + "acc_stderr": 0.01709519030150058, + "acc_norm": 0.6269185360094451, + "acc_norm_stderr": 0.01662731827513745 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + 
"harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + 
"harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Minirecord/llama13b_dpo_loss0_OTL", + "model_sha": "d0fe605aa90c5f43e188cdfa7e07456f4ad3eef9", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Minirecord/llama13b_test02/result_2023-12-01 09:43:54.json b/Minirecord/llama13b_test02/result_2023-12-01 09:43:54.json new file mode 100644 index 0000000000000000000000000000000000000000..568e737dd8803c273658bfc3c4aa9bd6fded367a --- /dev/null +++ b/Minirecord/llama13b_test02/result_2023-12-01 09:43:54.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.42406143344709896, + "acc_stderr": 0.0144418896274644, + "acc_norm": 0.4709897610921502, + "acc_norm_stderr": 0.014586776355294324 + }, + "harness|ko_hellaswag|10": { + "acc": 0.43507269468233417, + "acc_stderr": 0.0049475331587120995, + "acc_norm": 0.5792670782712607, + "acc_norm_stderr": 0.004926678108601345 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5146198830409356, + "acc_stderr": 0.038331852752130254, + "acc_norm": 0.5146198830409356, + "acc_norm_stderr": 0.038331852752130254 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5825242718446602, + "acc_stderr": 0.048828405482122375, + "acc_norm": 0.5825242718446602, + "acc_norm_stderr": 0.048828405482122375 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5300127713920817, + "acc_stderr": 0.017847723086649073, + "acc_norm": 0.5300127713920817, + "acc_norm_stderr": 0.017847723086649073 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.04309732901036354, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.04309732901036354 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421255, + "acc_norm": 0.28, + "acc_norm_stderr": 
0.045126085985421255 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.425531914893617, + "acc_stderr": 0.03232146916224469, + "acc_norm": 0.425531914893617, + "acc_norm_stderr": 0.03232146916224469 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.45180722891566266, + "acc_stderr": 0.03874371556587953, + "acc_norm": 0.45180722891566266, + "acc_norm_stderr": 0.03874371556587953 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5401929260450161, + "acc_stderr": 0.028306190403305696, + "acc_norm": 0.5401929260450161, + "acc_norm_stderr": 0.028306190403305696 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5022421524663677, + "acc_stderr": 0.033557465352232634, + "acc_norm": 0.5022421524663677, + "acc_norm_stderr": 0.033557465352232634 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4351145038167939, + "acc_stderr": 0.04348208051644858, + "acc_norm": 0.4351145038167939, + "acc_norm_stderr": 0.04348208051644858 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5909090909090909, + "acc_stderr": 0.03502975799413007, + "acc_norm": 0.5909090909090909, + "acc_norm_stderr": 0.03502975799413007 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.04104269211806232, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.04104269211806232 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.043364327079931785, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.043364327079931785 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.46638655462184875, + "acc_stderr": 0.03240501447690071, + "acc_norm": 0.46638655462184875, + "acc_norm_stderr": 0.03240501447690071 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.44871794871794873, + "acc_stderr": 
0.02521731518484648, + "acc_norm": 0.44871794871794873, + "acc_norm_stderr": 0.02521731518484648 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.53, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.53, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.04803752235190192, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.04803752235190192 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39408866995073893, + "acc_stderr": 0.03438157967036543, + "acc_norm": 0.39408866995073893, + "acc_norm_stderr": 0.03438157967036543 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.46774193548387094, + "acc_stderr": 0.02838474778881333, + "acc_norm": 0.46774193548387094, + "acc_norm_stderr": 0.02838474778881333 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6452991452991453, + "acc_stderr": 0.031342504862454025, + "acc_norm": 0.6452991452991453, + "acc_norm_stderr": 0.031342504862454025 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.45660377358490567, + "acc_stderr": 0.03065674869673943, + "acc_norm": 0.45660377358490567, + "acc_norm_stderr": 0.03065674869673943 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5727272727272728, + "acc_stderr": 0.04738198703545483, + "acc_norm": 0.5727272727272728, + "acc_norm_stderr": 0.04738198703545483 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2814814814814815, + "acc_stderr": 0.027420019350945273, + "acc_norm": 0.2814814814814815, + "acc_norm_stderr": 0.027420019350945273 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.038020397601079024, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.038020397601079024 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 
0.5771144278606966, + "acc_stderr": 0.034932317774212816, + "acc_norm": 0.5771144278606966, + "acc_norm_stderr": 0.034932317774212816 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.42196531791907516, + "acc_stderr": 0.0376574669386515, + "acc_norm": 0.42196531791907516, + "acc_norm_stderr": 0.0376574669386515 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.28835978835978837, + "acc_stderr": 0.023330654054535892, + "acc_norm": 0.28835978835978837, + "acc_norm_stderr": 0.023330654054535892 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3958333333333333, + "acc_stderr": 0.040894654493255835, + "acc_norm": 0.3958333333333333, + "acc_norm_stderr": 0.040894654493255835 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.63, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.63, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5144508670520231, + "acc_stderr": 0.02690784985628254, + "acc_norm": 0.5144508670520231, + "acc_norm_stderr": 0.02690784985628254 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.49693251533742333, + "acc_stderr": 0.03928297078179662, + "acc_norm": 0.49693251533742333, + "acc_norm_stderr": 0.03928297078179662 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.027801656212323667, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.027801656212323667 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5699481865284974, + "acc_stderr": 0.03572954333144809, + "acc_norm": 0.5699481865284974, + "acc_norm_stderr": 0.03572954333144809 + }, + 
"harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.042270544512321984, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.042270544512321984 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5651376146788991, + "acc_stderr": 0.021254631465609273, + "acc_norm": 0.5651376146788991, + "acc_norm_stderr": 0.021254631465609273 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.24603174603174602, + "acc_stderr": 0.03852273364924315, + "acc_norm": 0.24603174603174602, + "acc_norm_stderr": 0.03852273364924315 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.02782610930728369, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.02782610930728369 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6528925619834711, + "acc_stderr": 0.04345724570292534, + "acc_norm": 0.6528925619834711, + "acc_norm_stderr": 0.04345724570292534 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3815789473684211, + "acc_stderr": 0.03953173377749194, + "acc_norm": 0.3815789473684211, + "acc_norm_stderr": 0.03953173377749194 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3741830065359477, + "acc_stderr": 0.01957695312208885, + "acc_norm": 0.3741830065359477, + "acc_norm_stderr": 0.01957695312208885 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3262411347517731, + "acc_stderr": 0.02796845304356317, + "acc_norm": 0.3262411347517731, + "acc_norm_stderr": 0.02796845304356317 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.04287858751340456, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.04287858751340456 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.39814814814814814, + "acc_stderr": 0.033384734032074016, + "acc_norm": 
0.39814814814814814, + "acc_norm_stderr": 0.033384734032074016 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.47, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.47, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.41911764705882354, + "acc_stderr": 0.029972807170464622, + "acc_norm": 0.41911764705882354, + "acc_norm_stderr": 0.029972807170464622 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5020408163265306, + "acc_stderr": 0.0320089533497105, + "acc_norm": 0.5020408163265306, + "acc_norm_stderr": 0.0320089533497105 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6286919831223629, + "acc_stderr": 0.03145068600744859, + "acc_norm": 0.6286919831223629, + "acc_norm_stderr": 0.03145068600744859 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.36114732724902215, + "acc_stderr": 0.01226793547751903, + "acc_norm": 0.36114732724902215, + "acc_norm_stderr": 0.01226793547751903 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5392156862745098, + "acc_stderr": 0.03498501649369527, + "acc_norm": 0.5392156862745098, + "acc_norm_stderr": 0.03498501649369527 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5333333333333333, + "acc_stderr": 0.03895658065271847, + "acc_norm": 0.5333333333333333, + "acc_norm_stderr": 0.03895658065271847 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.26560587515299877, + "mc1_stderr": 0.01546102762725359, + "mc2": 0.43217399968615017, + "mc2_stderr": 0.015131631313211043 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5442739079102715, + 
"acc_stderr": 0.01712282914329265, + "acc_norm": 0.6245572609208973, + "acc_norm_stderr": 0.016648411589511088 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + 
"harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Minirecord/llama13b_test02", + "model_sha": "97748555478dff9ff9c3e05e0b72e02f7e04bdd0", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Minirecord/minyi_5k_6B/result_2023-12-27 06:48:32.json b/Minirecord/minyi_5k_6B/result_2023-12-27 06:48:32.json new file mode 100644 index 0000000000000000000000000000000000000000..34d2d56c4d0c91bc4636534d159846961410b05b --- /dev/null +++ b/Minirecord/minyi_5k_6B/result_2023-12-27 06:48:32.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3703071672354949, + "acc_stderr": 0.01411129875167495, + "acc_norm": 0.43686006825938567, + "acc_norm_stderr": 0.014494421584256527 + }, + "harness|ko_hellaswag|10": { + "acc": 0.40201155148376816, + "acc_stderr": 0.004893022130229093, + "acc_norm": 0.536247759410476, + "acc_norm_stderr": 0.004976651989757641 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.47368421052631576, + "acc_stderr": 0.03829509868994727, + "acc_norm": 0.47368421052631576, + 
"acc_norm_stderr": 0.03829509868994727 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5728155339805825, + "acc_stderr": 0.04897957737781168, + "acc_norm": 0.5728155339805825, + "acc_norm_stderr": 0.04897957737781168 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5517241379310345, + "acc_stderr": 0.017784034534992436, + "acc_norm": 0.5517241379310345, + "acc_norm_stderr": 0.017784034534992436 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.5185185185185185, + "acc_stderr": 0.043163785995113245, + "acc_norm": 0.5185185185185185, + "acc_norm_stderr": 0.043163785995113245 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621503, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621503 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.37446808510638296, + "acc_stderr": 0.031639106653672915, + "acc_norm": 0.37446808510638296, + "acc_norm_stderr": 0.031639106653672915 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4397590361445783, + "acc_stderr": 0.03864139923699122, + "acc_norm": 0.4397590361445783, + "acc_norm_stderr": 0.03864139923699122 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.48231511254019294, + "acc_stderr": 0.02838032284907713, + "acc_norm": 0.48231511254019294, + "acc_norm_stderr": 0.02838032284907713 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.452914798206278, + "acc_stderr": 0.033408675019233246, + "acc_norm": 0.452914798206278, + "acc_norm_stderr": 0.033408675019233246 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5572519083969466, + "acc_stderr": 0.04356447202665069, + "acc_norm": 0.5572519083969466, + "acc_norm_stderr": 0.04356447202665069 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6111111111111112, + "acc_stderr": 0.0347327959083696, + "acc_norm": 0.6111111111111112, + 
"acc_norm_stderr": 0.0347327959083696 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5172413793103449, + "acc_stderr": 0.04164188720169375, + "acc_norm": 0.5172413793103449, + "acc_norm_stderr": 0.04164188720169375 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.04158307533083286, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.04158307533083286 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.453781512605042, + "acc_stderr": 0.03233943468182087, + "acc_norm": 0.453781512605042, + "acc_norm_stderr": 0.03233943468182087 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.025294608023986476, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.025294608023986476 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5370370370370371, + "acc_stderr": 0.04820403072760628, + "acc_norm": 0.5370370370370371, + "acc_norm_stderr": 0.04820403072760628 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4088669950738916, + "acc_stderr": 0.034590588158832314, + "acc_norm": 0.4088669950738916, + "acc_norm_stderr": 0.034590588158832314 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4806451612903226, + "acc_stderr": 0.0284226874043121, + "acc_norm": 0.4806451612903226, + "acc_norm_stderr": 0.0284226874043121 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6923076923076923, + "acc_stderr": 0.030236389942173092, + "acc_norm": 0.6923076923076923, + "acc_norm_stderr": 0.030236389942173092 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5169811320754717, + "acc_stderr": 
0.030755120364119905, + "acc_norm": 0.5169811320754717, + "acc_norm_stderr": 0.030755120364119905 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5181818181818182, + "acc_stderr": 0.04785964010794915, + "acc_norm": 0.5181818181818182, + "acc_norm_stderr": 0.04785964010794915 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3037037037037037, + "acc_stderr": 0.028037929969114982, + "acc_norm": 0.3037037037037037, + "acc_norm_stderr": 0.028037929969114982 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3443708609271523, + "acc_stderr": 0.038796870240733264, + "acc_norm": 0.3443708609271523, + "acc_norm_stderr": 0.038796870240733264 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6019900497512438, + "acc_stderr": 0.034611994290400135, + "acc_norm": 0.6019900497512438, + "acc_norm_stderr": 0.034611994290400135 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.45664739884393063, + "acc_stderr": 0.03798106566014498, + "acc_norm": 0.45664739884393063, + "acc_norm_stderr": 0.03798106566014498 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.335978835978836, + "acc_stderr": 0.024326310529149135, + "acc_norm": 0.335978835978836, + "acc_norm_stderr": 0.024326310529149135 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4375, + "acc_stderr": 0.04148415739394154, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.04148415739394154 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237101, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237101 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5375722543352601, + "acc_stderr": 0.02684298551961537, + "acc_norm": 0.5375722543352601, + "acc_norm_stderr": 0.02684298551961537 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4723926380368098, + 
"acc_stderr": 0.03922378290610991, + "acc_norm": 0.4723926380368098, + "acc_norm_stderr": 0.03922378290610991 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4845679012345679, + "acc_stderr": 0.027807490044276198, + "acc_norm": 0.4845679012345679, + "acc_norm_stderr": 0.027807490044276198 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6010362694300518, + "acc_stderr": 0.03533999094065696, + "acc_norm": 0.6010362694300518, + "acc_norm_stderr": 0.03533999094065696 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2982456140350877, + "acc_stderr": 0.04303684033537316, + "acc_norm": 0.2982456140350877, + "acc_norm_stderr": 0.04303684033537316 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6256880733944954, + "acc_stderr": 0.020748959408988327, + "acc_norm": 0.6256880733944954, + "acc_norm_stderr": 0.020748959408988327 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30158730158730157, + "acc_stderr": 0.041049472699033945, + "acc_norm": 0.30158730158730157, + "acc_norm_stderr": 0.041049472699033945 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.43790849673202614, + "acc_stderr": 0.028408302020332694, + "acc_norm": 0.43790849673202614, + "acc_norm_stderr": 0.028408302020332694 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.55, + "acc_stderr": 0.05, + "acc_norm": 0.55, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6694214876033058, + "acc_stderr": 0.04294340845212093, + "acc_norm": 0.6694214876033058, + "acc_norm_stderr": 0.04294340845212093 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.46710526315789475, + "acc_stderr": 0.040601270352363966, + "acc_norm": 0.46710526315789475, + "acc_norm_stderr": 0.040601270352363966 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 
0.3839869281045752, + "acc_stderr": 0.01967580813528152, + "acc_norm": 0.3839869281045752, + "acc_norm_stderr": 0.01967580813528152 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3475177304964539, + "acc_stderr": 0.02840662780959095, + "acc_norm": 0.3475177304964539, + "acc_norm_stderr": 0.02840662780959095 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.32142857142857145, + "acc_stderr": 0.04432804055291519, + "acc_norm": 0.32142857142857145, + "acc_norm_stderr": 0.04432804055291519 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.32407407407407407, + "acc_stderr": 0.03191923445686186, + "acc_norm": 0.32407407407407407, + "acc_norm_stderr": 0.03191923445686186 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.30837988826815643, + "acc_stderr": 0.015445716910998884, + "acc_norm": 0.30837988826815643, + "acc_norm_stderr": 0.015445716910998884 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4338235294117647, + "acc_stderr": 0.03010563657001663, + "acc_norm": 0.4338235294117647, + "acc_norm_stderr": 0.03010563657001663 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4122448979591837, + "acc_stderr": 0.03151236044674281, + "acc_norm": 0.4122448979591837, + "acc_norm_stderr": 0.03151236044674281 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6075949367088608, + "acc_stderr": 0.03178471874564729, + "acc_norm": 0.6075949367088608, + "acc_norm_stderr": 0.03178471874564729 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.34224250325945244, + "acc_stderr": 0.012117939998705878, + "acc_norm": 0.34224250325945244, + "acc_norm_stderr": 
0.012117939998705878 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5490196078431373, + "acc_stderr": 0.034924061041636124, + "acc_norm": 0.5490196078431373, + "acc_norm_stderr": 0.034924061041636124 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5636363636363636, + "acc_stderr": 0.03872592983524754, + "acc_norm": 0.5636363636363636, + "acc_norm_stderr": 0.03872592983524754 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2827417380660955, + "mc1_stderr": 0.015764770836777305, + "mc2": 0.42634355103551425, + "mc2_stderr": 0.0150819870012603 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5112160566706021, + "acc_stderr": 0.017186028469489287, + "acc_norm": 0.5820543093270366, + "acc_norm_stderr": 0.016957292005279703 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + 
"harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Minirecord/minyi_5k_6B", + "model_sha": "966f5dd17f723d62b8d5ab7387461eae5a048fc6", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Minirecord/minyi_6b/result_2023-12-07 10:34:18.json b/Minirecord/minyi_6b/result_2023-12-07 10:34:18.json new file mode 100644 index 
0000000000000000000000000000000000000000..ecbeedab897df9561d7a8cf98cb0caa5c6a8c1e7 --- /dev/null +++ b/Minirecord/minyi_6b/result_2023-12-07 10:34:18.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3651877133105802, + "acc_stderr": 0.0140702655192688, + "acc_norm": 0.4206484641638225, + "acc_norm_stderr": 0.014426211252508397 + }, + "harness|ko_hellaswag|10": { + "acc": 0.40509858593905596, + "acc_stderr": 0.004899078300184257, + "acc_norm": 0.5417247560246963, + "acc_norm_stderr": 0.004972377085916328 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.49707602339181284, + "acc_stderr": 0.03834759370936839, + "acc_norm": 0.49707602339181284, + "acc_norm_stderr": 0.03834759370936839 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5825242718446602, + "acc_stderr": 0.048828405482122375, + "acc_norm": 0.5825242718446602, + "acc_norm_stderr": 0.048828405482122375 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.545338441890166, + "acc_stderr": 0.017806304585052602, + "acc_norm": 0.545338441890166, + "acc_norm_stderr": 0.017806304585052602 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4740740740740741, + "acc_stderr": 0.04313531696750574, + "acc_norm": 0.4740740740740741, + "acc_norm_stderr": 0.04313531696750574 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.44680851063829785, + "acc_stderr": 0.0325005368436584, + "acc_norm": 0.44680851063829785, + "acc_norm_stderr": 0.0325005368436584 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.45180722891566266, + "acc_stderr": 0.03874371556587953, + "acc_norm": 0.45180722891566266, + "acc_norm_stderr": 0.03874371556587953 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5594855305466238, + "acc_stderr": 0.02819640057419743, + "acc_norm": 0.5594855305466238, + "acc_norm_stderr": 0.02819640057419743 + 
}, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4618834080717489, + "acc_stderr": 0.033460150119732274, + "acc_norm": 0.4618834080717489, + "acc_norm_stderr": 0.033460150119732274 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.549618320610687, + "acc_stderr": 0.04363643698524779, + "acc_norm": 0.549618320610687, + "acc_norm_stderr": 0.04363643698524779 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5808080808080808, + "acc_stderr": 0.035155207286704175, + "acc_norm": 0.5808080808080808, + "acc_norm_stderr": 0.035155207286704175 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.496551724137931, + "acc_stderr": 0.04166567577101579, + "acc_norm": 0.496551724137931, + "acc_norm_stderr": 0.04166567577101579 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.04488482852329017, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.04488482852329017 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.49159663865546216, + "acc_stderr": 0.03247390276569669, + "acc_norm": 0.49159663865546216, + "acc_norm_stderr": 0.03247390276569669 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.45384615384615384, + "acc_stderr": 0.025242770987126174, + "acc_norm": 0.45384615384615384, + "acc_norm_stderr": 0.025242770987126174 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5925925925925926, + "acc_stderr": 0.047500773411999854, + "acc_norm": 0.5925925925925926, + "acc_norm_stderr": 
0.047500773411999854 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3891625615763547, + "acc_stderr": 0.034304624161038716, + "acc_norm": 0.3891625615763547, + "acc_norm_stderr": 0.034304624161038716 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4838709677419355, + "acc_stderr": 0.02842920317672455, + "acc_norm": 0.4838709677419355, + "acc_norm_stderr": 0.02842920317672455 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7008547008547008, + "acc_stderr": 0.029996951858349483, + "acc_norm": 0.7008547008547008, + "acc_norm_stderr": 0.029996951858349483 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.47924528301886793, + "acc_stderr": 0.030746349975723463, + "acc_norm": 0.47924528301886793, + "acc_norm_stderr": 0.030746349975723463 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5363636363636364, + "acc_stderr": 0.04776449162396197, + "acc_norm": 0.5363636363636364, + "acc_norm_stderr": 0.04776449162396197 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.31851851851851853, + "acc_stderr": 0.028406533090608463, + "acc_norm": 0.31851851851851853, + "acc_norm_stderr": 0.028406533090608463 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3576158940397351, + "acc_stderr": 0.03913453431177258, + "acc_norm": 0.3576158940397351, + "acc_norm_stderr": 0.03913453431177258 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6268656716417911, + "acc_stderr": 0.034198326081760065, + "acc_norm": 0.6268656716417911, + "acc_norm_stderr": 0.034198326081760065 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.36416184971098264, + "acc_stderr": 0.03669072477416907, + "acc_norm": 0.36416184971098264, + "acc_norm_stderr": 0.03669072477416907 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.023809523809523864, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.023809523809523864 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 
0.4166666666666667, + "acc_stderr": 0.041227287076512825, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.041227287076512825 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.56, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.56, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5086705202312138, + "acc_stderr": 0.026915047355369804, + "acc_norm": 0.5086705202312138, + "acc_norm_stderr": 0.026915047355369804 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4662576687116564, + "acc_stderr": 0.039194155450484096, + "acc_norm": 0.4662576687116564, + "acc_norm_stderr": 0.039194155450484096 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.027815973433878014, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.027815973433878014 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5751295336787565, + "acc_stderr": 0.035674713352125395, + "acc_norm": 0.5751295336787565, + "acc_norm_stderr": 0.035674713352125395 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.32456140350877194, + "acc_stderr": 0.044045561573747685, + "acc_norm": 0.32456140350877194, + "acc_norm_stderr": 0.044045561573747685 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6091743119266055, + "acc_stderr": 0.020920058346111062, + "acc_norm": 0.6091743119266055, + "acc_norm_stderr": 0.020920058346111062 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30158730158730157, + "acc_stderr": 0.041049472699033945, + "acc_norm": 0.30158730158730157, + "acc_norm_stderr": 0.041049472699033945 + }, + 
"harness|ko_mmlu_nutrition|5": { + "acc": 0.4738562091503268, + "acc_stderr": 0.028590752958852394, + "acc_norm": 0.4738562091503268, + "acc_norm_stderr": 0.028590752958852394 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6446280991735537, + "acc_stderr": 0.0436923632657398, + "acc_norm": 0.6446280991735537, + "acc_norm_stderr": 0.0436923632657398 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.506578947368421, + "acc_stderr": 0.040685900502249704, + "acc_norm": 0.506578947368421, + "acc_norm_stderr": 0.040685900502249704 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.41830065359477125, + "acc_stderr": 0.019955975145835542, + "acc_norm": 0.41830065359477125, + "acc_norm_stderr": 0.019955975145835542 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3120567375886525, + "acc_stderr": 0.027640120545169938, + "acc_norm": 0.3120567375886525, + "acc_norm_stderr": 0.027640120545169938 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.32142857142857145, + "acc_stderr": 0.04432804055291519, + "acc_norm": 0.32142857142857145, + "acc_norm_stderr": 0.04432804055291519 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.39351851851851855, + "acc_stderr": 0.03331747876370312, + "acc_norm": 0.39351851851851855, + "acc_norm_stderr": 0.03331747876370312 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2871508379888268, + "acc_stderr": 0.015131608849963757, + "acc_norm": 0.2871508379888268, + "acc_norm_stderr": 0.015131608849963757 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 
0.04988876515698589 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.41911764705882354, + "acc_stderr": 0.029972807170464626, + "acc_norm": 0.41911764705882354, + "acc_norm_stderr": 0.029972807170464626 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4448979591836735, + "acc_stderr": 0.031814251181977865, + "acc_norm": 0.4448979591836735, + "acc_norm_stderr": 0.031814251181977865 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6497890295358649, + "acc_stderr": 0.03105239193758435, + "acc_norm": 0.6497890295358649, + "acc_norm_stderr": 0.03105239193758435 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3363754889178618, + "acc_stderr": 0.012067083079452225, + "acc_norm": 0.3363754889178618, + "acc_norm_stderr": 0.012067083079452225 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5147058823529411, + "acc_stderr": 0.03507793834791324, + "acc_norm": 0.5147058823529411, + "acc_norm_stderr": 0.03507793834791324 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6, + "acc_stderr": 0.03825460278380025, + "acc_norm": 0.6, + "acc_norm_stderr": 0.03825460278380025 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2864137086903305, + "mc1_stderr": 0.01582614243950234, + "mc2": 0.4285492447923733, + "mc2_stderr": 0.015103565647608173 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.551357733175915, + "acc_stderr": 0.017099430514725778, + "acc_norm": 0.6162927981109799, + "acc_norm_stderr": 0.016718924637231826 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + 
"harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + 
"harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Minirecord/minyi_6b", + "model_sha": "f8137f2ed10ff1496e75729ed15fad480073a7e4", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Minirecord/minyi_dpo_6b/result_2023-12-18 09:01:03.json b/Minirecord/minyi_dpo_6b/result_2023-12-18 09:01:03.json new file mode 100644 index 0000000000000000000000000000000000000000..212b69dfe1e57e8d7da62a6bba31fdd03e686d21 --- /dev/null +++ b/Minirecord/minyi_dpo_6b/result_2023-12-18 09:01:03.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3643344709897611, + "acc_stderr": 0.014063260279882417, + "acc_norm": 0.4283276450511945, + "acc_norm_stderr": 0.014460496367599019 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4122684724158534, + "acc_stderr": 0.004912370023913013, + "acc_norm": 0.5438159729137622, + "acc_norm_stderr": 0.004970585328297623 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.52046783625731, + "acc_stderr": 0.0383161053282193, + "acc_norm": 0.52046783625731, + "acc_norm_stderr": 0.0383161053282193 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6116504854368932, + "acc_stderr": 0.04825729337356389, + "acc_norm": 0.6116504854368932, + "acc_norm_stderr": 0.04825729337356389 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5798212005108557, + "acc_stderr": 0.017650651363078022, + "acc_norm": 0.5798212005108557, + "acc_norm_stderr": 0.017650651363078022 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.5185185185185185, + "acc_stderr": 0.04316378599511324, + "acc_norm": 0.5185185185185185, + 
"acc_norm_stderr": 0.04316378599511324 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.42127659574468085, + "acc_stderr": 0.03227834510146268, + "acc_norm": 0.42127659574468085, + "acc_norm_stderr": 0.03227834510146268 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39759036144578314, + "acc_stderr": 0.03809973084540219, + "acc_norm": 0.39759036144578314, + "acc_norm_stderr": 0.03809973084540219 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5176848874598071, + "acc_stderr": 0.02838032284907713, + "acc_norm": 0.5176848874598071, + "acc_norm_stderr": 0.02838032284907713 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5112107623318386, + "acc_stderr": 0.033549366530984746, + "acc_norm": 0.5112107623318386, + "acc_norm_stderr": 0.033549366530984746 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5267175572519084, + "acc_stderr": 0.04379024936553894, + "acc_norm": 0.5267175572519084, + "acc_norm_stderr": 0.04379024936553894 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.45, + "acc_stderr": 0.04999999999999999, + "acc_norm": 0.45, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5959595959595959, + "acc_stderr": 0.03496130972056128, + "acc_norm": 0.5959595959595959, + "acc_norm_stderr": 0.03496130972056128 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4482758620689655, + "acc_stderr": 0.04144311810878151, + "acc_norm": 0.4482758620689655, + "acc_norm_stderr": 0.04144311810878151 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.042801058373643945, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.042801058373643945 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.44537815126050423, + "acc_stderr": 0.0322841062671639, + 
"acc_norm": 0.44537815126050423, + "acc_norm_stderr": 0.0322841062671639 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4512820512820513, + "acc_stderr": 0.02523038123893483, + "acc_norm": 0.4512820512820513, + "acc_norm_stderr": 0.02523038123893483 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.66, + "acc_stderr": 0.04760952285695238, + "acc_norm": 0.66, + "acc_norm_stderr": 0.04760952285695238 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39408866995073893, + "acc_stderr": 0.03438157967036543, + "acc_norm": 0.39408866995073893, + "acc_norm_stderr": 0.03438157967036543 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4935483870967742, + "acc_stderr": 0.02844163823354051, + "acc_norm": 0.4935483870967742, + "acc_norm_stderr": 0.02844163823354051 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6794871794871795, + "acc_stderr": 0.030572811310299604, + "acc_norm": 0.6794871794871795, + "acc_norm_stderr": 0.030572811310299604 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4830188679245283, + "acc_stderr": 0.030755120364119905, + "acc_norm": 0.4830188679245283, + "acc_norm_stderr": 0.030755120364119905 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5363636363636364, + "acc_stderr": 0.04776449162396197, + "acc_norm": 0.5363636363636364, + "acc_norm_stderr": 0.04776449162396197 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.31851851851851853, + "acc_stderr": 0.028406533090608463, + "acc_norm": 0.31851851851851853, + "acc_norm_stderr": 0.028406533090608463 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3576158940397351, + "acc_stderr": 
0.03913453431177258, + "acc_norm": 0.3576158940397351, + "acc_norm_stderr": 0.03913453431177258 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6368159203980099, + "acc_stderr": 0.03400598505599015, + "acc_norm": 0.6368159203980099, + "acc_norm_stderr": 0.03400598505599015 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3815028901734104, + "acc_stderr": 0.03703851193099521, + "acc_norm": 0.3815028901734104, + "acc_norm_stderr": 0.03703851193099521 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3412698412698413, + "acc_stderr": 0.02441923496681906, + "acc_norm": 0.3412698412698413, + "acc_norm_stderr": 0.02441923496681906 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.04122728707651283, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.04122728707651283 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.63, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.63, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5028901734104047, + "acc_stderr": 0.026918645383239004, + "acc_norm": 0.5028901734104047, + "acc_norm_stderr": 0.026918645383239004 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.48466257668711654, + "acc_stderr": 0.03926522378708843, + "acc_norm": 0.48466257668711654, + "acc_norm_stderr": 0.03926522378708843 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4783950617283951, + "acc_stderr": 0.027794760105008736, + "acc_norm": 0.4783950617283951, + "acc_norm_stderr": 0.027794760105008736 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5751295336787565, + 
"acc_stderr": 0.035674713352125395, + "acc_norm": 0.5751295336787565, + "acc_norm_stderr": 0.035674713352125395 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.34210526315789475, + "acc_stderr": 0.04462917535336938, + "acc_norm": 0.34210526315789475, + "acc_norm_stderr": 0.04462917535336938 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6018348623853211, + "acc_stderr": 0.02098798942265426, + "acc_norm": 0.6018348623853211, + "acc_norm_stderr": 0.02098798942265426 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2698412698412698, + "acc_stderr": 0.03970158273235172, + "acc_norm": 0.2698412698412698, + "acc_norm_stderr": 0.03970158273235172 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4738562091503268, + "acc_stderr": 0.028590752958852394, + "acc_norm": 0.4738562091503268, + "acc_norm_stderr": 0.028590752958852394 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.55, + "acc_stderr": 0.05, + "acc_norm": 0.55, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6446280991735537, + "acc_stderr": 0.0436923632657398, + "acc_norm": 0.6446280991735537, + "acc_norm_stderr": 0.0436923632657398 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5131578947368421, + "acc_stderr": 0.04067533136309172, + "acc_norm": 0.5131578947368421, + "acc_norm_stderr": 0.04067533136309172 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4068627450980392, + "acc_stderr": 0.019873802005061173, + "acc_norm": 0.4068627450980392, + "acc_norm_stderr": 0.019873802005061173 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3191489361702128, + "acc_stderr": 0.0278079901413202, + "acc_norm": 0.3191489361702128, + "acc_norm_stderr": 0.0278079901413202 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.32142857142857145, + "acc_stderr": 0.044328040552915185, + "acc_norm": 0.32142857142857145, + "acc_norm_stderr": 0.044328040552915185 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 
0.3287037037037037, + "acc_stderr": 0.032036140846700596, + "acc_norm": 0.3287037037037037, + "acc_norm_stderr": 0.032036140846700596 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2748603351955307, + "acc_stderr": 0.014931316703220504, + "acc_norm": 0.2748603351955307, + "acc_norm_stderr": 0.014931316703220504 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.35294117647058826, + "acc_stderr": 0.0290294228156814, + "acc_norm": 0.35294117647058826, + "acc_norm_stderr": 0.0290294228156814 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5102040816326531, + "acc_stderr": 0.03200255347893782, + "acc_norm": 0.5102040816326531, + "acc_norm_stderr": 0.03200255347893782 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6497890295358649, + "acc_stderr": 0.031052391937584346, + "acc_norm": 0.6497890295358649, + "acc_norm_stderr": 0.031052391937584346 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.34028683181225555, + "acc_stderr": 0.0121012176102238, + "acc_norm": 0.34028683181225555, + "acc_norm_stderr": 0.0121012176102238 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5588235294117647, + "acc_stderr": 0.034849415144292316, + "acc_norm": 0.5588235294117647, + "acc_norm_stderr": 0.034849415144292316 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6121212121212121, + "acc_stderr": 0.0380491365397101, + "acc_norm": 0.6121212121212121, + "acc_norm_stderr": 0.0380491365397101 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.28518971848225216, + "mc1_stderr": 0.015805827874454895, + "mc2": 0.4432499193765067, + "mc2_stderr": 0.01527417237825953 
+ }, + "harness|ko_commongen_v2|2": { + "acc": 0.5548996458087367, + "acc_stderr": 0.017086417431005467, + "acc_norm": 0.6269185360094451, + "acc_norm_stderr": 0.01662731827513745 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + 
"harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Minirecord/minyi_dpo_6b", + "model_sha": "61066958700e4fda47e6381d3cbc4b2736373868", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Minirecord/psm_170k_llama_13b/result_2023-12-18 09:04:29.json b/Minirecord/psm_170k_llama_13b/result_2023-12-18 09:04:29.json new file mode 100644 index 0000000000000000000000000000000000000000..e55f3d7261e6f194b5fb268c5221de05199d4fa8 --- /dev/null +++ b/Minirecord/psm_170k_llama_13b/result_2023-12-18 09:04:29.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4138225255972696, + "acc_stderr": 0.014392730009221009, + "acc_norm": 0.46245733788395904, + "acc_norm_stderr": 0.01457014449507558 + }, + "harness|ko_hellaswag|10": { + "acc": 0.43696474805815577, + "acc_stderr": 0.004949969363017663, + "acc_norm": 0.5916152160924119, + "acc_norm_stderr": 0.0049053043710908725 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 
0.5380116959064327, + "acc_stderr": 0.03823727092882307, + "acc_norm": 0.5380116959064327, + "acc_norm_stderr": 0.03823727092882307 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5339805825242718, + "acc_stderr": 0.04939291447273481, + "acc_norm": 0.5339805825242718, + "acc_norm_stderr": 0.04939291447273481 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.541507024265645, + "acc_stderr": 0.01781824860346558, + "acc_norm": 0.541507024265645, + "acc_norm_stderr": 0.01781824860346558 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.043163785995113245, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.043163785995113245 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720683, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720683 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.41702127659574467, + "acc_stderr": 0.03223276266711712, + "acc_norm": 0.41702127659574467, + "acc_norm_stderr": 0.03223276266711712 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.45180722891566266, + "acc_stderr": 0.03874371556587953, + "acc_norm": 0.45180722891566266, + "acc_norm_stderr": 0.03874371556587953 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5337620578778135, + "acc_stderr": 0.028333277109562807, + "acc_norm": 0.5337620578778135, + "acc_norm_stderr": 0.028333277109562807 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5246636771300448, + "acc_stderr": 0.03351695167652628, + "acc_norm": 0.5246636771300448, + "acc_norm_stderr": 0.03351695167652628 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5038167938931297, + "acc_stderr": 0.043851623256015534, + "acc_norm": 0.5038167938931297, + "acc_norm_stderr": 0.043851623256015534 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.43, + "acc_stderr": 0.0497569851956243, + "acc_norm": 0.43, + "acc_norm_stderr": 0.0497569851956243 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 
0.5555555555555556, + "acc_stderr": 0.035402943770953675, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.035402943770953675 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4827586206896552, + "acc_stderr": 0.04164188720169377, + "acc_norm": 0.4827586206896552, + "acc_norm_stderr": 0.04164188720169377 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.19607843137254902, + "acc_stderr": 0.03950581861179961, + "acc_norm": 0.19607843137254902, + "acc_norm_stderr": 0.03950581861179961 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.46638655462184875, + "acc_stderr": 0.03240501447690071, + "acc_norm": 0.46638655462184875, + "acc_norm_stderr": 0.03240501447690071 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.44871794871794873, + "acc_stderr": 0.025217315184846482, + "acc_norm": 0.44871794871794873, + "acc_norm_stderr": 0.025217315184846482 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.53, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.53, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.42, + "acc_stderr": 0.04960449637488584, + "acc_norm": 0.42, + "acc_norm_stderr": 0.04960449637488584 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3497536945812808, + "acc_stderr": 0.03355400904969567, + "acc_norm": 0.3497536945812808, + "acc_norm_stderr": 0.03355400904969567 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.46774193548387094, + "acc_stderr": 0.028384747788813332, + "acc_norm": 0.46774193548387094, + "acc_norm_stderr": 0.028384747788813332 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6709401709401709, + "acc_stderr": 0.03078232157768817, + "acc_norm": 0.6709401709401709, + "acc_norm_stderr": 0.03078232157768817 + }, 
+ "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4226415094339623, + "acc_stderr": 0.03040233144576954, + "acc_norm": 0.4226415094339623, + "acc_norm_stderr": 0.03040233144576954 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5272727272727272, + "acc_stderr": 0.04782001791380061, + "acc_norm": 0.5272727272727272, + "acc_norm_stderr": 0.04782001791380061 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.28888888888888886, + "acc_stderr": 0.027634907264178544, + "acc_norm": 0.28888888888888886, + "acc_norm_stderr": 0.027634907264178544 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.037101857261199946, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.037101857261199946 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.582089552238806, + "acc_stderr": 0.034875586404620636, + "acc_norm": 0.582089552238806, + "acc_norm_stderr": 0.034875586404620636 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4161849710982659, + "acc_stderr": 0.037585177754049466, + "acc_norm": 0.4161849710982659, + "acc_norm_stderr": 0.037585177754049466 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.29365079365079366, + "acc_stderr": 0.023456037383982026, + "acc_norm": 0.29365079365079366, + "acc_norm_stderr": 0.023456037383982026 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4027777777777778, + "acc_stderr": 0.04101405519842425, + "acc_norm": 0.4027777777777778, + "acc_norm_stderr": 0.04101405519842425 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.64, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.64, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5346820809248555, + "acc_stderr": 0.026854257928258886, + "acc_norm": 0.5346820809248555, + 
"acc_norm_stderr": 0.026854257928258886 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.50920245398773, + "acc_stderr": 0.03927705600787443, + "acc_norm": 0.50920245398773, + "acc_norm_stderr": 0.03927705600787443 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5185185185185185, + "acc_stderr": 0.02780165621232366, + "acc_norm": 0.5185185185185185, + "acc_norm_stderr": 0.02780165621232366 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5906735751295337, + "acc_stderr": 0.03548608168860806, + "acc_norm": 0.5906735751295337, + "acc_norm_stderr": 0.03548608168860806 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.04185774424022057, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.04185774424022057 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5541284403669725, + "acc_stderr": 0.02131133500970858, + "acc_norm": 0.5541284403669725, + "acc_norm_stderr": 0.02131133500970858 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.03932537680392871, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.03932537680392871 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4117647058823529, + "acc_stderr": 0.02818059632825929, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.02818059632825929 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6776859504132231, + "acc_stderr": 0.042664163633521685, + "acc_norm": 0.6776859504132231, + "acc_norm_stderr": 0.042664163633521685 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4144736842105263, + "acc_stderr": 0.04008973785779206, + "acc_norm": 
0.4144736842105263, + "acc_norm_stderr": 0.04008973785779206 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.019722058939618068, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.019722058939618068 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3404255319148936, + "acc_stderr": 0.028267657482650147, + "acc_norm": 0.3404255319148936, + "acc_norm_stderr": 0.028267657482650147 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.26785714285714285, + "acc_stderr": 0.042032772914677614, + "acc_norm": 0.26785714285714285, + "acc_norm_stderr": 0.042032772914677614 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.03275773486100999, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.03275773486100999 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.47, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.47, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3860294117647059, + "acc_stderr": 0.029573269134411124, + "acc_norm": 0.3860294117647059, + "acc_norm_stderr": 0.029573269134411124 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4448979591836735, + "acc_stderr": 0.031814251181977865, + "acc_norm": 0.4448979591836735, + "acc_norm_stderr": 0.031814251181977865 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.030685820596610805, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.030685820596610805 + }, + 
"harness|ko_mmlu_professional_law|5": { + "acc": 0.3350717079530639, + "acc_stderr": 0.012055499471330366, + "acc_norm": 0.3350717079530639, + "acc_norm_stderr": 0.012055499471330366 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5637254901960784, + "acc_stderr": 0.03480693138457039, + "acc_norm": 0.5637254901960784, + "acc_norm_stderr": 0.03480693138457039 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5696969696969697, + "acc_stderr": 0.03866225962879077, + "acc_norm": 0.5696969696969697, + "acc_norm_stderr": 0.03866225962879077 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.30354957160342716, + "mc1_stderr": 0.016095884155386844, + "mc2": 0.4779022257635535, + "mc2_stderr": 0.015265649173984836 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5324675324675324, + "acc_stderr": 0.01715407371668287, + "acc_norm": 0.6127508854781583, + "acc_norm_stderr": 0.016747577991642785 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + 
"harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Minirecord/psm_170k_llama_13b", + "model_sha": "0a20211ff6d1c3414f1afe566f98c3619c29b074", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git 
a/Minirecord/psm_llama13b/result_2023-12-11 00:31:15.json b/Minirecord/psm_llama13b/result_2023-12-11 00:31:15.json new file mode 100644 index 0000000000000000000000000000000000000000..6d64e707ddf9bb4df01caf927e140cdcd7f410fe --- /dev/null +++ b/Minirecord/psm_llama13b/result_2023-12-11 00:31:15.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.41552901023890787, + "acc_stderr": 0.014401366641216377, + "acc_norm": 0.47440273037542663, + "acc_norm_stderr": 0.014592230885298964 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4320852419836686, + "acc_stderr": 0.004943537242344414, + "acc_norm": 0.5786695877315275, + "acc_norm_stderr": 0.004927631806477556 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5497076023391813, + "acc_stderr": 0.03815827365913237, + "acc_norm": 0.5497076023391813, + "acc_norm_stderr": 0.03815827365913237 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5631067961165048, + "acc_stderr": 0.04911147107365777, + "acc_norm": 0.5631067961165048, + "acc_norm_stderr": 0.04911147107365777 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5261813537675607, + "acc_stderr": 0.017855434554041996, + "acc_norm": 0.5261813537675607, + "acc_norm_stderr": 0.017855434554041996 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4148148148148148, + "acc_stderr": 0.04256193767901407, + "acc_norm": 0.4148148148148148, + "acc_norm_stderr": 0.04256193767901407 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.37872340425531914, + "acc_stderr": 0.03170995606040655, + "acc_norm": 0.37872340425531914, + "acc_norm_stderr": 0.03170995606040655 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39156626506024095, + "acc_stderr": 0.03799857454479636, + "acc_norm": 0.39156626506024095, + "acc_norm_stderr": 0.03799857454479636 + }, + 
"harness|ko_mmlu_philosophy|5": { + "acc": 0.5176848874598071, + "acc_stderr": 0.028380322849077138, + "acc_norm": 0.5176848874598071, + "acc_norm_stderr": 0.028380322849077138 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.45739910313901344, + "acc_stderr": 0.033435777055830646, + "acc_norm": 0.45739910313901344, + "acc_norm_stderr": 0.033435777055830646 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4732824427480916, + "acc_stderr": 0.04379024936553894, + "acc_norm": 0.4732824427480916, + "acc_norm_stderr": 0.04379024936553894 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5404040404040404, + "acc_stderr": 0.03550702465131342, + "acc_norm": 0.5404040404040404, + "acc_norm_stderr": 0.03550702465131342 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.45517241379310347, + "acc_stderr": 0.04149886942192117, + "acc_norm": 0.45517241379310347, + "acc_norm_stderr": 0.04149886942192117 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.19607843137254902, + "acc_stderr": 0.03950581861179963, + "acc_norm": 0.19607843137254902, + "acc_norm_stderr": 0.03950581861179963 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.47478991596638653, + "acc_stderr": 0.0324371805513741, + "acc_norm": 0.47478991596638653, + "acc_norm_stderr": 0.0324371805513741 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4512820512820513, + "acc_stderr": 0.02523038123893483, + "acc_norm": 0.4512820512820513, + "acc_norm_stderr": 0.02523038123893483 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.55, + "acc_stderr": 0.05, + "acc_norm": 0.55, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + 
"harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5277777777777778, + "acc_stderr": 0.048262172941398944, + "acc_norm": 0.5277777777777778, + "acc_norm_stderr": 0.048262172941398944 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3793103448275862, + "acc_stderr": 0.034139638059062345, + "acc_norm": 0.3793103448275862, + "acc_norm_stderr": 0.034139638059062345 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5161290322580645, + "acc_stderr": 0.028429203176724562, + "acc_norm": 0.5161290322580645, + "acc_norm_stderr": 0.028429203176724562 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6196581196581197, + "acc_stderr": 0.03180425204384099, + "acc_norm": 0.6196581196581197, + "acc_norm_stderr": 0.03180425204384099 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4679245283018868, + "acc_stderr": 0.030709486992556552, + "acc_norm": 0.4679245283018868, + "acc_norm_stderr": 0.030709486992556552 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5363636363636364, + "acc_stderr": 0.04776449162396197, + "acc_norm": 0.5363636363636364, + "acc_norm_stderr": 0.04776449162396197 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.02696242432507382, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.02696242432507382 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.03780445850526732, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.03780445850526732 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6069651741293532, + "acc_stderr": 0.0345368246603156, + "acc_norm": 0.6069651741293532, + "acc_norm_stderr": 0.0345368246603156 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3872832369942196, + "acc_stderr": 0.03714325906302065, + "acc_norm": 0.3872832369942196, + "acc_norm_stderr": 0.03714325906302065 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2777777777777778, + "acc_stderr": 
0.023068188848261117, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.023068188848261117 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3541666666666667, + "acc_stderr": 0.039994111357535424, + "acc_norm": 0.3541666666666667, + "acc_norm_stderr": 0.039994111357535424 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939098, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939098 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.62, + "acc_stderr": 0.04878317312145634, + "acc_norm": 0.62, + "acc_norm_stderr": 0.04878317312145634 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4884393063583815, + "acc_stderr": 0.02691189868637793, + "acc_norm": 0.4884393063583815, + "acc_norm_stderr": 0.02691189868637793 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4233128834355828, + "acc_stderr": 0.03881891213334384, + "acc_norm": 0.4233128834355828, + "acc_norm_stderr": 0.03881891213334384 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.44753086419753085, + "acc_stderr": 0.02766713856942271, + "acc_norm": 0.44753086419753085, + "acc_norm_stderr": 0.02766713856942271 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5803108808290155, + "acc_stderr": 0.03561587327685884, + "acc_norm": 0.5803108808290155, + "acc_norm_stderr": 0.03561587327685884 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.22807017543859648, + "acc_stderr": 0.03947152782669415, + "acc_norm": 0.22807017543859648, + "acc_norm_stderr": 0.03947152782669415 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5541284403669725, + "acc_stderr": 0.021311335009708575, + "acc_norm": 0.5541284403669725, + "acc_norm_stderr": 0.021311335009708575 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2857142857142857, + 
"acc_stderr": 0.0404061017820884, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.0404061017820884 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4150326797385621, + "acc_stderr": 0.028213504177824093, + "acc_norm": 0.4150326797385621, + "acc_norm_stderr": 0.028213504177824093 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6446280991735537, + "acc_stderr": 0.0436923632657398, + "acc_norm": 0.6446280991735537, + "acc_norm_stderr": 0.0436923632657398 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40131578947368424, + "acc_stderr": 0.03988903703336285, + "acc_norm": 0.40131578947368424, + "acc_norm_stderr": 0.03988903703336285 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3660130718954248, + "acc_stderr": 0.019488025745529675, + "acc_norm": 0.3660130718954248, + "acc_norm_stderr": 0.019488025745529675 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3262411347517731, + "acc_stderr": 0.02796845304356317, + "acc_norm": 0.3262411347517731, + "acc_norm_stderr": 0.02796845304356317 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.1875, + "acc_stderr": 0.0370468111477387, + "acc_norm": 0.1875, + "acc_norm_stderr": 0.0370468111477387 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.35648148148148145, + "acc_stderr": 0.03266478331527272, + "acc_norm": 0.35648148148148145, + "acc_norm_stderr": 0.03266478331527272 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.44, 
+ "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.0301619119307671, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.0301619119307671 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.47346938775510206, + "acc_stderr": 0.03196412734523272, + "acc_norm": 0.47346938775510206, + "acc_norm_stderr": 0.03196412734523272 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6413502109704642, + "acc_stderr": 0.03121956944530185, + "acc_norm": 0.6413502109704642, + "acc_norm_stderr": 0.03121956944530185 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3344198174706649, + "acc_stderr": 0.01204966898321494, + "acc_norm": 0.3344198174706649, + "acc_norm_stderr": 0.01204966898321494 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5245098039215687, + "acc_stderr": 0.03505093194348798, + "acc_norm": 0.5245098039215687, + "acc_norm_stderr": 0.03505093194348798 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5272727272727272, + "acc_stderr": 0.03898531605579419, + "acc_norm": 0.5272727272727272, + "acc_norm_stderr": 0.03898531605579419 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.29498164014687883, + "mc1_stderr": 0.015964400965589674, + "mc2": 0.46116641220870364, + "mc2_stderr": 0.015268627780335149 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5596221959858324, + "acc_stderr": 0.017067699774312977, + "acc_norm": 0.6292798110979929, + "acc_norm_stderr": 0.01660580128921262 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + 
"harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 
1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Minirecord/psm_llama13b", + "model_sha": "37f58c653ff3f239a4dee90249728c1529e442e0", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Minirecord/solar_informal_10.7b/result_2024-01-02 11:49:16.json b/Minirecord/solar_informal_10.7b/result_2024-01-02 11:49:16.json new file mode 100644 index 0000000000000000000000000000000000000000..eb051331a56a11c29d5397e47331e594fd1127b0 --- /dev/null +++ b/Minirecord/solar_informal_10.7b/result_2024-01-02 11:49:16.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.44880546075085326, + "acc_stderr": 0.014534599585097664, + "acc_norm": 0.5170648464163823, + "acc_norm_stderr": 0.01460287838853659 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4569806811392153, + "acc_stderr": 0.004971278309204199, + "acc_norm": 0.6095399322844055, + "acc_norm_stderr": 0.004868564301540814 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5789473684210527, + "acc_stderr": 0.03786720706234214, + "acc_norm": 0.5789473684210527, + "acc_norm_stderr": 0.03786720706234214 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6504854368932039, + "acc_stderr": 0.047211885060971716, + "acc_norm": 0.6504854368932039, + "acc_norm_stderr": 0.047211885060971716 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6628352490421456, + "acc_stderr": 0.016905207420803547, + "acc_norm": 0.6628352490421456, + "acc_norm_stderr": 0.016905207420803547 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45925925925925926, + 
"acc_stderr": 0.04304979692464244, + "acc_norm": 0.45925925925925926, + "acc_norm_stderr": 0.04304979692464244 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.49361702127659574, + "acc_stderr": 0.03268335899936338, + "acc_norm": 0.49361702127659574, + "acc_norm_stderr": 0.03268335899936338 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4457831325301205, + "acc_stderr": 0.03869543323472101, + "acc_norm": 0.4457831325301205, + "acc_norm_stderr": 0.03869543323472101 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5819935691318328, + "acc_stderr": 0.028013651891995076, + "acc_norm": 0.5819935691318328, + "acc_norm_stderr": 0.028013651891995076 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5919282511210763, + "acc_stderr": 0.03298574607842822, + "acc_norm": 0.5919282511210763, + "acc_norm_stderr": 0.03298574607842822 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6259541984732825, + "acc_stderr": 0.04243869242230524, + "acc_norm": 0.6259541984732825, + "acc_norm_stderr": 0.04243869242230524 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.42, + "acc_stderr": 0.04960449637488583, + "acc_norm": 0.42, + "acc_norm_stderr": 0.04960449637488583 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7272727272727273, + "acc_stderr": 0.03173071239071724, + "acc_norm": 0.7272727272727273, + "acc_norm_stderr": 0.03173071239071724 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.45517241379310347, + "acc_stderr": 0.04149886942192117, + "acc_norm": 0.45517241379310347, + "acc_norm_stderr": 0.04149886942192117 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.04440521906179328, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.04440521906179328 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + 
"acc": 0.5714285714285714, + "acc_stderr": 0.03214536859788639, + "acc_norm": 0.5714285714285714, + "acc_norm_stderr": 0.03214536859788639 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5128205128205128, + "acc_stderr": 0.02534267129380724, + "acc_norm": 0.5128205128205128, + "acc_norm_stderr": 0.02534267129380724 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.56, + "acc_stderr": 0.0498887651569859, + "acc_norm": 0.56, + "acc_norm_stderr": 0.0498887651569859 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6111111111111112, + "acc_stderr": 0.04712821257426769, + "acc_norm": 0.6111111111111112, + "acc_norm_stderr": 0.04712821257426769 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.43349753694581283, + "acc_stderr": 0.03486731727419872, + "acc_norm": 0.43349753694581283, + "acc_norm_stderr": 0.03486731727419872 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5741935483870968, + "acc_stderr": 0.028129112709165904, + "acc_norm": 0.5741935483870968, + "acc_norm_stderr": 0.028129112709165904 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7863247863247863, + "acc_stderr": 0.02685345037700915, + "acc_norm": 0.7863247863247863, + "acc_norm_stderr": 0.02685345037700915 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5358490566037736, + "acc_stderr": 0.030693675018458003, + "acc_norm": 0.5358490566037736, + "acc_norm_stderr": 0.030693675018458003 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5181818181818182, + "acc_stderr": 0.04785964010794915, + "acc_norm": 0.5181818181818182, + "acc_norm_stderr": 0.04785964010794915 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3296296296296296, + "acc_stderr": 0.028661201116524586, + "acc_norm": 0.3296296296296296, + "acc_norm_stderr": 0.028661201116524586 + }, + 
"harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33112582781456956, + "acc_stderr": 0.038425817186598696, + "acc_norm": 0.33112582781456956, + "acc_norm_stderr": 0.038425817186598696 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6865671641791045, + "acc_stderr": 0.03280188205348642, + "acc_norm": 0.6865671641791045, + "acc_norm_stderr": 0.03280188205348642 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4508670520231214, + "acc_stderr": 0.037940126746970296, + "acc_norm": 0.4508670520231214, + "acc_norm_stderr": 0.037940126746970296 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.025305906241590632, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.025305906241590632 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5069444444444444, + "acc_stderr": 0.04180806750294938, + "acc_norm": 0.5069444444444444, + "acc_norm_stderr": 0.04180806750294938 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.69, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.69, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5606936416184971, + "acc_stderr": 0.026720034380514998, + "acc_norm": 0.5606936416184971, + "acc_norm_stderr": 0.026720034380514998 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5030674846625767, + "acc_stderr": 0.03928297078179663, + "acc_norm": 0.5030674846625767, + "acc_norm_stderr": 0.03928297078179663 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5987654320987654, + "acc_stderr": 0.027272582849839796, + "acc_norm": 0.5987654320987654, + "acc_norm_stderr": 0.027272582849839796 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + 
"harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7150259067357513, + "acc_stderr": 0.032577140777096614, + "acc_norm": 0.7150259067357513, + "acc_norm_stderr": 0.032577140777096614 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.4298245614035088, + "acc_stderr": 0.046570472605949625, + "acc_norm": 0.4298245614035088, + "acc_norm_stderr": 0.046570472605949625 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6752293577981652, + "acc_stderr": 0.020077729109310327, + "acc_norm": 0.6752293577981652, + "acc_norm_stderr": 0.020077729109310327 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04216370213557835, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04216370213557835 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5588235294117647, + "acc_stderr": 0.02843109544417664, + "acc_norm": 0.5588235294117647, + "acc_norm_stderr": 0.02843109544417664 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.59, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.59, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7024793388429752, + "acc_stderr": 0.04173349148083499, + "acc_norm": 0.7024793388429752, + "acc_norm_stderr": 0.04173349148083499 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5460526315789473, + "acc_stderr": 0.040516463428741434, + "acc_norm": 0.5460526315789473, + "acc_norm_stderr": 0.040516463428741434 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5016339869281046, + "acc_stderr": 0.020227726838150124, + "acc_norm": 0.5016339869281046, + "acc_norm_stderr": 0.020227726838150124 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.41843971631205673, + "acc_stderr": 0.029427994039419994, + "acc_norm": 0.41843971631205673, + "acc_norm_stderr": 0.029427994039419994 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.39285714285714285, + "acc_stderr": 0.04635550135609976, + 
"acc_norm": 0.39285714285714285, + "acc_norm_stderr": 0.04635550135609976 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4305555555555556, + "acc_stderr": 0.033769221512523345, + "acc_norm": 0.4305555555555556, + "acc_norm_stderr": 0.033769221512523345 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.34301675977653634, + "acc_stderr": 0.015876912673057745, + "acc_norm": 0.34301675977653634, + "acc_norm_stderr": 0.015876912673057745 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.69, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.69, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.45588235294117646, + "acc_stderr": 0.030254372573976694, + "acc_norm": 0.45588235294117646, + "acc_norm_stderr": 0.030254372573976694 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5183673469387755, + "acc_stderr": 0.03198761546763127, + "acc_norm": 0.5183673469387755, + "acc_norm_stderr": 0.03198761546763127 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.759493670886076, + "acc_stderr": 0.027820781981149678, + "acc_norm": 0.759493670886076, + "acc_norm_stderr": 0.027820781981149678 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.38396349413298564, + "acc_stderr": 0.01242158783313423, + "acc_norm": 0.38396349413298564, + "acc_norm_stderr": 0.01242158783313423 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6274509803921569, + "acc_stderr": 0.03393388584958404, + "acc_norm": 0.6274509803921569, + "acc_norm_stderr": 0.03393388584958404 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.03756335775187896, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.03756335775187896 + }, + 
"harness|ko_truthfulqa_mc|0": { + "mc1": 0.2962056303549572, + "mc1_stderr": 0.0159835951018114, + "mc2": 0.47237043227217157, + "mc2_stderr": 0.01595447958123581 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5277449822904369, + "acc_stderr": 0.017163867979456012, + "acc_norm": 0.5796930342384888, + "acc_norm_stderr": 0.016970598281177706 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Minirecord/solar_informal_10.7b", + "model_sha": "40e56be12a5cb6a4de493e31c5397d36fa286497", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/MoaData/MoA_solar_10.7b_1.0/result_2024-04-11 01:31:25.json b/MoaData/MoA_solar_10.7b_1.0/result_2024-04-11 01:31:25.json new file mode 100644 index 0000000000000000000000000000000000000000..1d94dc152eef523edf00c6f32f10c6873b8b292a --- /dev/null +++ b/MoaData/MoA_solar_10.7b_1.0/result_2024-04-11 01:31:25.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.734641638225256, + "acc_stderr": 0.012902554762313966, + "acc_norm": 0.7764505119453925, + "acc_norm_stderr": 0.012174896631202605 + }, + "harness|ko_hellaswag|10": { + "acc": 0.6904999004182434, + "acc_stderr": 
0.004613427745209498, + "acc_norm": 0.795857398924517, + "acc_norm_stderr": 0.00402249921076073 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6900584795321637, + "acc_stderr": 0.035469769593931624, + "acc_norm": 0.6900584795321637, + "acc_norm_stderr": 0.035469769593931624 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.7572815533980582, + "acc_stderr": 0.04245022486384495, + "acc_norm": 0.7572815533980582, + "acc_norm_stderr": 0.04245022486384495 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6781609195402298, + "acc_stderr": 0.0167063814150579, + "acc_norm": 0.6781609195402298, + "acc_norm_stderr": 0.0167063814150579 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.043163785995113245, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.043163785995113245 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.48936170212765956, + "acc_stderr": 0.03267862331014063, + "acc_norm": 0.48936170212765956, + "acc_norm_stderr": 0.03267862331014063 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4759036144578313, + "acc_stderr": 0.03887971849597264, + "acc_norm": 0.4759036144578313, + "acc_norm_stderr": 0.03887971849597264 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6334405144694534, + "acc_stderr": 0.02736807824397163, + "acc_norm": 0.6334405144694534, + "acc_norm_stderr": 0.02736807824397163 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.672645739910314, + "acc_stderr": 0.03149384670994131, + "acc_norm": 0.672645739910314, + "acc_norm_stderr": 0.03149384670994131 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5648854961832062, + "acc_stderr": 0.04348208051644858, + "acc_norm": 0.5648854961832062, + "acc_norm_stderr": 0.04348208051644858 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.53, + "acc_stderr": 
0.050161355804659205, + "acc_norm": 0.53, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7424242424242424, + "acc_stderr": 0.031156269519646847, + "acc_norm": 0.7424242424242424, + "acc_norm_stderr": 0.031156269519646847 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5586206896551724, + "acc_stderr": 0.04137931034482757, + "acc_norm": 0.5586206896551724, + "acc_norm_stderr": 0.04137931034482757 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3431372549019608, + "acc_stderr": 0.04724007352383888, + "acc_norm": 0.3431372549019608, + "acc_norm_stderr": 0.04724007352383888 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6176470588235294, + "acc_stderr": 0.031566630992154156, + "acc_norm": 0.6176470588235294, + "acc_norm_stderr": 0.031566630992154156 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.6256410256410256, + "acc_stderr": 0.024537591572830496, + "acc_norm": 0.6256410256410256, + "acc_norm_stderr": 0.024537591572830496 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.65, + "acc_stderr": 0.04793724854411021, + "acc_norm": 0.65, + "acc_norm_stderr": 0.04793724854411021 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6481481481481481, + "acc_stderr": 0.04616631111801714, + "acc_norm": 0.6481481481481481, + "acc_norm_stderr": 0.04616631111801714 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.43349753694581283, + "acc_stderr": 0.03486731727419872, + "acc_norm": 0.43349753694581283, + "acc_norm_stderr": 0.03486731727419872 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6193548387096774, + "acc_stderr": 0.027621717832907036, + "acc_norm": 0.6193548387096774, + "acc_norm_stderr": 0.027621717832907036 + }, + "harness|ko_mmlu_marketing|5": { + 
"acc": 0.8247863247863247, + "acc_stderr": 0.024904439098918214, + "acc_norm": 0.8247863247863247, + "acc_norm_stderr": 0.024904439098918214 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5924528301886792, + "acc_stderr": 0.030242233800854494, + "acc_norm": 0.5924528301886792, + "acc_norm_stderr": 0.030242233800854494 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6181818181818182, + "acc_stderr": 0.04653429807913507, + "acc_norm": 0.6181818181818182, + "acc_norm_stderr": 0.04653429807913507 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.36666666666666664, + "acc_stderr": 0.029381620726465073, + "acc_norm": 0.36666666666666664, + "acc_norm_stderr": 0.029381620726465073 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3509933774834437, + "acc_stderr": 0.03896981964257375, + "acc_norm": 0.3509933774834437, + "acc_norm_stderr": 0.03896981964257375 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7213930348258707, + "acc_stderr": 0.031700561834973086, + "acc_norm": 0.7213930348258707, + "acc_norm_stderr": 0.031700561834973086 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5664739884393064, + "acc_stderr": 0.03778621079092055, + "acc_norm": 0.5664739884393064, + "acc_norm_stderr": 0.03778621079092055 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.4523809523809524, + "acc_stderr": 0.02563425811555496, + "acc_norm": 0.4523809523809524, + "acc_norm_stderr": 0.02563425811555496 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.625, + "acc_stderr": 0.04048439222695598, + "acc_norm": 0.625, + "acc_norm_stderr": 0.04048439222695598 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.76, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.76, + "acc_norm_stderr": 0.04292346959909283 + }, + 
"harness|ko_mmlu_moral_disputes|5": { + "acc": 0.6184971098265896, + "acc_stderr": 0.0261521986197268, + "acc_norm": 0.6184971098265896, + "acc_norm_stderr": 0.0261521986197268 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5828220858895705, + "acc_stderr": 0.038741028598180814, + "acc_norm": 0.5828220858895705, + "acc_norm_stderr": 0.038741028598180814 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6450617283950617, + "acc_stderr": 0.026624152478845853, + "acc_norm": 0.6450617283950617, + "acc_norm_stderr": 0.026624152478845853 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7616580310880829, + "acc_stderr": 0.030748905363909895, + "acc_norm": 0.7616580310880829, + "acc_norm_stderr": 0.030748905363909895 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.47368421052631576, + "acc_stderr": 0.046970851366478626, + "acc_norm": 0.47368421052631576, + "acc_norm_stderr": 0.046970851366478626 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.7321100917431193, + "acc_stderr": 0.018987462257978652, + "acc_norm": 0.7321100917431193, + "acc_norm_stderr": 0.018987462257978652 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.0442626668137991, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.0442626668137991 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5915032679738562, + "acc_stderr": 0.028146405993096358, + "acc_norm": 0.5915032679738562, + "acc_norm_stderr": 0.028146405993096358 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.73, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.73, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7355371900826446, + "acc_stderr": 0.04026187527591207, + "acc_norm": 0.7355371900826446, + 
"acc_norm_stderr": 0.04026187527591207 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.618421052631579, + "acc_stderr": 0.03953173377749194, + "acc_norm": 0.618421052631579, + "acc_norm_stderr": 0.03953173377749194 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5784313725490197, + "acc_stderr": 0.019977422600227477, + "acc_norm": 0.5784313725490197, + "acc_norm_stderr": 0.019977422600227477 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.4219858156028369, + "acc_stderr": 0.02946218923337059, + "acc_norm": 0.4219858156028369, + "acc_norm_stderr": 0.02946218923337059 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4375, + "acc_stderr": 0.04708567521880525, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.04708567521880525 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.47685185185185186, + "acc_stderr": 0.034063153607115086, + "acc_norm": 0.47685185185185186, + "acc_norm_stderr": 0.034063153607115086 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.3016759776536313, + "acc_stderr": 0.015350767572220285, + "acc_norm": 0.3016759776536313, + "acc_norm_stderr": 0.015350767572220285 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.69, + "acc_stderr": 0.046482319871173156, + "acc_norm": 0.69, + "acc_norm_stderr": 0.046482319871173156 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5294117647058824, + "acc_stderr": 0.030320243265004137, + "acc_norm": 0.5294117647058824, + "acc_norm_stderr": 0.030320243265004137 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6653061224489796, + "acc_stderr": 0.030209235226242304, + "acc_norm": 0.6653061224489796, + "acc_norm_stderr": 0.030209235226242304 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.70042194092827, + "acc_stderr": 
0.029818024749753095, + "acc_norm": 0.70042194092827, + "acc_norm_stderr": 0.029818024749753095 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.4517601043024772, + "acc_stderr": 0.012710662233660245, + "acc_norm": 0.4517601043024772, + "acc_norm_stderr": 0.012710662233660245 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6617647058823529, + "acc_stderr": 0.0332057461294543, + "acc_norm": 0.6617647058823529, + "acc_norm_stderr": 0.0332057461294543 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6484848484848484, + "acc_stderr": 0.037282069986826503, + "acc_norm": 0.6484848484848484, + "acc_norm_stderr": 0.037282069986826503 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.7539779681762546, + "mc1_stderr": 0.015077219200662595, + "mc2": 0.81437767576557, + "mc2_stderr": 0.013311821841355344 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5076741440377804, + "acc_stderr": 0.017188329219654276, + "acc_norm": 0.5348288075560803, + "acc_norm_stderr": 0.017148598015747422 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + 
"harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "MoaData/MoA_solar_10.7b_1.0", + "model_sha": "ace24156f1cfaecf5837436a8c45253f61b60a25", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + 
"override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/MoaData/Myrrh_solar_10.7b_2.0/result_2024-04-16 06:35:53.json b/MoaData/Myrrh_solar_10.7b_2.0/result_2024-04-16 06:35:53.json new file mode 100644 index 0000000000000000000000000000000000000000..fe67de94cc321671c7df5eb43fc5c55dcc8e9a89 --- /dev/null +++ b/MoaData/Myrrh_solar_10.7b_2.0/result_2024-04-16 06:35:53.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.7440273037542662, + "acc_stderr": 0.012753013241244518, + "acc_norm": 0.7824232081911263, + "acc_norm_stderr": 0.012057262020972506 + }, + "harness|ko_hellaswag|10": { + "acc": 0.7132045409281019, + "acc_stderr": 0.004513409114983804, + "acc_norm": 0.8098984266082454, + "acc_norm_stderr": 0.003915792315457838 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.7076023391812866, + "acc_stderr": 0.034886477134579215, + "acc_norm": 0.7076023391812866, + "acc_norm_stderr": 0.034886477134579215 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.7572815533980582, + "acc_stderr": 0.04245022486384495, + "acc_norm": 0.7572815533980582, + "acc_norm_stderr": 0.04245022486384495 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6909323116219668, + "acc_stderr": 0.016524988919702176, + "acc_norm": 0.6909323116219668, + "acc_norm_stderr": 0.016524988919702176 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4740740740740741, + "acc_stderr": 0.04313531696750574, + "acc_norm": 0.4740740740740741, + "acc_norm_stderr": 0.04313531696750574 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4978723404255319, + "acc_stderr": 0.03268572658667491, + "acc_norm": 0.4978723404255319, + "acc_norm_stderr": 0.03268572658667491 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4879518072289157, + "acc_stderr": 0.0389136449583582, + 
"acc_norm": 0.4879518072289157, + "acc_norm_stderr": 0.0389136449583582 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6366559485530546, + "acc_stderr": 0.027316847674192717, + "acc_norm": 0.6366559485530546, + "acc_norm_stderr": 0.027316847674192717 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.6322869955156951, + "acc_stderr": 0.03236198350928276, + "acc_norm": 0.6322869955156951, + "acc_norm_stderr": 0.03236198350928276 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5877862595419847, + "acc_stderr": 0.043171711948702556, + "acc_norm": 0.5877862595419847, + "acc_norm_stderr": 0.043171711948702556 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.51, + "acc_stderr": 0.050241839379569095, + "acc_norm": 0.51, + "acc_norm_stderr": 0.050241839379569095 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7424242424242424, + "acc_stderr": 0.031156269519646847, + "acc_norm": 0.7424242424242424, + "acc_norm_stderr": 0.031156269519646847 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5586206896551724, + "acc_stderr": 0.04137931034482757, + "acc_norm": 0.5586206896551724, + "acc_norm_stderr": 0.04137931034482757 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.35294117647058826, + "acc_stderr": 0.04755129616062946, + "acc_norm": 0.35294117647058826, + "acc_norm_stderr": 0.04755129616062946 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6470588235294118, + "acc_stderr": 0.031041941304059288, + "acc_norm": 0.6470588235294118, + "acc_norm_stderr": 0.031041941304059288 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.617948717948718, + "acc_stderr": 0.024635549163908237, + "acc_norm": 0.617948717948718, + "acc_norm_stderr": 0.024635549163908237 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.68, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.68, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.28, + 
"acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6574074074074074, + "acc_stderr": 0.04587904741301811, + "acc_norm": 0.6574074074074074, + "acc_norm_stderr": 0.04587904741301811 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.43349753694581283, + "acc_stderr": 0.03486731727419872, + "acc_norm": 0.43349753694581283, + "acc_norm_stderr": 0.03486731727419872 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6258064516129033, + "acc_stderr": 0.0275289042998457, + "acc_norm": 0.6258064516129033, + "acc_norm_stderr": 0.0275289042998457 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.8205128205128205, + "acc_stderr": 0.02514093595033543, + "acc_norm": 0.8205128205128205, + "acc_norm_stderr": 0.02514093595033543 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5811320754716981, + "acc_stderr": 0.030365050829115208, + "acc_norm": 0.5811320754716981, + "acc_norm_stderr": 0.030365050829115208 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6181818181818182, + "acc_stderr": 0.046534298079135075, + "acc_norm": 0.6181818181818182, + "acc_norm_stderr": 0.046534298079135075 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.36666666666666664, + "acc_stderr": 0.029381620726465073, + "acc_norm": 0.36666666666666664, + "acc_norm_stderr": 0.029381620726465073 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.32450331125827814, + "acc_stderr": 0.038227469376587525, + "acc_norm": 0.32450331125827814, + "acc_norm_stderr": 0.038227469376587525 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7263681592039801, + "acc_stderr": 0.03152439186555403, + "acc_norm": 0.7263681592039801, + "acc_norm_stderr": 0.03152439186555403 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5606936416184971, + "acc_stderr": 0.03784271932887467, + "acc_norm": 0.5606936416184971, + "acc_norm_stderr": 0.03784271932887467 + }, + 
"harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.4470899470899471, + "acc_stderr": 0.025606723995777025, + "acc_norm": 0.4470899470899471, + "acc_norm_stderr": 0.025606723995777025 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.625, + "acc_stderr": 0.04048439222695598, + "acc_norm": 0.625, + "acc_norm_stderr": 0.04048439222695598 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.76, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.76, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.6040462427745664, + "acc_stderr": 0.02632981334194625, + "acc_norm": 0.6040462427745664, + "acc_norm_stderr": 0.02632981334194625 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5828220858895705, + "acc_stderr": 0.038741028598180814, + "acc_norm": 0.5828220858895705, + "acc_norm_stderr": 0.038741028598180814 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6604938271604939, + "acc_stderr": 0.026348564412011624, + "acc_norm": 0.6604938271604939, + "acc_norm_stderr": 0.026348564412011624 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.047609522856952344, + "acc_norm": 0.34, + "acc_norm_stderr": 0.047609522856952344 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7616580310880829, + "acc_stderr": 0.03074890536390989, + "acc_norm": 0.7616580310880829, + "acc_norm_stderr": 0.03074890536390989 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.4649122807017544, + "acc_stderr": 0.046920083813689104, + "acc_norm": 0.4649122807017544, + "acc_norm_stderr": 0.046920083813689104 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.7357798165137615, + "acc_stderr": 0.018904164171510203, + "acc_norm": 0.7357798165137615, + "acc_norm_stderr": 0.018904164171510203 + }, + 
"harness|ko_mmlu_formal_logic|5": { + "acc": 0.42063492063492064, + "acc_stderr": 0.04415438226743744, + "acc_norm": 0.42063492063492064, + "acc_norm_stderr": 0.04415438226743744 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.6143790849673203, + "acc_stderr": 0.027870745278290286, + "acc_norm": 0.6143790849673203, + "acc_norm_stderr": 0.027870745278290286 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.7, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.7, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.743801652892562, + "acc_stderr": 0.03984979653302872, + "acc_norm": 0.743801652892562, + "acc_norm_stderr": 0.03984979653302872 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.6447368421052632, + "acc_stderr": 0.03894734487013316, + "acc_norm": 0.6447368421052632, + "acc_norm_stderr": 0.03894734487013316 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5866013071895425, + "acc_stderr": 0.019922115682786692, + "acc_norm": 0.5866013071895425, + "acc_norm_stderr": 0.019922115682786692 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.42907801418439717, + "acc_stderr": 0.029525914302558562, + "acc_norm": 0.42907801418439717, + "acc_norm_stderr": 0.029525914302558562 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.45535714285714285, + "acc_stderr": 0.04726835553719099, + "acc_norm": 0.45535714285714285, + "acc_norm_stderr": 0.04726835553719099 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4861111111111111, + "acc_stderr": 0.034086558679777494, + "acc_norm": 0.4861111111111111, + "acc_norm_stderr": 0.034086558679777494 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.3407821229050279, + "acc_stderr": 0.015852002449862096, + "acc_norm": 0.3407821229050279, + "acc_norm_stderr": 0.015852002449862096 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.51, + 
"acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.68, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.68, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5698529411764706, + "acc_stderr": 0.030074971917302875, + "acc_norm": 0.5698529411764706, + "acc_norm_stderr": 0.030074971917302875 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6857142857142857, + "acc_stderr": 0.029719329422417458, + "acc_norm": 0.6857142857142857, + "acc_norm_stderr": 0.029719329422417458 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7172995780590717, + "acc_stderr": 0.029312814153955924, + "acc_norm": 0.7172995780590717, + "acc_norm_stderr": 0.029312814153955924 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.44654498044328556, + "acc_stderr": 0.012697046024399663, + "acc_norm": 0.44654498044328556, + "acc_norm_stderr": 0.012697046024399663 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.033086111132364364, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.033086111132364364 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6727272727272727, + "acc_stderr": 0.03663974994391242, + "acc_norm": 0.6727272727272727, + "acc_norm_stderr": 0.03663974994391242 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.7662178702570379, + "mc1_stderr": 0.014816195991931588, + "mc2": 0.8288347273189988, + "mc2_stderr": 0.012937811970925488 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5053128689492326, + "acc_stderr": 0.01718938362722971, + "acc_norm": 0.5182998819362455, + "acc_norm_stderr": 0.017178836639177762 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + 
"harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + 
"harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "MoaData/Myrrh_solar_10.7b_2.0", + "model_sha": "667846461bf5cc07d0b4ea084c1614eb8b02e2dd", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/MoaData/Myrrh_solar_10.7b_2.0_sft/result_2024-04-22 02:02:28.json b/MoaData/Myrrh_solar_10.7b_2.0_sft/result_2024-04-22 02:02:28.json new file mode 100644 index 0000000000000000000000000000000000000000..0fa4496c12ab4a10d6b34bde77381e619f410d25 --- /dev/null +++ b/MoaData/Myrrh_solar_10.7b_2.0_sft/result_2024-04-22 02:02:28.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.7457337883959044, + "acc_stderr": 0.012724999945157744, + "acc_norm": 0.7832764505119454, + "acc_norm_stderr": 0.012040156713481189 + }, + "harness|ko_hellaswag|10": { + "acc": 0.702549292969528, + "acc_stderr": 0.004562022467161899, + "acc_norm": 0.8051185022903804, + "acc_norm_stderr": 0.003952999181084511 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.7134502923976608, + "acc_stderr": 0.03467826685703826, + "acc_norm": 0.7134502923976608, + "acc_norm_stderr": 0.03467826685703826 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.7572815533980582, + "acc_stderr": 0.04245022486384495, + "acc_norm": 0.7572815533980582, + "acc_norm_stderr": 0.04245022486384495 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6934865900383141, + "acc_stderr": 
0.01648695289304151, + "acc_norm": 0.6934865900383141, + "acc_norm_stderr": 0.01648695289304151 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4740740740740741, + "acc_stderr": 0.04313531696750574, + "acc_norm": 0.4740740740740741, + "acc_norm_stderr": 0.04313531696750574 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.49361702127659574, + "acc_stderr": 0.03268335899936338, + "acc_norm": 0.49361702127659574, + "acc_norm_stderr": 0.03268335899936338 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.5, + "acc_stderr": 0.03892494720807614, + "acc_norm": 0.5, + "acc_norm_stderr": 0.03892494720807614 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6430868167202572, + "acc_stderr": 0.027210420375934026, + "acc_norm": 0.6430868167202572, + "acc_norm_stderr": 0.027210420375934026 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.6367713004484304, + "acc_stderr": 0.03227790442850499, + "acc_norm": 0.6367713004484304, + "acc_norm_stderr": 0.03227790442850499 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5801526717557252, + "acc_stderr": 0.04328577215262972, + "acc_norm": 0.5801526717557252, + "acc_norm_stderr": 0.04328577215262972 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.51, + "acc_stderr": 0.050241839379569095, + "acc_norm": 0.51, + "acc_norm_stderr": 0.050241839379569095 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7424242424242424, + "acc_stderr": 0.031156269519646847, + "acc_norm": 0.7424242424242424, + "acc_norm_stderr": 0.031156269519646847 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5655172413793104, + "acc_stderr": 0.04130740879555497, + "acc_norm": 0.5655172413793104, + "acc_norm_stderr": 0.04130740879555497 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.35294117647058826, + "acc_stderr": 0.04755129616062946, + 
"acc_norm": 0.35294117647058826, + "acc_norm_stderr": 0.04755129616062946 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6512605042016807, + "acc_stderr": 0.030956636328566548, + "acc_norm": 0.6512605042016807, + "acc_norm_stderr": 0.030956636328566548 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.617948717948718, + "acc_stderr": 0.024635549163908237, + "acc_norm": 0.617948717948718, + "acc_norm_stderr": 0.024635549163908237 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.67, + "acc_stderr": 0.047258156262526094, + "acc_norm": 0.67, + "acc_norm_stderr": 0.047258156262526094 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6574074074074074, + "acc_stderr": 0.04587904741301811, + "acc_norm": 0.6574074074074074, + "acc_norm_stderr": 0.04587904741301811 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.43349753694581283, + "acc_stderr": 0.03486731727419872, + "acc_norm": 0.43349753694581283, + "acc_norm_stderr": 0.03486731727419872 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6290322580645161, + "acc_stderr": 0.027480541887953593, + "acc_norm": 0.6290322580645161, + "acc_norm_stderr": 0.027480541887953593 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.8247863247863247, + "acc_stderr": 0.024904439098918214, + "acc_norm": 0.8247863247863247, + "acc_norm_stderr": 0.024904439098918214 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5811320754716981, + "acc_stderr": 0.030365050829115208, + "acc_norm": 0.5811320754716981, + "acc_norm_stderr": 0.030365050829115208 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6181818181818182, + "acc_stderr": 0.046534298079135075, + "acc_norm": 0.6181818181818182, + "acc_norm_stderr": 0.046534298079135075 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 
0.362962962962963, + "acc_stderr": 0.029318203645206865, + "acc_norm": 0.362962962962963, + "acc_norm_stderr": 0.029318203645206865 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.32450331125827814, + "acc_stderr": 0.038227469376587525, + "acc_norm": 0.32450331125827814, + "acc_norm_stderr": 0.038227469376587525 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7263681592039801, + "acc_stderr": 0.03152439186555403, + "acc_norm": 0.7263681592039801, + "acc_norm_stderr": 0.03152439186555403 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5606936416184971, + "acc_stderr": 0.03784271932887467, + "acc_norm": 0.5606936416184971, + "acc_norm_stderr": 0.03784271932887467 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.025591857761382182, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.025591857761382182 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.625, + "acc_stderr": 0.04048439222695598, + "acc_norm": 0.625, + "acc_norm_stderr": 0.04048439222695598 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.76, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.76, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.6040462427745664, + "acc_stderr": 0.02632981334194625, + "acc_norm": 0.6040462427745664, + "acc_norm_stderr": 0.02632981334194625 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5766871165644172, + "acc_stderr": 0.03881891213334384, + "acc_norm": 0.5766871165644172, + "acc_norm_stderr": 0.03881891213334384 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6635802469135802, + "acc_stderr": 0.02628973494595293, + "acc_norm": 0.6635802469135802, + "acc_norm_stderr": 0.02628973494595293 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + 
"acc_stderr": 0.04793724854411019, + "acc_norm": 0.35, + "acc_norm_stderr": 0.04793724854411019 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7668393782383419, + "acc_stderr": 0.030516111371476008, + "acc_norm": 0.7668393782383419, + "acc_norm_stderr": 0.030516111371476008 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.4649122807017544, + "acc_stderr": 0.046920083813689104, + "acc_norm": 0.4649122807017544, + "acc_norm_stderr": 0.046920083813689104 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.7357798165137615, + "acc_stderr": 0.018904164171510203, + "acc_norm": 0.7357798165137615, + "acc_norm_stderr": 0.018904164171510203 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4126984126984127, + "acc_stderr": 0.04403438954768177, + "acc_norm": 0.4126984126984127, + "acc_norm_stderr": 0.04403438954768177 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.6143790849673203, + "acc_stderr": 0.027870745278290286, + "acc_norm": 0.6143790849673203, + "acc_norm_stderr": 0.027870745278290286 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.7, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.7, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7355371900826446, + "acc_stderr": 0.04026187527591207, + "acc_norm": 0.7355371900826446, + "acc_norm_stderr": 0.04026187527591207 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.6513157894736842, + "acc_stderr": 0.03878139888797611, + "acc_norm": 0.6513157894736842, + "acc_norm_stderr": 0.03878139888797611 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5849673202614379, + "acc_stderr": 0.01993362777685743, + "acc_norm": 0.5849673202614379, + "acc_norm_stderr": 0.01993362777685743 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.4219858156028369, + "acc_stderr": 0.02946218923337059, + "acc_norm": 0.4219858156028369, + "acc_norm_stderr": 0.02946218923337059 + }, + 
"harness|ko_mmlu_machine_learning|5": { + "acc": 0.45535714285714285, + "acc_stderr": 0.04726835553719099, + "acc_norm": 0.45535714285714285, + "acc_norm_stderr": 0.04726835553719099 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4861111111111111, + "acc_stderr": 0.034086558679777494, + "acc_norm": 0.4861111111111111, + "acc_norm_stderr": 0.034086558679777494 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.34413407821229053, + "acc_stderr": 0.015889221313307094, + "acc_norm": 0.34413407821229053, + "acc_norm_stderr": 0.015889221313307094 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.68, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.68, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5735294117647058, + "acc_stderr": 0.030042615832714874, + "acc_norm": 0.5735294117647058, + "acc_norm_stderr": 0.030042615832714874 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6857142857142857, + "acc_stderr": 0.029719329422417458, + "acc_norm": 0.6857142857142857, + "acc_norm_stderr": 0.029719329422417458 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7172995780590717, + "acc_stderr": 0.029312814153955924, + "acc_norm": 0.7172995780590717, + "acc_norm_stderr": 0.029312814153955924 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.4491525423728814, + "acc_stderr": 0.012704030518851474, + "acc_norm": 0.4491525423728814, + "acc_norm_stderr": 0.012704030518851474 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6568627450980392, + "acc_stderr": 0.03332139944668086, + "acc_norm": 0.6568627450980392, + "acc_norm_stderr": 0.03332139944668086 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6787878787878788, + "acc_stderr": 
0.03646204963253813, + "acc_norm": 0.6787878787878788, + "acc_norm_stderr": 0.03646204963253813 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.762545899632803, + "mc1_stderr": 0.014896277441041824, + "mc2": 0.8293269397357657, + "mc2_stderr": 0.012913501876887029 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5041322314049587, + "acc_stderr": 0.017189767032130817, + "acc_norm": 0.5171192443919717, + "acc_norm_stderr": 0.017180275246085633 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + 
"harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "MoaData/Myrrh_solar_10.7b_2.0_sft", + "model_sha": "a841cad97f5a4d9fb266ad6f2542a874e1720e95", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/MoaData/Myrrh_solar_10.7b_3.0/result_2024-04-26 01:04:18.json b/MoaData/Myrrh_solar_10.7b_3.0/result_2024-04-26 01:04:18.json new file mode 100644 index 0000000000000000000000000000000000000000..8b7d24090ae509942fa725be8a06c2f26b6bc866 --- /dev/null +++ b/MoaData/Myrrh_solar_10.7b_3.0/result_2024-04-26 01:04:18.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.7517064846416383, + "acc_stderr": 0.012624912868089753, + "acc_norm": 
0.7832764505119454, + "acc_norm_stderr": 0.012040156713481189 + }, + "harness|ko_hellaswag|10": { + "acc": 0.7186815375423222, + "acc_stderr": 0.0044872356579556925, + "acc_norm": 0.8103963353913562, + "acc_norm_stderr": 0.003911862797736166 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6842105263157895, + "acc_stderr": 0.03565079670708311, + "acc_norm": 0.6842105263157895, + "acc_norm_stderr": 0.03565079670708311 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.7766990291262136, + "acc_stderr": 0.04123553189891431, + "acc_norm": 0.7766990291262136, + "acc_norm_stderr": 0.04123553189891431 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6934865900383141, + "acc_stderr": 0.01648695289304151, + "acc_norm": 0.6934865900383141, + "acc_norm_stderr": 0.01648695289304151 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.5037037037037037, + "acc_stderr": 0.043192236258113324, + "acc_norm": 0.5037037037037037, + "acc_norm_stderr": 0.043192236258113324 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.35, + "acc_stderr": 0.04793724854411019, + "acc_norm": 0.35, + "acc_norm_stderr": 0.04793724854411019 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.502127659574468, + "acc_stderr": 0.03268572658667493, + "acc_norm": 0.502127659574468, + "acc_norm_stderr": 0.03268572658667493 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4819277108433735, + "acc_stderr": 0.03889951252827216, + "acc_norm": 0.4819277108433735, + "acc_norm_stderr": 0.03889951252827216 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6495176848874598, + "acc_stderr": 0.027098652621301744, + "acc_norm": 0.6495176848874598, + "acc_norm_stderr": 0.027098652621301744 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.6547085201793722, + "acc_stderr": 0.03191100192835795, + "acc_norm": 0.6547085201793722, + "acc_norm_stderr": 0.03191100192835795 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5801526717557252, + "acc_stderr": 0.04328577215262972, + "acc_norm": 
0.5801526717557252, + "acc_norm_stderr": 0.04328577215262972 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956914, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956914 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7323232323232324, + "acc_stderr": 0.03154449888270286, + "acc_norm": 0.7323232323232324, + "acc_norm_stderr": 0.03154449888270286 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5103448275862069, + "acc_stderr": 0.04165774775728762, + "acc_norm": 0.5103448275862069, + "acc_norm_stderr": 0.04165774775728762 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3431372549019608, + "acc_stderr": 0.04724007352383888, + "acc_norm": 0.3431372549019608, + "acc_norm_stderr": 0.04724007352383888 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6302521008403361, + "acc_stderr": 0.03135709599613591, + "acc_norm": 0.6302521008403361, + "acc_norm_stderr": 0.03135709599613591 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.6307692307692307, + "acc_stderr": 0.024468615241478923, + "acc_norm": 0.6307692307692307, + "acc_norm_stderr": 0.024468615241478923 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.69, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.69, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.04605661864718381, + "acc_norm": 0.3, + "acc_norm_stderr": 0.04605661864718381 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6851851851851852, + "acc_stderr": 0.04489931073591312, + "acc_norm": 0.6851851851851852, + "acc_norm_stderr": 0.04489931073591312 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.034819048444388045, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.034819048444388045 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6290322580645161, + "acc_stderr": 
0.027480541887953593, + "acc_norm": 0.6290322580645161, + "acc_norm_stderr": 0.027480541887953593 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.8162393162393162, + "acc_stderr": 0.025372139671722933, + "acc_norm": 0.8162393162393162, + "acc_norm_stderr": 0.025372139671722933 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5924528301886792, + "acc_stderr": 0.030242233800854498, + "acc_norm": 0.5924528301886792, + "acc_norm_stderr": 0.030242233800854498 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.04607582090719976, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.04607582090719976 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.35555555555555557, + "acc_stderr": 0.029185714949857403, + "acc_norm": 0.35555555555555557, + "acc_norm_stderr": 0.029185714949857403 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33774834437086093, + "acc_stderr": 0.038615575462551684, + "acc_norm": 0.33774834437086093, + "acc_norm_stderr": 0.038615575462551684 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7263681592039801, + "acc_stderr": 0.03152439186555403, + "acc_norm": 0.7263681592039801, + "acc_norm_stderr": 0.03152439186555403 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5606936416184971, + "acc_stderr": 0.03784271932887467, + "acc_norm": 0.5606936416184971, + "acc_norm_stderr": 0.03784271932887467 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.4365079365079365, + "acc_stderr": 0.0255428468174005, + "acc_norm": 0.4365079365079365, + "acc_norm_stderr": 0.0255428468174005 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.6111111111111112, + "acc_stderr": 0.04076663253918567, + "acc_norm": 0.6111111111111112, + "acc_norm_stderr": 0.04076663253918567 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + 
"harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.77, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.77, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.6098265895953757, + "acc_stderr": 0.02626167760780665, + "acc_norm": 0.6098265895953757, + "acc_norm_stderr": 0.02626167760780665 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5766871165644172, + "acc_stderr": 0.03881891213334384, + "acc_norm": 0.5766871165644172, + "acc_norm_stderr": 0.03881891213334384 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6697530864197531, + "acc_stderr": 0.026168298456732846, + "acc_norm": 0.6697530864197531, + "acc_norm_stderr": 0.026168298456732846 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.04793724854411019, + "acc_norm": 0.35, + "acc_norm_stderr": 0.04793724854411019 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7564766839378239, + "acc_stderr": 0.030975436386845426, + "acc_norm": 0.7564766839378239, + "acc_norm_stderr": 0.030975436386845426 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.47368421052631576, + "acc_stderr": 0.046970851366478626, + "acc_norm": 0.47368421052631576, + "acc_norm_stderr": 0.046970851366478626 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.7229357798165138, + "acc_stderr": 0.01918848259016954, + "acc_norm": 0.7229357798165138, + "acc_norm_stderr": 0.01918848259016954 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4126984126984127, + "acc_stderr": 0.04403438954768177, + "acc_norm": 0.4126984126984127, + "acc_norm_stderr": 0.04403438954768177 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.6111111111111112, + "acc_stderr": 0.027914055510467998, + "acc_norm": 0.6111111111111112, + "acc_norm_stderr": 0.027914055510467998 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.73, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.73, + "acc_norm_stderr": 0.044619604333847394 + }, 
+ "harness|ko_mmlu_international_law|5": { + "acc": 0.768595041322314, + "acc_stderr": 0.03849856098794088, + "acc_norm": 0.768595041322314, + "acc_norm_stderr": 0.03849856098794088 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.6381578947368421, + "acc_stderr": 0.03910525752849725, + "acc_norm": 0.6381578947368421, + "acc_norm_stderr": 0.03910525752849725 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5915032679738562, + "acc_stderr": 0.019886221037501865, + "acc_norm": 0.5915032679738562, + "acc_norm_stderr": 0.019886221037501865 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.4432624113475177, + "acc_stderr": 0.029634838473766006, + "acc_norm": 0.4432624113475177, + "acc_norm_stderr": 0.029634838473766006 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.41964285714285715, + "acc_stderr": 0.04684099321077106, + "acc_norm": 0.41964285714285715, + "acc_norm_stderr": 0.04684099321077106 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5, + "acc_stderr": 0.034099716973523674, + "acc_norm": 0.5, + "acc_norm_stderr": 0.034099716973523674 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.3553072625698324, + "acc_stderr": 0.01600698993480319, + "acc_norm": 0.3553072625698324, + "acc_norm_stderr": 0.01600698993480319 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.72, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.72, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5183823529411765, + "acc_stderr": 0.03035230339535196, + "acc_norm": 0.5183823529411765, + "acc_norm_stderr": 0.03035230339535196 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6408163265306123, + "acc_stderr": 0.03071356045510849, + "acc_norm": 0.6408163265306123, + 
"acc_norm_stderr": 0.03071356045510849 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7172995780590717, + "acc_stderr": 0.029312814153955924, + "acc_norm": 0.7172995780590717, + "acc_norm_stderr": 0.029312814153955924 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.4439374185136897, + "acc_stderr": 0.01268970816778768, + "acc_norm": 0.4439374185136897, + "acc_norm_stderr": 0.01268970816778768 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6617647058823529, + "acc_stderr": 0.0332057461294543, + "acc_norm": 0.6617647058823529, + "acc_norm_stderr": 0.0332057461294543 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6606060606060606, + "acc_stderr": 0.03697442205031595, + "acc_norm": 0.6606060606060606, + "acc_norm_stderr": 0.03697442205031595 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.7613219094247246, + "mc1_stderr": 0.014922629695456416, + "mc2": 0.8241161425829271, + "mc2_stderr": 0.013033693513794863 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.512396694214876, + "acc_stderr": 0.01718506973267654, + "acc_norm": 0.5407319952774499, + "acc_norm_stderr": 0.017133218276537673 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, 
+ "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "MoaData/Myrrh_solar_10.7b_3.0", + "model_sha": "2d083f69216152d5cd7cf0af44e0cfc9d118e2ce", 
+ "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/MrBananaHuman/kogpt2_small/result_2023-10-31 22:00:17.json b/MrBananaHuman/kogpt2_small/result_2023-10-31 22:00:17.json new file mode 100644 index 0000000000000000000000000000000000000000..dbf46a7089e5d61f632e51d438b6477b8bc3084a --- /dev/null +++ b/MrBananaHuman/kogpt2_small/result_2023-10-31 22:00:17.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.20136518771331058, + "acc_stderr": 0.011718927477444262, + "acc_norm": 0.23976109215017063, + "acc_norm_stderr": 0.012476304127453949 + }, + "harness|ko_hellaswag|10": { + "acc": 0.2751443935471022, + "acc_stderr": 0.004456743108170736, + "acc_norm": 0.2885879306910974, + "acc_norm_stderr": 0.00452179857792214 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.21052631578947367, + "acc_stderr": 0.0312678171466318, + "acc_norm": 0.21052631578947367, + "acc_norm_stderr": 0.0312678171466318 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.17475728155339806, + "acc_stderr": 0.037601780060266196, + "acc_norm": 0.17475728155339806, + "acc_norm_stderr": 0.037601780060266196 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2707535121328225, + "acc_stderr": 0.01588988836256049, + "acc_norm": 0.2707535121328225, + "acc_norm_stderr": 0.01588988836256049 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.31851851851851853, + "acc_stderr": 0.0402477840197711, + "acc_norm": 0.31851851851851853, + "acc_norm_stderr": 0.0402477840197711 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.19574468085106383, + "acc_stderr": 0.025937853139977148, + "acc_norm": 0.19574468085106383, + "acc_norm_stderr": 
0.025937853139977148 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.26506024096385544, + "acc_stderr": 0.03436024037944967, + "acc_norm": 0.26506024096385544, + "acc_norm_stderr": 0.03436024037944967 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2990353697749196, + "acc_stderr": 0.02600330111788514, + "acc_norm": 0.2990353697749196, + "acc_norm_stderr": 0.02600330111788514 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3632286995515695, + "acc_stderr": 0.032277904428505, + "acc_norm": 0.3632286995515695, + "acc_norm_stderr": 0.032277904428505 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2595419847328244, + "acc_stderr": 0.03844876139785271, + "acc_norm": 0.2595419847328244, + "acc_norm_stderr": 0.03844876139785271 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.21717171717171718, + "acc_stderr": 0.029376616484945633, + "acc_norm": 0.21717171717171718, + "acc_norm_stderr": 0.029376616484945633 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.31724137931034485, + "acc_stderr": 0.038783523721386236, + "acc_norm": 0.31724137931034485, + "acc_norm_stderr": 0.038783523721386236 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237654, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237654 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.25630252100840334, + "acc_stderr": 0.02835962087053395, + "acc_norm": 0.25630252100840334, + "acc_norm_stderr": 0.02835962087053395 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.21025641025641026, + "acc_stderr": 0.020660597485026928, + "acc_norm": 0.21025641025641026, + "acc_norm_stderr": 0.020660597485026928 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + 
"acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.19, + "acc_stderr": 0.03942772444036623, + "acc_norm": 0.19, + "acc_norm_stderr": 0.03942772444036623 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.24074074074074073, + "acc_stderr": 0.041331194402438376, + "acc_norm": 0.24074074074074073, + "acc_norm_stderr": 0.041331194402438376 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2955665024630542, + "acc_stderr": 0.032104944337514575, + "acc_norm": 0.2955665024630542, + "acc_norm_stderr": 0.032104944337514575 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3161290322580645, + "acc_stderr": 0.026450874489042767, + "acc_norm": 0.3161290322580645, + "acc_norm_stderr": 0.026450874489042767 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.19658119658119658, + "acc_stderr": 0.02603538609895129, + "acc_norm": 0.19658119658119658, + "acc_norm_stderr": 0.02603538609895129 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.22264150943396227, + "acc_stderr": 0.02560423347089911, + "acc_norm": 0.22264150943396227, + "acc_norm_stderr": 0.02560423347089911 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03955932861795833, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03955932861795833 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.23333333333333334, + "acc_stderr": 0.025787874220959323, + "acc_norm": 0.23333333333333334, + "acc_norm_stderr": 0.025787874220959323 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.32450331125827814, + "acc_stderr": 0.03822746937658754, + "acc_norm": 0.32450331125827814, + "acc_norm_stderr": 0.03822746937658754 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.23383084577114427, + "acc_stderr": 0.02992941540834839, + "acc_norm": 0.23383084577114427, + "acc_norm_stderr": 0.02992941540834839 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 
0.2254335260115607, + "acc_stderr": 0.03186209851641144, + "acc_norm": 0.2254335260115607, + "acc_norm_stderr": 0.03186209851641144 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2566137566137566, + "acc_stderr": 0.022494510767503154, + "acc_norm": 0.2566137566137566, + "acc_norm_stderr": 0.022494510767503154 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.24305555555555555, + "acc_stderr": 0.03586879280080341, + "acc_norm": 0.24305555555555555, + "acc_norm_stderr": 0.03586879280080341 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.19, + "acc_stderr": 0.03942772444036623, + "acc_norm": 0.19, + "acc_norm_stderr": 0.03942772444036623 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.24855491329479767, + "acc_stderr": 0.023267528432100174, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 0.023267528432100174 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.26380368098159507, + "acc_stderr": 0.03462419931615625, + "acc_norm": 0.26380368098159507, + "acc_norm_stderr": 0.03462419931615625 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.22839506172839505, + "acc_stderr": 0.023358211840626267, + "acc_norm": 0.22839506172839505, + "acc_norm_stderr": 0.023358211840626267 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.23316062176165803, + "acc_stderr": 0.03051611137147602, + "acc_norm": 0.23316062176165803, + "acc_norm_stderr": 0.03051611137147602 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.04049339297748141, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.04049339297748141 + }, + 
"harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.23302752293577983, + "acc_stderr": 0.01812566918086149, + "acc_norm": 0.23302752293577983, + "acc_norm_stderr": 0.01812566918086149 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.15079365079365079, + "acc_stderr": 0.03200686497287392, + "acc_norm": 0.15079365079365079, + "acc_norm_stderr": 0.03200686497287392 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.24836601307189543, + "acc_stderr": 0.02473998135511359, + "acc_norm": 0.24836601307189543, + "acc_norm_stderr": 0.02473998135511359 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036846, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036846 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.21487603305785125, + "acc_stderr": 0.03749492448709695, + "acc_norm": 0.21487603305785125, + "acc_norm_stderr": 0.03749492448709695 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.18421052631578946, + "acc_stderr": 0.0315469804508223, + "acc_norm": 0.18421052631578946, + "acc_norm_stderr": 0.0315469804508223 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.21895424836601307, + "acc_stderr": 0.016729937565537537, + "acc_norm": 0.21895424836601307, + "acc_norm_stderr": 0.016729937565537537 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2198581560283688, + "acc_stderr": 0.024706141070705477, + "acc_norm": 0.2198581560283688, + "acc_norm_stderr": 0.024706141070705477 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.24107142857142858, + "acc_stderr": 0.04059867246952687, + "acc_norm": 0.24107142857142858, + "acc_norm_stderr": 0.04059867246952687 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4675925925925926, + "acc_stderr": 0.03402801581358966, + "acc_norm": 0.4675925925925926, + "acc_norm_stderr": 0.03402801581358966 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 
0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4485294117647059, + "acc_stderr": 0.030211479609121593, + "acc_norm": 0.4485294117647059, + "acc_norm_stderr": 0.030211479609121593 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.24489795918367346, + "acc_stderr": 0.02752963744017493, + "acc_norm": 0.24489795918367346, + "acc_norm_stderr": 0.02752963744017493 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2616033755274262, + "acc_stderr": 0.028609516716994934, + "acc_norm": 0.2616033755274262, + "acc_norm_stderr": 0.028609516716994934 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.23533246414602346, + "acc_stderr": 0.010834432543912231, + "acc_norm": 0.23533246414602346, + "acc_norm_stderr": 0.010834432543912231 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.27941176470588236, + "acc_stderr": 0.031493281045079556, + "acc_norm": 0.27941176470588236, + "acc_norm_stderr": 0.031493281045079556 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03225078108306289, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03225078108306289 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.26560587515299877, + "mc1_stderr": 0.015461027627253602, + "mc2": 0.4869463974456989, + "mc2_stderr": 0.015810411656532714 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3412042502951594, + "acc_stderr": 0.016300368742137313, + "acc_norm": 0.4982290436835891, + "acc_norm_stderr": 0.01719024627623187 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 
0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + 
"harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "MrBananaHuman/kogpt2_small", + "model_sha": "3fb81f245efeec7837bc92af2b9ffaa5d1157a48", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/MrBananaHuman/polyglot-ko-5.8b/result_2023-11-06 09:18:33.json b/MrBananaHuman/polyglot-ko-5.8b/result_2023-11-06 09:18:33.json new file mode 100644 index 0000000000000000000000000000000000000000..7291f3ddda9eb20289b3dd196ef37ba8dea5a407 --- /dev/null +++ b/MrBananaHuman/polyglot-ko-5.8b/result_2023-11-06 09:18:33.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2986348122866894, + "acc_stderr": 0.013374078615068754, + "acc_norm": 0.34982935153583616, + "acc_norm_stderr": 0.013936809212158296 + }, + "harness|ko_hellaswag|10": { + "acc": 0.39294961163114916, + "acc_stderr": 0.004874076250521577, + "acc_norm": 0.4985062736506672, + "acc_norm_stderr": 0.004989759144812286 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.0330140594698725, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.0330140594698725 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.17475728155339806, + "acc_stderr": 0.037601780060266196, 
+ "acc_norm": 0.17475728155339806, + "acc_norm_stderr": 0.037601780060266196 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.26053639846743293, + "acc_stderr": 0.015696008563807096, + "acc_norm": 0.26053639846743293, + "acc_norm_stderr": 0.015696008563807096 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.038201699145179055, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.038201699145179055 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932269, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932269 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.25957446808510637, + "acc_stderr": 0.028659179374292326, + "acc_norm": 0.25957446808510637, + "acc_norm_stderr": 0.028659179374292326 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.24096385542168675, + "acc_stderr": 0.03329394119073529, + "acc_norm": 0.24096385542168675, + "acc_norm_stderr": 0.03329394119073529 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.21864951768488747, + "acc_stderr": 0.02347558141786111, + "acc_norm": 0.21864951768488747, + "acc_norm_stderr": 0.02347558141786111 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.2556053811659193, + "acc_stderr": 0.029275891003969927, + "acc_norm": 0.2556053811659193, + "acc_norm_stderr": 0.029275891003969927 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.1984732824427481, + "acc_stderr": 0.0349814938546247, + "acc_norm": 0.1984732824427481, + "acc_norm_stderr": 0.0349814938546247 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.2474747474747475, + "acc_stderr": 0.0307463007421245, + "acc_norm": 0.2474747474747475, + "acc_norm_stderr": 0.0307463007421245 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2413793103448276, + "acc_stderr": 
0.03565998174135303, + "acc_norm": 0.2413793103448276, + "acc_norm_stderr": 0.03565998174135303 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.04220773659171453, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.04220773659171453 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.25630252100840334, + "acc_stderr": 0.02835962087053395, + "acc_norm": 0.25630252100840334, + "acc_norm_stderr": 0.02835962087053395 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.20512820512820512, + "acc_stderr": 0.020473233173551986, + "acc_norm": 0.20512820512820512, + "acc_norm_stderr": 0.020473233173551986 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816505, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816505 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.04236511258094633, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.04236511258094633 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2315270935960591, + "acc_stderr": 0.029678333141444455, + "acc_norm": 0.2315270935960591, + "acc_norm_stderr": 0.029678333141444455 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.23548387096774193, + "acc_stderr": 0.02413763242933771, + "acc_norm": 0.23548387096774193, + "acc_norm_stderr": 0.02413763242933771 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.23931623931623933, + "acc_stderr": 0.02795182680892433, + "acc_norm": 0.23931623931623933, + "acc_norm_stderr": 0.02795182680892433 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3283018867924528, + "acc_stderr": 0.02890159361241178, + "acc_norm": 0.3283018867924528, + "acc_norm_stderr": 0.02890159361241178 + }, + "harness|ko_mmlu_public_relations|5": 
{ + "acc": 0.2909090909090909, + "acc_stderr": 0.04350271442923243, + "acc_norm": 0.2909090909090909, + "acc_norm_stderr": 0.04350271442923243 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.22592592592592592, + "acc_stderr": 0.02549753263960954, + "acc_norm": 0.22592592592592592, + "acc_norm_stderr": 0.02549753263960954 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.271523178807947, + "acc_stderr": 0.03631329803969654, + "acc_norm": 0.271523178807947, + "acc_norm_stderr": 0.03631329803969654 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.24378109452736318, + "acc_stderr": 0.030360490154014652, + "acc_norm": 0.24378109452736318, + "acc_norm_stderr": 0.030360490154014652 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2832369942196532, + "acc_stderr": 0.03435568056047875, + "acc_norm": 0.2832369942196532, + "acc_norm_stderr": 0.03435568056047875 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.23809523809523808, + "acc_stderr": 0.02193587808118476, + "acc_norm": 0.23809523809523808, + "acc_norm_stderr": 0.02193587808118476 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.25, + "acc_stderr": 0.03621034121889507, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03621034121889507 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.24277456647398843, + "acc_stderr": 0.023083658586984204, + "acc_norm": 0.24277456647398843, + "acc_norm_stderr": 0.023083658586984204 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2085889570552147, + "acc_stderr": 0.03192193448934725, + "acc_norm": 0.2085889570552147, + "acc_norm_stderr": 0.03192193448934725 + }, + "harness|ko_mmlu_prehistory|5": 
{ + "acc": 0.2345679012345679, + "acc_stderr": 0.02357688174400572, + "acc_norm": 0.2345679012345679, + "acc_norm_stderr": 0.02357688174400572 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384739, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384739 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.22279792746113988, + "acc_stderr": 0.030031147977641545, + "acc_norm": 0.22279792746113988, + "acc_norm_stderr": 0.030031147977641545 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.04185774424022057, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.04185774424022057 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.22568807339449543, + "acc_stderr": 0.017923087667803053, + "acc_norm": 0.22568807339449543, + "acc_norm_stderr": 0.017923087667803053 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.25396825396825395, + "acc_stderr": 0.03893259610604672, + "acc_norm": 0.25396825396825395, + "acc_norm_stderr": 0.03893259610604672 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.023152722439402303, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.023152722439402303 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.23140495867768596, + "acc_stderr": 0.03849856098794088, + "acc_norm": 0.23140495867768596, + "acc_norm_stderr": 0.03849856098794088 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.28289473684210525, + "acc_stderr": 0.03665349695640767, + "acc_norm": 0.28289473684210525, + "acc_norm_stderr": 0.03665349695640767 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.25980392156862747, + "acc_stderr": 0.01774089950917779, + "acc_norm": 0.25980392156862747, + "acc_norm_stderr": 0.01774089950917779 
+ }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.24822695035460993, + "acc_stderr": 0.025770015644290392, + "acc_norm": 0.24822695035460993, + "acc_norm_stderr": 0.025770015644290392 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.042878587513404544, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.042878587513404544 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.32407407407407407, + "acc_stderr": 0.03191923445686185, + "acc_norm": 0.32407407407407407, + "acc_norm_stderr": 0.03191923445686185 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.26927374301675977, + "acc_stderr": 0.014835616582882603, + "acc_norm": 0.26927374301675977, + "acc_norm_stderr": 0.014835616582882603 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.19, + "acc_stderr": 0.03942772444036624, + "acc_norm": 0.19, + "acc_norm_stderr": 0.03942772444036624 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.2977941176470588, + "acc_stderr": 0.02777829870154544, + "acc_norm": 0.2977941176470588, + "acc_norm_stderr": 0.02777829870154544 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.23673469387755103, + "acc_stderr": 0.027212835884073142, + "acc_norm": 0.23673469387755103, + "acc_norm_stderr": 0.027212835884073142 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.27848101265822783, + "acc_stderr": 0.029178682304842538, + "acc_norm": 0.27848101265822783, + "acc_norm_stderr": 0.029178682304842538 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.24315514993481094, + "acc_stderr": 0.010956556654417355, + "acc_norm": 0.24315514993481094, + "acc_norm_stderr": 0.010956556654417355 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2549019607843137, + "acc_stderr": 
0.030587591351604246, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.030587591351604246 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.21212121212121213, + "acc_stderr": 0.03192271569548299, + "acc_norm": 0.21212121212121213, + "acc_norm_stderr": 0.03192271569548299 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.26438188494492043, + "mc1_stderr": 0.015438211119522512, + "mc2": 0.4146123621762204, + "mc2_stderr": 0.015628722266635826 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.27390791027154665, + "acc_stderr": 0.01533249947479102, + "acc_norm": 0.29988193624557263, + "acc_norm_stderr": 0.01575344761542946 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + 
"harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "MrBananaHuman/polyglot-ko-5.8b", + "model_sha": "05f8800a617b483b1c502d8c965ff434e63d387c", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/NLPlab-skku/42dot_v1/result_2024-06-28 05:05:12.json b/NLPlab-skku/42dot_v1/result_2024-06-28 05:05:12.json new file mode 100644 index 0000000000000000000000000000000000000000..2366574c734bdab051a422c6c072b20f6db8c009 --- /dev/null +++ 
b/NLPlab-skku/42dot_v1/result_2024-06-28 05:05:12.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3037542662116041, + "acc_stderr": 0.013438909184778747, + "acc_norm": 0.34215017064846415, + "acc_norm_stderr": 0.013864152159177278 + }, + "harness|ko_hellaswag|10": { + "acc": 0.36247759410476, + "acc_stderr": 0.004797332565990083, + "acc_norm": 0.45120493925512845, + "acc_norm_stderr": 0.004965963647210318 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.21637426900584794, + "acc_stderr": 0.03158149539338733, + "acc_norm": 0.21637426900584794, + "acc_norm_stderr": 0.03158149539338733 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.18446601941747573, + "acc_stderr": 0.03840423627288276, + "acc_norm": 0.18446601941747573, + "acc_norm_stderr": 0.03840423627288276 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2886334610472541, + "acc_stderr": 0.016203792703197793, + "acc_norm": 0.2886334610472541, + "acc_norm_stderr": 0.016203792703197793 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2740740740740741, + "acc_stderr": 0.03853254836552003, + "acc_norm": 0.2740740740740741, + "acc_norm_stderr": 0.03853254836552003 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2680851063829787, + "acc_stderr": 0.028957342788342343, + "acc_norm": 0.2680851063829787, + "acc_norm_stderr": 0.028957342788342343 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3132530120481928, + "acc_stderr": 0.036108050180310235, + "acc_norm": 0.3132530120481928, + "acc_norm_stderr": 0.036108050180310235 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.24758842443729903, + "acc_stderr": 0.024513879973621967, + "acc_norm": 0.24758842443729903, + "acc_norm_stderr": 0.024513879973621967 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3721973094170404, + "acc_stderr": 
0.0324430528300873, + "acc_norm": 0.3721973094170404, + "acc_norm_stderr": 0.0324430528300873 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.20610687022900764, + "acc_stderr": 0.03547771004159463, + "acc_norm": 0.20610687022900764, + "acc_norm_stderr": 0.03547771004159463 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.033586181457325226, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.033586181457325226 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.1793103448275862, + "acc_stderr": 0.03196766433373187, + "acc_norm": 0.1793103448275862, + "acc_norm_stderr": 0.03196766433373187 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.04220773659171453, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.04220773659171453 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.31092436974789917, + "acc_stderr": 0.030066761582977945, + "acc_norm": 0.31092436974789917, + "acc_norm_stderr": 0.030066761582977945 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3230769230769231, + "acc_stderr": 0.02371088850197057, + "acc_norm": 0.3230769230769231, + "acc_norm_stderr": 0.02371088850197057 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.24074074074074073, + "acc_stderr": 0.041331194402438376, + "acc_norm": 0.24074074074074073, + "acc_norm_stderr": 0.041331194402438376 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 
0.1921182266009852, + "acc_stderr": 0.027719315709614768, + "acc_norm": 0.1921182266009852, + "acc_norm_stderr": 0.027719315709614768 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.27741935483870966, + "acc_stderr": 0.02547019683590005, + "acc_norm": 0.27741935483870966, + "acc_norm_stderr": 0.02547019683590005 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.21794871794871795, + "acc_stderr": 0.02704685763071668, + "acc_norm": 0.21794871794871795, + "acc_norm_stderr": 0.02704685763071668 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.27169811320754716, + "acc_stderr": 0.027377706624670713, + "acc_norm": 0.27169811320754716, + "acc_norm_stderr": 0.027377706624670713 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.22727272727272727, + "acc_stderr": 0.04013964554072774, + "acc_norm": 0.22727272727272727, + "acc_norm_stderr": 0.04013964554072774 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24444444444444444, + "acc_stderr": 0.026202766534652148, + "acc_norm": 0.24444444444444444, + "acc_norm_stderr": 0.026202766534652148 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.24503311258278146, + "acc_stderr": 0.03511807571804724, + "acc_norm": 0.24503311258278146, + "acc_norm_stderr": 0.03511807571804724 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.263681592039801, + "acc_stderr": 0.03115715086935557, + "acc_norm": 0.263681592039801, + "acc_norm_stderr": 0.03115715086935557 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3236994219653179, + "acc_stderr": 0.035676037996391706, + "acc_norm": 0.3236994219653179, + "acc_norm_stderr": 0.035676037996391706 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2566137566137566, + "acc_stderr": 0.022494510767503154, + "acc_norm": 0.2566137566137566, + "acc_norm_stderr": 0.022494510767503154 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2708333333333333, + "acc_stderr": 0.03716177437566017, + "acc_norm": 0.2708333333333333, + 
"acc_norm_stderr": 0.03716177437566017 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.22832369942196531, + "acc_stderr": 0.022598703804321624, + "acc_norm": 0.22832369942196531, + "acc_norm_stderr": 0.022598703804321624 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2883435582822086, + "acc_stderr": 0.035590395316173425, + "acc_norm": 0.2883435582822086, + "acc_norm_stderr": 0.035590395316173425 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.024922001168886335, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.024922001168886335 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.3626943005181347, + "acc_stderr": 0.034697137917043715, + "acc_norm": 0.3626943005181347, + "acc_norm_stderr": 0.034697137917043715 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.040969851398436716, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.040969851398436716 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3174311926605505, + "acc_stderr": 0.0199571521984605, + "acc_norm": 0.3174311926605505, + "acc_norm_stderr": 0.0199571521984605 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.19047619047619047, + "acc_stderr": 0.035122074123020514, + "acc_norm": 0.19047619047619047, + "acc_norm_stderr": 0.035122074123020514 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.22875816993464052, + "acc_stderr": 0.024051029739912258, + "acc_norm": 
0.22875816993464052, + "acc_norm_stderr": 0.024051029739912258 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036843, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036843 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2892561983471074, + "acc_stderr": 0.041391127276354626, + "acc_norm": 0.2892561983471074, + "acc_norm_stderr": 0.041391127276354626 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.20394736842105263, + "acc_stderr": 0.03279000406310049, + "acc_norm": 0.20394736842105263, + "acc_norm_stderr": 0.03279000406310049 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2173202614379085, + "acc_stderr": 0.016684820929148598, + "acc_norm": 0.2173202614379085, + "acc_norm_stderr": 0.016684820929148598 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2553191489361702, + "acc_stderr": 0.026011992930902013, + "acc_norm": 0.2553191489361702, + "acc_norm_stderr": 0.026011992930902013 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.21428571428571427, + "acc_stderr": 0.03894641120044793, + "acc_norm": 0.21428571428571427, + "acc_norm_stderr": 0.03894641120044793 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.03400603625538272, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.03400603625538272 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27150837988826815, + "acc_stderr": 0.014874252168095278, + "acc_norm": 0.27150837988826815, + "acc_norm_stderr": 0.014874252168095278 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4411764705882353, + 
"acc_stderr": 0.030161911930767105, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.030161911930767105 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.39183673469387753, + "acc_stderr": 0.03125127591089165, + "acc_norm": 0.39183673469387753, + "acc_norm_stderr": 0.03125127591089165 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2911392405063291, + "acc_stderr": 0.029571601065753374, + "acc_norm": 0.2911392405063291, + "acc_norm_stderr": 0.029571601065753374 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.24511082138200782, + "acc_stderr": 0.010986307870045514, + "acc_norm": 0.24511082138200782, + "acc_norm_stderr": 0.010986307870045514 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.03019028245350194, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.03019028245350194 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.24242424242424243, + "acc_stderr": 0.033464098810559534, + "acc_norm": 0.24242424242424243, + "acc_norm_stderr": 0.033464098810559534 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2692778457772338, + "mc1_stderr": 0.015528566637087293, + "mc2": 0.41795085174852425, + "mc2_stderr": 0.01543828023741821 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.23376623376623376, + "acc_stderr": 0.014550782587103133, + "acc_norm": 0.29515938606847697, + "acc_norm_stderr": 0.015681535229192203 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + 
"harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + 
"harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "NLPlab-skku/42dot_v1", + "model_sha": "366036f85f0d1ce26f866dc5bacb7ed0e805a837", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Nara-Lab/nallm-bart/result_2023-09-27 06:09:37.json b/Nara-Lab/nallm-bart/result_2023-09-27 06:09:37.json new file mode 100644 index 0000000000000000000000000000000000000000..19b0755feab761f48a37dee61246012bb98ce9d3 --- /dev/null +++ b/Nara-Lab/nallm-bart/result_2023-09-27 06:09:37.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.20733788395904437, + "acc_stderr": 0.011846905782971363, + "acc_norm": 0.2593856655290102, + "acc_norm_stderr": 0.012808273573927088 + }, + "harness|ko_hellaswag|10": { + "acc": 0.25124477195777734, + "acc_stderr": 0.00432842570099869, + "acc_norm": 0.2526389165504879, + "acc_norm_stderr": 0.004336375492801796 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.2982456140350877, + "acc_stderr": 0.03508771929824565, + "acc_norm": 0.2982456140350877, + "acc_norm_stderr": 0.03508771929824565 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.3786407766990291, + "acc_stderr": 0.04802694698258973, + "acc_norm": 0.3786407766990291, + "acc_norm_stderr": 0.04802694698258973 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2707535121328225, + "acc_stderr": 0.015889888362560486, + "acc_norm": 0.2707535121328225, + "acc_norm_stderr": 0.015889888362560486 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04072314811876837, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04072314811876837 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + 
"acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.20425531914893616, + "acc_stderr": 0.02635515841334941, + "acc_norm": 0.20425531914893616, + "acc_norm_stderr": 0.02635515841334941 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.20481927710843373, + "acc_stderr": 0.03141784291663926, + "acc_norm": 0.20481927710843373, + "acc_norm_stderr": 0.03141784291663926 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3054662379421222, + "acc_stderr": 0.026160584450140474, + "acc_norm": 0.3054662379421222, + "acc_norm_stderr": 0.026160584450140474 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.11659192825112108, + "acc_stderr": 0.021539639816244467, + "acc_norm": 0.11659192825112108, + "acc_norm_stderr": 0.021539639816244467 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2824427480916031, + "acc_stderr": 0.03948406125768361, + "acc_norm": 0.2824427480916031, + "acc_norm_stderr": 0.03948406125768361 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.19, + "acc_stderr": 0.03942772444036625, + "acc_norm": 0.19, + "acc_norm_stderr": 0.03942772444036625 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.2474747474747475, + "acc_stderr": 0.030746300742124505, + "acc_norm": 0.2474747474747475, + "acc_norm_stderr": 0.030746300742124505 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.296551724137931, + "acc_stderr": 0.03806142687309994, + "acc_norm": 0.296551724137931, + "acc_norm_stderr": 0.03806142687309994 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237657, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237657 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.2605042016806723, + "acc_stderr": 0.02851025151234192, + "acc_norm": 0.2605042016806723, + "acc_norm_stderr": 0.02851025151234192 + }, + 
"harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.34615384615384615, + "acc_stderr": 0.024121125416941183, + "acc_norm": 0.34615384615384615, + "acc_norm_stderr": 0.024121125416941183 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.21296296296296297, + "acc_stderr": 0.03957835471980981, + "acc_norm": 0.21296296296296297, + "acc_norm_stderr": 0.03957835471980981 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2955665024630542, + "acc_stderr": 0.032104944337514575, + "acc_norm": 0.2955665024630542, + "acc_norm_stderr": 0.032104944337514575 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.25161290322580643, + "acc_stderr": 0.024685979286239956, + "acc_norm": 0.25161290322580643, + "acc_norm_stderr": 0.024685979286239956 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.19658119658119658, + "acc_stderr": 0.02603538609895129, + "acc_norm": 0.19658119658119658, + "acc_norm_stderr": 0.02603538609895129 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.21509433962264152, + "acc_stderr": 0.02528839450289136, + "acc_norm": 0.21509433962264152, + "acc_norm_stderr": 0.02528839450289136 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.22727272727272727, + "acc_stderr": 0.040139645540727735, + "acc_norm": 0.22727272727272727, + "acc_norm_stderr": 0.040139645540727735 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.026842057873833706, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.026842057873833706 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.271523178807947, + "acc_stderr": 0.03631329803969653, + "acc_norm": 
0.271523178807947, + "acc_norm_stderr": 0.03631329803969653 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.26865671641791045, + "acc_stderr": 0.03134328358208954, + "acc_norm": 0.26865671641791045, + "acc_norm_stderr": 0.03134328358208954 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.24855491329479767, + "acc_stderr": 0.03295304696818317, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 0.03295304696818317 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2671957671957672, + "acc_stderr": 0.022789673145776575, + "acc_norm": 0.2671957671957672, + "acc_norm_stderr": 0.022789673145776575 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2569444444444444, + "acc_stderr": 0.03653946969442099, + "acc_norm": 0.2569444444444444, + "acc_norm_stderr": 0.03653946969442099 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.18, + "acc_stderr": 0.03861229196653695, + "acc_norm": 0.18, + "acc_norm_stderr": 0.03861229196653695 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.21965317919075145, + "acc_stderr": 0.0222896388526179, + "acc_norm": 0.21965317919075145, + "acc_norm_stderr": 0.0222896388526179 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2331288343558282, + "acc_stderr": 0.0332201579577674, + "acc_norm": 0.2331288343558282, + "acc_norm_stderr": 0.0332201579577674 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.29012345679012347, + "acc_stderr": 0.02525117393649501, + "acc_norm": 0.29012345679012347, + "acc_norm_stderr": 0.02525117393649501 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.36787564766839376, + "acc_stderr": 0.034801756684660366, + 
"acc_norm": 0.36787564766839376, + "acc_norm_stderr": 0.034801756684660366 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.03999423879281336, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.03999423879281336 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.344954128440367, + "acc_stderr": 0.02038060540506697, + "acc_norm": 0.344954128440367, + "acc_norm_stderr": 0.02038060540506697 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.15079365079365079, + "acc_stderr": 0.03200686497287392, + "acc_norm": 0.15079365079365079, + "acc_norm_stderr": 0.03200686497287392 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.25163398692810457, + "acc_stderr": 0.0248480182638752, + "acc_norm": 0.25163398692810457, + "acc_norm_stderr": 0.0248480182638752 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.1487603305785124, + "acc_stderr": 0.03248470083807195, + "acc_norm": 0.1487603305785124, + "acc_norm_stderr": 0.03248470083807195 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3026315789473684, + "acc_stderr": 0.03738520676119668, + "acc_norm": 0.3026315789473684, + "acc_norm_stderr": 0.03738520676119668 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2696078431372549, + "acc_stderr": 0.017952449196987866, + "acc_norm": 0.2696078431372549, + "acc_norm_stderr": 0.017952449196987866 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2624113475177305, + "acc_stderr": 0.026244920349843007, + "acc_norm": 0.2624113475177305, + "acc_norm_stderr": 0.026244920349843007 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.23214285714285715, + "acc_stderr": 0.04007341809755808, + "acc_norm": 0.23214285714285715, + "acc_norm_stderr": 0.04007341809755808 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 
0.21296296296296297, + "acc_stderr": 0.027920963147993662, + "acc_norm": 0.21296296296296297, + "acc_norm_stderr": 0.027920963147993662 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27262569832402234, + "acc_stderr": 0.014893391735249608, + "acc_norm": 0.27262569832402234, + "acc_norm_stderr": 0.014893391735249608 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.16544117647058823, + "acc_stderr": 0.02257177102549475, + "acc_norm": 0.16544117647058823, + "acc_norm_stderr": 0.02257177102549475 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.22040816326530613, + "acc_stderr": 0.026537045312145277, + "acc_norm": 0.22040816326530613, + "acc_norm_stderr": 0.026537045312145277 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.28270042194092826, + "acc_stderr": 0.02931281415395592, + "acc_norm": 0.28270042194092826, + "acc_norm_stderr": 0.02931281415395592 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.24641460234680573, + "acc_stderr": 0.011005971399927246, + "acc_norm": 0.24641460234680573, + "acc_norm_stderr": 0.011005971399927246 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.030587591351604243, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.030587591351604243 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.21212121212121213, + "acc_stderr": 0.03192271569548299, + "acc_norm": 0.21212121212121213, + "acc_norm_stderr": 0.03192271569548299 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.25091799265605874, + "mc1_stderr": 0.015176985027707684, + "mc2": 0.5034077613881154, + "mc2_stderr": 
0.016935414149113398 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.12750885478158205, + "acc_stderr": 0.011467414350410923, + "acc_norm": 0.42384887839433294, + "acc_norm_stderr": 0.01698981083462824 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 
1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Nara-Lab/nallm-bart", + "model_sha": "a3a334adbae67f890b4186b5ce5aa4c5d7fbceaf", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Nara-Lab/nallm-polyglot-ko-1.3b-base/result_2023-09-27 06:09:53.json b/Nara-Lab/nallm-polyglot-ko-1.3b-base/result_2023-09-27 06:09:53.json new file mode 100644 index 0000000000000000000000000000000000000000..146feda9a29fe40c313e7cdd3f4b64ceeb72fa04 --- /dev/null +++ b/Nara-Lab/nallm-polyglot-ko-1.3b-base/result_2023-09-27 06:09:53.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.25170648464163825, + "acc_stderr": 0.01268249633404297, + "acc_norm": 0.3054607508532423, + "acc_norm_stderr": 0.013460080478002496 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3325034853614818, + "acc_stderr": 0.004701474865207019, + "acc_norm": 0.4032065325632344, + "acc_norm_stderr": 0.004895390341445625 + }, + 
"harness|ko_mmlu_world_religions|5": { + "acc": 0.22807017543859648, + "acc_stderr": 0.03218093795602357, + "acc_norm": 0.22807017543859648, + "acc_norm_stderr": 0.03218093795602357 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.18446601941747573, + "acc_stderr": 0.03840423627288276, + "acc_norm": 0.18446601941747573, + "acc_norm_stderr": 0.03840423627288276 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.22860791826309068, + "acc_stderr": 0.015016884698539873, + "acc_norm": 0.22860791826309068, + "acc_norm_stderr": 0.015016884698539873 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2740740740740741, + "acc_stderr": 0.03853254836552003, + "acc_norm": 0.2740740740740741, + "acc_norm_stderr": 0.03853254836552003 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542129, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542129 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2170212765957447, + "acc_stderr": 0.026947483121496238, + "acc_norm": 0.2170212765957447, + "acc_norm_stderr": 0.026947483121496238 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3253012048192771, + "acc_stderr": 0.03647168523683227, + "acc_norm": 0.3253012048192771, + "acc_norm_stderr": 0.03647168523683227 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2765273311897106, + "acc_stderr": 0.025403832978179622, + "acc_norm": 0.2765273311897106, + "acc_norm_stderr": 0.025403832978179622 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3811659192825112, + "acc_stderr": 0.032596251184168264, + "acc_norm": 0.3811659192825112, + "acc_norm_stderr": 0.032596251184168264 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.29770992366412213, + "acc_stderr": 0.04010358942462203, + "acc_norm": 0.29770992366412213, + "acc_norm_stderr": 0.04010358942462203 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.34, + "acc_stderr": 0.047609522856952344, + "acc_norm": 0.34, + "acc_norm_stderr": 0.047609522856952344 + }, + 
"harness|ko_mmlu_high_school_geography|5": { + "acc": 0.25252525252525254, + "acc_stderr": 0.030954055470365914, + "acc_norm": 0.25252525252525254, + "acc_norm_stderr": 0.030954055470365914 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.25517241379310346, + "acc_stderr": 0.03632984052707842, + "acc_norm": 0.25517241379310346, + "acc_norm_stderr": 0.03632984052707842 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.18627450980392157, + "acc_stderr": 0.03873958714149354, + "acc_norm": 0.18627450980392157, + "acc_norm_stderr": 0.03873958714149354 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.23109243697478993, + "acc_stderr": 0.027381406927868963, + "acc_norm": 0.23109243697478993, + "acc_norm_stderr": 0.027381406927868963 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.23333333333333334, + "acc_stderr": 0.021444547301560476, + "acc_norm": 0.23333333333333334, + "acc_norm_stderr": 0.021444547301560476 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.18, + "acc_stderr": 0.038612291966536955, + "acc_norm": 0.18, + "acc_norm_stderr": 0.038612291966536955 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.23148148148148148, + "acc_stderr": 0.04077494709252627, + "acc_norm": 0.23148148148148148, + "acc_norm_stderr": 0.04077494709252627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.24630541871921183, + "acc_stderr": 0.030315099285617722, + "acc_norm": 0.24630541871921183, + "acc_norm_stderr": 0.030315099285617722 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2870967741935484, + "acc_stderr": 0.025736542745594528, + "acc_norm": 0.2870967741935484, + "acc_norm_stderr": 0.025736542745594528 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2948717948717949, + "acc_stderr": 0.029872577708891162, + "acc_norm": 
0.2948717948717949, + "acc_norm_stderr": 0.029872577708891162 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.24528301886792453, + "acc_stderr": 0.02648035717989569, + "acc_norm": 0.24528301886792453, + "acc_norm_stderr": 0.02648035717989569 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.22727272727272727, + "acc_stderr": 0.04013964554072775, + "acc_norm": 0.22727272727272727, + "acc_norm_stderr": 0.04013964554072775 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.027309140588230165, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.027309140588230165 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.24503311258278146, + "acc_stderr": 0.03511807571804724, + "acc_norm": 0.24503311258278146, + "acc_norm_stderr": 0.03511807571804724 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.29850746268656714, + "acc_stderr": 0.032357437893550424, + "acc_norm": 0.29850746268656714, + "acc_norm_stderr": 0.032357437893550424 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.21965317919075145, + "acc_stderr": 0.031568093627031744, + "acc_norm": 0.21965317919075145, + "acc_norm_stderr": 0.031568093627031744 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.022569897074918417, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.022569897074918417 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2361111111111111, + "acc_stderr": 0.03551446610810826, + "acc_norm": 0.2361111111111111, + "acc_norm_stderr": 0.03551446610810826 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932269, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932269 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.24855491329479767, + 
"acc_stderr": 0.023267528432100174, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 0.023267528432100174 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.26993865030674846, + "acc_stderr": 0.034878251684978906, + "acc_norm": 0.26993865030674846, + "acc_norm_stderr": 0.034878251684978906 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.26851851851851855, + "acc_stderr": 0.024659685185967277, + "acc_norm": 0.26851851851851855, + "acc_norm_stderr": 0.024659685185967277 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.24352331606217617, + "acc_stderr": 0.030975436386845436, + "acc_norm": 0.24352331606217617, + "acc_norm_stderr": 0.030975436386845436 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.04049339297748141, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.04049339297748141 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.26055045871559634, + "acc_stderr": 0.01881918203485007, + "acc_norm": 0.26055045871559634, + "acc_norm_stderr": 0.01881918203485007 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.25396825396825395, + "acc_stderr": 0.038932596106046734, + "acc_norm": 0.25396825396825395, + "acc_norm_stderr": 0.038932596106046734 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.238562091503268, + "acc_stderr": 0.024404394928087873, + "acc_norm": 0.238562091503268, + "acc_norm_stderr": 0.024404394928087873 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2396694214876033, + "acc_stderr": 0.03896878985070417, + "acc_norm": 0.2396694214876033, + "acc_norm_stderr": 0.03896878985070417 + }, + 
"harness|ko_mmlu_astronomy|5": { + "acc": 0.20394736842105263, + "acc_stderr": 0.03279000406310049, + "acc_norm": 0.20394736842105263, + "acc_norm_stderr": 0.03279000406310049 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.25980392156862747, + "acc_stderr": 0.017740899509177795, + "acc_norm": 0.25980392156862747, + "acc_norm_stderr": 0.017740899509177795 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.24822695035460993, + "acc_stderr": 0.025770015644290396, + "acc_norm": 0.24822695035460993, + "acc_norm_stderr": 0.025770015644290396 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.042878587513404544, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.042878587513404544 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3194444444444444, + "acc_stderr": 0.03179876342176849, + "acc_norm": 0.3194444444444444, + "acc_norm_stderr": 0.03179876342176849 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2558659217877095, + "acc_stderr": 0.014593620923210756, + "acc_norm": 0.2558659217877095, + "acc_norm_stderr": 0.014593620923210756 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.13, + "acc_stderr": 0.03379976689896309, + "acc_norm": 0.13, + "acc_norm_stderr": 0.03379976689896309 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816507, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816507 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3639705882352941, + "acc_stderr": 0.02922719246003203, + "acc_norm": 0.3639705882352941, + "acc_norm_stderr": 0.02922719246003203 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.24081632653061225, + "acc_stderr": 0.02737294220178817, + "acc_norm": 0.24081632653061225, + "acc_norm_stderr": 0.02737294220178817 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2616033755274262, + "acc_stderr": 0.028609516716994934, + 
"acc_norm": 0.2616033755274262, + "acc_norm_stderr": 0.028609516716994934 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.23663624511082137, + "acc_stderr": 0.010855137351572747, + "acc_norm": 0.23663624511082137, + "acc_norm_stderr": 0.010855137351572747 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.02933116229425173, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.02933116229425173 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.21212121212121213, + "acc_stderr": 0.03192271569548297, + "acc_norm": 0.21212121212121213, + "acc_norm_stderr": 0.03192271569548297 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.23133414932680538, + "mc1_stderr": 0.014761945174862685, + "mc2": 0.4101668259727761, + "mc2_stderr": 0.01554453474117709 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.26210153482880755, + "acc_stderr": 0.015119864670254154, + "acc_norm": 0.29043683589138136, + "acc_norm_stderr": 0.015607602569814628 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + 
"harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Nara-Lab/nallm-polyglot-ko-1.3b-base", + "model_sha": "8fd7fa9b1b5bbe857f65576e2e37bd600e10ce8c", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + 
"override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Nara-Lab/nallm-polyglot-ko-3.8b-base/result_2023-09-27 06:10:01.json b/Nara-Lab/nallm-polyglot-ko-3.8b-base/result_2023-09-27 06:10:01.json new file mode 100644 index 0000000000000000000000000000000000000000..402bee4be306875594dce70d42ee7d6f35b3e515 --- /dev/null +++ b/Nara-Lab/nallm-polyglot-ko-3.8b-base/result_2023-09-27 06:10:01.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.26109215017064846, + "acc_stderr": 0.012835523909473864, + "acc_norm": 0.32337883959044367, + "acc_norm_stderr": 0.013669421630012123 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3641704839673372, + "acc_stderr": 0.004802133511654235, + "acc_norm": 0.45727942640908187, + "acc_norm_stderr": 0.004971534874389935 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.21637426900584794, + "acc_stderr": 0.031581495393387345, + "acc_norm": 0.21637426900584794, + "acc_norm_stderr": 0.031581495393387345 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2524271844660194, + "acc_stderr": 0.04301250399690876, + "acc_norm": 0.2524271844660194, + "acc_norm_stderr": 0.04301250399690876 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2388250319284802, + "acc_stderr": 0.015246803197398687, + "acc_norm": 0.2388250319284802, + "acc_norm_stderr": 0.015246803197398687 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2814814814814815, + "acc_stderr": 0.03885004245800254, + "acc_norm": 0.2814814814814815, + "acc_norm_stderr": 0.03885004245800254 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2553191489361702, + "acc_stderr": 0.028504856470514196, + "acc_norm": 0.2553191489361702, + "acc_norm_stderr": 0.028504856470514196 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3313253012048193, + 
"acc_stderr": 0.03664314777288086, + "acc_norm": 0.3313253012048193, + "acc_norm_stderr": 0.03664314777288086 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.29260450160771706, + "acc_stderr": 0.025839898334877983, + "acc_norm": 0.29260450160771706, + "acc_norm_stderr": 0.025839898334877983 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.21973094170403587, + "acc_stderr": 0.0277901770643836, + "acc_norm": 0.21973094170403587, + "acc_norm_stderr": 0.0277901770643836 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.20610687022900764, + "acc_stderr": 0.03547771004159463, + "acc_norm": 0.20610687022900764, + "acc_norm_stderr": 0.03547771004159463 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.22727272727272727, + "acc_stderr": 0.029857515673386414, + "acc_norm": 0.22727272727272727, + "acc_norm_stderr": 0.029857515673386414 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.22758620689655173, + "acc_stderr": 0.03493950380131184, + "acc_norm": 0.22758620689655173, + "acc_norm_stderr": 0.03493950380131184 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.18627450980392157, + "acc_stderr": 0.03873958714149351, + "acc_norm": 0.18627450980392157, + "acc_norm_stderr": 0.03873958714149351 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.22268907563025211, + "acc_stderr": 0.027025433498882367, + "acc_norm": 0.22268907563025211, + "acc_norm_stderr": 0.027025433498882367 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.24102564102564103, + "acc_stderr": 0.021685546665333184, + "acc_norm": 0.24102564102564103, + "acc_norm_stderr": 0.021685546665333184 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + 
"harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.04330043749650742, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.04330043749650742 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.27586206896551724, + "acc_stderr": 0.03144712581678245, + "acc_norm": 0.27586206896551724, + "acc_norm_stderr": 0.03144712581678245 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2806451612903226, + "acc_stderr": 0.025560604721022884, + "acc_norm": 0.2806451612903226, + "acc_norm_stderr": 0.025560604721022884 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.27350427350427353, + "acc_stderr": 0.029202540153431194, + "acc_norm": 0.27350427350427353, + "acc_norm_stderr": 0.029202540153431194 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.26037735849056604, + "acc_stderr": 0.0270087660907081, + "acc_norm": 0.26037735849056604, + "acc_norm_stderr": 0.0270087660907081 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.23636363636363636, + "acc_stderr": 0.040693063197213775, + "acc_norm": 0.23636363636363636, + "acc_norm_stderr": 0.040693063197213775 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.02671924078371216, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.02671924078371216 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.23178807947019867, + "acc_stderr": 0.03445406271987054, + "acc_norm": 0.23178807947019867, + "acc_norm_stderr": 0.03445406271987054 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.22885572139303484, + "acc_stderr": 0.029705284056772436, + "acc_norm": 0.22885572139303484, + "acc_norm_stderr": 0.029705284056772436 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.21965317919075145, + "acc_stderr": 0.031568093627031744, + "acc_norm": 
0.21965317919075145, + "acc_norm_stderr": 0.031568093627031744 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.24603174603174602, + "acc_stderr": 0.022182037202948368, + "acc_norm": 0.24603174603174602, + "acc_norm_stderr": 0.022182037202948368 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.03476590104304134, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.03476590104304134 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.19, + "acc_stderr": 0.03942772444036623, + "acc_norm": 0.19, + "acc_norm_stderr": 0.03942772444036623 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036845, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036845 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2543352601156069, + "acc_stderr": 0.023445826276545546, + "acc_norm": 0.2543352601156069, + "acc_norm_stderr": 0.023445826276545546 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.24539877300613497, + "acc_stderr": 0.03380939813943354, + "acc_norm": 0.24539877300613497, + "acc_norm_stderr": 0.03380939813943354 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.19444444444444445, + "acc_stderr": 0.0220213661002202, + "acc_norm": 0.19444444444444445, + "acc_norm_stderr": 0.0220213661002202 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.24870466321243523, + "acc_stderr": 0.031195840877700293, + "acc_norm": 0.24870466321243523, + "acc_norm_stderr": 0.031195840877700293 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.04049339297748141, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.04049339297748141 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.22568807339449543, + "acc_stderr": 
0.01792308766780305, + "acc_norm": 0.22568807339449543, + "acc_norm_stderr": 0.01792308766780305 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.039325376803928724, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.039325376803928724 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.023929155517351284, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.023929155517351284 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909284, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909284 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.19834710743801653, + "acc_stderr": 0.03640118271990945, + "acc_norm": 0.19834710743801653, + "acc_norm_stderr": 0.03640118271990945 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.17763157894736842, + "acc_stderr": 0.03110318238312337, + "acc_norm": 0.17763157894736842, + "acc_norm_stderr": 0.03110318238312337 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.01716058723504635, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.01716058723504635 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.20212765957446807, + "acc_stderr": 0.02395666823785024, + "acc_norm": 0.20212765957446807, + "acc_norm_stderr": 0.02395666823785024 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.29464285714285715, + "acc_stderr": 0.043270409325787296, + "acc_norm": 0.29464285714285715, + "acc_norm_stderr": 0.043270409325787296 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.03275773486100999, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.03275773486100999 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.22569832402234638, + "acc_stderr": 0.013981395058455059, + "acc_norm": 0.22569832402234638, + "acc_norm_stderr": 0.013981395058455059 + }, + 
"harness|ko_mmlu_college_computer_science|5": { + "acc": 0.23, + "acc_stderr": 0.042295258468165065, + "acc_norm": 0.23, + "acc_norm_stderr": 0.042295258468165065 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.25735294117647056, + "acc_stderr": 0.026556519470041513, + "acc_norm": 0.25735294117647056, + "acc_norm_stderr": 0.026556519470041513 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.23673469387755103, + "acc_stderr": 0.027212835884073167, + "acc_norm": 0.23673469387755103, + "acc_norm_stderr": 0.027212835884073167 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.27848101265822783, + "acc_stderr": 0.029178682304842534, + "acc_norm": 0.27848101265822783, + "acc_norm_stderr": 0.029178682304842534 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2438070404172099, + "acc_stderr": 0.010966507972178475, + "acc_norm": 0.2438070404172099, + "acc_norm_stderr": 0.010966507972178475 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.25, + "acc_stderr": 0.03039153369274154, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03039153369274154 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2787878787878788, + "acc_stderr": 0.03501438706296781, + "acc_norm": 0.2787878787878788, + "acc_norm_stderr": 0.03501438706296781 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2423500611995104, + "mc1_stderr": 0.015000674373570338, + "mc2": 0.4144742012895836, + "mc2_stderr": 0.015299571868403075 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.27508854781582054, + "acc_stderr": 0.01535301075795265, + "acc_norm": 0.3105076741440378, + "acc_norm_stderr": 0.015908004528762017 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + 
"harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + 
"harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Nara-Lab/nallm-polyglot-ko-3.8b-base", + "model_sha": "8d20c1e3d77f2a9a58046b58fb229c809476d350", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/NotAiLOL/Yi-1.5-dolphin-9B/result_2024-05-17 20:47:22.json b/NotAiLOL/Yi-1.5-dolphin-9B/result_2024-05-17 20:47:22.json new file mode 100644 index 0000000000000000000000000000000000000000..ed0b78ea88953d352c87b204b5a079cdf7f1c469 --- /dev/null +++ b/NotAiLOL/Yi-1.5-dolphin-9B/result_2024-05-17 20:47:22.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.28071672354948807, + "acc_stderr": 0.013131238126975584, + "acc_norm": 0.3225255972696246, + "acc_norm_stderr": 0.013659980894277368 + }, + "harness|ko_hellaswag|10": { + "acc": 0.32463652658832903, + "acc_stderr": 0.004672819355838543, + "acc_norm": 0.3906592312288389, + "acc_norm_stderr": 0.004869010152280748 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4269005847953216, + "acc_stderr": 0.03793620616529917, + "acc_norm": 0.4269005847953216, + "acc_norm_stderr": 0.03793620616529917 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4563106796116505, + "acc_stderr": 0.04931801994220414, + "acc_norm": 0.4563106796116505, + "acc_norm_stderr": 0.04931801994220414 + }, + 
"harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4367816091954023, + "acc_stderr": 0.01773647083780067, + "acc_norm": 0.4367816091954023, + "acc_norm_stderr": 0.01773647083780067 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34074074074074073, + "acc_stderr": 0.04094376269996793, + "acc_norm": 0.34074074074074073, + "acc_norm_stderr": 0.04094376269996793 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39574468085106385, + "acc_stderr": 0.03196758697835362, + "acc_norm": 0.39574468085106385, + "acc_norm_stderr": 0.03196758697835362 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42771084337349397, + "acc_stderr": 0.038515976837185335, + "acc_norm": 0.42771084337349397, + "acc_norm_stderr": 0.038515976837185335 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4212218649517685, + "acc_stderr": 0.028043399858210638, + "acc_norm": 0.4212218649517685, + "acc_norm_stderr": 0.028043399858210638 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4260089686098655, + "acc_stderr": 0.033188332862172806, + "acc_norm": 0.4260089686098655, + "acc_norm_stderr": 0.033188332862172806 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4732824427480916, + "acc_stderr": 0.04379024936553893, + "acc_norm": 0.4732824427480916, + "acc_norm_stderr": 0.04379024936553893 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562429, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562429 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.4898989898989899, + "acc_stderr": 0.035616254886737454, + "acc_norm": 0.4898989898989899, + "acc_norm_stderr": 0.035616254886737454 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5310344827586206, + "acc_stderr": 0.04158632762097828, + "acc_norm": 0.5310344827586206, + "acc_norm_stderr": 0.04158632762097828 
+ }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.042801058373643945, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.042801058373643945 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.46638655462184875, + "acc_stderr": 0.03240501447690071, + "acc_norm": 0.46638655462184875, + "acc_norm_stderr": 0.03240501447690071 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.40512820512820513, + "acc_stderr": 0.024890471769938145, + "acc_norm": 0.40512820512820513, + "acc_norm_stderr": 0.024890471769938145 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5462962962962963, + "acc_stderr": 0.048129173245368216, + "acc_norm": 0.5462962962962963, + "acc_norm_stderr": 0.048129173245368216 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3645320197044335, + "acc_stderr": 0.0338640574606209, + "acc_norm": 0.3645320197044335, + "acc_norm_stderr": 0.0338640574606209 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.432258064516129, + "acc_stderr": 0.028181739720019413, + "acc_norm": 0.432258064516129, + "acc_norm_stderr": 0.028181739720019413 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6239316239316239, + "acc_stderr": 0.03173393632969482, + "acc_norm": 0.6239316239316239, + "acc_norm_stderr": 0.03173393632969482 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.41132075471698115, + "acc_stderr": 0.030285009259009805, + "acc_norm": 0.41132075471698115, + "acc_norm_stderr": 0.030285009259009805 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4, + "acc_stderr": 0.0469237132203465, + "acc_norm": 0.4, + "acc_norm_stderr": 
0.0469237132203465 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3851851851851852, + "acc_stderr": 0.02967090612463088, + "acc_norm": 0.3851851851851852, + "acc_norm_stderr": 0.02967090612463088 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.03780445850526732, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.03780445850526732 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5472636815920398, + "acc_stderr": 0.035197027175769155, + "acc_norm": 0.5472636815920398, + "acc_norm_stderr": 0.035197027175769155 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3699421965317919, + "acc_stderr": 0.03681229633394319, + "acc_norm": 0.3699421965317919, + "acc_norm_stderr": 0.03681229633394319 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.47619047619047616, + "acc_stderr": 0.025722097064388535, + "acc_norm": 0.47619047619047616, + "acc_norm_stderr": 0.025722097064388535 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3125, + "acc_stderr": 0.038760854559127644, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.038760854559127644 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4682080924855491, + "acc_stderr": 0.026864624366756643, + "acc_norm": 0.4682080924855491, + "acc_norm_stderr": 0.026864624366756643 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4110429447852761, + "acc_stderr": 0.038656978537853624, + "acc_norm": 0.4110429447852761, + "acc_norm_stderr": 0.038656978537853624 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4660493827160494, + "acc_stderr": 0.027756535257347663, + "acc_norm": 0.4660493827160494, + 
"acc_norm_stderr": 0.027756535257347663 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5025906735751295, + "acc_stderr": 0.03608390745384486, + "acc_norm": 0.5025906735751295, + "acc_norm_stderr": 0.03608390745384486 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.34210526315789475, + "acc_stderr": 0.04462917535336937, + "acc_norm": 0.34210526315789475, + "acc_norm_stderr": 0.04462917535336937 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.41834862385321103, + "acc_stderr": 0.021149548596443878, + "acc_norm": 0.41834862385321103, + "acc_norm_stderr": 0.021149548596443878 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4126984126984127, + "acc_stderr": 0.04403438954768177, + "acc_norm": 0.4126984126984127, + "acc_norm_stderr": 0.04403438954768177 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.46405228758169936, + "acc_stderr": 0.028555827516528787, + "acc_norm": 0.46405228758169936, + "acc_norm_stderr": 0.028555827516528787 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.45, + "acc_stderr": 0.049999999999999996, + "acc_norm": 0.45, + "acc_norm_stderr": 0.049999999999999996 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6611570247933884, + "acc_stderr": 0.043207678075366684, + "acc_norm": 0.6611570247933884, + "acc_norm_stderr": 0.043207678075366684 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40131578947368424, + "acc_stderr": 0.03988903703336284, + "acc_norm": 0.40131578947368424, + "acc_norm_stderr": 0.03988903703336284 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.34967320261437906, + "acc_stderr": 0.01929196189506637, + "acc_norm": 0.34967320261437906, + "acc_norm_stderr": 0.01929196189506637 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3475177304964539, + "acc_stderr": 
0.028406627809590957, + "acc_norm": 0.3475177304964539, + "acc_norm_stderr": 0.028406627809590957 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.45535714285714285, + "acc_stderr": 0.04726835553719099, + "acc_norm": 0.45535714285714285, + "acc_norm_stderr": 0.04726835553719099 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4027777777777778, + "acc_stderr": 0.033448873829978666, + "acc_norm": 0.4027777777777778, + "acc_norm_stderr": 0.033448873829978666 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.29832402234636873, + "acc_stderr": 0.015301840045129278, + "acc_norm": 0.29832402234636873, + "acc_norm_stderr": 0.015301840045129278 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.68, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.68, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.2757352941176471, + "acc_stderr": 0.027146271936625166, + "acc_norm": 0.2757352941176471, + "acc_norm_stderr": 0.027146271936625166 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4857142857142857, + "acc_stderr": 0.03199615232806287, + "acc_norm": 0.4857142857142857, + "acc_norm_stderr": 0.03199615232806287 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.45569620253164556, + "acc_stderr": 0.03241920684693335, + "acc_norm": 0.45569620253164556, + "acc_norm_stderr": 0.03241920684693335 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3324641460234681, + "acc_stderr": 0.012032022332260523, + "acc_norm": 0.3324641460234681, + "acc_norm_stderr": 0.012032022332260523 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.034107853389047184, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.034107853389047184 + }, + 
"harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.44242424242424244, + "acc_stderr": 0.03878372113711274, + "acc_norm": 0.44242424242424244, + "acc_norm_stderr": 0.03878372113711274 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3292533659730722, + "mc1_stderr": 0.016451264440068235, + "mc2": 0.5048923529042146, + "mc2_stderr": 0.015836773404828836 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.512396694214876, + "acc_stderr": 0.017185069732676538, + "acc_norm": 0.5442739079102715, + "acc_norm_stderr": 0.01712282914329265 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + 
"harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "NotAiLOL/Yi-1.5-dolphin-9B", + "model_sha": "75c21182bbdeb222b4fe6a25aafbc08293b47f18", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/NotoriousH2/42dot_1.3B_notolab/result_2024-01-03 18:01:56.json b/NotoriousH2/42dot_1.3B_notolab/result_2024-01-03 18:01:56.json new file mode 100644 index 0000000000000000000000000000000000000000..fa923d00d5e764cc5321d8fda50364fa617d60fa --- /dev/null +++ b/NotoriousH2/42dot_1.3B_notolab/result_2024-01-03 18:01:56.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 
0.21843003412969283, + "acc_stderr": 0.012074291605700985, + "acc_norm": 0.26023890784982934, + "acc_norm_stderr": 0.012821930225112566 + }, + "harness|ko_hellaswag|10": { + "acc": 0.293168691495718, + "acc_stderr": 0.004542847935606014, + "acc_norm": 0.32792272455686117, + "acc_norm_stderr": 0.004684970696902951 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.27485380116959063, + "acc_stderr": 0.034240429246915824, + "acc_norm": 0.27485380116959063, + "acc_norm_stderr": 0.034240429246915824 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2815533980582524, + "acc_stderr": 0.04453254836326467, + "acc_norm": 0.2815533980582524, + "acc_norm_stderr": 0.04453254836326467 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.21839080459770116, + "acc_stderr": 0.014774358319934486, + "acc_norm": 0.21839080459770116, + "acc_norm_stderr": 0.014774358319934486 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.03785714465066655, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.03785714465066655 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036843, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036843 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.23829787234042554, + "acc_stderr": 0.027851252973889802, + "acc_norm": 0.23829787234042554, + "acc_norm_stderr": 0.027851252973889802 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3373493975903614, + "acc_stderr": 0.03680783690727581, + "acc_norm": 0.3373493975903614, + "acc_norm_stderr": 0.03680783690727581 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2604501607717042, + "acc_stderr": 0.02492672322484554, + "acc_norm": 0.2604501607717042, + "acc_norm_stderr": 0.02492672322484554 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.19730941704035873, + "acc_stderr": 0.02670985334496796, + "acc_norm": 0.19730941704035873, + "acc_norm_stderr": 0.02670985334496796 + }, + 
"harness|ko_mmlu_human_sexuality|5": { + "acc": 0.22137404580152673, + "acc_stderr": 0.0364129708131373, + "acc_norm": 0.22137404580152673, + "acc_norm_stderr": 0.0364129708131373 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.23232323232323232, + "acc_stderr": 0.030088629490217487, + "acc_norm": 0.23232323232323232, + "acc_norm_stderr": 0.030088629490217487 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2689655172413793, + "acc_stderr": 0.03695183311650232, + "acc_norm": 0.2689655172413793, + "acc_norm_stderr": 0.03695183311650232 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.16666666666666666, + "acc_stderr": 0.03708284662416542, + "acc_norm": 0.16666666666666666, + "acc_norm_stderr": 0.03708284662416542 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.31092436974789917, + "acc_stderr": 0.030066761582977938, + "acc_norm": 0.31092436974789917, + "acc_norm_stderr": 0.030066761582977938 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.35128205128205126, + "acc_stderr": 0.024203665177902792, + "acc_norm": 0.35128205128205126, + "acc_norm_stderr": 0.024203665177902792 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036845, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036845 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.0401910747255735, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.0401910747255735 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2660098522167488, + "acc_stderr": 0.03108982600293752, + "acc_norm": 0.2660098522167488, + "acc_norm_stderr": 
0.03108982600293752 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2806451612903226, + "acc_stderr": 0.025560604721022884, + "acc_norm": 0.2806451612903226, + "acc_norm_stderr": 0.025560604721022884 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2606837606837607, + "acc_stderr": 0.028760348956523414, + "acc_norm": 0.2606837606837607, + "acc_norm_stderr": 0.028760348956523414 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.27169811320754716, + "acc_stderr": 0.027377706624670713, + "acc_norm": 0.27169811320754716, + "acc_norm_stderr": 0.027377706624670713 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.3090909090909091, + "acc_stderr": 0.044262946482000985, + "acc_norm": 0.3090909090909091, + "acc_norm_stderr": 0.044262946482000985 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.026719240783712156, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.026719240783712156 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.1986754966887417, + "acc_stderr": 0.03257847384436777, + "acc_norm": 0.1986754966887417, + "acc_norm_stderr": 0.03257847384436777 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.2736318407960199, + "acc_stderr": 0.03152439186555404, + "acc_norm": 0.2736318407960199, + "acc_norm_stderr": 0.03152439186555404 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2543352601156069, + "acc_stderr": 0.0332055644308557, + "acc_norm": 0.2543352601156069, + "acc_norm_stderr": 0.0332055644308557 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.022644212615525218, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.022644212615525218 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2916666666666667, + "acc_stderr": 0.038009680605548574, + "acc_norm": 0.2916666666666667, + "acc_norm_stderr": 0.038009680605548574 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.22, + 
"acc_stderr": 0.04163331998932269, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932269 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.27167630057803466, + "acc_stderr": 0.023948512905468348, + "acc_norm": 0.27167630057803466, + "acc_norm_stderr": 0.023948512905468348 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2883435582822086, + "acc_stderr": 0.03559039531617342, + "acc_norm": 0.2883435582822086, + "acc_norm_stderr": 0.03559039531617342 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.26851851851851855, + "acc_stderr": 0.02465968518596728, + "acc_norm": 0.26851851851851855, + "acc_norm_stderr": 0.02465968518596728 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.2849740932642487, + "acc_stderr": 0.03257714077709661, + "acc_norm": 0.2849740932642487, + "acc_norm_stderr": 0.03257714077709661 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.21052631578947367, + "acc_stderr": 0.03835153954399419, + "acc_norm": 0.21052631578947367, + "acc_norm_stderr": 0.03835153954399419 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.26422018348623855, + "acc_stderr": 0.018904164171510196, + "acc_norm": 0.26422018348623855, + "acc_norm_stderr": 0.018904164171510196 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.14285714285714285, + "acc_stderr": 0.03129843185743809, + "acc_norm": 0.14285714285714285, + "acc_norm_stderr": 0.03129843185743809 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.24836601307189543, + "acc_stderr": 0.024739981355113596, + "acc_norm": 0.24836601307189543, + "acc_norm_stderr": 0.024739981355113596 + }, + "harness|ko_mmlu_business_ethics|5": { + 
"acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2975206611570248, + "acc_stderr": 0.04173349148083499, + "acc_norm": 0.2975206611570248, + "acc_norm_stderr": 0.04173349148083499 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.23026315789473684, + "acc_stderr": 0.03426059424403165, + "acc_norm": 0.23026315789473684, + "acc_norm_stderr": 0.03426059424403165 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.23039215686274508, + "acc_stderr": 0.017035229258034038, + "acc_norm": 0.23039215686274508, + "acc_norm_stderr": 0.017035229258034038 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2127659574468085, + "acc_stderr": 0.024414612974307727, + "acc_norm": 0.2127659574468085, + "acc_norm_stderr": 0.024414612974307727 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.19642857142857142, + "acc_stderr": 0.03770970049347018, + "acc_norm": 0.19642857142857142, + "acc_norm_stderr": 0.03770970049347018 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.03141554629402548, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.03141554629402548 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24692737430167597, + "acc_stderr": 0.014422292204808852, + "acc_norm": 0.24692737430167597, + "acc_norm_stderr": 0.014422292204808852 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.16544117647058823, + "acc_stderr": 0.02257177102549475, + "acc_norm": 0.16544117647058823, + "acc_norm_stderr": 0.02257177102549475 + }, + 
"harness|ko_mmlu_security_studies|5": { + "acc": 0.2, + "acc_stderr": 0.025607375986579153, + "acc_norm": 0.2, + "acc_norm_stderr": 0.025607375986579153 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.27848101265822783, + "acc_stderr": 0.029178682304842548, + "acc_norm": 0.27848101265822783, + "acc_norm_stderr": 0.029178682304842548 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.24967405475880053, + "acc_stderr": 0.011054538377832327, + "acc_norm": 0.24967405475880053, + "acc_norm_stderr": 0.011054538377832327 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.25, + "acc_stderr": 0.03039153369274154, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03039153369274154 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.28484848484848485, + "acc_stderr": 0.03524390844511784, + "acc_norm": 0.28484848484848485, + "acc_norm_stderr": 0.03524390844511784 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2607099143206854, + "mc1_stderr": 0.015368841620766373, + "mc2": 0.44855810538452157, + "mc2_stderr": 0.016424785826621612 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3022432113341204, + "acc_stderr": 0.015788654863022375, + "acc_norm": 0.38016528925619836, + "acc_norm_stderr": 0.016689333596980115 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 
1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + 
"model_name": "NotoriousH2/42dot_1.3B_notolab", + "model_sha": "08bd5489239167a845e456bc7151746a82fb5ab3", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/NousResearch/Nous-Capybara-7B/result_2023-09-29 13:26:34.json b/NousResearch/Nous-Capybara-7B/result_2023-09-29 13:26:34.json new file mode 100644 index 0000000000000000000000000000000000000000..4ece585f21b49bd1fb5c7220c504d79850cdf4a1 --- /dev/null +++ b/NousResearch/Nous-Capybara-7B/result_2023-09-29 13:26:34.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2790102389078498, + "acc_stderr": 0.013106784883601348, + "acc_norm": 0.318259385665529, + "acc_norm_stderr": 0.013611993916971453 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3414658434574786, + "acc_stderr": 0.0047323221721537485, + "acc_norm": 0.41884086835291773, + "acc_norm_stderr": 0.004923609207861538 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.45614035087719296, + "acc_stderr": 0.03820042586602967, + "acc_norm": 0.45614035087719296, + "acc_norm_stderr": 0.03820042586602967 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.3786407766990291, + "acc_stderr": 0.048026946982589726, + "acc_norm": 0.3786407766990291, + "acc_norm_stderr": 0.048026946982589726 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3933588761174968, + "acc_stderr": 0.017468556724503162, + "acc_norm": 0.3933588761174968, + "acc_norm_stderr": 0.017468556724503162 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04072314811876837, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04072314811876837 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 
0.34893617021276596, + "acc_stderr": 0.031158522131357797, + "acc_norm": 0.34893617021276596, + "acc_norm_stderr": 0.031158522131357797 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.30120481927710846, + "acc_stderr": 0.03571609230053481, + "acc_norm": 0.30120481927710846, + "acc_norm_stderr": 0.03571609230053481 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3408360128617363, + "acc_stderr": 0.026920841260776162, + "acc_norm": 0.3408360128617363, + "acc_norm_stderr": 0.026920841260776162 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.37668161434977576, + "acc_stderr": 0.032521134899291884, + "acc_norm": 0.37668161434977576, + "acc_norm_stderr": 0.032521134899291884 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.44274809160305345, + "acc_stderr": 0.0435644720266507, + "acc_norm": 0.44274809160305345, + "acc_norm_stderr": 0.0435644720266507 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.37373737373737376, + "acc_stderr": 0.03446897738659333, + "acc_norm": 0.37373737373737376, + "acc_norm_stderr": 0.03446897738659333 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.33793103448275863, + "acc_stderr": 0.03941707632064889, + "acc_norm": 0.33793103448275863, + "acc_norm_stderr": 0.03941707632064889 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.17647058823529413, + "acc_stderr": 0.0379328118530781, + "acc_norm": 0.17647058823529413, + "acc_norm_stderr": 0.0379328118530781 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3445378151260504, + "acc_stderr": 0.030868682604121633, + "acc_norm": 0.3445378151260504, + "acc_norm_stderr": 0.030868682604121633 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3871794871794872, + "acc_stderr": 0.024697216930878944, + "acc_norm": 0.3871794871794872, + "acc_norm_stderr": 
0.024697216930878944 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.04750077341199985, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.04750077341199985 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.28078817733990147, + "acc_stderr": 0.03161856335358611, + "acc_norm": 0.28078817733990147, + "acc_norm_stderr": 0.03161856335358611 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3580645161290323, + "acc_stderr": 0.02727389059430064, + "acc_norm": 0.3580645161290323, + "acc_norm_stderr": 0.02727389059430064 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.03255326307272486, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.03255326307272486 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.33584905660377357, + "acc_stderr": 0.029067220146644826, + "acc_norm": 0.33584905660377357, + "acc_norm_stderr": 0.029067220146644826 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.43636363636363634, + "acc_stderr": 0.04750185058907297, + "acc_norm": 0.43636363636363634, + "acc_norm_stderr": 0.04750185058907297 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.027309140588230193, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.027309140588230193 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2052980132450331, + "acc_stderr": 0.03297986648473834, + "acc_norm": 0.2052980132450331, + "acc_norm_stderr": 0.03297986648473834 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.48756218905472637, + "acc_stderr": 0.035344398485395806, + "acc_norm": 
0.48756218905472637, + "acc_norm_stderr": 0.035344398485395806 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.34104046242774566, + "acc_stderr": 0.036146654241808254, + "acc_norm": 0.34104046242774566, + "acc_norm_stderr": 0.036146654241808254 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.022644212615525218, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.022644212615525218 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3472222222222222, + "acc_stderr": 0.039812405437178615, + "acc_norm": 0.3472222222222222, + "acc_norm_stderr": 0.039812405437178615 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.36127167630057805, + "acc_stderr": 0.02586220185227789, + "acc_norm": 0.36127167630057805, + "acc_norm_stderr": 0.02586220185227789 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3619631901840491, + "acc_stderr": 0.037757007291414416, + "acc_norm": 0.3619631901840491, + "acc_norm_stderr": 0.037757007291414416 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.39197530864197533, + "acc_stderr": 0.027163686038271215, + "acc_norm": 0.39197530864197533, + "acc_norm_stderr": 0.027163686038271215 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.46113989637305697, + "acc_stderr": 0.03597524411734578, + "acc_norm": 0.46113989637305697, + "acc_norm_stderr": 0.03597524411734578 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 
0.0414243971948936, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.0414243971948936 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3688073394495413, + "acc_stderr": 0.020686227560729548, + "acc_norm": 0.3688073394495413, + "acc_norm_stderr": 0.020686227560729548 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.29365079365079366, + "acc_stderr": 0.040735243221471255, + "acc_norm": 0.29365079365079366, + "acc_norm_stderr": 0.040735243221471255 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4084967320261438, + "acc_stderr": 0.028146405993096358, + "acc_norm": 0.4084967320261438, + "acc_norm_stderr": 0.028146405993096358 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5371900826446281, + "acc_stderr": 0.04551711196104218, + "acc_norm": 0.5371900826446281, + "acc_norm_stderr": 0.04551711196104218 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.34868421052631576, + "acc_stderr": 0.03878139888797611, + "acc_norm": 0.34868421052631576, + "acc_norm_stderr": 0.03878139888797611 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.32516339869281047, + "acc_stderr": 0.01895088677080631, + "acc_norm": 0.32516339869281047, + "acc_norm_stderr": 0.01895088677080631 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2978723404255319, + "acc_stderr": 0.027281608344469414, + "acc_norm": 0.2978723404255319, + "acc_norm_stderr": 0.027281608344469414 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3125, + "acc_stderr": 0.043994650575715215, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.043994650575715215 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2916666666666667, + "acc_stderr": 0.030998666304560538, + "acc_norm": 0.2916666666666667, + "acc_norm_stderr": 0.030998666304560538 + }, + "harness|ko_mmlu_moral_scenarios|5": { + 
"acc": 0.2446927374301676, + "acc_stderr": 0.014378169884098424, + "acc_norm": 0.2446927374301676, + "acc_norm_stderr": 0.014378169884098424 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3639705882352941, + "acc_stderr": 0.02922719246003203, + "acc_norm": 0.3639705882352941, + "acc_norm_stderr": 0.02922719246003203 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3673469387755102, + "acc_stderr": 0.030862144921087558, + "acc_norm": 0.3673469387755102, + "acc_norm_stderr": 0.030862144921087558 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.43037974683544306, + "acc_stderr": 0.03223017195937599, + "acc_norm": 0.43037974683544306, + "acc_norm_stderr": 0.03223017195937599 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.26140808344198174, + "acc_stderr": 0.011222528169771316, + "acc_norm": 0.26140808344198174, + "acc_norm_stderr": 0.011222528169771316 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.37254901960784315, + "acc_stderr": 0.03393388584958406, + "acc_norm": 0.37254901960784315, + "acc_norm_stderr": 0.03393388584958406 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.41818181818181815, + "acc_stderr": 0.038517163193983954, + "acc_norm": 0.41818181818181815, + "acc_norm_stderr": 0.038517163193983954 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2962056303549572, + "mc1_stderr": 0.015983595101811392, + "mc2": 0.4712238434154724, + "mc2_stderr": 0.016160223034293618 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.24557260920897284, + "acc_stderr": 0.014798357154972828, + "acc_norm": 0.27390791027154665, + "acc_norm_stderr": 
0.01533249947479102 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + 
"harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "NousResearch/Nous-Capybara-7B", + "model_sha": "f57199fd0189f2472ee567279e9fb292beeee343", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/NousResearch/Nous-Hermes-2-SOLAR-10.7B/result_2024-05-30 16:49:36.json b/NousResearch/Nous-Hermes-2-SOLAR-10.7B/result_2024-05-30 16:49:36.json new file mode 100644 index 0000000000000000000000000000000000000000..6b59bd9aa7f7a86b022aec5031d48daee234a205 --- /dev/null +++ b/NousResearch/Nous-Hermes-2-SOLAR-10.7B/result_2024-05-30 16:49:36.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.42150170648464164, + "acc_stderr": 0.014430197069326018, + "acc_norm": 0.46928327645051193, + "acc_norm_stderr": 0.014583792546304037 + }, + "harness|ko_hellaswag|10": { + "acc": 0.42710615415255926, + "acc_stderr": 0.004936470085238481, + "acc_norm": 0.563931487751444, + "acc_norm_stderr": 0.00494882450135549 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6023391812865497, + "acc_stderr": 0.0375363895576169, + "acc_norm": 0.6023391812865497, + "acc_norm_stderr": 
0.0375363895576169 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5825242718446602, + "acc_stderr": 0.048828405482122375, + "acc_norm": 0.5825242718446602, + "acc_norm_stderr": 0.048828405482122375 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6053639846743295, + "acc_stderr": 0.017478464305911556, + "acc_norm": 0.6053639846743295, + "acc_norm_stderr": 0.017478464305911556 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.04171654161354544, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.04171654161354544 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.46808510638297873, + "acc_stderr": 0.03261936918467382, + "acc_norm": 0.46808510638297873, + "acc_norm_stderr": 0.03261936918467382 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.463855421686747, + "acc_stderr": 0.03882310850890594, + "acc_norm": 0.463855421686747, + "acc_norm_stderr": 0.03882310850890594 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5498392282958199, + "acc_stderr": 0.028256660723360173, + "acc_norm": 0.5498392282958199, + "acc_norm_stderr": 0.028256660723360173 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5605381165919282, + "acc_stderr": 0.033310925110381785, + "acc_norm": 0.5605381165919282, + "acc_norm_stderr": 0.033310925110381785 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5725190839694656, + "acc_stderr": 0.04338920305792401, + "acc_norm": 0.5725190839694656, + "acc_norm_stderr": 0.04338920305792401 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6616161616161617, + "acc_stderr": 0.033711241426263035, + "acc_norm": 0.6616161616161617, + "acc_norm_stderr": 
0.033711241426263035 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.496551724137931, + "acc_stderr": 0.04166567577101579, + "acc_norm": 0.496551724137931, + "acc_norm_stderr": 0.04166567577101579 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.30392156862745096, + "acc_stderr": 0.045766654032077615, + "acc_norm": 0.30392156862745096, + "acc_norm_stderr": 0.045766654032077615 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5672268907563025, + "acc_stderr": 0.032183581077426124, + "acc_norm": 0.5672268907563025, + "acc_norm_stderr": 0.032183581077426124 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4948717948717949, + "acc_stderr": 0.025349672906838667, + "acc_norm": 0.4948717948717949, + "acc_norm_stderr": 0.025349672906838667 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.57, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.57, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5833333333333334, + "acc_stderr": 0.04766075165356462, + "acc_norm": 0.5833333333333334, + "acc_norm_stderr": 0.04766075165356462 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.43349753694581283, + "acc_stderr": 0.034867317274198714, + "acc_norm": 0.43349753694581283, + "acc_norm_stderr": 0.034867317274198714 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5645161290322581, + "acc_stderr": 0.02820622559150274, + "acc_norm": 0.5645161290322581, + "acc_norm_stderr": 0.02820622559150274 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7991452991452992, + "acc_stderr": 0.026246772946890477, + "acc_norm": 0.7991452991452992, + "acc_norm_stderr": 0.026246772946890477 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5018867924528302, + "acc_stderr": 0.030772653642075657, + 
"acc_norm": 0.5018867924528302, + "acc_norm_stderr": 0.030772653642075657 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.35555555555555557, + "acc_stderr": 0.029185714949857406, + "acc_norm": 0.35555555555555557, + "acc_norm_stderr": 0.029185714949857406 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.03780445850526732, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.03780445850526732 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.681592039800995, + "acc_stderr": 0.032941184790540944, + "acc_norm": 0.681592039800995, + "acc_norm_stderr": 0.032941184790540944 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4393063583815029, + "acc_stderr": 0.03784271932887467, + "acc_norm": 0.4393063583815029, + "acc_norm_stderr": 0.03784271932887467 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3915343915343915, + "acc_stderr": 0.02513809138885111, + "acc_norm": 0.3915343915343915, + "acc_norm_stderr": 0.02513809138885111 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4652777777777778, + "acc_stderr": 0.04171115858181618, + "acc_norm": 0.4652777777777778, + "acc_norm_stderr": 0.04171115858181618 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.64, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.64, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.569364161849711, + "acc_stderr": 0.026658800273672376, + "acc_norm": 0.569364161849711, + "acc_norm_stderr": 0.026658800273672376 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5214723926380368, + 
"acc_stderr": 0.03924746876751129, + "acc_norm": 0.5214723926380368, + "acc_norm_stderr": 0.03924746876751129 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5925925925925926, + "acc_stderr": 0.027339546640662727, + "acc_norm": 0.5925925925925926, + "acc_norm_stderr": 0.027339546640662727 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6994818652849741, + "acc_stderr": 0.03308818594415749, + "acc_norm": 0.6994818652849741, + "acc_norm_stderr": 0.03308818594415749 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.4298245614035088, + "acc_stderr": 0.04657047260594964, + "acc_norm": 0.4298245614035088, + "acc_norm_stderr": 0.04657047260594964 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6330275229357798, + "acc_stderr": 0.02066467565952053, + "acc_norm": 0.6330275229357798, + "acc_norm_stderr": 0.02066467565952053 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4365079365079365, + "acc_stderr": 0.044359328928514664, + "acc_norm": 0.4365079365079365, + "acc_norm_stderr": 0.044359328928514664 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5130718954248366, + "acc_stderr": 0.028620130800700246, + "acc_norm": 0.5130718954248366, + "acc_norm_stderr": 0.028620130800700246 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7355371900826446, + "acc_stderr": 0.04026187527591206, + "acc_norm": 0.7355371900826446, + "acc_norm_stderr": 0.04026187527591206 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5789473684210527, + "acc_stderr": 0.04017901275981748, + "acc_norm": 0.5789473684210527, + "acc_norm_stderr": 0.04017901275981748 + }, + "harness|ko_mmlu_professional_psychology|5": { + 
"acc": 0.4950980392156863, + "acc_stderr": 0.02022686271003946, + "acc_norm": 0.4950980392156863, + "acc_norm_stderr": 0.02022686271003946 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.35815602836879434, + "acc_stderr": 0.028602085862759412, + "acc_norm": 0.35815602836879434, + "acc_norm_stderr": 0.028602085862759412 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4017857142857143, + "acc_stderr": 0.04653333146973646, + "acc_norm": 0.4017857142857143, + "acc_norm_stderr": 0.04653333146973646 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.49537037037037035, + "acc_stderr": 0.03409825519163572, + "acc_norm": 0.49537037037037035, + "acc_norm_stderr": 0.03409825519163572 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.18435754189944134, + "acc_stderr": 0.012969152811883452, + "acc_norm": 0.18435754189944134, + "acc_norm_stderr": 0.012969152811883452 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.68, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.68, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.45955882352941174, + "acc_stderr": 0.030273325077345755, + "acc_norm": 0.45955882352941174, + "acc_norm_stderr": 0.030273325077345755 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.636734693877551, + "acc_stderr": 0.030789051139030806, + "acc_norm": 0.636734693877551, + "acc_norm_stderr": 0.030789051139030806 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7172995780590717, + "acc_stderr": 0.029312814153955924, + "acc_norm": 0.7172995780590717, + "acc_norm_stderr": 0.029312814153955924 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.40221642764015647, + "acc_stderr": 0.012523646856180178, + "acc_norm": 0.40221642764015647, + 
"acc_norm_stderr": 0.012523646856180178 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6470588235294118, + "acc_stderr": 0.03354092437591519, + "acc_norm": 0.6470588235294118, + "acc_norm_stderr": 0.03354092437591519 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6181818181818182, + "acc_stderr": 0.03793713171165633, + "acc_norm": 0.6181818181818182, + "acc_norm_stderr": 0.03793713171165633 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.34149326805385555, + "mc1_stderr": 0.016600688619950826, + "mc2": 0.48863771668734807, + "mc2_stderr": 0.01620640944492232 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5584415584415584, + "acc_stderr": 0.017072525875563103, + "acc_norm": 0.5690672963400236, + "acc_norm_stderr": 0.01702555819604314 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "NousResearch/Nous-Hermes-2-SOLAR-10.7B", + "model_sha": "14c1fbe2f71acdcd58247b30d5439bd572d52386", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/NousResearch/Nous-Hermes-llama-2-7b/result_2023-09-29 13:26:53.json b/NousResearch/Nous-Hermes-llama-2-7b/result_2023-09-29 
13:26:53.json new file mode 100644 index 0000000000000000000000000000000000000000..ca6d1b3acd3781c68af6c2d0046a17eaf3a48c44 --- /dev/null +++ b/NousResearch/Nous-Hermes-llama-2-7b/result_2023-09-29 13:26:53.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.27559726962457337, + "acc_stderr": 0.013057169655761838, + "acc_norm": 0.3191126279863481, + "acc_norm_stderr": 0.013621696119173304 + }, + "harness|ko_hellaswag|10": { + "acc": 0.33718382792272455, + "acc_stderr": 0.004717820714968746, + "acc_norm": 0.416849233220474, + "acc_norm_stderr": 0.004920298437884909 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4853801169590643, + "acc_stderr": 0.038331852752130205, + "acc_norm": 0.4853801169590643, + "acc_norm_stderr": 0.038331852752130205 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.30097087378640774, + "acc_stderr": 0.045416094465039476, + "acc_norm": 0.30097087378640774, + "acc_norm_stderr": 0.045416094465039476 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.39208173690932313, + "acc_stderr": 0.017458524050147643, + "acc_norm": 0.39208173690932313, + "acc_norm_stderr": 0.017458524050147643 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.31851851851851853, + "acc_stderr": 0.04024778401977111, + "acc_norm": 0.31851851851851853, + "acc_norm_stderr": 0.04024778401977111 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.31063829787234043, + "acc_stderr": 0.03025123757921317, + "acc_norm": 0.31063829787234043, + "acc_norm_stderr": 0.03025123757921317 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.27710843373493976, + "acc_stderr": 0.03484331592680589, + "acc_norm": 0.27710843373493976, + "acc_norm_stderr": 0.03484331592680589 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3440514469453376, + "acc_stderr": 0.02698147804364803, + 
"acc_norm": 0.3440514469453376, + "acc_norm_stderr": 0.02698147804364803 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.336322869955157, + "acc_stderr": 0.03170882426845501, + "acc_norm": 0.336322869955157, + "acc_norm_stderr": 0.03170882426845501 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3816793893129771, + "acc_stderr": 0.0426073515764456, + "acc_norm": 0.3816793893129771, + "acc_norm_stderr": 0.0426073515764456 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.32323232323232326, + "acc_stderr": 0.03332299921070645, + "acc_norm": 0.32323232323232326, + "acc_norm_stderr": 0.03332299921070645 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3586206896551724, + "acc_stderr": 0.039966295748767186, + "acc_norm": 0.3586206896551724, + "acc_norm_stderr": 0.039966295748767186 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.17647058823529413, + "acc_stderr": 0.03793281185307808, + "acc_norm": 0.17647058823529413, + "acc_norm_stderr": 0.03793281185307808 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3277310924369748, + "acc_stderr": 0.030489911417673227, + "acc_norm": 0.3277310924369748, + "acc_norm_stderr": 0.030489911417673227 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.31025641025641026, + "acc_stderr": 0.02345467488940429, + "acc_norm": 0.31025641025641026, + "acc_norm_stderr": 0.02345467488940429 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4444444444444444, + "acc_stderr": 
0.04803752235190192, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.04803752235190192 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3103448275862069, + "acc_stderr": 0.03255086769970103, + "acc_norm": 0.3103448275862069, + "acc_norm_stderr": 0.03255086769970103 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3419354838709677, + "acc_stderr": 0.02698528957655274, + "acc_norm": 0.3419354838709677, + "acc_norm_stderr": 0.02698528957655274 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.49572649572649574, + "acc_stderr": 0.032754892643821316, + "acc_norm": 0.49572649572649574, + "acc_norm_stderr": 0.032754892643821316 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.32075471698113206, + "acc_stderr": 0.028727502957880263, + "acc_norm": 0.32075471698113206, + "acc_norm_stderr": 0.028727502957880263 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.3181818181818182, + "acc_stderr": 0.04461272175910507, + "acc_norm": 0.3181818181818182, + "acc_norm_stderr": 0.04461272175910507 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.27037037037037037, + "acc_stderr": 0.027080372815145668, + "acc_norm": 0.27037037037037037, + "acc_norm_stderr": 0.027080372815145668 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2251655629139073, + "acc_stderr": 0.03410435282008937, + "acc_norm": 0.2251655629139073, + "acc_norm_stderr": 0.03410435282008937 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.472636815920398, + "acc_stderr": 0.03530235517334682, + "acc_norm": 0.472636815920398, + "acc_norm_stderr": 0.03530235517334682 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2947976878612717, + "acc_stderr": 0.034765996075164785, + "acc_norm": 0.2947976878612717, + "acc_norm_stderr": 0.034765996075164785 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2566137566137566, + "acc_stderr": 0.022494510767503154, + "acc_norm": 0.2566137566137566, + "acc_norm_stderr": 
0.022494510767503154 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3125, + "acc_stderr": 0.038760854559127644, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.038760854559127644 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384739, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384739 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.3554913294797688, + "acc_stderr": 0.025770292082977247, + "acc_norm": 0.3554913294797688, + "acc_norm_stderr": 0.025770292082977247 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.31901840490797545, + "acc_stderr": 0.03661997551073836, + "acc_norm": 0.31901840490797545, + "acc_norm_stderr": 0.03661997551073836 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.02686949074481525, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.02686949074481525 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.37823834196891193, + "acc_stderr": 0.034998072761933376, + "acc_norm": 0.37823834196891193, + "acc_norm_stderr": 0.034998072761933376 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.21929824561403508, + "acc_stderr": 0.03892431106518753, + "acc_norm": 0.21929824561403508, + "acc_norm_stderr": 0.03892431106518753 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3376146788990826, + "acc_stderr": 0.020275265986638903, + "acc_norm": 0.3376146788990826, + "acc_norm_stderr": 0.020275265986638903 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.042163702135578345, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 
0.042163702135578345 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.35947712418300654, + "acc_stderr": 0.027475969910660956, + "acc_norm": 0.35947712418300654, + "acc_norm_stderr": 0.027475969910660956 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5041322314049587, + "acc_stderr": 0.045641987674327526, + "acc_norm": 0.5041322314049587, + "acc_norm_stderr": 0.045641987674327526 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.03583496176361061, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.03583496176361061 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3088235294117647, + "acc_stderr": 0.018690850273595284, + "acc_norm": 0.3088235294117647, + "acc_norm_stderr": 0.018690850273595284 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.29432624113475175, + "acc_stderr": 0.02718712701150381, + "acc_norm": 0.29432624113475175, + "acc_norm_stderr": 0.02718712701150381 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.32142857142857145, + "acc_stderr": 0.04432804055291519, + "acc_norm": 0.32142857142857145, + "acc_norm_stderr": 0.04432804055291519 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.39814814814814814, + "acc_stderr": 0.033384734032074016, + "acc_norm": 0.39814814814814814, + "acc_norm_stderr": 0.033384734032074016 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2435754189944134, + "acc_stderr": 0.014355911964767857, + "acc_norm": 0.2435754189944134, + "acc_norm_stderr": 0.014355911964767857 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + 
"acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.40441176470588236, + "acc_stderr": 0.029812630701569743, + "acc_norm": 0.40441176470588236, + "acc_norm_stderr": 0.029812630701569743 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.47346938775510206, + "acc_stderr": 0.03196412734523272, + "acc_norm": 0.47346938775510206, + "acc_norm_stderr": 0.03196412734523272 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.3459915611814346, + "acc_stderr": 0.03096481058878671, + "acc_norm": 0.3459915611814346, + "acc_norm_stderr": 0.03096481058878671 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.26727509778357234, + "acc_stderr": 0.011302607515637513, + "acc_norm": 0.26727509778357234, + "acc_norm_stderr": 0.011302607515637513 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.03256685484460388, + "acc_norm": 0.3137254901960784, + "acc_norm_stderr": 0.03256685484460388 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.36363636363636365, + "acc_stderr": 0.03756335775187897, + "acc_norm": 0.36363636363636365, + "acc_norm_stderr": 0.03756335775187897 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.31334149326805383, + "mc1_stderr": 0.016238065069059605, + "mc2": 0.48487503732289583, + "mc2_stderr": 0.015806306421646696 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2644628099173554, + "acc_stderr": 0.015163499477892407, + "acc_norm": 0.30342384887839435, + "acc_norm_stderr": 0.015806072717909566 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + 
"harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 
1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "NousResearch/Nous-Hermes-llama-2-7b", + "model_sha": "b7c3ec54b754175e006ef75696a2ba3802697078", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/NousResearch/Yarn-Mistral-7b-128k/result_2023-12-08 01:35:30.json b/NousResearch/Yarn-Mistral-7b-128k/result_2023-12-08 01:35:30.json new file mode 100644 index 0000000000000000000000000000000000000000..85d713e1fb68463ee09838647af150e13bb1ba77 --- /dev/null +++ b/NousResearch/Yarn-Mistral-7b-128k/result_2023-12-08 01:35:30.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.318259385665529, + "acc_stderr": 0.013611993916971453, + "acc_norm": 0.3583617747440273, + "acc_norm_stderr": 0.01401288333485986 + }, + "harness|ko_hellaswag|10": { + "acc": 0.359788886675961, + "acc_stderr": 0.004789575163418655, + "acc_norm": 0.4539932284405497, + "acc_norm_stderr": 0.0049686135393092485 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.47368421052631576, + "acc_stderr": 0.038295098689947286, + "acc_norm": 0.47368421052631576, + "acc_norm_stderr": 0.038295098689947286 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5339805825242718, + "acc_stderr": 0.04939291447273482, + "acc_norm": 0.5339805825242718, + "acc_norm_stderr": 0.04939291447273482 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.44189016602809705, + "acc_stderr": 0.017758800534214417, + "acc_norm": 0.44189016602809705, + "acc_norm_stderr": 0.017758800534214417 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 
0.3851851851851852, + "acc_stderr": 0.042039210401562783, + "acc_norm": 0.3851851851851852, + "acc_norm_stderr": 0.042039210401562783 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3574468085106383, + "acc_stderr": 0.03132941789476425, + "acc_norm": 0.3574468085106383, + "acc_norm_stderr": 0.03132941789476425 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42771084337349397, + "acc_stderr": 0.03851597683718533, + "acc_norm": 0.42771084337349397, + "acc_norm_stderr": 0.03851597683718533 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4180064308681672, + "acc_stderr": 0.02801365189199507, + "acc_norm": 0.4180064308681672, + "acc_norm_stderr": 0.02801365189199507 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.42152466367713004, + "acc_stderr": 0.03314190222110656, + "acc_norm": 0.42152466367713004, + "acc_norm_stderr": 0.03314190222110656 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4351145038167939, + "acc_stderr": 0.04348208051644858, + "acc_norm": 0.4351145038167939, + "acc_norm_stderr": 0.04348208051644858 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.494949494949495, + "acc_stderr": 0.035621707606254015, + "acc_norm": 0.494949494949495, + "acc_norm_stderr": 0.035621707606254015 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4413793103448276, + "acc_stderr": 0.04137931034482758, + "acc_norm": 0.4413793103448276, + "acc_norm_stderr": 0.04137931034482758 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.04389869956808777, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.04389869956808777 + }, + 
"harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.48739495798319327, + "acc_stderr": 0.03246816765752174, + "acc_norm": 0.48739495798319327, + "acc_norm_stderr": 0.03246816765752174 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.46923076923076923, + "acc_stderr": 0.02530295889085015, + "acc_norm": 0.46923076923076923, + "acc_norm_stderr": 0.02530295889085015 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.56, + "acc_stderr": 0.0498887651569859, + "acc_norm": 0.56, + "acc_norm_stderr": 0.0498887651569859 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4537037037037037, + "acc_stderr": 0.048129173245368216, + "acc_norm": 0.4537037037037037, + "acc_norm_stderr": 0.048129173245368216 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39901477832512317, + "acc_stderr": 0.03445487686264714, + "acc_norm": 0.39901477832512317, + "acc_norm_stderr": 0.03445487686264714 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.46774193548387094, + "acc_stderr": 0.02838474778881333, + "acc_norm": 0.46774193548387094, + "acc_norm_stderr": 0.02838474778881333 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6794871794871795, + "acc_stderr": 0.030572811310299607, + "acc_norm": 0.6794871794871795, + "acc_norm_stderr": 0.030572811310299607 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4037735849056604, + "acc_stderr": 0.030197611600197946, + "acc_norm": 0.4037735849056604, + "acc_norm_stderr": 0.030197611600197946 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.43636363636363634, + "acc_stderr": 0.04750185058907297, + "acc_norm": 0.43636363636363634, + "acc_norm_stderr": 0.04750185058907297 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.34074074074074073, + "acc_stderr": 0.02889774874113114, + "acc_norm": 
0.34074074074074073, + "acc_norm_stderr": 0.02889774874113114 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.03757949922943343, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.03757949922943343 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6069651741293532, + "acc_stderr": 0.0345368246603156, + "acc_norm": 0.6069651741293532, + "acc_norm_stderr": 0.0345368246603156 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.42196531791907516, + "acc_stderr": 0.03765746693865149, + "acc_norm": 0.42196531791907516, + "acc_norm_stderr": 0.03765746693865149 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.024677862841332783, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.024677862841332783 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3472222222222222, + "acc_stderr": 0.039812405437178615, + "acc_norm": 0.3472222222222222, + "acc_norm_stderr": 0.039812405437178615 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.56, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.56, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4479768786127168, + "acc_stderr": 0.026772990653361816, + "acc_norm": 0.4479768786127168, + "acc_norm_stderr": 0.026772990653361816 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5276073619631901, + "acc_stderr": 0.039223782906109894, + "acc_norm": 0.5276073619631901, + "acc_norm_stderr": 0.039223782906109894 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4228395061728395, + "acc_stderr": 0.027487472980871598, + "acc_norm": 0.4228395061728395, + "acc_norm_stderr": 0.027487472980871598 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + 
"acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.45595854922279794, + "acc_stderr": 0.03594413711272437, + "acc_norm": 0.45595854922279794, + "acc_norm_stderr": 0.03594413711272437 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04434600701584926, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04434600701584926 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.47339449541284406, + "acc_stderr": 0.021406952688151588, + "acc_norm": 0.47339449541284406, + "acc_norm_stderr": 0.021406952688151588 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3412698412698413, + "acc_stderr": 0.042407993275749255, + "acc_norm": 0.3412698412698413, + "acc_norm_stderr": 0.042407993275749255 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4477124183006536, + "acc_stderr": 0.02847293847803353, + "acc_norm": 0.4477124183006536, + "acc_norm_stderr": 0.02847293847803353 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.44, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.44, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6611570247933884, + "acc_stderr": 0.04320767807536669, + "acc_norm": 0.6611570247933884, + "acc_norm_stderr": 0.04320767807536669 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.34210526315789475, + "acc_stderr": 0.03860731599316092, + "acc_norm": 0.34210526315789475, + "acc_norm_stderr": 0.03860731599316092 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3545751633986928, + "acc_stderr": 0.0193533605475537, + "acc_norm": 0.3545751633986928, + "acc_norm_stderr": 0.0193533605475537 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3262411347517731, + "acc_stderr": 0.027968453043563168, + "acc_norm": 0.3262411347517731, + "acc_norm_stderr": 0.027968453043563168 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 
0.4017857142857143, + "acc_stderr": 0.04653333146973646, + "acc_norm": 0.4017857142857143, + "acc_norm_stderr": 0.04653333146973646 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4305555555555556, + "acc_stderr": 0.03376922151252335, + "acc_norm": 0.4305555555555556, + "acc_norm_stderr": 0.03376922151252335 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.31843575418994413, + "acc_stderr": 0.015581008080360276, + "acc_norm": 0.31843575418994413, + "acc_norm_stderr": 0.015581008080360276 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.64, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.64, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.39705882352941174, + "acc_stderr": 0.029722152099280058, + "acc_norm": 0.39705882352941174, + "acc_norm_stderr": 0.029722152099280058 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4775510204081633, + "acc_stderr": 0.03197694118713673, + "acc_norm": 0.4775510204081633, + "acc_norm_stderr": 0.03197694118713673 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.540084388185654, + "acc_stderr": 0.03244246810187913, + "acc_norm": 0.540084388185654, + "acc_norm_stderr": 0.03244246810187913 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3109517601043025, + "acc_stderr": 0.011822252917799207, + "acc_norm": 0.3109517601043025, + "acc_norm_stderr": 0.011822252917799207 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4264705882352941, + "acc_stderr": 0.034711579079534254, + "acc_norm": 0.4264705882352941, + "acc_norm_stderr": 0.034711579079534254 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3878787878787879, + "acc_stderr": 0.038049136539710114, + "acc_norm": 0.3878787878787879, + 
"acc_norm_stderr": 0.038049136539710114 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2998776009791922, + "mc1_stderr": 0.016040352966713606, + "mc2": 0.4818640958328875, + "mc2_stderr": 0.015614192429758816 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3919716646989374, + "acc_stderr": 0.016784332119424088, + "acc_norm": 0.4946871310507674, + "acc_norm_stderr": 0.01718938362722969 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + 
"harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "NousResearch/Yarn-Mistral-7b-128k", + "model_sha": "d09f1f8ed437d61c1aff94c1beabee554843dcdd", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/NousResearch/Yarn-Mistral-7b-64k/result_2023-12-08 01:34:46.json b/NousResearch/Yarn-Mistral-7b-64k/result_2023-12-08 01:34:46.json new file mode 100644 index 0000000000000000000000000000000000000000..45326443718417c61cb236792c0749408938e507 --- /dev/null +++ b/NousResearch/Yarn-Mistral-7b-64k/result_2023-12-08 01:34:46.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3225255972696246, + "acc_stderr": 0.013659980894277373, + "acc_norm": 0.35921501706484643, + "acc_norm_stderr": 0.014020224155839155 + }, + 
"harness|ko_hellaswag|10": { + "acc": 0.3604859589723163, + "acc_stderr": 0.004791601975612767, + "acc_norm": 0.4561840270862378, + "acc_norm_stderr": 0.0049705853282976204 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.47953216374269003, + "acc_stderr": 0.0383161053282193, + "acc_norm": 0.47953216374269003, + "acc_norm_stderr": 0.0383161053282193 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5339805825242718, + "acc_stderr": 0.04939291447273482, + "acc_norm": 0.5339805825242718, + "acc_norm_stderr": 0.04939291447273482 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4393358876117497, + "acc_stderr": 0.017747874245683616, + "acc_norm": 0.4393358876117497, + "acc_norm_stderr": 0.017747874245683616 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.042446332383532286, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.042446332383532286 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3702127659574468, + "acc_stderr": 0.03156564682236784, + "acc_norm": 0.3702127659574468, + "acc_norm_stderr": 0.03156564682236784 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42168674698795183, + "acc_stderr": 0.03844453181770917, + "acc_norm": 0.42168674698795183, + "acc_norm_stderr": 0.03844453181770917 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.42443729903536975, + "acc_stderr": 0.028071928247946205, + "acc_norm": 0.42443729903536975, + "acc_norm_stderr": 0.028071928247946205 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.40358744394618834, + "acc_stderr": 0.03292802819330315, + "acc_norm": 0.40358744394618834, + "acc_norm_stderr": 0.03292802819330315 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4351145038167939, + "acc_stderr": 0.043482080516448585, + "acc_norm": 0.4351145038167939, + "acc_norm_stderr": 
0.043482080516448585 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.494949494949495, + "acc_stderr": 0.035621707606254015, + "acc_norm": 0.494949494949495, + "acc_norm_stderr": 0.035621707606254015 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4413793103448276, + "acc_stderr": 0.041379310344827586, + "acc_norm": 0.4413793103448276, + "acc_norm_stderr": 0.041379310344827586 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.04336432707993179, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.04336432707993179 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4831932773109244, + "acc_stderr": 0.03246013680375308, + "acc_norm": 0.4831932773109244, + "acc_norm_stderr": 0.03246013680375308 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4717948717948718, + "acc_stderr": 0.02531063925493391, + "acc_norm": 0.4717948717948718, + "acc_norm_stderr": 0.02531063925493391 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.56, + "acc_stderr": 0.0498887651569859, + "acc_norm": 0.56, + "acc_norm_stderr": 0.0498887651569859 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.04820403072760627, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.04820403072760627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4039408866995074, + "acc_stderr": 0.034524539038220385, + "acc_norm": 0.4039408866995074, + "acc_norm_stderr": 0.034524539038220385 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4612903225806452, + "acc_stderr": 0.02835863485983692, + "acc_norm": 
0.4612903225806452, + "acc_norm_stderr": 0.02835863485983692 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6794871794871795, + "acc_stderr": 0.030572811310299607, + "acc_norm": 0.6794871794871795, + "acc_norm_stderr": 0.030572811310299607 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4, + "acc_stderr": 0.030151134457776296, + "acc_norm": 0.4, + "acc_norm_stderr": 0.030151134457776296 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.44545454545454544, + "acc_stderr": 0.04760548821460325, + "acc_norm": 0.44545454545454544, + "acc_norm_stderr": 0.04760548821460325 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.028742040903948492, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.028742040903948492 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.03757949922943343, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.03757949922943343 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6019900497512438, + "acc_stderr": 0.03461199429040013, + "acc_norm": 0.6019900497512438, + "acc_norm_stderr": 0.03461199429040013 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.41040462427745666, + "acc_stderr": 0.03750757044895537, + "acc_norm": 0.41040462427745666, + "acc_norm_stderr": 0.03750757044895537 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.024677862841332783, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.024677862841332783 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3541666666666667, + "acc_stderr": 0.039994111357535424, + "acc_norm": 0.3541666666666667, + "acc_norm_stderr": 0.039994111357535424 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.55, + "acc_stderr": 0.05, + 
"acc_norm": 0.55, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4508670520231214, + "acc_stderr": 0.026788811931562753, + "acc_norm": 0.4508670520231214, + "acc_norm_stderr": 0.026788811931562753 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5214723926380368, + "acc_stderr": 0.03924746876751129, + "acc_norm": 0.5214723926380368, + "acc_norm_stderr": 0.03924746876751129 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.42901234567901236, + "acc_stderr": 0.027538925613470867, + "acc_norm": 0.42901234567901236, + "acc_norm_stderr": 0.027538925613470867 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.45595854922279794, + "acc_stderr": 0.03594413711272437, + "acc_norm": 0.45595854922279794, + "acc_norm_stderr": 0.03594413711272437 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.32456140350877194, + "acc_stderr": 0.044045561573747685, + "acc_norm": 0.32456140350877194, + "acc_norm_stderr": 0.044045561573747685 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.47155963302752296, + "acc_stderr": 0.021402615697348044, + "acc_norm": 0.47155963302752296, + "acc_norm_stderr": 0.021402615697348044 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3412698412698413, + "acc_stderr": 0.04240799327574925, + "acc_norm": 0.3412698412698413, + "acc_norm_stderr": 0.04240799327574925 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.43790849673202614, + "acc_stderr": 0.028408302020332687, + "acc_norm": 0.43790849673202614, + "acc_norm_stderr": 0.028408302020332687 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6611570247933884, + "acc_stderr": 
0.04320767807536669, + "acc_norm": 0.6611570247933884, + "acc_norm_stderr": 0.04320767807536669 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.34868421052631576, + "acc_stderr": 0.038781398887976104, + "acc_norm": 0.34868421052631576, + "acc_norm_stderr": 0.038781398887976104 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.35130718954248363, + "acc_stderr": 0.01931267606578656, + "acc_norm": 0.35130718954248363, + "acc_norm_stderr": 0.01931267606578656 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.02812163604063988, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.02812163604063988 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.375, + "acc_stderr": 0.04595091388086298, + "acc_norm": 0.375, + "acc_norm_stderr": 0.04595091388086298 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4305555555555556, + "acc_stderr": 0.03376922151252335, + "acc_norm": 0.4305555555555556, + "acc_norm_stderr": 0.03376922151252335 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.31508379888268156, + "acc_stderr": 0.015536850852473638, + "acc_norm": 0.31508379888268156, + "acc_norm_stderr": 0.015536850852473638 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.63, + "acc_stderr": 0.04852365870939098, + "acc_norm": 0.63, + "acc_norm_stderr": 0.04852365870939098 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4117647058823529, + "acc_stderr": 0.029896163033125474, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.029896163033125474 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4857142857142857, + "acc_stderr": 0.031996152328062875, + "acc_norm": 0.4857142857142857, + "acc_norm_stderr": 0.031996152328062875 + }, + "harness|ko_mmlu_high_school_world_history|5": 
{ + "acc": 0.5485232067510548, + "acc_stderr": 0.03239360017397471, + "acc_norm": 0.5485232067510548, + "acc_norm_stderr": 0.03239360017397471 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.30964797913950454, + "acc_stderr": 0.011808598262503318, + "acc_norm": 0.30964797913950454, + "acc_norm_stderr": 0.011808598262503318 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.43137254901960786, + "acc_stderr": 0.03476099060501636, + "acc_norm": 0.43137254901960786, + "acc_norm_stderr": 0.03476099060501636 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3939393939393939, + "acc_stderr": 0.03815494308688929, + "acc_norm": 0.3939393939393939, + "acc_norm_stderr": 0.03815494308688929 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2974296205630355, + "mc1_stderr": 0.016002651487361012, + "mc2": 0.4812011290103991, + "mc2_stderr": 0.015609821540397258 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4049586776859504, + "acc_stderr": 0.01687694116504561, + "acc_norm": 0.49940968122786306, + "acc_norm_stderr": 0.017190342123448665 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + 
"harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "NousResearch/Yarn-Mistral-7b-64k", + "model_sha": "0273c624561fcecc8e8f4030492a9307aa60f945", + "model_dtype": "torch.float16", + "lighteval_sha": "", + 
"num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/OMK510/ko-llama2-toy/result_2023-10-18 09:10:50.json b/OMK510/ko-llama2-toy/result_2023-10-18 09:10:50.json new file mode 100644 index 0000000000000000000000000000000000000000..cbc9608c20b2c9495ac6790ff8a96526dcba00ce --- /dev/null +++ b/OMK510/ko-llama2-toy/result_2023-10-18 09:10:50.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2150170648464164, + "acc_stderr": 0.012005717634133611, + "acc_norm": 0.27474402730375425, + "acc_norm_stderr": 0.013044617212771227 + }, + "harness|ko_hellaswag|10": { + "acc": 0.25403306114319857, + "acc_stderr": 0.00434426617963492, + "acc_norm": 0.2605058753236407, + "acc_norm_stderr": 0.004380136468543937 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.30409356725146197, + "acc_stderr": 0.03528211258245231, + "acc_norm": 0.30409356725146197, + "acc_norm_stderr": 0.03528211258245231 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.1941747572815534, + "acc_stderr": 0.03916667762822584, + "acc_norm": 0.1941747572815534, + "acc_norm_stderr": 0.03916667762822584 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.27330779054916987, + "acc_stderr": 0.015936681062628556, + "acc_norm": 0.27330779054916987, + "acc_norm_stderr": 0.015936681062628556 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.32592592592592595, + "acc_stderr": 0.040491220417025055, + "acc_norm": 0.32592592592592595, + "acc_norm_stderr": 0.040491220417025055 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.19574468085106383, + "acc_stderr": 0.025937853139977148, + "acc_norm": 0.19574468085106383, + "acc_norm_stderr": 0.025937853139977148 + }, + "harness|ko_mmlu_virology|5": { + "acc": 
0.1927710843373494, + "acc_stderr": 0.03070982405056527, + "acc_norm": 0.1927710843373494, + "acc_norm_stderr": 0.03070982405056527 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2990353697749196, + "acc_stderr": 0.02600330111788513, + "acc_norm": 0.2990353697749196, + "acc_norm_stderr": 0.02600330111788513 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.18834080717488788, + "acc_stderr": 0.026241132996407256, + "acc_norm": 0.18834080717488788, + "acc_norm_stderr": 0.026241132996407256 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.22900763358778625, + "acc_stderr": 0.036853466317118506, + "acc_norm": 0.22900763358778625, + "acc_norm_stderr": 0.036853466317118506 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.25252525252525254, + "acc_stderr": 0.030954055470365907, + "acc_norm": 0.25252525252525254, + "acc_norm_stderr": 0.030954055470365907 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.33793103448275863, + "acc_stderr": 0.039417076320648906, + "acc_norm": 0.33793103448275863, + "acc_norm_stderr": 0.039417076320648906 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237657, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237657 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.24789915966386555, + "acc_stderr": 0.028047967224176892, + "acc_norm": 0.24789915966386555, + "acc_norm_stderr": 0.028047967224176892 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.31025641025641026, + "acc_stderr": 0.023454674889404295, + "acc_norm": 0.31025641025641026, + "acc_norm_stderr": 0.023454674889404295 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542129, + "acc_norm": 0.28, + "acc_norm_stderr": 
0.04512608598542129 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.23148148148148148, + "acc_stderr": 0.04077494709252627, + "acc_norm": 0.23148148148148148, + "acc_norm_stderr": 0.04077494709252627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3054187192118227, + "acc_stderr": 0.03240661565868408, + "acc_norm": 0.3054187192118227, + "acc_norm_stderr": 0.03240661565868408 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.33225806451612905, + "acc_stderr": 0.026795560848122787, + "acc_norm": 0.33225806451612905, + "acc_norm_stderr": 0.026795560848122787 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2564102564102564, + "acc_stderr": 0.028605953702004253, + "acc_norm": 0.2564102564102564, + "acc_norm_stderr": 0.028605953702004253 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.24150943396226415, + "acc_stderr": 0.02634148037111834, + "acc_norm": 0.24150943396226415, + "acc_norm_stderr": 0.02634148037111834 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2909090909090909, + "acc_stderr": 0.04350271442923243, + "acc_norm": 0.2909090909090909, + "acc_norm_stderr": 0.04350271442923243 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.026962424325073828, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.026962424325073828 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.037101857261199946, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.037101857261199946 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.21890547263681592, + "acc_stderr": 0.029239174636647, + "acc_norm": 0.21890547263681592, + "acc_norm_stderr": 0.029239174636647 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.23699421965317918, + "acc_stderr": 0.03242414757483098, 
+ "acc_norm": 0.23699421965317918, + "acc_norm_stderr": 0.03242414757483098 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2804232804232804, + "acc_stderr": 0.02313528797432563, + "acc_norm": 0.2804232804232804, + "acc_norm_stderr": 0.02313528797432563 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2708333333333333, + "acc_stderr": 0.03716177437566016, + "acc_norm": 0.2708333333333333, + "acc_norm_stderr": 0.03716177437566016 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.29190751445086704, + "acc_stderr": 0.02447699407624734, + "acc_norm": 0.29190751445086704, + "acc_norm_stderr": 0.02447699407624734 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2883435582822086, + "acc_stderr": 0.035590395316173425, + "acc_norm": 0.2883435582822086, + "acc_norm_stderr": 0.035590395316173425 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.29012345679012347, + "acc_stderr": 0.02525117393649502, + "acc_norm": 0.29012345679012347, + "acc_norm_stderr": 0.02525117393649502 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.22797927461139897, + "acc_stderr": 0.03027690994517826, + "acc_norm": 0.22797927461139897, + "acc_norm_stderr": 0.03027690994517826 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.21929824561403508, + "acc_stderr": 0.03892431106518753, + "acc_norm": 0.21929824561403508, + "acc_norm_stderr": 0.03892431106518753 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.22568807339449543, + 
"acc_stderr": 0.01792308766780305, + "acc_norm": 0.22568807339449543, + "acc_norm_stderr": 0.01792308766780305 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3412698412698413, + "acc_stderr": 0.04240799327574925, + "acc_norm": 0.3412698412698413, + "acc_norm_stderr": 0.04240799327574925 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2581699346405229, + "acc_stderr": 0.02505850331695815, + "acc_norm": 0.2581699346405229, + "acc_norm_stderr": 0.02505850331695815 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.38016528925619836, + "acc_stderr": 0.04431324501968431, + "acc_norm": 0.38016528925619836, + "acc_norm_stderr": 0.04431324501968431 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.2894736842105263, + "acc_stderr": 0.03690677986137283, + "acc_norm": 0.2894736842105263, + "acc_norm_stderr": 0.03690677986137283 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2761437908496732, + "acc_stderr": 0.018087276935663133, + "acc_norm": 0.2761437908496732, + "acc_norm_stderr": 0.018087276935663133 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.26595744680851063, + "acc_stderr": 0.026358065698880592, + "acc_norm": 0.26595744680851063, + "acc_norm_stderr": 0.026358065698880592 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.22321428571428573, + "acc_stderr": 0.03952301967702511, + "acc_norm": 0.22321428571428573, + "acc_norm_stderr": 0.03952301967702511 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2361111111111111, + "acc_stderr": 0.028963702570791013, + "acc_norm": 0.2361111111111111, + "acc_norm_stderr": 0.028963702570791013 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27262569832402234, + "acc_stderr": 0.014893391735249608, + "acc_norm": 0.27262569832402234, + "acc_norm_stderr": 0.014893391735249608 + }, + 
"harness|ko_mmlu_college_computer_science|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932267, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932267 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.44485294117647056, + "acc_stderr": 0.030187532060329387, + "acc_norm": 0.44485294117647056, + "acc_norm_stderr": 0.030187532060329387 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.23265306122448978, + "acc_stderr": 0.02704925791589618, + "acc_norm": 0.23265306122448978, + "acc_norm_stderr": 0.02704925791589618 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.26582278481012656, + "acc_stderr": 0.028756799629658335, + "acc_norm": 0.26582278481012656, + "acc_norm_stderr": 0.028756799629658335 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2692307692307692, + "acc_stderr": 0.01132873440314031, + "acc_norm": 0.2692307692307692, + "acc_norm_stderr": 0.01132873440314031 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.031321798030832904, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.031321798030832904 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.24848484848484848, + "acc_stderr": 0.03374402644139406, + "acc_norm": 0.24848484848484848, + "acc_norm_stderr": 0.03374402644139406 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2582619339045288, + "mc1_stderr": 0.015321821688476196, + "mc2": 0.5244892940135847, + "mc2_stderr": 0.01678983173354145 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.09799291617473435, + "acc_stderr": 0.010221558855214903, + "acc_norm": 0.30460448642266824, + "acc_norm_stderr": 0.01582336727312938 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + 
"harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + 
"harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "OMK510/ko-llama2-toy", + "model_sha": "5ad4552d4e7990214eec24fcadf84eba584fba1f", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/OMK510/omk_mixed2/result_2023-10-20 04:05:12.json b/OMK510/omk_mixed2/result_2023-10-20 04:05:12.json new file mode 100644 index 0000000000000000000000000000000000000000..68f1bcab1c67a846e91a9670802b640022e7d666 --- /dev/null +++ b/OMK510/omk_mixed2/result_2023-10-20 04:05:12.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.378839590443686, + "acc_stderr": 0.014175915490000324, + "acc_norm": 0.4300341296928328, + "acc_norm_stderr": 0.014467631559137998 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4086835291774547, + "acc_stderr": 0.004905859114942308, + "acc_norm": 0.5462059350726947, + "acc_norm_stderr": 0.0049684294763450345 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5146198830409356, + "acc_stderr": 0.038331852752130254, + "acc_norm": 0.5146198830409356, + "acc_norm_stderr": 0.038331852752130254 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.46601941747572817, + "acc_stderr": 0.0493929144727348, + "acc_norm": 0.46601941747572817, + "acc_norm_stderr": 0.0493929144727348 + }, + 
"harness|ko_mmlu_miscellaneous|5": { + "acc": 0.49808429118773945, + "acc_stderr": 0.017879832259026677, + "acc_norm": 0.49808429118773945, + "acc_norm_stderr": 0.017879832259026677 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.362962962962963, + "acc_stderr": 0.041539484047424, + "acc_norm": 0.362962962962963, + "acc_norm_stderr": 0.041539484047424 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3617021276595745, + "acc_stderr": 0.03141082197596239, + "acc_norm": 0.3617021276595745, + "acc_norm_stderr": 0.03141082197596239 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4397590361445783, + "acc_stderr": 0.03864139923699122, + "acc_norm": 0.4397590361445783, + "acc_norm_stderr": 0.03864139923699122 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4694533762057878, + "acc_stderr": 0.02834504586484068, + "acc_norm": 0.4694533762057878, + "acc_norm_stderr": 0.02834504586484068 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3632286995515695, + "acc_stderr": 0.03227790442850499, + "acc_norm": 0.3632286995515695, + "acc_norm_stderr": 0.03227790442850499 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4732824427480916, + "acc_stderr": 0.04379024936553894, + "acc_norm": 0.4732824427480916, + "acc_norm_stderr": 0.04379024936553894 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5202020202020202, + "acc_stderr": 0.03559443565563918, + "acc_norm": 0.5202020202020202, + "acc_norm_stderr": 0.03559443565563918 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.04104269211806232, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.04104269211806232 + }, + 
"harness|ko_mmlu_college_physics|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.04440521906179327, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.04440521906179327 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.37815126050420167, + "acc_stderr": 0.031499305777849054, + "acc_norm": 0.37815126050420167, + "acc_norm_stderr": 0.031499305777849054 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.36153846153846153, + "acc_stderr": 0.024359581465396955, + "acc_norm": 0.36153846153846153, + "acc_norm_stderr": 0.024359581465396955 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.39814814814814814, + "acc_stderr": 0.04732332615978813, + "acc_norm": 0.39814814814814814, + "acc_norm_stderr": 0.04732332615978813 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39408866995073893, + "acc_stderr": 0.034381579670365446, + "acc_norm": 0.39408866995073893, + "acc_norm_stderr": 0.034381579670365446 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.45806451612903226, + "acc_stderr": 0.02834378725054064, + "acc_norm": 0.45806451612903226, + "acc_norm_stderr": 0.02834378725054064 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5854700854700855, + "acc_stderr": 0.03227396567623779, + "acc_norm": 0.5854700854700855, + "acc_norm_stderr": 0.03227396567623779 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4377358490566038, + "acc_stderr": 0.030533338430467512, + "acc_norm": 0.4377358490566038, + "acc_norm_stderr": 0.030533338430467512 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4636363636363636, + "acc_stderr": 0.04776449162396197, + "acc_norm": 0.4636363636363636, 
+ "acc_norm_stderr": 0.04776449162396197 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.026962424325073828, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.026962424325073828 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.037101857261199946, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.037101857261199946 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.572139303482587, + "acc_stderr": 0.03498541988407795, + "acc_norm": 0.572139303482587, + "acc_norm_stderr": 0.03498541988407795 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.35260115606936415, + "acc_stderr": 0.036430371689585496, + "acc_norm": 0.35260115606936415, + "acc_norm_stderr": 0.036430371689585496 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.29894179894179895, + "acc_stderr": 0.023577604791655795, + "acc_norm": 0.29894179894179895, + "acc_norm_stderr": 0.023577604791655795 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3194444444444444, + "acc_stderr": 0.03899073687357335, + "acc_norm": 0.3194444444444444, + "acc_norm_stderr": 0.03899073687357335 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932269, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932269 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.57, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.57, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.42196531791907516, + "acc_stderr": 0.02658923114217426, + "acc_norm": 0.42196531791907516, + "acc_norm_stderr": 0.02658923114217426 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3496932515337423, + "acc_stderr": 0.03746668325470021, + "acc_norm": 0.3496932515337423, + "acc_norm_stderr": 0.03746668325470021 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.44135802469135804, + "acc_stderr": 0.027628737155668777, + 
"acc_norm": 0.44135802469135804, + "acc_norm_stderr": 0.027628737155668777 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.42487046632124353, + "acc_stderr": 0.0356747133521254, + "acc_norm": 0.42487046632124353, + "acc_norm_stderr": 0.0356747133521254 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.041857744240220575, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.041857744240220575 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.46788990825688076, + "acc_stderr": 0.021393071222680814, + "acc_norm": 0.46788990825688076, + "acc_norm_stderr": 0.021393071222680814 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2698412698412698, + "acc_stderr": 0.03970158273235173, + "acc_norm": 0.2698412698412698, + "acc_norm_stderr": 0.03970158273235173 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4019607843137255, + "acc_stderr": 0.02807415894760066, + "acc_norm": 0.4019607843137255, + "acc_norm_stderr": 0.02807415894760066 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5289256198347108, + "acc_stderr": 0.04556710331269498, + "acc_norm": 0.5289256198347108, + "acc_norm_stderr": 0.04556710331269498 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.39473684210526316, + "acc_stderr": 0.03977749934622074, + "acc_norm": 0.39473684210526316, + "acc_norm_stderr": 0.03977749934622074 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.31862745098039214, + "acc_stderr": 0.01885008469646872, + "acc_norm": 0.31862745098039214, + "acc_norm_stderr": 0.01885008469646872 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 
0.3120567375886525, + "acc_stderr": 0.027640120545169945, + "acc_norm": 0.3120567375886525, + "acc_norm_stderr": 0.027640120545169945 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.21428571428571427, + "acc_stderr": 0.038946411200447915, + "acc_norm": 0.21428571428571427, + "acc_norm_stderr": 0.038946411200447915 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.28703703703703703, + "acc_stderr": 0.03085199299325701, + "acc_norm": 0.28703703703703703, + "acc_norm_stderr": 0.03085199299325701 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.22058823529411764, + "acc_stderr": 0.025187786660227276, + "acc_norm": 0.22058823529411764, + "acc_norm_stderr": 0.025187786660227276 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.43673469387755104, + "acc_stderr": 0.031751952375833226, + "acc_norm": 0.43673469387755104, + "acc_norm_stderr": 0.031751952375833226 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.4978902953586498, + "acc_stderr": 0.032546938018020076, + "acc_norm": 0.4978902953586498, + "acc_norm_stderr": 0.032546938018020076 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.29595827900912647, + "acc_stderr": 0.011658518525277039, + "acc_norm": 0.29595827900912647, + "acc_norm_stderr": 0.011658518525277039 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4068627450980392, + "acc_stderr": 0.03447891136353382, + "acc_norm": 0.4068627450980392, + 
"acc_norm_stderr": 0.03447891136353382 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4727272727272727, + "acc_stderr": 0.03898531605579419, + "acc_norm": 0.4727272727272727, + "acc_norm_stderr": 0.03898531605579419 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2582619339045288, + "mc1_stderr": 0.0153218216884762, + "mc2": 0.4217472836360241, + "mc2_stderr": 0.014796357378387609 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4769775678866588, + "acc_stderr": 0.017172121546727637, + "acc_norm": 0.5655253837072018, + "acc_norm_stderr": 0.017042098620824928 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + 
"harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "OMK510/omk_mixed2", + "model_sha": "8fb8a29ecba1b69a023885fcf11ea223b491bbac", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/ONS-AI-RESEARCH/ONS-SOLAR-10.7B-v1.1/result_2024-02-12 10:23:17.json b/ONS-AI-RESEARCH/ONS-SOLAR-10.7B-v1.1/result_2024-02-12 10:23:17.json new file mode 100644 index 0000000000000000000000000000000000000000..dc6f7553f3979ce3d098a5092741f51db0db4034 --- /dev/null +++ b/ONS-AI-RESEARCH/ONS-SOLAR-10.7B-v1.1/result_2024-02-12 10:23:17.json @@ -0,0 +1,444 @@ +{ + 
"results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4974402730375427, + "acc_stderr": 0.014611199329843784, + "acc_norm": 0.5580204778156996, + "acc_norm_stderr": 0.014512682523128342 + }, + "harness|ko_hellaswag|10": { + "acc": 0.47122087233618803, + "acc_stderr": 0.004981509099276345, + "acc_norm": 0.6430989842660825, + "acc_norm_stderr": 0.004781061390873917 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.7017543859649122, + "acc_stderr": 0.035087719298245654, + "acc_norm": 0.7017543859649122, + "acc_norm_stderr": 0.035087719298245654 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6893203883495146, + "acc_stderr": 0.045821241601615506, + "acc_norm": 0.6893203883495146, + "acc_norm_stderr": 0.045821241601615506 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.7215836526181354, + "acc_stderr": 0.01602829518899245, + "acc_norm": 0.7215836526181354, + "acc_norm_stderr": 0.01602829518899245 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.5037037037037037, + "acc_stderr": 0.043192236258113324, + "acc_norm": 0.5037037037037037, + "acc_norm_stderr": 0.043192236258113324 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.5404255319148936, + "acc_stderr": 0.032579014820998335, + "acc_norm": 0.5404255319148936, + "acc_norm_stderr": 0.032579014820998335 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.5, + "acc_stderr": 0.03892494720807614, + "acc_norm": 0.5, + "acc_norm_stderr": 0.03892494720807614 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6141479099678456, + "acc_stderr": 0.027648149599751464, + "acc_norm": 0.6141479099678456, + "acc_norm_stderr": 0.027648149599751464 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.6053811659192825, + "acc_stderr": 0.03280400504755291, + "acc_norm": 0.6053811659192825, + "acc_norm_stderr": 0.03280400504755291 + }, + 
"harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6183206106870229, + "acc_stderr": 0.04260735157644561, + "acc_norm": 0.6183206106870229, + "acc_norm_stderr": 0.04260735157644561 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7828282828282829, + "acc_stderr": 0.029376616484945633, + "acc_norm": 0.7828282828282829, + "acc_norm_stderr": 0.029376616484945633 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5241379310344828, + "acc_stderr": 0.0416180850350153, + "acc_norm": 0.5241379310344828, + "acc_norm_stderr": 0.0416180850350153 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.30392156862745096, + "acc_stderr": 0.04576665403207763, + "acc_norm": 0.30392156862745096, + "acc_norm_stderr": 0.04576665403207763 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.634453781512605, + "acc_stderr": 0.03128217706368462, + "acc_norm": 0.634453781512605, + "acc_norm_stderr": 0.03128217706368462 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5794871794871795, + "acc_stderr": 0.02502861027671087, + "acc_norm": 0.5794871794871795, + "acc_norm_stderr": 0.02502861027671087 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.04557239513497751, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.04557239513497751 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.03465304488406795, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 
0.03465304488406795 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6838709677419355, + "acc_stderr": 0.026450874489042767, + "acc_norm": 0.6838709677419355, + "acc_norm_stderr": 0.026450874489042767 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.8333333333333334, + "acc_stderr": 0.024414947304543674, + "acc_norm": 0.8333333333333334, + "acc_norm_stderr": 0.024414947304543674 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5773584905660377, + "acc_stderr": 0.030402331445769544, + "acc_norm": 0.5773584905660377, + "acc_norm_stderr": 0.030402331445769544 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6272727272727273, + "acc_stderr": 0.04631381319425465, + "acc_norm": 0.6272727272727273, + "acc_norm_stderr": 0.04631381319425465 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.028742040903948485, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.028742040903948485 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3973509933774834, + "acc_stderr": 0.03995524007681681, + "acc_norm": 0.3973509933774834, + "acc_norm_stderr": 0.03995524007681681 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7512437810945274, + "acc_stderr": 0.030567675938916718, + "acc_norm": 0.7512437810945274, + "acc_norm_stderr": 0.030567675938916718 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.49710982658959535, + "acc_stderr": 0.038124005659748335, + "acc_norm": 0.49710982658959535, + "acc_norm_stderr": 0.038124005659748335 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3941798941798942, + "acc_stderr": 0.02516798233389415, + "acc_norm": 0.3941798941798942, + "acc_norm_stderr": 0.02516798233389415 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5138888888888888, + "acc_stderr": 0.041795966175810016, + "acc_norm": 0.5138888888888888, + "acc_norm_stderr": 0.041795966175810016 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.38, + 
"acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.78, + "acc_stderr": 0.04163331998932263, + "acc_norm": 0.78, + "acc_norm_stderr": 0.04163331998932263 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.6098265895953757, + "acc_stderr": 0.02626167760780665, + "acc_norm": 0.6098265895953757, + "acc_norm_stderr": 0.02626167760780665 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.6073619631901841, + "acc_stderr": 0.0383674090783103, + "acc_norm": 0.6073619631901841, + "acc_norm_stderr": 0.0383674090783103 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6049382716049383, + "acc_stderr": 0.027201117666925647, + "acc_norm": 0.6049382716049383, + "acc_norm_stderr": 0.027201117666925647 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7564766839378239, + "acc_stderr": 0.030975436386845426, + "acc_norm": 0.7564766839378239, + "acc_norm_stderr": 0.030975436386845426 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.42105263157894735, + "acc_stderr": 0.04644602091222317, + "acc_norm": 0.42105263157894735, + "acc_norm_stderr": 0.04644602091222317 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.728440366972477, + "acc_stderr": 0.019069098363191442, + "acc_norm": 0.728440366972477, + "acc_norm_stderr": 0.019069098363191442 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.044444444444444495, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.044444444444444495 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.6470588235294118, + "acc_stderr": 0.027363593284684972, + "acc_norm": 0.6470588235294118, + "acc_norm_stderr": 0.027363593284684972 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.62, + 
"acc_stderr": 0.048783173121456316, + "acc_norm": 0.62, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7520661157024794, + "acc_stderr": 0.03941897526516304, + "acc_norm": 0.7520661157024794, + "acc_norm_stderr": 0.03941897526516304 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.618421052631579, + "acc_stderr": 0.03953173377749194, + "acc_norm": 0.618421052631579, + "acc_norm_stderr": 0.03953173377749194 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5277777777777778, + "acc_stderr": 0.020196594933541197, + "acc_norm": 0.5277777777777778, + "acc_norm_stderr": 0.020196594933541197 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3971631205673759, + "acc_stderr": 0.029189805673587105, + "acc_norm": 0.3971631205673759, + "acc_norm_stderr": 0.029189805673587105 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.39285714285714285, + "acc_stderr": 0.04635550135609976, + "acc_norm": 0.39285714285714285, + "acc_norm_stderr": 0.04635550135609976 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4861111111111111, + "acc_stderr": 0.034086558679777494, + "acc_norm": 0.4861111111111111, + "acc_norm_stderr": 0.034086558679777494 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2, + "acc_stderr": 0.013378001241813077, + "acc_norm": 0.2, + "acc_norm_stderr": 0.013378001241813077 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.48, + "acc_stderr": 0.05021167315686779, + "acc_norm": 0.48, + "acc_norm_stderr": 0.05021167315686779 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.71, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.71, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5220588235294118, + "acc_stderr": 0.030343264224213528, + "acc_norm": 0.5220588235294118, + "acc_norm_stderr": 0.030343264224213528 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 
0.689795918367347, + "acc_stderr": 0.029613459872484378, + "acc_norm": 0.689795918367347, + "acc_norm_stderr": 0.029613459872484378 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7721518987341772, + "acc_stderr": 0.027303484599069422, + "acc_norm": 0.7721518987341772, + "acc_norm_stderr": 0.027303484599069422 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.4061277705345502, + "acc_stderr": 0.01254315458841292, + "acc_norm": 0.4061277705345502, + "acc_norm_stderr": 0.01254315458841292 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.7696078431372549, + "acc_stderr": 0.029554292605695077, + "acc_norm": 0.7696078431372549, + "acc_norm_stderr": 0.029554292605695077 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.7818181818181819, + "acc_stderr": 0.03225078108306289, + "acc_norm": 0.7818181818181819, + "acc_norm_stderr": 0.03225078108306289 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.34149326805385555, + "mc1_stderr": 0.016600688619950822, + "mc2": 0.4898476400498463, + "mc2_stderr": 0.015526635106646595 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5041322314049587, + "acc_stderr": 0.017189767032130817, + "acc_norm": 0.5348288075560803, + "acc_norm_stderr": 0.017148598015747425 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 
1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + 
"model_name": "ONS-AI-RESEARCH/ONS-SOLAR-10.7B-v1.1", + "model_sha": "d746eb66b91f53c754e7cb8eca327d956ffe9437", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/ONS-AI-RESEARCH/ONS-SOLAR-10.7B-v1.2/result_2024-03-14 08:02:28.json b/ONS-AI-RESEARCH/ONS-SOLAR-10.7B-v1.2/result_2024-03-14 08:02:28.json new file mode 100644 index 0000000000000000000000000000000000000000..f5fd740dc178841ec47c1144d139945f5b0f829e --- /dev/null +++ b/ONS-AI-RESEARCH/ONS-SOLAR-10.7B-v1.2/result_2024-03-14 08:02:28.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.5170648464163823, + "acc_stderr": 0.014602878388536591, + "acc_norm": 0.5767918088737202, + "acc_norm_stderr": 0.014438036220848039 + }, + "harness|ko_hellaswag|10": { + "acc": 0.46614220274845647, + "acc_stderr": 0.004978328190775523, + "acc_norm": 0.6345349531965744, + "acc_norm_stderr": 0.004805761513803401 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6900584795321637, + "acc_stderr": 0.03546976959393163, + "acc_norm": 0.6900584795321637, + "acc_norm_stderr": 0.03546976959393163 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6990291262135923, + "acc_stderr": 0.045416094465039476, + "acc_norm": 0.6990291262135923, + "acc_norm_stderr": 0.045416094465039476 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.722860791826309, + "acc_stderr": 0.016005636294122435, + "acc_norm": 0.722860791826309, + "acc_norm_stderr": 0.016005636294122435 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.5333333333333333, + "acc_stderr": 0.043097329010363554, + "acc_norm": 0.5333333333333333, + "acc_norm_stderr": 0.043097329010363554 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + 
"harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.5574468085106383, + "acc_stderr": 0.03246956919789957, + "acc_norm": 0.5574468085106383, + "acc_norm_stderr": 0.03246956919789957 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.5060240963855421, + "acc_stderr": 0.038922121953330446, + "acc_norm": 0.5060240963855421, + "acc_norm_stderr": 0.038922121953330446 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6237942122186495, + "acc_stderr": 0.02751392568354943, + "acc_norm": 0.6237942122186495, + "acc_norm_stderr": 0.02751392568354943 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.6322869955156951, + "acc_stderr": 0.032361983509282745, + "acc_norm": 0.6322869955156951, + "acc_norm_stderr": 0.032361983509282745 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5877862595419847, + "acc_stderr": 0.04317171194870255, + "acc_norm": 0.5877862595419847, + "acc_norm_stderr": 0.04317171194870255 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.48, + "acc_stderr": 0.05021167315686779, + "acc_norm": 0.48, + "acc_norm_stderr": 0.05021167315686779 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7676767676767676, + "acc_stderr": 0.030088629490217483, + "acc_norm": 0.7676767676767676, + "acc_norm_stderr": 0.030088629490217483 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5103448275862069, + "acc_stderr": 0.04165774775728763, + "acc_norm": 0.5103448275862069, + "acc_norm_stderr": 0.04165774775728763 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.04533838195929776, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.04533838195929776 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6302521008403361, + "acc_stderr": 0.03135709599613591, + "acc_norm": 0.6302521008403361, + "acc_norm_stderr": 0.03135709599613591 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5692307692307692, + "acc_stderr": 0.025106820660539757, + "acc_norm": 
0.5692307692307692, + "acc_norm_stderr": 0.025106820660539757 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.59, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.59, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6851851851851852, + "acc_stderr": 0.04489931073591312, + "acc_norm": 0.6851851851851852, + "acc_norm_stderr": 0.04489931073591312 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39901477832512317, + "acc_stderr": 0.03445487686264716, + "acc_norm": 0.39901477832512317, + "acc_norm_stderr": 0.03445487686264716 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6806451612903226, + "acc_stderr": 0.02652270967466777, + "acc_norm": 0.6806451612903226, + "acc_norm_stderr": 0.02652270967466777 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.8162393162393162, + "acc_stderr": 0.025372139671722933, + "acc_norm": 0.8162393162393162, + "acc_norm_stderr": 0.025372139671722933 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5886792452830188, + "acc_stderr": 0.030285009259009798, + "acc_norm": 0.5886792452830188, + "acc_norm_stderr": 0.030285009259009798 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6090909090909091, + "acc_stderr": 0.04673752333670239, + "acc_norm": 0.6090909090909091, + "acc_norm_stderr": 0.04673752333670239 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.31851851851851853, + "acc_stderr": 0.02840653309060846, + "acc_norm": 0.31851851851851853, + "acc_norm_stderr": 0.02840653309060846 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.37748344370860926, + "acc_stderr": 0.0395802723112157, + "acc_norm": 0.37748344370860926, + "acc_norm_stderr": 0.0395802723112157 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7412935323383084, + "acc_stderr": 
0.03096590312357304, + "acc_norm": 0.7412935323383084, + "acc_norm_stderr": 0.03096590312357304 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5375722543352601, + "acc_stderr": 0.0380168510452446, + "acc_norm": 0.5375722543352601, + "acc_norm_stderr": 0.0380168510452446 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.455026455026455, + "acc_stderr": 0.02564692836104939, + "acc_norm": 0.455026455026455, + "acc_norm_stderr": 0.02564692836104939 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.04155319955593146, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.04155319955593146 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.73, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.73, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5982658959537572, + "acc_stderr": 0.026394104177643634, + "acc_norm": 0.5982658959537572, + "acc_norm_stderr": 0.026394104177643634 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5766871165644172, + "acc_stderr": 0.03881891213334385, + "acc_norm": 0.5766871165644172, + "acc_norm_stderr": 0.03881891213334385 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6388888888888888, + "acc_stderr": 0.02672586880910079, + "acc_norm": 0.6388888888888888, + "acc_norm_stderr": 0.02672586880910079 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7668393782383419, + "acc_stderr": 0.03051611137147601, + "acc_norm": 0.7668393782383419, + "acc_norm_stderr": 0.03051611137147601 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.45614035087719296, + 
"acc_stderr": 0.046854730419077895, + "acc_norm": 0.45614035087719296, + "acc_norm_stderr": 0.046854730419077895 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.7376146788990826, + "acc_stderr": 0.018861885021534738, + "acc_norm": 0.7376146788990826, + "acc_norm_stderr": 0.018861885021534738 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4603174603174603, + "acc_stderr": 0.04458029125470973, + "acc_norm": 0.4603174603174603, + "acc_norm_stderr": 0.04458029125470973 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.6078431372549019, + "acc_stderr": 0.027956046165424516, + "acc_norm": 0.6078431372549019, + "acc_norm_stderr": 0.027956046165424516 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.67, + "acc_stderr": 0.047258156262526066, + "acc_norm": 0.67, + "acc_norm_stderr": 0.047258156262526066 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7272727272727273, + "acc_stderr": 0.040655781409087044, + "acc_norm": 0.7272727272727273, + "acc_norm_stderr": 0.040655781409087044 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.631578947368421, + "acc_stderr": 0.03925523381052932, + "acc_norm": 0.631578947368421, + "acc_norm_stderr": 0.03925523381052932 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.545751633986928, + "acc_stderr": 0.020142974553795205, + "acc_norm": 0.545751633986928, + "acc_norm_stderr": 0.020142974553795205 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.4432624113475177, + "acc_stderr": 0.029634838473766006, + "acc_norm": 0.4432624113475177, + "acc_norm_stderr": 0.029634838473766006 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.45535714285714285, + "acc_stderr": 0.04726835553719099, + "acc_norm": 0.45535714285714285, + "acc_norm_stderr": 0.04726835553719099 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5046296296296297, + "acc_stderr": 0.03409825519163572, + "acc_norm": 0.5046296296296297, + "acc_norm_stderr": 0.03409825519163572 + }, + 
"harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.3553072625698324, + "acc_stderr": 0.016006989934803192, + "acc_norm": 0.3553072625698324, + "acc_norm_stderr": 0.016006989934803192 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.04999999999999999, + "acc_norm": 0.45, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.74, + "acc_stderr": 0.04408440022768081, + "acc_norm": 0.74, + "acc_norm_stderr": 0.04408440022768081 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5477941176470589, + "acc_stderr": 0.03023375855159645, + "acc_norm": 0.5477941176470589, + "acc_norm_stderr": 0.03023375855159645 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6857142857142857, + "acc_stderr": 0.029719329422417468, + "acc_norm": 0.6857142857142857, + "acc_norm_stderr": 0.029719329422417468 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7679324894514767, + "acc_stderr": 0.027479744550808514, + "acc_norm": 0.7679324894514767, + "acc_norm_stderr": 0.027479744550808514 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.41395045632333766, + "acc_stderr": 0.012579699631289258, + "acc_norm": 0.41395045632333766, + "acc_norm_stderr": 0.012579699631289258 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.7058823529411765, + "acc_stderr": 0.03198001660115071, + "acc_norm": 0.7058823529411765, + "acc_norm_stderr": 0.03198001660115071 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.7151515151515152, + "acc_stderr": 0.03524390844511781, + "acc_norm": 0.7151515151515152, + "acc_norm_stderr": 0.03524390844511781 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.5042839657282742, + "mc1_stderr": 0.017502858577371248, + "mc2": 0.6714270780791007, + "mc2_stderr": 0.015246580466487622 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.564344746162928, + "acc_stderr": 0.01704741522947633, + "acc_norm": 0.5737898465171193, 
+ "acc_norm_stderr": 0.017002122609489256 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 
1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "ONS-AI-RESEARCH/ONS-SOLAR-10.7B-v1.2", + "model_sha": "31feb5543bfd5a36c100e799708adbca5cfcef58", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/ONS-AI-RESEARCH/ONS-SOLAR-10.7B/result_2024-01-15 04:48:50.json b/ONS-AI-RESEARCH/ONS-SOLAR-10.7B/result_2024-01-15 04:48:50.json new file mode 100644 index 0000000000000000000000000000000000000000..ee0bc6d41418d4f01fdffabe7ce560cb3f9cb968 --- /dev/null +++ b/ONS-AI-RESEARCH/ONS-SOLAR-10.7B/result_2024-01-15 04:48:50.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.49829351535836175, + "acc_stderr": 0.014611305705056995, + "acc_norm": 0.5554607508532423, + "acc_norm_stderr": 0.014521226405627079 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4596693885680143, + "acc_stderr": 0.004973522582431226, + "acc_norm": 0.6323441545508863, + "acc_norm_stderr": 0.004811815959388847 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6432748538011696, + "acc_stderr": 0.03674013002860954, + "acc_norm": 0.6432748538011696, + "acc_norm_stderr": 0.03674013002860954 + }, 
+ "harness|ko_mmlu_management|5": { + "acc": 0.6699029126213593, + "acc_stderr": 0.0465614711001235, + "acc_norm": 0.6699029126213593, + "acc_norm_stderr": 0.0465614711001235 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6819923371647509, + "acc_stderr": 0.016653486275615404, + "acc_norm": 0.6819923371647509, + "acc_norm_stderr": 0.016653486275615404 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4888888888888889, + "acc_stderr": 0.04318275491977976, + "acc_norm": 0.4888888888888889, + "acc_norm_stderr": 0.04318275491977976 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.43829787234042555, + "acc_stderr": 0.03243618636108102, + "acc_norm": 0.43829787234042555, + "acc_norm_stderr": 0.03243618636108102 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39759036144578314, + "acc_stderr": 0.03809973084540219, + "acc_norm": 0.39759036144578314, + "acc_norm_stderr": 0.03809973084540219 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5852090032154341, + "acc_stderr": 0.027982680459759577, + "acc_norm": 0.5852090032154341, + "acc_norm_stderr": 0.027982680459759577 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5605381165919282, + "acc_stderr": 0.033310925110381785, + "acc_norm": 0.5605381165919282, + "acc_norm_stderr": 0.033310925110381785 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5267175572519084, + "acc_stderr": 0.04379024936553894, + "acc_norm": 0.5267175572519084, + "acc_norm_stderr": 0.04379024936553894 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7070707070707071, + "acc_stderr": 0.032424979581788166, + "acc_norm": 0.7070707070707071, + "acc_norm_stderr": 0.032424979581788166 + }, + 
"harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4, + "acc_stderr": 0.04082482904638628, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04082482904638628 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.04533838195929776, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.04533838195929776 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.49159663865546216, + "acc_stderr": 0.03247390276569669, + "acc_norm": 0.49159663865546216, + "acc_norm_stderr": 0.03247390276569669 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5102564102564102, + "acc_stderr": 0.025345672221942374, + "acc_norm": 0.5102564102564102, + "acc_norm_stderr": 0.025345672221942374 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.59, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.59, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5833333333333334, + "acc_stderr": 0.04766075165356461, + "acc_norm": 0.5833333333333334, + "acc_norm_stderr": 0.04766075165356461 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39901477832512317, + "acc_stderr": 0.03445487686264716, + "acc_norm": 0.39901477832512317, + "acc_norm_stderr": 0.03445487686264716 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6129032258064516, + "acc_stderr": 0.027709359675032498, + "acc_norm": 0.6129032258064516, + "acc_norm_stderr": 0.027709359675032498 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7905982905982906, + "acc_stderr": 0.026655699653922737, + "acc_norm": 0.7905982905982906, + "acc_norm_stderr": 0.026655699653922737 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.539622641509434, + "acc_stderr": 0.030676096599389184, + "acc_norm": 0.539622641509434, + "acc_norm_stderr": 
0.030676096599389184 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5363636363636364, + "acc_stderr": 0.04776449162396197, + "acc_norm": 0.5363636363636364, + "acc_norm_stderr": 0.04776449162396197 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.02944316932303154, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.02944316932303154 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3443708609271523, + "acc_stderr": 0.038796870240733264, + "acc_norm": 0.3443708609271523, + "acc_norm_stderr": 0.038796870240733264 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6517412935323383, + "acc_stderr": 0.033687874661154596, + "acc_norm": 0.6517412935323383, + "acc_norm_stderr": 0.033687874661154596 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3699421965317919, + "acc_stderr": 0.03681229633394319, + "acc_norm": 0.3699421965317919, + "acc_norm_stderr": 0.03681229633394319 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.36772486772486773, + "acc_stderr": 0.024833839825562413, + "acc_norm": 0.36772486772486773, + "acc_norm_stderr": 0.024833839825562413 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5208333333333334, + "acc_stderr": 0.041775789507399935, + "acc_norm": 0.5208333333333334, + "acc_norm_stderr": 0.041775789507399935 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.67, + "acc_stderr": 0.04725815626252607, + "acc_norm": 0.67, + "acc_norm_stderr": 0.04725815626252607 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5491329479768786, + "acc_stderr": 0.026788811931562764, + "acc_norm": 0.5491329479768786, + "acc_norm_stderr": 0.026788811931562764 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5276073619631901, + "acc_stderr": 0.0392237829061099, + "acc_norm": 
0.5276073619631901, + "acc_norm_stderr": 0.0392237829061099 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5493827160493827, + "acc_stderr": 0.02768472141565619, + "acc_norm": 0.5493827160493827, + "acc_norm_stderr": 0.02768472141565619 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6632124352331606, + "acc_stderr": 0.03410780251836184, + "acc_norm": 0.6632124352331606, + "acc_norm_stderr": 0.03410780251836184 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04434600701584925, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04434600701584925 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6477064220183486, + "acc_stderr": 0.020480568843999004, + "acc_norm": 0.6477064220183486, + "acc_norm_stderr": 0.020480568843999004 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3492063492063492, + "acc_stderr": 0.04263906892795132, + "acc_norm": 0.3492063492063492, + "acc_norm_stderr": 0.04263906892795132 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.49673202614379086, + "acc_stderr": 0.028629305194003543, + "acc_norm": 0.49673202614379086, + "acc_norm_stderr": 0.028629305194003543 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6611570247933884, + "acc_stderr": 0.043207678075366705, + "acc_norm": 0.6611570247933884, + "acc_norm_stderr": 0.043207678075366705 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5657894736842105, + "acc_stderr": 0.04033565667848319, + "acc_norm": 0.5657894736842105, + "acc_norm_stderr": 0.04033565667848319 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4950980392156863, + "acc_stderr": 
0.020226862710039463, + "acc_norm": 0.4950980392156863, + "acc_norm_stderr": 0.020226862710039463 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.36879432624113473, + "acc_stderr": 0.028782227561347237, + "acc_norm": 0.36879432624113473, + "acc_norm_stderr": 0.028782227561347237 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3482142857142857, + "acc_stderr": 0.045218299028335865, + "acc_norm": 0.3482142857142857, + "acc_norm_stderr": 0.045218299028335865 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.375, + "acc_stderr": 0.033016908987210894, + "acc_norm": 0.375, + "acc_norm_stderr": 0.033016908987210894 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27150837988826815, + "acc_stderr": 0.014874252168095271, + "acc_norm": 0.27150837988826815, + "acc_norm_stderr": 0.014874252168095271 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.41544117647058826, + "acc_stderr": 0.02993534270787775, + "acc_norm": 0.41544117647058826, + "acc_norm_stderr": 0.02993534270787775 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5265306122448979, + "acc_stderr": 0.03196412734523272, + "acc_norm": 0.5265306122448979, + "acc_norm_stderr": 0.03196412734523272 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7130801687763713, + "acc_stderr": 0.029443773022594693, + "acc_norm": 0.7130801687763713, + "acc_norm_stderr": 0.029443773022594693 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3741851368970013, + "acc_stderr": 0.012359335618172061, + "acc_norm": 0.3741851368970013, + "acc_norm_stderr": 0.012359335618172061 + }, + 
"harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.03308611113236436, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.03308611113236436 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6424242424242425, + "acc_stderr": 0.03742597043806587, + "acc_norm": 0.6424242424242425, + "acc_norm_stderr": 0.03742597043806587 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3708690330477356, + "mc1_stderr": 0.016909693580248818, + "mc2": 0.5512609721020799, + "mc2_stderr": 0.015788162780218277 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5726092089728453, + "acc_stderr": 0.017008129844823156, + "acc_norm": 0.5796930342384888, + "acc_norm_stderr": 0.01697059828117771 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + 
"harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "ONS-AI-RESEARCH/ONS-SOLAR-10.7B", + "model_sha": "52022d88f677caf84b766f42cdb2d36ce4152d26", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/ONS-AI-RESEARCH/ONS-SOLAR-10.7B/result_2024-01-24 13:09:20.json b/ONS-AI-RESEARCH/ONS-SOLAR-10.7B/result_2024-01-24 13:09:20.json new file mode 100644 index 
0000000000000000000000000000000000000000..2d40f63d0c0dac1ff06f69c51ee3ca7b2c0696d0 --- /dev/null +++ b/ONS-AI-RESEARCH/ONS-SOLAR-10.7B/result_2024-01-24 13:09:20.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4974402730375427, + "acc_stderr": 0.014611199329843784, + "acc_norm": 0.5597269624573379, + "acc_norm_stderr": 0.01450676952480425 + }, + "harness|ko_hellaswag|10": { + "acc": 0.477096195976897, + "acc_stderr": 0.004984543540932339, + "acc_norm": 0.6506671977693687, + "acc_norm_stderr": 0.004757849023411964 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6491228070175439, + "acc_stderr": 0.03660298834049162, + "acc_norm": 0.6491228070175439, + "acc_norm_stderr": 0.03660298834049162 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6699029126213593, + "acc_stderr": 0.0465614711001235, + "acc_norm": 0.6699029126213593, + "acc_norm_stderr": 0.0465614711001235 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6845466155810983, + "acc_stderr": 0.016617501738763408, + "acc_norm": 0.6845466155810983, + "acc_norm_stderr": 0.016617501738763408 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.04309732901036354, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.04309732901036354 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4851063829787234, + "acc_stderr": 0.03267151848924777, + "acc_norm": 0.4851063829787234, + "acc_norm_stderr": 0.03267151848924777 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4939759036144578, + "acc_stderr": 0.03892212195333047, + "acc_norm": 0.4939759036144578, + "acc_norm_stderr": 0.03892212195333047 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6237942122186495, + "acc_stderr": 0.02751392568354943, + "acc_norm": 0.6237942122186495, + "acc_norm_stderr": 
0.02751392568354943 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.6233183856502242, + "acc_stderr": 0.03252113489929188, + "acc_norm": 0.6233183856502242, + "acc_norm_stderr": 0.03252113489929188 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6183206106870229, + "acc_stderr": 0.04260735157644561, + "acc_norm": 0.6183206106870229, + "acc_norm_stderr": 0.04260735157644561 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7323232323232324, + "acc_stderr": 0.03154449888270285, + "acc_norm": 0.7323232323232324, + "acc_norm_stderr": 0.03154449888270285 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.47586206896551725, + "acc_stderr": 0.041618085035015295, + "acc_norm": 0.47586206896551725, + "acc_norm_stderr": 0.041618085035015295 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3431372549019608, + "acc_stderr": 0.047240073523838876, + "acc_norm": 0.3431372549019608, + "acc_norm_stderr": 0.047240073523838876 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.634453781512605, + "acc_stderr": 0.031282177063684614, + "acc_norm": 0.634453781512605, + "acc_norm_stderr": 0.031282177063684614 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5487179487179488, + "acc_stderr": 0.02523038123893484, + "acc_norm": 0.5487179487179488, + "acc_norm_stderr": 0.02523038123893484 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.63, + "acc_stderr": 0.04852365870939098, + "acc_norm": 0.63, + "acc_norm_stderr": 0.04852365870939098 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6759259259259259, + "acc_stderr": 0.045245960070300476, + "acc_norm": 0.6759259259259259, + 
"acc_norm_stderr": 0.045245960070300476 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.37438423645320196, + "acc_stderr": 0.03405155380561952, + "acc_norm": 0.37438423645320196, + "acc_norm_stderr": 0.03405155380561952 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6516129032258065, + "acc_stderr": 0.02710482632810094, + "acc_norm": 0.6516129032258065, + "acc_norm_stderr": 0.02710482632810094 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.8034188034188035, + "acc_stderr": 0.02603538609895129, + "acc_norm": 0.8034188034188035, + "acc_norm_stderr": 0.02603538609895129 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.569811320754717, + "acc_stderr": 0.030471445867183238, + "acc_norm": 0.569811320754717, + "acc_norm_stderr": 0.030471445867183238 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6545454545454545, + "acc_stderr": 0.04554619617541054, + "acc_norm": 0.6545454545454545, + "acc_norm_stderr": 0.04554619617541054 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.362962962962963, + "acc_stderr": 0.029318203645206865, + "acc_norm": 0.362962962962963, + "acc_norm_stderr": 0.029318203645206865 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33774834437086093, + "acc_stderr": 0.038615575462551684, + "acc_norm": 0.33774834437086093, + "acc_norm_stderr": 0.038615575462551684 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7213930348258707, + "acc_stderr": 0.031700561834973086, + "acc_norm": 0.7213930348258707, + "acc_norm_stderr": 0.031700561834973086 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.48554913294797686, + "acc_stderr": 0.03810871630454764, + "acc_norm": 0.48554913294797686, + "acc_norm_stderr": 0.03810871630454764 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.38095238095238093, + "acc_stderr": 0.02501074911613761, + "acc_norm": 0.38095238095238093, + "acc_norm_stderr": 0.02501074911613761 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 
0.5138888888888888, + "acc_stderr": 0.04179596617581, + "acc_norm": 0.5138888888888888, + "acc_norm_stderr": 0.04179596617581 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.71, + "acc_stderr": 0.045604802157206824, + "acc_norm": 0.71, + "acc_norm_stderr": 0.045604802157206824 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.6502890173410405, + "acc_stderr": 0.025674281456531018, + "acc_norm": 0.6502890173410405, + "acc_norm_stderr": 0.025674281456531018 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5950920245398773, + "acc_stderr": 0.03856672163548914, + "acc_norm": 0.5950920245398773, + "acc_norm_stderr": 0.03856672163548914 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6203703703703703, + "acc_stderr": 0.027002521034516468, + "acc_norm": 0.6203703703703703, + "acc_norm_stderr": 0.027002521034516468 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7253886010362695, + "acc_stderr": 0.03221024508041153, + "acc_norm": 0.7253886010362695, + "acc_norm_stderr": 0.03221024508041153 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.43859649122807015, + "acc_stderr": 0.04668000738510455, + "acc_norm": 0.43859649122807015, + "acc_norm_stderr": 0.04668000738510455 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.726605504587156, + "acc_stderr": 0.019109299846098285, + "acc_norm": 0.726605504587156, + "acc_norm_stderr": 0.019109299846098285 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.46825396825396826, + "acc_stderr": 0.04463112720677171, + "acc_norm": 0.46825396825396826, + "acc_norm_stderr": 0.04463112720677171 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 
0.5947712418300654, + "acc_stderr": 0.02811092849280907, + "acc_norm": 0.5947712418300654, + "acc_norm_stderr": 0.02811092849280907 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.57, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.57, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7272727272727273, + "acc_stderr": 0.04065578140908705, + "acc_norm": 0.7272727272727273, + "acc_norm_stderr": 0.04065578140908705 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.625, + "acc_stderr": 0.039397364351956274, + "acc_norm": 0.625, + "acc_norm_stderr": 0.039397364351956274 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5359477124183006, + "acc_stderr": 0.020175488765484043, + "acc_norm": 0.5359477124183006, + "acc_norm_stderr": 0.020175488765484043 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.4219858156028369, + "acc_stderr": 0.029462189233370593, + "acc_norm": 0.4219858156028369, + "acc_norm_stderr": 0.029462189233370593 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4017857142857143, + "acc_stderr": 0.04653333146973646, + "acc_norm": 0.4017857142857143, + "acc_norm_stderr": 0.04653333146973646 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.47685185185185186, + "acc_stderr": 0.034063153607115086, + "acc_norm": 0.47685185185185186, + "acc_norm_stderr": 0.034063153607115086 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24581005586592178, + "acc_stderr": 0.014400296429225601, + "acc_norm": 0.24581005586592178, + "acc_norm_stderr": 0.014400296429225601 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.73, + "acc_stderr": 0.044619604333847415, + "acc_norm": 0.73, + "acc_norm_stderr": 0.044619604333847415 + }, + 
"harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5477941176470589, + "acc_stderr": 0.030233758551596452, + "acc_norm": 0.5477941176470589, + "acc_norm_stderr": 0.030233758551596452 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.689795918367347, + "acc_stderr": 0.02961345987248438, + "acc_norm": 0.689795918367347, + "acc_norm_stderr": 0.02961345987248438 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7510548523206751, + "acc_stderr": 0.028146970599422644, + "acc_norm": 0.7510548523206751, + "acc_norm_stderr": 0.028146970599422644 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.39308996088657105, + "acc_stderr": 0.012474899613873956, + "acc_norm": 0.39308996088657105, + "acc_norm_stderr": 0.012474899613873956 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.7696078431372549, + "acc_stderr": 0.029554292605695066, + "acc_norm": 0.7696078431372549, + "acc_norm_stderr": 0.029554292605695066 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6848484848484848, + "acc_stderr": 0.0362773057502241, + "acc_norm": 0.6848484848484848, + "acc_norm_stderr": 0.0362773057502241 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.4039167686658507, + "mc1_stderr": 0.017177276822584284, + "mc2": 0.5603431439848606, + "mc2_stderr": 0.01563460689975224 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5289256198347108, + "acc_stderr": 0.017161563949916345, + "acc_norm": 0.5584415584415584, + "acc_norm_stderr": 0.017072525875563103 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + 
"harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + 
"harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "ONS-AI-RESEARCH/ONS-SOLAR-10.7B", + "model_sha": "7cc81d3dd5ca7083e0cec1582fc5cc204cddd752", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/ONS-AI-RESEARCH/ONS-SOLAR-KOEN-10.7B/result_2024-03-05 07:05:34.json b/ONS-AI-RESEARCH/ONS-SOLAR-KOEN-10.7B/result_2024-03-05 07:05:34.json new file mode 100644 index 0000000000000000000000000000000000000000..944033d31dad14731a24dc016821a49a9a2beb34 --- /dev/null +++ b/ONS-AI-RESEARCH/ONS-SOLAR-KOEN-10.7B/result_2024-03-05 07:05:34.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4453924914675768, + "acc_stderr": 0.014523987638344085, + "acc_norm": 0.5281569965870307, + "acc_norm_stderr": 0.014588204105102205 + }, + "harness|ko_hellaswag|10": { + "acc": 0.44323839872535353, + "acc_stderr": 0.004957524197900426, + "acc_norm": 0.6009759012148974, + "acc_norm_stderr": 0.004886969266944273 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6900584795321637, + "acc_stderr": 0.035469769593931624, + "acc_norm": 0.6900584795321637, + "acc_norm_stderr": 0.035469769593931624 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6990291262135923, + "acc_stderr": 0.045416094465039476, + "acc_norm": 0.6990291262135923, + "acc_norm_stderr": 0.045416094465039476 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6768837803320562, + "acc_stderr": 0.016723726512343044, + "acc_norm": 0.6768837803320562, + "acc_norm_stderr": 0.016723726512343044 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45925925925925926, + "acc_stderr": 
0.04304979692464244, + "acc_norm": 0.45925925925925926, + "acc_norm_stderr": 0.04304979692464244 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.451063829787234, + "acc_stderr": 0.032529096196131965, + "acc_norm": 0.451063829787234, + "acc_norm_stderr": 0.032529096196131965 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.5060240963855421, + "acc_stderr": 0.038922121953330446, + "acc_norm": 0.5060240963855421, + "acc_norm_stderr": 0.038922121953330446 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5659163987138264, + "acc_stderr": 0.028150232244535597, + "acc_norm": 0.5659163987138264, + "acc_norm_stderr": 0.028150232244535597 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.57847533632287, + "acc_stderr": 0.03314190222110657, + "acc_norm": 0.57847533632287, + "acc_norm_stderr": 0.03314190222110657 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6335877862595419, + "acc_stderr": 0.042258754519696386, + "acc_norm": 0.6335877862595419, + "acc_norm_stderr": 0.042258754519696386 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956914, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956914 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7070707070707071, + "acc_stderr": 0.032424979581788166, + "acc_norm": 0.7070707070707071, + "acc_norm_stderr": 0.032424979581788166 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4827586206896552, + "acc_stderr": 0.04164188720169377, + "acc_norm": 0.4827586206896552, + "acc_norm_stderr": 0.04164188720169377 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.04023382273617747, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.04023382273617747 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 
0.6092436974789915, + "acc_stderr": 0.031693802357129965, + "acc_norm": 0.6092436974789915, + "acc_norm_stderr": 0.031693802357129965 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5282051282051282, + "acc_stderr": 0.025310639254933855, + "acc_norm": 0.5282051282051282, + "acc_norm_stderr": 0.025310639254933855 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.57, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.57, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6203703703703703, + "acc_stderr": 0.04691521224077742, + "acc_norm": 0.6203703703703703, + "acc_norm_stderr": 0.04691521224077742 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.37438423645320196, + "acc_stderr": 0.03405155380561952, + "acc_norm": 0.37438423645320196, + "acc_norm_stderr": 0.03405155380561952 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5838709677419355, + "acc_stderr": 0.028040981380761533, + "acc_norm": 0.5838709677419355, + "acc_norm_stderr": 0.028040981380761533 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.8034188034188035, + "acc_stderr": 0.02603538609895129, + "acc_norm": 0.8034188034188035, + "acc_norm_stderr": 0.02603538609895129 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5283018867924528, + "acc_stderr": 0.0307235352490061, + "acc_norm": 0.5283018867924528, + "acc_norm_stderr": 0.0307235352490061 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5818181818181818, + "acc_stderr": 0.04724577405731572, + "acc_norm": 0.5818181818181818, + "acc_norm_stderr": 0.04724577405731572 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.027940457136228412, + "acc_norm": 0.3, + "acc_norm_stderr": 0.027940457136228412 + }, + "harness|ko_mmlu_high_school_physics|5": 
{ + "acc": 0.3708609271523179, + "acc_stderr": 0.03943966699183629, + "acc_norm": 0.3708609271523179, + "acc_norm_stderr": 0.03943966699183629 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7263681592039801, + "acc_stderr": 0.03152439186555404, + "acc_norm": 0.7263681592039801, + "acc_norm_stderr": 0.03152439186555404 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5144508670520231, + "acc_stderr": 0.03810871630454764, + "acc_norm": 0.5144508670520231, + "acc_norm_stderr": 0.03810871630454764 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.4021164021164021, + "acc_stderr": 0.02525303255499768, + "acc_norm": 0.4021164021164021, + "acc_norm_stderr": 0.02525303255499768 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5, + "acc_stderr": 0.04181210050035455, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04181210050035455 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.72, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.72, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5780346820809249, + "acc_stderr": 0.02658923114217426, + "acc_norm": 0.5780346820809249, + "acc_norm_stderr": 0.02658923114217426 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.6196319018404908, + "acc_stderr": 0.038142698932618374, + "acc_norm": 0.6196319018404908, + "acc_norm_stderr": 0.038142698932618374 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5802469135802469, + "acc_stderr": 0.02746009955700513, + "acc_norm": 0.5802469135802469, + "acc_norm_stderr": 0.02746009955700513 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 
0.6424870466321243, + "acc_stderr": 0.034588160421810114, + "acc_norm": 0.6424870466321243, + "acc_norm_stderr": 0.034588160421810114 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.4473684210526316, + "acc_stderr": 0.04677473004491199, + "acc_norm": 0.4473684210526316, + "acc_norm_stderr": 0.04677473004491199 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6990825688073394, + "acc_stderr": 0.019664751366802114, + "acc_norm": 0.6990825688073394, + "acc_norm_stderr": 0.019664751366802114 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.04360314860077459, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.04360314860077459 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5228758169934641, + "acc_stderr": 0.028599936776089775, + "acc_norm": 0.5228758169934641, + "acc_norm_stderr": 0.028599936776089775 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956913, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956913 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6859504132231405, + "acc_stderr": 0.042369647530410184, + "acc_norm": 0.6859504132231405, + "acc_norm_stderr": 0.042369647530410184 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.625, + "acc_stderr": 0.039397364351956274, + "acc_norm": 0.625, + "acc_norm_stderr": 0.039397364351956274 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5130718954248366, + "acc_stderr": 0.020220920829626923, + "acc_norm": 0.5130718954248366, + "acc_norm_stderr": 0.020220920829626923 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.4078014184397163, + "acc_stderr": 0.02931601177634356, + "acc_norm": 0.4078014184397163, + "acc_norm_stderr": 0.02931601177634356 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4732142857142857, + "acc_stderr": 0.047389751192741546, + "acc_norm": 0.4732142857142857, + "acc_norm_stderr": 0.047389751192741546 + }, + 
"harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4212962962962963, + "acc_stderr": 0.03367462138896078, + "acc_norm": 0.4212962962962963, + "acc_norm_stderr": 0.03367462138896078 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24134078212290502, + "acc_stderr": 0.014310999547961464, + "acc_norm": 0.24134078212290502, + "acc_norm_stderr": 0.014310999547961464 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.63, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.63, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4485294117647059, + "acc_stderr": 0.0302114796091216, + "acc_norm": 0.4485294117647059, + "acc_norm_stderr": 0.0302114796091216 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5020408163265306, + "acc_stderr": 0.0320089533497105, + "acc_norm": 0.5020408163265306, + "acc_norm_stderr": 0.0320089533497105 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7257383966244726, + "acc_stderr": 0.029041333510598018, + "acc_norm": 0.7257383966244726, + "acc_norm_stderr": 0.029041333510598018 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.38722294654498046, + "acc_stderr": 0.012441155326854922, + "acc_norm": 0.38722294654498046, + "acc_norm_stderr": 0.012441155326854922 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6225490196078431, + "acc_stderr": 0.03402272044340703, + "acc_norm": 0.6225490196078431, + "acc_norm_stderr": 0.03402272044340703 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.703030303030303, + "acc_stderr": 0.035679697722680474, + "acc_norm": 0.703030303030303, + "acc_norm_stderr": 0.035679697722680474 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.4430844553243574, + "mc1_stderr": 0.017389730346877096, + "mc2": 
0.61367759778663, + "mc2_stderr": 0.015583862602637973 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5182998819362455, + "acc_stderr": 0.017178836639177762, + "acc_norm": 0.5419126328217237, + "acc_norm_stderr": 0.017129852117911144 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + 
"harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "ONS-AI-RESEARCH/ONS-SOLAR-KOEN-10.7B", + "model_sha": "bf01f56c001fee7c03b1106ac27ea0db68116838", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Open-Orca/Mistral-7B-OpenOrca/result_2023-11-15 23:41:59.json b/Open-Orca/Mistral-7B-OpenOrca/result_2023-11-15 23:41:59.json new file mode 100644 index 0000000000000000000000000000000000000000..2cff1a3087dada9021faa1dbeec4b61ab0dde534 --- /dev/null +++ b/Open-Orca/Mistral-7B-OpenOrca/result_2023-11-15 23:41:59.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.33361774744027306, + "acc_stderr": 0.013778687054176536, + "acc_norm": 0.39334470989761094, + "acc_norm_stderr": 0.014275101465693028 + }, + "harness|ko_hellaswag|10": { + "acc": 0.37054371639115713, + "acc_stderr": 0.004819633668832544, + "acc_norm": 0.47480581557458673, + "acc_norm_stderr": 
0.004983442888677775 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5029239766081871, + "acc_stderr": 0.03834759370936839, + "acc_norm": 0.5029239766081871, + "acc_norm_stderr": 0.03834759370936839 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5533980582524272, + "acc_stderr": 0.04922424153458933, + "acc_norm": 0.5533980582524272, + "acc_norm_stderr": 0.04922424153458933 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4521072796934866, + "acc_stderr": 0.01779775149386562, + "acc_norm": 0.4521072796934866, + "acc_norm_stderr": 0.01779775149386562 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3851851851851852, + "acc_stderr": 0.042039210401562783, + "acc_norm": 0.3851851851851852, + "acc_norm_stderr": 0.042039210401562783 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.35319148936170214, + "acc_stderr": 0.031245325202761926, + "acc_norm": 0.35319148936170214, + "acc_norm_stderr": 0.031245325202761926 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.40963855421686746, + "acc_stderr": 0.03828401115079022, + "acc_norm": 0.40963855421686746, + "acc_norm_stderr": 0.03828401115079022 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.44694533762057875, + "acc_stderr": 0.028237769422085328, + "acc_norm": 0.44694533762057875, + "acc_norm_stderr": 0.028237769422085328 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4125560538116592, + "acc_stderr": 0.03304062175449297, + "acc_norm": 0.4125560538116592, + "acc_norm_stderr": 0.03304062175449297 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4580152671755725, + "acc_stderr": 0.04369802690578756, + "acc_norm": 0.4580152671755725, + "acc_norm_stderr": 0.04369802690578756 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 
0.04725815626252606 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5858585858585859, + "acc_stderr": 0.03509438348879629, + "acc_norm": 0.5858585858585859, + "acc_norm_stderr": 0.03509438348879629 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.47586206896551725, + "acc_stderr": 0.041618085035015295, + "acc_norm": 0.47586206896551725, + "acc_norm_stderr": 0.041618085035015295 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.04158307533083286, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.04158307533083286 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5126050420168067, + "acc_stderr": 0.03246816765752174, + "acc_norm": 0.5126050420168067, + "acc_norm_stderr": 0.03246816765752174 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4153846153846154, + "acc_stderr": 0.024985354923102308, + "acc_norm": 0.4153846153846154, + "acc_norm_stderr": 0.024985354923102308 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39901477832512317, + "acc_stderr": 0.03445487686264715, + "acc_norm": 0.39901477832512317, + "acc_norm_stderr": 0.03445487686264715 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4096774193548387, + "acc_stderr": 0.027976054915347354, + "acc_norm": 0.4096774193548387, + "acc_norm_stderr": 0.027976054915347354 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7264957264957265, + "acc_stderr": 0.029202540153431187, + "acc_norm": 
0.7264957264957265, + "acc_norm_stderr": 0.029202540153431187 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.44150943396226416, + "acc_stderr": 0.030561590426731833, + "acc_norm": 0.44150943396226416, + "acc_norm_stderr": 0.030561590426731833 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.44545454545454544, + "acc_stderr": 0.047605488214603246, + "acc_norm": 0.44545454545454544, + "acc_norm_stderr": 0.047605488214603246 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.32592592592592595, + "acc_stderr": 0.02857834836547308, + "acc_norm": 0.32592592592592595, + "acc_norm_stderr": 0.02857834836547308 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33112582781456956, + "acc_stderr": 0.038425817186598696, + "acc_norm": 0.33112582781456956, + "acc_norm_stderr": 0.038425817186598696 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.582089552238806, + "acc_stderr": 0.03487558640462064, + "acc_norm": 0.582089552238806, + "acc_norm_stderr": 0.03487558640462064 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3583815028901734, + "acc_stderr": 0.03656343653353159, + "acc_norm": 0.3583815028901734, + "acc_norm_stderr": 0.03656343653353159 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.373015873015873, + "acc_stderr": 0.02490699045899257, + "acc_norm": 0.373015873015873, + "acc_norm_stderr": 0.02490699045899257 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3194444444444444, + "acc_stderr": 0.038990736873573344, + "acc_norm": 0.3194444444444444, + "acc_norm_stderr": 0.038990736873573344 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5, + "acc_stderr": 
0.026919095102908273, + "acc_norm": 0.5, + "acc_norm_stderr": 0.026919095102908273 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5030674846625767, + "acc_stderr": 0.03928297078179663, + "acc_norm": 0.5030674846625767, + "acc_norm_stderr": 0.03928297078179663 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.44135802469135804, + "acc_stderr": 0.027628737155668777, + "acc_norm": 0.44135802469135804, + "acc_norm_stderr": 0.027628737155668777 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.47668393782383417, + "acc_stderr": 0.03604513672442205, + "acc_norm": 0.47668393782383417, + "acc_norm_stderr": 0.03604513672442205 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.22807017543859648, + "acc_stderr": 0.03947152782669415, + "acc_norm": 0.22807017543859648, + "acc_norm_stderr": 0.03947152782669415 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.48440366972477067, + "acc_stderr": 0.02142689153920805, + "acc_norm": 0.48440366972477067, + "acc_norm_stderr": 0.02142689153920805 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.0442626668137991, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.0442626668137991 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5032679738562091, + "acc_stderr": 0.028629305194003543, + "acc_norm": 0.5032679738562091, + "acc_norm_stderr": 0.028629305194003543 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.043913262867240704, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.043913262867240704 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 
0.4407894736842105, + "acc_stderr": 0.040403110624904356, + "acc_norm": 0.4407894736842105, + "acc_norm_stderr": 0.040403110624904356 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.019659922493623343, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.019659922493623343 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.02812163604063989, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.02812163604063989 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.38392857142857145, + "acc_stderr": 0.04616143075028547, + "acc_norm": 0.38392857142857145, + "acc_norm_stderr": 0.04616143075028547 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.39351851851851855, + "acc_stderr": 0.03331747876370312, + "acc_norm": 0.39351851851851855, + "acc_norm_stderr": 0.03331747876370312 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.22681564245810057, + "acc_stderr": 0.014005843570897887, + "acc_norm": 0.22681564245810057, + "acc_norm_stderr": 0.014005843570897887 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.35661764705882354, + "acc_stderr": 0.02909720956841195, + "acc_norm": 0.35661764705882354, + "acc_norm_stderr": 0.02909720956841195 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5183673469387755, + "acc_stderr": 0.03198761546763128, + "acc_norm": 0.5183673469387755, + "acc_norm_stderr": 0.03198761546763128 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6075949367088608, + "acc_stderr": 0.031784718745647304, + "acc_norm": 0.6075949367088608, + 
"acc_norm_stderr": 0.031784718745647304 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3239895697522816, + "acc_stderr": 0.011952840809646566, + "acc_norm": 0.3239895697522816, + "acc_norm_stderr": 0.011952840809646566 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.3872549019607843, + "acc_stderr": 0.03418931233833344, + "acc_norm": 0.3872549019607843, + "acc_norm_stderr": 0.03418931233833344 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4484848484848485, + "acc_stderr": 0.03883565977956929, + "acc_norm": 0.4484848484848485, + "acc_norm_stderr": 0.03883565977956929 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3157894736842105, + "mc1_stderr": 0.01627228795791693, + "mc2": 0.4954694202499875, + "mc2_stderr": 0.01593113231156763 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.46989374262101535, + "acc_stderr": 0.017159163590170216, + "acc_norm": 0.500590318772137, + "acc_norm_stderr": 0.017190342123448586 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + 
"harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Open-Orca/Mistral-7B-OpenOrca", + "model_sha": "8f934b2bd2d4484b846a7faf1c53093b9d956367", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No 
newline at end of file diff --git a/Open-Orca/Mistral-7B-SlimOrca/result_2023-10-25 02:17:52.json b/Open-Orca/Mistral-7B-SlimOrca/result_2023-10-25 02:17:52.json new file mode 100644 index 0000000000000000000000000000000000000000..315640acf2385434ea38b4bb20e1c9075cb6af95 --- /dev/null +++ b/Open-Orca/Mistral-7B-SlimOrca/result_2023-10-25 02:17:52.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.33532423208191126, + "acc_stderr": 0.013796182947785559, + "acc_norm": 0.38139931740614336, + "acc_norm_stderr": 0.01419438908668526 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3732324238199562, + "acc_stderr": 0.004826746160830184, + "acc_norm": 0.48048197570205137, + "acc_norm_stderr": 0.004985978214937919 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.45614035087719296, + "acc_stderr": 0.03820042586602966, + "acc_norm": 0.45614035087719296, + "acc_norm_stderr": 0.03820042586602966 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5339805825242718, + "acc_stderr": 0.04939291447273481, + "acc_norm": 0.5339805825242718, + "acc_norm_stderr": 0.04939291447273481 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4827586206896552, + "acc_stderr": 0.017869330154003705, + "acc_norm": 0.4827586206896552, + "acc_norm_stderr": 0.017869330154003705 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.041716541613545426, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.041716541613545426 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4, + "acc_stderr": 0.03202563076101735, + "acc_norm": 0.4, + "acc_norm_stderr": 0.03202563076101735 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4397590361445783, + "acc_stderr": 0.03864139923699122, + "acc_norm": 0.4397590361445783, + "acc_norm_stderr": 0.03864139923699122 + }, + 
"harness|ko_mmlu_philosophy|5": { + "acc": 0.47266881028938906, + "acc_stderr": 0.028355633568328188, + "acc_norm": 0.47266881028938906, + "acc_norm_stderr": 0.028355633568328188 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4618834080717489, + "acc_stderr": 0.033460150119732274, + "acc_norm": 0.4618834080717489, + "acc_norm_stderr": 0.033460150119732274 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.40458015267175573, + "acc_stderr": 0.043046937953806645, + "acc_norm": 0.40458015267175573, + "acc_norm_stderr": 0.043046937953806645 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5606060606060606, + "acc_stderr": 0.0353608594752948, + "acc_norm": 0.5606060606060606, + "acc_norm_stderr": 0.0353608594752948 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4413793103448276, + "acc_stderr": 0.04137931034482758, + "acc_norm": 0.4413793103448276, + "acc_norm_stderr": 0.04137931034482758 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.04488482852329017, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.04488482852329017 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.46218487394957986, + "acc_stderr": 0.032385469487589795, + "acc_norm": 0.46218487394957986, + "acc_norm_stderr": 0.032385469487589795 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.41025641025641024, + "acc_stderr": 0.024939313906940777, + "acc_norm": 0.41025641025641024, + "acc_norm_stderr": 0.024939313906940777 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.63, + "acc_stderr": 0.04852365870939098, + "acc_norm": 0.63, + "acc_norm_stderr": 0.04852365870939098 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 
0.0440844002276808 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5092592592592593, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.5092592592592593, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3645320197044335, + "acc_stderr": 0.0338640574606209, + "acc_norm": 0.3645320197044335, + "acc_norm_stderr": 0.0338640574606209 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4290322580645161, + "acc_stderr": 0.02815603653823321, + "acc_norm": 0.4290322580645161, + "acc_norm_stderr": 0.02815603653823321 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7435897435897436, + "acc_stderr": 0.028605953702004253, + "acc_norm": 0.7435897435897436, + "acc_norm_stderr": 0.028605953702004253 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4490566037735849, + "acc_stderr": 0.030612730713641095, + "acc_norm": 0.4490566037735849, + "acc_norm_stderr": 0.030612730713641095 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4909090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.4909090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.34444444444444444, + "acc_stderr": 0.02897264888484427, + "acc_norm": 0.34444444444444444, + "acc_norm_stderr": 0.02897264888484427 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + "acc_stderr": 0.03684881521389024, + "acc_norm": 0.2847682119205298, + "acc_norm_stderr": 0.03684881521389024 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5870646766169154, + "acc_stderr": 0.03481520803367348, + "acc_norm": 0.5870646766169154, + "acc_norm_stderr": 0.03481520803367348 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.37572254335260113, + "acc_stderr": 0.03692820767264867, + "acc_norm": 0.37572254335260113, + "acc_norm_stderr": 0.03692820767264867 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.37566137566137564, + 
"acc_stderr": 0.024942368931159788, + "acc_norm": 0.37566137566137564, + "acc_norm_stderr": 0.024942368931159788 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3819444444444444, + "acc_stderr": 0.040629907841466674, + "acc_norm": 0.3819444444444444, + "acc_norm_stderr": 0.040629907841466674 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.56, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.56, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5028901734104047, + "acc_stderr": 0.02691864538323901, + "acc_norm": 0.5028901734104047, + "acc_norm_stderr": 0.02691864538323901 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4785276073619632, + "acc_stderr": 0.03924746876751129, + "acc_norm": 0.4785276073619632, + "acc_norm_stderr": 0.03924746876751129 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5061728395061729, + "acc_stderr": 0.02781862396258329, + "acc_norm": 0.5061728395061729, + "acc_norm_stderr": 0.02781862396258329 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.49740932642487046, + "acc_stderr": 0.03608390745384487, + "acc_norm": 0.49740932642487046, + "acc_norm_stderr": 0.03608390745384487 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.0414243971948936, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.0414243971948936 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.48256880733944957, + "acc_stderr": 0.02142429187185315, + "acc_norm": 0.48256880733944957, + "acc_norm_stderr": 0.02142429187185315 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 
0.35714285714285715, + "acc_stderr": 0.04285714285714281, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.04285714285714281 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.46405228758169936, + "acc_stderr": 0.028555827516528787, + "acc_norm": 0.46405228758169936, + "acc_norm_stderr": 0.028555827516528787 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6446280991735537, + "acc_stderr": 0.0436923632657398, + "acc_norm": 0.6446280991735537, + "acc_norm_stderr": 0.0436923632657398 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40131578947368424, + "acc_stderr": 0.03988903703336284, + "acc_norm": 0.40131578947368424, + "acc_norm_stderr": 0.03988903703336284 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3758169934640523, + "acc_stderr": 0.019594021136577447, + "acc_norm": 0.3758169934640523, + "acc_norm_stderr": 0.019594021136577447 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.33687943262411346, + "acc_stderr": 0.028195534873966727, + "acc_norm": 0.33687943262411346, + "acc_norm_stderr": 0.028195534873966727 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4732142857142857, + "acc_stderr": 0.047389751192741546, + "acc_norm": 0.4732142857142857, + "acc_norm_stderr": 0.047389751192741546 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3472222222222222, + "acc_stderr": 0.032468872436376486, + "acc_norm": 0.3472222222222222, + "acc_norm_stderr": 0.032468872436376486 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.1877094972067039, + "acc_stderr": 0.013059605303257046, + "acc_norm": 0.1877094972067039, + "acc_norm_stderr": 0.013059605303257046 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + 
"harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3713235294117647, + "acc_stderr": 0.02934980313976587, + "acc_norm": 0.3713235294117647, + "acc_norm_stderr": 0.02934980313976587 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.46938775510204084, + "acc_stderr": 0.031949171367580624, + "acc_norm": 0.46938775510204084, + "acc_norm_stderr": 0.031949171367580624 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5738396624472574, + "acc_stderr": 0.03219035703131775, + "acc_norm": 0.5738396624472574, + "acc_norm_stderr": 0.03219035703131775 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3318122555410691, + "acc_stderr": 0.012026088259897639, + "acc_norm": 0.3318122555410691, + "acc_norm_stderr": 0.012026088259897639 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.3627450980392157, + "acc_stderr": 0.03374499356319355, + "acc_norm": 0.3627450980392157, + "acc_norm_stderr": 0.03374499356319355 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4303030303030303, + "acc_stderr": 0.03866225962879077, + "acc_norm": 0.4303030303030303, + "acc_norm_stderr": 0.03866225962879077 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.30966952264381886, + "mc1_stderr": 0.016185744355144895, + "mc2": 0.4995755882922268, + "mc2_stderr": 0.015845203460942626 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.44037780401416765, + "acc_stderr": 0.017067699774312974, + "acc_norm": 0.45690672963400236, + "acc_norm_stderr": 0.017126389093086784 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + 
"harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + 
"harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Open-Orca/Mistral-7B-SlimOrca", + "model_sha": "da461634dccd94d2da6a8de3b3cd84a527f60588", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/OpenBuddy/openbuddy-llama2-13b-v8.1-fp16/result_2023-10-04 09:36:31.json b/OpenBuddy/openbuddy-llama2-13b-v8.1-fp16/result_2023-10-04 09:36:31.json new file mode 100644 index 0000000000000000000000000000000000000000..19a1887e9f75a0221b92ae751f38aada1a9de42a --- /dev/null +++ b/OpenBuddy/openbuddy-llama2-13b-v8.1-fp16/result_2023-10-04 09:36:31.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.23464163822525597, + "acc_stderr": 0.012383873560768673, + "acc_norm": 0.26023890784982934, + "acc_norm_stderr": 0.01282193022511256 + }, + "harness|ko_hellaswag|10": { + "acc": 0.2562238597888867, + "acc_stderr": 0.0043565471858470406, + "acc_norm": 0.2517426807408883, + "acc_norm_stderr": 0.004331271717773835 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.27485380116959063, + "acc_stderr": 0.034240429246915824, + "acc_norm": 0.27485380116959063, + "acc_norm_stderr": 0.034240429246915824 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.17475728155339806, + "acc_stderr": 0.037601780060266196, + "acc_norm": 0.17475728155339806, + "acc_norm_stderr": 0.037601780060266196 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.280970625798212, + "acc_stderr": 0.01607312785122124, + 
"acc_norm": 0.280970625798212, + "acc_norm_stderr": 0.01607312785122124 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04072314811876837, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04072314811876837 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621502, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621502 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.28936170212765955, + "acc_stderr": 0.02964400657700962, + "acc_norm": 0.28936170212765955, + "acc_norm_stderr": 0.02964400657700962 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.26506024096385544, + "acc_stderr": 0.03436024037944966, + "acc_norm": 0.26506024096385544, + "acc_norm_stderr": 0.03436024037944966 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2990353697749196, + "acc_stderr": 0.02600330111788514, + "acc_norm": 0.2990353697749196, + "acc_norm_stderr": 0.02600330111788514 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3004484304932735, + "acc_stderr": 0.03076935200822915, + "acc_norm": 0.3004484304932735, + "acc_norm_stderr": 0.03076935200822915 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.24427480916030533, + "acc_stderr": 0.037683359597287434, + "acc_norm": 0.24427480916030533, + "acc_norm_stderr": 0.037683359597287434 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.21717171717171718, + "acc_stderr": 0.02937661648494563, + "acc_norm": 0.21717171717171718, + "acc_norm_stderr": 0.02937661648494563 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.22758620689655173, + "acc_stderr": 0.03493950380131184, + "acc_norm": 0.22758620689655173, + "acc_norm_stderr": 0.03493950380131184 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.1568627450980392, + "acc_stderr": 
0.03618664819936245, + "acc_norm": 0.1568627450980392, + "acc_norm_stderr": 0.03618664819936245 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.23109243697478993, + "acc_stderr": 0.02738140692786897, + "acc_norm": 0.23109243697478993, + "acc_norm_stderr": 0.02738140692786897 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2128205128205128, + "acc_stderr": 0.020752423722128002, + "acc_norm": 0.2128205128205128, + "acc_norm_stderr": 0.020752423722128002 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.21296296296296297, + "acc_stderr": 0.03957835471980981, + "acc_norm": 0.21296296296296297, + "acc_norm_stderr": 0.03957835471980981 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.29064039408866993, + "acc_stderr": 0.0319474007226554, + "acc_norm": 0.29064039408866993, + "acc_norm_stderr": 0.0319474007226554 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.25161290322580643, + "acc_stderr": 0.024685979286239956, + "acc_norm": 0.25161290322580643, + "acc_norm_stderr": 0.024685979286239956 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.19658119658119658, + "acc_stderr": 0.02603538609895129, + "acc_norm": 0.19658119658119658, + "acc_norm_stderr": 0.02603538609895129 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.26037735849056604, + "acc_stderr": 0.0270087660907081, + "acc_norm": 0.26037735849056604, + "acc_norm_stderr": 0.0270087660907081 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.20909090909090908, + "acc_stderr": 0.03895091015724135, + "acc_norm": 0.20909090909090908, + "acc_norm_stderr": 0.03895091015724135 + }, + 
"harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.22962962962962963, + "acc_stderr": 0.02564410863926762, + "acc_norm": 0.22962962962962963, + "acc_norm_stderr": 0.02564410863926762 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.1986754966887417, + "acc_stderr": 0.03257847384436776, + "acc_norm": 0.1986754966887417, + "acc_norm_stderr": 0.03257847384436776 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.25870646766169153, + "acc_stderr": 0.030965903123573026, + "acc_norm": 0.25870646766169153, + "acc_norm_stderr": 0.030965903123573026 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2658959537572254, + "acc_stderr": 0.033687629322594316, + "acc_norm": 0.2658959537572254, + "acc_norm_stderr": 0.033687629322594316 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2566137566137566, + "acc_stderr": 0.022494510767503154, + "acc_norm": 0.2566137566137566, + "acc_norm_stderr": 0.022494510767503154 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.20833333333333334, + "acc_stderr": 0.03396116205845335, + "acc_norm": 0.20833333333333334, + "acc_norm_stderr": 0.03396116205845335 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.18, + "acc_stderr": 0.03861229196653695, + "acc_norm": 0.18, + "acc_norm_stderr": 0.03861229196653695 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2832369942196532, + "acc_stderr": 0.02425790170532338, + "acc_norm": 0.2832369942196532, + "acc_norm_stderr": 0.02425790170532338 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.22699386503067484, + "acc_stderr": 0.03291099578615771, + "acc_norm": 0.22699386503067484, + "acc_norm_stderr": 0.03291099578615771 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.023132376234543353, + "acc_norm": 0.2222222222222222, + 
"acc_norm_stderr": 0.023132376234543353 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.22797927461139897, + "acc_stderr": 0.030276909945178256, + "acc_norm": 0.22797927461139897, + "acc_norm_stderr": 0.030276909945178256 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.04049339297748141, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.04049339297748141 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.23669724770642203, + "acc_stderr": 0.018224078117299095, + "acc_norm": 0.23669724770642203, + "acc_norm_stderr": 0.018224078117299095 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.16666666666666666, + "acc_stderr": 0.03333333333333337, + "acc_norm": 0.16666666666666666, + "acc_norm_stderr": 0.03333333333333337 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.23202614379084968, + "acc_stderr": 0.024170840879341016, + "acc_norm": 0.23202614379084968, + "acc_norm_stderr": 0.024170840879341016 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.24793388429752067, + "acc_stderr": 0.039418975265163025, + "acc_norm": 0.24793388429752067, + "acc_norm_stderr": 0.039418975265163025 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.17763157894736842, + "acc_stderr": 0.03110318238312338, + "acc_norm": 0.17763157894736842, + "acc_norm_stderr": 0.03110318238312338 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.22875816993464052, + "acc_stderr": 0.016992723465466236, + "acc_norm": 0.22875816993464052, + "acc_norm_stderr": 0.016992723465466236 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.25886524822695034, + 
"acc_stderr": 0.026129572527180848, + "acc_norm": 0.25886524822695034, + "acc_norm_stderr": 0.026129572527180848 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.23214285714285715, + "acc_stderr": 0.04007341809755808, + "acc_norm": 0.23214285714285715, + "acc_norm_stderr": 0.04007341809755808 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.0340470532865388, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.0340470532865388 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24804469273743016, + "acc_stderr": 0.014444157808261453, + "acc_norm": 0.24804469273743016, + "acc_norm_stderr": 0.014444157808261453 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3382352941176471, + "acc_stderr": 0.028739328513983566, + "acc_norm": 0.3382352941176471, + "acc_norm_stderr": 0.028739328513983566 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2938775510204082, + "acc_stderr": 0.029162738410249772, + "acc_norm": 0.2938775510204082, + "acc_norm_stderr": 0.029162738410249772 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.3459915611814346, + "acc_stderr": 0.03096481058878671, + "acc_norm": 0.3459915611814346, + "acc_norm_stderr": 0.03096481058878671 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.25684485006518903, + "acc_stderr": 0.011158455853098844, + "acc_norm": 0.25684485006518903, + "acc_norm_stderr": 0.011158455853098844 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.28921568627450983, + "acc_stderr": 0.031822318676475544, + "acc_norm": 0.28921568627450983, + "acc_norm_stderr": 0.031822318676475544 + 
}, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2606060606060606, + "acc_stderr": 0.03427743175816524, + "acc_norm": 0.2606060606060606, + "acc_norm_stderr": 0.03427743175816524 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2729498164014688, + "mc1_stderr": 0.01559475363200652, + "mc2": 0.4462776885774457, + "mc2_stderr": 0.01611369655251753 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.07083825265643448, + "acc_stderr": 0.008820524274864483, + "acc_norm": 0.21841794569067297, + "acc_norm_stderr": 0.014205164490995575 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + 
"harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "OpenBuddy/openbuddy-llama2-13b-v8.1-fp16", + "model_sha": "982a6b50fe0fa7e1bc3816d6d28484038e9daf29", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/OpenBuddy/openbuddy-llama3-8b-v21.1-8k/result_2024-04-25 14:16:21.json b/OpenBuddy/openbuddy-llama3-8b-v21.1-8k/result_2024-04-25 14:16:21.json new file mode 100644 index 0000000000000000000000000000000000000000..5f21c8e9d5f677da637fa975d3936cdf86721efb --- /dev/null +++ b/OpenBuddy/openbuddy-llama3-8b-v21.1-8k/result_2024-04-25 14:16:21.json @@ -0,0 +1,444 @@ +{ + "results": { + 
"harness|ko_arc_challenge|25": { + "acc": 0.3839590443686007, + "acc_stderr": 0.01421244498065189, + "acc_norm": 0.4539249146757679, + "acc_norm_stderr": 0.01454922110517187 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3811989643497311, + "acc_stderr": 0.004846886929763465, + "acc_norm": 0.49522007568213505, + "acc_norm_stderr": 0.004989553396413102 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5847953216374269, + "acc_stderr": 0.037792759455032014, + "acc_norm": 0.5847953216374269, + "acc_norm_stderr": 0.037792759455032014 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6310679611650486, + "acc_stderr": 0.04777615181156739, + "acc_norm": 0.6310679611650486, + "acc_norm_stderr": 0.04777615181156739 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.01776925058353325, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.01776925058353325 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45185185185185184, + "acc_stderr": 0.042992689054808624, + "acc_norm": 0.45185185185185184, + "acc_norm_stderr": 0.042992689054808624 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.46808510638297873, + "acc_stderr": 0.03261936918467382, + "acc_norm": 0.46808510638297873, + "acc_norm_stderr": 0.03261936918467382 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42168674698795183, + "acc_stderr": 0.03844453181770917, + "acc_norm": 0.42168674698795183, + "acc_norm_stderr": 0.03844453181770917 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5273311897106109, + "acc_stderr": 0.028355633568328174, + "acc_norm": 0.5273311897106109, + "acc_norm_stderr": 0.028355633568328174 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.515695067264574, + "acc_stderr": 0.0335412657542081, + "acc_norm": 0.515695067264574, + "acc_norm_stderr": 0.0335412657542081 + }, + 
"harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5648854961832062, + "acc_stderr": 0.043482080516448585, + "acc_norm": 0.5648854961832062, + "acc_norm_stderr": 0.043482080516448585 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6262626262626263, + "acc_stderr": 0.034468977386593325, + "acc_norm": 0.6262626262626263, + "acc_norm_stderr": 0.034468977386593325 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.45517241379310347, + "acc_stderr": 0.04149886942192117, + "acc_norm": 0.45517241379310347, + "acc_norm_stderr": 0.04149886942192117 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.04488482852329017, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.04488482852329017 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5, + "acc_stderr": 0.032478490123081544, + "acc_norm": 0.5, + "acc_norm_stderr": 0.032478490123081544 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.44871794871794873, + "acc_stderr": 0.025217315184846482, + "acc_norm": 0.44871794871794873, + "acc_norm_stderr": 0.025217315184846482 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5648148148148148, + "acc_stderr": 0.04792898170907061, + "acc_norm": 0.5648148148148148, + "acc_norm_stderr": 0.04792898170907061 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4482758620689655, + "acc_stderr": 0.03499113137676744, + "acc_norm": 0.4482758620689655, + "acc_norm_stderr": 0.03499113137676744 + }, + 
"harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5290322580645161, + "acc_stderr": 0.028396016402761, + "acc_norm": 0.5290322580645161, + "acc_norm_stderr": 0.028396016402761 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7350427350427351, + "acc_stderr": 0.028911208802749472, + "acc_norm": 0.7350427350427351, + "acc_norm_stderr": 0.028911208802749472 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.47924528301886793, + "acc_stderr": 0.03074634997572347, + "acc_norm": 0.47924528301886793, + "acc_norm_stderr": 0.03074634997572347 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5363636363636364, + "acc_stderr": 0.04776449162396197, + "acc_norm": 0.5363636363636364, + "acc_norm_stderr": 0.04776449162396197 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3037037037037037, + "acc_stderr": 0.028037929969114986, + "acc_norm": 0.3037037037037037, + "acc_norm_stderr": 0.028037929969114986 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33774834437086093, + "acc_stderr": 0.038615575462551684, + "acc_norm": 0.33774834437086093, + "acc_norm_stderr": 0.038615575462551684 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6268656716417911, + "acc_stderr": 0.034198326081760065, + "acc_norm": 0.6268656716417911, + "acc_norm_stderr": 0.034198326081760065 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.42196531791907516, + "acc_stderr": 0.037657466938651504, + "acc_norm": 0.42196531791907516, + "acc_norm_stderr": 0.037657466938651504 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.36507936507936506, + "acc_stderr": 0.024796060602699947, + "acc_norm": 0.36507936507936506, + "acc_norm_stderr": 0.024796060602699947 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4513888888888889, + "acc_stderr": 0.041614023984032786, + "acc_norm": 0.4513888888888889, + "acc_norm_stderr": 0.041614023984032786 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + 
"acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.7, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.7, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5433526011560693, + "acc_stderr": 0.026817718130348923, + "acc_norm": 0.5433526011560693, + "acc_norm_stderr": 0.026817718130348923 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5030674846625767, + "acc_stderr": 0.03928297078179663, + "acc_norm": 0.5030674846625767, + "acc_norm_stderr": 0.03928297078179663 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5308641975308642, + "acc_stderr": 0.027767689606833915, + "acc_norm": 0.5308641975308642, + "acc_norm_stderr": 0.027767689606833915 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5595854922279793, + "acc_stderr": 0.035827245300360945, + "acc_norm": 0.5595854922279793, + "acc_norm_stderr": 0.035827245300360945 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.37719298245614036, + "acc_stderr": 0.04559522141958216, + "acc_norm": 0.37719298245614036, + "acc_norm_stderr": 0.04559522141958216 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5596330275229358, + "acc_stderr": 0.021284310623761547, + "acc_norm": 0.5596330275229358, + "acc_norm_stderr": 0.021284310623761547 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.373015873015873, + "acc_stderr": 0.04325506042017086, + "acc_norm": 0.373015873015873, + "acc_norm_stderr": 0.04325506042017086 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5359477124183006, + "acc_stderr": 0.02855582751652878, + "acc_norm": 0.5359477124183006, + "acc_norm_stderr": 0.02855582751652878 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + 
"acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6859504132231405, + "acc_stderr": 0.04236964753041018, + "acc_norm": 0.6859504132231405, + "acc_norm_stderr": 0.04236964753041018 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4868421052631579, + "acc_stderr": 0.040675331363091746, + "acc_norm": 0.4868421052631579, + "acc_norm_stderr": 0.040675331363091746 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.41013071895424835, + "acc_stderr": 0.0198984127176359, + "acc_norm": 0.41013071895424835, + "acc_norm_stderr": 0.0198984127176359 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3546099290780142, + "acc_stderr": 0.02853865002887864, + "acc_norm": 0.3546099290780142, + "acc_norm_stderr": 0.02853865002887864 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.45535714285714285, + "acc_stderr": 0.04726835553719099, + "acc_norm": 0.45535714285714285, + "acc_norm_stderr": 0.04726835553719099 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.033247089118091176, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.033247089118091176 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.29720670391061454, + "acc_stderr": 0.01528531335364159, + "acc_norm": 0.29720670391061454, + "acc_norm_stderr": 0.01528531335364159 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.63, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.63, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3235294117647059, + "acc_stderr": 0.02841820861940679, + "acc_norm": 0.3235294117647059, + "acc_norm_stderr": 0.02841820861940679 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 
0.636734693877551, + "acc_stderr": 0.030789051139030806, + "acc_norm": 0.636734693877551, + "acc_norm_stderr": 0.030789051139030806 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6540084388185654, + "acc_stderr": 0.03096481058878671, + "acc_norm": 0.6540084388185654, + "acc_norm_stderr": 0.03096481058878671 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.37157757496740546, + "acc_stderr": 0.01234182851452829, + "acc_norm": 0.37157757496740546, + "acc_norm_stderr": 0.01234182851452829 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5245098039215687, + "acc_stderr": 0.03505093194348798, + "acc_norm": 0.5245098039215687, + "acc_norm_stderr": 0.03505093194348798 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5696969696969697, + "acc_stderr": 0.03866225962879077, + "acc_norm": 0.5696969696969697, + "acc_norm_stderr": 0.03866225962879077 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.32068543451652387, + "mc1_stderr": 0.0163391703732809, + "mc2": 0.5110761157830792, + "mc2_stderr": 0.01564861997844267 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.43919716646989376, + "acc_stderr": 0.017062775744780705, + "acc_norm": 0.5017709563164109, + "acc_norm_stderr": 0.017190246276231863 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + 
"harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + 
"model_name": "OpenBuddy/openbuddy-llama3-8b-v21.1-8k", + "model_sha": "658508bce03ccd61cea9657e0357bd4cd10503ba", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/OpenBuddy/openbuddy-llemma-34b-v13.2/result_2023-11-09 12:03:35.json b/OpenBuddy/openbuddy-llemma-34b-v13.2/result_2023-11-09 12:03:35.json new file mode 100644 index 0000000000000000000000000000000000000000..d7244e71f44c053310c450232bf46161a08a5995 --- /dev/null +++ b/OpenBuddy/openbuddy-llemma-34b-v13.2/result_2023-11-09 12:03:35.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3728668941979522, + "acc_stderr": 0.01413117676013117, + "acc_norm": 0.41467576791808874, + "acc_norm_stderr": 0.014397070564409174 + }, + "harness|ko_hellaswag|10": { + "acc": 0.36496713802031466, + "acc_stderr": 0.004804370563856228, + "acc_norm": 0.45717984465245964, + "acc_norm_stderr": 0.004971449552787176 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4619883040935672, + "acc_stderr": 0.03823727092882307, + "acc_norm": 0.4619883040935672, + "acc_norm_stderr": 0.03823727092882307 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6504854368932039, + "acc_stderr": 0.04721188506097172, + "acc_norm": 0.6504854368932039, + "acc_norm_stderr": 0.04721188506097172 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4904214559386973, + "acc_stderr": 0.017876682275340887, + "acc_norm": 0.4904214559386973, + "acc_norm_stderr": 0.017876682275340887 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4, + "acc_stderr": 0.04232073695151589, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04232073695151589 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421255, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421255 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 
0.4808510638297872, + "acc_stderr": 0.03266204299064678, + "acc_norm": 0.4808510638297872, + "acc_norm_stderr": 0.03266204299064678 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3614457831325301, + "acc_stderr": 0.0374005938202932, + "acc_norm": 0.3614457831325301, + "acc_norm_stderr": 0.0374005938202932 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5048231511254019, + "acc_stderr": 0.02839677044411129, + "acc_norm": 0.5048231511254019, + "acc_norm_stderr": 0.02839677044411129 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.45739910313901344, + "acc_stderr": 0.033435777055830646, + "acc_norm": 0.45739910313901344, + "acc_norm_stderr": 0.033435777055830646 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48091603053435117, + "acc_stderr": 0.04382094705550989, + "acc_norm": 0.48091603053435117, + "acc_norm_stderr": 0.04382094705550989 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5858585858585859, + "acc_stderr": 0.03509438348879629, + "acc_norm": 0.5858585858585859, + "acc_norm_stderr": 0.03509438348879629 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4482758620689655, + "acc_stderr": 0.04144311810878151, + "acc_norm": 0.4482758620689655, + "acc_norm_stderr": 0.04144311810878151 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.30392156862745096, + "acc_stderr": 0.045766654032077636, + "acc_norm": 0.30392156862745096, + "acc_norm_stderr": 0.045766654032077636 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5042016806722689, + "acc_stderr": 0.03247734334448111, + "acc_norm": 0.5042016806722689, + "acc_norm_stderr": 0.03247734334448111 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.43846153846153846, + "acc_stderr": 0.025158266016868554, + "acc_norm": 0.43846153846153846, + "acc_norm_stderr": 
0.025158266016868554 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.58, + "acc_stderr": 0.04960449637488583, + "acc_norm": 0.58, + "acc_norm_stderr": 0.04960449637488583 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5648148148148148, + "acc_stderr": 0.04792898170907062, + "acc_norm": 0.5648148148148148, + "acc_norm_stderr": 0.04792898170907062 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.03465304488406796, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.03465304488406796 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.46774193548387094, + "acc_stderr": 0.028384747788813332, + "acc_norm": 0.46774193548387094, + "acc_norm_stderr": 0.028384747788813332 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7307692307692307, + "acc_stderr": 0.029058588303748845, + "acc_norm": 0.7307692307692307, + "acc_norm_stderr": 0.029058588303748845 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.46037735849056605, + "acc_stderr": 0.03067609659938918, + "acc_norm": 0.46037735849056605, + "acc_norm_stderr": 0.03067609659938918 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4909090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.4909090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.34074074074074073, + "acc_stderr": 0.028897748741131143, + "acc_norm": 0.34074074074074073, + "acc_norm_stderr": 0.028897748741131143 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + "acc_stderr": 0.03734535676787198, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.03734535676787198 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6318407960199005, + "acc_stderr": 0.03410410565495302, + "acc_norm": 
0.6318407960199005, + "acc_norm_stderr": 0.03410410565495302 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.41040462427745666, + "acc_stderr": 0.03750757044895538, + "acc_norm": 0.41040462427745666, + "acc_norm_stderr": 0.03750757044895538 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.4497354497354497, + "acc_stderr": 0.02562085704293665, + "acc_norm": 0.4497354497354497, + "acc_norm_stderr": 0.02562085704293665 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3541666666666667, + "acc_stderr": 0.039994111357535424, + "acc_norm": 0.3541666666666667, + "acc_norm_stderr": 0.039994111357535424 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5086705202312138, + "acc_stderr": 0.026915047355369804, + "acc_norm": 0.5086705202312138, + "acc_norm_stderr": 0.026915047355369804 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5276073619631901, + "acc_stderr": 0.03922378290610991, + "acc_norm": 0.5276073619631901, + "acc_norm_stderr": 0.03922378290610991 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.027777777777777804, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.027777777777777804 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939098, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939098 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5440414507772021, + "acc_stderr": 0.03594413711272437, + "acc_norm": 0.5440414507772021, + "acc_norm_stderr": 0.03594413711272437 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.32456140350877194, + "acc_stderr": 0.044045561573747685, + 
"acc_norm": 0.32456140350877194, + "acc_norm_stderr": 0.044045561573747685 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5266055045871559, + "acc_stderr": 0.021406952688151574, + "acc_norm": 0.5266055045871559, + "acc_norm_stderr": 0.021406952688151574 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4523809523809524, + "acc_stderr": 0.044518079590553275, + "acc_norm": 0.4523809523809524, + "acc_norm_stderr": 0.044518079590553275 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.434640522875817, + "acc_stderr": 0.028384256704883037, + "acc_norm": 0.434640522875817, + "acc_norm_stderr": 0.028384256704883037 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.47, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.47, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6776859504132231, + "acc_stderr": 0.042664163633521664, + "acc_norm": 0.6776859504132231, + "acc_norm_stderr": 0.042664163633521664 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4473684210526316, + "acc_stderr": 0.040463368839782514, + "acc_norm": 0.4473684210526316, + "acc_norm_stderr": 0.040463368839782514 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3758169934640523, + "acc_stderr": 0.019594021136577447, + "acc_norm": 0.3758169934640523, + "acc_norm_stderr": 0.019594021136577447 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.34397163120567376, + "acc_stderr": 0.02833801742861132, + "acc_norm": 0.34397163120567376, + "acc_norm_stderr": 0.02833801742861132 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.39285714285714285, + "acc_stderr": 0.04635550135609976, + "acc_norm": 0.39285714285714285, + "acc_norm_stderr": 0.04635550135609976 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4398148148148148, + "acc_stderr": 0.033851779760448106, + "acc_norm": 0.4398148148148148, + "acc_norm_stderr": 0.033851779760448106 + }, + "harness|ko_mmlu_moral_scenarios|5": { 
+ "acc": 0.2581005586592179, + "acc_stderr": 0.014635185616527824, + "acc_norm": 0.2581005586592179, + "acc_norm_stderr": 0.014635185616527824 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.63, + "acc_stderr": 0.04852365870939098, + "acc_norm": 0.63, + "acc_norm_stderr": 0.04852365870939098 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3125, + "acc_stderr": 0.02815637344037142, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.02815637344037142 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.563265306122449, + "acc_stderr": 0.03175195237583323, + "acc_norm": 0.563265306122449, + "acc_norm_stderr": 0.03175195237583323 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5443037974683544, + "acc_stderr": 0.032419206846933335, + "acc_norm": 0.5443037974683544, + "acc_norm_stderr": 0.032419206846933335 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.34419817470664926, + "acc_stderr": 0.012134433741002574, + "acc_norm": 0.34419817470664926, + "acc_norm_stderr": 0.012134433741002574 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.034849415144292316, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.034849415144292316 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5212121212121212, + "acc_stderr": 0.03900828913737302, + "acc_norm": 0.5212121212121212, + "acc_norm_stderr": 0.03900828913737302 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2913096695226438, + "mc1_stderr": 0.01590598704818483, + "mc2": 0.46212381405853503, + "mc2_stderr": 0.01568383395016852 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4923258559622196, + "acc_stderr": 0.01718832921965428, + "acc_norm": 0.5360094451003542, + "acc_norm_stderr": 0.017145715365486664 + } + }, + "versions": { + "all": 0, + 
"harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + 
"harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "OpenBuddy/openbuddy-llemma-34b-v13.2", + "model_sha": "1abff297a8eae622c0f106854f2a8fbfcfa9c119", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/OpenBuddy/openbuddy-mistral-22b-v21.1-32k/result_2024-05-17 08:50:05.json b/OpenBuddy/openbuddy-mistral-22b-v21.1-32k/result_2024-05-17 08:50:05.json new file mode 100644 index 0000000000000000000000000000000000000000..81f0cdf0e9db64251dfe6b15016c9c71416d8736 --- /dev/null +++ b/OpenBuddy/openbuddy-mistral-22b-v21.1-32k/result_2024-05-17 08:50:05.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.19197952218430034, + "acc_stderr": 0.011509598906598093, + "acc_norm": 0.23890784982935154, + "acc_norm_stderr": 0.01246107137631662 + }, + "harness|ko_hellaswag|10": { + "acc": 0.2574188408683529, + "acc_stderr": 0.004363185172047172, + "acc_norm": 0.264389563831906, + "acc_norm_stderr": 0.004401063265803206 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.031885780176863984, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.031885780176863984 + }, + 
"harness|ko_mmlu_management|5": { + "acc": 0.22330097087378642, + "acc_stderr": 0.04123553189891431, + "acc_norm": 0.22330097087378642, + "acc_norm_stderr": 0.04123553189891431 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.23754789272030652, + "acc_stderr": 0.015218733046150191, + "acc_norm": 0.23754789272030652, + "acc_norm_stderr": 0.015218733046150191 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.03785714465066654, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.03785714465066654 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3404255319148936, + "acc_stderr": 0.030976692998534436, + "acc_norm": 0.3404255319148936, + "acc_norm_stderr": 0.030976692998534436 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3192771084337349, + "acc_stderr": 0.03629335329947859, + "acc_norm": 0.3192771084337349, + "acc_norm_stderr": 0.03629335329947859 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.24758842443729903, + "acc_stderr": 0.024513879973621967, + "acc_norm": 0.24758842443729903, + "acc_norm_stderr": 0.024513879973621967 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3273542600896861, + "acc_stderr": 0.03149384670994131, + "acc_norm": 0.3273542600896861, + "acc_norm_stderr": 0.03149384670994131 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.21374045801526717, + "acc_stderr": 0.0359546161177469, + "acc_norm": 0.21374045801526717, + "acc_norm_stderr": 0.0359546161177469 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.17676767676767677, + "acc_stderr": 0.027178752639044915, + "acc_norm": 0.17676767676767677, + "acc_norm_stderr": 0.027178752639044915 + }, + 
"harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.21379310344827587, + "acc_stderr": 0.034165204477475494, + "acc_norm": 0.21379310344827587, + "acc_norm_stderr": 0.034165204477475494 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.16666666666666666, + "acc_stderr": 0.03708284662416542, + "acc_norm": 0.16666666666666666, + "acc_norm_stderr": 0.03708284662416542 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3403361344537815, + "acc_stderr": 0.030778057422931666, + "acc_norm": 0.3403361344537815, + "acc_norm_stderr": 0.030778057422931666 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3641025641025641, + "acc_stderr": 0.024396672985094778, + "acc_norm": 0.3641025641025641, + "acc_norm_stderr": 0.024396672985094778 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.18, + "acc_stderr": 0.03861229196653694, + "acc_norm": 0.18, + "acc_norm_stderr": 0.03861229196653694 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.17, + "acc_stderr": 0.03775251680686371, + "acc_norm": 0.17, + "acc_norm_stderr": 0.03775251680686371 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.040191074725573483, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.040191074725573483 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3251231527093596, + "acc_stderr": 0.03295797566311271, + "acc_norm": 0.3251231527093596, + "acc_norm_stderr": 0.03295797566311271 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3161290322580645, + "acc_stderr": 0.026450874489042764, + "acc_norm": 0.3161290322580645, + "acc_norm_stderr": 0.026450874489042764 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.29914529914529914, + "acc_stderr": 0.02999695185834948, + "acc_norm": 0.29914529914529914, + "acc_norm_stderr": 0.02999695185834948 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.23018867924528302, + "acc_stderr": 0.025907897122408173, + "acc_norm": 
0.23018867924528302, + "acc_norm_stderr": 0.025907897122408173 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.20909090909090908, + "acc_stderr": 0.03895091015724137, + "acc_norm": 0.20909090909090908, + "acc_norm_stderr": 0.03895091015724137 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.02684205787383371, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.02684205787383371 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.037804458505267334, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.037804458505267334 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.23880597014925373, + "acc_stderr": 0.030147775935409224, + "acc_norm": 0.23880597014925373, + "acc_norm_stderr": 0.030147775935409224 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.21965317919075145, + "acc_stderr": 0.031568093627031744, + "acc_norm": 0.21965317919075145, + "acc_norm_stderr": 0.031568093627031744 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.021411684393694203, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.021411684393694203 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.03476590104304134, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.03476590104304134 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.23699421965317918, + "acc_stderr": 0.022894082489925992, + "acc_norm": 0.23699421965317918, + "acc_norm_stderr": 0.022894082489925992 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.25766871165644173, + 
"acc_stderr": 0.03436150827846917, + "acc_norm": 0.25766871165644173, + "acc_norm_stderr": 0.03436150827846917 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2654320987654321, + "acc_stderr": 0.024569223600460845, + "acc_norm": 0.2654320987654321, + "acc_norm_stderr": 0.024569223600460845 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.29533678756476683, + "acc_stderr": 0.032922966391551414, + "acc_norm": 0.29533678756476683, + "acc_norm_stderr": 0.032922966391551414 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.039994238792813365, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.039994238792813365 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.26788990825688075, + "acc_stderr": 0.018987462257978652, + "acc_norm": 0.26788990825688075, + "acc_norm_stderr": 0.018987462257978652 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.15873015873015872, + "acc_stderr": 0.03268454013011743, + "acc_norm": 0.15873015873015872, + "acc_norm_stderr": 0.03268454013011743 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.023805186524888153, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.023805186524888153 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.3140495867768595, + "acc_stderr": 0.04236964753041018, + "acc_norm": 0.3140495867768595, + "acc_norm_stderr": 0.04236964753041018 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.18421052631578946, + "acc_stderr": 0.0315469804508223, + "acc_norm": 0.18421052631578946, + "acc_norm_stderr": 0.0315469804508223 + }, + 
"harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.016819028375736386, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.016819028375736386 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.25886524822695034, + "acc_stderr": 0.026129572527180848, + "acc_norm": 0.25886524822695034, + "acc_norm_stderr": 0.026129572527180848 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.19642857142857142, + "acc_stderr": 0.03770970049347018, + "acc_norm": 0.19642857142857142, + "acc_norm_stderr": 0.03770970049347018 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.0340470532865388, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.0340470532865388 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23798882681564246, + "acc_stderr": 0.0142426300705749, + "acc_norm": 0.23798882681564246, + "acc_norm_stderr": 0.0142426300705749 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4485294117647059, + "acc_stderr": 0.030211479609121593, + "acc_norm": 0.4485294117647059, + "acc_norm_stderr": 0.030211479609121593 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2, + "acc_stderr": 0.025607375986579157, + "acc_norm": 0.2, + "acc_norm_stderr": 0.025607375986579157 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2616033755274262, + "acc_stderr": 0.028609516716994934, + "acc_norm": 0.2616033755274262, + "acc_norm_stderr": 0.028609516716994934 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2392438070404172, + "acc_stderr": 0.010896123652676653, + "acc_norm": 0.2392438070404172, 
+ "acc_norm_stderr": 0.010896123652676653 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.030190282453501964, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.030190282453501964 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.23030303030303031, + "acc_stderr": 0.032876667586034886, + "acc_norm": 0.23030303030303031, + "acc_norm_stderr": 0.032876667586034886 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.25458996328029376, + "mc1_stderr": 0.015250117079156493, + "mc2": 0.47855045830868376, + "mc2_stderr": 0.0170101538232276 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.08736717827626919, + "acc_stderr": 0.009708162004168805, + "acc_norm": 0.282172373081464, + "acc_norm_stderr": 0.015473271583988433 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "OpenBuddy/openbuddy-mistral-22b-v21.1-32k", + "model_sha": "d90657108901e67b0067d38f329e74537921f839", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/OpenBuddy/openbuddy-mistral-7b-v17.1-32k/result_2024-02-12 04:21:09.json 
b/OpenBuddy/openbuddy-mistral-7b-v17.1-32k/result_2024-02-12 04:21:09.json new file mode 100644 index 0000000000000000000000000000000000000000..9a30b27f120f40bf5e5ee1ed71e49d45d3958aba --- /dev/null +++ b/OpenBuddy/openbuddy-mistral-7b-v17.1-32k/result_2024-02-12 04:21:09.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2030716723549488, + "acc_stderr": 0.011755899303705582, + "acc_norm": 0.24744027303754265, + "acc_norm_stderr": 0.012610352663292674 + }, + "harness|ko_hellaswag|10": { + "acc": 0.2539334793865764, + "acc_stderr": 0.004343704512380101, + "acc_norm": 0.25194184425413263, + "acc_norm_stderr": 0.004332408005299344 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.21052631578947367, + "acc_stderr": 0.0312678171466318, + "acc_norm": 0.21052631578947367, + "acc_norm_stderr": 0.0312678171466318 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.17475728155339806, + "acc_stderr": 0.037601780060266196, + "acc_norm": 0.17475728155339806, + "acc_norm_stderr": 0.037601780060266196 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.28735632183908044, + "acc_stderr": 0.0161824107306827, + "acc_norm": 0.28735632183908044, + "acc_norm_stderr": 0.0161824107306827 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04072314811876837, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04072314811876837 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2723404255319149, + "acc_stderr": 0.029101290698386708, + "acc_norm": 0.2723404255319149, + "acc_norm_stderr": 0.029101290698386708 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3132530120481928, + "acc_stderr": 0.036108050180310235, + "acc_norm": 0.3132530120481928, + "acc_norm_stderr": 0.036108050180310235 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 
0.3022508038585209, + "acc_stderr": 0.02608270069539966, + "acc_norm": 0.3022508038585209, + "acc_norm_stderr": 0.02608270069539966 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.37668161434977576, + "acc_stderr": 0.03252113489929189, + "acc_norm": 0.37668161434977576, + "acc_norm_stderr": 0.03252113489929189 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2595419847328244, + "acc_stderr": 0.03844876139785271, + "acc_norm": 0.2595419847328244, + "acc_norm_stderr": 0.03844876139785271 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.03358618145732522, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.03358618145732522 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2482758620689655, + "acc_stderr": 0.036001056927277716, + "acc_norm": 0.2482758620689655, + "acc_norm_stderr": 0.036001056927277716 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237654, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237654 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.21008403361344538, + "acc_stderr": 0.026461398717471874, + "acc_norm": 0.21008403361344538, + "acc_norm_stderr": 0.026461398717471874 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2128205128205128, + "acc_stderr": 0.020752423722128002, + "acc_norm": 0.2128205128205128, + "acc_norm_stderr": 0.020752423722128002 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + 
"harness|ko_mmlu_jurisprudence|5": { + "acc": 0.21296296296296297, + "acc_stderr": 0.03957835471980981, + "acc_norm": 0.21296296296296297, + "acc_norm_stderr": 0.03957835471980981 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.21182266009852216, + "acc_stderr": 0.02874898368994106, + "acc_norm": 0.21182266009852216, + "acc_norm_stderr": 0.02874898368994106 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.27419354838709675, + "acc_stderr": 0.025378139970885193, + "acc_norm": 0.27419354838709675, + "acc_norm_stderr": 0.025378139970885193 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.28205128205128205, + "acc_stderr": 0.029480360549541194, + "acc_norm": 0.28205128205128205, + "acc_norm_stderr": 0.029480360549541194 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2, + "acc_stderr": 0.02461829819586651, + "acc_norm": 0.2, + "acc_norm_stderr": 0.02461829819586651 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.24545454545454545, + "acc_stderr": 0.041220665028782855, + "acc_norm": 0.24545454545454545, + "acc_norm_stderr": 0.041220665028782855 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.026842057873833706, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.026842057873833706 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33112582781456956, + "acc_stderr": 0.038425817186598696, + "acc_norm": 0.33112582781456956, + "acc_norm_stderr": 0.038425817186598696 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.23880597014925373, + "acc_stderr": 0.030147775935409224, + "acc_norm": 0.23880597014925373, + "acc_norm_stderr": 0.030147775935409224 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2947976878612717, + "acc_stderr": 0.034765996075164785, + "acc_norm": 0.2947976878612717, + "acc_norm_stderr": 0.034765996075164785 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.21957671957671956, + "acc_stderr": 0.02132001859977037, + 
"acc_norm": 0.21957671957671956, + "acc_norm_stderr": 0.02132001859977037 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2569444444444444, + "acc_stderr": 0.03653946969442099, + "acc_norm": 0.2569444444444444, + "acc_norm_stderr": 0.03653946969442099 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542129, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542129 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.24855491329479767, + "acc_stderr": 0.023267528432100174, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 0.023267528432100174 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3006134969325153, + "acc_stderr": 0.03602511318806771, + "acc_norm": 0.3006134969325153, + "acc_norm_stderr": 0.03602511318806771 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.22530864197530864, + "acc_stderr": 0.02324620264781975, + "acc_norm": 0.22530864197530864, + "acc_norm_stderr": 0.02324620264781975 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.20725388601036268, + "acc_stderr": 0.02925282329180365, + "acc_norm": 0.20725388601036268, + "acc_norm_stderr": 0.02925282329180365 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.04049339297748141, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.04049339297748141 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.1926605504587156, + "acc_stderr": 0.016909276884936097, + "acc_norm": 0.1926605504587156, + "acc_norm_stderr": 0.016909276884936097 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.20634920634920634, + "acc_stderr": 
0.0361960452412425, + "acc_norm": 0.20634920634920634, + "acc_norm_stderr": 0.0361960452412425 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.24836601307189543, + "acc_stderr": 0.024739981355113592, + "acc_norm": 0.24836601307189543, + "acc_norm_stderr": 0.024739981355113592 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2396694214876033, + "acc_stderr": 0.038968789850704164, + "acc_norm": 0.2396694214876033, + "acc_norm_stderr": 0.038968789850704164 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.17763157894736842, + "acc_stderr": 0.03110318238312338, + "acc_norm": 0.17763157894736842, + "acc_norm_stderr": 0.03110318238312338 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.24673202614379086, + "acc_stderr": 0.0174408203674025, + "acc_norm": 0.24673202614379086, + "acc_norm_stderr": 0.0174408203674025 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2553191489361702, + "acc_stderr": 0.02601199293090201, + "acc_norm": 0.2553191489361702, + "acc_norm_stderr": 0.02601199293090201 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.24107142857142858, + "acc_stderr": 0.04059867246952689, + "acc_norm": 0.24107142857142858, + "acc_norm_stderr": 0.04059867246952689 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.21296296296296297, + "acc_stderr": 0.027920963147993662, + "acc_norm": 0.21296296296296297, + "acc_norm_stderr": 0.027920963147993662 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.25027932960893856, + "acc_stderr": 0.014487500852850417, + "acc_norm": 0.25027932960893856, + "acc_norm_stderr": 0.014487500852850417 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + 
"harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4485294117647059, + "acc_stderr": 0.030211479609121593, + "acc_norm": 0.4485294117647059, + "acc_norm_stderr": 0.030211479609121593 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.17142857142857143, + "acc_stderr": 0.024127463462650146, + "acc_norm": 0.17142857142857143, + "acc_norm_stderr": 0.024127463462650146 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.20253164556962025, + "acc_stderr": 0.026160568246601464, + "acc_norm": 0.20253164556962025, + "acc_norm_stderr": 0.026160568246601464 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2457627118644068, + "acc_stderr": 0.010996156635142692, + "acc_norm": 0.2457627118644068, + "acc_norm_stderr": 0.010996156635142692 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.03096451792692341, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.03096451792692341 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.23030303030303031, + "acc_stderr": 0.03287666758603489, + "acc_norm": 0.23030303030303031, + "acc_norm_stderr": 0.03287666758603489 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2533659730722154, + "mc1_stderr": 0.015225899340826845, + "mc2": 0.4814309227649628, + "mc2_stderr": 0.01709409390072448 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.06965761511216056, + "acc_stderr": 0.008752266290032413, + "acc_norm": 0.31995277449822906, + "acc_norm_stderr": 0.016037153840280524 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + 
"harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + 
"harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "OpenBuddy/openbuddy-mistral-7b-v17.1-32k", + "model_sha": "43f9853350f222b3802d6df332d026d344626aee", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/OpenBuddy/openbuddy-mistral2-7b-v20.3-32k/result_2024-04-09 14:35:59.json b/OpenBuddy/openbuddy-mistral2-7b-v20.3-32k/result_2024-04-09 14:35:59.json new file mode 100644 index 0000000000000000000000000000000000000000..67dcec61c3a739fbf74f7acca8b8694213802a37 --- /dev/null +++ b/OpenBuddy/openbuddy-mistral2-7b-v20.3-32k/result_2024-04-09 14:35:59.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2721843003412969, + "acc_stderr": 0.013006600406423709, + "acc_norm": 0.3225255972696246, + "acc_norm_stderr": 0.01365998089427737 + }, + "harness|ko_hellaswag|10": { + "acc": 0.2930691097390958, + "acc_stderr": 0.004542396269999215, + "acc_norm": 0.31846245767775344, + "acc_norm_stderr": 0.0046492781530738094 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.26900584795321636, + "acc_stderr": 0.0340105262010409, + "acc_norm": 0.26900584795321636, + "acc_norm_stderr": 0.0340105262010409 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2912621359223301, + "acc_stderr": 0.044986763205729224, + "acc_norm": 0.2912621359223301, + "acc_norm_stderr": 0.044986763205729224 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.32950191570881227, + "acc_stderr": 0.016808322261740463, + 
"acc_norm": 0.32950191570881227, + "acc_norm_stderr": 0.016808322261740463 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.03820169914517905, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.03820169914517905 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768077, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768077 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.25957446808510637, + "acc_stderr": 0.02865917937429232, + "acc_norm": 0.25957446808510637, + "acc_norm_stderr": 0.02865917937429232 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3192771084337349, + "acc_stderr": 0.0362933532994786, + "acc_norm": 0.3192771084337349, + "acc_norm_stderr": 0.0362933532994786 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3311897106109325, + "acc_stderr": 0.026730620728004913, + "acc_norm": 0.3311897106109325, + "acc_norm_stderr": 0.026730620728004913 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.33183856502242154, + "acc_stderr": 0.03160295143776679, + "acc_norm": 0.33183856502242154, + "acc_norm_stderr": 0.03160295143776679 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.26717557251908397, + "acc_stderr": 0.03880848301082396, + "acc_norm": 0.26717557251908397, + "acc_norm_stderr": 0.03880848301082396 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3939393939393939, + "acc_stderr": 0.03481285338232963, + "acc_norm": 0.3939393939393939, + "acc_norm_stderr": 0.03481285338232963 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.296551724137931, + "acc_stderr": 0.03806142687309993, + "acc_norm": 0.296551724137931, + "acc_norm_stderr": 0.03806142687309993 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.04488482852329017, 
+ "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.04488482852329017 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3277310924369748, + "acc_stderr": 0.030489911417673227, + "acc_norm": 0.3277310924369748, + "acc_norm_stderr": 0.030489911417673227 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3384615384615385, + "acc_stderr": 0.023991500500313033, + "acc_norm": 0.3384615384615385, + "acc_norm_stderr": 0.023991500500313033 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.47, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.47, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.32407407407407407, + "acc_stderr": 0.04524596007030048, + "acc_norm": 0.32407407407407407, + "acc_norm_stderr": 0.04524596007030048 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.30049261083743845, + "acc_stderr": 0.03225799476233485, + "acc_norm": 0.30049261083743845, + "acc_norm_stderr": 0.03225799476233485 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4064516129032258, + "acc_stderr": 0.027941727346256304, + "acc_norm": 0.4064516129032258, + "acc_norm_stderr": 0.027941727346256304 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5470085470085471, + "acc_stderr": 0.03261099873098619, + "acc_norm": 0.5470085470085471, + "acc_norm_stderr": 0.03261099873098619 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4037735849056604, + "acc_stderr": 0.03019761160019795, + "acc_norm": 0.4037735849056604, + "acc_norm_stderr": 0.03019761160019795 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.39090909090909093, + "acc_stderr": 0.04673752333670239, + "acc_norm": 0.39090909090909093, + "acc_norm_stderr": 0.04673752333670239 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 
0.24074074074074073, + "acc_stderr": 0.02606715922227579, + "acc_norm": 0.24074074074074073, + "acc_norm_stderr": 0.02606715922227579 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3708609271523179, + "acc_stderr": 0.03943966699183629, + "acc_norm": 0.3708609271523179, + "acc_norm_stderr": 0.03943966699183629 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.35323383084577115, + "acc_stderr": 0.03379790611796777, + "acc_norm": 0.35323383084577115, + "acc_norm_stderr": 0.03379790611796777 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.30057803468208094, + "acc_stderr": 0.0349610148119118, + "acc_norm": 0.30057803468208094, + "acc_norm_stderr": 0.0349610148119118 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30423280423280424, + "acc_stderr": 0.023695415009463084, + "acc_norm": 0.30423280423280424, + "acc_norm_stderr": 0.023695415009463084 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.24305555555555555, + "acc_stderr": 0.03586879280080341, + "acc_norm": 0.24305555555555555, + "acc_norm_stderr": 0.03586879280080341 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.42, + "acc_stderr": 0.04960449637488583, + "acc_norm": 0.42, + "acc_norm_stderr": 0.04960449637488583 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.3988439306358382, + "acc_stderr": 0.026362437574546545, + "acc_norm": 0.3988439306358382, + "acc_norm_stderr": 0.026362437574546545 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3374233128834356, + "acc_stderr": 0.03714908409935575, + "acc_norm": 0.3374233128834356, + "acc_norm_stderr": 0.03714908409935575 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2932098765432099, + "acc_stderr": 0.025329888171900926, + "acc_norm": 0.2932098765432099, + "acc_norm_stderr": 0.025329888171900926 + }, + 
"harness|ko_mmlu_college_mathematics|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.37305699481865284, + "acc_stderr": 0.03490205592048573, + "acc_norm": 0.37305699481865284, + "acc_norm_stderr": 0.03490205592048573 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.04049339297748142, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.04049339297748142 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3174311926605505, + "acc_stderr": 0.019957152198460504, + "acc_norm": 0.3174311926605505, + "acc_norm_stderr": 0.019957152198460504 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.040061680838488774, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.040061680838488774 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.42483660130718953, + "acc_stderr": 0.028304576673141114, + "acc_norm": 0.42483660130718953, + "acc_norm_stderr": 0.028304576673141114 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.4214876033057851, + "acc_stderr": 0.045077322787750944, + "acc_norm": 0.4214876033057851, + "acc_norm_stderr": 0.045077322787750944 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.27631578947368424, + "acc_stderr": 0.03639057569952925, + "acc_norm": 0.27631578947368424, + "acc_norm_stderr": 0.03639057569952925 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.27941176470588236, + "acc_stderr": 0.018152871051538823, + "acc_norm": 0.27941176470588236, + "acc_norm_stderr": 0.018152871051538823 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3191489361702128, + "acc_stderr": 0.02780799014132019, + "acc_norm": 
0.3191489361702128, + "acc_norm_stderr": 0.02780799014132019 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.33035714285714285, + "acc_stderr": 0.044642857142857144, + "acc_norm": 0.33035714285714285, + "acc_norm_stderr": 0.044642857142857144 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.36574074074074076, + "acc_stderr": 0.03284738857647207, + "acc_norm": 0.36574074074074076, + "acc_norm_stderr": 0.03284738857647207 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24581005586592178, + "acc_stderr": 0.014400296429225606, + "acc_norm": 0.24581005586592178, + "acc_norm_stderr": 0.014400296429225606 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.029520095697687765, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.029520095697687765 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4489795918367347, + "acc_stderr": 0.03184213866687579, + "acc_norm": 0.4489795918367347, + "acc_norm_stderr": 0.03184213866687579 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.45569620253164556, + "acc_stderr": 0.03241920684693334, + "acc_norm": 0.45569620253164556, + "acc_norm_stderr": 0.03241920684693334 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.30638852672750977, + "acc_stderr": 0.01177398032938071, + "acc_norm": 0.30638852672750977, + "acc_norm_stderr": 0.01177398032938071 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.03410785338904719, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.03410785338904719 + }, + 
"harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3878787878787879, + "acc_stderr": 0.038049136539710114, + "acc_norm": 0.3878787878787879, + "acc_norm_stderr": 0.038049136539710114 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2607099143206854, + "mc1_stderr": 0.015368841620766379, + "mc2": 0.4464517599251789, + "mc2_stderr": 0.016618603699884715 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.0743801652892562, + "acc_stderr": 0.009021104510906087, + "acc_norm": 0.30932703659976385, + "acc_norm_stderr": 0.01589132050552089 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + 
"harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "OpenBuddy/openbuddy-mistral2-7b-v20.3-32k", + "model_sha": "396cbf715542e92f870f105bc9a8ef17736d1b00", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/OpenBuddy/openbuddy-qwen1.5-14b-v21.1-32k/result_2024-04-09 14:35:43.json b/OpenBuddy/openbuddy-qwen1.5-14b-v21.1-32k/result_2024-04-09 14:35:43.json new file mode 100644 index 0000000000000000000000000000000000000000..793e1799f4289ca6acb6410e74c82d3fa8ce2c26 --- /dev/null +++ b/OpenBuddy/openbuddy-qwen1.5-14b-v21.1-32k/result_2024-04-09 14:35:43.json @@ -0,0 +1,444 @@ +{ + "results": 
{ + "harness|ko_arc_challenge|25": { + "acc": 0.37542662116040953, + "acc_stderr": 0.014150631435111726, + "acc_norm": 0.44368600682593856, + "acc_norm_stderr": 0.014518421825670454 + }, + "harness|ko_hellaswag|10": { + "acc": 0.38478390758812986, + "acc_stderr": 0.004855498343308387, + "acc_norm": 0.5080661222863971, + "acc_norm_stderr": 0.004989132075598771 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6432748538011696, + "acc_stderr": 0.03674013002860954, + "acc_norm": 0.6432748538011696, + "acc_norm_stderr": 0.03674013002860954 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.7087378640776699, + "acc_stderr": 0.044986763205729224, + "acc_norm": 0.7087378640776699, + "acc_norm_stderr": 0.044986763205729224 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6270753512132823, + "acc_stderr": 0.017292868269453938, + "acc_norm": 0.6270753512132823, + "acc_norm_stderr": 0.017292868269453938 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4222222222222222, + "acc_stderr": 0.042667634040995814, + "acc_norm": 0.4222222222222222, + "acc_norm_stderr": 0.042667634040995814 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.5617021276595745, + "acc_stderr": 0.032436186361081, + "acc_norm": 0.5617021276595745, + "acc_norm_stderr": 0.032436186361081 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39759036144578314, + "acc_stderr": 0.03809973084540219, + "acc_norm": 0.39759036144578314, + "acc_norm_stderr": 0.03809973084540219 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5659163987138264, + "acc_stderr": 0.0281502322445356, + "acc_norm": 0.5659163987138264, + "acc_norm_stderr": 0.0281502322445356 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5246636771300448, + "acc_stderr": 0.03351695167652628, + "acc_norm": 0.5246636771300448, + "acc_norm_stderr": 0.03351695167652628 + }, + 
"harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6412213740458015, + "acc_stderr": 0.04206739313864907, + "acc_norm": 0.6412213740458015, + "acc_norm_stderr": 0.04206739313864907 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7121212121212122, + "acc_stderr": 0.03225883512300993, + "acc_norm": 0.7121212121212122, + "acc_norm_stderr": 0.03225883512300993 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4896551724137931, + "acc_stderr": 0.041657747757287644, + "acc_norm": 0.4896551724137931, + "acc_norm_stderr": 0.041657747757287644 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.04488482852329017, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.04488482852329017 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5756302521008403, + "acc_stderr": 0.03210479051015776, + "acc_norm": 0.5756302521008403, + "acc_norm_stderr": 0.03210479051015776 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5769230769230769, + "acc_stderr": 0.025049197876042355, + "acc_norm": 0.5769230769230769, + "acc_norm_stderr": 0.025049197876042355 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.65, + "acc_stderr": 0.04793724854411021, + "acc_norm": 0.65, + "acc_norm_stderr": 0.04793724854411021 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5925925925925926, + "acc_stderr": 0.04750077341199986, + "acc_norm": 0.5925925925925926, + "acc_norm_stderr": 0.04750077341199986 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4876847290640394, + "acc_stderr": 0.035169204442208966, + "acc_norm": 0.4876847290640394, + "acc_norm_stderr": 
0.035169204442208966 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.603225806451613, + "acc_stderr": 0.027831231605767944, + "acc_norm": 0.603225806451613, + "acc_norm_stderr": 0.027831231605767944 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7905982905982906, + "acc_stderr": 0.026655699653922754, + "acc_norm": 0.7905982905982906, + "acc_norm_stderr": 0.026655699653922754 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5320754716981132, + "acc_stderr": 0.03070948699255654, + "acc_norm": 0.5320754716981132, + "acc_norm_stderr": 0.03070948699255654 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5636363636363636, + "acc_stderr": 0.04750185058907297, + "acc_norm": 0.5636363636363636, + "acc_norm_stderr": 0.04750185058907297 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3814814814814815, + "acc_stderr": 0.02961671892749759, + "acc_norm": 0.3814814814814815, + "acc_norm_stderr": 0.02961671892749759 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.36423841059602646, + "acc_stderr": 0.03929111781242741, + "acc_norm": 0.36423841059602646, + "acc_norm_stderr": 0.03929111781242741 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7064676616915423, + "acc_stderr": 0.032200241045342054, + "acc_norm": 0.7064676616915423, + "acc_norm_stderr": 0.032200241045342054 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5375722543352601, + "acc_stderr": 0.03801685104524458, + "acc_norm": 0.5375722543352601, + "acc_norm_stderr": 0.03801685104524458 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.47883597883597884, + "acc_stderr": 0.025728230952130726, + "acc_norm": 0.47883597883597884, + "acc_norm_stderr": 0.025728230952130726 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4791666666666667, + "acc_stderr": 0.04177578950739993, + "acc_norm": 0.4791666666666667, + "acc_norm_stderr": 0.04177578950739993 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.39, + "acc_stderr": 
0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.71, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.71, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5953757225433526, + "acc_stderr": 0.026424816594009852, + "acc_norm": 0.5953757225433526, + "acc_norm_stderr": 0.026424816594009852 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.558282208588957, + "acc_stderr": 0.03901591825836184, + "acc_norm": 0.558282208588957, + "acc_norm_stderr": 0.03901591825836184 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5740740740740741, + "acc_stderr": 0.027513747284379424, + "acc_norm": 0.5740740740740741, + "acc_norm_stderr": 0.027513747284379424 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6424870466321243, + "acc_stderr": 0.034588160421810114, + "acc_norm": 0.6424870466321243, + "acc_norm_stderr": 0.034588160421810114 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.42105263157894735, + "acc_stderr": 0.046446020912223177, + "acc_norm": 0.42105263157894735, + "acc_norm_stderr": 0.046446020912223177 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.634862385321101, + "acc_stderr": 0.02064280145438401, + "acc_norm": 0.634862385321101, + "acc_norm_stderr": 0.02064280145438401 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.5158730158730159, + "acc_stderr": 0.044698818540726076, + "acc_norm": 0.5158730158730159, + "acc_norm_stderr": 0.044698818540726076 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.6143790849673203, + "acc_stderr": 0.02787074527829027, + "acc_norm": 0.6143790849673203, + "acc_norm_stderr": 0.02787074527829027 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.58, + "acc_stderr": 
0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7272727272727273, + "acc_stderr": 0.04065578140908705, + "acc_norm": 0.7272727272727273, + "acc_norm_stderr": 0.04065578140908705 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.618421052631579, + "acc_stderr": 0.03953173377749194, + "acc_norm": 0.618421052631579, + "acc_norm_stderr": 0.03953173377749194 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.48856209150326796, + "acc_stderr": 0.020222541515610863, + "acc_norm": 0.48856209150326796, + "acc_norm_stderr": 0.020222541515610863 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.39361702127659576, + "acc_stderr": 0.02914454478159615, + "acc_norm": 0.39361702127659576, + "acc_norm_stderr": 0.02914454478159615 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.5446428571428571, + "acc_stderr": 0.04726835553719098, + "acc_norm": 0.5446428571428571, + "acc_norm_stderr": 0.04726835553719098 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.034076320938540516, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.034076320938540516 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.30502793296089387, + "acc_stderr": 0.015398723510916713, + "acc_norm": 0.30502793296089387, + "acc_norm_stderr": 0.015398723510916713 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.53, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.53, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.75, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.75, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.44485294117647056, + "acc_stderr": 0.030187532060329387, + "acc_norm": 0.44485294117647056, + "acc_norm_stderr": 0.030187532060329387 + }, + 
"harness|ko_mmlu_security_studies|5": { + "acc": 0.673469387755102, + "acc_stderr": 0.030021056238440327, + "acc_norm": 0.673469387755102, + "acc_norm_stderr": 0.030021056238440327 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7426160337552743, + "acc_stderr": 0.028458820991460302, + "acc_norm": 0.7426160337552743, + "acc_norm_stderr": 0.028458820991460302 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3767926988265971, + "acc_stderr": 0.012376459593894397, + "acc_norm": 0.3767926988265971, + "acc_norm_stderr": 0.012376459593894397 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6519607843137255, + "acc_stderr": 0.033433112404884176, + "acc_norm": 0.6519607843137255, + "acc_norm_stderr": 0.033433112404884176 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6848484848484848, + "acc_stderr": 0.0362773057502241, + "acc_norm": 0.6848484848484848, + "acc_norm_stderr": 0.0362773057502241 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.36107711138310894, + "mc1_stderr": 0.016814312844836886, + "mc2": 0.5164045998550099, + "mc2_stderr": 0.015796793534581084 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4805194805194805, + "acc_stderr": 0.017177301992342534, + "acc_norm": 0.5749704840613932, + "acc_norm_stderr": 0.016996016308362887 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + 
"harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + 
"harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "OpenBuddy/openbuddy-qwen1.5-14b-v21.1-32k", + "model_sha": "499957f6ef14deea113836647d8436274147fd3e", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/OpenBuddy/openbuddy-zephyr-7b-v14.1/result_2023-11-15 05:52:01.json b/OpenBuddy/openbuddy-zephyr-7b-v14.1/result_2023-11-15 05:52:01.json new file mode 100644 index 0000000000000000000000000000000000000000..2c431439b63009570b6d351722b6bec2c977fcb9 --- /dev/null +++ b/OpenBuddy/openbuddy-zephyr-7b-v14.1/result_2023-11-15 05:52:01.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.20136518771331058, + "acc_stderr": 0.011718927477444263, + "acc_norm": 0.25, + "acc_norm_stderr": 0.012653835621466646 + }, + "harness|ko_hellaswag|10": { + "acc": 0.2535351523600876, + "acc_stderr": 0.004341454841892329, + "acc_norm": 0.2508464449312886, + "acc_norm_stderr": 0.004326143430360104 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.033773102522091945, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.033773102522091945 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4077669902912621, + "acc_stderr": 0.048657775704107696, + "acc_norm": 0.4077669902912621, + "acc_norm_stderr": 0.048657775704107696 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3397190293742018, + "acc_stderr": 0.016936394114301645, + "acc_norm": 0.3397190293742018, + "acc_norm_stderr": 0.016936394114301645 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04072314811876837, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04072314811876837 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.34, + "acc_stderr": 0.047609522856952365, + "acc_norm": 0.34, + "acc_norm_stderr": 
0.047609522856952365 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.26382978723404255, + "acc_stderr": 0.02880998985410297, + "acc_norm": 0.26382978723404255, + "acc_norm_stderr": 0.02880998985410297 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.27710843373493976, + "acc_stderr": 0.03484331592680589, + "acc_norm": 0.27710843373493976, + "acc_norm_stderr": 0.03484331592680589 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.35691318327974275, + "acc_stderr": 0.02721042037593403, + "acc_norm": 0.35691318327974275, + "acc_norm_stderr": 0.02721042037593403 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.29596412556053814, + "acc_stderr": 0.030636591348699796, + "acc_norm": 0.29596412556053814, + "acc_norm_stderr": 0.030636591348699796 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.25190839694656486, + "acc_stderr": 0.03807387116306086, + "acc_norm": 0.25190839694656486, + "acc_norm_stderr": 0.03807387116306086 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.398989898989899, + "acc_stderr": 0.03488901616852731, + "acc_norm": 0.398989898989899, + "acc_norm_stderr": 0.03488901616852731 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.296551724137931, + "acc_stderr": 0.038061426873099935, + "acc_norm": 0.296551724137931, + "acc_norm_stderr": 0.038061426873099935 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.042207736591714534, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.042207736591714534 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.029344572500634342, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.029344572500634342 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3564102564102564, + "acc_stderr": 
0.02428314052946729, + "acc_norm": 0.3564102564102564, + "acc_norm_stderr": 0.02428314052946729 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.047609522856952344, + "acc_norm": 0.34, + "acc_norm_stderr": 0.047609522856952344 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04557239513497752, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04557239513497752 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.32019704433497537, + "acc_stderr": 0.032826493853041504, + "acc_norm": 0.32019704433497537, + "acc_norm_stderr": 0.032826493853041504 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3870967741935484, + "acc_stderr": 0.027709359675032495, + "acc_norm": 0.3870967741935484, + "acc_norm_stderr": 0.027709359675032495 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.4700854700854701, + "acc_stderr": 0.032697411068124425, + "acc_norm": 0.4700854700854701, + "acc_norm_stderr": 0.032697411068124425 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.30566037735849055, + "acc_stderr": 0.028353298073322666, + "acc_norm": 0.30566037735849055, + "acc_norm_stderr": 0.028353298073322666 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.34545454545454546, + "acc_stderr": 0.04554619617541054, + "acc_norm": 0.34545454545454546, + "acc_norm_stderr": 0.04554619617541054 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24814814814814815, + "acc_stderr": 0.0263357394040558, + "acc_norm": 0.24814814814814815, + "acc_norm_stderr": 0.0263357394040558 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.23841059602649006, + "acc_stderr": 0.034791855725996614, + "acc_norm": 0.23841059602649006, + "acc_norm_stderr": 0.034791855725996614 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 
0.3333333333333333, + "acc_stderr": 0.03333333333333333, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.03333333333333333 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2947976878612717, + "acc_stderr": 0.03476599607516478, + "acc_norm": 0.2947976878612717, + "acc_norm_stderr": 0.03476599607516478 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.29894179894179895, + "acc_stderr": 0.023577604791655812, + "acc_norm": 0.29894179894179895, + "acc_norm_stderr": 0.023577604791655812 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2916666666666667, + "acc_stderr": 0.03800968060554858, + "acc_norm": 0.2916666666666667, + "acc_norm_stderr": 0.03800968060554858 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.3901734104046243, + "acc_stderr": 0.026261677607806636, + "acc_norm": 0.3901734104046243, + "acc_norm_stderr": 0.026261677607806636 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3067484662576687, + "acc_stderr": 0.036230899157241474, + "acc_norm": 0.3067484662576687, + "acc_norm_stderr": 0.036230899157241474 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3425925925925926, + "acc_stderr": 0.026406145973625676, + "acc_norm": 0.3425925925925926, + "acc_norm_stderr": 0.026406145973625676 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.35751295336787564, + "acc_stderr": 0.03458816042181006, + "acc_norm": 0.35751295336787564, + "acc_norm_stderr": 0.03458816042181006 + }, + 
"harness|ko_mmlu_econometrics|5": { + "acc": 0.2982456140350877, + "acc_stderr": 0.04303684033537317, + "acc_norm": 0.2982456140350877, + "acc_norm_stderr": 0.04303684033537317 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.30091743119266057, + "acc_stderr": 0.019664751366802114, + "acc_norm": 0.30091743119266057, + "acc_norm_stderr": 0.019664751366802114 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.037184890068181146, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.037184890068181146 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.37254901960784315, + "acc_stderr": 0.027684181883302884, + "acc_norm": 0.37254901960784315, + "acc_norm_stderr": 0.027684181883302884 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.4297520661157025, + "acc_stderr": 0.04519082021319773, + "acc_norm": 0.4297520661157025, + "acc_norm_stderr": 0.04519082021319773 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.34210526315789475, + "acc_stderr": 0.03860731599316092, + "acc_norm": 0.34210526315789475, + "acc_norm_stderr": 0.03860731599316092 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.018054027458815194, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.018054027458815194 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.24113475177304963, + "acc_stderr": 0.025518731049537766, + "acc_norm": 0.24113475177304963, + "acc_norm_stderr": 0.025518731049537766 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.29464285714285715, + "acc_stderr": 0.04327040932578731, + "acc_norm": 0.29464285714285715, + "acc_norm_stderr": 0.04327040932578731 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3425925925925926, + "acc_stderr": 0.032365852526021595, + "acc_norm": 
0.3425925925925926, + "acc_norm_stderr": 0.032365852526021595 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24804469273743016, + "acc_stderr": 0.01444415780826145, + "acc_norm": 0.24804469273743016, + "acc_norm_stderr": 0.01444415780826145 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.33455882352941174, + "acc_stderr": 0.028661996202335307, + "acc_norm": 0.33455882352941174, + "acc_norm_stderr": 0.028661996202335307 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4, + "acc_stderr": 0.03136250240935892, + "acc_norm": 0.4, + "acc_norm_stderr": 0.03136250240935892 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.379746835443038, + "acc_stderr": 0.03159188752965851, + "acc_norm": 0.379746835443038, + "acc_norm_stderr": 0.03159188752965851 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.30964797913950454, + "acc_stderr": 0.011808598262503318, + "acc_norm": 0.30964797913950454, + "acc_norm_stderr": 0.011808598262503318 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.030190282453501954, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.030190282453501954 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2606060606060606, + "acc_stderr": 0.03427743175816524, + "acc_norm": 0.2606060606060606, + "acc_norm_stderr": 0.03427743175816524 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.25703794369645044, + "mc1_stderr": 0.01529807750948508, + "mc2": 0.4426420327385365, + "mc2_stderr": 0.016343415069680026 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.08146399055489964, + "acc_stderr": 
0.00940471744194626, + "acc_norm": 0.32585596221959856, + "acc_norm_stderr": 0.01611402389480034 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + 
"harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "OpenBuddy/openbuddy-zephyr-7b-v14.1", + "model_sha": "208b6fb841239a36fb0ea675179a231e0ad9d287", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/OpenModels4all/gemma-1.1-7b-it/result_2024-04-09 02:55:14.json b/OpenModels4all/gemma-1.1-7b-it/result_2024-04-09 02:55:14.json new file mode 100644 index 0000000000000000000000000000000000000000..61ce60f3377a3c68122e533fe7baac0f94941630 --- /dev/null +++ b/OpenModels4all/gemma-1.1-7b-it/result_2024-04-09 02:55:14.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2235494880546075, + "acc_stderr": 0.012174896631202605, + "acc_norm": 0.25853242320819114, + "acc_norm_stderr": 0.01279455375428868 + }, + "harness|ko_hellaswag|10": { + "acc": 0.2610037841067516, + "acc_stderr": 0.004382844128643415, + "acc_norm": 0.2634933280223063, + "acc_norm_stderr": 0.004396273173717454 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.3157894736842105, + "acc_stderr": 0.0356507967070831, + 
"acc_norm": 0.3157894736842105, + "acc_norm_stderr": 0.0356507967070831 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.17475728155339806, + "acc_stderr": 0.037601780060266196, + "acc_norm": 0.17475728155339806, + "acc_norm_stderr": 0.037601780060266196 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.27586206896551724, + "acc_stderr": 0.01598281477469563, + "acc_norm": 0.27586206896551724, + "acc_norm_stderr": 0.01598281477469563 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.35555555555555557, + "acc_stderr": 0.04135176749720386, + "acc_norm": 0.35555555555555557, + "acc_norm_stderr": 0.04135176749720386 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.20851063829787234, + "acc_stderr": 0.02655698211783875, + "acc_norm": 0.20851063829787234, + "acc_norm_stderr": 0.02655698211783875 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.25301204819277107, + "acc_stderr": 0.03384429155233138, + "acc_norm": 0.25301204819277107, + "acc_norm_stderr": 0.03384429155233138 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3022508038585209, + "acc_stderr": 0.02608270069539966, + "acc_norm": 0.3022508038585209, + "acc_norm_stderr": 0.02608270069539966 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3004484304932735, + "acc_stderr": 0.03076935200822914, + "acc_norm": 0.3004484304932735, + "acc_norm_stderr": 0.03076935200822914 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.21374045801526717, + "acc_stderr": 0.0359546161177469, + "acc_norm": 0.21374045801526717, + "acc_norm_stderr": 0.0359546161177469 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.03191178226713546, + 
"acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.03191178226713546 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.296551724137931, + "acc_stderr": 0.03806142687309994, + "acc_norm": 0.296551724137931, + "acc_norm_stderr": 0.03806142687309994 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237654, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237654 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.226890756302521, + "acc_stderr": 0.027205371538279472, + "acc_norm": 0.226890756302521, + "acc_norm_stderr": 0.027205371538279472 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.258974358974359, + "acc_stderr": 0.02221110681006167, + "acc_norm": 0.258974358974359, + "acc_norm_stderr": 0.02221110681006167 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.040191074725573483, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.040191074725573483 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.15270935960591134, + "acc_stderr": 0.025308904539380627, + "acc_norm": 0.15270935960591134, + "acc_norm_stderr": 0.025308904539380627 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.25161290322580643, + "acc_stderr": 0.024685979286239956, + "acc_norm": 0.25161290322580643, + "acc_norm_stderr": 0.024685979286239956 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.21367521367521367, + "acc_stderr": 0.02685345037700917, + "acc_norm": 0.21367521367521367, + "acc_norm_stderr": 0.02685345037700917 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 
0.22264150943396227, + "acc_stderr": 0.02560423347089911, + "acc_norm": 0.22264150943396227, + "acc_norm_stderr": 0.02560423347089911 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.22727272727272727, + "acc_stderr": 0.04013964554072774, + "acc_norm": 0.22727272727272727, + "acc_norm_stderr": 0.04013964554072774 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2518518518518518, + "acc_stderr": 0.026466117538959912, + "acc_norm": 0.2518518518518518, + "acc_norm_stderr": 0.026466117538959912 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.1986754966887417, + "acc_stderr": 0.03257847384436776, + "acc_norm": 0.1986754966887417, + "acc_norm_stderr": 0.03257847384436776 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.24875621890547264, + "acc_stderr": 0.030567675938916714, + "acc_norm": 0.24875621890547264, + "acc_norm_stderr": 0.030567675938916714 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.23699421965317918, + "acc_stderr": 0.03242414757483098, + "acc_norm": 0.23699421965317918, + "acc_norm_stderr": 0.03242414757483098 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2566137566137566, + "acc_stderr": 0.022494510767503154, + "acc_norm": 0.2566137566137566, + "acc_norm_stderr": 0.022494510767503154 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2708333333333333, + "acc_stderr": 0.03716177437566017, + "acc_norm": 0.2708333333333333, + "acc_norm_stderr": 0.03716177437566017 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.2, + "acc_stderr": 0.040201512610368445, + "acc_norm": 0.2, + "acc_norm_stderr": 0.040201512610368445 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2658959537572254, + "acc_stderr": 0.023786203255508287, + "acc_norm": 0.2658959537572254, + "acc_norm_stderr": 0.023786203255508287 + }, + 
"harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.294478527607362, + "acc_stderr": 0.03581165790474082, + "acc_norm": 0.294478527607362, + "acc_norm_stderr": 0.03581165790474082 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.26851851851851855, + "acc_stderr": 0.024659685185967277, + "acc_norm": 0.26851851851851855, + "acc_norm_stderr": 0.024659685185967277 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.23834196891191708, + "acc_stderr": 0.030748905363909895, + "acc_norm": 0.23834196891191708, + "acc_norm_stderr": 0.030748905363909895 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.04049339297748141, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.04049339297748141 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.23486238532110093, + "acc_stderr": 0.01817511051034357, + "acc_norm": 0.23486238532110093, + "acc_norm_stderr": 0.01817511051034357 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.15079365079365079, + "acc_stderr": 0.03200686497287392, + "acc_norm": 0.15079365079365079, + "acc_norm_stderr": 0.03200686497287392 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.23202614379084968, + "acc_stderr": 0.024170840879341016, + "acc_norm": 0.23202614379084968, + "acc_norm_stderr": 0.024170840879341016 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.39669421487603307, + "acc_stderr": 0.04465869780531009, + "acc_norm": 0.39669421487603307, + "acc_norm_stderr": 0.04465869780531009 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.17763157894736842, + "acc_stderr": 0.03110318238312338, + "acc_norm": 0.17763157894736842, + 
"acc_norm_stderr": 0.03110318238312338 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.238562091503268, + "acc_stderr": 0.017242385828779593, + "acc_norm": 0.238562091503268, + "acc_norm_stderr": 0.017242385828779593 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.23404255319148937, + "acc_stderr": 0.02525786135943241, + "acc_norm": 0.23404255319148937, + "acc_norm_stderr": 0.02525786135943241 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.24107142857142858, + "acc_stderr": 0.04059867246952689, + "acc_norm": 0.24107142857142858, + "acc_norm_stderr": 0.04059867246952689 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.03350991604696043, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.03350991604696043 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23798882681564246, + "acc_stderr": 0.014242630070574892, + "acc_norm": 0.23798882681564246, + "acc_norm_stderr": 0.014242630070574892 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.23897058823529413, + "acc_stderr": 0.025905280644893, + "acc_norm": 0.23897058823529413, + "acc_norm_stderr": 0.025905280644893 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.32653061224489793, + "acc_stderr": 0.030021056238440317, + "acc_norm": 0.32653061224489793, + "acc_norm_stderr": 0.030021056238440317 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2869198312236287, + "acc_stderr": 0.029443773022594696, + "acc_norm": 0.2869198312236287, + "acc_norm_stderr": 0.029443773022594696 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 
0.24511082138200782, + "acc_stderr": 0.010986307870045526, + "acc_norm": 0.24511082138200782, + "acc_norm_stderr": 0.010986307870045526 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.0309645179269234, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.0309645179269234 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.23030303030303031, + "acc_stderr": 0.03287666758603489, + "acc_norm": 0.23030303030303031, + "acc_norm_stderr": 0.03287666758603489 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2913096695226438, + "mc1_stderr": 0.015905987048184828, + "mc2": 0.47511549044806445, + "mc2_stderr": 0.01675534080180412 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.21487603305785125, + "acc_stderr": 0.01412140552290331, + "acc_norm": 0.29515938606847697, + "acc_norm_stderr": 0.015681535229192203 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + 
"harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "OpenModels4all/gemma-1.1-7b-it", + "model_sha": "484670e31c44eededf8c64064eb0207e7af4a2b3", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git 
a/ParkTaeEon/Myrrh_solar_10.7b_v0.1-dpo/result_2024-04-05 09:07:13.json b/ParkTaeEon/Myrrh_solar_10.7b_v0.1-dpo/result_2024-04-05 09:07:13.json new file mode 100644 index 0000000000000000000000000000000000000000..cd0eda0b96781a4a67c25b31ff929794283575c5 --- /dev/null +++ b/ParkTaeEon/Myrrh_solar_10.7b_v0.1-dpo/result_2024-04-05 09:07:13.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.7039249146757679, + "acc_stderr": 0.013340916085246256, + "acc_norm": 0.7474402730375427, + "acc_norm_stderr": 0.01269672898020771 + }, + "harness|ko_hellaswag|10": { + "acc": 0.5087631945827524, + "acc_stderr": 0.004989014986235633, + "acc_norm": 0.6724756024696276, + "acc_norm_stderr": 0.004683511716552247 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.7251461988304093, + "acc_stderr": 0.034240429246915824, + "acc_norm": 0.7251461988304093, + "acc_norm_stderr": 0.034240429246915824 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.7184466019417476, + "acc_stderr": 0.04453254836326467, + "acc_norm": 0.7184466019417476, + "acc_norm_stderr": 0.04453254836326467 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.7343550446998723, + "acc_stderr": 0.01579430248788872, + "acc_norm": 0.7343550446998723, + "acc_norm_stderr": 0.01579430248788872 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4740740740740741, + "acc_stderr": 0.04313531696750574, + "acc_norm": 0.4740740740740741, + "acc_norm_stderr": 0.04313531696750574 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.5319148936170213, + "acc_stderr": 0.03261936918467381, + "acc_norm": 0.5319148936170213, + "acc_norm_stderr": 0.03261936918467381 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.5301204819277109, + "acc_stderr": 0.03885425420866767, + "acc_norm": 0.5301204819277109, + "acc_norm_stderr": 
0.03885425420866767 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6205787781350482, + "acc_stderr": 0.027559949802347813, + "acc_norm": 0.6205787781350482, + "acc_norm_stderr": 0.027559949802347813 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.6367713004484304, + "acc_stderr": 0.032277904428505, + "acc_norm": 0.6367713004484304, + "acc_norm_stderr": 0.032277904428505 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6106870229007634, + "acc_stderr": 0.042764865428145914, + "acc_norm": 0.6106870229007634, + "acc_norm_stderr": 0.042764865428145914 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7676767676767676, + "acc_stderr": 0.030088629490217487, + "acc_norm": 0.7676767676767676, + "acc_norm_stderr": 0.030088629490217487 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5586206896551724, + "acc_stderr": 0.04137931034482757, + "acc_norm": 0.5586206896551724, + "acc_norm_stderr": 0.04137931034482757 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.30392156862745096, + "acc_stderr": 0.04576665403207762, + "acc_norm": 0.30392156862745096, + "acc_norm_stderr": 0.04576665403207762 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6554621848739496, + "acc_stderr": 0.030868682604121622, + "acc_norm": 0.6554621848739496, + "acc_norm_stderr": 0.030868682604121622 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.6512820512820513, + "acc_stderr": 0.02416278028401772, + "acc_norm": 0.6512820512820513, + "acc_norm_stderr": 0.02416278028401772 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.68, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.68, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + 
"acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6296296296296297, + "acc_stderr": 0.04668408033024931, + "acc_norm": 0.6296296296296297, + "acc_norm_stderr": 0.04668408033024931 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.45320197044334976, + "acc_stderr": 0.035025446508458714, + "acc_norm": 0.45320197044334976, + "acc_norm_stderr": 0.035025446508458714 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6451612903225806, + "acc_stderr": 0.027218889773308753, + "acc_norm": 0.6451612903225806, + "acc_norm_stderr": 0.027218889773308753 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.8418803418803419, + "acc_stderr": 0.023902325549560406, + "acc_norm": 0.8418803418803419, + "acc_norm_stderr": 0.023902325549560406 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.6113207547169811, + "acc_stderr": 0.03000048544867599, + "acc_norm": 0.6113207547169811, + "acc_norm_stderr": 0.03000048544867599 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6818181818181818, + "acc_stderr": 0.044612721759105085, + "acc_norm": 0.6818181818181818, + "acc_norm_stderr": 0.044612721759105085 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3592592592592593, + "acc_stderr": 0.02925290592725198, + "acc_norm": 0.3592592592592593, + "acc_norm_stderr": 0.02925290592725198 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.37748344370860926, + "acc_stderr": 0.03958027231121569, + "acc_norm": 0.37748344370860926, + "acc_norm_stderr": 0.03958027231121569 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7412935323383084, + "acc_stderr": 0.030965903123573037, + "acc_norm": 0.7412935323383084, + "acc_norm_stderr": 0.030965903123573037 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5549132947976878, + "acc_stderr": 0.03789401760283647, + "acc_norm": 0.5549132947976878, + "acc_norm_stderr": 0.03789401760283647 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 
0.43386243386243384, + "acc_stderr": 0.02552503438247489, + "acc_norm": 0.43386243386243384, + "acc_norm_stderr": 0.02552503438247489 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5763888888888888, + "acc_stderr": 0.04132125019723369, + "acc_norm": 0.5763888888888888, + "acc_norm_stderr": 0.04132125019723369 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.77, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.77, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.6242774566473989, + "acc_stderr": 0.02607431485165708, + "acc_norm": 0.6242774566473989, + "acc_norm_stderr": 0.02607431485165708 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.6012269938650306, + "acc_stderr": 0.038470214204560246, + "acc_norm": 0.6012269938650306, + "acc_norm_stderr": 0.038470214204560246 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6419753086419753, + "acc_stderr": 0.02667561192603711, + "acc_norm": 0.6419753086419753, + "acc_norm_stderr": 0.02667561192603711 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7616580310880829, + "acc_stderr": 0.030748905363909902, + "acc_norm": 0.7616580310880829, + "acc_norm_stderr": 0.030748905363909902 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.40350877192982454, + "acc_stderr": 0.04615186962583703, + "acc_norm": 0.40350877192982454, + "acc_norm_stderr": 0.04615186962583703 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.763302752293578, + "acc_stderr": 0.018224078117299054, + "acc_norm": 0.763302752293578, + "acc_norm_stderr": 0.018224078117299054 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.42857142857142855, 
+ "acc_stderr": 0.0442626668137991, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.0442626668137991 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.6339869281045751, + "acc_stderr": 0.027582811415159607, + "acc_norm": 0.6339869281045751, + "acc_norm_stderr": 0.027582811415159607 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.67, + "acc_stderr": 0.04725815626252609, + "acc_norm": 0.67, + "acc_norm_stderr": 0.04725815626252609 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7272727272727273, + "acc_stderr": 0.04065578140908705, + "acc_norm": 0.7272727272727273, + "acc_norm_stderr": 0.04065578140908705 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.6052631578947368, + "acc_stderr": 0.039777499346220734, + "acc_norm": 0.6052631578947368, + "acc_norm_stderr": 0.039777499346220734 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5866013071895425, + "acc_stderr": 0.01992211568278668, + "acc_norm": 0.5866013071895425, + "acc_norm_stderr": 0.01992211568278668 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.44680851063829785, + "acc_stderr": 0.029658235097666907, + "acc_norm": 0.44680851063829785, + "acc_norm_stderr": 0.029658235097666907 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4107142857142857, + "acc_stderr": 0.04669510663875191, + "acc_norm": 0.4107142857142857, + "acc_norm_stderr": 0.04669510663875191 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5046296296296297, + "acc_stderr": 0.03409825519163572, + "acc_norm": 0.5046296296296297, + "acc_norm_stderr": 0.03409825519163572 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.3329608938547486, + "acc_stderr": 0.01576171617839756, + "acc_norm": 0.3329608938547486, + "acc_norm_stderr": 0.01576171617839756 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956912 + }, + 
"harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.72, + "acc_stderr": 0.045126085985421296, + "acc_norm": 0.72, + "acc_norm_stderr": 0.045126085985421296 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5588235294117647, + "acc_stderr": 0.03016191193076711, + "acc_norm": 0.5588235294117647, + "acc_norm_stderr": 0.03016191193076711 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6244897959183674, + "acc_stderr": 0.031001209039894843, + "acc_norm": 0.6244897959183674, + "acc_norm_stderr": 0.031001209039894843 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7510548523206751, + "acc_stderr": 0.028146970599422644, + "acc_norm": 0.7510548523206751, + "acc_norm_stderr": 0.028146970599422644 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.45371577574967403, + "acc_stderr": 0.012715404841277748, + "acc_norm": 0.45371577574967403, + "acc_norm_stderr": 0.012715404841277748 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6715686274509803, + "acc_stderr": 0.03296245110172229, + "acc_norm": 0.6715686274509803, + "acc_norm_stderr": 0.03296245110172229 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6484848484848484, + "acc_stderr": 0.037282069986826503, + "acc_norm": 0.6484848484848484, + "acc_norm_stderr": 0.037282069986826503 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.6609547123623011, + "mc1_stderr": 0.016571797910626605, + "mc2": 0.7475407070546581, + "mc2_stderr": 0.014153696507814086 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.45690672963400236, + "acc_stderr": 0.017126389093086777, + "acc_norm": 0.4757969303423849, + "acc_norm_stderr": 0.017170202466520748 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + 
"harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + 
"harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "ParkTaeEon/Myrrh_solar_10.7b_v0.1-dpo", + "model_sha": "6401bbbeb47d17da8a8fa3a8d8374a7ecd54a005", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/ParkTaeEon/Myrrh_solar_10.7b_v0.2/result_2024-04-08 03:08:50.json b/ParkTaeEon/Myrrh_solar_10.7b_v0.2/result_2024-04-08 03:08:50.json new file mode 100644 index 0000000000000000000000000000000000000000..8b7bff9af4df2c61e21996880b54a5482886e175 --- /dev/null +++ b/ParkTaeEon/Myrrh_solar_10.7b_v0.2/result_2024-04-08 03:08:50.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.7209897610921502, + "acc_stderr": 0.01310678488360134, + "acc_norm": 0.7602389078498294, + "acc_norm_stderr": 0.012476304127453949 + }, + "harness|ko_hellaswag|10": { + "acc": 0.604959171479785, + "acc_stderr": 0.004878603699686037, + "acc_norm": 0.7476598287193786, + "acc_norm_stderr": 0.0043346769527038585 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.7251461988304093, + "acc_stderr": 0.034240429246915824, + "acc_norm": 0.7251461988304093, + "acc_norm_stderr": 0.034240429246915824 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.7281553398058253, + "acc_stderr": 0.044052680241409216, + "acc_norm": 0.7281553398058253, + "acc_norm_stderr": 0.044052680241409216 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6973180076628352, + "acc_stderr": 0.016428781581749367, + "acc_norm": 
0.6973180076628352, + "acc_norm_stderr": 0.016428781581749367 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45925925925925926, + "acc_stderr": 0.04304979692464244, + "acc_norm": 0.45925925925925926, + "acc_norm_stderr": 0.04304979692464244 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.5234042553191489, + "acc_stderr": 0.03265019475033581, + "acc_norm": 0.5234042553191489, + "acc_norm_stderr": 0.03265019475033581 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.5060240963855421, + "acc_stderr": 0.038922121953330446, + "acc_norm": 0.5060240963855421, + "acc_norm_stderr": 0.038922121953330446 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.639871382636656, + "acc_stderr": 0.027264297599804012, + "acc_norm": 0.639871382636656, + "acc_norm_stderr": 0.027264297599804012 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.6771300448430493, + "acc_stderr": 0.03138147637575499, + "acc_norm": 0.6771300448430493, + "acc_norm_stderr": 0.03138147637575499 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5801526717557252, + "acc_stderr": 0.04328577215262972, + "acc_norm": 0.5801526717557252, + "acc_norm_stderr": 0.04328577215262972 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7676767676767676, + "acc_stderr": 0.030088629490217487, + "acc_norm": 0.7676767676767676, + "acc_norm_stderr": 0.030088629490217487 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5793103448275863, + "acc_stderr": 0.0411391498118926, + "acc_norm": 0.5793103448275863, + "acc_norm_stderr": 0.0411391498118926 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04690650298201942, + "acc_norm": 
0.3333333333333333, + "acc_norm_stderr": 0.04690650298201942 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6512605042016807, + "acc_stderr": 0.030956636328566545, + "acc_norm": 0.6512605042016807, + "acc_norm_stderr": 0.030956636328566545 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.617948717948718, + "acc_stderr": 0.024635549163908237, + "acc_norm": 0.617948717948718, + "acc_norm_stderr": 0.024635549163908237 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.67, + "acc_stderr": 0.047258156262526094, + "acc_norm": 0.67, + "acc_norm_stderr": 0.047258156262526094 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.04557239513497751, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.04557239513497751 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.47783251231527096, + "acc_stderr": 0.035145285621750094, + "acc_norm": 0.47783251231527096, + "acc_norm_stderr": 0.035145285621750094 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6290322580645161, + "acc_stderr": 0.027480541887953593, + "acc_norm": 0.6290322580645161, + "acc_norm_stderr": 0.027480541887953593 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.8376068376068376, + "acc_stderr": 0.024161618127987745, + "acc_norm": 0.8376068376068376, + "acc_norm_stderr": 0.024161618127987745 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5962264150943396, + "acc_stderr": 0.03019761160019795, + "acc_norm": 0.5962264150943396, + "acc_norm_stderr": 0.03019761160019795 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6727272727272727, + "acc_stderr": 0.04494290866252091, + "acc_norm": 0.6727272727272727, + "acc_norm_stderr": 0.04494290866252091 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.4, + "acc_stderr": 
0.029869605095316904, + "acc_norm": 0.4, + "acc_norm_stderr": 0.029869605095316904 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33112582781456956, + "acc_stderr": 0.038425817186598696, + "acc_norm": 0.33112582781456956, + "acc_norm_stderr": 0.038425817186598696 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7512437810945274, + "acc_stderr": 0.030567675938916714, + "acc_norm": 0.7512437810945274, + "acc_norm_stderr": 0.030567675938916714 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5722543352601156, + "acc_stderr": 0.03772446857518027, + "acc_norm": 0.5722543352601156, + "acc_norm_stderr": 0.03772446857518027 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.4576719576719577, + "acc_stderr": 0.02565886886205832, + "acc_norm": 0.4576719576719577, + "acc_norm_stderr": 0.02565886886205832 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.6319444444444444, + "acc_stderr": 0.040329990539607195, + "acc_norm": 0.6319444444444444, + "acc_norm_stderr": 0.040329990539607195 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.77, + "acc_stderr": 0.04229525846816508, + "acc_norm": 0.77, + "acc_norm_stderr": 0.04229525846816508 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.615606936416185, + "acc_stderr": 0.026189666966272035, + "acc_norm": 0.615606936416185, + "acc_norm_stderr": 0.026189666966272035 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.588957055214724, + "acc_stderr": 0.038656978537853624, + "acc_norm": 0.588957055214724, + "acc_norm_stderr": 0.038656978537853624 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.026229649178821163, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.026229649178821163 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 
0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7564766839378239, + "acc_stderr": 0.030975436386845436, + "acc_norm": 0.7564766839378239, + "acc_norm_stderr": 0.030975436386845436 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.4649122807017544, + "acc_stderr": 0.046920083813689104, + "acc_norm": 0.4649122807017544, + "acc_norm_stderr": 0.046920083813689104 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.7467889908256881, + "acc_stderr": 0.018644073041375053, + "acc_norm": 0.7467889908256881, + "acc_norm_stderr": 0.018644073041375053 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.044444444444444495, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.044444444444444495 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.6372549019607843, + "acc_stderr": 0.02753007844711031, + "acc_norm": 0.6372549019607843, + "acc_norm_stderr": 0.02753007844711031 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.71, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.71, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7520661157024794, + "acc_stderr": 0.03941897526516303, + "acc_norm": 0.7520661157024794, + "acc_norm_stderr": 0.03941897526516303 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.6513157894736842, + "acc_stderr": 0.03878139888797611, + "acc_norm": 0.6513157894736842, + "acc_norm_stderr": 0.03878139888797611 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5964052287581699, + "acc_stderr": 0.01984828016840116, + "acc_norm": 0.5964052287581699, + "acc_norm_stderr": 0.01984828016840116 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.4078014184397163, + "acc_stderr": 0.02931601177634356, + "acc_norm": 0.4078014184397163, + "acc_norm_stderr": 0.02931601177634356 + }, + 
"harness|ko_mmlu_machine_learning|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.04697113923010213, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.04697113923010213 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5, + "acc_stderr": 0.034099716973523674, + "acc_norm": 0.5, + "acc_norm_stderr": 0.034099716973523674 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.36312849162011174, + "acc_stderr": 0.016083749986853697, + "acc_norm": 0.36312849162011174, + "acc_norm_stderr": 0.016083749986853697 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.67, + "acc_stderr": 0.047258156262526094, + "acc_norm": 0.67, + "acc_norm_stderr": 0.047258156262526094 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5477941176470589, + "acc_stderr": 0.03023375855159645, + "acc_norm": 0.5477941176470589, + "acc_norm_stderr": 0.03023375855159645 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6857142857142857, + "acc_stderr": 0.029719329422417454, + "acc_norm": 0.6857142857142857, + "acc_norm_stderr": 0.029719329422417454 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7257383966244726, + "acc_stderr": 0.029041333510598025, + "acc_norm": 0.7257383966244726, + "acc_norm_stderr": 0.029041333510598025 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.4485006518904824, + "acc_stderr": 0.01270231749055982, + "acc_norm": 0.4485006518904824, + "acc_norm_stderr": 0.01270231749055982 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6323529411764706, + "acc_stderr": 0.03384132045674118, + "acc_norm": 0.6323529411764706, + "acc_norm_stderr": 0.03384132045674118 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6484848484848484, + "acc_stderr": 0.037282069986826503, + "acc_norm": 
0.6484848484848484, + "acc_norm_stderr": 0.037282069986826503 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.7503059975520195, + "mc1_stderr": 0.015152286907148123, + "mc2": 0.8081937963912694, + "mc2_stderr": 0.013275492049791426 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5159386068476978, + "acc_stderr": 0.017181617837190195, + "acc_norm": 0.5312868949232585, + "acc_norm_stderr": 0.017156666859785466 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + 
"harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "ParkTaeEon/Myrrh_solar_10.7b_v0.2", + "model_sha": "0d9cdfd33cef884c02c0ea1bd6779947946005cc", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/PathFinderKR/Waktaverse-Llama-3-KO-8B-Instruct-v2/result_2024-05-15 12:36:39.json b/PathFinderKR/Waktaverse-Llama-3-KO-8B-Instruct-v2/result_2024-05-15 12:36:39.json new file mode 100644 index 0000000000000000000000000000000000000000..9b962d98bc69527d22f714c64fe8139ce95b45a7 --- /dev/null +++ b/PathFinderKR/Waktaverse-Llama-3-KO-8B-Instruct-v2/result_2024-05-15 12:36:39.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4069965870307167, + "acc_stderr": 0.01435639941800912, + 
"acc_norm": 0.4616040955631399, + "acc_norm_stderr": 0.01456824555029636 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4108743278231428, + "acc_stderr": 0.004909870006388833, + "acc_norm": 0.5520812587134037, + "acc_norm_stderr": 0.0049626384463959845 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6140350877192983, + "acc_stderr": 0.03733756969066164, + "acc_norm": 0.6140350877192983, + "acc_norm_stderr": 0.03733756969066164 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6019417475728155, + "acc_stderr": 0.04846748253977238, + "acc_norm": 0.6019417475728155, + "acc_norm_stderr": 0.04846748253977238 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5632183908045977, + "acc_stderr": 0.01773647083780069, + "acc_norm": 0.5632183908045977, + "acc_norm_stderr": 0.01773647083780069 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4888888888888889, + "acc_stderr": 0.04318275491977976, + "acc_norm": 0.4888888888888889, + "acc_norm_stderr": 0.04318275491977976 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4765957446808511, + "acc_stderr": 0.03265019475033582, + "acc_norm": 0.4765957446808511, + "acc_norm_stderr": 0.03265019475033582 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42771084337349397, + "acc_stderr": 0.03851597683718533, + "acc_norm": 0.42771084337349397, + "acc_norm_stderr": 0.03851597683718533 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5048231511254019, + "acc_stderr": 0.028396770444111298, + "acc_norm": 0.5048231511254019, + "acc_norm_stderr": 0.028396770444111298 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.47085201793721976, + "acc_stderr": 0.03350073248773403, + "acc_norm": 0.47085201793721976, + "acc_norm_stderr": 0.03350073248773403 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5572519083969466, + "acc_stderr": 0.043564472026650695, 
+ "acc_norm": 0.5572519083969466, + "acc_norm_stderr": 0.043564472026650695 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6262626262626263, + "acc_stderr": 0.034468977386593325, + "acc_norm": 0.6262626262626263, + "acc_norm_stderr": 0.034468977386593325 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5655172413793104, + "acc_stderr": 0.041307408795554966, + "acc_norm": 0.5655172413793104, + "acc_norm_stderr": 0.041307408795554966 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.04023382273617749, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.04023382273617749 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4369747899159664, + "acc_stderr": 0.032219436365661956, + "acc_norm": 0.4369747899159664, + "acc_norm_stderr": 0.032219436365661956 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4641025641025641, + "acc_stderr": 0.025285585990017824, + "acc_norm": 0.4641025641025641, + "acc_norm_stderr": 0.025285585990017824 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.55, + "acc_stderr": 0.05, + "acc_norm": 0.55, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5925925925925926, + "acc_stderr": 0.04750077341199984, + "acc_norm": 0.5925925925925926, + "acc_norm_stderr": 0.04750077341199984 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4236453201970443, + "acc_stderr": 0.034767257476490364, + "acc_norm": 0.4236453201970443, + "acc_norm_stderr": 0.034767257476490364 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.46774193548387094, + "acc_stderr": 
0.028384747788813326, + "acc_norm": 0.46774193548387094, + "acc_norm_stderr": 0.028384747788813326 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7307692307692307, + "acc_stderr": 0.02905858830374884, + "acc_norm": 0.7307692307692307, + "acc_norm_stderr": 0.02905858830374884 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.49433962264150944, + "acc_stderr": 0.030770900763851302, + "acc_norm": 0.49433962264150944, + "acc_norm_stderr": 0.030770900763851302 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4909090909090909, + "acc_stderr": 0.0478833976870286, + "acc_norm": 0.4909090909090909, + "acc_norm_stderr": 0.0478833976870286 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.02911661760608303, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.02911661760608303 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33774834437086093, + "acc_stderr": 0.038615575462551684, + "acc_norm": 0.33774834437086093, + "acc_norm_stderr": 0.038615575462551684 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6368159203980099, + "acc_stderr": 0.034005985055990146, + "acc_norm": 0.6368159203980099, + "acc_norm_stderr": 0.034005985055990146 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.43352601156069365, + "acc_stderr": 0.03778621079092055, + "acc_norm": 0.43352601156069365, + "acc_norm_stderr": 0.03778621079092055 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3386243386243386, + "acc_stderr": 0.02437319786798306, + "acc_norm": 0.3386243386243386, + "acc_norm_stderr": 0.02437319786798306 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.04122728707651281, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.04122728707651281 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + 
"harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.73, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.73, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5057803468208093, + "acc_stderr": 0.026917296179149123, + "acc_norm": 0.5057803468208093, + "acc_norm_stderr": 0.026917296179149123 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5030674846625767, + "acc_stderr": 0.03928297078179663, + "acc_norm": 0.5030674846625767, + "acc_norm_stderr": 0.03928297078179663 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5, + "acc_stderr": 0.02782074420373286, + "acc_norm": 0.5, + "acc_norm_stderr": 0.02782074420373286 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5699481865284974, + "acc_stderr": 0.03572954333144807, + "acc_norm": 0.5699481865284974, + "acc_norm_stderr": 0.03572954333144807 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.30701754385964913, + "acc_stderr": 0.0433913832257986, + "acc_norm": 0.30701754385964913, + "acc_norm_stderr": 0.0433913832257986 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5871559633027523, + "acc_stderr": 0.021109128133413913, + "acc_norm": 0.5871559633027523, + "acc_norm_stderr": 0.021109128133413913 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30158730158730157, + "acc_stderr": 0.04104947269903394, + "acc_norm": 0.30158730158730157, + "acc_norm_stderr": 0.04104947269903394 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5228758169934641, + "acc_stderr": 0.028599936776089782, + "acc_norm": 0.5228758169934641, + "acc_norm_stderr": 0.028599936776089782 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956912 + }, + 
"harness|ko_mmlu_international_law|5": { + "acc": 0.6611570247933884, + "acc_stderr": 0.04320767807536669, + "acc_norm": 0.6611570247933884, + "acc_norm_stderr": 0.04320767807536669 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.506578947368421, + "acc_stderr": 0.040685900502249704, + "acc_norm": 0.506578947368421, + "acc_norm_stderr": 0.040685900502249704 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.39869281045751637, + "acc_stderr": 0.019808281317449848, + "acc_norm": 0.39869281045751637, + "acc_norm_stderr": 0.019808281317449848 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3262411347517731, + "acc_stderr": 0.027968453043563164, + "acc_norm": 0.3262411347517731, + "acc_norm_stderr": 0.027968453043563164 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3482142857142857, + "acc_stderr": 0.04521829902833587, + "acc_norm": 0.3482142857142857, + "acc_norm_stderr": 0.04521829902833587 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.03362277436608043, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.03362277436608043 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23575418994413408, + "acc_stderr": 0.014196375686290803, + "acc_norm": 0.23575418994413408, + "acc_norm_stderr": 0.014196375686290803 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3860294117647059, + "acc_stderr": 0.029573269134411124, + "acc_norm": 0.3860294117647059, + "acc_norm_stderr": 0.029573269134411124 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5795918367346938, + "acc_stderr": 0.031601069934496004, + "acc_norm": 
0.5795918367346938, + "acc_norm_stderr": 0.031601069934496004 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6075949367088608, + "acc_stderr": 0.0317847187456473, + "acc_norm": 0.6075949367088608, + "acc_norm_stderr": 0.0317847187456473 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.33702737940026073, + "acc_stderr": 0.012072836273691325, + "acc_norm": 0.33702737940026073, + "acc_norm_stderr": 0.012072836273691325 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.47549019607843135, + "acc_stderr": 0.035050931943487976, + "acc_norm": 0.47549019607843135, + "acc_norm_stderr": 0.035050931943487976 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5636363636363636, + "acc_stderr": 0.03872592983524754, + "acc_norm": 0.5636363636363636, + "acc_norm_stderr": 0.03872592983524754 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.26560587515299877, + "mc1_stderr": 0.015461027627253595, + "mc2": 0.4213996960500084, + "mc2_stderr": 0.015028409383683924 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4427390791027155, + "acc_stderr": 0.017077254131556224, + "acc_norm": 0.5548996458087367, + "acc_norm_stderr": 0.01708641743100547 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + 
"harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": 
"PathFinderKR/Waktaverse-Llama-3-KO-8B-Instruct-v2", + "model_sha": "15f9b6eedc33ea8f12ef75b8f111239891216fe7", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/PathFinderKR/Waktaverse-Llama-3-KO-8B-Instruct/result_2024-05-01 11:03:29.json b/PathFinderKR/Waktaverse-Llama-3-KO-8B-Instruct/result_2024-05-01 11:03:29.json new file mode 100644 index 0000000000000000000000000000000000000000..a39fbb08eff08d5e01b426b8b8e3a319a2f72c2a --- /dev/null +++ b/PathFinderKR/Waktaverse-Llama-3-KO-8B-Instruct/result_2024-05-01 11:03:29.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.37542662116040953, + "acc_stderr": 0.014150631435111726, + "acc_norm": 0.4257679180887372, + "acc_norm_stderr": 0.014449464278868803 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3785102569209321, + "acc_stderr": 0.004840244782805299, + "acc_norm": 0.502688707428799, + "acc_norm_stderr": 0.004989709267191013 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6783625730994152, + "acc_stderr": 0.03582529442573122, + "acc_norm": 0.6783625730994152, + "acc_norm_stderr": 0.03582529442573122 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6893203883495146, + "acc_stderr": 0.045821241601615506, + "acc_norm": 0.6893203883495146, + "acc_norm_stderr": 0.045821241601615506 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5504469987228607, + "acc_stderr": 0.017788725283507337, + "acc_norm": 0.5504469987228607, + "acc_norm_stderr": 0.017788725283507337 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45925925925925926, + "acc_stderr": 0.04304979692464244, + "acc_norm": 0.45925925925925926, + "acc_norm_stderr": 0.04304979692464244 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + 
"harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.48936170212765956, + "acc_stderr": 0.03267862331014063, + "acc_norm": 0.48936170212765956, + "acc_norm_stderr": 0.03267862331014063 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.43373493975903615, + "acc_stderr": 0.03858158940685515, + "acc_norm": 0.43373493975903615, + "acc_norm_stderr": 0.03858158940685515 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5530546623794212, + "acc_stderr": 0.028237769422085345, + "acc_norm": 0.5530546623794212, + "acc_norm_stderr": 0.028237769422085345 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5022421524663677, + "acc_stderr": 0.03355746535223263, + "acc_norm": 0.5022421524663677, + "acc_norm_stderr": 0.03355746535223263 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5572519083969466, + "acc_stderr": 0.043564472026650695, + "acc_norm": 0.5572519083969466, + "acc_norm_stderr": 0.043564472026650695 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6212121212121212, + "acc_stderr": 0.03456088731993747, + "acc_norm": 0.6212121212121212, + "acc_norm_stderr": 0.03456088731993747 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5586206896551724, + "acc_stderr": 0.04137931034482757, + "acc_norm": 0.5586206896551724, + "acc_norm_stderr": 0.04137931034482757 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04690650298201942, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04690650298201942 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6050420168067226, + "acc_stderr": 0.031753678460966245, + "acc_norm": 0.6050420168067226, + "acc_norm_stderr": 0.031753678460966245 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5358974358974359, + "acc_stderr": 0.025285585990017862, + "acc_norm": 
0.5358974358974359, + "acc_norm_stderr": 0.025285585990017862 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.64, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.64, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6203703703703703, + "acc_stderr": 0.04691521224077742, + "acc_norm": 0.6203703703703703, + "acc_norm_stderr": 0.04691521224077742 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.46798029556650245, + "acc_stderr": 0.035107665979592154, + "acc_norm": 0.46798029556650245, + "acc_norm_stderr": 0.035107665979592154 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5580645161290323, + "acc_stderr": 0.028251557906849745, + "acc_norm": 0.5580645161290323, + "acc_norm_stderr": 0.028251557906849745 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7735042735042735, + "acc_stderr": 0.027421007295392926, + "acc_norm": 0.7735042735042735, + "acc_norm_stderr": 0.027421007295392926 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5207547169811321, + "acc_stderr": 0.030746349975723456, + "acc_norm": 0.5207547169811321, + "acc_norm_stderr": 0.030746349975723456 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5272727272727272, + "acc_stderr": 0.04782001791380061, + "acc_norm": 0.5272727272727272, + "acc_norm_stderr": 0.04782001791380061 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.337037037037037, + "acc_stderr": 0.028820884666253252, + "acc_norm": 0.337037037037037, + "acc_norm_stderr": 0.028820884666253252 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3443708609271523, + "acc_stderr": 0.038796870240733264, + "acc_norm": 0.3443708609271523, + "acc_norm_stderr": 0.038796870240733264 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6766169154228856, + "acc_stderr": 
0.03307615947979035, + "acc_norm": 0.6766169154228856, + "acc_norm_stderr": 0.03307615947979035 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4393063583815029, + "acc_stderr": 0.03784271932887467, + "acc_norm": 0.4393063583815029, + "acc_norm_stderr": 0.03784271932887467 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3492063492063492, + "acc_stderr": 0.024552292209342665, + "acc_norm": 0.3492063492063492, + "acc_norm_stderr": 0.024552292209342665 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4583333333333333, + "acc_stderr": 0.04166666666666665, + "acc_norm": 0.4583333333333333, + "acc_norm_stderr": 0.04166666666666665 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.65, + "acc_stderr": 0.04793724854411019, + "acc_norm": 0.65, + "acc_norm_stderr": 0.04793724854411019 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.569364161849711, + "acc_stderr": 0.026658800273672376, + "acc_norm": 0.569364161849711, + "acc_norm_stderr": 0.026658800273672376 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.50920245398773, + "acc_stderr": 0.03927705600787443, + "acc_norm": 0.50920245398773, + "acc_norm_stderr": 0.03927705600787443 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.027648477877413327, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.027648477877413327 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5751295336787565, + "acc_stderr": 0.035674713352125395, + "acc_norm": 0.5751295336787565, + "acc_norm_stderr": 0.035674713352125395 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3684210526315789, + 
"acc_stderr": 0.04537815354939391, + "acc_norm": 0.3684210526315789, + "acc_norm_stderr": 0.04537815354939391 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5981651376146789, + "acc_stderr": 0.021020106172997013, + "acc_norm": 0.5981651376146789, + "acc_norm_stderr": 0.021020106172997013 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3968253968253968, + "acc_stderr": 0.04375888492727061, + "acc_norm": 0.3968253968253968, + "acc_norm_stderr": 0.04375888492727061 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5326797385620915, + "acc_stderr": 0.02856869975222588, + "acc_norm": 0.5326797385620915, + "acc_norm_stderr": 0.02856869975222588 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.63, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.63, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6942148760330579, + "acc_stderr": 0.04205953933884123, + "acc_norm": 0.6942148760330579, + "acc_norm_stderr": 0.04205953933884123 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5197368421052632, + "acc_stderr": 0.04065771002562603, + "acc_norm": 0.5197368421052632, + "acc_norm_stderr": 0.04065771002562603 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.43300653594771243, + "acc_stderr": 0.020045442473324224, + "acc_norm": 0.43300653594771243, + "acc_norm_stderr": 0.020045442473324224 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3723404255319149, + "acc_stderr": 0.028838921471251448, + "acc_norm": 0.3723404255319149, + "acc_norm_stderr": 0.028838921471251448 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.44642857142857145, + "acc_stderr": 0.047184714852195886, + "acc_norm": 0.44642857142857145, + "acc_norm_stderr": 0.047184714852195886 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.0340470532865388, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.0340470532865388 + }, + 
"harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2446927374301676, + "acc_stderr": 0.014378169884098424, + "acc_norm": 0.2446927374301676, + "acc_norm_stderr": 0.014378169884098424 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.7, + "acc_stderr": 0.04605661864718381, + "acc_norm": 0.7, + "acc_norm_stderr": 0.04605661864718381 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4375, + "acc_stderr": 0.030134614954403924, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.030134614954403924 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5918367346938775, + "acc_stderr": 0.03146465712827424, + "acc_norm": 0.5918367346938775, + "acc_norm_stderr": 0.03146465712827424 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6244725738396625, + "acc_stderr": 0.03152256243091157, + "acc_norm": 0.6244725738396625, + "acc_norm_stderr": 0.03152256243091157 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3728813559322034, + "acc_stderr": 0.012350630058333364, + "acc_norm": 0.3728813559322034, + "acc_norm_stderr": 0.012350630058333364 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6078431372549019, + "acc_stderr": 0.034267123492472726, + "acc_norm": 0.6078431372549019, + "acc_norm_stderr": 0.034267123492472726 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.03756335775187896, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.03756335775187896 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.27050183598531213, + "mc1_stderr": 0.015550778332842883, + "mc2": 0.4245858576748032, + "mc2_stderr": 0.015117083520928751 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.46989374262101535, + "acc_stderr": 0.017159163590170216, + "acc_norm": 0.5348288075560803, + "acc_norm_stderr": 
0.017148598015747425 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + 
"harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "PathFinderKR/Waktaverse-Llama-3-KO-8B-Instruct", + "model_sha": "1d550c8fbf7b3694ae53f943a064794615525ffe", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/PerRing/Yi-Ko-6x2B-v0.0/result_2024-01-01 02:20:06.json b/PerRing/Yi-Ko-6x2B-v0.0/result_2024-01-01 02:20:06.json new file mode 100644 index 0000000000000000000000000000000000000000..c4e9b4ba6973c81d813ff76d4b966994efda095c --- /dev/null +++ b/PerRing/Yi-Ko-6x2B-v0.0/result_2024-01-01 02:20:06.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2568259385665529, + "acc_stderr": 0.0127669237941168, + "acc_norm": 0.32849829351535836, + "acc_norm_stderr": 0.013724978465537371 + }, + "harness|ko_hellaswag|10": { + "acc": 0.28809002190798644, + "acc_stderr": 0.004519476835646767, + "acc_norm": 0.36606253734315874, + "acc_norm_stderr": 0.0048074233432245875 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4853801169590643, + "acc_stderr": 0.038331852752130205, + "acc_norm": 0.4853801169590643, + "acc_norm_stderr": 0.038331852752130205 + }, + 
"harness|ko_mmlu_management|5": { + "acc": 0.5631067961165048, + "acc_stderr": 0.04911147107365777, + "acc_norm": 0.5631067961165048, + "acc_norm_stderr": 0.04911147107365777 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5197956577266922, + "acc_stderr": 0.01786594482729163, + "acc_norm": 0.5197956577266922, + "acc_norm_stderr": 0.01786594482729163 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.5407407407407407, + "acc_stderr": 0.04304979692464241, + "acc_norm": 0.5407407407407407, + "acc_norm_stderr": 0.04304979692464241 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.37446808510638296, + "acc_stderr": 0.031639106653672915, + "acc_norm": 0.37446808510638296, + "acc_norm_stderr": 0.031639106653672915 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39759036144578314, + "acc_stderr": 0.03809973084540219, + "acc_norm": 0.39759036144578314, + "acc_norm_stderr": 0.03809973084540219 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5048231511254019, + "acc_stderr": 0.02839677044411129, + "acc_norm": 0.5048231511254019, + "acc_norm_stderr": 0.02839677044411129 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.484304932735426, + "acc_stderr": 0.0335412657542081, + "acc_norm": 0.484304932735426, + "acc_norm_stderr": 0.0335412657542081 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5038167938931297, + "acc_stderr": 0.04385162325601553, + "acc_norm": 0.5038167938931297, + "acc_norm_stderr": 0.04385162325601553 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5909090909090909, + "acc_stderr": 0.03502975799413007, + "acc_norm": 0.5909090909090909, + "acc_norm_stderr": 0.03502975799413007 + }, + 
"harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.503448275862069, + "acc_stderr": 0.04166567577101579, + "acc_norm": 0.503448275862069, + "acc_norm_stderr": 0.04166567577101579 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.044405219061793275, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.044405219061793275 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4579831932773109, + "acc_stderr": 0.032363611119519416, + "acc_norm": 0.4579831932773109, + "acc_norm_stderr": 0.032363611119519416 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.45897435897435895, + "acc_stderr": 0.025265525491284295, + "acc_norm": 0.45897435897435895, + "acc_norm_stderr": 0.025265525491284295 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.62, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.62, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.42592592592592593, + "acc_stderr": 0.0478034362693679, + "acc_norm": 0.42592592592592593, + "acc_norm_stderr": 0.0478034362693679 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3793103448275862, + "acc_stderr": 0.03413963805906235, + "acc_norm": 0.3793103448275862, + "acc_norm_stderr": 0.03413963805906235 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.47096774193548385, + "acc_stderr": 0.028396016402761008, + "acc_norm": 0.47096774193548385, + "acc_norm_stderr": 0.028396016402761008 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6965811965811965, + "acc_stderr": 0.030118210106942645, + "acc_norm": 0.6965811965811965, + "acc_norm_stderr": 0.030118210106942645 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5094339622641509, + "acc_stderr": 0.0307673947078081, + "acc_norm": 
0.5094339622641509, + "acc_norm_stderr": 0.0307673947078081 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.41818181818181815, + "acc_stderr": 0.04724577405731571, + "acc_norm": 0.41818181818181815, + "acc_norm_stderr": 0.04724577405731571 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3111111111111111, + "acc_stderr": 0.02822644674968352, + "acc_norm": 0.3111111111111111, + "acc_norm_stderr": 0.02822644674968352 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33774834437086093, + "acc_stderr": 0.03861557546255169, + "acc_norm": 0.33774834437086093, + "acc_norm_stderr": 0.03861557546255169 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6318407960199005, + "acc_stderr": 0.03410410565495301, + "acc_norm": 0.6318407960199005, + "acc_norm_stderr": 0.03410410565495301 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3988439306358382, + "acc_stderr": 0.03733626655383509, + "acc_norm": 0.3988439306358382, + "acc_norm_stderr": 0.03733626655383509 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.023517294335963286, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.023517294335963286 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4027777777777778, + "acc_stderr": 0.04101405519842426, + "acc_norm": 0.4027777777777778, + "acc_norm_stderr": 0.04101405519842426 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.64, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.64, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4624277456647399, + "acc_stderr": 0.026842985519615375, + "acc_norm": 0.4624277456647399, + "acc_norm_stderr": 0.026842985519615375 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4171779141104294, + "acc_stderr": 
0.03874102859818081, + "acc_norm": 0.4171779141104294, + "acc_norm_stderr": 0.03874102859818081 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.49691358024691357, + "acc_stderr": 0.027820214158594384, + "acc_norm": 0.49691358024691357, + "acc_norm_stderr": 0.027820214158594384 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5751295336787565, + "acc_stderr": 0.0356747133521254, + "acc_norm": 0.5751295336787565, + "acc_norm_stderr": 0.0356747133521254 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.34210526315789475, + "acc_stderr": 0.04462917535336937, + "acc_norm": 0.34210526315789475, + "acc_norm_stderr": 0.04462917535336937 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5577981651376147, + "acc_stderr": 0.021293613207520205, + "acc_norm": 0.5577981651376147, + "acc_norm_stderr": 0.021293613207520205 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.23809523809523808, + "acc_stderr": 0.038095238095238126, + "acc_norm": 0.23809523809523808, + "acc_norm_stderr": 0.038095238095238126 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5098039215686274, + "acc_stderr": 0.028624412550167958, + "acc_norm": 0.5098039215686274, + "acc_norm_stderr": 0.028624412550167958 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6115702479338843, + "acc_stderr": 0.044492703500683836, + "acc_norm": 0.6115702479338843, + "acc_norm_stderr": 0.044492703500683836 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4868421052631579, + "acc_stderr": 0.04067533136309173, + "acc_norm": 0.4868421052631579, + "acc_norm_stderr": 0.04067533136309173 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 
0.4019607843137255, + "acc_stderr": 0.01983517648437539, + "acc_norm": 0.4019607843137255, + "acc_norm_stderr": 0.01983517648437539 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.30851063829787234, + "acc_stderr": 0.027553366165101373, + "acc_norm": 0.30851063829787234, + "acc_norm_stderr": 0.027553366165101373 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2767857142857143, + "acc_stderr": 0.042466243366976235, + "acc_norm": 0.2767857142857143, + "acc_norm_stderr": 0.042466243366976235 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.032568505702936464, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.032568505702936464 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2670391061452514, + "acc_stderr": 0.014796502622562551, + "acc_norm": 0.2670391061452514, + "acc_norm_stderr": 0.014796502622562551 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3492647058823529, + "acc_stderr": 0.028959755196824862, + "acc_norm": 0.3492647058823529, + "acc_norm_stderr": 0.028959755196824862 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4163265306122449, + "acc_stderr": 0.03155782816556162, + "acc_norm": 0.4163265306122449, + "acc_norm_stderr": 0.03155782816556162 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.620253164556962, + "acc_stderr": 0.0315918875296585, + "acc_norm": 0.620253164556962, + "acc_norm_stderr": 0.0315918875296585 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.318122555410691, + "acc_stderr": 0.011895407281104088, + "acc_norm": 0.318122555410691, + "acc_norm_stderr": 
0.011895407281104088 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4852941176470588, + "acc_stderr": 0.03507793834791324, + "acc_norm": 0.4852941176470588, + "acc_norm_stderr": 0.03507793834791324 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5515151515151515, + "acc_stderr": 0.038835659779569286, + "acc_norm": 0.5515151515151515, + "acc_norm_stderr": 0.038835659779569286 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.25458996328029376, + "mc1_stderr": 0.015250117079156461, + "mc2": 0.5089844823562397, + "mc2_stderr": 0.016327586148140113 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.16883116883116883, + "acc_stderr": 0.012879107151477186, + "acc_norm": 0.5430932703659976, + "acc_norm_stderr": 0.017126389093086784 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + 
"harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "PerRing/Yi-Ko-6x2B-v0.0", + "model_sha": "d88cb6da6b532dcade4801b5bd665c2a0f57b142", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/PerRing/Yi-Ko-6x2B-v0.1/result_2023-12-31 14:01:02.json b/PerRing/Yi-Ko-6x2B-v0.1/result_2023-12-31 14:01:02.json new file mode 100644 index 
0000000000000000000000000000000000000000..6b839d4b6bd2dbc0e1ef2726825bca6a3672b608 --- /dev/null +++ b/PerRing/Yi-Ko-6x2B-v0.1/result_2023-12-31 14:01:02.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3361774744027304, + "acc_stderr": 0.013804855026205763, + "acc_norm": 0.37457337883959047, + "acc_norm_stderr": 0.01414419347189345 + }, + "harness|ko_hellaswag|10": { + "acc": 0.37860983867755427, + "acc_stderr": 0.004840493603166213, + "acc_norm": 0.4879506074487154, + "acc_norm_stderr": 0.004988332289642083 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4502923976608187, + "acc_stderr": 0.03815827365913235, + "acc_norm": 0.4502923976608187, + "acc_norm_stderr": 0.03815827365913235 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4368932038834951, + "acc_stderr": 0.04911147107365777, + "acc_norm": 0.4368932038834951, + "acc_norm_stderr": 0.04911147107365777 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.36270753512132825, + "acc_stderr": 0.017192708674602288, + "acc_norm": 0.36270753512132825, + "acc_norm_stderr": 0.017192708674602288 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.31851851851851853, + "acc_stderr": 0.04024778401977111, + "acc_norm": 0.31851851851851853, + "acc_norm_stderr": 0.04024778401977111 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3617021276595745, + "acc_stderr": 0.031410821975962386, + "acc_norm": 0.3617021276595745, + "acc_norm_stderr": 0.031410821975962386 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.40963855421686746, + "acc_stderr": 0.03828401115079023, + "acc_norm": 0.40963855421686746, + "acc_norm_stderr": 0.03828401115079023 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3247588424437299, + "acc_stderr": 0.026596782287697043, + "acc_norm": 0.3247588424437299, + "acc_norm_stderr": 
0.026596782287697043 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4260089686098655, + "acc_stderr": 0.03318833286217282, + "acc_norm": 0.4260089686098655, + "acc_norm_stderr": 0.03318833286217282 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.44274809160305345, + "acc_stderr": 0.043564472026650695, + "acc_norm": 0.44274809160305345, + "acc_norm_stderr": 0.043564472026650695 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.494949494949495, + "acc_stderr": 0.035621707606254015, + "acc_norm": 0.494949494949495, + "acc_norm_stderr": 0.035621707606254015 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3310344827586207, + "acc_stderr": 0.03921545312467122, + "acc_norm": 0.3310344827586207, + "acc_norm_stderr": 0.03921545312467122 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.040233822736177455, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.040233822736177455 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3949579831932773, + "acc_stderr": 0.03175367846096626, + "acc_norm": 0.3949579831932773, + "acc_norm_stderr": 0.03175367846096626 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3871794871794872, + "acc_stderr": 0.02469721693087895, + "acc_norm": 0.3871794871794872, + "acc_norm_stderr": 0.02469721693087895 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932269, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932269 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.04643454608906275, + "acc_norm": 0.3611111111111111, + 
"acc_norm_stderr": 0.04643454608906275 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.031785297106427496, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.031785297106427496 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.33225806451612905, + "acc_stderr": 0.026795560848122797, + "acc_norm": 0.33225806451612905, + "acc_norm_stderr": 0.026795560848122797 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5598290598290598, + "acc_stderr": 0.032520741720630506, + "acc_norm": 0.5598290598290598, + "acc_norm_stderr": 0.032520741720630506 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3433962264150943, + "acc_stderr": 0.029224526469124792, + "acc_norm": 0.3433962264150943, + "acc_norm_stderr": 0.029224526469124792 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4, + "acc_stderr": 0.0469237132203465, + "acc_norm": 0.4, + "acc_norm_stderr": 0.0469237132203465 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.23703703703703705, + "acc_stderr": 0.025928876132766118, + "acc_norm": 0.23703703703703705, + "acc_norm_stderr": 0.025928876132766118 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.038020397601079024, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.038020397601079024 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5124378109452736, + "acc_stderr": 0.03534439848539579, + "acc_norm": 0.5124378109452736, + "acc_norm_stderr": 0.03534439848539579 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.30057803468208094, + "acc_stderr": 0.03496101481191181, + "acc_norm": 0.30057803468208094, + "acc_norm_stderr": 0.03496101481191181 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.21693121693121692, + "acc_stderr": 0.021227082449445035, + "acc_norm": 0.21693121693121692, + "acc_norm_stderr": 0.021227082449445035 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3680555555555556, + 
"acc_stderr": 0.04032999053960718, + "acc_norm": 0.3680555555555556, + "acc_norm_stderr": 0.04032999053960718 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.32947976878612717, + "acc_stderr": 0.025305258131879727, + "acc_norm": 0.32947976878612717, + "acc_norm_stderr": 0.025305258131879727 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.294478527607362, + "acc_stderr": 0.03581165790474082, + "acc_norm": 0.294478527607362, + "acc_norm_stderr": 0.03581165790474082 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.345679012345679, + "acc_stderr": 0.026462487777001872, + "acc_norm": 0.345679012345679, + "acc_norm_stderr": 0.026462487777001872 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.45077720207253885, + "acc_stderr": 0.035909109522355244, + "acc_norm": 0.45077720207253885, + "acc_norm_stderr": 0.035909109522355244 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.040493392977481404, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.040493392977481404 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.41284403669724773, + "acc_stderr": 0.021109128133413913, + "acc_norm": 0.41284403669724773, + "acc_norm_stderr": 0.021109128133413913 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.042163702135578345, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.042163702135578345 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 
0.43790849673202614, + "acc_stderr": 0.02840830202033269, + "acc_norm": 0.43790849673202614, + "acc_norm_stderr": 0.02840830202033269 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.47, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.47, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.3140495867768595, + "acc_stderr": 0.04236964753041018, + "acc_norm": 0.3140495867768595, + "acc_norm_stderr": 0.04236964753041018 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.28289473684210525, + "acc_stderr": 0.03665349695640766, + "acc_norm": 0.28289473684210525, + "acc_norm_stderr": 0.03665349695640766 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3006535947712418, + "acc_stderr": 0.018550634502952964, + "acc_norm": 0.3006535947712418, + "acc_norm_stderr": 0.018550634502952964 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2624113475177305, + "acc_stderr": 0.026244920349843003, + "acc_norm": 0.2624113475177305, + "acc_norm_stderr": 0.026244920349843003 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.39285714285714285, + "acc_stderr": 0.046355501356099754, + "acc_norm": 0.39285714285714285, + "acc_norm_stderr": 0.046355501356099754 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.38425925925925924, + "acc_stderr": 0.03317354514310742, + "acc_norm": 0.38425925925925924, + "acc_norm_stderr": 0.03317354514310742 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.30614525139664805, + "acc_stderr": 0.015414494487903217, + "acc_norm": 0.30614525139664805, + "acc_norm_stderr": 0.015414494487903217 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + 
"harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4485294117647059, + "acc_stderr": 0.030211479609121593, + "acc_norm": 0.4485294117647059, + "acc_norm_stderr": 0.030211479609121593 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2653061224489796, + "acc_stderr": 0.02826388994378462, + "acc_norm": 0.2653061224489796, + "acc_norm_stderr": 0.02826388994378462 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.3881856540084388, + "acc_stderr": 0.031722950043323296, + "acc_norm": 0.3881856540084388, + "acc_norm_stderr": 0.031722950043323296 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.26988265971316816, + "acc_stderr": 0.011337381084250416, + "acc_norm": 0.26988265971316816, + "acc_norm_stderr": 0.011337381084250416 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.35294117647058826, + "acc_stderr": 0.033540924375915195, + "acc_norm": 0.35294117647058826, + "acc_norm_stderr": 0.033540924375915195 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3696969696969697, + "acc_stderr": 0.037694303145125674, + "acc_norm": 0.3696969696969697, + "acc_norm_stderr": 0.037694303145125674 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2594859241126071, + "mc1_stderr": 0.015345409485557952, + "mc2": 0.41635624749599726, + "mc2_stderr": 0.015333440438326292 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3116883116883117, + "acc_stderr": 0.015924567607358338, + "acc_norm": 0.3659976387249115, + "acc_norm_stderr": 0.01656148966489569 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + 
"harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + 
"harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "PerRing/Yi-Ko-6x2B-v0.1", + "model_sha": "805684e66ede4552d5f49e1d3b1841764df7176e", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/PerRing/Yi-Ko-6x2B-v0.2/result_2024-01-01 02:29:13.json b/PerRing/Yi-Ko-6x2B-v0.2/result_2024-01-01 02:29:13.json new file mode 100644 index 0000000000000000000000000000000000000000..5340adee5c5913f31466411826cd10d430323006 --- /dev/null +++ b/PerRing/Yi-Ko-6x2B-v0.2/result_2024-01-01 02:29:13.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3455631399317406, + "acc_stderr": 0.01389693846114567, + "acc_norm": 0.3967576791808874, + "acc_norm_stderr": 0.01429651302018065 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3769169488149771, + "acc_stderr": 0.004836234143655411, + "acc_norm": 0.4865564628560048, + "acc_norm_stderr": 0.004987977492042156 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4269005847953216, + "acc_stderr": 0.03793620616529917, + "acc_norm": 0.4269005847953216, + "acc_norm_stderr": 0.03793620616529917 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4077669902912621, + "acc_stderr": 0.048657775704107696, + "acc_norm": 0.4077669902912621, + "acc_norm_stderr": 0.048657775704107696 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4278416347381865, + "acc_stderr": 0.01769278792780373, + "acc_norm": 0.4278416347381865, + "acc_norm_stderr": 0.01769278792780373 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.04171654161354544, + "acc_norm": 0.37037037037037035, + 
"acc_norm_stderr": 0.04171654161354544 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.34893617021276596, + "acc_stderr": 0.031158522131357783, + "acc_norm": 0.34893617021276596, + "acc_norm_stderr": 0.031158522131357783 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.37349397590361444, + "acc_stderr": 0.03765845117168863, + "acc_norm": 0.37349397590361444, + "acc_norm_stderr": 0.03765845117168863 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.42443729903536975, + "acc_stderr": 0.028071928247946208, + "acc_norm": 0.42443729903536975, + "acc_norm_stderr": 0.028071928247946208 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4170403587443946, + "acc_stderr": 0.03309266936071721, + "acc_norm": 0.4170403587443946, + "acc_norm_stderr": 0.03309266936071721 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.46564885496183206, + "acc_stderr": 0.04374928560599738, + "acc_norm": 0.46564885496183206, + "acc_norm_stderr": 0.04374928560599738 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5, + "acc_stderr": 0.035623524993954825, + "acc_norm": 0.5, + "acc_norm_stderr": 0.035623524993954825 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4, + "acc_stderr": 0.04082482904638628, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04082482904638628 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.18627450980392157, + "acc_stderr": 0.03873958714149354, + "acc_norm": 0.18627450980392157, + "acc_norm_stderr": 0.03873958714149354 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.40756302521008403, + "acc_stderr": 0.031918633744784645, + "acc_norm": 0.40756302521008403, + "acc_norm_stderr": 
0.031918633744784645 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3641025641025641, + "acc_stderr": 0.024396672985094778, + "acc_norm": 0.3641025641025641, + "acc_norm_stderr": 0.024396672985094778 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036845, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036845 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.0471282125742677, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.0471282125742677 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.33004926108374383, + "acc_stderr": 0.033085304262282574, + "acc_norm": 0.33004926108374383, + "acc_norm_stderr": 0.033085304262282574 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3709677419354839, + "acc_stderr": 0.02748054188795359, + "acc_norm": 0.3709677419354839, + "acc_norm_stderr": 0.02748054188795359 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5769230769230769, + "acc_stderr": 0.032366121762202014, + "acc_norm": 0.5769230769230769, + "acc_norm_stderr": 0.032366121762202014 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.36981132075471695, + "acc_stderr": 0.029711421880107926, + "acc_norm": 0.36981132075471695, + "acc_norm_stderr": 0.029711421880107926 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.41818181818181815, + "acc_stderr": 0.0472457740573157, + "acc_norm": 0.41818181818181815, + "acc_norm_stderr": 0.0472457740573157 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2740740740740741, + "acc_stderr": 0.027195934804085626, + "acc_norm": 0.2740740740740741, + "acc_norm_stderr": 0.027195934804085626 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.32450331125827814, + "acc_stderr": 0.03822746937658752, + "acc_norm": 0.32450331125827814, + 
"acc_norm_stderr": 0.03822746937658752 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.48258706467661694, + "acc_stderr": 0.03533389234739245, + "acc_norm": 0.48258706467661694, + "acc_norm_stderr": 0.03533389234739245 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3815028901734104, + "acc_stderr": 0.03703851193099521, + "acc_norm": 0.3815028901734104, + "acc_norm_stderr": 0.03703851193099521 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.02256989707491841, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.02256989707491841 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2638888888888889, + "acc_stderr": 0.03685651095897532, + "acc_norm": 0.2638888888888889, + "acc_norm_stderr": 0.03685651095897532 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.3583815028901734, + "acc_stderr": 0.025816756791584204, + "acc_norm": 0.3583815028901734, + "acc_norm_stderr": 0.025816756791584204 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3006134969325153, + "acc_stderr": 0.03602511318806771, + "acc_norm": 0.3006134969325153, + "acc_norm_stderr": 0.03602511318806771 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3425925925925926, + "acc_stderr": 0.026406145973625672, + "acc_norm": 0.3425925925925926, + "acc_norm_stderr": 0.026406145973625672 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939098, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939098 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.40414507772020725, + "acc_stderr": 0.0354150857888402, + "acc_norm": 
0.40414507772020725, + "acc_norm_stderr": 0.0354150857888402 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2894736842105263, + "acc_stderr": 0.04266339443159394, + "acc_norm": 0.2894736842105263, + "acc_norm_stderr": 0.04266339443159394 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.46238532110091746, + "acc_stderr": 0.021376575274397572, + "acc_norm": 0.46238532110091746, + "acc_norm_stderr": 0.021376575274397572 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.23809523809523808, + "acc_stderr": 0.0380952380952381, + "acc_norm": 0.23809523809523808, + "acc_norm_stderr": 0.0380952380952381 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.40522875816993464, + "acc_stderr": 0.028110928492809068, + "acc_norm": 0.40522875816993464, + "acc_norm_stderr": 0.028110928492809068 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.4214876033057851, + "acc_stderr": 0.045077322787750944, + "acc_norm": 0.4214876033057851, + "acc_norm_stderr": 0.045077322787750944 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.35526315789473684, + "acc_stderr": 0.03894734487013316, + "acc_norm": 0.35526315789473684, + "acc_norm_stderr": 0.03894734487013316 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.32189542483660133, + "acc_stderr": 0.018901015322093092, + "acc_norm": 0.32189542483660133, + "acc_norm_stderr": 0.018901015322093092 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.29432624113475175, + "acc_stderr": 0.0271871270115038, + "acc_norm": 0.29432624113475175, + "acc_norm_stderr": 0.0271871270115038 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.375, + "acc_stderr": 0.04595091388086298, + "acc_norm": 0.375, + "acc_norm_stderr": 0.04595091388086298 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4398148148148148, + "acc_stderr": 
0.03385177976044811, + "acc_norm": 0.4398148148148148, + "acc_norm_stderr": 0.03385177976044811 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2905027932960894, + "acc_stderr": 0.015183844307206153, + "acc_norm": 0.2905027932960894, + "acc_norm_stderr": 0.015183844307206153 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.41, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.44485294117647056, + "acc_stderr": 0.030187532060329383, + "acc_norm": 0.44485294117647056, + "acc_norm_stderr": 0.030187532060329383 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.22448979591836735, + "acc_stderr": 0.026711430555538415, + "acc_norm": 0.22448979591836735, + "acc_norm_stderr": 0.026711430555538415 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.45147679324894513, + "acc_stderr": 0.0323936001739747, + "acc_norm": 0.45147679324894513, + "acc_norm_stderr": 0.0323936001739747 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2770534550195567, + "acc_stderr": 0.01143046244371968, + "acc_norm": 0.2770534550195567, + "acc_norm_stderr": 0.01143046244371968 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.39705882352941174, + "acc_stderr": 0.034341311647191286, + "acc_norm": 0.39705882352941174, + "acc_norm_stderr": 0.034341311647191286 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.34545454545454546, + "acc_stderr": 0.03713158067481913, + "acc_norm": 0.34545454545454546, + "acc_norm_stderr": 0.03713158067481913 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.27906976744186046, + "mc1_stderr": 0.015702107090627877, + "mc2": 0.44048085202099274, + "mc2_stderr": 0.015381401989056739 + }, + 
"harness|ko_commongen_v2|2": { + "acc": 0.3187721369539551, + "acc_stderr": 0.016021427055309588, + "acc_norm": 0.43919716646989376, + "acc_norm_stderr": 0.0170627757447807 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + 
"harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "PerRing/Yi-Ko-6x2B-v0.2", + "model_sha": "1847566fd5b2bcd7a320e7af58b6bd8d97f905c5", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/PerRing/Yi-Ko-6x2B-v0.3/result_2024-01-02 01:07:22.json b/PerRing/Yi-Ko-6x2B-v0.3/result_2024-01-02 01:07:22.json new file mode 100644 index 0000000000000000000000000000000000000000..f5a22cdc242f84075870f1e820d9fb35e5520010 --- /dev/null +++ b/PerRing/Yi-Ko-6x2B-v0.3/result_2024-01-02 01:07:22.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3302047781569966, + "acc_stderr": 0.013743085603760434, + "acc_norm": 0.3839590443686007, + "acc_norm_stderr": 0.014212444980651892 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3813981278629755, + "acc_stderr": 0.004847372670134639, + "acc_norm": 0.49063931487751444, + "acc_norm_stderr": 0.0049889069013077355 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 
0.4502923976608187, + "acc_stderr": 0.038158273659132366, + "acc_norm": 0.4502923976608187, + "acc_norm_stderr": 0.038158273659132366 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4077669902912621, + "acc_stderr": 0.048657775704107696, + "acc_norm": 0.4077669902912621, + "acc_norm_stderr": 0.048657775704107696 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.42656449553001274, + "acc_stderr": 0.01768606697567564, + "acc_norm": 0.42656449553001274, + "acc_norm_stderr": 0.01768606697567564 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.42962962962962964, + "acc_stderr": 0.04276349494376598, + "acc_norm": 0.42962962962962964, + "acc_norm_stderr": 0.04276349494376598 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3659574468085106, + "acc_stderr": 0.0314895582974553, + "acc_norm": 0.3659574468085106, + "acc_norm_stderr": 0.0314895582974553 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3674698795180723, + "acc_stderr": 0.03753267402120574, + "acc_norm": 0.3674698795180723, + "acc_norm_stderr": 0.03753267402120574 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.41479099678456594, + "acc_stderr": 0.027982680459759553, + "acc_norm": 0.41479099678456594, + "acc_norm_stderr": 0.027982680459759553 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4125560538116592, + "acc_stderr": 0.03304062175449297, + "acc_norm": 0.4125560538116592, + "acc_norm_stderr": 0.03304062175449297 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4732824427480916, + "acc_stderr": 0.04379024936553894, + "acc_norm": 0.4732824427480916, + "acc_norm_stderr": 0.04379024936553894 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939098, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939098 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 
0.4595959595959596, + "acc_stderr": 0.035507024651313425, + "acc_norm": 0.4595959595959596, + "acc_norm_stderr": 0.035507024651313425 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.33793103448275863, + "acc_stderr": 0.0394170763206489, + "acc_norm": 0.33793103448275863, + "acc_norm_stderr": 0.0394170763206489 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.18627450980392157, + "acc_stderr": 0.038739587141493524, + "acc_norm": 0.18627450980392157, + "acc_norm_stderr": 0.038739587141493524 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3865546218487395, + "acc_stderr": 0.031631458075523804, + "acc_norm": 0.3865546218487395, + "acc_norm_stderr": 0.031631458075523804 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3230769230769231, + "acc_stderr": 0.023710888501970565, + "acc_norm": 0.3230769230769231, + "acc_norm_stderr": 0.023710888501970565 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.04766075165356461, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.04766075165356461 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.33004926108374383, + "acc_stderr": 0.033085304262282574, + "acc_norm": 0.33004926108374383, + "acc_norm_stderr": 0.033085304262282574 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3419354838709677, + "acc_stderr": 0.026985289576552756, + "acc_norm": 0.3419354838709677, + "acc_norm_stderr": 0.026985289576552756 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6324786324786325, + "acc_stderr": 0.031585391577456365, + "acc_norm": 0.6324786324786325, + "acc_norm_stderr": 0.031585391577456365 + }, 
+ "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3471698113207547, + "acc_stderr": 0.029300101705549655, + "acc_norm": 0.3471698113207547, + "acc_norm_stderr": 0.029300101705549655 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.43636363636363634, + "acc_stderr": 0.04750185058907297, + "acc_norm": 0.43636363636363634, + "acc_norm_stderr": 0.04750185058907297 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.02730914058823017, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.02730914058823017 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.03780445850526732, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.03780445850526732 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.4975124378109453, + "acc_stderr": 0.03535490150137288, + "acc_norm": 0.4975124378109453, + "acc_norm_stderr": 0.03535490150137288 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3352601156069364, + "acc_stderr": 0.03599586301247078, + "acc_norm": 0.3352601156069364, + "acc_norm_stderr": 0.03599586301247078 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.25396825396825395, + "acc_stderr": 0.022418042891113935, + "acc_norm": 0.25396825396825395, + "acc_norm_stderr": 0.022418042891113935 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3263888888888889, + "acc_stderr": 0.03921067198982266, + "acc_norm": 0.3263888888888889, + "acc_norm_stderr": 0.03921067198982266 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.36416184971098264, + "acc_stderr": 0.025906632631016117, + "acc_norm": 0.36416184971098264, + 
"acc_norm_stderr": 0.025906632631016117 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3374233128834356, + "acc_stderr": 0.037149084099355745, + "acc_norm": 0.3374233128834356, + "acc_norm_stderr": 0.037149084099355745 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.39197530864197533, + "acc_stderr": 0.027163686038271236, + "acc_norm": 0.39197530864197533, + "acc_norm_stderr": 0.027163686038271236 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.38860103626943004, + "acc_stderr": 0.035177397963731316, + "acc_norm": 0.38860103626943004, + "acc_norm_stderr": 0.035177397963731316 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.04096985139843671, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.04096985139843671 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.4, + "acc_stderr": 0.021004201260420075, + "acc_norm": 0.4, + "acc_norm_stderr": 0.021004201260420075 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.25396825396825395, + "acc_stderr": 0.03893259610604673, + "acc_norm": 0.25396825396825395, + "acc_norm_stderr": 0.03893259610604673 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.39869281045751637, + "acc_stderr": 0.02803609227389177, + "acc_norm": 0.39869281045751637, + "acc_norm_stderr": 0.02803609227389177 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.47107438016528924, + "acc_stderr": 0.04556710331269498, + "acc_norm": 0.47107438016528924, + "acc_norm_stderr": 0.04556710331269498 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.375, + "acc_stderr": 0.039397364351956274, + "acc_norm": 0.375, + "acc_norm_stderr": 
0.039397364351956274 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3382352941176471, + "acc_stderr": 0.019139943748487043, + "acc_norm": 0.3382352941176471, + "acc_norm_stderr": 0.019139943748487043 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.30141843971631205, + "acc_stderr": 0.027374128882631157, + "acc_norm": 0.30141843971631205, + "acc_norm_stderr": 0.027374128882631157 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.04547960999764377, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.04547960999764377 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.02988691054762696, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.02988691054762696 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.29832402234636873, + "acc_stderr": 0.01530184004512927, + "acc_norm": 0.29832402234636873, + "acc_norm_stderr": 0.01530184004512927 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.28308823529411764, + "acc_stderr": 0.02736586113151381, + "acc_norm": 0.28308823529411764, + "acc_norm_stderr": 0.02736586113151381 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2816326530612245, + "acc_stderr": 0.02879518557429129, + "acc_norm": 0.2816326530612245, + "acc_norm_stderr": 0.02879518557429129 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.4219409282700422, + "acc_stderr": 0.032148146302403695, + "acc_norm": 0.4219409282700422, + "acc_norm_stderr": 0.032148146302403695 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2835723598435463, + 
"acc_stderr": 0.011511900775968325, + "acc_norm": 0.2835723598435463, + "acc_norm_stderr": 0.011511900775968325 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.29901960784313725, + "acc_stderr": 0.032133257173736156, + "acc_norm": 0.29901960784313725, + "acc_norm_stderr": 0.032133257173736156 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3878787878787879, + "acc_stderr": 0.03804913653971011, + "acc_norm": 0.3878787878787879, + "acc_norm_stderr": 0.03804913653971011 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.26438188494492043, + "mc1_stderr": 0.015438211119522517, + "mc2": 0.41582498658203726, + "mc2_stderr": 0.01527388876150569 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3659976387249115, + "acc_stderr": 0.016561489664895703, + "acc_norm": 0.46162927981109797, + "acc_norm_stderr": 0.017139660221845557 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + 
"harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "PerRing/Yi-Ko-6x2B-v0.3", + "model_sha": "683f2c0bac0a90a7c5f0946f197122c1741d9126", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/PerRing/Yi-Ko-6x2B-v1/result_2024-01-06 07:20:47.json b/PerRing/Yi-Ko-6x2B-v1/result_2024-01-06 
07:20:47.json new file mode 100644 index 0000000000000000000000000000000000000000..259af497703ed0b307878c8b40bf3606cb0cef98 --- /dev/null +++ b/PerRing/Yi-Ko-6x2B-v1/result_2024-01-06 07:20:47.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.32849829351535836, + "acc_stderr": 0.013724978465537377, + "acc_norm": 0.39590443686006827, + "acc_norm_stderr": 0.014291228393536585 + }, + "harness|ko_hellaswag|10": { + "acc": 0.38408683529177456, + "acc_stderr": 0.004853845750392152, + "acc_norm": 0.5100577574188409, + "acc_norm_stderr": 0.004988771791854518 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5321637426900585, + "acc_stderr": 0.038268824176603704, + "acc_norm": 0.5321637426900585, + "acc_norm_stderr": 0.038268824176603704 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.47572815533980584, + "acc_stderr": 0.049449010929737795, + "acc_norm": 0.47572815533980584, + "acc_norm_stderr": 0.049449010929737795 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4904214559386973, + "acc_stderr": 0.017876682275340852, + "acc_norm": 0.4904214559386973, + "acc_norm_stderr": 0.017876682275340852 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4740740740740741, + "acc_stderr": 0.04313531696750573, + "acc_norm": 0.4740740740740741, + "acc_norm_stderr": 0.04313531696750573 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621503, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621503 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.37872340425531914, + "acc_stderr": 0.03170995606040655, + "acc_norm": 0.37872340425531914, + "acc_norm_stderr": 0.03170995606040655 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39156626506024095, + "acc_stderr": 0.03799857454479636, + "acc_norm": 0.39156626506024095, + "acc_norm_stderr": 0.03799857454479636 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4630225080385852, + "acc_stderr": 0.02832032583010591, + "acc_norm": 
0.4630225080385852, + "acc_norm_stderr": 0.02832032583010591 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4798206278026906, + "acc_stderr": 0.033530461674123, + "acc_norm": 0.4798206278026906, + "acc_norm_stderr": 0.033530461674123 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4961832061068702, + "acc_stderr": 0.043851623256015534, + "acc_norm": 0.4961832061068702, + "acc_norm_stderr": 0.043851623256015534 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.41, + "acc_stderr": 0.04943110704237103, + "acc_norm": 0.41, + "acc_norm_stderr": 0.04943110704237103 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5303030303030303, + "acc_stderr": 0.03555804051763929, + "acc_norm": 0.5303030303030303, + "acc_norm_stderr": 0.03555804051763929 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3793103448275862, + "acc_stderr": 0.04043461861916747, + "acc_norm": 0.3793103448275862, + "acc_norm_stderr": 0.04043461861916747 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237654, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237654 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.40336134453781514, + "acc_stderr": 0.03186608121408831, + "acc_norm": 0.40336134453781514, + "acc_norm_stderr": 0.03186608121408831 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.37948717948717947, + "acc_stderr": 0.024603626924097417, + "acc_norm": 0.37948717948717947, + "acc_norm_stderr": 0.024603626924097417 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5092592592592593, + "acc_stderr": 0.04832853553437055, + 
"acc_norm": 0.5092592592592593, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3448275862068966, + "acc_stderr": 0.033442837442804574, + "acc_norm": 0.3448275862068966, + "acc_norm_stderr": 0.033442837442804574 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4032258064516129, + "acc_stderr": 0.02790615082604114, + "acc_norm": 0.4032258064516129, + "acc_norm_stderr": 0.02790615082604114 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6923076923076923, + "acc_stderr": 0.0302363899421731, + "acc_norm": 0.6923076923076923, + "acc_norm_stderr": 0.0302363899421731 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4037735849056604, + "acc_stderr": 0.03019761160019795, + "acc_norm": 0.4037735849056604, + "acc_norm_stderr": 0.03019761160019795 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.04769300568972744, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.04769300568972744 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2518518518518518, + "acc_stderr": 0.02646611753895992, + "acc_norm": 0.2518518518518518, + "acc_norm_stderr": 0.02646611753895992 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2052980132450331, + "acc_stderr": 0.03297986648473836, + "acc_norm": 0.2052980132450331, + "acc_norm_stderr": 0.03297986648473836 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5472636815920398, + "acc_stderr": 0.03519702717576915, + "acc_norm": 0.5472636815920398, + "acc_norm_stderr": 0.03519702717576915 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3815028901734104, + "acc_stderr": 0.037038511930995215, + "acc_norm": 0.3815028901734104, + "acc_norm_stderr": 0.037038511930995215 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.0230681888482611, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.0230681888482611 + }, + 
"harness|ko_mmlu_college_biology|5": { + "acc": 0.3402777777777778, + "acc_stderr": 0.03962135573486219, + "acc_norm": 0.3402777777777778, + "acc_norm_stderr": 0.03962135573486219 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.407514450867052, + "acc_stderr": 0.0264545781469315, + "acc_norm": 0.407514450867052, + "acc_norm_stderr": 0.0264545781469315 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.39263803680981596, + "acc_stderr": 0.03836740907831027, + "acc_norm": 0.39263803680981596, + "acc_norm_stderr": 0.03836740907831027 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4567901234567901, + "acc_stderr": 0.027716661650194038, + "acc_norm": 0.4567901234567901, + "acc_norm_stderr": 0.027716661650194038 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.04605661864718381, + "acc_norm": 0.3, + "acc_norm_stderr": 0.04605661864718381 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.49222797927461137, + "acc_stderr": 0.03608003225569653, + "acc_norm": 0.49222797927461137, + "acc_norm_stderr": 0.03608003225569653 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.04185774424022057, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.04185774424022057 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.42752293577981654, + "acc_stderr": 0.02121091020430044, + "acc_norm": 0.42752293577981654, + "acc_norm_stderr": 0.02121091020430044 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3492063492063492, + "acc_stderr": 0.042639068927951315, + "acc_norm": 0.3492063492063492, + "acc_norm_stderr": 0.042639068927951315 + 
}, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4019607843137255, + "acc_stderr": 0.028074158947600656, + "acc_norm": 0.4019607843137255, + "acc_norm_stderr": 0.028074158947600656 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5702479338842975, + "acc_stderr": 0.04519082021319772, + "acc_norm": 0.5702479338842975, + "acc_norm_stderr": 0.04519082021319772 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3355263157894737, + "acc_stderr": 0.038424985593952694, + "acc_norm": 0.3355263157894737, + "acc_norm_stderr": 0.038424985593952694 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.39869281045751637, + "acc_stderr": 0.01980828131744985, + "acc_norm": 0.39869281045751637, + "acc_norm_stderr": 0.01980828131744985 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3049645390070922, + "acc_stderr": 0.027464708442022142, + "acc_norm": 0.3049645390070922, + "acc_norm_stderr": 0.027464708442022142 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3392857142857143, + "acc_stderr": 0.0449394906861354, + "acc_norm": 0.3392857142857143, + "acc_norm_stderr": 0.0449394906861354 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.24537037037037038, + "acc_stderr": 0.029346665094372944, + "acc_norm": 0.24537037037037038, + "acc_norm_stderr": 0.029346665094372944 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 
0.050251890762960605 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3014705882352941, + "acc_stderr": 0.027875982114273168, + "acc_norm": 0.3014705882352941, + "acc_norm_stderr": 0.027875982114273168 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.34285714285714286, + "acc_stderr": 0.03038726291954773, + "acc_norm": 0.34285714285714286, + "acc_norm_stderr": 0.03038726291954773 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.4978902953586498, + "acc_stderr": 0.032546938018020076, + "acc_norm": 0.4978902953586498, + "acc_norm_stderr": 0.032546938018020076 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.288135593220339, + "acc_stderr": 0.011567140661324565, + "acc_norm": 0.288135593220339, + "acc_norm_stderr": 0.011567140661324565 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.39705882352941174, + "acc_stderr": 0.03434131164719129, + "acc_norm": 0.39705882352941174, + "acc_norm_stderr": 0.03434131164719129 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.49696969696969695, + "acc_stderr": 0.03904272341431856, + "acc_norm": 0.49696969696969695, + "acc_norm_stderr": 0.03904272341431856 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.26193390452876375, + "mc1_stderr": 0.015392118805015008, + "mc2": 0.41246750618008304, + "mc2_stderr": 0.015072168715859086 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4203069657615112, + "acc_stderr": 0.016970598281177703, + "acc_norm": 0.5182998819362455, + "acc_norm_stderr": 0.017178836639177776 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 
1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + 
"harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "PerRing/Yi-Ko-6x2B-v1", + "model_sha": "d439bfb4d0feb31671af14f2b4edebed17261a2a", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/PracticeLLM/Custom-KoLLM-13B-v1/result_2023-11-23 04:55:42.json b/PracticeLLM/Custom-KoLLM-13B-v1/result_2023-11-23 04:55:42.json new file mode 100644 index 0000000000000000000000000000000000000000..941e5461aef7888e8806bdd71b0ac36dd6976752 --- /dev/null +++ b/PracticeLLM/Custom-KoLLM-13B-v1/result_2023-11-23 04:55:42.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.40187713310580203, + "acc_stderr": 0.014327268614578274, + "acc_norm": 0.4598976109215017, + "acc_norm_stderr": 0.014564318856924848 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4257120095598486, + "acc_stderr": 0.0049343998025949045, + "acc_norm": 0.569308902609042, + "acc_norm_stderr": 0.004941609820763589 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4853801169590643, + "acc_stderr": 0.038331852752130205, + "acc_norm": 0.4853801169590643, + "acc_norm_stderr": 0.038331852752130205 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5145631067961165, + "acc_stderr": 0.04948637324026637, + "acc_norm": 0.5145631067961165, + "acc_norm_stderr": 0.04948637324026637 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5312899106002554, + "acc_stderr": 0.01784491809046855, + "acc_norm": 0.5312899106002554, + "acc_norm_stderr": 0.01784491809046855 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45925925925925926, + "acc_stderr": 0.04304979692464244, + "acc_norm": 
0.45925925925925926, + "acc_norm_stderr": 0.04304979692464244 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206824, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206824 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.37872340425531914, + "acc_stderr": 0.03170995606040655, + "acc_norm": 0.37872340425531914, + "acc_norm_stderr": 0.03170995606040655 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39759036144578314, + "acc_stderr": 0.038099730845402184, + "acc_norm": 0.39759036144578314, + "acc_norm_stderr": 0.038099730845402184 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4790996784565916, + "acc_stderr": 0.028373270961069414, + "acc_norm": 0.4790996784565916, + "acc_norm_stderr": 0.028373270961069414 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.484304932735426, + "acc_stderr": 0.0335412657542081, + "acc_norm": 0.484304932735426, + "acc_norm_stderr": 0.0335412657542081 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.44274809160305345, + "acc_stderr": 0.0435644720266507, + "acc_norm": 0.44274809160305345, + "acc_norm_stderr": 0.0435644720266507 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5404040404040404, + "acc_stderr": 0.035507024651313425, + "acc_norm": 0.5404040404040404, + "acc_norm_stderr": 0.035507024651313425 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.43448275862068964, + "acc_stderr": 0.04130740879555497, + "acc_norm": 0.43448275862068964, + "acc_norm_stderr": 0.04130740879555497 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.04280105837364395, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.04280105837364395 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4579831932773109, + "acc_stderr": 
0.032363611119519416, + "acc_norm": 0.4579831932773109, + "acc_norm_stderr": 0.032363611119519416 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4076923076923077, + "acc_stderr": 0.024915243985987844, + "acc_norm": 0.4076923076923077, + "acc_norm_stderr": 0.024915243985987844 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.35960591133004927, + "acc_stderr": 0.03376458246509567, + "acc_norm": 0.35960591133004927, + "acc_norm_stderr": 0.03376458246509567 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4258064516129032, + "acc_stderr": 0.0281291127091659, + "acc_norm": 0.4258064516129032, + "acc_norm_stderr": 0.0281291127091659 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6196581196581197, + "acc_stderr": 0.03180425204384099, + "acc_norm": 0.6196581196581197, + "acc_norm_stderr": 0.03180425204384099 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.42641509433962266, + "acc_stderr": 0.03043779434298305, + "acc_norm": 0.42641509433962266, + "acc_norm_stderr": 0.03043779434298305 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5, + "acc_stderr": 0.04789131426105757, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04789131426105757 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2518518518518518, + "acc_stderr": 0.02646611753895992, + "acc_norm": 0.2518518518518518, + "acc_norm_stderr": 0.02646611753895992 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.271523178807947, + "acc_stderr": 0.03631329803969653, + 
"acc_norm": 0.271523178807947, + "acc_norm_stderr": 0.03631329803969653 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5572139303482587, + "acc_stderr": 0.03512310964123937, + "acc_norm": 0.5572139303482587, + "acc_norm_stderr": 0.03512310964123937 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3583815028901734, + "acc_stderr": 0.03656343653353159, + "acc_norm": 0.3583815028901734, + "acc_norm_stderr": 0.03656343653353159 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.26455026455026454, + "acc_stderr": 0.02271746789770861, + "acc_norm": 0.26455026455026454, + "acc_norm_stderr": 0.02271746789770861 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.040166600304512336, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.040166600304512336 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.58, + "acc_stderr": 0.04960449637488584, + "acc_norm": 0.58, + "acc_norm_stderr": 0.04960449637488584 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4884393063583815, + "acc_stderr": 0.026911898686377927, + "acc_norm": 0.4884393063583815, + "acc_norm_stderr": 0.026911898686377927 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4601226993865031, + "acc_stderr": 0.0391585729143697, + "acc_norm": 0.4601226993865031, + "acc_norm_stderr": 0.0391585729143697 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.45987654320987653, + "acc_stderr": 0.027731022753539274, + "acc_norm": 0.45987654320987653, + "acc_norm_stderr": 0.027731022753539274 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5233160621761658, + "acc_stderr": 
0.036045136724422014, + "acc_norm": 0.5233160621761658, + "acc_norm_stderr": 0.036045136724422014 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2894736842105263, + "acc_stderr": 0.04266339443159395, + "acc_norm": 0.2894736842105263, + "acc_norm_stderr": 0.04266339443159395 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5467889908256881, + "acc_stderr": 0.021343255165546044, + "acc_norm": 0.5467889908256881, + "acc_norm_stderr": 0.021343255165546044 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30158730158730157, + "acc_stderr": 0.041049472699033945, + "acc_norm": 0.30158730158730157, + "acc_norm_stderr": 0.041049472699033945 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4084967320261438, + "acc_stderr": 0.028146405993096358, + "acc_norm": 0.4084967320261438, + "acc_norm_stderr": 0.028146405993096358 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.42, + "acc_stderr": 0.04960449637488584, + "acc_norm": 0.42, + "acc_norm_stderr": 0.04960449637488584 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6033057851239669, + "acc_stderr": 0.044658697805310094, + "acc_norm": 0.6033057851239669, + "acc_norm_stderr": 0.044658697805310094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.35526315789473684, + "acc_stderr": 0.038947344870133176, + "acc_norm": 0.35526315789473684, + "acc_norm_stderr": 0.038947344870133176 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3660130718954248, + "acc_stderr": 0.019488025745529675, + "acc_norm": 0.3660130718954248, + "acc_norm_stderr": 0.019488025745529675 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.31560283687943264, + "acc_stderr": 0.027724989449509314, + "acc_norm": 0.31560283687943264, + "acc_norm_stderr": 0.027724989449509314 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.04287858751340455, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.04287858751340455 + }, + 
"harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.03141554629402544, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.03141554629402544 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4264705882352941, + "acc_stderr": 0.030042615832714857, + "acc_norm": 0.4264705882352941, + "acc_norm_stderr": 0.030042615832714857 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.031680911612338825, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.031680911612338825 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5611814345991561, + "acc_stderr": 0.032302649315470375, + "acc_norm": 0.5611814345991561, + "acc_norm_stderr": 0.032302649315470375 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.32333767926988266, + "acc_stderr": 0.011946565758447197, + "acc_norm": 0.32333767926988266, + "acc_norm_stderr": 0.011946565758447197 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.03484941514429231, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.03484941514429231 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5696969696969697, + "acc_stderr": 0.038662259628790774, + "acc_norm": 0.5696969696969697, + "acc_norm_stderr": 0.038662259628790774 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.25703794369645044, + "mc1_stderr": 0.015298077509485083, + 
"mc2": 0.4166001481815424, + "mc2_stderr": 0.014823792417118063 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5619834710743802, + "acc_stderr": 0.01705775370216029, + "acc_norm": 0.6458087367178277, + "acc_norm_stderr": 0.01644317574921476 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + 
"harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "PracticeLLM/Custom-KoLLM-13B-v1", + "model_sha": "f2647ab150f35533ea3197cf7a6a18d21422df44", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/PracticeLLM/Custom-KoLLM-13B-v2/result_2023-11-25 07:15:53.json b/PracticeLLM/Custom-KoLLM-13B-v2/result_2023-11-25 07:15:53.json new file mode 100644 index 0000000000000000000000000000000000000000..5623fd28fb8d89dd7197385e61a2a7b2b1f2cbbe --- /dev/null +++ b/PracticeLLM/Custom-KoLLM-13B-v2/result_2023-11-25 07:15:53.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.386518771331058, + "acc_stderr": 0.014230084761910471, + "acc_norm": 0.45733788395904434, + "acc_norm_stderr": 0.01455810654392406 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4255128460466043, + "acc_stderr": 0.004934100774481221, + "acc_norm": 0.5697072296355308, + "acc_norm_stderr": 
0.004941051795214796 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.03811079669833531, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.03811079669833531 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4368932038834951, + "acc_stderr": 0.04911147107365777, + "acc_norm": 0.4368932038834951, + "acc_norm_stderr": 0.04911147107365777 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.524904214559387, + "acc_stderr": 0.01785777070490103, + "acc_norm": 0.524904214559387, + "acc_norm_stderr": 0.01785777070490103 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45185185185185184, + "acc_stderr": 0.04299268905480864, + "acc_norm": 0.45185185185185184, + "acc_norm_stderr": 0.04299268905480864 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847415, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847415 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.41702127659574467, + "acc_stderr": 0.03223276266711712, + "acc_norm": 0.41702127659574467, + "acc_norm_stderr": 0.03223276266711712 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3855421686746988, + "acc_stderr": 0.03789134424611548, + "acc_norm": 0.3855421686746988, + "acc_norm_stderr": 0.03789134424611548 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4565916398713826, + "acc_stderr": 0.0282908690541976, + "acc_norm": 0.4565916398713826, + "acc_norm_stderr": 0.0282908690541976 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.45739910313901344, + "acc_stderr": 0.033435777055830646, + "acc_norm": 0.45739910313901344, + "acc_norm_stderr": 0.033435777055830646 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4122137404580153, + "acc_stderr": 0.043171711948702535, + "acc_norm": 0.4122137404580153, + "acc_norm_stderr": 0.043171711948702535 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 
0.0479372485441102 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5202020202020202, + "acc_stderr": 0.03559443565563919, + "acc_norm": 0.5202020202020202, + "acc_norm_stderr": 0.03559443565563919 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3103448275862069, + "acc_stderr": 0.03855289616378947, + "acc_norm": 0.3103448275862069, + "acc_norm_stderr": 0.03855289616378947 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237654, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237654 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4117647058823529, + "acc_stderr": 0.03196876989195778, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.03196876989195778 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.38974358974358975, + "acc_stderr": 0.024726967886647074, + "acc_norm": 0.38974358974358975, + "acc_norm_stderr": 0.024726967886647074 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.44, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.44, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.35467980295566504, + "acc_stderr": 0.0336612448905145, + "acc_norm": 0.35467980295566504, + "acc_norm_stderr": 0.0336612448905145 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3903225806451613, + "acc_stderr": 0.027751256636969576, + "acc_norm": 0.3903225806451613, + "acc_norm_stderr": 0.027751256636969576 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6196581196581197, + "acc_stderr": 0.031804252043840985, + 
"acc_norm": 0.6196581196581197, + "acc_norm_stderr": 0.031804252043840985 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4226415094339623, + "acc_stderr": 0.030402331445769537, + "acc_norm": 0.4226415094339623, + "acc_norm_stderr": 0.030402331445769537 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.0478833976870286, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.0478833976870286 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.026719240783712156, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.026719240783712156 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + "acc_stderr": 0.03684881521389024, + "acc_norm": 0.2847682119205298, + "acc_norm_stderr": 0.03684881521389024 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.47761194029850745, + "acc_stderr": 0.03531987930208731, + "acc_norm": 0.47761194029850745, + "acc_norm_stderr": 0.03531987930208731 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3352601156069364, + "acc_stderr": 0.035995863012470784, + "acc_norm": 0.3352601156069364, + "acc_norm_stderr": 0.035995863012470784 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.26455026455026454, + "acc_stderr": 0.022717467897708617, + "acc_norm": 0.26455026455026454, + "acc_norm_stderr": 0.022717467897708617 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3472222222222222, + "acc_stderr": 0.039812405437178615, + "acc_norm": 0.3472222222222222, + "acc_norm_stderr": 0.039812405437178615 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.31, + "acc_stderr": 0.046482319871173156, + "acc_norm": 0.31, + "acc_norm_stderr": 0.046482319871173156 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 
0.41040462427745666, + "acc_stderr": 0.026483392042098174, + "acc_norm": 0.41040462427745666, + "acc_norm_stderr": 0.026483392042098174 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4110429447852761, + "acc_stderr": 0.038656978537853624, + "acc_norm": 0.4110429447852761, + "acc_norm_stderr": 0.038656978537853624 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.027339546640662737, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.027339546640662737 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.43005181347150256, + "acc_stderr": 0.03572954333144808, + "acc_norm": 0.43005181347150256, + "acc_norm_stderr": 0.03572954333144808 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.040969851398436716, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.040969851398436716 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.4954128440366973, + "acc_stderr": 0.021436420955529414, + "acc_norm": 0.4954128440366973, + "acc_norm_stderr": 0.021436420955529414 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.04006168083848879, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.04006168083848879 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3758169934640523, + "acc_stderr": 0.027732834353363944, + "acc_norm": 0.3758169934640523, + "acc_norm_stderr": 0.027732834353363944 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5702479338842975, + "acc_stderr": 0.04519082021319772, + "acc_norm": 0.5702479338842975, + "acc_norm_stderr": 0.04519082021319772 + }, + 
"harness|ko_mmlu_astronomy|5": { + "acc": 0.3355263157894737, + "acc_stderr": 0.038424985593952694, + "acc_norm": 0.3355263157894737, + "acc_norm_stderr": 0.038424985593952694 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3545751633986928, + "acc_stderr": 0.01935336054755371, + "acc_norm": 0.3545751633986928, + "acc_norm_stderr": 0.01935336054755371 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.28368794326241137, + "acc_stderr": 0.026891709428343957, + "acc_norm": 0.28368794326241137, + "acc_norm_stderr": 0.026891709428343957 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25892857142857145, + "acc_stderr": 0.041577515398656284, + "acc_norm": 0.25892857142857145, + "acc_norm_stderr": 0.041577515398656284 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.24537037037037038, + "acc_stderr": 0.029346665094372937, + "acc_norm": 0.24537037037037038, + "acc_norm_stderr": 0.029346665094372937 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.34558823529411764, + "acc_stderr": 0.028888193103988644, + "acc_norm": 0.34558823529411764, + "acc_norm_stderr": 0.028888193103988644 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.37551020408163266, + "acc_stderr": 0.03100120903989484, + "acc_norm": 0.37551020408163266, + "acc_norm_stderr": 0.03100120903989484 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.569620253164557, + "acc_stderr": 0.03223017195937598, + 
"acc_norm": 0.569620253164557, + "acc_norm_stderr": 0.03223017195937598 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.29335071707953064, + "acc_stderr": 0.011628520449582075, + "acc_norm": 0.29335071707953064, + "acc_norm_stderr": 0.011628520449582075 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.034602283272391704, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.034602283272391704 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.503030303030303, + "acc_stderr": 0.039042723414318574, + "acc_norm": 0.503030303030303, + "acc_norm_stderr": 0.039042723414318574 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2460220318237454, + "mc1_stderr": 0.015077219200662564, + "mc2": 0.38751888371590926, + "mc2_stderr": 0.014562542001309185 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5064935064935064, + "acc_stderr": 0.017188904359077307, + "acc_norm": 0.6115702479338843, + "acc_norm_stderr": 0.016756921571069422 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + 
"harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "PracticeLLM/Custom-KoLLM-13B-v2", + "model_sha": "c309f5b24994489e7d44ef766605e5559e730a22", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + 
"override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/PracticeLLM/Custom-KoLLM-13B-v3/result_2023-11-26 07:38:53.json b/PracticeLLM/Custom-KoLLM-13B-v3/result_2023-11-26 07:38:53.json new file mode 100644 index 0000000000000000000000000000000000000000..61e89bc5920ccdd3c632624a8964102f626041e9 --- /dev/null +++ b/PracticeLLM/Custom-KoLLM-13B-v3/result_2023-11-26 07:38:53.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3848122866894198, + "acc_stderr": 0.014218371065251104, + "acc_norm": 0.447098976109215, + "acc_norm_stderr": 0.014529380160526842 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4239195379406493, + "acc_stderr": 0.004931679059919374, + "acc_norm": 0.5689105755825533, + "acc_norm_stderr": 0.004942164585991475 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5146198830409356, + "acc_stderr": 0.038331852752130254, + "acc_norm": 0.5146198830409356, + "acc_norm_stderr": 0.038331852752130254 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5145631067961165, + "acc_stderr": 0.04948637324026637, + "acc_norm": 0.5145631067961165, + "acc_norm_stderr": 0.04948637324026637 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5300127713920817, + "acc_stderr": 0.01784772308664908, + "acc_norm": 0.5300127713920817, + "acc_norm_stderr": 0.01784772308664908 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45925925925925926, + "acc_stderr": 0.04304979692464244, + "acc_norm": 0.45925925925925926, + "acc_norm_stderr": 0.04304979692464244 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421255, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421255 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39574468085106385, + "acc_stderr": 0.031967586978353627, + "acc_norm": 0.39574468085106385, + "acc_norm_stderr": 0.031967586978353627 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3855421686746988, + "acc_stderr": 
0.03789134424611548, + "acc_norm": 0.3855421686746988, + "acc_norm_stderr": 0.03789134424611548 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.43086816720257237, + "acc_stderr": 0.028125340983972714, + "acc_norm": 0.43086816720257237, + "acc_norm_stderr": 0.028125340983972714 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5067264573991032, + "acc_stderr": 0.03355476596234354, + "acc_norm": 0.5067264573991032, + "acc_norm_stderr": 0.03355476596234354 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.40458015267175573, + "acc_stderr": 0.043046937953806645, + "acc_norm": 0.40458015267175573, + "acc_norm_stderr": 0.043046937953806645 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5353535353535354, + "acc_stderr": 0.03553436368828063, + "acc_norm": 0.5353535353535354, + "acc_norm_stderr": 0.03553436368828063 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3724137931034483, + "acc_stderr": 0.0402873153294756, + "acc_norm": 0.3724137931034483, + "acc_norm_stderr": 0.0402873153294756 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237655, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237655 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.40756302521008403, + "acc_stderr": 0.03191863374478465, + "acc_norm": 0.40756302521008403, + "acc_norm_stderr": 0.03191863374478465 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.41794871794871796, + "acc_stderr": 0.025007329882461224, + "acc_norm": 0.41794871794871796, + "acc_norm_stderr": 0.025007329882461224 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_global_facts|5": { + 
"acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4537037037037037, + "acc_stderr": 0.04812917324536823, + "acc_norm": 0.4537037037037037, + "acc_norm_stderr": 0.04812917324536823 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39901477832512317, + "acc_stderr": 0.03445487686264716, + "acc_norm": 0.39901477832512317, + "acc_norm_stderr": 0.03445487686264716 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4290322580645161, + "acc_stderr": 0.02815603653823321, + "acc_norm": 0.4290322580645161, + "acc_norm_stderr": 0.02815603653823321 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6367521367521367, + "acc_stderr": 0.03150712523091264, + "acc_norm": 0.6367521367521367, + "acc_norm_stderr": 0.03150712523091264 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.44150943396226416, + "acc_stderr": 0.030561590426731833, + "acc_norm": 0.44150943396226416, + "acc_norm_stderr": 0.030561590426731833 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4818181818181818, + "acc_stderr": 0.04785964010794916, + "acc_norm": 0.4818181818181818, + "acc_norm_stderr": 0.04785964010794916 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.28888888888888886, + "acc_stderr": 0.027634907264178544, + "acc_norm": 0.28888888888888886, + "acc_norm_stderr": 0.027634907264178544 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.26490066225165565, + "acc_stderr": 0.03603038545360384, + "acc_norm": 0.26490066225165565, + "acc_norm_stderr": 0.03603038545360384 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.48756218905472637, + "acc_stderr": 0.03534439848539579, + "acc_norm": 0.48756218905472637, + "acc_norm_stderr": 0.03534439848539579 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3988439306358382, + "acc_stderr": 0.03733626655383509, + "acc_norm": 0.3988439306358382, + "acc_norm_stderr": 
0.03733626655383509 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.29894179894179895, + "acc_stderr": 0.02357760479165582, + "acc_norm": 0.29894179894179895, + "acc_norm_stderr": 0.02357760479165582 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.375, + "acc_stderr": 0.04048439222695598, + "acc_norm": 0.375, + "acc_norm_stderr": 0.04048439222695598 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.3, + "acc_stderr": 0.04605661864718381, + "acc_norm": 0.3, + "acc_norm_stderr": 0.04605661864718381 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4421965317919075, + "acc_stderr": 0.026738603643807403, + "acc_norm": 0.4421965317919075, + "acc_norm_stderr": 0.026738603643807403 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.44785276073619634, + "acc_stderr": 0.03906947479456601, + "acc_norm": 0.44785276073619634, + "acc_norm_stderr": 0.03906947479456601 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.43209876543209874, + "acc_stderr": 0.027563010971606676, + "acc_norm": 0.43209876543209874, + "acc_norm_stderr": 0.027563010971606676 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5440414507772021, + "acc_stderr": 0.035944137112724366, + "acc_norm": 0.5440414507772021, + "acc_norm_stderr": 0.035944137112724366 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.04185774424022058, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.04185774424022058 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5302752293577981, + "acc_stderr": 0.021397988604936965, + "acc_norm": 0.5302752293577981, + "acc_norm_stderr": 
0.021397988604936965 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.04006168083848876, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.04006168083848876 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.369281045751634, + "acc_stderr": 0.02763417668960266, + "acc_norm": 0.369281045751634, + "acc_norm_stderr": 0.02763417668960266 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6115702479338843, + "acc_stderr": 0.044492703500683836, + "acc_norm": 0.6115702479338843, + "acc_norm_stderr": 0.044492703500683836 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3618421052631579, + "acc_stderr": 0.039105257528497236, + "acc_norm": 0.3618421052631579, + "acc_norm_stderr": 0.039105257528497236 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.35784313725490197, + "acc_stderr": 0.019393058402355445, + "acc_norm": 0.35784313725490197, + "acc_norm_stderr": 0.019393058402355445 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3191489361702128, + "acc_stderr": 0.0278079901413202, + "acc_norm": 0.3191489361702128, + "acc_norm_stderr": 0.0278079901413202 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.24107142857142858, + "acc_stderr": 0.04059867246952687, + "acc_norm": 0.24107142857142858, + "acc_norm_stderr": 0.04059867246952687 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.21296296296296297, + "acc_stderr": 0.02792096314799366, + "acc_norm": 0.21296296296296297, + "acc_norm_stderr": 0.02792096314799366 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + 
"acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.35661764705882354, + "acc_stderr": 0.029097209568411962, + "acc_norm": 0.35661764705882354, + "acc_norm_stderr": 0.029097209568411962 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.37142857142857144, + "acc_stderr": 0.030932858792789848, + "acc_norm": 0.37142857142857144, + "acc_norm_stderr": 0.030932858792789848 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5949367088607594, + "acc_stderr": 0.031955147413706725, + "acc_norm": 0.5949367088607594, + "acc_norm_stderr": 0.031955147413706725 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.29139504563233376, + "acc_stderr": 0.011605720214257617, + "acc_norm": 0.29139504563233376, + "acc_norm_stderr": 0.011605720214257617 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.43137254901960786, + "acc_stderr": 0.03476099060501637, + "acc_norm": 0.43137254901960786, + "acc_norm_stderr": 0.03476099060501637 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5575757575757576, + "acc_stderr": 0.03878372113711275, + "acc_norm": 0.5575757575757576, + "acc_norm_stderr": 0.03878372113711275 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.27539779681762544, + "mc1_stderr": 0.015638135667775527, + "mc2": 0.44221166184758365, + "mc2_stderr": 0.014852170962038991 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4025974025974026, + "acc_stderr": 0.01686102048640777, + "acc_norm": 0.45336481700118064, + "acc_norm_stderr": 0.01711541822522687 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + 
"harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + 
"harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "PracticeLLM/Custom-KoLLM-13B-v3", + "model_sha": "65f37e4173e111f31c7094387a9de5627f9d3536", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/PracticeLLM/Custom-KoLLM-13B-v4/result_2023-11-27 15:16:50.json b/PracticeLLM/Custom-KoLLM-13B-v4/result_2023-11-27 15:16:50.json new file mode 100644 index 0000000000000000000000000000000000000000..f51874a4250f51350f5679c754e397fe7b4cd7ab --- /dev/null +++ b/PracticeLLM/Custom-KoLLM-13B-v4/result_2023-11-27 15:16:50.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3924914675767918, + "acc_stderr": 0.014269634635670722, + "acc_norm": 0.45051194539249145, + "acc_norm_stderr": 0.014539646098471627 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4253136825333599, + "acc_stderr": 0.004933800927560533, + "acc_norm": 0.5706034654451304, + "acc_norm_stderr": 0.004939784311448985 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.47953216374269003, + "acc_stderr": 0.0383161053282193, + "acc_norm": 0.47953216374269003, + "acc_norm_stderr": 0.0383161053282193 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5436893203883495, + "acc_stderr": 0.049318019942204146, + "acc_norm": 0.5436893203883495, + "acc_norm_stderr": 0.049318019942204146 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 
0.5478927203065134, + "acc_stderr": 0.01779775149386563, + "acc_norm": 0.5478927203065134, + "acc_norm_stderr": 0.01779775149386563 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4740740740740741, + "acc_stderr": 0.04313531696750573, + "acc_norm": 0.4740740740740741, + "acc_norm_stderr": 0.04313531696750573 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421255, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421255 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4, + "acc_stderr": 0.03202563076101735, + "acc_norm": 0.4, + "acc_norm_stderr": 0.03202563076101735 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4036144578313253, + "acc_stderr": 0.03819486140758398, + "acc_norm": 0.4036144578313253, + "acc_norm_stderr": 0.03819486140758398 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.45980707395498394, + "acc_stderr": 0.028306190403305696, + "acc_norm": 0.45980707395498394, + "acc_norm_stderr": 0.028306190403305696 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4663677130044843, + "acc_stderr": 0.033481800170603065, + "acc_norm": 0.4663677130044843, + "acc_norm_stderr": 0.033481800170603065 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5038167938931297, + "acc_stderr": 0.043851623256015534, + "acc_norm": 0.5038167938931297, + "acc_norm_stderr": 0.043851623256015534 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.035476014940069384, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.035476014940069384 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.38620689655172413, + "acc_stderr": 0.04057324734419034, + "acc_norm": 0.38620689655172413, + "acc_norm_stderr": 0.04057324734419034 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.19607843137254902, 
+ "acc_stderr": 0.03950581861179962, + "acc_norm": 0.19607843137254902, + "acc_norm_stderr": 0.03950581861179962 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.39915966386554624, + "acc_stderr": 0.03181110032413926, + "acc_norm": 0.39915966386554624, + "acc_norm_stderr": 0.03181110032413926 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4358974358974359, + "acc_stderr": 0.025141801511177498, + "acc_norm": 0.4358974358974359, + "acc_norm_stderr": 0.025141801511177498 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.04826217294139894, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.04826217294139894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.35467980295566504, + "acc_stderr": 0.033661244890514495, + "acc_norm": 0.35467980295566504, + "acc_norm_stderr": 0.033661244890514495 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.44193548387096776, + "acc_stderr": 0.028251557906849738, + "acc_norm": 0.44193548387096776, + "acc_norm_stderr": 0.028251557906849738 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6196581196581197, + "acc_stderr": 0.03180425204384099, + "acc_norm": 0.6196581196581197, + "acc_norm_stderr": 0.03180425204384099 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.43018867924528303, + "acc_stderr": 0.030471445867183238, + "acc_norm": 0.43018867924528303, + "acc_norm_stderr": 0.030471445867183238 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4818181818181818, + "acc_stderr": 0.04785964010794916, + "acc_norm": 0.4818181818181818, + "acc_norm_stderr": 0.04785964010794916 + }, + 
"harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.23703703703703705, + "acc_stderr": 0.02592887613276612, + "acc_norm": 0.23703703703703705, + "acc_norm_stderr": 0.02592887613276612 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2781456953642384, + "acc_stderr": 0.03658603262763743, + "acc_norm": 0.2781456953642384, + "acc_norm_stderr": 0.03658603262763743 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5373134328358209, + "acc_stderr": 0.03525675167467974, + "acc_norm": 0.5373134328358209, + "acc_norm_stderr": 0.03525675167467974 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4046242774566474, + "acc_stderr": 0.0374246119388725, + "acc_norm": 0.4046242774566474, + "acc_norm_stderr": 0.0374246119388725 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.022644212615525218, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.022644212615525218 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4027777777777778, + "acc_stderr": 0.04101405519842425, + "acc_norm": 0.4027777777777778, + "acc_norm_stderr": 0.04101405519842425 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.47398843930635837, + "acc_stderr": 0.02688264343402289, + "acc_norm": 0.47398843930635837, + "acc_norm_stderr": 0.02688264343402289 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.49079754601226994, + "acc_stderr": 0.03927705600787443, + "acc_norm": 0.49079754601226994, + "acc_norm_stderr": 0.03927705600787443 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.02774431344337654, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 
0.02774431344337654 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5233160621761658, + "acc_stderr": 0.03604513672442202, + "acc_norm": 0.5233160621761658, + "acc_norm_stderr": 0.03604513672442202 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.30701754385964913, + "acc_stderr": 0.0433913832257986, + "acc_norm": 0.30701754385964913, + "acc_norm_stderr": 0.0433913832257986 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5761467889908257, + "acc_stderr": 0.02118726320908751, + "acc_norm": 0.5761467889908257, + "acc_norm_stderr": 0.02118726320908751 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2698412698412698, + "acc_stderr": 0.03970158273235173, + "acc_norm": 0.2698412698412698, + "acc_norm_stderr": 0.03970158273235173 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.41830065359477125, + "acc_stderr": 0.028245134024387296, + "acc_norm": 0.41830065359477125, + "acc_norm_stderr": 0.028245134024387296 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.41, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.41, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5619834710743802, + "acc_stderr": 0.045291468044357915, + "acc_norm": 0.5619834710743802, + "acc_norm_stderr": 0.045291468044357915 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3881578947368421, + "acc_stderr": 0.03965842097512744, + "acc_norm": 0.3881578947368421, + "acc_norm_stderr": 0.03965842097512744 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.35784313725490197, + "acc_stderr": 0.019393058402355442, + "acc_norm": 0.35784313725490197, + "acc_norm_stderr": 0.019393058402355442 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.02812163604063989, + 
"acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.02812163604063989 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.26785714285714285, + "acc_stderr": 0.04203277291467762, + "acc_norm": 0.26785714285714285, + "acc_norm_stderr": 0.04203277291467762 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.031674687068289784, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.031674687068289784 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24581005586592178, + "acc_stderr": 0.014400296429225608, + "acc_norm": 0.24581005586592178, + "acc_norm_stderr": 0.014400296429225608 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.40808823529411764, + "acc_stderr": 0.029855261393483927, + "acc_norm": 0.40808823529411764, + "acc_norm_stderr": 0.029855261393483927 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4163265306122449, + "acc_stderr": 0.031557828165561644, + "acc_norm": 0.4163265306122449, + "acc_norm_stderr": 0.031557828165561644 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5822784810126582, + "acc_stderr": 0.032103530322412685, + "acc_norm": 0.5822784810126582, + "acc_norm_stderr": 0.032103530322412685 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.31421121251629724, + "acc_stderr": 0.011855911587048228, + "acc_norm": 0.31421121251629724, + "acc_norm_stderr": 0.011855911587048228 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4362745098039216, + "acc_stderr": 0.03480693138457039, + "acc_norm": 0.4362745098039216, + "acc_norm_stderr": 0.03480693138457039 + }, + 
"harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5575757575757576, + "acc_stderr": 0.03878372113711275, + "acc_norm": 0.5575757575757576, + "acc_norm_stderr": 0.03878372113711275 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2766217870257038, + "mc1_stderr": 0.01565960575532691, + "mc2": 0.42927679239357447, + "mc2_stderr": 0.014965776733274934 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5324675324675324, + "acc_stderr": 0.01715407371668287, + "acc_norm": 0.6257378984651711, + "acc_norm_stderr": 0.016637917789798746 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + 
"harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "PracticeLLM/Custom-KoLLM-13B-v4", + "model_sha": "64859181e99108e5033e34ea2a5162400bb1a803", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/PracticeLLM/Custom-KoLLM-13B-v5/result_2023-11-28 18:53:32.json b/PracticeLLM/Custom-KoLLM-13B-v5/result_2023-11-28 18:53:32.json new file mode 100644 index 0000000000000000000000000000000000000000..f554eb385be264cb3e51f16f0c70dea28d631ad0 --- /dev/null +++ b/PracticeLLM/Custom-KoLLM-13B-v5/result_2023-11-28 18:53:32.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + 
"acc": 0.3779863481228669, + "acc_stderr": 0.014169664520303094, + "acc_norm": 0.44880546075085326, + "acc_norm_stderr": 0.014534599585097669 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4272057359091814, + "acc_stderr": 0.00493661642892264, + "acc_norm": 0.5674168492332204, + "acc_norm_stderr": 0.004944215937021391 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.45614035087719296, + "acc_stderr": 0.03820042586602966, + "acc_norm": 0.45614035087719296, + "acc_norm_stderr": 0.03820042586602966 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5533980582524272, + "acc_stderr": 0.04922424153458933, + "acc_norm": 0.5533980582524272, + "acc_norm_stderr": 0.04922424153458933 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.541507024265645, + "acc_stderr": 0.017818248603465578, + "acc_norm": 0.541507024265645, + "acc_norm_stderr": 0.017818248603465578 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45925925925925926, + "acc_stderr": 0.04304979692464244, + "acc_norm": 0.45925925925925926, + "acc_norm_stderr": 0.04304979692464244 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4, + "acc_stderr": 0.03202563076101737, + "acc_norm": 0.4, + "acc_norm_stderr": 0.03202563076101737 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.40963855421686746, + "acc_stderr": 0.038284011150790206, + "acc_norm": 0.40963855421686746, + "acc_norm_stderr": 0.038284011150790206 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5080385852090032, + "acc_stderr": 0.028394421370984545, + "acc_norm": 0.5080385852090032, + "acc_norm_stderr": 0.028394421370984545 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4798206278026906, + "acc_stderr": 0.033530461674123005, + "acc_norm": 0.4798206278026906, + "acc_norm_stderr": 0.033530461674123005 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 
0.40458015267175573, + "acc_stderr": 0.043046937953806645, + "acc_norm": 0.40458015267175573, + "acc_norm_stderr": 0.043046937953806645 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5303030303030303, + "acc_stderr": 0.03555804051763929, + "acc_norm": 0.5303030303030303, + "acc_norm_stderr": 0.03555804051763929 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4206896551724138, + "acc_stderr": 0.0411391498118926, + "acc_norm": 0.4206896551724138, + "acc_norm_stderr": 0.0411391498118926 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.040233822736177476, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.040233822736177476 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4327731092436975, + "acc_stderr": 0.03218358107742613, + "acc_norm": 0.4327731092436975, + "acc_norm_stderr": 0.03218358107742613 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4358974358974359, + "acc_stderr": 0.025141801511177498, + "acc_norm": 0.4358974358974359, + "acc_norm_stderr": 0.025141801511177498 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.04826217294139894, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.04826217294139894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3645320197044335, + "acc_stderr": 0.0338640574606209, + "acc_norm": 0.3645320197044335, + "acc_norm_stderr": 0.0338640574606209 + }, + 
"harness|ko_mmlu_high_school_biology|5": { + "acc": 0.46774193548387094, + "acc_stderr": 0.02838474778881333, + "acc_norm": 0.46774193548387094, + "acc_norm_stderr": 0.02838474778881333 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6367521367521367, + "acc_stderr": 0.03150712523091265, + "acc_norm": 0.6367521367521367, + "acc_norm_stderr": 0.03150712523091265 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4226415094339623, + "acc_stderr": 0.03040233144576954, + "acc_norm": 0.4226415094339623, + "acc_norm_stderr": 0.03040233144576954 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.45454545454545453, + "acc_stderr": 0.04769300568972743, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.04769300568972743 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.026962424325073824, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.026962424325073824 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33112582781456956, + "acc_stderr": 0.038425817186598696, + "acc_norm": 0.33112582781456956, + "acc_norm_stderr": 0.038425817186598696 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5671641791044776, + "acc_stderr": 0.03503490923673282, + "acc_norm": 0.5671641791044776, + "acc_norm_stderr": 0.03503490923673282 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3815028901734104, + "acc_stderr": 0.03703851193099521, + "acc_norm": 0.3815028901734104, + "acc_norm_stderr": 0.03703851193099521 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.29894179894179895, + "acc_stderr": 0.02357760479165581, + "acc_norm": 0.29894179894179895, + "acc_norm_stderr": 0.02357760479165581 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3819444444444444, + "acc_stderr": 0.040629907841466674, + "acc_norm": 0.3819444444444444, + "acc_norm_stderr": 0.040629907841466674 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.31, + "acc_stderr": 0.046482319871173156, + 
"acc_norm": 0.31, + "acc_norm_stderr": 0.046482319871173156 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.45375722543352603, + "acc_stderr": 0.02680372058320617, + "acc_norm": 0.45375722543352603, + "acc_norm_stderr": 0.02680372058320617 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.48466257668711654, + "acc_stderr": 0.039265223787088445, + "acc_norm": 0.48466257668711654, + "acc_norm_stderr": 0.039265223787088445 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4351851851851852, + "acc_stderr": 0.027586006221607718, + "acc_norm": 0.4351851851851852, + "acc_norm_stderr": 0.027586006221607718 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709390974, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709390974 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5181347150259067, + "acc_stderr": 0.036060650018329185, + "acc_norm": 0.5181347150259067, + "acc_norm_stderr": 0.036060650018329185 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.30701754385964913, + "acc_stderr": 0.0433913832257986, + "acc_norm": 0.30701754385964913, + "acc_norm_stderr": 0.0433913832257986 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5302752293577981, + "acc_stderr": 0.021397988604936965, + "acc_norm": 0.5302752293577981, + "acc_norm_stderr": 0.021397988604936965 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3253968253968254, + "acc_stderr": 0.04190596438871136, + "acc_norm": 0.3253968253968254, + "acc_norm_stderr": 0.04190596438871136 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.43137254901960786, + "acc_stderr": 0.02835895631342355, + "acc_norm": 0.43137254901960786, + "acc_norm_stderr": 0.02835895631342355 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.49, + "acc_stderr": 
0.05024183937956912, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5950413223140496, + "acc_stderr": 0.04481137755942469, + "acc_norm": 0.5950413223140496, + "acc_norm_stderr": 0.04481137755942469 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3815789473684211, + "acc_stderr": 0.03953173377749194, + "acc_norm": 0.3815789473684211, + "acc_norm_stderr": 0.03953173377749194 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3562091503267974, + "acc_stderr": 0.019373332420724507, + "acc_norm": 0.3562091503267974, + "acc_norm_stderr": 0.019373332420724507 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.31560283687943264, + "acc_stderr": 0.027724989449509314, + "acc_norm": 0.31560283687943264, + "acc_norm_stderr": 0.027724989449509314 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25, + "acc_stderr": 0.04109974682633932, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04109974682633932 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.031141447823536023, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.031141447823536023 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4375, + "acc_stderr": 0.030134614954403924, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.030134614954403924 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4, + "acc_stderr": 0.03136250240935893, + 
"acc_norm": 0.4, + "acc_norm_stderr": 0.03136250240935893 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5822784810126582, + "acc_stderr": 0.032103530322412685, + "acc_norm": 0.5822784810126582, + "acc_norm_stderr": 0.032103530322412685 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.31290743155149936, + "acc_stderr": 0.011842529823063, + "acc_norm": 0.31290743155149936, + "acc_norm_stderr": 0.011842529823063 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4264705882352941, + "acc_stderr": 0.03471157907953425, + "acc_norm": 0.4264705882352941, + "acc_norm_stderr": 0.03471157907953425 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.03888176921674101, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.03888176921674101 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2729498164014688, + "mc1_stderr": 0.01559475363200651, + "mc2": 0.42820899219390524, + "mc2_stderr": 0.014856416829616066 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5064935064935064, + "acc_stderr": 0.017188904359077307, + "acc_norm": 0.6080283353010626, + "acc_norm_stderr": 0.016784332119424088 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + 
"harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "PracticeLLM/Custom-KoLLM-13B-v5", + "model_sha": 
"c1190fbd8699492d5e87775303df899f326f47da", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/PracticeLLM/Custom-KoLLM-13B-v6/result_2023-11-29 06:51:23.json b/PracticeLLM/Custom-KoLLM-13B-v6/result_2023-11-29 06:51:23.json new file mode 100644 index 0000000000000000000000000000000000000000..dee84f024c92701e67e6064f518ef3a5e4d452c2 --- /dev/null +++ b/PracticeLLM/Custom-KoLLM-13B-v6/result_2023-11-29 06:51:23.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.39334470989761094, + "acc_stderr": 0.014275101465693024, + "acc_norm": 0.4522184300341297, + "acc_norm_stderr": 0.014544519880633832 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4245170284803824, + "acc_stderr": 0.00493259334881362, + "acc_norm": 0.5660227046405099, + "acc_norm_stderr": 0.004946089230153022 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.47368421052631576, + "acc_stderr": 0.038295098689947286, + "acc_norm": 0.47368421052631576, + "acc_norm_stderr": 0.038295098689947286 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5339805825242718, + "acc_stderr": 0.0493929144727348, + "acc_norm": 0.5339805825242718, + "acc_norm_stderr": 0.0493929144727348 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5376756066411239, + "acc_stderr": 0.017829131764287187, + "acc_norm": 0.5376756066411239, + "acc_norm_stderr": 0.017829131764287187 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.43703703703703706, + "acc_stderr": 0.04284958639753399, + "acc_norm": 0.43703703703703706, + "acc_norm_stderr": 0.04284958639753399 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206824, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206824 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4127659574468085, + "acc_stderr": 0.03218471141400351, + 
"acc_norm": 0.4127659574468085, + "acc_norm_stderr": 0.03218471141400351 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42168674698795183, + "acc_stderr": 0.038444531817709175, + "acc_norm": 0.42168674698795183, + "acc_norm_stderr": 0.038444531817709175 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4662379421221865, + "acc_stderr": 0.02833327710956278, + "acc_norm": 0.4662379421221865, + "acc_norm_stderr": 0.02833327710956278 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.48878923766816146, + "acc_stderr": 0.033549366530984746, + "acc_norm": 0.48878923766816146, + "acc_norm_stderr": 0.033549366530984746 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.46564885496183206, + "acc_stderr": 0.043749285605997376, + "acc_norm": 0.46564885496183206, + "acc_norm_stderr": 0.043749285605997376 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5656565656565656, + "acc_stderr": 0.03531505879359182, + "acc_norm": 0.5656565656565656, + "acc_norm_stderr": 0.03531505879359182 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3724137931034483, + "acc_stderr": 0.0402873153294756, + "acc_norm": 0.3724137931034483, + "acc_norm_stderr": 0.0402873153294756 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.042801058373643966, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.042801058373643966 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.46638655462184875, + "acc_stderr": 0.03240501447690071, + "acc_norm": 0.46638655462184875, + "acc_norm_stderr": 0.03240501447690071 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.441025641025641, + "acc_stderr": 0.025174048384000777, + "acc_norm": 0.441025641025641, + "acc_norm_stderr": 0.025174048384000777 + }, + "harness|ko_mmlu_computer_security|5": { 
+ "acc": 0.58, + "acc_stderr": 0.04960449637488583, + "acc_norm": 0.58, + "acc_norm_stderr": 0.04960449637488583 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5092592592592593, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.5092592592592593, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3891625615763547, + "acc_stderr": 0.034304624161038716, + "acc_norm": 0.3891625615763547, + "acc_norm_stderr": 0.034304624161038716 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.47419354838709676, + "acc_stderr": 0.02840609505765332, + "acc_norm": 0.47419354838709676, + "acc_norm_stderr": 0.02840609505765332 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6410256410256411, + "acc_stderr": 0.03142616993791924, + "acc_norm": 0.6410256410256411, + "acc_norm_stderr": 0.03142616993791924 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4490566037735849, + "acc_stderr": 0.030612730713641092, + "acc_norm": 0.4490566037735849, + "acc_norm_stderr": 0.030612730713641092 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4727272727272727, + "acc_stderr": 0.04782001791380063, + "acc_norm": 0.4727272727272727, + "acc_norm_stderr": 0.04782001791380063 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.027309140588230175, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.027309140588230175 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.03757949922943342, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.03757949922943342 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5472636815920398, + "acc_stderr": 0.03519702717576915, + "acc_norm": 0.5472636815920398, + "acc_norm_stderr": 0.03519702717576915 + }, + 
"harness|ko_mmlu_college_medicine|5": { + "acc": 0.35260115606936415, + "acc_stderr": 0.036430371689585496, + "acc_norm": 0.35260115606936415, + "acc_norm_stderr": 0.036430371689585496 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2698412698412698, + "acc_stderr": 0.022860838309232072, + "acc_norm": 0.2698412698412698, + "acc_norm_stderr": 0.022860838309232072 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3472222222222222, + "acc_stderr": 0.039812405437178615, + "acc_norm": 0.3472222222222222, + "acc_norm_stderr": 0.039812405437178615 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237101, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237101 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5, + "acc_stderr": 0.026919095102908273, + "acc_norm": 0.5, + "acc_norm_stderr": 0.026919095102908273 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.44785276073619634, + "acc_stderr": 0.03906947479456601, + "acc_norm": 0.44785276073619634, + "acc_norm_stderr": 0.03906947479456601 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4567901234567901, + "acc_stderr": 0.02771666165019404, + "acc_norm": 0.4567901234567901, + "acc_norm_stderr": 0.02771666165019404 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5233160621761658, + "acc_stderr": 0.036045136724422014, + "acc_norm": 0.5233160621761658, + "acc_norm_stderr": 0.036045136724422014 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2894736842105263, + "acc_stderr": 0.04266339443159394, + "acc_norm": 0.2894736842105263, + "acc_norm_stderr": 0.04266339443159394 + }, + 
"harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5376146788990825, + "acc_stderr": 0.021376575274397576, + "acc_norm": 0.5376146788990825, + "acc_norm_stderr": 0.021376575274397576 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3253968253968254, + "acc_stderr": 0.04190596438871136, + "acc_norm": 0.3253968253968254, + "acc_norm_stderr": 0.04190596438871136 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4150326797385621, + "acc_stderr": 0.028213504177824093, + "acc_norm": 0.4150326797385621, + "acc_norm_stderr": 0.028213504177824093 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.44, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.44, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6033057851239669, + "acc_stderr": 0.044658697805310094, + "acc_norm": 0.6033057851239669, + "acc_norm_stderr": 0.044658697805310094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.35526315789473684, + "acc_stderr": 0.03894734487013317, + "acc_norm": 0.35526315789473684, + "acc_norm_stderr": 0.03894734487013317 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.39052287581699346, + "acc_stderr": 0.019737008998094607, + "acc_norm": 0.39052287581699346, + "acc_norm_stderr": 0.019737008998094607 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.30141843971631205, + "acc_stderr": 0.02737412888263115, + "acc_norm": 0.30141843971631205, + "acc_norm_stderr": 0.02737412888263115 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2767857142857143, + "acc_stderr": 0.04246624336697624, + "acc_norm": 0.2767857142857143, + "acc_norm_stderr": 0.04246624336697624 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.32407407407407407, + "acc_stderr": 0.03191923445686185, + "acc_norm": 0.32407407407407407, + "acc_norm_stderr": 0.03191923445686185 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 
0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.41911764705882354, + "acc_stderr": 0.029972807170464626, + "acc_norm": 0.41911764705882354, + "acc_norm_stderr": 0.029972807170464626 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.37551020408163266, + "acc_stderr": 0.03100120903989484, + "acc_norm": 0.37551020408163266, + "acc_norm_stderr": 0.03100120903989484 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6118143459915611, + "acc_stderr": 0.0317229500433233, + "acc_norm": 0.6118143459915611, + "acc_norm_stderr": 0.0317229500433233 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3122555410691004, + "acc_stderr": 0.011835798135683185, + "acc_norm": 0.3122555410691004, + "acc_norm_stderr": 0.011835798135683185 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.46568627450980393, + "acc_stderr": 0.03501038327635897, + "acc_norm": 0.46568627450980393, + "acc_norm_stderr": 0.03501038327635897 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5272727272727272, + "acc_stderr": 0.03898531605579418, + "acc_norm": 0.5272727272727272, + "acc_norm_stderr": 0.03898531605579418 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2766217870257038, + "mc1_stderr": 0.01565960575532691, + "mc2": 0.42310297040812733, + "mc2_stderr": 0.01480446318735434 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4498229043683589, + "acc_stderr": 0.01710357334382571, + "acc_norm": 0.5242030696576151, + "acc_norm_stderr": 0.017170202466520748 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + 
"harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + 
"harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "PracticeLLM/Custom-KoLLM-13B-v6", + "model_sha": "4e3b471cca7e769c11dbb9f30ca3dd4256911d66", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/PracticeLLM/Custom-KoLLM-13B-v7/result_2023-11-30 15:59:42.json b/PracticeLLM/Custom-KoLLM-13B-v7/result_2023-11-30 15:59:42.json new file mode 100644 index 0000000000000000000000000000000000000000..bcd3cb0193b4be77c7ef29b9dbbe9bbd73ca5b94 --- /dev/null +++ b/PracticeLLM/Custom-KoLLM-13B-v7/result_2023-11-30 15:59:42.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.39505119453924914, + "acc_stderr": 0.014285898292938162, + "acc_norm": 0.4590443686006826, + "acc_norm_stderr": 0.014562291073601229 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4246166102370046, + "acc_stderr": 0.004932745013072717, + "acc_norm": 0.5680143397729536, + "acc_norm_stderr": 0.004943400892881046 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.47368421052631576, + "acc_stderr": 0.038295098689947286, + "acc_norm": 0.47368421052631576, + "acc_norm_stderr": 0.038295098689947286 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5825242718446602, + "acc_stderr": 
0.048828405482122375, + "acc_norm": 0.5825242718446602, + "acc_norm_stderr": 0.048828405482122375 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5325670498084292, + "acc_stderr": 0.017841995750520867, + "acc_norm": 0.5325670498084292, + "acc_norm_stderr": 0.017841995750520867 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.043163785995113245, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.043163785995113245 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206824, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206824 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4127659574468085, + "acc_stderr": 0.03218471141400351, + "acc_norm": 0.4127659574468085, + "acc_norm_stderr": 0.03218471141400351 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39759036144578314, + "acc_stderr": 0.038099730845402184, + "acc_norm": 0.39759036144578314, + "acc_norm_stderr": 0.038099730845402184 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4887459807073955, + "acc_stderr": 0.028390897396863533, + "acc_norm": 0.4887459807073955, + "acc_norm_stderr": 0.028390897396863533 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.47533632286995514, + "acc_stderr": 0.03351695167652628, + "acc_norm": 0.47533632286995514, + "acc_norm_stderr": 0.03351695167652628 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.42748091603053434, + "acc_stderr": 0.043389203057924, + "acc_norm": 0.42748091603053434, + "acc_norm_stderr": 0.043389203057924 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5656565656565656, + "acc_stderr": 0.03531505879359183, + "acc_norm": 0.5656565656565656, + "acc_norm_stderr": 0.03531505879359183 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3931034482758621, + 
"acc_stderr": 0.040703290137070705, + "acc_norm": 0.3931034482758621, + "acc_norm_stderr": 0.040703290137070705 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.19607843137254902, + "acc_stderr": 0.03950581861179963, + "acc_norm": 0.19607843137254902, + "acc_norm_stderr": 0.03950581861179963 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.41596638655462187, + "acc_stderr": 0.03201650100739615, + "acc_norm": 0.41596638655462187, + "acc_norm_stderr": 0.03201650100739615 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.45897435897435895, + "acc_stderr": 0.025265525491284295, + "acc_norm": 0.45897435897435895, + "acc_norm_stderr": 0.025265525491284295 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.47, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.47, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5370370370370371, + "acc_stderr": 0.04820403072760628, + "acc_norm": 0.5370370370370371, + "acc_norm_stderr": 0.04820403072760628 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3793103448275862, + "acc_stderr": 0.034139638059062345, + "acc_norm": 0.3793103448275862, + "acc_norm_stderr": 0.034139638059062345 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.45483870967741935, + "acc_stderr": 0.028327743091561063, + "acc_norm": 0.45483870967741935, + "acc_norm_stderr": 0.028327743091561063 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6153846153846154, + "acc_stderr": 0.03187195347942466, + "acc_norm": 0.6153846153846154, + "acc_norm_stderr": 0.03187195347942466 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4528301886792453, + "acc_stderr": 0.03063562795796182, + "acc_norm": 0.4528301886792453, + "acc_norm_stderr": 0.03063562795796182 + }, + 
"harness|ko_mmlu_public_relations|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24814814814814815, + "acc_stderr": 0.0263357394040558, + "acc_norm": 0.24814814814814815, + "acc_norm_stderr": 0.0263357394040558 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + "acc_stderr": 0.03734535676787198, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.03734535676787198 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5472636815920398, + "acc_stderr": 0.03519702717576915, + "acc_norm": 0.5472636815920398, + "acc_norm_stderr": 0.03519702717576915 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3988439306358382, + "acc_stderr": 0.03733626655383509, + "acc_norm": 0.3988439306358382, + "acc_norm_stderr": 0.03733626655383509 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.26455026455026454, + "acc_stderr": 0.022717467897708617, + "acc_norm": 0.26455026455026454, + "acc_norm_stderr": 0.022717467897708617 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.375, + "acc_stderr": 0.04048439222695598, + "acc_norm": 0.375, + "acc_norm_stderr": 0.04048439222695598 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4682080924855491, + "acc_stderr": 0.02686462436675665, + "acc_norm": 0.4682080924855491, + "acc_norm_stderr": 0.02686462436675665 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4662576687116564, + "acc_stderr": 0.03919415545048409, + "acc_norm": 0.4662576687116564, + "acc_norm_stderr": 0.03919415545048409 + }, 
+ "harness|ko_mmlu_prehistory|5": { + "acc": 0.41358024691358025, + "acc_stderr": 0.027402042040269955, + "acc_norm": 0.41358024691358025, + "acc_norm_stderr": 0.027402042040269955 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.538860103626943, + "acc_stderr": 0.03597524411734578, + "acc_norm": 0.538860103626943, + "acc_norm_stderr": 0.03597524411734578 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.04185774424022058, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.04185774424022058 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5504587155963303, + "acc_stderr": 0.021327881417823387, + "acc_norm": 0.5504587155963303, + "acc_norm_stderr": 0.021327881417823387 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.041349130183033156, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.041349130183033156 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.02843109544417664, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.02843109544417664 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6033057851239669, + "acc_stderr": 0.044658697805310094, + "acc_norm": 0.6033057851239669, + "acc_norm_stderr": 0.044658697805310094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.375, + "acc_stderr": 0.039397364351956274, + "acc_norm": 0.375, + "acc_norm_stderr": 0.039397364351956274 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.35130718954248363, + "acc_stderr": 0.019312676065786575, + "acc_norm": 0.35130718954248363, + "acc_norm_stderr": 
0.019312676065786575 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3120567375886525, + "acc_stderr": 0.02764012054516993, + "acc_norm": 0.3120567375886525, + "acc_norm_stderr": 0.02764012054516993 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.26785714285714285, + "acc_stderr": 0.04203277291467763, + "acc_norm": 0.26785714285714285, + "acc_norm_stderr": 0.04203277291467763 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.0321495214780275, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.0321495214780275 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720683, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720683 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4485294117647059, + "acc_stderr": 0.0302114796091216, + "acc_norm": 0.4485294117647059, + "acc_norm_stderr": 0.0302114796091216 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3836734693877551, + "acc_stderr": 0.031130880396235926, + "acc_norm": 0.3836734693877551, + "acc_norm_stderr": 0.031130880396235926 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5822784810126582, + "acc_stderr": 0.032103530322412685, + "acc_norm": 0.5822784810126582, + "acc_norm_stderr": 0.032103530322412685 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.30964797913950454, + "acc_stderr": 0.011808598262503316, + "acc_norm": 0.30964797913950454, + "acc_norm_stderr": 0.011808598262503316 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.49019607843137253, + "acc_stderr": 
0.03508637358630573, + "acc_norm": 0.49019607843137253, + "acc_norm_stderr": 0.03508637358630573 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5515151515151515, + "acc_stderr": 0.038835659779569286, + "acc_norm": 0.5515151515151515, + "acc_norm_stderr": 0.038835659779569286 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.26438188494492043, + "mc1_stderr": 0.015438211119522505, + "mc2": 0.4142296152328429, + "mc2_stderr": 0.014852594216061029 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5053128689492326, + "acc_stderr": 0.01718938362722971, + "acc_norm": 0.5950413223140496, + "acc_norm_stderr": 0.016876941165045612 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 
1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "PracticeLLM/Custom-KoLLM-13B-v7", + "model_sha": "1fa610cc17b0a5c51c0637c98b9ac671df98c27c", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/PracticeLLM/Custom-KoLLM-13B-v8/result_2023-12-02 18:41:57.json b/PracticeLLM/Custom-KoLLM-13B-v8/result_2023-12-02 18:41:57.json new file mode 100644 index 0000000000000000000000000000000000000000..64741ce022dcb174ec8956486f18bb551a6c6133 --- /dev/null +++ 
b/PracticeLLM/Custom-KoLLM-13B-v8/result_2023-12-02 18:41:57.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.39505119453924914, + "acc_stderr": 0.014285898292938167, + "acc_norm": 0.4564846416382253, + "acc_norm_stderr": 0.014555949760496439 + }, + "harness|ko_hellaswag|10": { + "acc": 0.42362079267078273, + "acc_stderr": 0.004931219148182245, + "acc_norm": 0.569806811392153, + "acc_norm_stderr": 0.004940911779273374 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.47368421052631576, + "acc_stderr": 0.038295098689947286, + "acc_norm": 0.47368421052631576, + "acc_norm_stderr": 0.038295098689947286 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5242718446601942, + "acc_stderr": 0.04944901092973781, + "acc_norm": 0.5242718446601942, + "acc_norm_stderr": 0.04944901092973781 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5376756066411239, + "acc_stderr": 0.017829131764287187, + "acc_norm": 0.5376756066411239, + "acc_norm_stderr": 0.017829131764287187 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45925925925925926, + "acc_stderr": 0.04304979692464244, + "acc_norm": 0.45925925925925926, + "acc_norm_stderr": 0.04304979692464244 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206824, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206824 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39148936170212767, + "acc_stderr": 0.03190701242326812, + "acc_norm": 0.39148936170212767, + "acc_norm_stderr": 0.03190701242326812 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.41566265060240964, + "acc_stderr": 0.03836722176598052, + "acc_norm": 0.41566265060240964, + "acc_norm_stderr": 0.03836722176598052 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4694533762057878, + "acc_stderr": 0.028345045864840688, + "acc_norm": 0.4694533762057878, + "acc_norm_stderr": 0.028345045864840688 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.48878923766816146, + 
"acc_stderr": 0.033549366530984746, + "acc_norm": 0.48878923766816146, + "acc_norm_stderr": 0.033549366530984746 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3969465648854962, + "acc_stderr": 0.04291135671009225, + "acc_norm": 0.3969465648854962, + "acc_norm_stderr": 0.04291135671009225 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5606060606060606, + "acc_stderr": 0.03536085947529481, + "acc_norm": 0.5606060606060606, + "acc_norm_stderr": 0.03536085947529481 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.36551724137931035, + "acc_stderr": 0.04013124195424386, + "acc_norm": 0.36551724137931035, + "acc_norm_stderr": 0.04013124195424386 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.04023382273617747, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.04023382273617747 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.032252942323996406, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.032252942323996406 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.41025641025641024, + "acc_stderr": 0.024939313906940774, + "acc_norm": 0.41025641025641024, + "acc_norm_stderr": 0.024939313906940774 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5185185185185185, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.5185185185185185, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3645320197044335, + 
"acc_stderr": 0.0338640574606209, + "acc_norm": 0.3645320197044335, + "acc_norm_stderr": 0.0338640574606209 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.44516129032258067, + "acc_stderr": 0.028272410186214906, + "acc_norm": 0.44516129032258067, + "acc_norm_stderr": 0.028272410186214906 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6367521367521367, + "acc_stderr": 0.03150712523091265, + "acc_norm": 0.6367521367521367, + "acc_norm_stderr": 0.03150712523091265 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4679245283018868, + "acc_stderr": 0.03070948699255655, + "acc_norm": 0.4679245283018868, + "acc_norm_stderr": 0.03070948699255655 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5, + "acc_stderr": 0.04789131426105757, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04789131426105757 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.23703703703703705, + "acc_stderr": 0.02592887613276611, + "acc_norm": 0.23703703703703705, + "acc_norm_stderr": 0.02592887613276611 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + "acc_stderr": 0.03684881521389024, + "acc_norm": 0.2847682119205298, + "acc_norm_stderr": 0.03684881521389024 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.527363184079602, + "acc_stderr": 0.035302355173346824, + "acc_norm": 0.527363184079602, + "acc_norm_stderr": 0.035302355173346824 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3468208092485549, + "acc_stderr": 0.036291466701596636, + "acc_norm": 0.3468208092485549, + "acc_norm_stderr": 0.036291466701596636 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2751322751322751, + "acc_stderr": 0.023000086859068652, + "acc_norm": 0.2751322751322751, + "acc_norm_stderr": 0.023000086859068652 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.375, + "acc_stderr": 0.04048439222695598, + "acc_norm": 0.375, + "acc_norm_stderr": 0.04048439222695598 + }, + "harness|ko_mmlu_college_chemistry|5": { + 
"acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.42485549132947975, + "acc_stderr": 0.026613350840261746, + "acc_norm": 0.42485549132947975, + "acc_norm_stderr": 0.026613350840261746 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.44171779141104295, + "acc_stderr": 0.03901591825836184, + "acc_norm": 0.44171779141104295, + "acc_norm_stderr": 0.03901591825836184 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4567901234567901, + "acc_stderr": 0.027716661650194038, + "acc_norm": 0.4567901234567901, + "acc_norm_stderr": 0.027716661650194038 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5440414507772021, + "acc_stderr": 0.035944137112724366, + "acc_norm": 0.5440414507772021, + "acc_norm_stderr": 0.035944137112724366 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.30701754385964913, + "acc_stderr": 0.043391383225798594, + "acc_norm": 0.30701754385964913, + "acc_norm_stderr": 0.043391383225798594 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5302752293577981, + "acc_stderr": 0.021397988604936965, + "acc_norm": 0.5302752293577981, + "acc_norm_stderr": 0.021397988604936965 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.04134913018303316, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.04134913018303316 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.41830065359477125, + "acc_stderr": 0.028245134024387296, + "acc_norm": 0.41830065359477125, + "acc_norm_stderr": 0.028245134024387296 + }, + 
"harness|ko_mmlu_business_ethics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5537190082644629, + "acc_stderr": 0.0453793517794788, + "acc_norm": 0.5537190082644629, + "acc_norm_stderr": 0.0453793517794788 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.375, + "acc_stderr": 0.039397364351956274, + "acc_norm": 0.375, + "acc_norm_stderr": 0.039397364351956274 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.35947712418300654, + "acc_stderr": 0.01941253924203216, + "acc_norm": 0.35947712418300654, + "acc_norm_stderr": 0.01941253924203216 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3262411347517731, + "acc_stderr": 0.027968453043563168, + "acc_norm": 0.3262411347517731, + "acc_norm_stderr": 0.027968453043563168 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.33035714285714285, + "acc_stderr": 0.04464285714285712, + "acc_norm": 0.33035714285714285, + "acc_norm_stderr": 0.04464285714285712 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.36574074074074076, + "acc_stderr": 0.03284738857647207, + "acc_norm": 0.36574074074074076, + "acc_norm_stderr": 0.03284738857647207 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.39705882352941174, + "acc_stderr": 0.029722152099280048, + "acc_norm": 0.39705882352941174, + "acc_norm_stderr": 0.029722152099280048 
+ }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.35918367346938773, + "acc_stderr": 0.030713560455108493, + "acc_norm": 0.35918367346938773, + "acc_norm_stderr": 0.030713560455108493 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5738396624472574, + "acc_stderr": 0.03219035703131774, + "acc_norm": 0.5738396624472574, + "acc_norm_stderr": 0.03219035703131774 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3122555410691004, + "acc_stderr": 0.011835798135683185, + "acc_norm": 0.3122555410691004, + "acc_norm_stderr": 0.011835798135683185 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.46568627450980393, + "acc_stderr": 0.03501038327635897, + "acc_norm": 0.46568627450980393, + "acc_norm_stderr": 0.03501038327635897 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.039036986477484416, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.039036986477484416 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2668298653610771, + "mc1_stderr": 0.01548369193923726, + "mc2": 0.42521422618331217, + "mc2_stderr": 0.014890426369457616 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5407319952774499, + "acc_stderr": 0.01713321827653767, + "acc_norm": 0.6269185360094451, + "acc_norm_stderr": 0.01662731827513747 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + 
"harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + 
"harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "PracticeLLM/Custom-KoLLM-13B-v8", + "model_sha": "a09ef58abf42fd2fcbf5149126bcd8d13838cc97", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/PracticeLLM/KoSOLAR-Platypus-10.7B/result_2024-01-30 02:11:50.json b/PracticeLLM/KoSOLAR-Platypus-10.7B/result_2024-01-30 02:11:50.json new file mode 100644 index 0000000000000000000000000000000000000000..902f76a85f7d5992e488d7932a64ea8f3a0f68a4 --- /dev/null +++ b/PracticeLLM/KoSOLAR-Platypus-10.7B/result_2024-01-30 02:11:50.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4667235494880546, + "acc_stderr": 0.01457899585960581, + "acc_norm": 0.5238907849829352, + "acc_norm_stderr": 0.01459470179807165 + }, + "harness|ko_hellaswag|10": { + "acc": 0.45439155546703847, + "acc_stderr": 0.004968979259738327, + "acc_norm": 0.6176060545708026, + "acc_norm_stderr": 0.004849788423944377 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6198830409356725, + "acc_stderr": 0.037229657413855394, + "acc_norm": 0.6198830409356725, + "acc_norm_stderr": 0.037229657413855394 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6990291262135923, + "acc_stderr": 0.04541609446503947, + "acc_norm": 0.6990291262135923, + "acc_norm_stderr": 0.04541609446503947 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.665389527458493, + "acc_stderr": 0.01687346864159216, + "acc_norm": 0.665389527458493, + "acc_norm_stderr": 0.01687346864159216 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4962962962962963, + "acc_stderr": 0.043192236258113303, + "acc_norm": 0.4962962962962963, + "acc_norm_stderr": 0.043192236258113303 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + 
"acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.5063829787234042, + "acc_stderr": 0.03268335899936336, + "acc_norm": 0.5063829787234042, + "acc_norm_stderr": 0.03268335899936336 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4939759036144578, + "acc_stderr": 0.03892212195333047, + "acc_norm": 0.4939759036144578, + "acc_norm_stderr": 0.03892212195333047 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6270096463022508, + "acc_stderr": 0.027466610213140112, + "acc_norm": 0.6270096463022508, + "acc_norm_stderr": 0.027466610213140112 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5515695067264574, + "acc_stderr": 0.03337883736255098, + "acc_norm": 0.5515695067264574, + "acc_norm_stderr": 0.03337883736255098 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6183206106870229, + "acc_stderr": 0.04260735157644561, + "acc_norm": 0.6183206106870229, + "acc_norm_stderr": 0.04260735157644561 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7575757575757576, + "acc_stderr": 0.030532892233932046, + "acc_norm": 0.7575757575757576, + "acc_norm_stderr": 0.030532892233932046 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4896551724137931, + "acc_stderr": 0.04165774775728763, + "acc_norm": 0.4896551724137931, + "acc_norm_stderr": 0.04165774775728763 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.04617034827006717, + "acc_norm": 0.3137254901960784, + "acc_norm_stderr": 0.04617034827006717 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.634453781512605, + "acc_stderr": 0.031282177063684614, + "acc_norm": 0.634453781512605, + "acc_norm_stderr": 0.031282177063684614 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5384615384615384, + "acc_stderr": 
0.02527589207024065, + "acc_norm": 0.5384615384615384, + "acc_norm_stderr": 0.02527589207024065 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.64, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.64, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6296296296296297, + "acc_stderr": 0.04668408033024931, + "acc_norm": 0.6296296296296297, + "acc_norm_stderr": 0.04668408033024931 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.03465304488406796, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.03465304488406796 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6161290322580645, + "acc_stderr": 0.02766618207553965, + "acc_norm": 0.6161290322580645, + "acc_norm_stderr": 0.02766618207553965 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.811965811965812, + "acc_stderr": 0.02559819368665225, + "acc_norm": 0.811965811965812, + "acc_norm_stderr": 0.02559819368665225 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5547169811320755, + "acc_stderr": 0.030588052974270655, + "acc_norm": 0.5547169811320755, + "acc_norm_stderr": 0.030588052974270655 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.04607582090719976, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.04607582090719976 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.32222222222222224, + "acc_stderr": 0.028493465091028597, + "acc_norm": 0.32222222222222224, + "acc_norm_stderr": 0.028493465091028597 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.37748344370860926, + "acc_stderr": 0.0395802723112157, + "acc_norm": 0.37748344370860926, + "acc_norm_stderr": 0.0395802723112157 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 
0.7213930348258707, + "acc_stderr": 0.031700561834973086, + "acc_norm": 0.7213930348258707, + "acc_norm_stderr": 0.031700561834973086 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.48554913294797686, + "acc_stderr": 0.03810871630454764, + "acc_norm": 0.48554913294797686, + "acc_norm_stderr": 0.03810871630454764 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.025305906241590636, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.025305906241590636 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.04155319955593146, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.04155319955593146 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.73, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.73, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.6416184971098265, + "acc_stderr": 0.025816756791584187, + "acc_norm": 0.6416184971098265, + "acc_norm_stderr": 0.025816756791584187 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.588957055214724, + "acc_stderr": 0.038656978537853624, + "acc_norm": 0.588957055214724, + "acc_norm_stderr": 0.038656978537853624 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6080246913580247, + "acc_stderr": 0.027163686038271146, + "acc_norm": 0.6080246913580247, + "acc_norm_stderr": 0.027163686038271146 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939098, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939098 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7202072538860104, + "acc_stderr": 0.032396370467357036, + "acc_norm": 0.7202072538860104, + "acc_norm_stderr": 0.032396370467357036 + }, + 
"harness|ko_mmlu_econometrics|5": { + "acc": 0.45614035087719296, + "acc_stderr": 0.046854730419077895, + "acc_norm": 0.45614035087719296, + "acc_norm_stderr": 0.046854730419077895 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.689908256880734, + "acc_stderr": 0.019830849684439756, + "acc_norm": 0.689908256880734, + "acc_norm_stderr": 0.019830849684439756 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3968253968253968, + "acc_stderr": 0.04375888492727061, + "acc_norm": 0.3968253968253968, + "acc_norm_stderr": 0.04375888492727061 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.565359477124183, + "acc_stderr": 0.028384256704883037, + "acc_norm": 0.565359477124183, + "acc_norm_stderr": 0.028384256704883037 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.71900826446281, + "acc_stderr": 0.04103203830514511, + "acc_norm": 0.71900826446281, + "acc_norm_stderr": 0.04103203830514511 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5657894736842105, + "acc_stderr": 0.040335656678483205, + "acc_norm": 0.5657894736842105, + "acc_norm_stderr": 0.040335656678483205 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5065359477124183, + "acc_stderr": 0.020226106567657807, + "acc_norm": 0.5065359477124183, + "acc_norm_stderr": 0.020226106567657807 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.4078014184397163, + "acc_stderr": 0.02931601177634356, + "acc_norm": 0.4078014184397163, + "acc_norm_stderr": 0.02931601177634356 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.375, + "acc_stderr": 0.04595091388086298, + "acc_norm": 0.375, + "acc_norm_stderr": 0.04595091388086298 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5, + "acc_stderr": 0.034099716973523674, + "acc_norm": 0.5, + "acc_norm_stderr": 0.034099716973523674 + }, + 
"harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.25027932960893856, + "acc_stderr": 0.014487500852850423, + "acc_norm": 0.25027932960893856, + "acc_norm_stderr": 0.014487500852850423 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.7, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.7, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4963235294117647, + "acc_stderr": 0.030372015885428195, + "acc_norm": 0.4963235294117647, + "acc_norm_stderr": 0.030372015885428195 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6571428571428571, + "acc_stderr": 0.030387262919547728, + "acc_norm": 0.6571428571428571, + "acc_norm_stderr": 0.030387262919547728 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.759493670886076, + "acc_stderr": 0.027820781981149685, + "acc_norm": 0.759493670886076, + "acc_norm_stderr": 0.027820781981149685 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.40221642764015647, + "acc_stderr": 0.012523646856180178, + "acc_norm": 0.40221642764015647, + "acc_norm_stderr": 0.012523646856180178 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.696078431372549, + "acc_stderr": 0.03228210387037892, + "acc_norm": 0.696078431372549, + "acc_norm_stderr": 0.03228210387037892 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.7090909090909091, + "acc_stderr": 0.03546563019624336, + "acc_norm": 0.7090909090909091, + "acc_norm_stderr": 0.03546563019624336 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3292533659730722, + "mc1_stderr": 0.016451264440068242, + "mc2": 0.4872200383531268, + "mc2_stderr": 0.015364387176828335 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5454545454545454, + "acc_stderr": 0.0171191722080615, + "acc_norm": 0.6080283353010626, 
+ "acc_norm_stderr": 0.016784332119424077 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 
1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "PracticeLLM/KoSOLAR-Platypus-10.7B", + "model_sha": "81219f5c1666e19b7491306cd3fd282051603817", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/PracticeLLM/SOLAR-tail-10.7B-Merge-v1.0/result_2023-12-26 19:07:34.json b/PracticeLLM/SOLAR-tail-10.7B-Merge-v1.0/result_2023-12-26 19:07:34.json new file mode 100644 index 0000000000000000000000000000000000000000..191f403feb19f6a56caf135ae35391cc733a00fd --- /dev/null +++ b/PracticeLLM/SOLAR-tail-10.7B-Merge-v1.0/result_2023-12-26 19:07:34.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.35409556313993173, + "acc_stderr": 0.013975454122756564, + "acc_norm": 0.4249146757679181, + "acc_norm_stderr": 0.014445698968520769 + }, + "harness|ko_hellaswag|10": { + "acc": 0.38169687313284206, + "acc_stderr": 0.004848099661619702, + "acc_norm": 0.5087631945827524, + "acc_norm_stderr": 0.004989014986235632 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5672514619883041, + "acc_stderr": 0.03799978644370608, + "acc_norm": 0.5672514619883041, + "acc_norm_stderr": 
0.03799978644370608 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5825242718446602, + "acc_stderr": 0.048828405482122375, + "acc_norm": 0.5825242718446602, + "acc_norm_stderr": 0.048828405482122375 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5925925925925926, + "acc_stderr": 0.017570705239256586, + "acc_norm": 0.5925925925925926, + "acc_norm_stderr": 0.017570705239256586 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.04244633238353229, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.04244633238353229 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4553191489361702, + "acc_stderr": 0.03255525359340354, + "acc_norm": 0.4553191489361702, + "acc_norm_stderr": 0.03255525359340354 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4578313253012048, + "acc_stderr": 0.03878626771002361, + "acc_norm": 0.4578313253012048, + "acc_norm_stderr": 0.03878626771002361 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5498392282958199, + "acc_stderr": 0.028256660723360173, + "acc_norm": 0.5498392282958199, + "acc_norm_stderr": 0.028256660723360173 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5246636771300448, + "acc_stderr": 0.03351695167652628, + "acc_norm": 0.5246636771300448, + "acc_norm_stderr": 0.03351695167652628 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6183206106870229, + "acc_stderr": 0.04260735157644561, + "acc_norm": 0.6183206106870229, + "acc_norm_stderr": 0.04260735157644561 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.03358618145732523, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 
0.03358618145732523 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4896551724137931, + "acc_stderr": 0.04165774775728763, + "acc_norm": 0.4896551724137931, + "acc_norm_stderr": 0.04165774775728763 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04690650298201942, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04690650298201942 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5714285714285714, + "acc_stderr": 0.03214536859788639, + "acc_norm": 0.5714285714285714, + "acc_norm_stderr": 0.03214536859788639 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5, + "acc_stderr": 0.02535100632816969, + "acc_norm": 0.5, + "acc_norm_stderr": 0.02535100632816969 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.57, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.57, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6111111111111112, + "acc_stderr": 0.0471282125742677, + "acc_norm": 0.6111111111111112, + "acc_norm_stderr": 0.0471282125742677 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3694581280788177, + "acc_stderr": 0.03395970381998575, + "acc_norm": 0.3694581280788177, + "acc_norm_stderr": 0.03395970381998575 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5419354838709678, + "acc_stderr": 0.028343787250540618, + "acc_norm": 0.5419354838709678, + "acc_norm_stderr": 0.028343787250540618 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7564102564102564, + "acc_stderr": 0.028120966503914387, + "acc_norm": 0.7564102564102564, + "acc_norm_stderr": 0.028120966503914387 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4641509433962264, + "acc_stderr": 0.030693675018458003, + "acc_norm": 0.4641509433962264, + 
"acc_norm_stderr": 0.030693675018458003 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5181818181818182, + "acc_stderr": 0.04785964010794915, + "acc_norm": 0.5181818181818182, + "acc_norm_stderr": 0.04785964010794915 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3296296296296296, + "acc_stderr": 0.028661201116524572, + "acc_norm": 0.3296296296296296, + "acc_norm_stderr": 0.028661201116524572 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.03710185726119995, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.03710185726119995 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7014925373134329, + "acc_stderr": 0.032357437893550445, + "acc_norm": 0.7014925373134329, + "acc_norm_stderr": 0.032357437893550445 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.45664739884393063, + "acc_stderr": 0.03798106566014498, + "acc_norm": 0.45664739884393063, + "acc_norm_stderr": 0.03798106566014498 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3783068783068783, + "acc_stderr": 0.024976954053155247, + "acc_norm": 0.3783068783068783, + "acc_norm_stderr": 0.024976954053155247 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4930555555555556, + "acc_stderr": 0.04180806750294938, + "acc_norm": 0.4930555555555556, + "acc_norm_stderr": 0.04180806750294938 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.67, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.67, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5809248554913294, + "acc_stderr": 0.026564178111422622, + "acc_norm": 0.5809248554913294, + "acc_norm_stderr": 0.026564178111422622 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5644171779141104, + "acc_stderr": 0.03895632464138937, + 
"acc_norm": 0.5644171779141104, + "acc_norm_stderr": 0.03895632464138937 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5648148148148148, + "acc_stderr": 0.027586006221607708, + "acc_norm": 0.5648148148148148, + "acc_norm_stderr": 0.027586006221607708 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6994818652849741, + "acc_stderr": 0.0330881859441575, + "acc_norm": 0.6994818652849741, + "acc_norm_stderr": 0.0330881859441575 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.40350877192982454, + "acc_stderr": 0.04615186962583703, + "acc_norm": 0.40350877192982454, + "acc_norm_stderr": 0.04615186962583703 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6293577981651376, + "acc_stderr": 0.02070745816435298, + "acc_norm": 0.6293577981651376, + "acc_norm_stderr": 0.02070745816435298 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4126984126984127, + "acc_stderr": 0.04403438954768177, + "acc_norm": 0.4126984126984127, + "acc_norm_stderr": 0.04403438954768177 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5359477124183006, + "acc_stderr": 0.028555827516528777, + "acc_norm": 0.5359477124183006, + "acc_norm_stderr": 0.028555827516528777 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7272727272727273, + "acc_stderr": 0.04065578140908705, + "acc_norm": 0.7272727272727273, + "acc_norm_stderr": 0.04065578140908705 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5394736842105263, + "acc_stderr": 0.04056242252249034, + "acc_norm": 0.5394736842105263, + "acc_norm_stderr": 0.04056242252249034 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.46895424836601307, + 
"acc_stderr": 0.020188804456361887, + "acc_norm": 0.46895424836601307, + "acc_norm_stderr": 0.020188804456361887 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.375886524822695, + "acc_stderr": 0.02889395541211589, + "acc_norm": 0.375886524822695, + "acc_norm_stderr": 0.02889395541211589 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.38392857142857145, + "acc_stderr": 0.04616143075028546, + "acc_norm": 0.38392857142857145, + "acc_norm_stderr": 0.04616143075028546 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.03409386946992699, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.03409386946992699 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23128491620111732, + "acc_stderr": 0.014102223623152608, + "acc_norm": 0.23128491620111732, + "acc_norm_stderr": 0.014102223623152608 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.72, + "acc_stderr": 0.04512608598542129, + "acc_norm": 0.72, + "acc_norm_stderr": 0.04512608598542129 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.45588235294117646, + "acc_stderr": 0.030254372573976694, + "acc_norm": 0.45588235294117646, + "acc_norm_stderr": 0.030254372573976694 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6408163265306123, + "acc_stderr": 0.03071356045510849, + "acc_norm": 0.6408163265306123, + "acc_norm_stderr": 0.03071356045510849 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.679324894514768, + "acc_stderr": 0.030381931949990407, + "acc_norm": 0.679324894514768, + "acc_norm_stderr": 0.030381931949990407 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.38852672750977835, + "acc_stderr": 0.012448817838292364, + "acc_norm": 0.38852672750977835, + "acc_norm_stderr": 0.012448817838292364 
+ }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5784313725490197, + "acc_stderr": 0.03465868196380762, + "acc_norm": 0.5784313725490197, + "acc_norm_stderr": 0.03465868196380762 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6, + "acc_stderr": 0.03825460278380027, + "acc_norm": 0.6, + "acc_norm_stderr": 0.03825460278380027 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3084455324357405, + "mc1_stderr": 0.01616803938315687, + "mc2": 0.46871726153267024, + "mc2_stderr": 0.016338202358424335 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4982290436835891, + "acc_stderr": 0.01719024627623186, + "acc_norm": 0.5053128689492326, + "acc_norm_stderr": 0.01718938362722971 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + 
"harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "PracticeLLM/SOLAR-tail-10.7B-Merge-v1.0", + "model_sha": "92349666d0209524a920adefafad53f82aecfee8", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/PracticeLLM/SOLAR-tail-10.7B-instruct-v1.0/result_2023-12-28 15:26:44.json b/PracticeLLM/SOLAR-tail-10.7B-instruct-v1.0/result_2023-12-28 15:26:44.json new file mode 100644 index 
0000000000000000000000000000000000000000..e35a90b6f3602d5d2d08243bfdc5f8dd1f7d5607 --- /dev/null +++ b/PracticeLLM/SOLAR-tail-10.7B-instruct-v1.0/result_2023-12-28 15:26:44.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.40955631399317405, + "acc_stderr": 0.014370358632472428, + "acc_norm": 0.46928327645051193, + "acc_norm_stderr": 0.014583792546304037 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4307906791475802, + "acc_stderr": 0.0049417488176823, + "acc_norm": 0.5818562039434375, + "acc_norm_stderr": 0.004922459820434773 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.631578947368421, + "acc_stderr": 0.036996580176568775, + "acc_norm": 0.631578947368421, + "acc_norm_stderr": 0.036996580176568775 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6504854368932039, + "acc_stderr": 0.04721188506097172, + "acc_norm": 0.6504854368932039, + "acc_norm_stderr": 0.04721188506097172 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6309067688378033, + "acc_stderr": 0.017256283109124634, + "acc_norm": 0.6309067688378033, + "acc_norm_stderr": 0.017256283109124634 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.43703703703703706, + "acc_stderr": 0.04284958639753399, + "acc_norm": 0.43703703703703706, + "acc_norm_stderr": 0.04284958639753399 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.502127659574468, + "acc_stderr": 0.03268572658667493, + "acc_norm": 0.502127659574468, + "acc_norm_stderr": 0.03268572658667493 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.5, + "acc_stderr": 0.03892494720807614, + "acc_norm": 0.5, + "acc_norm_stderr": 0.03892494720807614 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5755627009646302, + "acc_stderr": 0.028071928247946205, + "acc_norm": 0.5755627009646302, + "acc_norm_stderr": 0.028071928247946205 + }, + 
"harness|ko_mmlu_human_aging|5": { + "acc": 0.5246636771300448, + "acc_stderr": 0.033516951676526276, + "acc_norm": 0.5246636771300448, + "acc_norm_stderr": 0.033516951676526276 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5877862595419847, + "acc_stderr": 0.04317171194870255, + "acc_norm": 0.5877862595419847, + "acc_norm_stderr": 0.04317171194870255 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956914, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956914 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7373737373737373, + "acc_stderr": 0.03135305009533086, + "acc_norm": 0.7373737373737373, + "acc_norm_stderr": 0.03135305009533086 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5586206896551724, + "acc_stderr": 0.04137931034482758, + "acc_norm": 0.5586206896551724, + "acc_norm_stderr": 0.04137931034482758 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.30392156862745096, + "acc_stderr": 0.045766654032077615, + "acc_norm": 0.30392156862745096, + "acc_norm_stderr": 0.045766654032077615 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6302521008403361, + "acc_stderr": 0.03135709599613591, + "acc_norm": 0.6302521008403361, + "acc_norm_stderr": 0.03135709599613591 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5128205128205128, + "acc_stderr": 0.02534267129380724, + "acc_norm": 0.5128205128205128, + "acc_norm_stderr": 0.02534267129380724 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.52, + "acc_stderr": 0.05021167315686779, + "acc_norm": 0.52, + "acc_norm_stderr": 0.05021167315686779 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.04793724854411018, + "acc_norm": 0.35, + "acc_norm_stderr": 0.04793724854411018 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6111111111111112, + "acc_stderr": 0.04712821257426769, + "acc_norm": 0.6111111111111112, + "acc_norm_stderr": 
0.04712821257426769 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3842364532019704, + "acc_stderr": 0.03422398565657551, + "acc_norm": 0.3842364532019704, + "acc_norm_stderr": 0.03422398565657551 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6193548387096774, + "acc_stderr": 0.02762171783290703, + "acc_norm": 0.6193548387096774, + "acc_norm_stderr": 0.02762171783290703 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7991452991452992, + "acc_stderr": 0.026246772946890488, + "acc_norm": 0.7991452991452992, + "acc_norm_stderr": 0.026246772946890488 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5018867924528302, + "acc_stderr": 0.030772653642075657, + "acc_norm": 0.5018867924528302, + "acc_norm_stderr": 0.030772653642075657 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5727272727272728, + "acc_stderr": 0.04738198703545483, + "acc_norm": 0.5727272727272728, + "acc_norm_stderr": 0.04738198703545483 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.337037037037037, + "acc_stderr": 0.028820884666253255, + "acc_norm": 0.337037037037037, + "acc_norm_stderr": 0.028820884666253255 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.37748344370860926, + "acc_stderr": 0.0395802723112157, + "acc_norm": 0.37748344370860926, + "acc_norm_stderr": 0.0395802723112157 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6766169154228856, + "acc_stderr": 0.03307615947979035, + "acc_norm": 0.6766169154228856, + "acc_norm_stderr": 0.03307615947979035 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4913294797687861, + "acc_stderr": 0.038118909889404126, + "acc_norm": 0.4913294797687861, + "acc_norm_stderr": 0.038118909889404126 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.02530590624159064, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.02530590624159064 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4513888888888889, + 
"acc_stderr": 0.04161402398403279, + "acc_norm": 0.4513888888888889, + "acc_norm_stderr": 0.04161402398403279 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.7, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.7, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.6098265895953757, + "acc_stderr": 0.026261677607806642, + "acc_norm": 0.6098265895953757, + "acc_norm_stderr": 0.026261677607806642 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5398773006134969, + "acc_stderr": 0.03915857291436972, + "acc_norm": 0.5398773006134969, + "acc_norm_stderr": 0.03915857291436972 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5895061728395061, + "acc_stderr": 0.027371350925124764, + "acc_norm": 0.5895061728395061, + "acc_norm_stderr": 0.027371350925124764 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7512953367875648, + "acc_stderr": 0.031195840877700304, + "acc_norm": 0.7512953367875648, + "acc_norm_stderr": 0.031195840877700304 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.4824561403508772, + "acc_stderr": 0.04700708033551038, + "acc_norm": 0.4824561403508772, + "acc_norm_stderr": 0.04700708033551038 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6788990825688074, + "acc_stderr": 0.020018149772733744, + "acc_norm": 0.6788990825688074, + "acc_norm_stderr": 0.020018149772733744 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4523809523809524, + "acc_stderr": 0.044518079590553275, + "acc_norm": 0.4523809523809524, + "acc_norm_stderr": 0.044518079590553275 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 
0.5424836601307189, + "acc_stderr": 0.028526383452142635, + "acc_norm": 0.5424836601307189, + "acc_norm_stderr": 0.028526383452142635 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.743801652892562, + "acc_stderr": 0.039849796533028704, + "acc_norm": 0.743801652892562, + "acc_norm_stderr": 0.039849796533028704 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5460526315789473, + "acc_stderr": 0.04051646342874143, + "acc_norm": 0.5460526315789473, + "acc_norm_stderr": 0.04051646342874143 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4918300653594771, + "acc_stderr": 0.020225134343057265, + "acc_norm": 0.4918300653594771, + "acc_norm_stderr": 0.020225134343057265 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.4219858156028369, + "acc_stderr": 0.02946218923337059, + "acc_norm": 0.4219858156028369, + "acc_norm_stderr": 0.02946218923337059 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.375, + "acc_stderr": 0.04595091388086298, + "acc_norm": 0.375, + "acc_norm_stderr": 0.04595091388086298 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5, + "acc_stderr": 0.034099716973523674, + "acc_norm": 0.5, + "acc_norm_stderr": 0.034099716973523674 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.21787709497206703, + "acc_stderr": 0.013806211780732986, + "acc_norm": 0.21787709497206703, + "acc_norm_stderr": 0.013806211780732986 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.72, + "acc_stderr": 0.045126085985421296, + "acc_norm": 0.72, + "acc_norm_stderr": 0.045126085985421296 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4963235294117647, 
+ "acc_stderr": 0.030372015885428195, + "acc_norm": 0.4963235294117647, + "acc_norm_stderr": 0.030372015885428195 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.636734693877551, + "acc_stderr": 0.030789051139030806, + "acc_norm": 0.636734693877551, + "acc_norm_stderr": 0.030789051139030806 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.70042194092827, + "acc_stderr": 0.02981802474975309, + "acc_norm": 0.70042194092827, + "acc_norm_stderr": 0.02981802474975309 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3970013037809648, + "acc_stderr": 0.012496346982909553, + "acc_norm": 0.3970013037809648, + "acc_norm_stderr": 0.012496346982909553 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6127450980392157, + "acc_stderr": 0.034189312338333444, + "acc_norm": 0.6127450980392157, + "acc_norm_stderr": 0.034189312338333444 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6787878787878788, + "acc_stderr": 0.036462049632538136, + "acc_norm": 0.6787878787878788, + "acc_norm_stderr": 0.036462049632538136 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.30966952264381886, + "mc1_stderr": 0.016185744355144912, + "mc2": 0.46519091505563187, + "mc2_stderr": 0.015560577374692961 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.48406139315230223, + "acc_stderr": 0.017181617837190195, + "acc_norm": 0.5371900826446281, + "acc_norm_stderr": 0.017142736117643304 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + 
"harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + 
"harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "PracticeLLM/SOLAR-tail-10.7B-instruct-v1.0", + "model_sha": "89df820084202a2da014491ee6ebe5c9f8ff9004", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/PracticeLLM/Twice-KoSOLAR-16.1B-instruct-test/result_2023-12-31 17:01:53.json b/PracticeLLM/Twice-KoSOLAR-16.1B-instruct-test/result_2023-12-31 17:01:53.json new file mode 100644 index 0000000000000000000000000000000000000000..273f9dfc8e598bf2f75b14a5d7be851a8803bc1b --- /dev/null +++ b/PracticeLLM/Twice-KoSOLAR-16.1B-instruct-test/result_2023-12-31 17:01:53.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.44368600682593856, + "acc_stderr": 0.014518421825670452, + "acc_norm": 0.523037542662116, + "acc_norm_stderr": 0.014595873205358264 + }, + "harness|ko_hellaswag|10": { + "acc": 0.44124676359290976, + "acc_stderr": 0.004955212787832377, + "acc_norm": 0.5997809201354312, + "acc_norm_stderr": 0.00488941312620877 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6081871345029239, + "acc_stderr": 0.03743979825926398, + "acc_norm": 0.6081871345029239, + "acc_norm_stderr": 0.03743979825926398 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6504854368932039, + "acc_stderr": 0.047211885060971716, + "acc_norm": 0.6504854368932039, + "acc_norm_stderr": 0.047211885060971716 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6602809706257982, + "acc_stderr": 0.016936394114301635, + "acc_norm": 0.6602809706257982, + "acc_norm_stderr": 0.016936394114301635 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.042925967182569816, + "acc_norm": 0.4444444444444444, + 
"acc_norm_stderr": 0.042925967182569816 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909284, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909284 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.5191489361702127, + "acc_stderr": 0.032662042990646796, + "acc_norm": 0.5191489361702127, + "acc_norm_stderr": 0.032662042990646796 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4819277108433735, + "acc_stderr": 0.038899512528272166, + "acc_norm": 0.4819277108433735, + "acc_norm_stderr": 0.038899512528272166 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6237942122186495, + "acc_stderr": 0.027513925683549427, + "acc_norm": 0.6237942122186495, + "acc_norm_stderr": 0.027513925683549427 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5605381165919282, + "acc_stderr": 0.03331092511038179, + "acc_norm": 0.5605381165919282, + "acc_norm_stderr": 0.03331092511038179 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6641221374045801, + "acc_stderr": 0.04142313771996663, + "acc_norm": 0.6641221374045801, + "acc_norm_stderr": 0.04142313771996663 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.45, + "acc_stderr": 0.04999999999999999, + "acc_norm": 0.45, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7171717171717171, + "acc_stderr": 0.03208779558786752, + "acc_norm": 0.7171717171717171, + "acc_norm_stderr": 0.03208779558786752 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4689655172413793, + "acc_stderr": 0.04158632762097828, + "acc_norm": 0.4689655172413793, + "acc_norm_stderr": 0.04158632762097828 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04690650298201942, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04690650298201942 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6050420168067226, + "acc_stderr": 0.031753678460966245, + "acc_norm": 
0.6050420168067226, + "acc_norm_stderr": 0.031753678460966245 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5538461538461539, + "acc_stderr": 0.025203571773028323, + "acc_norm": 0.5538461538461539, + "acc_norm_stderr": 0.025203571773028323 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6296296296296297, + "acc_stderr": 0.04668408033024931, + "acc_norm": 0.6296296296296297, + "acc_norm_stderr": 0.04668408033024931 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3842364532019704, + "acc_stderr": 0.034223985656575515, + "acc_norm": 0.3842364532019704, + "acc_norm_stderr": 0.034223985656575515 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6, + "acc_stderr": 0.02786932057166463, + "acc_norm": 0.6, + "acc_norm_stderr": 0.02786932057166463 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7948717948717948, + "acc_stderr": 0.02645350805404035, + "acc_norm": 0.7948717948717948, + "acc_norm_stderr": 0.02645350805404035 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5207547169811321, + "acc_stderr": 0.030746349975723463, + "acc_norm": 0.5207547169811321, + "acc_norm_stderr": 0.030746349975723463 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5636363636363636, + "acc_stderr": 0.04750185058907296, + "acc_norm": 0.5636363636363636, + "acc_norm_stderr": 0.04750185058907296 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.32592592592592595, + "acc_stderr": 0.028578348365473072, + "acc_norm": 0.32592592592592595, + "acc_norm_stderr": 0.028578348365473072 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3576158940397351, + "acc_stderr": 0.03913453431177258, + 
"acc_norm": 0.3576158940397351, + "acc_norm_stderr": 0.03913453431177258 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.681592039800995, + "acc_stderr": 0.03294118479054096, + "acc_norm": 0.681592039800995, + "acc_norm_stderr": 0.03294118479054096 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.47398843930635837, + "acc_stderr": 0.03807301726504511, + "acc_norm": 0.47398843930635837, + "acc_norm_stderr": 0.03807301726504511 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.025305906241590632, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.025305906241590632 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5138888888888888, + "acc_stderr": 0.04179596617581, + "acc_norm": 0.5138888888888888, + "acc_norm_stderr": 0.04179596617581 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.74, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.74, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5780346820809249, + "acc_stderr": 0.02658923114217426, + "acc_norm": 0.5780346820809249, + "acc_norm_stderr": 0.02658923114217426 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5214723926380368, + "acc_stderr": 0.03924746876751129, + "acc_norm": 0.5214723926380368, + "acc_norm_stderr": 0.03924746876751129 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6111111111111112, + "acc_stderr": 0.02712511551316686, + "acc_norm": 0.6111111111111112, + "acc_norm_stderr": 0.02712511551316686 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7253886010362695, + "acc_stderr": 0.032210245080411516, + 
"acc_norm": 0.7253886010362695, + "acc_norm_stderr": 0.032210245080411516 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.45614035087719296, + "acc_stderr": 0.046854730419077895, + "acc_norm": 0.45614035087719296, + "acc_norm_stderr": 0.046854730419077895 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.691743119266055, + "acc_stderr": 0.019798366698367254, + "acc_norm": 0.691743119266055, + "acc_norm_stderr": 0.019798366698367254 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.38095238095238093, + "acc_stderr": 0.043435254289490965, + "acc_norm": 0.38095238095238093, + "acc_norm_stderr": 0.043435254289490965 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.6176470588235294, + "acc_stderr": 0.02782610930728369, + "acc_norm": 0.6176470588235294, + "acc_norm_stderr": 0.02782610930728369 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.55, + "acc_stderr": 0.05, + "acc_norm": 0.55, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7024793388429752, + "acc_stderr": 0.04173349148083499, + "acc_norm": 0.7024793388429752, + "acc_norm_stderr": 0.04173349148083499 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5723684210526315, + "acc_stderr": 0.040260970832965634, + "acc_norm": 0.5723684210526315, + "acc_norm_stderr": 0.040260970832965634 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.49836601307189543, + "acc_stderr": 0.020227726838150117, + "acc_norm": 0.49836601307189543, + "acc_norm_stderr": 0.020227726838150117 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.41134751773049644, + "acc_stderr": 0.02935491115994098, + "acc_norm": 0.41134751773049644, + "acc_norm_stderr": 0.02935491115994098 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.38392857142857145, + "acc_stderr": 0.04616143075028546, + "acc_norm": 0.38392857142857145, + "acc_norm_stderr": 0.04616143075028546 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4861111111111111, + 
"acc_stderr": 0.034086558679777494, + "acc_norm": 0.4861111111111111, + "acc_norm_stderr": 0.034086558679777494 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.22905027932960895, + "acc_stderr": 0.014054314935614555, + "acc_norm": 0.22905027932960895, + "acc_norm_stderr": 0.014054314935614555 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.65, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.65, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5147058823529411, + "acc_stderr": 0.03035969707904611, + "acc_norm": 0.5147058823529411, + "acc_norm_stderr": 0.03035969707904611 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6408163265306123, + "acc_stderr": 0.03071356045510849, + "acc_norm": 0.6408163265306123, + "acc_norm_stderr": 0.03071356045510849 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7257383966244726, + "acc_stderr": 0.029041333510598014, + "acc_norm": 0.7257383966244726, + "acc_norm_stderr": 0.029041333510598014 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.39895697522816165, + "acc_stderr": 0.01250675765529368, + "acc_norm": 0.39895697522816165, + "acc_norm_stderr": 0.01250675765529368 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5980392156862745, + "acc_stderr": 0.034411900234824655, + "acc_norm": 0.5980392156862745, + "acc_norm_stderr": 0.034411900234824655 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.7090909090909091, + "acc_stderr": 0.03546563019624335, + "acc_norm": 0.7090909090909091, + "acc_norm_stderr": 0.03546563019624335 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.29865361077111385, + "mc1_stderr": 0.016021570613768545, + "mc2": 0.44070259401172257, + "mc2_stderr": 0.015527002175607771 + }, + 
"harness|ko_commongen_v2|2": { + "acc": 0.525383707201889, + "acc_stderr": 0.01716818720142925, + "acc_norm": 0.5844155844155844, + "acc_norm_stderr": 0.01694358631307656 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + 
"harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "PracticeLLM/Twice-KoSOLAR-16.1B-instruct-test", + "model_sha": "d0ebbc4b865e6ca25e7f4c7c812636cb6645f72e", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/PracticeLLM/Twice-KoSOLAR-16.1B-test/result_2023-12-30 03:41:23.json b/PracticeLLM/Twice-KoSOLAR-16.1B-test/result_2023-12-30 03:41:23.json new file mode 100644 index 0000000000000000000000000000000000000000..1f42af4dca902beac4c12b392847cd9e662ca954 --- /dev/null +++ b/PracticeLLM/Twice-KoSOLAR-16.1B-test/result_2023-12-30 03:41:23.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3984641638225256, + "acc_stderr": 0.014306946052735563, + "acc_norm": 0.4564846416382253, + "acc_norm_stderr": 0.014555949760496437 + }, + "harness|ko_hellaswag|10": { + "acc": 0.41734714200358497, + "acc_stderr": 0.0049211338649318885, + "acc_norm": 0.571400119498108, + "acc_norm_stderr": 0.004938643787869551 + }, + 
"harness|ko_mmlu_world_religions|5": { + "acc": 0.6081871345029239, + "acc_stderr": 0.03743979825926399, + "acc_norm": 0.6081871345029239, + "acc_norm_stderr": 0.03743979825926399 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6213592233009708, + "acc_stderr": 0.04802694698258974, + "acc_norm": 0.6213592233009708, + "acc_norm_stderr": 0.04802694698258974 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6360153256704981, + "acc_stderr": 0.017205684809032232, + "acc_norm": 0.6360153256704981, + "acc_norm_stderr": 0.017205684809032232 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.43703703703703706, + "acc_stderr": 0.04284958639753398, + "acc_norm": 0.43703703703703706, + "acc_norm_stderr": 0.04284958639753398 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768077, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768077 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.451063829787234, + "acc_stderr": 0.032529096196131965, + "acc_norm": 0.451063829787234, + "acc_norm_stderr": 0.032529096196131965 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.463855421686747, + "acc_stderr": 0.03882310850890594, + "acc_norm": 0.463855421686747, + "acc_norm_stderr": 0.03882310850890594 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.617363344051447, + "acc_stderr": 0.027604689028581993, + "acc_norm": 0.617363344051447, + "acc_norm_stderr": 0.027604689028581993 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5515695067264574, + "acc_stderr": 0.03337883736255098, + "acc_norm": 0.5515695067264574, + "acc_norm_stderr": 0.03337883736255098 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6412213740458015, + "acc_stderr": 0.04206739313864908, + "acc_norm": 0.6412213740458015, + "acc_norm_stderr": 0.04206739313864908 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + 
"harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6767676767676768, + "acc_stderr": 0.03332299921070645, + "acc_norm": 0.6767676767676768, + "acc_norm_stderr": 0.03332299921070645 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.47586206896551725, + "acc_stderr": 0.041618085035015295, + "acc_norm": 0.47586206896551725, + "acc_norm_stderr": 0.041618085035015295 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.04533838195929777, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.04533838195929777 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6092436974789915, + "acc_stderr": 0.031693802357129965, + "acc_norm": 0.6092436974789915, + "acc_norm_stderr": 0.031693802357129965 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5384615384615384, + "acc_stderr": 0.025275892070240655, + "acc_norm": 0.5384615384615384, + "acc_norm_stderr": 0.025275892070240655 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5925925925925926, + "acc_stderr": 0.04750077341199984, + "acc_norm": 0.5925925925925926, + "acc_norm_stderr": 0.04750077341199984 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39901477832512317, + "acc_stderr": 0.034454876862647144, + "acc_norm": 0.39901477832512317, + "acc_norm_stderr": 0.034454876862647144 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5774193548387097, + "acc_stderr": 0.02810096472427264, + "acc_norm": 0.5774193548387097, + "acc_norm_stderr": 0.02810096472427264 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7649572649572649, + "acc_stderr": 0.027778835904935427, + "acc_norm": 
0.7649572649572649, + "acc_norm_stderr": 0.027778835904935427 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5018867924528302, + "acc_stderr": 0.030772653642075664, + "acc_norm": 0.5018867924528302, + "acc_norm_stderr": 0.030772653642075664 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5363636363636364, + "acc_stderr": 0.04776449162396197, + "acc_norm": 0.5363636363636364, + "acc_norm_stderr": 0.04776449162396197 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.31851851851851853, + "acc_stderr": 0.02840653309060846, + "acc_norm": 0.31851851851851853, + "acc_norm_stderr": 0.02840653309060846 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.36423841059602646, + "acc_stderr": 0.03929111781242741, + "acc_norm": 0.36423841059602646, + "acc_norm_stderr": 0.03929111781242741 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6915422885572139, + "acc_stderr": 0.03265819588512697, + "acc_norm": 0.6915422885572139, + "acc_norm_stderr": 0.03265819588512697 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.43352601156069365, + "acc_stderr": 0.03778621079092055, + "acc_norm": 0.43352601156069365, + "acc_norm_stderr": 0.03778621079092055 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3783068783068783, + "acc_stderr": 0.024976954053155257, + "acc_norm": 0.3783068783068783, + "acc_norm_stderr": 0.024976954053155257 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4861111111111111, + "acc_stderr": 0.041795966175810016, + "acc_norm": 0.4861111111111111, + "acc_norm_stderr": 0.041795966175810016 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.35, + "acc_stderr": 0.04793724854411019, + "acc_norm": 0.35, + "acc_norm_stderr": 0.04793724854411019 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.78, + "acc_stderr": 0.04163331998932262, + "acc_norm": 0.78, + "acc_norm_stderr": 0.04163331998932262 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5578034682080925, + "acc_stderr": 
0.026738603643807403, + "acc_norm": 0.5578034682080925, + "acc_norm_stderr": 0.026738603643807403 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.49693251533742333, + "acc_stderr": 0.03928297078179663, + "acc_norm": 0.49693251533742333, + "acc_norm_stderr": 0.03928297078179663 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6018518518518519, + "acc_stderr": 0.027237415094592488, + "acc_norm": 0.6018518518518519, + "acc_norm_stderr": 0.027237415094592488 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7046632124352331, + "acc_stderr": 0.03292296639155141, + "acc_norm": 0.7046632124352331, + "acc_norm_stderr": 0.03292296639155141 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3684210526315789, + "acc_stderr": 0.04537815354939391, + "acc_norm": 0.3684210526315789, + "acc_norm_stderr": 0.04537815354939391 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6238532110091743, + "acc_stderr": 0.02076923196820508, + "acc_norm": 0.6238532110091743, + "acc_norm_stderr": 0.02076923196820508 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.29365079365079366, + "acc_stderr": 0.04073524322147124, + "acc_norm": 0.29365079365079366, + "acc_norm_stderr": 0.04073524322147124 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5588235294117647, + "acc_stderr": 0.02843109544417664, + "acc_norm": 0.5588235294117647, + "acc_norm_stderr": 0.02843109544417664 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.58, + "acc_stderr": 0.04960449637488583, + "acc_norm": 0.58, + "acc_norm_stderr": 0.04960449637488583 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6694214876033058, + "acc_stderr": 0.04294340845212094, + "acc_norm": 0.6694214876033058, + "acc_norm_stderr": 0.04294340845212094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 
0.5394736842105263, + "acc_stderr": 0.04056242252249034, + "acc_norm": 0.5394736842105263, + "acc_norm_stderr": 0.04056242252249034 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.46895424836601307, + "acc_stderr": 0.020188804456361887, + "acc_norm": 0.46895424836601307, + "acc_norm_stderr": 0.020188804456361887 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.39361702127659576, + "acc_stderr": 0.029144544781596154, + "acc_norm": 0.39361702127659576, + "acc_norm_stderr": 0.029144544781596154 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3392857142857143, + "acc_stderr": 0.04493949068613539, + "acc_norm": 0.3392857142857143, + "acc_norm_stderr": 0.04493949068613539 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5462962962962963, + "acc_stderr": 0.033953227263757976, + "acc_norm": 0.5462962962962963, + "acc_norm_stderr": 0.033953227263757976 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.25921787709497207, + "acc_stderr": 0.014655780837497726, + "acc_norm": 0.25921787709497207, + "acc_norm_stderr": 0.014655780837497726 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.62, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.62, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5110294117647058, + "acc_stderr": 0.030365446477275675, + "acc_norm": 0.5110294117647058, + "acc_norm_stderr": 0.030365446477275675 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6163265306122448, + "acc_stderr": 0.031130880396235946, + "acc_norm": 0.6163265306122448, + "acc_norm_stderr": 0.031130880396235946 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.70042194092827, + "acc_stderr": 0.029818024749753095, + "acc_norm": 0.70042194092827, + 
"acc_norm_stderr": 0.029818024749753095 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.39504563233376794, + "acc_stderr": 0.012485727813251562, + "acc_norm": 0.39504563233376794, + "acc_norm_stderr": 0.012485727813251562 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6029411764705882, + "acc_stderr": 0.03434131164719129, + "acc_norm": 0.6029411764705882, + "acc_norm_stderr": 0.03434131164719129 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6181818181818182, + "acc_stderr": 0.03793713171165633, + "acc_norm": 0.6181818181818182, + "acc_norm_stderr": 0.03793713171165633 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2741738066095471, + "mc1_stderr": 0.015616518497219371, + "mc2": 0.4298875800555882, + "mc2_stderr": 0.015553207955178432 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.46635182998819363, + "acc_stderr": 0.017151384117131872, + "acc_norm": 0.538370720188902, + "acc_norm_stderr": 0.017139660221845553 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + 
"harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "PracticeLLM/Twice-KoSOLAR-16.1B-test", + "model_sha": "bb2523b46fd43ed82852d295418c5243e3666d15", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} 
\ No newline at end of file diff --git a/Puluming/AISquare-Instruct-llama2-koen-13b-v0.9.15/result_2023-12-04 03:30:04.json b/Puluming/AISquare-Instruct-llama2-koen-13b-v0.9.15/result_2023-12-04 03:30:04.json new file mode 100644 index 0000000000000000000000000000000000000000..b39b2e49527f6c0f6d763a976e3e3ac799b82a58 --- /dev/null +++ b/Puluming/AISquare-Instruct-llama2-koen-13b-v0.9.15/result_2023-12-04 03:30:04.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3984641638225256, + "acc_stderr": 0.014306946052735567, + "acc_norm": 0.4539249146757679, + "acc_norm_stderr": 0.014549221105171864 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4246166102370046, + "acc_stderr": 0.004932745013072715, + "acc_norm": 0.5688109938259311, + "acc_norm_stderr": 0.004942302768002102 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.47953216374269003, + "acc_stderr": 0.0383161053282193, + "acc_norm": 0.47953216374269003, + "acc_norm_stderr": 0.0383161053282193 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5145631067961165, + "acc_stderr": 0.04948637324026637, + "acc_norm": 0.5145631067961165, + "acc_norm_stderr": 0.04948637324026637 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5530012771392082, + "acc_stderr": 0.017779225233394223, + "acc_norm": 0.5530012771392082, + "acc_norm_stderr": 0.017779225233394223 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4, + "acc_stderr": 0.04232073695151589, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04232073695151589 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206824, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206824 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.42127659574468085, + "acc_stderr": 0.03227834510146268, + "acc_norm": 0.42127659574468085, + "acc_norm_stderr": 0.03227834510146268 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.40963855421686746, + "acc_stderr": 0.038284011150790206, + "acc_norm": 
0.40963855421686746, + "acc_norm_stderr": 0.038284011150790206 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.49517684887459806, + "acc_stderr": 0.02839677044411129, + "acc_norm": 0.49517684887459806, + "acc_norm_stderr": 0.02839677044411129 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5560538116591929, + "acc_stderr": 0.03334625674242728, + "acc_norm": 0.5560538116591929, + "acc_norm_stderr": 0.03334625674242728 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.45038167938931295, + "acc_stderr": 0.04363643698524779, + "acc_norm": 0.45038167938931295, + "acc_norm_stderr": 0.04363643698524779 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5606060606060606, + "acc_stderr": 0.03536085947529481, + "acc_norm": 0.5606060606060606, + "acc_norm_stderr": 0.03536085947529481 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3724137931034483, + "acc_stderr": 0.0402873153294756, + "acc_norm": 0.3724137931034483, + "acc_norm_stderr": 0.0402873153294756 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.04280105837364395, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.04280105837364395 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.032252942323996406, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.032252942323996406 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4153846153846154, + "acc_stderr": 0.024985354923102308, + "acc_norm": 0.4153846153846154, + "acc_norm_stderr": 0.024985354923102308 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 
0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5185185185185185, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.5185185185185185, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4236453201970443, + "acc_stderr": 0.03476725747649037, + "acc_norm": 0.4236453201970443, + "acc_norm_stderr": 0.03476725747649037 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.46774193548387094, + "acc_stderr": 0.028384747788813326, + "acc_norm": 0.46774193548387094, + "acc_norm_stderr": 0.028384747788813326 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6324786324786325, + "acc_stderr": 0.031585391577456365, + "acc_norm": 0.6324786324786325, + "acc_norm_stderr": 0.031585391577456365 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4339622641509434, + "acc_stderr": 0.0305032920133426, + "acc_norm": 0.4339622641509434, + "acc_norm_stderr": 0.0305032920133426 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.0478833976870286, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.0478833976870286 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2740740740740741, + "acc_stderr": 0.027195934804085626, + "acc_norm": 0.2740740740740741, + "acc_norm_stderr": 0.027195934804085626 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.037101857261199946, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.037101857261199946 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.582089552238806, + "acc_stderr": 0.034875586404620636, + "acc_norm": 0.582089552238806, + "acc_norm_stderr": 0.034875586404620636 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3930635838150289, + "acc_stderr": 0.03724249595817731, + "acc_norm": 0.3930635838150289, + "acc_norm_stderr": 0.03724249595817731 + }, + 
"harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.26455026455026454, + "acc_stderr": 0.022717467897708617, + "acc_norm": 0.26455026455026454, + "acc_norm_stderr": 0.022717467897708617 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.375, + "acc_stderr": 0.04048439222695598, + "acc_norm": 0.375, + "acc_norm_stderr": 0.04048439222695598 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.62, + "acc_stderr": 0.04878317312145634, + "acc_norm": 0.62, + "acc_norm_stderr": 0.04878317312145634 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4653179190751445, + "acc_stderr": 0.026854257928258886, + "acc_norm": 0.4653179190751445, + "acc_norm_stderr": 0.026854257928258886 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4785276073619632, + "acc_stderr": 0.0392474687675113, + "acc_norm": 0.4785276073619632, + "acc_norm_stderr": 0.0392474687675113 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.027777777777777804, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.027777777777777804 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5077720207253886, + "acc_stderr": 0.03608003225569654, + "acc_norm": 0.5077720207253886, + "acc_norm_stderr": 0.03608003225569654 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2982456140350877, + "acc_stderr": 0.04303684033537317, + "acc_norm": 0.2982456140350877, + "acc_norm_stderr": 0.04303684033537317 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5559633027522936, + "acc_stderr": 0.021302621211654518, + "acc_norm": 0.5559633027522936, + "acc_norm_stderr": 0.021302621211654518 + }, + 
"harness|ko_mmlu_formal_logic|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.04134913018303316, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.04134913018303316 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3758169934640523, + "acc_stderr": 0.027732834353363947, + "acc_norm": 0.3758169934640523, + "acc_norm_stderr": 0.027732834353363947 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6198347107438017, + "acc_stderr": 0.04431324501968431, + "acc_norm": 0.6198347107438017, + "acc_norm_stderr": 0.04431324501968431 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40131578947368424, + "acc_stderr": 0.039889037033362836, + "acc_norm": 0.40131578947368424, + "acc_norm_stderr": 0.039889037033362836 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3562091503267974, + "acc_stderr": 0.019373332420724507, + "acc_norm": 0.3562091503267974, + "acc_norm_stderr": 0.019373332420724507 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3191489361702128, + "acc_stderr": 0.027807990141320196, + "acc_norm": 0.3191489361702128, + "acc_norm_stderr": 0.027807990141320196 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.23214285714285715, + "acc_stderr": 0.04007341809755805, + "acc_norm": 0.23214285714285715, + "acc_norm_stderr": 0.04007341809755805 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.33796296296296297, + "acc_stderr": 0.032259413526312945, + "acc_norm": 0.33796296296296297, + "acc_norm_stderr": 0.032259413526312945 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + 
"acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4264705882352941, + "acc_stderr": 0.030042615832714847, + "acc_norm": 0.4264705882352941, + "acc_norm_stderr": 0.030042615832714847 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4448979591836735, + "acc_stderr": 0.031814251181977865, + "acc_norm": 0.4448979591836735, + "acc_norm_stderr": 0.031814251181977865 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6075949367088608, + "acc_stderr": 0.0317847187456473, + "acc_norm": 0.6075949367088608, + "acc_norm_stderr": 0.0317847187456473 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.333116036505867, + "acc_stderr": 0.01203793045151205, + "acc_norm": 0.333116036505867, + "acc_norm_stderr": 0.01203793045151205 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5, + "acc_stderr": 0.03509312031717982, + "acc_norm": 0.5, + "acc_norm_stderr": 0.03509312031717982 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5272727272727272, + "acc_stderr": 0.03898531605579418, + "acc_norm": 0.5272727272727272, + "acc_norm_stderr": 0.03898531605579418 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.28886168910648713, + "mc1_stderr": 0.0158663464013843, + "mc2": 0.4683667146519244, + "mc2_stderr": 0.014981879654024812 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4805194805194805, + "acc_stderr": 0.01717730199234255, + "acc_norm": 0.5749704840613932, + "acc_norm_stderr": 0.016996016308362887 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + 
"harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + 
"harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Puluming/AISquare-Instruct-llama2-koen-13b-v0.9.15", + "model_sha": "db132e6067a6392c99b4bf6d7afda85c7d5f78a8", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Puluming/AISquare-Instruct-llama2-koen-13b-v0.9.18/result_2023-12-07 05:18:25.json b/Puluming/AISquare-Instruct-llama2-koen-13b-v0.9.18/result_2023-12-07 05:18:25.json new file mode 100644 index 0000000000000000000000000000000000000000..92d2de8c19ae452f31c69a7774271cfcda797f81 --- /dev/null +++ b/Puluming/AISquare-Instruct-llama2-koen-13b-v0.9.18/result_2023-12-07 05:18:25.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3967576791808874, + "acc_stderr": 0.014296513020180647, + "acc_norm": 0.4590443686006826, + "acc_norm_stderr": 0.014562291073601226 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4246166102370046, + "acc_stderr": 0.004932745013072719, + "acc_norm": 0.571400119498108, + "acc_norm_stderr": 0.004938643787869549 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.45614035087719296, + "acc_stderr": 0.03820042586602966, + "acc_norm": 0.45614035087719296, + "acc_norm_stderr": 0.03820042586602966 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5145631067961165, + "acc_stderr": 0.04948637324026637, + "acc_norm": 0.5145631067961165, + "acc_norm_stderr": 0.04948637324026637 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5478927203065134, + 
"acc_stderr": 0.017797751493865636, + "acc_norm": 0.5478927203065134, + "acc_norm_stderr": 0.017797751493865636 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.43703703703703706, + "acc_stderr": 0.04284958639753399, + "acc_norm": 0.43703703703703706, + "acc_norm_stderr": 0.04284958639753399 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4127659574468085, + "acc_stderr": 0.03218471141400351, + "acc_norm": 0.4127659574468085, + "acc_norm_stderr": 0.03218471141400351 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42168674698795183, + "acc_stderr": 0.038444531817709175, + "acc_norm": 0.42168674698795183, + "acc_norm_stderr": 0.038444531817709175 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4919614147909968, + "acc_stderr": 0.028394421370984545, + "acc_norm": 0.4919614147909968, + "acc_norm_stderr": 0.028394421370984545 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5560538116591929, + "acc_stderr": 0.03334625674242728, + "acc_norm": 0.5560538116591929, + "acc_norm_stderr": 0.03334625674242728 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48854961832061067, + "acc_stderr": 0.04384140024078016, + "acc_norm": 0.48854961832061067, + "acc_norm_stderr": 0.04384140024078016 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.44, + "acc_stderr": 0.0498887651569859, + "acc_norm": 0.44, + "acc_norm_stderr": 0.0498887651569859 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5757575757575758, + "acc_stderr": 0.035212249088415866, + "acc_norm": 0.5757575757575758, + "acc_norm_stderr": 0.035212249088415866 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.43448275862068964, + "acc_stderr": 0.04130740879555497, + "acc_norm": 0.43448275862068964, + "acc_norm_stderr": 0.04130740879555497 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 
0.22549019607843138, + "acc_stderr": 0.041583075330832865, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.041583075330832865 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.46218487394957986, + "acc_stderr": 0.032385469487589795, + "acc_norm": 0.46218487394957986, + "acc_norm_stderr": 0.032385469487589795 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4128205128205128, + "acc_stderr": 0.024962683564331827, + "acc_norm": 0.4128205128205128, + "acc_norm_stderr": 0.024962683564331827 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4039408866995074, + "acc_stderr": 0.0345245390382204, + "acc_norm": 0.4039408866995074, + "acc_norm_stderr": 0.0345245390382204 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.44193548387096776, + "acc_stderr": 0.02825155790684973, + "acc_norm": 0.44193548387096776, + "acc_norm_stderr": 0.02825155790684973 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6324786324786325, + "acc_stderr": 0.031585391577456365, + "acc_norm": 0.6324786324786325, + "acc_norm_stderr": 0.031585391577456365 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.43018867924528303, + "acc_stderr": 0.030471445867183238, + "acc_norm": 0.43018867924528303, + "acc_norm_stderr": 0.030471445867183238 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5272727272727272, + "acc_stderr": 0.0478200179138006, + "acc_norm": 0.5272727272727272, + "acc_norm_stderr": 0.0478200179138006 + }, + 
"harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.026719240783712177, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.026719240783712177 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + "acc_stderr": 0.03734535676787198, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.03734535676787198 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6019900497512438, + "acc_stderr": 0.034611994290400135, + "acc_norm": 0.6019900497512438, + "acc_norm_stderr": 0.034611994290400135 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3815028901734104, + "acc_stderr": 0.03703851193099521, + "acc_norm": 0.3815028901734104, + "acc_norm_stderr": 0.03703851193099521 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2566137566137566, + "acc_stderr": 0.022494510767503154, + "acc_norm": 0.2566137566137566, + "acc_norm_stderr": 0.022494510767503154 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4097222222222222, + "acc_stderr": 0.04112490974670787, + "acc_norm": 0.4097222222222222, + "acc_norm_stderr": 0.04112490974670787 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.35, + "acc_stderr": 0.04793724854411021, + "acc_norm": 0.35, + "acc_norm_stderr": 0.04793724854411021 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.63, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.63, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4797687861271676, + "acc_stderr": 0.026897049996382875, + "acc_norm": 0.4797687861271676, + "acc_norm_stderr": 0.026897049996382875 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.49693251533742333, + "acc_stderr": 0.03928297078179662, + "acc_norm": 0.49693251533742333, + "acc_norm_stderr": 0.03928297078179662 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.49382716049382713, + "acc_stderr": 0.027818623962583295, + "acc_norm": 0.49382716049382713, + "acc_norm_stderr": 
0.027818623962583295 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5233160621761658, + "acc_stderr": 0.03604513672442202, + "acc_norm": 0.5233160621761658, + "acc_norm_stderr": 0.03604513672442202 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2894736842105263, + "acc_stderr": 0.04266339443159395, + "acc_norm": 0.2894736842105263, + "acc_norm_stderr": 0.04266339443159395 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5853211009174312, + "acc_stderr": 0.021122903208602585, + "acc_norm": 0.5853211009174312, + "acc_norm_stderr": 0.021122903208602585 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3412698412698413, + "acc_stderr": 0.04240799327574924, + "acc_norm": 0.3412698412698413, + "acc_norm_stderr": 0.04240799327574924 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.39215686274509803, + "acc_stderr": 0.027956046165424516, + "acc_norm": 0.39215686274509803, + "acc_norm_stderr": 0.027956046165424516 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6033057851239669, + "acc_stderr": 0.044658697805310094, + "acc_norm": 0.6033057851239669, + "acc_norm_stderr": 0.044658697805310094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.39473684210526316, + "acc_stderr": 0.039777499346220734, + "acc_norm": 0.39473684210526316, + "acc_norm_stderr": 0.039777499346220734 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.380718954248366, + "acc_stderr": 0.01964380155792481, + "acc_norm": 0.380718954248366, + "acc_norm_stderr": 0.01964380155792481 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.02812163604063989, + 
"acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.02812163604063989 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.24107142857142858, + "acc_stderr": 0.040598672469526864, + "acc_norm": 0.24107142857142858, + "acc_norm_stderr": 0.040598672469526864 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.0316746870682898, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.0316746870682898 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4264705882352941, + "acc_stderr": 0.030042615832714857, + "acc_norm": 0.4264705882352941, + "acc_norm_stderr": 0.030042615832714857 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.49795918367346936, + "acc_stderr": 0.0320089533497105, + "acc_norm": 0.49795918367346936, + "acc_norm_stderr": 0.0320089533497105 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6497890295358649, + "acc_stderr": 0.031052391937584346, + "acc_norm": 0.6497890295358649, + "acc_norm_stderr": 0.031052391937584346 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.33376792698826596, + "acc_stderr": 0.012043812655846147, + "acc_norm": 0.33376792698826596, + "acc_norm_stderr": 0.012043812655846147 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5049019607843137, + "acc_stderr": 0.03509143375606786, + "acc_norm": 0.5049019607843137, + "acc_norm_stderr": 0.03509143375606786 + }, + 
"harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5212121212121212, + "acc_stderr": 0.03900828913737301, + "acc_norm": 0.5212121212121212, + "acc_norm_stderr": 0.03900828913737301 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.28151774785801714, + "mc1_stderr": 0.015744027248256055, + "mc2": 0.4544067221641174, + "mc2_stderr": 0.014927896908949237 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4805194805194805, + "acc_stderr": 0.01717730199234255, + "acc_norm": 0.5914994096812278, + "acc_norm_stderr": 0.016900062879427122 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + 
"harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Puluming/AISquare-Instruct-llama2-koen-13b-v0.9.18", + "model_sha": "be58129e9338fbdc42bfc803860d4308f835cd6e", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Puluming/AISquare-Instruct-llama2-koen-13b-v0.9.2/result_2023-11-29 11:13:19.json b/Puluming/AISquare-Instruct-llama2-koen-13b-v0.9.2/result_2023-11-29 11:13:19.json new file mode 100644 index 0000000000000000000000000000000000000000..0b11b5f1ecf574344bbf93c4545394f7ead73402 --- /dev/null +++ b/Puluming/AISquare-Instruct-llama2-koen-13b-v0.9.2/result_2023-11-29 11:13:19.json 
@@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.371160409556314, + "acc_stderr": 0.014117971901142818, + "acc_norm": 0.4496587030716723, + "acc_norm_stderr": 0.014537144444284738 + }, + "harness|ko_hellaswag|10": { + "acc": 0.41884086835291773, + "acc_stderr": 0.004923609207861539, + "acc_norm": 0.5671181039633539, + "acc_norm_stderr": 0.004944620712318274 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.47953216374269003, + "acc_stderr": 0.038316105328219316, + "acc_norm": 0.47953216374269003, + "acc_norm_stderr": 0.038316105328219316 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4077669902912621, + "acc_stderr": 0.048657775704107696, + "acc_norm": 0.4077669902912621, + "acc_norm_stderr": 0.048657775704107696 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5095785440613027, + "acc_stderr": 0.01787668227534085, + "acc_norm": 0.5095785440613027, + "acc_norm_stderr": 0.01787668227534085 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.043097329010363554, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.043097329010363554 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.37446808510638296, + "acc_stderr": 0.031639106653672915, + "acc_norm": 0.37446808510638296, + "acc_norm_stderr": 0.031639106653672915 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39759036144578314, + "acc_stderr": 0.038099730845402184, + "acc_norm": 0.39759036144578314, + "acc_norm_stderr": 0.038099730845402184 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4565916398713826, + "acc_stderr": 0.028290869054197604, + "acc_norm": 0.4565916398713826, + "acc_norm_stderr": 0.028290869054197604 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5112107623318386, + "acc_stderr": 0.033549366530984746, + "acc_norm": 0.5112107623318386, 
+ "acc_norm_stderr": 0.033549366530984746 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4198473282442748, + "acc_stderr": 0.04328577215262971, + "acc_norm": 0.4198473282442748, + "acc_norm_stderr": 0.04328577215262971 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.4898989898989899, + "acc_stderr": 0.035616254886737454, + "acc_norm": 0.4898989898989899, + "acc_norm_stderr": 0.035616254886737454 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4, + "acc_stderr": 0.04082482904638628, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04082482904638628 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.18627450980392157, + "acc_stderr": 0.038739587141493524, + "acc_norm": 0.18627450980392157, + "acc_norm_stderr": 0.038739587141493524 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3907563025210084, + "acc_stderr": 0.031693802357129965, + "acc_norm": 0.3907563025210084, + "acc_norm_stderr": 0.031693802357129965 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4205128205128205, + "acc_stderr": 0.025028610276710855, + "acc_norm": 0.4205128205128205, + "acc_norm_stderr": 0.025028610276710855 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.048262172941398944, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.048262172941398944 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4088669950738916, + "acc_stderr": 0.034590588158832314, + "acc_norm": 0.4088669950738916, 
+ "acc_norm_stderr": 0.034590588158832314 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4129032258064516, + "acc_stderr": 0.02800913812540039, + "acc_norm": 0.4129032258064516, + "acc_norm_stderr": 0.02800913812540039 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6068376068376068, + "acc_stderr": 0.03199957924651048, + "acc_norm": 0.6068376068376068, + "acc_norm_stderr": 0.03199957924651048 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4188679245283019, + "acc_stderr": 0.030365050829115205, + "acc_norm": 0.4188679245283019, + "acc_norm_stderr": 0.030365050829115205 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.44545454545454544, + "acc_stderr": 0.047605488214603246, + "acc_norm": 0.44545454545454544, + "acc_norm_stderr": 0.047605488214603246 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.02696242432507382, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.02696242432507382 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.271523178807947, + "acc_stderr": 0.03631329803969653, + "acc_norm": 0.271523178807947, + "acc_norm_stderr": 0.03631329803969653 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5323383084577115, + "acc_stderr": 0.035281314729336065, + "acc_norm": 0.5323383084577115, + "acc_norm_stderr": 0.035281314729336065 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3583815028901734, + "acc_stderr": 0.0365634365335316, + "acc_norm": 0.3583815028901734, + "acc_norm_stderr": 0.0365634365335316 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2751322751322751, + "acc_stderr": 0.02300008685906864, + "acc_norm": 0.2751322751322751, + "acc_norm_stderr": 0.02300008685906864 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3680555555555556, + "acc_stderr": 0.04032999053960719, + "acc_norm": 0.3680555555555556, + "acc_norm_stderr": 0.04032999053960719 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.34, + 
"acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.63, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.63, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.45375722543352603, + "acc_stderr": 0.026803720583206174, + "acc_norm": 0.45375722543352603, + "acc_norm_stderr": 0.026803720583206174 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4294478527607362, + "acc_stderr": 0.038890666191127216, + "acc_norm": 0.4294478527607362, + "acc_norm_stderr": 0.038890666191127216 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.42901234567901236, + "acc_stderr": 0.027538925613470863, + "acc_norm": 0.42901234567901236, + "acc_norm_stderr": 0.027538925613470863 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.45595854922279794, + "acc_stderr": 0.03594413711272438, + "acc_norm": 0.45595854922279794, + "acc_norm_stderr": 0.03594413711272438 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.03999423879281337, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.03999423879281337 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.4972477064220184, + "acc_stderr": 0.021436998359765317, + "acc_norm": 0.4972477064220184, + "acc_norm_stderr": 0.021436998359765317 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.24603174603174602, + "acc_stderr": 0.03852273364924314, + "acc_norm": 0.24603174603174602, + "acc_norm_stderr": 0.03852273364924314 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4019607843137255, + "acc_stderr": 0.028074158947600666, + "acc_norm": 0.4019607843137255, + "acc_norm_stderr": 0.028074158947600666 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 
0.43, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6115702479338843, + "acc_stderr": 0.04449270350068382, + "acc_norm": 0.6115702479338843, + "acc_norm_stderr": 0.04449270350068382 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.39473684210526316, + "acc_stderr": 0.039777499346220734, + "acc_norm": 0.39473684210526316, + "acc_norm_stderr": 0.039777499346220734 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3480392156862745, + "acc_stderr": 0.019270998708223974, + "acc_norm": 0.3480392156862745, + "acc_norm_stderr": 0.019270998708223974 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.30141843971631205, + "acc_stderr": 0.02737412888263115, + "acc_norm": 0.30141843971631205, + "acc_norm_stderr": 0.02737412888263115 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.22321428571428573, + "acc_stderr": 0.039523019677025116, + "acc_norm": 0.22321428571428573, + "acc_norm_stderr": 0.039523019677025116 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.27314814814814814, + "acc_stderr": 0.03038805130167812, + "acc_norm": 0.27314814814814814, + "acc_norm_stderr": 0.03038805130167812 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.41, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.34191176470588236, + "acc_stderr": 0.02881472242225417, + "acc_norm": 0.34191176470588236, + "acc_norm_stderr": 0.02881472242225417 + }, + 
"harness|ko_mmlu_security_studies|5": { + "acc": 0.47346938775510206, + "acc_stderr": 0.03196412734523272, + "acc_norm": 0.47346938775510206, + "acc_norm_stderr": 0.03196412734523272 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5443037974683544, + "acc_stderr": 0.032419206846933335, + "acc_norm": 0.5443037974683544, + "acc_norm_stderr": 0.032419206846933335 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3070404172099087, + "acc_stderr": 0.01178095911451378, + "acc_norm": 0.3070404172099087, + "acc_norm_stderr": 0.01178095911451378 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4264705882352941, + "acc_stderr": 0.034711579079534254, + "acc_norm": 0.4264705882352941, + "acc_norm_stderr": 0.034711579079534254 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.03903698647748441, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.03903698647748441 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2533659730722154, + "mc1_stderr": 0.01522589934082683, + "mc2": 0.39177380761625485, + "mc2_stderr": 0.014625221380747738 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4805194805194805, + "acc_stderr": 0.017177301992342558, + "acc_norm": 0.5584415584415584, + "acc_norm_stderr": 0.017072525875563103 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + 
"harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + 
"harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Puluming/AISquare-Instruct-llama2-koen-13b-v0.9.2", + "model_sha": "e61e6122ceca6995569c008901a3e1d4a0a58972", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Puluming/AISquare-Instruct-llama2-koen-13b-v0.9.20/result_2023-12-07 11:05:08.json b/Puluming/AISquare-Instruct-llama2-koen-13b-v0.9.20/result_2023-12-07 11:05:08.json new file mode 100644 index 0000000000000000000000000000000000000000..8486f8909fd6dbd1ef1116fb09517249fe30bbef --- /dev/null +++ b/Puluming/AISquare-Instruct-llama2-koen-13b-v0.9.20/result_2023-12-07 11:05:08.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.40955631399317405, + "acc_stderr": 0.014370358632472427, + "acc_norm": 0.4667235494880546, + "acc_norm_stderr": 0.014578995859605814 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4279028082055367, + "acc_stderr": 0.004937635112830286, + "acc_norm": 0.5790679147580163, + "acc_norm_stderr": 0.004926996830194243 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.45614035087719296, + "acc_stderr": 0.03820042586602966, + "acc_norm": 0.45614035087719296, + "acc_norm_stderr": 0.03820042586602966 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5533980582524272, + "acc_stderr": 0.04922424153458933, + "acc_norm": 0.5533980582524272, + "acc_norm_stderr": 0.04922424153458933 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.545338441890166, + "acc_stderr": 0.0178063045850526, + "acc_norm": 0.545338441890166, + "acc_norm_stderr": 0.0178063045850526 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45185185185185184, + "acc_stderr": 0.04299268905480863, + "acc_norm": 0.45185185185185184, + "acc_norm_stderr": 0.04299268905480863 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 
0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.42127659574468085, + "acc_stderr": 0.03227834510146267, + "acc_norm": 0.42127659574468085, + "acc_norm_stderr": 0.03227834510146267 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.43373493975903615, + "acc_stderr": 0.03858158940685515, + "acc_norm": 0.43373493975903615, + "acc_norm_stderr": 0.03858158940685515 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4887459807073955, + "acc_stderr": 0.028390897396863526, + "acc_norm": 0.4887459807073955, + "acc_norm_stderr": 0.028390897396863526 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5515695067264574, + "acc_stderr": 0.03337883736255098, + "acc_norm": 0.5515695067264574, + "acc_norm_stderr": 0.03337883736255098 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4580152671755725, + "acc_stderr": 0.04369802690578756, + "acc_norm": 0.4580152671755725, + "acc_norm_stderr": 0.04369802690578756 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5757575757575758, + "acc_stderr": 0.035212249088415866, + "acc_norm": 0.5757575757575758, + "acc_norm_stderr": 0.035212249088415866 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.04104269211806232, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.04104269211806232 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.04220773659171452, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.04220773659171452 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.46638655462184875, + "acc_stderr": 0.03240501447690071, + "acc_norm": 0.46638655462184875, + "acc_norm_stderr": 0.03240501447690071 + }, + 
"harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4205128205128205, + "acc_stderr": 0.025028610276710852, + "acc_norm": 0.4205128205128205, + "acc_norm_stderr": 0.025028610276710852 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5370370370370371, + "acc_stderr": 0.04820403072760627, + "acc_norm": 0.5370370370370371, + "acc_norm_stderr": 0.04820403072760627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39408866995073893, + "acc_stderr": 0.03438157967036545, + "acc_norm": 0.39408866995073893, + "acc_norm_stderr": 0.03438157967036545 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.44193548387096776, + "acc_stderr": 0.028251557906849734, + "acc_norm": 0.44193548387096776, + "acc_norm_stderr": 0.028251557906849734 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6452991452991453, + "acc_stderr": 0.031342504862454025, + "acc_norm": 0.6452991452991453, + "acc_norm_stderr": 0.031342504862454025 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4490566037735849, + "acc_stderr": 0.030612730713641092, + "acc_norm": 0.4490566037735849, + "acc_norm_stderr": 0.030612730713641092 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5545454545454546, + "acc_stderr": 0.047605488214603246, + "acc_norm": 0.5545454545454546, + "acc_norm_stderr": 0.047605488214603246 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.026962424325073828, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.026962424325073828 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.03780445850526732, + "acc_norm": 
0.31125827814569534, + "acc_norm_stderr": 0.03780445850526732 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6169154228855721, + "acc_stderr": 0.034375193373382504, + "acc_norm": 0.6169154228855721, + "acc_norm_stderr": 0.034375193373382504 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.37572254335260113, + "acc_stderr": 0.03692820767264867, + "acc_norm": 0.37572254335260113, + "acc_norm_stderr": 0.03692820767264867 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.25132275132275134, + "acc_stderr": 0.022340482339643898, + "acc_norm": 0.25132275132275134, + "acc_norm_stderr": 0.022340482339643898 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4097222222222222, + "acc_stderr": 0.04112490974670787, + "acc_norm": 0.4097222222222222, + "acc_norm_stderr": 0.04112490974670787 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.62, + "acc_stderr": 0.048783173121456344, + "acc_norm": 0.62, + "acc_norm_stderr": 0.048783173121456344 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.48554913294797686, + "acc_stderr": 0.026907849856282542, + "acc_norm": 0.48554913294797686, + "acc_norm_stderr": 0.026907849856282542 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.50920245398773, + "acc_stderr": 0.03927705600787443, + "acc_norm": 0.50920245398773, + "acc_norm_stderr": 0.03927705600787443 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.49382716049382713, + "acc_stderr": 0.027818623962583295, + "acc_norm": 0.49382716049382713, + "acc_norm_stderr": 0.027818623962583295 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5181347150259067, + "acc_stderr": 0.036060650018329185, 
+ "acc_norm": 0.5181347150259067, + "acc_norm_stderr": 0.036060650018329185 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2894736842105263, + "acc_stderr": 0.04266339443159394, + "acc_norm": 0.2894736842105263, + "acc_norm_stderr": 0.04266339443159394 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5834862385321101, + "acc_stderr": 0.021136376504030874, + "acc_norm": 0.5834862385321101, + "acc_norm_stderr": 0.021136376504030874 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3253968253968254, + "acc_stderr": 0.041905964388711366, + "acc_norm": 0.3253968253968254, + "acc_norm_stderr": 0.041905964388711366 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.39869281045751637, + "acc_stderr": 0.028036092273891776, + "acc_norm": 0.39869281045751637, + "acc_norm_stderr": 0.028036092273891776 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6446280991735537, + "acc_stderr": 0.0436923632657398, + "acc_norm": 0.6446280991735537, + "acc_norm_stderr": 0.0436923632657398 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40789473684210525, + "acc_stderr": 0.039993097127774734, + "acc_norm": 0.40789473684210525, + "acc_norm_stderr": 0.039993097127774734 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.019722058939618068, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.019722058939618068 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3191489361702128, + "acc_stderr": 0.027807990141320186, + "acc_norm": 0.3191489361702128, + "acc_norm_stderr": 0.027807990141320186 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.23214285714285715, + "acc_stderr": 0.04007341809755806, + "acc_norm": 0.23214285714285715, + "acc_norm_stderr": 0.04007341809755806 + }, + "harness|ko_mmlu_high_school_statistics|5": { + 
"acc": 0.3148148148148148, + "acc_stderr": 0.0316746870682898, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.0316746870682898 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3897058823529412, + "acc_stderr": 0.029624663581159685, + "acc_norm": 0.3897058823529412, + "acc_norm_stderr": 0.029624663581159685 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.49795918367346936, + "acc_stderr": 0.0320089533497105, + "acc_norm": 0.49795918367346936, + "acc_norm_stderr": 0.0320089533497105 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6582278481012658, + "acc_stderr": 0.030874537537553617, + "acc_norm": 0.6582278481012658, + "acc_norm_stderr": 0.030874537537553617 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3350717079530639, + "acc_stderr": 0.012055499471330371, + "acc_norm": 0.3350717079530639, + "acc_norm_stderr": 0.012055499471330371 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5294117647058824, + "acc_stderr": 0.03503235296367992, + "acc_norm": 0.5294117647058824, + "acc_norm_stderr": 0.03503235296367992 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5333333333333333, + "acc_stderr": 0.03895658065271847, + "acc_norm": 0.5333333333333333, + "acc_norm_stderr": 0.03895658065271847 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.28518971848225216, + "mc1_stderr": 0.015805827874454892, + "mc2": 0.46011374521819187, + "mc2_stderr": 
0.014998848301007965 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.47461629279811096, + "acc_stderr": 0.01716818720142925, + "acc_norm": 0.5726092089728453, + "acc_norm_stderr": 0.017008129844823156 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 
1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Puluming/AISquare-Instruct-llama2-koen-13b-v0.9.20", + "model_sha": "ea626222a55229e517e4c9f75ba9bbd64cd892a5", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/QuantumIntelligence/QI-mistral-7B-slerp/result_2024-04-11 04:45:52.json b/QuantumIntelligence/QI-mistral-7B-slerp/result_2024-04-11 04:45:52.json new file mode 100644 index 0000000000000000000000000000000000000000..152332276f11231f9bf93d847e3932e4ceded99e --- /dev/null +++ b/QuantumIntelligence/QI-mistral-7B-slerp/result_2024-04-11 04:45:52.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.363481228668942, + "acc_stderr": 0.014056207319068285, + "acc_norm": 0.41552901023890787, + "acc_norm_stderr": 0.014401366641216391 + }, + "harness|ko_hellaswag|10": { + "acc": 0.39145588528181635, + "acc_stderr": 0.004870785036708288, + "acc_norm": 0.5085640310695081, + "acc_norm_stderr": 
0.004989049430391295 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.49122807017543857, + "acc_stderr": 0.038342347441649924, + "acc_norm": 0.49122807017543857, + "acc_norm_stderr": 0.038342347441649924 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5825242718446602, + "acc_stderr": 0.048828405482122375, + "acc_norm": 0.5825242718446602, + "acc_norm_stderr": 0.048828405482122375 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.49169859514687103, + "acc_stderr": 0.017877498991072008, + "acc_norm": 0.49169859514687103, + "acc_norm_stderr": 0.017877498991072008 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.042446332383532286, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.042446332383532286 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39574468085106385, + "acc_stderr": 0.03196758697835362, + "acc_norm": 0.39574468085106385, + "acc_norm_stderr": 0.03196758697835362 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4036144578313253, + "acc_stderr": 0.03819486140758397, + "acc_norm": 0.4036144578313253, + "acc_norm_stderr": 0.03819486140758397 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4662379421221865, + "acc_stderr": 0.02833327710956279, + "acc_norm": 0.4662379421221865, + "acc_norm_stderr": 0.02833327710956279 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4663677130044843, + "acc_stderr": 0.033481800170603065, + "acc_norm": 0.4663677130044843, + "acc_norm_stderr": 0.033481800170603065 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4351145038167939, + "acc_stderr": 0.04348208051644858, + "acc_norm": 0.4351145038167939, + "acc_norm_stderr": 0.04348208051644858 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 
0.04852365870939099 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5858585858585859, + "acc_stderr": 0.03509438348879629, + "acc_norm": 0.5858585858585859, + "acc_norm_stderr": 0.03509438348879629 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.45517241379310347, + "acc_stderr": 0.04149886942192117, + "acc_norm": 0.45517241379310347, + "acc_norm_stderr": 0.04149886942192117 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.044405219061793275, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.044405219061793275 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5084033613445378, + "acc_stderr": 0.0324739027656967, + "acc_norm": 0.5084033613445378, + "acc_norm_stderr": 0.0324739027656967 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5076923076923077, + "acc_stderr": 0.025348006031534743, + "acc_norm": 0.5076923076923077, + "acc_norm_stderr": 0.025348006031534743 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5462962962962963, + "acc_stderr": 0.04812917324536823, + "acc_norm": 0.5462962962962963, + "acc_norm_stderr": 0.04812917324536823 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39901477832512317, + "acc_stderr": 0.03445487686264715, + "acc_norm": 0.39901477832512317, + "acc_norm_stderr": 0.03445487686264715 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4096774193548387, + "acc_stderr": 0.027976054915347354, + "acc_norm": 0.4096774193548387, + "acc_norm_stderr": 0.027976054915347354 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7393162393162394, + "acc_stderr": 0.02876034895652341, + 
"acc_norm": 0.7393162393162394, + "acc_norm_stderr": 0.02876034895652341 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.44528301886792454, + "acc_stderr": 0.030588052974270658, + "acc_norm": 0.44528301886792454, + "acc_norm_stderr": 0.030588052974270658 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.0478833976870286, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.0478833976870286 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3037037037037037, + "acc_stderr": 0.02803792996911499, + "acc_norm": 0.3037037037037037, + "acc_norm_stderr": 0.02803792996911499 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.25165562913907286, + "acc_stderr": 0.03543304234389985, + "acc_norm": 0.25165562913907286, + "acc_norm_stderr": 0.03543304234389985 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6019900497512438, + "acc_stderr": 0.034611994290400135, + "acc_norm": 0.6019900497512438, + "acc_norm_stderr": 0.034611994290400135 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3815028901734104, + "acc_stderr": 0.03703851193099521, + "acc_norm": 0.3815028901734104, + "acc_norm_stderr": 0.03703851193099521 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3968253968253968, + "acc_stderr": 0.02519710107424649, + "acc_norm": 0.3968253968253968, + "acc_norm_stderr": 0.02519710107424649 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3472222222222222, + "acc_stderr": 0.03981240543717861, + "acc_norm": 0.3472222222222222, + "acc_norm_stderr": 0.03981240543717861 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.51, + "acc_stderr": 0.050241839379569095, + "acc_norm": 0.51, + "acc_norm_stderr": 0.050241839379569095 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5115606936416185, + 
"acc_stderr": 0.02691189868637792, + "acc_norm": 0.5115606936416185, + "acc_norm_stderr": 0.02691189868637792 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5337423312883436, + "acc_stderr": 0.039194155450484096, + "acc_norm": 0.5337423312883436, + "acc_norm_stderr": 0.039194155450484096 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.45987654320987653, + "acc_stderr": 0.02773102275353928, + "acc_norm": 0.45987654320987653, + "acc_norm_stderr": 0.02773102275353928 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5077720207253886, + "acc_stderr": 0.03608003225569654, + "acc_norm": 0.5077720207253886, + "acc_norm_stderr": 0.03608003225569654 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2894736842105263, + "acc_stderr": 0.04266339443159394, + "acc_norm": 0.2894736842105263, + "acc_norm_stderr": 0.04266339443159394 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5321100917431193, + "acc_stderr": 0.021393071222680804, + "acc_norm": 0.5321100917431193, + "acc_norm_stderr": 0.021393071222680804 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4126984126984127, + "acc_stderr": 0.04403438954768177, + "acc_norm": 0.4126984126984127, + "acc_norm_stderr": 0.04403438954768177 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4934640522875817, + "acc_stderr": 0.028627470550556054, + "acc_norm": 0.4934640522875817, + "acc_norm_stderr": 0.028627470550556054 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6446280991735537, + "acc_stderr": 0.0436923632657398, + "acc_norm": 0.6446280991735537, + "acc_norm_stderr": 0.0436923632657398 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 
0.4473684210526316, + "acc_stderr": 0.04046336883978251, + "acc_norm": 0.4473684210526316, + "acc_norm_stderr": 0.04046336883978251 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.40032679738562094, + "acc_stderr": 0.01982184368827176, + "acc_norm": 0.40032679738562094, + "acc_norm_stderr": 0.01982184368827176 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3829787234042553, + "acc_stderr": 0.028999080904806178, + "acc_norm": 0.3829787234042553, + "acc_norm_stderr": 0.028999080904806178 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4375, + "acc_stderr": 0.04708567521880525, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.04708567521880525 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.39351851851851855, + "acc_stderr": 0.03331747876370312, + "acc_norm": 0.39351851851851855, + "acc_norm_stderr": 0.03331747876370312 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.25251396648044694, + "acc_stderr": 0.014530330201468641, + "acc_norm": 0.25251396648044694, + "acc_norm_stderr": 0.014530330201468641 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3161764705882353, + "acc_stderr": 0.028245687391462913, + "acc_norm": 0.3161764705882353, + "acc_norm_stderr": 0.028245687391462913 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5673469387755102, + "acc_stderr": 0.03171752824062665, + "acc_norm": 0.5673469387755102, + "acc_norm_stderr": 0.03171752824062665 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5991561181434599, + "acc_stderr": 0.03190080389473236, + "acc_norm": 0.5991561181434599, + "acc_norm_stderr": 0.03190080389473236 + 
}, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3324641460234681, + "acc_stderr": 0.012032022332260521, + "acc_norm": 0.3324641460234681, + "acc_norm_stderr": 0.012032022332260521 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4803921568627451, + "acc_stderr": 0.03506612560524866, + "acc_norm": 0.4803921568627451, + "acc_norm_stderr": 0.03506612560524866 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.03903698647748441, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.03903698647748441 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.35128518971848227, + "mc1_stderr": 0.0167113581635444, + "mc2": 0.532589840184244, + "mc2_stderr": 0.016038282113032338 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4380165289256198, + "acc_stderr": 0.01705775370216029, + "acc_norm": 0.45336481700118064, + "acc_norm_stderr": 0.01711541822522687 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + 
"harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "QuantumIntelligence/QI-mistral-7B-slerp", + "model_sha": "1a9a8379a7651644dbd6c9ae99dfba6ae8aeb4e0", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff 
--git a/QuantumIntelligence/QI-neural-chat-7B-ko-DPO/result_2024-04-11 23:50:16.json b/QuantumIntelligence/QI-neural-chat-7B-ko-DPO/result_2024-04-11 23:50:16.json new file mode 100644 index 0000000000000000000000000000000000000000..df4d3cf065bffa9d6f1d1ae40811c9ffdf796a91 --- /dev/null +++ b/QuantumIntelligence/QI-neural-chat-7B-ko-DPO/result_2024-04-11 23:50:16.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3430034129692833, + "acc_stderr": 0.01387242322371817, + "acc_norm": 0.3822525597269625, + "acc_norm_stderr": 0.014200454049979293 + }, + "harness|ko_hellaswag|10": { + "acc": 0.36227843059151565, + "acc_stderr": 0.004796763521045227, + "acc_norm": 0.4691296554471221, + "acc_norm_stderr": 0.004980262025472491 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.45614035087719296, + "acc_stderr": 0.03820042586602967, + "acc_norm": 0.45614035087719296, + "acc_norm_stderr": 0.03820042586602967 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5825242718446602, + "acc_stderr": 0.048828405482122375, + "acc_norm": 0.5825242718446602, + "acc_norm_stderr": 0.048828405482122375 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4508301404853129, + "acc_stderr": 0.017793297572699054, + "acc_norm": 0.4508301404853129, + "acc_norm_stderr": 0.017793297572699054 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37777777777777777, + "acc_stderr": 0.04188307537595853, + "acc_norm": 0.37777777777777777, + "acc_norm_stderr": 0.04188307537595853 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.34893617021276596, + "acc_stderr": 0.031158522131357787, + "acc_norm": 0.34893617021276596, + "acc_norm_stderr": 0.031158522131357787 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3614457831325301, + "acc_stderr": 0.03740059382029319, + "acc_norm": 0.3614457831325301, 
+ "acc_norm_stderr": 0.03740059382029319 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.42765273311897106, + "acc_stderr": 0.02809924077580956, + "acc_norm": 0.42765273311897106, + "acc_norm_stderr": 0.02809924077580956 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.36771300448430494, + "acc_stderr": 0.03236198350928276, + "acc_norm": 0.36771300448430494, + "acc_norm_stderr": 0.03236198350928276 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.40458015267175573, + "acc_stderr": 0.043046937953806645, + "acc_norm": 0.40458015267175573, + "acc_norm_stderr": 0.043046937953806645 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.494949494949495, + "acc_stderr": 0.035621707606254015, + "acc_norm": 0.494949494949495, + "acc_norm_stderr": 0.035621707606254015 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4413793103448276, + "acc_stderr": 0.04137931034482758, + "acc_norm": 0.4413793103448276, + "acc_norm_stderr": 0.04137931034482758 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.04220773659171453, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.04220773659171453 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5252100840336135, + "acc_stderr": 0.03243718055137411, + "acc_norm": 0.5252100840336135, + "acc_norm_stderr": 0.03243718055137411 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.45897435897435895, + "acc_stderr": 0.025265525491284295, + "acc_norm": 0.45897435897435895, + "acc_norm_stderr": 0.025265525491284295 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + 
"acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5092592592592593, + "acc_stderr": 0.04832853553437056, + "acc_norm": 0.5092592592592593, + "acc_norm_stderr": 0.04832853553437056 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.31527093596059114, + "acc_stderr": 0.03269080871970186, + "acc_norm": 0.31527093596059114, + "acc_norm_stderr": 0.03269080871970186 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4161290322580645, + "acc_stderr": 0.028040981380761543, + "acc_norm": 0.4161290322580645, + "acc_norm_stderr": 0.028040981380761543 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7435897435897436, + "acc_stderr": 0.02860595370200424, + "acc_norm": 0.7435897435897436, + "acc_norm_stderr": 0.02860595370200424 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.44528301886792454, + "acc_stderr": 0.030588052974270655, + "acc_norm": 0.44528301886792454, + "acc_norm_stderr": 0.030588052974270655 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5181818181818182, + "acc_stderr": 0.04785964010794916, + "acc_norm": 0.5181818181818182, + "acc_norm_stderr": 0.04785964010794916 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3111111111111111, + "acc_stderr": 0.02822644674968352, + "acc_norm": 0.3111111111111111, + "acc_norm_stderr": 0.02822644674968352 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3841059602649007, + "acc_stderr": 0.03971301814719197, + "acc_norm": 0.3841059602649007, + "acc_norm_stderr": 0.03971301814719197 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5671641791044776, + "acc_stderr": 0.0350349092367328, + "acc_norm": 0.5671641791044776, + "acc_norm_stderr": 0.0350349092367328 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.42196531791907516, + "acc_stderr": 0.037657466938651483, + "acc_norm": 0.42196531791907516, + "acc_norm_stderr": 0.037657466938651483 + }, + 
"harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.024677862841332783, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.024677862841332783 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3680555555555556, + "acc_stderr": 0.040329990539607195, + "acc_norm": 0.3680555555555556, + "acc_norm_stderr": 0.040329990539607195 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956913, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956913 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.476878612716763, + "acc_stderr": 0.026890297881303125, + "acc_norm": 0.476878612716763, + "acc_norm_stderr": 0.026890297881303125 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4601226993865031, + "acc_stderr": 0.03915857291436971, + "acc_norm": 0.4601226993865031, + "acc_norm_stderr": 0.03915857291436971 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4537037037037037, + "acc_stderr": 0.027701228468542602, + "acc_norm": 0.4537037037037037, + "acc_norm_stderr": 0.027701228468542602 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.49222797927461137, + "acc_stderr": 0.03608003225569654, + "acc_norm": 0.49222797927461137, + "acc_norm_stderr": 0.03608003225569654 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2894736842105263, + "acc_stderr": 0.04266339443159394, + "acc_norm": 0.2894736842105263, + "acc_norm_stderr": 0.04266339443159394 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5045871559633027, + "acc_stderr": 0.02143642095552942, + "acc_norm": 0.5045871559633027, + "acc_norm_stderr": 
0.02143642095552942 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.04360314860077459, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.04360314860077459 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4738562091503268, + "acc_stderr": 0.028590752958852394, + "acc_norm": 0.4738562091503268, + "acc_norm_stderr": 0.028590752958852394 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5950413223140496, + "acc_stderr": 0.04481137755942469, + "acc_norm": 0.5950413223140496, + "acc_norm_stderr": 0.04481137755942469 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4276315789473684, + "acc_stderr": 0.040260970832965565, + "acc_norm": 0.4276315789473684, + "acc_norm_stderr": 0.040260970832965565 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3562091503267974, + "acc_stderr": 0.0193733324207245, + "acc_norm": 0.3562091503267974, + "acc_norm_stderr": 0.0193733324207245 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.30851063829787234, + "acc_stderr": 0.027553366165101376, + "acc_norm": 0.30851063829787234, + "acc_norm_stderr": 0.027553366165101376 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3482142857142857, + "acc_stderr": 0.045218299028335865, + "acc_norm": 0.3482142857142857, + "acc_norm_stderr": 0.045218299028335865 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.39814814814814814, + "acc_stderr": 0.033384734032074016, + "acc_norm": 0.39814814814814814, + "acc_norm_stderr": 0.033384734032074016 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.3340782122905028, + "acc_stderr": 0.015774911422381632, + "acc_norm": 0.3340782122905028, + "acc_norm_stderr": 0.015774911422381632 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + 
"acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.63, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.63, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.36764705882352944, + "acc_stderr": 0.02928941340940319, + "acc_norm": 0.36764705882352944, + "acc_norm_stderr": 0.02928941340940319 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5510204081632653, + "acc_stderr": 0.03184213866687578, + "acc_norm": 0.5510204081632653, + "acc_norm_stderr": 0.03184213866687578 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.4978902953586498, + "acc_stderr": 0.032546938018020076, + "acc_norm": 0.4978902953586498, + "acc_norm_stderr": 0.032546938018020076 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3161668839634941, + "acc_stderr": 0.01187578089438658, + "acc_norm": 0.3161668839634941, + "acc_norm_stderr": 0.01187578089438658 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.44607843137254904, + "acc_stderr": 0.03488845451304974, + "acc_norm": 0.44607843137254904, + "acc_norm_stderr": 0.03488845451304974 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3939393939393939, + "acc_stderr": 0.03815494308688929, + "acc_norm": 0.3939393939393939, + "acc_norm_stderr": 0.03815494308688929 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3623011015911873, + "mc1_stderr": 0.01682664689726226, + "mc2": 0.5291842386279259, + "mc2_stderr": 0.01594543639067517 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3400236127508855, + "acc_stderr": 0.01628671722073768, + "acc_norm": 0.3565525383707202, + "acc_norm_stderr": 0.016467706981527448 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 
1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + 
"harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "QuantumIntelligence/QI-neural-chat-7B-ko-DPO", + "model_sha": "f3fb87056e1937507f8c343da4ace31a1fbaaf68", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Qwen/Qwen1.5-14B-Chat/result_2024-05-13 18:15:27.json b/Qwen/Qwen1.5-14B-Chat/result_2024-05-13 18:15:27.json new file mode 100644 index 0000000000000000000000000000000000000000..dfc84ca4a1fc3bdcec9f78468e874424c7c89bdb --- /dev/null +++ b/Qwen/Qwen1.5-14B-Chat/result_2024-05-13 18:15:27.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.37457337883959047, + "acc_stderr": 0.014144193471893452, + "acc_norm": 0.41638225255972694, + "acc_norm_stderr": 0.014405618279436172 + }, + "harness|ko_hellaswag|10": { + "acc": 0.38707428799044014, + "acc_stderr": 0.004860854240821965, + "acc_norm": 0.5027882891854212, + "acc_norm_stderr": 0.004989703824167097 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6257309941520468, + "acc_stderr": 0.03711601185389481, + "acc_norm": 0.6257309941520468, + "acc_norm_stderr": 0.03711601185389481 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6796116504854369, + "acc_stderr": 0.04620284082280041, + "acc_norm": 0.6796116504854369, + "acc_norm_stderr": 0.04620284082280041 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6130268199233716, + "acc_stderr": 0.017417138059440125, + 
"acc_norm": 0.6130268199233716, + "acc_norm_stderr": 0.017417138059440125 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34814814814814815, + "acc_stderr": 0.041153246103369526, + "acc_norm": 0.34814814814814815, + "acc_norm_stderr": 0.041153246103369526 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.5404255319148936, + "acc_stderr": 0.032579014820998335, + "acc_norm": 0.5404255319148936, + "acc_norm_stderr": 0.032579014820998335 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4036144578313253, + "acc_stderr": 0.03819486140758396, + "acc_norm": 0.4036144578313253, + "acc_norm_stderr": 0.03819486140758396 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5594855305466238, + "acc_stderr": 0.028196400574197422, + "acc_norm": 0.5594855305466238, + "acc_norm_stderr": 0.028196400574197422 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.515695067264574, + "acc_stderr": 0.0335412657542081, + "acc_norm": 0.515695067264574, + "acc_norm_stderr": 0.0335412657542081 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5801526717557252, + "acc_stderr": 0.04328577215262971, + "acc_norm": 0.5801526717557252, + "acc_norm_stderr": 0.04328577215262971 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.702020202020202, + "acc_stderr": 0.03258630383836556, + "acc_norm": 0.702020202020202, + "acc_norm_stderr": 0.03258630383836556 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5517241379310345, + "acc_stderr": 0.04144311810878152, + "acc_norm": 0.5517241379310345, + "acc_norm_stderr": 0.04144311810878152 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.37254901960784315, + "acc_stderr": 0.04810840148082636, 
+ "acc_norm": 0.37254901960784315, + "acc_norm_stderr": 0.04810840148082636 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5798319327731093, + "acc_stderr": 0.032061837832361516, + "acc_norm": 0.5798319327731093, + "acc_norm_stderr": 0.032061837832361516 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5717948717948718, + "acc_stderr": 0.025088301454694824, + "acc_norm": 0.5717948717948718, + "acc_norm_stderr": 0.025088301454694824 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.63, + "acc_stderr": 0.048523658709390974, + "acc_norm": 0.63, + "acc_norm_stderr": 0.048523658709390974 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6388888888888888, + "acc_stderr": 0.04643454608906275, + "acc_norm": 0.6388888888888888, + "acc_norm_stderr": 0.04643454608906275 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.03465304488406795, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.03465304488406795 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.567741935483871, + "acc_stderr": 0.02818173972001941, + "acc_norm": 0.567741935483871, + "acc_norm_stderr": 0.02818173972001941 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7948717948717948, + "acc_stderr": 0.026453508054040332, + "acc_norm": 0.7948717948717948, + "acc_norm_stderr": 0.026453508054040332 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5886792452830188, + "acc_stderr": 0.030285009259009787, + "acc_norm": 0.5886792452830188, + "acc_norm_stderr": 0.030285009259009787 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.04607582090719976, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.04607582090719976 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 
0.37407407407407406, + "acc_stderr": 0.029502861128955293, + "acc_norm": 0.37407407407407406, + "acc_norm_stderr": 0.029502861128955293 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3708609271523179, + "acc_stderr": 0.039439666991836285, + "acc_norm": 0.3708609271523179, + "acc_norm_stderr": 0.039439666991836285 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.03333333333333333, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.03333333333333333 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5375722543352601, + "acc_stderr": 0.03801685104524458, + "acc_norm": 0.5375722543352601, + "acc_norm_stderr": 0.03801685104524458 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.4973544973544973, + "acc_stderr": 0.02575094967813038, + "acc_norm": 0.4973544973544973, + "acc_norm_stderr": 0.02575094967813038 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4236111111111111, + "acc_stderr": 0.04132125019723368, + "acc_norm": 0.4236111111111111, + "acc_norm_stderr": 0.04132125019723368 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.65, + "acc_stderr": 0.04793724854411019, + "acc_norm": 0.65, + "acc_norm_stderr": 0.04793724854411019 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.6011560693641619, + "acc_stderr": 0.026362437574546545, + "acc_norm": 0.6011560693641619, + "acc_norm_stderr": 0.026362437574546545 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5214723926380368, + "acc_stderr": 0.03924746876751129, + "acc_norm": 0.5214723926380368, + "acc_norm_stderr": 0.03924746876751129 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5524691358024691, + "acc_stderr": 0.027667138569422708, + "acc_norm": 0.5524691358024691, + "acc_norm_stderr": 0.027667138569422708 + }, + 
"harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.047609522856952365, + "acc_norm": 0.34, + "acc_norm_stderr": 0.047609522856952365 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6321243523316062, + "acc_stderr": 0.034801756684660366, + "acc_norm": 0.6321243523316062, + "acc_norm_stderr": 0.034801756684660366 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.4473684210526316, + "acc_stderr": 0.046774730044912, + "acc_norm": 0.4473684210526316, + "acc_norm_stderr": 0.046774730044912 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6440366972477064, + "acc_stderr": 0.020528559278244214, + "acc_norm": 0.6440366972477064, + "acc_norm_stderr": 0.020528559278244214 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.47619047619047616, + "acc_stderr": 0.04467062628403273, + "acc_norm": 0.47619047619047616, + "acc_norm_stderr": 0.04467062628403273 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5751633986928104, + "acc_stderr": 0.02830457667314111, + "acc_norm": 0.5751633986928104, + "acc_norm_stderr": 0.02830457667314111 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.61, + "acc_stderr": 0.049020713000019756, + "acc_norm": 0.61, + "acc_norm_stderr": 0.049020713000019756 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7272727272727273, + "acc_stderr": 0.040655781409087044, + "acc_norm": 0.7272727272727273, + "acc_norm_stderr": 0.040655781409087044 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.6447368421052632, + "acc_stderr": 0.038947344870133176, + "acc_norm": 0.6447368421052632, + "acc_norm_stderr": 0.038947344870133176 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5098039215686274, + "acc_stderr": 0.0202239460050743, + "acc_norm": 0.5098039215686274, + "acc_norm_stderr": 0.0202239460050743 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.375886524822695, + "acc_stderr": 0.028893955412115882, + "acc_norm": 0.375886524822695, + 
"acc_norm_stderr": 0.028893955412115882 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4732142857142857, + "acc_stderr": 0.04738975119274153, + "acc_norm": 0.4732142857142857, + "acc_norm_stderr": 0.04738975119274153 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.44907407407407407, + "acc_stderr": 0.03392238405321617, + "acc_norm": 0.44907407407407407, + "acc_norm_stderr": 0.03392238405321617 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.3418994413407821, + "acc_stderr": 0.015864506461604654, + "acc_norm": 0.3418994413407821, + "acc_norm_stderr": 0.015864506461604654 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.47, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.47, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.7, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.7, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4485294117647059, + "acc_stderr": 0.030211479609121596, + "acc_norm": 0.4485294117647059, + "acc_norm_stderr": 0.030211479609121596 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6285714285714286, + "acc_stderr": 0.03093285879278985, + "acc_norm": 0.6285714285714286, + "acc_norm_stderr": 0.03093285879278985 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7172995780590717, + "acc_stderr": 0.029312814153955934, + "acc_norm": 0.7172995780590717, + "acc_norm_stderr": 0.029312814153955934 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.36962190352020863, + "acc_stderr": 0.01232844577857526, + "acc_norm": 0.36962190352020863, + "acc_norm_stderr": 0.01232844577857526 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6470588235294118, + "acc_stderr": 0.03354092437591519, + "acc_norm": 0.6470588235294118, + "acc_norm_stderr": 0.03354092437591519 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 
0.6424242424242425, + "acc_stderr": 0.03742597043806587, + "acc_norm": 0.6424242424242425, + "acc_norm_stderr": 0.03742597043806587 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3843329253365973, + "mc1_stderr": 0.0170287073012452, + "mc2": 0.5573073600753922, + "mc2_stderr": 0.016173107129410107 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5360094451003542, + "acc_stderr": 0.017145715365486657, + "acc_norm": 0.5749704840613932, + "acc_norm_stderr": 0.016996016308362887 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + 
"harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Qwen/Qwen1.5-14B-Chat", + "model_sha": "9492b22871f43e975435455f5c616c77fe7a50ec", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Qwen/Qwen1.5-14B/result_2024-05-13 18:15:41.json b/Qwen/Qwen1.5-14B/result_2024-05-13 18:15:41.json new file mode 100644 index 0000000000000000000000000000000000000000..d411b41fbaae287ca67bf18106194eb027ac05f7 --- /dev/null +++ b/Qwen/Qwen1.5-14B/result_2024-05-13 18:15:41.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.34726962457337884, + "acc_stderr": 0.013913034529620437, + "acc_norm": 0.3890784982935154, 
+ "acc_norm_stderr": 0.014247309976045605 + }, + "harness|ko_hellaswag|10": { + "acc": 0.37343158733320053, + "acc_stderr": 0.004827266662144028, + "acc_norm": 0.49203345947022503, + "acc_norm_stderr": 0.0049891480106251185 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6198830409356725, + "acc_stderr": 0.037229657413855394, + "acc_norm": 0.6198830409356725, + "acc_norm_stderr": 0.037229657413855394 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6796116504854369, + "acc_stderr": 0.04620284082280042, + "acc_norm": 0.6796116504854369, + "acc_norm_stderr": 0.04620284082280042 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6040868454661558, + "acc_stderr": 0.017488247006979266, + "acc_norm": 0.6040868454661558, + "acc_norm_stderr": 0.017488247006979266 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.35555555555555557, + "acc_stderr": 0.04135176749720386, + "acc_norm": 0.35555555555555557, + "acc_norm_stderr": 0.04135176749720386 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.6, + "acc_stderr": 0.032025630761017373, + "acc_norm": 0.6, + "acc_norm_stderr": 0.032025630761017373 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.41566265060240964, + "acc_stderr": 0.038367221765980515, + "acc_norm": 0.41566265060240964, + "acc_norm_stderr": 0.038367221765980515 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5691318327974276, + "acc_stderr": 0.028125340983972714, + "acc_norm": 0.5691318327974276, + "acc_norm_stderr": 0.028125340983972714 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5336322869955157, + "acc_stderr": 0.033481800170603065, + "acc_norm": 0.5336322869955157, + "acc_norm_stderr": 0.033481800170603065 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5877862595419847, + "acc_stderr": 0.04317171194870254, + "acc_norm": 0.5877862595419847, + 
"acc_norm_stderr": 0.04317171194870254 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7222222222222222, + "acc_stderr": 0.031911782267135445, + "acc_norm": 0.7222222222222222, + "acc_norm_stderr": 0.031911782267135445 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5448275862068965, + "acc_stderr": 0.04149886942192118, + "acc_norm": 0.5448275862068965, + "acc_norm_stderr": 0.04149886942192118 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3235294117647059, + "acc_stderr": 0.04655010411319616, + "acc_norm": 0.3235294117647059, + "acc_norm_stderr": 0.04655010411319616 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6092436974789915, + "acc_stderr": 0.03169380235712997, + "acc_norm": 0.6092436974789915, + "acc_norm_stderr": 0.03169380235712997 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5717948717948718, + "acc_stderr": 0.025088301454694824, + "acc_norm": 0.5717948717948718, + "acc_norm_stderr": 0.025088301454694824 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.66, + "acc_stderr": 0.04760952285695237, + "acc_norm": 0.66, + "acc_norm_stderr": 0.04760952285695237 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6203703703703703, + "acc_stderr": 0.04691521224077742, + "acc_norm": 0.6203703703703703, + "acc_norm_stderr": 0.04691521224077742 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4729064039408867, + "acc_stderr": 0.03512819077876106, + "acc_norm": 0.4729064039408867, + "acc_norm_stderr": 0.03512819077876106 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6258064516129033, + "acc_stderr": 0.027528904299845697, + 
"acc_norm": 0.6258064516129033, + "acc_norm_stderr": 0.027528904299845697 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7905982905982906, + "acc_stderr": 0.026655699653922737, + "acc_norm": 0.7905982905982906, + "acc_norm_stderr": 0.026655699653922737 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5471698113207547, + "acc_stderr": 0.03063562795796182, + "acc_norm": 0.5471698113207547, + "acc_norm_stderr": 0.03063562795796182 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6181818181818182, + "acc_stderr": 0.046534298079135075, + "acc_norm": 0.6181818181818182, + "acc_norm_stderr": 0.046534298079135075 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3851851851851852, + "acc_stderr": 0.02967090612463088, + "acc_norm": 0.3851851851851852, + "acc_norm_stderr": 0.02967090612463088 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3576158940397351, + "acc_stderr": 0.03913453431177258, + "acc_norm": 0.3576158940397351, + "acc_norm_stderr": 0.03913453431177258 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6865671641791045, + "acc_stderr": 0.03280188205348642, + "acc_norm": 0.6865671641791045, + "acc_norm_stderr": 0.03280188205348642 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5433526011560693, + "acc_stderr": 0.03798106566014498, + "acc_norm": 0.5433526011560693, + "acc_norm_stderr": 0.03798106566014498 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.49206349206349204, + "acc_stderr": 0.025748065871673297, + "acc_norm": 0.49206349206349204, + "acc_norm_stderr": 0.025748065871673297 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.04174752578923185, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.04174752578923185 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 
0.71, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.71, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.6127167630057804, + "acc_stderr": 0.02622615860512465, + "acc_norm": 0.6127167630057804, + "acc_norm_stderr": 0.02622615860512465 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.558282208588957, + "acc_stderr": 0.03901591825836184, + "acc_norm": 0.558282208588957, + "acc_norm_stderr": 0.03901591825836184 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5216049382716049, + "acc_stderr": 0.02779476010500874, + "acc_norm": 0.5216049382716049, + "acc_norm_stderr": 0.02779476010500874 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6321243523316062, + "acc_stderr": 0.034801756684660366, + "acc_norm": 0.6321243523316062, + "acc_norm_stderr": 0.034801756684660366 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.39473684210526316, + "acc_stderr": 0.04598188057816542, + "acc_norm": 0.39473684210526316, + "acc_norm_stderr": 0.04598188057816542 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.673394495412844, + "acc_stderr": 0.020106990889937303, + "acc_norm": 0.673394495412844, + "acc_norm_stderr": 0.020106990889937303 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4603174603174603, + "acc_stderr": 0.04458029125470973, + "acc_norm": 0.4603174603174603, + "acc_norm_stderr": 0.04458029125470973 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.6013071895424836, + "acc_stderr": 0.028036092273891776, + "acc_norm": 0.6013071895424836, + "acc_norm_stderr": 0.028036092273891776 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.62, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.62, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 
0.768595041322314, + "acc_stderr": 0.03849856098794089, + "acc_norm": 0.768595041322314, + "acc_norm_stderr": 0.03849856098794089 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5855263157894737, + "acc_stderr": 0.04008973785779205, + "acc_norm": 0.5855263157894737, + "acc_norm_stderr": 0.04008973785779205 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4869281045751634, + "acc_stderr": 0.020220920829626916, + "acc_norm": 0.4869281045751634, + "acc_norm_stderr": 0.020220920829626916 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3829787234042553, + "acc_stderr": 0.02899908090480618, + "acc_norm": 0.3829787234042553, + "acc_norm_stderr": 0.02899908090480618 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4642857142857143, + "acc_stderr": 0.04733667890053757, + "acc_norm": 0.4642857142857143, + "acc_norm_stderr": 0.04733667890053757 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5046296296296297, + "acc_stderr": 0.03409825519163572, + "acc_norm": 0.5046296296296297, + "acc_norm_stderr": 0.03409825519163572 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2759776536312849, + "acc_stderr": 0.014950103002475363, + "acc_norm": 0.2759776536312849, + "acc_norm_stderr": 0.014950103002475363 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.72, + "acc_stderr": 0.045126085985421296, + "acc_norm": 0.72, + "acc_norm_stderr": 0.045126085985421296 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.47058823529411764, + "acc_stderr": 0.030320243265004137, + "acc_norm": 0.47058823529411764, + "acc_norm_stderr": 0.030320243265004137 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6693877551020408, + "acc_stderr": 0.03011642629654061, + "acc_norm": 0.6693877551020408, + "acc_norm_stderr": 
0.03011642629654061 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7215189873417721, + "acc_stderr": 0.02917868230484252, + "acc_norm": 0.7215189873417721, + "acc_norm_stderr": 0.02917868230484252 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3767926988265971, + "acc_stderr": 0.012376459593894402, + "acc_norm": 0.3767926988265971, + "acc_norm_stderr": 0.012376459593894402 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6225490196078431, + "acc_stderr": 0.03402272044340703, + "acc_norm": 0.6225490196078431, + "acc_norm_stderr": 0.03402272044340703 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.696969696969697, + "acc_stderr": 0.035886248000917075, + "acc_norm": 0.696969696969697, + "acc_norm_stderr": 0.035886248000917075 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3292533659730722, + "mc1_stderr": 0.016451264440068246, + "mc2": 0.48890432930564176, + "mc2_stderr": 0.01607466925683564 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5088547815820543, + "acc_stderr": 0.017187658199336736, + "acc_norm": 0.6092089728453365, + "acc_norm_stderr": 0.016775298465108255 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + 
"harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Qwen/Qwen1.5-14B", + "model_sha": "dce4b190d34470818e5bec2a92cb8233aaa02ca2", + 
"model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Qwen/Qwen2-7B-Instruct/result_2024-06-06 17:19:27.json b/Qwen/Qwen2-7B-Instruct/result_2024-06-06 17:19:27.json new file mode 100644 index 0000000000000000000000000000000000000000..f53f3de90036a735b7c85a2fb96970b55dc70f59 --- /dev/null +++ b/Qwen/Qwen2-7B-Instruct/result_2024-06-06 17:19:27.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3924914675767918, + "acc_stderr": 0.014269634635670712, + "acc_norm": 0.4522184300341297, + "acc_norm_stderr": 0.014544519880633839 + }, + "harness|ko_hellaswag|10": { + "acc": 0.2504481179047998, + "acc_stderr": 0.004323856300539177, + "acc_norm": 0.2504481179047998, + "acc_norm_stderr": 0.004323856300539177 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6783625730994152, + "acc_stderr": 0.03582529442573122, + "acc_norm": 0.6783625730994152, + "acc_norm_stderr": 0.03582529442573122 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.8058252427184466, + "acc_stderr": 0.03916667762822585, + "acc_norm": 0.8058252427184466, + "acc_norm_stderr": 0.03916667762822585 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6590038314176245, + "acc_stderr": 0.01695178138322332, + "acc_norm": 0.6590038314176245, + "acc_norm_stderr": 0.01695178138322332 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.04171654161354544, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.04171654161354544 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.6127659574468085, + "acc_stderr": 0.03184389265339525, + "acc_norm": 0.6127659574468085, + "acc_norm_stderr": 0.03184389265339525 + }, + 
"harness|ko_mmlu_virology|5": { + "acc": 0.4578313253012048, + "acc_stderr": 0.03878626771002361, + "acc_norm": 0.4578313253012048, + "acc_norm_stderr": 0.03878626771002361 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6366559485530546, + "acc_stderr": 0.027316847674192714, + "acc_norm": 0.6366559485530546, + "acc_norm_stderr": 0.027316847674192714 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.6053811659192825, + "acc_stderr": 0.03280400504755291, + "acc_norm": 0.6053811659192825, + "acc_norm_stderr": 0.03280400504755291 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6183206106870229, + "acc_stderr": 0.0426073515764456, + "acc_norm": 0.6183206106870229, + "acc_norm_stderr": 0.0426073515764456 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.57, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.57, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7222222222222222, + "acc_stderr": 0.03191178226713547, + "acc_norm": 0.7222222222222222, + "acc_norm_stderr": 0.03191178226713547 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.6275862068965518, + "acc_stderr": 0.0402873153294756, + "acc_norm": 0.6275862068965518, + "acc_norm_stderr": 0.0402873153294756 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.04533838195929778, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.04533838195929778 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6638655462184874, + "acc_stderr": 0.030684737115135353, + "acc_norm": 0.6638655462184874, + "acc_norm_stderr": 0.030684737115135353 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.6641025641025641, + "acc_stderr": 0.023946724741563986, + "acc_norm": 0.6641025641025641, + "acc_norm_stderr": 0.023946724741563986 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.69, + "acc_stderr": 0.046482319871173156, + "acc_norm": 0.69, + 
"acc_norm_stderr": 0.046482319871173156 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.7129629629629629, + "acc_stderr": 0.043733130409147614, + "acc_norm": 0.7129629629629629, + "acc_norm_stderr": 0.043733130409147614 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.5221674876847291, + "acc_stderr": 0.035145285621750066, + "acc_norm": 0.5221674876847291, + "acc_norm_stderr": 0.035145285621750066 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.635483870967742, + "acc_stderr": 0.027379871229943252, + "acc_norm": 0.635483870967742, + "acc_norm_stderr": 0.027379871229943252 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.8162393162393162, + "acc_stderr": 0.02537213967172293, + "acc_norm": 0.8162393162393162, + "acc_norm_stderr": 0.02537213967172293 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.6150943396226415, + "acc_stderr": 0.029946498567699948, + "acc_norm": 0.6150943396226415, + "acc_norm_stderr": 0.029946498567699948 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5545454545454546, + "acc_stderr": 0.047605488214603246, + "acc_norm": 0.5545454545454546, + "acc_norm_stderr": 0.047605488214603246 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.5037037037037037, + "acc_stderr": 0.030484701665084362, + "acc_norm": 0.5037037037037037, + "acc_norm_stderr": 0.030484701665084362 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3509933774834437, + "acc_stderr": 0.03896981964257375, + "acc_norm": 0.3509933774834437, + "acc_norm_stderr": 0.03896981964257375 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7711442786069652, + "acc_stderr": 0.029705284056772443, + "acc_norm": 0.7711442786069652, + "acc_norm_stderr": 0.029705284056772443 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5491329479768786, + "acc_stderr": 
0.03794012674697029, + "acc_norm": 0.5491329479768786, + "acc_norm_stderr": 0.03794012674697029 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.582010582010582, + "acc_stderr": 0.02540255550326091, + "acc_norm": 0.582010582010582, + "acc_norm_stderr": 0.02540255550326091 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5416666666666666, + "acc_stderr": 0.041666666666666664, + "acc_norm": 0.5416666666666666, + "acc_norm_stderr": 0.041666666666666664 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.42, + "acc_stderr": 0.04960449637488584, + "acc_norm": 0.42, + "acc_norm_stderr": 0.04960449637488584 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.72, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.72, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.6329479768786127, + "acc_stderr": 0.025950054337654085, + "acc_norm": 0.6329479768786127, + "acc_norm_stderr": 0.025950054337654085 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.6073619631901841, + "acc_stderr": 0.0383674090783103, + "acc_norm": 0.6073619631901841, + "acc_norm_stderr": 0.0383674090783103 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6203703703703703, + "acc_stderr": 0.027002521034516478, + "acc_norm": 0.6203703703703703, + "acc_norm_stderr": 0.027002521034516478 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6580310880829016, + "acc_stderr": 0.03423465100104284, + "acc_norm": 0.6580310880829016, + "acc_norm_stderr": 0.03423465100104284 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.49122807017543857, + "acc_stderr": 0.047028804320496165, + "acc_norm": 0.49122807017543857, + "acc_norm_stderr": 0.047028804320496165 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6880733944954128, 
+ "acc_stderr": 0.019862967976707245, + "acc_norm": 0.6880733944954128, + "acc_norm_stderr": 0.019862967976707245 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.5238095238095238, + "acc_stderr": 0.04467062628403273, + "acc_norm": 0.5238095238095238, + "acc_norm_stderr": 0.04467062628403273 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5915032679738562, + "acc_stderr": 0.028146405993096358, + "acc_norm": 0.5915032679738562, + "acc_norm_stderr": 0.028146405993096358 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.68, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.68, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7933884297520661, + "acc_stderr": 0.03695980128098824, + "acc_norm": 0.7933884297520661, + "acc_norm_stderr": 0.03695980128098824 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.7105263157894737, + "acc_stderr": 0.036906779861372814, + "acc_norm": 0.7105263157894737, + "acc_norm_stderr": 0.036906779861372814 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5261437908496732, + "acc_stderr": 0.020200164564804588, + "acc_norm": 0.5261437908496732, + "acc_norm_stderr": 0.020200164564804588 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.4574468085106383, + "acc_stderr": 0.02971928127223684, + "acc_norm": 0.4574468085106383, + "acc_norm_stderr": 0.02971928127223684 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4642857142857143, + "acc_stderr": 0.04733667890053756, + "acc_norm": 0.4642857142857143, + "acc_norm_stderr": 0.04733667890053756 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5694444444444444, + "acc_stderr": 0.03376922151252336, + "acc_norm": 0.5694444444444444, + "acc_norm_stderr": 0.03376922151252336 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.4301675977653631, + "acc_stderr": 0.01655860163604103, + "acc_norm": 0.4301675977653631, + "acc_norm_stderr": 0.01655860163604103 + }, + 
"harness|ko_mmlu_college_computer_science|5": { + "acc": 0.55, + "acc_stderr": 0.04999999999999999, + "acc_norm": 0.55, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.71, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.71, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5036764705882353, + "acc_stderr": 0.030372015885428195, + "acc_norm": 0.5036764705882353, + "acc_norm_stderr": 0.030372015885428195 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6326530612244898, + "acc_stderr": 0.03086214492108757, + "acc_norm": 0.6326530612244898, + "acc_norm_stderr": 0.03086214492108757 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7341772151898734, + "acc_stderr": 0.02875679962965834, + "acc_norm": 0.7341772151898734, + "acc_norm_stderr": 0.02875679962965834 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.39504563233376794, + "acc_stderr": 0.012485727813251565, + "acc_norm": 0.39504563233376794, + "acc_norm_stderr": 0.012485727813251565 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6911764705882353, + "acc_stderr": 0.03242661719827218, + "acc_norm": 0.6911764705882353, + "acc_norm_stderr": 0.03242661719827218 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.696969696969697, + "acc_stderr": 0.03588624800091708, + "acc_norm": 0.696969696969697, + "acc_norm_stderr": 0.03588624800091708 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3880048959608323, + "mc1_stderr": 0.017058761501347962, + "mc2": 0.5601057396457566, + "mc2_stderr": 0.01603029702389325 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5525383707201889, + "acc_stderr": 0.01709519030150058, + "acc_norm": 0.5714285714285714, + "acc_norm_stderr": 0.01701403811929749 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + 
"harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + 
"harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Qwen/Qwen2-7B-Instruct", + "model_sha": "41c66b0be1c3081f13defc6bdf946c2ef240d6a6", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Qwen/Qwen2-7B/result_2024-06-06 17:19:35.json b/Qwen/Qwen2-7B/result_2024-06-06 17:19:35.json new file mode 100644 index 0000000000000000000000000000000000000000..e61e82198ff223230d1facc70eabd862cb646a21 --- /dev/null +++ b/Qwen/Qwen2-7B/result_2024-06-06 17:19:35.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3651877133105802, + "acc_stderr": 0.014070265519268804, + "acc_norm": 0.42406143344709896, + "acc_norm_stderr": 0.01444188962746439 + }, + "harness|ko_hellaswag|10": { + "acc": 0.38856801433977295, + "acc_stderr": 0.004864286176731831, + "acc_norm": 0.5241983668591914, + "acc_norm_stderr": 0.004983934343250459 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6783625730994152, + "acc_stderr": 0.03582529442573122, + "acc_norm": 0.6783625730994152, + "acc_norm_stderr": 0.03582529442573122 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.7864077669902912, + "acc_stderr": 0.040580420156460364, + "acc_norm": 0.7864077669902912, + "acc_norm_stderr": 0.040580420156460364 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.669220945083014, + 
"acc_stderr": 0.016824818462563756, + "acc_norm": 0.669220945083014, + "acc_norm_stderr": 0.016824818462563756 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4, + "acc_stderr": 0.04232073695151589, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04232073695151589 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.6127659574468085, + "acc_stderr": 0.03184389265339525, + "acc_norm": 0.6127659574468085, + "acc_norm_stderr": 0.03184389265339525 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.536144578313253, + "acc_stderr": 0.03882310850890593, + "acc_norm": 0.536144578313253, + "acc_norm_stderr": 0.03882310850890593 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.662379421221865, + "acc_stderr": 0.026858825879488554, + "acc_norm": 0.662379421221865, + "acc_norm_stderr": 0.026858825879488554 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.6233183856502242, + "acc_stderr": 0.032521134899291884, + "acc_norm": 0.6233183856502242, + "acc_norm_stderr": 0.032521134899291884 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.648854961832061, + "acc_stderr": 0.0418644516301375, + "acc_norm": 0.648854961832061, + "acc_norm_stderr": 0.0418644516301375 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.53, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.53, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7525252525252525, + "acc_stderr": 0.030746300742124505, + "acc_norm": 0.7525252525252525, + "acc_norm_stderr": 0.030746300742124505 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.6758620689655173, + "acc_stderr": 0.039004320691855554, + "acc_norm": 0.6758620689655173, + "acc_norm_stderr": 0.039004320691855554 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 
0.04690650298201942, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04690650298201942 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6890756302521008, + "acc_stderr": 0.03006676158297794, + "acc_norm": 0.6890756302521008, + "acc_norm_stderr": 0.03006676158297794 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.6435897435897436, + "acc_stderr": 0.02428314052946731, + "acc_norm": 0.6435897435897436, + "acc_norm_stderr": 0.02428314052946731 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.72, + "acc_stderr": 0.045126085985421296, + "acc_norm": 0.72, + "acc_norm_stderr": 0.045126085985421296 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.7129629629629629, + "acc_stderr": 0.043733130409147614, + "acc_norm": 0.7129629629629629, + "acc_norm_stderr": 0.043733130409147614 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.5467980295566502, + "acc_stderr": 0.03502544650845872, + "acc_norm": 0.5467980295566502, + "acc_norm_stderr": 0.03502544650845872 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6741935483870968, + "acc_stderr": 0.026662010578567097, + "acc_norm": 0.6741935483870968, + "acc_norm_stderr": 0.026662010578567097 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.8205128205128205, + "acc_stderr": 0.02514093595033544, + "acc_norm": 0.8205128205128205, + "acc_norm_stderr": 0.02514093595033544 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.6377358490566037, + "acc_stderr": 0.029582245128384303, + "acc_norm": 0.6377358490566037, + "acc_norm_stderr": 0.029582245128384303 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5909090909090909, + "acc_stderr": 0.04709306978661896, + "acc_norm": 0.5909090909090909, + "acc_norm_stderr": 0.04709306978661896 + }, + "harness|ko_mmlu_high_school_mathematics|5": 
{ + "acc": 0.4703703703703704, + "acc_stderr": 0.030431963547936577, + "acc_norm": 0.4703703703703704, + "acc_norm_stderr": 0.030431963547936577 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3443708609271523, + "acc_stderr": 0.038796870240733264, + "acc_norm": 0.3443708609271523, + "acc_norm_stderr": 0.038796870240733264 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7512437810945274, + "acc_stderr": 0.030567675938916714, + "acc_norm": 0.7512437810945274, + "acc_norm_stderr": 0.030567675938916714 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5491329479768786, + "acc_stderr": 0.037940126746970296, + "acc_norm": 0.5491329479768786, + "acc_norm_stderr": 0.037940126746970296 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.5846560846560847, + "acc_stderr": 0.02537952491077839, + "acc_norm": 0.5846560846560847, + "acc_norm_stderr": 0.02537952491077839 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5069444444444444, + "acc_stderr": 0.04180806750294939, + "acc_norm": 0.5069444444444444, + "acc_norm_stderr": 0.04180806750294939 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.73, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.73, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.6445086705202312, + "acc_stderr": 0.025770292082977254, + "acc_norm": 0.6445086705202312, + "acc_norm_stderr": 0.025770292082977254 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.6012269938650306, + "acc_stderr": 0.03847021420456024, + "acc_norm": 0.6012269938650306, + "acc_norm_stderr": 0.03847021420456024 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6327160493827161, + "acc_stderr": 0.026822801759507894, + "acc_norm": 0.6327160493827161, + "acc_norm_stderr": 0.026822801759507894 + }, + 
"harness|ko_mmlu_college_mathematics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6476683937823834, + "acc_stderr": 0.03447478286414357, + "acc_norm": 0.6476683937823834, + "acc_norm_stderr": 0.03447478286414357 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.4473684210526316, + "acc_stderr": 0.04677473004491199, + "acc_norm": 0.4473684210526316, + "acc_norm_stderr": 0.04677473004491199 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.7009174311926606, + "acc_stderr": 0.01963041728541517, + "acc_norm": 0.7009174311926606, + "acc_norm_stderr": 0.01963041728541517 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.49206349206349204, + "acc_stderr": 0.044715725362943486, + "acc_norm": 0.49206349206349204, + "acc_norm_stderr": 0.044715725362943486 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.6111111111111112, + "acc_stderr": 0.027914055510468, + "acc_norm": 0.6111111111111112, + "acc_norm_stderr": 0.027914055510468 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.7, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.7, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7603305785123967, + "acc_stderr": 0.03896878985070416, + "acc_norm": 0.7603305785123967, + "acc_norm_stderr": 0.03896878985070416 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.7171052631578947, + "acc_stderr": 0.03665349695640767, + "acc_norm": 0.7171052631578947, + "acc_norm_stderr": 0.03665349695640767 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5359477124183006, + "acc_stderr": 0.020175488765484043, + "acc_norm": 0.5359477124183006, + "acc_norm_stderr": 0.020175488765484043 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.475177304964539, + "acc_stderr": 0.029790719243829707, + "acc_norm": 0.475177304964539, + 
"acc_norm_stderr": 0.029790719243829707 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4375, + "acc_stderr": 0.04708567521880525, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.04708567521880525 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5370370370370371, + "acc_stderr": 0.03400603625538272, + "acc_norm": 0.5370370370370371, + "acc_norm_stderr": 0.03400603625538272 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.4022346368715084, + "acc_stderr": 0.016399716732847146, + "acc_norm": 0.4022346368715084, + "acc_norm_stderr": 0.016399716732847146 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.57, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.57, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.76, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.76, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5772058823529411, + "acc_stderr": 0.030008562845003476, + "acc_norm": 0.5772058823529411, + "acc_norm_stderr": 0.030008562845003476 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.673469387755102, + "acc_stderr": 0.030021056238440317, + "acc_norm": 0.673469387755102, + "acc_norm_stderr": 0.030021056238440317 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7383966244725738, + "acc_stderr": 0.028609516716994934, + "acc_norm": 0.7383966244725738, + "acc_norm_stderr": 0.028609516716994934 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.42698826597131684, + "acc_stderr": 0.012633353557534423, + "acc_norm": 0.42698826597131684, + "acc_norm_stderr": 0.012633353557534423 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.7303921568627451, + "acc_stderr": 0.03114557065948678, + "acc_norm": 0.7303921568627451, + "acc_norm_stderr": 0.03114557065948678 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.7212121212121212, + 
"acc_stderr": 0.03501438706296781, + "acc_norm": 0.7212121212121212, + "acc_norm_stderr": 0.03501438706296781 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3219094247246022, + "mc1_stderr": 0.016355567611960383, + "mc2": 0.49264334051888825, + "mc2_stderr": 0.015667716428599748 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.538370720188902, + "acc_stderr": 0.01713966022184556, + "acc_norm": 0.6115702479338843, + "acc_norm_stderr": 0.016756921571069422 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + 
"harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Qwen/Qwen2-7B", + "model_sha": "453ed1575b739b5b03ce3758b23befdb0967f40e", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/RLHFlow/LLaMA3-iterative-DPO-final/result_2024-06-05 14:46:44.json b/RLHFlow/LLaMA3-iterative-DPO-final/result_2024-06-05 14:46:44.json new file mode 100644 index 0000000000000000000000000000000000000000..23afe3988ea0328946cc42a4abd32b22eadec57f --- /dev/null +++ b/RLHFlow/LLaMA3-iterative-DPO-final/result_2024-06-05 14:46:44.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4104095563139932, + "acc_stderr": 0.014374922192642667, + "acc_norm": 
0.4709897610921502, + "acc_norm_stderr": 0.014586776355294307 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4032065325632344, + "acc_stderr": 0.004895390341445628, + "acc_norm": 0.5456084445329615, + "acc_norm_stderr": 0.0049689792597383325 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6549707602339181, + "acc_stderr": 0.03645981377388807, + "acc_norm": 0.6549707602339181, + "acc_norm_stderr": 0.03645981377388807 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6699029126213593, + "acc_stderr": 0.0465614711001235, + "acc_norm": 0.6699029126213593, + "acc_norm_stderr": 0.0465614711001235 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.017769250583533253, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.017769250583533253 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3851851851851852, + "acc_stderr": 0.042039210401562783, + "acc_norm": 0.3851851851851852, + "acc_norm_stderr": 0.042039210401562783 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4851063829787234, + "acc_stderr": 0.032671518489247764, + "acc_norm": 0.4851063829787234, + "acc_norm_stderr": 0.032671518489247764 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39156626506024095, + "acc_stderr": 0.03799857454479636, + "acc_norm": 0.39156626506024095, + "acc_norm_stderr": 0.03799857454479636 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5498392282958199, + "acc_stderr": 0.028256660723360177, + "acc_norm": 0.5498392282958199, + "acc_norm_stderr": 0.028256660723360177 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.48878923766816146, + "acc_stderr": 0.033549366530984746, + "acc_norm": 0.48878923766816146, + "acc_norm_stderr": 0.033549366530984746 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.549618320610687, + "acc_stderr": 0.04363643698524779, + 
"acc_norm": 0.549618320610687, + "acc_norm_stderr": 0.04363643698524779 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.601010101010101, + "acc_stderr": 0.03488901616852729, + "acc_norm": 0.601010101010101, + "acc_norm_stderr": 0.03488901616852729 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5103448275862069, + "acc_stderr": 0.04165774775728762, + "acc_norm": 0.5103448275862069, + "acc_norm_stderr": 0.04165774775728762 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.04835503696107223, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.04835503696107223 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5546218487394958, + "acc_stderr": 0.03228410626716391, + "acc_norm": 0.5546218487394958, + "acc_norm_stderr": 0.03228410626716391 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5128205128205128, + "acc_stderr": 0.025342671293807247, + "acc_norm": 0.5128205128205128, + "acc_norm_stderr": 0.025342671293807247 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.62, + "acc_stderr": 0.0487831731214563, + "acc_norm": 0.62, + "acc_norm_stderr": 0.0487831731214563 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6203703703703703, + "acc_stderr": 0.04691521224077742, + "acc_norm": 0.6203703703703703, + "acc_norm_stderr": 0.04691521224077742 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4187192118226601, + "acc_stderr": 0.034711928605184676, + "acc_norm": 0.4187192118226601, + "acc_norm_stderr": 0.034711928605184676 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5225806451612903, + "acc_stderr": 
0.02841498501970786, + "acc_norm": 0.5225806451612903, + "acc_norm_stderr": 0.02841498501970786 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7521367521367521, + "acc_stderr": 0.028286324075564407, + "acc_norm": 0.7521367521367521, + "acc_norm_stderr": 0.028286324075564407 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5283018867924528, + "acc_stderr": 0.030723535249006107, + "acc_norm": 0.5283018867924528, + "acc_norm_stderr": 0.030723535249006107 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5727272727272728, + "acc_stderr": 0.047381987035454834, + "acc_norm": 0.5727272727272728, + "acc_norm_stderr": 0.047381987035454834 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3962962962962963, + "acc_stderr": 0.029822619458533997, + "acc_norm": 0.3962962962962963, + "acc_norm_stderr": 0.029822619458533997 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33112582781456956, + "acc_stderr": 0.038425817186598696, + "acc_norm": 0.33112582781456956, + "acc_norm_stderr": 0.038425817186598696 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6616915422885572, + "acc_stderr": 0.03345563070339191, + "acc_norm": 0.6616915422885572, + "acc_norm_stderr": 0.03345563070339191 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4624277456647399, + "acc_stderr": 0.038016851045244604, + "acc_norm": 0.4624277456647399, + "acc_norm_stderr": 0.038016851045244604 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.42328042328042326, + "acc_stderr": 0.025446365634406783, + "acc_norm": 0.42328042328042326, + "acc_norm_stderr": 0.025446365634406783 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4930555555555556, + "acc_stderr": 0.04180806750294938, + "acc_norm": 0.4930555555555556, + "acc_norm_stderr": 0.04180806750294938 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.45, + "acc_stderr": 0.049999999999999996, + "acc_norm": 0.45, + "acc_norm_stderr": 0.049999999999999996 + }, + 
"harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.67, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.67, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5751445086705202, + "acc_stderr": 0.026613350840261733, + "acc_norm": 0.5751445086705202, + "acc_norm_stderr": 0.026613350840261733 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4785276073619632, + "acc_stderr": 0.03924746876751129, + "acc_norm": 0.4785276073619632, + "acc_norm_stderr": 0.03924746876751129 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5493827160493827, + "acc_stderr": 0.027684721415656196, + "acc_norm": 0.5493827160493827, + "acc_norm_stderr": 0.027684721415656196 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6010362694300518, + "acc_stderr": 0.03533999094065696, + "acc_norm": 0.6010362694300518, + "acc_norm_stderr": 0.03533999094065696 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.40350877192982454, + "acc_stderr": 0.04615186962583703, + "acc_norm": 0.40350877192982454, + "acc_norm_stderr": 0.04615186962583703 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6128440366972477, + "acc_stderr": 0.02088423199264345, + "acc_norm": 0.6128440366972477, + "acc_norm_stderr": 0.02088423199264345 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.044444444444444495, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.044444444444444495 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5522875816993464, + "acc_stderr": 0.028472938478033522, + "acc_norm": 0.5522875816993464, + "acc_norm_stderr": 0.028472938478033522 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + 
}, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7107438016528925, + "acc_stderr": 0.041391127276354626, + "acc_norm": 0.7107438016528925, + "acc_norm_stderr": 0.041391127276354626 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5526315789473685, + "acc_stderr": 0.0404633688397825, + "acc_norm": 0.5526315789473685, + "acc_norm_stderr": 0.0404633688397825 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.434640522875817, + "acc_stderr": 0.020054269200726452, + "acc_norm": 0.434640522875817, + "acc_norm_stderr": 0.020054269200726452 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3262411347517731, + "acc_stderr": 0.02796845304356317, + "acc_norm": 0.3262411347517731, + "acc_norm_stderr": 0.02796845304356317 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.5357142857142857, + "acc_stderr": 0.04733667890053756, + "acc_norm": 0.5357142857142857, + "acc_norm_stderr": 0.04733667890053756 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4351851851851852, + "acc_stderr": 0.033812000056435254, + "acc_norm": 0.4351851851851852, + "acc_norm_stderr": 0.033812000056435254 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.30837988826815643, + "acc_stderr": 0.015445716910998877, + "acc_norm": 0.30837988826815643, + "acc_norm_stderr": 0.015445716910998877 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.67, + "acc_stderr": 0.047258156262526094, + "acc_norm": 0.67, + "acc_norm_stderr": 0.047258156262526094 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.40808823529411764, + "acc_stderr": 0.029855261393483924, + "acc_norm": 0.40808823529411764, + "acc_norm_stderr": 0.029855261393483924 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6204081632653061, + "acc_stderr": 0.03106721126287246, + 
"acc_norm": 0.6204081632653061, + "acc_norm_stderr": 0.03106721126287246 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6751054852320675, + "acc_stderr": 0.030486039389105317, + "acc_norm": 0.6751054852320675, + "acc_norm_stderr": 0.030486039389105317 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3683181225554107, + "acc_stderr": 0.012319403369564644, + "acc_norm": 0.3683181225554107, + "acc_norm_stderr": 0.012319403369564644 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5588235294117647, + "acc_stderr": 0.034849415144292316, + "acc_norm": 0.5588235294117647, + "acc_norm_stderr": 0.034849415144292316 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.03756335775187895, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.03756335775187895 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3953488372093023, + "mc1_stderr": 0.017115815632418183, + "mc2": 0.5576718488551453, + "mc2_stderr": 0.01598772875231436 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5100354191263282, + "acc_stderr": 0.017186891286894053, + "acc_norm": 0.5088547815820543, + "acc_norm_stderr": 0.017187658199336736 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + 
"harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "RLHFlow/LLaMA3-iterative-DPO-final", + 
"model_sha": "40b73bd07a019795837f80579fe95470484ca82b", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Raphael21/Raphael21-SOLAR-10.7B/result_2024-02-26 11:01:49.json b/Raphael21/Raphael21-SOLAR-10.7B/result_2024-02-26 11:01:49.json new file mode 100644 index 0000000000000000000000000000000000000000..eaab3a5ce8659e17eb243e63cd6fd5d08281e6ad --- /dev/null +++ b/Raphael21/Raphael21-SOLAR-10.7B/result_2024-02-26 11:01:49.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.48293515358361777, + "acc_stderr": 0.014602878388536597, + "acc_norm": 0.5418088737201365, + "acc_norm_stderr": 0.0145602203087147 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4885480979884485, + "acc_stderr": 0.004988472459418033, + "acc_norm": 0.675363473411671, + "acc_norm_stderr": 0.004672819355838559 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5847953216374269, + "acc_stderr": 0.03779275945503201, + "acc_norm": 0.5847953216374269, + "acc_norm_stderr": 0.03779275945503201 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6504854368932039, + "acc_stderr": 0.047211885060971716, + "acc_norm": 0.6504854368932039, + "acc_norm_stderr": 0.047211885060971716 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6538952745849298, + "acc_stderr": 0.017011965266412077, + "acc_norm": 0.6538952745849298, + "acc_norm_stderr": 0.017011965266412077 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.43703703703703706, + "acc_stderr": 0.04284958639753399, + "acc_norm": 0.43703703703703706, + "acc_norm_stderr": 0.04284958639753399 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932269, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932269 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.46808510638297873, + "acc_stderr": 0.03261936918467381, + 
"acc_norm": 0.46808510638297873, + "acc_norm_stderr": 0.03261936918467381 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.463855421686747, + "acc_stderr": 0.03882310850890594, + "acc_norm": 0.463855421686747, + "acc_norm_stderr": 0.03882310850890594 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6205787781350482, + "acc_stderr": 0.027559949802347824, + "acc_norm": 0.6205787781350482, + "acc_norm_stderr": 0.027559949802347824 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5650224215246636, + "acc_stderr": 0.033272833702713445, + "acc_norm": 0.5650224215246636, + "acc_norm_stderr": 0.033272833702713445 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6335877862595419, + "acc_stderr": 0.042258754519696386, + "acc_norm": 0.6335877862595419, + "acc_norm_stderr": 0.042258754519696386 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.53, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.53, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7070707070707071, + "acc_stderr": 0.03242497958178817, + "acc_norm": 0.7070707070707071, + "acc_norm_stderr": 0.03242497958178817 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4689655172413793, + "acc_stderr": 0.04158632762097828, + "acc_norm": 0.4689655172413793, + "acc_norm_stderr": 0.04158632762097828 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.37254901960784315, + "acc_stderr": 0.04810840148082635, + "acc_norm": 0.37254901960784315, + "acc_norm_stderr": 0.04810840148082635 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6638655462184874, + "acc_stderr": 0.03068473711513537, + "acc_norm": 0.6638655462184874, + "acc_norm_stderr": 0.03068473711513537 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.558974358974359, + "acc_stderr": 0.025174048384000718, + "acc_norm": 0.558974358974359, + "acc_norm_stderr": 0.025174048384000718 + }, + "harness|ko_mmlu_computer_security|5": { + 
"acc": 0.55, + "acc_stderr": 0.04999999999999999, + "acc_norm": 0.55, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6111111111111112, + "acc_stderr": 0.0471282125742677, + "acc_norm": 0.6111111111111112, + "acc_norm_stderr": 0.0471282125742677 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4039408866995074, + "acc_stderr": 0.03452453903822039, + "acc_norm": 0.4039408866995074, + "acc_norm_stderr": 0.03452453903822039 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5838709677419355, + "acc_stderr": 0.028040981380761533, + "acc_norm": 0.5838709677419355, + "acc_norm_stderr": 0.028040981380761533 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7478632478632479, + "acc_stderr": 0.02844796547623102, + "acc_norm": 0.7478632478632479, + "acc_norm_stderr": 0.02844796547623102 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5245283018867924, + "acc_stderr": 0.030735822206205615, + "acc_norm": 0.5245283018867924, + "acc_norm_stderr": 0.030735822206205615 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5636363636363636, + "acc_stderr": 0.04750185058907296, + "acc_norm": 0.5636363636363636, + "acc_norm_stderr": 0.04750185058907296 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.02944316932303154, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.02944316932303154 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33112582781456956, + "acc_stderr": 0.038425817186598696, + "acc_norm": 0.33112582781456956, + "acc_norm_stderr": 0.038425817186598696 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6467661691542289, + "acc_stderr": 0.03379790611796777, + "acc_norm": 0.6467661691542289, + "acc_norm_stderr": 0.03379790611796777 + }, + 
"harness|ko_mmlu_college_medicine|5": { + "acc": 0.5028901734104047, + "acc_stderr": 0.038124005659748335, + "acc_norm": 0.5028901734104047, + "acc_norm_stderr": 0.038124005659748335 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.025487187147859372, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.025487187147859372 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5, + "acc_stderr": 0.04181210050035455, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04181210050035455 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.72, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.72, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5549132947976878, + "acc_stderr": 0.02675625512966377, + "acc_norm": 0.5549132947976878, + "acc_norm_stderr": 0.02675625512966377 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5153374233128835, + "acc_stderr": 0.039265223787088445, + "acc_norm": 0.5153374233128835, + "acc_norm_stderr": 0.039265223787088445 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6080246913580247, + "acc_stderr": 0.027163686038271146, + "acc_norm": 0.6080246913580247, + "acc_norm_stderr": 0.027163686038271146 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.694300518134715, + "acc_stderr": 0.033248379397581594, + "acc_norm": 0.694300518134715, + "acc_norm_stderr": 0.033248379397581594 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.45614035087719296, + "acc_stderr": 0.046854730419077895, + "acc_norm": 0.45614035087719296, + "acc_norm_stderr": 0.046854730419077895 + }, + 
"harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6972477064220184, + "acc_stderr": 0.019698711434756346, + "acc_norm": 0.6972477064220184, + "acc_norm_stderr": 0.019698711434756346 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.04285714285714281, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.04285714285714281 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.6013071895424836, + "acc_stderr": 0.02803609227389177, + "acc_norm": 0.6013071895424836, + "acc_norm_stderr": 0.02803609227389177 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6694214876033058, + "acc_stderr": 0.042943408452120926, + "acc_norm": 0.6694214876033058, + "acc_norm_stderr": 0.042943408452120926 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5723684210526315, + "acc_stderr": 0.04026097083296564, + "acc_norm": 0.5723684210526315, + "acc_norm_stderr": 0.04026097083296564 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4918300653594771, + "acc_stderr": 0.020225134343057255, + "acc_norm": 0.4918300653594771, + "acc_norm_stderr": 0.020225134343057255 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3546099290780142, + "acc_stderr": 0.02853865002887864, + "acc_norm": 0.3546099290780142, + "acc_norm_stderr": 0.02853865002887864 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.38392857142857145, + "acc_stderr": 0.04616143075028546, + "acc_norm": 0.38392857142857145, + "acc_norm_stderr": 0.04616143075028546 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5416666666666666, + "acc_stderr": 0.03398110890294636, + "acc_norm": 0.5416666666666666, + "acc_norm_stderr": 0.03398110890294636 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.3463687150837989, + "acc_stderr": 0.015913546784020117, + "acc_norm": 
0.3463687150837989, + "acc_norm_stderr": 0.015913546784020117 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.62, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.62, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4963235294117647, + "acc_stderr": 0.030372015885428195, + "acc_norm": 0.4963235294117647, + "acc_norm_stderr": 0.030372015885428195 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6204081632653061, + "acc_stderr": 0.031067211262872464, + "acc_norm": 0.6204081632653061, + "acc_norm_stderr": 0.031067211262872464 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7510548523206751, + "acc_stderr": 0.028146970599422644, + "acc_norm": 0.7510548523206751, + "acc_norm_stderr": 0.028146970599422644 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.423728813559322, + "acc_stderr": 0.01262078515588599, + "acc_norm": 0.423728813559322, + "acc_norm_stderr": 0.01262078515588599 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5931372549019608, + "acc_stderr": 0.03447891136353382, + "acc_norm": 0.5931372549019608, + "acc_norm_stderr": 0.03447891136353382 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6848484848484848, + "acc_stderr": 0.0362773057502241, + "acc_norm": 0.6848484848484848, + "acc_norm_stderr": 0.0362773057502241 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.47613219094247244, + "mc1_stderr": 0.017483547156961585, + "mc2": 0.6437655582948802, + "mc2_stderr": 0.015843103346719872 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.45690672963400236, + "acc_stderr": 0.017126389093086784, + "acc_norm": 0.5289256198347108, + "acc_norm_stderr": 0.017161563949916348 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + 
"harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + 
"harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Raphael21/Raphael21-SOLAR-10.7B", + "model_sha": "7e0a60cde6431778dd80b90376415ad8bb171de7", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/RubielLabarta/LogoS-7Bx2-MoE-13B-v0.2/result_2024-07-03 09:55:23.json b/RubielLabarta/LogoS-7Bx2-MoE-13B-v0.2/result_2024-07-03 09:55:23.json new file mode 100644 index 0000000000000000000000000000000000000000..f704629f9e0f25ae701854b7571f6ab72560b142 --- /dev/null +++ b/RubielLabarta/LogoS-7Bx2-MoE-13B-v0.2/result_2024-07-03 09:55:23.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.40017064846416384, + "acc_stderr": 0.014317197787809178, + "acc_norm": 0.4598976109215017, + "acc_norm_stderr": 0.01456431885692485 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4041027683728341, + "acc_stderr": 0.004897146690596257, + "acc_norm": 0.536247759410476, + "acc_norm_stderr": 0.004976651989757642 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.47953216374269003, + "acc_stderr": 0.0383161053282193, + "acc_norm": 0.47953216374269003, + "acc_norm_stderr": 0.0383161053282193 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5922330097087378, + "acc_stderr": 
0.04865777570410768, + "acc_norm": 0.5922330097087378, + "acc_norm_stderr": 0.04865777570410768 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4763729246487867, + "acc_stderr": 0.017859989765176453, + "acc_norm": 0.4763729246487867, + "acc_norm_stderr": 0.017859989765176453 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.04171654161354543, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.04171654161354543 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.40425531914893614, + "acc_stderr": 0.03208115750788684, + "acc_norm": 0.40425531914893614, + "acc_norm_stderr": 0.03208115750788684 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.37349397590361444, + "acc_stderr": 0.03765845117168863, + "acc_norm": 0.37349397590361444, + "acc_norm_stderr": 0.03765845117168863 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.44694533762057875, + "acc_stderr": 0.02823776942208533, + "acc_norm": 0.44694533762057875, + "acc_norm_stderr": 0.02823776942208533 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4260089686098655, + "acc_stderr": 0.033188332862172806, + "acc_norm": 0.4260089686098655, + "acc_norm_stderr": 0.033188332862172806 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4351145038167939, + "acc_stderr": 0.04348208051644858, + "acc_norm": 0.4351145038167939, + "acc_norm_stderr": 0.04348208051644858 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5606060606060606, + "acc_stderr": 0.035360859475294805, + "acc_norm": 0.5606060606060606, + "acc_norm_stderr": 0.035360859475294805 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4413793103448276, + 
"acc_stderr": 0.04137931034482758, + "acc_norm": 0.4413793103448276, + "acc_norm_stderr": 0.04137931034482758 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.04336432707993178, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.04336432707993178 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5252100840336135, + "acc_stderr": 0.03243718055137411, + "acc_norm": 0.5252100840336135, + "acc_norm_stderr": 0.03243718055137411 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.46923076923076923, + "acc_stderr": 0.025302958890850154, + "acc_norm": 0.46923076923076923, + "acc_norm_stderr": 0.025302958890850154 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.63, + "acc_stderr": 0.04852365870939098, + "acc_norm": 0.63, + "acc_norm_stderr": 0.04852365870939098 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3694581280788177, + "acc_stderr": 0.033959703819985754, + "acc_norm": 0.3694581280788177, + "acc_norm_stderr": 0.033959703819985754 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.44516129032258067, + "acc_stderr": 0.028272410186214906, + "acc_norm": 0.44516129032258067, + "acc_norm_stderr": 0.028272410186214906 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7777777777777778, + "acc_stderr": 0.027236013946196687, + "acc_norm": 0.7777777777777778, + "acc_norm_stderr": 0.027236013946196687 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4716981132075472, + "acc_stderr": 0.030723535249006107, + "acc_norm": 0.4716981132075472, + "acc_norm_stderr": 0.030723535249006107 + }, + 
"harness|ko_mmlu_public_relations|5": { + "acc": 0.5181818181818182, + "acc_stderr": 0.04785964010794916, + "acc_norm": 0.5181818181818182, + "acc_norm_stderr": 0.04785964010794916 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.34814814814814815, + "acc_stderr": 0.029045600290616255, + "acc_norm": 0.34814814814814815, + "acc_norm_stderr": 0.029045600290616255 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2781456953642384, + "acc_stderr": 0.03658603262763744, + "acc_norm": 0.2781456953642384, + "acc_norm_stderr": 0.03658603262763744 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.572139303482587, + "acc_stderr": 0.03498541988407795, + "acc_norm": 0.572139303482587, + "acc_norm_stderr": 0.03498541988407795 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4161849710982659, + "acc_stderr": 0.03758517775404947, + "acc_norm": 0.4161849710982659, + "acc_norm_stderr": 0.03758517775404947 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.025107425481137282, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.025107425481137282 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.040166600304512336, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.040166600304512336 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.53, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.53, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5115606936416185, + "acc_stderr": 0.026911898686377913, + "acc_norm": 0.5115606936416185, + "acc_norm_stderr": 0.026911898686377913 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5337423312883436, + "acc_stderr": 0.039194155450484096, + "acc_norm": 0.5337423312883436, + 
"acc_norm_stderr": 0.039194155450484096 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4506172839506173, + "acc_stderr": 0.027684721415656203, + "acc_norm": 0.4506172839506173, + "acc_norm_stderr": 0.027684721415656203 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.49740932642487046, + "acc_stderr": 0.03608390745384488, + "acc_norm": 0.49740932642487046, + "acc_norm_stderr": 0.03608390745384488 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2894736842105263, + "acc_stderr": 0.04266339443159394, + "acc_norm": 0.2894736842105263, + "acc_norm_stderr": 0.04266339443159394 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5321100917431193, + "acc_stderr": 0.02139307122268081, + "acc_norm": 0.5321100917431193, + "acc_norm_stderr": 0.02139307122268081 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.044444444444444495, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.044444444444444495 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.48366013071895425, + "acc_stderr": 0.028614624752805407, + "acc_norm": 0.48366013071895425, + "acc_norm_stderr": 0.028614624752805407 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6198347107438017, + "acc_stderr": 0.04431324501968431, + "acc_norm": 0.6198347107438017, + "acc_norm_stderr": 0.04431324501968431 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40131578947368424, + "acc_stderr": 0.03988903703336285, + "acc_norm": 0.40131578947368424, + "acc_norm_stderr": 0.03988903703336285 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.38562091503267976, + "acc_stderr": 0.019691459052354154, 
+ "acc_norm": 0.38562091503267976, + "acc_norm_stderr": 0.019691459052354154 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3546099290780142, + "acc_stderr": 0.02853865002887864, + "acc_norm": 0.3546099290780142, + "acc_norm_stderr": 0.02853865002887864 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.38392857142857145, + "acc_stderr": 0.04616143075028546, + "acc_norm": 0.38392857142857145, + "acc_norm_stderr": 0.04616143075028546 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.033509916046960436, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.033509916046960436 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2860335195530726, + "acc_stderr": 0.015113972129062129, + "acc_norm": 0.2860335195530726, + "acc_norm_stderr": 0.015113972129062129 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.67, + "acc_stderr": 0.04725815626252611, + "acc_norm": 0.67, + "acc_norm_stderr": 0.04725815626252611 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.35661764705882354, + "acc_stderr": 0.029097209568411945, + "acc_norm": 0.35661764705882354, + "acc_norm_stderr": 0.029097209568411945 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5306122448979592, + "acc_stderr": 0.031949171367580624, + "acc_norm": 0.5306122448979592, + "acc_norm_stderr": 0.031949171367580624 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6118143459915611, + "acc_stderr": 0.0317229500433233, + "acc_norm": 0.6118143459915611, + "acc_norm_stderr": 0.0317229500433233 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3500651890482399, + "acc_stderr": 0.012182552313215163, + "acc_norm": 0.3500651890482399, + "acc_norm_stderr": 0.012182552313215163 + }, + 
"harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.49019607843137253, + "acc_stderr": 0.03508637358630572, + "acc_norm": 0.49019607843137253, + "acc_norm_stderr": 0.03508637358630572 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5272727272727272, + "acc_stderr": 0.03898531605579419, + "acc_norm": 0.5272727272727272, + "acc_norm_stderr": 0.03898531605579419 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.4283965728274174, + "mc1_stderr": 0.017323088597314767, + "mc2": 0.5874844740097508, + "mc2_stderr": 0.01634195500942428 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.44155844155844154, + "acc_stderr": 0.017072525875563103, + "acc_norm": 0.448642266824085, + "acc_norm_stderr": 0.01709943051472578 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, 
+ "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "RubielLabarta/LogoS-7Bx2-MoE-13B-v0.2", + "model_sha": "fb0f72b9914a81892bfeea5a04fcd9676c883d64", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/SJ-Donald/SJ-SOLAR-10.7b-DPO/result_2024-01-25 00:56:50.json b/SJ-Donald/SJ-SOLAR-10.7b-DPO/result_2024-01-25 00:56:50.json new file mode 100644 index 
0000000000000000000000000000000000000000..686c6d1b7e25d1581a2c5813d03825ca2edcaa73 --- /dev/null +++ b/SJ-Donald/SJ-SOLAR-10.7b-DPO/result_2024-01-25 00:56:50.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4718430034129693, + "acc_stderr": 0.014588204105102203, + "acc_norm": 0.5366894197952219, + "acc_norm_stderr": 0.014572000527756994 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4493128858793069, + "acc_stderr": 0.004964075870120345, + "acc_norm": 0.619896434973113, + "acc_norm_stderr": 0.004844199910173041 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5789473684210527, + "acc_stderr": 0.03786720706234214, + "acc_norm": 0.5789473684210527, + "acc_norm_stderr": 0.03786720706234214 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6213592233009708, + "acc_stderr": 0.04802694698258974, + "acc_norm": 0.6213592233009708, + "acc_norm_stderr": 0.04802694698258974 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6500638569604087, + "acc_stderr": 0.017055679797150426, + "acc_norm": 0.6500638569604087, + "acc_norm_stderr": 0.017055679797150426 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4740740740740741, + "acc_stderr": 0.04313531696750574, + "acc_norm": 0.4740740740740741, + "acc_norm_stderr": 0.04313531696750574 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4851063829787234, + "acc_stderr": 0.03267151848924777, + "acc_norm": 0.4851063829787234, + "acc_norm_stderr": 0.03267151848924777 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.5120481927710844, + "acc_stderr": 0.03891364495835817, + "acc_norm": 0.5120481927710844, + "acc_norm_stderr": 0.03891364495835817 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6366559485530546, + "acc_stderr": 0.02731684767419271, + "acc_norm": 0.6366559485530546, + "acc_norm_stderr": 
0.02731684767419271 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5829596412556054, + "acc_stderr": 0.03309266936071721, + "acc_norm": 0.5829596412556054, + "acc_norm_stderr": 0.03309266936071721 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6259541984732825, + "acc_stderr": 0.04243869242230524, + "acc_norm": 0.6259541984732825, + "acc_norm_stderr": 0.04243869242230524 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.48, + "acc_stderr": 0.05021167315686779, + "acc_norm": 0.48, + "acc_norm_stderr": 0.05021167315686779 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7373737373737373, + "acc_stderr": 0.03135305009533086, + "acc_norm": 0.7373737373737373, + "acc_norm_stderr": 0.03135305009533086 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.45517241379310347, + "acc_stderr": 0.04149886942192117, + "acc_norm": 0.45517241379310347, + "acc_norm_stderr": 0.04149886942192117 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3431372549019608, + "acc_stderr": 0.04724007352383888, + "acc_norm": 0.3431372549019608, + "acc_norm_stderr": 0.04724007352383888 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6260504201680672, + "acc_stderr": 0.03142946637883708, + "acc_norm": 0.6260504201680672, + "acc_norm_stderr": 0.03142946637883708 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5615384615384615, + "acc_stderr": 0.02515826601686861, + "acc_norm": 0.5615384615384615, + "acc_norm_stderr": 0.02515826601686861 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6296296296296297, + "acc_stderr": 0.04668408033024931, + "acc_norm": 0.6296296296296297, + 
"acc_norm_stderr": 0.04668408033024931 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4088669950738916, + "acc_stderr": 0.034590588158832314, + "acc_norm": 0.4088669950738916, + "acc_norm_stderr": 0.034590588158832314 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5903225806451613, + "acc_stderr": 0.02797605491534736, + "acc_norm": 0.5903225806451613, + "acc_norm_stderr": 0.02797605491534736 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7649572649572649, + "acc_stderr": 0.02777883590493543, + "acc_norm": 0.7649572649572649, + "acc_norm_stderr": 0.02777883590493543 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5283018867924528, + "acc_stderr": 0.0307235352490061, + "acc_norm": 0.5283018867924528, + "acc_norm_stderr": 0.0307235352490061 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5727272727272728, + "acc_stderr": 0.04738198703545483, + "acc_norm": 0.5727272727272728, + "acc_norm_stderr": 0.04738198703545483 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3296296296296296, + "acc_stderr": 0.028661201116524582, + "acc_norm": 0.3296296296296296, + "acc_norm_stderr": 0.028661201116524582 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.03757949922943343, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.03757949922943343 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7164179104477612, + "acc_stderr": 0.031871875379197945, + "acc_norm": 0.7164179104477612, + "acc_norm_stderr": 0.031871875379197945 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.47398843930635837, + "acc_stderr": 0.03807301726504511, + "acc_norm": 0.47398843930635837, + "acc_norm_stderr": 0.03807301726504511 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.43386243386243384, + "acc_stderr": 0.025525034382474887, + "acc_norm": 0.43386243386243384, + "acc_norm_stderr": 0.025525034382474887 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 
0.5069444444444444, + "acc_stderr": 0.04180806750294938, + "acc_norm": 0.5069444444444444, + "acc_norm_stderr": 0.04180806750294938 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.69, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.69, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5549132947976878, + "acc_stderr": 0.02675625512966377, + "acc_norm": 0.5549132947976878, + "acc_norm_stderr": 0.02675625512966377 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5521472392638037, + "acc_stderr": 0.03906947479456606, + "acc_norm": 0.5521472392638037, + "acc_norm_stderr": 0.03906947479456606 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6327160493827161, + "acc_stderr": 0.02682280175950789, + "acc_norm": 0.6327160493827161, + "acc_norm_stderr": 0.02682280175950789 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7253886010362695, + "acc_stderr": 0.032210245080411544, + "acc_norm": 0.7253886010362695, + "acc_norm_stderr": 0.032210245080411544 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.4473684210526316, + "acc_stderr": 0.046774730044912005, + "acc_norm": 0.4473684210526316, + "acc_norm_stderr": 0.046774730044912005 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6697247706422018, + "acc_stderr": 0.02016446633634298, + "acc_norm": 0.6697247706422018, + "acc_norm_stderr": 0.02016446633634298 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3492063492063492, + "acc_stderr": 0.04263906892795133, + "acc_norm": 0.3492063492063492, + "acc_norm_stderr": 0.04263906892795133 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 
0.5980392156862745, + "acc_stderr": 0.028074158947600656, + "acc_norm": 0.5980392156862745, + "acc_norm_stderr": 0.028074158947600656 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.57, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.57, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7272727272727273, + "acc_stderr": 0.04065578140908705, + "acc_norm": 0.7272727272727273, + "acc_norm_stderr": 0.04065578140908705 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5394736842105263, + "acc_stderr": 0.04056242252249034, + "acc_norm": 0.5394736842105263, + "acc_norm_stderr": 0.04056242252249034 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5245098039215687, + "acc_stderr": 0.020203517280261447, + "acc_norm": 0.5245098039215687, + "acc_norm_stderr": 0.020203517280261447 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.38652482269503546, + "acc_stderr": 0.02904919034254346, + "acc_norm": 0.38652482269503546, + "acc_norm_stderr": 0.02904919034254346 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.39285714285714285, + "acc_stderr": 0.04635550135609976, + "acc_norm": 0.39285714285714285, + "acc_norm_stderr": 0.04635550135609976 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5231481481481481, + "acc_stderr": 0.034063153607115065, + "acc_norm": 0.5231481481481481, + "acc_norm_stderr": 0.034063153607115065 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.26033519553072626, + "acc_stderr": 0.014676252009319475, + "acc_norm": 0.26033519553072626, + "acc_norm_stderr": 0.014676252009319475 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.7, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.7, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_professional_medicine|5": { + 
"acc": 0.5110294117647058, + "acc_stderr": 0.030365446477275675, + "acc_norm": 0.5110294117647058, + "acc_norm_stderr": 0.030365446477275675 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5836734693877551, + "acc_stderr": 0.03155782816556165, + "acc_norm": 0.5836734693877551, + "acc_norm_stderr": 0.03155782816556165 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.759493670886076, + "acc_stderr": 0.027820781981149678, + "acc_norm": 0.759493670886076, + "acc_norm_stderr": 0.027820781981149678 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.4152542372881356, + "acc_stderr": 0.012585471793400665, + "acc_norm": 0.4152542372881356, + "acc_norm_stderr": 0.012585471793400665 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6617647058823529, + "acc_stderr": 0.03320574612945431, + "acc_norm": 0.6617647058823529, + "acc_norm_stderr": 0.03320574612945431 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6787878787878788, + "acc_stderr": 0.03646204963253813, + "acc_norm": 0.6787878787878788, + "acc_norm_stderr": 0.03646204963253813 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.4039167686658507, + "mc1_stderr": 0.01717727682258428, + "mc2": 0.5720346967291646, + "mc2_stderr": 0.01586677021938394 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5702479338842975, + "acc_stderr": 0.01701984753597221, + "acc_norm": 0.5844155844155844, + "acc_norm_stderr": 0.01694358631307656 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, 
+ "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + 
"harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "SJ-Donald/SJ-SOLAR-10.7b-DPO", + "model_sha": "a0cb2fbc20b3b65e8e6c626893eaf5456054f49c", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/SJ-Donald/SOLAR-10.7B-slerp/result_2024-01-11 05:42:26.json b/SJ-Donald/SOLAR-10.7B-slerp/result_2024-01-11 05:42:26.json new file mode 100644 index 0000000000000000000000000000000000000000..bab13866d42af20f5635e38cd70c31d0d4322011 --- /dev/null +++ b/SJ-Donald/SOLAR-10.7B-slerp/result_2024-01-11 05:42:26.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4726962457337884, + "acc_stderr": 0.014589589101985994, + "acc_norm": 0.5358361774744027, + "acc_norm_stderr": 0.014573813664735712 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4508066122286397, + "acc_stderr": 0.00496557224680386, + "acc_norm": 0.6202947619996017, + "acc_norm_stderr": 0.004843216325090246 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5730994152046783, + "acc_stderr": 0.03793620616529916, + "acc_norm": 0.5730994152046783, + "acc_norm_stderr": 0.03793620616529916 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6213592233009708, + "acc_stderr": 0.04802694698258974, + "acc_norm": 0.6213592233009708, + "acc_norm_stderr": 0.04802694698258974 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6526181353767561, + "acc_stderr": 0.017026671748655728, + "acc_norm": 0.6526181353767561, + "acc_norm_stderr": 0.017026671748655728 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4888888888888889, + "acc_stderr": 0.04318275491977976, + "acc_norm": 0.4888888888888889, + "acc_norm_stderr": 0.04318275491977976 + }, + "harness|ko_mmlu_abstract_algebra|5": { 
+ "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4851063829787234, + "acc_stderr": 0.032671518489247764, + "acc_norm": 0.4851063829787234, + "acc_norm_stderr": 0.032671518489247764 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.5120481927710844, + "acc_stderr": 0.03891364495835817, + "acc_norm": 0.5120481927710844, + "acc_norm_stderr": 0.03891364495835817 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6495176848874598, + "acc_stderr": 0.027098652621301747, + "acc_norm": 0.6495176848874598, + "acc_norm_stderr": 0.027098652621301747 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5829596412556054, + "acc_stderr": 0.03309266936071721, + "acc_norm": 0.5829596412556054, + "acc_norm_stderr": 0.03309266936071721 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6259541984732825, + "acc_stderr": 0.04243869242230524, + "acc_norm": 0.6259541984732825, + "acc_norm_stderr": 0.04243869242230524 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.48, + "acc_stderr": 0.05021167315686779, + "acc_norm": 0.48, + "acc_norm_stderr": 0.05021167315686779 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7323232323232324, + "acc_stderr": 0.031544498882702866, + "acc_norm": 0.7323232323232324, + "acc_norm_stderr": 0.031544498882702866 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4689655172413793, + "acc_stderr": 0.04158632762097828, + "acc_norm": 0.4689655172413793, + "acc_norm_stderr": 0.04158632762097828 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04690650298201943, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04690650298201943 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6218487394957983, + "acc_stderr": 0.031499305777849054, + "acc_norm": 0.6218487394957983, + "acc_norm_stderr": 0.031499305777849054 + }, + 
"harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5666666666666667, + "acc_stderr": 0.0251246535258851, + "acc_norm": 0.5666666666666667, + "acc_norm_stderr": 0.0251246535258851 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.04793724854411019, + "acc_norm": 0.35, + "acc_norm_stderr": 0.04793724854411019 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6296296296296297, + "acc_stderr": 0.04668408033024931, + "acc_norm": 0.6296296296296297, + "acc_norm_stderr": 0.04668408033024931 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4039408866995074, + "acc_stderr": 0.0345245390382204, + "acc_norm": 0.4039408866995074, + "acc_norm_stderr": 0.0345245390382204 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5870967741935483, + "acc_stderr": 0.02800913812540038, + "acc_norm": 0.5870967741935483, + "acc_norm_stderr": 0.02800913812540038 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7606837606837606, + "acc_stderr": 0.027951826808924336, + "acc_norm": 0.7606837606837606, + "acc_norm_stderr": 0.027951826808924336 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5358490566037736, + "acc_stderr": 0.030693675018458003, + "acc_norm": 0.5358490566037736, + "acc_norm_stderr": 0.030693675018458003 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5727272727272728, + "acc_stderr": 0.04738198703545483, + "acc_norm": 0.5727272727272728, + "acc_norm_stderr": 0.04738198703545483 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.028742040903948496, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.028742040903948496 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.03780445850526732, + "acc_norm": 0.31125827814569534, + 
"acc_norm_stderr": 0.03780445850526732 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7114427860696517, + "acc_stderr": 0.03203841040213319, + "acc_norm": 0.7114427860696517, + "acc_norm_stderr": 0.03203841040213319 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.47398843930635837, + "acc_stderr": 0.03807301726504511, + "acc_norm": 0.47398843930635837, + "acc_norm_stderr": 0.03807301726504511 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.43386243386243384, + "acc_stderr": 0.025525034382474887, + "acc_norm": 0.43386243386243384, + "acc_norm_stderr": 0.025525034382474887 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5, + "acc_stderr": 0.04181210050035455, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04181210050035455 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.7, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.7, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5578034682080925, + "acc_stderr": 0.026738603643807403, + "acc_norm": 0.5578034682080925, + "acc_norm_stderr": 0.026738603643807403 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5460122699386503, + "acc_stderr": 0.0391170190467718, + "acc_norm": 0.5460122699386503, + "acc_norm_stderr": 0.0391170190467718 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6296296296296297, + "acc_stderr": 0.02686949074481526, + "acc_norm": 0.6296296296296297, + "acc_norm_stderr": 0.02686949074481526 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7150259067357513, + "acc_stderr": 0.032577140777096614, + "acc_norm": 0.7150259067357513, + "acc_norm_stderr": 
0.032577140777096614 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.4473684210526316, + "acc_stderr": 0.046774730044912, + "acc_norm": 0.4473684210526316, + "acc_norm_stderr": 0.046774730044912 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6678899082568808, + "acc_stderr": 0.02019268298542334, + "acc_norm": 0.6678899082568808, + "acc_norm_stderr": 0.02019268298542334 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3492063492063492, + "acc_stderr": 0.04263906892795133, + "acc_norm": 0.3492063492063492, + "acc_norm_stderr": 0.04263906892795133 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5980392156862745, + "acc_stderr": 0.028074158947600653, + "acc_norm": 0.5980392156862745, + "acc_norm_stderr": 0.028074158947600653 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.57, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.57, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7272727272727273, + "acc_stderr": 0.04065578140908705, + "acc_norm": 0.7272727272727273, + "acc_norm_stderr": 0.04065578140908705 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5328947368421053, + "acc_stderr": 0.040601270352363966, + "acc_norm": 0.5328947368421053, + "acc_norm_stderr": 0.040601270352363966 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5245098039215687, + "acc_stderr": 0.020203517280261447, + "acc_norm": 0.5245098039215687, + "acc_norm_stderr": 0.020203517280261447 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.38652482269503546, + "acc_stderr": 0.02904919034254346, + "acc_norm": 0.38652482269503546, + "acc_norm_stderr": 0.02904919034254346 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.375, + "acc_stderr": 0.04595091388086298, + "acc_norm": 0.375, + "acc_norm_stderr": 0.04595091388086298 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5185185185185185, + "acc_stderr": 0.03407632093854054, + "acc_norm": 
0.5185185185185185, + "acc_norm_stderr": 0.03407632093854054 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.26145251396648045, + "acc_stderr": 0.014696599650364555, + "acc_norm": 0.26145251396648045, + "acc_norm_stderr": 0.014696599650364555 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.68, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.68, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5147058823529411, + "acc_stderr": 0.03035969707904611, + "acc_norm": 0.5147058823529411, + "acc_norm_stderr": 0.03035969707904611 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5836734693877551, + "acc_stderr": 0.03155782816556165, + "acc_norm": 0.5836734693877551, + "acc_norm_stderr": 0.03155782816556165 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7552742616033755, + "acc_stderr": 0.027985699387036416, + "acc_norm": 0.7552742616033755, + "acc_norm_stderr": 0.027985699387036416 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.41590612777053454, + "acc_stderr": 0.012588323850313592, + "acc_norm": 0.41590612777053454, + "acc_norm_stderr": 0.012588323850313592 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6617647058823529, + "acc_stderr": 0.03320574612945431, + "acc_norm": 0.6617647058823529, + "acc_norm_stderr": 0.03320574612945431 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6787878787878788, + "acc_stderr": 0.03646204963253813, + "acc_norm": 0.6787878787878788, + "acc_norm_stderr": 0.03646204963253813 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.41003671970624234, + "mc1_stderr": 0.017217844717449318, + "mc2": 0.5715844361100709, + "mc2_stderr": 0.015837361919137687 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.577331759149941, 
+ "acc_stderr": 0.016983506079577604, + "acc_norm": 0.5855962219598583, + "acc_norm_stderr": 0.016936583383943608 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + 
"harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "SJ-Donald/SOLAR-10.7B-slerp", + "model_sha": "d6c0e1eb5dc5c3c0f087e875b5e8d6962eb1a24e", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/SJ-Donald/llama3-passthrough-chat/result_2024-05-17 07:48:22.json b/SJ-Donald/llama3-passthrough-chat/result_2024-05-17 07:48:22.json new file mode 100644 index 0000000000000000000000000000000000000000..998114e574e0ef2a2a3a2fdebac22e504e4af1c0 --- /dev/null +++ b/SJ-Donald/llama3-passthrough-chat/result_2024-05-17 07:48:22.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3643344709897611, + "acc_stderr": 0.014063260279882419, + "acc_norm": 0.4351535836177474, + "acc_norm_stderr": 0.014487986197186045 + }, + "harness|ko_hellaswag|10": { + "acc": 0.35570603465445133, + "acc_stderr": 0.004777483159634026, + "acc_norm": 0.4560844453296156, + "acc_norm_stderr": 0.004970497804772314 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.49122807017543857, + "acc_stderr": 0.038342347441649924, 
+ "acc_norm": 0.49122807017543857, + "acc_norm_stderr": 0.038342347441649924 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5728155339805825, + "acc_stderr": 0.04897957737781168, + "acc_norm": 0.5728155339805825, + "acc_norm_stderr": 0.04897957737781168 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.454661558109834, + "acc_stderr": 0.017806304585052606, + "acc_norm": 0.454661558109834, + "acc_norm_stderr": 0.017806304585052606 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34814814814814815, + "acc_stderr": 0.041153246103369526, + "acc_norm": 0.34814814814814815, + "acc_norm_stderr": 0.041153246103369526 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.5063829787234042, + "acc_stderr": 0.03268335899936336, + "acc_norm": 0.5063829787234042, + "acc_norm_stderr": 0.03268335899936336 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.37349397590361444, + "acc_stderr": 0.037658451171688624, + "acc_norm": 0.37349397590361444, + "acc_norm_stderr": 0.037658451171688624 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5305466237942122, + "acc_stderr": 0.028345045864840636, + "acc_norm": 0.5305466237942122, + "acc_norm_stderr": 0.028345045864840636 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.484304932735426, + "acc_stderr": 0.0335412657542081, + "acc_norm": 0.484304932735426, + "acc_norm_stderr": 0.0335412657542081 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5114503816793893, + "acc_stderr": 0.043841400240780176, + "acc_norm": 0.5114503816793893, + "acc_norm_stderr": 0.043841400240780176 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.45, + "acc_stderr": 0.04999999999999999, + "acc_norm": 0.45, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5151515151515151, + "acc_stderr": 0.0356071651653106, + 
"acc_norm": 0.5151515151515151, + "acc_norm_stderr": 0.0356071651653106 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5793103448275863, + "acc_stderr": 0.0411391498118926, + "acc_norm": 0.5793103448275863, + "acc_norm_stderr": 0.0411391498118926 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.35294117647058826, + "acc_stderr": 0.04755129616062946, + "acc_norm": 0.35294117647058826, + "acc_norm_stderr": 0.04755129616062946 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5252100840336135, + "acc_stderr": 0.0324371805513741, + "acc_norm": 0.5252100840336135, + "acc_norm_stderr": 0.0324371805513741 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.49230769230769234, + "acc_stderr": 0.0253480060315348, + "acc_norm": 0.49230769230769234, + "acc_norm_stderr": 0.0253480060315348 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.58, + "acc_stderr": 0.04960449637488583, + "acc_norm": 0.58, + "acc_norm_stderr": 0.04960449637488583 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5833333333333334, + "acc_stderr": 0.04766075165356461, + "acc_norm": 0.5833333333333334, + "acc_norm_stderr": 0.04766075165356461 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.458128078817734, + "acc_stderr": 0.03505630140785741, + "acc_norm": 0.458128078817734, + "acc_norm_stderr": 0.03505630140785741 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5032258064516129, + "acc_stderr": 0.02844341422643831, + "acc_norm": 0.5032258064516129, + "acc_norm_stderr": 0.02844341422643831 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7606837606837606, + "acc_stderr": 0.027951826808924336, + "acc_norm": 0.7606837606837606, + "acc_norm_stderr": 0.027951826808924336 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4981132075471698, + 
"acc_stderr": 0.030772653642075664, + "acc_norm": 0.4981132075471698, + "acc_norm_stderr": 0.030772653642075664 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.31851851851851853, + "acc_stderr": 0.028406533090608463, + "acc_norm": 0.31851851851851853, + "acc_norm_stderr": 0.028406533090608463 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + "acc_stderr": 0.037345356767871984, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.037345356767871984 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6467661691542289, + "acc_stderr": 0.03379790611796777, + "acc_norm": 0.6467661691542289, + "acc_norm_stderr": 0.03379790611796777 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4277456647398844, + "acc_stderr": 0.03772446857518027, + "acc_norm": 0.4277456647398844, + "acc_norm_stderr": 0.03772446857518027 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3783068783068783, + "acc_stderr": 0.024976954053155254, + "acc_norm": 0.3783068783068783, + "acc_norm_stderr": 0.024976954053155254 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4305555555555556, + "acc_stderr": 0.041406856391115014, + "acc_norm": 0.4305555555555556, + "acc_norm_stderr": 0.041406856391115014 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.68, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.68, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5375722543352601, + "acc_stderr": 0.026842985519615375, + "acc_norm": 0.5375722543352601, + "acc_norm_stderr": 0.026842985519615375 + }, + "harness|ko_mmlu_logical_fallacies|5": 
{ + "acc": 0.4294478527607362, + "acc_stderr": 0.038890666191127216, + "acc_norm": 0.4294478527607362, + "acc_norm_stderr": 0.038890666191127216 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4845679012345679, + "acc_stderr": 0.027807490044276198, + "acc_norm": 0.4845679012345679, + "acc_norm_stderr": 0.027807490044276198 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5492227979274611, + "acc_stderr": 0.03590910952235524, + "acc_norm": 0.5492227979274611, + "acc_norm_stderr": 0.03590910952235524 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.35964912280701755, + "acc_stderr": 0.04514496132873633, + "acc_norm": 0.35964912280701755, + "acc_norm_stderr": 0.04514496132873633 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5577981651376147, + "acc_stderr": 0.0212936132075202, + "acc_norm": 0.5577981651376147, + "acc_norm_stderr": 0.0212936132075202 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.40476190476190477, + "acc_stderr": 0.043902592653775614, + "acc_norm": 0.40476190476190477, + "acc_norm_stderr": 0.043902592653775614 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5, + "acc_stderr": 0.028629916715693413, + "acc_norm": 0.5, + "acc_norm_stderr": 0.028629916715693413 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.57, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.57, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6859504132231405, + "acc_stderr": 0.04236964753041018, + "acc_norm": 0.6859504132231405, + "acc_norm_stderr": 0.04236964753041018 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4342105263157895, + "acc_stderr": 0.040335656678483184, + "acc_norm": 0.4342105263157895, + "acc_norm_stderr": 0.040335656678483184 + }, + 
"harness|ko_mmlu_professional_psychology|5": { + "acc": 0.434640522875817, + "acc_stderr": 0.020054269200726452, + "acc_norm": 0.434640522875817, + "acc_norm_stderr": 0.020054269200726452 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3617021276595745, + "acc_stderr": 0.028663820147199492, + "acc_norm": 0.3617021276595745, + "acc_norm_stderr": 0.028663820147199492 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4375, + "acc_stderr": 0.04708567521880525, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.04708567521880525 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.39351851851851855, + "acc_stderr": 0.03331747876370312, + "acc_norm": 0.39351851851851855, + "acc_norm_stderr": 0.03331747876370312 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.311731843575419, + "acc_stderr": 0.015491756531894637, + "acc_norm": 0.311731843575419, + "acc_norm_stderr": 0.015491756531894637 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.0498887651569859, + "acc_norm": 0.44, + "acc_norm_stderr": 0.0498887651569859 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.67, + "acc_stderr": 0.047258156262526094, + "acc_norm": 0.67, + "acc_norm_stderr": 0.047258156262526094 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3786764705882353, + "acc_stderr": 0.02946513363977613, + "acc_norm": 0.3786764705882353, + "acc_norm_stderr": 0.02946513363977613 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6408163265306123, + "acc_stderr": 0.03071356045510849, + "acc_norm": 0.6408163265306123, + "acc_norm_stderr": 0.03071356045510849 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6371308016877637, + "acc_stderr": 0.03129920825530213, + "acc_norm": 0.6371308016877637, + "acc_norm_stderr": 0.03129920825530213 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3617992177314211, + "acc_stderr": 0.012272736233262943, + "acc_norm": 0.3617992177314211, + 
"acc_norm_stderr": 0.012272736233262943 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5343137254901961, + "acc_stderr": 0.03501038327635897, + "acc_norm": 0.5343137254901961, + "acc_norm_stderr": 0.03501038327635897 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6303030303030303, + "acc_stderr": 0.03769430314512568, + "acc_norm": 0.6303030303030303, + "acc_norm_stderr": 0.03769430314512568 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.32313341493268055, + "mc1_stderr": 0.016371836286454607, + "mc2": 0.5049311098779597, + "mc2_stderr": 0.016074222030752545 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4639905548996458, + "acc_stderr": 0.017145715365486664, + "acc_norm": 0.5194805194805194, + "acc_norm_stderr": 0.017177301992342544 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "SJ-Donald/llama3-passthrough-chat", + "model_sha": "ac11fd8473e7e057c7b1ec8abc30e201867be6ec", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/SJ-Donald/llama3-passthrough/result_2024-05-16 12:56:07.json b/SJ-Donald/llama3-passthrough/result_2024-05-16 12:56:07.json new file 
mode 100644 index 0000000000000000000000000000000000000000..d68ee0f9fd4af74bf2c07d2b15302841d705f167 --- /dev/null +++ b/SJ-Donald/llama3-passthrough/result_2024-05-16 12:56:07.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3532423208191126, + "acc_stderr": 0.013967822714840055, + "acc_norm": 0.4129692832764505, + "acc_norm_stderr": 0.014388344935398324 + }, + "harness|ko_hellaswag|10": { + "acc": 0.37821151165106554, + "acc_stderr": 0.004839497020536609, + "acc_norm": 0.5052778331009758, + "acc_norm_stderr": 0.004989503417767287 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6549707602339181, + "acc_stderr": 0.036459813773888065, + "acc_norm": 0.6549707602339181, + "acc_norm_stderr": 0.036459813773888065 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.7087378640776699, + "acc_stderr": 0.044986763205729224, + "acc_norm": 0.7087378640776699, + "acc_norm_stderr": 0.044986763205729224 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.561941251596424, + "acc_stderr": 0.017742232238257244, + "acc_norm": 0.561941251596424, + "acc_norm_stderr": 0.017742232238257244 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37777777777777777, + "acc_stderr": 0.04188307537595853, + "acc_norm": 0.37777777777777777, + "acc_norm_stderr": 0.04188307537595853 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252603, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252603 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.49361702127659574, + "acc_stderr": 0.03268335899936337, + "acc_norm": 0.49361702127659574, + "acc_norm_stderr": 0.03268335899936337 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4397590361445783, + "acc_stderr": 0.03864139923699121, + "acc_norm": 0.4397590361445783, + "acc_norm_stderr": 0.03864139923699121 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5048231511254019, + "acc_stderr": 0.028396770444111298, + "acc_norm": 0.5048231511254019, + 
"acc_norm_stderr": 0.028396770444111298 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5381165919282511, + "acc_stderr": 0.03346015011973228, + "acc_norm": 0.5381165919282511, + "acc_norm_stderr": 0.03346015011973228 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5343511450381679, + "acc_stderr": 0.043749285605997376, + "acc_norm": 0.5343511450381679, + "acc_norm_stderr": 0.043749285605997376 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.44, + "acc_stderr": 0.049888765156985905, + "acc_norm": 0.44, + "acc_norm_stderr": 0.049888765156985905 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.0354760149400694, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.0354760149400694 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5103448275862069, + "acc_stderr": 0.04165774775728762, + "acc_norm": 0.5103448275862069, + "acc_norm_stderr": 0.04165774775728762 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.044405219061793275, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.044405219061793275 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5882352941176471, + "acc_stderr": 0.03196876989195779, + "acc_norm": 0.5882352941176471, + "acc_norm_stderr": 0.03196876989195779 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5076923076923077, + "acc_stderr": 0.025348006031534757, + "acc_norm": 0.5076923076923077, + "acc_norm_stderr": 0.025348006031534757 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.62, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.62, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6296296296296297, + "acc_stderr": 0.04668408033024931, + "acc_norm": 
0.6296296296296297, + "acc_norm_stderr": 0.04668408033024931 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3891625615763547, + "acc_stderr": 0.034304624161038716, + "acc_norm": 0.3891625615763547, + "acc_norm_stderr": 0.034304624161038716 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5258064516129032, + "acc_stderr": 0.028406095057653326, + "acc_norm": 0.5258064516129032, + "acc_norm_stderr": 0.028406095057653326 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.782051282051282, + "acc_stderr": 0.027046857630716667, + "acc_norm": 0.782051282051282, + "acc_norm_stderr": 0.027046857630716667 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4830188679245283, + "acc_stderr": 0.030755120364119898, + "acc_norm": 0.4830188679245283, + "acc_norm_stderr": 0.030755120364119898 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5363636363636364, + "acc_stderr": 0.04776449162396197, + "acc_norm": 0.5363636363636364, + "acc_norm_stderr": 0.04776449162396197 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.36666666666666664, + "acc_stderr": 0.029381620726465076, + "acc_norm": 0.36666666666666664, + "acc_norm_stderr": 0.029381620726465076 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33774834437086093, + "acc_stderr": 0.038615575462551684, + "acc_norm": 0.33774834437086093, + "acc_norm_stderr": 0.038615575462551684 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6467661691542289, + "acc_stderr": 0.03379790611796777, + "acc_norm": 0.6467661691542289, + "acc_norm_stderr": 0.03379790611796777 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.44508670520231214, + "acc_stderr": 0.03789401760283648, + "acc_norm": 0.44508670520231214, + "acc_norm_stderr": 0.03789401760283648 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3492063492063492, + "acc_stderr": 0.024552292209342658, + "acc_norm": 0.3492063492063492, + "acc_norm_stderr": 0.024552292209342658 + }, + 
"harness|ko_mmlu_college_biology|5": { + "acc": 0.4930555555555556, + "acc_stderr": 0.04180806750294938, + "acc_norm": 0.4930555555555556, + "acc_norm_stderr": 0.04180806750294938 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.7, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.7, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.523121387283237, + "acc_stderr": 0.026890297881303118, + "acc_norm": 0.523121387283237, + "acc_norm_stderr": 0.026890297881303118 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.49079754601226994, + "acc_stderr": 0.039277056007874414, + "acc_norm": 0.49079754601226994, + "acc_norm_stderr": 0.039277056007874414 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.027815973433878014, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.027815973433878014 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5906735751295337, + "acc_stderr": 0.03548608168860806, + "acc_norm": 0.5906735751295337, + "acc_norm_stderr": 0.03548608168860806 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3684210526315789, + "acc_stderr": 0.04537815354939391, + "acc_norm": 0.3684210526315789, + "acc_norm_stderr": 0.04537815354939391 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5743119266055046, + "acc_stderr": 0.021199235972470802, + "acc_norm": 0.5743119266055046, + "acc_norm_stderr": 0.021199235972470802 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4126984126984127, + "acc_stderr": 0.04403438954768177, + "acc_norm": 0.4126984126984127, + "acc_norm_stderr": 
0.04403438954768177 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5294117647058824, + "acc_stderr": 0.028580341065138296, + "acc_norm": 0.5294117647058824, + "acc_norm_stderr": 0.028580341065138296 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6859504132231405, + "acc_stderr": 0.042369647530410184, + "acc_norm": 0.6859504132231405, + "acc_norm_stderr": 0.042369647530410184 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5394736842105263, + "acc_stderr": 0.04056242252249036, + "acc_norm": 0.5394736842105263, + "acc_norm_stderr": 0.04056242252249036 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.43790849673202614, + "acc_stderr": 0.020071257886886525, + "acc_norm": 0.43790849673202614, + "acc_norm_stderr": 0.020071257886886525 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3191489361702128, + "acc_stderr": 0.027807990141320196, + "acc_norm": 0.3191489361702128, + "acc_norm_stderr": 0.027807990141320196 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.5267857142857143, + "acc_stderr": 0.047389751192741546, + "acc_norm": 0.5267857142857143, + "acc_norm_stderr": 0.047389751192741546 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5138888888888888, + "acc_stderr": 0.03408655867977749, + "acc_norm": 0.5138888888888888, + "acc_norm_stderr": 0.03408655867977749 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.32513966480446926, + "acc_stderr": 0.015666542785053562, + "acc_norm": 0.32513966480446926, + "acc_norm_stderr": 0.015666542785053562 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.65, + "acc_stderr": 0.0479372485441102, + "acc_norm": 
0.65, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.39705882352941174, + "acc_stderr": 0.029722152099280058, + "acc_norm": 0.39705882352941174, + "acc_norm_stderr": 0.029722152099280058 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5918367346938775, + "acc_stderr": 0.03146465712827424, + "acc_norm": 0.5918367346938775, + "acc_norm_stderr": 0.03146465712827424 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6497890295358649, + "acc_stderr": 0.03105239193758435, + "acc_norm": 0.6497890295358649, + "acc_norm_stderr": 0.03105239193758435 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.35984354628422427, + "acc_stderr": 0.012258260483689803, + "acc_norm": 0.35984354628422427, + "acc_norm_stderr": 0.012258260483689803 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5637254901960784, + "acc_stderr": 0.03480693138457039, + "acc_norm": 0.5637254901960784, + "acc_norm_stderr": 0.03480693138457039 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5878787878787879, + "acc_stderr": 0.038435669935887186, + "acc_norm": 0.5878787878787879, + "acc_norm_stderr": 0.038435669935887186 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.26560587515299877, + "mc1_stderr": 0.0154610276272536, + "mc2": 0.436366604752637, + "mc2_stderr": 0.01527303310492667 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4628099173553719, + "acc_stderr": 0.0171427361176433, + "acc_norm": 0.58913813459268, + "acc_norm_stderr": 0.01691497276784106 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + 
"harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + 
"harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "SJ-Donald/llama3-passthrough", + "model_sha": "ecd852966d5bc748196022aa1991d315844aa57e", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/SKYEEEE/llama3-Ko-3-8B-finetuned_ver2/result_2024-07-14 03:16:04.json b/SKYEEEE/llama3-Ko-3-8B-finetuned_ver2/result_2024-07-14 03:16:04.json new file mode 100644 index 0000000000000000000000000000000000000000..7d2a9e9c60874443b7fd4b8a9348532fc1da71f4 --- /dev/null +++ b/SKYEEEE/llama3-Ko-3-8B-finetuned_ver2/result_2024-07-14 03:16:04.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3464163822525597, + "acc_stderr": 0.013905011180063254, + "acc_norm": 0.38054607508532423, + "acc_norm_stderr": 0.014188277712349828 + }, + "harness|ko_hellaswag|10": { + "acc": 0.350726946823342, + "acc_stderr": 0.004762223492435252, + "acc_norm": 0.45140410276837284, + "acc_norm_stderr": 0.004966158142645415 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.49122807017543857, + "acc_stderr": 0.038342347441649924, + "acc_norm": 0.49122807017543857, + "acc_norm_stderr": 0.038342347441649924 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4368932038834951, + "acc_stderr": 0.04911147107365777, + "acc_norm": 0.4368932038834951, + "acc_norm_stderr": 0.04911147107365777 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4278416347381865, + "acc_stderr": 0.01769278792780373, + "acc_norm": 0.4278416347381865, + "acc_norm_stderr": 0.01769278792780373 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 
0.4074074074074074, + "acc_stderr": 0.04244633238353229, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.04244633238353229 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621503, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621503 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4, + "acc_stderr": 0.03202563076101735, + "acc_norm": 0.4, + "acc_norm_stderr": 0.03202563076101735 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.35542168674698793, + "acc_stderr": 0.03726214354322415, + "acc_norm": 0.35542168674698793, + "acc_norm_stderr": 0.03726214354322415 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.40836012861736337, + "acc_stderr": 0.027917050748484634, + "acc_norm": 0.40836012861736337, + "acc_norm_stderr": 0.027917050748484634 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.37668161434977576, + "acc_stderr": 0.03252113489929189, + "acc_norm": 0.37668161434977576, + "acc_norm_stderr": 0.03252113489929189 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4198473282442748, + "acc_stderr": 0.04328577215262971, + "acc_norm": 0.4198473282442748, + "acc_norm_stderr": 0.04328577215262971 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3787878787878788, + "acc_stderr": 0.03456088731993747, + "acc_norm": 0.3787878787878788, + "acc_norm_stderr": 0.03456088731993747 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.46206896551724136, + "acc_stderr": 0.041546596717075474, + "acc_norm": 0.46206896551724136, + "acc_norm_stderr": 0.041546596717075474 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.04220773659171452, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.04220773659171452 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 
0.36134453781512604, + "acc_stderr": 0.031204691225150013, + "acc_norm": 0.36134453781512604, + "acc_norm_stderr": 0.031204691225150013 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.33589743589743587, + "acc_stderr": 0.023946724741563962, + "acc_norm": 0.33589743589743587, + "acc_norm_stderr": 0.023946724741563962 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.04803752235190192, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.04803752235190192 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.270935960591133, + "acc_stderr": 0.03127090713297698, + "acc_norm": 0.270935960591133, + "acc_norm_stderr": 0.03127090713297698 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.44193548387096776, + "acc_stderr": 0.028251557906849734, + "acc_norm": 0.44193548387096776, + "acc_norm_stderr": 0.028251557906849734 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5854700854700855, + "acc_stderr": 0.03227396567623779, + "acc_norm": 0.5854700854700855, + "acc_norm_stderr": 0.03227396567623779 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.42641509433962266, + "acc_stderr": 0.030437794342983045, + "acc_norm": 0.42641509433962266, + "acc_norm_stderr": 0.030437794342983045 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.43636363636363634, + "acc_stderr": 0.04750185058907297, + "acc_norm": 0.43636363636363634, + "acc_norm_stderr": 0.04750185058907297 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.027840811495871916, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.027840811495871916 + }, + 
"harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + "acc_stderr": 0.037345356767871984, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.037345356767871984 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.4577114427860697, + "acc_stderr": 0.035228658640995975, + "acc_norm": 0.4577114427860697, + "acc_norm_stderr": 0.035228658640995975 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2947976878612717, + "acc_stderr": 0.034765996075164785, + "acc_norm": 0.2947976878612717, + "acc_norm_stderr": 0.034765996075164785 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2724867724867725, + "acc_stderr": 0.022930973071633345, + "acc_norm": 0.2724867724867725, + "acc_norm_stderr": 0.022930973071633345 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3680555555555556, + "acc_stderr": 0.04032999053960718, + "acc_norm": 0.3680555555555556, + "acc_norm_stderr": 0.04032999053960718 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.3670520231213873, + "acc_stderr": 0.02595005433765408, + "acc_norm": 0.3670520231213873, + "acc_norm_stderr": 0.02595005433765408 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3312883435582822, + "acc_stderr": 0.03697983910025588, + "acc_norm": 0.3312883435582822, + "acc_norm_stderr": 0.03697983910025588 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.35802469135802467, + "acc_stderr": 0.026675611926037086, + "acc_norm": 0.35802469135802467, + "acc_norm_stderr": 0.026675611926037086 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + 
"harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.35751295336787564, + "acc_stderr": 0.034588160421810066, + "acc_norm": 0.35751295336787564, + "acc_norm_stderr": 0.034588160421810066 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.040969851398436695, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.040969851398436695 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3394495412844037, + "acc_stderr": 0.02030210934266235, + "acc_norm": 0.3394495412844037, + "acc_norm_stderr": 0.02030210934266235 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.23015873015873015, + "acc_stderr": 0.03764950879790607, + "acc_norm": 0.23015873015873015, + "acc_norm_stderr": 0.03764950879790607 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.39215686274509803, + "acc_stderr": 0.02795604616542451, + "acc_norm": 0.39215686274509803, + "acc_norm_stderr": 0.02795604616542451 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.47, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.47, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.48760330578512395, + "acc_stderr": 0.04562951548180765, + "acc_norm": 0.48760330578512395, + "acc_norm_stderr": 0.04562951548180765 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.23026315789473684, + "acc_stderr": 0.03426059424403164, + "acc_norm": 0.23026315789473684, + "acc_norm_stderr": 0.03426059424403164 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3382352941176471, + "acc_stderr": 0.019139943748487025, + "acc_norm": 0.3382352941176471, + "acc_norm_stderr": 0.019139943748487025 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3120567375886525, + "acc_stderr": 0.027640120545169917, + "acc_norm": 0.3120567375886525, + "acc_norm_stderr": 0.027640120545169917 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.04547960999764376, + 
"acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.04547960999764376 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.24074074074074073, + "acc_stderr": 0.029157522184605607, + "acc_norm": 0.24074074074074073, + "acc_norm_stderr": 0.029157522184605607 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145634, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145634 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3014705882352941, + "acc_stderr": 0.027875982114273168, + "acc_norm": 0.3014705882352941, + "acc_norm_stderr": 0.027875982114273168 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.22040816326530613, + "acc_stderr": 0.02653704531214529, + "acc_norm": 0.22040816326530613, + "acc_norm_stderr": 0.02653704531214529 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.4641350210970464, + "acc_stderr": 0.03246338898055659, + "acc_norm": 0.4641350210970464, + "acc_norm_stderr": 0.03246338898055659 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.27509778357235987, + "acc_stderr": 0.011405443620996924, + "acc_norm": 0.27509778357235987, + "acc_norm_stderr": 0.011405443620996924 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.3088235294117647, + "acc_stderr": 0.03242661719827218, + "acc_norm": 0.3088235294117647, + "acc_norm_stderr": 0.03242661719827218 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.34545454545454546, + "acc_stderr": 0.03713158067481912, + "acc_norm": 0.34545454545454546, + "acc_norm_stderr": 0.03713158067481912 + }, + 
"harness|ko_truthfulqa_mc|0": { + "mc1": 0.29253365973072215, + "mc1_stderr": 0.015925597445286165, + "mc2": 0.468616885925621, + "mc2_stderr": 0.015261121105350028 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.36481700118063753, + "acc_stderr": 0.01655014433704659, + "acc_norm": 0.43211334120425027, + "acc_norm_stderr": 0.017031170198851753 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "SKYEEEE/llama3-Ko-3-8B-finetuned_ver2", + "model_sha": "338d770ca78f9aeef89675578142349c39d7c195", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/SKYEEEE/llama3-Ko-3-8B-finetuned_ver2/result_2024-07-14 03:35:13.json b/SKYEEEE/llama3-Ko-3-8B-finetuned_ver2/result_2024-07-14 03:35:13.json new file mode 100644 index 0000000000000000000000000000000000000000..7d2a9e9c60874443b7fd4b8a9348532fc1da71f4 --- /dev/null +++ b/SKYEEEE/llama3-Ko-3-8B-finetuned_ver2/result_2024-07-14 03:35:13.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3464163822525597, + "acc_stderr": 0.013905011180063254, + "acc_norm": 0.38054607508532423, + "acc_norm_stderr": 0.014188277712349828 + }, + "harness|ko_hellaswag|10": { + "acc": 
0.350726946823342, + "acc_stderr": 0.004762223492435252, + "acc_norm": 0.45140410276837284, + "acc_norm_stderr": 0.004966158142645415 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.49122807017543857, + "acc_stderr": 0.038342347441649924, + "acc_norm": 0.49122807017543857, + "acc_norm_stderr": 0.038342347441649924 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4368932038834951, + "acc_stderr": 0.04911147107365777, + "acc_norm": 0.4368932038834951, + "acc_norm_stderr": 0.04911147107365777 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4278416347381865, + "acc_stderr": 0.01769278792780373, + "acc_norm": 0.4278416347381865, + "acc_norm_stderr": 0.01769278792780373 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.04244633238353229, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.04244633238353229 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621503, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621503 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4, + "acc_stderr": 0.03202563076101735, + "acc_norm": 0.4, + "acc_norm_stderr": 0.03202563076101735 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.35542168674698793, + "acc_stderr": 0.03726214354322415, + "acc_norm": 0.35542168674698793, + "acc_norm_stderr": 0.03726214354322415 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.40836012861736337, + "acc_stderr": 0.027917050748484634, + "acc_norm": 0.40836012861736337, + "acc_norm_stderr": 0.027917050748484634 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.37668161434977576, + "acc_stderr": 0.03252113489929189, + "acc_norm": 0.37668161434977576, + "acc_norm_stderr": 0.03252113489929189 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4198473282442748, + "acc_stderr": 0.04328577215262971, + "acc_norm": 0.4198473282442748, + "acc_norm_stderr": 0.04328577215262971 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.47, + 
"acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3787878787878788, + "acc_stderr": 0.03456088731993747, + "acc_norm": 0.3787878787878788, + "acc_norm_stderr": 0.03456088731993747 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.46206896551724136, + "acc_stderr": 0.041546596717075474, + "acc_norm": 0.46206896551724136, + "acc_norm_stderr": 0.041546596717075474 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.04220773659171452, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.04220773659171452 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.36134453781512604, + "acc_stderr": 0.031204691225150013, + "acc_norm": 0.36134453781512604, + "acc_norm_stderr": 0.031204691225150013 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.33589743589743587, + "acc_stderr": 0.023946724741563962, + "acc_norm": 0.33589743589743587, + "acc_norm_stderr": 0.023946724741563962 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.04803752235190192, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.04803752235190192 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.270935960591133, + "acc_stderr": 0.03127090713297698, + "acc_norm": 0.270935960591133, + "acc_norm_stderr": 0.03127090713297698 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.44193548387096776, + "acc_stderr": 0.028251557906849734, + "acc_norm": 0.44193548387096776, + "acc_norm_stderr": 0.028251557906849734 + }, + 
"harness|ko_mmlu_marketing|5": { + "acc": 0.5854700854700855, + "acc_stderr": 0.03227396567623779, + "acc_norm": 0.5854700854700855, + "acc_norm_stderr": 0.03227396567623779 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.42641509433962266, + "acc_stderr": 0.030437794342983045, + "acc_norm": 0.42641509433962266, + "acc_norm_stderr": 0.030437794342983045 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.43636363636363634, + "acc_stderr": 0.04750185058907297, + "acc_norm": 0.43636363636363634, + "acc_norm_stderr": 0.04750185058907297 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.027840811495871916, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.027840811495871916 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + "acc_stderr": 0.037345356767871984, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.037345356767871984 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.4577114427860697, + "acc_stderr": 0.035228658640995975, + "acc_norm": 0.4577114427860697, + "acc_norm_stderr": 0.035228658640995975 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2947976878612717, + "acc_stderr": 0.034765996075164785, + "acc_norm": 0.2947976878612717, + "acc_norm_stderr": 0.034765996075164785 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2724867724867725, + "acc_stderr": 0.022930973071633345, + "acc_norm": 0.2724867724867725, + "acc_norm_stderr": 0.022930973071633345 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3680555555555556, + "acc_stderr": 0.04032999053960718, + "acc_norm": 0.3680555555555556, + "acc_norm_stderr": 0.04032999053960718 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + 
"acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.3670520231213873, + "acc_stderr": 0.02595005433765408, + "acc_norm": 0.3670520231213873, + "acc_norm_stderr": 0.02595005433765408 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3312883435582822, + "acc_stderr": 0.03697983910025588, + "acc_norm": 0.3312883435582822, + "acc_norm_stderr": 0.03697983910025588 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.35802469135802467, + "acc_stderr": 0.026675611926037086, + "acc_norm": 0.35802469135802467, + "acc_norm_stderr": 0.026675611926037086 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.35751295336787564, + "acc_stderr": 0.034588160421810066, + "acc_norm": 0.35751295336787564, + "acc_norm_stderr": 0.034588160421810066 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.040969851398436695, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.040969851398436695 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3394495412844037, + "acc_stderr": 0.02030210934266235, + "acc_norm": 0.3394495412844037, + "acc_norm_stderr": 0.02030210934266235 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.23015873015873015, + "acc_stderr": 0.03764950879790607, + "acc_norm": 0.23015873015873015, + "acc_norm_stderr": 0.03764950879790607 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.39215686274509803, + "acc_stderr": 0.02795604616542451, + "acc_norm": 0.39215686274509803, + "acc_norm_stderr": 0.02795604616542451 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.47, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.47, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.48760330578512395, + "acc_stderr": 0.04562951548180765, 
+ "acc_norm": 0.48760330578512395, + "acc_norm_stderr": 0.04562951548180765 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.23026315789473684, + "acc_stderr": 0.03426059424403164, + "acc_norm": 0.23026315789473684, + "acc_norm_stderr": 0.03426059424403164 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3382352941176471, + "acc_stderr": 0.019139943748487025, + "acc_norm": 0.3382352941176471, + "acc_norm_stderr": 0.019139943748487025 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3120567375886525, + "acc_stderr": 0.027640120545169917, + "acc_norm": 0.3120567375886525, + "acc_norm_stderr": 0.027640120545169917 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.04547960999764376, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.04547960999764376 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.24074074074074073, + "acc_stderr": 0.029157522184605607, + "acc_norm": 0.24074074074074073, + "acc_norm_stderr": 0.029157522184605607 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145634, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145634 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3014705882352941, + "acc_stderr": 0.027875982114273168, + "acc_norm": 0.3014705882352941, + "acc_norm_stderr": 0.027875982114273168 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.22040816326530613, + "acc_stderr": 0.02653704531214529, + "acc_norm": 0.22040816326530613, + "acc_norm_stderr": 0.02653704531214529 + }, + 
"harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.4641350210970464, + "acc_stderr": 0.03246338898055659, + "acc_norm": 0.4641350210970464, + "acc_norm_stderr": 0.03246338898055659 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.27509778357235987, + "acc_stderr": 0.011405443620996924, + "acc_norm": 0.27509778357235987, + "acc_norm_stderr": 0.011405443620996924 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.3088235294117647, + "acc_stderr": 0.03242661719827218, + "acc_norm": 0.3088235294117647, + "acc_norm_stderr": 0.03242661719827218 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.34545454545454546, + "acc_stderr": 0.03713158067481912, + "acc_norm": 0.34545454545454546, + "acc_norm_stderr": 0.03713158067481912 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.29253365973072215, + "mc1_stderr": 0.015925597445286165, + "mc2": 0.468616885925621, + "mc2_stderr": 0.015261121105350028 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.36481700118063753, + "acc_stderr": 0.01655014433704659, + "acc_norm": 0.43211334120425027, + "acc_norm_stderr": 0.017031170198851753 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 
1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "SKYEEEE/llama3-Ko-3-8B-finetuned_ver2", + "model_sha": "338d770ca78f9aeef89675578142349c39d7c195", + "model_dtype": 
"torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/SakanaAI/DiscoPOP-zephyr-7b-gemma/result_2024-06-17 05:07:55.json b/SakanaAI/DiscoPOP-zephyr-7b-gemma/result_2024-06-17 05:07:55.json new file mode 100644 index 0000000000000000000000000000000000000000..8d472186c3788298f2d457ea22d0d77a7bb0fd4e --- /dev/null +++ b/SakanaAI/DiscoPOP-zephyr-7b-gemma/result_2024-06-17 05:07:55.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3856655290102389, + "acc_stderr": 0.01422425097325718, + "acc_norm": 0.4129692832764505, + "acc_norm_stderr": 0.014388344935398326 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3800039832702649, + "acc_stderr": 0.0048439543384514415, + "acc_norm": 0.49083847839075884, + "acc_norm_stderr": 0.0049889437217112125 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.543859649122807, + "acc_stderr": 0.03820042586602966, + "acc_norm": 0.543859649122807, + "acc_norm_stderr": 0.03820042586602966 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6310679611650486, + "acc_stderr": 0.0477761518115674, + "acc_norm": 0.6310679611650486, + "acc_norm_stderr": 0.0477761518115674 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.48659003831417624, + "acc_stderr": 0.01787353173651038, + "acc_norm": 0.48659003831417624, + "acc_norm_stderr": 0.01787353173651038 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34814814814814815, + "acc_stderr": 0.041153246103369526, + "acc_norm": 0.34814814814814815, + "acc_norm_stderr": 0.041153246103369526 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.48936170212765956, + "acc_stderr": 0.03267862331014063, + "acc_norm": 0.48936170212765956, + "acc_norm_stderr": 
0.03267862331014063 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42771084337349397, + "acc_stderr": 0.03851597683718533, + "acc_norm": 0.42771084337349397, + "acc_norm_stderr": 0.03851597683718533 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.43729903536977494, + "acc_stderr": 0.02817391776176288, + "acc_norm": 0.43729903536977494, + "acc_norm_stderr": 0.02817391776176288 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.49327354260089684, + "acc_stderr": 0.033554765962343545, + "acc_norm": 0.49327354260089684, + "acc_norm_stderr": 0.033554765962343545 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.45038167938931295, + "acc_stderr": 0.04363643698524779, + "acc_norm": 0.45038167938931295, + "acc_norm_stderr": 0.04363643698524779 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.45, + "acc_stderr": 0.04999999999999998, + "acc_norm": 0.45, + "acc_norm_stderr": 0.04999999999999998 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5404040404040404, + "acc_stderr": 0.035507024651313425, + "acc_norm": 0.5404040404040404, + "acc_norm_stderr": 0.035507024651313425 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5379310344827586, + "acc_stderr": 0.04154659671707548, + "acc_norm": 0.5379310344827586, + "acc_norm_stderr": 0.04154659671707548 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.044405219061793275, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.044405219061793275 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.47478991596638653, + "acc_stderr": 0.03243718055137411, + "acc_norm": 0.47478991596638653, + "acc_norm_stderr": 0.03243718055137411 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4641025641025641, + "acc_stderr": 0.02528558599001784, + "acc_norm": 0.4641025641025641, + "acc_norm_stderr": 0.02528558599001784 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.52, + "acc_stderr": 
0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4187192118226601, + "acc_stderr": 0.03471192860518468, + "acc_norm": 0.4187192118226601, + "acc_norm_stderr": 0.03471192860518468 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5032258064516129, + "acc_stderr": 0.02844341422643831, + "acc_norm": 0.5032258064516129, + "acc_norm_stderr": 0.02844341422643831 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6923076923076923, + "acc_stderr": 0.0302363899421731, + "acc_norm": 0.6923076923076923, + "acc_norm_stderr": 0.0302363899421731 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4641509433962264, + "acc_stderr": 0.030693675018458003, + "acc_norm": 0.4641509433962264, + "acc_norm_stderr": 0.030693675018458003 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5545454545454546, + "acc_stderr": 0.047605488214603246, + "acc_norm": 0.5545454545454546, + "acc_norm_stderr": 0.047605488214603246 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.362962962962963, + "acc_stderr": 0.02931820364520686, + "acc_norm": 0.362962962962963, + "acc_norm_stderr": 0.02931820364520686 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2781456953642384, + "acc_stderr": 0.036586032627637426, + "acc_norm": 0.2781456953642384, + "acc_norm_stderr": 0.036586032627637426 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6169154228855721, + "acc_stderr": 0.0343751933733825, + "acc_norm": 0.6169154228855721, + "acc_norm_stderr": 0.0343751933733825 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 
0.47398843930635837, + "acc_stderr": 0.03807301726504511, + "acc_norm": 0.47398843930635837, + "acc_norm_stderr": 0.03807301726504511 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.4126984126984127, + "acc_stderr": 0.02535574126305528, + "acc_norm": 0.4126984126984127, + "acc_norm_stderr": 0.02535574126305528 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4583333333333333, + "acc_stderr": 0.04166666666666665, + "acc_norm": 0.4583333333333333, + "acc_norm_stderr": 0.04166666666666665 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456344, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456344 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4508670520231214, + "acc_stderr": 0.02678881193156276, + "acc_norm": 0.4508670520231214, + "acc_norm_stderr": 0.02678881193156276 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.43558282208588955, + "acc_stderr": 0.03895632464138936, + "acc_norm": 0.43558282208588955, + "acc_norm_stderr": 0.03895632464138936 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4876543209876543, + "acc_stderr": 0.027812262269327242, + "acc_norm": 0.4876543209876543, + "acc_norm_stderr": 0.027812262269327242 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5233160621761658, + "acc_stderr": 0.036045136724422014, + "acc_norm": 0.5233160621761658, + "acc_norm_stderr": 0.036045136724422014 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.35964912280701755, + "acc_stderr": 0.04514496132873633, + "acc_norm": 0.35964912280701755, + "acc_norm_stderr": 0.04514496132873633 + }, + 
"harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5743119266055046, + "acc_stderr": 0.0211992359724708, + "acc_norm": 0.5743119266055046, + "acc_norm_stderr": 0.0211992359724708 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30158730158730157, + "acc_stderr": 0.04104947269903394, + "acc_norm": 0.30158730158730157, + "acc_norm_stderr": 0.04104947269903394 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.49019607843137253, + "acc_stderr": 0.028624412550167958, + "acc_norm": 0.49019607843137253, + "acc_norm_stderr": 0.028624412550167958 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5867768595041323, + "acc_stderr": 0.04495087843548408, + "acc_norm": 0.5867768595041323, + "acc_norm_stderr": 0.04495087843548408 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5789473684210527, + "acc_stderr": 0.040179012759817494, + "acc_norm": 0.5789473684210527, + "acc_norm_stderr": 0.040179012759817494 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3937908496732026, + "acc_stderr": 0.01976621199107307, + "acc_norm": 0.3937908496732026, + "acc_norm_stderr": 0.01976621199107307 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2801418439716312, + "acc_stderr": 0.02678917235114025, + "acc_norm": 0.2801418439716312, + "acc_norm_stderr": 0.02678917235114025 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4375, + "acc_stderr": 0.04708567521880525, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.04708567521880525 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.375, + "acc_stderr": 0.033016908987210894, + "acc_norm": 0.375, + "acc_norm_stderr": 0.033016908987210894 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2536312849162011, + "acc_stderr": 0.014551553659369918, + "acc_norm": 0.2536312849162011, + "acc_norm_stderr": 0.014551553659369918 
+ }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.57, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.57, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4007352941176471, + "acc_stderr": 0.02976826352893311, + "acc_norm": 0.4007352941176471, + "acc_norm_stderr": 0.02976826352893311 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.49795918367346936, + "acc_stderr": 0.0320089533497105, + "acc_norm": 0.49795918367346936, + "acc_norm_stderr": 0.0320089533497105 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5780590717299579, + "acc_stderr": 0.032148146302403695, + "acc_norm": 0.5780590717299579, + "acc_norm_stderr": 0.032148146302403695 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3344198174706649, + "acc_stderr": 0.012049668983214945, + "acc_norm": 0.3344198174706649, + "acc_norm_stderr": 0.012049668983214945 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5196078431372549, + "acc_stderr": 0.03506612560524866, + "acc_norm": 0.5196078431372549, + "acc_norm_stderr": 0.03506612560524866 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.47878787878787876, + "acc_stderr": 0.03900828913737301, + "acc_norm": 0.47878787878787876, + "acc_norm_stderr": 0.03900828913737301 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.28518971848225216, + "mc1_stderr": 0.015805827874454895, + "mc2": 0.46098554675551745, + "mc2_stderr": 0.01607738581359731 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4380165289256198, + "acc_stderr": 0.017057753702160283, + "acc_norm": 0.45808736717827625, + "acc_norm_stderr": 0.017129852117911147 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + 
"harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + 
"harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "SakanaAI/DiscoPOP-zephyr-7b-gemma", + "model_sha": "161d63fca6218a102cbbbbd55ebdc0517eafe42d", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Saxo/yunsung-llama-2-koen-13b-linkbricks-sft-basic-v1/result_2024-03-12 18:32:39.json b/Saxo/yunsung-llama-2-koen-13b-linkbricks-sft-basic-v1/result_2024-03-12 18:32:39.json new file mode 100644 index 0000000000000000000000000000000000000000..76d430cd27fdc3bb25a7db86042d3f38a1467556 --- /dev/null +++ b/Saxo/yunsung-llama-2-koen-13b-linkbricks-sft-basic-v1/result_2024-03-12 18:32:39.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3848122866894198, + "acc_stderr": 0.014218371065251102, + "acc_norm": 0.439419795221843, + "acc_norm_stderr": 0.014503747823580125 + }, + "harness|ko_hellaswag|10": { + "acc": 0.41495717984465247, + "acc_stderr": 0.0049170767266237935, + "acc_norm": 0.561840270862378, + "acc_norm_stderr": 0.004951470301995878 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4327485380116959, + "acc_stderr": 0.03799978644370607, + "acc_norm": 0.4327485380116959, + "acc_norm_stderr": 0.03799978644370607 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.34951456310679613, + 
"acc_stderr": 0.047211885060971716, + "acc_norm": 0.34951456310679613, + "acc_norm_stderr": 0.047211885060971716 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5134099616858238, + "acc_stderr": 0.017873531736510385, + "acc_norm": 0.5134099616858238, + "acc_norm_stderr": 0.017873531736510385 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45185185185185184, + "acc_stderr": 0.04299268905480863, + "acc_norm": 0.45185185185185184, + "acc_norm_stderr": 0.04299268905480863 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.40425531914893614, + "acc_stderr": 0.032081157507886836, + "acc_norm": 0.40425531914893614, + "acc_norm_stderr": 0.032081157507886836 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.37349397590361444, + "acc_stderr": 0.03765845117168863, + "acc_norm": 0.37349397590361444, + "acc_norm_stderr": 0.03765845117168863 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4533762057877814, + "acc_stderr": 0.02827435985489425, + "acc_norm": 0.4533762057877814, + "acc_norm_stderr": 0.02827435985489425 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4798206278026906, + "acc_stderr": 0.033530461674123005, + "acc_norm": 0.4798206278026906, + "acc_norm_stderr": 0.033530461674123005 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4122137404580153, + "acc_stderr": 0.04317171194870255, + "acc_norm": 0.4122137404580153, + "acc_norm_stderr": 0.04317171194870255 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.43434343434343436, + "acc_stderr": 0.03531505879359183, + "acc_norm": 0.43434343434343436, + "acc_norm_stderr": 0.03531505879359183 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 
0.3793103448275862, + "acc_stderr": 0.04043461861916747, + "acc_norm": 0.3793103448275862, + "acc_norm_stderr": 0.04043461861916747 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.04023382273617747, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.04023382273617747 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.41596638655462187, + "acc_stderr": 0.03201650100739615, + "acc_norm": 0.41596638655462187, + "acc_norm_stderr": 0.03201650100739615 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.37435897435897436, + "acc_stderr": 0.024537591572830517, + "acc_norm": 0.37435897435897436, + "acc_norm_stderr": 0.024537591572830517 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.04766075165356461, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.04766075165356461 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.35960591133004927, + "acc_stderr": 0.03376458246509567, + "acc_norm": 0.35960591133004927, + "acc_norm_stderr": 0.03376458246509567 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3903225806451613, + "acc_stderr": 0.027751256636969576, + "acc_norm": 0.3903225806451613, + "acc_norm_stderr": 0.027751256636969576 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5598290598290598, + "acc_stderr": 0.032520741720630506, + "acc_norm": 0.5598290598290598, + "acc_norm_stderr": 0.032520741720630506 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4226415094339623, + "acc_stderr": 0.03040233144576954, + "acc_norm": 0.4226415094339623, + "acc_norm_stderr": 0.03040233144576954 + }, + 
"harness|ko_mmlu_public_relations|5": { + "acc": 0.4636363636363636, + "acc_stderr": 0.047764491623961985, + "acc_norm": 0.4636363636363636, + "acc_norm_stderr": 0.047764491623961985 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25555555555555554, + "acc_stderr": 0.02659393910184406, + "acc_norm": 0.25555555555555554, + "acc_norm_stderr": 0.02659393910184406 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.037101857261199946, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.037101857261199946 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5174129353233831, + "acc_stderr": 0.03533389234739245, + "acc_norm": 0.5174129353233831, + "acc_norm_stderr": 0.03533389234739245 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3352601156069364, + "acc_stderr": 0.03599586301247078, + "acc_norm": 0.3352601156069364, + "acc_norm_stderr": 0.03599586301247078 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2830687830687831, + "acc_stderr": 0.023201392938194974, + "acc_norm": 0.2830687830687831, + "acc_norm_stderr": 0.023201392938194974 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3402777777777778, + "acc_stderr": 0.03962135573486219, + "acc_norm": 0.3402777777777778, + "acc_norm_stderr": 0.03962135573486219 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.41329479768786126, + "acc_stderr": 0.026511261369409237, + "acc_norm": 0.41329479768786126, + "acc_norm_stderr": 0.026511261369409237 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4049079754601227, + "acc_stderr": 0.03856672163548914, + "acc_norm": 0.4049079754601227, + 
"acc_norm_stderr": 0.03856672163548914 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.41358024691358025, + "acc_stderr": 0.027402042040269955, + "acc_norm": 0.41358024691358025, + "acc_norm_stderr": 0.027402042040269955 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.37305699481865284, + "acc_stderr": 0.034902055920485744, + "acc_norm": 0.37305699481865284, + "acc_norm_stderr": 0.034902055920485744 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.21929824561403508, + "acc_stderr": 0.03892431106518754, + "acc_norm": 0.21929824561403508, + "acc_norm_stderr": 0.03892431106518754 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.47339449541284406, + "acc_stderr": 0.02140695268815158, + "acc_norm": 0.47339449541284406, + "acc_norm_stderr": 0.02140695268815158 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.25396825396825395, + "acc_stderr": 0.03893259610604675, + "acc_norm": 0.25396825396825395, + "acc_norm_stderr": 0.03893259610604675 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3562091503267974, + "acc_stderr": 0.027420477662629242, + "acc_norm": 0.3562091503267974, + "acc_norm_stderr": 0.027420477662629242 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5785123966942148, + "acc_stderr": 0.045077322787750874, + "acc_norm": 0.5785123966942148, + "acc_norm_stderr": 0.045077322787750874 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3092105263157895, + "acc_stderr": 0.03761070869867479, + "acc_norm": 0.3092105263157895, + "acc_norm_stderr": 0.03761070869867479 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3415032679738562, + "acc_stderr": 
0.019184639328092487, + "acc_norm": 0.3415032679738562, + "acc_norm_stderr": 0.019184639328092487 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.25886524822695034, + "acc_stderr": 0.026129572527180844, + "acc_norm": 0.25886524822695034, + "acc_norm_stderr": 0.026129572527180844 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.26785714285714285, + "acc_stderr": 0.042032772914677614, + "acc_norm": 0.26785714285714285, + "acc_norm_stderr": 0.042032772914677614 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.21296296296296297, + "acc_stderr": 0.027920963147993666, + "acc_norm": 0.21296296296296297, + "acc_norm_stderr": 0.027920963147993666 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.30514705882352944, + "acc_stderr": 0.0279715413701706, + "acc_norm": 0.30514705882352944, + "acc_norm_stderr": 0.0279715413701706 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.32653061224489793, + "acc_stderr": 0.03002105623844031, + "acc_norm": 0.32653061224489793, + "acc_norm_stderr": 0.03002105623844031 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5358649789029536, + "acc_stderr": 0.03246338898055659, + "acc_norm": 0.5358649789029536, + "acc_norm_stderr": 0.03246338898055659 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2803129074315515, + "acc_stderr": 0.01147155594495862, + "acc_norm": 0.2803129074315515, + "acc_norm_stderr": 0.01147155594495862 + }, + 
"harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.3480392156862745, + "acc_stderr": 0.03343311240488419, + "acc_norm": 0.3480392156862745, + "acc_norm_stderr": 0.03343311240488419 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.46060606060606063, + "acc_stderr": 0.03892207016552013, + "acc_norm": 0.46060606060606063, + "acc_norm_stderr": 0.03892207016552013 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2594859241126071, + "mc1_stderr": 0.015345409485557964, + "mc2": 0.42044312118862787, + "mc2_stderr": 0.014880041238971613 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5277449822904369, + "acc_stderr": 0.017163867979456016, + "acc_norm": 0.6127508854781583, + "acc_norm_stderr": 0.01674757799164278 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 
1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Saxo/yunsung-llama-2-koen-13b-linkbricks-sft-basic-v1", + "model_sha": "8175d2e66bd6d45185e2f6d80cef1fd1f7b3b86b", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/SeaLLMs/SeaLLM-7B-v2.5/result_2024-05-13 17:46:50.json b/SeaLLMs/SeaLLM-7B-v2.5/result_2024-05-13 17:46:50.json new file mode 100644 index 
0000000000000000000000000000000000000000..cc16b7f8575570255002f205da4a8163677bc64b --- /dev/null +++ b/SeaLLMs/SeaLLM-7B-v2.5/result_2024-05-13 17:46:50.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.19539249146757678, + "acc_stderr": 0.011586907189952911, + "acc_norm": 0.24744027303754265, + "acc_norm_stderr": 0.012610352663292673 + }, + "harness|ko_hellaswag|10": { + "acc": 0.25403306114319857, + "acc_stderr": 0.00434426617963492, + "acc_norm": 0.2504481179047998, + "acc_norm_stderr": 0.004323856300539175 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.3216374269005848, + "acc_stderr": 0.03582529442573122, + "acc_norm": 0.3216374269005848, + "acc_norm_stderr": 0.03582529442573122 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.1941747572815534, + "acc_stderr": 0.03916667762822582, + "acc_norm": 0.1941747572815534, + "acc_norm_stderr": 0.03916667762822582 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.25287356321839083, + "acc_stderr": 0.015543377313719681, + "acc_norm": 0.25287356321839083, + "acc_norm_stderr": 0.015543377313719681 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2074074074074074, + "acc_stderr": 0.03502553170678319, + "acc_norm": 0.2074074074074074, + "acc_norm_stderr": 0.03502553170678319 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.19574468085106383, + "acc_stderr": 0.025937853139977148, + "acc_norm": 0.19574468085106383, + "acc_norm_stderr": 0.025937853139977148 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.25301204819277107, + "acc_stderr": 0.03384429155233135, + "acc_norm": 0.25301204819277107, + "acc_norm_stderr": 0.03384429155233135 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.26366559485530544, + "acc_stderr": 0.02502553850053234, + "acc_norm": 0.26366559485530544, + "acc_norm_stderr": 
0.02502553850053234 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.2062780269058296, + "acc_stderr": 0.02715715047956382, + "acc_norm": 0.2062780269058296, + "acc_norm_stderr": 0.02715715047956382 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2366412213740458, + "acc_stderr": 0.03727673575596918, + "acc_norm": 0.2366412213740458, + "acc_norm_stderr": 0.03727673575596918 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.1919191919191919, + "acc_stderr": 0.02805779167298901, + "acc_norm": 0.1919191919191919, + "acc_norm_stderr": 0.02805779167298901 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.23448275862068965, + "acc_stderr": 0.035306258743465914, + "acc_norm": 0.23448275862068965, + "acc_norm_stderr": 0.035306258743465914 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.04023382273617746, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.04023382273617746 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.21008403361344538, + "acc_stderr": 0.026461398717471874, + "acc_norm": 0.21008403361344538, + "acc_norm_stderr": 0.026461398717471874 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.21025641025641026, + "acc_stderr": 0.02066059748502692, + "acc_norm": 0.21025641025641026, + "acc_norm_stderr": 0.02066059748502692 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.0401910747255735, + "acc_norm": 0.2222222222222222, + 
"acc_norm_stderr": 0.0401910747255735 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.23645320197044334, + "acc_stderr": 0.029896114291733552, + "acc_norm": 0.23645320197044334, + "acc_norm_stderr": 0.029896114291733552 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.22258064516129034, + "acc_stderr": 0.023664216671642518, + "acc_norm": 0.22258064516129034, + "acc_norm_stderr": 0.023664216671642518 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2863247863247863, + "acc_stderr": 0.02961432369045665, + "acc_norm": 0.2863247863247863, + "acc_norm_stderr": 0.02961432369045665 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.23018867924528302, + "acc_stderr": 0.025907897122408173, + "acc_norm": 0.23018867924528302, + "acc_norm_stderr": 0.025907897122408173 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03955932861795833, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03955932861795833 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.026719240783712177, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.026719240783712177 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.271523178807947, + "acc_stderr": 0.036313298039696545, + "acc_norm": 0.271523178807947, + "acc_norm_stderr": 0.036313298039696545 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.2537313432835821, + "acc_stderr": 0.03076944496729602, + "acc_norm": 0.2537313432835821, + "acc_norm_stderr": 0.03076944496729602 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.24277456647398843, + "acc_stderr": 0.0326926380614177, + "acc_norm": 0.24277456647398843, + "acc_norm_stderr": 0.0326926380614177 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.26455026455026454, + "acc_stderr": 0.022717467897708607, + "acc_norm": 0.26455026455026454, + "acc_norm_stderr": 0.022717467897708607 + }, + "harness|ko_mmlu_college_biology|5": { + 
"acc": 0.2708333333333333, + "acc_stderr": 0.03716177437566017, + "acc_norm": 0.2708333333333333, + "acc_norm_stderr": 0.03716177437566017 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.21965317919075145, + "acc_stderr": 0.022289638852617904, + "acc_norm": 0.21965317919075145, + "acc_norm_stderr": 0.022289638852617904 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3067484662576687, + "acc_stderr": 0.036230899157241474, + "acc_norm": 0.3067484662576687, + "acc_norm_stderr": 0.036230899157241474 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2839506172839506, + "acc_stderr": 0.02508947852376513, + "acc_norm": 0.2839506172839506, + "acc_norm_stderr": 0.02508947852376513 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036845, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036845 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.22797927461139897, + "acc_stderr": 0.030276909945178256, + "acc_norm": 0.22797927461139897, + "acc_norm_stderr": 0.030276909945178256 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.22807017543859648, + "acc_stderr": 0.03947152782669415, + "acc_norm": 0.22807017543859648, + "acc_norm_stderr": 0.03947152782669415 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.21284403669724772, + "acc_stderr": 0.017549376389313694, + "acc_norm": 0.21284403669724772, + "acc_norm_stderr": 0.017549376389313694 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.15079365079365079, + "acc_stderr": 0.03200686497287392, + "acc_norm": 0.15079365079365079, + "acc_norm_stderr": 0.03200686497287392 + }, + 
"harness|ko_mmlu_nutrition|5": { + "acc": 0.21241830065359477, + "acc_stderr": 0.02342037547829613, + "acc_norm": 0.21241830065359477, + "acc_norm_stderr": 0.02342037547829613 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.371900826446281, + "acc_stderr": 0.04412015806624503, + "acc_norm": 0.371900826446281, + "acc_norm_stderr": 0.04412015806624503 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3092105263157895, + "acc_stderr": 0.03761070869867479, + "acc_norm": 0.3092105263157895, + "acc_norm_stderr": 0.03761070869867479 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2679738562091503, + "acc_stderr": 0.017917974069594722, + "acc_norm": 0.2679738562091503, + "acc_norm_stderr": 0.017917974069594722 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2695035460992908, + "acc_stderr": 0.026469036818590638, + "acc_norm": 0.2695035460992908, + "acc_norm_stderr": 0.026469036818590638 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.23214285714285715, + "acc_stderr": 0.04007341809755807, + "acc_norm": 0.23214285714285715, + "acc_norm_stderr": 0.04007341809755807 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2037037037037037, + "acc_stderr": 0.027467401804057993, + "acc_norm": 0.2037037037037037, + "acc_norm_stderr": 0.027467401804057993 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24692737430167597, + "acc_stderr": 0.014422292204808852, + "acc_norm": 0.24692737430167597, + "acc_norm_stderr": 0.014422292204808852 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 
0.04760952285695235 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.16176470588235295, + "acc_stderr": 0.02236867256288675, + "acc_norm": 0.16176470588235295, + "acc_norm_stderr": 0.02236867256288675 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.24897959183673468, + "acc_stderr": 0.027682979522960227, + "acc_norm": 0.24897959183673468, + "acc_norm_stderr": 0.027682979522960227 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.26582278481012656, + "acc_stderr": 0.028756799629658332, + "acc_norm": 0.26582278481012656, + "acc_norm_stderr": 0.028756799629658332 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.27183833116036504, + "acc_stderr": 0.011363135278651411, + "acc_norm": 0.27183833116036504, + "acc_norm_stderr": 0.011363135278651411 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.25980392156862747, + "acc_stderr": 0.030778554678693264, + "acc_norm": 0.25980392156862747, + "acc_norm_stderr": 0.030778554678693264 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.28484848484848485, + "acc_stderr": 0.035243908445117836, + "acc_norm": 0.28484848484848485, + "acc_norm_stderr": 0.035243908445117836 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2533659730722154, + "mc1_stderr": 0.015225899340826842, + "mc2": 0.48553928017206527, + "mc2_stderr": 0.017277450887367634 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.08146399055489964, + "acc_stderr": 0.009404717441946264, + "acc_norm": 0.1959858323494687, + "acc_norm_stderr": 0.013647685567768873 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + 
"harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + 
"harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "SeaLLMs/SeaLLM-7B-v2.5", + "model_sha": "78fcb9db2398fefe0919e510861ea23c6b2d1cb6", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/SeaLLMs/SeaLLMs-v3-7B-Chat/result_2024-07-28 03:06:24.json b/SeaLLMs/SeaLLMs-v3-7B-Chat/result_2024-07-28 03:06:24.json new file mode 100644 index 0000000000000000000000000000000000000000..d4aa00ea55fba3023c36d635f8b2b4b16b03a9bd --- /dev/null +++ b/SeaLLMs/SeaLLMs-v3-7B-Chat/result_2024-07-28 03:06:24.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3890784982935154, + "acc_stderr": 0.014247309976045607, + "acc_norm": 0.4402730375426621, + "acc_norm_stderr": 0.014506769524804243 + }, + "harness|ko_hellaswag|10": { + "acc": 0.2504481179047998, + "acc_stderr": 0.004323856300539177, + "acc_norm": 0.2504481179047998, + "acc_norm_stderr": 0.004323856300539177 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6842105263157895, + "acc_stderr": 0.03565079670708311, + "acc_norm": 0.6842105263157895, + "acc_norm_stderr": 0.03565079670708311 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.7864077669902912, + "acc_stderr": 0.04058042015646036, + "acc_norm": 0.7864077669902912, + "acc_norm_stderr": 0.04058042015646036 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6615581098339719, + "acc_stderr": 0.016920869586210675, + "acc_norm": 0.6615581098339719, + "acc_norm_stderr": 0.016920869586210675 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3851851851851852, + "acc_stderr": 
0.042039210401562783, + "acc_norm": 0.3851851851851852, + "acc_norm_stderr": 0.042039210401562783 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.6085106382978723, + "acc_stderr": 0.031907012423268113, + "acc_norm": 0.6085106382978723, + "acc_norm_stderr": 0.031907012423268113 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4939759036144578, + "acc_stderr": 0.03892212195333047, + "acc_norm": 0.4939759036144578, + "acc_norm_stderr": 0.03892212195333047 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6559485530546624, + "acc_stderr": 0.026981478043648047, + "acc_norm": 0.6559485530546624, + "acc_norm_stderr": 0.026981478043648047 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.6098654708520179, + "acc_stderr": 0.03273766725459156, + "acc_norm": 0.6098654708520179, + "acc_norm_stderr": 0.03273766725459156 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5954198473282443, + "acc_stderr": 0.043046937953806645, + "acc_norm": 0.5954198473282443, + "acc_norm_stderr": 0.043046937953806645 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.55, + "acc_stderr": 0.05, + "acc_norm": 0.55, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7373737373737373, + "acc_stderr": 0.031353050095330855, + "acc_norm": 0.7373737373737373, + "acc_norm_stderr": 0.031353050095330855 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.6344827586206897, + "acc_stderr": 0.04013124195424386, + "acc_norm": 0.6344827586206897, + "acc_norm_stderr": 0.04013124195424386 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3235294117647059, + "acc_stderr": 0.046550104113196177, + "acc_norm": 0.3235294117647059, + "acc_norm_stderr": 0.046550104113196177 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.7016806722689075, + "acc_stderr": 
0.029719142876342853, + "acc_norm": 0.7016806722689075, + "acc_norm_stderr": 0.029719142876342853 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.6461538461538462, + "acc_stderr": 0.02424378399406214, + "acc_norm": 0.6461538461538462, + "acc_norm_stderr": 0.02424378399406214 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.74, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.74, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.7222222222222222, + "acc_stderr": 0.043300437496507437, + "acc_norm": 0.7222222222222222, + "acc_norm_stderr": 0.043300437496507437 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.5566502463054187, + "acc_stderr": 0.03495334582162933, + "acc_norm": 0.5566502463054187, + "acc_norm_stderr": 0.03495334582162933 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6774193548387096, + "acc_stderr": 0.026593084516572277, + "acc_norm": 0.6774193548387096, + "acc_norm_stderr": 0.026593084516572277 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.8247863247863247, + "acc_stderr": 0.02490443909891822, + "acc_norm": 0.8247863247863247, + "acc_norm_stderr": 0.02490443909891822 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.6226415094339622, + "acc_stderr": 0.029832808114796005, + "acc_norm": 0.6226415094339622, + "acc_norm_stderr": 0.029832808114796005 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5636363636363636, + "acc_stderr": 0.04750185058907297, + "acc_norm": 0.5636363636363636, + "acc_norm_stderr": 0.04750185058907297 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.5148148148148148, + "acc_stderr": 0.030472153249328598, + "acc_norm": 0.5148148148148148, + "acc_norm_stderr": 0.030472153249328598 + }, + "harness|ko_mmlu_high_school_physics|5": { + 
"acc": 0.3576158940397351, + "acc_stderr": 0.03913453431177258, + "acc_norm": 0.3576158940397351, + "acc_norm_stderr": 0.03913453431177258 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7512437810945274, + "acc_stderr": 0.030567675938916707, + "acc_norm": 0.7512437810945274, + "acc_norm_stderr": 0.030567675938916707 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5780346820809249, + "acc_stderr": 0.03765746693865151, + "acc_norm": 0.5780346820809249, + "acc_norm_stderr": 0.03765746693865151 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.58994708994709, + "acc_stderr": 0.025331202438944423, + "acc_norm": 0.58994708994709, + "acc_norm_stderr": 0.025331202438944423 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5763888888888888, + "acc_stderr": 0.041321250197233685, + "acc_norm": 0.5763888888888888, + "acc_norm_stderr": 0.041321250197233685 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.75, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.75, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.6213872832369942, + "acc_stderr": 0.02611374936131034, + "acc_norm": 0.6213872832369942, + "acc_norm_stderr": 0.02611374936131034 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5950920245398773, + "acc_stderr": 0.03856672163548913, + "acc_norm": 0.5950920245398773, + "acc_norm_stderr": 0.03856672163548913 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6450617283950617, + "acc_stderr": 0.02662415247884585, + "acc_norm": 0.6450617283950617, + "acc_norm_stderr": 0.02662415247884585 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": 
{ + "acc": 0.6476683937823834, + "acc_stderr": 0.03447478286414357, + "acc_norm": 0.6476683937823834, + "acc_norm_stderr": 0.03447478286414357 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.5526315789473685, + "acc_stderr": 0.04677473004491199, + "acc_norm": 0.5526315789473685, + "acc_norm_stderr": 0.04677473004491199 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.710091743119266, + "acc_stderr": 0.0194530666092016, + "acc_norm": 0.710091743119266, + "acc_norm_stderr": 0.0194530666092016 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.5238095238095238, + "acc_stderr": 0.04467062628403273, + "acc_norm": 0.5238095238095238, + "acc_norm_stderr": 0.04467062628403273 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.6209150326797386, + "acc_stderr": 0.027780141207023355, + "acc_norm": 0.6209150326797386, + "acc_norm_stderr": 0.027780141207023355 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.7, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.7, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7520661157024794, + "acc_stderr": 0.039418975265163025, + "acc_norm": 0.7520661157024794, + "acc_norm_stderr": 0.039418975265163025 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.6842105263157895, + "acc_stderr": 0.03782728980865469, + "acc_norm": 0.6842105263157895, + "acc_norm_stderr": 0.03782728980865469 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5310457516339869, + "acc_stderr": 0.020188804456361887, + "acc_norm": 0.5310457516339869, + "acc_norm_stderr": 0.020188804456361887 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.4326241134751773, + "acc_stderr": 0.02955545423677885, + "acc_norm": 0.4326241134751773, + "acc_norm_stderr": 0.02955545423677885 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.5, + "acc_stderr": 0.04745789978762494, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04745789978762494 + }, + 
"harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.03388857118502325, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.03388857118502325 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.4491620111731844, + "acc_stderr": 0.01663583834163193, + "acc_norm": 0.4491620111731844, + "acc_norm_stderr": 0.01663583834163193 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.75, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.75, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5661764705882353, + "acc_stderr": 0.030105636570016636, + "acc_norm": 0.5661764705882353, + "acc_norm_stderr": 0.030105636570016636 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6653061224489796, + "acc_stderr": 0.030209235226242307, + "acc_norm": 0.6653061224489796, + "acc_norm_stderr": 0.030209235226242307 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7257383966244726, + "acc_stderr": 0.029041333510598025, + "acc_norm": 0.7257383966244726, + "acc_norm_stderr": 0.029041333510598025 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.4178617992177314, + "acc_stderr": 0.012596744108998564, + "acc_norm": 0.4178617992177314, + "acc_norm_stderr": 0.012596744108998564 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.7352941176470589, + "acc_stderr": 0.030964517926923382, + "acc_norm": 0.7352941176470589, + "acc_norm_stderr": 0.030964517926923382 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.7090909090909091, + "acc_stderr": 0.03546563019624335, + "acc_norm": 0.7090909090909091, + "acc_norm_stderr": 0.03546563019624335 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.31946144430844553, + "mc1_stderr": 0.016322644182960498, + "mc2": 
0.48779238416457826, + "mc2_stderr": 0.015735783220329043 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5914994096812278, + "acc_stderr": 0.016900062879427115, + "acc_norm": 0.6375442739079102, + "acc_norm_stderr": 0.016527131240453703 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + 
"harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "SeaLLMs/SeaLLMs-v3-7B-Chat", + "model_sha": "67ef6dfd0a5df7af4be7a325786105a2ba4cbaf7", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/SkyOrbis/SKY-Ko-Llama3-8B-lora/result_2024-06-23 07:48:25.json b/SkyOrbis/SKY-Ko-Llama3-8B-lora/result_2024-06-23 07:48:25.json new file mode 100644 index 0000000000000000000000000000000000000000..02ea54e341a9addc6f5a8496e449f5afec9a07b1 --- /dev/null +++ b/SkyOrbis/SKY-Ko-Llama3-8B-lora/result_2024-06-23 07:48:25.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3720136518771331, + "acc_stderr": 0.014124597881844461, + "acc_norm": 0.40187713310580203, + "acc_norm_stderr": 0.01432726861457828 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3761202947619996, + "acc_stderr": 0.004834207964061322, + "acc_norm": 0.49063931487751444, + "acc_norm_stderr": 0.004988906901307734 
+ }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6608187134502924, + "acc_stderr": 0.03631053496488905, + "acc_norm": 0.6608187134502924, + "acc_norm_stderr": 0.03631053496488905 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6796116504854369, + "acc_stderr": 0.04620284082280041, + "acc_norm": 0.6796116504854369, + "acc_norm_stderr": 0.04620284082280041 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5517241379310345, + "acc_stderr": 0.017784034534992433, + "acc_norm": 0.5517241379310345, + "acc_norm_stderr": 0.017784034534992433 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.04292596718256981, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.04292596718256981 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.46382978723404256, + "acc_stderr": 0.032600385118357715, + "acc_norm": 0.46382978723404256, + "acc_norm_stderr": 0.032600385118357715 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.43373493975903615, + "acc_stderr": 0.03858158940685515, + "acc_norm": 0.43373493975903615, + "acc_norm_stderr": 0.03858158940685515 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5273311897106109, + "acc_stderr": 0.028355633568328174, + "acc_norm": 0.5273311897106109, + "acc_norm_stderr": 0.028355633568328174 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.47085201793721976, + "acc_stderr": 0.03350073248773403, + "acc_norm": 0.47085201793721976, + "acc_norm_stderr": 0.03350073248773403 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5725190839694656, + "acc_stderr": 0.04338920305792401, + "acc_norm": 0.5725190839694656, + "acc_norm_stderr": 0.04338920305792401 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956911 + }, + 
"harness|ko_mmlu_high_school_geography|5": { + "acc": 0.601010101010101, + "acc_stderr": 0.03488901616852731, + "acc_norm": 0.601010101010101, + "acc_norm_stderr": 0.03488901616852731 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5724137931034483, + "acc_stderr": 0.041227371113703316, + "acc_norm": 0.5724137931034483, + "acc_norm_stderr": 0.041227371113703316 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.043898699568087764, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.043898699568087764 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5378151260504201, + "acc_stderr": 0.0323854694875898, + "acc_norm": 0.5378151260504201, + "acc_norm_stderr": 0.0323854694875898 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5282051282051282, + "acc_stderr": 0.025310639254933865, + "acc_norm": 0.5282051282051282, + "acc_norm_stderr": 0.025310639254933865 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5833333333333334, + "acc_stderr": 0.04766075165356461, + "acc_norm": 0.5833333333333334, + "acc_norm_stderr": 0.04766075165356461 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.458128078817734, + "acc_stderr": 0.03505630140785741, + "acc_norm": 0.458128078817734, + "acc_norm_stderr": 0.03505630140785741 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5225806451612903, + "acc_stderr": 0.02841498501970786, + "acc_norm": 0.5225806451612903, + "acc_norm_stderr": 0.02841498501970786 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7564102564102564, + "acc_stderr": 0.028120966503914387, + "acc_norm": 0.7564102564102564, + 
"acc_norm_stderr": 0.028120966503914387 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4981132075471698, + "acc_stderr": 0.030772653642075664, + "acc_norm": 0.4981132075471698, + "acc_norm_stderr": 0.030772653642075664 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.0478833976870286, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.0478833976870286 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.36666666666666664, + "acc_stderr": 0.029381620726465073, + "acc_norm": 0.36666666666666664, + "acc_norm_stderr": 0.029381620726465073 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3841059602649007, + "acc_stderr": 0.03971301814719197, + "acc_norm": 0.3841059602649007, + "acc_norm_stderr": 0.03971301814719197 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6467661691542289, + "acc_stderr": 0.03379790611796777, + "acc_norm": 0.6467661691542289, + "acc_norm_stderr": 0.03379790611796777 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.44508670520231214, + "acc_stderr": 0.03789401760283647, + "acc_norm": 0.44508670520231214, + "acc_norm_stderr": 0.03789401760283647 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.02487081525105709, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.02487081525105709 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.041553199555931467, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.041553199555931467 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.66, + "acc_stderr": 0.04760952285695237, + "acc_norm": 0.66, + "acc_norm_stderr": 0.04760952285695237 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5606936416184971, + "acc_stderr": 0.026720034380514998, + 
"acc_norm": 0.5606936416184971, + "acc_norm_stderr": 0.026720034380514998 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.49079754601226994, + "acc_stderr": 0.039277056007874414, + "acc_norm": 0.49079754601226994, + "acc_norm_stderr": 0.039277056007874414 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5123456790123457, + "acc_stderr": 0.027812262269327242, + "acc_norm": 0.5123456790123457, + "acc_norm_stderr": 0.027812262269327242 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709390974, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709390974 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5854922279792746, + "acc_stderr": 0.035553003195576686, + "acc_norm": 0.5854922279792746, + "acc_norm_stderr": 0.035553003195576686 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3508771929824561, + "acc_stderr": 0.044895393502706986, + "acc_norm": 0.3508771929824561, + "acc_norm_stderr": 0.044895393502706986 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5486238532110091, + "acc_stderr": 0.02133571471126879, + "acc_norm": 0.5486238532110091, + "acc_norm_stderr": 0.02133571471126879 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.04360314860077459, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.04360314860077459 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5228758169934641, + "acc_stderr": 0.028599936776089782, + "acc_norm": 0.5228758169934641, + "acc_norm_stderr": 0.028599936776089782 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6859504132231405, + "acc_stderr": 0.04236964753041018, + "acc_norm": 0.6859504132231405, + "acc_norm_stderr": 0.04236964753041018 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5197368421052632, + 
"acc_stderr": 0.04065771002562603, + "acc_norm": 0.5197368421052632, + "acc_norm_stderr": 0.04065771002562603 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.019944914136873586, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.019944914136873586 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3617021276595745, + "acc_stderr": 0.028663820147199492, + "acc_norm": 0.3617021276595745, + "acc_norm_stderr": 0.028663820147199492 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.38392857142857145, + "acc_stderr": 0.046161430750285455, + "acc_norm": 0.38392857142857145, + "acc_norm_stderr": 0.046161430750285455 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4583333333333333, + "acc_stderr": 0.033981108902946366, + "acc_norm": 0.4583333333333333, + "acc_norm_stderr": 0.033981108902946366 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27150837988826815, + "acc_stderr": 0.014874252168095277, + "acc_norm": 0.27150837988826815, + "acc_norm_stderr": 0.014874252168095277 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.48, + "acc_stderr": 0.05021167315686779, + "acc_norm": 0.48, + "acc_norm_stderr": 0.05021167315686779 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.63, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.63, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.39338235294117646, + "acc_stderr": 0.02967428828131118, + "acc_norm": 0.39338235294117646, + "acc_norm_stderr": 0.02967428828131118 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5918367346938775, + "acc_stderr": 0.03146465712827424, + "acc_norm": 0.5918367346938775, + "acc_norm_stderr": 0.03146465712827424 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6582278481012658, + "acc_stderr": 0.03087453753755362, + "acc_norm": 0.6582278481012658, + "acc_norm_stderr": 
0.03087453753755362 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3644067796610169, + "acc_stderr": 0.012291694983056477, + "acc_norm": 0.3644067796610169, + "acc_norm_stderr": 0.012291694983056477 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5588235294117647, + "acc_stderr": 0.034849415144292316, + "acc_norm": 0.5588235294117647, + "acc_norm_stderr": 0.034849415144292316 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6303030303030303, + "acc_stderr": 0.03769430314512568, + "acc_norm": 0.6303030303030303, + "acc_norm_stderr": 0.03769430314512568 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2717258261933905, + "mc1_stderr": 0.01557284045287583, + "mc2": 0.44183716295863174, + "mc2_stderr": 0.015484493265241027 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4474616292798111, + "acc_stderr": 0.01709519030150058, + "acc_norm": 0.5584415584415584, + "acc_norm_stderr": 0.017072525875563103 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + 
"harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "SkyOrbis/SKY-Ko-Llama3-8B-lora", + "model_sha": "de6afeb74d39dbcad6f5dba6d3334f5297a7cd33", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git 
a/SkyOrbis/SKY-Ko-Solar-10.7B-lora/result_2024-06-23 08:12:36.json b/SkyOrbis/SKY-Ko-Solar-10.7B-lora/result_2024-06-23 08:12:36.json new file mode 100644 index 0000000000000000000000000000000000000000..124f16dde2e2011aa15ea11de85e63c7d0adeff3 --- /dev/null +++ b/SkyOrbis/SKY-Ko-Solar-10.7B-lora/result_2024-06-23 08:12:36.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.41723549488054607, + "acc_stderr": 0.014409825518403082, + "acc_norm": 0.4726962457337884, + "acc_norm_stderr": 0.014589589101985994 + }, + "harness|ko_hellaswag|10": { + "acc": 0.425911173073093, + "acc_stderr": 0.004934698012050244, + "acc_norm": 0.5734913363871739, + "acc_norm_stderr": 0.004935587729948866 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5906432748538012, + "acc_stderr": 0.03771283107626545, + "acc_norm": 0.5906432748538012, + "acc_norm_stderr": 0.03771283107626545 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6407766990291263, + "acc_stderr": 0.04750458399041696, + "acc_norm": 0.6407766990291263, + "acc_norm_stderr": 0.04750458399041696 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6155810983397191, + "acc_stderr": 0.017395688742819618, + "acc_norm": 0.6155810983397191, + "acc_norm_stderr": 0.017395688742819618 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.42962962962962964, + "acc_stderr": 0.04276349494376599, + "acc_norm": 0.42962962962962964, + "acc_norm_stderr": 0.04276349494376599 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4425531914893617, + "acc_stderr": 0.03246956919789958, + "acc_norm": 0.4425531914893617, + "acc_norm_stderr": 0.03246956919789958 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.46987951807228917, + "acc_stderr": 0.03885425420866766, + "acc_norm": 0.46987951807228917, + "acc_norm_stderr": 0.03885425420866766 + }, + 
"harness|ko_mmlu_philosophy|5": { + "acc": 0.594855305466238, + "acc_stderr": 0.027882383791325956, + "acc_norm": 0.594855305466238, + "acc_norm_stderr": 0.027882383791325956 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5515695067264574, + "acc_stderr": 0.03337883736255098, + "acc_norm": 0.5515695067264574, + "acc_norm_stderr": 0.03337883736255098 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.549618320610687, + "acc_stderr": 0.04363643698524779, + "acc_norm": 0.549618320610687, + "acc_norm_stderr": 0.04363643698524779 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.45, + "acc_stderr": 0.04999999999999999, + "acc_norm": 0.45, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6767676767676768, + "acc_stderr": 0.03332299921070645, + "acc_norm": 0.6767676767676768, + "acc_norm_stderr": 0.03332299921070645 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4689655172413793, + "acc_stderr": 0.04158632762097828, + "acc_norm": 0.4689655172413793, + "acc_norm_stderr": 0.04158632762097828 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3235294117647059, + "acc_stderr": 0.04655010411319616, + "acc_norm": 0.3235294117647059, + "acc_norm_stderr": 0.04655010411319616 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5630252100840336, + "acc_stderr": 0.03221943636566196, + "acc_norm": 0.5630252100840336, + "acc_norm_stderr": 0.03221943636566196 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5076923076923077, + "acc_stderr": 0.025348006031534743, + "acc_norm": 0.5076923076923077, + "acc_norm_stderr": 0.025348006031534743 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.58, + "acc_stderr": 0.04960449637488583, + "acc_norm": 0.58, + "acc_norm_stderr": 0.04960449637488583 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, 
+ "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5833333333333334, + "acc_stderr": 0.04766075165356461, + "acc_norm": 0.5833333333333334, + "acc_norm_stderr": 0.04766075165356461 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.45320197044334976, + "acc_stderr": 0.03502544650845872, + "acc_norm": 0.45320197044334976, + "acc_norm_stderr": 0.03502544650845872 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5612903225806452, + "acc_stderr": 0.028229497320317206, + "acc_norm": 0.5612903225806452, + "acc_norm_stderr": 0.028229497320317206 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7991452991452992, + "acc_stderr": 0.026246772946890474, + "acc_norm": 0.7991452991452992, + "acc_norm_stderr": 0.026246772946890474 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4716981132075472, + "acc_stderr": 0.030723535249006107, + "acc_norm": 0.4716981132075472, + "acc_norm_stderr": 0.030723535249006107 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5727272727272728, + "acc_stderr": 0.04738198703545483, + "acc_norm": 0.5727272727272728, + "acc_norm_stderr": 0.04738198703545483 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3296296296296296, + "acc_stderr": 0.02866120111652459, + "acc_norm": 0.3296296296296296, + "acc_norm_stderr": 0.02866120111652459 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33774834437086093, + "acc_stderr": 0.03861557546255168, + "acc_norm": 0.33774834437086093, + "acc_norm_stderr": 0.03861557546255168 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7014925373134329, + "acc_stderr": 0.03235743789355043, + "acc_norm": 0.7014925373134329, + "acc_norm_stderr": 0.03235743789355043 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4624277456647399, + "acc_stderr": 0.0380168510452446, + "acc_norm": 0.4624277456647399, + "acc_norm_stderr": 0.0380168510452446 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3783068783068783, + "acc_stderr": 
0.024976954053155257, + "acc_norm": 0.3783068783068783, + "acc_norm_stderr": 0.024976954053155257 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.04122728707651282, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.04122728707651282 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.69, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.69, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5780346820809249, + "acc_stderr": 0.02658923114217426, + "acc_norm": 0.5780346820809249, + "acc_norm_stderr": 0.02658923114217426 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.558282208588957, + "acc_stderr": 0.03901591825836184, + "acc_norm": 0.558282208588957, + "acc_norm_stderr": 0.03901591825836184 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5709876543209876, + "acc_stderr": 0.027538925613470863, + "acc_norm": 0.5709876543209876, + "acc_norm_stderr": 0.027538925613470863 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.694300518134715, + "acc_stderr": 0.03324837939758159, + "acc_norm": 0.694300518134715, + "acc_norm_stderr": 0.03324837939758159 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.43859649122807015, + "acc_stderr": 0.04668000738510455, + "acc_norm": 0.43859649122807015, + "acc_norm_stderr": 0.04668000738510455 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6293577981651376, + "acc_stderr": 0.020707458164352984, + "acc_norm": 0.6293577981651376, + "acc_norm_stderr": 0.020707458164352984 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.373015873015873, + 
"acc_stderr": 0.04325506042017086, + "acc_norm": 0.373015873015873, + "acc_norm_stderr": 0.04325506042017086 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5490196078431373, + "acc_stderr": 0.028491993586171563, + "acc_norm": 0.5490196078431373, + "acc_norm_stderr": 0.028491993586171563 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.58, + "acc_stderr": 0.04960449637488583, + "acc_norm": 0.58, + "acc_norm_stderr": 0.04960449637488583 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7272727272727273, + "acc_stderr": 0.04065578140908705, + "acc_norm": 0.7272727272727273, + "acc_norm_stderr": 0.04065578140908705 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5657894736842105, + "acc_stderr": 0.04033565667848319, + "acc_norm": 0.5657894736842105, + "acc_norm_stderr": 0.04033565667848319 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4673202614379085, + "acc_stderr": 0.020184583359102202, + "acc_norm": 0.4673202614379085, + "acc_norm_stderr": 0.020184583359102202 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3475177304964539, + "acc_stderr": 0.028406627809590947, + "acc_norm": 0.3475177304964539, + "acc_norm_stderr": 0.028406627809590947 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.38392857142857145, + "acc_stderr": 0.046161430750285455, + "acc_norm": 0.38392857142857145, + "acc_norm_stderr": 0.046161430750285455 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5416666666666666, + "acc_stderr": 0.03398110890294636, + "acc_norm": 0.5416666666666666, + "acc_norm_stderr": 0.03398110890294636 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23128491620111732, + "acc_stderr": 0.014102223623152594, + "acc_norm": 0.23128491620111732, + "acc_norm_stderr": 0.014102223623152594 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + 
"harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.65, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.65, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.45588235294117646, + "acc_stderr": 0.03025437257397669, + "acc_norm": 0.45588235294117646, + "acc_norm_stderr": 0.03025437257397669 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5551020408163265, + "acc_stderr": 0.031814251181977865, + "acc_norm": 0.5551020408163265, + "acc_norm_stderr": 0.031814251181977865 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6835443037974683, + "acc_stderr": 0.03027497488021898, + "acc_norm": 0.6835443037974683, + "acc_norm_stderr": 0.03027497488021898 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3728813559322034, + "acc_stderr": 0.012350630058333362, + "acc_norm": 0.3728813559322034, + "acc_norm_stderr": 0.012350630058333362 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5686274509803921, + "acc_stderr": 0.03476099060501636, + "acc_norm": 0.5686274509803921, + "acc_norm_stderr": 0.03476099060501636 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5878787878787879, + "acc_stderr": 0.03843566993588718, + "acc_norm": 0.5878787878787879, + "acc_norm_stderr": 0.03843566993588718 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2607099143206854, + "mc1_stderr": 0.015368841620766373, + "mc2": 0.42390975148946014, + "mc2_stderr": 0.015001490344964346 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.46871310507674147, + "acc_stderr": 0.017156666859785456, + "acc_norm": 0.4923258559622196, + "acc_norm_stderr": 0.01718832921965428 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + 
"harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + 
"harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "SkyOrbis/SKY-Ko-Solar-10.7B-lora", + "model_sha": "e75e3c33b7385c513686435c497f05121fec370a", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/StatPan/SinGung7B-DPO-v0.1-12600c/result_2024-01-06 08:57:13.json b/StatPan/SinGung7B-DPO-v0.1-12600c/result_2024-01-06 08:57:13.json new file mode 100644 index 0000000000000000000000000000000000000000..361d1092ac6a941a0a5f53dfb0f94aad283e9959 --- /dev/null +++ b/StatPan/SinGung7B-DPO-v0.1-12600c/result_2024-01-06 08:57:13.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3651877133105802, + "acc_stderr": 0.0140702655192688, + "acc_norm": 0.4129692832764505, + "acc_norm_stderr": 0.014388344935398322 + }, + "harness|ko_hellaswag|10": { + "acc": 0.37422824138617805, + "acc_stderr": 0.00482933992638833, + "acc_norm": 0.477096195976897, + "acc_norm_stderr": 0.004984543540932338 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4152046783625731, + "acc_stderr": 0.03779275945503201, + "acc_norm": 0.4152046783625731, + "acc_norm_stderr": 0.03779275945503201 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5242718446601942, + "acc_stderr": 0.049449010929737795, + "acc_norm": 0.5242718446601942, + "acc_norm_stderr": 0.049449010929737795 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4559386973180077, + "acc_stderr": 0.017810403925435363, + "acc_norm": 0.4559386973180077, + 
"acc_norm_stderr": 0.017810403925435363 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37777777777777777, + "acc_stderr": 0.04188307537595853, + "acc_norm": 0.37777777777777777, + "acc_norm_stderr": 0.04188307537595853 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720683, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720683 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.34893617021276596, + "acc_stderr": 0.031158522131357762, + "acc_norm": 0.34893617021276596, + "acc_norm_stderr": 0.031158522131357762 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.37349397590361444, + "acc_stderr": 0.03765845117168863, + "acc_norm": 0.37349397590361444, + "acc_norm_stderr": 0.03765845117168863 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4212218649517685, + "acc_stderr": 0.02804339985821063, + "acc_norm": 0.4212218649517685, + "acc_norm_stderr": 0.02804339985821063 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4484304932735426, + "acc_stderr": 0.03337883736255099, + "acc_norm": 0.4484304932735426, + "acc_norm_stderr": 0.03337883736255099 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3893129770992366, + "acc_stderr": 0.04276486542814591, + "acc_norm": 0.3893129770992366, + "acc_norm_stderr": 0.04276486542814591 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.4898989898989899, + "acc_stderr": 0.035616254886737454, + "acc_norm": 0.4898989898989899, + "acc_norm_stderr": 0.035616254886737454 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.36551724137931035, + "acc_stderr": 0.04013124195424385, + "acc_norm": 0.36551724137931035, + "acc_norm_stderr": 0.04013124195424385 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.14705882352941177, + "acc_stderr": 0.0352406895156745, + "acc_norm": 
0.14705882352941177, + "acc_norm_stderr": 0.0352406895156745 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.032145368597886394, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.032145368597886394 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4205128205128205, + "acc_stderr": 0.025028610276710855, + "acc_norm": 0.4205128205128205, + "acc_norm_stderr": 0.025028610276710855 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.52, + "acc_stderr": 0.05021167315686779, + "acc_norm": 0.52, + "acc_norm_stderr": 0.05021167315686779 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.04832853553437056, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.04832853553437056 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3694581280788177, + "acc_stderr": 0.033959703819985726, + "acc_norm": 0.3694581280788177, + "acc_norm_stderr": 0.033959703819985726 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.42258064516129035, + "acc_stderr": 0.02810096472427264, + "acc_norm": 0.42258064516129035, + "acc_norm_stderr": 0.02810096472427264 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.717948717948718, + "acc_stderr": 0.02948036054954119, + "acc_norm": 0.717948717948718, + "acc_norm_stderr": 0.02948036054954119 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4528301886792453, + "acc_stderr": 0.030635627957961823, + "acc_norm": 0.4528301886792453, + "acc_norm_stderr": 0.030635627957961823 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.44545454545454544, + "acc_stderr": 0.047605488214603246, + "acc_norm": 0.44545454545454544, + "acc_norm_stderr": 0.047605488214603246 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.337037037037037, 
+ "acc_stderr": 0.028820884666253255, + "acc_norm": 0.337037037037037, + "acc_norm_stderr": 0.028820884666253255 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.03757949922943343, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.03757949922943343 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.48756218905472637, + "acc_stderr": 0.03534439848539579, + "acc_norm": 0.48756218905472637, + "acc_norm_stderr": 0.03534439848539579 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3815028901734104, + "acc_stderr": 0.03703851193099521, + "acc_norm": 0.3815028901734104, + "acc_norm_stderr": 0.03703851193099521 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.024870815251057103, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.024870815251057103 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3472222222222222, + "acc_stderr": 0.039812405437178615, + "acc_norm": 0.3472222222222222, + "acc_norm_stderr": 0.039812405437178615 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.44508670520231214, + "acc_stderr": 0.026756255129663762, + "acc_norm": 0.44508670520231214, + "acc_norm_stderr": 0.026756255129663762 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.49079754601226994, + "acc_stderr": 0.03927705600787443, + "acc_norm": 0.49079754601226994, + "acc_norm_stderr": 0.03927705600787443 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.39814814814814814, + "acc_stderr": 0.02723741509459247, + "acc_norm": 0.39814814814814814, + "acc_norm_stderr": 0.02723741509459247 + }, + "harness|ko_mmlu_college_mathematics|5": { + 
"acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.47668393782383417, + "acc_stderr": 0.03604513672442205, + "acc_norm": 0.47668393782383417, + "acc_norm_stderr": 0.03604513672442205 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.30701754385964913, + "acc_stderr": 0.04339138322579861, + "acc_norm": 0.30701754385964913, + "acc_norm_stderr": 0.04339138322579861 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.44220183486238535, + "acc_stderr": 0.021293613207520205, + "acc_norm": 0.44220183486238535, + "acc_norm_stderr": 0.021293613207520205 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.04285714285714281, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.04285714285714281 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4117647058823529, + "acc_stderr": 0.028180596328259293, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.028180596328259293 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5619834710743802, + "acc_stderr": 0.04529146804435792, + "acc_norm": 0.5619834710743802, + "acc_norm_stderr": 0.04529146804435792 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4342105263157895, + "acc_stderr": 0.040335656678483184, + "acc_norm": 0.4342105263157895, + "acc_norm_stderr": 0.040335656678483184 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.019659922493623336, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.019659922493623336 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32978723404255317, + "acc_stderr": 0.028045946942042398, + "acc_norm": 0.32978723404255317, + "acc_norm_stderr": 
0.028045946942042398 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.33035714285714285, + "acc_stderr": 0.04464285714285713, + "acc_norm": 0.33035714285714285, + "acc_norm_stderr": 0.04464285714285713 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.35648148148148145, + "acc_stderr": 0.032664783315272714, + "acc_norm": 0.35648148148148145, + "acc_norm_stderr": 0.032664783315272714 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27039106145251396, + "acc_stderr": 0.014854993938010085, + "acc_norm": 0.27039106145251396, + "acc_norm_stderr": 0.014854993938010085 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.33455882352941174, + "acc_stderr": 0.028661996202335303, + "acc_norm": 0.33455882352941174, + "acc_norm_stderr": 0.028661996202335303 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.42448979591836733, + "acc_stderr": 0.03164209487942941, + "acc_norm": 0.42448979591836733, + "acc_norm_stderr": 0.03164209487942941 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5189873417721519, + "acc_stderr": 0.03252375148090448, + "acc_norm": 0.5189873417721519, + "acc_norm_stderr": 0.03252375148090448 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3005215123859192, + "acc_stderr": 0.011709918883039124, + "acc_norm": 0.3005215123859192, + "acc_norm_stderr": 0.011709918883039124 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4264705882352941, + "acc_stderr": 0.03471157907953425, + "acc_norm": 0.4264705882352941, + "acc_norm_stderr": 0.03471157907953425 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.41818181818181815, + 
"acc_stderr": 0.03851716319398396, + "acc_norm": 0.41818181818181815, + "acc_norm_stderr": 0.03851716319398396 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3084455324357405, + "mc1_stderr": 0.01616803938315687, + "mc2": 0.5146617602010907, + "mc2_stderr": 0.0165702140232135 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4179456906729634, + "acc_stderr": 0.016957292005279723, + "acc_norm": 0.4592680047225502, + "acc_norm_stderr": 0.017133218276537677 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + 
"harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "StatPan/SinGung7B-DPO-v0.1-12600c", + "model_sha": "ce6e4ccd22d2ad3904ec1486456c22d5c4edb088", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/StatPan/all-you-need-is/result_2024-01-04 01:51:48.json b/StatPan/all-you-need-is/result_2024-01-04 01:51:48.json new file mode 100644 index 0000000000000000000000000000000000000000..4944c9cc2d26d6c929d6892cd37201ab0f1625fc --- /dev/null +++ b/StatPan/all-you-need-is/result_2024-01-04 01:51:48.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.431740614334471, + "acc_stderr": 0.014474591427196206, + "acc_norm": 0.48976109215017066, + 
"acc_norm_stderr": 0.01460832690628502 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4331806413065126, + "acc_stderr": 0.004945023657032277, + "acc_norm": 0.5859390559649472, + "acc_norm_stderr": 0.004915524600627963 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5964912280701754, + "acc_stderr": 0.03762738699917057, + "acc_norm": 0.5964912280701754, + "acc_norm_stderr": 0.03762738699917057 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6893203883495146, + "acc_stderr": 0.0458212416016155, + "acc_norm": 0.6893203883495146, + "acc_norm_stderr": 0.0458212416016155 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6538952745849298, + "acc_stderr": 0.01701196526641208, + "acc_norm": 0.6538952745849298, + "acc_norm_stderr": 0.01701196526641208 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45925925925925926, + "acc_stderr": 0.04304979692464244, + "acc_norm": 0.45925925925925926, + "acc_norm_stderr": 0.04304979692464244 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036845, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036845 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.5063829787234042, + "acc_stderr": 0.03268335899936335, + "acc_norm": 0.5063829787234042, + "acc_norm_stderr": 0.03268335899936335 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4879518072289157, + "acc_stderr": 0.0389136449583582, + "acc_norm": 0.4879518072289157, + "acc_norm_stderr": 0.0389136449583582 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5980707395498392, + "acc_stderr": 0.027846476005930473, + "acc_norm": 0.5980707395498392, + "acc_norm_stderr": 0.027846476005930473 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5695067264573991, + "acc_stderr": 0.033231973029429394, + "acc_norm": 0.5695067264573991, + "acc_norm_stderr": 0.033231973029429394 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6335877862595419, + "acc_stderr": 0.042258754519696386, + "acc_norm": 0.6335877862595419, + 
"acc_norm_stderr": 0.042258754519696386 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.48, + "acc_stderr": 0.05021167315686779, + "acc_norm": 0.48, + "acc_norm_stderr": 0.05021167315686779 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7373737373737373, + "acc_stderr": 0.03135305009533086, + "acc_norm": 0.7373737373737373, + "acc_norm_stderr": 0.03135305009533086 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5517241379310345, + "acc_stderr": 0.04144311810878151, + "acc_norm": 0.5517241379310345, + "acc_norm_stderr": 0.04144311810878151 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.04617034827006716, + "acc_norm": 0.3137254901960784, + "acc_norm_stderr": 0.04617034827006716 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6638655462184874, + "acc_stderr": 0.03068473711513536, + "acc_norm": 0.6638655462184874, + "acc_norm_stderr": 0.03068473711513536 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5666666666666667, + "acc_stderr": 0.0251246535258851, + "acc_norm": 0.5666666666666667, + "acc_norm_stderr": 0.0251246535258851 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.62, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.62, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6203703703703703, + "acc_stderr": 0.04691521224077742, + "acc_norm": 0.6203703703703703, + "acc_norm_stderr": 0.04691521224077742 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.37438423645320196, + "acc_stderr": 0.03405155380561952, + "acc_norm": 0.37438423645320196, + "acc_norm_stderr": 0.03405155380561952 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6064516129032258, + "acc_stderr": 0.027791878753132264, + "acc_norm": 
0.6064516129032258, + "acc_norm_stderr": 0.027791878753132264 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.8247863247863247, + "acc_stderr": 0.024904439098918214, + "acc_norm": 0.8247863247863247, + "acc_norm_stderr": 0.024904439098918214 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5433962264150943, + "acc_stderr": 0.030656748696739435, + "acc_norm": 0.5433962264150943, + "acc_norm_stderr": 0.030656748696739435 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5727272727272728, + "acc_stderr": 0.04738198703545483, + "acc_norm": 0.5727272727272728, + "acc_norm_stderr": 0.04738198703545483 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.34444444444444444, + "acc_stderr": 0.028972648884844267, + "acc_norm": 0.34444444444444444, + "acc_norm_stderr": 0.028972648884844267 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3973509933774834, + "acc_stderr": 0.03995524007681682, + "acc_norm": 0.3973509933774834, + "acc_norm_stderr": 0.03995524007681682 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6915422885572139, + "acc_stderr": 0.03265819588512697, + "acc_norm": 0.6915422885572139, + "acc_norm_stderr": 0.03265819588512697 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5144508670520231, + "acc_stderr": 0.03810871630454764, + "acc_norm": 0.5144508670520231, + "acc_norm_stderr": 0.03810871630454764 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.43915343915343913, + "acc_stderr": 0.02555992055053101, + "acc_norm": 0.43915343915343913, + "acc_norm_stderr": 0.02555992055053101 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5, + "acc_stderr": 0.04181210050035455, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04181210050035455 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.72, + "acc_stderr": 0.04512608598542126, 
+ "acc_norm": 0.72, + "acc_norm_stderr": 0.04512608598542126 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.615606936416185, + "acc_stderr": 0.026189666966272035, + "acc_norm": 0.615606936416185, + "acc_norm_stderr": 0.026189666966272035 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5276073619631901, + "acc_stderr": 0.0392237829061099, + "acc_norm": 0.5276073619631901, + "acc_norm_stderr": 0.0392237829061099 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5802469135802469, + "acc_stderr": 0.02746009955700513, + "acc_norm": 0.5802469135802469, + "acc_norm_stderr": 0.02746009955700513 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7098445595854922, + "acc_stderr": 0.03275264467791516, + "acc_norm": 0.7098445595854922, + "acc_norm_stderr": 0.03275264467791516 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.5350877192982456, + "acc_stderr": 0.046920083813689104, + "acc_norm": 0.5350877192982456, + "acc_norm_stderr": 0.046920083813689104 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6954128440366972, + "acc_stderr": 0.019732299420354038, + "acc_norm": 0.6954128440366972, + "acc_norm_stderr": 0.019732299420354038 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4365079365079365, + "acc_stderr": 0.044359328928514664, + "acc_norm": 0.4365079365079365, + "acc_norm_stderr": 0.044359328928514664 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5620915032679739, + "acc_stderr": 0.02840830202033269, + "acc_norm": 0.5620915032679739, + "acc_norm_stderr": 0.02840830202033269 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.57, + "acc_stderr": 0.04975698519562429, + "acc_norm": 0.57, + "acc_norm_stderr": 0.04975698519562429 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7603305785123967, + "acc_stderr": 
0.03896878985070417, + "acc_norm": 0.7603305785123967, + "acc_norm_stderr": 0.03896878985070417 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5921052631578947, + "acc_stderr": 0.039993097127774734, + "acc_norm": 0.5921052631578947, + "acc_norm_stderr": 0.039993097127774734 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5261437908496732, + "acc_stderr": 0.020200164564804585, + "acc_norm": 0.5261437908496732, + "acc_norm_stderr": 0.020200164564804585 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.4148936170212766, + "acc_stderr": 0.029392236584612496, + "acc_norm": 0.4148936170212766, + "acc_norm_stderr": 0.029392236584612496 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.44642857142857145, + "acc_stderr": 0.04718471485219589, + "acc_norm": 0.44642857142857145, + "acc_norm_stderr": 0.04718471485219589 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5833333333333334, + "acc_stderr": 0.03362277436608044, + "acc_norm": 0.5833333333333334, + "acc_norm_stderr": 0.03362277436608044 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23910614525139665, + "acc_stderr": 0.014265554192331152, + "acc_norm": 0.23910614525139665, + "acc_norm_stderr": 0.014265554192331152 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.69, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.69, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5294117647058824, + "acc_stderr": 0.03032024326500413, + "acc_norm": 0.5294117647058824, + "acc_norm_stderr": 0.03032024326500413 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5836734693877551, + "acc_stderr": 0.03155782816556165, + "acc_norm": 0.5836734693877551, + "acc_norm_stderr": 0.03155782816556165 + }, + 
"harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7721518987341772, + "acc_stderr": 0.027303484599069422, + "acc_norm": 0.7721518987341772, + "acc_norm_stderr": 0.027303484599069422 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.41590612777053454, + "acc_stderr": 0.012588323850313604, + "acc_norm": 0.41590612777053454, + "acc_norm_stderr": 0.012588323850313604 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6764705882352942, + "acc_stderr": 0.032834720561085606, + "acc_norm": 0.6764705882352942, + "acc_norm_stderr": 0.032834720561085606 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.696969696969697, + "acc_stderr": 0.03588624800091709, + "acc_norm": 0.696969696969697, + "acc_norm_stderr": 0.03588624800091709 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3818849449204406, + "mc1_stderr": 0.0170081019391635, + "mc2": 0.5429614095843092, + "mc2_stderr": 0.015630544483169008 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.6115702479338843, + "acc_stderr": 0.01675692157106942, + "acc_norm": 0.6304604486422668, + "acc_norm_stderr": 0.01659488340568543 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + 
"harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "StatPan/all-you-need-is", + "model_sha": "56dd7571c69019e915bf81bfc69725de6a23ceb1", + "model_dtype": "torch.float16", + 
"lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/StatPan/mistral7b-bartending-recipe-v1/result_2023-12-29 07:50:46.json b/StatPan/mistral7b-bartending-recipe-v1/result_2023-12-29 07:50:46.json new file mode 100644 index 0000000000000000000000000000000000000000..8fa827b48d613b3671564dca2387ddfadced4493 --- /dev/null +++ b/StatPan/mistral7b-bartending-recipe-v1/result_2023-12-29 07:50:46.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4249146757679181, + "acc_stderr": 0.014445698968520769, + "acc_norm": 0.5, + "acc_norm_stderr": 0.014611390804670088 + }, + "harness|ko_hellaswag|10": { + "acc": 0.41724756024696275, + "acc_stderr": 0.004920967192255291, + "acc_norm": 0.5492929695279825, + "acc_norm_stderr": 0.004965473894646782 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5730994152046783, + "acc_stderr": 0.03793620616529917, + "acc_norm": 0.5730994152046783, + "acc_norm_stderr": 0.03793620616529917 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6310679611650486, + "acc_stderr": 0.0477761518115674, + "acc_norm": 0.6310679611650486, + "acc_norm_stderr": 0.0477761518115674 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.561941251596424, + "acc_stderr": 0.017742232238257258, + "acc_norm": 0.561941251596424, + "acc_norm_stderr": 0.017742232238257258 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3851851851851852, + "acc_stderr": 0.042039210401562783, + "acc_norm": 0.3851851851851852, + "acc_norm_stderr": 0.042039210401562783 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.43829787234042555, + "acc_stderr": 0.03243618636108101, + "acc_norm": 0.43829787234042555, + "acc_norm_stderr": 0.03243618636108101 + }, + 
"harness|ko_mmlu_virology|5": { + "acc": 0.42771084337349397, + "acc_stderr": 0.03851597683718533, + "acc_norm": 0.42771084337349397, + "acc_norm_stderr": 0.03851597683718533 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5434083601286174, + "acc_stderr": 0.0282908690541976, + "acc_norm": 0.5434083601286174, + "acc_norm_stderr": 0.0282908690541976 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5381165919282511, + "acc_stderr": 0.03346015011973228, + "acc_norm": 0.5381165919282511, + "acc_norm_stderr": 0.03346015011973228 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.46564885496183206, + "acc_stderr": 0.043749285605997376, + "acc_norm": 0.46564885496183206, + "acc_norm_stderr": 0.043749285605997376 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6313131313131313, + "acc_stderr": 0.034373055019806184, + "acc_norm": 0.6313131313131313, + "acc_norm_stderr": 0.034373055019806184 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4482758620689655, + "acc_stderr": 0.04144311810878151, + "acc_norm": 0.4482758620689655, + "acc_norm_stderr": 0.04144311810878151 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.04280105837364397, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.04280105837364397 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5546218487394958, + "acc_stderr": 0.0322841062671639, + "acc_norm": 0.5546218487394958, + "acc_norm_stderr": 0.0322841062671639 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5256410256410257, + "acc_stderr": 0.025317649726448677, + "acc_norm": 0.5256410256410257, + "acc_norm_stderr": 0.025317649726448677 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.61, + 
"acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5648148148148148, + "acc_stderr": 0.04792898170907062, + "acc_norm": 0.5648148148148148, + "acc_norm_stderr": 0.04792898170907062 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4187192118226601, + "acc_stderr": 0.03471192860518468, + "acc_norm": 0.4187192118226601, + "acc_norm_stderr": 0.03471192860518468 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.535483870967742, + "acc_stderr": 0.02837228779796294, + "acc_norm": 0.535483870967742, + "acc_norm_stderr": 0.02837228779796294 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7478632478632479, + "acc_stderr": 0.028447965476231022, + "acc_norm": 0.7478632478632479, + "acc_norm_stderr": 0.028447965476231022 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.49056603773584906, + "acc_stderr": 0.030767394707808086, + "acc_norm": 0.49056603773584906, + "acc_norm_stderr": 0.030767394707808086 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5818181818181818, + "acc_stderr": 0.04724577405731572, + "acc_norm": 0.5818181818181818, + "acc_norm_stderr": 0.04724577405731572 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.34814814814814815, + "acc_stderr": 0.029045600290616255, + "acc_norm": 0.34814814814814815, + "acc_norm_stderr": 0.029045600290616255 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.32450331125827814, + "acc_stderr": 0.038227469376587525, + "acc_norm": 0.32450331125827814, + "acc_norm_stderr": 0.038227469376587525 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6915422885572139, + "acc_stderr": 0.03265819588512697, + "acc_norm": 0.6915422885572139, + "acc_norm_stderr": 0.03265819588512697 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3930635838150289, + "acc_stderr": 
0.0372424959581773, + "acc_norm": 0.3930635838150289, + "acc_norm_stderr": 0.0372424959581773 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.38095238095238093, + "acc_stderr": 0.025010749116137602, + "acc_norm": 0.38095238095238093, + "acc_norm_stderr": 0.025010749116137602 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4027777777777778, + "acc_stderr": 0.04101405519842426, + "acc_norm": 0.4027777777777778, + "acc_norm_stderr": 0.04101405519842426 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.64, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.64, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.569364161849711, + "acc_stderr": 0.02665880027367238, + "acc_norm": 0.569364161849711, + "acc_norm_stderr": 0.02665880027367238 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5460122699386503, + "acc_stderr": 0.0391170190467718, + "acc_norm": 0.5460122699386503, + "acc_norm_stderr": 0.0391170190467718 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5, + "acc_stderr": 0.02782074420373286, + "acc_norm": 0.5, + "acc_norm_stderr": 0.02782074420373286 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5751295336787565, + "acc_stderr": 0.035674713352125395, + "acc_norm": 0.5751295336787565, + "acc_norm_stderr": 0.035674713352125395 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.32456140350877194, + "acc_stderr": 0.04404556157374768, + "acc_norm": 0.32456140350877194, + "acc_norm_stderr": 0.04404556157374768 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5889908256880734, + "acc_stderr": 
0.021095050687277656, + "acc_norm": 0.5889908256880734, + "acc_norm_stderr": 0.021095050687277656 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.044444444444444495, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.044444444444444495 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5261437908496732, + "acc_stderr": 0.028590752958852394, + "acc_norm": 0.5261437908496732, + "acc_norm_stderr": 0.028590752958852394 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7272727272727273, + "acc_stderr": 0.040655781409087044, + "acc_norm": 0.7272727272727273, + "acc_norm_stderr": 0.040655781409087044 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5131578947368421, + "acc_stderr": 0.04067533136309172, + "acc_norm": 0.5131578947368421, + "acc_norm_stderr": 0.04067533136309172 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4362745098039216, + "acc_stderr": 0.02006287424353913, + "acc_norm": 0.4362745098039216, + "acc_norm_stderr": 0.02006287424353913 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.36524822695035464, + "acc_stderr": 0.02872386385328128, + "acc_norm": 0.36524822695035464, + "acc_norm_stderr": 0.02872386385328128 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.5, + "acc_stderr": 0.04745789978762494, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04745789978762494 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.42592592592592593, + "acc_stderr": 0.03372343271653062, + "acc_norm": 0.42592592592592593, + "acc_norm_stderr": 0.03372343271653062 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23687150837988827, + "acc_stderr": 0.014219570788103986, + "acc_norm": 0.23687150837988827, + "acc_norm_stderr": 0.014219570788103986 + }, + "harness|ko_mmlu_college_computer_science|5": { + 
"acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.69, + "acc_stderr": 0.046482319871173156, + "acc_norm": 0.69, + "acc_norm_stderr": 0.046482319871173156 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.36764705882352944, + "acc_stderr": 0.029289413409403192, + "acc_norm": 0.36764705882352944, + "acc_norm_stderr": 0.029289413409403192 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6326530612244898, + "acc_stderr": 0.030862144921087565, + "acc_norm": 0.6326530612244898, + "acc_norm_stderr": 0.030862144921087565 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6919831223628692, + "acc_stderr": 0.03005238933560569, + "acc_norm": 0.6919831223628692, + "acc_norm_stderr": 0.03005238933560569 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3546284224250326, + "acc_stderr": 0.01221857643909016, + "acc_norm": 0.3546284224250326, + "acc_norm_stderr": 0.01221857643909016 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5196078431372549, + "acc_stderr": 0.03506612560524866, + "acc_norm": 0.5196078431372549, + "acc_norm_stderr": 0.03506612560524866 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5696969696969697, + "acc_stderr": 0.03866225962879077, + "acc_norm": 0.5696969696969697, + "acc_norm_stderr": 0.03866225962879077 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3378212974296206, + "mc1_stderr": 0.016557167322516896, + "mc2": 0.5219156106272662, + "mc2_stderr": 0.015613264148505234 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.45218417945690675, + "acc_stderr": 0.017111567130916785, + "acc_norm": 0.46162927981109797, + "acc_norm_stderr": 0.01713966022184556 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + 
"harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 
1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "StatPan/mistral7b-bartending-recipe-v1", + "model_sha": "5fc07b540bbec555260205e3a9005f55806703da", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/StatPan/singung-dpo-v0.1-2200/result_2023-12-26 13:29:32.json b/StatPan/singung-dpo-v0.1-2200/result_2023-12-26 13:29:32.json new file mode 100644 index 0000000000000000000000000000000000000000..85b78d95bc833a80e0fe244fbd65c168626d54ec --- /dev/null +++ b/StatPan/singung-dpo-v0.1-2200/result_2023-12-26 13:29:32.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3703071672354949, + "acc_stderr": 0.014111298751674948, + "acc_norm": 0.41552901023890787, + "acc_norm_stderr": 0.0144013666412164 + }, + "harness|ko_hellaswag|10": { + "acc": 0.37950607448715395, + "acc_stderr": 0.004842723234022034, + "acc_norm": 0.48207528380800635, + "acc_norm_stderr": 0.004986573992451693 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4502923976608187, + "acc_stderr": 0.03815827365913236, + "acc_norm": 0.4502923976608187, + "acc_norm_stderr": 0.03815827365913236 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4854368932038835, + "acc_stderr": 0.04948637324026637, + "acc_norm": 0.4854368932038835, + "acc_norm_stderr": 0.04948637324026637 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 
0.4521072796934866, + "acc_stderr": 0.017797751493865623, + "acc_norm": 0.4521072796934866, + "acc_norm_stderr": 0.017797751493865623 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.362962962962963, + "acc_stderr": 0.04153948404742398, + "acc_norm": 0.362962962962963, + "acc_norm_stderr": 0.04153948404742398 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3404255319148936, + "acc_stderr": 0.03097669299853443, + "acc_norm": 0.3404255319148936, + "acc_norm_stderr": 0.03097669299853443 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3674698795180723, + "acc_stderr": 0.03753267402120574, + "acc_norm": 0.3674698795180723, + "acc_norm_stderr": 0.03753267402120574 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.43729903536977494, + "acc_stderr": 0.028173917761762878, + "acc_norm": 0.43729903536977494, + "acc_norm_stderr": 0.028173917761762878 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.47533632286995514, + "acc_stderr": 0.033516951676526276, + "acc_norm": 0.47533632286995514, + "acc_norm_stderr": 0.033516951676526276 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.37404580152671757, + "acc_stderr": 0.042438692422305246, + "acc_norm": 0.37404580152671757, + "acc_norm_stderr": 0.042438692422305246 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5353535353535354, + "acc_stderr": 0.035534363688280626, + "acc_norm": 0.5353535353535354, + "acc_norm_stderr": 0.035534363688280626 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4, + "acc_stderr": 0.040824829046386284, + "acc_norm": 0.4, + "acc_norm_stderr": 0.040824829046386284 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 
0.13725490196078433, + "acc_stderr": 0.03424084669891521, + "acc_norm": 0.13725490196078433, + "acc_norm_stderr": 0.03424084669891521 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.03214536859788639, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.03214536859788639 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4256410256410256, + "acc_stderr": 0.025069094387296546, + "acc_norm": 0.4256410256410256, + "acc_norm_stderr": 0.025069094387296546 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.52, + "acc_stderr": 0.05021167315686779, + "acc_norm": 0.52, + "acc_norm_stderr": 0.05021167315686779 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5277777777777778, + "acc_stderr": 0.04826217294139894, + "acc_norm": 0.5277777777777778, + "acc_norm_stderr": 0.04826217294139894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3842364532019704, + "acc_stderr": 0.03422398565657551, + "acc_norm": 0.3842364532019704, + "acc_norm_stderr": 0.03422398565657551 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4096774193548387, + "acc_stderr": 0.027976054915347357, + "acc_norm": 0.4096774193548387, + "acc_norm_stderr": 0.027976054915347357 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7008547008547008, + "acc_stderr": 0.02999695185834948, + "acc_norm": 0.7008547008547008, + "acc_norm_stderr": 0.02999695185834948 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4679245283018868, + "acc_stderr": 0.030709486992556538, + "acc_norm": 0.4679245283018868, + "acc_norm_stderr": 0.030709486992556538 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4636363636363636, + "acc_stderr": 0.047764491623961985, + "acc_norm": 0.4636363636363636, + "acc_norm_stderr": 0.047764491623961985 + }, + 
"harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.029116617606083015, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.029116617606083015 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.23178807947019867, + "acc_stderr": 0.03445406271987053, + "acc_norm": 0.23178807947019867, + "acc_norm_stderr": 0.03445406271987053 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.4925373134328358, + "acc_stderr": 0.035351400842767194, + "acc_norm": 0.4925373134328358, + "acc_norm_stderr": 0.035351400842767194 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3699421965317919, + "acc_stderr": 0.03681229633394319, + "acc_norm": 0.3699421965317919, + "acc_norm_stderr": 0.03681229633394319 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.34656084656084657, + "acc_stderr": 0.02450877752102841, + "acc_norm": 0.34656084656084657, + "acc_norm_stderr": 0.02450877752102841 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3541666666666667, + "acc_stderr": 0.039994111357535424, + "acc_norm": 0.3541666666666667, + "acc_norm_stderr": 0.039994111357535424 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.58, + "acc_stderr": 0.04960449637488584, + "acc_norm": 0.58, + "acc_norm_stderr": 0.04960449637488584 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4797687861271676, + "acc_stderr": 0.026897049996382875, + "acc_norm": 0.4797687861271676, + "acc_norm_stderr": 0.026897049996382875 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.44171779141104295, + "acc_stderr": 0.03901591825836184, + "acc_norm": 0.44171779141104295, + "acc_norm_stderr": 0.03901591825836184 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.41358024691358025, + "acc_stderr": 0.027402042040269952, + "acc_norm": 0.41358024691358025, + 
"acc_norm_stderr": 0.027402042040269952 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.48704663212435234, + "acc_stderr": 0.03607228061047749, + "acc_norm": 0.48704663212435234, + "acc_norm_stderr": 0.03607228061047749 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.04142439719489362, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.04142439719489362 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.46055045871559636, + "acc_stderr": 0.02137049460999509, + "acc_norm": 0.46055045871559636, + "acc_norm_stderr": 0.02137049460999509 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3492063492063492, + "acc_stderr": 0.04263906892795133, + "acc_norm": 0.3492063492063492, + "acc_norm_stderr": 0.04263906892795133 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.45098039215686275, + "acc_stderr": 0.02849199358617157, + "acc_norm": 0.45098039215686275, + "acc_norm_stderr": 0.02849199358617157 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.45, + "acc_stderr": 0.049999999999999996, + "acc_norm": 0.45, + "acc_norm_stderr": 0.049999999999999996 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6446280991735537, + "acc_stderr": 0.04369236326573981, + "acc_norm": 0.6446280991735537, + "acc_norm_stderr": 0.04369236326573981 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3815789473684211, + "acc_stderr": 0.039531733777491945, + "acc_norm": 0.3815789473684211, + "acc_norm_stderr": 0.039531733777491945 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3741830065359477, + "acc_stderr": 0.019576953122088844, + "acc_norm": 0.3741830065359477, + "acc_norm_stderr": 0.019576953122088844 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32269503546099293, + "acc_stderr": 
0.02788913930053478, + "acc_norm": 0.32269503546099293, + "acc_norm_stderr": 0.02788913930053478 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3392857142857143, + "acc_stderr": 0.04493949068613539, + "acc_norm": 0.3392857142857143, + "acc_norm_stderr": 0.04493949068613539 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.03141554629402544, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.03141554629402544 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.25251396648044694, + "acc_stderr": 0.014530330201468648, + "acc_norm": 0.25251396648044694, + "acc_norm_stderr": 0.014530330201468648 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.049020713000019756, + "acc_norm": 0.39, + "acc_norm_stderr": 0.049020713000019756 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3713235294117647, + "acc_stderr": 0.02934980313976587, + "acc_norm": 0.3713235294117647, + "acc_norm_stderr": 0.02934980313976587 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4204081632653061, + "acc_stderr": 0.03160106993449604, + "acc_norm": 0.4204081632653061, + "acc_norm_stderr": 0.03160106993449604 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5232067510548524, + "acc_stderr": 0.032512152011410174, + "acc_norm": 0.5232067510548524, + "acc_norm_stderr": 0.032512152011410174 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.32529335071707954, + "acc_stderr": 0.01196531153657153, + "acc_norm": 0.32529335071707954, + "acc_norm_stderr": 0.01196531153657153 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.44607843137254904, + "acc_stderr": 0.03488845451304974, + "acc_norm": 0.44607843137254904, + "acc_norm_stderr": 0.03488845451304974 + }, + 
"harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.44242424242424244, + "acc_stderr": 0.03878372113711274, + "acc_norm": 0.44242424242424244, + "acc_norm_stderr": 0.03878372113711274 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2668298653610771, + "mc1_stderr": 0.015483691939237258, + "mc2": 0.45912122204156075, + "mc2_stderr": 0.016449709945328097 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.448642266824085, + "acc_stderr": 0.017099430514725785, + "acc_norm": 0.4970484061393152, + "acc_norm_stderr": 0.017190054580194694 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + 
"harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "StatPan/singung-dpo-v0.1-2200", + "model_sha": "cb02b9bf247ef8597485d49647c8d91675609fa2", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/SuperPowerMz/SON_Mistral-7B-QLoRA-Peft/result_2024-04-17 02:06:08.json b/SuperPowerMz/SON_Mistral-7B-QLoRA-Peft/result_2024-04-17 02:06:08.json new file mode 100644 index 0000000000000000000000000000000000000000..37357bbbbed30188e41e11f7d6cb3a46f796f8f3 --- /dev/null +++ b/SuperPowerMz/SON_Mistral-7B-QLoRA-Peft/result_2024-04-17 02:06:08.json @@ -0,0 +1,444 @@ +{ + "results": { + 
"harness|ko_arc_challenge|25": { + "acc": 0.28242320819112626, + "acc_stderr": 0.01315545688409722, + "acc_norm": 0.33276450511945393, + "acc_norm_stderr": 0.013769863046192309 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3316072495518821, + "acc_stderr": 0.004698285350019223, + "acc_norm": 0.41724756024696275, + "acc_norm_stderr": 0.0049209671922553 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.45614035087719296, + "acc_stderr": 0.03820042586602967, + "acc_norm": 0.45614035087719296, + "acc_norm_stderr": 0.03820042586602967 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5048543689320388, + "acc_stderr": 0.04950504382128921, + "acc_norm": 0.5048543689320388, + "acc_norm_stderr": 0.04950504382128921 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.388250319284802, + "acc_stderr": 0.017427673295544354, + "acc_norm": 0.388250319284802, + "acc_norm_stderr": 0.017427673295544354 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3111111111111111, + "acc_stderr": 0.03999262876617723, + "acc_norm": 0.3111111111111111, + "acc_norm_stderr": 0.03999262876617723 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3446808510638298, + "acc_stderr": 0.03106898596312215, + "acc_norm": 0.3446808510638298, + "acc_norm_stderr": 0.03106898596312215 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3674698795180723, + "acc_stderr": 0.03753267402120574, + "acc_norm": 0.3674698795180723, + "acc_norm_stderr": 0.03753267402120574 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.43086816720257237, + "acc_stderr": 0.02812534098397271, + "acc_norm": 0.43086816720257237, + "acc_norm_stderr": 0.02812534098397271 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.33183856502242154, + "acc_stderr": 0.03160295143776679, + "acc_norm": 0.33183856502242154, + "acc_norm_stderr": 0.03160295143776679 + }, + 
"harness|ko_mmlu_human_sexuality|5": { + "acc": 0.45038167938931295, + "acc_stderr": 0.04363643698524779, + "acc_norm": 0.45038167938931295, + "acc_norm_stderr": 0.04363643698524779 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.45454545454545453, + "acc_stderr": 0.03547601494006936, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.03547601494006936 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3586206896551724, + "acc_stderr": 0.039966295748767186, + "acc_norm": 0.3586206896551724, + "acc_norm_stderr": 0.039966295748767186 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.04488482852329017, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.04488482852329017 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4369747899159664, + "acc_stderr": 0.03221943636566196, + "acc_norm": 0.4369747899159664, + "acc_norm_stderr": 0.03221943636566196 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4, + "acc_stderr": 0.02483881198803316, + "acc_norm": 0.4, + "acc_norm_stderr": 0.02483881198803316 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.57, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.57, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.42592592592592593, + "acc_stderr": 0.0478034362693679, + "acc_norm": 0.42592592592592593, + "acc_norm_stderr": 0.0478034362693679 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3793103448275862, + "acc_stderr": 0.034139638059062345, + "acc_norm": 0.3793103448275862, + "acc_norm_stderr": 0.034139638059062345 + }, + 
"harness|ko_mmlu_high_school_biology|5": { + "acc": 0.36129032258064514, + "acc_stderr": 0.02732754844795754, + "acc_norm": 0.36129032258064514, + "acc_norm_stderr": 0.02732754844795754 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6623931623931624, + "acc_stderr": 0.030980296992618558, + "acc_norm": 0.6623931623931624, + "acc_norm_stderr": 0.030980296992618558 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3471698113207547, + "acc_stderr": 0.029300101705549655, + "acc_norm": 0.3471698113207547, + "acc_norm_stderr": 0.029300101705549655 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.45454545454545453, + "acc_stderr": 0.04769300568972743, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.04769300568972743 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.28888888888888886, + "acc_stderr": 0.027634907264178544, + "acc_norm": 0.28888888888888886, + "acc_norm_stderr": 0.027634907264178544 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2582781456953642, + "acc_stderr": 0.035737053147634576, + "acc_norm": 0.2582781456953642, + "acc_norm_stderr": 0.035737053147634576 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5074626865671642, + "acc_stderr": 0.03535140084276719, + "acc_norm": 0.5074626865671642, + "acc_norm_stderr": 0.03535140084276719 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3352601156069364, + "acc_stderr": 0.03599586301247078, + "acc_norm": 0.3352601156069364, + "acc_norm_stderr": 0.03599586301247078 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3253968253968254, + "acc_stderr": 0.024130158299762613, + "acc_norm": 0.3253968253968254, + "acc_norm_stderr": 0.024130158299762613 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2986111111111111, + "acc_stderr": 0.03827052357950756, + "acc_norm": 0.2986111111111111, + "acc_norm_stderr": 0.03827052357950756 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + 
"acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.56, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.56, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4190751445086705, + "acc_stderr": 0.026564178111422622, + "acc_norm": 0.4190751445086705, + "acc_norm_stderr": 0.026564178111422622 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3803680981595092, + "acc_stderr": 0.03814269893261837, + "acc_norm": 0.3803680981595092, + "acc_norm_stderr": 0.03814269893261837 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.39814814814814814, + "acc_stderr": 0.02723741509459247, + "acc_norm": 0.39814814814814814, + "acc_norm_stderr": 0.02723741509459247 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.46632124352331605, + "acc_stderr": 0.036002440698671784, + "acc_norm": 0.46632124352331605, + "acc_norm_stderr": 0.036002440698671784 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.04185774424022057, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.04185774424022057 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.44220183486238535, + "acc_stderr": 0.02129361320752021, + "acc_norm": 0.44220183486238535, + "acc_norm_stderr": 0.02129361320752021 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.042857142857142816, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.042857142857142816 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.46078431372549017, + "acc_stderr": 0.028541722692618874, + "acc_norm": 0.46078431372549017, + "acc_norm_stderr": 0.028541722692618874 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.44, + "acc_stderr": 
0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5619834710743802, + "acc_stderr": 0.045291468044357915, + "acc_norm": 0.5619834710743802, + "acc_norm_stderr": 0.045291468044357915 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3355263157894737, + "acc_stderr": 0.038424985593952694, + "acc_norm": 0.3355263157894737, + "acc_norm_stderr": 0.038424985593952694 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.35947712418300654, + "acc_stderr": 0.019412539242032165, + "acc_norm": 0.35947712418300654, + "acc_norm_stderr": 0.019412539242032165 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32978723404255317, + "acc_stderr": 0.028045946942042398, + "acc_norm": 0.32978723404255317, + "acc_norm_stderr": 0.028045946942042398 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.04287858751340456, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.04287858751340456 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4212962962962963, + "acc_stderr": 0.03367462138896078, + "acc_norm": 0.4212962962962963, + "acc_norm_stderr": 0.03367462138896078 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2860335195530726, + "acc_stderr": 0.015113972129062132, + "acc_norm": 0.2860335195530726, + "acc_norm_stderr": 0.015113972129062132 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.41, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.41544117647058826, + "acc_stderr": 0.029935342707877746, + "acc_norm": 0.41544117647058826, + "acc_norm_stderr": 0.029935342707877746 + }, + 
"harness|ko_mmlu_security_studies|5": { + "acc": 0.4, + "acc_stderr": 0.031362502409358915, + "acc_norm": 0.4, + "acc_norm_stderr": 0.031362502409358915 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.4092827004219409, + "acc_stderr": 0.032007041833595914, + "acc_norm": 0.4092827004219409, + "acc_norm_stderr": 0.032007041833595914 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.29595827900912647, + "acc_stderr": 0.011658518525277047, + "acc_norm": 0.29595827900912647, + "acc_norm_stderr": 0.011658518525277047 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.030587591351604246, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.030587591351604246 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03225078108306289, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03225078108306289 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2521419828641371, + "mc1_stderr": 0.015201522246299953, + "mc2": 0.4160580138975093, + "mc2_stderr": 0.015283612333533092 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3695395513577332, + "acc_stderr": 0.01659488340568542, + "acc_norm": 0.4297520661157025, + "acc_norm_stderr": 0.01701984753597221 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + 
"harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + 
"harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "SuperPowerMz/SON_Mistral-7B-QLoRA-Peft", + "model_sha": "f5332637596dc9b925087cfb5cbfda05a6b58b85", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Surromind/Solar_v0.1/result_2024-03-28 23:58:36.json b/Surromind/Solar_v0.1/result_2024-03-28 23:58:36.json new file mode 100644 index 0000000000000000000000000000000000000000..f16866d684e77fcf52a6b3f3444db3b1956107c4 --- /dev/null +++ b/Surromind/Solar_v0.1/result_2024-03-28 23:58:36.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.44368600682593856, + "acc_stderr": 0.014518421825670444, + "acc_norm": 0.507679180887372, + "acc_norm_stderr": 0.014609667440892577 + }, + "harness|ko_hellaswag|10": { + "acc": 0.434973112925712, + "acc_stderr": 0.004947402907996247, + "acc_norm": 0.5925114519020116, + "acc_norm_stderr": 0.004903628887264533 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.036155076303109344, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.036155076303109344 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.7669902912621359, + "acc_stderr": 0.041858325989283164, + "acc_norm": 0.7669902912621359, + "acc_norm_stderr": 0.041858325989283164 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.7292464878671775, + "acc_stderr": 0.01588988836256049, + "acc_norm": 0.7292464878671775, + "acc_norm_stderr": 0.01588988836256049 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4740740740740741, + "acc_stderr": 0.04313531696750574, + "acc_norm": 0.4740740740740741, + "acc_norm_stderr": 0.04313531696750574 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + 
"harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.46382978723404256, + "acc_stderr": 0.03260038511835772, + "acc_norm": 0.46382978723404256, + "acc_norm_stderr": 0.03260038511835772 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39156626506024095, + "acc_stderr": 0.03799857454479636, + "acc_norm": 0.39156626506024095, + "acc_norm_stderr": 0.03799857454479636 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5852090032154341, + "acc_stderr": 0.027982680459759563, + "acc_norm": 0.5852090032154341, + "acc_norm_stderr": 0.027982680459759563 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.6098654708520179, + "acc_stderr": 0.03273766725459156, + "acc_norm": 0.6098654708520179, + "acc_norm_stderr": 0.03273766725459156 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5190839694656488, + "acc_stderr": 0.04382094705550988, + "acc_norm": 0.5190839694656488, + "acc_norm_stderr": 0.04382094705550988 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.64, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.64, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7626262626262627, + "acc_stderr": 0.030313710538198892, + "acc_norm": 0.7626262626262627, + "acc_norm_stderr": 0.030313710538198892 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4206896551724138, + "acc_stderr": 0.0411391498118926, + "acc_norm": 0.4206896551724138, + "acc_norm_stderr": 0.0411391498118926 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.30392156862745096, + "acc_stderr": 0.04576665403207762, + "acc_norm": 0.30392156862745096, + "acc_norm_stderr": 0.04576665403207762 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.592436974789916, + "acc_stderr": 0.03191863374478467, + "acc_norm": 0.592436974789916, + "acc_norm_stderr": 0.03191863374478467 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.6230769230769231, + "acc_stderr": 0.024570975364225995, + "acc_norm": 
0.6230769230769231, + "acc_norm_stderr": 0.024570975364225995 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.71, + "acc_stderr": 0.04560480215720685, + "acc_norm": 0.71, + "acc_norm_stderr": 0.04560480215720685 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.04557239513497751, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.04557239513497751 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4187192118226601, + "acc_stderr": 0.034711928605184676, + "acc_norm": 0.4187192118226601, + "acc_norm_stderr": 0.034711928605184676 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6419354838709678, + "acc_stderr": 0.02727389059430063, + "acc_norm": 0.6419354838709678, + "acc_norm_stderr": 0.02727389059430063 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.8205128205128205, + "acc_stderr": 0.025140935950335445, + "acc_norm": 0.8205128205128205, + "acc_norm_stderr": 0.025140935950335445 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5245283018867924, + "acc_stderr": 0.030735822206205615, + "acc_norm": 0.5245283018867924, + "acc_norm_stderr": 0.030735822206205615 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6090909090909091, + "acc_stderr": 0.04673752333670238, + "acc_norm": 0.6090909090909091, + "acc_norm_stderr": 0.04673752333670238 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.0287420409039485, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.0287420409039485 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.03757949922943342, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.03757949922943342 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6766169154228856, + "acc_stderr": 
0.03307615947979035, + "acc_norm": 0.6766169154228856, + "acc_norm_stderr": 0.03307615947979035 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.47398843930635837, + "acc_stderr": 0.038073017265045125, + "acc_norm": 0.47398843930635837, + "acc_norm_stderr": 0.038073017265045125 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3915343915343915, + "acc_stderr": 0.025138091388851105, + "acc_norm": 0.3915343915343915, + "acc_norm_stderr": 0.025138091388851105 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5833333333333334, + "acc_stderr": 0.04122728707651282, + "acc_norm": 0.5833333333333334, + "acc_norm_stderr": 0.04122728707651282 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.74, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.74, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5606936416184971, + "acc_stderr": 0.026720034380514995, + "acc_norm": 0.5606936416184971, + "acc_norm_stderr": 0.026720034380514995 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5950920245398773, + "acc_stderr": 0.038566721635489125, + "acc_norm": 0.5950920245398773, + "acc_norm_stderr": 0.038566721635489125 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5802469135802469, + "acc_stderr": 0.027460099557005135, + "acc_norm": 0.5802469135802469, + "acc_norm_stderr": 0.027460099557005135 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7253886010362695, + "acc_stderr": 0.032210245080411544, + "acc_norm": 0.7253886010362695, + "acc_norm_stderr": 0.032210245080411544 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 
0.43859649122807015, + "acc_stderr": 0.04668000738510455, + "acc_norm": 0.43859649122807015, + "acc_norm_stderr": 0.04668000738510455 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.7137614678899082, + "acc_stderr": 0.01937943662891996, + "acc_norm": 0.7137614678899082, + "acc_norm_stderr": 0.01937943662891996 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3968253968253968, + "acc_stderr": 0.04375888492727061, + "acc_norm": 0.3968253968253968, + "acc_norm_stderr": 0.04375888492727061 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5784313725490197, + "acc_stderr": 0.02827549015679146, + "acc_norm": 0.5784313725490197, + "acc_norm_stderr": 0.02827549015679146 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.62, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.62, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7355371900826446, + "acc_stderr": 0.04026187527591207, + "acc_norm": 0.7355371900826446, + "acc_norm_stderr": 0.04026187527591207 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.6381578947368421, + "acc_stderr": 0.03910525752849724, + "acc_norm": 0.6381578947368421, + "acc_norm_stderr": 0.03910525752849724 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5408496732026143, + "acc_stderr": 0.020160213617222516, + "acc_norm": 0.5408496732026143, + "acc_norm_stderr": 0.020160213617222516 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3546099290780142, + "acc_stderr": 0.028538650028878634, + "acc_norm": 0.3546099290780142, + "acc_norm_stderr": 0.028538650028878634 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.375, + "acc_stderr": 0.04595091388086298, + "acc_norm": 0.375, + "acc_norm_stderr": 0.04595091388086298 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4212962962962963, + "acc_stderr": 0.033674621388960775, + "acc_norm": 0.4212962962962963, + "acc_norm_stderr": 0.033674621388960775 + }, + 
"harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.22681564245810057, + "acc_stderr": 0.014005843570897882, + "acc_norm": 0.22681564245810057, + "acc_norm_stderr": 0.014005843570897882 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.04999999999999999, + "acc_norm": 0.45, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.64, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.64, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4632352941176471, + "acc_stderr": 0.03029061918048569, + "acc_norm": 0.4632352941176471, + "acc_norm_stderr": 0.03029061918048569 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4816326530612245, + "acc_stderr": 0.031987615467631264, + "acc_norm": 0.4816326530612245, + "acc_norm_stderr": 0.031987615467631264 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7510548523206751, + "acc_stderr": 0.028146970599422644, + "acc_norm": 0.7510548523206751, + "acc_norm_stderr": 0.028146970599422644 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.38070404172099087, + "acc_stderr": 0.012401430654645882, + "acc_norm": 0.38070404172099087, + "acc_norm_stderr": 0.012401430654645882 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6470588235294118, + "acc_stderr": 0.03354092437591518, + "acc_norm": 0.6470588235294118, + "acc_norm_stderr": 0.03354092437591518 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6484848484848484, + "acc_stderr": 0.037282069986826503, + "acc_norm": 0.6484848484848484, + "acc_norm_stderr": 0.037282069986826503 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.27050183598531213, + "mc1_stderr": 0.015550778332842892, + "mc2": 0.4263566669862956, + "mc2_stderr": 0.014879291441628228 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5289256198347108, + "acc_stderr": 0.017161563949916345, + "acc_norm": 
0.5489964580873672, + "acc_norm_stderr": 0.017107618859549346 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + 
"harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Surromind/Solar_v0.1", + "model_sha": "cf07c936b24f06ee31148e5dfc3b2f755b42c64a", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Surromind/gemma-2b-v0.1/result_2024-03-29 02:10:12.json b/Surromind/gemma-2b-v0.1/result_2024-03-29 02:10:12.json new file mode 100644 index 0000000000000000000000000000000000000000..73a3634029a69fbd05f1e17441a5cb9a5b459c72 --- /dev/null +++ b/Surromind/gemma-2b-v0.1/result_2024-03-29 02:10:12.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.25, + "acc_stderr": 0.012653835621466646, + "acc_norm": 0.30119453924914674, + "acc_norm_stderr": 0.013406741767847629 + }, + "harness|ko_hellaswag|10": { + "acc": 0.30860386377215693, + "acc_stderr": 0.004609731925736885, + "acc_norm": 0.3572993427604063, + "acc_norm_stderr": 0.0047822469311949965 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.2982456140350877, + "acc_stderr": 0.03508771929824564, + "acc_norm": 0.2982456140350877, + "acc_norm_stderr": 0.03508771929824564 + }, + 
"harness|ko_mmlu_management|5": { + "acc": 0.34951456310679613, + "acc_stderr": 0.04721188506097172, + "acc_norm": 0.34951456310679613, + "acc_norm_stderr": 0.04721188506097172 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.34610472541507026, + "acc_stderr": 0.017011965266412066, + "acc_norm": 0.34610472541507026, + "acc_norm_stderr": 0.017011965266412066 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.04171654161354543, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.04171654161354543 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3617021276595745, + "acc_stderr": 0.03141082197596239, + "acc_norm": 0.3617021276595745, + "acc_norm_stderr": 0.03141082197596239 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.21686746987951808, + "acc_stderr": 0.03208284450356365, + "acc_norm": 0.21686746987951808, + "acc_norm_stderr": 0.03208284450356365 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.33440514469453375, + "acc_stderr": 0.026795422327893944, + "acc_norm": 0.33440514469453375, + "acc_norm_stderr": 0.026795422327893944 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3452914798206278, + "acc_stderr": 0.031911001928357954, + "acc_norm": 0.3452914798206278, + "acc_norm_stderr": 0.031911001928357954 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.33587786259541985, + "acc_stderr": 0.04142313771996664, + "acc_norm": 0.33587786259541985, + "acc_norm_stderr": 0.04142313771996664 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.35, + "acc_stderr": 0.04793724854411019, + "acc_norm": 0.35, + "acc_norm_stderr": 0.04793724854411019 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3181818181818182, + "acc_stderr": 0.03318477333845331, + "acc_norm": 0.3181818181818182, + "acc_norm_stderr": 0.03318477333845331 + }, 
+ "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3103448275862069, + "acc_stderr": 0.038552896163789485, + "acc_norm": 0.3103448275862069, + "acc_norm_stderr": 0.038552896163789485 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.11764705882352941, + "acc_stderr": 0.03205907733144528, + "acc_norm": 0.11764705882352941, + "acc_norm_stderr": 0.03205907733144528 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3445378151260504, + "acc_stderr": 0.03086868260412163, + "acc_norm": 0.3445378151260504, + "acc_norm_stderr": 0.03086868260412163 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.28974358974358977, + "acc_stderr": 0.023000628243687943, + "acc_norm": 0.28974358974358977, + "acc_norm_stderr": 0.023000628243687943 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.47, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.47, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.0471282125742677, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.0471282125742677 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3054187192118227, + "acc_stderr": 0.03240661565868408, + "acc_norm": 0.3054187192118227, + "acc_norm_stderr": 0.03240661565868408 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.35161290322580646, + "acc_stderr": 0.027162537826948458, + "acc_norm": 0.35161290322580646, + "acc_norm_stderr": 0.027162537826948458 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5341880341880342, + "acc_stderr": 0.03267942734081228, + "acc_norm": 0.5341880341880342, + "acc_norm_stderr": 0.03267942734081228 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3433962264150943, + "acc_stderr": 0.029224526469124792, + "acc_norm": 
0.3433962264150943, + "acc_norm_stderr": 0.029224526469124792 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.39090909090909093, + "acc_stderr": 0.04673752333670237, + "acc_norm": 0.39090909090909093, + "acc_norm_stderr": 0.04673752333670237 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24444444444444444, + "acc_stderr": 0.026202766534652148, + "acc_norm": 0.24444444444444444, + "acc_norm_stderr": 0.026202766534652148 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.23841059602649006, + "acc_stderr": 0.0347918557259966, + "acc_norm": 0.23841059602649006, + "acc_norm_stderr": 0.0347918557259966 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.3781094527363184, + "acc_stderr": 0.034288678487786564, + "acc_norm": 0.3781094527363184, + "acc_norm_stderr": 0.034288678487786564 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.28901734104046245, + "acc_stderr": 0.03456425745087, + "acc_norm": 0.28901734104046245, + "acc_norm_stderr": 0.03456425745087 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.29894179894179895, + "acc_stderr": 0.023577604791655795, + "acc_norm": 0.29894179894179895, + "acc_norm_stderr": 0.023577604791655795 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2638888888888889, + "acc_stderr": 0.03685651095897532, + "acc_norm": 0.2638888888888889, + "acc_norm_stderr": 0.03685651095897532 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2976878612716763, + "acc_stderr": 0.024617055388677003, + "acc_norm": 0.2976878612716763, + "acc_norm_stderr": 0.024617055388677003 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.34355828220858897, + "acc_stderr": 
0.03731133519673894, + "acc_norm": 0.34355828220858897, + "acc_norm_stderr": 0.03731133519673894 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.02540719779889016, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.02540719779889016 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.30569948186528495, + "acc_stderr": 0.033248379397581594, + "acc_norm": 0.30569948186528495, + "acc_norm_stderr": 0.033248379397581594 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3508771929824561, + "acc_stderr": 0.044895393502706986, + "acc_norm": 0.3508771929824561, + "acc_norm_stderr": 0.044895393502706986 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3743119266055046, + "acc_stderr": 0.020748959408988306, + "acc_norm": 0.3743119266055046, + "acc_norm_stderr": 0.020748959408988306 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2698412698412698, + "acc_stderr": 0.039701582732351734, + "acc_norm": 0.2698412698412698, + "acc_norm_stderr": 0.039701582732351734 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3660130718954248, + "acc_stderr": 0.02758281141515961, + "acc_norm": 0.3660130718954248, + "acc_norm_stderr": 0.02758281141515961 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5041322314049587, + "acc_stderr": 0.04564198767432754, + "acc_norm": 0.5041322314049587, + "acc_norm_stderr": 0.04564198767432754 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.26973684210526316, + "acc_stderr": 0.036117805602848975, + "acc_norm": 0.26973684210526316, + "acc_norm_stderr": 0.036117805602848975 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 
0.3137254901960784, + "acc_stderr": 0.018771683893528176, + "acc_norm": 0.3137254901960784, + "acc_norm_stderr": 0.018771683893528176 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2978723404255319, + "acc_stderr": 0.02728160834446941, + "acc_norm": 0.2978723404255319, + "acc_norm_stderr": 0.02728160834446941 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.39285714285714285, + "acc_stderr": 0.046355501356099754, + "acc_norm": 0.39285714285714285, + "acc_norm_stderr": 0.046355501356099754 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2361111111111111, + "acc_stderr": 0.028963702570791026, + "acc_norm": 0.2361111111111111, + "acc_norm_stderr": 0.028963702570791026 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23575418994413408, + "acc_stderr": 0.014196375686290804, + "acc_norm": 0.23575418994413408, + "acc_norm_stderr": 0.014196375686290804 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.27205882352941174, + "acc_stderr": 0.027033041151681456, + "acc_norm": 0.27205882352941174, + "acc_norm_stderr": 0.027033041151681456 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2816326530612245, + "acc_stderr": 0.028795185574291282, + "acc_norm": 0.2816326530612245, + "acc_norm_stderr": 0.028795185574291282 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.38396624472573837, + "acc_stderr": 0.031658678064106674, + "acc_norm": 0.38396624472573837, + "acc_norm_stderr": 0.031658678064106674 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.29595827900912647, + "acc_stderr": 0.011658518525277039, + "acc_norm": 0.29595827900912647, + 
"acc_norm_stderr": 0.011658518525277039 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.03308611113236434, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.03308611113236434 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.41818181818181815, + "acc_stderr": 0.038517163193983954, + "acc_norm": 0.41818181818181815, + "acc_norm_stderr": 0.038517163193983954 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.26560587515299877, + "mc1_stderr": 0.015461027627253602, + "mc2": 0.42603644196671103, + "mc2_stderr": 0.015812506803842018 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.12750885478158205, + "acc_stderr": 0.011467414350410928, + "acc_norm": 0.22668240850059032, + "acc_norm_stderr": 0.014394701800505892 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Surromind/gemma-2b-v0.1", + "model_sha": "3f9bdfea5688f36d91d07a991741875333b0f338", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/T3Q-LLM/T3Q-LLM-sft1.0-dpo1.0/result_2024-04-17 12:40:55.json b/T3Q-LLM/T3Q-LLM-sft1.0-dpo1.0/result_2024-04-17 12:40:55.json new file mode 
100644 index 0000000000000000000000000000000000000000..ada01594a723d4132d9635aeaad10aa9ea3fd63e --- /dev/null +++ b/T3Q-LLM/T3Q-LLM-sft1.0-dpo1.0/result_2024-04-17 12:40:55.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.5204778156996587, + "acc_stderr": 0.014599131353034998, + "acc_norm": 0.5631399317406144, + "acc_norm_stderr": 0.01449442158425652 + }, + "harness|ko_hellaswag|10": { + "acc": 0.49880501892053375, + "acc_stderr": 0.00498976716081135, + "acc_norm": 0.6731726747659829, + "acc_norm_stderr": 0.004680949283855316 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6842105263157895, + "acc_stderr": 0.035650796707083106, + "acc_norm": 0.6842105263157895, + "acc_norm_stderr": 0.035650796707083106 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6796116504854369, + "acc_stderr": 0.04620284082280041, + "acc_norm": 0.6796116504854369, + "acc_norm_stderr": 0.04620284082280041 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.7394636015325671, + "acc_stderr": 0.015696008563807123, + "acc_norm": 0.7394636015325671, + "acc_norm_stderr": 0.015696008563807123 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4962962962962963, + "acc_stderr": 0.04319223625811331, + "acc_norm": 0.4962962962962963, + "acc_norm_stderr": 0.04319223625811331 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.22, + "acc_stderr": 0.041633319989322695, + "acc_norm": 0.22, + "acc_norm_stderr": 0.041633319989322695 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.5319148936170213, + "acc_stderr": 0.03261936918467381, + "acc_norm": 0.5319148936170213, + "acc_norm_stderr": 0.03261936918467381 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.5120481927710844, + "acc_stderr": 0.038913644958358175, + "acc_norm": 0.5120481927710844, + "acc_norm_stderr": 0.038913644958358175 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6077170418006431, + "acc_stderr": 0.027731258647011998, + "acc_norm": 0.6077170418006431, + 
"acc_norm_stderr": 0.027731258647011998 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.6367713004484304, + "acc_stderr": 0.032277904428505, + "acc_norm": 0.6367713004484304, + "acc_norm_stderr": 0.032277904428505 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5954198473282443, + "acc_stderr": 0.043046937953806645, + "acc_norm": 0.5954198473282443, + "acc_norm_stderr": 0.043046937953806645 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7676767676767676, + "acc_stderr": 0.030088629490217487, + "acc_norm": 0.7676767676767676, + "acc_norm_stderr": 0.030088629490217487 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5103448275862069, + "acc_stderr": 0.04165774775728763, + "acc_norm": 0.5103448275862069, + "acc_norm_stderr": 0.04165774775728763 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3431372549019608, + "acc_stderr": 0.04724007352383888, + "acc_norm": 0.3431372549019608, + "acc_norm_stderr": 0.04724007352383888 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6134453781512605, + "acc_stderr": 0.031631458075523776, + "acc_norm": 0.6134453781512605, + "acc_norm_stderr": 0.031631458075523776 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5615384615384615, + "acc_stderr": 0.025158266016868606, + "acc_norm": 0.5615384615384615, + "acc_norm_stderr": 0.025158266016868606 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.62, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.62, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.04793724854411019, + "acc_norm": 0.35, + "acc_norm_stderr": 0.04793724854411019 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6388888888888888, + "acc_stderr": 0.04643454608906275, + "acc_norm": 
0.6388888888888888, + "acc_norm_stderr": 0.04643454608906275 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3793103448275862, + "acc_stderr": 0.03413963805906235, + "acc_norm": 0.3793103448275862, + "acc_norm_stderr": 0.03413963805906235 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.635483870967742, + "acc_stderr": 0.02737987122994324, + "acc_norm": 0.635483870967742, + "acc_norm_stderr": 0.02737987122994324 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.8034188034188035, + "acc_stderr": 0.02603538609895129, + "acc_norm": 0.8034188034188035, + "acc_norm_stderr": 0.02603538609895129 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5773584905660377, + "acc_stderr": 0.03040233144576954, + "acc_norm": 0.5773584905660377, + "acc_norm_stderr": 0.03040233144576954 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6090909090909091, + "acc_stderr": 0.04673752333670239, + "acc_norm": 0.6090909090909091, + "acc_norm_stderr": 0.04673752333670239 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.34074074074074073, + "acc_stderr": 0.028897748741131143, + "acc_norm": 0.34074074074074073, + "acc_norm_stderr": 0.028897748741131143 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33774834437086093, + "acc_stderr": 0.038615575462551684, + "acc_norm": 0.33774834437086093, + "acc_norm_stderr": 0.038615575462551684 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7114427860696517, + "acc_stderr": 0.03203841040213321, + "acc_norm": 0.7114427860696517, + "acc_norm_stderr": 0.03203841040213321 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5028901734104047, + "acc_stderr": 0.038124005659748335, + "acc_norm": 0.5028901734104047, + "acc_norm_stderr": 0.038124005659748335 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3941798941798942, + "acc_stderr": 0.025167982333894143, + "acc_norm": 0.3941798941798942, + "acc_norm_stderr": 0.025167982333894143 + }, + 
"harness|ko_mmlu_college_biology|5": { + "acc": 0.5763888888888888, + "acc_stderr": 0.041321250197233685, + "acc_norm": 0.5763888888888888, + "acc_norm_stderr": 0.041321250197233685 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.74, + "acc_stderr": 0.04408440022768077, + "acc_norm": 0.74, + "acc_norm_stderr": 0.04408440022768077 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5895953757225434, + "acc_stderr": 0.026483392042098177, + "acc_norm": 0.5895953757225434, + "acc_norm_stderr": 0.026483392042098177 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.6073619631901841, + "acc_stderr": 0.0383674090783103, + "acc_norm": 0.6073619631901841, + "acc_norm_stderr": 0.0383674090783103 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6141975308641975, + "acc_stderr": 0.027085401226132146, + "acc_norm": 0.6141975308641975, + "acc_norm_stderr": 0.027085401226132146 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7875647668393783, + "acc_stderr": 0.02951928261681723, + "acc_norm": 0.7875647668393783, + "acc_norm_stderr": 0.02951928261681723 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.4473684210526316, + "acc_stderr": 0.046774730044912, + "acc_norm": 0.4473684210526316, + "acc_norm_stderr": 0.046774730044912 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.7321100917431193, + "acc_stderr": 0.01898746225797865, + "acc_norm": 0.7321100917431193, + "acc_norm_stderr": 0.01898746225797865 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.38095238095238093, + "acc_stderr": 0.043435254289490965, + "acc_norm": 0.38095238095238093, + "acc_norm_stderr": 0.043435254289490965 + }, + 
"harness|ko_mmlu_nutrition|5": { + "acc": 0.5816993464052288, + "acc_stderr": 0.028245134024387292, + "acc_norm": 0.5816993464052288, + "acc_norm_stderr": 0.028245134024387292 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.63, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.63, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.71900826446281, + "acc_stderr": 0.04103203830514512, + "acc_norm": 0.71900826446281, + "acc_norm_stderr": 0.04103203830514512 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.631578947368421, + "acc_stderr": 0.03925523381052932, + "acc_norm": 0.631578947368421, + "acc_norm_stderr": 0.03925523381052932 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5375816993464052, + "acc_stderr": 0.020170614974969765, + "acc_norm": 0.5375816993464052, + "acc_norm_stderr": 0.020170614974969765 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.425531914893617, + "acc_stderr": 0.02949482760014437, + "acc_norm": 0.425531914893617, + "acc_norm_stderr": 0.02949482760014437 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4375, + "acc_stderr": 0.04708567521880525, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.04708567521880525 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4305555555555556, + "acc_stderr": 0.03376922151252335, + "acc_norm": 0.4305555555555556, + "acc_norm_stderr": 0.03376922151252335 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.3005586592178771, + "acc_stderr": 0.015334566806251159, + "acc_norm": 0.3005586592178771, + "acc_norm_stderr": 0.015334566806251159 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.72, + "acc_stderr": 0.045126085985421296, + "acc_norm": 0.72, + "acc_norm_stderr": 0.045126085985421296 + }, + 
"harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5404411764705882, + "acc_stderr": 0.03027332507734576, + "acc_norm": 0.5404411764705882, + "acc_norm_stderr": 0.03027332507734576 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6408163265306123, + "acc_stderr": 0.03071356045510849, + "acc_norm": 0.6408163265306123, + "acc_norm_stderr": 0.03071356045510849 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7721518987341772, + "acc_stderr": 0.02730348459906942, + "acc_norm": 0.7721518987341772, + "acc_norm_stderr": 0.02730348459906942 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.4041720990873533, + "acc_stderr": 0.012533504046491367, + "acc_norm": 0.4041720990873533, + "acc_norm_stderr": 0.012533504046491367 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.7549019607843137, + "acc_stderr": 0.03019028245350195, + "acc_norm": 0.7549019607843137, + "acc_norm_stderr": 0.03019028245350195 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.7393939393939394, + "acc_stderr": 0.03427743175816524, + "acc_norm": 0.7393939393939394, + "acc_norm_stderr": 0.03427743175816524 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3843329253365973, + "mc1_stderr": 0.017028707301245196, + "mc2": 0.5419514957059137, + "mc2_stderr": 0.01593235153874721 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.6340023612750886, + "acc_stderr": 0.016561489664895703, + "acc_norm": 0.6399055489964581, + "acc_norm_stderr": 0.016503686720440076 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + 
"harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + 
"harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "T3Q-LLM/T3Q-LLM-sft1.0-dpo1.0", + "model_sha": "9b553f5547f7208f7c1ca5fdb9b50b332edbf945", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/T3Q-LLM/T3Q-LLM-solar10.8-sft-v1.0/result_2024-04-16 10:11:27.json b/T3Q-LLM/T3Q-LLM-solar10.8-sft-v1.0/result_2024-04-16 10:11:27.json new file mode 100644 index 0000000000000000000000000000000000000000..9e49f818a98d46f1977e8271f1d98d41f3be0b86 --- /dev/null +++ b/T3Q-LLM/T3Q-LLM-solar10.8-sft-v1.0/result_2024-04-16 10:11:27.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4872013651877133, + "acc_stderr": 0.014606603181012544, + "acc_norm": 0.5409556313993175, + "acc_norm_stderr": 0.01456229107360123 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4826727743477395, + "acc_stderr": 0.004986784319771786, + "acc_norm": 0.6543517227643896, + "acc_norm_stderr": 0.00474607219107258 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6900584795321637, + "acc_stderr": 0.035469769593931624, + "acc_norm": 0.6900584795321637, + "acc_norm_stderr": 0.035469769593931624 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6504854368932039, + "acc_stderr": 0.047211885060971716, + "acc_norm": 0.6504854368932039, + "acc_norm_stderr": 0.047211885060971716 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.7420178799489144, + "acc_stderr": 0.01564583018834895, + "acc_norm": 0.7420178799489144, + "acc_norm_stderr": 0.01564583018834895 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.043163785995113245, + 
"acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.043163785995113245 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.548936170212766, + "acc_stderr": 0.03252909619613197, + "acc_norm": 0.548936170212766, + "acc_norm_stderr": 0.03252909619613197 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.5, + "acc_stderr": 0.03892494720807614, + "acc_norm": 0.5, + "acc_norm_stderr": 0.03892494720807614 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6077170418006431, + "acc_stderr": 0.027731258647012005, + "acc_norm": 0.6077170418006431, + "acc_norm_stderr": 0.027731258647012005 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.6053811659192825, + "acc_stderr": 0.03280400504755291, + "acc_norm": 0.6053811659192825, + "acc_norm_stderr": 0.03280400504755291 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6183206106870229, + "acc_stderr": 0.04260735157644561, + "acc_norm": 0.6183206106870229, + "acc_norm_stderr": 0.04260735157644561 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7474747474747475, + "acc_stderr": 0.030954055470365907, + "acc_norm": 0.7474747474747475, + "acc_norm_stderr": 0.030954055470365907 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5103448275862069, + "acc_stderr": 0.04165774775728763, + "acc_norm": 0.5103448275862069, + "acc_norm_stderr": 0.04165774775728763 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3235294117647059, + "acc_stderr": 0.046550104113196177, + "acc_norm": 0.3235294117647059, + "acc_norm_stderr": 0.046550104113196177 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6050420168067226, + "acc_stderr": 0.031753678460966245, + 
"acc_norm": 0.6050420168067226, + "acc_norm_stderr": 0.031753678460966245 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5564102564102564, + "acc_stderr": 0.025189149894764208, + "acc_norm": 0.5564102564102564, + "acc_norm_stderr": 0.025189149894764208 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.64, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.64, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6574074074074074, + "acc_stderr": 0.045879047413018105, + "acc_norm": 0.6574074074074074, + "acc_norm_stderr": 0.045879047413018105 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3842364532019704, + "acc_stderr": 0.034223985656575494, + "acc_norm": 0.3842364532019704, + "acc_norm_stderr": 0.034223985656575494 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6258064516129033, + "acc_stderr": 0.027528904299845704, + "acc_norm": 0.6258064516129033, + "acc_norm_stderr": 0.027528904299845704 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.8205128205128205, + "acc_stderr": 0.02514093595033544, + "acc_norm": 0.8205128205128205, + "acc_norm_stderr": 0.02514093595033544 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5735849056603773, + "acc_stderr": 0.030437794342983052, + "acc_norm": 0.5735849056603773, + "acc_norm_stderr": 0.030437794342983052 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6090909090909091, + "acc_stderr": 0.04673752333670239, + "acc_norm": 0.6090909090909091, + "acc_norm_stderr": 0.04673752333670239 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.028742040903948492, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.028742040903948492 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 
0.31125827814569534, + "acc_stderr": 0.03780445850526732, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.03780445850526732 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7064676616915423, + "acc_stderr": 0.032200241045342054, + "acc_norm": 0.7064676616915423, + "acc_norm_stderr": 0.032200241045342054 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.48554913294797686, + "acc_stderr": 0.03810871630454764, + "acc_norm": 0.48554913294797686, + "acc_norm_stderr": 0.03810871630454764 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.02510742548113727, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.02510742548113727 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5763888888888888, + "acc_stderr": 0.041321250197233685, + "acc_norm": 0.5763888888888888, + "acc_norm_stderr": 0.041321250197233685 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.75, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.75, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5809248554913294, + "acc_stderr": 0.026564178111422622, + "acc_norm": 0.5809248554913294, + "acc_norm_stderr": 0.026564178111422622 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.6012269938650306, + "acc_stderr": 0.03847021420456022, + "acc_norm": 0.6012269938650306, + "acc_norm_stderr": 0.03847021420456022 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5925925925925926, + "acc_stderr": 0.02733954664066274, + "acc_norm": 0.5925925925925926, + "acc_norm_stderr": 0.02733954664066274 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.047609522856952365, + "acc_norm": 0.34, + "acc_norm_stderr": 0.047609522856952365 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": 
{ + "acc": 0.7772020725388601, + "acc_stderr": 0.03003114797764154, + "acc_norm": 0.7772020725388601, + "acc_norm_stderr": 0.03003114797764154 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.42105263157894735, + "acc_stderr": 0.046446020912223177, + "acc_norm": 0.42105263157894735, + "acc_norm_stderr": 0.046446020912223177 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.7321100917431193, + "acc_stderr": 0.01898746225797865, + "acc_norm": 0.7321100917431193, + "acc_norm_stderr": 0.01898746225797865 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3968253968253968, + "acc_stderr": 0.04375888492727062, + "acc_norm": 0.3968253968253968, + "acc_norm_stderr": 0.04375888492727062 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5915032679738562, + "acc_stderr": 0.028146405993096358, + "acc_norm": 0.5915032679738562, + "acc_norm_stderr": 0.028146405993096358 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7272727272727273, + "acc_stderr": 0.04065578140908705, + "acc_norm": 0.7272727272727273, + "acc_norm_stderr": 0.04065578140908705 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.6052631578947368, + "acc_stderr": 0.039777499346220734, + "acc_norm": 0.6052631578947368, + "acc_norm_stderr": 0.039777499346220734 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5310457516339869, + "acc_stderr": 0.020188804456361887, + "acc_norm": 0.5310457516339869, + "acc_norm_stderr": 0.020188804456361887 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.39361702127659576, + "acc_stderr": 0.029144544781596154, + "acc_norm": 0.39361702127659576, + "acc_norm_stderr": 0.029144544781596154 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4107142857142857, + "acc_stderr": 0.04669510663875192, + "acc_norm": 0.4107142857142857, + "acc_norm_stderr": 
0.04669510663875192 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.44907407407407407, + "acc_stderr": 0.03392238405321617, + "acc_norm": 0.44907407407407407, + "acc_norm_stderr": 0.03392238405321617 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.17318435754189945, + "acc_stderr": 0.012655809068644823, + "acc_norm": 0.17318435754189945, + "acc_norm_stderr": 0.012655809068644823 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.69, + "acc_stderr": 0.046482319871173156, + "acc_norm": 0.69, + "acc_norm_stderr": 0.046482319871173156 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5257352941176471, + "acc_stderr": 0.030332578094555026, + "acc_norm": 0.5257352941176471, + "acc_norm_stderr": 0.030332578094555026 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.636734693877551, + "acc_stderr": 0.030789051139030802, + "acc_norm": 0.636734693877551, + "acc_norm_stderr": 0.030789051139030802 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7763713080168776, + "acc_stderr": 0.027123298205229962, + "acc_norm": 0.7763713080168776, + "acc_norm_stderr": 0.027123298205229962 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.41590612777053454, + "acc_stderr": 0.012588323850313596, + "acc_norm": 0.41590612777053454, + "acc_norm_stderr": 0.012588323850313596 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.75, + "acc_stderr": 0.03039153369274154, + "acc_norm": 0.75, + "acc_norm_stderr": 0.03039153369274154 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.7454545454545455, + "acc_stderr": 0.03401506715249039, + "acc_norm": 0.7454545454545455, + "acc_norm_stderr": 0.03401506715249039 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.33414932680538556, + "mc1_stderr": 0.01651253067715052, + 
"mc2": 0.4886051214791807, + "mc2_stderr": 0.0156636395522276 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.6481700118063755, + "acc_stderr": 0.016418206451218054, + "acc_norm": 0.6564344746162928, + "acc_norm_stderr": 0.016327334806429134 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + 
"harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "T3Q-LLM/T3Q-LLM-solar10.8-sft-v1.0", + "model_sha": "e9ad89a994e794eb0af2d3a0a5f5cb723a4bcb0c", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/T3Q-LLM/T3Q-LLM1-CV-v1.0/result_2024-05-07 12:24:30.json b/T3Q-LLM/T3Q-LLM1-CV-v1.0/result_2024-05-07 12:24:30.json new file mode 100644 index 0000000000000000000000000000000000000000..44fd1c9701580ad6ad5ddd4478599a3d793c015f --- /dev/null +++ b/T3Q-LLM/T3Q-LLM1-CV-v1.0/result_2024-05-07 12:24:30.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4462457337883959, + "acc_stderr": 0.014526705548539985, + "acc_norm": 0.5136518771331058, + "acc_norm_stderr": 0.01460594342986095 + }, + "harness|ko_hellaswag|10": { + "acc": 0.44652459669388567, + "acc_stderr": 0.0049611615892284164, + "acc_norm": 0.6050587532364071, + "acc_norm_stderr": 0.004878390226591714 + }, + 
"harness|ko_mmlu_world_religions|5": { + "acc": 0.6257309941520468, + "acc_stderr": 0.03711601185389482, + "acc_norm": 0.6257309941520468, + "acc_norm_stderr": 0.03711601185389482 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6796116504854369, + "acc_stderr": 0.04620284082280041, + "acc_norm": 0.6796116504854369, + "acc_norm_stderr": 0.04620284082280041 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6807151979565773, + "acc_stderr": 0.016671261749538743, + "acc_norm": 0.6807151979565773, + "acc_norm_stderr": 0.016671261749538743 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4222222222222222, + "acc_stderr": 0.042667634040995814, + "acc_norm": 0.4222222222222222, + "acc_norm_stderr": 0.042667634040995814 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.19, + "acc_stderr": 0.03942772444036623, + "acc_norm": 0.19, + "acc_norm_stderr": 0.03942772444036623 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.46808510638297873, + "acc_stderr": 0.03261936918467383, + "acc_norm": 0.46808510638297873, + "acc_norm_stderr": 0.03261936918467383 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4457831325301205, + "acc_stderr": 0.03869543323472101, + "acc_norm": 0.4457831325301205, + "acc_norm_stderr": 0.03869543323472101 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5659163987138264, + "acc_stderr": 0.0281502322445356, + "acc_norm": 0.5659163987138264, + "acc_norm_stderr": 0.0281502322445356 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.57847533632287, + "acc_stderr": 0.03314190222110657, + "acc_norm": 0.57847533632287, + "acc_norm_stderr": 0.03314190222110657 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5343511450381679, + "acc_stderr": 0.04374928560599738, + "acc_norm": 0.5343511450381679, + "acc_norm_stderr": 0.04374928560599738 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + 
"harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6515151515151515, + "acc_stderr": 0.033948539651564025, + "acc_norm": 0.6515151515151515, + "acc_norm_stderr": 0.033948539651564025 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.43448275862068964, + "acc_stderr": 0.04130740879555497, + "acc_norm": 0.43448275862068964, + "acc_norm_stderr": 0.04130740879555497 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.04336432707993176, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.04336432707993176 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5630252100840336, + "acc_stderr": 0.03221943636566196, + "acc_norm": 0.5630252100840336, + "acc_norm_stderr": 0.03221943636566196 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5256410256410257, + "acc_stderr": 0.025317649726448673, + "acc_norm": 0.5256410256410257, + "acc_norm_stderr": 0.025317649726448673 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.59, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.59, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6851851851851852, + "acc_stderr": 0.04489931073591312, + "acc_norm": 0.6851851851851852, + "acc_norm_stderr": 0.04489931073591312 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.33497536945812806, + "acc_stderr": 0.033208527423483104, + "acc_norm": 0.33497536945812806, + "acc_norm_stderr": 0.033208527423483104 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5741935483870968, + "acc_stderr": 0.028129112709165904, + "acc_norm": 0.5741935483870968, + "acc_norm_stderr": 0.028129112709165904 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7735042735042735, + "acc_stderr": 0.027421007295392926, + "acc_norm": 
0.7735042735042735, + "acc_norm_stderr": 0.027421007295392926 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5320754716981132, + "acc_stderr": 0.030709486992556538, + "acc_norm": 0.5320754716981132, + "acc_norm_stderr": 0.030709486992556538 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5363636363636364, + "acc_stderr": 0.047764491623961985, + "acc_norm": 0.5363636363636364, + "acc_norm_stderr": 0.047764491623961985 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.29259259259259257, + "acc_stderr": 0.027738969632176088, + "acc_norm": 0.29259259259259257, + "acc_norm_stderr": 0.027738969632176088 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33112582781456956, + "acc_stderr": 0.038425817186598696, + "acc_norm": 0.33112582781456956, + "acc_norm_stderr": 0.038425817186598696 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6318407960199005, + "acc_stderr": 0.03410410565495302, + "acc_norm": 0.6318407960199005, + "acc_norm_stderr": 0.03410410565495302 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.45664739884393063, + "acc_stderr": 0.03798106566014498, + "acc_norm": 0.45664739884393063, + "acc_norm_stderr": 0.03798106566014498 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3994708994708995, + "acc_stderr": 0.02522545028406788, + "acc_norm": 0.3994708994708995, + "acc_norm_stderr": 0.02522545028406788 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5208333333333334, + "acc_stderr": 0.041775789507399935, + "acc_norm": 0.5208333333333334, + "acc_norm_stderr": 0.041775789507399935 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.67, + "acc_stderr": 0.047258156262526066, + "acc_norm": 0.67, + "acc_norm_stderr": 0.047258156262526066 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5202312138728323, + 
"acc_stderr": 0.026897049996382868, + "acc_norm": 0.5202312138728323, + "acc_norm_stderr": 0.026897049996382868 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5276073619631901, + "acc_stderr": 0.0392237829061099, + "acc_norm": 0.5276073619631901, + "acc_norm_stderr": 0.0392237829061099 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5524691358024691, + "acc_stderr": 0.027667138569422708, + "acc_norm": 0.5524691358024691, + "acc_norm_stderr": 0.027667138569422708 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7409326424870466, + "acc_stderr": 0.031618779179354094, + "acc_norm": 0.7409326424870466, + "acc_norm_stderr": 0.031618779179354094 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.41228070175438597, + "acc_stderr": 0.04630653203366597, + "acc_norm": 0.41228070175438597, + "acc_norm_stderr": 0.04630653203366597 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6880733944954128, + "acc_stderr": 0.01986296797670724, + "acc_norm": 0.6880733944954128, + "acc_norm_stderr": 0.01986296797670724 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.04360314860077459, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.04360314860077459 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5490196078431373, + "acc_stderr": 0.02849199358617156, + "acc_norm": 0.5490196078431373, + "acc_norm_stderr": 0.02849199358617156 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6942148760330579, + "acc_stderr": 0.04205953933884121, + "acc_norm": 0.6942148760330579, + "acc_norm_stderr": 0.04205953933884121 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 
0.5855263157894737, + "acc_stderr": 0.04008973785779206, + "acc_norm": 0.5855263157894737, + "acc_norm_stderr": 0.04008973785779206 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.48856209150326796, + "acc_stderr": 0.020222541515610874, + "acc_norm": 0.48856209150326796, + "acc_norm_stderr": 0.020222541515610874 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3262411347517731, + "acc_stderr": 0.02796845304356317, + "acc_norm": 0.3262411347517731, + "acc_norm_stderr": 0.02796845304356317 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.04547960999764376, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.04547960999764376 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.0340470532865388, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.0340470532865388 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.19776536312849163, + "acc_stderr": 0.013321620594050948, + "acc_norm": 0.19776536312849163, + "acc_norm_stderr": 0.013321620594050948 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5073529411764706, + "acc_stderr": 0.030369552523902173, + "acc_norm": 0.5073529411764706, + "acc_norm_stderr": 0.030369552523902173 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5591836734693878, + "acc_stderr": 0.03178419114175363, + "acc_norm": 0.5591836734693878, + "acc_norm_stderr": 0.03178419114175363 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7341772151898734, + "acc_stderr": 0.028756799629658335, + "acc_norm": 0.7341772151898734, + "acc_norm_stderr": 
0.028756799629658335 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.38722294654498046, + "acc_stderr": 0.012441155326854933, + "acc_norm": 0.38722294654498046, + "acc_norm_stderr": 0.012441155326854933 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6617647058823529, + "acc_stderr": 0.03320574612945431, + "acc_norm": 0.6617647058823529, + "acc_norm_stderr": 0.03320574612945431 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.696969696969697, + "acc_stderr": 0.03588624800091708, + "acc_norm": 0.696969696969697, + "acc_norm_stderr": 0.03588624800091708 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3671970624235006, + "mc1_stderr": 0.01687480500145318, + "mc2": 0.5519207261004673, + "mc2_stderr": 0.01619848088035427 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5808736717827627, + "acc_stderr": 0.016963995010862796, + "acc_norm": 0.5997638724911453, + "acc_norm_stderr": 0.01684469351050504 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + 
"harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "T3Q-LLM/T3Q-LLM1-CV-v1.0", + "model_sha": "523fab4d827e2f74acb7d809806a104fda8a325d", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git 
a/T3Q-LLM/T3Q-LLM1-CV-v2.0/result_2024-05-09 22:59:00.json b/T3Q-LLM/T3Q-LLM1-CV-v2.0/result_2024-05-09 22:59:00.json new file mode 100644 index 0000000000000000000000000000000000000000..dd6d8442ed20ac0fa5eed86e4ca55f4292637068 --- /dev/null +++ b/T3Q-LLM/T3Q-LLM1-CV-v2.0/result_2024-05-09 22:59:00.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.5435153583617748, + "acc_stderr": 0.014555949760496435, + "acc_norm": 0.6083617747440273, + "acc_norm_stderr": 0.01426412212493822 + }, + "harness|ko_hellaswag|10": { + "acc": 0.5240987851025692, + "acc_stderr": 0.004983982396187368, + "acc_norm": 0.6967735510854411, + "acc_norm_stderr": 0.004587128273935065 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6549707602339181, + "acc_stderr": 0.03645981377388806, + "acc_norm": 0.6549707602339181, + "acc_norm_stderr": 0.03645981377388806 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6796116504854369, + "acc_stderr": 0.04620284082280041, + "acc_norm": 0.6796116504854369, + "acc_norm_stderr": 0.04620284082280041 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.7075351213282248, + "acc_stderr": 0.016267000684598652, + "acc_norm": 0.7075351213282248, + "acc_norm_stderr": 0.016267000684598652 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.04292596718256981, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.04292596718256981 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.5063829787234042, + "acc_stderr": 0.032683358999363345, + "acc_norm": 0.5063829787234042, + "acc_norm_stderr": 0.032683358999363345 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.45180722891566266, + "acc_stderr": 0.038743715565879536, + "acc_norm": 0.45180722891566266, + "acc_norm_stderr": 0.038743715565879536 + }, + 
"harness|ko_mmlu_philosophy|5": { + "acc": 0.5980707395498392, + "acc_stderr": 0.027846476005930477, + "acc_norm": 0.5980707395498392, + "acc_norm_stderr": 0.027846476005930477 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5874439461883408, + "acc_stderr": 0.03304062175449297, + "acc_norm": 0.5874439461883408, + "acc_norm_stderr": 0.03304062175449297 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5801526717557252, + "acc_stderr": 0.04328577215262971, + "acc_norm": 0.5801526717557252, + "acc_norm_stderr": 0.04328577215262971 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7727272727272727, + "acc_stderr": 0.029857515673386414, + "acc_norm": 0.7727272727272727, + "acc_norm_stderr": 0.029857515673386414 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.45517241379310347, + "acc_stderr": 0.04149886942192117, + "acc_norm": 0.45517241379310347, + "acc_norm_stderr": 0.04149886942192117 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.04617034827006717, + "acc_norm": 0.3137254901960784, + "acc_norm_stderr": 0.04617034827006717 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6134453781512605, + "acc_stderr": 0.031631458075523776, + "acc_norm": 0.6134453781512605, + "acc_norm_stderr": 0.031631458075523776 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5820512820512821, + "acc_stderr": 0.025007329882461207, + "acc_norm": 0.5820512820512821, + "acc_norm_stderr": 0.025007329882461207 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + 
}, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.04557239513497751, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.04557239513497751 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3399014778325123, + "acc_stderr": 0.033327690684107895, + "acc_norm": 0.3399014778325123, + "acc_norm_stderr": 0.033327690684107895 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6161290322580645, + "acc_stderr": 0.027666182075539635, + "acc_norm": 0.6161290322580645, + "acc_norm_stderr": 0.027666182075539635 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.782051282051282, + "acc_stderr": 0.027046857630716667, + "acc_norm": 0.782051282051282, + "acc_norm_stderr": 0.027046857630716667 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5547169811320755, + "acc_stderr": 0.030588052974270655, + "acc_norm": 0.5547169811320755, + "acc_norm_stderr": 0.030588052974270655 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5636363636363636, + "acc_stderr": 0.04750185058907296, + "acc_norm": 0.5636363636363636, + "acc_norm_stderr": 0.04750185058907296 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.29259259259259257, + "acc_stderr": 0.02773896963217609, + "acc_norm": 0.29259259259259257, + "acc_norm_stderr": 0.02773896963217609 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.03710185726119995, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.03710185726119995 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.681592039800995, + "acc_stderr": 0.03294118479054095, + "acc_norm": 0.681592039800995, + "acc_norm_stderr": 0.03294118479054095 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4913294797687861, + "acc_stderr": 0.038118909889404105, + "acc_norm": 0.4913294797687861, + "acc_norm_stderr": 0.038118909889404105 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.43915343915343913, + "acc_stderr": 
0.025559920550531013, + "acc_norm": 0.43915343915343913, + "acc_norm_stderr": 0.025559920550531013 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5902777777777778, + "acc_stderr": 0.04112490974670787, + "acc_norm": 0.5902777777777778, + "acc_norm_stderr": 0.04112490974670787 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.71, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.71, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.569364161849711, + "acc_stderr": 0.026658800273672376, + "acc_norm": 0.569364161849711, + "acc_norm_stderr": 0.026658800273672376 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5950920245398773, + "acc_stderr": 0.038566721635489125, + "acc_norm": 0.5950920245398773, + "acc_norm_stderr": 0.038566721635489125 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5987654320987654, + "acc_stderr": 0.027272582849839796, + "acc_norm": 0.5987654320987654, + "acc_norm_stderr": 0.027272582849839796 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7616580310880829, + "acc_stderr": 0.030748905363909878, + "acc_norm": 0.7616580310880829, + "acc_norm_stderr": 0.030748905363909878 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.41228070175438597, + "acc_stderr": 0.04630653203366596, + "acc_norm": 0.41228070175438597, + "acc_norm_stderr": 0.04630653203366596 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.7192660550458716, + "acc_stderr": 0.01926605504587161, + "acc_norm": 0.7192660550458716, + "acc_norm_stderr": 0.01926605504587161 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.373015873015873, + 
"acc_stderr": 0.04325506042017086, + "acc_norm": 0.373015873015873, + "acc_norm_stderr": 0.04325506042017086 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5751633986928104, + "acc_stderr": 0.028304576673141107, + "acc_norm": 0.5751633986928104, + "acc_norm_stderr": 0.028304576673141107 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7024793388429752, + "acc_stderr": 0.04173349148083499, + "acc_norm": 0.7024793388429752, + "acc_norm_stderr": 0.04173349148083499 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.625, + "acc_stderr": 0.039397364351956274, + "acc_norm": 0.625, + "acc_norm_stderr": 0.039397364351956274 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5179738562091504, + "acc_stderr": 0.020214761037872408, + "acc_norm": 0.5179738562091504, + "acc_norm_stderr": 0.020214761037872408 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.37943262411347517, + "acc_stderr": 0.028947338851614105, + "acc_norm": 0.37943262411347517, + "acc_norm_stderr": 0.028947338851614105 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.04547960999764376, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.04547960999764376 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5, + "acc_stderr": 0.034099716973523674, + "acc_norm": 0.5, + "acc_norm_stderr": 0.034099716973523674 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.4424581005586592, + "acc_stderr": 0.01661139368726858, + "acc_norm": 0.4424581005586592, + "acc_norm_stderr": 0.01661139368726858 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.68, + "acc_stderr": 0.046882617226215034, + 
"acc_norm": 0.68, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5477941176470589, + "acc_stderr": 0.030233758551596438, + "acc_norm": 0.5477941176470589, + "acc_norm_stderr": 0.030233758551596438 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5795918367346938, + "acc_stderr": 0.03160106993449601, + "acc_norm": 0.5795918367346938, + "acc_norm_stderr": 0.03160106993449601 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7805907172995781, + "acc_stderr": 0.026939106581553945, + "acc_norm": 0.7805907172995781, + "acc_norm_stderr": 0.026939106581553945 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.4165580182529335, + "acc_stderr": 0.01259115324505739, + "acc_norm": 0.4165580182529335, + "acc_norm_stderr": 0.01259115324505739 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.7009803921568627, + "acc_stderr": 0.03213325717373618, + "acc_norm": 0.7009803921568627, + "acc_norm_stderr": 0.03213325717373618 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6848484848484848, + "acc_stderr": 0.0362773057502241, + "acc_norm": 0.6848484848484848, + "acc_norm_stderr": 0.0362773057502241 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.4944920440636475, + "mc1_stderr": 0.01750243899045107, + "mc2": 0.6608137228678551, + "mc2_stderr": 0.015917590211927863 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.6198347107438017, + "acc_stderr": 0.016689333596980098, + "acc_norm": 0.6257378984651711, + "acc_norm_stderr": 0.016637917789798742 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + 
"harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + 
"harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "T3Q-LLM/T3Q-LLM1-CV-v2.0", + "model_sha": "6fb0adabdb4d74852287e8825dfe448cb0ee20c1", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/T3Q-LLM/T3Q-LLM1-v2.0/result_2024-05-02 12:31:10.json b/T3Q-LLM/T3Q-LLM1-v2.0/result_2024-05-02 12:31:10.json new file mode 100644 index 0000000000000000000000000000000000000000..a231411a0ae0e0352f74dce57060b34c61136485 --- /dev/null +++ b/T3Q-LLM/T3Q-LLM1-v2.0/result_2024-05-02 12:31:10.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4462457337883959, + "acc_stderr": 0.014526705548539985, + "acc_norm": 0.5136518771331058, + "acc_norm_stderr": 0.01460594342986095 + }, + "harness|ko_hellaswag|10": { + "acc": 0.44652459669388567, + "acc_stderr": 0.0049611615892284164, + "acc_norm": 0.6050587532364071, + "acc_norm_stderr": 0.004878390226591714 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6257309941520468, + "acc_stderr": 0.03711601185389482, + "acc_norm": 0.6257309941520468, + "acc_norm_stderr": 0.03711601185389482 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6796116504854369, + "acc_stderr": 0.04620284082280041, + "acc_norm": 0.6796116504854369, + "acc_norm_stderr": 0.04620284082280041 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6807151979565773, + "acc_stderr": 0.016671261749538743, + "acc_norm": 0.6807151979565773, + "acc_norm_stderr": 0.016671261749538743 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4222222222222222, + "acc_stderr": 0.042667634040995814, + 
"acc_norm": 0.4222222222222222, + "acc_norm_stderr": 0.042667634040995814 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.19, + "acc_stderr": 0.03942772444036623, + "acc_norm": 0.19, + "acc_norm_stderr": 0.03942772444036623 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.46808510638297873, + "acc_stderr": 0.03261936918467383, + "acc_norm": 0.46808510638297873, + "acc_norm_stderr": 0.03261936918467383 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4457831325301205, + "acc_stderr": 0.03869543323472101, + "acc_norm": 0.4457831325301205, + "acc_norm_stderr": 0.03869543323472101 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5659163987138264, + "acc_stderr": 0.0281502322445356, + "acc_norm": 0.5659163987138264, + "acc_norm_stderr": 0.0281502322445356 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.57847533632287, + "acc_stderr": 0.03314190222110657, + "acc_norm": 0.57847533632287, + "acc_norm_stderr": 0.03314190222110657 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5343511450381679, + "acc_stderr": 0.04374928560599738, + "acc_norm": 0.5343511450381679, + "acc_norm_stderr": 0.04374928560599738 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6515151515151515, + "acc_stderr": 0.033948539651564025, + "acc_norm": 0.6515151515151515, + "acc_norm_stderr": 0.033948539651564025 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.43448275862068964, + "acc_stderr": 0.04130740879555497, + "acc_norm": 0.43448275862068964, + "acc_norm_stderr": 0.04130740879555497 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.04336432707993176, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.04336432707993176 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5630252100840336, + "acc_stderr": 
0.03221943636566196, + "acc_norm": 0.5630252100840336, + "acc_norm_stderr": 0.03221943636566196 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5256410256410257, + "acc_stderr": 0.025317649726448673, + "acc_norm": 0.5256410256410257, + "acc_norm_stderr": 0.025317649726448673 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.59, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.59, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6851851851851852, + "acc_stderr": 0.04489931073591312, + "acc_norm": 0.6851851851851852, + "acc_norm_stderr": 0.04489931073591312 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.33497536945812806, + "acc_stderr": 0.033208527423483104, + "acc_norm": 0.33497536945812806, + "acc_norm_stderr": 0.033208527423483104 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5741935483870968, + "acc_stderr": 0.028129112709165904, + "acc_norm": 0.5741935483870968, + "acc_norm_stderr": 0.028129112709165904 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7735042735042735, + "acc_stderr": 0.027421007295392926, + "acc_norm": 0.7735042735042735, + "acc_norm_stderr": 0.027421007295392926 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5320754716981132, + "acc_stderr": 0.030709486992556538, + "acc_norm": 0.5320754716981132, + "acc_norm_stderr": 0.030709486992556538 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5363636363636364, + "acc_stderr": 0.047764491623961985, + "acc_norm": 0.5363636363636364, + "acc_norm_stderr": 0.047764491623961985 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.29259259259259257, + "acc_stderr": 0.027738969632176088, + "acc_norm": 0.29259259259259257, + "acc_norm_stderr": 0.027738969632176088 + }, + 
"harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33112582781456956, + "acc_stderr": 0.038425817186598696, + "acc_norm": 0.33112582781456956, + "acc_norm_stderr": 0.038425817186598696 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6318407960199005, + "acc_stderr": 0.03410410565495302, + "acc_norm": 0.6318407960199005, + "acc_norm_stderr": 0.03410410565495302 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.45664739884393063, + "acc_stderr": 0.03798106566014498, + "acc_norm": 0.45664739884393063, + "acc_norm_stderr": 0.03798106566014498 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3994708994708995, + "acc_stderr": 0.02522545028406788, + "acc_norm": 0.3994708994708995, + "acc_norm_stderr": 0.02522545028406788 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5208333333333334, + "acc_stderr": 0.041775789507399935, + "acc_norm": 0.5208333333333334, + "acc_norm_stderr": 0.041775789507399935 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.67, + "acc_stderr": 0.047258156262526066, + "acc_norm": 0.67, + "acc_norm_stderr": 0.047258156262526066 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5202312138728323, + "acc_stderr": 0.026897049996382868, + "acc_norm": 0.5202312138728323, + "acc_norm_stderr": 0.026897049996382868 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5276073619631901, + "acc_stderr": 0.0392237829061099, + "acc_norm": 0.5276073619631901, + "acc_norm_stderr": 0.0392237829061099 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5524691358024691, + "acc_stderr": 0.027667138569422708, + "acc_norm": 0.5524691358024691, + "acc_norm_stderr": 0.027667138569422708 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + 
"harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7409326424870466, + "acc_stderr": 0.031618779179354094, + "acc_norm": 0.7409326424870466, + "acc_norm_stderr": 0.031618779179354094 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.41228070175438597, + "acc_stderr": 0.04630653203366597, + "acc_norm": 0.41228070175438597, + "acc_norm_stderr": 0.04630653203366597 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6880733944954128, + "acc_stderr": 0.01986296797670724, + "acc_norm": 0.6880733944954128, + "acc_norm_stderr": 0.01986296797670724 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.04360314860077459, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.04360314860077459 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5490196078431373, + "acc_stderr": 0.02849199358617156, + "acc_norm": 0.5490196078431373, + "acc_norm_stderr": 0.02849199358617156 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6942148760330579, + "acc_stderr": 0.04205953933884121, + "acc_norm": 0.6942148760330579, + "acc_norm_stderr": 0.04205953933884121 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5855263157894737, + "acc_stderr": 0.04008973785779206, + "acc_norm": 0.5855263157894737, + "acc_norm_stderr": 0.04008973785779206 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.48856209150326796, + "acc_stderr": 0.020222541515610874, + "acc_norm": 0.48856209150326796, + "acc_norm_stderr": 0.020222541515610874 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3262411347517731, + "acc_stderr": 0.02796845304356317, + "acc_norm": 0.3262411347517731, + "acc_norm_stderr": 0.02796845304356317 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.04547960999764376, + "acc_norm": 
0.35714285714285715, + "acc_norm_stderr": 0.04547960999764376 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.0340470532865388, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.0340470532865388 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.19776536312849163, + "acc_stderr": 0.013321620594050948, + "acc_norm": 0.19776536312849163, + "acc_norm_stderr": 0.013321620594050948 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5073529411764706, + "acc_stderr": 0.030369552523902173, + "acc_norm": 0.5073529411764706, + "acc_norm_stderr": 0.030369552523902173 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5591836734693878, + "acc_stderr": 0.03178419114175363, + "acc_norm": 0.5591836734693878, + "acc_norm_stderr": 0.03178419114175363 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7341772151898734, + "acc_stderr": 0.028756799629658335, + "acc_norm": 0.7341772151898734, + "acc_norm_stderr": 0.028756799629658335 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.38722294654498046, + "acc_stderr": 0.012441155326854933, + "acc_norm": 0.38722294654498046, + "acc_norm_stderr": 0.012441155326854933 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6617647058823529, + "acc_stderr": 0.03320574612945431, + "acc_norm": 0.6617647058823529, + "acc_norm_stderr": 0.03320574612945431 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.696969696969697, + "acc_stderr": 0.03588624800091708, + "acc_norm": 0.696969696969697, + "acc_norm_stderr": 0.03588624800091708 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 
0.3671970624235006, + "mc1_stderr": 0.01687480500145318, + "mc2": 0.5519207261004673, + "mc2_stderr": 0.01619848088035427 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5808736717827627, + "acc_stderr": 0.016963995010862796, + "acc_norm": 0.5997638724911453, + "acc_norm_stderr": 0.01684469351050504 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "T3Q-LLM/T3Q-LLM1-v2.0", + "model_sha": "523fab4d827e2f74acb7d809806a104fda8a325d", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/T3Q-LLM/T3Q-LLM2-FP-v1.0/result_2024-05-08 00:08:39.json b/T3Q-LLM/T3Q-LLM2-FP-v1.0/result_2024-05-08 00:08:39.json new file mode 100644 index 0000000000000000000000000000000000000000..74e83968a56ca0c7f17a73a0d2fea6c36b532367 --- /dev/null +++ b/T3Q-LLM/T3Q-LLM2-FP-v1.0/result_2024-05-08 00:08:39.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.7107508532423208, + "acc_stderr": 0.013250012579393443, + "acc_norm": 0.7525597269624573, + "acc_norm_stderr": 0.01261035266329267 + }, + "harness|ko_hellaswag|10": { + "acc": 0.6019717187811193, + "acc_stderr": 0.004884909544477079, 
+ "acc_norm": 0.7401911969727146, + "acc_norm_stderr": 0.004376333451909809 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6608187134502924, + "acc_stderr": 0.03631053496488905, + "acc_norm": 0.6608187134502924, + "acc_norm_stderr": 0.03631053496488905 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6893203883495146, + "acc_stderr": 0.045821241601615506, + "acc_norm": 0.6893203883495146, + "acc_norm_stderr": 0.045821241601615506 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6602809706257982, + "acc_stderr": 0.016936394114301624, + "acc_norm": 0.6602809706257982, + "acc_norm_stderr": 0.016936394114301624 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45185185185185184, + "acc_stderr": 0.04299268905480863, + "acc_norm": 0.45185185185185184, + "acc_norm_stderr": 0.04299268905480863 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.46808510638297873, + "acc_stderr": 0.03261936918467383, + "acc_norm": 0.46808510638297873, + "acc_norm_stderr": 0.03261936918467383 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.5, + "acc_stderr": 0.03892494720807614, + "acc_norm": 0.5, + "acc_norm_stderr": 0.03892494720807614 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5787781350482315, + "acc_stderr": 0.028043399858210635, + "acc_norm": 0.5787781350482315, + "acc_norm_stderr": 0.028043399858210635 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.6322869955156951, + "acc_stderr": 0.03236198350928275, + "acc_norm": 0.6322869955156951, + "acc_norm_stderr": 0.03236198350928275 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5114503816793893, + "acc_stderr": 0.04384140024078016, + "acc_norm": 0.5114503816793893, + "acc_norm_stderr": 0.04384140024078016 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.54, + 
"acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7121212121212122, + "acc_stderr": 0.03225883512300993, + "acc_norm": 0.7121212121212122, + "acc_norm_stderr": 0.03225883512300993 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.503448275862069, + "acc_stderr": 0.04166567577101579, + "acc_norm": 0.503448275862069, + "acc_norm_stderr": 0.04166567577101579 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.04533838195929776, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.04533838195929776 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5966386554621849, + "acc_stderr": 0.031866081214088314, + "acc_norm": 0.5966386554621849, + "acc_norm_stderr": 0.031866081214088314 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5923076923076923, + "acc_stderr": 0.024915243985987854, + "acc_norm": 0.5923076923076923, + "acc_norm_stderr": 0.024915243985987854 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.67, + "acc_stderr": 0.047258156262526094, + "acc_norm": 0.67, + "acc_norm_stderr": 0.047258156262526094 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6111111111111112, + "acc_stderr": 0.0471282125742677, + "acc_norm": 0.6111111111111112, + "acc_norm_stderr": 0.0471282125742677 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4088669950738916, + "acc_stderr": 0.03459058815883233, + "acc_norm": 0.4088669950738916, + "acc_norm_stderr": 0.03459058815883233 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.632258064516129, + "acc_stderr": 0.027430866579973467, + "acc_norm": 0.632258064516129, + "acc_norm_stderr": 0.027430866579973467 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7564102564102564, + "acc_stderr": 
0.028120966503914394, + "acc_norm": 0.7564102564102564, + "acc_norm_stderr": 0.028120966503914394 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.569811320754717, + "acc_stderr": 0.030471445867183238, + "acc_norm": 0.569811320754717, + "acc_norm_stderr": 0.030471445867183238 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5818181818181818, + "acc_stderr": 0.047245774057315726, + "acc_norm": 0.5818181818181818, + "acc_norm_stderr": 0.047245774057315726 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.34444444444444444, + "acc_stderr": 0.028972648884844267, + "acc_norm": 0.34444444444444444, + "acc_norm_stderr": 0.028972648884844267 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33112582781456956, + "acc_stderr": 0.038425817186598696, + "acc_norm": 0.33112582781456956, + "acc_norm_stderr": 0.038425817186598696 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7114427860696517, + "acc_stderr": 0.03203841040213322, + "acc_norm": 0.7114427860696517, + "acc_norm_stderr": 0.03203841040213322 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5144508670520231, + "acc_stderr": 0.03810871630454764, + "acc_norm": 0.5144508670520231, + "acc_norm_stderr": 0.03810871630454764 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.4312169312169312, + "acc_stderr": 0.025506481698138208, + "acc_norm": 0.4312169312169312, + "acc_norm_stderr": 0.025506481698138208 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5347222222222222, + "acc_stderr": 0.04171115858181618, + "acc_norm": 0.5347222222222222, + "acc_norm_stderr": 0.04171115858181618 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.77, + "acc_stderr": 0.04229525846816507, + "acc_norm": 0.77, + "acc_norm_stderr": 0.04229525846816507 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 
0.5722543352601156, + "acc_stderr": 0.026636539741116072, + "acc_norm": 0.5722543352601156, + "acc_norm_stderr": 0.026636539741116072 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.588957055214724, + "acc_stderr": 0.038656978537853624, + "acc_norm": 0.588957055214724, + "acc_norm_stderr": 0.038656978537853624 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6358024691358025, + "acc_stderr": 0.02677492989972234, + "acc_norm": 0.6358024691358025, + "acc_norm_stderr": 0.02677492989972234 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.772020725388601, + "acc_stderr": 0.030276909945178256, + "acc_norm": 0.772020725388601, + "acc_norm_stderr": 0.030276909945178256 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.42105263157894735, + "acc_stderr": 0.046446020912223177, + "acc_norm": 0.42105263157894735, + "acc_norm_stderr": 0.046446020912223177 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.7045871559633028, + "acc_stderr": 0.019560619182976, + "acc_norm": 0.7045871559633028, + "acc_norm_stderr": 0.019560619182976 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4365079365079365, + "acc_stderr": 0.04435932892851466, + "acc_norm": 0.4365079365079365, + "acc_norm_stderr": 0.04435932892851466 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5784313725490197, + "acc_stderr": 0.02827549015679145, + "acc_norm": 0.5784313725490197, + "acc_norm_stderr": 0.02827549015679145 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.64, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.64, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.768595041322314, + "acc_stderr": 0.03849856098794088, + "acc_norm": 0.768595041322314, + "acc_norm_stderr": 0.03849856098794088 + }, + 
"harness|ko_mmlu_astronomy|5": { + "acc": 0.6513157894736842, + "acc_stderr": 0.03878139888797611, + "acc_norm": 0.6513157894736842, + "acc_norm_stderr": 0.03878139888797611 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5408496732026143, + "acc_stderr": 0.020160213617222516, + "acc_norm": 0.5408496732026143, + "acc_norm_stderr": 0.020160213617222516 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.37943262411347517, + "acc_stderr": 0.028947338851614098, + "acc_norm": 0.37943262411347517, + "acc_norm_stderr": 0.028947338851614098 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.36607142857142855, + "acc_stderr": 0.0457237235873743, + "acc_norm": 0.36607142857142855, + "acc_norm_stderr": 0.0457237235873743 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5138888888888888, + "acc_stderr": 0.03408655867977748, + "acc_norm": 0.5138888888888888, + "acc_norm_stderr": 0.03408655867977748 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.32625698324022345, + "acc_stderr": 0.01568044151888918, + "acc_norm": 0.32625698324022345, + "acc_norm_stderr": 0.01568044151888918 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.48, + "acc_stderr": 0.05021167315686779, + "acc_norm": 0.48, + "acc_norm_stderr": 0.05021167315686779 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.71, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.71, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5477941176470589, + "acc_stderr": 0.03023375855159644, + "acc_norm": 0.5477941176470589, + "acc_norm_stderr": 0.03023375855159644 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6448979591836734, + "acc_stderr": 0.030635655150387638, + "acc_norm": 0.6448979591836734, + "acc_norm_stderr": 0.030635655150387638 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7257383966244726, + "acc_stderr": 0.029041333510598025, + 
"acc_norm": 0.7257383966244726, + "acc_norm_stderr": 0.029041333510598025 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.42242503259452413, + "acc_stderr": 0.01261560047573493, + "acc_norm": 0.42242503259452413, + "acc_norm_stderr": 0.01261560047573493 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6519607843137255, + "acc_stderr": 0.03343311240488418, + "acc_norm": 0.6519607843137255, + "acc_norm_stderr": 0.03343311240488418 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.036810508691615486, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.036810508691615486 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.6903304773561811, + "mc1_stderr": 0.016185744355144922, + "mc2": 0.7746340762004879, + "mc2_stderr": 0.013932769514766515 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4982290436835891, + "acc_stderr": 0.017190246276231863, + "acc_norm": 0.51357733175915, + "acc_norm_stderr": 0.01718401506040145 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + 
"harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "T3Q-LLM/T3Q-LLM2-FP-v1.0", + "model_sha": "290d9ccce695529a194958ce348f9d4182f7fe81", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + 
"override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/T3Q-LLM/T3Q-LLM2-FP-v2.0/result_2024-05-12 04:44:25.json b/T3Q-LLM/T3Q-LLM2-FP-v2.0/result_2024-05-12 04:44:25.json new file mode 100644 index 0000000000000000000000000000000000000000..923a590c00e7014b474c474318ef5829a94fae0f --- /dev/null +++ b/T3Q-LLM/T3Q-LLM2-FP-v2.0/result_2024-05-12 04:44:25.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.6860068259385665, + "acc_stderr": 0.013562691224726297, + "acc_norm": 0.7303754266211604, + "acc_norm_stderr": 0.012968040686869159 + }, + "harness|ko_hellaswag|10": { + "acc": 0.522405895239992, + "acc_stderr": 0.004984768912326951, + "acc_norm": 0.6799442342162916, + "acc_norm_stderr": 0.004655442766599439 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6374269005847953, + "acc_stderr": 0.0368713061556206, + "acc_norm": 0.6374269005847953, + "acc_norm_stderr": 0.0368713061556206 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.7378640776699029, + "acc_stderr": 0.043546310772605956, + "acc_norm": 0.7378640776699029, + "acc_norm_stderr": 0.043546310772605956 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6845466155810983, + "acc_stderr": 0.0166175017387634, + "acc_norm": 0.6845466155810983, + "acc_norm_stderr": 0.0166175017387634 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45185185185185184, + "acc_stderr": 0.04299268905480863, + "acc_norm": 0.45185185185185184, + "acc_norm_stderr": 0.04299268905480863 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.5106382978723404, + "acc_stderr": 0.03267862331014063, + "acc_norm": 0.5106382978723404, + "acc_norm_stderr": 0.03267862331014063 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4939759036144578, + "acc_stderr": 0.03892212195333047, + "acc_norm": 
0.4939759036144578, + "acc_norm_stderr": 0.03892212195333047 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6302250803858521, + "acc_stderr": 0.027417996705630998, + "acc_norm": 0.6302250803858521, + "acc_norm_stderr": 0.027417996705630998 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.6636771300448431, + "acc_stderr": 0.031708824268455, + "acc_norm": 0.6636771300448431, + "acc_norm_stderr": 0.031708824268455 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5419847328244275, + "acc_stderr": 0.04369802690578757, + "acc_norm": 0.5419847328244275, + "acc_norm_stderr": 0.04369802690578757 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.55, + "acc_stderr": 0.05, + "acc_norm": 0.55, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7171717171717171, + "acc_stderr": 0.03208779558786752, + "acc_norm": 0.7171717171717171, + "acc_norm_stderr": 0.03208779558786752 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5379310344827586, + "acc_stderr": 0.04154659671707548, + "acc_norm": 0.5379310344827586, + "acc_norm_stderr": 0.04154659671707548 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.04389869956808778, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.04389869956808778 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6092436974789915, + "acc_stderr": 0.031693802357129965, + "acc_norm": 0.6092436974789915, + "acc_norm_stderr": 0.031693802357129965 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.6128205128205129, + "acc_stderr": 0.024697216930878937, + "acc_norm": 0.6128205128205129, + "acc_norm_stderr": 0.024697216930878937 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.69, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.69, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + 
"acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6018518518518519, + "acc_stderr": 0.04732332615978813, + "acc_norm": 0.6018518518518519, + "acc_norm_stderr": 0.04732332615978813 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.034653044884067945, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.034653044884067945 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.632258064516129, + "acc_stderr": 0.02743086657997347, + "acc_norm": 0.632258064516129, + "acc_norm_stderr": 0.02743086657997347 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7991452991452992, + "acc_stderr": 0.02624677294689048, + "acc_norm": 0.7991452991452992, + "acc_norm_stderr": 0.02624677294689048 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5773584905660377, + "acc_stderr": 0.03040233144576954, + "acc_norm": 0.5773584905660377, + "acc_norm_stderr": 0.03040233144576954 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5909090909090909, + "acc_stderr": 0.04709306978661895, + "acc_norm": 0.5909090909090909, + "acc_norm_stderr": 0.04709306978661895 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.34074074074074073, + "acc_stderr": 0.028897748741131147, + "acc_norm": 0.34074074074074073, + "acc_norm_stderr": 0.028897748741131147 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3576158940397351, + "acc_stderr": 0.03913453431177258, + "acc_norm": 0.3576158940397351, + "acc_norm_stderr": 0.03913453431177258 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.736318407960199, + "acc_stderr": 0.03115715086935556, + "acc_norm": 0.736318407960199, + "acc_norm_stderr": 0.03115715086935556 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5549132947976878, + "acc_stderr": 0.03789401760283647, + "acc_norm": 0.5549132947976878, + "acc_norm_stderr": 0.03789401760283647 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 
0.4021164021164021, + "acc_stderr": 0.02525303255499769, + "acc_norm": 0.4021164021164021, + "acc_norm_stderr": 0.02525303255499769 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5416666666666666, + "acc_stderr": 0.04166666666666665, + "acc_norm": 0.5416666666666666, + "acc_norm_stderr": 0.04166666666666665 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.76, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.76, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5924855491329479, + "acc_stderr": 0.026454578146931505, + "acc_norm": 0.5924855491329479, + "acc_norm_stderr": 0.026454578146931505 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.6319018404907976, + "acc_stderr": 0.03789213935838396, + "acc_norm": 0.6319018404907976, + "acc_norm_stderr": 0.03789213935838396 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6450617283950617, + "acc_stderr": 0.026624152478845853, + "acc_norm": 0.6450617283950617, + "acc_norm_stderr": 0.026624152478845853 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.772020725388601, + "acc_stderr": 0.030276909945178256, + "acc_norm": 0.772020725388601, + "acc_norm_stderr": 0.030276909945178256 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.43859649122807015, + "acc_stderr": 0.04668000738510455, + "acc_norm": 0.43859649122807015, + "acc_norm_stderr": 0.04668000738510455 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.7174311926605504, + "acc_stderr": 0.01930424349770715, + "acc_norm": 0.7174311926605504, + "acc_norm_stderr": 0.01930424349770715 + }, + "harness|ko_mmlu_formal_logic|5": { + 
"acc": 0.4523809523809524, + "acc_stderr": 0.044518079590553275, + "acc_norm": 0.4523809523809524, + "acc_norm_stderr": 0.044518079590553275 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.6111111111111112, + "acc_stderr": 0.027914055510468, + "acc_norm": 0.6111111111111112, + "acc_norm_stderr": 0.027914055510468 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.68, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.68, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.8099173553719008, + "acc_stderr": 0.03581796951709282, + "acc_norm": 0.8099173553719008, + "acc_norm_stderr": 0.03581796951709282 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.618421052631579, + "acc_stderr": 0.03953173377749194, + "acc_norm": 0.618421052631579, + "acc_norm_stderr": 0.03953173377749194 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5506535947712419, + "acc_stderr": 0.020123766528027266, + "acc_norm": 0.5506535947712419, + "acc_norm_stderr": 0.020123766528027266 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.36879432624113473, + "acc_stderr": 0.028782227561347247, + "acc_norm": 0.36879432624113473, + "acc_norm_stderr": 0.028782227561347247 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4017857142857143, + "acc_stderr": 0.04653333146973646, + "acc_norm": 0.4017857142857143, + "acc_norm_stderr": 0.04653333146973646 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5138888888888888, + "acc_stderr": 0.03408655867977748, + "acc_norm": 0.5138888888888888, + "acc_norm_stderr": 0.03408655867977748 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.30502793296089387, + "acc_stderr": 0.015398723510916715, + "acc_norm": 0.30502793296089387, + "acc_norm_stderr": 0.015398723510916715 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + 
"harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.67, + "acc_stderr": 0.047258156262526094, + "acc_norm": 0.67, + "acc_norm_stderr": 0.047258156262526094 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5514705882352942, + "acc_stderr": 0.030211479609121596, + "acc_norm": 0.5514705882352942, + "acc_norm_stderr": 0.030211479609121596 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6938775510204082, + "acc_stderr": 0.02950489645459597, + "acc_norm": 0.6938775510204082, + "acc_norm_stderr": 0.02950489645459597 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7172995780590717, + "acc_stderr": 0.029312814153955934, + "acc_norm": 0.7172995780590717, + "acc_norm_stderr": 0.029312814153955934 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.4348109517601043, + "acc_stderr": 0.01266123380561627, + "acc_norm": 0.4348109517601043, + "acc_norm_stderr": 0.01266123380561627 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.03308611113236437, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.03308611113236437 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.036810508691615486, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.036810508691615486 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.6181150550795593, + "mc1_stderr": 0.01700810193916349, + "mc2": 0.7245345695347405, + "mc2_stderr": 0.014413813713662276 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.51357733175915, + "acc_stderr": 0.01718401506040145, + "acc_norm": 0.5360094451003542, + "acc_norm_stderr": 0.017145715365486664 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + 
"harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + 
"harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "T3Q-LLM/T3Q-LLM2-FP-v2.0", + "model_sha": "fbd1c247b84b94689838632148824c6cba4c645d", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/T3Q-LLM/T3Q-LLM3-NC-v1.0/result_2024-05-09 08:08:07.json b/T3Q-LLM/T3Q-LLM3-NC-v1.0/result_2024-05-09 08:08:07.json new file mode 100644 index 0000000000000000000000000000000000000000..397ca3284cbc924e568c05d58d0aa3d79d2086ed --- /dev/null +++ b/T3Q-LLM/T3Q-LLM3-NC-v1.0/result_2024-05-09 08:08:07.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3225255972696246, + "acc_stderr": 0.013659980894277371, + "acc_norm": 0.34982935153583616, + "acc_norm_stderr": 0.01393680921215828 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3381796454889464, + "acc_stderr": 0.004721231637092728, + "acc_norm": 0.4182433778131846, + "acc_norm_stderr": 0.004922624636945241 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.391812865497076, + "acc_stderr": 0.037439798259264016, + "acc_norm": 0.391812865497076, + "acc_norm_stderr": 0.037439798259264016 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2912621359223301, + "acc_stderr": 0.04498676320572924, + "acc_norm": 0.2912621359223301, + "acc_norm_stderr": 0.04498676320572924 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3652618135376756, + "acc_stderr": 0.017218530028838636, + "acc_norm": 0.3652618135376756, + "acc_norm_stderr": 
0.017218530028838636 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.35555555555555557, + "acc_stderr": 0.04135176749720386, + "acc_norm": 0.35555555555555557, + "acc_norm_stderr": 0.04135176749720386 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.28936170212765955, + "acc_stderr": 0.02964400657700962, + "acc_norm": 0.28936170212765955, + "acc_norm_stderr": 0.02964400657700962 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3132530120481928, + "acc_stderr": 0.03610805018031023, + "acc_norm": 0.3132530120481928, + "acc_norm_stderr": 0.03610805018031023 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3633440514469453, + "acc_stderr": 0.027316847674192717, + "acc_norm": 0.3633440514469453, + "acc_norm_stderr": 0.027316847674192717 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3901345291479821, + "acc_stderr": 0.03273766725459156, + "acc_norm": 0.3901345291479821, + "acc_norm_stderr": 0.03273766725459156 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3511450381679389, + "acc_stderr": 0.04186445163013751, + "acc_norm": 0.3511450381679389, + "acc_norm_stderr": 0.04186445163013751 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.2676767676767677, + "acc_stderr": 0.031544498882702866, + "acc_norm": 0.2676767676767677, + "acc_norm_stderr": 0.031544498882702866 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3586206896551724, + "acc_stderr": 0.03996629574876718, + "acc_norm": 0.3586206896551724, + "acc_norm_stderr": 0.03996629574876718 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237657, + "acc_norm": 0.21568627450980393, + 
"acc_norm_stderr": 0.04092563958237657 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.31512605042016806, + "acc_stderr": 0.03017680828897434, + "acc_norm": 0.31512605042016806, + "acc_norm_stderr": 0.03017680828897434 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3, + "acc_stderr": 0.023234581088428498, + "acc_norm": 0.3, + "acc_norm_stderr": 0.023234581088428498 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.37962962962962965, + "acc_stderr": 0.04691521224077742, + "acc_norm": 0.37962962962962965, + "acc_norm_stderr": 0.04691521224077742 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.27586206896551724, + "acc_stderr": 0.031447125816782405, + "acc_norm": 0.27586206896551724, + "acc_norm_stderr": 0.031447125816782405 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.32903225806451614, + "acc_stderr": 0.026729499068349965, + "acc_norm": 0.32903225806451614, + "acc_norm_stderr": 0.026729499068349965 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.49145299145299143, + "acc_stderr": 0.032751303000970296, + "acc_norm": 0.49145299145299143, + "acc_norm_stderr": 0.032751303000970296 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3169811320754717, + "acc_stderr": 0.02863723563980092, + "acc_norm": 0.3169811320754717, + "acc_norm_stderr": 0.02863723563980092 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.3181818181818182, + "acc_stderr": 0.04461272175910507, + "acc_norm": 0.3181818181818182, + "acc_norm_stderr": 0.04461272175910507 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.23703703703703705, + "acc_stderr": 0.025928876132766114, + 
"acc_norm": 0.23703703703703705, + "acc_norm_stderr": 0.025928876132766114 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.03780445850526732, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.03780445850526732 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.36318407960199006, + "acc_stderr": 0.034005985055990146, + "acc_norm": 0.36318407960199006, + "acc_norm_stderr": 0.034005985055990146 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.31213872832369943, + "acc_stderr": 0.035331333893236574, + "acc_norm": 0.31213872832369943, + "acc_norm_stderr": 0.035331333893236574 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2671957671957672, + "acc_stderr": 0.022789673145776564, + "acc_norm": 0.2671957671957672, + "acc_norm_stderr": 0.022789673145776564 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3125, + "acc_stderr": 0.038760854559127644, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.038760854559127644 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.23, + "acc_stderr": 0.042295258468165065, + "acc_norm": 0.23, + "acc_norm_stderr": 0.042295258468165065 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.315028901734104, + "acc_stderr": 0.02500931379006971, + "acc_norm": 0.315028901734104, + "acc_norm_stderr": 0.02500931379006971 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3067484662576687, + "acc_stderr": 0.03623089915724147, + "acc_norm": 0.3067484662576687, + "acc_norm_stderr": 0.03623089915724147 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.345679012345679, + "acc_stderr": 0.026462487777001893, + "acc_norm": 0.345679012345679, + "acc_norm_stderr": 0.026462487777001893 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + 
"acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.32124352331606215, + "acc_stderr": 0.033699508685490674, + "acc_norm": 0.32124352331606215, + "acc_norm_stderr": 0.033699508685490674 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.04227054451232199, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.04227054451232199 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.27889908256880735, + "acc_stderr": 0.01922746887646352, + "acc_norm": 0.27889908256880735, + "acc_norm_stderr": 0.01922746887646352 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.15873015873015872, + "acc_stderr": 0.03268454013011744, + "acc_norm": 0.15873015873015872, + "acc_norm_stderr": 0.03268454013011744 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2908496732026144, + "acc_stderr": 0.026004800363952113, + "acc_norm": 0.2908496732026144, + "acc_norm_stderr": 0.026004800363952113 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.4214876033057851, + "acc_stderr": 0.04507732278775094, + "acc_norm": 0.4214876033057851, + "acc_norm_stderr": 0.04507732278775094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.2236842105263158, + "acc_stderr": 0.03391160934343602, + "acc_norm": 0.2236842105263158, + "acc_norm_stderr": 0.03391160934343602 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2973856209150327, + "acc_stderr": 0.018492596536396955, + "acc_norm": 0.2973856209150327, + "acc_norm_stderr": 0.018492596536396955 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3049645390070922, + "acc_stderr": 0.027464708442022128, + "acc_norm": 0.3049645390070922, + "acc_norm_stderr": 0.027464708442022128 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 
0.3482142857142857, + "acc_stderr": 0.045218299028335865, + "acc_norm": 0.3482142857142857, + "acc_norm_stderr": 0.045218299028335865 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.17592592592592593, + "acc_stderr": 0.025967420958258533, + "acc_norm": 0.17592592592592593, + "acc_norm_stderr": 0.025967420958258533 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23016759776536314, + "acc_stderr": 0.014078339253425809, + "acc_norm": 0.23016759776536314, + "acc_norm_stderr": 0.014078339253425809 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909282, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909282 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.024562204314142314, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.024562204314142314 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.20816326530612245, + "acc_stderr": 0.025991117672813292, + "acc_norm": 0.20816326530612245, + "acc_norm_stderr": 0.025991117672813292 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.4177215189873418, + "acc_stderr": 0.032103530322412685, + "acc_norm": 0.4177215189873418, + "acc_norm_stderr": 0.032103530322412685 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.27183833116036504, + "acc_stderr": 0.011363135278651418, + "acc_norm": 0.27183833116036504, + "acc_norm_stderr": 0.011363135278651418 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.030964517926923403, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.030964517926923403 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.28484848484848485, + "acc_stderr": 0.035243908445117836, + "acc_norm": 
0.28484848484848485, + "acc_norm_stderr": 0.035243908445117836 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.28518971848225216, + "mc1_stderr": 0.015805827874454895, + "mc2": 0.4632805233224405, + "mc2_stderr": 0.01565468325038931 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.282172373081464, + "acc_stderr": 0.01547327158398843, + "acc_norm": 0.3482880755608028, + "acc_norm_stderr": 0.016379926739148037 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + 
"harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "T3Q-LLM/T3Q-LLM3-NC-v1.0", + "model_sha": "6e7affbc1ca332e68bac7425eeff26363bfa914f", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/TIGER-Lab/MAmmoTH2-7B-Plus/result_2024-05-13 17:04:51.json b/TIGER-Lab/MAmmoTH2-7B-Plus/result_2024-05-13 17:04:51.json new file mode 100644 index 0000000000000000000000000000000000000000..fb5c8213653a40fdd9b63ca8b3608b297202a749 --- /dev/null +++ b/TIGER-Lab/MAmmoTH2-7B-Plus/result_2024-05-13 17:04:51.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3148464163822526, + "acc_stderr": 0.01357265770308495, + "acc_norm": 0.378839590443686, + "acc_norm_stderr": 0.014175915490000319 + }, + 
"harness|ko_hellaswag|10": { + "acc": 0.3392750448117905, + "acc_stderr": 0.004724956665879975, + "acc_norm": 0.40928101971718783, + "acc_norm_stderr": 0.004906962980328287 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.39766081871345027, + "acc_stderr": 0.0375363895576169, + "acc_norm": 0.39766081871345027, + "acc_norm_stderr": 0.0375363895576169 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5339805825242718, + "acc_stderr": 0.04939291447273482, + "acc_norm": 0.5339805825242718, + "acc_norm_stderr": 0.04939291447273482 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.44699872286079184, + "acc_stderr": 0.01777922523339421, + "acc_norm": 0.44699872286079184, + "acc_norm_stderr": 0.01777922523339421 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.32592592592592595, + "acc_stderr": 0.040491220417025055, + "acc_norm": 0.32592592592592595, + "acc_norm_stderr": 0.040491220417025055 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.40425531914893614, + "acc_stderr": 0.03208115750788684, + "acc_norm": 0.40425531914893614, + "acc_norm_stderr": 0.03208115750788684 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.40963855421686746, + "acc_stderr": 0.03828401115079022, + "acc_norm": 0.40963855421686746, + "acc_norm_stderr": 0.03828401115079022 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.44694533762057875, + "acc_stderr": 0.02823776942208533, + "acc_norm": 0.44694533762057875, + "acc_norm_stderr": 0.02823776942208533 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4663677130044843, + "acc_stderr": 0.033481800170603065, + "acc_norm": 0.4663677130044843, + "acc_norm_stderr": 0.033481800170603065 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4198473282442748, + "acc_stderr": 0.043285772152629715, + "acc_norm": 0.4198473282442748, + "acc_norm_stderr": 
0.043285772152629715 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5353535353535354, + "acc_stderr": 0.035534363688280626, + "acc_norm": 0.5353535353535354, + "acc_norm_stderr": 0.035534363688280626 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.45517241379310347, + "acc_stderr": 0.04149886942192117, + "acc_norm": 0.45517241379310347, + "acc_norm_stderr": 0.04149886942192117 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.043364327079931785, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.043364327079931785 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.592436974789916, + "acc_stderr": 0.03191863374478465, + "acc_norm": 0.592436974789916, + "acc_norm_stderr": 0.03191863374478465 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.43333333333333335, + "acc_stderr": 0.02512465352588513, + "acc_norm": 0.43333333333333335, + "acc_norm_stderr": 0.02512465352588513 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5092592592592593, + "acc_stderr": 0.04832853553437056, + "acc_norm": 0.5092592592592593, + "acc_norm_stderr": 0.04832853553437056 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39408866995073893, + "acc_stderr": 0.03438157967036543, + "acc_norm": 0.39408866995073893, + "acc_norm_stderr": 0.03438157967036543 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4161290322580645, + "acc_stderr": 0.028040981380761554, + "acc_norm": 
0.4161290322580645, + "acc_norm_stderr": 0.028040981380761554 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6965811965811965, + "acc_stderr": 0.03011821010694263, + "acc_norm": 0.6965811965811965, + "acc_norm_stderr": 0.03011821010694263 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.49056603773584906, + "acc_stderr": 0.03076739470780808, + "acc_norm": 0.49056603773584906, + "acc_norm_stderr": 0.03076739470780808 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5, + "acc_stderr": 0.04789131426105757, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04789131426105757 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.31851851851851853, + "acc_stderr": 0.028406533090608463, + "acc_norm": 0.31851851851851853, + "acc_norm_stderr": 0.028406533090608463 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3509933774834437, + "acc_stderr": 0.03896981964257374, + "acc_norm": 0.3509933774834437, + "acc_norm_stderr": 0.03896981964257374 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5522388059701493, + "acc_stderr": 0.03516184772952167, + "acc_norm": 0.5522388059701493, + "acc_norm_stderr": 0.03516184772952167 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3583815028901734, + "acc_stderr": 0.036563436533531585, + "acc_norm": 0.3583815028901734, + "acc_norm_stderr": 0.036563436533531585 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3994708994708995, + "acc_stderr": 0.025225450284067873, + "acc_norm": 0.3994708994708995, + "acc_norm_stderr": 0.025225450284067873 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3680555555555556, + "acc_stderr": 0.040329990539607195, + "acc_norm": 0.3680555555555556, + "acc_norm_stderr": 0.040329990539607195 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.55, + "acc_stderr": 0.05, + "acc_norm": 
0.55, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.48554913294797686, + "acc_stderr": 0.026907849856282532, + "acc_norm": 0.48554913294797686, + "acc_norm_stderr": 0.026907849856282532 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5030674846625767, + "acc_stderr": 0.03928297078179663, + "acc_norm": 0.5030674846625767, + "acc_norm_stderr": 0.03928297078179663 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4228395061728395, + "acc_stderr": 0.027487472980871598, + "acc_norm": 0.4228395061728395, + "acc_norm_stderr": 0.027487472980871598 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110175, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110175 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.42487046632124353, + "acc_stderr": 0.035674713352125395, + "acc_norm": 0.42487046632124353, + "acc_norm_stderr": 0.035674713352125395 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3157894736842105, + "acc_stderr": 0.04372748290278007, + "acc_norm": 0.3157894736842105, + "acc_norm_stderr": 0.04372748290278007 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.43853211009174314, + "acc_stderr": 0.021274713073954565, + "acc_norm": 0.43853211009174314, + "acc_norm_stderr": 0.021274713073954565 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.0442626668137991, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.0442626668137991 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.47058823529411764, + "acc_stderr": 0.028580341065138296, + "acc_norm": 0.47058823529411764, + "acc_norm_stderr": 0.028580341065138296 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6115702479338843, + "acc_stderr": 0.044492703500683815, + 
"acc_norm": 0.6115702479338843, + "acc_norm_stderr": 0.044492703500683815 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3881578947368421, + "acc_stderr": 0.03965842097512744, + "acc_norm": 0.3881578947368421, + "acc_norm_stderr": 0.03965842097512744 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3660130718954248, + "acc_stderr": 0.01948802574552966, + "acc_norm": 0.3660130718954248, + "acc_norm_stderr": 0.01948802574552966 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3971631205673759, + "acc_stderr": 0.029189805673587088, + "acc_norm": 0.3971631205673759, + "acc_norm_stderr": 0.029189805673587088 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4017857142857143, + "acc_stderr": 0.04653333146973646, + "acc_norm": 0.4017857142857143, + "acc_norm_stderr": 0.04653333146973646 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.37962962962962965, + "acc_stderr": 0.03309682581119035, + "acc_norm": 0.37962962962962965, + "acc_norm_stderr": 0.03309682581119035 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23016759776536314, + "acc_stderr": 0.014078339253425819, + "acc_norm": 0.23016759776536314, + "acc_norm_stderr": 0.014078339253425819 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.26838235294117646, + "acc_stderr": 0.0269174812243772, + "acc_norm": 0.26838235294117646, + "acc_norm_stderr": 0.0269174812243772 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5265306122448979, + "acc_stderr": 0.031964127345232726, + "acc_norm": 0.5265306122448979, + "acc_norm_stderr": 0.031964127345232726 + }, + "harness|ko_mmlu_high_school_world_history|5": { + 
"acc": 0.569620253164557, + "acc_stderr": 0.03223017195937599, + "acc_norm": 0.569620253164557, + "acc_norm_stderr": 0.03223017195937599 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.32073011734028684, + "acc_stderr": 0.01192119999178262, + "acc_norm": 0.32073011734028684, + "acc_norm_stderr": 0.01192119999178262 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.032566854844603886, + "acc_norm": 0.3137254901960784, + "acc_norm_stderr": 0.032566854844603886 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3393939393939394, + "acc_stderr": 0.03697442205031595, + "acc_norm": 0.3393939393939394, + "acc_norm_stderr": 0.03697442205031595 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3157894736842105, + "mc1_stderr": 0.01627228795791693, + "mc2": 0.5013882170125173, + "mc2_stderr": 0.016079558760905017 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3907910271546635, + "acc_stderr": 0.016775298465108245, + "acc_norm": 0.42621015348288077, + "acc_norm_stderr": 0.017002122609489263 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + 
"harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "TIGER-Lab/MAmmoTH2-7B-Plus", + "model_sha": "dd37ba4789b04ba59116fa9b0b90bcc0c5f3df15", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 
0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/TIGER-Lab/MAmmoTH2-8B-Plus/result_2024-05-13 17:04:55.json b/TIGER-Lab/MAmmoTH2-8B-Plus/result_2024-05-13 17:04:55.json new file mode 100644 index 0000000000000000000000000000000000000000..2fe2d4f3b79573df45370606237b7142dc31d0e7 --- /dev/null +++ b/TIGER-Lab/MAmmoTH2-8B-Plus/result_2024-05-13 17:04:55.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3873720136518771, + "acc_stderr": 0.014235872487909869, + "acc_norm": 0.44795221843003413, + "acc_norm_stderr": 0.014532011498211672 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3767177853017327, + "acc_stderr": 0.004835728903731406, + "acc_norm": 0.4856602270464051, + "acc_norm_stderr": 0.004987728900897592 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5380116959064327, + "acc_stderr": 0.038237270928823064, + "acc_norm": 0.5380116959064327, + "acc_norm_stderr": 0.038237270928823064 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6213592233009708, + "acc_stderr": 0.048026946982589726, + "acc_norm": 0.6213592233009708, + "acc_norm_stderr": 0.048026946982589726 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5146871008939975, + "acc_stderr": 0.01787224802442913, + "acc_norm": 0.5146871008939975, + "acc_norm_stderr": 0.01787224802442913 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3851851851851852, + "acc_stderr": 0.042039210401562783, + "acc_norm": 0.3851851851851852, + "acc_norm_stderr": 0.042039210401562783 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.46382978723404256, + "acc_stderr": 0.03260038511835772, + "acc_norm": 0.46382978723404256, + "acc_norm_stderr": 0.03260038511835772 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3855421686746988, + 
"acc_stderr": 0.037891344246115496, + "acc_norm": 0.3855421686746988, + "acc_norm_stderr": 0.037891344246115496 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5080385852090032, + "acc_stderr": 0.02839442137098453, + "acc_norm": 0.5080385852090032, + "acc_norm_stderr": 0.02839442137098453 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5201793721973094, + "acc_stderr": 0.033530461674123005, + "acc_norm": 0.5201793721973094, + "acc_norm_stderr": 0.033530461674123005 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4198473282442748, + "acc_stderr": 0.04328577215262972, + "acc_norm": 0.4198473282442748, + "acc_norm_stderr": 0.04328577215262972 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.03540294377095368, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.03540294377095368 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.45517241379310347, + "acc_stderr": 0.04149886942192117, + "acc_norm": 0.45517241379310347, + "acc_norm_stderr": 0.04149886942192117 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.04488482852329017, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.04488482852329017 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5462184873949579, + "acc_stderr": 0.032339434681820885, + "acc_norm": 0.5462184873949579, + "acc_norm_stderr": 0.032339434681820885 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.47692307692307695, + "acc_stderr": 0.025323990861736125, + "acc_norm": 0.47692307692307695, + "acc_norm_stderr": 0.025323990861736125 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.52, + "acc_stderr": 0.05021167315686779, + "acc_norm": 0.52, + "acc_norm_stderr": 0.05021167315686779 + }, + 
"harness|ko_mmlu_global_facts|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5277777777777778, + "acc_stderr": 0.04826217294139894, + "acc_norm": 0.5277777777777778, + "acc_norm_stderr": 0.04826217294139894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4482758620689655, + "acc_stderr": 0.03499113137676744, + "acc_norm": 0.4482758620689655, + "acc_norm_stderr": 0.03499113137676744 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5, + "acc_stderr": 0.028444006199428714, + "acc_norm": 0.5, + "acc_norm_stderr": 0.028444006199428714 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7393162393162394, + "acc_stderr": 0.02876034895652341, + "acc_norm": 0.7393162393162394, + "acc_norm_stderr": 0.02876034895652341 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5132075471698113, + "acc_stderr": 0.030762134874500476, + "acc_norm": 0.5132075471698113, + "acc_norm_stderr": 0.030762134874500476 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5181818181818182, + "acc_stderr": 0.04785964010794916, + "acc_norm": 0.5181818181818182, + "acc_norm_stderr": 0.04785964010794916 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.36666666666666664, + "acc_stderr": 0.029381620726465076, + "acc_norm": 0.36666666666666664, + "acc_norm_stderr": 0.029381620726465076 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3509933774834437, + "acc_stderr": 0.03896981964257375, + "acc_norm": 0.3509933774834437, + "acc_norm_stderr": 0.03896981964257375 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5671641791044776, + "acc_stderr": 0.03503490923673281, + "acc_norm": 0.5671641791044776, + "acc_norm_stderr": 0.03503490923673281 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.42196531791907516, + "acc_stderr": 0.03765746693865151, + "acc_norm": 0.42196531791907516, + "acc_norm_stderr": 
0.03765746693865151 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.455026455026455, + "acc_stderr": 0.02564692836104939, + "acc_norm": 0.455026455026455, + "acc_norm_stderr": 0.02564692836104939 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4513888888888889, + "acc_stderr": 0.04161402398403279, + "acc_norm": 0.4513888888888889, + "acc_norm_stderr": 0.04161402398403279 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.65, + "acc_stderr": 0.047937248544110175, + "acc_norm": 0.65, + "acc_norm_stderr": 0.047937248544110175 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5057803468208093, + "acc_stderr": 0.026917296179149123, + "acc_norm": 0.5057803468208093, + "acc_norm_stderr": 0.026917296179149123 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.49079754601226994, + "acc_stderr": 0.039277056007874414, + "acc_norm": 0.49079754601226994, + "acc_norm_stderr": 0.039277056007874414 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5246913580246914, + "acc_stderr": 0.02778680093142745, + "acc_norm": 0.5246913580246914, + "acc_norm_stderr": 0.02778680093142745 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5544041450777202, + "acc_stderr": 0.03587014986075658, + "acc_norm": 0.5544041450777202, + "acc_norm_stderr": 0.03587014986075658 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3508771929824561, + "acc_stderr": 0.044895393502706986, + "acc_norm": 0.3508771929824561, + "acc_norm_stderr": 0.044895393502706986 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5944954128440367, + "acc_stderr": 0.02105099799189684, + "acc_norm": 0.5944954128440367, + 
"acc_norm_stderr": 0.02105099799189684 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.40476190476190477, + "acc_stderr": 0.04390259265377561, + "acc_norm": 0.40476190476190477, + "acc_norm_stderr": 0.04390259265377561 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5, + "acc_stderr": 0.028629916715693413, + "acc_norm": 0.5, + "acc_norm_stderr": 0.028629916715693413 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.043913262867240704, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.043913262867240704 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4868421052631579, + "acc_stderr": 0.04067533136309174, + "acc_norm": 0.4868421052631579, + "acc_norm_stderr": 0.04067533136309174 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.019722058939618065, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.019722058939618065 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3404255319148936, + "acc_stderr": 0.028267657482650133, + "acc_norm": 0.3404255319148936, + "acc_norm_stderr": 0.028267657482650133 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.375, + "acc_stderr": 0.04595091388086298, + "acc_norm": 0.375, + "acc_norm_stderr": 0.04595091388086298 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4537037037037037, + "acc_stderr": 0.033953227263757976, + "acc_norm": 0.4537037037037037, + "acc_norm_stderr": 0.033953227263757976 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.19776536312849163, + "acc_stderr": 0.013321620594050948, + "acc_norm": 0.19776536312849163, + "acc_norm_stderr": 0.013321620594050948 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + 
"acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.35294117647058826, + "acc_stderr": 0.0290294228156814, + "acc_norm": 0.35294117647058826, + "acc_norm_stderr": 0.0290294228156814 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5551020408163265, + "acc_stderr": 0.031814251181977865, + "acc_norm": 0.5551020408163265, + "acc_norm_stderr": 0.031814251181977865 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6582278481012658, + "acc_stderr": 0.030874537537553617, + "acc_norm": 0.6582278481012658, + "acc_norm_stderr": 0.030874537537553617 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.35919165580182527, + "acc_stderr": 0.012253386187584259, + "acc_norm": 0.35919165580182527, + "acc_norm_stderr": 0.012253386187584259 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6029411764705882, + "acc_stderr": 0.03434131164719131, + "acc_norm": 0.6029411764705882, + "acc_norm_stderr": 0.03434131164719131 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6181818181818182, + "acc_stderr": 0.03793713171165633, + "acc_norm": 0.6181818181818182, + "acc_norm_stderr": 0.03793713171165633 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3353733170134639, + "mc1_stderr": 0.016527534039668987, + "mc2": 0.49886776378301484, + "mc2_stderr": 0.01588273337036278 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5076741440377804, + "acc_stderr": 0.01718832921965428, + "acc_norm": 0.5442739079102715, + "acc_norm_stderr": 0.01712282914329265 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + 
"harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + 
"harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "TIGER-Lab/MAmmoTH2-8B-Plus", + "model_sha": "a49b8a9dc80a42745201002ecbfeac12f4c696dc", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Taekyoon/llama2-org-koen-7b/result_2023-11-17 08:17:26.json b/Taekyoon/llama2-org-koen-7b/result_2023-11-17 08:17:26.json new file mode 100644 index 0000000000000000000000000000000000000000..ec9d52d891b442faaeec6d072f65ea5a3fba68a4 --- /dev/null +++ b/Taekyoon/llama2-org-koen-7b/result_2023-11-17 08:17:26.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3370307167235495, + "acc_stderr": 0.013813476652902279, + "acc_norm": 0.38993174061433444, + "acc_norm_stderr": 0.014252959848892893 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3997211710814579, + "acc_stderr": 0.004888398535520494, + "acc_norm": 0.5370444134634534, + "acc_norm_stderr": 0.0049760677264325615 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.3391812865497076, + "acc_stderr": 0.036310534964889056, + "acc_norm": 0.3391812865497076, + "acc_norm_stderr": 0.036310534964889056 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2912621359223301, + "acc_stderr": 0.044986763205729245, + "acc_norm": 0.2912621359223301, + "acc_norm_stderr": 0.044986763205729245 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3665389527458493, + "acc_stderr": 0.01723124462679703, + 
"acc_norm": 0.3665389527458493, + "acc_norm_stderr": 0.01723124462679703 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34814814814814815, + "acc_stderr": 0.041153246103369526, + "acc_norm": 0.34814814814814815, + "acc_norm_stderr": 0.041153246103369526 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.33191489361702126, + "acc_stderr": 0.030783736757745664, + "acc_norm": 0.33191489361702126, + "acc_norm_stderr": 0.030783736757745664 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.25903614457831325, + "acc_stderr": 0.03410646614071857, + "acc_norm": 0.25903614457831325, + "acc_norm_stderr": 0.03410646614071857 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.34726688102893893, + "acc_stderr": 0.027040745502307333, + "acc_norm": 0.34726688102893893, + "acc_norm_stderr": 0.027040745502307333 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3721973094170404, + "acc_stderr": 0.032443052830087304, + "acc_norm": 0.3721973094170404, + "acc_norm_stderr": 0.032443052830087304 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.32061068702290074, + "acc_stderr": 0.040933292298342784, + "acc_norm": 0.32061068702290074, + "acc_norm_stderr": 0.040933292298342784 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.29797979797979796, + "acc_stderr": 0.03258630383836554, + "acc_norm": 0.29797979797979796, + "acc_norm_stderr": 0.03258630383836554 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.31724137931034485, + "acc_stderr": 0.038783523721386215, + "acc_norm": 0.31724137931034485, + "acc_norm_stderr": 0.038783523721386215 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + 
"acc_stderr": 0.04092563958237655, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237655 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.24369747899159663, + "acc_stderr": 0.027886828078380558, + "acc_norm": 0.24369747899159663, + "acc_norm_stderr": 0.027886828078380558 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2230769230769231, + "acc_stderr": 0.02110773012724398, + "acc_norm": 0.2230769230769231, + "acc_norm_stderr": 0.02110773012724398 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.04330043749650742, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.04330043749650742 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.35960591133004927, + "acc_stderr": 0.03376458246509567, + "acc_norm": 0.35960591133004927, + "acc_norm_stderr": 0.03376458246509567 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3258064516129032, + "acc_stderr": 0.026662010578567104, + "acc_norm": 0.3258064516129032, + "acc_norm_stderr": 0.026662010578567104 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.4230769230769231, + "acc_stderr": 0.032366121762202014, + "acc_norm": 0.4230769230769231, + "acc_norm_stderr": 0.032366121762202014 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.27169811320754716, + "acc_stderr": 0.027377706624670713, + "acc_norm": 0.27169811320754716, + "acc_norm_stderr": 0.027377706624670713 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.36363636363636365, + "acc_stderr": 0.04607582090719976, + "acc_norm": 0.36363636363636365, + "acc_norm_stderr": 0.04607582090719976 + }, + 
"harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.27037037037037037, + "acc_stderr": 0.027080372815145668, + "acc_norm": 0.27037037037037037, + "acc_norm_stderr": 0.027080372815145668 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2781456953642384, + "acc_stderr": 0.03658603262763743, + "acc_norm": 0.2781456953642384, + "acc_norm_stderr": 0.03658603262763743 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.34328358208955223, + "acc_stderr": 0.03357379665433431, + "acc_norm": 0.34328358208955223, + "acc_norm_stderr": 0.03357379665433431 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3063583815028902, + "acc_stderr": 0.035149425512674394, + "acc_norm": 0.3063583815028902, + "acc_norm_stderr": 0.035149425512674394 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.022569897074918407, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.022569897074918407 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2708333333333333, + "acc_stderr": 0.037161774375660164, + "acc_norm": 0.2708333333333333, + "acc_norm_stderr": 0.037161774375660164 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.34104046242774566, + "acc_stderr": 0.025522474632121615, + "acc_norm": 0.34104046242774566, + "acc_norm_stderr": 0.025522474632121615 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3006134969325153, + "acc_stderr": 0.03602511318806771, + "acc_norm": 0.3006134969325153, + "acc_norm_stderr": 0.03602511318806771 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.345679012345679, + "acc_stderr": 0.026462487777001886, + "acc_norm": 0.345679012345679, + 
"acc_norm_stderr": 0.026462487777001886 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.25906735751295334, + "acc_stderr": 0.031618779179354094, + "acc_norm": 0.25906735751295334, + "acc_norm_stderr": 0.031618779179354094 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.041424397194893624, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.041424397194893624 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.28440366972477066, + "acc_stderr": 0.01934203658770259, + "acc_norm": 0.28440366972477066, + "acc_norm_stderr": 0.01934203658770259 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.18253968253968253, + "acc_stderr": 0.03455071019102147, + "acc_norm": 0.18253968253968253, + "acc_norm_stderr": 0.03455071019102147 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3464052287581699, + "acc_stderr": 0.027245613047215365, + "acc_norm": 0.3464052287581699, + "acc_norm_stderr": 0.027245613047215365 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.371900826446281, + "acc_stderr": 0.044120158066245044, + "acc_norm": 0.371900826446281, + "acc_norm_stderr": 0.044120158066245044 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.29605263157894735, + "acc_stderr": 0.03715062154998904, + "acc_norm": 0.29605263157894735, + "acc_norm_stderr": 0.03715062154998904 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2761437908496732, + "acc_stderr": 0.018087276935663137, + "acc_norm": 0.2761437908496732, + "acc_norm_stderr": 0.018087276935663137 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2624113475177305, + "acc_stderr": 
0.02624492034984301, + "acc_norm": 0.2624113475177305, + "acc_norm_stderr": 0.02624492034984301 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.04547960999764376, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.04547960999764376 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.032149521478027486, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.032149521478027486 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.17647058823529413, + "acc_stderr": 0.02315746830855936, + "acc_norm": 0.17647058823529413, + "acc_norm_stderr": 0.02315746830855936 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3224489795918367, + "acc_stderr": 0.029923100563683906, + "acc_norm": 0.3224489795918367, + "acc_norm_stderr": 0.029923100563683906 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.27848101265822783, + "acc_stderr": 0.02917868230484255, + "acc_norm": 0.27848101265822783, + "acc_norm_stderr": 0.02917868230484255 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2848761408083442, + "acc_stderr": 0.011527830846368999, + "acc_norm": 0.2848761408083442, + "acc_norm_stderr": 0.011527830846368999 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.03166009679399813, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.03166009679399813 + }, + 
"harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3515151515151515, + "acc_stderr": 0.037282069986826503, + "acc_norm": 0.3515151515151515, + "acc_norm_stderr": 0.037282069986826503 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.28151774785801714, + "mc1_stderr": 0.01574402724825605, + "mc2": 0.4237266628764529, + "mc2_stderr": 0.01496751362237835 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2727272727272727, + "acc_stderr": 0.01531185311030035, + "acc_norm": 0.44391971664698937, + "acc_norm_stderr": 0.017081884623542543 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + 
"harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Taekyoon/llama2-org-koen-7b", + "model_sha": "869813335f48ec6a8af01c793c0e8705886d3b89", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/TeamUNIVA/Komodo_6B_v1.0.0/result_2024-01-30 12:17:31.json b/TeamUNIVA/Komodo_6B_v1.0.0/result_2024-01-30 12:17:31.json new file mode 100644 index 0000000000000000000000000000000000000000..6b159fbf8ca42c778059f0b1e0f3a77cb121a43f --- /dev/null +++ b/TeamUNIVA/Komodo_6B_v1.0.0/result_2024-01-30 12:17:31.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 
0.4325938566552901, + "acc_stderr": 0.014478005694182526, + "acc_norm": 0.5059726962457338, + "acc_norm_stderr": 0.01461034830025579 + }, + "harness|ko_hellaswag|10": { + "acc": 0.465345548695479, + "acc_stderr": 0.004977782217582458, + "acc_norm": 0.625771758613822, + "acc_norm_stderr": 0.004829339926388327 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5847953216374269, + "acc_stderr": 0.03779275945503201, + "acc_norm": 0.5847953216374269, + "acc_norm_stderr": 0.03779275945503201 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6116504854368932, + "acc_stderr": 0.0482572933735639, + "acc_norm": 0.6116504854368932, + "acc_norm_stderr": 0.0482572933735639 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5874840357598978, + "acc_stderr": 0.01760414910867193, + "acc_norm": 0.5874840357598978, + "acc_norm_stderr": 0.01760414910867193 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.042925967182569816, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.042925967182569816 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.41702127659574467, + "acc_stderr": 0.03223276266711712, + "acc_norm": 0.41702127659574467, + "acc_norm_stderr": 0.03223276266711712 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.463855421686747, + "acc_stderr": 0.03882310850890593, + "acc_norm": 0.463855421686747, + "acc_norm_stderr": 0.03882310850890593 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5498392282958199, + "acc_stderr": 0.028256660723360177, + "acc_norm": 0.5498392282958199, + "acc_norm_stderr": 0.028256660723360177 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5426008968609866, + "acc_stderr": 0.033435777055830646, + "acc_norm": 0.5426008968609866, + "acc_norm_stderr": 0.033435777055830646 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 
0.549618320610687, + "acc_stderr": 0.04363643698524779, + "acc_norm": 0.549618320610687, + "acc_norm_stderr": 0.04363643698524779 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956913, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956913 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6767676767676768, + "acc_stderr": 0.033322999210706444, + "acc_norm": 0.6767676767676768, + "acc_norm_stderr": 0.033322999210706444 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.46206896551724136, + "acc_stderr": 0.041546596717075474, + "acc_norm": 0.46206896551724136, + "acc_norm_stderr": 0.041546596717075474 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3235294117647059, + "acc_stderr": 0.046550104113196177, + "acc_norm": 0.3235294117647059, + "acc_norm_stderr": 0.046550104113196177 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5504201680672269, + "acc_stderr": 0.03231293497137707, + "acc_norm": 0.5504201680672269, + "acc_norm_stderr": 0.03231293497137707 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5025641025641026, + "acc_stderr": 0.025350672979412184, + "acc_norm": 0.5025641025641026, + "acc_norm_stderr": 0.025350672979412184 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.64, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.64, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.04803752235190192, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.04803752235190192 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4088669950738916, + "acc_stderr": 0.034590588158832314, + "acc_norm": 0.4088669950738916, + "acc_norm_stderr": 0.034590588158832314 + }, + 
"harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5387096774193548, + "acc_stderr": 0.028358634859836942, + "acc_norm": 0.5387096774193548, + "acc_norm_stderr": 0.028358634859836942 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7350427350427351, + "acc_stderr": 0.028911208802749465, + "acc_norm": 0.7350427350427351, + "acc_norm_stderr": 0.028911208802749465 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.47547169811320755, + "acc_stderr": 0.030735822206205608, + "acc_norm": 0.47547169811320755, + "acc_norm_stderr": 0.030735822206205608 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5363636363636364, + "acc_stderr": 0.04776449162396197, + "acc_norm": 0.5363636363636364, + "acc_norm_stderr": 0.04776449162396197 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3037037037037037, + "acc_stderr": 0.028037929969114993, + "acc_norm": 0.3037037037037037, + "acc_norm_stderr": 0.028037929969114993 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.32450331125827814, + "acc_stderr": 0.038227469376587525, + "acc_norm": 0.32450331125827814, + "acc_norm_stderr": 0.038227469376587525 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6766169154228856, + "acc_stderr": 0.033076159479790354, + "acc_norm": 0.6766169154228856, + "acc_norm_stderr": 0.033076159479790354 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4624277456647399, + "acc_stderr": 0.038016851045244604, + "acc_norm": 0.4624277456647399, + "acc_norm_stderr": 0.038016851045244604 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3253968253968254, + "acc_stderr": 0.024130158299762623, + "acc_norm": 0.3253968253968254, + "acc_norm_stderr": 0.024130158299762623 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4930555555555556, + "acc_stderr": 0.04180806750294938, + "acc_norm": 0.4930555555555556, + "acc_norm_stderr": 0.04180806750294938 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, 
+ "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.69, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.69, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.546242774566474, + "acc_stderr": 0.02680372058320617, + "acc_norm": 0.546242774566474, + "acc_norm_stderr": 0.02680372058320617 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5337423312883436, + "acc_stderr": 0.03919415545048412, + "acc_norm": 0.5337423312883436, + "acc_norm_stderr": 0.03919415545048412 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5277777777777778, + "acc_stderr": 0.027777777777777804, + "acc_norm": 0.5277777777777778, + "acc_norm_stderr": 0.027777777777777804 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6269430051813472, + "acc_stderr": 0.034902055920485744, + "acc_norm": 0.6269430051813472, + "acc_norm_stderr": 0.034902055920485744 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2894736842105263, + "acc_stderr": 0.04266339443159394, + "acc_norm": 0.2894736842105263, + "acc_norm_stderr": 0.04266339443159394 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6660550458715596, + "acc_stderr": 0.020220554196736407, + "acc_norm": 0.6660550458715596, + "acc_norm_stderr": 0.020220554196736407 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30158730158730157, + "acc_stderr": 0.04104947269903394, + "acc_norm": 0.30158730158730157, + "acc_norm_stderr": 0.04104947269903394 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5294117647058824, + "acc_stderr": 0.0285803410651383, + "acc_norm": 0.5294117647058824, + "acc_norm_stderr": 0.0285803410651383 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.62, + "acc_stderr": 0.04878317312145633, + 
"acc_norm": 0.62, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.628099173553719, + "acc_stderr": 0.044120158066245044, + "acc_norm": 0.628099173553719, + "acc_norm_stderr": 0.044120158066245044 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4868421052631579, + "acc_stderr": 0.04067533136309174, + "acc_norm": 0.4868421052631579, + "acc_norm_stderr": 0.04067533136309174 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.477124183006536, + "acc_stderr": 0.020206653187884782, + "acc_norm": 0.477124183006536, + "acc_norm_stderr": 0.020206653187884782 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.36524822695035464, + "acc_stderr": 0.028723863853281295, + "acc_norm": 0.36524822695035464, + "acc_norm_stderr": 0.028723863853281295 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.04547960999764376, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.04547960999764376 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3287037037037037, + "acc_stderr": 0.032036140846700596, + "acc_norm": 0.3287037037037037, + "acc_norm_stderr": 0.032036140846700596 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.29832402234636873, + "acc_stderr": 0.015301840045129272, + "acc_norm": 0.29832402234636873, + "acc_norm_stderr": 0.015301840045129272 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.53, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.53, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.46691176470588236, + "acc_stderr": 0.030306257722468317, + "acc_norm": 0.46691176470588236, + "acc_norm_stderr": 0.030306257722468317 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 
0.6204081632653061, + "acc_stderr": 0.03106721126287246, + "acc_norm": 0.6204081632653061, + "acc_norm_stderr": 0.03106721126287246 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6286919831223629, + "acc_stderr": 0.03145068600744859, + "acc_norm": 0.6286919831223629, + "acc_norm_stderr": 0.03145068600744859 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3409387222946545, + "acc_stderr": 0.01210681720306721, + "acc_norm": 0.3409387222946545, + "acc_norm_stderr": 0.01210681720306721 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5588235294117647, + "acc_stderr": 0.034849415144292316, + "acc_norm": 0.5588235294117647, + "acc_norm_stderr": 0.034849415144292316 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6181818181818182, + "acc_stderr": 0.03793713171165634, + "acc_norm": 0.6181818181818182, + "acc_norm_stderr": 0.03793713171165634 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3157894736842105, + "mc1_stderr": 0.016272287957916933, + "mc2": 0.47900645996414987, + "mc2_stderr": 0.015981859090450398 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5997638724911453, + "acc_stderr": 0.016844693510505045, + "acc_norm": 0.5985832349468713, + "acc_norm_stderr": 0.01685290785872906 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, 
+ "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + 
"model_name": "TeamUNIVA/Komodo_6B_v1.0.0", + "model_sha": "d5b2219e4d6645c89e686bc989db42afe420ba4d", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/TeamUNIVA/Komodo_6B_v2.0.0/result_2024-02-09 17:20:47.json b/TeamUNIVA/Komodo_6B_v2.0.0/result_2024-02-09 17:20:47.json new file mode 100644 index 0000000000000000000000000000000000000000..0cac8e5608f073b9b0e03b27eb22c01c0a1e750e --- /dev/null +++ b/TeamUNIVA/Komodo_6B_v2.0.0/result_2024-02-09 17:20:47.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.42662116040955633, + "acc_stderr": 0.014453185592920293, + "acc_norm": 0.48890784982935154, + "acc_norm_stderr": 0.01460779491401305 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4684325831507668, + "acc_stderr": 0.0049798268294007604, + "acc_norm": 0.6255725951005776, + "acc_norm_stderr": 0.00482985605860357 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5497076023391813, + "acc_stderr": 0.038158273659132366, + "acc_norm": 0.5497076023391813, + "acc_norm_stderr": 0.038158273659132366 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6893203883495146, + "acc_stderr": 0.045821241601615506, + "acc_norm": 0.6893203883495146, + "acc_norm_stderr": 0.045821241601615506 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5862068965517241, + "acc_stderr": 0.017612204084663765, + "acc_norm": 0.5862068965517241, + "acc_norm_stderr": 0.017612204084663765 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45185185185185184, + "acc_stderr": 0.04299268905480863, + "acc_norm": 0.45185185185185184, + "acc_norm_stderr": 0.04299268905480863 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.44680851063829785, + 
"acc_stderr": 0.032500536843658404, + "acc_norm": 0.44680851063829785, + "acc_norm_stderr": 0.032500536843658404 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4397590361445783, + "acc_stderr": 0.03864139923699121, + "acc_norm": 0.4397590361445783, + "acc_norm_stderr": 0.03864139923699121 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5530546623794212, + "acc_stderr": 0.028237769422085352, + "acc_norm": 0.5530546623794212, + "acc_norm_stderr": 0.028237769422085352 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5336322869955157, + "acc_stderr": 0.033481800170603065, + "acc_norm": 0.5336322869955157, + "acc_norm_stderr": 0.033481800170603065 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.549618320610687, + "acc_stderr": 0.04363643698524779, + "acc_norm": 0.549618320610687, + "acc_norm_stderr": 0.04363643698524779 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6313131313131313, + "acc_stderr": 0.03437305501980619, + "acc_norm": 0.6313131313131313, + "acc_norm_stderr": 0.03437305501980619 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5655172413793104, + "acc_stderr": 0.04130740879555497, + "acc_norm": 0.5655172413793104, + "acc_norm_stderr": 0.04130740879555497 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.04488482852329017, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.04488482852329017 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5210084033613446, + "acc_stderr": 0.03244980849990028, + "acc_norm": 0.5210084033613446, + "acc_norm_stderr": 0.03244980849990028 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5128205128205128, + "acc_stderr": 0.02534267129380725, + "acc_norm": 0.5128205128205128, + "acc_norm_stderr": 0.02534267129380725 + }, + 
"harness|ko_mmlu_computer_security|5": { + "acc": 0.62, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.62, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5740740740740741, + "acc_stderr": 0.0478034362693679, + "acc_norm": 0.5740740740740741, + "acc_norm_stderr": 0.0478034362693679 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4433497536945813, + "acc_stderr": 0.03495334582162934, + "acc_norm": 0.4433497536945813, + "acc_norm_stderr": 0.03495334582162934 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5387096774193548, + "acc_stderr": 0.02835863485983695, + "acc_norm": 0.5387096774193548, + "acc_norm_stderr": 0.02835863485983695 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7264957264957265, + "acc_stderr": 0.029202540153431183, + "acc_norm": 0.7264957264957265, + "acc_norm_stderr": 0.029202540153431183 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4830188679245283, + "acc_stderr": 0.030755120364119905, + "acc_norm": 0.4830188679245283, + "acc_norm_stderr": 0.030755120364119905 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5636363636363636, + "acc_stderr": 0.04750185058907296, + "acc_norm": 0.5636363636363636, + "acc_norm_stderr": 0.04750185058907296 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3111111111111111, + "acc_stderr": 0.028226446749683515, + "acc_norm": 0.3111111111111111, + "acc_norm_stderr": 0.028226446749683515 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.03780445850526733, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.03780445850526733 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6567164179104478, + "acc_stderr": 0.03357379665433432, + "acc_norm": 0.6567164179104478, + "acc_norm_stderr": 
0.03357379665433432 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.42196531791907516, + "acc_stderr": 0.037657466938651483, + "acc_norm": 0.42196531791907516, + "acc_norm_stderr": 0.037657466938651483 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.36772486772486773, + "acc_stderr": 0.024833839825562424, + "acc_norm": 0.36772486772486773, + "acc_norm_stderr": 0.024833839825562424 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.04174752578923185, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.04174752578923185 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.65, + "acc_stderr": 0.04793724854411019, + "acc_norm": 0.65, + "acc_norm_stderr": 0.04793724854411019 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5549132947976878, + "acc_stderr": 0.026756255129663765, + "acc_norm": 0.5549132947976878, + "acc_norm_stderr": 0.026756255129663765 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5030674846625767, + "acc_stderr": 0.03928297078179663, + "acc_norm": 0.5030674846625767, + "acc_norm_stderr": 0.03928297078179663 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5462962962962963, + "acc_stderr": 0.027701228468542602, + "acc_norm": 0.5462962962962963, + "acc_norm_stderr": 0.027701228468542602 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6321243523316062, + "acc_stderr": 0.03480175668466036, + "acc_norm": 0.6321243523316062, + "acc_norm_stderr": 0.03480175668466036 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3157894736842105, + "acc_stderr": 0.04372748290278007, + "acc_norm": 0.3157894736842105, + 
"acc_norm_stderr": 0.04372748290278007 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.653211009174312, + "acc_stderr": 0.020406097104093027, + "acc_norm": 0.653211009174312, + "acc_norm_stderr": 0.020406097104093027 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.23809523809523808, + "acc_stderr": 0.038095238095238126, + "acc_norm": 0.23809523809523808, + "acc_norm_stderr": 0.038095238095238126 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5392156862745098, + "acc_stderr": 0.028541722692618877, + "acc_norm": 0.5392156862745098, + "acc_norm_stderr": 0.028541722692618877 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6115702479338843, + "acc_stderr": 0.04449270350068383, + "acc_norm": 0.6115702479338843, + "acc_norm_stderr": 0.04449270350068383 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5263157894736842, + "acc_stderr": 0.04063302731486671, + "acc_norm": 0.5263157894736842, + "acc_norm_stderr": 0.04063302731486671 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.49019607843137253, + "acc_stderr": 0.0202239460050743, + "acc_norm": 0.49019607843137253, + "acc_norm_stderr": 0.0202239460050743 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.33687943262411346, + "acc_stderr": 0.02819553487396673, + "acc_norm": 0.33687943262411346, + "acc_norm_stderr": 0.02819553487396673 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.33035714285714285, + "acc_stderr": 0.04464285714285712, + "acc_norm": 0.33035714285714285, + "acc_norm_stderr": 0.04464285714285712 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.032149521478027486, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.032149521478027486 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2737430167597765, + 
"acc_stderr": 0.014912413096372434, + "acc_norm": 0.2737430167597765, + "acc_norm_stderr": 0.014912413096372434 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.41, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3860294117647059, + "acc_stderr": 0.029573269134411124, + "acc_norm": 0.3860294117647059, + "acc_norm_stderr": 0.029573269134411124 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6163265306122448, + "acc_stderr": 0.03113088039623595, + "acc_norm": 0.6163265306122448, + "acc_norm_stderr": 0.03113088039623595 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6540084388185654, + "acc_stderr": 0.030964810588786706, + "acc_norm": 0.6540084388185654, + "acc_norm_stderr": 0.030964810588786706 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3644067796610169, + "acc_stderr": 0.012291694983056477, + "acc_norm": 0.3644067796610169, + "acc_norm_stderr": 0.012291694983056477 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.553921568627451, + "acc_stderr": 0.034888454513049734, + "acc_norm": 0.553921568627451, + "acc_norm_stderr": 0.034888454513049734 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6242424242424243, + "acc_stderr": 0.03781887353205982, + "acc_norm": 0.6242424242424243, + "acc_norm_stderr": 0.03781887353205982 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3488372093023256, + "mc1_stderr": 0.016684419859986883, + "mc2": 0.5036147302296147, + "mc2_stderr": 0.015965776819730753 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.6115702479338843, + "acc_stderr": 0.01675692157106942, + "acc_norm": 0.6139315230224321, + "acc_norm_stderr": 0.01673813076032175 + } + }, + "versions": { + 
"all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + 
"harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "TeamUNIVA/Komodo_6B_v2.0.0", + "model_sha": "337ec7305cfd6a931d31ebb1bfad4e4523877c95", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/TeamUNIVA/Komodo_6B_v3.0.0/result_2024-03-04 11:29:12.json b/TeamUNIVA/Komodo_6B_v3.0.0/result_2024-03-04 11:29:12.json new file mode 100644 index 0000000000000000000000000000000000000000..0276aa3f9dd91a5bd20bb310b24ab57b0a68df80 --- /dev/null +++ b/TeamUNIVA/Komodo_6B_v3.0.0/result_2024-03-04 11:29:12.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.43600682593856654, + "acc_stderr": 0.014491225699230916, + "acc_norm": 0.4931740614334471, + "acc_norm_stderr": 0.014610029151379813 + }, + "harness|ko_hellaswag|10": { + "acc": 0.48137821151165106, + "acc_stderr": 0.004986319587524966, + "acc_norm": 0.6385182234614618, + "acc_norm_stderr": 0.004794478426382608 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5380116959064327, + "acc_stderr": 0.03823727092882307, + "acc_norm": 0.5380116959064327, + "acc_norm_stderr": 0.03823727092882307 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6601941747572816, 
+ "acc_stderr": 0.046897659372781335, + "acc_norm": 0.6601941747572816, + "acc_norm_stderr": 0.046897659372781335 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5862068965517241, + "acc_stderr": 0.01761220408466376, + "acc_norm": 0.5862068965517241, + "acc_norm_stderr": 0.01761220408466376 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45185185185185184, + "acc_stderr": 0.04299268905480863, + "acc_norm": 0.45185185185185184, + "acc_norm_stderr": 0.04299268905480863 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816505, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816505 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4723404255319149, + "acc_stderr": 0.03263597118409769, + "acc_norm": 0.4723404255319149, + "acc_norm_stderr": 0.03263597118409769 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.40963855421686746, + "acc_stderr": 0.03828401115079022, + "acc_norm": 0.40963855421686746, + "acc_norm_stderr": 0.03828401115079022 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5369774919614148, + "acc_stderr": 0.028320325830105915, + "acc_norm": 0.5369774919614148, + "acc_norm_stderr": 0.028320325830105915 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.515695067264574, + "acc_stderr": 0.0335412657542081, + "acc_norm": 0.515695067264574, + "acc_norm_stderr": 0.0335412657542081 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.549618320610687, + "acc_stderr": 0.04363643698524779, + "acc_norm": 0.549618320610687, + "acc_norm_stderr": 0.04363643698524779 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6464646464646465, + "acc_stderr": 0.03406086723547155, + "acc_norm": 0.6464646464646465, + "acc_norm_stderr": 0.03406086723547155 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5310344827586206, + 
"acc_stderr": 0.04158632762097828, + "acc_norm": 0.5310344827586206, + "acc_norm_stderr": 0.04158632762097828 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.04617034827006717, + "acc_norm": 0.3137254901960784, + "acc_norm_stderr": 0.04617034827006717 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5378151260504201, + "acc_stderr": 0.0323854694875898, + "acc_norm": 0.5378151260504201, + "acc_norm_stderr": 0.0323854694875898 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5461538461538461, + "acc_stderr": 0.025242770987126198, + "acc_norm": 0.5461538461538461, + "acc_norm_stderr": 0.025242770987126198 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5740740740740741, + "acc_stderr": 0.047803436269367894, + "acc_norm": 0.5740740740740741, + "acc_norm_stderr": 0.047803436269367894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4236453201970443, + "acc_stderr": 0.034767257476490364, + "acc_norm": 0.4236453201970443, + "acc_norm_stderr": 0.034767257476490364 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5225806451612903, + "acc_stderr": 0.02841498501970786, + "acc_norm": 0.5225806451612903, + "acc_norm_stderr": 0.02841498501970786 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7478632478632479, + "acc_stderr": 0.02844796547623102, + "acc_norm": 0.7478632478632479, + "acc_norm_stderr": 0.02844796547623102 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.47547169811320755, + "acc_stderr": 0.030735822206205608, + "acc_norm": 0.47547169811320755, + "acc_norm_stderr": 0.030735822206205608 + }, + 
"harness|ko_mmlu_public_relations|5": { + "acc": 0.6090909090909091, + "acc_stderr": 0.04673752333670239, + "acc_norm": 0.6090909090909091, + "acc_norm_stderr": 0.04673752333670239 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.31851851851851853, + "acc_stderr": 0.02840653309060846, + "acc_norm": 0.31851851851851853, + "acc_norm_stderr": 0.02840653309060846 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + "acc_stderr": 0.03684881521389024, + "acc_norm": 0.2847682119205298, + "acc_norm_stderr": 0.03684881521389024 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6417910447761194, + "acc_stderr": 0.03390393042268814, + "acc_norm": 0.6417910447761194, + "acc_norm_stderr": 0.03390393042268814 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4277456647398844, + "acc_stderr": 0.03772446857518027, + "acc_norm": 0.4277456647398844, + "acc_norm_stderr": 0.03772446857518027 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.023809523809523857, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.023809523809523857 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4791666666666667, + "acc_stderr": 0.041775789507399935, + "acc_norm": 0.4791666666666667, + "acc_norm_stderr": 0.041775789507399935 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.65, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.65, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5260115606936416, + "acc_stderr": 0.02688264343402289, + "acc_norm": 0.5260115606936416, + "acc_norm_stderr": 0.02688264343402289 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5276073619631901, + "acc_stderr": 0.03922378290610991, + "acc_norm": 0.5276073619631901, + 
"acc_norm_stderr": 0.03922378290610991 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5216049382716049, + "acc_stderr": 0.027794760105008736, + "acc_norm": 0.5216049382716049, + "acc_norm_stderr": 0.027794760105008736 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6217616580310881, + "acc_stderr": 0.034998072761933376, + "acc_norm": 0.6217616580310881, + "acc_norm_stderr": 0.034998072761933376 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.04185774424022056, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.04185774424022056 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6587155963302752, + "acc_stderr": 0.020328612816592435, + "acc_norm": 0.6587155963302752, + "acc_norm_stderr": 0.020328612816592435 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.04006168083848879, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.04006168083848879 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5163398692810458, + "acc_stderr": 0.028614624752805434, + "acc_norm": 0.5163398692810458, + "acc_norm_stderr": 0.028614624752805434 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6033057851239669, + "acc_stderr": 0.044658697805310094, + "acc_norm": 0.6033057851239669, + "acc_norm_stderr": 0.044658697805310094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.506578947368421, + "acc_stderr": 0.040685900502249704, + "acc_norm": 0.506578947368421, + "acc_norm_stderr": 0.040685900502249704 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4803921568627451, + "acc_stderr": 0.020212274976302954, + 
"acc_norm": 0.4803921568627451, + "acc_norm_stderr": 0.020212274976302954 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32978723404255317, + "acc_stderr": 0.028045946942042398, + "acc_norm": 0.32978723404255317, + "acc_norm_stderr": 0.028045946942042398 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.29464285714285715, + "acc_stderr": 0.04327040932578731, + "acc_norm": 0.29464285714285715, + "acc_norm_stderr": 0.04327040932578731 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.032757734861009996, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.032757734861009996 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.25251396648044694, + "acc_stderr": 0.014530330201468628, + "acc_norm": 0.25251396648044694, + "acc_norm_stderr": 0.014530330201468628 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.43014705882352944, + "acc_stderr": 0.030074971917302875, + "acc_norm": 0.43014705882352944, + "acc_norm_stderr": 0.030074971917302875 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6285714285714286, + "acc_stderr": 0.03093285879278986, + "acc_norm": 0.6285714285714286, + "acc_norm_stderr": 0.03093285879278986 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6540084388185654, + "acc_stderr": 0.03096481058878671, + "acc_norm": 0.6540084388185654, + "acc_norm_stderr": 0.03096481058878671 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.35984354628422427, + "acc_stderr": 0.0122582604836898, + "acc_norm": 0.35984354628422427, + "acc_norm_stderr": 0.0122582604836898 + }, + 
"harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5441176470588235, + "acc_stderr": 0.03495624522015478, + "acc_norm": 0.5441176470588235, + "acc_norm_stderr": 0.03495624522015478 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.03756335775187896, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.03756335775187896 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.38310893512851896, + "mc1_stderr": 0.01701846167938986, + "mc2": 0.5300394533800307, + "mc2_stderr": 0.016087134575191925 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.6446280991735537, + "acc_stderr": 0.016455496000314516, + "acc_norm": 0.6434474616292798, + "acc_norm_stderr": 0.016467706981527445 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, 
+ "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "TeamUNIVA/Komodo_6B_v3.0.0", + "model_sha": "ac7f25d54977bd85ed364cc213bde0877031279f", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/TeamUNIVA/Komodo_7B_v1.0.0/result_2024-01-30 12:16:24.json b/TeamUNIVA/Komodo_7B_v1.0.0/result_2024-01-30 12:16:24.json new file mode 100644 index 
0000000000000000000000000000000000000000..4c5ee575efe4bbea26437d515036aa511d5b4d9e --- /dev/null +++ b/TeamUNIVA/Komodo_7B_v1.0.0/result_2024-01-30 12:16:24.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.47440273037542663, + "acc_stderr": 0.014592230885298959, + "acc_norm": 0.5213310580204779, + "acc_norm_stderr": 0.014598087973127104 + }, + "harness|ko_hellaswag|10": { + "acc": 0.5659231228838877, + "acc_stderr": 0.004946221512145284, + "acc_norm": 0.681736705835491, + "acc_norm_stderr": 0.004648503177353943 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.49122807017543857, + "acc_stderr": 0.038342347441649924, + "acc_norm": 0.49122807017543857, + "acc_norm_stderr": 0.038342347441649924 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6019417475728155, + "acc_stderr": 0.048467482539772386, + "acc_norm": 0.6019417475728155, + "acc_norm_stderr": 0.048467482539772386 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5223499361430396, + "acc_stderr": 0.017862091778507855, + "acc_norm": 0.5223499361430396, + "acc_norm_stderr": 0.017862091778507855 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04072314811876837, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04072314811876837 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3702127659574468, + "acc_stderr": 0.031565646822367836, + "acc_norm": 0.3702127659574468, + "acc_norm_stderr": 0.031565646822367836 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.41566265060240964, + "acc_stderr": 0.038367221765980515, + "acc_norm": 0.41566265060240964, + "acc_norm_stderr": 0.038367221765980515 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.49517684887459806, + "acc_stderr": 0.028396770444111288, + "acc_norm": 0.49517684887459806, + "acc_norm_stderr": 
0.028396770444111288 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4439461883408072, + "acc_stderr": 0.03334625674242728, + "acc_norm": 0.4439461883408072, + "acc_norm_stderr": 0.03334625674242728 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4580152671755725, + "acc_stderr": 0.04369802690578756, + "acc_norm": 0.4580152671755725, + "acc_norm_stderr": 0.04369802690578756 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6161616161616161, + "acc_stderr": 0.03464881675016338, + "acc_norm": 0.6161616161616161, + "acc_norm_stderr": 0.03464881675016338 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4482758620689655, + "acc_stderr": 0.04144311810878151, + "acc_norm": 0.4482758620689655, + "acc_norm_stderr": 0.04144311810878151 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3431372549019608, + "acc_stderr": 0.047240073523838876, + "acc_norm": 0.3431372549019608, + "acc_norm_stderr": 0.047240073523838876 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5840336134453782, + "acc_stderr": 0.03201650100739611, + "acc_norm": 0.5840336134453782, + "acc_norm_stderr": 0.03201650100739611 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5333333333333333, + "acc_stderr": 0.025294608023986462, + "acc_norm": 0.5333333333333333, + "acc_norm_stderr": 0.025294608023986462 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.47, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.47, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5185185185185185, + "acc_stderr": 0.0483036602463533, + "acc_norm": 0.5185185185185185, + 
"acc_norm_stderr": 0.0483036602463533 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.43349753694581283, + "acc_stderr": 0.034867317274198714, + "acc_norm": 0.43349753694581283, + "acc_norm_stderr": 0.034867317274198714 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4838709677419355, + "acc_stderr": 0.028429203176724555, + "acc_norm": 0.4838709677419355, + "acc_norm_stderr": 0.028429203176724555 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6623931623931624, + "acc_stderr": 0.030980296992618558, + "acc_norm": 0.6623931623931624, + "acc_norm_stderr": 0.030980296992618558 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4188679245283019, + "acc_stderr": 0.0303650508291152, + "acc_norm": 0.4188679245283019, + "acc_norm_stderr": 0.0303650508291152 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5727272727272728, + "acc_stderr": 0.04738198703545483, + "acc_norm": 0.5727272727272728, + "acc_norm_stderr": 0.04738198703545483 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2814814814814815, + "acc_stderr": 0.027420019350945277, + "acc_norm": 0.2814814814814815, + "acc_norm_stderr": 0.027420019350945277 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.32450331125827814, + "acc_stderr": 0.03822746937658753, + "acc_norm": 0.32450331125827814, + "acc_norm_stderr": 0.03822746937658753 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5870646766169154, + "acc_stderr": 0.03481520803367348, + "acc_norm": 0.5870646766169154, + "acc_norm_stderr": 0.03481520803367348 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.37572254335260113, + "acc_stderr": 0.03692820767264867, + "acc_norm": 0.37572254335260113, + "acc_norm_stderr": 0.03692820767264867 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3386243386243386, + "acc_stderr": 0.02437319786798306, + "acc_norm": 0.3386243386243386, + "acc_norm_stderr": 0.02437319786798306 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 
0.3888888888888889, + "acc_stderr": 0.04076663253918567, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.04076663253918567 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237101, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237101 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5115606936416185, + "acc_stderr": 0.026911898686377906, + "acc_norm": 0.5115606936416185, + "acc_norm_stderr": 0.026911898686377906 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.48466257668711654, + "acc_stderr": 0.03926522378708843, + "acc_norm": 0.48466257668711654, + "acc_norm_stderr": 0.03926522378708843 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4876543209876543, + "acc_stderr": 0.027812262269327235, + "acc_norm": 0.4876543209876543, + "acc_norm_stderr": 0.027812262269327235 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421296, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421296 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5492227979274611, + "acc_stderr": 0.035909109522355244, + "acc_norm": 0.5492227979274611, + "acc_norm_stderr": 0.035909109522355244 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04434600701584926, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04434600701584926 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5339449541284403, + "acc_stderr": 0.021387863350353996, + "acc_norm": 0.5339449541284403, + "acc_norm_stderr": 0.021387863350353996 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4126984126984127, + "acc_stderr": 0.04403438954768177, + "acc_norm": 0.4126984126984127, + "acc_norm_stderr": 0.04403438954768177 + }, + "harness|ko_mmlu_nutrition|5": { + 
"acc": 0.5032679738562091, + "acc_stderr": 0.028629305194003543, + "acc_norm": 0.5032679738562091, + "acc_norm_stderr": 0.028629305194003543 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6776859504132231, + "acc_stderr": 0.04266416363352168, + "acc_norm": 0.6776859504132231, + "acc_norm_stderr": 0.04266416363352168 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40789473684210525, + "acc_stderr": 0.03999309712777472, + "acc_norm": 0.40789473684210525, + "acc_norm_stderr": 0.03999309712777472 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.019944914136873573, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.019944914136873573 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2907801418439716, + "acc_stderr": 0.027090664368353178, + "acc_norm": 0.2907801418439716, + "acc_norm_stderr": 0.027090664368353178 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.32142857142857145, + "acc_stderr": 0.0443280405529152, + "acc_norm": 0.32142857142857145, + "acc_norm_stderr": 0.0443280405529152 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.41203703703703703, + "acc_stderr": 0.03356787758160834, + "acc_norm": 0.41203703703703703, + "acc_norm_stderr": 0.03356787758160834 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24804469273743016, + "acc_stderr": 0.014444157808261457, + "acc_norm": 0.24804469273743016, + "acc_norm_stderr": 0.014444157808261457 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.57, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.57, + "acc_norm_stderr": 0.049756985195624284 + }, + 
"harness|ko_mmlu_professional_medicine|5": { + "acc": 0.41544117647058826, + "acc_stderr": 0.029935342707877743, + "acc_norm": 0.41544117647058826, + "acc_norm_stderr": 0.029935342707877743 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.563265306122449, + "acc_stderr": 0.03175195237583324, + "acc_norm": 0.563265306122449, + "acc_norm_stderr": 0.03175195237583324 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5822784810126582, + "acc_stderr": 0.032103530322412685, + "acc_norm": 0.5822784810126582, + "acc_norm_stderr": 0.032103530322412685 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3624511082138201, + "acc_stderr": 0.01227751253325249, + "acc_norm": 0.3624511082138201, + "acc_norm_stderr": 0.01227751253325249 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4803921568627451, + "acc_stderr": 0.03506612560524866, + "acc_norm": 0.4803921568627451, + "acc_norm_stderr": 0.03506612560524866 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4909090909090909, + "acc_stderr": 0.03903698647748441, + "acc_norm": 0.4909090909090909, + "acc_norm_stderr": 0.03903698647748441 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.4394124847001224, + "mc1_stderr": 0.01737452048251371, + "mc2": 0.5952931693636797, + "mc2_stderr": 0.01657492819641639 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5159386068476978, + "acc_stderr": 0.017181617837190195, + "acc_norm": 0.5489964580873672, + "acc_norm_stderr": 0.01710761885954935 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + 
"harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + 
"harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "TeamUNIVA/Komodo_7B_v1.0.0", + "model_sha": "079cadef2c996d4a14365afc3d52f88b911b357e", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/TeamUNIVA/Komodo_7B_v1.0.1/result_2024-01-30 12:16:51.json b/TeamUNIVA/Komodo_7B_v1.0.1/result_2024-01-30 12:16:51.json new file mode 100644 index 0000000000000000000000000000000000000000..f9f8b9801d5f72735c0ce27ef13e1992960488ff --- /dev/null +++ b/TeamUNIVA/Komodo_7B_v1.0.1/result_2024-01-30 12:16:51.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.48293515358361777, + "acc_stderr": 0.014602878388536597, + "acc_norm": 0.514505119453925, + "acc_norm_stderr": 0.01460524108137005 + }, + "harness|ko_hellaswag|10": { + "acc": 0.5424218283210516, + "acc_stderr": 0.004971789638563324, + "acc_norm": 0.6623182632941645, + "acc_norm_stderr": 0.00471952909991311 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.03811079669833531, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.03811079669833531 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5631067961165048, + "acc_stderr": 0.04911147107365777, + "acc_norm": 0.5631067961165048, + "acc_norm_stderr": 0.04911147107365777 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5376756066411239, + "acc_stderr": 0.017829131764287177, + "acc_norm": 0.5376756066411239, + "acc_norm_stderr": 0.017829131764287177 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.32592592592592595, + "acc_stderr": 0.040491220417025055, + "acc_norm": 
0.32592592592592595, + "acc_norm_stderr": 0.040491220417025055 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.34893617021276596, + "acc_stderr": 0.031158522131357766, + "acc_norm": 0.34893617021276596, + "acc_norm_stderr": 0.031158522131357766 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4397590361445783, + "acc_stderr": 0.03864139923699122, + "acc_norm": 0.4397590361445783, + "acc_norm_stderr": 0.03864139923699122 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4790996784565916, + "acc_stderr": 0.028373270961069414, + "acc_norm": 0.4790996784565916, + "acc_norm_stderr": 0.028373270961069414 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4260089686098655, + "acc_stderr": 0.033188332862172806, + "acc_norm": 0.4260089686098655, + "acc_norm_stderr": 0.033188332862172806 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.45038167938931295, + "acc_stderr": 0.04363643698524779, + "acc_norm": 0.45038167938931295, + "acc_norm_stderr": 0.04363643698524779 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5757575757575758, + "acc_stderr": 0.03521224908841586, + "acc_norm": 0.5757575757575758, + "acc_norm_stderr": 0.03521224908841586 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3448275862068966, + "acc_stderr": 0.03960933549451207, + "acc_norm": 0.3448275862068966, + "acc_norm_stderr": 0.03960933549451207 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.04280105837364397, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.04280105837364397 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.44537815126050423, + "acc_stderr": 
0.032284106267163895, + "acc_norm": 0.44537815126050423, + "acc_norm_stderr": 0.032284106267163895 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4512820512820513, + "acc_stderr": 0.025230381238934833, + "acc_norm": 0.4512820512820513, + "acc_norm_stderr": 0.025230381238934833 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.57, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.57, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5092592592592593, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.5092592592592593, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3793103448275862, + "acc_stderr": 0.034139638059062345, + "acc_norm": 0.3793103448275862, + "acc_norm_stderr": 0.034139638059062345 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4, + "acc_stderr": 0.027869320571664632, + "acc_norm": 0.4, + "acc_norm_stderr": 0.027869320571664632 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6581196581196581, + "acc_stderr": 0.03107502852650776, + "acc_norm": 0.6581196581196581, + "acc_norm_stderr": 0.03107502852650776 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.39245283018867927, + "acc_stderr": 0.030052580579557845, + "acc_norm": 0.39245283018867927, + "acc_norm_stderr": 0.030052580579557845 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5, + "acc_stderr": 0.04789131426105757, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04789131426105757 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24074074074074073, + "acc_stderr": 0.026067159222275794, + "acc_norm": 0.24074074074074073, + "acc_norm_stderr": 0.026067159222275794 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2582781456953642, + "acc_stderr": 
0.035737053147634576, + "acc_norm": 0.2582781456953642, + "acc_norm_stderr": 0.035737053147634576 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.572139303482587, + "acc_stderr": 0.03498541988407795, + "acc_norm": 0.572139303482587, + "acc_norm_stderr": 0.03498541988407795 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3179190751445087, + "acc_stderr": 0.03550683989165581, + "acc_norm": 0.3179190751445087, + "acc_norm_stderr": 0.03550683989165581 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3439153439153439, + "acc_stderr": 0.024464426625596433, + "acc_norm": 0.3439153439153439, + "acc_norm_stderr": 0.024464426625596433 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.375, + "acc_stderr": 0.04048439222695598, + "acc_norm": 0.375, + "acc_norm_stderr": 0.04048439222695598 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.45375722543352603, + "acc_stderr": 0.026803720583206167, + "acc_norm": 0.45375722543352603, + "acc_norm_stderr": 0.026803720583206167 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.44171779141104295, + "acc_stderr": 0.03901591825836185, + "acc_norm": 0.44171779141104295, + "acc_norm_stderr": 0.03901591825836185 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.027777777777777804, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.027777777777777804 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5129533678756477, + "acc_stderr": 
0.03607228061047749, + "acc_norm": 0.5129533678756477, + "acc_norm_stderr": 0.03607228061047749 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3508771929824561, + "acc_stderr": 0.044895393502706986, + "acc_norm": 0.3508771929824561, + "acc_norm_stderr": 0.044895393502706986 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5266055045871559, + "acc_stderr": 0.02140695268815158, + "acc_norm": 0.5266055045871559, + "acc_norm_stderr": 0.02140695268815158 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.23809523809523808, + "acc_stderr": 0.038095238095238126, + "acc_norm": 0.23809523809523808, + "acc_norm_stderr": 0.038095238095238126 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.02845263998508801, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.02845263998508801 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.44, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.44, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5867768595041323, + "acc_stderr": 0.04495087843548408, + "acc_norm": 0.5867768595041323, + "acc_norm_stderr": 0.04495087843548408 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3815789473684211, + "acc_stderr": 0.03953173377749194, + "acc_norm": 0.3815789473684211, + "acc_norm_stderr": 0.03953173377749194 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4477124183006536, + "acc_stderr": 0.020116925347422425, + "acc_norm": 0.4477124183006536, + "acc_norm_stderr": 0.020116925347422425 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3262411347517731, + "acc_stderr": 0.027968453043563168, + "acc_norm": 0.3262411347517731, + "acc_norm_stderr": 0.027968453043563168 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.04287858751340456, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.04287858751340456 + }, + 
"harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2916666666666667, + "acc_stderr": 0.030998666304560524, + "acc_norm": 0.2916666666666667, + "acc_norm_stderr": 0.030998666304560524 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24022346368715083, + "acc_stderr": 0.014288343803925314, + "acc_norm": 0.24022346368715083, + "acc_norm_stderr": 0.014288343803925314 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.57, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.57, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.39338235294117646, + "acc_stderr": 0.029674288281311183, + "acc_norm": 0.39338235294117646, + "acc_norm_stderr": 0.029674288281311183 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4489795918367347, + "acc_stderr": 0.0318421386668758, + "acc_norm": 0.4489795918367347, + "acc_norm_stderr": 0.0318421386668758 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5949367088607594, + "acc_stderr": 0.031955147413706725, + "acc_norm": 0.5949367088607594, + "acc_norm_stderr": 0.031955147413706725 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3376792698826597, + "acc_stderr": 0.01207856377714556, + "acc_norm": 0.3376792698826597, + "acc_norm_stderr": 0.01207856377714556 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4215686274509804, + "acc_stderr": 0.03465868196380758, + "acc_norm": 0.4215686274509804, + "acc_norm_stderr": 0.03465868196380758 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5212121212121212, + "acc_stderr": 0.03900828913737301, + "acc_norm": 0.5212121212121212, + "acc_norm_stderr": 0.03900828913737301 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.423500611995104, + "mc1_stderr": 0.01729742144853472, + 
"mc2": 0.5817391312297906, + "mc2_stderr": 0.01670758742996612 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5430932703659976, + "acc_stderr": 0.017126389093086777, + "acc_norm": 0.5761511216056671, + "acc_norm_stderr": 0.01698981083462825 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + 
"harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "TeamUNIVA/Komodo_7B_v1.0.1", + "model_sha": "f59d7d20552ffe38c8c7bf5c4b14b5c70d5ac820", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/The-matt/llama2_ko-7b_distinctive-snowflake-182_1060/result_2023-11-20 01:48:08.json b/The-matt/llama2_ko-7b_distinctive-snowflake-182_1060/result_2023-11-20 01:48:08.json new file mode 100644 index 0000000000000000000000000000000000000000..aa5bc87f1c3bc9958c7bdfe209512c8d35207742 --- /dev/null +++ b/The-matt/llama2_ko-7b_distinctive-snowflake-182_1060/result_2023-11-20 01:48:08.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3054607508532423, + "acc_stderr": 0.013460080478002494, + "acc_norm": 0.3583617747440273, + "acc_norm_stderr": 0.014012883334859859 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3761202947619996, + "acc_stderr": 0.004834207964061325, + 
"acc_norm": 0.4910376419040032, + "acc_norm_stderr": 0.004988979750014442 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.28654970760233917, + "acc_stderr": 0.03467826685703826, + "acc_norm": 0.28654970760233917, + "acc_norm_stderr": 0.03467826685703826 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2524271844660194, + "acc_stderr": 0.04301250399690879, + "acc_norm": 0.2524271844660194, + "acc_norm_stderr": 0.04301250399690879 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3282247765006386, + "acc_stderr": 0.01679168564019289, + "acc_norm": 0.3282247765006386, + "acc_norm_stderr": 0.01679168564019289 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34074074074074073, + "acc_stderr": 0.04094376269996794, + "acc_norm": 0.34074074074074073, + "acc_norm_stderr": 0.04094376269996794 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421255, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421255 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.251063829787234, + "acc_stderr": 0.02834696377716245, + "acc_norm": 0.251063829787234, + "acc_norm_stderr": 0.02834696377716245 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.25903614457831325, + "acc_stderr": 0.034106466140718564, + "acc_norm": 0.25903614457831325, + "acc_norm_stderr": 0.034106466140718564 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3311897106109325, + "acc_stderr": 0.026730620728004917, + "acc_norm": 0.3311897106109325, + "acc_norm_stderr": 0.026730620728004917 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.29596412556053814, + "acc_stderr": 0.0306365913486998, + "acc_norm": 0.29596412556053814, + "acc_norm_stderr": 0.0306365913486998 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.24427480916030533, + "acc_stderr": 0.037683359597287434, + "acc_norm": 0.24427480916030533, + "acc_norm_stderr": 0.037683359597287434 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.27, + "acc_stderr": 
0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.24242424242424243, + "acc_stderr": 0.030532892233932036, + "acc_norm": 0.24242424242424243, + "acc_norm_stderr": 0.030532892233932036 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3448275862068966, + "acc_stderr": 0.03960933549451207, + "acc_norm": 0.3448275862068966, + "acc_norm_stderr": 0.03960933549451207 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237655, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237655 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.02755361446786382, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.02755361446786382 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2076923076923077, + "acc_stderr": 0.0205675395672468, + "acc_norm": 0.2076923076923077, + "acc_norm_stderr": 0.0205675395672468 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.21, + "acc_stderr": 0.04093601807403326, + "acc_norm": 0.21, + "acc_norm_stderr": 0.04093601807403326 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.0471282125742677, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.0471282125742677 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.32019704433497537, + "acc_stderr": 0.032826493853041504, + "acc_norm": 0.32019704433497537, + "acc_norm_stderr": 0.032826493853041504 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2645161290322581, + "acc_stderr": 0.025091892378859275, + "acc_norm": 0.2645161290322581, + "acc_norm_stderr": 0.025091892378859275 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 
0.358974358974359, + "acc_stderr": 0.03142616993791925, + "acc_norm": 0.358974358974359, + "acc_norm_stderr": 0.03142616993791925 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.27169811320754716, + "acc_stderr": 0.027377706624670713, + "acc_norm": 0.27169811320754716, + "acc_norm_stderr": 0.027377706624670713 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.32727272727272727, + "acc_stderr": 0.04494290866252088, + "acc_norm": 0.32727272727272727, + "acc_norm_stderr": 0.04494290866252088 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.026842057873833706, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.026842057873833706 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.26490066225165565, + "acc_stderr": 0.03603038545360384, + "acc_norm": 0.26490066225165565, + "acc_norm_stderr": 0.03603038545360384 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.373134328358209, + "acc_stderr": 0.03419832608176007, + "acc_norm": 0.373134328358209, + "acc_norm_stderr": 0.03419832608176007 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2658959537572254, + "acc_stderr": 0.033687629322594316, + "acc_norm": 0.2658959537572254, + "acc_norm_stderr": 0.033687629322594316 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2671957671957672, + "acc_stderr": 0.02278967314577657, + "acc_norm": 0.2671957671957672, + "acc_norm_stderr": 0.02278967314577657 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.03476590104304134, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.03476590104304134 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + 
"harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2976878612716763, + "acc_stderr": 0.024617055388676992, + "acc_norm": 0.2976878612716763, + "acc_norm_stderr": 0.024617055388676992 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.25153374233128833, + "acc_stderr": 0.03408997886857529, + "acc_norm": 0.25153374233128833, + "acc_norm_stderr": 0.03408997886857529 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.30246913580246915, + "acc_stderr": 0.025557653981868052, + "acc_norm": 0.30246913580246915, + "acc_norm_stderr": 0.025557653981868052 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.2694300518134715, + "acc_stderr": 0.03201867122877795, + "acc_norm": 0.2694300518134715, + "acc_norm_stderr": 0.03201867122877795 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.04185774424022056, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.04185774424022056 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.26238532110091745, + "acc_stderr": 0.018861885021534738, + "acc_norm": 0.26238532110091745, + "acc_norm_stderr": 0.018861885021534738 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.1349206349206349, + "acc_stderr": 0.030557101589417515, + "acc_norm": 0.1349206349206349, + "acc_norm_stderr": 0.030557101589417515 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.25163398692810457, + "acc_stderr": 0.0248480182638752, + "acc_norm": 0.25163398692810457, + "acc_norm_stderr": 0.0248480182638752 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.4049586776859504, + "acc_stderr": 0.04481137755942469, + "acc_norm": 0.4049586776859504, + 
"acc_norm_stderr": 0.04481137755942469 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.2894736842105263, + "acc_stderr": 0.03690677986137283, + "acc_norm": 0.2894736842105263, + "acc_norm_stderr": 0.03690677986137283 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.28921568627450983, + "acc_stderr": 0.018342529845275908, + "acc_norm": 0.28921568627450983, + "acc_norm_stderr": 0.018342529845275908 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2695035460992908, + "acc_stderr": 0.026469036818590634, + "acc_norm": 0.2695035460992908, + "acc_norm_stderr": 0.026469036818590634 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.24107142857142858, + "acc_stderr": 0.04059867246952689, + "acc_norm": 0.24107142857142858, + "acc_norm_stderr": 0.04059867246952689 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3287037037037037, + "acc_stderr": 0.03203614084670058, + "acc_norm": 0.3287037037037037, + "acc_norm_stderr": 0.03203614084670058 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24134078212290502, + "acc_stderr": 0.014310999547961441, + "acc_norm": 0.24134078212290502, + "acc_norm_stderr": 0.014310999547961441 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.025767252010855963, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.025767252010855963 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.22857142857142856, + "acc_stderr": 0.026882144922307744, + "acc_norm": 0.22857142857142856, + "acc_norm_stderr": 0.026882144922307744 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 
0.3291139240506329, + "acc_stderr": 0.03058732629470236, + "acc_norm": 0.3291139240506329, + "acc_norm_stderr": 0.03058732629470236 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.27835723598435463, + "acc_stderr": 0.011446990197380989, + "acc_norm": 0.27835723598435463, + "acc_norm_stderr": 0.011446990197380989 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.030587591351604246, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.030587591351604246 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2545454545454545, + "acc_stderr": 0.0340150671524904, + "acc_norm": 0.2545454545454545, + "acc_norm_stderr": 0.0340150671524904 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2484700122399021, + "mc1_stderr": 0.0151274270965207, + "mc2": 0.3908977745790188, + "mc2_stderr": 0.014711493002685353 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3305785123966942, + "acc_stderr": 0.0161734232988457, + "acc_norm": 0.4604486422668241, + "acc_norm_stderr": 0.01713648762604985 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + 
"harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "The-matt/llama2_ko-7b_distinctive-snowflake-182_1060", + "model_sha": "090368cb655024491c0c4dad13f8ac9a8e7d31cc", + "model_dtype": "torch.float16", + "lighteval_sha": "", + 
"num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/The-matt/llama2_ko-7b_sandy-fire-170_1530/result_2023-11-13 07:13:52.json b/The-matt/llama2_ko-7b_sandy-fire-170_1530/result_2023-11-13 07:13:52.json new file mode 100644 index 0000000000000000000000000000000000000000..e8a7a6c5f78c0d4fe554e6f26bebb629cb531f3e --- /dev/null +++ b/The-matt/llama2_ko-7b_sandy-fire-170_1530/result_2023-11-13 07:13:52.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3165529010238908, + "acc_stderr": 0.013592431519068084, + "acc_norm": 0.3728668941979522, + "acc_norm_stderr": 0.014131176760131165 + }, + "harness|ko_hellaswag|10": { + "acc": 0.37532364070902213, + "acc_stderr": 0.004832167854501651, + "acc_norm": 0.48994224258115915, + "acc_norm_stderr": 0.004988771791854509 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.25146198830409355, + "acc_stderr": 0.033275044238468436, + "acc_norm": 0.25146198830409355, + "acc_norm_stderr": 0.033275044238468436 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2524271844660194, + "acc_stderr": 0.04301250399690878, + "acc_norm": 0.2524271844660194, + "acc_norm_stderr": 0.04301250399690878 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.01685739124747255, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.01685739124747255 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.03944624162501116, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.03944624162501116 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768077, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768077 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.31063829787234043, + "acc_stderr": 0.03025123757921317, + "acc_norm": 0.31063829787234043, + "acc_norm_stderr": 0.03025123757921317 + }, + 
"harness|ko_mmlu_virology|5": { + "acc": 0.2891566265060241, + "acc_stderr": 0.03529486801511115, + "acc_norm": 0.2891566265060241, + "acc_norm_stderr": 0.03529486801511115 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3729903536977492, + "acc_stderr": 0.02746661021314012, + "acc_norm": 0.3729903536977492, + "acc_norm_stderr": 0.02746661021314012 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.273542600896861, + "acc_stderr": 0.029918586707798834, + "acc_norm": 0.273542600896861, + "acc_norm_stderr": 0.029918586707798834 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3435114503816794, + "acc_stderr": 0.041649760719448786, + "acc_norm": 0.3435114503816794, + "acc_norm_stderr": 0.041649760719448786 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3484848484848485, + "acc_stderr": 0.033948539651564025, + "acc_norm": 0.3484848484848485, + "acc_norm_stderr": 0.033948539651564025 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2896551724137931, + "acc_stderr": 0.03780019230438014, + "acc_norm": 0.2896551724137931, + "acc_norm_stderr": 0.03780019230438014 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.13725490196078433, + "acc_stderr": 0.0342408466989152, + "acc_norm": 0.13725490196078433, + "acc_norm_stderr": 0.0342408466989152 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.24789915966386555, + "acc_stderr": 0.028047967224176892, + "acc_norm": 0.24789915966386555, + "acc_norm_stderr": 0.028047967224176892 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.21794871794871795, + "acc_stderr": 0.020932445774463182, + "acc_norm": 0.21794871794871795, + "acc_norm_stderr": 0.020932445774463182 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + 
"acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.25, + "acc_stderr": 0.04186091791394607, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04186091791394607 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3103448275862069, + "acc_stderr": 0.03255086769970103, + "acc_norm": 0.3103448275862069, + "acc_norm_stderr": 0.03255086769970103 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2967741935483871, + "acc_stderr": 0.02598850079241188, + "acc_norm": 0.2967741935483871, + "acc_norm_stderr": 0.02598850079241188 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.3247863247863248, + "acc_stderr": 0.03067902276549883, + "acc_norm": 0.3247863247863248, + "acc_norm_stderr": 0.03067902276549883 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.35471698113207545, + "acc_stderr": 0.029445175328199593, + "acc_norm": 0.35471698113207545, + "acc_norm_stderr": 0.029445175328199593 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.33636363636363636, + "acc_stderr": 0.04525393596302505, + "acc_norm": 0.33636363636363636, + "acc_norm_stderr": 0.04525393596302505 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.02730914058823018, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.02730914058823018 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + "acc_stderr": 0.03684881521389024, + "acc_norm": 0.2847682119205298, + "acc_norm_stderr": 0.03684881521389024 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.25870646766169153, + "acc_stderr": 0.030965903123573033, + "acc_norm": 0.25870646766169153, + "acc_norm_stderr": 0.030965903123573033 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.24855491329479767, + "acc_stderr": 0.03295304696818318, + "acc_norm": 
0.24855491329479767, + "acc_norm_stderr": 0.03295304696818318 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.25132275132275134, + "acc_stderr": 0.022340482339643898, + "acc_norm": 0.25132275132275134, + "acc_norm_stderr": 0.022340482339643898 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.1875, + "acc_stderr": 0.032639560491693344, + "acc_norm": 0.1875, + "acc_norm_stderr": 0.032639560491693344 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.22, + "acc_stderr": 0.0416333199893227, + "acc_norm": 0.22, + "acc_norm_stderr": 0.0416333199893227 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.28901734104046245, + "acc_stderr": 0.02440517393578324, + "acc_norm": 0.28901734104046245, + "acc_norm_stderr": 0.02440517393578324 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.26380368098159507, + "acc_stderr": 0.034624199316156234, + "acc_norm": 0.26380368098159507, + "acc_norm_stderr": 0.034624199316156234 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.31790123456790126, + "acc_stderr": 0.025910063528240865, + "acc_norm": 0.31790123456790126, + "acc_norm_stderr": 0.025910063528240865 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.2694300518134715, + "acc_stderr": 0.03201867122877793, + "acc_norm": 0.2694300518134715, + "acc_norm_stderr": 0.03201867122877793 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.04227054451232199, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.04227054451232199 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3100917431192661, + "acc_stderr": 0.01983084968443975, + "acc_norm": 
0.3100917431192661, + "acc_norm_stderr": 0.01983084968443975 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.1984126984126984, + "acc_stderr": 0.03567016675276863, + "acc_norm": 0.1984126984126984, + "acc_norm_stderr": 0.03567016675276863 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.32679738562091504, + "acc_stderr": 0.026857294663281413, + "acc_norm": 0.32679738562091504, + "acc_norm_stderr": 0.026857294663281413 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.4214876033057851, + "acc_stderr": 0.045077322787750944, + "acc_norm": 0.4214876033057851, + "acc_norm_stderr": 0.045077322787750944 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.28289473684210525, + "acc_stderr": 0.03665349695640767, + "acc_norm": 0.28289473684210525, + "acc_norm_stderr": 0.03665349695640767 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.31209150326797386, + "acc_stderr": 0.01874501120127766, + "acc_norm": 0.31209150326797386, + "acc_norm_stderr": 0.01874501120127766 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.23404255319148937, + "acc_stderr": 0.025257861359432403, + "acc_norm": 0.23404255319148937, + "acc_norm_stderr": 0.025257861359432403 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.33035714285714285, + "acc_stderr": 0.04464285714285712, + "acc_norm": 0.33035714285714285, + "acc_norm_stderr": 0.04464285714285712 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.26851851851851855, + "acc_stderr": 0.030225226160012376, + "acc_norm": 0.26851851851851855, + "acc_norm_stderr": 0.030225226160012376 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 
0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.35661764705882354, + "acc_stderr": 0.02909720956841195, + "acc_norm": 0.35661764705882354, + "acc_norm_stderr": 0.02909720956841195 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.24489795918367346, + "acc_stderr": 0.027529637440174934, + "acc_norm": 0.24489795918367346, + "acc_norm_stderr": 0.027529637440174934 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.29535864978902954, + "acc_stderr": 0.029696338713422882, + "acc_norm": 0.29535864978902954, + "acc_norm_stderr": 0.029696338713422882 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2653194263363755, + "acc_stderr": 0.011276198843958876, + "acc_norm": 0.2653194263363755, + "acc_norm_stderr": 0.011276198843958876 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.25, + "acc_stderr": 0.03039153369274154, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03039153369274154 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.28484848484848485, + "acc_stderr": 0.03524390844511785, + "acc_norm": 0.28484848484848485, + "acc_norm_stderr": 0.03524390844511785 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.25458996328029376, + "mc1_stderr": 0.015250117079156475, + "mc2": 0.3966274374680779, + "mc2_stderr": 0.014846518193358589 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3695395513577332, + "acc_stderr": 0.01659488340568542, + "acc_norm": 0.51357733175915, + "acc_norm_stderr": 0.01718401506040145 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + 
"harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + 
"harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "The-matt/llama2_ko-7b_sandy-fire-170_1530", + "model_sha": "b963fcf8d7249c3f360ccfa5db70c0b20bddeb08", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/The-matt/llama2_ko-7b_stilted-lion-205_1530/result_2023-11-23 01:33:10.json b/The-matt/llama2_ko-7b_stilted-lion-205_1530/result_2023-11-23 01:33:10.json new file mode 100644 index 0000000000000000000000000000000000000000..5f8278b9e10027778090a2d6d397abb73b87fb20 --- /dev/null +++ b/The-matt/llama2_ko-7b_stilted-lion-205_1530/result_2023-11-23 01:33:10.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.31569965870307165, + "acc_stderr": 0.013582571095815291, + "acc_norm": 0.36945392491467577, + "acc_norm_stderr": 0.014104578366491902 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3768173670583549, + "acc_stderr": 0.004835981632401594, + "acc_norm": 0.4987054371639116, + "acc_norm_stderr": 0.004989764686738838 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.034462962170884265, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.034462962170884265 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.27184466019417475, + "acc_stderr": 0.044052680241409216, + "acc_norm": 0.27184466019417475, + "acc_norm_stderr": 0.044052680241409216 + }, + 
"harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3128991060025543, + "acc_stderr": 0.016580935940304055, + "acc_norm": 0.3128991060025543, + "acc_norm_stderr": 0.016580935940304055 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3037037037037037, + "acc_stderr": 0.039725528847851375, + "acc_norm": 0.3037037037037037, + "acc_norm_stderr": 0.039725528847851375 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2425531914893617, + "acc_stderr": 0.02802022627120022, + "acc_norm": 0.2425531914893617, + "acc_norm_stderr": 0.02802022627120022 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.35542168674698793, + "acc_stderr": 0.03726214354322415, + "acc_norm": 0.35542168674698793, + "acc_norm_stderr": 0.03726214354322415 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3247588424437299, + "acc_stderr": 0.026596782287697046, + "acc_norm": 0.3247588424437299, + "acc_norm_stderr": 0.026596782287697046 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.2914798206278027, + "acc_stderr": 0.030500283176545906, + "acc_norm": 0.2914798206278027, + "acc_norm_stderr": 0.030500283176545906 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3816793893129771, + "acc_stderr": 0.0426073515764456, + "acc_norm": 0.3816793893129771, + "acc_norm_stderr": 0.0426073515764456 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3838383838383838, + "acc_stderr": 0.03464881675016338, + "acc_norm": 0.3838383838383838, + "acc_norm_stderr": 0.03464881675016338 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.32413793103448274, + "acc_stderr": 0.03900432069185555, + "acc_norm": 0.32413793103448274, + "acc_norm_stderr": 0.03900432069185555 + }, 
+ "harness|ko_mmlu_college_physics|5": { + "acc": 0.17647058823529413, + "acc_stderr": 0.03793281185307811, + "acc_norm": 0.17647058823529413, + "acc_norm_stderr": 0.03793281185307811 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.2605042016806723, + "acc_stderr": 0.028510251512341947, + "acc_norm": 0.2605042016806723, + "acc_norm_stderr": 0.028510251512341947 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.22564102564102564, + "acc_stderr": 0.021193632525148522, + "acc_norm": 0.22564102564102564, + "acc_norm_stderr": 0.021193632525148522 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.25, + "acc_stderr": 0.04186091791394607, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04186091791394607 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3251231527093596, + "acc_stderr": 0.032957975663112704, + "acc_norm": 0.3251231527093596, + "acc_norm_stderr": 0.032957975663112704 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3064516129032258, + "acc_stderr": 0.02622648565255388, + "acc_norm": 0.3064516129032258, + "acc_norm_stderr": 0.02622648565255388 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.3418803418803419, + "acc_stderr": 0.031075028526507755, + "acc_norm": 0.3418803418803419, + "acc_norm_stderr": 0.031075028526507755 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3320754716981132, + "acc_stderr": 0.028985455652334395, + "acc_norm": 0.3320754716981132, + "acc_norm_stderr": 0.028985455652334395 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.24545454545454545, + "acc_stderr": 0.04122066502878284, + "acc_norm": 0.24545454545454545, + "acc_norm_stderr": 
0.04122066502878284 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.026719240783712166, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.026719240783712166 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + "acc_stderr": 0.037345356767871984, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.037345356767871984 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.36318407960199006, + "acc_stderr": 0.034005985055990146, + "acc_norm": 0.36318407960199006, + "acc_norm_stderr": 0.034005985055990146 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.26011560693641617, + "acc_stderr": 0.03345036916788989, + "acc_norm": 0.26011560693641617, + "acc_norm_stderr": 0.03345036916788989 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.25396825396825395, + "acc_stderr": 0.022418042891113946, + "acc_norm": 0.25396825396825395, + "acc_norm_stderr": 0.022418042891113946 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2569444444444444, + "acc_stderr": 0.03653946969442099, + "acc_norm": 0.2569444444444444, + "acc_norm_stderr": 0.03653946969442099 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252603, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252603 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2745664739884393, + "acc_stderr": 0.024027745155265016, + "acc_norm": 0.2745664739884393, + "acc_norm_stderr": 0.024027745155265016 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.26380368098159507, + "acc_stderr": 0.03462419931615625, + "acc_norm": 0.26380368098159507, + "acc_norm_stderr": 0.03462419931615625 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2654320987654321, + "acc_stderr": 0.024569223600460845, + "acc_norm": 
0.2654320987654321, + "acc_norm_stderr": 0.024569223600460845 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.3005181347150259, + "acc_stderr": 0.033088185944157494, + "acc_norm": 0.3005181347150259, + "acc_norm_stderr": 0.033088185944157494 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.04185774424022056, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.04185774424022056 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3467889908256881, + "acc_stderr": 0.020406097104093027, + "acc_norm": 0.3467889908256881, + "acc_norm_stderr": 0.020406097104093027 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.1746031746031746, + "acc_stderr": 0.033954900208561116, + "acc_norm": 0.1746031746031746, + "acc_norm_stderr": 0.033954900208561116 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.35947712418300654, + "acc_stderr": 0.027475969910660952, + "acc_norm": 0.35947712418300654, + "acc_norm_stderr": 0.027475969910660952 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.3884297520661157, + "acc_stderr": 0.044492703500683815, + "acc_norm": 0.3884297520661157, + "acc_norm_stderr": 0.044492703500683815 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3881578947368421, + "acc_stderr": 0.03965842097512744, + "acc_norm": 0.3881578947368421, + "acc_norm_stderr": 0.03965842097512744 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.01716058723504635, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.01716058723504635 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.24822695035460993, 
+ "acc_stderr": 0.025770015644290403, + "acc_norm": 0.24822695035460993, + "acc_norm_stderr": 0.025770015644290403 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.30357142857142855, + "acc_stderr": 0.04364226155841044, + "acc_norm": 0.30357142857142855, + "acc_norm_stderr": 0.04364226155841044 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.033247089118091176, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.033247089118091176 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4264705882352941, + "acc_stderr": 0.03004261583271486, + "acc_norm": 0.4264705882352941, + "acc_norm_stderr": 0.03004261583271486 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.39591836734693875, + "acc_stderr": 0.03130802899065686, + "acc_norm": 0.39591836734693875, + "acc_norm_stderr": 0.03130802899065686 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.31223628691983124, + "acc_stderr": 0.03016513786784701, + "acc_norm": 0.31223628691983124, + "acc_norm_stderr": 0.03016513786784701 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2757496740547588, + "acc_stderr": 0.011413813609161005, + "acc_norm": 0.2757496740547588, + "acc_norm_stderr": 0.011413813609161005 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.03198001660115073, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.03198001660115073 + 
}, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.30303030303030304, + "acc_stderr": 0.035886248000917075, + "acc_norm": 0.30303030303030304, + "acc_norm_stderr": 0.035886248000917075 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24969400244798043, + "mc1_stderr": 0.015152286907148125, + "mc2": 0.3905558403820087, + "mc2_stderr": 0.014722115029998253 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.33766233766233766, + "acc_stderr": 0.01625907578475496, + "acc_norm": 0.4639905548996458, + "acc_norm_stderr": 0.017145715365486654 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + 
"harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "The-matt/llama2_ko-7b_stilted-lion-205_1530", + "model_sha": "948480784c612e413d857c89d2a343b32c704498", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/TheBloke/Llama-2-13B-fp16/result_2023-09-27 04:58:32.json b/TheBloke/Llama-2-13B-fp16/result_2023-09-27 04:58:32.json new file mode 100644 index 0000000000000000000000000000000000000000..f24caff54f7f326da7a536d77030aded372e3f46 --- /dev/null +++ b/TheBloke/Llama-2-13B-fp16/result_2023-09-27 04:58:32.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 
0.3191126279863481, + "acc_stderr": 0.013621696119173302, + "acc_norm": 0.37627986348122866, + "acc_norm_stderr": 0.014157022555407166 + }, + "harness|ko_hellaswag|10": { + "acc": 0.36138219478191597, + "acc_stderr": 0.004794191785967945, + "acc_norm": 0.46614220274845647, + "acc_norm_stderr": 0.004978328190775522 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.40350877192982454, + "acc_stderr": 0.03762738699917055, + "acc_norm": 0.40350877192982454, + "acc_norm_stderr": 0.03762738699917055 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5145631067961165, + "acc_stderr": 0.04948637324026637, + "acc_norm": 0.5145631067961165, + "acc_norm_stderr": 0.04948637324026637 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4789272030651341, + "acc_stderr": 0.0178640767862129, + "acc_norm": 0.4789272030651341, + "acc_norm_stderr": 0.0178640767862129 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.04171654161354543, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.04171654161354543 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39148936170212767, + "acc_stderr": 0.03190701242326812, + "acc_norm": 0.39148936170212767, + "acc_norm_stderr": 0.03190701242326812 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39759036144578314, + "acc_stderr": 0.038099730845402184, + "acc_norm": 0.39759036144578314, + "acc_norm_stderr": 0.038099730845402184 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4437299035369775, + "acc_stderr": 0.02821768355665232, + "acc_norm": 0.4437299035369775, + "acc_norm_stderr": 0.02821768355665232 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3721973094170404, + "acc_stderr": 0.03244305283008731, + "acc_norm": 0.3721973094170404, + "acc_norm_stderr": 0.03244305283008731 + }, + "harness|ko_mmlu_human_sexuality|5": { + 
"acc": 0.5419847328244275, + "acc_stderr": 0.04369802690578756, + "acc_norm": 0.5419847328244275, + "acc_norm_stderr": 0.04369802690578756 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.51010101010101, + "acc_stderr": 0.035616254886737454, + "acc_norm": 0.51010101010101, + "acc_norm_stderr": 0.035616254886737454 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4413793103448276, + "acc_stderr": 0.04137931034482758, + "acc_norm": 0.4413793103448276, + "acc_norm_stderr": 0.04137931034482758 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237655, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237655 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.453781512605042, + "acc_stderr": 0.03233943468182088, + "acc_norm": 0.453781512605042, + "acc_norm_stderr": 0.03233943468182088 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3974358974358974, + "acc_stderr": 0.024811920017903836, + "acc_norm": 0.3974358974358974, + "acc_norm_stderr": 0.024811920017903836 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.47, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.47, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5185185185185185, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.5185185185185185, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.35467980295566504, + "acc_stderr": 0.03366124489051449, + "acc_norm": 0.35467980295566504, + "acc_norm_stderr": 0.03366124489051449 + }, + 
"harness|ko_mmlu_high_school_biology|5": { + "acc": 0.47419354838709676, + "acc_stderr": 0.028406095057653315, + "acc_norm": 0.47419354838709676, + "acc_norm_stderr": 0.028406095057653315 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5897435897435898, + "acc_stderr": 0.03222414045241107, + "acc_norm": 0.5897435897435898, + "acc_norm_stderr": 0.03222414045241107 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3849056603773585, + "acc_stderr": 0.029946498567699945, + "acc_norm": 0.3849056603773585, + "acc_norm_stderr": 0.029946498567699945 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4818181818181818, + "acc_stderr": 0.04785964010794916, + "acc_norm": 0.4818181818181818, + "acc_norm_stderr": 0.04785964010794916 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.23703703703703705, + "acc_stderr": 0.02592887613276611, + "acc_norm": 0.23703703703703705, + "acc_norm_stderr": 0.02592887613276611 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + "acc_stderr": 0.037345356767871984, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.037345356767871984 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5422885572139303, + "acc_stderr": 0.035228658640995975, + "acc_norm": 0.5422885572139303, + "acc_norm_stderr": 0.035228658640995975 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.37572254335260113, + "acc_stderr": 0.036928207672648664, + "acc_norm": 0.37572254335260113, + "acc_norm_stderr": 0.036928207672648664 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2830687830687831, + "acc_stderr": 0.023201392938194978, + "acc_norm": 0.2830687830687831, + "acc_norm_stderr": 0.023201392938194978 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2986111111111111, + "acc_stderr": 0.03827052357950756, + "acc_norm": 0.2986111111111111, + "acc_norm_stderr": 0.03827052357950756 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + 
"acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4595375722543353, + "acc_stderr": 0.02683080599895224, + "acc_norm": 0.4595375722543353, + "acc_norm_stderr": 0.02683080599895224 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3619631901840491, + "acc_stderr": 0.037757007291414416, + "acc_norm": 0.3619631901840491, + "acc_norm_stderr": 0.037757007291414416 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4012345679012346, + "acc_stderr": 0.0272725828498398, + "acc_norm": 0.4012345679012346, + "acc_norm_stderr": 0.0272725828498398 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.49740932642487046, + "acc_stderr": 0.03608390745384486, + "acc_norm": 0.49740932642487046, + "acc_norm_stderr": 0.03608390745384486 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04434600701584926, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04434600701584926 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.44770642201834865, + "acc_stderr": 0.021319754962425462, + "acc_norm": 0.44770642201834865, + "acc_norm_stderr": 0.021319754962425462 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.373015873015873, + "acc_stderr": 0.04325506042017086, + "acc_norm": 0.373015873015873, + "acc_norm_stderr": 0.04325506042017086 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.028431095444176643, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.028431095444176643 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145633, + 
"acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5702479338842975, + "acc_stderr": 0.04519082021319774, + "acc_norm": 0.5702479338842975, + "acc_norm_stderr": 0.04519082021319774 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.48026315789473684, + "acc_stderr": 0.040657710025626036, + "acc_norm": 0.48026315789473684, + "acc_norm_stderr": 0.040657710025626036 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.31699346405228757, + "acc_stderr": 0.018824219512706207, + "acc_norm": 0.31699346405228757, + "acc_norm_stderr": 0.018824219512706207 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.30851063829787234, + "acc_stderr": 0.027553366165101373, + "acc_norm": 0.30851063829787234, + "acc_norm_stderr": 0.027553366165101373 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.22321428571428573, + "acc_stderr": 0.039523019677025116, + "acc_norm": 0.22321428571428573, + "acc_norm_stderr": 0.039523019677025116 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.03324708911809117, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.03324708911809117 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.375, + "acc_stderr": 0.029408372932278746, + "acc_norm": 0.375, + "acc_norm_stderr": 0.029408372932278746 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.43673469387755104, + 
"acc_stderr": 0.031751952375833226, + "acc_norm": 0.43673469387755104, + "acc_norm_stderr": 0.031751952375833226 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.39662447257383965, + "acc_stderr": 0.03184399873811226, + "acc_norm": 0.39662447257383965, + "acc_norm_stderr": 0.03184399873811226 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.31681877444589307, + "acc_stderr": 0.011882349954723015, + "acc_norm": 0.31681877444589307, + "acc_norm_stderr": 0.011882349954723015 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4019607843137255, + "acc_stderr": 0.03441190023482466, + "acc_norm": 0.4019607843137255, + "acc_norm_stderr": 0.03441190023482466 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.42424242424242425, + "acc_stderr": 0.038592681420702615, + "acc_norm": 0.42424242424242425, + "acc_norm_stderr": 0.038592681420702615 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24357405140758873, + "mc1_stderr": 0.015026354824910782, + "mc2": 0.41395274449910313, + "mc2_stderr": 0.015033140507060082 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3825265643447462, + "acc_stderr": 0.016709165387228806, + "acc_norm": 0.4781582054309327, + "acc_norm_stderr": 0.017173944474294378 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + 
"harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + 
"model_name": "TheBloke/Llama-2-13B-fp16", + "model_sha": "b2e65e8ad4bb35e5abaee0170ebd5fc2134a50bb", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/TinyLlama/TinyLlama-1.1B-intermediate-step-1431k-3T/result_2024-01-02 11:19:35.json b/TinyLlama/TinyLlama-1.1B-intermediate-step-1431k-3T/result_2024-01-02 11:19:35.json new file mode 100644 index 0000000000000000000000000000000000000000..cb9674076ec1bcc92b1070c3b321588318bf9976 --- /dev/null +++ b/TinyLlama/TinyLlama-1.1B-intermediate-step-1431k-3T/result_2024-01-02 11:19:35.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.21331058020477817, + "acc_stderr": 0.011970971742326334, + "acc_norm": 0.2568259385665529, + "acc_norm_stderr": 0.0127669237941168 + }, + "harness|ko_hellaswag|10": { + "acc": 0.29286994622585144, + "acc_stderr": 0.0045414921516392275, + "acc_norm": 0.3255327623979287, + "acc_norm_stderr": 0.00467615929910542 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.19298245614035087, + "acc_stderr": 0.030267457554898458, + "acc_norm": 0.19298245614035087, + "acc_norm_stderr": 0.030267457554898458 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.3106796116504854, + "acc_stderr": 0.04582124160161551, + "acc_norm": 0.3106796116504854, + "acc_norm_stderr": 0.04582124160161551 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2388250319284802, + "acc_stderr": 0.015246803197398682, + "acc_norm": 0.2388250319284802, + "acc_norm_stderr": 0.015246803197398682 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.24444444444444444, + "acc_stderr": 0.037125378336148665, + "acc_norm": 0.24444444444444444, + "acc_norm_stderr": 0.037125378336148665 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421255, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421255 + }, 
+ "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2936170212765957, + "acc_stderr": 0.029771642712491234, + "acc_norm": 0.2936170212765957, + "acc_norm_stderr": 0.029771642712491234 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3253012048192771, + "acc_stderr": 0.03647168523683227, + "acc_norm": 0.3253012048192771, + "acc_norm_stderr": 0.03647168523683227 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2829581993569132, + "acc_stderr": 0.02558306248998483, + "acc_norm": 0.2829581993569132, + "acc_norm_stderr": 0.02558306248998483 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.33183856502242154, + "acc_stderr": 0.03160295143776679, + "acc_norm": 0.33183856502242154, + "acc_norm_stderr": 0.03160295143776679 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.22900763358778625, + "acc_stderr": 0.036853466317118506, + "acc_norm": 0.22900763358778625, + "acc_norm_stderr": 0.036853466317118506 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.2727272727272727, + "acc_stderr": 0.03173071239071724, + "acc_norm": 0.2727272727272727, + "acc_norm_stderr": 0.03173071239071724 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2827586206896552, + "acc_stderr": 0.03752833958003337, + "acc_norm": 0.2827586206896552, + "acc_norm_stderr": 0.03752833958003337 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.19607843137254902, + "acc_stderr": 0.03950581861179963, + "acc_norm": 0.19607843137254902, + "acc_norm_stderr": 0.03950581861179963 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.25210084033613445, + "acc_stderr": 0.02820554503327772, + "acc_norm": 0.25210084033613445, + "acc_norm_stderr": 0.02820554503327772 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2948717948717949, + "acc_stderr": 0.02311936275823228, + "acc_norm": 
0.2948717948717949, + "acc_norm_stderr": 0.02311936275823228 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.18, + "acc_stderr": 0.03861229196653697, + "acc_norm": 0.18, + "acc_norm_stderr": 0.03861229196653697 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.24074074074074073, + "acc_stderr": 0.04133119440243839, + "acc_norm": 0.24074074074074073, + "acc_norm_stderr": 0.04133119440243839 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.28078817733990147, + "acc_stderr": 0.03161856335358608, + "acc_norm": 0.28078817733990147, + "acc_norm_stderr": 0.03161856335358608 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3096774193548387, + "acc_stderr": 0.026302774983517418, + "acc_norm": 0.3096774193548387, + "acc_norm_stderr": 0.026302774983517418 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.23504273504273504, + "acc_stderr": 0.02777883590493543, + "acc_norm": 0.23504273504273504, + "acc_norm_stderr": 0.02777883590493543 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2792452830188679, + "acc_stderr": 0.027611163402399715, + "acc_norm": 0.2792452830188679, + "acc_norm_stderr": 0.027611163402399715 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.3090909090909091, + "acc_stderr": 0.044262946482000985, + "acc_norm": 0.3090909090909091, + "acc_norm_stderr": 0.044262946482000985 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.23703703703703705, + "acc_stderr": 0.025928876132766128, + "acc_norm": 0.23703703703703705, + "acc_norm_stderr": 0.025928876132766128 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33774834437086093, + "acc_stderr": 0.038615575462551684, + "acc_norm": 0.33774834437086093, + "acc_norm_stderr": 0.038615575462551684 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.23383084577114427, + "acc_stderr": 
0.02992941540834839, + "acc_norm": 0.23383084577114427, + "acc_norm_stderr": 0.02992941540834839 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.23699421965317918, + "acc_stderr": 0.03242414757483099, + "acc_norm": 0.23699421965317918, + "acc_norm_stderr": 0.03242414757483099 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2566137566137566, + "acc_stderr": 0.022494510767503154, + "acc_norm": 0.2566137566137566, + "acc_norm_stderr": 0.022494510767503154 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.034765901043041336, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.034765901043041336 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816505, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816505 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.24277456647398843, + "acc_stderr": 0.0230836585869842, + "acc_norm": 0.24277456647398843, + "acc_norm_stderr": 0.0230836585869842 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2822085889570552, + "acc_stderr": 0.03536117886664743, + "acc_norm": 0.2822085889570552, + "acc_norm_stderr": 0.03536117886664743 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2654320987654321, + "acc_stderr": 0.02456922360046085, + "acc_norm": 0.2654320987654321, + "acc_norm_stderr": 0.02456922360046085 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.047609522856952344, + "acc_norm": 0.34, + "acc_norm_stderr": 0.047609522856952344 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.20725388601036268, + "acc_stderr": 0.02925282329180363, + "acc_norm": 0.20725388601036268, + "acc_norm_stderr": 0.02925282329180363 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.21052631578947367, + 
"acc_stderr": 0.03835153954399419, + "acc_norm": 0.21052631578947367, + "acc_norm_stderr": 0.03835153954399419 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.26422018348623855, + "acc_stderr": 0.018904164171510186, + "acc_norm": 0.26422018348623855, + "acc_norm_stderr": 0.018904164171510186 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.23015873015873015, + "acc_stderr": 0.037649508797906066, + "acc_norm": 0.23015873015873015, + "acc_norm_stderr": 0.037649508797906066 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2875816993464052, + "acc_stderr": 0.02591780611714716, + "acc_norm": 0.2875816993464052, + "acc_norm_stderr": 0.02591780611714716 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.22, + "acc_stderr": 0.0416333199893227, + "acc_norm": 0.22, + "acc_norm_stderr": 0.0416333199893227 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.30578512396694213, + "acc_stderr": 0.04205953933884122, + "acc_norm": 0.30578512396694213, + "acc_norm_stderr": 0.04205953933884122 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3026315789473684, + "acc_stderr": 0.0373852067611967, + "acc_norm": 0.3026315789473684, + "acc_norm_stderr": 0.0373852067611967 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.016906615927288142, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.016906615927288142 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.26595744680851063, + "acc_stderr": 0.026358065698880592, + "acc_norm": 0.26595744680851063, + "acc_norm_stderr": 0.026358065698880592 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.21428571428571427, + "acc_stderr": 0.03894641120044792, + "acc_norm": 0.21428571428571427, + "acc_norm_stderr": 0.03894641120044792 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.0340470532865388, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.0340470532865388 + }, + 
"harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2569832402234637, + "acc_stderr": 0.014614465821966358, + "acc_norm": 0.2569832402234637, + "acc_norm_stderr": 0.014614465821966358 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932269, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932269 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4117647058823529, + "acc_stderr": 0.02989616303312547, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.02989616303312547 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.17142857142857143, + "acc_stderr": 0.02412746346265014, + "acc_norm": 0.17142857142857143, + "acc_norm_stderr": 0.02412746346265014 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.25738396624472576, + "acc_stderr": 0.028458820991460302, + "acc_norm": 0.25738396624472576, + "acc_norm_stderr": 0.028458820991460302 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.23402868318122555, + "acc_stderr": 0.0108135855526597, + "acc_norm": 0.23402868318122555, + "acc_norm_stderr": 0.0108135855526597 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.24019607843137256, + "acc_stderr": 0.02998373305591361, + "acc_norm": 0.24019607843137256, + "acc_norm_stderr": 0.02998373305591361 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.24242424242424243, + "acc_stderr": 0.033464098810559534, + "acc_norm": 0.24242424242424243, + "acc_norm_stderr": 0.033464098810559534 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3182374541003672, + "mc1_stderr": 0.016305988648920612, + "mc2": 0.48985625400067373, + "mc2_stderr": 0.01602317028922327 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2396694214876033, + "acc_stderr": 0.014676495332267253, + "acc_norm": 
0.2680047225501771, + "acc_norm_stderr": 0.015227905796335145 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + 
"harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "TinyLlama/TinyLlama-1.1B-intermediate-step-1431k-3T", + "model_sha": "4b8dd7e43ec08c24ccaf89cbf67898cff53c95ae", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/TinyLlama/TinyLlama-1.1B-intermediate-step-715k-1.5T/result_2023-11-16 08:16:14.json b/TinyLlama/TinyLlama-1.1B-intermediate-step-715k-1.5T/result_2023-11-16 08:16:14.json new file mode 100644 index 0000000000000000000000000000000000000000..7e5ad50ce5571df5dbca09da85705b2c285b905e --- /dev/null +++ b/TinyLlama/TinyLlama-1.1B-intermediate-step-715k-1.5T/result_2023-11-16 08:16:14.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2167235494880546, + "acc_stderr": 0.012040156713481192, + "acc_norm": 0.25597269624573377, + "acc_norm_stderr": 0.012753013241244513 + }, + "harness|ko_hellaswag|10": { + "acc": 0.2819159529974109, + "acc_stderr": 0.004490130691020431, + "acc_norm": 0.3150766779525991, + "acc_norm_stderr": 0.004635970060392421 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.2046783625730994, 
+ "acc_stderr": 0.03094445977853321, + "acc_norm": 0.2046783625730994, + "acc_norm_stderr": 0.03094445977853321 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.18446601941747573, + "acc_stderr": 0.03840423627288276, + "acc_norm": 0.18446601941747573, + "acc_norm_stderr": 0.03840423627288276 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.26436781609195403, + "acc_stderr": 0.01576998484069053, + "acc_norm": 0.26436781609195403, + "acc_norm_stderr": 0.01576998484069053 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.1925925925925926, + "acc_stderr": 0.03406542058502652, + "acc_norm": 0.1925925925925926, + "acc_norm_stderr": 0.03406542058502652 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.22, + "acc_stderr": 0.041633319989322695, + "acc_norm": 0.22, + "acc_norm_stderr": 0.041633319989322695 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2851063829787234, + "acc_stderr": 0.029513196625539355, + "acc_norm": 0.2851063829787234, + "acc_norm_stderr": 0.029513196625539355 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.35542168674698793, + "acc_stderr": 0.03726214354322416, + "acc_norm": 0.35542168674698793, + "acc_norm_stderr": 0.03726214354322416 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2829581993569132, + "acc_stderr": 0.025583062489984838, + "acc_norm": 0.2829581993569132, + "acc_norm_stderr": 0.025583062489984838 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.33183856502242154, + "acc_stderr": 0.031602951437766785, + "acc_norm": 0.33183856502242154, + "acc_norm_stderr": 0.031602951437766785 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.25190839694656486, + "acc_stderr": 0.03807387116306086, + "acc_norm": 0.25190839694656486, + "acc_norm_stderr": 0.03807387116306086 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720683, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720683 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.1919191919191919, 
+ "acc_stderr": 0.02805779167298901, + "acc_norm": 0.1919191919191919, + "acc_norm_stderr": 0.02805779167298901 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.22758620689655173, + "acc_stderr": 0.03493950380131184, + "acc_norm": 0.22758620689655173, + "acc_norm_stderr": 0.03493950380131184 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.19607843137254902, + "acc_stderr": 0.0395058186117996, + "acc_norm": 0.19607843137254902, + "acc_norm_stderr": 0.0395058186117996 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.23109243697478993, + "acc_stderr": 0.02738140692786898, + "acc_norm": 0.23109243697478993, + "acc_norm_stderr": 0.02738140692786898 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2205128205128205, + "acc_stderr": 0.021020672680827912, + "acc_norm": 0.2205128205128205, + "acc_norm_stderr": 0.021020672680827912 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036845, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036845 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.18, + "acc_stderr": 0.038612291966536955, + "acc_norm": 0.18, + "acc_norm_stderr": 0.038612291966536955 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.25, + "acc_stderr": 0.04186091791394607, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04186091791394607 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.16748768472906403, + "acc_stderr": 0.026273086047535414, + "acc_norm": 0.16748768472906403, + "acc_norm_stderr": 0.026273086047535414 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.20967741935483872, + "acc_stderr": 0.02315787934908353, + "acc_norm": 0.20967741935483872, + "acc_norm_stderr": 0.02315787934908353 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2905982905982906, + "acc_stderr": 0.02974504857267406, + "acc_norm": 0.2905982905982906, + "acc_norm_stderr": 0.02974504857267406 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 
0.2, + "acc_stderr": 0.02461829819586651, + "acc_norm": 0.2, + "acc_norm_stderr": 0.02461829819586651 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03955932861795833, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03955932861795833 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.23333333333333334, + "acc_stderr": 0.025787874220959312, + "acc_norm": 0.23333333333333334, + "acc_norm_stderr": 0.025787874220959312 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.23178807947019867, + "acc_stderr": 0.034454062719870546, + "acc_norm": 0.23178807947019867, + "acc_norm_stderr": 0.034454062719870546 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.21393034825870647, + "acc_stderr": 0.028996909693328903, + "acc_norm": 0.21393034825870647, + "acc_norm_stderr": 0.028996909693328903 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.1791907514450867, + "acc_stderr": 0.02924251305906329, + "acc_norm": 0.1791907514450867, + "acc_norm_stderr": 0.02924251305906329 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.22486772486772486, + "acc_stderr": 0.021502096078229147, + "acc_norm": 0.22486772486772486, + "acc_norm_stderr": 0.021502096078229147 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2361111111111111, + "acc_stderr": 0.03551446610810826, + "acc_norm": 0.2361111111111111, + "acc_norm_stderr": 0.03551446610810826 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.23, + "acc_stderr": 0.042295258468165044, + "acc_norm": 0.23, + "acc_norm_stderr": 0.042295258468165044 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.28034682080924855, + "acc_stderr": 0.024182427496577615, + "acc_norm": 0.28034682080924855, + "acc_norm_stderr": 0.024182427496577615 + }, + 
"harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2331288343558282, + "acc_stderr": 0.03322015795776742, + "acc_norm": 0.2331288343558282, + "acc_norm_stderr": 0.03322015795776742 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.23765432098765432, + "acc_stderr": 0.023683591837008553, + "acc_norm": 0.23765432098765432, + "acc_norm_stderr": 0.023683591837008553 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.23316062176165803, + "acc_stderr": 0.030516111371476008, + "acc_norm": 0.23316062176165803, + "acc_norm_stderr": 0.030516111371476008 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.040969851398436716, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.040969851398436716 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.1908256880733945, + "acc_stderr": 0.016847676400091115, + "acc_norm": 0.1908256880733945, + "acc_norm_stderr": 0.016847676400091115 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.1746031746031746, + "acc_stderr": 0.0339549002085611, + "acc_norm": 0.1746031746031746, + "acc_norm_stderr": 0.0339549002085611 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.24183006535947713, + "acc_stderr": 0.024518195641879334, + "acc_norm": 0.24183006535947713, + "acc_norm_stderr": 0.024518195641879334 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2644628099173554, + "acc_stderr": 0.040261875275912073, + "acc_norm": 0.2644628099173554, + "acc_norm_stderr": 0.040261875275912073 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.19736842105263158, + "acc_stderr": 0.03238981601699397, + "acc_norm": 0.19736842105263158, + 
"acc_norm_stderr": 0.03238981601699397 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.24183006535947713, + "acc_stderr": 0.017322789207784326, + "acc_norm": 0.24183006535947713, + "acc_norm_stderr": 0.017322789207784326 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.23404255319148937, + "acc_stderr": 0.025257861359432403, + "acc_norm": 0.23404255319148937, + "acc_norm_stderr": 0.025257861359432403 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.32142857142857145, + "acc_stderr": 0.04432804055291519, + "acc_norm": 0.32142857142857145, + "acc_norm_stderr": 0.04432804055291519 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2175925925925926, + "acc_stderr": 0.028139689444859655, + "acc_norm": 0.2175925925925926, + "acc_norm_stderr": 0.028139689444859655 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2335195530726257, + "acc_stderr": 0.014149575348976266, + "acc_norm": 0.2335195530726257, + "acc_norm_stderr": 0.014149575348976266 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.19852941176470587, + "acc_stderr": 0.02423101337054111, + "acc_norm": 0.19852941176470587, + "acc_norm_stderr": 0.02423101337054111 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.17959183673469387, + "acc_stderr": 0.024573293589585637, + "acc_norm": 0.17959183673469387, + "acc_norm_stderr": 0.024573293589585637 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2742616033755274, + "acc_stderr": 0.029041333510598035, + "acc_norm": 0.2742616033755274, + "acc_norm_stderr": 0.029041333510598035 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 
0.23533246414602346, + "acc_stderr": 0.010834432543912224, + "acc_norm": 0.23533246414602346, + "acc_norm_stderr": 0.010834432543912224 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.25980392156862747, + "acc_stderr": 0.03077855467869326, + "acc_norm": 0.25980392156862747, + "acc_norm_stderr": 0.03077855467869326 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.24242424242424243, + "acc_stderr": 0.03346409881055953, + "acc_norm": 0.24242424242424243, + "acc_norm_stderr": 0.03346409881055953 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3108935128518972, + "mc1_stderr": 0.016203316673559693, + "mc2": 0.5030438206753587, + "mc2_stderr": 0.016137949960889377 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.24203069657615112, + "acc_stderr": 0.014725696750525331, + "acc_norm": 0.3105076741440378, + "acc_norm_stderr": 0.01590800452876203 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + 
"harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "TinyLlama/TinyLlama-1.1B-intermediate-step-715k-1.5T", + "model_sha": "314e0f65d90384e224ac8d7c0b228a661a06673f", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git 
a/TinyPixel/Llama-2-7B-bf16-sharded/result_2023-12-26 01:06:07.json b/TinyPixel/Llama-2-7B-bf16-sharded/result_2023-12-26 01:06:07.json new file mode 100644 index 0000000000000000000000000000000000000000..c01e29366a36a3c81688733ef99f47fbed125cba --- /dev/null +++ b/TinyPixel/Llama-2-7B-bf16-sharded/result_2023-12-26 01:06:07.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2713310580204778, + "acc_stderr": 0.012993807727545789, + "acc_norm": 0.3097269624573379, + "acc_norm_stderr": 0.013512058415238361 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3331009759012149, + "acc_stderr": 0.004703590558552501, + "acc_norm": 0.41127265484963155, + "acc_norm_stderr": 0.004910588449330016 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4853801169590643, + "acc_stderr": 0.038331852752130205, + "acc_norm": 0.4853801169590643, + "acc_norm_stderr": 0.038331852752130205 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.30097087378640774, + "acc_stderr": 0.04541609446503947, + "acc_norm": 0.30097087378640774, + "acc_norm_stderr": 0.04541609446503947 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.017268607560005773, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.017268607560005773 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3037037037037037, + "acc_stderr": 0.03972552884785138, + "acc_norm": 0.3037037037037037, + "acc_norm_stderr": 0.03972552884785138 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.35319148936170214, + "acc_stderr": 0.031245325202761923, + "acc_norm": 0.35319148936170214, + "acc_norm_stderr": 0.031245325202761923 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.25903614457831325, + "acc_stderr": 0.03410646614071857, + "acc_norm": 0.25903614457831325, + "acc_norm_stderr": 
0.03410646614071857 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3279742765273312, + "acc_stderr": 0.02666441088693762, + "acc_norm": 0.3279742765273312, + "acc_norm_stderr": 0.02666441088693762 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3542600896860987, + "acc_stderr": 0.032100621541349864, + "acc_norm": 0.3542600896860987, + "acc_norm_stderr": 0.032100621541349864 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.366412213740458, + "acc_stderr": 0.04225875451969638, + "acc_norm": 0.366412213740458, + "acc_norm_stderr": 0.04225875451969638 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3282828282828283, + "acc_stderr": 0.03345678422756777, + "acc_norm": 0.3282828282828283, + "acc_norm_stderr": 0.03345678422756777 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.36551724137931035, + "acc_stderr": 0.04013124195424387, + "acc_norm": 0.36551724137931035, + "acc_norm_stderr": 0.04013124195424387 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.16666666666666666, + "acc_stderr": 0.03708284662416545, + "acc_norm": 0.16666666666666666, + "acc_norm_stderr": 0.03708284662416545 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3235294117647059, + "acc_stderr": 0.030388353551886845, + "acc_norm": 0.3235294117647059, + "acc_norm_stderr": 0.030388353551886845 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.30256410256410254, + "acc_stderr": 0.023290888053772725, + "acc_norm": 0.30256410256410254, + "acc_norm_stderr": 0.023290888053772725 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + 
"acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.39814814814814814, + "acc_stderr": 0.04732332615978814, + "acc_norm": 0.39814814814814814, + "acc_norm_stderr": 0.04732332615978814 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.33497536945812806, + "acc_stderr": 0.033208527423483104, + "acc_norm": 0.33497536945812806, + "acc_norm_stderr": 0.033208527423483104 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3258064516129032, + "acc_stderr": 0.026662010578567104, + "acc_norm": 0.3258064516129032, + "acc_norm_stderr": 0.026662010578567104 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.49145299145299143, + "acc_stderr": 0.032751303000970296, + "acc_norm": 0.49145299145299143, + "acc_norm_stderr": 0.032751303000970296 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.30566037735849055, + "acc_stderr": 0.02835329807332267, + "acc_norm": 0.30566037735849055, + "acc_norm_stderr": 0.02835329807332267 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.35454545454545455, + "acc_stderr": 0.04582004841505415, + "acc_norm": 0.35454545454545455, + "acc_norm_stderr": 0.04582004841505415 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.027309140588230172, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.027309140588230172 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2119205298013245, + "acc_stderr": 0.033367670865679766, + "acc_norm": 0.2119205298013245, + "acc_norm_stderr": 0.033367670865679766 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.4527363184079602, + "acc_stderr": 0.03519702717576915, + "acc_norm": 0.4527363184079602, + "acc_norm_stderr": 0.03519702717576915 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.26011560693641617, + "acc_stderr": 0.03345036916788991, + "acc_norm": 0.26011560693641617, + "acc_norm_stderr": 0.03345036916788991 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + 
"acc": 0.2724867724867725, + "acc_stderr": 0.022930973071633356, + "acc_norm": 0.2724867724867725, + "acc_norm_stderr": 0.022930973071633356 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2847222222222222, + "acc_stderr": 0.03773809990686935, + "acc_norm": 0.2847222222222222, + "acc_norm_stderr": 0.03773809990686935 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.48, + "acc_stderr": 0.05021167315686779, + "acc_norm": 0.48, + "acc_norm_stderr": 0.05021167315686779 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.3699421965317919, + "acc_stderr": 0.025992472029306386, + "acc_norm": 0.3699421965317919, + "acc_norm_stderr": 0.025992472029306386 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.27607361963190186, + "acc_stderr": 0.0351238528370505, + "acc_norm": 0.27607361963190186, + "acc_norm_stderr": 0.0351238528370505 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.36728395061728397, + "acc_stderr": 0.02682280175950789, + "acc_norm": 0.36728395061728397, + "acc_norm_stderr": 0.02682280175950789 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.3471502590673575, + "acc_stderr": 0.03435696168361356, + "acc_norm": 0.3471502590673575, + "acc_norm_stderr": 0.03435696168361356 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.22807017543859648, + "acc_stderr": 0.03947152782669415, + "acc_norm": 0.22807017543859648, + "acc_norm_stderr": 0.03947152782669415 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.28990825688073396, + "acc_stderr": 0.019453066609201604, + "acc_norm": 0.28990825688073396, + "acc_norm_stderr": 0.019453066609201604 + }, + 
"harness|ko_mmlu_formal_logic|5": { + "acc": 0.29365079365079366, + "acc_stderr": 0.04073524322147126, + "acc_norm": 0.29365079365079366, + "acc_norm_stderr": 0.04073524322147126 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.027826109307283683, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.027826109307283683 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5289256198347108, + "acc_stderr": 0.04556710331269498, + "acc_norm": 0.5289256198347108, + "acc_norm_stderr": 0.04556710331269498 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3223684210526316, + "acc_stderr": 0.03803510248351586, + "acc_norm": 0.3223684210526316, + "acc_norm_stderr": 0.03803510248351586 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.30718954248366015, + "acc_stderr": 0.018663359671463667, + "acc_norm": 0.30718954248366015, + "acc_norm_stderr": 0.018663359671463667 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2872340425531915, + "acc_stderr": 0.026992199173064356, + "acc_norm": 0.2872340425531915, + "acc_norm_stderr": 0.026992199173064356 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3125, + "acc_stderr": 0.043994650575715215, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.043994650575715215 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.37962962962962965, + "acc_stderr": 0.03309682581119035, + "acc_norm": 0.37962962962962965, + "acc_norm_stderr": 0.03309682581119035 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2435754189944134, + "acc_stderr": 0.01435591196476786, + "acc_norm": 0.2435754189944134, + "acc_norm_stderr": 0.01435591196476786 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 
0.046056618647183814 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.22426470588235295, + "acc_stderr": 0.025336848563332386, + "acc_norm": 0.22426470588235295, + "acc_norm_stderr": 0.025336848563332386 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.40816326530612246, + "acc_stderr": 0.03146465712827423, + "acc_norm": 0.40816326530612246, + "acc_norm_stderr": 0.03146465712827423 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.35864978902953587, + "acc_stderr": 0.031219569445301854, + "acc_norm": 0.35864978902953587, + "acc_norm_stderr": 0.031219569445301854 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.27053455019556716, + "acc_stderr": 0.01134599674353925, + "acc_norm": 0.27053455019556716, + "acc_norm_stderr": 0.01134599674353925 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.3088235294117647, + "acc_stderr": 0.03242661719827218, + "acc_norm": 0.3088235294117647, + "acc_norm_stderr": 0.03242661719827218 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.34545454545454546, + "acc_stderr": 0.03713158067481912, + "acc_norm": 0.34545454545454546, + "acc_norm_stderr": 0.03713158067481912 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2729498164014688, + "mc1_stderr": 0.015594753632006516, + "mc2": 0.43916554694905735, + "mc2_stderr": 0.015333673661914711 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.27036599763872493, + "acc_stderr": 0.015270152942068405, + "acc_norm": 0.3530106257378985, + "acc_norm_stderr": 0.016430745982427126 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + 
"harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + 
"harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "TinyPixel/Llama-2-7B-bf16-sharded", + "model_sha": "3f5d08bf8c31192686e3e88d0b9d2cdeff4115e4", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Trofish/KULLM-RLHF/result_2023-09-28 05:43:12.json b/Trofish/KULLM-RLHF/result_2023-09-28 05:43:12.json new file mode 100644 index 0000000000000000000000000000000000000000..183d232b971cbdbf85b4f6311d06a25c16cf3249 --- /dev/null +++ b/Trofish/KULLM-RLHF/result_2023-09-28 05:43:12.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2790102389078498, + "acc_stderr": 0.013106784883601352, + "acc_norm": 0.3199658703071672, + "acc_norm_stderr": 0.013631345807016198 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3842859988050189, + "acc_stderr": 0.004854318994447741, + "acc_norm": 0.4954192391953794, + "acc_norm_stderr": 0.0049895720021966876 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.3157894736842105, + "acc_stderr": 0.03565079670708313, + "acc_norm": 0.3157894736842105, + "acc_norm_stderr": 0.03565079670708313 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.1941747572815534, + "acc_stderr": 0.039166677628225836, + "acc_norm": 0.1941747572815534, + "acc_norm_stderr": 0.039166677628225836 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2848020434227331, + "acc_stderr": 0.016139174096522563, + "acc_norm": 
0.2848020434227331, + "acc_norm_stderr": 0.016139174096522563 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.03944624162501116, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.03944624162501116 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.20425531914893616, + "acc_stderr": 0.02635515841334941, + "acc_norm": 0.20425531914893616, + "acc_norm_stderr": 0.02635515841334941 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.19879518072289157, + "acc_stderr": 0.03106939026078942, + "acc_norm": 0.19879518072289157, + "acc_norm_stderr": 0.03106939026078942 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.26688102893890675, + "acc_stderr": 0.025122637608816653, + "acc_norm": 0.26688102893890675, + "acc_norm_stderr": 0.025122637608816653 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.2600896860986547, + "acc_stderr": 0.029442495585857473, + "acc_norm": 0.2600896860986547, + "acc_norm_stderr": 0.029442495585857473 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.1984732824427481, + "acc_stderr": 0.034981493854624686, + "acc_norm": 0.1984732824427481, + "acc_norm_stderr": 0.034981493854624686 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.25252525252525254, + "acc_stderr": 0.030954055470365907, + "acc_norm": 0.25252525252525254, + "acc_norm_stderr": 0.030954055470365907 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2896551724137931, + "acc_stderr": 0.03780019230438014, + "acc_norm": 0.2896551724137931, + "acc_norm_stderr": 0.03780019230438014 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.18627450980392157, + "acc_stderr": 0.03873958714149353, 
+ "acc_norm": 0.18627450980392157, + "acc_norm_stderr": 0.03873958714149353 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.21008403361344538, + "acc_stderr": 0.026461398717471874, + "acc_norm": 0.21008403361344538, + "acc_norm_stderr": 0.026461398717471874 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2128205128205128, + "acc_stderr": 0.020752423722128002, + "acc_norm": 0.2128205128205128, + "acc_norm_stderr": 0.020752423722128002 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.04330043749650742, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.04330043749650742 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2561576354679803, + "acc_stderr": 0.0307127300709826, + "acc_norm": 0.2561576354679803, + "acc_norm_stderr": 0.0307127300709826 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.24516129032258063, + "acc_stderr": 0.024472243840895518, + "acc_norm": 0.24516129032258063, + "acc_norm_stderr": 0.024472243840895518 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.28205128205128205, + "acc_stderr": 0.029480360549541198, + "acc_norm": 0.28205128205128205, + "acc_norm_stderr": 0.029480360549541198 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.24528301886792453, + "acc_stderr": 0.026480357179895688, + "acc_norm": 0.24528301886792453, + "acc_norm_stderr": 0.026480357179895688 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.20909090909090908, + "acc_stderr": 0.03895091015724135, + "acc_norm": 0.20909090909090908, + "acc_norm_stderr": 0.03895091015724135 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 
0.26666666666666666, + "acc_stderr": 0.026962424325073838, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.026962424325073838 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2185430463576159, + "acc_stderr": 0.033742355504256936, + "acc_norm": 0.2185430463576159, + "acc_norm_stderr": 0.033742355504256936 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.24875621890547264, + "acc_stderr": 0.030567675938916714, + "acc_norm": 0.24875621890547264, + "acc_norm_stderr": 0.030567675938916714 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.23699421965317918, + "acc_stderr": 0.03242414757483099, + "acc_norm": 0.23699421965317918, + "acc_norm_stderr": 0.03242414757483099 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.29894179894179895, + "acc_stderr": 0.023577604791655805, + "acc_norm": 0.29894179894179895, + "acc_norm_stderr": 0.023577604791655805 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2638888888888889, + "acc_stderr": 0.03685651095897532, + "acc_norm": 0.2638888888888889, + "acc_norm_stderr": 0.03685651095897532 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.15, + "acc_stderr": 0.03588702812826371, + "acc_norm": 0.15, + "acc_norm_stderr": 0.03588702812826371 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2745664739884393, + "acc_stderr": 0.02402774515526501, + "acc_norm": 0.2745664739884393, + "acc_norm_stderr": 0.02402774515526501 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3128834355828221, + "acc_stderr": 0.036429145782924034, + "acc_norm": 0.3128834355828221, + "acc_norm_stderr": 0.036429145782924034 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.30864197530864196, + "acc_stderr": 0.025702640260603767, + "acc_norm": 0.30864197530864196, + "acc_norm_stderr": 0.025702640260603767 + }, + 
"harness|ko_mmlu_college_mathematics|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.22797927461139897, + "acc_stderr": 0.030276909945178256, + "acc_norm": 0.22797927461139897, + "acc_norm_stderr": 0.030276909945178256 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.03999423879281336, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.03999423879281336 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.23853211009174313, + "acc_stderr": 0.018272575810231857, + "acc_norm": 0.23853211009174313, + "acc_norm_stderr": 0.018272575810231857 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.1746031746031746, + "acc_stderr": 0.033954900208561116, + "acc_norm": 0.1746031746031746, + "acc_norm_stderr": 0.033954900208561116 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.26143790849673204, + "acc_stderr": 0.025160998214292456, + "acc_norm": 0.26143790849673204, + "acc_norm_stderr": 0.025160998214292456 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.3884297520661157, + "acc_stderr": 0.04449270350068382, + "acc_norm": 0.3884297520661157, + "acc_norm_stderr": 0.04449270350068382 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.23026315789473684, + "acc_stderr": 0.03426059424403165, + "acc_norm": 0.23026315789473684, + "acc_norm_stderr": 0.03426059424403165 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2908496732026144, + "acc_stderr": 0.018373116915903966, + "acc_norm": 0.2908496732026144, + "acc_norm_stderr": 0.018373116915903966 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.26595744680851063, + "acc_stderr": 0.026358065698880582, + "acc_norm": 
0.26595744680851063, + "acc_norm_stderr": 0.026358065698880582 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25, + "acc_stderr": 0.04109974682633932, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04109974682633932 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.23148148148148148, + "acc_stderr": 0.028765111718046937, + "acc_norm": 0.23148148148148148, + "acc_norm_stderr": 0.028765111718046937 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24692737430167597, + "acc_stderr": 0.014422292204808852, + "acc_norm": 0.24692737430167597, + "acc_norm_stderr": 0.014422292204808852 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.16911764705882354, + "acc_stderr": 0.022770868010113014, + "acc_norm": 0.16911764705882354, + "acc_norm_stderr": 0.022770868010113014 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2530612244897959, + "acc_stderr": 0.02783302387139968, + "acc_norm": 0.2530612244897959, + "acc_norm_stderr": 0.02783302387139968 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2869198312236287, + "acc_stderr": 0.029443773022594693, + "acc_norm": 0.2869198312236287, + "acc_norm_stderr": 0.029443773022594693 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.24967405475880053, + "acc_stderr": 0.011054538377832318, + "acc_norm": 0.24967405475880053, + "acc_norm_stderr": 0.011054538377832318 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.27941176470588236, + "acc_stderr": 0.03149328104507957, + "acc_norm": 0.27941176470588236, + "acc_norm_stderr": 0.03149328104507957 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 
0.296969696969697, + "acc_stderr": 0.03567969772268047, + "acc_norm": 0.296969696969697, + "acc_norm_stderr": 0.03567969772268047 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24724602203182375, + "mc1_stderr": 0.015102404797359649, + "mc2": 0.38771109052404834, + "mc2_stderr": 0.014784638195990142 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.31641086186540734, + "acc_stderr": 0.015989617951065477, + "acc_norm": 0.3990554899645809, + "acc_norm_stderr": 0.016836377292849296 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + 
"harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Trofish/KULLM-RLHF", + "model_sha": "ba40edd22e913ab8170e3c78035d8d9057d31fba", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/UCLA-AGI/Llama-3-Instruct-8B-SPPO-Iter3/result_2024-08-07 03:48:23.json b/UCLA-AGI/Llama-3-Instruct-8B-SPPO-Iter3/result_2024-08-07 03:48:23.json new file mode 100644 index 0000000000000000000000000000000000000000..2201108cae1f04f3028a210d165f856138529cad --- /dev/null +++ b/UCLA-AGI/Llama-3-Instruct-8B-SPPO-Iter3/result_2024-08-07 03:48:23.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3890784982935154, + 
"acc_stderr": 0.014247309976045607, + "acc_norm": 0.4590443686006826, + "acc_norm_stderr": 0.014562291073601224 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3685520812587134, + "acc_stderr": 0.004814261966376847, + "acc_norm": 0.48317068313085043, + "acc_norm_stderr": 0.004986954139737526 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.543859649122807, + "acc_stderr": 0.03820042586602966, + "acc_norm": 0.543859649122807, + "acc_norm_stderr": 0.03820042586602966 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5728155339805825, + "acc_stderr": 0.04897957737781168, + "acc_norm": 0.5728155339805825, + "acc_norm_stderr": 0.04897957737781168 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.454661558109834, + "acc_stderr": 0.017806304585052606, + "acc_norm": 0.454661558109834, + "acc_norm_stderr": 0.017806304585052606 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.35555555555555557, + "acc_stderr": 0.04135176749720386, + "acc_norm": 0.35555555555555557, + "acc_norm_stderr": 0.04135176749720386 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4808510638297872, + "acc_stderr": 0.032662042990646775, + "acc_norm": 0.4808510638297872, + "acc_norm_stderr": 0.032662042990646775 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3795180722891566, + "acc_stderr": 0.037777988227480165, + "acc_norm": 0.3795180722891566, + "acc_norm_stderr": 0.037777988227480165 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5594855305466238, + "acc_stderr": 0.028196400574197422, + "acc_norm": 0.5594855305466238, + "acc_norm_stderr": 0.028196400574197422 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5067264573991032, + "acc_stderr": 0.033554765962343545, + "acc_norm": 0.5067264573991032, + "acc_norm_stderr": 0.033554765962343545 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 
0.48091603053435117, + "acc_stderr": 0.04382094705550988, + "acc_norm": 0.48091603053435117, + "acc_norm_stderr": 0.04382094705550988 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.494949494949495, + "acc_stderr": 0.035621707606254015, + "acc_norm": 0.494949494949495, + "acc_norm_stderr": 0.035621707606254015 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5793103448275863, + "acc_stderr": 0.0411391498118926, + "acc_norm": 0.5793103448275863, + "acc_norm_stderr": 0.0411391498118926 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3431372549019608, + "acc_stderr": 0.047240073523838876, + "acc_norm": 0.3431372549019608, + "acc_norm_stderr": 0.047240073523838876 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5294117647058824, + "acc_stderr": 0.032422250271150053, + "acc_norm": 0.5294117647058824, + "acc_norm_stderr": 0.032422250271150053 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5051282051282051, + "acc_stderr": 0.025349672906838643, + "acc_norm": 0.5051282051282051, + "acc_norm_stderr": 0.025349672906838643 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6296296296296297, + "acc_stderr": 0.04668408033024931, + "acc_norm": 0.6296296296296297, + "acc_norm_stderr": 0.04668408033024931 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4630541871921182, + "acc_stderr": 0.035083705204426656, + "acc_norm": 0.4630541871921182, + "acc_norm_stderr": 0.035083705204426656 + }, + 
"harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5290322580645161, + "acc_stderr": 0.028396016402760998, + "acc_norm": 0.5290322580645161, + "acc_norm_stderr": 0.028396016402760998 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7564102564102564, + "acc_stderr": 0.02812096650391439, + "acc_norm": 0.7564102564102564, + "acc_norm_stderr": 0.02812096650391439 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5132075471698113, + "acc_stderr": 0.030762134874500476, + "acc_norm": 0.5132075471698113, + "acc_norm_stderr": 0.030762134874500476 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.362962962962963, + "acc_stderr": 0.029318203645206865, + "acc_norm": 0.362962962962963, + "acc_norm_stderr": 0.029318203645206865 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3443708609271523, + "acc_stderr": 0.038796870240733264, + "acc_norm": 0.3443708609271523, + "acc_norm_stderr": 0.038796870240733264 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6467661691542289, + "acc_stderr": 0.03379790611796777, + "acc_norm": 0.6467661691542289, + "acc_norm_stderr": 0.03379790611796777 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.45664739884393063, + "acc_stderr": 0.03798106566014498, + "acc_norm": 0.45664739884393063, + "acc_norm_stderr": 0.03798106566014498 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.38095238095238093, + "acc_stderr": 0.02501074911613759, + "acc_norm": 0.38095238095238093, + "acc_norm_stderr": 0.02501074911613759 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.04174752578923185, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.04174752578923185 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 
0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.68, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.68, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.523121387283237, + "acc_stderr": 0.026890297881303118, + "acc_norm": 0.523121387283237, + "acc_norm_stderr": 0.026890297881303118 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.43558282208588955, + "acc_stderr": 0.03895632464138936, + "acc_norm": 0.43558282208588955, + "acc_norm_stderr": 0.03895632464138936 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5462962962962963, + "acc_stderr": 0.0277012284685426, + "acc_norm": 0.5462962962962963, + "acc_norm_stderr": 0.0277012284685426 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5854922279792746, + "acc_stderr": 0.035553003195576686, + "acc_norm": 0.5854922279792746, + "acc_norm_stderr": 0.035553003195576686 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.39473684210526316, + "acc_stderr": 0.04598188057816542, + "acc_norm": 0.39473684210526316, + "acc_norm_stderr": 0.04598188057816542 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5724770642201835, + "acc_stderr": 0.021210910204300434, + "acc_norm": 0.5724770642201835, + "acc_norm_stderr": 0.021210910204300434 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4523809523809524, + "acc_stderr": 0.044518079590553275, + "acc_norm": 0.4523809523809524, + "acc_norm_stderr": 0.044518079590553275 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.49673202614379086, + "acc_stderr": 0.028629305194003543, + "acc_norm": 0.49673202614379086, + "acc_norm_stderr": 0.028629305194003543 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001974, + "acc_norm": 
0.61, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6776859504132231, + "acc_stderr": 0.04266416363352168, + "acc_norm": 0.6776859504132231, + "acc_norm_stderr": 0.04266416363352168 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.46710526315789475, + "acc_stderr": 0.04060127035236397, + "acc_norm": 0.46710526315789475, + "acc_norm_stderr": 0.04060127035236397 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.434640522875817, + "acc_stderr": 0.020054269200726452, + "acc_norm": 0.434640522875817, + "acc_norm_stderr": 0.020054269200726452 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.35815602836879434, + "acc_stderr": 0.028602085862759415, + "acc_norm": 0.35815602836879434, + "acc_norm_stderr": 0.028602085862759415 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.49107142857142855, + "acc_stderr": 0.04745033255489123, + "acc_norm": 0.49107142857142855, + "acc_norm_stderr": 0.04745033255489123 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.39814814814814814, + "acc_stderr": 0.033384734032074016, + "acc_norm": 0.39814814814814814, + "acc_norm_stderr": 0.033384734032074016 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.3016759776536313, + "acc_stderr": 0.015350767572220286, + "acc_norm": 0.3016759776536313, + "acc_norm_stderr": 0.015350767572220286 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.69, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.69, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.39338235294117646, + "acc_stderr": 0.02967428828131118, + "acc_norm": 0.39338235294117646, + "acc_norm_stderr": 0.02967428828131118 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 
0.6163265306122448, + "acc_stderr": 0.031130880396235946, + "acc_norm": 0.6163265306122448, + "acc_norm_stderr": 0.031130880396235946 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6582278481012658, + "acc_stderr": 0.030874537537553617, + "acc_norm": 0.6582278481012658, + "acc_norm_stderr": 0.030874537537553617 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.378748370273794, + "acc_stderr": 0.012389052105003736, + "acc_norm": 0.378748370273794, + "acc_norm_stderr": 0.012389052105003736 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5441176470588235, + "acc_stderr": 0.03495624522015476, + "acc_norm": 0.5441176470588235, + "acc_norm_stderr": 0.03495624522015476 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6606060606060606, + "acc_stderr": 0.03697442205031595, + "acc_norm": 0.6606060606060606, + "acc_norm_stderr": 0.03697442205031595 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3402692778457772, + "mc1_stderr": 0.016586304901762564, + "mc2": 0.5229112145964492, + "mc2_stderr": 0.015931720408095762 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.49586776859504134, + "acc_stderr": 0.01718976703213082, + "acc_norm": 0.5324675324675324, + "acc_norm_stderr": 0.017154073716682865 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 
1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + 
"model_name": "UCLA-AGI/Llama-3-Instruct-8B-SPPO-Iter3", + "model_sha": "48c29bf2d9d68113255df9a47a9dabff6c67a13f", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/URP/urllm-ko-7b/result_2024-03-25 05:28:05.json b/URP/urllm-ko-7b/result_2024-03-25 05:28:05.json new file mode 100644 index 0000000000000000000000000000000000000000..e5792564fae8480af6874a677abbce4a5b27cef4 --- /dev/null +++ b/URP/urllm-ko-7b/result_2024-03-25 05:28:05.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2764505119453925, + "acc_stderr": 0.013069662474252428, + "acc_norm": 0.3242320819112628, + "acc_norm_stderr": 0.013678810399518824 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3460466042620992, + "acc_stderr": 0.00474736050074247, + "acc_norm": 0.4311890061740689, + "acc_norm_stderr": 0.0049423027680020985 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.034462962170884265, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.034462962170884265 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.21359223300970873, + "acc_stderr": 0.04058042015646035, + "acc_norm": 0.21359223300970873, + "acc_norm_stderr": 0.04058042015646035 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2515964240102171, + "acc_stderr": 0.015517322365529624, + "acc_norm": 0.2515964240102171, + "acc_norm_stderr": 0.015517322365529624 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.03785714465066653, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.03785714465066653 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3276595744680851, + "acc_stderr": 
0.030683020843231, + "acc_norm": 0.3276595744680851, + "acc_norm_stderr": 0.030683020843231 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3253012048192771, + "acc_stderr": 0.03647168523683229, + "acc_norm": 0.3253012048192771, + "acc_norm_stderr": 0.03647168523683229 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.24437299035369775, + "acc_stderr": 0.024406162094668914, + "acc_norm": 0.24437299035369775, + "acc_norm_stderr": 0.024406162094668914 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.33183856502242154, + "acc_stderr": 0.03160295143776679, + "acc_norm": 0.33183856502242154, + "acc_norm_stderr": 0.03160295143776679 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2748091603053435, + "acc_stderr": 0.039153454088478354, + "acc_norm": 0.2748091603053435, + "acc_norm_stderr": 0.039153454088478354 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.26262626262626265, + "acc_stderr": 0.03135305009533084, + "acc_norm": 0.26262626262626265, + "acc_norm_stderr": 0.03135305009533084 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2482758620689655, + "acc_stderr": 0.036001056927277716, + "acc_norm": 0.2482758620689655, + "acc_norm_stderr": 0.036001056927277716 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.04023382273617747, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.04023382273617747 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.2773109243697479, + "acc_stderr": 0.029079374539480007, + "acc_norm": 0.2773109243697479, + "acc_norm_stderr": 0.029079374539480007 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.20256410256410257, + "acc_stderr": 0.02037766097037138, + "acc_norm": 0.20256410256410257, + "acc_norm_stderr": 0.02037766097037138 + }, + 
"harness|ko_mmlu_computer_security|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909282, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909282 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816505, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816505 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.24074074074074073, + "acc_stderr": 0.04133119440243839, + "acc_norm": 0.24074074074074073, + "acc_norm_stderr": 0.04133119440243839 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.28078817733990147, + "acc_stderr": 0.03161856335358608, + "acc_norm": 0.28078817733990147, + "acc_norm_stderr": 0.03161856335358608 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2903225806451613, + "acc_stderr": 0.02582210611941591, + "acc_norm": 0.2903225806451613, + "acc_norm_stderr": 0.02582210611941591 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2948717948717949, + "acc_stderr": 0.029872577708891176, + "acc_norm": 0.2948717948717949, + "acc_norm_stderr": 0.029872577708891176 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.24150943396226415, + "acc_stderr": 0.026341480371118355, + "acc_norm": 0.24150943396226415, + "acc_norm_stderr": 0.026341480371118355 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.3, + "acc_stderr": 0.04389311454644286, + "acc_norm": 0.3, + "acc_norm_stderr": 0.04389311454644286 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.23333333333333334, + "acc_stderr": 0.025787874220959323, + "acc_norm": 0.23333333333333334, + "acc_norm_stderr": 0.025787874220959323 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.36423841059602646, + "acc_stderr": 0.03929111781242742, + "acc_norm": 0.36423841059602646, + "acc_norm_stderr": 0.03929111781242742 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.24378109452736318, + "acc_stderr": 0.030360490154014628, + "acc_norm": 0.24378109452736318, + "acc_norm_stderr": 0.030360490154014628 + }, 
+ "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2138728323699422, + "acc_stderr": 0.031265112061730424, + "acc_norm": 0.2138728323699422, + "acc_norm_stderr": 0.031265112061730424 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2751322751322751, + "acc_stderr": 0.02300008685906865, + "acc_norm": 0.2751322751322751, + "acc_norm_stderr": 0.02300008685906865 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2847222222222222, + "acc_stderr": 0.03773809990686935, + "acc_norm": 0.2847222222222222, + "acc_norm_stderr": 0.03773809990686935 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.24277456647398843, + "acc_stderr": 0.0230836585869842, + "acc_norm": 0.24277456647398843, + "acc_norm_stderr": 0.0230836585869842 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.25766871165644173, + "acc_stderr": 0.03436150827846917, + "acc_norm": 0.25766871165644173, + "acc_norm_stderr": 0.03436150827846917 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.28703703703703703, + "acc_stderr": 0.025171041915309684, + "acc_norm": 0.28703703703703703, + "acc_norm_stderr": 0.025171041915309684 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.19170984455958548, + "acc_stderr": 0.02840895362624528, + "acc_norm": 0.19170984455958548, + "acc_norm_stderr": 0.02840895362624528 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.04049339297748142, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 
0.04049339297748142 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.26605504587155965, + "acc_stderr": 0.018946022322225597, + "acc_norm": 0.26605504587155965, + "acc_norm_stderr": 0.018946022322225597 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30158730158730157, + "acc_stderr": 0.04104947269903394, + "acc_norm": 0.30158730158730157, + "acc_norm_stderr": 0.04104947269903394 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.025829163272757475, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.025829163272757475 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.3140495867768595, + "acc_stderr": 0.04236964753041018, + "acc_norm": 0.3140495867768595, + "acc_norm_stderr": 0.04236964753041018 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.19078947368421054, + "acc_stderr": 0.031975658210325, + "acc_norm": 0.19078947368421054, + "acc_norm_stderr": 0.031975658210325 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2369281045751634, + "acc_stderr": 0.01720166216978979, + "acc_norm": 0.2369281045751634, + "acc_norm_stderr": 0.01720166216978979 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2695035460992908, + "acc_stderr": 0.026469036818590634, + "acc_norm": 0.2695035460992908, + "acc_norm_stderr": 0.026469036818590634 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3125, + "acc_stderr": 0.043994650575715215, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.043994650575715215 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.42592592592592593, + "acc_stderr": 0.03372343271653063, + "acc_norm": 0.42592592592592593, + "acc_norm_stderr": 0.03372343271653063 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.26256983240223464, + "acc_stderr": 0.014716824273017763, + "acc_norm": 
0.26256983240223464, + "acc_norm_stderr": 0.014716824273017763 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932269, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932269 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.375, + "acc_stderr": 0.029408372932278746, + "acc_norm": 0.375, + "acc_norm_stderr": 0.029408372932278746 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.19591836734693877, + "acc_stderr": 0.025409301953225678, + "acc_norm": 0.19591836734693877, + "acc_norm_stderr": 0.025409301953225678 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.25316455696202533, + "acc_stderr": 0.02830465794303529, + "acc_norm": 0.25316455696202533, + "acc_norm_stderr": 0.02830465794303529 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.24185136897001303, + "acc_stderr": 0.010936550813827061, + "acc_norm": 0.24185136897001303, + "acc_norm_stderr": 0.010936550813827061 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.23039215686274508, + "acc_stderr": 0.029554292605695066, + "acc_norm": 0.23039215686274508, + "acc_norm_stderr": 0.029554292605695066 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2, + "acc_stderr": 0.03123475237772119, + "acc_norm": 0.2, + "acc_norm_stderr": 0.03123475237772119 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.27906976744186046, + "mc1_stderr": 0.01570210709062789, + "mc2": 0.4442891995662655, + "mc2_stderr": 0.015329265090066716 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.27036599763872493, + "acc_stderr": 0.015270152942068405, + "acc_norm": 0.3010625737898465, + "acc_norm_stderr": 0.015771113299945457 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + 
"harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + 
"harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "URP/urllm-ko-7b", + "model_sha": "82dcd7835c354ca5c62f569cae99ede749964738", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/URP/urllm-ko_en-2.7b/result_2024-04-11 04:09:22.json b/URP/urllm-ko_en-2.7b/result_2024-04-11 04:09:22.json new file mode 100644 index 0000000000000000000000000000000000000000..33b1318ab67f41716b64edb849da5852ec09c287 --- /dev/null +++ b/URP/urllm-ko_en-2.7b/result_2024-04-11 04:09:22.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.24744027303754265, + "acc_stderr": 0.012610352663292673, + "acc_norm": 0.2883959044368601, + "acc_norm_stderr": 0.01323839442242818 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3272256522605059, + "acc_stderr": 0.004682414968323638, + "acc_norm": 0.3935471021708823, + "acc_norm_stderr": 0.004875379352079817 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.1695906432748538, + "acc_stderr": 0.028782108105401712, + "acc_norm": 0.1695906432748538, + "acc_norm_stderr": 0.028782108105401712 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.3883495145631068, + "acc_stderr": 0.0482572933735639, + "acc_norm": 0.3883495145631068, + "acc_norm_stderr": 0.0482572933735639 + }, + 
"harness|ko_mmlu_miscellaneous|5": { + "acc": 0.20561941251596424, + "acc_stderr": 0.014452500456785823, + "acc_norm": 0.20561941251596424, + "acc_norm_stderr": 0.014452500456785823 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2518518518518518, + "acc_stderr": 0.03749850709174023, + "acc_norm": 0.2518518518518518, + "acc_norm_stderr": 0.03749850709174023 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2425531914893617, + "acc_stderr": 0.02802022627120022, + "acc_norm": 0.2425531914893617, + "acc_norm_stderr": 0.02802022627120022 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.23493975903614459, + "acc_stderr": 0.03300533186128922, + "acc_norm": 0.23493975903614459, + "acc_norm_stderr": 0.03300533186128922 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2347266881028939, + "acc_stderr": 0.024071805887677048, + "acc_norm": 0.2347266881028939, + "acc_norm_stderr": 0.024071805887677048 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.11210762331838565, + "acc_stderr": 0.021174894206346103, + "acc_norm": 0.11210762331838565, + "acc_norm_stderr": 0.021174894206346103 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.20610687022900764, + "acc_stderr": 0.035477710041594626, + "acc_norm": 0.20610687022900764, + "acc_norm_stderr": 0.035477710041594626 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768077, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768077 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.35858585858585856, + "acc_stderr": 0.03416903640391521, + "acc_norm": 0.35858585858585856, + "acc_norm_stderr": 0.03416903640391521 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2413793103448276, + "acc_stderr": 0.03565998174135302, + "acc_norm": 0.2413793103448276, + "acc_norm_stderr": 
0.03565998174135302 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.19607843137254902, + "acc_stderr": 0.03950581861179963, + "acc_norm": 0.19607843137254902, + "acc_norm_stderr": 0.03950581861179963 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.35294117647058826, + "acc_stderr": 0.031041941304059285, + "acc_norm": 0.35294117647058826, + "acc_norm_stderr": 0.031041941304059285 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.35384615384615387, + "acc_stderr": 0.02424378399406217, + "acc_norm": 0.35384615384615387, + "acc_norm_stderr": 0.02424378399406217 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.19, + "acc_stderr": 0.03942772444036623, + "acc_norm": 0.19, + "acc_norm_stderr": 0.03942772444036623 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.21296296296296297, + "acc_stderr": 0.03957835471980981, + "acc_norm": 0.21296296296296297, + "acc_norm_stderr": 0.03957835471980981 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.27586206896551724, + "acc_stderr": 0.03144712581678241, + "acc_norm": 0.27586206896551724, + "acc_norm_stderr": 0.03144712581678241 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3096774193548387, + "acc_stderr": 0.026302774983517418, + "acc_norm": 0.3096774193548387, + "acc_norm_stderr": 0.026302774983517418 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.19658119658119658, + "acc_stderr": 0.02603538609895129, + "acc_norm": 0.19658119658119658, + "acc_norm_stderr": 0.02603538609895129 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.30943396226415093, + "acc_stderr": 0.028450154794118627, + "acc_norm": 0.30943396226415093, + "acc_norm_stderr": 0.028450154794118627 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.22727272727272727, + "acc_stderr": 0.04013964554072773, + 
"acc_norm": 0.22727272727272727, + "acc_norm_stderr": 0.04013964554072773 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2814814814814815, + "acc_stderr": 0.027420019350945273, + "acc_norm": 0.2814814814814815, + "acc_norm_stderr": 0.027420019350945273 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33112582781456956, + "acc_stderr": 0.038425817186598696, + "acc_norm": 0.33112582781456956, + "acc_norm_stderr": 0.038425817186598696 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.23880597014925373, + "acc_stderr": 0.030147775935409217, + "acc_norm": 0.23880597014925373, + "acc_norm_stderr": 0.030147775935409217 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.32947976878612717, + "acc_stderr": 0.03583901754736413, + "acc_norm": 0.32947976878612717, + "acc_norm_stderr": 0.03583901754736413 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.02326651221373057, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.02326651221373057 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.24305555555555555, + "acc_stderr": 0.0358687928008034, + "acc_norm": 0.24305555555555555, + "acc_norm_stderr": 0.0358687928008034 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.20520231213872833, + "acc_stderr": 0.021742519835276298, + "acc_norm": 0.20520231213872833, + "acc_norm_stderr": 0.021742519835276298 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2392638036809816, + "acc_stderr": 0.03351953879521271, + "acc_norm": 0.2392638036809816, + "acc_norm_stderr": 0.03351953879521271 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.22839506172839505, + 
"acc_stderr": 0.023358211840626267, + "acc_norm": 0.22839506172839505, + "acc_norm_stderr": 0.023358211840626267 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421296, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421296 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.36787564766839376, + "acc_stderr": 0.034801756684660366, + "acc_norm": 0.36787564766839376, + "acc_norm_stderr": 0.034801756684660366 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.21929824561403508, + "acc_stderr": 0.03892431106518752, + "acc_norm": 0.21929824561403508, + "acc_norm_stderr": 0.03892431106518752 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3339449541284404, + "acc_stderr": 0.020220554196736407, + "acc_norm": 0.3339449541284404, + "acc_norm_stderr": 0.020220554196736407 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3412698412698413, + "acc_stderr": 0.04240799327574924, + "acc_norm": 0.3412698412698413, + "acc_norm_stderr": 0.04240799327574924 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.026090162504279053, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.026090162504279053 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.19, + "acc_stderr": 0.03942772444036623, + "acc_norm": 0.19, + "acc_norm_stderr": 0.03942772444036623 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.14049586776859505, + "acc_stderr": 0.031722334260021585, + "acc_norm": 0.14049586776859505, + "acc_norm_stderr": 0.031722334260021585 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3223684210526316, + "acc_stderr": 0.03803510248351585, + "acc_norm": 0.3223684210526316, + "acc_norm_stderr": 0.03803510248351585 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2173202614379085, + "acc_stderr": 0.016684820929148598, + "acc_norm": 0.2173202614379085, + "acc_norm_stderr": 0.016684820929148598 + }, + 
"harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2553191489361702, + "acc_stderr": 0.02601199293090201, + "acc_norm": 0.2553191489361702, + "acc_norm_stderr": 0.02601199293090201 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.15178571428571427, + "acc_stderr": 0.03405702838185695, + "acc_norm": 0.15178571428571427, + "acc_norm_stderr": 0.03405702838185695 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.0340470532865388, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.0340470532865388 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27262569832402234, + "acc_stderr": 0.014893391735249608, + "acc_norm": 0.27262569832402234, + "acc_norm_stderr": 0.014893391735249608 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.44485294117647056, + "acc_stderr": 0.030187532060329383, + "acc_norm": 0.44485294117647056, + "acc_norm_stderr": 0.030187532060329383 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4, + "acc_stderr": 0.03136250240935892, + "acc_norm": 0.4, + "acc_norm_stderr": 0.03136250240935892 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.20675105485232068, + "acc_stderr": 0.026361651668389087, + "acc_norm": 0.20675105485232068, + "acc_norm_stderr": 0.026361651668389087 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.24902216427640156, + "acc_stderr": 0.01104489226404077, + "acc_norm": 0.24902216427640156, + "acc_norm_stderr": 0.01104489226404077 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.030587591351604246, + "acc_norm": 
0.2549019607843137, + "acc_norm_stderr": 0.030587591351604246 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03225078108306289, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03225078108306289 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.25091799265605874, + "mc1_stderr": 0.01517698502770769, + "mc2": 0.40626146058944196, + "mc2_stderr": 0.015066657736319539 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2597402597402597, + "acc_stderr": 0.015075666411230296, + "acc_norm": 0.2939787485242031, + "acc_norm_stderr": 0.0156632425690911 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + 
"harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "URP/urllm-ko_en-2.7b", + "model_sha": "6d39a01313401945c346460286f11cd758d1a820", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Unbabel/TowerBase-7B-v0.1/result_2024-01-20 01:40:29.json b/Unbabel/TowerBase-7B-v0.1/result_2024-01-20 01:40:29.json new file mode 100644 index 0000000000000000000000000000000000000000..2e3f6be257bb4abf4913472afca0a37eeba80041 --- /dev/null +++ b/Unbabel/TowerBase-7B-v0.1/result_2024-01-20 01:40:29.json @@ -0,0 +1,444 
@@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.30119453924914674, + "acc_stderr": 0.01340674176784762, + "acc_norm": 0.3609215017064846, + "acc_norm_stderr": 0.014034761386175452 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3767177853017327, + "acc_stderr": 0.0048357289037314006, + "acc_norm": 0.4940250946026688, + "acc_norm_stderr": 0.004989425133377907 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.39766081871345027, + "acc_stderr": 0.0375363895576169, + "acc_norm": 0.39766081871345027, + "acc_norm_stderr": 0.0375363895576169 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.3300970873786408, + "acc_stderr": 0.0465614711001235, + "acc_norm": 0.3300970873786408, + "acc_norm_stderr": 0.0465614711001235 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.37547892720306514, + "acc_stderr": 0.017316613197182786, + "acc_norm": 0.37547892720306514, + "acc_norm_stderr": 0.017316613197182786 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.43703703703703706, + "acc_stderr": 0.04284958639753399, + "acc_norm": 0.43703703703703706, + "acc_norm_stderr": 0.04284958639753399 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.34893617021276596, + "acc_stderr": 0.031158522131357794, + "acc_norm": 0.34893617021276596, + "acc_norm_stderr": 0.031158522131357794 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3192771084337349, + "acc_stderr": 0.036293353299478616, + "acc_norm": 0.3192771084337349, + "acc_norm_stderr": 0.036293353299478616 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.36977491961414793, + "acc_stderr": 0.02741799670563099, + "acc_norm": 0.36977491961414793, + "acc_norm_stderr": 0.02741799670563099 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.37668161434977576, + "acc_stderr": 0.032521134899291884, + "acc_norm": 0.37668161434977576, + 
"acc_norm_stderr": 0.032521134899291884 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.33587786259541985, + "acc_stderr": 0.041423137719966634, + "acc_norm": 0.33587786259541985, + "acc_norm_stderr": 0.041423137719966634 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.398989898989899, + "acc_stderr": 0.0348890161685273, + "acc_norm": 0.398989898989899, + "acc_norm_stderr": 0.0348890161685273 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.35172413793103446, + "acc_stderr": 0.03979236637497412, + "acc_norm": 0.35172413793103446, + "acc_norm_stderr": 0.03979236637497412 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.04220773659171452, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.04220773659171452 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.36134453781512604, + "acc_stderr": 0.03120469122515001, + "acc_norm": 0.36134453781512604, + "acc_norm_stderr": 0.03120469122515001 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.31794871794871793, + "acc_stderr": 0.023610884308927865, + "acc_norm": 0.31794871794871793, + "acc_norm_stderr": 0.023610884308927865 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.37962962962962965, + "acc_stderr": 0.04691521224077742, + "acc_norm": 0.37962962962962965, + "acc_norm_stderr": 0.04691521224077742 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3251231527093596, + "acc_stderr": 0.032957975663112704, + "acc_norm": 
0.3251231527093596, + "acc_norm_stderr": 0.032957975663112704 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.35161290322580646, + "acc_stderr": 0.027162537826948458, + "acc_norm": 0.35161290322580646, + "acc_norm_stderr": 0.027162537826948458 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5170940170940171, + "acc_stderr": 0.032736940493481824, + "acc_norm": 0.5170940170940171, + "acc_norm_stderr": 0.032736940493481824 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3018867924528302, + "acc_stderr": 0.02825420034443867, + "acc_norm": 0.3018867924528302, + "acc_norm_stderr": 0.02825420034443867 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.41818181818181815, + "acc_stderr": 0.04724577405731571, + "acc_norm": 0.41818181818181815, + "acc_norm_stderr": 0.04724577405731571 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.027309140588230172, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.027309140588230172 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + "acc_stderr": 0.037345356767871984, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.037345356767871984 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5074626865671642, + "acc_stderr": 0.035351400842767194, + "acc_norm": 0.5074626865671642, + "acc_norm_stderr": 0.035351400842767194 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.28901734104046245, + "acc_stderr": 0.03456425745086999, + "acc_norm": 0.28901734104046245, + "acc_norm_stderr": 0.03456425745086999 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.26455026455026454, + "acc_stderr": 0.022717467897708617, + "acc_norm": 0.26455026455026454, + "acc_norm_stderr": 0.022717467897708617 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2847222222222222, + "acc_stderr": 0.03773809990686935, + "acc_norm": 0.2847222222222222, + "acc_norm_stderr": 0.03773809990686935 + }, + 
"harness|ko_mmlu_college_chemistry|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.34104046242774566, + "acc_stderr": 0.02552247463212161, + "acc_norm": 0.34104046242774566, + "acc_norm_stderr": 0.02552247463212161 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3128834355828221, + "acc_stderr": 0.03642914578292404, + "acc_norm": 0.3128834355828221, + "acc_norm_stderr": 0.03642914578292404 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.37962962962962965, + "acc_stderr": 0.027002521034516468, + "acc_norm": 0.37962962962962965, + "acc_norm_stderr": 0.027002521034516468 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.31088082901554404, + "acc_stderr": 0.033403619062765864, + "acc_norm": 0.31088082901554404, + "acc_norm_stderr": 0.033403619062765864 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.32456140350877194, + "acc_stderr": 0.04404556157374768, + "acc_norm": 0.32456140350877194, + "acc_norm_stderr": 0.04404556157374768 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3119266055045872, + "acc_stderr": 0.019862967976707245, + "acc_norm": 0.3119266055045872, + "acc_norm_stderr": 0.019862967976707245 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30158730158730157, + "acc_stderr": 0.04104947269903394, + "acc_norm": 0.30158730158730157, + "acc_norm_stderr": 0.04104947269903394 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4117647058823529, + "acc_stderr": 0.028180596328259293, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 
0.028180596328259293 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5289256198347108, + "acc_stderr": 0.04556710331269498, + "acc_norm": 0.5289256198347108, + "acc_norm_stderr": 0.04556710331269498 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3223684210526316, + "acc_stderr": 0.038035102483515854, + "acc_norm": 0.3223684210526316, + "acc_norm_stderr": 0.038035102483515854 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.018635594034423976, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.018635594034423976 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2765957446808511, + "acc_stderr": 0.026684564340461004, + "acc_norm": 0.2765957446808511, + "acc_norm_stderr": 0.026684564340461004 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.30357142857142855, + "acc_stderr": 0.04364226155841044, + "acc_norm": 0.30357142857142855, + "acc_norm_stderr": 0.04364226155841044 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.033247089118091176, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.033247089118091176 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24804469273743016, + "acc_stderr": 0.01444415780826145, + "acc_norm": 0.24804469273743016, + "acc_norm_stderr": 0.01444415780826145 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.40441176470588236, + "acc_stderr": 0.02981263070156974, + "acc_norm": 
0.40441176470588236, + "acc_norm_stderr": 0.02981263070156974 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.39591836734693875, + "acc_stderr": 0.03130802899065686, + "acc_norm": 0.39591836734693875, + "acc_norm_stderr": 0.03130802899065686 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.32489451476793246, + "acc_stderr": 0.030486039389105293, + "acc_norm": 0.32489451476793246, + "acc_norm_stderr": 0.030486039389105293 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.27509778357235987, + "acc_stderr": 0.011405443620996946, + "acc_norm": 0.27509778357235987, + "acc_norm_stderr": 0.011405443620996946 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.03096451792692341, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.03096451792692341 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3212121212121212, + "acc_stderr": 0.0364620496325381, + "acc_norm": 0.3212121212121212, + "acc_norm_stderr": 0.0364620496325381 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24724602203182375, + "mc1_stderr": 0.015102404797359649, + "mc2": 0.39990188380339514, + "mc2_stderr": 0.014783483233378449 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.269185360094451, + "acc_stderr": 0.015249098024144528, + "acc_norm": 0.38134592680047225, + "acc_norm_stderr": 0.016699301768828084 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + 
"harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + 
"harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Unbabel/TowerBase-7B-v0.1", + "model_sha": "7512cb2c27e3b7f0b92c9271c2a845a1365048c8", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Unbabel/TowerInstruct-7B-v0.1/result_2024-01-20 01:40:23.json b/Unbabel/TowerInstruct-7B-v0.1/result_2024-01-20 01:40:23.json new file mode 100644 index 0000000000000000000000000000000000000000..615cfa9e15cf0a49c7add79c74c98df93e21f831 --- /dev/null +++ b/Unbabel/TowerInstruct-7B-v0.1/result_2024-01-20 01:40:23.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.318259385665529, + "acc_stderr": 0.013611993916971451, + "acc_norm": 0.3796928327645051, + "acc_norm_stderr": 0.014182119866974872 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3843855805616411, + "acc_stderr": 0.004854555294017559, + "acc_norm": 0.5038836885082653, + "acc_norm_stderr": 0.004989630887066195 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4853801169590643, + "acc_stderr": 0.038331852752130205, + "acc_norm": 0.4853801169590643, + "acc_norm_stderr": 0.038331852752130205 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.3592233009708738, + "acc_stderr": 0.04750458399041693, + "acc_norm": 0.3592233009708738, + "acc_norm_stderr": 0.04750458399041693 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.43167305236270753, + "acc_stderr": 0.01771222893929979, + "acc_norm": 0.43167305236270753, + "acc_norm_stderr": 0.01771222893929979 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.35555555555555557, + "acc_stderr": 0.04135176749720386, + "acc_norm": 0.35555555555555557, + "acc_norm_stderr": 0.04135176749720386 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.36, + "acc_stderr": 
0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3617021276595745, + "acc_stderr": 0.03141082197596241, + "acc_norm": 0.3617021276595745, + "acc_norm_stderr": 0.03141082197596241 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3373493975903614, + "acc_stderr": 0.03680783690727581, + "acc_norm": 0.3373493975903614, + "acc_norm_stderr": 0.03680783690727581 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.36977491961414793, + "acc_stderr": 0.027417996705630998, + "acc_norm": 0.36977491961414793, + "acc_norm_stderr": 0.027417996705630998 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.40358744394618834, + "acc_stderr": 0.032928028193303135, + "acc_norm": 0.40358744394618834, + "acc_norm_stderr": 0.032928028193303135 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3816793893129771, + "acc_stderr": 0.042607351576445594, + "acc_norm": 0.3816793893129771, + "acc_norm_stderr": 0.042607351576445594 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.398989898989899, + "acc_stderr": 0.034889016168527305, + "acc_norm": 0.398989898989899, + "acc_norm_stderr": 0.034889016168527305 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4, + "acc_stderr": 0.04082482904638628, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04082482904638628 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.04220773659171452, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.04220773659171452 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.031124619309328177, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.031124619309328177 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 
0.3487179487179487, + "acc_stderr": 0.02416278028401772, + "acc_norm": 0.3487179487179487, + "acc_norm_stderr": 0.02416278028401772 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.34, + "acc_stderr": 0.047609522856952344, + "acc_norm": 0.34, + "acc_norm_stderr": 0.047609522856952344 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.37962962962962965, + "acc_stderr": 0.04691521224077742, + "acc_norm": 0.37962962962962965, + "acc_norm_stderr": 0.04691521224077742 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.27586206896551724, + "acc_stderr": 0.03144712581678241, + "acc_norm": 0.27586206896551724, + "acc_norm_stderr": 0.03144712581678241 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3161290322580645, + "acc_stderr": 0.026450874489042774, + "acc_norm": 0.3161290322580645, + "acc_norm_stderr": 0.026450874489042774 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5683760683760684, + "acc_stderr": 0.0324483553531149, + "acc_norm": 0.5683760683760684, + "acc_norm_stderr": 0.0324483553531149 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3320754716981132, + "acc_stderr": 0.028985455652334388, + "acc_norm": 0.3320754716981132, + "acc_norm_stderr": 0.028985455652334388 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4636363636363636, + "acc_stderr": 0.047764491623961985, + "acc_norm": 0.4636363636363636, + "acc_norm_stderr": 0.047764491623961985 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24814814814814815, + "acc_stderr": 0.0263357394040558, + "acc_norm": 0.24814814814814815, + "acc_norm_stderr": 0.0263357394040558 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2119205298013245, + "acc_stderr": 0.03336767086567977, + "acc_norm": 0.2119205298013245, + "acc_norm_stderr": 0.03336767086567977 + }, + 
"harness|ko_mmlu_sociology|5": { + "acc": 0.5671641791044776, + "acc_stderr": 0.035034909236732824, + "acc_norm": 0.5671641791044776, + "acc_norm_stderr": 0.035034909236732824 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3236994219653179, + "acc_stderr": 0.0356760379963917, + "acc_norm": 0.3236994219653179, + "acc_norm_stderr": 0.0356760379963917 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2671957671957672, + "acc_stderr": 0.02278967314577656, + "acc_norm": 0.2671957671957672, + "acc_norm_stderr": 0.02278967314577656 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2708333333333333, + "acc_stderr": 0.03716177437566017, + "acc_norm": 0.2708333333333333, + "acc_norm_stderr": 0.03716177437566017 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.22, + "acc_stderr": 0.041633319989322695, + "acc_norm": 0.22, + "acc_norm_stderr": 0.041633319989322695 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.36127167630057805, + "acc_stderr": 0.02586220185227788, + "acc_norm": 0.36127167630057805, + "acc_norm_stderr": 0.02586220185227788 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.32515337423312884, + "acc_stderr": 0.0368035037128646, + "acc_norm": 0.32515337423312884, + "acc_norm_stderr": 0.0368035037128646 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.38271604938271603, + "acc_stderr": 0.027044538138402602, + "acc_norm": 0.38271604938271603, + "acc_norm_stderr": 0.027044538138402602 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.40932642487046633, + "acc_stderr": 0.03548608168860807, + "acc_norm": 0.40932642487046633, + "acc_norm_stderr": 
0.03548608168860807 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.04185774424022056, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.04185774424022056 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3688073394495413, + "acc_stderr": 0.020686227560729548, + "acc_norm": 0.3688073394495413, + "acc_norm_stderr": 0.020686227560729548 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3253968253968254, + "acc_stderr": 0.041905964388711366, + "acc_norm": 0.3253968253968254, + "acc_norm_stderr": 0.041905964388711366 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.42483660130718953, + "acc_stderr": 0.028304576673141107, + "acc_norm": 0.42483660130718953, + "acc_norm_stderr": 0.028304576673141107 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.55, + "acc_stderr": 0.05, + "acc_norm": 0.55, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.49586776859504134, + "acc_stderr": 0.04564198767432754, + "acc_norm": 0.49586776859504134, + "acc_norm_stderr": 0.04564198767432754 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3026315789473684, + "acc_stderr": 0.03738520676119669, + "acc_norm": 0.3026315789473684, + "acc_norm_stderr": 0.03738520676119669 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.31862745098039214, + "acc_stderr": 0.018850084696468712, + "acc_norm": 0.31862745098039214, + "acc_norm_stderr": 0.018850084696468712 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2872340425531915, + "acc_stderr": 0.02699219917306436, + "acc_norm": 0.2872340425531915, + "acc_norm_stderr": 0.02699219917306436 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3482142857142857, + "acc_stderr": 0.045218299028335865, + "acc_norm": 0.3482142857142857, + "acc_norm_stderr": 0.045218299028335865 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2824074074074074, + "acc_stderr": 0.030701372111510916, + "acc_norm": 
0.2824074074074074, + "acc_norm_stderr": 0.030701372111510916 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.26256983240223464, + "acc_stderr": 0.014716824273017761, + "acc_norm": 0.26256983240223464, + "acc_norm_stderr": 0.014716824273017761 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4007352941176471, + "acc_stderr": 0.029768263528933102, + "acc_norm": 0.4007352941176471, + "acc_norm_stderr": 0.029768263528933102 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.37142857142857144, + "acc_stderr": 0.030932858792789848, + "acc_norm": 0.37142857142857144, + "acc_norm_stderr": 0.030932858792789848 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.4936708860759494, + "acc_stderr": 0.032544620107678585, + "acc_norm": 0.4936708860759494, + "acc_norm_stderr": 0.032544620107678585 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2900912646675359, + "acc_stderr": 0.011590375554733096, + "acc_norm": 0.2900912646675359, + "acc_norm_stderr": 0.011590375554733096 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.39705882352941174, + "acc_stderr": 0.034341311647191286, + "acc_norm": 0.39705882352941174, + "acc_norm_stderr": 0.034341311647191286 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4303030303030303, + "acc_stderr": 0.03866225962879077, + "acc_norm": 0.4303030303030303, + "acc_norm_stderr": 0.03866225962879077 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.28886168910648713, + "mc1_stderr": 0.015866346401384304, + "mc2": 0.4610240143546929, + "mc2_stderr": 0.015462097345287968 + }, + "harness|ko_commongen_v2|2": { + "acc": 
0.3069657615112161, + "acc_stderr": 0.015857588095362814, + "acc_norm": 0.3825265643447462, + "acc_norm_stderr": 0.01670916538722881 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + 
"harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Unbabel/TowerInstruct-7B-v0.1", + "model_sha": "3ce0db8d1e9a39ec8999e57798f28c41218dc077", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/VAGOsolutions/Llama-3-SauerkrautLM-8b-Instruct/result_2024-05-17 15:11:37.json b/VAGOsolutions/Llama-3-SauerkrautLM-8b-Instruct/result_2024-05-17 15:11:37.json new file mode 100644 index 0000000000000000000000000000000000000000..cca366176757a12f296cd909c535376a516a8da7 --- /dev/null +++ b/VAGOsolutions/Llama-3-SauerkrautLM-8b-Instruct/result_2024-05-17 15:11:37.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.44112627986348124, + "acc_stderr": 0.014509747749064663, + "acc_norm": 0.5051194539249146, + "acc_norm_stderr": 0.014610624890309154 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3890659231228839, + "acc_stderr": 0.004865419468213888, + "acc_norm": 0.5151364270065724, + "acc_norm_stderr": 0.004987494455523721 + }, + 
"harness|ko_mmlu_world_religions|5": { + "acc": 0.5730994152046783, + "acc_stderr": 0.03793620616529917, + "acc_norm": 0.5730994152046783, + "acc_norm_stderr": 0.03793620616529917 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6310679611650486, + "acc_stderr": 0.0477761518115674, + "acc_norm": 0.6310679611650486, + "acc_norm_stderr": 0.0477761518115674 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4559386973180077, + "acc_stderr": 0.017810403925435366, + "acc_norm": 0.4559386973180077, + "acc_norm_stderr": 0.017810403925435366 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.362962962962963, + "acc_stderr": 0.04153948404742399, + "acc_norm": 0.362962962962963, + "acc_norm_stderr": 0.04153948404742399 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847415, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847415 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4765957446808511, + "acc_stderr": 0.03265019475033582, + "acc_norm": 0.4765957446808511, + "acc_norm_stderr": 0.03265019475033582 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3614457831325301, + "acc_stderr": 0.0374005938202932, + "acc_norm": 0.3614457831325301, + "acc_norm_stderr": 0.0374005938202932 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5819935691318328, + "acc_stderr": 0.028013651891995076, + "acc_norm": 0.5819935691318328, + "acc_norm_stderr": 0.028013651891995076 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5067264573991032, + "acc_stderr": 0.03355476596234354, + "acc_norm": 0.5067264573991032, + "acc_norm_stderr": 0.03355476596234354 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4961832061068702, + "acc_stderr": 0.043851623256015534, + "acc_norm": 0.4961832061068702, + "acc_norm_stderr": 0.043851623256015534 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + 
"harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5151515151515151, + "acc_stderr": 0.035607165165310595, + "acc_norm": 0.5151515151515151, + "acc_norm_stderr": 0.035607165165310595 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5862068965517241, + "acc_stderr": 0.041042692118062316, + "acc_norm": 0.5862068965517241, + "acc_norm_stderr": 0.041042692118062316 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.39215686274509803, + "acc_stderr": 0.048580835742663454, + "acc_norm": 0.39215686274509803, + "acc_norm_stderr": 0.048580835742663454 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5756302521008403, + "acc_stderr": 0.03210479051015776, + "acc_norm": 0.5756302521008403, + "acc_norm_stderr": 0.03210479051015776 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5205128205128206, + "acc_stderr": 0.02532966316348994, + "acc_norm": 0.5205128205128206, + "acc_norm_stderr": 0.02532966316348994 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6296296296296297, + "acc_stderr": 0.04668408033024931, + "acc_norm": 0.6296296296296297, + "acc_norm_stderr": 0.04668408033024931 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4630541871921182, + "acc_stderr": 0.035083705204426656, + "acc_norm": 0.4630541871921182, + "acc_norm_stderr": 0.035083705204426656 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5580645161290323, + "acc_stderr": 0.02825155790684974, + "acc_norm": 0.5580645161290323, + "acc_norm_stderr": 0.02825155790684974 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7478632478632479, + "acc_stderr": 0.02844796547623102, + "acc_norm": 
0.7478632478632479, + "acc_norm_stderr": 0.02844796547623102 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5169811320754717, + "acc_stderr": 0.030755120364119898, + "acc_norm": 0.5169811320754717, + "acc_norm_stderr": 0.030755120364119898 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5363636363636364, + "acc_stderr": 0.04776449162396197, + "acc_norm": 0.5363636363636364, + "acc_norm_stderr": 0.04776449162396197 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3925925925925926, + "acc_stderr": 0.029773847012532967, + "acc_norm": 0.3925925925925926, + "acc_norm_stderr": 0.029773847012532967 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3841059602649007, + "acc_stderr": 0.03971301814719198, + "acc_norm": 0.3841059602649007, + "acc_norm_stderr": 0.03971301814719198 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6915422885572139, + "acc_stderr": 0.032658195885126966, + "acc_norm": 0.6915422885572139, + "acc_norm_stderr": 0.032658195885126966 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4797687861271676, + "acc_stderr": 0.03809342081273959, + "acc_norm": 0.4797687861271676, + "acc_norm_stderr": 0.03809342081273959 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.4021164021164021, + "acc_stderr": 0.025253032554997692, + "acc_norm": 0.4021164021164021, + "acc_norm_stderr": 0.025253032554997692 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4652777777777778, + "acc_stderr": 0.04171115858181618, + "acc_norm": 0.4652777777777778, + "acc_norm_stderr": 0.04171115858181618 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.73, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.73, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5549132947976878, + "acc_stderr": 
0.02675625512966377, + "acc_norm": 0.5549132947976878, + "acc_norm_stderr": 0.02675625512966377 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4723926380368098, + "acc_stderr": 0.039223782906109894, + "acc_norm": 0.4723926380368098, + "acc_norm_stderr": 0.039223782906109894 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5462962962962963, + "acc_stderr": 0.0277012284685426, + "acc_norm": 0.5462962962962963, + "acc_norm_stderr": 0.0277012284685426 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5803108808290155, + "acc_stderr": 0.035615873276858834, + "acc_norm": 0.5803108808290155, + "acc_norm_stderr": 0.035615873276858834 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.34210526315789475, + "acc_stderr": 0.044629175353369376, + "acc_norm": 0.34210526315789475, + "acc_norm_stderr": 0.044629175353369376 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6128440366972477, + "acc_stderr": 0.02088423199264345, + "acc_norm": 0.6128440366972477, + "acc_norm_stderr": 0.02088423199264345 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.0442626668137991, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.0442626668137991 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.565359477124183, + "acc_stderr": 0.028384256704883037, + "acc_norm": 0.565359477124183, + "acc_norm_stderr": 0.028384256704883037 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.64, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.64, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7024793388429752, + "acc_stderr": 0.04173349148083498, + "acc_norm": 0.7024793388429752, + "acc_norm_stderr": 0.04173349148083498 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 
0.5526315789473685, + "acc_stderr": 0.04046336883978251, + "acc_norm": 0.5526315789473685, + "acc_norm_stderr": 0.04046336883978251 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.46078431372549017, + "acc_stderr": 0.02016552331390791, + "acc_norm": 0.46078431372549017, + "acc_norm_stderr": 0.02016552331390791 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3546099290780142, + "acc_stderr": 0.02853865002887864, + "acc_norm": 0.3546099290780142, + "acc_norm_stderr": 0.02853865002887864 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.45535714285714285, + "acc_stderr": 0.04726835553719099, + "acc_norm": 0.45535714285714285, + "acc_norm_stderr": 0.04726835553719099 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.03400603625538272, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.03400603625538272 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2860335195530726, + "acc_stderr": 0.015113972129062136, + "acc_norm": 0.2860335195530726, + "acc_norm_stderr": 0.015113972129062136 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.41, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.72, + "acc_stderr": 0.04512608598542129, + "acc_norm": 0.72, + "acc_norm_stderr": 0.04512608598542129 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4117647058823529, + "acc_stderr": 0.02989616303312547, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.02989616303312547 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.636734693877551, + "acc_stderr": 0.030789051139030806, + "acc_norm": 0.636734693877551, + "acc_norm_stderr": 0.030789051139030806 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.03068582059661082, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 
0.03068582059661082 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.38265971316818775, + "acc_stderr": 0.012413595882893268, + "acc_norm": 0.38265971316818775, + "acc_norm_stderr": 0.012413595882893268 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5784313725490197, + "acc_stderr": 0.03465868196380762, + "acc_norm": 0.5784313725490197, + "acc_norm_stderr": 0.03465868196380762 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6848484848484848, + "acc_stderr": 0.0362773057502241, + "acc_norm": 0.6848484848484848, + "acc_norm_stderr": 0.0362773057502241 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.38310893512851896, + "mc1_stderr": 0.017018461679389862, + "mc2": 0.5668280924456803, + "mc2_stderr": 0.016082279449567388 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5324675324675324, + "acc_stderr": 0.017154073716682865, + "acc_norm": 0.5537190082644629, + "acc_norm_stderr": 0.017090852631668332 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + 
"harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "VAGOsolutions/Llama-3-SauerkrautLM-8b-Instruct", + "model_sha": "37127c44d7c0fb56cef817270c4b1a6802d8793a", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of 
file diff --git a/VAGOsolutions/SauerkrautLM-Gemma-7b/result_2024-05-17 21:58:02.json b/VAGOsolutions/SauerkrautLM-Gemma-7b/result_2024-05-17 21:58:02.json new file mode 100644 index 0000000000000000000000000000000000000000..484a3962774dd2ef7a816c7b2f2d754c502ed847 --- /dev/null +++ b/VAGOsolutions/SauerkrautLM-Gemma-7b/result_2024-05-17 21:58:02.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.19795221843003413, + "acc_stderr": 0.011643990971573405, + "acc_norm": 0.27047781569965873, + "acc_norm_stderr": 0.012980954547659556 + }, + "harness|ko_hellaswag|10": { + "acc": 0.2577175861382195, + "acc_stderr": 0.004364838000335624, + "acc_norm": 0.25582553276239794, + "acc_norm_stderr": 0.004354325017137538 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.21052631578947367, + "acc_stderr": 0.0312678171466318, + "acc_norm": 0.21052631578947367, + "acc_norm_stderr": 0.0312678171466318 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.27184466019417475, + "acc_stderr": 0.044052680241409216, + "acc_norm": 0.27184466019417475, + "acc_norm_stderr": 0.044052680241409216 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.28991060025542786, + "acc_stderr": 0.01622501794477096, + "acc_norm": 0.28991060025542786, + "acc_norm_stderr": 0.01622501794477096 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.24444444444444444, + "acc_stderr": 0.03712537833614865, + "acc_norm": 0.24444444444444444, + "acc_norm_stderr": 0.03712537833614865 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.22, + "acc_stderr": 0.041633319989322695, + "acc_norm": 0.22, + "acc_norm_stderr": 0.041633319989322695 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.28085106382978725, + "acc_stderr": 0.029379170464124815, + "acc_norm": 0.28085106382978725, + "acc_norm_stderr": 0.029379170464124815 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.30120481927710846, + "acc_stderr": 0.0357160923005348, + "acc_norm": 0.30120481927710846, + 
"acc_norm_stderr": 0.0357160923005348 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3054662379421222, + "acc_stderr": 0.026160584450140485, + "acc_norm": 0.3054662379421222, + "acc_norm_stderr": 0.026160584450140485 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.35874439461883406, + "acc_stderr": 0.03219079200419997, + "acc_norm": 0.35874439461883406, + "acc_norm_stderr": 0.03219079200419997 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2748091603053435, + "acc_stderr": 0.03915345408847835, + "acc_norm": 0.2748091603053435, + "acc_norm_stderr": 0.03915345408847835 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.22727272727272727, + "acc_stderr": 0.029857515673386414, + "acc_norm": 0.22727272727272727, + "acc_norm_stderr": 0.029857515673386414 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.22758620689655173, + "acc_stderr": 0.03493950380131184, + "acc_norm": 0.22758620689655173, + "acc_norm_stderr": 0.03493950380131184 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.18627450980392157, + "acc_stderr": 0.03873958714149351, + "acc_norm": 0.18627450980392157, + "acc_norm_stderr": 0.03873958714149351 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.23949579831932774, + "acc_stderr": 0.027722065493361273, + "acc_norm": 0.23949579831932774, + "acc_norm_stderr": 0.027722065493361273 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2205128205128205, + "acc_stderr": 0.021020672680827912, + "acc_norm": 0.2205128205128205, + "acc_norm_stderr": 0.021020672680827912 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816508, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816508 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + 
"acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.04330043749650742, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.04330043749650742 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.27586206896551724, + "acc_stderr": 0.03144712581678243, + "acc_norm": 0.27586206896551724, + "acc_norm_stderr": 0.03144712581678243 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.25806451612903225, + "acc_stderr": 0.024892469172462833, + "acc_norm": 0.25806451612903225, + "acc_norm_stderr": 0.024892469172462833 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2606837606837607, + "acc_stderr": 0.028760348956523414, + "acc_norm": 0.2606837606837607, + "acc_norm_stderr": 0.028760348956523414 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2679245283018868, + "acc_stderr": 0.027257260322494845, + "acc_norm": 0.2679245283018868, + "acc_norm_stderr": 0.027257260322494845 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.34545454545454546, + "acc_stderr": 0.04554619617541054, + "acc_norm": 0.34545454545454546, + "acc_norm_stderr": 0.04554619617541054 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2518518518518518, + "acc_stderr": 0.02646611753895992, + "acc_norm": 0.2518518518518518, + "acc_norm_stderr": 0.02646611753895992 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.1986754966887417, + "acc_stderr": 0.03257847384436776, + "acc_norm": 0.1986754966887417, + "acc_norm_stderr": 0.03257847384436776 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.23880597014925373, + "acc_stderr": 0.030147775935409224, + "acc_norm": 0.23880597014925373, + "acc_norm_stderr": 0.030147775935409224 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2023121387283237, + "acc_stderr": 0.030631145539198823, + "acc_norm": 0.2023121387283237, + "acc_norm_stderr": 0.030631145539198823 + }, + 
"harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.25132275132275134, + "acc_stderr": 0.022340482339643898, + "acc_norm": 0.25132275132275134, + "acc_norm_stderr": 0.022340482339643898 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.22916666666666666, + "acc_stderr": 0.03514697467862388, + "acc_norm": 0.22916666666666666, + "acc_norm_stderr": 0.03514697467862388 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.23, + "acc_stderr": 0.042295258468165065, + "acc_norm": 0.23, + "acc_norm_stderr": 0.042295258468165065 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2543352601156069, + "acc_stderr": 0.02344582627654554, + "acc_norm": 0.2543352601156069, + "acc_norm_stderr": 0.02344582627654554 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.24539877300613497, + "acc_stderr": 0.03380939813943353, + "acc_norm": 0.24539877300613497, + "acc_norm_stderr": 0.03380939813943353 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2716049382716049, + "acc_stderr": 0.024748624490537368, + "acc_norm": 0.2716049382716049, + "acc_norm_stderr": 0.024748624490537368 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.22, + "acc_stderr": 0.0416333199893227, + "acc_norm": 0.22, + "acc_norm_stderr": 0.0416333199893227 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.20725388601036268, + "acc_stderr": 0.02925282329180363, + "acc_norm": 0.20725388601036268, + "acc_norm_stderr": 0.02925282329180363 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2982456140350877, + "acc_stderr": 0.04303684033537316, + "acc_norm": 0.2982456140350877, + "acc_norm_stderr": 0.04303684033537316 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.23669724770642203, + "acc_stderr": 0.018224078117299095, + "acc_norm": 0.23669724770642203, + "acc_norm_stderr": 
0.018224078117299095 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.20634920634920634, + "acc_stderr": 0.0361960452412425, + "acc_norm": 0.20634920634920634, + "acc_norm_stderr": 0.0361960452412425 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.02355083135199509, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.02355083135199509 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.24793388429752067, + "acc_stderr": 0.039418975265163025, + "acc_norm": 0.24793388429752067, + "acc_norm_stderr": 0.039418975265163025 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.20394736842105263, + "acc_stderr": 0.0327900040631005, + "acc_norm": 0.20394736842105263, + "acc_norm_stderr": 0.0327900040631005 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2434640522875817, + "acc_stderr": 0.017362473762146623, + "acc_norm": 0.2434640522875817, + "acc_norm_stderr": 0.017362473762146623 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2553191489361702, + "acc_stderr": 0.02601199293090201, + "acc_norm": 0.2553191489361702, + "acc_norm_stderr": 0.02601199293090201 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2767857142857143, + "acc_stderr": 0.042466243366976256, + "acc_norm": 0.2767857142857143, + "acc_norm_stderr": 0.042466243366976256 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.17592592592592593, + "acc_stderr": 0.02596742095825853, + "acc_norm": 0.17592592592592593, + "acc_norm_stderr": 0.02596742095825853 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.15, + "acc_stderr": 0.0358870281282637, + 
"acc_norm": 0.15, + "acc_norm_stderr": 0.0358870281282637 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932269, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932269 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.21323529411764705, + "acc_stderr": 0.024880971512294264, + "acc_norm": 0.21323529411764705, + "acc_norm_stderr": 0.024880971512294264 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2163265306122449, + "acc_stderr": 0.026358916334904028, + "acc_norm": 0.2163265306122449, + "acc_norm_stderr": 0.026358916334904028 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2616033755274262, + "acc_stderr": 0.028609516716994934, + "acc_norm": 0.2616033755274262, + "acc_norm_stderr": 0.028609516716994934 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.23598435462842243, + "acc_stderr": 0.010844802669662682, + "acc_norm": 0.23598435462842243, + "acc_norm_stderr": 0.010844802669662682 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.030190282453501954, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.030190282453501954 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2545454545454545, + "acc_stderr": 0.03401506715249039, + "acc_norm": 0.2545454545454545, + "acc_norm_stderr": 0.03401506715249039 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24479804161566707, + "mc1_stderr": 0.015051869486715014, + "mc2": 0.4848104610131041, + "mc2_stderr": 0.017159975761829056 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.08382526564344746, + "acc_stderr": 0.0095277739135922, + "acc_norm": 0.2597402597402597, + "acc_norm_stderr": 0.015075666411230324 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + 
"harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + 
"harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "VAGOsolutions/SauerkrautLM-Gemma-7b", + "model_sha": "4296bdabf82e900235b094e5348be03ebb0ec891", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/VIRNECT/llama-3-Korean-8B-V2/result_2024-07-18 05:53:14.json b/VIRNECT/llama-3-Korean-8B-V2/result_2024-07-18 05:53:14.json new file mode 100644 index 0000000000000000000000000000000000000000..2fb0ee039ac12477f6f360075244199039ad552b --- /dev/null +++ b/VIRNECT/llama-3-Korean-8B-V2/result_2024-07-18 05:53:14.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.36177474402730375, + "acc_stderr": 0.014041957945038078, + "acc_norm": 0.40784982935153585, + "acc_norm_stderr": 0.014361097288449696 + }, + "harness|ko_hellaswag|10": { + "acc": 0.37641904003186616, + "acc_stderr": 0.004834969412883639, + "acc_norm": 0.481876120294762, + "acc_norm_stderr": 0.004986502296931184 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.543859649122807, + "acc_stderr": 0.038200425866029654, + "acc_norm": 0.543859649122807, + "acc_norm_stderr": 0.038200425866029654 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6116504854368932, + "acc_stderr": 0.0482572933735639, + "acc_norm": 0.6116504854368932, + "acc_norm_stderr": 0.0482572933735639 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 
0.4661558109833972, + "acc_stderr": 0.017838956009136802, + "acc_norm": 0.4661558109833972, + "acc_norm_stderr": 0.017838956009136802 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3111111111111111, + "acc_stderr": 0.039992628766177214, + "acc_norm": 0.3111111111111111, + "acc_norm_stderr": 0.039992628766177214 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.44680851063829785, + "acc_stderr": 0.0325005368436584, + "acc_norm": 0.44680851063829785, + "acc_norm_stderr": 0.0325005368436584 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.35542168674698793, + "acc_stderr": 0.03726214354322415, + "acc_norm": 0.35542168674698793, + "acc_norm_stderr": 0.03726214354322415 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5434083601286174, + "acc_stderr": 0.028290869054197604, + "acc_norm": 0.5434083601286174, + "acc_norm_stderr": 0.028290869054197604 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.45739910313901344, + "acc_stderr": 0.033435777055830646, + "acc_norm": 0.45739910313901344, + "acc_norm_stderr": 0.033435777055830646 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5648854961832062, + "acc_stderr": 0.043482080516448585, + "acc_norm": 0.5648854961832062, + "acc_norm_stderr": 0.043482080516448585 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5353535353535354, + "acc_stderr": 0.03553436368828064, + "acc_norm": 0.5353535353535354, + "acc_norm_stderr": 0.03553436368828064 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5172413793103449, + "acc_stderr": 0.04164188720169375, + "acc_norm": 0.5172413793103449, + "acc_norm_stderr": 0.04164188720169375 + }, + "harness|ko_mmlu_college_physics|5": { 
+ "acc": 0.3431372549019608, + "acc_stderr": 0.047240073523838876, + "acc_norm": 0.3431372549019608, + "acc_norm_stderr": 0.047240073523838876 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5630252100840336, + "acc_stderr": 0.03221943636566197, + "acc_norm": 0.5630252100840336, + "acc_norm_stderr": 0.03221943636566197 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5205128205128206, + "acc_stderr": 0.02532966316348994, + "acc_norm": 0.5205128205128206, + "acc_norm_stderr": 0.02532966316348994 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.58, + "acc_stderr": 0.04960449637488583, + "acc_norm": 0.58, + "acc_norm_stderr": 0.04960449637488583 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6111111111111112, + "acc_stderr": 0.0471282125742677, + "acc_norm": 0.6111111111111112, + "acc_norm_stderr": 0.0471282125742677 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4630541871921182, + "acc_stderr": 0.035083705204426656, + "acc_norm": 0.4630541871921182, + "acc_norm_stderr": 0.035083705204426656 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5290322580645161, + "acc_stderr": 0.028396016402761, + "acc_norm": 0.5290322580645161, + "acc_norm_stderr": 0.028396016402761 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7264957264957265, + "acc_stderr": 0.029202540153431183, + "acc_norm": 0.7264957264957265, + "acc_norm_stderr": 0.029202540153431183 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.49433962264150944, + "acc_stderr": 0.03077090076385131, + "acc_norm": 0.49433962264150944, + "acc_norm_stderr": 0.03077090076385131 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4909090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.4909090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + 
"harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.028742040903948496, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.028742040903948496 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3443708609271523, + "acc_stderr": 0.03879687024073327, + "acc_norm": 0.3443708609271523, + "acc_norm_stderr": 0.03879687024073327 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6318407960199005, + "acc_stderr": 0.03410410565495302, + "acc_norm": 0.6318407960199005, + "acc_norm_stderr": 0.03410410565495302 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.42196531791907516, + "acc_stderr": 0.0376574669386515, + "acc_norm": 0.42196531791907516, + "acc_norm_stderr": 0.0376574669386515 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.37566137566137564, + "acc_stderr": 0.024942368931159795, + "acc_norm": 0.37566137566137564, + "acc_norm_stderr": 0.024942368931159795 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4375, + "acc_stderr": 0.04148415739394154, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.04148415739394154 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.7, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.7, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5491329479768786, + "acc_stderr": 0.026788811931562757, + "acc_norm": 0.5491329479768786, + "acc_norm_stderr": 0.026788811931562757 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.44785276073619634, + "acc_stderr": 0.03906947479456601, + "acc_norm": 0.44785276073619634, + "acc_norm_stderr": 0.03906947479456601 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4876543209876543, + "acc_stderr": 0.027812262269327242, + "acc_norm": 0.4876543209876543, + "acc_norm_stderr": 0.027812262269327242 + 
}, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5492227979274611, + "acc_stderr": 0.03590910952235524, + "acc_norm": 0.5492227979274611, + "acc_norm_stderr": 0.03590910952235524 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3508771929824561, + "acc_stderr": 0.044895393502706986, + "acc_norm": 0.3508771929824561, + "acc_norm_stderr": 0.044895393502706986 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5504587155963303, + "acc_stderr": 0.021327881417823363, + "acc_norm": 0.5504587155963303, + "acc_norm_stderr": 0.021327881417823363 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.373015873015873, + "acc_stderr": 0.04325506042017086, + "acc_norm": 0.373015873015873, + "acc_norm_stderr": 0.04325506042017086 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5620915032679739, + "acc_stderr": 0.028408302020332694, + "acc_norm": 0.5620915032679739, + "acc_norm_stderr": 0.028408302020332694 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.55, + "acc_stderr": 0.04999999999999999, + "acc_norm": 0.55, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6694214876033058, + "acc_stderr": 0.04294340845212094, + "acc_norm": 0.6694214876033058, + "acc_norm_stderr": 0.04294340845212094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5197368421052632, + "acc_stderr": 0.04065771002562603, + "acc_norm": 0.5197368421052632, + "acc_norm_stderr": 0.04065771002562603 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3954248366013072, + "acc_stderr": 0.01978046595477752, + "acc_norm": 0.3954248366013072, + "acc_norm_stderr": 0.01978046595477752 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3617021276595745, + "acc_stderr": 0.028663820147199495, + "acc_norm": 0.3617021276595745, + 
"acc_norm_stderr": 0.028663820147199495 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.44642857142857145, + "acc_stderr": 0.047184714852195886, + "acc_norm": 0.44642857142857145, + "acc_norm_stderr": 0.047184714852195886 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.41203703703703703, + "acc_stderr": 0.03356787758160834, + "acc_norm": 0.41203703703703703, + "acc_norm_stderr": 0.03356787758160834 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24804469273743016, + "acc_stderr": 0.014444157808261452, + "acc_norm": 0.24804469273743016, + "acc_norm_stderr": 0.014444157808261452 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562429, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562429 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.58, + "acc_stderr": 0.04960449637488583, + "acc_norm": 0.58, + "acc_norm_stderr": 0.04960449637488583 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.40441176470588236, + "acc_stderr": 0.029812630701569743, + "acc_norm": 0.40441176470588236, + "acc_norm_stderr": 0.029812630701569743 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6, + "acc_stderr": 0.03136250240935894, + "acc_norm": 0.6, + "acc_norm_stderr": 0.03136250240935894 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6075949367088608, + "acc_stderr": 0.0317847187456473, + "acc_norm": 0.6075949367088608, + "acc_norm_stderr": 0.0317847187456473 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3650586701434159, + "acc_stderr": 0.012296373743443478, + "acc_norm": 0.3650586701434159, + "acc_norm_stderr": 0.012296373743443478 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5098039215686274, + "acc_stderr": 0.035086373586305716, + "acc_norm": 0.5098039215686274, + "acc_norm_stderr": 0.035086373586305716 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5818181818181818, + "acc_stderr": 
0.03851716319398393, + "acc_norm": 0.5818181818181818, + "acc_norm_stderr": 0.03851716319398393 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.29008567931456547, + "mc1_stderr": 0.01588623687420952, + "mc2": 0.4680116781390204, + "mc2_stderr": 0.01586332523244538 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4769775678866588, + "acc_stderr": 0.017172121546727637, + "acc_norm": 0.5076741440377804, + "acc_norm_stderr": 0.01718832921965427 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + 
"harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "VIRNECT/llama-3-Korean-8B-V2", + "model_sha": "81925917442b3be60e5be3f11eeccf2c2a1a4950", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/VIRNECT/llama-3-Korean-8B-V3/result_2024-07-26 01:31:25.json b/VIRNECT/llama-3-Korean-8B-V3/result_2024-07-26 01:31:25.json new file mode 100644 index 0000000000000000000000000000000000000000..2a9d65912be84e43d74759ec8ec3c17cb8dad311 --- /dev/null +++ b/VIRNECT/llama-3-Korean-8B-V3/result_2024-07-26 01:31:25.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3165529010238908, + "acc_stderr": 0.01359243151906808, + "acc_norm": 
0.36945392491467577, + "acc_norm_stderr": 0.014104578366491906 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3609838677554272, + "acc_stderr": 0.004793042992396041, + "acc_norm": 0.4591714797849034, + "acc_norm_stderr": 0.004973117975062485 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4502923976608187, + "acc_stderr": 0.038158273659132366, + "acc_norm": 0.4502923976608187, + "acc_norm_stderr": 0.038158273659132366 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5631067961165048, + "acc_stderr": 0.04911147107365777, + "acc_norm": 0.5631067961165048, + "acc_norm_stderr": 0.04911147107365777 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.42656449553001274, + "acc_stderr": 0.017686066975675634, + "acc_norm": 0.42656449553001274, + "acc_norm_stderr": 0.017686066975675634 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04072314811876837, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04072314811876837 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.425531914893617, + "acc_stderr": 0.03232146916224468, + "acc_norm": 0.425531914893617, + "acc_norm_stderr": 0.03232146916224468 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4036144578313253, + "acc_stderr": 0.03819486140758397, + "acc_norm": 0.4036144578313253, + "acc_norm_stderr": 0.03819486140758397 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.47266881028938906, + "acc_stderr": 0.028355633568328195, + "acc_norm": 0.47266881028938906, + "acc_norm_stderr": 0.028355633568328195 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4484304932735426, + "acc_stderr": 0.03337883736255098, + "acc_norm": 0.4484304932735426, + "acc_norm_stderr": 0.03337883736255098 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5572519083969466, + "acc_stderr": 0.043564472026650695, + 
"acc_norm": 0.5572519083969466, + "acc_norm_stderr": 0.043564472026650695 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.42, + "acc_stderr": 0.04960449637488583, + "acc_norm": 0.42, + "acc_norm_stderr": 0.04960449637488583 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.48484848484848486, + "acc_stderr": 0.03560716516531061, + "acc_norm": 0.48484848484848486, + "acc_norm_stderr": 0.03560716516531061 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.503448275862069, + "acc_stderr": 0.04166567577101579, + "acc_norm": 0.503448275862069, + "acc_norm_stderr": 0.04166567577101579 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.35294117647058826, + "acc_stderr": 0.04755129616062946, + "acc_norm": 0.35294117647058826, + "acc_norm_stderr": 0.04755129616062946 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5336134453781513, + "acc_stderr": 0.03240501447690071, + "acc_norm": 0.5336134453781513, + "acc_norm_stderr": 0.03240501447690071 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.49743589743589745, + "acc_stderr": 0.025350672979412205, + "acc_norm": 0.49743589743589745, + "acc_norm_stderr": 0.025350672979412205 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.53, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.53, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5740740740740741, + "acc_stderr": 0.0478034362693679, + "acc_norm": 0.5740740740740741, + "acc_norm_stderr": 0.0478034362693679 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4433497536945813, + "acc_stderr": 0.03495334582162934, + "acc_norm": 0.4433497536945813, + "acc_norm_stderr": 0.03495334582162934 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.432258064516129, + "acc_stderr": 
0.028181739720019416, + "acc_norm": 0.432258064516129, + "acc_norm_stderr": 0.028181739720019416 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6794871794871795, + "acc_stderr": 0.03057281131029961, + "acc_norm": 0.6794871794871795, + "acc_norm_stderr": 0.03057281131029961 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.44528301886792454, + "acc_stderr": 0.030588052974270658, + "acc_norm": 0.44528301886792454, + "acc_norm_stderr": 0.030588052974270658 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4636363636363636, + "acc_stderr": 0.04776449162396197, + "acc_norm": 0.4636363636363636, + "acc_norm_stderr": 0.04776449162396197 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3962962962962963, + "acc_stderr": 0.029822619458533997, + "acc_norm": 0.3962962962962963, + "acc_norm_stderr": 0.029822619458533997 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33112582781456956, + "acc_stderr": 0.038425817186598696, + "acc_norm": 0.33112582781456956, + "acc_norm_stderr": 0.038425817186598696 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6019900497512438, + "acc_stderr": 0.03461199429040013, + "acc_norm": 0.6019900497512438, + "acc_norm_stderr": 0.03461199429040013 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.37572254335260113, + "acc_stderr": 0.036928207672648664, + "acc_norm": 0.37572254335260113, + "acc_norm_stderr": 0.036928207672648664 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.373015873015873, + "acc_stderr": 0.02490699045899257, + "acc_norm": 0.373015873015873, + "acc_norm_stderr": 0.02490699045899257 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4027777777777778, + "acc_stderr": 0.04101405519842425, + "acc_norm": 0.4027777777777778, + "acc_norm_stderr": 0.04101405519842425 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + 
"harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.62, + "acc_stderr": 0.04878317312145634, + "acc_norm": 0.62, + "acc_norm_stderr": 0.04878317312145634 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.45375722543352603, + "acc_stderr": 0.026803720583206184, + "acc_norm": 0.45375722543352603, + "acc_norm_stderr": 0.026803720583206184 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.44171779141104295, + "acc_stderr": 0.03901591825836184, + "acc_norm": 0.44171779141104295, + "acc_norm_stderr": 0.03901591825836184 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4537037037037037, + "acc_stderr": 0.027701228468542602, + "acc_norm": 0.4537037037037037, + "acc_norm_stderr": 0.027701228468542602 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.49222797927461137, + "acc_stderr": 0.036080032255696545, + "acc_norm": 0.49222797927461137, + "acc_norm_stderr": 0.036080032255696545 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.38596491228070173, + "acc_stderr": 0.04579639422070435, + "acc_norm": 0.38596491228070173, + "acc_norm_stderr": 0.04579639422070435 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.544954128440367, + "acc_stderr": 0.021350503090925167, + "acc_norm": 0.544954128440367, + "acc_norm_stderr": 0.021350503090925167 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.38095238095238093, + "acc_stderr": 0.043435254289490965, + "acc_norm": 0.38095238095238093, + "acc_norm_stderr": 0.043435254289490965 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5065359477124183, + "acc_stderr": 0.028627470550556047, + "acc_norm": 0.5065359477124183, + "acc_norm_stderr": 0.028627470550556047 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.53, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.53, + "acc_norm_stderr": 
0.05016135580465919 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7024793388429752, + "acc_stderr": 0.04173349148083499, + "acc_norm": 0.7024793388429752, + "acc_norm_stderr": 0.04173349148083499 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4407894736842105, + "acc_stderr": 0.04040311062490435, + "acc_norm": 0.4407894736842105, + "acc_norm_stderr": 0.04040311062490435 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.32679738562091504, + "acc_stderr": 0.018975427920507215, + "acc_norm": 0.32679738562091504, + "acc_norm_stderr": 0.018975427920507215 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3475177304964539, + "acc_stderr": 0.028406627809590954, + "acc_norm": 0.3475177304964539, + "acc_norm_stderr": 0.028406627809590954 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.38392857142857145, + "acc_stderr": 0.04616143075028547, + "acc_norm": 0.38392857142857145, + "acc_norm_stderr": 0.04616143075028547 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4398148148148148, + "acc_stderr": 0.0338517797604481, + "acc_norm": 0.4398148148148148, + "acc_norm_stderr": 0.0338517797604481 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.26145251396648045, + "acc_stderr": 0.014696599650364545, + "acc_norm": 0.26145251396648045, + "acc_norm_stderr": 0.014696599650364545 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.39338235294117646, + "acc_stderr": 0.02967428828131117, + "acc_norm": 0.39338235294117646, + "acc_norm_stderr": 0.02967428828131117 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.563265306122449, + "acc_stderr": 
0.031751952375833226, + "acc_norm": 0.563265306122449, + "acc_norm_stderr": 0.031751952375833226 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5358649789029536, + "acc_stderr": 0.03246338898055659, + "acc_norm": 0.5358649789029536, + "acc_norm_stderr": 0.03246338898055659 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3494132985658409, + "acc_stderr": 0.012177306252786695, + "acc_norm": 0.3494132985658409, + "acc_norm_stderr": 0.012177306252786695 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.03484941514429231, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.03484941514429231 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5393939393939394, + "acc_stderr": 0.03892207016552012, + "acc_norm": 0.5393939393939394, + "acc_norm_stderr": 0.03892207016552012 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24969400244798043, + "mc1_stderr": 0.015152286907148128, + "mc2": 0.4023716720709486, + "mc2_stderr": 0.015378171733120649 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.42266824085005905, + "acc_stderr": 0.016983506079577604, + "acc_norm": 0.4899645808736718, + "acc_norm_stderr": 0.01718689128689405 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + 
"harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + 
"model_name": "VIRNECT/llama-3-Korean-8B-V3", + "model_sha": "2161672d50fbd2fad5b434f2fd0f148c439058c4", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/VIRNECT/llama-3-Korean-8B-r-v1/result_2024-07-23 02:04:24.json b/VIRNECT/llama-3-Korean-8B-r-v1/result_2024-07-23 02:04:24.json new file mode 100644 index 0000000000000000000000000000000000000000..7683c5eecdfcbe14ea3607fb0027d99f00bdccba --- /dev/null +++ b/VIRNECT/llama-3-Korean-8B-r-v1/result_2024-07-23 02:04:24.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3054607508532423, + "acc_stderr": 0.013460080478002503, + "acc_norm": 0.35580204778157, + "acc_norm_stderr": 0.013990571137918758 + }, + "harness|ko_hellaswag|10": { + "acc": 0.35391356303525195, + "acc_stderr": 0.0047720549044044285, + "acc_norm": 0.43616809400517825, + "acc_norm_stderr": 0.004948952519517524 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.49707602339181284, + "acc_stderr": 0.03834759370936839, + "acc_norm": 0.49707602339181284, + "acc_norm_stderr": 0.03834759370936839 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5242718446601942, + "acc_stderr": 0.049449010929737795, + "acc_norm": 0.5242718446601942, + "acc_norm_stderr": 0.049449010929737795 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.017769250583533253, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.017769250583533253 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3111111111111111, + "acc_stderr": 0.03999262876617723, + "acc_norm": 0.3111111111111111, + "acc_norm_stderr": 0.03999262876617723 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542126, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542126 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 
0.41702127659574467, + "acc_stderr": 0.03223276266711712, + "acc_norm": 0.41702127659574467, + "acc_norm_stderr": 0.03223276266711712 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3674698795180723, + "acc_stderr": 0.03753267402120574, + "acc_norm": 0.3674698795180723, + "acc_norm_stderr": 0.03753267402120574 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.47266881028938906, + "acc_stderr": 0.02835563356832818, + "acc_norm": 0.47266881028938906, + "acc_norm_stderr": 0.02835563356832818 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4080717488789238, + "acc_stderr": 0.03298574607842822, + "acc_norm": 0.4080717488789238, + "acc_norm_stderr": 0.03298574607842822 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5114503816793893, + "acc_stderr": 0.043841400240780176, + "acc_norm": 0.5114503816793893, + "acc_norm_stderr": 0.043841400240780176 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.45, + "acc_stderr": 0.049999999999999996, + "acc_norm": 0.45, + "acc_norm_stderr": 0.049999999999999996 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5, + "acc_stderr": 0.035623524993954825, + "acc_norm": 0.5, + "acc_norm_stderr": 0.035623524993954825 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5448275862068965, + "acc_stderr": 0.04149886942192117, + "acc_norm": 0.5448275862068965, + "acc_norm_stderr": 0.04149886942192117 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.39215686274509803, + "acc_stderr": 0.048580835742663434, + "acc_norm": 0.39215686274509803, + "acc_norm_stderr": 0.048580835742663434 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5210084033613446, + "acc_stderr": 0.032449808499900284, + "acc_norm": 0.5210084033613446, + "acc_norm_stderr": 0.032449808499900284 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4717948717948718, + "acc_stderr": 0.025310639254933907, + "acc_norm": 0.4717948717948718, + "acc_norm_stderr": 0.025310639254933907 + }, + 
"harness|ko_mmlu_computer_security|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5370370370370371, + "acc_stderr": 0.04820403072760627, + "acc_norm": 0.5370370370370371, + "acc_norm_stderr": 0.04820403072760627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.43349753694581283, + "acc_stderr": 0.034867317274198714, + "acc_norm": 0.43349753694581283, + "acc_norm_stderr": 0.034867317274198714 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.432258064516129, + "acc_stderr": 0.028181739720019416, + "acc_norm": 0.432258064516129, + "acc_norm_stderr": 0.028181739720019416 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6623931623931624, + "acc_stderr": 0.03098029699261856, + "acc_norm": 0.6623931623931624, + "acc_norm_stderr": 0.03098029699261856 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.45660377358490567, + "acc_stderr": 0.03065674869673943, + "acc_norm": 0.45660377358490567, + "acc_norm_stderr": 0.03065674869673943 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4636363636363636, + "acc_stderr": 0.04776449162396197, + "acc_norm": 0.4636363636363636, + "acc_norm_stderr": 0.04776449162396197 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.36666666666666664, + "acc_stderr": 0.02938162072646507, + "acc_norm": 0.36666666666666664, + "acc_norm_stderr": 0.02938162072646507 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33774834437086093, + "acc_stderr": 0.038615575462551684, + "acc_norm": 0.33774834437086093, + "acc_norm_stderr": 0.038615575462551684 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5621890547263682, + "acc_stderr": 0.0350808011219984, + "acc_norm": 0.5621890547263682, + "acc_norm_stderr": 
0.0350808011219984 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.37572254335260113, + "acc_stderr": 0.03692820767264867, + "acc_norm": 0.37572254335260113, + "acc_norm_stderr": 0.03692820767264867 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.36772486772486773, + "acc_stderr": 0.024833839825562417, + "acc_norm": 0.36772486772486773, + "acc_norm_stderr": 0.024833839825562417 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2916666666666667, + "acc_stderr": 0.03800968060554859, + "acc_norm": 0.2916666666666667, + "acc_norm_stderr": 0.03800968060554859 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.57, + "acc_stderr": 0.04975698519562427, + "acc_norm": 0.57, + "acc_norm_stderr": 0.04975698519562427 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.49421965317919075, + "acc_stderr": 0.02691729617914911, + "acc_norm": 0.49421965317919075, + "acc_norm_stderr": 0.02691729617914911 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.39263803680981596, + "acc_stderr": 0.03836740907831029, + "acc_norm": 0.39263803680981596, + "acc_norm_stderr": 0.03836740907831029 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4012345679012346, + "acc_stderr": 0.0272725828498398, + "acc_norm": 0.4012345679012346, + "acc_norm_stderr": 0.0272725828498398 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.45595854922279794, + "acc_stderr": 0.035944137112724366, + "acc_norm": 0.45595854922279794, + "acc_norm_stderr": 0.035944137112724366 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04434600701584925, + "acc_norm": 0.3333333333333333, + 
"acc_norm_stderr": 0.04434600701584925 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5192660550458715, + "acc_stderr": 0.02142140298254889, + "acc_norm": 0.5192660550458715, + "acc_norm_stderr": 0.02142140298254889 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.373015873015873, + "acc_stderr": 0.04325506042017086, + "acc_norm": 0.373015873015873, + "acc_norm_stderr": 0.04325506042017086 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4738562091503268, + "acc_stderr": 0.028590752958852394, + "acc_norm": 0.4738562091503268, + "acc_norm_stderr": 0.028590752958852394 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.628099173553719, + "acc_stderr": 0.04412015806624504, + "acc_norm": 0.628099173553719, + "acc_norm_stderr": 0.04412015806624504 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4144736842105263, + "acc_stderr": 0.04008973785779206, + "acc_norm": 0.4144736842105263, + "acc_norm_stderr": 0.04008973785779206 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3300653594771242, + "acc_stderr": 0.019023726160724556, + "acc_norm": 0.3300653594771242, + "acc_norm_stderr": 0.019023726160724556 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.02812163604063989, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.02812163604063989 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.33035714285714285, + "acc_stderr": 0.04464285714285713, + "acc_norm": 0.33035714285714285, + "acc_norm_stderr": 0.04464285714285713 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4351851851851852, + "acc_stderr": 0.03381200005643526, + "acc_norm": 0.4351851851851852, + "acc_norm_stderr": 0.03381200005643526 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24134078212290502, + "acc_stderr": 
0.014310999547961443, + "acc_norm": 0.24134078212290502, + "acc_norm_stderr": 0.014310999547961443 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3786764705882353, + "acc_stderr": 0.029465133639776125, + "acc_norm": 0.3786764705882353, + "acc_norm_stderr": 0.029465133639776125 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5673469387755102, + "acc_stderr": 0.031717528240626645, + "acc_norm": 0.5673469387755102, + "acc_norm_stderr": 0.031717528240626645 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5569620253164557, + "acc_stderr": 0.03233532777533484, + "acc_norm": 0.5569620253164557, + "acc_norm_stderr": 0.03233532777533484 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3200782268578879, + "acc_stderr": 0.011914791947638519, + "acc_norm": 0.3200782268578879, + "acc_norm_stderr": 0.011914791947638519 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4019607843137255, + "acc_stderr": 0.034411900234824655, + "acc_norm": 0.4019607843137255, + "acc_norm_stderr": 0.034411900234824655 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.03903698647748441, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.03903698647748441 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2766217870257038, + "mc1_stderr": 0.01565960575532691, + "mc2": 0.4590916794721521, + "mc2_stderr": 0.01575464702305187 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.42857142857142855, + "acc_stderr": 0.017014038119297484, + "acc_norm": 0.5029515938606848, + "acc_norm_stderr": 0.017190054580194694 + } + }, + "versions": { + "all": 0, + 
"harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + 
"harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "VIRNECT/llama-3-Korean-8B-r-v1", + "model_sha": "9211f687aa8bcc0dadbf20e0266923c7c9f0bf8c", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/VIRNECT/llama-3-Korean-8B-r-v2/result_2024-07-24 01:02:49.json b/VIRNECT/llama-3-Korean-8B-r-v2/result_2024-07-24 01:02:49.json new file mode 100644 index 0000000000000000000000000000000000000000..900b9ef79db5c1c1c4d58afb64ac284847f6d49a --- /dev/null +++ b/VIRNECT/llama-3-Korean-8B-r-v2/result_2024-07-24 01:02:49.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.30887372013651876, + "acc_stderr": 0.013501770929344003, + "acc_norm": 0.35921501706484643, + "acc_norm_stderr": 0.014020224155839154 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3525194184425413, + "acc_stderr": 0.0047677822560409965, + "acc_norm": 0.4372634933280223, + "acc_norm_stderr": 0.0049503473337018334 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.49707602339181284, + "acc_stderr": 0.03834759370936839, + "acc_norm": 0.49707602339181284, + "acc_norm_stderr": 0.03834759370936839 + }, + "harness|ko_mmlu_management|5": { + "acc": 
0.5825242718446602, + "acc_stderr": 0.048828405482122375, + "acc_norm": 0.5825242718446602, + "acc_norm_stderr": 0.048828405482122375 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4393358876117497, + "acc_stderr": 0.017747874245683613, + "acc_norm": 0.4393358876117497, + "acc_norm_stderr": 0.017747874245683613 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.03944624162501117, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.03944624162501117 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3702127659574468, + "acc_stderr": 0.031565646822367836, + "acc_norm": 0.3702127659574468, + "acc_norm_stderr": 0.031565646822367836 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3674698795180723, + "acc_stderr": 0.03753267402120574, + "acc_norm": 0.3674698795180723, + "acc_norm_stderr": 0.03753267402120574 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4662379421221865, + "acc_stderr": 0.02833327710956279, + "acc_norm": 0.4662379421221865, + "acc_norm_stderr": 0.02833327710956279 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4349775784753363, + "acc_stderr": 0.033272833702713445, + "acc_norm": 0.4349775784753363, + "acc_norm_stderr": 0.033272833702713445 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4961832061068702, + "acc_stderr": 0.04385162325601553, + "acc_norm": 0.4961832061068702, + "acc_norm_stderr": 0.04385162325601553 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.4797979797979798, + "acc_stderr": 0.03559443565563918, + "acc_norm": 0.4797979797979798, + "acc_norm_stderr": 0.03559443565563918 + }, + "harness|ko_mmlu_electrical_engineering|5": { + 
"acc": 0.5172413793103449, + "acc_stderr": 0.04164188720169375, + "acc_norm": 0.5172413793103449, + "acc_norm_stderr": 0.04164188720169375 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.4019607843137255, + "acc_stderr": 0.04878608714466996, + "acc_norm": 0.4019607843137255, + "acc_norm_stderr": 0.04878608714466996 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.542016806722689, + "acc_stderr": 0.03236361111951941, + "acc_norm": 0.542016806722689, + "acc_norm_stderr": 0.03236361111951941 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4717948717948718, + "acc_stderr": 0.025310639254933903, + "acc_norm": 0.4717948717948718, + "acc_norm_stderr": 0.025310639254933903 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5277777777777778, + "acc_stderr": 0.04826217294139894, + "acc_norm": 0.5277777777777778, + "acc_norm_stderr": 0.04826217294139894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4088669950738916, + "acc_stderr": 0.034590588158832314, + "acc_norm": 0.4088669950738916, + "acc_norm_stderr": 0.034590588158832314 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4064516129032258, + "acc_stderr": 0.027941727346256315, + "acc_norm": 0.4064516129032258, + "acc_norm_stderr": 0.027941727346256315 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6752136752136753, + "acc_stderr": 0.03067902276549883, + "acc_norm": 0.6752136752136753, + "acc_norm_stderr": 0.03067902276549883 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.44528301886792454, + "acc_stderr": 0.03058805297427066, + "acc_norm": 0.44528301886792454, + "acc_norm_stderr": 0.03058805297427066 + }, + 
"harness|ko_mmlu_public_relations|5": { + "acc": 0.38181818181818183, + "acc_stderr": 0.04653429807913509, + "acc_norm": 0.38181818181818183, + "acc_norm_stderr": 0.04653429807913509 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.37777777777777777, + "acc_stderr": 0.02956070739246571, + "acc_norm": 0.37777777777777777, + "acc_norm_stderr": 0.02956070739246571 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33774834437086093, + "acc_stderr": 0.038615575462551684, + "acc_norm": 0.33774834437086093, + "acc_norm_stderr": 0.038615575462551684 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5621890547263682, + "acc_stderr": 0.035080801121998406, + "acc_norm": 0.5621890547263682, + "acc_norm_stderr": 0.035080801121998406 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.37572254335260113, + "acc_stderr": 0.036928207672648664, + "acc_norm": 0.37572254335260113, + "acc_norm_stderr": 0.036928207672648664 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.36772486772486773, + "acc_stderr": 0.024833839825562413, + "acc_norm": 0.36772486772486773, + "acc_norm_stderr": 0.024833839825562413 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3541666666666667, + "acc_stderr": 0.039994111357535424, + "acc_norm": 0.3541666666666667, + "acc_norm_stderr": 0.039994111357535424 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237101, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237101 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.48265895953757226, + "acc_stderr": 0.026902900458666647, + "acc_norm": 0.48265895953757226, + "acc_norm_stderr": 0.026902900458666647 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4294478527607362, + "acc_stderr": 0.03889066619112722, + "acc_norm": 0.4294478527607362, 
+ "acc_norm_stderr": 0.03889066619112722 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4104938271604938, + "acc_stderr": 0.027371350925124768, + "acc_norm": 0.4104938271604938, + "acc_norm_stderr": 0.027371350925124768 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.46632124352331605, + "acc_stderr": 0.03600244069867178, + "acc_norm": 0.46632124352331605, + "acc_norm_stderr": 0.03600244069867178 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3157894736842105, + "acc_stderr": 0.04372748290278007, + "acc_norm": 0.3157894736842105, + "acc_norm_stderr": 0.04372748290278007 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5229357798165137, + "acc_stderr": 0.0214147570581755, + "acc_norm": 0.5229357798165137, + "acc_norm_stderr": 0.0214147570581755 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.40476190476190477, + "acc_stderr": 0.04390259265377562, + "acc_norm": 0.40476190476190477, + "acc_norm_stderr": 0.04390259265377562 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.48366013071895425, + "acc_stderr": 0.028614624752805407, + "acc_norm": 0.48366013071895425, + "acc_norm_stderr": 0.028614624752805407 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5950413223140496, + "acc_stderr": 0.04481137755942469, + "acc_norm": 0.5950413223140496, + "acc_norm_stderr": 0.04481137755942469 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40789473684210525, + "acc_stderr": 0.03999309712777471, + "acc_norm": 0.40789473684210525, + "acc_norm_stderr": 0.03999309712777471 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3415032679738562, + "acc_stderr": 0.01918463932809249, + 
"acc_norm": 0.3415032679738562, + "acc_norm_stderr": 0.01918463932809249 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3120567375886525, + "acc_stderr": 0.02764012054516993, + "acc_norm": 0.3120567375886525, + "acc_norm_stderr": 0.02764012054516993 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3392857142857143, + "acc_stderr": 0.04493949068613539, + "acc_norm": 0.3392857142857143, + "acc_norm_stderr": 0.04493949068613539 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.033622774366080424, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.033622774366080424 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2435754189944134, + "acc_stderr": 0.01435591196476786, + "acc_norm": 0.2435754189944134, + "acc_norm_stderr": 0.01435591196476786 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.57, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.57, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3860294117647059, + "acc_stderr": 0.029573269134411124, + "acc_norm": 0.3860294117647059, + "acc_norm_stderr": 0.029573269134411124 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5714285714285714, + "acc_stderr": 0.03168091161233882, + "acc_norm": 0.5714285714285714, + "acc_norm_stderr": 0.03168091161233882 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5232067510548524, + "acc_stderr": 0.032512152011410174, + "acc_norm": 0.5232067510548524, + "acc_norm_stderr": 0.032512152011410174 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.32529335071707954, + "acc_stderr": 0.011965311536571528, + "acc_norm": 0.32529335071707954, + "acc_norm_stderr": 0.011965311536571528 + }, + 
"harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.44607843137254904, + "acc_stderr": 0.03488845451304974, + "acc_norm": 0.44607843137254904, + "acc_norm_stderr": 0.03488845451304974 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.03903698647748441, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.03903698647748441 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2876376988984088, + "mc1_stderr": 0.015846315101394812, + "mc2": 0.47082864815026243, + "mc2_stderr": 0.015846077304211958 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.45454545454545453, + "acc_stderr": 0.017119172208061504, + "acc_norm": 0.4982290436835891, + "acc_norm_stderr": 0.017190246276231863 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 
1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "VIRNECT/llama-3-Korean-8B-r-v2", + "model_sha": "1dd735f4fca9b057696a21ccca5d449d3b85fec6", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Walmart-the-bag/Yi-6B-Infinity-Chat/result_2023-12-24 01:33:21.json b/Walmart-the-bag/Yi-6B-Infinity-Chat/result_2023-12-24 01:33:21.json new file mode 100644 index 
0000000000000000000000000000000000000000..4bd17b2132abdee055d9283d3d3c251115e3baac --- /dev/null +++ b/Walmart-the-bag/Yi-6B-Infinity-Chat/result_2023-12-24 01:33:21.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.22696245733788395, + "acc_stderr": 0.012240491536132873, + "acc_norm": 0.26535836177474403, + "acc_norm_stderr": 0.012902554762313967 + }, + "harness|ko_hellaswag|10": { + "acc": 0.30521808404700257, + "acc_stderr": 0.00459558602758378, + "acc_norm": 0.35610436168094006, + "acc_norm_stderr": 0.004778679507786504 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.391812865497076, + "acc_stderr": 0.037439798259264016, + "acc_norm": 0.391812865497076, + "acc_norm_stderr": 0.037439798259264016 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.49514563106796117, + "acc_stderr": 0.04950504382128919, + "acc_norm": 0.49514563106796117, + "acc_norm_stderr": 0.04950504382128919 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3524904214559387, + "acc_stderr": 0.017084150244081373, + "acc_norm": 0.3524904214559387, + "acc_norm_stderr": 0.017084150244081373 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.32592592592592595, + "acc_stderr": 0.040491220417025055, + "acc_norm": 0.32592592592592595, + "acc_norm_stderr": 0.040491220417025055 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909284, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909284 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2765957446808511, + "acc_stderr": 0.02924188386962881, + "acc_norm": 0.2765957446808511, + "acc_norm_stderr": 0.02924188386962881 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3433734939759036, + "acc_stderr": 0.03696584317010602, + "acc_norm": 0.3433734939759036, + "acc_norm_stderr": 0.03696584317010602 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.40514469453376206, + "acc_stderr": 0.02788238379132595, + "acc_norm": 0.40514469453376206, + 
"acc_norm_stderr": 0.02788238379132595 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.2600896860986547, + "acc_stderr": 0.029442495585857473, + "acc_norm": 0.2600896860986547, + "acc_norm_stderr": 0.029442495585857473 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3511450381679389, + "acc_stderr": 0.04186445163013751, + "acc_norm": 0.3511450381679389, + "acc_norm_stderr": 0.04186445163013751 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.4090909090909091, + "acc_stderr": 0.035029757994130065, + "acc_norm": 0.4090909090909091, + "acc_norm_stderr": 0.035029757994130065 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.47586206896551725, + "acc_stderr": 0.04161808503501528, + "acc_norm": 0.47586206896551725, + "acc_norm_stderr": 0.04161808503501528 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.04488482852329017, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.04488482852329017 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4495798319327731, + "acc_stderr": 0.03231293497137707, + "acc_norm": 0.4495798319327731, + "acc_norm_stderr": 0.03231293497137707 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.33589743589743587, + "acc_stderr": 0.02394672474156397, + "acc_norm": 0.33589743589743587, + "acc_norm_stderr": 0.02394672474156397 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + 
"acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3793103448275862, + "acc_stderr": 0.034139638059062345, + "acc_norm": 0.3793103448275862, + "acc_norm_stderr": 0.034139638059062345 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3709677419354839, + "acc_stderr": 0.027480541887953593, + "acc_norm": 0.3709677419354839, + "acc_norm_stderr": 0.027480541887953593 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6068376068376068, + "acc_stderr": 0.03199957924651047, + "acc_norm": 0.6068376068376068, + "acc_norm_stderr": 0.03199957924651047 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.39622641509433965, + "acc_stderr": 0.03010279378179119, + "acc_norm": 0.39622641509433965, + "acc_norm_stderr": 0.03010279378179119 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.38181818181818183, + "acc_stderr": 0.04653429807913508, + "acc_norm": 0.38181818181818183, + "acc_norm_stderr": 0.04653429807913508 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.29259259259259257, + "acc_stderr": 0.027738969632176085, + "acc_norm": 0.29259259259259257, + "acc_norm_stderr": 0.027738969632176085 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3509933774834437, + "acc_stderr": 0.03896981964257375, + "acc_norm": 0.3509933774834437, + "acc_norm_stderr": 0.03896981964257375 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.48258706467661694, + "acc_stderr": 0.03533389234739245, + "acc_norm": 0.48258706467661694, + "acc_norm_stderr": 0.03533389234739245 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4046242774566474, + "acc_stderr": 0.03742461193887249, + "acc_norm": 0.4046242774566474, + "acc_norm_stderr": 0.03742461193887249 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.024677862841332783, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.024677862841332783 + }, + "harness|ko_mmlu_college_biology|5": { + 
"acc": 0.3125, + "acc_stderr": 0.038760854559127644, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.038760854559127644 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.43352601156069365, + "acc_stderr": 0.026680134761679217, + "acc_norm": 0.43352601156069365, + "acc_norm_stderr": 0.026680134761679217 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.38650306748466257, + "acc_stderr": 0.03825825548848607, + "acc_norm": 0.38650306748466257, + "acc_norm_stderr": 0.03825825548848607 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3765432098765432, + "acc_stderr": 0.02695934451874778, + "acc_norm": 0.3765432098765432, + "acc_norm_stderr": 0.02695934451874778 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.41968911917098445, + "acc_stderr": 0.03561587327685885, + "acc_norm": 0.41968911917098445, + "acc_norm_stderr": 0.03561587327685885 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.21929824561403508, + "acc_stderr": 0.038924311065187525, + "acc_norm": 0.21929824561403508, + "acc_norm_stderr": 0.038924311065187525 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3743119266055046, + "acc_stderr": 0.020748959408988327, + "acc_norm": 0.3743119266055046, + "acc_norm_stderr": 0.020748959408988327 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.42063492063492064, + "acc_stderr": 0.04415438226743743, + "acc_norm": 0.42063492063492064, + "acc_norm_stderr": 0.04415438226743743 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 
0.42483660130718953, + "acc_stderr": 0.028304576673141114, + "acc_norm": 0.42483660130718953, + "acc_norm_stderr": 0.028304576673141114 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.45, + "acc_stderr": 0.04999999999999999, + "acc_norm": 0.45, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5371900826446281, + "acc_stderr": 0.04551711196104218, + "acc_norm": 0.5371900826446281, + "acc_norm_stderr": 0.04551711196104218 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4342105263157895, + "acc_stderr": 0.040335656678483205, + "acc_norm": 0.4342105263157895, + "acc_norm_stderr": 0.040335656678483205 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3366013071895425, + "acc_stderr": 0.019117213911495155, + "acc_norm": 0.3366013071895425, + "acc_norm_stderr": 0.019117213911495155 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3404255319148936, + "acc_stderr": 0.02826765748265014, + "acc_norm": 0.3404255319148936, + "acc_norm_stderr": 0.02826765748265014 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.04287858751340455, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.04287858751340455 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4212962962962963, + "acc_stderr": 0.03367462138896078, + "acc_norm": 0.4212962962962963, + "acc_norm_stderr": 0.03367462138896078 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.26033519553072626, + "acc_stderr": 0.01467625200931947, + "acc_norm": 0.26033519553072626, + "acc_norm_stderr": 0.01467625200931947 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + 
"harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3786764705882353, + "acc_stderr": 0.02946513363977613, + "acc_norm": 0.3786764705882353, + "acc_norm_stderr": 0.02946513363977613 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5510204081632653, + "acc_stderr": 0.03184213866687579, + "acc_norm": 0.5510204081632653, + "acc_norm_stderr": 0.03184213866687579 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.33755274261603374, + "acc_stderr": 0.03078154910202622, + "acc_norm": 0.33755274261603374, + "acc_norm_stderr": 0.03078154910202622 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.31486310299869624, + "acc_stderr": 0.011862561755715924, + "acc_norm": 0.31486310299869624, + "acc_norm_stderr": 0.011862561755715924 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.39215686274509803, + "acc_stderr": 0.03426712349247272, + "acc_norm": 0.39215686274509803, + "acc_norm_stderr": 0.03426712349247272 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3878787878787879, + "acc_stderr": 0.038049136539710114, + "acc_norm": 0.3878787878787879, + "acc_norm_stderr": 0.038049136539710114 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.30966952264381886, + "mc1_stderr": 0.016185744355144905, + "mc2": 0.4818389603284346, + "mc2_stderr": 0.016049632203664795 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3600944510035419, + "acc_stderr": 0.016503686720440076, + "acc_norm": 0.40613931523022434, + "acc_norm_stderr": 0.016884749503191385 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + 
"harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + 
"harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Walmart-the-bag/Yi-6B-Infinity-Chat", + "model_sha": "7a441a69e1ebd192fbf52b904589130c3875aacc", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/We-Want-GPU/SOLAR-10.7B-orca-alpaca-gpt4-lora-653/result_2023-12-28 18:49:53.json b/We-Want-GPU/SOLAR-10.7B-orca-alpaca-gpt4-lora-653/result_2023-12-28 18:49:53.json new file mode 100644 index 0000000000000000000000000000000000000000..59e35428ca8f85d6ce6a261f40a887b2048ee555 --- /dev/null +++ b/We-Want-GPU/SOLAR-10.7B-orca-alpaca-gpt4-lora-653/result_2023-12-28 18:49:53.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.44283276450511944, + "acc_stderr": 0.014515573873348899, + "acc_norm": 0.4803754266211604, + "acc_norm_stderr": 0.014600132075947087 + }, + "harness|ko_hellaswag|10": { + "acc": 0.44742083250348536, + "acc_stderr": 0.004962115526014299, + "acc_norm": 0.6081457876916949, + "acc_norm_stderr": 0.004871667371060538 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6198830409356725, + "acc_stderr": 0.03722965741385539, + "acc_norm": 0.6198830409356725, + "acc_norm_stderr": 0.03722965741385539 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6601941747572816, + "acc_stderr": 0.046897659372781335, + "acc_norm": 0.6601941747572816, + "acc_norm_stderr": 0.046897659372781335 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6628352490421456, + "acc_stderr": 0.016905207420803547, + "acc_norm": 0.6628352490421456, + "acc_norm_stderr": 0.016905207420803547 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 
0.45925925925925926, + "acc_stderr": 0.04304979692464244, + "acc_norm": 0.45925925925925926, + "acc_norm_stderr": 0.04304979692464244 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4978723404255319, + "acc_stderr": 0.032685726586674915, + "acc_norm": 0.4978723404255319, + "acc_norm_stderr": 0.032685726586674915 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4939759036144578, + "acc_stderr": 0.03892212195333045, + "acc_norm": 0.4939759036144578, + "acc_norm_stderr": 0.03892212195333045 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5884244372990354, + "acc_stderr": 0.027950481494401262, + "acc_norm": 0.5884244372990354, + "acc_norm_stderr": 0.027950481494401262 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5874439461883408, + "acc_stderr": 0.03304062175449297, + "acc_norm": 0.5874439461883408, + "acc_norm_stderr": 0.03304062175449297 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6030534351145038, + "acc_stderr": 0.04291135671009224, + "acc_norm": 0.6030534351145038, + "acc_norm_stderr": 0.04291135671009224 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956914, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956914 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7222222222222222, + "acc_stderr": 0.03191178226713546, + "acc_norm": 0.7222222222222222, + "acc_norm_stderr": 0.03191178226713546 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5241379310344828, + "acc_stderr": 0.041618085035015295, + "acc_norm": 0.5241379310344828, + "acc_norm_stderr": 0.041618085035015295 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.37254901960784315, + "acc_stderr": 0.048108401480826346, + "acc_norm": 0.37254901960784315, + "acc_norm_stderr": 0.048108401480826346 + }, + 
"harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6260504201680672, + "acc_stderr": 0.031429466378837076, + "acc_norm": 0.6260504201680672, + "acc_norm_stderr": 0.031429466378837076 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5666666666666667, + "acc_stderr": 0.0251246535258851, + "acc_norm": 0.5666666666666667, + "acc_norm_stderr": 0.0251246535258851 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6481481481481481, + "acc_stderr": 0.04616631111801714, + "acc_norm": 0.6481481481481481, + "acc_norm_stderr": 0.04616631111801714 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4433497536945813, + "acc_stderr": 0.03495334582162934, + "acc_norm": 0.4433497536945813, + "acc_norm_stderr": 0.03495334582162934 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6258064516129033, + "acc_stderr": 0.027528904299845693, + "acc_norm": 0.6258064516129033, + "acc_norm_stderr": 0.027528904299845693 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.8034188034188035, + "acc_stderr": 0.02603538609895129, + "acc_norm": 0.8034188034188035, + "acc_norm_stderr": 0.02603538609895129 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5245283018867924, + "acc_stderr": 0.030735822206205615, + "acc_norm": 0.5245283018867924, + "acc_norm_stderr": 0.030735822206205615 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6090909090909091, + "acc_stderr": 0.04673752333670239, + "acc_norm": 0.6090909090909091, + "acc_norm_stderr": 0.04673752333670239 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.362962962962963, + "acc_stderr": 0.02931820364520686, + "acc_norm": 0.362962962962963, + 
"acc_norm_stderr": 0.02931820364520686 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3973509933774834, + "acc_stderr": 0.03995524007681681, + "acc_norm": 0.3973509933774834, + "acc_norm_stderr": 0.03995524007681681 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6865671641791045, + "acc_stderr": 0.032801882053486414, + "acc_norm": 0.6865671641791045, + "acc_norm_stderr": 0.032801882053486414 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5433526011560693, + "acc_stderr": 0.03798106566014498, + "acc_norm": 0.5433526011560693, + "acc_norm_stderr": 0.03798106566014498 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.4312169312169312, + "acc_stderr": 0.025506481698138204, + "acc_norm": 0.4312169312169312, + "acc_norm_stderr": 0.025506481698138204 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5208333333333334, + "acc_stderr": 0.041775789507399935, + "acc_norm": 0.5208333333333334, + "acc_norm_stderr": 0.041775789507399935 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.73, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.73, + "acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.615606936416185, + "acc_stderr": 0.026189666966272035, + "acc_norm": 0.615606936416185, + "acc_norm_stderr": 0.026189666966272035 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5030674846625767, + "acc_stderr": 0.03928297078179663, + "acc_norm": 0.5030674846625767, + "acc_norm_stderr": 0.03928297078179663 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6018518518518519, + "acc_stderr": 0.02723741509459248, + "acc_norm": 0.6018518518518519, + "acc_norm_stderr": 0.02723741509459248 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + 
"acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7253886010362695, + "acc_stderr": 0.032210245080411544, + "acc_norm": 0.7253886010362695, + "acc_norm_stderr": 0.032210245080411544 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.45614035087719296, + "acc_stderr": 0.04685473041907789, + "acc_norm": 0.45614035087719296, + "acc_norm_stderr": 0.04685473041907789 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6880733944954128, + "acc_stderr": 0.019862967976707245, + "acc_norm": 0.6880733944954128, + "acc_norm_stderr": 0.019862967976707245 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4126984126984127, + "acc_stderr": 0.04403438954768177, + "acc_norm": 0.4126984126984127, + "acc_norm_stderr": 0.04403438954768177 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5784313725490197, + "acc_stderr": 0.028275490156791455, + "acc_norm": 0.5784313725490197, + "acc_norm_stderr": 0.028275490156791455 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.65, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.65, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7107438016528925, + "acc_stderr": 0.041391127276354626, + "acc_norm": 0.7107438016528925, + "acc_norm_stderr": 0.041391127276354626 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.618421052631579, + "acc_stderr": 0.03953173377749194, + "acc_norm": 0.618421052631579, + "acc_norm_stderr": 0.03953173377749194 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5147058823529411, + "acc_stderr": 0.020219083895133924, + "acc_norm": 0.5147058823529411, + "acc_norm_stderr": 0.020219083895133924 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.4148936170212766, + "acc_stderr": 0.0293922365846125, + "acc_norm": 0.4148936170212766, + "acc_norm_stderr": 0.0293922365846125 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.41964285714285715, + 
"acc_stderr": 0.04684099321077106, + "acc_norm": 0.41964285714285715, + "acc_norm_stderr": 0.04684099321077106 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.034093869469927006, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.034093869469927006 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.21564245810055865, + "acc_stderr": 0.013754835975482351, + "acc_norm": 0.21564245810055865, + "acc_norm_stderr": 0.013754835975482351 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.71, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.71, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5110294117647058, + "acc_stderr": 0.030365446477275668, + "acc_norm": 0.5110294117647058, + "acc_norm_stderr": 0.030365446477275668 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5918367346938775, + "acc_stderr": 0.03146465712827423, + "acc_norm": 0.5918367346938775, + "acc_norm_stderr": 0.03146465712827423 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7763713080168776, + "acc_stderr": 0.027123298205229966, + "acc_norm": 0.7763713080168776, + "acc_norm_stderr": 0.027123298205229966 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3983050847457627, + "acc_stderr": 0.012503310565166235, + "acc_norm": 0.3983050847457627, + "acc_norm_stderr": 0.012503310565166235 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6862745098039216, + "acc_stderr": 0.03256685484460388, + "acc_norm": 0.6862745098039216, + "acc_norm_stderr": 0.03256685484460388 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6727272727272727, + "acc_stderr": 0.03663974994391242, + "acc_norm": 0.6727272727272727, + "acc_norm_stderr": 
0.03663974994391242 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2962056303549572, + "mc1_stderr": 0.015983595101811392, + "mc2": 0.46216492501026685, + "mc2_stderr": 0.015258648943320074 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.49940968122786306, + "acc_stderr": 0.01719034212344866, + "acc_norm": 0.5478158205430933, + "acc_norm_stderr": 0.0171115671309168 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + 
"harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "We-Want-GPU/SOLAR-10.7B-orca-alpaca-gpt4-lora-653", + "model_sha": "9ba1f722d3c97105e544eb7585e4f57c4ad2fd28", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/We-Want-GPU/SOLAR-10.7B-orca-alpaca-gpt4-math/result_2023-12-30 12:27:09.json b/We-Want-GPU/SOLAR-10.7B-orca-alpaca-gpt4-math/result_2023-12-30 12:27:09.json new file mode 100644 index 0000000000000000000000000000000000000000..91fb45956f7b1f0d7434f822161ca9bc7e665047 --- /dev/null +++ b/We-Want-GPU/SOLAR-10.7B-orca-alpaca-gpt4-math/result_2023-12-30 12:27:09.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.45819112627986347, + "acc_stderr": 0.014560220308714691, + "acc_norm": 0.5110921501706485, + 
"acc_norm_stderr": 0.01460779491401305 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4522007568213503, + "acc_stderr": 0.004966928094797572, + "acc_norm": 0.6169089822744473, + "acc_norm_stderr": 0.0048514666236014505 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6549707602339181, + "acc_stderr": 0.036459813773888065, + "acc_norm": 0.6549707602339181, + "acc_norm_stderr": 0.036459813773888065 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6116504854368932, + "acc_stderr": 0.04825729337356389, + "acc_norm": 0.6116504854368932, + "acc_norm_stderr": 0.04825729337356389 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6564495530012772, + "acc_stderr": 0.016982145632652473, + "acc_norm": 0.6564495530012772, + "acc_norm_stderr": 0.016982145632652473 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.04309732901036354, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.04309732901036354 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421255, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421255 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.48936170212765956, + "acc_stderr": 0.03267862331014063, + "acc_norm": 0.48936170212765956, + "acc_norm_stderr": 0.03267862331014063 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.5, + "acc_stderr": 0.03892494720807614, + "acc_norm": 0.5, + "acc_norm_stderr": 0.03892494720807614 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6077170418006431, + "acc_stderr": 0.027731258647012, + "acc_norm": 0.6077170418006431, + "acc_norm_stderr": 0.027731258647012 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5919282511210763, + "acc_stderr": 0.03298574607842822, + "acc_norm": 0.5919282511210763, + "acc_norm_stderr": 0.03298574607842822 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5954198473282443, + "acc_stderr": 0.043046937953806645, + "acc_norm": 0.5954198473282443, + "acc_norm_stderr": 
0.043046937953806645 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.48, + "acc_stderr": 0.05021167315686779, + "acc_norm": 0.48, + "acc_norm_stderr": 0.05021167315686779 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7323232323232324, + "acc_stderr": 0.03154449888270285, + "acc_norm": 0.7323232323232324, + "acc_norm_stderr": 0.03154449888270285 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5379310344827586, + "acc_stderr": 0.041546596717075474, + "acc_norm": 0.5379310344827586, + "acc_norm_stderr": 0.041546596717075474 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.04617034827006718, + "acc_norm": 0.3137254901960784, + "acc_norm_stderr": 0.04617034827006718 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6428571428571429, + "acc_stderr": 0.031124619309328177, + "acc_norm": 0.6428571428571429, + "acc_norm_stderr": 0.031124619309328177 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5282051282051282, + "acc_stderr": 0.02531063925493387, + "acc_norm": 0.5282051282051282, + "acc_norm_stderr": 0.02531063925493387 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6388888888888888, + "acc_stderr": 0.04643454608906275, + "acc_norm": 0.6388888888888888, + "acc_norm_stderr": 0.04643454608906275 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4433497536945813, + "acc_stderr": 0.034953345821629324, + "acc_norm": 0.4433497536945813, + "acc_norm_stderr": 0.034953345821629324 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6451612903225806, + "acc_stderr": 0.027218889773308774, + "acc_norm": 
0.6451612903225806, + "acc_norm_stderr": 0.027218889773308774 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7948717948717948, + "acc_stderr": 0.026453508054040332, + "acc_norm": 0.7948717948717948, + "acc_norm_stderr": 0.026453508054040332 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.539622641509434, + "acc_stderr": 0.03067609659938918, + "acc_norm": 0.539622641509434, + "acc_norm_stderr": 0.03067609659938918 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5545454545454546, + "acc_stderr": 0.047605488214603246, + "acc_norm": 0.5545454545454546, + "acc_norm_stderr": 0.047605488214603246 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3925925925925926, + "acc_stderr": 0.02977384701253297, + "acc_norm": 0.3925925925925926, + "acc_norm_stderr": 0.02977384701253297 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3841059602649007, + "acc_stderr": 0.03971301814719198, + "acc_norm": 0.3841059602649007, + "acc_norm_stderr": 0.03971301814719198 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7064676616915423, + "acc_stderr": 0.03220024104534206, + "acc_norm": 0.7064676616915423, + "acc_norm_stderr": 0.03220024104534206 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5433526011560693, + "acc_stderr": 0.03798106566014498, + "acc_norm": 0.5433526011560693, + "acc_norm_stderr": 0.03798106566014498 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.40476190476190477, + "acc_stderr": 0.025279850397404907, + "acc_norm": 0.40476190476190477, + "acc_norm_stderr": 0.025279850397404907 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5625, + "acc_stderr": 0.04148415739394154, + "acc_norm": 0.5625, + "acc_norm_stderr": 0.04148415739394154 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.74, + "acc_stderr": 
0.04408440022768078, + "acc_norm": 0.74, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5982658959537572, + "acc_stderr": 0.02639410417764363, + "acc_norm": 0.5982658959537572, + "acc_norm_stderr": 0.02639410417764363 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5214723926380368, + "acc_stderr": 0.03924746876751129, + "acc_norm": 0.5214723926380368, + "acc_norm_stderr": 0.03924746876751129 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6141975308641975, + "acc_stderr": 0.027085401226132143, + "acc_norm": 0.6141975308641975, + "acc_norm_stderr": 0.027085401226132143 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7305699481865285, + "acc_stderr": 0.03201867122877794, + "acc_norm": 0.7305699481865285, + "acc_norm_stderr": 0.03201867122877794 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.43859649122807015, + "acc_stderr": 0.04668000738510455, + "acc_norm": 0.43859649122807015, + "acc_norm_stderr": 0.04668000738510455 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.7137614678899082, + "acc_stderr": 0.019379436628919965, + "acc_norm": 0.7137614678899082, + "acc_norm_stderr": 0.019379436628919965 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.40476190476190477, + "acc_stderr": 0.043902592653775614, + "acc_norm": 0.40476190476190477, + "acc_norm_stderr": 0.043902592653775614 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5522875816993464, + "acc_stderr": 0.028472938478033526, + "acc_norm": 0.5522875816993464, + "acc_norm_stderr": 0.028472938478033526 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.62, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.62, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 
0.7520661157024794, + "acc_stderr": 0.03941897526516304, + "acc_norm": 0.7520661157024794, + "acc_norm_stderr": 0.03941897526516304 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.6118421052631579, + "acc_stderr": 0.03965842097512744, + "acc_norm": 0.6118421052631579, + "acc_norm_stderr": 0.03965842097512744 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5081699346405228, + "acc_stderr": 0.02022513434305727, + "acc_norm": 0.5081699346405228, + "acc_norm_stderr": 0.02022513434305727 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.4219858156028369, + "acc_stderr": 0.029462189233370593, + "acc_norm": 0.4219858156028369, + "acc_norm_stderr": 0.029462189233370593 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4017857142857143, + "acc_stderr": 0.04653333146973647, + "acc_norm": 0.4017857142857143, + "acc_norm_stderr": 0.04653333146973647 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5092592592592593, + "acc_stderr": 0.034093869469927006, + "acc_norm": 0.5092592592592593, + "acc_norm_stderr": 0.034093869469927006 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.18324022346368715, + "acc_stderr": 0.012938645613066388, + "acc_norm": 0.18324022346368715, + "acc_norm_stderr": 0.012938645613066388 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.75, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.75, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5330882352941176, + "acc_stderr": 0.030306257722468304, + "acc_norm": 0.5330882352941176, + "acc_norm_stderr": 0.030306257722468304 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5918367346938775, + "acc_stderr": 0.03146465712827424, + "acc_norm": 0.5918367346938775, + "acc_norm_stderr": 
0.03146465712827424 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7552742616033755, + "acc_stderr": 0.02798569938703642, + "acc_norm": 0.7552742616033755, + "acc_norm_stderr": 0.02798569938703642 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.409387222946545, + "acc_stderr": 0.012558780895570753, + "acc_norm": 0.409387222946545, + "acc_norm_stderr": 0.012558780895570753 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.696078431372549, + "acc_stderr": 0.03228210387037892, + "acc_norm": 0.696078431372549, + "acc_norm_stderr": 0.03228210387037892 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.703030303030303, + "acc_stderr": 0.035679697722680474, + "acc_norm": 0.703030303030303, + "acc_norm_stderr": 0.035679697722680474 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2937576499388005, + "mc1_stderr": 0.015945068581236614, + "mc2": 0.4597238820015034, + "mc2_stderr": 0.015281888880381102 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5371900826446281, + "acc_stderr": 0.017142736117643304, + "acc_norm": 0.5749704840613932, + "acc_norm_stderr": 0.016996016308362887 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + 
"harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "We-Want-GPU/SOLAR-10.7B-orca-alpaca-gpt4-math", + "model_sha": 
"2ddabd5a5902f3154a13576cf57c747141aae375", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/We-Want-GPU/Yi-Ko-6B-DPO-v2/result_2023-12-27 13:41:49.json b/We-Want-GPU/Yi-Ko-6B-DPO-v2/result_2023-12-27 13:41:49.json new file mode 100644 index 0000000000000000000000000000000000000000..fcbe5f62e8a251e8ef79280003c9e8905b73bf18 --- /dev/null +++ b/We-Want-GPU/Yi-Ko-6B-DPO-v2/result_2023-12-27 13:41:49.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.34982935153583616, + "acc_stderr": 0.013936809212158287, + "acc_norm": 0.4112627986348123, + "acc_norm_stderr": 0.014379441068522082 + }, + "harness|ko_hellaswag|10": { + "acc": 0.40071698864767974, + "acc_stderr": 0.004890422457747264, + "acc_norm": 0.5447122087233619, + "acc_norm_stderr": 0.004969790407117537 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.49122807017543857, + "acc_stderr": 0.038342347441649924, + "acc_norm": 0.49122807017543857, + "acc_norm_stderr": 0.038342347441649924 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6019417475728155, + "acc_stderr": 0.048467482539772386, + "acc_norm": 0.6019417475728155, + "acc_norm_stderr": 0.048467482539772386 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5683269476372924, + "acc_stderr": 0.017712228939299794, + "acc_norm": 0.5683269476372924, + "acc_norm_stderr": 0.017712228939299794 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4962962962962963, + "acc_stderr": 0.043192236258113303, + "acc_norm": 0.4962962962962963, + "acc_norm_stderr": 0.043192236258113303 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4297872340425532, + "acc_stderr": 0.03236214467715564, + "acc_norm": 
0.4297872340425532, + "acc_norm_stderr": 0.03236214467715564 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.37349397590361444, + "acc_stderr": 0.037658451171688624, + "acc_norm": 0.37349397590361444, + "acc_norm_stderr": 0.037658451171688624 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5209003215434084, + "acc_stderr": 0.028373270961069414, + "acc_norm": 0.5209003215434084, + "acc_norm_stderr": 0.028373270961069414 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.43946188340807174, + "acc_stderr": 0.03331092511038179, + "acc_norm": 0.43946188340807174, + "acc_norm_stderr": 0.03331092511038179 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4961832061068702, + "acc_stderr": 0.043851623256015534, + "acc_norm": 0.4961832061068702, + "acc_norm_stderr": 0.043851623256015534 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6161616161616161, + "acc_stderr": 0.03464881675016336, + "acc_norm": 0.6161616161616161, + "acc_norm_stderr": 0.03464881675016336 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.45517241379310347, + "acc_stderr": 0.04149886942192117, + "acc_norm": 0.45517241379310347, + "acc_norm_stderr": 0.04149886942192117 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.30392156862745096, + "acc_stderr": 0.04576665403207763, + "acc_norm": 0.30392156862745096, + "acc_norm_stderr": 0.04576665403207763 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4495798319327731, + "acc_stderr": 0.03231293497137707, + "acc_norm": 0.4495798319327731, + "acc_norm_stderr": 0.03231293497137707 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.41794871794871796, + "acc_stderr": 0.02500732988246122, + "acc_norm": 0.41794871794871796, + "acc_norm_stderr": 0.02500732988246122 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 
0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.04826217294139894, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.04826217294139894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.35960591133004927, + "acc_stderr": 0.03376458246509567, + "acc_norm": 0.35960591133004927, + "acc_norm_stderr": 0.03376458246509567 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4774193548387097, + "acc_stderr": 0.028414985019707868, + "acc_norm": 0.4774193548387097, + "acc_norm_stderr": 0.028414985019707868 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7094017094017094, + "acc_stderr": 0.029745048572674064, + "acc_norm": 0.7094017094017094, + "acc_norm_stderr": 0.029745048572674064 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4867924528301887, + "acc_stderr": 0.030762134874500482, + "acc_norm": 0.4867924528301887, + "acc_norm_stderr": 0.030762134874500482 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4909090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.4909090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.28888888888888886, + "acc_stderr": 0.027634907264178544, + "acc_norm": 0.28888888888888886, + "acc_norm_stderr": 0.027634907264178544 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33774834437086093, + "acc_stderr": 0.03861557546255169, + "acc_norm": 0.33774834437086093, + "acc_norm_stderr": 0.03861557546255169 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6119402985074627, + "acc_stderr": 0.03445789964362749, + "acc_norm": 0.6119402985074627, + "acc_norm_stderr": 0.03445789964362749 + }, + 
"harness|ko_mmlu_college_medicine|5": { + "acc": 0.3063583815028902, + "acc_stderr": 0.03514942551267436, + "acc_norm": 0.3063583815028902, + "acc_norm_stderr": 0.03514942551267436 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30687830687830686, + "acc_stderr": 0.023752928712112143, + "acc_norm": 0.30687830687830686, + "acc_norm_stderr": 0.023752928712112143 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.04076663253918567, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.04076663253918567 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4508670520231214, + "acc_stderr": 0.026788811931562757, + "acc_norm": 0.4508670520231214, + "acc_norm_stderr": 0.026788811931562757 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4294478527607362, + "acc_stderr": 0.038890666191127216, + "acc_norm": 0.4294478527607362, + "acc_norm_stderr": 0.038890666191127216 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.49691358024691357, + "acc_stderr": 0.027820214158594377, + "acc_norm": 0.49691358024691357, + "acc_norm_stderr": 0.027820214158594377 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5492227979274611, + "acc_stderr": 0.035909109522355244, + "acc_norm": 0.5492227979274611, + "acc_norm_stderr": 0.035909109522355244 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3508771929824561, + "acc_stderr": 0.044895393502706986, + "acc_norm": 0.3508771929824561, + "acc_norm_stderr": 
0.044895393502706986 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5559633027522936, + "acc_stderr": 0.021302621211654525, + "acc_norm": 0.5559633027522936, + "acc_norm_stderr": 0.021302621211654525 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2698412698412698, + "acc_stderr": 0.03970158273235173, + "acc_norm": 0.2698412698412698, + "acc_norm_stderr": 0.03970158273235173 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4215686274509804, + "acc_stderr": 0.02827549015679143, + "acc_norm": 0.4215686274509804, + "acc_norm_stderr": 0.02827549015679143 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6446280991735537, + "acc_stderr": 0.0436923632657398, + "acc_norm": 0.6446280991735537, + "acc_norm_stderr": 0.0436923632657398 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4276315789473684, + "acc_stderr": 0.04026097083296558, + "acc_norm": 0.4276315789473684, + "acc_norm_stderr": 0.04026097083296558 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4264705882352941, + "acc_stderr": 0.020007912739359368, + "acc_norm": 0.4264705882352941, + "acc_norm_stderr": 0.020007912739359368 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3191489361702128, + "acc_stderr": 0.027807990141320203, + "acc_norm": 0.3191489361702128, + "acc_norm_stderr": 0.027807990141320203 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.30357142857142855, + "acc_stderr": 0.04364226155841044, + "acc_norm": 0.30357142857142855, + "acc_norm_stderr": 0.04364226155841044 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2638888888888889, + "acc_stderr": 0.030058202704309846, + "acc_norm": 0.2638888888888889, + "acc_norm_stderr": 0.030058202704309846 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.25027932960893856, + "acc_stderr": 
0.014487500852850426, + "acc_norm": 0.25027932960893856, + "acc_norm_stderr": 0.014487500852850426 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.55, + "acc_stderr": 0.049999999999999996, + "acc_norm": 0.55, + "acc_norm_stderr": 0.049999999999999996 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3272058823529412, + "acc_stderr": 0.028501452860396563, + "acc_norm": 0.3272058823529412, + "acc_norm_stderr": 0.028501452860396563 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.363265306122449, + "acc_stderr": 0.030789051139030806, + "acc_norm": 0.363265306122449, + "acc_norm_stderr": 0.030789051139030806 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6075949367088608, + "acc_stderr": 0.0317847187456473, + "acc_norm": 0.6075949367088608, + "acc_norm_stderr": 0.0317847187456473 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3220338983050847, + "acc_stderr": 0.011933936071891091, + "acc_norm": 0.3220338983050847, + "acc_norm_stderr": 0.011933936071891091 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4950980392156863, + "acc_stderr": 0.035091433756067866, + "acc_norm": 0.4950980392156863, + "acc_norm_stderr": 0.035091433756067866 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5575757575757576, + "acc_stderr": 0.03878372113711275, + "acc_norm": 0.5575757575757576, + "acc_norm_stderr": 0.03878372113711275 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2974296205630355, + "mc1_stderr": 0.01600265148736101, + "mc2": 0.4398690513425212, + "mc2_stderr": 0.014823613510260732 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.538370720188902, + "acc_stderr": 0.01713966022184555, + "acc_norm": 0.6340023612750886, + "acc_norm_stderr": 0.016561489664895714 + } + }, + "versions": { + "all": 0, + 
"harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + 
"harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "We-Want-GPU/Yi-Ko-6B-DPO-v2", + "model_sha": "0bad3014188eae5bd260d536043b3794b0a223d9", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/We-Want-GPU/Yi-Ko-6B-orca-alpaca-gpt4-math-lora-DPO/result_2023-12-21 01:28:33.json b/We-Want-GPU/Yi-Ko-6B-orca-alpaca-gpt4-math-lora-DPO/result_2023-12-21 01:28:33.json new file mode 100644 index 0000000000000000000000000000000000000000..84493ea8eb2cb06a384468498fc79b50c0bd25cb --- /dev/null +++ b/We-Want-GPU/Yi-Ko-6B-orca-alpaca-gpt4-math-lora-DPO/result_2023-12-21 01:28:33.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.34726962457337884, + "acc_stderr": 0.013913034529620448, + "acc_norm": 0.4069965870307167, + "acc_norm_stderr": 0.014356399418009124 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4026090420235013, + "acc_stderr": 0.004894210011303206, + "acc_norm": 0.5459071898028282, + "acc_norm_stderr": 0.004968705270086754 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.49707602339181284, + "acc_stderr": 0.03834759370936839, + "acc_norm": 0.49707602339181284, + "acc_norm_stderr": 
0.03834759370936839 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6019417475728155, + "acc_stderr": 0.048467482539772386, + "acc_norm": 0.6019417475728155, + "acc_norm_stderr": 0.048467482539772386 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5798212005108557, + "acc_stderr": 0.017650651363078026, + "acc_norm": 0.5798212005108557, + "acc_norm_stderr": 0.017650651363078026 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4888888888888889, + "acc_stderr": 0.04318275491977978, + "acc_norm": 0.4888888888888889, + "acc_norm_stderr": 0.04318275491977978 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4297872340425532, + "acc_stderr": 0.03236214467715564, + "acc_norm": 0.4297872340425532, + "acc_norm_stderr": 0.03236214467715564 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.37349397590361444, + "acc_stderr": 0.037658451171688624, + "acc_norm": 0.37349397590361444, + "acc_norm_stderr": 0.037658451171688624 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5241157556270096, + "acc_stderr": 0.028365041542564577, + "acc_norm": 0.5241157556270096, + "acc_norm_stderr": 0.028365041542564577 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4304932735426009, + "acc_stderr": 0.033231973029429394, + "acc_norm": 0.4304932735426009, + "acc_norm_stderr": 0.033231973029429394 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5038167938931297, + "acc_stderr": 0.043851623256015534, + "acc_norm": 0.5038167938931297, + "acc_norm_stderr": 0.043851623256015534 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6060606060606061, + "acc_stderr": 0.03481285338232964, + "acc_norm": 0.6060606060606061, + "acc_norm_stderr": 
0.03481285338232964 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.45517241379310347, + "acc_stderr": 0.04149886942192117, + "acc_norm": 0.45517241379310347, + "acc_norm_stderr": 0.04149886942192117 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.043898699568087785, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.043898699568087785 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.46218487394957986, + "acc_stderr": 0.03238546948758979, + "acc_norm": 0.46218487394957986, + "acc_norm_stderr": 0.03238546948758979 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4307692307692308, + "acc_stderr": 0.025106820660539753, + "acc_norm": 0.4307692307692308, + "acc_norm_stderr": 0.025106820660539753 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.04820403072760627, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.04820403072760627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3645320197044335, + "acc_stderr": 0.0338640574606209, + "acc_norm": 0.3645320197044335, + "acc_norm_stderr": 0.0338640574606209 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.46774193548387094, + "acc_stderr": 0.02838474778881333, + "acc_norm": 0.46774193548387094, + "acc_norm_stderr": 0.02838474778881333 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7051282051282052, + "acc_stderr": 0.02987257770889118, + "acc_norm": 0.7051282051282052, + "acc_norm_stderr": 0.02987257770889118 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5018867924528302, + "acc_stderr": 0.030772653642075664, + 
"acc_norm": 0.5018867924528302, + "acc_norm_stderr": 0.030772653642075664 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3037037037037037, + "acc_stderr": 0.028037929969114986, + "acc_norm": 0.3037037037037037, + "acc_norm_stderr": 0.028037929969114986 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33774834437086093, + "acc_stderr": 0.038615575462551684, + "acc_norm": 0.33774834437086093, + "acc_norm_stderr": 0.038615575462551684 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6119402985074627, + "acc_stderr": 0.03445789964362749, + "acc_norm": 0.6119402985074627, + "acc_norm_stderr": 0.03445789964362749 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3179190751445087, + "acc_stderr": 0.03550683989165582, + "acc_norm": 0.3179190751445087, + "acc_norm_stderr": 0.03550683989165582 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30687830687830686, + "acc_stderr": 0.023752928712112147, + "acc_norm": 0.30687830687830686, + "acc_norm_stderr": 0.023752928712112147 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4027777777777778, + "acc_stderr": 0.04101405519842426, + "acc_norm": 0.4027777777777778, + "acc_norm_stderr": 0.04101405519842426 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.47109826589595377, + "acc_stderr": 0.026874085883518348, + "acc_norm": 0.47109826589595377, + "acc_norm_stderr": 0.026874085883518348 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4294478527607362, + 
"acc_stderr": 0.038890666191127216, + "acc_norm": 0.4294478527607362, + "acc_norm_stderr": 0.038890666191127216 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4876543209876543, + "acc_stderr": 0.027812262269327242, + "acc_norm": 0.4876543209876543, + "acc_norm_stderr": 0.027812262269327242 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5440414507772021, + "acc_stderr": 0.035944137112724366, + "acc_norm": 0.5440414507772021, + "acc_norm_stderr": 0.035944137112724366 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3684210526315789, + "acc_stderr": 0.04537815354939391, + "acc_norm": 0.3684210526315789, + "acc_norm_stderr": 0.04537815354939391 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5596330275229358, + "acc_stderr": 0.02128431062376154, + "acc_norm": 0.5596330275229358, + "acc_norm_stderr": 0.02128431062376154 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2698412698412698, + "acc_stderr": 0.03970158273235173, + "acc_norm": 0.2698412698412698, + "acc_norm_stderr": 0.03970158273235173 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4215686274509804, + "acc_stderr": 0.02827549015679143, + "acc_norm": 0.4215686274509804, + "acc_norm_stderr": 0.02827549015679143 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.043913262867240704, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.043913262867240704 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4407894736842105, + "acc_stderr": 0.04040311062490436, + "acc_norm": 0.4407894736842105, + "acc_norm_stderr": 0.04040311062490436 + }, + "harness|ko_mmlu_professional_psychology|5": { + 
"acc": 0.42810457516339867, + "acc_stderr": 0.020017629214213097, + "acc_norm": 0.42810457516339867, + "acc_norm_stderr": 0.020017629214213097 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3120567375886525, + "acc_stderr": 0.027640120545169938, + "acc_norm": 0.3120567375886525, + "acc_norm_stderr": 0.027640120545169938 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.30357142857142855, + "acc_stderr": 0.04364226155841044, + "acc_norm": 0.30357142857142855, + "acc_norm_stderr": 0.04364226155841044 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.24074074074074073, + "acc_stderr": 0.029157522184605607, + "acc_norm": 0.24074074074074073, + "acc_norm_stderr": 0.029157522184605607 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.25251396648044694, + "acc_stderr": 0.01453033020146863, + "acc_norm": 0.25251396648044694, + "acc_norm_stderr": 0.01453033020146863 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.33455882352941174, + "acc_stderr": 0.028661996202335307, + "acc_norm": 0.33455882352941174, + "acc_norm_stderr": 0.028661996202335307 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3877551020408163, + "acc_stderr": 0.031192230726795656, + "acc_norm": 0.3877551020408163, + "acc_norm_stderr": 0.031192230726795656 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6118143459915611, + "acc_stderr": 0.0317229500433233, + "acc_norm": 0.6118143459915611, + "acc_norm_stderr": 0.0317229500433233 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.33116036505867014, + "acc_stderr": 0.01202012819598576, + "acc_norm": 0.33116036505867014, + 
"acc_norm_stderr": 0.01202012819598576 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5196078431372549, + "acc_stderr": 0.03506612560524866, + "acc_norm": 0.5196078431372549, + "acc_norm_stderr": 0.03506612560524866 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5636363636363636, + "acc_stderr": 0.03872592983524754, + "acc_norm": 0.5636363636363636, + "acc_norm_stderr": 0.03872592983524754 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2913096695226438, + "mc1_stderr": 0.01590598704818483, + "mc2": 0.43100948993726446, + "mc2_stderr": 0.014880256414468152 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5466351829988194, + "acc_stderr": 0.01711541822522687, + "acc_norm": 0.6233766233766234, + "acc_norm_stderr": 0.01665879987405199 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "We-Want-GPU/Yi-Ko-6B-orca-alpaca-gpt4-math-lora-DPO", + "model_sha": "8ce4fd805d5ad4005edeea8ff54735ddf2a00965", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/We-Want-GPU/Yi-Ko-6B-orca-alpaca-gpt4-math-lora/result_2023-12-19 12:16:36.json 
b/We-Want-GPU/Yi-Ko-6B-orca-alpaca-gpt4-math-lora/result_2023-12-19 12:16:36.json new file mode 100644 index 0000000000000000000000000000000000000000..58af85d89d1f8a65473d971795208ad3bbcf6de4 --- /dev/null +++ b/We-Want-GPU/Yi-Ko-6B-orca-alpaca-gpt4-math-lora/result_2023-12-19 12:16:36.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.34726962457337884, + "acc_stderr": 0.013913034529620448, + "acc_norm": 0.4035836177474403, + "acc_norm_stderr": 0.014337158914268447 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4041027683728341, + "acc_stderr": 0.004897146690596249, + "acc_norm": 0.5429197371041625, + "acc_norm_stderr": 0.004971364031062591 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.49122807017543857, + "acc_stderr": 0.038342347441649924, + "acc_norm": 0.49122807017543857, + "acc_norm_stderr": 0.038342347441649924 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6116504854368932, + "acc_stderr": 0.048257293373563895, + "acc_norm": 0.6116504854368932, + "acc_norm_stderr": 0.048257293373563895 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5772669220945083, + "acc_stderr": 0.01766518035195406, + "acc_norm": 0.5772669220945083, + "acc_norm_stderr": 0.01766518035195406 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.04316378599511326, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.04316378599511326 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.44680851063829785, + "acc_stderr": 0.0325005368436584, + "acc_norm": 0.44680851063829785, + "acc_norm_stderr": 0.0325005368436584 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3795180722891566, + "acc_stderr": 0.037777988227480165, + "acc_norm": 0.3795180722891566, + "acc_norm_stderr": 0.037777988227480165 + }, + "harness|ko_mmlu_philosophy|5": { + 
"acc": 0.5112540192926045, + "acc_stderr": 0.028390897396863533, + "acc_norm": 0.5112540192926045, + "acc_norm_stderr": 0.028390897396863533 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4439461883408072, + "acc_stderr": 0.03334625674242728, + "acc_norm": 0.4439461883408072, + "acc_norm_stderr": 0.03334625674242728 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5038167938931297, + "acc_stderr": 0.04385162325601553, + "acc_norm": 0.5038167938931297, + "acc_norm_stderr": 0.04385162325601553 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6060606060606061, + "acc_stderr": 0.03481285338232964, + "acc_norm": 0.6060606060606061, + "acc_norm_stderr": 0.03481285338232964 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4482758620689655, + "acc_stderr": 0.04144311810878151, + "acc_norm": 0.4482758620689655, + "acc_norm_stderr": 0.04144311810878151 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.04488482852329017, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.04488482852329017 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4579831932773109, + "acc_stderr": 0.032363611119519416, + "acc_norm": 0.4579831932773109, + "acc_norm_stderr": 0.032363611119519416 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.43333333333333335, + "acc_stderr": 0.02512465352588513, + "acc_norm": 0.43333333333333335, + "acc_norm_stderr": 0.02512465352588513 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.57, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.57, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + 
"harness|ko_mmlu_jurisprudence|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.04832853553437056, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.04832853553437056 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3645320197044335, + "acc_stderr": 0.0338640574606209, + "acc_norm": 0.3645320197044335, + "acc_norm_stderr": 0.0338640574606209 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4774193548387097, + "acc_stderr": 0.028414985019707868, + "acc_norm": 0.4774193548387097, + "acc_norm_stderr": 0.028414985019707868 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6923076923076923, + "acc_stderr": 0.03023638994217309, + "acc_norm": 0.6923076923076923, + "acc_norm_stderr": 0.03023638994217309 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.49433962264150944, + "acc_stderr": 0.03077090076385131, + "acc_norm": 0.49433962264150944, + "acc_norm_stderr": 0.03077090076385131 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4909090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.4909090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.29259259259259257, + "acc_stderr": 0.02773896963217609, + "acc_norm": 0.29259259259259257, + "acc_norm_stderr": 0.02773896963217609 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3443708609271523, + "acc_stderr": 0.038796870240733264, + "acc_norm": 0.3443708609271523, + "acc_norm_stderr": 0.038796870240733264 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6119402985074627, + "acc_stderr": 0.03445789964362749, + "acc_norm": 0.6119402985074627, + "acc_norm_stderr": 0.03445789964362749 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.32947976878612717, + "acc_stderr": 0.035839017547364106, + "acc_norm": 0.32947976878612717, + "acc_norm_stderr": 0.035839017547364106 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3201058201058201, + "acc_stderr": 
0.0240268463928735, + "acc_norm": 0.3201058201058201, + "acc_norm_stderr": 0.0240268463928735 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3958333333333333, + "acc_stderr": 0.04089465449325582, + "acc_norm": 0.3958333333333333, + "acc_norm_stderr": 0.04089465449325582 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4624277456647399, + "acc_stderr": 0.026842985519615375, + "acc_norm": 0.4624277456647399, + "acc_norm_stderr": 0.026842985519615375 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.44171779141104295, + "acc_stderr": 0.039015918258361836, + "acc_norm": 0.44171779141104295, + "acc_norm_stderr": 0.039015918258361836 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.02780165621232366, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.02780165621232366 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.538860103626943, + "acc_stderr": 0.035975244117345775, + "acc_norm": 0.538860103626943, + "acc_norm_stderr": 0.035975244117345775 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.34210526315789475, + "acc_stderr": 0.04462917535336937, + "acc_norm": 0.34210526315789475, + "acc_norm_stderr": 0.04462917535336937 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5669724770642202, + "acc_stderr": 0.021244146569074338, + "acc_norm": 0.5669724770642202, + "acc_norm_stderr": 0.021244146569074338 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2619047619047619, + 
"acc_stderr": 0.039325376803928704, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.039325376803928704 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4019607843137255, + "acc_stderr": 0.02807415894760066, + "acc_norm": 0.4019607843137255, + "acc_norm_stderr": 0.02807415894760066 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6446280991735537, + "acc_stderr": 0.0436923632657398, + "acc_norm": 0.6446280991735537, + "acc_norm_stderr": 0.0436923632657398 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4407894736842105, + "acc_stderr": 0.04040311062490436, + "acc_norm": 0.4407894736842105, + "acc_norm_stderr": 0.04040311062490436 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4264705882352941, + "acc_stderr": 0.020007912739359365, + "acc_norm": 0.4264705882352941, + "acc_norm_stderr": 0.020007912739359365 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32978723404255317, + "acc_stderr": 0.028045946942042394, + "acc_norm": 0.32978723404255317, + "acc_norm_stderr": 0.028045946942042394 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3125, + "acc_stderr": 0.043994650575715215, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.043994650575715215 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.22685185185185186, + "acc_stderr": 0.028561650102422263, + "acc_norm": 0.22685185185185186, + "acc_norm_stderr": 0.028561650102422263 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2681564245810056, + "acc_stderr": 0.014816119635317, + "acc_norm": 0.2681564245810056, + "acc_norm_stderr": 0.014816119635317 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421296, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421296 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + 
"acc": 0.55, + "acc_stderr": 0.049999999999999996, + "acc_norm": 0.55, + "acc_norm_stderr": 0.049999999999999996 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.33088235294117646, + "acc_stderr": 0.028582709753898435, + "acc_norm": 0.33088235294117646, + "acc_norm_stderr": 0.028582709753898435 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.40816326530612246, + "acc_stderr": 0.03146465712827424, + "acc_norm": 0.40816326530612246, + "acc_norm_stderr": 0.03146465712827424 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6075949367088608, + "acc_stderr": 0.0317847187456473, + "acc_norm": 0.6075949367088608, + "acc_norm_stderr": 0.0317847187456473 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3272490221642764, + "acc_stderr": 0.011983819806464754, + "acc_norm": 0.3272490221642764, + "acc_norm_stderr": 0.011983819806464754 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5, + "acc_stderr": 0.03509312031717982, + "acc_norm": 0.5, + "acc_norm_stderr": 0.03509312031717982 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5575757575757576, + "acc_stderr": 0.03878372113711275, + "acc_norm": 0.5575757575757576, + "acc_norm_stderr": 0.03878372113711275 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.28151774785801714, + "mc1_stderr": 0.01574402724825605, + "mc2": 0.4203099661058685, + "mc2_stderr": 0.014945969463174807 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5631641086186541, + "acc_stderr": 0.017052633559856062, + "acc_norm": 0.6493506493506493, + "acc_norm_stderr": 0.016405556903893306 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + 
"harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 
1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "We-Want-GPU/Yi-Ko-6B-orca-alpaca-gpt4-math-lora", + "model_sha": "874983de63cdc8ccd67ee342feb04a79895fa7e6", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/We-Want-GPU/Yi-Ko-6B-orca-alpaca-gpt4-math/result_2023-12-15 15:42:33.json b/We-Want-GPU/Yi-Ko-6B-orca-alpaca-gpt4-math/result_2023-12-15 15:42:33.json new file mode 100644 index 0000000000000000000000000000000000000000..ed1a5e751a3ff7576e9f63970570f9827c7b1124 --- /dev/null +++ b/We-Want-GPU/Yi-Ko-6B-orca-alpaca-gpt4-math/result_2023-12-15 15:42:33.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.34812286689419797, + "acc_stderr": 0.013921008595179342, + "acc_norm": 0.39505119453924914, + "acc_norm_stderr": 0.014285898292938169 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3994224258115913, + "acc_stderr": 0.004887787255353494, + "acc_norm": 0.5222067317267477, + "acc_norm_stderr": 0.004984857671187101 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4327485380116959, + "acc_stderr": 0.03799978644370606, + "acc_norm": 0.4327485380116959, + "acc_norm_stderr": 0.03799978644370606 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5533980582524272, + "acc_stderr": 0.04922424153458933, + "acc_norm": 0.5533980582524272, + "acc_norm_stderr": 0.04922424153458933 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5108556832694764, + "acc_stderr": 0.01787574884024241, + "acc_norm": 0.5108556832694764, + "acc_norm_stderr": 0.01787574884024241 + }, + 
"harness|ko_mmlu_anatomy|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.043097329010363554, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.043097329010363554 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4, + "acc_stderr": 0.03202563076101735, + "acc_norm": 0.4, + "acc_norm_stderr": 0.03202563076101735 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3614457831325301, + "acc_stderr": 0.0374005938202932, + "acc_norm": 0.3614457831325301, + "acc_norm_stderr": 0.0374005938202932 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4662379421221865, + "acc_stderr": 0.0283332771095628, + "acc_norm": 0.4662379421221865, + "acc_norm_stderr": 0.0283332771095628 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4125560538116592, + "acc_stderr": 0.03304062175449297, + "acc_norm": 0.4125560538116592, + "acc_norm_stderr": 0.03304062175449297 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48854961832061067, + "acc_stderr": 0.04384140024078016, + "acc_norm": 0.48854961832061067, + "acc_norm_stderr": 0.04384140024078016 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5505050505050505, + "acc_stderr": 0.0354413249194797, + "acc_norm": 0.5505050505050505, + "acc_norm_stderr": 0.0354413249194797 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.503448275862069, + "acc_stderr": 0.04166567577101579, + "acc_norm": 0.503448275862069, + "acc_norm_stderr": 0.04166567577101579 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.14705882352941177, + "acc_stderr": 0.03524068951567447, + "acc_norm": 0.14705882352941177, + "acc_norm_stderr": 0.03524068951567447 + }, + 
"harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3949579831932773, + "acc_stderr": 0.03175367846096625, + "acc_norm": 0.3949579831932773, + "acc_norm_stderr": 0.03175367846096625 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4461538461538462, + "acc_stderr": 0.025203571773028333, + "acc_norm": 0.4461538461538462, + "acc_norm_stderr": 0.025203571773028333 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.56, + "acc_stderr": 0.0498887651569859, + "acc_norm": 0.56, + "acc_norm_stderr": 0.0498887651569859 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.04803752235190192, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.04803752235190192 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.37438423645320196, + "acc_stderr": 0.03405155380561952, + "acc_norm": 0.37438423645320196, + "acc_norm_stderr": 0.03405155380561952 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4096774193548387, + "acc_stderr": 0.027976054915347357, + "acc_norm": 0.4096774193548387, + "acc_norm_stderr": 0.027976054915347357 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6410256410256411, + "acc_stderr": 0.031426169937919246, + "acc_norm": 0.6410256410256411, + "acc_norm_stderr": 0.031426169937919246 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.41509433962264153, + "acc_stderr": 0.030325945789286105, + "acc_norm": 0.41509433962264153, + "acc_norm_stderr": 0.030325945789286105 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.41818181818181815, + "acc_stderr": 0.047245774057315705, + "acc_norm": 0.41818181818181815, + "acc_norm_stderr": 0.047245774057315705 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2518518518518518, + "acc_stderr": 0.026466117538959916, + "acc_norm": 
0.2518518518518518, + "acc_norm_stderr": 0.026466117538959916 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3509933774834437, + "acc_stderr": 0.03896981964257374, + "acc_norm": 0.3509933774834437, + "acc_norm_stderr": 0.03896981964257374 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5422885572139303, + "acc_stderr": 0.035228658640995975, + "acc_norm": 0.5422885572139303, + "acc_norm_stderr": 0.035228658640995975 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4161849710982659, + "acc_stderr": 0.037585177754049466, + "acc_norm": 0.4161849710982659, + "acc_norm_stderr": 0.037585177754049466 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30687830687830686, + "acc_stderr": 0.02375292871211213, + "acc_norm": 0.30687830687830686, + "acc_norm_stderr": 0.02375292871211213 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3819444444444444, + "acc_stderr": 0.040629907841466674, + "acc_norm": 0.3819444444444444, + "acc_norm_stderr": 0.040629907841466674 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.63, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.63, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4393063583815029, + "acc_stderr": 0.026720034380514998, + "acc_norm": 0.4393063583815029, + "acc_norm_stderr": 0.026720034380514998 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.49079754601226994, + "acc_stderr": 0.03927705600787443, + "acc_norm": 0.49079754601226994, + "acc_norm_stderr": 0.03927705600787443 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4845679012345679, + "acc_stderr": 0.027807490044276198, + "acc_norm": 0.4845679012345679, + "acc_norm_stderr": 0.027807490044276198 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + 
"acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5544041450777202, + "acc_stderr": 0.035870149860756595, + "acc_norm": 0.5544041450777202, + "acc_norm_stderr": 0.035870149860756595 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.040969851398436716, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.040969851398436716 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5229357798165137, + "acc_stderr": 0.021414757058175506, + "acc_norm": 0.5229357798165137, + "acc_norm_stderr": 0.021414757058175506 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.040406101782088394, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.040406101782088394 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.45098039215686275, + "acc_stderr": 0.02849199358617157, + "acc_norm": 0.45098039215686275, + "acc_norm_stderr": 0.02849199358617157 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6611570247933884, + "acc_stderr": 0.0432076780753667, + "acc_norm": 0.6611570247933884, + "acc_norm_stderr": 0.0432076780753667 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4407894736842105, + "acc_stderr": 0.04040311062490436, + "acc_norm": 0.4407894736842105, + "acc_norm_stderr": 0.04040311062490436 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3660130718954248, + "acc_stderr": 0.019488025745529672, + "acc_norm": 0.3660130718954248, + "acc_norm_stderr": 0.019488025745529672 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2765957446808511, + "acc_stderr": 0.026684564340460994, + "acc_norm": 0.2765957446808511, + "acc_norm_stderr": 0.026684564340460994 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 
0.25892857142857145, + "acc_stderr": 0.041577515398656284, + "acc_norm": 0.25892857142857145, + "acc_norm_stderr": 0.041577515398656284 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.28703703703703703, + "acc_stderr": 0.030851992993257013, + "acc_norm": 0.28703703703703703, + "acc_norm_stderr": 0.030851992993257013 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27150837988826815, + "acc_stderr": 0.014874252168095268, + "acc_norm": 0.27150837988826815, + "acc_norm_stderr": 0.014874252168095268 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.2757352941176471, + "acc_stderr": 0.027146271936625166, + "acc_norm": 0.2757352941176471, + "acc_norm_stderr": 0.027146271936625166 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5142857142857142, + "acc_stderr": 0.031996152328062855, + "acc_norm": 0.5142857142857142, + "acc_norm_stderr": 0.031996152328062855 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6244725738396625, + "acc_stderr": 0.03152256243091156, + "acc_norm": 0.6244725738396625, + "acc_norm_stderr": 0.03152256243091156 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.31486310299869624, + "acc_stderr": 0.011862561755715938, + "acc_norm": 0.31486310299869624, + "acc_norm_stderr": 0.011862561755715938 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4117647058823529, + "acc_stderr": 0.0345423658538061, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.0345423658538061 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.503030303030303, + "acc_stderr": 0.03904272341431856, + "acc_norm": 0.503030303030303, + 
"acc_norm_stderr": 0.03904272341431856 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2876376988984088, + "mc1_stderr": 0.015846315101394816, + "mc2": 0.451806214348065, + "mc2_stderr": 0.01530167301756722 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4793388429752066, + "acc_stderr": 0.017175671279836442, + "acc_norm": 0.5478158205430933, + "acc_norm_stderr": 0.0171115671309168 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + 
"harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "We-Want-GPU/Yi-Ko-6B-orca-alpaca-gpt4-math", + "model_sha": "9e0ae0ba6a5f0bab33b892ca57727c5c63fb908a", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/We-Want-GPU/Yi-Ko-SFT-FULL/result_2023-12-10 23:13:29.json b/We-Want-GPU/Yi-Ko-SFT-FULL/result_2023-12-10 23:13:29.json new file mode 100644 index 0000000000000000000000000000000000000000..1b7d3b6029a63f627c36e030e774d3428982241a --- /dev/null +++ b/We-Want-GPU/Yi-Ko-SFT-FULL/result_2023-12-10 23:13:29.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3225255972696246, + "acc_stderr": 0.013659980894277378, + "acc_norm": 0.3660409556313993, + "acc_norm_stderr": 0.01407722310847014 + }, + 
"harness|ko_hellaswag|10": { + "acc": 0.3783110934076877, + "acc_stderr": 0.004839746491523513, + "acc_norm": 0.49133638717386974, + "acc_norm_stderr": 0.004989032307320727 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.45614035087719296, + "acc_stderr": 0.03820042586602966, + "acc_norm": 0.45614035087719296, + "acc_norm_stderr": 0.03820042586602966 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5631067961165048, + "acc_stderr": 0.04911147107365777, + "acc_norm": 0.5631067961165048, + "acc_norm_stderr": 0.04911147107365777 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5223499361430396, + "acc_stderr": 0.017862091778507866, + "acc_norm": 0.5223499361430396, + "acc_norm_stderr": 0.017862091778507866 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.43703703703703706, + "acc_stderr": 0.04284958639753398, + "acc_norm": 0.43703703703703706, + "acc_norm_stderr": 0.04284958639753398 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768077, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768077 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.425531914893617, + "acc_stderr": 0.03232146916224469, + "acc_norm": 0.425531914893617, + "acc_norm_stderr": 0.03232146916224469 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3674698795180723, + "acc_stderr": 0.03753267402120574, + "acc_norm": 0.3674698795180723, + "acc_norm_stderr": 0.03753267402120574 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4340836012861736, + "acc_stderr": 0.02815023224453559, + "acc_norm": 0.4340836012861736, + "acc_norm_stderr": 0.02815023224453559 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.452914798206278, + "acc_stderr": 0.03340867501923323, + "acc_norm": 0.452914798206278, + "acc_norm_stderr": 0.03340867501923323 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48091603053435117, + "acc_stderr": 0.043820947055509894, + "acc_norm": 0.48091603053435117, + "acc_norm_stderr": 0.043820947055509894 + }, 
+ "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5707070707070707, + "acc_stderr": 0.035265527246011986, + "acc_norm": 0.5707070707070707, + "acc_norm_stderr": 0.035265527246011986 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3586206896551724, + "acc_stderr": 0.03996629574876719, + "acc_norm": 0.3586206896551724, + "acc_norm_stderr": 0.03996629574876719 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.19607843137254902, + "acc_stderr": 0.03950581861179961, + "acc_norm": 0.19607843137254902, + "acc_norm_stderr": 0.03950581861179961 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.0322529423239964, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.0322529423239964 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.44871794871794873, + "acc_stderr": 0.025217315184846482, + "acc_norm": 0.44871794871794873, + "acc_norm_stderr": 0.025217315184846482 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.53, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.53, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.04803752235190193, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.04803752235190193 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3399014778325123, + "acc_stderr": 0.03332769068410789, + "acc_norm": 0.3399014778325123, + "acc_norm_stderr": 0.03332769068410789 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4161290322580645, + "acc_stderr": 0.028040981380761554, + "acc_norm": 0.4161290322580645, + "acc_norm_stderr": 
0.028040981380761554 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6709401709401709, + "acc_stderr": 0.030782321577688173, + "acc_norm": 0.6709401709401709, + "acc_norm_stderr": 0.030782321577688173 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4037735849056604, + "acc_stderr": 0.030197611600197953, + "acc_norm": 0.4037735849056604, + "acc_norm_stderr": 0.030197611600197953 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5, + "acc_stderr": 0.04789131426105757, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04789131426105757 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.027940457136228416, + "acc_norm": 0.3, + "acc_norm_stderr": 0.027940457136228416 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + "acc_stderr": 0.037345356767871984, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.037345356767871984 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5124378109452736, + "acc_stderr": 0.0353443984853958, + "acc_norm": 0.5124378109452736, + "acc_norm_stderr": 0.0353443984853958 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.35260115606936415, + "acc_stderr": 0.036430371689585496, + "acc_norm": 0.35260115606936415, + "acc_norm_stderr": 0.036430371689585496 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.023919984164047732, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.023919984164047732 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3680555555555556, + "acc_stderr": 0.04032999053960718, + "acc_norm": 0.3680555555555556, + "acc_norm_stderr": 0.04032999053960718 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + 
"harness|ko_mmlu_moral_disputes|5": { + "acc": 0.3959537572254335, + "acc_stderr": 0.02632981334194626, + "acc_norm": 0.3959537572254335, + "acc_norm_stderr": 0.02632981334194626 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3987730061349693, + "acc_stderr": 0.03847021420456023, + "acc_norm": 0.3987730061349693, + "acc_norm_stderr": 0.03847021420456023 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4228395061728395, + "acc_stderr": 0.027487472980871595, + "acc_norm": 0.4228395061728395, + "acc_norm_stderr": 0.027487472980871595 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5181347150259067, + "acc_stderr": 0.036060650018329185, + "acc_norm": 0.5181347150259067, + "acc_norm_stderr": 0.036060650018329185 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2894736842105263, + "acc_stderr": 0.04266339443159394, + "acc_norm": 0.2894736842105263, + "acc_norm_stderr": 0.04266339443159394 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5155963302752293, + "acc_stderr": 0.02142689153920805, + "acc_norm": 0.5155963302752293, + "acc_norm_stderr": 0.02142689153920805 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.23809523809523808, + "acc_stderr": 0.038095238095238106, + "acc_norm": 0.23809523809523808, + "acc_norm_stderr": 0.038095238095238106 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.027914055510468, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.027914055510468 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6198347107438017, + "acc_stderr": 0.04431324501968432, + "acc_norm": 0.6198347107438017, + "acc_norm_stderr": 
0.04431324501968432 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.42105263157894735, + "acc_stderr": 0.04017901275981749, + "acc_norm": 0.42105263157894735, + "acc_norm_stderr": 0.04017901275981749 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3709150326797386, + "acc_stderr": 0.01954210156485411, + "acc_norm": 0.3709150326797386, + "acc_norm_stderr": 0.01954210156485411 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3262411347517731, + "acc_stderr": 0.027968453043563168, + "acc_norm": 0.3262411347517731, + "acc_norm_stderr": 0.027968453043563168 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.30357142857142855, + "acc_stderr": 0.04364226155841044, + "acc_norm": 0.30357142857142855, + "acc_norm_stderr": 0.04364226155841044 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.031141447823536023, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.031141447823536023 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2335195530726257, + "acc_stderr": 0.014149575348976253, + "acc_norm": 0.2335195530726257, + "acc_norm_stderr": 0.014149575348976253 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.55, + "acc_stderr": 0.04999999999999999, + "acc_norm": 0.55, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.33088235294117646, + "acc_stderr": 0.028582709753898445, + "acc_norm": 0.33088235294117646, + "acc_norm_stderr": 0.028582709753898445 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4163265306122449, + "acc_stderr": 0.03155782816556164, + "acc_norm": 0.4163265306122449, + "acc_norm_stderr": 0.03155782816556164 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.569620253164557, + "acc_stderr": 
0.03223017195937598, + "acc_norm": 0.569620253164557, + "acc_norm_stderr": 0.03223017195937598 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.30378096479791394, + "acc_stderr": 0.011745787720472458, + "acc_norm": 0.30378096479791394, + "acc_norm_stderr": 0.011745787720472458 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.43137254901960786, + "acc_stderr": 0.03476099060501637, + "acc_norm": 0.43137254901960786, + "acc_norm_stderr": 0.03476099060501637 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.48484848484848486, + "acc_stderr": 0.03902551007374449, + "acc_norm": 0.48484848484848486, + "acc_norm_stderr": 0.03902551007374449 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.26805385556915545, + "mc1_stderr": 0.015506204722834559, + "mc2": 0.4291269663326706, + "mc2_stderr": 0.015080663366587326 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3412042502951594, + "acc_stderr": 0.016300368742137306, + "acc_norm": 0.41086186540731995, + "acc_norm_stderr": 0.016914972767841062 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + 
"harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "We-Want-GPU/Yi-Ko-SFT-FULL", + "model_sha": "f5c4893445511c2dd803297fb967d4af66a86c67", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + 
"override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/We-Want-GPU/yi-ko-SFT-LoRA-play-re-noprompt/result_2023-12-06 08:58:22.json b/We-Want-GPU/yi-ko-SFT-LoRA-play-re-noprompt/result_2023-12-06 08:58:22.json new file mode 100644 index 0000000000000000000000000000000000000000..cf2627986e4f80be3ecce067c21b2db59f2c582c --- /dev/null +++ b/We-Want-GPU/yi-ko-SFT-LoRA-play-re-noprompt/result_2023-12-06 08:58:22.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3319112627986348, + "acc_stderr": 0.013760988200880534, + "acc_norm": 0.3967576791808874, + "acc_norm_stderr": 0.014296513020180637 + }, + "harness|ko_hellaswag|10": { + "acc": 0.38707428799044014, + "acc_stderr": 0.004860854240821965, + "acc_norm": 0.5099581756622187, + "acc_norm_stderr": 0.004988791687322851 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5087719298245614, + "acc_stderr": 0.038342347441649924, + "acc_norm": 0.5087719298245614, + "acc_norm_stderr": 0.038342347441649924 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5631067961165048, + "acc_stderr": 0.04911147107365777, + "acc_norm": 0.5631067961165048, + "acc_norm_stderr": 0.04911147107365777 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.017769250583533253, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.017769250583533253 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4888888888888889, + "acc_stderr": 0.04318275491977978, + "acc_norm": 0.4888888888888889, + "acc_norm_stderr": 0.04318275491977978 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4, + "acc_stderr": 0.03202563076101737, + "acc_norm": 0.4, + "acc_norm_stderr": 0.03202563076101737 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42168674698795183, + "acc_stderr": 
0.03844453181770917, + "acc_norm": 0.42168674698795183, + "acc_norm_stderr": 0.03844453181770917 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5144694533762058, + "acc_stderr": 0.028386198084177687, + "acc_norm": 0.5144694533762058, + "acc_norm_stderr": 0.028386198084177687 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.45739910313901344, + "acc_stderr": 0.033435777055830646, + "acc_norm": 0.45739910313901344, + "acc_norm_stderr": 0.033435777055830646 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5190839694656488, + "acc_stderr": 0.043820947055509867, + "acc_norm": 0.5190839694656488, + "acc_norm_stderr": 0.043820947055509867 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6111111111111112, + "acc_stderr": 0.0347327959083696, + "acc_norm": 0.6111111111111112, + "acc_norm_stderr": 0.0347327959083696 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.503448275862069, + "acc_stderr": 0.04166567577101579, + "acc_norm": 0.503448275862069, + "acc_norm_stderr": 0.04166567577101579 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.043364327079931785, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.043364327079931785 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4327731092436975, + "acc_stderr": 0.03218358107742613, + "acc_norm": 0.4327731092436975, + "acc_norm_stderr": 0.03218358107742613 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4205128205128205, + "acc_stderr": 0.025028610276710855, + "acc_norm": 0.4205128205128205, + "acc_norm_stderr": 0.025028610276710855 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.65, + "acc_stderr": 0.04793724854411021, + "acc_norm": 0.65, + "acc_norm_stderr": 0.04793724854411021 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 
0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.37438423645320196, + "acc_stderr": 0.03405155380561952, + "acc_norm": 0.37438423645320196, + "acc_norm_stderr": 0.03405155380561952 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4935483870967742, + "acc_stderr": 0.028441638233540515, + "acc_norm": 0.4935483870967742, + "acc_norm_stderr": 0.028441638233540515 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6367521367521367, + "acc_stderr": 0.03150712523091264, + "acc_norm": 0.6367521367521367, + "acc_norm_stderr": 0.03150712523091264 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5018867924528302, + "acc_stderr": 0.030772653642075664, + "acc_norm": 0.5018867924528302, + "acc_norm_stderr": 0.030772653642075664 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.44545454545454544, + "acc_stderr": 0.047605488214603246, + "acc_norm": 0.44545454545454544, + "acc_norm_stderr": 0.047605488214603246 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.02684205787383371, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.02684205787383371 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.36423841059602646, + "acc_stderr": 0.03929111781242741, + "acc_norm": 0.36423841059602646, + "acc_norm_stderr": 0.03929111781242741 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5522388059701493, + "acc_stderr": 0.035161847729521675, + "acc_norm": 0.5522388059701493, + "acc_norm_stderr": 0.035161847729521675 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3699421965317919, + "acc_stderr": 0.03681229633394319, + "acc_norm": 0.3699421965317919, + "acc_norm_stderr": 
0.03681229633394319 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30687830687830686, + "acc_stderr": 0.023752928712112143, + "acc_norm": 0.30687830687830686, + "acc_norm_stderr": 0.023752928712112143 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3819444444444444, + "acc_stderr": 0.040629907841466674, + "acc_norm": 0.3819444444444444, + "acc_norm_stderr": 0.040629907841466674 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.56, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.56, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.47398843930635837, + "acc_stderr": 0.026882643434022895, + "acc_norm": 0.47398843930635837, + "acc_norm_stderr": 0.026882643434022895 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4539877300613497, + "acc_stderr": 0.0391170190467718, + "acc_norm": 0.4539877300613497, + "acc_norm_stderr": 0.0391170190467718 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4567901234567901, + "acc_stderr": 0.02771666165019404, + "acc_norm": 0.4567901234567901, + "acc_norm_stderr": 0.02771666165019404 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5699481865284974, + "acc_stderr": 0.03572954333144807, + "acc_norm": 0.5699481865284974, + "acc_norm_stderr": 0.03572954333144807 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.32456140350877194, + "acc_stderr": 0.04404556157374768, + "acc_norm": 0.32456140350877194, + "acc_norm_stderr": 0.04404556157374768 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5651376146788991, + "acc_stderr": 0.021254631465609273, + "acc_norm": 0.5651376146788991, 
+ "acc_norm_stderr": 0.021254631465609273 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.039325376803928704, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.039325376803928704 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.434640522875817, + "acc_stderr": 0.028384256704883034, + "acc_norm": 0.434640522875817, + "acc_norm_stderr": 0.028384256704883034 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6694214876033058, + "acc_stderr": 0.04294340845212094, + "acc_norm": 0.6694214876033058, + "acc_norm_stderr": 0.04294340845212094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.45394736842105265, + "acc_stderr": 0.040516463428741406, + "acc_norm": 0.45394736842105265, + "acc_norm_stderr": 0.040516463428741406 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.38562091503267976, + "acc_stderr": 0.019691459052354154, + "acc_norm": 0.38562091503267976, + "acc_norm_stderr": 0.019691459052354154 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32978723404255317, + "acc_stderr": 0.028045946942042398, + "acc_norm": 0.32978723404255317, + "acc_norm_stderr": 0.028045946942042398 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3125, + "acc_stderr": 0.043994650575715215, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.043994650575715215 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.35648148148148145, + "acc_stderr": 0.032664783315272714, + "acc_norm": 0.35648148148148145, + "acc_norm_stderr": 0.032664783315272714 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.25251396648044694, + "acc_stderr": 0.014530330201468636, + "acc_norm": 0.25251396648044694, + "acc_norm_stderr": 0.014530330201468636 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.32, + "acc_stderr": 
0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.55, + "acc_stderr": 0.05, + "acc_norm": 0.55, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.34191176470588236, + "acc_stderr": 0.02881472242225418, + "acc_norm": 0.34191176470588236, + "acc_norm_stderr": 0.02881472242225418 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3836734693877551, + "acc_stderr": 0.03113088039623592, + "acc_norm": 0.3836734693877551, + "acc_norm_stderr": 0.03113088039623592 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5654008438818565, + "acc_stderr": 0.03226759995510145, + "acc_norm": 0.5654008438818565, + "acc_norm_stderr": 0.03226759995510145 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3344198174706649, + "acc_stderr": 0.012049668983214936, + "acc_norm": 0.3344198174706649, + "acc_norm_stderr": 0.012049668983214936 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.46078431372549017, + "acc_stderr": 0.03498501649369527, + "acc_norm": 0.46078431372549017, + "acc_norm_stderr": 0.03498501649369527 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5272727272727272, + "acc_stderr": 0.03898531605579418, + "acc_norm": 0.5272727272727272, + "acc_norm_stderr": 0.03898531605579418 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2692778457772338, + "mc1_stderr": 0.015528566637087298, + "mc2": 0.41549106272132785, + "mc2_stderr": 0.014935916833078716 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5064935064935064, + "acc_stderr": 0.017188904359077304, + "acc_norm": 0.5820543093270366, + "acc_norm_stderr": 0.01695729200527971 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 
1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + 
"harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "We-Want-GPU/yi-ko-SFT-LoRA-play-re-noprompt", + "model_sha": "40ceba7ecae5a639e5facb03c229a0ac29869d9f", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Weyaxi/Einstein-v7-Qwen2-7B/result_2024-07-16 18:15:10.json b/Weyaxi/Einstein-v7-Qwen2-7B/result_2024-07-16 18:15:10.json new file mode 100644 index 0000000000000000000000000000000000000000..2e5e33fa363bd6e3bece8ee2416307a1c21366f3 --- /dev/null +++ b/Weyaxi/Einstein-v7-Qwen2-7B/result_2024-07-16 18:15:10.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.378839590443686, + "acc_stderr": 0.01417591549000032, + "acc_norm": 0.4377133105802048, + "acc_norm_stderr": 0.014497573881108283 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3899621589324836, + "acc_stderr": 0.004867445945277161, + "acc_norm": 0.5220075682135032, + "acc_norm_stderr": 0.004984945635998313 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.672514619883041, + "acc_stderr": 0.035993357714560276, + "acc_norm": 0.672514619883041, + "acc_norm_stderr": 0.035993357714560276 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.7669902912621359, + "acc_stderr": 0.041858325989283164, + "acc_norm": 0.7669902912621359, + "acc_norm_stderr": 0.041858325989283164 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6679438058748404, + "acc_stderr": 
0.016841174655295707, + "acc_norm": 0.6679438058748404, + "acc_norm_stderr": 0.016841174655295707 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3925925925925926, + "acc_stderr": 0.042185062153688786, + "acc_norm": 0.3925925925925926, + "acc_norm_stderr": 0.042185062153688786 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.6170212765957447, + "acc_stderr": 0.031778212502369216, + "acc_norm": 0.6170212765957447, + "acc_norm_stderr": 0.031778212502369216 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4879518072289157, + "acc_stderr": 0.0389136449583582, + "acc_norm": 0.4879518072289157, + "acc_norm_stderr": 0.0389136449583582 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6784565916398714, + "acc_stderr": 0.026527724079528872, + "acc_norm": 0.6784565916398714, + "acc_norm_stderr": 0.026527724079528872 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5964125560538116, + "acc_stderr": 0.032928028193303135, + "acc_norm": 0.5964125560538116, + "acc_norm_stderr": 0.032928028193303135 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6564885496183206, + "acc_stderr": 0.041649760719448786, + "acc_norm": 0.6564885496183206, + "acc_norm_stderr": 0.041649760719448786 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7777777777777778, + "acc_stderr": 0.029620227874790465, + "acc_norm": 0.7777777777777778, + "acc_norm_stderr": 0.029620227874790465 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.6551724137931034, + "acc_stderr": 0.03960933549451207, + "acc_norm": 0.6551724137931034, + "acc_norm_stderr": 0.03960933549451207 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3333333333333333, + 
"acc_stderr": 0.04690650298201942, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04690650298201942 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6554621848739496, + "acc_stderr": 0.03086868260412162, + "acc_norm": 0.6554621848739496, + "acc_norm_stderr": 0.03086868260412162 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.6282051282051282, + "acc_stderr": 0.024503472557110915, + "acc_norm": 0.6282051282051282, + "acc_norm_stderr": 0.024503472557110915 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.75, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.75, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.7129629629629629, + "acc_stderr": 0.043733130409147614, + "acc_norm": 0.7129629629629629, + "acc_norm_stderr": 0.043733130409147614 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.5320197044334976, + "acc_stderr": 0.03510766597959217, + "acc_norm": 0.5320197044334976, + "acc_norm_stderr": 0.03510766597959217 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6580645161290323, + "acc_stderr": 0.02698528957655273, + "acc_norm": 0.6580645161290323, + "acc_norm_stderr": 0.02698528957655273 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.8205128205128205, + "acc_stderr": 0.02514093595033544, + "acc_norm": 0.8205128205128205, + "acc_norm_stderr": 0.02514093595033544 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.6377358490566037, + "acc_stderr": 0.029582245128384303, + "acc_norm": 0.6377358490566037, + "acc_norm_stderr": 0.029582245128384303 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.04769300568972745, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.04769300568972745 + }, + 
"harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.4740740740740741, + "acc_stderr": 0.03044452852881074, + "acc_norm": 0.4740740740740741, + "acc_norm_stderr": 0.03044452852881074 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.37748344370860926, + "acc_stderr": 0.0395802723112157, + "acc_norm": 0.37748344370860926, + "acc_norm_stderr": 0.0395802723112157 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7213930348258707, + "acc_stderr": 0.031700561834973086, + "acc_norm": 0.7213930348258707, + "acc_norm_stderr": 0.031700561834973086 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5433526011560693, + "acc_stderr": 0.03798106566014498, + "acc_norm": 0.5433526011560693, + "acc_norm_stderr": 0.03798106566014498 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.582010582010582, + "acc_stderr": 0.02540255550326091, + "acc_norm": 0.582010582010582, + "acc_norm_stderr": 0.02540255550326091 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4791666666666667, + "acc_stderr": 0.041775789507399914, + "acc_norm": 0.4791666666666667, + "acc_norm_stderr": 0.041775789507399914 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.75, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.75, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.630057803468208, + "acc_stderr": 0.02599247202930639, + "acc_norm": 0.630057803468208, + "acc_norm_stderr": 0.02599247202930639 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5828220858895705, + "acc_stderr": 0.038741028598180814, + "acc_norm": 0.5828220858895705, + "acc_norm_stderr": 0.038741028598180814 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6265432098765432, + "acc_stderr": 0.026915003011380154, + "acc_norm": 0.6265432098765432, + "acc_norm_stderr": 
0.026915003011380154 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6528497409326425, + "acc_stderr": 0.03435696168361356, + "acc_norm": 0.6528497409326425, + "acc_norm_stderr": 0.03435696168361356 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.5, + "acc_stderr": 0.047036043419179864, + "acc_norm": 0.5, + "acc_norm_stderr": 0.047036043419179864 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.708256880733945, + "acc_stderr": 0.019489300968876532, + "acc_norm": 0.708256880733945, + "acc_norm_stderr": 0.019489300968876532 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.5317460317460317, + "acc_stderr": 0.04463112720677175, + "acc_norm": 0.5317460317460317, + "acc_norm_stderr": 0.04463112720677175 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.6241830065359477, + "acc_stderr": 0.027732834353363947, + "acc_norm": 0.6241830065359477, + "acc_norm_stderr": 0.027732834353363947 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.65, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.65, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.743801652892562, + "acc_stderr": 0.03984979653302872, + "acc_norm": 0.743801652892562, + "acc_norm_stderr": 0.03984979653302872 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.7039473684210527, + "acc_stderr": 0.03715062154998904, + "acc_norm": 0.7039473684210527, + "acc_norm_stderr": 0.03715062154998904 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5392156862745098, + "acc_stderr": 0.020165523313907908, + "acc_norm": 0.5392156862745098, + "acc_norm_stderr": 0.020165523313907908 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.5, + "acc_stderr": 0.029827499313594685, + "acc_norm": 0.5, + "acc_norm_stderr": 0.029827499313594685 + 
}, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4375, + "acc_stderr": 0.04708567521880525, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.04708567521880525 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.6018518518518519, + "acc_stderr": 0.03338473403207401, + "acc_norm": 0.6018518518518519, + "acc_norm_stderr": 0.03338473403207401 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.3128491620111732, + "acc_stderr": 0.015506892594647274, + "acc_norm": 0.3128491620111732, + "acc_norm_stderr": 0.015506892594647274 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.55, + "acc_stderr": 0.04999999999999999, + "acc_norm": 0.55, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.74, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.74, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5882352941176471, + "acc_stderr": 0.029896163033125474, + "acc_norm": 0.5882352941176471, + "acc_norm_stderr": 0.029896163033125474 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.673469387755102, + "acc_stderr": 0.030021056238440327, + "acc_norm": 0.673469387755102, + "acc_norm_stderr": 0.030021056238440327 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7552742616033755, + "acc_stderr": 0.027985699387036416, + "acc_norm": 0.7552742616033755, + "acc_norm_stderr": 0.027985699387036416 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.4211212516297262, + "acc_stderr": 0.012610325733489905, + "acc_norm": 0.4211212516297262, + "acc_norm_stderr": 0.012610325733489905 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.7107843137254902, + "acc_stderr": 0.031822318676475544, + "acc_norm": 0.7107843137254902, + "acc_norm_stderr": 0.031822318676475544 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.7272727272727273, + "acc_stderr": 0.0347769116216366, + "acc_norm": 
0.7272727272727273, + "acc_norm_stderr": 0.0347769116216366 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.33047735618115054, + "mc1_stderr": 0.016466769613698286, + "mc2": 0.5044548379252799, + "mc2_stderr": 0.015831760460444766 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5914994096812278, + "acc_stderr": 0.016900062879427115, + "acc_norm": 0.6304604486422668, + "acc_norm_stderr": 0.016594883405685427 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + 
"harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Weyaxi/Einstein-v7-Qwen2-7B", + "model_sha": "d14562807c4a16a9f88502be83ff26e38919236b", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Xwin-LM/Xwin-LM-7B-V0.2/result_2023-11-16 02:03:09.json b/Xwin-LM/Xwin-LM-7B-V0.2/result_2023-11-16 02:03:09.json new file mode 100644 index 0000000000000000000000000000000000000000..271b236acde2bd651c60e6ae6a2d065abd092924 --- /dev/null +++ b/Xwin-LM/Xwin-LM-7B-V0.2/result_2023-11-16 02:03:09.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.29266211604095566, + "acc_stderr": 0.013295916103619403, + "acc_norm": 0.33276450511945393, + "acc_norm_stderr": 0.013769863046192305 + }, + 
"harness|ko_hellaswag|10": { + "acc": 0.35321649073889666, + "acc_stderr": 0.004769924131304646, + "acc_norm": 0.4219279028082055, + "acc_norm_stderr": 0.004928578106026368 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.49707602339181284, + "acc_stderr": 0.03834759370936839, + "acc_norm": 0.49707602339181284, + "acc_norm_stderr": 0.03834759370936839 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.3786407766990291, + "acc_stderr": 0.048026946982589726, + "acc_norm": 0.3786407766990291, + "acc_norm_stderr": 0.048026946982589726 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4112388250319285, + "acc_stderr": 0.017595971908056573, + "acc_norm": 0.4112388250319285, + "acc_norm_stderr": 0.017595971908056573 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3925925925925926, + "acc_stderr": 0.04218506215368879, + "acc_norm": 0.3925925925925926, + "acc_norm_stderr": 0.04218506215368879 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3021276595744681, + "acc_stderr": 0.030017554471880557, + "acc_norm": 0.3021276595744681, + "acc_norm_stderr": 0.030017554471880557 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3253012048192771, + "acc_stderr": 0.03647168523683227, + "acc_norm": 0.3253012048192771, + "acc_norm_stderr": 0.03647168523683227 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4115755627009646, + "acc_stderr": 0.027950481494401255, + "acc_norm": 0.4115755627009646, + "acc_norm_stderr": 0.027950481494401255 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.31390134529147984, + "acc_stderr": 0.031146796482972465, + "acc_norm": 0.31390134529147984, + "acc_norm_stderr": 0.031146796482972465 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3816793893129771, + "acc_stderr": 0.042607351576445594, + "acc_norm": 0.3816793893129771, + "acc_norm_stderr": 0.042607351576445594 
+ }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.37373737373737376, + "acc_stderr": 0.03446897738659333, + "acc_norm": 0.37373737373737376, + "acc_norm_stderr": 0.03446897738659333 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4482758620689655, + "acc_stderr": 0.041443118108781506, + "acc_norm": 0.4482758620689655, + "acc_norm_stderr": 0.041443118108781506 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237655, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237655 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.35294117647058826, + "acc_stderr": 0.03104194130405927, + "acc_norm": 0.35294117647058826, + "acc_norm_stderr": 0.03104194130405927 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.28717948717948716, + "acc_stderr": 0.02293992541853061, + "acc_norm": 0.28717948717948716, + "acc_norm_stderr": 0.02293992541853061 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.04766075165356461, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.04766075165356461 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39408866995073893, + "acc_stderr": 0.034381579670365446, + "acc_norm": 0.39408866995073893, + "acc_norm_stderr": 0.034381579670365446 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3741935483870968, + "acc_stderr": 0.027528904299845777, + "acc_norm": 0.3741935483870968, + 
"acc_norm_stderr": 0.027528904299845777 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5427350427350427, + "acc_stderr": 0.03263622596380688, + "acc_norm": 0.5427350427350427, + "acc_norm_stderr": 0.03263622596380688 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3433962264150943, + "acc_stderr": 0.02922452646912479, + "acc_norm": 0.3433962264150943, + "acc_norm_stderr": 0.02922452646912479 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.38181818181818183, + "acc_stderr": 0.046534298079135075, + "acc_norm": 0.38181818181818183, + "acc_norm_stderr": 0.046534298079135075 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.29259259259259257, + "acc_stderr": 0.02773896963217609, + "acc_norm": 0.29259259259259257, + "acc_norm_stderr": 0.02773896963217609 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.03780445850526733, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.03780445850526733 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5422885572139303, + "acc_stderr": 0.035228658640995975, + "acc_norm": 0.5422885572139303, + "acc_norm_stderr": 0.035228658640995975 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3352601156069364, + "acc_stderr": 0.03599586301247077, + "acc_norm": 0.3352601156069364, + "acc_norm_stderr": 0.03599586301247077 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.29365079365079366, + "acc_stderr": 0.02345603738398202, + "acc_norm": 0.29365079365079366, + "acc_norm_stderr": 0.02345603738398202 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.03852084696008534, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.03852084696008534 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.51, + "acc_stderr": 
0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.3988439306358382, + "acc_stderr": 0.026362437574546538, + "acc_norm": 0.3988439306358382, + "acc_norm_stderr": 0.026362437574546538 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.36809815950920244, + "acc_stderr": 0.03789213935838396, + "acc_norm": 0.36809815950920244, + "acc_norm_stderr": 0.03789213935838396 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.42901234567901236, + "acc_stderr": 0.027538925613470863, + "acc_norm": 0.42901234567901236, + "acc_norm_stderr": 0.027538925613470863 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.47668393782383417, + "acc_stderr": 0.03604513672442206, + "acc_norm": 0.47668393782383417, + "acc_norm_stderr": 0.03604513672442206 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2982456140350877, + "acc_stderr": 0.04303684033537316, + "acc_norm": 0.2982456140350877, + "acc_norm_stderr": 0.04303684033537316 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3577981651376147, + "acc_stderr": 0.020552060784827818, + "acc_norm": 0.3577981651376147, + "acc_norm_stderr": 0.020552060784827818 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.0393253768039287, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.0393253768039287 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.027914055510468, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.027914055510468 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.47, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.47, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5619834710743802, + 
"acc_stderr": 0.045291468044357915, + "acc_norm": 0.5619834710743802, + "acc_norm_stderr": 0.045291468044357915 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.375, + "acc_stderr": 0.039397364351956274, + "acc_norm": 0.375, + "acc_norm_stderr": 0.039397364351956274 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3382352941176471, + "acc_stderr": 0.019139943748487036, + "acc_norm": 0.3382352941176471, + "acc_norm_stderr": 0.019139943748487036 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.30141843971631205, + "acc_stderr": 0.027374128882631157, + "acc_norm": 0.30141843971631205, + "acc_norm_stderr": 0.027374128882631157 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.22321428571428573, + "acc_stderr": 0.039523019677025116, + "acc_norm": 0.22321428571428573, + "acc_norm_stderr": 0.039523019677025116 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.37962962962962965, + "acc_stderr": 0.03309682581119035, + "acc_norm": 0.37962962962962965, + "acc_norm_stderr": 0.03309682581119035 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2435754189944134, + "acc_stderr": 0.014355911964767857, + "acc_norm": 0.2435754189944134, + "acc_norm_stderr": 0.014355911964767857 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.21323529411764705, + "acc_stderr": 0.024880971512294275, + "acc_norm": 0.21323529411764705, + "acc_norm_stderr": 0.024880971512294275 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.39591836734693875, + "acc_stderr": 0.03130802899065685, + "acc_norm": 0.39591836734693875, + "acc_norm_stderr": 0.03130802899065685 + }, + 
"harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.35864978902953587, + "acc_stderr": 0.031219569445301843, + "acc_norm": 0.35864978902953587, + "acc_norm_stderr": 0.031219569445301843 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2835723598435463, + "acc_stderr": 0.011511900775968312, + "acc_norm": 0.2835723598435463, + "acc_norm_stderr": 0.011511900775968312 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.032566854844603886, + "acc_norm": 0.3137254901960784, + "acc_norm_stderr": 0.032566854844603886 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.38181818181818183, + "acc_stderr": 0.037937131711656344, + "acc_norm": 0.38181818181818183, + "acc_norm_stderr": 0.037937131711656344 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.29253365973072215, + "mc1_stderr": 0.015925597445286165, + "mc2": 0.4620630521284231, + "mc2_stderr": 0.016151515792492546 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2632821723730815, + "acc_stderr": 0.015141752199573208, + "acc_norm": 0.31286894923258557, + "acc_norm_stderr": 0.015941010118302658 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + 
"harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Xwin-LM/Xwin-LM-7B-V0.2", + "model_sha": "6e401a3d621f91f751d4dc97be1d6289325a8306", + 
"model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/Yhyu13/LMCocktail-10.7B-v1/result_2023-12-23 15:58:05.json b/Yhyu13/LMCocktail-10.7B-v1/result_2023-12-23 15:58:05.json new file mode 100644 index 0000000000000000000000000000000000000000..ed152bd69a6e1482363a3c5f224f3215b974490d --- /dev/null +++ b/Yhyu13/LMCocktail-10.7B-v1/result_2023-12-23 15:58:05.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3771331058020478, + "acc_stderr": 0.014163366896192603, + "acc_norm": 0.4709897610921502, + "acc_norm_stderr": 0.014586776355294312 + }, + "harness|ko_hellaswag|10": { + "acc": 0.39713204540928104, + "acc_stderr": 0.00488303775891996, + "acc_norm": 0.5318661621190998, + "acc_norm_stderr": 0.004979637330230311 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5730994152046783, + "acc_stderr": 0.03793620616529916, + "acc_norm": 0.5730994152046783, + "acc_norm_stderr": 0.03793620616529916 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6310679611650486, + "acc_stderr": 0.0477761518115674, + "acc_norm": 0.6310679611650486, + "acc_norm_stderr": 0.0477761518115674 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5900383141762452, + "acc_stderr": 0.01758767231233604, + "acc_norm": 0.5900383141762452, + "acc_norm_stderr": 0.01758767231233604 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34814814814814815, + "acc_stderr": 0.041153246103369526, + "acc_norm": 0.34814814814814815, + "acc_norm_stderr": 0.041153246103369526 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.41702127659574467, + "acc_stderr": 0.03223276266711712, + "acc_norm": 0.41702127659574467, + "acc_norm_stderr": 0.03223276266711712 + 
}, + "harness|ko_mmlu_virology|5": { + "acc": 0.42168674698795183, + "acc_stderr": 0.03844453181770917, + "acc_norm": 0.42168674698795183, + "acc_norm_stderr": 0.03844453181770917 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5594855305466238, + "acc_stderr": 0.02819640057419742, + "acc_norm": 0.5594855305466238, + "acc_norm_stderr": 0.02819640057419742 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5336322869955157, + "acc_stderr": 0.033481800170603065, + "acc_norm": 0.5336322869955157, + "acc_norm_stderr": 0.033481800170603065 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5572519083969466, + "acc_stderr": 0.043564472026650695, + "acc_norm": 0.5572519083969466, + "acc_norm_stderr": 0.043564472026650695 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6767676767676768, + "acc_stderr": 0.03332299921070645, + "acc_norm": 0.6767676767676768, + "acc_norm_stderr": 0.03332299921070645 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4827586206896552, + "acc_stderr": 0.041641887201693775, + "acc_norm": 0.4827586206896552, + "acc_norm_stderr": 0.041641887201693775 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3235294117647059, + "acc_stderr": 0.04655010411319616, + "acc_norm": 0.3235294117647059, + "acc_norm_stderr": 0.04655010411319616 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5588235294117647, + "acc_stderr": 0.0322529423239964, + "acc_norm": 0.5588235294117647, + "acc_norm_stderr": 0.0322529423239964 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.517948717948718, + "acc_stderr": 0.025334667080954897, + "acc_norm": 0.517948717948718, + "acc_norm_stderr": 0.025334667080954897 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + 
"acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6203703703703703, + "acc_stderr": 0.04691521224077742, + "acc_norm": 0.6203703703703703, + "acc_norm_stderr": 0.04691521224077742 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39901477832512317, + "acc_stderr": 0.03445487686264715, + "acc_norm": 0.39901477832512317, + "acc_norm_stderr": 0.03445487686264715 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5064516129032258, + "acc_stderr": 0.02844163823354051, + "acc_norm": 0.5064516129032258, + "acc_norm_stderr": 0.02844163823354051 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7606837606837606, + "acc_stderr": 0.027951826808924333, + "acc_norm": 0.7606837606837606, + "acc_norm_stderr": 0.027951826808924333 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.46037735849056605, + "acc_stderr": 0.030676096599389177, + "acc_norm": 0.46037735849056605, + "acc_norm_stderr": 0.030676096599389177 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.04769300568972744, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.04769300568972744 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.028317533496066485, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.028317533496066485 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.038020397601079024, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.038020397601079024 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.681592039800995, + "acc_stderr": 0.03294118479054095, + "acc_norm": 0.681592039800995, + "acc_norm_stderr": 0.03294118479054095 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4682080924855491, + "acc_stderr": 
0.03804749744364763, + "acc_norm": 0.4682080924855491, + "acc_norm_stderr": 0.03804749744364763 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.38095238095238093, + "acc_stderr": 0.025010749116137595, + "acc_norm": 0.38095238095238093, + "acc_norm_stderr": 0.025010749116137595 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4791666666666667, + "acc_stderr": 0.041775789507399935, + "acc_norm": 0.4791666666666667, + "acc_norm_stderr": 0.041775789507399935 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.7, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.7, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5780346820809249, + "acc_stderr": 0.02658923114217426, + "acc_norm": 0.5780346820809249, + "acc_norm_stderr": 0.02658923114217426 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5398773006134969, + "acc_stderr": 0.03915857291436972, + "acc_norm": 0.5398773006134969, + "acc_norm_stderr": 0.03915857291436972 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5648148148148148, + "acc_stderr": 0.027586006221607708, + "acc_norm": 0.5648148148148148, + "acc_norm_stderr": 0.027586006221607708 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6787564766839378, + "acc_stderr": 0.033699508685490674, + "acc_norm": 0.6787564766839378, + "acc_norm_stderr": 0.033699508685490674 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.4298245614035088, + "acc_stderr": 0.04657047260594964, + "acc_norm": 0.4298245614035088, + "acc_norm_stderr": 0.04657047260594964 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6220183486238532, 
+ "acc_stderr": 0.02078918706672811, + "acc_norm": 0.6220183486238532, + "acc_norm_stderr": 0.02078918706672811 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.04360314860077459, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.04360314860077459 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5326797385620915, + "acc_stderr": 0.028568699752225868, + "acc_norm": 0.5326797385620915, + "acc_norm_stderr": 0.028568699752225868 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7024793388429752, + "acc_stderr": 0.04173349148083499, + "acc_norm": 0.7024793388429752, + "acc_norm_stderr": 0.04173349148083499 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5328947368421053, + "acc_stderr": 0.040601270352363966, + "acc_norm": 0.5328947368421053, + "acc_norm_stderr": 0.040601270352363966 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.47875816993464054, + "acc_stderr": 0.020209572388600244, + "acc_norm": 0.47875816993464054, + "acc_norm_stderr": 0.020209572388600244 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3723404255319149, + "acc_stderr": 0.02883892147125146, + "acc_norm": 0.3723404255319149, + "acc_norm_stderr": 0.02883892147125146 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.38392857142857145, + "acc_stderr": 0.04616143075028547, + "acc_norm": 0.38392857142857145, + "acc_norm_stderr": 0.04616143075028547 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4398148148148148, + "acc_stderr": 0.03385177976044809, + "acc_norm": 0.4398148148148148, + "acc_norm_stderr": 0.03385177976044809 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.311731843575419, + "acc_stderr": 0.015491756531894638, + "acc_norm": 0.311731843575419, + "acc_norm_stderr": 0.015491756531894638 + }, + 
"harness|ko_mmlu_college_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.7, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.7, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.48161764705882354, + "acc_stderr": 0.030352303395351964, + "acc_norm": 0.48161764705882354, + "acc_norm_stderr": 0.030352303395351964 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6408163265306123, + "acc_stderr": 0.030713560455108493, + "acc_norm": 0.6408163265306123, + "acc_norm_stderr": 0.030713560455108493 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.030685820596610798, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.030685820596610798 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.38722294654498046, + "acc_stderr": 0.012441155326854931, + "acc_norm": 0.38722294654498046, + "acc_norm_stderr": 0.012441155326854931 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5882352941176471, + "acc_stderr": 0.034542365853806094, + "acc_norm": 0.5882352941176471, + "acc_norm_stderr": 0.034542365853806094 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6, + "acc_stderr": 0.03825460278380027, + "acc_norm": 0.6, + "acc_norm_stderr": 0.03825460278380027 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3635250917992656, + "mc1_stderr": 0.016838862883965817, + "mc2": 0.5248572009293102, + "mc2_stderr": 0.016409147736035586 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.48642266824085006, + "acc_stderr": 0.01718401506040145, + "acc_norm": 0.48760330578512395, + "acc_norm_stderr": 0.017185069732676538 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + 
"harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 
1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "Yhyu13/LMCocktail-10.7B-v1", + "model_sha": "79ec3a42118f0715666b86bacab2688b62e1433b", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/abacusai/Liberated-Qwen1.5-14B/result_2024-06-12 22:47:43.json b/abacusai/Liberated-Qwen1.5-14B/result_2024-06-12 22:47:43.json new file mode 100644 index 0000000000000000000000000000000000000000..878610786587d07f93513572a3b65e2f59ebddfc --- /dev/null +++ b/abacusai/Liberated-Qwen1.5-14B/result_2024-06-12 22:47:43.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.34897610921501704, + "acc_stderr": 0.013928933461382497, + "acc_norm": 0.3856655290102389, + "acc_norm_stderr": 0.014224250973257174 + }, + "harness|ko_hellaswag|10": { + "acc": 0.37293367855008963, + "acc_stderr": 0.00482596376877222, + "acc_norm": 0.4887472615016929, + "acc_norm_stderr": 0.004988517597998618 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6257309941520468, + "acc_stderr": 0.03711601185389481, + "acc_norm": 0.6257309941520468, + "acc_norm_stderr": 0.03711601185389481 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6893203883495146, + "acc_stderr": 0.0458212416016155, + "acc_norm": 0.6893203883495146, + "acc_norm_stderr": 0.0458212416016155 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 
0.5964240102171137, + "acc_stderr": 0.017544332237926428, + "acc_norm": 0.5964240102171137, + "acc_norm_stderr": 0.017544332237926428 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04072314811876837, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04072314811876837 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.574468085106383, + "acc_stderr": 0.0323214691622447, + "acc_norm": 0.574468085106383, + "acc_norm_stderr": 0.0323214691622447 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42168674698795183, + "acc_stderr": 0.03844453181770917, + "acc_norm": 0.42168674698795183, + "acc_norm_stderr": 0.03844453181770917 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5466237942122186, + "acc_stderr": 0.02827435985489423, + "acc_norm": 0.5466237942122186, + "acc_norm_stderr": 0.02827435985489423 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5336322869955157, + "acc_stderr": 0.033481800170603065, + "acc_norm": 0.5336322869955157, + "acc_norm_stderr": 0.033481800170603065 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6030534351145038, + "acc_stderr": 0.04291135671009225, + "acc_norm": 0.6030534351145038, + "acc_norm_stderr": 0.04291135671009225 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7373737373737373, + "acc_stderr": 0.03135305009533088, + "acc_norm": 0.7373737373737373, + "acc_norm_stderr": 0.03135305009533088 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5310344827586206, + "acc_stderr": 0.04158632762097828, + "acc_norm": 0.5310344827586206, + "acc_norm_stderr": 0.04158632762097828 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 
0.3137254901960784, + "acc_stderr": 0.04617034827006716, + "acc_norm": 0.3137254901960784, + "acc_norm_stderr": 0.04617034827006716 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6218487394957983, + "acc_stderr": 0.03149930577784906, + "acc_norm": 0.6218487394957983, + "acc_norm_stderr": 0.03149930577784906 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5512820512820513, + "acc_stderr": 0.025217315184846486, + "acc_norm": 0.5512820512820513, + "acc_norm_stderr": 0.025217315184846486 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.66, + "acc_stderr": 0.04760952285695237, + "acc_norm": 0.66, + "acc_norm_stderr": 0.04760952285695237 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6203703703703703, + "acc_stderr": 0.04691521224077742, + "acc_norm": 0.6203703703703703, + "acc_norm_stderr": 0.04691521224077742 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.49261083743842365, + "acc_stderr": 0.03517603540361008, + "acc_norm": 0.49261083743842365, + "acc_norm_stderr": 0.03517603540361008 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6096774193548387, + "acc_stderr": 0.027751256636969587, + "acc_norm": 0.6096774193548387, + "acc_norm_stderr": 0.027751256636969587 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7948717948717948, + "acc_stderr": 0.026453508054040332, + "acc_norm": 0.7948717948717948, + "acc_norm_stderr": 0.026453508054040332 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.539622641509434, + "acc_stderr": 0.030676096599389184, + "acc_norm": 0.539622641509434, + "acc_norm_stderr": 0.030676096599389184 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.04607582090719976, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.04607582090719976 + }, + 
"harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3851851851851852, + "acc_stderr": 0.029670906124630875, + "acc_norm": 0.3851851851851852, + "acc_norm_stderr": 0.029670906124630875 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3973509933774834, + "acc_stderr": 0.03995524007681681, + "acc_norm": 0.3973509933774834, + "acc_norm_stderr": 0.03995524007681681 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6766169154228856, + "acc_stderr": 0.03307615947979035, + "acc_norm": 0.6766169154228856, + "acc_norm_stderr": 0.03307615947979035 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5144508670520231, + "acc_stderr": 0.03810871630454764, + "acc_norm": 0.5144508670520231, + "acc_norm_stderr": 0.03810871630454764 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.47619047619047616, + "acc_stderr": 0.025722097064388525, + "acc_norm": 0.47619047619047616, + "acc_norm_stderr": 0.025722097064388525 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4375, + "acc_stderr": 0.04148415739394154, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.04148415739394154 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.71, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.71, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5867052023121387, + "acc_stderr": 0.026511261369409244, + "acc_norm": 0.5867052023121387, + "acc_norm_stderr": 0.026511261369409244 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5153374233128835, + "acc_stderr": 0.03926522378708843, + "acc_norm": 0.5153374233128835, + "acc_norm_stderr": 0.03926522378708843 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5185185185185185, + "acc_stderr": 0.02780165621232366, + "acc_norm": 0.5185185185185185, + "acc_norm_stderr": 0.02780165621232366 + }, 
+ "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6269430051813472, + "acc_stderr": 0.03490205592048574, + "acc_norm": 0.6269430051813472, + "acc_norm_stderr": 0.03490205592048574 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.42105263157894735, + "acc_stderr": 0.04644602091222317, + "acc_norm": 0.42105263157894735, + "acc_norm_stderr": 0.04644602091222317 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6440366972477064, + "acc_stderr": 0.020528559278244214, + "acc_norm": 0.6440366972477064, + "acc_norm_stderr": 0.020528559278244214 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.46825396825396826, + "acc_stderr": 0.044631127206771704, + "acc_norm": 0.46825396825396826, + "acc_norm_stderr": 0.044631127206771704 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5816993464052288, + "acc_stderr": 0.028245134024387296, + "acc_norm": 0.5816993464052288, + "acc_norm_stderr": 0.028245134024387296 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.61, + "acc_stderr": 0.049020713000019756, + "acc_norm": 0.61, + "acc_norm_stderr": 0.049020713000019756 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.743801652892562, + "acc_stderr": 0.0398497965330287, + "acc_norm": 0.743801652892562, + "acc_norm_stderr": 0.0398497965330287 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.6052631578947368, + "acc_stderr": 0.03977749934622073, + "acc_norm": 0.6052631578947368, + "acc_norm_stderr": 0.03977749934622073 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.49019607843137253, + "acc_stderr": 0.020223946005074305, + "acc_norm": 0.49019607843137253, + "acc_norm_stderr": 0.020223946005074305 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.40070921985815605, + "acc_stderr": 0.029233465745573093, + "acc_norm": 
0.40070921985815605, + "acc_norm_stderr": 0.029233465745573093 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4375, + "acc_stderr": 0.04708567521880525, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.04708567521880525 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5231481481481481, + "acc_stderr": 0.034063153607115065, + "acc_norm": 0.5231481481481481, + "acc_norm_stderr": 0.034063153607115065 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2849162011173184, + "acc_stderr": 0.015096222302469804, + "acc_norm": 0.2849162011173184, + "acc_norm_stderr": 0.015096222302469804 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.74, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.74, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.47058823529411764, + "acc_stderr": 0.030320243265004137, + "acc_norm": 0.47058823529411764, + "acc_norm_stderr": 0.030320243265004137 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.673469387755102, + "acc_stderr": 0.030021056238440317, + "acc_norm": 0.673469387755102, + "acc_norm_stderr": 0.030021056238440317 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.729957805907173, + "acc_stderr": 0.028900721906293426, + "acc_norm": 0.729957805907173, + "acc_norm_stderr": 0.028900721906293426 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.36571056062581486, + "acc_stderr": 0.012301028188840567, + "acc_norm": 0.36571056062581486, + "acc_norm_stderr": 0.012301028188840567 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6274509803921569, + "acc_stderr": 0.03393388584958404, + "acc_norm": 0.6274509803921569, + "acc_norm_stderr": 0.03393388584958404 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 
0.6666666666666666, + "acc_stderr": 0.036810508691615486, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.036810508691615486 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.33659730722154224, + "mc1_stderr": 0.01654241280949487, + "mc2": 0.49113766293578015, + "mc2_stderr": 0.015994874090332005 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5076741440377804, + "acc_stderr": 0.01718832921965428, + "acc_norm": 0.6044864226682408, + "acc_norm_stderr": 0.016810815902206042 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + 
"harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "abacusai/Liberated-Qwen1.5-14B", + "model_sha": "cc0fa5102bfee821bb5e49f082731ccb9d1fedf1", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/abacusai/Llama-3-Smaug-8B/result_2024-05-13 16:32:41.json b/abacusai/Llama-3-Smaug-8B/result_2024-05-13 16:32:41.json new file mode 100644 index 0000000000000000000000000000000000000000..39da8567d346e00f7ab037c5ae11692c086f53bb --- /dev/null +++ b/abacusai/Llama-3-Smaug-8B/result_2024-05-13 16:32:41.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3796928327645051, + "acc_stderr": 
0.014182119866974872, + "acc_norm": 0.4351535836177474, + "acc_norm_stderr": 0.014487986197186052 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3664608643696475, + "acc_stderr": 0.004808526802718588, + "acc_norm": 0.48376817367058356, + "acc_norm_stderr": 0.00498715138109118 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.52046783625731, + "acc_stderr": 0.0383161053282193, + "acc_norm": 0.52046783625731, + "acc_norm_stderr": 0.0383161053282193 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5436893203883495, + "acc_stderr": 0.049318019942204146, + "acc_norm": 0.5436893203883495, + "acc_norm_stderr": 0.049318019942204146 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.01776925058353325, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.01776925058353325 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.04171654161354543, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.04171654161354543 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.49361702127659574, + "acc_stderr": 0.03268335899936338, + "acc_norm": 0.49361702127659574, + "acc_norm_stderr": 0.03268335899936338 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39156626506024095, + "acc_stderr": 0.03799857454479636, + "acc_norm": 0.39156626506024095, + "acc_norm_stderr": 0.03799857454479636 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5305466237942122, + "acc_stderr": 0.028345045864840632, + "acc_norm": 0.5305466237942122, + "acc_norm_stderr": 0.028345045864840632 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5022421524663677, + "acc_stderr": 0.033557465352232634, + "acc_norm": 0.5022421524663677, + "acc_norm_stderr": 0.033557465352232634 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5190839694656488, + 
"acc_stderr": 0.04382094705550988, + "acc_norm": 0.5190839694656488, + "acc_norm_stderr": 0.04382094705550988 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956914, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956914 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.4898989898989899, + "acc_stderr": 0.035616254886737454, + "acc_norm": 0.4898989898989899, + "acc_norm_stderr": 0.035616254886737454 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5172413793103449, + "acc_stderr": 0.04164188720169375, + "acc_norm": 0.5172413793103449, + "acc_norm_stderr": 0.04164188720169375 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.30392156862745096, + "acc_stderr": 0.045766654032077615, + "acc_norm": 0.30392156862745096, + "acc_norm_stderr": 0.045766654032077615 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5042016806722689, + "acc_stderr": 0.03247734334448111, + "acc_norm": 0.5042016806722689, + "acc_norm_stderr": 0.03247734334448111 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5102564102564102, + "acc_stderr": 0.025345672221942374, + "acc_norm": 0.5102564102564102, + "acc_norm_stderr": 0.025345672221942374 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5833333333333334, + "acc_stderr": 0.04766075165356461, + "acc_norm": 0.5833333333333334, + "acc_norm_stderr": 0.04766075165356461 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4187192118226601, + "acc_stderr": 0.03471192860518468, + "acc_norm": 0.4187192118226601, + "acc_norm_stderr": 0.03471192860518468 + }, + "harness|ko_mmlu_high_school_biology|5": { + 
"acc": 0.535483870967742, + "acc_stderr": 0.02837228779796294, + "acc_norm": 0.535483870967742, + "acc_norm_stderr": 0.02837228779796294 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7393162393162394, + "acc_stderr": 0.02876034895652341, + "acc_norm": 0.7393162393162394, + "acc_norm_stderr": 0.02876034895652341 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5094339622641509, + "acc_stderr": 0.0307673947078081, + "acc_norm": 0.5094339622641509, + "acc_norm_stderr": 0.0307673947078081 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4818181818181818, + "acc_stderr": 0.04785964010794917, + "acc_norm": 0.4818181818181818, + "acc_norm_stderr": 0.04785964010794917 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3296296296296296, + "acc_stderr": 0.02866120111652459, + "acc_norm": 0.3296296296296296, + "acc_norm_stderr": 0.02866120111652459 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.03780445850526733, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.03780445850526733 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6467661691542289, + "acc_stderr": 0.03379790611796777, + "acc_norm": 0.6467661691542289, + "acc_norm_stderr": 0.03379790611796777 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4277456647398844, + "acc_stderr": 0.03772446857518027, + "acc_norm": 0.4277456647398844, + "acc_norm_stderr": 0.03772446857518027 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.024594975128920938, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.024594975128920938 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4236111111111111, + "acc_stderr": 0.041321250197233685, + "acc_norm": 0.4236111111111111, + "acc_norm_stderr": 0.041321250197233685 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 
+ }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.65, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.65, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5635838150289018, + "acc_stderr": 0.026700545424943687, + "acc_norm": 0.5635838150289018, + "acc_norm_stderr": 0.026700545424943687 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4294478527607362, + "acc_stderr": 0.038890666191127216, + "acc_norm": 0.4294478527607362, + "acc_norm_stderr": 0.038890666191127216 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.49691358024691357, + "acc_stderr": 0.027820214158594384, + "acc_norm": 0.49691358024691357, + "acc_norm_stderr": 0.027820214158594384 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5492227979274611, + "acc_stderr": 0.03590910952235524, + "acc_norm": 0.5492227979274611, + "acc_norm_stderr": 0.03590910952235524 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.32456140350877194, + "acc_stderr": 0.044045561573747685, + "acc_norm": 0.32456140350877194, + "acc_norm_stderr": 0.044045561573747685 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5596330275229358, + "acc_stderr": 0.021284310623761547, + "acc_norm": 0.5596330275229358, + "acc_norm_stderr": 0.021284310623761547 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4365079365079365, + "acc_stderr": 0.044359328928514664, + "acc_norm": 0.4365079365079365, + "acc_norm_stderr": 0.044359328928514664 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5098039215686274, + "acc_stderr": 0.028624412550167958, + "acc_norm": 0.5098039215686274, + "acc_norm_stderr": 0.028624412550167958 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.59, + "acc_norm_stderr": 
0.04943110704237102 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6694214876033058, + "acc_stderr": 0.04294340845212095, + "acc_norm": 0.6694214876033058, + "acc_norm_stderr": 0.04294340845212095 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4473684210526316, + "acc_stderr": 0.04046336883978251, + "acc_norm": 0.4473684210526316, + "acc_norm_stderr": 0.04046336883978251 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.42483660130718953, + "acc_stderr": 0.019997973035458333, + "acc_norm": 0.42483660130718953, + "acc_norm_stderr": 0.019997973035458333 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3475177304964539, + "acc_stderr": 0.028406627809590954, + "acc_norm": 0.3475177304964539, + "acc_norm_stderr": 0.028406627809590954 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.49107142857142855, + "acc_stderr": 0.04745033255489123, + "acc_norm": 0.49107142857142855, + "acc_norm_stderr": 0.04745033255489123 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.36574074074074076, + "acc_stderr": 0.03284738857647206, + "acc_norm": 0.36574074074074076, + "acc_norm_stderr": 0.03284738857647206 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2871508379888268, + "acc_stderr": 0.01513160884996376, + "acc_norm": 0.2871508379888268, + "acc_norm_stderr": 0.01513160884996376 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.66, + "acc_stderr": 0.04760952285695237, + "acc_norm": 0.66, + "acc_norm_stderr": 0.04760952285695237 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4117647058823529, + "acc_stderr": 0.029896163033125474, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.029896163033125474 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6122448979591837, + "acc_stderr": 
0.031192230726795656, + "acc_norm": 0.6122448979591837, + "acc_norm_stderr": 0.031192230726795656 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6329113924050633, + "acc_stderr": 0.031376240725616185, + "acc_norm": 0.6329113924050633, + "acc_norm_stderr": 0.031376240725616185 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.35723598435462844, + "acc_stderr": 0.012238615750316506, + "acc_norm": 0.35723598435462844, + "acc_norm_stderr": 0.012238615750316506 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5441176470588235, + "acc_stderr": 0.03495624522015476, + "acc_norm": 0.5441176470588235, + "acc_norm_stderr": 0.03495624522015476 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.03681050869161549, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.03681050869161549 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.31211750305997554, + "mc1_stderr": 0.01622075676952091, + "mc2": 0.4908791473172507, + "mc2_stderr": 0.015867506225120915 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.45218417945690675, + "acc_stderr": 0.017111567130916785, + "acc_norm": 0.5053128689492326, + "acc_norm_stderr": 0.01718938362722971 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + 
"harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + 
"model_name": "01-ai/Yi-1.5-9B-32K", + "model_sha": "0a4f0a2344e414c2633cbbbc2674a0387b1fb4f2", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/abacusai/bigstral-12b-32k/result_2024-07-31 17:42:37.json b/abacusai/bigstral-12b-32k/result_2024-07-31 17:42:37.json new file mode 100644 index 0000000000000000000000000000000000000000..a0ce9f7ed2c75b4d3ef664c706d74f927c95a94a --- /dev/null +++ b/abacusai/bigstral-12b-32k/result_2024-07-31 17:42:37.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3122866894197952, + "acc_stderr": 0.013542598541688065, + "acc_norm": 0.38054607508532423, + "acc_norm_stderr": 0.014188277712349819 + }, + "harness|ko_hellaswag|10": { + "acc": 0.36436964748058154, + "acc_stderr": 0.004802694106203668, + "acc_norm": 0.46773551085441145, + "acc_norm_stderr": 0.00497938187671261 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.52046783625731, + "acc_stderr": 0.038316105328219316, + "acc_norm": 0.52046783625731, + "acc_norm_stderr": 0.038316105328219316 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5728155339805825, + "acc_stderr": 0.04897957737781168, + "acc_norm": 0.5728155339805825, + "acc_norm_stderr": 0.04897957737781168 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4329501915708812, + "acc_stderr": 0.017718469101513975, + "acc_norm": 0.4329501915708812, + "acc_norm_stderr": 0.017718469101513975 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.32592592592592595, + "acc_stderr": 0.04049122041702506, + "acc_norm": 0.32592592592592595, + "acc_norm_stderr": 0.04049122041702506 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3829787234042553, + 
"acc_stderr": 0.031778212502369216, + "acc_norm": 0.3829787234042553, + "acc_norm_stderr": 0.031778212502369216 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4036144578313253, + "acc_stderr": 0.03819486140758397, + "acc_norm": 0.4036144578313253, + "acc_norm_stderr": 0.03819486140758397 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.45016077170418006, + "acc_stderr": 0.02825666072336018, + "acc_norm": 0.45016077170418006, + "acc_norm_stderr": 0.02825666072336018 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4304932735426009, + "acc_stderr": 0.033231973029429394, + "acc_norm": 0.4304932735426009, + "acc_norm_stderr": 0.033231973029429394 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.40458015267175573, + "acc_stderr": 0.043046937953806645, + "acc_norm": 0.40458015267175573, + "acc_norm_stderr": 0.043046937953806645 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5707070707070707, + "acc_stderr": 0.035265527246011986, + "acc_norm": 0.5707070707070707, + "acc_norm_stderr": 0.035265527246011986 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4206896551724138, + "acc_stderr": 0.0411391498118926, + "acc_norm": 0.4206896551724138, + "acc_norm_stderr": 0.0411391498118926 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.04533838195929774, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.04533838195929774 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.032252942323996406, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.032252942323996406 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.43333333333333335, + "acc_stderr": 0.02512465352588513, + "acc_norm": 0.43333333333333335, + "acc_norm_stderr": 0.02512465352588513 + }, + 
"harness|ko_mmlu_computer_security|5": { + "acc": 0.53, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.53, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.35960591133004927, + "acc_stderr": 0.03376458246509568, + "acc_norm": 0.35960591133004927, + "acc_norm_stderr": 0.03376458246509568 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4258064516129032, + "acc_stderr": 0.028129112709165897, + "acc_norm": 0.4258064516129032, + "acc_norm_stderr": 0.028129112709165897 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6752136752136753, + "acc_stderr": 0.030679022765498828, + "acc_norm": 0.6752136752136753, + "acc_norm_stderr": 0.030679022765498828 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.38113207547169814, + "acc_stderr": 0.029890609686286606, + "acc_norm": 0.38113207547169814, + "acc_norm_stderr": 0.029890609686286606 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4636363636363636, + "acc_stderr": 0.047764491623961985, + "acc_norm": 0.4636363636363636, + "acc_norm_stderr": 0.047764491623961985 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.028742040903948496, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.028742040903948496 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3509933774834437, + "acc_stderr": 0.03896981964257375, + "acc_norm": 0.3509933774834437, + "acc_norm_stderr": 0.03896981964257375 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6119402985074627, + "acc_stderr": 0.0344578996436275, + "acc_norm": 0.6119402985074627, + "acc_norm_stderr": 
0.0344578996436275 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3583815028901734, + "acc_stderr": 0.036563436533531585, + "acc_norm": 0.3583815028901734, + "acc_norm_stderr": 0.036563436533531585 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3544973544973545, + "acc_stderr": 0.024636830602841997, + "acc_norm": 0.3544973544973545, + "acc_norm_stderr": 0.024636830602841997 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.040166600304512336, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.040166600304512336 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4653179190751445, + "acc_stderr": 0.026854257928258886, + "acc_norm": 0.4653179190751445, + "acc_norm_stderr": 0.026854257928258886 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5030674846625767, + "acc_stderr": 0.03928297078179663, + "acc_norm": 0.5030674846625767, + "acc_norm_stderr": 0.03928297078179663 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.45987654320987653, + "acc_stderr": 0.027731022753539284, + "acc_norm": 0.45987654320987653, + "acc_norm_stderr": 0.027731022753539284 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5181347150259067, + "acc_stderr": 0.036060650018329185, + "acc_norm": 0.5181347150259067, + "acc_norm_stderr": 0.036060650018329185 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.044346007015849245, + "acc_norm": 0.3333333333333333, + 
"acc_norm_stderr": 0.044346007015849245 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.4990825688073395, + "acc_stderr": 0.021437287056051215, + "acc_norm": 0.4990825688073395, + "acc_norm_stderr": 0.021437287056051215 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.04360314860077459, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.04360314860077459 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4215686274509804, + "acc_stderr": 0.02827549015679143, + "acc_norm": 0.4215686274509804, + "acc_norm_stderr": 0.02827549015679143 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.628099173553719, + "acc_stderr": 0.04412015806624504, + "acc_norm": 0.628099173553719, + "acc_norm_stderr": 0.04412015806624504 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3881578947368421, + "acc_stderr": 0.03965842097512744, + "acc_norm": 0.3881578947368421, + "acc_norm_stderr": 0.03965842097512744 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.39215686274509803, + "acc_stderr": 0.01975172650876263, + "acc_norm": 0.39215686274509803, + "acc_norm_stderr": 0.01975172650876263 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.35815602836879434, + "acc_stderr": 0.028602085862759412, + "acc_norm": 0.35815602836879434, + "acc_norm_stderr": 0.028602085862759412 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.36607142857142855, + "acc_stderr": 0.0457237235873743, + "acc_norm": 0.36607142857142855, + "acc_norm_stderr": 0.0457237235873743 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3287037037037037, + "acc_stderr": 0.032036140846700596, + "acc_norm": 0.3287037037037037, + "acc_norm_stderr": 0.032036140846700596 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27932960893854747, + "acc_stderr": 
0.015005762446786168, + "acc_norm": 0.27932960893854747, + "acc_norm_stderr": 0.015005762446786168 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.31985294117647056, + "acc_stderr": 0.02833295951403122, + "acc_norm": 0.31985294117647056, + "acc_norm_stderr": 0.02833295951403122 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5265306122448979, + "acc_stderr": 0.03196412734523272, + "acc_norm": 0.5265306122448979, + "acc_norm_stderr": 0.03196412734523272 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5822784810126582, + "acc_stderr": 0.03210353032241269, + "acc_norm": 0.5822784810126582, + "acc_norm_stderr": 0.03210353032241269 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.35528031290743156, + "acc_stderr": 0.012223623364044043, + "acc_norm": 0.35528031290743156, + "acc_norm_stderr": 0.012223623364044043 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.45098039215686275, + "acc_stderr": 0.03492406104163614, + "acc_norm": 0.45098039215686275, + "acc_norm_stderr": 0.03492406104163614 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4909090909090909, + "acc_stderr": 0.03903698647748441, + "acc_norm": 0.4909090909090909, + "acc_norm_stderr": 0.03903698647748441 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.37576499388004897, + "mc1_stderr": 0.016954584060214307, + "mc2": 0.5463057344779203, + "mc2_stderr": 0.01632710027667355 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3860684769775679, + "acc_stderr": 0.01673813076032175, + "acc_norm": 0.38961038961038963, + "acc_norm_stderr": 0.01676616167189351 + } + }, + "versions": { + "all": 0, + 
"harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + 
"harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "abacusai/bigstral-12b-32k", + "model_sha": "b78a5385ec1b04d6c97f25e9ba1dff18dc98305f", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/abacusai/bigyi-15b/result_2024-07-31 17:42:46.json b/abacusai/bigyi-15b/result_2024-07-31 17:42:46.json new file mode 100644 index 0000000000000000000000000000000000000000..0051f6ac2c0378ae684784c8797ba60007f02744 --- /dev/null +++ b/abacusai/bigyi-15b/result_2024-07-31 17:42:46.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2790102389078498, + "acc_stderr": 0.01310678488360134, + "acc_norm": 0.318259385665529, + "acc_norm_stderr": 0.013611993916971453 + }, + "harness|ko_hellaswag|10": { + "acc": 0.31467835092611035, + "acc_stderr": 0.0046343856941700465, + "acc_norm": 0.37661820354511055, + "acc_norm_stderr": 0.0048354759576109225 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.47953216374269003, + "acc_stderr": 0.038316105328219316, + "acc_norm": 0.47953216374269003, + "acc_norm_stderr": 0.038316105328219316 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5339805825242718, + "acc_stderr": 
0.04939291447273481, + "acc_norm": 0.5339805825242718, + "acc_norm_stderr": 0.04939291447273481 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.44189016602809705, + "acc_stderr": 0.017758800534214414, + "acc_norm": 0.44189016602809705, + "acc_norm_stderr": 0.017758800534214414 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2518518518518518, + "acc_stderr": 0.03749850709174021, + "acc_norm": 0.2518518518518518, + "acc_norm_stderr": 0.03749850709174021 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39148936170212767, + "acc_stderr": 0.031907012423268113, + "acc_norm": 0.39148936170212767, + "acc_norm_stderr": 0.031907012423268113 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42168674698795183, + "acc_stderr": 0.03844453181770917, + "acc_norm": 0.42168674698795183, + "acc_norm_stderr": 0.03844453181770917 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4340836012861736, + "acc_stderr": 0.028150232244535594, + "acc_norm": 0.4340836012861736, + "acc_norm_stderr": 0.028150232244535594 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3183856502242152, + "acc_stderr": 0.03126580522513713, + "acc_norm": 0.3183856502242152, + "acc_norm_stderr": 0.03126580522513713 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.42748091603053434, + "acc_stderr": 0.04338920305792399, + "acc_norm": 0.42748091603053434, + "acc_norm_stderr": 0.04338920305792399 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.4898989898989899, + "acc_stderr": 0.035616254886737454, + "acc_norm": 0.4898989898989899, + "acc_norm_stderr": 0.035616254886737454 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4827586206896552, + 
"acc_stderr": 0.04164188720169377, + "acc_norm": 0.4827586206896552, + "acc_norm_stderr": 0.04164188720169377 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.16666666666666666, + "acc_stderr": 0.03708284662416545, + "acc_norm": 0.16666666666666666, + "acc_norm_stderr": 0.03708284662416545 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5378151260504201, + "acc_stderr": 0.032385469487589795, + "acc_norm": 0.5378151260504201, + "acc_norm_stderr": 0.032385469487589795 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4153846153846154, + "acc_stderr": 0.024985354923102318, + "acc_norm": 0.4153846153846154, + "acc_norm_stderr": 0.024985354923102318 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.53, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.53, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4537037037037037, + "acc_stderr": 0.04812917324536824, + "acc_norm": 0.4537037037037037, + "acc_norm_stderr": 0.04812917324536824 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39408866995073893, + "acc_stderr": 0.03438157967036546, + "acc_norm": 0.39408866995073893, + "acc_norm_stderr": 0.03438157967036546 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.45806451612903226, + "acc_stderr": 0.028343787250540632, + "acc_norm": 0.45806451612903226, + "acc_norm_stderr": 0.028343787250540632 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6794871794871795, + "acc_stderr": 0.030572811310299604, + "acc_norm": 0.6794871794871795, + "acc_norm_stderr": 0.030572811310299604 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.42641509433962266, + "acc_stderr": 0.030437794342983045, + "acc_norm": 0.42641509433962266, + "acc_norm_stderr": 0.030437794342983045 + }, + 
"harness|ko_mmlu_public_relations|5": { + "acc": 0.37272727272727274, + "acc_stderr": 0.04631381319425463, + "acc_norm": 0.37272727272727274, + "acc_norm_stderr": 0.04631381319425463 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.34444444444444444, + "acc_stderr": 0.028972648884844267, + "acc_norm": 0.34444444444444444, + "acc_norm_stderr": 0.028972648884844267 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.26490066225165565, + "acc_stderr": 0.03603038545360384, + "acc_norm": 0.26490066225165565, + "acc_norm_stderr": 0.03603038545360384 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5621890547263682, + "acc_stderr": 0.035080801121998406, + "acc_norm": 0.5621890547263682, + "acc_norm_stderr": 0.035080801121998406 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.36416184971098264, + "acc_stderr": 0.03669072477416907, + "acc_norm": 0.36416184971098264, + "acc_norm_stderr": 0.03669072477416907 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.4523809523809524, + "acc_stderr": 0.02563425811555495, + "acc_norm": 0.4523809523809524, + "acc_norm_stderr": 0.02563425811555495 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.03942082639927213, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.03942082639927213 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.56, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.56, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.42196531791907516, + "acc_stderr": 0.02658923114217426, + "acc_norm": 0.42196531791907516, + "acc_norm_stderr": 0.02658923114217426 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4049079754601227, + "acc_stderr": 0.03856672163548913, + "acc_norm": 0.4049079754601227, + 
"acc_norm_stderr": 0.03856672163548913 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.404320987654321, + "acc_stderr": 0.027306625297327688, + "acc_norm": 0.404320987654321, + "acc_norm_stderr": 0.027306625297327688 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.48704663212435234, + "acc_stderr": 0.03607228061047749, + "acc_norm": 0.48704663212435234, + "acc_norm_stderr": 0.03607228061047749 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.35964912280701755, + "acc_stderr": 0.04514496132873633, + "acc_norm": 0.35964912280701755, + "acc_norm_stderr": 0.04514496132873633 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.43119266055045874, + "acc_stderr": 0.021233365030319567, + "acc_norm": 0.43119266055045874, + "acc_norm_stderr": 0.021233365030319567 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.04285714285714281, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.04285714285714281 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.41830065359477125, + "acc_stderr": 0.02824513402438729, + "acc_norm": 0.41830065359477125, + "acc_norm_stderr": 0.02824513402438729 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5289256198347108, + "acc_stderr": 0.04556710331269498, + "acc_norm": 0.5289256198347108, + "acc_norm_stderr": 0.04556710331269498 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.42105263157894735, + "acc_stderr": 0.04017901275981748, + "acc_norm": 0.42105263157894735, + "acc_norm_stderr": 0.04017901275981748 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3709150326797386, + "acc_stderr": 0.01954210156485411, + 
"acc_norm": 0.3709150326797386, + "acc_norm_stderr": 0.01954210156485411 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.4148936170212766, + "acc_stderr": 0.0293922365846125, + "acc_norm": 0.4148936170212766, + "acc_norm_stderr": 0.0293922365846125 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.04697113923010213, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.04697113923010213 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.03362277436608043, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.03362277436608043 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.26033519553072626, + "acc_stderr": 0.014676252009319476, + "acc_norm": 0.26033519553072626, + "acc_norm_stderr": 0.014676252009319476 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.31985294117647056, + "acc_stderr": 0.028332959514031246, + "acc_norm": 0.31985294117647056, + "acc_norm_stderr": 0.028332959514031246 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5714285714285714, + "acc_stderr": 0.03168091161233882, + "acc_norm": 0.5714285714285714, + "acc_norm_stderr": 0.03168091161233882 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.46835443037974683, + "acc_stderr": 0.03248197400511075, + "acc_norm": 0.46835443037974683, + "acc_norm_stderr": 0.03248197400511075 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.32659713168187743, + "acc_stderr": 0.011977676704715992, + "acc_norm": 0.32659713168187743, + "acc_norm_stderr": 0.011977676704715992 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 
0.4117647058823529, + "acc_stderr": 0.0345423658538061, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.0345423658538061 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.30303030303030304, + "acc_stderr": 0.03588624800091707, + "acc_norm": 0.30303030303030304, + "acc_norm_stderr": 0.03588624800091707 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2962056303549572, + "mc1_stderr": 0.01598359510181139, + "mc2": 0.5051069878730354, + "mc2_stderr": 0.01625240704073453 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3884297520661157, + "acc_stderr": 0.01675692157106941, + "acc_norm": 0.4592680047225502, + "acc_norm_stderr": 0.01713321827653767 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + 
"harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "abacusai/bigyi-15b", + "model_sha": "5ec2656b39515e3a903adfb05e5022b0f4eb5e2a", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/abhishekchohan/Yi-9B-Forest-DPO-v1.0/result_2024-05-27 20:51:31.json b/abhishekchohan/Yi-9B-Forest-DPO-v1.0/result_2024-05-27 20:51:31.json new file mode 100644 index 0000000000000000000000000000000000000000..8200a7bf4efebc99cb36d7d848f6876a87b0ef50 --- /dev/null +++ 
b/abhishekchohan/Yi-9B-Forest-DPO-v1.0/result_2024-05-27 20:51:31.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2781569965870307, + "acc_stderr": 0.013094469919538805, + "acc_norm": 0.34044368600682595, + "acc_norm_stderr": 0.013847460518892978 + }, + "harness|ko_hellaswag|10": { + "acc": 0.32732523401712804, + "acc_stderr": 0.004682780790508329, + "acc_norm": 0.401911969727146, + "acc_norm_stderr": 0.004892823415546543 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4678362573099415, + "acc_stderr": 0.038268824176603676, + "acc_norm": 0.4678362573099415, + "acc_norm_stderr": 0.038268824176603676 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6116504854368932, + "acc_stderr": 0.048257293373563895, + "acc_norm": 0.6116504854368932, + "acc_norm_stderr": 0.048257293373563895 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4559386973180077, + "acc_stderr": 0.017810403925435366, + "acc_norm": 0.4559386973180077, + "acc_norm_stderr": 0.017810403925435366 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.03785714465066653, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.03785714465066653 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4297872340425532, + "acc_stderr": 0.03236214467715564, + "acc_norm": 0.4297872340425532, + "acc_norm_stderr": 0.03236214467715564 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.41566265060240964, + "acc_stderr": 0.03836722176598053, + "acc_norm": 0.41566265060240964, + "acc_norm_stderr": 0.03836722176598053 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4630225080385852, + "acc_stderr": 0.028320325830105915, + "acc_norm": 0.4630225080385852, + "acc_norm_stderr": 0.028320325830105915 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.452914798206278, + 
"acc_stderr": 0.03340867501923325, + "acc_norm": 0.452914798206278, + "acc_norm_stderr": 0.03340867501923325 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5190839694656488, + "acc_stderr": 0.04382094705550988, + "acc_norm": 0.5190839694656488, + "acc_norm_stderr": 0.04382094705550988 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5707070707070707, + "acc_stderr": 0.03526552724601198, + "acc_norm": 0.5707070707070707, + "acc_norm_stderr": 0.03526552724601198 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5103448275862069, + "acc_stderr": 0.04165774775728762, + "acc_norm": 0.5103448275862069, + "acc_norm_stderr": 0.04165774775728762 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.04533838195929775, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.04533838195929775 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.542016806722689, + "acc_stderr": 0.03236361111951941, + "acc_norm": 0.542016806722689, + "acc_norm_stderr": 0.03236361111951941 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5025641025641026, + "acc_stderr": 0.025350672979412188, + "acc_norm": 0.5025641025641026, + "acc_norm_stderr": 0.025350672979412188 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.04803752235190193, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.04803752235190193 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 
0.4482758620689655, + "acc_stderr": 0.034991131376767445, + "acc_norm": 0.4482758620689655, + "acc_norm_stderr": 0.034991131376767445 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.46774193548387094, + "acc_stderr": 0.028384747788813326, + "acc_norm": 0.46774193548387094, + "acc_norm_stderr": 0.028384747788813326 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7222222222222222, + "acc_stderr": 0.02934311479809445, + "acc_norm": 0.7222222222222222, + "acc_norm_stderr": 0.02934311479809445 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.47547169811320755, + "acc_stderr": 0.030735822206205615, + "acc_norm": 0.47547169811320755, + "acc_norm_stderr": 0.030735822206205615 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4818181818181818, + "acc_stderr": 0.04785964010794916, + "acc_norm": 0.4818181818181818, + "acc_norm_stderr": 0.04785964010794916 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.37777777777777777, + "acc_stderr": 0.029560707392465725, + "acc_norm": 0.37777777777777777, + "acc_norm_stderr": 0.029560707392465725 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2781456953642384, + "acc_stderr": 0.03658603262763743, + "acc_norm": 0.2781456953642384, + "acc_norm_stderr": 0.03658603262763743 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.582089552238806, + "acc_stderr": 0.034875586404620636, + "acc_norm": 0.582089552238806, + "acc_norm_stderr": 0.034875586404620636 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4277456647398844, + "acc_stderr": 0.037724468575180276, + "acc_norm": 0.4277456647398844, + "acc_norm_stderr": 0.037724468575180276 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.47883597883597884, + "acc_stderr": 0.025728230952130723, + "acc_norm": 0.47883597883597884, + "acc_norm_stderr": 0.025728230952130723 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3541666666666667, + "acc_stderr": 0.039994111357535424, + "acc_norm": 0.3541666666666667, + 
"acc_norm_stderr": 0.039994111357535424 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.53, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.53, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.47109826589595377, + "acc_stderr": 0.02687408588351835, + "acc_norm": 0.47109826589595377, + "acc_norm_stderr": 0.02687408588351835 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4662576687116564, + "acc_stderr": 0.039194155450484096, + "acc_norm": 0.4662576687116564, + "acc_norm_stderr": 0.039194155450484096 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4783950617283951, + "acc_stderr": 0.027794760105008736, + "acc_norm": 0.4783950617283951, + "acc_norm_stderr": 0.027794760105008736 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.48186528497409326, + "acc_stderr": 0.03606065001832917, + "acc_norm": 0.48186528497409326, + "acc_norm_stderr": 0.03606065001832917 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3508771929824561, + "acc_stderr": 0.04489539350270698, + "acc_norm": 0.3508771929824561, + "acc_norm_stderr": 0.04489539350270698 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.48073394495412847, + "acc_stderr": 0.02142140298254888, + "acc_norm": 0.48073394495412847, + "acc_norm_stderr": 0.02142140298254888 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.044444444444444495, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.044444444444444495 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4542483660130719, + "acc_stderr": 0.028509807802626574, + "acc_norm": 
0.4542483660130719, + "acc_norm_stderr": 0.028509807802626574 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.043913262867240704, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.043913262867240704 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4342105263157895, + "acc_stderr": 0.040335656678483205, + "acc_norm": 0.4342105263157895, + "acc_norm_stderr": 0.040335656678483205 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3954248366013072, + "acc_stderr": 0.019780465954777532, + "acc_norm": 0.3954248366013072, + "acc_norm_stderr": 0.019780465954777532 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3546099290780142, + "acc_stderr": 0.028538650028878638, + "acc_norm": 0.3546099290780142, + "acc_norm_stderr": 0.028538650028878638 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.39285714285714285, + "acc_stderr": 0.04635550135609976, + "acc_norm": 0.39285714285714285, + "acc_norm_stderr": 0.04635550135609976 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.034093869469927006, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.034093869469927006 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2748603351955307, + "acc_stderr": 0.014931316703220508, + "acc_norm": 0.2748603351955307, + "acc_norm_stderr": 0.014931316703220508 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.47, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.47, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.7, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.7, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.34558823529411764, + 
"acc_stderr": 0.02888819310398864, + "acc_norm": 0.34558823529411764, + "acc_norm_stderr": 0.02888819310398864 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.563265306122449, + "acc_stderr": 0.03175195237583323, + "acc_norm": 0.563265306122449, + "acc_norm_stderr": 0.03175195237583323 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.45569620253164556, + "acc_stderr": 0.03241920684693334, + "acc_norm": 0.45569620253164556, + "acc_norm_stderr": 0.03241920684693334 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3428943937418514, + "acc_stderr": 0.012123463271585899, + "acc_norm": 0.3428943937418514, + "acc_norm_stderr": 0.012123463271585899 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4019607843137255, + "acc_stderr": 0.034411900234824655, + "acc_norm": 0.4019607843137255, + "acc_norm_stderr": 0.034411900234824655 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3878787878787879, + "acc_stderr": 0.038049136539710114, + "acc_norm": 0.3878787878787879, + "acc_norm_stderr": 0.038049136539710114 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3084455324357405, + "mc1_stderr": 0.01616803938315687, + "mc2": 0.4912942986477937, + "mc2_stderr": 0.015895360210150537 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.48406139315230223, + "acc_stderr": 0.017181617837190195, + "acc_norm": 0.5041322314049587, + "acc_norm_stderr": 0.017189767032130817 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + 
"harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + 
"harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "abhishekchohan/Yi-9B-Forest-DPO-v1.0", + "model_sha": "af6d3cc25c901619d118ebf616f7a5902413a4ea", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/ahnyeonchan/legendary-river-koalpaca/result_2024-04-17 05:08:56.json b/ahnyeonchan/legendary-river-koalpaca/result_2024-04-17 05:08:56.json new file mode 100644 index 0000000000000000000000000000000000000000..c18fed4134b8efac653133bdf44bb0788eace775 --- /dev/null +++ b/ahnyeonchan/legendary-river-koalpaca/result_2024-04-17 05:08:56.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.21416382252559726, + "acc_stderr": 0.011988383205966477, + "acc_norm": 0.2568259385665529, + "acc_norm_stderr": 0.0127669237941168 + }, + "harness|ko_hellaswag|10": { + "acc": 0.28579964150567616, + "acc_stderr": 0.004508710891053845, + "acc_norm": 0.3234415455088628, + "acc_norm_stderr": 0.0046683357254102935 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.22807017543859648, + "acc_stderr": 0.03218093795602357, + "acc_norm": 0.22807017543859648, + "acc_norm_stderr": 0.03218093795602357 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.21359223300970873, + "acc_stderr": 0.040580420156460344, + "acc_norm": 0.21359223300970873, + "acc_norm_stderr": 0.040580420156460344 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2796934865900383, + "acc_stderr": 0.01605079214803654, + "acc_norm": 0.2796934865900383, + "acc_norm_stderr": 0.01605079214803654 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.23703703703703705, + "acc_stderr": 0.03673731683969506, + "acc_norm": 0.23703703703703705, + "acc_norm_stderr": 0.03673731683969506 + 
}, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.20425531914893616, + "acc_stderr": 0.02635515841334943, + "acc_norm": 0.20425531914893616, + "acc_norm_stderr": 0.02635515841334943 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.2710843373493976, + "acc_stderr": 0.034605799075530255, + "acc_norm": 0.2710843373493976, + "acc_norm_stderr": 0.034605799075530255 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2315112540192926, + "acc_stderr": 0.023956532766639133, + "acc_norm": 0.2315112540192926, + "acc_norm_stderr": 0.023956532766639133 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.37668161434977576, + "acc_stderr": 0.032521134899291884, + "acc_norm": 0.37668161434977576, + "acc_norm_stderr": 0.032521134899291884 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.22137404580152673, + "acc_stderr": 0.0364129708131373, + "acc_norm": 0.22137404580152673, + "acc_norm_stderr": 0.0364129708131373 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.23232323232323232, + "acc_stderr": 0.030088629490217483, + "acc_norm": 0.23232323232323232, + "acc_norm_stderr": 0.030088629490217483 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.23448275862068965, + "acc_stderr": 0.035306258743465914, + "acc_norm": 0.23448275862068965, + "acc_norm_stderr": 0.035306258743465914 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.19607843137254902, + "acc_stderr": 0.03950581861179964, + "acc_norm": 0.19607843137254902, + "acc_norm_stderr": 0.03950581861179964 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.226890756302521, + "acc_stderr": 0.02720537153827947, + "acc_norm": 0.226890756302521, + 
"acc_norm_stderr": 0.02720537153827947 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.32051282051282054, + "acc_stderr": 0.023661296393964273, + "acc_norm": 0.32051282051282054, + "acc_norm_stderr": 0.023661296393964273 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909282, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909282 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.18518518518518517, + "acc_stderr": 0.03755265865037181, + "acc_norm": 0.18518518518518517, + "acc_norm_stderr": 0.03755265865037181 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.1921182266009852, + "acc_stderr": 0.02771931570961478, + "acc_norm": 0.1921182266009852, + "acc_norm_stderr": 0.02771931570961478 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3161290322580645, + "acc_stderr": 0.02645087448904276, + "acc_norm": 0.3161290322580645, + "acc_norm_stderr": 0.02645087448904276 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2264957264957265, + "acc_stderr": 0.027421007295392912, + "acc_norm": 0.2264957264957265, + "acc_norm_stderr": 0.027421007295392912 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2943396226415094, + "acc_stderr": 0.02804918631569524, + "acc_norm": 0.2943396226415094, + "acc_norm_stderr": 0.02804918631569524 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.19090909090909092, + "acc_stderr": 0.03764425585984926, + "acc_norm": 0.19090909090909092, + "acc_norm_stderr": 0.03764425585984926 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.026962424325073835, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.026962424325073835 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.23178807947019867, + "acc_stderr": 
0.034454062719870546, + "acc_norm": 0.23178807947019867, + "acc_norm_stderr": 0.034454062719870546 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.263681592039801, + "acc_stderr": 0.031157150869355547, + "acc_norm": 0.263681592039801, + "acc_norm_stderr": 0.031157150869355547 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2947976878612717, + "acc_stderr": 0.034765996075164785, + "acc_norm": 0.2947976878612717, + "acc_norm_stderr": 0.034765996075164785 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2566137566137566, + "acc_stderr": 0.022494510767503154, + "acc_norm": 0.2566137566137566, + "acc_norm_stderr": 0.022494510767503154 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2569444444444444, + "acc_stderr": 0.03653946969442099, + "acc_norm": 0.2569444444444444, + "acc_norm_stderr": 0.03653946969442099 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.15, + "acc_stderr": 0.03588702812826371, + "acc_norm": 0.15, + "acc_norm_stderr": 0.03588702812826371 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2398843930635838, + "acc_stderr": 0.022989592543123567, + "acc_norm": 0.2398843930635838, + "acc_norm_stderr": 0.022989592543123567 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.24539877300613497, + "acc_stderr": 0.03380939813943354, + "acc_norm": 0.24539877300613497, + "acc_norm_stderr": 0.03380939813943354 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.24382716049382716, + "acc_stderr": 0.023891879541959607, + "acc_norm": 0.24382716049382716, + "acc_norm_stderr": 0.023891879541959607 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.2849740932642487, + 
"acc_stderr": 0.0325771407770966, + "acc_norm": 0.2849740932642487, + "acc_norm_stderr": 0.0325771407770966 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.040969851398436695, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.040969851398436695 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.22752293577981653, + "acc_stderr": 0.0179744635787765, + "acc_norm": 0.22752293577981653, + "acc_norm_stderr": 0.0179744635787765 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.040406101782088394, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.040406101782088394 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.22875816993464052, + "acc_stderr": 0.024051029739912248, + "acc_norm": 0.22875816993464052, + "acc_norm_stderr": 0.024051029739912248 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.256198347107438, + "acc_stderr": 0.039849796533028704, + "acc_norm": 0.256198347107438, + "acc_norm_stderr": 0.039849796533028704 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.23026315789473684, + "acc_stderr": 0.03426059424403165, + "acc_norm": 0.23026315789473684, + "acc_norm_stderr": 0.03426059424403165 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2173202614379085, + "acc_stderr": 0.016684820929148598, + "acc_norm": 0.2173202614379085, + "acc_norm_stderr": 0.016684820929148598 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2624113475177305, + "acc_stderr": 0.026244920349842996, + "acc_norm": 0.2624113475177305, + "acc_norm_stderr": 0.026244920349842996 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.17857142857142858, + "acc_stderr": 0.036352091215778065, + "acc_norm": 0.17857142857142858, + "acc_norm_stderr": 0.036352091215778065 + }, + 
"harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4537037037037037, + "acc_stderr": 0.033953227263757976, + "acc_norm": 0.4537037037037037, + "acc_norm_stderr": 0.033953227263757976 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27262569832402234, + "acc_stderr": 0.014893391735249608, + "acc_norm": 0.27262569832402234, + "acc_norm_stderr": 0.014893391735249608 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3786764705882353, + "acc_stderr": 0.02946513363977613, + "acc_norm": 0.3786764705882353, + "acc_norm_stderr": 0.02946513363977613 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.40408163265306124, + "acc_stderr": 0.0314147080258659, + "acc_norm": 0.40408163265306124, + "acc_norm_stderr": 0.0314147080258659 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.25738396624472576, + "acc_stderr": 0.028458820991460305, + "acc_norm": 0.25738396624472576, + "acc_norm_stderr": 0.028458820991460305 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2405475880052151, + "acc_stderr": 0.010916406735478947, + "acc_norm": 0.2405475880052151, + "acc_norm_stderr": 0.010916406735478947 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.27941176470588236, + "acc_stderr": 0.031493281045079556, + "acc_norm": 0.27941176470588236, + "acc_norm_stderr": 0.031493281045079556 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.21212121212121213, + "acc_stderr": 0.03192271569548299, + "acc_norm": 0.21212121212121213, + "acc_norm_stderr": 0.03192271569548299 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2607099143206854, + "mc1_stderr": 0.015368841620766379, 
+ "mc2": 0.4418261893197511, + "mc2_stderr": 0.015666438546683932 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.26564344746162927, + "acc_stderr": 0.015185107107791248, + "acc_norm": 0.3246753246753247, + "acc_norm_stderr": 0.016098883939346456 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + 
"harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "ahnyeonchan/legendary-river-koalpaca", + "model_sha": "bb5a59f8541e8770242ce3386b50c237e5f9f543", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/ai-human-lab/EEVE-Korean-10.8B-DPO-v1.0/result_2024-03-18 12:05:08.json b/ai-human-lab/EEVE-Korean-10.8B-DPO-v1.0/result_2024-03-18 12:05:08.json new file mode 100644 index 0000000000000000000000000000000000000000..0cacad553ceedc73ad2a3b4d5eb8172cb34eac06 --- /dev/null +++ b/ai-human-lab/EEVE-Korean-10.8B-DPO-v1.0/result_2024-03-18 12:05:08.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.21075085324232082, + "acc_stderr": 0.011918271754852175, + "acc_norm": 0.24573378839590443, + "acc_norm_stderr": 0.012581033453730118 + }, + "harness|ko_hellaswag|10": { + "acc": 0.25014937263493325, + "acc_stderr": 0.004322137759696175, + "acc_norm": 0.2440748854809799, + 
"acc_norm_stderr": 0.004286594977390909 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4269005847953216, + "acc_stderr": 0.03793620616529917, + "acc_norm": 0.4269005847953216, + "acc_norm_stderr": 0.03793620616529917 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2621359223300971, + "acc_stderr": 0.043546310772605956, + "acc_norm": 0.2621359223300971, + "acc_norm_stderr": 0.043546310772605956 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.31928480204342274, + "acc_stderr": 0.016671261749538726, + "acc_norm": 0.31928480204342274, + "acc_norm_stderr": 0.016671261749538726 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.28888888888888886, + "acc_stderr": 0.0391545063041425, + "acc_norm": 0.28888888888888886, + "acc_norm_stderr": 0.0391545063041425 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.28085106382978725, + "acc_stderr": 0.029379170464124825, + "acc_norm": 0.28085106382978725, + "acc_norm_stderr": 0.029379170464124825 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.25903614457831325, + "acc_stderr": 0.03410646614071856, + "acc_norm": 0.25903614457831325, + "acc_norm_stderr": 0.03410646614071856 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2958199356913183, + "acc_stderr": 0.025922371788818777, + "acc_norm": 0.2958199356913183, + "acc_norm_stderr": 0.025922371788818777 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.35874439461883406, + "acc_stderr": 0.03219079200419994, + "acc_norm": 0.35874439461883406, + "acc_norm_stderr": 0.03219079200419994 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2824427480916031, + "acc_stderr": 0.03948406125768361, + "acc_norm": 0.2824427480916031, + "acc_norm_stderr": 0.03948406125768361 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + 
"acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.26262626262626265, + "acc_stderr": 0.03135305009533086, + "acc_norm": 0.26262626262626265, + "acc_norm_stderr": 0.03135305009533086 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2689655172413793, + "acc_stderr": 0.03695183311650232, + "acc_norm": 0.2689655172413793, + "acc_norm_stderr": 0.03695183311650232 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.041583075330832865, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.041583075330832865 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.028657491285071956, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.028657491285071956 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2717948717948718, + "acc_stderr": 0.022556551010132368, + "acc_norm": 0.2717948717948718, + "acc_norm_stderr": 0.022556551010132368 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542126, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542126 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.044531975073749834, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.044531975073749834 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.19704433497536947, + "acc_stderr": 0.02798672466673622, + "acc_norm": 0.19704433497536947, + "acc_norm_stderr": 0.02798672466673622 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.23225806451612904, + "acc_stderr": 0.024022256130308235, + "acc_norm": 0.23225806451612904, + "acc_norm_stderr": 0.024022256130308235 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.41452991452991456, + "acc_stderr": 
0.03227396567623778, + "acc_norm": 0.41452991452991456, + "acc_norm_stderr": 0.03227396567623778 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.24528301886792453, + "acc_stderr": 0.026480357179895702, + "acc_norm": 0.24528301886792453, + "acc_norm_stderr": 0.026480357179895702 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2909090909090909, + "acc_stderr": 0.04350271442923243, + "acc_norm": 0.2909090909090909, + "acc_norm_stderr": 0.04350271442923243 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.025348097468097828, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.025348097468097828 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.23178807947019867, + "acc_stderr": 0.03445406271987054, + "acc_norm": 0.23178807947019867, + "acc_norm_stderr": 0.03445406271987054 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.2736318407960199, + "acc_stderr": 0.03152439186555401, + "acc_norm": 0.2736318407960199, + "acc_norm_stderr": 0.03152439186555401 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.23699421965317918, + "acc_stderr": 0.03242414757483098, + "acc_norm": 0.23699421965317918, + "acc_norm_stderr": 0.03242414757483098 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.21693121693121692, + "acc_stderr": 0.021227082449445055, + "acc_norm": 0.21693121693121692, + "acc_norm_stderr": 0.021227082449445055 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3541666666666667, + "acc_stderr": 0.039994111357535424, + "acc_norm": 0.3541666666666667, + "acc_norm_stderr": 0.039994111357535424 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_moral_disputes|5": { + 
"acc": 0.31213872832369943, + "acc_stderr": 0.024946792225272314, + "acc_norm": 0.31213872832369943, + "acc_norm_stderr": 0.024946792225272314 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.31901840490797545, + "acc_stderr": 0.03661997551073836, + "acc_norm": 0.31901840490797545, + "acc_norm_stderr": 0.03661997551073836 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.02622964917882116, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.02622964917882116 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.26424870466321243, + "acc_stderr": 0.03182155050916646, + "acc_norm": 0.26424870466321243, + "acc_norm_stderr": 0.03182155050916646 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.040493392977481404, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.040493392977481404 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.27339449541284405, + "acc_stderr": 0.01910929984609828, + "acc_norm": 0.27339449541284405, + "acc_norm_stderr": 0.01910929984609828 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30158730158730157, + "acc_stderr": 0.04104947269903394, + "acc_norm": 0.30158730158730157, + "acc_norm_stderr": 0.04104947269903394 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2875816993464052, + "acc_stderr": 0.02591780611714716, + "acc_norm": 0.2875816993464052, + "acc_norm_stderr": 0.02591780611714716 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2975206611570248, + "acc_stderr": 0.041733491480835, + "acc_norm": 0.2975206611570248, + "acc_norm_stderr": 0.041733491480835 + }, + 
"harness|ko_mmlu_astronomy|5": { + "acc": 0.27631578947368424, + "acc_stderr": 0.03639057569952925, + "acc_norm": 0.27631578947368424, + "acc_norm_stderr": 0.03639057569952925 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.29248366013071897, + "acc_stderr": 0.018403415710109786, + "acc_norm": 0.29248366013071897, + "acc_norm_stderr": 0.018403415710109786 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2872340425531915, + "acc_stderr": 0.02699219917306436, + "acc_norm": 0.2872340425531915, + "acc_norm_stderr": 0.02699219917306436 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.32142857142857145, + "acc_stderr": 0.04432804055291519, + "acc_norm": 0.32142857142857145, + "acc_norm_stderr": 0.04432804055291519 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.1712962962962963, + "acc_stderr": 0.0256953416438247, + "acc_norm": 0.1712962962962963, + "acc_norm_stderr": 0.0256953416438247 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.22793296089385476, + "acc_stderr": 0.014030149950805098, + "acc_norm": 0.22793296089385476, + "acc_norm_stderr": 0.014030149950805098 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.22794117647058823, + "acc_stderr": 0.025483081468029804, + "acc_norm": 0.22794117647058823, + "acc_norm_stderr": 0.025483081468029804 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.22040816326530613, + "acc_stderr": 0.026537045312145277, + "acc_norm": 0.22040816326530613, + "acc_norm_stderr": 0.026537045312145277 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.3037974683544304, + "acc_stderr": 0.02993669638713862, + 
"acc_norm": 0.3037974683544304, + "acc_norm_stderr": 0.02993669638713862 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2646675358539765, + "acc_stderr": 0.011267332992845535, + "acc_norm": 0.2646675358539765, + "acc_norm_stderr": 0.011267332992845535 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.03166009679399813, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.03166009679399813 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3212121212121212, + "acc_stderr": 0.03646204963253811, + "acc_norm": 0.3212121212121212, + "acc_norm_stderr": 0.03646204963253811 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2484700122399021, + "mc1_stderr": 0.015127427096520674, + "mc2": NaN, + "mc2_stderr": NaN + }, + "harness|ko_commongen_v2|2": { + "acc": 0.1192443919716647, + "acc_stderr": 0.011141957788065315, + "acc_norm": 0.3837072018890201, + "acc_norm_stderr": 0.016718924637231826 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + 
"harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "ai-human-lab/EEVE-Korean-10.8B-DPO-v1.0", + "model_sha": "42d4c61bc87461447287824f701b11a6e21f62d6", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } 
+} \ No newline at end of file diff --git a/ai-human-lab/EEVE-Korean-10.8B-RAFT/result_2024-06-27 13:43:51.json b/ai-human-lab/EEVE-Korean-10.8B-RAFT/result_2024-06-27 13:43:51.json new file mode 100644 index 0000000000000000000000000000000000000000..ea3796038f31b5b092977f4865268c5cea9b5eb7 --- /dev/null +++ b/ai-human-lab/EEVE-Korean-10.8B-RAFT/result_2024-06-27 13:43:51.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.47525597269624575, + "acc_stderr": 0.014593487694937735, + "acc_norm": 0.5332764505119454, + "acc_norm_stderr": 0.014578995859605811 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4654451304521012, + "acc_stderr": 0.004977851161904399, + "acc_norm": 0.6291575383389763, + "acc_norm_stderr": 0.004820431839600023 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6842105263157895, + "acc_stderr": 0.03565079670708311, + "acc_norm": 0.6842105263157895, + "acc_norm_stderr": 0.03565079670708311 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6699029126213593, + "acc_stderr": 0.04656147110012352, + "acc_norm": 0.6699029126213593, + "acc_norm_stderr": 0.04656147110012352 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.7113665389527458, + "acc_stderr": 0.016203792703197807, + "acc_norm": 0.7113665389527458, + "acc_norm_stderr": 0.016203792703197807 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.5185185185185185, + "acc_stderr": 0.043163785995113245, + "acc_norm": 0.5185185185185185, + "acc_norm_stderr": 0.043163785995113245 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.5191489361702127, + "acc_stderr": 0.0326620429906468, + "acc_norm": 0.5191489361702127, + "acc_norm_stderr": 0.0326620429906468 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.5120481927710844, + "acc_stderr": 0.03891364495835817, + "acc_norm": 0.5120481927710844, + 
"acc_norm_stderr": 0.03891364495835817 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6109324758842444, + "acc_stderr": 0.027690337536485372, + "acc_norm": 0.6109324758842444, + "acc_norm_stderr": 0.027690337536485372 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5739910313901345, + "acc_stderr": 0.03318833286217281, + "acc_norm": 0.5739910313901345, + "acc_norm_stderr": 0.03318833286217281 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6106870229007634, + "acc_stderr": 0.042764865428145914, + "acc_norm": 0.6106870229007634, + "acc_norm_stderr": 0.042764865428145914 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.45, + "acc_stderr": 0.04999999999999999, + "acc_norm": 0.45, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7929292929292929, + "acc_stderr": 0.02886977846026705, + "acc_norm": 0.7929292929292929, + "acc_norm_stderr": 0.02886977846026705 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5724137931034483, + "acc_stderr": 0.041227371113703316, + "acc_norm": 0.5724137931034483, + "acc_norm_stderr": 0.041227371113703316 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.04617034827006716, + "acc_norm": 0.3137254901960784, + "acc_norm_stderr": 0.04617034827006716 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6092436974789915, + "acc_stderr": 0.03169380235712997, + "acc_norm": 0.6092436974789915, + "acc_norm_stderr": 0.03169380235712997 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5692307692307692, + "acc_stderr": 0.02510682066053976, + "acc_norm": 0.5692307692307692, + "acc_norm_stderr": 0.02510682066053976 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.59, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.59, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 
0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6851851851851852, + "acc_stderr": 0.04489931073591312, + "acc_norm": 0.6851851851851852, + "acc_norm_stderr": 0.04489931073591312 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3842364532019704, + "acc_stderr": 0.034223985656575494, + "acc_norm": 0.3842364532019704, + "acc_norm_stderr": 0.034223985656575494 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6387096774193548, + "acc_stderr": 0.02732754844795754, + "acc_norm": 0.6387096774193548, + "acc_norm_stderr": 0.02732754844795754 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.8076923076923077, + "acc_stderr": 0.025819233256483713, + "acc_norm": 0.8076923076923077, + "acc_norm_stderr": 0.025819233256483713 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.6113207547169811, + "acc_stderr": 0.030000485448675986, + "acc_norm": 0.6113207547169811, + "acc_norm_stderr": 0.030000485448675986 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6181818181818182, + "acc_stderr": 0.046534298079135075, + "acc_norm": 0.6181818181818182, + "acc_norm_stderr": 0.046534298079135075 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.34814814814814815, + "acc_stderr": 0.029045600290616258, + "acc_norm": 0.34814814814814815, + "acc_norm_stderr": 0.029045600290616258 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3841059602649007, + "acc_stderr": 0.03971301814719198, + "acc_norm": 0.3841059602649007, + "acc_norm_stderr": 0.03971301814719198 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7313432835820896, + "acc_stderr": 0.031343283582089536, + "acc_norm": 0.7313432835820896, + "acc_norm_stderr": 0.031343283582089536 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5144508670520231, + "acc_stderr": 0.03810871630454764, + "acc_norm": 0.5144508670520231, + "acc_norm_stderr": 0.03810871630454764 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + 
"acc": 0.4312169312169312, + "acc_stderr": 0.025506481698138208, + "acc_norm": 0.4312169312169312, + "acc_norm_stderr": 0.025506481698138208 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5763888888888888, + "acc_stderr": 0.041321250197233685, + "acc_norm": 0.5763888888888888, + "acc_norm_stderr": 0.041321250197233685 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.79, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.79, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.6127167630057804, + "acc_stderr": 0.026226158605124655, + "acc_norm": 0.6127167630057804, + "acc_norm_stderr": 0.026226158605124655 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5950920245398773, + "acc_stderr": 0.03856672163548913, + "acc_norm": 0.5950920245398773, + "acc_norm_stderr": 0.03856672163548913 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6080246913580247, + "acc_stderr": 0.027163686038271143, + "acc_norm": 0.6080246913580247, + "acc_norm_stderr": 0.027163686038271143 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7305699481865285, + "acc_stderr": 0.03201867122877794, + "acc_norm": 0.7305699481865285, + "acc_norm_stderr": 0.03201867122877794 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.4473684210526316, + "acc_stderr": 0.04677473004491199, + "acc_norm": 0.4473684210526316, + "acc_norm_stderr": 0.04677473004491199 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.7137614678899082, + "acc_stderr": 0.019379436628919975, + "acc_norm": 0.7137614678899082, + "acc_norm_stderr": 0.019379436628919975 + }, + 
"harness|ko_mmlu_formal_logic|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.044444444444444495, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.044444444444444495 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.6176470588235294, + "acc_stderr": 0.027826109307283683, + "acc_norm": 0.6176470588235294, + "acc_norm_stderr": 0.027826109307283683 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.58, + "acc_stderr": 0.04960449637488583, + "acc_norm": 0.58, + "acc_norm_stderr": 0.04960449637488583 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6859504132231405, + "acc_stderr": 0.04236964753041018, + "acc_norm": 0.6859504132231405, + "acc_norm_stderr": 0.04236964753041018 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.6052631578947368, + "acc_stderr": 0.03977749934622073, + "acc_norm": 0.6052631578947368, + "acc_norm_stderr": 0.03977749934622073 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5147058823529411, + "acc_stderr": 0.020219083895133924, + "acc_norm": 0.5147058823529411, + "acc_norm_stderr": 0.020219083895133924 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3546099290780142, + "acc_stderr": 0.02853865002887865, + "acc_norm": 0.3546099290780142, + "acc_norm_stderr": 0.02853865002887865 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.39285714285714285, + "acc_stderr": 0.04635550135609976, + "acc_norm": 0.39285714285714285, + "acc_norm_stderr": 0.04635550135609976 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5231481481481481, + "acc_stderr": 0.034063153607115065, + "acc_norm": 0.5231481481481481, + "acc_norm_stderr": 0.034063153607115065 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.19553072625698323, + "acc_stderr": 0.013264579220945092, + "acc_norm": 0.19553072625698323, + "acc_norm_stderr": 0.013264579220945092 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + 
"acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.71, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.71, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5294117647058824, + "acc_stderr": 0.03032024326500413, + "acc_norm": 0.5294117647058824, + "acc_norm_stderr": 0.03032024326500413 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.689795918367347, + "acc_stderr": 0.029613459872484378, + "acc_norm": 0.689795918367347, + "acc_norm_stderr": 0.029613459872484378 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7679324894514767, + "acc_stderr": 0.02747974455080852, + "acc_norm": 0.7679324894514767, + "acc_norm_stderr": 0.02747974455080852 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.41199478487614083, + "acc_stderr": 0.012570871032146068, + "acc_norm": 0.41199478487614083, + "acc_norm_stderr": 0.012570871032146068 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.7058823529411765, + "acc_stderr": 0.03198001660115072, + "acc_norm": 0.7058823529411765, + "acc_norm_stderr": 0.03198001660115072 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.7090909090909091, + "acc_stderr": 0.03546563019624336, + "acc_norm": 0.7090909090909091, + "acc_norm_stderr": 0.03546563019624336 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.30354957160342716, + "mc1_stderr": 0.016095884155386847, + "mc2": 0.47199151404000517, + "mc2_stderr": 0.015537832542459913 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5489964580873672, + "acc_stderr": 0.017107618859549343, + "acc_norm": 0.5690672963400236, + "acc_norm_stderr": 0.017025558196043143 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + 
"harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + 
"harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "ai-human-lab/EEVE-Korean-10.8B-RAFT", + "model_sha": "2f6337e3c439f81672cb75e33d4338fab7155f42", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/ai-human-lab/EEVE-Korean-10.8B-SFT-v0.1/result_2024-03-20 04:06:48.json b/ai-human-lab/EEVE-Korean-10.8B-SFT-v0.1/result_2024-03-20 04:06:48.json new file mode 100644 index 0000000000000000000000000000000000000000..9cca389e8117e63e25cf3a53e0c7396e2e836772 --- /dev/null +++ b/ai-human-lab/EEVE-Korean-10.8B-SFT-v0.1/result_2024-03-20 04:06:48.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.21416382252559726, + "acc_stderr": 0.011988383205966483, + "acc_norm": 0.27559726962457337, + "acc_norm_stderr": 0.013057169655761838 + }, + "harness|ko_hellaswag|10": { + "acc": 0.24497112129057957, + "acc_stderr": 0.004291911350430716, + "acc_norm": 0.23610834495120495, + "acc_norm_stderr": 0.004238215815533088 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.21052631578947367, + "acc_stderr": 0.0312678171466318, + "acc_norm": 0.21052631578947367, + "acc_norm_stderr": 0.0312678171466318 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2524271844660194, + "acc_stderr": 0.04301250399690877, + "acc_norm": 0.2524271844660194, + "acc_norm_stderr": 0.04301250399690877 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 
0.28607918263090676, + "acc_stderr": 0.01616087140512753, + "acc_norm": 0.28607918263090676, + "acc_norm_stderr": 0.01616087140512753 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.24444444444444444, + "acc_stderr": 0.03712537833614866, + "acc_norm": 0.24444444444444444, + "acc_norm_stderr": 0.03712537833614866 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768077, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768077 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.32340425531914896, + "acc_stderr": 0.03057944277361034, + "acc_norm": 0.32340425531914896, + "acc_norm_stderr": 0.03057944277361034 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3192771084337349, + "acc_stderr": 0.03629335329947859, + "acc_norm": 0.3192771084337349, + "acc_norm_stderr": 0.03629335329947859 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2733118971061093, + "acc_stderr": 0.025311765975426115, + "acc_norm": 0.2733118971061093, + "acc_norm_stderr": 0.025311765975426115 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.37668161434977576, + "acc_stderr": 0.03252113489929189, + "acc_norm": 0.37668161434977576, + "acc_norm_stderr": 0.03252113489929189 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.22900763358778625, + "acc_stderr": 0.036853466317118506, + "acc_norm": 0.22900763358778625, + "acc_norm_stderr": 0.036853466317118506 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768077, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768077 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.21717171717171718, + "acc_stderr": 0.02937661648494563, + "acc_norm": 0.21717171717171718, + "acc_norm_stderr": 0.02937661648494563 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2206896551724138, + "acc_stderr": 0.03455930201924811, + "acc_norm": 0.2206896551724138, + "acc_norm_stderr": 0.03455930201924811 + }, + "harness|ko_mmlu_college_physics|5": 
{ + "acc": 0.19607843137254902, + "acc_stderr": 0.03950581861179962, + "acc_norm": 0.19607843137254902, + "acc_norm_stderr": 0.03950581861179962 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.23109243697478993, + "acc_stderr": 0.027381406927868973, + "acc_norm": 0.23109243697478993, + "acc_norm_stderr": 0.027381406927868973 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2205128205128205, + "acc_stderr": 0.021020672680827912, + "acc_norm": 0.2205128205128205, + "acc_norm_stderr": 0.021020672680827912 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.044143436668549335, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.044143436668549335 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.270935960591133, + "acc_stderr": 0.031270907132976984, + "acc_norm": 0.270935960591133, + "acc_norm_stderr": 0.031270907132976984 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.25483870967741934, + "acc_stderr": 0.024790118459332208, + "acc_norm": 0.25483870967741934, + "acc_norm_stderr": 0.024790118459332208 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2564102564102564, + "acc_stderr": 0.02860595370200424, + "acc_norm": 0.2564102564102564, + "acc_norm_stderr": 0.02860595370200424 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2679245283018868, + "acc_stderr": 0.027257260322494845, + "acc_norm": 0.2679245283018868, + "acc_norm_stderr": 0.027257260322494845 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.34545454545454546, + "acc_stderr": 0.04554619617541054, + "acc_norm": 0.34545454545454546, + "acc_norm_stderr": 
0.04554619617541054 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.02684205787383371, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.02684205787383371 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.1986754966887417, + "acc_stderr": 0.03257847384436776, + "acc_norm": 0.1986754966887417, + "acc_norm_stderr": 0.03257847384436776 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.23880597014925373, + "acc_stderr": 0.030147775935409224, + "acc_norm": 0.23880597014925373, + "acc_norm_stderr": 0.030147775935409224 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.20809248554913296, + "acc_stderr": 0.030952890217749895, + "acc_norm": 0.20809248554913296, + "acc_norm_stderr": 0.030952890217749895 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2566137566137566, + "acc_stderr": 0.022494510767503154, + "acc_norm": 0.2566137566137566, + "acc_norm_stderr": 0.022494510767503154 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.03476590104304134, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.03476590104304134 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816507, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816507 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.24566473988439305, + "acc_stderr": 0.02317629820399201, + "acc_norm": 0.24566473988439305, + "acc_norm_stderr": 0.02317629820399201 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.24539877300613497, + "acc_stderr": 0.03380939813943353, + "acc_norm": 0.24539877300613497, + "acc_norm_stderr": 0.03380939813943353 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2654320987654321, + "acc_stderr": 0.02456922360046085, + "acc_norm": 
0.2654320987654321, + "acc_norm_stderr": 0.02456922360046085 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.20725388601036268, + "acc_stderr": 0.029252823291803627, + "acc_norm": 0.20725388601036268, + "acc_norm_stderr": 0.029252823291803627 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.042270544512321984, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.042270544512321984 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.23486238532110093, + "acc_stderr": 0.018175110510343567, + "acc_norm": 0.23486238532110093, + "acc_norm_stderr": 0.018175110510343567 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.1984126984126984, + "acc_stderr": 0.03567016675276862, + "acc_norm": 0.1984126984126984, + "acc_norm_stderr": 0.03567016675276862 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.24183006535947713, + "acc_stderr": 0.024518195641879334, + "acc_norm": 0.24183006535947713, + "acc_norm_stderr": 0.024518195641879334 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.24793388429752067, + "acc_stderr": 0.039418975265163025, + "acc_norm": 0.24793388429752067, + "acc_norm_stderr": 0.039418975265163025 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.18421052631578946, + "acc_stderr": 0.031546980450822305, + "acc_norm": 0.18421052631578946, + "acc_norm_stderr": 0.031546980450822305 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2565359477124183, + "acc_stderr": 0.017667841612378984, + "acc_norm": 0.2565359477124183, + "acc_norm_stderr": 0.017667841612378984 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 
0.2553191489361702, + "acc_stderr": 0.02601199293090201, + "acc_norm": 0.2553191489361702, + "acc_norm_stderr": 0.02601199293090201 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.04287858751340456, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.04287858751340456 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.16203703703703703, + "acc_stderr": 0.025130453652268455, + "acc_norm": 0.16203703703703703, + "acc_norm_stderr": 0.025130453652268455 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.15, + "acc_stderr": 0.0358870281282637, + "acc_norm": 0.15, + "acc_norm_stderr": 0.0358870281282637 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816507, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816507 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.20220588235294118, + "acc_stderr": 0.02439819298665492, + "acc_norm": 0.20220588235294118, + "acc_norm_stderr": 0.02439819298665492 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.17142857142857143, + "acc_stderr": 0.024127463462650146, + "acc_norm": 0.17142857142857143, + "acc_norm_stderr": 0.024127463462650146 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2616033755274262, + "acc_stderr": 0.028609516716994934, + "acc_norm": 0.2616033755274262, + "acc_norm_stderr": 0.028609516716994934 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2392438070404172, + "acc_stderr": 0.010896123652676653, + "acc_norm": 0.2392438070404172, + "acc_norm_stderr": 0.010896123652676653 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.029771775228145638, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 
0.029771775228145638 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.24242424242424243, + "acc_stderr": 0.033464098810559534, + "acc_norm": 0.24242424242424243, + "acc_norm_stderr": 0.033464098810559534 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2558139534883721, + "mc1_stderr": 0.01527417621928336, + "mc2": NaN, + "mc2_stderr": NaN + }, + "harness|ko_commongen_v2|2": { + "acc": 0.10153482880755609, + "acc_stderr": 0.010384198041619998, + "acc_norm": 0.3116883116883117, + "acc_norm_stderr": 0.015924567607358334 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + 
"harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "ai-human-lab/EEVE-Korean-10.8B-SFT-v0.1", + "model_sha": "b2403c9c73e9827ff28f4ec8f04595271b058185", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/ai-human-lab/EEVE-Korean-10.8B-instruct-v4-sft/result_2024-07-02 04:46:13.json b/ai-human-lab/EEVE-Korean-10.8B-instruct-v4-sft/result_2024-07-02 04:46:13.json new file mode 100644 index 0000000000000000000000000000000000000000..48a4c7a31fb6fc560a327bc4dc9abc40d1de4ec3 --- /dev/null +++ b/ai-human-lab/EEVE-Korean-10.8B-instruct-v4-sft/result_2024-07-02 04:46:13.json @@ -0,0 +1,444 @@ +{ 
+ "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.48976109215017066, + "acc_stderr": 0.014608326906285019, + "acc_norm": 0.5349829351535836, + "acc_norm_stderr": 0.014575583922019662 + }, + "harness|ko_hellaswag|10": { + "acc": 0.46395140410276836, + "acc_stderr": 0.004976796060456436, + "acc_norm": 0.6303525194184425, + "acc_norm_stderr": 0.004817227292240275 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.03615507630310935, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.03615507630310935 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6990291262135923, + "acc_stderr": 0.045416094465039476, + "acc_norm": 0.6990291262135923, + "acc_norm_stderr": 0.045416094465039476 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.7164750957854407, + "acc_stderr": 0.016117318166832293, + "acc_norm": 0.7164750957854407, + "acc_norm_stderr": 0.016117318166832293 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4888888888888889, + "acc_stderr": 0.04318275491977976, + "acc_norm": 0.4888888888888889, + "acc_norm_stderr": 0.04318275491977976 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768077, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768077 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.5404255319148936, + "acc_stderr": 0.032579014820998335, + "acc_norm": 0.5404255319148936, + "acc_norm_stderr": 0.032579014820998335 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.5060240963855421, + "acc_stderr": 0.038922121953330446, + "acc_norm": 0.5060240963855421, + "acc_norm_stderr": 0.038922121953330446 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6237942122186495, + "acc_stderr": 0.02751392568354943, + "acc_norm": 0.6237942122186495, + "acc_norm_stderr": 0.02751392568354943 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5739910313901345, + "acc_stderr": 0.03318833286217281, + "acc_norm": 0.5739910313901345, + "acc_norm_stderr": 
0.03318833286217281 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6259541984732825, + "acc_stderr": 0.04243869242230523, + "acc_norm": 0.6259541984732825, + "acc_norm_stderr": 0.04243869242230523 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7727272727272727, + "acc_stderr": 0.02985751567338641, + "acc_norm": 0.7727272727272727, + "acc_norm_stderr": 0.02985751567338641 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5310344827586206, + "acc_stderr": 0.04158632762097828, + "acc_norm": 0.5310344827586206, + "acc_norm_stderr": 0.04158632762097828 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3235294117647059, + "acc_stderr": 0.04655010411319616, + "acc_norm": 0.3235294117647059, + "acc_norm_stderr": 0.04655010411319616 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6176470588235294, + "acc_stderr": 0.03156663099215415, + "acc_norm": 0.6176470588235294, + "acc_norm_stderr": 0.03156663099215415 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5564102564102564, + "acc_stderr": 0.025189149894764215, + "acc_norm": 0.5564102564102564, + "acc_norm_stderr": 0.025189149894764215 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.04557239513497752, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.04557239513497752 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4039408866995074, + "acc_stderr": 0.0345245390382204, + "acc_norm": 0.4039408866995074, + 
"acc_norm_stderr": 0.0345245390382204 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6612903225806451, + "acc_stderr": 0.026923446059302834, + "acc_norm": 0.6612903225806451, + "acc_norm_stderr": 0.026923446059302834 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.8076923076923077, + "acc_stderr": 0.025819233256483717, + "acc_norm": 0.8076923076923077, + "acc_norm_stderr": 0.025819233256483717 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5962264150943396, + "acc_stderr": 0.03019761160019795, + "acc_norm": 0.5962264150943396, + "acc_norm_stderr": 0.03019761160019795 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5909090909090909, + "acc_stderr": 0.04709306978661895, + "acc_norm": 0.5909090909090909, + "acc_norm_stderr": 0.04709306978661895 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.34814814814814815, + "acc_stderr": 0.029045600290616258, + "acc_norm": 0.34814814814814815, + "acc_norm_stderr": 0.029045600290616258 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3841059602649007, + "acc_stderr": 0.03971301814719198, + "acc_norm": 0.3841059602649007, + "acc_norm_stderr": 0.03971301814719198 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7412935323383084, + "acc_stderr": 0.030965903123573033, + "acc_norm": 0.7412935323383084, + "acc_norm_stderr": 0.030965903123573033 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5202312138728323, + "acc_stderr": 0.03809342081273956, + "acc_norm": 0.5202312138728323, + "acc_norm_stderr": 0.03809342081273956 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.41798941798941797, + "acc_stderr": 0.025402555503260912, + "acc_norm": 0.41798941798941797, + "acc_norm_stderr": 0.025402555503260912 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5486111111111112, + "acc_stderr": 0.04161402398403279, + "acc_norm": 0.5486111111111112, + "acc_norm_stderr": 0.04161402398403279 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 
0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.81, + "acc_stderr": 0.03942772444036623, + "acc_norm": 0.81, + "acc_norm_stderr": 0.03942772444036623 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.6242774566473989, + "acc_stderr": 0.026074314851657083, + "acc_norm": 0.6242774566473989, + "acc_norm_stderr": 0.026074314851657083 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5705521472392638, + "acc_stderr": 0.038890666191127236, + "acc_norm": 0.5705521472392638, + "acc_norm_stderr": 0.038890666191127236 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6234567901234568, + "acc_stderr": 0.026959344518747784, + "acc_norm": 0.6234567901234568, + "acc_norm_stderr": 0.026959344518747784 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7150259067357513, + "acc_stderr": 0.032577140777096614, + "acc_norm": 0.7150259067357513, + "acc_norm_stderr": 0.032577140777096614 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.43859649122807015, + "acc_stderr": 0.04668000738510455, + "acc_norm": 0.43859649122807015, + "acc_norm_stderr": 0.04668000738510455 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.7376146788990826, + "acc_stderr": 0.018861885021534738, + "acc_norm": 0.7376146788990826, + "acc_norm_stderr": 0.018861885021534738 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.42063492063492064, + "acc_stderr": 0.04415438226743744, + "acc_norm": 0.42063492063492064, + "acc_norm_stderr": 0.04415438226743744 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.6209150326797386, + "acc_stderr": 0.02778014120702333, + "acc_norm": 0.6209150326797386, + "acc_norm_stderr": 0.02778014120702333 + }, + "harness|ko_mmlu_business_ethics|5": { + 
"acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7107438016528925, + "acc_stderr": 0.041391127276354626, + "acc_norm": 0.7107438016528925, + "acc_norm_stderr": 0.041391127276354626 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5921052631578947, + "acc_stderr": 0.039993097127774734, + "acc_norm": 0.5921052631578947, + "acc_norm_stderr": 0.039993097127774734 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5245098039215687, + "acc_stderr": 0.020203517280261447, + "acc_norm": 0.5245098039215687, + "acc_norm_stderr": 0.020203517280261447 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3723404255319149, + "acc_stderr": 0.028838921471251458, + "acc_norm": 0.3723404255319149, + "acc_norm_stderr": 0.028838921471251458 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.36607142857142855, + "acc_stderr": 0.04572372358737431, + "acc_norm": 0.36607142857142855, + "acc_norm_stderr": 0.04572372358737431 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5092592592592593, + "acc_stderr": 0.034093869469927006, + "acc_norm": 0.5092592592592593, + "acc_norm_stderr": 0.034093869469927006 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2446927374301676, + "acc_stderr": 0.014378169884098409, + "acc_norm": 0.2446927374301676, + "acc_norm_stderr": 0.014378169884098409 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.72, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.72, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5330882352941176, + "acc_stderr": 0.030306257722468314, + "acc_norm": 0.5330882352941176, + "acc_norm_stderr": 0.030306257722468314 + }, + 
"harness|ko_mmlu_security_studies|5": { + "acc": 0.6816326530612244, + "acc_stderr": 0.02982253379398205, + "acc_norm": 0.6816326530612244, + "acc_norm_stderr": 0.02982253379398205 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7510548523206751, + "acc_stderr": 0.028146970599422644, + "acc_norm": 0.7510548523206751, + "acc_norm_stderr": 0.028146970599422644 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.41460234680573665, + "acc_stderr": 0.012582597058908284, + "acc_norm": 0.41460234680573665, + "acc_norm_stderr": 0.012582597058908284 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.7009803921568627, + "acc_stderr": 0.03213325717373617, + "acc_norm": 0.7009803921568627, + "acc_norm_stderr": 0.03213325717373617 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.7151515151515152, + "acc_stderr": 0.035243908445117815, + "acc_norm": 0.7151515151515152, + "acc_norm_stderr": 0.035243908445117815 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.30354957160342716, + "mc1_stderr": 0.016095884155386847, + "mc2": 0.46712370950707965, + "mc2_stderr": 0.015514289067068013 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5785123966942148, + "acc_stderr": 0.016977101932601518, + "acc_norm": 0.5938606847697757, + "acc_norm_stderr": 0.016884749503191396 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 
1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + 
"harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "ai-human-lab/EEVE-Korean-10.8B-instruct-v4-sft", + "model_sha": "7c938605f3d907013a6cd5d92b0d07b169032aa0", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/ai-human-lab/EEVE-Korean_Instruct-10.8B-expo/result_2024-07-12 00:52:44.json b/ai-human-lab/EEVE-Korean_Instruct-10.8B-expo/result_2024-07-12 00:52:44.json new file mode 100644 index 0000000000000000000000000000000000000000..5fa5d8c92d33e587c8ccae63fd0a9bf8ad82402b --- /dev/null +++ b/ai-human-lab/EEVE-Korean_Instruct-10.8B-expo/result_2024-07-12 00:52:44.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.5068259385665529, + "acc_stderr": 0.014610029151379813, + "acc_norm": 0.5546075085324232, + "acc_norm_stderr": 0.01452398763834409 + }, + "harness|ko_hellaswag|10": { + "acc": 0.48536148177653854, + "acc_stderr": 0.004987642470249518, + "acc_norm": 0.658832901812388, + "acc_norm_stderr": 0.004731324409133256 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.695906432748538, + "acc_stderr": 0.03528211258245233, + "acc_norm": 0.695906432748538, + "acc_norm_stderr": 0.03528211258245233 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6990291262135923, + "acc_stderr": 0.045416094465039476, + "acc_norm": 0.6990291262135923, + "acc_norm_stderr": 0.045416094465039476 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.735632183908046, + "acc_stderr": 0.01576998484069052, + "acc_norm": 0.735632183908046, + "acc_norm_stderr": 0.01576998484069052 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.5185185185185185, + "acc_stderr": 0.043163785995113245, + "acc_norm": 0.5185185185185185, + "acc_norm_stderr": 0.043163785995113245 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, 
+ "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.5361702127659574, + "acc_stderr": 0.0326003851183577, + "acc_norm": 0.5361702127659574, + "acc_norm_stderr": 0.0326003851183577 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.536144578313253, + "acc_stderr": 0.03882310850890594, + "acc_norm": 0.536144578313253, + "acc_norm_stderr": 0.03882310850890594 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6366559485530546, + "acc_stderr": 0.027316847674192717, + "acc_norm": 0.6366559485530546, + "acc_norm_stderr": 0.027316847674192717 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.6367713004484304, + "acc_stderr": 0.032277904428505, + "acc_norm": 0.6367713004484304, + "acc_norm_stderr": 0.032277904428505 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5877862595419847, + "acc_stderr": 0.043171711948702556, + "acc_norm": 0.5877862595419847, + "acc_norm_stderr": 0.043171711948702556 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7777777777777778, + "acc_stderr": 0.02962022787479048, + "acc_norm": 0.7777777777777778, + "acc_norm_stderr": 0.02962022787479048 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.503448275862069, + "acc_stderr": 0.041665675771015785, + "acc_norm": 0.503448275862069, + "acc_norm_stderr": 0.041665675771015785 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3235294117647059, + "acc_stderr": 0.04655010411319617, + "acc_norm": 0.3235294117647059, + "acc_norm_stderr": 0.04655010411319617 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6008403361344538, + "acc_stderr": 0.031811100324139245, + "acc_norm": 0.6008403361344538, + "acc_norm_stderr": 0.031811100324139245 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5641025641025641, + "acc_stderr": 
0.02514180151117749, + "acc_norm": 0.5641025641025641, + "acc_norm_stderr": 0.02514180151117749 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.63, + "acc_stderr": 0.04852365870939098, + "acc_norm": 0.63, + "acc_norm_stderr": 0.04852365870939098 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6203703703703703, + "acc_stderr": 0.04691521224077742, + "acc_norm": 0.6203703703703703, + "acc_norm_stderr": 0.04691521224077742 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3645320197044335, + "acc_stderr": 0.0338640574606209, + "acc_norm": 0.3645320197044335, + "acc_norm_stderr": 0.0338640574606209 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6387096774193548, + "acc_stderr": 0.027327548447957553, + "acc_norm": 0.6387096774193548, + "acc_norm_stderr": 0.027327548447957553 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.8205128205128205, + "acc_stderr": 0.02514093595033543, + "acc_norm": 0.8205128205128205, + "acc_norm_stderr": 0.02514093595033543 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5735849056603773, + "acc_stderr": 0.03043779434298305, + "acc_norm": 0.5735849056603773, + "acc_norm_stderr": 0.03043779434298305 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6181818181818182, + "acc_stderr": 0.046534298079135075, + "acc_norm": 0.6181818181818182, + "acc_norm_stderr": 0.046534298079135075 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.32592592592592595, + "acc_stderr": 0.02857834836547308, + "acc_norm": 0.32592592592592595, + "acc_norm_stderr": 0.02857834836547308 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3708609271523179, + "acc_stderr": 0.03943966699183629, + "acc_norm": 0.3708609271523179, + "acc_norm_stderr": 0.03943966699183629 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 
0.7412935323383084, + "acc_stderr": 0.03096590312357304, + "acc_norm": 0.7412935323383084, + "acc_norm_stderr": 0.03096590312357304 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.49710982658959535, + "acc_stderr": 0.038124005659748335, + "acc_norm": 0.49710982658959535, + "acc_norm_stderr": 0.038124005659748335 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3835978835978836, + "acc_stderr": 0.02504375731852019, + "acc_norm": 0.3835978835978836, + "acc_norm_stderr": 0.02504375731852019 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5972222222222222, + "acc_stderr": 0.04101405519842426, + "acc_norm": 0.5972222222222222, + "acc_norm_stderr": 0.04101405519842426 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.74, + "acc_stderr": 0.04408440022768077, + "acc_norm": 0.74, + "acc_norm_stderr": 0.04408440022768077 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.6242774566473989, + "acc_stderr": 0.026074314851657083, + "acc_norm": 0.6242774566473989, + "acc_norm_stderr": 0.026074314851657083 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5950920245398773, + "acc_stderr": 0.03856672163548914, + "acc_norm": 0.5950920245398773, + "acc_norm_stderr": 0.03856672163548914 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6141975308641975, + "acc_stderr": 0.027085401226132143, + "acc_norm": 0.6141975308641975, + "acc_norm_stderr": 0.027085401226132143 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7875647668393783, + "acc_stderr": 0.029519282616817234, + "acc_norm": 0.7875647668393783, + "acc_norm_stderr": 0.029519282616817234 + }, + 
"harness|ko_mmlu_econometrics|5": { + "acc": 0.4473684210526316, + "acc_stderr": 0.04677473004491199, + "acc_norm": 0.4473684210526316, + "acc_norm_stderr": 0.04677473004491199 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.7486238532110092, + "acc_stderr": 0.01859920636028741, + "acc_norm": 0.7486238532110092, + "acc_norm_stderr": 0.01859920636028741 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.42063492063492064, + "acc_stderr": 0.04415438226743744, + "acc_norm": 0.42063492063492064, + "acc_norm_stderr": 0.04415438226743744 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.6013071895424836, + "acc_stderr": 0.028036092273891776, + "acc_norm": 0.6013071895424836, + "acc_norm_stderr": 0.028036092273891776 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.62, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.62, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7272727272727273, + "acc_stderr": 0.04065578140908705, + "acc_norm": 0.7272727272727273, + "acc_norm_stderr": 0.04065578140908705 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5986842105263158, + "acc_stderr": 0.039889037033362836, + "acc_norm": 0.5986842105263158, + "acc_norm_stderr": 0.039889037033362836 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5277777777777778, + "acc_stderr": 0.020196594933541194, + "acc_norm": 0.5277777777777778, + "acc_norm_stderr": 0.020196594933541194 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.425531914893617, + "acc_stderr": 0.02949482760014438, + "acc_norm": 0.425531914893617, + "acc_norm_stderr": 0.02949482760014438 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.45535714285714285, + "acc_stderr": 0.04726835553719099, + "acc_norm": 0.45535714285714285, + "acc_norm_stderr": 0.04726835553719099 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4305555555555556, + "acc_stderr": 0.033769221512523345, + "acc_norm": 
0.4305555555555556, + "acc_norm_stderr": 0.033769221512523345 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.1787709497206704, + "acc_stderr": 0.012814800991359322, + "acc_norm": 0.1787709497206704, + "acc_norm_stderr": 0.012814800991359322 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.04960449637488583, + "acc_norm": 0.42, + "acc_norm_stderr": 0.04960449637488583 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.71, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.71, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5404411764705882, + "acc_stderr": 0.030273325077345755, + "acc_norm": 0.5404411764705882, + "acc_norm_stderr": 0.030273325077345755 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6775510204081633, + "acc_stderr": 0.029923100563683906, + "acc_norm": 0.6775510204081633, + "acc_norm_stderr": 0.029923100563683906 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7848101265822784, + "acc_stderr": 0.026750826994676163, + "acc_norm": 0.7848101265822784, + "acc_norm_stderr": 0.026750826994676163 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.41395045632333766, + "acc_stderr": 0.012579699631289258, + "acc_norm": 0.41395045632333766, + "acc_norm_stderr": 0.012579699631289258 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.7598039215686274, + "acc_stderr": 0.02998373305591361, + "acc_norm": 0.7598039215686274, + "acc_norm_stderr": 0.02998373305591361 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.7333333333333333, + "acc_stderr": 0.03453131801885416, + "acc_norm": 0.7333333333333333, + "acc_norm_stderr": 0.03453131801885416 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.36107711138310894, + "mc1_stderr": 0.016814312844836882, + "mc2": 0.49675886832572, + "mc2_stderr": 0.01571627637168158 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.51357733175915, + 
"acc_stderr": 0.01718401506040146, + "acc_norm": 0.5230224321133412, + "acc_norm_stderr": 0.01717212154672763 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + 
"harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "ai-human-lab/EEVE-Korean_Instruct-10.8B-expo", + "model_sha": "136fc8774e191f27c886990d0d5e9891da6682d0", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/algograp-Inc/algograpV4/result_2024-04-15 09:29:35.json b/algograp-Inc/algograpV4/result_2024-04-15 09:29:35.json new file mode 100644 index 0000000000000000000000000000000000000000..6851f0c881f4b41a690831b302c9557ef7f4d3b3 --- /dev/null +++ b/algograp-Inc/algograpV4/result_2024-04-15 09:29:35.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.5068259385665529, + "acc_stderr": 0.014610029151379813, + "acc_norm": 0.5520477815699659, + "acc_norm_stderr": 0.01453201149821167 + }, + "harness|ko_hellaswag|10": { + "acc": 0.48645688109938257, + "acc_stderr": 0.004987950663406554, + "acc_norm": 0.6607249551882095, + "acc_norm_stderr": 0.004724956665879966 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.695906432748538, + "acc_stderr": 0.03528211258245233, + "acc_norm": 
0.695906432748538, + "acc_norm_stderr": 0.03528211258245233 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6990291262135923, + "acc_stderr": 0.045416094465039476, + "acc_norm": 0.6990291262135923, + "acc_norm_stderr": 0.045416094465039476 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.7381864623243933, + "acc_stderr": 0.015720838678445252, + "acc_norm": 0.7381864623243933, + "acc_norm_stderr": 0.015720838678445252 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.5037037037037037, + "acc_stderr": 0.043192236258113324, + "acc_norm": 0.5037037037037037, + "acc_norm_stderr": 0.043192236258113324 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768077, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768077 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.5276595744680851, + "acc_stderr": 0.03263597118409769, + "acc_norm": 0.5276595744680851, + "acc_norm_stderr": 0.03263597118409769 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.536144578313253, + "acc_stderr": 0.03882310850890594, + "acc_norm": 0.536144578313253, + "acc_norm_stderr": 0.03882310850890594 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6366559485530546, + "acc_stderr": 0.027316847674192714, + "acc_norm": 0.6366559485530546, + "acc_norm_stderr": 0.027316847674192714 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.6233183856502242, + "acc_stderr": 0.032521134899291884, + "acc_norm": 0.6233183856502242, + "acc_norm_stderr": 0.032521134899291884 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5877862595419847, + "acc_stderr": 0.043171711948702556, + "acc_norm": 0.5877862595419847, + "acc_norm_stderr": 0.043171711948702556 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7777777777777778, + "acc_stderr": 0.02962022787479048, + "acc_norm": 
0.7777777777777778, + "acc_norm_stderr": 0.02962022787479048 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5172413793103449, + "acc_stderr": 0.04164188720169375, + "acc_norm": 0.5172413793103449, + "acc_norm_stderr": 0.04164188720169375 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3235294117647059, + "acc_stderr": 0.04655010411319617, + "acc_norm": 0.3235294117647059, + "acc_norm_stderr": 0.04655010411319617 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6008403361344538, + "acc_stderr": 0.031811100324139245, + "acc_norm": 0.6008403361344538, + "acc_norm_stderr": 0.031811100324139245 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5692307692307692, + "acc_stderr": 0.025106820660539753, + "acc_norm": 0.5692307692307692, + "acc_norm_stderr": 0.025106820660539753 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.64, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.64, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6111111111111112, + "acc_stderr": 0.0471282125742677, + "acc_norm": 0.6111111111111112, + "acc_norm_stderr": 0.0471282125742677 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39408866995073893, + "acc_stderr": 0.03438157967036543, + "acc_norm": 0.39408866995073893, + "acc_norm_stderr": 0.03438157967036543 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6419354838709678, + "acc_stderr": 0.02727389059430063, + "acc_norm": 0.6419354838709678, + "acc_norm_stderr": 0.02727389059430063 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.8247863247863247, + "acc_stderr": 0.024904439098918214, + "acc_norm": 0.8247863247863247, + "acc_norm_stderr": 0.024904439098918214 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.569811320754717, + 
"acc_stderr": 0.030471445867183238, + "acc_norm": 0.569811320754717, + "acc_norm_stderr": 0.030471445867183238 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6181818181818182, + "acc_stderr": 0.046534298079135075, + "acc_norm": 0.6181818181818182, + "acc_norm_stderr": 0.046534298079135075 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.32222222222222224, + "acc_stderr": 0.028493465091028597, + "acc_norm": 0.32222222222222224, + "acc_norm_stderr": 0.028493465091028597 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3576158940397351, + "acc_stderr": 0.03913453431177258, + "acc_norm": 0.3576158940397351, + "acc_norm_stderr": 0.03913453431177258 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.736318407960199, + "acc_stderr": 0.03115715086935558, + "acc_norm": 0.736318407960199, + "acc_norm_stderr": 0.03115715086935558 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4913294797687861, + "acc_stderr": 0.03811890988940412, + "acc_norm": 0.4913294797687861, + "acc_norm_stderr": 0.03811890988940412 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3915343915343915, + "acc_stderr": 0.025138091388851123, + "acc_norm": 0.3915343915343915, + "acc_norm_stderr": 0.025138091388851123 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5972222222222222, + "acc_stderr": 0.04101405519842426, + "acc_norm": 0.5972222222222222, + "acc_norm_stderr": 0.04101405519842426 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.74, + "acc_stderr": 0.04408440022768077, + "acc_norm": 0.74, + "acc_norm_stderr": 0.04408440022768077 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.6416184971098265, + "acc_stderr": 0.02581675679158419, + "acc_norm": 0.6416184971098265, + "acc_norm_stderr": 0.02581675679158419 + }, + "harness|ko_mmlu_logical_fallacies|5": { 
+ "acc": 0.6134969325153374, + "acc_stderr": 0.03825825548848606, + "acc_norm": 0.6134969325153374, + "acc_norm_stderr": 0.03825825548848606 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6203703703703703, + "acc_stderr": 0.02700252103451647, + "acc_norm": 0.6203703703703703, + "acc_norm_stderr": 0.02700252103451647 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7772020725388601, + "acc_stderr": 0.03003114797764154, + "acc_norm": 0.7772020725388601, + "acc_norm_stderr": 0.03003114797764154 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.47368421052631576, + "acc_stderr": 0.046970851366478626, + "acc_norm": 0.47368421052631576, + "acc_norm_stderr": 0.046970851366478626 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.744954128440367, + "acc_stderr": 0.018688500856535832, + "acc_norm": 0.744954128440367, + "acc_norm_stderr": 0.018688500856535832 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.42063492063492064, + "acc_stderr": 0.04415438226743744, + "acc_norm": 0.42063492063492064, + "acc_norm_stderr": 0.04415438226743744 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.6209150326797386, + "acc_stderr": 0.027780141207023337, + "acc_norm": 0.6209150326797386, + "acc_norm_stderr": 0.027780141207023337 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.64, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.64, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7107438016528925, + "acc_stderr": 0.041391127276354626, + "acc_norm": 0.7107438016528925, + "acc_norm_stderr": 0.041391127276354626 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5921052631578947, + "acc_stderr": 0.039993097127774734, + "acc_norm": 0.5921052631578947, + "acc_norm_stderr": 0.039993097127774734 + }, + 
"harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5490196078431373, + "acc_stderr": 0.020130388312904528, + "acc_norm": 0.5490196078431373, + "acc_norm_stderr": 0.020130388312904528 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.4219858156028369, + "acc_stderr": 0.029462189233370586, + "acc_norm": 0.4219858156028369, + "acc_norm_stderr": 0.029462189233370586 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4375, + "acc_stderr": 0.04708567521880525, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.04708567521880525 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4537037037037037, + "acc_stderr": 0.03395322726375797, + "acc_norm": 0.4537037037037037, + "acc_norm_stderr": 0.03395322726375797 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.17653631284916202, + "acc_stderr": 0.012751770640520488, + "acc_norm": 0.17653631284916202, + "acc_norm_stderr": 0.012751770640520488 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.73, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.73, + "acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5404411764705882, + "acc_stderr": 0.030273325077345755, + "acc_norm": 0.5404411764705882, + "acc_norm_stderr": 0.030273325077345755 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6938775510204082, + "acc_stderr": 0.02950489645459596, + "acc_norm": 0.6938775510204082, + "acc_norm_stderr": 0.02950489645459596 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7932489451476793, + "acc_stderr": 0.026361651668389104, + "acc_norm": 0.7932489451476793, + "acc_norm_stderr": 0.026361651668389104 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.41590612777053454, + "acc_stderr": 0.012588323850313608, + "acc_norm": 0.41590612777053454, + "acc_norm_stderr": 
0.012588323850313608 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.7647058823529411, + "acc_stderr": 0.029771775228145638, + "acc_norm": 0.7647058823529411, + "acc_norm_stderr": 0.029771775228145638 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.7515151515151515, + "acc_stderr": 0.03374402644139405, + "acc_norm": 0.7515151515151515, + "acc_norm_stderr": 0.03374402644139405 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3537331701346389, + "mc1_stderr": 0.01673781435884615, + "mc2": 0.49129550769120267, + "mc2_stderr": 0.015660093201731295 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5206611570247934, + "acc_stderr": 0.017175671279836446, + "acc_norm": 0.5371900826446281, + "acc_norm_stderr": 0.017142736117643304 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + 
"harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "algograp-Inc/algograpV4", + "model_sha": "7ea38a7ddf62b785bafdebedad4f5694531450a4", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/allknowingroger/MultiverseEx26-7B-slerp/result_2024-07-03 10:02:46.json b/allknowingroger/MultiverseEx26-7B-slerp/result_2024-07-03 10:02:46.json new file mode 100644 index 
0000000000000000000000000000000000000000..da20a2476edfb54704b92e1bed10e424c8b3b3e2 --- /dev/null +++ b/allknowingroger/MultiverseEx26-7B-slerp/result_2024-07-03 10:02:46.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3916382252559727, + "acc_stderr": 0.014264122124938218, + "acc_norm": 0.45819112627986347, + "acc_norm_stderr": 0.0145602203087147 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3957379008165704, + "acc_stderr": 0.00488009208340804, + "acc_norm": 0.5262895837482573, + "acc_norm_stderr": 0.00498287934069141 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4678362573099415, + "acc_stderr": 0.03826882417660369, + "acc_norm": 0.4678362573099415, + "acc_norm_stderr": 0.03826882417660369 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6213592233009708, + "acc_stderr": 0.04802694698258975, + "acc_norm": 0.6213592233009708, + "acc_norm_stderr": 0.04802694698258975 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.45721583652618136, + "acc_stderr": 0.017814385238534427, + "acc_norm": 0.45721583652618136, + "acc_norm_stderr": 0.017814385238534427 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3851851851851852, + "acc_stderr": 0.042039210401562783, + "acc_norm": 0.3851851851851852, + "acc_norm_stderr": 0.042039210401562783 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39574468085106385, + "acc_stderr": 0.03196758697835362, + "acc_norm": 0.39574468085106385, + "acc_norm_stderr": 0.03196758697835362 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3493975903614458, + "acc_stderr": 0.03711725190740749, + "acc_norm": 0.3493975903614458, + "acc_norm_stderr": 0.03711725190740749 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.40836012861736337, + "acc_stderr": 0.027917050748484627, + "acc_norm": 0.40836012861736337, + 
"acc_norm_stderr": 0.027917050748484627 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4260089686098655, + "acc_stderr": 0.0331883328621728, + "acc_norm": 0.4260089686098655, + "acc_norm_stderr": 0.0331883328621728 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4351145038167939, + "acc_stderr": 0.04348208051644858, + "acc_norm": 0.4351145038167939, + "acc_norm_stderr": 0.04348208051644858 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.035402943770953675, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.035402943770953675 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4482758620689655, + "acc_stderr": 0.041443118108781506, + "acc_norm": 0.4482758620689655, + "acc_norm_stderr": 0.041443118108781506 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237655, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237655 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5, + "acc_stderr": 0.032478490123081544, + "acc_norm": 0.5, + "acc_norm_stderr": 0.032478490123081544 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4794871794871795, + "acc_stderr": 0.025329663163489943, + "acc_norm": 0.4794871794871795, + "acc_norm_stderr": 0.025329663163489943 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.64, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.64, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5092592592592593, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.5092592592592593, + "acc_norm_stderr": 
0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3694581280788177, + "acc_stderr": 0.03395970381998575, + "acc_norm": 0.3694581280788177, + "acc_norm_stderr": 0.03395970381998575 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4290322580645161, + "acc_stderr": 0.028156036538233217, + "acc_norm": 0.4290322580645161, + "acc_norm_stderr": 0.028156036538233217 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7350427350427351, + "acc_stderr": 0.028911208802749472, + "acc_norm": 0.7350427350427351, + "acc_norm_stderr": 0.028911208802749472 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4528301886792453, + "acc_stderr": 0.030635627957961827, + "acc_norm": 0.4528301886792453, + "acc_norm_stderr": 0.030635627957961827 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4727272727272727, + "acc_stderr": 0.04782001791380063, + "acc_norm": 0.4727272727272727, + "acc_norm_stderr": 0.04782001791380063 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.34814814814814815, + "acc_stderr": 0.029045600290616255, + "acc_norm": 0.34814814814814815, + "acc_norm_stderr": 0.029045600290616255 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2582781456953642, + "acc_stderr": 0.035737053147634576, + "acc_norm": 0.2582781456953642, + "acc_norm_stderr": 0.035737053147634576 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.572139303482587, + "acc_stderr": 0.03498541988407795, + "acc_norm": 0.572139303482587, + "acc_norm_stderr": 0.03498541988407795 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.41040462427745666, + "acc_stderr": 0.03750757044895537, + "acc_norm": 0.41040462427745666, + "acc_norm_stderr": 0.03750757044895537 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3835978835978836, + "acc_stderr": 0.0250437573185202, + "acc_norm": 0.3835978835978836, + "acc_norm_stderr": 0.0250437573185202 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3472222222222222, + 
"acc_stderr": 0.039812405437178615, + "acc_norm": 0.3472222222222222, + "acc_norm_stderr": 0.039812405437178615 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.55, + "acc_stderr": 0.05000000000000001, + "acc_norm": 0.55, + "acc_norm_stderr": 0.05000000000000001 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5086705202312138, + "acc_stderr": 0.0269150473553698, + "acc_norm": 0.5086705202312138, + "acc_norm_stderr": 0.0269150473553698 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4662576687116564, + "acc_stderr": 0.039194155450484096, + "acc_norm": 0.4662576687116564, + "acc_norm_stderr": 0.039194155450484096 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4537037037037037, + "acc_stderr": 0.027701228468542602, + "acc_norm": 0.4537037037037037, + "acc_norm_stderr": 0.027701228468542602 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.49222797927461137, + "acc_stderr": 0.036080032255696545, + "acc_norm": 0.49222797927461137, + "acc_norm_stderr": 0.036080032255696545 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.32456140350877194, + "acc_stderr": 0.04404556157374768, + "acc_norm": 0.32456140350877194, + "acc_norm_stderr": 0.04404556157374768 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5064220183486239, + "acc_stderr": 0.021435554820013077, + "acc_norm": 0.5064220183486239, + "acc_norm_stderr": 0.021435554820013077 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.044444444444444495, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.044444444444444495 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 
0.45751633986928103, + "acc_stderr": 0.02852638345214263, + "acc_norm": 0.45751633986928103, + "acc_norm_stderr": 0.02852638345214263 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6033057851239669, + "acc_stderr": 0.044658697805310094, + "acc_norm": 0.6033057851239669, + "acc_norm_stderr": 0.044658697805310094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4144736842105263, + "acc_stderr": 0.04008973785779206, + "acc_norm": 0.4144736842105263, + "acc_norm_stderr": 0.04008973785779206 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.019722058939618068, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.019722058939618068 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3617021276595745, + "acc_stderr": 0.028663820147199492, + "acc_norm": 0.3617021276595745, + "acc_norm_stderr": 0.028663820147199492 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.36607142857142855, + "acc_stderr": 0.0457237235873743, + "acc_norm": 0.36607142857142855, + "acc_norm_stderr": 0.0457237235873743 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.38425925925925924, + "acc_stderr": 0.03317354514310742, + "acc_norm": 0.38425925925925924, + "acc_norm_stderr": 0.03317354514310742 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.28044692737430166, + "acc_stderr": 0.01502408388332288, + "acc_norm": 0.28044692737430166, + "acc_norm_stderr": 0.01502408388332288 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.68, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.68, + "acc_norm_stderr": 0.046882617226215034 + }, + 
"harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3235294117647059, + "acc_stderr": 0.028418208619406794, + "acc_norm": 0.3235294117647059, + "acc_norm_stderr": 0.028418208619406794 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5183673469387755, + "acc_stderr": 0.03198761546763127, + "acc_norm": 0.5183673469387755, + "acc_norm_stderr": 0.03198761546763127 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5864978902953587, + "acc_stderr": 0.03205649904851858, + "acc_norm": 0.5864978902953587, + "acc_norm_stderr": 0.03205649904851858 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3409387222946545, + "acc_stderr": 0.01210681720306721, + "acc_norm": 0.3409387222946545, + "acc_norm_stderr": 0.01210681720306721 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4264705882352941, + "acc_stderr": 0.03471157907953425, + "acc_norm": 0.4264705882352941, + "acc_norm_stderr": 0.03471157907953425 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.44242424242424244, + "acc_stderr": 0.03878372113711274, + "acc_norm": 0.44242424242424244, + "acc_norm_stderr": 0.03878372113711274 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.42105263157894735, + "mc1_stderr": 0.017283936248136508, + "mc2": 0.5970677210841024, + "mc2_stderr": 0.016291411166915383 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.42384887839433294, + "acc_stderr": 0.01698981083462825, + "acc_norm": 0.42621015348288077, + "acc_norm_stderr": 0.01700212260948926 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + 
"harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + 
"harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "allknowingroger/MultiverseEx26-7B-slerp", + "model_sha": "43f18d84e025693f00e9be335bf12fce96089b2f", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/amphora/olaf-l.0.1/result_2023-10-21 02:17:55.json b/amphora/olaf-l.0.1/result_2023-10-21 02:17:55.json new file mode 100644 index 0000000000000000000000000000000000000000..4ac3e7afaf72bd57f989b06ec2b8293784808703 --- /dev/null +++ b/amphora/olaf-l.0.1/result_2023-10-21 02:17:55.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.40784982935153585, + "acc_stderr": 0.01436109728844971, + "acc_norm": 0.47013651877133106, + "acc_norm_stderr": 0.014585305840007105 + }, + "harness|ko_hellaswag|10": { + "acc": 0.40728938458474406, + "acc_stderr": 0.00490325426417762, + "acc_norm": 0.5451105357498506, + "acc_norm_stderr": 0.004969431900874312 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5263157894736842, + "acc_stderr": 0.03829509868994727, + "acc_norm": 0.5263157894736842, + "acc_norm_stderr": 0.03829509868994727 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6213592233009708, + "acc_stderr": 0.04802694698258975, + "acc_norm": 0.6213592233009708, + "acc_norm_stderr": 0.04802694698258975 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5223499361430396, + "acc_stderr": 0.017862091778507852, + "acc_norm": 0.5223499361430396, + "acc_norm_stderr": 0.017862091778507852 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.043097329010363554, + "acc_norm": 0.4666666666666667, + 
"acc_norm_stderr": 0.043097329010363554 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3617021276595745, + "acc_stderr": 0.0314108219759624, + "acc_norm": 0.3617021276595745, + "acc_norm_stderr": 0.0314108219759624 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39759036144578314, + "acc_stderr": 0.038099730845402184, + "acc_norm": 0.39759036144578314, + "acc_norm_stderr": 0.038099730845402184 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4983922829581994, + "acc_stderr": 0.02839794490780661, + "acc_norm": 0.4983922829581994, + "acc_norm_stderr": 0.02839794490780661 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4125560538116592, + "acc_stderr": 0.03304062175449297, + "acc_norm": 0.4125560538116592, + "acc_norm_stderr": 0.03304062175449297 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48854961832061067, + "acc_stderr": 0.043841400240780176, + "acc_norm": 0.48854961832061067, + "acc_norm_stderr": 0.043841400240780176 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5404040404040404, + "acc_stderr": 0.035507024651313425, + "acc_norm": 0.5404040404040404, + "acc_norm_stderr": 0.035507024651313425 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4482758620689655, + "acc_stderr": 0.04144311810878152, + "acc_norm": 0.4482758620689655, + "acc_norm_stderr": 0.04144311810878152 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.04158307533083286, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.04158307533083286 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.46218487394957986, + "acc_stderr": 0.032385469487589795, + 
"acc_norm": 0.46218487394957986, + "acc_norm_stderr": 0.032385469487589795 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.44358974358974357, + "acc_stderr": 0.02518914989476419, + "acc_norm": 0.44358974358974357, + "acc_norm_stderr": 0.02518914989476419 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.47, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.47, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5462962962962963, + "acc_stderr": 0.04812917324536823, + "acc_norm": 0.5462962962962963, + "acc_norm_stderr": 0.04812917324536823 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3891625615763547, + "acc_stderr": 0.034304624161038716, + "acc_norm": 0.3891625615763547, + "acc_norm_stderr": 0.034304624161038716 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4774193548387097, + "acc_stderr": 0.028414985019707868, + "acc_norm": 0.4774193548387097, + "acc_norm_stderr": 0.028414985019707868 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.030882736974138653, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.030882736974138653 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.47547169811320755, + "acc_stderr": 0.030735822206205615, + "acc_norm": 0.47547169811320755, + "acc_norm_stderr": 0.030735822206205615 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4818181818181818, + "acc_stderr": 0.04785964010794916, + "acc_norm": 0.4818181818181818, + "acc_norm_stderr": 0.04785964010794916 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24814814814814815, + "acc_stderr": 0.0263357394040558, + "acc_norm": 0.24814814814814815, + "acc_norm_stderr": 0.0263357394040558 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 
0.31788079470198677, + "acc_stderr": 0.03802039760107903, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.03802039760107903 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6019900497512438, + "acc_stderr": 0.034611994290400135, + "acc_norm": 0.6019900497512438, + "acc_norm_stderr": 0.034611994290400135 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.43352601156069365, + "acc_stderr": 0.03778621079092055, + "acc_norm": 0.43352601156069365, + "acc_norm_stderr": 0.03778621079092055 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.31746031746031744, + "acc_stderr": 0.02397386199899208, + "acc_norm": 0.31746031746031744, + "acc_norm_stderr": 0.02397386199899208 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3402777777777778, + "acc_stderr": 0.039621355734862175, + "acc_norm": 0.3402777777777778, + "acc_norm_stderr": 0.039621355734862175 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.67, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.67, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5086705202312138, + "acc_stderr": 0.0269150473553698, + "acc_norm": 0.5086705202312138, + "acc_norm_stderr": 0.0269150473553698 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4662576687116564, + "acc_stderr": 0.039194155450484096, + "acc_norm": 0.4662576687116564, + "acc_norm_stderr": 0.039194155450484096 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.47530864197530864, + "acc_stderr": 0.02778680093142745, + "acc_norm": 0.47530864197530864, + "acc_norm_stderr": 0.02778680093142745 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + 
"harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5284974093264249, + "acc_stderr": 0.03602573571288441, + "acc_norm": 0.5284974093264249, + "acc_norm_stderr": 0.03602573571288441 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.21052631578947367, + "acc_stderr": 0.038351539543994194, + "acc_norm": 0.21052631578947367, + "acc_norm_stderr": 0.038351539543994194 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5321100917431193, + "acc_stderr": 0.021393071222680814, + "acc_norm": 0.5321100917431193, + "acc_norm_stderr": 0.021393071222680814 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.0404061017820884, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.0404061017820884 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4477124183006536, + "acc_stderr": 0.02847293847803353, + "acc_norm": 0.4477124183006536, + "acc_norm_stderr": 0.02847293847803353 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6859504132231405, + "acc_stderr": 0.042369647530410184, + "acc_norm": 0.6859504132231405, + "acc_norm_stderr": 0.042369647530410184 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4473684210526316, + "acc_stderr": 0.040463368839782514, + "acc_norm": 0.4473684210526316, + "acc_norm_stderr": 0.040463368839782514 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3366013071895425, + "acc_stderr": 0.01911721391149515, + "acc_norm": 0.3366013071895425, + "acc_norm_stderr": 0.01911721391149515 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.35106382978723405, + "acc_stderr": 0.02847350127296377, + "acc_norm": 0.35106382978723405, + "acc_norm_stderr": 0.02847350127296377 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.22321428571428573, + "acc_stderr": 0.039523019677025116, + 
"acc_norm": 0.22321428571428573, + "acc_norm_stderr": 0.039523019677025116 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.03167468706828979, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.03167468706828979 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.34191176470588236, + "acc_stderr": 0.028814722422254174, + "acc_norm": 0.34191176470588236, + "acc_norm_stderr": 0.028814722422254174 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.42448979591836733, + "acc_stderr": 0.031642094879429414, + "acc_norm": 0.42448979591836733, + "acc_norm_stderr": 0.031642094879429414 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5780590717299579, + "acc_stderr": 0.032148146302403695, + "acc_norm": 0.5780590717299579, + "acc_norm_stderr": 0.032148146302403695 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.31290743155149936, + "acc_stderr": 0.011842529823063, + "acc_norm": 0.31290743155149936, + "acc_norm_stderr": 0.011842529823063 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.47549019607843135, + "acc_stderr": 0.035050931943487976, + "acc_norm": 0.47549019607843135, + "acc_norm_stderr": 0.035050931943487976 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5212121212121212, + "acc_stderr": 0.03900828913737302, + "acc_norm": 0.5212121212121212, + "acc_norm_stderr": 0.03900828913737302 + }, + 
"harness|ko_truthfulqa_mc|0": { + "mc1": 0.2717258261933905, + "mc1_stderr": 0.015572840452875833, + "mc2": 0.4439993647512429, + "mc2_stderr": 0.014990045797851265 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4167650531286895, + "acc_stderr": 0.01695048914610883, + "acc_norm": 0.4817001180637544, + "acc_norm_stderr": 0.017178836639177738 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "amphora/olaf-l.0.1", + "model_sha": "1fe9598f2ec7fe35ce77e773ef35b97b893b11d0", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/amphora/polyglot-5.8B-CoT-e1/result_2023-09-28 03:35:31.json b/amphora/polyglot-5.8B-CoT-e1/result_2023-09-28 03:35:31.json new file mode 100644 index 0000000000000000000000000000000000000000..38c73748db11cf781585bcda08eba4516653a9ef --- /dev/null +++ b/amphora/polyglot-5.8B-CoT-e1/result_2023-09-28 03:35:31.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.29266211604095566, + "acc_stderr": 0.013295916103619413, + "acc_norm": 0.31399317406143346, + "acc_norm_stderr": 0.013562691224726291 + }, + "harness|ko_hellaswag|10": { + "acc": 0.37004580760804623, + "acc_stderr": 
0.00481829899101255, + "acc_norm": 0.47470623381796456, + "acc_norm_stderr": 0.004983392650570958 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.17543859649122806, + "acc_stderr": 0.029170885500727665, + "acc_norm": 0.17543859649122806, + "acc_norm_stderr": 0.029170885500727665 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.24271844660194175, + "acc_stderr": 0.04245022486384495, + "acc_norm": 0.24271844660194175, + "acc_norm_stderr": 0.04245022486384495 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.20434227330779056, + "acc_stderr": 0.014419123980931906, + "acc_norm": 0.20434227330779056, + "acc_norm_stderr": 0.014419123980931906 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.22962962962962963, + "acc_stderr": 0.036333844140734636, + "acc_norm": 0.22962962962962963, + "acc_norm_stderr": 0.036333844140734636 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.18, + "acc_stderr": 0.03861229196653694, + "acc_norm": 0.18, + "acc_norm_stderr": 0.03861229196653694 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.22127659574468084, + "acc_stderr": 0.027136349602424063, + "acc_norm": 0.22127659574468084, + "acc_norm_stderr": 0.027136349602424063 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.22289156626506024, + "acc_stderr": 0.03240004825594688, + "acc_norm": 0.22289156626506024, + "acc_norm_stderr": 0.03240004825594688 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.24115755627009647, + "acc_stderr": 0.024296594034763426, + "acc_norm": 0.24115755627009647, + "acc_norm_stderr": 0.024296594034763426 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.11659192825112108, + "acc_stderr": 0.021539639816244464, + "acc_norm": 0.11659192825112108, + "acc_norm_stderr": 0.021539639816244464 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2824427480916031, + "acc_stderr": 0.03948406125768361, + "acc_norm": 0.2824427480916031, + "acc_norm_stderr": 0.03948406125768361 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 
0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3434343434343434, + "acc_stderr": 0.03383201223244441, + "acc_norm": 0.3434343434343434, + "acc_norm_stderr": 0.03383201223244441 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.18620689655172415, + "acc_stderr": 0.03243946159004616, + "acc_norm": 0.18620689655172415, + "acc_norm_stderr": 0.03243946159004616 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.30392156862745096, + "acc_stderr": 0.04576665403207763, + "acc_norm": 0.30392156862745096, + "acc_norm_stderr": 0.04576665403207763 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.02934457250063435, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.02934457250063435 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.358974358974359, + "acc_stderr": 0.024321738484602364, + "acc_norm": 0.358974358974359, + "acc_norm_stderr": 0.024321738484602364 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.19, + "acc_stderr": 0.03942772444036625, + "acc_norm": 0.19, + "acc_norm_stderr": 0.03942772444036625 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.18, + "acc_stderr": 0.038612291966536955, + "acc_norm": 0.18, + "acc_norm_stderr": 0.038612291966536955 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.21296296296296297, + "acc_stderr": 0.03957835471980981, + "acc_norm": 0.21296296296296297, + "acc_norm_stderr": 0.03957835471980981 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2413793103448276, + "acc_stderr": 0.030108330718011625, + "acc_norm": 0.2413793103448276, + "acc_norm_stderr": 0.030108330718011625 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3258064516129032, + "acc_stderr": 0.0266620105785671, + "acc_norm": 0.3258064516129032, + "acc_norm_stderr": 0.0266620105785671 + }, + 
"harness|ko_mmlu_marketing|5": { + "acc": 0.19658119658119658, + "acc_stderr": 0.02603538609895129, + "acc_norm": 0.19658119658119658, + "acc_norm_stderr": 0.02603538609895129 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.27547169811320754, + "acc_stderr": 0.02749566368372406, + "acc_norm": 0.27547169811320754, + "acc_norm_stderr": 0.02749566368372406 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.23636363636363636, + "acc_stderr": 0.04069306319721376, + "acc_norm": 0.23636363636363636, + "acc_norm_stderr": 0.04069306319721376 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.026962424325073835, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.026962424325073835 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3509933774834437, + "acc_stderr": 0.03896981964257374, + "acc_norm": 0.3509933774834437, + "acc_norm_stderr": 0.03896981964257374 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.26865671641791045, + "acc_stderr": 0.03134328358208954, + "acc_norm": 0.26865671641791045, + "acc_norm_stderr": 0.03134328358208954 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3352601156069364, + "acc_stderr": 0.03599586301247078, + "acc_norm": 0.3352601156069364, + "acc_norm_stderr": 0.03599586301247078 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.02256989707491842, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.02256989707491842 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2638888888888889, + "acc_stderr": 0.03685651095897532, + "acc_norm": 0.2638888888888889, + "acc_norm_stderr": 0.03685651095897532 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.27, + 
"acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.26878612716763006, + "acc_stderr": 0.023868003262500114, + "acc_norm": 0.26878612716763006, + "acc_norm_stderr": 0.023868003262500114 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2392638036809816, + "acc_stderr": 0.03351953879521271, + "acc_norm": 0.2392638036809816, + "acc_norm_stderr": 0.03351953879521271 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.23148148148148148, + "acc_stderr": 0.02346842983245115, + "acc_norm": 0.23148148148148148, + "acc_norm_stderr": 0.02346842983245115 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.36787564766839376, + "acc_stderr": 0.034801756684660366, + "acc_norm": 0.36787564766839376, + "acc_norm_stderr": 0.034801756684660366 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.040969851398436695, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.040969851398436695 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3431192660550459, + "acc_stderr": 0.02035477773608604, + "acc_norm": 0.3431192660550459, + "acc_norm_stderr": 0.02035477773608604 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3412698412698413, + "acc_stderr": 0.04240799327574924, + "acc_norm": 0.3412698412698413, + "acc_norm_stderr": 0.04240799327574924 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2908496732026144, + "acc_stderr": 0.026004800363952113, + "acc_norm": 0.2908496732026144, + "acc_norm_stderr": 0.026004800363952113 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.17355371900826447, + "acc_stderr": 0.03457272836917669, + 
"acc_norm": 0.17355371900826447, + "acc_norm_stderr": 0.03457272836917669 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3355263157894737, + "acc_stderr": 0.038424985593952694, + "acc_norm": 0.3355263157894737, + "acc_norm_stderr": 0.038424985593952694 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2173202614379085, + "acc_stderr": 0.0166848209291486, + "acc_norm": 0.2173202614379085, + "acc_norm_stderr": 0.0166848209291486 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.20921985815602837, + "acc_stderr": 0.02426476943998849, + "acc_norm": 0.20921985815602837, + "acc_norm_stderr": 0.02426476943998849 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.19642857142857142, + "acc_stderr": 0.03770970049347018, + "acc_norm": 0.19642857142857142, + "acc_norm_stderr": 0.03770970049347018 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.0340470532865388, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.0340470532865388 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27262569832402234, + "acc_stderr": 0.014893391735249608, + "acc_norm": 0.27262569832402234, + "acc_norm_stderr": 0.014893391735249608 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.21, + "acc_stderr": 0.04093601807403325, + "acc_norm": 0.21, + "acc_norm_stderr": 0.04093601807403325 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4485294117647059, + "acc_stderr": 0.030211479609121593, + "acc_norm": 0.4485294117647059, + "acc_norm_stderr": 0.030211479609121593 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3836734693877551, + "acc_stderr": 0.03113088039623593, + "acc_norm": 0.3836734693877551, + "acc_norm_stderr": 0.03113088039623593 + }, + "harness|ko_mmlu_high_school_world_history|5": { 
+ "acc": 0.25316455696202533, + "acc_stderr": 0.0283046579430353, + "acc_norm": 0.25316455696202533, + "acc_norm_stderr": 0.0283046579430353 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.24641460234680573, + "acc_stderr": 0.011005971399927235, + "acc_norm": 0.24641460234680573, + "acc_norm_stderr": 0.011005971399927235 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.25, + "acc_stderr": 0.03039153369274154, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03039153369274154 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2727272727272727, + "acc_stderr": 0.0347769116216366, + "acc_norm": 0.2727272727272727, + "acc_norm_stderr": 0.0347769116216366 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24112607099143207, + "mc1_stderr": 0.014974827279752332, + "mc2": 0.39158327266747156, + "mc2_stderr": 0.014622481693781006 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2632821723730815, + "acc_stderr": 0.015141752199573207, + "acc_norm": 0.3837072018890201, + "acc_norm_stderr": 0.01671892463723183 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + 
"harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "amphora/polyglot-5.8B-CoT-e1", + "model_sha": "e8f4cb1d884cf4d67e3e8afc0aab09c62a0d68c6", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 
0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/amphora/small-instruct/result_2023-10-09 19:17:00.json b/amphora/small-instruct/result_2023-10-09 19:17:00.json new file mode 100644 index 0000000000000000000000000000000000000000..7ee1aabcfaac0409d65e78eda40b2b088653c9cd --- /dev/null +++ b/amphora/small-instruct/result_2023-10-09 19:17:00.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2773037542662116, + "acc_stderr": 0.013082095839059374, + "acc_norm": 0.32764505119453924, + "acc_norm_stderr": 0.013715847940719346 + }, + "harness|ko_hellaswag|10": { + "acc": 0.34863572993427605, + "acc_stderr": 0.00475564501626385, + "acc_norm": 0.4313881696873133, + "acc_norm_stderr": 0.004942578520987342 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.2982456140350877, + "acc_stderr": 0.03508771929824565, + "acc_norm": 0.2982456140350877, + "acc_norm_stderr": 0.03508771929824565 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.17475728155339806, + "acc_stderr": 0.03760178006026621, + "acc_norm": 0.17475728155339806, + "acc_norm_stderr": 0.03760178006026621 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.26309067688378035, + "acc_stderr": 0.01574549716904906, + "acc_norm": 0.26309067688378035, + "acc_norm_stderr": 0.01574549716904906 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34074074074074073, + "acc_stderr": 0.040943762699967946, + "acc_norm": 0.34074074074074073, + "acc_norm_stderr": 0.040943762699967946 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2851063829787234, + "acc_stderr": 0.029513196625539355, + "acc_norm": 0.2851063829787234, + "acc_norm_stderr": 0.029513196625539355 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3132530120481928, + "acc_stderr": 
0.036108050180310235, + "acc_norm": 0.3132530120481928, + "acc_norm_stderr": 0.036108050180310235 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3022508038585209, + "acc_stderr": 0.02608270069539966, + "acc_norm": 0.3022508038585209, + "acc_norm_stderr": 0.02608270069539966 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.21973094170403587, + "acc_stderr": 0.0277901770643836, + "acc_norm": 0.21973094170403587, + "acc_norm_stderr": 0.0277901770643836 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2748091603053435, + "acc_stderr": 0.03915345408847835, + "acc_norm": 0.2748091603053435, + "acc_norm_stderr": 0.03915345408847835 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384739, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384739 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.2474747474747475, + "acc_stderr": 0.030746300742124505, + "acc_norm": 0.2474747474747475, + "acc_norm_stderr": 0.030746300742124505 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2827586206896552, + "acc_stderr": 0.03752833958003336, + "acc_norm": 0.2827586206896552, + "acc_norm_stderr": 0.03752833958003336 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.19607843137254902, + "acc_stderr": 0.03950581861179962, + "acc_norm": 0.19607843137254902, + "acc_norm_stderr": 0.03950581861179962 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.2605042016806723, + "acc_stderr": 0.02851025151234191, + "acc_norm": 0.2605042016806723, + "acc_norm_stderr": 0.02851025151234191 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2, + "acc_stderr": 0.020280805062535722, + "acc_norm": 0.2, + "acc_norm_stderr": 0.020280805062535722 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 
0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.32407407407407407, + "acc_stderr": 0.04524596007030048, + "acc_norm": 0.32407407407407407, + "acc_norm_stderr": 0.04524596007030048 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2019704433497537, + "acc_stderr": 0.028247350122180267, + "acc_norm": 0.2019704433497537, + "acc_norm_stderr": 0.028247350122180267 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.267741935483871, + "acc_stderr": 0.02518900666021238, + "acc_norm": 0.267741935483871, + "acc_norm_stderr": 0.02518900666021238 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.27350427350427353, + "acc_stderr": 0.029202540153431183, + "acc_norm": 0.27350427350427353, + "acc_norm_stderr": 0.029202540153431183 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.24528301886792453, + "acc_stderr": 0.026480357179895712, + "acc_norm": 0.24528301886792453, + "acc_norm_stderr": 0.026480357179895712 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.24545454545454545, + "acc_stderr": 0.04122066502878285, + "acc_norm": 0.24545454545454545, + "acc_norm_stderr": 0.04122066502878285 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.026719240783712156, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.026719240783712156 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + "acc_stderr": 0.03684881521389024, + "acc_norm": 0.2847682119205298, + "acc_norm_stderr": 0.03684881521389024 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.263681592039801, + "acc_stderr": 0.031157150869355575, + "acc_norm": 0.263681592039801, + "acc_norm_stderr": 0.031157150869355575 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.26011560693641617, + "acc_stderr": 0.03345036916788991, + "acc_norm": 0.26011560693641617, + "acc_norm_stderr": 0.03345036916788991 + }, + 
"harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.022644212615525214, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.022644212615525214 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2708333333333333, + "acc_stderr": 0.03716177437566017, + "acc_norm": 0.2708333333333333, + "acc_norm_stderr": 0.03716177437566017 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.2, + "acc_stderr": 0.040201512610368445, + "acc_norm": 0.2, + "acc_norm_stderr": 0.040201512610368445 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2658959537572254, + "acc_stderr": 0.023786203255508283, + "acc_norm": 0.2658959537572254, + "acc_norm_stderr": 0.023786203255508283 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2822085889570552, + "acc_stderr": 0.03536117886664743, + "acc_norm": 0.2822085889570552, + "acc_norm_stderr": 0.03536117886664743 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2716049382716049, + "acc_stderr": 0.02474862449053737, + "acc_norm": 0.2716049382716049, + "acc_norm_stderr": 0.02474862449053737 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.24870466321243523, + "acc_stderr": 0.03119584087770029, + "acc_norm": 0.24870466321243523, + "acc_norm_stderr": 0.03119584087770029 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.040969851398436716, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.040969851398436716 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.23853211009174313, + "acc_stderr": 0.018272575810231867, + "acc_norm": 0.23853211009174313, + "acc_norm_stderr": 
0.018272575810231867 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.1349206349206349, + "acc_stderr": 0.030557101589417515, + "acc_norm": 0.1349206349206349, + "acc_norm_stderr": 0.030557101589417515 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.238562091503268, + "acc_stderr": 0.024404394928087866, + "acc_norm": 0.238562091503268, + "acc_norm_stderr": 0.024404394928087866 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.19, + "acc_stderr": 0.03942772444036624, + "acc_norm": 0.19, + "acc_norm_stderr": 0.03942772444036624 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.371900826446281, + "acc_stderr": 0.04412015806624503, + "acc_norm": 0.371900826446281, + "acc_norm_stderr": 0.04412015806624503 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.26973684210526316, + "acc_stderr": 0.03611780560284898, + "acc_norm": 0.26973684210526316, + "acc_norm_stderr": 0.03611780560284898 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.28104575163398693, + "acc_stderr": 0.01818521895431809, + "acc_norm": 0.28104575163398693, + "acc_norm_stderr": 0.01818521895431809 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2375886524822695, + "acc_stderr": 0.025389512552729903, + "acc_norm": 0.2375886524822695, + "acc_norm_stderr": 0.025389512552729903 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2767857142857143, + "acc_stderr": 0.04246624336697623, + "acc_norm": 0.2767857142857143, + "acc_norm_stderr": 0.04246624336697623 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.22685185185185186, + "acc_stderr": 0.028561650102422273, + "acc_norm": 0.22685185185185186, + "acc_norm_stderr": 0.028561650102422273 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2446927374301676, + "acc_stderr": 0.014378169884098426, + "acc_norm": 0.2446927374301676, + "acc_norm_stderr": 0.014378169884098426 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + 
"acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.1875, + "acc_stderr": 0.023709788253811766, + "acc_norm": 0.1875, + "acc_norm_stderr": 0.023709788253811766 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.24489795918367346, + "acc_stderr": 0.027529637440174934, + "acc_norm": 0.24489795918367346, + "acc_norm_stderr": 0.027529637440174934 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.270042194092827, + "acc_stderr": 0.028900721906293426, + "acc_norm": 0.270042194092827, + "acc_norm_stderr": 0.028900721906293426 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.24315514993481094, + "acc_stderr": 0.010956556654417353, + "acc_norm": 0.24315514993481094, + "acc_norm_stderr": 0.010956556654417353 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.030587591351604246, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.030587591351604246 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.32727272727272727, + "acc_stderr": 0.03663974994391242, + "acc_norm": 0.32727272727272727, + "acc_norm_stderr": 0.03663974994391242 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2423500611995104, + "mc1_stderr": 0.015000674373570342, + "mc2": 0.415216441138711, + "mc2_stderr": 0.015096025074072256 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2585596221959858, + "acc_stderr": 0.01505335443896398, + "acc_norm": 0.3577331759149941, + "acc_norm_stderr": 0.01647980893574998 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + 
"harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + 
"harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "amphora/small-instruct", + "model_sha": "f88e14dc4b3b2b4f00261e77458497fac4f7a600", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/asapppppp/Llama2-ko-DPO-13B_lora_finetuning/result_2023-11-14 00:14:33.json b/asapppppp/Llama2-ko-DPO-13B_lora_finetuning/result_2023-11-14 00:14:33.json new file mode 100644 index 0000000000000000000000000000000000000000..1ffce89369745a33b64ba98798a2091ac04b9b59 --- /dev/null +++ b/asapppppp/Llama2-ko-DPO-13B_lora_finetuning/result_2023-11-14 00:14:33.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.22696245733788395, + "acc_stderr": 0.01224049153613286, + "acc_norm": 0.22696245733788395, + "acc_norm_stderr": 0.01224049153613286 + }, + "harness|ko_hellaswag|10": { + "acc": 0.2504481179047998, + "acc_stderr": 0.004323856300539177, + "acc_norm": 0.2504481179047998, + "acc_norm_stderr": 0.004323856300539177 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.3216374269005848, + "acc_stderr": 0.03582529442573122, + "acc_norm": 0.3216374269005848, + "acc_norm_stderr": 0.03582529442573122 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.17475728155339806, + "acc_stderr": 0.037601780060266196, + "acc_norm": 0.17475728155339806, + "acc_norm_stderr": 0.037601780060266196 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.23754789272030652, 
+ "acc_stderr": 0.015218733046150191, + "acc_norm": 0.23754789272030652, + "acc_norm_stderr": 0.015218733046150191 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.18518518518518517, + "acc_stderr": 0.03355677216313142, + "acc_norm": 0.18518518518518517, + "acc_norm_stderr": 0.03355677216313142 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932268, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932268 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.26382978723404255, + "acc_stderr": 0.028809989854102987, + "acc_norm": 0.26382978723404255, + "acc_norm_stderr": 0.028809989854102987 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.28313253012048195, + "acc_stderr": 0.03507295431370518, + "acc_norm": 0.28313253012048195, + "acc_norm_stderr": 0.03507295431370518 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.1864951768488746, + "acc_stderr": 0.022122439772480757, + "acc_norm": 0.1864951768488746, + "acc_norm_stderr": 0.022122439772480757 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.31390134529147984, + "acc_stderr": 0.031146796482972465, + "acc_norm": 0.31390134529147984, + "acc_norm_stderr": 0.031146796482972465 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2595419847328244, + "acc_stderr": 0.03844876139785271, + "acc_norm": 0.2595419847328244, + "acc_norm_stderr": 0.03844876139785271 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.17676767676767677, + "acc_stderr": 0.027178752639044915, + "acc_norm": 0.17676767676767677, + "acc_norm_stderr": 0.027178752639044915 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2413793103448276, + "acc_stderr": 0.03565998174135302, + "acc_norm": 0.2413793103448276, + "acc_norm_stderr": 0.03565998174135302 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 
0.21568627450980393, + "acc_stderr": 0.04092563958237654, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237654 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.21008403361344538, + "acc_stderr": 0.026461398717471874, + "acc_norm": 0.21008403361344538, + "acc_norm_stderr": 0.026461398717471874 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.20256410256410257, + "acc_stderr": 0.020377660970371393, + "acc_norm": 0.20256410256410257, + "acc_norm_stderr": 0.020377660970371393 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.18, + "acc_stderr": 0.038612291966536955, + "acc_norm": 0.18, + "acc_norm_stderr": 0.038612291966536955 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.04236511258094633, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.04236511258094633 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.15270935960591134, + "acc_stderr": 0.025308904539380627, + "acc_norm": 0.15270935960591134, + "acc_norm_stderr": 0.025308904539380627 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.1774193548387097, + "acc_stderr": 0.021732540689329276, + "acc_norm": 0.1774193548387097, + "acc_norm_stderr": 0.021732540689329276 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2905982905982906, + "acc_stderr": 0.029745048572674057, + "acc_norm": 0.2905982905982906, + "acc_norm_stderr": 0.029745048572674057 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.21509433962264152, + "acc_stderr": 0.025288394502891377, + "acc_norm": 0.21509433962264152, + "acc_norm_stderr": 0.025288394502891377 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03955932861795833, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 
0.03955932861795833 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2111111111111111, + "acc_stderr": 0.024882116857655075, + "acc_norm": 0.2111111111111111, + "acc_norm_stderr": 0.024882116857655075 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.1986754966887417, + "acc_stderr": 0.03257847384436775, + "acc_norm": 0.1986754966887417, + "acc_norm_stderr": 0.03257847384436775 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.24378109452736318, + "acc_stderr": 0.030360490154014645, + "acc_norm": 0.24378109452736318, + "acc_norm_stderr": 0.030360490154014645 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.20809248554913296, + "acc_stderr": 0.030952890217749884, + "acc_norm": 0.20809248554913296, + "acc_norm_stderr": 0.030952890217749884 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.20899470899470898, + "acc_stderr": 0.020940481565334835, + "acc_norm": 0.20899470899470898, + "acc_norm_stderr": 0.020940481565334835 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2569444444444444, + "acc_stderr": 0.03653946969442099, + "acc_norm": 0.2569444444444444, + "acc_norm_stderr": 0.03653946969442099 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.2, + "acc_stderr": 0.040201512610368445, + "acc_norm": 0.2, + "acc_norm_stderr": 0.040201512610368445 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542129, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542129 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.24855491329479767, + "acc_stderr": 0.023267528432100174, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 0.023267528432100174 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.22085889570552147, + "acc_stderr": 0.03259177392742178, + "acc_norm": 0.22085889570552147, + "acc_norm_stderr": 0.03259177392742178 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.21604938271604937, + "acc_stderr": 0.022899162918445796, + "acc_norm": 
0.21604938271604937, + "acc_norm_stderr": 0.022899162918445796 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.19689119170984457, + "acc_stderr": 0.028697873971860677, + "acc_norm": 0.19689119170984457, + "acc_norm_stderr": 0.028697873971860677 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.039994238792813365, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.039994238792813365 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.1926605504587156, + "acc_stderr": 0.016909276884936097, + "acc_norm": 0.1926605504587156, + "acc_norm_stderr": 0.016909276884936097 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.04040610178208841, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.04040610178208841 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.02392915551735129, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.02392915551735129 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2396694214876033, + "acc_stderr": 0.038968789850704164, + "acc_norm": 0.2396694214876033, + "acc_norm_stderr": 0.038968789850704164 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.17763157894736842, + "acc_stderr": 0.03110318238312338, + "acc_norm": 0.17763157894736842, + "acc_norm_stderr": 0.03110318238312338 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.25, + "acc_stderr": 0.01751781884501444, + "acc_norm": 0.25, + "acc_norm_stderr": 0.01751781884501444 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.23404255319148937, + "acc_stderr": 
0.025257861359432414, + "acc_norm": 0.23404255319148937, + "acc_norm_stderr": 0.025257861359432414 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3125, + "acc_stderr": 0.043994650575715215, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.043994650575715215 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.1527777777777778, + "acc_stderr": 0.024536326026134238, + "acc_norm": 0.1527777777777778, + "acc_norm_stderr": 0.024536326026134238 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23798882681564246, + "acc_stderr": 0.014242630070574892, + "acc_norm": 0.23798882681564246, + "acc_norm_stderr": 0.014242630070574892 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.18382352941176472, + "acc_stderr": 0.02352924218519311, + "acc_norm": 0.18382352941176472, + "acc_norm_stderr": 0.02352924218519311 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.18775510204081633, + "acc_stderr": 0.025000256039546198, + "acc_norm": 0.18775510204081633, + "acc_norm_stderr": 0.025000256039546198 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.270042194092827, + "acc_stderr": 0.028900721906293426, + "acc_norm": 0.270042194092827, + "acc_norm_stderr": 0.028900721906293426 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2457627118644068, + "acc_stderr": 0.010996156635142692, + "acc_norm": 0.2457627118644068, + "acc_norm_stderr": 0.010996156635142692 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.25, + "acc_stderr": 0.03039153369274154, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03039153369274154 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 
0.21818181818181817, + "acc_stderr": 0.03225078108306289, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03225078108306289 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 1.0, + "mc1_stderr": 0.0, + "mc2": NaN, + "mc2_stderr": NaN + }, + "harness|ko_commongen_v2|2": { + "acc": 0.24793388429752067, + "acc_stderr": 0.014846044968252247, + "acc_norm": 0.24793388429752067, + "acc_norm_stderr": 0.014846044968252247 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + 
"harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "asapppppp/Llama2-ko-DPO-13B_lora_finetuning", + "model_sha": "34a143bbc3acf9826e32df75f38f0854cd8c6970", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/asapppppp/kullm-polyglot-12.8b-v2_lora_finetuning/result_2023-11-09 05:59:46.json b/asapppppp/kullm-polyglot-12.8b-v2_lora_finetuning/result_2023-11-09 05:59:46.json new file mode 100644 index 0000000000000000000000000000000000000000..58927976d881e988965c5ddf389a08f3a51e6919 --- /dev/null +++ b/asapppppp/kullm-polyglot-12.8b-v2_lora_finetuning/result_2023-11-09 05:59:46.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.189419795221843, + "acc_stderr": 0.011450705115910767, 
+ "acc_norm": 0.23720136518771331, + "acc_norm_stderr": 0.012430399829260847 + }, + "harness|ko_hellaswag|10": { + "acc": 0.25423222465644296, + "acc_stderr": 0.004345388614520031, + "acc_norm": 0.2634933280223063, + "acc_norm_stderr": 0.004396273173717444 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.21052631578947367, + "acc_stderr": 0.0312678171466318, + "acc_norm": 0.21052631578947367, + "acc_norm_stderr": 0.0312678171466318 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2524271844660194, + "acc_stderr": 0.04301250399690877, + "acc_norm": 0.2524271844660194, + "acc_norm_stderr": 0.04301250399690877 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.28735632183908044, + "acc_stderr": 0.0161824107306827, + "acc_norm": 0.28735632183908044, + "acc_norm_stderr": 0.0161824107306827 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2518518518518518, + "acc_stderr": 0.03749850709174021, + "acc_norm": 0.2518518518518518, + "acc_norm_stderr": 0.03749850709174021 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768077, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768077 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.32340425531914896, + "acc_stderr": 0.03057944277361034, + "acc_norm": 0.32340425531914896, + "acc_norm_stderr": 0.03057944277361034 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3192771084337349, + "acc_stderr": 0.03629335329947859, + "acc_norm": 0.3192771084337349, + "acc_norm_stderr": 0.03629335329947859 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2733118971061093, + "acc_stderr": 0.025311765975426115, + "acc_norm": 0.2733118971061093, + "acc_norm_stderr": 0.025311765975426115 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.37668161434977576, + "acc_stderr": 0.03252113489929189, + "acc_norm": 0.37668161434977576, + "acc_norm_stderr": 0.03252113489929189 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.22900763358778625, + "acc_stderr": 
0.036853466317118506, + "acc_norm": 0.22900763358778625, + "acc_norm_stderr": 0.036853466317118506 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768077, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768077 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.21717171717171718, + "acc_stderr": 0.02937661648494563, + "acc_norm": 0.21717171717171718, + "acc_norm_stderr": 0.02937661648494563 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2206896551724138, + "acc_stderr": 0.03455930201924811, + "acc_norm": 0.2206896551724138, + "acc_norm_stderr": 0.03455930201924811 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.19607843137254902, + "acc_stderr": 0.03950581861179962, + "acc_norm": 0.19607843137254902, + "acc_norm_stderr": 0.03950581861179962 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.23109243697478993, + "acc_stderr": 0.027381406927868973, + "acc_norm": 0.23109243697478993, + "acc_norm_stderr": 0.027381406927868973 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2205128205128205, + "acc_stderr": 0.021020672680827912, + "acc_norm": 0.2205128205128205, + "acc_norm_stderr": 0.021020672680827912 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.044143436668549335, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.044143436668549335 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.270935960591133, + "acc_stderr": 0.031270907132976984, + "acc_norm": 0.270935960591133, + "acc_norm_stderr": 0.031270907132976984 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 
0.25483870967741934, + "acc_stderr": 0.024790118459332208, + "acc_norm": 0.25483870967741934, + "acc_norm_stderr": 0.024790118459332208 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2564102564102564, + "acc_stderr": 0.02860595370200424, + "acc_norm": 0.2564102564102564, + "acc_norm_stderr": 0.02860595370200424 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2679245283018868, + "acc_stderr": 0.027257260322494845, + "acc_norm": 0.2679245283018868, + "acc_norm_stderr": 0.027257260322494845 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.34545454545454546, + "acc_stderr": 0.04554619617541054, + "acc_norm": 0.34545454545454546, + "acc_norm_stderr": 0.04554619617541054 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.02684205787383371, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.02684205787383371 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.1986754966887417, + "acc_stderr": 0.03257847384436776, + "acc_norm": 0.1986754966887417, + "acc_norm_stderr": 0.03257847384436776 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.23880597014925373, + "acc_stderr": 0.030147775935409224, + "acc_norm": 0.23880597014925373, + "acc_norm_stderr": 0.030147775935409224 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.20809248554913296, + "acc_stderr": 0.030952890217749895, + "acc_norm": 0.20809248554913296, + "acc_norm_stderr": 0.030952890217749895 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2566137566137566, + "acc_stderr": 0.022494510767503154, + "acc_norm": 0.2566137566137566, + "acc_norm_stderr": 0.022494510767503154 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.03476590104304134, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.03476590104304134 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 
0.040936018074033256 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.24566473988439305, + "acc_stderr": 0.02317629820399201, + "acc_norm": 0.24566473988439305, + "acc_norm_stderr": 0.02317629820399201 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.24539877300613497, + "acc_stderr": 0.03380939813943353, + "acc_norm": 0.24539877300613497, + "acc_norm_stderr": 0.03380939813943353 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2654320987654321, + "acc_stderr": 0.02456922360046085, + "acc_norm": 0.2654320987654321, + "acc_norm_stderr": 0.02456922360046085 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.20725388601036268, + "acc_stderr": 0.029252823291803627, + "acc_norm": 0.20725388601036268, + "acc_norm_stderr": 0.029252823291803627 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.042270544512321984, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.042270544512321984 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.23669724770642203, + "acc_stderr": 0.018224078117299095, + "acc_norm": 0.23669724770642203, + "acc_norm_stderr": 0.018224078117299095 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.1984126984126984, + "acc_stderr": 0.03567016675276862, + "acc_norm": 0.1984126984126984, + "acc_norm_stderr": 0.03567016675276862 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.22875816993464052, + "acc_stderr": 0.02405102973991225, + "acc_norm": 0.22875816993464052, + "acc_norm_stderr": 0.02405102973991225 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + 
"acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.24793388429752067, + "acc_stderr": 0.039418975265163025, + "acc_norm": 0.24793388429752067, + "acc_norm_stderr": 0.039418975265163025 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.19078947368421054, + "acc_stderr": 0.031975658210325, + "acc_norm": 0.19078947368421054, + "acc_norm_stderr": 0.031975658210325 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2565359477124183, + "acc_stderr": 0.017667841612378984, + "acc_norm": 0.2565359477124183, + "acc_norm_stderr": 0.017667841612378984 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2553191489361702, + "acc_stderr": 0.02601199293090201, + "acc_norm": 0.2553191489361702, + "acc_norm_stderr": 0.02601199293090201 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.04287858751340456, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.04287858751340456 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.16203703703703703, + "acc_stderr": 0.025130453652268455, + "acc_norm": 0.16203703703703703, + "acc_norm_stderr": 0.025130453652268455 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.15, + "acc_stderr": 0.0358870281282637, + "acc_norm": 0.15, + "acc_norm_stderr": 0.0358870281282637 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816507, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816507 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.20220588235294118, + "acc_stderr": 0.02439819298665492, + "acc_norm": 0.20220588235294118, + "acc_norm_stderr": 0.02439819298665492 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.17142857142857143, + 
"acc_stderr": 0.024127463462650146, + "acc_norm": 0.17142857142857143, + "acc_norm_stderr": 0.024127463462650146 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2616033755274262, + "acc_stderr": 0.028609516716994934, + "acc_norm": 0.2616033755274262, + "acc_norm_stderr": 0.028609516716994934 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2392438070404172, + "acc_stderr": 0.010896123652676653, + "acc_norm": 0.2392438070404172, + "acc_norm_stderr": 0.010896123652676653 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.03058759135160425, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.03058759135160425 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.24848484848484848, + "acc_stderr": 0.03374402644139404, + "acc_norm": 0.24848484848484848, + "acc_norm_stderr": 0.03374402644139404 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.21909424724602203, + "mc1_stderr": 0.014480038578757445, + "mc2": 0.4671081145098644, + "mc2_stderr": 0.01682496498545748 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.0743801652892562, + "acc_stderr": 0.009021104510906089, + "acc_norm": 0.23258559622195984, + "acc_norm_stderr": 0.01452516918241648 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + 
"harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + 
"model_name": "asapppppp/kullm-polyglot-12.8b-v2_lora_finetuning", + "model_sha": "1902b727d43126031c1d5a8a28727e6427db578c", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/asapppppp/polyglot_12.8B_lora_finetuning/result_2023-11-06 06:06:47.json b/asapppppp/polyglot_12.8B_lora_finetuning/result_2023-11-06 06:06:47.json new file mode 100644 index 0000000000000000000000000000000000000000..d8545173e16ce6bf5c2e39ba8a349d54b7dcd08f --- /dev/null +++ b/asapppppp/polyglot_12.8B_lora_finetuning/result_2023-11-06 06:06:47.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.20563139931740615, + "acc_stderr": 0.011810745260742574, + "acc_norm": 0.23378839590443687, + "acc_norm_stderr": 0.012368225378507135 + }, + "harness|ko_hellaswag|10": { + "acc": 0.25632344154550885, + "acc_stderr": 0.004357101984278613, + "acc_norm": 0.2621987651862179, + "acc_norm_stderr": 0.004389312748012148 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.17543859649122806, + "acc_stderr": 0.029170885500727665, + "acc_norm": 0.17543859649122806, + "acc_norm_stderr": 0.029170885500727665 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.33980582524271846, + "acc_stderr": 0.046897659372781335, + "acc_norm": 0.33980582524271846, + "acc_norm_stderr": 0.046897659372781335 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.21328224776500637, + "acc_stderr": 0.014648172749593515, + "acc_norm": 0.21328224776500637, + "acc_norm_stderr": 0.014648172749593515 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.24444444444444444, + "acc_stderr": 0.03712537833614865, + "acc_norm": 0.24444444444444444, + "acc_norm_stderr": 0.03712537833614865 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + 
"harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2127659574468085, + "acc_stderr": 0.026754391348039783, + "acc_norm": 0.2127659574468085, + "acc_norm_stderr": 0.026754391348039783 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.18674698795180722, + "acc_stderr": 0.030338749144500576, + "acc_norm": 0.18674698795180722, + "acc_norm_stderr": 0.030338749144500576 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.24758842443729903, + "acc_stderr": 0.024513879973621967, + "acc_norm": 0.24758842443729903, + "acc_norm_stderr": 0.024513879973621967 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.14349775784753363, + "acc_stderr": 0.02352937126961819, + "acc_norm": 0.14349775784753363, + "acc_norm_stderr": 0.02352937126961819 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2900763358778626, + "acc_stderr": 0.03980066246467765, + "acc_norm": 0.2900763358778626, + "acc_norm_stderr": 0.03980066246467765 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3282828282828283, + "acc_stderr": 0.03345678422756775, + "acc_norm": 0.3282828282828283, + "acc_norm_stderr": 0.03345678422756775 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2413793103448276, + "acc_stderr": 0.03565998174135302, + "acc_norm": 0.2413793103448276, + "acc_norm_stderr": 0.03565998174135302 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3627450980392157, + "acc_stderr": 0.04784060704105653, + "acc_norm": 0.3627450980392157, + "acc_norm_stderr": 0.04784060704105653 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3487394957983193, + "acc_stderr": 0.030956636328566548, + "acc_norm": 0.3487394957983193, + "acc_norm_stderr": 0.030956636328566548 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.358974358974359, + "acc_stderr": 0.024321738484602364, + "acc_norm": 
0.358974358974359, + "acc_norm_stderr": 0.024321738484602364 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.18, + "acc_stderr": 0.03861229196653694, + "acc_norm": 0.18, + "acc_norm_stderr": 0.03861229196653694 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036846, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036846 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.04236511258094631, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.04236511258094631 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.28078817733990147, + "acc_stderr": 0.0316185633535861, + "acc_norm": 0.28078817733990147, + "acc_norm_stderr": 0.0316185633535861 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3193548387096774, + "acc_stderr": 0.026522709674667768, + "acc_norm": 0.3193548387096774, + "acc_norm_stderr": 0.026522709674667768 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.20512820512820512, + "acc_stderr": 0.026453508054040332, + "acc_norm": 0.20512820512820512, + "acc_norm_stderr": 0.026453508054040332 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.30943396226415093, + "acc_stderr": 0.028450154794118627, + "acc_norm": 0.30943396226415093, + "acc_norm_stderr": 0.028450154794118627 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.24545454545454545, + "acc_stderr": 0.041220665028782834, + "acc_norm": 0.24545454545454545, + "acc_norm_stderr": 0.041220665028782834 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.026962424325073838, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.026962424325073838 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3443708609271523, + "acc_stderr": 0.03879687024073327, + "acc_norm": 0.3443708609271523, + "acc_norm_stderr": 0.03879687024073327 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.23880597014925373, + "acc_stderr": 
0.030147775935409224, + "acc_norm": 0.23880597014925373, + "acc_norm_stderr": 0.030147775935409224 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3236994219653179, + "acc_stderr": 0.035676037996391706, + "acc_norm": 0.3236994219653179, + "acc_norm_stderr": 0.035676037996391706 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2830687830687831, + "acc_stderr": 0.023201392938194978, + "acc_norm": 0.2830687830687831, + "acc_norm_stderr": 0.023201392938194978 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2638888888888889, + "acc_stderr": 0.03685651095897532, + "acc_norm": 0.2638888888888889, + "acc_norm_stderr": 0.03685651095897532 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.23699421965317918, + "acc_stderr": 0.022894082489925992, + "acc_norm": 0.23699421965317918, + "acc_norm_stderr": 0.022894082489925992 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2822085889570552, + "acc_stderr": 0.03536117886664743, + "acc_norm": 0.2822085889570552, + "acc_norm_stderr": 0.03536117886664743 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.22530864197530864, + "acc_stderr": 0.02324620264781975, + "acc_norm": 0.22530864197530864, + "acc_norm_stderr": 0.02324620264781975 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.3471502590673575, + "acc_stderr": 0.03435696168361355, + "acc_norm": 0.3471502590673575, + "acc_norm_stderr": 0.03435696168361355 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + 
"acc_stderr": 0.03999423879281335, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.03999423879281335 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3431192660550459, + "acc_stderr": 0.020354777736086037, + "acc_norm": 0.3431192660550459, + "acc_norm_stderr": 0.020354777736086037 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3412698412698413, + "acc_stderr": 0.04240799327574924, + "acc_norm": 0.3412698412698413, + "acc_norm_stderr": 0.04240799327574924 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.025829163272757482, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.025829163272757482 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.1322314049586777, + "acc_stderr": 0.030922788320445805, + "acc_norm": 0.1322314049586777, + "acc_norm_stderr": 0.030922788320445805 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.34210526315789475, + "acc_stderr": 0.03860731599316091, + "acc_norm": 0.34210526315789475, + "acc_norm_stderr": 0.03860731599316091 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.016819028375736393, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.016819028375736393 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.23049645390070922, + "acc_stderr": 0.025123739226872405, + "acc_norm": 0.23049645390070922, + "acc_norm_stderr": 0.025123739226872405 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.33035714285714285, + "acc_stderr": 0.04464285714285712, + "acc_norm": 0.33035714285714285, + "acc_norm_stderr": 0.04464285714285712 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.03400603625538272, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.03400603625538272 + }, + 
"harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2569832402234637, + "acc_stderr": 0.014614465821966342, + "acc_norm": 0.2569832402234637, + "acc_norm_stderr": 0.014614465821966342 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.19, + "acc_stderr": 0.03942772444036624, + "acc_norm": 0.19, + "acc_norm_stderr": 0.03942772444036624 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.40441176470588236, + "acc_stderr": 0.029812630701569743, + "acc_norm": 0.40441176470588236, + "acc_norm_stderr": 0.029812630701569743 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.24489795918367346, + "acc_stderr": 0.027529637440174906, + "acc_norm": 0.24489795918367346, + "acc_norm_stderr": 0.027529637440174906 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2320675105485232, + "acc_stderr": 0.027479744550808507, + "acc_norm": 0.2320675105485232, + "acc_norm_stderr": 0.027479744550808507 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.23468057366362452, + "acc_stderr": 0.010824026872449353, + "acc_norm": 0.23468057366362452, + "acc_norm_stderr": 0.010824026872449353 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.03096451792692339, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.03096451792692339 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.23030303030303031, + "acc_stderr": 0.03287666758603489, + "acc_norm": 0.23030303030303031, + "acc_norm_stderr": 0.03287666758603489 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.22031823745410037, + "mc1_stderr": 0.014509045171487288, + "mc2": 0.4700873980893058, + "mc2_stderr": 0.016808897693551126 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.0743801652892562, + "acc_stderr": 0.00902110451090609, + "acc_norm": 
0.2396694214876033, + "acc_norm_stderr": 0.014676495332267255 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + 
"harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "asapppppp/polyglot_12.8B_lora_finetuning", + "model_sha": "c47154b0514ec6e194274ed6e512acb20aededdf", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/automerger/YamshadowExperiment28-7B/result_2024-05-30 09:32:40.json b/automerger/YamshadowExperiment28-7B/result_2024-05-30 09:32:40.json new file mode 100644 index 0000000000000000000000000000000000000000..f21e8cfe2ffb5b8c18006f74b1db13c1e27d9420 --- /dev/null +++ b/automerger/YamshadowExperiment28-7B/result_2024-05-30 09:32:40.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.39419795221843, + "acc_stderr": 0.014280522667467323, + "acc_norm": 0.45307167235494883, + "acc_norm_stderr": 0.014546892052005626 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3959370643298148, + "acc_stderr": 0.0048805154313231605, + "acc_norm": 0.5264887472615017, + "acc_norm_stderr": 0.004982774293927769 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.49707602339181284, + "acc_stderr": 0.03834759370936839, + "acc_norm": 
0.49707602339181284, + "acc_norm_stderr": 0.03834759370936839 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6116504854368932, + "acc_stderr": 0.04825729337356389, + "acc_norm": 0.6116504854368932, + "acc_norm_stderr": 0.04825729337356389 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.46360153256704983, + "acc_stderr": 0.01783252407959326, + "acc_norm": 0.46360153256704983, + "acc_norm_stderr": 0.01783252407959326 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.04171654161354543, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.04171654161354543 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4085106382978723, + "acc_stderr": 0.03213418026701576, + "acc_norm": 0.4085106382978723, + "acc_norm_stderr": 0.03213418026701576 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3614457831325301, + "acc_stderr": 0.03740059382029319, + "acc_norm": 0.3614457831325301, + "acc_norm_stderr": 0.03740059382029319 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.42443729903536975, + "acc_stderr": 0.028071928247946205, + "acc_norm": 0.42443729903536975, + "acc_norm_stderr": 0.028071928247946205 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4304932735426009, + "acc_stderr": 0.033231973029429394, + "acc_norm": 0.4304932735426009, + "acc_norm_stderr": 0.033231973029429394 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.44274809160305345, + "acc_stderr": 0.043564472026650695, + "acc_norm": 0.44274809160305345, + "acc_norm_stderr": 0.043564472026650695 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5606060606060606, + "acc_stderr": 0.0353608594752948, + "acc_norm": 
0.5606060606060606, + "acc_norm_stderr": 0.0353608594752948 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.46206896551724136, + "acc_stderr": 0.041546596717075474, + "acc_norm": 0.46206896551724136, + "acc_norm_stderr": 0.041546596717075474 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.04158307533083286, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.04158307533083286 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5168067226890757, + "acc_stderr": 0.03246013680375308, + "acc_norm": 0.5168067226890757, + "acc_norm_stderr": 0.03246013680375308 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4846153846153846, + "acc_stderr": 0.02533900301010653, + "acc_norm": 0.4846153846153846, + "acc_norm_stderr": 0.02533900301010653 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.64, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.64, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5277777777777778, + "acc_stderr": 0.04826217294139894, + "acc_norm": 0.5277777777777778, + "acc_norm_stderr": 0.04826217294139894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3694581280788177, + "acc_stderr": 0.03395970381998575, + "acc_norm": 0.3694581280788177, + "acc_norm_stderr": 0.03395970381998575 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.43870967741935485, + "acc_stderr": 0.028229497320317216, + "acc_norm": 0.43870967741935485, + "acc_norm_stderr": 0.028229497320317216 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7478632478632479, + "acc_stderr": 0.02844796547623102, + "acc_norm": 0.7478632478632479, + "acc_norm_stderr": 0.02844796547623102 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4528301886792453, + 
"acc_stderr": 0.03063562795796183, + "acc_norm": 0.4528301886792453, + "acc_norm_stderr": 0.03063562795796183 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4909090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.4909090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.029116617606083015, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.029116617606083015 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2582781456953642, + "acc_stderr": 0.035737053147634576, + "acc_norm": 0.2582781456953642, + "acc_norm_stderr": 0.035737053147634576 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5621890547263682, + "acc_stderr": 0.0350808011219984, + "acc_norm": 0.5621890547263682, + "acc_norm_stderr": 0.0350808011219984 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3988439306358382, + "acc_stderr": 0.03733626655383509, + "acc_norm": 0.3988439306358382, + "acc_norm_stderr": 0.03733626655383509 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.373015873015873, + "acc_stderr": 0.02490699045899257, + "acc_norm": 0.373015873015873, + "acc_norm_stderr": 0.02490699045899257 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3541666666666667, + "acc_stderr": 0.039994111357535424, + "acc_norm": 0.3541666666666667, + "acc_norm_stderr": 0.039994111357535424 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.49421965317919075, + "acc_stderr": 0.026917296179149116, + "acc_norm": 0.49421965317919075, + "acc_norm_stderr": 0.026917296179149116 + }, + "harness|ko_mmlu_logical_fallacies|5": { + 
"acc": 0.4723926380368098, + "acc_stderr": 0.039223782906109894, + "acc_norm": 0.4723926380368098, + "acc_norm_stderr": 0.039223782906109894 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.027744313443376543, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.027744313443376543 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.48704663212435234, + "acc_stderr": 0.03607228061047749, + "acc_norm": 0.48704663212435234, + "acc_norm_stderr": 0.03607228061047749 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.044346007015849245, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.044346007015849245 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.4990825688073395, + "acc_stderr": 0.021437287056051215, + "acc_norm": 0.4990825688073395, + "acc_norm_stderr": 0.021437287056051215 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.0442626668137991, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.0442626668137991 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4673202614379085, + "acc_stderr": 0.028568699752225882, + "acc_norm": 0.4673202614379085, + "acc_norm_stderr": 0.028568699752225882 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5950413223140496, + "acc_stderr": 0.044811377559424694, + "acc_norm": 0.5950413223140496, + "acc_norm_stderr": 0.044811377559424694 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40131578947368424, + "acc_stderr": 0.03988903703336285, + "acc_norm": 0.40131578947368424, + "acc_norm_stderr": 0.03988903703336285 + }, + 
"harness|ko_mmlu_professional_psychology|5": { + "acc": 0.39215686274509803, + "acc_stderr": 0.019751726508762633, + "acc_norm": 0.39215686274509803, + "acc_norm_stderr": 0.019751726508762633 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.35815602836879434, + "acc_stderr": 0.028602085862759415, + "acc_norm": 0.35815602836879434, + "acc_norm_stderr": 0.028602085862759415 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.36607142857142855, + "acc_stderr": 0.0457237235873743, + "acc_norm": 0.36607142857142855, + "acc_norm_stderr": 0.0457237235873743 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.033247089118091176, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.033247089118091176 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2770949720670391, + "acc_stderr": 0.014968772435812145, + "acc_norm": 0.2770949720670391, + "acc_norm_stderr": 0.014968772435812145 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.67, + "acc_stderr": 0.047258156262526094, + "acc_norm": 0.67, + "acc_norm_stderr": 0.047258156262526094 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.33088235294117646, + "acc_stderr": 0.028582709753898438, + "acc_norm": 0.33088235294117646, + "acc_norm_stderr": 0.028582709753898438 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5346938775510204, + "acc_stderr": 0.03193207024425314, + "acc_norm": 0.5346938775510204, + "acc_norm_stderr": 0.03193207024425314 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5822784810126582, + "acc_stderr": 0.032103530322412685, + "acc_norm": 0.5822784810126582, + "acc_norm_stderr": 0.032103530322412685 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.34224250325945244, + "acc_stderr": 
0.01211793999870587, + "acc_norm": 0.34224250325945244, + "acc_norm_stderr": 0.01211793999870587 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.034602283272391704, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.034602283272391704 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.42424242424242425, + "acc_stderr": 0.038592681420702615, + "acc_norm": 0.42424242424242425, + "acc_norm_stderr": 0.038592681420702615 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.4320685434516524, + "mc1_stderr": 0.01734120239498825, + "mc2": 0.6078403846843102, + "mc2_stderr": 0.01625612695618396 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4155844155844156, + "acc_stderr": 0.01694358631307657, + "acc_norm": 0.42621015348288077, + "acc_norm_stderr": 0.01700212260948926 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + 
"harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "01-ai/Yi-1.5-9B-32K", + "model_sha": "76972ed8aacba1fd14f78e6f8d347f087f8b6800", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/beomi/KoAlpaca-KoRWKV-1.5B/result_2023-10-31 18:36:44.json 
b/beomi/KoAlpaca-KoRWKV-1.5B/result_2023-10-31 18:36:44.json new file mode 100644 index 0000000000000000000000000000000000000000..f2efb421117eeb48ccf558cea1c1e0489f9f9690 --- /dev/null +++ b/beomi/KoAlpaca-KoRWKV-1.5B/result_2023-10-31 18:36:44.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.19965870307167236, + "acc_stderr": 0.011681625756888683, + "acc_norm": 0.26023890784982934, + "acc_norm_stderr": 0.012821930225112554 + }, + "harness|ko_hellaswag|10": { + "acc": 0.2802230631348337, + "acc_stderr": 0.0044819026375056545, + "acc_norm": 0.30850428201553476, + "acc_norm_stderr": 0.004609320024893897 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.3216374269005848, + "acc_stderr": 0.03582529442573122, + "acc_norm": 0.3216374269005848, + "acc_norm_stderr": 0.03582529442573122 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.17475728155339806, + "acc_stderr": 0.037601780060266196, + "acc_norm": 0.17475728155339806, + "acc_norm_stderr": 0.037601780060266196 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2388250319284802, + "acc_stderr": 0.015246803197398696, + "acc_norm": 0.2388250319284802, + "acc_norm_stderr": 0.015246803197398696 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.18518518518518517, + "acc_stderr": 0.03355677216313142, + "acc_norm": 0.18518518518518517, + "acc_norm_stderr": 0.03355677216313142 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932268, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932268 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.26382978723404255, + "acc_stderr": 0.028809989854102987, + "acc_norm": 0.26382978723404255, + "acc_norm_stderr": 0.028809989854102987 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.28313253012048195, + "acc_stderr": 0.03507295431370518, + "acc_norm": 0.28313253012048195, + "acc_norm_stderr": 0.03507295431370518 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.1864951768488746, + 
"acc_stderr": 0.022122439772480757, + "acc_norm": 0.1864951768488746, + "acc_norm_stderr": 0.022122439772480757 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.31390134529147984, + "acc_stderr": 0.031146796482972465, + "acc_norm": 0.31390134529147984, + "acc_norm_stderr": 0.031146796482972465 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2595419847328244, + "acc_stderr": 0.03844876139785271, + "acc_norm": 0.2595419847328244, + "acc_norm_stderr": 0.03844876139785271 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.17676767676767677, + "acc_stderr": 0.027178752639044915, + "acc_norm": 0.17676767676767677, + "acc_norm_stderr": 0.027178752639044915 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2413793103448276, + "acc_stderr": 0.03565998174135302, + "acc_norm": 0.2413793103448276, + "acc_norm_stderr": 0.03565998174135302 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237654, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237654 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.21008403361344538, + "acc_stderr": 0.026461398717471874, + "acc_norm": 0.21008403361344538, + "acc_norm_stderr": 0.026461398717471874 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.20256410256410257, + "acc_stderr": 0.020377660970371393, + "acc_norm": 0.20256410256410257, + "acc_norm_stderr": 0.020377660970371393 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816505, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816505 + }, + "harness|ko_mmlu_jurisprudence|5": { + 
"acc": 0.25925925925925924, + "acc_stderr": 0.04236511258094633, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.04236511258094633 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.1477832512315271, + "acc_stderr": 0.024969621333521274, + "acc_norm": 0.1477832512315271, + "acc_norm_stderr": 0.024969621333521274 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.1774193548387097, + "acc_stderr": 0.021732540689329276, + "acc_norm": 0.1774193548387097, + "acc_norm_stderr": 0.021732540689329276 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2905982905982906, + "acc_stderr": 0.029745048572674057, + "acc_norm": 0.2905982905982906, + "acc_norm_stderr": 0.029745048572674057 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.21509433962264152, + "acc_stderr": 0.025288394502891377, + "acc_norm": 0.21509433962264152, + "acc_norm_stderr": 0.025288394502891377 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03955932861795833, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03955932861795833 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2111111111111111, + "acc_stderr": 0.024882116857655075, + "acc_norm": 0.2111111111111111, + "acc_norm_stderr": 0.024882116857655075 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2052980132450331, + "acc_stderr": 0.03297986648473835, + "acc_norm": 0.2052980132450331, + "acc_norm_stderr": 0.03297986648473835 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.24875621890547264, + "acc_stderr": 0.030567675938916714, + "acc_norm": 0.24875621890547264, + "acc_norm_stderr": 0.030567675938916714 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.20809248554913296, + "acc_stderr": 0.030952890217749884, + "acc_norm": 0.20809248554913296, + "acc_norm_stderr": 0.030952890217749884 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2275132275132275, + "acc_stderr": 0.021591269407823778, + "acc_norm": 
0.2275132275132275, + "acc_norm_stderr": 0.021591269407823778 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2569444444444444, + "acc_stderr": 0.03653946969442099, + "acc_norm": 0.2569444444444444, + "acc_norm_stderr": 0.03653946969442099 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.2, + "acc_stderr": 0.040201512610368445, + "acc_norm": 0.2, + "acc_norm_stderr": 0.040201512610368445 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.24855491329479767, + "acc_stderr": 0.023267528432100174, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 0.023267528432100174 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.22085889570552147, + "acc_stderr": 0.03259177392742178, + "acc_norm": 0.22085889570552147, + "acc_norm_stderr": 0.03259177392742178 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.21604938271604937, + "acc_stderr": 0.022899162918445796, + "acc_norm": 0.21604938271604937, + "acc_norm_stderr": 0.022899162918445796 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.19689119170984457, + "acc_stderr": 0.028697873971860677, + "acc_norm": 0.19689119170984457, + "acc_norm_stderr": 0.028697873971860677 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.039994238792813365, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.039994238792813365 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.1944954128440367, + "acc_stderr": 0.016970289090458043, + "acc_norm": 0.1944954128440367, + "acc_norm_stderr": 0.016970289090458043 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2857142857142857, + "acc_stderr": 
0.04040610178208841, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.04040610178208841 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.22875816993464052, + "acc_stderr": 0.024051029739912258, + "acc_norm": 0.22875816993464052, + "acc_norm_stderr": 0.024051029739912258 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2396694214876033, + "acc_stderr": 0.038968789850704164, + "acc_norm": 0.2396694214876033, + "acc_norm_stderr": 0.038968789850704164 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.17763157894736842, + "acc_stderr": 0.03110318238312338, + "acc_norm": 0.17763157894736842, + "acc_norm_stderr": 0.03110318238312338 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.25, + "acc_stderr": 0.01751781884501444, + "acc_norm": 0.25, + "acc_norm_stderr": 0.01751781884501444 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.23404255319148937, + "acc_stderr": 0.025257861359432414, + "acc_norm": 0.23404255319148937, + "acc_norm_stderr": 0.025257861359432414 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3125, + "acc_stderr": 0.043994650575715215, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.043994650575715215 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.1527777777777778, + "acc_stderr": 0.024536326026134238, + "acc_norm": 0.1527777777777778, + "acc_norm_stderr": 0.024536326026134238 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23798882681564246, + "acc_stderr": 0.014242630070574892, + "acc_norm": 0.23798882681564246, + "acc_norm_stderr": 0.014242630070574892 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.24, + "acc_stderr": 
0.04292346959909284, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909284 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.1875, + "acc_stderr": 0.023709788253811766, + "acc_norm": 0.1875, + "acc_norm_stderr": 0.023709788253811766 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.18775510204081633, + "acc_stderr": 0.025000256039546195, + "acc_norm": 0.18775510204081633, + "acc_norm_stderr": 0.025000256039546195 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.26582278481012656, + "acc_stderr": 0.02875679962965834, + "acc_norm": 0.26582278481012656, + "acc_norm_stderr": 0.02875679962965834 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.24641460234680573, + "acc_stderr": 0.01100597139992723, + "acc_norm": 0.24641460234680573, + "acc_norm_stderr": 0.01100597139992723 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.25, + "acc_stderr": 0.03039153369274154, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03039153369274154 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03225078108306289, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03225078108306289 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.26805385556915545, + "mc1_stderr": 0.015506204722834564, + "mc2": 0.4765435436005545, + "mc2_stderr": 0.016302547246046926 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2809917355371901, + "acc_stderr": 0.015453559655458277, + "acc_norm": 0.5053128689492326, + "acc_norm_stderr": 0.01718938362722971 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + 
"harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + 
"harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "beomi/KoAlpaca-KoRWKV-1.5B", + "model_sha": "d463395c698beef06743bfa019d27aa185f8a3af", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/beomi/KoAlpaca-KoRWKV-6B/result_2023-10-31 18:35:44.json b/beomi/KoAlpaca-KoRWKV-6B/result_2023-10-31 18:35:44.json new file mode 100644 index 0000000000000000000000000000000000000000..68405dba8691d02da58fdaa3f6afa4881445b1c9 --- /dev/null +++ b/beomi/KoAlpaca-KoRWKV-6B/result_2023-10-31 18:35:44.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2764505119453925, + "acc_stderr": 0.013069662474252428, + "acc_norm": 0.3191126279863481, + "acc_norm_stderr": 0.013621696119173304 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3461461860187214, + "acc_stderr": 0.00474768200349145, + "acc_norm": 0.4402509460266879, + "acc_norm_stderr": 0.004954026775425764 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.2982456140350877, + "acc_stderr": 0.03508771929824565, + "acc_norm": 0.2982456140350877, + "acc_norm_stderr": 0.03508771929824565 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.1941747572815534, + "acc_stderr": 0.03916667762822584, + "acc_norm": 0.1941747572815534, + "acc_norm_stderr": 0.03916667762822584 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.27586206896551724, + "acc_stderr": 0.01598281477469563, + "acc_norm": 0.27586206896551724, + "acc_norm_stderr": 0.01598281477469563 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3333333333333333, + "acc_stderr": 
0.04072314811876837, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04072314811876837 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.20851063829787234, + "acc_stderr": 0.026556982117838746, + "acc_norm": 0.20851063829787234, + "acc_norm_stderr": 0.026556982117838746 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.19879518072289157, + "acc_stderr": 0.03106939026078942, + "acc_norm": 0.19879518072289157, + "acc_norm_stderr": 0.03106939026078942 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2958199356913183, + "acc_stderr": 0.025922371788818777, + "acc_norm": 0.2958199356913183, + "acc_norm_stderr": 0.025922371788818777 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.20179372197309417, + "acc_stderr": 0.02693611191280227, + "acc_norm": 0.20179372197309417, + "acc_norm_stderr": 0.02693611191280227 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2595419847328244, + "acc_stderr": 0.0384487613978527, + "acc_norm": 0.2595419847328244, + "acc_norm_stderr": 0.0384487613978527 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.25252525252525254, + "acc_stderr": 0.030954055470365907, + "acc_norm": 0.25252525252525254, + "acc_norm_stderr": 0.030954055470365907 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2896551724137931, + "acc_stderr": 0.037800192304380156, + "acc_norm": 0.2896551724137931, + "acc_norm_stderr": 0.037800192304380156 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.04336432707993179, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.04336432707993179 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 
0.21008403361344538, + "acc_stderr": 0.026461398717471874, + "acc_norm": 0.21008403361344538, + "acc_norm_stderr": 0.026461398717471874 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2128205128205128, + "acc_stderr": 0.020752423722128002, + "acc_norm": 0.2128205128205128, + "acc_norm_stderr": 0.020752423722128002 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.25, + "acc_stderr": 0.04186091791394607, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04186091791394607 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2512315270935961, + "acc_stderr": 0.030516530732694433, + "acc_norm": 0.2512315270935961, + "acc_norm_stderr": 0.030516530732694433 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.24838709677419354, + "acc_stderr": 0.024580028921481006, + "acc_norm": 0.24838709677419354, + "acc_norm_stderr": 0.024580028921481006 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.25213675213675213, + "acc_stderr": 0.02844796547623102, + "acc_norm": 0.25213675213675213, + "acc_norm_stderr": 0.02844796547623102 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.22264150943396227, + "acc_stderr": 0.025604233470899105, + "acc_norm": 0.22264150943396227, + "acc_norm_stderr": 0.025604233470899105 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.20909090909090908, + "acc_stderr": 0.03895091015724135, + "acc_norm": 0.20909090909090908, + "acc_norm_stderr": 0.03895091015724135 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25555555555555554, + "acc_stderr": 0.026593939101844065, + "acc_norm": 0.25555555555555554, + "acc_norm_stderr": 0.026593939101844065 + }, + 
"harness|ko_mmlu_high_school_physics|5": { + "acc": 0.17218543046357615, + "acc_stderr": 0.030826136961962382, + "acc_norm": 0.17218543046357615, + "acc_norm_stderr": 0.030826136961962382 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.24875621890547264, + "acc_stderr": 0.030567675938916714, + "acc_norm": 0.24875621890547264, + "acc_norm_stderr": 0.030567675938916714 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.24855491329479767, + "acc_stderr": 0.03295304696818317, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 0.03295304696818317 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.24603174603174602, + "acc_stderr": 0.02218203720294837, + "acc_norm": 0.24603174603174602, + "acc_norm_stderr": 0.02218203720294837 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2569444444444444, + "acc_stderr": 0.03653946969442099, + "acc_norm": 0.2569444444444444, + "acc_norm_stderr": 0.03653946969442099 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.2, + "acc_stderr": 0.040201512610368445, + "acc_norm": 0.2, + "acc_norm_stderr": 0.040201512610368445 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2861271676300578, + "acc_stderr": 0.024332146779134128, + "acc_norm": 0.2861271676300578, + "acc_norm_stderr": 0.024332146779134128 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3006134969325153, + "acc_stderr": 0.03602511318806771, + "acc_norm": 0.3006134969325153, + "acc_norm_stderr": 0.03602511318806771 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2993827160493827, + "acc_stderr": 0.02548311560119547, + "acc_norm": 0.2993827160493827, + "acc_norm_stderr": 0.02548311560119547 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.19, + "acc_stderr": 0.03942772444036624, + "acc_norm": 0.19, + "acc_norm_stderr": 0.03942772444036624 + }, + 
"harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.22797927461139897, + "acc_stderr": 0.030276909945178256, + "acc_norm": 0.22797927461139897, + "acc_norm_stderr": 0.030276909945178256 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.20175438596491227, + "acc_stderr": 0.03775205013583638, + "acc_norm": 0.20175438596491227, + "acc_norm_stderr": 0.03775205013583638 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.22385321100917432, + "acc_stderr": 0.017871217767790215, + "acc_norm": 0.22385321100917432, + "acc_norm_stderr": 0.017871217767790215 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.0393253768039287, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.0393253768039287 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.25163398692810457, + "acc_stderr": 0.0248480182638752, + "acc_norm": 0.25163398692810457, + "acc_norm_stderr": 0.0248480182638752 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384739, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384739 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2644628099173554, + "acc_stderr": 0.04026187527591203, + "acc_norm": 0.2644628099173554, + "acc_norm_stderr": 0.04026187527591203 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.28289473684210525, + "acc_stderr": 0.03665349695640767, + "acc_norm": 0.28289473684210525, + "acc_norm_stderr": 0.03665349695640767 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2679738562091503, + "acc_stderr": 0.017917974069594726, + "acc_norm": 0.2679738562091503, + "acc_norm_stderr": 0.017917974069594726 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.24468085106382978, + "acc_stderr": 0.025645553622266733, + "acc_norm": 0.24468085106382978, + "acc_norm_stderr": 0.025645553622266733 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.23214285714285715, + "acc_stderr": 0.04007341809755806, + 
"acc_norm": 0.23214285714285715, + "acc_norm_stderr": 0.04007341809755806 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.21296296296296297, + "acc_stderr": 0.027920963147993662, + "acc_norm": 0.21296296296296297, + "acc_norm_stderr": 0.027920963147993662 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24692737430167597, + "acc_stderr": 0.014422292204808852, + "acc_norm": 0.24692737430167597, + "acc_norm_stderr": 0.014422292204808852 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.16544117647058823, + "acc_stderr": 0.02257177102549475, + "acc_norm": 0.16544117647058823, + "acc_norm_stderr": 0.02257177102549475 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.24081632653061225, + "acc_stderr": 0.027372942201788167, + "acc_norm": 0.24081632653061225, + "acc_norm_stderr": 0.027372942201788167 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.26582278481012656, + "acc_stderr": 0.028756799629658332, + "acc_norm": 0.26582278481012656, + "acc_norm_stderr": 0.028756799629658332 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2711864406779661, + "acc_stderr": 0.011354581451622981, + "acc_norm": 0.2711864406779661, + "acc_norm_stderr": 0.011354581451622981 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.030587591351604243, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.030587591351604243 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2787878787878788, + "acc_stderr": 0.035014387062967806, + "acc_norm": 0.2787878787878788, + "acc_norm_stderr": 0.035014387062967806 + }, + 
"harness|ko_truthfulqa_mc|0": { + "mc1": 0.2423500611995104, + "mc1_stderr": 0.015000674373570338, + "mc2": 0.40076474326126255, + "mc2_stderr": 0.014949577614485286 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4132231404958678, + "acc_stderr": 0.01692948023449523, + "acc_norm": 0.5478158205430933, + "acc_norm_stderr": 0.017111567130916796 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "beomi/KoAlpaca-KoRWKV-6B", + "model_sha": "427ee72c4350f26de1b287a0c07b842e7d168dbc", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/beomi/KoAlpaca-Polyglot-12.8B/result_2023-09-26 09:57:09.json b/beomi/KoAlpaca-Polyglot-12.8B/result_2023-09-26 09:57:09.json new file mode 100644 index 0000000000000000000000000000000000000000..55ba56b7b40b3c6a06a5231cd5aada8f4526cdb2 --- /dev/null +++ b/beomi/KoAlpaca-Polyglot-12.8B/result_2023-09-26 09:57:09.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.31569965870307165, + "acc_stderr": 0.013582571095815291, + "acc_norm": 0.3438566552901024, + "acc_norm_stderr": 0.013880644570156208 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3873730332603067, + "acc_stderr": 
0.004861544478451863, + "acc_norm": 0.4980083648675563, + "acc_norm_stderr": 0.004989741826250387 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.034462962170884265, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.034462962170884265 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.22330097087378642, + "acc_stderr": 0.04123553189891431, + "acc_norm": 0.22330097087378642, + "acc_norm_stderr": 0.04123553189891431 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.26947637292464877, + "acc_stderr": 0.01586624307321505, + "acc_norm": 0.26947637292464877, + "acc_norm_stderr": 0.01586624307321505 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.24444444444444444, + "acc_stderr": 0.03712537833614867, + "acc_norm": 0.24444444444444444, + "acc_norm_stderr": 0.03712537833614867 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2765957446808511, + "acc_stderr": 0.029241883869628827, + "acc_norm": 0.2765957446808511, + "acc_norm_stderr": 0.029241883869628827 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.24096385542168675, + "acc_stderr": 0.033293941190735296, + "acc_norm": 0.24096385542168675, + "acc_norm_stderr": 0.033293941190735296 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3022508038585209, + "acc_stderr": 0.02608270069539966, + "acc_norm": 0.3022508038585209, + "acc_norm_stderr": 0.02608270069539966 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.20179372197309417, + "acc_stderr": 0.026936111912802277, + "acc_norm": 0.20179372197309417, + "acc_norm_stderr": 0.026936111912802277 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2595419847328244, + "acc_stderr": 0.03844876139785271, + "acc_norm": 0.2595419847328244, + "acc_norm_stderr": 0.03844876139785271 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.26, + 
"acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.25252525252525254, + "acc_stderr": 0.030954055470365897, + "acc_norm": 0.25252525252525254, + "acc_norm_stderr": 0.030954055470365897 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.21379310344827587, + "acc_stderr": 0.03416520447747549, + "acc_norm": 0.21379310344827587, + "acc_norm_stderr": 0.03416520447747549 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.1568627450980392, + "acc_stderr": 0.036186648199362445, + "acc_norm": 0.1568627450980392, + "acc_norm_stderr": 0.036186648199362445 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.22268907563025211, + "acc_stderr": 0.027025433498882374, + "acc_norm": 0.22268907563025211, + "acc_norm_stderr": 0.027025433498882374 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2, + "acc_stderr": 0.020280805062535722, + "acc_norm": 0.2, + "acc_norm_stderr": 0.020280805062535722 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.2037037037037037, + "acc_stderr": 0.03893542518824847, + "acc_norm": 0.2037037037037037, + "acc_norm_stderr": 0.03893542518824847 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.27586206896551724, + "acc_stderr": 0.03144712581678243, + "acc_norm": 0.27586206896551724, + "acc_norm_stderr": 0.03144712581678243 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.24838709677419354, + "acc_stderr": 0.02458002892148101, + "acc_norm": 0.24838709677419354, + "acc_norm_stderr": 0.02458002892148101 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 
0.3034188034188034, + "acc_stderr": 0.030118210106942645, + "acc_norm": 0.3034188034188034, + "acc_norm_stderr": 0.030118210106942645 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2, + "acc_stderr": 0.02461829819586651, + "acc_norm": 0.2, + "acc_norm_stderr": 0.02461829819586651 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2545454545454545, + "acc_stderr": 0.041723430387053825, + "acc_norm": 0.2545454545454545, + "acc_norm_stderr": 0.041723430387053825 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2740740740740741, + "acc_stderr": 0.027195934804085626, + "acc_norm": 0.2740740740740741, + "acc_norm_stderr": 0.027195934804085626 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.271523178807947, + "acc_stderr": 0.03631329803969653, + "acc_norm": 0.271523178807947, + "acc_norm_stderr": 0.03631329803969653 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.23880597014925373, + "acc_stderr": 0.03014777593540922, + "acc_norm": 0.23880597014925373, + "acc_norm_stderr": 0.03014777593540922 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2543352601156069, + "acc_stderr": 0.0332055644308557, + "acc_norm": 0.2543352601156069, + "acc_norm_stderr": 0.0332055644308557 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.24867724867724866, + "acc_stderr": 0.022261817692400168, + "acc_norm": 0.24867724867724866, + "acc_norm_stderr": 0.022261817692400168 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2708333333333333, + "acc_stderr": 0.037161774375660164, + "acc_norm": 0.2708333333333333, + "acc_norm_stderr": 0.037161774375660164 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.17, + "acc_stderr": 0.0377525168068637, + "acc_norm": 0.17, + "acc_norm_stderr": 0.0377525168068637 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 
0.26878612716763006, + "acc_stderr": 0.02386800326250011, + "acc_norm": 0.26878612716763006, + "acc_norm_stderr": 0.02386800326250011 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.27607361963190186, + "acc_stderr": 0.0351238528370505, + "acc_norm": 0.27607361963190186, + "acc_norm_stderr": 0.0351238528370505 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.28703703703703703, + "acc_stderr": 0.02517104191530968, + "acc_norm": 0.28703703703703703, + "acc_norm_stderr": 0.02517104191530968 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720683, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720683 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.24870466321243523, + "acc_stderr": 0.031195840877700286, + "acc_norm": 0.24870466321243523, + "acc_norm_stderr": 0.031195840877700286 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.21052631578947367, + "acc_stderr": 0.038351539543994194, + "acc_norm": 0.21052631578947367, + "acc_norm_stderr": 0.038351539543994194 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.23669724770642203, + "acc_stderr": 0.01822407811729908, + "acc_norm": 0.23669724770642203, + "acc_norm_stderr": 0.01822407811729908 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.18253968253968253, + "acc_stderr": 0.034550710191021496, + "acc_norm": 0.18253968253968253, + "acc_norm_stderr": 0.034550710191021496 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.0249541843248799, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.0249541843248799 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2809917355371901, + "acc_stderr": 0.04103203830514512, + "acc_norm": 0.2809917355371901, + "acc_norm_stderr": 0.04103203830514512 + }, + 
"harness|ko_mmlu_astronomy|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.03459777606810535, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.03459777606810535 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.26633986928104575, + "acc_stderr": 0.017883188134667192, + "acc_norm": 0.26633986928104575, + "acc_norm_stderr": 0.017883188134667192 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2553191489361702, + "acc_stderr": 0.02601199293090201, + "acc_norm": 0.2553191489361702, + "acc_norm_stderr": 0.02601199293090201 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3125, + "acc_stderr": 0.043994650575715215, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.043994650575715215 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.20833333333333334, + "acc_stderr": 0.027696910713093936, + "acc_norm": 0.20833333333333334, + "acc_norm_stderr": 0.027696910713093936 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2748603351955307, + "acc_stderr": 0.014931316703220513, + "acc_norm": 0.2748603351955307, + "acc_norm_stderr": 0.014931316703220513 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.17647058823529413, + "acc_stderr": 0.02315746830855938, + "acc_norm": 0.17647058823529413, + "acc_norm_stderr": 0.02315746830855938 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.23265306122448978, + "acc_stderr": 0.02704925791589618, + "acc_norm": 0.23265306122448978, + "acc_norm_stderr": 0.02704925791589618 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.29957805907172996, + "acc_stderr": 0.029818024749753102, + "acc_norm": 
0.29957805907172996, + "acc_norm_stderr": 0.029818024749753102 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.25684485006518903, + "acc_stderr": 0.011158455853098862, + "acc_norm": 0.25684485006518903, + "acc_norm_stderr": 0.011158455853098862 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.030587591351604246, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.030587591351604246 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2545454545454545, + "acc_stderr": 0.0340150671524904, + "acc_norm": 0.2545454545454545, + "acc_norm_stderr": 0.0340150671524904 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24724602203182375, + "mc1_stderr": 0.015102404797359649, + "mc2": 0.4196185756093357, + "mc2_stderr": 0.01602551288494906 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2987012987012987, + "acc_stderr": 0.015735657391438295, + "acc_norm": 0.3482880755608028, + "acc_norm_stderr": 0.016379926739148037 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + 
"harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "beomi/KoAlpaca-Polyglot-12.8B", + "model_sha": "5f225e9c5ae6c7238fc2316da0b8a9922019674d", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + 
"override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/beomi/KoAlpaca-Polyglot-5.8B/result_2023-09-26 09:56:49.json b/beomi/KoAlpaca-Polyglot-5.8B/result_2023-09-26 09:56:49.json new file mode 100644 index 0000000000000000000000000000000000000000..5c613b1f0c0a2ca6366a1c65368f7de0e3252cc8 --- /dev/null +++ b/beomi/KoAlpaca-Polyglot-5.8B/result_2023-09-26 09:56:49.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2593856655290102, + "acc_stderr": 0.012808273573927094, + "acc_norm": 0.3037542662116041, + "acc_norm_stderr": 0.01343890918477876 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3390758812985461, + "acc_stderr": 0.004724281487819373, + "acc_norm": 0.4146584345747859, + "acc_norm_stderr": 0.004916561213591286 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.26900584795321636, + "acc_stderr": 0.03401052620104088, + "acc_norm": 0.26900584795321636, + "acc_norm_stderr": 0.03401052620104088 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.17475728155339806, + "acc_stderr": 0.037601780060266196, + "acc_norm": 0.17475728155339806, + "acc_norm_stderr": 0.037601780060266196 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.23243933588761176, + "acc_stderr": 0.015104550008905699, + "acc_norm": 0.23243933588761176, + "acc_norm_stderr": 0.015104550008905699 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.038201699145179055, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.038201699145179055 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.23829787234042554, + "acc_stderr": 0.027851252973889788, + "acc_norm": 0.23829787234042554, + "acc_norm_stderr": 0.027851252973889788 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.25903614457831325, + "acc_stderr": 
0.034106466140718564, + "acc_norm": 0.25903614457831325, + "acc_norm_stderr": 0.034106466140718564 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.27009646302250806, + "acc_stderr": 0.025218040373410622, + "acc_norm": 0.27009646302250806, + "acc_norm_stderr": 0.025218040373410622 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.2556053811659193, + "acc_stderr": 0.029275891003969927, + "acc_norm": 0.2556053811659193, + "acc_norm_stderr": 0.029275891003969927 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.22137404580152673, + "acc_stderr": 0.0364129708131373, + "acc_norm": 0.22137404580152673, + "acc_norm_stderr": 0.0364129708131373 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909281, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909281 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.2474747474747475, + "acc_stderr": 0.03074630074212451, + "acc_norm": 0.2474747474747475, + "acc_norm_stderr": 0.03074630074212451 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2413793103448276, + "acc_stderr": 0.03565998174135302, + "acc_norm": 0.2413793103448276, + "acc_norm_stderr": 0.03565998174135302 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.13725490196078433, + "acc_stderr": 0.034240846698915216, + "acc_norm": 0.13725490196078433, + "acc_norm_stderr": 0.034240846698915216 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.31092436974789917, + "acc_stderr": 0.030066761582977934, + "acc_norm": 0.31092436974789917, + "acc_norm_stderr": 0.030066761582977934 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.28974358974358977, + "acc_stderr": 0.02300062824368796, + "acc_norm": 0.28974358974358977, + "acc_norm_stderr": 0.02300062824368796 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + 
"harness|ko_mmlu_global_facts|5": { + "acc": 0.16, + "acc_stderr": 0.03684529491774709, + "acc_norm": 0.16, + "acc_norm_stderr": 0.03684529491774709 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.2037037037037037, + "acc_stderr": 0.038935425188248475, + "acc_norm": 0.2037037037037037, + "acc_norm_stderr": 0.038935425188248475 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.26108374384236455, + "acc_stderr": 0.030903796952114475, + "acc_norm": 0.26108374384236455, + "acc_norm_stderr": 0.030903796952114475 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.31290322580645163, + "acc_stderr": 0.02637756702864586, + "acc_norm": 0.31290322580645163, + "acc_norm_stderr": 0.02637756702864586 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.24358974358974358, + "acc_stderr": 0.028120966503914418, + "acc_norm": 0.24358974358974358, + "acc_norm_stderr": 0.028120966503914418 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.21132075471698114, + "acc_stderr": 0.025125766484827845, + "acc_norm": 0.21132075471698114, + "acc_norm_stderr": 0.025125766484827845 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2818181818181818, + "acc_stderr": 0.043091187099464585, + "acc_norm": 0.2818181818181818, + "acc_norm_stderr": 0.043091187099464585 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24814814814814815, + "acc_stderr": 0.0263357394040558, + "acc_norm": 0.24814814814814815, + "acc_norm_stderr": 0.0263357394040558 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.271523178807947, + "acc_stderr": 0.03631329803969653, + "acc_norm": 0.271523178807947, + "acc_norm_stderr": 0.03631329803969653 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.23383084577114427, + "acc_stderr": 0.029929415408348384, + "acc_norm": 0.23383084577114427, + "acc_norm_stderr": 0.029929415408348384 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.28901734104046245, + "acc_stderr": 0.03456425745086998, + "acc_norm": 
0.28901734104046245, + "acc_norm_stderr": 0.03456425745086998 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.23544973544973544, + "acc_stderr": 0.021851509822031708, + "acc_norm": 0.23544973544973544, + "acc_norm_stderr": 0.021851509822031708 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.22916666666666666, + "acc_stderr": 0.035146974678623884, + "acc_norm": 0.22916666666666666, + "acc_norm_stderr": 0.035146974678623884 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.21, + "acc_stderr": 0.04093601807403326, + "acc_norm": 0.21, + "acc_norm_stderr": 0.04093601807403326 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2543352601156069, + "acc_stderr": 0.02344582627654555, + "acc_norm": 0.2543352601156069, + "acc_norm_stderr": 0.02344582627654555 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.24539877300613497, + "acc_stderr": 0.03380939813943354, + "acc_norm": 0.24539877300613497, + "acc_norm_stderr": 0.03380939813943354 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.02492200116888632, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.02492200116888632 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.3316062176165803, + "acc_stderr": 0.03397636541089116, + "acc_norm": 0.3316062176165803, + "acc_norm_stderr": 0.03397636541089116 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2894736842105263, + "acc_stderr": 0.04266339443159394, + "acc_norm": 0.2894736842105263, + "acc_norm_stderr": 0.04266339443159394 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.21834862385321102, + "acc_stderr": 
0.01771260052872273, + "acc_norm": 0.21834862385321102, + "acc_norm_stderr": 0.01771260052872273 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.31746031746031744, + "acc_stderr": 0.04163453031302859, + "acc_norm": 0.31746031746031744, + "acc_norm_stderr": 0.04163453031302859 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2581699346405229, + "acc_stderr": 0.025058503316958157, + "acc_norm": 0.2581699346405229, + "acc_norm_stderr": 0.025058503316958157 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2231404958677686, + "acc_stderr": 0.03800754475228733, + "acc_norm": 0.2231404958677686, + "acc_norm_stderr": 0.03800754475228733 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.27631578947368424, + "acc_stderr": 0.03639057569952925, + "acc_norm": 0.27631578947368424, + "acc_norm_stderr": 0.03639057569952925 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.23039215686274508, + "acc_stderr": 0.017035229258034038, + "acc_norm": 0.23039215686274508, + "acc_norm_stderr": 0.017035229258034038 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.24113475177304963, + "acc_stderr": 0.025518731049537773, + "acc_norm": 0.24113475177304963, + "acc_norm_stderr": 0.025518731049537773 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.16071428571428573, + "acc_stderr": 0.03485946096475741, + "acc_norm": 0.16071428571428573, + "acc_norm_stderr": 0.03485946096475741 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.27314814814814814, + "acc_stderr": 0.03038805130167812, + "acc_norm": 0.27314814814814814, + "acc_norm_stderr": 0.03038805130167812 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23128491620111732, + "acc_stderr": 0.0141022236231526, + "acc_norm": 0.23128491620111732, + "acc_norm_stderr": 0.0141022236231526 + }, + 
"harness|ko_mmlu_college_computer_science|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.2536764705882353, + "acc_stderr": 0.02643132987078953, + "acc_norm": 0.2536764705882353, + "acc_norm_stderr": 0.02643132987078953 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.23673469387755103, + "acc_stderr": 0.027212835884073132, + "acc_norm": 0.23673469387755103, + "acc_norm_stderr": 0.027212835884073132 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.26582278481012656, + "acc_stderr": 0.02875679962965834, + "acc_norm": 0.26582278481012656, + "acc_norm_stderr": 0.02875679962965834 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.26140808344198174, + "acc_stderr": 0.011222528169771316, + "acc_norm": 0.26140808344198174, + "acc_norm_stderr": 0.011222528169771316 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.030964517926923413, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.030964517926923413 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03225078108306289, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03225078108306289 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2350061199510404, + "mc1_stderr": 0.014843061507731613, + "mc2": 0.40043350315231013, + "mc2_stderr": 0.01604778937263507 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2892561983471074, + "acc_stderr": 0.015588800386053555, + "acc_norm": 0.31759149940968123, + "acc_norm_stderr": 0.016005581876229306 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + 
"harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + 
"harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "beomi/KoAlpaca-Polyglot-5.8B", + "model_sha": "cb1597cbaf4a98e52e6b767381a80893e4818477", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/beomi/KoRWKV-1.5B/result_2023-10-31 18:37:02.json b/beomi/KoRWKV-1.5B/result_2023-10-31 18:37:02.json new file mode 100644 index 0000000000000000000000000000000000000000..9af029677be357ca14786b39e627a3507e5eb2ed --- /dev/null +++ b/beomi/KoRWKV-1.5B/result_2023-10-31 18:37:02.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2150170648464164, + "acc_stderr": 0.012005717634133616, + "acc_norm": 0.2687713310580205, + "acc_norm_stderr": 0.012955065963710686 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3241386178052181, + "acc_stderr": 0.004670955399641123, + "acc_norm": 0.3950408285202151, + "acc_norm_stderr": 0.004878603699686037 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.2982456140350877, + "acc_stderr": 0.03508771929824564, + "acc_norm": 0.2982456140350877, + "acc_norm_stderr": 0.03508771929824564 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.17475728155339806, + "acc_stderr": 0.037601780060266196, + "acc_norm": 0.17475728155339806, + "acc_norm_stderr": 0.037601780060266196 + }, + 
"harness|ko_mmlu_miscellaneous|5": { + "acc": 0.27586206896551724, + "acc_stderr": 0.01598281477469563, + "acc_norm": 0.27586206896551724, + "acc_norm_stderr": 0.01598281477469563 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.24444444444444444, + "acc_stderr": 0.037125378336148665, + "acc_norm": 0.24444444444444444, + "acc_norm_stderr": 0.037125378336148665 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2723404255319149, + "acc_stderr": 0.029101290698386705, + "acc_norm": 0.2723404255319149, + "acc_norm_stderr": 0.029101290698386705 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.26506024096385544, + "acc_stderr": 0.03436024037944967, + "acc_norm": 0.26506024096385544, + "acc_norm_stderr": 0.03436024037944967 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2990353697749196, + "acc_stderr": 0.02600330111788514, + "acc_norm": 0.2990353697749196, + "acc_norm_stderr": 0.02600330111788514 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.36771300448430494, + "acc_stderr": 0.032361983509282745, + "acc_norm": 0.36771300448430494, + "acc_norm_stderr": 0.032361983509282745 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.22900763358778625, + "acc_stderr": 0.036853466317118506, + "acc_norm": 0.22900763358778625, + "acc_norm_stderr": 0.036853466317118506 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.21717171717171718, + "acc_stderr": 0.02937661648494564, + "acc_norm": 0.21717171717171718, + "acc_norm_stderr": 0.02937661648494564 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2620689655172414, + "acc_stderr": 0.036646663372252565, + "acc_norm": 0.2620689655172414, + "acc_norm_stderr": 
0.036646663372252565 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.04158307533083286, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.04158307533083286 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.23949579831932774, + "acc_stderr": 0.027722065493361252, + "acc_norm": 0.23949579831932774, + "acc_norm_stderr": 0.027722065493361252 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.24358974358974358, + "acc_stderr": 0.021763733684173923, + "acc_norm": 0.24358974358974358, + "acc_norm_stderr": 0.021763733684173923 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036846, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036846 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.19444444444444445, + "acc_stderr": 0.03826076324884863, + "acc_norm": 0.19444444444444445, + "acc_norm_stderr": 0.03826076324884863 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2512315270935961, + "acc_stderr": 0.030516530732694436, + "acc_norm": 0.2512315270935961, + "acc_norm_stderr": 0.030516530732694436 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.23225806451612904, + "acc_stderr": 0.024022256130308235, + "acc_norm": 0.23225806451612904, + "acc_norm_stderr": 0.024022256130308235 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.20085470085470086, + "acc_stderr": 0.02624677294689048, + "acc_norm": 0.20085470085470086, + "acc_norm_stderr": 0.02624677294689048 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2641509433962264, + "acc_stderr": 0.027134291628741695, + "acc_norm": 0.2641509433962264, + "acc_norm_stderr": 0.027134291628741695 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.22727272727272727, + "acc_stderr": 0.04013964554072775, + 
"acc_norm": 0.22727272727272727, + "acc_norm_stderr": 0.04013964554072775 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.026842057873833706, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.026842057873833706 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2185430463576159, + "acc_stderr": 0.03374235550425694, + "acc_norm": 0.2185430463576159, + "acc_norm_stderr": 0.03374235550425694 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.23880597014925373, + "acc_stderr": 0.030147775935409224, + "acc_norm": 0.23880597014925373, + "acc_norm_stderr": 0.030147775935409224 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.23699421965317918, + "acc_stderr": 0.032424147574830975, + "acc_norm": 0.23699421965317918, + "acc_norm_stderr": 0.032424147574830975 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.24867724867724866, + "acc_stderr": 0.02226181769240018, + "acc_norm": 0.24867724867724866, + "acc_norm_stderr": 0.02226181769240018 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.25, + "acc_stderr": 0.03621034121889507, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03621034121889507 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.15, + "acc_stderr": 0.03588702812826373, + "acc_norm": 0.15, + "acc_norm_stderr": 0.03588702812826373 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.24855491329479767, + "acc_stderr": 0.023267528432100174, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 0.023267528432100174 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.25766871165644173, + "acc_stderr": 0.03436150827846917, + "acc_norm": 0.25766871165644173, + "acc_norm_stderr": 0.03436150827846917 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.21296296296296297, + "acc_stderr": 
0.0227797190887334, + "acc_norm": 0.21296296296296297, + "acc_norm_stderr": 0.0227797190887334 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036845, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036845 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.3471502590673575, + "acc_stderr": 0.03435696168361355, + "acc_norm": 0.3471502590673575, + "acc_norm_stderr": 0.03435696168361355 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.041424397194893624, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.041424397194893624 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.27522935779816515, + "acc_stderr": 0.019149093743155196, + "acc_norm": 0.27522935779816515, + "acc_norm_stderr": 0.019149093743155196 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.1984126984126984, + "acc_stderr": 0.03567016675276862, + "acc_norm": 0.1984126984126984, + "acc_norm_stderr": 0.03567016675276862 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.23202614379084968, + "acc_stderr": 0.024170840879341023, + "acc_norm": 0.23202614379084968, + "acc_norm_stderr": 0.024170840879341023 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2644628099173554, + "acc_stderr": 0.04026187527591207, + "acc_norm": 0.2644628099173554, + "acc_norm_stderr": 0.04026187527591207 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.19078947368421054, + "acc_stderr": 0.031975658210325, + "acc_norm": 0.19078947368421054, + "acc_norm_stderr": 0.031975658210325 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.22875816993464052, + "acc_stderr": 0.016992723465466233, + "acc_norm": 0.22875816993464052, + "acc_norm_stderr": 0.016992723465466233 + }, + "harness|ko_mmlu_professional_accounting|5": { + 
"acc": 0.22695035460992907, + "acc_stderr": 0.024987106365642973, + "acc_norm": 0.22695035460992907, + "acc_norm_stderr": 0.024987106365642973 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.20535714285714285, + "acc_stderr": 0.038342410214190735, + "acc_norm": 0.20535714285714285, + "acc_norm_stderr": 0.038342410214190735 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.42592592592592593, + "acc_stderr": 0.033723432716530624, + "acc_norm": 0.42592592592592593, + "acc_norm_stderr": 0.033723432716530624 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24022346368715083, + "acc_stderr": 0.014288343803925312, + "acc_norm": 0.24022346368715083, + "acc_norm_stderr": 0.014288343803925312 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4227941176470588, + "acc_stderr": 0.030008562845003472, + "acc_norm": 0.4227941176470588, + "acc_norm_stderr": 0.030008562845003472 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3306122448979592, + "acc_stderr": 0.030116426296540582, + "acc_norm": 0.3306122448979592, + "acc_norm_stderr": 0.030116426296540582 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.26582278481012656, + "acc_stderr": 0.02875679962965834, + "acc_norm": 0.26582278481012656, + "acc_norm_stderr": 0.02875679962965834 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2503259452411995, + "acc_stderr": 0.011064151027165441, + "acc_norm": 0.2503259452411995, + "acc_norm_stderr": 0.011064151027165441 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.25980392156862747, + "acc_stderr": 0.030778554678693247, + "acc_norm": 0.25980392156862747, + 
"acc_norm_stderr": 0.030778554678693247 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.23030303030303031, + "acc_stderr": 0.03287666758603489, + "acc_norm": 0.23030303030303031, + "acc_norm_stderr": 0.03287666758603489 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.23745410036719705, + "mc1_stderr": 0.01489627744104187, + "mc2": 0.40377476002097423, + "mc2_stderr": 0.01506211934008226 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3955135773317591, + "acc_stderr": 0.016810815902206042, + "acc_norm": 0.5489964580873672, + "acc_norm_stderr": 0.017107618859549346 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + 
"harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "beomi/KoRWKV-1.5B", + "model_sha": "e2e327ae9075c634e8b127f262412d670038621e", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/beomi/KoRWKV-6B/result_2023-10-31 18:36:08.json b/beomi/KoRWKV-6B/result_2023-10-31 18:36:08.json new file mode 100644 index 0000000000000000000000000000000000000000..d437c8a2407d3850b01ed125f6851617b49d6f15 --- /dev/null +++ b/beomi/KoRWKV-6B/result_2023-10-31 18:36:08.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 
0.24146757679180889, + "acc_stderr": 0.01250656483973943, + "acc_norm": 0.28668941979522183, + "acc_norm_stderr": 0.013214986329274779 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3467436765584545, + "acc_stderr": 0.004749606196363337, + "acc_norm": 0.4356701852220673, + "acc_norm_stderr": 0.004948310399746081 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.26900584795321636, + "acc_stderr": 0.03401052620104089, + "acc_norm": 0.26900584795321636, + "acc_norm_stderr": 0.03401052620104089 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.20388349514563106, + "acc_stderr": 0.039891398595317706, + "acc_norm": 0.20388349514563106, + "acc_norm_stderr": 0.039891398595317706 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.27458492975734355, + "acc_stderr": 0.01595982993308404, + "acc_norm": 0.27458492975734355, + "acc_norm_stderr": 0.01595982993308404 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.24444444444444444, + "acc_stderr": 0.037125378336148665, + "acc_norm": 0.24444444444444444, + "acc_norm_stderr": 0.037125378336148665 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932268, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932268 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2, + "acc_stderr": 0.0261488180184245, + "acc_norm": 0.2, + "acc_norm_stderr": 0.0261488180184245 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.2469879518072289, + "acc_stderr": 0.03357351982064537, + "acc_norm": 0.2469879518072289, + "acc_norm_stderr": 0.03357351982064537 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2958199356913183, + "acc_stderr": 0.025922371788818774, + "acc_norm": 0.2958199356913183, + "acc_norm_stderr": 0.025922371788818774 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.21524663677130046, + "acc_stderr": 0.027584066602208274, + "acc_norm": 0.21524663677130046, + "acc_norm_stderr": 0.027584066602208274 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 
0.22900763358778625, + "acc_stderr": 0.036853466317118506, + "acc_norm": 0.22900763358778625, + "acc_norm_stderr": 0.036853466317118506 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.22727272727272727, + "acc_stderr": 0.0298575156733864, + "acc_norm": 0.22727272727272727, + "acc_norm_stderr": 0.0298575156733864 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2620689655172414, + "acc_stderr": 0.036646663372252565, + "acc_norm": 0.2620689655172414, + "acc_norm_stderr": 0.036646663372252565 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237654, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237654 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3403361344537815, + "acc_stderr": 0.03077805742293167, + "acc_norm": 0.3403361344537815, + "acc_norm_stderr": 0.03077805742293167 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2846153846153846, + "acc_stderr": 0.022878322799706304, + "acc_norm": 0.2846153846153846, + "acc_norm_stderr": 0.022878322799706304 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932269, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932269 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.21296296296296297, + "acc_stderr": 0.03957835471980981, + "acc_norm": 0.21296296296296297, + "acc_norm_stderr": 0.03957835471980981 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2512315270935961, + "acc_stderr": 0.030516530732694436, + "acc_norm": 0.2512315270935961, + "acc_norm_stderr": 0.030516530732694436 + }, + 
"harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2645161290322581, + "acc_stderr": 0.02509189237885928, + "acc_norm": 0.2645161290322581, + "acc_norm_stderr": 0.02509189237885928 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2094017094017094, + "acc_stderr": 0.026655699653922737, + "acc_norm": 0.2094017094017094, + "acc_norm_stderr": 0.026655699653922737 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.21509433962264152, + "acc_stderr": 0.025288394502891356, + "acc_norm": 0.21509433962264152, + "acc_norm_stderr": 0.025288394502891356 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.15454545454545454, + "acc_stderr": 0.03462262571262667, + "acc_norm": 0.15454545454545454, + "acc_norm_stderr": 0.03462262571262667 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.02671924078371217, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.02671924078371217 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2781456953642384, + "acc_stderr": 0.03658603262763744, + "acc_norm": 0.2781456953642384, + "acc_norm_stderr": 0.03658603262763744 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.25870646766169153, + "acc_stderr": 0.030965903123573026, + "acc_norm": 0.25870646766169153, + "acc_norm_stderr": 0.030965903123573026 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2543352601156069, + "acc_stderr": 0.0332055644308557, + "acc_norm": 0.2543352601156069, + "acc_norm_stderr": 0.0332055644308557 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2275132275132275, + "acc_stderr": 0.021591269407823778, + "acc_norm": 0.2275132275132275, + "acc_norm_stderr": 0.021591269407823778 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.20833333333333334, + "acc_stderr": 0.033961162058453336, + "acc_norm": 0.20833333333333334, + "acc_norm_stderr": 0.033961162058453336 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.19, + "acc_stderr": 0.03942772444036623, + 
"acc_norm": 0.19, + "acc_norm_stderr": 0.03942772444036623 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816505, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816505 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.21965317919075145, + "acc_stderr": 0.022289638852617904, + "acc_norm": 0.21965317919075145, + "acc_norm_stderr": 0.022289638852617904 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3128834355828221, + "acc_stderr": 0.036429145782924034, + "acc_norm": 0.3128834355828221, + "acc_norm_stderr": 0.036429145782924034 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.23148148148148148, + "acc_stderr": 0.02346842983245116, + "acc_norm": 0.23148148148148148, + "acc_norm_stderr": 0.02346842983245116 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.31088082901554404, + "acc_stderr": 0.03340361906276587, + "acc_norm": 0.31088082901554404, + "acc_norm_stderr": 0.03340361906276587 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.21052631578947367, + "acc_stderr": 0.03835153954399421, + "acc_norm": 0.21052631578947367, + "acc_norm_stderr": 0.03835153954399421 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.24770642201834864, + "acc_stderr": 0.018508143602547805, + "acc_norm": 0.24770642201834864, + "acc_norm_stderr": 0.018508143602547805 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.15873015873015872, + "acc_stderr": 0.03268454013011744, + "acc_norm": 0.15873015873015872, + "acc_norm_stderr": 0.03268454013011744 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.02495418432487991, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.02495418432487991 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.3, + "acc_stderr": 
0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.256198347107438, + "acc_stderr": 0.03984979653302871, + "acc_norm": 0.256198347107438, + "acc_norm_stderr": 0.03984979653302871 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.21052631578947367, + "acc_stderr": 0.03317672787533156, + "acc_norm": 0.21052631578947367, + "acc_norm_stderr": 0.03317672787533156 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2173202614379085, + "acc_stderr": 0.016684820929148598, + "acc_norm": 0.2173202614379085, + "acc_norm_stderr": 0.016684820929148598 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2375886524822695, + "acc_stderr": 0.025389512552729906, + "acc_norm": 0.2375886524822695, + "acc_norm_stderr": 0.025389512552729906 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.042878587513404565, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.042878587513404565 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4027777777777778, + "acc_stderr": 0.03344887382997866, + "acc_norm": 0.4027777777777778, + "acc_norm_stderr": 0.03344887382997866 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24692737430167597, + "acc_stderr": 0.014422292204808852, + "acc_norm": 0.24692737430167597, + "acc_norm_stderr": 0.014422292204808852 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.34191176470588236, + "acc_stderr": 0.02881472242225417, + "acc_norm": 0.34191176470588236, + "acc_norm_stderr": 0.02881472242225417 + }, + 
"harness|ko_mmlu_security_studies|5": { + "acc": 0.2612244897959184, + "acc_stderr": 0.028123429335142783, + "acc_norm": 0.2612244897959184, + "acc_norm_stderr": 0.028123429335142783 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2616033755274262, + "acc_stderr": 0.028609516716994934, + "acc_norm": 0.2616033755274262, + "acc_norm_stderr": 0.028609516716994934 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.22685788787483702, + "acc_stderr": 0.010696348133569924, + "acc_norm": 0.22685788787483702, + "acc_norm_stderr": 0.010696348133569924 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.030587591351604243, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.030587591351604243 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.22424242424242424, + "acc_stderr": 0.03256866661681102, + "acc_norm": 0.22424242424242424, + "acc_norm_stderr": 0.03256866661681102 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.22399020807833536, + "mc1_stderr": 0.014594964329474205, + "mc2": 0.38068228949426847, + "mc2_stderr": 0.014620809751439413 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4037780401416765, + "acc_stderr": 0.016869031540298632, + "acc_norm": 0.5407319952774499, + "acc_norm_stderr": 0.017133218276537673 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + 
"harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + 
"harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "beomi/KoRWKV-6B", + "model_sha": "541600070459baf0f1be9560181d5ceb77794085", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/beomi/Llama-3-KoEn-8B/result_2024-07-09 06:21:22.json b/beomi/Llama-3-KoEn-8B/result_2024-07-09 06:21:22.json new file mode 100644 index 0000000000000000000000000000000000000000..859dc521b4b27436efa24c31cf66fb5afa6765a4 --- /dev/null +++ b/beomi/Llama-3-KoEn-8B/result_2024-07-09 06:21:22.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4138225255972696, + "acc_stderr": 0.014392730009221007, + "acc_norm": 0.47696245733788395, + "acc_norm_stderr": 0.014595873205358269 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4283011352320255, + "acc_stderr": 0.004938212723748203, + "acc_norm": 0.5805616411073491, + "acc_norm_stderr": 0.0049245863623016565 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6081871345029239, + "acc_stderr": 0.037439798259263996, + "acc_norm": 0.6081871345029239, + "acc_norm_stderr": 0.037439798259263996 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5922330097087378, + "acc_stderr": 0.048657775704107675, + "acc_norm": 0.5922330097087378, + "acc_norm_stderr": 0.048657775704107675 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6232439335887612, + "acc_stderr": 0.01732829290730306, + "acc_norm": 0.6232439335887612, + "acc_norm_stderr": 0.01732829290730306 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.42962962962962964, + "acc_stderr": 0.042763494943765974, + "acc_norm": 0.42962962962962964, + "acc_norm_stderr": 0.042763494943765974 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768077, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768077 + }, + 
"harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.43829787234042555, + "acc_stderr": 0.03243618636108101, + "acc_norm": 0.43829787234042555, + "acc_norm_stderr": 0.03243618636108101 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.45180722891566266, + "acc_stderr": 0.03874371556587953, + "acc_norm": 0.45180722891566266, + "acc_norm_stderr": 0.03874371556587953 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5241157556270096, + "acc_stderr": 0.028365041542564577, + "acc_norm": 0.5241157556270096, + "acc_norm_stderr": 0.028365041542564577 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.515695067264574, + "acc_stderr": 0.0335412657542081, + "acc_norm": 0.515695067264574, + "acc_norm_stderr": 0.0335412657542081 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5648854961832062, + "acc_stderr": 0.04348208051644858, + "acc_norm": 0.5648854961832062, + "acc_norm_stderr": 0.04348208051644858 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6515151515151515, + "acc_stderr": 0.033948539651564025, + "acc_norm": 0.6515151515151515, + "acc_norm_stderr": 0.033948539651564025 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4896551724137931, + "acc_stderr": 0.04165774775728763, + "acc_norm": 0.4896551724137931, + "acc_norm_stderr": 0.04165774775728763 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.04389869956808778, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.04389869956808778 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5042016806722689, + "acc_stderr": 0.03247734334448111, + "acc_norm": 0.5042016806722689, + "acc_norm_stderr": 0.03247734334448111 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.47692307692307695, + "acc_stderr": 0.025323990861736128, + "acc_norm": 
0.47692307692307695, + "acc_norm_stderr": 0.025323990861736128 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.55, + "acc_stderr": 0.05, + "acc_norm": 0.55, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5092592592592593, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.5092592592592593, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4236453201970443, + "acc_stderr": 0.03476725747649037, + "acc_norm": 0.4236453201970443, + "acc_norm_stderr": 0.03476725747649037 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5419354838709678, + "acc_stderr": 0.028343787250540615, + "acc_norm": 0.5419354838709678, + "acc_norm_stderr": 0.028343787250540615 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7136752136752137, + "acc_stderr": 0.029614323690456645, + "acc_norm": 0.7136752136752137, + "acc_norm_stderr": 0.029614323690456645 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5018867924528302, + "acc_stderr": 0.030772653642075664, + "acc_norm": 0.5018867924528302, + "acc_norm_stderr": 0.030772653642075664 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4818181818181818, + "acc_stderr": 0.04785964010794917, + "acc_norm": 0.4818181818181818, + "acc_norm_stderr": 0.04785964010794917 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.02684205787383371, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.02684205787383371 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.03802039760107903, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.03802039760107903 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6766169154228856, + "acc_stderr": 0.03307615947979035, + "acc_norm": 
0.6766169154228856, + "acc_norm_stderr": 0.03307615947979035 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4161849710982659, + "acc_stderr": 0.037585177754049466, + "acc_norm": 0.4161849710982659, + "acc_norm_stderr": 0.037585177754049466 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.291005291005291, + "acc_stderr": 0.023393826500484882, + "acc_norm": 0.291005291005291, + "acc_norm_stderr": 0.023393826500484882 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4375, + "acc_stderr": 0.04148415739394154, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.04148415739394154 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.68, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.68, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5028901734104047, + "acc_stderr": 0.02691864538323901, + "acc_norm": 0.5028901734104047, + "acc_norm_stderr": 0.02691864538323901 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5030674846625767, + "acc_stderr": 0.03928297078179662, + "acc_norm": 0.5030674846625767, + "acc_norm_stderr": 0.03928297078179662 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5246913580246914, + "acc_stderr": 0.027786800931427443, + "acc_norm": 0.5246913580246914, + "acc_norm_stderr": 0.027786800931427443 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6010362694300518, + "acc_stderr": 0.03533999094065696, + "acc_norm": 0.6010362694300518, + "acc_norm_stderr": 0.03533999094065696 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.042270544512322, + "acc_norm": 
0.2807017543859649, + "acc_norm_stderr": 0.042270544512322 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6311926605504588, + "acc_stderr": 0.020686227560729534, + "acc_norm": 0.6311926605504588, + "acc_norm_stderr": 0.020686227560729534 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2698412698412698, + "acc_stderr": 0.03970158273235173, + "acc_norm": 0.2698412698412698, + "acc_norm_stderr": 0.03970158273235173 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5588235294117647, + "acc_stderr": 0.02843109544417664, + "acc_norm": 0.5588235294117647, + "acc_norm_stderr": 0.02843109544417664 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.44, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.44, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.043913262867240704, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.043913262867240704 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.47368421052631576, + "acc_stderr": 0.040633027314866704, + "acc_norm": 0.47368421052631576, + "acc_norm_stderr": 0.040633027314866704 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.42810457516339867, + "acc_stderr": 0.020017629214213094, + "acc_norm": 0.42810457516339867, + "acc_norm_stderr": 0.020017629214213094 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.34397163120567376, + "acc_stderr": 0.028338017428611324, + "acc_norm": 0.34397163120567376, + "acc_norm_stderr": 0.028338017428611324 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.39285714285714285, + "acc_stderr": 0.04635550135609976, + "acc_norm": 0.39285714285714285, + "acc_norm_stderr": 0.04635550135609976 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.36574074074074076, + "acc_stderr": 0.032847388576472056, + "acc_norm": 0.36574074074074076, + "acc_norm_stderr": 0.032847388576472056 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 
0.2435754189944134, + "acc_stderr": 0.014355911964767857, + "acc_norm": 0.2435754189944134, + "acc_norm_stderr": 0.014355911964767857 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.47058823529411764, + "acc_stderr": 0.030320243265004144, + "acc_norm": 0.47058823529411764, + "acc_norm_stderr": 0.030320243265004144 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5020408163265306, + "acc_stderr": 0.0320089533497105, + "acc_norm": 0.5020408163265306, + "acc_norm_stderr": 0.0320089533497105 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6371308016877637, + "acc_stderr": 0.03129920825530213, + "acc_norm": 0.6371308016877637, + "acc_norm_stderr": 0.03129920825530213 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.33376792698826596, + "acc_stderr": 0.012043812655846142, + "acc_norm": 0.33376792698826596, + "acc_norm_stderr": 0.012043812655846142 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5980392156862745, + "acc_stderr": 0.034411900234824655, + "acc_norm": 0.5980392156862745, + "acc_norm_stderr": 0.034411900234824655 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5878787878787879, + "acc_stderr": 0.038435669935887186, + "acc_norm": 0.5878787878787879, + "acc_norm_stderr": 0.038435669935887186 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24357405140758873, + "mc1_stderr": 0.015026354824910782, + "mc2": 0.41178080951652124, + "mc2_stderr": 0.014701076629216495 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4344746162927981, + "acc_stderr": 0.017042098620824935, + "acc_norm": 0.5537190082644629, + "acc_norm_stderr": 
0.017090852631668336 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + 
"harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "beomi/Llama-3-KoEn-8B", + "model_sha": "43bacd328630f27df0438cabd8c1baa7058c4769", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/beomi/Llama-3-Open-Ko-8B-Instruct-preview/result_2024-04-27 12:05:24.json b/beomi/Llama-3-Open-Ko-8B-Instruct-preview/result_2024-04-27 12:05:24.json new file mode 100644 index 0000000000000000000000000000000000000000..5e3359122430130198c8fc5b30bb01d48842fca4 --- /dev/null +++ b/beomi/Llama-3-Open-Ko-8B-Instruct-preview/result_2024-04-27 12:05:24.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3199658703071672, + "acc_stderr": 0.013631345807016195, + "acc_norm": 0.34982935153583616, + "acc_norm_stderr": 0.01393680921215828 + }, + "harness|ko_hellaswag|10": { + "acc": 0.33827922724556864, + "acc_stderr": 0.004721571443354455, + "acc_norm": 0.4175463055168293, + "acc_norm_stderr": 0.0049214665913350445 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.391812865497076, + "acc_stderr": 0.037439798259264016, + "acc_norm": 0.391812865497076, + "acc_norm_stderr": 
0.037439798259264016 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2912621359223301, + "acc_stderr": 0.04498676320572924, + "acc_norm": 0.2912621359223301, + "acc_norm_stderr": 0.04498676320572924 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3563218390804598, + "acc_stderr": 0.017125853762755897, + "acc_norm": 0.3563218390804598, + "acc_norm_stderr": 0.017125853762755897 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34814814814814815, + "acc_stderr": 0.041153246103369526, + "acc_norm": 0.34814814814814815, + "acc_norm_stderr": 0.041153246103369526 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2936170212765957, + "acc_stderr": 0.029771642712491227, + "acc_norm": 0.2936170212765957, + "acc_norm_stderr": 0.029771642712491227 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.30120481927710846, + "acc_stderr": 0.0357160923005348, + "acc_norm": 0.30120481927710846, + "acc_norm_stderr": 0.0357160923005348 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3633440514469453, + "acc_stderr": 0.027316847674192717, + "acc_norm": 0.3633440514469453, + "acc_norm_stderr": 0.027316847674192717 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.38565022421524664, + "acc_stderr": 0.03266842214289202, + "acc_norm": 0.38565022421524664, + "acc_norm_stderr": 0.03266842214289202 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3511450381679389, + "acc_stderr": 0.04186445163013751, + "acc_norm": 0.3511450381679389, + "acc_norm_stderr": 0.04186445163013751 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.26262626262626265, + "acc_stderr": 0.03135305009533084, + "acc_norm": 0.26262626262626265, + "acc_norm_stderr": 
0.03135305009533084 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3586206896551724, + "acc_stderr": 0.03996629574876718, + "acc_norm": 0.3586206896551724, + "acc_norm_stderr": 0.03996629574876718 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237657, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237657 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3277310924369748, + "acc_stderr": 0.030489911417673227, + "acc_norm": 0.3277310924369748, + "acc_norm_stderr": 0.030489911417673227 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3, + "acc_stderr": 0.023234581088428498, + "acc_norm": 0.3, + "acc_norm_stderr": 0.023234581088428498 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.35, + "acc_stderr": 0.04793724854411018, + "acc_norm": 0.35, + "acc_norm_stderr": 0.04793724854411018 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.04668408033024932, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.04668408033024932 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.27586206896551724, + "acc_stderr": 0.031447125816782405, + "acc_norm": 0.27586206896551724, + "acc_norm_stderr": 0.031447125816782405 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3258064516129032, + "acc_stderr": 0.026662010578567104, + "acc_norm": 0.3258064516129032, + "acc_norm_stderr": 0.026662010578567104 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.49145299145299143, + "acc_stderr": 0.032751303000970296, + "acc_norm": 0.49145299145299143, + "acc_norm_stderr": 0.032751303000970296 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3169811320754717, + "acc_stderr": 0.02863723563980092, + "acc_norm": 
0.3169811320754717, + "acc_norm_stderr": 0.02863723563980092 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.32727272727272727, + "acc_stderr": 0.04494290866252088, + "acc_norm": 0.32727272727272727, + "acc_norm_stderr": 0.04494290866252088 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.23703703703703705, + "acc_stderr": 0.025928876132766114, + "acc_norm": 0.23703703703703705, + "acc_norm_stderr": 0.025928876132766114 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.03757949922943343, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.03757949922943343 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.3681592039800995, + "acc_stderr": 0.03410410565495302, + "acc_norm": 0.3681592039800995, + "acc_norm_stderr": 0.03410410565495302 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3179190751445087, + "acc_stderr": 0.035506839891655796, + "acc_norm": 0.3179190751445087, + "acc_norm_stderr": 0.035506839891655796 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.26455026455026454, + "acc_stderr": 0.022717467897708617, + "acc_norm": 0.26455026455026454, + "acc_norm_stderr": 0.022717467897708617 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3194444444444444, + "acc_stderr": 0.038990736873573344, + "acc_norm": 0.3194444444444444, + "acc_norm_stderr": 0.038990736873573344 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.23, + "acc_stderr": 0.042295258468165065, + "acc_norm": 0.23, + "acc_norm_stderr": 0.042295258468165065 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.31213872832369943, + "acc_stderr": 0.024946792225272314, + "acc_norm": 0.31213872832369943, + "acc_norm_stderr": 0.024946792225272314 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3006134969325153, + 
"acc_stderr": 0.03602511318806771, + "acc_norm": 0.3006134969325153, + "acc_norm_stderr": 0.03602511318806771 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3425925925925926, + "acc_stderr": 0.02640614597362565, + "acc_norm": 0.3425925925925926, + "acc_norm_stderr": 0.02640614597362565 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.32124352331606215, + "acc_stderr": 0.033699508685490674, + "acc_norm": 0.32124352331606215, + "acc_norm_stderr": 0.033699508685490674 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.04227054451232199, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.04227054451232199 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.27522935779816515, + "acc_stderr": 0.019149093743155196, + "acc_norm": 0.27522935779816515, + "acc_norm_stderr": 0.019149093743155196 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.1746031746031746, + "acc_stderr": 0.033954900208561116, + "acc_norm": 0.1746031746031746, + "acc_norm_stderr": 0.033954900208561116 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2908496732026144, + "acc_stderr": 0.026004800363952113, + "acc_norm": 0.2908496732026144, + "acc_norm_stderr": 0.026004800363952113 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.4297520661157025, + "acc_stderr": 0.04519082021319772, + "acc_norm": 0.4297520661157025, + "acc_norm_stderr": 0.04519082021319772 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.2236842105263158, + "acc_stderr": 0.03391160934343602, + "acc_norm": 0.2236842105263158, + "acc_norm_stderr": 0.03391160934343602 + }, + "harness|ko_mmlu_professional_psychology|5": { 
+ "acc": 0.2973856209150327, + "acc_stderr": 0.018492596536396955, + "acc_norm": 0.2973856209150327, + "acc_norm_stderr": 0.018492596536396955 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3049645390070922, + "acc_stderr": 0.027464708442022128, + "acc_norm": 0.3049645390070922, + "acc_norm_stderr": 0.027464708442022128 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.04547960999764376, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.04547960999764376 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.1712962962962963, + "acc_stderr": 0.02569534164382468, + "acc_norm": 0.1712962962962963, + "acc_norm_stderr": 0.02569534164382468 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.22905027932960895, + "acc_stderr": 0.014054314935614548, + "acc_norm": 0.22905027932960895, + "acc_norm_stderr": 0.014054314935614548 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.024562204314142314, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.024562204314142314 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.21224489795918366, + "acc_stderr": 0.026176967197866764, + "acc_norm": 0.21224489795918366, + "acc_norm_stderr": 0.026176967197866764 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.4177215189873418, + "acc_stderr": 0.032103530322412685, + "acc_norm": 0.4177215189873418, + "acc_norm_stderr": 0.032103530322412685 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.26988265971316816, + "acc_stderr": 0.011337381084250414, + "acc_norm": 0.26988265971316816, + 
"acc_norm_stderr": 0.011337381084250414 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.030964517926923403, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.030964517926923403 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.296969696969697, + "acc_stderr": 0.03567969772268048, + "acc_norm": 0.296969696969697, + "acc_norm_stderr": 0.03567969772268048 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.28518971848225216, + "mc1_stderr": 0.015805827874454895, + "mc2": 0.4633799265743292, + "mc2_stderr": 0.015657144419865893 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2809917355371901, + "acc_stderr": 0.015453559655458277, + "acc_norm": 0.345926800472255, + "acc_norm_stderr": 0.01635385341434758 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "beomi/Llama-3-Open-Ko-8B-Instruct-preview", + "model_sha": "d8c93440c5c0426f0127e2baf822ce5b60fa3a73", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/beomi/Llama-3-Open-Ko-8B/result_2024-04-27 12:05:51.json b/beomi/Llama-3-Open-Ko-8B/result_2024-04-27 12:05:51.json new file 
mode 100644 index 0000000000000000000000000000000000000000..daacadd3de50ec5e9517837b08143d1d3f081945 --- /dev/null +++ b/beomi/Llama-3-Open-Ko-8B/result_2024-04-27 12:05:51.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.30716723549488056, + "acc_stderr": 0.013481034054980945, + "acc_norm": 0.3728668941979522, + "acc_norm_stderr": 0.014131176760131162 + }, + "harness|ko_hellaswag|10": { + "acc": 0.351822346146186, + "acc_stderr": 0.004765629263643522, + "acc_norm": 0.4600677155945031, + "acc_norm_stderr": 0.004973842670559798 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.38011695906432746, + "acc_stderr": 0.03722965741385539, + "acc_norm": 0.38011695906432746, + "acc_norm_stderr": 0.03722965741385539 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2524271844660194, + "acc_stderr": 0.0430125039969088, + "acc_norm": 0.2524271844660194, + "acc_norm_stderr": 0.0430125039969088 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3371647509578544, + "acc_stderr": 0.016905207420803554, + "acc_norm": 0.3371647509578544, + "acc_norm_stderr": 0.016905207420803554 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3037037037037037, + "acc_stderr": 0.039725528847851375, + "acc_norm": 0.3037037037037037, + "acc_norm_stderr": 0.039725528847851375 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.35319148936170214, + "acc_stderr": 0.03124532520276193, + "acc_norm": 0.35319148936170214, + "acc_norm_stderr": 0.03124532520276193 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.29518072289156627, + "acc_stderr": 0.035509201856896294, + "acc_norm": 0.29518072289156627, + "acc_norm_stderr": 0.035509201856896294 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.38263665594855306, + "acc_stderr": 0.027604689028581975, + "acc_norm": 0.38263665594855306, + 
"acc_norm_stderr": 0.027604689028581975 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.28699551569506726, + "acc_stderr": 0.030360379710291964, + "acc_norm": 0.28699551569506726, + "acc_norm_stderr": 0.030360379710291964 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3053435114503817, + "acc_stderr": 0.040393149787245626, + "acc_norm": 0.3053435114503817, + "acc_norm_stderr": 0.040393149787245626 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.26262626262626265, + "acc_stderr": 0.031353050095330834, + "acc_norm": 0.26262626262626265, + "acc_norm_stderr": 0.031353050095330834 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2896551724137931, + "acc_stderr": 0.037800192304380156, + "acc_norm": 0.2896551724137931, + "acc_norm_stderr": 0.037800192304380156 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237657, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237657 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3277310924369748, + "acc_stderr": 0.030489911417673227, + "acc_norm": 0.3277310924369748, + "acc_norm_stderr": 0.030489911417673227 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.30256410256410254, + "acc_stderr": 0.02329088805377272, + "acc_norm": 0.30256410256410254, + "acc_norm_stderr": 0.02329088805377272 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.28703703703703703, + "acc_stderr": 0.043733130409147614, + 
"acc_norm": 0.28703703703703703, + "acc_norm_stderr": 0.043733130409147614 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3103448275862069, + "acc_stderr": 0.03255086769970104, + "acc_norm": 0.3103448275862069, + "acc_norm_stderr": 0.03255086769970104 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3741935483870968, + "acc_stderr": 0.027528904299845777, + "acc_norm": 0.3741935483870968, + "acc_norm_stderr": 0.027528904299845777 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.4829059829059829, + "acc_stderr": 0.032736940493481824, + "acc_norm": 0.4829059829059829, + "acc_norm_stderr": 0.032736940493481824 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.29056603773584905, + "acc_stderr": 0.02794321998933716, + "acc_norm": 0.29056603773584905, + "acc_norm_stderr": 0.02794321998933716 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2636363636363636, + "acc_stderr": 0.04220224692971987, + "acc_norm": 0.2636363636363636, + "acc_norm_stderr": 0.04220224692971987 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.23333333333333334, + "acc_stderr": 0.025787874220959316, + "acc_norm": 0.23333333333333334, + "acc_norm_stderr": 0.025787874220959316 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + "acc_stderr": 0.03684881521389023, + "acc_norm": 0.2847682119205298, + "acc_norm_stderr": 0.03684881521389023 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.472636815920398, + "acc_stderr": 0.035302355173346824, + "acc_norm": 0.472636815920398, + "acc_norm_stderr": 0.035302355173346824 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.31213872832369943, + "acc_stderr": 0.035331333893236574, + "acc_norm": 0.31213872832369943, + "acc_norm_stderr": 0.035331333893236574 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2830687830687831, + "acc_stderr": 0.023201392938194978, + "acc_norm": 0.2830687830687831, + "acc_norm_stderr": 0.023201392938194978 + }, + 
"harness|ko_mmlu_college_biology|5": { + "acc": 0.3125, + "acc_stderr": 0.038760854559127644, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.038760854559127644 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.17, + "acc_stderr": 0.0377525168068637, + "acc_norm": 0.17, + "acc_norm_stderr": 0.0377525168068637 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.3063583815028902, + "acc_stderr": 0.024818350129436593, + "acc_norm": 0.3063583815028902, + "acc_norm_stderr": 0.024818350129436593 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.26993865030674846, + "acc_stderr": 0.034878251684978906, + "acc_norm": 0.26993865030674846, + "acc_norm_stderr": 0.034878251684978906 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3734567901234568, + "acc_stderr": 0.02691500301138015, + "acc_norm": 0.3734567901234568, + "acc_norm_stderr": 0.02691500301138015 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.27979274611398963, + "acc_stderr": 0.03239637046735704, + "acc_norm": 0.27979274611398963, + "acc_norm_stderr": 0.03239637046735704 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.041424397194893624, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.041424397194893624 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.26238532110091745, + "acc_stderr": 0.018861885021534745, + "acc_norm": 0.26238532110091745, + "acc_norm_stderr": 0.018861885021534745 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.1746031746031746, + "acc_stderr": 0.03395490020856111, + "acc_norm": 0.1746031746031746, + "acc_norm_stderr": 0.03395490020856111 + }, + 
"harness|ko_mmlu_nutrition|5": { + "acc": 0.33986928104575165, + "acc_stderr": 0.02712195607138887, + "acc_norm": 0.33986928104575165, + "acc_norm_stderr": 0.02712195607138887 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.45454545454545453, + "acc_stderr": 0.045454545454545456, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.045454545454545456 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.2236842105263158, + "acc_stderr": 0.033911609343436025, + "acc_norm": 0.2236842105263158, + "acc_norm_stderr": 0.033911609343436025 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.31699346405228757, + "acc_stderr": 0.018824219512706207, + "acc_norm": 0.31699346405228757, + "acc_norm_stderr": 0.018824219512706207 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2907801418439716, + "acc_stderr": 0.027090664368353178, + "acc_norm": 0.2907801418439716, + "acc_norm_stderr": 0.027090664368353178 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3392857142857143, + "acc_stderr": 0.04493949068613539, + "acc_norm": 0.3392857142857143, + "acc_norm_stderr": 0.04493949068613539 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.25, + "acc_stderr": 0.029531221160930918, + "acc_norm": 0.25, + "acc_norm_stderr": 0.029531221160930918 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2737430167597765, + "acc_stderr": 0.014912413096372432, + "acc_norm": 0.2737430167597765, + "acc_norm_stderr": 0.014912413096372432 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + 
"harness|ko_mmlu_professional_medicine|5": { + "acc": 0.27941176470588236, + "acc_stderr": 0.02725720260611495, + "acc_norm": 0.27941176470588236, + "acc_norm_stderr": 0.02725720260611495 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.24081632653061225, + "acc_stderr": 0.027372942201788163, + "acc_norm": 0.24081632653061225, + "acc_norm_stderr": 0.027372942201788163 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.3628691983122363, + "acc_stderr": 0.03129920825530213, + "acc_norm": 0.3628691983122363, + "acc_norm_stderr": 0.03129920825530213 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2666232073011734, + "acc_stderr": 0.011293836031612143, + "acc_norm": 0.2666232073011734, + "acc_norm_stderr": 0.011293836031612143 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.031660096793998144, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.031660096793998144 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.22424242424242424, + "acc_stderr": 0.032568666616811015, + "acc_norm": 0.22424242424242424, + "acc_norm_stderr": 0.032568666616811015 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.25091799265605874, + "mc1_stderr": 0.015176985027707689, + "mc2": 0.40364218014263636, + "mc2_stderr": 0.014941872761582384 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3069657615112161, + "acc_stderr": 0.015857588095362824, + "acc_norm": 0.4628099173553719, + "acc_norm_stderr": 0.0171427361176433 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + 
"harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + 
"harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "beomi/Llama-3-Open-Ko-8B", + "model_sha": "6545770e92e468ebd85d68d152899089329879a0", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/beomi/OPEN-SOLAR-KO-10.7B/result_2024-01-17 07:15:10.json b/beomi/OPEN-SOLAR-KO-10.7B/result_2024-01-17 07:15:10.json new file mode 100644 index 0000000000000000000000000000000000000000..d42bd1c8316be7519829bfdd13bee67138bc70db --- /dev/null +++ b/beomi/OPEN-SOLAR-KO-10.7B/result_2024-01-17 07:15:10.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.44197952218430037, + "acc_stderr": 0.014512682523128345, + "acc_norm": 0.49829351535836175, + "acc_norm_stderr": 0.014611305705056983 + }, + "harness|ko_hellaswag|10": { + "acc": 0.44503087034455285, + "acc_stderr": 0.0049595354431706115, + "acc_norm": 0.6038637721569409, + "acc_norm_stderr": 0.004880937933163286 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6374269005847953, + "acc_stderr": 0.0368713061556206, + "acc_norm": 0.6374269005847953, + "acc_norm_stderr": 0.0368713061556206 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6699029126213593, + "acc_stderr": 0.0465614711001235, + "acc_norm": 0.6699029126213593, + "acc_norm_stderr": 0.0465614711001235 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6909323116219668, + "acc_stderr": 0.01652498891970219, + "acc_norm": 0.6909323116219668, + "acc_norm_stderr": 0.01652498891970219 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.043097329010363554, + "acc_norm": 0.4666666666666667, + 
"acc_norm_stderr": 0.043097329010363554 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4553191489361702, + "acc_stderr": 0.03255525359340355, + "acc_norm": 0.4553191489361702, + "acc_norm_stderr": 0.03255525359340355 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.45180722891566266, + "acc_stderr": 0.03874371556587953, + "acc_norm": 0.45180722891566266, + "acc_norm_stderr": 0.03874371556587953 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5884244372990354, + "acc_stderr": 0.027950481494401262, + "acc_norm": 0.5884244372990354, + "acc_norm_stderr": 0.027950481494401262 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5381165919282511, + "acc_stderr": 0.03346015011973228, + "acc_norm": 0.5381165919282511, + "acc_norm_stderr": 0.03346015011973228 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5648854961832062, + "acc_stderr": 0.04348208051644858, + "acc_norm": 0.5648854961832062, + "acc_norm_stderr": 0.04348208051644858 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6565656565656566, + "acc_stderr": 0.03383201223244443, + "acc_norm": 0.6565656565656566, + "acc_norm_stderr": 0.03383201223244443 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4896551724137931, + "acc_stderr": 0.04165774775728763, + "acc_norm": 0.4896551724137931, + "acc_norm_stderr": 0.04165774775728763 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.042207736591714534, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.042207736591714534 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.47478991596638653, + "acc_stderr": 0.03243718055137411, + 
"acc_norm": 0.47478991596638653, + "acc_norm_stderr": 0.03243718055137411 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.541025641025641, + "acc_stderr": 0.025265525491284295, + "acc_norm": 0.541025641025641, + "acc_norm_stderr": 0.025265525491284295 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5462962962962963, + "acc_stderr": 0.04812917324536823, + "acc_norm": 0.5462962962962963, + "acc_norm_stderr": 0.04812917324536823 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3645320197044335, + "acc_stderr": 0.0338640574606209, + "acc_norm": 0.3645320197044335, + "acc_norm_stderr": 0.0338640574606209 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5580645161290323, + "acc_stderr": 0.028251557906849748, + "acc_norm": 0.5580645161290323, + "acc_norm_stderr": 0.028251557906849748 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7564102564102564, + "acc_stderr": 0.028120966503914397, + "acc_norm": 0.7564102564102564, + "acc_norm_stderr": 0.028120966503914397 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5132075471698113, + "acc_stderr": 0.030762134874500482, + "acc_norm": 0.5132075471698113, + "acc_norm_stderr": 0.030762134874500482 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5545454545454546, + "acc_stderr": 0.047605488214603246, + "acc_norm": 0.5545454545454546, + "acc_norm_stderr": 0.047605488214603246 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.31851851851851853, + "acc_stderr": 0.028406533090608466, + "acc_norm": 0.31851851851851853, + "acc_norm_stderr": 0.028406533090608466 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 
0.31125827814569534, + "acc_stderr": 0.03780445850526732, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.03780445850526732 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6567164179104478, + "acc_stderr": 0.03357379665433431, + "acc_norm": 0.6567164179104478, + "acc_norm_stderr": 0.03357379665433431 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.41040462427745666, + "acc_stderr": 0.03750757044895538, + "acc_norm": 0.41040462427745666, + "acc_norm_stderr": 0.03750757044895538 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3306878306878307, + "acc_stderr": 0.024229965298425072, + "acc_norm": 0.3306878306878307, + "acc_norm_stderr": 0.024229965298425072 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4930555555555556, + "acc_stderr": 0.04180806750294938, + "acc_norm": 0.4930555555555556, + "acc_norm_stderr": 0.04180806750294938 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.7, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.7, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5057803468208093, + "acc_stderr": 0.026917296179149116, + "acc_norm": 0.5057803468208093, + "acc_norm_stderr": 0.026917296179149116 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5030674846625767, + "acc_stderr": 0.03928297078179662, + "acc_norm": 0.5030674846625767, + "acc_norm_stderr": 0.03928297078179662 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5123456790123457, + "acc_stderr": 0.027812262269327235, + "acc_norm": 0.5123456790123457, + "acc_norm_stderr": 0.027812262269327235 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": 
{ + "acc": 0.6113989637305699, + "acc_stderr": 0.03517739796373131, + "acc_norm": 0.6113989637305699, + "acc_norm_stderr": 0.03517739796373131 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.39473684210526316, + "acc_stderr": 0.045981880578165414, + "acc_norm": 0.39473684210526316, + "acc_norm_stderr": 0.045981880578165414 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5944954128440367, + "acc_stderr": 0.02105099799189684, + "acc_norm": 0.5944954128440367, + "acc_norm_stderr": 0.02105099799189684 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.29365079365079366, + "acc_stderr": 0.040735243221471255, + "acc_norm": 0.29365079365079366, + "acc_norm_stderr": 0.040735243221471255 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5261437908496732, + "acc_stderr": 0.028590752958852394, + "acc_norm": 0.5261437908496732, + "acc_norm_stderr": 0.028590752958852394 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6694214876033058, + "acc_stderr": 0.04294340845212093, + "acc_norm": 0.6694214876033058, + "acc_norm_stderr": 0.04294340845212093 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4934210526315789, + "acc_stderr": 0.04068590050224971, + "acc_norm": 0.4934210526315789, + "acc_norm_stderr": 0.04068590050224971 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4297385620915033, + "acc_stderr": 0.020027122784928533, + "acc_norm": 0.4297385620915033, + "acc_norm_stderr": 0.020027122784928533 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.34397163120567376, + "acc_stderr": 0.02833801742861132, + "acc_norm": 0.34397163120567376, + "acc_norm_stderr": 0.02833801742861132 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25892857142857145, + "acc_stderr": 0.041577515398656284, + "acc_norm": 0.25892857142857145, + "acc_norm_stderr": 
0.041577515398656284 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.032757734861009996, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.032757734861009996 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.25921787709497207, + "acc_stderr": 0.014655780837497717, + "acc_norm": 0.25921787709497207, + "acc_norm_stderr": 0.014655780837497717 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562429, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562429 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.41911764705882354, + "acc_stderr": 0.029972807170464626, + "acc_norm": 0.41911764705882354, + "acc_norm_stderr": 0.029972807170464626 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5836734693877551, + "acc_stderr": 0.031557828165561665, + "acc_norm": 0.5836734693877551, + "acc_norm_stderr": 0.031557828165561665 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7046413502109705, + "acc_stderr": 0.02969633871342288, + "acc_norm": 0.7046413502109705, + "acc_norm_stderr": 0.02969633871342288 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3272490221642764, + "acc_stderr": 0.011983819806464747, + "acc_norm": 0.3272490221642764, + "acc_norm_stderr": 0.011983819806464747 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5735294117647058, + "acc_stderr": 0.03471157907953427, + "acc_norm": 0.5735294117647058, + "acc_norm_stderr": 0.03471157907953427 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5757575757575758, + "acc_stderr": 0.038592681420702636, + "acc_norm": 0.5757575757575758, + "acc_norm_stderr": 0.038592681420702636 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2827417380660955, + 
"mc1_stderr": 0.015764770836777308, + "mc2": 0.4357474888038803, + "mc2_stderr": 0.014937494440354825 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5796930342384888, + "acc_stderr": 0.016970598281177706, + "acc_norm": 0.6103896103896104, + "acc_norm_stderr": 0.016766161671893515 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + 
"harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "beomi/OPEN-SOLAR-KO-10.7B", + "model_sha": "489c9577eb724a792b839ecfe9d343dbd5547886", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/beomi/SOLAR-KO-10.7B/result_2024-01-08 09:14:42.json b/beomi/SOLAR-KO-10.7B/result_2024-01-08 09:14:42.json new file mode 100644 index 0000000000000000000000000000000000000000..9f0fede515dc604a5e6d6c1427377edb95ec1f0c --- /dev/null +++ b/beomi/SOLAR-KO-10.7B/result_2024-01-08 09:14:42.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.44197952218430037, + "acc_stderr": 0.014512682523128345, + "acc_norm": 0.49829351535836175, + "acc_norm_stderr": 0.014611305705056983 + }, + "harness|ko_hellaswag|10": { + "acc": 0.44503087034455285, + "acc_stderr": 0.0049595354431706115, + "acc_norm": 0.6038637721569409, + "acc_norm_stderr": 
0.004880937933163286 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6374269005847953, + "acc_stderr": 0.0368713061556206, + "acc_norm": 0.6374269005847953, + "acc_norm_stderr": 0.0368713061556206 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6699029126213593, + "acc_stderr": 0.0465614711001235, + "acc_norm": 0.6699029126213593, + "acc_norm_stderr": 0.0465614711001235 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6909323116219668, + "acc_stderr": 0.01652498891970219, + "acc_norm": 0.6909323116219668, + "acc_norm_stderr": 0.01652498891970219 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.043097329010363554, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.043097329010363554 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4553191489361702, + "acc_stderr": 0.03255525359340355, + "acc_norm": 0.4553191489361702, + "acc_norm_stderr": 0.03255525359340355 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.45180722891566266, + "acc_stderr": 0.03874371556587953, + "acc_norm": 0.45180722891566266, + "acc_norm_stderr": 0.03874371556587953 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5884244372990354, + "acc_stderr": 0.027950481494401262, + "acc_norm": 0.5884244372990354, + "acc_norm_stderr": 0.027950481494401262 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5381165919282511, + "acc_stderr": 0.03346015011973228, + "acc_norm": 0.5381165919282511, + "acc_norm_stderr": 0.03346015011973228 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5648854961832062, + "acc_stderr": 0.04348208051644858, + "acc_norm": 0.5648854961832062, + "acc_norm_stderr": 0.04348208051644858 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + 
}, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6565656565656566, + "acc_stderr": 0.03383201223244443, + "acc_norm": 0.6565656565656566, + "acc_norm_stderr": 0.03383201223244443 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4896551724137931, + "acc_stderr": 0.04165774775728763, + "acc_norm": 0.4896551724137931, + "acc_norm_stderr": 0.04165774775728763 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.042207736591714534, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.042207736591714534 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.47478991596638653, + "acc_stderr": 0.03243718055137411, + "acc_norm": 0.47478991596638653, + "acc_norm_stderr": 0.03243718055137411 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.541025641025641, + "acc_stderr": 0.025265525491284295, + "acc_norm": 0.541025641025641, + "acc_norm_stderr": 0.025265525491284295 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5462962962962963, + "acc_stderr": 0.04812917324536823, + "acc_norm": 0.5462962962962963, + "acc_norm_stderr": 0.04812917324536823 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3645320197044335, + "acc_stderr": 0.0338640574606209, + "acc_norm": 0.3645320197044335, + "acc_norm_stderr": 0.0338640574606209 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5580645161290323, + "acc_stderr": 0.028251557906849748, + "acc_norm": 0.5580645161290323, + "acc_norm_stderr": 0.028251557906849748 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7564102564102564, + "acc_stderr": 0.028120966503914397, + "acc_norm": 
0.7564102564102564, + "acc_norm_stderr": 0.028120966503914397 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5132075471698113, + "acc_stderr": 0.030762134874500482, + "acc_norm": 0.5132075471698113, + "acc_norm_stderr": 0.030762134874500482 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5545454545454546, + "acc_stderr": 0.047605488214603246, + "acc_norm": 0.5545454545454546, + "acc_norm_stderr": 0.047605488214603246 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.31851851851851853, + "acc_stderr": 0.028406533090608466, + "acc_norm": 0.31851851851851853, + "acc_norm_stderr": 0.028406533090608466 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.03780445850526732, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.03780445850526732 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6567164179104478, + "acc_stderr": 0.03357379665433431, + "acc_norm": 0.6567164179104478, + "acc_norm_stderr": 0.03357379665433431 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.41040462427745666, + "acc_stderr": 0.03750757044895538, + "acc_norm": 0.41040462427745666, + "acc_norm_stderr": 0.03750757044895538 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3306878306878307, + "acc_stderr": 0.024229965298425072, + "acc_norm": 0.3306878306878307, + "acc_norm_stderr": 0.024229965298425072 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4930555555555556, + "acc_stderr": 0.04180806750294938, + "acc_norm": 0.4930555555555556, + "acc_norm_stderr": 0.04180806750294938 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.7, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.7, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5057803468208093, + 
"acc_stderr": 0.026917296179149116, + "acc_norm": 0.5057803468208093, + "acc_norm_stderr": 0.026917296179149116 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5030674846625767, + "acc_stderr": 0.03928297078179662, + "acc_norm": 0.5030674846625767, + "acc_norm_stderr": 0.03928297078179662 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5123456790123457, + "acc_stderr": 0.027812262269327235, + "acc_norm": 0.5123456790123457, + "acc_norm_stderr": 0.027812262269327235 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6113989637305699, + "acc_stderr": 0.03517739796373131, + "acc_norm": 0.6113989637305699, + "acc_norm_stderr": 0.03517739796373131 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.39473684210526316, + "acc_stderr": 0.045981880578165414, + "acc_norm": 0.39473684210526316, + "acc_norm_stderr": 0.045981880578165414 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5944954128440367, + "acc_stderr": 0.02105099799189684, + "acc_norm": 0.5944954128440367, + "acc_norm_stderr": 0.02105099799189684 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.29365079365079366, + "acc_stderr": 0.040735243221471255, + "acc_norm": 0.29365079365079366, + "acc_norm_stderr": 0.040735243221471255 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5261437908496732, + "acc_stderr": 0.028590752958852394, + "acc_norm": 0.5261437908496732, + "acc_norm_stderr": 0.028590752958852394 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6694214876033058, + "acc_stderr": 0.04294340845212093, + "acc_norm": 0.6694214876033058, + "acc_norm_stderr": 0.04294340845212093 + }, + "harness|ko_mmlu_astronomy|5": { + 
"acc": 0.4934210526315789, + "acc_stderr": 0.04068590050224971, + "acc_norm": 0.4934210526315789, + "acc_norm_stderr": 0.04068590050224971 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4297385620915033, + "acc_stderr": 0.020027122784928533, + "acc_norm": 0.4297385620915033, + "acc_norm_stderr": 0.020027122784928533 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.34397163120567376, + "acc_stderr": 0.02833801742861132, + "acc_norm": 0.34397163120567376, + "acc_norm_stderr": 0.02833801742861132 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25892857142857145, + "acc_stderr": 0.041577515398656284, + "acc_norm": 0.25892857142857145, + "acc_norm_stderr": 0.041577515398656284 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.032757734861009996, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.032757734861009996 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.25921787709497207, + "acc_stderr": 0.014655780837497717, + "acc_norm": 0.25921787709497207, + "acc_norm_stderr": 0.014655780837497717 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562429, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562429 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.41911764705882354, + "acc_stderr": 0.029972807170464626, + "acc_norm": 0.41911764705882354, + "acc_norm_stderr": 0.029972807170464626 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5836734693877551, + "acc_stderr": 0.031557828165561665, + "acc_norm": 0.5836734693877551, + "acc_norm_stderr": 0.031557828165561665 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7046413502109705, + "acc_stderr": 0.02969633871342288, + "acc_norm": 0.7046413502109705, + 
"acc_norm_stderr": 0.02969633871342288 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3272490221642764, + "acc_stderr": 0.011983819806464747, + "acc_norm": 0.3272490221642764, + "acc_norm_stderr": 0.011983819806464747 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5735294117647058, + "acc_stderr": 0.03471157907953427, + "acc_norm": 0.5735294117647058, + "acc_norm_stderr": 0.03471157907953427 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5757575757575758, + "acc_stderr": 0.038592681420702636, + "acc_norm": 0.5757575757575758, + "acc_norm_stderr": 0.038592681420702636 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2827417380660955, + "mc1_stderr": 0.015764770836777308, + "mc2": 0.43574748880388037, + "mc2_stderr": 0.014937494440354825 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5796930342384888, + "acc_stderr": 0.016970598281177706, + "acc_norm": 0.6103896103896104, + "acc_norm_stderr": 0.016766161671893515 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + 
"harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "beomi/SOLAR-KO-10.7B", + "model_sha": "489c9577eb724a792b839ecfe9d343dbd5547886", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at 
end of file diff --git a/beomi/SOLAR-KOEN-10.8B/result_2024-02-19 04:23:14.json b/beomi/SOLAR-KOEN-10.8B/result_2024-02-19 04:23:14.json new file mode 100644 index 0000000000000000000000000000000000000000..3e43736dc5caebc23cc728901031d32ed5709339 --- /dev/null +++ b/beomi/SOLAR-KOEN-10.8B/result_2024-02-19 04:23:14.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.39334470989761094, + "acc_stderr": 0.014275101465693024, + "acc_norm": 0.44795221843003413, + "acc_norm_stderr": 0.014532011498211666 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4146584345747859, + "acc_stderr": 0.004916561213591279, + "acc_norm": 0.5792670782712607, + "acc_norm_stderr": 0.004926678108601343 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5964912280701754, + "acc_stderr": 0.03762738699917057, + "acc_norm": 0.5964912280701754, + "acc_norm_stderr": 0.03762738699917057 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6407766990291263, + "acc_stderr": 0.04750458399041697, + "acc_norm": 0.6407766990291263, + "acc_norm_stderr": 0.04750458399041697 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6385696040868455, + "acc_stderr": 0.01717960132890074, + "acc_norm": 0.6385696040868455, + "acc_norm_stderr": 0.01717960132890074 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34074074074074073, + "acc_stderr": 0.04094376269996796, + "acc_norm": 0.34074074074074073, + "acc_norm_stderr": 0.04094376269996796 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4595744680851064, + "acc_stderr": 0.032579014820998356, + "acc_norm": 0.4595744680851064, + "acc_norm_stderr": 0.032579014820998356 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4578313253012048, + "acc_stderr": 0.03878626771002361, + "acc_norm": 0.4578313253012048, + "acc_norm_stderr": 0.03878626771002361 + }, + 
"harness|ko_mmlu_philosophy|5": { + "acc": 0.5498392282958199, + "acc_stderr": 0.028256660723360184, + "acc_norm": 0.5498392282958199, + "acc_norm_stderr": 0.028256660723360184 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.48878923766816146, + "acc_stderr": 0.033549366530984746, + "acc_norm": 0.48878923766816146, + "acc_norm_stderr": 0.033549366530984746 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6030534351145038, + "acc_stderr": 0.04291135671009224, + "acc_norm": 0.6030534351145038, + "acc_norm_stderr": 0.04291135671009224 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6919191919191919, + "acc_stderr": 0.03289477330098615, + "acc_norm": 0.6919191919191919, + "acc_norm_stderr": 0.03289477330098615 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4896551724137931, + "acc_stderr": 0.04165774775728763, + "acc_norm": 0.4896551724137931, + "acc_norm_stderr": 0.04165774775728763 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.04440521906179328, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.04440521906179328 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5630252100840336, + "acc_stderr": 0.032219436365661956, + "acc_norm": 0.5630252100840336, + "acc_norm_stderr": 0.032219436365661956 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.517948717948718, + "acc_stderr": 0.025334667080954897, + "acc_norm": 0.517948717948718, + "acc_norm_stderr": 0.025334667080954897 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956913, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956913 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 
0.045126085985421276 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5648148148148148, + "acc_stderr": 0.04792898170907061, + "acc_norm": 0.5648148148148148, + "acc_norm_stderr": 0.04792898170907061 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39901477832512317, + "acc_stderr": 0.03445487686264715, + "acc_norm": 0.39901477832512317, + "acc_norm_stderr": 0.03445487686264715 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5548387096774193, + "acc_stderr": 0.028272410186214906, + "acc_norm": 0.5548387096774193, + "acc_norm_stderr": 0.028272410186214906 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7393162393162394, + "acc_stderr": 0.02876034895652341, + "acc_norm": 0.7393162393162394, + "acc_norm_stderr": 0.02876034895652341 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5094339622641509, + "acc_stderr": 0.0307673947078081, + "acc_norm": 0.5094339622641509, + "acc_norm_stderr": 0.0307673947078081 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5636363636363636, + "acc_stderr": 0.04750185058907296, + "acc_norm": 0.5636363636363636, + "acc_norm_stderr": 0.04750185058907296 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.34074074074074073, + "acc_stderr": 0.02889774874113114, + "acc_norm": 0.34074074074074073, + "acc_norm_stderr": 0.02889774874113114 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.37748344370860926, + "acc_stderr": 0.0395802723112157, + "acc_norm": 0.37748344370860926, + "acc_norm_stderr": 0.0395802723112157 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6716417910447762, + "acc_stderr": 0.03320685889744324, + "acc_norm": 0.6716417910447762, + "acc_norm_stderr": 0.03320685889744324 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4913294797687861, + "acc_stderr": 0.038118909889404105, + "acc_norm": 0.4913294797687861, + "acc_norm_stderr": 0.038118909889404105 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.34656084656084657, + 
"acc_stderr": 0.024508777521028424, + "acc_norm": 0.34656084656084657, + "acc_norm_stderr": 0.024508777521028424 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4861111111111111, + "acc_stderr": 0.04179596617581, + "acc_norm": 0.4861111111111111, + "acc_norm_stderr": 0.04179596617581 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.7, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.7, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5317919075144508, + "acc_stderr": 0.02686462436675665, + "acc_norm": 0.5317919075144508, + "acc_norm_stderr": 0.02686462436675665 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5153374233128835, + "acc_stderr": 0.03926522378708843, + "acc_norm": 0.5153374233128835, + "acc_norm_stderr": 0.03926522378708843 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5370370370370371, + "acc_stderr": 0.02774431344337654, + "acc_norm": 0.5370370370370371, + "acc_norm_stderr": 0.02774431344337654 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939098, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939098 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5958549222797928, + "acc_stderr": 0.0354150857888402, + "acc_norm": 0.5958549222797928, + "acc_norm_stderr": 0.0354150857888402 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.37719298245614036, + "acc_stderr": 0.04559522141958216, + "acc_norm": 0.37719298245614036, + "acc_norm_stderr": 0.04559522141958216 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6587155963302752, + "acc_stderr": 0.020328612816592432, + "acc_norm": 0.6587155963302752, + "acc_norm_stderr": 0.020328612816592432 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.25396825396825395, + 
"acc_stderr": 0.03893259610604676, + "acc_norm": 0.25396825396825395, + "acc_norm_stderr": 0.03893259610604676 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5032679738562091, + "acc_stderr": 0.028629305194003543, + "acc_norm": 0.5032679738562091, + "acc_norm_stderr": 0.028629305194003543 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.47, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.47, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6694214876033058, + "acc_stderr": 0.04294340845212094, + "acc_norm": 0.6694214876033058, + "acc_norm_stderr": 0.04294340845212094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5460526315789473, + "acc_stderr": 0.04051646342874144, + "acc_norm": 0.5460526315789473, + "acc_norm_stderr": 0.04051646342874144 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.45098039215686275, + "acc_stderr": 0.020130388312904524, + "acc_norm": 0.45098039215686275, + "acc_norm_stderr": 0.020130388312904524 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.34397163120567376, + "acc_stderr": 0.02833801742861132, + "acc_norm": 0.34397163120567376, + "acc_norm_stderr": 0.02833801742861132 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2767857142857143, + "acc_stderr": 0.04246624336697626, + "acc_norm": 0.2767857142857143, + "acc_norm_stderr": 0.04246624336697626 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.42592592592592593, + "acc_stderr": 0.033723432716530624, + "acc_norm": 0.42592592592592593, + "acc_norm_stderr": 0.033723432716530624 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23798882681564246, + "acc_stderr": 0.0142426300705749, + "acc_norm": 0.23798882681564246, + "acc_norm_stderr": 0.0142426300705749 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939098, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939098 + }, + 
"harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.59, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.59, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4852941176470588, + "acc_stderr": 0.03035969707904612, + "acc_norm": 0.4852941176470588, + "acc_norm_stderr": 0.03035969707904612 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5591836734693878, + "acc_stderr": 0.03178419114175363, + "acc_norm": 0.5591836734693878, + "acc_norm_stderr": 0.03178419114175363 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6455696202531646, + "acc_stderr": 0.03113730429718581, + "acc_norm": 0.6455696202531646, + "acc_norm_stderr": 0.03113730429718581 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3683181225554107, + "acc_stderr": 0.012319403369564642, + "acc_norm": 0.3683181225554107, + "acc_norm_stderr": 0.012319403369564642 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5882352941176471, + "acc_stderr": 0.034542365853806094, + "acc_norm": 0.5882352941176471, + "acc_norm_stderr": 0.034542365853806094 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6424242424242425, + "acc_stderr": 0.03742597043806587, + "acc_norm": 0.6424242424242425, + "acc_norm_stderr": 0.03742597043806587 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2631578947368421, + "mc1_stderr": 0.015415241740237031, + "mc2": 0.42574350940627886, + "mc2_stderr": 0.015335486100569009 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.46989374262101535, + "acc_stderr": 0.017159163590170216, + "acc_norm": 0.500590318772137, + "acc_norm_stderr": 0.01719034212344859 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + 
"harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + 
"harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "beomi/SOLAR-KOEN-10.8B", + "model_sha": "e5d1c4d7ad72feef8616a75a25848aa3b9b7dcd9", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/beomi/Solar-Ko-Recovery-11B/result_2024-07-02 05:34:10.json b/beomi/Solar-Ko-Recovery-11B/result_2024-07-02 05:34:10.json new file mode 100644 index 0000000000000000000000000000000000000000..ffb4807f93c732dde96fd7c7a015103d4826dfb6 --- /dev/null +++ b/beomi/Solar-Ko-Recovery-11B/result_2024-07-02 05:34:10.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4854948805460751, + "acc_stderr": 0.014605241081370053, + "acc_norm": 0.5392491467576792, + "acc_norm_stderr": 0.014566303676636586 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4682334196375224, + "acc_stderr": 0.0049797006957479515, + "acc_norm": 0.6474805815574587, + "acc_norm_stderr": 0.004767782256041002 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.7076023391812866, + "acc_stderr": 0.034886477134579215, + "acc_norm": 0.7076023391812866, + "acc_norm_stderr": 0.034886477134579215 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.7184466019417476, + "acc_stderr": 0.04453254836326469, + "acc_norm": 0.7184466019417476, + "acc_norm_stderr": 0.04453254836326469 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.7266922094508301, + "acc_stderr": 0.01593668106262856, + "acc_norm": 0.7266922094508301, + "acc_norm_stderr": 
0.01593668106262856 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.043097329010363554, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.043097329010363554 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4595744680851064, + "acc_stderr": 0.032579014820998356, + "acc_norm": 0.4595744680851064, + "acc_norm_stderr": 0.032579014820998356 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4939759036144578, + "acc_stderr": 0.03892212195333047, + "acc_norm": 0.4939759036144578, + "acc_norm_stderr": 0.03892212195333047 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6141479099678456, + "acc_stderr": 0.027648149599751464, + "acc_norm": 0.6141479099678456, + "acc_norm_stderr": 0.027648149599751464 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.6322869955156951, + "acc_stderr": 0.03236198350928275, + "acc_norm": 0.6322869955156951, + "acc_norm_stderr": 0.03236198350928275 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.648854961832061, + "acc_stderr": 0.04186445163013751, + "acc_norm": 0.648854961832061, + "acc_norm_stderr": 0.04186445163013751 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7626262626262627, + "acc_stderr": 0.030313710538198906, + "acc_norm": 0.7626262626262627, + "acc_norm_stderr": 0.030313710538198906 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5379310344827586, + "acc_stderr": 0.04154659671707548, + "acc_norm": 0.5379310344827586, + "acc_norm_stderr": 0.04154659671707548 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.30392156862745096, + "acc_stderr": 0.04576665403207761, + "acc_norm": 0.30392156862745096, + 
"acc_norm_stderr": 0.04576665403207761 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.592436974789916, + "acc_stderr": 0.03191863374478466, + "acc_norm": 0.592436974789916, + "acc_norm_stderr": 0.03191863374478466 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.558974358974359, + "acc_stderr": 0.02517404838400073, + "acc_norm": 0.558974358974359, + "acc_norm_stderr": 0.02517404838400073 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.64, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.64, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5833333333333334, + "acc_stderr": 0.04766075165356462, + "acc_norm": 0.5833333333333334, + "acc_norm_stderr": 0.04766075165356462 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.43349753694581283, + "acc_stderr": 0.03486731727419872, + "acc_norm": 0.43349753694581283, + "acc_norm_stderr": 0.03486731727419872 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6161290322580645, + "acc_stderr": 0.027666182075539635, + "acc_norm": 0.6161290322580645, + "acc_norm_stderr": 0.027666182075539635 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7777777777777778, + "acc_stderr": 0.027236013946196663, + "acc_norm": 0.7777777777777778, + "acc_norm_stderr": 0.027236013946196663 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5660377358490566, + "acc_stderr": 0.030503292013342596, + "acc_norm": 0.5660377358490566, + "acc_norm_stderr": 0.030503292013342596 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5727272727272728, + "acc_stderr": 0.047381987035454834, + "acc_norm": 0.5727272727272728, + "acc_norm_stderr": 0.047381987035454834 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.34814814814814815, + "acc_stderr": 
0.029045600290616258, + "acc_norm": 0.34814814814814815, + "acc_norm_stderr": 0.029045600290616258 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33112582781456956, + "acc_stderr": 0.038425817186598696, + "acc_norm": 0.33112582781456956, + "acc_norm_stderr": 0.038425817186598696 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7164179104477612, + "acc_stderr": 0.03187187537919796, + "acc_norm": 0.7164179104477612, + "acc_norm_stderr": 0.03187187537919796 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.47398843930635837, + "acc_stderr": 0.038073017265045125, + "acc_norm": 0.47398843930635837, + "acc_norm_stderr": 0.038073017265045125 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3544973544973545, + "acc_stderr": 0.024636830602842, + "acc_norm": 0.3544973544973545, + "acc_norm_stderr": 0.024636830602842 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5833333333333334, + "acc_stderr": 0.04122728707651282, + "acc_norm": 0.5833333333333334, + "acc_norm_stderr": 0.04122728707651282 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.76, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.76, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.6069364161849711, + "acc_stderr": 0.026296227915613667, + "acc_norm": 0.6069364161849711, + "acc_norm_stderr": 0.026296227915613667 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5766871165644172, + "acc_stderr": 0.03881891213334384, + "acc_norm": 0.5766871165644172, + "acc_norm_stderr": 0.03881891213334384 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6265432098765432, + "acc_stderr": 0.02691500301138016, + "acc_norm": 0.6265432098765432, + "acc_norm_stderr": 0.02691500301138016 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.27, + 
"acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7046632124352331, + "acc_stderr": 0.03292296639155141, + "acc_norm": 0.7046632124352331, + "acc_norm_stderr": 0.03292296639155141 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.37719298245614036, + "acc_stderr": 0.04559522141958216, + "acc_norm": 0.37719298245614036, + "acc_norm_stderr": 0.04559522141958216 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6990825688073394, + "acc_stderr": 0.019664751366802114, + "acc_norm": 0.6990825688073394, + "acc_norm_stderr": 0.019664751366802114 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.36507936507936506, + "acc_stderr": 0.04306241259127153, + "acc_norm": 0.36507936507936506, + "acc_norm_stderr": 0.04306241259127153 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.6013071895424836, + "acc_stderr": 0.028036092273891776, + "acc_norm": 0.6013071895424836, + "acc_norm_stderr": 0.028036092273891776 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.62, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.62, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7272727272727273, + "acc_stderr": 0.04065578140908705, + "acc_norm": 0.7272727272727273, + "acc_norm_stderr": 0.04065578140908705 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5657894736842105, + "acc_stderr": 0.04033565667848319, + "acc_norm": 0.5657894736842105, + "acc_norm_stderr": 0.04033565667848319 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5049019607843137, + "acc_stderr": 0.020226862710039473, + "acc_norm": 0.5049019607843137, + "acc_norm_stderr": 0.020226862710039473 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.40070921985815605, + "acc_stderr": 0.029233465745573083, + "acc_norm": 0.40070921985815605, + "acc_norm_stderr": 0.029233465745573083 + }, + 
"harness|ko_mmlu_machine_learning|5": { + "acc": 0.4017857142857143, + "acc_stderr": 0.04653333146973646, + "acc_norm": 0.4017857142857143, + "acc_norm_stderr": 0.04653333146973646 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4212962962962963, + "acc_stderr": 0.03367462138896078, + "acc_norm": 0.4212962962962963, + "acc_norm_stderr": 0.03367462138896078 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2212290502793296, + "acc_stderr": 0.013882164598887296, + "acc_norm": 0.2212290502793296, + "acc_norm_stderr": 0.013882164598887296 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.67, + "acc_stderr": 0.047258156262526066, + "acc_norm": 0.67, + "acc_norm_stderr": 0.047258156262526066 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5330882352941176, + "acc_stderr": 0.03030625772246832, + "acc_norm": 0.5330882352941176, + "acc_norm_stderr": 0.03030625772246832 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6244897959183674, + "acc_stderr": 0.03100120903989484, + "acc_norm": 0.6244897959183674, + "acc_norm_stderr": 0.03100120903989484 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7341772151898734, + "acc_stderr": 0.028756799629658342, + "acc_norm": 0.7341772151898734, + "acc_norm_stderr": 0.028756799629658342 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3956975228161669, + "acc_stderr": 0.01248929073544901, + "acc_norm": 0.3956975228161669, + "acc_norm_stderr": 0.01248929073544901 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6715686274509803, + "acc_stderr": 0.03296245110172229, + "acc_norm": 0.6715686274509803, + "acc_norm_stderr": 0.03296245110172229 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6727272727272727, + "acc_stderr": 0.03663974994391242, + 
"acc_norm": 0.6727272727272727, + "acc_norm_stderr": 0.03663974994391242 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.28886168910648713, + "mc1_stderr": 0.015866346401384304, + "mc2": 0.42752806094445267, + "mc2_stderr": 0.014859167765401108 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.6481700118063755, + "acc_stderr": 0.01641820645121805, + "acc_norm": 0.6623376623376623, + "acc_norm_stderr": 0.016259075784754943 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + 
"harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "beomi/Solar-Ko-Recovery-11B", + "model_sha": "c8306dcac4a180e5b10feadd5b4dbed446499aa7", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/beomi/Yi-Ko-6B/result_2023-12-01 23:58:13.json b/beomi/Yi-Ko-6B/result_2023-12-01 23:58:13.json new file mode 100644 index 0000000000000000000000000000000000000000..4640f797e418c923b1323a9b5000d7dc3f61c5f3 --- /dev/null +++ b/beomi/Yi-Ko-6B/result_2023-12-01 23:58:13.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3438566552901024, + "acc_stderr": 0.013880644570156211, + "acc_norm": 0.4104095563139932, + "acc_norm_stderr": 0.014374922192642664 + }, + "harness|ko_hellaswag|10": { + 
"acc": 0.39822744473212507, + "acc_stderr": 0.004885323175701673, + "acc_norm": 0.5338577972515435, + "acc_norm_stderr": 0.004978328190775523 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4853801169590643, + "acc_stderr": 0.038331852752130205, + "acc_norm": 0.4853801169590643, + "acc_norm_stderr": 0.038331852752130205 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5728155339805825, + "acc_stderr": 0.04897957737781168, + "acc_norm": 0.5728155339805825, + "acc_norm_stderr": 0.04897957737781168 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5274584929757343, + "acc_stderr": 0.017852981266633944, + "acc_norm": 0.5274584929757343, + "acc_norm_stderr": 0.017852981266633944 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45185185185185184, + "acc_stderr": 0.042992689054808624, + "acc_norm": 0.45185185185185184, + "acc_norm_stderr": 0.042992689054808624 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847415, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847415 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.40425531914893614, + "acc_stderr": 0.03208115750788684, + "acc_norm": 0.40425531914893614, + "acc_norm_stderr": 0.03208115750788684 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.40963855421686746, + "acc_stderr": 0.03828401115079022, + "acc_norm": 0.40963855421686746, + "acc_norm_stderr": 0.03828401115079022 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5048231511254019, + "acc_stderr": 0.028396770444111298, + "acc_norm": 0.5048231511254019, + "acc_norm_stderr": 0.028396770444111298 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.515695067264574, + "acc_stderr": 0.0335412657542081, + "acc_norm": 0.515695067264574, + "acc_norm_stderr": 0.0335412657542081 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5114503816793893, + "acc_stderr": 0.043841400240780176, + "acc_norm": 0.5114503816793893, + "acc_norm_stderr": 0.043841400240780176 + }, + 
"harness|ko_mmlu_medical_genetics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6515151515151515, + "acc_stderr": 0.03394853965156402, + "acc_norm": 0.6515151515151515, + "acc_norm_stderr": 0.03394853965156402 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5103448275862069, + "acc_stderr": 0.04165774775728762, + "acc_norm": 0.5103448275862069, + "acc_norm_stderr": 0.04165774775728762 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3431372549019608, + "acc_stderr": 0.04724007352383888, + "acc_norm": 0.3431372549019608, + "acc_norm_stderr": 0.04724007352383888 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5168067226890757, + "acc_stderr": 0.03246013680375308, + "acc_norm": 0.5168067226890757, + "acc_norm_stderr": 0.03246013680375308 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.47692307692307695, + "acc_stderr": 0.025323990861736118, + "acc_norm": 0.47692307692307695, + "acc_norm_stderr": 0.025323990861736118 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.59, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.59, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.03481904844438804, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.03481904844438804 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5290322580645161, + "acc_stderr": 0.02839601640276099, + "acc_norm": 0.5290322580645161, + 
"acc_norm_stderr": 0.02839601640276099 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7222222222222222, + "acc_stderr": 0.029343114798094462, + "acc_norm": 0.7222222222222222, + "acc_norm_stderr": 0.029343114798094462 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5320754716981132, + "acc_stderr": 0.03070948699255654, + "acc_norm": 0.5320754716981132, + "acc_norm_stderr": 0.03070948699255654 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4818181818181818, + "acc_stderr": 0.04785964010794916, + "acc_norm": 0.4818181818181818, + "acc_norm_stderr": 0.04785964010794916 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.02794045713622842, + "acc_norm": 0.3, + "acc_norm_stderr": 0.02794045713622842 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3708609271523179, + "acc_stderr": 0.03943966699183629, + "acc_norm": 0.3708609271523179, + "acc_norm_stderr": 0.03943966699183629 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6318407960199005, + "acc_stderr": 0.03410410565495301, + "acc_norm": 0.6318407960199005, + "acc_norm_stderr": 0.03410410565495301 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4508670520231214, + "acc_stderr": 0.0379401267469703, + "acc_norm": 0.4508670520231214, + "acc_norm_stderr": 0.0379401267469703 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.335978835978836, + "acc_stderr": 0.024326310529149138, + "acc_norm": 0.335978835978836, + "acc_norm_stderr": 0.024326310529149138 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.041553199555931467, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.041553199555931467 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.67, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.67, + 
"acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5028901734104047, + "acc_stderr": 0.02691864538323901, + "acc_norm": 0.5028901734104047, + "acc_norm_stderr": 0.02691864538323901 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4539877300613497, + "acc_stderr": 0.0391170190467718, + "acc_norm": 0.4539877300613497, + "acc_norm_stderr": 0.0391170190467718 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4876543209876543, + "acc_stderr": 0.027812262269327235, + "acc_norm": 0.4876543209876543, + "acc_norm_stderr": 0.027812262269327235 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5958549222797928, + "acc_stderr": 0.0354150857888402, + "acc_norm": 0.5958549222797928, + "acc_norm_stderr": 0.0354150857888402 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04434600701584925, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04434600701584925 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6293577981651376, + "acc_stderr": 0.020707458164352984, + "acc_norm": 0.6293577981651376, + "acc_norm_stderr": 0.020707458164352984 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2698412698412698, + "acc_stderr": 0.03970158273235173, + "acc_norm": 0.2698412698412698, + "acc_norm_stderr": 0.03970158273235173 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5196078431372549, + "acc_stderr": 0.028607893699576073, + "acc_norm": 0.5196078431372549, + "acc_norm_stderr": 0.028607893699576073 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.55, + "acc_stderr": 0.05, + "acc_norm": 0.55, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5702479338842975, + "acc_stderr": 0.04519082021319772, + "acc_norm": 0.5702479338842975, + 
"acc_norm_stderr": 0.04519082021319772 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.48026315789473684, + "acc_stderr": 0.040657710025626036, + "acc_norm": 0.48026315789473684, + "acc_norm_stderr": 0.040657710025626036 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4084967320261438, + "acc_stderr": 0.01988622103750187, + "acc_norm": 0.4084967320261438, + "acc_norm_stderr": 0.01988622103750187 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3617021276595745, + "acc_stderr": 0.028663820147199495, + "acc_norm": 0.3617021276595745, + "acc_norm_stderr": 0.028663820147199495 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25892857142857145, + "acc_stderr": 0.041577515398656284, + "acc_norm": 0.25892857142857145, + "acc_norm_stderr": 0.041577515398656284 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3472222222222222, + "acc_stderr": 0.032468872436376486, + "acc_norm": 0.3472222222222222, + "acc_norm_stderr": 0.032468872436376486 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24022346368715083, + "acc_stderr": 0.014288343803925295, + "acc_norm": 0.24022346368715083, + "acc_norm_stderr": 0.014288343803925295 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.030161911930767102, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.030161911930767102 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4816326530612245, + "acc_stderr": 0.031987615467631264, + "acc_norm": 0.4816326530612245, + "acc_norm_stderr": 0.031987615467631264 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 
0.6160337552742616, + "acc_stderr": 0.031658678064106674, + "acc_norm": 0.6160337552742616, + "acc_norm_stderr": 0.031658678064106674 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.32985658409387225, + "acc_stderr": 0.012008129938540479, + "acc_norm": 0.32985658409387225, + "acc_norm_stderr": 0.012008129938540479 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5392156862745098, + "acc_stderr": 0.03498501649369527, + "acc_norm": 0.5392156862745098, + "acc_norm_stderr": 0.03498501649369527 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5515151515151515, + "acc_stderr": 0.038835659779569286, + "acc_norm": 0.5515151515151515, + "acc_norm_stderr": 0.038835659779569286 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.26438188494492043, + "mc1_stderr": 0.015438211119522509, + "mc2": 0.4163606442574381, + "mc2_stderr": 0.014821458524779802 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5737898465171193, + "acc_stderr": 0.017002122609489252, + "acc_norm": 0.6162927981109799, + "acc_norm_stderr": 0.016718924637231826 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + 
"harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "beomi/Yi-Ko-6B", + "model_sha": "edb86b47219f600f391da5821a07a22ab450b11b", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + 
"num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/beomi/gemma-mling-7b/result_2024-04-16 07:19:13.json b/beomi/gemma-mling-7b/result_2024-04-16 07:19:13.json new file mode 100644 index 0000000000000000000000000000000000000000..4057ce83efd2b12471655484a1e0c32457442b3b --- /dev/null +++ b/beomi/gemma-mling-7b/result_2024-04-16 07:19:13.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.42918088737201365, + "acc_stderr": 0.014464085894870653, + "acc_norm": 0.49402730375426623, + "acc_norm_stderr": 0.014610348300255793 + }, + "harness|ko_hellaswag|10": { + "acc": 0.43796056562437763, + "acc_stderr": 0.004951222171763104, + "acc_norm": 0.5918143796056562, + "acc_norm_stderr": 0.004904933500255878 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.7017543859649122, + "acc_stderr": 0.03508771929824563, + "acc_norm": 0.7017543859649122, + "acc_norm_stderr": 0.03508771929824563 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.7281553398058253, + "acc_stderr": 0.044052680241409216, + "acc_norm": 0.7281553398058253, + "acc_norm_stderr": 0.044052680241409216 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6462324393358876, + "acc_stderr": 0.0170981847081619, + "acc_norm": 0.6462324393358876, + "acc_norm_stderr": 0.0170981847081619 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.043163785995113245, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.043163785995113245 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.5361702127659574, + "acc_stderr": 0.03260038511835771, + "acc_norm": 0.5361702127659574, + "acc_norm_stderr": 0.03260038511835771 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4939759036144578, + "acc_stderr": 
0.03892212195333045, + "acc_norm": 0.4939759036144578, + "acc_norm_stderr": 0.03892212195333045 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5884244372990354, + "acc_stderr": 0.027950481494401266, + "acc_norm": 0.5884244372990354, + "acc_norm_stderr": 0.027950481494401266 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5739910313901345, + "acc_stderr": 0.033188332862172806, + "acc_norm": 0.5739910313901345, + "acc_norm_stderr": 0.033188332862172806 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5877862595419847, + "acc_stderr": 0.04317171194870255, + "acc_norm": 0.5877862595419847, + "acc_norm_stderr": 0.04317171194870255 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.45, + "acc_stderr": 0.04999999999999998, + "acc_norm": 0.45, + "acc_norm_stderr": 0.04999999999999998 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6717171717171717, + "acc_stderr": 0.03345678422756777, + "acc_norm": 0.6717171717171717, + "acc_norm_stderr": 0.03345678422756777 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.6, + "acc_stderr": 0.04082482904638628, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04082482904638628 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.04617034827006718, + "acc_norm": 0.3137254901960784, + "acc_norm_stderr": 0.04617034827006718 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5798319327731093, + "acc_stderr": 0.032061837832361516, + "acc_norm": 0.5798319327731093, + "acc_norm_stderr": 0.032061837832361516 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5461538461538461, + "acc_stderr": 0.025242770987126194, + "acc_norm": 0.5461538461538461, + "acc_norm_stderr": 0.025242770987126194 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.39, + "acc_stderr": 
0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6018518518518519, + "acc_stderr": 0.04732332615978814, + "acc_norm": 0.6018518518518519, + "acc_norm_stderr": 0.04732332615978814 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4482758620689655, + "acc_stderr": 0.034991131376767445, + "acc_norm": 0.4482758620689655, + "acc_norm_stderr": 0.034991131376767445 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6419354838709678, + "acc_stderr": 0.02727389059430063, + "acc_norm": 0.6419354838709678, + "acc_norm_stderr": 0.02727389059430063 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7692307692307693, + "acc_stderr": 0.027601921381417593, + "acc_norm": 0.7692307692307693, + "acc_norm_stderr": 0.027601921381417593 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5207547169811321, + "acc_stderr": 0.03074634997572347, + "acc_norm": 0.5207547169811321, + "acc_norm_stderr": 0.03074634997572347 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5818181818181818, + "acc_stderr": 0.04724577405731572, + "acc_norm": 0.5818181818181818, + "acc_norm_stderr": 0.04724577405731572 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.4, + "acc_stderr": 0.02986960509531691, + "acc_norm": 0.4, + "acc_norm_stderr": 0.02986960509531691 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3509933774834437, + "acc_stderr": 0.03896981964257375, + "acc_norm": 0.3509933774834437, + "acc_norm_stderr": 0.03896981964257375 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7014925373134329, + "acc_stderr": 0.03235743789355043, + "acc_norm": 0.7014925373134329, + "acc_norm_stderr": 0.03235743789355043 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5086705202312138, + "acc_stderr": 0.03811890988940412, + "acc_norm": 0.5086705202312138, + "acc_norm_stderr": 0.03811890988940412 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + 
"acc": 0.4021164021164021, + "acc_stderr": 0.025253032554997692, + "acc_norm": 0.4021164021164021, + "acc_norm_stderr": 0.025253032554997692 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5833333333333334, + "acc_stderr": 0.04122728707651282, + "acc_norm": 0.5833333333333334, + "acc_norm_stderr": 0.04122728707651282 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.74, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.74, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5404624277456648, + "acc_stderr": 0.02683080599895224, + "acc_norm": 0.5404624277456648, + "acc_norm_stderr": 0.02683080599895224 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5030674846625767, + "acc_stderr": 0.03928297078179663, + "acc_norm": 0.5030674846625767, + "acc_norm_stderr": 0.03928297078179663 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.595679012345679, + "acc_stderr": 0.02730662529732768, + "acc_norm": 0.595679012345679, + "acc_norm_stderr": 0.02730662529732768 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6528497409326425, + "acc_stderr": 0.03435696168361355, + "acc_norm": 0.6528497409326425, + "acc_norm_stderr": 0.03435696168361355 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.41228070175438597, + "acc_stderr": 0.04630653203366597, + "acc_norm": 0.41228070175438597, + "acc_norm_stderr": 0.04630653203366597 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6844036697247706, + "acc_stderr": 0.01992611751386967, + "acc_norm": 0.6844036697247706, + "acc_norm_stderr": 0.01992611751386967 + }, + "harness|ko_mmlu_formal_logic|5": { + 
"acc": 0.373015873015873, + "acc_stderr": 0.04325506042017086, + "acc_norm": 0.373015873015873, + "acc_norm_stderr": 0.04325506042017086 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.6078431372549019, + "acc_stderr": 0.027956046165424516, + "acc_norm": 0.6078431372549019, + "acc_norm_stderr": 0.027956046165424516 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.55, + "acc_stderr": 0.05, + "acc_norm": 0.55, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7107438016528925, + "acc_stderr": 0.04139112727635463, + "acc_norm": 0.7107438016528925, + "acc_norm_stderr": 0.04139112727635463 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.6447368421052632, + "acc_stderr": 0.03894734487013317, + "acc_norm": 0.6447368421052632, + "acc_norm_stderr": 0.03894734487013317 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.48366013071895425, + "acc_stderr": 0.02021703065318646, + "acc_norm": 0.48366013071895425, + "acc_norm_stderr": 0.02021703065318646 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.38652482269503546, + "acc_stderr": 0.02904919034254345, + "acc_norm": 0.38652482269503546, + "acc_norm_stderr": 0.02904919034254345 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.375, + "acc_stderr": 0.04595091388086298, + "acc_norm": 0.375, + "acc_norm_stderr": 0.04595091388086298 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.42592592592592593, + "acc_stderr": 0.033723432716530624, + "acc_norm": 0.42592592592592593, + "acc_norm_stderr": 0.033723432716530624 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24804469273743016, + "acc_stderr": 0.014444157808261448, + "acc_norm": 0.24804469273743016, + "acc_norm_stderr": 0.014444157808261448 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + 
"acc": 0.59, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.59, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5, + "acc_stderr": 0.030372836961539352, + "acc_norm": 0.5, + "acc_norm_stderr": 0.030372836961539352 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6408163265306123, + "acc_stderr": 0.03071356045510849, + "acc_norm": 0.6408163265306123, + "acc_norm_stderr": 0.03071356045510849 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.729957805907173, + "acc_stderr": 0.028900721906293426, + "acc_norm": 0.729957805907173, + "acc_norm_stderr": 0.028900721906293426 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.39374185136897, + "acc_stderr": 0.01247853227256444, + "acc_norm": 0.39374185136897, + "acc_norm_stderr": 0.01247853227256444 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.03308611113236436, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.03308611113236436 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6545454545454545, + "acc_stderr": 0.037131580674819135, + "acc_norm": 0.6545454545454545, + "acc_norm_stderr": 0.037131580674819135 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.23378212974296206, + "mc1_stderr": 0.01481619599193158, + "mc2": 0.38084952552734835, + "mc2_stderr": 0.014575507013209059 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.602125147579693, + "acc_stderr": 0.01682795905473339, + "acc_norm": 0.6269185360094451, + "acc_norm_stderr": 0.016627318275137443 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + 
"harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 
1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "beomi/gemma-mling-7b", + "model_sha": "3f442e28bd50db6c438ce2a15b3a003532babba0", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/beomi/kollama-13b/result_2023-09-26 17:41:30.json b/beomi/kollama-13b/result_2023-09-26 17:41:30.json new file mode 100644 index 0000000000000000000000000000000000000000..22962cd72290ddbaf9190d2001d0a2857b636fc0 --- /dev/null +++ b/beomi/kollama-13b/result_2023-09-26 17:41:30.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.18430034129692832, + "acc_stderr": 0.011330517933037432, + "acc_norm": 0.24061433447098976, + "acc_norm_stderr": 0.012491468532390559 + }, + "harness|ko_hellaswag|10": { + "acc": 0.2724556861183031, + "acc_stderr": 0.004443131632679339, + "acc_norm": 0.2983469428400717, + "acc_norm_stderr": 0.004565974937793705 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.26900584795321636, + "acc_stderr": 0.0340105262010409, + "acc_norm": 0.26900584795321636, + "acc_norm_stderr": 0.0340105262010409 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.17475728155339806, + "acc_stderr": 0.037601780060266196, + "acc_norm": 0.17475728155339806, + "acc_norm_stderr": 0.037601780060266196 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.015671006009339582, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.015671006009339582 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.22962962962962963, + "acc_stderr": 0.036333844140734636, + 
"acc_norm": 0.22962962962962963, + "acc_norm_stderr": 0.036333844140734636 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.20425531914893616, + "acc_stderr": 0.02635515841334941, + "acc_norm": 0.20425531914893616, + "acc_norm_stderr": 0.02635515841334941 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.28313253012048195, + "acc_stderr": 0.03507295431370518, + "acc_norm": 0.28313253012048195, + "acc_norm_stderr": 0.03507295431370518 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2861736334405145, + "acc_stderr": 0.025670259242188947, + "acc_norm": 0.2861736334405145, + "acc_norm_stderr": 0.025670259242188947 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.2825112107623318, + "acc_stderr": 0.030216831011508773, + "acc_norm": 0.2825112107623318, + "acc_norm_stderr": 0.030216831011508773 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2595419847328244, + "acc_stderr": 0.03844876139785271, + "acc_norm": 0.2595419847328244, + "acc_norm_stderr": 0.03844876139785271 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.24242424242424243, + "acc_stderr": 0.030532892233932036, + "acc_norm": 0.24242424242424243, + "acc_norm_stderr": 0.030532892233932036 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2482758620689655, + "acc_stderr": 0.036001056927277716, + "acc_norm": 0.2482758620689655, + "acc_norm_stderr": 0.036001056927277716 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237654, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237654 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3487394957983193, + 
"acc_stderr": 0.030956636328566548, + "acc_norm": 0.3487394957983193, + "acc_norm_stderr": 0.030956636328566548 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3641025641025641, + "acc_stderr": 0.024396672985094778, + "acc_norm": 0.3641025641025641, + "acc_norm_stderr": 0.024396672985094778 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.22, + "acc_stderr": 0.0416333199893227, + "acc_norm": 0.22, + "acc_norm_stderr": 0.0416333199893227 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.21296296296296297, + "acc_stderr": 0.03957835471980981, + "acc_norm": 0.21296296296296297, + "acc_norm_stderr": 0.03957835471980981 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2561576354679803, + "acc_stderr": 0.0307127300709826, + "acc_norm": 0.2561576354679803, + "acc_norm_stderr": 0.0307127300709826 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3161290322580645, + "acc_stderr": 0.026450874489042764, + "acc_norm": 0.3161290322580645, + "acc_norm_stderr": 0.026450874489042764 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.19658119658119658, + "acc_stderr": 0.02603538609895129, + "acc_norm": 0.19658119658119658, + "acc_norm_stderr": 0.02603538609895129 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.23773584905660378, + "acc_stderr": 0.02619980880756189, + "acc_norm": 0.23773584905660378, + "acc_norm_stderr": 0.02619980880756189 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03955932861795833, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03955932861795833 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.026842057873833706, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.026842057873833706 + }, + 
"harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33112582781456956, + "acc_stderr": 0.038425817186598696, + "acc_norm": 0.33112582781456956, + "acc_norm_stderr": 0.038425817186598696 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.23383084577114427, + "acc_stderr": 0.02992941540834839, + "acc_norm": 0.23383084577114427, + "acc_norm_stderr": 0.02992941540834839 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.24855491329479767, + "acc_stderr": 0.03295304696818318, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 0.03295304696818318 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2566137566137566, + "acc_stderr": 0.022494510767503154, + "acc_norm": 0.2566137566137566, + "acc_norm_stderr": 0.022494510767503154 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.22916666666666666, + "acc_stderr": 0.03514697467862388, + "acc_norm": 0.22916666666666666, + "acc_norm_stderr": 0.03514697467862388 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.24566473988439305, + "acc_stderr": 0.02317629820399201, + "acc_norm": 0.24566473988439305, + "acc_norm_stderr": 0.02317629820399201 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2331288343558282, + "acc_stderr": 0.0332201579577674, + "acc_norm": 0.2331288343558282, + "acc_norm_stderr": 0.0332201579577674 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2191358024691358, + "acc_stderr": 0.023016705640262185, + "acc_norm": 0.2191358024691358, + "acc_norm_stderr": 0.023016705640262185 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + 
}, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.36787564766839376, + "acc_stderr": 0.034801756684660366, + "acc_norm": 0.36787564766839376, + "acc_norm_stderr": 0.034801756684660366 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.04049339297748141, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.04049339297748141 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.24036697247706423, + "acc_stderr": 0.01832060732096407, + "acc_norm": 0.24036697247706423, + "acc_norm_stderr": 0.01832060732096407 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.25396825396825395, + "acc_stderr": 0.03893259610604675, + "acc_norm": 0.25396825396825395, + "acc_norm_stderr": 0.03893259610604675 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.24183006535947713, + "acc_stderr": 0.024518195641879334, + "acc_norm": 0.24183006535947713, + "acc_norm_stderr": 0.024518195641879334 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.19, + "acc_stderr": 0.039427724440366234, + "acc_norm": 0.19, + "acc_norm_stderr": 0.039427724440366234 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.24793388429752067, + "acc_stderr": 0.039418975265163025, + "acc_norm": 0.24793388429752067, + "acc_norm_stderr": 0.039418975265163025 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.17763157894736842, + "acc_stderr": 0.03110318238312338, + "acc_norm": 0.17763157894736842, + "acc_norm_stderr": 0.03110318238312338 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2173202614379085, + "acc_stderr": 0.016684820929148598, + "acc_norm": 0.2173202614379085, + "acc_norm_stderr": 0.016684820929148598 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.24822695035460993, + "acc_stderr": 0.025770015644290396, + "acc_norm": 0.24822695035460993, + "acc_norm_stderr": 0.025770015644290396 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.16071428571428573, + "acc_stderr": 
0.0348594609647574, + "acc_norm": 0.16071428571428573, + "acc_norm_stderr": 0.0348594609647574 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.0340470532865388, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.0340470532865388 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23798882681564246, + "acc_stderr": 0.014242630070574892, + "acc_norm": 0.23798882681564246, + "acc_norm_stderr": 0.014242630070574892 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4485294117647059, + "acc_stderr": 0.030211479609121593, + "acc_norm": 0.4485294117647059, + "acc_norm_stderr": 0.030211479609121593 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3877551020408163, + "acc_stderr": 0.03119223072679566, + "acc_norm": 0.3877551020408163, + "acc_norm_stderr": 0.03119223072679566 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2616033755274262, + "acc_stderr": 0.028609516716994934, + "acc_norm": 0.2616033755274262, + "acc_norm_stderr": 0.028609516716994934 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2457627118644068, + "acc_stderr": 0.010996156635142692, + "acc_norm": 0.2457627118644068, + "acc_norm_stderr": 0.010996156635142692 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.25980392156862747, + "acc_stderr": 0.030778554678693257, + "acc_norm": 0.25980392156862747, + "acc_norm_stderr": 0.030778554678693257 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03225078108306289, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03225078108306289 + }, + 
"harness|ko_truthfulqa_mc|0": { + "mc1": 0.24479804161566707, + "mc1_stderr": 0.015051869486715021, + "mc2": 0.47018197225111685, + "mc2_stderr": 0.016150007373089376 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2101534828807556, + "acc_stderr": 0.014007301224897517, + "acc_norm": 0.3707201889020071, + "acc_norm_stderr": 0.016605801289212588 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "beomi/kollama-13b", + "model_sha": "d25ffb8c1a147e67c1bce0aca49a710395ce18ae", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/beomi/llama-2-ko-7b-emb-dev/result_2023-12-28 05:13:27.json b/beomi/llama-2-ko-7b-emb-dev/result_2023-12-28 05:13:27.json new file mode 100644 index 0000000000000000000000000000000000000000..e1a7266a65a81bc8f5ef8f1e249bbe3a8033b3b6 --- /dev/null +++ b/beomi/llama-2-ko-7b-emb-dev/result_2023-12-28 05:13:27.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2790102389078498, + "acc_stderr": 0.013106784883601355, + "acc_norm": 0.3455631399317406, + "acc_norm_stderr": 0.01389693846114568 + }, + "harness|ko_hellaswag|10": { + "acc": 0.33808006373232424, + "acc_stderr": 
0.004720891597174718, + "acc_norm": 0.45120493925512845, + "acc_norm_stderr": 0.0049659636472103195 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.34502923976608185, + "acc_stderr": 0.036459813773888065, + "acc_norm": 0.34502923976608185, + "acc_norm_stderr": 0.036459813773888065 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.21359223300970873, + "acc_stderr": 0.04058042015646034, + "acc_norm": 0.21359223300970873, + "acc_norm_stderr": 0.04058042015646034 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.31545338441890164, + "acc_stderr": 0.016617501738763394, + "acc_norm": 0.31545338441890164, + "acc_norm_stderr": 0.016617501738763394 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3851851851851852, + "acc_stderr": 0.042039210401562783, + "acc_norm": 0.3851851851851852, + "acc_norm_stderr": 0.042039210401562783 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3829787234042553, + "acc_stderr": 0.03177821250236922, + "acc_norm": 0.3829787234042553, + "acc_norm_stderr": 0.03177821250236922 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.2710843373493976, + "acc_stderr": 0.03460579907553027, + "acc_norm": 0.2710843373493976, + "acc_norm_stderr": 0.03460579907553027 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.31189710610932475, + "acc_stderr": 0.026311858071854155, + "acc_norm": 0.31189710610932475, + "acc_norm_stderr": 0.026311858071854155 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3811659192825112, + "acc_stderr": 0.03259625118416827, + "acc_norm": 0.3811659192825112, + "acc_norm_stderr": 0.03259625118416827 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3053435114503817, + "acc_stderr": 0.04039314978724562, + "acc_norm": 0.3053435114503817, + "acc_norm_stderr": 0.04039314978724562 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.25, + 
"acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.25252525252525254, + "acc_stderr": 0.030954055470365907, + "acc_norm": 0.25252525252525254, + "acc_norm_stderr": 0.030954055470365907 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.296551724137931, + "acc_stderr": 0.038061426873099935, + "acc_norm": 0.296551724137931, + "acc_norm_stderr": 0.038061426873099935 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.04280105837364396, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.04280105837364396 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.25210084033613445, + "acc_stderr": 0.028205545033277723, + "acc_norm": 0.25210084033613445, + "acc_norm_stderr": 0.028205545033277723 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2358974358974359, + "acc_stderr": 0.021525965407408726, + "acc_norm": 0.2358974358974359, + "acc_norm_stderr": 0.021525965407408726 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.04414343666854932, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.04414343666854932 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.22167487684729065, + "acc_stderr": 0.0292255758924896, + "acc_norm": 0.22167487684729065, + "acc_norm_stderr": 0.0292255758924896 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2903225806451613, + "acc_stderr": 0.025822106119415895, + "acc_norm": 0.2903225806451613, + "acc_norm_stderr": 0.025822106119415895 + }, + 
"harness|ko_mmlu_marketing|5": { + "acc": 0.3247863247863248, + "acc_stderr": 0.03067902276549883, + "acc_norm": 0.3247863247863248, + "acc_norm_stderr": 0.03067902276549883 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.29056603773584905, + "acc_stderr": 0.02794321998933716, + "acc_norm": 0.29056603773584905, + "acc_norm_stderr": 0.02794321998933716 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2545454545454545, + "acc_stderr": 0.04172343038705383, + "acc_norm": 0.2545454545454545, + "acc_norm_stderr": 0.04172343038705383 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.27037037037037037, + "acc_stderr": 0.027080372815145668, + "acc_norm": 0.27037037037037037, + "acc_norm_stderr": 0.027080372815145668 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2781456953642384, + "acc_stderr": 0.03658603262763744, + "acc_norm": 0.2781456953642384, + "acc_norm_stderr": 0.03658603262763744 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.03333333333333334, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.03333333333333334 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2138728323699422, + "acc_stderr": 0.03126511206173042, + "acc_norm": 0.2138728323699422, + "acc_norm_stderr": 0.03126511206173042 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.26455026455026454, + "acc_stderr": 0.022717467897708614, + "acc_norm": 0.26455026455026454, + "acc_norm_stderr": 0.022717467897708614 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2847222222222222, + "acc_stderr": 0.03773809990686935, + "acc_norm": 0.2847222222222222, + "acc_norm_stderr": 0.03773809990686935 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932269, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932269 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 
0.04988876515698589 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2947976878612717, + "acc_stderr": 0.024547617794803828, + "acc_norm": 0.2947976878612717, + "acc_norm_stderr": 0.024547617794803828 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2822085889570552, + "acc_stderr": 0.03536117886664743, + "acc_norm": 0.2822085889570552, + "acc_norm_stderr": 0.03536117886664743 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2839506172839506, + "acc_stderr": 0.025089478523765127, + "acc_norm": 0.2839506172839506, + "acc_norm_stderr": 0.025089478523765127 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.27461139896373055, + "acc_stderr": 0.032210245080411516, + "acc_norm": 0.27461139896373055, + "acc_norm_stderr": 0.032210245080411516 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.040969851398436716, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.040969851398436716 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.25137614678899084, + "acc_stderr": 0.018599206360287415, + "acc_norm": 0.25137614678899084, + "acc_norm_stderr": 0.018599206360287415 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.19047619047619047, + "acc_stderr": 0.03512207412302052, + "acc_norm": 0.19047619047619047, + "acc_norm_stderr": 0.03512207412302052 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.026090162504279035, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.026090162504279035 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2975206611570248, + "acc_stderr": 0.04173349148083499, + "acc_norm": 
0.2975206611570248, + "acc_norm_stderr": 0.04173349148083499 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.17105263157894737, + "acc_stderr": 0.030643607071677098, + "acc_norm": 0.17105263157894737, + "acc_norm_stderr": 0.030643607071677098 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3022875816993464, + "acc_stderr": 0.018579232711113874, + "acc_norm": 0.3022875816993464, + "acc_norm_stderr": 0.018579232711113874 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2765957446808511, + "acc_stderr": 0.026684564340461014, + "acc_norm": 0.2765957446808511, + "acc_norm_stderr": 0.026684564340461014 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.21428571428571427, + "acc_stderr": 0.03894641120044793, + "acc_norm": 0.21428571428571427, + "acc_norm_stderr": 0.03894641120044793 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.029886910547626978, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.029886910547626978 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2435754189944134, + "acc_stderr": 0.014355911964767857, + "acc_norm": 0.2435754189944134, + "acc_norm_stderr": 0.014355911964767857 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.1801470588235294, + "acc_stderr": 0.02334516361654488, + "acc_norm": 0.1801470588235294, + "acc_norm_stderr": 0.02334516361654488 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.24489795918367346, + "acc_stderr": 0.027529637440174934, + "acc_norm": 0.24489795918367346, + "acc_norm_stderr": 0.027529637440174934 + }, + "harness|ko_mmlu_high_school_world_history|5": { + 
"acc": 0.33755274261603374, + "acc_stderr": 0.030781549102026216, + "acc_norm": 0.33755274261603374, + "acc_norm_stderr": 0.030781549102026216 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.26401564537157757, + "acc_stderr": 0.011258435537723814, + "acc_norm": 0.26401564537157757, + "acc_norm_stderr": 0.011258435537723814 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.22058823529411764, + "acc_stderr": 0.02910225438967407, + "acc_norm": 0.22058823529411764, + "acc_norm_stderr": 0.02910225438967407 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.24242424242424243, + "acc_stderr": 0.03346409881055952, + "acc_norm": 0.24242424242424243, + "acc_norm_stderr": 0.03346409881055952 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.25703794369645044, + "mc1_stderr": 0.015298077509485083, + "mc2": 0.42530376345187815, + "mc2_stderr": 0.015252754425393767 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.15584415584415584, + "acc_stderr": 0.012470141877923077, + "acc_norm": 0.3577331759149941, + "acc_norm_stderr": 0.016479808935749976 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + 
"harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "beomi/llama-2-ko-7b-emb-dev", + "model_sha": "f1ff977bd4ee3f0c2a3ee7dd1c4b7750e3a0766c", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 
0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/beomi/llama-2-ko-7b-emb-dev/result_2023-12-29 06:46:01.json b/beomi/llama-2-ko-7b-emb-dev/result_2023-12-29 06:46:01.json new file mode 100644 index 0000000000000000000000000000000000000000..97a79f16b02e2028b6b45d12d986418ae66a68f1 --- /dev/null +++ b/beomi/llama-2-ko-7b-emb-dev/result_2023-12-29 06:46:01.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.30204778156996587, + "acc_stderr": 0.013417519144716429, + "acc_norm": 0.378839590443686, + "acc_norm_stderr": 0.014175915490000324 + }, + "harness|ko_hellaswag|10": { + "acc": 0.35480979884485164, + "acc_stderr": 0.0047747781803451845, + "acc_norm": 0.47390957976498704, + "acc_norm_stderr": 0.0049829835924591935 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.033014059469872487, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.033014059469872487 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.24271844660194175, + "acc_stderr": 0.04245022486384493, + "acc_norm": 0.24271844660194175, + "acc_norm_stderr": 0.04245022486384493 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3269476372924649, + "acc_stderr": 0.01677490818013146, + "acc_norm": 0.3269476372924649, + "acc_norm_stderr": 0.01677490818013146 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.35555555555555557, + "acc_stderr": 0.04135176749720386, + "acc_norm": 0.35555555555555557, + "acc_norm_stderr": 0.04135176749720386 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.35319148936170214, + "acc_stderr": 0.031245325202761926, + "acc_norm": 0.35319148936170214, + "acc_norm_stderr": 0.031245325202761926 + }, + "harness|ko_mmlu_virology|5": { + "acc": 
0.3855421686746988, + "acc_stderr": 0.03789134424611548, + "acc_norm": 0.3855421686746988, + "acc_norm_stderr": 0.03789134424611548 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3215434083601286, + "acc_stderr": 0.026527724079528872, + "acc_norm": 0.3215434083601286, + "acc_norm_stderr": 0.026527724079528872 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.30493273542600896, + "acc_stderr": 0.03089861088247751, + "acc_norm": 0.30493273542600896, + "acc_norm_stderr": 0.03089861088247751 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2748091603053435, + "acc_stderr": 0.039153454088478354, + "acc_norm": 0.2748091603053435, + "acc_norm_stderr": 0.039153454088478354 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909282, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909282 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.29292929292929293, + "acc_stderr": 0.03242497958178817, + "acc_norm": 0.29292929292929293, + "acc_norm_stderr": 0.03242497958178817 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2896551724137931, + "acc_stderr": 0.03780019230438013, + "acc_norm": 0.2896551724137931, + "acc_norm_stderr": 0.03780019230438013 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.04023382273617746, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.04023382273617746 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3403361344537815, + "acc_stderr": 0.03077805742293167, + "acc_norm": 0.3403361344537815, + "acc_norm_stderr": 0.03077805742293167 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3, + "acc_stderr": 0.023234581088428494, + "acc_norm": 0.3, + "acc_norm_stderr": 0.023234581088428494 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.31, + "acc_stderr": 0.046482319871173156, + "acc_norm": 0.31, + "acc_norm_stderr": 0.046482319871173156 + }, + "harness|ko_mmlu_global_facts|5": 
{ + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.043300437496507416, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.043300437496507416 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3251231527093596, + "acc_stderr": 0.03295797566311271, + "acc_norm": 0.3251231527093596, + "acc_norm_stderr": 0.03295797566311271 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3193548387096774, + "acc_stderr": 0.026522709674667768, + "acc_norm": 0.3193548387096774, + "acc_norm_stderr": 0.026522709674667768 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.3034188034188034, + "acc_stderr": 0.03011821010694266, + "acc_norm": 0.3034188034188034, + "acc_norm_stderr": 0.03011821010694266 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2830188679245283, + "acc_stderr": 0.0277242364927009, + "acc_norm": 0.2830188679245283, + "acc_norm_stderr": 0.0277242364927009 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.3181818181818182, + "acc_stderr": 0.04461272175910508, + "acc_norm": 0.3181818181818182, + "acc_norm_stderr": 0.04461272175910508 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.29259259259259257, + "acc_stderr": 0.02773896963217609, + "acc_norm": 0.29259259259259257, + "acc_norm_stderr": 0.02773896963217609 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.03757949922943343, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.03757949922943343 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.3880597014925373, + "acc_stderr": 0.0344578996436275, + "acc_norm": 0.3880597014925373, + "acc_norm_stderr": 0.0344578996436275 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.24277456647398843, + "acc_stderr": 0.0326926380614177, + "acc_norm": 0.24277456647398843, + "acc_norm_stderr": 0.0326926380614177 
+ }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2804232804232804, + "acc_stderr": 0.023135287974325635, + "acc_norm": 0.2804232804232804, + "acc_norm_stderr": 0.023135287974325635 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.037455547914624576, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.037455547914624576 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.22, + "acc_stderr": 0.041633319989322695, + "acc_norm": 0.22, + "acc_norm_stderr": 0.041633319989322695 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.34, + "acc_stderr": 0.047609522856952365, + "acc_norm": 0.34, + "acc_norm_stderr": 0.047609522856952365 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.26878612716763006, + "acc_stderr": 0.023868003262500104, + "acc_norm": 0.26878612716763006, + "acc_norm_stderr": 0.023868003262500104 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.26993865030674846, + "acc_stderr": 0.03487825168497892, + "acc_norm": 0.26993865030674846, + "acc_norm_stderr": 0.03487825168497892 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3117283950617284, + "acc_stderr": 0.025773111169630453, + "acc_norm": 0.3117283950617284, + "acc_norm_stderr": 0.025773111169630453 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.26424870466321243, + "acc_stderr": 0.03182155050916647, + "acc_norm": 0.26424870466321243, + "acc_norm_stderr": 0.03182155050916647 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.04185774424022056, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.04185774424022056 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.26055045871559634, + "acc_stderr": 0.018819182034850068, + "acc_norm": 0.26055045871559634, + 
"acc_norm_stderr": 0.018819182034850068 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.20634920634920634, + "acc_stderr": 0.036196045241242494, + "acc_norm": 0.20634920634920634, + "acc_norm_stderr": 0.036196045241242494 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.31699346405228757, + "acc_stderr": 0.026643278474508755, + "acc_norm": 0.31699346405228757, + "acc_norm_stderr": 0.026643278474508755 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.4132231404958678, + "acc_stderr": 0.04495087843548408, + "acc_norm": 0.4132231404958678, + "acc_norm_stderr": 0.04495087843548408 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.19736842105263158, + "acc_stderr": 0.03238981601699397, + "acc_norm": 0.19736842105263158, + "acc_norm_stderr": 0.03238981601699397 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2565359477124183, + "acc_stderr": 0.017667841612379002, + "acc_norm": 0.2565359477124183, + "acc_norm_stderr": 0.017667841612379002 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2553191489361702, + "acc_stderr": 0.026011992930902006, + "acc_norm": 0.2553191489361702, + "acc_norm_stderr": 0.026011992930902006 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25, + "acc_stderr": 0.04109974682633932, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04109974682633932 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.03167468706828977, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.03167468706828977 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23575418994413408, + "acc_stderr": 0.014196375686290803, + "acc_norm": 0.23575418994413408, + "acc_norm_stderr": 0.014196375686290803 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + 
"acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.23, + "acc_stderr": 0.042295258468165065, + "acc_norm": 0.23, + "acc_norm_stderr": 0.042295258468165065 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4375, + "acc_stderr": 0.030134614954403924, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.030134614954403924 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4204081632653061, + "acc_stderr": 0.03160106993449604, + "acc_norm": 0.4204081632653061, + "acc_norm_stderr": 0.03160106993449604 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2869198312236287, + "acc_stderr": 0.029443773022594693, + "acc_norm": 0.2869198312236287, + "acc_norm_stderr": 0.029443773022594693 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2607561929595828, + "acc_stderr": 0.011213471559602334, + "acc_norm": 0.2607561929595828, + "acc_norm_stderr": 0.011213471559602334 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.25, + "acc_stderr": 0.03039153369274154, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03039153369274154 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2909090909090909, + "acc_stderr": 0.03546563019624337, + "acc_norm": 0.2909090909090909, + "acc_norm_stderr": 0.03546563019624337 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24969400244798043, + "mc1_stderr": 0.015152286907148123, + "mc2": 0.4175794689167079, + "mc2_stderr": 0.01513967881843377 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.13695395513577333, + "acc_stderr": 0.011820043946570876, + "acc_norm": 0.33530106257378983, + "acc_norm_stderr": 0.016230981232989817 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 
1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + 
"harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "beomi/llama-2-ko-7b-emb-dev", + "model_sha": "3796dc4797838aa3c3a9cd22a3d2b73b931fc684", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/beomi/llama-2-ko-7b-emb-dev/result_2023-12-30 09:33:08.json b/beomi/llama-2-ko-7b-emb-dev/result_2023-12-30 09:33:08.json new file mode 100644 index 0000000000000000000000000000000000000000..d28333944cc424b8fdda9ba5be6bcba1fbd9a02e --- /dev/null +++ b/beomi/llama-2-ko-7b-emb-dev/result_2023-12-30 09:33:08.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.32593856655290104, + "acc_stderr": 0.013697432466693239, + "acc_norm": 0.40273037542662116, + "acc_norm_stderr": 0.014332236306790147 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3641704839673372, + "acc_stderr": 0.004802133511654224, + "acc_norm": 0.49083847839075884, + "acc_norm_stderr": 0.004988943721711207 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.03301405946987249, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.03301405946987249 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.21359223300970873, + "acc_stderr": 0.04058042015646034, + "acc_norm": 0.21359223300970873, + "acc_norm_stderr": 0.04058042015646034 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.30395913154533843, + "acc_stderr": 0.016448321686769043, + "acc_norm": 0.30395913154533843, + 
"acc_norm_stderr": 0.016448321686769043 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04072314811876837, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04072314811876837 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.32340425531914896, + "acc_stderr": 0.03057944277361034, + "acc_norm": 0.32340425531914896, + "acc_norm_stderr": 0.03057944277361034 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.2710843373493976, + "acc_stderr": 0.03460579907553027, + "acc_norm": 0.2710843373493976, + "acc_norm_stderr": 0.03460579907553027 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2829581993569132, + "acc_stderr": 0.02558306248998484, + "acc_norm": 0.2829581993569132, + "acc_norm_stderr": 0.02558306248998484 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4080717488789238, + "acc_stderr": 0.03298574607842822, + "acc_norm": 0.4080717488789238, + "acc_norm_stderr": 0.03298574607842822 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2748091603053435, + "acc_stderr": 0.039153454088478354, + "acc_norm": 0.2748091603053435, + "acc_norm_stderr": 0.039153454088478354 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.23232323232323232, + "acc_stderr": 0.030088629490217483, + "acc_norm": 0.23232323232323232, + "acc_norm_stderr": 0.030088629490217483 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2482758620689655, + "acc_stderr": 0.03600105692727774, + "acc_norm": 0.2482758620689655, + "acc_norm_stderr": 0.03600105692727774 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.19607843137254902, + "acc_stderr": 0.039505818611799616, + "acc_norm": 
0.19607843137254902, + "acc_norm_stderr": 0.039505818611799616 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.2605042016806723, + "acc_stderr": 0.028510251512341923, + "acc_norm": 0.2605042016806723, + "acc_norm_stderr": 0.028510251512341923 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.26153846153846155, + "acc_stderr": 0.02228214120420443, + "acc_norm": 0.26153846153846155, + "acc_norm_stderr": 0.02228214120420443 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.044531975073749834, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.044531975073749834 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2315270935960591, + "acc_stderr": 0.02967833314144446, + "acc_norm": 0.2315270935960591, + "acc_norm_stderr": 0.02967833314144446 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3032258064516129, + "acc_stderr": 0.026148685930671742, + "acc_norm": 0.3032258064516129, + "acc_norm_stderr": 0.026148685930671742 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2692307692307692, + "acc_stderr": 0.029058588303748845, + "acc_norm": 0.2692307692307692, + "acc_norm_stderr": 0.029058588303748845 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2830188679245283, + "acc_stderr": 0.0277242364927009, + "acc_norm": 0.2830188679245283, + "acc_norm_stderr": 0.0277242364927009 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.39090909090909093, + "acc_stderr": 0.04673752333670237, + "acc_norm": 0.39090909090909093, + "acc_norm_stderr": 0.04673752333670237 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24814814814814815, + 
"acc_stderr": 0.0263357394040558, + "acc_norm": 0.24814814814814815, + "acc_norm_stderr": 0.0263357394040558 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.23841059602649006, + "acc_stderr": 0.03479185572599661, + "acc_norm": 0.23841059602649006, + "acc_norm_stderr": 0.03479185572599661 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.2935323383084577, + "acc_stderr": 0.03220024104534204, + "acc_norm": 0.2935323383084577, + "acc_norm_stderr": 0.03220024104534204 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.23121387283236994, + "acc_stderr": 0.0321473730202947, + "acc_norm": 0.23121387283236994, + "acc_norm_stderr": 0.0321473730202947 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.25396825396825395, + "acc_stderr": 0.022418042891113942, + "acc_norm": 0.25396825396825395, + "acc_norm_stderr": 0.022418042891113942 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.03852084696008534, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.03852084696008534 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.24566473988439305, + "acc_stderr": 0.02317629820399201, + "acc_norm": 0.24566473988439305, + "acc_norm_stderr": 0.02317629820399201 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.27607361963190186, + "acc_stderr": 0.03512385283705051, + "acc_norm": 0.27607361963190186, + "acc_norm_stderr": 0.03512385283705051 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.02492200116888633, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.02492200116888633 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 
0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.20207253886010362, + "acc_stderr": 0.02897908979429673, + "acc_norm": 0.20207253886010362, + "acc_norm_stderr": 0.02897908979429673 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.041424397194893624, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.041424397194893624 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.24220183486238533, + "acc_stderr": 0.01836817630659862, + "acc_norm": 0.24220183486238533, + "acc_norm_stderr": 0.01836817630659862 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.20634920634920634, + "acc_stderr": 0.036196045241242494, + "acc_norm": 0.20634920634920634, + "acc_norm_stderr": 0.036196045241242494 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.02555316999182652, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.02555316999182652 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909284, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909284 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.36363636363636365, + "acc_stderr": 0.043913262867240704, + "acc_norm": 0.36363636363636365, + "acc_norm_stderr": 0.043913262867240704 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.18421052631578946, + "acc_stderr": 0.0315469804508223, + "acc_norm": 0.18421052631578946, + "acc_norm_stderr": 0.0315469804508223 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2761437908496732, + "acc_stderr": 0.018087276935663133, + "acc_norm": 0.2761437908496732, + "acc_norm_stderr": 0.018087276935663133 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.26595744680851063, + "acc_stderr": 0.026358065698880585, + "acc_norm": 0.26595744680851063, + "acc_norm_stderr": 0.026358065698880585 + }, 
+ "harness|ko_mmlu_machine_learning|5": { + "acc": 0.26785714285714285, + "acc_stderr": 0.04203277291467764, + "acc_norm": 0.26785714285714285, + "acc_norm_stderr": 0.04203277291467764 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2361111111111111, + "acc_stderr": 0.028963702570791044, + "acc_norm": 0.2361111111111111, + "acc_norm_stderr": 0.028963702570791044 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932269, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932269 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.24632352941176472, + "acc_stderr": 0.02617343857052, + "acc_norm": 0.24632352941176472, + "acc_norm_stderr": 0.02617343857052 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.22040816326530613, + "acc_stderr": 0.0265370453121453, + "acc_norm": 0.22040816326530613, + "acc_norm_stderr": 0.0265370453121453 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.31645569620253167, + "acc_stderr": 0.03027497488021898, + "acc_norm": 0.31645569620253167, + "acc_norm_stderr": 0.03027497488021898 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.24902216427640156, + "acc_stderr": 0.01104489226404077, + "acc_norm": 0.24902216427640156, + "acc_norm_stderr": 0.01104489226404077 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.028867431449849313, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.028867431449849313 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.24848484848484848, + "acc_stderr": 0.03374402644139405, + 
"acc_norm": 0.24848484848484848, + "acc_norm_stderr": 0.03374402644139405 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2558139534883721, + "mc1_stderr": 0.015274176219283347, + "mc2": 0.430843038646161, + "mc2_stderr": 0.015222244438027463 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.15348288075560804, + "acc_stderr": 0.012392606565325119, + "acc_norm": 0.3435655253837072, + "acc_norm_stderr": 0.016327334806429145 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + 
"harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "beomi/llama-2-ko-7b-emb-dev", + "model_sha": "3796dc4797838aa3c3a9cd22a3d2b73b931fc684", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/beomi/llama-2-ko-7b-emb-dev/result_2023-12-31 09:24:44.json b/beomi/llama-2-ko-7b-emb-dev/result_2023-12-31 09:24:44.json new file mode 100644 index 0000000000000000000000000000000000000000..c2855517086636e8296c98b3834f18d4e2437c53 --- /dev/null +++ b/beomi/llama-2-ko-7b-emb-dev/result_2023-12-31 09:24:44.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3387372013651877, + "acc_stderr": 0.013830568927974334, + "acc_norm": 0.4197952218430034, + "acc_norm_stderr": 0.014422181226303031 + }, 
+ "harness|ko_hellaswag|10": { + "acc": 0.364070902210715, + "acc_stderr": 0.0048018528813297484, + "acc_norm": 0.49741087432782316, + "acc_norm_stderr": 0.004989714512282407 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.21637426900584794, + "acc_stderr": 0.03158149539338733, + "acc_norm": 0.21637426900584794, + "acc_norm_stderr": 0.03158149539338733 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2621359223300971, + "acc_stderr": 0.043546310772605956, + "acc_norm": 0.2621359223300971, + "acc_norm_stderr": 0.043546310772605956 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3052362707535121, + "acc_stderr": 0.016467711947635123, + "acc_norm": 0.3052362707535121, + "acc_norm_stderr": 0.016467711947635123 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37777777777777777, + "acc_stderr": 0.04188307537595852, + "acc_norm": 0.37777777777777777, + "acc_norm_stderr": 0.04188307537595852 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3404255319148936, + "acc_stderr": 0.03097669299853443, + "acc_norm": 0.3404255319148936, + "acc_norm_stderr": 0.03097669299853443 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3674698795180723, + "acc_stderr": 0.03753267402120574, + "acc_norm": 0.3674698795180723, + "acc_norm_stderr": 0.03753267402120574 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.28938906752411575, + "acc_stderr": 0.025755865922632924, + "acc_norm": 0.28938906752411575, + "acc_norm_stderr": 0.025755865922632924 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3811659192825112, + "acc_stderr": 0.032596251184168264, + "acc_norm": 0.3811659192825112, + "acc_norm_stderr": 0.032596251184168264 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.26717557251908397, + "acc_stderr": 0.038808483010823944, + "acc_norm": 0.26717557251908397, + "acc_norm_stderr": 
0.038808483010823944 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.25757575757575757, + "acc_stderr": 0.031156269519646826, + "acc_norm": 0.25757575757575757, + "acc_norm_stderr": 0.031156269519646826 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.27586206896551724, + "acc_stderr": 0.037245636197746325, + "acc_norm": 0.27586206896551724, + "acc_norm_stderr": 0.037245636197746325 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.19607843137254902, + "acc_stderr": 0.039505818611799616, + "acc_norm": 0.19607843137254902, + "acc_norm_stderr": 0.039505818611799616 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.2815126050420168, + "acc_stderr": 0.02921354941437216, + "acc_norm": 0.2815126050420168, + "acc_norm_stderr": 0.02921354941437216 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.26153846153846155, + "acc_stderr": 0.02228214120420443, + "acc_norm": 0.26153846153846155, + "acc_norm_stderr": 0.02228214120420443 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.25, + "acc_stderr": 0.04186091791394607, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04186091791394607 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3054187192118227, + "acc_stderr": 0.032406615658684086, + "acc_norm": 0.3054187192118227, + "acc_norm_stderr": 0.032406615658684086 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2870967741935484, + "acc_stderr": 0.02573654274559453, + "acc_norm": 0.2870967741935484, + 
"acc_norm_stderr": 0.02573654274559453 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2948717948717949, + "acc_stderr": 0.029872577708891165, + "acc_norm": 0.2948717948717949, + "acc_norm_stderr": 0.029872577708891165 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2981132075471698, + "acc_stderr": 0.028152837942493875, + "acc_norm": 0.2981132075471698, + "acc_norm_stderr": 0.028152837942493875 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.3, + "acc_stderr": 0.04389311454644286, + "acc_norm": 0.3, + "acc_norm_stderr": 0.04389311454644286 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2814814814814815, + "acc_stderr": 0.02742001935094527, + "acc_norm": 0.2814814814814815, + "acc_norm_stderr": 0.02742001935094527 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.23178807947019867, + "acc_stderr": 0.034454062719870546, + "acc_norm": 0.23178807947019867, + "acc_norm_stderr": 0.034454062719870546 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.34328358208955223, + "acc_stderr": 0.03357379665433431, + "acc_norm": 0.34328358208955223, + "acc_norm_stderr": 0.03357379665433431 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.24277456647398843, + "acc_stderr": 0.0326926380614177, + "acc_norm": 0.24277456647398843, + "acc_norm_stderr": 0.0326926380614177 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.25396825396825395, + "acc_stderr": 0.022418042891113942, + "acc_norm": 0.25396825396825395, + "acc_norm_stderr": 0.022418042891113942 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.039420826399272135, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.039420826399272135 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, 
+ "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.27167630057803466, + "acc_stderr": 0.02394851290546836, + "acc_norm": 0.27167630057803466, + "acc_norm_stderr": 0.02394851290546836 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.25766871165644173, + "acc_stderr": 0.03436150827846917, + "acc_norm": 0.25766871165644173, + "acc_norm_stderr": 0.03436150827846917 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.28703703703703703, + "acc_stderr": 0.025171041915309684, + "acc_norm": 0.28703703703703703, + "acc_norm_stderr": 0.025171041915309684 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.21761658031088082, + "acc_stderr": 0.029778663037752954, + "acc_norm": 0.21761658031088082, + "acc_norm_stderr": 0.029778663037752954 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.04049339297748142, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.04049339297748142 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.26605504587155965, + "acc_stderr": 0.018946022322225597, + "acc_norm": 0.26605504587155965, + "acc_norm_stderr": 0.018946022322225597 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.21428571428571427, + "acc_stderr": 0.03670066451047182, + "acc_norm": 0.21428571428571427, + "acc_norm_stderr": 0.03670066451047182 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2908496732026144, + "acc_stderr": 0.026004800363952113, + "acc_norm": 0.2908496732026144, + "acc_norm_stderr": 0.026004800363952113 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.39669421487603307, + "acc_stderr": 
0.044658697805310094, + "acc_norm": 0.39669421487603307, + "acc_norm_stderr": 0.044658697805310094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.19078947368421054, + "acc_stderr": 0.031975658210325, + "acc_norm": 0.19078947368421054, + "acc_norm_stderr": 0.031975658210325 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.25326797385620914, + "acc_stderr": 0.017593486895366828, + "acc_norm": 0.25326797385620914, + "acc_norm_stderr": 0.017593486895366828 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.25886524822695034, + "acc_stderr": 0.026129572527180848, + "acc_norm": 0.25886524822695034, + "acc_norm_stderr": 0.026129572527180848 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.042878587513404565, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.042878587513404565 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.20833333333333334, + "acc_stderr": 0.02769691071309394, + "acc_norm": 0.20833333333333334, + "acc_norm_stderr": 0.02769691071309394 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816507, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816507 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.31985294117647056, + "acc_stderr": 0.028332959514031218, + "acc_norm": 0.31985294117647056, + "acc_norm_stderr": 0.028332959514031218 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2653061224489796, + "acc_stderr": 0.02826388994378461, + "acc_norm": 0.2653061224489796, + "acc_norm_stderr": 0.02826388994378461 + }, + 
"harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.27848101265822783, + "acc_stderr": 0.029178682304842544, + "acc_norm": 0.27848101265822783, + "acc_norm_stderr": 0.029178682304842544 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.26401564537157757, + "acc_stderr": 0.011258435537723814, + "acc_norm": 0.26401564537157757, + "acc_norm_stderr": 0.011258435537723814 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.24019607843137256, + "acc_stderr": 0.02998373305591361, + "acc_norm": 0.24019607843137256, + "acc_norm_stderr": 0.02998373305591361 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2545454545454545, + "acc_stderr": 0.03401506715249039, + "acc_norm": 0.2545454545454545, + "acc_norm_stderr": 0.03401506715249039 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2521419828641371, + "mc1_stderr": 0.015201522246299953, + "mc2": 0.4275383331125476, + "mc2_stderr": 0.01526305656191646 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.14403778040141677, + "acc_stderr": 0.012072030576668953, + "acc_norm": 0.3707201889020071, + "acc_norm_stderr": 0.016605801289212598 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + 
"harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "beomi/llama-2-ko-7b-emb-dev", + "model_sha": "d0e8d08d5f41082f3f48ec990edc2eb521ac2e73", + 
"model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/beomi/llama-2-ko-7b/result_2023-09-26 12:00:48.json b/beomi/llama-2-ko-7b/result_2023-09-26 12:00:48.json new file mode 100644 index 0000000000000000000000000000000000000000..3d0d0cfd3631e091caff2c99dcb627c3e211f605 --- /dev/null +++ b/beomi/llama-2-ko-7b/result_2023-09-26 12:00:48.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3174061433447099, + "acc_stderr": 0.01360223908803817, + "acc_norm": 0.38054607508532423, + "acc_norm_stderr": 0.014188277712349814 + }, + "harness|ko_hellaswag|10": { + "acc": 0.38309101772555265, + "acc_stderr": 0.004851466623601449, + "acc_norm": 0.4958175662218682, + "acc_norm_stderr": 0.00498960683837107 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.03377310252209194, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.03377310252209194 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2815533980582524, + "acc_stderr": 0.044532548363264673, + "acc_norm": 0.2815533980582524, + "acc_norm_stderr": 0.044532548363264673 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3371647509578544, + "acc_stderr": 0.016905207420803554, + "acc_norm": 0.3371647509578544, + "acc_norm_stderr": 0.016905207420803554 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04072314811876837, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04072314811876837 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421255, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421255 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3021276595744681, + "acc_stderr": 0.030017554471880557, + "acc_norm": 0.3021276595744681, + "acc_norm_stderr": 0.030017554471880557 + }, + 
"harness|ko_mmlu_virology|5": { + "acc": 0.3373493975903614, + "acc_stderr": 0.03680783690727581, + "acc_norm": 0.3373493975903614, + "acc_norm_stderr": 0.03680783690727581 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3279742765273312, + "acc_stderr": 0.0266644108869376, + "acc_norm": 0.3279742765273312, + "acc_norm_stderr": 0.0266644108869376 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3094170403587444, + "acc_stderr": 0.031024411740572196, + "acc_norm": 0.3094170403587444, + "acc_norm_stderr": 0.031024411740572196 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.40458015267175573, + "acc_stderr": 0.043046937953806645, + "acc_norm": 0.40458015267175573, + "acc_norm_stderr": 0.043046937953806645 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.4090909090909091, + "acc_stderr": 0.03502975799413007, + "acc_norm": 0.4090909090909091, + "acc_norm_stderr": 0.03502975799413007 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3103448275862069, + "acc_stderr": 0.03855289616378949, + "acc_norm": 0.3103448275862069, + "acc_norm_stderr": 0.03855289616378949 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.19607843137254902, + "acc_stderr": 0.03950581861179962, + "acc_norm": 0.19607843137254902, + "acc_norm_stderr": 0.03950581861179962 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.31092436974789917, + "acc_stderr": 0.030066761582977924, + "acc_norm": 0.31092436974789917, + "acc_norm_stderr": 0.030066761582977924 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2743589743589744, + "acc_stderr": 0.022622765767493197, + "acc_norm": 0.2743589743589744, + "acc_norm_stderr": 0.022622765767493197 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + 
"acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.044531975073749834, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.044531975073749834 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.28078817733990147, + "acc_stderr": 0.03161856335358609, + "acc_norm": 0.28078817733990147, + "acc_norm_stderr": 0.03161856335358609 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3032258064516129, + "acc_stderr": 0.026148685930671746, + "acc_norm": 0.3032258064516129, + "acc_norm_stderr": 0.026148685930671746 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.36752136752136755, + "acc_stderr": 0.031585391577456365, + "acc_norm": 0.36752136752136755, + "acc_norm_stderr": 0.031585391577456365 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.32452830188679244, + "acc_stderr": 0.028815615713432115, + "acc_norm": 0.32452830188679244, + "acc_norm_stderr": 0.028815615713432115 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.35454545454545455, + "acc_stderr": 0.04582004841505416, + "acc_norm": 0.35454545454545455, + "acc_norm_stderr": 0.04582004841505416 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2518518518518518, + "acc_stderr": 0.026466117538959912, + "acc_norm": 0.2518518518518518, + "acc_norm_stderr": 0.026466117538959912 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.03757949922943342, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.03757949922943342 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.3383084577114428, + "acc_stderr": 0.03345563070339192, + "acc_norm": 0.3383084577114428, + "acc_norm_stderr": 0.03345563070339192 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.24855491329479767, + "acc_stderr": 
0.03295304696818318, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 0.03295304696818318 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2566137566137566, + "acc_stderr": 0.022494510767503154, + "acc_norm": 0.2566137566137566, + "acc_norm_stderr": 0.022494510767503154 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2152777777777778, + "acc_stderr": 0.03437079344106134, + "acc_norm": 0.2152777777777778, + "acc_norm_stderr": 0.03437079344106134 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.32947976878612717, + "acc_stderr": 0.0253052581318797, + "acc_norm": 0.32947976878612717, + "acc_norm_stderr": 0.0253052581318797 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.26380368098159507, + "acc_stderr": 0.03462419931615624, + "acc_norm": 0.26380368098159507, + "acc_norm_stderr": 0.03462419931615624 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3271604938271605, + "acc_stderr": 0.026105673861409825, + "acc_norm": 0.3271604938271605, + "acc_norm_stderr": 0.026105673861409825 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.27979274611398963, + "acc_stderr": 0.03239637046735704, + "acc_norm": 0.27979274611398963, + "acc_norm_stderr": 0.03239637046735704 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.040969851398436716, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.040969851398436716 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 
0.381651376146789, + "acc_stderr": 0.020828148517022596, + "acc_norm": 0.381651376146789, + "acc_norm_stderr": 0.020828148517022596 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.20634920634920634, + "acc_stderr": 0.03619604524124249, + "acc_norm": 0.20634920634920634, + "acc_norm_stderr": 0.03619604524124249 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3660130718954248, + "acc_stderr": 0.027582811415159614, + "acc_norm": 0.3660130718954248, + "acc_norm_stderr": 0.027582811415159614 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.3884297520661157, + "acc_stderr": 0.04449270350068382, + "acc_norm": 0.3884297520661157, + "acc_norm_stderr": 0.04449270350068382 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3618421052631579, + "acc_stderr": 0.03910525752849724, + "acc_norm": 0.3618421052631579, + "acc_norm_stderr": 0.03910525752849724 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.018120224251484577, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.018120224251484577 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2872340425531915, + "acc_stderr": 0.026992199173064356, + "acc_norm": 0.2872340425531915, + "acc_norm_stderr": 0.026992199173064356 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.04287858751340456, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.04287858751340456 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.032568505702936484, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.032568505702936484 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24022346368715083, + "acc_stderr": 0.014288343803925295, + "acc_norm": 0.24022346368715083, + "acc_norm_stderr": 
0.014288343803925295 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.23, + "acc_stderr": 0.042295258468165065, + "acc_norm": 0.23, + "acc_norm_stderr": 0.042295258468165065 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3897058823529412, + "acc_stderr": 0.02962466358115969, + "acc_norm": 0.3897058823529412, + "acc_norm_stderr": 0.02962466358115969 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2938775510204082, + "acc_stderr": 0.029162738410249772, + "acc_norm": 0.2938775510204082, + "acc_norm_stderr": 0.029162738410249772 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.35864978902953587, + "acc_stderr": 0.031219569445301833, + "acc_norm": 0.35864978902953587, + "acc_norm_stderr": 0.031219569445301833 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2900912646675359, + "acc_stderr": 0.011590375554733096, + "acc_norm": 0.2900912646675359, + "acc_norm_stderr": 0.011590375554733096 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2696078431372549, + "acc_stderr": 0.03114557065948678, + "acc_norm": 0.2696078431372549, + "acc_norm_stderr": 0.03114557065948678 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.30303030303030304, + "acc_stderr": 0.035886248000917075, + "acc_norm": 0.30303030303030304, + "acc_norm_stderr": 0.035886248000917075 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.23133414932680538, + "mc1_stderr": 0.014761945174862677, + "mc2": 0.37061663539899015, + "mc2_stderr": 0.014735219813379136 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.28807556080283353, + "acc_stderr": 0.01556986967483836, + "acc_norm": 0.4155844155844156, + "acc_norm_stderr": 0.016943586313076575 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + 
"harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + 
"harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "beomi/llama-2-ko-7b", + "model_sha": "4e6e7eb86c9c98600cad6d7ae9942204302a48a1", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/beomi/llama-2-koen-13b/result_2023-11-20 05:57:13.json b/beomi/llama-2-koen-13b/result_2023-11-20 05:57:13.json new file mode 100644 index 0000000000000000000000000000000000000000..a9c86fc049b22fc793c34042251fed151731fffb --- /dev/null +++ b/beomi/llama-2-koen-13b/result_2023-11-20 05:57:13.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.38054607508532423, + "acc_stderr": 0.014188277712349814, + "acc_norm": 0.439419795221843, + "acc_norm_stderr": 0.014503747823580123 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4133638717386975, + "acc_stderr": 0.004914305798575696, + "acc_norm": 0.5623381796454889, + "acc_norm_stderr": 0.004950848456984544 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.52046783625731, + "acc_stderr": 0.038316105328219316, + "acc_norm": 0.52046783625731, + "acc_norm_stderr": 0.038316105328219316 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4174757281553398, + "acc_stderr": 0.048828405482122375, + "acc_norm": 0.4174757281553398, + "acc_norm_stderr": 0.048828405482122375 + }, + 
"harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5159642401021711, + "acc_stderr": 0.017870847506081734, + "acc_norm": 0.5159642401021711, + "acc_norm_stderr": 0.017870847506081734 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.043097329010363554, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.043097329010363554 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4127659574468085, + "acc_stderr": 0.03218471141400351, + "acc_norm": 0.4127659574468085, + "acc_norm_stderr": 0.03218471141400351 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4457831325301205, + "acc_stderr": 0.03869543323472101, + "acc_norm": 0.4457831325301205, + "acc_norm_stderr": 0.03869543323472101 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5016077170418006, + "acc_stderr": 0.02839794490780661, + "acc_norm": 0.5016077170418006, + "acc_norm_stderr": 0.02839794490780661 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5201793721973094, + "acc_stderr": 0.033530461674123, + "acc_norm": 0.5201793721973094, + "acc_norm_stderr": 0.033530461674123 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.45038167938931295, + "acc_stderr": 0.04363643698524779, + "acc_norm": 0.45038167938931295, + "acc_norm_stderr": 0.04363643698524779 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.035402943770953675, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.035402943770953675 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4068965517241379, + "acc_stderr": 0.040937939812662374, + "acc_norm": 0.4068965517241379, + "acc_norm_stderr": 0.040937939812662374 + }, + 
"harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.041583075330832865, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.041583075330832865 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.032252942323996406, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.032252942323996406 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.441025641025641, + "acc_stderr": 0.025174048384000777, + "acc_norm": 0.441025641025641, + "acc_norm_stderr": 0.025174048384000777 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.04820403072760628, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.04820403072760628 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4236453201970443, + "acc_stderr": 0.03476725747649037, + "acc_norm": 0.4236453201970443, + "acc_norm_stderr": 0.03476725747649037 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4483870967741935, + "acc_stderr": 0.028292056830112735, + "acc_norm": 0.4483870967741935, + "acc_norm_stderr": 0.028292056830112735 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6410256410256411, + "acc_stderr": 0.031426169937919246, + "acc_norm": 0.6410256410256411, + "acc_norm_stderr": 0.031426169937919246 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4377358490566038, + "acc_stderr": 0.030533338430467512, + "acc_norm": 0.4377358490566038, + "acc_norm_stderr": 0.030533338430467512 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4636363636363636, + "acc_stderr": 0.047764491623961985, + "acc_norm": 0.4636363636363636, + 
"acc_norm_stderr": 0.047764491623961985 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2740740740740741, + "acc_stderr": 0.027195934804085626, + "acc_norm": 0.2740740740740741, + "acc_norm_stderr": 0.027195934804085626 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.03780445850526733, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.03780445850526733 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5771144278606966, + "acc_stderr": 0.034932317774212816, + "acc_norm": 0.5771144278606966, + "acc_norm_stderr": 0.034932317774212816 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3468208092485549, + "acc_stderr": 0.036291466701596636, + "acc_norm": 0.3468208092485549, + "acc_norm_stderr": 0.036291466701596636 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2698412698412698, + "acc_stderr": 0.022860838309232072, + "acc_norm": 0.2698412698412698, + "acc_norm_stderr": 0.022860838309232072 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.040166600304512336, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.040166600304512336 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.63, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.63, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5028901734104047, + "acc_stderr": 0.02691864538323901, + "acc_norm": 0.5028901734104047, + "acc_norm_stderr": 0.02691864538323901 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4049079754601227, + "acc_stderr": 0.03856672163548913, + "acc_norm": 0.4049079754601227, + "acc_norm_stderr": 0.03856672163548913 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.47530864197530864, + "acc_stderr": 0.02778680093142745, + 
"acc_norm": 0.47530864197530864, + "acc_norm_stderr": 0.02778680093142745 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.49222797927461137, + "acc_stderr": 0.03608003225569654, + "acc_norm": 0.49222797927461137, + "acc_norm_stderr": 0.03608003225569654 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.04185774424022058, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.04185774424022058 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5192660550458715, + "acc_stderr": 0.02142140298254888, + "acc_norm": 0.5192660550458715, + "acc_norm_stderr": 0.02142140298254888 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.040406101782088394, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.040406101782088394 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.41830065359477125, + "acc_stderr": 0.028245134024387292, + "acc_norm": 0.41830065359477125, + "acc_norm_stderr": 0.028245134024387292 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5950413223140496, + "acc_stderr": 0.04481137755942469, + "acc_norm": 0.5950413223140496, + "acc_norm_stderr": 0.04481137755942469 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.39473684210526316, + "acc_stderr": 0.039777499346220734, + "acc_norm": 0.39473684210526316, + "acc_norm_stderr": 0.039777499346220734 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.369281045751634, + "acc_stderr": 0.01952431674486634, + "acc_norm": 0.369281045751634, + "acc_norm_stderr": 0.01952431674486634 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32269503546099293, + "acc_stderr": 
0.027889139300534792, + "acc_norm": 0.32269503546099293, + "acc_norm_stderr": 0.027889139300534792 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25, + "acc_stderr": 0.04109974682633932, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04109974682633932 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.03293377139415191, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.03293377139415191 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4375, + "acc_stderr": 0.030134614954403924, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.030134614954403924 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.49795918367346936, + "acc_stderr": 0.0320089533497105, + "acc_norm": 0.49795918367346936, + "acc_norm_stderr": 0.0320089533497105 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5991561181434599, + "acc_stderr": 0.031900803894732356, + "acc_norm": 0.5991561181434599, + "acc_norm_stderr": 0.031900803894732356 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.30182529335071706, + "acc_stderr": 0.01172435051810589, + "acc_norm": 0.30182529335071706, + "acc_norm_stderr": 0.01172435051810589 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.45098039215686275, + "acc_stderr": 0.03492406104163614, + "acc_norm": 0.45098039215686275, + "acc_norm_stderr": 0.03492406104163614 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 
0.5151515151515151, + "acc_stderr": 0.03902551007374448, + "acc_norm": 0.5151515151515151, + "acc_norm_stderr": 0.03902551007374448 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.25458996328029376, + "mc1_stderr": 0.015250117079156474, + "mc2": 0.409778749789472, + "mc2_stderr": 0.014729442757477942 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4592680047225502, + "acc_stderr": 0.01713321827653767, + "acc_norm": 0.5466351829988194, + "acc_norm_stderr": 0.017115418225226872 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + 
"harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "beomi/llama-2-koen-13b", + "model_sha": "087f0a3d78be66478382bb9a3aad0a1594215e53", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/beomi/open-llama-2-ko-7b/result_2023-12-14 14:14:49.json b/beomi/open-llama-2-ko-7b/result_2023-12-14 14:14:49.json new file mode 100644 index 0000000000000000000000000000000000000000..03c5f84d51ba8197946d3c474920408701ca148a --- /dev/null +++ b/beomi/open-llama-2-ko-7b/result_2023-12-14 14:14:49.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3293515358361775, + "acc_stderr": 0.013734057652635474, + 
"acc_norm": 0.40017064846416384, + "acc_norm_stderr": 0.014317197787809169 + }, + "harness|ko_hellaswag|10": { + "acc": 0.38498307110137425, + "acc_stderr": 0.004855968578998728, + "acc_norm": 0.502688707428799, + "acc_norm_stderr": 0.004989709267191013 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.28654970760233917, + "acc_stderr": 0.034678266857038266, + "acc_norm": 0.28654970760233917, + "acc_norm_stderr": 0.034678266857038266 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.27184466019417475, + "acc_stderr": 0.044052680241409216, + "acc_norm": 0.27184466019417475, + "acc_norm_stderr": 0.044052680241409216 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3218390804597701, + "acc_stderr": 0.0167063814150579, + "acc_norm": 0.3218390804597701, + "acc_norm_stderr": 0.0167063814150579 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.32592592592592595, + "acc_stderr": 0.04049122041702506, + "acc_norm": 0.32592592592592595, + "acc_norm_stderr": 0.04049122041702506 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768076, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768076 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3404255319148936, + "acc_stderr": 0.030976692998534425, + "acc_norm": 0.3404255319148936, + "acc_norm_stderr": 0.030976692998534425 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.2710843373493976, + "acc_stderr": 0.03460579907553027, + "acc_norm": 0.2710843373493976, + "acc_norm_stderr": 0.03460579907553027 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2765273311897106, + "acc_stderr": 0.02540383297817961, + "acc_norm": 0.2765273311897106, + "acc_norm_stderr": 0.02540383297817961 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.43946188340807174, + "acc_stderr": 0.03331092511038179, + "acc_norm": 0.43946188340807174, + "acc_norm_stderr": 0.03331092511038179 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2900763358778626, + "acc_stderr": 
0.03980066246467765, + "acc_norm": 0.2900763358778626, + "acc_norm_stderr": 0.03980066246467765 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.25252525252525254, + "acc_stderr": 0.030954055470365928, + "acc_norm": 0.25252525252525254, + "acc_norm_stderr": 0.030954055470365928 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.23448275862068965, + "acc_stderr": 0.035306258743465914, + "acc_norm": 0.23448275862068965, + "acc_norm_stderr": 0.035306258743465914 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.041583075330832865, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.041583075330832865 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.027553614467863797, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.027553614467863797 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.23846153846153847, + "acc_stderr": 0.021606294494647727, + "acc_norm": 0.23846153846153847, + "acc_norm_stderr": 0.021606294494647727 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.046166311118017125, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.046166311118017125 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.28078817733990147, + "acc_stderr": 0.03161856335358608, + "acc_norm": 0.28078817733990147, + "acc_norm_stderr": 0.03161856335358608 + }, + "harness|ko_mmlu_high_school_biology|5": { 
+ "acc": 0.29354838709677417, + "acc_stderr": 0.0259060870213193, + "acc_norm": 0.29354838709677417, + "acc_norm_stderr": 0.0259060870213193 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.36324786324786323, + "acc_stderr": 0.03150712523091264, + "acc_norm": 0.36324786324786323, + "acc_norm_stderr": 0.03150712523091264 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.30566037735849055, + "acc_stderr": 0.028353298073322666, + "acc_norm": 0.30566037735849055, + "acc_norm_stderr": 0.028353298073322666 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.3181818181818182, + "acc_stderr": 0.04461272175910508, + "acc_norm": 0.3181818181818182, + "acc_norm_stderr": 0.04461272175910508 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.23703703703703705, + "acc_stderr": 0.025928876132766118, + "acc_norm": 0.23703703703703705, + "acc_norm_stderr": 0.025928876132766118 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.23178807947019867, + "acc_stderr": 0.034454062719870546, + "acc_norm": 0.23178807947019867, + "acc_norm_stderr": 0.034454062719870546 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.2835820895522388, + "acc_stderr": 0.03187187537919796, + "acc_norm": 0.2835820895522388, + "acc_norm_stderr": 0.03187187537919796 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2254335260115607, + "acc_stderr": 0.031862098516411426, + "acc_norm": 0.2254335260115607, + "acc_norm_stderr": 0.031862098516411426 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.02256989707491841, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.02256989707491841 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2986111111111111, + "acc_stderr": 0.03827052357950756, + "acc_norm": 0.2986111111111111, + "acc_norm_stderr": 0.03827052357950756 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 
0.040936018074033256 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.27167630057803466, + "acc_stderr": 0.02394851290546836, + "acc_norm": 0.27167630057803466, + "acc_norm_stderr": 0.02394851290546836 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3067484662576687, + "acc_stderr": 0.03623089915724146, + "acc_norm": 0.3067484662576687, + "acc_norm_stderr": 0.03623089915724146 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3271604938271605, + "acc_stderr": 0.026105673861409825, + "acc_norm": 0.3271604938271605, + "acc_norm_stderr": 0.026105673861409825 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.25906735751295334, + "acc_stderr": 0.03161877917935411, + "acc_norm": 0.25906735751295334, + "acc_norm_stderr": 0.03161877917935411 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.041857744240220575, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.041857744240220575 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.26605504587155965, + "acc_stderr": 0.018946022322225597, + "acc_norm": 0.26605504587155965, + "acc_norm_stderr": 0.018946022322225597 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.21428571428571427, + "acc_stderr": 0.03670066451047182, + "acc_norm": 0.21428571428571427, + "acc_norm_stderr": 0.03670066451047182 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2679738562091503, + "acc_stderr": 0.025360603796242557, + "acc_norm": 0.2679738562091503, + "acc_norm_stderr": 0.025360603796242557 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.35, + "acc_stderr": 0.04793724854411018, + "acc_norm": 0.35, + 
"acc_norm_stderr": 0.04793724854411018 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.3305785123966942, + "acc_stderr": 0.04294340845212094, + "acc_norm": 0.3305785123966942, + "acc_norm_stderr": 0.04294340845212094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.21052631578947367, + "acc_stderr": 0.033176727875331574, + "acc_norm": 0.21052631578947367, + "acc_norm_stderr": 0.033176727875331574 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.315359477124183, + "acc_stderr": 0.018798086284886883, + "acc_norm": 0.315359477124183, + "acc_norm_stderr": 0.018798086284886883 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.25886524822695034, + "acc_stderr": 0.026129572527180848, + "acc_norm": 0.25886524822695034, + "acc_norm_stderr": 0.026129572527180848 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.23214285714285715, + "acc_stderr": 0.04007341809755806, + "acc_norm": 0.23214285714285715, + "acc_norm_stderr": 0.04007341809755806 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.16666666666666666, + "acc_stderr": 0.02541642838876748, + "acc_norm": 0.16666666666666666, + "acc_norm_stderr": 0.02541642838876748 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24134078212290502, + "acc_stderr": 0.014310999547961443, + "acc_norm": 0.24134078212290502, + "acc_norm_stderr": 0.014310999547961443 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.19, + "acc_stderr": 0.039427724440366234, + "acc_norm": 0.19, + "acc_norm_stderr": 0.039427724440366234 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.1948529411764706, + "acc_stderr": 0.02406059942348742, + "acc_norm": 0.1948529411764706, + "acc_norm_stderr": 0.02406059942348742 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.34285714285714286, + 
"acc_stderr": 0.03038726291954773, + "acc_norm": 0.34285714285714286, + "acc_norm_stderr": 0.03038726291954773 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.31645569620253167, + "acc_stderr": 0.030274974880218974, + "acc_norm": 0.31645569620253167, + "acc_norm_stderr": 0.030274974880218974 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.24902216427640156, + "acc_stderr": 0.01104489226404077, + "acc_norm": 0.24902216427640156, + "acc_norm_stderr": 0.01104489226404077 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.030587591351604246, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.030587591351604246 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.22424242424242424, + "acc_stderr": 0.032568666616811015, + "acc_norm": 0.22424242424242424, + "acc_norm_stderr": 0.032568666616811015 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.23378212974296206, + "mc1_stderr": 0.014816195991931586, + "mc2": 0.38671616095132844, + "mc2_stderr": 0.014642090656734802 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.30460448642266824, + "acc_stderr": 0.015823367273129385, + "acc_norm": 0.4214876033057851, + "acc_norm_stderr": 0.016977101932601532 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + 
"harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + 
"model_name": "beomi/open-llama-2-ko-7b", + "model_sha": "84ae8774f8b586b3b84cb1b0b48860d3fec8745f", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/beowolx/CodeNinja-1.0-OpenChat-7B/result_2024-07-27 03:42:41.json b/beowolx/CodeNinja-1.0-OpenChat-7B/result_2024-07-27 03:42:41.json new file mode 100644 index 0000000000000000000000000000000000000000..ca8f4ab2a1e4f20c6fd7fb7cde56405fe901e679 --- /dev/null +++ b/beowolx/CodeNinja-1.0-OpenChat-7B/result_2024-07-27 03:42:41.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.28071672354948807, + "acc_stderr": 0.013131238126975584, + "acc_norm": 0.33361774744027306, + "acc_norm_stderr": 0.013778687054176538 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3460466042620992, + "acc_stderr": 0.004747360500742475, + "acc_norm": 0.4215295757817168, + "acc_norm_stderr": 0.004927948061486062 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4619883040935672, + "acc_stderr": 0.03823727092882307, + "acc_norm": 0.4619883040935672, + "acc_norm_stderr": 0.03823727092882307 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5825242718446602, + "acc_stderr": 0.048828405482122375, + "acc_norm": 0.5825242718446602, + "acc_norm_stderr": 0.048828405482122375 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.40485312899106, + "acc_stderr": 0.017553246467720236, + "acc_norm": 0.40485312899106, + "acc_norm_stderr": 0.017553246467720236 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2814814814814815, + "acc_stderr": 0.03885004245800256, + "acc_norm": 0.2814814814814815, + "acc_norm_stderr": 0.03885004245800256 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 
0.37446808510638296, + "acc_stderr": 0.031639106653672915, + "acc_norm": 0.37446808510638296, + "acc_norm_stderr": 0.031639106653672915 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3433734939759036, + "acc_stderr": 0.03696584317010601, + "acc_norm": 0.3433734939759036, + "acc_norm_stderr": 0.03696584317010601 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.43086816720257237, + "acc_stderr": 0.02812534098397271, + "acc_norm": 0.43086816720257237, + "acc_norm_stderr": 0.02812534098397271 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4125560538116592, + "acc_stderr": 0.03304062175449296, + "acc_norm": 0.4125560538116592, + "acc_norm_stderr": 0.03304062175449296 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48854961832061067, + "acc_stderr": 0.043841400240780176, + "acc_norm": 0.48854961832061067, + "acc_norm_stderr": 0.043841400240780176 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5050505050505051, + "acc_stderr": 0.035621707606254015, + "acc_norm": 0.5050505050505051, + "acc_norm_stderr": 0.035621707606254015 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4827586206896552, + "acc_stderr": 0.04164188720169377, + "acc_norm": 0.4827586206896552, + "acc_norm_stderr": 0.04164188720169377 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.16666666666666666, + "acc_stderr": 0.03708284662416542, + "acc_norm": 0.16666666666666666, + "acc_norm_stderr": 0.03708284662416542 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.44537815126050423, + "acc_stderr": 0.0322841062671639, + "acc_norm": 0.44537815126050423, + "acc_norm_stderr": 0.0322841062671639 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4025641025641026, + "acc_stderr": 0.02486499515976776, + "acc_norm": 0.4025641025641026, + "acc_norm_stderr": 
0.02486499515976776 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5370370370370371, + "acc_stderr": 0.04820403072760628, + "acc_norm": 0.5370370370370371, + "acc_norm_stderr": 0.04820403072760628 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39408866995073893, + "acc_stderr": 0.03438157967036546, + "acc_norm": 0.39408866995073893, + "acc_norm_stderr": 0.03438157967036546 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4290322580645161, + "acc_stderr": 0.02815603653823321, + "acc_norm": 0.4290322580645161, + "acc_norm_stderr": 0.02815603653823321 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6709401709401709, + "acc_stderr": 0.030782321577688166, + "acc_norm": 0.6709401709401709, + "acc_norm_stderr": 0.030782321577688166 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.38113207547169814, + "acc_stderr": 0.029890609686286616, + "acc_norm": 0.38113207547169814, + "acc_norm_stderr": 0.029890609686286616 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.44545454545454544, + "acc_stderr": 0.047605488214603246, + "acc_norm": 0.44545454545454544, + "acc_norm_stderr": 0.047605488214603246 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.34444444444444444, + "acc_stderr": 0.028972648884844267, + "acc_norm": 0.34444444444444444, + "acc_norm_stderr": 0.028972648884844267 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + "acc_stderr": 0.037345356767871984, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.037345356767871984 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.572139303482587, + "acc_stderr": 0.03498541988407795, + "acc_norm": 
0.572139303482587, + "acc_norm_stderr": 0.03498541988407795 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3699421965317919, + "acc_stderr": 0.03681229633394319, + "acc_norm": 0.3699421965317919, + "acc_norm_stderr": 0.03681229633394319 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.02467786284133278, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.02467786284133278 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3680555555555556, + "acc_stderr": 0.04032999053960719, + "acc_norm": 0.3680555555555556, + "acc_norm_stderr": 0.04032999053960719 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237101, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237101 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4884393063583815, + "acc_stderr": 0.026911898686377906, + "acc_norm": 0.4884393063583815, + "acc_norm_stderr": 0.026911898686377906 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4723926380368098, + "acc_stderr": 0.0392237829061099, + "acc_norm": 0.4723926380368098, + "acc_norm_stderr": 0.0392237829061099 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4537037037037037, + "acc_stderr": 0.0277012284685426, + "acc_norm": 0.4537037037037037, + "acc_norm_stderr": 0.0277012284685426 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.49222797927461137, + "acc_stderr": 0.036080032255696545, + "acc_norm": 0.49222797927461137, + "acc_norm_stderr": 0.036080032255696545 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.03999423879281338, + 
"acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.03999423879281338 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.46605504587155966, + "acc_stderr": 0.021387863350353996, + "acc_norm": 0.46605504587155966, + "acc_norm_stderr": 0.021387863350353996 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.38095238095238093, + "acc_stderr": 0.04343525428949098, + "acc_norm": 0.38095238095238093, + "acc_norm_stderr": 0.04343525428949098 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4150326797385621, + "acc_stderr": 0.028213504177824093, + "acc_norm": 0.4150326797385621, + "acc_norm_stderr": 0.028213504177824093 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6033057851239669, + "acc_stderr": 0.044658697805310094, + "acc_norm": 0.6033057851239669, + "acc_norm_stderr": 0.044658697805310094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.39473684210526316, + "acc_stderr": 0.039777499346220734, + "acc_norm": 0.39473684210526316, + "acc_norm_stderr": 0.039777499346220734 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.36764705882352944, + "acc_stderr": 0.019506291693954843, + "acc_norm": 0.36764705882352944, + "acc_norm_stderr": 0.019506291693954843 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.35106382978723405, + "acc_stderr": 0.028473501272963754, + "acc_norm": 0.35106382978723405, + "acc_norm_stderr": 0.028473501272963754 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.38392857142857145, + "acc_stderr": 0.04616143075028547, + "acc_norm": 0.38392857142857145, + "acc_norm_stderr": 0.04616143075028547 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.032757734861009996, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.032757734861009996 + }, + 
"harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.3094972067039106, + "acc_stderr": 0.015461169002371537, + "acc_norm": 0.3094972067039106, + "acc_norm_stderr": 0.015461169002371537 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.57, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.57, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3492647058823529, + "acc_stderr": 0.02895975519682485, + "acc_norm": 0.3492647058823529, + "acc_norm_stderr": 0.02895975519682485 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.46122448979591835, + "acc_stderr": 0.03191282052669276, + "acc_norm": 0.46122448979591835, + "acc_norm_stderr": 0.03191282052669276 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5021097046413502, + "acc_stderr": 0.032546938018020076, + "acc_norm": 0.5021097046413502, + "acc_norm_stderr": 0.032546938018020076 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.29986962190352023, + "acc_stderr": 0.011702660860193986, + "acc_norm": 0.29986962190352023, + "acc_norm_stderr": 0.011702660860193986 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.27941176470588236, + "acc_stderr": 0.031493281045079556, + "acc_norm": 0.27941176470588236, + "acc_norm_stderr": 0.031493281045079556 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.37575757575757573, + "acc_stderr": 0.037818873532059816, + "acc_norm": 0.37575757575757573, + "acc_norm_stderr": 0.037818873532059816 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.31946144430844553, + "mc1_stderr": 0.0163226441829605, + "mc2": 0.5007915665364245, + "mc2_stderr": 0.015888987315996028 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3612750885478158, + "acc_stderr": 0.01651546302241201, + "acc_norm": 
0.38961038961038963, + "acc_norm_stderr": 0.0167661616718935 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + 
"harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "beowolx/CodeNinja-1.0-OpenChat-7B", + "model_sha": "9934c04c767e6ae0f792712a060f02915391d4ec", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/berkeley-nest/Starling-LM-7B-alpha/result_2024-01-04 08:22:03.json b/berkeley-nest/Starling-LM-7B-alpha/result_2024-01-04 08:22:03.json new file mode 100644 index 0000000000000000000000000000000000000000..c7ddbf61351b0e6c24bf5c045f0908cea4ede651 --- /dev/null +++ b/berkeley-nest/Starling-LM-7B-alpha/result_2024-01-04 08:22:03.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3242320819112628, + "acc_stderr": 0.013678810399518824, + "acc_norm": 0.36860068259385664, + "acc_norm_stderr": 0.014097810678042194 + }, + "harness|ko_hellaswag|10": { + "acc": 0.368352917745469, + "acc_stderr": 0.004813719952829966, + "acc_norm": 0.47122087233618803, + "acc_norm_stderr": 0.0049815090992763504 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4853801169590643, + "acc_stderr": 0.038331852752130205, + "acc_norm": 0.4853801169590643, 
+ "acc_norm_stderr": 0.038331852752130205 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5825242718446602, + "acc_stderr": 0.048828405482122375, + "acc_norm": 0.5825242718446602, + "acc_norm_stderr": 0.048828405482122375 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4661558109833972, + "acc_stderr": 0.017838956009136802, + "acc_norm": 0.4661558109833972, + "acc_norm_stderr": 0.017838956009136802 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34074074074074073, + "acc_stderr": 0.040943762699967946, + "acc_norm": 0.34074074074074073, + "acc_norm_stderr": 0.040943762699967946 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3829787234042553, + "acc_stderr": 0.03177821250236922, + "acc_norm": 0.3829787234042553, + "acc_norm_stderr": 0.03177821250236922 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3674698795180723, + "acc_stderr": 0.03753267402120574, + "acc_norm": 0.3674698795180723, + "acc_norm_stderr": 0.03753267402120574 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4405144694533762, + "acc_stderr": 0.028196400574197422, + "acc_norm": 0.4405144694533762, + "acc_norm_stderr": 0.028196400574197422 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4260089686098655, + "acc_stderr": 0.0331883328621728, + "acc_norm": 0.4260089686098655, + "acc_norm_stderr": 0.0331883328621728 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.44274809160305345, + "acc_stderr": 0.043564472026650695, + "acc_norm": 0.44274809160305345, + "acc_norm_stderr": 0.043564472026650695 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5757575757575758, + "acc_stderr": 0.03521224908841586, + "acc_norm": 0.5757575757575758, + 
"acc_norm_stderr": 0.03521224908841586 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4413793103448276, + "acc_stderr": 0.04137931034482758, + "acc_norm": 0.4413793103448276, + "acc_norm_stderr": 0.04137931034482758 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.04533838195929775, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.04533838195929775 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5252100840336135, + "acc_stderr": 0.03243718055137411, + "acc_norm": 0.5252100840336135, + "acc_norm_stderr": 0.03243718055137411 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.025294608023986483, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.025294608023986483 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5185185185185185, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.5185185185185185, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.35960591133004927, + "acc_stderr": 0.03376458246509568, + "acc_norm": 0.35960591133004927, + "acc_norm_stderr": 0.03376458246509568 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4096774193548387, + "acc_stderr": 0.027976054915347343, + "acc_norm": 0.4096774193548387, + "acc_norm_stderr": 0.027976054915347343 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7222222222222222, + "acc_stderr": 0.029343114798094455, + "acc_norm": 0.7222222222222222, + "acc_norm_stderr": 0.029343114798094455 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4528301886792453, + "acc_stderr": 
0.030635627957961827, + "acc_norm": 0.4528301886792453, + "acc_norm_stderr": 0.030635627957961827 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.04769300568972744, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.04769300568972744 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.028742040903948485, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.028742040903948485 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2582781456953642, + "acc_stderr": 0.035737053147634576, + "acc_norm": 0.2582781456953642, + "acc_norm_stderr": 0.035737053147634576 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5870646766169154, + "acc_stderr": 0.03481520803367348, + "acc_norm": 0.5870646766169154, + "acc_norm_stderr": 0.03481520803367348 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.42196531791907516, + "acc_stderr": 0.0376574669386515, + "acc_norm": 0.42196531791907516, + "acc_norm_stderr": 0.0376574669386515 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3412698412698413, + "acc_stderr": 0.024419234966819067, + "acc_norm": 0.3412698412698413, + "acc_norm_stderr": 0.024419234966819067 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3402777777777778, + "acc_stderr": 0.03962135573486219, + "acc_norm": 0.3402777777777778, + "acc_norm_stderr": 0.03962135573486219 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.49421965317919075, + "acc_stderr": 0.026917296179149116, + "acc_norm": 0.49421965317919075, + "acc_norm_stderr": 0.026917296179149116 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 
0.5153374233128835, + "acc_stderr": 0.039265223787088424, + "acc_norm": 0.5153374233128835, + "acc_norm_stderr": 0.039265223787088424 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.027777777777777804, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.027777777777777804 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5129533678756477, + "acc_stderr": 0.036072280610477514, + "acc_norm": 0.5129533678756477, + "acc_norm_stderr": 0.036072280610477514 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.044346007015849245, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.044346007015849245 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5321100917431193, + "acc_stderr": 0.021393071222680804, + "acc_norm": 0.5321100917431193, + "acc_norm_stderr": 0.021393071222680804 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.40476190476190477, + "acc_stderr": 0.04390259265377561, + "acc_norm": 0.40476190476190477, + "acc_norm_stderr": 0.04390259265377561 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.477124183006536, + "acc_stderr": 0.028599936776089782, + "acc_norm": 0.477124183006536, + "acc_norm_stderr": 0.028599936776089782 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5702479338842975, + "acc_stderr": 0.04519082021319773, + "acc_norm": 0.5702479338842975, + "acc_norm_stderr": 0.04519082021319773 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4342105263157895, + "acc_stderr": 0.040335656678483184, + "acc_norm": 0.4342105263157895, + "acc_norm_stderr": 0.040335656678483184 + }, + 
"harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3872549019607843, + "acc_stderr": 0.019706875804085616, + "acc_norm": 0.3872549019607843, + "acc_norm_stderr": 0.019706875804085616 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.35815602836879434, + "acc_stderr": 0.028602085862759412, + "acc_norm": 0.35815602836879434, + "acc_norm_stderr": 0.028602085862759412 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.04547960999764376, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.04547960999764376 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.37962962962962965, + "acc_stderr": 0.03309682581119035, + "acc_norm": 0.37962962962962965, + "acc_norm_stderr": 0.03309682581119035 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.25139664804469275, + "acc_stderr": 0.014508979453553983, + "acc_norm": 0.25139664804469275, + "acc_norm_stderr": 0.014508979453553983 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3382352941176471, + "acc_stderr": 0.028739328513983576, + "acc_norm": 0.3382352941176471, + "acc_norm_stderr": 0.028739328513983576 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5183673469387755, + "acc_stderr": 0.03198761546763128, + "acc_norm": 0.5183673469387755, + "acc_norm_stderr": 0.03198761546763128 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6160337552742616, + "acc_stderr": 0.031658678064106674, + "acc_norm": 0.6160337552742616, + "acc_norm_stderr": 0.031658678064106674 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.318122555410691, + "acc_stderr": 0.011895407281104099, 
+ "acc_norm": 0.318122555410691, + "acc_norm_stderr": 0.011895407281104099 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.46078431372549017, + "acc_stderr": 0.03498501649369527, + "acc_norm": 0.46078431372549017, + "acc_norm_stderr": 0.03498501649369527 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.46060606060606063, + "acc_stderr": 0.03892207016552013, + "acc_norm": 0.46060606060606063, + "acc_norm_stderr": 0.03892207016552013 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.29253365973072215, + "mc1_stderr": 0.015925597445286165, + "mc2": 0.4743809652459109, + "mc2_stderr": 0.01582347326512627 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4332939787485242, + "acc_stderr": 0.017036683641893098, + "acc_norm": 0.45218417945690675, + "acc_norm_stderr": 0.017111567130916785 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + 
"harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "berkeley-nest/Starling-LM-7B-alpha", + "model_sha": "f721e85293598f2ef774e483ae95343e39811577", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/blueapple8259/ANHSY_0.1/result_2023-11-10 10:26:46.json 
b/blueapple8259/ANHSY_0.1/result_2023-11-10 10:26:46.json new file mode 100644 index 0000000000000000000000000000000000000000..31a2edaa2d8b076b087d4366f2ff44e71c8e4a5b --- /dev/null +++ b/blueapple8259/ANHSY_0.1/result_2023-11-10 10:26:46.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.19965870307167236, + "acc_stderr": 0.01168162575688868, + "acc_norm": 0.2593856655290102, + "acc_norm_stderr": 0.012808273573927102 + }, + "harness|ko_hellaswag|10": { + "acc": 0.28759211312487554, + "acc_stderr": 0.0045171484341805035, + "acc_norm": 0.32423819956184025, + "acc_norm_stderr": 0.004671328673217806 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.3216374269005848, + "acc_stderr": 0.03582529442573122, + "acc_norm": 0.3216374269005848, + "acc_norm_stderr": 0.03582529442573122 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.17475728155339806, + "acc_stderr": 0.037601780060266196, + "acc_norm": 0.17475728155339806, + "acc_norm_stderr": 0.037601780060266196 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.015671006009339565, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.015671006009339565 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.32592592592592595, + "acc_stderr": 0.040491220417025055, + "acc_norm": 0.32592592592592595, + "acc_norm_stderr": 0.040491220417025055 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932269, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932269 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2553191489361702, + "acc_stderr": 0.02850485647051421, + "acc_norm": 0.2553191489361702, + "acc_norm_stderr": 0.02850485647051421 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.28313253012048195, + "acc_stderr": 0.03507295431370518, + "acc_norm": 0.28313253012048195, + "acc_norm_stderr": 0.03507295431370518 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2604501607717042, + 
"acc_stderr": 0.02492672322484555, + "acc_norm": 0.2604501607717042, + "acc_norm_stderr": 0.02492672322484555 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3273542600896861, + "acc_stderr": 0.03149384670994131, + "acc_norm": 0.3273542600896861, + "acc_norm_stderr": 0.03149384670994131 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.25190839694656486, + "acc_stderr": 0.03807387116306086, + "acc_norm": 0.25190839694656486, + "acc_norm_stderr": 0.03807387116306086 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.20707070707070707, + "acc_stderr": 0.028869778460267045, + "acc_norm": 0.20707070707070707, + "acc_norm_stderr": 0.028869778460267045 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.27586206896551724, + "acc_stderr": 0.03724563619774631, + "acc_norm": 0.27586206896551724, + "acc_norm_stderr": 0.03724563619774631 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237654, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237654 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.20168067226890757, + "acc_stderr": 0.02606431340630453, + "acc_norm": 0.20168067226890757, + "acc_norm_stderr": 0.02606431340630453 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.02242127361292371, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.02242127361292371 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.22, + "acc_stderr": 0.041633319989322695, + "acc_norm": 0.22, + "acc_norm_stderr": 0.041633319989322695 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 
0.25, + "acc_stderr": 0.04186091791394607, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04186091791394607 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.15763546798029557, + "acc_stderr": 0.025639014131172404, + "acc_norm": 0.15763546798029557, + "acc_norm_stderr": 0.025639014131172404 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.1967741935483871, + "acc_stderr": 0.02261640942074202, + "acc_norm": 0.1967741935483871, + "acc_norm_stderr": 0.02261640942074202 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.18803418803418803, + "acc_stderr": 0.02559819368665224, + "acc_norm": 0.18803418803418803, + "acc_norm_stderr": 0.02559819368665224 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2339622641509434, + "acc_stderr": 0.02605529690115292, + "acc_norm": 0.2339622641509434, + "acc_norm_stderr": 0.02605529690115292 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.20909090909090908, + "acc_stderr": 0.03895091015724136, + "acc_norm": 0.20909090909090908, + "acc_norm_stderr": 0.03895091015724136 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.28888888888888886, + "acc_stderr": 0.027634907264178544, + "acc_norm": 0.28888888888888886, + "acc_norm_stderr": 0.027634907264178544 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2582781456953642, + "acc_stderr": 0.035737053147634576, + "acc_norm": 0.2582781456953642, + "acc_norm_stderr": 0.035737053147634576 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.21890547263681592, + "acc_stderr": 0.029239174636647, + "acc_norm": 0.21890547263681592, + "acc_norm_stderr": 0.029239174636647 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.21965317919075145, + "acc_stderr": 0.031568093627031744, + "acc_norm": 0.21965317919075145, + "acc_norm_stderr": 0.031568093627031744 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.022569897074918417, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 
0.022569897074918417 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.03476590104304134, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.03476590104304134 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.2, + "acc_stderr": 0.040201512610368445, + "acc_norm": 0.2, + "acc_norm_stderr": 0.040201512610368445 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421296, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421296 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.24855491329479767, + "acc_stderr": 0.023267528432100174, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 0.023267528432100174 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2883435582822086, + "acc_stderr": 0.035590395316173425, + "acc_norm": 0.2883435582822086, + "acc_norm_stderr": 0.035590395316173425 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.21296296296296297, + "acc_stderr": 0.02277971908873339, + "acc_norm": 0.21296296296296297, + "acc_norm_stderr": 0.02277971908873339 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.2538860103626943, + "acc_stderr": 0.03141024780565317, + "acc_norm": 0.2538860103626943, + "acc_norm_stderr": 0.03141024780565317 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.21929824561403508, + "acc_stderr": 0.038924311065187525, + "acc_norm": 0.21929824561403508, + "acc_norm_stderr": 0.038924311065187525 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.24036697247706423, + "acc_stderr": 0.01832060732096407, + "acc_norm": 0.24036697247706423, + "acc_norm_stderr": 0.01832060732096407 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.15873015873015872, + "acc_stderr": 0.03268454013011743, + "acc_norm": 0.15873015873015872, + 
"acc_norm_stderr": 0.03268454013011743 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.02355083135199509, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.02355083135199509 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2892561983471074, + "acc_stderr": 0.04139112727635463, + "acc_norm": 0.2892561983471074, + "acc_norm_stderr": 0.04139112727635463 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.17763157894736842, + "acc_stderr": 0.03110318238312338, + "acc_norm": 0.17763157894736842, + "acc_norm_stderr": 0.03110318238312338 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.24183006535947713, + "acc_stderr": 0.017322789207784326, + "acc_norm": 0.24183006535947713, + "acc_norm_stderr": 0.017322789207784326 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.23404255319148937, + "acc_stderr": 0.025257861359432417, + "acc_norm": 0.23404255319148937, + "acc_norm_stderr": 0.025257861359432417 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25, + "acc_stderr": 0.04109974682633932, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04109974682633932 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.0340470532865388, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.0340470532865388 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24022346368715083, + "acc_stderr": 0.014288343803925324, + "acc_norm": 0.24022346368715083, + "acc_norm_stderr": 0.014288343803925324 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.35, + "acc_stderr": 0.04793724854411019, + "acc_norm": 0.35, + "acc_norm_stderr": 0.04793724854411019 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + 
"acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4264705882352941, + "acc_stderr": 0.03004261583271486, + "acc_norm": 0.4264705882352941, + "acc_norm_stderr": 0.03004261583271486 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2163265306122449, + "acc_stderr": 0.026358916334904017, + "acc_norm": 0.2163265306122449, + "acc_norm_stderr": 0.026358916334904017 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2616033755274262, + "acc_stderr": 0.028609516716994934, + "acc_norm": 0.2616033755274262, + "acc_norm_stderr": 0.028609516716994934 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2457627118644068, + "acc_stderr": 0.010996156635142692, + "acc_norm": 0.2457627118644068, + "acc_norm_stderr": 0.010996156635142692 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.25, + "acc_stderr": 0.03039153369274154, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03039153369274154 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03225078108306289, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03225078108306289 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24724602203182375, + "mc1_stderr": 0.015102404797359649, + "mc2": 0.43499689918333406, + "mc2_stderr": 0.015335243970671835 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.33412042502951594, + "acc_stderr": 0.016216763304239688, + "acc_norm": 0.4462809917355372, + "acc_norm_stderr": 0.017090852631668332 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + 
"harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + 
"harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "blueapple8259/ANHSY_0.1", + "model_sha": "62bb441a62b634f0fb14e909bebfabae6506ed39", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/blueapple8259/ANHSY_half_0.2/result_2023-11-12 15:17:33.json b/blueapple8259/ANHSY_half_0.2/result_2023-11-12 15:17:33.json new file mode 100644 index 0000000000000000000000000000000000000000..215c58eeab3dfe81f37a8cd2725cd15533358056 --- /dev/null +++ b/blueapple8259/ANHSY_half_0.2/result_2023-11-12 15:17:33.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.20648464163822525, + "acc_stderr": 0.011828865619002316, + "acc_norm": 0.257679180887372, + "acc_norm_stderr": 0.012780770562768402 + }, + "harness|ko_hellaswag|10": { + "acc": 0.2930691097390958, + "acc_stderr": 0.004542396269999217, + "acc_norm": 0.32971519617606054, + "acc_norm_stderr": 0.00469148881303216 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.18128654970760233, + "acc_stderr": 0.029547741687640027, + "acc_norm": 0.18128654970760233, + "acc_norm_stderr": 0.029547741687640027 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.17475728155339806, + "acc_stderr": 0.037601780060266196, + "acc_norm": 0.17475728155339806, + "acc_norm_stderr": 0.037601780060266196 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.26436781609195403, + "acc_stderr": 0.01576998484069052, + "acc_norm": 0.26436781609195403, + "acc_norm_stderr": 0.01576998484069052 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3111111111111111, + "acc_stderr": 0.03999262876617723, + "acc_norm": 
0.3111111111111111, + "acc_norm_stderr": 0.03999262876617723 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.20425531914893616, + "acc_stderr": 0.02635515841334941, + "acc_norm": 0.20425531914893616, + "acc_norm_stderr": 0.02635515841334941 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.24096385542168675, + "acc_stderr": 0.03329394119073528, + "acc_norm": 0.24096385542168675, + "acc_norm_stderr": 0.03329394119073528 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2861736334405145, + "acc_stderr": 0.025670259242188943, + "acc_norm": 0.2861736334405145, + "acc_norm_stderr": 0.025670259242188943 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3004484304932735, + "acc_stderr": 0.030769352008229132, + "acc_norm": 0.3004484304932735, + "acc_norm_stderr": 0.030769352008229132 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2748091603053435, + "acc_stderr": 0.039153454088478354, + "acc_norm": 0.2748091603053435, + "acc_norm_stderr": 0.039153454088478354 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.19696969696969696, + "acc_stderr": 0.028335609732463348, + "acc_norm": 0.19696969696969696, + "acc_norm_stderr": 0.028335609732463348 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.296551724137931, + "acc_stderr": 0.03806142687309994, + "acc_norm": 0.296551724137931, + "acc_norm_stderr": 0.03806142687309994 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.04389869956808778, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.04389869956808778 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.29831932773109243, + "acc_stderr": 
0.02971914287634285, + "acc_norm": 0.29831932773109243, + "acc_norm_stderr": 0.02971914287634285 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.35384615384615387, + "acc_stderr": 0.024243783994062167, + "acc_norm": 0.35384615384615387, + "acc_norm_stderr": 0.024243783994062167 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.2037037037037037, + "acc_stderr": 0.038935425188248475, + "acc_norm": 0.2037037037037037, + "acc_norm_stderr": 0.038935425188248475 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.23645320197044334, + "acc_stderr": 0.02989611429173355, + "acc_norm": 0.23645320197044334, + "acc_norm_stderr": 0.02989611429173355 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3, + "acc_stderr": 0.026069362295335137, + "acc_norm": 0.3, + "acc_norm_stderr": 0.026069362295335137 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.19658119658119658, + "acc_stderr": 0.02603538609895129, + "acc_norm": 0.19658119658119658, + "acc_norm_stderr": 0.02603538609895129 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2339622641509434, + "acc_stderr": 0.02605529690115292, + "acc_norm": 0.2339622641509434, + "acc_norm_stderr": 0.02605529690115292 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.23636363636363636, + "acc_stderr": 0.04069306319721377, + "acc_norm": 0.23636363636363636, + "acc_norm_stderr": 0.04069306319721377 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.026962424325073828, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.026962424325073828 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 
0.26490066225165565, + "acc_stderr": 0.03603038545360384, + "acc_norm": 0.26490066225165565, + "acc_norm_stderr": 0.03603038545360384 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.2537313432835821, + "acc_stderr": 0.030769444967296018, + "acc_norm": 0.2537313432835821, + "acc_norm_stderr": 0.030769444967296018 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3468208092485549, + "acc_stderr": 0.036291466701596636, + "acc_norm": 0.3468208092485549, + "acc_norm_stderr": 0.036291466701596636 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2566137566137566, + "acc_stderr": 0.022494510767503154, + "acc_norm": 0.2566137566137566, + "acc_norm_stderr": 0.022494510767503154 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.25, + "acc_stderr": 0.03621034121889507, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03621034121889507 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.21676300578034682, + "acc_stderr": 0.022183477668412856, + "acc_norm": 0.21676300578034682, + "acc_norm_stderr": 0.022183477668412856 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.22699386503067484, + "acc_stderr": 0.03291099578615771, + "acc_norm": 0.22699386503067484, + "acc_norm_stderr": 0.03291099578615771 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.24691358024691357, + "acc_stderr": 0.023993501709042117, + "acc_norm": 0.24691358024691357, + "acc_norm_stderr": 0.023993501709042117 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.04605661864718381, + "acc_norm": 0.3, + "acc_norm_stderr": 0.04605661864718381 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 
0.33678756476683935, + "acc_stderr": 0.03410780251836183, + "acc_norm": 0.33678756476683935, + "acc_norm_stderr": 0.03410780251836183 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.04049339297748141, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.04049339297748141 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.23853211009174313, + "acc_stderr": 0.01827257581023187, + "acc_norm": 0.23853211009174313, + "acc_norm_stderr": 0.01827257581023187 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.1984126984126984, + "acc_stderr": 0.03567016675276862, + "acc_norm": 0.1984126984126984, + "acc_norm_stderr": 0.03567016675276862 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.27124183006535946, + "acc_stderr": 0.025457756696667874, + "acc_norm": 0.27124183006535946, + "acc_norm_stderr": 0.025457756696667874 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.22, + "acc_stderr": 0.041633319989322695, + "acc_norm": 0.22, + "acc_norm_stderr": 0.041633319989322695 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2809917355371901, + "acc_stderr": 0.04103203830514512, + "acc_norm": 0.2809917355371901, + "acc_norm_stderr": 0.04103203830514512 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.16447368421052633, + "acc_stderr": 0.030167533468632726, + "acc_norm": 0.16447368421052633, + "acc_norm_stderr": 0.030167533468632726 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.01740181671142765, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.01740181671142765 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2553191489361702, + "acc_stderr": 0.026011992930902013, + "acc_norm": 0.2553191489361702, + "acc_norm_stderr": 0.026011992930902013 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.16964285714285715, + "acc_stderr": 0.03562367850095391, + "acc_norm": 0.16964285714285715, + "acc_norm_stderr": 
0.03562367850095391 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.0340470532865388, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.0340470532865388 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.014333522059217889, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.014333522059217889 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4338235294117647, + "acc_stderr": 0.030105636570016636, + "acc_norm": 0.4338235294117647, + "acc_norm_stderr": 0.030105636570016636 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.21224489795918366, + "acc_stderr": 0.026176967197866764, + "acc_norm": 0.21224489795918366, + "acc_norm_stderr": 0.026176967197866764 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2742616033755274, + "acc_stderr": 0.029041333510598035, + "acc_norm": 0.2742616033755274, + "acc_norm_stderr": 0.029041333510598035 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.24119947848761408, + "acc_stderr": 0.010926496102034947, + "acc_norm": 0.24119947848761408, + "acc_norm_stderr": 0.010926496102034947 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.25980392156862747, + "acc_stderr": 0.030778554678693254, + "acc_norm": 0.25980392156862747, + "acc_norm_stderr": 0.030778554678693254 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.24242424242424243, + "acc_stderr": 0.03346409881055953, + "acc_norm": 0.24242424242424243, + "acc_norm_stderr": 0.03346409881055953 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2533659730722154, + 
"mc1_stderr": 0.01522589934082682, + "mc2": 0.4338664363865149, + "mc2_stderr": 0.0154180940348635 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.33884297520661155, + "acc_stderr": 0.016272952997019124, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.017014038119297463 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + 
"harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "blueapple8259/ANHSY_half_0.2", + "model_sha": "71a877f97ed8246d44a4fe81e7fbc9b5049e4dff", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/blueapple8259/ANHSY_test/result_2023-11-11 03:28:51.json b/blueapple8259/ANHSY_test/result_2023-11-11 03:28:51.json new file mode 100644 index 0000000000000000000000000000000000000000..a9daa17be38789788b1154616b2add3788c7e778 --- /dev/null +++ b/blueapple8259/ANHSY_test/result_2023-11-11 03:28:51.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.20136518771331058, + "acc_stderr": 0.011718927477444269, + "acc_norm": 0.25170648464163825, + "acc_norm_stderr": 0.012682496334042961 + }, + "harness|ko_hellaswag|10": { + "acc": 0.2892850029874527, + "acc_stderr": 0.004525037849178839, + "acc_norm": 0.32822146982672773, + 
"acc_norm_stderr": 0.00468606242115814 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.21052631578947367, + "acc_stderr": 0.0312678171466318, + "acc_norm": 0.21052631578947367, + "acc_norm_stderr": 0.0312678171466318 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.17475728155339806, + "acc_stderr": 0.037601780060266196, + "acc_norm": 0.17475728155339806, + "acc_norm_stderr": 0.037601780060266196 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2707535121328225, + "acc_stderr": 0.01588988836256049, + "acc_norm": 0.2707535121328225, + "acc_norm_stderr": 0.01588988836256049 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04072314811876837, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04072314811876837 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.18723404255319148, + "acc_stderr": 0.025501588341883614, + "acc_norm": 0.18723404255319148, + "acc_norm_stderr": 0.025501588341883614 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.28313253012048195, + "acc_stderr": 0.03507295431370518, + "acc_norm": 0.28313253012048195, + "acc_norm_stderr": 0.03507295431370518 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2990353697749196, + "acc_stderr": 0.02600330111788514, + "acc_norm": 0.2990353697749196, + "acc_norm_stderr": 0.02600330111788514 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.336322869955157, + "acc_stderr": 0.031708824268455, + "acc_norm": 0.336322869955157, + "acc_norm_stderr": 0.031708824268455 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.25190839694656486, + "acc_stderr": 0.03807387116306085, + "acc_norm": 0.25190839694656486, + "acc_norm_stderr": 0.03807387116306085 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 
0.046056618647183814 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.029620227874790458, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.029620227874790458 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.22758620689655173, + "acc_stderr": 0.03493950380131184, + "acc_norm": 0.22758620689655173, + "acc_norm_stderr": 0.03493950380131184 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.04023382273617747, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.04023382273617747 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.23109243697478993, + "acc_stderr": 0.02738140692786896, + "acc_norm": 0.23109243697478993, + "acc_norm_stderr": 0.02738140692786896 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2692307692307692, + "acc_stderr": 0.022489389793654835, + "acc_norm": 0.2692307692307692, + "acc_norm_stderr": 0.022489389793654835 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.23148148148148148, + "acc_stderr": 0.04077494709252628, + "acc_norm": 0.23148148148148148, + "acc_norm_stderr": 0.04077494709252628 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3103448275862069, + "acc_stderr": 0.03255086769970103, + "acc_norm": 0.3103448275862069, + "acc_norm_stderr": 0.03255086769970103 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2870967741935484, + "acc_stderr": 0.025736542745594525, + "acc_norm": 0.2870967741935484, + "acc_norm_stderr": 0.025736542745594525 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.19230769230769232, + "acc_stderr": 0.025819233256483727, 
+ "acc_norm": 0.19230769230769232, + "acc_norm_stderr": 0.025819233256483727 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.27169811320754716, + "acc_stderr": 0.027377706624670713, + "acc_norm": 0.27169811320754716, + "acc_norm_stderr": 0.027377706624670713 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.20909090909090908, + "acc_stderr": 0.03895091015724135, + "acc_norm": 0.20909090909090908, + "acc_norm_stderr": 0.03895091015724135 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.027309140588230196, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.027309140588230196 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + "acc_stderr": 0.03684881521389023, + "acc_norm": 0.2847682119205298, + "acc_norm_stderr": 0.03684881521389023 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.24378109452736318, + "acc_stderr": 0.03036049015401464, + "acc_norm": 0.24378109452736318, + "acc_norm_stderr": 0.03036049015401464 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.20809248554913296, + "acc_stderr": 0.030952890217749884, + "acc_norm": 0.20809248554913296, + "acc_norm_stderr": 0.030952890217749884 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2566137566137566, + "acc_stderr": 0.022494510767503154, + "acc_norm": 0.2566137566137566, + "acc_norm_stderr": 0.022494510767503154 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.03476590104304134, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.03476590104304134 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 
0.24277456647398843, + "acc_stderr": 0.0230836585869842, + "acc_norm": 0.24277456647398843, + "acc_norm_stderr": 0.0230836585869842 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3067484662576687, + "acc_stderr": 0.03623089915724148, + "acc_norm": 0.3067484662576687, + "acc_norm_stderr": 0.03623089915724148 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.24691358024691357, + "acc_stderr": 0.023993501709042096, + "acc_norm": 0.24691358024691357, + "acc_norm_stderr": 0.023993501709042096 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421296, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421296 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.23834196891191708, + "acc_stderr": 0.030748905363909902, + "acc_norm": 0.23834196891191708, + "acc_norm_stderr": 0.030748905363909902 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.04049339297748141, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.04049339297748141 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.22752293577981653, + "acc_stderr": 0.0179744635787765, + "acc_norm": 0.22752293577981653, + "acc_norm_stderr": 0.0179744635787765 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.14285714285714285, + "acc_stderr": 0.031298431857438094, + "acc_norm": 0.14285714285714285, + "acc_norm_stderr": 0.031298431857438094 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.26143790849673204, + "acc_stderr": 0.025160998214292456, + "acc_norm": 0.26143790849673204, + "acc_norm_stderr": 0.025160998214292456 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909284, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909284 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.24793388429752067, + "acc_stderr": 0.039418975265163025, + "acc_norm": 0.24793388429752067, + "acc_norm_stderr": 0.039418975265163025 + }, + 
"harness|ko_mmlu_astronomy|5": { + "acc": 0.19078947368421054, + "acc_stderr": 0.03197565821032501, + "acc_norm": 0.19078947368421054, + "acc_norm_stderr": 0.03197565821032501 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2173202614379085, + "acc_stderr": 0.016684820929148594, + "acc_norm": 0.2173202614379085, + "acc_norm_stderr": 0.016684820929148594 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2624113475177305, + "acc_stderr": 0.026244920349843007, + "acc_norm": 0.2624113475177305, + "acc_norm_stderr": 0.026244920349843007 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.22321428571428573, + "acc_stderr": 0.03952301967702511, + "acc_norm": 0.22321428571428573, + "acc_norm_stderr": 0.03952301967702511 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.0340470532865388, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.0340470532865388 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24022346368715083, + "acc_stderr": 0.014288343803925328, + "acc_norm": 0.24022346368715083, + "acc_norm_stderr": 0.014288343803925328 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.40808823529411764, + "acc_stderr": 0.029855261393483924, + "acc_norm": 0.40808823529411764, + "acc_norm_stderr": 0.029855261393483924 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.16326530612244897, + "acc_stderr": 0.023661699177098598, + "acc_norm": 0.16326530612244897, + "acc_norm_stderr": 0.023661699177098598 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2616033755274262, + "acc_stderr": 0.028609516716994934, + 
"acc_norm": 0.2616033755274262, + "acc_norm_stderr": 0.028609516716994934 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2522816166883963, + "acc_stderr": 0.011092789056875229, + "acc_norm": 0.2522816166883963, + "acc_norm_stderr": 0.011092789056875229 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.030587591351604243, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.030587591351604243 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.22424242424242424, + "acc_stderr": 0.032568666616811015, + "acc_norm": 0.22424242424242424, + "acc_norm_stderr": 0.032568666616811015 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24724602203182375, + "mc1_stderr": 0.015102404797359649, + "mc2": 0.44204190262154125, + "mc2_stderr": 0.015345648446767756 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3364817001180638, + "acc_stderr": 0.016245085294386556, + "acc_norm": 0.4427390791027155, + "acc_norm_stderr": 0.017077254131556217 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + 
"harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "blueapple8259/ANHSY_test", + "model_sha": "eb2f1cb1cc7a4dfab1e641fb65c64293ed14006c", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + 
"override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/blueapple8259/ANHSY_test2/result_2023-11-11 16:14:35.json b/blueapple8259/ANHSY_test2/result_2023-11-11 16:14:35.json new file mode 100644 index 0000000000000000000000000000000000000000..3545f076310a491abbc7a11cf8976932d11753b7 --- /dev/null +++ b/blueapple8259/ANHSY_test2/result_2023-11-11 16:14:35.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.20733788395904437, + "acc_stderr": 0.011846905782971368, + "acc_norm": 0.2440273037542662, + "acc_norm_stderr": 0.012551447627856259 + }, + "harness|ko_hellaswag|10": { + "acc": 0.2900816570404302, + "acc_stderr": 0.004528723951878253, + "acc_norm": 0.32772356104361683, + "acc_norm_stderr": 0.004684241685200313 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.30409356725146197, + "acc_stderr": 0.03528211258245232, + "acc_norm": 0.30409356725146197, + "acc_norm_stderr": 0.03528211258245232 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.17475728155339806, + "acc_stderr": 0.037601780060266196, + "acc_norm": 0.17475728155339806, + "acc_norm_stderr": 0.037601780060266196 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.280970625798212, + "acc_stderr": 0.01607312785122125, + "acc_norm": 0.280970625798212, + "acc_norm_stderr": 0.01607312785122125 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34074074074074073, + "acc_stderr": 0.040943762699967946, + "acc_norm": 0.34074074074074073, + "acc_norm_stderr": 0.040943762699967946 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932268, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932268 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.26382978723404255, + "acc_stderr": 0.02880998985410299, + "acc_norm": 0.26382978723404255, + "acc_norm_stderr": 0.02880998985410299 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.28313253012048195, + "acc_stderr": 0.03507295431370518, + 
"acc_norm": 0.28313253012048195, + "acc_norm_stderr": 0.03507295431370518 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.26366559485530544, + "acc_stderr": 0.02502553850053234, + "acc_norm": 0.26366559485530544, + "acc_norm_stderr": 0.02502553850053234 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.2062780269058296, + "acc_stderr": 0.027157150479563824, + "acc_norm": 0.2062780269058296, + "acc_norm_stderr": 0.027157150479563824 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2595419847328244, + "acc_stderr": 0.03844876139785271, + "acc_norm": 0.2595419847328244, + "acc_norm_stderr": 0.03844876139785271 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.18686868686868688, + "acc_stderr": 0.027772533334218967, + "acc_norm": 0.18686868686868688, + "acc_norm_stderr": 0.027772533334218967 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.22758620689655173, + "acc_stderr": 0.03493950380131184, + "acc_norm": 0.22758620689655173, + "acc_norm_stderr": 0.03493950380131184 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.18627450980392157, + "acc_stderr": 0.038739587141493524, + "acc_norm": 0.18627450980392157, + "acc_norm_stderr": 0.038739587141493524 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.25630252100840334, + "acc_stderr": 0.028359620870533946, + "acc_norm": 0.25630252100840334, + "acc_norm_stderr": 0.028359620870533946 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.24358974358974358, + "acc_stderr": 0.021763733684173933, + "acc_norm": 0.24358974358974358, + "acc_norm_stderr": 0.021763733684173933 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.19, + "acc_stderr": 0.03942772444036625, + "acc_norm": 0.19, + "acc_norm_stderr": 0.03942772444036625 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.27, 
+ "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.25, + "acc_stderr": 0.04186091791394607, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04186091791394607 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.18719211822660098, + "acc_stderr": 0.027444924966882618, + "acc_norm": 0.18719211822660098, + "acc_norm_stderr": 0.027444924966882618 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.25806451612903225, + "acc_stderr": 0.024892469172462833, + "acc_norm": 0.25806451612903225, + "acc_norm_stderr": 0.024892469172462833 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.23931623931623933, + "acc_stderr": 0.02795182680892433, + "acc_norm": 0.23931623931623933, + "acc_norm_stderr": 0.02795182680892433 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2188679245283019, + "acc_stderr": 0.025447863825108608, + "acc_norm": 0.2188679245283019, + "acc_norm_stderr": 0.025447863825108608 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.20909090909090908, + "acc_stderr": 0.038950910157241364, + "acc_norm": 0.20909090909090908, + "acc_norm_stderr": 0.038950910157241364 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2740740740740741, + "acc_stderr": 0.027195934804085622, + "acc_norm": 0.2740740740740741, + "acc_norm_stderr": 0.027195934804085622 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.271523178807947, + "acc_stderr": 0.03631329803969653, + "acc_norm": 0.271523178807947, + "acc_norm_stderr": 0.03631329803969653 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.263681592039801, + "acc_stderr": 0.03115715086935558, + "acc_norm": 0.263681592039801, + "acc_norm_stderr": 0.03115715086935558 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2254335260115607, + "acc_stderr": 0.03186209851641145, + "acc_norm": 0.2254335260115607, + "acc_norm_stderr": 0.03186209851641145 + }, + 
"harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2724867724867725, + "acc_stderr": 0.022930973071633345, + "acc_norm": 0.2724867724867725, + "acc_norm_stderr": 0.022930973071633345 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.20833333333333334, + "acc_stderr": 0.03396116205845335, + "acc_norm": 0.20833333333333334, + "acc_norm_stderr": 0.03396116205845335 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.2, + "acc_stderr": 0.040201512610368445, + "acc_norm": 0.2, + "acc_norm_stderr": 0.040201512610368445 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.3, + "acc_stderr": 0.04605661864718381, + "acc_norm": 0.3, + "acc_norm_stderr": 0.04605661864718381 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.24855491329479767, + "acc_stderr": 0.023267528432100174, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 0.023267528432100174 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2883435582822086, + "acc_stderr": 0.035590395316173425, + "acc_norm": 0.2883435582822086, + "acc_norm_stderr": 0.035590395316173425 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.21604938271604937, + "acc_stderr": 0.022899162918445796, + "acc_norm": 0.21604938271604937, + "acc_norm_stderr": 0.022899162918445796 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.3160621761658031, + "acc_stderr": 0.033553973696861736, + "acc_norm": 0.3160621761658031, + "acc_norm_stderr": 0.033553973696861736 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.040969851398436716, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.040969851398436716 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.22201834862385322, + "acc_stderr": 0.017818849564796627, + "acc_norm": 0.22201834862385322, + "acc_norm_stderr": 
0.017818849564796627 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.21428571428571427, + "acc_stderr": 0.03670066451047181, + "acc_norm": 0.21428571428571427, + "acc_norm_stderr": 0.03670066451047181 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.21241830065359477, + "acc_stderr": 0.023420375478296136, + "acc_norm": 0.21241830065359477, + "acc_norm_stderr": 0.023420375478296136 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909282, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909282 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2396694214876033, + "acc_stderr": 0.038968789850704164, + "acc_norm": 0.2396694214876033, + "acc_norm_stderr": 0.038968789850704164 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.17763157894736842, + "acc_stderr": 0.03110318238312338, + "acc_norm": 0.17763157894736842, + "acc_norm_stderr": 0.03110318238312338 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.25163398692810457, + "acc_stderr": 0.01755581809132227, + "acc_norm": 0.25163398692810457, + "acc_norm_stderr": 0.01755581809132227 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.23049645390070922, + "acc_stderr": 0.02512373922687241, + "acc_norm": 0.23049645390070922, + "acc_norm_stderr": 0.02512373922687241 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.19642857142857142, + "acc_stderr": 0.03770970049347018, + "acc_norm": 0.19642857142857142, + "acc_norm_stderr": 0.03770970049347018 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.39351851851851855, + "acc_stderr": 0.03331747876370312, + "acc_norm": 0.39351851851851855, + "acc_norm_stderr": 0.03331747876370312 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2435754189944134, + "acc_stderr": 0.014355911964767867, + "acc_norm": 0.2435754189944134, + "acc_norm_stderr": 0.014355911964767867 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 
0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4375, + "acc_stderr": 0.030134614954403924, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.030134614954403924 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.19591836734693877, + "acc_stderr": 0.025409301953225678, + "acc_norm": 0.19591836734693877, + "acc_norm_stderr": 0.025409301953225678 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2869198312236287, + "acc_stderr": 0.029443773022594693, + "acc_norm": 0.2869198312236287, + "acc_norm_stderr": 0.029443773022594693 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.24511082138200782, + "acc_stderr": 0.010986307870045519, + "acc_norm": 0.24511082138200782, + "acc_norm_stderr": 0.010986307870045519 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.25980392156862747, + "acc_stderr": 0.030778554678693257, + "acc_norm": 0.25980392156862747, + "acc_norm_stderr": 0.030778554678693257 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.24242424242424243, + "acc_stderr": 0.033464098810559534, + "acc_norm": 0.24242424242424243, + "acc_norm_stderr": 0.033464098810559534 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2533659730722154, + "mc1_stderr": 0.015225899340826824, + "mc2": 0.44746742387165916, + "mc2_stderr": 0.015687397901643654 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3435655253837072, + "acc_stderr": 0.016327334806429134, + "acc_norm": 0.4510035419126328, + "acc_norm_stderr": 0.017107618859549357 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + 
"harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + 
"harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "blueapple8259/ANHSY_test2", + "model_sha": "bb2cfbebcdf945d14ec8f53215d9f9b5bbc96742", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/blueapple8259/SmallKo/result_2024-03-02 04:34:18.json b/blueapple8259/SmallKo/result_2024-03-02 04:34:18.json new file mode 100644 index 0000000000000000000000000000000000000000..7839f465220563a5379b22f3944048dbe771ace7 --- /dev/null +++ b/blueapple8259/SmallKo/result_2024-03-02 04:34:18.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.18600682593856654, + "acc_stderr": 0.011370940183266735, + "acc_norm": 0.2568259385665529, + "acc_norm_stderr": 0.012766923794116801 + }, + "harness|ko_hellaswag|10": { + "acc": 0.26996614220274845, + "acc_stderr": 0.00443034623465038, + "acc_norm": 0.2846046604262099, + "acc_norm_stderr": 0.004503037601847085 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.2573099415204678, + "acc_stderr": 0.03352799844161865, + "acc_norm": 0.2573099415204678, + "acc_norm_stderr": 0.03352799844161865 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.17475728155339806, + "acc_stderr": 0.037601780060266196, + "acc_norm": 0.17475728155339806, + "acc_norm_stderr": 0.037601780060266196 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2835249042145594, + "acc_stderr": 
0.016117318166832283, + "acc_norm": 0.2835249042145594, + "acc_norm_stderr": 0.016117318166832283 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.038201699145179055, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.038201699145179055 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932268, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932268 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.20851063829787234, + "acc_stderr": 0.026556982117838725, + "acc_norm": 0.20851063829787234, + "acc_norm_stderr": 0.026556982117838725 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.28313253012048195, + "acc_stderr": 0.03507295431370518, + "acc_norm": 0.28313253012048195, + "acc_norm_stderr": 0.03507295431370518 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.28938906752411575, + "acc_stderr": 0.02575586592263294, + "acc_norm": 0.28938906752411575, + "acc_norm_stderr": 0.02575586592263294 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3721973094170404, + "acc_stderr": 0.0324430528300873, + "acc_norm": 0.3721973094170404, + "acc_norm_stderr": 0.0324430528300873 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.24427480916030533, + "acc_stderr": 0.03768335959728745, + "acc_norm": 0.24427480916030533, + "acc_norm_stderr": 0.03768335959728745 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.24242424242424243, + "acc_stderr": 0.03053289223393205, + "acc_norm": 0.24242424242424243, + "acc_norm_stderr": 0.03053289223393205 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2689655172413793, + "acc_stderr": 0.03695183311650232, + "acc_norm": 0.2689655172413793, + "acc_norm_stderr": 0.03695183311650232 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.19607843137254902, + 
"acc_stderr": 0.03950581861179962, + "acc_norm": 0.19607843137254902, + "acc_norm_stderr": 0.03950581861179962 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.22268907563025211, + "acc_stderr": 0.027025433498882385, + "acc_norm": 0.22268907563025211, + "acc_norm_stderr": 0.027025433498882385 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.34615384615384615, + "acc_stderr": 0.024121125416941176, + "acc_norm": 0.34615384615384615, + "acc_norm_stderr": 0.024121125416941176 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.040191074725573483, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.040191074725573483 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2019704433497537, + "acc_stderr": 0.028247350122180277, + "acc_norm": 0.2019704433497537, + "acc_norm_stderr": 0.028247350122180277 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.267741935483871, + "acc_stderr": 0.02518900666021238, + "acc_norm": 0.267741935483871, + "acc_norm_stderr": 0.02518900666021238 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.19658119658119658, + "acc_stderr": 0.02603538609895129, + "acc_norm": 0.19658119658119658, + "acc_norm_stderr": 0.02603538609895129 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2641509433962264, + "acc_stderr": 0.027134291628741716, + "acc_norm": 0.2641509433962264, + "acc_norm_stderr": 0.027134291628741716 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.20909090909090908, + "acc_stderr": 0.038950910157241364, + "acc_norm": 0.20909090909090908, + "acc_norm_stderr": 0.038950910157241364 + }, + 
"harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.29259259259259257, + "acc_stderr": 0.02773896963217609, + "acc_norm": 0.29259259259259257, + "acc_norm_stderr": 0.02773896963217609 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + "acc_stderr": 0.036848815213890225, + "acc_norm": 0.2847682119205298, + "acc_norm_stderr": 0.036848815213890225 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.24378109452736318, + "acc_stderr": 0.030360490154014624, + "acc_norm": 0.24378109452736318, + "acc_norm_stderr": 0.030360490154014624 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.1907514450867052, + "acc_stderr": 0.029957851329869337, + "acc_norm": 0.1907514450867052, + "acc_norm_stderr": 0.029957851329869337 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2566137566137566, + "acc_stderr": 0.022494510767503154, + "acc_norm": 0.2566137566137566, + "acc_norm_stderr": 0.022494510767503154 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2638888888888889, + "acc_stderr": 0.03685651095897532, + "acc_norm": 0.2638888888888889, + "acc_norm_stderr": 0.03685651095897532 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.25722543352601157, + "acc_stderr": 0.023532925431044283, + "acc_norm": 0.25722543352601157, + "acc_norm_stderr": 0.023532925431044283 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2331288343558282, + "acc_stderr": 0.0332201579577674, + "acc_norm": 0.2331288343558282, + "acc_norm_stderr": 0.0332201579577674 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.25, + "acc_stderr": 0.02409347123262133, + "acc_norm": 0.25, + "acc_norm_stderr": 0.02409347123262133 + 
}, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421296, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421296 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.36787564766839376, + "acc_stderr": 0.03480175668466037, + "acc_norm": 0.36787564766839376, + "acc_norm_stderr": 0.03480175668466037 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.04096985139843671, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.04096985139843671 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.22385321100917432, + "acc_stderr": 0.017871217767790232, + "acc_norm": 0.22385321100917432, + "acc_norm_stderr": 0.017871217767790232 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.1984126984126984, + "acc_stderr": 0.035670166752768614, + "acc_norm": 0.1984126984126984, + "acc_norm_stderr": 0.035670166752768614 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.22875816993464052, + "acc_stderr": 0.024051029739912258, + "acc_norm": 0.22875816993464052, + "acc_norm_stderr": 0.024051029739912258 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932269, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932269 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.30578512396694213, + "acc_stderr": 0.04205953933884122, + "acc_norm": 0.30578512396694213, + "acc_norm_stderr": 0.04205953933884122 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.13815789473684212, + "acc_stderr": 0.028081042939576552, + "acc_norm": 0.13815789473684212, + "acc_norm_stderr": 0.028081042939576552 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.016639319350313264, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.016639319350313264 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2553191489361702, + "acc_stderr": 0.026011992930902013, + "acc_norm": 
0.2553191489361702, + "acc_norm_stderr": 0.026011992930902013 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.04287858751340456, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.04287858751340456 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4583333333333333, + "acc_stderr": 0.03398110890294636, + "acc_norm": 0.4583333333333333, + "acc_norm_stderr": 0.03398110890294636 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23798882681564246, + "acc_stderr": 0.014242630070574892, + "acc_norm": 0.23798882681564246, + "acc_norm_stderr": 0.014242630070574892 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.26838235294117646, + "acc_stderr": 0.026917481224377232, + "acc_norm": 0.26838235294117646, + "acc_norm_stderr": 0.026917481224377232 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3183673469387755, + "acc_stderr": 0.029822533793982052, + "acc_norm": 0.3183673469387755, + "acc_norm_stderr": 0.029822533793982052 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.22784810126582278, + "acc_stderr": 0.027303484599069422, + "acc_norm": 0.22784810126582278, + "acc_norm_stderr": 0.027303484599069422 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2457627118644068, + "acc_stderr": 0.010996156635142692, + "acc_norm": 0.2457627118644068, + "acc_norm_stderr": 0.010996156635142692 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2696078431372549, + "acc_stderr": 0.031145570659486782, + "acc_norm": 0.2696078431372549, + "acc_norm_stderr": 0.031145570659486782 + }, + 
"harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03225078108306289, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03225078108306289 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.26805385556915545, + "mc1_stderr": 0.015506204722834562, + "mc2": 0.4782618487671573, + "mc2_stderr": 0.0160111822942309 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2833530106257379, + "acc_stderr": 0.01549285208459724, + "acc_norm": 0.4085005903187721, + "acc_norm_stderr": 0.016900062879427115 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + 
"harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "blueapple8259/SmallKo", + "model_sha": "6cfa6c9d845c862d9a198d9f758623d9605a05e4", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/blueapple8259/TinyKo-V2/result_2023-12-18 13:43:41.json b/blueapple8259/TinyKo-V2/result_2023-12-18 13:43:41.json new file mode 100644 index 0000000000000000000000000000000000000000..c1e3ac2eb9f10293768112cd153d8453d731bcee --- /dev/null +++ b/blueapple8259/TinyKo-V2/result_2023-12-18 13:43:41.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.19880546075085323, + 
"acc_stderr": 0.011662850198175534, + "acc_norm": 0.2431740614334471, + "acc_norm_stderr": 0.012536554144587096 + }, + "harness|ko_hellaswag|10": { + "acc": 0.25582553276239794, + "acc_stderr": 0.004354325017137537, + "acc_norm": 0.25423222465644296, + "acc_norm_stderr": 0.004345388614520016 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.03301405946987252, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.03301405946987252 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.1941747572815534, + "acc_stderr": 0.03916667762822585, + "acc_norm": 0.1941747572815534, + "acc_norm_stderr": 0.03916667762822585 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2541507024265645, + "acc_stderr": 0.015569254692045792, + "acc_norm": 0.2541507024265645, + "acc_norm_stderr": 0.015569254692045792 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.21481481481481482, + "acc_stderr": 0.03547854198560823, + "acc_norm": 0.21481481481481482, + "acc_norm_stderr": 0.03547854198560823 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932268, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932268 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.25957446808510637, + "acc_stderr": 0.028659179374292326, + "acc_norm": 0.25957446808510637, + "acc_norm_stderr": 0.028659179374292326 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3072289156626506, + "acc_stderr": 0.03591566797824665, + "acc_norm": 0.3072289156626506, + "acc_norm_stderr": 0.03591566797824665 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2733118971061093, + "acc_stderr": 0.025311765975426115, + "acc_norm": 0.2733118971061093, + "acc_norm_stderr": 0.025311765975426115 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3991031390134529, + "acc_stderr": 0.032867453125679603, + "acc_norm": 0.3991031390134529, + "acc_norm_stderr": 0.032867453125679603 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 
0.21374045801526717, + "acc_stderr": 0.0359546161177469, + "acc_norm": 0.21374045801526717, + "acc_norm_stderr": 0.0359546161177469 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768077, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768077 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.16666666666666666, + "acc_stderr": 0.02655220782821529, + "acc_norm": 0.16666666666666666, + "acc_norm_stderr": 0.02655220782821529 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2413793103448276, + "acc_stderr": 0.03565998174135302, + "acc_norm": 0.2413793103448276, + "acc_norm_stderr": 0.03565998174135302 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.04158307533083286, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.04158307533083286 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.2184873949579832, + "acc_stderr": 0.026841514322958955, + "acc_norm": 0.2184873949579832, + "acc_norm_stderr": 0.026841514322958955 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.21025641025641026, + "acc_stderr": 0.020660597485026945, + "acc_norm": 0.21025641025641026, + "acc_norm_stderr": 0.020660597485026945 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.18, + "acc_stderr": 0.03861229196653694, + "acc_norm": 0.18, + "acc_norm_stderr": 0.03861229196653694 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.04330043749650742, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.04330043749650742 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2413793103448276, + "acc_stderr": 0.030108330718011625, + "acc_norm": 0.2413793103448276, + "acc_norm_stderr": 0.030108330718011625 + }, + 
"harness|ko_mmlu_high_school_biology|5": { + "acc": 0.24516129032258063, + "acc_stderr": 0.024472243840895518, + "acc_norm": 0.24516129032258063, + "acc_norm_stderr": 0.024472243840895518 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2948717948717949, + "acc_stderr": 0.02987257770889117, + "acc_norm": 0.2948717948717949, + "acc_norm_stderr": 0.02987257770889117 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.26037735849056604, + "acc_stderr": 0.027008766090708094, + "acc_norm": 0.26037735849056604, + "acc_norm_stderr": 0.027008766090708094 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2909090909090909, + "acc_stderr": 0.04350271442923243, + "acc_norm": 0.2909090909090909, + "acc_norm_stderr": 0.04350271442923243 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2111111111111111, + "acc_stderr": 0.024882116857655075, + "acc_norm": 0.2111111111111111, + "acc_norm_stderr": 0.024882116857655075 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.17880794701986755, + "acc_stderr": 0.031287448506007225, + "acc_norm": 0.17880794701986755, + "acc_norm_stderr": 0.031287448506007225 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.23383084577114427, + "acc_stderr": 0.02992941540834839, + "acc_norm": 0.23383084577114427, + "acc_norm_stderr": 0.02992941540834839 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.21965317919075145, + "acc_stderr": 0.031568093627031744, + "acc_norm": 0.21965317919075145, + "acc_norm_stderr": 0.031568093627031744 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.20899470899470898, + "acc_stderr": 0.020940481565334835, + "acc_norm": 0.20899470899470898, + "acc_norm_stderr": 0.020940481565334835 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.034765901043041336, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.034765901043041336 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.21, + "acc_stderr": 
0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909282, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909282 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.24566473988439305, + "acc_stderr": 0.02317629820399201, + "acc_norm": 0.24566473988439305, + "acc_norm_stderr": 0.02317629820399201 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.24539877300613497, + "acc_stderr": 0.03380939813943353, + "acc_norm": 0.24539877300613497, + "acc_norm_stderr": 0.03380939813943353 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2623456790123457, + "acc_stderr": 0.024477222856135118, + "acc_norm": 0.2623456790123457, + "acc_norm_stderr": 0.024477222856135118 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036844, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036844 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.19170984455958548, + "acc_stderr": 0.02840895362624528, + "acc_norm": 0.19170984455958548, + "acc_norm_stderr": 0.02840895362624528 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.0409698513984367, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.0409698513984367 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.23669724770642203, + "acc_stderr": 0.018224078117299095, + "acc_norm": 0.23669724770642203, + "acc_norm_stderr": 0.018224078117299095 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.1984126984126984, + "acc_stderr": 0.03567016675276862, + "acc_norm": 0.1984126984126984, + "acc_norm_stderr": 0.03567016675276862 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.02380518652488816, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.02380518652488816 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.24, + "acc_stderr": 
0.04292346959909284, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909284 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2231404958677686, + "acc_stderr": 0.03800754475228733, + "acc_norm": 0.2231404958677686, + "acc_norm_stderr": 0.03800754475228733 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.18421052631578946, + "acc_stderr": 0.031546980450822305, + "acc_norm": 0.18421052631578946, + "acc_norm_stderr": 0.031546980450822305 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2565359477124183, + "acc_stderr": 0.017667841612378984, + "acc_norm": 0.2565359477124183, + "acc_norm_stderr": 0.017667841612378984 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.23404255319148937, + "acc_stderr": 0.025257861359432414, + "acc_norm": 0.23404255319148937, + "acc_norm_stderr": 0.025257861359432414 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2767857142857143, + "acc_stderr": 0.042466243366976256, + "acc_norm": 0.2767857142857143, + "acc_norm_stderr": 0.042466243366976256 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.14351851851851852, + "acc_stderr": 0.02391077925264438, + "acc_norm": 0.14351851851851852, + "acc_norm_stderr": 0.02391077925264438 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23910614525139665, + "acc_stderr": 0.014265554192331161, + "acc_norm": 0.23910614525139665, + "acc_norm_stderr": 0.014265554192331161 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.20220588235294118, + "acc_stderr": 0.02439819298665492, + "acc_norm": 0.20220588235294118, + "acc_norm_stderr": 0.02439819298665492 + }, + 
"harness|ko_mmlu_security_studies|5": { + "acc": 0.17142857142857143, + "acc_stderr": 0.024127463462650146, + "acc_norm": 0.17142857142857143, + "acc_norm_stderr": 0.024127463462650146 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.25738396624472576, + "acc_stderr": 0.0284588209914603, + "acc_norm": 0.25738396624472576, + "acc_norm_stderr": 0.0284588209914603 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2392438070404172, + "acc_stderr": 0.010896123652676653, + "acc_norm": 0.2392438070404172, + "acc_norm_stderr": 0.010896123652676653 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.02977177522814563, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.02977177522814563 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.24242424242424243, + "acc_stderr": 0.033464098810559534, + "acc_norm": 0.24242424242424243, + "acc_norm_stderr": 0.033464098810559534 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24357405140758873, + "mc1_stderr": 0.015026354824910782, + "mc2": 0.4992612656093796, + "mc2_stderr": 0.016216678646274893 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2255017709563164, + "acc_stderr": 0.014368122149532182, + "acc_norm": 0.5572609208972845, + "acc_norm_stderr": 0.017077254131556224 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + 
"harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + 
"harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "blueapple8259/TinyKo-V2", + "model_sha": "ad90efb3381d5672fa95cc202734e341710e83e8", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/blueapple8259/TinyKo-V3/result_2023-12-23 12:03:51.json b/blueapple8259/TinyKo-V3/result_2023-12-23 12:03:51.json new file mode 100644 index 0000000000000000000000000000000000000000..3b61ce0eee049b57caa981582baa8140a8e467a2 --- /dev/null +++ b/blueapple8259/TinyKo-V3/result_2023-12-23 12:03:51.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.1962457337883959, + "acc_stderr": 0.011606019881416279, + "acc_norm": 0.2525597269624573, + "acc_norm_stderr": 0.012696728980207708 + }, + "harness|ko_hellaswag|10": { + "acc": 0.26180043815972914, + "acc_stderr": 0.004387161203087963, + "acc_norm": 0.26558454491137223, + "acc_norm_stderr": 0.004407413723383402 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.32748538011695905, + "acc_stderr": 0.035993357714560276, + "acc_norm": 0.32748538011695905, + "acc_norm_stderr": 0.035993357714560276 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.18446601941747573, + "acc_stderr": 0.03840423627288276, + "acc_norm": 0.18446601941747573, + "acc_norm_stderr": 0.03840423627288276 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.26436781609195403, + "acc_stderr": 0.01576998484069052, + "acc_norm": 0.26436781609195403, + "acc_norm_stderr": 0.01576998484069052 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.18518518518518517, + "acc_stderr": 0.03355677216313142, + "acc_norm": 0.18518518518518517, + "acc_norm_stderr": 0.03355677216313142 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847415, + "acc_norm": 0.27, + "acc_norm_stderr": 
0.044619604333847415 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2297872340425532, + "acc_stderr": 0.027501752944412428, + "acc_norm": 0.2297872340425532, + "acc_norm_stderr": 0.027501752944412428 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.2891566265060241, + "acc_stderr": 0.035294868015111155, + "acc_norm": 0.2891566265060241, + "acc_norm_stderr": 0.035294868015111155 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.26366559485530544, + "acc_stderr": 0.02502553850053234, + "acc_norm": 0.26366559485530544, + "acc_norm_stderr": 0.02502553850053234 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.336322869955157, + "acc_stderr": 0.031708824268455005, + "acc_norm": 0.336322869955157, + "acc_norm_stderr": 0.031708824268455005 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.31297709923664124, + "acc_stderr": 0.04066962905677698, + "acc_norm": 0.31297709923664124, + "acc_norm_stderr": 0.04066962905677698 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.02962022787479048, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.02962022787479048 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2620689655172414, + "acc_stderr": 0.036646663372252565, + "acc_norm": 0.2620689655172414, + "acc_norm_stderr": 0.036646663372252565 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.04023382273617747, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.04023382273617747 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.19327731092436976, + "acc_stderr": 0.025649470265889183, + "acc_norm": 0.19327731092436976, + "acc_norm_stderr": 0.025649470265889183 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.19487179487179487, + "acc_stderr": 
0.02008316759518139, + "acc_norm": 0.19487179487179487, + "acc_norm_stderr": 0.02008316759518139 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816505, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816505 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.042365112580946336, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.042365112580946336 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.22660098522167488, + "acc_stderr": 0.029454863835292982, + "acc_norm": 0.22660098522167488, + "acc_norm_stderr": 0.029454863835292982 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2161290322580645, + "acc_stderr": 0.02341529343356852, + "acc_norm": 0.2161290322580645, + "acc_norm_stderr": 0.02341529343356852 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2692307692307692, + "acc_stderr": 0.029058588303748845, + "acc_norm": 0.2692307692307692, + "acc_norm_stderr": 0.029058588303748845 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.22641509433962265, + "acc_stderr": 0.025757559893106748, + "acc_norm": 0.22641509433962265, + "acc_norm_stderr": 0.025757559893106748 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.19090909090909092, + "acc_stderr": 0.03764425585984926, + "acc_norm": 0.19090909090909092, + "acc_norm_stderr": 0.03764425585984926 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25555555555555554, + "acc_stderr": 0.026593939101844075, + "acc_norm": 0.25555555555555554, + "acc_norm_stderr": 0.026593939101844075 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.23841059602649006, + "acc_stderr": 0.0347918557259966, + "acc_norm": 0.23841059602649006, + "acc_norm_stderr": 0.0347918557259966 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 
0.2537313432835821, + "acc_stderr": 0.03076944496729601, + "acc_norm": 0.2537313432835821, + "acc_norm_stderr": 0.03076944496729601 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.19653179190751446, + "acc_stderr": 0.030299574664788147, + "acc_norm": 0.19653179190751446, + "acc_norm_stderr": 0.030299574664788147 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2566137566137566, + "acc_stderr": 0.022494510767503154, + "acc_norm": 0.2566137566137566, + "acc_norm_stderr": 0.022494510767503154 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.03745554791462457, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.03745554791462457 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.24855491329479767, + "acc_stderr": 0.023267528432100174, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 0.023267528432100174 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2392638036809816, + "acc_stderr": 0.033519538795212696, + "acc_norm": 0.2392638036809816, + "acc_norm_stderr": 0.033519538795212696 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.023132376234543346, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.023132376234543346 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.23316062176165803, + "acc_stderr": 0.030516111371476008, + "acc_norm": 0.23316062176165803, + "acc_norm_stderr": 0.030516111371476008 + }, + 
"harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.04049339297748141, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.04049339297748141 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.22201834862385322, + "acc_stderr": 0.017818849564796638, + "acc_norm": 0.22201834862385322, + "acc_norm_stderr": 0.017818849564796638 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2698412698412698, + "acc_stderr": 0.03970158273235172, + "acc_norm": 0.2698412698412698, + "acc_norm_stderr": 0.03970158273235172 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.22875816993464052, + "acc_stderr": 0.024051029739912258, + "acc_norm": 0.22875816993464052, + "acc_norm_stderr": 0.024051029739912258 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.256198347107438, + "acc_stderr": 0.03984979653302872, + "acc_norm": 0.256198347107438, + "acc_norm_stderr": 0.03984979653302872 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.19078947368421054, + "acc_stderr": 0.03197565821032501, + "acc_norm": 0.19078947368421054, + "acc_norm_stderr": 0.03197565821032501 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.01690661592728815, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.01690661592728815 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2801418439716312, + "acc_stderr": 0.026789172351140242, + "acc_norm": 0.2801418439716312, + "acc_norm_stderr": 0.026789172351140242 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.21428571428571427, + "acc_stderr": 0.038946411200447915, + "acc_norm": 0.21428571428571427, + "acc_norm_stderr": 0.038946411200447915 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.25462962962962965, + "acc_stderr": 0.02971127586000536, + "acc_norm": 
0.25462962962962965, + "acc_norm_stderr": 0.02971127586000536 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23798882681564246, + "acc_stderr": 0.014242630070574892, + "acc_norm": 0.23798882681564246, + "acc_norm_stderr": 0.014242630070574892 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036844, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036844 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.16544117647058823, + "acc_stderr": 0.022571771025494757, + "acc_norm": 0.16544117647058823, + "acc_norm_stderr": 0.022571771025494757 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.22448979591836735, + "acc_stderr": 0.02671143055553839, + "acc_norm": 0.22448979591836735, + "acc_norm_stderr": 0.02671143055553839 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2742616033755274, + "acc_stderr": 0.029041333510598046, + "acc_norm": 0.2742616033755274, + "acc_norm_stderr": 0.029041333510598046 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.24445893089960888, + "acc_stderr": 0.010976425013113902, + "acc_norm": 0.24445893089960888, + "acc_norm_stderr": 0.010976425013113902 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.25, + "acc_stderr": 0.03039153369274154, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03039153369274154 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.22424242424242424, + "acc_stderr": 0.032568666616811015, + "acc_norm": 0.22424242424242424, + "acc_norm_stderr": 0.032568666616811015 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2607099143206854, + "mc1_stderr": 0.015368841620766375, + "mc2": 0.5182014726410905, + "mc2_stderr": 0.01613083897465494 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2255017709563164, + "acc_stderr": 
0.01436812214953218, + "acc_norm": 0.5466351829988194, + "acc_norm_stderr": 0.017115418225226862 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + 
"harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "blueapple8259/TinyKo-V3", + "model_sha": "e1105108a78beec3508e6a6ee0591aac17e97df9", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/blueapple8259/TinyKo-V4/result_2024-01-11 12:54:19.json b/blueapple8259/TinyKo-V4/result_2024-01-11 12:54:19.json new file mode 100644 index 0000000000000000000000000000000000000000..05f08f61817aa353dae4b8e54cdce79b42bcaa6d --- /dev/null +++ b/blueapple8259/TinyKo-V4/result_2024-01-11 12:54:19.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.19368600682593856, + "acc_stderr": 0.01154842540997854, + "acc_norm": 0.2363481228668942, + "acc_norm_stderr": 0.012414960524301836 + }, + "harness|ko_hellaswag|10": { + "acc": 0.25871340370444135, + "acc_stderr": 0.004370328224831781, + "acc_norm": 0.24975104560844452, + "acc_norm_stderr": 0.004319842107724392 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.03615507630310933, + "acc_norm": 0.3333333333333333, + 
"acc_norm_stderr": 0.03615507630310933 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.1650485436893204, + "acc_stderr": 0.03675668832233188, + "acc_norm": 0.1650485436893204, + "acc_norm_stderr": 0.03675668832233188 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.23754789272030652, + "acc_stderr": 0.015218733046150191, + "acc_norm": 0.23754789272030652, + "acc_norm_stderr": 0.015218733046150191 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.18518518518518517, + "acc_stderr": 0.03355677216313142, + "acc_norm": 0.18518518518518517, + "acc_norm_stderr": 0.03355677216313142 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932268, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932268 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2723404255319149, + "acc_stderr": 0.029101290698386684, + "acc_norm": 0.2723404255319149, + "acc_norm_stderr": 0.029101290698386684 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3072289156626506, + "acc_stderr": 0.03591566797824665, + "acc_norm": 0.3072289156626506, + "acc_norm_stderr": 0.03591566797824665 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.18006430868167203, + "acc_stderr": 0.021823422857744947, + "acc_norm": 0.18006430868167203, + "acc_norm_stderr": 0.021823422857744947 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.31390134529147984, + "acc_stderr": 0.031146796482972465, + "acc_norm": 0.31390134529147984, + "acc_norm_stderr": 0.031146796482972465 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.26717557251908397, + "acc_stderr": 0.03880848301082396, + "acc_norm": 0.26717557251908397, + "acc_norm_stderr": 0.03880848301082396 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.16666666666666666, + "acc_stderr": 0.026552207828215293, + "acc_norm": 0.16666666666666666, 
+ "acc_norm_stderr": 0.026552207828215293 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2206896551724138, + "acc_stderr": 0.03455930201924812, + "acc_norm": 0.2206896551724138, + "acc_norm_stderr": 0.03455930201924812 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.18627450980392157, + "acc_stderr": 0.038739587141493524, + "acc_norm": 0.18627450980392157, + "acc_norm_stderr": 0.038739587141493524 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.21008403361344538, + "acc_stderr": 0.026461398717471874, + "acc_norm": 0.21008403361344538, + "acc_norm_stderr": 0.026461398717471874 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.20256410256410257, + "acc_stderr": 0.020377660970371393, + "acc_norm": 0.20256410256410257, + "acc_norm_stderr": 0.020377660970371393 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.18, + "acc_stderr": 0.038612291966536955, + "acc_norm": 0.18, + "acc_norm_stderr": 0.038612291966536955 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.04236511258094633, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.04236511258094633 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.15763546798029557, + "acc_stderr": 0.025639014131172404, + "acc_norm": 0.15763546798029557, + "acc_norm_stderr": 0.025639014131172404 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.16774193548387098, + "acc_stderr": 0.02125546406537133, + "acc_norm": 0.16774193548387098, + "acc_norm_stderr": 0.02125546406537133 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2905982905982906, + "acc_stderr": 0.029745048572674057, + "acc_norm": 0.2905982905982906, + "acc_norm_stderr": 0.029745048572674057 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.21509433962264152, + 
"acc_stderr": 0.025288394502891377, + "acc_norm": 0.21509433962264152, + "acc_norm_stderr": 0.025288394502891377 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03955932861795833, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03955932861795833 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2111111111111111, + "acc_stderr": 0.024882116857655075, + "acc_norm": 0.2111111111111111, + "acc_norm_stderr": 0.024882116857655075 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.19205298013245034, + "acc_stderr": 0.03216298420593613, + "acc_norm": 0.19205298013245034, + "acc_norm_stderr": 0.03216298420593613 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.23383084577114427, + "acc_stderr": 0.029929415408348377, + "acc_norm": 0.23383084577114427, + "acc_norm_stderr": 0.029929415408348377 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2138728323699422, + "acc_stderr": 0.031265112061730424, + "acc_norm": 0.2138728323699422, + "acc_norm_stderr": 0.031265112061730424 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.20634920634920634, + "acc_stderr": 0.02084229093011465, + "acc_norm": 0.20634920634920634, + "acc_norm_stderr": 0.02084229093011465 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2569444444444444, + "acc_stderr": 0.03653946969442099, + "acc_norm": 0.2569444444444444, + "acc_norm_stderr": 0.03653946969442099 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.2, + "acc_stderr": 0.040201512610368445, + "acc_norm": 0.2, + "acc_norm_stderr": 0.040201512610368445 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542129, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542129 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.24855491329479767, + "acc_stderr": 0.023267528432100174, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 0.023267528432100174 + }, + 
"harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.22085889570552147, + "acc_stderr": 0.03259177392742178, + "acc_norm": 0.22085889570552147, + "acc_norm_stderr": 0.03259177392742178 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.21604938271604937, + "acc_stderr": 0.022899162918445796, + "acc_norm": 0.21604938271604937, + "acc_norm_stderr": 0.022899162918445796 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.19, + "acc_stderr": 0.039427724440366234, + "acc_norm": 0.19, + "acc_norm_stderr": 0.039427724440366234 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.19689119170984457, + "acc_stderr": 0.028697873971860677, + "acc_norm": 0.19689119170984457, + "acc_norm_stderr": 0.028697873971860677 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.039994238792813365, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.039994238792813365 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.1908256880733945, + "acc_stderr": 0.016847676400091105, + "acc_norm": 0.1908256880733945, + "acc_norm_stderr": 0.016847676400091105 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.04040610178208841, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.04040610178208841 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.02392915551735129, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.02392915551735129 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2396694214876033, + "acc_stderr": 0.038968789850704164, + "acc_norm": 0.2396694214876033, + "acc_norm_stderr": 0.038968789850704164 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.17763157894736842, + "acc_stderr": 0.03110318238312338, + "acc_norm": 0.17763157894736842, + 
"acc_norm_stderr": 0.03110318238312338 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.25, + "acc_stderr": 0.01751781884501444, + "acc_norm": 0.25, + "acc_norm_stderr": 0.01751781884501444 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.23404255319148937, + "acc_stderr": 0.025257861359432414, + "acc_norm": 0.23404255319148937, + "acc_norm_stderr": 0.025257861359432414 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3125, + "acc_stderr": 0.043994650575715215, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.043994650575715215 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.1527777777777778, + "acc_stderr": 0.024536326026134238, + "acc_norm": 0.1527777777777778, + "acc_norm_stderr": 0.024536326026134238 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23798882681564246, + "acc_stderr": 0.014242630070574892, + "acc_norm": 0.23798882681564246, + "acc_norm_stderr": 0.014242630070574892 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.18382352941176472, + "acc_stderr": 0.02352924218519311, + "acc_norm": 0.18382352941176472, + "acc_norm_stderr": 0.02352924218519311 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.18775510204081633, + "acc_stderr": 0.025000256039546198, + "acc_norm": 0.18775510204081633, + "acc_norm_stderr": 0.025000256039546198 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.26582278481012656, + "acc_stderr": 0.02875679962965834, + "acc_norm": 0.26582278481012656, + "acc_norm_stderr": 0.02875679962965834 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2457627118644068, + "acc_stderr": 0.010996156635142692, + 
"acc_norm": 0.2457627118644068, + "acc_norm_stderr": 0.010996156635142692 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.25, + "acc_stderr": 0.03039153369274154, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03039153369274154 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.21212121212121213, + "acc_stderr": 0.031922715695482995, + "acc_norm": 0.21212121212121213, + "acc_norm_stderr": 0.031922715695482995 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24969400244798043, + "mc1_stderr": 0.015152286907148125, + "mc2": 0.4962249494707522, + "mc2_stderr": 0.016236816419762445 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.21605667060212513, + "acc_stderr": 0.01414949671604312, + "acc_norm": 0.5478158205430933, + "acc_norm_stderr": 0.017111567130916796 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "blueapple8259/TinyKo-V4", + "model_sha": "5862f0e15fa9c24ee541242b9cfdc60c3b80c04a", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/blueapple8259/TinyKo-v5-a/result_2024-02-03 05:48:58.json b/blueapple8259/TinyKo-v5-a/result_2024-02-03 05:48:58.json new file mode 100644 index 
0000000000000000000000000000000000000000..abf428818803347abea46edaf36b4d910a185b0a --- /dev/null +++ b/blueapple8259/TinyKo-v5-a/result_2024-02-03 05:48:58.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2150170648464164, + "acc_stderr": 0.012005717634133614, + "acc_norm": 0.24658703071672355, + "acc_norm_stderr": 0.012595726268790129 + }, + "harness|ko_hellaswag|10": { + "acc": 0.2678749253136825, + "acc_stderr": 0.004419469983939183, + "acc_norm": 0.2800238996215893, + "acc_norm_stderr": 0.004480929450281556 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.30409356725146197, + "acc_stderr": 0.03528211258245232, + "acc_norm": 0.30409356725146197, + "acc_norm_stderr": 0.03528211258245232 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.17475728155339806, + "acc_stderr": 0.037601780060266196, + "acc_norm": 0.17475728155339806, + "acc_norm_stderr": 0.037601780060266196 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.24010217113665389, + "acc_stderr": 0.015274685213734195, + "acc_norm": 0.24010217113665389, + "acc_norm_stderr": 0.015274685213734195 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.16296296296296298, + "acc_stderr": 0.0319054147448284, + "acc_norm": 0.16296296296296298, + "acc_norm_stderr": 0.0319054147448284 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.20851063829787234, + "acc_stderr": 0.026556982117838725, + "acc_norm": 0.20851063829787234, + "acc_norm_stderr": 0.026556982117838725 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.28313253012048195, + "acc_stderr": 0.03507295431370518, + "acc_norm": 0.28313253012048195, + "acc_norm_stderr": 0.03507295431370518 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.28938906752411575, + "acc_stderr": 0.025755865922632945, + "acc_norm": 0.28938906752411575, + 
"acc_norm_stderr": 0.025755865922632945 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3183856502242152, + "acc_stderr": 0.03126580522513713, + "acc_norm": 0.3183856502242152, + "acc_norm_stderr": 0.03126580522513713 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.22137404580152673, + "acc_stderr": 0.0364129708131373, + "acc_norm": 0.22137404580152673, + "acc_norm_stderr": 0.0364129708131373 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.20202020202020202, + "acc_stderr": 0.028606204289229865, + "acc_norm": 0.20202020202020202, + "acc_norm_stderr": 0.028606204289229865 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.23448275862068965, + "acc_stderr": 0.035306258743465914, + "acc_norm": 0.23448275862068965, + "acc_norm_stderr": 0.035306258743465914 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237654, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237654 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.22268907563025211, + "acc_stderr": 0.027025433498882385, + "acc_norm": 0.22268907563025211, + "acc_norm_stderr": 0.027025433498882385 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.19487179487179487, + "acc_stderr": 0.020083167595181393, + "acc_norm": 0.19487179487179487, + "acc_norm_stderr": 0.020083167595181393 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.18, + "acc_stderr": 0.038612291966536955, + "acc_norm": 0.18, + "acc_norm_stderr": 0.038612291966536955 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.19, + "acc_stderr": 0.03942772444036623, + "acc_norm": 0.19, + "acc_norm_stderr": 0.03942772444036623 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.21296296296296297, + "acc_stderr": 0.03957835471980981, + 
"acc_norm": 0.21296296296296297, + "acc_norm_stderr": 0.03957835471980981 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.15763546798029557, + "acc_stderr": 0.025639014131172404, + "acc_norm": 0.15763546798029557, + "acc_norm_stderr": 0.025639014131172404 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2, + "acc_stderr": 0.022755204959542936, + "acc_norm": 0.2, + "acc_norm_stderr": 0.022755204959542936 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.20512820512820512, + "acc_stderr": 0.026453508054040332, + "acc_norm": 0.20512820512820512, + "acc_norm_stderr": 0.026453508054040332 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.24150943396226415, + "acc_stderr": 0.026341480371118362, + "acc_norm": 0.24150943396226415, + "acc_norm_stderr": 0.026341480371118362 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03955932861795832, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03955932861795832 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.026962424325073838, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.026962424325073838 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.18543046357615894, + "acc_stderr": 0.03173284384294285, + "acc_norm": 0.18543046357615894, + "acc_norm_stderr": 0.03173284384294285 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.2885572139303483, + "acc_stderr": 0.03203841040213321, + "acc_norm": 0.2885572139303483, + "acc_norm_stderr": 0.03203841040213321 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.21965317919075145, + "acc_stderr": 0.031568093627031744, + "acc_norm": 0.21965317919075145, + "acc_norm_stderr": 0.031568093627031744 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.24867724867724866, + "acc_stderr": 0.022261817692400175, + "acc_norm": 0.24867724867724866, + "acc_norm_stderr": 0.022261817692400175 + }, + 
"harness|ko_mmlu_college_biology|5": { + "acc": 0.2569444444444444, + "acc_stderr": 0.03653946969442099, + "acc_norm": 0.2569444444444444, + "acc_norm_stderr": 0.03653946969442099 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.22254335260115607, + "acc_stderr": 0.02239421566194282, + "acc_norm": 0.22254335260115607, + "acc_norm_stderr": 0.02239421566194282 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.25766871165644173, + "acc_stderr": 0.03436150827846917, + "acc_norm": 0.25766871165644173, + "acc_norm_stderr": 0.03436150827846917 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.21604938271604937, + "acc_stderr": 0.022899162918445796, + "acc_norm": 0.21604938271604937, + "acc_norm_stderr": 0.022899162918445796 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.22, + "acc_stderr": 0.0416333199893227, + "acc_norm": 0.22, + "acc_norm_stderr": 0.0416333199893227 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.25906735751295334, + "acc_stderr": 0.0316187791793541, + "acc_norm": 0.25906735751295334, + "acc_norm_stderr": 0.0316187791793541 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.22807017543859648, + "acc_stderr": 0.03947152782669415, + "acc_norm": 0.22807017543859648, + "acc_norm_stderr": 0.03947152782669415 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.22752293577981653, + "acc_stderr": 0.017974463578776495, + "acc_norm": 0.22752293577981653, + "acc_norm_stderr": 0.017974463578776495 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.23809523809523808, + "acc_stderr": 0.0380952380952381, + "acc_norm": 0.23809523809523808, + "acc_norm_stderr": 
0.0380952380952381 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.22875816993464052, + "acc_stderr": 0.024051029739912258, + "acc_norm": 0.22875816993464052, + "acc_norm_stderr": 0.024051029739912258 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.256198347107438, + "acc_stderr": 0.0398497965330287, + "acc_norm": 0.256198347107438, + "acc_norm_stderr": 0.0398497965330287 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.17763157894736842, + "acc_stderr": 0.03110318238312338, + "acc_norm": 0.17763157894736842, + "acc_norm_stderr": 0.03110318238312338 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2434640522875817, + "acc_stderr": 0.017362473762146627, + "acc_norm": 0.2434640522875817, + "acc_norm_stderr": 0.017362473762146627 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.25177304964539005, + "acc_stderr": 0.0258921511567094, + "acc_norm": 0.25177304964539005, + "acc_norm_stderr": 0.0258921511567094 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.17857142857142858, + "acc_stderr": 0.036352091215778065, + "acc_norm": 0.17857142857142858, + "acc_norm_stderr": 0.036352091215778065 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.16666666666666666, + "acc_stderr": 0.025416428388767478, + "acc_norm": 0.16666666666666666, + "acc_norm_stderr": 0.025416428388767478 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24692737430167597, + "acc_stderr": 0.014422292204808852, + "acc_norm": 0.24692737430167597, + "acc_norm_stderr": 0.014422292204808852 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 
0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.19852941176470587, + "acc_stderr": 0.024231013370541107, + "acc_norm": 0.19852941176470587, + "acc_norm_stderr": 0.024231013370541107 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.23265306122448978, + "acc_stderr": 0.02704925791589618, + "acc_norm": 0.23265306122448978, + "acc_norm_stderr": 0.02704925791589618 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.24050632911392406, + "acc_stderr": 0.027820781981149678, + "acc_norm": 0.24050632911392406, + "acc_norm_stderr": 0.027820781981149678 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.24185136897001303, + "acc_stderr": 0.010936550813827052, + "acc_norm": 0.24185136897001303, + "acc_norm_stderr": 0.010936550813827052 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.22058823529411764, + "acc_stderr": 0.02910225438967409, + "acc_norm": 0.22058823529411764, + "acc_norm_stderr": 0.02910225438967409 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.21212121212121213, + "acc_stderr": 0.03192271569548299, + "acc_norm": 0.21212121212121213, + "acc_norm_stderr": 0.03192271569548299 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2484700122399021, + "mc1_stderr": 0.01512742709652069, + "mc2": 0.4499720901598634, + "mc2_stderr": 0.015863470782751174 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.27863046044864226, + "acc_stderr": 0.015413739494345673, + "acc_norm": 0.42739079102715466, + "acc_norm_stderr": 0.017008129844823156 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + 
"harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + 
"harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "blueapple8259/TinyKo-v5-a", + "model_sha": "bba4c4262254649603cb53cc62a76140e81fc74a", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/blueapple8259/TinyKo-v5-b/result_2024-02-03 05:49:06.json b/blueapple8259/TinyKo-v5-b/result_2024-02-03 05:49:06.json new file mode 100644 index 0000000000000000000000000000000000000000..7b1c90d574d41996b28b7319b361710440f2dd06 --- /dev/null +++ b/blueapple8259/TinyKo-v5-b/result_2024-02-03 05:49:06.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.20648464163822525, + "acc_stderr": 0.011828865619002316, + "acc_norm": 0.2440273037542662, + "acc_norm_stderr": 0.012551447627856262 + }, + "harness|ko_hellaswag|10": { + "acc": 0.2665803624775941, + "acc_stderr": 0.0044126741709764605, + "acc_norm": 0.2810197171878112, + "acc_norm_stderr": 0.0044857844685766615 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.3216374269005848, + "acc_stderr": 0.03582529442573122, + "acc_norm": 0.3216374269005848, + "acc_norm_stderr": 0.03582529442573122 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.17475728155339806, + "acc_stderr": 0.037601780060266196, + "acc_norm": 0.17475728155339806, + "acc_norm_stderr": 0.037601780060266196 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.23754789272030652, + "acc_stderr": 0.015218733046150191, + "acc_norm": 0.23754789272030652, + "acc_norm_stderr": 0.015218733046150191 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.17777777777777778, + "acc_stderr": 
0.0330278985990172, + "acc_norm": 0.17777777777777778, + "acc_norm_stderr": 0.0330278985990172 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2, + "acc_stderr": 0.0261488180184245, + "acc_norm": 0.2, + "acc_norm_stderr": 0.0261488180184245 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.2710843373493976, + "acc_stderr": 0.03460579907553027, + "acc_norm": 0.2710843373493976, + "acc_norm_stderr": 0.03460579907553027 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2829581993569132, + "acc_stderr": 0.025583062489984827, + "acc_norm": 0.2829581993569132, + "acc_norm_stderr": 0.025583062489984827 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.31390134529147984, + "acc_stderr": 0.031146796482972465, + "acc_norm": 0.31390134529147984, + "acc_norm_stderr": 0.031146796482972465 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.24427480916030533, + "acc_stderr": 0.037683359597287434, + "acc_norm": 0.24427480916030533, + "acc_norm_stderr": 0.037683359597287434 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.1717171717171717, + "acc_stderr": 0.026869716187429914, + "acc_norm": 0.1717171717171717, + "acc_norm_stderr": 0.026869716187429914 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.25517241379310346, + "acc_stderr": 0.03632984052707842, + "acc_norm": 0.25517241379310346, + "acc_norm_stderr": 0.03632984052707842 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237654, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237654 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.19747899159663865, + "acc_stderr": 
0.02585916412205146, + "acc_norm": 0.19747899159663865, + "acc_norm_stderr": 0.02585916412205146 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2076923076923077, + "acc_stderr": 0.02056753956724679, + "acc_norm": 0.2076923076923077, + "acc_norm_stderr": 0.02056753956724679 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909284, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909284 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.18, + "acc_stderr": 0.038612291966536955, + "acc_norm": 0.18, + "acc_norm_stderr": 0.038612291966536955 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.21296296296296297, + "acc_stderr": 0.03957835471980981, + "acc_norm": 0.21296296296296297, + "acc_norm_stderr": 0.03957835471980981 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.15270935960591134, + "acc_stderr": 0.025308904539380627, + "acc_norm": 0.15270935960591134, + "acc_norm_stderr": 0.025308904539380627 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.23225806451612904, + "acc_stderr": 0.024022256130308235, + "acc_norm": 0.23225806451612904, + "acc_norm_stderr": 0.024022256130308235 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.20085470085470086, + "acc_stderr": 0.02624677294689049, + "acc_norm": 0.20085470085470086, + "acc_norm_stderr": 0.02624677294689049 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.25660377358490566, + "acc_stderr": 0.026880647889052, + "acc_norm": 0.25660377358490566, + "acc_norm_stderr": 0.026880647889052 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.18181818181818182, + "acc_stderr": 0.036942843353378024, + "acc_norm": 0.18181818181818182, + "acc_norm_stderr": 0.036942843353378024 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25555555555555554, + "acc_stderr": 0.026593939101844075, + "acc_norm": 0.25555555555555554, + "acc_norm_stderr": 0.026593939101844075 + }, + 
"harness|ko_mmlu_high_school_physics|5": { + "acc": 0.15894039735099338, + "acc_stderr": 0.029852788528701008, + "acc_norm": 0.15894039735099338, + "acc_norm_stderr": 0.029852788528701008 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.263681592039801, + "acc_stderr": 0.031157150869355575, + "acc_norm": 0.263681592039801, + "acc_norm_stderr": 0.031157150869355575 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.21965317919075145, + "acc_stderr": 0.031568093627031744, + "acc_norm": 0.21965317919075145, + "acc_norm_stderr": 0.031568093627031744 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.24867724867724866, + "acc_stderr": 0.022261817692400175, + "acc_norm": 0.24867724867724866, + "acc_norm_stderr": 0.022261817692400175 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.25, + "acc_stderr": 0.03621034121889507, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03621034121889507 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.18, + "acc_stderr": 0.038612291966536955, + "acc_norm": 0.18, + "acc_norm_stderr": 0.038612291966536955 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2254335260115607, + "acc_stderr": 0.022497230190967547, + "acc_norm": 0.2254335260115607, + "acc_norm_stderr": 0.022497230190967547 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.25766871165644173, + "acc_stderr": 0.03436150827846917, + "acc_norm": 0.25766871165644173, + "acc_norm_stderr": 0.03436150827846917 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2345679012345679, + "acc_stderr": 0.023576881744005723, + "acc_norm": 0.2345679012345679, + "acc_norm_stderr": 0.023576881744005723 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.18, + "acc_stderr": 0.03861229196653697, + "acc_norm": 0.18, + "acc_norm_stderr": 0.03861229196653697 + }, + 
"harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.25906735751295334, + "acc_stderr": 0.0316187791793541, + "acc_norm": 0.25906735751295334, + "acc_norm_stderr": 0.0316187791793541 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.042270544512322004, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.042270544512322004 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.22568807339449543, + "acc_stderr": 0.01792308766780305, + "acc_norm": 0.22568807339449543, + "acc_norm_stderr": 0.01792308766780305 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.04006168083848877, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.04006168083848877 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.02392915551735129, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.02392915551735129 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.24793388429752067, + "acc_stderr": 0.039418975265163025, + "acc_norm": 0.24793388429752067, + "acc_norm_stderr": 0.039418975265163025 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.15789473684210525, + "acc_stderr": 0.029674167520101453, + "acc_norm": 0.15789473684210525, + "acc_norm_stderr": 0.029674167520101453 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.24673202614379086, + "acc_stderr": 0.0174408203674025, + "acc_norm": 0.24673202614379086, + "acc_norm_stderr": 0.0174408203674025 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2127659574468085, + "acc_stderr": 0.02441461297430771, + "acc_norm": 0.2127659574468085, + "acc_norm_stderr": 0.02441461297430771 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.33035714285714285, + "acc_stderr": 0.04464285714285714, + 
"acc_norm": 0.33035714285714285, + "acc_norm_stderr": 0.04464285714285714 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.18055555555555555, + "acc_stderr": 0.02623287897149166, + "acc_norm": 0.18055555555555555, + "acc_norm_stderr": 0.02623287897149166 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2435754189944134, + "acc_stderr": 0.014355911964767865, + "acc_norm": 0.2435754189944134, + "acc_norm_stderr": 0.014355911964767865 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421255, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421255 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.18382352941176472, + "acc_stderr": 0.02352924218519311, + "acc_norm": 0.18382352941176472, + "acc_norm_stderr": 0.02352924218519311 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.028920583220675592, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.028920583220675592 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2489451476793249, + "acc_stderr": 0.028146970599422644, + "acc_norm": 0.2489451476793249, + "acc_norm_stderr": 0.028146970599422644 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.24445893089960888, + "acc_stderr": 0.010976425013113895, + "acc_norm": 0.24445893089960888, + "acc_norm_stderr": 0.010976425013113895 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.03019028245350195, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.03019028245350195 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.22424242424242424, + "acc_stderr": 0.032568666616811015, + "acc_norm": 0.22424242424242424, + "acc_norm_stderr": 0.032568666616811015 + }, + 
"harness|ko_truthfulqa_mc|0": { + "mc1": 0.2460220318237454, + "mc1_stderr": 0.01507721920066258, + "mc2": 0.45680011920122654, + "mc2_stderr": 0.015871137134464107 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.29515938606847697, + "acc_stderr": 0.015681535229192186, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.0171191722080615 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "blueapple8259/TinyKo-v5-b", + "model_sha": "735cbc4443568c1a3a8506bf5e3637479f26cdcd", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/blueapple8259/TinyKo-v5-c/result_2024-02-03 05:49:10.json b/blueapple8259/TinyKo-v5-c/result_2024-02-03 05:49:10.json new file mode 100644 index 0000000000000000000000000000000000000000..c9e9c203af0fef56c6f6dbc4a55bec1ba3758e2c --- /dev/null +++ b/blueapple8259/TinyKo-v5-c/result_2024-02-03 05:49:10.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2098976109215017, + "acc_stderr": 0.011900548748047452, + "acc_norm": 0.2525597269624573, + "acc_norm_stderr": 0.01269672898020771 + }, + "harness|ko_hellaswag|10": { + "acc": 0.2688707428799044, + "acc_stderr": 
0.004424664761480209, + "acc_norm": 0.2830113523202549, + "acc_norm_stderr": 0.0044954128683246065 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.32748538011695905, + "acc_stderr": 0.035993357714560276, + "acc_norm": 0.32748538011695905, + "acc_norm_stderr": 0.035993357714560276 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.17475728155339806, + "acc_stderr": 0.037601780060266196, + "acc_norm": 0.17475728155339806, + "acc_norm_stderr": 0.037601780060266196 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2886334610472541, + "acc_stderr": 0.016203792703197793, + "acc_norm": 0.2886334610472541, + "acc_norm_stderr": 0.016203792703197793 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2814814814814815, + "acc_stderr": 0.03885004245800254, + "acc_norm": 0.2814814814814815, + "acc_norm_stderr": 0.03885004245800254 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.20425531914893616, + "acc_stderr": 0.02635515841334941, + "acc_norm": 0.20425531914893616, + "acc_norm_stderr": 0.02635515841334941 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.30120481927710846, + "acc_stderr": 0.035716092300534796, + "acc_norm": 0.30120481927710846, + "acc_norm_stderr": 0.035716092300534796 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2990353697749196, + "acc_stderr": 0.026003301117885142, + "acc_norm": 0.2990353697749196, + "acc_norm_stderr": 0.026003301117885142 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.37668161434977576, + "acc_stderr": 0.03252113489929189, + "acc_norm": 0.37668161434977576, + "acc_norm_stderr": 0.03252113489929189 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.22900763358778625, + "acc_stderr": 0.036853466317118506, + "acc_norm": 0.22900763358778625, + "acc_norm_stderr": 0.036853466317118506 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 
0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.029620227874790458, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.029620227874790458 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.296551724137931, + "acc_stderr": 0.03806142687309994, + "acc_norm": 0.296551724137931, + "acc_norm_stderr": 0.03806142687309994 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.19607843137254902, + "acc_stderr": 0.03950581861179963, + "acc_norm": 0.19607843137254902, + "acc_norm_stderr": 0.03950581861179963 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.19747899159663865, + "acc_stderr": 0.025859164122051456, + "acc_norm": 0.19747899159663865, + "acc_norm_stderr": 0.025859164122051456 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2641025641025641, + "acc_stderr": 0.022352193737453275, + "acc_norm": 0.2641025641025641, + "acc_norm_stderr": 0.022352193737453275 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768077, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768077 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.19, + "acc_stderr": 0.039427724440366234, + "acc_norm": 0.19, + "acc_norm_stderr": 0.039427724440366234 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.0401910747255735, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.0401910747255735 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.18226600985221675, + "acc_stderr": 0.02716334085964515, + "acc_norm": 0.18226600985221675, + "acc_norm_stderr": 0.02716334085964515 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.1935483870967742, + "acc_stderr": 0.022475258525536057, + "acc_norm": 0.1935483870967742, + "acc_norm_stderr": 0.022475258525536057 + }, + 
"harness|ko_mmlu_marketing|5": { + "acc": 0.19230769230769232, + "acc_stderr": 0.025819233256483727, + "acc_norm": 0.19230769230769232, + "acc_norm_stderr": 0.025819233256483727 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.24528301886792453, + "acc_stderr": 0.02648035717989568, + "acc_norm": 0.24528301886792453, + "acc_norm_stderr": 0.02648035717989568 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.17272727272727273, + "acc_stderr": 0.03620691833929217, + "acc_norm": 0.17272727272727273, + "acc_norm_stderr": 0.03620691833929217 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.026842057873833706, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.026842057873833706 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2052980132450331, + "acc_stderr": 0.03297986648473836, + "acc_norm": 0.2052980132450331, + "acc_norm_stderr": 0.03297986648473836 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.2537313432835821, + "acc_stderr": 0.030769444967296024, + "acc_norm": 0.2537313432835821, + "acc_norm_stderr": 0.030769444967296024 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2543352601156069, + "acc_stderr": 0.0332055644308557, + "acc_norm": 0.2543352601156069, + "acc_norm_stderr": 0.0332055644308557 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.022644212615525218, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.022644212615525218 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3125, + "acc_stderr": 0.038760854559127644, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.038760854559127644 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.23, + "acc_stderr": 0.042295258468165065, + "acc_norm": 0.23, + "acc_norm_stderr": 0.042295258468165065 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932269, + "acc_norm": 0.22, + "acc_norm_stderr": 
0.04163331998932269 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.24277456647398843, + "acc_stderr": 0.0230836585869842, + "acc_norm": 0.24277456647398843, + "acc_norm_stderr": 0.0230836585869842 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.22085889570552147, + "acc_stderr": 0.032591773927421776, + "acc_norm": 0.22085889570552147, + "acc_norm_stderr": 0.032591773927421776 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.02313237623454335, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.02313237623454335 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.24870466321243523, + "acc_stderr": 0.031195840877700304, + "acc_norm": 0.24870466321243523, + "acc_norm_stderr": 0.031195840877700304 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.04185774424022056, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.04185774424022056 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.23302752293577983, + "acc_stderr": 0.018125669180861493, + "acc_norm": 0.23302752293577983, + "acc_norm_stderr": 0.018125669180861493 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.19047619047619047, + "acc_stderr": 0.03512207412302054, + "acc_norm": 0.19047619047619047, + "acc_norm_stderr": 0.03512207412302054 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.21895424836601307, + "acc_stderr": 0.02367908986180772, + "acc_norm": 0.21895424836601307, + "acc_norm_stderr": 0.02367908986180772 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.24793388429752067, + "acc_stderr": 0.039418975265163025, + "acc_norm": 
0.24793388429752067, + "acc_norm_stderr": 0.039418975265163025 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.16447368421052633, + "acc_stderr": 0.030167533468632723, + "acc_norm": 0.16447368421052633, + "acc_norm_stderr": 0.030167533468632723 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2107843137254902, + "acc_stderr": 0.016500472979024787, + "acc_norm": 0.2107843137254902, + "acc_norm_stderr": 0.016500472979024787 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2553191489361702, + "acc_stderr": 0.02601199293090201, + "acc_norm": 0.2553191489361702, + "acc_norm_stderr": 0.02601199293090201 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.23214285714285715, + "acc_stderr": 0.04007341809755807, + "acc_norm": 0.23214285714285715, + "acc_norm_stderr": 0.04007341809755807 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.41203703703703703, + "acc_stderr": 0.03356787758160835, + "acc_norm": 0.41203703703703703, + "acc_norm_stderr": 0.03356787758160835 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24692737430167597, + "acc_stderr": 0.014422292204808852, + "acc_norm": 0.24692737430167597, + "acc_norm_stderr": 0.014422292204808852 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.2426470588235294, + "acc_stderr": 0.02604066247420126, + "acc_norm": 0.2426470588235294, + "acc_norm_stderr": 0.02604066247420126 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.22448979591836735, + "acc_stderr": 0.02671143055553843, + "acc_norm": 0.22448979591836735, + "acc_norm_stderr": 0.02671143055553843 + }, + "harness|ko_mmlu_high_school_world_history|5": { + 
"acc": 0.2616033755274262, + "acc_stderr": 0.028609516716994934, + "acc_norm": 0.2616033755274262, + "acc_norm_stderr": 0.028609516716994934 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2405475880052151, + "acc_stderr": 0.010916406735478947, + "acc_norm": 0.2405475880052151, + "acc_norm_stderr": 0.010916406735478947 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.029771775228145638, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.029771775228145638 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2606060606060606, + "acc_stderr": 0.03427743175816524, + "acc_norm": 0.2606060606060606, + "acc_norm_stderr": 0.03427743175816524 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2521419828641371, + "mc1_stderr": 0.015201522246299956, + "mc2": 0.47447948332820183, + "mc2_stderr": 0.01606931986013532 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2680047225501771, + "acc_stderr": 0.015227905796335147, + "acc_norm": 0.4628099173553719, + "acc_norm_stderr": 0.017142736117643297 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + 
"harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "blueapple8259/TinyKo-v5-c", + "model_sha": "1e9afa7f481cec5143239c679b55dd08b48106bd", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, 
+ "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/blueapple8259/TinyKo/result_2023-12-10 12:47:01.json b/blueapple8259/TinyKo/result_2023-12-10 12:47:01.json new file mode 100644 index 0000000000000000000000000000000000000000..c60c09224f26347991cee493998b2fbb61702226 --- /dev/null +++ b/blueapple8259/TinyKo/result_2023-12-10 12:47:01.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.20392491467576793, + "acc_stderr": 0.011774262478702254, + "acc_norm": 0.2721843003412969, + "acc_norm_stderr": 0.013006600406423704 + }, + "harness|ko_hellaswag|10": { + "acc": 0.25054769966142204, + "acc_stderr": 0.004324428538963687, + "acc_norm": 0.25144393547102173, + "acc_norm_stderr": 0.004329565016527316 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.17543859649122806, + "acc_stderr": 0.029170885500727665, + "acc_norm": 0.17543859649122806, + "acc_norm_stderr": 0.029170885500727665 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.3786407766990291, + "acc_stderr": 0.04802694698258973, + "acc_norm": 0.3786407766990291, + "acc_norm_stderr": 0.04802694698258973 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.20434227330779056, + "acc_stderr": 0.014419123980931904, + "acc_norm": 0.20434227330779056, + "acc_norm_stderr": 0.014419123980931904 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.22962962962962963, + "acc_stderr": 0.036333844140734636, + "acc_norm": 0.22962962962962963, + "acc_norm_stderr": 0.036333844140734636 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.20851063829787234, + "acc_stderr": 0.026556982117838746, + "acc_norm": 0.20851063829787234, + "acc_norm_stderr": 0.026556982117838746 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.1927710843373494, + "acc_stderr": 
0.030709824050565264, + "acc_norm": 0.1927710843373494, + "acc_norm_stderr": 0.030709824050565264 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.24115755627009647, + "acc_stderr": 0.024296594034763426, + "acc_norm": 0.24115755627009647, + "acc_norm_stderr": 0.024296594034763426 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.10762331838565023, + "acc_stderr": 0.020799400082880004, + "acc_norm": 0.10762331838565023, + "acc_norm_stderr": 0.020799400082880004 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2824427480916031, + "acc_stderr": 0.03948406125768361, + "acc_norm": 0.2824427480916031, + "acc_norm_stderr": 0.03948406125768361 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909284, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909284 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.35353535353535354, + "acc_stderr": 0.03406086723547153, + "acc_norm": 0.35353535353535354, + "acc_norm_stderr": 0.03406086723547153 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2413793103448276, + "acc_stderr": 0.03565998174135302, + "acc_norm": 0.2413793103448276, + "acc_norm_stderr": 0.03565998174135302 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3627450980392157, + "acc_stderr": 0.04784060704105653, + "acc_norm": 0.3627450980392157, + "acc_norm_stderr": 0.04784060704105653 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3487394957983193, + "acc_stderr": 0.030956636328566548, + "acc_norm": 0.3487394957983193, + "acc_norm_stderr": 0.030956636328566548 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.34615384615384615, + "acc_stderr": 0.024121125416941176, + "acc_norm": 0.34615384615384615, + "acc_norm_stderr": 0.024121125416941176 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.18, + "acc_stderr": 0.03861229196653694, + "acc_norm": 0.18, + "acc_norm_stderr": 0.03861229196653694 + }, + "harness|ko_mmlu_global_facts|5": { 
+ "acc": 0.18, + "acc_stderr": 0.038612291966536955, + "acc_norm": 0.18, + "acc_norm_stderr": 0.038612291966536955 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.21296296296296297, + "acc_stderr": 0.03957835471980981, + "acc_norm": 0.21296296296296297, + "acc_norm_stderr": 0.03957835471980981 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.28078817733990147, + "acc_stderr": 0.0316185633535861, + "acc_norm": 0.28078817733990147, + "acc_norm_stderr": 0.0316185633535861 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3161290322580645, + "acc_stderr": 0.026450874489042767, + "acc_norm": 0.3161290322580645, + "acc_norm_stderr": 0.026450874489042767 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.19658119658119658, + "acc_stderr": 0.02603538609895129, + "acc_norm": 0.19658119658119658, + "acc_norm_stderr": 0.02603538609895129 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2981132075471698, + "acc_stderr": 0.028152837942493864, + "acc_norm": 0.2981132075471698, + "acc_norm_stderr": 0.028152837942493864 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.22727272727272727, + "acc_stderr": 0.040139645540727735, + "acc_norm": 0.22727272727272727, + "acc_norm_stderr": 0.040139645540727735 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.02684205787383371, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.02684205787383371 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33112582781456956, + "acc_stderr": 0.038425817186598696, + "acc_norm": 0.33112582781456956, + "acc_norm_stderr": 0.038425817186598696 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.26865671641791045, + "acc_stderr": 0.03134328358208954, + "acc_norm": 0.26865671641791045, + "acc_norm_stderr": 0.03134328358208954 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3352601156069364, + "acc_stderr": 0.03599586301247078, + "acc_norm": 0.3352601156069364, + "acc_norm_stderr": 
0.03599586301247078 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2671957671957672, + "acc_stderr": 0.02278967314577656, + "acc_norm": 0.2671957671957672, + "acc_norm_stderr": 0.02278967314577656 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2638888888888889, + "acc_stderr": 0.03685651095897532, + "acc_norm": 0.2638888888888889, + "acc_norm_stderr": 0.03685651095897532 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.19, + "acc_stderr": 0.03942772444036622, + "acc_norm": 0.19, + "acc_norm_stderr": 0.03942772444036622 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2138728323699422, + "acc_stderr": 0.022075709251757173, + "acc_norm": 0.2138728323699422, + "acc_norm_stderr": 0.022075709251757173 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2331288343558282, + "acc_stderr": 0.0332201579577674, + "acc_norm": 0.2331288343558282, + "acc_norm_stderr": 0.0332201579577674 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.22530864197530864, + "acc_stderr": 0.02324620264781975, + "acc_norm": 0.22530864197530864, + "acc_norm_stderr": 0.02324620264781975 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.36787564766839376, + "acc_stderr": 0.034801756684660366, + "acc_norm": 0.36787564766839376, + "acc_norm_stderr": 0.034801756684660366 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.039994238792813365, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.039994238792813365 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3486238532110092, + "acc_stderr": 0.020431254090714328, + "acc_norm": 0.3486238532110092, 
+ "acc_norm_stderr": 0.020431254090714328 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.36507936507936506, + "acc_stderr": 0.04306241259127153, + "acc_norm": 0.36507936507936506, + "acc_norm_stderr": 0.04306241259127153 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.026090162504279053, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.026090162504279053 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.14049586776859505, + "acc_stderr": 0.031722334260021585, + "acc_norm": 0.14049586776859505, + "acc_norm_stderr": 0.031722334260021585 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3355263157894737, + "acc_stderr": 0.038424985593952694, + "acc_norm": 0.3355263157894737, + "acc_norm_stderr": 0.038424985593952694 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.20751633986928106, + "acc_stderr": 0.016405924270103234, + "acc_norm": 0.20751633986928106, + "acc_norm_stderr": 0.016405924270103234 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.24468085106382978, + "acc_stderr": 0.02564555362226673, + "acc_norm": 0.24468085106382978, + "acc_norm_stderr": 0.02564555362226673 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.16071428571428573, + "acc_stderr": 0.034859460964757394, + "acc_norm": 0.16071428571428573, + "acc_norm_stderr": 0.034859460964757394 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.0340470532865388, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.0340470532865388 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + 
"acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.19, + "acc_stderr": 0.03942772444036624, + "acc_norm": 0.19, + "acc_norm_stderr": 0.03942772444036624 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4485294117647059, + "acc_stderr": 0.030211479609121593, + "acc_norm": 0.4485294117647059, + "acc_norm_stderr": 0.030211479609121593 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4, + "acc_stderr": 0.03136250240935892, + "acc_norm": 0.4, + "acc_norm_stderr": 0.03136250240935892 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.20253164556962025, + "acc_stderr": 0.026160568246601464, + "acc_norm": 0.20253164556962025, + "acc_norm_stderr": 0.026160568246601464 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.25097783572359844, + "acc_stderr": 0.011073730299187234, + "acc_norm": 0.25097783572359844, + "acc_norm_stderr": 0.011073730299187234 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.030964517926923403, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.030964517926923403 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.30303030303030304, + "acc_stderr": 0.03588624800091707, + "acc_norm": 0.30303030303030304, + "acc_norm_stderr": 0.03588624800091707 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2484700122399021, + "mc1_stderr": 0.015127427096520698, + "mc2": 0.5110615894861509, + "mc2_stderr": 0.015927383181781492 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.11452184179456906, + "acc_stderr": 0.010948330698808921, + "acc_norm": 0.39669421487603307, + "acc_norm_stderr": 0.016819438642971408 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + 
"harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + 
"harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "blueapple8259/TinyKo", + "model_sha": "2cc49adce5ca81f1cb4b406d68c3e80d0270e4d9", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/blueapple8259/TinyKoWiki-v1/result_2023-12-25 02:41:39.json b/blueapple8259/TinyKoWiki-v1/result_2023-12-25 02:41:39.json new file mode 100644 index 0000000000000000000000000000000000000000..a9688fae89da5234e94a78654b1a9a38ea4073e8 --- /dev/null +++ b/blueapple8259/TinyKoWiki-v1/result_2023-12-25 02:41:39.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.1945392491467577, + "acc_stderr": 0.011567709174648728, + "acc_norm": 0.23208191126279865, + "acc_norm_stderr": 0.012336718284948853 + }, + "harness|ko_hellaswag|10": { + "acc": 0.2528380800637323, + "acc_stderr": 0.0043375063448999164, + "acc_norm": 0.25144393547102173, + "acc_norm_stderr": 0.004329565016527321 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.21052631578947367, + "acc_stderr": 0.0312678171466318, + "acc_norm": 0.21052631578947367, + "acc_norm_stderr": 0.0312678171466318 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.1941747572815534, + "acc_stderr": 0.03916667762822584, + "acc_norm": 0.1941747572815534, + "acc_norm_stderr": 0.03916667762822584 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2669220945083014, + 
"acc_stderr": 0.015818450894777573, + "acc_norm": 0.2669220945083014, + "acc_norm_stderr": 0.015818450894777573 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.03785714465066654, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.03785714465066654 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768077, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768077 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2978723404255319, + "acc_stderr": 0.02989614568209546, + "acc_norm": 0.2978723404255319, + "acc_norm_stderr": 0.02989614568209546 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.20481927710843373, + "acc_stderr": 0.03141784291663925, + "acc_norm": 0.20481927710843373, + "acc_norm_stderr": 0.03141784291663925 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2861736334405145, + "acc_stderr": 0.025670259242188936, + "acc_norm": 0.2861736334405145, + "acc_norm_stderr": 0.025670259242188936 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.2600896860986547, + "acc_stderr": 0.029442495585857476, + "acc_norm": 0.2600896860986547, + "acc_norm_stderr": 0.029442495585857476 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2595419847328244, + "acc_stderr": 0.03844876139785271, + "acc_norm": 0.2595419847328244, + "acc_norm_stderr": 0.03844876139785271 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.19, + "acc_stderr": 0.03942772444036625, + "acc_norm": 0.19, + "acc_norm_stderr": 0.03942772444036625 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.25252525252525254, + "acc_stderr": 0.030954055470365907, + "acc_norm": 0.25252525252525254, + "acc_norm_stderr": 0.030954055470365907 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.20689655172413793, + "acc_stderr": 0.03375672449560554, + "acc_norm": 0.20689655172413793, + "acc_norm_stderr": 0.03375672449560554 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 
0.18627450980392157, + "acc_stderr": 0.03873958714149352, + "acc_norm": 0.18627450980392157, + "acc_norm_stderr": 0.03873958714149352 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.21428571428571427, + "acc_stderr": 0.02665353159671548, + "acc_norm": 0.21428571428571427, + "acc_norm_stderr": 0.02665353159671548 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2128205128205128, + "acc_stderr": 0.020752423722127995, + "acc_norm": 0.2128205128205128, + "acc_norm_stderr": 0.020752423722127995 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.23148148148148148, + "acc_stderr": 0.04077494709252627, + "acc_norm": 0.23148148148148148, + "acc_norm_stderr": 0.04077494709252627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.32019704433497537, + "acc_stderr": 0.032826493853041504, + "acc_norm": 0.32019704433497537, + "acc_norm_stderr": 0.032826493853041504 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.23225806451612904, + "acc_stderr": 0.02402225613030824, + "acc_norm": 0.23225806451612904, + "acc_norm_stderr": 0.02402225613030824 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.24358974358974358, + "acc_stderr": 0.028120966503914407, + "acc_norm": 0.24358974358974358, + "acc_norm_stderr": 0.028120966503914407 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.23018867924528302, + "acc_stderr": 0.025907897122408173, + "acc_norm": 0.23018867924528302, + "acc_norm_stderr": 0.025907897122408173 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2, + "acc_stderr": 0.038313051408846006, + "acc_norm": 0.2, + "acc_norm_stderr": 0.038313051408846006 + }, + 
"harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.027309140588230172, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.027309140588230172 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2185430463576159, + "acc_stderr": 0.033742355504256936, + "acc_norm": 0.2185430463576159, + "acc_norm_stderr": 0.033742355504256936 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.24378109452736318, + "acc_stderr": 0.030360490154014666, + "acc_norm": 0.24378109452736318, + "acc_norm_stderr": 0.030360490154014666 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.23699421965317918, + "acc_stderr": 0.03242414757483099, + "acc_norm": 0.23699421965317918, + "acc_norm_stderr": 0.03242414757483099 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.25132275132275134, + "acc_stderr": 0.022340482339643898, + "acc_norm": 0.25132275132275134, + "acc_norm_stderr": 0.022340482339643898 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2638888888888889, + "acc_stderr": 0.03685651095897532, + "acc_norm": 0.2638888888888889, + "acc_norm_stderr": 0.03685651095897532 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2832369942196532, + "acc_stderr": 0.024257901705323374, + "acc_norm": 0.2832369942196532, + "acc_norm_stderr": 0.024257901705323374 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.25153374233128833, + "acc_stderr": 0.03408997886857529, + "acc_norm": 0.25153374233128833, + "acc_norm_stderr": 0.03408997886857529 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.025407197798890165, + "acc_norm": 0.2962962962962963, + 
"acc_norm_stderr": 0.025407197798890165 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.22, + "acc_stderr": 0.0416333199893227, + "acc_norm": 0.22, + "acc_norm_stderr": 0.0416333199893227 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.21761658031088082, + "acc_stderr": 0.029778663037752947, + "acc_norm": 0.21761658031088082, + "acc_norm_stderr": 0.029778663037752947 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.04049339297748142, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.04049339297748142 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.21284403669724772, + "acc_stderr": 0.01754937638931369, + "acc_norm": 0.21284403669724772, + "acc_norm_stderr": 0.01754937638931369 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.24603174603174602, + "acc_stderr": 0.03852273364924316, + "acc_norm": 0.24603174603174602, + "acc_norm_stderr": 0.03852273364924316 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.02463004897982478, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.02463004897982478 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2396694214876033, + "acc_stderr": 0.038968789850704164, + "acc_norm": 0.2396694214876033, + "acc_norm_stderr": 0.038968789850704164 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.17763157894736842, + "acc_stderr": 0.03110318238312337, + "acc_norm": 0.17763157894736842, + "acc_norm_stderr": 0.03110318238312337 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2630718954248366, + "acc_stderr": 0.01781267654232065, + "acc_norm": 0.2630718954248366, + "acc_norm_stderr": 0.01781267654232065 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.23049645390070922, + "acc_stderr": 
0.025123739226872405, + "acc_norm": 0.23049645390070922, + "acc_norm_stderr": 0.025123739226872405 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.23214285714285715, + "acc_stderr": 0.04007341809755807, + "acc_norm": 0.23214285714285715, + "acc_norm_stderr": 0.04007341809755807 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.19444444444444445, + "acc_stderr": 0.02699145450203673, + "acc_norm": 0.19444444444444445, + "acc_norm_stderr": 0.02699145450203673 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24804469273743016, + "acc_stderr": 0.014444157808261453, + "acc_norm": 0.24804469273743016, + "acc_norm_stderr": 0.014444157808261453 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.19852941176470587, + "acc_stderr": 0.024231013370541087, + "acc_norm": 0.19852941176470587, + "acc_norm_stderr": 0.024231013370541087 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.21224489795918366, + "acc_stderr": 0.026176967197866767, + "acc_norm": 0.21224489795918366, + "acc_norm_stderr": 0.026176967197866767 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.26582278481012656, + "acc_stderr": 0.028756799629658332, + "acc_norm": 0.26582278481012656, + "acc_norm_stderr": 0.028756799629658332 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2457627118644068, + "acc_stderr": 0.010996156635142692, + "acc_norm": 0.2457627118644068, + "acc_norm_stderr": 0.010996156635142692 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.029771775228145628, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.029771775228145628 + }, 
+ "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.03453131801885416, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.03453131801885416 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.23623011015911874, + "mc1_stderr": 0.0148697550158711, + "mc2": 0.4810748840249365, + "mc2_stderr": 0.01638238136567759 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.18890200708382526, + "acc_stderr": 0.013457666696421402, + "acc_norm": 0.4781582054309327, + "acc_norm_stderr": 0.017173944474294378 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + 
"harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "blueapple8259/TinyKoWiki-v1", + "model_sha": "42bb6fcd1ed5b902e4ca07e25358c8a13c3e5bb3", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/blueapple8259/squirrel-c4-ko-cleaned-2/result_2024-07-20 09:08:54.json b/blueapple8259/squirrel-c4-ko-cleaned-2/result_2024-07-20 09:08:54.json new file mode 100644 index 0000000000000000000000000000000000000000..2cd51fba2e3255dfa34ea227bb9af38c4a06e54a --- /dev/null +++ b/blueapple8259/squirrel-c4-ko-cleaned-2/result_2024-07-20 09:08:54.json @@ -0,0 +1,444 @@ +{ + "results": { + 
"harness|ko_arc_challenge|25": { + "acc": 0.21331058020477817, + "acc_stderr": 0.011970971742326334, + "acc_norm": 0.24658703071672355, + "acc_norm_stderr": 0.012595726268790124 + }, + "harness|ko_hellaswag|10": { + "acc": 0.27763393746265685, + "acc_stderr": 0.0044691657286003335, + "acc_norm": 0.29655447122087236, + "acc_norm_stderr": 0.004558049018764659 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.033773102522091945, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.033773102522091945 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.17475728155339806, + "acc_stderr": 0.037601780060266196, + "acc_norm": 0.17475728155339806, + "acc_norm_stderr": 0.037601780060266196 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2848020434227331, + "acc_stderr": 0.016139174096522588, + "acc_norm": 0.2848020434227331, + "acc_norm_stderr": 0.016139174096522588 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04072314811876837, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04072314811876837 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.20851063829787234, + "acc_stderr": 0.02655698211783875, + "acc_norm": 0.20851063829787234, + "acc_norm_stderr": 0.02655698211783875 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.26506024096385544, + "acc_stderr": 0.03436024037944967, + "acc_norm": 0.26506024096385544, + "acc_norm_stderr": 0.03436024037944967 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2990353697749196, + "acc_stderr": 0.02600330111788514, + "acc_norm": 0.2990353697749196, + "acc_norm_stderr": 0.02600330111788514 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.35874439461883406, + "acc_stderr": 0.03219079200419997, + "acc_norm": 0.35874439461883406, + "acc_norm_stderr": 
0.03219079200419997 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2824427480916031, + "acc_stderr": 0.03948406125768362, + "acc_norm": 0.2824427480916031, + "acc_norm_stderr": 0.03948406125768362 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.029620227874790458, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.029620227874790458 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2896551724137931, + "acc_stderr": 0.03780019230438015, + "acc_norm": 0.2896551724137931, + "acc_norm_stderr": 0.03780019230438015 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237654, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237654 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.2184873949579832, + "acc_stderr": 0.02684151432295893, + "acc_norm": 0.2184873949579832, + "acc_norm_stderr": 0.02684151432295893 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3435897435897436, + "acc_stderr": 0.024078696580635477, + "acc_norm": 0.3435897435897436, + "acc_norm_stderr": 0.024078696580635477 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.19, + "acc_stderr": 0.039427724440366234, + "acc_norm": 0.19, + "acc_norm_stderr": 0.039427724440366234 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.21296296296296297, + "acc_stderr": 0.03957835471980981, + "acc_norm": 0.21296296296296297, + "acc_norm_stderr": 0.03957835471980981 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.29064039408866993, + "acc_stderr": 0.0319474007226554, + "acc_norm": 
0.29064039408866993, + "acc_norm_stderr": 0.0319474007226554 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.25483870967741934, + "acc_stderr": 0.024790118459332204, + "acc_norm": 0.25483870967741934, + "acc_norm_stderr": 0.024790118459332204 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.19658119658119658, + "acc_stderr": 0.02603538609895129, + "acc_norm": 0.19658119658119658, + "acc_norm_stderr": 0.02603538609895129 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.21509433962264152, + "acc_stderr": 0.025288394502891363, + "acc_norm": 0.21509433962264152, + "acc_norm_stderr": 0.025288394502891363 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03955932861795833, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03955932861795833 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24444444444444444, + "acc_stderr": 0.02620276653465215, + "acc_norm": 0.24444444444444444, + "acc_norm_stderr": 0.02620276653465215 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.03710185726119995, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.03710185726119995 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.24875621890547264, + "acc_stderr": 0.030567675938916707, + "acc_norm": 0.24875621890547264, + "acc_norm_stderr": 0.030567675938916707 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2254335260115607, + "acc_stderr": 0.03186209851641145, + "acc_norm": 0.2254335260115607, + "acc_norm_stderr": 0.03186209851641145 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2566137566137566, + "acc_stderr": 0.022494510767503154, + "acc_norm": 0.2566137566137566, + "acc_norm_stderr": 0.022494510767503154 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2361111111111111, + "acc_stderr": 0.03551446610810826, + "acc_norm": 0.2361111111111111, + "acc_norm_stderr": 0.03551446610810826 + }, + 
"harness|ko_mmlu_college_chemistry|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.23, + "acc_stderr": 0.042295258468165044, + "acc_norm": 0.23, + "acc_norm_stderr": 0.042295258468165044 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.24855491329479767, + "acc_stderr": 0.023267528432100174, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 0.023267528432100174 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3006134969325153, + "acc_stderr": 0.03602511318806771, + "acc_norm": 0.3006134969325153, + "acc_norm_stderr": 0.03602511318806771 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.02438366553103546, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.02438366553103546 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.31088082901554404, + "acc_stderr": 0.03340361906276586, + "acc_norm": 0.31088082901554404, + "acc_norm_stderr": 0.03340361906276586 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.04049339297748141, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.04049339297748141 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.22568807339449543, + "acc_stderr": 0.017923087667803057, + "acc_norm": 0.22568807339449543, + "acc_norm_stderr": 0.017923087667803057 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.18253968253968253, + "acc_stderr": 0.03455071019102148, + "acc_norm": 0.18253968253968253, + "acc_norm_stderr": 0.03455071019102148 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.02355083135199509, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 
0.02355083135199509 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768077, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768077 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2644628099173554, + "acc_stderr": 0.04026187527591207, + "acc_norm": 0.2644628099173554, + "acc_norm_stderr": 0.04026187527591207 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.17763157894736842, + "acc_stderr": 0.03110318238312338, + "acc_norm": 0.17763157894736842, + "acc_norm_stderr": 0.03110318238312338 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2173202614379085, + "acc_stderr": 0.016684820929148594, + "acc_norm": 0.2173202614379085, + "acc_norm_stderr": 0.016684820929148594 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.22695035460992907, + "acc_stderr": 0.024987106365642962, + "acc_norm": 0.22695035460992907, + "acc_norm_stderr": 0.024987106365642962 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2767857142857143, + "acc_stderr": 0.04246624336697624, + "acc_norm": 0.2767857142857143, + "acc_norm_stderr": 0.04246624336697624 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.03400603625538272, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.03400603625538272 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23128491620111732, + "acc_stderr": 0.014102223623152589, + "acc_norm": 0.23128491620111732, + "acc_norm_stderr": 0.014102223623152589 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4338235294117647, + "acc_stderr": 0.030105636570016636, + "acc_norm": 
0.4338235294117647, + "acc_norm_stderr": 0.030105636570016636 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.24489795918367346, + "acc_stderr": 0.027529637440174923, + "acc_norm": 0.24489795918367346, + "acc_norm_stderr": 0.027529637440174923 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2616033755274262, + "acc_stderr": 0.028609516716994934, + "acc_norm": 0.2616033755274262, + "acc_norm_stderr": 0.028609516716994934 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2457627118644068, + "acc_stderr": 0.010996156635142692, + "acc_norm": 0.2457627118644068, + "acc_norm_stderr": 0.010996156635142692 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.030587591351604246, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.030587591351604246 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03225078108306289, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03225078108306289 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2668298653610771, + "mc1_stderr": 0.015483691939237276, + "mc2": 0.47437910057987187, + "mc2_stderr": 0.015647489121829936 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3577331759149941, + "acc_stderr": 0.016479808935749976, + "acc_norm": 0.5407319952774499, + "acc_norm_stderr": 0.01713321827653767 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + 
"harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + 
"harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "blueapple8259/squirrel-c4-ko-cleaned-2", + "model_sha": "6430156b58fa4e7d961dcb02504977bb8dd31539", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/blueapple8259/squirrel-c4-ko-cleaned/result_2024-07-06 10:16:51.json b/blueapple8259/squirrel-c4-ko-cleaned/result_2024-07-06 10:16:51.json new file mode 100644 index 0000000000000000000000000000000000000000..c414b6e750f4ee31f8ad39e152b653f81d4b3d24 --- /dev/null +++ b/blueapple8259/squirrel-c4-ko-cleaned/result_2024-07-06 10:16:51.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.20477815699658702, + "acc_stderr": 0.011792544338513393, + "acc_norm": 0.25426621160409557, + "acc_norm_stderr": 0.012724999945157743 + }, + "harness|ko_hellaswag|10": { + "acc": 0.2650866361282613, + "acc_stderr": 0.00440477273576597, + "acc_norm": 0.2720573590918144, + "acc_norm_stderr": 0.004441097782370493 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.30409356725146197, + "acc_stderr": 0.03528211258245232, + "acc_norm": 0.30409356725146197, + "acc_norm_stderr": 0.03528211258245232 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.18446601941747573, + "acc_stderr": 0.03840423627288276, + "acc_norm": 0.18446601941747573, + "acc_norm_stderr": 0.03840423627288276 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2120051085568327, + "acc_stderr": 0.014616099385833681, + "acc_norm": 0.2120051085568327, + "acc_norm_stderr": 0.014616099385833681 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3111111111111111, + "acc_stderr": 0.03999262876617722, + "acc_norm": 0.3111111111111111, + "acc_norm_stderr": 0.03999262876617722 + }, + "harness|ko_mmlu_abstract_algebra|5": { + 
"acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2425531914893617, + "acc_stderr": 0.028020226271200214, + "acc_norm": 0.2425531914893617, + "acc_norm_stderr": 0.028020226271200214 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.29518072289156627, + "acc_stderr": 0.0355092018568963, + "acc_norm": 0.29518072289156627, + "acc_norm_stderr": 0.0355092018568963 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.21543408360128619, + "acc_stderr": 0.02335022547547142, + "acc_norm": 0.21543408360128619, + "acc_norm_stderr": 0.02335022547547142 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.1210762331838565, + "acc_stderr": 0.02189417411318573, + "acc_norm": 0.1210762331838565, + "acc_norm_stderr": 0.02189417411318573 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.25190839694656486, + "acc_stderr": 0.03807387116306086, + "acc_norm": 0.25190839694656486, + "acc_norm_stderr": 0.03807387116306086 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.34, + "acc_stderr": 0.047609522856952365, + "acc_norm": 0.34, + "acc_norm_stderr": 0.047609522856952365 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.20202020202020202, + "acc_stderr": 0.02860620428922987, + "acc_norm": 0.20202020202020202, + "acc_norm_stderr": 0.02860620428922987 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2482758620689655, + "acc_stderr": 0.036001056927277716, + "acc_norm": 0.2482758620689655, + "acc_norm_stderr": 0.036001056927277716 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237654, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237654 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.2773109243697479, + "acc_stderr": 0.029079374539480007, + "acc_norm": 0.2773109243697479, + "acc_norm_stderr": 0.029079374539480007 + }, + 
"harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.30512820512820515, + "acc_stderr": 0.023346335293325887, + "acc_norm": 0.30512820512820515, + "acc_norm_stderr": 0.023346335293325887 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036845, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036845 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.18, + "acc_stderr": 0.038612291966536955, + "acc_norm": 0.18, + "acc_norm_stderr": 0.038612291966536955 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.04330043749650742, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.04330043749650742 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.1921182266009852, + "acc_stderr": 0.027719315709614778, + "acc_norm": 0.1921182266009852, + "acc_norm_stderr": 0.027719315709614778 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2, + "acc_stderr": 0.022755204959542936, + "acc_norm": 0.2, + "acc_norm_stderr": 0.022755204959542936 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.28205128205128205, + "acc_stderr": 0.029480360549541194, + "acc_norm": 0.28205128205128205, + "acc_norm_stderr": 0.029480360549541194 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.27169811320754716, + "acc_stderr": 0.027377706624670713, + "acc_norm": 0.27169811320754716, + "acc_norm_stderr": 0.027377706624670713 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2, + "acc_stderr": 0.03831305140884603, + "acc_norm": 0.2, + "acc_norm_stderr": 0.03831305140884603 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2037037037037037, + "acc_stderr": 0.024556172219141262, + "acc_norm": 0.2037037037037037, + "acc_norm_stderr": 0.024556172219141262 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2052980132450331, + "acc_stderr": 0.03297986648473835, + "acc_norm": 0.2052980132450331, + "acc_norm_stderr": 0.03297986648473835 + }, + 
"harness|ko_mmlu_sociology|5": { + "acc": 0.21890547263681592, + "acc_stderr": 0.029239174636647, + "acc_norm": 0.21890547263681592, + "acc_norm_stderr": 0.029239174636647 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3352601156069364, + "acc_stderr": 0.03599586301247078, + "acc_norm": 0.3352601156069364, + "acc_norm_stderr": 0.03599586301247078 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.21428571428571427, + "acc_stderr": 0.021132859182754423, + "acc_norm": 0.21428571428571427, + "acc_norm_stderr": 0.021132859182754423 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2361111111111111, + "acc_stderr": 0.03551446610810826, + "acc_norm": 0.2361111111111111, + "acc_norm_stderr": 0.03551446610810826 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542129, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542129 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.23410404624277456, + "acc_stderr": 0.022797110278071138, + "acc_norm": 0.23410404624277456, + "acc_norm_stderr": 0.022797110278071138 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.22699386503067484, + "acc_stderr": 0.0329109957861577, + "acc_norm": 0.22699386503067484, + "acc_norm_stderr": 0.0329109957861577 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.22839506172839505, + "acc_stderr": 0.023358211840626267, + "acc_norm": 0.22839506172839505, + "acc_norm_stderr": 0.023358211840626267 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.19170984455958548, + "acc_stderr": 0.028408953626245282, + "acc_norm": 0.19170984455958548, + "acc_norm_stderr": 
0.028408953626245282 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.039994238792813365, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.039994238792813365 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.21467889908256882, + "acc_stderr": 0.017604304149256494, + "acc_norm": 0.21467889908256882, + "acc_norm_stderr": 0.017604304149256494 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.04006168083848877, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.04006168083848877 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.30392156862745096, + "acc_stderr": 0.026336613469046626, + "acc_norm": 0.30392156862745096, + "acc_norm_stderr": 0.026336613469046626 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720685, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720685 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2396694214876033, + "acc_stderr": 0.038968789850704164, + "acc_norm": 0.2396694214876033, + "acc_norm_stderr": 0.038968789850704164 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.1513157894736842, + "acc_stderr": 0.02916263159684399, + "acc_norm": 0.1513157894736842, + "acc_norm_stderr": 0.02916263159684399 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.25326797385620914, + "acc_stderr": 0.01759348689536683, + "acc_norm": 0.25326797385620914, + "acc_norm_stderr": 0.01759348689536683 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2375886524822695, + "acc_stderr": 0.025389512552729906, + "acc_norm": 0.2375886524822695, + "acc_norm_stderr": 0.025389512552729906 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.042878587513404544, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.042878587513404544 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.1527777777777778, + "acc_stderr": 
0.024536326026134238, + "acc_norm": 0.1527777777777778, + "acc_norm_stderr": 0.024536326026134238 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23798882681564246, + "acc_stderr": 0.014242630070574892, + "acc_norm": 0.23798882681564246, + "acc_norm_stderr": 0.014242630070574892 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816507, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816507 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.22058823529411764, + "acc_stderr": 0.02518778666022728, + "acc_norm": 0.22058823529411764, + "acc_norm_stderr": 0.02518778666022728 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.19591836734693877, + "acc_stderr": 0.02540930195322568, + "acc_norm": 0.19591836734693877, + "acc_norm_stderr": 0.02540930195322568 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.3080168776371308, + "acc_stderr": 0.03005238933560569, + "acc_norm": 0.3080168776371308, + "acc_norm_stderr": 0.03005238933560569 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.24837027379400262, + "acc_stderr": 0.011035212598034501, + "acc_norm": 0.24837027379400262, + "acc_norm_stderr": 0.011035212598034501 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.03019028245350194, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.03019028245350194 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.20606060606060606, + "acc_stderr": 0.0315841532404771, + "acc_norm": 0.20606060606060606, + "acc_norm_stderr": 0.0315841532404771 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2558139534883721, + "mc1_stderr": 0.015274176219283345, + "mc2": 0.5147593768009856, + "mc2_stderr": 0.015977999590545248 + }, + 
"harness|ko_commongen_v2|2": { + "acc": 0.2857142857142857, + "acc_stderr": 0.01553162078698674, + "acc_norm": 0.6115702479338843, + "acc_norm_stderr": 0.01675692157106942 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + 
"harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "blueapple8259/squirrel-c4-ko-cleaned", + "model_sha": "7c18533c1f3c9d90f7d57d3f17b429d16c3a996e", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/blueapple8259/test_model1/result_2023-12-05 03:42:51.json b/blueapple8259/test_model1/result_2023-12-05 03:42:51.json new file mode 100644 index 0000000000000000000000000000000000000000..1e2132945f14d66ae44c0748e07854fe3006741b --- /dev/null +++ b/blueapple8259/test_model1/result_2023-12-05 03:42:51.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2030716723549488, + "acc_stderr": 0.011755899303705582, + "acc_norm": 0.25426621160409557, + "acc_norm_stderr": 0.01272499994515774 + }, + "harness|ko_hellaswag|10": { + "acc": 0.2535351523600876, + "acc_stderr": 0.00434145484189233, + "acc_norm": 0.2502489543915555, + "acc_norm_stderr": 0.004322710911026373 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 
0.2631578947368421, + "acc_stderr": 0.03377310252209195, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.03377310252209195 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.17475728155339806, + "acc_stderr": 0.0376017800602662, + "acc_norm": 0.17475728155339806, + "acc_norm_stderr": 0.0376017800602662 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2784163473818646, + "acc_stderr": 0.01602829518899246, + "acc_norm": 0.2784163473818646, + "acc_norm_stderr": 0.01602829518899246 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3111111111111111, + "acc_stderr": 0.03999262876617723, + "acc_norm": 0.3111111111111111, + "acc_norm_stderr": 0.03999262876617723 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206824, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206824 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2, + "acc_stderr": 0.0261488180184245, + "acc_norm": 0.2, + "acc_norm_stderr": 0.0261488180184245 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.19879518072289157, + "acc_stderr": 0.031069390260789413, + "acc_norm": 0.19879518072289157, + "acc_norm_stderr": 0.031069390260789413 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3022508038585209, + "acc_stderr": 0.02608270069539966, + "acc_norm": 0.3022508038585209, + "acc_norm_stderr": 0.02608270069539966 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.336322869955157, + "acc_stderr": 0.031708824268455, + "acc_norm": 0.336322869955157, + "acc_norm_stderr": 0.031708824268455 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.19083969465648856, + "acc_stderr": 0.03446513350752599, + "acc_norm": 0.19083969465648856, + "acc_norm_stderr": 0.03446513350752599 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.19, + "acc_stderr": 0.039427724440366234, + "acc_norm": 0.19, + "acc_norm_stderr": 0.039427724440366234 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.25757575757575757, + "acc_stderr": 
0.03115626951964684, + "acc_norm": 0.25757575757575757, + "acc_norm_stderr": 0.03115626951964684 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.30344827586206896, + "acc_stderr": 0.03831226048850333, + "acc_norm": 0.30344827586206896, + "acc_norm_stderr": 0.03831226048850333 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.042207736591714506, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.042207736591714506 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.24369747899159663, + "acc_stderr": 0.027886828078380565, + "acc_norm": 0.24369747899159663, + "acc_norm_stderr": 0.027886828078380565 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2230769230769231, + "acc_stderr": 0.02110773012724399, + "acc_norm": 0.2230769230769231, + "acc_norm_stderr": 0.02110773012724399 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.04330043749650742, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.04330043749650742 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3103448275862069, + "acc_stderr": 0.032550867699701024, + "acc_norm": 0.3103448275862069, + "acc_norm_stderr": 0.032550867699701024 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.24516129032258063, + "acc_stderr": 0.02447224384089553, + "acc_norm": 0.24516129032258063, + "acc_norm_stderr": 0.02447224384089553 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.027236013946196694, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.027236013946196694 + }, + 
"harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2490566037735849, + "acc_stderr": 0.026616482980501722, + "acc_norm": 0.2490566037735849, + "acc_norm_stderr": 0.026616482980501722 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2, + "acc_stderr": 0.03831305140884601, + "acc_norm": 0.2, + "acc_norm_stderr": 0.03831305140884601 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.02671924078371216, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.02671924078371216 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2251655629139073, + "acc_stderr": 0.03410435282008937, + "acc_norm": 0.2251655629139073, + "acc_norm_stderr": 0.03410435282008937 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.2885572139303483, + "acc_stderr": 0.03203841040213322, + "acc_norm": 0.2885572139303483, + "acc_norm_stderr": 0.03203841040213322 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2023121387283237, + "acc_stderr": 0.03063114553919882, + "acc_norm": 0.2023121387283237, + "acc_norm_stderr": 0.03063114553919882 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2566137566137566, + "acc_stderr": 0.022494510767503154, + "acc_norm": 0.2566137566137566, + "acc_norm_stderr": 0.022494510767503154 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2569444444444444, + "acc_stderr": 0.03653946969442099, + "acc_norm": 0.2569444444444444, + "acc_norm_stderr": 0.03653946969442099 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.28034682080924855, + "acc_stderr": 0.024182427496577615, + "acc_norm": 0.28034682080924855, + "acc_norm_stderr": 0.024182427496577615 
+ }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.24539877300613497, + "acc_stderr": 0.03380939813943353, + "acc_norm": 0.24539877300613497, + "acc_norm_stderr": 0.03380939813943353 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2654320987654321, + "acc_stderr": 0.024569223600460852, + "acc_norm": 0.2654320987654321, + "acc_norm_stderr": 0.024569223600460852 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.22, + "acc_stderr": 0.0416333199893227, + "acc_norm": 0.22, + "acc_norm_stderr": 0.0416333199893227 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.22279792746113988, + "acc_stderr": 0.03003114797764154, + "acc_norm": 0.22279792746113988, + "acc_norm_stderr": 0.03003114797764154 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.04227054451232199, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.04227054451232199 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.23853211009174313, + "acc_stderr": 0.01827257581023187, + "acc_norm": 0.23853211009174313, + "acc_norm_stderr": 0.01827257581023187 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.15079365079365079, + "acc_stderr": 0.03200686497287392, + "acc_norm": 0.15079365079365079, + "acc_norm_stderr": 0.03200686497287392 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.23202614379084968, + "acc_stderr": 0.024170840879341016, + "acc_norm": 0.23202614379084968, + "acc_norm_stderr": 0.024170840879341016 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847415, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847415 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.24793388429752067, + "acc_stderr": 0.039418975265163025, + "acc_norm": 0.24793388429752067, + "acc_norm_stderr": 0.039418975265163025 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.21052631578947367, + "acc_stderr": 0.03317672787533157, + "acc_norm": 0.21052631578947367, + 
"acc_norm_stderr": 0.03317672787533157 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.01740181671142765, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.01740181671142765 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2695035460992908, + "acc_stderr": 0.026469036818590634, + "acc_norm": 0.2695035460992908, + "acc_norm_stderr": 0.026469036818590634 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3125, + "acc_stderr": 0.043994650575715215, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.043994650575715215 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.14814814814814814, + "acc_stderr": 0.024227629273728356, + "acc_norm": 0.14814814814814814, + "acc_norm_stderr": 0.024227629273728356 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2324022346368715, + "acc_stderr": 0.0141259687546734, + "acc_norm": 0.2324022346368715, + "acc_norm_stderr": 0.0141259687546734 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909282, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909282 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.20955882352941177, + "acc_stderr": 0.02472311040767707, + "acc_norm": 0.20955882352941177, + "acc_norm_stderr": 0.02472311040767707 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.22448979591836735, + "acc_stderr": 0.026711430555538433, + "acc_norm": 0.22448979591836735, + "acc_norm_stderr": 0.026711430555538433 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.22784810126582278, + "acc_stderr": 0.027303484599069443, + "acc_norm": 0.22784810126582278, + "acc_norm_stderr": 0.027303484599069443 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.23859191655801826, + 
"acc_stderr": 0.010885929742002205, + "acc_norm": 0.23859191655801826, + "acc_norm_stderr": 0.010885929742002205 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.030190282453501964, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.030190282453501964 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.23636363636363636, + "acc_stderr": 0.033175059300091805, + "acc_norm": 0.23636363636363636, + "acc_norm_stderr": 0.033175059300091805 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2386780905752754, + "mc1_stderr": 0.014922629695456418, + "mc2": 0.49748125321542586, + "mc2_stderr": 0.016335585216269707 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.19834710743801653, + "acc_stderr": 0.013709478542303373, + "acc_norm": 0.5631641086186541, + "acc_norm_stderr": 0.01705263355985607 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + 
"harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "blueapple8259/test_model1", + "model_sha": "4e1233d9d69f819778b8969ea74dc5a01db876b3", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/boracious/llama-2-7b-test/result_2023-12-24 14:02:24.json 
b/boracious/llama-2-7b-test/result_2023-12-24 14:02:24.json new file mode 100644 index 0000000000000000000000000000000000000000..62afce3d6d8c9afab117f1cecc37053228c01af3 --- /dev/null +++ b/boracious/llama-2-7b-test/result_2023-12-24 14:02:24.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.18771331058020477, + "acc_stderr": 0.011411001314155133, + "acc_norm": 0.23976109215017063, + "acc_norm_stderr": 0.012476304127453956 + }, + "harness|ko_hellaswag|10": { + "acc": 0.2492531368253336, + "acc_stderr": 0.004316965678675089, + "acc_norm": 0.24447321250746862, + "acc_norm_stderr": 0.004288960926085629 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.031885780176863984, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.031885780176863984 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2524271844660194, + "acc_stderr": 0.04301250399690877, + "acc_norm": 0.2524271844660194, + "acc_norm_stderr": 0.04301250399690877 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2886334610472541, + "acc_stderr": 0.016203792703197797, + "acc_norm": 0.2886334610472541, + "acc_norm_stderr": 0.016203792703197797 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.24444444444444444, + "acc_stderr": 0.03712537833614866, + "acc_norm": 0.24444444444444444, + "acc_norm_stderr": 0.03712537833614866 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847415, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847415 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.32340425531914896, + "acc_stderr": 0.03057944277361034, + "acc_norm": 0.32340425531914896, + "acc_norm_stderr": 0.03057944277361034 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3192771084337349, + "acc_stderr": 0.03629335329947859, + "acc_norm": 0.3192771084337349, + "acc_norm_stderr": 0.03629335329947859 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2733118971061093, + 
"acc_stderr": 0.025311765975426115, + "acc_norm": 0.2733118971061093, + "acc_norm_stderr": 0.025311765975426115 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3901345291479821, + "acc_stderr": 0.03273766725459156, + "acc_norm": 0.3901345291479821, + "acc_norm_stderr": 0.03273766725459156 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.22900763358778625, + "acc_stderr": 0.036853466317118506, + "acc_norm": 0.22900763358778625, + "acc_norm_stderr": 0.036853466317118506 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384739, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384739 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.029620227874790454, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.029620227874790454 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2206896551724138, + "acc_stderr": 0.03455930201924811, + "acc_norm": 0.2206896551724138, + "acc_norm_stderr": 0.03455930201924811 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.04023382273617747, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.04023382273617747 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.23109243697478993, + "acc_stderr": 0.027381406927868973, + "acc_norm": 0.23109243697478993, + "acc_norm_stderr": 0.027381406927868973 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2230769230769231, + "acc_stderr": 0.02110773012724399, + "acc_norm": 0.2230769230769231, + "acc_norm_stderr": 0.02110773012724399 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 
0.2962962962962963, + "acc_stderr": 0.044143436668549335, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.044143436668549335 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.270935960591133, + "acc_stderr": 0.031270907132976984, + "acc_norm": 0.270935960591133, + "acc_norm_stderr": 0.031270907132976984 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.25483870967741934, + "acc_stderr": 0.024790118459332208, + "acc_norm": 0.25483870967741934, + "acc_norm_stderr": 0.024790118459332208 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2564102564102564, + "acc_stderr": 0.02860595370200424, + "acc_norm": 0.2564102564102564, + "acc_norm_stderr": 0.02860595370200424 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2679245283018868, + "acc_stderr": 0.027257260322494845, + "acc_norm": 0.2679245283018868, + "acc_norm_stderr": 0.027257260322494845 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.34545454545454546, + "acc_stderr": 0.04554619617541054, + "acc_norm": 0.34545454545454546, + "acc_norm_stderr": 0.04554619617541054 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24814814814814815, + "acc_stderr": 0.0263357394040558, + "acc_norm": 0.24814814814814815, + "acc_norm_stderr": 0.0263357394040558 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.17880794701986755, + "acc_stderr": 0.03128744850600724, + "acc_norm": 0.17880794701986755, + "acc_norm_stderr": 0.03128744850600724 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.23880597014925373, + "acc_stderr": 0.030147775935409224, + "acc_norm": 0.23880597014925373, + "acc_norm_stderr": 0.030147775935409224 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.20809248554913296, + "acc_stderr": 0.030952890217749895, + "acc_norm": 0.20809248554913296, + "acc_norm_stderr": 0.030952890217749895 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.25396825396825395, + "acc_stderr": 0.022418042891113946, + "acc_norm": 
0.25396825396825395, + "acc_norm_stderr": 0.022418042891113946 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.03476590104304134, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.03476590104304134 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.24566473988439305, + "acc_stderr": 0.02317629820399201, + "acc_norm": 0.24566473988439305, + "acc_norm_stderr": 0.02317629820399201 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.24539877300613497, + "acc_stderr": 0.03380939813943353, + "acc_norm": 0.24539877300613497, + "acc_norm_stderr": 0.03380939813943353 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.02438366553103545, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.02438366553103545 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.2, + "acc_stderr": 0.040201512610368445, + "acc_norm": 0.2, + "acc_norm_stderr": 0.040201512610368445 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.20725388601036268, + "acc_stderr": 0.029252823291803627, + "acc_norm": 0.20725388601036268, + "acc_norm_stderr": 0.029252823291803627 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.042270544512321984, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.042270544512321984 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.23669724770642203, + "acc_stderr": 0.018224078117299095, + "acc_norm": 0.23669724770642203, + "acc_norm_stderr": 0.018224078117299095 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.1984126984126984, + "acc_stderr": 
0.03567016675276862, + "acc_norm": 0.1984126984126984, + "acc_norm_stderr": 0.03567016675276862 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.22875816993464052, + "acc_stderr": 0.02405102973991225, + "acc_norm": 0.22875816993464052, + "acc_norm_stderr": 0.02405102973991225 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.24793388429752067, + "acc_stderr": 0.039418975265163025, + "acc_norm": 0.24793388429752067, + "acc_norm_stderr": 0.039418975265163025 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.19078947368421054, + "acc_stderr": 0.031975658210325, + "acc_norm": 0.19078947368421054, + "acc_norm_stderr": 0.031975658210325 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.017630827375148383, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.017630827375148383 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2553191489361702, + "acc_stderr": 0.02601199293090201, + "acc_norm": 0.2553191489361702, + "acc_norm_stderr": 0.02601199293090201 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.04287858751340456, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.04287858751340456 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.16203703703703703, + "acc_stderr": 0.025130453652268455, + "acc_norm": 0.16203703703703703, + "acc_norm_stderr": 0.025130453652268455 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.14, + "acc_stderr": 0.0348735088019777, + "acc_norm": 0.14, + "acc_norm_stderr": 0.0348735088019777 + }, + "harness|ko_mmlu_high_school_computer_science|5": 
{ + "acc": 0.23, + "acc_stderr": 0.04229525846816507, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816507 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.20220588235294118, + "acc_stderr": 0.02439819298665492, + "acc_norm": 0.20220588235294118, + "acc_norm_stderr": 0.02439819298665492 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.17142857142857143, + "acc_stderr": 0.024127463462650146, + "acc_norm": 0.17142857142857143, + "acc_norm_stderr": 0.024127463462650146 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2616033755274262, + "acc_stderr": 0.028609516716994934, + "acc_norm": 0.2616033755274262, + "acc_norm_stderr": 0.028609516716994934 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2392438070404172, + "acc_stderr": 0.010896123652676653, + "acc_norm": 0.2392438070404172, + "acc_norm_stderr": 0.010896123652676653 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.029771775228145638, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.029771775228145638 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.24242424242424243, + "acc_stderr": 0.033464098810559534, + "acc_norm": 0.24242424242424243, + "acc_norm_stderr": 0.033464098810559534 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24357405140758873, + "mc1_stderr": 0.015026354824910782, + "mc2": 0.48362811394951233, + "mc2_stderr": 0.017114971034498782 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.0743801652892562, + "acc_stderr": 0.009021104510906089, + "acc_norm": 0.30342384887839435, + "acc_norm_stderr": 0.015806072717909563 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + 
"harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + 
"harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "boracious/llama-2-7b-test", + "model_sha": "e9591396303478caf649713e73939f348109529b", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/c1park/20240105_mistral-step50/result_2024-01-08 00:21:35.json b/c1park/20240105_mistral-step50/result_2024-01-08 00:21:35.json new file mode 100644 index 0000000000000000000000000000000000000000..27df9af1586601b045bb7738f1b735d82a2343cd --- /dev/null +++ b/c1park/20240105_mistral-step50/result_2024-01-08 00:21:35.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3165529010238908, + "acc_stderr": 0.013592431519068079, + "acc_norm": 0.3626279863481229, + "acc_norm_stderr": 0.014049106564955017 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3611830312686716, + "acc_stderr": 0.004793617835645062, + "acc_norm": 0.45956980681139215, + "acc_norm_stderr": 0.004973442060741627 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.52046783625731, + "acc_stderr": 0.038316105328219295, + "acc_norm": 0.52046783625731, + "acc_norm_stderr": 0.038316105328219295 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5533980582524272, + "acc_stderr": 0.04922424153458933, + "acc_norm": 0.5533980582524272, + "acc_norm_stderr": 0.04922424153458933 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4393358876117497, + "acc_stderr": 0.017747874245683616, + "acc_norm": 0.4393358876117497, + "acc_norm_stderr": 0.017747874245683616 + }, + 
"harness|ko_mmlu_anatomy|5": { + "acc": 0.3037037037037037, + "acc_stderr": 0.039725528847851375, + "acc_norm": 0.3037037037037037, + "acc_norm_stderr": 0.039725528847851375 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3659574468085106, + "acc_stderr": 0.0314895582974553, + "acc_norm": 0.3659574468085106, + "acc_norm_stderr": 0.0314895582974553 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4036144578313253, + "acc_stderr": 0.03819486140758397, + "acc_norm": 0.4036144578313253, + "acc_norm_stderr": 0.03819486140758397 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.41479099678456594, + "acc_stderr": 0.02798268045975956, + "acc_norm": 0.41479099678456594, + "acc_norm_stderr": 0.02798268045975956 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4260089686098655, + "acc_stderr": 0.0331883328621728, + "acc_norm": 0.4260089686098655, + "acc_norm_stderr": 0.0331883328621728 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4351145038167939, + "acc_stderr": 0.04348208051644858, + "acc_norm": 0.4351145038167939, + "acc_norm_stderr": 0.04348208051644858 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.35, + "acc_stderr": 0.04793724854411022, + "acc_norm": 0.35, + "acc_norm_stderr": 0.04793724854411022 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5505050505050505, + "acc_stderr": 0.035441324919479704, + "acc_norm": 0.5505050505050505, + "acc_norm_stderr": 0.035441324919479704 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4068965517241379, + "acc_stderr": 0.04093793981266237, + "acc_norm": 0.4068965517241379, + "acc_norm_stderr": 0.04093793981266237 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.04533838195929775, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.04533838195929775 + }, + 
"harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5042016806722689, + "acc_stderr": 0.03247734334448111, + "acc_norm": 0.5042016806722689, + "acc_norm_stderr": 0.03247734334448111 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.43333333333333335, + "acc_stderr": 0.02512465352588513, + "acc_norm": 0.43333333333333335, + "acc_norm_stderr": 0.02512465352588513 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4537037037037037, + "acc_stderr": 0.04812917324536823, + "acc_norm": 0.4537037037037037, + "acc_norm_stderr": 0.04812917324536823 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.37438423645320196, + "acc_stderr": 0.03405155380561952, + "acc_norm": 0.37438423645320196, + "acc_norm_stderr": 0.03405155380561952 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4612903225806452, + "acc_stderr": 0.02835863485983692, + "acc_norm": 0.4612903225806452, + "acc_norm_stderr": 0.02835863485983692 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7393162393162394, + "acc_stderr": 0.02876034895652341, + "acc_norm": 0.7393162393162394, + "acc_norm_stderr": 0.02876034895652341 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4528301886792453, + "acc_stderr": 0.030635627957961827, + "acc_norm": 0.4528301886792453, + "acc_norm_stderr": 0.030635627957961827 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5, + "acc_stderr": 0.04789131426105757, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04789131426105757 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.34814814814814815, + "acc_stderr": 0.02904560029061626, + "acc_norm": 0.34814814814814815, + "acc_norm_stderr": 
0.02904560029061626 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.36423841059602646, + "acc_stderr": 0.03929111781242742, + "acc_norm": 0.36423841059602646, + "acc_norm_stderr": 0.03929111781242742 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6119402985074627, + "acc_stderr": 0.03445789964362749, + "acc_norm": 0.6119402985074627, + "acc_norm_stderr": 0.03445789964362749 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3872832369942196, + "acc_stderr": 0.03714325906302065, + "acc_norm": 0.3872832369942196, + "acc_norm_stderr": 0.03714325906302065 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.35978835978835977, + "acc_stderr": 0.024718075944129277, + "acc_norm": 0.35978835978835977, + "acc_norm_stderr": 0.024718075944129277 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.039420826399272135, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.039420826399272135 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.57, + "acc_stderr": 0.04975698519562427, + "acc_norm": 0.57, + "acc_norm_stderr": 0.04975698519562427 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4797687861271676, + "acc_stderr": 0.026897049996382875, + "acc_norm": 0.4797687861271676, + "acc_norm_stderr": 0.026897049996382875 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.38650306748466257, + "acc_stderr": 0.038258255488486076, + "acc_norm": 0.38650306748466257, + "acc_norm_stderr": 0.038258255488486076 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4351851851851852, + "acc_stderr": 0.027586006221607715, + "acc_norm": 0.4351851851851852, + "acc_norm_stderr": 0.027586006221607715 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + 
"acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.49222797927461137, + "acc_stderr": 0.03608003225569654, + "acc_norm": 0.49222797927461137, + "acc_norm_stderr": 0.03608003225569654 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3508771929824561, + "acc_stderr": 0.04489539350270698, + "acc_norm": 0.3508771929824561, + "acc_norm_stderr": 0.04489539350270698 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.46788990825688076, + "acc_stderr": 0.021393071222680804, + "acc_norm": 0.46788990825688076, + "acc_norm_stderr": 0.021393071222680804 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.38095238095238093, + "acc_stderr": 0.043435254289490986, + "acc_norm": 0.38095238095238093, + "acc_norm_stderr": 0.043435254289490986 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.434640522875817, + "acc_stderr": 0.028384256704883037, + "acc_norm": 0.434640522875817, + "acc_norm_stderr": 0.028384256704883037 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6446280991735537, + "acc_stderr": 0.0436923632657398, + "acc_norm": 0.6446280991735537, + "acc_norm_stderr": 0.0436923632657398 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4276315789473684, + "acc_stderr": 0.04026097083296558, + "acc_norm": 0.4276315789473684, + "acc_norm_stderr": 0.04026097083296558 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.01972205893961806, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.01972205893961806 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.31560283687943264, + "acc_stderr": 0.027724989449509314, + "acc_norm": 0.31560283687943264, + "acc_norm_stderr": 0.027724989449509314 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4375, + "acc_stderr": 
0.04708567521880525, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.04708567521880525 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.375, + "acc_stderr": 0.033016908987210894, + "acc_norm": 0.375, + "acc_norm_stderr": 0.033016908987210894 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.3195530726256983, + "acc_stderr": 0.015595520294147415, + "acc_norm": 0.3195530726256983, + "acc_norm_stderr": 0.015595520294147415 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.36764705882352944, + "acc_stderr": 0.029289413409403192, + "acc_norm": 0.36764705882352944, + "acc_norm_stderr": 0.029289413409403192 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5428571428571428, + "acc_stderr": 0.031891418324213966, + "acc_norm": 0.5428571428571428, + "acc_norm_stderr": 0.031891418324213966 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5780590717299579, + "acc_stderr": 0.03214814630240369, + "acc_norm": 0.5780590717299579, + "acc_norm_stderr": 0.03214814630240369 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3350717079530639, + "acc_stderr": 0.012055499471330373, + "acc_norm": 0.3350717079530639, + "acc_norm_stderr": 0.012055499471330373 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.44607843137254904, + "acc_stderr": 0.03488845451304974, + "acc_norm": 0.44607843137254904, + "acc_norm_stderr": 0.03488845451304974 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.45454545454545453, + "acc_stderr": 0.03888176921674099, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.03888176921674099 + }, + "harness|ko_truthfulqa_mc|0": { + 
"mc1": 0.3219094247246022, + "mc1_stderr": 0.016355567611960383, + "mc2": 0.498987125156699, + "mc2_stderr": 0.015714100379409426 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3789846517119244, + "acc_stderr": 0.01667926068422929, + "acc_norm": 0.4049586776859504, + "acc_norm_stderr": 0.016876941165045612 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "c1park/20240105_mistral-step50", + "model_sha": "d0b5b72c0b78b09f135569d5204f07b3c0dc13d0", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/c1park/kosolra-kullm-LDCC-merge/result_2024-02-22 00:50:40.json b/c1park/kosolra-kullm-LDCC-merge/result_2024-02-22 00:50:40.json new file mode 100644 index 0000000000000000000000000000000000000000..10eadfa7bca79722520c1c5aee4d4bbbd6b97eb8 --- /dev/null +++ b/c1park/kosolra-kullm-LDCC-merge/result_2024-02-22 00:50:40.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4761092150170648, + "acc_stderr": 0.014594701798071657, + "acc_norm": 0.53839590443686, + "acc_norm_stderr": 0.014568245550296361 + }, + "harness|ko_hellaswag|10": { + "acc": 0.47092212706632147, + 
"acc_stderr": 0.004981336318033641, + "acc_norm": 0.6464847639912368, + "acc_norm_stderr": 0.004770838678356036 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6374269005847953, + "acc_stderr": 0.0368713061556206, + "acc_norm": 0.6374269005847953, + "acc_norm_stderr": 0.0368713061556206 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6504854368932039, + "acc_stderr": 0.047211885060971716, + "acc_norm": 0.6504854368932039, + "acc_norm_stderr": 0.047211885060971716 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6781609195402298, + "acc_stderr": 0.01670638141505791, + "acc_norm": 0.6781609195402298, + "acc_norm_stderr": 0.01670638141505791 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4962962962962963, + "acc_stderr": 0.043192236258113303, + "acc_norm": 0.4962962962962963, + "acc_norm_stderr": 0.043192236258113303 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.46382978723404256, + "acc_stderr": 0.032600385118357715, + "acc_norm": 0.46382978723404256, + "acc_norm_stderr": 0.032600385118357715 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4939759036144578, + "acc_stderr": 0.03892212195333047, + "acc_norm": 0.4939759036144578, + "acc_norm_stderr": 0.03892212195333047 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6816720257234726, + "acc_stderr": 0.02645722506781102, + "acc_norm": 0.6816720257234726, + "acc_norm_stderr": 0.02645722506781102 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.6053811659192825, + "acc_stderr": 0.03280400504755291, + "acc_norm": 0.6053811659192825, + "acc_norm_stderr": 0.03280400504755291 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6717557251908397, + "acc_stderr": 0.04118438565806298, + "acc_norm": 0.6717557251908397, + "acc_norm_stderr": 0.04118438565806298 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.46, + 
"acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7424242424242424, + "acc_stderr": 0.03115626951964685, + "acc_norm": 0.7424242424242424, + "acc_norm_stderr": 0.03115626951964685 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.496551724137931, + "acc_stderr": 0.041665675771015785, + "acc_norm": 0.496551724137931, + "acc_norm_stderr": 0.041665675771015785 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3431372549019608, + "acc_stderr": 0.04724007352383886, + "acc_norm": 0.3431372549019608, + "acc_norm_stderr": 0.04724007352383886 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5966386554621849, + "acc_stderr": 0.031866081214088314, + "acc_norm": 0.5966386554621849, + "acc_norm_stderr": 0.031866081214088314 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.558974358974359, + "acc_stderr": 0.025174048384000718, + "acc_norm": 0.558974358974359, + "acc_norm_stderr": 0.025174048384000718 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.62, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.62, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6388888888888888, + "acc_stderr": 0.04643454608906275, + "acc_norm": 0.6388888888888888, + "acc_norm_stderr": 0.04643454608906275 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4236453201970443, + "acc_stderr": 0.03476725747649037, + "acc_norm": 0.4236453201970443, + "acc_norm_stderr": 0.03476725747649037 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6, + "acc_stderr": 0.02786932057166462, + "acc_norm": 0.6, + "acc_norm_stderr": 0.02786932057166462 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7905982905982906, + 
"acc_stderr": 0.02665569965392276, + "acc_norm": 0.7905982905982906, + "acc_norm_stderr": 0.02665569965392276 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.539622641509434, + "acc_stderr": 0.03067609659938918, + "acc_norm": 0.539622641509434, + "acc_norm_stderr": 0.03067609659938918 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5545454545454546, + "acc_stderr": 0.047605488214603246, + "acc_norm": 0.5545454545454546, + "acc_norm_stderr": 0.047605488214603246 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.337037037037037, + "acc_stderr": 0.028820884666253255, + "acc_norm": 0.337037037037037, + "acc_norm_stderr": 0.028820884666253255 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.03802039760107903, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.03802039760107903 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7114427860696517, + "acc_stderr": 0.0320384104021332, + "acc_norm": 0.7114427860696517, + "acc_norm_stderr": 0.0320384104021332 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5028901734104047, + "acc_stderr": 0.038124005659748335, + "acc_norm": 0.5028901734104047, + "acc_norm_stderr": 0.038124005659748335 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.41798941798941797, + "acc_stderr": 0.02540255550326091, + "acc_norm": 0.41798941798941797, + "acc_norm_stderr": 0.02540255550326091 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5069444444444444, + "acc_stderr": 0.04180806750294938, + "acc_norm": 0.5069444444444444, + "acc_norm_stderr": 0.04180806750294938 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.7, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.7, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_moral_disputes|5": { + 
"acc": 0.5953757225433526, + "acc_stderr": 0.026424816594009852, + "acc_norm": 0.5953757225433526, + "acc_norm_stderr": 0.026424816594009852 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5398773006134969, + "acc_stderr": 0.03915857291436971, + "acc_norm": 0.5398773006134969, + "acc_norm_stderr": 0.03915857291436971 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6111111111111112, + "acc_stderr": 0.027125115513166858, + "acc_norm": 0.6111111111111112, + "acc_norm_stderr": 0.027125115513166858 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7461139896373057, + "acc_stderr": 0.03141024780565319, + "acc_norm": 0.7461139896373057, + "acc_norm_stderr": 0.03141024780565319 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.4298245614035088, + "acc_stderr": 0.04657047260594964, + "acc_norm": 0.4298245614035088, + "acc_norm_stderr": 0.04657047260594964 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.689908256880734, + "acc_stderr": 0.01983084968443975, + "acc_norm": 0.689908256880734, + "acc_norm_stderr": 0.01983084968443975 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3412698412698413, + "acc_stderr": 0.042407993275749255, + "acc_norm": 0.3412698412698413, + "acc_norm_stderr": 0.042407993275749255 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5882352941176471, + "acc_stderr": 0.028180596328259287, + "acc_norm": 0.5882352941176471, + "acc_norm_stderr": 0.028180596328259287 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.62, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.62, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7272727272727273, + "acc_stderr": 0.04065578140908705, + "acc_norm": 0.7272727272727273, + "acc_norm_stderr": 0.04065578140908705 + }, + 
"harness|ko_mmlu_astronomy|5": { + "acc": 0.5657894736842105, + "acc_stderr": 0.04033565667848319, + "acc_norm": 0.5657894736842105, + "acc_norm_stderr": 0.04033565667848319 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5049019607843137, + "acc_stderr": 0.020226862710039466, + "acc_norm": 0.5049019607843137, + "acc_norm_stderr": 0.020226862710039466 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.41134751773049644, + "acc_stderr": 0.029354911159940975, + "acc_norm": 0.41134751773049644, + "acc_norm_stderr": 0.029354911159940975 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4107142857142857, + "acc_stderr": 0.046695106638751926, + "acc_norm": 0.4107142857142857, + "acc_norm_stderr": 0.046695106638751926 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5370370370370371, + "acc_stderr": 0.03400603625538271, + "acc_norm": 0.5370370370370371, + "acc_norm_stderr": 0.03400603625538271 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.30502793296089387, + "acc_stderr": 0.015398723510916715, + "acc_norm": 0.30502793296089387, + "acc_norm_stderr": 0.015398723510916715 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.66, + "acc_stderr": 0.04760952285695237, + "acc_norm": 0.66, + "acc_norm_stderr": 0.04760952285695237 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5404411764705882, + "acc_stderr": 0.030273325077345755, + "acc_norm": 0.5404411764705882, + "acc_norm_stderr": 0.030273325077345755 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5836734693877551, + "acc_stderr": 0.03155782816556166, + "acc_norm": 0.5836734693877551, + "acc_norm_stderr": 0.03155782816556166 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.759493670886076, + "acc_stderr": 0.027820781981149678, + 
"acc_norm": 0.759493670886076, + "acc_norm_stderr": 0.027820781981149678 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.4178617992177314, + "acc_stderr": 0.012596744108998564, + "acc_norm": 0.4178617992177314, + "acc_norm_stderr": 0.012596744108998564 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6519607843137255, + "acc_stderr": 0.03343311240488418, + "acc_norm": 0.6519607843137255, + "acc_norm_stderr": 0.03343311240488418 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6909090909090909, + "acc_stderr": 0.036085410115739666, + "acc_norm": 0.6909090909090909, + "acc_norm_stderr": 0.036085410115739666 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.401468788249694, + "mc1_stderr": 0.01716027390169365, + "mc2": 0.563740399008354, + "mc2_stderr": 0.015872267940178188 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5737898465171193, + "acc_stderr": 0.017002122609489252, + "acc_norm": 0.5903187721369539, + "acc_norm_stderr": 0.01690756819221948 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + 
"harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "c1park/kosolra-kullm-LDCC-merge", + "model_sha": "0e4433b55a2b4b6cce4b873080066936a0550796", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + 
"override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/caisarl76/Mistral-7B-3data-merged/result_2023-10-09 14:50:39.json b/caisarl76/Mistral-7B-3data-merged/result_2023-10-09 14:50:39.json new file mode 100644 index 0000000000000000000000000000000000000000..3d622996c34b82e2a6e33c02e1d0b8b2093f0e7b --- /dev/null +++ b/caisarl76/Mistral-7B-3data-merged/result_2023-10-09 14:50:39.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.32081911262798635, + "acc_stderr": 0.013640943091946528, + "acc_norm": 0.37627986348122866, + "acc_norm_stderr": 0.014157022555407166 + }, + "harness|ko_hellaswag|10": { + "acc": 0.37094204341764586, + "acc_stderr": 0.004820697457420419, + "acc_norm": 0.47480581557458673, + "acc_norm_stderr": 0.0049834428886777705 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.39766081871345027, + "acc_stderr": 0.0375363895576169, + "acc_norm": 0.39766081871345027, + "acc_norm_stderr": 0.0375363895576169 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5242718446601942, + "acc_stderr": 0.049449010929737795, + "acc_norm": 0.5242718446601942, + "acc_norm_stderr": 0.049449010929737795 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4495530012771392, + "acc_stderr": 0.017788725283507337, + "acc_norm": 0.4495530012771392, + "acc_norm_stderr": 0.017788725283507337 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3111111111111111, + "acc_stderr": 0.03999262876617723, + "acc_norm": 0.3111111111111111, + "acc_norm_stderr": 0.03999262876617723 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3659574468085106, + "acc_stderr": 0.0314895582974553, + "acc_norm": 0.3659574468085106, + "acc_norm_stderr": 0.0314895582974553 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3614457831325301, + "acc_stderr": 
0.0374005938202932, + "acc_norm": 0.3614457831325301, + "acc_norm_stderr": 0.0374005938202932 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4212218649517685, + "acc_stderr": 0.028043399858210628, + "acc_norm": 0.4212218649517685, + "acc_norm_stderr": 0.028043399858210628 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3452914798206278, + "acc_stderr": 0.03191100192835794, + "acc_norm": 0.3452914798206278, + "acc_norm_stderr": 0.03191100192835794 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.44274809160305345, + "acc_stderr": 0.0435644720266507, + "acc_norm": 0.44274809160305345, + "acc_norm_stderr": 0.0435644720266507 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.4696969696969697, + "acc_stderr": 0.03555804051763929, + "acc_norm": 0.4696969696969697, + "acc_norm_stderr": 0.03555804051763929 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.33793103448275863, + "acc_stderr": 0.03941707632064889, + "acc_norm": 0.33793103448275863, + "acc_norm_stderr": 0.03941707632064889 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.04023382273617747, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.04023382273617747 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.41596638655462187, + "acc_stderr": 0.03201650100739615, + "acc_norm": 0.41596638655462187, + "acc_norm_stderr": 0.03201650100739615 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3564102564102564, + "acc_stderr": 0.0242831405294673, + "acc_norm": 0.3564102564102564, + "acc_norm_stderr": 0.0242831405294673 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 
0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4351851851851852, + "acc_stderr": 0.04792898170907062, + "acc_norm": 0.4351851851851852, + "acc_norm_stderr": 0.04792898170907062 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.32019704433497537, + "acc_stderr": 0.032826493853041504, + "acc_norm": 0.32019704433497537, + "acc_norm_stderr": 0.032826493853041504 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4258064516129032, + "acc_stderr": 0.028129112709165897, + "acc_norm": 0.4258064516129032, + "acc_norm_stderr": 0.028129112709165897 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6324786324786325, + "acc_stderr": 0.031585391577456365, + "acc_norm": 0.6324786324786325, + "acc_norm_stderr": 0.031585391577456365 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.35471698113207545, + "acc_stderr": 0.029445175328199596, + "acc_norm": 0.35471698113207545, + "acc_norm_stderr": 0.029445175328199596 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.41818181818181815, + "acc_stderr": 0.04724577405731571, + "acc_norm": 0.41818181818181815, + "acc_norm_stderr": 0.04724577405731571 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.23333333333333334, + "acc_stderr": 0.025787874220959316, + "acc_norm": 0.23333333333333334, + "acc_norm_stderr": 0.025787874220959316 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.32450331125827814, + "acc_stderr": 0.03822746937658753, + "acc_norm": 0.32450331125827814, + "acc_norm_stderr": 0.03822746937658753 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5223880597014925, + "acc_stderr": 0.035319879302087305, + "acc_norm": 0.5223880597014925, + "acc_norm_stderr": 0.035319879302087305 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3063583815028902, + "acc_stderr": 0.03514942551267437, + "acc_norm": 0.3063583815028902, + "acc_norm_stderr": 
0.03514942551267437 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.291005291005291, + "acc_stderr": 0.023393826500484875, + "acc_norm": 0.291005291005291, + "acc_norm_stderr": 0.023393826500484875 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3125, + "acc_stderr": 0.038760854559127644, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.038760854559127644 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.48, + "acc_stderr": 0.05021167315686779, + "acc_norm": 0.48, + "acc_norm_stderr": 0.05021167315686779 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.44508670520231214, + "acc_stderr": 0.02675625512966377, + "acc_norm": 0.44508670520231214, + "acc_norm_stderr": 0.02675625512966377 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.39263803680981596, + "acc_stderr": 0.03836740907831029, + "acc_norm": 0.39263803680981596, + "acc_norm_stderr": 0.03836740907831029 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4104938271604938, + "acc_stderr": 0.027371350925124764, + "acc_norm": 0.4104938271604938, + "acc_norm_stderr": 0.027371350925124764 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.46113989637305697, + "acc_stderr": 0.035975244117345775, + "acc_norm": 0.46113989637305697, + "acc_norm_stderr": 0.035975244117345775 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2894736842105263, + "acc_stderr": 0.04266339443159394, + "acc_norm": 0.2894736842105263, + "acc_norm_stderr": 0.04266339443159394 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.42385321100917434, + "acc_stderr": 0.021187263209087516, + "acc_norm": 0.42385321100917434, + "acc_norm_stderr": 
0.021187263209087516 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.23809523809523808, + "acc_stderr": 0.03809523809523811, + "acc_norm": 0.23809523809523808, + "acc_norm_stderr": 0.03809523809523811 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4150326797385621, + "acc_stderr": 0.028213504177824093, + "acc_norm": 0.4150326797385621, + "acc_norm_stderr": 0.028213504177824093 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.512396694214876, + "acc_stderr": 0.04562951548180765, + "acc_norm": 0.512396694214876, + "acc_norm_stderr": 0.04562951548180765 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3815789473684211, + "acc_stderr": 0.039531733777491924, + "acc_norm": 0.3815789473684211, + "acc_norm_stderr": 0.039531733777491924 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3431372549019608, + "acc_stderr": 0.01920660684882537, + "acc_norm": 0.3431372549019608, + "acc_norm_stderr": 0.01920660684882537 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.35106382978723405, + "acc_stderr": 0.028473501272963764, + "acc_norm": 0.35106382978723405, + "acc_norm_stderr": 0.028473501272963764 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.23214285714285715, + "acc_stderr": 0.04007341809755805, + "acc_norm": 0.23214285714285715, + "acc_norm_stderr": 0.04007341809755805 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.03214952147802749, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.03214952147802749 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2581005586592179, + "acc_stderr": 0.014635185616527829, + "acc_norm": 0.2581005586592179, + "acc_norm_stderr": 0.014635185616527829 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939098, + 
"acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939098 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.35661764705882354, + "acc_stderr": 0.029097209568411952, + "acc_norm": 0.35661764705882354, + "acc_norm_stderr": 0.029097209568411952 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3224489795918367, + "acc_stderr": 0.029923100563683906, + "acc_norm": 0.3224489795918367, + "acc_norm_stderr": 0.029923100563683906 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6371308016877637, + "acc_stderr": 0.03129920825530213, + "acc_norm": 0.6371308016877637, + "acc_norm_stderr": 0.03129920825530213 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.34224250325945244, + "acc_stderr": 0.01211793999870587, + "acc_norm": 0.34224250325945244, + "acc_norm_stderr": 0.01211793999870587 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4117647058823529, + "acc_stderr": 0.0345423658538061, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.0345423658538061 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.45454545454545453, + "acc_stderr": 0.03888176921674099, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.03888176921674099 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.28151774785801714, + "mc1_stderr": 0.015744027248256055, + "mc2": 0.45994906823090903, + "mc2_stderr": 0.01581120469816343 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3789846517119244, + "acc_stderr": 0.01667926068422929, + "acc_norm": 0.44155844155844154, + "acc_norm_stderr": 0.0170725258755631 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + 
"harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + 
"harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "caisarl76/Mistral-7B-3data-merged", + "model_sha": "7df44d1c021898b608f741519016e4fd1373e636", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/caisarl76/Mistral-7B-Openorca-cot-2157/result_2023-10-23 00:34:03.json b/caisarl76/Mistral-7B-Openorca-cot-2157/result_2023-10-23 00:34:03.json new file mode 100644 index 0000000000000000000000000000000000000000..7d20aca2d4aa615f8033e2ef5a46ab546b999e2f --- /dev/null +++ b/caisarl76/Mistral-7B-Openorca-cot-2157/result_2023-10-23 00:34:03.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2226962457337884, + "acc_stderr": 0.012158314774829919, + "acc_norm": 0.2960750853242321, + "acc_norm_stderr": 0.013340916085246254 + }, + "harness|ko_hellaswag|10": { + "acc": 0.2795259908384784, + "acc_stderr": 0.004478491697891243, + "acc_norm": 0.30870344552877915, + "acc_norm_stderr": 0.004610143575553467 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.39766081871345027, + "acc_stderr": 0.03753638955761691, + "acc_norm": 0.39766081871345027, + "acc_norm_stderr": 0.03753638955761691 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.34951456310679613, + "acc_stderr": 0.04721188506097173, + "acc_norm": 0.34951456310679613, + "acc_norm_stderr": 0.04721188506097173 + }, + 
"harness|ko_mmlu_miscellaneous|5": { + "acc": 0.36909323116219667, + "acc_stderr": 0.01725628310912461, + "acc_norm": 0.36909323116219667, + "acc_norm_stderr": 0.01725628310912461 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04072314811876837, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04072314811876837 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816508, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816508 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3446808510638298, + "acc_stderr": 0.031068985963122155, + "acc_norm": 0.3446808510638298, + "acc_norm_stderr": 0.031068985963122155 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.35542168674698793, + "acc_stderr": 0.03726214354322415, + "acc_norm": 0.35542168674698793, + "acc_norm_stderr": 0.03726214354322415 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.34726688102893893, + "acc_stderr": 0.027040745502307336, + "acc_norm": 0.34726688102893893, + "acc_norm_stderr": 0.027040745502307336 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.34977578475336324, + "acc_stderr": 0.03200736719484503, + "acc_norm": 0.34977578475336324, + "acc_norm_stderr": 0.03200736719484503 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2900763358778626, + "acc_stderr": 0.03980066246467766, + "acc_norm": 0.2900763358778626, + "acc_norm_stderr": 0.03980066246467766 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.31313131313131315, + "acc_stderr": 0.03304205087813652, + "acc_norm": 0.31313131313131315, + "acc_norm_stderr": 0.03304205087813652 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3103448275862069, + "acc_stderr": 0.03855289616378948, + "acc_norm": 0.3103448275862069, + "acc_norm_stderr": 0.03855289616378948 + 
}, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.04023382273617746, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.04023382273617746 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.28991596638655465, + "acc_stderr": 0.02947248583313609, + "acc_norm": 0.28991596638655465, + "acc_norm_stderr": 0.02947248583313609 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3230769230769231, + "acc_stderr": 0.023710888501970565, + "acc_norm": 0.3230769230769231, + "acc_norm_stderr": 0.023710888501970565 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.53, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.53, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.39814814814814814, + "acc_stderr": 0.04732332615978814, + "acc_norm": 0.39814814814814814, + "acc_norm_stderr": 0.04732332615978814 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.22167487684729065, + "acc_stderr": 0.029225575892489614, + "acc_norm": 0.22167487684729065, + "acc_norm_stderr": 0.029225575892489614 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.34838709677419355, + "acc_stderr": 0.02710482632810094, + "acc_norm": 0.34838709677419355, + "acc_norm_stderr": 0.02710482632810094 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5128205128205128, + "acc_stderr": 0.03274531938842351, + "acc_norm": 0.5128205128205128, + "acc_norm_stderr": 0.03274531938842351 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3169811320754717, + "acc_stderr": 0.028637235639800928, + "acc_norm": 0.3169811320754717, + "acc_norm_stderr": 0.028637235639800928 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.43636363636363634, + "acc_stderr": 0.04750185058907297, + "acc_norm": 
0.43636363636363634, + "acc_norm_stderr": 0.04750185058907297 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24074074074074073, + "acc_stderr": 0.026067159222275794, + "acc_norm": 0.24074074074074073, + "acc_norm_stderr": 0.026067159222275794 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.271523178807947, + "acc_stderr": 0.036313298039696525, + "acc_norm": 0.271523178807947, + "acc_norm_stderr": 0.036313298039696525 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.43781094527363185, + "acc_stderr": 0.0350808011219984, + "acc_norm": 0.43781094527363185, + "acc_norm_stderr": 0.0350808011219984 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3236994219653179, + "acc_stderr": 0.0356760379963917, + "acc_norm": 0.3236994219653179, + "acc_norm_stderr": 0.0356760379963917 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30158730158730157, + "acc_stderr": 0.023636975996101813, + "acc_norm": 0.30158730158730157, + "acc_norm_stderr": 0.023636975996101813 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3125, + "acc_stderr": 0.038760854559127644, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.038760854559127644 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.33815028901734107, + "acc_stderr": 0.02546977014940017, + "acc_norm": 0.33815028901734107, + "acc_norm_stderr": 0.02546977014940017 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3558282208588957, + "acc_stderr": 0.03761521380046734, + "acc_norm": 0.3558282208588957, + "acc_norm_stderr": 0.03761521380046734 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3271604938271605, + "acc_stderr": 0.02610567386140981, + 
"acc_norm": 0.3271604938271605, + "acc_norm_stderr": 0.02610567386140981 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.35233160621761656, + "acc_stderr": 0.03447478286414358, + "acc_norm": 0.35233160621761656, + "acc_norm_stderr": 0.03447478286414358 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2982456140350877, + "acc_stderr": 0.04303684033537315, + "acc_norm": 0.2982456140350877, + "acc_norm_stderr": 0.04303684033537315 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3798165137614679, + "acc_stderr": 0.020808825617866244, + "acc_norm": 0.3798165137614679, + "acc_norm_stderr": 0.020808825617866244 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.29365079365079366, + "acc_stderr": 0.040735243221471255, + "acc_norm": 0.29365079365079366, + "acc_norm_stderr": 0.040735243221471255 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3202614379084967, + "acc_stderr": 0.026716118380156837, + "acc_norm": 0.3202614379084967, + "acc_norm_stderr": 0.026716118380156837 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.45454545454545453, + "acc_stderr": 0.045454545454545456, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.045454545454545456 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.2894736842105263, + "acc_stderr": 0.03690677986137283, + "acc_norm": 0.2894736842105263, + "acc_norm_stderr": 0.03690677986137283 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3202614379084967, + "acc_stderr": 0.01887568293806944, + "acc_norm": 0.3202614379084967, + "acc_norm_stderr": 0.01887568293806944 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 
0.30141843971631205, + "acc_stderr": 0.02737412888263115, + "acc_norm": 0.30141843971631205, + "acc_norm_stderr": 0.02737412888263115 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3482142857142857, + "acc_stderr": 0.04521829902833585, + "acc_norm": 0.3482142857142857, + "acc_norm_stderr": 0.04521829902833585 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.20833333333333334, + "acc_stderr": 0.027696910713093933, + "acc_norm": 0.20833333333333334, + "acc_norm_stderr": 0.027696910713093933 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2569832402234637, + "acc_stderr": 0.014614465821966351, + "acc_norm": 0.2569832402234637, + "acc_norm_stderr": 0.014614465821966351 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.047609522856952365, + "acc_norm": 0.34, + "acc_norm_stderr": 0.047609522856952365 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.35294117647058826, + "acc_stderr": 0.029029422815681404, + "acc_norm": 0.35294117647058826, + "acc_norm_stderr": 0.029029422815681404 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.22040816326530613, + "acc_stderr": 0.026537045312145312, + "acc_norm": 0.22040816326530613, + "acc_norm_stderr": 0.026537045312145312 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.47257383966244726, + "acc_stderr": 0.03249822718301303, + "acc_norm": 0.47257383966244726, + "acc_norm_stderr": 0.03249822718301303 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2542372881355932, + "acc_stderr": 0.011121129007840676, + "acc_norm": 0.2542372881355932, + "acc_norm_stderr": 0.011121129007840676 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.032566854844603886, + "acc_norm": 0.3137254901960784, + 
"acc_norm_stderr": 0.032566854844603886 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.40606060606060607, + "acc_stderr": 0.03834816355401181, + "acc_norm": 0.40606060606060607, + "acc_norm_stderr": 0.03834816355401181 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2582619339045288, + "mc1_stderr": 0.015321821688476189, + "mc2": 0.4874892521316813, + "mc2_stderr": 0.017011135502882097 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.16646989374262103, + "acc_stderr": 0.01280687925641312, + "acc_norm": 0.2833530106257379, + "acc_norm_stderr": 0.015492852084597233 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + 
"harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "caisarl76/Mistral-7B-Openorca-cot-2157", + "model_sha": "eaf722c66f6bbb64f7f43d08bc9de3b36be29d2b", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/caisarl76/Mistral-7B-eng-kor-cot-combined/result_2023-10-23 00:34:59.json b/caisarl76/Mistral-7B-eng-kor-cot-combined/result_2023-10-23 00:34:59.json new file mode 100644 index 0000000000000000000000000000000000000000..2ee2428f3c460e53e68e02640fe2f9154daf6e2b --- /dev/null +++ b/caisarl76/Mistral-7B-eng-kor-cot-combined/result_2023-10-23 
00:34:59.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2901023890784983, + "acc_stderr": 0.01326157367752077, + "acc_norm": 0.34812286689419797, + "acc_norm_stderr": 0.013921008595179335 + }, + "harness|ko_hellaswag|10": { + "acc": 0.35012945628360886, + "acc_stderr": 0.004760354191370866, + "acc_norm": 0.4374626568412667, + "acc_norm_stderr": 0.0049505983006675565 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4269005847953216, + "acc_stderr": 0.03793620616529917, + "acc_norm": 0.4269005847953216, + "acc_norm_stderr": 0.03793620616529917 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.39805825242718446, + "acc_stderr": 0.0484674825397724, + "acc_norm": 0.39805825242718446, + "acc_norm_stderr": 0.0484674825397724 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.017612204084663775, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.017612204084663775 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.039446241625011175, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.039446241625011175 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.35319148936170214, + "acc_stderr": 0.031245325202761926, + "acc_norm": 0.35319148936170214, + "acc_norm_stderr": 0.031245325202761926 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.35542168674698793, + "acc_stderr": 0.03726214354322415, + "acc_norm": 0.35542168674698793, + "acc_norm_stderr": 0.03726214354322415 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3858520900321543, + "acc_stderr": 0.027648149599751464, + "acc_norm": 0.3858520900321543, + "acc_norm_stderr": 0.027648149599751464 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.38565022421524664, + "acc_stderr": 0.03266842214289201, + "acc_norm": 
0.38565022421524664, + "acc_norm_stderr": 0.03266842214289201 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.33587786259541985, + "acc_stderr": 0.041423137719966634, + "acc_norm": 0.33587786259541985, + "acc_norm_stderr": 0.041423137719966634 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3484848484848485, + "acc_stderr": 0.03394853965156402, + "acc_norm": 0.3484848484848485, + "acc_norm_stderr": 0.03394853965156402 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.296551724137931, + "acc_stderr": 0.03806142687309993, + "acc_norm": 0.296551724137931, + "acc_norm_stderr": 0.03806142687309993 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.30392156862745096, + "acc_stderr": 0.045766654032077636, + "acc_norm": 0.30392156862745096, + "acc_norm_stderr": 0.045766654032077636 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.31092436974789917, + "acc_stderr": 0.03006676158297793, + "acc_norm": 0.31092436974789917, + "acc_norm_stderr": 0.03006676158297793 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.36666666666666664, + "acc_stderr": 0.024433016466052452, + "acc_norm": 0.36666666666666664, + "acc_norm_stderr": 0.024433016466052452 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956913, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956913 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.42592592592592593, + "acc_stderr": 0.0478034362693679, + "acc_norm": 0.42592592592592593, + "acc_norm_stderr": 0.0478034362693679 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3497536945812808, + "acc_stderr": 
0.03355400904969565, + "acc_norm": 0.3497536945812808, + "acc_norm_stderr": 0.03355400904969565 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3935483870967742, + "acc_stderr": 0.027791878753132274, + "acc_norm": 0.3935483870967742, + "acc_norm_stderr": 0.027791878753132274 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5427350427350427, + "acc_stderr": 0.03263622596380688, + "acc_norm": 0.5427350427350427, + "acc_norm_stderr": 0.03263622596380688 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3018867924528302, + "acc_stderr": 0.028254200344438662, + "acc_norm": 0.3018867924528302, + "acc_norm_stderr": 0.028254200344438662 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4, + "acc_stderr": 0.0469237132203465, + "acc_norm": 0.4, + "acc_norm_stderr": 0.0469237132203465 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3296296296296296, + "acc_stderr": 0.028661201116524586, + "acc_norm": 0.3296296296296296, + "acc_norm_stderr": 0.028661201116524586 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + "acc_stderr": 0.03684881521389024, + "acc_norm": 0.2847682119205298, + "acc_norm_stderr": 0.03684881521389024 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.4427860696517413, + "acc_stderr": 0.03512310964123937, + "acc_norm": 0.4427860696517413, + "acc_norm_stderr": 0.03512310964123937 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3236994219653179, + "acc_stderr": 0.0356760379963917, + "acc_norm": 0.3236994219653179, + "acc_norm_stderr": 0.0356760379963917 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.36507936507936506, + "acc_stderr": 0.02479606060269995, + "acc_norm": 0.36507936507936506, + "acc_norm_stderr": 0.02479606060269995 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2847222222222222, + "acc_stderr": 0.03773809990686935, + "acc_norm": 0.2847222222222222, + "acc_norm_stderr": 0.03773809990686935 + }, + 
"harness|ko_mmlu_college_chemistry|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.38439306358381503, + "acc_stderr": 0.026189666966272035, + "acc_norm": 0.38439306358381503, + "acc_norm_stderr": 0.026189666966272035 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.34355828220858897, + "acc_stderr": 0.037311335196738925, + "acc_norm": 0.34355828220858897, + "acc_norm_stderr": 0.037311335196738925 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.026571483480719967, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.026571483480719967 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.38860103626943004, + "acc_stderr": 0.035177397963731316, + "acc_norm": 0.38860103626943004, + "acc_norm_stderr": 0.035177397963731316 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.04185774424022056, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.04185774424022056 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3908256880733945, + "acc_stderr": 0.020920058346111065, + "acc_norm": 0.3908256880733945, + "acc_norm_stderr": 0.020920058346111065 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3253968253968254, + "acc_stderr": 0.041905964388711366, + "acc_norm": 0.3253968253968254, + "acc_norm_stderr": 0.041905964388711366 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3660130718954248, + "acc_stderr": 0.027582811415159624, + "acc_norm": 0.3660130718954248, + "acc_norm_stderr": 
0.027582811415159624 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.48760330578512395, + "acc_stderr": 0.04562951548180765, + "acc_norm": 0.48760330578512395, + "acc_norm_stderr": 0.04562951548180765 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3684210526315789, + "acc_stderr": 0.03925523381052932, + "acc_norm": 0.3684210526315789, + "acc_norm_stderr": 0.03925523381052932 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3104575163398693, + "acc_stderr": 0.01871806705262323, + "acc_norm": 0.3104575163398693, + "acc_norm_stderr": 0.01871806705262323 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2872340425531915, + "acc_stderr": 0.026992199173064356, + "acc_norm": 0.2872340425531915, + "acc_norm_stderr": 0.026992199173064356 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.29464285714285715, + "acc_stderr": 0.0432704093257873, + "acc_norm": 0.29464285714285715, + "acc_norm_stderr": 0.0432704093257873 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2824074074074074, + "acc_stderr": 0.030701372111510927, + "acc_norm": 0.2824074074074074, + "acc_norm_stderr": 0.030701372111510927 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.21564245810055865, + "acc_stderr": 0.013754835975482336, + "acc_norm": 0.21564245810055865, + "acc_norm_stderr": 0.013754835975482336 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.33455882352941174, + "acc_stderr": 0.028661996202335307, + "acc_norm": 
0.33455882352941174, + "acc_norm_stderr": 0.028661996202335307 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3224489795918367, + "acc_stderr": 0.029923100563683906, + "acc_norm": 0.3224489795918367, + "acc_norm_stderr": 0.029923100563683906 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.4641350210970464, + "acc_stderr": 0.03246338898055659, + "acc_norm": 0.4641350210970464, + "acc_norm_stderr": 0.03246338898055659 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2907431551499348, + "acc_stderr": 0.011598062372851974, + "acc_norm": 0.2907431551499348, + "acc_norm_stderr": 0.011598062372851974 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.37745098039215685, + "acc_stderr": 0.03402272044340705, + "acc_norm": 0.37745098039215685, + "acc_norm_stderr": 0.03402272044340705 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3939393939393939, + "acc_stderr": 0.0381549430868893, + "acc_norm": 0.3939393939393939, + "acc_norm_stderr": 0.0381549430868893 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.29008567931456547, + "mc1_stderr": 0.01588623687420952, + "mc2": 0.4699106773315303, + "mc2_stderr": 0.01582978440702906 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.22904368358913813, + "acc_stderr": 0.014447372277253822, + "acc_norm": 0.24557260920897284, + "acc_norm_stderr": 0.014798357154972826 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + 
"harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + 
"harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "caisarl76/Mistral-7B-eng-kor-cot-combined", + "model_sha": "d7e959c88fdc316602494d1ffd2bf52d33371f89", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/caisarl76/Mistral-7B-orca-1k-platy-1k/result_2023-10-22 12:42:37.json b/caisarl76/Mistral-7B-orca-1k-platy-1k/result_2023-10-22 12:42:37.json new file mode 100644 index 0000000000000000000000000000000000000000..c9c428dd4b099bccd150980f6214fe391755a829 --- /dev/null +++ b/caisarl76/Mistral-7B-orca-1k-platy-1k/result_2023-10-22 12:42:37.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.31313993174061433, + "acc_stderr": 0.013552671543623497, + "acc_norm": 0.3660409556313993, + "acc_norm_stderr": 0.014077223108470139 + }, + "harness|ko_hellaswag|10": { + "acc": 0.37183827922724555, + "acc_stderr": 0.004823078145064963, + "acc_norm": 0.45947022505477, + "acc_norm_stderr": 0.00497336133916965 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.43859649122807015, + "acc_stderr": 0.03805797505590459, + "acc_norm": 0.43859649122807015, + "acc_norm_stderr": 0.03805797505590459 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5339805825242718, + "acc_stderr": 0.04939291447273482, + "acc_norm": 0.5339805825242718, + "acc_norm_stderr": 0.04939291447273482 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4367816091954023, + "acc_stderr": 0.017736470837800677, + "acc_norm": 0.4367816091954023, + "acc_norm_stderr": 0.017736470837800677 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.362962962962963, + "acc_stderr": 0.041539484047424, + "acc_norm": 0.362962962962963, + "acc_norm_stderr": 0.041539484047424 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 
0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2978723404255319, + "acc_stderr": 0.02989614568209546, + "acc_norm": 0.2978723404255319, + "acc_norm_stderr": 0.02989614568209546 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.37349397590361444, + "acc_stderr": 0.037658451171688624, + "acc_norm": 0.37349397590361444, + "acc_norm_stderr": 0.037658451171688624 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4533762057877814, + "acc_stderr": 0.028274359854894245, + "acc_norm": 0.4533762057877814, + "acc_norm_stderr": 0.028274359854894245 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.36771300448430494, + "acc_stderr": 0.032361983509282766, + "acc_norm": 0.36771300448430494, + "acc_norm_stderr": 0.032361983509282766 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.40458015267175573, + "acc_stderr": 0.043046937953806645, + "acc_norm": 0.40458015267175573, + "acc_norm_stderr": 0.043046937953806645 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.45454545454545453, + "acc_stderr": 0.03547601494006937, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.03547601494006937 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.45517241379310347, + "acc_stderr": 0.04149886942192117, + "acc_norm": 0.45517241379310347, + "acc_norm_stderr": 0.04149886942192117 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.17647058823529413, + "acc_stderr": 0.03793281185307808, + "acc_norm": 0.17647058823529413, + "acc_norm_stderr": 0.03793281185307808 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.0322529423239964, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.0322529423239964 + }, + 
"harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.39487179487179486, + "acc_stderr": 0.024784316942156374, + "acc_norm": 0.39487179487179486, + "acc_norm_stderr": 0.024784316942156374 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.58, + "acc_stderr": 0.04960449637488583, + "acc_norm": 0.58, + "acc_norm_stderr": 0.04960449637488583 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421296, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421296 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3842364532019704, + "acc_stderr": 0.03422398565657551, + "acc_norm": 0.3842364532019704, + "acc_norm_stderr": 0.03422398565657551 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.43548387096774194, + "acc_stderr": 0.02820622559150274, + "acc_norm": 0.43548387096774194, + "acc_norm_stderr": 0.02820622559150274 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6196581196581197, + "acc_stderr": 0.03180425204384099, + "acc_norm": 0.6196581196581197, + "acc_norm_stderr": 0.03180425204384099 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3584905660377358, + "acc_stderr": 0.029514703583981755, + "acc_norm": 0.3584905660377358, + "acc_norm_stderr": 0.029514703583981755 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4, + "acc_stderr": 0.0469237132203465, + "acc_norm": 0.4, + "acc_norm_stderr": 0.0469237132203465 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.028317533496066485, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.028317533496066485 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.037579499229433426, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 
0.037579499229433426 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6019900497512438, + "acc_stderr": 0.034611994290400135, + "acc_norm": 0.6019900497512438, + "acc_norm_stderr": 0.034611994290400135 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.31213872832369943, + "acc_stderr": 0.03533133389323657, + "acc_norm": 0.31213872832369943, + "acc_norm_stderr": 0.03533133389323657 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.31216931216931215, + "acc_stderr": 0.02386520683697259, + "acc_norm": 0.31216931216931215, + "acc_norm_stderr": 0.02386520683697259 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.040166600304512336, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.040166600304512336 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4797687861271676, + "acc_stderr": 0.026897049996382875, + "acc_norm": 0.4797687861271676, + "acc_norm_stderr": 0.026897049996382875 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4171779141104294, + "acc_stderr": 0.038741028598180814, + "acc_norm": 0.4171779141104294, + "acc_norm_stderr": 0.038741028598180814 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.38271604938271603, + "acc_stderr": 0.02704453813840262, + "acc_norm": 0.38271604938271603, + "acc_norm_stderr": 0.02704453813840262 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.46632124352331605, + "acc_stderr": 0.036002440698671784, + "acc_norm": 0.46632124352331605, + 
"acc_norm_stderr": 0.036002440698671784 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.040969851398436716, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.040969851398436716 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.43486238532110094, + "acc_stderr": 0.02125463146560928, + "acc_norm": 0.43486238532110094, + "acc_norm_stderr": 0.02125463146560928 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.040406101782088394, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.040406101782088394 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.39215686274509803, + "acc_stderr": 0.027956046165424516, + "acc_norm": 0.39215686274509803, + "acc_norm_stderr": 0.027956046165424516 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6446280991735537, + "acc_stderr": 0.0436923632657398, + "acc_norm": 0.6446280991735537, + "acc_norm_stderr": 0.0436923632657398 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3157894736842105, + "acc_stderr": 0.03782728980865469, + "acc_norm": 0.3157894736842105, + "acc_norm_stderr": 0.03782728980865469 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.35784313725490197, + "acc_stderr": 0.019393058402355442, + "acc_norm": 0.35784313725490197, + "acc_norm_stderr": 0.019393058402355442 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.33687943262411346, + "acc_stderr": 0.02819553487396673, + "acc_norm": 0.33687943262411346, + "acc_norm_stderr": 0.02819553487396673 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3125, + "acc_stderr": 0.043994650575715215, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.043994650575715215 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.39351851851851855, + "acc_stderr": 
0.03331747876370312, + "acc_norm": 0.39351851851851855, + "acc_norm_stderr": 0.03331747876370312 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.30614525139664805, + "acc_stderr": 0.01541449448790321, + "acc_norm": 0.30614525139664805, + "acc_norm_stderr": 0.01541449448790321 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.53, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.53, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3786764705882353, + "acc_stderr": 0.02946513363977613, + "acc_norm": 0.3786764705882353, + "acc_norm_stderr": 0.02946513363977613 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.37551020408163266, + "acc_stderr": 0.03100120903989484, + "acc_norm": 0.37551020408163266, + "acc_norm_stderr": 0.03100120903989484 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5780590717299579, + "acc_stderr": 0.032148146302403695, + "acc_norm": 0.5780590717299579, + "acc_norm_stderr": 0.032148146302403695 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.34224250325945244, + "acc_stderr": 0.012117939998705876, + "acc_norm": 0.34224250325945244, + "acc_norm_stderr": 0.012117939998705876 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.3872549019607843, + "acc_stderr": 0.03418931233833344, + "acc_norm": 0.3872549019607843, + "acc_norm_stderr": 0.03418931233833344 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.45454545454545453, + "acc_stderr": 0.038881769216741, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.038881769216741 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2937576499388005, + "mc1_stderr": 0.015945068581236618, + "mc2": 0.4769559005507783, + "mc2_stderr": 0.015879206203595765 + }, + "harness|ko_commongen_v2|2": 
{ + "acc": 0.34946871310507677, + "acc_stderr": 0.01639279708576985, + "acc_norm": 0.39787485242030696, + "acc_norm_stderr": 0.01682795905473338 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + 
"harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "caisarl76/Mistral-7B-orca-1k-platy-1k", + "model_sha": "528d7bcaa2489daeea58946d17b341b55946f21b", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/caisarl76/Mistral-7B-orca-platy-1k-ko-f-1871/result_2023-10-22 22:35:04.json b/caisarl76/Mistral-7B-orca-platy-1k-ko-f-1871/result_2023-10-22 22:35:04.json new file mode 100644 index 0000000000000000000000000000000000000000..815017928d2ffa29e4ac6c279492e289eb393149 --- /dev/null +++ b/caisarl76/Mistral-7B-orca-platy-1k-ko-f-1871/result_2023-10-22 22:35:04.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3267918088737201, + "acc_stderr": 0.013706665975587333, + "acc_norm": 0.3779863481228669, + "acc_norm_stderr": 0.014169664520303103 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3707428799044015, + "acc_stderr": 0.004820166002253069, + "acc_norm": 0.4790878311093408, + "acc_norm_stderr": 0.004985415250690911 + }, + 
"harness|ko_mmlu_world_religions|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.03811079669833531, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.03811079669833531 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.3592233009708738, + "acc_stderr": 0.04750458399041692, + "acc_norm": 0.3592233009708738, + "acc_norm_stderr": 0.04750458399041692 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.38569604086845466, + "acc_stderr": 0.017406476619212904, + "acc_norm": 0.38569604086845466, + "acc_norm_stderr": 0.017406476619212904 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04072314811876837, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04072314811876837 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2553191489361702, + "acc_stderr": 0.02850485647051418, + "acc_norm": 0.2553191489361702, + "acc_norm_stderr": 0.02850485647051418 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3313253012048193, + "acc_stderr": 0.03664314777288087, + "acc_norm": 0.3313253012048193, + "acc_norm_stderr": 0.03664314777288087 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3536977491961415, + "acc_stderr": 0.027155208103200865, + "acc_norm": 0.3536977491961415, + "acc_norm_stderr": 0.027155208103200865 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.2556053811659193, + "acc_stderr": 0.029275891003969923, + "acc_norm": 0.2556053811659193, + "acc_norm_stderr": 0.029275891003969923 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.29770992366412213, + "acc_stderr": 0.04010358942462202, + "acc_norm": 0.29770992366412213, + "acc_norm_stderr": 0.04010358942462202 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + 
"harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3484848484848485, + "acc_stderr": 0.03394853965156403, + "acc_norm": 0.3484848484848485, + "acc_norm_stderr": 0.03394853965156403 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.38620689655172413, + "acc_stderr": 0.04057324734419035, + "acc_norm": 0.38620689655172413, + "acc_norm_stderr": 0.04057324734419035 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.041583075330832865, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.041583075330832865 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.02959732973097809, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.02959732973097809 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.28205128205128205, + "acc_stderr": 0.02281581309889661, + "acc_norm": 0.28205128205128205, + "acc_norm_stderr": 0.02281581309889661 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.04643454608906275, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.04643454608906275 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3399014778325123, + "acc_stderr": 0.033327690684107895, + "acc_norm": 0.3399014778325123, + "acc_norm_stderr": 0.033327690684107895 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3580645161290323, + "acc_stderr": 0.027273890594300642, + "acc_norm": 0.3580645161290323, + "acc_norm_stderr": 0.027273890594300642 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.4358974358974359, + "acc_stderr": 0.032485775115784, + "acc_norm": 
0.4358974358974359, + "acc_norm_stderr": 0.032485775115784 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2830188679245283, + "acc_stderr": 0.027724236492700907, + "acc_norm": 0.2830188679245283, + "acc_norm_stderr": 0.027724236492700907 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2818181818181818, + "acc_stderr": 0.04309118709946458, + "acc_norm": 0.2818181818181818, + "acc_norm_stderr": 0.04309118709946458 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.026842057873833706, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.026842057873833706 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + "acc_stderr": 0.03684881521389023, + "acc_norm": 0.2847682119205298, + "acc_norm_stderr": 0.03684881521389023 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.4228855721393035, + "acc_stderr": 0.03493231777421281, + "acc_norm": 0.4228855721393035, + "acc_norm_stderr": 0.03493231777421281 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2947976878612717, + "acc_stderr": 0.03476599607516478, + "acc_norm": 0.2947976878612717, + "acc_norm_stderr": 0.03476599607516478 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30158730158730157, + "acc_stderr": 0.023636975996101803, + "acc_norm": 0.30158730158730157, + "acc_norm_stderr": 0.023636975996101803 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2708333333333333, + "acc_stderr": 0.03716177437566016, + "acc_norm": 0.2708333333333333, + "acc_norm_stderr": 0.03716177437566016 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.35260115606936415, + "acc_stderr": 
0.025722802200895813, + "acc_norm": 0.35260115606936415, + "acc_norm_stderr": 0.025722802200895813 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3128834355828221, + "acc_stderr": 0.036429145782924034, + "acc_norm": 0.3128834355828221, + "acc_norm_stderr": 0.036429145782924034 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.38271604938271603, + "acc_stderr": 0.02704453813840262, + "acc_norm": 0.38271604938271603, + "acc_norm_stderr": 0.02704453813840262 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.29015544041450775, + "acc_stderr": 0.03275264467791515, + "acc_norm": 0.29015544041450775, + "acc_norm_stderr": 0.03275264467791515 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.04185774424022056, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.04185774424022056 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3357798165137615, + "acc_stderr": 0.02024808139675293, + "acc_norm": 0.3357798165137615, + "acc_norm_stderr": 0.02024808139675293 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.21428571428571427, + "acc_stderr": 0.03670066451047181, + "acc_norm": 0.21428571428571427, + "acc_norm_stderr": 0.03670066451047181 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.31699346405228757, + "acc_stderr": 0.026643278474508758, + "acc_norm": 0.31699346405228757, + "acc_norm_stderr": 0.026643278474508758 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.045454545454545484, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.045454545454545484 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 
0.3815789473684211, + "acc_stderr": 0.03953173377749194, + "acc_norm": 0.3815789473684211, + "acc_norm_stderr": 0.03953173377749194 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2957516339869281, + "acc_stderr": 0.01846315413263281, + "acc_norm": 0.2957516339869281, + "acc_norm_stderr": 0.01846315413263281 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2872340425531915, + "acc_stderr": 0.026992199173064356, + "acc_norm": 0.2872340425531915, + "acc_norm_stderr": 0.026992199173064356 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2767857142857143, + "acc_stderr": 0.04246624336697624, + "acc_norm": 0.2767857142857143, + "acc_norm_stderr": 0.04246624336697624 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.28703703703703703, + "acc_stderr": 0.030851992993257013, + "acc_norm": 0.28703703703703703, + "acc_norm_stderr": 0.030851992993257013 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.25139664804469275, + "acc_stderr": 0.01450897945355399, + "acc_norm": 0.25139664804469275, + "acc_norm_stderr": 0.01450897945355399 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.21323529411764705, + "acc_stderr": 0.024880971512294268, + "acc_norm": 0.21323529411764705, + "acc_norm_stderr": 0.024880971512294268 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.33877551020408164, + "acc_stderr": 0.03029950656215418, + "acc_norm": 0.33877551020408164, + "acc_norm_stderr": 0.03029950656215418 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.41350210970464135, + "acc_stderr": 0.03205649904851859, + "acc_norm": 0.41350210970464135, + 
"acc_norm_stderr": 0.03205649904851859 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.288135593220339, + "acc_stderr": 0.011567140661324565, + "acc_norm": 0.288135593220339, + "acc_norm_stderr": 0.011567140661324565 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.03166009679399812, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.03166009679399812 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.03681050869161549, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.03681050869161549 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.28886168910648713, + "mc1_stderr": 0.015866346401384304, + "mc2": 0.45737169951487844, + "mc2_stderr": 0.015829256462411827 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3955135773317591, + "acc_stderr": 0.016810815902206046, + "acc_norm": 0.4498229043683589, + "acc_norm_stderr": 0.01710357334382571 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + 
"harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "caisarl76/Mistral-7B-orca-platy-1k-ko-f-1871", + "model_sha": "648931fc59553f86c011a4e312d6fc0ee93d4b37", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": 
null + } +} \ No newline at end of file diff --git a/caisarl76/Mistral-7B-orca-platy-2k-ep4/result_2023-10-22 15:12:08.json b/caisarl76/Mistral-7B-orca-platy-2k-ep4/result_2023-10-22 15:12:08.json new file mode 100644 index 0000000000000000000000000000000000000000..98d3ff59a477dbb70380e62df9510cb466fdb91d --- /dev/null +++ b/caisarl76/Mistral-7B-orca-platy-2k-ep4/result_2023-10-22 15:12:08.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.295221843003413, + "acc_stderr": 0.013329750293382316, + "acc_norm": 0.3430034129692833, + "acc_norm_stderr": 0.013872423223718167 + }, + "harness|ko_hellaswag|10": { + "acc": 0.37054371639115713, + "acc_stderr": 0.0048196336688325495, + "acc_norm": 0.46345349531965746, + "acc_norm_stderr": 0.00497643438746997 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4152046783625731, + "acc_stderr": 0.037792759455032, + "acc_norm": 0.4152046783625731, + "acc_norm_stderr": 0.037792759455032 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4368932038834951, + "acc_stderr": 0.049111471073657764, + "acc_norm": 0.4368932038834951, + "acc_norm_stderr": 0.049111471073657764 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.39719029374201786, + "acc_stderr": 0.01749790503715938, + "acc_norm": 0.39719029374201786, + "acc_norm_stderr": 0.01749790503715938 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34074074074074073, + "acc_stderr": 0.040943762699967946, + "acc_norm": 0.34074074074074073, + "acc_norm_stderr": 0.040943762699967946 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2765957446808511, + "acc_stderr": 0.029241883869628813, + "acc_norm": 0.2765957446808511, + "acc_norm_stderr": 0.029241883869628813 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3313253012048193, + "acc_stderr": 0.036643147772880864, + 
"acc_norm": 0.3313253012048193, + "acc_norm_stderr": 0.036643147772880864 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.42443729903536975, + "acc_stderr": 0.028071928247946205, + "acc_norm": 0.42443729903536975, + "acc_norm_stderr": 0.028071928247946205 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.336322869955157, + "acc_stderr": 0.031708824268455, + "acc_norm": 0.336322869955157, + "acc_norm_stderr": 0.031708824268455 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3435114503816794, + "acc_stderr": 0.041649760719448786, + "acc_norm": 0.3435114503816794, + "acc_norm_stderr": 0.041649760719448786 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.47474747474747475, + "acc_stderr": 0.03557806245087314, + "acc_norm": 0.47474747474747475, + "acc_norm_stderr": 0.03557806245087314 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.31724137931034485, + "acc_stderr": 0.03878352372138623, + "acc_norm": 0.31724137931034485, + "acc_norm_stderr": 0.03878352372138623 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.04220773659171453, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.04220773659171453 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3907563025210084, + "acc_stderr": 0.031693802357129965, + "acc_norm": 0.3907563025210084, + "acc_norm_stderr": 0.031693802357129965 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.43333333333333335, + "acc_stderr": 0.02512465352588513, + "acc_norm": 0.43333333333333335, + "acc_norm_stderr": 0.02512465352588513 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.28, + "acc_stderr": 
0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.047500773411999854, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.047500773411999854 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3842364532019704, + "acc_stderr": 0.034223985656575515, + "acc_norm": 0.3842364532019704, + "acc_norm_stderr": 0.034223985656575515 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3870967741935484, + "acc_stderr": 0.027709359675032488, + "acc_norm": 0.3870967741935484, + "acc_norm_stderr": 0.027709359675032488 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5299145299145299, + "acc_stderr": 0.03269741106812443, + "acc_norm": 0.5299145299145299, + "acc_norm_stderr": 0.03269741106812443 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.37735849056603776, + "acc_stderr": 0.029832808114796005, + "acc_norm": 0.37735849056603776, + "acc_norm_stderr": 0.029832808114796005 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.39090909090909093, + "acc_stderr": 0.04673752333670238, + "acc_norm": 0.39090909090909093, + "acc_norm_stderr": 0.04673752333670238 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.02671924078371218, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.02671924078371218 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.25165562913907286, + "acc_stderr": 0.03543304234389985, + "acc_norm": 0.25165562913907286, + "acc_norm_stderr": 0.03543304234389985 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.4626865671641791, + "acc_stderr": 0.035256751674679745, + "acc_norm": 0.4626865671641791, + "acc_norm_stderr": 0.035256751674679745 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3699421965317919, + "acc_stderr": 0.03681229633394319, + "acc_norm": 0.3699421965317919, + "acc_norm_stderr": 0.03681229633394319 + }, + 
"harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.023266512213730578, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.023266512213730578 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3472222222222222, + "acc_stderr": 0.039812405437178615, + "acc_norm": 0.3472222222222222, + "acc_norm_stderr": 0.039812405437178615 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.3583815028901734, + "acc_stderr": 0.025816756791584215, + "acc_norm": 0.3583815028901734, + "acc_norm_stderr": 0.025816756791584215 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.36809815950920244, + "acc_stderr": 0.03789213935838396, + "acc_norm": 0.36809815950920244, + "acc_norm_stderr": 0.03789213935838396 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.41358024691358025, + "acc_stderr": 0.027402042040269952, + "acc_norm": 0.41358024691358025, + "acc_norm_stderr": 0.027402042040269952 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.43005181347150256, + "acc_stderr": 0.03572954333144809, + "acc_norm": 0.43005181347150256, + "acc_norm_stderr": 0.03572954333144809 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.04227054451232199, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.04227054451232199 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.41100917431192663, + "acc_stderr": 0.021095050687277638, + "acc_norm": 0.41100917431192663, + "acc_norm_stderr": 
0.021095050687277638 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30158730158730157, + "acc_stderr": 0.04104947269903394, + "acc_norm": 0.30158730158730157, + "acc_norm_stderr": 0.04104947269903394 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4084967320261438, + "acc_stderr": 0.028146405993096358, + "acc_norm": 0.4084967320261438, + "acc_norm_stderr": 0.028146405993096358 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.4297520661157025, + "acc_stderr": 0.04519082021319772, + "acc_norm": 0.4297520661157025, + "acc_norm_stderr": 0.04519082021319772 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3618421052631579, + "acc_stderr": 0.03910525752849726, + "acc_norm": 0.3618421052631579, + "acc_norm_stderr": 0.03910525752849726 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.29248366013071897, + "acc_stderr": 0.01840341571010979, + "acc_norm": 0.29248366013071897, + "acc_norm_stderr": 0.01840341571010979 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2872340425531915, + "acc_stderr": 0.026992199173064356, + "acc_norm": 0.2872340425531915, + "acc_norm_stderr": 0.026992199173064356 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.21428571428571427, + "acc_stderr": 0.038946411200447915, + "acc_norm": 0.21428571428571427, + "acc_norm_stderr": 0.038946411200447915 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.033247089118091176, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.033247089118091176 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2905027932960894, + "acc_stderr": 0.015183844307206157, + "acc_norm": 0.2905027932960894, + "acc_norm_stderr": 0.015183844307206157 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + 
"acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.40441176470588236, + "acc_stderr": 0.02981263070156974, + "acc_norm": 0.40441176470588236, + "acc_norm_stderr": 0.02981263070156974 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.37142857142857144, + "acc_stderr": 0.03093285879278984, + "acc_norm": 0.37142857142857144, + "acc_norm_stderr": 0.03093285879278984 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.4810126582278481, + "acc_stderr": 0.03252375148090447, + "acc_norm": 0.4810126582278481, + "acc_norm_stderr": 0.03252375148090447 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2966101694915254, + "acc_stderr": 0.011665946586082844, + "acc_norm": 0.2966101694915254, + "acc_norm_stderr": 0.011665946586082844 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.37254901960784315, + "acc_stderr": 0.03393388584958403, + "acc_norm": 0.37254901960784315, + "acc_norm_stderr": 0.03393388584958403 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4, + "acc_stderr": 0.03825460278380026, + "acc_norm": 0.4, + "acc_norm_stderr": 0.03825460278380026 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2864137086903305, + "mc1_stderr": 0.015826142439502346, + "mc2": 0.449359001521154, + "mc2_stderr": 0.016084396495163696 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.32585596221959856, + "acc_stderr": 0.016114023894800336, + "acc_norm": 0.3565525383707202, + "acc_norm_stderr": 0.01646770698152745 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + 
"harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + 
"harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "caisarl76/Mistral-7B-orca-platy-2k-ep4", + "model_sha": "fd2682689d7efd4dd350d71f64a7a8ff09842fd7", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/caisarl76/Mistral-7B-v0.1-orca_platy-1k-ep4/result_2023-10-22 15:19:27.json b/caisarl76/Mistral-7B-v0.1-orca_platy-1k-ep4/result_2023-10-22 15:19:27.json new file mode 100644 index 0000000000000000000000000000000000000000..37a5c715b756d1f7ac23f6e90a2aa1674a39c07a --- /dev/null +++ b/caisarl76/Mistral-7B-v0.1-orca_platy-1k-ep4/result_2023-10-22 15:19:27.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.318259385665529, + "acc_stderr": 0.013611993916971451, + "acc_norm": 0.3583617747440273, + "acc_norm_stderr": 0.01401288333485986 + }, + "harness|ko_hellaswag|10": { + "acc": 0.368352917745469, + "acc_stderr": 0.00481371995282996, + "acc_norm": 0.46265684126667994, + "acc_norm_stderr": 0.0049758453350866195 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.40350877192982454, + "acc_stderr": 0.03762738699917055, + "acc_norm": 0.40350877192982454, + "acc_norm_stderr": 0.03762738699917055 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4368932038834951, + "acc_stderr": 0.04911147107365777, + "acc_norm": 0.4368932038834951, + "acc_norm_stderr": 0.04911147107365777 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.44061302681992337, + "acc_stderr": 0.017753396973908486, 
+ "acc_norm": 0.44061302681992337, + "acc_norm_stderr": 0.017753396973908486 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4, + "acc_stderr": 0.04232073695151589, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04232073695151589 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.32340425531914896, + "acc_stderr": 0.030579442773610334, + "acc_norm": 0.32340425531914896, + "acc_norm_stderr": 0.030579442773610334 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3493975903614458, + "acc_stderr": 0.0371172519074075, + "acc_norm": 0.3493975903614458, + "acc_norm_stderr": 0.0371172519074075 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.43086816720257237, + "acc_stderr": 0.02812534098397271, + "acc_norm": 0.43086816720257237, + "acc_norm_stderr": 0.02812534098397271 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.31390134529147984, + "acc_stderr": 0.03114679648297246, + "acc_norm": 0.31390134529147984, + "acc_norm_stderr": 0.03114679648297246 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4351145038167939, + "acc_stderr": 0.04348208051644858, + "acc_norm": 0.4351145038167939, + "acc_norm_stderr": 0.04348208051644858 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.4898989898989899, + "acc_stderr": 0.035616254886737454, + "acc_norm": 0.4898989898989899, + "acc_norm_stderr": 0.035616254886737454 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3793103448275862, + "acc_stderr": 0.04043461861916747, + "acc_norm": 0.3793103448275862, + "acc_norm_stderr": 0.04043461861916747 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.04440521906179327, + "acc_norm": 
0.27450980392156865, + "acc_norm_stderr": 0.04440521906179327 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.031124619309328177, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.031124619309328177 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.38974358974358975, + "acc_stderr": 0.024726967886647078, + "acc_norm": 0.38974358974358975, + "acc_norm_stderr": 0.024726967886647078 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.53, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.53, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.19, + "acc_stderr": 0.039427724440366234, + "acc_norm": 0.19, + "acc_norm_stderr": 0.039427724440366234 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.04750077341199985, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.04750077341199985 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.33497536945812806, + "acc_stderr": 0.033208527423483104, + "acc_norm": 0.33497536945812806, + "acc_norm_stderr": 0.033208527423483104 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.35161290322580646, + "acc_stderr": 0.027162537826948458, + "acc_norm": 0.35161290322580646, + "acc_norm_stderr": 0.027162537826948458 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.03088273697413866, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.03088273697413866 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.35471698113207545, + "acc_stderr": 0.029445175328199596, + "acc_norm": 0.35471698113207545, + "acc_norm_stderr": 0.029445175328199596 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4, + "acc_stderr": 0.0469237132203465, + "acc_norm": 0.4, + "acc_norm_stderr": 0.0469237132203465 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.29259259259259257, + "acc_stderr": 
0.027738969632176088, + "acc_norm": 0.29259259259259257, + "acc_norm_stderr": 0.027738969632176088 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.03780445850526732, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.03780445850526732 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.4626865671641791, + "acc_stderr": 0.03525675167467974, + "acc_norm": 0.4626865671641791, + "acc_norm_stderr": 0.03525675167467974 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.34104046242774566, + "acc_stderr": 0.03614665424180826, + "acc_norm": 0.34104046242774566, + "acc_norm_stderr": 0.03614665424180826 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3253968253968254, + "acc_stderr": 0.02413015829976262, + "acc_norm": 0.3253968253968254, + "acc_norm_stderr": 0.02413015829976262 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3194444444444444, + "acc_stderr": 0.03899073687357335, + "acc_norm": 0.3194444444444444, + "acc_norm_stderr": 0.03899073687357335 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.36416184971098264, + "acc_stderr": 0.025906632631016127, + "acc_norm": 0.36416184971098264, + "acc_norm_stderr": 0.025906632631016127 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.44171779141104295, + "acc_stderr": 0.03901591825836184, + "acc_norm": 0.44171779141104295, + "acc_norm_stderr": 0.03901591825836184 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3734567901234568, + "acc_stderr": 0.02691500301138015, + "acc_norm": 0.3734567901234568, + "acc_norm_stderr": 0.02691500301138015 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.37, + 
"acc_stderr": 0.04852365870939098, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939098 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.37305699481865284, + "acc_stderr": 0.03490205592048574, + "acc_norm": 0.37305699481865284, + "acc_norm_stderr": 0.03490205592048574 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.044346007015849245, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.044346007015849245 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.45688073394495415, + "acc_stderr": 0.021357458785226206, + "acc_norm": 0.45688073394495415, + "acc_norm_stderr": 0.021357458785226206 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.36507936507936506, + "acc_stderr": 0.04306241259127153, + "acc_norm": 0.36507936507936506, + "acc_norm_stderr": 0.04306241259127153 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3464052287581699, + "acc_stderr": 0.027245613047215362, + "acc_norm": 0.3464052287581699, + "acc_norm_stderr": 0.027245613047215362 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5206611570247934, + "acc_stderr": 0.04560456086387235, + "acc_norm": 0.5206611570247934, + "acc_norm_stderr": 0.04560456086387235 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.34868421052631576, + "acc_stderr": 0.03878139888797611, + "acc_norm": 0.34868421052631576, + "acc_norm_stderr": 0.03878139888797611 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3366013071895425, + "acc_stderr": 0.019117213911495175, + "acc_norm": 0.3366013071895425, + "acc_norm_stderr": 0.019117213911495175 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.29432624113475175, + "acc_stderr": 0.027187127011503803, + "acc_norm": 0.29432624113475175, + "acc_norm_stderr": 0.027187127011503803 + }, + 
"harness|ko_mmlu_machine_learning|5": { + "acc": 0.25892857142857145, + "acc_stderr": 0.04157751539865629, + "acc_norm": 0.25892857142857145, + "acc_norm_stderr": 0.04157751539865629 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.37962962962962965, + "acc_stderr": 0.03309682581119035, + "acc_norm": 0.37962962962962965, + "acc_norm_stderr": 0.03309682581119035 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2435754189944134, + "acc_stderr": 0.014355911964767867, + "acc_norm": 0.2435754189944134, + "acc_norm_stderr": 0.014355911964767867 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4227941176470588, + "acc_stderr": 0.03000856284500348, + "acc_norm": 0.4227941176470588, + "acc_norm_stderr": 0.03000856284500348 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3510204081632653, + "acc_stderr": 0.030555316755573637, + "acc_norm": 0.3510204081632653, + "acc_norm_stderr": 0.030555316755573637 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.45569620253164556, + "acc_stderr": 0.03241920684693334, + "acc_norm": 0.45569620253164556, + "acc_norm_stderr": 0.03241920684693334 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2835723598435463, + "acc_stderr": 0.011511900775968302, + "acc_norm": 0.2835723598435463, + "acc_norm_stderr": 0.011511900775968302 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.03410785338904719, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.03410785338904719 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4121212121212121, + "acc_stderr": 0.03843566993588717, + "acc_norm": 
0.4121212121212121, + "acc_norm_stderr": 0.03843566993588717 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.29008567931456547, + "mc1_stderr": 0.01588623687420952, + "mc2": 0.459471439183592, + "mc2_stderr": 0.016149154578981872 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.345926800472255, + "acc_stderr": 0.016353853414347568, + "acc_norm": 0.3624557260920897, + "acc_norm_stderr": 0.016527131240453713 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + 
"harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "caisarl76/Mistral-7B-v0.1-orca_platy-1k-ep4", + "model_sha": "e3e91aad9d307bf43b516f95440a35a1db3e1c68", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/cepiloth/ko-en-llama2-13b-finetune-ex/result_2023-11-07 01:30:27.json b/cepiloth/ko-en-llama2-13b-finetune-ex/result_2023-11-07 01:30:27.json new file mode 100644 index 0000000000000000000000000000000000000000..7d73b0b554536b2c20f82b3a58ad8fac06147c22 --- /dev/null +++ b/cepiloth/ko-en-llama2-13b-finetune-ex/result_2023-11-07 01:30:27.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3660409556313993, + "acc_stderr": 0.014077223108470137, + "acc_norm": 0.4035836177474403, 
+ "acc_norm_stderr": 0.01433715891426844 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3948416650069707, + "acc_stderr": 0.004878176541703574, + "acc_norm": 0.5118502290380402, + "acc_norm_stderr": 0.004988379805261165 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4678362573099415, + "acc_stderr": 0.038268824176603704, + "acc_norm": 0.4678362573099415, + "acc_norm_stderr": 0.038268824176603704 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4077669902912621, + "acc_stderr": 0.048657775704107696, + "acc_norm": 0.4077669902912621, + "acc_norm_stderr": 0.048657775704107696 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.44189016602809705, + "acc_stderr": 0.017758800534214417, + "acc_norm": 0.44189016602809705, + "acc_norm_stderr": 0.017758800534214417 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3037037037037037, + "acc_stderr": 0.03972552884785139, + "acc_norm": 0.3037037037037037, + "acc_norm_stderr": 0.03972552884785139 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421255, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421255 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2553191489361702, + "acc_stderr": 0.02850485647051419, + "acc_norm": 0.2553191489361702, + "acc_norm_stderr": 0.02850485647051419 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.2891566265060241, + "acc_stderr": 0.03529486801511115, + "acc_norm": 0.2891566265060241, + "acc_norm_stderr": 0.03529486801511115 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.40836012861736337, + "acc_stderr": 0.02791705074848462, + "acc_norm": 0.40836012861736337, + "acc_norm_stderr": 0.02791705074848462 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4080717488789238, + "acc_stderr": 0.03298574607842822, + "acc_norm": 0.4080717488789238, + "acc_norm_stderr": 0.03298574607842822 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.35877862595419846, + "acc_stderr": 0.04206739313864908, + "acc_norm": 
0.35877862595419846, + "acc_norm_stderr": 0.04206739313864908 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.398989898989899, + "acc_stderr": 0.03488901616852731, + "acc_norm": 0.398989898989899, + "acc_norm_stderr": 0.03488901616852731 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.36551724137931035, + "acc_stderr": 0.040131241954243856, + "acc_norm": 0.36551724137931035, + "acc_norm_stderr": 0.040131241954243856 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.044405219061793275, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.044405219061793275 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3697478991596639, + "acc_stderr": 0.03135709599613591, + "acc_norm": 0.3697478991596639, + "acc_norm_stderr": 0.03135709599613591 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.34102564102564104, + "acc_stderr": 0.024035489676335065, + "acc_norm": 0.34102564102564104, + "acc_norm_stderr": 0.024035489676335065 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.41, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.41, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4351851851851852, + "acc_stderr": 0.04792898170907062, + "acc_norm": 0.4351851851851852, + "acc_norm_stderr": 0.04792898170907062 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.33497536945812806, + "acc_stderr": 0.033208527423483104, + "acc_norm": 0.33497536945812806, + "acc_norm_stderr": 0.033208527423483104 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3419354838709677, + "acc_stderr": 
0.02698528957655274, + "acc_norm": 0.3419354838709677, + "acc_norm_stderr": 0.02698528957655274 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5128205128205128, + "acc_stderr": 0.032745319388423504, + "acc_norm": 0.5128205128205128, + "acc_norm_stderr": 0.032745319388423504 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.39622641509433965, + "acc_stderr": 0.03010279378179119, + "acc_norm": 0.39622641509433965, + "acc_norm_stderr": 0.03010279378179119 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2818181818181818, + "acc_stderr": 0.043091187099464585, + "acc_norm": 0.2818181818181818, + "acc_norm_stderr": 0.043091187099464585 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.32592592592592595, + "acc_stderr": 0.028578348365473075, + "acc_norm": 0.32592592592592595, + "acc_norm_stderr": 0.028578348365473075 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.24503311258278146, + "acc_stderr": 0.035118075718047245, + "acc_norm": 0.24503311258278146, + "acc_norm_stderr": 0.035118075718047245 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.3681592039800995, + "acc_stderr": 0.03410410565495302, + "acc_norm": 0.3681592039800995, + "acc_norm_stderr": 0.03410410565495302 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2658959537572254, + "acc_stderr": 0.0336876293225943, + "acc_norm": 0.2658959537572254, + "acc_norm_stderr": 0.0336876293225943 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.29894179894179895, + "acc_stderr": 0.023577604791655805, + "acc_norm": 0.29894179894179895, + "acc_norm_stderr": 0.023577604791655805 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2847222222222222, + "acc_stderr": 0.03773809990686934, + "acc_norm": 0.2847222222222222, + "acc_norm_stderr": 0.03773809990686934 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + 
"harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.3583815028901734, + "acc_stderr": 0.0258167567915842, + "acc_norm": 0.3583815028901734, + "acc_norm_stderr": 0.0258167567915842 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3374233128834356, + "acc_stderr": 0.03714908409935574, + "acc_norm": 0.3374233128834356, + "acc_norm_stderr": 0.03714908409935574 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3734567901234568, + "acc_stderr": 0.026915003011380147, + "acc_norm": 0.3734567901234568, + "acc_norm_stderr": 0.026915003011380147 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.39896373056994816, + "acc_stderr": 0.03533999094065696, + "acc_norm": 0.39896373056994816, + "acc_norm_stderr": 0.03533999094065696 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.040969851398436716, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.040969851398436716 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3926605504587156, + "acc_stderr": 0.020937505161201093, + "acc_norm": 0.3926605504587156, + "acc_norm_stderr": 0.020937505161201093 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.037184890068181146, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.037184890068181146 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3562091503267974, + "acc_stderr": 0.027420477662629245, + "acc_norm": 0.3562091503267974, + "acc_norm_stderr": 0.027420477662629245 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, 
+ "harness|ko_mmlu_international_law|5": { + "acc": 0.48760330578512395, + "acc_stderr": 0.045629515481807666, + "acc_norm": 0.48760330578512395, + "acc_norm_stderr": 0.045629515481807666 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.39473684210526316, + "acc_stderr": 0.039777499346220734, + "acc_norm": 0.39473684210526316, + "acc_norm_stderr": 0.039777499346220734 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.017401816711427653, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.017401816711427653 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.26595744680851063, + "acc_stderr": 0.026358065698880592, + "acc_norm": 0.26595744680851063, + "acc_norm_stderr": 0.026358065698880592 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.1875, + "acc_stderr": 0.0370468111477387, + "acc_norm": 0.1875, + "acc_norm_stderr": 0.0370468111477387 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.0316746870682898, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.0316746870682898 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.28308823529411764, + "acc_stderr": 0.02736586113151381, + "acc_norm": 0.28308823529411764, + "acc_norm_stderr": 0.02736586113151381 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4122448979591837, + "acc_stderr": 0.03151236044674281, + "acc_norm": 0.4122448979591837, + 
"acc_norm_stderr": 0.03151236044674281 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.3881856540084388, + "acc_stderr": 0.031722950043323296, + "acc_norm": 0.3881856540084388, + "acc_norm_stderr": 0.031722950043323296 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.28552803129074317, + "acc_stderr": 0.011535751586665668, + "acc_norm": 0.28552803129074317, + "acc_norm_stderr": 0.011535751586665668 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.03166009679399811, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.03166009679399811 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.036810508691615486, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.036810508691615486 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3047735618115055, + "mc1_stderr": 0.016114124156882466, + "mc2": 0.48334405699140953, + "mc2_stderr": 0.015932530840786423 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.33884297520661155, + "acc_stderr": 0.01627295299701912, + "acc_norm": 0.3789846517119244, + "acc_norm_stderr": 0.016679260684229282 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + 
"harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "cepiloth/ko-en-llama2-13b-finetune-ex", + 
"model_sha": "ee6a38bb61742af106567d743b3d87458a303f60", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/cepiloth/ko-en-llama2-13b-finetune/result_2023-11-03 07:39:57.json b/cepiloth/ko-en-llama2-13b-finetune/result_2023-11-03 07:39:57.json new file mode 100644 index 0000000000000000000000000000000000000000..c62c614f2db27dc6b23abf0be1b742a3ed47277d --- /dev/null +++ b/cepiloth/ko-en-llama2-13b-finetune/result_2023-11-03 07:39:57.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.37627986348122866, + "acc_stderr": 0.014157022555407163, + "acc_norm": 0.4274744027303754, + "acc_norm_stderr": 0.014456862944650649 + }, + "harness|ko_hellaswag|10": { + "acc": 0.40310695080661224, + "acc_stderr": 0.0048951941438926784, + "acc_norm": 0.536247759410476, + "acc_norm_stderr": 0.004976651989757641 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5321637426900585, + "acc_stderr": 0.03826882417660368, + "acc_norm": 0.5321637426900585, + "acc_norm_stderr": 0.03826882417660368 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.47572815533980584, + "acc_stderr": 0.049449010929737795, + "acc_norm": 0.47572815533980584, + "acc_norm_stderr": 0.049449010929737795 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5006385696040868, + "acc_stderr": 0.01787994891443169, + "acc_norm": 0.5006385696040868, + "acc_norm_stderr": 0.01787994891443169 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.04244633238353229, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.04244633238353229 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.35319148936170214, + "acc_stderr": 
0.03124532520276193, + "acc_norm": 0.35319148936170214, + "acc_norm_stderr": 0.03124532520276193 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3674698795180723, + "acc_stderr": 0.03753267402120574, + "acc_norm": 0.3674698795180723, + "acc_norm_stderr": 0.03753267402120574 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.43086816720257237, + "acc_stderr": 0.02812534098397271, + "acc_norm": 0.43086816720257237, + "acc_norm_stderr": 0.02812534098397271 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4170403587443946, + "acc_stderr": 0.03309266936071721, + "acc_norm": 0.4170403587443946, + "acc_norm_stderr": 0.03309266936071721 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48854961832061067, + "acc_stderr": 0.043841400240780176, + "acc_norm": 0.48854961832061067, + "acc_norm_stderr": 0.043841400240780176 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.4797979797979798, + "acc_stderr": 0.03559443565563919, + "acc_norm": 0.4797979797979798, + "acc_norm_stderr": 0.03559443565563919 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4482758620689655, + "acc_stderr": 0.04144311810878151, + "acc_norm": 0.4482758620689655, + "acc_norm_stderr": 0.04144311810878151 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237655, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237655 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.032145368597886394, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.032145368597886394 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4307692307692308, + "acc_stderr": 0.02510682066053975, + "acc_norm": 0.4307692307692308, + "acc_norm_stderr": 0.02510682066053975 + }, + 
"harness|ko_mmlu_computer_security|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.35960591133004927, + "acc_stderr": 0.03376458246509567, + "acc_norm": 0.35960591133004927, + "acc_norm_stderr": 0.03376458246509567 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4258064516129032, + "acc_stderr": 0.028129112709165894, + "acc_norm": 0.4258064516129032, + "acc_norm_stderr": 0.028129112709165894 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6239316239316239, + "acc_stderr": 0.03173393632969481, + "acc_norm": 0.6239316239316239, + "acc_norm_stderr": 0.03173393632969481 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.43018867924528303, + "acc_stderr": 0.030471445867183235, + "acc_norm": 0.43018867924528303, + "acc_norm_stderr": 0.030471445867183235 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4636363636363636, + "acc_stderr": 0.04776449162396197, + "acc_norm": 0.4636363636363636, + "acc_norm_stderr": 0.04776449162396197 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2740740740740741, + "acc_stderr": 0.027195934804085626, + "acc_norm": 0.2740740740740741, + "acc_norm_stderr": 0.027195934804085626 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.271523178807947, + "acc_stderr": 0.03631329803969654, + "acc_norm": 0.271523178807947, + "acc_norm_stderr": 0.03631329803969654 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5572139303482587, + "acc_stderr": 0.03512310964123936, + "acc_norm": 0.5572139303482587, + "acc_norm_stderr": 0.03512310964123936 + }, + 
"harness|ko_mmlu_college_medicine|5": { + "acc": 0.34104046242774566, + "acc_stderr": 0.03614665424180826, + "acc_norm": 0.34104046242774566, + "acc_norm_stderr": 0.03614665424180826 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30423280423280424, + "acc_stderr": 0.023695415009463087, + "acc_norm": 0.30423280423280424, + "acc_norm_stderr": 0.023695415009463087 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3263888888888889, + "acc_stderr": 0.03921067198982266, + "acc_norm": 0.3263888888888889, + "acc_norm_stderr": 0.03921067198982266 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.63, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.63, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4277456647398844, + "acc_stderr": 0.026636539741116076, + "acc_norm": 0.4277456647398844, + "acc_norm_stderr": 0.026636539741116076 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.37423312883435583, + "acc_stderr": 0.03802068102899616, + "acc_norm": 0.37423312883435583, + "acc_norm_stderr": 0.03802068102899616 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.44135802469135804, + "acc_stderr": 0.02762873715566878, + "acc_norm": 0.44135802469135804, + "acc_norm_stderr": 0.02762873715566878 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.44041450777202074, + "acc_stderr": 0.035827245300360945, + "acc_norm": 0.44041450777202074, + "acc_norm_stderr": 0.035827245300360945 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.22807017543859648, + "acc_stderr": 0.03947152782669415, + "acc_norm": 0.22807017543859648, + "acc_norm_stderr": 
0.03947152782669415 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.42385321100917434, + "acc_stderr": 0.021187263209087533, + "acc_norm": 0.42385321100917434, + "acc_norm_stderr": 0.021187263209087533 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.039325376803928704, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.039325376803928704 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.40522875816993464, + "acc_stderr": 0.02811092849280907, + "acc_norm": 0.40522875816993464, + "acc_norm_stderr": 0.02811092849280907 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5619834710743802, + "acc_stderr": 0.04529146804435792, + "acc_norm": 0.5619834710743802, + "acc_norm_stderr": 0.04529146804435792 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4407894736842105, + "acc_stderr": 0.04040311062490436, + "acc_norm": 0.4407894736842105, + "acc_norm_stderr": 0.04040311062490436 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.32516339869281047, + "acc_stderr": 0.01895088677080631, + "acc_norm": 0.32516339869281047, + "acc_norm_stderr": 0.01895088677080631 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.31560283687943264, + "acc_stderr": 0.027724989449509317, + "acc_norm": 0.31560283687943264, + "acc_norm_stderr": 0.027724989449509317 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.21428571428571427, + "acc_stderr": 0.03894641120044793, + "acc_norm": 0.21428571428571427, + "acc_norm_stderr": 0.03894641120044793 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.28703703703703703, + "acc_stderr": 0.03085199299325701, + "acc_norm": 0.28703703703703703, + "acc_norm_stderr": 0.03085199299325701 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24022346368715083, + "acc_stderr": 
0.014288343803925295, + "acc_norm": 0.24022346368715083, + "acc_norm_stderr": 0.014288343803925295 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.23161764705882354, + "acc_stderr": 0.025626533803777562, + "acc_norm": 0.23161764705882354, + "acc_norm_stderr": 0.025626533803777562 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4489795918367347, + "acc_stderr": 0.03184213866687579, + "acc_norm": 0.4489795918367347, + "acc_norm_stderr": 0.03184213866687579 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.46835443037974683, + "acc_stderr": 0.03248197400511075, + "acc_norm": 0.46835443037974683, + "acc_norm_stderr": 0.03248197400511075 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2920469361147327, + "acc_stderr": 0.011613349136271824, + "acc_norm": 0.2920469361147327, + "acc_norm_stderr": 0.011613349136271824 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.39705882352941174, + "acc_stderr": 0.03434131164719129, + "acc_norm": 0.39705882352941174, + "acc_norm_stderr": 0.03434131164719129 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.47878787878787876, + "acc_stderr": 0.03900828913737301, + "acc_norm": 0.47878787878787876, + "acc_norm_stderr": 0.03900828913737301 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2864137086903305, + "mc1_stderr": 0.015826142439502342, + "mc2": 0.45610675413247587, + "mc2_stderr": 0.01508637089874796 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.41440377804014167, + "acc_stderr": 0.016936583383943625, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.017119172208061504 + } + }, + "versions": { + "all": 
0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + 
"harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "cepiloth/ko-en-llama2-13b-finetune", + "model_sha": "966347fa24706fb7265c1967e3212504ad0f32da", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/cepiloth/ko-llama2-13b-finetune-ex/result_2023-11-02 09:31:28.json b/cepiloth/ko-llama2-13b-finetune-ex/result_2023-11-02 09:31:28.json new file mode 100644 index 0000000000000000000000000000000000000000..12dbc0f8fce70a85c20744eeb64786266662f87f --- /dev/null +++ b/cepiloth/ko-llama2-13b-finetune-ex/result_2023-11-02 09:31:28.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.30802047781569963, + "acc_stderr": 0.01349142951729204, + "acc_norm": 0.36945392491467577, + "acc_norm_stderr": 0.014104578366491904 + }, + "harness|ko_hellaswag|10": { + "acc": 0.35620394343756223, + "acc_stderr": 0.004778978031389642, + "acc_norm": 0.45488946425014937, + "acc_norm_stderr": 0.004969431900874307 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4093567251461988, + "acc_stderr": 0.03771283107626544, + "acc_norm": 0.4093567251461988, + "acc_norm_stderr": 0.03771283107626544 + }, + 
"harness|ko_mmlu_management|5": { + "acc": 0.49514563106796117, + "acc_stderr": 0.049505043821289195, + "acc_norm": 0.49514563106796117, + "acc_norm_stderr": 0.049505043821289195 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.44699872286079184, + "acc_stderr": 0.017779225233394213, + "acc_norm": 0.44699872286079184, + "acc_norm_stderr": 0.017779225233394213 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34074074074074073, + "acc_stderr": 0.040943762699967946, + "acc_norm": 0.34074074074074073, + "acc_norm_stderr": 0.040943762699967946 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206824, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206824 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39574468085106385, + "acc_stderr": 0.03196758697835362, + "acc_norm": 0.39574468085106385, + "acc_norm_stderr": 0.03196758697835362 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3313253012048193, + "acc_stderr": 0.03664314777288088, + "acc_norm": 0.3313253012048193, + "acc_norm_stderr": 0.03664314777288088 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.36012861736334406, + "acc_stderr": 0.027264297599804012, + "acc_norm": 0.36012861736334406, + "acc_norm_stderr": 0.027264297599804012 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.336322869955157, + "acc_stderr": 0.031708824268455, + "acc_norm": 0.336322869955157, + "acc_norm_stderr": 0.031708824268455 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.45038167938931295, + "acc_stderr": 0.04363643698524779, + "acc_norm": 0.45038167938931295, + "acc_norm_stderr": 0.04363643698524779 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.4898989898989899, + "acc_stderr": 0.035616254886737454, + "acc_norm": 0.4898989898989899, + "acc_norm_stderr": 0.035616254886737454 + }, 
+ "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.36551724137931035, + "acc_stderr": 0.040131241954243856, + "acc_norm": 0.36551724137931035, + "acc_norm_stderr": 0.040131241954243856 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.042801058373643966, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.042801058373643966 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.42016806722689076, + "acc_stderr": 0.03206183783236152, + "acc_norm": 0.42016806722689076, + "acc_norm_stderr": 0.03206183783236152 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3769230769230769, + "acc_stderr": 0.024570975364225995, + "acc_norm": 0.3769230769230769, + "acc_norm_stderr": 0.024570975364225995 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.45, + "acc_stderr": 0.049999999999999996, + "acc_norm": 0.45, + "acc_norm_stderr": 0.049999999999999996 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4537037037037037, + "acc_stderr": 0.048129173245368216, + "acc_norm": 0.4537037037037037, + "acc_norm_stderr": 0.048129173245368216 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.30049261083743845, + "acc_stderr": 0.03225799476233485, + "acc_norm": 0.30049261083743845, + "acc_norm_stderr": 0.03225799476233485 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.44193548387096776, + "acc_stderr": 0.028251557906849738, + "acc_norm": 0.44193548387096776, + "acc_norm_stderr": 0.028251557906849738 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.594017094017094, + "acc_stderr": 0.03217180182641086, + "acc_norm": 0.594017094017094, + "acc_norm_stderr": 0.03217180182641086 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4226415094339623, + "acc_stderr": 0.030402331445769537, + "acc_norm": 
0.4226415094339623, + "acc_norm_stderr": 0.030402331445769537 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.44545454545454544, + "acc_stderr": 0.047605488214603246, + "acc_norm": 0.44545454545454544, + "acc_norm_stderr": 0.047605488214603246 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.026842057873833706, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.026842057873833706 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.037804458505267334, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.037804458505267334 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.48756218905472637, + "acc_stderr": 0.03534439848539579, + "acc_norm": 0.48756218905472637, + "acc_norm_stderr": 0.03534439848539579 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.41040462427745666, + "acc_stderr": 0.03750757044895537, + "acc_norm": 0.41040462427745666, + "acc_norm_stderr": 0.03750757044895537 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.0242785680243077, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.0242785680243077 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3472222222222222, + "acc_stderr": 0.039812405437178615, + "acc_norm": 0.3472222222222222, + "acc_norm_stderr": 0.039812405437178615 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.43641618497109824, + "acc_stderr": 0.026700545424943677, + "acc_norm": 0.43641618497109824, + "acc_norm_stderr": 0.026700545424943677 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.38650306748466257, + 
"acc_stderr": 0.03825825548848607, + "acc_norm": 0.38650306748466257, + "acc_norm_stderr": 0.03825825548848607 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.36728395061728397, + "acc_stderr": 0.026822801759507887, + "acc_norm": 0.36728395061728397, + "acc_norm_stderr": 0.026822801759507887 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.40414507772020725, + "acc_stderr": 0.035415085788840193, + "acc_norm": 0.40414507772020725, + "acc_norm_stderr": 0.035415085788840193 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.22807017543859648, + "acc_stderr": 0.03947152782669415, + "acc_norm": 0.22807017543859648, + "acc_norm_stderr": 0.03947152782669415 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.41834862385321103, + "acc_stderr": 0.02114954859644388, + "acc_norm": 0.41834862385321103, + "acc_norm_stderr": 0.02114954859644388 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.04040610178208841, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.04040610178208841 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4019607843137255, + "acc_stderr": 0.02807415894760066, + "acc_norm": 0.4019607843137255, + "acc_norm_stderr": 0.02807415894760066 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5702479338842975, + "acc_stderr": 0.04519082021319773, + "acc_norm": 0.5702479338842975, + "acc_norm_stderr": 0.04519082021319773 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3355263157894737, + "acc_stderr": 0.038424985593952694, + "acc_norm": 0.3355263157894737, + "acc_norm_stderr": 0.038424985593952694 + }, + "harness|ko_mmlu_professional_psychology|5": { 
+ "acc": 0.28431372549019607, + "acc_stderr": 0.018249024411207668, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.018249024411207668 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2695035460992908, + "acc_stderr": 0.026469036818590634, + "acc_norm": 0.2695035460992908, + "acc_norm_stderr": 0.026469036818590634 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.26785714285714285, + "acc_stderr": 0.04203277291467764, + "acc_norm": 0.26785714285714285, + "acc_norm_stderr": 0.04203277291467764 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.03256850570293647, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.03256850570293647 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.35294117647058826, + "acc_stderr": 0.02902942281568141, + "acc_norm": 0.35294117647058826, + "acc_norm_stderr": 0.02902942281568141 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4163265306122449, + "acc_stderr": 0.03155782816556164, + "acc_norm": 0.4163265306122449, + "acc_norm_stderr": 0.03155782816556164 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.33755274261603374, + "acc_stderr": 0.03078154910202622, + "acc_norm": 0.33755274261603374, + "acc_norm_stderr": 0.03078154910202622 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.28748370273794005, + "acc_stderr": 0.011559337355708505, + "acc_norm": 0.28748370273794005, + 
"acc_norm_stderr": 0.011559337355708505 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.36764705882352944, + "acc_stderr": 0.03384132045674118, + "acc_norm": 0.36764705882352944, + "acc_norm_stderr": 0.03384132045674118 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.37575757575757573, + "acc_stderr": 0.037818873532059816, + "acc_norm": 0.37575757575757573, + "acc_norm_stderr": 0.037818873532059816 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.29498164014687883, + "mc1_stderr": 0.015964400965589685, + "mc2": 0.4639200463938291, + "mc2_stderr": 0.015440957243862982 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3612750885478158, + "acc_stderr": 0.016515463022412, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.017014038119297473 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "cepiloth/ko-llama2-13b-finetune-ex", + "model_sha": "f1dcbe9a1ff2ea479a2094f5058226f796341bfd", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/cepiloth/ko-llama2-13b-finetune/result_2023-11-01 09:20:47.json b/cepiloth/ko-llama2-13b-finetune/result_2023-11-01 09:20:47.json new 
file mode 100644 index 0000000000000000000000000000000000000000..5b16010eff13931b9c3f1a1447fdbf0f52e10749 --- /dev/null +++ b/cepiloth/ko-llama2-13b-finetune/result_2023-11-01 09:20:47.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3165529010238908, + "acc_stderr": 0.01359243151906808, + "acc_norm": 0.38310580204778155, + "acc_norm_stderr": 0.014206472661672876 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3536148177653854, + "acc_stderr": 0.004771143074426131, + "acc_norm": 0.45359490141406095, + "acc_norm_stderr": 0.004968244611429387 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4269005847953216, + "acc_stderr": 0.03793620616529917, + "acc_norm": 0.4269005847953216, + "acc_norm_stderr": 0.03793620616529917 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5631067961165048, + "acc_stderr": 0.04911147107365777, + "acc_norm": 0.5631067961165048, + "acc_norm_stderr": 0.04911147107365777 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4482758620689655, + "acc_stderr": 0.017784034534992454, + "acc_norm": 0.4482758620689655, + "acc_norm_stderr": 0.017784034534992454 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.28888888888888886, + "acc_stderr": 0.039154506304142495, + "acc_norm": 0.28888888888888886, + "acc_norm_stderr": 0.039154506304142495 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.425531914893617, + "acc_stderr": 0.03232146916224469, + "acc_norm": 0.425531914893617, + "acc_norm_stderr": 0.03232146916224469 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3253012048192771, + "acc_stderr": 0.03647168523683227, + "acc_norm": 0.3253012048192771, + "acc_norm_stderr": 0.03647168523683227 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3858520900321543, + "acc_stderr": 0.027648149599751464, + "acc_norm": 0.3858520900321543, + 
"acc_norm_stderr": 0.027648149599751464 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.336322869955157, + "acc_stderr": 0.031708824268455, + "acc_norm": 0.336322869955157, + "acc_norm_stderr": 0.031708824268455 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4580152671755725, + "acc_stderr": 0.04369802690578756, + "acc_norm": 0.4580152671755725, + "acc_norm_stderr": 0.04369802690578756 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5202020202020202, + "acc_stderr": 0.035594435655639196, + "acc_norm": 0.5202020202020202, + "acc_norm_stderr": 0.035594435655639196 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.36551724137931035, + "acc_stderr": 0.04013124195424386, + "acc_norm": 0.36551724137931035, + "acc_norm_stderr": 0.04013124195424386 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.04158307533083286, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.04158307533083286 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.42016806722689076, + "acc_stderr": 0.03206183783236153, + "acc_norm": 0.42016806722689076, + "acc_norm_stderr": 0.03206183783236153 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3769230769230769, + "acc_stderr": 0.024570975364225995, + "acc_norm": 0.3769230769230769, + "acc_norm_stderr": 0.024570975364225995 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 
0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.33004926108374383, + "acc_stderr": 0.03308530426228258, + "acc_norm": 0.33004926108374383, + "acc_norm_stderr": 0.03308530426228258 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4258064516129032, + "acc_stderr": 0.028129112709165894, + "acc_norm": 0.4258064516129032, + "acc_norm_stderr": 0.028129112709165894 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5641025641025641, + "acc_stderr": 0.032485775115784, + "acc_norm": 0.5641025641025641, + "acc_norm_stderr": 0.032485775115784 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3849056603773585, + "acc_stderr": 0.029946498567699945, + "acc_norm": 0.3849056603773585, + "acc_norm_stderr": 0.029946498567699945 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.41818181818181815, + "acc_stderr": 0.0472457740573157, + "acc_norm": 0.41818181818181815, + "acc_norm_stderr": 0.0472457740573157 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24814814814814815, + "acc_stderr": 0.0263357394040558, + "acc_norm": 0.24814814814814815, + "acc_norm_stderr": 0.0263357394040558 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.03802039760107903, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.03802039760107903 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5174129353233831, + "acc_stderr": 0.03533389234739245, + "acc_norm": 0.5174129353233831, + "acc_norm_stderr": 0.03533389234739245 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.36416184971098264, + "acc_stderr": 0.03669072477416907, + "acc_norm": 0.36416184971098264, + "acc_norm_stderr": 0.03669072477416907 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2724867724867725, + "acc_stderr": 0.022930973071633335, + "acc_norm": 0.2724867724867725, + "acc_norm_stderr": 0.022930973071633335 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3055555555555556, + 
"acc_stderr": 0.03852084696008534, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.03852084696008534 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.3959537572254335, + "acc_stderr": 0.026329813341946253, + "acc_norm": 0.3959537572254335, + "acc_norm_stderr": 0.026329813341946253 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3987730061349693, + "acc_stderr": 0.03847021420456023, + "acc_norm": 0.3987730061349693, + "acc_norm_stderr": 0.03847021420456023 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3549382716049383, + "acc_stderr": 0.02662415247884585, + "acc_norm": 0.3549382716049383, + "acc_norm_stderr": 0.02662415247884585 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.43523316062176165, + "acc_stderr": 0.03578038165008586, + "acc_norm": 0.43523316062176165, + "acc_norm_stderr": 0.03578038165008586 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.040493392977481404, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.040493392977481404 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.42201834862385323, + "acc_stderr": 0.021174991407763178, + "acc_norm": 0.42201834862385323, + "acc_norm_stderr": 0.021174991407763178 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.0404061017820884, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.0404061017820884 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 
0.41830065359477125, + "acc_stderr": 0.02824513402438729, + "acc_norm": 0.41830065359477125, + "acc_norm_stderr": 0.02824513402438729 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6115702479338843, + "acc_stderr": 0.04449270350068383, + "acc_norm": 0.6115702479338843, + "acc_norm_stderr": 0.04449270350068383 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3815789473684211, + "acc_stderr": 0.03953173377749194, + "acc_norm": 0.3815789473684211, + "acc_norm_stderr": 0.03953173377749194 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.018433427649401896, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.018433427649401896 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2553191489361702, + "acc_stderr": 0.026011992930902002, + "acc_norm": 0.2553191489361702, + "acc_norm_stderr": 0.026011992930902002 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25892857142857145, + "acc_stderr": 0.041577515398656284, + "acc_norm": 0.25892857142857145, + "acc_norm_stderr": 0.041577515398656284 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.38425925925925924, + "acc_stderr": 0.03317354514310742, + "acc_norm": 0.38425925925925924, + "acc_norm_stderr": 0.03317354514310742 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + 
"harness|ko_mmlu_professional_medicine|5": { + "acc": 0.30514705882352944, + "acc_stderr": 0.0279715413701706, + "acc_norm": 0.30514705882352944, + "acc_norm_stderr": 0.0279715413701706 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3877551020408163, + "acc_stderr": 0.03119223072679566, + "acc_norm": 0.3877551020408163, + "acc_norm_stderr": 0.03119223072679566 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.33755274261603374, + "acc_stderr": 0.03078154910202622, + "acc_norm": 0.33755274261603374, + "acc_norm_stderr": 0.03078154910202622 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.27249022164276404, + "acc_stderr": 0.011371658294311532, + "acc_norm": 0.27249022164276404, + "acc_norm_stderr": 0.011371658294311532 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.03308611113236434, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.03308611113236434 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3696969696969697, + "acc_stderr": 0.03769430314512566, + "acc_norm": 0.3696969696969697, + "acc_norm_stderr": 0.03769430314512566 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3023255813953488, + "mc1_stderr": 0.016077509266133033, + "mc2": 0.46645373213159264, + "mc2_stderr": 0.015378490920195349 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.33884297520661155, + "acc_stderr": 0.016272952997019124, + "acc_norm": 0.4014167650531287, + "acc_norm_stderr": 0.01685290785872906 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + 
"harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + 
"harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "cepiloth/ko-llama2-13b-finetune", + "model_sha": "15f8932879b2e7880baf3402b1a150f9ff36d370", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/cepiloth/ko-llama2-finetune-ex2/result_2023-10-26 09:22:05.json b/cepiloth/ko-llama2-finetune-ex2/result_2023-10-26 09:22:05.json new file mode 100644 index 0000000000000000000000000000000000000000..8cdc9afaa6a3b72af8afc8c188a6a425551c22d3 --- /dev/null +++ b/cepiloth/ko-llama2-finetune-ex2/result_2023-10-26 09:22:05.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2764505119453925, + "acc_stderr": 0.013069662474252425, + "acc_norm": 0.3216723549488055, + "acc_norm_stderr": 0.013650488084494162 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3296156144194384, + "acc_stderr": 0.004691128722535481, + "acc_norm": 0.4091814379605656, + "acc_norm_stderr": 0.004906779523192671 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4152046783625731, + "acc_stderr": 0.03779275945503201, + "acc_norm": 0.4152046783625731, + "acc_norm_stderr": 0.03779275945503201 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.27184466019417475, + "acc_stderr": 0.044052680241409216, + "acc_norm": 0.27184466019417475, + "acc_norm_stderr": 0.044052680241409216 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.39080459770114945, + "acc_stderr": 0.01744836606706253, + "acc_norm": 0.39080459770114945, + "acc_norm_stderr": 0.01744836606706253 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.03944624162501117, + 
"acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.03944624162501117 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720683, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720683 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3446808510638298, + "acc_stderr": 0.031068985963122145, + "acc_norm": 0.3446808510638298, + "acc_norm_stderr": 0.031068985963122145 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.26506024096385544, + "acc_stderr": 0.03436024037944967, + "acc_norm": 0.26506024096385544, + "acc_norm_stderr": 0.03436024037944967 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.37942122186495175, + "acc_stderr": 0.02755994980234782, + "acc_norm": 0.37942122186495175, + "acc_norm_stderr": 0.02755994980234782 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.34977578475336324, + "acc_stderr": 0.03200736719484503, + "acc_norm": 0.34977578475336324, + "acc_norm_stderr": 0.03200736719484503 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.35877862595419846, + "acc_stderr": 0.04206739313864908, + "acc_norm": 0.35877862595419846, + "acc_norm_stderr": 0.04206739313864908 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542129, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542129 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.29797979797979796, + "acc_stderr": 0.03258630383836556, + "acc_norm": 0.29797979797979796, + "acc_norm_stderr": 0.03258630383836556 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3103448275862069, + "acc_stderr": 0.038552896163789464, + "acc_norm": 0.3103448275862069, + "acc_norm_stderr": 0.038552896163789464 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.041583075330832865, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.041583075330832865 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.35294117647058826, + 
"acc_stderr": 0.03104194130405927, + "acc_norm": 0.35294117647058826, + "acc_norm_stderr": 0.03104194130405927 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.26153846153846155, + "acc_stderr": 0.022282141204204426, + "acc_norm": 0.26153846153846155, + "acc_norm_stderr": 0.022282141204204426 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.04766075165356461, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.04766075165356461 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.31527093596059114, + "acc_stderr": 0.03269080871970187, + "acc_norm": 0.31527093596059114, + "acc_norm_stderr": 0.03269080871970187 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.36451612903225805, + "acc_stderr": 0.027379871229943245, + "acc_norm": 0.36451612903225805, + "acc_norm_stderr": 0.027379871229943245 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.4700854700854701, + "acc_stderr": 0.03269741106812443, + "acc_norm": 0.4700854700854701, + "acc_norm_stderr": 0.03269741106812443 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.32452830188679244, + "acc_stderr": 0.02881561571343211, + "acc_norm": 0.32452830188679244, + "acc_norm_stderr": 0.02881561571343211 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.32727272727272727, + "acc_stderr": 0.04494290866252088, + "acc_norm": 0.32727272727272727, + "acc_norm_stderr": 0.04494290866252088 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3111111111111111, + "acc_stderr": 0.028226446749683522, + "acc_norm": 0.3111111111111111, + "acc_norm_stderr": 0.028226446749683522 + }, + 
"harness|ko_mmlu_high_school_physics|5": { + "acc": 0.271523178807947, + "acc_stderr": 0.03631329803969653, + "acc_norm": 0.271523178807947, + "acc_norm_stderr": 0.03631329803969653 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.43283582089552236, + "acc_stderr": 0.03503490923673281, + "acc_norm": 0.43283582089552236, + "acc_norm_stderr": 0.03503490923673281 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2543352601156069, + "acc_stderr": 0.0332055644308557, + "acc_norm": 0.2543352601156069, + "acc_norm_stderr": 0.0332055644308557 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.022644212615525218, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.022644212615525218 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3194444444444444, + "acc_stderr": 0.03899073687357335, + "acc_norm": 0.3194444444444444, + "acc_norm_stderr": 0.03899073687357335 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.3468208092485549, + "acc_stderr": 0.025624723994030457, + "acc_norm": 0.3468208092485549, + "acc_norm_stderr": 0.025624723994030457 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.31901840490797545, + "acc_stderr": 0.03661997551073836, + "acc_norm": 0.31901840490797545, + "acc_norm_stderr": 0.03661997551073836 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.02712511551316686, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.02712511551316686 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + 
"harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.3316062176165803, + "acc_stderr": 0.03397636541089117, + "acc_norm": 0.3316062176165803, + "acc_norm_stderr": 0.03397636541089117 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.04185774424022056, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.04185774424022056 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.30275229357798167, + "acc_stderr": 0.019698711434756357, + "acc_norm": 0.30275229357798167, + "acc_norm_stderr": 0.019698711434756357 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.23015873015873015, + "acc_stderr": 0.037649508797906066, + "acc_norm": 0.23015873015873015, + "acc_norm_stderr": 0.037649508797906066 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3431372549019608, + "acc_stderr": 0.027184498909941613, + "acc_norm": 0.3431372549019608, + "acc_norm_stderr": 0.027184498909941613 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5289256198347108, + "acc_stderr": 0.04556710331269498, + "acc_norm": 0.5289256198347108, + "acc_norm_stderr": 0.04556710331269498 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.2894736842105263, + "acc_stderr": 0.036906779861372814, + "acc_norm": 0.2894736842105263, + "acc_norm_stderr": 0.036906779861372814 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.28921568627450983, + "acc_stderr": 0.018342529845275915, + "acc_norm": 0.28921568627450983, + "acc_norm_stderr": 0.018342529845275915 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2624113475177305, + "acc_stderr": 0.02624492034984302, + "acc_norm": 0.2624113475177305, + "acc_norm_stderr": 0.02624492034984302 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.04287858751340455, + 
"acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.04287858751340455 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.031141447823536027, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.031141447823536027 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24804469273743016, + "acc_stderr": 0.014444157808261446, + "acc_norm": 0.24804469273743016, + "acc_norm_stderr": 0.014444157808261446 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.22426470588235295, + "acc_stderr": 0.02533684856333236, + "acc_norm": 0.22426470588235295, + "acc_norm_stderr": 0.02533684856333236 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.39183673469387753, + "acc_stderr": 0.03125127591089165, + "acc_norm": 0.39183673469387753, + "acc_norm_stderr": 0.03125127591089165 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.350210970464135, + "acc_stderr": 0.031052391937584353, + "acc_norm": 0.350210970464135, + "acc_norm_stderr": 0.031052391937584353 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.25945241199478486, + "acc_stderr": 0.011195262076350314, + "acc_norm": 0.25945241199478486, + "acc_norm_stderr": 0.011195262076350314 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.30392156862745096, + "acc_stderr": 0.03228210387037892, + "acc_norm": 0.30392156862745096, + "acc_norm_stderr": 0.03228210387037892 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.32727272727272727, + "acc_stderr": 0.03663974994391242, + "acc_norm": 0.32727272727272727, + "acc_norm_stderr": 0.03663974994391242 + }, + 
"harness|ko_truthfulqa_mc|0": { + "mc1": 0.28886168910648713, + "mc1_stderr": 0.015866346401384304, + "mc2": 0.4547120708605401, + "mc2_stderr": 0.015426627135169792 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2632821723730815, + "acc_stderr": 0.015141752199573205, + "acc_norm": 0.3530106257378985, + "acc_norm_stderr": 0.016430745982427126 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "cepiloth/ko-llama2-finetune-ex2", + "model_sha": "ab3114ee91616a692eee5bfa8e238f6f821e89b8", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/cepiloth/ko-llama2-finetune-ex3/result_2023-10-31 06:39:25.json b/cepiloth/ko-llama2-finetune-ex3/result_2023-10-31 06:39:25.json new file mode 100644 index 0000000000000000000000000000000000000000..3f686bf802830bf8fd6db64c7835200463e5c110 --- /dev/null +++ b/cepiloth/ko-llama2-finetune-ex3/result_2023-10-31 06:39:25.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2764505119453925, + "acc_stderr": 0.013069662474252428, + "acc_norm": 0.33532423208191126, + "acc_norm_stderr": 0.013796182947785562 + }, + "harness|ko_hellaswag|10": { + "acc": 0.33031268671579367, + 
"acc_stderr": 0.004693644357202052, + "acc_norm": 0.41147181836287594, + "acc_norm_stderr": 0.004910946424771612 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4152046783625731, + "acc_stderr": 0.03779275945503201, + "acc_norm": 0.4152046783625731, + "acc_norm_stderr": 0.03779275945503201 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.27184466019417475, + "acc_stderr": 0.044052680241409216, + "acc_norm": 0.27184466019417475, + "acc_norm_stderr": 0.044052680241409216 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.36015325670498083, + "acc_stderr": 0.017166362471369306, + "acc_norm": 0.36015325670498083, + "acc_norm_stderr": 0.017166362471369306 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04072314811876837, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04072314811876837 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206824, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206824 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.33191489361702126, + "acc_stderr": 0.030783736757745667, + "acc_norm": 0.33191489361702126, + "acc_norm_stderr": 0.030783736757745667 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.2710843373493976, + "acc_stderr": 0.03460579907553026, + "acc_norm": 0.2710843373493976, + "acc_norm_stderr": 0.03460579907553026 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2990353697749196, + "acc_stderr": 0.02600330111788513, + "acc_norm": 0.2990353697749196, + "acc_norm_stderr": 0.02600330111788513 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.30493273542600896, + "acc_stderr": 0.030898610882477515, + "acc_norm": 0.30493273542600896, + "acc_norm_stderr": 0.030898610882477515 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.32061068702290074, + "acc_stderr": 0.040933292298342784, + "acc_norm": 0.32061068702290074, + "acc_norm_stderr": 0.040933292298342784 + }, + "harness|ko_mmlu_medical_genetics|5": { + 
"acc": 0.23, + "acc_stderr": 0.042295258468165085, + "acc_norm": 0.23, + "acc_norm_stderr": 0.042295258468165085 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.03358618145732524, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.03358618145732524 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2827586206896552, + "acc_stderr": 0.03752833958003337, + "acc_norm": 0.2827586206896552, + "acc_norm_stderr": 0.03752833958003337 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.19607843137254902, + "acc_stderr": 0.03950581861179963, + "acc_norm": 0.19607843137254902, + "acc_norm_stderr": 0.03950581861179963 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.29831932773109243, + "acc_stderr": 0.029719142876342853, + "acc_norm": 0.29831932773109243, + "acc_norm_stderr": 0.029719142876342853 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.23333333333333334, + "acc_stderr": 0.021444547301560465, + "acc_norm": 0.23333333333333334, + "acc_norm_stderr": 0.021444547301560465 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.04616631111801713, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.04616631111801713 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3054187192118227, + "acc_stderr": 0.03240661565868408, + "acc_norm": 0.3054187192118227, + "acc_norm_stderr": 0.03240661565868408 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2806451612903226, + "acc_stderr": 0.025560604721022895, + "acc_norm": 0.2806451612903226, + "acc_norm_stderr": 0.025560604721022895 + }, + 
"harness|ko_mmlu_marketing|5": { + "acc": 0.4017094017094017, + "acc_stderr": 0.03211693751051621, + "acc_norm": 0.4017094017094017, + "acc_norm_stderr": 0.03211693751051621 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2830188679245283, + "acc_stderr": 0.027724236492700897, + "acc_norm": 0.2830188679245283, + "acc_norm_stderr": 0.027724236492700897 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.36363636363636365, + "acc_stderr": 0.04607582090719976, + "acc_norm": 0.36363636363636365, + "acc_norm_stderr": 0.04607582090719976 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2740740740740741, + "acc_stderr": 0.027195934804085626, + "acc_norm": 0.2740740740740741, + "acc_norm_stderr": 0.027195934804085626 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.25165562913907286, + "acc_stderr": 0.03543304234389985, + "acc_norm": 0.25165562913907286, + "acc_norm_stderr": 0.03543304234389985 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.39800995024875624, + "acc_stderr": 0.034611994290400135, + "acc_norm": 0.39800995024875624, + "acc_norm_stderr": 0.034611994290400135 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.31213872832369943, + "acc_stderr": 0.03533133389323657, + "acc_norm": 0.31213872832369943, + "acc_norm_stderr": 0.03533133389323657 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.24338624338624337, + "acc_stderr": 0.022101128787415426, + "acc_norm": 0.24338624338624337, + "acc_norm_stderr": 0.022101128787415426 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2916666666666667, + "acc_stderr": 0.03800968060554857, + "acc_norm": 0.2916666666666667, + "acc_norm_stderr": 0.03800968060554857 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + 
"acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.31213872832369943, + "acc_stderr": 0.02494679222527231, + "acc_norm": 0.31213872832369943, + "acc_norm_stderr": 0.02494679222527231 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.294478527607362, + "acc_stderr": 0.03581165790474082, + "acc_norm": 0.294478527607362, + "acc_norm_stderr": 0.03581165790474082 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.02563082497562135, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.02563082497562135 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.22797927461139897, + "acc_stderr": 0.03027690994517826, + "acc_norm": 0.22797927461139897, + "acc_norm_stderr": 0.03027690994517826 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.21929824561403508, + "acc_stderr": 0.03892431106518752, + "acc_norm": 0.21929824561403508, + "acc_norm_stderr": 0.03892431106518752 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.26422018348623855, + "acc_stderr": 0.018904164171510213, + "acc_norm": 0.26422018348623855, + "acc_norm_stderr": 0.018904164171510213 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.23809523809523808, + "acc_stderr": 0.03809523809523811, + "acc_norm": 0.23809523809523808, + "acc_norm_stderr": 0.03809523809523811 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.30718954248366015, + "acc_stderr": 0.026415601914389, + "acc_norm": 0.30718954248366015, + "acc_norm_stderr": 0.026415601914389 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.4380165289256198, + "acc_stderr": 0.045291468044357915, + 
"acc_norm": 0.4380165289256198, + "acc_norm_stderr": 0.045291468044357915 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.34868421052631576, + "acc_stderr": 0.0387813988879761, + "acc_norm": 0.34868421052631576, + "acc_norm_stderr": 0.0387813988879761 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.28921568627450983, + "acc_stderr": 0.018342529845275915, + "acc_norm": 0.28921568627450983, + "acc_norm_stderr": 0.018342529845275915 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.25886524822695034, + "acc_stderr": 0.026129572527180848, + "acc_norm": 0.25886524822695034, + "acc_norm_stderr": 0.026129572527180848 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.19642857142857142, + "acc_stderr": 0.037709700493470166, + "acc_norm": 0.19642857142857142, + "acc_norm_stderr": 0.037709700493470166 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.25, + "acc_stderr": 0.029531221160930918, + "acc_norm": 0.25, + "acc_norm_stderr": 0.029531221160930918 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.1875, + "acc_stderr": 0.023709788253811766, + "acc_norm": 0.1875, + "acc_norm_stderr": 0.023709788253811766 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.363265306122449, + "acc_stderr": 0.03078905113903081, + "acc_norm": 0.363265306122449, + "acc_norm_stderr": 0.03078905113903081 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.270042194092827, + "acc_stderr": 
0.028900721906293426, + "acc_norm": 0.270042194092827, + "acc_norm_stderr": 0.028900721906293426 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2790091264667536, + "acc_stderr": 0.011455208832803548, + "acc_norm": 0.2790091264667536, + "acc_norm_stderr": 0.011455208832803548 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.029771775228145628, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.029771775228145628 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.32727272727272727, + "acc_stderr": 0.03663974994391244, + "acc_norm": 0.32727272727272727, + "acc_norm_stderr": 0.03663974994391244 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2631578947368421, + "mc1_stderr": 0.015415241740237035, + "mc2": 0.4373029262876568, + "mc2_stderr": 0.015588306319483176 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2632821723730815, + "acc_stderr": 0.015141752199573201, + "acc_norm": 0.33884297520661155, + "acc_norm_stderr": 0.016272952997019124 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + 
"harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "cepiloth/ko-llama2-finetune-ex3", + "model_sha": "013b64f9d7f8155d95fedc7a859df06ae0c4fce9", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + 
"override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/cepiloth/ko-llama2-finetune-ex4/result_2023-10-31 03:26:06.json b/cepiloth/ko-llama2-finetune-ex4/result_2023-10-31 03:26:06.json new file mode 100644 index 0000000000000000000000000000000000000000..0ddcc3285a4d493c456c2978d7c87d3b44031ebd --- /dev/null +++ b/cepiloth/ko-llama2-finetune-ex4/result_2023-10-31 03:26:06.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.27559726962457337, + "acc_stderr": 0.013057169655761836, + "acc_norm": 0.310580204778157, + "acc_norm_stderr": 0.013522292098053057 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3202549292969528, + "acc_stderr": 0.004656208951541443, + "acc_norm": 0.37582154949213303, + "acc_norm_stderr": 0.004833444556338622 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.391812865497076, + "acc_stderr": 0.03743979825926399, + "acc_norm": 0.391812865497076, + "acc_norm_stderr": 0.03743979825926399 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.22330097087378642, + "acc_stderr": 0.04123553189891431, + "acc_norm": 0.22330097087378642, + "acc_norm_stderr": 0.04123553189891431 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3001277139208174, + "acc_stderr": 0.016389249691317425, + "acc_norm": 0.3001277139208174, + "acc_norm_stderr": 0.016389249691317425 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3111111111111111, + "acc_stderr": 0.03999262876617723, + "acc_norm": 0.3111111111111111, + "acc_norm_stderr": 0.03999262876617723 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2425531914893617, + "acc_stderr": 0.028020226271200217, + "acc_norm": 0.2425531914893617, + "acc_norm_stderr": 0.028020226271200217 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.24096385542168675, + "acc_stderr": 
0.0332939411907353, + "acc_norm": 0.24096385542168675, + "acc_norm_stderr": 0.0332939411907353 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2765273311897106, + "acc_stderr": 0.025403832978179604, + "acc_norm": 0.2765273311897106, + "acc_norm_stderr": 0.025403832978179604 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.24663677130044842, + "acc_stderr": 0.028930413120910874, + "acc_norm": 0.24663677130044842, + "acc_norm_stderr": 0.028930413120910874 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.25190839694656486, + "acc_stderr": 0.03807387116306086, + "acc_norm": 0.25190839694656486, + "acc_norm_stderr": 0.03807387116306086 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.25757575757575757, + "acc_stderr": 0.031156269519646836, + "acc_norm": 0.25757575757575757, + "acc_norm_stderr": 0.031156269519646836 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3103448275862069, + "acc_stderr": 0.038552896163789485, + "acc_norm": 0.3103448275862069, + "acc_norm_stderr": 0.038552896163789485 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237654, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237654 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.27310924369747897, + "acc_stderr": 0.02894200404099817, + "acc_norm": 0.27310924369747897, + "acc_norm_stderr": 0.02894200404099817 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.23846153846153847, + "acc_stderr": 0.02160629449464773, + "acc_norm": 0.23846153846153847, + "acc_norm_stderr": 0.02160629449464773 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + 
"harness|ko_mmlu_global_facts|5": { + "acc": 0.19, + "acc_stderr": 0.039427724440366234, + "acc_norm": 0.19, + "acc_norm_stderr": 0.039427724440366234 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.28703703703703703, + "acc_stderr": 0.04373313040914761, + "acc_norm": 0.28703703703703703, + "acc_norm_stderr": 0.04373313040914761 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2315270935960591, + "acc_stderr": 0.02967833314144444, + "acc_norm": 0.2315270935960591, + "acc_norm_stderr": 0.02967833314144444 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2870967741935484, + "acc_stderr": 0.025736542745594525, + "acc_norm": 0.2870967741935484, + "acc_norm_stderr": 0.025736542745594525 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.33760683760683763, + "acc_stderr": 0.030980296992618558, + "acc_norm": 0.33760683760683763, + "acc_norm_stderr": 0.030980296992618558 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2641509433962264, + "acc_stderr": 0.027134291628741706, + "acc_norm": 0.2641509433962264, + "acc_norm_stderr": 0.027134291628741706 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2545454545454545, + "acc_stderr": 0.041723430387053825, + "acc_norm": 0.2545454545454545, + "acc_norm_stderr": 0.041723430387053825 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24074074074074073, + "acc_stderr": 0.02606715922227579, + "acc_norm": 0.24074074074074073, + "acc_norm_stderr": 0.02606715922227579 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.25165562913907286, + "acc_stderr": 0.035433042343899844, + "acc_norm": 0.25165562913907286, + "acc_norm_stderr": 0.035433042343899844 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.2885572139303483, + "acc_stderr": 0.03203841040213321, + "acc_norm": 0.2885572139303483, + "acc_norm_stderr": 0.03203841040213321 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2658959537572254, + "acc_stderr": 0.033687629322594316, + "acc_norm": 
0.2658959537572254, + "acc_norm_stderr": 0.033687629322594316 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.23015873015873015, + "acc_stderr": 0.02167921966369317, + "acc_norm": 0.23015873015873015, + "acc_norm_stderr": 0.02167921966369317 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2569444444444444, + "acc_stderr": 0.03653946969442099, + "acc_norm": 0.2569444444444444, + "acc_norm_stderr": 0.03653946969442099 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932269, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932269 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2832369942196532, + "acc_stderr": 0.024257901705323378, + "acc_norm": 0.2832369942196532, + "acc_norm_stderr": 0.024257901705323378 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.26993865030674846, + "acc_stderr": 0.034878251684978906, + "acc_norm": 0.26993865030674846, + "acc_norm_stderr": 0.034878251684978906 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.024383665531035454, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.024383665531035454 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.24870466321243523, + "acc_stderr": 0.031195840877700304, + "acc_norm": 0.24870466321243523, + "acc_norm_stderr": 0.031195840877700304 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.040969851398436716, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.040969851398436716 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.27155963302752295, + "acc_stderr": 
0.019069098363191452, + "acc_norm": 0.27155963302752295, + "acc_norm_stderr": 0.019069098363191452 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2698412698412698, + "acc_stderr": 0.03970158273235172, + "acc_norm": 0.2698412698412698, + "acc_norm_stderr": 0.03970158273235172 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2908496732026144, + "acc_stderr": 0.02600480036395211, + "acc_norm": 0.2908496732026144, + "acc_norm_stderr": 0.02600480036395211 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2231404958677686, + "acc_stderr": 0.03800754475228732, + "acc_norm": 0.2231404958677686, + "acc_norm_stderr": 0.03800754475228732 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3092105263157895, + "acc_stderr": 0.037610708698674805, + "acc_norm": 0.3092105263157895, + "acc_norm_stderr": 0.037610708698674805 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.25326797385620914, + "acc_stderr": 0.017593486895366835, + "acc_norm": 0.25326797385620914, + "acc_norm_stderr": 0.017593486895366835 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.26595744680851063, + "acc_stderr": 0.026358065698880592, + "acc_norm": 0.26595744680851063, + "acc_norm_stderr": 0.026358065698880592 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.04287858751340456, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.04287858751340456 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.20833333333333334, + "acc_stderr": 0.02769691071309394, + "acc_norm": 0.20833333333333334, + "acc_norm_stderr": 0.02769691071309394 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23575418994413408, + "acc_stderr": 0.014196375686290804, + "acc_norm": 0.23575418994413408, + "acc_norm_stderr": 0.014196375686290804 + }, + 
"harness|ko_mmlu_college_computer_science|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720683, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720683 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3382352941176471, + "acc_stderr": 0.028739328513983566, + "acc_norm": 0.3382352941176471, + "acc_norm_stderr": 0.028739328513983566 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.33877551020408164, + "acc_stderr": 0.030299506562154178, + "acc_norm": 0.33877551020408164, + "acc_norm_stderr": 0.030299506562154178 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.19831223628691982, + "acc_stderr": 0.025955020841621112, + "acc_norm": 0.19831223628691982, + "acc_norm_stderr": 0.025955020841621112 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2620599739243807, + "acc_stderr": 0.011231552795890392, + "acc_norm": 0.2620599739243807, + "acc_norm_stderr": 0.011231552795890392 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.03166009679399812, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.03166009679399812 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.23636363636363636, + "acc_stderr": 0.03317505930009181, + "acc_norm": 0.23636363636363636, + "acc_norm_stderr": 0.03317505930009181 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.26805385556915545, + "mc1_stderr": 0.015506204722834564, + "mc2": 0.42970330311039423, + "mc2_stderr": 0.01625558814144742 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2514757969303424, + "acc_stderr": 0.014916462437232256, + "acc_norm": 0.29043683589138136, + "acc_norm_stderr": 0.01560760256981463 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + 
"harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + 
"harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "cepiloth/ko-llama2-finetune-ex4", + "model_sha": "c368a2162df72c2310144879432d508736a16e90", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/cepiloth/ko-llama2-finetune-ex5/result_2023-10-31 11:01:48.json b/cepiloth/ko-llama2-finetune-ex5/result_2023-10-31 11:01:48.json new file mode 100644 index 0000000000000000000000000000000000000000..d0e09dd576e3c40a9ecb90db174f044698f83b9f --- /dev/null +++ b/cepiloth/ko-llama2-finetune-ex5/result_2023-10-31 11:01:48.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2636518771331058, + "acc_stderr": 0.012875929151297065, + "acc_norm": 0.3122866894197952, + "acc_norm_stderr": 0.013542598541688065 + }, + "harness|ko_hellaswag|10": { + "acc": 0.33100975901214896, + "acc_stderr": 0.004696148339570981, + "acc_norm": 0.4099780920135431, + "acc_norm_stderr": 0.004908241354310212 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.03811079669833531, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.03811079669833531 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2524271844660194, + "acc_stderr": 0.04301250399690879, + "acc_norm": 0.2524271844660194, + 
"acc_norm_stderr": 0.04301250399690879 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.36398467432950193, + "acc_stderr": 0.017205684809032232, + "acc_norm": 0.36398467432950193, + "acc_norm_stderr": 0.017205684809032232 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2518518518518518, + "acc_stderr": 0.03749850709174021, + "acc_norm": 0.2518518518518518, + "acc_norm_stderr": 0.03749850709174021 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.37872340425531914, + "acc_stderr": 0.03170995606040655, + "acc_norm": 0.37872340425531914, + "acc_norm_stderr": 0.03170995606040655 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.25903614457831325, + "acc_stderr": 0.03410646614071857, + "acc_norm": 0.25903614457831325, + "acc_norm_stderr": 0.03410646614071857 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.36012861736334406, + "acc_stderr": 0.02726429759980402, + "acc_norm": 0.36012861736334406, + "acc_norm_stderr": 0.02726429759980402 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3273542600896861, + "acc_stderr": 0.03149384670994131, + "acc_norm": 0.3273542600896861, + "acc_norm_stderr": 0.03149384670994131 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.37404580152671757, + "acc_stderr": 0.04243869242230524, + "acc_norm": 0.37404580152671757, + "acc_norm_stderr": 0.04243869242230524 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.29292929292929293, + "acc_stderr": 0.032424979581788166, + "acc_norm": 0.29292929292929293, + "acc_norm_stderr": 0.032424979581788166 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.36551724137931035, + "acc_stderr": 0.04013124195424387, + "acc_norm": 
0.36551724137931035, + "acc_norm_stderr": 0.04013124195424387 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.04389869956808778, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.04389869956808778 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3319327731092437, + "acc_stderr": 0.030588697013783663, + "acc_norm": 0.3319327731092437, + "acc_norm_stderr": 0.030588697013783663 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2794871794871795, + "acc_stderr": 0.022752388839776823, + "acc_norm": 0.2794871794871795, + "acc_norm_stderr": 0.022752388839776823 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939098, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939098 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.37962962962962965, + "acc_stderr": 0.04691521224077742, + "acc_norm": 0.37962962962962965, + "acc_norm_stderr": 0.04691521224077742 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3103448275862069, + "acc_stderr": 0.03255086769970103, + "acc_norm": 0.3103448275862069, + "acc_norm_stderr": 0.03255086769970103 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3419354838709677, + "acc_stderr": 0.02698528957655273, + "acc_norm": 0.3419354838709677, + "acc_norm_stderr": 0.02698528957655273 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.44017094017094016, + "acc_stderr": 0.032520741720630506, + "acc_norm": 0.44017094017094016, + "acc_norm_stderr": 0.032520741720630506 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3018867924528302, + "acc_stderr": 0.028254200344438672, + "acc_norm": 0.3018867924528302, + "acc_norm_stderr": 0.028254200344438672 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.32727272727272727, + 
"acc_stderr": 0.04494290866252088, + "acc_norm": 0.32727272727272727, + "acc_norm_stderr": 0.04494290866252088 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2814814814814815, + "acc_stderr": 0.02742001935094528, + "acc_norm": 0.2814814814814815, + "acc_norm_stderr": 0.02742001935094528 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.03757949922943343, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.03757949922943343 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.4626865671641791, + "acc_stderr": 0.03525675167467974, + "acc_norm": 0.4626865671641791, + "acc_norm_stderr": 0.03525675167467974 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.24855491329479767, + "acc_stderr": 0.03295304696818318, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 0.03295304696818318 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.022569897074918428, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.022569897074918428 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3125, + "acc_stderr": 0.038760854559127644, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.038760854559127644 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.3468208092485549, + "acc_stderr": 0.025624723994030454, + "acc_norm": 0.3468208092485549, + "acc_norm_stderr": 0.025624723994030454 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.26993865030674846, + "acc_stderr": 0.034878251684978906, + "acc_norm": 0.26993865030674846, + "acc_norm_stderr": 0.034878251684978906 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 
0.3425925925925926, + "acc_stderr": 0.02640614597362568, + "acc_norm": 0.3425925925925926, + "acc_norm_stderr": 0.02640614597362568 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.32124352331606215, + "acc_stderr": 0.03369950868549068, + "acc_norm": 0.32124352331606215, + "acc_norm_stderr": 0.03369950868549068 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.0409698513984367, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.0409698513984367 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3211009174311927, + "acc_stderr": 0.020018149772733744, + "acc_norm": 0.3211009174311927, + "acc_norm_stderr": 0.020018149772733744 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2698412698412698, + "acc_stderr": 0.03970158273235172, + "acc_norm": 0.2698412698412698, + "acc_norm_stderr": 0.03970158273235172 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3431372549019608, + "acc_stderr": 0.027184498909941613, + "acc_norm": 0.3431372549019608, + "acc_norm_stderr": 0.027184498909941613 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.49586776859504134, + "acc_stderr": 0.04564198767432754, + "acc_norm": 0.49586776859504134, + "acc_norm_stderr": 0.04564198767432754 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3026315789473684, + "acc_stderr": 0.037385206761196686, + "acc_norm": 0.3026315789473684, + "acc_norm_stderr": 0.037385206761196686 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2908496732026144, + "acc_stderr": 0.018373116915903966, + "acc_norm": 0.2908496732026144, + "acc_norm_stderr": 0.018373116915903966 + }, + 
"harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2907801418439716, + "acc_stderr": 0.027090664368353178, + "acc_norm": 0.2907801418439716, + "acc_norm_stderr": 0.027090664368353178 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.26785714285714285, + "acc_stderr": 0.04203277291467764, + "acc_norm": 0.26785714285714285, + "acc_norm_stderr": 0.04203277291467764 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.03293377139415191, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.03293377139415191 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24692737430167597, + "acc_stderr": 0.014422292204808852, + "acc_norm": 0.24692737430167597, + "acc_norm_stderr": 0.014422292204808852 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.22058823529411764, + "acc_stderr": 0.025187786660227248, + "acc_norm": 0.22058823529411764, + "acc_norm_stderr": 0.025187786660227248 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.35918367346938773, + "acc_stderr": 0.030713560455108493, + "acc_norm": 0.35918367346938773, + "acc_norm_stderr": 0.030713560455108493 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.31223628691983124, + "acc_stderr": 0.030165137867847, + "acc_norm": 0.31223628691983124, + "acc_norm_stderr": 0.030165137867847 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.25097783572359844, + "acc_stderr": 0.011073730299187224, + "acc_norm": 0.25097783572359844, + "acc_norm_stderr": 0.011073730299187224 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.27941176470588236, + "acc_stderr": 
0.031493281045079556, + "acc_norm": 0.27941176470588236, + "acc_norm_stderr": 0.031493281045079556 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.296969696969697, + "acc_stderr": 0.03567969772268049, + "acc_norm": 0.296969696969697, + "acc_norm_stderr": 0.03567969772268049 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.28886168910648713, + "mc1_stderr": 0.0158663464013843, + "mc2": 0.4504635842487325, + "mc2_stderr": 0.01536359300418303 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.27036599763872493, + "acc_stderr": 0.015270152942068406, + "acc_norm": 0.35182998819362454, + "acc_norm_stderr": 0.016418206451218057 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 
1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "cepiloth/ko-llama2-finetune-ex5", + "model_sha": "72d3e9fcbf33373b484f2beb26751ac0bf06af65", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/chahyunmook/42dot-test-upload/result_2024-03-25 04:34:41.json b/chahyunmook/42dot-test-upload/result_2024-03-25 04:34:41.json new file mode 100644 index 0000000000000000000000000000000000000000..9c43cddd0e1b123c5f9e05935e802fb3b9fdedae --- /dev/null +++ b/chahyunmook/42dot-test-upload/result_2024-03-25 
04:34:41.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.30204778156996587, + "acc_stderr": 0.013417519144716426, + "acc_norm": 0.3395904436860068, + "acc_norm_stderr": 0.013839039762820166 + }, + "harness|ko_hellaswag|10": { + "acc": 0.35301732722565227, + "acc_stderr": 0.00476931330047024, + "acc_norm": 0.44891455885281817, + "acc_norm_stderr": 0.004963669199433393 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.23976608187134502, + "acc_stderr": 0.03274485211946956, + "acc_norm": 0.23976608187134502, + "acc_norm_stderr": 0.03274485211946956 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.24271844660194175, + "acc_stderr": 0.04245022486384495, + "acc_norm": 0.24271844660194175, + "acc_norm_stderr": 0.04245022486384495 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3231162196679438, + "acc_stderr": 0.016723726512343048, + "acc_norm": 0.3231162196679438, + "acc_norm_stderr": 0.016723726512343048 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.35555555555555557, + "acc_stderr": 0.04135176749720386, + "acc_norm": 0.35555555555555557, + "acc_norm_stderr": 0.04135176749720386 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.20851063829787234, + "acc_stderr": 0.026556982117838742, + "acc_norm": 0.20851063829787234, + "acc_norm_stderr": 0.026556982117838742 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.2289156626506024, + "acc_stderr": 0.03270745277352477, + "acc_norm": 0.2289156626506024, + "acc_norm_stderr": 0.03270745277352477 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3183279742765273, + "acc_stderr": 0.026457225067811025, + "acc_norm": 0.3183279742765273, + "acc_norm_stderr": 0.026457225067811025 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.242152466367713, + "acc_stderr": 0.028751392398694755, + "acc_norm": 
0.242152466367713, + "acc_norm_stderr": 0.028751392398694755 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.29770992366412213, + "acc_stderr": 0.04010358942462202, + "acc_norm": 0.29770992366412213, + "acc_norm_stderr": 0.04010358942462202 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.031911782267135466, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.031911782267135466 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.19310344827586207, + "acc_stderr": 0.032894455221273995, + "acc_norm": 0.19310344827586207, + "acc_norm_stderr": 0.032894455221273995 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.1568627450980392, + "acc_stderr": 0.03618664819936245, + "acc_norm": 0.1568627450980392, + "acc_norm_stderr": 0.03618664819936245 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.24369747899159663, + "acc_stderr": 0.027886828078380572, + "acc_norm": 0.24369747899159663, + "acc_norm_stderr": 0.027886828078380572 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.21794871794871795, + "acc_stderr": 0.020932445774463196, + "acc_norm": 0.21794871794871795, + "acc_norm_stderr": 0.020932445774463196 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768081, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768081 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.04330043749650742, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.04330043749650742 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2660098522167488, + "acc_stderr": 
0.03108982600293753, + "acc_norm": 0.2660098522167488, + "acc_norm_stderr": 0.03108982600293753 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.25161290322580643, + "acc_stderr": 0.02468597928623996, + "acc_norm": 0.25161290322580643, + "acc_norm_stderr": 0.02468597928623996 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2606837606837607, + "acc_stderr": 0.02876034895652341, + "acc_norm": 0.2606837606837607, + "acc_norm_stderr": 0.02876034895652341 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.25660377358490566, + "acc_stderr": 0.02688064788905199, + "acc_norm": 0.25660377358490566, + "acc_norm_stderr": 0.02688064788905199 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2636363636363636, + "acc_stderr": 0.04220224692971987, + "acc_norm": 0.2636363636363636, + "acc_norm_stderr": 0.04220224692971987 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.026842057873833706, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.026842057873833706 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.037579499229433426, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.037579499229433426 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.2835820895522388, + "acc_stderr": 0.031871875379197966, + "acc_norm": 0.2835820895522388, + "acc_norm_stderr": 0.031871875379197966 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.23121387283236994, + "acc_stderr": 0.03214737302029468, + "acc_norm": 0.23121387283236994, + "acc_norm_stderr": 0.03214737302029468 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.022644212615525218, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.022644212615525218 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3263888888888889, + "acc_stderr": 0.03921067198982266, + "acc_norm": 0.3263888888888889, + "acc_norm_stderr": 0.03921067198982266 + 
}, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.18, + "acc_stderr": 0.038612291966536955, + "acc_norm": 0.18, + "acc_norm_stderr": 0.038612291966536955 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720683, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720683 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.28034682080924855, + "acc_stderr": 0.024182427496577615, + "acc_norm": 0.28034682080924855, + "acc_norm_stderr": 0.024182427496577615 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.26993865030674846, + "acc_stderr": 0.03487825168497892, + "acc_norm": 0.26993865030674846, + "acc_norm_stderr": 0.03487825168497892 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.30864197530864196, + "acc_stderr": 0.025702640260603756, + "acc_norm": 0.30864197530864196, + "acc_norm_stderr": 0.025702640260603756 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.23316062176165803, + "acc_stderr": 0.03051611137147602, + "acc_norm": 0.23316062176165803, + "acc_norm_stderr": 0.03051611137147602 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.041424397194893624, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.041424397194893624 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.24220183486238533, + "acc_stderr": 0.01836817630659862, + "acc_norm": 0.24220183486238533, + "acc_norm_stderr": 0.01836817630659862 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.1746031746031746, + "acc_stderr": 0.0339549002085611, + "acc_norm": 0.1746031746031746, + "acc_norm_stderr": 0.0339549002085611 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.025829163272757475, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 
0.025829163272757475 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.32231404958677684, + "acc_stderr": 0.04266416363352167, + "acc_norm": 0.32231404958677684, + "acc_norm_stderr": 0.04266416363352167 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.29605263157894735, + "acc_stderr": 0.037150621549989056, + "acc_norm": 0.29605263157894735, + "acc_norm_stderr": 0.037150621549989056 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.28104575163398693, + "acc_stderr": 0.018185218954318082, + "acc_norm": 0.28104575163398693, + "acc_norm_stderr": 0.018185218954318082 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2553191489361702, + "acc_stderr": 0.026011992930902002, + "acc_norm": 0.2553191489361702, + "acc_norm_stderr": 0.026011992930902002 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.22321428571428573, + "acc_stderr": 0.03952301967702511, + "acc_norm": 0.22321428571428573, + "acc_norm_stderr": 0.03952301967702511 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.032757734861009996, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.032757734861009996 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24916201117318434, + "acc_stderr": 0.01446589382985992, + "acc_norm": 0.24916201117318434, + "acc_norm_stderr": 0.01446589382985992 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4117647058823529, + "acc_stderr": 0.029896163033125478, + 
"acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.029896163033125478 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2612244897959184, + "acc_stderr": 0.028123429335142787, + "acc_norm": 0.2612244897959184, + "acc_norm_stderr": 0.028123429335142787 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.3291139240506329, + "acc_stderr": 0.03058732629470236, + "acc_norm": 0.3291139240506329, + "acc_norm_stderr": 0.03058732629470236 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.25684485006518903, + "acc_stderr": 0.011158455853098864, + "acc_norm": 0.25684485006518903, + "acc_norm_stderr": 0.011158455853098864 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.27941176470588236, + "acc_stderr": 0.031493281045079556, + "acc_norm": 0.27941176470588236, + "acc_norm_stderr": 0.031493281045079556 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2, + "acc_stderr": 0.031234752377721175, + "acc_norm": 0.2, + "acc_norm_stderr": 0.031234752377721175 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.23745410036719705, + "mc1_stderr": 0.014896277441041867, + "mc2": 0.39750034717437555, + "mc2_stderr": 0.016010537152585266 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.27390791027154665, + "acc_stderr": 0.015332499474791024, + "acc_norm": 0.32231404958677684, + "acc_norm_stderr": 0.01606825361581395 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + 
"harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + 
"harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "chahyunmook/42dot-test-upload", + "model_sha": "06cfb74d97b7bd0c4e52de4aca010fe85012b018", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/chahyunmook/42dot_label/result_2024-03-25 05:12:48.json b/chahyunmook/42dot_label/result_2024-03-25 05:12:48.json new file mode 100644 index 0000000000000000000000000000000000000000..75b4c66044a08dc763a76f8d1c821ffc2a150d70 --- /dev/null +++ b/chahyunmook/42dot_label/result_2024-03-25 05:12:48.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.29180887372013653, + "acc_stderr": 0.013284525292403492, + "acc_norm": 0.32849829351535836, + "acc_norm_stderr": 0.013724978465537357 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3565026887074288, + "acc_stderr": 0.004779872250633706, + "acc_norm": 0.4466241784505079, + "acc_norm_stderr": 0.004961268387512964 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.1871345029239766, + "acc_stderr": 0.029913127232368043, + "acc_norm": 0.1871345029239766, + "acc_norm_stderr": 0.029913127232368043 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.1941747572815534, + "acc_stderr": 0.039166677628225836, + "acc_norm": 0.1941747572815534, + "acc_norm_stderr": 0.039166677628225836 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.24010217113665389, + "acc_stderr": 0.015274685213734195, + "acc_norm": 0.24010217113665389, + "acc_norm_stderr": 0.015274685213734195 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34814814814814815, + "acc_stderr": 0.041153246103369526, + "acc_norm": 0.34814814814814815, + "acc_norm_stderr": 0.041153246103369526 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 
0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.30638297872340425, + "acc_stderr": 0.03013590647851756, + "acc_norm": 0.30638297872340425, + "acc_norm_stderr": 0.03013590647851756 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.2710843373493976, + "acc_stderr": 0.03460579907553028, + "acc_norm": 0.2710843373493976, + "acc_norm_stderr": 0.03460579907553028 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3086816720257235, + "acc_stderr": 0.026236965881153245, + "acc_norm": 0.3086816720257235, + "acc_norm_stderr": 0.026236965881153245 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.20179372197309417, + "acc_stderr": 0.026936111912802273, + "acc_norm": 0.20179372197309417, + "acc_norm_stderr": 0.026936111912802273 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.24427480916030533, + "acc_stderr": 0.03768335959728745, + "acc_norm": 0.24427480916030533, + "acc_norm_stderr": 0.03768335959728745 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036845, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036845 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3484848484848485, + "acc_stderr": 0.033948539651564025, + "acc_norm": 0.3484848484848485, + "acc_norm_stderr": 0.033948539651564025 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.32413793103448274, + "acc_stderr": 0.03900432069185553, + "acc_norm": 0.32413793103448274, + "acc_norm_stderr": 0.03900432069185553 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.11764705882352941, + "acc_stderr": 0.032059077331445265, + "acc_norm": 0.11764705882352941, + "acc_norm_stderr": 0.032059077331445265 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.25630252100840334, + "acc_stderr": 0.02835962087053395, + "acc_norm": 0.25630252100840334, + "acc_norm_stderr": 0.02835962087053395 + }, + 
"harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.32051282051282054, + "acc_stderr": 0.023661296393964273, + "acc_norm": 0.32051282051282054, + "acc_norm_stderr": 0.023661296393964273 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.19, + "acc_stderr": 0.03942772444036623, + "acc_norm": 0.19, + "acc_norm_stderr": 0.03942772444036623 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.04330043749650742, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.04330043749650742 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.270935960591133, + "acc_stderr": 0.031270907132976984, + "acc_norm": 0.270935960591133, + "acc_norm_stderr": 0.031270907132976984 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3161290322580645, + "acc_stderr": 0.02645087448904276, + "acc_norm": 0.3161290322580645, + "acc_norm_stderr": 0.02645087448904276 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.25213675213675213, + "acc_stderr": 0.02844796547623102, + "acc_norm": 0.25213675213675213, + "acc_norm_stderr": 0.02844796547623102 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3018867924528302, + "acc_stderr": 0.02825420034443867, + "acc_norm": 0.3018867924528302, + "acc_norm_stderr": 0.02825420034443867 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2636363636363636, + "acc_stderr": 0.04220224692971987, + "acc_norm": 0.2636363636363636, + "acc_norm_stderr": 0.04220224692971987 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.026842057873833706, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.026842057873833706 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.03710185726119995, + "acc_norm": 0.2913907284768212, + 
"acc_norm_stderr": 0.03710185726119995 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.27860696517412936, + "acc_stderr": 0.031700561834973086, + "acc_norm": 0.27860696517412936, + "acc_norm_stderr": 0.031700561834973086 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2832369942196532, + "acc_stderr": 0.034355680560478746, + "acc_norm": 0.2832369942196532, + "acc_norm_stderr": 0.034355680560478746 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.26455026455026454, + "acc_stderr": 0.022717467897708604, + "acc_norm": 0.26455026455026454, + "acc_norm_stderr": 0.022717467897708604 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2569444444444444, + "acc_stderr": 0.03653946969442099, + "acc_norm": 0.2569444444444444, + "acc_norm_stderr": 0.03653946969442099 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.27167630057803466, + "acc_stderr": 0.023948512905468348, + "acc_norm": 0.27167630057803466, + "acc_norm_stderr": 0.023948512905468348 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.294478527607362, + "acc_stderr": 0.03581165790474082, + "acc_norm": 0.294478527607362, + "acc_norm_stderr": 0.03581165790474082 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.28703703703703703, + "acc_stderr": 0.02517104191530968, + "acc_norm": 0.28703703703703703, + "acc_norm_stderr": 0.02517104191530968 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.21243523316062177, + "acc_stderr": 0.02951928261681726, + "acc_norm": 
0.21243523316062177, + "acc_norm_stderr": 0.02951928261681726 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.30701754385964913, + "acc_stderr": 0.0433913832257986, + "acc_norm": 0.30701754385964913, + "acc_norm_stderr": 0.0433913832257986 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3119266055045872, + "acc_stderr": 0.019862967976707238, + "acc_norm": 0.3119266055045872, + "acc_norm_stderr": 0.019862967976707238 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.0393253768039287, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.0393253768039287 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.025261691219729494, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.025261691219729494 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.33884297520661155, + "acc_stderr": 0.043207678075366684, + "acc_norm": 0.33884297520661155, + "acc_norm_stderr": 0.043207678075366684 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.28289473684210525, + "acc_stderr": 0.03665349695640767, + "acc_norm": 0.28289473684210525, + "acc_norm_stderr": 0.03665349695640767 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.017848089574913226, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.017848089574913226 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.24113475177304963, + "acc_stderr": 0.025518731049537783, + "acc_norm": 0.24113475177304963, + "acc_norm_stderr": 0.025518731049537783 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.23214285714285715, + "acc_stderr": 0.04007341809755805, + "acc_norm": 0.23214285714285715, + "acc_norm_stderr": 0.04007341809755805 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 
0.39814814814814814, + "acc_stderr": 0.03338473403207401, + "acc_norm": 0.39814814814814814, + "acc_norm_stderr": 0.03338473403207401 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27262569832402234, + "acc_stderr": 0.014893391735249608, + "acc_norm": 0.27262569832402234, + "acc_norm_stderr": 0.014893391735249608 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932268, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932268 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4117647058823529, + "acc_stderr": 0.029896163033125478, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.029896163033125478 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.31020408163265306, + "acc_stderr": 0.029613459872484378, + "acc_norm": 0.31020408163265306, + "acc_norm_stderr": 0.029613459872484378 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.31223628691983124, + "acc_stderr": 0.030165137867847008, + "acc_norm": 0.31223628691983124, + "acc_norm_stderr": 0.030165137867847008 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2457627118644068, + "acc_stderr": 0.01099615663514269, + "acc_norm": 0.2457627118644068, + "acc_norm_stderr": 0.01099615663514269 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.03058759135160424, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.03058759135160424 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2787878787878788, + "acc_stderr": 0.0350143870629678, + "acc_norm": 0.2787878787878788, + "acc_norm_stderr": 0.0350143870629678 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.25703794369645044, + "mc1_stderr": 0.015298077509485086, + "mc2": 0.4324361736231922, + "mc2_stderr": 
0.015412264447807083 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.24793388429752067, + "acc_stderr": 0.014846044968252247, + "acc_norm": 0.269185360094451, + "acc_norm_stderr": 0.015249098024144538 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 
1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "chahyunmook/42dot_label", + "model_sha": "179e025c2baf8a35f262e34893753f6d51aa1ac4", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/chahyunmook/42dot_law/result_2024-03-25 05:23:51.json b/chahyunmook/42dot_law/result_2024-03-25 05:23:51.json new file mode 100644 index 0000000000000000000000000000000000000000..1dbf2dd0ca9633eed0ab7711701abc7873c76b2e --- /dev/null +++ b/chahyunmook/42dot_law/result_2024-03-25 05:23:51.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.26706484641638223, + "acc_stderr": 0.012928933196496338, + "acc_norm": 0.3199658703071672, + "acc_norm_stderr": 0.013631345807016195 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3494323839872535, + "acc_stderr": 0.004758162967997394, + "acc_norm": 0.4402509460266879, + "acc_norm_stderr": 0.004954026775425764 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 
0.22807017543859648, + "acc_stderr": 0.03218093795602357, + "acc_norm": 0.22807017543859648, + "acc_norm_stderr": 0.03218093795602357 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.21359223300970873, + "acc_stderr": 0.04058042015646034, + "acc_norm": 0.21359223300970873, + "acc_norm_stderr": 0.04058042015646034 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.21966794380587484, + "acc_stderr": 0.014805384478371155, + "acc_norm": 0.21966794380587484, + "acc_norm_stderr": 0.014805384478371155 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34814814814814815, + "acc_stderr": 0.041153246103369526, + "acc_norm": 0.34814814814814815, + "acc_norm_stderr": 0.041153246103369526 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2723404255319149, + "acc_stderr": 0.0291012906983867, + "acc_norm": 0.2723404255319149, + "acc_norm_stderr": 0.0291012906983867 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.25301204819277107, + "acc_stderr": 0.03384429155233135, + "acc_norm": 0.25301204819277107, + "acc_norm_stderr": 0.03384429155233135 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2797427652733119, + "acc_stderr": 0.02549425935069489, + "acc_norm": 0.2797427652733119, + "acc_norm_stderr": 0.02549425935069489 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.20179372197309417, + "acc_stderr": 0.02693611191280227, + "acc_norm": 0.20179372197309417, + "acc_norm_stderr": 0.02693611191280227 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2366412213740458, + "acc_stderr": 0.037276735755969174, + "acc_norm": 0.2366412213740458, + "acc_norm_stderr": 0.037276735755969174 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 
0.16666666666666666, + "acc_stderr": 0.02655220782821529, + "acc_norm": 0.16666666666666666, + "acc_norm_stderr": 0.02655220782821529 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2413793103448276, + "acc_stderr": 0.03565998174135302, + "acc_norm": 0.2413793103448276, + "acc_norm_stderr": 0.03565998174135302 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.18627450980392157, + "acc_stderr": 0.03873958714149351, + "acc_norm": 0.18627450980392157, + "acc_norm_stderr": 0.03873958714149351 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.25210084033613445, + "acc_stderr": 0.028205545033277733, + "acc_norm": 0.25210084033613445, + "acc_norm_stderr": 0.028205545033277733 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.25384615384615383, + "acc_stderr": 0.022066054378726257, + "acc_norm": 0.25384615384615383, + "acc_norm_stderr": 0.022066054378726257 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384739, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384739 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036845, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036845 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.26851851851851855, + "acc_stderr": 0.04284467968052191, + "acc_norm": 0.26851851851851855, + "acc_norm_stderr": 0.04284467968052191 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2512315270935961, + "acc_stderr": 0.030516530732694436, + "acc_norm": 0.2512315270935961, + "acc_norm_stderr": 0.030516530732694436 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3032258064516129, + "acc_stderr": 0.026148685930671742, + "acc_norm": 0.3032258064516129, + "acc_norm_stderr": 0.026148685930671742 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2692307692307692, + "acc_stderr": 0.029058588303748842, + "acc_norm": 0.2692307692307692, + "acc_norm_stderr": 0.029058588303748842 + }, 
+ "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2528301886792453, + "acc_stderr": 0.026749899771241245, + "acc_norm": 0.2528301886792453, + "acc_norm_stderr": 0.026749899771241245 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2727272727272727, + "acc_stderr": 0.04265792110940589, + "acc_norm": 0.2727272727272727, + "acc_norm_stderr": 0.04265792110940589 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.22962962962962963, + "acc_stderr": 0.02564410863926762, + "acc_norm": 0.22962962962962963, + "acc_norm_stderr": 0.02564410863926762 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.03757949922943342, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.03757949922943342 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.2935323383084577, + "acc_stderr": 0.03220024104534205, + "acc_norm": 0.2935323383084577, + "acc_norm_stderr": 0.03220024104534205 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.24277456647398843, + "acc_stderr": 0.0326926380614177, + "acc_norm": 0.24277456647398843, + "acc_norm_stderr": 0.0326926380614177 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.25396825396825395, + "acc_stderr": 0.022418042891113946, + "acc_norm": 0.25396825396825395, + "acc_norm_stderr": 0.022418042891113946 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.20833333333333334, + "acc_stderr": 0.03396116205845335, + "acc_norm": 0.20833333333333334, + "acc_norm_stderr": 0.03396116205845335 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.23, + "acc_stderr": 0.042295258468165065, + "acc_norm": 0.23, + "acc_norm_stderr": 0.042295258468165065 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2745664739884393, + "acc_stderr": 0.02402774515526501, + "acc_norm": 0.2745664739884393, + 
"acc_norm_stderr": 0.02402774515526501 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2822085889570552, + "acc_stderr": 0.03536117886664743, + "acc_norm": 0.2822085889570552, + "acc_norm_stderr": 0.03536117886664743 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.26851851851851855, + "acc_stderr": 0.02465968518596729, + "acc_norm": 0.26851851851851855, + "acc_norm_stderr": 0.02465968518596729 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.22279792746113988, + "acc_stderr": 0.030031147977641545, + "acc_norm": 0.22279792746113988, + "acc_norm_stderr": 0.030031147977641545 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2894736842105263, + "acc_stderr": 0.04266339443159394, + "acc_norm": 0.2894736842105263, + "acc_norm_stderr": 0.04266339443159394 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.30091743119266057, + "acc_stderr": 0.01966475136680211, + "acc_norm": 0.30091743119266057, + "acc_norm_stderr": 0.01966475136680211 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.1746031746031746, + "acc_stderr": 0.033954900208561116, + "acc_norm": 0.1746031746031746, + "acc_norm_stderr": 0.033954900208561116 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.238562091503268, + "acc_stderr": 0.024404394928087866, + "acc_norm": 0.238562091503268, + "acc_norm_stderr": 0.024404394928087866 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932269, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932269 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.36363636363636365, + "acc_stderr": 0.043913262867240704, + "acc_norm": 0.36363636363636365, + "acc_norm_stderr": 0.043913262867240704 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.034597776068105345, + 
"acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.034597776068105345 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.017160587235046345, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.017160587235046345 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.22340425531914893, + "acc_stderr": 0.024847921358063962, + "acc_norm": 0.22340425531914893, + "acc_norm_stderr": 0.024847921358063962 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.23214285714285715, + "acc_stderr": 0.04007341809755808, + "acc_norm": 0.23214285714285715, + "acc_norm_stderr": 0.04007341809755808 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.03362277436608043, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.03362277436608043 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24022346368715083, + "acc_stderr": 0.01428834380392531, + "acc_norm": 0.24022346368715083, + "acc_norm_stderr": 0.01428834380392531 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4338235294117647, + "acc_stderr": 0.030105636570016636, + "acc_norm": 0.4338235294117647, + "acc_norm_stderr": 0.030105636570016636 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.21224489795918366, + "acc_stderr": 0.026176967197866767, + "acc_norm": 0.21224489795918366, + "acc_norm_stderr": 0.026176967197866767 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.29957805907172996, + "acc_stderr": 0.029818024749753095, + "acc_norm": 0.29957805907172996, + "acc_norm_stderr": 0.029818024749753095 + }, + 
"harness|ko_mmlu_professional_law|5": { + "acc": 0.25488917861799215, + "acc_stderr": 0.01113050981266297, + "acc_norm": 0.25488917861799215, + "acc_norm_stderr": 0.01113050981266297 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.031321798030832904, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.031321798030832904 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2787878787878788, + "acc_stderr": 0.035014387062967806, + "acc_norm": 0.2787878787878788, + "acc_norm_stderr": 0.035014387062967806 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2607099143206854, + "mc1_stderr": 0.015368841620766368, + "mc2": 0.4242224964906894, + "mc2_stderr": 0.01527862179271015 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2809917355371901, + "acc_stderr": 0.015453559655458275, + "acc_norm": 0.3447461629279811, + "acc_norm_stderr": 0.016340649905418687 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + 
"harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "chahyunmook/42dot_law", + "model_sha": "892a6f1cf8bd42aa122d41cbc800b58ac5bba83a", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git 
a/chahyunmook/42dot_number/result_2024-04-04 05:13:43.json b/chahyunmook/42dot_number/result_2024-04-04 05:13:43.json new file mode 100644 index 0000000000000000000000000000000000000000..129cc879eae90d5a863de34422062906af5d2a13 --- /dev/null +++ b/chahyunmook/42dot_number/result_2024-04-04 05:13:43.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.26706484641638223, + "acc_stderr": 0.01292893319649635, + "acc_norm": 0.3122866894197952, + "acc_norm_stderr": 0.013542598541688065 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3301135232025493, + "acc_stderr": 0.004692926794268459, + "acc_norm": 0.4056960764787891, + "acc_norm_stderr": 0.004900227226433395 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.22807017543859648, + "acc_stderr": 0.03218093795602357, + "acc_norm": 0.22807017543859648, + "acc_norm_stderr": 0.03218093795602357 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.20388349514563106, + "acc_stderr": 0.03989139859531772, + "acc_norm": 0.20388349514563106, + "acc_norm_stderr": 0.03989139859531772 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2822477650063857, + "acc_stderr": 0.016095302969878544, + "acc_norm": 0.2822477650063857, + "acc_norm_stderr": 0.016095302969878544 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3037037037037037, + "acc_stderr": 0.03972552884785137, + "acc_norm": 0.3037037037037037, + "acc_norm_stderr": 0.03972552884785137 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2425531914893617, + "acc_stderr": 0.02802022627120022, + "acc_norm": 0.2425531914893617, + "acc_norm_stderr": 0.02802022627120022 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.27710843373493976, + "acc_stderr": 0.03484331592680588, + "acc_norm": 0.27710843373493976, + "acc_norm_stderr": 0.03484331592680588 + }, + 
"harness|ko_mmlu_philosophy|5": { + "acc": 0.3022508038585209, + "acc_stderr": 0.026082700695399662, + "acc_norm": 0.3022508038585209, + "acc_norm_stderr": 0.026082700695399662 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3004484304932735, + "acc_stderr": 0.030769352008229136, + "acc_norm": 0.3004484304932735, + "acc_norm_stderr": 0.030769352008229136 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.22137404580152673, + "acc_stderr": 0.0364129708131373, + "acc_norm": 0.22137404580152673, + "acc_norm_stderr": 0.0364129708131373 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909282, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909282 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.25757575757575757, + "acc_stderr": 0.031156269519646843, + "acc_norm": 0.25757575757575757, + "acc_norm_stderr": 0.031156269519646843 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.25517241379310346, + "acc_stderr": 0.03632984052707842, + "acc_norm": 0.25517241379310346, + "acc_norm_stderr": 0.03632984052707842 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.19607843137254902, + "acc_stderr": 0.03950581861179962, + "acc_norm": 0.19607843137254902, + "acc_norm_stderr": 0.03950581861179962 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.23109243697478993, + "acc_stderr": 0.027381406927868966, + "acc_norm": 0.23109243697478993, + "acc_norm_stderr": 0.027381406927868966 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.23846153846153847, + "acc_stderr": 0.021606294494647727, + "acc_norm": 0.23846153846153847, + "acc_norm_stderr": 0.021606294494647727 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932269, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932269 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 
0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.26851851851851855, + "acc_stderr": 0.04284467968052191, + "acc_norm": 0.26851851851851855, + "acc_norm_stderr": 0.04284467968052191 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2660098522167488, + "acc_stderr": 0.031089826002937523, + "acc_norm": 0.2660098522167488, + "acc_norm_stderr": 0.031089826002937523 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2709677419354839, + "acc_stderr": 0.025284416114900152, + "acc_norm": 0.2709677419354839, + "acc_norm_stderr": 0.025284416114900152 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.3162393162393162, + "acc_stderr": 0.030463656747340265, + "acc_norm": 0.3162393162393162, + "acc_norm_stderr": 0.030463656747340265 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.22641509433962265, + "acc_stderr": 0.025757559893106723, + "acc_norm": 0.22641509433962265, + "acc_norm_stderr": 0.025757559893106723 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2, + "acc_stderr": 0.03831305140884601, + "acc_norm": 0.2, + "acc_norm_stderr": 0.03831305140884601 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24074074074074073, + "acc_stderr": 0.02606715922227578, + "acc_norm": 0.24074074074074073, + "acc_norm_stderr": 0.02606715922227578 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2052980132450331, + "acc_stderr": 0.03297986648473836, + "acc_norm": 0.2052980132450331, + "acc_norm_stderr": 0.03297986648473836 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.263681592039801, + "acc_stderr": 0.031157150869355558, + "acc_norm": 0.263681592039801, + "acc_norm_stderr": 0.031157150869355558 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2023121387283237, + "acc_stderr": 0.03063114553919882, + "acc_norm": 0.2023121387283237, + "acc_norm_stderr": 0.03063114553919882 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2566137566137566, + "acc_stderr": 
0.022494510767503154, + "acc_norm": 0.2566137566137566, + "acc_norm_stderr": 0.022494510767503154 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2847222222222222, + "acc_stderr": 0.03773809990686936, + "acc_norm": 0.2847222222222222, + "acc_norm_stderr": 0.03773809990686936 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384739, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384739 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.24566473988439305, + "acc_stderr": 0.02317629820399201, + "acc_norm": 0.24566473988439305, + "acc_norm_stderr": 0.02317629820399201 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2822085889570552, + "acc_stderr": 0.03536117886664743, + "acc_norm": 0.2822085889570552, + "acc_norm_stderr": 0.03536117886664743 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.24691358024691357, + "acc_stderr": 0.02399350170904211, + "acc_norm": 0.24691358024691357, + "acc_norm_stderr": 0.02399350170904211 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.22279792746113988, + "acc_stderr": 0.030031147977641545, + "acc_norm": 0.22279792746113988, + "acc_norm_stderr": 0.030031147977641545 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.30701754385964913, + "acc_stderr": 0.0433913832257986, + "acc_norm": 0.30701754385964913, + "acc_norm_stderr": 0.0433913832257986 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.22935779816513763, + "acc_stderr": 0.018025349724618684, + "acc_norm": 0.22935779816513763, + "acc_norm_stderr": 0.018025349724618684 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 
0.15873015873015872, + "acc_stderr": 0.03268454013011744, + "acc_norm": 0.15873015873015872, + "acc_norm_stderr": 0.03268454013011744 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.023929155517351277, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.023929155517351277 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.22, + "acc_stderr": 0.041633319989322695, + "acc_norm": 0.22, + "acc_norm_stderr": 0.041633319989322695 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.371900826446281, + "acc_stderr": 0.04412015806624504, + "acc_norm": 0.371900826446281, + "acc_norm_stderr": 0.04412015806624504 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.18421052631578946, + "acc_stderr": 0.031546980450822305, + "acc_norm": 0.18421052631578946, + "acc_norm_stderr": 0.031546980450822305 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.018054027458815194, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.018054027458815194 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.25886524822695034, + "acc_stderr": 0.026129572527180848, + "acc_norm": 0.25886524822695034, + "acc_norm_stderr": 0.026129572527180848 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.24107142857142858, + "acc_stderr": 0.04059867246952687, + "acc_norm": 0.24107142857142858, + "acc_norm_stderr": 0.04059867246952687 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.32407407407407407, + "acc_stderr": 0.03191923445686185, + "acc_norm": 0.32407407407407407, + "acc_norm_stderr": 0.03191923445686185 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.25251396648044694, + "acc_stderr": 0.014530330201468634, + "acc_norm": 0.25251396648044694, + "acc_norm_stderr": 0.014530330201468634 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.22, + "acc_stderr": 0.041633319989322695, + "acc_norm": 0.22, + "acc_norm_stderr": 0.041633319989322695 + 
}, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.35294117647058826, + "acc_stderr": 0.029029422815681407, + "acc_norm": 0.35294117647058826, + "acc_norm_stderr": 0.029029422815681407 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.17142857142857143, + "acc_stderr": 0.02412746346265015, + "acc_norm": 0.17142857142857143, + "acc_norm_stderr": 0.02412746346265015 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2869198312236287, + "acc_stderr": 0.029443773022594693, + "acc_norm": 0.2869198312236287, + "acc_norm_stderr": 0.029443773022594693 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.24511082138200782, + "acc_stderr": 0.010986307870045522, + "acc_norm": 0.24511082138200782, + "acc_norm_stderr": 0.010986307870045522 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.03256685484460389, + "acc_norm": 0.3137254901960784, + "acc_norm_stderr": 0.03256685484460389 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2727272727272727, + "acc_stderr": 0.0347769116216366, + "acc_norm": 0.2727272727272727, + "acc_norm_stderr": 0.0347769116216366 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2631578947368421, + "mc1_stderr": 0.015415241740237033, + "mc2": 0.4193009019396091, + "mc2_stderr": 0.01574347752598874 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.26210153482880755, + "acc_stderr": 0.015119864670254151, + "acc_norm": 0.3435655253837072, + "acc_norm_stderr": 0.016327334806429145 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + 
"harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + 
"harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "chahyunmook/42dot_number", + "model_sha": "2dcd2387bde48bc913fb4e18db3aa16c9489cbc0", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/chahyunmook/42dot_ppl_20/result_2024-08-05 14:31:24.json b/chahyunmook/42dot_ppl_20/result_2024-08-05 14:31:24.json new file mode 100644 index 0000000000000000000000000000000000000000..5eee1fce82e8647e4f7b52c54b7bcfdaa7e7c528 --- /dev/null +++ b/chahyunmook/42dot_ppl_20/result_2024-08-05 14:31:24.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2883959044368601, + "acc_stderr": 0.013238394422428166, + "acc_norm": 0.32849829351535836, + "acc_norm_stderr": 0.013724978465537368 + }, + "harness|ko_hellaswag|10": { + "acc": 0.36227843059151565, + "acc_stderr": 0.004796763521045227, + "acc_norm": 0.458972316271659, + "acc_norm_stderr": 0.004972954732733356 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.28654970760233917, + "acc_stderr": 0.03467826685703826, + "acc_norm": 0.28654970760233917, + "acc_norm_stderr": 0.03467826685703826 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.1941747572815534, + "acc_stderr": 0.03916667762822582, + "acc_norm": 0.1941747572815534, + "acc_norm_stderr": 0.03916667762822582 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.22860791826309068, + "acc_stderr": 0.015016884698539883, + "acc_norm": 0.22860791826309068, + "acc_norm_stderr": 
0.015016884698539883 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.22962962962962963, + "acc_stderr": 0.036333844140734636, + "acc_norm": 0.22962962962962963, + "acc_norm_stderr": 0.036333844140734636 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.26382978723404255, + "acc_stderr": 0.028809989854102956, + "acc_norm": 0.26382978723404255, + "acc_norm_stderr": 0.028809989854102956 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.29518072289156627, + "acc_stderr": 0.03550920185689631, + "acc_norm": 0.29518072289156627, + "acc_norm_stderr": 0.03550920185689631 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.24115755627009647, + "acc_stderr": 0.02429659403476343, + "acc_norm": 0.24115755627009647, + "acc_norm_stderr": 0.02429659403476343 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.21076233183856502, + "acc_stderr": 0.027373095500540193, + "acc_norm": 0.21076233183856502, + "acc_norm_stderr": 0.027373095500540193 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.21374045801526717, + "acc_stderr": 0.0359546161177469, + "acc_norm": 0.21374045801526717, + "acc_norm_stderr": 0.0359546161177469 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.30808080808080807, + "acc_stderr": 0.03289477330098616, + "acc_norm": 0.30808080808080807, + "acc_norm_stderr": 0.03289477330098616 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.18620689655172415, + "acc_stderr": 0.032439461590046174, + "acc_norm": 0.18620689655172415, + "acc_norm_stderr": 0.032439461590046174 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04690650298201943, + "acc_norm": 0.3333333333333333, 
+ "acc_norm_stderr": 0.04690650298201943 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3445378151260504, + "acc_stderr": 0.03086868260412162, + "acc_norm": 0.3445378151260504, + "acc_norm_stderr": 0.03086868260412162 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3564102564102564, + "acc_stderr": 0.024283140529467295, + "acc_norm": 0.3564102564102564, + "acc_norm_stderr": 0.024283140529467295 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.04414343666854933, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.04414343666854933 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2660098522167488, + "acc_stderr": 0.031089826002937533, + "acc_norm": 0.2660098522167488, + "acc_norm_stderr": 0.031089826002937533 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3096774193548387, + "acc_stderr": 0.026302774983517418, + "acc_norm": 0.3096774193548387, + "acc_norm_stderr": 0.026302774983517418 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.19230769230769232, + "acc_stderr": 0.025819233256483727, + "acc_norm": 0.19230769230769232, + "acc_norm_stderr": 0.025819233256483727 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2792452830188679, + "acc_stderr": 0.02761116340239972, + "acc_norm": 0.2792452830188679, + "acc_norm_stderr": 0.02761116340239972 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.23636363636363636, + "acc_stderr": 0.040693063197213754, + "acc_norm": 0.23636363636363636, + "acc_norm_stderr": 0.040693063197213754 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25555555555555554, + "acc_stderr": 
0.026593939101844072, + "acc_norm": 0.25555555555555554, + "acc_norm_stderr": 0.026593939101844072 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33774834437086093, + "acc_stderr": 0.038615575462551684, + "acc_norm": 0.33774834437086093, + "acc_norm_stderr": 0.038615575462551684 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.263681592039801, + "acc_stderr": 0.03115715086935557, + "acc_norm": 0.263681592039801, + "acc_norm_stderr": 0.03115715086935557 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.36416184971098264, + "acc_stderr": 0.036690724774169056, + "acc_norm": 0.36416184971098264, + "acc_norm_stderr": 0.036690724774169056 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2328042328042328, + "acc_stderr": 0.021765961672154544, + "acc_norm": 0.2328042328042328, + "acc_norm_stderr": 0.021765961672154544 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2638888888888889, + "acc_stderr": 0.03685651095897532, + "acc_norm": 0.2638888888888889, + "acc_norm_stderr": 0.03685651095897532 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.22254335260115607, + "acc_stderr": 0.02239421566194282, + "acc_norm": 0.22254335260115607, + "acc_norm_stderr": 0.02239421566194282 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.22699386503067484, + "acc_stderr": 0.03291099578615769, + "acc_norm": 0.22699386503067484, + "acc_norm_stderr": 0.03291099578615769 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.24691358024691357, + "acc_stderr": 0.02399350170904211, + "acc_norm": 0.24691358024691357, + "acc_norm_stderr": 0.02399350170904211 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + 
"acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.3471502590673575, + "acc_stderr": 0.03435696168361355, + "acc_norm": 0.3471502590673575, + "acc_norm_stderr": 0.03435696168361355 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.042270544512322, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.042270544512322 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.29174311926605506, + "acc_stderr": 0.019489300968876532, + "acc_norm": 0.29174311926605506, + "acc_norm_stderr": 0.019489300968876532 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.373015873015873, + "acc_stderr": 0.04325506042017086, + "acc_norm": 0.373015873015873, + "acc_norm_stderr": 0.04325506042017086 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.26143790849673204, + "acc_stderr": 0.025160998214292456, + "acc_norm": 0.26143790849673204, + "acc_norm_stderr": 0.025160998214292456 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816507, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816507 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2231404958677686, + "acc_stderr": 0.03800754475228732, + "acc_norm": 0.2231404958677686, + "acc_norm_stderr": 0.03800754475228732 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.2236842105263158, + "acc_stderr": 0.033911609343436025, + "acc_norm": 0.2236842105263158, + "acc_norm_stderr": 0.033911609343436025 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.22712418300653595, + "acc_stderr": 0.016949853279212373, + "acc_norm": 0.22712418300653595, + "acc_norm_stderr": 0.016949853279212373 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2553191489361702, + "acc_stderr": 0.02601199293090201, + "acc_norm": 0.2553191489361702, + "acc_norm_stderr": 0.02601199293090201 + }, + 
"harness|ko_mmlu_machine_learning|5": { + "acc": 0.16071428571428573, + "acc_stderr": 0.034859460964757394, + "acc_norm": 0.16071428571428573, + "acc_norm_stderr": 0.034859460964757394 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.0340470532865388, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.0340470532865388 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27262569832402234, + "acc_stderr": 0.014893391735249608, + "acc_norm": 0.27262569832402234, + "acc_norm_stderr": 0.014893391735249608 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.19, + "acc_stderr": 0.03942772444036624, + "acc_norm": 0.19, + "acc_norm_stderr": 0.03942772444036624 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4485294117647059, + "acc_stderr": 0.030211479609121593, + "acc_norm": 0.4485294117647059, + "acc_norm_stderr": 0.030211479609121593 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3551020408163265, + "acc_stderr": 0.030635655150387638, + "acc_norm": 0.3551020408163265, + "acc_norm_stderr": 0.030635655150387638 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2616033755274262, + "acc_stderr": 0.028609516716994934, + "acc_norm": 0.2616033755274262, + "acc_norm_stderr": 0.028609516716994934 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.24641460234680573, + "acc_stderr": 0.011005971399927239, + "acc_norm": 0.24641460234680573, + "acc_norm_stderr": 0.011005971399927239 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.030587591351604246, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.030587591351604246 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.22424242424242424, + "acc_stderr": 
0.03256866661681102, + "acc_norm": 0.22424242424242424, + "acc_norm_stderr": 0.03256866661681102 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.25703794369645044, + "mc1_stderr": 0.01529807750948508, + "mc2": 0.4186727693376496, + "mc2_stderr": 0.014980581436916602 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.269185360094451, + "acc_stderr": 0.015249098024144543, + "acc_norm": 0.358913813459268, + "acc_norm_stderr": 0.016491802102999036 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + 
"harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "chahyunmook/42dot_ppl_20", + "model_sha": "192816b986ac2e78b7ba4788527c4f057fa07265", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/chahyunmook/42dot_ppl_40/result_2024-08-05 14:31:49.json b/chahyunmook/42dot_ppl_40/result_2024-08-05 14:31:49.json new file mode 100644 index 0000000000000000000000000000000000000000..77ea69e45bbfae921e814f2344a9ce6e78c6307c --- /dev/null +++ b/chahyunmook/42dot_ppl_40/result_2024-08-05 14:31:49.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2781569965870307, + "acc_stderr": 0.0130944699195388, + "acc_norm": 0.3293515358361775, + 
"acc_norm_stderr": 0.013734057652635474 + }, + "harness|ko_hellaswag|10": { + "acc": 0.35441147181836286, + "acc_stderr": 0.0047735700961850525, + "acc_norm": 0.445628360884286, + "acc_norm_stderr": 0.0049601913414302435 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.21052631578947367, + "acc_stderr": 0.031267817146631786, + "acc_norm": 0.21052631578947367, + "acc_norm_stderr": 0.031267817146631786 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.1553398058252427, + "acc_stderr": 0.03586594738573975, + "acc_norm": 0.1553398058252427, + "acc_norm_stderr": 0.03586594738573975 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.20689655172413793, + "acc_stderr": 0.014485656041669175, + "acc_norm": 0.20689655172413793, + "acc_norm_stderr": 0.014485656041669175 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.03591444084196969, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.03591444084196969 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.23829787234042554, + "acc_stderr": 0.02785125297388978, + "acc_norm": 0.23829787234042554, + "acc_norm_stderr": 0.02785125297388978 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.27710843373493976, + "acc_stderr": 0.03484331592680589, + "acc_norm": 0.27710843373493976, + "acc_norm_stderr": 0.03484331592680589 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.24758842443729903, + "acc_stderr": 0.024513879973621967, + "acc_norm": 0.24758842443729903, + "acc_norm_stderr": 0.024513879973621967 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.18834080717488788, + "acc_stderr": 0.026241132996407273, + "acc_norm": 0.18834080717488788, + "acc_norm_stderr": 0.026241132996407273 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.22900763358778625, + "acc_stderr": 0.036853466317118506, + "acc_norm": 
0.22900763358778625, + "acc_norm_stderr": 0.036853466317118506 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3434343434343434, + "acc_stderr": 0.03383201223244441, + "acc_norm": 0.3434343434343434, + "acc_norm_stderr": 0.03383201223244441 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2, + "acc_stderr": 0.033333333333333305, + "acc_norm": 0.2, + "acc_norm_stderr": 0.033333333333333305 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.30392156862745096, + "acc_stderr": 0.04576665403207765, + "acc_norm": 0.30392156862745096, + "acc_norm_stderr": 0.04576665403207765 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3487394957983193, + "acc_stderr": 0.030956636328566545, + "acc_norm": 0.3487394957983193, + "acc_norm_stderr": 0.030956636328566545 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3487179487179487, + "acc_stderr": 0.02416278028401772, + "acc_norm": 0.3487179487179487, + "acc_norm_stderr": 0.02416278028401772 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.26851851851851855, + "acc_stderr": 0.04284467968052191, + "acc_norm": 0.26851851851851855, + "acc_norm_stderr": 0.04284467968052191 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3448275862068966, + "acc_stderr": 0.033442837442804574, + "acc_norm": 0.3448275862068966, + "acc_norm_stderr": 0.033442837442804574 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3, + "acc_stderr": 0.026069362295335134, + "acc_norm": 0.3, + 
"acc_norm_stderr": 0.026069362295335134 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.19658119658119658, + "acc_stderr": 0.02603538609895129, + "acc_norm": 0.19658119658119658, + "acc_norm_stderr": 0.02603538609895129 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2490566037735849, + "acc_stderr": 0.02661648298050171, + "acc_norm": 0.2490566037735849, + "acc_norm_stderr": 0.02661648298050171 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.24545454545454545, + "acc_stderr": 0.041220665028782834, + "acc_norm": 0.24545454545454545, + "acc_norm_stderr": 0.041220665028782834 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.02684205787383371, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.02684205787383371 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33112582781456956, + "acc_stderr": 0.038425817186598696, + "acc_norm": 0.33112582781456956, + "acc_norm_stderr": 0.038425817186598696 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.22388059701492538, + "acc_stderr": 0.02947525023601719, + "acc_norm": 0.22388059701492538, + "acc_norm_stderr": 0.02947525023601719 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3179190751445087, + "acc_stderr": 0.0355068398916558, + "acc_norm": 0.3179190751445087, + "acc_norm_stderr": 0.0355068398916558 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.22486772486772486, + "acc_stderr": 0.02150209607822914, + "acc_norm": 0.22486772486772486, + "acc_norm_stderr": 0.02150209607822914 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2638888888888889, + "acc_stderr": 0.03685651095897532, + "acc_norm": 0.2638888888888889, + "acc_norm_stderr": 0.03685651095897532 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.29, + "acc_stderr": 
0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.19653179190751446, + "acc_stderr": 0.021393961404363847, + "acc_norm": 0.19653179190751446, + "acc_norm_stderr": 0.021393961404363847 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2147239263803681, + "acc_stderr": 0.032262193772867744, + "acc_norm": 0.2147239263803681, + "acc_norm_stderr": 0.032262193772867744 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.02438366553103545, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.02438366553103545 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.37823834196891193, + "acc_stderr": 0.03499807276193338, + "acc_norm": 0.37823834196891193, + "acc_norm_stderr": 0.03499807276193338 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.041424397194893596, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.041424397194893596 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3504587155963303, + "acc_stderr": 0.02045607759982446, + "acc_norm": 0.3504587155963303, + "acc_norm_stderr": 0.02045607759982446 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.38095238095238093, + "acc_stderr": 0.04343525428949097, + "acc_norm": 0.38095238095238093, + "acc_norm_stderr": 0.04343525428949097 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.30392156862745096, + "acc_stderr": 0.026336613469046626, + "acc_norm": 0.30392156862745096, + "acc_norm_stderr": 0.026336613469046626 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 
0.35537190082644626, + "acc_stderr": 0.04369236326573981, + "acc_norm": 0.35537190082644626, + "acc_norm_stderr": 0.04369236326573981 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3223684210526316, + "acc_stderr": 0.03803510248351585, + "acc_norm": 0.3223684210526316, + "acc_norm_stderr": 0.03803510248351585 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2173202614379085, + "acc_stderr": 0.016684820929148598, + "acc_norm": 0.2173202614379085, + "acc_norm_stderr": 0.016684820929148598 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.24468085106382978, + "acc_stderr": 0.025645553622266722, + "acc_norm": 0.24468085106382978, + "acc_norm_stderr": 0.025645553622266722 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.1875, + "acc_stderr": 0.0370468111477387, + "acc_norm": 0.1875, + "acc_norm_stderr": 0.0370468111477387 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.0340470532865388, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.0340470532865388 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27262569832402234, + "acc_stderr": 0.014893391735249608, + "acc_norm": 0.27262569832402234, + "acc_norm_stderr": 0.014893391735249608 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932268, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932268 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.44485294117647056, + "acc_stderr": 0.030187532060329383, + "acc_norm": 0.44485294117647056, + "acc_norm_stderr": 0.030187532060329383 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4, + "acc_stderr": 0.03136250240935892, + "acc_norm": 0.4, + "acc_norm_stderr": 0.03136250240935892 + }, + 
"harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2320675105485232, + "acc_stderr": 0.027479744550808514, + "acc_norm": 0.2320675105485232, + "acc_norm_stderr": 0.027479744550808514 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.24641460234680573, + "acc_stderr": 0.01100597139992725, + "acc_norm": 0.24641460234680573, + "acc_norm_stderr": 0.01100597139992725 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.25980392156862747, + "acc_stderr": 0.030778554678693268, + "acc_norm": 0.25980392156862747, + "acc_norm_stderr": 0.030778554678693268 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03225078108306289, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03225078108306289 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2558139534883721, + "mc1_stderr": 0.015274176219283335, + "mc2": 0.4180635991948751, + "mc2_stderr": 0.015264729809222404 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.28689492325855964, + "acc_stderr": 0.01555080996678178, + "acc_norm": 0.3872491145218418, + "acc_norm_stderr": 0.016747577991642792 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + 
"harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "chahyunmook/42dot_ppl_40", + "model_sha": "b85371c39feeacb0372d8ed229b9bf4a60e79cba", + 
"model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/chahyunmook/42dot_ppl_60/result_2024-08-05 15:24:06.json b/chahyunmook/42dot_ppl_60/result_2024-08-05 15:24:06.json new file mode 100644 index 0000000000000000000000000000000000000000..25bb4cd96a69624c6f0b02769c36a8795ad87b30 --- /dev/null +++ b/chahyunmook/42dot_ppl_60/result_2024-08-05 15:24:06.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2738907849829352, + "acc_stderr": 0.013032004972989501, + "acc_norm": 0.32337883959044367, + "acc_norm_stderr": 0.01366942163001212 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3474407488548098, + "acc_stderr": 0.00475184064673085, + "acc_norm": 0.4335789683330014, + "acc_norm_stderr": 0.00494555806985253 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.2046783625730994, + "acc_stderr": 0.03094445977853321, + "acc_norm": 0.2046783625730994, + "acc_norm_stderr": 0.03094445977853321 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.17475728155339806, + "acc_stderr": 0.037601780060266196, + "acc_norm": 0.17475728155339806, + "acc_norm_stderr": 0.037601780060266196 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.29246487867177523, + "acc_stderr": 0.016267000684598652, + "acc_norm": 0.29246487867177523, + "acc_norm_stderr": 0.016267000684598652 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3037037037037037, + "acc_stderr": 0.03972552884785137, + "acc_norm": 0.3037037037037037, + "acc_norm_stderr": 0.03972552884785137 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542129, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542129 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.23829787234042554, + "acc_stderr": 0.02785125297388978, + "acc_norm": 0.23829787234042554, + "acc_norm_stderr": 0.02785125297388978 + }, 
+ "harness|ko_mmlu_virology|5": { + "acc": 0.3132530120481928, + "acc_stderr": 0.036108050180310235, + "acc_norm": 0.3132530120481928, + "acc_norm_stderr": 0.036108050180310235 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2861736334405145, + "acc_stderr": 0.025670259242188936, + "acc_norm": 0.2861736334405145, + "acc_norm_stderr": 0.025670259242188936 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3183856502242152, + "acc_stderr": 0.03126580522513713, + "acc_norm": 0.3183856502242152, + "acc_norm_stderr": 0.03126580522513713 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.22900763358778625, + "acc_stderr": 0.036853466317118506, + "acc_norm": 0.22900763358778625, + "acc_norm_stderr": 0.036853466317118506 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.2676767676767677, + "acc_stderr": 0.03154449888270285, + "acc_norm": 0.2676767676767677, + "acc_norm_stderr": 0.03154449888270285 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2413793103448276, + "acc_stderr": 0.03565998174135303, + "acc_norm": 0.2413793103448276, + "acc_norm_stderr": 0.03565998174135303 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.19607843137254902, + "acc_stderr": 0.039505818611799616, + "acc_norm": 0.19607843137254902, + "acc_norm_stderr": 0.039505818611799616 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.24369747899159663, + "acc_stderr": 0.027886828078380558, + "acc_norm": 0.24369747899159663, + "acc_norm_stderr": 0.027886828078380558 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2846153846153846, + "acc_stderr": 0.022878322799706283, + "acc_norm": 0.2846153846153846, + "acc_norm_stderr": 0.022878322799706283 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + 
"acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04557239513497751, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04557239513497751 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.31527093596059114, + "acc_stderr": 0.03269080871970186, + "acc_norm": 0.31527093596059114, + "acc_norm_stderr": 0.03269080871970186 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.25806451612903225, + "acc_stderr": 0.024892469172462843, + "acc_norm": 0.25806451612903225, + "acc_norm_stderr": 0.024892469172462843 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.17094017094017094, + "acc_stderr": 0.024662496845209804, + "acc_norm": 0.17094017094017094, + "acc_norm_stderr": 0.024662496845209804 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.27547169811320754, + "acc_stderr": 0.027495663683724057, + "acc_norm": 0.27547169811320754, + "acc_norm_stderr": 0.027495663683724057 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.20909090909090908, + "acc_stderr": 0.03895091015724138, + "acc_norm": 0.20909090909090908, + "acc_norm_stderr": 0.03895091015724138 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.026842057873833706, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.026842057873833706 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.25165562913907286, + "acc_stderr": 0.035433042343899844, + "acc_norm": 0.25165562913907286, + "acc_norm_stderr": 0.035433042343899844 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.26865671641791045, + "acc_stderr": 0.03134328358208954, + "acc_norm": 0.26865671641791045, + "acc_norm_stderr": 0.03134328358208954 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.20809248554913296, + 
"acc_stderr": 0.030952890217749895, + "acc_norm": 0.20809248554913296, + "acc_norm_stderr": 0.030952890217749895 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.24603174603174602, + "acc_stderr": 0.022182037202948368, + "acc_norm": 0.24603174603174602, + "acc_norm_stderr": 0.022182037202948368 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2152777777777778, + "acc_stderr": 0.034370793441061344, + "acc_norm": 0.2152777777777778, + "acc_norm_stderr": 0.034370793441061344 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932269, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932269 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2514450867052023, + "acc_stderr": 0.02335736578587403, + "acc_norm": 0.2514450867052023, + "acc_norm_stderr": 0.02335736578587403 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.26993865030674846, + "acc_stderr": 0.034878251684978906, + "acc_norm": 0.26993865030674846, + "acc_norm_stderr": 0.034878251684978906 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.02438366553103545, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.02438366553103545 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.2538860103626943, + "acc_stderr": 0.03141024780565318, + "acc_norm": 0.2538860103626943, + "acc_norm_stderr": 0.03141024780565318 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.22807017543859648, + "acc_stderr": 0.03947152782669415, + "acc_norm": 0.22807017543859648, + "acc_norm_stderr": 0.03947152782669415 + }, + "harness|ko_mmlu_high_school_psychology|5": { + 
"acc": 0.24036697247706423, + "acc_stderr": 0.01832060732096407, + "acc_norm": 0.24036697247706423, + "acc_norm_stderr": 0.01832060732096407 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.23809523809523808, + "acc_stderr": 0.03809523809523811, + "acc_norm": 0.23809523809523808, + "acc_norm_stderr": 0.03809523809523811 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.25163398692810457, + "acc_stderr": 0.0248480182638752, + "acc_norm": 0.25163398692810457, + "acc_norm_stderr": 0.0248480182638752 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.33884297520661155, + "acc_stderr": 0.0432076780753667, + "acc_norm": 0.33884297520661155, + "acc_norm_stderr": 0.0432076780753667 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.19078947368421054, + "acc_stderr": 0.031975658210325, + "acc_norm": 0.19078947368421054, + "acc_norm_stderr": 0.031975658210325 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.01740181671142765, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.01740181671142765 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.24468085106382978, + "acc_stderr": 0.02564555362226673, + "acc_norm": 0.24468085106382978, + "acc_norm_stderr": 0.02564555362226673 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.26785714285714285, + "acc_stderr": 0.04203277291467762, + "acc_norm": 0.26785714285714285, + "acc_norm_stderr": 0.04203277291467762 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.03400603625538272, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.03400603625538272 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27039106145251396, + "acc_stderr": 0.014854993938010083, + "acc_norm": 0.27039106145251396, + "acc_norm_stderr": 
0.014854993938010083 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4338235294117647, + "acc_stderr": 0.03010563657001664, + "acc_norm": 0.4338235294117647, + "acc_norm_stderr": 0.03010563657001664 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.24897959183673468, + "acc_stderr": 0.027682979522960238, + "acc_norm": 0.24897959183673468, + "acc_norm_stderr": 0.027682979522960238 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2616033755274262, + "acc_stderr": 0.028609516716994934, + "acc_norm": 0.2616033755274262, + "acc_norm_stderr": 0.028609516716994934 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.24771838331160365, + "acc_stderr": 0.011025499291443738, + "acc_norm": 0.24771838331160365, + "acc_norm_stderr": 0.011025499291443738 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.25980392156862747, + "acc_stderr": 0.03077855467869328, + "acc_norm": 0.25980392156862747, + "acc_norm_stderr": 0.03077855467869328 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2606060606060606, + "acc_stderr": 0.03427743175816525, + "acc_norm": 0.2606060606060606, + "acc_norm_stderr": 0.03427743175816525 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.23745410036719705, + "mc1_stderr": 0.01489627744104186, + "mc2": 0.39007135790631153, + "mc2_stderr": 0.015105286320589073 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2632821723730815, + "acc_stderr": 0.015141752199573205, + "acc_norm": 0.33766233766233766, + "acc_norm_stderr": 0.016259075784754964 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + 
"harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + 
"harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "chahyunmook/42dot_ppl_60", + "model_sha": "da784d85ebabe5724e33bb31cd355a79c41f8b24", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/chahyunmook/42dot_ppl_all/result_2024-08-05 09:11:49.json b/chahyunmook/42dot_ppl_all/result_2024-08-05 09:11:49.json new file mode 100644 index 0000000000000000000000000000000000000000..6120d5ce511649674e5caad43ae41d27e29eb439 --- /dev/null +++ b/chahyunmook/42dot_ppl_all/result_2024-08-05 09:11:49.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.19539249146757678, + "acc_stderr": 0.011586907189952911, + "acc_norm": 0.2354948805460751, + "acc_norm_stderr": 0.012399451855004752 + }, + "harness|ko_hellaswag|10": { + "acc": 0.2813184624576778, + "acc_stderr": 0.004487235657955677, + "acc_norm": 0.3084047002589126, + "acc_norm_stderr": 0.004608907872957705 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.21052631578947367, + "acc_stderr": 0.0312678171466318, + "acc_norm": 0.21052631578947367, + "acc_norm_stderr": 0.0312678171466318 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.21359223300970873, + "acc_stderr": 0.040580420156460344, + "acc_norm": 0.21359223300970873, + "acc_norm_stderr": 
0.040580420156460344 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2848020434227331, + "acc_stderr": 0.016139174096522577, + "acc_norm": 0.2848020434227331, + "acc_norm_stderr": 0.016139174096522577 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.22962962962962963, + "acc_stderr": 0.036333844140734636, + "acc_norm": 0.22962962962962963, + "acc_norm_stderr": 0.036333844140734636 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.20851063829787234, + "acc_stderr": 0.02655698211783875, + "acc_norm": 0.20851063829787234, + "acc_norm_stderr": 0.02655698211783875 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.2710843373493976, + "acc_stderr": 0.034605799075530255, + "acc_norm": 0.2710843373493976, + "acc_norm_stderr": 0.034605799075530255 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2990353697749196, + "acc_stderr": 0.02600330111788514, + "acc_norm": 0.2990353697749196, + "acc_norm_stderr": 0.02600330111788514 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.36771300448430494, + "acc_stderr": 0.032361983509282745, + "acc_norm": 0.36771300448430494, + "acc_norm_stderr": 0.032361983509282745 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.24427480916030533, + "acc_stderr": 0.037683359597287434, + "acc_norm": 0.24427480916030533, + "acc_norm_stderr": 0.037683359597287434 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.31313131313131315, + "acc_stderr": 0.033042050878136525, + "acc_norm": 0.31313131313131315, + "acc_norm_stderr": 0.033042050878136525 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.27586206896551724, + "acc_stderr": 0.03724563619774631, + "acc_norm": 0.27586206896551724, 
+ "acc_norm_stderr": 0.03724563619774631 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.04389869956808779, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.04389869956808779 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.226890756302521, + "acc_stderr": 0.02720537153827948, + "acc_norm": 0.226890756302521, + "acc_norm_stderr": 0.02720537153827948 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.36666666666666664, + "acc_stderr": 0.02443301646605245, + "acc_norm": 0.36666666666666664, + "acc_norm_stderr": 0.02443301646605245 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.21296296296296297, + "acc_stderr": 0.03957835471980981, + "acc_norm": 0.21296296296296297, + "acc_norm_stderr": 0.03957835471980981 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2315270935960591, + "acc_stderr": 0.02967833314144444, + "acc_norm": 0.2315270935960591, + "acc_norm_stderr": 0.02967833314144444 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.25806451612903225, + "acc_stderr": 0.024892469172462843, + "acc_norm": 0.25806451612903225, + "acc_norm_stderr": 0.024892469172462843 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.19658119658119658, + "acc_stderr": 0.02603538609895129, + "acc_norm": 0.19658119658119658, + "acc_norm_stderr": 0.02603538609895129 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2792452830188679, + "acc_stderr": 0.027611163402399715, + "acc_norm": 0.2792452830188679, + "acc_norm_stderr": 0.027611163402399715 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.21818181818181817, + "acc_stderr": 
0.039559328617958335, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.039559328617958335 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.02671924078371217, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.02671924078371217 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.25165562913907286, + "acc_stderr": 0.035433042343899844, + "acc_norm": 0.25165562913907286, + "acc_norm_stderr": 0.035433042343899844 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.26865671641791045, + "acc_stderr": 0.03134328358208955, + "acc_norm": 0.26865671641791045, + "acc_norm_stderr": 0.03134328358208955 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.34104046242774566, + "acc_stderr": 0.036146654241808254, + "acc_norm": 0.34104046242774566, + "acc_norm_stderr": 0.036146654241808254 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2566137566137566, + "acc_stderr": 0.022494510767503154, + "acc_norm": 0.2566137566137566, + "acc_norm_stderr": 0.022494510767503154 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2638888888888889, + "acc_stderr": 0.03685651095897532, + "acc_norm": 0.2638888888888889, + "acc_norm_stderr": 0.03685651095897532 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816503, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816503 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.20809248554913296, + "acc_stderr": 0.0218552552634218, + "acc_norm": 0.20809248554913296, + "acc_norm_stderr": 0.0218552552634218 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.25153374233128833, + "acc_stderr": 0.03408997886857529, + "acc_norm": 0.25153374233128833, + "acc_norm_stderr": 0.03408997886857529 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 
0.2623456790123457, + "acc_stderr": 0.024477222856135114, + "acc_norm": 0.2623456790123457, + "acc_norm_stderr": 0.024477222856135114 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.33678756476683935, + "acc_stderr": 0.03410780251836184, + "acc_norm": 0.33678756476683935, + "acc_norm_stderr": 0.03410780251836184 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.042270544512322004, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.042270544512322004 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.24036697247706423, + "acc_stderr": 0.01832060732096407, + "acc_norm": 0.24036697247706423, + "acc_norm_stderr": 0.01832060732096407 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.037184890068181146, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.037184890068181146 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.25163398692810457, + "acc_stderr": 0.024848018263875195, + "acc_norm": 0.25163398692810457, + "acc_norm_stderr": 0.024848018263875195 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.3140495867768595, + "acc_stderr": 0.04236964753041019, + "acc_norm": 0.3140495867768595, + "acc_norm_stderr": 0.04236964753041019 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3355263157894737, + "acc_stderr": 0.038424985593952694, + "acc_norm": 0.3355263157894737, + "acc_norm_stderr": 0.038424985593952694 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.22058823529411764, + "acc_stderr": 0.016774672365468517, + "acc_norm": 0.22058823529411764, + "acc_norm_stderr": 0.016774672365468517 + }, + 
"harness|ko_mmlu_professional_accounting|5": { + "acc": 0.20567375886524822, + "acc_stderr": 0.02411213895047188, + "acc_norm": 0.20567375886524822, + "acc_norm_stderr": 0.02411213895047188 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.22321428571428573, + "acc_stderr": 0.03952301967702511, + "acc_norm": 0.22321428571428573, + "acc_norm_stderr": 0.03952301967702511 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.0340470532865388, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.0340470532865388 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24692737430167597, + "acc_stderr": 0.014422292204808852, + "acc_norm": 0.24692737430167597, + "acc_norm_stderr": 0.014422292204808852 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4485294117647059, + "acc_stderr": 0.030211479609121593, + "acc_norm": 0.4485294117647059, + "acc_norm_stderr": 0.030211479609121593 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3346938775510204, + "acc_stderr": 0.030209235226242314, + "acc_norm": 0.3346938775510204, + "acc_norm_stderr": 0.030209235226242314 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2616033755274262, + "acc_stderr": 0.028609516716994934, + "acc_norm": 0.2616033755274262, + "acc_norm_stderr": 0.028609516716994934 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2392438070404172, + "acc_stderr": 0.010896123652676655, + "acc_norm": 0.2392438070404172, + "acc_norm_stderr": 0.010896123652676655 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.030587591351604243, 
+ "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.030587591351604243 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.23030303030303031, + "acc_stderr": 0.03287666758603488, + "acc_norm": 0.23030303030303031, + "acc_norm_stderr": 0.03287666758603488 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.23745410036719705, + "mc1_stderr": 0.01489627744104186, + "mc2": 0.4754396473258298, + "mc2_stderr": 0.01671070620074593 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2644628099173554, + "acc_stderr": 0.015163499477892412, + "acc_norm": 0.45690672963400236, + "acc_norm_stderr": 0.017126389093086777 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + 
"harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "chahyunmook/42dot_ppl_all", + "model_sha": "015c39f0a9ad8e5d5f88697be3812c12ffd263f8", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/chahyunmook/42dot_v0/result_2024-06-10 07:08:37.json b/chahyunmook/42dot_v0/result_2024-06-10 07:08:37.json new file mode 100644 index 0000000000000000000000000000000000000000..f906c05fbdadd7185b4c5334bca15ea212e75df1 --- /dev/null +++ b/chahyunmook/42dot_v0/result_2024-06-10 07:08:37.json @@ -0,0 +1,444 @@ +{ + 
"results": { + "harness|ko_arc_challenge|25": { + "acc": 0.30204778156996587, + "acc_stderr": 0.01341751914471643, + "acc_norm": 0.3430034129692833, + "acc_norm_stderr": 0.013872423223718174 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3623780123481378, + "acc_stderr": 0.004797048154893968, + "acc_norm": 0.4510057757418841, + "acc_norm_stderr": 0.0049657683486280585 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.21637426900584794, + "acc_stderr": 0.03158149539338733, + "acc_norm": 0.21637426900584794, + "acc_norm_stderr": 0.03158149539338733 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.1941747572815534, + "acc_stderr": 0.03916667762822584, + "acc_norm": 0.1941747572815534, + "acc_norm_stderr": 0.03916667762822584 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2886334610472541, + "acc_stderr": 0.016203792703197793, + "acc_norm": 0.2886334610472541, + "acc_norm_stderr": 0.016203792703197793 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2740740740740741, + "acc_stderr": 0.03853254836552003, + "acc_norm": 0.2740740740740741, + "acc_norm_stderr": 0.03853254836552003 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2723404255319149, + "acc_stderr": 0.02910129069838671, + "acc_norm": 0.2723404255319149, + "acc_norm_stderr": 0.02910129069838671 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3132530120481928, + "acc_stderr": 0.036108050180310235, + "acc_norm": 0.3132530120481928, + "acc_norm_stderr": 0.036108050180310235 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.24758842443729903, + "acc_stderr": 0.024513879973621967, + "acc_norm": 0.24758842443729903, + "acc_norm_stderr": 0.024513879973621967 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3721973094170404, + "acc_stderr": 0.0324430528300873, + "acc_norm": 0.3721973094170404, + "acc_norm_stderr": 
0.0324430528300873 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.20610687022900764, + "acc_stderr": 0.03547771004159463, + "acc_norm": 0.20610687022900764, + "acc_norm_stderr": 0.03547771004159463 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3282828282828283, + "acc_stderr": 0.03345678422756776, + "acc_norm": 0.3282828282828283, + "acc_norm_stderr": 0.03345678422756776 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.1793103448275862, + "acc_stderr": 0.03196766433373187, + "acc_norm": 0.1793103448275862, + "acc_norm_stderr": 0.03196766433373187 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.04220773659171453, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.04220773659171453 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.31512605042016806, + "acc_stderr": 0.030176808288974333, + "acc_norm": 0.31512605042016806, + "acc_norm_stderr": 0.030176808288974333 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3230769230769231, + "acc_stderr": 0.02371088850197057, + "acc_norm": 0.3230769230769231, + "acc_norm_stderr": 0.02371088850197057 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.24074074074074073, + "acc_stderr": 0.041331194402438376, + "acc_norm": 0.24074074074074073, + "acc_norm_stderr": 0.041331194402438376 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.19704433497536947, + "acc_stderr": 0.02798672466673622, + "acc_norm": 
0.19704433497536947, + "acc_norm_stderr": 0.02798672466673622 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2838709677419355, + "acc_stderr": 0.025649381063029265, + "acc_norm": 0.2838709677419355, + "acc_norm_stderr": 0.025649381063029265 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.21367521367521367, + "acc_stderr": 0.02685345037700916, + "acc_norm": 0.21367521367521367, + "acc_norm_stderr": 0.02685345037700916 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.27169811320754716, + "acc_stderr": 0.027377706624670713, + "acc_norm": 0.27169811320754716, + "acc_norm_stderr": 0.027377706624670713 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.22727272727272727, + "acc_stderr": 0.04013964554072774, + "acc_norm": 0.22727272727272727, + "acc_norm_stderr": 0.04013964554072774 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.23703703703703705, + "acc_stderr": 0.025928876132766093, + "acc_norm": 0.23703703703703705, + "acc_norm_stderr": 0.025928876132766093 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.23841059602649006, + "acc_stderr": 0.03479185572599661, + "acc_norm": 0.23841059602649006, + "acc_norm_stderr": 0.03479185572599661 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.263681592039801, + "acc_stderr": 0.03115715086935557, + "acc_norm": 0.263681592039801, + "acc_norm_stderr": 0.03115715086935557 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3236994219653179, + "acc_stderr": 0.035676037996391706, + "acc_norm": 0.3236994219653179, + "acc_norm_stderr": 0.035676037996391706 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2566137566137566, + "acc_stderr": 0.022494510767503154, + "acc_norm": 0.2566137566137566, + "acc_norm_stderr": 0.022494510767503154 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2708333333333333, + "acc_stderr": 0.03716177437566017, + "acc_norm": 0.2708333333333333, + "acc_norm_stderr": 0.03716177437566017 + }, + 
"harness|ko_mmlu_college_chemistry|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.22832369942196531, + "acc_stderr": 0.022598703804321624, + "acc_norm": 0.22832369942196531, + "acc_norm_stderr": 0.022598703804321624 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2883435582822086, + "acc_stderr": 0.035590395316173425, + "acc_norm": 0.2883435582822086, + "acc_norm_stderr": 0.035590395316173425 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2716049382716049, + "acc_stderr": 0.02474862449053737, + "acc_norm": 0.2716049382716049, + "acc_norm_stderr": 0.02474862449053737 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.3626943005181347, + "acc_stderr": 0.034697137917043715, + "acc_norm": 0.3626943005181347, + "acc_norm_stderr": 0.034697137917043715 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.04049339297748142, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.04049339297748142 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3211009174311927, + "acc_stderr": 0.02001814977273375, + "acc_norm": 0.3211009174311927, + "acc_norm_stderr": 0.02001814977273375 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.19047619047619047, + "acc_stderr": 0.035122074123020514, + "acc_norm": 0.19047619047619047, + "acc_norm_stderr": 0.035122074123020514 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.22875816993464052, + "acc_stderr": 0.024051029739912258, + "acc_norm": 0.22875816993464052, + "acc_norm_stderr": 
0.024051029739912258 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036843, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036843 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2892561983471074, + "acc_stderr": 0.041391127276354626, + "acc_norm": 0.2892561983471074, + "acc_norm_stderr": 0.041391127276354626 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.20394736842105263, + "acc_stderr": 0.03279000406310049, + "acc_norm": 0.20394736842105263, + "acc_norm_stderr": 0.03279000406310049 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2173202614379085, + "acc_stderr": 0.016684820929148598, + "acc_norm": 0.2173202614379085, + "acc_norm_stderr": 0.016684820929148598 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.25886524822695034, + "acc_stderr": 0.026129572527180848, + "acc_norm": 0.25886524822695034, + "acc_norm_stderr": 0.026129572527180848 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.21428571428571427, + "acc_stderr": 0.03894641120044793, + "acc_norm": 0.21428571428571427, + "acc_norm_stderr": 0.03894641120044793 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.03400603625538272, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.03400603625538272 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27150837988826815, + "acc_stderr": 0.014874252168095278, + "acc_norm": 0.27150837988826815, + "acc_norm_stderr": 0.014874252168095278 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.030161911930767105, + "acc_norm": 
0.4411764705882353, + "acc_norm_stderr": 0.030161911930767105 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.39183673469387753, + "acc_stderr": 0.03125127591089165, + "acc_norm": 0.39183673469387753, + "acc_norm_stderr": 0.03125127591089165 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2911392405063291, + "acc_stderr": 0.029571601065753374, + "acc_norm": 0.2911392405063291, + "acc_norm_stderr": 0.029571601065753374 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2470664928292047, + "acc_stderr": 0.011015752255279336, + "acc_norm": 0.2470664928292047, + "acc_norm_stderr": 0.011015752255279336 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.03019028245350194, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.03019028245350194 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.24242424242424243, + "acc_stderr": 0.033464098810559534, + "acc_norm": 0.24242424242424243, + "acc_norm_stderr": 0.033464098810559534 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2692778457772338, + "mc1_stderr": 0.015528566637087293, + "mc2": 0.417863298837643, + "mc2_stderr": 0.015437477415793885 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.23258559622195984, + "acc_stderr": 0.014525169182416493, + "acc_norm": 0.29279811097992914, + "acc_norm_stderr": 0.015644823205401337 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + 
"harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + 
"harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "chahyunmook/42dot_v0", + "model_sha": "c28e8c398ff8dd46787d67f9d872a1512d4b9b19", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/chargoddard/Yi-6B-Llama/result_2023-11-21 00:54:55.json b/chargoddard/Yi-6B-Llama/result_2023-11-21 00:54:55.json new file mode 100644 index 0000000000000000000000000000000000000000..7c6735232d17805f6385a5d96ff8364b5575254d --- /dev/null +++ b/chargoddard/Yi-6B-Llama/result_2023-11-21 00:54:55.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2175767918088737, + "acc_stderr": 0.012057262020972502, + "acc_norm": 0.2627986348122867, + "acc_norm_stderr": 0.012862523175351331 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3028281218880701, + "acc_stderr": 0.004585424513012102, + "acc_norm": 0.35082652857996416, + "acc_norm_stderr": 0.004762534245488401 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4678362573099415, + "acc_stderr": 0.03826882417660368, + "acc_norm": 0.4678362573099415, + "acc_norm_stderr": 0.03826882417660368 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.44660194174757284, + "acc_stderr": 0.04922424153458933, + "acc_norm": 0.44660194174757284, + "acc_norm_stderr": 0.04922424153458933 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.39208173690932313, + "acc_stderr": 0.017458524050147636, + "acc_norm": 0.39208173690932313, + "acc_norm_stderr": 0.017458524050147636 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34814814814814815, + "acc_stderr": 0.041153246103369526, + "acc_norm": 0.34814814814814815, + "acc_norm_stderr": 0.041153246103369526 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + 
"acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3702127659574468, + "acc_stderr": 0.031565646822367836, + "acc_norm": 0.3702127659574468, + "acc_norm_stderr": 0.031565646822367836 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.35542168674698793, + "acc_stderr": 0.03726214354322415, + "acc_norm": 0.35542168674698793, + "acc_norm_stderr": 0.03726214354322415 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4340836012861736, + "acc_stderr": 0.0281502322445356, + "acc_norm": 0.4340836012861736, + "acc_norm_stderr": 0.0281502322445356 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3183856502242152, + "acc_stderr": 0.03126580522513713, + "acc_norm": 0.3183856502242152, + "acc_norm_stderr": 0.03126580522513713 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3893129770992366, + "acc_stderr": 0.04276486542814591, + "acc_norm": 0.3893129770992366, + "acc_norm_stderr": 0.04276486542814591 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.4292929292929293, + "acc_stderr": 0.03526552724601199, + "acc_norm": 0.4292929292929293, + "acc_norm_stderr": 0.03526552724601199 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4896551724137931, + "acc_stderr": 0.041657747757287644, + "acc_norm": 0.4896551724137931, + "acc_norm_stderr": 0.041657747757287644 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.04488482852329017, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.04488482852329017 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.46218487394957986, + "acc_stderr": 0.032385469487589795, + "acc_norm": 0.46218487394957986, + "acc_norm_stderr": 0.032385469487589795 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 
0.34102564102564104, + "acc_stderr": 0.02403548967633507, + "acc_norm": 0.34102564102564104, + "acc_norm_stderr": 0.02403548967633507 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3842364532019704, + "acc_stderr": 0.03422398565657551, + "acc_norm": 0.3842364532019704, + "acc_norm_stderr": 0.03422398565657551 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.36129032258064514, + "acc_stderr": 0.02732754844795754, + "acc_norm": 0.36129032258064514, + "acc_norm_stderr": 0.02732754844795754 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6410256410256411, + "acc_stderr": 0.03142616993791923, + "acc_norm": 0.6410256410256411, + "acc_norm_stderr": 0.03142616993791923 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3660377358490566, + "acc_stderr": 0.029647813539365263, + "acc_norm": 0.3660377358490566, + "acc_norm_stderr": 0.029647813539365263 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.38181818181818183, + "acc_stderr": 0.04653429807913508, + "acc_norm": 0.38181818181818183, + "acc_norm_stderr": 0.04653429807913508 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.027840811495871937, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.027840811495871937 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.03710185726119995, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.03710185726119995 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 
0.48756218905472637, + "acc_stderr": 0.03534439848539579, + "acc_norm": 0.48756218905472637, + "acc_norm_stderr": 0.03534439848539579 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.35260115606936415, + "acc_stderr": 0.03643037168958548, + "acc_norm": 0.35260115606936415, + "acc_norm_stderr": 0.03643037168958548 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.36243386243386244, + "acc_stderr": 0.02475747390275205, + "acc_norm": 0.36243386243386244, + "acc_norm_stderr": 0.02475747390275205 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3194444444444444, + "acc_stderr": 0.03899073687357335, + "acc_norm": 0.3194444444444444, + "acc_norm_stderr": 0.03899073687357335 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.55, + "acc_stderr": 0.05, + "acc_norm": 0.55, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4797687861271676, + "acc_stderr": 0.026897049996382875, + "acc_norm": 0.4797687861271676, + "acc_norm_stderr": 0.026897049996382875 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3803680981595092, + "acc_stderr": 0.03814269893261837, + "acc_norm": 0.3803680981595092, + "acc_norm_stderr": 0.03814269893261837 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.39814814814814814, + "acc_stderr": 0.027237415094592474, + "acc_norm": 0.39814814814814814, + "acc_norm_stderr": 0.027237415094592474 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.38341968911917096, + "acc_stderr": 0.03508984236295342, + "acc_norm": 0.38341968911917096, + "acc_norm_stderr": 0.03508984236295342 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 
0.2543859649122807, + "acc_stderr": 0.040969851398436695, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.040969851398436695 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3724770642201835, + "acc_stderr": 0.020728368457638494, + "acc_norm": 0.3724770642201835, + "acc_norm_stderr": 0.020728368457638494 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.0442626668137991, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.0442626668137991 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4477124183006536, + "acc_stderr": 0.02847293847803353, + "acc_norm": 0.4477124183006536, + "acc_norm_stderr": 0.02847293847803353 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5950413223140496, + "acc_stderr": 0.04481137755942469, + "acc_norm": 0.5950413223140496, + "acc_norm_stderr": 0.04481137755942469 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40131578947368424, + "acc_stderr": 0.039889037033362836, + "acc_norm": 0.40131578947368424, + "acc_norm_stderr": 0.039889037033362836 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.31862745098039214, + "acc_stderr": 0.018850084696468712, + "acc_norm": 0.31862745098039214, + "acc_norm_stderr": 0.018850084696468712 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.38652482269503546, + "acc_stderr": 0.029049190342543465, + "acc_norm": 0.38652482269503546, + "acc_norm_stderr": 0.029049190342543465 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3392857142857143, + "acc_stderr": 0.04493949068613539, + "acc_norm": 0.3392857142857143, + "acc_norm_stderr": 0.04493949068613539 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.39351851851851855, + "acc_stderr": 0.03331747876370312, + "acc_norm": 0.39351851851851855, + "acc_norm_stderr": 
0.03331747876370312 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2446927374301676, + "acc_stderr": 0.014378169884098426, + "acc_norm": 0.2446927374301676, + "acc_norm_stderr": 0.014378169884098426 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.35661764705882354, + "acc_stderr": 0.02909720956841196, + "acc_norm": 0.35661764705882354, + "acc_norm_stderr": 0.02909720956841196 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.563265306122449, + "acc_stderr": 0.031751952375833226, + "acc_norm": 0.563265306122449, + "acc_norm_stderr": 0.031751952375833226 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.3459915611814346, + "acc_stderr": 0.030964810588786713, + "acc_norm": 0.3459915611814346, + "acc_norm_stderr": 0.030964810588786713 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3318122555410691, + "acc_stderr": 0.012026088259897634, + "acc_norm": 0.3318122555410691, + "acc_norm_stderr": 0.012026088259897634 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4068627450980392, + "acc_stderr": 0.03447891136353382, + "acc_norm": 0.4068627450980392, + "acc_norm_stderr": 0.03447891136353382 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4303030303030303, + "acc_stderr": 0.03866225962879077, + "acc_norm": 0.4303030303030303, + "acc_norm_stderr": 0.03866225962879077 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3023255813953488, + "mc1_stderr": 0.01607750926613303, + "mc2": 0.4750668989915785, + "mc2_stderr": 0.015774112289507786 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3270365997638725, + "acc_stderr": 0.016129047485457022, + 
"acc_norm": 0.39669421487603307, + "acc_norm_stderr": 0.016819438642971408 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + 
"harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "chargoddard/Yi-6B-Llama", + "model_sha": "282ad3e8502e1830c466dd75601af816a43b8bcf", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/chihoonlee10/T3Q-KO-SOLAR-MG-v1.0/result_2024-03-17 13:25:44.json b/chihoonlee10/T3Q-KO-SOLAR-MG-v1.0/result_2024-03-17 13:25:44.json new file mode 100644 index 0000000000000000000000000000000000000000..1d73734283ad8034ca9a05e04c413e24713cdbb7 --- /dev/null +++ b/chihoonlee10/T3Q-KO-SOLAR-MG-v1.0/result_2024-03-17 13:25:44.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.6843003412969283, + "acc_stderr": 0.013582571095815293, + "acc_norm": 0.7303754266211604, + "acc_norm_stderr": 0.012968040686869154 + }, + "harness|ko_hellaswag|10": { + "acc": 0.47211710814578767, + "acc_stderr": 0.004982016702445962, + "acc_norm": 0.6179047998406691, + "acc_norm_stderr": 0.004849065962692128 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.7309941520467836, + "acc_stderr": 0.0340105262010409, + "acc_norm": 0.7309941520467836, + 
"acc_norm_stderr": 0.0340105262010409 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.7184466019417476, + "acc_stderr": 0.044532548363264673, + "acc_norm": 0.7184466019417476, + "acc_norm_stderr": 0.044532548363264673 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.7318007662835249, + "acc_stderr": 0.01584243083526948, + "acc_norm": 0.7318007662835249, + "acc_norm_stderr": 0.01584243083526948 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45185185185185184, + "acc_stderr": 0.04299268905480863, + "acc_norm": 0.45185185185185184, + "acc_norm_stderr": 0.04299268905480863 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145634, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145634 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.5574468085106383, + "acc_stderr": 0.032469569197899575, + "acc_norm": 0.5574468085106383, + "acc_norm_stderr": 0.032469569197899575 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.536144578313253, + "acc_stderr": 0.03882310850890594, + "acc_norm": 0.536144578313253, + "acc_norm_stderr": 0.03882310850890594 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6527331189710611, + "acc_stderr": 0.027040745502307336, + "acc_norm": 0.6527331189710611, + "acc_norm_stderr": 0.027040745502307336 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.6636771300448431, + "acc_stderr": 0.031708824268455, + "acc_norm": 0.6636771300448431, + "acc_norm_stderr": 0.031708824268455 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6641221374045801, + "acc_stderr": 0.04142313771996664, + "acc_norm": 0.6641221374045801, + "acc_norm_stderr": 0.04142313771996664 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7373737373737373, + "acc_stderr": 0.03135305009533087, + "acc_norm": 0.7373737373737373, + 
"acc_norm_stderr": 0.03135305009533087 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5310344827586206, + "acc_stderr": 0.04158632762097828, + "acc_norm": 0.5310344827586206, + "acc_norm_stderr": 0.04158632762097828 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04690650298201942, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04690650298201942 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6596638655462185, + "acc_stderr": 0.03077805742293167, + "acc_norm": 0.6596638655462185, + "acc_norm_stderr": 0.03077805742293167 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.6564102564102564, + "acc_stderr": 0.024078696580635495, + "acc_norm": 0.6564102564102564, + "acc_norm_stderr": 0.024078696580635495 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.7, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.7, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6574074074074074, + "acc_stderr": 0.04587904741301811, + "acc_norm": 0.6574074074074074, + "acc_norm_stderr": 0.04587904741301811 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.46798029556650245, + "acc_stderr": 0.035107665979592154, + "acc_norm": 0.46798029556650245, + "acc_norm_stderr": 0.035107665979592154 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6774193548387096, + "acc_stderr": 0.026593084516572288, + "acc_norm": 0.6774193548387096, + "acc_norm_stderr": 0.026593084516572288 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.8461538461538461, + "acc_stderr": 0.023636873317489284, + "acc_norm": 0.8461538461538461, + "acc_norm_stderr": 0.023636873317489284 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.6188679245283019, + "acc_stderr": 
0.029890609686286627, + "acc_norm": 0.6188679245283019, + "acc_norm_stderr": 0.029890609686286627 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6545454545454545, + "acc_stderr": 0.04554619617541053, + "acc_norm": 0.6545454545454545, + "acc_norm_stderr": 0.04554619617541053 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3962962962962963, + "acc_stderr": 0.029822619458533997, + "acc_norm": 0.3962962962962963, + "acc_norm_stderr": 0.029822619458533997 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.36423841059602646, + "acc_stderr": 0.03929111781242741, + "acc_norm": 0.36423841059602646, + "acc_norm_stderr": 0.03929111781242741 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.746268656716418, + "acc_stderr": 0.03076944496729601, + "acc_norm": 0.746268656716418, + "acc_norm_stderr": 0.03076944496729601 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5838150289017341, + "acc_stderr": 0.03758517775404948, + "acc_norm": 0.5838150289017341, + "acc_norm_stderr": 0.03758517775404948 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.42592592592592593, + "acc_stderr": 0.025467149045469546, + "acc_norm": 0.42592592592592593, + "acc_norm_stderr": 0.025467149045469546 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5694444444444444, + "acc_stderr": 0.04140685639111503, + "acc_norm": 0.5694444444444444, + "acc_norm_stderr": 0.04140685639111503 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.84, + "acc_stderr": 0.03684529491774709, + "acc_norm": 0.84, + "acc_norm_stderr": 0.03684529491774709 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.653179190751445, + "acc_stderr": 0.025624723994030457, + "acc_norm": 0.653179190751445, + "acc_norm_stderr": 0.025624723994030457 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 
0.6257668711656442, + "acc_stderr": 0.03802068102899615, + "acc_norm": 0.6257668711656442, + "acc_norm_stderr": 0.03802068102899615 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6604938271604939, + "acc_stderr": 0.026348564412011624, + "acc_norm": 0.6604938271604939, + "acc_norm_stderr": 0.026348564412011624 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7564766839378239, + "acc_stderr": 0.030975436386845436, + "acc_norm": 0.7564766839378239, + "acc_norm_stderr": 0.030975436386845436 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.43859649122807015, + "acc_stderr": 0.04668000738510455, + "acc_norm": 0.43859649122807015, + "acc_norm_stderr": 0.04668000738510455 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.7706422018348624, + "acc_stderr": 0.01802534972461868, + "acc_norm": 0.7706422018348624, + "acc_norm_stderr": 0.01802534972461868 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.0442626668137991, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.0442626668137991 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.630718954248366, + "acc_stderr": 0.027634176689602656, + "acc_norm": 0.630718954248366, + "acc_norm_stderr": 0.027634176689602656 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.66, + "acc_stderr": 0.04760952285695237, + "acc_norm": 0.66, + "acc_norm_stderr": 0.04760952285695237 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7520661157024794, + "acc_stderr": 0.03941897526516303, + "acc_norm": 0.7520661157024794, + "acc_norm_stderr": 0.03941897526516303 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.6381578947368421, + "acc_stderr": 0.03910525752849724, + "acc_norm": 0.6381578947368421, + "acc_norm_stderr": 0.03910525752849724 + }, + 
"harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5849673202614379, + "acc_stderr": 0.01993362777685742, + "acc_norm": 0.5849673202614379, + "acc_norm_stderr": 0.01993362777685742 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.4219858156028369, + "acc_stderr": 0.02946218923337059, + "acc_norm": 0.4219858156028369, + "acc_norm_stderr": 0.02946218923337059 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.41964285714285715, + "acc_stderr": 0.04684099321077106, + "acc_norm": 0.41964285714285715, + "acc_norm_stderr": 0.04684099321077106 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5324074074074074, + "acc_stderr": 0.03402801581358966, + "acc_norm": 0.5324074074074074, + "acc_norm_stderr": 0.03402801581358966 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.33519553072625696, + "acc_stderr": 0.015788007190185888, + "acc_norm": 0.33519553072625696, + "acc_norm_stderr": 0.015788007190185888 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.47, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.47, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.73, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.73, + "acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5772058823529411, + "acc_stderr": 0.030008562845003476, + "acc_norm": 0.5772058823529411, + "acc_norm_stderr": 0.030008562845003476 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6489795918367347, + "acc_stderr": 0.030555316755573644, + "acc_norm": 0.6489795918367347, + "acc_norm_stderr": 0.030555316755573644 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7426160337552743, + "acc_stderr": 0.0284588209914603, + "acc_norm": 0.7426160337552743, + "acc_norm_stderr": 0.0284588209914603 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.44328552803129073, + "acc_stderr": 0.012687818419599916, + 
"acc_norm": 0.44328552803129073, + "acc_norm_stderr": 0.012687818419599916 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6568627450980392, + "acc_stderr": 0.03332139944668086, + "acc_norm": 0.6568627450980392, + "acc_norm_stderr": 0.03332139944668086 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6727272727272727, + "acc_stderr": 0.03663974994391242, + "acc_norm": 0.6727272727272727, + "acc_norm_stderr": 0.03663974994391242 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.6156670746634026, + "mc1_stderr": 0.01702870730124521, + "mc2": 0.715362766622031, + "mc2_stderr": 0.014287009722062975 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5242030696576151, + "acc_stderr": 0.017170202466520748, + "acc_norm": 0.5608028335301063, + "acc_norm_stderr": 0.017062775744780705 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "chihoonlee10/T3Q-KO-SOLAR-MG-v1.0", + "model_sha": "ef58b1f80ebee8907b3eb8381b06f17497e52c66", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/chihoonlee10/T3Q-LLM-MG-DPO-v1.0/result_2024-04-30 23:42:09.json b/chihoonlee10/T3Q-LLM-MG-DPO-v1.0/result_2024-04-30 23:42:09.json new 
file mode 100644 index 0000000000000000000000000000000000000000..fa03303b9c95ed2e28e3a697bdb1c8ab22829087 --- /dev/null +++ b/chihoonlee10/T3Q-LLM-MG-DPO-v1.0/result_2024-04-30 23:42:09.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.6092150170648464, + "acc_stderr": 0.014258563880513777, + "acc_norm": 0.658703071672355, + "acc_norm_stderr": 0.013855831287497723 + }, + "harness|ko_hellaswag|10": { + "acc": 0.5091615216092412, + "acc_stderr": 0.004988943721711237, + "acc_norm": 0.6653057159928301, + "acc_norm_stderr": 0.004709190850274409 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.672514619883041, + "acc_stderr": 0.035993357714560276, + "acc_norm": 0.672514619883041, + "acc_norm_stderr": 0.035993357714560276 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6893203883495146, + "acc_stderr": 0.0458212416016155, + "acc_norm": 0.6893203883495146, + "acc_norm_stderr": 0.0458212416016155 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.7062579821200511, + "acc_stderr": 0.01628775938849169, + "acc_norm": 0.7062579821200511, + "acc_norm_stderr": 0.01628775938849169 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.043097329010363554, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.043097329010363554 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.5617021276595745, + "acc_stderr": 0.032436186361081004, + "acc_norm": 0.5617021276595745, + "acc_norm_stderr": 0.032436186361081004 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.5, + "acc_stderr": 0.03892494720807614, + "acc_norm": 0.5, + "acc_norm_stderr": 0.03892494720807614 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6655948553054662, + "acc_stderr": 0.026795422327893947, + "acc_norm": 0.6655948553054662, + "acc_norm_stderr": 
0.026795422327893947 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.6547085201793722, + "acc_stderr": 0.03191100192835794, + "acc_norm": 0.6547085201793722, + "acc_norm_stderr": 0.03191100192835794 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6106870229007634, + "acc_stderr": 0.04276486542814591, + "acc_norm": 0.6106870229007634, + "acc_norm_stderr": 0.04276486542814591 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7777777777777778, + "acc_stderr": 0.029620227874790458, + "acc_norm": 0.7777777777777778, + "acc_norm_stderr": 0.029620227874790458 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5379310344827586, + "acc_stderr": 0.041546596717075474, + "acc_norm": 0.5379310344827586, + "acc_norm_stderr": 0.041546596717075474 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.04617034827006718, + "acc_norm": 0.3137254901960784, + "acc_norm_stderr": 0.04617034827006718 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6596638655462185, + "acc_stderr": 0.03077805742293167, + "acc_norm": 0.6596638655462185, + "acc_norm_stderr": 0.03077805742293167 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.6410256410256411, + "acc_stderr": 0.024321738484602354, + "acc_norm": 0.6410256410256411, + "acc_norm_stderr": 0.024321738484602354 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.63, + "acc_stderr": 0.04852365870939098, + "acc_norm": 0.63, + "acc_norm_stderr": 0.04852365870939098 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6481481481481481, + "acc_stderr": 0.046166311118017146, + "acc_norm": 0.6481481481481481, + 
"acc_norm_stderr": 0.046166311118017146 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4482758620689655, + "acc_stderr": 0.034991131376767445, + "acc_norm": 0.4482758620689655, + "acc_norm_stderr": 0.034991131376767445 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6387096774193548, + "acc_stderr": 0.02732754844795755, + "acc_norm": 0.6387096774193548, + "acc_norm_stderr": 0.02732754844795755 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.8205128205128205, + "acc_stderr": 0.02514093595033544, + "acc_norm": 0.8205128205128205, + "acc_norm_stderr": 0.02514093595033544 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.6037735849056604, + "acc_stderr": 0.030102793781791194, + "acc_norm": 0.6037735849056604, + "acc_norm_stderr": 0.030102793781791194 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6636363636363637, + "acc_stderr": 0.04525393596302505, + "acc_norm": 0.6636363636363637, + "acc_norm_stderr": 0.04525393596302505 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.029723278961476664, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.029723278961476664 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3443708609271523, + "acc_stderr": 0.03879687024073327, + "acc_norm": 0.3443708609271523, + "acc_norm_stderr": 0.03879687024073327 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7562189054726368, + "acc_stderr": 0.030360490154014638, + "acc_norm": 0.7562189054726368, + "acc_norm_stderr": 0.030360490154014638 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5780346820809249, + "acc_stderr": 0.037657466938651504, + "acc_norm": 0.5780346820809249, + "acc_norm_stderr": 0.037657466938651504 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.4603174603174603, + "acc_stderr": 0.025670080636909193, + "acc_norm": 0.4603174603174603, + "acc_norm_stderr": 0.025670080636909193 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 
0.5972222222222222, + "acc_stderr": 0.04101405519842426, + "acc_norm": 0.5972222222222222, + "acc_norm_stderr": 0.04101405519842426 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.74, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.74, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.6127167630057804, + "acc_stderr": 0.026226158605124658, + "acc_norm": 0.6127167630057804, + "acc_norm_stderr": 0.026226158605124658 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.6441717791411042, + "acc_stderr": 0.03761521380046734, + "acc_norm": 0.6441717791411042, + "acc_norm_stderr": 0.03761521380046734 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.654320987654321, + "acc_stderr": 0.026462487777001865, + "acc_norm": 0.654320987654321, + "acc_norm_stderr": 0.026462487777001865 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7512953367875648, + "acc_stderr": 0.031195840877700293, + "acc_norm": 0.7512953367875648, + "acc_norm_stderr": 0.031195840877700293 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.43859649122807015, + "acc_stderr": 0.04668000738510455, + "acc_norm": 0.43859649122807015, + "acc_norm_stderr": 0.04668000738510455 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.744954128440367, + "acc_stderr": 0.018688500856535853, + "acc_norm": 0.744954128440367, + "acc_norm_stderr": 0.018688500856535853 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4365079365079365, + "acc_stderr": 0.04435932892851466, + "acc_norm": 0.4365079365079365, + "acc_norm_stderr": 0.04435932892851466 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 
0.6176470588235294, + "acc_stderr": 0.02782610930728369, + "acc_norm": 0.6176470588235294, + "acc_norm_stderr": 0.02782610930728369 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.59, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.59, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.768595041322314, + "acc_stderr": 0.03849856098794088, + "acc_norm": 0.768595041322314, + "acc_norm_stderr": 0.03849856098794088 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5855263157894737, + "acc_stderr": 0.04008973785779205, + "acc_norm": 0.5855263157894737, + "acc_norm_stderr": 0.04008973785779205 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5571895424836601, + "acc_stderr": 0.020095083154577354, + "acc_norm": 0.5571895424836601, + "acc_norm_stderr": 0.020095083154577354 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.4432624113475177, + "acc_stderr": 0.029634838473766006, + "acc_norm": 0.4432624113475177, + "acc_norm_stderr": 0.029634838473766006 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4107142857142857, + "acc_stderr": 0.04669510663875191, + "acc_norm": 0.4107142857142857, + "acc_norm_stderr": 0.04669510663875191 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5324074074074074, + "acc_stderr": 0.03402801581358966, + "acc_norm": 0.5324074074074074, + "acc_norm_stderr": 0.03402801581358966 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.36201117318435755, + "acc_stderr": 0.016073067350153084, + "acc_norm": 0.36201117318435755, + "acc_norm_stderr": 0.016073067350153084 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.72, + "acc_stderr": 0.045126085985421296, + "acc_norm": 0.72, + "acc_norm_stderr": 0.045126085985421296 + }, + 
"harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5367647058823529, + "acc_stderr": 0.03029061918048569, + "acc_norm": 0.5367647058823529, + "acc_norm_stderr": 0.03029061918048569 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6816326530612244, + "acc_stderr": 0.029822533793982045, + "acc_norm": 0.6816326530612244, + "acc_norm_stderr": 0.029822533793982045 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.759493670886076, + "acc_stderr": 0.02782078198114968, + "acc_norm": 0.759493670886076, + "acc_norm_stderr": 0.02782078198114968 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.44328552803129073, + "acc_stderr": 0.012687818419599917, + "acc_norm": 0.44328552803129073, + "acc_norm_stderr": 0.012687818419599917 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6862745098039216, + "acc_stderr": 0.03256685484460388, + "acc_norm": 0.6862745098039216, + "acc_norm_stderr": 0.03256685484460388 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6848484848484848, + "acc_stderr": 0.0362773057502241, + "acc_norm": 0.6848484848484848, + "acc_norm_stderr": 0.0362773057502241 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.5238678090575275, + "mc1_stderr": 0.01748354715696156, + "mc2": 0.6521355657270486, + "mc2_stderr": 0.015510256466864823 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5171192443919717, + "acc_stderr": 0.017180275246085626, + "acc_norm": 0.5277449822904369, + "acc_norm_stderr": 0.017163867979456016 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + 
"harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + 
"harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "chihoonlee10/T3Q-LLM-MG-DPO-v1.0", + "model_sha": "278179cc45b49591400357fea267058b8e396c68", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/chihoonlee10/T3Q-Merge-SOLAR12/result_2024-03-14 06:50:17.json b/chihoonlee10/T3Q-Merge-SOLAR12/result_2024-03-14 06:50:17.json new file mode 100644 index 0000000000000000000000000000000000000000..8441981c87bbdf3efe7f19d6e690cffb3eba92b9 --- /dev/null +++ b/chihoonlee10/T3Q-Merge-SOLAR12/result_2024-03-14 06:50:17.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.507679180887372, + "acc_stderr": 0.01460966744089257, + "acc_norm": 0.5665529010238908, + "acc_norm_stderr": 0.014481376224558896 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4945230033857797, + "acc_stderr": 0.0049894820406101124, + "acc_norm": 0.6771559450308704, + "acc_norm_stderr": 0.004666080865179645 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5906432748538012, + "acc_stderr": 0.03771283107626545, + "acc_norm": 0.5906432748538012, + "acc_norm_stderr": 0.03771283107626545 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6310679611650486, + "acc_stderr": 0.0477761518115674, + "acc_norm": 0.6310679611650486, + "acc_norm_stderr": 0.0477761518115674 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.665389527458493, + "acc_stderr": 0.01687346864159216, + "acc_norm": 0.665389527458493, + "acc_norm_stderr": 0.01687346864159216 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45185185185185184, + "acc_stderr": 0.04299268905480863, + "acc_norm": 
0.45185185185185184, + "acc_norm_stderr": 0.04299268905480863 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4723404255319149, + "acc_stderr": 0.03263597118409769, + "acc_norm": 0.4723404255319149, + "acc_norm_stderr": 0.03263597118409769 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4457831325301205, + "acc_stderr": 0.038695433234721015, + "acc_norm": 0.4457831325301205, + "acc_norm_stderr": 0.038695433234721015 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.639871382636656, + "acc_stderr": 0.027264297599804015, + "acc_norm": 0.639871382636656, + "acc_norm_stderr": 0.027264297599804015 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5739910313901345, + "acc_stderr": 0.0331883328621728, + "acc_norm": 0.5739910313901345, + "acc_norm_stderr": 0.0331883328621728 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6183206106870229, + "acc_stderr": 0.0426073515764456, + "acc_norm": 0.6183206106870229, + "acc_norm_stderr": 0.0426073515764456 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7070707070707071, + "acc_stderr": 0.032424979581788166, + "acc_norm": 0.7070707070707071, + "acc_norm_stderr": 0.032424979581788166 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.43448275862068964, + "acc_stderr": 0.041307408795554966, + "acc_norm": 0.43448275862068964, + "acc_norm_stderr": 0.041307408795554966 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3627450980392157, + "acc_stderr": 0.04784060704105653, + "acc_norm": 0.3627450980392157, + "acc_norm_stderr": 0.04784060704105653 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6218487394957983, + "acc_stderr": 
0.031499305777849054, + "acc_norm": 0.6218487394957983, + "acc_norm_stderr": 0.031499305777849054 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5743589743589743, + "acc_stderr": 0.02506909438729652, + "acc_norm": 0.5743589743589743, + "acc_norm_stderr": 0.02506909438729652 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.04557239513497751, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.04557239513497751 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39901477832512317, + "acc_stderr": 0.034454876862647144, + "acc_norm": 0.39901477832512317, + "acc_norm_stderr": 0.034454876862647144 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5774193548387097, + "acc_stderr": 0.02810096472427264, + "acc_norm": 0.5774193548387097, + "acc_norm_stderr": 0.02810096472427264 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7521367521367521, + "acc_stderr": 0.028286324075564424, + "acc_norm": 0.7521367521367521, + "acc_norm_stderr": 0.028286324075564424 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5320754716981132, + "acc_stderr": 0.030709486992556545, + "acc_norm": 0.5320754716981132, + "acc_norm_stderr": 0.030709486992556545 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5909090909090909, + "acc_stderr": 0.04709306978661895, + "acc_norm": 0.5909090909090909, + "acc_norm_stderr": 0.04709306978661895 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.34814814814814815, + "acc_stderr": 0.029045600290616255, + "acc_norm": 0.34814814814814815, + "acc_norm_stderr": 0.029045600290616255 + }, + "harness|ko_mmlu_high_school_physics|5": { + 
"acc": 0.32450331125827814, + "acc_stderr": 0.03822746937658753, + "acc_norm": 0.32450331125827814, + "acc_norm_stderr": 0.03822746937658753 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6915422885572139, + "acc_stderr": 0.032658195885126966, + "acc_norm": 0.6915422885572139, + "acc_norm_stderr": 0.032658195885126966 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4797687861271676, + "acc_stderr": 0.03809342081273958, + "acc_norm": 0.4797687861271676, + "acc_norm_stderr": 0.03809342081273958 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.41005291005291006, + "acc_stderr": 0.025331202438944447, + "acc_norm": 0.41005291005291006, + "acc_norm_stderr": 0.025331202438944447 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5416666666666666, + "acc_stderr": 0.04166666666666665, + "acc_norm": 0.5416666666666666, + "acc_norm_stderr": 0.04166666666666665 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.66, + "acc_stderr": 0.04760952285695237, + "acc_norm": 0.66, + "acc_norm_stderr": 0.04760952285695237 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5635838150289018, + "acc_stderr": 0.026700545424943687, + "acc_norm": 0.5635838150289018, + "acc_norm_stderr": 0.026700545424943687 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5153374233128835, + "acc_stderr": 0.039265223787088445, + "acc_norm": 0.5153374233128835, + "acc_norm_stderr": 0.039265223787088445 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6172839506172839, + "acc_stderr": 0.02704453813840259, + "acc_norm": 0.6172839506172839, + "acc_norm_stderr": 0.02704453813840259 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + 
"harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7253886010362695, + "acc_stderr": 0.032210245080411544, + "acc_norm": 0.7253886010362695, + "acc_norm_stderr": 0.032210245080411544 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.4824561403508772, + "acc_stderr": 0.04700708033551038, + "acc_norm": 0.4824561403508772, + "acc_norm_stderr": 0.04700708033551038 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.689908256880734, + "acc_stderr": 0.019830849684439756, + "acc_norm": 0.689908256880734, + "acc_norm_stderr": 0.019830849684439756 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.042857142857142816, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.042857142857142816 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5915032679738562, + "acc_stderr": 0.028146405993096358, + "acc_norm": 0.5915032679738562, + "acc_norm_stderr": 0.028146405993096358 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6859504132231405, + "acc_stderr": 0.042369647530410184, + "acc_norm": 0.6859504132231405, + "acc_norm_stderr": 0.042369647530410184 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5855263157894737, + "acc_stderr": 0.04008973785779205, + "acc_norm": 0.5855263157894737, + "acc_norm_stderr": 0.04008973785779205 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5277777777777778, + "acc_stderr": 0.020196594933541204, + "acc_norm": 0.5277777777777778, + "acc_norm_stderr": 0.020196594933541204 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.37943262411347517, + "acc_stderr": 0.028947338851614105, + "acc_norm": 0.37943262411347517, + "acc_norm_stderr": 0.028947338851614105 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4375, + "acc_stderr": 0.04708567521880525, + "acc_norm": 
0.4375, + "acc_norm_stderr": 0.04708567521880525 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5, + "acc_stderr": 0.034099716973523674, + "acc_norm": 0.5, + "acc_norm_stderr": 0.034099716973523674 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.3754189944134078, + "acc_stderr": 0.01619510424846353, + "acc_norm": 0.3754189944134078, + "acc_norm_stderr": 0.01619510424846353 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.65, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.65, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5257352941176471, + "acc_stderr": 0.030332578094555026, + "acc_norm": 0.5257352941176471, + "acc_norm_stderr": 0.030332578094555026 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6, + "acc_stderr": 0.031362502409358936, + "acc_norm": 0.6, + "acc_norm_stderr": 0.031362502409358936 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.759493670886076, + "acc_stderr": 0.027820781981149678, + "acc_norm": 0.759493670886076, + "acc_norm_stderr": 0.027820781981149678 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.4380704041720991, + "acc_stderr": 0.01267190278256764, + "acc_norm": 0.4380704041720991, + "acc_norm_stderr": 0.01267190278256764 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6421568627450981, + "acc_stderr": 0.03364487286088298, + "acc_norm": 0.6421568627450981, + "acc_norm_stderr": 0.03364487286088298 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6909090909090909, + "acc_stderr": 0.036085410115739666, + "acc_norm": 0.6909090909090909, + "acc_norm_stderr": 0.036085410115739666 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.4944920440636475, + "mc1_stderr": 0.01750243899045107, + "mc2": 
0.6619184770773421, + "mc2_stderr": 0.01576857060981085 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5914994096812278, + "acc_stderr": 0.01690006287942712, + "acc_norm": 0.5997638724911453, + "acc_norm_stderr": 0.01684469351050505 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + 
"harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "chihoonlee10/T3Q-Merge-SOLAR12", + "model_sha": "e22d9d67294e440d32c0d350adb5346206e75719", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/chihoonlee10/T3Q-ko-solar-dpo-v1.0/result_2024-03-19 02:26:45.json b/chihoonlee10/T3Q-ko-solar-dpo-v1.0/result_2024-03-19 02:26:45.json new file mode 100644 index 0000000000000000000000000000000000000000..614e2e5aec89c68eaefaa70f40fe495db40be2d5 --- /dev/null +++ b/chihoonlee10/T3Q-ko-solar-dpo-v1.0/result_2024-03-19 02:26:45.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.6988054607508533, + "acc_stderr": 0.013406741767847634, + "acc_norm": 0.7389078498293515, + "acc_norm_stderr": 0.012835523909473848 + }, + "harness|ko_hellaswag|10": { + "acc": 0.5710017924716192, + "acc_stderr": 0.004939215682191774, + "acc_norm": 0.7245568611830313, + "acc_norm_stderr": 
0.004458242960556817 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.7309941520467836, + "acc_stderr": 0.03401052620104089, + "acc_norm": 0.7309941520467836, + "acc_norm_stderr": 0.03401052620104089 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.7378640776699029, + "acc_stderr": 0.043546310772605956, + "acc_norm": 0.7378640776699029, + "acc_norm_stderr": 0.043546310772605956 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.7037037037037037, + "acc_stderr": 0.016328814422102052, + "acc_norm": 0.7037037037037037, + "acc_norm_stderr": 0.016328814422102052 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4740740740740741, + "acc_stderr": 0.04313531696750574, + "acc_norm": 0.4740740740740741, + "acc_norm_stderr": 0.04313531696750574 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.5234042553191489, + "acc_stderr": 0.0326501947503358, + "acc_norm": 0.5234042553191489, + "acc_norm_stderr": 0.0326501947503358 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4939759036144578, + "acc_stderr": 0.03892212195333047, + "acc_norm": 0.4939759036144578, + "acc_norm_stderr": 0.03892212195333047 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6559485530546624, + "acc_stderr": 0.026981478043648043, + "acc_norm": 0.6559485530546624, + "acc_norm_stderr": 0.026981478043648043 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.6278026905829597, + "acc_stderr": 0.03244305283008731, + "acc_norm": 0.6278026905829597, + "acc_norm_stderr": 0.03244305283008731 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5725190839694656, + "acc_stderr": 0.043389203057924, + "acc_norm": 0.5725190839694656, + "acc_norm_stderr": 0.043389203057924 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956914, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956914 + 
}, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.803030303030303, + "acc_stderr": 0.028335609732463362, + "acc_norm": 0.803030303030303, + "acc_norm_stderr": 0.028335609732463362 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5724137931034483, + "acc_stderr": 0.041227371113703316, + "acc_norm": 0.5724137931034483, + "acc_norm_stderr": 0.041227371113703316 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.30392156862745096, + "acc_stderr": 0.045766654032077615, + "acc_norm": 0.30392156862745096, + "acc_norm_stderr": 0.045766654032077615 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6302521008403361, + "acc_stderr": 0.03135709599613591, + "acc_norm": 0.6302521008403361, + "acc_norm_stderr": 0.03135709599613591 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.6384615384615384, + "acc_stderr": 0.02435958146539701, + "acc_norm": 0.6384615384615384, + "acc_norm_stderr": 0.02435958146539701 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.71, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.71, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.04557239513497751, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.04557239513497751 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.47783251231527096, + "acc_stderr": 0.035145285621750094, + "acc_norm": 0.47783251231527096, + "acc_norm_stderr": 0.035145285621750094 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.632258064516129, + "acc_stderr": 0.027430866579973463, + "acc_norm": 0.632258064516129, + "acc_norm_stderr": 0.027430866579973463 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.8333333333333334, + "acc_stderr": 0.024414947304543688, + "acc_norm": 
0.8333333333333334, + "acc_norm_stderr": 0.024414947304543688 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.569811320754717, + "acc_stderr": 0.03047144586718324, + "acc_norm": 0.569811320754717, + "acc_norm_stderr": 0.03047144586718324 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6272727272727273, + "acc_stderr": 0.046313813194254656, + "acc_norm": 0.6272727272727273, + "acc_norm_stderr": 0.046313813194254656 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3851851851851852, + "acc_stderr": 0.029670906124630882, + "acc_norm": 0.3851851851851852, + "acc_norm_stderr": 0.029670906124630882 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3509933774834437, + "acc_stderr": 0.03896981964257374, + "acc_norm": 0.3509933774834437, + "acc_norm_stderr": 0.03896981964257374 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7412935323383084, + "acc_stderr": 0.030965903123573037, + "acc_norm": 0.7412935323383084, + "acc_norm_stderr": 0.030965903123573037 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5375722543352601, + "acc_stderr": 0.03801685104524458, + "acc_norm": 0.5375722543352601, + "acc_norm_stderr": 0.03801685104524458 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.46825396825396826, + "acc_stderr": 0.025699352832131792, + "acc_norm": 0.46825396825396826, + "acc_norm_stderr": 0.025699352832131792 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.6527777777777778, + "acc_stderr": 0.039812405437178615, + "acc_norm": 0.6527777777777778, + "acc_norm_stderr": 0.039812405437178615 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.77, + "acc_stderr": 0.04229525846816507, + "acc_norm": 0.77, + "acc_norm_stderr": 0.04229525846816507 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.6040462427745664, + "acc_stderr": 
0.02632981334194625, + "acc_norm": 0.6040462427745664, + "acc_norm_stderr": 0.02632981334194625 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.6012269938650306, + "acc_stderr": 0.03847021420456024, + "acc_norm": 0.6012269938650306, + "acc_norm_stderr": 0.03847021420456024 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6697530864197531, + "acc_stderr": 0.026168298456732846, + "acc_norm": 0.6697530864197531, + "acc_norm_stderr": 0.026168298456732846 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7564766839378239, + "acc_stderr": 0.03097543638684543, + "acc_norm": 0.7564766839378239, + "acc_norm_stderr": 0.03097543638684543 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.45614035087719296, + "acc_stderr": 0.046854730419077895, + "acc_norm": 0.45614035087719296, + "acc_norm_stderr": 0.046854730419077895 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.7486238532110092, + "acc_stderr": 0.01859920636028741, + "acc_norm": 0.7486238532110092, + "acc_norm_stderr": 0.01859920636028741 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.044444444444444495, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.044444444444444495 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.630718954248366, + "acc_stderr": 0.027634176689602656, + "acc_norm": 0.630718954248366, + "acc_norm_stderr": 0.027634176689602656 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.7, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.7, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.71900826446281, + "acc_stderr": 0.04103203830514512, + "acc_norm": 0.71900826446281, + "acc_norm_stderr": 0.04103203830514512 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 
0.6644736842105263, + "acc_stderr": 0.03842498559395268, + "acc_norm": 0.6644736842105263, + "acc_norm_stderr": 0.03842498559395268 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5816993464052288, + "acc_stderr": 0.019955975145835542, + "acc_norm": 0.5816993464052288, + "acc_norm_stderr": 0.019955975145835542 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3971631205673759, + "acc_stderr": 0.029189805673587102, + "acc_norm": 0.3971631205673759, + "acc_norm_stderr": 0.029189805673587102 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.41964285714285715, + "acc_stderr": 0.04684099321077106, + "acc_norm": 0.41964285714285715, + "acc_norm_stderr": 0.04684099321077106 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.49537037037037035, + "acc_stderr": 0.03409825519163572, + "acc_norm": 0.49537037037037035, + "acc_norm_stderr": 0.03409825519163572 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.4145251396648045, + "acc_stderr": 0.016476342210254003, + "acc_norm": 0.4145251396648045, + "acc_norm_stderr": 0.016476342210254003 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.7, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.7, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5477941176470589, + "acc_stderr": 0.03023375855159644, + "acc_norm": 0.5477941176470589, + "acc_norm_stderr": 0.03023375855159644 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6816326530612244, + "acc_stderr": 0.02982253379398204, + "acc_norm": 0.6816326530612244, + "acc_norm_stderr": 0.02982253379398204 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7510548523206751, + "acc_stderr": 0.028146970599422644, + "acc_norm": 0.7510548523206751, + "acc_norm_stderr": 
0.028146970599422644 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.4452411994784876, + "acc_stderr": 0.012693421303973294, + "acc_norm": 0.4452411994784876, + "acc_norm_stderr": 0.012693421303973294 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6617647058823529, + "acc_stderr": 0.0332057461294543, + "acc_norm": 0.6617647058823529, + "acc_norm_stderr": 0.0332057461294543 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6424242424242425, + "acc_stderr": 0.03742597043806587, + "acc_norm": 0.6424242424242425, + "acc_norm_stderr": 0.03742597043806587 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.7037943696450428, + "mc1_stderr": 0.015983595101811396, + "mc2": 0.7972081011238825, + "mc2_stderr": 0.013291929048345317 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5395513577331759, + "acc_stderr": 0.017136487626049846, + "acc_norm": 0.5560802833530106, + "acc_norm_stderr": 0.017081884623542543 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + 
"harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "chihoonlee10/T3Q-ko-solar-dpo-v1.0", + "model_sha": "ba1da10619b3dbd31e43c09ff67cc609e22c1e19", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff 
--git a/chihoonlee10/T3Q-ko-solar-dpo-v2.0/result_2024-03-20 03:38:04.json b/chihoonlee10/T3Q-ko-solar-dpo-v2.0/result_2024-03-20 03:38:04.json new file mode 100644 index 0000000000000000000000000000000000000000..47e118d4d8a57730848655760d8d93e7dc2c672f --- /dev/null +++ b/chihoonlee10/T3Q-ko-solar-dpo-v2.0/result_2024-03-20 03:38:04.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.6996587030716723, + "acc_stderr": 0.01339590930995701, + "acc_norm": 0.7380546075085325, + "acc_norm_stderr": 0.012849054826858117 + }, + "harness|ko_hellaswag|10": { + "acc": 0.5710017924716192, + "acc_stderr": 0.004939215682191774, + "acc_norm": 0.7259510057757419, + "acc_norm_stderr": 0.004451222241494022 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.7251461988304093, + "acc_stderr": 0.03424042924691583, + "acc_norm": 0.7251461988304093, + "acc_norm_stderr": 0.03424042924691583 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.7378640776699029, + "acc_stderr": 0.043546310772605956, + "acc_norm": 0.7378640776699029, + "acc_norm_stderr": 0.043546310772605956 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.7062579821200511, + "acc_stderr": 0.016287759388491693, + "acc_norm": 0.7062579821200511, + "acc_norm_stderr": 0.016287759388491693 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4740740740740741, + "acc_stderr": 0.04313531696750574, + "acc_norm": 0.4740740740740741, + "acc_norm_stderr": 0.04313531696750574 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.5319148936170213, + "acc_stderr": 0.032619369184673806, + "acc_norm": 0.5319148936170213, + "acc_norm_stderr": 0.032619369184673806 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4939759036144578, + "acc_stderr": 0.03892212195333047, + "acc_norm": 0.4939759036144578, + "acc_norm_stderr": 
0.03892212195333047 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6463022508038585, + "acc_stderr": 0.027155208103200875, + "acc_norm": 0.6463022508038585, + "acc_norm_stderr": 0.027155208103200875 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.6412556053811659, + "acc_stderr": 0.03219079200419995, + "acc_norm": 0.6412556053811659, + "acc_norm_stderr": 0.03219079200419995 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5572519083969466, + "acc_stderr": 0.043564472026650695, + "acc_norm": 0.5572519083969466, + "acc_norm_stderr": 0.043564472026650695 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.51, + "acc_stderr": 0.050241839379569095, + "acc_norm": 0.51, + "acc_norm_stderr": 0.050241839379569095 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.797979797979798, + "acc_stderr": 0.028606204289229872, + "acc_norm": 0.797979797979798, + "acc_norm_stderr": 0.028606204289229872 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5655172413793104, + "acc_stderr": 0.04130740879555497, + "acc_norm": 0.5655172413793104, + "acc_norm_stderr": 0.04130740879555497 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.04617034827006716, + "acc_norm": 0.3137254901960784, + "acc_norm_stderr": 0.04617034827006716 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6260504201680672, + "acc_stderr": 0.03142946637883708, + "acc_norm": 0.6260504201680672, + "acc_norm_stderr": 0.03142946637883708 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.6435897435897436, + "acc_stderr": 0.024283140529467315, + "acc_norm": 0.6435897435897436, + "acc_norm_stderr": 0.024283140529467315 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.7, + "acc_stderr": 0.04605661864718381, + "acc_norm": 0.7, + "acc_norm_stderr": 0.04605661864718381 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + 
"acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6759259259259259, + "acc_stderr": 0.04524596007030048, + "acc_norm": 0.6759259259259259, + "acc_norm_stderr": 0.04524596007030048 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.46798029556650245, + "acc_stderr": 0.03510766597959217, + "acc_norm": 0.46798029556650245, + "acc_norm_stderr": 0.03510766597959217 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6387096774193548, + "acc_stderr": 0.02732754844795755, + "acc_norm": 0.6387096774193548, + "acc_norm_stderr": 0.02732754844795755 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.8376068376068376, + "acc_stderr": 0.024161618127987745, + "acc_norm": 0.8376068376068376, + "acc_norm_stderr": 0.024161618127987745 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5735849056603773, + "acc_stderr": 0.030437794342983052, + "acc_norm": 0.5735849056603773, + "acc_norm_stderr": 0.030437794342983052 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.046075820907199756, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.046075820907199756 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.02944316932303154, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.02944316932303154 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3443708609271523, + "acc_stderr": 0.038796870240733264, + "acc_norm": 0.3443708609271523, + "acc_norm_stderr": 0.038796870240733264 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7512437810945274, + "acc_stderr": 0.030567675938916707, + "acc_norm": 0.7512437810945274, + "acc_norm_stderr": 0.030567675938916707 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5433526011560693, + "acc_stderr": 0.03798106566014498, + "acc_norm": 0.5433526011560693, + "acc_norm_stderr": 0.03798106566014498 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 
0.4708994708994709, + "acc_stderr": 0.025707658614154943, + "acc_norm": 0.4708994708994709, + "acc_norm_stderr": 0.025707658614154943 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.6458333333333334, + "acc_stderr": 0.039994111357535424, + "acc_norm": 0.6458333333333334, + "acc_norm_stderr": 0.039994111357535424 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.75, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.75, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5982658959537572, + "acc_stderr": 0.026394104177643634, + "acc_norm": 0.5982658959537572, + "acc_norm_stderr": 0.026394104177643634 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.6257668711656442, + "acc_stderr": 0.03802068102899615, + "acc_norm": 0.6257668711656442, + "acc_norm_stderr": 0.03802068102899615 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6790123456790124, + "acc_stderr": 0.025976566010862737, + "acc_norm": 0.6790123456790124, + "acc_norm_stderr": 0.025976566010862737 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7668393782383419, + "acc_stderr": 0.03051611137147601, + "acc_norm": 0.7668393782383419, + "acc_norm_stderr": 0.03051611137147601 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.4649122807017544, + "acc_stderr": 0.046920083813689104, + "acc_norm": 0.4649122807017544, + "acc_norm_stderr": 0.046920083813689104 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.7431192660550459, + "acc_stderr": 0.01873249292834246, + "acc_norm": 0.7431192660550459, + "acc_norm_stderr": 0.01873249292834246 + }, + "harness|ko_mmlu_formal_logic|5": 
{ + "acc": 0.4603174603174603, + "acc_stderr": 0.04458029125470973, + "acc_norm": 0.4603174603174603, + "acc_norm_stderr": 0.04458029125470973 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.630718954248366, + "acc_stderr": 0.027634176689602653, + "acc_norm": 0.630718954248366, + "acc_norm_stderr": 0.027634176689602653 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.74, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.74, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7355371900826446, + "acc_stderr": 0.04026187527591207, + "acc_norm": 0.7355371900826446, + "acc_norm_stderr": 0.04026187527591207 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.6644736842105263, + "acc_stderr": 0.03842498559395268, + "acc_norm": 0.6644736842105263, + "acc_norm_stderr": 0.03842498559395268 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5833333333333334, + "acc_stderr": 0.019944914136873586, + "acc_norm": 0.5833333333333334, + "acc_norm_stderr": 0.019944914136873586 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.40070921985815605, + "acc_stderr": 0.02923346574557309, + "acc_norm": 0.40070921985815605, + "acc_norm_stderr": 0.02923346574557309 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.04697113923010213, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.04697113923010213 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5, + "acc_stderr": 0.034099716973523674, + "acc_norm": 0.5, + "acc_norm_stderr": 0.034099716973523674 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.40670391061452515, + "acc_stderr": 0.016428811915898858, + "acc_norm": 0.40670391061452515, + "acc_norm_stderr": 0.016428811915898858 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + 
"harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.7, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.7, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5514705882352942, + "acc_stderr": 0.030211479609121596, + "acc_norm": 0.5514705882352942, + "acc_norm_stderr": 0.030211479609121596 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6979591836734694, + "acc_stderr": 0.029393609319879797, + "acc_norm": 0.6979591836734694, + "acc_norm_stderr": 0.029393609319879797 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7552742616033755, + "acc_stderr": 0.027985699387036416, + "acc_norm": 0.7552742616033755, + "acc_norm_stderr": 0.027985699387036416 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.4471968709256845, + "acc_stderr": 0.012698825252435117, + "acc_norm": 0.4471968709256845, + "acc_norm_stderr": 0.012698825252435117 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6715686274509803, + "acc_stderr": 0.032962451101722294, + "acc_norm": 0.6715686274509803, + "acc_norm_stderr": 0.032962451101722294 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6242424242424243, + "acc_stderr": 0.03781887353205982, + "acc_norm": 0.6242424242424243, + "acc_norm_stderr": 0.03781887353205982 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.7050183598531212, + "mc1_stderr": 0.01596440096558964, + "mc2": 0.7937806978980084, + "mc2_stderr": 0.013385999654039625 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5289256198347108, + "acc_stderr": 0.017161563949916348, + "acc_norm": 0.5560802833530106, + "acc_norm_stderr": 0.017081884623542543 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + 
"harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + 
"harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "chihoonlee10/T3Q-ko-solar-dpo-v2.0", + "model_sha": "53717721711dcd8bce946a3aba6f0dd5fcd33778", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/chihoonlee10/T3Q-ko-solar-dpo-v3.0/result_2024-03-20 15:21:25.json b/chihoonlee10/T3Q-ko-solar-dpo-v3.0/result_2024-03-20 15:21:25.json new file mode 100644 index 0000000000000000000000000000000000000000..d18dd64a71f94401d2bc09a82a948e81a9096235 --- /dev/null +++ b/chihoonlee10/T3Q-ko-solar-dpo-v3.0/result_2024-03-20 15:21:25.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.7013651877133106, + "acc_stderr": 0.013374078615068742, + "acc_norm": 0.7508532423208191, + "acc_norm_stderr": 0.012639407111926428 + }, + "harness|ko_hellaswag|10": { + "acc": 0.585839474208325, + "acc_stderr": 0.004915697886906118, + "acc_norm": 0.7417845050786696, + "acc_norm_stderr": 0.004367586801776636 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.7309941520467836, + "acc_stderr": 0.0340105262010409, + "acc_norm": 0.7309941520467836, + "acc_norm_stderr": 0.0340105262010409 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.7475728155339806, + "acc_stderr": 0.04301250399690878, + "acc_norm": 0.7475728155339806, + "acc_norm_stderr": 0.04301250399690878 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.698595146871009, + "acc_stderr": 0.016409091097268794, + "acc_norm": 0.698595146871009, + 
"acc_norm_stderr": 0.016409091097268794 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.043097329010363554, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.043097329010363554 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.5148936170212766, + "acc_stderr": 0.032671518489247764, + "acc_norm": 0.5148936170212766, + "acc_norm_stderr": 0.032671518489247764 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4879518072289157, + "acc_stderr": 0.0389136449583582, + "acc_norm": 0.4879518072289157, + "acc_norm_stderr": 0.0389136449583582 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6302250803858521, + "acc_stderr": 0.027417996705630998, + "acc_norm": 0.6302250803858521, + "acc_norm_stderr": 0.027417996705630998 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.6457399103139013, + "acc_stderr": 0.032100621541349864, + "acc_norm": 0.6457399103139013, + "acc_norm_stderr": 0.032100621541349864 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.549618320610687, + "acc_stderr": 0.04363643698524779, + "acc_norm": 0.549618320610687, + "acc_norm_stderr": 0.04363643698524779 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.51, + "acc_stderr": 0.050241839379569095, + "acc_norm": 0.51, + "acc_norm_stderr": 0.050241839379569095 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7777777777777778, + "acc_stderr": 0.029620227874790465, + "acc_norm": 0.7777777777777778, + "acc_norm_stderr": 0.029620227874790465 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5586206896551724, + "acc_stderr": 0.04137931034482757, + "acc_norm": 0.5586206896551724, + "acc_norm_stderr": 0.04137931034482757 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3235294117647059, + "acc_stderr": 0.04655010411319617, + "acc_norm": 0.3235294117647059, + 
"acc_norm_stderr": 0.04655010411319617 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6218487394957983, + "acc_stderr": 0.03149930577784906, + "acc_norm": 0.6218487394957983, + "acc_norm_stderr": 0.03149930577784906 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.6205128205128205, + "acc_stderr": 0.024603626924097417, + "acc_norm": 0.6205128205128205, + "acc_norm_stderr": 0.024603626924097417 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.69, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.69, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6759259259259259, + "acc_stderr": 0.04524596007030048, + "acc_norm": 0.6759259259259259, + "acc_norm_stderr": 0.04524596007030048 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.49261083743842365, + "acc_stderr": 0.03517603540361008, + "acc_norm": 0.49261083743842365, + "acc_norm_stderr": 0.03517603540361008 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6290322580645161, + "acc_stderr": 0.02748054188795359, + "acc_norm": 0.6290322580645161, + "acc_norm_stderr": 0.02748054188795359 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.8333333333333334, + "acc_stderr": 0.024414947304543688, + "acc_norm": 0.8333333333333334, + "acc_norm_stderr": 0.024414947304543688 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5773584905660377, + "acc_stderr": 0.03040233144576954, + "acc_norm": 0.5773584905660377, + "acc_norm_stderr": 0.03040233144576954 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6181818181818182, + "acc_stderr": 0.046534298079135075, + "acc_norm": 0.6181818181818182, + "acc_norm_stderr": 0.046534298079135075 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.37407407407407406, + "acc_stderr": 
0.02950286112895529, + "acc_norm": 0.37407407407407406, + "acc_norm_stderr": 0.02950286112895529 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3509933774834437, + "acc_stderr": 0.03896981964257374, + "acc_norm": 0.3509933774834437, + "acc_norm_stderr": 0.03896981964257374 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7313432835820896, + "acc_stderr": 0.03134328358208954, + "acc_norm": 0.7313432835820896, + "acc_norm_stderr": 0.03134328358208954 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5260115606936416, + "acc_stderr": 0.038073017265045125, + "acc_norm": 0.5260115606936416, + "acc_norm_stderr": 0.038073017265045125 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.4603174603174603, + "acc_stderr": 0.0256700806369092, + "acc_norm": 0.4603174603174603, + "acc_norm_stderr": 0.0256700806369092 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.6319444444444444, + "acc_stderr": 0.040329990539607195, + "acc_norm": 0.6319444444444444, + "acc_norm_stderr": 0.040329990539607195 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.76, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.76, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5895953757225434, + "acc_stderr": 0.026483392042098174, + "acc_norm": 0.5895953757225434, + "acc_norm_stderr": 0.026483392042098174 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.6134969325153374, + "acc_stderr": 0.03825825548848607, + "acc_norm": 0.6134969325153374, + "acc_norm_stderr": 0.03825825548848607 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6759259259259259, + "acc_stderr": 0.026041766202717156, + "acc_norm": 0.6759259259259259, + "acc_norm_stderr": 0.026041766202717156 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + 
"acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7616580310880829, + "acc_stderr": 0.030748905363909895, + "acc_norm": 0.7616580310880829, + "acc_norm_stderr": 0.030748905363909895 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.45614035087719296, + "acc_stderr": 0.046854730419077895, + "acc_norm": 0.45614035087719296, + "acc_norm_stderr": 0.046854730419077895 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.7431192660550459, + "acc_stderr": 0.018732492928342448, + "acc_norm": 0.7431192660550459, + "acc_norm_stderr": 0.018732492928342448 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.0442626668137991, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.0442626668137991 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.6013071895424836, + "acc_stderr": 0.028036092273891776, + "acc_norm": 0.6013071895424836, + "acc_norm_stderr": 0.028036092273891776 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.68, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.68, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.71900826446281, + "acc_stderr": 0.04103203830514512, + "acc_norm": 0.71900826446281, + "acc_norm_stderr": 0.04103203830514512 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.6710526315789473, + "acc_stderr": 0.03823428969926605, + "acc_norm": 0.6710526315789473, + "acc_norm_stderr": 0.03823428969926605 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5816993464052288, + "acc_stderr": 0.019955975145835542, + "acc_norm": 0.5816993464052288, + "acc_norm_stderr": 0.019955975145835542 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.4148936170212766, + "acc_stderr": 0.0293922365846125, + "acc_norm": 0.4148936170212766, + "acc_norm_stderr": 0.0293922365846125 + }, + 
"harness|ko_mmlu_machine_learning|5": { + "acc": 0.41964285714285715, + "acc_stderr": 0.04684099321077106, + "acc_norm": 0.41964285714285715, + "acc_norm_stderr": 0.04684099321077106 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.49537037037037035, + "acc_stderr": 0.03409825519163572, + "acc_norm": 0.49537037037037035, + "acc_norm_stderr": 0.03409825519163572 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.45027932960893857, + "acc_stderr": 0.016639615236845817, + "acc_norm": 0.45027932960893857, + "acc_norm_stderr": 0.016639615236845817 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.71, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.71, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5073529411764706, + "acc_stderr": 0.030369552523902173, + "acc_norm": 0.5073529411764706, + "acc_norm_stderr": 0.030369552523902173 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6979591836734694, + "acc_stderr": 0.0293936093198798, + "acc_norm": 0.6979591836734694, + "acc_norm_stderr": 0.0293936093198798 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7510548523206751, + "acc_stderr": 0.028146970599422644, + "acc_norm": 0.7510548523206751, + "acc_norm_stderr": 0.028146970599422644 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.4589308996088657, + "acc_stderr": 0.012727084826799805, + "acc_norm": 0.4589308996088657, + "acc_norm_stderr": 0.012727084826799805 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6715686274509803, + "acc_stderr": 0.032962451101722294, + "acc_norm": 0.6715686274509803, + "acc_norm_stderr": 0.032962451101722294 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6424242424242425, + "acc_stderr": 
0.03742597043806587, + "acc_norm": 0.6424242424242425, + "acc_norm_stderr": 0.03742597043806587 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.7209302325581395, + "mc1_stderr": 0.015702107090627918, + "mc2": 0.809791586353025, + "mc2_stderr": 0.012994763457305245 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5206611570247934, + "acc_stderr": 0.017175671279836446, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.017119172208061504 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + 
"harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "chihoonlee10/T3Q-ko-solar-dpo-v3.0", + "model_sha": "a3ce6daa4993e2711b033720be053daca0a5549b", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/chihoonlee10/T3Q-ko-solar-dpo-v4.0/result_2024-03-25 15:19:52.json b/chihoonlee10/T3Q-ko-solar-dpo-v4.0/result_2024-03-25 15:19:52.json new file mode 100644 index 0000000000000000000000000000000000000000..1969103cd7a766487855141fcac2b2df5a83c28d --- /dev/null +++ b/chihoonlee10/T3Q-ko-solar-dpo-v4.0/result_2024-03-25 15:19:52.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.7107508532423208, + "acc_stderr": 0.013250012579393443, + 
"acc_norm": 0.7517064846416383, + "acc_norm_stderr": 0.012624912868089758 + }, + "harness|ko_hellaswag|10": { + "acc": 0.5974905397331209, + "acc_stderr": 0.004894012555642659, + "acc_norm": 0.7497510456084445, + "acc_norm_stderr": 0.00432271091102637 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.7134502923976608, + "acc_stderr": 0.03467826685703826, + "acc_norm": 0.7134502923976608, + "acc_norm_stderr": 0.03467826685703826 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.7184466019417476, + "acc_stderr": 0.044532548363264673, + "acc_norm": 0.7184466019417476, + "acc_norm_stderr": 0.044532548363264673 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6934865900383141, + "acc_stderr": 0.016486952893041515, + "acc_norm": 0.6934865900383141, + "acc_norm_stderr": 0.016486952893041515 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.04292596718256981, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.04292596718256981 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.5276595744680851, + "acc_stderr": 0.03263597118409769, + "acc_norm": 0.5276595744680851, + "acc_norm_stderr": 0.03263597118409769 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4939759036144578, + "acc_stderr": 0.03892212195333047, + "acc_norm": 0.4939759036144578, + "acc_norm_stderr": 0.03892212195333047 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6302250803858521, + "acc_stderr": 0.027417996705630998, + "acc_norm": 0.6302250803858521, + "acc_norm_stderr": 0.027417996705630998 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.6412556053811659, + "acc_stderr": 0.032190792004199956, + "acc_norm": 0.6412556053811659, + "acc_norm_stderr": 0.032190792004199956 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5572519083969466, + "acc_stderr": 
0.043564472026650695, + "acc_norm": 0.5572519083969466, + "acc_norm_stderr": 0.043564472026650695 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.51, + "acc_stderr": 0.050241839379569095, + "acc_norm": 0.51, + "acc_norm_stderr": 0.050241839379569095 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7676767676767676, + "acc_stderr": 0.030088629490217487, + "acc_norm": 0.7676767676767676, + "acc_norm_stderr": 0.030088629490217487 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5379310344827586, + "acc_stderr": 0.04154659671707548, + "acc_norm": 0.5379310344827586, + "acc_norm_stderr": 0.04154659671707548 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.30392156862745096, + "acc_stderr": 0.045766654032077615, + "acc_norm": 0.30392156862745096, + "acc_norm_stderr": 0.045766654032077615 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6176470588235294, + "acc_stderr": 0.031566630992154156, + "acc_norm": 0.6176470588235294, + "acc_norm_stderr": 0.031566630992154156 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.6128205128205129, + "acc_stderr": 0.024697216930878934, + "acc_norm": 0.6128205128205129, + "acc_norm_stderr": 0.024697216930878934 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.7, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.7, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6759259259259259, + "acc_stderr": 0.04524596007030048, + "acc_norm": 0.6759259259259259, + "acc_norm_stderr": 0.04524596007030048 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4729064039408867, + "acc_stderr": 0.03512819077876106, + "acc_norm": 0.4729064039408867, + "acc_norm_stderr": 0.03512819077876106 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 
0.6193548387096774, + "acc_stderr": 0.02762171783290703, + "acc_norm": 0.6193548387096774, + "acc_norm_stderr": 0.02762171783290703 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.8376068376068376, + "acc_stderr": 0.02416161812798774, + "acc_norm": 0.8376068376068376, + "acc_norm_stderr": 0.02416161812798774 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5886792452830188, + "acc_stderr": 0.030285009259009787, + "acc_norm": 0.5886792452830188, + "acc_norm_stderr": 0.030285009259009787 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6181818181818182, + "acc_stderr": 0.046534298079135075, + "acc_norm": 0.6181818181818182, + "acc_norm_stderr": 0.046534298079135075 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.37407407407407406, + "acc_stderr": 0.02950286112895529, + "acc_norm": 0.37407407407407406, + "acc_norm_stderr": 0.02950286112895529 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3708609271523179, + "acc_stderr": 0.03943966699183629, + "acc_norm": 0.3708609271523179, + "acc_norm_stderr": 0.03943966699183629 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7313432835820896, + "acc_stderr": 0.03134328358208954, + "acc_norm": 0.7313432835820896, + "acc_norm_stderr": 0.03134328358208954 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5260115606936416, + "acc_stderr": 0.038073017265045125, + "acc_norm": 0.5260115606936416, + "acc_norm_stderr": 0.038073017265045125 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.455026455026455, + "acc_stderr": 0.02564692836104939, + "acc_norm": 0.455026455026455, + "acc_norm_stderr": 0.02564692836104939 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.6111111111111112, + "acc_stderr": 0.04076663253918567, + "acc_norm": 0.6111111111111112, + "acc_norm_stderr": 0.04076663253918567 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, 
+ "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.77, + "acc_stderr": 0.04229525846816507, + "acc_norm": 0.77, + "acc_norm_stderr": 0.04229525846816507 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.6011560693641619, + "acc_stderr": 0.02636243757454654, + "acc_norm": 0.6011560693641619, + "acc_norm_stderr": 0.02636243757454654 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.6134969325153374, + "acc_stderr": 0.03825825548848607, + "acc_norm": 0.6134969325153374, + "acc_norm_stderr": 0.03825825548848607 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6604938271604939, + "acc_stderr": 0.026348564412011617, + "acc_norm": 0.6604938271604939, + "acc_norm_stderr": 0.026348564412011617 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7564766839378239, + "acc_stderr": 0.030975436386845426, + "acc_norm": 0.7564766839378239, + "acc_norm_stderr": 0.030975436386845426 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.49122807017543857, + "acc_stderr": 0.047028804320496165, + "acc_norm": 0.49122807017543857, + "acc_norm_stderr": 0.047028804320496165 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.7504587155963303, + "acc_stderr": 0.018553897629501617, + "acc_norm": 0.7504587155963303, + "acc_norm_stderr": 0.018553897629501617 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4365079365079365, + "acc_stderr": 0.04435932892851466, + "acc_norm": 0.4365079365079365, + "acc_norm_stderr": 0.04435932892851466 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.6143790849673203, + "acc_stderr": 0.027870745278290282, + "acc_norm": 0.6143790849673203, + "acc_norm_stderr": 0.027870745278290282 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.67, + "acc_stderr": 0.04725815626252607, + "acc_norm": 0.67, + "acc_norm_stderr": 0.04725815626252607 + 
}, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7107438016528925, + "acc_stderr": 0.041391127276354626, + "acc_norm": 0.7107438016528925, + "acc_norm_stderr": 0.041391127276354626 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.6710526315789473, + "acc_stderr": 0.03823428969926605, + "acc_norm": 0.6710526315789473, + "acc_norm_stderr": 0.03823428969926605 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5784313725490197, + "acc_stderr": 0.019977422600227477, + "acc_norm": 0.5784313725490197, + "acc_norm_stderr": 0.019977422600227477 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.4326241134751773, + "acc_stderr": 0.02955545423677885, + "acc_norm": 0.4326241134751773, + "acc_norm_stderr": 0.02955545423677885 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.41964285714285715, + "acc_stderr": 0.04684099321077106, + "acc_norm": 0.41964285714285715, + "acc_norm_stderr": 0.04684099321077106 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.47685185185185186, + "acc_stderr": 0.034063153607115086, + "acc_norm": 0.47685185185185186, + "acc_norm_stderr": 0.034063153607115086 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.41675977653631285, + "acc_stderr": 0.01648913496243895, + "acc_norm": 0.41675977653631285, + "acc_norm_stderr": 0.01648913496243895 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.69, + "acc_stderr": 0.046482319871173156, + "acc_norm": 0.69, + "acc_norm_stderr": 0.046482319871173156 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5036764705882353, + "acc_stderr": 0.0303720158854282, + "acc_norm": 0.5036764705882353, + "acc_norm_stderr": 0.0303720158854282 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6938775510204082, + "acc_stderr": 0.029504896454595975, + "acc_norm": 
0.6938775510204082, + "acc_norm_stderr": 0.029504896454595975 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7383966244725738, + "acc_stderr": 0.028609516716994934, + "acc_norm": 0.7383966244725738, + "acc_norm_stderr": 0.028609516716994934 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.4530638852672751, + "acc_stderr": 0.012713845972358992, + "acc_norm": 0.4530638852672751, + "acc_norm_stderr": 0.012713845972358992 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.033086111132364364, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.033086111132364364 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6242424242424243, + "acc_stderr": 0.03781887353205983, + "acc_norm": 0.6242424242424243, + "acc_norm_stderr": 0.03781887353205983 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.7405140758873929, + "mc1_stderr": 0.015345409485558003, + "mc2": 0.8229128205076661, + "mc2_stderr": 0.012616808294286377 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5064935064935064, + "acc_stderr": 0.01718890435907731, + "acc_norm": 0.526564344746163, + "acc_norm_stderr": 0.017166075717577747 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + 
"harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "chihoonlee10/T3Q-ko-solar-dpo-v4.0", + 
"model_sha": "454942a61feed76e0e8dbdeab38885b1c0d0dc3b", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/chihoonlee10/T3Q-ko-solar-dpo-v5.0/result_2024-03-27 21:17:27.json b/chihoonlee10/T3Q-ko-solar-dpo-v5.0/result_2024-03-27 21:17:27.json new file mode 100644 index 0000000000000000000000000000000000000000..08adde3370e7d0f288d083495189c1d1c8a61a94 --- /dev/null +++ b/chihoonlee10/T3Q-ko-solar-dpo-v5.0/result_2024-03-27 21:17:27.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.7329351535836177, + "acc_stderr": 0.01292893319649635, + "acc_norm": 0.7619453924914675, + "acc_norm_stderr": 0.012445770028026208 + }, + "harness|ko_hellaswag|10": { + "acc": 0.6923919537940649, + "acc_stderr": 0.004605601610012385, + "acc_norm": 0.8095996813383788, + "acc_norm_stderr": 0.003918145109742943 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6900584795321637, + "acc_stderr": 0.035469769593931624, + "acc_norm": 0.6900584795321637, + "acc_norm_stderr": 0.035469769593931624 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.7378640776699029, + "acc_stderr": 0.043546310772605956, + "acc_norm": 0.7378640776699029, + "acc_norm_stderr": 0.043546310772605956 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6845466155810983, + "acc_stderr": 0.016617501738763408, + "acc_norm": 0.6845466155810983, + "acc_norm_stderr": 0.016617501738763408 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45185185185185184, + "acc_stderr": 0.04299268905480863, + "acc_norm": 0.45185185185185184, + "acc_norm_stderr": 0.04299268905480863 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4765957446808511, + "acc_stderr": 
0.03265019475033583, + "acc_norm": 0.4765957446808511, + "acc_norm_stderr": 0.03265019475033583 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.46987951807228917, + "acc_stderr": 0.03885425420866766, + "acc_norm": 0.46987951807228917, + "acc_norm_stderr": 0.03885425420866766 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6334405144694534, + "acc_stderr": 0.027368078243971646, + "acc_norm": 0.6334405144694534, + "acc_norm_stderr": 0.027368078243971646 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.6502242152466368, + "acc_stderr": 0.03200736719484503, + "acc_norm": 0.6502242152466368, + "acc_norm_stderr": 0.03200736719484503 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.549618320610687, + "acc_stderr": 0.04363643698524779, + "acc_norm": 0.549618320610687, + "acc_norm_stderr": 0.04363643698524779 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.51, + "acc_stderr": 0.050241839379569095, + "acc_norm": 0.51, + "acc_norm_stderr": 0.050241839379569095 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7575757575757576, + "acc_stderr": 0.030532892233932036, + "acc_norm": 0.7575757575757576, + "acc_norm_stderr": 0.030532892233932036 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5310344827586206, + "acc_stderr": 0.04158632762097828, + "acc_norm": 0.5310344827586206, + "acc_norm_stderr": 0.04158632762097828 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3627450980392157, + "acc_stderr": 0.047840607041056527, + "acc_norm": 0.3627450980392157, + "acc_norm_stderr": 0.047840607041056527 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6260504201680672, + "acc_stderr": 0.03142946637883708, + "acc_norm": 0.6260504201680672, + "acc_norm_stderr": 0.03142946637883708 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.6205128205128205, + "acc_stderr": 0.024603626924097413, + "acc_norm": 0.6205128205128205, + "acc_norm_stderr": 0.024603626924097413 + }, + 
"harness|ko_mmlu_computer_security|5": { + "acc": 0.65, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.65, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6388888888888888, + "acc_stderr": 0.04643454608906275, + "acc_norm": 0.6388888888888888, + "acc_norm_stderr": 0.04643454608906275 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4729064039408867, + "acc_stderr": 0.03512819077876106, + "acc_norm": 0.4729064039408867, + "acc_norm_stderr": 0.03512819077876106 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5935483870967742, + "acc_stderr": 0.027941727346256304, + "acc_norm": 0.5935483870967742, + "acc_norm_stderr": 0.027941727346256304 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.8376068376068376, + "acc_stderr": 0.024161618127987745, + "acc_norm": 0.8376068376068376, + "acc_norm_stderr": 0.024161618127987745 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5509433962264151, + "acc_stderr": 0.030612730713641092, + "acc_norm": 0.5509433962264151, + "acc_norm_stderr": 0.030612730713641092 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6, + "acc_stderr": 0.0469237132203465, + "acc_norm": 0.6, + "acc_norm_stderr": 0.0469237132203465 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.028742040903948496, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.028742040903948496 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3443708609271523, + "acc_stderr": 0.038796870240733264, + "acc_norm": 0.3443708609271523, + "acc_norm_stderr": 0.038796870240733264 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7213930348258707, + "acc_stderr": 0.03170056183497309, + "acc_norm": 0.7213930348258707, + "acc_norm_stderr": 0.03170056183497309 + }, + 
"harness|ko_mmlu_college_medicine|5": { + "acc": 0.5144508670520231, + "acc_stderr": 0.03810871630454764, + "acc_norm": 0.5144508670520231, + "acc_norm_stderr": 0.03810871630454764 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.4470899470899471, + "acc_stderr": 0.02560672399577702, + "acc_norm": 0.4470899470899471, + "acc_norm_stderr": 0.02560672399577702 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.6388888888888888, + "acc_stderr": 0.04016660030451233, + "acc_norm": 0.6388888888888888, + "acc_norm_stderr": 0.04016660030451233 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.71, + "acc_stderr": 0.04560480215720683, + "acc_norm": 0.71, + "acc_norm_stderr": 0.04560480215720683 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5635838150289018, + "acc_stderr": 0.02670054542494368, + "acc_norm": 0.5635838150289018, + "acc_norm_stderr": 0.02670054542494368 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.6012269938650306, + "acc_stderr": 0.03847021420456023, + "acc_norm": 0.6012269938650306, + "acc_norm_stderr": 0.03847021420456023 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6512345679012346, + "acc_stderr": 0.02651759772446501, + "acc_norm": 0.6512345679012346, + "acc_norm_stderr": 0.02651759772446501 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7461139896373057, + "acc_stderr": 0.03141024780565318, + "acc_norm": 0.7461139896373057, + "acc_norm_stderr": 0.03141024780565318 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.45614035087719296, + "acc_stderr": 0.046854730419077895, + "acc_norm": 0.45614035087719296, + "acc_norm_stderr": 
0.046854730419077895 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.7211009174311926, + "acc_stderr": 0.019227468876463524, + "acc_norm": 0.7211009174311926, + "acc_norm_stderr": 0.019227468876463524 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.42063492063492064, + "acc_stderr": 0.04415438226743744, + "acc_norm": 0.42063492063492064, + "acc_norm_stderr": 0.04415438226743744 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5751633986928104, + "acc_stderr": 0.02830457667314111, + "acc_norm": 0.5751633986928104, + "acc_norm_stderr": 0.02830457667314111 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.69, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.69, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6942148760330579, + "acc_stderr": 0.04205953933884123, + "acc_norm": 0.6942148760330579, + "acc_norm_stderr": 0.04205953933884123 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.625, + "acc_stderr": 0.039397364351956274, + "acc_norm": 0.625, + "acc_norm_stderr": 0.039397364351956274 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5702614379084967, + "acc_stderr": 0.020027122784928554, + "acc_norm": 0.5702614379084967, + "acc_norm_stderr": 0.020027122784928554 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.4326241134751773, + "acc_stderr": 0.02955545423677885, + "acc_norm": 0.4326241134751773, + "acc_norm_stderr": 0.02955545423677885 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.44642857142857145, + "acc_stderr": 0.047184714852195886, + "acc_norm": 0.44642857142857145, + "acc_norm_stderr": 0.047184714852195886 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.0340763209385405, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.0340763209385405 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.3307262569832402, + "acc_stderr": 0.01573502625896612, + "acc_norm": 
0.3307262569832402, + "acc_norm_stderr": 0.01573502625896612 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.68, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.68, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.48161764705882354, + "acc_stderr": 0.030352303395351964, + "acc_norm": 0.48161764705882354, + "acc_norm_stderr": 0.030352303395351964 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6653061224489796, + "acc_stderr": 0.030209235226242304, + "acc_norm": 0.6653061224489796, + "acc_norm_stderr": 0.030209235226242304 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7383966244725738, + "acc_stderr": 0.028609516716994934, + "acc_norm": 0.7383966244725738, + "acc_norm_stderr": 0.028609516716994934 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.4471968709256845, + "acc_stderr": 0.012698825252435117, + "acc_norm": 0.4471968709256845, + "acc_norm_stderr": 0.012698825252435117 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6813725490196079, + "acc_stderr": 0.032702871814820816, + "acc_norm": 0.6813725490196079, + "acc_norm_stderr": 0.032702871814820816 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6242424242424243, + "acc_stderr": 0.03781887353205983, + "acc_norm": 0.6242424242424243, + "acc_norm_stderr": 0.03781887353205983 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.7637698898408812, + "mc1_stderr": 0.014869755015871122, + "mc2": 0.8431626816195691, + "mc2_stderr": 0.012132117847955096 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5064935064935064, + "acc_stderr": 0.017188904359077314, + "acc_norm": 0.5395513577331759, + "acc_norm_stderr": 0.017136487626049846 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 
0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + 
"harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "chihoonlee10/T3Q-ko-solar-dpo-v5.0", + "model_sha": "c42cc63981ca164da92be524e93ae66e4c92ad2c", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/chihoonlee10/T3Q-ko-solar-dpo-v6.0/result_2024-04-04 07:08:58.json b/chihoonlee10/T3Q-ko-solar-dpo-v6.0/result_2024-04-04 07:08:58.json new file mode 100644 index 0000000000000000000000000000000000000000..11dfd8fda770e7b915525675ee23007e1ae0a4d0 --- /dev/null +++ b/chihoonlee10/T3Q-ko-solar-dpo-v6.0/result_2024-04-04 07:08:58.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.7414675767918089, + "acc_stderr": 0.012794553754288677, + "acc_norm": 0.7747440273037542, + "acc_norm_stderr": 0.012207839995407302 + }, + "harness|ko_hellaswag|10": { + "acc": 0.692989444333798, + "acc_stderr": 0.004603111343213072, + "acc_norm": 0.8118900617406891, + "acc_norm_stderr": 0.0039000125049579253 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6783625730994152, + "acc_stderr": 0.03582529442573122, + "acc_norm": 0.6783625730994152, + "acc_norm_stderr": 0.03582529442573122 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.7378640776699029, + "acc_stderr": 
0.043546310772605956, + "acc_norm": 0.7378640776699029, + "acc_norm_stderr": 0.043546310772605956 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.685823754789272, + "acc_stderr": 0.01659929173588493, + "acc_norm": 0.685823754789272, + "acc_norm_stderr": 0.01659929173588493 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.04292596718256981, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.04292596718256981 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4723404255319149, + "acc_stderr": 0.03263597118409769, + "acc_norm": 0.4723404255319149, + "acc_norm_stderr": 0.03263597118409769 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4578313253012048, + "acc_stderr": 0.0387862677100236, + "acc_norm": 0.4578313253012048, + "acc_norm_stderr": 0.0387862677100236 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6334405144694534, + "acc_stderr": 0.027368078243971646, + "acc_norm": 0.6334405144694534, + "acc_norm_stderr": 0.027368078243971646 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.6412556053811659, + "acc_stderr": 0.032190792004199956, + "acc_norm": 0.6412556053811659, + "acc_norm_stderr": 0.032190792004199956 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.549618320610687, + "acc_stderr": 0.04363643698524779, + "acc_norm": 0.549618320610687, + "acc_norm_stderr": 0.04363643698524779 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.52, + "acc_stderr": 0.05021167315686781, + "acc_norm": 0.52, + "acc_norm_stderr": 0.05021167315686781 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7474747474747475, + "acc_stderr": 0.030954055470365907, + "acc_norm": 0.7474747474747475, + "acc_norm_stderr": 0.030954055470365907 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5172413793103449, + "acc_stderr": 
0.04164188720169375, + "acc_norm": 0.5172413793103449, + "acc_norm_stderr": 0.04164188720169375 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3431372549019608, + "acc_stderr": 0.04724007352383887, + "acc_norm": 0.3431372549019608, + "acc_norm_stderr": 0.04724007352383887 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6134453781512605, + "acc_stderr": 0.03163145807552378, + "acc_norm": 0.6134453781512605, + "acc_norm_stderr": 0.03163145807552378 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.6051282051282051, + "acc_stderr": 0.024784316942156406, + "acc_norm": 0.6051282051282051, + "acc_norm_stderr": 0.024784316942156406 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.69, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.69, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6481481481481481, + "acc_stderr": 0.046166311118017146, + "acc_norm": 0.6481481481481481, + "acc_norm_stderr": 0.046166311118017146 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4729064039408867, + "acc_stderr": 0.03512819077876106, + "acc_norm": 0.4729064039408867, + "acc_norm_stderr": 0.03512819077876106 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5838709677419355, + "acc_stderr": 0.028040981380761533, + "acc_norm": 0.5838709677419355, + "acc_norm_stderr": 0.028040981380761533 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.8333333333333334, + "acc_stderr": 0.024414947304543688, + "acc_norm": 0.8333333333333334, + "acc_norm_stderr": 0.024414947304543688 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5622641509433962, + "acc_stderr": 0.03053333843046752, + "acc_norm": 0.5622641509433962, + "acc_norm_stderr": 0.03053333843046752 + }, + "harness|ko_mmlu_public_relations|5": { + 
"acc": 0.6, + "acc_stderr": 0.0469237132203465, + "acc_norm": 0.6, + "acc_norm_stderr": 0.0469237132203465 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.362962962962963, + "acc_stderr": 0.029318203645206865, + "acc_norm": 0.362962962962963, + "acc_norm_stderr": 0.029318203645206865 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3443708609271523, + "acc_stderr": 0.038796870240733264, + "acc_norm": 0.3443708609271523, + "acc_norm_stderr": 0.038796870240733264 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7164179104477612, + "acc_stderr": 0.03187187537919796, + "acc_norm": 0.7164179104477612, + "acc_norm_stderr": 0.03187187537919796 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5260115606936416, + "acc_stderr": 0.038073017265045125, + "acc_norm": 0.5260115606936416, + "acc_norm_stderr": 0.038073017265045125 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.4497354497354497, + "acc_stderr": 0.02562085704293665, + "acc_norm": 0.4497354497354497, + "acc_norm_stderr": 0.02562085704293665 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.6180555555555556, + "acc_stderr": 0.040629907841466674, + "acc_norm": 0.6180555555555556, + "acc_norm_stderr": 0.040629907841466674 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.73, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.73, + "acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5867052023121387, + "acc_stderr": 0.02651126136940924, + "acc_norm": 0.5867052023121387, + "acc_norm_stderr": 0.02651126136940924 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.6073619631901841, + "acc_stderr": 0.0383674090783103, + "acc_norm": 0.6073619631901841, + "acc_norm_stderr": 0.0383674090783103 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 
0.6635802469135802, + "acc_stderr": 0.02628973494595293, + "acc_norm": 0.6635802469135802, + "acc_norm_stderr": 0.02628973494595293 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7564766839378239, + "acc_stderr": 0.030975436386845436, + "acc_norm": 0.7564766839378239, + "acc_norm_stderr": 0.030975436386845436 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.43859649122807015, + "acc_stderr": 0.04668000738510455, + "acc_norm": 0.43859649122807015, + "acc_norm_stderr": 0.04668000738510455 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.7229357798165138, + "acc_stderr": 0.019188482590169545, + "acc_norm": 0.7229357798165138, + "acc_norm_stderr": 0.019188482590169545 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3968253968253968, + "acc_stderr": 0.04375888492727062, + "acc_norm": 0.3968253968253968, + "acc_norm_stderr": 0.04375888492727062 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5947712418300654, + "acc_stderr": 0.028110928492809075, + "acc_norm": 0.5947712418300654, + "acc_norm_stderr": 0.028110928492809075 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.7, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.7, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7107438016528925, + "acc_stderr": 0.04139112727635463, + "acc_norm": 0.7107438016528925, + "acc_norm_stderr": 0.04139112727635463 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.6381578947368421, + "acc_stderr": 0.03910525752849724, + "acc_norm": 0.6381578947368421, + "acc_norm_stderr": 0.03910525752849724 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5784313725490197, + "acc_stderr": 0.019977422600227477, + "acc_norm": 0.5784313725490197, + "acc_norm_stderr": 0.019977422600227477 + }, + 
"harness|ko_mmlu_professional_accounting|5": { + "acc": 0.4326241134751773, + "acc_stderr": 0.029555454236778852, + "acc_norm": 0.4326241134751773, + "acc_norm_stderr": 0.029555454236778852 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.44642857142857145, + "acc_stderr": 0.047184714852195886, + "acc_norm": 0.44642857142857145, + "acc_norm_stderr": 0.047184714852195886 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.47685185185185186, + "acc_stderr": 0.034063153607115086, + "acc_norm": 0.47685185185185186, + "acc_norm_stderr": 0.034063153607115086 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.3340782122905028, + "acc_stderr": 0.015774911422381632, + "acc_norm": 0.3340782122905028, + "acc_norm_stderr": 0.015774911422381632 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.71, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.71, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5, + "acc_stderr": 0.030372836961539352, + "acc_norm": 0.5, + "acc_norm_stderr": 0.030372836961539352 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.673469387755102, + "acc_stderr": 0.03002105623844033, + "acc_norm": 0.673469387755102, + "acc_norm_stderr": 0.03002105623844033 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7383966244725738, + "acc_stderr": 0.028609516716994934, + "acc_norm": 0.7383966244725738, + "acc_norm_stderr": 0.028609516716994934 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.4485006518904824, + "acc_stderr": 0.01270231749055982, + "acc_norm": 0.4485006518904824, + "acc_norm_stderr": 0.01270231749055982 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6862745098039216, + "acc_stderr": 0.03256685484460388, + "acc_norm": 0.6862745098039216, 
+ "acc_norm_stderr": 0.03256685484460388 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6303030303030303, + "acc_stderr": 0.03769430314512568, + "acc_norm": 0.6303030303030303, + "acc_norm_stderr": 0.03769430314512568 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.769889840881273, + "mc1_stderr": 0.014734557959807763, + "mc2": 0.8423262420500789, + "mc2_stderr": 0.012271963463540116 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5041322314049587, + "acc_stderr": 0.01718976703213082, + "acc_norm": 0.538370720188902, + "acc_norm_stderr": 0.017139660221845557 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + 
"harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "chihoonlee10/T3Q-ko-solar-dpo-v6.0", + "model_sha": "7fa856ba6fef319d7e93df31da91fbe32b2f1070", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/chihoonlee10/T3Q-ko-solar-dpo-v7.0/result_2024-05-09 04:42:40.json b/chihoonlee10/T3Q-ko-solar-dpo-v7.0/result_2024-05-09 04:42:40.json new file mode 100644 index 0000000000000000000000000000000000000000..3bab8ca400c3bae2bdf19c1d5d6435e7c336d0e7 --- /dev/null +++ b/chihoonlee10/T3Q-ko-solar-dpo-v7.0/result_2024-05-09 04:42:40.json @@ -0,0 +1,444 @@ 
+{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.7559726962457338, + "acc_stderr": 0.012551447627856255, + "acc_norm": 0.7909556313993175, + "acc_norm_stderr": 0.011882746987406467 + }, + "harness|ko_hellaswag|10": { + "acc": 0.73132842063334, + "acc_stderr": 0.004423628080052022, + "acc_norm": 0.8149770961959769, + "acc_norm_stderr": 0.00387522536936573 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.7076023391812866, + "acc_stderr": 0.034886477134579215, + "acc_norm": 0.7076023391812866, + "acc_norm_stderr": 0.034886477134579215 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.7669902912621359, + "acc_stderr": 0.04185832598928315, + "acc_norm": 0.7669902912621359, + "acc_norm_stderr": 0.04185832598928315 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6922094508301405, + "acc_stderr": 0.016506045045155637, + "acc_norm": 0.6922094508301405, + "acc_norm_stderr": 0.016506045045155637 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.5185185185185185, + "acc_stderr": 0.043163785995113245, + "acc_norm": 0.5185185185185185, + "acc_norm_stderr": 0.043163785995113245 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4808510638297872, + "acc_stderr": 0.032662042990646775, + "acc_norm": 0.4808510638297872, + "acc_norm_stderr": 0.032662042990646775 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4819277108433735, + "acc_stderr": 0.03889951252827216, + "acc_norm": 0.4819277108433735, + "acc_norm_stderr": 0.03889951252827216 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.639871382636656, + "acc_stderr": 0.02726429759980401, + "acc_norm": 0.639871382636656, + "acc_norm_stderr": 0.02726429759980401 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.6681614349775785, + "acc_stderr": 0.031602951437766785, + "acc_norm": 0.6681614349775785, + "acc_norm_stderr": 
0.031602951437766785 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5877862595419847, + "acc_stderr": 0.043171711948702556, + "acc_norm": 0.5877862595419847, + "acc_norm_stderr": 0.043171711948702556 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7424242424242424, + "acc_stderr": 0.031156269519646847, + "acc_norm": 0.7424242424242424, + "acc_norm_stderr": 0.031156269519646847 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5310344827586206, + "acc_stderr": 0.04158632762097828, + "acc_norm": 0.5310344827586206, + "acc_norm_stderr": 0.04158632762097828 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.04617034827006717, + "acc_norm": 0.3137254901960784, + "acc_norm_stderr": 0.04617034827006717 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6218487394957983, + "acc_stderr": 0.03149930577784906, + "acc_norm": 0.6218487394957983, + "acc_norm_stderr": 0.03149930577784906 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.6230769230769231, + "acc_stderr": 0.024570975364225995, + "acc_norm": 0.6230769230769231, + "acc_norm_stderr": 0.024570975364225995 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.66, + "acc_stderr": 0.04760952285695237, + "acc_norm": 0.66, + "acc_norm_stderr": 0.04760952285695237 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6944444444444444, + "acc_stderr": 0.04453197507374984, + "acc_norm": 0.6944444444444444, + "acc_norm_stderr": 0.04453197507374984 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4236453201970443, + "acc_stderr": 0.03476725747649038, + "acc_norm": 0.4236453201970443, 
+ "acc_norm_stderr": 0.03476725747649038 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6387096774193548, + "acc_stderr": 0.02732754844795755, + "acc_norm": 0.6387096774193548, + "acc_norm_stderr": 0.02732754844795755 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.8162393162393162, + "acc_stderr": 0.025372139671722933, + "acc_norm": 0.8162393162393162, + "acc_norm_stderr": 0.025372139671722933 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5622641509433962, + "acc_stderr": 0.030533338430467516, + "acc_norm": 0.5622641509433962, + "acc_norm_stderr": 0.030533338430467516 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.04607582090719976, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.04607582090719976 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3592592592592593, + "acc_stderr": 0.029252905927251976, + "acc_norm": 0.3592592592592593, + "acc_norm_stderr": 0.029252905927251976 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3443708609271523, + "acc_stderr": 0.038796870240733264, + "acc_norm": 0.3443708609271523, + "acc_norm_stderr": 0.038796870240733264 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.736318407960199, + "acc_stderr": 0.03115715086935557, + "acc_norm": 0.736318407960199, + "acc_norm_stderr": 0.03115715086935557 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5664739884393064, + "acc_stderr": 0.03778621079092055, + "acc_norm": 0.5664739884393064, + "acc_norm_stderr": 0.03778621079092055 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.4417989417989418, + "acc_stderr": 0.025576257061253833, + "acc_norm": 0.4417989417989418, + "acc_norm_stderr": 0.025576257061253833 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5902777777777778, + "acc_stderr": 0.04112490974670787, + "acc_norm": 0.5902777777777778, + "acc_norm_stderr": 0.04112490974670787 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.36, + 
"acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.76, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.76, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.6011560693641619, + "acc_stderr": 0.02636243757454654, + "acc_norm": 0.6011560693641619, + "acc_norm_stderr": 0.02636243757454654 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5828220858895705, + "acc_stderr": 0.038741028598180814, + "acc_norm": 0.5828220858895705, + "acc_norm_stderr": 0.038741028598180814 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6728395061728395, + "acc_stderr": 0.026105673861409825, + "acc_norm": 0.6728395061728395, + "acc_norm_stderr": 0.026105673861409825 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.047609522856952344, + "acc_norm": 0.34, + "acc_norm_stderr": 0.047609522856952344 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7668393782383419, + "acc_stderr": 0.030516111371476008, + "acc_norm": 0.7668393782383419, + "acc_norm_stderr": 0.030516111371476008 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.47368421052631576, + "acc_stderr": 0.046970851366478626, + "acc_norm": 0.47368421052631576, + "acc_norm_stderr": 0.046970851366478626 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.7174311926605504, + "acc_stderr": 0.01930424349770715, + "acc_norm": 0.7174311926605504, + "acc_norm_stderr": 0.01930424349770715 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.42063492063492064, + "acc_stderr": 0.04415438226743744, + "acc_norm": 0.42063492063492064, + "acc_norm_stderr": 0.04415438226743744 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.6209150326797386, + "acc_stderr": 0.027780141207023344, + "acc_norm": 0.6209150326797386, + "acc_norm_stderr": 0.027780141207023344 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 
0.71, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.71, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7520661157024794, + "acc_stderr": 0.039418975265163025, + "acc_norm": 0.7520661157024794, + "acc_norm_stderr": 0.039418975265163025 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.6447368421052632, + "acc_stderr": 0.03894734487013316, + "acc_norm": 0.6447368421052632, + "acc_norm_stderr": 0.03894734487013316 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5800653594771242, + "acc_stderr": 0.01996681117825648, + "acc_norm": 0.5800653594771242, + "acc_norm_stderr": 0.01996681117825648 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.4432624113475177, + "acc_stderr": 0.029634838473766006, + "acc_norm": 0.4432624113475177, + "acc_norm_stderr": 0.029634838473766006 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.41964285714285715, + "acc_stderr": 0.04684099321077106, + "acc_norm": 0.41964285714285715, + "acc_norm_stderr": 0.04684099321077106 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5138888888888888, + "acc_stderr": 0.03408655867977748, + "acc_norm": 0.5138888888888888, + "acc_norm_stderr": 0.03408655867977748 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.3575418994413408, + "acc_stderr": 0.016029394474894886, + "acc_norm": 0.3575418994413408, + "acc_norm_stderr": 0.016029394474894886 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.71, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.71, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5257352941176471, + "acc_stderr": 0.030332578094555033, + "acc_norm": 0.5257352941176471, + "acc_norm_stderr": 0.030332578094555033 + }, + 
"harness|ko_mmlu_security_studies|5": { + "acc": 0.636734693877551, + "acc_stderr": 0.030789051139030806, + "acc_norm": 0.636734693877551, + "acc_norm_stderr": 0.030789051139030806 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7257383966244726, + "acc_stderr": 0.029041333510598025, + "acc_norm": 0.7257383966244726, + "acc_norm_stderr": 0.029041333510598025 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.45045632333767927, + "acc_stderr": 0.012707390438502348, + "acc_norm": 0.45045632333767927, + "acc_norm_stderr": 0.012707390438502348 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6470588235294118, + "acc_stderr": 0.03354092437591519, + "acc_norm": 0.6470588235294118, + "acc_norm_stderr": 0.03354092437591519 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6606060606060606, + "acc_stderr": 0.03697442205031595, + "acc_norm": 0.6606060606060606, + "acc_norm_stderr": 0.03697442205031595 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.7674418604651163, + "mc1_stderr": 0.014789157531080501, + "mc2": 0.8316562107887671, + "mc2_stderr": 0.012858018602641902 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5041322314049587, + "acc_stderr": 0.017189767032130817, + "acc_norm": 0.526564344746163, + "acc_norm_stderr": 0.017166075717577747 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + 
"harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + 
"harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "chihoonlee10/T3Q-ko-solar-dpo-v7.0", + "model_sha": "16e172758086d3783553bf7202d6001193a52320", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/chihoonlee10/T3Q-ko-solar-dpo-v8.0/result_2024-05-13 10:58:01.json b/chihoonlee10/T3Q-ko-solar-dpo-v8.0/result_2024-05-13 10:58:01.json new file mode 100644 index 0000000000000000000000000000000000000000..2c4b74d5377e3d9cf5913ff959f45f3f64252f5d --- /dev/null +++ b/chihoonlee10/T3Q-ko-solar-dpo-v8.0/result_2024-05-13 10:58:01.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.7414675767918089, + "acc_stderr": 0.012794553754288679, + "acc_norm": 0.773037542662116, + "acc_norm_stderr": 0.012240491536132861 + }, + "harness|ko_hellaswag|10": { + "acc": 0.7057359091814379, + "acc_stderr": 0.004547798964126636, + "acc_norm": 0.8136825333598885, + "acc_norm_stderr": 0.0038856689631260916 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.695906432748538, + "acc_stderr": 0.03528211258245232, + "acc_norm": 0.695906432748538, + "acc_norm_stderr": 0.03528211258245232 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.7475728155339806, + "acc_stderr": 0.04301250399690878, + "acc_norm": 0.7475728155339806, + "acc_norm_stderr": 0.04301250399690878 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6883780332056194, + "acc_stderr": 0.016562433867284176, + "acc_norm": 0.6883780332056194, + "acc_norm_stderr": 0.016562433867284176 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45185185185185184, + "acc_stderr": 0.04299268905480863, + "acc_norm": 0.45185185185185184, + "acc_norm_stderr": 0.04299268905480863 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 
0.04725815626252604 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4595744680851064, + "acc_stderr": 0.03257901482099836, + "acc_norm": 0.4595744680851064, + "acc_norm_stderr": 0.03257901482099836 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.5, + "acc_stderr": 0.03892494720807614, + "acc_norm": 0.5, + "acc_norm_stderr": 0.03892494720807614 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.617363344051447, + "acc_stderr": 0.027604689028581975, + "acc_norm": 0.617363344051447, + "acc_norm_stderr": 0.027604689028581975 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.6412556053811659, + "acc_stderr": 0.032190792004199956, + "acc_norm": 0.6412556053811659, + "acc_norm_stderr": 0.032190792004199956 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.549618320610687, + "acc_stderr": 0.04363643698524779, + "acc_norm": 0.549618320610687, + "acc_norm_stderr": 0.04363643698524779 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7373737373737373, + "acc_stderr": 0.03135305009533086, + "acc_norm": 0.7373737373737373, + "acc_norm_stderr": 0.03135305009533086 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5586206896551724, + "acc_stderr": 0.04137931034482758, + "acc_norm": 0.5586206896551724, + "acc_norm_stderr": 0.04137931034482758 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04690650298201942, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04690650298201942 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.634453781512605, + "acc_stderr": 0.03128217706368462, + "acc_norm": 0.634453781512605, + "acc_norm_stderr": 0.03128217706368462 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.6, + "acc_stderr": 0.02483881198803317, + "acc_norm": 0.6, + "acc_norm_stderr": 
0.02483881198803317 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.69, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.69, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6759259259259259, + "acc_stderr": 0.04524596007030048, + "acc_norm": 0.6759259259259259, + "acc_norm_stderr": 0.04524596007030048 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.47783251231527096, + "acc_stderr": 0.035145285621750094, + "acc_norm": 0.47783251231527096, + "acc_norm_stderr": 0.035145285621750094 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5935483870967742, + "acc_stderr": 0.027941727346256308, + "acc_norm": 0.5935483870967742, + "acc_norm_stderr": 0.027941727346256308 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.8290598290598291, + "acc_stderr": 0.024662496845209807, + "acc_norm": 0.8290598290598291, + "acc_norm_stderr": 0.024662496845209807 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5735849056603773, + "acc_stderr": 0.03043779434298305, + "acc_norm": 0.5735849056603773, + "acc_norm_stderr": 0.03043779434298305 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6181818181818182, + "acc_stderr": 0.04653429807913507, + "acc_norm": 0.6181818181818182, + "acc_norm_stderr": 0.04653429807913507 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.35555555555555557, + "acc_stderr": 0.029185714949857406, + "acc_norm": 0.35555555555555557, + "acc_norm_stderr": 0.029185714949857406 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3443708609271523, + "acc_stderr": 0.038796870240733264, + "acc_norm": 0.3443708609271523, + "acc_norm_stderr": 0.038796870240733264 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7213930348258707, + "acc_stderr": 0.0317005618349731, + "acc_norm": 
0.7213930348258707, + "acc_norm_stderr": 0.0317005618349731 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5202312138728323, + "acc_stderr": 0.03809342081273956, + "acc_norm": 0.5202312138728323, + "acc_norm_stderr": 0.03809342081273956 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.43915343915343913, + "acc_stderr": 0.025559920550531013, + "acc_norm": 0.43915343915343913, + "acc_norm_stderr": 0.025559920550531013 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.6180555555555556, + "acc_stderr": 0.040629907841466674, + "acc_norm": 0.6180555555555556, + "acc_norm_stderr": 0.040629907841466674 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.75, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.75, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5924855491329479, + "acc_stderr": 0.026454578146931505, + "acc_norm": 0.5924855491329479, + "acc_norm_stderr": 0.026454578146931505 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.6073619631901841, + "acc_stderr": 0.0383674090783103, + "acc_norm": 0.6073619631901841, + "acc_norm_stderr": 0.0383674090783103 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.654320987654321, + "acc_stderr": 0.026462487777001865, + "acc_norm": 0.654320987654321, + "acc_norm_stderr": 0.026462487777001865 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7616580310880829, + "acc_stderr": 0.030748905363909895, + "acc_norm": 0.7616580310880829, + "acc_norm_stderr": 0.030748905363909895 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.4473684210526316, + "acc_stderr": 0.04677473004491199, + 
"acc_norm": 0.4473684210526316, + "acc_norm_stderr": 0.04677473004491199 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.728440366972477, + "acc_stderr": 0.019069098363191452, + "acc_norm": 0.728440366972477, + "acc_norm_stderr": 0.019069098363191452 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.40476190476190477, + "acc_stderr": 0.04390259265377561, + "acc_norm": 0.40476190476190477, + "acc_norm_stderr": 0.04390259265377561 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5882352941176471, + "acc_stderr": 0.028180596328259287, + "acc_norm": 0.5882352941176471, + "acc_norm_stderr": 0.028180596328259287 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.71, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.71, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7024793388429752, + "acc_stderr": 0.04173349148083499, + "acc_norm": 0.7024793388429752, + "acc_norm_stderr": 0.04173349148083499 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.6381578947368421, + "acc_stderr": 0.03910525752849724, + "acc_norm": 0.6381578947368421, + "acc_norm_stderr": 0.03910525752849724 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.576797385620915, + "acc_stderr": 0.01998780976948207, + "acc_norm": 0.576797385620915, + "acc_norm_stderr": 0.01998780976948207 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.4148936170212766, + "acc_stderr": 0.0293922365846125, + "acc_norm": 0.4148936170212766, + "acc_norm_stderr": 0.0293922365846125 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.04697113923010213, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.04697113923010213 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.0340763209385405, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.0340763209385405 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 
0.32849162011173183, + "acc_stderr": 0.015707935398496454, + "acc_norm": 0.32849162011173183, + "acc_norm_stderr": 0.015707935398496454 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.68, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.68, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5220588235294118, + "acc_stderr": 0.030343264224213528, + "acc_norm": 0.5220588235294118, + "acc_norm_stderr": 0.030343264224213528 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6489795918367347, + "acc_stderr": 0.030555316755573644, + "acc_norm": 0.6489795918367347, + "acc_norm_stderr": 0.030555316755573644 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7130801687763713, + "acc_stderr": 0.029443773022594693, + "acc_norm": 0.7130801687763713, + "acc_norm_stderr": 0.029443773022594693 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.44328552803129073, + "acc_stderr": 0.012687818419599917, + "acc_norm": 0.44328552803129073, + "acc_norm_stderr": 0.012687818419599917 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.033086111132364364, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.033086111132364364 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6242424242424243, + "acc_stderr": 0.03781887353205983, + "acc_norm": 0.6242424242424243, + "acc_norm_stderr": 0.03781887353205983 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.773561811505508, + "mc1_stderr": 0.014651337324602573, + "mc2": 0.8440286993540121, + "mc2_stderr": 0.012343615316512327 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4805194805194805, + "acc_stderr": 0.017177301992342544, + "acc_norm": 0.5076741440377804, + "acc_norm_stderr": 
0.017188329219654276 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + 
"harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "chihoonlee10/T3Q-ko-solar-dpo-v8.0", + "model_sha": "d20658c5e53d04a803372b549d11ee4b669cdf82", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/chihoonlee10/T3Q-ko-solar-sft-dpo-v1.0/result_2024-03-27 07:14:17.json b/chihoonlee10/T3Q-ko-solar-sft-dpo-v1.0/result_2024-03-27 07:14:17.json new file mode 100644 index 0000000000000000000000000000000000000000..c954c879c5f86a994171503a3256be6b54ef97b5 --- /dev/null +++ b/chihoonlee10/T3Q-ko-solar-sft-dpo-v1.0/result_2024-03-27 07:14:17.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.6953924914675768, + "acc_stderr": 0.013449522109932487, + "acc_norm": 0.7380546075085325, + "acc_norm_stderr": 0.012849054826858117 + }, + "harness|ko_hellaswag|10": { + "acc": 0.548994224258116, + "acc_stderr": 0.004965768348628077, + "acc_norm": 0.7087233618801035, + "acc_norm_stderr": 0.00453422135004609 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.7309941520467836, + "acc_stderr": 0.03401052620104089, + "acc_norm": 0.7309941520467836, + "acc_norm_stderr": 
0.03401052620104089 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.7669902912621359, + "acc_stderr": 0.04185832598928315, + "acc_norm": 0.7669902912621359, + "acc_norm_stderr": 0.04185832598928315 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.7126436781609196, + "acc_stderr": 0.016182410730682703, + "acc_norm": 0.7126436781609196, + "acc_norm_stderr": 0.016182410730682703 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4740740740740741, + "acc_stderr": 0.04313531696750574, + "acc_norm": 0.4740740740740741, + "acc_norm_stderr": 0.04313531696750574 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.5148936170212766, + "acc_stderr": 0.03267151848924776, + "acc_norm": 0.5148936170212766, + "acc_norm_stderr": 0.03267151848924776 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.5, + "acc_stderr": 0.03892494720807614, + "acc_norm": 0.5, + "acc_norm_stderr": 0.03892494720807614 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6430868167202572, + "acc_stderr": 0.02721042037593403, + "acc_norm": 0.6430868167202572, + "acc_norm_stderr": 0.02721042037593403 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.6591928251121076, + "acc_stderr": 0.03181149747055359, + "acc_norm": 0.6591928251121076, + "acc_norm_stderr": 0.03181149747055359 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5877862595419847, + "acc_stderr": 0.04317171194870254, + "acc_norm": 0.5877862595419847, + "acc_norm_stderr": 0.04317171194870254 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7424242424242424, + "acc_stderr": 0.031156269519646847, + "acc_norm": 0.7424242424242424, + "acc_norm_stderr": 0.031156269519646847 + }, + 
"harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5517241379310345, + "acc_stderr": 0.041443118108781526, + "acc_norm": 0.5517241379310345, + "acc_norm_stderr": 0.041443118108781526 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.30392156862745096, + "acc_stderr": 0.045766654032077636, + "acc_norm": 0.30392156862745096, + "acc_norm_stderr": 0.045766654032077636 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6512605042016807, + "acc_stderr": 0.030956636328566548, + "acc_norm": 0.6512605042016807, + "acc_norm_stderr": 0.030956636328566548 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.6615384615384615, + "acc_stderr": 0.02399150050031304, + "acc_norm": 0.6615384615384615, + "acc_norm_stderr": 0.02399150050031304 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.75, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.75, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6851851851851852, + "acc_stderr": 0.04489931073591312, + "acc_norm": 0.6851851851851852, + "acc_norm_stderr": 0.04489931073591312 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4827586206896552, + "acc_stderr": 0.035158955511656986, + "acc_norm": 0.4827586206896552, + "acc_norm_stderr": 0.035158955511656986 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6064516129032258, + "acc_stderr": 0.027791878753132274, + "acc_norm": 0.6064516129032258, + "acc_norm_stderr": 0.027791878753132274 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.8547008547008547, + "acc_stderr": 0.02308663508684141, + "acc_norm": 0.8547008547008547, + "acc_norm_stderr": 0.02308663508684141 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.6113207547169811, + "acc_stderr": 0.030000485448675986, + "acc_norm": 0.6113207547169811, 
+ "acc_norm_stderr": 0.030000485448675986 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6181818181818182, + "acc_stderr": 0.046534298079135075, + "acc_norm": 0.6181818181818182, + "acc_norm_stderr": 0.046534298079135075 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.029723278961476664, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.029723278961476664 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.36423841059602646, + "acc_stderr": 0.03929111781242741, + "acc_norm": 0.36423841059602646, + "acc_norm_stderr": 0.03929111781242741 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7512437810945274, + "acc_stderr": 0.030567675938916707, + "acc_norm": 0.7512437810945274, + "acc_norm_stderr": 0.030567675938916707 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5664739884393064, + "acc_stderr": 0.03778621079092056, + "acc_norm": 0.5664739884393064, + "acc_norm_stderr": 0.03778621079092056 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.4470899470899471, + "acc_stderr": 0.025606723995777025, + "acc_norm": 0.4470899470899471, + "acc_norm_stderr": 0.025606723995777025 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.6111111111111112, + "acc_stderr": 0.04076663253918567, + "acc_norm": 0.6111111111111112, + "acc_norm_stderr": 0.04076663253918567 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.77, + "acc_stderr": 0.04229525846816507, + "acc_norm": 0.77, + "acc_norm_stderr": 0.04229525846816507 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.6040462427745664, + "acc_stderr": 0.026329813341946243, + "acc_norm": 0.6040462427745664, + "acc_norm_stderr": 0.026329813341946243 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5950920245398773, + "acc_stderr": 
0.038566721635489125, + "acc_norm": 0.5950920245398773, + "acc_norm_stderr": 0.038566721635489125 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6759259259259259, + "acc_stderr": 0.026041766202717163, + "acc_norm": 0.6759259259259259, + "acc_norm_stderr": 0.026041766202717163 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7616580310880829, + "acc_stderr": 0.03074890536390989, + "acc_norm": 0.7616580310880829, + "acc_norm_stderr": 0.03074890536390989 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.4824561403508772, + "acc_stderr": 0.04700708033551038, + "acc_norm": 0.4824561403508772, + "acc_norm_stderr": 0.04700708033551038 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.7504587155963303, + "acc_stderr": 0.018553897629501617, + "acc_norm": 0.7504587155963303, + "acc_norm_stderr": 0.018553897629501617 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.38095238095238093, + "acc_stderr": 0.043435254289490965, + "acc_norm": 0.38095238095238093, + "acc_norm_stderr": 0.043435254289490965 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.6437908496732027, + "acc_stderr": 0.027420477662629245, + "acc_norm": 0.6437908496732027, + "acc_norm_stderr": 0.027420477662629245 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.68, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.68, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7355371900826446, + "acc_stderr": 0.04026187527591206, + "acc_norm": 0.7355371900826446, + "acc_norm_stderr": 0.04026187527591206 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.6381578947368421, + "acc_stderr": 0.03910525752849725, + "acc_norm": 0.6381578947368421, + "acc_norm_stderr": 0.03910525752849725 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 
0.5964052287581699, + "acc_stderr": 0.019848280168401164, + "acc_norm": 0.5964052287581699, + "acc_norm_stderr": 0.019848280168401164 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.450354609929078, + "acc_stderr": 0.029680105565029036, + "acc_norm": 0.450354609929078, + "acc_norm_stderr": 0.029680105565029036 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4642857142857143, + "acc_stderr": 0.04733667890053756, + "acc_norm": 0.4642857142857143, + "acc_norm_stderr": 0.04733667890053756 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.47685185185185186, + "acc_stderr": 0.03406315360711507, + "acc_norm": 0.47685185185185186, + "acc_norm_stderr": 0.03406315360711507 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.4011173184357542, + "acc_stderr": 0.016392221899407082, + "acc_norm": 0.4011173184357542, + "acc_norm_stderr": 0.016392221899407082 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.049999999999999996, + "acc_norm": 0.45, + "acc_norm_stderr": 0.049999999999999996 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.7, + "acc_stderr": 0.04605661864718381, + "acc_norm": 0.7, + "acc_norm_stderr": 0.04605661864718381 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5367647058823529, + "acc_stderr": 0.03029061918048569, + "acc_norm": 0.5367647058823529, + "acc_norm_stderr": 0.03029061918048569 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6693877551020408, + "acc_stderr": 0.030116426296540617, + "acc_norm": 0.6693877551020408, + "acc_norm_stderr": 0.030116426296540617 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7088607594936709, + "acc_stderr": 0.029571601065753374, + "acc_norm": 0.7088607594936709, + "acc_norm_stderr": 0.029571601065753374 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.44132985658409385, + "acc_stderr": 0.012682016335646676, + "acc_norm": 0.44132985658409385, + "acc_norm_stderr": 
0.012682016335646676 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6715686274509803, + "acc_stderr": 0.032962451101722294, + "acc_norm": 0.6715686274509803, + "acc_norm_stderr": 0.032962451101722294 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.03756335775187896, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.03756335775187896 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.6695226438188494, + "mc1_stderr": 0.01646676961369831, + "mc2": 0.7810008770724665, + "mc2_stderr": 0.013320696535535692 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.6174734356552538, + "acc_stderr": 0.01670916538722882, + "acc_norm": 0.6375442739079102, + "acc_norm_stderr": 0.01652713124045371 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + 
"harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "chihoonlee10/T3Q-ko-solar-sft-dpo-v1.0", + "model_sha": "e91cc43f0fa35047c692c861860839aa99514264", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/chlee10/T3Q-LLM3-Llama3-sft1.0-dpo1.0/result_2024-05-02 10:01:02.json b/chlee10/T3Q-LLM3-Llama3-sft1.0-dpo1.0/result_2024-05-02 10:01:02.json new file mode 100644 index 
0000000000000000000000000000000000000000..1715a0ff8d99cc53b6e4399b1bff7ad25e22b730 --- /dev/null +++ b/chlee10/T3Q-LLM3-Llama3-sft1.0-dpo1.0/result_2024-05-02 10:01:02.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3609215017064846, + "acc_stderr": 0.014034761386175452, + "acc_norm": 0.3967576791808874, + "acc_norm_stderr": 0.014296513020180637 + }, + "harness|ko_hellaswag|10": { + "acc": 0.35371439952200756, + "acc_stderr": 0.004771447244095126, + "acc_norm": 0.45130452101175067, + "acc_norm_stderr": 0.004966060995315062 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5087719298245614, + "acc_stderr": 0.038342347441649924, + "acc_norm": 0.5087719298245614, + "acc_norm_stderr": 0.038342347441649924 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.44660194174757284, + "acc_stderr": 0.04922424153458933, + "acc_norm": 0.44660194174757284, + "acc_norm_stderr": 0.04922424153458933 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4278416347381865, + "acc_stderr": 0.01769278792780373, + "acc_norm": 0.4278416347381865, + "acc_norm_stderr": 0.01769278792780373 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4, + "acc_stderr": 0.04232073695151589, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04232073695151589 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.35319148936170214, + "acc_stderr": 0.03124532520276193, + "acc_norm": 0.35319148936170214, + "acc_norm_stderr": 0.03124532520276193 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.35542168674698793, + "acc_stderr": 0.03726214354322415, + "acc_norm": 0.35542168674698793, + "acc_norm_stderr": 0.03726214354322415 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.43729903536977494, + "acc_stderr": 0.02817391776176288, + "acc_norm": 0.43729903536977494, + "acc_norm_stderr": 0.02817391776176288 + }, + 
"harness|ko_mmlu_human_aging|5": { + "acc": 0.4304932735426009, + "acc_stderr": 0.033231973029429394, + "acc_norm": 0.4304932735426009, + "acc_norm_stderr": 0.033231973029429394 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48854961832061067, + "acc_stderr": 0.043841400240780176, + "acc_norm": 0.48854961832061067, + "acc_norm_stderr": 0.043841400240780176 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.43, + "acc_stderr": 0.0497569851956243, + "acc_norm": 0.43, + "acc_norm_stderr": 0.0497569851956243 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.4292929292929293, + "acc_stderr": 0.035265527246011986, + "acc_norm": 0.4292929292929293, + "acc_norm_stderr": 0.035265527246011986 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5172413793103449, + "acc_stderr": 0.04164188720169375, + "acc_norm": 0.5172413793103449, + "acc_norm_stderr": 0.04164188720169375 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.04158307533083286, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.04158307533083286 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.03156663099215416, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.03156663099215416 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.37435897435897436, + "acc_stderr": 0.024537591572830517, + "acc_norm": 0.37435897435897436, + "acc_norm_stderr": 0.024537591572830517 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.53, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.53, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4351851851851852, + "acc_stderr": 0.04792898170907061, + "acc_norm": 0.4351851851851852, + "acc_norm_stderr": 
0.04792898170907061 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3103448275862069, + "acc_stderr": 0.03255086769970104, + "acc_norm": 0.3103448275862069, + "acc_norm_stderr": 0.03255086769970104 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4, + "acc_stderr": 0.027869320571664635, + "acc_norm": 0.4, + "acc_norm_stderr": 0.027869320571664635 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.594017094017094, + "acc_stderr": 0.03217180182641087, + "acc_norm": 0.594017094017094, + "acc_norm_stderr": 0.03217180182641087 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.41132075471698115, + "acc_stderr": 0.030285009259009812, + "acc_norm": 0.41132075471698115, + "acc_norm_stderr": 0.030285009259009812 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4, + "acc_stderr": 0.0469237132203465, + "acc_norm": 0.4, + "acc_norm_stderr": 0.0469237132203465 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.29259259259259257, + "acc_stderr": 0.027738969632176095, + "acc_norm": 0.29259259259259257, + "acc_norm_stderr": 0.027738969632176095 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.03710185726119995, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.03710185726119995 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.43781094527363185, + "acc_stderr": 0.0350808011219984, + "acc_norm": 0.43781094527363185, + "acc_norm_stderr": 0.0350808011219984 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3352601156069364, + "acc_stderr": 0.03599586301247078, + "acc_norm": 0.3352601156069364, + "acc_norm_stderr": 0.03599586301247078 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.34656084656084657, + "acc_stderr": 0.024508777521028424, + "acc_norm": 0.34656084656084657, + "acc_norm_stderr": 0.024508777521028424 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3402777777777778, + "acc_stderr": 0.039621355734862175, + "acc_norm": 
0.3402777777777778, + "acc_norm_stderr": 0.039621355734862175 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.55, + "acc_stderr": 0.05000000000000001, + "acc_norm": 0.55, + "acc_norm_stderr": 0.05000000000000001 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4393063583815029, + "acc_stderr": 0.026720034380514995, + "acc_norm": 0.4393063583815029, + "acc_norm_stderr": 0.026720034380514995 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3619631901840491, + "acc_stderr": 0.037757007291414416, + "acc_norm": 0.3619631901840491, + "acc_norm_stderr": 0.037757007291414416 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4351851851851852, + "acc_stderr": 0.027586006221607718, + "acc_norm": 0.4351851851851852, + "acc_norm_stderr": 0.027586006221607718 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.40932642487046633, + "acc_stderr": 0.03548608168860806, + "acc_norm": 0.40932642487046633, + "acc_norm_stderr": 0.03548608168860806 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.04185774424022056, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.04185774424022056 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.41651376146788993, + "acc_stderr": 0.021136376504030874, + "acc_norm": 0.41651376146788993, + "acc_norm_stderr": 0.021136376504030874 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.25396825396825395, + "acc_stderr": 0.038932596106046734, + "acc_norm": 0.25396825396825395, + "acc_norm_stderr": 0.038932596106046734 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.42483660130718953, + "acc_stderr": 0.02830457667314111, + 
"acc_norm": 0.42483660130718953, + "acc_norm_stderr": 0.02830457667314111 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5867768595041323, + "acc_stderr": 0.04495087843548408, + "acc_norm": 0.5867768595041323, + "acc_norm_stderr": 0.04495087843548408 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3026315789473684, + "acc_stderr": 0.03738520676119668, + "acc_norm": 0.3026315789473684, + "acc_norm_stderr": 0.03738520676119668 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3562091503267974, + "acc_stderr": 0.019373332420724493, + "acc_norm": 0.3562091503267974, + "acc_norm_stderr": 0.019373332420724493 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3049645390070922, + "acc_stderr": 0.027464708442022128, + "acc_norm": 0.3049645390070922, + "acc_norm_stderr": 0.027464708442022128 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.39285714285714285, + "acc_stderr": 0.04635550135609976, + "acc_norm": 0.39285714285714285, + "acc_norm_stderr": 0.04635550135609976 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2361111111111111, + "acc_stderr": 0.028963702570791054, + "acc_norm": 0.2361111111111111, + "acc_norm_stderr": 0.028963702570791054 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23798882681564246, + "acc_stderr": 0.014242630070574892, + "acc_norm": 0.23798882681564246, + "acc_norm_stderr": 0.014242630070574892 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.27205882352941174, + 
"acc_stderr": 0.027033041151681456, + "acc_norm": 0.27205882352941174, + "acc_norm_stderr": 0.027033041151681456 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.32653061224489793, + "acc_stderr": 0.030021056238440317, + "acc_norm": 0.32653061224489793, + "acc_norm_stderr": 0.030021056238440317 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5527426160337553, + "acc_stderr": 0.03236564251614192, + "acc_norm": 0.5527426160337553, + "acc_norm_stderr": 0.03236564251614192 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.30638852672750977, + "acc_stderr": 0.011773980329380701, + "acc_norm": 0.30638852672750977, + "acc_norm_stderr": 0.011773980329380701 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.39705882352941174, + "acc_stderr": 0.034341311647191286, + "acc_norm": 0.39705882352941174, + "acc_norm_stderr": 0.034341311647191286 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5212121212121212, + "acc_stderr": 0.03900828913737301, + "acc_norm": 0.5212121212121212, + "acc_norm_stderr": 0.03900828913737301 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.32313341493268055, + "mc1_stderr": 0.016371836286454607, + "mc2": 0.4893709956131311, + "mc2_stderr": 0.015902126228959312 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5017709563164109, + "acc_stderr": 0.01719024627623186, + "acc_norm": 0.51357733175915, + "acc_norm_stderr": 0.017184015060401455 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + 
"harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + 
"harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "chlee10/T3Q-LLM3-Llama3-sft1.0-dpo1.0", + "model_sha": "1c041719fde3f98705dd1311ce66bb54da03ddf2", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/chlee10/T3Q-LLM3-Llama3-sft1.0/result_2024-05-02 06:05:36.json b/chlee10/T3Q-LLM3-Llama3-sft1.0/result_2024-05-02 06:05:36.json new file mode 100644 index 0000000000000000000000000000000000000000..42ed0f2830400615182f35745e9b37345885b62a --- /dev/null +++ b/chlee10/T3Q-LLM3-Llama3-sft1.0/result_2024-05-02 06:05:36.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.35238907849829354, + "acc_stderr": 0.013960142600598684, + "acc_norm": 0.378839590443686, + "acc_norm_stderr": 0.014175915490000322 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3476399123680542, + "acc_stderr": 0.004752476997887832, + "acc_norm": 0.4380601473809998, + "acc_norm_stderr": 0.004951346338164499 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5029239766081871, + "acc_stderr": 0.03834759370936839, + "acc_norm": 0.5029239766081871, + "acc_norm_stderr": 0.03834759370936839 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4077669902912621, + "acc_stderr": 0.048657775704107696, + "acc_norm": 0.4077669902912621, + "acc_norm_stderr": 0.048657775704107696 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.41507024265644954, + "acc_stderr": 0.017620137003655275, + "acc_norm": 0.41507024265644954, + "acc_norm_stderr": 0.017620137003655275 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3925925925925926, + "acc_stderr": 0.042185062153688786, + "acc_norm": 0.3925925925925926, + "acc_norm_stderr": 0.042185062153688786 + }, + 
"harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3446808510638298, + "acc_stderr": 0.03106898596312215, + "acc_norm": 0.3446808510638298, + "acc_norm_stderr": 0.03106898596312215 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3373493975903614, + "acc_stderr": 0.03680783690727581, + "acc_norm": 0.3373493975903614, + "acc_norm_stderr": 0.03680783690727581 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.41479099678456594, + "acc_stderr": 0.027982680459759563, + "acc_norm": 0.41479099678456594, + "acc_norm_stderr": 0.027982680459759563 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4080717488789238, + "acc_stderr": 0.03298574607842821, + "acc_norm": 0.4080717488789238, + "acc_norm_stderr": 0.03298574607842821 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.44274809160305345, + "acc_stderr": 0.04356447202665069, + "acc_norm": 0.44274809160305345, + "acc_norm_stderr": 0.04356447202665069 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.43, + "acc_stderr": 0.0497569851956243, + "acc_norm": 0.43, + "acc_norm_stderr": 0.0497569851956243 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.398989898989899, + "acc_stderr": 0.034889016168527305, + "acc_norm": 0.398989898989899, + "acc_norm_stderr": 0.034889016168527305 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5172413793103449, + "acc_stderr": 0.04164188720169375, + "acc_norm": 0.5172413793103449, + "acc_norm_stderr": 0.04164188720169375 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237656, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237656 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3865546218487395, + "acc_stderr": 0.0316314580755238, + "acc_norm": 0.3865546218487395, + "acc_norm_stderr": 
0.0316314580755238 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.358974358974359, + "acc_stderr": 0.024321738484602354, + "acc_norm": 0.358974358974359, + "acc_norm_stderr": 0.024321738484602354 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.047500773411999854, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.047500773411999854 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.27586206896551724, + "acc_stderr": 0.03144712581678241, + "acc_norm": 0.27586206896551724, + "acc_norm_stderr": 0.03144712581678241 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3935483870967742, + "acc_stderr": 0.027791878753132274, + "acc_norm": 0.3935483870967742, + "acc_norm_stderr": 0.027791878753132274 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5769230769230769, + "acc_stderr": 0.032366121762202014, + "acc_norm": 0.5769230769230769, + "acc_norm_stderr": 0.032366121762202014 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3849056603773585, + "acc_stderr": 0.029946498567699948, + "acc_norm": 0.3849056603773585, + "acc_norm_stderr": 0.029946498567699948 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.36363636363636365, + "acc_stderr": 0.04607582090719976, + "acc_norm": 0.36363636363636365, + "acc_norm_stderr": 0.04607582090719976 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2740740740740741, + "acc_stderr": 0.027195934804085626, + "acc_norm": 0.2740740740740741, + "acc_norm_stderr": 0.027195934804085626 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.037579499229433426, + "acc_norm": 
0.304635761589404, + "acc_norm_stderr": 0.037579499229433426 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.42786069651741293, + "acc_stderr": 0.03498541988407795, + "acc_norm": 0.42786069651741293, + "acc_norm_stderr": 0.03498541988407795 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3468208092485549, + "acc_stderr": 0.036291466701596636, + "acc_norm": 0.3468208092485549, + "acc_norm_stderr": 0.036291466701596636 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3306878306878307, + "acc_stderr": 0.024229965298425082, + "acc_norm": 0.3306878306878307, + "acc_norm_stderr": 0.024229965298425082 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.039420826399272135, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.039420826399272135 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.55, + "acc_stderr": 0.05000000000000001, + "acc_norm": 0.55, + "acc_norm_stderr": 0.05000000000000001 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4161849710982659, + "acc_stderr": 0.026538189104705477, + "acc_norm": 0.4161849710982659, + "acc_norm_stderr": 0.026538189104705477 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3619631901840491, + "acc_stderr": 0.037757007291414416, + "acc_norm": 0.3619631901840491, + "acc_norm_stderr": 0.037757007291414416 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.42901234567901236, + "acc_stderr": 0.027538925613470863, + "acc_norm": 0.42901234567901236, + "acc_norm_stderr": 0.027538925613470863 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.41450777202072536, + "acc_stderr": 
0.03555300319557673, + "acc_norm": 0.41450777202072536, + "acc_norm_stderr": 0.03555300319557673 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.04227054451232199, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.04227054451232199 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3834862385321101, + "acc_stderr": 0.02084715664191598, + "acc_norm": 0.3834862385321101, + "acc_norm_stderr": 0.02084715664191598 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.23015873015873015, + "acc_stderr": 0.03764950879790606, + "acc_norm": 0.23015873015873015, + "acc_norm_stderr": 0.03764950879790606 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.39215686274509803, + "acc_stderr": 0.027956046165424516, + "acc_norm": 0.39215686274509803, + "acc_norm_stderr": 0.027956046165424516 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5619834710743802, + "acc_stderr": 0.04529146804435792, + "acc_norm": 0.5619834710743802, + "acc_norm_stderr": 0.04529146804435792 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.29605263157894735, + "acc_stderr": 0.03715062154998904, + "acc_norm": 0.29605263157894735, + "acc_norm_stderr": 0.03715062154998904 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3480392156862745, + "acc_stderr": 0.01927099870822398, + "acc_norm": 0.3480392156862745, + "acc_norm_stderr": 0.01927099870822398 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2907801418439716, + "acc_stderr": 0.027090664368353178, + "acc_norm": 0.2907801418439716, + "acc_norm_stderr": 0.027090664368353178 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.38392857142857145, + "acc_stderr": 0.04616143075028546, + "acc_norm": 0.38392857142857145, + "acc_norm_stderr": 0.04616143075028546 + }, + 
"harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2175925925925926, + "acc_stderr": 0.02813968944485965, + "acc_norm": 0.2175925925925926, + "acc_norm_stderr": 0.02813968944485965 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23798882681564246, + "acc_stderr": 0.014242630070574892, + "acc_norm": 0.23798882681564246, + "acc_norm_stderr": 0.014242630070574892 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.2757352941176471, + "acc_stderr": 0.027146271936625162, + "acc_norm": 0.2757352941176471, + "acc_norm_stderr": 0.027146271936625162 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.27755102040816326, + "acc_stderr": 0.02866685779027465, + "acc_norm": 0.27755102040816326, + "acc_norm_stderr": 0.02866685779027465 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5569620253164557, + "acc_stderr": 0.03233532777533485, + "acc_norm": 0.5569620253164557, + "acc_norm_stderr": 0.03233532777533485 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.29986962190352023, + "acc_stderr": 0.011702660860193994, + "acc_norm": 0.29986962190352023, + "acc_norm_stderr": 0.011702660860193994 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.034107853389047184, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.034107853389047184 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.03903698647748441, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.03903698647748441 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3047735618115055, + "mc1_stderr": 0.016114124156882473, + "mc2": 
0.4794357153908793, + "mc2_stderr": 0.01585921287240947 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4970484061393152, + "acc_stderr": 0.017190054580194698, + "acc_norm": 0.5100354191263282, + "acc_norm_stderr": 0.01718689128689406 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + 
"harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "chlee10/T3Q-LLM3-Llama3-sft1.0", + "model_sha": "c96875883e0869f2954225e0c610d2b63bb325a0", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/chlee10/T3Q-Llama3-8B-Inst-sft1.0/result_2024-04-24 22:41:21.json b/chlee10/T3Q-Llama3-8B-Inst-sft1.0/result_2024-04-24 22:41:21.json new file mode 100644 index 0000000000000000000000000000000000000000..e1b1d5ec6a5ae71dedd8e62c1e0d65a48363fe7a --- /dev/null +++ b/chlee10/T3Q-Llama3-8B-Inst-sft1.0/result_2024-04-24 22:41:21.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3839590443686007, + "acc_stderr": 0.01421244498065189, + "acc_norm": 0.44283276450511944, + "acc_norm_stderr": 0.014515573873348914 + }, + "harness|ko_hellaswag|10": { + "acc": 0.36656044612626965, + "acc_stderr": 0.004808802114592832, + "acc_norm": 0.4733120892252539, + "acc_norm_stderr": 
0.004982668452118939 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.03811079669833531, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.03811079669833531 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5825242718446602, + "acc_stderr": 0.04882840548212238, + "acc_norm": 0.5825242718446602, + "acc_norm_stderr": 0.04882840548212238 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.46360153256704983, + "acc_stderr": 0.01783252407959326, + "acc_norm": 0.46360153256704983, + "acc_norm_stderr": 0.01783252407959326 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3851851851851852, + "acc_stderr": 0.042039210401562783, + "acc_norm": 0.3851851851851852, + "acc_norm_stderr": 0.042039210401562783 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.49361702127659574, + "acc_stderr": 0.03268335899936338, + "acc_norm": 0.49361702127659574, + "acc_norm_stderr": 0.03268335899936338 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39759036144578314, + "acc_stderr": 0.038099730845402184, + "acc_norm": 0.39759036144578314, + "acc_norm_stderr": 0.038099730845402184 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5209003215434084, + "acc_stderr": 0.02837327096106942, + "acc_norm": 0.5209003215434084, + "acc_norm_stderr": 0.02837327096106942 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.49327354260089684, + "acc_stderr": 0.03355476596234354, + "acc_norm": 0.49327354260089684, + "acc_norm_stderr": 0.03355476596234354 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.46564885496183206, + "acc_stderr": 0.043749285605997376, + "acc_norm": 0.46564885496183206, + "acc_norm_stderr": 0.043749285605997376 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.45, + "acc_stderr": 0.04999999999999999, + "acc_norm": 0.45, + "acc_norm_stderr": 
0.04999999999999999 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5252525252525253, + "acc_stderr": 0.03557806245087313, + "acc_norm": 0.5252525252525253, + "acc_norm_stderr": 0.03557806245087313 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5448275862068965, + "acc_stderr": 0.04149886942192118, + "acc_norm": 0.5448275862068965, + "acc_norm_stderr": 0.04149886942192118 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.04835503696107223, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.04835503696107223 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5504201680672269, + "acc_stderr": 0.03231293497137707, + "acc_norm": 0.5504201680672269, + "acc_norm_stderr": 0.03231293497137707 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5230769230769231, + "acc_stderr": 0.025323990861736246, + "acc_norm": 0.5230769230769231, + "acc_norm_stderr": 0.025323990861736246 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6481481481481481, + "acc_stderr": 0.04616631111801714, + "acc_norm": 0.6481481481481481, + "acc_norm_stderr": 0.04616631111801714 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39901477832512317, + "acc_stderr": 0.03445487686264716, + "acc_norm": 0.39901477832512317, + "acc_norm_stderr": 0.03445487686264716 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5290322580645161, + "acc_stderr": 0.028396016402760994, + "acc_norm": 0.5290322580645161, + "acc_norm_stderr": 0.028396016402760994 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7393162393162394, + "acc_stderr": 0.02876034895652341, + 
"acc_norm": 0.7393162393162394, + "acc_norm_stderr": 0.02876034895652341 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5094339622641509, + "acc_stderr": 0.0307673947078081, + "acc_norm": 0.5094339622641509, + "acc_norm_stderr": 0.0307673947078081 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5181818181818182, + "acc_stderr": 0.04785964010794916, + "acc_norm": 0.5181818181818182, + "acc_norm_stderr": 0.04785964010794916 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3962962962962963, + "acc_stderr": 0.029822619458533997, + "acc_norm": 0.3962962962962963, + "acc_norm_stderr": 0.029822619458533997 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3973509933774834, + "acc_stderr": 0.03995524007681681, + "acc_norm": 0.3973509933774834, + "acc_norm_stderr": 0.03995524007681681 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.681592039800995, + "acc_stderr": 0.03294118479054095, + "acc_norm": 0.681592039800995, + "acc_norm_stderr": 0.03294118479054095 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.45664739884393063, + "acc_stderr": 0.03798106566014498, + "acc_norm": 0.45664739884393063, + "acc_norm_stderr": 0.03798106566014498 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.4126984126984127, + "acc_stderr": 0.02535574126305527, + "acc_norm": 0.4126984126984127, + "acc_norm_stderr": 0.02535574126305527 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4791666666666667, + "acc_stderr": 0.041775789507399935, + "acc_norm": 0.4791666666666667, + "acc_norm_stderr": 0.041775789507399935 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.68, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.68, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5722543352601156, + 
"acc_stderr": 0.026636539741116082, + "acc_norm": 0.5722543352601156, + "acc_norm_stderr": 0.026636539741116082 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.44785276073619634, + "acc_stderr": 0.03906947479456601, + "acc_norm": 0.44785276073619634, + "acc_norm_stderr": 0.03906947479456601 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5092592592592593, + "acc_stderr": 0.027815973433878014, + "acc_norm": 0.5092592592592593, + "acc_norm_stderr": 0.027815973433878014 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5906735751295337, + "acc_stderr": 0.03548608168860806, + "acc_norm": 0.5906735751295337, + "acc_norm_stderr": 0.03548608168860806 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04434600701584925, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04434600701584925 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5871559633027523, + "acc_stderr": 0.02110912813341392, + "acc_norm": 0.5871559633027523, + "acc_norm_stderr": 0.02110912813341392 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3968253968253968, + "acc_stderr": 0.04375888492727062, + "acc_norm": 0.3968253968253968, + "acc_norm_stderr": 0.04375888492727062 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5326797385620915, + "acc_stderr": 0.028568699752225875, + "acc_norm": 0.5326797385620915, + "acc_norm_stderr": 0.028568699752225875 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7107438016528925, + "acc_stderr": 0.04139112727635464, + "acc_norm": 0.7107438016528925, + "acc_norm_stderr": 0.04139112727635464 + }, + "harness|ko_mmlu_astronomy|5": { + 
"acc": 0.506578947368421, + "acc_stderr": 0.040685900502249704, + "acc_norm": 0.506578947368421, + "acc_norm_stderr": 0.040685900502249704 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.42483660130718953, + "acc_stderr": 0.019997973035458333, + "acc_norm": 0.42483660130718953, + "acc_norm_stderr": 0.019997973035458333 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.35106382978723405, + "acc_stderr": 0.02847350127296376, + "acc_norm": 0.35106382978723405, + "acc_norm_stderr": 0.02847350127296376 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.49107142857142855, + "acc_stderr": 0.04745033255489122, + "acc_norm": 0.49107142857142855, + "acc_norm_stderr": 0.04745033255489122 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.39814814814814814, + "acc_stderr": 0.03338473403207401, + "acc_norm": 0.39814814814814814, + "acc_norm_stderr": 0.03338473403207401 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.32513966480446926, + "acc_stderr": 0.015666542785053566, + "acc_norm": 0.32513966480446926, + "acc_norm_stderr": 0.015666542785053566 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.71, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.71, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3786764705882353, + "acc_stderr": 0.02946513363977613, + "acc_norm": 0.3786764705882353, + "acc_norm_stderr": 0.02946513363977613 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6285714285714286, + "acc_stderr": 0.03093285879278986, + "acc_norm": 0.6285714285714286, + "acc_norm_stderr": 0.03093285879278986 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6371308016877637, + "acc_stderr": 0.03129920825530213, + "acc_norm": 0.6371308016877637, + 
"acc_norm_stderr": 0.03129920825530213 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.37222946544980445, + "acc_stderr": 0.012346241297204368, + "acc_norm": 0.37222946544980445, + "acc_norm_stderr": 0.012346241297204368 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5637254901960784, + "acc_stderr": 0.03480693138457039, + "acc_norm": 0.5637254901960784, + "acc_norm_stderr": 0.03480693138457039 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6545454545454545, + "acc_stderr": 0.037131580674819135, + "acc_norm": 0.6545454545454545, + "acc_norm_stderr": 0.037131580674819135 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.29498164014687883, + "mc1_stderr": 0.015964400965589674, + "mc2": 0.48203869932917187, + "mc2_stderr": 0.016016757216134405 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5454545454545454, + "acc_stderr": 0.017119172208061504, + "acc_norm": 0.5560802833530106, + "acc_norm_stderr": 0.017081884623542543 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + 
"harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "chlee10/T3Q-Llama3-8B-Inst-sft1.0", + "model_sha": "fc787573395184e2bc4cb9ddd991608ec54d19b1", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ 
No newline at end of file diff --git a/chlee10/T3Q-Llama3-8B-dpo-v2.0/result_2024-04-30 11:20:31.json b/chlee10/T3Q-Llama3-8B-dpo-v2.0/result_2024-04-30 11:20:31.json new file mode 100644 index 0000000000000000000000000000000000000000..cc8c53e19dc62e357fc369d32e047f882cd5e142 --- /dev/null +++ b/chlee10/T3Q-Llama3-8B-dpo-v2.0/result_2024-04-30 11:20:31.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.39505119453924914, + "acc_stderr": 0.014285898292938167, + "acc_norm": 0.4513651877133106, + "acc_norm_stderr": 0.014542104569955265 + }, + "harness|ko_hellaswag|10": { + "acc": 0.41127265484963155, + "acc_stderr": 0.004910588449330012, + "acc_norm": 0.55646285600478, + "acc_norm_stderr": 0.004957863944093126 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.672514619883041, + "acc_stderr": 0.035993357714560276, + "acc_norm": 0.672514619883041, + "acc_norm_stderr": 0.035993357714560276 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.7378640776699029, + "acc_stderr": 0.04354631077260597, + "acc_norm": 0.7378640776699029, + "acc_norm_stderr": 0.04354631077260597 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5734355044699873, + "acc_stderr": 0.01768606697567565, + "acc_norm": 0.5734355044699873, + "acc_norm_stderr": 0.01768606697567565 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4148148148148148, + "acc_stderr": 0.04256193767901407, + "acc_norm": 0.4148148148148148, + "acc_norm_stderr": 0.04256193767901407 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4425531914893617, + "acc_stderr": 0.03246956919789958, + "acc_norm": 0.4425531914893617, + "acc_norm_stderr": 0.03246956919789958 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4578313253012048, + "acc_stderr": 0.03878626771002361, + "acc_norm": 0.4578313253012048, + "acc_norm_stderr": 
0.03878626771002361 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5627009646302251, + "acc_stderr": 0.028173917761762902, + "acc_norm": 0.5627009646302251, + "acc_norm_stderr": 0.028173917761762902 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5112107623318386, + "acc_stderr": 0.033549366530984746, + "acc_norm": 0.5112107623318386, + "acc_norm_stderr": 0.033549366530984746 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5801526717557252, + "acc_stderr": 0.04328577215262971, + "acc_norm": 0.5801526717557252, + "acc_norm_stderr": 0.04328577215262971 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6767676767676768, + "acc_stderr": 0.033322999210706465, + "acc_norm": 0.6767676767676768, + "acc_norm_stderr": 0.033322999210706465 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5586206896551724, + "acc_stderr": 0.04137931034482757, + "acc_norm": 0.5586206896551724, + "acc_norm_stderr": 0.04137931034482757 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.30392156862745096, + "acc_stderr": 0.04576665403207762, + "acc_norm": 0.30392156862745096, + "acc_norm_stderr": 0.04576665403207762 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5588235294117647, + "acc_stderr": 0.0322529423239964, + "acc_norm": 0.5588235294117647, + "acc_norm_stderr": 0.0322529423239964 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5230769230769231, + "acc_stderr": 0.025323990861736246, + "acc_norm": 0.5230769230769231, + "acc_norm_stderr": 0.025323990861736246 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.41, + "acc_stderr": 0.04943110704237101, + "acc_norm": 0.41, + 
"acc_norm_stderr": 0.04943110704237101 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6018518518518519, + "acc_stderr": 0.04732332615978815, + "acc_norm": 0.6018518518518519, + "acc_norm_stderr": 0.04732332615978815 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4187192118226601, + "acc_stderr": 0.034711928605184676, + "acc_norm": 0.4187192118226601, + "acc_norm_stderr": 0.034711928605184676 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5580645161290323, + "acc_stderr": 0.028251557906849748, + "acc_norm": 0.5580645161290323, + "acc_norm_stderr": 0.028251557906849748 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7606837606837606, + "acc_stderr": 0.027951826808924333, + "acc_norm": 0.7606837606837606, + "acc_norm_stderr": 0.027951826808924333 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.49433962264150944, + "acc_stderr": 0.03077090076385131, + "acc_norm": 0.49433962264150944, + "acc_norm_stderr": 0.03077090076385131 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5818181818181818, + "acc_stderr": 0.04724577405731572, + "acc_norm": 0.5818181818181818, + "acc_norm_stderr": 0.04724577405731572 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.40370370370370373, + "acc_stderr": 0.029914812342227624, + "acc_norm": 0.40370370370370373, + "acc_norm_stderr": 0.029914812342227624 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.4105960264900662, + "acc_stderr": 0.0401668959484993, + "acc_norm": 0.4105960264900662, + "acc_norm_stderr": 0.0401668959484993 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6467661691542289, + "acc_stderr": 0.03379790611796777, + "acc_norm": 0.6467661691542289, + "acc_norm_stderr": 0.03379790611796777 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.48554913294797686, + "acc_stderr": 0.03810871630454764, + "acc_norm": 0.48554913294797686, + "acc_norm_stderr": 0.03810871630454764 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 
0.37037037037037035, + "acc_stderr": 0.024870815251057093, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.024870815251057093 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5138888888888888, + "acc_stderr": 0.04179596617581, + "acc_norm": 0.5138888888888888, + "acc_norm_stderr": 0.04179596617581 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.73, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.73, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5606936416184971, + "acc_stderr": 0.026720034380514998, + "acc_norm": 0.5606936416184971, + "acc_norm_stderr": 0.026720034380514998 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.49693251533742333, + "acc_stderr": 0.03928297078179663, + "acc_norm": 0.49693251533742333, + "acc_norm_stderr": 0.03928297078179663 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.558641975308642, + "acc_stderr": 0.02762873715566877, + "acc_norm": 0.558641975308642, + "acc_norm_stderr": 0.02762873715566877 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6476683937823834, + "acc_stderr": 0.03447478286414357, + "acc_norm": 0.6476683937823834, + "acc_norm_stderr": 0.03447478286414357 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.37719298245614036, + "acc_stderr": 0.04559522141958216, + "acc_norm": 0.37719298245614036, + "acc_norm_stderr": 0.04559522141958216 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.581651376146789, + "acc_stderr": 0.021149548596443888, + "acc_norm": 0.581651376146789, + "acc_norm_stderr": 0.021149548596443888 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 
0.42063492063492064, + "acc_stderr": 0.04415438226743744, + "acc_norm": 0.42063492063492064, + "acc_norm_stderr": 0.04415438226743744 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5686274509803921, + "acc_stderr": 0.028358956313423552, + "acc_norm": 0.5686274509803921, + "acc_norm_stderr": 0.028358956313423552 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.62, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.62, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7107438016528925, + "acc_stderr": 0.04139112727635463, + "acc_norm": 0.7107438016528925, + "acc_norm_stderr": 0.04139112727635463 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5460526315789473, + "acc_stderr": 0.04051646342874144, + "acc_norm": 0.5460526315789473, + "acc_norm_stderr": 0.04051646342874144 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.44281045751633985, + "acc_stderr": 0.020095083154577347, + "acc_norm": 0.44281045751633985, + "acc_norm_stderr": 0.020095083154577347 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.35106382978723405, + "acc_stderr": 0.028473501272963768, + "acc_norm": 0.35106382978723405, + "acc_norm_stderr": 0.028473501272963768 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4642857142857143, + "acc_stderr": 0.04733667890053756, + "acc_norm": 0.4642857142857143, + "acc_norm_stderr": 0.04733667890053756 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4583333333333333, + "acc_stderr": 0.03398110890294636, + "acc_norm": 0.4583333333333333, + "acc_norm_stderr": 0.03398110890294636 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.3541899441340782, + "acc_stderr": 0.015995644947299232, + "acc_norm": 0.3541899441340782, + "acc_norm_stderr": 0.015995644947299232 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + 
"harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4117647058823529, + "acc_stderr": 0.029896163033125464, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.029896163033125464 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6163265306122448, + "acc_stderr": 0.031130880396235946, + "acc_norm": 0.6163265306122448, + "acc_norm_stderr": 0.031130880396235946 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6962025316455697, + "acc_stderr": 0.029936696387138615, + "acc_norm": 0.6962025316455697, + "acc_norm_stderr": 0.029936696387138615 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3767926988265971, + "acc_stderr": 0.012376459593894397, + "acc_norm": 0.3767926988265971, + "acc_norm_stderr": 0.012376459593894397 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5980392156862745, + "acc_stderr": 0.03441190023482465, + "acc_norm": 0.5980392156862745, + "acc_norm_stderr": 0.03441190023482465 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6424242424242425, + "acc_stderr": 0.03742597043806587, + "acc_norm": 0.6424242424242425, + "acc_norm_stderr": 0.03742597043806587 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3072215422276622, + "mc1_stderr": 0.016150201321323006, + "mc2": 0.4846186641562934, + "mc2_stderr": 0.015578680439855304 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.46635182998819363, + "acc_stderr": 0.01715138411713187, + "acc_norm": 0.5844155844155844, + "acc_norm_stderr": 0.016943586313076568 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + 
"harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + 
"harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "chlee10/T3Q-Llama3-8B-dpo-v2.0", + "model_sha": "4125bb8807297d6922489f04f98007e71d11aabe", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/chlee10/T3Q-Llama3-8B-sft1.0-dpo1.0/result_2024-04-25 21:39:19.json b/chlee10/T3Q-Llama3-8B-sft1.0-dpo1.0/result_2024-04-25 21:39:19.json new file mode 100644 index 0000000000000000000000000000000000000000..660c9fff29b003ced9ae58c1646bed84482791e7 --- /dev/null +++ b/chlee10/T3Q-Llama3-8B-sft1.0-dpo1.0/result_2024-04-25 21:39:19.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3839590443686007, + "acc_stderr": 0.01421244498065189, + "acc_norm": 0.4445392491467577, + "acc_norm_stderr": 0.014521226405627072 + }, + "harness|ko_hellaswag|10": { + "acc": 0.371539533957379, + "acc_stderr": 0.004822286556305219, + "acc_norm": 0.48227444732125074, + "acc_norm_stderr": 0.004986644894743119 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5672514619883041, + "acc_stderr": 0.03799978644370608, + "acc_norm": 0.5672514619883041, + "acc_norm_stderr": 0.03799978644370608 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5922330097087378, + "acc_stderr": 0.04865777570410769, + "acc_norm": 0.5922330097087378, + "acc_norm_stderr": 0.04865777570410769 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4623243933588761, + "acc_stderr": 0.01782913176428719, + "acc_norm": 0.4623243933588761, + 
"acc_norm_stderr": 0.01782913176428719 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.04171654161354544, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.04171654161354544 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847415, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847415 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4808510638297872, + "acc_stderr": 0.032662042990646775, + "acc_norm": 0.4808510638297872, + "acc_norm_stderr": 0.032662042990646775 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39759036144578314, + "acc_stderr": 0.038099730845402184, + "acc_norm": 0.39759036144578314, + "acc_norm_stderr": 0.038099730845402184 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5241157556270096, + "acc_stderr": 0.02836504154256459, + "acc_norm": 0.5241157556270096, + "acc_norm_stderr": 0.02836504154256459 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5022421524663677, + "acc_stderr": 0.033557465352232634, + "acc_norm": 0.5022421524663677, + "acc_norm_stderr": 0.033557465352232634 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4732824427480916, + "acc_stderr": 0.04379024936553894, + "acc_norm": 0.4732824427480916, + "acc_norm_stderr": 0.04379024936553894 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5303030303030303, + "acc_stderr": 0.0355580405176393, + "acc_norm": 0.5303030303030303, + "acc_norm_stderr": 0.0355580405176393 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5448275862068965, + "acc_stderr": 0.04149886942192118, + "acc_norm": 0.5448275862068965, + "acc_norm_stderr": 0.04149886942192118 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.35294117647058826, + "acc_stderr": 0.04755129616062946, + "acc_norm": 
0.35294117647058826, + "acc_norm_stderr": 0.04755129616062946 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5588235294117647, + "acc_stderr": 0.0322529423239964, + "acc_norm": 0.5588235294117647, + "acc_norm_stderr": 0.0322529423239964 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5205128205128206, + "acc_stderr": 0.02532966316348994, + "acc_norm": 0.5205128205128206, + "acc_norm_stderr": 0.02532966316348994 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6388888888888888, + "acc_stderr": 0.04643454608906275, + "acc_norm": 0.6388888888888888, + "acc_norm_stderr": 0.04643454608906275 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4187192118226601, + "acc_stderr": 0.034711928605184676, + "acc_norm": 0.4187192118226601, + "acc_norm_stderr": 0.034711928605184676 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.535483870967742, + "acc_stderr": 0.028372287797962945, + "acc_norm": 0.535483870967742, + "acc_norm_stderr": 0.028372287797962945 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7350427350427351, + "acc_stderr": 0.028911208802749465, + "acc_norm": 0.7350427350427351, + "acc_norm_stderr": 0.028911208802749465 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5018867924528302, + "acc_stderr": 0.030772653642075664, + "acc_norm": 0.5018867924528302, + "acc_norm_stderr": 0.030772653642075664 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5, + "acc_stderr": 0.04789131426105757, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04789131426105757 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.4185185185185185, + "acc_stderr": 0.030078013075022062, 
+ "acc_norm": 0.4185185185185185, + "acc_norm_stderr": 0.030078013075022062 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.37748344370860926, + "acc_stderr": 0.039580272311215706, + "acc_norm": 0.37748344370860926, + "acc_norm_stderr": 0.039580272311215706 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6965174129353234, + "acc_stderr": 0.03251006816458618, + "acc_norm": 0.6965174129353234, + "acc_norm_stderr": 0.03251006816458618 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.45664739884393063, + "acc_stderr": 0.03798106566014498, + "acc_norm": 0.45664739884393063, + "acc_norm_stderr": 0.03798106566014498 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3941798941798942, + "acc_stderr": 0.025167982333894143, + "acc_norm": 0.3941798941798942, + "acc_norm_stderr": 0.025167982333894143 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4861111111111111, + "acc_stderr": 0.041795966175810016, + "acc_norm": 0.4861111111111111, + "acc_norm_stderr": 0.041795966175810016 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.68, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.68, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5867052023121387, + "acc_stderr": 0.026511261369409244, + "acc_norm": 0.5867052023121387, + "acc_norm_stderr": 0.026511261369409244 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.44785276073619634, + "acc_stderr": 0.03906947479456601, + "acc_norm": 0.44785276073619634, + "acc_norm_stderr": 0.03906947479456601 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5092592592592593, + "acc_stderr": 0.027815973433878014, + "acc_norm": 0.5092592592592593, + "acc_norm_stderr": 0.027815973433878014 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.39, + "acc_stderr": 
0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5906735751295337, + "acc_stderr": 0.03548608168860806, + "acc_norm": 0.5906735751295337, + "acc_norm_stderr": 0.03548608168860806 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2982456140350877, + "acc_stderr": 0.043036840335373173, + "acc_norm": 0.2982456140350877, + "acc_norm_stderr": 0.043036840335373173 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5981651376146789, + "acc_stderr": 0.021020106172997006, + "acc_norm": 0.5981651376146789, + "acc_norm_stderr": 0.021020106172997006 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.38095238095238093, + "acc_stderr": 0.043435254289490965, + "acc_norm": 0.38095238095238093, + "acc_norm_stderr": 0.043435254289490965 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.545751633986928, + "acc_stderr": 0.02850980780262659, + "acc_norm": 0.545751633986928, + "acc_norm_stderr": 0.02850980780262659 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7107438016528925, + "acc_stderr": 0.04139112727635464, + "acc_norm": 0.7107438016528925, + "acc_norm_stderr": 0.04139112727635464 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5131578947368421, + "acc_stderr": 0.04067533136309172, + "acc_norm": 0.5131578947368421, + "acc_norm_stderr": 0.04067533136309172 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4199346405228758, + "acc_stderr": 0.019966811178256483, + "acc_norm": 0.4199346405228758, + "acc_norm_stderr": 0.019966811178256483 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3546099290780142, + "acc_stderr": 0.02853865002887864, + "acc_norm": 0.3546099290780142, + "acc_norm_stderr": 0.02853865002887864 + }, + 
"harness|ko_mmlu_machine_learning|5": { + "acc": 0.5, + "acc_stderr": 0.04745789978762494, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04745789978762494 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.41203703703703703, + "acc_stderr": 0.03356787758160834, + "acc_norm": 0.41203703703703703, + "acc_norm_stderr": 0.03356787758160834 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.35307262569832404, + "acc_stderr": 0.01598420454526857, + "acc_norm": 0.35307262569832404, + "acc_norm_stderr": 0.01598420454526857 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.71, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.71, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3713235294117647, + "acc_stderr": 0.02934980313976587, + "acc_norm": 0.3713235294117647, + "acc_norm_stderr": 0.02934980313976587 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6326530612244898, + "acc_stderr": 0.03086214492108756, + "acc_norm": 0.6326530612244898, + "acc_norm_stderr": 0.03086214492108756 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6329113924050633, + "acc_stderr": 0.03137624072561619, + "acc_norm": 0.6329113924050633, + "acc_norm_stderr": 0.03137624072561619 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3728813559322034, + "acc_stderr": 0.012350630058333364, + "acc_norm": 0.3728813559322034, + "acc_norm_stderr": 0.012350630058333364 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5686274509803921, + "acc_stderr": 0.034760990605016355, + "acc_norm": 0.5686274509803921, + "acc_norm_stderr": 0.034760990605016355 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6787878787878788, + "acc_stderr": 0.03646204963253812, + "acc_norm": 
0.6787878787878788, + "acc_norm_stderr": 0.03646204963253812 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3023255813953488, + "mc1_stderr": 0.016077509266133033, + "mc2": 0.4854293191581354, + "mc2_stderr": 0.016010786172757696 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5478158205430933, + "acc_stderr": 0.017111567130916792, + "acc_norm": 0.5548996458087367, + "acc_norm_stderr": 0.01708641743100547 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + 
"harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "chlee10/T3Q-Llama3-8B-sft1.0-dpo1.0", + "model_sha": "9d4145672bca05d0db5ada69f9d17cfa660a50c7", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/chlee10/T3Q-Merge-SOLAR/result_2024-03-12 09:12:06.json b/chlee10/T3Q-Merge-SOLAR/result_2024-03-12 09:12:06.json new file mode 100644 index 0000000000000000000000000000000000000000..2962009d063ecb69f3939d3480255aedb1cf1f78 --- /dev/null +++ b/chlee10/T3Q-Merge-SOLAR/result_2024-03-12 09:12:06.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.5042662116040956, + "acc_stderr": 0.014610858923956952, + "acc_norm": 0.5614334470989761, + "acc_norm_stderr": 0.014500682618212865 + }, + 
"harness|ko_hellaswag|10": { + "acc": 0.5001991635132443, + "acc_stderr": 0.00498978101559546, + "acc_norm": 0.6901015733917546, + "acc_norm_stderr": 0.004615063817741868 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6198830409356725, + "acc_stderr": 0.037229657413855394, + "acc_norm": 0.6198830409356725, + "acc_norm_stderr": 0.037229657413855394 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6504854368932039, + "acc_stderr": 0.047211885060971716, + "acc_norm": 0.6504854368932039, + "acc_norm_stderr": 0.047211885060971716 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6577266922094508, + "acc_stderr": 0.01696703176641363, + "acc_norm": 0.6577266922094508, + "acc_norm_stderr": 0.01696703176641363 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45185185185185184, + "acc_stderr": 0.04299268905480863, + "acc_norm": 0.45185185185185184, + "acc_norm_stderr": 0.04299268905480863 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4595744680851064, + "acc_stderr": 0.032579014820998356, + "acc_norm": 0.4595744680851064, + "acc_norm_stderr": 0.032579014820998356 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.43373493975903615, + "acc_stderr": 0.03858158940685515, + "acc_norm": 0.43373493975903615, + "acc_norm_stderr": 0.03858158940685515 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6141479099678456, + "acc_stderr": 0.027648149599751468, + "acc_norm": 0.6141479099678456, + "acc_norm_stderr": 0.027648149599751468 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5739910313901345, + "acc_stderr": 0.033188332862172806, + "acc_norm": 0.5739910313901345, + "acc_norm_stderr": 0.033188332862172806 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6412213740458015, + "acc_stderr": 0.04206739313864908, + "acc_norm": 0.6412213740458015, + "acc_norm_stderr": 
0.04206739313864908 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.52, + "acc_stderr": 0.05021167315686781, + "acc_norm": 0.52, + "acc_norm_stderr": 0.05021167315686781 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.702020202020202, + "acc_stderr": 0.03258630383836556, + "acc_norm": 0.702020202020202, + "acc_norm_stderr": 0.03258630383836556 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4206896551724138, + "acc_stderr": 0.0411391498118926, + "acc_norm": 0.4206896551724138, + "acc_norm_stderr": 0.0411391498118926 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.4019607843137255, + "acc_stderr": 0.048786087144669955, + "acc_norm": 0.4019607843137255, + "acc_norm_stderr": 0.048786087144669955 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6218487394957983, + "acc_stderr": 0.031499305777849054, + "acc_norm": 0.6218487394957983, + "acc_norm_stderr": 0.031499305777849054 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5717948717948718, + "acc_stderr": 0.025088301454694827, + "acc_norm": 0.5717948717948718, + "acc_norm_stderr": 0.025088301454694827 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6018518518518519, + "acc_stderr": 0.04732332615978814, + "acc_norm": 0.6018518518518519, + "acc_norm_stderr": 0.04732332615978814 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4039408866995074, + "acc_stderr": 0.03452453903822039, + "acc_norm": 0.4039408866995074, + "acc_norm_stderr": 0.03452453903822039 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5903225806451613, + "acc_stderr": 0.02797605491534737, + "acc_norm": 0.5903225806451613, + 
"acc_norm_stderr": 0.02797605491534737 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7478632478632479, + "acc_stderr": 0.02844796547623102, + "acc_norm": 0.7478632478632479, + "acc_norm_stderr": 0.02844796547623102 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5169811320754717, + "acc_stderr": 0.030755120364119898, + "acc_norm": 0.5169811320754717, + "acc_norm_stderr": 0.030755120364119898 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5818181818181818, + "acc_stderr": 0.04724577405731571, + "acc_norm": 0.5818181818181818, + "acc_norm_stderr": 0.04724577405731571 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.34814814814814815, + "acc_stderr": 0.029045600290616255, + "acc_norm": 0.34814814814814815, + "acc_norm_stderr": 0.029045600290616255 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.03757949922943343, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.03757949922943343 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6716417910447762, + "acc_stderr": 0.033206858897443244, + "acc_norm": 0.6716417910447762, + "acc_norm_stderr": 0.033206858897443244 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4913294797687861, + "acc_stderr": 0.038118909889404126, + "acc_norm": 0.4913294797687861, + "acc_norm_stderr": 0.038118909889404126 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.4312169312169312, + "acc_stderr": 0.025506481698138208, + "acc_norm": 0.4312169312169312, + "acc_norm_stderr": 0.025506481698138208 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5347222222222222, + "acc_stderr": 0.04171115858181618, + "acc_norm": 0.5347222222222222, + "acc_norm_stderr": 0.04171115858181618 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.68, + "acc_stderr": 
0.046882617226215034, + "acc_norm": 0.68, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5289017341040463, + "acc_stderr": 0.026874085883518348, + "acc_norm": 0.5289017341040463, + "acc_norm_stderr": 0.026874085883518348 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5153374233128835, + "acc_stderr": 0.03926522378708843, + "acc_norm": 0.5153374233128835, + "acc_norm_stderr": 0.03926522378708843 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5895061728395061, + "acc_stderr": 0.027371350925124764, + "acc_norm": 0.5895061728395061, + "acc_norm_stderr": 0.027371350925124764 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7098445595854922, + "acc_stderr": 0.03275264467791516, + "acc_norm": 0.7098445595854922, + "acc_norm_stderr": 0.03275264467791516 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.47368421052631576, + "acc_stderr": 0.046970851366478626, + "acc_norm": 0.47368421052631576, + "acc_norm_stderr": 0.046970851366478626 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6825688073394496, + "acc_stderr": 0.0199571521984605, + "acc_norm": 0.6825688073394496, + "acc_norm_stderr": 0.0199571521984605 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.38095238095238093, + "acc_stderr": 0.043435254289490965, + "acc_norm": 0.38095238095238093, + "acc_norm_stderr": 0.043435254289490965 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5751633986928104, + "acc_stderr": 0.028304576673141107, + "acc_norm": 0.5751633986928104, + "acc_norm_stderr": 0.028304576673141107 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.59, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.59, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 
0.6942148760330579, + "acc_stderr": 0.04205953933884122, + "acc_norm": 0.6942148760330579, + "acc_norm_stderr": 0.04205953933884122 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5855263157894737, + "acc_stderr": 0.040089737857792046, + "acc_norm": 0.5855263157894737, + "acc_norm_stderr": 0.040089737857792046 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5065359477124183, + "acc_stderr": 0.020226106567657807, + "acc_norm": 0.5065359477124183, + "acc_norm_stderr": 0.020226106567657807 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3475177304964539, + "acc_stderr": 0.028406627809590947, + "acc_norm": 0.3475177304964539, + "acc_norm_stderr": 0.028406627809590947 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.39285714285714285, + "acc_stderr": 0.04635550135609976, + "acc_norm": 0.39285714285714285, + "acc_norm_stderr": 0.04635550135609976 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5092592592592593, + "acc_stderr": 0.03409386946992699, + "acc_norm": 0.5092592592592593, + "acc_norm_stderr": 0.03409386946992699 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.39776536312849164, + "acc_stderr": 0.016369204971262978, + "acc_norm": 0.39776536312849164, + "acc_norm_stderr": 0.016369204971262978 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4889705882352941, + "acc_stderr": 0.030365446477275668, + "acc_norm": 0.4889705882352941, + "acc_norm_stderr": 0.030365446477275668 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6040816326530613, + "acc_stderr": 0.03130802899065686, + "acc_norm": 0.6040816326530613, + "acc_norm_stderr": 
0.03130802899065686 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7468354430379747, + "acc_stderr": 0.02830465794303531, + "acc_norm": 0.7468354430379747, + "acc_norm_stderr": 0.02830465794303531 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.4406779661016949, + "acc_stderr": 0.012680037994097055, + "acc_norm": 0.4406779661016949, + "acc_norm_stderr": 0.012680037994097055 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6323529411764706, + "acc_stderr": 0.03384132045674118, + "acc_norm": 0.6323529411764706, + "acc_norm_stderr": 0.03384132045674118 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6787878787878788, + "acc_stderr": 0.03646204963253812, + "acc_norm": 0.6787878787878788, + "acc_norm_stderr": 0.03646204963253812 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.4908200734394125, + "mc1_stderr": 0.017500550724819746, + "mc2": 0.6705772574223984, + "mc2_stderr": 0.015718273421063033 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5478158205430933, + "acc_stderr": 0.017111567130916796, + "acc_norm": 0.5525383707201889, + "acc_norm_stderr": 0.01709519030150058 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + 
"harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "chlee10/T3Q-Merge-SOLAR", + "model_sha": "d78e59009cdfc3b5481548978e1b0d9bdc183401", + 
"model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/chlee10/T3Q-ko-solar-sft-v1.0/result_2024-03-18 18:06:19.json b/chlee10/T3Q-ko-solar-sft-v1.0/result_2024-03-18 18:06:19.json new file mode 100644 index 0000000000000000000000000000000000000000..d298b6e71abae38e922315cf162b849f52fe628e --- /dev/null +++ b/chlee10/T3Q-ko-solar-sft-v1.0/result_2024-03-18 18:06:19.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.6672354948805461, + "acc_stderr": 0.013769863046192309, + "acc_norm": 0.7158703071672355, + "acc_norm_stderr": 0.013179442447653887 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4453296156144194, + "acc_stderr": 0.0049598642991781315, + "acc_norm": 0.5913164708225453, + "acc_norm_stderr": 0.004905859114942294 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6783625730994152, + "acc_stderr": 0.03582529442573122, + "acc_norm": 0.6783625730994152, + "acc_norm_stderr": 0.03582529442573122 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6990291262135923, + "acc_stderr": 0.045416094465039476, + "acc_norm": 0.6990291262135923, + "acc_norm_stderr": 0.045416094465039476 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.7075351213282248, + "acc_stderr": 0.016267000684598645, + "acc_norm": 0.7075351213282248, + "acc_norm_stderr": 0.016267000684598645 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.043163785995113245, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.043163785995113245 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.5106382978723404, + "acc_stderr": 0.03267862331014063, + "acc_norm": 0.5106382978723404, + "acc_norm_stderr": 
0.03267862331014063 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4578313253012048, + "acc_stderr": 0.0387862677100236, + "acc_norm": 0.4578313253012048, + "acc_norm_stderr": 0.0387862677100236 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.617363344051447, + "acc_stderr": 0.027604689028581975, + "acc_norm": 0.617363344051447, + "acc_norm_stderr": 0.027604689028581975 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.6098654708520179, + "acc_stderr": 0.03273766725459157, + "acc_norm": 0.6098654708520179, + "acc_norm_stderr": 0.03273766725459157 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5572519083969466, + "acc_stderr": 0.04356447202665069, + "acc_norm": 0.5572519083969466, + "acc_norm_stderr": 0.04356447202665069 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.48, + "acc_stderr": 0.05021167315686779, + "acc_norm": 0.48, + "acc_norm_stderr": 0.05021167315686779 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6616161616161617, + "acc_stderr": 0.033711241426263035, + "acc_norm": 0.6616161616161617, + "acc_norm_stderr": 0.033711241426263035 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5103448275862069, + "acc_stderr": 0.04165774775728762, + "acc_norm": 0.5103448275862069, + "acc_norm_stderr": 0.04165774775728762 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.35294117647058826, + "acc_stderr": 0.047551296160629475, + "acc_norm": 0.35294117647058826, + "acc_norm_stderr": 0.047551296160629475 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6092436974789915, + "acc_stderr": 0.031693802357129965, + "acc_norm": 0.6092436974789915, + "acc_norm_stderr": 0.031693802357129965 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5615384615384615, + "acc_stderr": 0.02515826601686861, + "acc_norm": 0.5615384615384615, + "acc_norm_stderr": 0.02515826601686861 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.62, + "acc_stderr": 0.0487831731214563, + 
"acc_norm": 0.62, + "acc_norm_stderr": 0.0487831731214563 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6388888888888888, + "acc_stderr": 0.04643454608906275, + "acc_norm": 0.6388888888888888, + "acc_norm_stderr": 0.04643454608906275 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.034819048444388045, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.034819048444388045 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.603225806451613, + "acc_stderr": 0.027831231605767944, + "acc_norm": 0.603225806451613, + "acc_norm_stderr": 0.027831231605767944 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.811965811965812, + "acc_stderr": 0.02559819368665224, + "acc_norm": 0.811965811965812, + "acc_norm_stderr": 0.02559819368665224 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.569811320754717, + "acc_stderr": 0.030471445867183235, + "acc_norm": 0.569811320754717, + "acc_norm_stderr": 0.030471445867183235 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5909090909090909, + "acc_stderr": 0.04709306978661895, + "acc_norm": 0.5909090909090909, + "acc_norm_stderr": 0.04709306978661895 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.28888888888888886, + "acc_stderr": 0.027634907264178544, + "acc_norm": 0.28888888888888886, + "acc_norm_stderr": 0.027634907264178544 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3509933774834437, + "acc_stderr": 0.03896981964257375, + "acc_norm": 0.3509933774834437, + "acc_norm_stderr": 0.03896981964257375 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6169154228855721, + "acc_stderr": 0.034375193373382504, + "acc_norm": 0.6169154228855721, + "acc_norm_stderr": 0.034375193373382504 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5260115606936416, + 
"acc_stderr": 0.03807301726504514, + "acc_norm": 0.5260115606936416, + "acc_norm_stderr": 0.03807301726504514 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3994708994708995, + "acc_stderr": 0.025225450284067877, + "acc_norm": 0.3994708994708995, + "acc_norm_stderr": 0.025225450284067877 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4791666666666667, + "acc_stderr": 0.041775789507399935, + "acc_norm": 0.4791666666666667, + "acc_norm_stderr": 0.041775789507399935 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.73, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.73, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5549132947976878, + "acc_stderr": 0.02675625512966377, + "acc_norm": 0.5549132947976878, + "acc_norm_stderr": 0.02675625512966377 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5705521472392638, + "acc_stderr": 0.038890666191127236, + "acc_norm": 0.5705521472392638, + "acc_norm_stderr": 0.038890666191127236 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6296296296296297, + "acc_stderr": 0.02686949074481526, + "acc_norm": 0.6296296296296297, + "acc_norm_stderr": 0.02686949074481526 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7098445595854922, + "acc_stderr": 0.03275264467791516, + "acc_norm": 0.7098445595854922, + "acc_norm_stderr": 0.03275264467791516 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.37719298245614036, + "acc_stderr": 0.04559522141958216, + "acc_norm": 0.37719298245614036, + "acc_norm_stderr": 0.04559522141958216 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 
0.7247706422018348, + "acc_stderr": 0.019149093743155193, + "acc_norm": 0.7247706422018348, + "acc_norm_stderr": 0.019149093743155193 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.373015873015873, + "acc_stderr": 0.04325506042017086, + "acc_norm": 0.373015873015873, + "acc_norm_stderr": 0.04325506042017086 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.545751633986928, + "acc_stderr": 0.028509807802626595, + "acc_norm": 0.545751633986928, + "acc_norm_stderr": 0.028509807802626595 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.57, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.57, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.043913262867240704, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.043913262867240704 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5592105263157895, + "acc_stderr": 0.04040311062490435, + "acc_norm": 0.5592105263157895, + "acc_norm_stderr": 0.04040311062490435 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4869281045751634, + "acc_stderr": 0.020220920829626912, + "acc_norm": 0.4869281045751634, + "acc_norm_stderr": 0.020220920829626912 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.375886524822695, + "acc_stderr": 0.028893955412115886, + "acc_norm": 0.375886524822695, + "acc_norm_stderr": 0.028893955412115886 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4017857142857143, + "acc_stderr": 0.04653333146973646, + "acc_norm": 0.4017857142857143, + "acc_norm_stderr": 0.04653333146973646 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.03362277436608043, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.03362277436608043 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.32625698324022345, + "acc_stderr": 0.01568044151888918, + "acc_norm": 0.32625698324022345, + "acc_norm_stderr": 0.01568044151888918 + }, + 
"harness|ko_mmlu_college_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.049020713000019756, + "acc_norm": 0.39, + "acc_norm_stderr": 0.049020713000019756 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.68, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.68, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4852941176470588, + "acc_stderr": 0.03035969707904611, + "acc_norm": 0.4852941176470588, + "acc_norm_stderr": 0.03035969707904611 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5959183673469388, + "acc_stderr": 0.031414708025865865, + "acc_norm": 0.5959183673469388, + "acc_norm_stderr": 0.031414708025865865 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6497890295358649, + "acc_stderr": 0.031052391937584342, + "acc_norm": 0.6497890295358649, + "acc_norm_stderr": 0.031052391937584342 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.4015645371577575, + "acc_stderr": 0.012520315120147125, + "acc_norm": 0.4015645371577575, + "acc_norm_stderr": 0.012520315120147125 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6568627450980392, + "acc_stderr": 0.03332139944668086, + "acc_norm": 0.6568627450980392, + "acc_norm_stderr": 0.03332139944668086 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6121212121212121, + "acc_stderr": 0.0380491365397101, + "acc_norm": 0.6121212121212121, + "acc_norm_stderr": 0.0380491365397101 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.4944920440636475, + "mc1_stderr": 0.017502438990451067, + "mc2": 0.6137347015392837, + "mc2_stderr": 0.015029785876738285 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.45690672963400236, + "acc_stderr": 0.01712638909308678, + "acc_norm": 0.526564344746163, + "acc_norm_stderr": 0.017166075717577747 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, 
+ "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + 
"harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "chlee10/T3Q-ko-solar-sft-v1.0", + "model_sha": "7653a59ef47297617d832d660cd152e68edb0dc2", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/chlee10/T3Q-ko-solar-sft-v2.0/result_2024-03-18 09:14:32.json b/chlee10/T3Q-ko-solar-sft-v2.0/result_2024-03-18 09:14:32.json new file mode 100644 index 0000000000000000000000000000000000000000..22ddc322b5b6350d3cba161ec689d05a0d15440d --- /dev/null +++ b/chlee10/T3Q-ko-solar-sft-v2.0/result_2024-03-18 09:14:32.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.6552901023890785, + "acc_stderr": 0.01388881628678211, + "acc_norm": 0.7192832764505119, + "acc_norm_stderr": 0.013131238126975583 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4446325433180641, + "acc_stderr": 0.004959094146471531, + "acc_norm": 0.5936068512248556, + "acc_norm_stderr": 0.004901558132335523 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6783625730994152, + "acc_stderr": 0.03582529442573122, + "acc_norm": 0.6783625730994152, + "acc_norm_stderr": 0.03582529442573122 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.7087378640776699, + "acc_stderr": 0.044986763205729224, + "acc_norm": 0.7087378640776699, + "acc_norm_stderr": 0.044986763205729224 + }, + 
"harness|ko_mmlu_miscellaneous|5": { + "acc": 0.70242656449553, + "acc_stderr": 0.01634911191290943, + "acc_norm": 0.70242656449553, + "acc_norm_stderr": 0.01634911191290943 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4740740740740741, + "acc_stderr": 0.04313531696750573, + "acc_norm": 0.4740740740740741, + "acc_norm_stderr": 0.04313531696750573 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720683, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720683 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.5234042553191489, + "acc_stderr": 0.0326501947503358, + "acc_norm": 0.5234042553191489, + "acc_norm_stderr": 0.0326501947503358 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4939759036144578, + "acc_stderr": 0.03892212195333047, + "acc_norm": 0.4939759036144578, + "acc_norm_stderr": 0.03892212195333047 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5884244372990354, + "acc_stderr": 0.027950481494401255, + "acc_norm": 0.5884244372990354, + "acc_norm_stderr": 0.027950481494401255 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.6502242152466368, + "acc_stderr": 0.03200736719484503, + "acc_norm": 0.6502242152466368, + "acc_norm_stderr": 0.03200736719484503 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5725190839694656, + "acc_stderr": 0.043389203057924, + "acc_norm": 0.5725190839694656, + "acc_norm_stderr": 0.043389203057924 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.55, + "acc_stderr": 0.05, + "acc_norm": 0.55, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7474747474747475, + "acc_stderr": 0.030954055470365907, + "acc_norm": 0.7474747474747475, + "acc_norm_stderr": 0.030954055470365907 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.6275862068965518, + "acc_stderr": 0.04028731532947559, + "acc_norm": 0.6275862068965518, + "acc_norm_stderr": 0.04028731532947559 + }, + "harness|ko_mmlu_college_physics|5": { + 
"acc": 0.24509803921568626, + "acc_stderr": 0.04280105837364395, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.04280105837364395 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5630252100840336, + "acc_stderr": 0.03221943636566197, + "acc_norm": 0.5630252100840336, + "acc_norm_stderr": 0.03221943636566197 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.6051282051282051, + "acc_stderr": 0.02478431694215641, + "acc_norm": 0.6051282051282051, + "acc_norm_stderr": 0.02478431694215641 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6018518518518519, + "acc_stderr": 0.04732332615978814, + "acc_norm": 0.6018518518518519, + "acc_norm_stderr": 0.04732332615978814 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4088669950738916, + "acc_stderr": 0.034590588158832314, + "acc_norm": 0.4088669950738916, + "acc_norm_stderr": 0.034590588158832314 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6, + "acc_stderr": 0.02786932057166463, + "acc_norm": 0.6, + "acc_norm_stderr": 0.02786932057166463 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.8632478632478633, + "acc_stderr": 0.02250903393707781, + "acc_norm": 0.8632478632478633, + "acc_norm_stderr": 0.02250903393707781 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5471698113207547, + "acc_stderr": 0.03063562795796182, + "acc_norm": 0.5471698113207547, + "acc_norm_stderr": 0.03063562795796182 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6, + "acc_stderr": 0.0469237132203465, + "acc_norm": 0.6, + "acc_norm_stderr": 0.0469237132203465 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 
0.3592592592592593, + "acc_stderr": 0.029252905927251976, + "acc_norm": 0.3592592592592593, + "acc_norm_stderr": 0.029252905927251976 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3509933774834437, + "acc_stderr": 0.03896981964257375, + "acc_norm": 0.3509933774834437, + "acc_norm_stderr": 0.03896981964257375 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7114427860696517, + "acc_stderr": 0.03203841040213321, + "acc_norm": 0.7114427860696517, + "acc_norm_stderr": 0.03203841040213321 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5086705202312138, + "acc_stderr": 0.038118909889404126, + "acc_norm": 0.5086705202312138, + "acc_norm_stderr": 0.038118909889404126 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3968253968253968, + "acc_stderr": 0.025197101074246483, + "acc_norm": 0.3968253968253968, + "acc_norm_stderr": 0.025197101074246483 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5069444444444444, + "acc_stderr": 0.04180806750294938, + "acc_norm": 0.5069444444444444, + "acc_norm_stderr": 0.04180806750294938 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.77, + "acc_stderr": 0.042295258468165065, + "acc_norm": 0.77, + "acc_norm_stderr": 0.042295258468165065 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5982658959537572, + "acc_stderr": 0.026394104177643634, + "acc_norm": 0.5982658959537572, + "acc_norm_stderr": 0.026394104177643634 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.6503067484662577, + "acc_stderr": 0.03746668325470023, + "acc_norm": 0.6503067484662577, + "acc_norm_stderr": 0.03746668325470023 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6018518518518519, + "acc_stderr": 0.027237415094592484, + "acc_norm": 0.6018518518518519, + "acc_norm_stderr": 0.027237415094592484 + }, + 
"harness|ko_mmlu_college_mathematics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7305699481865285, + "acc_stderr": 0.032018671228777947, + "acc_norm": 0.7305699481865285, + "acc_norm_stderr": 0.032018671228777947 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.41228070175438597, + "acc_stderr": 0.04630653203366596, + "acc_norm": 0.41228070175438597, + "acc_norm_stderr": 0.04630653203366596 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.7247706422018348, + "acc_stderr": 0.019149093743155196, + "acc_norm": 0.7247706422018348, + "acc_norm_stderr": 0.019149093743155196 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.36507936507936506, + "acc_stderr": 0.043062412591271526, + "acc_norm": 0.36507936507936506, + "acc_norm_stderr": 0.043062412591271526 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5915032679738562, + "acc_stderr": 0.028146405993096358, + "acc_norm": 0.5915032679738562, + "acc_norm_stderr": 0.028146405993096358 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.62, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.62, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7520661157024794, + "acc_stderr": 0.039418975265163046, + "acc_norm": 0.7520661157024794, + "acc_norm_stderr": 0.039418975265163046 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5789473684210527, + "acc_stderr": 0.0401790127598175, + "acc_norm": 0.5789473684210527, + "acc_norm_stderr": 0.0401790127598175 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.020102583895887184, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.020102583895887184 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.39361702127659576, + "acc_stderr": 0.029144544781596154, + "acc_norm": 
0.39361702127659576, + "acc_norm_stderr": 0.029144544781596154 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4017857142857143, + "acc_stderr": 0.04653333146973646, + "acc_norm": 0.4017857142857143, + "acc_norm_stderr": 0.04653333146973646 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.42592592592592593, + "acc_stderr": 0.03372343271653062, + "acc_norm": 0.42592592592592593, + "acc_norm_stderr": 0.03372343271653062 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.3307262569832402, + "acc_stderr": 0.01573502625896612, + "acc_norm": 0.3307262569832402, + "acc_norm_stderr": 0.01573502625896612 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.53, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.53, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.63, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.63, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5, + "acc_stderr": 0.030372836961539352, + "acc_norm": 0.5, + "acc_norm_stderr": 0.030372836961539352 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6285714285714286, + "acc_stderr": 0.03093285879278987, + "acc_norm": 0.6285714285714286, + "acc_norm_stderr": 0.03093285879278987 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6835443037974683, + "acc_stderr": 0.030274974880218974, + "acc_norm": 0.6835443037974683, + "acc_norm_stderr": 0.030274974880218974 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.39308996088657105, + "acc_stderr": 0.012474899613873956, + "acc_norm": 0.39308996088657105, + "acc_norm_stderr": 0.012474899613873956 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6421568627450981, + "acc_stderr": 0.03364487286088298, + "acc_norm": 0.6421568627450981, + "acc_norm_stderr": 0.03364487286088298 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 
0.6121212121212121, + "acc_stderr": 0.0380491365397101, + "acc_norm": 0.6121212121212121, + "acc_norm_stderr": 0.0380491365397101 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.5385556915544676, + "mc1_stderr": 0.017451384104637455, + "mc2": 0.6483349686930436, + "mc2_stderr": 0.014972680382687285 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4309327036599764, + "acc_stderr": 0.017025558196043136, + "acc_norm": 0.4651711924439197, + "acc_norm_stderr": 0.017148598015747422 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + 
"harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "chlee10/T3Q-ko-solar-sft-v2.0", + "model_sha": "11dbc1ddae75713e8e1710f837a33492cab1c7b0", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/chlee10/T3Q-ko-solar-sft-v3.0/result_2024-03-25 14:35:32.json b/chlee10/T3Q-ko-solar-sft-v3.0/result_2024-03-25 14:35:32.json new file mode 100644 index 0000000000000000000000000000000000000000..bc6c4302157c1fe11fe51af6134bae349d7709d8 --- /dev/null +++ b/chlee10/T3Q-ko-solar-sft-v3.0/result_2024-03-25 14:35:32.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.6902730375426621, + "acc_stderr": 
0.013512058415238361, + "acc_norm": 0.7380546075085325, + "acc_norm_stderr": 0.012849054826858117 + }, + "harness|ko_hellaswag|10": { + "acc": 0.5327623979286995, + "acc_stderr": 0.0049790580784786955, + "acc_norm": 0.6916948814977096, + "acc_norm_stderr": 0.004608495469860373 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.7251461988304093, + "acc_stderr": 0.034240429246915824, + "acc_norm": 0.7251461988304093, + "acc_norm_stderr": 0.034240429246915824 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.7669902912621359, + "acc_stderr": 0.04185832598928315, + "acc_norm": 0.7669902912621359, + "acc_norm_stderr": 0.04185832598928315 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.7075351213282248, + "acc_stderr": 0.016267000684598645, + "acc_norm": 0.7075351213282248, + "acc_norm_stderr": 0.016267000684598645 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4888888888888889, + "acc_stderr": 0.04318275491977976, + "acc_norm": 0.4888888888888889, + "acc_norm_stderr": 0.04318275491977976 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.502127659574468, + "acc_stderr": 0.032685726586674936, + "acc_norm": 0.502127659574468, + "acc_norm_stderr": 0.032685726586674936 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.5060240963855421, + "acc_stderr": 0.03892212195333045, + "acc_norm": 0.5060240963855421, + "acc_norm_stderr": 0.03892212195333045 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6559485530546624, + "acc_stderr": 0.026981478043648043, + "acc_norm": 0.6559485530546624, + "acc_norm_stderr": 0.026981478043648043 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.672645739910314, + "acc_stderr": 0.03149384670994131, + "acc_norm": 0.672645739910314, + "acc_norm_stderr": 0.03149384670994131 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5801526717557252, + "acc_stderr": 
0.043285772152629715, + "acc_norm": 0.5801526717557252, + "acc_norm_stderr": 0.043285772152629715 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7525252525252525, + "acc_stderr": 0.030746300742124515, + "acc_norm": 0.7525252525252525, + "acc_norm_stderr": 0.030746300742124515 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5379310344827586, + "acc_stderr": 0.04154659671707548, + "acc_norm": 0.5379310344827586, + "acc_norm_stderr": 0.04154659671707548 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.04617034827006717, + "acc_norm": 0.3137254901960784, + "acc_norm_stderr": 0.04617034827006717 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6386554621848739, + "acc_stderr": 0.031204691225150023, + "acc_norm": 0.6386554621848739, + "acc_norm_stderr": 0.031204691225150023 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.6564102564102564, + "acc_stderr": 0.02407869658063549, + "acc_norm": 0.6564102564102564, + "acc_norm_stderr": 0.02407869658063549 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.75, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.75, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6851851851851852, + "acc_stderr": 0.04489931073591312, + "acc_norm": 0.6851851851851852, + "acc_norm_stderr": 0.04489931073591312 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.47783251231527096, + "acc_stderr": 0.03514528562175008, + "acc_norm": 0.47783251231527096, + "acc_norm_stderr": 0.03514528562175008 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 
0.6193548387096774, + "acc_stderr": 0.027621717832907036, + "acc_norm": 0.6193548387096774, + "acc_norm_stderr": 0.027621717832907036 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.8461538461538461, + "acc_stderr": 0.023636873317489274, + "acc_norm": 0.8461538461538461, + "acc_norm_stderr": 0.023636873317489274 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.6113207547169811, + "acc_stderr": 0.03000048544867599, + "acc_norm": 0.6113207547169811, + "acc_norm_stderr": 0.03000048544867599 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5909090909090909, + "acc_stderr": 0.04709306978661895, + "acc_norm": 0.5909090909090909, + "acc_norm_stderr": 0.04709306978661895 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3851851851851852, + "acc_stderr": 0.029670906124630882, + "acc_norm": 0.3851851851851852, + "acc_norm_stderr": 0.029670906124630882 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3576158940397351, + "acc_stderr": 0.03913453431177258, + "acc_norm": 0.3576158940397351, + "acc_norm_stderr": 0.03913453431177258 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7512437810945274, + "acc_stderr": 0.030567675938916707, + "acc_norm": 0.7512437810945274, + "acc_norm_stderr": 0.030567675938916707 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5549132947976878, + "acc_stderr": 0.03789401760283646, + "acc_norm": 0.5549132947976878, + "acc_norm_stderr": 0.03789401760283646 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.43915343915343913, + "acc_stderr": 0.025559920550531013, + "acc_norm": 0.43915343915343913, + "acc_norm_stderr": 0.025559920550531013 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.6111111111111112, + "acc_stderr": 0.04076663253918567, + "acc_norm": 0.6111111111111112, + "acc_norm_stderr": 0.04076663253918567 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + 
}, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.76, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.76, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.6184971098265896, + "acc_stderr": 0.0261521986197268, + "acc_norm": 0.6184971098265896, + "acc_norm_stderr": 0.0261521986197268 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.588957055214724, + "acc_stderr": 0.038656978537853624, + "acc_norm": 0.588957055214724, + "acc_norm_stderr": 0.038656978537853624 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6790123456790124, + "acc_stderr": 0.025976566010862744, + "acc_norm": 0.6790123456790124, + "acc_norm_stderr": 0.025976566010862744 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7616580310880829, + "acc_stderr": 0.03074890536390988, + "acc_norm": 0.7616580310880829, + "acc_norm_stderr": 0.03074890536390988 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.4824561403508772, + "acc_stderr": 0.04700708033551038, + "acc_norm": 0.4824561403508772, + "acc_norm_stderr": 0.04700708033551038 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.7486238532110092, + "acc_stderr": 0.018599206360287415, + "acc_norm": 0.7486238532110092, + "acc_norm_stderr": 0.018599206360287415 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.40476190476190477, + "acc_stderr": 0.043902592653775614, + "acc_norm": 0.40476190476190477, + "acc_norm_stderr": 0.043902592653775614 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.6568627450980392, + "acc_stderr": 0.027184498909941613, + "acc_norm": 0.6568627450980392, + "acc_norm_stderr": 0.027184498909941613 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.67, + "acc_stderr": 0.04725815626252607, + "acc_norm": 0.67, + "acc_norm_stderr": 0.04725815626252607 
+ }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7520661157024794, + "acc_stderr": 0.03941897526516303, + "acc_norm": 0.7520661157024794, + "acc_norm_stderr": 0.03941897526516303 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.6513157894736842, + "acc_stderr": 0.03878139888797611, + "acc_norm": 0.6513157894736842, + "acc_norm_stderr": 0.03878139888797611 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5996732026143791, + "acc_stderr": 0.019821843688271768, + "acc_norm": 0.5996732026143791, + "acc_norm_stderr": 0.019821843688271768 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.44680851063829785, + "acc_stderr": 0.02965823509766691, + "acc_norm": 0.44680851063829785, + "acc_norm_stderr": 0.02965823509766691 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.44642857142857145, + "acc_stderr": 0.047184714852195886, + "acc_norm": 0.44642857142857145, + "acc_norm_stderr": 0.047184714852195886 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5, + "acc_stderr": 0.034099716973523674, + "acc_norm": 0.5, + "acc_norm_stderr": 0.034099716973523674 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.38994413407821227, + "acc_stderr": 0.016312376629213067, + "acc_norm": 0.38994413407821227, + "acc_norm_stderr": 0.016312376629213067 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956913, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956913 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.7, + "acc_stderr": 0.04605661864718381, + "acc_norm": 0.7, + "acc_norm_stderr": 0.04605661864718381 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5367647058823529, + "acc_stderr": 0.03029061918048569, + "acc_norm": 0.5367647058823529, + "acc_norm_stderr": 0.03029061918048569 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6857142857142857, + "acc_stderr": 0.02971932942241746, + "acc_norm": 0.6857142857142857, + 
"acc_norm_stderr": 0.02971932942241746 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.70042194092827, + "acc_stderr": 0.029818024749753102, + "acc_norm": 0.70042194092827, + "acc_norm_stderr": 0.029818024749753102 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.44132985658409385, + "acc_stderr": 0.012682016335646678, + "acc_norm": 0.44132985658409385, + "acc_norm_stderr": 0.012682016335646678 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6764705882352942, + "acc_stderr": 0.03283472056108561, + "acc_norm": 0.6764705882352942, + "acc_norm_stderr": 0.03283472056108561 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6303030303030303, + "acc_stderr": 0.03769430314512568, + "acc_norm": 0.6303030303030303, + "acc_norm_stderr": 0.03769430314512568 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.6523867809057528, + "mc1_stderr": 0.016670769188897306, + "mc2": 0.7621547077458142, + "mc2_stderr": 0.013708206565316855 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.6186540731995277, + "acc_stderr": 0.016699301768828088, + "acc_norm": 0.6493506493506493, + "acc_norm_stderr": 0.0164055569038933 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 
1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "chlee10/T3Q-ko-solar-sft-v3.0", + "model_sha": 
"185bb7d877596bf3a9cf4d4e533c0ab15190a44a", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/choco9966/Llama-2-7b-instruct-tuning/result_2023-10-19 08:44:42.json b/choco9966/Llama-2-7b-instruct-tuning/result_2023-10-19 08:44:42.json new file mode 100644 index 0000000000000000000000000000000000000000..1c35061a190456c2a5478f3871105cf0f87d5a06 --- /dev/null +++ b/choco9966/Llama-2-7b-instruct-tuning/result_2023-10-19 08:44:42.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2960750853242321, + "acc_stderr": 0.013340916085246268, + "acc_norm": 0.33361774744027306, + "acc_norm_stderr": 0.013778687054176534 + }, + "harness|ko_hellaswag|10": { + "acc": 0.34534953196574386, + "acc_stderr": 0.0047451035439012934, + "acc_norm": 0.4252141007767377, + "acc_norm_stderr": 0.004933650697000603 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4327485380116959, + "acc_stderr": 0.03799978644370608, + "acc_norm": 0.4327485380116959, + "acc_norm_stderr": 0.03799978644370608 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.34951456310679613, + "acc_stderr": 0.047211885060971716, + "acc_norm": 0.34951456310679613, + "acc_norm_stderr": 0.047211885060971716 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3895274584929757, + "acc_stderr": 0.017438082556264594, + "acc_norm": 0.3895274584929757, + "acc_norm_stderr": 0.017438082556264594 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.31851851851851853, + "acc_stderr": 0.040247784019771124, + "acc_norm": 0.31851851851851853, + "acc_norm_stderr": 0.040247784019771124 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3659574468085106, + "acc_stderr": 
0.03148955829745529, + "acc_norm": 0.3659574468085106, + "acc_norm_stderr": 0.03148955829745529 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.29518072289156627, + "acc_stderr": 0.03550920185689629, + "acc_norm": 0.29518072289156627, + "acc_norm_stderr": 0.03550920185689629 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3890675241157556, + "acc_stderr": 0.027690337536485372, + "acc_norm": 0.3890675241157556, + "acc_norm_stderr": 0.027690337536485372 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3452914798206278, + "acc_stderr": 0.03191100192835794, + "acc_norm": 0.3452914798206278, + "acc_norm_stderr": 0.03191100192835794 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.45038167938931295, + "acc_stderr": 0.04363643698524779, + "acc_norm": 0.45038167938931295, + "acc_norm_stderr": 0.04363643698524779 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.35, + "acc_stderr": 0.04793724854411021, + "acc_norm": 0.35, + "acc_norm_stderr": 0.04793724854411021 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3484848484848485, + "acc_stderr": 0.033948539651564025, + "acc_norm": 0.3484848484848485, + "acc_norm_stderr": 0.033948539651564025 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3724137931034483, + "acc_stderr": 0.040287315329475604, + "acc_norm": 0.3724137931034483, + "acc_norm_stderr": 0.040287315329475604 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237654, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237654 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.33613445378151263, + "acc_stderr": 0.030684737115135377, + "acc_norm": 0.33613445378151263, + "acc_norm_stderr": 0.030684737115135377 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3128205128205128, + "acc_stderr": 0.023507579020645365, + "acc_norm": 0.3128205128205128, + "acc_norm_stderr": 0.023507579020645365 + }, + 
"harness|ko_mmlu_computer_security|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.047128212574267705, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.047128212574267705 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.28078817733990147, + "acc_stderr": 0.03161856335358611, + "acc_norm": 0.28078817733990147, + "acc_norm_stderr": 0.03161856335358611 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3580645161290323, + "acc_stderr": 0.027273890594300642, + "acc_norm": 0.3580645161290323, + "acc_norm_stderr": 0.027273890594300642 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5128205128205128, + "acc_stderr": 0.032745319388423504, + "acc_norm": 0.5128205128205128, + "acc_norm_stderr": 0.032745319388423504 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.32075471698113206, + "acc_stderr": 0.028727502957880274, + "acc_norm": 0.32075471698113206, + "acc_norm_stderr": 0.028727502957880274 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.38181818181818183, + "acc_stderr": 0.04653429807913508, + "acc_norm": 0.38181818181818183, + "acc_norm_stderr": 0.04653429807913508 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.027840811495871916, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.027840811495871916 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.271523178807947, + "acc_stderr": 0.03631329803969653, + "acc_norm": 0.271523178807947, + "acc_norm_stderr": 0.03631329803969653 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.48258706467661694, + "acc_stderr": 0.03533389234739245, + "acc_norm": 0.48258706467661694, + "acc_norm_stderr": 
0.03533389234739245 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3063583815028902, + "acc_stderr": 0.035149425512674394, + "acc_norm": 0.3063583815028902, + "acc_norm_stderr": 0.035149425512674394 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30158730158730157, + "acc_stderr": 0.0236369759961018, + "acc_norm": 0.30158730158730157, + "acc_norm_stderr": 0.0236369759961018 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2986111111111111, + "acc_stderr": 0.03827052357950756, + "acc_norm": 0.2986111111111111, + "acc_norm_stderr": 0.03827052357950756 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.3439306358381503, + "acc_stderr": 0.025574123786546648, + "acc_norm": 0.3439306358381503, + "acc_norm_stderr": 0.025574123786546648 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.34355828220858897, + "acc_stderr": 0.03731133519673893, + "acc_norm": 0.34355828220858897, + "acc_norm_stderr": 0.03731133519673893 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.38271604938271603, + "acc_stderr": 0.027044538138402616, + "acc_norm": 0.38271604938271603, + "acc_norm_stderr": 0.027044538138402616 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.43523316062176165, + "acc_stderr": 0.03578038165008586, + "acc_norm": 0.43523316062176165, + "acc_norm_stderr": 0.03578038165008586 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.041857744240220575, + "acc_norm": 0.2719298245614035, + 
"acc_norm_stderr": 0.041857744240220575 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3394495412844037, + "acc_stderr": 0.02030210934266235, + "acc_norm": 0.3394495412844037, + "acc_norm_stderr": 0.02030210934266235 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30158730158730157, + "acc_stderr": 0.04104947269903394, + "acc_norm": 0.30158730158730157, + "acc_norm_stderr": 0.04104947269903394 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3954248366013072, + "acc_stderr": 0.02799672318063145, + "acc_norm": 0.3954248366013072, + "acc_norm_stderr": 0.02799672318063145 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.49586776859504134, + "acc_stderr": 0.04564198767432754, + "acc_norm": 0.49586776859504134, + "acc_norm_stderr": 0.04564198767432754 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.35526315789473684, + "acc_stderr": 0.03894734487013315, + "acc_norm": 0.35526315789473684, + "acc_norm_stderr": 0.03894734487013315 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2957516339869281, + "acc_stderr": 0.018463154132632813, + "acc_norm": 0.2957516339869281, + "acc_norm_stderr": 0.018463154132632813 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2765957446808511, + "acc_stderr": 0.026684564340460987, + "acc_norm": 0.2765957446808511, + "acc_norm_stderr": 0.026684564340460987 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.24107142857142858, + "acc_stderr": 0.04059867246952687, + "acc_norm": 0.24107142857142858, + "acc_norm_stderr": 0.04059867246952687 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.03350991604696043, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.03350991604696043 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24804469273743016, + 
"acc_stderr": 0.01444415780826145, + "acc_norm": 0.24804469273743016, + "acc_norm_stderr": 0.01444415780826145 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.30514705882352944, + "acc_stderr": 0.0279715413701706, + "acc_norm": 0.30514705882352944, + "acc_norm_stderr": 0.0279715413701706 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.47346938775510206, + "acc_stderr": 0.03196412734523272, + "acc_norm": 0.47346938775510206, + "acc_norm_stderr": 0.03196412734523272 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.350210970464135, + "acc_stderr": 0.031052391937584356, + "acc_norm": 0.350210970464135, + "acc_norm_stderr": 0.031052391937584356 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2627118644067797, + "acc_stderr": 0.011240545514995669, + "acc_norm": 0.2627118644067797, + "acc_norm_stderr": 0.011240545514995669 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.31862745098039214, + "acc_stderr": 0.032702871814820816, + "acc_norm": 0.31862745098039214, + "acc_norm_stderr": 0.032702871814820816 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.38181818181818183, + "acc_stderr": 0.03793713171165634, + "acc_norm": 0.38181818181818183, + "acc_norm_stderr": 0.03793713171165634 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3329253365973072, + "mc1_stderr": 0.016497402382012055, + "mc2": 0.5140993490896929, + "mc2_stderr": 0.016082660027674764 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.25737898465171194, + "acc_stderr": 0.015030899730346749, + "acc_norm": 0.29043683589138136, + "acc_norm_stderr": 0.015607602569814626 + } + }, + 
"versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + 
"harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "choco9966/Llama-2-7b-instruct-tuning", + "model_sha": "0914768714fca5e74eef736b357d9f82ccc9e089", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/cockroach54/opensolar-qlora-dpo/result_2024-04-08 12:14:13.json b/cockroach54/opensolar-qlora-dpo/result_2024-04-08 12:14:13.json new file mode 100644 index 0000000000000000000000000000000000000000..ce8eb875a9f9c79a948c5ab65acb59204384a91f --- /dev/null +++ b/cockroach54/opensolar-qlora-dpo/result_2024-04-08 12:14:13.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.43856655290102387, + "acc_stderr": 0.014500682618212864, + "acc_norm": 0.5093856655290102, + "acc_norm_stderr": 0.014608816322065003 + }, + "harness|ko_hellaswag|10": { + "acc": 0.42949611631149176, + "acc_stderr": 0.0049399259587288745, + "acc_norm": 0.592212706632145, + "acc_norm_stderr": 0.0049041892578912715 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6198830409356725, + "acc_stderr": 0.037229657413855394, + "acc_norm": 0.6198830409356725, + "acc_norm_stderr": 0.037229657413855394 + }, + "harness|ko_mmlu_management|5": { + 
"acc": 0.6796116504854369, + "acc_stderr": 0.04620284082280042, + "acc_norm": 0.6796116504854369, + "acc_norm_stderr": 0.04620284082280042 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6717752234993615, + "acc_stderr": 0.01679168564019289, + "acc_norm": 0.6717752234993615, + "acc_norm_stderr": 0.01679168564019289 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4962962962962963, + "acc_stderr": 0.043192236258113303, + "acc_norm": 0.4962962962962963, + "acc_norm_stderr": 0.043192236258113303 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.39, + "acc_stderr": 0.049020713000019756, + "acc_norm": 0.39, + "acc_norm_stderr": 0.049020713000019756 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4765957446808511, + "acc_stderr": 0.03265019475033582, + "acc_norm": 0.4765957446808511, + "acc_norm_stderr": 0.03265019475033582 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4397590361445783, + "acc_stderr": 0.03864139923699122, + "acc_norm": 0.4397590361445783, + "acc_norm_stderr": 0.03864139923699122 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5916398713826366, + "acc_stderr": 0.02791705074848462, + "acc_norm": 0.5916398713826366, + "acc_norm_stderr": 0.02791705074848462 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5739910313901345, + "acc_stderr": 0.03318833286217281, + "acc_norm": 0.5739910313901345, + "acc_norm_stderr": 0.03318833286217281 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5725190839694656, + "acc_stderr": 0.04338920305792401, + "acc_norm": 0.5725190839694656, + "acc_norm_stderr": 0.04338920305792401 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.48, + "acc_stderr": 0.05021167315686779, + "acc_norm": 0.48, + "acc_norm_stderr": 0.05021167315686779 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6818181818181818, + "acc_stderr": 0.03318477333845331, + "acc_norm": 0.6818181818181818, + "acc_norm_stderr": 0.03318477333845331 + }, + "harness|ko_mmlu_electrical_engineering|5": { + 
"acc": 0.47586206896551725, + "acc_stderr": 0.041618085035015295, + "acc_norm": 0.47586206896551725, + "acc_norm_stderr": 0.041618085035015295 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.04158307533083286, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.04158307533083286 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4957983193277311, + "acc_stderr": 0.0324773433444811, + "acc_norm": 0.4957983193277311, + "acc_norm_stderr": 0.0324773433444811 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5230769230769231, + "acc_stderr": 0.02532399086173626, + "acc_norm": 0.5230769230769231, + "acc_norm_stderr": 0.02532399086173626 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5462962962962963, + "acc_stderr": 0.04812917324536823, + "acc_norm": 0.5462962962962963, + "acc_norm_stderr": 0.04812917324536823 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.37438423645320196, + "acc_stderr": 0.03405155380561952, + "acc_norm": 0.37438423645320196, + "acc_norm_stderr": 0.03405155380561952 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5838709677419355, + "acc_stderr": 0.028040981380761536, + "acc_norm": 0.5838709677419355, + "acc_norm_stderr": 0.028040981380761536 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7735042735042735, + "acc_stderr": 0.027421007295392926, + "acc_norm": 0.7735042735042735, + "acc_norm_stderr": 0.027421007295392926 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5283018867924528, + "acc_stderr": 0.0307235352490061, + "acc_norm": 0.5283018867924528, + "acc_norm_stderr": 0.0307235352490061 + }, + 
"harness|ko_mmlu_public_relations|5": { + "acc": 0.5363636363636364, + "acc_stderr": 0.04776449162396197, + "acc_norm": 0.5363636363636364, + "acc_norm_stderr": 0.04776449162396197 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.36666666666666664, + "acc_stderr": 0.02938162072646508, + "acc_norm": 0.36666666666666664, + "acc_norm_stderr": 0.02938162072646508 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3708609271523179, + "acc_stderr": 0.03943966699183629, + "acc_norm": 0.3708609271523179, + "acc_norm_stderr": 0.03943966699183629 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6517412935323383, + "acc_stderr": 0.033687874661154596, + "acc_norm": 0.6517412935323383, + "acc_norm_stderr": 0.033687874661154596 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.45664739884393063, + "acc_stderr": 0.03798106566014498, + "acc_norm": 0.45664739884393063, + "acc_norm_stderr": 0.03798106566014498 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3412698412698413, + "acc_stderr": 0.02441923496681906, + "acc_norm": 0.3412698412698413, + "acc_norm_stderr": 0.02441923496681906 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5138888888888888, + "acc_stderr": 0.041795966175810016, + "acc_norm": 0.5138888888888888, + "acc_norm_stderr": 0.041795966175810016 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.69, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.69, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5289017341040463, + "acc_stderr": 0.026874085883518348, + "acc_norm": 0.5289017341040463, + "acc_norm_stderr": 0.026874085883518348 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5153374233128835, + "acc_stderr": 0.03926522378708843, + "acc_norm": 0.5153374233128835, + 
"acc_norm_stderr": 0.03926522378708843 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5493827160493827, + "acc_stderr": 0.027684721415656196, + "acc_norm": 0.5493827160493827, + "acc_norm_stderr": 0.027684721415656196 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6476683937823834, + "acc_stderr": 0.03447478286414357, + "acc_norm": 0.6476683937823834, + "acc_norm_stderr": 0.03447478286414357 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.37719298245614036, + "acc_stderr": 0.04559522141958216, + "acc_norm": 0.37719298245614036, + "acc_norm_stderr": 0.04559522141958216 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5981651376146789, + "acc_stderr": 0.02102010617299701, + "acc_norm": 0.5981651376146789, + "acc_norm_stderr": 0.02102010617299701 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.38095238095238093, + "acc_stderr": 0.043435254289490965, + "acc_norm": 0.38095238095238093, + "acc_norm_stderr": 0.043435254289490965 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5294117647058824, + "acc_stderr": 0.028580341065138293, + "acc_norm": 0.5294117647058824, + "acc_norm_stderr": 0.028580341065138293 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6776859504132231, + "acc_stderr": 0.04266416363352168, + "acc_norm": 0.6776859504132231, + "acc_norm_stderr": 0.04266416363352168 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5197368421052632, + "acc_stderr": 0.040657710025626036, + "acc_norm": 0.5197368421052632, + "acc_norm_stderr": 0.040657710025626036 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.45098039215686275, + "acc_stderr": 0.020130388312904524, 
+ "acc_norm": 0.45098039215686275, + "acc_norm_stderr": 0.020130388312904524 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.36524822695035464, + "acc_stderr": 0.028723863853281274, + "acc_norm": 0.36524822695035464, + "acc_norm_stderr": 0.028723863853281274 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25892857142857145, + "acc_stderr": 0.041577515398656284, + "acc_norm": 0.25892857142857145, + "acc_norm_stderr": 0.041577515398656284 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4398148148148148, + "acc_stderr": 0.033851779760448106, + "acc_norm": 0.4398148148148148, + "acc_norm_stderr": 0.033851779760448106 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23687150837988827, + "acc_stderr": 0.01421957078810399, + "acc_norm": 0.23687150837988827, + "acc_norm_stderr": 0.01421957078810399 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.62, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.62, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3897058823529412, + "acc_stderr": 0.029624663581159685, + "acc_norm": 0.3897058823529412, + "acc_norm_stderr": 0.029624663581159685 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5959183673469388, + "acc_stderr": 0.03141470802586588, + "acc_norm": 0.5959183673469388, + "acc_norm_stderr": 0.03141470802586588 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.679324894514768, + "acc_stderr": 0.03038193194999041, + "acc_norm": 0.679324894514768, + "acc_norm_stderr": 0.03038193194999041 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3513689700130378, + "acc_stderr": 0.012192969457484024, + "acc_norm": 0.3513689700130378, + "acc_norm_stderr": 0.012192969457484024 + }, + 
"harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6078431372549019, + "acc_stderr": 0.03426712349247272, + "acc_norm": 0.6078431372549019, + "acc_norm_stderr": 0.03426712349247272 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6, + "acc_stderr": 0.038254602783800266, + "acc_norm": 0.6, + "acc_norm_stderr": 0.038254602783800266 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.28151774785801714, + "mc1_stderr": 0.01574402724825605, + "mc2": 0.44128612257799116, + "mc2_stderr": 0.01538358607364249 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5029515938606848, + "acc_stderr": 0.017190054580194694, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.017119172208061504 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + 
"harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "cockroach54/opensolar-qlora-dpo", + "model_sha": "3f91d6345924d65810a117dd1509affd2940bcb2", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/cockroach54/solar-sft-qlora/result_2024-04-08 01:24:27.json b/cockroach54/solar-sft-qlora/result_2024-04-08 01:24:27.json new file mode 100644 index 
0000000000000000000000000000000000000000..45395313a66455e33cb751993b218532694f7433 --- /dev/null +++ b/cockroach54/solar-sft-qlora/result_2024-04-08 01:24:27.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.39505119453924914, + "acc_stderr": 0.014285898292938156, + "acc_norm": 0.45307167235494883, + "acc_norm_stderr": 0.014546892052005628 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4058952399920335, + "acc_stderr": 0.004900608529778612, + "acc_norm": 0.5507866958773153, + "acc_norm_stderr": 0.0049639745040030245 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5964912280701754, + "acc_stderr": 0.037627386999170565, + "acc_norm": 0.5964912280701754, + "acc_norm_stderr": 0.037627386999170565 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6213592233009708, + "acc_stderr": 0.04802694698258973, + "acc_norm": 0.6213592233009708, + "acc_norm_stderr": 0.04802694698258973 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6347381864623244, + "acc_stderr": 0.017218530028838643, + "acc_norm": 0.6347381864623244, + "acc_norm_stderr": 0.017218530028838643 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.43703703703703706, + "acc_stderr": 0.04284958639753399, + "acc_norm": 0.43703703703703706, + "acc_norm_stderr": 0.04284958639753399 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4595744680851064, + "acc_stderr": 0.032579014820998356, + "acc_norm": 0.4595744680851064, + "acc_norm_stderr": 0.032579014820998356 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4879518072289157, + "acc_stderr": 0.0389136449583582, + "acc_norm": 0.4879518072289157, + "acc_norm_stderr": 0.0389136449583582 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.617363344051447, + "acc_stderr": 0.027604689028581986, + "acc_norm": 0.617363344051447, + "acc_norm_stderr": 
0.027604689028581986 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5246636771300448, + "acc_stderr": 0.03351695167652628, + "acc_norm": 0.5246636771300448, + "acc_norm_stderr": 0.03351695167652628 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5877862595419847, + "acc_stderr": 0.043171711948702556, + "acc_norm": 0.5877862595419847, + "acc_norm_stderr": 0.043171711948702556 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.45, + "acc_stderr": 0.04999999999999999, + "acc_norm": 0.45, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6919191919191919, + "acc_stderr": 0.03289477330098615, + "acc_norm": 0.6919191919191919, + "acc_norm_stderr": 0.03289477330098615 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5310344827586206, + "acc_stderr": 0.04158632762097828, + "acc_norm": 0.5310344827586206, + "acc_norm_stderr": 0.04158632762097828 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.35294117647058826, + "acc_stderr": 0.04755129616062947, + "acc_norm": 0.35294117647058826, + "acc_norm_stderr": 0.04755129616062947 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6008403361344538, + "acc_stderr": 0.03181110032413925, + "acc_norm": 0.6008403361344538, + "acc_norm_stderr": 0.03181110032413925 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5, + "acc_stderr": 0.02535100632816969, + "acc_norm": 0.5, + "acc_norm_stderr": 0.02535100632816969 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5833333333333334, + "acc_stderr": 0.04766075165356462, + "acc_norm": 0.5833333333333334, + "acc_norm_stderr": 0.04766075165356462 
+ }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4088669950738916, + "acc_stderr": 0.034590588158832314, + "acc_norm": 0.4088669950738916, + "acc_norm_stderr": 0.034590588158832314 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5967741935483871, + "acc_stderr": 0.027906150826041146, + "acc_norm": 0.5967741935483871, + "acc_norm_stderr": 0.027906150826041146 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7991452991452992, + "acc_stderr": 0.02624677294689048, + "acc_norm": 0.7991452991452992, + "acc_norm_stderr": 0.02624677294689048 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5169811320754717, + "acc_stderr": 0.030755120364119898, + "acc_norm": 0.5169811320754717, + "acc_norm_stderr": 0.030755120364119898 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.04769300568972745, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.04769300568972745 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.31851851851851853, + "acc_stderr": 0.02840653309060846, + "acc_norm": 0.31851851851851853, + "acc_norm_stderr": 0.02840653309060846 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3576158940397351, + "acc_stderr": 0.03913453431177258, + "acc_norm": 0.3576158940397351, + "acc_norm_stderr": 0.03913453431177258 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6865671641791045, + "acc_stderr": 0.03280188205348642, + "acc_norm": 0.6865671641791045, + "acc_norm_stderr": 0.03280188205348642 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.47398843930635837, + "acc_stderr": 0.03807301726504511, + "acc_norm": 0.47398843930635837, + "acc_norm_stderr": 0.03807301726504511 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3862433862433862, + "acc_stderr": 0.02507598176760168, + "acc_norm": 0.3862433862433862, + "acc_norm_stderr": 0.02507598176760168 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5069444444444444, + "acc_stderr": 
0.04180806750294938, + "acc_norm": 0.5069444444444444, + "acc_norm_stderr": 0.04180806750294938 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.71, + "acc_stderr": 0.04560480215720683, + "acc_norm": 0.71, + "acc_norm_stderr": 0.04560480215720683 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5982658959537572, + "acc_stderr": 0.02639410417764363, + "acc_norm": 0.5982658959537572, + "acc_norm_stderr": 0.02639410417764363 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5521472392638037, + "acc_stderr": 0.03906947479456608, + "acc_norm": 0.5521472392638037, + "acc_norm_stderr": 0.03906947479456608 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5925925925925926, + "acc_stderr": 0.02733954664066274, + "acc_norm": 0.5925925925925926, + "acc_norm_stderr": 0.02733954664066274 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6683937823834197, + "acc_stderr": 0.03397636541089118, + "acc_norm": 0.6683937823834197, + "acc_norm_stderr": 0.03397636541089118 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.4824561403508772, + "acc_stderr": 0.04700708033551038, + "acc_norm": 0.4824561403508772, + "acc_norm_stderr": 0.04700708033551038 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.671559633027523, + "acc_stderr": 0.020135902797298395, + "acc_norm": 0.671559633027523, + "acc_norm_stderr": 0.020135902797298395 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.04360314860077459, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.04360314860077459 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5490196078431373, + "acc_stderr": 
0.02849199358617156, + "acc_norm": 0.5490196078431373, + "acc_norm_stderr": 0.02849199358617156 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.71900826446281, + "acc_stderr": 0.04103203830514512, + "acc_norm": 0.71900826446281, + "acc_norm_stderr": 0.04103203830514512 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5657894736842105, + "acc_stderr": 0.04033565667848319, + "acc_norm": 0.5657894736842105, + "acc_norm_stderr": 0.04033565667848319 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4820261437908497, + "acc_stderr": 0.020214761037872408, + "acc_norm": 0.4820261437908497, + "acc_norm_stderr": 0.020214761037872408 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.39361702127659576, + "acc_stderr": 0.029144544781596147, + "acc_norm": 0.39361702127659576, + "acc_norm_stderr": 0.029144544781596147 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.04547960999764376, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.04547960999764376 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.03400603625538272, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.03400603625538272 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.18212290502793296, + "acc_stderr": 0.012907958130579973, + "acc_norm": 0.18212290502793296, + "acc_norm_stderr": 0.012907958130579973 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.66, + "acc_stderr": 0.04760952285695238, + "acc_norm": 0.66, + "acc_norm_stderr": 0.04760952285695238 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 
0.45955882352941174, + "acc_stderr": 0.030273325077345755, + "acc_norm": 0.45955882352941174, + "acc_norm_stderr": 0.030273325077345755 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5836734693877551, + "acc_stderr": 0.03155782816556166, + "acc_norm": 0.5836734693877551, + "acc_norm_stderr": 0.03155782816556166 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6751054852320675, + "acc_stderr": 0.030486039389105307, + "acc_norm": 0.6751054852320675, + "acc_norm_stderr": 0.030486039389105307 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.38852672750977835, + "acc_stderr": 0.012448817838292365, + "acc_norm": 0.38852672750977835, + "acc_norm_stderr": 0.012448817838292365 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5980392156862745, + "acc_stderr": 0.03441190023482465, + "acc_norm": 0.5980392156862745, + "acc_norm_stderr": 0.03441190023482465 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6060606060606061, + "acc_stderr": 0.038154943086889305, + "acc_norm": 0.6060606060606061, + "acc_norm_stderr": 0.038154943086889305 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2913096695226438, + "mc1_stderr": 0.01590598704818483, + "mc2": 0.44477229481563696, + "mc2_stderr": 0.015541175732167036 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4781582054309327, + "acc_stderr": 0.01717394447429438, + "acc_norm": 0.51357733175915, + "acc_norm_stderr": 0.017184015060401455 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 
1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + 
"harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "cockroach54/solar-sft-qlora", + "model_sha": "2b783cbb6a5fe3b5a0274872835ef1de55619eb8", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/cocoirun/AIFT-42dot-PLM-1.3B-ao-instruct-all-v0.4-ff-e1/result_2024-01-19 01:40:38.json b/cocoirun/AIFT-42dot-PLM-1.3B-ao-instruct-all-v0.4-ff-e1/result_2024-01-19 01:40:38.json new file mode 100644 index 0000000000000000000000000000000000000000..e3d138e39093da6e58797b8403c43cb61822b504 --- /dev/null +++ b/cocoirun/AIFT-42dot-PLM-1.3B-ao-instruct-all-v0.4-ff-e1/result_2024-01-19 01:40:38.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.27986348122866894, + "acc_stderr": 0.013119040897725922, + "acc_norm": 0.33276450511945393, + "acc_norm_stderr": 0.013769863046192307 + }, + "harness|ko_hellaswag|10": { + "acc": 0.35391356303525195, + "acc_stderr": 0.0047720549044044346, + "acc_norm": 0.45130452101175067, + "acc_norm_stderr": 0.004966060995315058 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.30409356725146197, + "acc_stderr": 0.03528211258245231, + "acc_norm": 0.30409356725146197, + "acc_norm_stderr": 0.03528211258245231 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.1650485436893204, + "acc_stderr": 0.036756688322331886, + "acc_norm": 0.1650485436893204, + "acc_norm_stderr": 0.036756688322331886 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.23627075351213284, + "acc_stderr": 0.015190473717037488, + "acc_norm": 0.23627075351213284, + "acc_norm_stderr": 0.015190473717037488 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2074074074074074, + "acc_stderr": 0.03502553170678316, + "acc_norm": 
0.2074074074074074, + "acc_norm_stderr": 0.03502553170678316 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.22, + "acc_stderr": 0.041633319989322695, + "acc_norm": 0.22, + "acc_norm_stderr": 0.041633319989322695 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2978723404255319, + "acc_stderr": 0.029896145682095462, + "acc_norm": 0.2978723404255319, + "acc_norm_stderr": 0.029896145682095462 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.2891566265060241, + "acc_stderr": 0.03529486801511115, + "acc_norm": 0.2891566265060241, + "acc_norm_stderr": 0.03529486801511115 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.19292604501607716, + "acc_stderr": 0.022411516780911366, + "acc_norm": 0.19292604501607716, + "acc_norm_stderr": 0.022411516780911366 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.28699551569506726, + "acc_stderr": 0.030360379710291964, + "acc_norm": 0.28699551569506726, + "acc_norm_stderr": 0.030360379710291964 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.26717557251908397, + "acc_stderr": 0.03880848301082396, + "acc_norm": 0.26717557251908397, + "acc_norm_stderr": 0.03880848301082396 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.20202020202020202, + "acc_stderr": 0.028606204289229865, + "acc_norm": 0.20202020202020202, + "acc_norm_stderr": 0.028606204289229865 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.21379310344827587, + "acc_stderr": 0.034165204477475494, + "acc_norm": 0.21379310344827587, + "acc_norm_stderr": 0.034165204477475494 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.040233822736177476, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.040233822736177476 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.23109243697478993, + 
"acc_stderr": 0.027381406927868963, + "acc_norm": 0.23109243697478993, + "acc_norm_stderr": 0.027381406927868963 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2641025641025641, + "acc_stderr": 0.022352193737453268, + "acc_norm": 0.2641025641025641, + "acc_norm_stderr": 0.022352193737453268 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.19, + "acc_stderr": 0.039427724440366234, + "acc_norm": 0.19, + "acc_norm_stderr": 0.039427724440366234 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.22, + "acc_stderr": 0.041633319989322695, + "acc_norm": 0.22, + "acc_norm_stderr": 0.041633319989322695 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.04236511258094633, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.04236511258094633 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.270935960591133, + "acc_stderr": 0.031270907132976984, + "acc_norm": 0.270935960591133, + "acc_norm_stderr": 0.031270907132976984 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.23548387096774193, + "acc_stderr": 0.024137632429337717, + "acc_norm": 0.23548387096774193, + "acc_norm_stderr": 0.024137632429337717 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2905982905982906, + "acc_stderr": 0.029745048572674057, + "acc_norm": 0.2905982905982906, + "acc_norm_stderr": 0.029745048572674057 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.23773584905660378, + "acc_stderr": 0.02619980880756194, + "acc_norm": 0.23773584905660378, + "acc_norm_stderr": 0.02619980880756194 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03955932861795833, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03955932861795833 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.02784081149587193, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.02784081149587193 + }, + 
"harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3841059602649007, + "acc_stderr": 0.03971301814719197, + "acc_norm": 0.3841059602649007, + "acc_norm_stderr": 0.03971301814719197 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.24378109452736318, + "acc_stderr": 0.03036049015401464, + "acc_norm": 0.24378109452736318, + "acc_norm_stderr": 0.03036049015401464 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.1907514450867052, + "acc_stderr": 0.029957851329869337, + "acc_norm": 0.1907514450867052, + "acc_norm_stderr": 0.029957851329869337 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.25132275132275134, + "acc_stderr": 0.022340482339643895, + "acc_norm": 0.25132275132275134, + "acc_norm_stderr": 0.022340482339643895 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2361111111111111, + "acc_stderr": 0.03551446610810826, + "acc_norm": 0.2361111111111111, + "acc_norm_stderr": 0.03551446610810826 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.25722543352601157, + "acc_stderr": 0.023532925431044283, + "acc_norm": 0.25722543352601157, + "acc_norm_stderr": 0.023532925431044283 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3128834355828221, + "acc_stderr": 0.03642914578292404, + "acc_norm": 0.3128834355828221, + "acc_norm_stderr": 0.03642914578292404 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.24382716049382716, + "acc_stderr": 0.023891879541959607, + "acc_norm": 0.24382716049382716, + "acc_norm_stderr": 0.023891879541959607 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + 
}, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.29533678756476683, + "acc_stderr": 0.03292296639155141, + "acc_norm": 0.29533678756476683, + "acc_norm_stderr": 0.03292296639155141 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.042270544512322, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.042270544512322 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.21284403669724772, + "acc_stderr": 0.017549376389313694, + "acc_norm": 0.21284403669724772, + "acc_norm_stderr": 0.017549376389313694 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.04134913018303316, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.04134913018303316 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.02656892101545716, + "acc_norm": 0.3137254901960784, + "acc_norm_stderr": 0.02656892101545716 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.34710743801652894, + "acc_stderr": 0.04345724570292534, + "acc_norm": 0.34710743801652894, + "acc_norm_stderr": 0.04345724570292534 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.27631578947368424, + "acc_stderr": 0.03639057569952925, + "acc_norm": 0.27631578947368424, + "acc_norm_stderr": 0.03639057569952925 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.017630827375148383, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.017630827375148383 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2624113475177305, + "acc_stderr": 0.026244920349843007, + "acc_norm": 0.2624113475177305, + "acc_norm_stderr": 0.026244920349843007 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25, + "acc_stderr": 0.04109974682633932, + "acc_norm": 0.25, + 
"acc_norm_stderr": 0.04109974682633932 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.36574074074074076, + "acc_stderr": 0.032847388576472056, + "acc_norm": 0.36574074074074076, + "acc_norm_stderr": 0.032847388576472056 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27262569832402234, + "acc_stderr": 0.014893391735249608, + "acc_norm": 0.27262569832402234, + "acc_norm_stderr": 0.014893391735249608 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.40441176470588236, + "acc_stderr": 0.02981263070156974, + "acc_norm": 0.40441176470588236, + "acc_norm_stderr": 0.02981263070156974 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.27755102040816326, + "acc_stderr": 0.02866685779027465, + "acc_norm": 0.27755102040816326, + "acc_norm_stderr": 0.02866685779027465 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.24472573839662448, + "acc_stderr": 0.027985699387036413, + "acc_norm": 0.24472573839662448, + "acc_norm_stderr": 0.027985699387036413 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.26597131681877445, + "acc_stderr": 0.011285033165551277, + "acc_norm": 0.26597131681877445, + "acc_norm_stderr": 0.011285033165551277 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.030587591351604236, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.030587591351604236 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2787878787878788, + "acc_stderr": 0.03501438706296781, + "acc_norm": 0.2787878787878788, + "acc_norm_stderr": 0.03501438706296781 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 
0.2582619339045288, + "mc1_stderr": 0.015321821688476189, + "mc2": 0.4108140892688842, + "mc2_stderr": 0.014764599067980493 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2585596221959858, + "acc_stderr": 0.015053354438963988, + "acc_norm": 0.36835891381345925, + "acc_norm_stderr": 0.016583858982639074 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "cocoirun/AIFT-42dot-PLM-1.3B-ao-instruct-all-v0.4-ff-e1", + "model_sha": "653712a5f567834ab62d9e6195665278fe9cd38b", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/cocoirun/AIFT-ko-orca-plat-Yi-ko-6b-v1.0/result_2024-01-19 12:49:59.json b/cocoirun/AIFT-ko-orca-plat-Yi-ko-6b-v1.0/result_2024-01-19 12:49:59.json new file mode 100644 index 0000000000000000000000000000000000000000..82d521832f4bca171513fd0ddb9c95472fdb2c88 --- /dev/null +++ b/cocoirun/AIFT-ko-orca-plat-Yi-ko-6b-v1.0/result_2024-01-19 12:49:59.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3515358361774744, + "acc_stderr": 0.013952413699600938, + "acc_norm": 0.40017064846416384, + "acc_norm_stderr": 0.014317197787809181 + }, + 
"harness|ko_hellaswag|10": { + "acc": 0.3942441744672376, + "acc_stderr": 0.004876889983110831, + "acc_norm": 0.5246962756423024, + "acc_norm_stderr": 0.004983691099110912 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4678362573099415, + "acc_stderr": 0.03826882417660369, + "acc_norm": 0.4678362573099415, + "acc_norm_stderr": 0.03826882417660369 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6019417475728155, + "acc_stderr": 0.048467482539772386, + "acc_norm": 0.6019417475728155, + "acc_norm_stderr": 0.048467482539772386 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5504469987228607, + "acc_stderr": 0.017788725283507337, + "acc_norm": 0.5504469987228607, + "acc_norm_stderr": 0.017788725283507337 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4962962962962963, + "acc_stderr": 0.043192236258113303, + "acc_norm": 0.4962962962962963, + "acc_norm_stderr": 0.043192236258113303 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4127659574468085, + "acc_stderr": 0.03218471141400351, + "acc_norm": 0.4127659574468085, + "acc_norm_stderr": 0.03218471141400351 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3674698795180723, + "acc_stderr": 0.03753267402120574, + "acc_norm": 0.3674698795180723, + "acc_norm_stderr": 0.03753267402120574 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4694533762057878, + "acc_stderr": 0.028345045864840688, + "acc_norm": 0.4694533762057878, + "acc_norm_stderr": 0.028345045864840688 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.47533632286995514, + "acc_stderr": 0.03351695167652628, + "acc_norm": 0.47533632286995514, + "acc_norm_stderr": 0.03351695167652628 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4198473282442748, + "acc_stderr": 0.04328577215262971, + "acc_norm": 0.4198473282442748, + "acc_norm_stderr": 0.04328577215262971 + 
}, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.601010101010101, + "acc_stderr": 0.0348890161685273, + "acc_norm": 0.601010101010101, + "acc_norm_stderr": 0.0348890161685273 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4413793103448276, + "acc_stderr": 0.04137931034482758, + "acc_norm": 0.4413793103448276, + "acc_norm_stderr": 0.04137931034482758 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.04023382273617746, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.04023382273617746 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4495798319327731, + "acc_stderr": 0.03231293497137707, + "acc_norm": 0.4495798319327731, + "acc_norm_stderr": 0.03231293497137707 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4282051282051282, + "acc_stderr": 0.025088301454694834, + "acc_norm": 0.4282051282051282, + "acc_norm_stderr": 0.025088301454694834 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.56, + "acc_stderr": 0.0498887651569859, + "acc_norm": 0.56, + "acc_norm_stderr": 0.0498887651569859 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5277777777777778, + "acc_stderr": 0.04826217294139894, + "acc_norm": 0.5277777777777778, + "acc_norm_stderr": 0.04826217294139894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3793103448275862, + "acc_stderr": 0.034139638059062345, + "acc_norm": 0.3793103448275862, + "acc_norm_stderr": 0.034139638059062345 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.42258064516129035, + "acc_stderr": 0.028100964724272638, + "acc_norm": 0.42258064516129035, + "acc_norm_stderr": 
0.028100964724272638 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7051282051282052, + "acc_stderr": 0.029872577708891186, + "acc_norm": 0.7051282051282052, + "acc_norm_stderr": 0.029872577708891186 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4377358490566038, + "acc_stderr": 0.030533338430467516, + "acc_norm": 0.4377358490566038, + "acc_norm_stderr": 0.030533338430467516 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.28888888888888886, + "acc_stderr": 0.027634907264178544, + "acc_norm": 0.28888888888888886, + "acc_norm_stderr": 0.027634907264178544 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3708609271523179, + "acc_stderr": 0.03943966699183629, + "acc_norm": 0.3708609271523179, + "acc_norm_stderr": 0.03943966699183629 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5373134328358209, + "acc_stderr": 0.035256751674679745, + "acc_norm": 0.5373134328358209, + "acc_norm_stderr": 0.035256751674679745 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.36416184971098264, + "acc_stderr": 0.03669072477416906, + "acc_norm": 0.36416184971098264, + "acc_norm_stderr": 0.03669072477416906 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.02351729433596328, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.02351729433596328 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.04016660030451233, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.04016660030451233 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + 
"acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4595375722543353, + "acc_stderr": 0.026830805998952243, + "acc_norm": 0.4595375722543353, + "acc_norm_stderr": 0.026830805998952243 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.44171779141104295, + "acc_stderr": 0.03901591825836184, + "acc_norm": 0.44171779141104295, + "acc_norm_stderr": 0.03901591825836184 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5030864197530864, + "acc_stderr": 0.02782021415859437, + "acc_norm": 0.5030864197530864, + "acc_norm_stderr": 0.02782021415859437 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5233160621761658, + "acc_stderr": 0.03604513672442202, + "acc_norm": 0.5233160621761658, + "acc_norm_stderr": 0.03604513672442202 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.04227054451232199, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.04227054451232199 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5614678899082569, + "acc_stderr": 0.021274713073954565, + "acc_norm": 0.5614678899082569, + "acc_norm_stderr": 0.021274713073954565 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.25396825396825395, + "acc_stderr": 0.03893259610604674, + "acc_norm": 0.25396825396825395, + "acc_norm_stderr": 0.03893259610604674 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.39215686274509803, + "acc_stderr": 0.02795604616542452, + "acc_norm": 0.39215686274509803, + "acc_norm_stderr": 0.02795604616542452 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6363636363636364, + "acc_stderr": 
0.043913262867240704, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.043913262867240704 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4276315789473684, + "acc_stderr": 0.040260970832965585, + "acc_norm": 0.4276315789473684, + "acc_norm_stderr": 0.040260970832965585 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4215686274509804, + "acc_stderr": 0.019977422600227467, + "acc_norm": 0.4215686274509804, + "acc_norm_stderr": 0.019977422600227467 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.30851063829787234, + "acc_stderr": 0.02755336616510137, + "acc_norm": 0.30851063829787234, + "acc_norm_stderr": 0.02755336616510137 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.29464285714285715, + "acc_stderr": 0.0432704093257873, + "acc_norm": 0.29464285714285715, + "acc_norm_stderr": 0.0432704093257873 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2824074074074074, + "acc_stderr": 0.03070137211151092, + "acc_norm": 0.2824074074074074, + "acc_norm_stderr": 0.03070137211151092 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2681564245810056, + "acc_stderr": 0.014816119635317006, + "acc_norm": 0.2681564245810056, + "acc_norm_stderr": 0.014816119635317006 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.39705882352941174, + "acc_stderr": 0.029722152099280058, + "acc_norm": 0.39705882352941174, + "acc_norm_stderr": 0.029722152099280058 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.33877551020408164, + "acc_stderr": 0.030299506562154185, + "acc_norm": 0.33877551020408164, + "acc_norm_stderr": 0.030299506562154185 + }, + 
"harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6497890295358649, + "acc_stderr": 0.031052391937584346, + "acc_norm": 0.6497890295358649, + "acc_norm_stderr": 0.031052391937584346 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3226857887874837, + "acc_stderr": 0.011940264193195976, + "acc_norm": 0.3226857887874837, + "acc_norm_stderr": 0.011940264193195976 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.49019607843137253, + "acc_stderr": 0.03508637358630572, + "acc_norm": 0.49019607843137253, + "acc_norm_stderr": 0.03508637358630572 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5272727272727272, + "acc_stderr": 0.03898531605579419, + "acc_norm": 0.5272727272727272, + "acc_norm_stderr": 0.03898531605579419 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2717258261933905, + "mc1_stderr": 0.015572840452875833, + "mc2": 0.4106463822065034, + "mc2_stderr": 0.015003599231002732 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5548996458087367, + "acc_stderr": 0.01708641743100547, + "acc_norm": 0.602125147579693, + "acc_norm_stderr": 0.01682795905473339 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + 
"harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "cocoirun/AIFT-ko-orca-plat-Yi-ko-6b-v1.0", + "model_sha": "6d7195352afe946a4c349d4aa3dd407bc801bb09", + "model_dtype": 
"torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/cocoirun/Yi-Ko-6B-instruct-v1.0/result_2024-01-08 05:52:21.json b/cocoirun/Yi-Ko-6B-instruct-v1.0/result_2024-01-08 05:52:21.json new file mode 100644 index 0000000000000000000000000000000000000000..be4447cd7ed05e22df56191acbf5b5fc17df0bd3 --- /dev/null +++ b/cocoirun/Yi-Ko-6B-instruct-v1.0/result_2024-01-08 05:52:21.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.318259385665529, + "acc_stderr": 0.013611993916971451, + "acc_norm": 0.3890784982935154, + "acc_norm_stderr": 0.014247309976045607 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3966341366261701, + "acc_stderr": 0.004881990487628916, + "acc_norm": 0.5318661621190998, + "acc_norm_stderr": 0.004979637330230312 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.49122807017543857, + "acc_stderr": 0.038342347441649924, + "acc_norm": 0.49122807017543857, + "acc_norm_stderr": 0.038342347441649924 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5825242718446602, + "acc_stderr": 0.04882840548212238, + "acc_norm": 0.5825242718446602, + "acc_norm_stderr": 0.04882840548212238 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5772669220945083, + "acc_stderr": 0.01766518035195406, + "acc_norm": 0.5772669220945083, + "acc_norm_stderr": 0.01766518035195406 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.04309732901036354, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.04309732901036354 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.40425531914893614, + "acc_stderr": 0.03208115750788684, + "acc_norm": 0.40425531914893614, + "acc_norm_stderr": 0.03208115750788684 + 
}, + "harness|ko_mmlu_virology|5": { + "acc": 0.41566265060240964, + "acc_stderr": 0.038367221765980515, + "acc_norm": 0.41566265060240964, + "acc_norm_stderr": 0.038367221765980515 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5401929260450161, + "acc_stderr": 0.028306190403305696, + "acc_norm": 0.5401929260450161, + "acc_norm_stderr": 0.028306190403305696 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5022421524663677, + "acc_stderr": 0.033557465352232634, + "acc_norm": 0.5022421524663677, + "acc_norm_stderr": 0.033557465352232634 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5190839694656488, + "acc_stderr": 0.043820947055509867, + "acc_norm": 0.5190839694656488, + "acc_norm_stderr": 0.043820947055509867 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6414141414141414, + "acc_stderr": 0.03416903640391521, + "acc_norm": 0.6414141414141414, + "acc_norm_stderr": 0.03416903640391521 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.45517241379310347, + "acc_stderr": 0.04149886942192117, + "acc_norm": 0.45517241379310347, + "acc_norm_stderr": 0.04149886942192117 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.04220773659171453, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.04220773659171453 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5042016806722689, + "acc_stderr": 0.03247734334448111, + "acc_norm": 0.5042016806722689, + "acc_norm_stderr": 0.03247734334448111 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.48205128205128206, + "acc_stderr": 0.02533466708095495, + "acc_norm": 0.48205128205128206, + "acc_norm_stderr": 0.02533466708095495 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + 
"acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3793103448275862, + "acc_stderr": 0.034139638059062345, + "acc_norm": 0.3793103448275862, + "acc_norm_stderr": 0.034139638059062345 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4870967741935484, + "acc_stderr": 0.028434533152681848, + "acc_norm": 0.4870967741935484, + "acc_norm_stderr": 0.028434533152681848 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7649572649572649, + "acc_stderr": 0.027778835904935427, + "acc_norm": 0.7649572649572649, + "acc_norm_stderr": 0.027778835904935427 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.49056603773584906, + "acc_stderr": 0.0307673947078081, + "acc_norm": 0.49056603773584906, + "acc_norm_stderr": 0.0307673947078081 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5636363636363636, + "acc_stderr": 0.04750185058907296, + "acc_norm": 0.5636363636363636, + "acc_norm_stderr": 0.04750185058907296 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3074074074074074, + "acc_stderr": 0.028133252578815632, + "acc_norm": 0.3074074074074074, + "acc_norm_stderr": 0.028133252578815632 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3708609271523179, + "acc_stderr": 0.03943966699183629, + "acc_norm": 0.3708609271523179, + "acc_norm_stderr": 0.03943966699183629 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6169154228855721, + "acc_stderr": 0.0343751933733825, + "acc_norm": 0.6169154228855721, + "acc_norm_stderr": 0.0343751933733825 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4277456647398844, + "acc_stderr": 
0.03772446857518027, + "acc_norm": 0.4277456647398844, + "acc_norm_stderr": 0.03772446857518027 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.335978835978836, + "acc_stderr": 0.024326310529149135, + "acc_norm": 0.335978835978836, + "acc_norm_stderr": 0.024326310529149135 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4027777777777778, + "acc_stderr": 0.041014055198424264, + "acc_norm": 0.4027777777777778, + "acc_norm_stderr": 0.041014055198424264 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.57, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.57, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.49710982658959535, + "acc_stderr": 0.02691864538323901, + "acc_norm": 0.49710982658959535, + "acc_norm_stderr": 0.02691864538323901 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4539877300613497, + "acc_stderr": 0.0391170190467718, + "acc_norm": 0.4539877300613497, + "acc_norm_stderr": 0.0391170190467718 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4845679012345679, + "acc_stderr": 0.0278074900442762, + "acc_norm": 0.4845679012345679, + "acc_norm_stderr": 0.0278074900442762 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6217616580310881, + "acc_stderr": 0.034998072761933376, + "acc_norm": 0.6217616580310881, + "acc_norm_stderr": 0.034998072761933376 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.30701754385964913, + "acc_stderr": 0.04339138322579859, + "acc_norm": 0.30701754385964913, + "acc_norm_stderr": 0.04339138322579859 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.634862385321101, + 
"acc_stderr": 0.020642801454384005, + "acc_norm": 0.634862385321101, + "acc_norm_stderr": 0.020642801454384005 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.31746031746031744, + "acc_stderr": 0.04163453031302859, + "acc_norm": 0.31746031746031744, + "acc_norm_stderr": 0.04163453031302859 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.434640522875817, + "acc_stderr": 0.028384256704883034, + "acc_norm": 0.434640522875817, + "acc_norm_stderr": 0.028384256704883034 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6033057851239669, + "acc_stderr": 0.044658697805310094, + "acc_norm": 0.6033057851239669, + "acc_norm_stderr": 0.044658697805310094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4473684210526316, + "acc_stderr": 0.04046336883978251, + "acc_norm": 0.4473684210526316, + "acc_norm_stderr": 0.04046336883978251 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.41013071895424835, + "acc_stderr": 0.019898412717635892, + "acc_norm": 0.41013071895424835, + "acc_norm_stderr": 0.019898412717635892 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3475177304964539, + "acc_stderr": 0.02840662780959095, + "acc_norm": 0.3475177304964539, + "acc_norm_stderr": 0.02840662780959095 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.33035714285714285, + "acc_stderr": 0.04464285714285713, + "acc_norm": 0.33035714285714285, + "acc_norm_stderr": 0.04464285714285713 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.32407407407407407, + "acc_stderr": 0.03191923445686186, + "acc_norm": 0.32407407407407407, + "acc_norm_stderr": 0.03191923445686186 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.3396648044692737, + "acc_stderr": 0.01583940040621248, + "acc_norm": 0.3396648044692737, + "acc_norm_stderr": 0.01583940040621248 + }, + 
"harness|ko_mmlu_college_computer_science|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.41544117647058826, + "acc_stderr": 0.02993534270787775, + "acc_norm": 0.41544117647058826, + "acc_norm_stderr": 0.02993534270787775 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4204081632653061, + "acc_stderr": 0.03160106993449604, + "acc_norm": 0.4204081632653061, + "acc_norm_stderr": 0.03160106993449604 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6413502109704642, + "acc_stderr": 0.031219569445301833, + "acc_norm": 0.6413502109704642, + "acc_norm_stderr": 0.031219569445301833 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3246414602346806, + "acc_stderr": 0.011959089388530025, + "acc_norm": 0.3246414602346806, + "acc_norm_stderr": 0.011959089388530025 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5637254901960784, + "acc_stderr": 0.034806931384570396, + "acc_norm": 0.5637254901960784, + "acc_norm_stderr": 0.034806931384570396 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5333333333333333, + "acc_stderr": 0.03895658065271846, + "acc_norm": 0.5333333333333333, + "acc_norm_stderr": 0.03895658065271846 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.33659730722154224, + "mc1_stderr": 0.01654241280949487, + "mc2": 0.46075563077342907, + "mc2_stderr": 0.015158476853516014 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.48406139315230223, + "acc_stderr": 0.017181617837190195, + "acc_norm": 0.512396694214876, + "acc_norm_stderr": 0.01718506973267653 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 
1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + 
"harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "cocoirun/Yi-Ko-6B-instruct-v1.0", + "model_sha": "242d35ae3f63ff0d9721d078947d203b902d24ba", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/cocoirun/Yi-Ko-6B-instruct-v1.2/result_2024-01-08 06:38:38.json b/cocoirun/Yi-Ko-6B-instruct-v1.2/result_2024-01-08 06:38:38.json new file mode 100644 index 0000000000000000000000000000000000000000..71c8f06646e12259f2c95d7ecf3f498a12c790c9 --- /dev/null +++ b/cocoirun/Yi-Ko-6B-instruct-v1.2/result_2024-01-08 06:38:38.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3506825938566553, + "acc_stderr": 0.013944635930726094, + "acc_norm": 0.39590443686006827, + "acc_norm_stderr": 0.014291228393536585 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3952399920334595, + "acc_stderr": 0.004879030010598925, + "acc_norm": 0.5301732722565226, + "acc_norm_stderr": 0.004980687467486101 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.0381107966983353, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.0381107966983353 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5631067961165048, + "acc_stderr": 0.04911147107365777, + "acc_norm": 0.5631067961165048, + "acc_norm_stderr": 0.04911147107365777 + }, + 
"harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5632183908045977, + "acc_stderr": 0.01773647083780069, + "acc_norm": 0.5632183908045977, + "acc_norm_stderr": 0.01773647083780069 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4740740740740741, + "acc_stderr": 0.04313531696750574, + "acc_norm": 0.4740740740740741, + "acc_norm_stderr": 0.04313531696750574 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.42127659574468085, + "acc_stderr": 0.03227834510146268, + "acc_norm": 0.42127659574468085, + "acc_norm_stderr": 0.03227834510146268 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3313253012048193, + "acc_stderr": 0.03664314777288088, + "acc_norm": 0.3313253012048193, + "acc_norm_stderr": 0.03664314777288088 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4919614147909968, + "acc_stderr": 0.028394421370984545, + "acc_norm": 0.4919614147909968, + "acc_norm_stderr": 0.028394421370984545 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5112107623318386, + "acc_stderr": 0.033549366530984746, + "acc_norm": 0.5112107623318386, + "acc_norm_stderr": 0.033549366530984746 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48091603053435117, + "acc_stderr": 0.04382094705550989, + "acc_norm": 0.48091603053435117, + "acc_norm_stderr": 0.04382094705550989 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.03427308652999936, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.03427308652999936 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.43448275862068964, + "acc_stderr": 0.041307408795554966, + "acc_norm": 0.43448275862068964, + "acc_norm_stderr": 0.041307408795554966 + 
}, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.04617034827006717, + "acc_norm": 0.3137254901960784, + "acc_norm_stderr": 0.04617034827006717 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5168067226890757, + "acc_stderr": 0.03246013680375308, + "acc_norm": 0.5168067226890757, + "acc_norm_stderr": 0.03246013680375308 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.46923076923076923, + "acc_stderr": 0.02530295889085015, + "acc_norm": 0.46923076923076923, + "acc_norm_stderr": 0.02530295889085015 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.59, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.59, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39408866995073893, + "acc_stderr": 0.03438157967036545, + "acc_norm": 0.39408866995073893, + "acc_norm_stderr": 0.03438157967036545 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5064516129032258, + "acc_stderr": 0.028441638233540505, + "acc_norm": 0.5064516129032258, + "acc_norm_stderr": 0.028441638233540505 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7350427350427351, + "acc_stderr": 0.028911208802749472, + "acc_norm": 0.7350427350427351, + "acc_norm_stderr": 0.028911208802749472 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.47547169811320755, + "acc_stderr": 0.030735822206205608, + "acc_norm": 0.47547169811320755, + "acc_norm_stderr": 0.030735822206205608 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5545454545454546, + "acc_stderr": 0.047605488214603246, + "acc_norm": 0.5545454545454546, + "acc_norm_stderr": 
0.047605488214603246 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.337037037037037, + "acc_stderr": 0.028820884666253255, + "acc_norm": 0.337037037037037, + "acc_norm_stderr": 0.028820884666253255 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.038020397601079024, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.038020397601079024 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6069651741293532, + "acc_stderr": 0.0345368246603156, + "acc_norm": 0.6069651741293532, + "acc_norm_stderr": 0.0345368246603156 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3988439306358382, + "acc_stderr": 0.03733626655383509, + "acc_norm": 0.3988439306358382, + "acc_norm_stderr": 0.03733626655383509 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.36243386243386244, + "acc_stderr": 0.024757473902752052, + "acc_norm": 0.36243386243386244, + "acc_norm_stderr": 0.024757473902752052 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.04122728707651283, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.04122728707651283 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956913, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956913 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.45664739884393063, + "acc_stderr": 0.02681771813034892, + "acc_norm": 0.45664739884393063, + "acc_norm_stderr": 0.02681771813034892 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4723926380368098, + "acc_stderr": 0.03922378290610988, + "acc_norm": 0.4723926380368098, + "acc_norm_stderr": 0.03922378290610988 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4845679012345679, + "acc_stderr": 0.027807490044276215, + "acc_norm": 
0.4845679012345679, + "acc_norm_stderr": 0.027807490044276215 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6373056994818653, + "acc_stderr": 0.03469713791704372, + "acc_norm": 0.6373056994818653, + "acc_norm_stderr": 0.03469713791704372 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.32456140350877194, + "acc_stderr": 0.04404556157374768, + "acc_norm": 0.32456140350877194, + "acc_norm_stderr": 0.04404556157374768 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6238532110091743, + "acc_stderr": 0.020769231968205078, + "acc_norm": 0.6238532110091743, + "acc_norm_stderr": 0.020769231968205078 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.31746031746031744, + "acc_stderr": 0.0416345303130286, + "acc_norm": 0.31746031746031744, + "acc_norm_stderr": 0.0416345303130286 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.434640522875817, + "acc_stderr": 0.028384256704883037, + "acc_norm": 0.434640522875817, + "acc_norm_stderr": 0.028384256704883037 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6446280991735537, + "acc_stderr": 0.0436923632657398, + "acc_norm": 0.6446280991735537, + "acc_norm_stderr": 0.0436923632657398 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.47368421052631576, + "acc_stderr": 0.04063302731486671, + "acc_norm": 0.47368421052631576, + "acc_norm_stderr": 0.04063302731486671 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.43790849673202614, + "acc_stderr": 0.020071257886886525, + "acc_norm": 0.43790849673202614, + "acc_norm_stderr": 0.020071257886886525 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.35815602836879434, + 
"acc_stderr": 0.028602085862759422, + "acc_norm": 0.35815602836879434, + "acc_norm_stderr": 0.028602085862759422 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.042878587513404544, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.042878587513404544 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3425925925925926, + "acc_stderr": 0.03236585252602157, + "acc_norm": 0.3425925925925926, + "acc_norm_stderr": 0.03236585252602157 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.25251396648044694, + "acc_stderr": 0.014530330201468638, + "acc_norm": 0.25251396648044694, + "acc_norm_stderr": 0.014530330201468638 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4007352941176471, + "acc_stderr": 0.029768263528933112, + "acc_norm": 0.4007352941176471, + "acc_norm_stderr": 0.029768263528933112 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4204081632653061, + "acc_stderr": 0.03160106993449604, + "acc_norm": 0.4204081632653061, + "acc_norm_stderr": 0.03160106993449604 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6413502109704642, + "acc_stderr": 0.03121956944530184, + "acc_norm": 0.6413502109704642, + "acc_norm_stderr": 0.03121956944530184 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3409387222946545, + "acc_stderr": 0.01210681720306721, + "acc_norm": 0.3409387222946545, + "acc_norm_stderr": 0.01210681720306721 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5588235294117647, + "acc_stderr": 0.034849415144292316, + "acc_norm": 0.5588235294117647, + "acc_norm_stderr": 0.034849415144292316 + }, + 
"harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5393939393939394, + "acc_stderr": 0.03892207016552012, + "acc_norm": 0.5393939393939394, + "acc_norm_stderr": 0.03892207016552012 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.34394124847001223, + "mc1_stderr": 0.016629087514276764, + "mc2": 0.4751707526182113, + "mc2_stderr": 0.015103772326605895 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4911452184179457, + "acc_stderr": 0.017187658199336736, + "acc_norm": 0.5147579693034239, + "acc_norm_stderr": 0.01718286443499856 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + 
"harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "cocoirun/Yi-Ko-6B-instruct-v1.2", + "model_sha": "9509e3c5ea8bbe0911a7a90024c56dc5fced0d77", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/cocoirun/Yi-Ko-6B-instruct-v1.3/result_2024-01-08 09:55:04.json b/cocoirun/Yi-Ko-6B-instruct-v1.3/result_2024-01-08 09:55:04.json new file mode 100644 index 0000000000000000000000000000000000000000..e5de00b208da7f8a0868c4bca2aecf7a2b0ad528 --- /dev/null +++ b/cocoirun/Yi-Ko-6B-instruct-v1.3/result_2024-01-08 09:55:04.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + 
"acc": 0.3438566552901024, + "acc_stderr": 0.013880644570156211, + "acc_norm": 0.38993174061433444, + "acc_norm_stderr": 0.01425295984889289 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3963353913563035, + "acc_stderr": 0.004881359589148994, + "acc_norm": 0.5311690898227445, + "acc_norm_stderr": 0.00498007670739244 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4853801169590643, + "acc_stderr": 0.038331852752130205, + "acc_norm": 0.4853801169590643, + "acc_norm_stderr": 0.038331852752130205 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6116504854368932, + "acc_stderr": 0.04825729337356389, + "acc_norm": 0.6116504854368932, + "acc_norm_stderr": 0.04825729337356389 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5632183908045977, + "acc_stderr": 0.01773647083780069, + "acc_norm": 0.5632183908045977, + "acc_norm_stderr": 0.01773647083780069 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.04316378599511326, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.04316378599511326 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847415, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847415 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4, + "acc_stderr": 0.03202563076101736, + "acc_norm": 0.4, + "acc_norm_stderr": 0.03202563076101736 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3313253012048193, + "acc_stderr": 0.03664314777288088, + "acc_norm": 0.3313253012048193, + "acc_norm_stderr": 0.03664314777288088 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5016077170418006, + "acc_stderr": 0.02839794490780661, + "acc_norm": 0.5016077170418006, + "acc_norm_stderr": 0.02839794490780661 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.48878923766816146, + "acc_stderr": 0.033549366530984746, + "acc_norm": 0.48878923766816146, + "acc_norm_stderr": 0.033549366530984746 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 
0.46564885496183206, + "acc_stderr": 0.043749285605997376, + "acc_norm": 0.46564885496183206, + "acc_norm_stderr": 0.043749285605997376 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6262626262626263, + "acc_stderr": 0.034468977386593325, + "acc_norm": 0.6262626262626263, + "acc_norm_stderr": 0.034468977386593325 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.04104269211806232, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.04104269211806232 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.04389869956808778, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.04389869956808778 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5168067226890757, + "acc_stderr": 0.03246013680375308, + "acc_norm": 0.5168067226890757, + "acc_norm_stderr": 0.03246013680375308 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5, + "acc_stderr": 0.02535100632816969, + "acc_norm": 0.5, + "acc_norm_stderr": 0.02535100632816969 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5185185185185185, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.5185185185185185, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4039408866995074, + "acc_stderr": 0.03452453903822039, + "acc_norm": 0.4039408866995074, + "acc_norm_stderr": 0.03452453903822039 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 
0.5032258064516129, + "acc_stderr": 0.02844341422643831, + "acc_norm": 0.5032258064516129, + "acc_norm_stderr": 0.02844341422643831 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7051282051282052, + "acc_stderr": 0.029872577708891186, + "acc_norm": 0.7051282051282052, + "acc_norm_stderr": 0.029872577708891186 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5094339622641509, + "acc_stderr": 0.0307673947078081, + "acc_norm": 0.5094339622641509, + "acc_norm_stderr": 0.0307673947078081 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.0478833976870286, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.0478833976870286 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3296296296296296, + "acc_stderr": 0.028661201116524586, + "acc_norm": 0.3296296296296296, + "acc_norm_stderr": 0.028661201116524586 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.32450331125827814, + "acc_stderr": 0.038227469376587525, + "acc_norm": 0.32450331125827814, + "acc_norm_stderr": 0.038227469376587525 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.582089552238806, + "acc_stderr": 0.034875586404620636, + "acc_norm": 0.582089552238806, + "acc_norm_stderr": 0.034875586404620636 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.45664739884393063, + "acc_stderr": 0.03798106566014498, + "acc_norm": 0.45664739884393063, + "acc_norm_stderr": 0.03798106566014498 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3968253968253968, + "acc_stderr": 0.025197101074246494, + "acc_norm": 0.3968253968253968, + "acc_norm_stderr": 0.025197101074246494 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4097222222222222, + "acc_stderr": 0.04112490974670788, + "acc_norm": 0.4097222222222222, + "acc_norm_stderr": 0.04112490974670788 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, 
+ "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.56, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.56, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4393063583815029, + "acc_stderr": 0.026720034380514998, + "acc_norm": 0.4393063583815029, + "acc_norm_stderr": 0.026720034380514998 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5214723926380368, + "acc_stderr": 0.03924746876751129, + "acc_norm": 0.5214723926380368, + "acc_norm_stderr": 0.03924746876751129 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4691358024691358, + "acc_stderr": 0.027767689606833925, + "acc_norm": 0.4691358024691358, + "acc_norm_stderr": 0.027767689606833925 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5854922279792746, + "acc_stderr": 0.03555300319557669, + "acc_norm": 0.5854922279792746, + "acc_norm_stderr": 0.03555300319557669 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.34210526315789475, + "acc_stderr": 0.04462917535336937, + "acc_norm": 0.34210526315789475, + "acc_norm_stderr": 0.04462917535336937 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6477064220183486, + "acc_stderr": 0.020480568843999, + "acc_norm": 0.6477064220183486, + "acc_norm_stderr": 0.020480568843999 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.04134913018303316, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.04134913018303316 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.02843109544417665, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.02843109544417665 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + 
"harness|ko_mmlu_international_law|5": { + "acc": 0.5785123966942148, + "acc_stderr": 0.04507732278775087, + "acc_norm": 0.5785123966942148, + "acc_norm_stderr": 0.04507732278775087 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5, + "acc_stderr": 0.04068942293855797, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04068942293855797 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.40522875816993464, + "acc_stderr": 0.01986115519382916, + "acc_norm": 0.40522875816993464, + "acc_norm_stderr": 0.01986115519382916 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.36879432624113473, + "acc_stderr": 0.02878222756134724, + "acc_norm": 0.36879432624113473, + "acc_norm_stderr": 0.02878222756134724 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.33035714285714285, + "acc_stderr": 0.04464285714285712, + "acc_norm": 0.33035714285714285, + "acc_norm_stderr": 0.04464285714285712 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.03275773486100998, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.03275773486100998 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23687150837988827, + "acc_stderr": 0.014219570788103984, + "acc_norm": 0.23687150837988827, + "acc_norm_stderr": 0.014219570788103984 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.41911764705882354, + "acc_stderr": 0.029972807170464626, + "acc_norm": 0.41911764705882354, + "acc_norm_stderr": 0.029972807170464626 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3836734693877551, + "acc_stderr": 0.031130880396235922, + "acc_norm": 0.3836734693877551, + 
"acc_norm_stderr": 0.031130880396235922 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6033755274261603, + "acc_stderr": 0.03184399873811226, + "acc_norm": 0.6033755274261603, + "acc_norm_stderr": 0.03184399873811226 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3363754889178618, + "acc_stderr": 0.01206708307945223, + "acc_norm": 0.3363754889178618, + "acc_norm_stderr": 0.01206708307945223 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5049019607843137, + "acc_stderr": 0.035091433756067866, + "acc_norm": 0.5049019607843137, + "acc_norm_stderr": 0.035091433756067866 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5575757575757576, + "acc_stderr": 0.038783721137112745, + "acc_norm": 0.5575757575757576, + "acc_norm_stderr": 0.038783721137112745 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.34761321909424725, + "mc1_stderr": 0.016670769188897306, + "mc2": 0.4737193731919921, + "mc2_stderr": 0.015111963290355194 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4734356552538371, + "acc_stderr": 0.017166075717577747, + "acc_norm": 0.5029515938606848, + "acc_norm_stderr": 0.017190054580194694 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + 
"harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "cocoirun/Yi-Ko-6B-instruct-v1.3", + "model_sha": 
"bcedeb9a4d4d9d1e1b9db519ee46bf072f3d160b", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/cocoirun/Yi-Ko-6B-instruct-v1.4/result_2024-01-08 11:09:22.json b/cocoirun/Yi-Ko-6B-instruct-v1.4/result_2024-01-08 11:09:22.json new file mode 100644 index 0000000000000000000000000000000000000000..a881bb251283aab7a04e7be29e8b1c1c1c8e81a7 --- /dev/null +++ b/cocoirun/Yi-Ko-6B-instruct-v1.4/result_2024-01-08 11:09:22.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.32593856655290104, + "acc_stderr": 0.013697432466693244, + "acc_norm": 0.38054607508532423, + "acc_norm_stderr": 0.014188277712349819 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3980282812188807, + "acc_stderr": 0.004884909544477099, + "acc_norm": 0.5307707627962557, + "acc_norm_stderr": 0.0049803234000310795 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5380116959064327, + "acc_stderr": 0.03823727092882307, + "acc_norm": 0.5380116959064327, + "acc_norm_stderr": 0.03823727092882307 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6213592233009708, + "acc_stderr": 0.04802694698258974, + "acc_norm": 0.6213592233009708, + "acc_norm_stderr": 0.04802694698258974 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5644955300127714, + "acc_stderr": 0.017730589927926588, + "acc_norm": 0.5644955300127714, + "acc_norm_stderr": 0.017730589927926588 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.43703703703703706, + "acc_stderr": 0.04284958639753398, + "acc_norm": 0.43703703703703706, + "acc_norm_stderr": 0.04284958639753398 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816505, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816505 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4425531914893617, + "acc_stderr": 0.03246956919789958, + "acc_norm": 
0.4425531914893617, + "acc_norm_stderr": 0.03246956919789958 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4036144578313253, + "acc_stderr": 0.03819486140758396, + "acc_norm": 0.4036144578313253, + "acc_norm_stderr": 0.03819486140758396 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5048231511254019, + "acc_stderr": 0.028396770444111298, + "acc_norm": 0.5048231511254019, + "acc_norm_stderr": 0.028396770444111298 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.48878923766816146, + "acc_stderr": 0.033549366530984746, + "acc_norm": 0.48878923766816146, + "acc_norm_stderr": 0.033549366530984746 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5114503816793893, + "acc_stderr": 0.043841400240780176, + "acc_norm": 0.5114503816793893, + "acc_norm_stderr": 0.043841400240780176 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.44, + "acc_stderr": 0.0498887651569859, + "acc_norm": 0.44, + "acc_norm_stderr": 0.0498887651569859 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6212121212121212, + "acc_stderr": 0.03456088731993747, + "acc_norm": 0.6212121212121212, + "acc_norm_stderr": 0.03456088731993747 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4413793103448276, + "acc_stderr": 0.04137931034482758, + "acc_norm": 0.4413793103448276, + "acc_norm_stderr": 0.04137931034482758 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237655, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237655 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5, + "acc_stderr": 0.032478490123081544, + "acc_norm": 0.5, + "acc_norm_stderr": 0.032478490123081544 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.46923076923076923, + "acc_stderr": 0.025302958890850154, + "acc_norm": 0.46923076923076923, + "acc_norm_stderr": 0.025302958890850154 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.58, + "acc_stderr": 
0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.04803752235190193, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.04803752235190193 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3399014778325123, + "acc_stderr": 0.033327690684107895, + "acc_norm": 0.3399014778325123, + "acc_norm_stderr": 0.033327690684107895 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4774193548387097, + "acc_stderr": 0.028414985019707868, + "acc_norm": 0.4774193548387097, + "acc_norm_stderr": 0.028414985019707868 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6923076923076923, + "acc_stderr": 0.030236389942173095, + "acc_norm": 0.6923076923076923, + "acc_norm_stderr": 0.030236389942173095 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4830188679245283, + "acc_stderr": 0.030755120364119905, + "acc_norm": 0.4830188679245283, + "acc_norm_stderr": 0.030755120364119905 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4909090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.4909090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3037037037037037, + "acc_stderr": 0.028037929969114982, + "acc_norm": 0.3037037037037037, + "acc_norm_stderr": 0.028037929969114982 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.32450331125827814, + "acc_stderr": 0.03822746937658753, + "acc_norm": 0.32450331125827814, + "acc_norm_stderr": 0.03822746937658753 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.572139303482587, + "acc_stderr": 0.03498541988407795, + "acc_norm": 0.572139303482587, + "acc_norm_stderr": 0.03498541988407795 + }, + "harness|ko_mmlu_college_medicine|5": { + 
"acc": 0.4046242774566474, + "acc_stderr": 0.0374246119388725, + "acc_norm": 0.4046242774566474, + "acc_norm_stderr": 0.0374246119388725 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.328042328042328, + "acc_stderr": 0.024180497164376896, + "acc_norm": 0.328042328042328, + "acc_norm_stderr": 0.024180497164376896 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4027777777777778, + "acc_stderr": 0.04101405519842426, + "acc_norm": 0.4027777777777778, + "acc_norm_stderr": 0.04101405519842426 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421296, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421296 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.57, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.57, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5086705202312138, + "acc_stderr": 0.026915047355369804, + "acc_norm": 0.5086705202312138, + "acc_norm_stderr": 0.026915047355369804 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4785276073619632, + "acc_stderr": 0.039247468767511305, + "acc_norm": 0.4785276073619632, + "acc_norm_stderr": 0.039247468767511305 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.027815973433878014, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.027815973433878014 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5958549222797928, + "acc_stderr": 0.035415085788840193, + "acc_norm": 0.5958549222797928, + "acc_norm_stderr": 0.035415085788840193 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.34210526315789475, + "acc_stderr": 0.044629175353369376, + "acc_norm": 0.34210526315789475, + "acc_norm_stderr": 0.044629175353369376 + }, + 
"harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6036697247706422, + "acc_stderr": 0.020971469947900525, + "acc_norm": 0.6036697247706422, + "acc_norm_stderr": 0.020971469947900525 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.04285714285714281, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.04285714285714281 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.028431095444176643, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.028431095444176643 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.53, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.53, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6776859504132231, + "acc_stderr": 0.042664163633521685, + "acc_norm": 0.6776859504132231, + "acc_norm_stderr": 0.042664163633521685 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.45394736842105265, + "acc_stderr": 0.04051646342874141, + "acc_norm": 0.45394736842105265, + "acc_norm_stderr": 0.04051646342874141 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.43300653594771243, + "acc_stderr": 0.020045442473324227, + "acc_norm": 0.43300653594771243, + "acc_norm_stderr": 0.020045442473324227 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.34397163120567376, + "acc_stderr": 0.028338017428611324, + "acc_norm": 0.34397163120567376, + "acc_norm_stderr": 0.028338017428611324 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2767857142857143, + "acc_stderr": 0.042466243366976235, + "acc_norm": 0.2767857142857143, + "acc_norm_stderr": 0.042466243366976235 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.32407407407407407, + "acc_stderr": 0.03191923445686185, + "acc_norm": 0.32407407407407407, + "acc_norm_stderr": 0.03191923445686185 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23910614525139665, + "acc_stderr": 0.014265554192331165, + 
"acc_norm": 0.23910614525139665, + "acc_norm_stderr": 0.014265554192331165 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4117647058823529, + "acc_stderr": 0.02989616303312547, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.02989616303312547 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.39591836734693875, + "acc_stderr": 0.03130802899065685, + "acc_norm": 0.39591836734693875, + "acc_norm_stderr": 0.03130802899065685 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6371308016877637, + "acc_stderr": 0.03129920825530213, + "acc_norm": 0.6371308016877637, + "acc_norm_stderr": 0.03129920825530213 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.34419817470664926, + "acc_stderr": 0.012134433741002572, + "acc_norm": 0.34419817470664926, + "acc_norm_stderr": 0.012134433741002572 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5196078431372549, + "acc_stderr": 0.03506612560524866, + "acc_norm": 0.5196078431372549, + "acc_norm_stderr": 0.03506612560524866 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.03888176921674101, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.03888176921674101 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3671970624235006, + "mc1_stderr": 0.016874805001453184, + "mc2": 0.5050750293974411, + "mc2_stderr": 0.015319101173412812 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5064935064935064, + "acc_stderr": 0.017188904359077318, + "acc_norm": 0.5206611570247934, + "acc_norm_stderr": 0.017175671279836442 + } + }, + "versions": { + "all": 0, + 
"harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + 
"harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "cocoirun/Yi-Ko-6B-instruct-v1.4", + "model_sha": "872265b708df00f163503e29959209aa2c017057", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/cocoirun/Yi-Ko-6B-instruct-v1.5-DPO/result_2024-01-16 04:55:55.json b/cocoirun/Yi-Ko-6B-instruct-v1.5-DPO/result_2024-01-16 04:55:55.json new file mode 100644 index 0000000000000000000000000000000000000000..5d48fa1a4283c2d497234b5b044a4faa52e2bd3a --- /dev/null +++ b/cocoirun/Yi-Ko-6B-instruct-v1.5-DPO/result_2024-01-16 04:55:55.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3319112627986348, + "acc_stderr": 0.01376098820088054, + "acc_norm": 0.39334470989761094, + "acc_norm_stderr": 0.014275101465693026 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3981278629755029, + "acc_stderr": 0.004885116465550273, + "acc_norm": 0.5326628161720772, + "acc_norm_stderr": 0.004979123236507972 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5087719298245614, + "acc_stderr": 0.03834234744164993, + "acc_norm": 0.5087719298245614, + "acc_norm_stderr": 0.03834234744164993 + }, + "harness|ko_mmlu_management|5": { 
+ "acc": 0.6019417475728155, + "acc_stderr": 0.048467482539772386, + "acc_norm": 0.6019417475728155, + "acc_norm_stderr": 0.048467482539772386 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5530012771392082, + "acc_stderr": 0.017779225233394216, + "acc_norm": 0.5530012771392082, + "acc_norm_stderr": 0.017779225233394216 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.04309732901036354, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.04309732901036354 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.40425531914893614, + "acc_stderr": 0.03208115750788684, + "acc_norm": 0.40425531914893614, + "acc_norm_stderr": 0.03208115750788684 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3614457831325301, + "acc_stderr": 0.0374005938202932, + "acc_norm": 0.3614457831325301, + "acc_norm_stderr": 0.0374005938202932 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5112540192926045, + "acc_stderr": 0.028390897396863533, + "acc_norm": 0.5112540192926045, + "acc_norm_stderr": 0.028390897396863533 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.47533632286995514, + "acc_stderr": 0.03351695167652628, + "acc_norm": 0.47533632286995514, + "acc_norm_stderr": 0.03351695167652628 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5648854961832062, + "acc_stderr": 0.04348208051644858, + "acc_norm": 0.5648854961832062, + "acc_norm_stderr": 0.04348208051644858 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.42, + "acc_stderr": 0.04960449637488583, + "acc_norm": 0.42, + "acc_norm_stderr": 0.04960449637488583 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6161616161616161, + "acc_stderr": 0.03464881675016338, + "acc_norm": 0.6161616161616161, + "acc_norm_stderr": 0.03464881675016338 + }, + "harness|ko_mmlu_electrical_engineering|5": 
{ + "acc": 0.47586206896551725, + "acc_stderr": 0.041618085035015295, + "acc_norm": 0.47586206896551725, + "acc_norm_stderr": 0.041618085035015295 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.043364327079931785, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.043364327079931785 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5378151260504201, + "acc_stderr": 0.0323854694875898, + "acc_norm": 0.5378151260504201, + "acc_norm_stderr": 0.0323854694875898 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.48205128205128206, + "acc_stderr": 0.02533466708095496, + "acc_norm": 0.48205128205128206, + "acc_norm_stderr": 0.02533466708095496 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5370370370370371, + "acc_stderr": 0.04820403072760628, + "acc_norm": 0.5370370370370371, + "acc_norm_stderr": 0.04820403072760628 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4039408866995074, + "acc_stderr": 0.03452453903822039, + "acc_norm": 0.4039408866995074, + "acc_norm_stderr": 0.03452453903822039 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4935483870967742, + "acc_stderr": 0.02844163823354051, + "acc_norm": 0.4935483870967742, + "acc_norm_stderr": 0.02844163823354051 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.688034188034188, + "acc_stderr": 0.030351527323344948, + "acc_norm": 0.688034188034188, + "acc_norm_stderr": 0.030351527323344948 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.49433962264150944, + "acc_stderr": 0.03077090076385131, + "acc_norm": 0.49433962264150944, + "acc_norm_stderr": 0.03077090076385131 + }, + 
"harness|ko_mmlu_public_relations|5": { + "acc": 0.5272727272727272, + "acc_stderr": 0.04782001791380061, + "acc_norm": 0.5272727272727272, + "acc_norm_stderr": 0.04782001791380061 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.028317533496066475, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.028317533496066475 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.037804458505267334, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.037804458505267334 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6318407960199005, + "acc_stderr": 0.03410410565495302, + "acc_norm": 0.6318407960199005, + "acc_norm_stderr": 0.03410410565495302 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4046242774566474, + "acc_stderr": 0.0374246119388725, + "acc_norm": 0.4046242774566474, + "acc_norm_stderr": 0.0374246119388725 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.335978835978836, + "acc_stderr": 0.024326310529149135, + "acc_norm": 0.335978835978836, + "acc_norm_stderr": 0.024326310529149135 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.04155319955593147, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.04155319955593147 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421296, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421296 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.53, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.53, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.47109826589595377, + "acc_stderr": 0.026874085883518348, + "acc_norm": 0.47109826589595377, + "acc_norm_stderr": 0.026874085883518348 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4539877300613497, + "acc_stderr": 0.0391170190467718, + "acc_norm": 0.4539877300613497, + 
"acc_norm_stderr": 0.0391170190467718 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5123456790123457, + "acc_stderr": 0.027812262269327228, + "acc_norm": 0.5123456790123457, + "acc_norm_stderr": 0.027812262269327228 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6062176165803109, + "acc_stderr": 0.035260770955482405, + "acc_norm": 0.6062176165803109, + "acc_norm_stderr": 0.035260770955482405 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3684210526315789, + "acc_stderr": 0.04537815354939391, + "acc_norm": 0.3684210526315789, + "acc_norm_stderr": 0.04537815354939391 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6036697247706422, + "acc_stderr": 0.020971469947900525, + "acc_norm": 0.6036697247706422, + "acc_norm_stderr": 0.020971469947900525 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.31746031746031744, + "acc_stderr": 0.04163453031302859, + "acc_norm": 0.31746031746031744, + "acc_norm_stderr": 0.04163453031302859 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.028452639985088013, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.028452639985088013 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.628099173553719, + "acc_stderr": 0.04412015806624504, + "acc_norm": 0.628099173553719, + "acc_norm_stderr": 0.04412015806624504 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.46710526315789475, + "acc_stderr": 0.040601270352363966, + "acc_norm": 0.46710526315789475, + "acc_norm_stderr": 0.040601270352363966 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4019607843137255, + "acc_stderr": 0.019835176484375387, + 
"acc_norm": 0.4019607843137255, + "acc_norm_stderr": 0.019835176484375387 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3723404255319149, + "acc_stderr": 0.028838921471251455, + "acc_norm": 0.3723404255319149, + "acc_norm_stderr": 0.028838921471251455 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.042878587513404544, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.042878587513404544 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.32407407407407407, + "acc_stderr": 0.03191923445686185, + "acc_norm": 0.32407407407407407, + "acc_norm_stderr": 0.03191923445686185 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4007352941176471, + "acc_stderr": 0.02976826352893311, + "acc_norm": 0.4007352941176471, + "acc_norm_stderr": 0.02976826352893311 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4122448979591837, + "acc_stderr": 0.03151236044674281, + "acc_norm": 0.4122448979591837, + "acc_norm_stderr": 0.03151236044674281 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.620253164556962, + "acc_stderr": 0.031591887529658504, + "acc_norm": 0.620253164556962, + "acc_norm_stderr": 0.031591887529658504 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.34419817470664926, + "acc_stderr": 0.01213443374100257, + "acc_norm": 0.34419817470664926, + "acc_norm_stderr": 0.01213443374100257 + }, + 
"harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5490196078431373, + "acc_stderr": 0.03492406104163613, + "acc_norm": 0.5490196078431373, + "acc_norm_stderr": 0.03492406104163613 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5696969696969697, + "acc_stderr": 0.038662259628790774, + "acc_norm": 0.5696969696969697, + "acc_norm_stderr": 0.038662259628790774 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.35495716034271724, + "mc1_stderr": 0.0167508623813759, + "mc2": 0.48369975490627737, + "mc2_stderr": 0.015235836680653266 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.45690672963400236, + "acc_stderr": 0.01712638909308678, + "acc_norm": 0.47461629279811096, + "acc_norm_stderr": 0.017168187201429253 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 
1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "cocoirun/Yi-Ko-6B-instruct-v1.5-DPO", + "model_sha": "e80cfdbd631da5fa1fcd7efb3b7070c20d30abf3", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/cocoirun/Yi-Ko-6B-instruct-v1.5/result_2024-01-10 05:25:51.json b/cocoirun/Yi-Ko-6B-instruct-v1.5/result_2024-01-10 05:25:51.json new file mode 100644 index 
0000000000000000000000000000000000000000..7482e9fd45c874bc3e2555f2b8a3f458243c2edb --- /dev/null +++ b/cocoirun/Yi-Ko-6B-instruct-v1.5/result_2024-01-10 05:25:51.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3302047781569966, + "acc_stderr": 0.013743085603760427, + "acc_norm": 0.3916382252559727, + "acc_norm_stderr": 0.014264122124938213 + }, + "harness|ko_hellaswag|10": { + "acc": 0.39822744473212507, + "acc_stderr": 0.004885323175701675, + "acc_norm": 0.533061143198566, + "acc_norm_stderr": 0.004978861409119807 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5029239766081871, + "acc_stderr": 0.03834759370936839, + "acc_norm": 0.5029239766081871, + "acc_norm_stderr": 0.03834759370936839 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6019417475728155, + "acc_stderr": 0.048467482539772386, + "acc_norm": 0.6019417475728155, + "acc_norm_stderr": 0.048467482539772386 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.017769250583533253, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.017769250583533253 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45925925925925926, + "acc_stderr": 0.04304979692464244, + "acc_norm": 0.45925925925925926, + "acc_norm_stderr": 0.04304979692464244 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4, + "acc_stderr": 0.03202563076101737, + "acc_norm": 0.4, + "acc_norm_stderr": 0.03202563076101737 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3614457831325301, + "acc_stderr": 0.0374005938202932, + "acc_norm": 0.3614457831325301, + "acc_norm_stderr": 0.0374005938202932 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5112540192926045, + "acc_stderr": 0.028390897396863533, + "acc_norm": 0.5112540192926045, + "acc_norm_stderr": 0.028390897396863533 + }, + 
"harness|ko_mmlu_human_aging|5": { + "acc": 0.47533632286995514, + "acc_stderr": 0.03351695167652628, + "acc_norm": 0.47533632286995514, + "acc_norm_stderr": 0.03351695167652628 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5648854961832062, + "acc_stderr": 0.04348208051644858, + "acc_norm": 0.5648854961832062, + "acc_norm_stderr": 0.04348208051644858 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.42, + "acc_stderr": 0.04960449637488583, + "acc_norm": 0.42, + "acc_norm_stderr": 0.04960449637488583 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6212121212121212, + "acc_stderr": 0.03456088731993747, + "acc_norm": 0.6212121212121212, + "acc_norm_stderr": 0.03456088731993747 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.47586206896551725, + "acc_stderr": 0.041618085035015295, + "acc_norm": 0.47586206896551725, + "acc_norm_stderr": 0.041618085035015295 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.043364327079931785, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.043364327079931785 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.542016806722689, + "acc_stderr": 0.03236361111951941, + "acc_norm": 0.542016806722689, + "acc_norm_stderr": 0.03236361111951941 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4846153846153846, + "acc_stderr": 0.025339003010106522, + "acc_norm": 0.4846153846153846, + "acc_norm_stderr": 0.025339003010106522 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.57, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.57, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5462962962962963, + "acc_stderr": 0.04812917324536823, + "acc_norm": 0.5462962962962963, + "acc_norm_stderr": 
0.04812917324536823 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4039408866995074, + "acc_stderr": 0.03452453903822039, + "acc_norm": 0.4039408866995074, + "acc_norm_stderr": 0.03452453903822039 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4967741935483871, + "acc_stderr": 0.02844341422643833, + "acc_norm": 0.4967741935483871, + "acc_norm_stderr": 0.02844341422643833 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.688034188034188, + "acc_stderr": 0.030351527323344948, + "acc_norm": 0.688034188034188, + "acc_norm_stderr": 0.030351527323344948 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4981132075471698, + "acc_stderr": 0.030772653642075657, + "acc_norm": 0.4981132075471698, + "acc_norm_stderr": 0.030772653642075657 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5272727272727272, + "acc_stderr": 0.04782001791380061, + "acc_norm": 0.5272727272727272, + "acc_norm_stderr": 0.04782001791380061 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.31851851851851853, + "acc_stderr": 0.02840653309060846, + "acc_norm": 0.31851851851851853, + "acc_norm_stderr": 0.02840653309060846 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.037804458505267334, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.037804458505267334 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6268656716417911, + "acc_stderr": 0.034198326081760065, + "acc_norm": 0.6268656716417911, + "acc_norm_stderr": 0.034198326081760065 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4046242774566474, + "acc_stderr": 0.0374246119388725, + "acc_norm": 0.4046242774566474, + "acc_norm_stderr": 0.0374246119388725 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.024278568024307706, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.024278568024307706 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4444444444444444, + 
"acc_stderr": 0.04155319955593147, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.04155319955593147 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.53, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.53, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.47398843930635837, + "acc_stderr": 0.02688264343402289, + "acc_norm": 0.47398843930635837, + "acc_norm_stderr": 0.02688264343402289 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4539877300613497, + "acc_stderr": 0.0391170190467718, + "acc_norm": 0.4539877300613497, + "acc_norm_stderr": 0.0391170190467718 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5092592592592593, + "acc_stderr": 0.027815973433878014, + "acc_norm": 0.5092592592592593, + "acc_norm_stderr": 0.027815973433878014 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6062176165803109, + "acc_stderr": 0.035260770955482405, + "acc_norm": 0.6062176165803109, + "acc_norm_stderr": 0.035260770955482405 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.37719298245614036, + "acc_stderr": 0.04559522141958216, + "acc_norm": 0.37719298245614036, + "acc_norm_stderr": 0.04559522141958216 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6036697247706422, + "acc_stderr": 0.020971469947900525, + "acc_norm": 0.6036697247706422, + "acc_norm_stderr": 0.020971469947900525 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.31746031746031744, + "acc_stderr": 0.04163453031302859, + "acc_norm": 0.31746031746031744, + "acc_norm_stderr": 0.04163453031302859 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 
0.4444444444444444, + "acc_stderr": 0.028452639985088013, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.028452639985088013 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.628099173553719, + "acc_stderr": 0.04412015806624504, + "acc_norm": 0.628099173553719, + "acc_norm_stderr": 0.04412015806624504 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.46710526315789475, + "acc_stderr": 0.040601270352363966, + "acc_norm": 0.46710526315789475, + "acc_norm_stderr": 0.040601270352363966 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4035947712418301, + "acc_stderr": 0.019848280168401154, + "acc_norm": 0.4035947712418301, + "acc_norm_stderr": 0.019848280168401154 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3723404255319149, + "acc_stderr": 0.028838921471251455, + "acc_norm": 0.3723404255319149, + "acc_norm_stderr": 0.028838921471251455 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.30357142857142855, + "acc_stderr": 0.04364226155841044, + "acc_norm": 0.30357142857142855, + "acc_norm_stderr": 0.04364226155841044 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.33796296296296297, + "acc_stderr": 0.03225941352631295, + "acc_norm": 0.33796296296296297, + "acc_norm_stderr": 0.03225941352631295 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + 
"harness|ko_mmlu_professional_medicine|5": { + "acc": 0.39705882352941174, + "acc_stderr": 0.02972215209928006, + "acc_norm": 0.39705882352941174, + "acc_norm_stderr": 0.02972215209928006 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4122448979591837, + "acc_stderr": 0.03151236044674281, + "acc_norm": 0.4122448979591837, + "acc_norm_stderr": 0.03151236044674281 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.620253164556962, + "acc_stderr": 0.031591887529658504, + "acc_norm": 0.620253164556962, + "acc_norm_stderr": 0.031591887529658504 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.34485006518904826, + "acc_stderr": 0.012139881006287066, + "acc_norm": 0.34485006518904826, + "acc_norm_stderr": 0.012139881006287066 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5490196078431373, + "acc_stderr": 0.03492406104163613, + "acc_norm": 0.5490196078431373, + "acc_norm_stderr": 0.03492406104163613 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5696969696969697, + "acc_stderr": 0.038662259628790774, + "acc_norm": 0.5696969696969697, + "acc_norm_stderr": 0.038662259628790774 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3561811505507956, + "mc1_stderr": 0.01676379072844634, + "mc2": 0.48361425036890654, + "mc2_stderr": 0.015235270316792369 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.45690672963400236, + "acc_stderr": 0.01712638909308678, + "acc_norm": 0.4734356552538371, + "acc_norm_stderr": 0.01716607571757774 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + 
"harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + 
"harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "cocoirun/Yi-Ko-6B-instruct-v1.5", + "model_sha": "9b54b9bb88aa9f4a9e2a0a45870f7e6f2b087de6", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/cocoirun/Yi-Ko-6B-instruct-v1.6/result_2024-01-18 06:54:36.json b/cocoirun/Yi-Ko-6B-instruct-v1.6/result_2024-01-18 06:54:36.json new file mode 100644 index 0000000000000000000000000000000000000000..9ec306f02df1866e6e83c404989077e6f7a10c47 --- /dev/null +++ b/cocoirun/Yi-Ko-6B-instruct-v1.6/result_2024-01-18 06:54:36.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3250853242320819, + "acc_stderr": 0.013688147309729119, + "acc_norm": 0.3890784982935154, + "acc_norm_stderr": 0.014247309976045607 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3957379008165704, + "acc_stderr": 0.004880092083408035, + "acc_norm": 0.5307707627962557, + "acc_norm_stderr": 0.0049803234000310795 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.49707602339181284, + "acc_stderr": 0.03834759370936839, + "acc_norm": 0.49707602339181284, + "acc_norm_stderr": 0.03834759370936839 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6407766990291263, + "acc_stderr": 0.04750458399041697, + "acc_norm": 0.6407766990291263, + "acc_norm_stderr": 0.04750458399041697 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5887611749680716, + "acc_stderr": 0.01759597190805657, + "acc_norm": 0.5887611749680716, + "acc_norm_stderr": 0.01759597190805657 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.043163785995113245, + 
"acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.043163785995113245 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39574468085106385, + "acc_stderr": 0.03196758697835362, + "acc_norm": 0.39574468085106385, + "acc_norm_stderr": 0.03196758697835362 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3433734939759036, + "acc_stderr": 0.03696584317010601, + "acc_norm": 0.3433734939759036, + "acc_norm_stderr": 0.03696584317010601 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5337620578778135, + "acc_stderr": 0.028333277109562807, + "acc_norm": 0.5337620578778135, + "acc_norm_stderr": 0.028333277109562807 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.47533632286995514, + "acc_stderr": 0.03351695167652628, + "acc_norm": 0.47533632286995514, + "acc_norm_stderr": 0.03351695167652628 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5343511450381679, + "acc_stderr": 0.04374928560599738, + "acc_norm": 0.5343511450381679, + "acc_norm_stderr": 0.04374928560599738 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.45, + "acc_stderr": 0.04999999999999999, + "acc_norm": 0.45, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6161616161616161, + "acc_stderr": 0.03464881675016338, + "acc_norm": 0.6161616161616161, + "acc_norm_stderr": 0.03464881675016338 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.496551724137931, + "acc_stderr": 0.041665675771015785, + "acc_norm": 0.496551724137931, + "acc_norm_stderr": 0.041665675771015785 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.041583075330832865, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.041583075330832865 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5210084033613446, + 
"acc_stderr": 0.032449808499900284, + "acc_norm": 0.5210084033613446, + "acc_norm_stderr": 0.032449808499900284 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4846153846153846, + "acc_stderr": 0.025339003010106522, + "acc_norm": 0.4846153846153846, + "acc_norm_stderr": 0.025339003010106522 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5833333333333334, + "acc_stderr": 0.04766075165356461, + "acc_norm": 0.5833333333333334, + "acc_norm_stderr": 0.04766075165356461 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.37438423645320196, + "acc_stderr": 0.03405155380561952, + "acc_norm": 0.37438423645320196, + "acc_norm_stderr": 0.03405155380561952 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5193548387096775, + "acc_stderr": 0.02842268740431211, + "acc_norm": 0.5193548387096775, + "acc_norm_stderr": 0.02842268740431211 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7478632478632479, + "acc_stderr": 0.02844796547623102, + "acc_norm": 0.7478632478632479, + "acc_norm_stderr": 0.02844796547623102 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4981132075471698, + "acc_stderr": 0.030772653642075657, + "acc_norm": 0.4981132075471698, + "acc_norm_stderr": 0.030772653642075657 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5272727272727272, + "acc_stderr": 0.04782001791380061, + "acc_norm": 0.5272727272727272, + "acc_norm_stderr": 0.04782001791380061 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.34444444444444444, + "acc_stderr": 0.028972648884844267, + "acc_norm": 0.34444444444444444, + "acc_norm_stderr": 0.028972648884844267 + }, + 
"harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3509933774834437, + "acc_stderr": 0.03896981964257375, + "acc_norm": 0.3509933774834437, + "acc_norm_stderr": 0.03896981964257375 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6169154228855721, + "acc_stderr": 0.034375193373382504, + "acc_norm": 0.6169154228855721, + "acc_norm_stderr": 0.034375193373382504 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.42196531791907516, + "acc_stderr": 0.0376574669386515, + "acc_norm": 0.42196531791907516, + "acc_norm_stderr": 0.0376574669386515 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.02467786284133278, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.02467786284133278 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4027777777777778, + "acc_stderr": 0.04101405519842426, + "acc_norm": 0.4027777777777778, + "acc_norm_stderr": 0.04101405519842426 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5317919075144508, + "acc_stderr": 0.026864624366756656, + "acc_norm": 0.5317919075144508, + "acc_norm_stderr": 0.026864624366756656 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.49693251533742333, + "acc_stderr": 0.03928297078179663, + "acc_norm": 0.49693251533742333, + "acc_norm_stderr": 0.03928297078179663 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5092592592592593, + "acc_stderr": 0.027815973433878014, + "acc_norm": 0.5092592592592593, + "acc_norm_stderr": 0.027815973433878014 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + 
"harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6062176165803109, + "acc_stderr": 0.03526077095548241, + "acc_norm": 0.6062176165803109, + "acc_norm_stderr": 0.03526077095548241 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.042270544512321984, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.042270544512321984 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6018348623853211, + "acc_stderr": 0.020987989422654257, + "acc_norm": 0.6018348623853211, + "acc_norm_stderr": 0.020987989422654257 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3253968253968254, + "acc_stderr": 0.041905964388711366, + "acc_norm": 0.3253968253968254, + "acc_norm_stderr": 0.041905964388711366 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.46078431372549017, + "acc_stderr": 0.028541722692618874, + "acc_norm": 0.46078431372549017, + "acc_norm_stderr": 0.028541722692618874 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6611570247933884, + "acc_stderr": 0.04320767807536671, + "acc_norm": 0.6611570247933884, + "acc_norm_stderr": 0.04320767807536671 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.506578947368421, + "acc_stderr": 0.040685900502249704, + "acc_norm": 0.506578947368421, + "acc_norm_stderr": 0.040685900502249704 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.42810457516339867, + "acc_stderr": 0.020017629214213094, + "acc_norm": 0.42810457516339867, + "acc_norm_stderr": 0.020017629214213094 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.34397163120567376, + "acc_stderr": 0.028338017428611324, + "acc_norm": 0.34397163120567376, + "acc_norm_stderr": 0.028338017428611324 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.042878587513404544, + 
"acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.042878587513404544 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3472222222222222, + "acc_stderr": 0.032468872436376486, + "acc_norm": 0.3472222222222222, + "acc_norm_stderr": 0.032468872436376486 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24134078212290502, + "acc_stderr": 0.014310999547961441, + "acc_norm": 0.24134078212290502, + "acc_norm_stderr": 0.014310999547961441 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.39705882352941174, + "acc_stderr": 0.029722152099280065, + "acc_norm": 0.39705882352941174, + "acc_norm_stderr": 0.029722152099280065 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.40408163265306124, + "acc_stderr": 0.0314147080258659, + "acc_norm": 0.40408163265306124, + "acc_norm_stderr": 0.0314147080258659 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6413502109704642, + "acc_stderr": 0.03121956944530185, + "acc_norm": 0.6413502109704642, + "acc_norm_stderr": 0.03121956944530185 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.34876140808344197, + "acc_stderr": 0.012172035157127116, + "acc_norm": 0.34876140808344197, + "acc_norm_stderr": 0.012172035157127116 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5147058823529411, + "acc_stderr": 0.035077938347913236, + "acc_norm": 0.5147058823529411, + "acc_norm_stderr": 0.035077938347913236 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5515151515151515, + "acc_stderr": 0.038835659779569286, + "acc_norm": 0.5515151515151515, + "acc_norm_stderr": 0.038835659779569286 + }, + 
"harness|ko_truthfulqa_mc|0": { + "mc1": 0.3463892288861689, + "mc1_stderr": 0.01665699710912515, + "mc2": 0.4774130164055824, + "mc2_stderr": 0.015150702377730184 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4970484061393152, + "acc_stderr": 0.017190054580194694, + "acc_norm": 0.5206611570247934, + "acc_norm_stderr": 0.017175671279836446 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "cocoirun/Yi-Ko-6B-instruct-v1.6", + "model_sha": "26dea364cd3d14e9ba92296fc8d4f5ae1ebca559", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/cocoirun/Yi-Ko-6B-instruct-v1.7/result_2024-01-18 08:12:38.json b/cocoirun/Yi-Ko-6B-instruct-v1.7/result_2024-01-18 08:12:38.json new file mode 100644 index 0000000000000000000000000000000000000000..13ed195d1af493ceb0b1f5db187e4d066d8bdc7a --- /dev/null +++ b/cocoirun/Yi-Ko-6B-instruct-v1.7/result_2024-01-18 08:12:38.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3293515358361775, + "acc_stderr": 0.013734057652635474, + "acc_norm": 0.37627986348122866, + "acc_norm_stderr": 0.014157022555407168 + }, + "harness|ko_hellaswag|10": { + "acc": 0.395538737303326, + 
"acc_stderr": 0.004879667889198493, + "acc_norm": 0.5268870742879904, + "acc_norm_stderr": 0.004982561815214123 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5263157894736842, + "acc_stderr": 0.03829509868994727, + "acc_norm": 0.5263157894736842, + "acc_norm_stderr": 0.03829509868994727 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6019417475728155, + "acc_stderr": 0.048467482539772386, + "acc_norm": 0.6019417475728155, + "acc_norm_stderr": 0.048467482539772386 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.565772669220945, + "acc_stderr": 0.01772458938967779, + "acc_norm": 0.565772669220945, + "acc_norm_stderr": 0.01772458938967779 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.04316378599511326, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.04316378599511326 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4085106382978723, + "acc_stderr": 0.03213418026701576, + "acc_norm": 0.4085106382978723, + "acc_norm_stderr": 0.03213418026701576 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.40963855421686746, + "acc_stderr": 0.03828401115079022, + "acc_norm": 0.40963855421686746, + "acc_norm_stderr": 0.03828401115079022 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5209003215434084, + "acc_stderr": 0.028373270961069414, + "acc_norm": 0.5209003215434084, + "acc_norm_stderr": 0.028373270961069414 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4663677130044843, + "acc_stderr": 0.033481800170603065, + "acc_norm": 0.4663677130044843, + "acc_norm_stderr": 0.033481800170603065 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5114503816793893, + "acc_stderr": 0.043841400240780176, + "acc_norm": 0.5114503816793893, + "acc_norm_stderr": 0.043841400240780176 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.44, 
+ "acc_stderr": 0.0498887651569859, + "acc_norm": 0.44, + "acc_norm_stderr": 0.0498887651569859 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5858585858585859, + "acc_stderr": 0.03509438348879629, + "acc_norm": 0.5858585858585859, + "acc_norm_stderr": 0.03509438348879629 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.38620689655172413, + "acc_stderr": 0.040573247344190336, + "acc_norm": 0.38620689655172413, + "acc_norm_stderr": 0.040573247344190336 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.043364327079931785, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.043364327079931785 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5084033613445378, + "acc_stderr": 0.03247390276569669, + "acc_norm": 0.5084033613445378, + "acc_norm_stderr": 0.03247390276569669 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.45384615384615384, + "acc_stderr": 0.025242770987126174, + "acc_norm": 0.45384615384615384, + "acc_norm_stderr": 0.025242770987126174 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.62, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.62, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.04803752235190192, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.04803752235190192 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3891625615763547, + "acc_stderr": 0.034304624161038716, + "acc_norm": 0.3891625615763547, + "acc_norm_stderr": 0.034304624161038716 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4806451612903226, + "acc_stderr": 0.028422687404312107, + "acc_norm": 0.4806451612903226, + "acc_norm_stderr": 0.028422687404312107 + }, + 
"harness|ko_mmlu_marketing|5": { + "acc": 0.7051282051282052, + "acc_stderr": 0.029872577708891172, + "acc_norm": 0.7051282051282052, + "acc_norm_stderr": 0.029872577708891172 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5169811320754717, + "acc_stderr": 0.030755120364119905, + "acc_norm": 0.5169811320754717, + "acc_norm_stderr": 0.030755120364119905 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5636363636363636, + "acc_stderr": 0.04750185058907296, + "acc_norm": 0.5636363636363636, + "acc_norm_stderr": 0.04750185058907296 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3296296296296296, + "acc_stderr": 0.02866120111652459, + "acc_norm": 0.3296296296296296, + "acc_norm_stderr": 0.02866120111652459 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.038020397601079024, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.038020397601079024 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5671641791044776, + "acc_stderr": 0.03503490923673282, + "acc_norm": 0.5671641791044776, + "acc_norm_stderr": 0.03503490923673282 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.37572254335260113, + "acc_stderr": 0.03692820767264867, + "acc_norm": 0.37572254335260113, + "acc_norm_stderr": 0.03692820767264867 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.02391998416404773, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.02391998416404773 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.041553199555931467, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.041553199555931467 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + 
"acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.49710982658959535, + "acc_stderr": 0.02691864538323901, + "acc_norm": 0.49710982658959535, + "acc_norm_stderr": 0.02691864538323901 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.44171779141104295, + "acc_stderr": 0.03901591825836184, + "acc_norm": 0.44171779141104295, + "acc_norm_stderr": 0.03901591825836184 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.027815973433878014, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.027815973433878014 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6062176165803109, + "acc_stderr": 0.035260770955482405, + "acc_norm": 0.6062176165803109, + "acc_norm_stderr": 0.035260770955482405 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.34210526315789475, + "acc_stderr": 0.044629175353369376, + "acc_norm": 0.34210526315789475, + "acc_norm_stderr": 0.044629175353369376 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5926605504587156, + "acc_stderr": 0.021065986244412877, + "acc_norm": 0.5926605504587156, + "acc_norm_stderr": 0.021065986244412877 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2698412698412698, + "acc_stderr": 0.039701582732351734, + "acc_norm": 0.2698412698412698, + "acc_norm_stderr": 0.039701582732351734 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4084967320261438, + "acc_stderr": 0.028146405993096358, + "acc_norm": 0.4084967320261438, + "acc_norm_stderr": 0.028146405993096358 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6776859504132231, + "acc_stderr": 
0.042664163633521685, + "acc_norm": 0.6776859504132231, + "acc_norm_stderr": 0.042664163633521685 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4407894736842105, + "acc_stderr": 0.04040311062490436, + "acc_norm": 0.4407894736842105, + "acc_norm_stderr": 0.04040311062490436 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.41013071895424835, + "acc_stderr": 0.019898412717635892, + "acc_norm": 0.41013071895424835, + "acc_norm_stderr": 0.019898412717635892 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3404255319148936, + "acc_stderr": 0.028267657482650154, + "acc_norm": 0.3404255319148936, + "acc_norm_stderr": 0.028267657482650154 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.30357142857142855, + "acc_stderr": 0.043642261558410445, + "acc_norm": 0.30357142857142855, + "acc_norm_stderr": 0.043642261558410445 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.32407407407407407, + "acc_stderr": 0.03191923445686186, + "acc_norm": 0.32407407407407407, + "acc_norm_stderr": 0.03191923445686186 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.21899441340782122, + "acc_stderr": 0.013831676687303188, + "acc_norm": 0.21899441340782122, + "acc_norm_stderr": 0.013831676687303188 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4007352941176471, + "acc_stderr": 0.029768263528933105, + "acc_norm": 0.4007352941176471, + "acc_norm_stderr": 0.029768263528933105 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4122448979591837, + "acc_stderr": 0.03151236044674281, + "acc_norm": 0.4122448979591837, + "acc_norm_stderr": 0.03151236044674281 + }, + 
"harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6371308016877637, + "acc_stderr": 0.03129920825530213, + "acc_norm": 0.6371308016877637, + "acc_norm_stderr": 0.03129920825530213 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.33702737940026073, + "acc_stderr": 0.012072836273691327, + "acc_norm": 0.33702737940026073, + "acc_norm_stderr": 0.012072836273691327 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5441176470588235, + "acc_stderr": 0.03495624522015476, + "acc_norm": 0.5441176470588235, + "acc_norm_stderr": 0.03495624522015476 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5757575757575758, + "acc_stderr": 0.038592681420702636, + "acc_norm": 0.5757575757575758, + "acc_norm_stderr": 0.038592681420702636 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.37576499388004897, + "mc1_stderr": 0.016954584060214307, + "mc2": 0.517667546183503, + "mc2_stderr": 0.015393604581783406 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5088547815820543, + "acc_stderr": 0.01718765819933674, + "acc_norm": 0.5206611570247934, + "acc_norm_stderr": 0.017175671279836446 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, 
+ "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "cocoirun/Yi-Ko-6B-instruct-v1.7", + "model_sha": "136fb3e96172cdaf1505bc6e405cf93d2105ef86", + "model_dtype": "torch.float16", + 
"lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/cocoirun/Yi-Ko-6B-instruct-v2.0/result_2024-01-18 09:36:51.json b/cocoirun/Yi-Ko-6B-instruct-v2.0/result_2024-01-18 09:36:51.json new file mode 100644 index 0000000000000000000000000000000000000000..9753366ce8487c86ceea37f4837d4f4b0e89c56a --- /dev/null +++ b/cocoirun/Yi-Ko-6B-instruct-v2.0/result_2024-01-18 09:36:51.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.34044368600682595, + "acc_stderr": 0.013847460518892978, + "acc_norm": 0.3993174061433447, + "acc_norm_stderr": 0.014312094557946704 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3970324636526588, + "acc_stderr": 0.004882828727152303, + "acc_norm": 0.5301732722565226, + "acc_norm_stderr": 0.0049806874674860994 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.52046783625731, + "acc_stderr": 0.0383161053282193, + "acc_norm": 0.52046783625731, + "acc_norm_stderr": 0.0383161053282193 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5728155339805825, + "acc_stderr": 0.04897957737781168, + "acc_norm": 0.5728155339805825, + "acc_norm_stderr": 0.04897957737781168 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5670498084291188, + "acc_stderr": 0.017718469101513982, + "acc_norm": 0.5670498084291188, + "acc_norm_stderr": 0.017718469101513982 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.04309732901036354, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.04309732901036354 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.42127659574468085, + "acc_stderr": 0.03227834510146267, + "acc_norm": 0.42127659574468085, + "acc_norm_stderr": 0.03227834510146267 + }, + 
"harness|ko_mmlu_virology|5": { + "acc": 0.35542168674698793, + "acc_stderr": 0.03726214354322415, + "acc_norm": 0.35542168674698793, + "acc_norm_stderr": 0.03726214354322415 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5048231511254019, + "acc_stderr": 0.028396770444111298, + "acc_norm": 0.5048231511254019, + "acc_norm_stderr": 0.028396770444111298 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5112107623318386, + "acc_stderr": 0.033549366530984746, + "acc_norm": 0.5112107623318386, + "acc_norm_stderr": 0.033549366530984746 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5648854961832062, + "acc_stderr": 0.04348208051644858, + "acc_norm": 0.5648854961832062, + "acc_norm_stderr": 0.04348208051644858 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.03427308652999935, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.03427308652999935 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4413793103448276, + "acc_stderr": 0.04137931034482758, + "acc_norm": 0.4413793103448276, + "acc_norm_stderr": 0.04137931034482758 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.04220773659171453, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.04220773659171453 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4957983193277311, + "acc_stderr": 0.0324773433444811, + "acc_norm": 0.4957983193277311, + "acc_norm_stderr": 0.0324773433444811 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4846153846153846, + "acc_stderr": 0.025339003010106522, + "acc_norm": 0.4846153846153846, + "acc_norm_stderr": 0.025339003010106522 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + 
"acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.04803752235190193, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.04803752235190193 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3694581280788177, + "acc_stderr": 0.033959703819985754, + "acc_norm": 0.3694581280788177, + "acc_norm_stderr": 0.033959703819985754 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4774193548387097, + "acc_stderr": 0.028414985019707874, + "acc_norm": 0.4774193548387097, + "acc_norm_stderr": 0.028414985019707874 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7435897435897436, + "acc_stderr": 0.028605953702004253, + "acc_norm": 0.7435897435897436, + "acc_norm_stderr": 0.028605953702004253 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.49056603773584906, + "acc_stderr": 0.0307673947078081, + "acc_norm": 0.49056603773584906, + "acc_norm_stderr": 0.0307673947078081 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.0478833976870286, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.0478833976870286 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.028742040903948485, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.028742040903948485 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.32450331125827814, + "acc_stderr": 0.03822746937658754, + "acc_norm": 0.32450331125827814, + "acc_norm_stderr": 0.03822746937658754 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5422885572139303, + "acc_stderr": 0.035228658640995975, + "acc_norm": 0.5422885572139303, + "acc_norm_stderr": 0.035228658640995975 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3583815028901734, + "acc_stderr": 
0.036563436533531585, + "acc_norm": 0.3583815028901734, + "acc_norm_stderr": 0.036563436533531585 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.32275132275132273, + "acc_stderr": 0.024078943243597016, + "acc_norm": 0.32275132275132273, + "acc_norm_stderr": 0.024078943243597016 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4236111111111111, + "acc_stderr": 0.04132125019723368, + "acc_norm": 0.4236111111111111, + "acc_norm_stderr": 0.04132125019723368 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.53, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.53, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5028901734104047, + "acc_stderr": 0.02691864538323901, + "acc_norm": 0.5028901734104047, + "acc_norm_stderr": 0.02691864538323901 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.44785276073619634, + "acc_stderr": 0.03906947479456601, + "acc_norm": 0.44785276073619634, + "acc_norm_stderr": 0.03906947479456601 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5154320987654321, + "acc_stderr": 0.0278074900442762, + "acc_norm": 0.5154320987654321, + "acc_norm_stderr": 0.0278074900442762 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384739, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384739 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5647668393782384, + "acc_stderr": 0.03578038165008586, + "acc_norm": 0.5647668393782384, + "acc_norm_stderr": 0.03578038165008586 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.32456140350877194, + "acc_stderr": 0.04404556157374768, + "acc_norm": 0.32456140350877194, + "acc_norm_stderr": 0.04404556157374768 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6, + 
"acc_stderr": 0.021004201260420075, + "acc_norm": 0.6, + "acc_norm_stderr": 0.021004201260420075 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.040061680838488774, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.040061680838488774 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4477124183006536, + "acc_stderr": 0.02847293847803353, + "acc_norm": 0.4477124183006536, + "acc_norm_stderr": 0.02847293847803353 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.55, + "acc_stderr": 0.049999999999999996, + "acc_norm": 0.55, + "acc_norm_stderr": 0.049999999999999996 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.628099173553719, + "acc_stderr": 0.04412015806624504, + "acc_norm": 0.628099173553719, + "acc_norm_stderr": 0.04412015806624504 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4276315789473684, + "acc_stderr": 0.04026097083296558, + "acc_norm": 0.4276315789473684, + "acc_norm_stderr": 0.04026097083296558 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.42810457516339867, + "acc_stderr": 0.020017629214213094, + "acc_norm": 0.42810457516339867, + "acc_norm_stderr": 0.020017629214213094 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.36879432624113473, + "acc_stderr": 0.02878222756134725, + "acc_norm": 0.36879432624113473, + "acc_norm_stderr": 0.02878222756134725 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2767857142857143, + "acc_stderr": 0.04246624336697623, + "acc_norm": 0.2767857142857143, + "acc_norm_stderr": 0.04246624336697623 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.32407407407407407, + "acc_stderr": 0.03191923445686186, + "acc_norm": 0.32407407407407407, + "acc_norm_stderr": 0.03191923445686186 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24692737430167597, + "acc_stderr": 0.014422292204808848, + "acc_norm": 0.24692737430167597, + "acc_norm_stderr": 0.014422292204808848 + }, + 
"harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.43014705882352944, + "acc_stderr": 0.030074971917302875, + "acc_norm": 0.43014705882352944, + "acc_norm_stderr": 0.030074971917302875 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4163265306122449, + "acc_stderr": 0.031557828165561644, + "acc_norm": 0.4163265306122449, + "acc_norm_stderr": 0.031557828165561644 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6371308016877637, + "acc_stderr": 0.03129920825530213, + "acc_norm": 0.6371308016877637, + "acc_norm_stderr": 0.03129920825530213 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.34810951760104303, + "acc_stderr": 0.012166738993698191, + "acc_norm": 0.34810951760104303, + "acc_norm_stderr": 0.012166738993698191 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5294117647058824, + "acc_stderr": 0.03503235296367993, + "acc_norm": 0.5294117647058824, + "acc_norm_stderr": 0.03503235296367993 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5757575757575758, + "acc_stderr": 0.03859268142070265, + "acc_norm": 0.5757575757575758, + "acc_norm_stderr": 0.03859268142070265 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3561811505507956, + "mc1_stderr": 0.01676379072844634, + "mc2": 0.49704759926076164, + "mc2_stderr": 0.015338831867878627 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5100354191263282, + "acc_stderr": 0.017186891286894056, + "acc_norm": 0.526564344746163, + "acc_norm_stderr": 0.017166075717577747 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + 
"harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + 
"harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "cocoirun/Yi-Ko-6B-instruct-v2.0", + "model_sha": "2e59b99e978abf954ada547a9cefe40fa306a486", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/codellama/CodeLlama-34b-Instruct-hf/result_2023-11-20 14:38:19.json b/codellama/CodeLlama-34b-Instruct-hf/result_2023-11-20 14:38:19.json new file mode 100644 index 0000000000000000000000000000000000000000..55c430b4cdd6df9b1e1cf5f0bf50916f87641c22 --- /dev/null +++ b/codellama/CodeLlama-34b-Instruct-hf/result_2023-11-20 14:38:19.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3387372013651877, + "acc_stderr": 0.01383056892797433, + "acc_norm": 0.39334470989761094, + "acc_norm_stderr": 0.014275101465693028 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3808006373232424, + "acc_stderr": 0.004845912857338656, + "acc_norm": 0.48974307906791475, + "acc_norm_stderr": 0.004988731406780661 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.42105263157894735, + "acc_stderr": 0.037867207062342145, + "acc_norm": 0.42105263157894735, + "acc_norm_stderr": 0.037867207062342145 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5242718446601942, + "acc_stderr": 0.049449010929737795, + "acc_norm": 
0.5242718446601942, + "acc_norm_stderr": 0.049449010929737795 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.44061302681992337, + "acc_stderr": 0.017753396973908483, + "acc_norm": 0.44061302681992337, + "acc_norm_stderr": 0.017753396973908483 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.041716541613545426, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.041716541613545426 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4297872340425532, + "acc_stderr": 0.03236214467715564, + "acc_norm": 0.4297872340425532, + "acc_norm_stderr": 0.03236214467715564 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3855421686746988, + "acc_stderr": 0.03789134424611548, + "acc_norm": 0.3855421686746988, + "acc_norm_stderr": 0.03789134424611548 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4790996784565916, + "acc_stderr": 0.028373270961069407, + "acc_norm": 0.4790996784565916, + "acc_norm_stderr": 0.028373270961069407 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4260089686098655, + "acc_stderr": 0.0331883328621728, + "acc_norm": 0.4260089686098655, + "acc_norm_stderr": 0.0331883328621728 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.40458015267175573, + "acc_stderr": 0.04304693795380665, + "acc_norm": 0.40458015267175573, + "acc_norm_stderr": 0.04304693795380665 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5202020202020202, + "acc_stderr": 0.03559443565563919, + "acc_norm": 0.5202020202020202, + "acc_norm_stderr": 0.03559443565563919 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5241379310344828, + "acc_stderr": 0.0416180850350153, + 
"acc_norm": 0.5241379310344828, + "acc_norm_stderr": 0.0416180850350153 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.04617034827006718, + "acc_norm": 0.3137254901960784, + "acc_norm_stderr": 0.04617034827006718 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4495798319327731, + "acc_stderr": 0.03231293497137707, + "acc_norm": 0.4495798319327731, + "acc_norm_stderr": 0.03231293497137707 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4256410256410256, + "acc_stderr": 0.025069094387296542, + "acc_norm": 0.4256410256410256, + "acc_norm_stderr": 0.025069094387296542 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720683, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720683 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5092592592592593, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.5092592592592593, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39901477832512317, + "acc_stderr": 0.034454876862647144, + "acc_norm": 0.39901477832512317, + "acc_norm_stderr": 0.034454876862647144 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4161290322580645, + "acc_stderr": 0.028040981380761554, + "acc_norm": 0.4161290322580645, + "acc_norm_stderr": 0.028040981380761554 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7136752136752137, + "acc_stderr": 0.029614323690456648, + "acc_norm": 0.7136752136752137, + "acc_norm_stderr": 0.029614323690456648 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.39622641509433965, + "acc_stderr": 0.03010279378179119, + "acc_norm": 0.39622641509433965, + "acc_norm_stderr": 0.03010279378179119 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4818181818181818, + 
"acc_stderr": 0.04785964010794916, + "acc_norm": 0.4818181818181818, + "acc_norm_stderr": 0.04785964010794916 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3592592592592593, + "acc_stderr": 0.02925290592725198, + "acc_norm": 0.3592592592592593, + "acc_norm_stderr": 0.02925290592725198 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3443708609271523, + "acc_stderr": 0.038796870240733264, + "acc_norm": 0.3443708609271523, + "acc_norm_stderr": 0.038796870240733264 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.572139303482587, + "acc_stderr": 0.03498541988407795, + "acc_norm": 0.572139303482587, + "acc_norm_stderr": 0.03498541988407795 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.31213872832369943, + "acc_stderr": 0.035331333893236574, + "acc_norm": 0.31213872832369943, + "acc_norm_stderr": 0.035331333893236574 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3439153439153439, + "acc_stderr": 0.024464426625596433, + "acc_norm": 0.3439153439153439, + "acc_norm_stderr": 0.024464426625596433 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.03852084696008534, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.03852084696008534 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.57, + "acc_stderr": 0.04975698519562426, + "acc_norm": 0.57, + "acc_norm_stderr": 0.04975698519562426 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.49710982658959535, + "acc_stderr": 0.02691864538323901, + "acc_norm": 0.49710982658959535, + "acc_norm_stderr": 0.02691864538323901 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4662576687116564, + "acc_stderr": 0.039194155450484096, + "acc_norm": 0.4662576687116564, + "acc_norm_stderr": 0.039194155450484096 + }, + "harness|ko_mmlu_prehistory|5": { + 
"acc": 0.42901234567901236, + "acc_stderr": 0.027538925613470863, + "acc_norm": 0.42901234567901236, + "acc_norm_stderr": 0.027538925613470863 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.45077720207253885, + "acc_stderr": 0.03590910952235524, + "acc_norm": 0.45077720207253885, + "acc_norm_stderr": 0.03590910952235524 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04434600701584925, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04434600701584925 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.47155963302752296, + "acc_stderr": 0.02140261569734805, + "acc_norm": 0.47155963302752296, + "acc_norm_stderr": 0.02140261569734805 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3492063492063492, + "acc_stderr": 0.042639068927951336, + "acc_norm": 0.3492063492063492, + "acc_norm_stderr": 0.042639068927951336 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4084967320261438, + "acc_stderr": 0.028146405993096358, + "acc_norm": 0.4084967320261438, + "acc_norm_stderr": 0.028146405993096358 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6033057851239669, + "acc_stderr": 0.044658697805310094, + "acc_norm": 0.6033057851239669, + "acc_norm_stderr": 0.044658697805310094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.47368421052631576, + "acc_stderr": 0.04063302731486671, + "acc_norm": 0.47368421052631576, + "acc_norm_stderr": 0.04063302731486671 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.369281045751634, + "acc_stderr": 0.01952431674486635, + "acc_norm": 0.369281045751634, + "acc_norm_stderr": 0.01952431674486635 + }, + 
"harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3262411347517731, + "acc_stderr": 0.027968453043563168, + "acc_norm": 0.3262411347517731, + "acc_norm_stderr": 0.027968453043563168 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.39285714285714285, + "acc_stderr": 0.04635550135609976, + "acc_norm": 0.39285714285714285, + "acc_norm_stderr": 0.04635550135609976 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.35648148148148145, + "acc_stderr": 0.032664783315272714, + "acc_norm": 0.35648148148148145, + "acc_norm_stderr": 0.032664783315272714 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2223463687150838, + "acc_stderr": 0.013907189208156881, + "acc_norm": 0.2223463687150838, + "acc_norm_stderr": 0.013907189208156881 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.69, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.69, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.34558823529411764, + "acc_stderr": 0.02888819310398864, + "acc_norm": 0.34558823529411764, + "acc_norm_stderr": 0.02888819310398864 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.47346938775510206, + "acc_stderr": 0.03196412734523272, + "acc_norm": 0.47346938775510206, + "acc_norm_stderr": 0.03196412734523272 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6033755274261603, + "acc_stderr": 0.03184399873811224, + "acc_norm": 0.6033755274261603, + "acc_norm_stderr": 0.03184399873811224 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3455019556714472, + "acc_stderr": 0.012145303004087204, + "acc_norm": 0.3455019556714472, + "acc_norm_stderr": 0.012145303004087204 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.45098039215686275, + "acc_stderr": 0.03492406104163613, 
+ "acc_norm": 0.45098039215686275, + "acc_norm_stderr": 0.03492406104163613 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4727272727272727, + "acc_stderr": 0.03898531605579419, + "acc_norm": 0.4727272727272727, + "acc_norm_stderr": 0.03898531605579419 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.30354957160342716, + "mc1_stderr": 0.016095884155386847, + "mc2": 0.47993655549180125, + "mc2_stderr": 0.015508244987757566 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4309327036599764, + "acc_stderr": 0.017025558196043136, + "acc_norm": 0.48642266824085006, + "acc_norm_stderr": 0.017184015060401448 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + 
"harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "codellama/CodeLlama-34b-Instruct-hf", + "model_sha": "bf5e5060fa30f33149efe84bbcc682001a00ab94", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/cognitivecomputations/Llama-3-8B-Instruct-abliterated-v2/result_2024-05-28 05:34:49.json b/cognitivecomputations/Llama-3-8B-Instruct-abliterated-v2/result_2024-05-28 05:34:49.json new file mode 100644 index 0000000000000000000000000000000000000000..51d396f0274f38f04a71813d842e3d9aabb6dc96 --- /dev/null +++ 
b/cognitivecomputations/Llama-3-8B-Instruct-abliterated-v2/result_2024-05-28 05:34:49.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3643344709897611, + "acc_stderr": 0.014063260279882417, + "acc_norm": 0.43600682593856654, + "acc_norm_stderr": 0.01449122569923092 + }, + "harness|ko_hellaswag|10": { + "acc": 0.35899223262298346, + "acc_stderr": 0.004787245377967103, + "acc_norm": 0.4714200358494324, + "acc_norm_stderr": 0.004981623292196191 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5614035087719298, + "acc_stderr": 0.038057975055904594, + "acc_norm": 0.5614035087719298, + "acc_norm_stderr": 0.038057975055904594 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5631067961165048, + "acc_stderr": 0.04911147107365777, + "acc_norm": 0.5631067961165048, + "acc_norm_stderr": 0.04911147107365777 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.46360153256704983, + "acc_stderr": 0.01783252407959326, + "acc_norm": 0.46360153256704983, + "acc_norm_stderr": 0.01783252407959326 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3851851851851852, + "acc_stderr": 0.042039210401562783, + "acc_norm": 0.3851851851851852, + "acc_norm_stderr": 0.042039210401562783 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720683, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720683 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4808510638297872, + "acc_stderr": 0.032662042990646775, + "acc_norm": 0.4808510638297872, + "acc_norm_stderr": 0.032662042990646775 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3795180722891566, + "acc_stderr": 0.037777988227480165, + "acc_norm": 0.3795180722891566, + "acc_norm_stderr": 0.037777988227480165 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5144694533762058, + "acc_stderr": 0.02838619808417768, + "acc_norm": 0.5144694533762058, + "acc_norm_stderr": 0.02838619808417768 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 
0.49327354260089684, + "acc_stderr": 0.033554765962343545, + "acc_norm": 0.49327354260089684, + "acc_norm_stderr": 0.033554765962343545 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4732824427480916, + "acc_stderr": 0.04379024936553894, + "acc_norm": 0.4732824427480916, + "acc_norm_stderr": 0.04379024936553894 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956913, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956913 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5353535353535354, + "acc_stderr": 0.03553436368828064, + "acc_norm": 0.5353535353535354, + "acc_norm_stderr": 0.03553436368828064 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5517241379310345, + "acc_stderr": 0.041443118108781526, + "acc_norm": 0.5517241379310345, + "acc_norm_stderr": 0.041443118108781526 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3235294117647059, + "acc_stderr": 0.04655010411319615, + "acc_norm": 0.3235294117647059, + "acc_norm_stderr": 0.04655010411319615 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4957983193277311, + "acc_stderr": 0.0324773433444811, + "acc_norm": 0.4957983193277311, + "acc_norm_stderr": 0.0324773433444811 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.517948717948718, + "acc_stderr": 0.02533466708095489, + "acc_norm": 0.517948717948718, + "acc_norm_stderr": 0.02533466708095489 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.58, + "acc_stderr": 0.04960449637488583, + "acc_norm": 0.58, + "acc_norm_stderr": 0.04960449637488583 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.04793724854411019, + "acc_norm": 0.35, + "acc_norm_stderr": 0.04793724854411019 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5925925925925926, + "acc_stderr": 0.047500773411999854, + "acc_norm": 0.5925925925925926, + "acc_norm_stderr": 0.047500773411999854 + }, + 
"harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.43842364532019706, + "acc_stderr": 0.03491207857486518, + "acc_norm": 0.43842364532019706, + "acc_norm_stderr": 0.03491207857486518 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5161290322580645, + "acc_stderr": 0.028429203176724555, + "acc_norm": 0.5161290322580645, + "acc_norm_stderr": 0.028429203176724555 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7222222222222222, + "acc_stderr": 0.02934311479809445, + "acc_norm": 0.7222222222222222, + "acc_norm_stderr": 0.02934311479809445 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5207547169811321, + "acc_stderr": 0.030746349975723456, + "acc_norm": 0.5207547169811321, + "acc_norm_stderr": 0.030746349975723456 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4909090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.4909090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.02944316932303154, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.02944316932303154 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3841059602649007, + "acc_stderr": 0.03971301814719198, + "acc_norm": 0.3841059602649007, + "acc_norm_stderr": 0.03971301814719198 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6766169154228856, + "acc_stderr": 0.03307615947979034, + "acc_norm": 0.6766169154228856, + "acc_norm_stderr": 0.03307615947979034 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.44508670520231214, + "acc_stderr": 0.03789401760283647, + "acc_norm": 0.44508670520231214, + "acc_norm_stderr": 0.03789401760283647 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.373015873015873, + "acc_stderr": 0.02490699045899257, + "acc_norm": 0.373015873015873, + "acc_norm_stderr": 0.02490699045899257 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4375, + "acc_stderr": 0.04148415739394154, + 
"acc_norm": 0.4375, + "acc_norm_stderr": 0.04148415739394154 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.62, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.62, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5491329479768786, + "acc_stderr": 0.026788811931562767, + "acc_norm": 0.5491329479768786, + "acc_norm_stderr": 0.026788811931562767 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4294478527607362, + "acc_stderr": 0.038890666191127216, + "acc_norm": 0.4294478527607362, + "acc_norm_stderr": 0.038890666191127216 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5061728395061729, + "acc_stderr": 0.02781862396258329, + "acc_norm": 0.5061728395061729, + "acc_norm_stderr": 0.02781862396258329 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5699481865284974, + "acc_stderr": 0.03572954333144809, + "acc_norm": 0.5699481865284974, + "acc_norm_stderr": 0.03572954333144809 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3684210526315789, + "acc_stderr": 0.04537815354939391, + "acc_norm": 0.3684210526315789, + "acc_norm_stderr": 0.04537815354939391 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5944954128440367, + "acc_stderr": 0.02105099799189684, + "acc_norm": 0.5944954128440367, + "acc_norm_stderr": 0.02105099799189684 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.42063492063492064, + "acc_stderr": 0.04415438226743744, + "acc_norm": 0.42063492063492064, + "acc_norm_stderr": 0.04415438226743744 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5392156862745098, + "acc_stderr": 0.028541722692618874, + 
"acc_norm": 0.5392156862745098, + "acc_norm_stderr": 0.028541722692618874 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6776859504132231, + "acc_stderr": 0.04266416363352167, + "acc_norm": 0.6776859504132231, + "acc_norm_stderr": 0.04266416363352167 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5197368421052632, + "acc_stderr": 0.040657710025626036, + "acc_norm": 0.5197368421052632, + "acc_norm_stderr": 0.040657710025626036 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4297385620915033, + "acc_stderr": 0.020027122784928575, + "acc_norm": 0.4297385620915033, + "acc_norm_stderr": 0.020027122784928575 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.38652482269503546, + "acc_stderr": 0.02904919034254346, + "acc_norm": 0.38652482269503546, + "acc_norm_stderr": 0.02904919034254346 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.5, + "acc_stderr": 0.04745789978762494, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04745789978762494 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.39814814814814814, + "acc_stderr": 0.033384734032074016, + "acc_norm": 0.39814814814814814, + "acc_norm_stderr": 0.033384734032074016 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2581005586592179, + "acc_stderr": 0.014635185616527836, + "acc_norm": 0.2581005586592179, + "acc_norm_stderr": 0.014635185616527836 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.04999999999999999, + "acc_norm": 0.45, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.69, + "acc_stderr": 0.046482319871173156, + "acc_norm": 0.69, + "acc_norm_stderr": 0.046482319871173156 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.40441176470588236, + "acc_stderr": 
0.029812630701569743, + "acc_norm": 0.40441176470588236, + "acc_norm_stderr": 0.029812630701569743 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5959183673469388, + "acc_stderr": 0.03141470802586589, + "acc_norm": 0.5959183673469388, + "acc_norm_stderr": 0.03141470802586589 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6329113924050633, + "acc_stderr": 0.031376240725616185, + "acc_norm": 0.6329113924050633, + "acc_norm_stderr": 0.031376240725616185 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.38070404172099087, + "acc_stderr": 0.012401430654645891, + "acc_norm": 0.38070404172099087, + "acc_norm_stderr": 0.012401430654645891 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5147058823529411, + "acc_stderr": 0.03507793834791323, + "acc_norm": 0.5147058823529411, + "acc_norm_stderr": 0.03507793834791323 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6545454545454545, + "acc_stderr": 0.03713158067481913, + "acc_norm": 0.6545454545454545, + "acc_norm_stderr": 0.03713158067481913 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2766217870257038, + "mc1_stderr": 0.015659605755326912, + "mc2": 0.4442952386830949, + "mc2_stderr": 0.015641982976796278 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4344746162927981, + "acc_stderr": 0.017042098620824935, + "acc_norm": 0.4651711924439197, + "acc_norm_stderr": 0.017148598015747422 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + 
"harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + 
"harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "cognitivecomputations/Llama-3-8B-Instruct-abliterated-v2", + "model_sha": "c99bd5b5e73f1d24504d1145b8a8dfd66c0a2378", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/cognitivecomputations/WestLake-7B-v2-laser/result_2024-07-31 17:33:27.json b/cognitivecomputations/WestLake-7B-v2-laser/result_2024-07-31 17:33:27.json new file mode 100644 index 0000000000000000000000000000000000000000..ae811f5a2b4aa45bebe5f78cca9ae9d8d50fad12 --- /dev/null +++ b/cognitivecomputations/WestLake-7B-v2-laser/result_2024-07-31 17:33:27.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.40273037542662116, + "acc_stderr": 0.01433223630679014, + "acc_norm": 0.45051194539249145, + "acc_norm_stderr": 0.014539646098471627 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3964349731129257, + "acc_stderr": 0.004881570100014375, + "acc_norm": 0.5231029675363473, + "acc_norm_stderr": 0.0049844520025639225 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.47368421052631576, + "acc_stderr": 0.038295098689947286, + "acc_norm": 0.47368421052631576, + "acc_norm_stderr": 0.038295098689947286 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5922330097087378, + "acc_stderr": 0.04865777570410768, + "acc_norm": 0.5922330097087378, + "acc_norm_stderr": 0.04865777570410768 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4648786717752235, + "acc_stderr": 0.01783579880629064, + "acc_norm": 0.4648786717752235, + "acc_norm_stderr": 0.01783579880629064 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.35555555555555557, + "acc_stderr": 0.04135176749720386, + "acc_norm": 0.35555555555555557, + 
"acc_norm_stderr": 0.04135176749720386 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4085106382978723, + "acc_stderr": 0.03213418026701576, + "acc_norm": 0.4085106382978723, + "acc_norm_stderr": 0.03213418026701576 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3855421686746988, + "acc_stderr": 0.037891344246115496, + "acc_norm": 0.3855421686746988, + "acc_norm_stderr": 0.037891344246115496 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4405144694533762, + "acc_stderr": 0.028196400574197422, + "acc_norm": 0.4405144694533762, + "acc_norm_stderr": 0.028196400574197422 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.452914798206278, + "acc_stderr": 0.033408675019233246, + "acc_norm": 0.452914798206278, + "acc_norm_stderr": 0.033408675019233246 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.42748091603053434, + "acc_stderr": 0.043389203057924, + "acc_norm": 0.42748091603053434, + "acc_norm_stderr": 0.043389203057924 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5606060606060606, + "acc_stderr": 0.0353608594752948, + "acc_norm": 0.5606060606060606, + "acc_norm_stderr": 0.0353608594752948 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4827586206896552, + "acc_stderr": 0.04164188720169377, + "acc_norm": 0.4827586206896552, + "acc_norm_stderr": 0.04164188720169377 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.04389869956808778, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.04389869956808778 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5462184873949579, + "acc_stderr": 0.03233943468182088, + "acc_norm": 
0.5462184873949579, + "acc_norm_stderr": 0.03233943468182088 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4564102564102564, + "acc_stderr": 0.02525448542479961, + "acc_norm": 0.4564102564102564, + "acc_norm_stderr": 0.02525448542479961 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.62, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.62, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5370370370370371, + "acc_stderr": 0.04820403072760627, + "acc_norm": 0.5370370370370371, + "acc_norm_stderr": 0.04820403072760627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3891625615763547, + "acc_stderr": 0.03430462416103872, + "acc_norm": 0.3891625615763547, + "acc_norm_stderr": 0.03430462416103872 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.432258064516129, + "acc_stderr": 0.028181739720019416, + "acc_norm": 0.432258064516129, + "acc_norm_stderr": 0.028181739720019416 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7606837606837606, + "acc_stderr": 0.027951826808924333, + "acc_norm": 0.7606837606837606, + "acc_norm_stderr": 0.027951826808924333 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4490566037735849, + "acc_stderr": 0.030612730713641095, + "acc_norm": 0.4490566037735849, + "acc_norm_stderr": 0.030612730713641095 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.04769300568972745, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.04769300568972745 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3296296296296296, + "acc_stderr": 0.028661201116524572, + "acc_norm": 0.3296296296296296, + "acc_norm_stderr": 0.028661201116524572 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + 
"acc_stderr": 0.037345356767871984, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.037345356767871984 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5621890547263682, + "acc_stderr": 0.035080801121998406, + "acc_norm": 0.5621890547263682, + "acc_norm_stderr": 0.035080801121998406 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4161849710982659, + "acc_stderr": 0.03758517775404948, + "acc_norm": 0.4161849710982659, + "acc_norm_stderr": 0.03758517775404948 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.025107425481137285, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.025107425481137285 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.375, + "acc_stderr": 0.04048439222695598, + "acc_norm": 0.375, + "acc_norm_stderr": 0.04048439222695598 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.52, + "acc_stderr": 0.05021167315686781, + "acc_norm": 0.52, + "acc_norm_stderr": 0.05021167315686781 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5144508670520231, + "acc_stderr": 0.02690784985628254, + "acc_norm": 0.5144508670520231, + "acc_norm_stderr": 0.02690784985628254 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5398773006134969, + "acc_stderr": 0.03915857291436972, + "acc_norm": 0.5398773006134969, + "acc_norm_stderr": 0.03915857291436972 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.45987654320987653, + "acc_stderr": 0.027731022753539277, + "acc_norm": 0.45987654320987653, + "acc_norm_stderr": 0.027731022753539277 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5025906735751295, + "acc_stderr": 
0.03608390745384486, + "acc_norm": 0.5025906735751295, + "acc_norm_stderr": 0.03608390745384486 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.32456140350877194, + "acc_stderr": 0.04404556157374768, + "acc_norm": 0.32456140350877194, + "acc_norm_stderr": 0.04404556157374768 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5174311926605505, + "acc_stderr": 0.021424291871853157, + "acc_norm": 0.5174311926605505, + "acc_norm_stderr": 0.021424291871853157 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.46825396825396826, + "acc_stderr": 0.04463112720677171, + "acc_norm": 0.46825396825396826, + "acc_norm_stderr": 0.04463112720677171 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.48366013071895425, + "acc_stderr": 0.028614624752805407, + "acc_norm": 0.48366013071895425, + "acc_norm_stderr": 0.028614624752805407 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6528925619834711, + "acc_stderr": 0.04345724570292534, + "acc_norm": 0.6528925619834711, + "acc_norm_stderr": 0.04345724570292534 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.39473684210526316, + "acc_stderr": 0.03977749934622074, + "acc_norm": 0.39473684210526316, + "acc_norm_stderr": 0.03977749934622074 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3872549019607843, + "acc_stderr": 0.019706875804085634, + "acc_norm": 0.3872549019607843, + "acc_norm_stderr": 0.019706875804085634 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.36524822695035464, + "acc_stderr": 0.028723863853281274, + "acc_norm": 0.36524822695035464, + "acc_norm_stderr": 0.028723863853281274 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.41964285714285715, + "acc_stderr": 0.046840993210771065, + "acc_norm": 0.41964285714285715, + "acc_norm_stderr": 0.046840993210771065 + }, + 
"harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.39814814814814814, + "acc_stderr": 0.03338473403207401, + "acc_norm": 0.39814814814814814, + "acc_norm_stderr": 0.03338473403207401 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2581005586592179, + "acc_stderr": 0.014635185616527822, + "acc_norm": 0.2581005586592179, + "acc_norm_stderr": 0.014635185616527822 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.7, + "acc_stderr": 0.04605661864718381, + "acc_norm": 0.7, + "acc_norm_stderr": 0.04605661864718381 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3382352941176471, + "acc_stderr": 0.028739328513983576, + "acc_norm": 0.3382352941176471, + "acc_norm_stderr": 0.028739328513983576 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5265306122448979, + "acc_stderr": 0.03196412734523272, + "acc_norm": 0.5265306122448979, + "acc_norm_stderr": 0.03196412734523272 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5991561181434599, + "acc_stderr": 0.031900803894732356, + "acc_norm": 0.5991561181434599, + "acc_norm_stderr": 0.031900803894732356 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.35071707953063885, + "acc_stderr": 0.01218777337074152, + "acc_norm": 0.35071707953063885, + "acc_norm_stderr": 0.01218777337074152 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.47549019607843135, + "acc_stderr": 0.03505093194348798, + "acc_norm": 0.47549019607843135, + "acc_norm_stderr": 0.03505093194348798 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4303030303030303, + "acc_stderr": 0.03866225962879077, + "acc_norm": 0.4303030303030303, + "acc_norm_stderr": 0.03866225962879077 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3843329253365973, + "mc1_stderr": 0.017028707301245196, + "mc2": 
0.5495612298510391, + "mc2_stderr": 0.016301090987876694 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.42621015348288077, + "acc_stderr": 0.01700212260948926, + "acc_norm": 0.4297520661157025, + "acc_norm_stderr": 0.017019847535972205 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + 
"harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "cognitivecomputations/WestLake-7B-v2-laser", + "model_sha": "0acaee8266dce7af1b34e8cefd1f2859d9944cd4", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/cognitivecomputations/dolphin-2.6-mixtral-8x7b/result_2024-06-13 20:41:02.json b/cognitivecomputations/dolphin-2.6-mixtral-8x7b/result_2024-06-13 20:41:02.json new file mode 100644 index 0000000000000000000000000000000000000000..fce1d4d12d8e15efb5b6f85e956db27de7bab16f --- /dev/null +++ b/cognitivecomputations/dolphin-2.6-mixtral-8x7b/result_2024-06-13 20:41:02.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.27559726962457337, + "acc_stderr": 0.013057169655761838, + "acc_norm": 0.3225255972696246, + "acc_norm_stderr": 0.013659980894277366 + }, + "harness|ko_hellaswag|10": { + "acc": 0.33190599482174865, + "acc_stderr": 0.004699350653695628, + 
"acc_norm": 0.42411870145389363, + "acc_norm_stderr": 0.004931984642695339 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.49122807017543857, + "acc_stderr": 0.038342347441649924, + "acc_norm": 0.49122807017543857, + "acc_norm_stderr": 0.038342347441649924 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.47572815533980584, + "acc_stderr": 0.04944901092973781, + "acc_norm": 0.47572815533980584, + "acc_norm_stderr": 0.04944901092973781 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.45977011494252873, + "acc_stderr": 0.01782199409693353, + "acc_norm": 0.45977011494252873, + "acc_norm_stderr": 0.01782199409693353 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.32592592592592595, + "acc_stderr": 0.040491220417025055, + "acc_norm": 0.32592592592592595, + "acc_norm_stderr": 0.040491220417025055 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3702127659574468, + "acc_stderr": 0.03156564682236785, + "acc_norm": 0.3702127659574468, + "acc_norm_stderr": 0.03156564682236785 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39759036144578314, + "acc_stderr": 0.03809973084540219, + "acc_norm": 0.39759036144578314, + "acc_norm_stderr": 0.03809973084540219 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4694533762057878, + "acc_stderr": 0.028345045864840674, + "acc_norm": 0.4694533762057878, + "acc_norm_stderr": 0.028345045864840674 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3901345291479821, + "acc_stderr": 0.03273766725459157, + "acc_norm": 0.3901345291479821, + "acc_norm_stderr": 0.03273766725459157 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3893129770992366, + "acc_stderr": 0.04276486542814592, + "acc_norm": 0.3893129770992366, + "acc_norm_stderr": 0.04276486542814592 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.41, + "acc_stderr": 
0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5151515151515151, + "acc_stderr": 0.0356071651653106, + "acc_norm": 0.5151515151515151, + "acc_norm_stderr": 0.0356071651653106 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4896551724137931, + "acc_stderr": 0.041657747757287644, + "acc_norm": 0.4896551724137931, + "acc_norm_stderr": 0.041657747757287644 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04690650298201942, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04690650298201942 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.47058823529411764, + "acc_stderr": 0.032422250271150074, + "acc_norm": 0.47058823529411764, + "acc_norm_stderr": 0.032422250271150074 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4205128205128205, + "acc_stderr": 0.025028610276710855, + "acc_norm": 0.4205128205128205, + "acc_norm_stderr": 0.025028610276710855 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.66, + "acc_stderr": 0.04760952285695237, + "acc_norm": 0.66, + "acc_norm_stderr": 0.04760952285695237 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5092592592592593, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.5092592592592593, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3645320197044335, + "acc_stderr": 0.033864057460620905, + "acc_norm": 0.3645320197044335, + "acc_norm_stderr": 0.033864057460620905 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.41935483870967744, + "acc_stderr": 0.02807158890109185, + "acc_norm": 0.41935483870967744, + "acc_norm_stderr": 0.02807158890109185 + }, + "harness|ko_mmlu_marketing|5": { + 
"acc": 0.7094017094017094, + "acc_stderr": 0.029745048572674074, + "acc_norm": 0.7094017094017094, + "acc_norm_stderr": 0.029745048572674074 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.41509433962264153, + "acc_stderr": 0.030325945789286102, + "acc_norm": 0.41509433962264153, + "acc_norm_stderr": 0.030325945789286102 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4636363636363636, + "acc_stderr": 0.047764491623961985, + "acc_norm": 0.4636363636363636, + "acc_norm_stderr": 0.047764491623961985 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.02944316932303154, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.02944316932303154 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33774834437086093, + "acc_stderr": 0.038615575462551684, + "acc_norm": 0.33774834437086093, + "acc_norm_stderr": 0.038615575462551684 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5621890547263682, + "acc_stderr": 0.0350808011219984, + "acc_norm": 0.5621890547263682, + "acc_norm_stderr": 0.0350808011219984 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3815028901734104, + "acc_stderr": 0.0370385119309952, + "acc_norm": 0.3815028901734104, + "acc_norm_stderr": 0.0370385119309952 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.025305906241590632, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.025305906241590632 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3263888888888889, + "acc_stderr": 0.03921067198982266, + "acc_norm": 0.3263888888888889, + "acc_norm_stderr": 0.03921067198982266 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.57, + "acc_stderr": 0.04975698519562426, + "acc_norm": 0.57, + "acc_norm_stderr": 0.04975698519562426 + }, + 
"harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4884393063583815, + "acc_stderr": 0.02691189868637792, + "acc_norm": 0.4884393063583815, + "acc_norm_stderr": 0.02691189868637792 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4723926380368098, + "acc_stderr": 0.0392237829061099, + "acc_norm": 0.4723926380368098, + "acc_norm_stderr": 0.0392237829061099 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4537037037037037, + "acc_stderr": 0.0277012284685426, + "acc_norm": 0.4537037037037037, + "acc_norm_stderr": 0.0277012284685426 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.44041450777202074, + "acc_stderr": 0.035827245300360945, + "acc_norm": 0.44041450777202074, + "acc_norm_stderr": 0.035827245300360945 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.41228070175438597, + "acc_stderr": 0.04630653203366595, + "acc_norm": 0.41228070175438597, + "acc_norm_stderr": 0.04630653203366595 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.45504587155963305, + "acc_stderr": 0.02135050309092516, + "acc_norm": 0.45504587155963305, + "acc_norm_stderr": 0.02135050309092516 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4126984126984127, + "acc_stderr": 0.04403438954768177, + "acc_norm": 0.4126984126984127, + "acc_norm_stderr": 0.04403438954768177 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.46405228758169936, + "acc_stderr": 0.02855582751652879, + "acc_norm": 0.46405228758169936, + "acc_norm_stderr": 0.02855582751652879 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6611570247933884, + "acc_stderr": 0.04320767807536669, + "acc_norm": 0.6611570247933884, + "acc_norm_stderr": 
0.04320767807536669 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40789473684210525, + "acc_stderr": 0.03999309712777471, + "acc_norm": 0.40789473684210525, + "acc_norm_stderr": 0.03999309712777471 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.37745098039215685, + "acc_stderr": 0.01961085147488028, + "acc_norm": 0.37745098039215685, + "acc_norm_stderr": 0.01961085147488028 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.34397163120567376, + "acc_stderr": 0.028338017428611324, + "acc_norm": 0.34397163120567376, + "acc_norm_stderr": 0.028338017428611324 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.38392857142857145, + "acc_stderr": 0.04616143075028547, + "acc_norm": 0.38392857142857145, + "acc_norm_stderr": 0.04616143075028547 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.03275773486101, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.03275773486101 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.25921787709497207, + "acc_stderr": 0.01465578083749772, + "acc_norm": 0.25921787709497207, + "acc_norm_stderr": 0.01465578083749772 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.62, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.62, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.2757352941176471, + "acc_stderr": 0.027146271936625166, + "acc_norm": 0.2757352941176471, + "acc_norm_stderr": 0.027146271936625166 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5714285714285714, + "acc_stderr": 0.031680911612338825, + "acc_norm": 0.5714285714285714, + "acc_norm_stderr": 0.031680911612338825 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5738396624472574, + "acc_stderr": 
0.03219035703131774, + "acc_norm": 0.5738396624472574, + "acc_norm_stderr": 0.03219035703131774 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3520208604954368, + "acc_stderr": 0.0121981406053536, + "acc_norm": 0.3520208604954368, + "acc_norm_stderr": 0.0121981406053536 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.45588235294117646, + "acc_stderr": 0.034956245220154725, + "acc_norm": 0.45588235294117646, + "acc_norm_stderr": 0.034956245220154725 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4909090909090909, + "acc_stderr": 0.0390369864774844, + "acc_norm": 0.4909090909090909, + "acc_norm_stderr": 0.0390369864774844 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.29498164014687883, + "mc1_stderr": 0.015964400965589667, + "mc2": 0.49107879159936213, + "mc2_stderr": 0.016621609182765725 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2857142857142857, + "acc_stderr": 0.015531620786986724, + "acc_norm": 0.35537190082644626, + "acc_norm_stderr": 0.016455496000314523 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + 
"harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "cognitivecomputations/dolphin-2.6-mixtral-8x7b", + "model_sha": "db797db3f23f58b79f9815222b83c1a6f7bb3f25", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, 
+ "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/cognitivecomputations/dolphin-2.7-mixtral-8x7b/result_2024-05-25 06:10:24.json b/cognitivecomputations/dolphin-2.7-mixtral-8x7b/result_2024-05-25 06:10:24.json new file mode 100644 index 0000000000000000000000000000000000000000..f8de4495852584ecf35c41a03931c3c84492326c --- /dev/null +++ b/cognitivecomputations/dolphin-2.7-mixtral-8x7b/result_2024-05-25 06:10:24.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.25170648464163825, + "acc_stderr": 0.012682496334042961, + "acc_norm": 0.3165529010238908, + "acc_norm_stderr": 0.01359243151906808 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3185620394343756, + "acc_stderr": 0.004649665273890649, + "acc_norm": 0.39255128460466043, + "acc_norm_stderr": 0.00487320326936631 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4619883040935672, + "acc_stderr": 0.03823727092882307, + "acc_norm": 0.4619883040935672, + "acc_norm_stderr": 0.03823727092882307 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.46601941747572817, + "acc_stderr": 0.0493929144727348, + "acc_norm": 0.46601941747572817, + "acc_norm_stderr": 0.0493929144727348 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3959131545338442, + "acc_stderr": 0.017488247006979273, + "acc_norm": 0.3959131545338442, + "acc_norm_stderr": 0.017488247006979273 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.23703703703703705, + "acc_stderr": 0.03673731683969506, + "acc_norm": 0.23703703703703705, + "acc_norm_stderr": 0.03673731683969506 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4, + "acc_stderr": 0.032025630761017346, + "acc_norm": 0.4, + "acc_norm_stderr": 0.032025630761017346 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.41566265060240964, + 
"acc_stderr": 0.038367221765980515, + "acc_norm": 0.41566265060240964, + "acc_norm_stderr": 0.038367221765980515 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4405144694533762, + "acc_stderr": 0.028196400574197422, + "acc_norm": 0.4405144694533762, + "acc_norm_stderr": 0.028196400574197422 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.35874439461883406, + "acc_stderr": 0.032190792004199956, + "acc_norm": 0.35874439461883406, + "acc_norm_stderr": 0.032190792004199956 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3816793893129771, + "acc_stderr": 0.0426073515764456, + "acc_norm": 0.3816793893129771, + "acc_norm_stderr": 0.0426073515764456 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.40404040404040403, + "acc_stderr": 0.03496130972056127, + "acc_norm": 0.40404040404040403, + "acc_norm_stderr": 0.03496130972056127 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4482758620689655, + "acc_stderr": 0.04144311810878151, + "acc_norm": 0.4482758620689655, + "acc_norm_stderr": 0.04144311810878151 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.04023382273617747, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.04023382273617747 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.40756302521008403, + "acc_stderr": 0.031918633744784645, + "acc_norm": 0.40756302521008403, + "acc_norm_stderr": 0.031918633744784645 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3717948717948718, + "acc_stderr": 0.024503472557110943, + "acc_norm": 0.3717948717948718, + "acc_norm_stderr": 0.024503472557110943 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + 
"harness|ko_mmlu_global_facts|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3645320197044335, + "acc_stderr": 0.033864057460620905, + "acc_norm": 0.3645320197044335, + "acc_norm_stderr": 0.033864057460620905 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.34516129032258064, + "acc_stderr": 0.027045746573534327, + "acc_norm": 0.34516129032258064, + "acc_norm_stderr": 0.027045746573534327 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6709401709401709, + "acc_stderr": 0.03078232157768817, + "acc_norm": 0.6709401709401709, + "acc_norm_stderr": 0.03078232157768817 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.39245283018867927, + "acc_stderr": 0.030052580579557838, + "acc_norm": 0.39245283018867927, + "acc_norm_stderr": 0.030052580579557838 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4727272727272727, + "acc_stderr": 0.04782001791380063, + "acc_norm": 0.4727272727272727, + "acc_norm_stderr": 0.04782001791380063 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.337037037037037, + "acc_stderr": 0.028820884666253252, + "acc_norm": 0.337037037037037, + "acc_norm_stderr": 0.028820884666253252 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2582781456953642, + "acc_stderr": 0.035737053147634576, + "acc_norm": 0.2582781456953642, + "acc_norm_stderr": 0.035737053147634576 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5422885572139303, + "acc_stderr": 0.03522865864099598, + "acc_norm": 0.5422885572139303, + "acc_norm_stderr": 0.03522865864099598 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.36416184971098264, + "acc_stderr": 0.03669072477416907, + "acc_norm": 0.36416184971098264, + "acc_norm_stderr": 
0.03669072477416907 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3835978835978836, + "acc_stderr": 0.0250437573185202, + "acc_norm": 0.3835978835978836, + "acc_norm_stderr": 0.0250437573185202 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.039420826399272135, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.039420826399272135 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4682080924855491, + "acc_stderr": 0.02686462436675664, + "acc_norm": 0.4682080924855491, + "acc_norm_stderr": 0.02686462436675664 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4723926380368098, + "acc_stderr": 0.0392237829061099, + "acc_norm": 0.4723926380368098, + "acc_norm_stderr": 0.0392237829061099 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4351851851851852, + "acc_stderr": 0.027586006221607708, + "acc_norm": 0.4351851851851852, + "acc_norm_stderr": 0.027586006221607708 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.45077720207253885, + "acc_stderr": 0.035909109522355244, + "acc_norm": 0.45077720207253885, + "acc_norm_stderr": 0.035909109522355244 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04434600701584925, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04434600701584925 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.4018348623853211, + "acc_stderr": 0.02102010617299701, + "acc_norm": 0.4018348623853211, + 
"acc_norm_stderr": 0.02102010617299701 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.044444444444444495, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.044444444444444495 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.45751633986928103, + "acc_stderr": 0.028526383452142624, + "acc_norm": 0.45751633986928103, + "acc_norm_stderr": 0.028526383452142624 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6528925619834711, + "acc_stderr": 0.04345724570292535, + "acc_norm": 0.6528925619834711, + "acc_norm_stderr": 0.04345724570292535 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3815789473684211, + "acc_stderr": 0.03953173377749194, + "acc_norm": 0.3815789473684211, + "acc_norm_stderr": 0.03953173377749194 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.019659922493623343, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.019659922493623343 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3120567375886525, + "acc_stderr": 0.02764012054516992, + "acc_norm": 0.3120567375886525, + "acc_norm_stderr": 0.02764012054516992 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.33035714285714285, + "acc_stderr": 0.04464285714285712, + "acc_norm": 0.33035714285714285, + "acc_norm_stderr": 0.04464285714285712 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.39814814814814814, + "acc_stderr": 0.033384734032074016, + "acc_norm": 0.39814814814814814, + "acc_norm_stderr": 0.033384734032074016 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.28938547486033517, + "acc_stderr": 0.015166544550490312, + "acc_norm": 0.28938547486033517, + "acc_norm_stderr": 0.015166544550490312 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.43, + "acc_stderr": 
0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.65, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.65, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3125, + "acc_stderr": 0.02815637344037142, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.02815637344037142 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5428571428571428, + "acc_stderr": 0.03189141832421397, + "acc_norm": 0.5428571428571428, + "acc_norm_stderr": 0.03189141832421397 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5021097046413502, + "acc_stderr": 0.032546938018020076, + "acc_norm": 0.5021097046413502, + "acc_norm_stderr": 0.032546938018020076 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3305084745762712, + "acc_stderr": 0.012014142101842963, + "acc_norm": 0.3305084745762712, + "acc_norm_stderr": 0.012014142101842963 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.46078431372549017, + "acc_stderr": 0.03498501649369527, + "acc_norm": 0.46078431372549017, + "acc_norm_stderr": 0.03498501649369527 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4303030303030303, + "acc_stderr": 0.03866225962879077, + "acc_norm": 0.4303030303030303, + "acc_norm_stderr": 0.03866225962879077 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3023255813953488, + "mc1_stderr": 0.01607750926613303, + "mc2": 0.4852690277608778, + "mc2_stderr": 0.016708659698307562 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.29634002361275086, + "acc_stderr": 0.015699701628594232, + "acc_norm": 0.38016528925619836, + "acc_norm_stderr": 0.016689333596980112 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + 
"harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + 
"harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "cognitivecomputations/dolphin-2.7-mixtral-8x7b", + "model_sha": "626c8252e4fd574a9aee4c5f0590529a59412345", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/cognitivecomputations/dolphin-2.9.1-llama-3-8b/result_2024-05-18 01:33:09.json b/cognitivecomputations/dolphin-2.9.1-llama-3-8b/result_2024-05-18 01:33:09.json new file mode 100644 index 0000000000000000000000000000000000000000..11a0d24826d8967e4bf630cfa47a9c5046f8ffbe --- /dev/null +++ b/cognitivecomputations/dolphin-2.9.1-llama-3-8b/result_2024-05-18 01:33:09.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3651877133105802, + "acc_stderr": 0.0140702655192688, + "acc_norm": 0.4334470989761092, + "acc_norm_stderr": 0.014481376224558903 + }, + "harness|ko_hellaswag|10": { + "acc": 0.36715793666600277, + "acc_stderr": 0.004810449343572393, + "acc_norm": 0.4797849034056961, + "acc_norm_stderr": 0.0049857015938980015 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.49122807017543857, + "acc_stderr": 0.038342347441649924, + "acc_norm": 0.49122807017543857, + "acc_norm_stderr": 0.038342347441649924 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.49514563106796117, + "acc_stderr": 0.049505043821289195, + "acc_norm": 0.49514563106796117, + "acc_norm_stderr": 
0.049505043821289195 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4789272030651341, + "acc_stderr": 0.0178640767862129, + "acc_norm": 0.4789272030651341, + "acc_norm_stderr": 0.0178640767862129 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4, + "acc_stderr": 0.04232073695151589, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04232073695151589 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3829787234042553, + "acc_stderr": 0.03177821250236922, + "acc_norm": 0.3829787234042553, + "acc_norm_stderr": 0.03177821250236922 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4397590361445783, + "acc_stderr": 0.03864139923699122, + "acc_norm": 0.4397590361445783, + "acc_norm_stderr": 0.03864139923699122 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.43729903536977494, + "acc_stderr": 0.028173917761762878, + "acc_norm": 0.43729903536977494, + "acc_norm_stderr": 0.028173917761762878 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4663677130044843, + "acc_stderr": 0.033481800170603065, + "acc_norm": 0.4663677130044843, + "acc_norm_stderr": 0.033481800170603065 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48854961832061067, + "acc_stderr": 0.043841400240780176, + "acc_norm": 0.48854961832061067, + "acc_norm_stderr": 0.043841400240780176 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.4595959595959596, + "acc_stderr": 0.03550702465131341, + "acc_norm": 0.4595959595959596, + "acc_norm_stderr": 0.03550702465131341 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5517241379310345, + "acc_stderr": 0.04144311810878152, + "acc_norm": 0.5517241379310345, + "acc_norm_stderr": 0.04144311810878152 + }, + 
"harness|ko_mmlu_college_physics|5": { + "acc": 0.35294117647058826, + "acc_stderr": 0.04755129616062947, + "acc_norm": 0.35294117647058826, + "acc_norm_stderr": 0.04755129616062947 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.46218487394957986, + "acc_stderr": 0.032385469487589795, + "acc_norm": 0.46218487394957986, + "acc_norm_stderr": 0.032385469487589795 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.40512820512820513, + "acc_stderr": 0.024890471769938145, + "acc_norm": 0.40512820512820513, + "acc_norm_stderr": 0.024890471769938145 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.53, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.53, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.048262172941398944, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.048262172941398944 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3645320197044335, + "acc_stderr": 0.033864057460620905, + "acc_norm": 0.3645320197044335, + "acc_norm_stderr": 0.033864057460620905 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4645161290322581, + "acc_stderr": 0.028372287797962952, + "acc_norm": 0.4645161290322581, + "acc_norm_stderr": 0.028372287797962952 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6709401709401709, + "acc_stderr": 0.03078232157768817, + "acc_norm": 0.6709401709401709, + "acc_norm_stderr": 0.03078232157768817 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.44528301886792454, + "acc_stderr": 0.030588052974270658, + "acc_norm": 0.44528301886792454, + "acc_norm_stderr": 0.030588052974270658 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4818181818181818, + "acc_stderr": 0.04785964010794916, + "acc_norm": 
0.4818181818181818, + "acc_norm_stderr": 0.04785964010794916 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.02911661760608301, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.02911661760608301 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.271523178807947, + "acc_stderr": 0.03631329803969653, + "acc_norm": 0.271523178807947, + "acc_norm_stderr": 0.03631329803969653 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5870646766169154, + "acc_stderr": 0.03481520803367348, + "acc_norm": 0.5870646766169154, + "acc_norm_stderr": 0.03481520803367348 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4046242774566474, + "acc_stderr": 0.03742461193887249, + "acc_norm": 0.4046242774566474, + "acc_norm_stderr": 0.03742461193887249 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3412698412698413, + "acc_stderr": 0.02441923496681907, + "acc_norm": 0.3412698412698413, + "acc_norm_stderr": 0.02441923496681907 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4236111111111111, + "acc_stderr": 0.041321250197233685, + "acc_norm": 0.4236111111111111, + "acc_norm_stderr": 0.041321250197233685 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.62, + "acc_stderr": 0.04878317312145634, + "acc_norm": 0.62, + "acc_norm_stderr": 0.04878317312145634 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5, + "acc_stderr": 0.026919095102908273, + "acc_norm": 0.5, + "acc_norm_stderr": 0.026919095102908273 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3803680981595092, + "acc_stderr": 0.03814269893261837, + "acc_norm": 0.3803680981595092, + "acc_norm_stderr": 0.03814269893261837 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4537037037037037, + "acc_stderr": 0.027701228468542602, + "acc_norm": 
0.4537037037037037, + "acc_norm_stderr": 0.027701228468542602 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5181347150259067, + "acc_stderr": 0.036060650018329185, + "acc_norm": 0.5181347150259067, + "acc_norm_stderr": 0.036060650018329185 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.32456140350877194, + "acc_stderr": 0.044045561573747685, + "acc_norm": 0.32456140350877194, + "acc_norm_stderr": 0.044045561573747685 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.46238532110091746, + "acc_stderr": 0.021376575274397576, + "acc_norm": 0.46238532110091746, + "acc_norm_stderr": 0.021376575274397576 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.36507936507936506, + "acc_stderr": 0.04306241259127153, + "acc_norm": 0.36507936507936506, + "acc_norm_stderr": 0.04306241259127153 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4803921568627451, + "acc_stderr": 0.028607893699576063, + "acc_norm": 0.4803921568627451, + "acc_norm_stderr": 0.028607893699576063 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.628099173553719, + "acc_stderr": 0.04412015806624504, + "acc_norm": 0.628099173553719, + "acc_norm_stderr": 0.04412015806624504 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4473684210526316, + "acc_stderr": 0.04046336883978251, + "acc_norm": 0.4473684210526316, + "acc_norm_stderr": 0.04046336883978251 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.38562091503267976, + "acc_stderr": 0.01969145905235417, + "acc_norm": 0.38562091503267976, + "acc_norm_stderr": 0.01969145905235417 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.35106382978723405, 
+ "acc_stderr": 0.028473501272963764, + "acc_norm": 0.35106382978723405, + "acc_norm_stderr": 0.028473501272963764 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.48214285714285715, + "acc_stderr": 0.047427623612430116, + "acc_norm": 0.48214285714285715, + "acc_norm_stderr": 0.047427623612430116 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.30092592592592593, + "acc_stderr": 0.03128039084329883, + "acc_norm": 0.30092592592592593, + "acc_norm_stderr": 0.03128039084329883 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27262569832402234, + "acc_stderr": 0.014893391735249614, + "acc_norm": 0.27262569832402234, + "acc_norm_stderr": 0.014893391735249614 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.04999999999999999, + "acc_norm": 0.45, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.2867647058823529, + "acc_stderr": 0.027472274473233818, + "acc_norm": 0.2867647058823529, + "acc_norm_stderr": 0.027472274473233818 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.49795918367346936, + "acc_stderr": 0.0320089533497105, + "acc_norm": 0.49795918367346936, + "acc_norm_stderr": 0.0320089533497105 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6118143459915611, + "acc_stderr": 0.031722950043323296, + "acc_norm": 0.6118143459915611, + "acc_norm_stderr": 0.031722950043323296 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.31421121251629724, + "acc_stderr": 0.011855911587048226, + "acc_norm": 0.31421121251629724, + "acc_norm_stderr": 0.011855911587048226 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5147058823529411, + "acc_stderr": 0.035077938347913236, + "acc_norm": 0.5147058823529411, + "acc_norm_stderr": 
0.035077938347913236 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5636363636363636, + "acc_stderr": 0.03872592983524754, + "acc_norm": 0.5636363636363636, + "acc_norm_stderr": 0.03872592983524754 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.31211750305997554, + "mc1_stderr": 0.01622075676952092, + "mc2": 0.4952590004277726, + "mc2_stderr": 0.016054114356981217 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.48288075560802834, + "acc_stderr": 0.017180275246085622, + "acc_norm": 0.5360094451003542, + "acc_norm_stderr": 0.017145715365486664 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + 
"harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "cognitivecomputations/dolphin-2.9.1-llama-3-8b", + "model_sha": "924427715104c3667868d9297e4069b4c62bfd88", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/cognitivecomputations/dolphin-2.9.1-mixtral-1x22b/result_2024-05-25 00:03:12.json b/cognitivecomputations/dolphin-2.9.1-mixtral-1x22b/result_2024-05-25 00:03:12.json new file mode 100644 index 0000000000000000000000000000000000000000..b5b72802a18ec88f0cc80f26c0cb6b9c4b49d5d7 --- /dev/null +++ 
b/cognitivecomputations/dolphin-2.9.1-mixtral-1x22b/result_2024-05-25 00:03:12.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.20563139931740615, + "acc_stderr": 0.011810745260742566, + "acc_norm": 0.24829351535836178, + "acc_norm_stderr": 0.012624912868089757 + }, + "harness|ko_hellaswag|10": { + "acc": 0.270264887472615, + "acc_stderr": 0.004431889783633821, + "acc_norm": 0.29506074487153955, + "acc_norm_stderr": 0.0045513798381561065 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.3567251461988304, + "acc_stderr": 0.03674013002860954, + "acc_norm": 0.3567251461988304, + "acc_norm_stderr": 0.03674013002860954 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.33980582524271846, + "acc_stderr": 0.04689765937278135, + "acc_norm": 0.33980582524271846, + "acc_norm_stderr": 0.04689765937278135 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3397190293742018, + "acc_stderr": 0.01693639411430165, + "acc_norm": 0.3397190293742018, + "acc_norm_stderr": 0.01693639411430165 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.03944624162501116, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.03944624162501116 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.26382978723404255, + "acc_stderr": 0.028809989854102963, + "acc_norm": 0.26382978723404255, + "acc_norm_stderr": 0.028809989854102963 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3433734939759036, + "acc_stderr": 0.03696584317010602, + "acc_norm": 0.3433734939759036, + "acc_norm_stderr": 0.03696584317010602 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.31511254019292606, + "acc_stderr": 0.026385273703464496, + "acc_norm": 0.31511254019292606, + "acc_norm_stderr": 0.026385273703464496 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 
0.2825112107623318, + "acc_stderr": 0.03021683101150875, + "acc_norm": 0.2825112107623318, + "acc_norm_stderr": 0.03021683101150875 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4351145038167939, + "acc_stderr": 0.04348208051644858, + "acc_norm": 0.4351145038167939, + "acc_norm_stderr": 0.04348208051644858 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.03358618145732524, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.03358618145732524 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4206896551724138, + "acc_stderr": 0.0411391498118926, + "acc_norm": 0.4206896551724138, + "acc_norm_stderr": 0.0411391498118926 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.19607843137254902, + "acc_stderr": 0.03950581861179963, + "acc_norm": 0.19607843137254902, + "acc_norm_stderr": 0.03950581861179963 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3277310924369748, + "acc_stderr": 0.030489911417673227, + "acc_norm": 0.3277310924369748, + "acc_norm_stderr": 0.030489911417673227 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2743589743589744, + "acc_stderr": 0.022622765767493197, + "acc_norm": 0.2743589743589744, + "acc_norm_stderr": 0.022622765767493197 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4537037037037037, + "acc_stderr": 0.04812917324536823, + "acc_norm": 0.4537037037037037, + "acc_norm_stderr": 0.04812917324536823 + }, + 
"harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.37438423645320196, + "acc_stderr": 0.03405155380561952, + "acc_norm": 0.37438423645320196, + "acc_norm_stderr": 0.03405155380561952 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4, + "acc_stderr": 0.02786932057166463, + "acc_norm": 0.4, + "acc_norm_stderr": 0.02786932057166463 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.43162393162393164, + "acc_stderr": 0.0324483553531149, + "acc_norm": 0.43162393162393164, + "acc_norm_stderr": 0.0324483553531149 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.35094339622641507, + "acc_stderr": 0.02937364625323469, + "acc_norm": 0.35094339622641507, + "acc_norm_stderr": 0.02937364625323469 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.37272727272727274, + "acc_stderr": 0.04631381319425463, + "acc_norm": 0.37272727272727274, + "acc_norm_stderr": 0.04631381319425463 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.28888888888888886, + "acc_stderr": 0.027634907264178544, + "acc_norm": 0.28888888888888886, + "acc_norm_stderr": 0.027634907264178544 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2781456953642384, + "acc_stderr": 0.03658603262763743, + "acc_norm": 0.2781456953642384, + "acc_norm_stderr": 0.03658603262763743 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.4228855721393035, + "acc_stderr": 0.034932317774212816, + "acc_norm": 0.4228855721393035, + "acc_norm_stderr": 0.034932317774212816 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2947976878612717, + "acc_stderr": 0.034765996075164785, + "acc_norm": 0.2947976878612717, + "acc_norm_stderr": 0.034765996075164785 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.023919984164047736, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.023919984164047736 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2708333333333333, + "acc_stderr": 0.03716177437566018, + "acc_norm": 
0.2708333333333333, + "acc_norm_stderr": 0.03716177437566018 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.3786127167630058, + "acc_stderr": 0.02611374936131034, + "acc_norm": 0.3786127167630058, + "acc_norm_stderr": 0.02611374936131034 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3067484662576687, + "acc_stderr": 0.03623089915724149, + "acc_norm": 0.3067484662576687, + "acc_norm_stderr": 0.03623089915724149 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.38271604938271603, + "acc_stderr": 0.02704453813840262, + "acc_norm": 0.38271604938271603, + "acc_norm_stderr": 0.02704453813840262 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.2849740932642487, + "acc_stderr": 0.0325771407770966, + "acc_norm": 0.2849740932642487, + "acc_norm_stderr": 0.0325771407770966 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.03999423879281336, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.03999423879281336 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.30458715596330277, + "acc_stderr": 0.019732299420354038, + "acc_norm": 0.30458715596330277, + "acc_norm_stderr": 0.019732299420354038 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.040061680838488774, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.040061680838488774 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3758169934640523, + "acc_stderr": 0.027732834353363947, + 
"acc_norm": 0.3758169934640523, + "acc_norm_stderr": 0.027732834353363947 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.628099173553719, + "acc_stderr": 0.04412015806624504, + "acc_norm": 0.628099173553719, + "acc_norm_stderr": 0.04412015806624504 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.03583496176361061, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.03583496176361061 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.28594771241830064, + "acc_stderr": 0.018280485072954683, + "acc_norm": 0.28594771241830064, + "acc_norm_stderr": 0.018280485072954683 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32978723404255317, + "acc_stderr": 0.02804594694204241, + "acc_norm": 0.32978723404255317, + "acc_norm_stderr": 0.02804594694204241 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25, + "acc_stderr": 0.04109974682633932, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04109974682633932 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2916666666666667, + "acc_stderr": 0.03099866630456053, + "acc_norm": 0.2916666666666667, + "acc_norm_stderr": 0.03099866630456053 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2324022346368715, + "acc_stderr": 0.014125968754673389, + "acc_norm": 0.2324022346368715, + "acc_norm_stderr": 0.014125968754673389 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.44, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.2610294117647059, + "acc_stderr": 0.02667925227010312, 
+ "acc_norm": 0.2610294117647059, + "acc_norm_stderr": 0.02667925227010312 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4204081632653061, + "acc_stderr": 0.03160106993449604, + "acc_norm": 0.4204081632653061, + "acc_norm_stderr": 0.03160106993449604 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.3881856540084388, + "acc_stderr": 0.03172295004332328, + "acc_norm": 0.3881856540084388, + "acc_norm_stderr": 0.03172295004332328 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.28226857887874834, + "acc_stderr": 0.011495852176241944, + "acc_norm": 0.28226857887874834, + "acc_norm_stderr": 0.011495852176241944 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.03166009679399811, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.03166009679399811 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.296969696969697, + "acc_stderr": 0.03567969772268047, + "acc_norm": 0.296969696969697, + "acc_norm_stderr": 0.03567969772268047 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2717258261933905, + "mc1_stderr": 0.015572840452875837, + "mc2": 0.4493369588420673, + "mc2_stderr": 0.01648015939334263 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.10979929161747344, + "acc_stderr": 0.01074876468672161, + "acc_norm": 0.27744982290436837, + "acc_norm_stderr": 0.015393630236605973 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + 
"harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + 
"harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "cognitivecomputations/dolphin-2.9.1-mixtral-1x22b", + "model_sha": "fbd34cc9ac4f9fbf52d79754c81f30a19eb8c874", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/cognitivecomputations/dolphin-2.9.1-yi-1.5-9b/result_2024-05-25 05:44:46.json b/cognitivecomputations/dolphin-2.9.1-yi-1.5-9b/result_2024-05-25 05:44:46.json new file mode 100644 index 0000000000000000000000000000000000000000..a8d5eae9923ad0adbff87c6fb6a22f22d86c466c --- /dev/null +++ b/cognitivecomputations/dolphin-2.9.1-yi-1.5-9b/result_2024-05-25 05:44:46.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.31313993174061433, + "acc_stderr": 0.013552671543623501, + "acc_norm": 0.3651877133105802, + "acc_norm_stderr": 0.014070265519268802 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3397729535949014, + "acc_stderr": 0.004726640532562043, + "acc_norm": 0.41525592511451903, + "acc_norm_stderr": 0.0049175903781382094 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4269005847953216, + "acc_stderr": 0.03793620616529917, + "acc_norm": 0.4269005847953216, + "acc_norm_stderr": 0.03793620616529917 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5339805825242718, + "acc_stderr": 0.04939291447273482, + "acc_norm": 0.5339805825242718, + "acc_norm_stderr": 0.04939291447273482 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4521072796934866, + "acc_stderr": 0.01779775149386562, + "acc_norm": 0.4521072796934866, + "acc_norm_stderr": 0.01779775149386562 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34814814814814815, + "acc_stderr": 0.041153246103369526, + "acc_norm": 0.34814814814814815, + "acc_norm_stderr": 0.041153246103369526 + }, + 
"harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.44680851063829785, + "acc_stderr": 0.0325005368436584, + "acc_norm": 0.44680851063829785, + "acc_norm_stderr": 0.0325005368436584 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42168674698795183, + "acc_stderr": 0.03844453181770917, + "acc_norm": 0.42168674698795183, + "acc_norm_stderr": 0.03844453181770917 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.47266881028938906, + "acc_stderr": 0.028355633568328188, + "acc_norm": 0.47266881028938906, + "acc_norm_stderr": 0.028355633568328188 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.47085201793721976, + "acc_stderr": 0.03350073248773404, + "acc_norm": 0.47085201793721976, + "acc_norm_stderr": 0.03350073248773404 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5038167938931297, + "acc_stderr": 0.043851623256015534, + "acc_norm": 0.5038167938931297, + "acc_norm_stderr": 0.043851623256015534 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5202020202020202, + "acc_stderr": 0.03559443565563919, + "acc_norm": 0.5202020202020202, + "acc_norm_stderr": 0.03559443565563919 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5517241379310345, + "acc_stderr": 0.041443118108781526, + "acc_norm": 0.5517241379310345, + "acc_norm_stderr": 0.041443118108781526 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04690650298201942, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04690650298201942 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5042016806722689, + "acc_stderr": 0.03247734334448111, + "acc_norm": 0.5042016806722689, + 
"acc_norm_stderr": 0.03247734334448111 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4717948717948718, + "acc_stderr": 0.025310639254933903, + "acc_norm": 0.4717948717948718, + "acc_norm_stderr": 0.025310639254933903 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.55, + "acc_stderr": 0.04999999999999999, + "acc_norm": 0.55, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.04803752235190193, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.04803752235190193 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.03465304488406795, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.03465304488406795 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.45161290322580644, + "acc_stderr": 0.028310500348568392, + "acc_norm": 0.45161290322580644, + "acc_norm_stderr": 0.028310500348568392 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6794871794871795, + "acc_stderr": 0.03057281131029961, + "acc_norm": 0.6794871794871795, + "acc_norm_stderr": 0.03057281131029961 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4490566037735849, + "acc_stderr": 0.030612730713641095, + "acc_norm": 0.4490566037735849, + "acc_norm_stderr": 0.030612730713641095 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4090909090909091, + "acc_stderr": 0.047093069786618966, + "acc_norm": 0.4090909090909091, + "acc_norm_stderr": 0.047093069786618966 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.4222222222222222, + "acc_stderr": 0.030114442019668085, + "acc_norm": 0.4222222222222222, + "acc_norm_stderr": 0.030114442019668085 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33112582781456956, + "acc_stderr": 
0.038425817186598696, + "acc_norm": 0.33112582781456956, + "acc_norm_stderr": 0.038425817186598696 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5323383084577115, + "acc_stderr": 0.035281314729336065, + "acc_norm": 0.5323383084577115, + "acc_norm_stderr": 0.035281314729336065 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3988439306358382, + "acc_stderr": 0.03733626655383509, + "acc_norm": 0.3988439306358382, + "acc_norm_stderr": 0.03733626655383509 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.5052910052910053, + "acc_stderr": 0.02574986828855657, + "acc_norm": 0.5052910052910053, + "acc_norm_stderr": 0.02574986828855657 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2569444444444444, + "acc_stderr": 0.03653946969442099, + "acc_norm": 0.2569444444444444, + "acc_norm_stderr": 0.03653946969442099 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.66, + "acc_stderr": 0.04760952285695238, + "acc_norm": 0.66, + "acc_norm_stderr": 0.04760952285695238 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.49421965317919075, + "acc_stderr": 0.02691729617914911, + "acc_norm": 0.49421965317919075, + "acc_norm_stderr": 0.02691729617914911 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.49079754601226994, + "acc_stderr": 0.03927705600787443, + "acc_norm": 0.49079754601226994, + "acc_norm_stderr": 0.03927705600787443 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.45987654320987653, + "acc_stderr": 0.02773102275353928, + "acc_norm": 0.45987654320987653, + "acc_norm_stderr": 0.02773102275353928 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.46113989637305697, + 
"acc_stderr": 0.035975244117345775, + "acc_norm": 0.46113989637305697, + "acc_norm_stderr": 0.035975244117345775 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.39473684210526316, + "acc_stderr": 0.04598188057816542, + "acc_norm": 0.39473684210526316, + "acc_norm_stderr": 0.04598188057816542 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.4917431192660551, + "acc_stderr": 0.021434399918214327, + "acc_norm": 0.4917431192660551, + "acc_norm_stderr": 0.021434399918214327 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4365079365079365, + "acc_stderr": 0.04435932892851466, + "acc_norm": 0.4365079365079365, + "acc_norm_stderr": 0.04435932892851466 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.45098039215686275, + "acc_stderr": 0.028491993586171573, + "acc_norm": 0.45098039215686275, + "acc_norm_stderr": 0.028491993586171573 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6446280991735537, + "acc_stderr": 0.04369236326573981, + "acc_norm": 0.6446280991735537, + "acc_norm_stderr": 0.04369236326573981 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.34868421052631576, + "acc_stderr": 0.03878139888797611, + "acc_norm": 0.34868421052631576, + "acc_norm_stderr": 0.03878139888797611 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3954248366013072, + "acc_stderr": 0.019780465954777535, + "acc_norm": 0.3954248366013072, + "acc_norm_stderr": 0.019780465954777535 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.36879432624113473, + "acc_stderr": 0.028782227561347254, + "acc_norm": 0.36879432624113473, + "acc_norm_stderr": 0.028782227561347254 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4107142857142857, + "acc_stderr": 0.046695106638751926, + "acc_norm": 0.4107142857142857, + "acc_norm_stderr": 0.046695106638751926 + }, + 
"harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4212962962962963, + "acc_stderr": 0.03367462138896078, + "acc_norm": 0.4212962962962963, + "acc_norm_stderr": 0.03367462138896078 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.3396648044692737, + "acc_stderr": 0.015839400406212498, + "acc_norm": 0.3396648044692737, + "acc_norm_stderr": 0.015839400406212498 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.74, + "acc_stderr": 0.04408440022768081, + "acc_norm": 0.74, + "acc_norm_stderr": 0.04408440022768081 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.31985294117647056, + "acc_stderr": 0.02833295951403122, + "acc_norm": 0.31985294117647056, + "acc_norm_stderr": 0.02833295951403122 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4857142857142857, + "acc_stderr": 0.03199615232806287, + "acc_norm": 0.4857142857142857, + "acc_norm_stderr": 0.03199615232806287 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.48523206751054854, + "acc_stderr": 0.032533028078777386, + "acc_norm": 0.48523206751054854, + "acc_norm_stderr": 0.032533028078777386 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3409387222946545, + "acc_stderr": 0.01210681720306721, + "acc_norm": 0.3409387222946545, + "acc_norm_stderr": 0.01210681720306721 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.36764705882352944, + "acc_stderr": 0.03384132045674119, + "acc_norm": 0.36764705882352944, + "acc_norm_stderr": 0.03384132045674119 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3696969696969697, + "acc_stderr": 0.03769430314512566, + "acc_norm": 0.3696969696969697, + "acc_norm_stderr": 0.03769430314512566 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3427172582619339, + "mc1_stderr": 0.016614949385347046, + "mc2": 
0.5192104127324159, + "mc2_stderr": 0.01613029326024926 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5466351829988194, + "acc_stderr": 0.01711541822522687, + "acc_norm": 0.5619834710743802, + "acc_norm_stderr": 0.01705775370216029 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + 
"harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "cognitivecomputations/dolphin-2.9.1-yi-1.5-9b", + "model_sha": "91f0a521e3e2a0675a3549aa5d3f40717068de94", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/cognitivecomputations/dolphin-2.9.2-Phi-3-Medium-abliterated/result_2024-06-17 06:11:40.json b/cognitivecomputations/dolphin-2.9.2-Phi-3-Medium-abliterated/result_2024-06-17 06:11:40.json new file mode 100644 index 0000000000000000000000000000000000000000..dbfac29ebc338fcc409aa419a5910815e9c5d2ea --- /dev/null +++ b/cognitivecomputations/dolphin-2.9.2-Phi-3-Medium-abliterated/result_2024-06-17 06:11:40.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2551194539249147, + "acc_stderr": 0.012739038695202105, + "acc_norm": 0.28071672354948807, + "acc_norm_stderr": 0.01313123812697558 + }, + "harness|ko_hellaswag|10": { + "acc": 0.2978490340569608, + 
"acc_stderr": 0.0045637817073769385, + "acc_norm": 0.32901812387970525, + "acc_norm_stderr": 0.004688963175758122 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.45614035087719296, + "acc_stderr": 0.03820042586602966, + "acc_norm": 0.45614035087719296, + "acc_norm_stderr": 0.03820042586602966 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.39805825242718446, + "acc_stderr": 0.04846748253977239, + "acc_norm": 0.39805825242718446, + "acc_norm_stderr": 0.04846748253977239 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.40102171136653897, + "acc_stderr": 0.017526133150124582, + "acc_norm": 0.40102171136653897, + "acc_norm_stderr": 0.017526133150124582 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3851851851851852, + "acc_stderr": 0.042039210401562783, + "acc_norm": 0.3851851851851852, + "acc_norm_stderr": 0.042039210401562783 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.35, + "acc_stderr": 0.04793724854411019, + "acc_norm": 0.35, + "acc_norm_stderr": 0.04793724854411019 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39148936170212767, + "acc_stderr": 0.03190701242326812, + "acc_norm": 0.39148936170212767, + "acc_norm_stderr": 0.03190701242326812 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39759036144578314, + "acc_stderr": 0.038099730845402184, + "acc_norm": 0.39759036144578314, + "acc_norm_stderr": 0.038099730845402184 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.42765273311897106, + "acc_stderr": 0.02809924077580956, + "acc_norm": 0.42765273311897106, + "acc_norm_stderr": 0.02809924077580956 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.38565022421524664, + "acc_stderr": 0.03266842214289201, + "acc_norm": 0.38565022421524664, + "acc_norm_stderr": 0.03266842214289201 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.40458015267175573, + "acc_stderr": 0.043046937953806645, + "acc_norm": 0.40458015267175573, + "acc_norm_stderr": 0.043046937953806645 + }, + "harness|ko_mmlu_medical_genetics|5": { 
+ "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.4797979797979798, + "acc_stderr": 0.03559443565563918, + "acc_norm": 0.4797979797979798, + "acc_norm_stderr": 0.03559443565563918 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5103448275862069, + "acc_stderr": 0.04165774775728762, + "acc_norm": 0.5103448275862069, + "acc_norm_stderr": 0.04165774775728762 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.040233822736177476, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.040233822736177476 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.46638655462184875, + "acc_stderr": 0.03240501447690071, + "acc_norm": 0.46638655462184875, + "acc_norm_stderr": 0.03240501447690071 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.35384615384615387, + "acc_stderr": 0.024243783994062178, + "acc_norm": 0.35384615384615387, + "acc_norm_stderr": 0.024243783994062178 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5185185185185185, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.5185185185185185, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.43842364532019706, + "acc_stderr": 0.03491207857486518, + "acc_norm": 0.43842364532019706, + "acc_norm_stderr": 0.03491207857486518 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4483870967741935, + "acc_stderr": 0.02829205683011273, + "acc_norm": 0.4483870967741935, + "acc_norm_stderr": 0.02829205683011273 + }, + 
"harness|ko_mmlu_marketing|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.03255326307272487, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.03255326307272487 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4075471698113208, + "acc_stderr": 0.030242233800854498, + "acc_norm": 0.4075471698113208, + "acc_norm_stderr": 0.030242233800854498 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.37272727272727274, + "acc_stderr": 0.04631381319425465, + "acc_norm": 0.37272727272727274, + "acc_norm_stderr": 0.04631381319425465 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.029723278961476664, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.029723278961476664 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.03780445850526733, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.03780445850526733 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.4975124378109453, + "acc_stderr": 0.0353549015013729, + "acc_norm": 0.4975124378109453, + "acc_norm_stderr": 0.0353549015013729 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2947976878612717, + "acc_stderr": 0.03476599607516478, + "acc_norm": 0.2947976878612717, + "acc_norm_stderr": 0.03476599607516478 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.5026455026455027, + "acc_stderr": 0.025750949678130387, + "acc_norm": 0.5026455026455027, + "acc_norm_stderr": 0.025750949678130387 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.03745554791462457, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.03745554791462457 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + 
"acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4046242774566474, + "acc_stderr": 0.026424816594009845, + "acc_norm": 0.4046242774566474, + "acc_norm_stderr": 0.026424816594009845 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3374233128834356, + "acc_stderr": 0.037149084099355745, + "acc_norm": 0.3374233128834356, + "acc_norm_stderr": 0.037149084099355745 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.41358024691358025, + "acc_stderr": 0.027402042040269952, + "acc_norm": 0.41358024691358025, + "acc_norm_stderr": 0.027402042040269952 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.3316062176165803, + "acc_stderr": 0.03397636541089116, + "acc_norm": 0.3316062176165803, + "acc_norm_stderr": 0.03397636541089116 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2894736842105263, + "acc_stderr": 0.042663394431593955, + "acc_norm": 0.2894736842105263, + "acc_norm_stderr": 0.042663394431593955 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3412844036697248, + "acc_stderr": 0.020328612816592435, + "acc_norm": 0.3412844036697248, + "acc_norm_stderr": 0.020328612816592435 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.40476190476190477, + "acc_stderr": 0.04390259265377561, + "acc_norm": 0.40476190476190477, + "acc_norm_stderr": 0.04390259265377561 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.45098039215686275, + "acc_stderr": 0.028491993586171566, + "acc_norm": 0.45098039215686275, + "acc_norm_stderr": 0.028491993586171566 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.53, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.53, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6198347107438017, + "acc_stderr": 
0.04431324501968431, + "acc_norm": 0.6198347107438017, + "acc_norm_stderr": 0.04431324501968431 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4144736842105263, + "acc_stderr": 0.04008973785779206, + "acc_norm": 0.4144736842105263, + "acc_norm_stderr": 0.04008973785779206 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.34477124183006536, + "acc_stderr": 0.01922832201869664, + "acc_norm": 0.34477124183006536, + "acc_norm_stderr": 0.01922832201869664 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.36879432624113473, + "acc_stderr": 0.02878222756134724, + "acc_norm": 0.36879432624113473, + "acc_norm_stderr": 0.02878222756134724 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25, + "acc_stderr": 0.04109974682633932, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04109974682633932 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.375, + "acc_stderr": 0.033016908987210894, + "acc_norm": 0.375, + "acc_norm_stderr": 0.033016908987210894 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2770949720670391, + "acc_stderr": 0.014968772435812143, + "acc_norm": 0.2770949720670391, + "acc_norm_stderr": 0.014968772435812143 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.2867647058823529, + "acc_stderr": 0.02747227447323382, + "acc_norm": 0.2867647058823529, + "acc_norm_stderr": 0.02747227447323382 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4775510204081633, + "acc_stderr": 0.03197694118713673, + "acc_norm": 0.4775510204081633, + "acc_norm_stderr": 0.03197694118713673 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.3333333333333333, + 
"acc_stderr": 0.0306858205966108, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.0306858205966108 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3011734028683181, + "acc_stderr": 0.01171714875164843, + "acc_norm": 0.3011734028683181, + "acc_norm_stderr": 0.01171714875164843 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.30392156862745096, + "acc_stderr": 0.03228210387037892, + "acc_norm": 0.30392156862745096, + "acc_norm_stderr": 0.03228210387037892 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.32727272727272727, + "acc_stderr": 0.036639749943912434, + "acc_norm": 0.32727272727272727, + "acc_norm_stderr": 0.036639749943912434 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2827417380660955, + "mc1_stderr": 0.015764770836777315, + "mc2": 0.4498599340309103, + "mc2_stderr": 0.016455443007086346 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.35537190082644626, + "acc_stderr": 0.01645549600031454, + "acc_norm": 0.4002361275088548, + "acc_norm_stderr": 0.016844693510505066 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + 
"harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "cognitivecomputations/dolphin-2.9.2-Phi-3-Medium-abliterated", + "model_sha": "d50be5f22ca9745a2a3175996611d6a840318b7f", + "model_dtype": "torch.float16", + 
"lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/cognitivecomputations/dolphin-2.9.2-Phi-3-Medium/result_2024-06-17 06:11:45.json b/cognitivecomputations/dolphin-2.9.2-Phi-3-Medium/result_2024-06-17 06:11:45.json new file mode 100644 index 0000000000000000000000000000000000000000..200ea4811d515c70fe0b3bac0960093f2cd69458 --- /dev/null +++ b/cognitivecomputations/dolphin-2.9.2-Phi-3-Medium/result_2024-06-17 06:11:45.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.25, + "acc_stderr": 0.012653835621466646, + "acc_norm": 0.2764505119453925, + "acc_norm_stderr": 0.013069662474252425 + }, + "harness|ko_hellaswag|10": { + "acc": 0.2984465245966939, + "acc_stderr": 0.004566412808642455, + "acc_norm": 0.3286197968532165, + "acc_norm_stderr": 0.004687514708345314 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.03811079669833531, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.03811079669833531 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.39805825242718446, + "acc_stderr": 0.0484674825397724, + "acc_norm": 0.39805825242718446, + "acc_norm_stderr": 0.0484674825397724 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3959131545338442, + "acc_stderr": 0.01748824700697928, + "acc_norm": 0.3959131545338442, + "acc_norm_stderr": 0.01748824700697928 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37777777777777777, + "acc_stderr": 0.04188307537595852, + "acc_norm": 0.37777777777777777, + "acc_norm_stderr": 0.04188307537595852 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3829787234042553, + "acc_stderr": 0.03177821250236922, + "acc_norm": 0.3829787234042553, + "acc_norm_stderr": 
0.03177821250236922 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.41566265060240964, + "acc_stderr": 0.038367221765980515, + "acc_norm": 0.41566265060240964, + "acc_norm_stderr": 0.038367221765980515 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.40514469453376206, + "acc_stderr": 0.02788238379132595, + "acc_norm": 0.40514469453376206, + "acc_norm_stderr": 0.02788238379132595 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4170403587443946, + "acc_stderr": 0.03309266936071721, + "acc_norm": 0.4170403587443946, + "acc_norm_stderr": 0.03309266936071721 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4351145038167939, + "acc_stderr": 0.04348208051644858, + "acc_norm": 0.4351145038167939, + "acc_norm_stderr": 0.04348208051644858 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.41, + "acc_stderr": 0.04943110704237103, + "acc_norm": 0.41, + "acc_norm_stderr": 0.04943110704237103 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.4898989898989899, + "acc_stderr": 0.035616254886737454, + "acc_norm": 0.4898989898989899, + "acc_norm_stderr": 0.035616254886737454 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5172413793103449, + "acc_stderr": 0.04164188720169375, + "acc_norm": 0.5172413793103449, + "acc_norm_stderr": 0.04164188720169375 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237657, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237657 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.44537815126050423, + "acc_stderr": 0.032284106267163895, + "acc_norm": 0.44537815126050423, + "acc_norm_stderr": 0.032284106267163895 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3641025641025641, + "acc_stderr": 0.024396672985094788, + "acc_norm": 0.3641025641025641, + "acc_norm_stderr": 0.024396672985094788 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + 
"acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5185185185185185, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.5185185185185185, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4482758620689655, + "acc_stderr": 0.03499113137676744, + "acc_norm": 0.4482758620689655, + "acc_norm_stderr": 0.03499113137676744 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.44193548387096776, + "acc_stderr": 0.02825155790684974, + "acc_norm": 0.44193548387096776, + "acc_norm_stderr": 0.02825155790684974 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5641025641025641, + "acc_stderr": 0.03248577511578401, + "acc_norm": 0.5641025641025641, + "acc_norm_stderr": 0.03248577511578401 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4037735849056604, + "acc_stderr": 0.030197611600197946, + "acc_norm": 0.4037735849056604, + "acc_norm_stderr": 0.030197611600197946 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.35454545454545455, + "acc_stderr": 0.04582004841505417, + "acc_norm": 0.35454545454545455, + "acc_norm_stderr": 0.04582004841505417 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3814814814814815, + "acc_stderr": 0.02961671892749759, + "acc_norm": 0.3814814814814815, + "acc_norm_stderr": 0.02961671892749759 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.32450331125827814, + "acc_stderr": 0.03822746937658754, + "acc_norm": 0.32450331125827814, + "acc_norm_stderr": 0.03822746937658754 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5074626865671642, + "acc_stderr": 0.03535140084276719, + "acc_norm": 0.5074626865671642, + "acc_norm_stderr": 0.03535140084276719 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3179190751445087, + 
"acc_stderr": 0.03550683989165582, + "acc_norm": 0.3179190751445087, + "acc_norm_stderr": 0.03550683989165582 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.5158730158730159, + "acc_stderr": 0.025738330639412152, + "acc_norm": 0.5158730158730159, + "acc_norm_stderr": 0.025738330639412152 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2847222222222222, + "acc_stderr": 0.03773809990686935, + "acc_norm": 0.2847222222222222, + "acc_norm_stderr": 0.03773809990686935 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4161849710982659, + "acc_stderr": 0.026538189104705467, + "acc_norm": 0.4161849710982659, + "acc_norm_stderr": 0.026538189104705467 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3619631901840491, + "acc_stderr": 0.037757007291414416, + "acc_norm": 0.3619631901840491, + "acc_norm_stderr": 0.037757007291414416 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4104938271604938, + "acc_stderr": 0.027371350925124768, + "acc_norm": 0.4104938271604938, + "acc_norm_stderr": 0.027371350925124768 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.34196891191709844, + "acc_stderr": 0.03423465100104283, + "acc_norm": 0.34196891191709844, + "acc_norm_stderr": 0.03423465100104283 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3157894736842105, + "acc_stderr": 0.04372748290278007, + "acc_norm": 0.3157894736842105, + "acc_norm_stderr": 0.04372748290278007 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 
0.344954128440367, + "acc_stderr": 0.02038060540506697, + "acc_norm": 0.344954128440367, + "acc_norm_stderr": 0.02038060540506697 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.40476190476190477, + "acc_stderr": 0.043902592653775614, + "acc_norm": 0.40476190476190477, + "acc_norm_stderr": 0.043902592653775614 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.46078431372549017, + "acc_stderr": 0.028541722692618874, + "acc_norm": 0.46078431372549017, + "acc_norm_stderr": 0.028541722692618874 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.53, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.53, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6115702479338843, + "acc_stderr": 0.04449270350068382, + "acc_norm": 0.6115702479338843, + "acc_norm_stderr": 0.04449270350068382 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4342105263157895, + "acc_stderr": 0.04033565667848319, + "acc_norm": 0.4342105263157895, + "acc_norm_stderr": 0.04033565667848319 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.33986928104575165, + "acc_stderr": 0.01916241858862356, + "acc_norm": 0.33986928104575165, + "acc_norm_stderr": 0.01916241858862356 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3546099290780142, + "acc_stderr": 0.028538650028878645, + "acc_norm": 0.3546099290780142, + "acc_norm_stderr": 0.028538650028878645 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2767857142857143, + "acc_stderr": 0.042466243366976235, + "acc_norm": 0.2767857142857143, + "acc_norm_stderr": 0.042466243366976235 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.33796296296296297, + "acc_stderr": 0.03225941352631295, + "acc_norm": 0.33796296296296297, + "acc_norm_stderr": 0.03225941352631295 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2759776536312849, + "acc_stderr": 0.014950103002475356, + "acc_norm": 0.2759776536312849, + "acc_norm_stderr": 
0.014950103002475356 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.2610294117647059, + "acc_stderr": 0.0266792522701031, + "acc_norm": 0.2610294117647059, + "acc_norm_stderr": 0.0266792522701031 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.46530612244897956, + "acc_stderr": 0.03193207024425314, + "acc_norm": 0.46530612244897956, + "acc_norm_stderr": 0.03193207024425314 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.3206751054852321, + "acc_stderr": 0.03038193194999041, + "acc_norm": 0.3206751054852321, + "acc_norm_stderr": 0.03038193194999041 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2861799217731421, + "acc_stderr": 0.011543642878150755, + "acc_norm": 0.2861799217731421, + "acc_norm_stderr": 0.011543642878150755 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.29901960784313725, + "acc_stderr": 0.03213325717373617, + "acc_norm": 0.29901960784313725, + "acc_norm_stderr": 0.03213325717373617 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.036810508691615486, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.036810508691615486 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2876376988984088, + "mc1_stderr": 0.015846315101394812, + "mc2": 0.4520337458540359, + "mc2_stderr": 0.01644631717925023 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3837072018890201, + "acc_stderr": 0.016718924637231833, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.017014038119297463 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + 
"harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + 
"harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "cognitivecomputations/dolphin-2.9.2-Phi-3-Medium", + "model_sha": "0470c5b912b51fa6e27d87a8ea7feafacd8cb101", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/cognitivecomputations/dolphin-2.9.2-qwen2-7b/result_2024-07-03 20:08:15.json b/cognitivecomputations/dolphin-2.9.2-qwen2-7b/result_2024-07-03 20:08:15.json new file mode 100644 index 0000000000000000000000000000000000000000..e638b2a5e33526ce40bef84e2d421046be65c0e8 --- /dev/null +++ b/cognitivecomputations/dolphin-2.9.2-qwen2-7b/result_2024-07-03 20:08:15.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3779863481228669, + "acc_stderr": 0.0141696645203031, + "acc_norm": 0.447098976109215, + "acc_norm_stderr": 0.014529380160526847 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3999203345947023, + "acc_stderr": 0.004888805003103067, + "acc_norm": 0.5301732722565226, + "acc_norm_stderr": 0.0049806874674860994 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.7076023391812866, + "acc_stderr": 0.03488647713457922, + "acc_norm": 0.7076023391812866, + "acc_norm_stderr": 0.03488647713457922 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.8155339805825242, + "acc_stderr": 
0.03840423627288276, + "acc_norm": 0.8155339805825242, + "acc_norm_stderr": 0.03840423627288276 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.648786717752235, + "acc_stderr": 0.017069982051499427, + "acc_norm": 0.648786717752235, + "acc_norm_stderr": 0.017069982051499427 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45185185185185184, + "acc_stderr": 0.042992689054808624, + "acc_norm": 0.45185185185185184, + "acc_norm_stderr": 0.042992689054808624 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.5914893617021276, + "acc_stderr": 0.032134180267015755, + "acc_norm": 0.5914893617021276, + "acc_norm_stderr": 0.032134180267015755 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.46987951807228917, + "acc_stderr": 0.03885425420866766, + "acc_norm": 0.46987951807228917, + "acc_norm_stderr": 0.03885425420866766 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.617363344051447, + "acc_stderr": 0.027604689028581972, + "acc_norm": 0.617363344051447, + "acc_norm_stderr": 0.027604689028581972 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.6143497757847534, + "acc_stderr": 0.03266842214289201, + "acc_norm": 0.6143497757847534, + "acc_norm_stderr": 0.03266842214289201 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5954198473282443, + "acc_stderr": 0.043046937953806645, + "acc_norm": 0.5954198473282443, + "acc_norm_stderr": 0.043046937953806645 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7272727272727273, + "acc_stderr": 0.03173071239071724, + "acc_norm": 0.7272727272727273, + "acc_norm_stderr": 0.03173071239071724 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5793103448275863, + 
"acc_stderr": 0.0411391498118926, + "acc_norm": 0.5793103448275863, + "acc_norm_stderr": 0.0411391498118926 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3627450980392157, + "acc_stderr": 0.04784060704105653, + "acc_norm": 0.3627450980392157, + "acc_norm_stderr": 0.04784060704105653 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6554621848739496, + "acc_stderr": 0.030868682604121615, + "acc_norm": 0.6554621848739496, + "acc_norm_stderr": 0.030868682604121615 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.6384615384615384, + "acc_stderr": 0.024359581465397014, + "acc_norm": 0.6384615384615384, + "acc_norm_stderr": 0.024359581465397014 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.68, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.68, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6759259259259259, + "acc_stderr": 0.04524596007030048, + "acc_norm": 0.6759259259259259, + "acc_norm_stderr": 0.04524596007030048 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.5270935960591133, + "acc_stderr": 0.03512819077876106, + "acc_norm": 0.5270935960591133, + "acc_norm_stderr": 0.03512819077876106 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6129032258064516, + "acc_stderr": 0.02770935967503249, + "acc_norm": 0.6129032258064516, + "acc_norm_stderr": 0.02770935967503249 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.8461538461538461, + "acc_stderr": 0.023636873317489277, + "acc_norm": 0.8461538461538461, + "acc_norm_stderr": 0.023636873317489277 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.630188679245283, + "acc_stderr": 0.029711421880107936, + "acc_norm": 0.630188679245283, + "acc_norm_stderr": 0.029711421880107936 + }, + "harness|ko_mmlu_public_relations|5": 
{ + "acc": 0.6181818181818182, + "acc_stderr": 0.046534298079135075, + "acc_norm": 0.6181818181818182, + "acc_norm_stderr": 0.046534298079135075 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.4777777777777778, + "acc_stderr": 0.030455413985678408, + "acc_norm": 0.4777777777777778, + "acc_norm_stderr": 0.030455413985678408 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.36423841059602646, + "acc_stderr": 0.03929111781242742, + "acc_norm": 0.36423841059602646, + "acc_norm_stderr": 0.03929111781242742 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7114427860696517, + "acc_stderr": 0.0320384104021332, + "acc_norm": 0.7114427860696517, + "acc_norm_stderr": 0.0320384104021332 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5549132947976878, + "acc_stderr": 0.03789401760283647, + "acc_norm": 0.5549132947976878, + "acc_norm_stderr": 0.03789401760283647 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.5793650793650794, + "acc_stderr": 0.025424835086924, + "acc_norm": 0.5793650793650794, + "acc_norm_stderr": 0.025424835086924 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5138888888888888, + "acc_stderr": 0.041795966175810016, + "acc_norm": 0.5138888888888888, + "acc_norm_stderr": 0.041795966175810016 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.71, + "acc_stderr": 0.04560480215720683, + "acc_norm": 0.71, + "acc_norm_stderr": 0.04560480215720683 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.6098265895953757, + "acc_stderr": 0.026261677607806642, + "acc_norm": 0.6098265895953757, + "acc_norm_stderr": 0.026261677607806642 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5950920245398773, + "acc_stderr": 0.038566721635489125, + "acc_norm": 0.5950920245398773, + "acc_norm_stderr": 0.038566721635489125 + }, + 
"harness|ko_mmlu_prehistory|5": { + "acc": 0.6296296296296297, + "acc_stderr": 0.02686949074481526, + "acc_norm": 0.6296296296296297, + "acc_norm_stderr": 0.02686949074481526 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7046632124352331, + "acc_stderr": 0.03292296639155141, + "acc_norm": 0.7046632124352331, + "acc_norm_stderr": 0.03292296639155141 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.4298245614035088, + "acc_stderr": 0.04657047260594964, + "acc_norm": 0.4298245614035088, + "acc_norm_stderr": 0.04657047260594964 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.7119266055045872, + "acc_stderr": 0.019416445892636025, + "acc_norm": 0.7119266055045872, + "acc_norm_stderr": 0.019416445892636025 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.5317460317460317, + "acc_stderr": 0.04463112720677174, + "acc_norm": 0.5317460317460317, + "acc_norm_stderr": 0.04463112720677174 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.6176470588235294, + "acc_stderr": 0.02782610930728369, + "acc_norm": 0.6176470588235294, + "acc_norm_stderr": 0.02782610930728369 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.64, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.64, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7933884297520661, + "acc_stderr": 0.03695980128098825, + "acc_norm": 0.7933884297520661, + "acc_norm_stderr": 0.03695980128098825 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.6513157894736842, + "acc_stderr": 0.038781398887976104, + "acc_norm": 0.6513157894736842, + "acc_norm_stderr": 0.038781398887976104 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5375816993464052, + "acc_stderr": 0.02017061497496977, + "acc_norm": 0.5375816993464052, + "acc_norm_stderr": 
0.02017061497496977 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3971631205673759, + "acc_stderr": 0.0291898056735871, + "acc_norm": 0.3971631205673759, + "acc_norm_stderr": 0.0291898056735871 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.48214285714285715, + "acc_stderr": 0.047427623612430116, + "acc_norm": 0.48214285714285715, + "acc_norm_stderr": 0.047427623612430116 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5324074074074074, + "acc_stderr": 0.03402801581358966, + "acc_norm": 0.5324074074074074, + "acc_norm_stderr": 0.03402801581358966 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.3553072625698324, + "acc_stderr": 0.016006989934803185, + "acc_norm": 0.3553072625698324, + "acc_norm_stderr": 0.016006989934803185 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.69, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.69, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4889705882352941, + "acc_stderr": 0.030365446477275675, + "acc_norm": 0.4889705882352941, + "acc_norm_stderr": 0.030365446477275675 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.689795918367347, + "acc_stderr": 0.02961345987248438, + "acc_norm": 0.689795918367347, + "acc_norm_stderr": 0.02961345987248438 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7510548523206751, + "acc_stderr": 0.028146970599422644, + "acc_norm": 0.7510548523206751, + "acc_norm_stderr": 0.028146970599422644 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.39895697522816165, + "acc_stderr": 0.01250675765529367, + "acc_norm": 0.39895697522816165, + "acc_norm_stderr": 0.01250675765529367 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.7107843137254902, + "acc_stderr": 
0.03182231867647553, + "acc_norm": 0.7107843137254902, + "acc_norm_stderr": 0.03182231867647553 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6727272727272727, + "acc_stderr": 0.03663974994391242, + "acc_norm": 0.6727272727272727, + "acc_norm_stderr": 0.03663974994391242 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.31946144430844553, + "mc1_stderr": 0.0163226441829605, + "mc2": 0.4989402714351484, + "mc2_stderr": 0.015667479792977293 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.6068476977567887, + "acc_stderr": 0.016793262801287078, + "acc_norm": 0.6505312868949232, + "acc_norm_stderr": 0.016392797085769836 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, 
+ "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "cognitivecomputations/dolphin-2.9.2-qwen2-7b", + "model_sha": "c443c4eb5138ed746ac49ed98bf3c183dc5380ac", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/cognitivecomputations/dolphin-2.9.3-mistral-7B-32k/result_2024-07-06 04:33:13.json b/cognitivecomputations/dolphin-2.9.3-mistral-7B-32k/result_2024-07-06 04:33:13.json new file mode 100644 index 0000000000000000000000000000000000000000..4d2cb920853e217299b84ea1c798ce853c24661d --- /dev/null +++ 
b/cognitivecomputations/dolphin-2.9.3-mistral-7B-32k/result_2024-07-06 04:33:13.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.33447098976109213, + "acc_stderr": 0.013787460322441372, + "acc_norm": 0.38993174061433444, + "acc_norm_stderr": 0.014252959848892898 + }, + "harness|ko_hellaswag|10": { + "acc": 0.37353116908982276, + "acc_stderr": 0.004827526584889676, + "acc_norm": 0.47191794463254333, + "acc_norm_stderr": 0.004981905293878147 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.40350877192982454, + "acc_stderr": 0.03762738699917055, + "acc_norm": 0.40350877192982454, + "acc_norm_stderr": 0.03762738699917055 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5339805825242718, + "acc_stderr": 0.04939291447273481, + "acc_norm": 0.5339805825242718, + "acc_norm_stderr": 0.04939291447273481 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.44316730523627074, + "acc_stderr": 0.017764085035348386, + "acc_norm": 0.44316730523627074, + "acc_norm_stderr": 0.017764085035348386 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04072314811876837, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04072314811876837 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542129, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542129 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3191489361702128, + "acc_stderr": 0.030472973363380045, + "acc_norm": 0.3191489361702128, + "acc_norm_stderr": 0.030472973363380045 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42771084337349397, + "acc_stderr": 0.03851597683718533, + "acc_norm": 0.42771084337349397, + "acc_norm_stderr": 0.03851597683718533 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4212218649517685, + "acc_stderr": 0.028043399858210635, + "acc_norm": 0.4212218649517685, + "acc_norm_stderr": 0.028043399858210635 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 
0.43946188340807174, + "acc_stderr": 0.03331092511038179, + "acc_norm": 0.43946188340807174, + "acc_norm_stderr": 0.03331092511038179 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4732824427480916, + "acc_stderr": 0.04379024936553893, + "acc_norm": 0.4732824427480916, + "acc_norm_stderr": 0.04379024936553893 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5252525252525253, + "acc_stderr": 0.03557806245087314, + "acc_norm": 0.5252525252525253, + "acc_norm_stderr": 0.03557806245087314 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.47586206896551725, + "acc_stderr": 0.0416180850350153, + "acc_norm": 0.47586206896551725, + "acc_norm_stderr": 0.0416180850350153 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.042207736591714534, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.042207736591714534 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4579831932773109, + "acc_stderr": 0.03236361111951941, + "acc_norm": 0.4579831932773109, + "acc_norm_stderr": 0.03236361111951941 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4128205128205128, + "acc_stderr": 0.024962683564331824, + "acc_norm": 0.4128205128205128, + "acc_norm_stderr": 0.024962683564331824 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.64, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.64, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.04830366024635331 + }, + 
"harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3694581280788177, + "acc_stderr": 0.03395970381998575, + "acc_norm": 0.3694581280788177, + "acc_norm_stderr": 0.03395970381998575 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4258064516129032, + "acc_stderr": 0.0281291127091659, + "acc_norm": 0.4258064516129032, + "acc_norm_stderr": 0.0281291127091659 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.688034188034188, + "acc_stderr": 0.03035152732334494, + "acc_norm": 0.688034188034188, + "acc_norm_stderr": 0.03035152732334494 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.43018867924528303, + "acc_stderr": 0.03047144586718323, + "acc_norm": 0.43018867924528303, + "acc_norm_stderr": 0.03047144586718323 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4818181818181818, + "acc_stderr": 0.04785964010794916, + "acc_norm": 0.4818181818181818, + "acc_norm_stderr": 0.04785964010794916 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.337037037037037, + "acc_stderr": 0.028820884666253252, + "acc_norm": 0.337037037037037, + "acc_norm_stderr": 0.028820884666253252 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.32450331125827814, + "acc_stderr": 0.038227469376587525, + "acc_norm": 0.32450331125827814, + "acc_norm_stderr": 0.038227469376587525 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5621890547263682, + "acc_stderr": 0.035080801121998406, + "acc_norm": 0.5621890547263682, + "acc_norm_stderr": 0.035080801121998406 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.37572254335260113, + "acc_stderr": 0.03692820767264867, + "acc_norm": 0.37572254335260113, + "acc_norm_stderr": 0.03692820767264867 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.36772486772486773, + "acc_stderr": 0.024833839825562417, + "acc_norm": 0.36772486772486773, + "acc_norm_stderr": 0.024833839825562417 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3125, + "acc_stderr": 0.038760854559127644, + 
"acc_norm": 0.3125, + "acc_norm_stderr": 0.038760854559127644 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4682080924855491, + "acc_stderr": 0.026864624366756643, + "acc_norm": 0.4682080924855491, + "acc_norm_stderr": 0.026864624366756643 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.44785276073619634, + "acc_stderr": 0.03906947479456601, + "acc_norm": 0.44785276073619634, + "acc_norm_stderr": 0.03906947479456601 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.44753086419753085, + "acc_stderr": 0.027667138569422708, + "acc_norm": 0.44753086419753085, + "acc_norm_stderr": 0.027667138569422708 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.47668393782383417, + "acc_stderr": 0.03604513672442205, + "acc_norm": 0.47668393782383417, + "acc_norm_stderr": 0.03604513672442205 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.30701754385964913, + "acc_stderr": 0.0433913832257986, + "acc_norm": 0.30701754385964913, + "acc_norm_stderr": 0.0433913832257986 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.41651376146788993, + "acc_stderr": 0.021136376504030874, + "acc_norm": 0.41651376146788993, + "acc_norm_stderr": 0.021136376504030874 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.04285714285714281, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.04285714285714281 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4803921568627451, + "acc_stderr": 0.028607893699576066, + 
"acc_norm": 0.4803921568627451, + "acc_norm_stderr": 0.028607893699576066 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6115702479338843, + "acc_stderr": 0.04449270350068382, + "acc_norm": 0.6115702479338843, + "acc_norm_stderr": 0.04449270350068382 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4342105263157895, + "acc_stderr": 0.040335656678483205, + "acc_norm": 0.4342105263157895, + "acc_norm_stderr": 0.040335656678483205 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3790849673202614, + "acc_stderr": 0.019627444748412243, + "acc_norm": 0.3790849673202614, + "acc_norm_stderr": 0.019627444748412243 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.35106382978723405, + "acc_stderr": 0.02847350127296376, + "acc_norm": 0.35106382978723405, + "acc_norm_stderr": 0.02847350127296376 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.41964285714285715, + "acc_stderr": 0.046840993210771065, + "acc_norm": 0.41964285714285715, + "acc_norm_stderr": 0.046840993210771065 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3472222222222222, + "acc_stderr": 0.032468872436376486, + "acc_norm": 0.3472222222222222, + "acc_norm_stderr": 0.032468872436376486 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2748603351955307, + "acc_stderr": 0.014931316703220508, + "acc_norm": 0.2748603351955307, + "acc_norm_stderr": 0.014931316703220508 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.57, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.57, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3014705882352941, 
+ "acc_stderr": 0.027875982114273168, + "acc_norm": 0.3014705882352941, + "acc_norm_stderr": 0.027875982114273168 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5183673469387755, + "acc_stderr": 0.03198761546763127, + "acc_norm": 0.5183673469387755, + "acc_norm_stderr": 0.03198761546763127 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5949367088607594, + "acc_stderr": 0.031955147413706725, + "acc_norm": 0.5949367088607594, + "acc_norm_stderr": 0.031955147413706725 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3324641460234681, + "acc_stderr": 0.012032022332260516, + "acc_norm": 0.3324641460234681, + "acc_norm_stderr": 0.012032022332260516 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.46568627450980393, + "acc_stderr": 0.03501038327635897, + "acc_norm": 0.46568627450980393, + "acc_norm_stderr": 0.03501038327635897 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.038956580652718446, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.038956580652718446 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3157894736842105, + "mc1_stderr": 0.016272287957916933, + "mc2": 0.4860633004380586, + "mc2_stderr": 0.01619548225737121 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5348288075560803, + "acc_stderr": 0.017148598015747425, + "acc_norm": 0.5584415584415584, + "acc_norm_stderr": 0.017072525875563106 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + 
"harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + 
"harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "cognitivecomputations/dolphin-2.9.3-mistral-7B-32k", + "model_sha": "311c04607bf1434fc8294cd7ff4aa7a4ac968044", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/cognitivecomputations/laserxtral/result_2024-07-31 17:32:41.json b/cognitivecomputations/laserxtral/result_2024-07-31 17:32:41.json new file mode 100644 index 0000000000000000000000000000000000000000..f0926a1fdf83090bbb17c35091ad705a1146cc1c --- /dev/null +++ b/cognitivecomputations/laserxtral/result_2024-07-31 17:32:41.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3626279863481229, + "acc_stderr": 0.01404910656495501, + "acc_norm": 0.41638225255972694, + "acc_norm_stderr": 0.014405618279436174 + }, + "harness|ko_hellaswag|10": { + "acc": 0.39384584744074885, + "acc_stderr": 0.004876028037941941, + "acc_norm": 0.5107548297151961, + "acc_norm_stderr": 0.00498862697817309 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.49707602339181284, + "acc_stderr": 0.03834759370936839, + "acc_norm": 0.49707602339181284, + "acc_norm_stderr": 0.03834759370936839 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5728155339805825, + "acc_stderr": 0.04897957737781167, + "acc_norm": 0.5728155339805825, + "acc_norm_stderr": 0.04897957737781167 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4789272030651341, + "acc_stderr": 0.017864076786212896, + "acc_norm": 0.4789272030651341, + "acc_norm_stderr": 0.017864076786212896 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.042446332383532286, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.042446332383532286 + }, 
+ "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4085106382978723, + "acc_stderr": 0.03213418026701576, + "acc_norm": 0.4085106382978723, + "acc_norm_stderr": 0.03213418026701576 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42771084337349397, + "acc_stderr": 0.03851597683718533, + "acc_norm": 0.42771084337349397, + "acc_norm_stderr": 0.03851597683718533 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4790996784565916, + "acc_stderr": 0.028373270961069414, + "acc_norm": 0.4790996784565916, + "acc_norm_stderr": 0.028373270961069414 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4798206278026906, + "acc_stderr": 0.03353046167412299, + "acc_norm": 0.4798206278026906, + "acc_norm_stderr": 0.03353046167412299 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48091603053435117, + "acc_stderr": 0.04382094705550989, + "acc_norm": 0.48091603053435117, + "acc_norm_stderr": 0.04382094705550989 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5757575757575758, + "acc_stderr": 0.03521224908841585, + "acc_norm": 0.5757575757575758, + "acc_norm_stderr": 0.03521224908841585 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.496551724137931, + "acc_stderr": 0.041665675771015785, + "acc_norm": 0.496551724137931, + "acc_norm_stderr": 0.041665675771015785 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.04220773659171453, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.04220773659171453 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5210084033613446, + "acc_stderr": 0.03244980849990029, + "acc_norm": 0.5210084033613446, + 
"acc_norm_stderr": 0.03244980849990029 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.46153846153846156, + "acc_stderr": 0.025275892070240634, + "acc_norm": 0.46153846153846156, + "acc_norm_stderr": 0.025275892070240634 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.62, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.62, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5092592592592593, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.5092592592592593, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3891625615763547, + "acc_stderr": 0.03430462416103872, + "acc_norm": 0.3891625615763547, + "acc_norm_stderr": 0.03430462416103872 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.43548387096774194, + "acc_stderr": 0.02820622559150274, + "acc_norm": 0.43548387096774194, + "acc_norm_stderr": 0.02820622559150274 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7606837606837606, + "acc_stderr": 0.027951826808924336, + "acc_norm": 0.7606837606837606, + "acc_norm_stderr": 0.027951826808924336 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4339622641509434, + "acc_stderr": 0.030503292013342592, + "acc_norm": 0.4339622641509434, + "acc_norm_stderr": 0.030503292013342592 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5545454545454546, + "acc_stderr": 0.047605488214603246, + "acc_norm": 0.5545454545454546, + "acc_norm_stderr": 0.047605488214603246 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3037037037037037, + "acc_stderr": 0.028037929969114993, + "acc_norm": 0.3037037037037037, + "acc_norm_stderr": 0.028037929969114993 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.23841059602649006, + "acc_stderr": 
0.0347918557259966, + "acc_norm": 0.23841059602649006, + "acc_norm_stderr": 0.0347918557259966 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6019900497512438, + "acc_stderr": 0.034611994290400135, + "acc_norm": 0.6019900497512438, + "acc_norm_stderr": 0.034611994290400135 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4161849710982659, + "acc_stderr": 0.03758517775404947, + "acc_norm": 0.4161849710982659, + "acc_norm_stderr": 0.03758517775404947 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.38095238095238093, + "acc_stderr": 0.025010749116137588, + "acc_norm": 0.38095238095238093, + "acc_norm_stderr": 0.025010749116137588 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3680555555555556, + "acc_stderr": 0.040329990539607195, + "acc_norm": 0.3680555555555556, + "acc_norm_stderr": 0.040329990539607195 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.53, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.53, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5115606936416185, + "acc_stderr": 0.026911898686377906, + "acc_norm": 0.5115606936416185, + "acc_norm_stderr": 0.026911898686377906 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.49079754601226994, + "acc_stderr": 0.039277056007874414, + "acc_norm": 0.49079754601226994, + "acc_norm_stderr": 0.039277056007874414 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.02780165621232366, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.02780165621232366 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5181347150259067, 
+ "acc_stderr": 0.03606065001832919, + "acc_norm": 0.5181347150259067, + "acc_norm_stderr": 0.03606065001832919 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2894736842105263, + "acc_stderr": 0.04266339443159394, + "acc_norm": 0.2894736842105263, + "acc_norm_stderr": 0.04266339443159394 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5064220183486239, + "acc_stderr": 0.021435554820013077, + "acc_norm": 0.5064220183486239, + "acc_norm_stderr": 0.021435554820013077 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4126984126984127, + "acc_stderr": 0.04403438954768177, + "acc_norm": 0.4126984126984127, + "acc_norm_stderr": 0.04403438954768177 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.477124183006536, + "acc_stderr": 0.028599936776089786, + "acc_norm": 0.477124183006536, + "acc_norm_stderr": 0.028599936776089786 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7024793388429752, + "acc_stderr": 0.04173349148083499, + "acc_norm": 0.7024793388429752, + "acc_norm_stderr": 0.04173349148083499 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4342105263157895, + "acc_stderr": 0.04033565667848319, + "acc_norm": 0.4342105263157895, + "acc_norm_stderr": 0.04033565667848319 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4035947712418301, + "acc_stderr": 0.019848280168401157, + "acc_norm": 0.4035947712418301, + "acc_norm_stderr": 0.019848280168401157 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.33687943262411346, + "acc_stderr": 0.02819553487396673, + "acc_norm": 0.33687943262411346, + "acc_norm_stderr": 0.02819553487396673 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.44642857142857145, + "acc_stderr": 0.04718471485219588, + "acc_norm": 0.44642857142857145, + "acc_norm_stderr": 0.04718471485219588 + }, + 
"harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.03214952147802748, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.03214952147802748 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27150837988826815, + "acc_stderr": 0.014874252168095268, + "acc_norm": 0.27150837988826815, + "acc_norm_stderr": 0.014874252168095268 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.66, + "acc_stderr": 0.04760952285695237, + "acc_norm": 0.66, + "acc_norm_stderr": 0.04760952285695237 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.34191176470588236, + "acc_stderr": 0.028814722422254184, + "acc_norm": 0.34191176470588236, + "acc_norm_stderr": 0.028814722422254184 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5428571428571428, + "acc_stderr": 0.03189141832421396, + "acc_norm": 0.5428571428571428, + "acc_norm_stderr": 0.03189141832421396 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6118143459915611, + "acc_stderr": 0.03172295004332332, + "acc_norm": 0.6118143459915611, + "acc_norm_stderr": 0.03172295004332332 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3455019556714472, + "acc_stderr": 0.012145303004087206, + "acc_norm": 0.3455019556714472, + "acc_norm_stderr": 0.012145303004087206 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4803921568627451, + "acc_stderr": 0.03506612560524866, + "acc_norm": 0.4803921568627451, + "acc_norm_stderr": 0.03506612560524866 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5333333333333333, + "acc_stderr": 0.03895658065271847, + "acc_norm": 0.5333333333333333, + "acc_norm_stderr": 0.03895658065271847 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3623011015911873, + "mc1_stderr": 0.01682664689726226, + "mc2": 
0.5464179644913709, + "mc2_stderr": 0.016182349375852938 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.40731995277449823, + "acc_stderr": 0.01689245669519127, + "acc_norm": 0.41086186540731995, + "acc_norm_stderr": 0.016914972767841062 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + 
"harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "cognitivecomputations/laserxtral", + "model_sha": "1eb0192a8181eb7ce68c2d2947b3bcb79c02b3c2", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/cognitivecomputations/openchat-3.5-0106-laser/result_2024-07-31 17:32:58.json b/cognitivecomputations/openchat-3.5-0106-laser/result_2024-07-31 17:32:58.json new file mode 100644 index 0000000000000000000000000000000000000000..aa366f4c9340047ae4808fe57b1ed8d79b37429d --- /dev/null +++ b/cognitivecomputations/openchat-3.5-0106-laser/result_2024-07-31 17:32:58.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3387372013651877, + "acc_stderr": 0.01383056892797433, + "acc_norm": 0.40187713310580203, + "acc_norm_stderr": 0.014327268614578278 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3678550089623581, + "acc_stderr": 0.004812361060493925, + "acc_norm": 
0.4689304919338777, + "acc_norm_stderr": 0.00498013867916104 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4853801169590643, + "acc_stderr": 0.038331852752130205, + "acc_norm": 0.4853801169590643, + "acc_norm_stderr": 0.038331852752130205 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5728155339805825, + "acc_stderr": 0.048979577377811674, + "acc_norm": 0.5728155339805825, + "acc_norm_stderr": 0.048979577377811674 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.48020434227330777, + "acc_stderr": 0.017865944827291612, + "acc_norm": 0.48020434227330777, + "acc_norm_stderr": 0.017865944827291612 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.35555555555555557, + "acc_stderr": 0.04135176749720386, + "acc_norm": 0.35555555555555557, + "acc_norm_stderr": 0.04135176749720386 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.37872340425531914, + "acc_stderr": 0.031709956060406545, + "acc_norm": 0.37872340425531914, + "acc_norm_stderr": 0.031709956060406545 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3855421686746988, + "acc_stderr": 0.037891344246115496, + "acc_norm": 0.3855421686746988, + "acc_norm_stderr": 0.037891344246115496 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.44694533762057875, + "acc_stderr": 0.02823776942208532, + "acc_norm": 0.44694533762057875, + "acc_norm_stderr": 0.02823776942208532 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4349775784753363, + "acc_stderr": 0.033272833702713445, + "acc_norm": 0.4349775784753363, + "acc_norm_stderr": 0.033272833702713445 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48854961832061067, + "acc_stderr": 0.043841400240780176, + "acc_norm": 0.48854961832061067, + "acc_norm_stderr": 0.043841400240780176 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.45, + "acc_stderr": 0.049999999999999996, 
+ "acc_norm": 0.45, + "acc_norm_stderr": 0.049999999999999996 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6060606060606061, + "acc_stderr": 0.03481285338232964, + "acc_norm": 0.6060606060606061, + "acc_norm_stderr": 0.03481285338232964 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4689655172413793, + "acc_stderr": 0.04158632762097828, + "acc_norm": 0.4689655172413793, + "acc_norm_stderr": 0.04158632762097828 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.04533838195929775, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.04533838195929775 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.542016806722689, + "acc_stderr": 0.03236361111951941, + "acc_norm": 0.542016806722689, + "acc_norm_stderr": 0.03236361111951941 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.47435897435897434, + "acc_stderr": 0.02531764972644865, + "acc_norm": 0.47435897435897434, + "acc_norm_stderr": 0.02531764972644865 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.59, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.59, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542126, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542126 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5092592592592593, + "acc_stderr": 0.04832853553437056, + "acc_norm": 0.5092592592592593, + "acc_norm_stderr": 0.04832853553437056 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3694581280788177, + "acc_stderr": 0.03395970381998575, + "acc_norm": 0.3694581280788177, + "acc_norm_stderr": 0.03395970381998575 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.43548387096774194, + "acc_stderr": 0.028206225591502748, + "acc_norm": 0.43548387096774194, + "acc_norm_stderr": 0.028206225591502748 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7435897435897436, + 
"acc_stderr": 0.02860595370200425, + "acc_norm": 0.7435897435897436, + "acc_norm_stderr": 0.02860595370200425 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.45660377358490567, + "acc_stderr": 0.03065674869673943, + "acc_norm": 0.45660377358490567, + "acc_norm_stderr": 0.03065674869673943 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5, + "acc_stderr": 0.04789131426105757, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04789131426105757 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3814814814814815, + "acc_stderr": 0.029616718927497582, + "acc_norm": 0.3814814814814815, + "acc_norm_stderr": 0.029616718927497582 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.03757949922943342, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.03757949922943342 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5920398009950248, + "acc_stderr": 0.03475116365194092, + "acc_norm": 0.5920398009950248, + "acc_norm_stderr": 0.03475116365194092 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3872832369942196, + "acc_stderr": 0.03714325906302065, + "acc_norm": 0.3872832369942196, + "acc_norm_stderr": 0.03714325906302065 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.36507936507936506, + "acc_stderr": 0.02479606060269995, + "acc_norm": 0.36507936507936506, + "acc_norm_stderr": 0.02479606060269995 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3680555555555556, + "acc_stderr": 0.04032999053960719, + "acc_norm": 0.3680555555555556, + "acc_norm_stderr": 0.04032999053960719 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.62, + "acc_stderr": 0.04878317312145634, + "acc_norm": 0.62, + "acc_norm_stderr": 0.04878317312145634 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.47398843930635837, + 
"acc_stderr": 0.02688264343402289, + "acc_norm": 0.47398843930635837, + "acc_norm_stderr": 0.02688264343402289 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4723926380368098, + "acc_stderr": 0.03922378290610991, + "acc_norm": 0.4723926380368098, + "acc_norm_stderr": 0.03922378290610991 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4567901234567901, + "acc_stderr": 0.02771666165019404, + "acc_norm": 0.4567901234567901, + "acc_norm_stderr": 0.02771666165019404 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.49222797927461137, + "acc_stderr": 0.036080032255696545, + "acc_norm": 0.49222797927461137, + "acc_norm_stderr": 0.036080032255696545 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2894736842105263, + "acc_stderr": 0.04266339443159394, + "acc_norm": 0.2894736842105263, + "acc_norm_stderr": 0.04266339443159394 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5137614678899083, + "acc_stderr": 0.02142920208987408, + "acc_norm": 0.5137614678899083, + "acc_norm_stderr": 0.02142920208987408 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.044444444444444495, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.044444444444444495 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.49673202614379086, + "acc_stderr": 0.02862930519400354, + "acc_norm": 0.49673202614379086, + "acc_norm_stderr": 0.02862930519400354 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.043913262867240704, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.043913262867240704 + }, + "harness|ko_mmlu_astronomy|5": { + 
"acc": 0.4342105263157895, + "acc_stderr": 0.0403356566784832, + "acc_norm": 0.4342105263157895, + "acc_norm_stderr": 0.0403356566784832 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.39705882352941174, + "acc_stderr": 0.019794488900024106, + "acc_norm": 0.39705882352941174, + "acc_norm_stderr": 0.019794488900024106 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.35815602836879434, + "acc_stderr": 0.028602085862759412, + "acc_norm": 0.35815602836879434, + "acc_norm_stderr": 0.028602085862759412 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4017857142857143, + "acc_stderr": 0.04653333146973646, + "acc_norm": 0.4017857142857143, + "acc_norm_stderr": 0.04653333146973646 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4398148148148148, + "acc_stderr": 0.0338517797604481, + "acc_norm": 0.4398148148148148, + "acc_norm_stderr": 0.0338517797604481 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23687150837988827, + "acc_stderr": 0.014219570788103984, + "acc_norm": 0.23687150837988827, + "acc_norm_stderr": 0.014219570788103984 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.65, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.65, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3639705882352941, + "acc_stderr": 0.02922719246003203, + "acc_norm": 0.3639705882352941, + "acc_norm_stderr": 0.02922719246003203 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5551020408163265, + "acc_stderr": 0.031814251181977865, + "acc_norm": 0.5551020408163265, + "acc_norm_stderr": 0.031814251181977865 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6118143459915611, + "acc_stderr": 0.0317229500433233, + "acc_norm": 0.6118143459915611, + 
"acc_norm_stderr": 0.0317229500433233 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.32659713168187743, + "acc_stderr": 0.011977676704715997, + "acc_norm": 0.32659713168187743, + "acc_norm_stderr": 0.011977676704715997 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.46078431372549017, + "acc_stderr": 0.03498501649369527, + "acc_norm": 0.46078431372549017, + "acc_norm_stderr": 0.03498501649369527 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.49696969696969695, + "acc_stderr": 0.03904272341431857, + "acc_norm": 0.49696969696969695, + "acc_norm_stderr": 0.03904272341431857 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.33414932680538556, + "mc1_stderr": 0.016512530677150524, + "mc2": 0.5160382121524839, + "mc2_stderr": 0.01578055487160405 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4474616292798111, + "acc_stderr": 0.01709519030150058, + "acc_norm": 0.47461629279811096, + "acc_norm_stderr": 0.017168187201429257 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + 
"harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "cognitivecomputations/openchat-3.5-0106-laser", + "model_sha": "62c30dc92aa9ba9070ff0f726440029aaf5bed34", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": 
null + } +} \ No newline at end of file diff --git a/colable/LDCC-CCK-slerp/result_2024-01-23 14:23:10.json b/colable/LDCC-CCK-slerp/result_2024-01-23 14:23:10.json new file mode 100644 index 0000000000000000000000000000000000000000..f762ab71946774c4fe47c4e7faf7bc2a9a5884e3 --- /dev/null +++ b/colable/LDCC-CCK-slerp/result_2024-01-23 14:23:10.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.48208191126279865, + "acc_stderr": 0.014602005585490978, + "acc_norm": 0.5392491467576792, + "acc_norm_stderr": 0.014566303676636584 + }, + "harness|ko_hellaswag|10": { + "acc": 0.46942840071698866, + "acc_stderr": 0.004980445551991257, + "acc_norm": 0.6434973112925712, + "acc_norm_stderr": 0.00477987225063371 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5614035087719298, + "acc_stderr": 0.038057975055904594, + "acc_norm": 0.5614035087719298, + "acc_norm_stderr": 0.038057975055904594 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6504854368932039, + "acc_stderr": 0.047211885060971716, + "acc_norm": 0.6504854368932039, + "acc_norm_stderr": 0.047211885060971716 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6500638569604087, + "acc_stderr": 0.017055679797150426, + "acc_norm": 0.6500638569604087, + "acc_norm_stderr": 0.017055679797150426 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.043097329010363554, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.043097329010363554 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4808510638297872, + "acc_stderr": 0.03266204299064677, + "acc_norm": 0.4808510638297872, + "acc_norm_stderr": 0.03266204299064677 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.45180722891566266, + "acc_stderr": 0.03874371556587953, + "acc_norm": 0.45180722891566266, + "acc_norm_stderr": 
0.03874371556587953 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.639871382636656, + "acc_stderr": 0.027264297599804015, + "acc_norm": 0.639871382636656, + "acc_norm_stderr": 0.027264297599804015 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5739910313901345, + "acc_stderr": 0.033188332862172806, + "acc_norm": 0.5739910313901345, + "acc_norm_stderr": 0.033188332862172806 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6106870229007634, + "acc_stderr": 0.04276486542814591, + "acc_norm": 0.6106870229007634, + "acc_norm_stderr": 0.04276486542814591 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7070707070707071, + "acc_stderr": 0.03242497958178817, + "acc_norm": 0.7070707070707071, + "acc_norm_stderr": 0.03242497958178817 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4482758620689655, + "acc_stderr": 0.04144311810878151, + "acc_norm": 0.4482758620689655, + "acc_norm_stderr": 0.04144311810878151 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.04533838195929777, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.04533838195929777 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6302521008403361, + "acc_stderr": 0.03135709599613591, + "acc_norm": 0.6302521008403361, + "acc_norm_stderr": 0.03135709599613591 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5769230769230769, + "acc_stderr": 0.02504919787604236, + "acc_norm": 0.5769230769230769, + "acc_norm_stderr": 0.02504919787604236 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.59, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.59, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + 
"acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6481481481481481, + "acc_stderr": 0.04616631111801713, + "acc_norm": 0.6481481481481481, + "acc_norm_stderr": 0.04616631111801713 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3694581280788177, + "acc_stderr": 0.03395970381998575, + "acc_norm": 0.3694581280788177, + "acc_norm_stderr": 0.03395970381998575 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.567741935483871, + "acc_stderr": 0.02818173972001941, + "acc_norm": 0.567741935483871, + "acc_norm_stderr": 0.02818173972001941 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7735042735042735, + "acc_stderr": 0.027421007295392933, + "acc_norm": 0.7735042735042735, + "acc_norm_stderr": 0.027421007295392933 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5584905660377358, + "acc_stderr": 0.03056159042673184, + "acc_norm": 0.5584905660377358, + "acc_norm_stderr": 0.03056159042673184 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6, + "acc_stderr": 0.0469237132203465, + "acc_norm": 0.6, + "acc_norm_stderr": 0.0469237132203465 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3111111111111111, + "acc_stderr": 0.028226446749683515, + "acc_norm": 0.3111111111111111, + "acc_norm_stderr": 0.028226446749683515 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.03802039760107903, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.03802039760107903 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7263681592039801, + "acc_stderr": 0.03152439186555404, + "acc_norm": 0.7263681592039801, + "acc_norm_stderr": 0.03152439186555404 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5086705202312138, + "acc_stderr": 0.03811890988940412, + "acc_norm": 0.5086705202312138, + "acc_norm_stderr": 0.03811890988940412 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.42328042328042326, + "acc_stderr": 
0.025446365634406793, + "acc_norm": 0.42328042328042326, + "acc_norm_stderr": 0.025446365634406793 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5069444444444444, + "acc_stderr": 0.04180806750294938, + "acc_norm": 0.5069444444444444, + "acc_norm_stderr": 0.04180806750294938 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.64, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.64, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5809248554913294, + "acc_stderr": 0.026564178111422622, + "acc_norm": 0.5809248554913294, + "acc_norm_stderr": 0.026564178111422622 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.50920245398773, + "acc_stderr": 0.03927705600787443, + "acc_norm": 0.50920245398773, + "acc_norm_stderr": 0.03927705600787443 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6049382716049383, + "acc_stderr": 0.027201117666925654, + "acc_norm": 0.6049382716049383, + "acc_norm_stderr": 0.027201117666925654 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7202072538860104, + "acc_stderr": 0.032396370467357036, + "acc_norm": 0.7202072538860104, + "acc_norm_stderr": 0.032396370467357036 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.43859649122807015, + "acc_stderr": 0.04668000738510455, + "acc_norm": 0.43859649122807015, + "acc_norm_stderr": 0.04668000738510455 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6825688073394496, + "acc_stderr": 0.0199571521984605, + "acc_norm": 0.6825688073394496, + "acc_norm_stderr": 0.0199571521984605 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4126984126984127, + 
"acc_stderr": 0.04403438954768177, + "acc_norm": 0.4126984126984127, + "acc_norm_stderr": 0.04403438954768177 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5718954248366013, + "acc_stderr": 0.028332397483664274, + "acc_norm": 0.5718954248366013, + "acc_norm_stderr": 0.028332397483664274 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.64, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.64, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7107438016528925, + "acc_stderr": 0.041391127276354626, + "acc_norm": 0.7107438016528925, + "acc_norm_stderr": 0.041391127276354626 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4934210526315789, + "acc_stderr": 0.040685900502249704, + "acc_norm": 0.4934210526315789, + "acc_norm_stderr": 0.040685900502249704 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5130718954248366, + "acc_stderr": 0.020220920829626912, + "acc_norm": 0.5130718954248366, + "acc_norm_stderr": 0.020220920829626912 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.425531914893617, + "acc_stderr": 0.02949482760014437, + "acc_norm": 0.425531914893617, + "acc_norm_stderr": 0.02949482760014437 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.38392857142857145, + "acc_stderr": 0.046161430750285455, + "acc_norm": 0.38392857142857145, + "acc_norm_stderr": 0.046161430750285455 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5138888888888888, + "acc_stderr": 0.03408655867977747, + "acc_norm": 0.5138888888888888, + "acc_norm_stderr": 0.03408655867977747 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.3474860335195531, + "acc_stderr": 0.015925564060208154, + "acc_norm": 0.3474860335195531, + "acc_norm_stderr": 0.015925564060208154 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + 
"harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.65, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.65, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5330882352941176, + "acc_stderr": 0.030306257722468317, + "acc_norm": 0.5330882352941176, + "acc_norm_stderr": 0.030306257722468317 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5714285714285714, + "acc_stderr": 0.031680911612338825, + "acc_norm": 0.5714285714285714, + "acc_norm_stderr": 0.031680911612338825 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7721518987341772, + "acc_stderr": 0.02730348459906943, + "acc_norm": 0.7721518987341772, + "acc_norm_stderr": 0.02730348459906943 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.4256844850065189, + "acc_stderr": 0.012628393551811943, + "acc_norm": 0.4256844850065189, + "acc_norm_stderr": 0.012628393551811943 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6568627450980392, + "acc_stderr": 0.03332139944668086, + "acc_norm": 0.6568627450980392, + "acc_norm_stderr": 0.03332139944668086 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.703030303030303, + "acc_stderr": 0.035679697722680474, + "acc_norm": 0.703030303030303, + "acc_norm_stderr": 0.035679697722680474 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.4810281517747858, + "mc1_stderr": 0.01749089640576234, + "mc2": 0.6394893558635282, + "mc2_stderr": 0.015859977923434996 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5796930342384888, + "acc_stderr": 0.01697059828117771, + "acc_norm": 0.58913813459268, + "acc_norm_stderr": 0.01691497276784105 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + 
"harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + 
"harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "colable/LDCC-CCK-slerp", + "model_sha": "7a5372d053f06452502aeb215aa12f84c28f99fe", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/colable/llama-ko-peft-v0.5/result_2024-01-30 06:29:34.json b/colable/llama-ko-peft-v0.5/result_2024-01-30 06:29:34.json new file mode 100644 index 0000000000000000000000000000000000000000..55bea2a63ca53ca05e6710d0f3d4ac52f3fa2179 --- /dev/null +++ b/colable/llama-ko-peft-v0.5/result_2024-01-30 06:29:34.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3361774744027304, + "acc_stderr": 0.013804855026205763, + "acc_norm": 0.4138225255972696, + "acc_norm_stderr": 0.014392730009221009 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3777136028679546, + "acc_stderr": 0.0048382464107862766, + "acc_norm": 0.4914359689304919, + "acc_norm_stderr": 0.0049890494303912935 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.3391812865497076, + "acc_stderr": 0.036310534964889056, + "acc_norm": 0.3391812865497076, + "acc_norm_stderr": 0.036310534964889056 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.3106796116504854, + "acc_stderr": 0.04582124160161551, + "acc_norm": 0.3106796116504854, + "acc_norm_stderr": 0.04582124160161551 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.38058748403575987, + "acc_stderr": 0.017362564126075418, + "acc_norm": 0.38058748403575987, + "acc_norm_stderr": 
0.017362564126075418 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34074074074074073, + "acc_stderr": 0.040943762699967946, + "acc_norm": 0.34074074074074073, + "acc_norm_stderr": 0.040943762699967946 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720683, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720683 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.30638297872340425, + "acc_stderr": 0.030135906478517563, + "acc_norm": 0.30638297872340425, + "acc_norm_stderr": 0.030135906478517563 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.27710843373493976, + "acc_stderr": 0.034843315926805875, + "acc_norm": 0.27710843373493976, + "acc_norm_stderr": 0.034843315926805875 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.40836012861736337, + "acc_stderr": 0.027917050748484624, + "acc_norm": 0.40836012861736337, + "acc_norm_stderr": 0.027917050748484624 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4439461883408072, + "acc_stderr": 0.03334625674242728, + "acc_norm": 0.4439461883408072, + "acc_norm_stderr": 0.03334625674242728 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.37404580152671757, + "acc_stderr": 0.042438692422305246, + "acc_norm": 0.37404580152671757, + "acc_norm_stderr": 0.042438692422305246 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.41414141414141414, + "acc_stderr": 0.03509438348879629, + "acc_norm": 0.41414141414141414, + "acc_norm_stderr": 0.03509438348879629 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.33793103448275863, + "acc_stderr": 0.03941707632064889, + "acc_norm": 0.33793103448275863, + "acc_norm_stderr": 0.03941707632064889 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.041583075330832865, + "acc_norm": 
0.22549019607843138, + "acc_norm_stderr": 0.041583075330832865 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.33613445378151263, + "acc_stderr": 0.03068473711513537, + "acc_norm": 0.33613445378151263, + "acc_norm_stderr": 0.03068473711513537 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.35384615384615387, + "acc_stderr": 0.024243783994062167, + "acc_norm": 0.35384615384615387, + "acc_norm_stderr": 0.024243783994062167 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.22, + "acc_stderr": 0.041633319989322695, + "acc_norm": 0.22, + "acc_norm_stderr": 0.041633319989322695 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.0471282125742677, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.0471282125742677 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.03178529710642749, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.03178529710642749 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.36774193548387096, + "acc_stderr": 0.027430866579973474, + "acc_norm": 0.36774193548387096, + "acc_norm_stderr": 0.027430866579973474 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.39316239316239315, + "acc_stderr": 0.03199957924651047, + "acc_norm": 0.39316239316239315, + "acc_norm_stderr": 0.03199957924651047 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3320754716981132, + "acc_stderr": 0.02898545565233439, + "acc_norm": 0.3320754716981132, + "acc_norm_stderr": 0.02898545565233439 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.33636363636363636, + "acc_stderr": 0.04525393596302506, + "acc_norm": 0.33636363636363636, + "acc_norm_stderr": 0.04525393596302506 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 
0.25555555555555554, + "acc_stderr": 0.026593939101844072, + "acc_norm": 0.25555555555555554, + "acc_norm_stderr": 0.026593939101844072 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.26490066225165565, + "acc_stderr": 0.03603038545360384, + "acc_norm": 0.26490066225165565, + "acc_norm_stderr": 0.03603038545360384 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.43781094527363185, + "acc_stderr": 0.035080801121998406, + "acc_norm": 0.43781094527363185, + "acc_norm_stderr": 0.035080801121998406 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.28901734104046245, + "acc_stderr": 0.03456425745087001, + "acc_norm": 0.28901734104046245, + "acc_norm_stderr": 0.03456425745087001 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.24074074074074073, + "acc_stderr": 0.0220190800122179, + "acc_norm": 0.24074074074074073, + "acc_norm_stderr": 0.0220190800122179 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3958333333333333, + "acc_stderr": 0.04089465449325583, + "acc_norm": 0.3958333333333333, + "acc_norm_stderr": 0.04089465449325583 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.40173410404624277, + "acc_stderr": 0.026394104177643637, + "acc_norm": 0.40173410404624277, + "acc_norm_stderr": 0.026394104177643637 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3558282208588957, + "acc_stderr": 0.03761521380046734, + "acc_norm": 0.3558282208588957, + "acc_norm_stderr": 0.03761521380046734 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3425925925925926, + "acc_stderr": 0.026406145973625658, + "acc_norm": 0.3425925925925926, + "acc_norm_stderr": 0.026406145973625658 + }, + 
"harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.38860103626943004, + "acc_stderr": 0.03517739796373133, + "acc_norm": 0.38860103626943004, + "acc_norm_stderr": 0.03517739796373133 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.03999423879281336, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.03999423879281336 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.4055045871559633, + "acc_stderr": 0.02105099799189684, + "acc_norm": 0.4055045871559633, + "acc_norm_stderr": 0.02105099799189684 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.29365079365079366, + "acc_stderr": 0.04073524322147127, + "acc_norm": 0.29365079365079366, + "acc_norm_stderr": 0.04073524322147127 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.34967320261437906, + "acc_stderr": 0.027305308076274702, + "acc_norm": 0.34967320261437906, + "acc_norm_stderr": 0.027305308076274702 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.45, + "acc_stderr": 0.049999999999999996, + "acc_norm": 0.45, + "acc_norm_stderr": 0.049999999999999996 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.4214876033057851, + "acc_stderr": 0.04507732278775094, + "acc_norm": 0.4214876033057851, + "acc_norm_stderr": 0.04507732278775094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3092105263157895, + "acc_stderr": 0.037610708698674805, + "acc_norm": 0.3092105263157895, + "acc_norm_stderr": 0.037610708698674805 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.32679738562091504, + "acc_stderr": 0.018975427920507208, + "acc_norm": 0.32679738562091504, + "acc_norm_stderr": 0.018975427920507208 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2907801418439716, + "acc_stderr": 0.027090664368353178, + "acc_norm": 
0.2907801418439716, + "acc_norm_stderr": 0.027090664368353178 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.24107142857142858, + "acc_stderr": 0.04059867246952685, + "acc_norm": 0.24107142857142858, + "acc_norm_stderr": 0.04059867246952685 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.03167468706828978, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.03167468706828978 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24134078212290502, + "acc_stderr": 0.014310999547961462, + "acc_norm": 0.24134078212290502, + "acc_norm_stderr": 0.014310999547961462 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939098, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939098 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.375, + "acc_stderr": 0.029408372932278746, + "acc_norm": 0.375, + "acc_norm_stderr": 0.029408372932278746 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.39591836734693875, + "acc_stderr": 0.03130802899065686, + "acc_norm": 0.39591836734693875, + "acc_norm_stderr": 0.03130802899065686 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.42616033755274263, + "acc_stderr": 0.03219035703131774, + "acc_norm": 0.42616033755274263, + "acc_norm_stderr": 0.03219035703131774 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3135593220338983, + "acc_stderr": 0.011849234291459329, + "acc_norm": 0.3135593220338983, + "acc_norm_stderr": 0.011849234291459329 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.3431372549019608, + "acc_stderr": 0.033321399446680854, + "acc_norm": 0.3431372549019608, + "acc_norm_stderr": 0.033321399446680854 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 
0.36363636363636365, + "acc_stderr": 0.037563357751878974, + "acc_norm": 0.36363636363636365, + "acc_norm_stderr": 0.037563357751878974 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2594859241126071, + "mc1_stderr": 0.015345409485557966, + "mc2": 0.4051354754510643, + "mc2_stderr": 0.014928061222109967 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.29634002361275086, + "acc_stderr": 0.015699701628594232, + "acc_norm": 0.3754427390791027, + "acc_norm_stderr": 0.01664841158951109 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + 
"harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "colable/llama-ko-peft-v0.5", + "model_sha": "f6bf099eac1dba8f58df3aa96dde99e83a2edbe1", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/colable/llama-ko-peft-v0.6/result_2024-02-02 02:48:02.json b/colable/llama-ko-peft-v0.6/result_2024-02-02 02:48:02.json new file mode 100644 index 0000000000000000000000000000000000000000..4a237fe2c11eeba5750d23985e66b8c770e06c78 --- /dev/null +++ b/colable/llama-ko-peft-v0.6/result_2024-02-02 02:48:02.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.32764505119453924, + "acc_stderr": 0.01371584794071934, 
+ "acc_norm": 0.3993174061433447, + "acc_norm_stderr": 0.014312094557946709 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3794064927305318, + "acc_stderr": 0.004842476363739974, + "acc_norm": 0.48904600677155946, + "acc_norm_stderr": 0.0049885838203099185 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.3391812865497076, + "acc_stderr": 0.03631053496488905, + "acc_norm": 0.3391812865497076, + "acc_norm_stderr": 0.03631053496488905 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2621359223300971, + "acc_stderr": 0.04354631077260595, + "acc_norm": 0.2621359223300971, + "acc_norm_stderr": 0.04354631077260595 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.351213282247765, + "acc_stderr": 0.01706998205149943, + "acc_norm": 0.351213282247765, + "acc_norm_stderr": 0.01706998205149943 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34074074074074073, + "acc_stderr": 0.040943762699967946, + "acc_norm": 0.34074074074074073, + "acc_norm_stderr": 0.040943762699967946 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720683, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720683 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3191489361702128, + "acc_stderr": 0.03047297336338004, + "acc_norm": 0.3191489361702128, + "acc_norm_stderr": 0.03047297336338004 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.27710843373493976, + "acc_stderr": 0.034843315926805875, + "acc_norm": 0.27710843373493976, + "acc_norm_stderr": 0.034843315926805875 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3633440514469453, + "acc_stderr": 0.02731684767419271, + "acc_norm": 0.3633440514469453, + "acc_norm_stderr": 0.02731684767419271 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3991031390134529, + "acc_stderr": 0.032867453125679603, + "acc_norm": 0.3991031390134529, + "acc_norm_stderr": 0.032867453125679603 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3816793893129771, + "acc_stderr": 
0.0426073515764456, + "acc_norm": 0.3816793893129771, + "acc_norm_stderr": 0.0426073515764456 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3787878787878788, + "acc_stderr": 0.03456088731993747, + "acc_norm": 0.3787878787878788, + "acc_norm_stderr": 0.03456088731993747 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.296551724137931, + "acc_stderr": 0.038061426873099935, + "acc_norm": 0.296551724137931, + "acc_norm_stderr": 0.038061426873099935 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.17647058823529413, + "acc_stderr": 0.037932811853078084, + "acc_norm": 0.17647058823529413, + "acc_norm_stderr": 0.037932811853078084 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3445378151260504, + "acc_stderr": 0.03086868260412163, + "acc_norm": 0.3445378151260504, + "acc_norm_stderr": 0.03086868260412163 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3435897435897436, + "acc_stderr": 0.02407869658063547, + "acc_norm": 0.3435897435897436, + "acc_norm_stderr": 0.02407869658063547 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.32407407407407407, + "acc_stderr": 0.04524596007030049, + "acc_norm": 0.32407407407407407, + "acc_norm_stderr": 0.04524596007030049 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2512315270935961, + "acc_stderr": 0.030516530732694436, + "acc_norm": 0.2512315270935961, + "acc_norm_stderr": 0.030516530732694436 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 
0.3096774193548387, + "acc_stderr": 0.026302774983517418, + "acc_norm": 0.3096774193548387, + "acc_norm_stderr": 0.026302774983517418 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.43162393162393164, + "acc_stderr": 0.0324483553531149, + "acc_norm": 0.43162393162393164, + "acc_norm_stderr": 0.0324483553531149 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.32075471698113206, + "acc_stderr": 0.028727502957880267, + "acc_norm": 0.32075471698113206, + "acc_norm_stderr": 0.028727502957880267 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.32727272727272727, + "acc_stderr": 0.04494290866252088, + "acc_norm": 0.32727272727272727, + "acc_norm_stderr": 0.04494290866252088 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.27037037037037037, + "acc_stderr": 0.02708037281514565, + "acc_norm": 0.27037037037037037, + "acc_norm_stderr": 0.02708037281514565 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + "acc_stderr": 0.03684881521389023, + "acc_norm": 0.2847682119205298, + "acc_norm_stderr": 0.03684881521389023 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.4527363184079602, + "acc_stderr": 0.035197027175769155, + "acc_norm": 0.4527363184079602, + "acc_norm_stderr": 0.035197027175769155 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2947976878612717, + "acc_stderr": 0.03476599607516478, + "acc_norm": 0.2947976878612717, + "acc_norm_stderr": 0.03476599607516478 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.26455026455026454, + "acc_stderr": 0.02271746789770861, + "acc_norm": 0.26455026455026454, + "acc_norm_stderr": 0.02271746789770861 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3402777777777778, + "acc_stderr": 0.03962135573486219, + "acc_norm": 0.3402777777777778, + "acc_norm_stderr": 0.03962135573486219 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 
0.044619604333847394 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.34104046242774566, + "acc_stderr": 0.02552247463212161, + "acc_norm": 0.34104046242774566, + "acc_norm_stderr": 0.02552247463212161 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.34355828220858897, + "acc_stderr": 0.03731133519673891, + "acc_norm": 0.34355828220858897, + "acc_norm_stderr": 0.03731133519673891 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.32098765432098764, + "acc_stderr": 0.025976566010862734, + "acc_norm": 0.32098765432098764, + "acc_norm_stderr": 0.025976566010862734 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.31088082901554404, + "acc_stderr": 0.03340361906276585, + "acc_norm": 0.31088082901554404, + "acc_norm_stderr": 0.03340361906276585 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.0414243971948936, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.0414243971948936 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3504587155963303, + "acc_stderr": 0.02045607759982446, + "acc_norm": 0.3504587155963303, + "acc_norm_stderr": 0.02045607759982446 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.039325376803928724, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.039325376803928724 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3464052287581699, + "acc_stderr": 0.02724561304721535, + "acc_norm": 0.3464052287581699, + "acc_norm_stderr": 0.02724561304721535 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 
0.04988876515698589 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.39669421487603307, + "acc_stderr": 0.04465869780531009, + "acc_norm": 0.39669421487603307, + "acc_norm_stderr": 0.04465869780531009 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.27631578947368424, + "acc_stderr": 0.03639057569952925, + "acc_norm": 0.27631578947368424, + "acc_norm_stderr": 0.03639057569952925 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3349673202614379, + "acc_stderr": 0.019094228167000314, + "acc_norm": 0.3349673202614379, + "acc_norm_stderr": 0.019094228167000314 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.30141843971631205, + "acc_stderr": 0.02737412888263115, + "acc_norm": 0.30141843971631205, + "acc_norm_stderr": 0.02737412888263115 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25892857142857145, + "acc_stderr": 0.04157751539865629, + "acc_norm": 0.25892857142857145, + "acc_norm_stderr": 0.04157751539865629 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.02988691054762697, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.02988691054762697 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2335195530726257, + "acc_stderr": 0.014149575348976267, + "acc_norm": 0.2335195530726257, + "acc_norm_stderr": 0.014149575348976267 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3125, + "acc_stderr": 0.02815637344037142, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.02815637344037142 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4, + "acc_stderr": 0.03136250240935893, + "acc_norm": 0.4, + 
"acc_norm_stderr": 0.03136250240935893 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.3881856540084388, + "acc_stderr": 0.03172295004332328, + "acc_norm": 0.3881856540084388, + "acc_norm_stderr": 0.03172295004332328 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.27835723598435463, + "acc_stderr": 0.01144699019738098, + "acc_norm": 0.27835723598435463, + "acc_norm_stderr": 0.01144699019738098 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.29901960784313725, + "acc_stderr": 0.03213325717373618, + "acc_norm": 0.29901960784313725, + "acc_norm_stderr": 0.03213325717373618 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3393939393939394, + "acc_stderr": 0.03697442205031596, + "acc_norm": 0.3393939393939394, + "acc_norm_stderr": 0.03697442205031596 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2558139534883721, + "mc1_stderr": 0.015274176219283344, + "mc2": 0.40967011932858216, + "mc2_stderr": 0.015062527514760943 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2833530106257379, + "acc_stderr": 0.015492852084597242, + "acc_norm": 0.38016528925619836, + "acc_norm_stderr": 0.016689333596980115 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + 
"harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "colable/llama-ko-peft-v0.6", + "model_sha": 
"792f7230511c0a7b72b41ceea944464dc149dd29", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/colable/llama-ko-peft/result_2024-01-26 14:53:38.json b/colable/llama-ko-peft/result_2024-01-26 14:53:38.json new file mode 100644 index 0000000000000000000000000000000000000000..c7fdfee2ed237a30625577673cc9befb055ee749 --- /dev/null +++ b/colable/llama-ko-peft/result_2024-01-26 14:53:38.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.30716723549488056, + "acc_stderr": 0.013481034054980945, + "acc_norm": 0.39590443686006827, + "acc_norm_stderr": 0.01429122839353659 + }, + "harness|ko_hellaswag|10": { + "acc": 0.37821151165106554, + "acc_stderr": 0.004839497020536614, + "acc_norm": 0.49173471420035847, + "acc_norm_stderr": 0.004989099611536827 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.39766081871345027, + "acc_stderr": 0.0375363895576169, + "acc_norm": 0.39766081871345027, + "acc_norm_stderr": 0.0375363895576169 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4174757281553398, + "acc_stderr": 0.048828405482122375, + "acc_norm": 0.4174757281553398, + "acc_norm_stderr": 0.048828405482122375 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.01757070523925655, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.01757070523925655 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3851851851851852, + "acc_stderr": 0.042039210401562783, + "acc_norm": 0.3851851851851852, + "acc_norm_stderr": 0.042039210401562783 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720683, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720683 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.34893617021276596, + "acc_stderr": 0.031158522131357783, + "acc_norm": 0.34893617021276596, + 
"acc_norm_stderr": 0.031158522131357783 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.28313253012048195, + "acc_stderr": 0.0350729543137052, + "acc_norm": 0.28313253012048195, + "acc_norm_stderr": 0.0350729543137052 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.39228295819935693, + "acc_stderr": 0.02773125864701199, + "acc_norm": 0.39228295819935693, + "acc_norm_stderr": 0.02773125864701199 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4170403587443946, + "acc_stderr": 0.03309266936071721, + "acc_norm": 0.4170403587443946, + "acc_norm_stderr": 0.03309266936071721 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.44274809160305345, + "acc_stderr": 0.0435644720266507, + "acc_norm": 0.44274809160305345, + "acc_norm_stderr": 0.0435644720266507 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.43434343434343436, + "acc_stderr": 0.035315058793591834, + "acc_norm": 0.43434343434343436, + "acc_norm_stderr": 0.035315058793591834 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3448275862068966, + "acc_stderr": 0.039609335494512087, + "acc_norm": 0.3448275862068966, + "acc_norm_stderr": 0.039609335494512087 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.04023382273617747, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.04023382273617747 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.37815126050420167, + "acc_stderr": 0.031499305777849054, + "acc_norm": 0.37815126050420167, + "acc_norm_stderr": 0.031499305777849054 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3717948717948718, + "acc_stderr": 0.02450347255711095, + "acc_norm": 0.3717948717948718, + "acc_norm_stderr": 0.02450347255711095 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.4, + "acc_stderr": 
0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.29064039408866993, + "acc_stderr": 0.0319474007226554, + "acc_norm": 0.29064039408866993, + "acc_norm_stderr": 0.0319474007226554 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3903225806451613, + "acc_stderr": 0.027751256636969576, + "acc_norm": 0.3903225806451613, + "acc_norm_stderr": 0.027751256636969576 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.49145299145299143, + "acc_stderr": 0.032751303000970296, + "acc_norm": 0.49145299145299143, + "acc_norm_stderr": 0.032751303000970296 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.35471698113207545, + "acc_stderr": 0.0294451753281996, + "acc_norm": 0.35471698113207545, + "acc_norm_stderr": 0.0294451753281996 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.36363636363636365, + "acc_stderr": 0.04607582090719976, + "acc_norm": 0.36363636363636365, + "acc_norm_stderr": 0.04607582090719976 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2814814814814815, + "acc_stderr": 0.02742001935094527, + "acc_norm": 0.2814814814814815, + "acc_norm_stderr": 0.02742001935094527 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.23178807947019867, + "acc_stderr": 0.03445406271987054, + "acc_norm": 0.23178807947019867, + "acc_norm_stderr": 0.03445406271987054 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5024875621890548, + "acc_stderr": 0.0353549015013729, + "acc_norm": 0.5024875621890548, + "acc_norm_stderr": 0.0353549015013729 + }, + "harness|ko_mmlu_college_medicine|5": { + 
"acc": 0.30057803468208094, + "acc_stderr": 0.03496101481191181, + "acc_norm": 0.30057803468208094, + "acc_norm_stderr": 0.03496101481191181 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.24074074074074073, + "acc_stderr": 0.022019080012217893, + "acc_norm": 0.24074074074074073, + "acc_norm_stderr": 0.022019080012217893 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.04076663253918567, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.04076663253918567 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4046242774566474, + "acc_stderr": 0.026424816594009845, + "acc_norm": 0.4046242774566474, + "acc_norm_stderr": 0.026424816594009845 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3619631901840491, + "acc_stderr": 0.037757007291414416, + "acc_norm": 0.3619631901840491, + "acc_norm_stderr": 0.037757007291414416 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.35802469135802467, + "acc_stderr": 0.026675611926037086, + "acc_norm": 0.35802469135802467, + "acc_norm_stderr": 0.026675611926037086 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.38341968911917096, + "acc_stderr": 0.03508984236295342, + "acc_norm": 0.38341968911917096, + "acc_norm_stderr": 0.03508984236295342 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.040493392977481404, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.040493392977481404 + }, + 
"harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.42385321100917434, + "acc_stderr": 0.02118726320908751, + "acc_norm": 0.42385321100917434, + "acc_norm_stderr": 0.02118726320908751 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.04134913018303316, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.04134913018303316 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3954248366013072, + "acc_stderr": 0.027996723180631462, + "acc_norm": 0.3954248366013072, + "acc_norm_stderr": 0.027996723180631462 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.4380165289256198, + "acc_stderr": 0.04529146804435792, + "acc_norm": 0.4380165289256198, + "acc_norm_stderr": 0.04529146804435792 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.34868421052631576, + "acc_stderr": 0.03878139888797611, + "acc_norm": 0.34868421052631576, + "acc_norm_stderr": 0.03878139888797611 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.33986928104575165, + "acc_stderr": 0.019162418588623546, + "acc_norm": 0.33986928104575165, + "acc_norm_stderr": 0.019162418588623546 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.31560283687943264, + "acc_stderr": 0.027724989449509317, + "acc_norm": 0.31560283687943264, + "acc_norm_stderr": 0.027724989449509317 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.32142857142857145, + "acc_stderr": 0.044328040552915206, + "acc_norm": 0.32142857142857145, + "acc_norm_stderr": 0.044328040552915206 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.031415546294025445, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.031415546294025445 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2335195530726257, + "acc_stderr": 0.014149575348976259, + 
"acc_norm": 0.2335195530726257, + "acc_norm_stderr": 0.014149575348976259 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3382352941176471, + "acc_stderr": 0.028739328513983576, + "acc_norm": 0.3382352941176471, + "acc_norm_stderr": 0.028739328513983576 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3795918367346939, + "acc_stderr": 0.031067211262872478, + "acc_norm": 0.3795918367346939, + "acc_norm_stderr": 0.031067211262872478 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.4008438818565401, + "acc_stderr": 0.031900803894732356, + "acc_norm": 0.4008438818565401, + "acc_norm_stderr": 0.031900803894732356 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3116036505867014, + "acc_stderr": 0.011829039182849657, + "acc_norm": 0.3116036505867014, + "acc_norm_stderr": 0.011829039182849657 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.3431372549019608, + "acc_stderr": 0.033321399446680854, + "acc_norm": 0.3431372549019608, + "acc_norm_stderr": 0.033321399446680854 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3878787878787879, + "acc_stderr": 0.038049136539710114, + "acc_norm": 0.3878787878787879, + "acc_norm_stderr": 0.038049136539710114 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2423500611995104, + "mc1_stderr": 0.015000674373570345, + "mc2": 0.4005762989369565, + "mc2_stderr": 0.014867917039397242 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3152302243211334, + "acc_stderr": 0.015973534923794473, + "acc_norm": 0.42621015348288077, + "acc_norm_stderr": 0.017002122609489263 + } + }, + "versions": { + "all": 0, + 
"harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + 
"harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "colable/llama-ko-peft", + "model_sha": "6cb7c5edacc6fa18604bc63d8758d83a4495ee48", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/colable/llama2-ko-DPO/result_2024-02-05 11:18:20.json b/colable/llama2-ko-DPO/result_2024-02-05 11:18:20.json new file mode 100644 index 0000000000000000000000000000000000000000..7feeb899f3edb0161dd007fc8594397ff1e95fde --- /dev/null +++ b/colable/llama2-ko-DPO/result_2024-02-05 11:18:20.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.22696245733788395, + "acc_stderr": 0.01224049153613286, + "acc_norm": 0.22696245733788395, + "acc_norm_stderr": 0.01224049153613286 + }, + "harness|ko_hellaswag|10": { + "acc": 0.2504481179047998, + "acc_stderr": 0.004323856300539177, + "acc_norm": 0.2504481179047998, + "acc_norm_stderr": 0.004323856300539177 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.3216374269005848, + "acc_stderr": 0.03582529442573122, + "acc_norm": 0.3216374269005848, + "acc_norm_stderr": 0.03582529442573122 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.17475728155339806, + "acc_stderr": 
0.037601780060266196, + "acc_norm": 0.17475728155339806, + "acc_norm_stderr": 0.037601780060266196 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.23754789272030652, + "acc_stderr": 0.015218733046150191, + "acc_norm": 0.23754789272030652, + "acc_norm_stderr": 0.015218733046150191 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.18518518518518517, + "acc_stderr": 0.03355677216313142, + "acc_norm": 0.18518518518518517, + "acc_norm_stderr": 0.03355677216313142 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932268, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932268 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.26382978723404255, + "acc_stderr": 0.028809989854102987, + "acc_norm": 0.26382978723404255, + "acc_norm_stderr": 0.028809989854102987 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.28313253012048195, + "acc_stderr": 0.03507295431370518, + "acc_norm": 0.28313253012048195, + "acc_norm_stderr": 0.03507295431370518 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.1864951768488746, + "acc_stderr": 0.022122439772480757, + "acc_norm": 0.1864951768488746, + "acc_norm_stderr": 0.022122439772480757 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.31390134529147984, + "acc_stderr": 0.031146796482972465, + "acc_norm": 0.31390134529147984, + "acc_norm_stderr": 0.031146796482972465 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2595419847328244, + "acc_stderr": 0.03844876139785271, + "acc_norm": 0.2595419847328244, + "acc_norm_stderr": 0.03844876139785271 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.17676767676767677, + "acc_stderr": 0.027178752639044915, + "acc_norm": 0.17676767676767677, + "acc_norm_stderr": 0.027178752639044915 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 
0.2413793103448276, + "acc_stderr": 0.03565998174135302, + "acc_norm": 0.2413793103448276, + "acc_norm_stderr": 0.03565998174135302 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237654, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237654 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.21008403361344538, + "acc_stderr": 0.026461398717471874, + "acc_norm": 0.21008403361344538, + "acc_norm_stderr": 0.026461398717471874 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.20256410256410257, + "acc_stderr": 0.020377660970371393, + "acc_norm": 0.20256410256410257, + "acc_norm_stderr": 0.020377660970371393 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.18, + "acc_stderr": 0.038612291966536955, + "acc_norm": 0.18, + "acc_norm_stderr": 0.038612291966536955 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.04236511258094633, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.04236511258094633 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.15270935960591134, + "acc_stderr": 0.025308904539380627, + "acc_norm": 0.15270935960591134, + "acc_norm_stderr": 0.025308904539380627 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.1774193548387097, + "acc_stderr": 0.021732540689329276, + "acc_norm": 0.1774193548387097, + "acc_norm_stderr": 0.021732540689329276 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2905982905982906, + "acc_stderr": 0.029745048572674057, + "acc_norm": 0.2905982905982906, + "acc_norm_stderr": 0.029745048572674057 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.21509433962264152, + "acc_stderr": 0.025288394502891377, + "acc_norm": 0.21509433962264152, + "acc_norm_stderr": 0.025288394502891377 
+ }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03955932861795833, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03955932861795833 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2111111111111111, + "acc_stderr": 0.024882116857655075, + "acc_norm": 0.2111111111111111, + "acc_norm_stderr": 0.024882116857655075 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.1986754966887417, + "acc_stderr": 0.03257847384436775, + "acc_norm": 0.1986754966887417, + "acc_norm_stderr": 0.03257847384436775 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.24378109452736318, + "acc_stderr": 0.030360490154014645, + "acc_norm": 0.24378109452736318, + "acc_norm_stderr": 0.030360490154014645 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.20809248554913296, + "acc_stderr": 0.030952890217749884, + "acc_norm": 0.20809248554913296, + "acc_norm_stderr": 0.030952890217749884 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.20899470899470898, + "acc_stderr": 0.020940481565334835, + "acc_norm": 0.20899470899470898, + "acc_norm_stderr": 0.020940481565334835 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2569444444444444, + "acc_stderr": 0.03653946969442099, + "acc_norm": 0.2569444444444444, + "acc_norm_stderr": 0.03653946969442099 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.2, + "acc_stderr": 0.040201512610368445, + "acc_norm": 0.2, + "acc_norm_stderr": 0.040201512610368445 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542129, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542129 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.24855491329479767, + "acc_stderr": 0.023267528432100174, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 0.023267528432100174 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.22085889570552147, + "acc_stderr": 0.03259177392742178, + "acc_norm": 
0.22085889570552147, + "acc_norm_stderr": 0.03259177392742178 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.21604938271604937, + "acc_stderr": 0.022899162918445796, + "acc_norm": 0.21604938271604937, + "acc_norm_stderr": 0.022899162918445796 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.19689119170984457, + "acc_stderr": 0.028697873971860677, + "acc_norm": 0.19689119170984457, + "acc_norm_stderr": 0.028697873971860677 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.039994238792813365, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.039994238792813365 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.1926605504587156, + "acc_stderr": 0.016909276884936097, + "acc_norm": 0.1926605504587156, + "acc_norm_stderr": 0.016909276884936097 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.04040610178208841, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.04040610178208841 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.02392915551735129, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.02392915551735129 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2396694214876033, + "acc_stderr": 0.038968789850704164, + "acc_norm": 0.2396694214876033, + "acc_norm_stderr": 0.038968789850704164 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.17763157894736842, + "acc_stderr": 0.03110318238312338, + "acc_norm": 0.17763157894736842, + "acc_norm_stderr": 0.03110318238312338 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.25, + "acc_stderr": 
0.01751781884501444, + "acc_norm": 0.25, + "acc_norm_stderr": 0.01751781884501444 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.23404255319148937, + "acc_stderr": 0.025257861359432414, + "acc_norm": 0.23404255319148937, + "acc_norm_stderr": 0.025257861359432414 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3125, + "acc_stderr": 0.043994650575715215, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.043994650575715215 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.1527777777777778, + "acc_stderr": 0.024536326026134238, + "acc_norm": 0.1527777777777778, + "acc_norm_stderr": 0.024536326026134238 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23798882681564246, + "acc_stderr": 0.014242630070574892, + "acc_norm": 0.23798882681564246, + "acc_norm_stderr": 0.014242630070574892 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.18382352941176472, + "acc_stderr": 0.02352924218519311, + "acc_norm": 0.18382352941176472, + "acc_norm_stderr": 0.02352924218519311 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.18775510204081633, + "acc_stderr": 0.025000256039546198, + "acc_norm": 0.18775510204081633, + "acc_norm_stderr": 0.025000256039546198 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.270042194092827, + "acc_stderr": 0.028900721906293426, + "acc_norm": 0.270042194092827, + "acc_norm_stderr": 0.028900721906293426 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2457627118644068, + "acc_stderr": 0.010996156635142692, + "acc_norm": 0.2457627118644068, + "acc_norm_stderr": 0.010996156635142692 + }, + "harness|ko_mmlu_high_school_us_history|5": { 
+ "acc": 0.25, + "acc_stderr": 0.03039153369274154, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03039153369274154 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03225078108306289, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03225078108306289 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 1.0, + "mc1_stderr": 0.0, + "mc2": NaN, + "mc2_stderr": NaN + }, + "harness|ko_commongen_v2|2": { + "acc": 0.24793388429752067, + "acc_stderr": 0.014846044968252247, + "acc_norm": 0.24793388429752067, + "acc_norm_stderr": 0.014846044968252247 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, 
+ "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "colable/llama2-ko-DPO", + "model_sha": "0a3bf586bd51bcb788ce8d1999aad474e023f605", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/cong1230/LDCC_LoRA_NoRemote_full_re/result_2024-03-06 03:12:32.json b/cong1230/LDCC_LoRA_NoRemote_full_re/result_2024-03-06 03:12:32.json new file mode 100644 index 0000000000000000000000000000000000000000..021af3b8f52a346d294800f859466c64b9b7bbea --- /dev/null +++ b/cong1230/LDCC_LoRA_NoRemote_full_re/result_2024-03-06 03:12:32.json @@ -0,0 +1,444 @@ +{ + 
"results": { + "harness|ko_arc_challenge|25": { + "acc": 0.40955631399317405, + "acc_stderr": 0.014370358632472432, + "acc_norm": 0.45819112627986347, + "acc_norm_stderr": 0.014560220308714697 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4122684724158534, + "acc_stderr": 0.0049123700239130175, + "acc_norm": 0.5630352519418442, + "acc_norm_stderr": 0.004949969363017657 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5321637426900585, + "acc_stderr": 0.038268824176603704, + "acc_norm": 0.5321637426900585, + "acc_norm_stderr": 0.038268824176603704 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.44660194174757284, + "acc_stderr": 0.04922424153458933, + "acc_norm": 0.44660194174757284, + "acc_norm_stderr": 0.04922424153458933 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.524904214559387, + "acc_stderr": 0.01785777070490104, + "acc_norm": 0.524904214559387, + "acc_norm_stderr": 0.01785777070490104 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4888888888888889, + "acc_stderr": 0.04318275491977976, + "acc_norm": 0.4888888888888889, + "acc_norm_stderr": 0.04318275491977976 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4127659574468085, + "acc_stderr": 0.03218471141400351, + "acc_norm": 0.4127659574468085, + "acc_norm_stderr": 0.03218471141400351 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3855421686746988, + "acc_stderr": 0.03789134424611548, + "acc_norm": 0.3855421686746988, + "acc_norm_stderr": 0.03789134424611548 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4694533762057878, + "acc_stderr": 0.028345045864840674, + "acc_norm": 0.4694533762057878, + "acc_norm_stderr": 0.028345045864840674 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5381165919282511, + "acc_stderr": 0.03346015011973228, + "acc_norm": 0.5381165919282511, + "acc_norm_stderr": 
0.03346015011973228 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.45038167938931295, + "acc_stderr": 0.04363643698524779, + "acc_norm": 0.45038167938931295, + "acc_norm_stderr": 0.04363643698524779 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5606060606060606, + "acc_stderr": 0.035360859475294805, + "acc_norm": 0.5606060606060606, + "acc_norm_stderr": 0.035360859475294805 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3793103448275862, + "acc_stderr": 0.04043461861916747, + "acc_norm": 0.3793103448275862, + "acc_norm_stderr": 0.04043461861916747 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.18627450980392157, + "acc_stderr": 0.038739587141493524, + "acc_norm": 0.18627450980392157, + "acc_norm_stderr": 0.038739587141493524 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.44537815126050423, + "acc_stderr": 0.032284106267163895, + "acc_norm": 0.44537815126050423, + "acc_norm_stderr": 0.032284106267163895 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4205128205128205, + "acc_stderr": 0.025028610276710852, + "acc_norm": 0.4205128205128205, + "acc_norm_stderr": 0.025028610276710852 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.44, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.44, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.04820403072760628, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.04820403072760628 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3793103448275862, + "acc_stderr": 0.034139638059062345, + "acc_norm": 
0.3793103448275862, + "acc_norm_stderr": 0.034139638059062345 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4290322580645161, + "acc_stderr": 0.02815603653823321, + "acc_norm": 0.4290322580645161, + "acc_norm_stderr": 0.02815603653823321 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6367521367521367, + "acc_stderr": 0.03150712523091264, + "acc_norm": 0.6367521367521367, + "acc_norm_stderr": 0.03150712523091264 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.41509433962264153, + "acc_stderr": 0.030325945789286105, + "acc_norm": 0.41509433962264153, + "acc_norm_stderr": 0.030325945789286105 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4909090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.4909090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24814814814814815, + "acc_stderr": 0.0263357394040558, + "acc_norm": 0.24814814814814815, + "acc_norm_stderr": 0.0263357394040558 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + "acc_stderr": 0.03684881521389024, + "acc_norm": 0.2847682119205298, + "acc_norm_stderr": 0.03684881521389024 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5472636815920398, + "acc_stderr": 0.03519702717576915, + "acc_norm": 0.5472636815920398, + "acc_norm_stderr": 0.03519702717576915 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3699421965317919, + "acc_stderr": 0.03681229633394319, + "acc_norm": 0.3699421965317919, + "acc_norm_stderr": 0.03681229633394319 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2724867724867725, + "acc_stderr": 0.02293097307163335, + "acc_norm": 0.2724867724867725, + "acc_norm_stderr": 0.02293097307163335 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4375, + "acc_stderr": 0.04148415739394154, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.04148415739394154 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + 
"acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.63, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.63, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5086705202312138, + "acc_stderr": 0.026915047355369804, + "acc_norm": 0.5086705202312138, + "acc_norm_stderr": 0.026915047355369804 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.49693251533742333, + "acc_stderr": 0.03928297078179662, + "acc_norm": 0.49693251533742333, + "acc_norm_stderr": 0.03928297078179662 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.45987654320987653, + "acc_stderr": 0.027731022753539274, + "acc_norm": 0.45987654320987653, + "acc_norm_stderr": 0.027731022753539274 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5025906735751295, + "acc_stderr": 0.03608390745384488, + "acc_norm": 0.5025906735751295, + "acc_norm_stderr": 0.03608390745384488 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.04049339297748142, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.04049339297748142 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5467889908256881, + "acc_stderr": 0.021343255165546044, + "acc_norm": 0.5467889908256881, + "acc_norm_stderr": 0.021343255165546044 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.29365079365079366, + "acc_stderr": 0.04073524322147126, + "acc_norm": 0.29365079365079366, + "acc_norm_stderr": 0.04073524322147126 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4150326797385621, + "acc_stderr": 0.028213504177824093, + "acc_norm": 0.4150326797385621, + "acc_norm_stderr": 0.028213504177824093 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 
0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6198347107438017, + "acc_stderr": 0.04431324501968432, + "acc_norm": 0.6198347107438017, + "acc_norm_stderr": 0.04431324501968432 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.375, + "acc_stderr": 0.039397364351956274, + "acc_norm": 0.375, + "acc_norm_stderr": 0.039397364351956274 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.39215686274509803, + "acc_stderr": 0.019751726508762623, + "acc_norm": 0.39215686274509803, + "acc_norm_stderr": 0.019751726508762623 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3262411347517731, + "acc_stderr": 0.02796845304356317, + "acc_norm": 0.3262411347517731, + "acc_norm_stderr": 0.02796845304356317 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25892857142857145, + "acc_stderr": 0.041577515398656284, + "acc_norm": 0.25892857142857145, + "acc_norm_stderr": 0.041577515398656284 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2824074074074074, + "acc_stderr": 0.030701372111510923, + "acc_norm": 0.2824074074074074, + "acc_norm_stderr": 0.030701372111510923 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.030161911930767102, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.030161911930767102 + }, + 
"harness|ko_mmlu_security_studies|5": { + "acc": 0.42448979591836733, + "acc_stderr": 0.031642094879429414, + "acc_norm": 0.42448979591836733, + "acc_norm_stderr": 0.031642094879429414 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.620253164556962, + "acc_stderr": 0.031591887529658504, + "acc_norm": 0.620253164556962, + "acc_norm_stderr": 0.031591887529658504 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3324641460234681, + "acc_stderr": 0.012032022332260509, + "acc_norm": 0.3324641460234681, + "acc_norm_stderr": 0.012032022332260509 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4950980392156863, + "acc_stderr": 0.03509143375606786, + "acc_norm": 0.4950980392156863, + "acc_norm_stderr": 0.03509143375606786 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5515151515151515, + "acc_stderr": 0.038835659779569286, + "acc_norm": 0.5515151515151515, + "acc_norm_stderr": 0.038835659779569286 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.29253365973072215, + "mc1_stderr": 0.015925597445286165, + "mc2": 0.45903460196798157, + "mc2_stderr": 0.015239505582292652 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4757969303423849, + "acc_stderr": 0.017170202466520748, + "acc_norm": 0.5608028335301063, + "acc_norm_stderr": 0.017062775744780705 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 
1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + 
"harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "cong1230/LDCC_LoRA_NoRemote_full_re", + "model_sha": "45e971a281b7c4f593a8a419d5866e5b3740a75c", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/cong1230/LDCC_LoRA_full/result_2024-02-27 18:34:53.json b/cong1230/LDCC_LoRA_full/result_2024-02-27 18:34:53.json new file mode 100644 index 0000000000000000000000000000000000000000..bebf59d1fa990bf30d9315d22410d735347c3232 --- /dev/null +++ b/cong1230/LDCC_LoRA_full/result_2024-02-27 18:34:53.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4104095563139932, + "acc_stderr": 0.014374922192642662, + "acc_norm": 0.454778156996587, + "acc_norm_stderr": 0.014551507060836357 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4116709818761203, + "acc_stderr": 0.00491130356976979, + "acc_norm": 0.559151563433579, + "acc_norm_stderr": 0.004954740808837189 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5263157894736842, + "acc_stderr": 0.03829509868994727, + "acc_norm": 0.5263157894736842, + "acc_norm_stderr": 0.03829509868994727 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.44660194174757284, + "acc_stderr": 0.04922424153458933, + "acc_norm": 0.44660194174757284, + "acc_norm_stderr": 0.04922424153458933 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.541507024265645, + "acc_stderr": 0.017818248603465578, + "acc_norm": 0.541507024265645, + "acc_norm_stderr": 0.017818248603465578 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4740740740740741, + "acc_stderr": 0.04313531696750574, + "acc_norm": 0.4740740740740741, + "acc_norm_stderr": 0.04313531696750574 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206824, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206824 + }, + 
"harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.41702127659574467, + "acc_stderr": 0.03223276266711712, + "acc_norm": 0.41702127659574467, + "acc_norm_stderr": 0.03223276266711712 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3855421686746988, + "acc_stderr": 0.03789134424611548, + "acc_norm": 0.3855421686746988, + "acc_norm_stderr": 0.03789134424611548 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4533762057877814, + "acc_stderr": 0.02827435985489424, + "acc_norm": 0.4533762057877814, + "acc_norm_stderr": 0.02827435985489424 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5336322869955157, + "acc_stderr": 0.033481800170603065, + "acc_norm": 0.5336322869955157, + "acc_norm_stderr": 0.033481800170603065 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4580152671755725, + "acc_stderr": 0.04369802690578757, + "acc_norm": 0.4580152671755725, + "acc_norm_stderr": 0.04369802690578757 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.03540294377095368, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.03540294377095368 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.04104269211806232, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.04104269211806232 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.04023382273617747, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.04023382273617747 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.032252942323996406, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.032252942323996406 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4307692307692308, + "acc_stderr": 0.025106820660539746, + "acc_norm": 
0.4307692307692308, + "acc_norm_stderr": 0.025106820660539746 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.47, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.47, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.048262172941398944, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.048262172941398944 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3891625615763547, + "acc_stderr": 0.034304624161038716, + "acc_norm": 0.3891625615763547, + "acc_norm_stderr": 0.034304624161038716 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.43548387096774194, + "acc_stderr": 0.028206225591502744, + "acc_norm": 0.43548387096774194, + "acc_norm_stderr": 0.028206225591502744 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6239316239316239, + "acc_stderr": 0.03173393632969482, + "acc_norm": 0.6239316239316239, + "acc_norm_stderr": 0.03173393632969482 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4377358490566038, + "acc_stderr": 0.030533338430467516, + "acc_norm": 0.4377358490566038, + "acc_norm_stderr": 0.030533338430467516 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.45454545454545453, + "acc_stderr": 0.04769300568972743, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.04769300568972743 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24444444444444444, + "acc_stderr": 0.026202766534652148, + "acc_norm": 0.24444444444444444, + "acc_norm_stderr": 0.026202766534652148 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2781456953642384, + "acc_stderr": 0.03658603262763743, + "acc_norm": 0.2781456953642384, + "acc_norm_stderr": 0.03658603262763743 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5422885572139303, + "acc_stderr": 
0.035228658640995975, + "acc_norm": 0.5422885572139303, + "acc_norm_stderr": 0.035228658640995975 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3988439306358382, + "acc_stderr": 0.037336266553835096, + "acc_norm": 0.3988439306358382, + "acc_norm_stderr": 0.037336266553835096 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2671957671957672, + "acc_stderr": 0.02278967314577657, + "acc_norm": 0.2671957671957672, + "acc_norm_stderr": 0.02278967314577657 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4305555555555556, + "acc_stderr": 0.04140685639111502, + "acc_norm": 0.4305555555555556, + "acc_norm_stderr": 0.04140685639111502 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.66, + "acc_stderr": 0.04760952285695238, + "acc_norm": 0.66, + "acc_norm_stderr": 0.04760952285695238 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5028901734104047, + "acc_stderr": 0.02691864538323901, + "acc_norm": 0.5028901734104047, + "acc_norm_stderr": 0.02691864538323901 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5276073619631901, + "acc_stderr": 0.03922378290610991, + "acc_norm": 0.5276073619631901, + "acc_norm_stderr": 0.03922378290610991 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.027648477877413327, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.027648477877413327 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5181347150259067, + "acc_stderr": 0.036060650018329185, + "acc_norm": 0.5181347150259067, + "acc_norm_stderr": 0.036060650018329185 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + 
"acc_stderr": 0.04049339297748142, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.04049339297748142 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.544954128440367, + "acc_stderr": 0.021350503090925163, + "acc_norm": 0.544954128440367, + "acc_norm_stderr": 0.021350503090925163 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30158730158730157, + "acc_stderr": 0.04104947269903394, + "acc_norm": 0.30158730158730157, + "acc_norm_stderr": 0.04104947269903394 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.027826109307283683, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.027826109307283683 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6115702479338843, + "acc_stderr": 0.04449270350068383, + "acc_norm": 0.6115702479338843, + "acc_norm_stderr": 0.04449270350068383 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3684210526315789, + "acc_stderr": 0.03925523381052932, + "acc_norm": 0.3684210526315789, + "acc_norm_stderr": 0.03925523381052932 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.38562091503267976, + "acc_stderr": 0.019691459052354164, + "acc_norm": 0.38562091503267976, + "acc_norm_stderr": 0.019691459052354164 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.33687943262411346, + "acc_stderr": 0.02819553487396673, + "acc_norm": 0.33687943262411346, + "acc_norm_stderr": 0.02819553487396673 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25, + "acc_stderr": 0.04109974682633932, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04109974682633932 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.30092592592592593, + "acc_stderr": 0.031280390843298825, + "acc_norm": 0.30092592592592593, + "acc_norm_stderr": 0.031280390843298825 + }, + 
"harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4227941176470588, + "acc_stderr": 0.03000856284500347, + "acc_norm": 0.4227941176470588, + "acc_norm_stderr": 0.03000856284500347 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4489795918367347, + "acc_stderr": 0.03184213866687579, + "acc_norm": 0.4489795918367347, + "acc_norm_stderr": 0.03184213866687579 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6160337552742616, + "acc_stderr": 0.031658678064106674, + "acc_norm": 0.6160337552742616, + "acc_norm_stderr": 0.031658678064106674 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.33572359843546284, + "acc_stderr": 0.012061304157664618, + "acc_norm": 0.33572359843546284, + "acc_norm_stderr": 0.012061304157664618 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5098039215686274, + "acc_stderr": 0.03508637358630572, + "acc_norm": 0.5098039215686274, + "acc_norm_stderr": 0.03508637358630572 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5333333333333333, + "acc_stderr": 0.03895658065271847, + "acc_norm": 0.5333333333333333, + "acc_norm_stderr": 0.03895658065271847 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2937576499388005, + "mc1_stderr": 0.015945068581236614, + "mc2": 0.4533991041143101, + "mc2_stderr": 0.0152257842500209 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.45454545454545453, + "acc_stderr": 0.017119172208061504, + "acc_norm": 0.5336481700118064, + 
"acc_norm_stderr": 0.017151384117131862 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, 
+ "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "cong1230/LDCC_LoRA_full", + "model_sha": "7efb9500a688630bd5ae02dfd47c2190bf9502c3", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/cosmicvalor/mistral-orthogonalized/result_2024-05-25 13:34:24.json b/cosmicvalor/mistral-orthogonalized/result_2024-05-25 13:34:24.json new file mode 100644 index 0000000000000000000000000000000000000000..d1cb2dbd635fcb907732e43cfa663179595cb097 --- /dev/null +++ b/cosmicvalor/mistral-orthogonalized/result_2024-05-25 13:34:24.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3165529010238908, + "acc_stderr": 0.013592431519068077, + "acc_norm": 0.35665529010238906, + "acc_norm_stderr": 0.013998056902620199 + }, + "harness|ko_hellaswag|10": { + "acc": 0.36387173869747064, + "acc_stderr": 0.00480129095438708, + "acc_norm": 0.45359490141406095, + "acc_norm_stderr": 0.004968244611429389 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4152046783625731, + "acc_stderr": 0.03779275945503201, + "acc_norm": 0.4152046783625731, + "acc_norm_stderr": 0.03779275945503201 + }, + 
"harness|ko_mmlu_management|5": { + "acc": 0.5825242718446602, + "acc_stderr": 0.048828405482122375, + "acc_norm": 0.5825242718446602, + "acc_norm_stderr": 0.048828405482122375 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4393358876117497, + "acc_stderr": 0.01774787424568362, + "acc_norm": 0.4393358876117497, + "acc_norm_stderr": 0.01774787424568362 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.35555555555555557, + "acc_stderr": 0.04135176749720386, + "acc_norm": 0.35555555555555557, + "acc_norm_stderr": 0.04135176749720386 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.40425531914893614, + "acc_stderr": 0.03208115750788684, + "acc_norm": 0.40425531914893614, + "acc_norm_stderr": 0.03208115750788684 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3674698795180723, + "acc_stderr": 0.03753267402120575, + "acc_norm": 0.3674698795180723, + "acc_norm_stderr": 0.03753267402120575 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4533762057877814, + "acc_stderr": 0.02827435985489425, + "acc_norm": 0.4533762057877814, + "acc_norm_stderr": 0.02827435985489425 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.42152466367713004, + "acc_stderr": 0.033141902221106564, + "acc_norm": 0.42152466367713004, + "acc_norm_stderr": 0.033141902221106564 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.366412213740458, + "acc_stderr": 0.042258754519696386, + "acc_norm": 0.366412213740458, + "acc_norm_stderr": 0.042258754519696386 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.51010101010101, + "acc_stderr": 0.035616254886737454, + "acc_norm": 0.51010101010101, + "acc_norm_stderr": 0.035616254886737454 + }, + 
"harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4482758620689655, + "acc_stderr": 0.041443118108781506, + "acc_norm": 0.4482758620689655, + "acc_norm_stderr": 0.041443118108781506 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.04023382273617747, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.04023382273617747 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5084033613445378, + "acc_stderr": 0.0324739027656967, + "acc_norm": 0.5084033613445378, + "acc_norm_stderr": 0.0324739027656967 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4256410256410256, + "acc_stderr": 0.02506909438729655, + "acc_norm": 0.4256410256410256, + "acc_norm_stderr": 0.02506909438729655 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5185185185185185, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.5185185185185185, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3891625615763547, + "acc_stderr": 0.034304624161038716, + "acc_norm": 0.3891625615763547, + "acc_norm_stderr": 0.034304624161038716 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4032258064516129, + "acc_stderr": 0.027906150826041136, + "acc_norm": 0.4032258064516129, + "acc_norm_stderr": 0.027906150826041136 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7393162393162394, + "acc_stderr": 0.02876034895652341, + "acc_norm": 0.7393162393162394, + "acc_norm_stderr": 0.02876034895652341 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4037735849056604, + "acc_stderr": 0.030197611600197943, + "acc_norm": 0.4037735849056604, + 
"acc_norm_stderr": 0.030197611600197943 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4, + "acc_stderr": 0.0469237132203465, + "acc_norm": 0.4, + "acc_norm_stderr": 0.0469237132203465 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3296296296296296, + "acc_stderr": 0.028661201116524575, + "acc_norm": 0.3296296296296296, + "acc_norm_stderr": 0.028661201116524575 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2582781456953642, + "acc_stderr": 0.035737053147634576, + "acc_norm": 0.2582781456953642, + "acc_norm_stderr": 0.035737053147634576 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.582089552238806, + "acc_stderr": 0.03487558640462064, + "acc_norm": 0.582089552238806, + "acc_norm_stderr": 0.03487558640462064 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3988439306358382, + "acc_stderr": 0.03733626655383509, + "acc_norm": 0.3988439306358382, + "acc_norm_stderr": 0.03733626655383509 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.36772486772486773, + "acc_stderr": 0.02483383982556242, + "acc_norm": 0.36772486772486773, + "acc_norm_stderr": 0.02483383982556242 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.040166600304512336, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.040166600304512336 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939098, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939098 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.58, + "acc_stderr": 0.04960449637488584, + "acc_norm": 0.58, + "acc_norm_stderr": 0.04960449637488584 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.49421965317919075, + "acc_stderr": 0.026917296179149116, + "acc_norm": 0.49421965317919075, + "acc_norm_stderr": 0.026917296179149116 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.50920245398773, + "acc_stderr": 0.03927705600787443, + "acc_norm": 0.50920245398773, + 
"acc_norm_stderr": 0.03927705600787443 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.02764847787741332, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.02764847787741332 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.43523316062176165, + "acc_stderr": 0.03578038165008585, + "acc_norm": 0.43523316062176165, + "acc_norm_stderr": 0.03578038165008585 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2894736842105263, + "acc_stderr": 0.04266339443159394, + "acc_norm": 0.2894736842105263, + "acc_norm_stderr": 0.04266339443159394 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.44954128440366975, + "acc_stderr": 0.02132788141782337, + "acc_norm": 0.44954128440366975, + "acc_norm_stderr": 0.02132788141782337 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4126984126984127, + "acc_stderr": 0.04403438954768177, + "acc_norm": 0.4126984126984127, + "acc_norm_stderr": 0.04403438954768177 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4542483660130719, + "acc_stderr": 0.028509807802626567, + "acc_norm": 0.4542483660130719, + "acc_norm_stderr": 0.028509807802626567 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6611570247933884, + "acc_stderr": 0.0432076780753667, + "acc_norm": 0.6611570247933884, + "acc_norm_stderr": 0.0432076780753667 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3881578947368421, + "acc_stderr": 0.03965842097512744, + "acc_norm": 0.3881578947368421, + "acc_norm_stderr": 0.03965842097512744 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.35784313725490197, + "acc_stderr": 0.019393058402355442, + 
"acc_norm": 0.35784313725490197, + "acc_norm_stderr": 0.019393058402355442 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3120567375886525, + "acc_stderr": 0.027640120545169927, + "acc_norm": 0.3120567375886525, + "acc_norm_stderr": 0.027640120545169927 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.39285714285714285, + "acc_stderr": 0.046355501356099754, + "acc_norm": 0.39285714285714285, + "acc_norm_stderr": 0.046355501356099754 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3194444444444444, + "acc_stderr": 0.031798763421768524, + "acc_norm": 0.3194444444444444, + "acc_norm_stderr": 0.031798763421768524 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2223463687150838, + "acc_stderr": 0.013907189208156881, + "acc_norm": 0.2223463687150838, + "acc_norm_stderr": 0.013907189208156881 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.55, + "acc_stderr": 0.04999999999999999, + "acc_norm": 0.55, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.027678468642144707, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.027678468642144707 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5183673469387755, + "acc_stderr": 0.03198761546763128, + "acc_norm": 0.5183673469387755, + "acc_norm_stderr": 0.03198761546763128 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.540084388185654, + "acc_stderr": 0.03244246810187913, + "acc_norm": 0.540084388185654, + "acc_norm_stderr": 0.03244246810187913 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3259452411994785, + "acc_stderr": 0.01197150729498278, + "acc_norm": 0.3259452411994785, + "acc_norm_stderr": 0.01197150729498278 + }, + 
"harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.39705882352941174, + "acc_stderr": 0.034341311647191286, + "acc_norm": 0.39705882352941174, + "acc_norm_stderr": 0.034341311647191286 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3878787878787879, + "acc_stderr": 0.03804913653971012, + "acc_norm": 0.3878787878787879, + "acc_norm_stderr": 0.03804913653971012 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.31701346389228885, + "mc1_stderr": 0.016289203374403392, + "mc2": 0.5011144706409336, + "mc2_stderr": 0.015993955615013156 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3530106257378985, + "acc_stderr": 0.01643074598242713, + "acc_norm": 0.38134592680047225, + "acc_norm_stderr": 0.016699301768828084 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + 
"harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "cosmicvalor/mistral-orthogonalized", + "model_sha": "506b0b0702b88d39147e56edfa86e782a420fe24", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/cpm-ai/gemma-ko-v01/result_2024-05-23 00:44:36.json b/cpm-ai/gemma-ko-v01/result_2024-05-23 00:44:36.json new file mode 100644 index 
0000000000000000000000000000000000000000..98e2747b411c86e10625d86ac65d6cde64a656e7 --- /dev/null +++ b/cpm-ai/gemma-ko-v01/result_2024-05-23 00:44:36.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.22525597269624573, + "acc_stderr": 0.012207839995407312, + "acc_norm": 0.28498293515358364, + "acc_norm_stderr": 0.013191348179838795 + }, + "harness|ko_hellaswag|10": { + "acc": 0.2996415056761601, + "acc_stderr": 0.004571647137441118, + "acc_norm": 0.3408683529177455, + "acc_norm_stderr": 0.0047303245566241315 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.30994152046783624, + "acc_stderr": 0.035469769593931624, + "acc_norm": 0.30994152046783624, + "acc_norm_stderr": 0.035469769593931624 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.3106796116504854, + "acc_stderr": 0.0458212416016155, + "acc_norm": 0.3106796116504854, + "acc_norm_stderr": 0.0458212416016155 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3065134099616858, + "acc_stderr": 0.0164869528930415, + "acc_norm": 0.3065134099616858, + "acc_norm_stderr": 0.0164869528930415 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34074074074074073, + "acc_stderr": 0.040943762699967946, + "acc_norm": 0.34074074074074073, + "acc_norm_stderr": 0.040943762699967946 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2978723404255319, + "acc_stderr": 0.029896145682095462, + "acc_norm": 0.2978723404255319, + "acc_norm_stderr": 0.029896145682095462 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3313253012048193, + "acc_stderr": 0.036643147772880864, + "acc_norm": 0.3313253012048193, + "acc_norm_stderr": 0.036643147772880864 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.31189710610932475, + "acc_stderr": 0.02631185807185416, + "acc_norm": 0.31189710610932475, + "acc_norm_stderr": 
0.02631185807185416 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.2914798206278027, + "acc_stderr": 0.030500283176545913, + "acc_norm": 0.2914798206278027, + "acc_norm_stderr": 0.030500283176545913 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3969465648854962, + "acc_stderr": 0.04291135671009224, + "acc_norm": 0.3969465648854962, + "acc_norm_stderr": 0.04291135671009224 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.2676767676767677, + "acc_stderr": 0.031544498882702866, + "acc_norm": 0.2676767676767677, + "acc_norm_stderr": 0.031544498882702866 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3793103448275862, + "acc_stderr": 0.04043461861916747, + "acc_norm": 0.3793103448275862, + "acc_norm_stderr": 0.04043461861916747 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.16666666666666666, + "acc_stderr": 0.03708284662416546, + "acc_norm": 0.16666666666666666, + "acc_norm_stderr": 0.03708284662416546 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.2815126050420168, + "acc_stderr": 0.029213549414372143, + "acc_norm": 0.2815126050420168, + "acc_norm_stderr": 0.029213549414372143 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.28717948717948716, + "acc_stderr": 0.02293992541853061, + "acc_norm": 0.28717948717948716, + "acc_norm_stderr": 0.02293992541853061 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.19, + "acc_stderr": 0.03942772444036623, + "acc_norm": 0.19, + "acc_norm_stderr": 0.03942772444036623 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.04766075165356461, + "acc_norm": 0.4166666666666667, + 
"acc_norm_stderr": 0.04766075165356461 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2512315270935961, + "acc_stderr": 0.030516530732694436, + "acc_norm": 0.2512315270935961, + "acc_norm_stderr": 0.030516530732694436 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3161290322580645, + "acc_stderr": 0.026450874489042774, + "acc_norm": 0.3161290322580645, + "acc_norm_stderr": 0.026450874489042774 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.46153846153846156, + "acc_stderr": 0.03265903381186195, + "acc_norm": 0.46153846153846156, + "acc_norm_stderr": 0.03265903381186195 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.32075471698113206, + "acc_stderr": 0.028727502957880267, + "acc_norm": 0.32075471698113206, + "acc_norm_stderr": 0.028727502957880267 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.3, + "acc_stderr": 0.04389311454644286, + "acc_norm": 0.3, + "acc_norm_stderr": 0.04389311454644286 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.23703703703703705, + "acc_stderr": 0.025928876132766124, + "acc_norm": 0.23703703703703705, + "acc_norm_stderr": 0.025928876132766124 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.24503311258278146, + "acc_stderr": 0.03511807571804724, + "acc_norm": 0.24503311258278146, + "acc_norm_stderr": 0.03511807571804724 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.38308457711442784, + "acc_stderr": 0.034375193373382504, + "acc_norm": 0.38308457711442784, + "acc_norm_stderr": 0.034375193373382504 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.26011560693641617, + "acc_stderr": 0.03345036916788991, + "acc_norm": 0.26011560693641617, + "acc_norm_stderr": 0.03345036916788991 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.19047619047619047, + "acc_stderr": 0.020223880317923854, + "acc_norm": 0.19047619047619047, + "acc_norm_stderr": 0.020223880317923854 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 
0.2777777777777778, + "acc_stderr": 0.03745554791462457, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.03745554791462457 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.35260115606936415, + "acc_stderr": 0.025722802200895813, + "acc_norm": 0.35260115606936415, + "acc_norm_stderr": 0.025722802200895813 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2822085889570552, + "acc_stderr": 0.03536117886664743, + "acc_norm": 0.2822085889570552, + "acc_norm_stderr": 0.03536117886664743 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.026571483480719974, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.026571483480719974 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.3005181347150259, + "acc_stderr": 0.0330881859441575, + "acc_norm": 0.3005181347150259, + "acc_norm_stderr": 0.0330881859441575 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.04185774424022057, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.04185774424022057 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.28256880733944956, + "acc_stderr": 0.019304243497707152, + "acc_norm": 0.28256880733944956, + "acc_norm_stderr": 0.019304243497707152 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3253968253968254, + "acc_stderr": 0.041905964388711366, + "acc_norm": 0.3253968253968254, + "acc_norm_stderr": 0.041905964388711366 + }, + "harness|ko_mmlu_nutrition|5": { + 
"acc": 0.34967320261437906, + "acc_stderr": 0.0273053080762747, + "acc_norm": 0.34967320261437906, + "acc_norm_stderr": 0.0273053080762747 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.47107438016528924, + "acc_stderr": 0.04556710331269498, + "acc_norm": 0.47107438016528924, + "acc_norm_stderr": 0.04556710331269498 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.034597776068105365, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.034597776068105365 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2826797385620915, + "acc_stderr": 0.018217269552053435, + "acc_norm": 0.2826797385620915, + "acc_norm_stderr": 0.018217269552053435 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2695035460992908, + "acc_stderr": 0.026469036818590634, + "acc_norm": 0.2695035460992908, + "acc_norm_stderr": 0.026469036818590634 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.1875, + "acc_stderr": 0.0370468111477387, + "acc_norm": 0.1875, + "acc_norm_stderr": 0.0370468111477387 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2037037037037037, + "acc_stderr": 0.027467401804057986, + "acc_norm": 0.2037037037037037, + "acc_norm_stderr": 0.027467401804057986 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23687150837988827, + "acc_stderr": 0.014219570788103987, + "acc_norm": 0.23687150837988827, + "acc_norm_stderr": 0.014219570788103987 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720683, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720683 + }, + 
"harness|ko_mmlu_professional_medicine|5": { + "acc": 0.21691176470588236, + "acc_stderr": 0.02503584522771125, + "acc_norm": 0.21691176470588236, + "acc_norm_stderr": 0.02503584522771125 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2816326530612245, + "acc_stderr": 0.02879518557429129, + "acc_norm": 0.2816326530612245, + "acc_norm_stderr": 0.02879518557429129 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.33755274261603374, + "acc_stderr": 0.030781549102026205, + "acc_norm": 0.33755274261603374, + "acc_norm_stderr": 0.030781549102026205 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.27249022164276404, + "acc_stderr": 0.011371658294311523, + "acc_norm": 0.27249022164276404, + "acc_norm_stderr": 0.011371658294311523 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2696078431372549, + "acc_stderr": 0.03114557065948678, + "acc_norm": 0.2696078431372549, + "acc_norm_stderr": 0.03114557065948678 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.32727272727272727, + "acc_stderr": 0.036639749943912434, + "acc_norm": 0.32727272727272727, + "acc_norm_stderr": 0.036639749943912434 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2827417380660955, + "mc1_stderr": 0.015764770836777308, + "mc2": 0.460721475426754, + "mc2_stderr": 0.016207934283947855 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.24321133412042503, + "acc_stderr": 0.014750068360453264, + "acc_norm": 0.30932703659976385, + "acc_norm_stderr": 0.015891320505520893 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + 
"harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + 
"harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "cpm-ai/gemma-ko-v01", + "model_sha": "a905ed3019a197e37d19172641dd4ba2b10a56a3", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/currybab/gemma-2b-ko-dev-pb/result_2024-03-26 07:34:18.json b/currybab/gemma-2b-ko-dev-pb/result_2024-03-26 07:34:18.json new file mode 100644 index 0000000000000000000000000000000000000000..14064d64b907bbddbfce82627713cc736fdcb7bd --- /dev/null +++ b/currybab/gemma-2b-ko-dev-pb/result_2024-03-26 07:34:18.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.31569965870307165, + "acc_stderr": 0.013582571095815291, + "acc_norm": 0.3575085324232082, + "acc_norm_stderr": 0.014005494275916573 + }, + "harness|ko_hellaswag|10": { + "acc": 0.36227843059151565, + "acc_stderr": 0.004796763521045229, + "acc_norm": 0.4538936466839275, + "acc_norm_stderr": 0.004968521608065472 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.25146198830409355, + "acc_stderr": 0.033275044238468436, + "acc_norm": 0.25146198830409355, + "acc_norm_stderr": 0.033275044238468436 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2524271844660194, + "acc_stderr": 0.04301250399690878, + "acc_norm": 0.2524271844660194, + "acc_norm_stderr": 0.04301250399690878 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2656449553001277, + "acc_stderr": 0.015794302487888715, + "acc_norm": 0.2656449553001277, + "acc_norm_stderr": 0.015794302487888715 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.038201699145179055, + "acc_norm": 
0.26666666666666666, + "acc_norm_stderr": 0.038201699145179055 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2297872340425532, + "acc_stderr": 0.027501752944412424, + "acc_norm": 0.2297872340425532, + "acc_norm_stderr": 0.027501752944412424 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3313253012048193, + "acc_stderr": 0.03664314777288085, + "acc_norm": 0.3313253012048193, + "acc_norm_stderr": 0.03664314777288085 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.26688102893890675, + "acc_stderr": 0.02512263760881664, + "acc_norm": 0.26688102893890675, + "acc_norm_stderr": 0.02512263760881664 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3004484304932735, + "acc_stderr": 0.030769352008229143, + "acc_norm": 0.3004484304932735, + "acc_norm_stderr": 0.030769352008229143 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2748091603053435, + "acc_stderr": 0.039153454088478354, + "acc_norm": 0.2748091603053435, + "acc_norm_stderr": 0.039153454088478354 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.25757575757575757, + "acc_stderr": 0.031156269519646836, + "acc_norm": 0.25757575757575757, + "acc_norm_stderr": 0.031156269519646836 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2896551724137931, + "acc_stderr": 0.037800192304380156, + "acc_norm": 0.2896551724137931, + "acc_norm_stderr": 0.037800192304380156 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.04023382273617747, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.04023382273617747 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.28991596638655465, + 
"acc_stderr": 0.029472485833136074, + "acc_norm": 0.28991596638655465, + "acc_norm_stderr": 0.029472485833136074 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2923076923076923, + "acc_stderr": 0.023060438380857744, + "acc_norm": 0.2923076923076923, + "acc_norm_stderr": 0.023060438380857744 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.23148148148148148, + "acc_stderr": 0.04077494709252626, + "acc_norm": 0.23148148148148148, + "acc_norm_stderr": 0.04077494709252626 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.22660098522167488, + "acc_stderr": 0.02945486383529297, + "acc_norm": 0.22660098522167488, + "acc_norm_stderr": 0.02945486383529297 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2806451612903226, + "acc_stderr": 0.0255606047210229, + "acc_norm": 0.2806451612903226, + "acc_norm_stderr": 0.0255606047210229 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.3247863247863248, + "acc_stderr": 0.03067902276549883, + "acc_norm": 0.3247863247863248, + "acc_norm_stderr": 0.03067902276549883 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2, + "acc_stderr": 0.024618298195866514, + "acc_norm": 0.2, + "acc_norm_stderr": 0.024618298195866514 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.37272727272727274, + "acc_stderr": 0.046313813194254635, + "acc_norm": 0.37272727272727274, + "acc_norm_stderr": 0.046313813194254635 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24814814814814815, + "acc_stderr": 0.026335739404055803, + "acc_norm": 0.24814814814814815, + "acc_norm_stderr": 0.026335739404055803 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 
0.271523178807947, + "acc_stderr": 0.03631329803969654, + "acc_norm": 0.271523178807947, + "acc_norm_stderr": 0.03631329803969654 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.29850746268656714, + "acc_stderr": 0.03235743789355041, + "acc_norm": 0.29850746268656714, + "acc_norm_stderr": 0.03235743789355041 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.1907514450867052, + "acc_stderr": 0.029957851329869334, + "acc_norm": 0.1907514450867052, + "acc_norm_stderr": 0.029957851329869334 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.24867724867724866, + "acc_stderr": 0.02226181769240017, + "acc_norm": 0.24867724867724866, + "acc_norm_stderr": 0.02226181769240017 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3402777777777778, + "acc_stderr": 0.039621355734862175, + "acc_norm": 0.3402777777777778, + "acc_norm_stderr": 0.039621355734862175 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2514450867052023, + "acc_stderr": 0.023357365785874044, + "acc_norm": 0.2514450867052023, + "acc_norm_stderr": 0.023357365785874044 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2883435582822086, + "acc_stderr": 0.03559039531617342, + "acc_norm": 0.2883435582822086, + "acc_norm_stderr": 0.03559039531617342 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2932098765432099, + "acc_stderr": 0.025329888171900926, + "acc_norm": 0.2932098765432099, + "acc_norm_stderr": 0.025329888171900926 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": 
{ + "acc": 0.26424870466321243, + "acc_stderr": 0.03182155050916647, + "acc_norm": 0.26424870466321243, + "acc_norm_stderr": 0.03182155050916647 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.040969851398436695, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.040969851398436695 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.25504587155963304, + "acc_stderr": 0.018688500856535846, + "acc_norm": 0.25504587155963304, + "acc_norm_stderr": 0.018688500856535846 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.042163702135578345, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.042163702135578345 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.28104575163398693, + "acc_stderr": 0.02573885479781873, + "acc_norm": 0.28104575163398693, + "acc_norm_stderr": 0.02573885479781873 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2066115702479339, + "acc_stderr": 0.036959801280988254, + "acc_norm": 0.2066115702479339, + "acc_norm_stderr": 0.036959801280988254 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.24342105263157895, + "acc_stderr": 0.034923496688842384, + "acc_norm": 0.24342105263157895, + "acc_norm_stderr": 0.034923496688842384 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.238562091503268, + "acc_stderr": 0.0172423858287796, + "acc_norm": 0.238562091503268, + "acc_norm_stderr": 0.0172423858287796 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2695035460992908, + "acc_stderr": 0.026469036818590627, + "acc_norm": 0.2695035460992908, + "acc_norm_stderr": 0.026469036818590627 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.30357142857142855, + "acc_stderr": 0.04364226155841044, + "acc_norm": 0.30357142857142855, + "acc_norm_stderr": 
0.04364226155841044 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.33796296296296297, + "acc_stderr": 0.03225941352631295, + "acc_norm": 0.33796296296296297, + "acc_norm_stderr": 0.03225941352631295 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.26256983240223464, + "acc_stderr": 0.014716824273017763, + "acc_norm": 0.26256983240223464, + "acc_norm_stderr": 0.014716824273017763 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.23, + "acc_stderr": 0.042295258468165065, + "acc_norm": 0.23, + "acc_norm_stderr": 0.042295258468165065 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.29044117647058826, + "acc_stderr": 0.027576468622740522, + "acc_norm": 0.29044117647058826, + "acc_norm_stderr": 0.027576468622740522 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.23673469387755103, + "acc_stderr": 0.02721283588407316, + "acc_norm": 0.23673469387755103, + "acc_norm_stderr": 0.02721283588407316 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.26582278481012656, + "acc_stderr": 0.028756799629658342, + "acc_norm": 0.26582278481012656, + "acc_norm_stderr": 0.028756799629658342 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.26597131681877445, + "acc_stderr": 0.011285033165551276, + "acc_norm": 0.26597131681877445, + "acc_norm_stderr": 0.011285033165551276 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.25, + "acc_stderr": 0.03039153369274154, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03039153369274154 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.24242424242424243, + "acc_stderr": 0.033464098810559534, + "acc_norm": 0.24242424242424243, + "acc_norm_stderr": 0.033464098810559534 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2729498164014688, + "mc1_stderr": 
0.015594753632006518, + "mc2": 0.4351036475713623, + "mc2_stderr": 0.01572968169308527 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3010625737898465, + "acc_stderr": 0.015771113299945457, + "acc_norm": 0.3600944510035419, + "acc_norm_stderr": 0.01650368672044007 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + 
"harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "currybab/gemma-2b-ko-dev-pb", + "model_sha": "5c1619d624d9ef6889ff86d4c59fccea132e62de", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/currybab/testgf/result_2024-04-08 08:23:11.json b/currybab/testgf/result_2024-04-08 08:23:11.json new file mode 100644 index 0000000000000000000000000000000000000000..5ebbc9a2d328acdeb021d66ac57fc258c7f7be86 --- /dev/null +++ b/currybab/testgf/result_2024-04-08 08:23:11.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.24829351535836178, + "acc_stderr": 0.012624912868089783, + "acc_norm": 0.2935153583617747, + "acc_norm_stderr": 0.013307250444941115 + }, + "harness|ko_hellaswag|10": { + "acc": 0.31318462457677754, + "acc_stderr": 0.004628409084218762, + "acc_norm": 0.36885082652858, + "acc_norm_stderr": 0.0048150733340005985 + }, 
+ "harness|ko_mmlu_world_religions|5": { + "acc": 0.30994152046783624, + "acc_stderr": 0.03546976959393163, + "acc_norm": 0.30994152046783624, + "acc_norm_stderr": 0.03546976959393163 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2912621359223301, + "acc_stderr": 0.044986763205729224, + "acc_norm": 0.2912621359223301, + "acc_norm_stderr": 0.044986763205729224 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.280970625798212, + "acc_stderr": 0.016073127851221232, + "acc_norm": 0.280970625798212, + "acc_norm_stderr": 0.016073127851221232 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.038201699145179055, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.038201699145179055 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3872340425531915, + "acc_stderr": 0.03184389265339526, + "acc_norm": 0.3872340425531915, + "acc_norm_stderr": 0.03184389265339526 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3313253012048193, + "acc_stderr": 0.03664314777288087, + "acc_norm": 0.3313253012048193, + "acc_norm_stderr": 0.03664314777288087 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3183279742765273, + "acc_stderr": 0.026457225067811025, + "acc_norm": 0.3183279742765273, + "acc_norm_stderr": 0.026457225067811025 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.31390134529147984, + "acc_stderr": 0.031146796482972465, + "acc_norm": 0.31390134529147984, + "acc_norm_stderr": 0.031146796482972465 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.31297709923664124, + "acc_stderr": 0.04066962905677697, + "acc_norm": 0.31297709923664124, + "acc_norm_stderr": 0.04066962905677697 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + 
"harness|ko_mmlu_high_school_geography|5": { + "acc": 0.26262626262626265, + "acc_stderr": 0.03135305009533085, + "acc_norm": 0.26262626262626265, + "acc_norm_stderr": 0.03135305009533085 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3931034482758621, + "acc_stderr": 0.0407032901370707, + "acc_norm": 0.3931034482758621, + "acc_norm_stderr": 0.0407032901370707 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.17647058823529413, + "acc_stderr": 0.03793281185307809, + "acc_norm": 0.17647058823529413, + "acc_norm_stderr": 0.03793281185307809 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3697478991596639, + "acc_stderr": 0.031357095996135904, + "acc_norm": 0.3697478991596639, + "acc_norm_stderr": 0.031357095996135904 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.29743589743589743, + "acc_stderr": 0.02317740813146592, + "acc_norm": 0.29743589743589743, + "acc_norm_stderr": 0.02317740813146592 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04557239513497752, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04557239513497752 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.27586206896551724, + "acc_stderr": 0.031447125816782426, + "acc_norm": 0.27586206896551724, + "acc_norm_stderr": 0.031447125816782426 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.29354838709677417, + "acc_stderr": 0.025906087021319295, + "acc_norm": 0.29354838709677417, + "acc_norm_stderr": 0.025906087021319295 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.44017094017094016, + "acc_stderr": 0.032520741720630506, + "acc_norm": 
0.44017094017094016, + "acc_norm_stderr": 0.032520741720630506 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.35094339622641507, + "acc_stderr": 0.029373646253234686, + "acc_norm": 0.35094339622641507, + "acc_norm_stderr": 0.029373646253234686 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.33636363636363636, + "acc_stderr": 0.04525393596302506, + "acc_norm": 0.33636363636363636, + "acc_norm_stderr": 0.04525393596302506 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.29259259259259257, + "acc_stderr": 0.027738969632176088, + "acc_norm": 0.29259259259259257, + "acc_norm_stderr": 0.027738969632176088 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2781456953642384, + "acc_stderr": 0.03658603262763743, + "acc_norm": 0.2781456953642384, + "acc_norm_stderr": 0.03658603262763743 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.3681592039800995, + "acc_stderr": 0.03410410565495301, + "acc_norm": 0.3681592039800995, + "acc_norm_stderr": 0.03410410565495301 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2658959537572254, + "acc_stderr": 0.033687629322594295, + "acc_norm": 0.2658959537572254, + "acc_norm_stderr": 0.033687629322594295 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.28835978835978837, + "acc_stderr": 0.023330654054535886, + "acc_norm": 0.28835978835978837, + "acc_norm_stderr": 0.023330654054535886 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2847222222222222, + "acc_stderr": 0.037738099906869355, + "acc_norm": 0.2847222222222222, + "acc_norm_stderr": 0.037738099906869355 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.315028901734104, + 
"acc_stderr": 0.025009313790069713, + "acc_norm": 0.315028901734104, + "acc_norm_stderr": 0.025009313790069713 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.26993865030674846, + "acc_stderr": 0.034878251684978906, + "acc_norm": 0.26993865030674846, + "acc_norm_stderr": 0.034878251684978906 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3271604938271605, + "acc_stderr": 0.026105673861409818, + "acc_norm": 0.3271604938271605, + "acc_norm_stderr": 0.026105673861409818 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.24870466321243523, + "acc_stderr": 0.031195840877700304, + "acc_norm": 0.24870466321243523, + "acc_norm_stderr": 0.031195840877700304 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.04049339297748141, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.04049339297748141 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3174311926605505, + "acc_stderr": 0.019957152198460504, + "acc_norm": 0.3174311926605505, + "acc_norm_stderr": 0.019957152198460504 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.04006168083848877, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.04006168083848877 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.38562091503267976, + "acc_stderr": 0.027870745278290327, + "acc_norm": 0.38562091503267976, + "acc_norm_stderr": 0.027870745278290327 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421255, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421255 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.4049586776859504, + "acc_stderr": 0.044811377559424694, + "acc_norm": 0.4049586776859504, + "acc_norm_stderr": 0.044811377559424694 + }, + 
"harness|ko_mmlu_astronomy|5": { + "acc": 0.26973684210526316, + "acc_stderr": 0.03611780560284898, + "acc_norm": 0.26973684210526316, + "acc_norm_stderr": 0.03611780560284898 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2761437908496732, + "acc_stderr": 0.018087276935663137, + "acc_norm": 0.2761437908496732, + "acc_norm_stderr": 0.018087276935663137 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2695035460992908, + "acc_stderr": 0.026469036818590627, + "acc_norm": 0.2695035460992908, + "acc_norm_stderr": 0.026469036818590627 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.30357142857142855, + "acc_stderr": 0.04364226155841044, + "acc_norm": 0.30357142857142855, + "acc_norm_stderr": 0.04364226155841044 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.028353212866863434, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.028353212866863434 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2446927374301676, + "acc_stderr": 0.014378169884098424, + "acc_norm": 0.2446927374301676, + "acc_norm_stderr": 0.014378169884098424 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.375, + "acc_stderr": 0.029408372932278746, + "acc_norm": 0.375, + "acc_norm_stderr": 0.029408372932278746 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.39183673469387753, + "acc_stderr": 0.03125127591089165, + "acc_norm": 0.39183673469387753, + "acc_norm_stderr": 0.03125127591089165 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.29535864978902954, + "acc_stderr": 0.02969633871342289, + "acc_norm": 
0.29535864978902954, + "acc_norm_stderr": 0.02969633871342289 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.28292046936114734, + "acc_stderr": 0.011503891323188976, + "acc_norm": 0.28292046936114734, + "acc_norm_stderr": 0.011503891323188976 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2696078431372549, + "acc_stderr": 0.03114557065948678, + "acc_norm": 0.2696078431372549, + "acc_norm_stderr": 0.03114557065948678 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3212121212121212, + "acc_stderr": 0.036462049632538115, + "acc_norm": 0.3212121212121212, + "acc_norm_stderr": 0.036462049632538115 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2778457772337821, + "mc1_stderr": 0.015680929364024626, + "mc2": 0.45249267433918944, + "mc2_stderr": 0.016019280038075977 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.21723730814639905, + "acc_stderr": 0.014177416034265046, + "acc_norm": 0.2798110979929162, + "acc_norm_stderr": 0.015433715795427745 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + 
"harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "currybab/testgf", + "model_sha": "6e57d949f2c3e0878433689581e75d81cc4f60e8", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + 
"max_samples": null + } +} \ No newline at end of file diff --git a/daebum/LoRA-Submit-Test/result_2024-01-28 10:00:55.json b/daebum/LoRA-Submit-Test/result_2024-01-28 10:00:55.json new file mode 100644 index 0000000000000000000000000000000000000000..4a7c6af4289c357b9271f361f0b2e05083d42f75 --- /dev/null +++ b/daebum/LoRA-Submit-Test/result_2024-01-28 10:00:55.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.44283276450511944, + "acc_stderr": 0.014515573873348902, + "acc_norm": 0.4906143344709898, + "acc_norm_stderr": 0.014608816322065 + }, + "harness|ko_hellaswag|10": { + "acc": 0.441346345349532, + "acc_stderr": 0.004955330277304267, + "acc_norm": 0.6038637721569409, + "acc_norm_stderr": 0.004880937933163287 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.631578947368421, + "acc_stderr": 0.036996580176568775, + "acc_norm": 0.631578947368421, + "acc_norm_stderr": 0.036996580176568775 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6407766990291263, + "acc_stderr": 0.047504583990416946, + "acc_norm": 0.6407766990291263, + "acc_norm_stderr": 0.047504583990416946 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6947637292464879, + "acc_stderr": 0.016467711947635126, + "acc_norm": 0.6947637292464879, + "acc_norm_stderr": 0.016467711947635126 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45925925925925926, + "acc_stderr": 0.04304979692464245, + "acc_norm": 0.45925925925925926, + "acc_norm_stderr": 0.04304979692464245 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.46382978723404256, + "acc_stderr": 0.032600385118357715, + "acc_norm": 0.46382978723404256, + "acc_norm_stderr": 0.032600385118357715 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.463855421686747, + "acc_stderr": 0.03882310850890594, + "acc_norm": 0.463855421686747, + 
"acc_norm_stderr": 0.03882310850890594 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5691318327974276, + "acc_stderr": 0.028125340983972714, + "acc_norm": 0.5691318327974276, + "acc_norm_stderr": 0.028125340983972714 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5605381165919282, + "acc_stderr": 0.03331092511038179, + "acc_norm": 0.5605381165919282, + "acc_norm_stderr": 0.03331092511038179 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5419847328244275, + "acc_stderr": 0.04369802690578756, + "acc_norm": 0.5419847328244275, + "acc_norm_stderr": 0.04369802690578756 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956913, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956913 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6414141414141414, + "acc_stderr": 0.03416903640391521, + "acc_norm": 0.6414141414141414, + "acc_norm_stderr": 0.03416903640391521 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.496551724137931, + "acc_stderr": 0.041665675771015785, + "acc_norm": 0.496551724137931, + "acc_norm_stderr": 0.041665675771015785 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.04280105837364395, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.04280105837364395 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4831932773109244, + "acc_stderr": 0.03246013680375308, + "acc_norm": 0.4831932773109244, + "acc_norm_stderr": 0.03246013680375308 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5230769230769231, + "acc_stderr": 0.025323990861736253, + "acc_norm": 0.5230769230769231, + "acc_norm_stderr": 0.025323990861736253 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.63, + "acc_stderr": 0.04852365870939098, + "acc_norm": 0.63, + "acc_norm_stderr": 0.04852365870939098 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 
0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5462962962962963, + "acc_stderr": 0.04812917324536823, + "acc_norm": 0.5462962962962963, + "acc_norm_stderr": 0.04812917324536823 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.35467980295566504, + "acc_stderr": 0.033661244890514495, + "acc_norm": 0.35467980295566504, + "acc_norm_stderr": 0.033661244890514495 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5580645161290323, + "acc_stderr": 0.028251557906849755, + "acc_norm": 0.5580645161290323, + "acc_norm_stderr": 0.028251557906849755 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7735042735042735, + "acc_stderr": 0.027421007295392923, + "acc_norm": 0.7735042735042735, + "acc_norm_stderr": 0.027421007295392923 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5320754716981132, + "acc_stderr": 0.030709486992556545, + "acc_norm": 0.5320754716981132, + "acc_norm_stderr": 0.030709486992556545 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5363636363636364, + "acc_stderr": 0.04776449162396197, + "acc_norm": 0.5363636363636364, + "acc_norm_stderr": 0.04776449162396197 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.337037037037037, + "acc_stderr": 0.02882088466625325, + "acc_norm": 0.337037037037037, + "acc_norm_stderr": 0.02882088466625325 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33774834437086093, + "acc_stderr": 0.038615575462551684, + "acc_norm": 0.33774834437086093, + "acc_norm_stderr": 0.038615575462551684 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.681592039800995, + "acc_stderr": 0.032941184790540964, + "acc_norm": 0.681592039800995, + "acc_norm_stderr": 0.032941184790540964 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.42196531791907516, + "acc_stderr": 0.037657466938651504, + "acc_norm": 0.42196531791907516, + "acc_norm_stderr": 0.037657466938651504 + }, + "harness|ko_mmlu_elementary_mathematics|5": { 
+ "acc": 0.3253968253968254, + "acc_stderr": 0.024130158299762606, + "acc_norm": 0.3253968253968254, + "acc_norm_stderr": 0.024130158299762606 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4930555555555556, + "acc_stderr": 0.04180806750294938, + "acc_norm": 0.4930555555555556, + "acc_norm_stderr": 0.04180806750294938 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.68, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.68, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5433526011560693, + "acc_stderr": 0.026817718130348927, + "acc_norm": 0.5433526011560693, + "acc_norm_stderr": 0.026817718130348927 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.49079754601226994, + "acc_stderr": 0.03927705600787443, + "acc_norm": 0.49079754601226994, + "acc_norm_stderr": 0.03927705600787443 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.49691358024691357, + "acc_stderr": 0.027820214158594377, + "acc_norm": 0.49691358024691357, + "acc_norm_stderr": 0.027820214158594377 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6373056994818653, + "acc_stderr": 0.034697137917043715, + "acc_norm": 0.6373056994818653, + "acc_norm_stderr": 0.034697137917043715 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.37719298245614036, + "acc_stderr": 0.04559522141958216, + "acc_norm": 0.37719298245614036, + "acc_norm_stderr": 0.04559522141958216 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6, + "acc_stderr": 0.021004201260420078, + "acc_norm": 0.6, + "acc_norm_stderr": 0.021004201260420078 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 
0.2777777777777778, + "acc_stderr": 0.04006168083848877, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.04006168083848877 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5098039215686274, + "acc_stderr": 0.028624412550167958, + "acc_norm": 0.5098039215686274, + "acc_norm_stderr": 0.028624412550167958 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.52, + "acc_stderr": 0.05021167315686779, + "acc_norm": 0.52, + "acc_norm_stderr": 0.05021167315686779 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6611570247933884, + "acc_stderr": 0.043207678075366705, + "acc_norm": 0.6611570247933884, + "acc_norm_stderr": 0.043207678075366705 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5394736842105263, + "acc_stderr": 0.04056242252249034, + "acc_norm": 0.5394736842105263, + "acc_norm_stderr": 0.04056242252249034 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.020087362076702857, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.020087362076702857 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3617021276595745, + "acc_stderr": 0.028663820147199495, + "acc_norm": 0.3617021276595745, + "acc_norm_stderr": 0.028663820147199495 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.042878587513404544, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.042878587513404544 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.03214952147802749, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.03214952147802749 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2446927374301676, + "acc_stderr": 0.01437816988409841, + "acc_norm": 0.2446927374301676, + "acc_norm_stderr": 0.01437816988409841 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.04999999999999999, + "acc_norm": 0.45, + "acc_norm_stderr": 0.04999999999999999 + }, + 
"harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.62, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.62, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.39705882352941174, + "acc_stderr": 0.02972215209928005, + "acc_norm": 0.39705882352941174, + "acc_norm_stderr": 0.02972215209928005 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5795918367346938, + "acc_stderr": 0.031601069934496004, + "acc_norm": 0.5795918367346938, + "acc_norm_stderr": 0.031601069934496004 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7046413502109705, + "acc_stderr": 0.029696338713422882, + "acc_norm": 0.7046413502109705, + "acc_norm_stderr": 0.029696338713422882 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3396349413298566, + "acc_stderr": 0.012095592506931969, + "acc_norm": 0.3396349413298566, + "acc_norm_stderr": 0.012095592506931969 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5784313725490197, + "acc_stderr": 0.03465868196380762, + "acc_norm": 0.5784313725490197, + "acc_norm_stderr": 0.03465868196380762 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5636363636363636, + "acc_stderr": 0.03872592983524754, + "acc_norm": 0.5636363636363636, + "acc_norm_stderr": 0.03872592983524754 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2778457772337821, + "mc1_stderr": 0.015680929364024637, + "mc2": 0.433248954860134, + "mc2_stderr": 0.014923611219837554 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5844155844155844, + "acc_stderr": 0.016943586313076565, + "acc_norm": 0.602125147579693, + "acc_norm_stderr": 0.01682795905473339 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + 
"harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + 
"harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "daebum/LoRA-Submit-Test", + "model_sha": "2a9cac07cd1b4e26a59baedfbd448ccd2fe6b0e2", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/daebum/LoRA-Submit-Test/result_2024-01-28 10:05:59.json b/daebum/LoRA-Submit-Test/result_2024-01-28 10:05:59.json new file mode 100644 index 0000000000000000000000000000000000000000..4a7c6af4289c357b9271f361f0b2e05083d42f75 --- /dev/null +++ b/daebum/LoRA-Submit-Test/result_2024-01-28 10:05:59.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.44283276450511944, + "acc_stderr": 0.014515573873348902, + "acc_norm": 0.4906143344709898, + "acc_norm_stderr": 0.014608816322065 + }, + "harness|ko_hellaswag|10": { + "acc": 0.441346345349532, + "acc_stderr": 0.004955330277304267, + "acc_norm": 0.6038637721569409, + "acc_norm_stderr": 0.004880937933163287 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.631578947368421, + "acc_stderr": 0.036996580176568775, + "acc_norm": 0.631578947368421, + "acc_norm_stderr": 0.036996580176568775 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6407766990291263, + "acc_stderr": 0.047504583990416946, + "acc_norm": 0.6407766990291263, + "acc_norm_stderr": 0.047504583990416946 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6947637292464879, + "acc_stderr": 0.016467711947635126, + "acc_norm": 0.6947637292464879, + "acc_norm_stderr": 0.016467711947635126 + 
}, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45925925925925926, + "acc_stderr": 0.04304979692464245, + "acc_norm": 0.45925925925925926, + "acc_norm_stderr": 0.04304979692464245 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.46382978723404256, + "acc_stderr": 0.032600385118357715, + "acc_norm": 0.46382978723404256, + "acc_norm_stderr": 0.032600385118357715 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.463855421686747, + "acc_stderr": 0.03882310850890594, + "acc_norm": 0.463855421686747, + "acc_norm_stderr": 0.03882310850890594 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5691318327974276, + "acc_stderr": 0.028125340983972714, + "acc_norm": 0.5691318327974276, + "acc_norm_stderr": 0.028125340983972714 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5605381165919282, + "acc_stderr": 0.03331092511038179, + "acc_norm": 0.5605381165919282, + "acc_norm_stderr": 0.03331092511038179 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5419847328244275, + "acc_stderr": 0.04369802690578756, + "acc_norm": 0.5419847328244275, + "acc_norm_stderr": 0.04369802690578756 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956913, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956913 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6414141414141414, + "acc_stderr": 0.03416903640391521, + "acc_norm": 0.6414141414141414, + "acc_norm_stderr": 0.03416903640391521 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.496551724137931, + "acc_stderr": 0.041665675771015785, + "acc_norm": 0.496551724137931, + "acc_norm_stderr": 0.041665675771015785 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.04280105837364395, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.04280105837364395 + 
}, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4831932773109244, + "acc_stderr": 0.03246013680375308, + "acc_norm": 0.4831932773109244, + "acc_norm_stderr": 0.03246013680375308 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5230769230769231, + "acc_stderr": 0.025323990861736253, + "acc_norm": 0.5230769230769231, + "acc_norm_stderr": 0.025323990861736253 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.63, + "acc_stderr": 0.04852365870939098, + "acc_norm": 0.63, + "acc_norm_stderr": 0.04852365870939098 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5462962962962963, + "acc_stderr": 0.04812917324536823, + "acc_norm": 0.5462962962962963, + "acc_norm_stderr": 0.04812917324536823 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.35467980295566504, + "acc_stderr": 0.033661244890514495, + "acc_norm": 0.35467980295566504, + "acc_norm_stderr": 0.033661244890514495 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5580645161290323, + "acc_stderr": 0.028251557906849755, + "acc_norm": 0.5580645161290323, + "acc_norm_stderr": 0.028251557906849755 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7735042735042735, + "acc_stderr": 0.027421007295392923, + "acc_norm": 0.7735042735042735, + "acc_norm_stderr": 0.027421007295392923 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5320754716981132, + "acc_stderr": 0.030709486992556545, + "acc_norm": 0.5320754716981132, + "acc_norm_stderr": 0.030709486992556545 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5363636363636364, + "acc_stderr": 0.04776449162396197, + "acc_norm": 0.5363636363636364, + "acc_norm_stderr": 0.04776449162396197 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.337037037037037, + "acc_stderr": 0.02882088466625325, + "acc_norm": 
0.337037037037037, + "acc_norm_stderr": 0.02882088466625325 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33774834437086093, + "acc_stderr": 0.038615575462551684, + "acc_norm": 0.33774834437086093, + "acc_norm_stderr": 0.038615575462551684 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.681592039800995, + "acc_stderr": 0.032941184790540964, + "acc_norm": 0.681592039800995, + "acc_norm_stderr": 0.032941184790540964 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.42196531791907516, + "acc_stderr": 0.037657466938651504, + "acc_norm": 0.42196531791907516, + "acc_norm_stderr": 0.037657466938651504 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3253968253968254, + "acc_stderr": 0.024130158299762606, + "acc_norm": 0.3253968253968254, + "acc_norm_stderr": 0.024130158299762606 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4930555555555556, + "acc_stderr": 0.04180806750294938, + "acc_norm": 0.4930555555555556, + "acc_norm_stderr": 0.04180806750294938 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.68, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.68, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5433526011560693, + "acc_stderr": 0.026817718130348927, + "acc_norm": 0.5433526011560693, + "acc_norm_stderr": 0.026817718130348927 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.49079754601226994, + "acc_stderr": 0.03927705600787443, + "acc_norm": 0.49079754601226994, + "acc_norm_stderr": 0.03927705600787443 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.49691358024691357, + "acc_stderr": 0.027820214158594377, + "acc_norm": 0.49691358024691357, + "acc_norm_stderr": 0.027820214158594377 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 
0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6373056994818653, + "acc_stderr": 0.034697137917043715, + "acc_norm": 0.6373056994818653, + "acc_norm_stderr": 0.034697137917043715 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.37719298245614036, + "acc_stderr": 0.04559522141958216, + "acc_norm": 0.37719298245614036, + "acc_norm_stderr": 0.04559522141958216 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6, + "acc_stderr": 0.021004201260420078, + "acc_norm": 0.6, + "acc_norm_stderr": 0.021004201260420078 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.04006168083848877, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.04006168083848877 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5098039215686274, + "acc_stderr": 0.028624412550167958, + "acc_norm": 0.5098039215686274, + "acc_norm_stderr": 0.028624412550167958 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.52, + "acc_stderr": 0.05021167315686779, + "acc_norm": 0.52, + "acc_norm_stderr": 0.05021167315686779 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6611570247933884, + "acc_stderr": 0.043207678075366705, + "acc_norm": 0.6611570247933884, + "acc_norm_stderr": 0.043207678075366705 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5394736842105263, + "acc_stderr": 0.04056242252249034, + "acc_norm": 0.5394736842105263, + "acc_norm_stderr": 0.04056242252249034 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.020087362076702857, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.020087362076702857 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3617021276595745, + "acc_stderr": 0.028663820147199495, + "acc_norm": 0.3617021276595745, + "acc_norm_stderr": 0.028663820147199495 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 
0.2857142857142857, + "acc_stderr": 0.042878587513404544, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.042878587513404544 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.03214952147802749, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.03214952147802749 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2446927374301676, + "acc_stderr": 0.01437816988409841, + "acc_norm": 0.2446927374301676, + "acc_norm_stderr": 0.01437816988409841 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.04999999999999999, + "acc_norm": 0.45, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.62, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.62, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.39705882352941174, + "acc_stderr": 0.02972215209928005, + "acc_norm": 0.39705882352941174, + "acc_norm_stderr": 0.02972215209928005 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5795918367346938, + "acc_stderr": 0.031601069934496004, + "acc_norm": 0.5795918367346938, + "acc_norm_stderr": 0.031601069934496004 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7046413502109705, + "acc_stderr": 0.029696338713422882, + "acc_norm": 0.7046413502109705, + "acc_norm_stderr": 0.029696338713422882 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3396349413298566, + "acc_stderr": 0.012095592506931969, + "acc_norm": 0.3396349413298566, + "acc_norm_stderr": 0.012095592506931969 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5784313725490197, + "acc_stderr": 0.03465868196380762, + "acc_norm": 0.5784313725490197, + "acc_norm_stderr": 0.03465868196380762 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5636363636363636, + "acc_stderr": 0.03872592983524754, + "acc_norm": 0.5636363636363636, + 
"acc_norm_stderr": 0.03872592983524754 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2778457772337821, + "mc1_stderr": 0.015680929364024637, + "mc2": 0.433248954860134, + "mc2_stderr": 0.014923611219837554 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5844155844155844, + "acc_stderr": 0.016943586313076565, + "acc_norm": 0.602125147579693, + "acc_norm_stderr": 0.01682795905473339 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + 
"harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "daebum/LoRA-Submit-Test", + "model_sha": "2a9cac07cd1b4e26a59baedfbd448ccd2fe6b0e2", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/daekeun-ml/Llama-2-ko-DPO-13B/result_2023-10-31 08:54:20.json b/daekeun-ml/Llama-2-ko-DPO-13B/result_2023-10-31 08:54:20.json new file mode 100644 index 0000000000000000000000000000000000000000..f409b1bb657917939713fb28400224191158c06c --- /dev/null +++ b/daekeun-ml/Llama-2-ko-DPO-13B/result_2023-10-31 08:54:20.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.43600682593856654, + "acc_stderr": 0.014491225699230916, + "acc_norm": 0.47525597269624575, + "acc_norm_stderr": 0.01459348769493774 + }, + "harness|ko_hellaswag|10": { + 
"acc": 0.4387572196773551, + "acc_stderr": 0.004952209831856584, + "acc_norm": 0.5827524397530373, + "acc_norm_stderr": 0.004920967192255291 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.49707602339181284, + "acc_stderr": 0.03834759370936839, + "acc_norm": 0.49707602339181284, + "acc_norm_stderr": 0.03834759370936839 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5242718446601942, + "acc_stderr": 0.049449010929737795, + "acc_norm": 0.5242718446601942, + "acc_norm_stderr": 0.049449010929737795 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5478927203065134, + "acc_stderr": 0.017797751493865636, + "acc_norm": 0.5478927203065134, + "acc_norm_stderr": 0.017797751493865636 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4888888888888889, + "acc_stderr": 0.04318275491977976, + "acc_norm": 0.4888888888888889, + "acc_norm_stderr": 0.04318275491977976 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421255, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421255 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4, + "acc_stderr": 0.03202563076101736, + "acc_norm": 0.4, + "acc_norm_stderr": 0.03202563076101736 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4036144578313253, + "acc_stderr": 0.03819486140758398, + "acc_norm": 0.4036144578313253, + "acc_norm_stderr": 0.03819486140758398 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5112540192926045, + "acc_stderr": 0.028390897396863533, + "acc_norm": 0.5112540192926045, + "acc_norm_stderr": 0.028390897396863533 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5291479820627802, + "acc_stderr": 0.03350073248773404, + "acc_norm": 0.5291479820627802, + "acc_norm_stderr": 0.03350073248773404 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4351145038167939, + "acc_stderr": 0.04348208051644858, + "acc_norm": 0.4351145038167939, + "acc_norm_stderr": 0.04348208051644858 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.39, + 
"acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5909090909090909, + "acc_stderr": 0.03502975799413008, + "acc_norm": 0.5909090909090909, + "acc_norm_stderr": 0.03502975799413008 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3793103448275862, + "acc_stderr": 0.04043461861916747, + "acc_norm": 0.3793103448275862, + "acc_norm_stderr": 0.04043461861916747 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237655, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237655 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.49159663865546216, + "acc_stderr": 0.0324739027656967, + "acc_norm": 0.49159663865546216, + "acc_norm_stderr": 0.0324739027656967 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.45384615384615384, + "acc_stderr": 0.025242770987126167, + "acc_norm": 0.45384615384615384, + "acc_norm_stderr": 0.025242770987126167 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5370370370370371, + "acc_stderr": 0.04820403072760627, + "acc_norm": 0.5370370370370371, + "acc_norm_stderr": 0.04820403072760627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39408866995073893, + "acc_stderr": 0.034381579670365446, + "acc_norm": 0.39408866995073893, + "acc_norm_stderr": 0.034381579670365446 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.46774193548387094, + "acc_stderr": 0.028384747788813326, + "acc_norm": 0.46774193548387094, + "acc_norm_stderr": 0.028384747788813326 + }, + 
"harness|ko_mmlu_marketing|5": { + "acc": 0.6495726495726496, + "acc_stderr": 0.03125610824421881, + "acc_norm": 0.6495726495726496, + "acc_norm_stderr": 0.03125610824421881 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.45660377358490567, + "acc_stderr": 0.03065674869673943, + "acc_norm": 0.45660377358490567, + "acc_norm_stderr": 0.03065674869673943 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.04769300568972744, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.04769300568972744 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2518518518518518, + "acc_stderr": 0.026466117538959916, + "acc_norm": 0.2518518518518518, + "acc_norm_stderr": 0.026466117538959916 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + "acc_stderr": 0.03684881521389024, + "acc_norm": 0.2847682119205298, + "acc_norm_stderr": 0.03684881521389024 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5771144278606966, + "acc_stderr": 0.034932317774212816, + "acc_norm": 0.5771144278606966, + "acc_norm_stderr": 0.034932317774212816 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.41040462427745666, + "acc_stderr": 0.037507570448955384, + "acc_norm": 0.41040462427745666, + "acc_norm_stderr": 0.037507570448955384 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.02306818884826111, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.02306818884826111 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3541666666666667, + "acc_stderr": 0.039994111357535424, + "acc_norm": 0.3541666666666667, + "acc_norm_stderr": 0.039994111357535424 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + 
"acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5086705202312138, + "acc_stderr": 0.026915047355369804, + "acc_norm": 0.5086705202312138, + "acc_norm_stderr": 0.026915047355369804 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5214723926380368, + "acc_stderr": 0.03924746876751129, + "acc_norm": 0.5214723926380368, + "acc_norm_stderr": 0.03924746876751129 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4876543209876543, + "acc_stderr": 0.027812262269327242, + "acc_norm": 0.4876543209876543, + "acc_norm_stderr": 0.027812262269327242 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5595854922279793, + "acc_stderr": 0.035827245300360945, + "acc_norm": 0.5595854922279793, + "acc_norm_stderr": 0.035827245300360945 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.04049339297748141, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.04049339297748141 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5779816513761468, + "acc_stderr": 0.021174991407763178, + "acc_norm": 0.5779816513761468, + "acc_norm_stderr": 0.021174991407763178 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.29365079365079366, + "acc_stderr": 0.04073524322147127, + "acc_norm": 0.29365079365079366, + "acc_norm_stderr": 0.04073524322147127 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3758169934640523, + "acc_stderr": 0.027732834353363954, + "acc_norm": 0.3758169934640523, + "acc_norm_stderr": 0.027732834353363954 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.44, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.44, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6198347107438017, + "acc_stderr": 0.04431324501968431, 
+ "acc_norm": 0.6198347107438017, + "acc_norm_stderr": 0.04431324501968431 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.39473684210526316, + "acc_stderr": 0.039777499346220734, + "acc_norm": 0.39473684210526316, + "acc_norm_stderr": 0.039777499346220734 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.39215686274509803, + "acc_stderr": 0.019751726508762626, + "acc_norm": 0.39215686274509803, + "acc_norm_stderr": 0.019751726508762626 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3546099290780142, + "acc_stderr": 0.02853865002887864, + "acc_norm": 0.3546099290780142, + "acc_norm_stderr": 0.02853865002887864 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.30357142857142855, + "acc_stderr": 0.04364226155841044, + "acc_norm": 0.30357142857142855, + "acc_norm_stderr": 0.04364226155841044 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.03141554629402543, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.03141554629402543 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.43014705882352944, + "acc_stderr": 0.030074971917302875, + "acc_norm": 0.43014705882352944, + "acc_norm_stderr": 0.030074971917302875 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.37142857142857144, + "acc_stderr": 0.03093285879278984, + "acc_norm": 0.37142857142857144, + "acc_norm_stderr": 0.03093285879278984 + }, + 
"harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6540084388185654, + "acc_stderr": 0.03096481058878671, + "acc_norm": 0.6540084388185654, + "acc_norm_stderr": 0.03096481058878671 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3520208604954368, + "acc_stderr": 0.012198140605353593, + "acc_norm": 0.3520208604954368, + "acc_norm_stderr": 0.012198140605353593 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5147058823529411, + "acc_stderr": 0.03507793834791324, + "acc_norm": 0.5147058823529411, + "acc_norm_stderr": 0.03507793834791324 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5757575757575758, + "acc_stderr": 0.03859268142070264, + "acc_norm": 0.5757575757575758, + "acc_norm_stderr": 0.03859268142070264 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3684210526315789, + "mc1_stderr": 0.016886551261046046, + "mc2": 0.5190921371587374, + "mc2_stderr": 0.015978390538660552 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4734356552538371, + "acc_stderr": 0.017166075717577747, + "acc_norm": 0.538370720188902, + "acc_norm_stderr": 0.017139660221845557 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + 
"harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "daekeun-ml/Llama-2-ko-DPO-13B", + "model_sha": "dba5dd11263b1b42fa7d904d627f41d47330317b", + "model_dtype": "torch.float16", + 
"lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/daekeun-ml/Llama-2-ko-OpenOrca-gugugo-13B/result_2023-11-14 01:13:56.json b/daekeun-ml/Llama-2-ko-OpenOrca-gugugo-13B/result_2023-11-14 01:13:56.json new file mode 100644 index 0000000000000000000000000000000000000000..f2fbe567f38ed549276f17ed20adf9249800886c --- /dev/null +++ b/daekeun-ml/Llama-2-ko-OpenOrca-gugugo-13B/result_2023-11-14 01:13:56.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3370307167235495, + "acc_stderr": 0.013813476652902276, + "acc_norm": 0.3967576791808874, + "acc_norm_stderr": 0.014296513020180637 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3999203345947023, + "acc_stderr": 0.0048888050031030755, + "acc_norm": 0.5243975303724357, + "acc_norm_stderr": 0.004983837641502893 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5087719298245614, + "acc_stderr": 0.038342347441649924, + "acc_norm": 0.5087719298245614, + "acc_norm_stderr": 0.038342347441649924 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5145631067961165, + "acc_stderr": 0.049486373240266356, + "acc_norm": 0.5145631067961165, + "acc_norm_stderr": 0.049486373240266356 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5134099616858238, + "acc_stderr": 0.017873531736510365, + "acc_norm": 0.5134099616858238, + "acc_norm_stderr": 0.017873531736510365 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.42962962962962964, + "acc_stderr": 0.04276349494376599, + "acc_norm": 0.42962962962962964, + "acc_norm_stderr": 0.04276349494376599 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4, + "acc_stderr": 0.03202563076101737, + "acc_norm": 0.4, + "acc_norm_stderr": 0.03202563076101737 + }, + 
"harness|ko_mmlu_virology|5": { + "acc": 0.3795180722891566, + "acc_stderr": 0.037777988227480165, + "acc_norm": 0.3795180722891566, + "acc_norm_stderr": 0.037777988227480165 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4340836012861736, + "acc_stderr": 0.028150232244535594, + "acc_norm": 0.4340836012861736, + "acc_norm_stderr": 0.028150232244535594 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.45739910313901344, + "acc_stderr": 0.033435777055830646, + "acc_norm": 0.45739910313901344, + "acc_norm_stderr": 0.033435777055830646 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4732824427480916, + "acc_stderr": 0.04379024936553894, + "acc_norm": 0.4732824427480916, + "acc_norm_stderr": 0.04379024936553894 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.45454545454545453, + "acc_stderr": 0.03547601494006936, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.03547601494006936 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3931034482758621, + "acc_stderr": 0.0407032901370707, + "acc_norm": 0.3931034482758621, + "acc_norm_stderr": 0.0407032901370707 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.043898699568087785, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.043898699568087785 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.453781512605042, + "acc_stderr": 0.03233943468182088, + "acc_norm": 0.453781512605042, + "acc_norm_stderr": 0.03233943468182088 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4, + "acc_stderr": 0.02483881198803316, + "acc_norm": 0.4, + "acc_norm_stderr": 0.02483881198803316 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + 
}, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39408866995073893, + "acc_stderr": 0.034381579670365446, + "acc_norm": 0.39408866995073893, + "acc_norm_stderr": 0.034381579670365446 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4290322580645161, + "acc_stderr": 0.02815603653823321, + "acc_norm": 0.4290322580645161, + "acc_norm_stderr": 0.02815603653823321 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5897435897435898, + "acc_stderr": 0.03222414045241107, + "acc_norm": 0.5897435897435898, + "acc_norm_stderr": 0.03222414045241107 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.44150943396226416, + "acc_stderr": 0.03056159042673183, + "acc_norm": 0.44150943396226416, + "acc_norm_stderr": 0.03056159042673183 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5272727272727272, + "acc_stderr": 0.04782001791380061, + "acc_norm": 0.5272727272727272, + "acc_norm_stderr": 0.04782001791380061 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.02696242432507382, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.02696242432507382 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.03802039760107903, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.03802039760107903 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.572139303482587, + "acc_stderr": 0.03498541988407795, + "acc_norm": 0.572139303482587, + "acc_norm_stderr": 0.03498541988407795 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.43352601156069365, + "acc_stderr": 0.03778621079092056, + "acc_norm": 
0.43352601156069365, + "acc_norm_stderr": 0.03778621079092056 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.023068188848261107, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.023068188848261107 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3541666666666667, + "acc_stderr": 0.039994111357535424, + "acc_norm": 0.3541666666666667, + "acc_norm_stderr": 0.039994111357535424 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.47109826589595377, + "acc_stderr": 0.026874085883518348, + "acc_norm": 0.47109826589595377, + "acc_norm_stderr": 0.026874085883518348 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3987730061349693, + "acc_stderr": 0.038470214204560246, + "acc_norm": 0.3987730061349693, + "acc_norm_stderr": 0.038470214204560246 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.43209876543209874, + "acc_stderr": 0.02756301097160668, + "acc_norm": 0.43209876543209874, + "acc_norm_stderr": 0.02756301097160668 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.533678756476684, + "acc_stderr": 0.036002440698671784, + "acc_norm": 0.533678756476684, + "acc_norm_stderr": 0.036002440698671784 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.17543859649122806, + "acc_stderr": 0.0357795481394837, + "acc_norm": 0.17543859649122806, + "acc_norm_stderr": 0.0357795481394837 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5174311926605505, + "acc_stderr": 
0.021424291871853157, + "acc_norm": 0.5174311926605505, + "acc_norm_stderr": 0.021424291871853157 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30158730158730157, + "acc_stderr": 0.041049472699033945, + "acc_norm": 0.30158730158730157, + "acc_norm_stderr": 0.041049472699033945 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.39215686274509803, + "acc_stderr": 0.027956046165424516, + "acc_norm": 0.39215686274509803, + "acc_norm_stderr": 0.027956046165424516 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.043913262867240704, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.043913262867240704 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4144736842105263, + "acc_stderr": 0.04008973785779206, + "acc_norm": 0.4144736842105263, + "acc_norm_stderr": 0.04008973785779206 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3464052287581699, + "acc_stderr": 0.019249785691717217, + "acc_norm": 0.3464052287581699, + "acc_norm_stderr": 0.019249785691717217 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.02812163604063988, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.02812163604063988 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.22321428571428573, + "acc_stderr": 0.03952301967702511, + "acc_norm": 0.22321428571428573, + "acc_norm_stderr": 0.03952301967702511 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3101851851851852, + "acc_stderr": 0.031546962856566295, + "acc_norm": 0.3101851851851852, + "acc_norm_stderr": 0.031546962856566295 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + 
"harness|ko_mmlu_college_computer_science|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3897058823529412, + "acc_stderr": 0.02962466358115969, + "acc_norm": 0.3897058823529412, + "acc_norm_stderr": 0.02962466358115969 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.40408163265306124, + "acc_stderr": 0.03141470802586589, + "acc_norm": 0.40408163265306124, + "acc_norm_stderr": 0.03141470802586589 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5949367088607594, + "acc_stderr": 0.03195514741370671, + "acc_norm": 0.5949367088607594, + "acc_norm_stderr": 0.03195514741370671 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3344198174706649, + "acc_stderr": 0.012049668983214941, + "acc_norm": 0.3344198174706649, + "acc_norm_stderr": 0.012049668983214941 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.49019607843137253, + "acc_stderr": 0.03508637358630572, + "acc_norm": 0.49019607843137253, + "acc_norm_stderr": 0.03508637358630572 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.46060606060606063, + "acc_stderr": 0.03892207016552013, + "acc_norm": 0.46060606060606063, + "acc_norm_stderr": 0.03892207016552013 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.26193390452876375, + "mc1_stderr": 0.015392118805015008, + "mc2": 0.4200527342817689, + "mc2_stderr": 0.01514912154156884 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.36717827626918537, + "acc_stderr": 0.016572727807458595, + "acc_norm": 0.4852420306965762, + "acc_norm_stderr": 0.017182864434998574 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + 
"harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + 
"harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "daekeun-ml/Llama-2-ko-OpenOrca-gugugo-13B", + "model_sha": "4a8383dc00731b8d09cec6d4f48eba631833b445", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/daekeun-ml/Llama-2-ko-instruct-13B/result_2023-10-29 16:17:31.json b/daekeun-ml/Llama-2-ko-instruct-13B/result_2023-10-29 16:17:31.json new file mode 100644 index 0000000000000000000000000000000000000000..f741ff23c1386332442b4778e58e6c4945c6a1b9 --- /dev/null +++ b/daekeun-ml/Llama-2-ko-instruct-13B/result_2023-10-29 16:17:31.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3993174061433447, + "acc_stderr": 0.014312094557946704, + "acc_norm": 0.46501706484641636, + "acc_norm_stderr": 0.01457558392201967 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4229237203744274, + "acc_stderr": 0.004930138842768219, + "acc_norm": 0.5690101573391755, + "acc_norm_stderr": 0.004942026200279584 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4678362573099415, + "acc_stderr": 0.03826882417660369, + "acc_norm": 0.4678362573099415, + "acc_norm_stderr": 0.03826882417660369 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5339805825242718, + "acc_stderr": 0.0493929144727348, + "acc_norm": 
0.5339805825242718, + "acc_norm_stderr": 0.0493929144727348 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5606641123882503, + "acc_stderr": 0.017747874245683602, + "acc_norm": 0.5606641123882503, + "acc_norm_stderr": 0.017747874245683602 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.04309732901036354, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.04309732901036354 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206824, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206824 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4297872340425532, + "acc_stderr": 0.03236214467715564, + "acc_norm": 0.4297872340425532, + "acc_norm_stderr": 0.03236214467715564 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42168674698795183, + "acc_stderr": 0.038444531817709175, + "acc_norm": 0.42168674698795183, + "acc_norm_stderr": 0.038444531817709175 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4855305466237942, + "acc_stderr": 0.02838619808417768, + "acc_norm": 0.4855305466237942, + "acc_norm_stderr": 0.02838619808417768 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5381165919282511, + "acc_stderr": 0.033460150119732274, + "acc_norm": 0.5381165919282511, + "acc_norm_stderr": 0.033460150119732274 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4961832061068702, + "acc_stderr": 0.043851623256015534, + "acc_norm": 0.4961832061068702, + "acc_norm_stderr": 0.043851623256015534 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.42, + "acc_stderr": 0.04960449637488583, + "acc_norm": 0.42, + "acc_norm_stderr": 0.04960449637488583 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5757575757575758, + "acc_stderr": 0.03521224908841586, + "acc_norm": 0.5757575757575758, + "acc_norm_stderr": 0.03521224908841586 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.42758620689655175, + "acc_stderr": 0.041227371113703316, + 
"acc_norm": 0.42758620689655175, + "acc_norm_stderr": 0.041227371113703316 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.04389869956808778, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.04389869956808778 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.47478991596638653, + "acc_stderr": 0.03243718055137411, + "acc_norm": 0.47478991596638653, + "acc_norm_stderr": 0.03243718055137411 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4564102564102564, + "acc_stderr": 0.02525448542479961, + "acc_norm": 0.4564102564102564, + "acc_norm_stderr": 0.02525448542479961 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5277777777777778, + "acc_stderr": 0.04826217294139894, + "acc_norm": 0.5277777777777778, + "acc_norm_stderr": 0.04826217294139894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.43842364532019706, + "acc_stderr": 0.03491207857486518, + "acc_norm": 0.43842364532019706, + "acc_norm_stderr": 0.03491207857486518 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4838709677419355, + "acc_stderr": 0.028429203176724555, + "acc_norm": 0.4838709677419355, + "acc_norm_stderr": 0.028429203176724555 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6282051282051282, + "acc_stderr": 0.031660988918880785, + "acc_norm": 0.6282051282051282, + "acc_norm_stderr": 0.031660988918880785 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4528301886792453, + "acc_stderr": 0.03063562795796182, + "acc_norm": 0.4528301886792453, + "acc_norm_stderr": 0.03063562795796182 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5181818181818182, + 
"acc_stderr": 0.04785964010794916, + "acc_norm": 0.5181818181818182, + "acc_norm_stderr": 0.04785964010794916 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.026719240783712166, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.026719240783712166 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + "acc_stderr": 0.03684881521389024, + "acc_norm": 0.2847682119205298, + "acc_norm_stderr": 0.03684881521389024 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5920398009950248, + "acc_stderr": 0.03475116365194092, + "acc_norm": 0.5920398009950248, + "acc_norm_stderr": 0.03475116365194092 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4161849710982659, + "acc_stderr": 0.037585177754049466, + "acc_norm": 0.4161849710982659, + "acc_norm_stderr": 0.037585177754049466 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2724867724867725, + "acc_stderr": 0.02293097307163335, + "acc_norm": 0.2724867724867725, + "acc_norm_stderr": 0.02293097307163335 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3819444444444444, + "acc_stderr": 0.040629907841466674, + "acc_norm": 0.3819444444444444, + "acc_norm_stderr": 0.040629907841466674 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5028901734104047, + "acc_stderr": 0.02691864538323901, + "acc_norm": 0.5028901734104047, + "acc_norm_stderr": 0.02691864538323901 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5276073619631901, + "acc_stderr": 0.0392237829061099, + "acc_norm": 0.5276073619631901, + "acc_norm_stderr": 0.0392237829061099 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 
0.4783950617283951, + "acc_stderr": 0.027794760105008736, + "acc_norm": 0.4783950617283951, + "acc_norm_stderr": 0.027794760105008736 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5492227979274611, + "acc_stderr": 0.035909109522355244, + "acc_norm": 0.5492227979274611, + "acc_norm_stderr": 0.035909109522355244 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2894736842105263, + "acc_stderr": 0.04266339443159395, + "acc_norm": 0.2894736842105263, + "acc_norm_stderr": 0.04266339443159395 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5743119266055046, + "acc_stderr": 0.021199235972470795, + "acc_norm": 0.5743119266055046, + "acc_norm_stderr": 0.021199235972470795 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.04134913018303316, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.04134913018303316 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.40522875816993464, + "acc_stderr": 0.02811092849280908, + "acc_norm": 0.40522875816993464, + "acc_norm_stderr": 0.02811092849280908 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.043913262867240704, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.043913262867240704 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40131578947368424, + "acc_stderr": 0.03988903703336284, + "acc_norm": 0.40131578947368424, + "acc_norm_stderr": 0.03988903703336284 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.38562091503267976, + "acc_stderr": 0.01969145905235416, + "acc_norm": 0.38562091503267976, + "acc_norm_stderr": 0.01969145905235416 + }, + 
"harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32269503546099293, + "acc_stderr": 0.027889139300534792, + "acc_norm": 0.32269503546099293, + "acc_norm_stderr": 0.027889139300534792 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.1875, + "acc_stderr": 0.0370468111477387, + "acc_norm": 0.1875, + "acc_norm_stderr": 0.0370468111477387 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3425925925925926, + "acc_stderr": 0.03236585252602157, + "acc_norm": 0.3425925925925926, + "acc_norm_stderr": 0.03236585252602157 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.44485294117647056, + "acc_stderr": 0.03018753206032938, + "acc_norm": 0.44485294117647056, + "acc_norm_stderr": 0.03018753206032938 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5142857142857142, + "acc_stderr": 0.03199615232806286, + "acc_norm": 0.5142857142857142, + "acc_norm_stderr": 0.03199615232806286 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6286919831223629, + "acc_stderr": 0.03145068600744858, + "acc_norm": 0.6286919831223629, + "acc_norm_stderr": 0.03145068600744858 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3474576271186441, + "acc_stderr": 0.012161417729749806, + "acc_norm": 0.3474576271186441, + "acc_norm_stderr": 0.012161417729749806 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.49019607843137253, + "acc_stderr": 0.03508637358630573, + "acc_norm": 
0.49019607843137253, + "acc_norm_stderr": 0.03508637358630573 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5393939393939394, + "acc_stderr": 0.03892207016552012, + "acc_norm": 0.5393939393939394, + "acc_norm_stderr": 0.03892207016552012 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2558139534883721, + "mc1_stderr": 0.015274176219283344, + "mc2": 0.4199929776899167, + "mc2_stderr": 0.014679195459056854 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4923258559622196, + "acc_stderr": 0.017188329219654276, + "acc_norm": 0.5844155844155844, + "acc_norm_stderr": 0.016943586313076565 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + 
"harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "daekeun-ml/Llama-2-ko-instruct-13B", + "model_sha": "a29fb540227b3cbc88a308cc5ed62c26b28d84f1", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/dasomysky/unv_v0.1.0/result_2023-12-19 10:08:30.json b/dasomysky/unv_v0.1.0/result_2023-12-19 10:08:30.json new file mode 100644 index 0000000000000000000000000000000000000000..c2fe2afb76080ca8f541da5ff3fc49e6a39df854 --- /dev/null +++ b/dasomysky/unv_v0.1.0/result_2023-12-19 10:08:30.json @@ -0,0 +1,444 @@ 
+{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4104095563139932, + "acc_stderr": 0.014374922192642667, + "acc_norm": 0.4735494880546075, + "acc_norm_stderr": 0.014590931358120179 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3721370244971121, + "acc_stderr": 0.0048238677613324675, + "acc_norm": 0.4676359290977893, + "acc_norm_stderr": 0.004979317515432532 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.47368421052631576, + "acc_stderr": 0.038295098689947286, + "acc_norm": 0.47368421052631576, + "acc_norm_stderr": 0.038295098689947286 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5145631067961165, + "acc_stderr": 0.04948637324026637, + "acc_norm": 0.5145631067961165, + "acc_norm_stderr": 0.04948637324026637 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.45977011494252873, + "acc_stderr": 0.01782199409693353, + "acc_norm": 0.45977011494252873, + "acc_norm_stderr": 0.01782199409693353 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34814814814814815, + "acc_stderr": 0.041153246103369526, + "acc_norm": 0.34814814814814815, + "acc_norm_stderr": 0.041153246103369526 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3702127659574468, + "acc_stderr": 0.03156564682236785, + "acc_norm": 0.3702127659574468, + "acc_norm_stderr": 0.03156564682236785 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.40963855421686746, + "acc_stderr": 0.03828401115079021, + "acc_norm": 0.40963855421686746, + "acc_norm_stderr": 0.03828401115079021 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4533762057877814, + "acc_stderr": 0.028274359854894262, + "acc_norm": 0.4533762057877814, + "acc_norm_stderr": 0.028274359854894262 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.47533632286995514, + "acc_stderr": 0.033516951676526276, + "acc_norm": 0.47533632286995514, + 
"acc_norm_stderr": 0.033516951676526276 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4351145038167939, + "acc_stderr": 0.04348208051644858, + "acc_norm": 0.4351145038167939, + "acc_norm_stderr": 0.04348208051644858 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.35, + "acc_stderr": 0.04793724854411022, + "acc_norm": 0.35, + "acc_norm_stderr": 0.04793724854411022 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5808080808080808, + "acc_stderr": 0.03515520728670417, + "acc_norm": 0.5808080808080808, + "acc_norm_stderr": 0.03515520728670417 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.45517241379310347, + "acc_stderr": 0.04149886942192117, + "acc_norm": 0.45517241379310347, + "acc_norm_stderr": 0.04149886942192117 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.19607843137254902, + "acc_stderr": 0.03950581861179962, + "acc_norm": 0.19607843137254902, + "acc_norm_stderr": 0.03950581861179962 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4789915966386555, + "acc_stderr": 0.03244980849990029, + "acc_norm": 0.4789915966386555, + "acc_norm_stderr": 0.03244980849990029 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.43846153846153846, + "acc_stderr": 0.02515826601686856, + "acc_norm": 0.43846153846153846, + "acc_norm_stderr": 0.02515826601686856 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.55, + "acc_stderr": 0.04999999999999999, + "acc_norm": 0.55, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5462962962962963, + "acc_stderr": 0.04812917324536823, + "acc_norm": 0.5462962962962963, + "acc_norm_stderr": 0.04812917324536823 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.35960591133004927, + "acc_stderr": 0.03376458246509567, + "acc_norm": 
0.35960591133004927, + "acc_norm_stderr": 0.03376458246509567 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3709677419354839, + "acc_stderr": 0.027480541887953593, + "acc_norm": 0.3709677419354839, + "acc_norm_stderr": 0.027480541887953593 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6794871794871795, + "acc_stderr": 0.03057281131029961, + "acc_norm": 0.6794871794871795, + "acc_norm_stderr": 0.03057281131029961 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.37358490566037733, + "acc_stderr": 0.029773082713319875, + "acc_norm": 0.37358490566037733, + "acc_norm_stderr": 0.029773082713319875 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5363636363636364, + "acc_stderr": 0.04776449162396197, + "acc_norm": 0.5363636363636364, + "acc_norm_stderr": 0.04776449162396197 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.35555555555555557, + "acc_stderr": 0.029185714949857406, + "acc_norm": 0.35555555555555557, + "acc_norm_stderr": 0.029185714949857406 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33774834437086093, + "acc_stderr": 0.038615575462551684, + "acc_norm": 0.33774834437086093, + "acc_norm_stderr": 0.038615575462551684 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5472636815920398, + "acc_stderr": 0.03519702717576915, + "acc_norm": 0.5472636815920398, + "acc_norm_stderr": 0.03519702717576915 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.32947976878612717, + "acc_stderr": 0.03583901754736412, + "acc_norm": 0.32947976878612717, + "acc_norm_stderr": 0.03583901754736412 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.024870815251057093, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.024870815251057093 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3402777777777778, + "acc_stderr": 0.03962135573486219, + "acc_norm": 0.3402777777777778, + "acc_norm_stderr": 0.03962135573486219 + }, + 
"harness|ko_mmlu_college_chemistry|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.56, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.56, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.48554913294797686, + "acc_stderr": 0.026907849856282542, + "acc_norm": 0.48554913294797686, + "acc_norm_stderr": 0.026907849856282542 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4539877300613497, + "acc_stderr": 0.0391170190467718, + "acc_norm": 0.4539877300613497, + "acc_norm_stderr": 0.0391170190467718 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4351851851851852, + "acc_stderr": 0.02758600622160771, + "acc_norm": 0.4351851851851852, + "acc_norm_stderr": 0.02758600622160771 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.49222797927461137, + "acc_stderr": 0.036080032255696545, + "acc_norm": 0.49222797927461137, + "acc_norm_stderr": 0.036080032255696545 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.40350877192982454, + "acc_stderr": 0.04615186962583703, + "acc_norm": 0.40350877192982454, + "acc_norm_stderr": 0.04615186962583703 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.4954128440366973, + "acc_stderr": 0.021436420955529428, + "acc_norm": 0.4954128440366973, + "acc_norm_stderr": 0.021436420955529428 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.42063492063492064, + "acc_stderr": 0.04415438226743744, + "acc_norm": 0.42063492063492064, + "acc_norm_stderr": 0.04415438226743744 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.434640522875817, + "acc_stderr": 0.028384256704883037, + "acc_norm": 0.434640522875817, + "acc_norm_stderr": 
0.028384256704883037 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5619834710743802, + "acc_stderr": 0.045291468044357915, + "acc_norm": 0.5619834710743802, + "acc_norm_stderr": 0.045291468044357915 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3618421052631579, + "acc_stderr": 0.03910525752849725, + "acc_norm": 0.3618421052631579, + "acc_norm_stderr": 0.03910525752849725 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3954248366013072, + "acc_stderr": 0.019780465954777535, + "acc_norm": 0.3954248366013072, + "acc_norm_stderr": 0.019780465954777535 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.36879432624113473, + "acc_stderr": 0.02878222756134725, + "acc_norm": 0.36879432624113473, + "acc_norm_stderr": 0.02878222756134725 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.36607142857142855, + "acc_stderr": 0.045723723587374296, + "acc_norm": 0.36607142857142855, + "acc_norm_stderr": 0.045723723587374296 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.36574074074074076, + "acc_stderr": 0.032847388576472056, + "acc_norm": 0.36574074074074076, + "acc_norm_stderr": 0.032847388576472056 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2, + "acc_stderr": 0.013378001241813053, + "acc_norm": 0.2, + "acc_norm_stderr": 0.013378001241813053 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.35661764705882354, + "acc_stderr": 0.02909720956841195, + "acc_norm": 0.35661764705882354, + 
"acc_norm_stderr": 0.02909720956841195 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5061224489795918, + "acc_stderr": 0.03200682020163906, + "acc_norm": 0.5061224489795918, + "acc_norm_stderr": 0.03200682020163906 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5611814345991561, + "acc_stderr": 0.032302649315470375, + "acc_norm": 0.5611814345991561, + "acc_norm_stderr": 0.032302649315470375 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3389830508474576, + "acc_stderr": 0.01208994185758447, + "acc_norm": 0.3389830508474576, + "acc_norm_stderr": 0.01208994185758447 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.45098039215686275, + "acc_stderr": 0.03492406104163613, + "acc_norm": 0.45098039215686275, + "acc_norm_stderr": 0.03492406104163613 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.41818181818181815, + "acc_stderr": 0.03851716319398395, + "acc_norm": 0.41818181818181815, + "acc_norm_stderr": 0.03851716319398395 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.30599755201958384, + "mc1_stderr": 0.01613222972815506, + "mc2": 0.4566409454989933, + "mc2_stderr": 0.016796069345486716 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.37662337662337664, + "acc_stderr": 0.016658799874051975, + "acc_norm": 0.3919716646989374, + "acc_norm_stderr": 0.016784332119424077 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + 
"harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + 
"harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "dasomysky/unv_v0.1.0", + "model_sha": "5c1f11f93821e38bbb9245a2f6713e0fd421edf3", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/dasomysky/unv_v0.1.2/result_2023-12-28 04:44:41.json b/dasomysky/unv_v0.1.2/result_2023-12-28 04:44:41.json new file mode 100644 index 0000000000000000000000000000000000000000..2f011b5fe9d1a71004d267dc51bb1dc375935182 --- /dev/null +++ b/dasomysky/unv_v0.1.2/result_2023-12-28 04:44:41.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.181740614334471, + "acc_stderr": 0.011269198948880236, + "acc_norm": 0.2431740614334471, + "acc_norm_stderr": 0.012536554144587096 + }, + "harness|ko_hellaswag|10": { + "acc": 0.2529376618203545, + "acc_stderr": 0.004338071318912315, + "acc_norm": 0.25184226249751046, + "acc_norm_stderr": 0.004331840012787854 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.21637426900584794, + "acc_stderr": 0.03158149539338734, + "acc_norm": 0.21637426900584794, + "acc_norm_stderr": 0.03158149539338734 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2524271844660194, + "acc_stderr": 0.04301250399690877, + "acc_norm": 0.2524271844660194, + "acc_norm_stderr": 0.04301250399690877 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.28735632183908044, + "acc_stderr": 0.0161824107306827, + "acc_norm": 0.28735632183908044, + "acc_norm_stderr": 0.0161824107306827 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2518518518518518, + "acc_stderr": 0.03749850709174021, + "acc_norm": 0.2518518518518518, + "acc_norm_stderr": 0.03749850709174021 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + 
"acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3191489361702128, + "acc_stderr": 0.03047297336338005, + "acc_norm": 0.3191489361702128, + "acc_norm_stderr": 0.03047297336338005 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3072289156626506, + "acc_stderr": 0.035915667978246635, + "acc_norm": 0.3072289156626506, + "acc_norm_stderr": 0.035915667978246635 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2765273311897106, + "acc_stderr": 0.02540383297817961, + "acc_norm": 0.2765273311897106, + "acc_norm_stderr": 0.02540383297817961 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.39461883408071746, + "acc_stderr": 0.03280400504755291, + "acc_norm": 0.39461883408071746, + "acc_norm_stderr": 0.03280400504755291 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.26717557251908397, + "acc_stderr": 0.038808483010823944, + "acc_norm": 0.26717557251908397, + "acc_norm_stderr": 0.038808483010823944 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.20202020202020202, + "acc_stderr": 0.028606204289229872, + "acc_norm": 0.20202020202020202, + "acc_norm_stderr": 0.028606204289229872 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2482758620689655, + "acc_stderr": 0.036001056927277716, + "acc_norm": 0.2482758620689655, + "acc_norm_stderr": 0.036001056927277716 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.19607843137254902, + "acc_stderr": 0.03950581861179962, + "acc_norm": 0.19607843137254902, + "acc_norm_stderr": 0.03950581861179962 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.226890756302521, + "acc_stderr": 0.02720537153827948, + "acc_norm": 0.226890756302521, + "acc_norm_stderr": 0.02720537153827948 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2128205128205128, + 
"acc_stderr": 0.020752423722128, + "acc_norm": 0.2128205128205128, + "acc_norm_stderr": 0.020752423722128 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.18, + "acc_stderr": 0.03861229196653697, + "acc_norm": 0.18, + "acc_norm_stderr": 0.03861229196653697 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.04489931073591312, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.04489931073591312 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.270935960591133, + "acc_stderr": 0.031270907132976984, + "acc_norm": 0.270935960591133, + "acc_norm_stderr": 0.031270907132976984 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.24516129032258063, + "acc_stderr": 0.024472243840895514, + "acc_norm": 0.24516129032258063, + "acc_norm_stderr": 0.024472243840895514 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.24358974358974358, + "acc_stderr": 0.028120966503914404, + "acc_norm": 0.24358974358974358, + "acc_norm_stderr": 0.028120966503914404 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2490566037735849, + "acc_stderr": 0.02661648298050171, + "acc_norm": 0.2490566037735849, + "acc_norm_stderr": 0.02661648298050171 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.36363636363636365, + "acc_stderr": 0.04607582090719976, + "acc_norm": 0.36363636363636365, + "acc_norm_stderr": 0.04607582090719976 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24814814814814815, + "acc_stderr": 0.0263357394040558, + "acc_norm": 0.24814814814814815, + "acc_norm_stderr": 0.0263357394040558 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2052980132450331, + "acc_stderr": 0.03297986648473836, + "acc_norm": 0.2052980132450331, + "acc_norm_stderr": 0.03297986648473836 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 
0.22885572139303484, + "acc_stderr": 0.029705284056772426, + "acc_norm": 0.22885572139303484, + "acc_norm_stderr": 0.029705284056772426 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.20809248554913296, + "acc_stderr": 0.030952890217749895, + "acc_norm": 0.20809248554913296, + "acc_norm_stderr": 0.030952890217749895 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.26455026455026454, + "acc_stderr": 0.022717467897708607, + "acc_norm": 0.26455026455026454, + "acc_norm_stderr": 0.022717467897708607 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.03476590104304134, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.03476590104304134 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.18, + "acc_stderr": 0.038612291966536955, + "acc_norm": 0.18, + "acc_norm_stderr": 0.038612291966536955 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932269, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932269 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.24855491329479767, + "acc_stderr": 0.023267528432100174, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 0.023267528432100174 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.25153374233128833, + "acc_stderr": 0.03408997886857529, + "acc_norm": 0.25153374233128833, + "acc_norm_stderr": 0.03408997886857529 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.02438366553103545, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.02438366553103545 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.20207253886010362, + "acc_stderr": 0.02897908979429673, + "acc_norm": 0.20207253886010362, + "acc_norm_stderr": 0.02897908979429673 + }, + 
"harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.042270544512321984, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.042270544512321984 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.23669724770642203, + "acc_stderr": 0.0182240781172991, + "acc_norm": 0.23669724770642203, + "acc_norm_stderr": 0.0182240781172991 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.1984126984126984, + "acc_stderr": 0.03567016675276862, + "acc_norm": 0.1984126984126984, + "acc_norm_stderr": 0.03567016675276862 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.24183006535947713, + "acc_stderr": 0.024518195641879334, + "acc_norm": 0.24183006535947713, + "acc_norm_stderr": 0.024518195641879334 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909284, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909284 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2644628099173554, + "acc_stderr": 0.040261875275912046, + "acc_norm": 0.2644628099173554, + "acc_norm_stderr": 0.040261875275912046 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.19078947368421054, + "acc_stderr": 0.031975658210325, + "acc_norm": 0.19078947368421054, + "acc_norm_stderr": 0.031975658210325 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.26143790849673204, + "acc_stderr": 0.017776947157528034, + "acc_norm": 0.26143790849673204, + "acc_norm_stderr": 0.017776947157528034 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.22695035460992907, + "acc_stderr": 0.024987106365642973, + "acc_norm": 0.22695035460992907, + "acc_norm_stderr": 0.024987106365642973 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.042878587513404565, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.042878587513404565 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.1574074074074074, + "acc_stderr": 0.024837173518242384, + "acc_norm": 
0.1574074074074074, + "acc_norm_stderr": 0.024837173518242384 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24916201117318434, + "acc_stderr": 0.014465893829859936, + "acc_norm": 0.24916201117318434, + "acc_norm_stderr": 0.014465893829859936 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.15, + "acc_stderr": 0.0358870281282637, + "acc_norm": 0.15, + "acc_norm_stderr": 0.0358870281282637 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847415, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847415 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.19117647058823528, + "acc_stderr": 0.023886881922440335, + "acc_norm": 0.19117647058823528, + "acc_norm_stderr": 0.023886881922440335 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.16326530612244897, + "acc_stderr": 0.023661699177098604, + "acc_norm": 0.16326530612244897, + "acc_norm_stderr": 0.023661699177098604 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2616033755274262, + "acc_stderr": 0.028609516716994934, + "acc_norm": 0.2616033755274262, + "acc_norm_stderr": 0.028609516716994934 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.24119947848761408, + "acc_stderr": 0.010926496102034954, + "acc_norm": 0.24119947848761408, + "acc_norm_stderr": 0.010926496102034954 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2107843137254902, + "acc_stderr": 0.028626547912437406, + "acc_norm": 0.2107843137254902, + "acc_norm_stderr": 0.028626547912437406 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.24242424242424243, + "acc_stderr": 0.03346409881055953, + "acc_norm": 0.24242424242424243, + "acc_norm_stderr": 0.03346409881055953 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24112607099143207, + "mc1_stderr": 0.014974827279752325, + "mc2": NaN, + "mc2_stderr": NaN + }, + "harness|ko_commongen_v2|2": { + "acc": 0.09327036599763873, + "acc_stderr": 
0.00999828619027671, + "acc_norm": 0.31286894923258557, + "acc_norm_stderr": 0.015941010118302658 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + 
"harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "dasomysky/unv_v0.1.2", + "model_sha": "0362c3851124321261564d6aa05b1e0b647d63c4", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/dasomysky/unv_v0.1.3/result_2023-12-31 11:52:24.json b/dasomysky/unv_v0.1.3/result_2023-12-31 11:52:24.json new file mode 100644 index 0000000000000000000000000000000000000000..2c6657a90f2577c5af452aba5a762998fbe6beb7 --- /dev/null +++ b/dasomysky/unv_v0.1.3/result_2023-12-31 11:52:24.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.5563139931740614, + "acc_stderr": 0.014518421825670437, + "acc_norm": 0.60580204778157, + "acc_norm_stderr": 0.01428052266746733 + }, + "harness|ko_hellaswag|10": { + "acc": 0.41874128659629556, + "acc_stderr": 0.004923445627861522, + "acc_norm": 0.5096594303923522, + "acc_norm_stderr": 0.004988850185477487 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4093567251461988, + "acc_stderr": 0.037712831076265434, + "acc_norm": 0.4093567251461988, + 
"acc_norm_stderr": 0.037712831076265434 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.46601941747572817, + "acc_stderr": 0.049392914472734785, + "acc_norm": 0.46601941747572817, + "acc_norm_stderr": 0.049392914472734785 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.421455938697318, + "acc_stderr": 0.017657976412654857, + "acc_norm": 0.421455938697318, + "acc_norm_stderr": 0.017657976412654857 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.31851851851851853, + "acc_stderr": 0.040247784019771096, + "acc_norm": 0.31851851851851853, + "acc_norm_stderr": 0.040247784019771096 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.30638297872340425, + "acc_stderr": 0.030135906478517563, + "acc_norm": 0.30638297872340425, + "acc_norm_stderr": 0.030135906478517563 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3313253012048193, + "acc_stderr": 0.03664314777288087, + "acc_norm": 0.3313253012048193, + "acc_norm_stderr": 0.03664314777288087 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3987138263665595, + "acc_stderr": 0.027809322585774503, + "acc_norm": 0.3987138263665595, + "acc_norm_stderr": 0.027809322585774503 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4349775784753363, + "acc_stderr": 0.03327283370271344, + "acc_norm": 0.4349775784753363, + "acc_norm_stderr": 0.03327283370271344 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.32061068702290074, + "acc_stderr": 0.040933292298342784, + "acc_norm": 0.32061068702290074, + "acc_norm_stderr": 0.040933292298342784 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.4797979797979798, + "acc_stderr": 0.03559443565563919, + "acc_norm": 0.4797979797979798, + 
"acc_norm_stderr": 0.03559443565563919 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.33793103448275863, + "acc_stderr": 0.039417076320648906, + "acc_norm": 0.33793103448275863, + "acc_norm_stderr": 0.039417076320648906 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.04023382273617747, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.04023382273617747 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.032145368597886394, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.032145368597886394 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4153846153846154, + "acc_stderr": 0.024985354923102308, + "acc_norm": 0.4153846153846154, + "acc_norm_stderr": 0.024985354923102308 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.55, + "acc_stderr": 0.05, + "acc_norm": 0.55, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.29064039408866993, + "acc_stderr": 0.03194740072265541, + "acc_norm": 0.29064039408866993, + "acc_norm_stderr": 0.03194740072265541 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.44193548387096776, + "acc_stderr": 0.028251557906849734, + "acc_norm": 0.44193548387096776, + "acc_norm_stderr": 0.028251557906849734 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5683760683760684, + "acc_stderr": 0.0324483553531149, + "acc_norm": 0.5683760683760684, + "acc_norm_stderr": 0.0324483553531149 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.38113207547169814, + "acc_stderr": 0.029890609686286623, + 
"acc_norm": 0.38113207547169814, + "acc_norm_stderr": 0.029890609686286623 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4636363636363636, + "acc_stderr": 0.047764491623961985, + "acc_norm": 0.4636363636363636, + "acc_norm_stderr": 0.047764491623961985 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.026719240783712166, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.026719240783712166 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2582781456953642, + "acc_stderr": 0.035737053147634576, + "acc_norm": 0.2582781456953642, + "acc_norm_stderr": 0.035737053147634576 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5323383084577115, + "acc_stderr": 0.03528131472933607, + "acc_norm": 0.5323383084577115, + "acc_norm_stderr": 0.03528131472933607 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.30057803468208094, + "acc_stderr": 0.03496101481191181, + "acc_norm": 0.30057803468208094, + "acc_norm_stderr": 0.03496101481191181 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3439153439153439, + "acc_stderr": 0.024464426625596433, + "acc_norm": 0.3439153439153439, + "acc_norm_stderr": 0.024464426625596433 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.24305555555555555, + "acc_stderr": 0.03586879280080341, + "acc_norm": 0.24305555555555555, + "acc_norm_stderr": 0.03586879280080341 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768077, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768077 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956913, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956913 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.42485549132947975, + "acc_stderr": 0.026613350840261753, + "acc_norm": 0.42485549132947975, + "acc_norm_stderr": 0.026613350840261753 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 
0.48466257668711654, + "acc_stderr": 0.039265223787088424, + "acc_norm": 0.48466257668711654, + "acc_norm_stderr": 0.039265223787088424 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3950617283950617, + "acc_stderr": 0.02720111766692565, + "acc_norm": 0.3950617283950617, + "acc_norm_stderr": 0.02720111766692565 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.47150259067357514, + "acc_stderr": 0.036025735712884414, + "acc_norm": 0.47150259067357514, + "acc_norm_stderr": 0.036025735712884414 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.04185774424022056, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.04185774424022056 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.42568807339449544, + "acc_stderr": 0.0211992359724708, + "acc_norm": 0.42568807339449544, + "acc_norm_stderr": 0.0211992359724708 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.29365079365079366, + "acc_stderr": 0.040735243221471255, + "acc_norm": 0.29365079365079366, + "acc_norm_stderr": 0.040735243221471255 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3790849673202614, + "acc_stderr": 0.027780141207023337, + "acc_norm": 0.3790849673202614, + "acc_norm_stderr": 0.027780141207023337 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.043913262867240704, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.043913262867240704 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.34210526315789475, + "acc_stderr": 0.03860731599316092, + "acc_norm": 0.34210526315789475, + "acc_norm_stderr": 0.03860731599316092 + }, + 
"harness|ko_mmlu_professional_psychology|5": { + "acc": 0.40522875816993464, + "acc_stderr": 0.019861155193829163, + "acc_norm": 0.40522875816993464, + "acc_norm_stderr": 0.019861155193829163 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3475177304964539, + "acc_stderr": 0.028406627809590954, + "acc_norm": 0.3475177304964539, + "acc_norm_stderr": 0.028406627809590954 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.30357142857142855, + "acc_stderr": 0.043642261558410445, + "acc_norm": 0.30357142857142855, + "acc_norm_stderr": 0.043642261558410445 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.41203703703703703, + "acc_stderr": 0.03356787758160834, + "acc_norm": 0.41203703703703703, + "acc_norm_stderr": 0.03356787758160834 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.30614525139664805, + "acc_stderr": 0.0154144944879032, + "acc_norm": 0.30614525139664805, + "acc_norm_stderr": 0.0154144944879032 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.45955882352941174, + "acc_stderr": 0.030273325077345755, + "acc_norm": 0.45955882352941174, + "acc_norm_stderr": 0.030273325077345755 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3877551020408163, + "acc_stderr": 0.031192230726795656, + "acc_norm": 0.3877551020408163, + "acc_norm_stderr": 0.031192230726795656 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.569620253164557, + "acc_stderr": 0.03223017195937598, + "acc_norm": 0.569620253164557, + "acc_norm_stderr": 0.03223017195937598 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.318122555410691, + "acc_stderr": 0.011895407281104097, + 
"acc_norm": 0.318122555410691, + "acc_norm_stderr": 0.011895407281104097 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4362745098039216, + "acc_stderr": 0.03480693138457038, + "acc_norm": 0.4362745098039216, + "acc_norm_stderr": 0.03480693138457038 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.038956580652718446, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.038956580652718446 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2607099143206854, + "mc1_stderr": 0.015368841620766373, + "mc2": 0.41040887559572004, + "mc2_stderr": 0.01656022256396514 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.42266824085005905, + "acc_stderr": 0.016983506079577604, + "acc_norm": 0.42621015348288077, + "acc_norm_stderr": 0.01700212260948926 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, 
+ "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "dasomysky/unv_v0.1.3", + "model_sha": "32c92b396deacef3a5b4d5a4d224a68c02de6c1b", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/dasomysky/unv_v0.1.4/result_2024-01-01 05:58:28.json b/dasomysky/unv_v0.1.4/result_2024-01-01 05:58:28.json new file mode 100644 index 
0000000000000000000000000000000000000000..a5e0943a80eb5d4b26e9268c03751969452a3bed --- /dev/null +++ b/dasomysky/unv_v0.1.4/result_2024-01-01 05:58:28.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.5793515358361775, + "acc_stderr": 0.014426211252508406, + "acc_norm": 0.6177474402730375, + "acc_norm_stderr": 0.014200454049979268 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4320852419836686, + "acc_stderr": 0.004943537242344417, + "acc_norm": 0.5263891655048795, + "acc_norm_stderr": 0.004982826916687152 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4152046783625731, + "acc_stderr": 0.03779275945503201, + "acc_norm": 0.4152046783625731, + "acc_norm_stderr": 0.03779275945503201 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4174757281553398, + "acc_stderr": 0.048828405482122375, + "acc_norm": 0.4174757281553398, + "acc_norm_stderr": 0.048828405482122375 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4227330779054917, + "acc_stderr": 0.01766518035195406, + "acc_norm": 0.4227330779054917, + "acc_norm_stderr": 0.01766518035195406 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4148148148148148, + "acc_stderr": 0.04256193767901407, + "acc_norm": 0.4148148148148148, + "acc_norm_stderr": 0.04256193767901407 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3148936170212766, + "acc_stderr": 0.030363582197238167, + "acc_norm": 0.3148936170212766, + "acc_norm_stderr": 0.030363582197238167 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39156626506024095, + "acc_stderr": 0.03799857454479636, + "acc_norm": 0.39156626506024095, + "acc_norm_stderr": 0.03799857454479636 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.40836012861736337, + "acc_stderr": 0.027917050748484627, + "acc_norm": 0.40836012861736337, + "acc_norm_stderr": 
0.027917050748484627 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4125560538116592, + "acc_stderr": 0.03304062175449297, + "acc_norm": 0.4125560538116592, + "acc_norm_stderr": 0.03304062175449297 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.33587786259541985, + "acc_stderr": 0.04142313771996664, + "acc_norm": 0.33587786259541985, + "acc_norm_stderr": 0.04142313771996664 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.42, + "acc_stderr": 0.04960449637488583, + "acc_norm": 0.42, + "acc_norm_stderr": 0.04960449637488583 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.4292929292929293, + "acc_stderr": 0.035265527246011986, + "acc_norm": 0.4292929292929293, + "acc_norm_stderr": 0.035265527246011986 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3793103448275862, + "acc_stderr": 0.04043461861916747, + "acc_norm": 0.3793103448275862, + "acc_norm_stderr": 0.04043461861916747 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.043364327079931785, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.043364327079931785 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.032252942323996406, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.032252942323996406 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4153846153846154, + "acc_stderr": 0.024985354923102318, + "acc_norm": 0.4153846153846154, + "acc_norm_stderr": 0.024985354923102318 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.04826217294139894, + "acc_norm": 0.4722222222222222, + 
"acc_norm_stderr": 0.04826217294139894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.35960591133004927, + "acc_stderr": 0.03376458246509568, + "acc_norm": 0.35960591133004927, + "acc_norm_stderr": 0.03376458246509568 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.36451612903225805, + "acc_stderr": 0.027379871229943245, + "acc_norm": 0.36451612903225805, + "acc_norm_stderr": 0.027379871229943245 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5726495726495726, + "acc_stderr": 0.032408473935163266, + "acc_norm": 0.5726495726495726, + "acc_norm_stderr": 0.032408473935163266 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3660377358490566, + "acc_stderr": 0.029647813539365252, + "acc_norm": 0.3660377358490566, + "acc_norm_stderr": 0.029647813539365252 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4636363636363636, + "acc_stderr": 0.047764491623961985, + "acc_norm": 0.4636363636363636, + "acc_norm_stderr": 0.047764491623961985 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.31851851851851853, + "acc_stderr": 0.028406533090608463, + "acc_norm": 0.31851851851851853, + "acc_norm_stderr": 0.028406533090608463 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.26490066225165565, + "acc_stderr": 0.03603038545360384, + "acc_norm": 0.26490066225165565, + "acc_norm_stderr": 0.03603038545360384 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5572139303482587, + "acc_stderr": 0.03512310964123937, + "acc_norm": 0.5572139303482587, + "acc_norm_stderr": 0.03512310964123937 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.34104046242774566, + "acc_stderr": 0.036146654241808254, + "acc_norm": 0.34104046242774566, + "acc_norm_stderr": 0.036146654241808254 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.35978835978835977, + "acc_stderr": 0.024718075944129277, + "acc_norm": 0.35978835978835977, + "acc_norm_stderr": 0.024718075944129277 + }, + 
"harness|ko_mmlu_college_biology|5": { + "acc": 0.3194444444444444, + "acc_stderr": 0.038990736873573344, + "acc_norm": 0.3194444444444444, + "acc_norm_stderr": 0.038990736873573344 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.61, + "acc_stderr": 0.049020713000019756, + "acc_norm": 0.61, + "acc_norm_stderr": 0.049020713000019756 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4653179190751445, + "acc_stderr": 0.026854257928258875, + "acc_norm": 0.4653179190751445, + "acc_norm_stderr": 0.026854257928258875 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4110429447852761, + "acc_stderr": 0.038656978537853624, + "acc_norm": 0.4110429447852761, + "acc_norm_stderr": 0.038656978537853624 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.38271604938271603, + "acc_stderr": 0.027044538138402616, + "acc_norm": 0.38271604938271603, + "acc_norm_stderr": 0.027044538138402616 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.47150259067357514, + "acc_stderr": 0.03602573571288442, + "acc_norm": 0.47150259067357514, + "acc_norm_stderr": 0.03602573571288442 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2982456140350877, + "acc_stderr": 0.04303684033537315, + "acc_norm": 0.2982456140350877, + "acc_norm_stderr": 0.04303684033537315 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.43853211009174314, + "acc_stderr": 0.021274713073954572, + "acc_norm": 0.43853211009174314, + "acc_norm_stderr": 0.021274713073954572 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04216370213557835, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 
0.04216370213557835 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4477124183006536, + "acc_stderr": 0.028472938478033522, + "acc_norm": 0.4477124183006536, + "acc_norm_stderr": 0.028472938478033522 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.41, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.41, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.628099173553719, + "acc_stderr": 0.04412015806624504, + "acc_norm": 0.628099173553719, + "acc_norm_stderr": 0.04412015806624504 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.34868421052631576, + "acc_stderr": 0.03878139888797609, + "acc_norm": 0.34868421052631576, + "acc_norm_stderr": 0.03878139888797609 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3790849673202614, + "acc_stderr": 0.01962744474841224, + "acc_norm": 0.3790849673202614, + "acc_norm_stderr": 0.01962744474841224 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.37943262411347517, + "acc_stderr": 0.028947338851614105, + "acc_norm": 0.37943262411347517, + "acc_norm_stderr": 0.028947338851614105 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.04287858751340456, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.04287858751340456 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.36574074074074076, + "acc_stderr": 0.032847388576472056, + "acc_norm": 0.36574074074074076, + "acc_norm_stderr": 0.032847388576472056 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24581005586592178, + "acc_stderr": 0.014400296429225608, + "acc_norm": 0.24581005586592178, + "acc_norm_stderr": 0.014400296429225608 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.59, + "acc_stderr": 0.049431107042371025, + "acc_norm": 
0.59, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.43014705882352944, + "acc_stderr": 0.030074971917302875, + "acc_norm": 0.43014705882352944, + "acc_norm_stderr": 0.030074971917302875 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5102040816326531, + "acc_stderr": 0.03200255347893782, + "acc_norm": 0.5102040816326531, + "acc_norm_stderr": 0.03200255347893782 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5485232067510548, + "acc_stderr": 0.032393600173974704, + "acc_norm": 0.5485232067510548, + "acc_norm_stderr": 0.032393600173974704 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3239895697522816, + "acc_stderr": 0.011952840809646556, + "acc_norm": 0.3239895697522816, + "acc_norm_stderr": 0.011952840809646556 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4215686274509804, + "acc_stderr": 0.03465868196380758, + "acc_norm": 0.4215686274509804, + "acc_norm_stderr": 0.03465868196380758 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5333333333333333, + "acc_stderr": 0.03895658065271847, + "acc_norm": 0.5333333333333333, + "acc_norm_stderr": 0.03895658065271847 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2839657282741738, + "mc1_stderr": 0.0157853708583967, + "mc2": 0.4346267676040251, + "mc2_stderr": 0.01684074169964695 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3412042502951594, + "acc_stderr": 0.016300368742137302, + "acc_norm": 0.3482880755608028, + "acc_norm_stderr": 0.01637992673914804 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + 
"harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + 
"harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "dasomysky/unv_v0.1.4", + "model_sha": "b52c7656ac8fb81a80cb4d0d06a76e3273b3f9ba", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/davidkim205/komt-mistral-7b-v1/result_2023-11-02 05:58:27.json b/davidkim205/komt-mistral-7b-v1/result_2023-11-02 05:58:27.json new file mode 100644 index 0000000000000000000000000000000000000000..5e659ed65bc346ebeba62fca8a3ebc5b8db43dcf --- /dev/null +++ b/davidkim205/komt-mistral-7b-v1/result_2023-11-02 05:58:27.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3216723549488055, + "acc_stderr": 0.013650488084494164, + "acc_norm": 0.3660409556313993, + "acc_norm_stderr": 0.014077223108470142 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3586934873531169, + "acc_stderr": 0.004786368011500455, + "acc_norm": 0.46016729735112527, + "acc_norm_stderr": 0.004973922192982241 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.03811079669833531, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.03811079669833531 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5048543689320388, + "acc_stderr": 0.04950504382128921, + "acc_norm": 0.5048543689320388, + "acc_norm_stderr": 0.04950504382128921 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.45849297573435505, + "acc_stderr": 0.017818248603465564, + "acc_norm": 0.45849297573435505, + "acc_norm_stderr": 0.017818248603465564 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.35555555555555557, + 
"acc_stderr": 0.04135176749720386, + "acc_norm": 0.35555555555555557, + "acc_norm_stderr": 0.04135176749720386 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.33191489361702126, + "acc_stderr": 0.030783736757745633, + "acc_norm": 0.33191489361702126, + "acc_norm_stderr": 0.030783736757745633 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3795180722891566, + "acc_stderr": 0.037777988227480165, + "acc_norm": 0.3795180722891566, + "acc_norm_stderr": 0.037777988227480165 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.44694533762057875, + "acc_stderr": 0.028237769422085328, + "acc_norm": 0.44694533762057875, + "acc_norm_stderr": 0.028237769422085328 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4260089686098655, + "acc_stderr": 0.033188332862172806, + "acc_norm": 0.4260089686098655, + "acc_norm_stderr": 0.033188332862172806 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.42748091603053434, + "acc_stderr": 0.04338920305792401, + "acc_norm": 0.42748091603053434, + "acc_norm_stderr": 0.04338920305792401 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.44, + "acc_stderr": 0.0498887651569859, + "acc_norm": 0.44, + "acc_norm_stderr": 0.0498887651569859 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.51010101010101, + "acc_stderr": 0.035616254886737454, + "acc_norm": 0.51010101010101, + "acc_norm_stderr": 0.035616254886737454 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4413793103448276, + "acc_stderr": 0.04137931034482758, + "acc_norm": 0.4413793103448276, + "acc_norm_stderr": 0.04137931034482758 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.04280105837364395, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.04280105837364395 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + 
"acc": 0.3907563025210084, + "acc_stderr": 0.03169380235712997, + "acc_norm": 0.3907563025210084, + "acc_norm_stderr": 0.03169380235712997 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3871794871794872, + "acc_stderr": 0.024697216930878948, + "acc_norm": 0.3871794871794872, + "acc_norm_stderr": 0.024697216930878948 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.52, + "acc_stderr": 0.05021167315686779, + "acc_norm": 0.52, + "acc_norm_stderr": 0.05021167315686779 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909281, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909281 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.04820403072760628, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.04820403072760628 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3793103448275862, + "acc_stderr": 0.03413963805906235, + "acc_norm": 0.3793103448275862, + "acc_norm_stderr": 0.03413963805906235 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4, + "acc_stderr": 0.02786932057166464, + "acc_norm": 0.4, + "acc_norm_stderr": 0.02786932057166464 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6410256410256411, + "acc_stderr": 0.03142616993791924, + "acc_norm": 0.6410256410256411, + "acc_norm_stderr": 0.03142616993791924 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4716981132075472, + "acc_stderr": 0.030723535249006114, + "acc_norm": 0.4716981132075472, + "acc_norm_stderr": 0.030723535249006114 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.45454545454545453, + "acc_stderr": 0.04769300568972742, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.04769300568972742 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.29259259259259257, + "acc_stderr": 0.027738969632176095, + "acc_norm": 0.29259259259259257, + "acc_norm_stderr": 0.027738969632176095 + }, + 
"harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33774834437086093, + "acc_stderr": 0.03861557546255169, + "acc_norm": 0.33774834437086093, + "acc_norm_stderr": 0.03861557546255169 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5572139303482587, + "acc_stderr": 0.03512310964123936, + "acc_norm": 0.5572139303482587, + "acc_norm_stderr": 0.03512310964123936 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3352601156069364, + "acc_stderr": 0.03599586301247079, + "acc_norm": 0.3352601156069364, + "acc_norm_stderr": 0.03599586301247079 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.023919984164047732, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.023919984164047732 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3194444444444444, + "acc_stderr": 0.03899073687357334, + "acc_norm": 0.3194444444444444, + "acc_norm_stderr": 0.03899073687357334 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.42485549132947975, + "acc_stderr": 0.026613350840261743, + "acc_norm": 0.42485549132947975, + "acc_norm_stderr": 0.026613350840261743 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4049079754601227, + "acc_stderr": 0.038566721635489125, + "acc_norm": 0.4049079754601227, + "acc_norm_stderr": 0.038566721635489125 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.02712511551316686, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.02712511551316686 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + 
"harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.44559585492227977, + "acc_stderr": 0.03587014986075659, + "acc_norm": 0.44559585492227977, + "acc_norm_stderr": 0.03587014986075659 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3508771929824561, + "acc_stderr": 0.044895393502706986, + "acc_norm": 0.3508771929824561, + "acc_norm_stderr": 0.044895393502706986 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.43302752293577984, + "acc_stderr": 0.021244146569074338, + "acc_norm": 0.43302752293577984, + "acc_norm_stderr": 0.021244146569074338 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.04134913018303316, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.04134913018303316 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.434640522875817, + "acc_stderr": 0.028384256704883034, + "acc_norm": 0.434640522875817, + "acc_norm_stderr": 0.028384256704883034 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5619834710743802, + "acc_stderr": 0.045291468044357915, + "acc_norm": 0.5619834710743802, + "acc_norm_stderr": 0.045291468044357915 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.34210526315789475, + "acc_stderr": 0.03860731599316092, + "acc_norm": 0.34210526315789475, + "acc_norm_stderr": 0.03860731599316092 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.32516339869281047, + "acc_stderr": 0.018950886770806297, + "acc_norm": 0.32516339869281047, + "acc_norm_stderr": 0.018950886770806297 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3120567375886525, + "acc_stderr": 0.027640120545169924, + "acc_norm": 0.3120567375886525, + "acc_norm_stderr": 0.027640120545169924 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.33035714285714285, + "acc_stderr": 0.04464285714285713, + 
"acc_norm": 0.33035714285714285, + "acc_norm_stderr": 0.04464285714285713 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4027777777777778, + "acc_stderr": 0.03344887382997866, + "acc_norm": 0.4027777777777778, + "acc_norm_stderr": 0.03344887382997866 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2916201117318436, + "acc_stderr": 0.015201032512520429, + "acc_norm": 0.2916201117318436, + "acc_norm_stderr": 0.015201032512520429 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3088235294117647, + "acc_stderr": 0.028064998167040094, + "acc_norm": 0.3088235294117647, + "acc_norm_stderr": 0.028064998167040094 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.46122448979591835, + "acc_stderr": 0.03191282052669277, + "acc_norm": 0.46122448979591835, + "acc_norm_stderr": 0.03191282052669277 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5063291139240507, + "acc_stderr": 0.03254462010767859, + "acc_norm": 0.5063291139240507, + "acc_norm_stderr": 0.03254462010767859 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2848761408083442, + "acc_stderr": 0.01152783084636902, + "acc_norm": 0.2848761408083442, + "acc_norm_stderr": 0.01152783084636902 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.30392156862745096, + "acc_stderr": 0.03228210387037892, + "acc_norm": 0.30392156862745096, + "acc_norm_stderr": 0.03228210387037892 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.32727272727272727, + "acc_stderr": 0.03663974994391243, + "acc_norm": 0.32727272727272727, + "acc_norm_stderr": 0.03663974994391243 + }, + 
"harness|ko_truthfulqa_mc|0": { + "mc1": 0.29498164014687883, + "mc1_stderr": 0.015964400965589667, + "mc2": 0.47070833796075856, + "mc2_stderr": 0.015435009084049225 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.41086186540731995, + "acc_stderr": 0.016914972767841062, + "acc_norm": 0.48288075560802834, + "acc_norm_stderr": 0.017180275246085622 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, 
+ "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "davidkim205/komt-mistral-7b-v1", + "model_sha": "feb41a27b8dafcc6912185ff9d8da66951ca5758", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/davidkim205/komt-qwen1.5-7b-sft-v1/result_2024-02-20 10:41:55.json b/davidkim205/komt-qwen1.5-7b-sft-v1/result_2024-02-20 10:41:55.json new file mode 100644 index 0000000000000000000000000000000000000000..5af4c29a47c7e5fe54a02c6ce9dbb264954f59df --- /dev/null +++ b/davidkim205/komt-qwen1.5-7b-sft-v1/result_2024-02-20 10:41:55.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2636518771331058, + "acc_stderr": 0.012875929151297065, + "acc_norm": 0.3148464163822526, + "acc_norm_stderr": 0.013572657703084948 + }, + "harness|ko_hellaswag|10": { + "acc": 
0.3212507468631747, + "acc_stderr": 0.0046600252708170154, + "acc_norm": 0.3813981278629755, + "acc_norm_stderr": 0.004847372670134632 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4152046783625731, + "acc_stderr": 0.03779275945503201, + "acc_norm": 0.4152046783625731, + "acc_norm_stderr": 0.03779275945503201 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.47572815533980584, + "acc_stderr": 0.04944901092973781, + "acc_norm": 0.47572815533980584, + "acc_norm_stderr": 0.04944901092973781 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.38569604086845466, + "acc_stderr": 0.017406476619212907, + "acc_norm": 0.38569604086845466, + "acc_norm_stderr": 0.017406476619212907 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.31851851851851853, + "acc_stderr": 0.04024778401977112, + "acc_norm": 0.31851851851851853, + "acc_norm_stderr": 0.04024778401977112 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.35319148936170214, + "acc_stderr": 0.031245325202761923, + "acc_norm": 0.35319148936170214, + "acc_norm_stderr": 0.031245325202761923 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3674698795180723, + "acc_stderr": 0.03753267402120574, + "acc_norm": 0.3674698795180723, + "acc_norm_stderr": 0.03753267402120574 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.40514469453376206, + "acc_stderr": 0.02788238379132595, + "acc_norm": 0.40514469453376206, + "acc_norm_stderr": 0.02788238379132595 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3811659192825112, + "acc_stderr": 0.032596251184168284, + "acc_norm": 0.3811659192825112, + "acc_norm_stderr": 0.032596251184168284 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4198473282442748, + "acc_stderr": 0.04328577215262972, + "acc_norm": 0.4198473282442748, + "acc_norm_stderr": 0.04328577215262972 + }, + 
"harness|ko_mmlu_medical_genetics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.4393939393939394, + "acc_stderr": 0.0353608594752948, + "acc_norm": 0.4393939393939394, + "acc_norm_stderr": 0.0353608594752948 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3724137931034483, + "acc_stderr": 0.0402873153294756, + "acc_norm": 0.3724137931034483, + "acc_norm_stderr": 0.0402873153294756 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3627450980392157, + "acc_stderr": 0.04784060704105653, + "acc_norm": 0.3627450980392157, + "acc_norm_stderr": 0.04784060704105653 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4495798319327731, + "acc_stderr": 0.03231293497137707, + "acc_norm": 0.4495798319327731, + "acc_norm_stderr": 0.03231293497137707 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.36923076923076925, + "acc_stderr": 0.024468615241478905, + "acc_norm": 0.36923076923076925, + "acc_norm_stderr": 0.024468615241478905 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.55, + "acc_stderr": 0.04999999999999999, + "acc_norm": 0.55, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.04766075165356461, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.04766075165356461 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.034819048444388045, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.034819048444388045 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3870967741935484, + "acc_stderr": 0.027709359675032495, + "acc_norm": 0.3870967741935484, + "acc_norm_stderr": 
0.027709359675032495 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6495726495726496, + "acc_stderr": 0.031256108244218796, + "acc_norm": 0.6495726495726496, + "acc_norm_stderr": 0.031256108244218796 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.35471698113207545, + "acc_stderr": 0.0294451753281996, + "acc_norm": 0.35471698113207545, + "acc_norm_stderr": 0.0294451753281996 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.37272727272727274, + "acc_stderr": 0.046313813194254635, + "acc_norm": 0.37272727272727274, + "acc_norm_stderr": 0.046313813194254635 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3296296296296296, + "acc_stderr": 0.028661201116524572, + "acc_norm": 0.3296296296296296, + "acc_norm_stderr": 0.028661201116524572 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2582781456953642, + "acc_stderr": 0.035737053147634576, + "acc_norm": 0.2582781456953642, + "acc_norm_stderr": 0.035737053147634576 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.4975124378109453, + "acc_stderr": 0.03535490150137289, + "acc_norm": 0.4975124378109453, + "acc_norm_stderr": 0.03535490150137289 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.30057803468208094, + "acc_stderr": 0.0349610148119118, + "acc_norm": 0.30057803468208094, + "acc_norm_stderr": 0.0349610148119118 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.025107425481137292, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.025107425481137292 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2916666666666667, + "acc_stderr": 0.03800968060554858, + "acc_norm": 0.2916666666666667, + "acc_norm_stderr": 0.03800968060554858 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620333, + 
"acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.44508670520231214, + "acc_stderr": 0.02675625512966377, + "acc_norm": 0.44508670520231214, + "acc_norm_stderr": 0.02675625512966377 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3067484662576687, + "acc_stderr": 0.03623089915724146, + "acc_norm": 0.3067484662576687, + "acc_norm_stderr": 0.03623089915724146 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.027431623722415012, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.027431623722415012 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.41968911917098445, + "acc_stderr": 0.035615873276858834, + "acc_norm": 0.41968911917098445, + "acc_norm_stderr": 0.035615873276858834 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.042270544512322004, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.042270544512322004 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.4073394495412844, + "acc_stderr": 0.021065986244412877, + "acc_norm": 0.4073394495412844, + "acc_norm_stderr": 0.021065986244412877 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.24603174603174602, + "acc_stderr": 0.03852273364924316, + "acc_norm": 0.24603174603174602, + "acc_norm_stderr": 0.03852273364924316 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4673202614379085, + "acc_stderr": 0.028568699752225868, + "acc_norm": 0.4673202614379085, + "acc_norm_stderr": 0.028568699752225868 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5867768595041323, + "acc_stderr": 0.04495087843548408, + 
"acc_norm": 0.5867768595041323, + "acc_norm_stderr": 0.04495087843548408 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3355263157894737, + "acc_stderr": 0.03842498559395269, + "acc_norm": 0.3355263157894737, + "acc_norm_stderr": 0.03842498559395269 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3627450980392157, + "acc_stderr": 0.019450768432505514, + "acc_norm": 0.3627450980392157, + "acc_norm_stderr": 0.019450768432505514 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2978723404255319, + "acc_stderr": 0.02728160834446941, + "acc_norm": 0.2978723404255319, + "acc_norm_stderr": 0.02728160834446941 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.32142857142857145, + "acc_stderr": 0.04432804055291519, + "acc_norm": 0.32142857142857145, + "acc_norm_stderr": 0.04432804055291519 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2638888888888889, + "acc_stderr": 0.030058202704309846, + "acc_norm": 0.2638888888888889, + "acc_norm_stderr": 0.030058202704309846 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.25027932960893856, + "acc_stderr": 0.01448750085285041, + "acc_norm": 0.25027932960893856, + "acc_norm_stderr": 0.01448750085285041 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.25735294117647056, + "acc_stderr": 0.02655651947004153, + "acc_norm": 0.25735294117647056, + "acc_norm_stderr": 0.02655651947004153 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.35918367346938773, + "acc_stderr": 0.030713560455108493, + "acc_norm": 0.35918367346938773, + "acc_norm_stderr": 0.030713560455108493 + }, + 
"harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.41350210970464135, + "acc_stderr": 0.03205649904851859, + "acc_norm": 0.41350210970464135, + "acc_norm_stderr": 0.03205649904851859 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.30638852672750977, + "acc_stderr": 0.011773980329380701, + "acc_norm": 0.30638852672750977, + "acc_norm_stderr": 0.011773980329380701 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.35784313725490197, + "acc_stderr": 0.033644872860882996, + "acc_norm": 0.35784313725490197, + "acc_norm_stderr": 0.033644872860882996 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.41818181818181815, + "acc_stderr": 0.03851716319398396, + "acc_norm": 0.41818181818181815, + "acc_norm_stderr": 0.03851716319398396 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2998776009791922, + "mc1_stderr": 0.016040352966713613, + "mc2": 0.47301187443284115, + "mc2_stderr": 0.01568801072346984 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2845336481700118, + "acc_stderr": 0.015512301654971779, + "acc_norm": 0.3364817001180638, + "acc_norm_stderr": 0.016245085294386553 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + 
"harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "davidkim205/komt-qwen1.5-7b-sft-v1", + "model_sha": 
"328ecb93cc8eda7abc166d125400b37db335c0d9", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/davidkim205/komt-solar-10.7b-sft-v1/result_2024-02-14 23:16:26.json b/davidkim205/komt-solar-10.7b-sft-v1/result_2024-02-14 23:16:26.json new file mode 100644 index 0000000000000000000000000000000000000000..95f4be093a700e2e7c29a08c2be4f697dc4aa257 --- /dev/null +++ b/davidkim205/komt-solar-10.7b-sft-v1/result_2024-02-14 23:16:26.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4377133105802048, + "acc_stderr": 0.014497573881108288, + "acc_norm": 0.5042662116040956, + "acc_norm_stderr": 0.014610858923956952 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4393547102170882, + "acc_stderr": 0.004952942072999279, + "acc_norm": 0.5992830113523202, + "acc_norm_stderr": 0.004890422457747261 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6491228070175439, + "acc_stderr": 0.03660298834049162, + "acc_norm": 0.6491228070175439, + "acc_norm_stderr": 0.03660298834049162 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.7184466019417476, + "acc_stderr": 0.04453254836326469, + "acc_norm": 0.7184466019417476, + "acc_norm_stderr": 0.04453254836326469 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.685823754789272, + "acc_stderr": 0.016599291735884928, + "acc_norm": 0.685823754789272, + "acc_norm_stderr": 0.016599291735884928 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45185185185185184, + "acc_stderr": 0.04299268905480864, + "acc_norm": 0.45185185185185184, + "acc_norm_stderr": 0.04299268905480864 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.5659574468085107, + "acc_stderr": 0.03240038086792747, + 
"acc_norm": 0.5659574468085107, + "acc_norm_stderr": 0.03240038086792747 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4578313253012048, + "acc_stderr": 0.03878626771002361, + "acc_norm": 0.4578313253012048, + "acc_norm_stderr": 0.03878626771002361 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.639871382636656, + "acc_stderr": 0.02726429759980401, + "acc_norm": 0.639871382636656, + "acc_norm_stderr": 0.02726429759980401 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.6816143497757847, + "acc_stderr": 0.03126580522513713, + "acc_norm": 0.6816143497757847, + "acc_norm_stderr": 0.03126580522513713 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6106870229007634, + "acc_stderr": 0.042764865428145914, + "acc_norm": 0.6106870229007634, + "acc_norm_stderr": 0.042764865428145914 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.53, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.53, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7878787878787878, + "acc_stderr": 0.029126522834586815, + "acc_norm": 0.7878787878787878, + "acc_norm_stderr": 0.029126522834586815 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4689655172413793, + "acc_stderr": 0.04158632762097828, + "acc_norm": 0.4689655172413793, + "acc_norm_stderr": 0.04158632762097828 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.39215686274509803, + "acc_stderr": 0.04858083574266344, + "acc_norm": 0.39215686274509803, + "acc_norm_stderr": 0.04858083574266344 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6386554621848739, + "acc_stderr": 0.03120469122515002, + "acc_norm": 0.6386554621848739, + "acc_norm_stderr": 0.03120469122515002 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.658974358974359, + "acc_stderr": 0.02403548967633508, + "acc_norm": 0.658974358974359, + "acc_norm_stderr": 0.02403548967633508 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 
0.69, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.69, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.04793724854411019, + "acc_norm": 0.35, + "acc_norm_stderr": 0.04793724854411019 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6759259259259259, + "acc_stderr": 0.04524596007030048, + "acc_norm": 0.6759259259259259, + "acc_norm_stderr": 0.04524596007030048 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.5172413793103449, + "acc_stderr": 0.035158955511656986, + "acc_norm": 0.5172413793103449, + "acc_norm_stderr": 0.035158955511656986 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6903225806451613, + "acc_stderr": 0.026302774983517414, + "acc_norm": 0.6903225806451613, + "acc_norm_stderr": 0.026302774983517414 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.8333333333333334, + "acc_stderr": 0.02441494730454368, + "acc_norm": 0.8333333333333334, + "acc_norm_stderr": 0.02441494730454368 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.6075471698113207, + "acc_stderr": 0.030052580579557845, + "acc_norm": 0.6075471698113207, + "acc_norm_stderr": 0.030052580579557845 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6181818181818182, + "acc_stderr": 0.04653429807913508, + "acc_norm": 0.6181818181818182, + "acc_norm_stderr": 0.04653429807913508 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.34814814814814815, + "acc_stderr": 0.029045600290616258, + "acc_norm": 0.34814814814814815, + "acc_norm_stderr": 0.029045600290616258 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33112582781456956, + "acc_stderr": 0.038425817186598696, + "acc_norm": 0.33112582781456956, + "acc_norm_stderr": 0.038425817186598696 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6417910447761194, + "acc_stderr": 0.03390393042268814, + "acc_norm": 0.6417910447761194, + "acc_norm_stderr": 0.03390393042268814 + }, + 
"harness|ko_mmlu_college_medicine|5": { + "acc": 0.5086705202312138, + "acc_stderr": 0.03811890988940412, + "acc_norm": 0.5086705202312138, + "acc_norm_stderr": 0.03811890988940412 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.4603174603174603, + "acc_stderr": 0.025670080636909193, + "acc_norm": 0.4603174603174603, + "acc_norm_stderr": 0.025670080636909193 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5625, + "acc_stderr": 0.04148415739394154, + "acc_norm": 0.5625, + "acc_norm_stderr": 0.04148415739394154 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.71, + "acc_stderr": 0.04560480215720683, + "acc_norm": 0.71, + "acc_norm_stderr": 0.04560480215720683 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5289017341040463, + "acc_stderr": 0.026874085883518348, + "acc_norm": 0.5289017341040463, + "acc_norm_stderr": 0.026874085883518348 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5705521472392638, + "acc_stderr": 0.03889066619112723, + "acc_norm": 0.5705521472392638, + "acc_norm_stderr": 0.03889066619112723 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6604938271604939, + "acc_stderr": 0.026348564412011628, + "acc_norm": 0.6604938271604939, + "acc_norm_stderr": 0.026348564412011628 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7875647668393783, + "acc_stderr": 0.02951928261681723, + "acc_norm": 0.7875647668393783, + "acc_norm_stderr": 0.02951928261681723 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.4473684210526316, + "acc_stderr": 0.04677473004491199, + "acc_norm": 0.4473684210526316, + "acc_norm_stderr": 0.04677473004491199 + }, + 
"harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.7321100917431193, + "acc_stderr": 0.018987462257978652, + "acc_norm": 0.7321100917431193, + "acc_norm_stderr": 0.018987462257978652 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.40476190476190477, + "acc_stderr": 0.04390259265377561, + "acc_norm": 0.40476190476190477, + "acc_norm_stderr": 0.04390259265377561 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.6274509803921569, + "acc_stderr": 0.027684181883302895, + "acc_norm": 0.6274509803921569, + "acc_norm_stderr": 0.027684181883302895 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.62, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.62, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.743801652892562, + "acc_stderr": 0.03984979653302871, + "acc_norm": 0.743801652892562, + "acc_norm_stderr": 0.03984979653302871 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.618421052631579, + "acc_stderr": 0.03953173377749194, + "acc_norm": 0.618421052631579, + "acc_norm_stderr": 0.03953173377749194 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5522875816993464, + "acc_stderr": 0.02011692534742242, + "acc_norm": 0.5522875816993464, + "acc_norm_stderr": 0.02011692534742242 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.41134751773049644, + "acc_stderr": 0.02935491115994097, + "acc_norm": 0.41134751773049644, + "acc_norm_stderr": 0.02935491115994097 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.375, + "acc_stderr": 0.04595091388086298, + "acc_norm": 0.375, + "acc_norm_stderr": 0.04595091388086298 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.034093869469927006, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.034093869469927006 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24804469273743016, + "acc_stderr": 0.014444157808261445, + "acc_norm": 0.24804469273743016, + 
"acc_norm_stderr": 0.014444157808261445 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.0498887651569859, + "acc_norm": 0.44, + "acc_norm_stderr": 0.0498887651569859 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.71, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.71, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5220588235294118, + "acc_stderr": 0.030343264224213528, + "acc_norm": 0.5220588235294118, + "acc_norm_stderr": 0.030343264224213528 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5346938775510204, + "acc_stderr": 0.03193207024425314, + "acc_norm": 0.5346938775510204, + "acc_norm_stderr": 0.03193207024425314 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.759493670886076, + "acc_stderr": 0.02782078198114968, + "acc_norm": 0.759493670886076, + "acc_norm_stderr": 0.02782078198114968 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.4067796610169492, + "acc_stderr": 0.012546325596569551, + "acc_norm": 0.4067796610169492, + "acc_norm_stderr": 0.012546325596569551 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6568627450980392, + "acc_stderr": 0.03332139944668086, + "acc_norm": 0.6568627450980392, + "acc_norm_stderr": 0.03332139944668086 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6848484848484848, + "acc_stderr": 0.0362773057502241, + "acc_norm": 0.6848484848484848, + "acc_norm_stderr": 0.0362773057502241 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3219094247246022, + "mc1_stderr": 0.016355567611960376, + "mc2": 0.4789718203709238, + "mc2_stderr": 0.015356824108835494 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.551357733175915, + "acc_stderr": 0.017099430514725785, + "acc_norm": 0.5749704840613932, + "acc_norm_stderr": 0.016996016308362883 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + 
"harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + 
"harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "davidkim205/komt-solar-10.7b-sft-v1", + "model_sha": "db672cd50d7e12655e11f338624232ba410cd136", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/davidkim205/komt-solar-10.7b-sft-v2/result_2024-03-02 03:09:23.json b/davidkim205/komt-solar-10.7b-sft-v2/result_2024-03-02 03:09:23.json new file mode 100644 index 0000000000000000000000000000000000000000..adc55df22f8a33cf4888c0dd6d3af0c4b08b95ff --- /dev/null +++ b/davidkim205/komt-solar-10.7b-sft-v2/result_2024-03-02 03:09:23.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.45733788395904434, + "acc_stderr": 0.01455810654392406, + "acc_norm": 0.5247440273037542, + "acc_norm_stderr": 0.014593487694937743 + }, + "harness|ko_hellaswag|10": { + "acc": 0.45867357100179246, + "acc_stderr": 0.004972708369656543, + "acc_norm": 0.6319458275243975, + "acc_norm_stderr": 0.004812905279066439 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6900584795321637, + "acc_stderr": 0.035469769593931624, + "acc_norm": 0.6900584795321637, + "acc_norm_stderr": 0.035469769593931624 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6699029126213593, + "acc_stderr": 0.0465614711001235, + "acc_norm": 
0.6699029126213593, + "acc_norm_stderr": 0.0465614711001235 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6883780332056194, + "acc_stderr": 0.016562433867284176, + "acc_norm": 0.6883780332056194, + "acc_norm_stderr": 0.016562433867284176 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.35555555555555557, + "acc_stderr": 0.04135176749720386, + "acc_norm": 0.35555555555555557, + "acc_norm_stderr": 0.04135176749720386 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.5446808510638298, + "acc_stderr": 0.032555253593403555, + "acc_norm": 0.5446808510638298, + "acc_norm_stderr": 0.032555253593403555 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.46987951807228917, + "acc_stderr": 0.03885425420866766, + "acc_norm": 0.46987951807228917, + "acc_norm_stderr": 0.03885425420866766 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6302250803858521, + "acc_stderr": 0.027417996705630995, + "acc_norm": 0.6302250803858521, + "acc_norm_stderr": 0.027417996705630995 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.6367713004484304, + "acc_stderr": 0.032277904428505, + "acc_norm": 0.6367713004484304, + "acc_norm_stderr": 0.032277904428505 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6870229007633588, + "acc_stderr": 0.04066962905677697, + "acc_norm": 0.6870229007633588, + "acc_norm_stderr": 0.04066962905677697 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.53, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.53, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7676767676767676, + "acc_stderr": 0.030088629490217483, + "acc_norm": 0.7676767676767676, + "acc_norm_stderr": 0.030088629490217483 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5241379310344828, + "acc_stderr": 0.041618085035015295, + 
"acc_norm": 0.5241379310344828, + "acc_norm_stderr": 0.041618085035015295 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3431372549019608, + "acc_stderr": 0.04724007352383888, + "acc_norm": 0.3431372549019608, + "acc_norm_stderr": 0.04724007352383888 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6176470588235294, + "acc_stderr": 0.03156663099215416, + "acc_norm": 0.6176470588235294, + "acc_norm_stderr": 0.03156663099215416 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5974358974358974, + "acc_stderr": 0.024864995159767738, + "acc_norm": 0.5974358974358974, + "acc_norm_stderr": 0.024864995159767738 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.62, + "acc_stderr": 0.04878317312145631, + "acc_norm": 0.62, + "acc_norm_stderr": 0.04878317312145631 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6944444444444444, + "acc_stderr": 0.044531975073749834, + "acc_norm": 0.6944444444444444, + "acc_norm_stderr": 0.044531975073749834 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3891625615763547, + "acc_stderr": 0.03430462416103872, + "acc_norm": 0.3891625615763547, + "acc_norm_stderr": 0.03430462416103872 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6387096774193548, + "acc_stderr": 0.02732754844795755, + "acc_norm": 0.6387096774193548, + "acc_norm_stderr": 0.02732754844795755 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.8418803418803419, + "acc_stderr": 0.023902325549560413, + "acc_norm": 0.8418803418803419, + "acc_norm_stderr": 0.023902325549560413 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5660377358490566, + "acc_stderr": 0.030503292013342596, + "acc_norm": 0.5660377358490566, + "acc_norm_stderr": 0.030503292013342596 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6272727272727273, 
+ "acc_stderr": 0.04631381319425465, + "acc_norm": 0.6272727272727273, + "acc_norm_stderr": 0.04631381319425465 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.028742040903948485, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.028742040903948485 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.32450331125827814, + "acc_stderr": 0.03822746937658752, + "acc_norm": 0.32450331125827814, + "acc_norm_stderr": 0.03822746937658752 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6915422885572139, + "acc_stderr": 0.032658195885126966, + "acc_norm": 0.6915422885572139, + "acc_norm_stderr": 0.032658195885126966 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5028901734104047, + "acc_stderr": 0.038124005659748335, + "acc_norm": 0.5028901734104047, + "acc_norm_stderr": 0.038124005659748335 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.025487187147859372, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.025487187147859372 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5486111111111112, + "acc_stderr": 0.04161402398403279, + "acc_norm": 0.5486111111111112, + "acc_norm_stderr": 0.04161402398403279 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.71, + "acc_stderr": 0.04560480215720683, + "acc_norm": 0.71, + "acc_norm_stderr": 0.04560480215720683 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.6098265895953757, + "acc_stderr": 0.026261677607806642, + "acc_norm": 0.6098265895953757, + "acc_norm_stderr": 0.026261677607806642 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5030674846625767, + "acc_stderr": 0.03928297078179663, + "acc_norm": 0.5030674846625767, + "acc_norm_stderr": 0.03928297078179663 + }, + "harness|ko_mmlu_prehistory|5": { 
+ "acc": 0.6296296296296297, + "acc_stderr": 0.026869490744815254, + "acc_norm": 0.6296296296296297, + "acc_norm_stderr": 0.026869490744815254 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.047609522856952365, + "acc_norm": 0.34, + "acc_norm_stderr": 0.047609522856952365 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7461139896373057, + "acc_stderr": 0.03141024780565318, + "acc_norm": 0.7461139896373057, + "acc_norm_stderr": 0.03141024780565318 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.43859649122807015, + "acc_stderr": 0.04668000738510455, + "acc_norm": 0.43859649122807015, + "acc_norm_stderr": 0.04668000738510455 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6972477064220184, + "acc_stderr": 0.01969871143475634, + "acc_norm": 0.6972477064220184, + "acc_norm_stderr": 0.01969871143475634 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.40476190476190477, + "acc_stderr": 0.043902592653775614, + "acc_norm": 0.40476190476190477, + "acc_norm_stderr": 0.043902592653775614 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.6274509803921569, + "acc_stderr": 0.027684181883302898, + "acc_norm": 0.6274509803921569, + "acc_norm_stderr": 0.027684181883302898 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.66, + "acc_stderr": 0.04760952285695237, + "acc_norm": 0.66, + "acc_norm_stderr": 0.04760952285695237 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7851239669421488, + "acc_stderr": 0.037494924487096966, + "acc_norm": 0.7851239669421488, + "acc_norm_stderr": 0.037494924487096966 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5986842105263158, + "acc_stderr": 0.039889037033362836, + "acc_norm": 0.5986842105263158, + "acc_norm_stderr": 0.039889037033362836 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5473856209150327, + "acc_stderr": 0.020136790918492523, + "acc_norm": 0.5473856209150327, + "acc_norm_stderr": 0.020136790918492523 + 
}, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.4078014184397163, + "acc_stderr": 0.029316011776343562, + "acc_norm": 0.4078014184397163, + "acc_norm_stderr": 0.029316011776343562 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.44642857142857145, + "acc_stderr": 0.047184714852195886, + "acc_norm": 0.44642857142857145, + "acc_norm_stderr": 0.047184714852195886 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4675925925925926, + "acc_stderr": 0.03402801581358966, + "acc_norm": 0.4675925925925926, + "acc_norm_stderr": 0.03402801581358966 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23687150837988827, + "acc_stderr": 0.014219570788103986, + "acc_norm": 0.23687150837988827, + "acc_norm_stderr": 0.014219570788103986 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.71, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.71, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5147058823529411, + "acc_stderr": 0.03035969707904612, + "acc_norm": 0.5147058823529411, + "acc_norm_stderr": 0.03035969707904612 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5061224489795918, + "acc_stderr": 0.03200682020163908, + "acc_norm": 0.5061224489795918, + "acc_norm_stderr": 0.03200682020163908 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7721518987341772, + "acc_stderr": 0.027303484599069405, + "acc_norm": 0.7721518987341772, + "acc_norm_stderr": 0.027303484599069405 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.41134289439374183, + "acc_stderr": 0.01256788267380369, + "acc_norm": 0.41134289439374183, + "acc_norm_stderr": 0.01256788267380369 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6568627450980392, + "acc_stderr": 
0.033321399446680854, + "acc_norm": 0.6568627450980392, + "acc_norm_stderr": 0.033321399446680854 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.7212121212121212, + "acc_stderr": 0.03501438706296781, + "acc_norm": 0.7212121212121212, + "acc_norm_stderr": 0.03501438706296781 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3953488372093023, + "mc1_stderr": 0.01711581563241818, + "mc2": 0.5700355897565702, + "mc2_stderr": 0.01594828499248219 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.500590318772137, + "acc_stderr": 0.017190342123448586, + "acc_norm": 0.5159386068476978, + "acc_norm_stderr": 0.017181617837190195 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, 
+ "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "davidkim205/komt-solar-10.7b-sft-v2", + "model_sha": "4a3a1acafd911da25449e1699686aed653acc7d9", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/davidkim205/komt-solar-10.7b-sft-v3/result_2024-03-03 14:26:42.json b/davidkim205/komt-solar-10.7b-sft-v3/result_2024-03-03 14:26:42.json new file mode 100644 index 0000000000000000000000000000000000000000..7f78236a777a04590d8e051d7760a67cd05208c4 --- /dev/null +++ 
b/davidkim205/komt-solar-10.7b-sft-v3/result_2024-03-03 14:26:42.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.5042662116040956, + "acc_stderr": 0.014610858923956952, + "acc_norm": 0.5588737201365188, + "acc_norm_stderr": 0.01450974774906466 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4965146385182235, + "acc_stderr": 0.004989660180792165, + "acc_norm": 0.68442541326429, + "acc_norm_stderr": 0.004637944965914619 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6023391812865497, + "acc_stderr": 0.0375363895576169, + "acc_norm": 0.6023391812865497, + "acc_norm_stderr": 0.0375363895576169 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6407766990291263, + "acc_stderr": 0.04750458399041697, + "acc_norm": 0.6407766990291263, + "acc_norm_stderr": 0.04750458399041697 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6538952745849298, + "acc_stderr": 0.017011965266412077, + "acc_norm": 0.6538952745849298, + "acc_norm_stderr": 0.017011965266412077 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.042925967182569816, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.042925967182569816 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4553191489361702, + "acc_stderr": 0.03255525359340355, + "acc_norm": 0.4553191489361702, + "acc_norm_stderr": 0.03255525359340355 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4397590361445783, + "acc_stderr": 0.03864139923699122, + "acc_norm": 0.4397590361445783, + "acc_norm_stderr": 0.03864139923699122 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6109324758842444, + "acc_stderr": 0.027690337536485376, + "acc_norm": 0.6109324758842444, + "acc_norm_stderr": 0.027690337536485376 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5695067264573991, + "acc_stderr": 
0.033231973029429394, + "acc_norm": 0.5695067264573991, + "acc_norm_stderr": 0.033231973029429394 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6259541984732825, + "acc_stderr": 0.04243869242230524, + "acc_norm": 0.6259541984732825, + "acc_norm_stderr": 0.04243869242230524 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7070707070707071, + "acc_stderr": 0.032424979581788166, + "acc_norm": 0.7070707070707071, + "acc_norm_stderr": 0.032424979581788166 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.42758620689655175, + "acc_stderr": 0.041227371113703316, + "acc_norm": 0.42758620689655175, + "acc_norm_stderr": 0.041227371113703316 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.39215686274509803, + "acc_stderr": 0.048580835742663454, + "acc_norm": 0.39215686274509803, + "acc_norm_stderr": 0.048580835742663454 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6218487394957983, + "acc_stderr": 0.031499305777849054, + "acc_norm": 0.6218487394957983, + "acc_norm_stderr": 0.031499305777849054 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5692307692307692, + "acc_stderr": 0.025106820660539757, + "acc_norm": 0.5692307692307692, + "acc_norm_stderr": 0.025106820660539757 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6018518518518519, + "acc_stderr": 0.04732332615978814, + "acc_norm": 0.6018518518518519, + "acc_norm_stderr": 0.04732332615978814 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 
0.4039408866995074, + "acc_stderr": 0.03452453903822039, + "acc_norm": 0.4039408866995074, + "acc_norm_stderr": 0.03452453903822039 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5903225806451613, + "acc_stderr": 0.02797605491534737, + "acc_norm": 0.5903225806451613, + "acc_norm_stderr": 0.02797605491534737 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7521367521367521, + "acc_stderr": 0.028286324075564424, + "acc_norm": 0.7521367521367521, + "acc_norm_stderr": 0.028286324075564424 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5132075471698113, + "acc_stderr": 0.030762134874500476, + "acc_norm": 0.5132075471698113, + "acc_norm_stderr": 0.030762134874500476 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5727272727272728, + "acc_stderr": 0.04738198703545483, + "acc_norm": 0.5727272727272728, + "acc_norm_stderr": 0.04738198703545483 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.028742040903948482, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.028742040903948482 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.03780445850526733, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.03780445850526733 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6716417910447762, + "acc_stderr": 0.033206858897443244, + "acc_norm": 0.6716417910447762, + "acc_norm_stderr": 0.033206858897443244 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.49710982658959535, + "acc_stderr": 0.038124005659748335, + "acc_norm": 0.49710982658959535, + "acc_norm_stderr": 0.038124005659748335 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.42328042328042326, + "acc_stderr": 0.025446365634406786, + "acc_norm": 0.42328042328042326, + "acc_norm_stderr": 0.025446365634406786 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5208333333333334, + "acc_stderr": 0.04177578950739994, + "acc_norm": 0.5208333333333334, + 
"acc_norm_stderr": 0.04177578950739994 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.69, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.69, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5202312138728323, + "acc_stderr": 0.026897049996382868, + "acc_norm": 0.5202312138728323, + "acc_norm_stderr": 0.026897049996382868 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.50920245398773, + "acc_stderr": 0.03927705600787443, + "acc_norm": 0.50920245398773, + "acc_norm_stderr": 0.03927705600787443 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.595679012345679, + "acc_stderr": 0.027306625297327677, + "acc_norm": 0.595679012345679, + "acc_norm_stderr": 0.027306625297327677 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7046632124352331, + "acc_stderr": 0.032922966391551414, + "acc_norm": 0.7046632124352331, + "acc_norm_stderr": 0.032922966391551414 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.47368421052631576, + "acc_stderr": 0.046970851366478626, + "acc_norm": 0.47368421052631576, + "acc_norm_stderr": 0.046970851366478626 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6770642201834862, + "acc_stderr": 0.020048115923415325, + "acc_norm": 0.6770642201834862, + "acc_norm_stderr": 0.020048115923415325 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.38095238095238093, + "acc_stderr": 0.043435254289490965, + "acc_norm": 0.38095238095238093, + "acc_norm_stderr": 0.043435254289490965 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5718954248366013, + "acc_stderr": 0.028332397483664274, + "acc_norm": 
0.5718954248366013, + "acc_norm_stderr": 0.028332397483664274 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7024793388429752, + "acc_stderr": 0.04173349148083499, + "acc_norm": 0.7024793388429752, + "acc_norm_stderr": 0.04173349148083499 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5855263157894737, + "acc_stderr": 0.040089737857792046, + "acc_norm": 0.5855263157894737, + "acc_norm_stderr": 0.040089737857792046 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5098039215686274, + "acc_stderr": 0.02022394600507431, + "acc_norm": 0.5098039215686274, + "acc_norm_stderr": 0.02022394600507431 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.35106382978723405, + "acc_stderr": 0.028473501272963764, + "acc_norm": 0.35106382978723405, + "acc_norm_stderr": 0.028473501272963764 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.39285714285714285, + "acc_stderr": 0.04635550135609976, + "acc_norm": 0.39285714285714285, + "acc_norm_stderr": 0.04635550135609976 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5092592592592593, + "acc_stderr": 0.03409386946992699, + "acc_norm": 0.5092592592592593, + "acc_norm_stderr": 0.03409386946992699 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.39888268156424583, + "acc_stderr": 0.01637696614261008, + "acc_norm": 0.39888268156424583, + "acc_norm_stderr": 0.01637696614261008 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4963235294117647, + "acc_stderr": 
0.030372015885428195, + "acc_norm": 0.4963235294117647, + "acc_norm_stderr": 0.030372015885428195 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6040816326530613, + "acc_stderr": 0.03130802899065686, + "acc_norm": 0.6040816326530613, + "acc_norm_stderr": 0.03130802899065686 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7510548523206751, + "acc_stderr": 0.028146970599422644, + "acc_norm": 0.7510548523206751, + "acc_norm_stderr": 0.028146970599422644 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.43285528031290743, + "acc_stderr": 0.01265456523462286, + "acc_norm": 0.43285528031290743, + "acc_norm_stderr": 0.01265456523462286 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6274509803921569, + "acc_stderr": 0.03393388584958406, + "acc_norm": 0.6274509803921569, + "acc_norm_stderr": 0.03393388584958406 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6727272727272727, + "acc_stderr": 0.03663974994391242, + "acc_norm": 0.6727272727272727, + "acc_norm_stderr": 0.03663974994391242 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.4883720930232558, + "mc1_stderr": 0.017498767175740098, + "mc2": 0.6664856342285238, + "mc2_stderr": 0.01574469205435104 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5312868949232585, + "acc_stderr": 0.017156666859785456, + "acc_norm": 0.538370720188902, + "acc_norm_stderr": 0.017139660221845564 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + 
"harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + 
"harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "davidkim205/komt-solar-10.7b-sft-v3", + "model_sha": "ebf09a336f80a4f84fecd9e412f002374a162473", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/davidkim205/komt-solar-10.7b-sft-v4/result_2024-03-03 14:38:52.json b/davidkim205/komt-solar-10.7b-sft-v4/result_2024-03-03 14:38:52.json new file mode 100644 index 0000000000000000000000000000000000000000..92379c988445ee4f6d97642c59ca68ebb4161428 --- /dev/null +++ b/davidkim205/komt-solar-10.7b-sft-v4/result_2024-03-03 14:38:52.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4991467576791809, + "acc_stderr": 0.014611369529813276, + "acc_norm": 0.5597269624573379, + "acc_norm_stderr": 0.014506769524804248 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4805815574586736, + "acc_stderr": 0.004986016938678531, + "acc_norm": 0.6654052977494523, + "acc_norm_stderr": 0.004708842600177422 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5906432748538012, + "acc_stderr": 0.03771283107626544, + "acc_norm": 0.5906432748538012, + "acc_norm_stderr": 0.03771283107626544 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6310679611650486, + "acc_stderr": 0.0477761518115674, + "acc_norm": 0.6310679611650486, + "acc_norm_stderr": 0.0477761518115674 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6602809706257982, + "acc_stderr": 0.016936394114301638, + "acc_norm": 0.6602809706257982, + "acc_norm_stderr": 0.016936394114301638 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45185185185185184, + "acc_stderr": 0.04299268905480863, + "acc_norm": 0.45185185185185184, + "acc_norm_stderr": 0.04299268905480863 + }, + 
"harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.44680851063829785, + "acc_stderr": 0.0325005368436584, + "acc_norm": 0.44680851063829785, + "acc_norm_stderr": 0.0325005368436584 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4578313253012048, + "acc_stderr": 0.03878626771002361, + "acc_norm": 0.4578313253012048, + "acc_norm_stderr": 0.03878626771002361 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6270096463022508, + "acc_stderr": 0.027466610213140112, + "acc_norm": 0.6270096463022508, + "acc_norm_stderr": 0.027466610213140112 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5515695067264574, + "acc_stderr": 0.033378837362550984, + "acc_norm": 0.5515695067264574, + "acc_norm_stderr": 0.033378837362550984 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6335877862595419, + "acc_stderr": 0.04225875451969638, + "acc_norm": 0.6335877862595419, + "acc_norm_stderr": 0.04225875451969638 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7222222222222222, + "acc_stderr": 0.03191178226713547, + "acc_norm": 0.7222222222222222, + "acc_norm_stderr": 0.03191178226713547 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4413793103448276, + "acc_stderr": 0.04137931034482758, + "acc_norm": 0.4413793103448276, + "acc_norm_stderr": 0.04137931034482758 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.04835503696107223, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.04835503696107223 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6176470588235294, + "acc_stderr": 0.03156663099215416, + "acc_norm": 0.6176470588235294, + "acc_norm_stderr": 
0.03156663099215416 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5615384615384615, + "acc_stderr": 0.02515826601686861, + "acc_norm": 0.5615384615384615, + "acc_norm_stderr": 0.02515826601686861 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.57, + "acc_stderr": 0.04975698519562429, + "acc_norm": 0.57, + "acc_norm_stderr": 0.04975698519562429 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5925925925925926, + "acc_stderr": 0.047500773411999854, + "acc_norm": 0.5925925925925926, + "acc_norm_stderr": 0.047500773411999854 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4039408866995074, + "acc_stderr": 0.03452453903822039, + "acc_norm": 0.4039408866995074, + "acc_norm_stderr": 0.03452453903822039 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5935483870967742, + "acc_stderr": 0.027941727346256308, + "acc_norm": 0.5935483870967742, + "acc_norm_stderr": 0.027941727346256308 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7692307692307693, + "acc_stderr": 0.027601921381417618, + "acc_norm": 0.7692307692307693, + "acc_norm_stderr": 0.027601921381417618 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5169811320754717, + "acc_stderr": 0.030755120364119905, + "acc_norm": 0.5169811320754717, + "acc_norm_stderr": 0.030755120364119905 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5909090909090909, + "acc_stderr": 0.047093069786618966, + "acc_norm": 0.5909090909090909, + "acc_norm_stderr": 0.047093069786618966 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.02944316932303154, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.02944316932303154 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3443708609271523, + "acc_stderr": 0.03879687024073327, + "acc_norm": 
0.3443708609271523, + "acc_norm_stderr": 0.03879687024073327 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6417910447761194, + "acc_stderr": 0.03390393042268814, + "acc_norm": 0.6417910447761194, + "acc_norm_stderr": 0.03390393042268814 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5144508670520231, + "acc_stderr": 0.03810871630454764, + "acc_norm": 0.5144508670520231, + "acc_norm_stderr": 0.03810871630454764 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.4470899470899471, + "acc_stderr": 0.025606723995777025, + "acc_norm": 0.4470899470899471, + "acc_norm_stderr": 0.025606723995777025 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5, + "acc_stderr": 0.04181210050035455, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04181210050035455 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.72, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.72, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5433526011560693, + "acc_stderr": 0.026817718130348916, + "acc_norm": 0.5433526011560693, + "acc_norm_stderr": 0.026817718130348916 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5214723926380368, + "acc_stderr": 0.03924746876751129, + "acc_norm": 0.5214723926380368, + "acc_norm_stderr": 0.03924746876751129 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6080246913580247, + "acc_stderr": 0.027163686038271146, + "acc_norm": 0.6080246913580247, + "acc_norm_stderr": 0.027163686038271146 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7046632124352331, + "acc_stderr": 0.032922966391551414, + "acc_norm": 0.7046632124352331, + 
"acc_norm_stderr": 0.032922966391551414 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.49122807017543857, + "acc_stderr": 0.04702880432049615, + "acc_norm": 0.49122807017543857, + "acc_norm_stderr": 0.04702880432049615 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6825688073394496, + "acc_stderr": 0.0199571521984605, + "acc_norm": 0.6825688073394496, + "acc_norm_stderr": 0.0199571521984605 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.04360314860077459, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.04360314860077459 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.6013071895424836, + "acc_stderr": 0.028036092273891772, + "acc_norm": 0.6013071895424836, + "acc_norm_stderr": 0.028036092273891772 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6859504132231405, + "acc_stderr": 0.04236964753041019, + "acc_norm": 0.6859504132231405, + "acc_norm_stderr": 0.04236964753041019 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5723684210526315, + "acc_stderr": 0.04026097083296564, + "acc_norm": 0.5723684210526315, + "acc_norm_stderr": 0.04026097083296564 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5049019607843137, + "acc_stderr": 0.020226862710039466, + "acc_norm": 0.5049019607843137, + "acc_norm_stderr": 0.020226862710039466 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3404255319148936, + "acc_stderr": 0.028267657482650147, + "acc_norm": 0.3404255319148936, + "acc_norm_stderr": 0.028267657482650147 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.39285714285714285, + "acc_stderr": 0.04635550135609976, + "acc_norm": 0.39285714285714285, + "acc_norm_stderr": 0.04635550135609976 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5324074074074074, + "acc_stderr": 
0.03402801581358966, + "acc_norm": 0.5324074074074074, + "acc_norm_stderr": 0.03402801581358966 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.3888268156424581, + "acc_stderr": 0.01630389953079613, + "acc_norm": 0.3888268156424581, + "acc_norm_stderr": 0.01630389953079613 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.62, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.62, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5, + "acc_stderr": 0.030372836961539352, + "acc_norm": 0.5, + "acc_norm_stderr": 0.030372836961539352 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5959183673469388, + "acc_stderr": 0.031414708025865885, + "acc_norm": 0.5959183673469388, + "acc_norm_stderr": 0.031414708025865885 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.759493670886076, + "acc_stderr": 0.027820781981149678, + "acc_norm": 0.759493670886076, + "acc_norm_stderr": 0.027820781981149678 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.4211212516297262, + "acc_stderr": 0.012610325733489903, + "acc_norm": 0.4211212516297262, + "acc_norm_stderr": 0.012610325733489903 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6323529411764706, + "acc_stderr": 0.03384132045674118, + "acc_norm": 0.6323529411764706, + "acc_norm_stderr": 0.03384132045674118 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6606060606060606, + "acc_stderr": 0.03697442205031595, + "acc_norm": 0.6606060606060606, + "acc_norm_stderr": 0.03697442205031595 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.44920440636474906, + "mc1_stderr": 0.017412941986115316, + "mc2": 0.6087190042401585, + "mc2_stderr": 0.016024504257026027 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5336481700118064, + "acc_stderr": 
0.017151384117131872, + "acc_norm": 0.5442739079102715, + "acc_norm_stderr": 0.01712282914329264 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + 
"harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "davidkim205/komt-solar-10.7b-sft-v4", + "model_sha": "d1bcbbe102ff4ab86aec5e567e2dd09e1bb7d6f6", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/davidkim205/komt-solar-10.7b-sft-v5/result_2024-03-06 05:29:31.json b/davidkim205/komt-solar-10.7b-sft-v5/result_2024-03-06 05:29:31.json new file mode 100644 index 0000000000000000000000000000000000000000..290cf90143e0b5602e81eeb86f6b9d6bc9a1a6af --- /dev/null +++ b/davidkim205/komt-solar-10.7b-sft-v5/result_2024-03-06 05:29:31.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.5051194539249146, + "acc_stderr": 0.014610624890309154, + "acc_norm": 0.5708191126279863, + "acc_norm_stderr": 0.014464085894870651 + }, + "harness|ko_hellaswag|10": { + "acc": 0.505875323640709, + "acc_stderr": 0.004989436910754241, + "acc_norm": 0.696176060545708, + "acc_norm_stderr": 0.004589676274079075 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6140350877192983, + "acc_stderr": 
0.03733756969066164, + "acc_norm": 0.6140350877192983, + "acc_norm_stderr": 0.03733756969066164 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6407766990291263, + "acc_stderr": 0.04750458399041697, + "acc_norm": 0.6407766990291263, + "acc_norm_stderr": 0.04750458399041697 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6615581098339719, + "acc_stderr": 0.016920869586210675, + "acc_norm": 0.6615581098339719, + "acc_norm_stderr": 0.016920869586210675 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45185185185185184, + "acc_stderr": 0.04299268905480863, + "acc_norm": 0.45185185185185184, + "acc_norm_stderr": 0.04299268905480863 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4595744680851064, + "acc_stderr": 0.032579014820998356, + "acc_norm": 0.4595744680851064, + "acc_norm_stderr": 0.032579014820998356 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.41566265060240964, + "acc_stderr": 0.038367221765980515, + "acc_norm": 0.41566265060240964, + "acc_norm_stderr": 0.038367221765980515 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6109324758842444, + "acc_stderr": 0.027690337536485376, + "acc_norm": 0.6109324758842444, + "acc_norm_stderr": 0.027690337536485376 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5829596412556054, + "acc_stderr": 0.03309266936071721, + "acc_norm": 0.5829596412556054, + "acc_norm_stderr": 0.03309266936071721 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6564885496183206, + "acc_stderr": 0.041649760719448786, + "acc_norm": 0.6564885496183206, + "acc_norm_stderr": 0.041649760719448786 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.696969696969697, + "acc_stderr": 
0.03274287914026868, + "acc_norm": 0.696969696969697, + "acc_norm_stderr": 0.03274287914026868 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.42758620689655175, + "acc_stderr": 0.041227371113703316, + "acc_norm": 0.42758620689655175, + "acc_norm_stderr": 0.041227371113703316 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.4019607843137255, + "acc_stderr": 0.048786087144669955, + "acc_norm": 0.4019607843137255, + "acc_norm_stderr": 0.048786087144669955 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6218487394957983, + "acc_stderr": 0.031499305777849054, + "acc_norm": 0.6218487394957983, + "acc_norm_stderr": 0.031499305777849054 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5692307692307692, + "acc_stderr": 0.02510682066053976, + "acc_norm": 0.5692307692307692, + "acc_norm_stderr": 0.02510682066053976 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.59, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.59, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6111111111111112, + "acc_stderr": 0.0471282125742677, + "acc_norm": 0.6111111111111112, + "acc_norm_stderr": 0.0471282125742677 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4088669950738916, + "acc_stderr": 0.034590588158832314, + "acc_norm": 0.4088669950738916, + "acc_norm_stderr": 0.034590588158832314 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5903225806451613, + "acc_stderr": 0.02797605491534737, + "acc_norm": 0.5903225806451613, + "acc_norm_stderr": 0.02797605491534737 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7478632478632479, + "acc_stderr": 0.02844796547623102, + "acc_norm": 0.7478632478632479, + "acc_norm_stderr": 0.02844796547623102 + }, + "harness|ko_mmlu_clinical_knowledge|5": { 
+ "acc": 0.5245283018867924, + "acc_stderr": 0.030735822206205615, + "acc_norm": 0.5245283018867924, + "acc_norm_stderr": 0.030735822206205615 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5909090909090909, + "acc_stderr": 0.04709306978661896, + "acc_norm": 0.5909090909090909, + "acc_norm_stderr": 0.04709306978661896 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.34444444444444444, + "acc_stderr": 0.028972648884844267, + "acc_norm": 0.34444444444444444, + "acc_norm_stderr": 0.028972648884844267 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.03802039760107903, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.03802039760107903 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6716417910447762, + "acc_stderr": 0.033206858897443244, + "acc_norm": 0.6716417910447762, + "acc_norm_stderr": 0.033206858897443244 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4797687861271676, + "acc_stderr": 0.03809342081273957, + "acc_norm": 0.4797687861271676, + "acc_norm_stderr": 0.03809342081273957 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.42063492063492064, + "acc_stderr": 0.025424835086924006, + "acc_norm": 0.42063492063492064, + "acc_norm_stderr": 0.025424835086924006 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5277777777777778, + "acc_stderr": 0.04174752578923185, + "acc_norm": 0.5277777777777778, + "acc_norm_stderr": 0.04174752578923185 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.69, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.69, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5289017341040463, + "acc_stderr": 0.026874085883518348, + "acc_norm": 0.5289017341040463, + "acc_norm_stderr": 0.026874085883518348 + }, + 
"harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5276073619631901, + "acc_stderr": 0.03922378290610991, + "acc_norm": 0.5276073619631901, + "acc_norm_stderr": 0.03922378290610991 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.595679012345679, + "acc_stderr": 0.027306625297327677, + "acc_norm": 0.595679012345679, + "acc_norm_stderr": 0.027306625297327677 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7046632124352331, + "acc_stderr": 0.032922966391551414, + "acc_norm": 0.7046632124352331, + "acc_norm_stderr": 0.032922966391551414 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.47368421052631576, + "acc_stderr": 0.046970851366478626, + "acc_norm": 0.47368421052631576, + "acc_norm_stderr": 0.046970851366478626 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6844036697247706, + "acc_stderr": 0.01992611751386967, + "acc_norm": 0.6844036697247706, + "acc_norm_stderr": 0.01992611751386967 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.04285714285714281, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.04285714285714281 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5784313725490197, + "acc_stderr": 0.02827549015679146, + "acc_norm": 0.5784313725490197, + "acc_norm_stderr": 0.02827549015679146 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.59, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.59, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6859504132231405, + "acc_stderr": 0.04236964753041019, + "acc_norm": 0.6859504132231405, + "acc_norm_stderr": 0.04236964753041019 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5921052631578947, + "acc_stderr": 0.03999309712777474, + "acc_norm": 0.5921052631578947, + "acc_norm_stderr": 
0.03999309712777474 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5049019607843137, + "acc_stderr": 0.020226862710039473, + "acc_norm": 0.5049019607843137, + "acc_norm_stderr": 0.020226862710039473 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.35815602836879434, + "acc_stderr": 0.02860208586275942, + "acc_norm": 0.35815602836879434, + "acc_norm_stderr": 0.02860208586275942 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4017857142857143, + "acc_stderr": 0.04653333146973647, + "acc_norm": 0.4017857142857143, + "acc_norm_stderr": 0.04653333146973647 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5092592592592593, + "acc_stderr": 0.03409386946992699, + "acc_norm": 0.5092592592592593, + "acc_norm_stderr": 0.03409386946992699 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.4033519553072626, + "acc_stderr": 0.01640712303219525, + "acc_norm": 0.4033519553072626, + "acc_norm_stderr": 0.01640712303219525 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4889705882352941, + "acc_stderr": 0.030365446477275675, + "acc_norm": 0.4889705882352941, + "acc_norm_stderr": 0.030365446477275675 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6040816326530613, + "acc_stderr": 0.03130802899065686, + "acc_norm": 0.6040816326530613, + "acc_norm_stderr": 0.03130802899065686 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7426160337552743, + "acc_stderr": 0.02845882099146031, + "acc_norm": 0.7426160337552743, + "acc_norm_stderr": 0.02845882099146031 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.44002607561929596, + "acc_stderr": 0.012678037478574511, + 
"acc_norm": 0.44002607561929596, + "acc_norm_stderr": 0.012678037478574511 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6421568627450981, + "acc_stderr": 0.03364487286088298, + "acc_norm": 0.6421568627450981, + "acc_norm_stderr": 0.03364487286088298 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6727272727272727, + "acc_stderr": 0.03663974994391242, + "acc_norm": 0.6727272727272727, + "acc_norm_stderr": 0.03663974994391242 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.4944920440636475, + "mc1_stderr": 0.01750243899045107, + "mc2": 0.6751153395729821, + "mc2_stderr": 0.015762612692171007 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.551357733175915, + "acc_stderr": 0.0170994305147258, + "acc_norm": 0.5572609208972845, + "acc_norm_stderr": 0.017077254131556214 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "davidkim205/komt-solar-10.7b-sft-v5", + "model_sha": "ccf4456f5e1564abc80d1985046f359e68b8304a", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/davidkim205/komt-solar-10.7b-sft-v6/result_2024-03-07 04:53:20.json b/davidkim205/komt-solar-10.7b-sft-v6/result_2024-03-07 
04:53:20.json new file mode 100644 index 0000000000000000000000000000000000000000..1b853a41024187245e3064d49b8bc59bf3636755 --- /dev/null +++ b/davidkim205/komt-solar-10.7b-sft-v6/result_2024-03-07 04:53:20.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.43600682593856654, + "acc_stderr": 0.014491225699230918, + "acc_norm": 0.4991467576791809, + "acc_norm_stderr": 0.014611369529813276 + }, + "harness|ko_hellaswag|10": { + "acc": 0.46813383788090024, + "acc_stderr": 0.004979637330230307, + "acc_norm": 0.6437960565624378, + "acc_norm_stderr": 0.00477897803138964 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6081871345029239, + "acc_stderr": 0.037439798259263996, + "acc_norm": 0.6081871345029239, + "acc_norm_stderr": 0.037439798259263996 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6601941747572816, + "acc_stderr": 0.046897659372781335, + "acc_norm": 0.6601941747572816, + "acc_norm_stderr": 0.046897659372781335 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6883780332056194, + "acc_stderr": 0.016562433867284176, + "acc_norm": 0.6883780332056194, + "acc_norm_stderr": 0.016562433867284176 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.43703703703703706, + "acc_stderr": 0.04284958639753399, + "acc_norm": 0.43703703703703706, + "acc_norm_stderr": 0.04284958639753399 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4765957446808511, + "acc_stderr": 0.03265019475033583, + "acc_norm": 0.4765957446808511, + "acc_norm_stderr": 0.03265019475033583 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.5060240963855421, + "acc_stderr": 0.038922121953330446, + "acc_norm": 0.5060240963855421, + "acc_norm_stderr": 0.038922121953330446 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6366559485530546, + "acc_stderr": 0.02731684767419271, + "acc_norm": 
0.6366559485530546, + "acc_norm_stderr": 0.02731684767419271 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.6278026905829597, + "acc_stderr": 0.03244305283008731, + "acc_norm": 0.6278026905829597, + "acc_norm_stderr": 0.03244305283008731 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6564885496183206, + "acc_stderr": 0.041649760719448786, + "acc_norm": 0.6564885496183206, + "acc_norm_stderr": 0.041649760719448786 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.52, + "acc_stderr": 0.05021167315686781, + "acc_norm": 0.52, + "acc_norm_stderr": 0.05021167315686781 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7575757575757576, + "acc_stderr": 0.030532892233932046, + "acc_norm": 0.7575757575757576, + "acc_norm_stderr": 0.030532892233932046 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4413793103448276, + "acc_stderr": 0.04137931034482758, + "acc_norm": 0.4413793103448276, + "acc_norm_stderr": 0.04137931034482758 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.35294117647058826, + "acc_stderr": 0.04755129616062947, + "acc_norm": 0.35294117647058826, + "acc_norm_stderr": 0.04755129616062947 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5882352941176471, + "acc_stderr": 0.031968769891957786, + "acc_norm": 0.5882352941176471, + "acc_norm_stderr": 0.031968769891957786 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5717948717948718, + "acc_stderr": 0.025088301454694824, + "acc_norm": 0.5717948717948718, + "acc_norm_stderr": 0.025088301454694824 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6018518518518519, + "acc_stderr": 
0.04732332615978814, + "acc_norm": 0.6018518518518519, + "acc_norm_stderr": 0.04732332615978814 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3645320197044335, + "acc_stderr": 0.0338640574606209, + "acc_norm": 0.3645320197044335, + "acc_norm_stderr": 0.0338640574606209 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6064516129032258, + "acc_stderr": 0.027791878753132264, + "acc_norm": 0.6064516129032258, + "acc_norm_stderr": 0.027791878753132264 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7649572649572649, + "acc_stderr": 0.02777883590493543, + "acc_norm": 0.7649572649572649, + "acc_norm_stderr": 0.02777883590493543 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5283018867924528, + "acc_stderr": 0.0307235352490061, + "acc_norm": 0.5283018867924528, + "acc_norm_stderr": 0.0307235352490061 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5818181818181818, + "acc_stderr": 0.04724577405731572, + "acc_norm": 0.5818181818181818, + "acc_norm_stderr": 0.04724577405731572 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.31851851851851853, + "acc_stderr": 0.02840653309060846, + "acc_norm": 0.31851851851851853, + "acc_norm_stderr": 0.02840653309060846 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.40397350993377484, + "acc_stderr": 0.0400648568536534, + "acc_norm": 0.40397350993377484, + "acc_norm_stderr": 0.0400648568536534 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6865671641791045, + "acc_stderr": 0.032801882053486414, + "acc_norm": 0.6865671641791045, + "acc_norm_stderr": 0.032801882053486414 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.47398843930635837, + "acc_stderr": 0.03807301726504511, + "acc_norm": 0.47398843930635837, + "acc_norm_stderr": 0.03807301726504511 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.42328042328042326, + "acc_stderr": 0.02544636563440678, + "acc_norm": 0.42328042328042326, + "acc_norm_stderr": 0.02544636563440678 + }, 
+ "harness|ko_mmlu_college_biology|5": { + "acc": 0.5208333333333334, + "acc_stderr": 0.041775789507399935, + "acc_norm": 0.5208333333333334, + "acc_norm_stderr": 0.041775789507399935 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.72, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.72, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5520231213872833, + "acc_stderr": 0.02677299065336182, + "acc_norm": 0.5520231213872833, + "acc_norm_stderr": 0.02677299065336182 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5276073619631901, + "acc_stderr": 0.0392237829061099, + "acc_norm": 0.5276073619631901, + "acc_norm_stderr": 0.0392237829061099 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6141975308641975, + "acc_stderr": 0.027085401226132143, + "acc_norm": 0.6141975308641975, + "acc_norm_stderr": 0.027085401226132143 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7253886010362695, + "acc_stderr": 0.032210245080411544, + "acc_norm": 0.7253886010362695, + "acc_norm_stderr": 0.032210245080411544 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.4649122807017544, + "acc_stderr": 0.046920083813689104, + "acc_norm": 0.4649122807017544, + "acc_norm_stderr": 0.046920083813689104 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6880733944954128, + "acc_stderr": 0.019862967976707245, + "acc_norm": 0.6880733944954128, + "acc_norm_stderr": 0.019862967976707245 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3492063492063492, + "acc_stderr": 0.04263906892795133, + "acc_norm": 0.3492063492063492, + "acc_norm_stderr": 
0.04263906892795133 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.6078431372549019, + "acc_stderr": 0.02795604616542452, + "acc_norm": 0.6078431372549019, + "acc_norm_stderr": 0.02795604616542452 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7024793388429752, + "acc_stderr": 0.041733491480834994, + "acc_norm": 0.7024793388429752, + "acc_norm_stderr": 0.041733491480834994 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5921052631578947, + "acc_stderr": 0.039993097127774734, + "acc_norm": 0.5921052631578947, + "acc_norm_stderr": 0.039993097127774734 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.49673202614379086, + "acc_stderr": 0.020227402794434867, + "acc_norm": 0.49673202614379086, + "acc_norm_stderr": 0.020227402794434867 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3829787234042553, + "acc_stderr": 0.028999080904806185, + "acc_norm": 0.3829787234042553, + "acc_norm_stderr": 0.028999080904806185 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.39285714285714285, + "acc_stderr": 0.04635550135609976, + "acc_norm": 0.39285714285714285, + "acc_norm_stderr": 0.04635550135609976 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5185185185185185, + "acc_stderr": 0.03407632093854053, + "acc_norm": 0.5185185185185185, + "acc_norm_stderr": 0.03407632093854053 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.3128491620111732, + "acc_stderr": 0.015506892594647272, + "acc_norm": 0.3128491620111732, + "acc_norm_stderr": 0.015506892594647272 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.63, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.63, 
+ "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5110294117647058, + "acc_stderr": 0.030365446477275675, + "acc_norm": 0.5110294117647058, + "acc_norm_stderr": 0.030365446477275675 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5795918367346938, + "acc_stderr": 0.03160106993449601, + "acc_norm": 0.5795918367346938, + "acc_norm_stderr": 0.03160106993449601 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7763713080168776, + "acc_stderr": 0.027123298205229966, + "acc_norm": 0.7763713080168776, + "acc_norm_stderr": 0.027123298205229966 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.41916558018252936, + "acc_stderr": 0.012602244505788219, + "acc_norm": 0.41916558018252936, + "acc_norm_stderr": 0.012602244505788219 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6421568627450981, + "acc_stderr": 0.03364487286088299, + "acc_norm": 0.6421568627450981, + "acc_norm_stderr": 0.03364487286088299 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6848484848484848, + "acc_stderr": 0.0362773057502241, + "acc_norm": 0.6848484848484848, + "acc_norm_stderr": 0.0362773057502241 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.4589963280293758, + "mc1_stderr": 0.01744454444766119, + "mc2": 0.597177808097494, + "mc2_stderr": 0.015409968714601414 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4970484061393152, + "acc_stderr": 0.017190054580194694, + "acc_norm": 0.5242030696576151, + "acc_norm_stderr": 0.017170202466520748 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + 
"harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + 
"harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "davidkim205/komt-solar-10.7b-sft-v6", + "model_sha": "cad3b6cdadd9b647ee923d9631a82786ceecb68e", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/davidkim205/komt-solar-10.7b-v1/result_2024-02-11 05:23:47.json b/davidkim205/komt-solar-10.7b-v1/result_2024-02-11 05:23:47.json new file mode 100644 index 0000000000000000000000000000000000000000..627da4e541999f74fd5c176eae053e10853bcbbf --- /dev/null +++ b/davidkim205/komt-solar-10.7b-v1/result_2024-02-11 05:23:47.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4658703071672355, + "acc_stderr": 0.014577311315231102, + "acc_norm": 0.5460750853242321, + "acc_norm_stderr": 0.014549221105171865 + }, + "harness|ko_hellaswag|10": { + "acc": 0.46116311491734713, + "acc_stderr": 0.00497470642843429, + "acc_norm": 0.6367257518422625, + "acc_norm_stderr": 0.004799599840397383 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.038110796698335316, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.038110796698335316 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6310679611650486, + "acc_stderr": 0.0477761518115674, + "acc_norm": 0.6310679611650486, + "acc_norm_stderr": 0.0477761518115674 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6500638569604087, + "acc_stderr": 0.017055679797150426, + "acc_norm": 0.6500638569604087, + "acc_norm_stderr": 0.017055679797150426 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45185185185185184, 
+ "acc_stderr": 0.042992689054808624, + "acc_norm": 0.45185185185185184, + "acc_norm_stderr": 0.042992689054808624 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.46808510638297873, + "acc_stderr": 0.03261936918467382, + "acc_norm": 0.46808510638297873, + "acc_norm_stderr": 0.03261936918467382 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.41566265060240964, + "acc_stderr": 0.038367221765980515, + "acc_norm": 0.41566265060240964, + "acc_norm_stderr": 0.038367221765980515 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.639871382636656, + "acc_stderr": 0.027264297599804015, + "acc_norm": 0.639871382636656, + "acc_norm_stderr": 0.027264297599804015 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.6053811659192825, + "acc_stderr": 0.03280400504755291, + "acc_norm": 0.6053811659192825, + "acc_norm_stderr": 0.03280400504755291 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.648854961832061, + "acc_stderr": 0.0418644516301375, + "acc_norm": 0.648854961832061, + "acc_norm_stderr": 0.0418644516301375 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7121212121212122, + "acc_stderr": 0.03225883512300993, + "acc_norm": 0.7121212121212122, + "acc_norm_stderr": 0.03225883512300993 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.43448275862068964, + "acc_stderr": 0.041307408795554966, + "acc_norm": 0.43448275862068964, + "acc_norm_stderr": 0.041307408795554966 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.04617034827006717, + "acc_norm": 0.3137254901960784, + "acc_norm_stderr": 0.04617034827006717 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + 
"acc": 0.634453781512605, + "acc_stderr": 0.03128217706368461, + "acc_norm": 0.634453781512605, + "acc_norm_stderr": 0.03128217706368461 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5692307692307692, + "acc_stderr": 0.02510682066053976, + "acc_norm": 0.5692307692307692, + "acc_norm_stderr": 0.02510682066053976 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.59, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.59, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6481481481481481, + "acc_stderr": 0.04616631111801713, + "acc_norm": 0.6481481481481481, + "acc_norm_stderr": 0.04616631111801713 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.37438423645320196, + "acc_stderr": 0.03405155380561952, + "acc_norm": 0.37438423645320196, + "acc_norm_stderr": 0.03405155380561952 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5838709677419355, + "acc_stderr": 0.028040981380761533, + "acc_norm": 0.5838709677419355, + "acc_norm_stderr": 0.028040981380761533 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7564102564102564, + "acc_stderr": 0.028120966503914394, + "acc_norm": 0.7564102564102564, + "acc_norm_stderr": 0.028120966503914394 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5207547169811321, + "acc_stderr": 0.030746349975723463, + "acc_norm": 0.5207547169811321, + "acc_norm_stderr": 0.030746349975723463 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5727272727272728, + "acc_stderr": 0.04738198703545483, + "acc_norm": 0.5727272727272728, + "acc_norm_stderr": 0.04738198703545483 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.32222222222222224, + "acc_stderr": 0.028493465091028593, + "acc_norm": 0.32222222222222224, + "acc_norm_stderr": 0.028493465091028593 + }, + 
"harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + "acc_stderr": 0.03734535676787198, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.03734535676787198 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7164179104477612, + "acc_stderr": 0.03187187537919795, + "acc_norm": 0.7164179104477612, + "acc_norm_stderr": 0.03187187537919795 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5260115606936416, + "acc_stderr": 0.038073017265045125, + "acc_norm": 0.5260115606936416, + "acc_norm_stderr": 0.038073017265045125 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.38095238095238093, + "acc_stderr": 0.025010749116137605, + "acc_norm": 0.38095238095238093, + "acc_norm_stderr": 0.025010749116137605 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.04155319955593146, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.04155319955593146 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.66, + "acc_stderr": 0.04760952285695238, + "acc_norm": 0.66, + "acc_norm_stderr": 0.04760952285695238 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.6069364161849711, + "acc_stderr": 0.026296227915613674, + "acc_norm": 0.6069364161849711, + "acc_norm_stderr": 0.026296227915613674 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5030674846625767, + "acc_stderr": 0.03928297078179663, + "acc_norm": 0.5030674846625767, + "acc_norm_stderr": 0.03928297078179663 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6265432098765432, + "acc_stderr": 0.026915003011380154, + "acc_norm": 0.6265432098765432, + "acc_norm_stderr": 0.026915003011380154 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + 
"harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7150259067357513, + "acc_stderr": 0.032577140777096614, + "acc_norm": 0.7150259067357513, + "acc_norm_stderr": 0.032577140777096614 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.4473684210526316, + "acc_stderr": 0.046774730044912005, + "acc_norm": 0.4473684210526316, + "acc_norm_stderr": 0.046774730044912005 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.673394495412844, + "acc_stderr": 0.0201069908899373, + "acc_norm": 0.673394495412844, + "acc_norm_stderr": 0.0201069908899373 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4126984126984127, + "acc_stderr": 0.04403438954768177, + "acc_norm": 0.4126984126984127, + "acc_norm_stderr": 0.04403438954768177 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5784313725490197, + "acc_stderr": 0.02827549015679145, + "acc_norm": 0.5784313725490197, + "acc_norm_stderr": 0.02827549015679145 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.65, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.65, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.71900826446281, + "acc_stderr": 0.04103203830514511, + "acc_norm": 0.71900826446281, + "acc_norm_stderr": 0.04103203830514511 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5263157894736842, + "acc_stderr": 0.04063302731486671, + "acc_norm": 0.5263157894736842, + "acc_norm_stderr": 0.04063302731486671 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5163398692810458, + "acc_stderr": 0.02021703065318646, + "acc_norm": 0.5163398692810458, + "acc_norm_stderr": 0.02021703065318646 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.40070921985815605, + "acc_stderr": 0.02923346574557309, + "acc_norm": 0.40070921985815605, + "acc_norm_stderr": 0.02923346574557309 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4017857142857143, + "acc_stderr": 0.04653333146973646, + "acc_norm": 
0.4017857142857143, + "acc_norm_stderr": 0.04653333146973646 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.49537037037037035, + "acc_stderr": 0.03409825519163572, + "acc_norm": 0.49537037037037035, + "acc_norm_stderr": 0.03409825519163572 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2837988826815642, + "acc_stderr": 0.015078358970751755, + "acc_norm": 0.2837988826815642, + "acc_norm_stderr": 0.015078358970751755 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.65, + "acc_stderr": 0.04793724854411019, + "acc_norm": 0.65, + "acc_norm_stderr": 0.04793724854411019 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5110294117647058, + "acc_stderr": 0.030365446477275675, + "acc_norm": 0.5110294117647058, + "acc_norm_stderr": 0.030365446477275675 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5673469387755102, + "acc_stderr": 0.031717528240626645, + "acc_norm": 0.5673469387755102, + "acc_norm_stderr": 0.031717528240626645 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7805907172995781, + "acc_stderr": 0.026939106581553945, + "acc_norm": 0.7805907172995781, + "acc_norm_stderr": 0.026939106581553945 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.4211212516297262, + "acc_stderr": 0.012610325733489903, + "acc_norm": 0.4211212516297262, + "acc_norm_stderr": 0.012610325733489903 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6470588235294118, + "acc_stderr": 0.03354092437591519, + "acc_norm": 0.6470588235294118, + "acc_norm_stderr": 0.03354092437591519 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.7090909090909091, + "acc_stderr": 0.03546563019624336, + "acc_norm": 0.7090909090909091, + "acc_norm_stderr": 0.03546563019624336 + }, + "harness|ko_truthfulqa_mc|0": { + 
"mc1": 0.4418604651162791, + "mc1_stderr": 0.017384767478986218, + "mc2": 0.6010661067825942, + "mc2_stderr": 0.015934657775409665 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5761511216056671, + "acc_stderr": 0.016989810834628253, + "acc_norm": 0.58913813459268, + "acc_norm_stderr": 0.01691497276784105 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "davidkim205/komt-solar-10.7b-v1", + "model_sha": "467231dd51ba05bd83b55bff576b0279a6739fb4", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/davidkim205/komt-solar-10.7b-v2/result_2024-02-11 14:34:50.json b/davidkim205/komt-solar-10.7b-v2/result_2024-02-11 14:34:50.json new file mode 100644 index 0000000000000000000000000000000000000000..bea92c29dc8428b6bd31f4c1b333efb4b8be8a7b --- /dev/null +++ b/davidkim205/komt-solar-10.7b-v2/result_2024-02-11 14:34:50.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.42150170648464164, + "acc_stderr": 0.014430197069326028, + "acc_norm": 0.4991467576791809, + "acc_norm_stderr": 0.014611369529813286 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4508066122286397, + 
"acc_stderr": 0.00496557224680386, + "acc_norm": 0.6130252937661821, + "acc_norm_stderr": 0.004860623733461127 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6023391812865497, + "acc_stderr": 0.03753638955761691, + "acc_norm": 0.6023391812865497, + "acc_norm_stderr": 0.03753638955761691 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6310679611650486, + "acc_stderr": 0.0477761518115674, + "acc_norm": 0.6310679611650486, + "acc_norm_stderr": 0.0477761518115674 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6500638569604087, + "acc_stderr": 0.017055679797150426, + "acc_norm": 0.6500638569604087, + "acc_norm_stderr": 0.017055679797150426 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4740740740740741, + "acc_stderr": 0.04313531696750574, + "acc_norm": 0.4740740740740741, + "acc_norm_stderr": 0.04313531696750574 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4978723404255319, + "acc_stderr": 0.032685726586674915, + "acc_norm": 0.4978723404255319, + "acc_norm_stderr": 0.032685726586674915 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42168674698795183, + "acc_stderr": 0.03844453181770917, + "acc_norm": 0.42168674698795183, + "acc_norm_stderr": 0.03844453181770917 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6302250803858521, + "acc_stderr": 0.027417996705630998, + "acc_norm": 0.6302250803858521, + "acc_norm_stderr": 0.027417996705630998 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5605381165919282, + "acc_stderr": 0.03331092511038179, + "acc_norm": 0.5605381165919282, + "acc_norm_stderr": 0.03331092511038179 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6717557251908397, + "acc_stderr": 0.04118438565806299, + "acc_norm": 0.6717557251908397, + "acc_norm_stderr": 0.04118438565806299 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.5, + 
"acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7272727272727273, + "acc_stderr": 0.03173071239071724, + "acc_norm": 0.7272727272727273, + "acc_norm_stderr": 0.03173071239071724 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.46206896551724136, + "acc_stderr": 0.041546596717075474, + "acc_norm": 0.46206896551724136, + "acc_norm_stderr": 0.041546596717075474 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.30392156862745096, + "acc_stderr": 0.045766654032077636, + "acc_norm": 0.30392156862745096, + "acc_norm_stderr": 0.045766654032077636 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6134453781512605, + "acc_stderr": 0.03163145807552379, + "acc_norm": 0.6134453781512605, + "acc_norm_stderr": 0.03163145807552379 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5641025641025641, + "acc_stderr": 0.025141801511177488, + "acc_norm": 0.5641025641025641, + "acc_norm_stderr": 0.025141801511177488 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6296296296296297, + "acc_stderr": 0.04668408033024931, + "acc_norm": 0.6296296296296297, + "acc_norm_stderr": 0.04668408033024931 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3793103448275862, + "acc_stderr": 0.034139638059062345, + "acc_norm": 0.3793103448275862, + "acc_norm_stderr": 0.034139638059062345 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5774193548387097, + "acc_stderr": 0.02810096472427264, + "acc_norm": 0.5774193548387097, + "acc_norm_stderr": 0.02810096472427264 + }, + 
"harness|ko_mmlu_marketing|5": { + "acc": 0.7564102564102564, + "acc_stderr": 0.028120966503914394, + "acc_norm": 0.7564102564102564, + "acc_norm_stderr": 0.028120966503914394 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5056603773584906, + "acc_stderr": 0.030770900763851295, + "acc_norm": 0.5056603773584906, + "acc_norm_stderr": 0.030770900763851295 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6272727272727273, + "acc_stderr": 0.04631381319425465, + "acc_norm": 0.6272727272727273, + "acc_norm_stderr": 0.04631381319425465 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.362962962962963, + "acc_stderr": 0.02931820364520686, + "acc_norm": 0.362962962962963, + "acc_norm_stderr": 0.02931820364520686 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + "acc_stderr": 0.03684881521389023, + "acc_norm": 0.2847682119205298, + "acc_norm_stderr": 0.03684881521389023 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7164179104477612, + "acc_stderr": 0.03187187537919795, + "acc_norm": 0.7164179104477612, + "acc_norm_stderr": 0.03187187537919795 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.49710982658959535, + "acc_stderr": 0.038124005659748335, + "acc_norm": 0.49710982658959535, + "acc_norm_stderr": 0.038124005659748335 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3915343915343915, + "acc_stderr": 0.025138091388851112, + "acc_norm": 0.3915343915343915, + "acc_norm_stderr": 0.025138091388851112 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5208333333333334, + "acc_stderr": 0.041775789507399935, + "acc_norm": 0.5208333333333334, + "acc_norm_stderr": 0.041775789507399935 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.71, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.71, + "acc_norm_stderr": 
0.04560480215720684 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5924855491329479, + "acc_stderr": 0.026454578146931505, + "acc_norm": 0.5924855491329479, + "acc_norm_stderr": 0.026454578146931505 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5153374233128835, + "acc_stderr": 0.039265223787088445, + "acc_norm": 0.5153374233128835, + "acc_norm_stderr": 0.039265223787088445 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6080246913580247, + "acc_stderr": 0.027163686038271146, + "acc_norm": 0.6080246913580247, + "acc_norm_stderr": 0.027163686038271146 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.694300518134715, + "acc_stderr": 0.03324837939758159, + "acc_norm": 0.694300518134715, + "acc_norm_stderr": 0.03324837939758159 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.40350877192982454, + "acc_stderr": 0.04615186962583703, + "acc_norm": 0.40350877192982454, + "acc_norm_stderr": 0.04615186962583703 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.689908256880734, + "acc_stderr": 0.01983084968443975, + "acc_norm": 0.689908256880734, + "acc_norm_stderr": 0.01983084968443975 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.044444444444444495, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.044444444444444495 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5686274509803921, + "acc_stderr": 0.028358956313423552, + "acc_norm": 0.5686274509803921, + "acc_norm_stderr": 0.028358956313423552 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6942148760330579, + "acc_stderr": 0.04205953933884122, + "acc_norm": 
0.6942148760330579, + "acc_norm_stderr": 0.04205953933884122 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5328947368421053, + "acc_stderr": 0.040601270352363966, + "acc_norm": 0.5328947368421053, + "acc_norm_stderr": 0.040601270352363966 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5032679738562091, + "acc_stderr": 0.020227402794434864, + "acc_norm": 0.5032679738562091, + "acc_norm_stderr": 0.020227402794434864 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.4078014184397163, + "acc_stderr": 0.02931601177634356, + "acc_norm": 0.4078014184397163, + "acc_norm_stderr": 0.02931601177634356 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.375, + "acc_stderr": 0.04595091388086298, + "acc_norm": 0.375, + "acc_norm_stderr": 0.04595091388086298 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5231481481481481, + "acc_stderr": 0.034063153607115065, + "acc_norm": 0.5231481481481481, + "acc_norm_stderr": 0.034063153607115065 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27262569832402234, + "acc_stderr": 0.01489339173524962, + "acc_norm": 0.27262569832402234, + "acc_norm_stderr": 0.01489339173524962 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.66, + "acc_stderr": 0.04760952285695237, + "acc_norm": 0.66, + "acc_norm_stderr": 0.04760952285695237 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5, + "acc_stderr": 0.030372836961539352, + "acc_norm": 0.5, + "acc_norm_stderr": 0.030372836961539352 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5591836734693878, + "acc_stderr": 0.03178419114175363, + "acc_norm": 0.5591836734693878, + "acc_norm_stderr": 0.03178419114175363 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7679324894514767, + "acc_stderr": 
0.027479744550808517, + "acc_norm": 0.7679324894514767, + "acc_norm_stderr": 0.027479744550808517 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.41916558018252936, + "acc_stderr": 0.012602244505788224, + "acc_norm": 0.41916558018252936, + "acc_norm_stderr": 0.012602244505788224 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6568627450980392, + "acc_stderr": 0.033321399446680854, + "acc_norm": 0.6568627450980392, + "acc_norm_stderr": 0.033321399446680854 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.037563357751878954, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.037563357751878954 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.40024479804161567, + "mc1_stderr": 0.017151605555749135, + "mc2": 0.5666062021741995, + "mc2_stderr": 0.01578099863297735 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5820543093270366, + "acc_stderr": 0.016957292005279703, + "acc_norm": 0.5962219598583235, + "acc_norm_stderr": 0.016869031540298625 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + 
"harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "davidkim205/komt-solar-10.7b-v2", + "model_sha": "59426aa5acc9d2b84dd1b03485ce399574cba491", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + 
"override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/davidkim205/nox-solar-10.7b-v1/result_2024-03-15 06:36:15.json b/davidkim205/nox-solar-10.7b-v1/result_2024-03-15 06:36:15.json new file mode 100644 index 0000000000000000000000000000000000000000..6feedb32be422e20a5d8232d5461a6a29f92bb09 --- /dev/null +++ b/davidkim205/nox-solar-10.7b-v1/result_2024-03-15 06:36:15.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.6902730375426621, + "acc_stderr": 0.013512058415238363, + "acc_norm": 0.7303754266211604, + "acc_norm_stderr": 0.012968040686869159 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4905397331208923, + "acc_stderr": 0.004988888194063278, + "acc_norm": 0.6340370444134634, + "acc_norm_stderr": 0.004807146925162061 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.7368421052631579, + "acc_stderr": 0.03377310252209205, + "acc_norm": 0.7368421052631579, + "acc_norm_stderr": 0.03377310252209205 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6990291262135923, + "acc_stderr": 0.04541609446503948, + "acc_norm": 0.6990291262135923, + "acc_norm_stderr": 0.04541609446503948 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.7343550446998723, + "acc_stderr": 0.01579430248788872, + "acc_norm": 0.7343550446998723, + "acc_norm_stderr": 0.01579430248788872 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.04309732901036354, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.04309732901036354 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.42, + "acc_stderr": 0.04960449637488584, + "acc_norm": 0.42, + "acc_norm_stderr": 0.04960449637488584 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.5361702127659574, + "acc_stderr": 0.03260038511835771, + "acc_norm": 0.5361702127659574, + "acc_norm_stderr": 0.03260038511835771 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.5301204819277109, + "acc_stderr": 0.03885425420866768, + 
"acc_norm": 0.5301204819277109, + "acc_norm_stderr": 0.03885425420866768 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6495176848874598, + "acc_stderr": 0.027098652621301747, + "acc_norm": 0.6495176848874598, + "acc_norm_stderr": 0.027098652621301747 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.6547085201793722, + "acc_stderr": 0.03191100192835794, + "acc_norm": 0.6547085201793722, + "acc_norm_stderr": 0.03191100192835794 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6335877862595419, + "acc_stderr": 0.042258754519696386, + "acc_norm": 0.6335877862595419, + "acc_norm_stderr": 0.042258754519696386 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7727272727272727, + "acc_stderr": 0.029857515673386417, + "acc_norm": 0.7727272727272727, + "acc_norm_stderr": 0.029857515673386417 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5379310344827586, + "acc_stderr": 0.04154659671707548, + "acc_norm": 0.5379310344827586, + "acc_norm_stderr": 0.04154659671707548 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3235294117647059, + "acc_stderr": 0.046550104113196177, + "acc_norm": 0.3235294117647059, + "acc_norm_stderr": 0.046550104113196177 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6470588235294118, + "acc_stderr": 0.03104194130405929, + "acc_norm": 0.6470588235294118, + "acc_norm_stderr": 0.03104194130405929 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.6461538461538462, + "acc_stderr": 0.02424378399406214, + "acc_norm": 0.6461538461538462, + "acc_norm_stderr": 0.02424378399406214 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.69, + "acc_stderr": 0.046482319871173156, + "acc_norm": 0.69, + "acc_norm_stderr": 0.046482319871173156 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 
0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6388888888888888, + "acc_stderr": 0.04643454608906275, + "acc_norm": 0.6388888888888888, + "acc_norm_stderr": 0.04643454608906275 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.5073891625615764, + "acc_stderr": 0.035176035403610105, + "acc_norm": 0.5073891625615764, + "acc_norm_stderr": 0.035176035403610105 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6451612903225806, + "acc_stderr": 0.027218889773308757, + "acc_norm": 0.6451612903225806, + "acc_norm_stderr": 0.027218889773308757 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.8589743589743589, + "acc_stderr": 0.022801382534597528, + "acc_norm": 0.8589743589743589, + "acc_norm_stderr": 0.022801382534597528 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.6113207547169811, + "acc_stderr": 0.030000485448675986, + "acc_norm": 0.6113207547169811, + "acc_norm_stderr": 0.030000485448675986 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6454545454545455, + "acc_stderr": 0.04582004841505417, + "acc_norm": 0.6454545454545455, + "acc_norm_stderr": 0.04582004841505417 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3851851851851852, + "acc_stderr": 0.029670906124630886, + "acc_norm": 0.3851851851851852, + "acc_norm_stderr": 0.029670906124630886 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.39072847682119205, + "acc_stderr": 0.039837983066598075, + "acc_norm": 0.39072847682119205, + "acc_norm_stderr": 0.039837983066598075 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7313432835820896, + "acc_stderr": 0.03134328358208954, + "acc_norm": 0.7313432835820896, + "acc_norm_stderr": 0.03134328358208954 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5838150289017341, + "acc_stderr": 0.03758517775404947, + "acc_norm": 0.5838150289017341, + "acc_norm_stderr": 0.03758517775404947 + }, + 
"harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.43386243386243384, + "acc_stderr": 0.025525034382474887, + "acc_norm": 0.43386243386243384, + "acc_norm_stderr": 0.025525034382474887 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5347222222222222, + "acc_stderr": 0.04171115858181618, + "acc_norm": 0.5347222222222222, + "acc_norm_stderr": 0.04171115858181618 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.8, + "acc_stderr": 0.040201512610368445, + "acc_norm": 0.8, + "acc_norm_stderr": 0.040201512610368445 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.630057803468208, + "acc_stderr": 0.025992472029306383, + "acc_norm": 0.630057803468208, + "acc_norm_stderr": 0.025992472029306383 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.6441717791411042, + "acc_stderr": 0.03761521380046734, + "acc_norm": 0.6441717791411042, + "acc_norm_stderr": 0.03761521380046734 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6172839506172839, + "acc_stderr": 0.027044538138402588, + "acc_norm": 0.6172839506172839, + "acc_norm_stderr": 0.027044538138402588 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.772020725388601, + "acc_stderr": 0.030276909945178256, + "acc_norm": 0.772020725388601, + "acc_norm_stderr": 0.030276909945178256 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.4298245614035088, + "acc_stderr": 0.04657047260594964, + "acc_norm": 0.4298245614035088, + "acc_norm_stderr": 0.04657047260594964 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.7486238532110092, + "acc_stderr": 0.018599206360287415, + "acc_norm": 0.7486238532110092, + "acc_norm_stderr": 0.018599206360287415 + }, + 
"harness|ko_mmlu_formal_logic|5": { + "acc": 0.373015873015873, + "acc_stderr": 0.04325506042017086, + "acc_norm": 0.373015873015873, + "acc_norm_stderr": 0.04325506042017086 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.6535947712418301, + "acc_stderr": 0.02724561304721536, + "acc_norm": 0.6535947712418301, + "acc_norm_stderr": 0.02724561304721536 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.63, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.63, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7355371900826446, + "acc_stderr": 0.04026187527591207, + "acc_norm": 0.7355371900826446, + "acc_norm_stderr": 0.04026187527591207 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.6513157894736842, + "acc_stderr": 0.038781398887976104, + "acc_norm": 0.6513157894736842, + "acc_norm_stderr": 0.038781398887976104 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5718954248366013, + "acc_stderr": 0.020017629214213104, + "acc_norm": 0.5718954248366013, + "acc_norm_stderr": 0.020017629214213104 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.425531914893617, + "acc_stderr": 0.029494827600144373, + "acc_norm": 0.425531914893617, + "acc_norm_stderr": 0.029494827600144373 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.41964285714285715, + "acc_stderr": 0.04684099321077106, + "acc_norm": 0.41964285714285715, + "acc_norm_stderr": 0.04684099321077106 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5324074074074074, + "acc_stderr": 0.03402801581358966, + "acc_norm": 0.5324074074074074, + "acc_norm_stderr": 0.03402801581358966 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.39776536312849164, + "acc_stderr": 0.016369204971262985, + "acc_norm": 0.39776536312849164, + "acc_norm_stderr": 0.016369204971262985 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.47, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.47, + 
"acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.73, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.73, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5772058823529411, + "acc_stderr": 0.030008562845003476, + "acc_norm": 0.5772058823529411, + "acc_norm_stderr": 0.030008562845003476 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6326530612244898, + "acc_stderr": 0.03086214492108757, + "acc_norm": 0.6326530612244898, + "acc_norm_stderr": 0.03086214492108757 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7426160337552743, + "acc_stderr": 0.028458820991460305, + "acc_norm": 0.7426160337552743, + "acc_norm_stderr": 0.028458820991460305 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.43415906127770537, + "acc_stderr": 0.01265903323706725, + "acc_norm": 0.43415906127770537, + "acc_norm_stderr": 0.01265903323706725 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6421568627450981, + "acc_stderr": 0.03364487286088298, + "acc_norm": 0.6421568627450981, + "acc_norm_stderr": 0.03364487286088298 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6606060606060606, + "acc_stderr": 0.03697442205031595, + "acc_norm": 0.6606060606060606, + "acc_norm_stderr": 0.03697442205031595 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.6119951040391677, + "mc1_stderr": 0.017058761501347976, + "mc2": 0.7086797951654106, + "mc2_stderr": 0.01459154706118535 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5053128689492326, + "acc_stderr": 0.01718938362722971, + "acc_norm": 0.5242030696576151, + "acc_norm_stderr": 0.017170202466520748 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + 
"harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + 
"harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "davidkim205/nox-solar-10.7b-v1", + "model_sha": "3476c344dcf2efdba3975355f758e2bbfb655405", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/davidkim205/nox-solar-10.7b-v2/result_2024-03-15 07:03:43.json b/davidkim205/nox-solar-10.7b-v2/result_2024-03-15 07:03:43.json new file mode 100644 index 0000000000000000000000000000000000000000..62531ad51c78799e759692d588641173f4be3739 --- /dev/null +++ b/davidkim205/nox-solar-10.7b-v2/result_2024-03-15 07:03:43.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.6936860068259386, + "acc_stderr": 0.013470584417276513, + "acc_norm": 0.734641638225256, + "acc_norm_stderr": 0.01290255476231397 + }, + "harness|ko_hellaswag|10": { + "acc": 0.5207130053774148, + "acc_stderr": 0.00498549805519036, + "acc_norm": 0.6731726747659829, + "acc_norm_stderr": 0.004680949283855315 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.7426900584795322, + "acc_stderr": 0.03352799844161865, + "acc_norm": 0.7426900584795322, + "acc_norm_stderr": 0.03352799844161865 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.7281553398058253, + "acc_stderr": 0.044052680241409216, + "acc_norm": 0.7281553398058253, + "acc_norm_stderr": 0.044052680241409216 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.7343550446998723, + "acc_stderr": 0.01579430248788872, 
+ "acc_norm": 0.7343550446998723, + "acc_norm_stderr": 0.01579430248788872 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.04292596718256981, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.04292596718256981 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.5319148936170213, + "acc_stderr": 0.03261936918467381, + "acc_norm": 0.5319148936170213, + "acc_norm_stderr": 0.03261936918467381 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.5481927710843374, + "acc_stderr": 0.03874371556587953, + "acc_norm": 0.5481927710843374, + "acc_norm_stderr": 0.03874371556587953 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6463022508038585, + "acc_stderr": 0.027155208103200868, + "acc_norm": 0.6463022508038585, + "acc_norm_stderr": 0.027155208103200868 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.672645739910314, + "acc_stderr": 0.03149384670994131, + "acc_norm": 0.672645739910314, + "acc_norm_stderr": 0.03149384670994131 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6335877862595419, + "acc_stderr": 0.042258754519696386, + "acc_norm": 0.6335877862595419, + "acc_norm_stderr": 0.042258754519696386 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.55, + "acc_stderr": 0.05000000000000001, + "acc_norm": 0.55, + "acc_norm_stderr": 0.05000000000000001 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7676767676767676, + "acc_stderr": 0.030088629490217487, + "acc_norm": 0.7676767676767676, + "acc_norm_stderr": 0.030088629490217487 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5517241379310345, + "acc_stderr": 0.04144311810878152, + "acc_norm": 0.5517241379310345, + "acc_norm_stderr": 0.04144311810878152 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04690650298201942, 
+ "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04690650298201942 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6638655462184874, + "acc_stderr": 0.03068473711513536, + "acc_norm": 0.6638655462184874, + "acc_norm_stderr": 0.03068473711513536 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.6307692307692307, + "acc_stderr": 0.024468615241478933, + "acc_norm": 0.6307692307692307, + "acc_norm_stderr": 0.024468615241478933 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.7, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.7, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6574074074074074, + "acc_stderr": 0.04587904741301812, + "acc_norm": 0.6574074074074074, + "acc_norm_stderr": 0.04587904741301812 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.46798029556650245, + "acc_stderr": 0.035107665979592154, + "acc_norm": 0.46798029556650245, + "acc_norm_stderr": 0.035107665979592154 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.635483870967742, + "acc_stderr": 0.027379871229943238, + "acc_norm": 0.635483870967742, + "acc_norm_stderr": 0.027379871229943238 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.8504273504273504, + "acc_stderr": 0.023365051491753722, + "acc_norm": 0.8504273504273504, + "acc_norm_stderr": 0.023365051491753722 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.630188679245283, + "acc_stderr": 0.029711421880107933, + "acc_norm": 0.630188679245283, + "acc_norm_stderr": 0.029711421880107933 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.04607582090719976, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.04607582090719976 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 
0.4074074074074074, + "acc_stderr": 0.029958249250082114, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.029958249250082114 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3708609271523179, + "acc_stderr": 0.03943966699183629, + "acc_norm": 0.3708609271523179, + "acc_norm_stderr": 0.03943966699183629 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7263681592039801, + "acc_stderr": 0.03152439186555402, + "acc_norm": 0.7263681592039801, + "acc_norm_stderr": 0.03152439186555402 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5433526011560693, + "acc_stderr": 0.03798106566014498, + "acc_norm": 0.5433526011560693, + "acc_norm_stderr": 0.03798106566014498 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.4523809523809524, + "acc_stderr": 0.02563425811555496, + "acc_norm": 0.4523809523809524, + "acc_norm_stderr": 0.02563425811555496 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5972222222222222, + "acc_stderr": 0.04101405519842426, + "acc_norm": 0.5972222222222222, + "acc_norm_stderr": 0.04101405519842426 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.8, + "acc_stderr": 0.04020151261036845, + "acc_norm": 0.8, + "acc_norm_stderr": 0.04020151261036845 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.653179190751445, + "acc_stderr": 0.025624723994030457, + "acc_norm": 0.653179190751445, + "acc_norm_stderr": 0.025624723994030457 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.6319018404907976, + "acc_stderr": 0.03789213935838396, + "acc_norm": 0.6319018404907976, + "acc_norm_stderr": 0.03789213935838396 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6450617283950617, + "acc_stderr": 0.026624152478845853, + "acc_norm": 0.6450617283950617, + "acc_norm_stderr": 0.026624152478845853 + }, + 
"harness|ko_mmlu_college_mathematics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7616580310880829, + "acc_stderr": 0.030748905363909902, + "acc_norm": 0.7616580310880829, + "acc_norm_stderr": 0.030748905363909902 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.39473684210526316, + "acc_stderr": 0.045981880578165414, + "acc_norm": 0.39473684210526316, + "acc_norm_stderr": 0.045981880578165414 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.781651376146789, + "acc_stderr": 0.017712600528722748, + "acc_norm": 0.781651376146789, + "acc_norm_stderr": 0.017712600528722748 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.04360314860077459, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.04360314860077459 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.026992544339297233, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.026992544339297233 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.743801652892562, + "acc_stderr": 0.03984979653302873, + "acc_norm": 0.743801652892562, + "acc_norm_stderr": 0.03984979653302873 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.6381578947368421, + "acc_stderr": 0.03910525752849724, + "acc_norm": 0.6381578947368421, + "acc_norm_stderr": 0.03910525752849724 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5849673202614379, + "acc_stderr": 0.01993362777685742, + "acc_norm": 0.5849673202614379, + "acc_norm_stderr": 0.01993362777685742 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.4326241134751773, + "acc_stderr": 0.029555454236778855, + "acc_norm": 0.4326241134751773, + 
"acc_norm_stderr": 0.029555454236778855 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4017857142857143, + "acc_stderr": 0.04653333146973646, + "acc_norm": 0.4017857142857143, + "acc_norm_stderr": 0.04653333146973646 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5231481481481481, + "acc_stderr": 0.034063153607115065, + "acc_norm": 0.5231481481481481, + "acc_norm_stderr": 0.034063153607115065 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.38100558659217876, + "acc_stderr": 0.016242028834053613, + "acc_norm": 0.38100558659217876, + "acc_norm_stderr": 0.016242028834053613 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.72, + "acc_stderr": 0.045126085985421296, + "acc_norm": 0.72, + "acc_norm_stderr": 0.045126085985421296 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5882352941176471, + "acc_stderr": 0.029896163033125474, + "acc_norm": 0.5882352941176471, + "acc_norm_stderr": 0.029896163033125474 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6326530612244898, + "acc_stderr": 0.03086214492108757, + "acc_norm": 0.6326530612244898, + "acc_norm_stderr": 0.03086214492108757 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7552742616033755, + "acc_stderr": 0.027985699387036416, + "acc_norm": 0.7552742616033755, + "acc_norm_stderr": 0.027985699387036416 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.4511082138200782, + "acc_stderr": 0.012709037347346233, + "acc_norm": 0.4511082138200782, + "acc_norm_stderr": 0.012709037347346233 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6715686274509803, + "acc_stderr": 0.03296245110172229, + "acc_norm": 0.6715686274509803, + "acc_norm_stderr": 0.03296245110172229 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 
0.6666666666666666, + "acc_stderr": 0.036810508691615486, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.036810508691615486 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.6181150550795593, + "mc1_stderr": 0.017008101939163495, + "mc2": 0.7193567953210699, + "mc2_stderr": 0.014371874198410623 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5171192443919717, + "acc_stderr": 0.01718027524608563, + "acc_norm": 0.5548996458087367, + "acc_norm_stderr": 0.017086417431005464 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + 
"harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "davidkim205/nox-solar-10.7b-v2", + "model_sha": "5a3ad612138c5d1c8855b121a4f2d31d60c65a8b", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/davidkim205/nox-solar-10.7b-v3/result_2024-03-15 21:41:15.json b/davidkim205/nox-solar-10.7b-v3/result_2024-03-15 21:41:15.json new file mode 100644 index 0000000000000000000000000000000000000000..b09aab445dbdad8c20c614e31bb228cc66484a2c --- /dev/null +++ b/davidkim205/nox-solar-10.7b-v3/result_2024-03-15 21:41:15.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.6979522184300341, + "acc_stderr": 
0.013417519144716413, + "acc_norm": 0.7457337883959044, + "acc_norm_stderr": 0.012724999945157734 + }, + "harness|ko_hellaswag|10": { + "acc": 0.5613423620792671, + "acc_stderr": 0.004952087083128917, + "acc_norm": 0.7220673172674766, + "acc_norm_stderr": 0.004470644845242887 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.7309941520467836, + "acc_stderr": 0.0340105262010409, + "acc_norm": 0.7309941520467836, + "acc_norm_stderr": 0.0340105262010409 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.7184466019417476, + "acc_stderr": 0.044532548363264673, + "acc_norm": 0.7184466019417476, + "acc_norm_stderr": 0.044532548363264673 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.7164750957854407, + "acc_stderr": 0.016117318166832293, + "acc_norm": 0.7164750957854407, + "acc_norm_stderr": 0.016117318166832293 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.043163785995113245, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.043163785995113245 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.5361702127659574, + "acc_stderr": 0.0326003851183577, + "acc_norm": 0.5361702127659574, + "acc_norm_stderr": 0.0326003851183577 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.5180722891566265, + "acc_stderr": 0.03889951252827216, + "acc_norm": 0.5180722891566265, + "acc_norm_stderr": 0.03889951252827216 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6270096463022508, + "acc_stderr": 0.0274666102131401, + "acc_norm": 0.6270096463022508, + "acc_norm_stderr": 0.0274666102131401 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.6681614349775785, + "acc_stderr": 0.03160295143776679, + "acc_norm": 0.6681614349775785, + "acc_norm_stderr": 0.03160295143776679 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5954198473282443, + "acc_stderr": 
0.043046937953806645, + "acc_norm": 0.5954198473282443, + "acc_norm_stderr": 0.043046937953806645 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.51, + "acc_stderr": 0.050241839379569095, + "acc_norm": 0.51, + "acc_norm_stderr": 0.050241839379569095 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7727272727272727, + "acc_stderr": 0.029857515673386414, + "acc_norm": 0.7727272727272727, + "acc_norm_stderr": 0.029857515673386414 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5517241379310345, + "acc_stderr": 0.041443118108781526, + "acc_norm": 0.5517241379310345, + "acc_norm_stderr": 0.041443118108781526 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.04488482852329017, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.04488482852329017 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6218487394957983, + "acc_stderr": 0.03149930577784906, + "acc_norm": 0.6218487394957983, + "acc_norm_stderr": 0.03149930577784906 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.6461538461538462, + "acc_stderr": 0.024243783994062146, + "acc_norm": 0.6461538461538462, + "acc_norm_stderr": 0.024243783994062146 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.68, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.68, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6759259259259259, + "acc_stderr": 0.04524596007030048, + "acc_norm": 0.6759259259259259, + "acc_norm_stderr": 0.04524596007030048 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.458128078817734, + "acc_stderr": 0.03505630140785741, + "acc_norm": 0.458128078817734, + "acc_norm_stderr": 0.03505630140785741 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 
0.6483870967741936, + "acc_stderr": 0.027162537826948458, + "acc_norm": 0.6483870967741936, + "acc_norm_stderr": 0.027162537826948458 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.8247863247863247, + "acc_stderr": 0.024904439098918214, + "acc_norm": 0.8247863247863247, + "acc_norm_stderr": 0.024904439098918214 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.6037735849056604, + "acc_stderr": 0.030102793781791197, + "acc_norm": 0.6037735849056604, + "acc_norm_stderr": 0.030102793781791197 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6272727272727273, + "acc_stderr": 0.04631381319425465, + "acc_norm": 0.6272727272727273, + "acc_norm_stderr": 0.04631381319425465 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3592592592592593, + "acc_stderr": 0.02925290592725198, + "acc_norm": 0.3592592592592593, + "acc_norm_stderr": 0.02925290592725198 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.39072847682119205, + "acc_stderr": 0.039837983066598075, + "acc_norm": 0.39072847682119205, + "acc_norm_stderr": 0.039837983066598075 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7512437810945274, + "acc_stderr": 0.030567675938916707, + "acc_norm": 0.7512437810945274, + "acc_norm_stderr": 0.030567675938916707 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5895953757225434, + "acc_stderr": 0.03750757044895536, + "acc_norm": 0.5895953757225434, + "acc_norm_stderr": 0.03750757044895536 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.4576719576719577, + "acc_stderr": 0.02565886886205832, + "acc_norm": 0.4576719576719577, + "acc_norm_stderr": 0.02565886886205832 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.6458333333333334, + "acc_stderr": 0.039994111357535424, + "acc_norm": 0.6458333333333334, + "acc_norm_stderr": 0.039994111357535424 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 
+ }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.79, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.79, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.6213872832369942, + "acc_stderr": 0.026113749361310345, + "acc_norm": 0.6213872832369942, + "acc_norm_stderr": 0.026113749361310345 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.6257668711656442, + "acc_stderr": 0.03802068102899615, + "acc_norm": 0.6257668711656442, + "acc_norm_stderr": 0.03802068102899615 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6759259259259259, + "acc_stderr": 0.026041766202717156, + "acc_norm": 0.6759259259259259, + "acc_norm_stderr": 0.026041766202717156 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7668393782383419, + "acc_stderr": 0.03051611137147601, + "acc_norm": 0.7668393782383419, + "acc_norm_stderr": 0.03051611137147601 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.4649122807017544, + "acc_stderr": 0.046920083813689104, + "acc_norm": 0.4649122807017544, + "acc_norm_stderr": 0.046920083813689104 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.7651376146788991, + "acc_stderr": 0.018175110510343605, + "acc_norm": 0.7651376146788991, + "acc_norm_stderr": 0.018175110510343605 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4126984126984127, + "acc_stderr": 0.04403438954768177, + "acc_norm": 0.4126984126984127, + "acc_norm_stderr": 0.04403438954768177 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.6437908496732027, + "acc_stderr": 0.027420477662629235, + "acc_norm": 0.6437908496732027, + "acc_norm_stderr": 0.027420477662629235 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.66, + "acc_stderr": 0.04760952285695237, + "acc_norm": 0.66, + "acc_norm_stderr": 
0.04760952285695237 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7520661157024794, + "acc_stderr": 0.039418975265163025, + "acc_norm": 0.7520661157024794, + "acc_norm_stderr": 0.039418975265163025 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.6578947368421053, + "acc_stderr": 0.038607315993160904, + "acc_norm": 0.6578947368421053, + "acc_norm_stderr": 0.038607315993160904 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5947712418300654, + "acc_stderr": 0.019861155193829163, + "acc_norm": 0.5947712418300654, + "acc_norm_stderr": 0.019861155193829163 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.4219858156028369, + "acc_stderr": 0.029462189233370593, + "acc_norm": 0.4219858156028369, + "acc_norm_stderr": 0.029462189233370593 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4107142857142857, + "acc_stderr": 0.04669510663875191, + "acc_norm": 0.4107142857142857, + "acc_norm_stderr": 0.04669510663875191 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.034093869469927006, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.034093869469927006 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.3843575418994413, + "acc_stderr": 0.0162690886639594, + "acc_norm": 0.3843575418994413, + "acc_norm_stderr": 0.0162690886639594 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.74, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.74, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5588235294117647, + "acc_stderr": 0.03016191193076711, + "acc_norm": 0.5588235294117647, + "acc_norm_stderr": 0.03016191193076711 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6938775510204082, + "acc_stderr": 
0.02950489645459597, + "acc_norm": 0.6938775510204082, + "acc_norm_stderr": 0.02950489645459597 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7426160337552743, + "acc_stderr": 0.0284588209914603, + "acc_norm": 0.7426160337552743, + "acc_norm_stderr": 0.0284588209914603 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.45371577574967403, + "acc_stderr": 0.012715404841277752, + "acc_norm": 0.45371577574967403, + "acc_norm_stderr": 0.012715404841277752 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6764705882352942, + "acc_stderr": 0.03283472056108561, + "acc_norm": 0.6764705882352942, + "acc_norm_stderr": 0.03283472056108561 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6424242424242425, + "acc_stderr": 0.03742597043806587, + "acc_norm": 0.6424242424242425, + "acc_norm_stderr": 0.03742597043806587 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.6707466340269278, + "mc1_stderr": 0.01645126444006823, + "mc2": 0.7634844333623657, + "mc2_stderr": 0.013734259239965596 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.551357733175915, + "acc_stderr": 0.017099430514725785, + "acc_norm": 0.5714285714285714, + "acc_norm_stderr": 0.01701403811929748 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + 
"harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + 
"model_name": "davidkim205/nox-solar-10.7b-v3", + "model_sha": "4a112e2b4ebfa17dd582ab557d39febbeae2be38", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/davidkim205/nox-solar-10.7b-v4/result_2024-03-16 06:27:41.json b/davidkim205/nox-solar-10.7b-v4/result_2024-03-16 06:27:41.json new file mode 100644 index 0000000000000000000000000000000000000000..2278fbf9adf93f8ed860a7b19cb86869feb1df8b --- /dev/null +++ b/davidkim205/nox-solar-10.7b-v4/result_2024-03-16 06:27:41.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.6962457337883959, + "acc_stderr": 0.013438909184778766, + "acc_norm": 0.735494880546075, + "acc_norm_stderr": 0.012889272949313366 + }, + "harness|ko_hellaswag|10": { + "acc": 0.569308902609042, + "acc_stderr": 0.004941609820763586, + "acc_norm": 0.7206731726747659, + "acc_norm_stderr": 0.00447751468132816 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.7368421052631579, + "acc_stderr": 0.03377310252209205, + "acc_norm": 0.7368421052631579, + "acc_norm_stderr": 0.03377310252209205 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.7378640776699029, + "acc_stderr": 0.043546310772605956, + "acc_norm": 0.7378640776699029, + "acc_norm_stderr": 0.043546310772605956 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.7011494252873564, + "acc_stderr": 0.01636925681509311, + "acc_norm": 0.7011494252873564, + "acc_norm_stderr": 0.01636925681509311 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4740740740740741, + "acc_stderr": 0.04313531696750574, + "acc_norm": 0.4740740740740741, + "acc_norm_stderr": 0.04313531696750574 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 
0.5191489361702127, + "acc_stderr": 0.0326620429906468, + "acc_norm": 0.5191489361702127, + "acc_norm_stderr": 0.0326620429906468 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4939759036144578, + "acc_stderr": 0.03892212195333047, + "acc_norm": 0.4939759036144578, + "acc_norm_stderr": 0.03892212195333047 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.639871382636656, + "acc_stderr": 0.027264297599804012, + "acc_norm": 0.639871382636656, + "acc_norm_stderr": 0.027264297599804012 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.6278026905829597, + "acc_stderr": 0.03244305283008731, + "acc_norm": 0.6278026905829597, + "acc_norm_stderr": 0.03244305283008731 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5725190839694656, + "acc_stderr": 0.04338920305792399, + "acc_norm": 0.5725190839694656, + "acc_norm_stderr": 0.04338920305792399 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.797979797979798, + "acc_stderr": 0.028606204289229876, + "acc_norm": 0.797979797979798, + "acc_norm_stderr": 0.028606204289229876 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5655172413793104, + "acc_stderr": 0.04130740879555497, + "acc_norm": 0.5655172413793104, + "acc_norm_stderr": 0.04130740879555497 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.04617034827006716, + "acc_norm": 0.3137254901960784, + "acc_norm_stderr": 0.04617034827006716 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6302521008403361, + "acc_stderr": 0.03135709599613591, + "acc_norm": 0.6302521008403361, + "acc_norm_stderr": 0.03135709599613591 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.6384615384615384, + "acc_stderr": 0.024359581465397007, + "acc_norm": 0.6384615384615384, + "acc_norm_stderr": 0.024359581465397007 + }, 
+ "harness|ko_mmlu_computer_security|5": { + "acc": 0.71, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.71, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6759259259259259, + "acc_stderr": 0.04524596007030048, + "acc_norm": 0.6759259259259259, + "acc_norm_stderr": 0.04524596007030048 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4827586206896552, + "acc_stderr": 0.035158955511656986, + "acc_norm": 0.4827586206896552, + "acc_norm_stderr": 0.035158955511656986 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.635483870967742, + "acc_stderr": 0.02737987122994325, + "acc_norm": 0.635483870967742, + "acc_norm_stderr": 0.02737987122994325 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.8290598290598291, + "acc_stderr": 0.024662496845209804, + "acc_norm": 0.8290598290598291, + "acc_norm_stderr": 0.024662496845209804 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5773584905660377, + "acc_stderr": 0.03040233144576954, + "acc_norm": 0.5773584905660377, + "acc_norm_stderr": 0.03040233144576954 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6272727272727273, + "acc_stderr": 0.04631381319425465, + "acc_norm": 0.6272727272727273, + "acc_norm_stderr": 0.04631381319425465 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3851851851851852, + "acc_stderr": 0.029670906124630882, + "acc_norm": 0.3851851851851852, + "acc_norm_stderr": 0.029670906124630882 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3443708609271523, + "acc_stderr": 0.038796870240733264, + "acc_norm": 0.3443708609271523, + "acc_norm_stderr": 0.038796870240733264 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.746268656716418, + "acc_stderr": 0.03076944496729601, + "acc_norm": 0.746268656716418, + "acc_norm_stderr": 
0.03076944496729601 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5433526011560693, + "acc_stderr": 0.03798106566014498, + "acc_norm": 0.5433526011560693, + "acc_norm_stderr": 0.03798106566014498 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.4708994708994709, + "acc_stderr": 0.025707658614154943, + "acc_norm": 0.4708994708994709, + "acc_norm_stderr": 0.025707658614154943 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.6597222222222222, + "acc_stderr": 0.03962135573486219, + "acc_norm": 0.6597222222222222, + "acc_norm_stderr": 0.03962135573486219 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.78, + "acc_stderr": 0.04163331998932263, + "acc_norm": 0.78, + "acc_norm_stderr": 0.04163331998932263 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.6069364161849711, + "acc_stderr": 0.026296227915613667, + "acc_norm": 0.6069364161849711, + "acc_norm_stderr": 0.026296227915613667 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.6012269938650306, + "acc_stderr": 0.03847021420456024, + "acc_norm": 0.6012269938650306, + "acc_norm_stderr": 0.03847021420456024 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6697530864197531, + "acc_stderr": 0.026168298456732846, + "acc_norm": 0.6697530864197531, + "acc_norm_stderr": 0.026168298456732846 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7616580310880829, + "acc_stderr": 0.03074890536390989, + "acc_norm": 0.7616580310880829, + "acc_norm_stderr": 0.03074890536390989 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.4649122807017544, + "acc_stderr": 0.046920083813689104, + "acc_norm": 0.4649122807017544, + 
"acc_norm_stderr": 0.046920083813689104 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.7504587155963303, + "acc_stderr": 0.018553897629501617, + "acc_norm": 0.7504587155963303, + "acc_norm_stderr": 0.018553897629501617 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4365079365079365, + "acc_stderr": 0.04435932892851466, + "acc_norm": 0.4365079365079365, + "acc_norm_stderr": 0.04435932892851466 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.630718954248366, + "acc_stderr": 0.027634176689602656, + "acc_norm": 0.630718954248366, + "acc_norm_stderr": 0.027634176689602656 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.71, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.71, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.71900826446281, + "acc_stderr": 0.04103203830514512, + "acc_norm": 0.71900826446281, + "acc_norm_stderr": 0.04103203830514512 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.6644736842105263, + "acc_stderr": 0.03842498559395268, + "acc_norm": 0.6644736842105263, + "acc_norm_stderr": 0.03842498559395268 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.576797385620915, + "acc_stderr": 0.01998780976948207, + "acc_norm": 0.576797385620915, + "acc_norm_stderr": 0.01998780976948207 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3971631205673759, + "acc_stderr": 0.029189805673587102, + "acc_norm": 0.3971631205673759, + "acc_norm_stderr": 0.029189805673587102 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.04697113923010213, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.04697113923010213 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.034093869469927006, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.034093869469927006 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.4245810055865922, + "acc_stderr": 
0.01653117099327888, + "acc_norm": 0.4245810055865922, + "acc_norm_stderr": 0.01653117099327888 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.68, + "acc_stderr": 0.04688261722621503, + "acc_norm": 0.68, + "acc_norm_stderr": 0.04688261722621503 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5404411764705882, + "acc_stderr": 0.030273325077345755, + "acc_norm": 0.5404411764705882, + "acc_norm_stderr": 0.030273325077345755 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6816326530612244, + "acc_stderr": 0.02982253379398204, + "acc_norm": 0.6816326530612244, + "acc_norm_stderr": 0.02982253379398204 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7426160337552743, + "acc_stderr": 0.028458820991460302, + "acc_norm": 0.7426160337552743, + "acc_norm_stderr": 0.028458820991460302 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.44589308996088656, + "acc_stderr": 0.01269524471137978, + "acc_norm": 0.44589308996088656, + "acc_norm_stderr": 0.01269524471137978 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6568627450980392, + "acc_stderr": 0.03332139944668086, + "acc_norm": 0.6568627450980392, + "acc_norm_stderr": 0.03332139944668086 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6424242424242425, + "acc_stderr": 0.03742597043806587, + "acc_norm": 0.6424242424242425, + "acc_norm_stderr": 0.03742597043806587 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.6927784577723378, + "mc1_stderr": 0.016150201321323023, + "mc2": 0.7931933649114187, + "mc2_stderr": 0.013297739733108521 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5395513577331759, + "acc_stderr": 0.017136487626049846, + "acc_norm": 0.5596221959858324, + "acc_norm_stderr": 0.017067699774312984 + } + }, + "versions": { + "all": 0, + 
"harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + 
"harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "davidkim205/nox-solar-10.7b-v4", + "model_sha": "611f44827581f0147e45a3570ba48b7facbfefa2", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/dddsaty/KoSOLAR-10.7B_DPO_Adapter_Attach/result_2024-02-14 01:17:06.json b/dddsaty/KoSOLAR-10.7B_DPO_Adapter_Attach/result_2024-02-14 01:17:06.json new file mode 100644 index 0000000000000000000000000000000000000000..0d60b0e385c7d953433fce4e320336adcc3f490c --- /dev/null +++ b/dddsaty/KoSOLAR-10.7B_DPO_Adapter_Attach/result_2024-02-14 01:17:06.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4726962457337884, + "acc_stderr": 0.014589589101985993, + "acc_norm": 0.5332764505119454, + "acc_norm_stderr": 0.014578995859605813 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4651463851822346, + "acc_stderr": 0.004977643730848598, + "acc_norm": 0.6435968930491934, + "acc_norm_stderr": 0.004779574402771394 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.7134502923976608, + "acc_stderr": 0.03467826685703826, + "acc_norm": 0.7134502923976608, + "acc_norm_stderr": 0.03467826685703826 + }, + 
"harness|ko_mmlu_management|5": { + "acc": 0.6990291262135923, + "acc_stderr": 0.04541609446503947, + "acc_norm": 0.6990291262135923, + "acc_norm_stderr": 0.04541609446503947 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.7254150702426565, + "acc_stderr": 0.015959829933084056, + "acc_norm": 0.7254150702426565, + "acc_norm_stderr": 0.015959829933084056 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45185185185185184, + "acc_stderr": 0.042992689054808624, + "acc_norm": 0.45185185185185184, + "acc_norm_stderr": 0.042992689054808624 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.5404255319148936, + "acc_stderr": 0.03257901482099834, + "acc_norm": 0.5404255319148936, + "acc_norm_stderr": 0.03257901482099834 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.5180722891566265, + "acc_stderr": 0.03889951252827216, + "acc_norm": 0.5180722891566265, + "acc_norm_stderr": 0.03889951252827216 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6141479099678456, + "acc_stderr": 0.027648149599751464, + "acc_norm": 0.6141479099678456, + "acc_norm_stderr": 0.027648149599751464 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5874439461883408, + "acc_stderr": 0.03304062175449297, + "acc_norm": 0.5874439461883408, + "acc_norm_stderr": 0.03304062175449297 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6412213740458015, + "acc_stderr": 0.04206739313864908, + "acc_norm": 0.6412213740458015, + "acc_norm_stderr": 0.04206739313864908 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7474747474747475, + "acc_stderr": 0.03095405547036592, + "acc_norm": 0.7474747474747475, + "acc_norm_stderr": 0.03095405547036592 + }, + 
"harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5310344827586206, + "acc_stderr": 0.04158632762097828, + "acc_norm": 0.5310344827586206, + "acc_norm_stderr": 0.04158632762097828 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3431372549019608, + "acc_stderr": 0.04724007352383887, + "acc_norm": 0.3431372549019608, + "acc_norm_stderr": 0.04724007352383887 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6638655462184874, + "acc_stderr": 0.03068473711513536, + "acc_norm": 0.6638655462184874, + "acc_norm_stderr": 0.03068473711513536 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5666666666666667, + "acc_stderr": 0.025124653525885093, + "acc_norm": 0.5666666666666667, + "acc_norm_stderr": 0.025124653525885093 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.59, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.59, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6018518518518519, + "acc_stderr": 0.04732332615978813, + "acc_norm": 0.6018518518518519, + "acc_norm_stderr": 0.04732332615978813 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.45320197044334976, + "acc_stderr": 0.03502544650845872, + "acc_norm": 0.45320197044334976, + "acc_norm_stderr": 0.03502544650845872 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.635483870967742, + "acc_stderr": 0.02737987122994324, + "acc_norm": 0.635483870967742, + "acc_norm_stderr": 0.02737987122994324 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7777777777777778, + "acc_stderr": 0.027236013946196673, + "acc_norm": 0.7777777777777778, + "acc_norm_stderr": 0.027236013946196673 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5735849056603773, + "acc_stderr": 0.030437794342983052, + "acc_norm": 0.5735849056603773, + 
"acc_norm_stderr": 0.030437794342983052 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6, + "acc_stderr": 0.0469237132203465, + "acc_norm": 0.6, + "acc_norm_stderr": 0.0469237132203465 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.34814814814814815, + "acc_stderr": 0.029045600290616258, + "acc_norm": 0.34814814814814815, + "acc_norm_stderr": 0.029045600290616258 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3509933774834437, + "acc_stderr": 0.03896981964257375, + "acc_norm": 0.3509933774834437, + "acc_norm_stderr": 0.03896981964257375 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7263681592039801, + "acc_stderr": 0.03152439186555403, + "acc_norm": 0.7263681592039801, + "acc_norm_stderr": 0.03152439186555403 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5260115606936416, + "acc_stderr": 0.03807301726504513, + "acc_norm": 0.5260115606936416, + "acc_norm_stderr": 0.03807301726504513 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3835978835978836, + "acc_stderr": 0.025043757318520196, + "acc_norm": 0.3835978835978836, + "acc_norm_stderr": 0.025043757318520196 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5763888888888888, + "acc_stderr": 0.041321250197233685, + "acc_norm": 0.5763888888888888, + "acc_norm_stderr": 0.041321250197233685 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.8, + "acc_stderr": 0.040201512610368445, + "acc_norm": 0.8, + "acc_norm_stderr": 0.040201512610368445 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.615606936416185, + "acc_stderr": 0.026189666966272035, + "acc_norm": 0.615606936416185, + "acc_norm_stderr": 0.026189666966272035 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5460122699386503, + "acc_stderr": 0.0391170190467718, + "acc_norm": 0.5460122699386503, + 
"acc_norm_stderr": 0.0391170190467718 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6419753086419753, + "acc_stderr": 0.02667561192603709, + "acc_norm": 0.6419753086419753, + "acc_norm_stderr": 0.02667561192603709 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7409326424870466, + "acc_stderr": 0.03161877917935411, + "acc_norm": 0.7409326424870466, + "acc_norm_stderr": 0.03161877917935411 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.41228070175438597, + "acc_stderr": 0.04630653203366596, + "acc_norm": 0.41228070175438597, + "acc_norm_stderr": 0.04630653203366596 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.7155963302752294, + "acc_stderr": 0.019342036587702595, + "acc_norm": 0.7155963302752294, + "acc_norm_stderr": 0.019342036587702595 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4126984126984127, + "acc_stderr": 0.04403438954768176, + "acc_norm": 0.4126984126984127, + "acc_norm_stderr": 0.04403438954768176 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.6601307189542484, + "acc_stderr": 0.027121956071388852, + "acc_norm": 0.6601307189542484, + "acc_norm_stderr": 0.027121956071388852 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.59, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.59, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6942148760330579, + "acc_stderr": 0.042059539338841226, + "acc_norm": 0.6942148760330579, + "acc_norm_stderr": 0.042059539338841226 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.6118421052631579, + "acc_stderr": 0.03965842097512744, + "acc_norm": 0.6118421052631579, + "acc_norm_stderr": 0.03965842097512744 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5228758169934641, + "acc_stderr": 0.02020665318788479, + 
"acc_norm": 0.5228758169934641, + "acc_norm_stderr": 0.02020665318788479 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.38652482269503546, + "acc_stderr": 0.029049190342543458, + "acc_norm": 0.38652482269503546, + "acc_norm_stderr": 0.029049190342543458 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.39285714285714285, + "acc_stderr": 0.04635550135609976, + "acc_norm": 0.39285714285714285, + "acc_norm_stderr": 0.04635550135609976 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5370370370370371, + "acc_stderr": 0.03400603625538272, + "acc_norm": 0.5370370370370371, + "acc_norm_stderr": 0.03400603625538272 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.21452513966480447, + "acc_stderr": 0.013728923407828856, + "acc_norm": 0.21452513966480447, + "acc_norm_stderr": 0.013728923407828856 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.65, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.65, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5698529411764706, + "acc_stderr": 0.030074971917302875, + "acc_norm": 0.5698529411764706, + "acc_norm_stderr": 0.030074971917302875 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6775510204081633, + "acc_stderr": 0.029923100563683913, + "acc_norm": 0.6775510204081633, + "acc_norm_stderr": 0.029923100563683913 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7552742616033755, + "acc_stderr": 0.027985699387036416, + "acc_norm": 0.7552742616033755, + "acc_norm_stderr": 0.027985699387036416 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.4061277705345502, + "acc_stderr": 0.012543154588412927, + "acc_norm": 0.4061277705345502, + "acc_norm_stderr": 0.012543154588412927 + }, + 
"harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.7058823529411765, + "acc_stderr": 0.03198001660115071, + "acc_norm": 0.7058823529411765, + "acc_norm_stderr": 0.03198001660115071 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.7212121212121212, + "acc_stderr": 0.03501438706296781, + "acc_norm": 0.7212121212121212, + "acc_norm_stderr": 0.03501438706296781 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3108935128518972, + "mc1_stderr": 0.016203316673559693, + "mc2": 0.45416281597841346, + "mc2_stderr": 0.015228997830055265 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.6056670602125147, + "acc_stderr": 0.016802090674893223, + "acc_norm": 0.6245572609208973, + "acc_norm_stderr": 0.016648411589511088 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, 
+ "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "dddsaty/KoSOLAR-10.7B_DPO_Adapter_Attach", + "model_sha": "b0bf8b5c019d0296c08a1998fa3f140b097d2260", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/dddsaty/Open_Ko_SOLAR_DPO_Merge_v0.1/result_2024-01-25 03:34:44.json b/dddsaty/Open_Ko_SOLAR_DPO_Merge_v0.1/result_2024-01-25 03:34:44.json new file mode 100644 index 
0000000000000000000000000000000000000000..93925e914be24ce9f040e0603b6eaf80ae24e47a --- /dev/null +++ b/dddsaty/Open_Ko_SOLAR_DPO_Merge_v0.1/result_2024-01-25 03:34:44.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4445392491467577, + "acc_stderr": 0.014521226405627075, + "acc_norm": 0.5, + "acc_norm_stderr": 0.014611390804670088 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4462258514240191, + "acc_stderr": 0.004960839986099528, + "acc_norm": 0.6054570802628958, + "acc_norm_stderr": 0.004877534215987094 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6374269005847953, + "acc_stderr": 0.0368713061556206, + "acc_norm": 0.6374269005847953, + "acc_norm_stderr": 0.0368713061556206 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6796116504854369, + "acc_stderr": 0.04620284082280042, + "acc_norm": 0.6796116504854369, + "acc_norm_stderr": 0.04620284082280042 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6909323116219668, + "acc_stderr": 0.01652498891970219, + "acc_norm": 0.6909323116219668, + "acc_norm_stderr": 0.01652498891970219 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.043097329010363554, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.043097329010363554 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4553191489361702, + "acc_stderr": 0.03255525359340355, + "acc_norm": 0.4553191489361702, + "acc_norm_stderr": 0.03255525359340355 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.45180722891566266, + "acc_stderr": 0.03874371556587953, + "acc_norm": 0.45180722891566266, + "acc_norm_stderr": 0.03874371556587953 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5852090032154341, + "acc_stderr": 0.02798268045975956, + "acc_norm": 0.5852090032154341, + "acc_norm_stderr": 0.02798268045975956 + 
}, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5381165919282511, + "acc_stderr": 0.03346015011973228, + "acc_norm": 0.5381165919282511, + "acc_norm_stderr": 0.03346015011973228 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5572519083969466, + "acc_stderr": 0.04356447202665069, + "acc_norm": 0.5572519083969466, + "acc_norm_stderr": 0.04356447202665069 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6565656565656566, + "acc_stderr": 0.03383201223244443, + "acc_norm": 0.6565656565656566, + "acc_norm_stderr": 0.03383201223244443 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.47586206896551725, + "acc_stderr": 0.041618085035015295, + "acc_norm": 0.47586206896551725, + "acc_norm_stderr": 0.041618085035015295 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.042207736591714534, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.042207736591714534 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.47478991596638653, + "acc_stderr": 0.03243718055137411, + "acc_norm": 0.47478991596638653, + "acc_norm_stderr": 0.03243718055137411 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5384615384615384, + "acc_stderr": 0.025275892070240655, + "acc_norm": 0.5384615384615384, + "acc_norm_stderr": 0.025275892070240655 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5370370370370371, + "acc_stderr": 0.04820403072760627, + "acc_norm": 0.5370370370370371, + "acc_norm_stderr": 
0.04820403072760627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3694581280788177, + "acc_stderr": 0.03395970381998573, + "acc_norm": 0.3694581280788177, + "acc_norm_stderr": 0.03395970381998573 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5612903225806452, + "acc_stderr": 0.028229497320317213, + "acc_norm": 0.5612903225806452, + "acc_norm_stderr": 0.028229497320317213 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7564102564102564, + "acc_stderr": 0.028120966503914397, + "acc_norm": 0.7564102564102564, + "acc_norm_stderr": 0.028120966503914397 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5132075471698113, + "acc_stderr": 0.030762134874500482, + "acc_norm": 0.5132075471698113, + "acc_norm_stderr": 0.030762134874500482 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5545454545454546, + "acc_stderr": 0.047605488214603246, + "acc_norm": 0.5545454545454546, + "acc_norm_stderr": 0.047605488214603246 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.32592592592592595, + "acc_stderr": 0.02857834836547306, + "acc_norm": 0.32592592592592595, + "acc_norm_stderr": 0.02857834836547306 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.03757949922943342, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.03757949922943342 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6616915422885572, + "acc_stderr": 0.03345563070339191, + "acc_norm": 0.6616915422885572, + "acc_norm_stderr": 0.03345563070339191 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.41040462427745666, + "acc_stderr": 0.03750757044895538, + "acc_norm": 0.41040462427745666, + "acc_norm_stderr": 0.03750757044895538 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.024278568024307706, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.024278568024307706 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4930555555555556, 
+ "acc_stderr": 0.04180806750294938, + "acc_norm": 0.4930555555555556, + "acc_norm_stderr": 0.04180806750294938 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.7, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.7, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5057803468208093, + "acc_stderr": 0.02691729617914911, + "acc_norm": 0.5057803468208093, + "acc_norm_stderr": 0.02691729617914911 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.50920245398773, + "acc_stderr": 0.03927705600787443, + "acc_norm": 0.50920245398773, + "acc_norm_stderr": 0.03927705600787443 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5123456790123457, + "acc_stderr": 0.027812262269327235, + "acc_norm": 0.5123456790123457, + "acc_norm_stderr": 0.027812262269327235 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.616580310880829, + "acc_stderr": 0.03508984236295341, + "acc_norm": 0.616580310880829, + "acc_norm_stderr": 0.03508984236295341 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.38596491228070173, + "acc_stderr": 0.04579639422070435, + "acc_norm": 0.38596491228070173, + "acc_norm_stderr": 0.04579639422070435 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6018348623853211, + "acc_stderr": 0.02098798942265426, + "acc_norm": 0.6018348623853211, + "acc_norm_stderr": 0.02098798942265426 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.0404061017820884, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.0404061017820884 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5294117647058824, + 
"acc_stderr": 0.028580341065138293, + "acc_norm": 0.5294117647058824, + "acc_norm_stderr": 0.028580341065138293 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6611570247933884, + "acc_stderr": 0.0432076780753667, + "acc_norm": 0.6611570247933884, + "acc_norm_stderr": 0.0432076780753667 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.506578947368421, + "acc_stderr": 0.040685900502249704, + "acc_norm": 0.506578947368421, + "acc_norm_stderr": 0.040685900502249704 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.43137254901960786, + "acc_stderr": 0.02003639376835264, + "acc_norm": 0.43137254901960786, + "acc_norm_stderr": 0.02003639376835264 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.34397163120567376, + "acc_stderr": 0.02833801742861132, + "acc_norm": 0.34397163120567376, + "acc_norm_stderr": 0.02833801742861132 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25892857142857145, + "acc_stderr": 0.041577515398656284, + "acc_norm": 0.25892857142857145, + "acc_norm_stderr": 0.041577515398656284 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.032757734861009996, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.032757734861009996 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.25921787709497207, + "acc_stderr": 0.014655780837497717, + "acc_norm": 0.25921787709497207, + "acc_norm_stderr": 0.014655780837497717 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562429, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562429 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001974 + }, + 
"harness|ko_mmlu_professional_medicine|5": { + "acc": 0.41544117647058826, + "acc_stderr": 0.029935342707877753, + "acc_norm": 0.41544117647058826, + "acc_norm_stderr": 0.029935342707877753 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5714285714285714, + "acc_stderr": 0.03168091161233882, + "acc_norm": 0.5714285714285714, + "acc_norm_stderr": 0.03168091161233882 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7046413502109705, + "acc_stderr": 0.02969633871342288, + "acc_norm": 0.7046413502109705, + "acc_norm_stderr": 0.02969633871342288 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.32659713168187743, + "acc_stderr": 0.011977676704715993, + "acc_norm": 0.32659713168187743, + "acc_norm_stderr": 0.011977676704715993 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5882352941176471, + "acc_stderr": 0.03454236585380609, + "acc_norm": 0.5882352941176471, + "acc_norm_stderr": 0.03454236585380609 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5757575757575758, + "acc_stderr": 0.038592681420702636, + "acc_norm": 0.5757575757575758, + "acc_norm_stderr": 0.038592681420702636 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2802937576499388, + "mc1_stderr": 0.015723139524608753, + "mc2": 0.43649220011102474, + "mc2_stderr": 0.014946654948648044 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5808736717827627, + "acc_stderr": 0.016963995010862796, + "acc_norm": 0.6115702479338843, + "acc_norm_stderr": 0.01675692157106942 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + 
"harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + 
"harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "dddsaty/Open_Ko_SOLAR_DPO_Merge_v0.1", + "model_sha": "94acda37b62a5f19af558f921c06a296081b3e30", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/deepmodal/deepmodal-EEVE-Korean-10.8B-v1.0/result_2024-04-14 01:22:22.json b/deepmodal/deepmodal-EEVE-Korean-10.8B-v1.0/result_2024-04-14 01:22:22.json new file mode 100644 index 0000000000000000000000000000000000000000..40fc979bcd4331a745811e701800ee827f933d80 --- /dev/null +++ b/deepmodal/deepmodal-EEVE-Korean-10.8B-v1.0/result_2024-04-14 01:22:22.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4803754266211604, + "acc_stderr": 0.014600132075947084, + "acc_norm": 0.5409556313993175, + "acc_norm_stderr": 0.014562291073601236 + }, + "harness|ko_hellaswag|10": { + "acc": 0.46126269667396935, + "acc_stderr": 0.004974783753309692, + "acc_norm": 0.6286596295558654, + "acc_norm_stderr": 0.004821757734156712 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.7251461988304093, + "acc_stderr": 0.034240429246915824, + "acc_norm": 0.7251461988304093, + "acc_norm_stderr": 0.034240429246915824 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.7669902912621359, + "acc_stderr": 0.04185832598928315, + "acc_norm": 0.7669902912621359, + "acc_norm_stderr": 0.04185832598928315 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.7241379310344828, + "acc_stderr": 0.01598281477469563, + "acc_norm": 0.7241379310344828, + "acc_norm_stderr": 0.01598281477469563 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.5333333333333333, + 
"acc_stderr": 0.043097329010363554, + "acc_norm": 0.5333333333333333, + "acc_norm_stderr": 0.043097329010363554 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421255, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421255 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.5319148936170213, + "acc_stderr": 0.03261936918467381, + "acc_norm": 0.5319148936170213, + "acc_norm_stderr": 0.03261936918467381 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.5180722891566265, + "acc_stderr": 0.03889951252827216, + "acc_norm": 0.5180722891566265, + "acc_norm_stderr": 0.03889951252827216 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6591639871382636, + "acc_stderr": 0.02692084126077616, + "acc_norm": 0.6591639871382636, + "acc_norm_stderr": 0.02692084126077616 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.6188340807174888, + "acc_stderr": 0.03259625118416827, + "acc_norm": 0.6188340807174888, + "acc_norm_stderr": 0.03259625118416827 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6183206106870229, + "acc_stderr": 0.04260735157644561, + "acc_norm": 0.6183206106870229, + "acc_norm_stderr": 0.04260735157644561 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7777777777777778, + "acc_stderr": 0.029620227874790486, + "acc_norm": 0.7777777777777778, + "acc_norm_stderr": 0.029620227874790486 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5655172413793104, + "acc_stderr": 0.04130740879555498, + "acc_norm": 0.5655172413793104, + "acc_norm_stderr": 0.04130740879555498 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04690650298201942, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04690650298201942 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 
0.6302521008403361, + "acc_stderr": 0.03135709599613591, + "acc_norm": 0.6302521008403361, + "acc_norm_stderr": 0.03135709599613591 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5820512820512821, + "acc_stderr": 0.02500732988246121, + "acc_norm": 0.5820512820512821, + "acc_norm_stderr": 0.02500732988246121 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.65, + "acc_stderr": 0.04793724854411022, + "acc_norm": 0.65, + "acc_norm_stderr": 0.04793724854411022 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.7129629629629629, + "acc_stderr": 0.043733130409147614, + "acc_norm": 0.7129629629629629, + "acc_norm_stderr": 0.043733130409147614 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.45320197044334976, + "acc_stderr": 0.03502544650845872, + "acc_norm": 0.45320197044334976, + "acc_norm_stderr": 0.03502544650845872 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.667741935483871, + "acc_stderr": 0.026795560848122787, + "acc_norm": 0.667741935483871, + "acc_norm_stderr": 0.026795560848122787 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.8205128205128205, + "acc_stderr": 0.02514093595033543, + "acc_norm": 0.8205128205128205, + "acc_norm_stderr": 0.02514093595033543 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.6037735849056604, + "acc_stderr": 0.030102793781791194, + "acc_norm": 0.6037735849056604, + "acc_norm_stderr": 0.030102793781791194 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6454545454545455, + "acc_stderr": 0.045820048415054174, + "acc_norm": 0.6454545454545455, + "acc_norm_stderr": 0.045820048415054174 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.02944316932303154, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.02944316932303154 + }, + 
"harness|ko_mmlu_high_school_physics|5": { + "acc": 0.40397350993377484, + "acc_stderr": 0.040064856853653415, + "acc_norm": 0.40397350993377484, + "acc_norm_stderr": 0.040064856853653415 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7164179104477612, + "acc_stderr": 0.03187187537919795, + "acc_norm": 0.7164179104477612, + "acc_norm_stderr": 0.03187187537919795 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5317919075144508, + "acc_stderr": 0.03804749744364763, + "acc_norm": 0.5317919075144508, + "acc_norm_stderr": 0.03804749744364763 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.4523809523809524, + "acc_stderr": 0.025634258115554965, + "acc_norm": 0.4523809523809524, + "acc_norm_stderr": 0.025634258115554965 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5833333333333334, + "acc_stderr": 0.04122728707651282, + "acc_norm": 0.5833333333333334, + "acc_norm_stderr": 0.04122728707651282 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.78, + "acc_stderr": 0.04163331998932263, + "acc_norm": 0.78, + "acc_norm_stderr": 0.04163331998932263 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.6098265895953757, + "acc_stderr": 0.026261677607806642, + "acc_norm": 0.6098265895953757, + "acc_norm_stderr": 0.026261677607806642 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.6073619631901841, + "acc_stderr": 0.0383674090783103, + "acc_norm": 0.6073619631901841, + "acc_norm_stderr": 0.0383674090783103 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6574074074074074, + "acc_stderr": 0.026406145973625676, + "acc_norm": 0.6574074074074074, + "acc_norm_stderr": 0.026406145973625676 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + 
"harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7772020725388601, + "acc_stderr": 0.03003114797764154, + "acc_norm": 0.7772020725388601, + "acc_norm_stderr": 0.03003114797764154 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.45614035087719296, + "acc_stderr": 0.04685473041907789, + "acc_norm": 0.45614035087719296, + "acc_norm_stderr": 0.04685473041907789 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.7247706422018348, + "acc_stderr": 0.019149093743155196, + "acc_norm": 0.7247706422018348, + "acc_norm_stderr": 0.019149093743155196 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.42063492063492064, + "acc_stderr": 0.04415438226743744, + "acc_norm": 0.42063492063492064, + "acc_norm_stderr": 0.04415438226743744 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.6437908496732027, + "acc_stderr": 0.027420477662629235, + "acc_norm": 0.6437908496732027, + "acc_norm_stderr": 0.027420477662629235 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001976, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001976 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7768595041322314, + "acc_stderr": 0.03800754475228733, + "acc_norm": 0.7768595041322314, + "acc_norm_stderr": 0.03800754475228733 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.6710526315789473, + "acc_stderr": 0.03823428969926604, + "acc_norm": 0.6710526315789473, + "acc_norm_stderr": 0.03823428969926604 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5669934640522876, + "acc_stderr": 0.020045442473324227, + "acc_norm": 0.5669934640522876, + "acc_norm_stderr": 0.020045442473324227 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.43617021276595747, + "acc_stderr": 0.02958345203628407, + "acc_norm": 0.43617021276595747, + "acc_norm_stderr": 0.02958345203628407 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4642857142857143, + "acc_stderr": 0.04733667890053757, + 
"acc_norm": 0.4642857142857143, + "acc_norm_stderr": 0.04733667890053757 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4583333333333333, + "acc_stderr": 0.03398110890294636, + "acc_norm": 0.4583333333333333, + "acc_norm_stderr": 0.03398110890294636 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.20335195530726258, + "acc_stderr": 0.01346135148750752, + "acc_norm": 0.20335195530726258, + "acc_norm_stderr": 0.01346135148750752 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.55, + "acc_stderr": 0.05000000000000001, + "acc_norm": 0.55, + "acc_norm_stderr": 0.05000000000000001 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.73, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.73, + "acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5441176470588235, + "acc_stderr": 0.030254372573976715, + "acc_norm": 0.5441176470588235, + "acc_norm_stderr": 0.030254372573976715 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6204081632653061, + "acc_stderr": 0.03106721126287248, + "acc_norm": 0.6204081632653061, + "acc_norm_stderr": 0.03106721126287248 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7890295358649789, + "acc_stderr": 0.02655837250266192, + "acc_norm": 0.7890295358649789, + "acc_norm_stderr": 0.02655837250266192 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.408735332464146, + "acc_stderr": 0.012555701346703387, + "acc_norm": 0.408735332464146, + "acc_norm_stderr": 0.012555701346703387 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.7352941176470589, + "acc_stderr": 0.030964517926923393, + "acc_norm": 0.7352941176470589, + "acc_norm_stderr": 0.030964517926923393 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.7212121212121212, + "acc_stderr": 0.0350143870629678, + "acc_norm": 0.7212121212121212, + "acc_norm_stderr": 0.0350143870629678 + }, + "harness|ko_truthfulqa_mc|0": { + 
"mc1": 0.3182374541003672, + "mc1_stderr": 0.01630598864892059, + "mc2": 0.47045257812715846, + "mc2_stderr": 0.015317766170967539 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.45690672963400236, + "acc_stderr": 0.01712638909308678, + "acc_norm": 0.4946871310507674, + "acc_norm_stderr": 0.017189383627229687 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "deepmodal/deepmodal-EEVE-Korean-10.8B-v1.0", + "model_sha": "a4520af201374a9418d9c069b8b9b57226ff5369", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/devhyun88/hyun-mistral-7b-orca-platypus-refine/result_2024-01-27 10:02:09.json b/devhyun88/hyun-mistral-7b-orca-platypus-refine/result_2024-01-27 10:02:09.json new file mode 100644 index 0000000000000000000000000000000000000000..97f84ea91c22e7256818d72b961fafbb7e10a158 --- /dev/null +++ b/devhyun88/hyun-mistral-7b-orca-platypus-refine/result_2024-01-27 10:02:09.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3455631399317406, + "acc_stderr": 0.013896938461145687, + "acc_norm": 0.3856655290102389, + "acc_norm_stderr": 0.014224250973257175 + }, + 
"harness|ko_hellaswag|10": { + "acc": 0.3721370244971121, + "acc_stderr": 0.004823867761332468, + "acc_norm": 0.4623580959968134, + "acc_norm_stderr": 0.004975621147406105 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4093567251461988, + "acc_stderr": 0.03771283107626544, + "acc_norm": 0.4093567251461988, + "acc_norm_stderr": 0.03771283107626544 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6116504854368932, + "acc_stderr": 0.04825729337356389, + "acc_norm": 0.6116504854368932, + "acc_norm_stderr": 0.04825729337356389 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4342273307790549, + "acc_stderr": 0.017724589389677785, + "acc_norm": 0.4342273307790549, + "acc_norm_stderr": 0.017724589389677785 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4, + "acc_stderr": 0.04232073695151589, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04232073695151589 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.33191489361702126, + "acc_stderr": 0.030783736757745647, + "acc_norm": 0.33191489361702126, + "acc_norm_stderr": 0.030783736757745647 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.40963855421686746, + "acc_stderr": 0.038284011150790206, + "acc_norm": 0.40963855421686746, + "acc_norm_stderr": 0.038284011150790206 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.43086816720257237, + "acc_stderr": 0.028125340983972718, + "acc_norm": 0.43086816720257237, + "acc_norm_stderr": 0.028125340983972718 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.47085201793721976, + "acc_stderr": 0.03350073248773404, + "acc_norm": 0.47085201793721976, + "acc_norm_stderr": 0.03350073248773404 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4122137404580153, + "acc_stderr": 0.04317171194870254, + "acc_norm": 0.4122137404580153, + "acc_norm_stderr": 0.04317171194870254 + }, + 
"harness|ko_mmlu_medical_genetics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.035402943770953675, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.035402943770953675 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.47586206896551725, + "acc_stderr": 0.041618085035015295, + "acc_norm": 0.47586206896551725, + "acc_norm_stderr": 0.041618085035015295 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.04488482852329017, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.04488482852329017 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4579831932773109, + "acc_stderr": 0.032363611119519416, + "acc_norm": 0.4579831932773109, + "acc_norm_stderr": 0.032363611119519416 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.39487179487179486, + "acc_stderr": 0.02478431694215637, + "acc_norm": 0.39487179487179486, + "acc_norm_stderr": 0.02478431694215637 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.55, + "acc_stderr": 0.04999999999999999, + "acc_norm": 0.55, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5092592592592593, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.5092592592592593, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.43349753694581283, + "acc_stderr": 0.034867317274198714, + "acc_norm": 0.43349753694581283, + "acc_norm_stderr": 0.034867317274198714 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4645161290322581, + "acc_stderr": 0.028372287797962945, + "acc_norm": 0.4645161290322581, + 
"acc_norm_stderr": 0.028372287797962945 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7222222222222222, + "acc_stderr": 0.029343114798094455, + "acc_norm": 0.7222222222222222, + "acc_norm_stderr": 0.029343114798094455 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.38113207547169814, + "acc_stderr": 0.029890609686286627, + "acc_norm": 0.38113207547169814, + "acc_norm_stderr": 0.029890609686286627 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.45454545454545453, + "acc_stderr": 0.04769300568972743, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.04769300568972743 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.32222222222222224, + "acc_stderr": 0.028493465091028593, + "acc_norm": 0.32222222222222224, + "acc_norm_stderr": 0.028493465091028593 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2781456953642384, + "acc_stderr": 0.036586032627637426, + "acc_norm": 0.2781456953642384, + "acc_norm_stderr": 0.036586032627637426 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5970149253731343, + "acc_stderr": 0.034683432951111266, + "acc_norm": 0.5970149253731343, + "acc_norm_stderr": 0.034683432951111266 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3179190751445087, + "acc_stderr": 0.0355068398916558, + "acc_norm": 0.3179190751445087, + "acc_norm_stderr": 0.0355068398916558 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.36772486772486773, + "acc_stderr": 0.02483383982556242, + "acc_norm": 0.36772486772486773, + "acc_norm_stderr": 0.02483383982556242 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3125, + "acc_stderr": 0.038760854559127644, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.038760854559127644 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.58, + "acc_stderr": 0.04960449637488584, + 
"acc_norm": 0.58, + "acc_norm_stderr": 0.04960449637488584 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4913294797687861, + "acc_stderr": 0.02691504735536981, + "acc_norm": 0.4913294797687861, + "acc_norm_stderr": 0.02691504735536981 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4233128834355828, + "acc_stderr": 0.03881891213334383, + "acc_norm": 0.4233128834355828, + "acc_norm_stderr": 0.03881891213334383 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4691358024691358, + "acc_stderr": 0.027767689606833932, + "acc_norm": 0.4691358024691358, + "acc_norm_stderr": 0.027767689606833932 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.46113989637305697, + "acc_stderr": 0.035975244117345775, + "acc_norm": 0.46113989637305697, + "acc_norm_stderr": 0.035975244117345775 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.34210526315789475, + "acc_stderr": 0.04462917535336937, + "acc_norm": 0.34210526315789475, + "acc_norm_stderr": 0.04462917535336937 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.41651376146788993, + "acc_stderr": 0.021136376504030874, + "acc_norm": 0.41651376146788993, + "acc_norm_stderr": 0.021136376504030874 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04216370213557835, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04216370213557835 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.47058823529411764, + "acc_stderr": 0.028580341065138286, + "acc_norm": 0.47058823529411764, + "acc_norm_stderr": 0.028580341065138286 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6694214876033058, + "acc_stderr": 
0.04294340845212095, + "acc_norm": 0.6694214876033058, + "acc_norm_stderr": 0.04294340845212095 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4473684210526316, + "acc_stderr": 0.04046336883978251, + "acc_norm": 0.4473684210526316, + "acc_norm_stderr": 0.04046336883978251 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3709150326797386, + "acc_stderr": 0.019542101564854128, + "acc_norm": 0.3709150326797386, + "acc_norm_stderr": 0.019542101564854128 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3404255319148936, + "acc_stderr": 0.02826765748265014, + "acc_norm": 0.3404255319148936, + "acc_norm_stderr": 0.02826765748265014 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3392857142857143, + "acc_stderr": 0.04493949068613539, + "acc_norm": 0.3392857142857143, + "acc_norm_stderr": 0.04493949068613539 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.39814814814814814, + "acc_stderr": 0.033384734032074016, + "acc_norm": 0.39814814814814814, + "acc_norm_stderr": 0.033384734032074016 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24804469273743016, + "acc_stderr": 0.014444157808261453, + "acc_norm": 0.24804469273743016, + "acc_norm_stderr": 0.014444157808261453 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.57, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.57, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.2757352941176471, + "acc_stderr": 0.027146271936625166, + "acc_norm": 0.2757352941176471, + "acc_norm_stderr": 0.027146271936625166 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4857142857142857, + "acc_stderr": 0.03199615232806287, + "acc_norm": 0.4857142857142857, + "acc_norm_stderr": 0.03199615232806287 + }, + 
"harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5316455696202531, + "acc_stderr": 0.03248197400511076, + "acc_norm": 0.5316455696202531, + "acc_norm_stderr": 0.03248197400511076 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.33116036505867014, + "acc_stderr": 0.012020128195985745, + "acc_norm": 0.33116036505867014, + "acc_norm_stderr": 0.012020128195985745 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4362745098039216, + "acc_stderr": 0.03480693138457039, + "acc_norm": 0.4362745098039216, + "acc_norm_stderr": 0.03480693138457039 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.44242424242424244, + "acc_stderr": 0.03878372113711274, + "acc_norm": 0.44242424242424244, + "acc_norm_stderr": 0.03878372113711274 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2962056303549572, + "mc1_stderr": 0.01598359510181139, + "mc2": 0.47575285430985736, + "mc2_stderr": 0.015700960840725076 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.42266824085005905, + "acc_stderr": 0.016983506079577604, + "acc_norm": 0.5076741440377804, + "acc_norm_stderr": 0.017188329219654273 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 
1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "devhyun88/hyun-mistral-7b-orca-platypus-refine", + "model_sha": "36ce54bfcb688dcdb70ba7870966eeb8e3b48a4c", + "model_dtype": 
"torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/devhyun88/ku-mistral-7b-PGO-v1/result_2023-11-08 02:18:33.json b/devhyun88/ku-mistral-7b-PGO-v1/result_2023-11-08 02:18:33.json new file mode 100644 index 0000000000000000000000000000000000000000..e9e4a8d3865d998fb55ac5d5348c36daa3207cc4 --- /dev/null +++ b/devhyun88/ku-mistral-7b-PGO-v1/result_2023-11-08 02:18:33.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.40187713310580203, + "acc_stderr": 0.014327268614578276, + "acc_norm": 0.4496587030716723, + "acc_norm_stderr": 0.014537144444284741 + }, + "harness|ko_hellaswag|10": { + "acc": 0.40748854809798846, + "acc_stderr": 0.004903628887264535, + "acc_norm": 0.5290778729336786, + "acc_norm_stderr": 0.004981336318033644 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5380116959064327, + "acc_stderr": 0.03823727092882307, + "acc_norm": 0.5380116959064327, + "acc_norm_stderr": 0.03823727092882307 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5922330097087378, + "acc_stderr": 0.04865777570410769, + "acc_norm": 0.5922330097087378, + "acc_norm_stderr": 0.04865777570410769 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5363984674329502, + "acc_stderr": 0.017832524079593258, + "acc_norm": 0.5363984674329502, + "acc_norm_stderr": 0.017832524079593258 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3851851851851852, + "acc_stderr": 0.042039210401562783, + "acc_norm": 0.3851851851851852, + "acc_norm_stderr": 0.042039210401562783 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.425531914893617, + "acc_stderr": 0.03232146916224468, + "acc_norm": 0.425531914893617, + "acc_norm_stderr": 0.03232146916224468 + }, + 
"harness|ko_mmlu_virology|5": { + "acc": 0.42168674698795183, + "acc_stderr": 0.03844453181770917, + "acc_norm": 0.42168674698795183, + "acc_norm_stderr": 0.03844453181770917 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5112540192926045, + "acc_stderr": 0.028390897396863537, + "acc_norm": 0.5112540192926045, + "acc_norm_stderr": 0.028390897396863537 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5067264573991032, + "acc_stderr": 0.03355476596234355, + "acc_norm": 0.5067264573991032, + "acc_norm_stderr": 0.03355476596234355 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4351145038167939, + "acc_stderr": 0.04348208051644858, + "acc_norm": 0.4351145038167939, + "acc_norm_stderr": 0.04348208051644858 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5606060606060606, + "acc_stderr": 0.0353608594752948, + "acc_norm": 0.5606060606060606, + "acc_norm_stderr": 0.0353608594752948 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.503448275862069, + "acc_stderr": 0.04166567577101579, + "acc_norm": 0.503448275862069, + "acc_norm_stderr": 0.04166567577101579 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.04533838195929775, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.04533838195929775 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4579831932773109, + "acc_stderr": 0.03236361111951941, + "acc_norm": 0.4579831932773109, + "acc_norm_stderr": 0.03236361111951941 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.46153846153846156, + "acc_stderr": 0.025275892070240634, + "acc_norm": 0.46153846153846156, + "acc_norm_stderr": 0.025275892070240634 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.59, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.59, + 
"acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5462962962962963, + "acc_stderr": 0.048129173245368216, + "acc_norm": 0.5462962962962963, + "acc_norm_stderr": 0.048129173245368216 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4088669950738916, + "acc_stderr": 0.03459058815883231, + "acc_norm": 0.4088669950738916, + "acc_norm_stderr": 0.03459058815883231 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.49032258064516127, + "acc_stderr": 0.02843867799890955, + "acc_norm": 0.49032258064516127, + "acc_norm_stderr": 0.02843867799890955 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6965811965811965, + "acc_stderr": 0.030118210106942645, + "acc_norm": 0.6965811965811965, + "acc_norm_stderr": 0.030118210106942645 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4641509433962264, + "acc_stderr": 0.030693675018458006, + "acc_norm": 0.4641509433962264, + "acc_norm_stderr": 0.030693675018458006 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5636363636363636, + "acc_stderr": 0.04750185058907296, + "acc_norm": 0.5636363636363636, + "acc_norm_stderr": 0.04750185058907296 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.32592592592592595, + "acc_stderr": 0.02857834836547308, + "acc_norm": 0.32592592592592595, + "acc_norm_stderr": 0.02857834836547308 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.037101857261199946, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.037101857261199946 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6766169154228856, + "acc_stderr": 0.03307615947979035, + "acc_norm": 0.6766169154228856, + "acc_norm_stderr": 0.03307615947979035 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.41040462427745666, + "acc_stderr": 
0.03750757044895538, + "acc_norm": 0.41040462427745666, + "acc_norm_stderr": 0.03750757044895538 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.40476190476190477, + "acc_stderr": 0.025279850397404904, + "acc_norm": 0.40476190476190477, + "acc_norm_stderr": 0.025279850397404904 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3472222222222222, + "acc_stderr": 0.039812405437178615, + "acc_norm": 0.3472222222222222, + "acc_norm_stderr": 0.039812405437178615 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.63, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.63, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5028901734104047, + "acc_stderr": 0.02691864538323901, + "acc_norm": 0.5028901734104047, + "acc_norm_stderr": 0.02691864538323901 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4723926380368098, + "acc_stderr": 0.039223782906109894, + "acc_norm": 0.4723926380368098, + "acc_norm_stderr": 0.039223782906109894 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.45987654320987653, + "acc_stderr": 0.027731022753539284, + "acc_norm": 0.45987654320987653, + "acc_norm_stderr": 0.027731022753539284 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5077720207253886, + "acc_stderr": 0.03608003225569654, + "acc_norm": 0.5077720207253886, + "acc_norm_stderr": 0.03608003225569654 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.30701754385964913, + "acc_stderr": 0.0433913832257986, + "acc_norm": 0.30701754385964913, + "acc_norm_stderr": 0.0433913832257986 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 
0.48073394495412847, + "acc_stderr": 0.02142140298254888, + "acc_norm": 0.48073394495412847, + "acc_norm_stderr": 0.02142140298254888 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.373015873015873, + "acc_stderr": 0.04325506042017086, + "acc_norm": 0.373015873015873, + "acc_norm_stderr": 0.04325506042017086 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5163398692810458, + "acc_stderr": 0.02861462475280544, + "acc_norm": 0.5163398692810458, + "acc_norm_stderr": 0.02861462475280544 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6776859504132231, + "acc_stderr": 0.04266416363352167, + "acc_norm": 0.6776859504132231, + "acc_norm_stderr": 0.04266416363352167 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.45394736842105265, + "acc_stderr": 0.04051646342874142, + "acc_norm": 0.45394736842105265, + "acc_norm_stderr": 0.04051646342874142 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4084967320261438, + "acc_stderr": 0.019886221037501862, + "acc_norm": 0.4084967320261438, + "acc_norm_stderr": 0.019886221037501862 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3723404255319149, + "acc_stderr": 0.028838921471251458, + "acc_norm": 0.3723404255319149, + "acc_norm_stderr": 0.028838921471251458 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.38392857142857145, + "acc_stderr": 0.046161430750285455, + "acc_norm": 0.38392857142857145, + "acc_norm_stderr": 0.046161430750285455 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.35648148148148145, + "acc_stderr": 0.032664783315272714, + "acc_norm": 0.35648148148148145, + "acc_norm_stderr": 0.032664783315272714 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.26033519553072626, + "acc_stderr": 0.01467625200931947, + "acc_norm": 0.26033519553072626, + "acc_norm_stderr": 
0.01467625200931947 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.63, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.63, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.35294117647058826, + "acc_stderr": 0.0290294228156814, + "acc_norm": 0.35294117647058826, + "acc_norm_stderr": 0.0290294228156814 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5551020408163265, + "acc_stderr": 0.031814251181977865, + "acc_norm": 0.5551020408163265, + "acc_norm_stderr": 0.031814251181977865 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6329113924050633, + "acc_stderr": 0.031376240725616185, + "acc_norm": 0.6329113924050633, + "acc_norm_stderr": 0.031376240725616185 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.35723598435462844, + "acc_stderr": 0.01223861575031651, + "acc_norm": 0.35723598435462844, + "acc_norm_stderr": 0.01223861575031651 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.44607843137254904, + "acc_stderr": 0.03488845451304974, + "acc_norm": 0.44607843137254904, + "acc_norm_stderr": 0.03488845451304974 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4727272727272727, + "acc_stderr": 0.03898531605579419, + "acc_norm": 0.4727272727272727, + "acc_norm_stderr": 0.03898531605579419 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2876376988984088, + "mc1_stderr": 0.015846315101394812, + "mc2": 0.45493566764145105, + "mc2_stderr": 0.01570789472718274 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.41912632821723733, + "acc_stderr": 0.016963995010862792, + "acc_norm": 0.4427390791027155, + "acc_norm_stderr": 0.01707725413155622 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + 
"harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + 
"harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "devhyun88/ku-mistral-7b-PGO-v1", + "model_sha": "a4f1f7057b91704e9a3328beb2f95ff460339b53", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/devhyun88/ku-mistral-7b-PGO-v2/result_2023-11-13 01:55:37.json b/devhyun88/ku-mistral-7b-PGO-v2/result_2023-11-13 01:55:37.json new file mode 100644 index 0000000000000000000000000000000000000000..833f2c16848210c64e4075aa24038ebd30b3fae7 --- /dev/null +++ b/devhyun88/ku-mistral-7b-PGO-v2/result_2023-11-13 01:55:37.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.39078498293515357, + "acc_stderr": 0.014258563880513782, + "acc_norm": 0.43600682593856654, + "acc_norm_stderr": 0.014491225699230916 + }, + "harness|ko_hellaswag|10": { + "acc": 0.40460067715594505, + "acc_stderr": 0.004898115110975032, + "acc_norm": 0.5252937661820355, + "acc_norm_stderr": 0.004983392650570965 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4853801169590643, + "acc_stderr": 0.038331852752130205, + "acc_norm": 0.4853801169590643, + "acc_norm_stderr": 0.038331852752130205 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5631067961165048, + "acc_stderr": 0.04911147107365777, + "acc_norm": 0.5631067961165048, + 
"acc_norm_stderr": 0.04911147107365777 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.524904214559387, + "acc_stderr": 0.017857770704901035, + "acc_norm": 0.524904214559387, + "acc_norm_stderr": 0.017857770704901035 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.362962962962963, + "acc_stderr": 0.041539484047424004, + "acc_norm": 0.362962962962963, + "acc_norm_stderr": 0.041539484047424004 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4553191489361702, + "acc_stderr": 0.03255525359340355, + "acc_norm": 0.4553191489361702, + "acc_norm_stderr": 0.03255525359340355 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4759036144578313, + "acc_stderr": 0.03887971849597264, + "acc_norm": 0.4759036144578313, + "acc_norm_stderr": 0.03887971849597264 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5337620578778135, + "acc_stderr": 0.028333277109562804, + "acc_norm": 0.5337620578778135, + "acc_norm_stderr": 0.028333277109562804 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.47085201793721976, + "acc_stderr": 0.03350073248773404, + "acc_norm": 0.47085201793721976, + "acc_norm_stderr": 0.03350073248773404 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.46564885496183206, + "acc_stderr": 0.04374928560599738, + "acc_norm": 0.46564885496183206, + "acc_norm_stderr": 0.04374928560599738 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5656565656565656, + "acc_stderr": 0.03531505879359183, + "acc_norm": 0.5656565656565656, + "acc_norm_stderr": 0.03531505879359183 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4689655172413793, + "acc_stderr": 0.04158632762097828, + "acc_norm": 0.4689655172413793, 
+ "acc_norm_stderr": 0.04158632762097828 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.043364327079931785, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.043364327079931785 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.47058823529411764, + "acc_stderr": 0.03242225027115006, + "acc_norm": 0.47058823529411764, + "acc_norm_stderr": 0.03242225027115006 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.46153846153846156, + "acc_stderr": 0.025275892070240634, + "acc_norm": 0.46153846153846156, + "acc_norm_stderr": 0.025275892070240634 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.59, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.59, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.047609522856952344, + "acc_norm": 0.34, + "acc_norm_stderr": 0.047609522856952344 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5185185185185185, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.5185185185185185, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.03465304488406796, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.03465304488406796 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5064516129032258, + "acc_stderr": 0.02844163823354051, + "acc_norm": 0.5064516129032258, + "acc_norm_stderr": 0.02844163823354051 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.717948717948718, + "acc_stderr": 0.029480360549541194, + "acc_norm": 0.717948717948718, + "acc_norm_stderr": 0.029480360549541194 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4528301886792453, + "acc_stderr": 0.030635627957961823, + "acc_norm": 0.4528301886792453, + "acc_norm_stderr": 0.030635627957961823 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5, + "acc_stderr": 0.04789131426105757, + 
"acc_norm": 0.5, + "acc_norm_stderr": 0.04789131426105757 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.34444444444444444, + "acc_stderr": 0.028972648884844267, + "acc_norm": 0.34444444444444444, + "acc_norm_stderr": 0.028972648884844267 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.038020397601079024, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.038020397601079024 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6716417910447762, + "acc_stderr": 0.033206858897443244, + "acc_norm": 0.6716417910447762, + "acc_norm_stderr": 0.033206858897443244 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3930635838150289, + "acc_stderr": 0.03724249595817731, + "acc_norm": 0.3930635838150289, + "acc_norm_stderr": 0.03724249595817731 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3915343915343915, + "acc_stderr": 0.025138091388851105, + "acc_norm": 0.3915343915343915, + "acc_norm_stderr": 0.025138091388851105 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.04076663253918567, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.04076663253918567 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.69, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.69, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5202312138728323, + "acc_stderr": 0.026897049996382868, + "acc_norm": 0.5202312138728323, + "acc_norm_stderr": 0.026897049996382868 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.49079754601226994, + "acc_stderr": 0.03927705600787443, + "acc_norm": 0.49079754601226994, + "acc_norm_stderr": 0.03927705600787443 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.47530864197530864, + "acc_stderr": 
0.02778680093142745, + "acc_norm": 0.47530864197530864, + "acc_norm_stderr": 0.02778680093142745 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5492227979274611, + "acc_stderr": 0.03590910952235524, + "acc_norm": 0.5492227979274611, + "acc_norm_stderr": 0.03590910952235524 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04434600701584925, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04434600701584925 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5247706422018349, + "acc_stderr": 0.021410999753635914, + "acc_norm": 0.5247706422018349, + "acc_norm_stderr": 0.021410999753635914 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3968253968253968, + "acc_stderr": 0.043758884927270605, + "acc_norm": 0.3968253968253968, + "acc_norm_stderr": 0.043758884927270605 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.47058823529411764, + "acc_stderr": 0.028580341065138293, + "acc_norm": 0.47058823529411764, + "acc_norm_stderr": 0.028580341065138293 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6859504132231405, + "acc_stderr": 0.04236964753041018, + "acc_norm": 0.6859504132231405, + "acc_norm_stderr": 0.04236964753041018 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.506578947368421, + "acc_stderr": 0.040685900502249704, + "acc_norm": 0.506578947368421, + "acc_norm_stderr": 0.040685900502249704 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4133986928104575, + "acc_stderr": 0.0199221156827867, + "acc_norm": 0.4133986928104575, + "acc_norm_stderr": 0.0199221156827867 + }, + "harness|ko_mmlu_professional_accounting|5": { + 
"acc": 0.34397163120567376, + "acc_stderr": 0.02833801742861132, + "acc_norm": 0.34397163120567376, + "acc_norm_stderr": 0.02833801742861132 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4375, + "acc_stderr": 0.04708567521880525, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.04708567521880525 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.03214952147802749, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.03214952147802749 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.3340782122905028, + "acc_stderr": 0.015774911422381622, + "acc_norm": 0.3340782122905028, + "acc_norm_stderr": 0.015774911422381622 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4117647058823529, + "acc_stderr": 0.029896163033125468, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.029896163033125468 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5306122448979592, + "acc_stderr": 0.031949171367580624, + "acc_norm": 0.5306122448979592, + "acc_norm_stderr": 0.031949171367580624 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6286919831223629, + "acc_stderr": 0.03145068600744859, + "acc_norm": 0.6286919831223629, + "acc_norm_stderr": 0.03145068600744859 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3520208604954368, + "acc_stderr": 0.012198140605353595, + "acc_norm": 0.3520208604954368, + "acc_norm_stderr": 0.012198140605353595 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4803921568627451, + "acc_stderr": 0.03506612560524866, + "acc_norm": 0.4803921568627451, + "acc_norm_stderr": 0.03506612560524866 + 
}, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4909090909090909, + "acc_stderr": 0.03903698647748441, + "acc_norm": 0.4909090909090909, + "acc_norm_stderr": 0.03903698647748441 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.29865361077111385, + "mc1_stderr": 0.016021570613768545, + "mc2": 0.4557831168873756, + "mc2_stderr": 0.01553351013285851 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.42384887839433294, + "acc_stderr": 0.01698981083462826, + "acc_norm": 0.47107438016528924, + "acc_norm_stderr": 0.017161563949916345 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + 
"harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "devhyun88/ku-mistral-7b-PGO-v2", + "model_sha": "c60fc585fc6621d322bb309a3f6d3763c2409fa6", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/devhyun88/ku-mistral-7b-PGO-v3/result_2023-11-20 02:12:33.json b/devhyun88/ku-mistral-7b-PGO-v3/result_2023-11-20 02:12:33.json new file mode 100644 index 0000000000000000000000000000000000000000..8ce4694c1ff90649fa0c03efb2eea91e6d99749f --- /dev/null +++ b/devhyun88/ku-mistral-7b-PGO-v3/result_2023-11-20 02:12:33.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + 
"acc": 0.3626279863481229, + "acc_stderr": 0.014049106564955, + "acc_norm": 0.4087030716723549, + "acc_norm_stderr": 0.014365750345427006 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3736307508464449, + "acc_stderr": 0.004827786289074851, + "acc_norm": 0.47769368651663013, + "acc_norm_stderr": 0.004984813391016205 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.3684210526315789, + "acc_stderr": 0.036996580176568775, + "acc_norm": 0.3684210526315789, + "acc_norm_stderr": 0.036996580176568775 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4563106796116505, + "acc_stderr": 0.049318019942204146, + "acc_norm": 0.4563106796116505, + "acc_norm_stderr": 0.049318019942204146 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4112388250319285, + "acc_stderr": 0.017595971908056576, + "acc_norm": 0.4112388250319285, + "acc_norm_stderr": 0.017595971908056576 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04072314811876837, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04072314811876837 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.046482319871173156, + "acc_norm": 0.31, + "acc_norm_stderr": 0.046482319871173156 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3872340425531915, + "acc_stderr": 0.03184389265339526, + "acc_norm": 0.3872340425531915, + "acc_norm_stderr": 0.03184389265339526 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.30120481927710846, + "acc_stderr": 0.03571609230053481, + "acc_norm": 0.30120481927710846, + "acc_norm_stderr": 0.03571609230053481 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4533762057877814, + "acc_stderr": 0.028274359854894245, + "acc_norm": 0.4533762057877814, + "acc_norm_stderr": 0.028274359854894245 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.47533632286995514, + "acc_stderr": 0.03351695167652628, + "acc_norm": 0.47533632286995514, + "acc_norm_stderr": 0.03351695167652628 + }, + 
"harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3893129770992366, + "acc_stderr": 0.04276486542814591, + "acc_norm": 0.3893129770992366, + "acc_norm_stderr": 0.04276486542814591 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.40404040404040403, + "acc_stderr": 0.03496130972056129, + "acc_norm": 0.40404040404040403, + "acc_norm_stderr": 0.03496130972056129 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4068965517241379, + "acc_stderr": 0.04093793981266237, + "acc_norm": 0.4068965517241379, + "acc_norm_stderr": 0.04093793981266237 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.04280105837364395, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.04280105837364395 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.031124619309328177, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.031124619309328177 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.38461538461538464, + "acc_stderr": 0.02466674491518724, + "acc_norm": 0.38461538461538464, + "acc_norm_stderr": 0.02466674491518724 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4537037037037037, + "acc_stderr": 0.04812917324536824, + "acc_norm": 0.4537037037037037, + "acc_norm_stderr": 0.04812917324536824 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3054187192118227, + "acc_stderr": 0.03240661565868408, + "acc_norm": 0.3054187192118227, + "acc_norm_stderr": 
0.03240661565868408 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.42258064516129035, + "acc_stderr": 0.028100964724272638, + "acc_norm": 0.42258064516129035, + "acc_norm_stderr": 0.028100964724272638 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6239316239316239, + "acc_stderr": 0.03173393632969482, + "acc_norm": 0.6239316239316239, + "acc_norm_stderr": 0.03173393632969482 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3320754716981132, + "acc_stderr": 0.02898545565233439, + "acc_norm": 0.3320754716981132, + "acc_norm_stderr": 0.02898545565233439 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.45454545454545453, + "acc_stderr": 0.04769300568972743, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.04769300568972743 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.02684205787383371, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.02684205787383371 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + "acc_stderr": 0.037345356767871984, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.037345356767871984 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5771144278606966, + "acc_stderr": 0.034932317774212816, + "acc_norm": 0.5771144278606966, + "acc_norm_stderr": 0.034932317774212816 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3468208092485549, + "acc_stderr": 0.036291466701596636, + "acc_norm": 0.3468208092485549, + "acc_norm_stderr": 0.036291466701596636 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3253968253968254, + "acc_stderr": 0.024130158299762602, + "acc_norm": 0.3253968253968254, + "acc_norm_stderr": 0.024130158299762602 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2916666666666667, + "acc_stderr": 0.03800968060554859, + "acc_norm": 0.2916666666666667, + "acc_norm_stderr": 0.03800968060554859 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.28, + 
"acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.40173410404624277, + "acc_stderr": 0.026394104177643627, + "acc_norm": 0.40173410404624277, + "acc_norm_stderr": 0.026394104177643627 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3987730061349693, + "acc_stderr": 0.03847021420456023, + "acc_norm": 0.3987730061349693, + "acc_norm_stderr": 0.03847021420456023 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.42592592592592593, + "acc_stderr": 0.027513747284379424, + "acc_norm": 0.42592592592592593, + "acc_norm_stderr": 0.027513747284379424 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.47668393782383417, + "acc_stderr": 0.03604513672442206, + "acc_norm": 0.47668393782383417, + "acc_norm_stderr": 0.03604513672442206 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.042270544512322004, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.042270544512322004 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3908256880733945, + "acc_stderr": 0.020920058346111065, + "acc_norm": 0.3908256880733945, + "acc_norm_stderr": 0.020920058346111065 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3253968253968254, + "acc_stderr": 0.041905964388711366, + "acc_norm": 0.3253968253968254, + "acc_norm_stderr": 0.041905964388711366 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.45098039215686275, + "acc_stderr": 0.02849199358617156, + "acc_norm": 0.45098039215686275, + "acc_norm_stderr": 0.02849199358617156 + }, + "harness|ko_mmlu_business_ethics|5": { + 
"acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.04545454545454546, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.04545454545454546 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3157894736842105, + "acc_stderr": 0.0378272898086547, + "acc_norm": 0.3157894736842105, + "acc_norm_stderr": 0.0378272898086547 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.37254901960784315, + "acc_stderr": 0.01955964680921593, + "acc_norm": 0.37254901960784315, + "acc_norm_stderr": 0.01955964680921593 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.35815602836879434, + "acc_stderr": 0.028602085862759412, + "acc_norm": 0.35815602836879434, + "acc_norm_stderr": 0.028602085862759412 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.33035714285714285, + "acc_stderr": 0.04464285714285712, + "acc_norm": 0.33035714285714285, + "acc_norm_stderr": 0.04464285714285712 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.03167468706828979, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.03167468706828979 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.25139664804469275, + "acc_stderr": 0.014508979453553998, + "acc_norm": 0.25139664804469275, + "acc_norm_stderr": 0.014508979453553998 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.04960449637488583, + "acc_norm": 0.42, + "acc_norm_stderr": 0.04960449637488583 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.30514705882352944, + "acc_stderr": 0.0279715413701706, + "acc_norm": 0.30514705882352944, + "acc_norm_stderr": 0.0279715413701706 + }, + 
"harness|ko_mmlu_security_studies|5": { + "acc": 0.40816326530612246, + "acc_stderr": 0.03146465712827423, + "acc_norm": 0.40816326530612246, + "acc_norm_stderr": 0.03146465712827423 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5569620253164557, + "acc_stderr": 0.03233532777533484, + "acc_norm": 0.5569620253164557, + "acc_norm_stderr": 0.03233532777533484 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.32333767926988266, + "acc_stderr": 0.011946565758447202, + "acc_norm": 0.32333767926988266, + "acc_norm_stderr": 0.011946565758447202 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.39215686274509803, + "acc_stderr": 0.034267123492472705, + "acc_norm": 0.39215686274509803, + "acc_norm_stderr": 0.034267123492472705 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.45454545454545453, + "acc_stderr": 0.03888176921674098, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.03888176921674098 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2386780905752754, + "mc1_stderr": 0.014922629695456418, + "mc2": 0.4157388974059479, + "mc2_stderr": 0.015224506818663186 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.33766233766233766, + "acc_stderr": 0.016259075784754953, + "acc_norm": 0.3860684769775679, + "acc_norm_stderr": 0.016738130760321747 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + 
"harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + 
"harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "devhyun88/ku-mistral-7b-PGO-v3", + "model_sha": "2def3ad0a50695d8b48b9b14350b35f8650a81e5", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/devhyun88/ku-mistral-7b-PGO-v4/result_2023-11-24 00:42:44.json b/devhyun88/ku-mistral-7b-PGO-v4/result_2023-11-24 00:42:44.json new file mode 100644 index 0000000000000000000000000000000000000000..08a25fd8f3c2c91853b939db358ba67a6cac1993 --- /dev/null +++ b/devhyun88/ku-mistral-7b-PGO-v4/result_2023-11-24 00:42:44.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.40102389078498296, + "acc_stderr": 0.014322255790719872, + "acc_norm": 0.44283276450511944, + "acc_norm_stderr": 0.014515573873348913 + }, + "harness|ko_hellaswag|10": { + "acc": 0.39533957379008167, + "acc_stderr": 0.004879242848473461, + "acc_norm": 0.5145389364668392, + "acc_norm_stderr": 0.004987671478640939 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.38596491228070173, + "acc_stderr": 0.03733756969066165, + "acc_norm": 0.38596491228070173, + "acc_norm_stderr": 0.03733756969066165 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5048543689320388, + "acc_stderr": 0.049505043821289195, + "acc_norm": 0.5048543689320388, + "acc_norm_stderr": 0.049505043821289195 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4648786717752235, + "acc_stderr": 0.017835798806290642, + "acc_norm": 0.4648786717752235, + "acc_norm_stderr": 0.017835798806290642 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.35555555555555557, + "acc_stderr": 0.04135176749720386, + "acc_norm": 0.35555555555555557, + "acc_norm_stderr": 0.04135176749720386 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + 
"acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.41702127659574467, + "acc_stderr": 0.03223276266711712, + "acc_norm": 0.41702127659574467, + "acc_norm_stderr": 0.03223276266711712 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4036144578313253, + "acc_stderr": 0.03819486140758398, + "acc_norm": 0.4036144578313253, + "acc_norm_stderr": 0.03819486140758398 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4694533762057878, + "acc_stderr": 0.028345045864840674, + "acc_norm": 0.4694533762057878, + "acc_norm_stderr": 0.028345045864840674 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4125560538116592, + "acc_stderr": 0.03304062175449296, + "acc_norm": 0.4125560538116592, + "acc_norm_stderr": 0.03304062175449296 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.44274809160305345, + "acc_stderr": 0.043564472026650695, + "acc_norm": 0.44274809160305345, + "acc_norm_stderr": 0.043564472026650695 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5050505050505051, + "acc_stderr": 0.035621707606254015, + "acc_norm": 0.5050505050505051, + "acc_norm_stderr": 0.035621707606254015 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.43448275862068964, + "acc_stderr": 0.04130740879555497, + "acc_norm": 0.43448275862068964, + "acc_norm_stderr": 0.04130740879555497 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.04023382273617746, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.04023382273617746 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.03214536859788639, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.03214536859788639 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.41794871794871796, + 
"acc_stderr": 0.025007329882461224, + "acc_norm": 0.41794871794871796, + "acc_norm_stderr": 0.025007329882461224 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.48, + "acc_stderr": 0.05021167315686781, + "acc_norm": 0.48, + "acc_norm_stderr": 0.05021167315686781 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768077, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768077 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5277777777777778, + "acc_stderr": 0.04826217294139894, + "acc_norm": 0.5277777777777778, + "acc_norm_stderr": 0.04826217294139894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.37438423645320196, + "acc_stderr": 0.03405155380561952, + "acc_norm": 0.37438423645320196, + "acc_norm_stderr": 0.03405155380561952 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.45161290322580644, + "acc_stderr": 0.028310500348568385, + "acc_norm": 0.45161290322580644, + "acc_norm_stderr": 0.028310500348568385 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.030882736974138656, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.030882736974138656 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.41132075471698115, + "acc_stderr": 0.030285009259009812, + "acc_norm": 0.41132075471698115, + "acc_norm_stderr": 0.030285009259009812 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4818181818181818, + "acc_stderr": 0.04785964010794916, + "acc_norm": 0.4818181818181818, + "acc_norm_stderr": 0.04785964010794916 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.026719240783712177, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.026719240783712177 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.03780445850526732, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.03780445850526732 + }, + "harness|ko_mmlu_sociology|5": 
{ + "acc": 0.6268656716417911, + "acc_stderr": 0.034198326081760065, + "acc_norm": 0.6268656716417911, + "acc_norm_stderr": 0.034198326081760065 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3468208092485549, + "acc_stderr": 0.03629146670159664, + "acc_norm": 0.3468208092485549, + "acc_norm_stderr": 0.03629146670159664 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.025107425481137282, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.025107425481137282 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3402777777777778, + "acc_stderr": 0.03962135573486219, + "acc_norm": 0.3402777777777778, + "acc_norm_stderr": 0.03962135573486219 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.63, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.63, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5057803468208093, + "acc_stderr": 0.026917296179149123, + "acc_norm": 0.5057803468208093, + "acc_norm_stderr": 0.026917296179149123 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4723926380368098, + "acc_stderr": 0.0392237829061099, + "acc_norm": 0.4723926380368098, + "acc_norm_stderr": 0.0392237829061099 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.45987654320987653, + "acc_stderr": 0.027731022753539274, + "acc_norm": 0.45987654320987653, + "acc_norm_stderr": 0.027731022753539274 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.49740932642487046, + "acc_stderr": 0.03608390745384487, + "acc_norm": 0.49740932642487046, + "acc_norm_stderr": 0.03608390745384487 + }, + 
"harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.04185774424022057, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.04185774424022057 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.46422018348623856, + "acc_stderr": 0.0213823647757019, + "acc_norm": 0.46422018348623856, + "acc_norm_stderr": 0.0213823647757019 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3412698412698413, + "acc_stderr": 0.042407993275749255, + "acc_norm": 0.3412698412698413, + "acc_norm_stderr": 0.042407993275749255 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4934640522875817, + "acc_stderr": 0.028627470550556047, + "acc_norm": 0.4934640522875817, + "acc_norm_stderr": 0.028627470550556047 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.628099173553719, + "acc_stderr": 0.044120158066245044, + "acc_norm": 0.628099173553719, + "acc_norm_stderr": 0.044120158066245044 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.39473684210526316, + "acc_stderr": 0.039777499346220734, + "acc_norm": 0.39473684210526316, + "acc_norm_stderr": 0.039777499346220734 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.019722058939618068, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.019722058939618068 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.34397163120567376, + "acc_stderr": 0.028338017428611327, + "acc_norm": 0.34397163120567376, + "acc_norm_stderr": 0.028338017428611327 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.375, + "acc_stderr": 0.04595091388086298, + "acc_norm": 0.375, + "acc_norm_stderr": 0.04595091388086298 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.36574074074074076, + "acc_stderr": 0.03284738857647207, + "acc_norm": 0.36574074074074076, + 
"acc_norm_stderr": 0.03284738857647207 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2435754189944134, + "acc_stderr": 0.014355911964767867, + "acc_norm": 0.2435754189944134, + "acc_norm_stderr": 0.014355911964767867 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.33088235294117646, + "acc_stderr": 0.02858270975389844, + "acc_norm": 0.33088235294117646, + "acc_norm_stderr": 0.02858270975389844 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5142857142857142, + "acc_stderr": 0.031996152328062855, + "acc_norm": 0.5142857142857142, + "acc_norm_stderr": 0.031996152328062855 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5822784810126582, + "acc_stderr": 0.032103530322412685, + "acc_norm": 0.5822784810126582, + "acc_norm_stderr": 0.032103530322412685 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3494132985658409, + "acc_stderr": 0.012177306252786683, + "acc_norm": 0.3494132985658409, + "acc_norm_stderr": 0.012177306252786683 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.47058823529411764, + "acc_stderr": 0.03503235296367992, + "acc_norm": 0.47058823529411764, + "acc_norm_stderr": 0.03503235296367992 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.48484848484848486, + "acc_stderr": 0.03902551007374449, + "acc_norm": 0.48484848484848486, + "acc_norm_stderr": 0.03902551007374449 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2827417380660955, + "mc1_stderr": 0.015764770836777298, + "mc2": 0.43552694859151936, + "mc2_stderr": 0.015531533195686252 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.38961038961038963, + "acc_stderr": 
0.016766161671893504, + "acc_norm": 0.43683589138134593, + "acc_norm_stderr": 0.017052633559856076 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + 
"harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "devhyun88/ku-mistral-7b-PGO-v4", + "model_sha": "e10de879bef89b759447acd6910fab94dc89f750", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/devhyun88/ku-mistral-7b-PGO-v5/result_2023-12-04 06:12:17.json b/devhyun88/ku-mistral-7b-PGO-v5/result_2023-12-04 06:12:17.json new file mode 100644 index 0000000000000000000000000000000000000000..660512d3c86f16d0f5b0b3cd6046be7e248c32db --- /dev/null +++ b/devhyun88/ku-mistral-7b-PGO-v5/result_2023-12-04 06:12:17.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3003412969283277, + "acc_stderr": 0.013395909309956999, + "acc_norm": 0.35238907849829354, + "acc_norm_stderr": 0.013960142600598675 + }, + "harness|ko_hellaswag|10": { + "acc": 0.35879306910973907, + "acc_stderr": 0.004786660691181924, + "acc_norm": 0.44284007169886475, + "acc_norm_stderr": 0.004957068377516515 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.27485380116959063, + "acc_stderr": 0.03424042924691583, + 
"acc_norm": 0.27485380116959063, + "acc_norm_stderr": 0.03424042924691583 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.3786407766990291, + "acc_stderr": 0.04802694698258973, + "acc_norm": 0.3786407766990291, + "acc_norm_stderr": 0.04802694698258973 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.36270753512132825, + "acc_stderr": 0.0171927086746023, + "acc_norm": 0.36270753512132825, + "acc_norm_stderr": 0.0171927086746023 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3037037037037037, + "acc_stderr": 0.039725528847851375, + "acc_norm": 0.3037037037037037, + "acc_norm_stderr": 0.039725528847851375 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.37446808510638296, + "acc_stderr": 0.03163910665367291, + "acc_norm": 0.37446808510638296, + "acc_norm_stderr": 0.03163910665367291 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3253012048192771, + "acc_stderr": 0.036471685236832266, + "acc_norm": 0.3253012048192771, + "acc_norm_stderr": 0.036471685236832266 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3279742765273312, + "acc_stderr": 0.026664410886937613, + "acc_norm": 0.3279742765273312, + "acc_norm_stderr": 0.026664410886937613 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.35874439461883406, + "acc_stderr": 0.032190792004199956, + "acc_norm": 0.35874439461883406, + "acc_norm_stderr": 0.032190792004199956 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3435114503816794, + "acc_stderr": 0.041649760719448786, + "acc_norm": 0.3435114503816794, + "acc_norm_stderr": 0.041649760719448786 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.31313131313131315, + "acc_stderr": 0.033042050878136525, + 
"acc_norm": 0.31313131313131315, + "acc_norm_stderr": 0.033042050878136525 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3310344827586207, + "acc_stderr": 0.039215453124671215, + "acc_norm": 0.3310344827586207, + "acc_norm_stderr": 0.039215453124671215 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.19607843137254902, + "acc_stderr": 0.03950581861179962, + "acc_norm": 0.19607843137254902, + "acc_norm_stderr": 0.03950581861179962 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.28991596638655465, + "acc_stderr": 0.029472485833136088, + "acc_norm": 0.28991596638655465, + "acc_norm_stderr": 0.029472485833136088 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2282051282051282, + "acc_stderr": 0.021278393863586282, + "acc_norm": 0.2282051282051282, + "acc_norm_stderr": 0.021278393863586282 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.39814814814814814, + "acc_stderr": 0.04732332615978814, + "acc_norm": 0.39814814814814814, + "acc_norm_stderr": 0.04732332615978814 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3497536945812808, + "acc_stderr": 0.03355400904969565, + "acc_norm": 0.3497536945812808, + "acc_norm_stderr": 0.03355400904969565 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3258064516129032, + "acc_stderr": 0.026662010578567104, + "acc_norm": 0.3258064516129032, + "acc_norm_stderr": 0.026662010578567104 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5, + "acc_stderr": 0.03275608910402091, + "acc_norm": 0.5, + "acc_norm_stderr": 0.03275608910402091 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.27169811320754716, + "acc_stderr": 
0.027377706624670713, + "acc_norm": 0.27169811320754716, + "acc_norm_stderr": 0.027377706624670713 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.37272727272727274, + "acc_stderr": 0.04631381319425463, + "acc_norm": 0.37272727272727274, + "acc_norm_stderr": 0.04631381319425463 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.026719240783712163, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.026719240783712163 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.32450331125827814, + "acc_stderr": 0.03822746937658753, + "acc_norm": 0.32450331125827814, + "acc_norm_stderr": 0.03822746937658753 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.373134328358209, + "acc_stderr": 0.03419832608176007, + "acc_norm": 0.373134328358209, + "acc_norm_stderr": 0.03419832608176007 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.30057803468208094, + "acc_stderr": 0.03496101481191179, + "acc_norm": 0.30057803468208094, + "acc_norm_stderr": 0.03496101481191179 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.02351729433596329, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.02351729433596329 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2152777777777778, + "acc_stderr": 0.034370793441061344, + "acc_norm": 0.2152777777777778, + "acc_norm_stderr": 0.034370793441061344 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.36127167630057805, + "acc_stderr": 0.025862201852277902, + "acc_norm": 0.36127167630057805, + "acc_norm_stderr": 0.025862201852277902 + }, + "harness|ko_mmlu_logical_fallacies|5": { + 
"acc": 0.3128834355828221, + "acc_stderr": 0.03642914578292404, + "acc_norm": 0.3128834355828221, + "acc_norm_stderr": 0.03642914578292404 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.026571483480719967, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.026571483480719967 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.3005181347150259, + "acc_stderr": 0.0330881859441575, + "acc_norm": 0.3005181347150259, + "acc_norm_stderr": 0.0330881859441575 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.041424397194893624, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.041424397194893624 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3339449541284404, + "acc_stderr": 0.020220554196736407, + "acc_norm": 0.3339449541284404, + "acc_norm_stderr": 0.020220554196736407 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.040406101782088394, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.040406101782088394 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.32679738562091504, + "acc_stderr": 0.02685729466328141, + "acc_norm": 0.32679738562091504, + "acc_norm_stderr": 0.02685729466328141 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.512396694214876, + "acc_stderr": 0.04562951548180765, + "acc_norm": 0.512396694214876, + "acc_norm_stderr": 0.04562951548180765 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3092105263157895, + "acc_stderr": 0.03761070869867479, + "acc_norm": 0.3092105263157895, + "acc_norm_stderr": 0.03761070869867479 + }, + 
"harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2973856209150327, + "acc_stderr": 0.018492596536396955, + "acc_norm": 0.2973856209150327, + "acc_norm_stderr": 0.018492596536396955 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32978723404255317, + "acc_stderr": 0.02804594694204241, + "acc_norm": 0.32978723404255317, + "acc_norm_stderr": 0.02804594694204241 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.04287858751340456, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.04287858751340456 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.26851851851851855, + "acc_stderr": 0.0302252261600124, + "acc_norm": 0.26851851851851855, + "acc_norm_stderr": 0.0302252261600124 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.25027932960893856, + "acc_stderr": 0.014487500852850412, + "acc_norm": 0.25027932960893856, + "acc_norm_stderr": 0.014487500852850412 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.44485294117647056, + "acc_stderr": 0.030187532060329387, + "acc_norm": 0.44485294117647056, + "acc_norm_stderr": 0.030187532060329387 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.24897959183673468, + "acc_stderr": 0.027682979522960238, + "acc_norm": 0.24897959183673468, + "acc_norm_stderr": 0.027682979522960238 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.4008438818565401, + "acc_stderr": 0.03190080389473236, + "acc_norm": 0.4008438818565401, + "acc_norm_stderr": 0.03190080389473236 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2646675358539765, + "acc_stderr": 0.011267332992845542, 
+ "acc_norm": 0.2646675358539765, + "acc_norm_stderr": 0.011267332992845542 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.03198001660115071, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.03198001660115071 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03225078108306289, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03225078108306289 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.23745410036719705, + "mc1_stderr": 0.01489627744104187, + "mc2": 0.4111411666560298, + "mc2_stderr": 0.015517895415930608 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.31286894923258557, + "acc_stderr": 0.01594101011830266, + "acc_norm": 0.42502951593860683, + "acc_norm_stderr": 0.016996016308362883 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + 
"harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "devhyun88/ku-mistral-7b-PGO-v5", + "model_sha": "155245510057950127db75bedc014d1a144add5a", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/devhyun88/kullama2-7b-ko-PGO/result_2023-11-02 02:24:24.json 
b/devhyun88/kullama2-7b-ko-PGO/result_2023-11-02 02:24:24.json new file mode 100644 index 0000000000000000000000000000000000000000..12d30ac2dba1ef4b2350fc0560f454becd6fb3ac --- /dev/null +++ b/devhyun88/kullama2-7b-ko-PGO/result_2023-11-02 02:24:24.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3319112627986348, + "acc_stderr": 0.013760988200880533, + "acc_norm": 0.38993174061433444, + "acc_norm_stderr": 0.014252959848892887 + }, + "harness|ko_hellaswag|10": { + "acc": 0.40290778729336785, + "acc_stderr": 0.0048948011198986134, + "acc_norm": 0.5275841465843457, + "acc_norm_stderr": 0.00498218232392356 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.25146198830409355, + "acc_stderr": 0.033275044238468436, + "acc_norm": 0.25146198830409355, + "acc_norm_stderr": 0.033275044238468436 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.32038834951456313, + "acc_stderr": 0.0462028408228004, + "acc_norm": 0.32038834951456313, + "acc_norm_stderr": 0.0462028408228004 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.388250319284802, + "acc_stderr": 0.017427673295544326, + "acc_norm": 0.388250319284802, + "acc_norm_stderr": 0.017427673295544326 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34814814814814815, + "acc_stderr": 0.041153246103369526, + "acc_norm": 0.34814814814814815, + "acc_norm_stderr": 0.041153246103369526 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421255, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421255 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.37446808510638296, + "acc_stderr": 0.03163910665367291, + "acc_norm": 0.37446808510638296, + "acc_norm_stderr": 0.03163910665367291 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.37349397590361444, + "acc_stderr": 0.037658451171688624, + "acc_norm": 0.37349397590361444, + "acc_norm_stderr": 0.037658451171688624 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.36012861736334406, 
+ "acc_stderr": 0.027264297599804015, + "acc_norm": 0.36012861736334406, + "acc_norm_stderr": 0.027264297599804015 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4439461883408072, + "acc_stderr": 0.03334625674242728, + "acc_norm": 0.4439461883408072, + "acc_norm_stderr": 0.03334625674242728 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.40458015267175573, + "acc_stderr": 0.043046937953806645, + "acc_norm": 0.40458015267175573, + "acc_norm_stderr": 0.043046937953806645 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.32323232323232326, + "acc_stderr": 0.03332299921070645, + "acc_norm": 0.32323232323232326, + "acc_norm_stderr": 0.03332299921070645 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2827586206896552, + "acc_stderr": 0.03752833958003336, + "acc_norm": 0.2827586206896552, + "acc_norm_stderr": 0.03752833958003336 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237655, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237655 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3025210084033613, + "acc_stderr": 0.029837962388291926, + "acc_norm": 0.3025210084033613, + "acc_norm_stderr": 0.029837962388291926 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2512820512820513, + "acc_stderr": 0.021992016662370554, + "acc_norm": 0.2512820512820513, + "acc_norm_stderr": 0.021992016662370554 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110175, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110175 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_jurisprudence|5": { + 
"acc": 0.3333333333333333, + "acc_stderr": 0.04557239513497752, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04557239513497752 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.270935960591133, + "acc_stderr": 0.03127090713297697, + "acc_norm": 0.270935960591133, + "acc_norm_stderr": 0.03127090713297697 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.29354838709677417, + "acc_stderr": 0.025906087021319295, + "acc_norm": 0.29354838709677417, + "acc_norm_stderr": 0.025906087021319295 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.4230769230769231, + "acc_stderr": 0.032366121762202014, + "acc_norm": 0.4230769230769231, + "acc_norm_stderr": 0.032366121762202014 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3320754716981132, + "acc_stderr": 0.02898545565233439, + "acc_norm": 0.3320754716981132, + "acc_norm_stderr": 0.02898545565233439 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.34545454545454546, + "acc_stderr": 0.04554619617541054, + "acc_norm": 0.34545454545454546, + "acc_norm_stderr": 0.04554619617541054 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.026719240783712163, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.026719240783712163 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.23841059602649006, + "acc_stderr": 0.0347918557259966, + "acc_norm": 0.23841059602649006, + "acc_norm_stderr": 0.0347918557259966 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.32338308457711445, + "acc_stderr": 0.03307615947979033, + "acc_norm": 0.32338308457711445, + "acc_norm_stderr": 0.03307615947979033 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.21965317919075145, + "acc_stderr": 0.031568093627031744, + "acc_norm": 0.21965317919075145, + "acc_norm_stderr": 0.031568093627031744 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.24074074074074073, + "acc_stderr": 0.0220190800122179, + "acc_norm": 
0.24074074074074073, + "acc_norm_stderr": 0.0220190800122179 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2708333333333333, + "acc_stderr": 0.03716177437566016, + "acc_norm": 0.2708333333333333, + "acc_norm_stderr": 0.03716177437566016 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816505, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816505 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.34971098265895956, + "acc_stderr": 0.025674281456531018, + "acc_norm": 0.34971098265895956, + "acc_norm_stderr": 0.025674281456531018 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3006134969325153, + "acc_stderr": 0.0360251131880677, + "acc_norm": 0.3006134969325153, + "acc_norm_stderr": 0.0360251131880677 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.025842248700902175, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.025842248700902175 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.25906735751295334, + "acc_stderr": 0.03161877917935411, + "acc_norm": 0.25906735751295334, + "acc_norm_stderr": 0.03161877917935411 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.041857744240220575, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.041857744240220575 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.344954128440367, + "acc_stderr": 0.020380605405066952, + "acc_norm": 0.344954128440367, + "acc_norm_stderr": 0.020380605405066952 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.20634920634920634, + "acc_stderr": 0.036196045241242515, + 
"acc_norm": 0.20634920634920634, + "acc_norm_stderr": 0.036196045241242515 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3627450980392157, + "acc_stderr": 0.0275300784471103, + "acc_norm": 0.3627450980392157, + "acc_norm_stderr": 0.0275300784471103 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.4462809917355372, + "acc_stderr": 0.04537935177947879, + "acc_norm": 0.4462809917355372, + "acc_norm_stderr": 0.04537935177947879 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.23026315789473684, + "acc_stderr": 0.034260594244031654, + "acc_norm": 0.23026315789473684, + "acc_norm_stderr": 0.034260594244031654 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.272875816993464, + "acc_stderr": 0.018020474148393577, + "acc_norm": 0.272875816993464, + "acc_norm_stderr": 0.018020474148393577 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.25177304964539005, + "acc_stderr": 0.0258921511567094, + "acc_norm": 0.25177304964539005, + "acc_norm_stderr": 0.0258921511567094 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.26785714285714285, + "acc_stderr": 0.04203277291467763, + "acc_norm": 0.26785714285714285, + "acc_norm_stderr": 0.04203277291467763 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.23148148148148148, + "acc_stderr": 0.028765111718046955, + "acc_norm": 0.23148148148148148, + "acc_norm_stderr": 0.028765111718046955 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.23, + 
"acc_stderr": 0.04229525846816508, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816508 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.027678468642144696, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.027678468642144696 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.35918367346938773, + "acc_stderr": 0.03071356045510849, + "acc_norm": 0.35918367346938773, + "acc_norm_stderr": 0.03071356045510849 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.3881856540084388, + "acc_stderr": 0.031722950043323296, + "acc_norm": 0.3881856540084388, + "acc_norm_stderr": 0.031722950043323296 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2737940026075619, + "acc_stderr": 0.011388612167979392, + "acc_norm": 0.2737940026075619, + "acc_norm_stderr": 0.011388612167979392 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2696078431372549, + "acc_stderr": 0.031145570659486782, + "acc_norm": 0.2696078431372549, + "acc_norm_stderr": 0.031145570659486782 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.296969696969697, + "acc_stderr": 0.035679697722680474, + "acc_norm": 0.296969696969697, + "acc_norm_stderr": 0.035679697722680474 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.26560587515299877, + "mc1_stderr": 0.015461027627253597, + "mc2": 0.40682664044126005, + "mc2_stderr": 0.014892332644374185 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3293978748524203, + "acc_stderr": 0.016158746868147143, + "acc_norm": 0.44510035419126326, + "acc_norm_stderr": 0.01708641743100547 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + 
"harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + 
"harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "devhyun88/kullama2-7b-ko-PGO", + "model_sha": "63bbaf7382147cfaaee56a7c3126413288520e9c", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/devhyun88/kullama2-7b-platypus-kogpt4/result_2023-10-30 07:47:57.json b/devhyun88/kullama2-7b-platypus-kogpt4/result_2023-10-30 07:47:57.json new file mode 100644 index 0000000000000000000000000000000000000000..a17b888ca841ba98d07794de038e0a87ee94ce50 --- /dev/null +++ b/devhyun88/kullama2-7b-platypus-kogpt4/result_2023-10-30 07:47:57.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3430034129692833, + "acc_stderr": 0.013872423223718173, + "acc_norm": 0.39590443686006827, + "acc_norm_stderr": 0.01429122839353659 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4023102967536347, + "acc_stderr": 0.00489361701497531, + "acc_norm": 0.5319657438757219, + "acc_norm_stderr": 0.0049795737655758615 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.3216374269005848, + "acc_stderr": 0.03582529442573122, + "acc_norm": 0.3216374269005848, + "acc_norm_stderr": 0.03582529442573122 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.27184466019417475, + "acc_stderr": 0.044052680241409216, + "acc_norm": 0.27184466019417475, + "acc_norm_stderr": 0.044052680241409216 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.37292464878671777, + "acc_stderr": 0.01729286826945392, + "acc_norm": 0.37292464878671777, + "acc_norm_stderr": 0.01729286826945392 + }, 
+ "harness|ko_mmlu_anatomy|5": { + "acc": 0.31851851851851853, + "acc_stderr": 0.0402477840197711, + "acc_norm": 0.31851851851851853, + "acc_norm_stderr": 0.0402477840197711 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.35319148936170214, + "acc_stderr": 0.031245325202761926, + "acc_norm": 0.35319148936170214, + "acc_norm_stderr": 0.031245325202761926 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.25903614457831325, + "acc_stderr": 0.03410646614071856, + "acc_norm": 0.25903614457831325, + "acc_norm_stderr": 0.03410646614071856 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3504823151125402, + "acc_stderr": 0.027098652621301747, + "acc_norm": 0.3504823151125402, + "acc_norm_stderr": 0.027098652621301747 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4080717488789238, + "acc_stderr": 0.03298574607842822, + "acc_norm": 0.4080717488789238, + "acc_norm_stderr": 0.03298574607842822 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.366412213740458, + "acc_stderr": 0.04225875451969638, + "acc_norm": 0.366412213740458, + "acc_norm_stderr": 0.04225875451969638 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.34, + "acc_stderr": 0.047609522856952344, + "acc_norm": 0.34, + "acc_norm_stderr": 0.047609522856952344 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.20707070707070707, + "acc_stderr": 0.028869778460267042, + "acc_norm": 0.20707070707070707, + "acc_norm_stderr": 0.028869778460267042 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2689655172413793, + "acc_stderr": 0.03695183311650232, + "acc_norm": 0.2689655172413793, + "acc_norm_stderr": 0.03695183311650232 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.16666666666666666, + "acc_stderr": 0.03708284662416545, + "acc_norm": 0.16666666666666666, + "acc_norm_stderr": 
0.03708284662416545 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.2689075630252101, + "acc_stderr": 0.02880139219363128, + "acc_norm": 0.2689075630252101, + "acc_norm_stderr": 0.02880139219363128 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2564102564102564, + "acc_stderr": 0.022139081103971527, + "acc_norm": 0.2564102564102564, + "acc_norm_stderr": 0.022139081103971527 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04557239513497751, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04557239513497751 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.270935960591133, + "acc_stderr": 0.03127090713297698, + "acc_norm": 0.270935960591133, + "acc_norm_stderr": 0.03127090713297698 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3064516129032258, + "acc_stderr": 0.02622648565255389, + "acc_norm": 0.3064516129032258, + "acc_norm_stderr": 0.02622648565255389 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.4017094017094017, + "acc_stderr": 0.03211693751051622, + "acc_norm": 0.4017094017094017, + "acc_norm_stderr": 0.03211693751051622 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3283018867924528, + "acc_stderr": 0.02890159361241178, + "acc_norm": 0.3283018867924528, + "acc_norm_stderr": 0.02890159361241178 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.37272727272727274, + "acc_stderr": 0.04631381319425463, + "acc_norm": 0.37272727272727274, + "acc_norm_stderr": 0.04631381319425463 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.02696242432507383, + "acc_norm": 
0.26666666666666666, + "acc_norm_stderr": 0.02696242432507383 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.24503311258278146, + "acc_stderr": 0.035118075718047245, + "acc_norm": 0.24503311258278146, + "acc_norm_stderr": 0.035118075718047245 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.3482587064676617, + "acc_stderr": 0.03368787466115459, + "acc_norm": 0.3482587064676617, + "acc_norm_stderr": 0.03368787466115459 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.23699421965317918, + "acc_stderr": 0.03242414757483099, + "acc_norm": 0.23699421965317918, + "acc_norm_stderr": 0.03242414757483099 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.24338624338624337, + "acc_stderr": 0.022101128787415415, + "acc_norm": 0.24338624338624337, + "acc_norm_stderr": 0.022101128787415415 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.24305555555555555, + "acc_stderr": 0.03586879280080343, + "acc_norm": 0.24305555555555555, + "acc_norm_stderr": 0.03586879280080343 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.18, + "acc_stderr": 0.038612291966536955, + "acc_norm": 0.18, + "acc_norm_stderr": 0.038612291966536955 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.3265895953757225, + "acc_stderr": 0.025248264774242832, + "acc_norm": 0.3265895953757225, + "acc_norm_stderr": 0.025248264774242832 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.294478527607362, + "acc_stderr": 0.03581165790474082, + "acc_norm": 0.294478527607362, + "acc_norm_stderr": 0.03581165790474082 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.32098765432098764, + "acc_stderr": 0.02597656601086274, + "acc_norm": 0.32098765432098764, + "acc_norm_stderr": 0.02597656601086274 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.23, + "acc_stderr": 
0.042295258468165065, + "acc_norm": 0.23, + "acc_norm_stderr": 0.042295258468165065 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.2694300518134715, + "acc_stderr": 0.032018671228777947, + "acc_norm": 0.2694300518134715, + "acc_norm_stderr": 0.032018671228777947 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2982456140350877, + "acc_stderr": 0.04303684033537315, + "acc_norm": 0.2982456140350877, + "acc_norm_stderr": 0.04303684033537315 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.28623853211009176, + "acc_stderr": 0.019379436628919975, + "acc_norm": 0.28623853211009176, + "acc_norm_stderr": 0.019379436628919975 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.1984126984126984, + "acc_stderr": 0.03567016675276862, + "acc_norm": 0.1984126984126984, + "acc_norm_stderr": 0.03567016675276862 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3431372549019608, + "acc_stderr": 0.027184498909941616, + "acc_norm": 0.3431372549019608, + "acc_norm_stderr": 0.027184498909941616 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847415, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847415 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.39669421487603307, + "acc_stderr": 0.044658697805310094, + "acc_norm": 0.39669421487603307, + "acc_norm_stderr": 0.044658697805310094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.23026315789473684, + "acc_stderr": 0.034260594244031654, + "acc_norm": 0.23026315789473684, + "acc_norm_stderr": 0.034260594244031654 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.018249024411207664, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.018249024411207664 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2765957446808511, + "acc_stderr": 0.026684564340461004, + "acc_norm": 0.2765957446808511, + "acc_norm_stderr": 0.026684564340461004 + }, + 
"harness|ko_mmlu_machine_learning|5": { + "acc": 0.29464285714285715, + "acc_stderr": 0.043270409325787296, + "acc_norm": 0.29464285714285715, + "acc_norm_stderr": 0.043270409325787296 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.029886910547626964, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.029886910547626964 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.21691176470588236, + "acc_stderr": 0.025035845227711254, + "acc_norm": 0.21691176470588236, + "acc_norm_stderr": 0.025035845227711254 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.0289205832206756, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.0289205832206756 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.3755274261603376, + "acc_stderr": 0.03152256243091156, + "acc_norm": 0.3755274261603376, + "acc_norm_stderr": 0.03152256243091156 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2816166883963494, + "acc_stderr": 0.011487783272786694, + "acc_norm": 0.2816166883963494, + "acc_norm_stderr": 0.011487783272786694 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.030587591351604246, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.030587591351604246 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.296969696969697, + "acc_stderr": 
0.03567969772268049, + "acc_norm": 0.296969696969697, + "acc_norm_stderr": 0.03567969772268049 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2582619339045288, + "mc1_stderr": 0.015321821688476189, + "mc2": 0.4026846131079194, + "mc2_stderr": 0.014939937441482552 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.31641086186540734, + "acc_stderr": 0.015989617951065477, + "acc_norm": 0.4639905548996458, + "acc_norm_stderr": 0.017145715365486654 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + 
"harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "devhyun88/kullama2-7b-platypus-kogpt4", + "model_sha": "033fb6e8db347530e49449d888d780b777e48715", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/dltjdgh0928/lsh_finetune_v0.11/result_2023-11-01 01:32:31.json b/dltjdgh0928/lsh_finetune_v0.11/result_2023-11-01 01:32:31.json new file mode 100644 index 0000000000000000000000000000000000000000..2febda3da4f26acd564f4e72838e466c2df17dd4 --- /dev/null +++ b/dltjdgh0928/lsh_finetune_v0.11/result_2023-11-01 01:32:31.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3225255972696246, + "acc_stderr": 0.01365998089427737, + "acc_norm": 
0.3703071672354949, + "acc_norm_stderr": 0.01411129875167495 + }, + "harness|ko_hellaswag|10": { + "acc": 0.36058554072893845, + "acc_stderr": 0.004791890625834196, + "acc_norm": 0.4471220872336188, + "acc_norm_stderr": 0.004961799358836431 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4093567251461988, + "acc_stderr": 0.03771283107626545, + "acc_norm": 0.4093567251461988, + "acc_norm_stderr": 0.03771283107626545 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4174757281553398, + "acc_stderr": 0.048828405482122375, + "acc_norm": 0.4174757281553398, + "acc_norm_stderr": 0.048828405482122375 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.017612204084663775, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.017612204084663775 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.35555555555555557, + "acc_stderr": 0.04135176749720386, + "acc_norm": 0.35555555555555557, + "acc_norm_stderr": 0.04135176749720386 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.34893617021276596, + "acc_stderr": 0.031158522131357766, + "acc_norm": 0.34893617021276596, + "acc_norm_stderr": 0.031158522131357766 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3373493975903614, + "acc_stderr": 0.03680783690727581, + "acc_norm": 0.3373493975903614, + "acc_norm_stderr": 0.03680783690727581 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.39228295819935693, + "acc_stderr": 0.027731258647011994, + "acc_norm": 0.39228295819935693, + "acc_norm_stderr": 0.027731258647011994 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.42152466367713004, + "acc_stderr": 0.03314190222110656, + "acc_norm": 0.42152466367713004, + "acc_norm_stderr": 0.03314190222110656 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4122137404580153, + "acc_stderr": 0.04317171194870254, 
+ "acc_norm": 0.4122137404580153, + "acc_norm_stderr": 0.04317171194870254 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5050505050505051, + "acc_stderr": 0.035621707606254015, + "acc_norm": 0.5050505050505051, + "acc_norm_stderr": 0.035621707606254015 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4689655172413793, + "acc_stderr": 0.04158632762097828, + "acc_norm": 0.4689655172413793, + "acc_norm_stderr": 0.04158632762097828 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.19607843137254902, + "acc_stderr": 0.039505818611799616, + "acc_norm": 0.19607843137254902, + "acc_norm_stderr": 0.039505818611799616 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.41596638655462187, + "acc_stderr": 0.03201650100739615, + "acc_norm": 0.41596638655462187, + "acc_norm_stderr": 0.03201650100739615 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3769230769230769, + "acc_stderr": 0.024570975364225995, + "acc_norm": 0.3769230769230769, + "acc_norm_stderr": 0.024570975364225995 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.55, + "acc_stderr": 0.04999999999999999, + "acc_norm": 0.55, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.42592592592592593, + "acc_stderr": 0.0478034362693679, + "acc_norm": 0.42592592592592593, + "acc_norm_stderr": 0.0478034362693679 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.37438423645320196, + "acc_stderr": 0.03405155380561952, + "acc_norm": 0.37438423645320196, + "acc_norm_stderr": 0.03405155380561952 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3870967741935484, + 
"acc_stderr": 0.027709359675032495, + "acc_norm": 0.3870967741935484, + "acc_norm_stderr": 0.027709359675032495 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6410256410256411, + "acc_stderr": 0.03142616993791923, + "acc_norm": 0.6410256410256411, + "acc_norm_stderr": 0.03142616993791923 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.44150943396226416, + "acc_stderr": 0.03056159042673183, + "acc_norm": 0.44150943396226416, + "acc_norm_stderr": 0.03056159042673183 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.39090909090909093, + "acc_stderr": 0.04673752333670238, + "acc_norm": 0.39090909090909093, + "acc_norm_stderr": 0.04673752333670238 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3111111111111111, + "acc_stderr": 0.02822644674968352, + "acc_norm": 0.3111111111111111, + "acc_norm_stderr": 0.02822644674968352 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3509933774834437, + "acc_stderr": 0.03896981964257375, + "acc_norm": 0.3509933774834437, + "acc_norm_stderr": 0.03896981964257375 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.4975124378109453, + "acc_stderr": 0.03535490150137288, + "acc_norm": 0.4975124378109453, + "acc_norm_stderr": 0.03535490150137288 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3468208092485549, + "acc_stderr": 0.036291466701596636, + "acc_norm": 0.3468208092485549, + "acc_norm_stderr": 0.036291466701596636 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30158730158730157, + "acc_stderr": 0.023636975996101806, + "acc_norm": 0.30158730158730157, + "acc_norm_stderr": 0.023636975996101806 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2708333333333333, + "acc_stderr": 0.03716177437566017, + "acc_norm": 0.2708333333333333, + "acc_norm_stderr": 0.03716177437566017 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + 
"harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.53, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.53, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4682080924855491, + "acc_stderr": 0.02686462436675664, + "acc_norm": 0.4682080924855491, + "acc_norm_stderr": 0.02686462436675664 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4110429447852761, + "acc_stderr": 0.038656978537853624, + "acc_norm": 0.4110429447852761, + "acc_norm_stderr": 0.038656978537853624 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.39197530864197533, + "acc_stderr": 0.027163686038271233, + "acc_norm": 0.39197530864197533, + "acc_norm_stderr": 0.027163686038271233 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.41450777202072536, + "acc_stderr": 0.03555300319557672, + "acc_norm": 0.41450777202072536, + "acc_norm_stderr": 0.03555300319557672 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04434600701584925, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04434600701584925 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3963302752293578, + "acc_stderr": 0.020971469947900525, + "acc_norm": 0.3963302752293578, + "acc_norm_stderr": 0.020971469947900525 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04216370213557836, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04216370213557836 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.027826109307283683, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.027826109307283683 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 
+ }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5702479338842975, + "acc_stderr": 0.04519082021319773, + "acc_norm": 0.5702479338842975, + "acc_norm_stderr": 0.04519082021319773 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.34868421052631576, + "acc_stderr": 0.03878139888797611, + "acc_norm": 0.34868421052631576, + "acc_norm_stderr": 0.03878139888797611 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3366013071895425, + "acc_stderr": 0.01911721391149516, + "acc_norm": 0.3366013071895425, + "acc_norm_stderr": 0.01911721391149516 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3120567375886525, + "acc_stderr": 0.027640120545169927, + "acc_norm": 0.3120567375886525, + "acc_norm_stderr": 0.027640120545169927 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2767857142857143, + "acc_stderr": 0.04246624336697624, + "acc_norm": 0.2767857142857143, + "acc_norm_stderr": 0.04246624336697624 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.35648148148148145, + "acc_stderr": 0.032664783315272714, + "acc_norm": 0.35648148148148145, + "acc_norm_stderr": 0.032664783315272714 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2737430167597765, + "acc_stderr": 0.014912413096372432, + "acc_norm": 0.2737430167597765, + "acc_norm_stderr": 0.014912413096372432 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.30514705882352944, + "acc_stderr": 0.027971541370170598, + "acc_norm": 0.30514705882352944, + "acc_norm_stderr": 0.027971541370170598 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3795918367346939, + "acc_stderr": 0.031067211262872492, + 
"acc_norm": 0.3795918367346939, + "acc_norm_stderr": 0.031067211262872492 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5569620253164557, + "acc_stderr": 0.03233532777533484, + "acc_norm": 0.5569620253164557, + "acc_norm_stderr": 0.03233532777533484 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.288135593220339, + "acc_stderr": 0.011567140661324563, + "acc_norm": 0.288135593220339, + "acc_norm_stderr": 0.011567140661324563 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.39215686274509803, + "acc_stderr": 0.03426712349247272, + "acc_norm": 0.39215686274509803, + "acc_norm_stderr": 0.03426712349247272 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.42424242424242425, + "acc_stderr": 0.038592681420702615, + "acc_norm": 0.42424242424242425, + "acc_norm_stderr": 0.038592681420702615 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3108935128518972, + "mc1_stderr": 0.016203316673559696, + "mc2": 0.5058382452993124, + "mc2_stderr": 0.015661402852943502 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4002361275088548, + "acc_stderr": 0.016844693510505056, + "acc_norm": 0.4911452184179457, + "acc_norm_stderr": 0.01718765819933673 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + 
"harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "dltjdgh0928/lsh_finetune_v0.11", + "model_sha": 
"37760736eef6004ed416dd27ffaaad7cfe5da106", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/dltjdgh0928/mistral_open_orca_ko/result_2023-10-30 07:56:53.json b/dltjdgh0928/mistral_open_orca_ko/result_2023-10-30 07:56:53.json new file mode 100644 index 0000000000000000000000000000000000000000..ca36ec701cb5872befbbd7cccda9ef9488aef7d5 --- /dev/null +++ b/dltjdgh0928/mistral_open_orca_ko/result_2023-10-30 07:56:53.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2832764505119454, + "acc_stderr": 0.013167478735134575, + "acc_norm": 0.33447098976109213, + "acc_norm_stderr": 0.013787460322441372 + }, + "harness|ko_hellaswag|10": { + "acc": 0.33260306711810395, + "acc_stderr": 0.004701828071992634, + "acc_norm": 0.4108743278231428, + "acc_norm_stderr": 0.00490987000638884 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.42105263157894735, + "acc_stderr": 0.03786720706234214, + "acc_norm": 0.42105263157894735, + "acc_norm_stderr": 0.03786720706234214 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4368932038834951, + "acc_stderr": 0.04911147107365777, + "acc_norm": 0.4368932038834951, + "acc_norm_stderr": 0.04911147107365777 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.37420178799489145, + "acc_stderr": 0.017304805072252044, + "acc_norm": 0.37420178799489145, + "acc_norm_stderr": 0.017304805072252044 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3037037037037037, + "acc_stderr": 0.03972552884785137, + "acc_norm": 0.3037037037037037, + "acc_norm_stderr": 0.03972552884785137 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3446808510638298, + "acc_stderr": 0.03106898596312215, + 
"acc_norm": 0.3446808510638298, + "acc_norm_stderr": 0.03106898596312215 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3072289156626506, + "acc_stderr": 0.03591566797824664, + "acc_norm": 0.3072289156626506, + "acc_norm_stderr": 0.03591566797824664 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4115755627009646, + "acc_stderr": 0.027950481494401273, + "acc_norm": 0.4115755627009646, + "acc_norm_stderr": 0.027950481494401273 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3721973094170404, + "acc_stderr": 0.03244305283008732, + "acc_norm": 0.3721973094170404, + "acc_norm_stderr": 0.03244305283008732 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4122137404580153, + "acc_stderr": 0.04317171194870254, + "acc_norm": 0.4122137404580153, + "acc_norm_stderr": 0.04317171194870254 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.494949494949495, + "acc_stderr": 0.035621707606254015, + "acc_norm": 0.494949494949495, + "acc_norm_stderr": 0.035621707606254015 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.45517241379310347, + "acc_stderr": 0.04149886942192117, + "acc_norm": 0.45517241379310347, + "acc_norm_stderr": 0.04149886942192117 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.04389869956808779, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.04389869956808779 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.42016806722689076, + "acc_stderr": 0.03206183783236153, + "acc_norm": 0.42016806722689076, + "acc_norm_stderr": 0.03206183783236153 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3435897435897436, + "acc_stderr": 0.024078696580635463, + "acc_norm": 0.3435897435897436, + "acc_norm_stderr": 0.024078696580635463 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 
0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4537037037037037, + "acc_stderr": 0.04812917324536823, + "acc_norm": 0.4537037037037037, + "acc_norm_stderr": 0.04812917324536823 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.37438423645320196, + "acc_stderr": 0.03405155380561952, + "acc_norm": 0.37438423645320196, + "acc_norm_stderr": 0.03405155380561952 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4, + "acc_stderr": 0.02786932057166464, + "acc_norm": 0.4, + "acc_norm_stderr": 0.02786932057166464 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6452991452991453, + "acc_stderr": 0.03134250486245402, + "acc_norm": 0.6452991452991453, + "acc_norm_stderr": 0.03134250486245402 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4377358490566038, + "acc_stderr": 0.030533338430467506, + "acc_norm": 0.4377358490566038, + "acc_norm_stderr": 0.030533338430467506 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.37272727272727274, + "acc_stderr": 0.04631381319425463, + "acc_norm": 0.37272727272727274, + "acc_norm_stderr": 0.04631381319425463 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.32592592592592595, + "acc_stderr": 0.028578348365473072, + "acc_norm": 0.32592592592592595, + "acc_norm_stderr": 0.028578348365473072 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.32450331125827814, + "acc_stderr": 0.038227469376587525, + "acc_norm": 0.32450331125827814, + "acc_norm_stderr": 0.038227469376587525 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.4975124378109453, + "acc_stderr": 0.03535490150137289, + "acc_norm": 0.4975124378109453, + "acc_norm_stderr": 0.03535490150137289 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 
0.3699421965317919, + "acc_stderr": 0.03681229633394319, + "acc_norm": 0.3699421965317919, + "acc_norm_stderr": 0.03681229633394319 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3306878306878307, + "acc_stderr": 0.02422996529842508, + "acc_norm": 0.3306878306878307, + "acc_norm_stderr": 0.02422996529842508 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3263888888888889, + "acc_stderr": 0.03921067198982266, + "acc_norm": 0.3263888888888889, + "acc_norm_stderr": 0.03921067198982266 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.43352601156069365, + "acc_stderr": 0.026680134761679217, + "acc_norm": 0.43352601156069365, + "acc_norm_stderr": 0.026680134761679217 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4171779141104294, + "acc_stderr": 0.03874102859818083, + "acc_norm": 0.4171779141104294, + "acc_norm_stderr": 0.03874102859818083 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.026571483480719964, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.026571483480719964 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.42487046632124353, + "acc_stderr": 0.0356747133521254, + "acc_norm": 0.42487046632124353, + "acc_norm_stderr": 0.0356747133521254 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3508771929824561, + "acc_stderr": 0.044895393502706986, + "acc_norm": 0.3508771929824561, + "acc_norm_stderr": 0.044895393502706986 + }, + 
"harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.4036697247706422, + "acc_stderr": 0.02103570485657497, + "acc_norm": 0.4036697247706422, + "acc_norm_stderr": 0.02103570485657497 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4126984126984127, + "acc_stderr": 0.04403438954768177, + "acc_norm": 0.4126984126984127, + "acc_norm_stderr": 0.04403438954768177 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4084967320261438, + "acc_stderr": 0.028146405993096358, + "acc_norm": 0.4084967320261438, + "acc_norm_stderr": 0.028146405993096358 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5785123966942148, + "acc_stderr": 0.04507732278775088, + "acc_norm": 0.5785123966942148, + "acc_norm_stderr": 0.04507732278775088 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3026315789473684, + "acc_stderr": 0.037385206761196686, + "acc_norm": 0.3026315789473684, + "acc_norm_stderr": 0.037385206761196686 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2973856209150327, + "acc_stderr": 0.018492596536396955, + "acc_norm": 0.2973856209150327, + "acc_norm_stderr": 0.018492596536396955 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2765957446808511, + "acc_stderr": 0.026684564340460997, + "acc_norm": 0.2765957446808511, + "acc_norm_stderr": 0.026684564340460997 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.04547960999764376, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.04547960999764376 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.032757734861009996, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.032757734861009996 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27039106145251396, + "acc_stderr": 0.014854993938010085, + "acc_norm": 
0.27039106145251396, + "acc_norm_stderr": 0.014854993938010085 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3639705882352941, + "acc_stderr": 0.029227192460032025, + "acc_norm": 0.3639705882352941, + "acc_norm_stderr": 0.029227192460032025 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4204081632653061, + "acc_stderr": 0.03160106993449604, + "acc_norm": 0.4204081632653061, + "acc_norm_stderr": 0.03160106993449604 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.48523206751054854, + "acc_stderr": 0.032533028078777386, + "acc_norm": 0.48523206751054854, + "acc_norm_stderr": 0.032533028078777386 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.29139504563233376, + "acc_stderr": 0.011605720214257612, + "acc_norm": 0.29139504563233376, + "acc_norm_stderr": 0.011605720214257612 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.034107853389047184, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.034107853389047184 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3696969696969697, + "acc_stderr": 0.03769430314512568, + "acc_norm": 0.3696969696969697, + "acc_norm_stderr": 0.03769430314512568 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3329253365973072, + "mc1_stderr": 0.016497402382012052, + "mc2": 0.5083138267031554, + "mc2_stderr": 0.015718960507609445 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.32113341204250295, + "acc_stderr": 0.016052762579111573, + "acc_norm": 0.4085005903187721, + "acc_norm_stderr": 0.016900062879427115 + } + }, + "versions": { + "all": 0, + 
"harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + 
"harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "dltjdgh0928/mistral_open_orca_ko", + "model_sha": "d8765c261f6eb7b3746e12b7d0c5cba2d0901653", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/dltjdgh0928/test_instruction/result_2023-11-01 23:54:10.json b/dltjdgh0928/test_instruction/result_2023-11-01 23:54:10.json new file mode 100644 index 0000000000000000000000000000000000000000..82ebf86c70612d5db6148640f24cb743c2c84093 --- /dev/null +++ b/dltjdgh0928/test_instruction/result_2023-11-01 23:54:10.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.36177474402730375, + "acc_stderr": 0.014041957945038075, + "acc_norm": 0.4121160409556314, + "acc_norm_stderr": 0.014383915302225402 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3858793069109739, + "acc_stderr": 0.004858074013443988, + "acc_norm": 0.4956184027086238, + "acc_norm_stderr": 0.004989589816180235 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4502923976608187, + "acc_stderr": 0.038158273659132366, + "acc_norm": 0.4502923976608187, + "acc_norm_stderr": 0.038158273659132366 + }, + "harness|ko_mmlu_management|5": { + "acc": 
0.5631067961165048, + "acc_stderr": 0.04911147107365777, + "acc_norm": 0.5631067961165048, + "acc_norm_stderr": 0.04911147107365777 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.47381864623243936, + "acc_stderr": 0.017855434554041982, + "acc_norm": 0.47381864623243936, + "acc_norm_stderr": 0.017855434554041982 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.04171654161354544, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.04171654161354544 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3659574468085106, + "acc_stderr": 0.0314895582974553, + "acc_norm": 0.3659574468085106, + "acc_norm_stderr": 0.0314895582974553 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3674698795180723, + "acc_stderr": 0.03753267402120574, + "acc_norm": 0.3674698795180723, + "acc_norm_stderr": 0.03753267402120574 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4758842443729904, + "acc_stderr": 0.028365041542564577, + "acc_norm": 0.4758842443729904, + "acc_norm_stderr": 0.028365041542564577 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.43946188340807174, + "acc_stderr": 0.03331092511038179, + "acc_norm": 0.43946188340807174, + "acc_norm_stderr": 0.03331092511038179 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.42748091603053434, + "acc_stderr": 0.04338920305792401, + "acc_norm": 0.42748091603053434, + "acc_norm_stderr": 0.04338920305792401 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5858585858585859, + "acc_stderr": 0.035094383488796295, + "acc_norm": 0.5858585858585859, + "acc_norm_stderr": 0.035094383488796295 + }, + "harness|ko_mmlu_electrical_engineering|5": { + 
"acc": 0.4206896551724138, + "acc_stderr": 0.0411391498118926, + "acc_norm": 0.4206896551724138, + "acc_norm_stderr": 0.0411391498118926 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237655, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237655 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4831932773109244, + "acc_stderr": 0.03246013680375308, + "acc_norm": 0.4831932773109244, + "acc_norm_stderr": 0.03246013680375308 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4512820512820513, + "acc_stderr": 0.025230381238934833, + "acc_norm": 0.4512820512820513, + "acc_norm_stderr": 0.025230381238934833 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.65, + "acc_stderr": 0.04793724854411021, + "acc_norm": 0.65, + "acc_norm_stderr": 0.04793724854411021 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5462962962962963, + "acc_stderr": 0.048129173245368216, + "acc_norm": 0.5462962962962963, + "acc_norm_stderr": 0.048129173245368216 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3891625615763547, + "acc_stderr": 0.03430462416103872, + "acc_norm": 0.3891625615763547, + "acc_norm_stderr": 0.03430462416103872 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.43548387096774194, + "acc_stderr": 0.028206225591502737, + "acc_norm": 0.43548387096774194, + "acc_norm_stderr": 0.028206225591502737 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.717948717948718, + "acc_stderr": 0.029480360549541194, + "acc_norm": 0.717948717948718, + "acc_norm_stderr": 0.029480360549541194 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.43018867924528303, + "acc_stderr": 0.03047144586718323, + "acc_norm": 0.43018867924528303, + "acc_norm_stderr": 0.03047144586718323 + }, + 
"harness|ko_mmlu_public_relations|5": { + "acc": 0.4909090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.4909090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2814814814814815, + "acc_stderr": 0.027420019350945277, + "acc_norm": 0.2814814814814815, + "acc_norm_stderr": 0.027420019350945277 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.03757949922943343, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.03757949922943343 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5671641791044776, + "acc_stderr": 0.03503490923673281, + "acc_norm": 0.5671641791044776, + "acc_norm_stderr": 0.03503490923673281 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.35260115606936415, + "acc_stderr": 0.03643037168958548, + "acc_norm": 0.35260115606936415, + "acc_norm_stderr": 0.03643037168958548 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.36772486772486773, + "acc_stderr": 0.024833839825562413, + "acc_norm": 0.36772486772486773, + "acc_norm_stderr": 0.024833839825562413 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3680555555555556, + "acc_stderr": 0.04032999053960718, + "acc_norm": 0.3680555555555556, + "acc_norm_stderr": 0.04032999053960718 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.55, + "acc_stderr": 0.05, + "acc_norm": 0.55, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.49710982658959535, + "acc_stderr": 0.02691864538323901, + "acc_norm": 0.49710982658959535, + "acc_norm_stderr": 0.02691864538323901 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4785276073619632, + "acc_stderr": 0.03924746876751129, + "acc_norm": 0.4785276073619632, + "acc_norm_stderr": 0.03924746876751129 + }, 
+ "harness|ko_mmlu_prehistory|5": { + "acc": 0.44135802469135804, + "acc_stderr": 0.027628737155668773, + "acc_norm": 0.44135802469135804, + "acc_norm_stderr": 0.027628737155668773 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5025906735751295, + "acc_stderr": 0.03608390745384487, + "acc_norm": 0.5025906735751295, + "acc_norm_stderr": 0.03608390745384487 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.04185774424022056, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.04185774424022056 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.4935779816513762, + "acc_stderr": 0.02143555482001308, + "acc_norm": 0.4935779816513762, + "acc_norm_stderr": 0.02143555482001308 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.373015873015873, + "acc_stderr": 0.04325506042017086, + "acc_norm": 0.373015873015873, + "acc_norm_stderr": 0.04325506042017086 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.028431095444176643, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.028431095444176643 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5619834710743802, + "acc_stderr": 0.045291468044357915, + "acc_norm": 0.5619834710743802, + "acc_norm_stderr": 0.045291468044357915 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4144736842105263, + "acc_stderr": 0.04008973785779206, + "acc_norm": 0.4144736842105263, + "acc_norm_stderr": 0.04008973785779206 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.01965992249362334, + "acc_norm": 0.38235294117647056, + 
"acc_norm_stderr": 0.01965992249362334 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.028121636040639893, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.028121636040639893 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4017857142857143, + "acc_stderr": 0.04653333146973646, + "acc_norm": 0.4017857142857143, + "acc_norm_stderr": 0.04653333146973646 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.375, + "acc_stderr": 0.033016908987210894, + "acc_norm": 0.375, + "acc_norm_stderr": 0.033016908987210894 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23016759776536314, + "acc_stderr": 0.01407833925342581, + "acc_norm": 0.23016759776536314, + "acc_norm_stderr": 0.01407833925342581 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.63, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.63, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.33088235294117646, + "acc_stderr": 0.028582709753898445, + "acc_norm": 0.33088235294117646, + "acc_norm_stderr": 0.028582709753898445 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4326530612244898, + "acc_stderr": 0.03171752824062664, + "acc_norm": 0.4326530612244898, + "acc_norm_stderr": 0.03171752824062664 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5991561181434599, + "acc_stderr": 0.03190080389473236, + "acc_norm": 0.5991561181434599, + "acc_norm_stderr": 0.03190080389473236 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.31681877444589307, + "acc_stderr": 0.011882349954723016, + "acc_norm": 0.31681877444589307, + "acc_norm_stderr": 0.011882349954723016 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.46078431372549017, + "acc_stderr": 
0.03498501649369527, + "acc_norm": 0.46078431372549017, + "acc_norm_stderr": 0.03498501649369527 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.46060606060606063, + "acc_stderr": 0.03892207016552013, + "acc_norm": 0.46060606060606063, + "acc_norm_stderr": 0.03892207016552013 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.29865361077111385, + "mc1_stderr": 0.016021570613768542, + "mc2": 0.4796330162483247, + "mc2_stderr": 0.015594823470032292 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.48406139315230223, + "acc_stderr": 0.017181617837190195, + "acc_norm": 0.5230224321133412, + "acc_norm_stderr": 0.017172121546727627 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + 
"harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "dltjdgh0928/test_instruction", + "model_sha": "7850d81409e5abbe9170009f0b463eb25042313b", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/dzakwan/dzakwan-MoE-4x7b-Beta/result_2024-08-05 12:19:34.json b/dzakwan/dzakwan-MoE-4x7b-Beta/result_2024-08-05 12:19:34.json new file mode 100644 index 0000000000000000000000000000000000000000..fc8b54058f60e3e0c7c8f59022e57ad2e2f26982 --- /dev/null +++ 
b/dzakwan/dzakwan-MoE-4x7b-Beta/result_2024-08-05 12:19:34.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3771331058020478, + "acc_stderr": 0.014163366896192596, + "acc_norm": 0.431740614334471, + "acc_norm_stderr": 0.0144745914271962 + }, + "harness|ko_hellaswag|10": { + "acc": 0.39832702648874724, + "acc_stderr": 0.004885529674958329, + "acc_norm": 0.5209121688906593, + "acc_norm_stderr": 0.004985415250690907 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4619883040935672, + "acc_stderr": 0.03823727092882307, + "acc_norm": 0.4619883040935672, + "acc_norm_stderr": 0.03823727092882307 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5728155339805825, + "acc_stderr": 0.04897957737781168, + "acc_norm": 0.5728155339805825, + "acc_norm_stderr": 0.04897957737781168 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.47381864623243936, + "acc_stderr": 0.017855434554041975, + "acc_norm": 0.47381864623243936, + "acc_norm_stderr": 0.017855434554041975 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3851851851851852, + "acc_stderr": 0.042039210401562783, + "acc_norm": 0.3851851851851852, + "acc_norm_stderr": 0.042039210401562783 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4127659574468085, + "acc_stderr": 0.03218471141400351, + "acc_norm": 0.4127659574468085, + "acc_norm_stderr": 0.03218471141400351 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3674698795180723, + "acc_stderr": 0.03753267402120574, + "acc_norm": 0.3674698795180723, + "acc_norm_stderr": 0.03753267402120574 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.45980707395498394, + "acc_stderr": 0.028306190403305696, + "acc_norm": 0.45980707395498394, + "acc_norm_stderr": 0.028306190403305696 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.42152466367713004, + "acc_stderr": 
0.033141902221106564, + "acc_norm": 0.42152466367713004, + "acc_norm_stderr": 0.033141902221106564 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48854961832061067, + "acc_stderr": 0.043841400240780176, + "acc_norm": 0.48854961832061067, + "acc_norm_stderr": 0.043841400240780176 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5606060606060606, + "acc_stderr": 0.035360859475294805, + "acc_norm": 0.5606060606060606, + "acc_norm_stderr": 0.035360859475294805 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4689655172413793, + "acc_stderr": 0.04158632762097828, + "acc_norm": 0.4689655172413793, + "acc_norm_stderr": 0.04158632762097828 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.040233822736177455, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.040233822736177455 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5210084033613446, + "acc_stderr": 0.03244980849990029, + "acc_norm": 0.5210084033613446, + "acc_norm_stderr": 0.03244980849990029 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.48205128205128206, + "acc_stderr": 0.02533466708095496, + "acc_norm": 0.48205128205128206, + "acc_norm_stderr": 0.02533466708095496 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.63, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.63, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5370370370370371, + "acc_stderr": 0.04820403072760627, + "acc_norm": 0.5370370370370371, + "acc_norm_stderr": 0.04820403072760627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 
0.39408866995073893, + "acc_stderr": 0.034381579670365446, + "acc_norm": 0.39408866995073893, + "acc_norm_stderr": 0.034381579670365446 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.43548387096774194, + "acc_stderr": 0.02820622559150274, + "acc_norm": 0.43548387096774194, + "acc_norm_stderr": 0.02820622559150274 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7606837606837606, + "acc_stderr": 0.027951826808924333, + "acc_norm": 0.7606837606837606, + "acc_norm_stderr": 0.027951826808924333 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4716981132075472, + "acc_stderr": 0.030723535249006107, + "acc_norm": 0.4716981132075472, + "acc_norm_stderr": 0.030723535249006107 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4818181818181818, + "acc_stderr": 0.04785964010794917, + "acc_norm": 0.4818181818181818, + "acc_norm_stderr": 0.04785964010794917 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.32222222222222224, + "acc_stderr": 0.028493465091028593, + "acc_norm": 0.32222222222222224, + "acc_norm_stderr": 0.028493465091028593 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.271523178807947, + "acc_stderr": 0.03631329803969654, + "acc_norm": 0.271523178807947, + "acc_norm_stderr": 0.03631329803969654 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5771144278606966, + "acc_stderr": 0.034932317774212816, + "acc_norm": 0.5771144278606966, + "acc_norm_stderr": 0.034932317774212816 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3988439306358382, + "acc_stderr": 0.03733626655383509, + "acc_norm": 0.3988439306358382, + "acc_norm_stderr": 0.03733626655383509 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3544973544973545, + "acc_stderr": 0.024636830602842, + "acc_norm": 0.3544973544973545, + "acc_norm_stderr": 0.024636830602842 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3402777777777778, + "acc_stderr": 0.03962135573486219, + "acc_norm": 0.3402777777777778, + 
"acc_norm_stderr": 0.03962135573486219 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4797687861271676, + "acc_stderr": 0.026897049996382875, + "acc_norm": 0.4797687861271676, + "acc_norm_stderr": 0.026897049996382875 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.49079754601226994, + "acc_stderr": 0.03927705600787443, + "acc_norm": 0.49079754601226994, + "acc_norm_stderr": 0.03927705600787443 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.44753086419753085, + "acc_stderr": 0.02766713856942271, + "acc_norm": 0.44753086419753085, + "acc_norm_stderr": 0.02766713856942271 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384739, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384739 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5129533678756477, + "acc_stderr": 0.0360722806104775, + "acc_norm": 0.5129533678756477, + "acc_norm_stderr": 0.0360722806104775 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2982456140350877, + "acc_stderr": 0.04303684033537317, + "acc_norm": 0.2982456140350877, + "acc_norm_stderr": 0.04303684033537317 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5211009174311927, + "acc_stderr": 0.021418224754264643, + "acc_norm": 0.5211009174311927, + "acc_norm_stderr": 0.021418224754264643 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.40476190476190477, + "acc_stderr": 0.04390259265377561, + "acc_norm": 0.40476190476190477, + "acc_norm_stderr": 0.04390259265377561 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5, + "acc_stderr": 0.028629916715693413, + "acc_norm": 0.5, + "acc_norm_stderr": 
0.028629916715693413 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5950413223140496, + "acc_stderr": 0.044811377559424694, + "acc_norm": 0.5950413223140496, + "acc_norm_stderr": 0.044811377559424694 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4342105263157895, + "acc_stderr": 0.04033565667848319, + "acc_norm": 0.4342105263157895, + "acc_norm_stderr": 0.04033565667848319 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4133986928104575, + "acc_stderr": 0.019922115682786692, + "acc_norm": 0.4133986928104575, + "acc_norm_stderr": 0.019922115682786692 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3829787234042553, + "acc_stderr": 0.02899908090480618, + "acc_norm": 0.3829787234042553, + "acc_norm_stderr": 0.02899908090480618 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4107142857142857, + "acc_stderr": 0.04669510663875191, + "acc_norm": 0.4107142857142857, + "acc_norm_stderr": 0.04669510663875191 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.42592592592592593, + "acc_stderr": 0.03372343271653062, + "acc_norm": 0.42592592592592593, + "acc_norm_stderr": 0.03372343271653062 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2871508379888268, + "acc_stderr": 0.015131608849963753, + "acc_norm": 0.2871508379888268, + "acc_norm_stderr": 0.015131608849963753 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.66, + "acc_stderr": 0.04760952285695238, + "acc_norm": 0.66, + "acc_norm_stderr": 0.04760952285695238 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.36764705882352944, + "acc_stderr": 0.029289413409403192, + "acc_norm": 
0.36764705882352944, + "acc_norm_stderr": 0.029289413409403192 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5142857142857142, + "acc_stderr": 0.03199615232806286, + "acc_norm": 0.5142857142857142, + "acc_norm_stderr": 0.03199615232806286 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5991561181434599, + "acc_stderr": 0.03190080389473236, + "acc_norm": 0.5991561181434599, + "acc_norm_stderr": 0.03190080389473236 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.34485006518904826, + "acc_stderr": 0.01213988100628707, + "acc_norm": 0.34485006518904826, + "acc_norm_stderr": 0.01213988100628707 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.47549019607843135, + "acc_stderr": 0.03505093194348798, + "acc_norm": 0.47549019607843135, + "acc_norm_stderr": 0.03505093194348798 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.49696969696969695, + "acc_stderr": 0.03904272341431856, + "acc_norm": 0.49696969696969695, + "acc_norm_stderr": 0.03904272341431856 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.4112607099143207, + "mc1_stderr": 0.017225627083660853, + "mc2": 0.591611199182669, + "mc2_stderr": 0.016294366932617963 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.41086186540731995, + "acc_stderr": 0.016914972767841062, + "acc_norm": 0.4155844155844156, + "acc_norm_stderr": 0.01694358631307657 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + 
"harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + 
"harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "dzakwan/dzakwan-MoE-4x7b-Beta", + "model_sha": "e89f82f2afa1961335de5a6d6d05bd850d1d61d9", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/eclipsemint/kollama2-7b-v0.1/result_2023-10-31 10:42:08.json b/eclipsemint/kollama2-7b-v0.1/result_2023-10-31 10:42:08.json new file mode 100644 index 0000000000000000000000000000000000000000..58e5dc8fde8ff97b9873cd27815944a089ebf14f --- /dev/null +++ b/eclipsemint/kollama2-7b-v0.1/result_2023-10-31 10:42:08.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.26791808873720135, + "acc_stderr": 0.012942030195136428, + "acc_norm": 0.31313993174061433, + "acc_norm_stderr": 0.013552671543623503 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3329018123879705, + "acc_stderr": 0.004702886273189405, + "acc_norm": 0.4117705636327425, + "acc_norm_stderr": 0.004911481830909236 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.38596491228070173, + "acc_stderr": 0.03733756969066164, + "acc_norm": 0.38596491228070173, + "acc_norm_stderr": 0.03733756969066164 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.24271844660194175, + "acc_stderr": 0.04245022486384495, + "acc_norm": 0.24271844660194175, + "acc_norm_stderr": 0.04245022486384495 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3090676883780332, + "acc_stderr": 0.01652498891970219, + "acc_norm": 0.3090676883780332, + "acc_norm_stderr": 0.01652498891970219 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2814814814814815, + "acc_stderr": 0.03885004245800254, + "acc_norm": 0.2814814814814815, + "acc_norm_stderr": 0.03885004245800254 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 
0.04688261722621503, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621503 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3404255319148936, + "acc_stderr": 0.03097669299853443, + "acc_norm": 0.3404255319148936, + "acc_norm_stderr": 0.03097669299853443 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.2891566265060241, + "acc_stderr": 0.03529486801511115, + "acc_norm": 0.2891566265060241, + "acc_norm_stderr": 0.03529486801511115 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2958199356913183, + "acc_stderr": 0.025922371788818784, + "acc_norm": 0.2958199356913183, + "acc_norm_stderr": 0.025922371788818784 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3542600896860987, + "acc_stderr": 0.032100621541349864, + "acc_norm": 0.3542600896860987, + "acc_norm_stderr": 0.032100621541349864 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.31297709923664124, + "acc_stderr": 0.04066962905677698, + "acc_norm": 0.31297709923664124, + "acc_norm_stderr": 0.04066962905677698 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.23737373737373738, + "acc_stderr": 0.0303137105381989, + "acc_norm": 0.23737373737373738, + "acc_norm_stderr": 0.0303137105381989 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2827586206896552, + "acc_stderr": 0.03752833958003336, + "acc_norm": 0.2827586206896552, + "acc_norm_stderr": 0.03752833958003336 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237654, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237654 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.029344572500634363, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.029344572500634363 + }, + 
"harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2512820512820513, + "acc_stderr": 0.02199201666237056, + "acc_norm": 0.2512820512820513, + "acc_norm_stderr": 0.02199201666237056 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932269, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932269 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.047128212574267705, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.047128212574267705 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2413793103448276, + "acc_stderr": 0.030108330718011625, + "acc_norm": 0.2413793103448276, + "acc_norm_stderr": 0.030108330718011625 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.34516129032258064, + "acc_stderr": 0.027045746573534327, + "acc_norm": 0.34516129032258064, + "acc_norm_stderr": 0.027045746573534327 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.41025641025641024, + "acc_stderr": 0.032224140452411065, + "acc_norm": 0.41025641025641024, + "acc_norm_stderr": 0.032224140452411065 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2188679245283019, + "acc_stderr": 0.025447863825108614, + "acc_norm": 0.2188679245283019, + "acc_norm_stderr": 0.025447863825108614 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.34545454545454546, + "acc_stderr": 0.04554619617541054, + "acc_norm": 0.34545454545454546, + "acc_norm_stderr": 0.04554619617541054 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.02534809746809783, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.02534809746809783 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.23841059602649006, + "acc_stderr": 0.0347918557259966, + "acc_norm": 0.23841059602649006, + 
"acc_norm_stderr": 0.0347918557259966 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.34328358208955223, + "acc_stderr": 0.03357379665433431, + "acc_norm": 0.34328358208955223, + "acc_norm_stderr": 0.03357379665433431 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.21965317919075145, + "acc_stderr": 0.03156809362703174, + "acc_norm": 0.21965317919075145, + "acc_norm_stderr": 0.03156809362703174 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.24603174603174602, + "acc_stderr": 0.022182037202948368, + "acc_norm": 0.24603174603174602, + "acc_norm_stderr": 0.022182037202948368 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.03745554791462457, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.03745554791462457 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932269, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932269 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.3265895953757225, + "acc_stderr": 0.025248264774242826, + "acc_norm": 0.3265895953757225, + "acc_norm_stderr": 0.025248264774242826 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3006134969325153, + "acc_stderr": 0.0360251131880677, + "acc_norm": 0.3006134969325153, + "acc_norm_stderr": 0.0360251131880677 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.32407407407407407, + "acc_stderr": 0.026041766202717167, + "acc_norm": 0.32407407407407407, + "acc_norm_stderr": 0.026041766202717167 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.33678756476683935, + "acc_stderr": 0.03410780251836184, + "acc_norm": 
0.33678756476683935, + "acc_norm_stderr": 0.03410780251836184 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.21929824561403508, + "acc_stderr": 0.03892431106518752, + "acc_norm": 0.21929824561403508, + "acc_norm_stderr": 0.03892431106518752 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.22935779816513763, + "acc_stderr": 0.018025349724618684, + "acc_norm": 0.22935779816513763, + "acc_norm_stderr": 0.018025349724618684 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.040061680838488774, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.040061680838488774 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2875816993464052, + "acc_stderr": 0.02591780611714716, + "acc_norm": 0.2875816993464052, + "acc_norm_stderr": 0.02591780611714716 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.34710743801652894, + "acc_stderr": 0.043457245702925335, + "acc_norm": 0.34710743801652894, + "acc_norm_stderr": 0.043457245702925335 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.24342105263157895, + "acc_stderr": 0.034923496688842384, + "acc_norm": 0.24342105263157895, + "acc_norm_stderr": 0.034923496688842384 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.018635594034423976, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.018635594034423976 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2872340425531915, + "acc_stderr": 0.026992199173064356, + "acc_norm": 0.2872340425531915, + "acc_norm_stderr": 0.026992199173064356 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3392857142857143, + "acc_stderr": 0.04493949068613539, + "acc_norm": 0.3392857142857143, + "acc_norm_stderr": 0.04493949068613539 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 
0.38425925925925924, + "acc_stderr": 0.03317354514310742, + "acc_norm": 0.38425925925925924, + "acc_norm_stderr": 0.03317354514310742 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2446927374301676, + "acc_stderr": 0.014378169884098424, + "acc_norm": 0.2446927374301676, + "acc_norm_stderr": 0.014378169884098424 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.024562204314142317, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.024562204314142317 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.37142857142857144, + "acc_stderr": 0.030932858792789834, + "acc_norm": 0.37142857142857144, + "acc_norm_stderr": 0.030932858792789834 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.3206751054852321, + "acc_stderr": 0.030381931949990414, + "acc_norm": 0.3206751054852321, + "acc_norm_stderr": 0.030381931949990414 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.26727509778357234, + "acc_stderr": 0.011302607515637528, + "acc_norm": 0.26727509778357234, + "acc_norm_stderr": 0.011302607515637528 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.03132179803083291, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.03132179803083291 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.30303030303030304, + "acc_stderr": 0.03588624800091707, + "acc_norm": 0.30303030303030304, + "acc_norm_stderr": 0.03588624800091707 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3011015911872705, + "mc1_stderr": 0.01605899902610062, + "mc2": 0.48594348947345256, + "mc2_stderr": 
0.015487528453498189 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2680047225501771, + "acc_stderr": 0.015227905796335145, + "acc_norm": 0.34710743801652894, + "acc_norm_stderr": 0.016366945603281273 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 
1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "eclipsemint/kollama2-7b-v0.1", + "model_sha": "875311380804f4022f56d6c45d2bdcee2a899f43", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/eclipsemint/kollama2-7b-v0.3/result_2023-11-07 00:36:29.json b/eclipsemint/kollama2-7b-v0.3/result_2023-11-07 00:36:29.json new file mode 100644 index 0000000000000000000000000000000000000000..2cb0628d26030062ec7433749ea4160cd9b6adb0 --- /dev/null +++ b/eclipsemint/kollama2-7b-v0.3/result_2023-11-07 00:36:29.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2721843003412969, + "acc_stderr": 0.01300660040642371, + "acc_norm": 0.31143344709897613, + "acc_norm_stderr": 0.013532472099850944 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3320055765783708, + "acc_stderr": 0.00469970528097657, + "acc_norm": 0.4071898028281219, + "acc_norm_stderr": 0.004903066639761954 + }, + "harness|ko_mmlu_world_religions|5": { + 
"acc": 0.4152046783625731, + "acc_stderr": 0.03779275945503201, + "acc_norm": 0.4152046783625731, + "acc_norm_stderr": 0.03779275945503201 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.27184466019417475, + "acc_stderr": 0.044052680241409216, + "acc_norm": 0.27184466019417475, + "acc_norm_stderr": 0.044052680241409216 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.32950191570881227, + "acc_stderr": 0.01680832226174045, + "acc_norm": 0.32950191570881227, + "acc_norm_stderr": 0.01680832226174045 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.039446241625011175, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.039446241625011175 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720683, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720683 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.33617021276595743, + "acc_stderr": 0.030881618520676942, + "acc_norm": 0.33617021276595743, + "acc_norm_stderr": 0.030881618520676942 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.2710843373493976, + "acc_stderr": 0.034605799075530255, + "acc_norm": 0.2710843373493976, + "acc_norm_stderr": 0.034605799075530255 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.31189710610932475, + "acc_stderr": 0.026311858071854155, + "acc_norm": 0.31189710610932475, + "acc_norm_stderr": 0.026311858071854155 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.336322869955157, + "acc_stderr": 0.031708824268455, + "acc_norm": 0.336322869955157, + "acc_norm_stderr": 0.031708824268455 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2824427480916031, + "acc_stderr": 0.03948406125768361, + "acc_norm": 0.2824427480916031, + "acc_norm_stderr": 0.03948406125768361 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 
0.25757575757575757, + "acc_stderr": 0.03115626951964684, + "acc_norm": 0.25757575757575757, + "acc_norm_stderr": 0.03115626951964684 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.30344827586206896, + "acc_stderr": 0.038312260488503336, + "acc_norm": 0.30344827586206896, + "acc_norm_stderr": 0.038312260488503336 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.041583075330832865, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.041583075330832865 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.027553614467863776, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.027553614467863776 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2717948717948718, + "acc_stderr": 0.022556551010132368, + "acc_norm": 0.2717948717948718, + "acc_norm_stderr": 0.022556551010132368 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.41, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.41, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04557239513497751, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04557239513497751 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.23645320197044334, + "acc_stderr": 0.029896114291733555, + "acc_norm": 0.23645320197044334, + "acc_norm_stderr": 0.029896114291733555 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3032258064516129, + "acc_stderr": 0.02614868593067175, + "acc_norm": 0.3032258064516129, + "acc_norm_stderr": 0.02614868593067175 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.4230769230769231, + "acc_stderr": 0.032366121762202014, + "acc_norm": 0.4230769230769231, + "acc_norm_stderr": 0.032366121762202014 
+ }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.23018867924528302, + "acc_stderr": 0.02590789712240817, + "acc_norm": 0.23018867924528302, + "acc_norm_stderr": 0.02590789712240817 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4, + "acc_stderr": 0.0469237132203465, + "acc_norm": 0.4, + "acc_norm_stderr": 0.0469237132203465 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.026719240783712166, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.026719240783712166 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.24503311258278146, + "acc_stderr": 0.03511807571804723, + "acc_norm": 0.24503311258278146, + "acc_norm_stderr": 0.03511807571804723 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.3582089552238806, + "acc_stderr": 0.03390393042268814, + "acc_norm": 0.3582089552238806, + "acc_norm_stderr": 0.03390393042268814 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.21965317919075145, + "acc_stderr": 0.03156809362703174, + "acc_norm": 0.21965317919075145, + "acc_norm_stderr": 0.03156809362703174 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.23809523809523808, + "acc_stderr": 0.021935878081184756, + "acc_norm": 0.23809523809523808, + "acc_norm_stderr": 0.021935878081184756 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.03745554791462457, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.03745554791462457 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.29190751445086704, + "acc_stderr": 0.024476994076247333, + "acc_norm": 0.29190751445086704, + "acc_norm_stderr": 
0.024476994076247333 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2822085889570552, + "acc_stderr": 0.03536117886664743, + "acc_norm": 0.2822085889570552, + "acc_norm_stderr": 0.03536117886664743 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.30246913580246915, + "acc_stderr": 0.025557653981868066, + "acc_norm": 0.30246913580246915, + "acc_norm_stderr": 0.025557653981868066 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.3160621761658031, + "acc_stderr": 0.033553973696861736, + "acc_norm": 0.3160621761658031, + "acc_norm_stderr": 0.033553973696861736 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.03999423879281336, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.03999423879281336 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.25688073394495414, + "acc_stderr": 0.01873249292834245, + "acc_norm": 0.25688073394495414, + "acc_norm_stderr": 0.01873249292834245 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2698412698412698, + "acc_stderr": 0.03970158273235172, + "acc_norm": 0.2698412698412698, + "acc_norm_stderr": 0.03970158273235172 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3202614379084967, + "acc_stderr": 0.026716118380156837, + "acc_norm": 0.3202614379084967, + "acc_norm_stderr": 0.026716118380156837 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.3884297520661157, + "acc_stderr": 0.04449270350068382, + "acc_norm": 0.3884297520661157, + "acc_norm_stderr": 0.04449270350068382 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.26973684210526316, + "acc_stderr": 0.036117805602848975, + "acc_norm": 
0.26973684210526316, + "acc_norm_stderr": 0.036117805602848975 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.018249024411207664, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.018249024411207664 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.25886524822695034, + "acc_stderr": 0.026129572527180848, + "acc_norm": 0.25886524822695034, + "acc_norm_stderr": 0.026129572527180848 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.30357142857142855, + "acc_stderr": 0.04364226155841044, + "acc_norm": 0.30357142857142855, + "acc_norm_stderr": 0.04364226155841044 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.38425925925925924, + "acc_stderr": 0.03317354514310742, + "acc_norm": 0.38425925925925924, + "acc_norm_stderr": 0.03317354514310742 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.21323529411764705, + "acc_stderr": 0.02488097151229427, + "acc_norm": 0.21323529411764705, + "acc_norm_stderr": 0.02488097151229427 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3795918367346939, + "acc_stderr": 0.031067211262872475, + "acc_norm": 0.3795918367346939, + "acc_norm_stderr": 0.031067211262872475 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.3080168776371308, + "acc_stderr": 0.0300523893356057, + "acc_norm": 0.3080168776371308, + "acc_norm_stderr": 0.0300523893356057 + }, + "harness|ko_mmlu_professional_law|5": { 
+ "acc": 0.26010430247718386, + "acc_stderr": 0.011204382887823829, + "acc_norm": 0.26010430247718386, + "acc_norm_stderr": 0.011204382887823829 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.03166009679399812, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.03166009679399812 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2787878787878788, + "acc_stderr": 0.03501438706296781, + "acc_norm": 0.2787878787878788, + "acc_norm_stderr": 0.03501438706296781 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.26193390452876375, + "mc1_stderr": 0.015392118805015008, + "mc2": 0.4265558352089997, + "mc2_stderr": 0.01527382517262586 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.28689492325855964, + "acc_stderr": 0.015550809966781778, + "acc_norm": 0.38961038961038963, + "acc_norm_stderr": 0.0167661616718935 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + 
"harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "eclipsemint/kollama2-7b-v0.3", + "model_sha": "1e45ebdd7fe58fa6c62eca0502aef2cf4383336c", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/eclipsemint/kollama2-7b-v0.4/result_2023-11-16 
07:23:45.json b/eclipsemint/kollama2-7b-v0.4/result_2023-11-16 07:23:45.json new file mode 100644 index 0000000000000000000000000000000000000000..425d23fc2f062bf5ab86e0ecbae279197a7c7b05 --- /dev/null +++ b/eclipsemint/kollama2-7b-v0.4/result_2023-11-16 07:23:45.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2696245733788396, + "acc_stderr": 0.01296804068686916, + "acc_norm": 0.30631399317406144, + "acc_norm_stderr": 0.01347058441727651 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3350926110336586, + "acc_stderr": 0.004710581496639349, + "acc_norm": 0.410973909579765, + "acc_norm_stderr": 0.004910049928688086 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.38596491228070173, + "acc_stderr": 0.03733756969066164, + "acc_norm": 0.38596491228070173, + "acc_norm_stderr": 0.03733756969066164 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2621359223300971, + "acc_stderr": 0.04354631077260594, + "acc_norm": 0.2621359223300971, + "acc_norm_stderr": 0.04354631077260594 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3524904214559387, + "acc_stderr": 0.01708415024408138, + "acc_norm": 0.3524904214559387, + "acc_norm_stderr": 0.01708415024408138 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04072314811876837, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04072314811876837 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.34893617021276596, + "acc_stderr": 0.031158522131357787, + "acc_norm": 0.34893617021276596, + "acc_norm_stderr": 0.031158522131357787 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.2891566265060241, + "acc_stderr": 0.035294868015111135, + "acc_norm": 0.2891566265060241, + "acc_norm_stderr": 0.035294868015111135 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3311897106109325, 
+ "acc_stderr": 0.026730620728004903, + "acc_norm": 0.3311897106109325, + "acc_norm_stderr": 0.026730620728004903 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.336322869955157, + "acc_stderr": 0.031708824268455, + "acc_norm": 0.336322869955157, + "acc_norm_stderr": 0.031708824268455 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.33587786259541985, + "acc_stderr": 0.041423137719966634, + "acc_norm": 0.33587786259541985, + "acc_norm_stderr": 0.041423137719966634 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.2878787878787879, + "acc_stderr": 0.03225883512300993, + "acc_norm": 0.2878787878787879, + "acc_norm_stderr": 0.03225883512300993 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3586206896551724, + "acc_stderr": 0.03996629574876718, + "acc_norm": 0.3586206896551724, + "acc_norm_stderr": 0.03996629574876718 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.04023382273617747, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.04023382273617747 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.029597329730978107, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.029597329730978107 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.26153846153846155, + "acc_stderr": 0.022282141204204423, + "acc_norm": 0.26153846153846155, + "acc_norm_stderr": 0.022282141204204423 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 
0.37962962962962965, + "acc_stderr": 0.04691521224077742, + "acc_norm": 0.37962962962962965, + "acc_norm_stderr": 0.04691521224077742 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3054187192118227, + "acc_stderr": 0.03240661565868408, + "acc_norm": 0.3054187192118227, + "acc_norm_stderr": 0.03240661565868408 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3161290322580645, + "acc_stderr": 0.026450874489042764, + "acc_norm": 0.3161290322580645, + "acc_norm_stderr": 0.026450874489042764 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.4188034188034188, + "acc_stderr": 0.03232128912157792, + "acc_norm": 0.4188034188034188, + "acc_norm_stderr": 0.03232128912157792 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2528301886792453, + "acc_stderr": 0.02674989977124123, + "acc_norm": 0.2528301886792453, + "acc_norm_stderr": 0.02674989977124123 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4, + "acc_stderr": 0.0469237132203465, + "acc_norm": 0.4, + "acc_norm_stderr": 0.0469237132203465 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24814814814814815, + "acc_stderr": 0.0263357394040558, + "acc_norm": 0.24814814814814815, + "acc_norm_stderr": 0.0263357394040558 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2251655629139073, + "acc_stderr": 0.03410435282008937, + "acc_norm": 0.2251655629139073, + "acc_norm_stderr": 0.03410435282008937 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.4079601990049751, + "acc_stderr": 0.034751163651940926, + "acc_norm": 0.4079601990049751, + "acc_norm_stderr": 0.034751163651940926 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2658959537572254, + "acc_stderr": 0.03368762932259431, + "acc_norm": 0.2658959537572254, + "acc_norm_stderr": 0.03368762932259431 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.022644212615525214, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.022644212615525214 
+ }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2847222222222222, + "acc_stderr": 0.037738099906869355, + "acc_norm": 0.2847222222222222, + "acc_norm_stderr": 0.037738099906869355 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.18, + "acc_stderr": 0.03861229196653694, + "acc_norm": 0.18, + "acc_norm_stderr": 0.03861229196653694 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.33236994219653176, + "acc_stderr": 0.025361168749688218, + "acc_norm": 0.33236994219653176, + "acc_norm_stderr": 0.025361168749688218 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.294478527607362, + "acc_stderr": 0.03581165790474082, + "acc_norm": 0.294478527607362, + "acc_norm_stderr": 0.03581165790474082 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.026725868809100786, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.026725868809100786 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.29015544041450775, + "acc_stderr": 0.032752644677915166, + "acc_norm": 0.29015544041450775, + "acc_norm_stderr": 0.032752644677915166 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.20175438596491227, + "acc_stderr": 0.037752050135836386, + "acc_norm": 0.20175438596491227, + "acc_norm_stderr": 0.037752050135836386 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.25871559633027524, + "acc_stderr": 0.01877605231961962, + "acc_norm": 0.25871559633027524, + "acc_norm_stderr": 0.01877605231961962 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3253968253968254, + "acc_stderr": 0.041905964388711366, + "acc_norm": 0.3253968253968254, + "acc_norm_stderr": 
0.041905964388711366 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3366013071895425, + "acc_stderr": 0.027057974624494382, + "acc_norm": 0.3366013071895425, + "acc_norm_stderr": 0.027057974624494382 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.4462809917355372, + "acc_stderr": 0.0453793517794788, + "acc_norm": 0.4462809917355372, + "acc_norm_stderr": 0.0453793517794788 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.29605263157894735, + "acc_stderr": 0.03715062154998904, + "acc_norm": 0.29605263157894735, + "acc_norm_stderr": 0.03715062154998904 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.0180540274588152, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.0180540274588152 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2801418439716312, + "acc_stderr": 0.02678917235114023, + "acc_norm": 0.2801418439716312, + "acc_norm_stderr": 0.02678917235114023 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.23214285714285715, + "acc_stderr": 0.04007341809755806, + "acc_norm": 0.23214285714285715, + "acc_norm_stderr": 0.04007341809755806 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.39351851851851855, + "acc_stderr": 0.03331747876370312, + "acc_norm": 0.39351851851851855, + "acc_norm_stderr": 0.03331747876370312 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2446927374301676, + "acc_stderr": 0.014378169884098424, + "acc_norm": 0.2446927374301676, + "acc_norm_stderr": 0.014378169884098424 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, 
+ "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.2757352941176471, + "acc_stderr": 0.027146271936625176, + "acc_norm": 0.2757352941176471, + "acc_norm_stderr": 0.027146271936625176 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4, + "acc_stderr": 0.03136250240935893, + "acc_norm": 0.4, + "acc_norm_stderr": 0.03136250240935893 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.33755274261603374, + "acc_stderr": 0.030781549102026212, + "acc_norm": 0.33755274261603374, + "acc_norm_stderr": 0.030781549102026212 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.27053455019556716, + "acc_stderr": 0.011345996743539258, + "acc_norm": 0.27053455019556716, + "acc_norm_stderr": 0.011345996743539258 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.28921568627450983, + "acc_stderr": 0.03182231867647554, + "acc_norm": 0.28921568627450983, + "acc_norm_stderr": 0.03182231867647554 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.28484848484848485, + "acc_stderr": 0.035243908445117836, + "acc_norm": 0.28484848484848485, + "acc_norm_stderr": 0.035243908445117836 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2741738066095471, + "mc1_stderr": 0.015616518497219371, + "mc2": 0.4534153509461654, + "mc2_stderr": 0.015441392201137738 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.25737898465171194, + "acc_stderr": 0.015030899730346752, + "acc_norm": 0.34238488783943327, + "acc_norm_stderr": 0.016313907844146373 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, 
+ "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + 
"harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "eclipsemint/kollama2-7b-v0.4", + "model_sha": "cc48fd4ee8e59e4d067682819681358e4c265446", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/eclipsemint/kollama2-7b-v0/result_2023-10-29 09:14:47.json b/eclipsemint/kollama2-7b-v0/result_2023-10-29 09:14:47.json new file mode 100644 index 0000000000000000000000000000000000000000..5cb01bf42127daf5210b10d29bd553a0b73c8e12 --- /dev/null +++ b/eclipsemint/kollama2-7b-v0/result_2023-10-29 09:14:47.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.26791808873720135, + "acc_stderr": 0.012942030195136421, + "acc_norm": 0.310580204778157, + "acc_norm_stderr": 0.013522292098053057 + }, + "harness|ko_hellaswag|10": { + "acc": 0.33389762995419237, + "acc_stderr": 0.004706398252382464, + "acc_norm": 0.4122684724158534, + "acc_norm_stderr": 0.004912370023913011 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4269005847953216, + "acc_stderr": 0.03793620616529916, + "acc_norm": 0.4269005847953216, + "acc_norm_stderr": 0.03793620616529916 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2912621359223301, + "acc_stderr": 0.04498676320572922, + "acc_norm": 0.2912621359223301, + "acc_norm_stderr": 0.04498676320572922 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.34738186462324394, + "acc_stderr": 0.01702667174865573, + "acc_norm": 0.34738186462324394, + "acc_norm_stderr": 0.01702667174865573 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2740740740740741, + "acc_stderr": 0.03853254836552003, + "acc_norm": 
0.2740740740740741, + "acc_norm_stderr": 0.03853254836552003 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.35319148936170214, + "acc_stderr": 0.031245325202761923, + "acc_norm": 0.35319148936170214, + "acc_norm_stderr": 0.031245325202761923 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.2891566265060241, + "acc_stderr": 0.03529486801511115, + "acc_norm": 0.2891566265060241, + "acc_norm_stderr": 0.03529486801511115 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3440514469453376, + "acc_stderr": 0.026981478043648022, + "acc_norm": 0.3440514469453376, + "acc_norm_stderr": 0.026981478043648022 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3721973094170404, + "acc_stderr": 0.032443052830087304, + "acc_norm": 0.3721973094170404, + "acc_norm_stderr": 0.032443052830087304 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2748091603053435, + "acc_stderr": 0.03915345408847837, + "acc_norm": 0.2748091603053435, + "acc_norm_stderr": 0.03915345408847837 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.2676767676767677, + "acc_stderr": 0.031544498882702866, + "acc_norm": 0.2676767676767677, + "acc_norm_stderr": 0.031544498882702866 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2896551724137931, + "acc_stderr": 0.037800192304380135, + "acc_norm": 0.2896551724137931, + "acc_norm_stderr": 0.037800192304380135 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.04220773659171453, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.04220773659171453 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.2773109243697479, + "acc_stderr": 
0.029079374539480007, + "acc_norm": 0.2773109243697479, + "acc_norm_stderr": 0.029079374539480007 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.24871794871794872, + "acc_stderr": 0.0219169577092138, + "acc_norm": 0.24871794871794872, + "acc_norm_stderr": 0.0219169577092138 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816505, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816505 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04557239513497752, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04557239513497752 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2413793103448276, + "acc_stderr": 0.030108330718011625, + "acc_norm": 0.2413793103448276, + "acc_norm_stderr": 0.030108330718011625 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3032258064516129, + "acc_stderr": 0.026148685930671746, + "acc_norm": 0.3032258064516129, + "acc_norm_stderr": 0.026148685930671746 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.41025641025641024, + "acc_stderr": 0.03222414045241107, + "acc_norm": 0.41025641025641024, + "acc_norm_stderr": 0.03222414045241107 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.27547169811320754, + "acc_stderr": 0.027495663683724057, + "acc_norm": 0.27547169811320754, + "acc_norm_stderr": 0.027495663683724057 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.38181818181818183, + "acc_stderr": 0.04653429807913508, + "acc_norm": 0.38181818181818183, + "acc_norm_stderr": 0.04653429807913508 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.22962962962962963, + "acc_stderr": 0.025644108639267624, + "acc_norm": 0.22962962962962963, + "acc_norm_stderr": 0.025644108639267624 + }, + "harness|ko_mmlu_high_school_physics|5": 
{ + "acc": 0.2119205298013245, + "acc_stderr": 0.03336767086567977, + "acc_norm": 0.2119205298013245, + "acc_norm_stderr": 0.03336767086567977 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.3383084577114428, + "acc_stderr": 0.03345563070339192, + "acc_norm": 0.3383084577114428, + "acc_norm_stderr": 0.03345563070339192 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2543352601156069, + "acc_stderr": 0.0332055644308557, + "acc_norm": 0.2543352601156069, + "acc_norm_stderr": 0.0332055644308557 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.21693121693121692, + "acc_stderr": 0.021227082449445045, + "acc_norm": 0.21693121693121692, + "acc_norm_stderr": 0.021227082449445045 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2361111111111111, + "acc_stderr": 0.03551446610810826, + "acc_norm": 0.2361111111111111, + "acc_norm_stderr": 0.03551446610810826 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.22, + "acc_stderr": 0.041633319989322695, + "acc_norm": 0.22, + "acc_norm_stderr": 0.041633319989322695 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2745664739884393, + "acc_stderr": 0.02402774515526502, + "acc_norm": 0.2745664739884393, + "acc_norm_stderr": 0.02402774515526502 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.26993865030674846, + "acc_stderr": 0.03487825168497892, + "acc_norm": 0.26993865030674846, + "acc_norm_stderr": 0.03487825168497892 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3117283950617284, + "acc_stderr": 0.02577311116963045, + "acc_norm": 0.3117283950617284, + "acc_norm_stderr": 0.02577311116963045 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + 
"harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.24352331606217617, + "acc_stderr": 0.030975436386845436, + "acc_norm": 0.24352331606217617, + "acc_norm_stderr": 0.030975436386845436 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.040969851398436716, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.040969851398436716 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.24403669724770644, + "acc_stderr": 0.018415286351416416, + "acc_norm": 0.24403669724770644, + "acc_norm_stderr": 0.018415286351416416 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.20634920634920634, + "acc_stderr": 0.036196045241242494, + "acc_norm": 0.20634920634920634, + "acc_norm_stderr": 0.036196045241242494 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.025829163272757475, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.025829163272757475 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.4462809917355372, + "acc_stderr": 0.04537935177947879, + "acc_norm": 0.4462809917355372, + "acc_norm_stderr": 0.04537935177947879 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.17763157894736842, + "acc_stderr": 0.03110318238312338, + "acc_norm": 0.17763157894736842, + "acc_norm_stderr": 0.03110318238312338 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2696078431372549, + "acc_stderr": 0.017952449196987862, + "acc_norm": 0.2696078431372549, + "acc_norm_stderr": 0.017952449196987862 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.25177304964539005, + "acc_stderr": 0.025892151156709405, + "acc_norm": 0.25177304964539005, + "acc_norm_stderr": 0.025892151156709405 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.30357142857142855, + "acc_stderr": 
0.04364226155841044, + "acc_norm": 0.30357142857142855, + "acc_norm_stderr": 0.04364226155841044 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.24537037037037038, + "acc_stderr": 0.029346665094372937, + "acc_norm": 0.24537037037037038, + "acc_norm_stderr": 0.029346665094372937 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.22426470588235295, + "acc_stderr": 0.025336848563332372, + "acc_norm": 0.22426470588235295, + "acc_norm_stderr": 0.025336848563332372 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.35918367346938773, + "acc_stderr": 0.03071356045510849, + "acc_norm": 0.35918367346938773, + "acc_norm_stderr": 0.03071356045510849 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.28270042194092826, + "acc_stderr": 0.029312814153955924, + "acc_norm": 0.28270042194092826, + "acc_norm_stderr": 0.029312814153955924 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.24511082138200782, + "acc_stderr": 0.010986307870045517, + "acc_norm": 0.24511082138200782, + "acc_norm_stderr": 0.010986307870045517 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.25980392156862747, + "acc_stderr": 0.030778554678693254, + "acc_norm": 0.25980392156862747, + "acc_norm_stderr": 0.030778554678693254 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2545454545454545, + "acc_stderr": 0.0340150671524904, + "acc_norm": 0.2545454545454545, + "acc_norm_stderr": 0.0340150671524904 + 
}, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2766217870257038, + "mc1_stderr": 0.015659605755326905, + "mc2": 0.4649376014172755, + "mc2_stderr": 0.015443831068166118 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.26564344746162927, + "acc_stderr": 0.015185107107791248, + "acc_norm": 0.35182998819362454, + "acc_norm_stderr": 0.016418206451218054 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 
1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "eclipsemint/kollama2-7b-v0", + "model_sha": "e2a3ee343f997cca7ad3e25b5d970376d79c5b4e", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/eclipsemint/kollama2-7b-v1.1/result_2023-11-02 21:53:46.json b/eclipsemint/kollama2-7b-v1.1/result_2023-11-02 21:53:46.json new file mode 100644 index 0000000000000000000000000000000000000000..33b0d70cf77fd556e657ee3c891f1f15b01f5476 --- /dev/null +++ b/eclipsemint/kollama2-7b-v1.1/result_2023-11-02 21:53:46.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.20051194539249148, + "acc_stderr": 0.011700318050499354, + "acc_norm": 0.25341296928327645, + "acc_norm_stderr": 0.012710896778378606 + }, + "harness|ko_hellaswag|10": { + "acc": 0.27614021111332404, + 
"acc_stderr": 0.004461732908157659, + "acc_norm": 0.29904401513642703, + "acc_norm_stderr": 0.004569034613332603 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.22807017543859648, + "acc_stderr": 0.03218093795602357, + "acc_norm": 0.22807017543859648, + "acc_norm_stderr": 0.03218093795602357 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.21359223300970873, + "acc_stderr": 0.04058042015646034, + "acc_norm": 0.21359223300970873, + "acc_norm_stderr": 0.04058042015646034 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.21711366538952745, + "acc_stderr": 0.014743125394823297, + "acc_norm": 0.21711366538952745, + "acc_norm_stderr": 0.014743125394823297 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.22962962962962963, + "acc_stderr": 0.036333844140734636, + "acc_norm": 0.22962962962962963, + "acc_norm_stderr": 0.036333844140734636 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.20851063829787234, + "acc_stderr": 0.026556982117838746, + "acc_norm": 0.20851063829787234, + "acc_norm_stderr": 0.026556982117838746 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.29518072289156627, + "acc_stderr": 0.03550920185689631, + "acc_norm": 0.29518072289156627, + "acc_norm_stderr": 0.03550920185689631 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.24437299035369775, + "acc_stderr": 0.024406162094668886, + "acc_norm": 0.24437299035369775, + "acc_norm_stderr": 0.024406162094668886 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.18834080717488788, + "acc_stderr": 0.026241132996407252, + "acc_norm": 0.18834080717488788, + "acc_norm_stderr": 0.026241132996407252 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2366412213740458, + "acc_stderr": 0.03727673575596918, + "acc_norm": 0.2366412213740458, + "acc_norm_stderr": 0.03727673575596918 + }, + 
"harness|ko_mmlu_medical_genetics|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.29797979797979796, + "acc_stderr": 0.032586303838365555, + "acc_norm": 0.29797979797979796, + "acc_norm_stderr": 0.032586303838365555 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2413793103448276, + "acc_stderr": 0.03565998174135302, + "acc_norm": 0.2413793103448276, + "acc_norm_stderr": 0.03565998174135302 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.30392156862745096, + "acc_stderr": 0.04576665403207762, + "acc_norm": 0.30392156862745096, + "acc_norm_stderr": 0.04576665403207762 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.31092436974789917, + "acc_stderr": 0.03006676158297794, + "acc_norm": 0.31092436974789917, + "acc_norm_stderr": 0.03006676158297794 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.23846153846153847, + "acc_stderr": 0.021606294494647727, + "acc_norm": 0.23846153846153847, + "acc_norm_stderr": 0.021606294494647727 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.18, + "acc_stderr": 0.038612291966536955, + "acc_norm": 0.18, + "acc_norm_stderr": 0.038612291966536955 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.21296296296296297, + "acc_stderr": 0.03957835471980981, + "acc_norm": 0.21296296296296297, + "acc_norm_stderr": 0.03957835471980981 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.21182266009852216, + "acc_stderr": 0.02874898368994106, + "acc_norm": 0.21182266009852216, + "acc_norm_stderr": 0.02874898368994106 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.33225806451612905, + "acc_stderr": 0.02679556084812279, + "acc_norm": 0.33225806451612905, + 
"acc_norm_stderr": 0.02679556084812279 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.19658119658119658, + "acc_stderr": 0.02603538609895129, + "acc_norm": 0.19658119658119658, + "acc_norm_stderr": 0.02603538609895129 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.29056603773584905, + "acc_stderr": 0.02794321998933714, + "acc_norm": 0.29056603773584905, + "acc_norm_stderr": 0.02794321998933714 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03955932861795833, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03955932861795833 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2814814814814815, + "acc_stderr": 0.027420019350945277, + "acc_norm": 0.2814814814814815, + "acc_norm_stderr": 0.027420019350945277 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.03757949922943342, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.03757949922943342 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.26865671641791045, + "acc_stderr": 0.03134328358208954, + "acc_norm": 0.26865671641791045, + "acc_norm_stderr": 0.03134328358208954 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.34104046242774566, + "acc_stderr": 0.036146654241808254, + "acc_norm": 0.34104046242774566, + "acc_norm_stderr": 0.036146654241808254 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2566137566137566, + "acc_stderr": 0.022494510767503154, + "acc_norm": 0.2566137566137566, + "acc_norm_stderr": 0.022494510767503154 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.037455547914624555, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.037455547914624555 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.28, + "acc_stderr": 
0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.1994219653179191, + "acc_stderr": 0.021511900654252552, + "acc_norm": 0.1994219653179191, + "acc_norm_stderr": 0.021511900654252552 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2331288343558282, + "acc_stderr": 0.0332201579577674, + "acc_norm": 0.2331288343558282, + "acc_norm_stderr": 0.0332201579577674 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.24382716049382716, + "acc_stderr": 0.023891879541959614, + "acc_norm": 0.24382716049382716, + "acc_norm_stderr": 0.023891879541959614 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.36787564766839376, + "acc_stderr": 0.034801756684660366, + "acc_norm": 0.36787564766839376, + "acc_norm_stderr": 0.034801756684660366 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.03999423879281336, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.03999423879281336 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.30091743119266057, + "acc_stderr": 0.01966475136680211, + "acc_norm": 0.30091743119266057, + "acc_norm_stderr": 0.01966475136680211 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.04134913018303316, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.04134913018303316 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.21895424836601307, + "acc_stderr": 0.02367908986180772, + "acc_norm": 0.21895424836601307, + "acc_norm_stderr": 0.02367908986180772 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 
0.33884297520661155, + "acc_stderr": 0.043207678075366684, + "acc_norm": 0.33884297520661155, + "acc_norm_stderr": 0.043207678075366684 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3223684210526316, + "acc_stderr": 0.03803510248351585, + "acc_norm": 0.3223684210526316, + "acc_norm_stderr": 0.03803510248351585 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.22058823529411764, + "acc_stderr": 0.016774672365468514, + "acc_norm": 0.22058823529411764, + "acc_norm_stderr": 0.016774672365468514 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.22340425531914893, + "acc_stderr": 0.024847921358063962, + "acc_norm": 0.22340425531914893, + "acc_norm_stderr": 0.024847921358063962 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.19642857142857142, + "acc_stderr": 0.03770970049347018, + "acc_norm": 0.19642857142857142, + "acc_norm_stderr": 0.03770970049347018 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4675925925925926, + "acc_stderr": 0.03402801581358966, + "acc_norm": 0.4675925925925926, + "acc_norm_stderr": 0.03402801581358966 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2737430167597765, + "acc_stderr": 0.014912413096372432, + "acc_norm": 0.2737430167597765, + "acc_norm_stderr": 0.014912413096372432 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932268, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932268 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.40441176470588236, + "acc_stderr": 0.029812630701569743, + "acc_norm": 0.40441176470588236, + "acc_norm_stderr": 0.029812630701569743 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.37551020408163266, + "acc_stderr": 0.031001209039894836, + "acc_norm": 0.37551020408163266, + "acc_norm_stderr": 
0.031001209039894836 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2869198312236287, + "acc_stderr": 0.02944377302259469, + "acc_norm": 0.2869198312236287, + "acc_norm_stderr": 0.02944377302259469 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2522816166883963, + "acc_stderr": 0.011092789056875246, + "acc_norm": 0.2522816166883963, + "acc_norm_stderr": 0.011092789056875246 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.25980392156862747, + "acc_stderr": 0.030778554678693268, + "acc_norm": 0.25980392156862747, + "acc_norm_stderr": 0.030778554678693268 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.28484848484848485, + "acc_stderr": 0.03524390844511782, + "acc_norm": 0.28484848484848485, + "acc_norm_stderr": 0.03524390844511782 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2252141982864137, + "mc1_stderr": 0.014623240768023503, + "mc2": 0.45823651606631305, + "mc2_stderr": 0.01710273017399995 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.22904368358913813, + "acc_stderr": 0.01444737227725382, + "acc_norm": 0.282172373081464, + "acc_norm_stderr": 0.015473271583988433 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + 
"harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "eclipsemint/kollama2-7b-v1.1", + "model_sha": "02268afbed60e68ba0142404ddd5a2c0031a3420", + 
"model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/eclipsemint/kollama2-7b-v1.2/result_2023-11-04 06:39:14.json b/eclipsemint/kollama2-7b-v1.2/result_2023-11-04 06:39:14.json new file mode 100644 index 0000000000000000000000000000000000000000..2bb458a742a79739cc2e864df96be8b550a0910f --- /dev/null +++ b/eclipsemint/kollama2-7b-v1.2/result_2023-11-04 06:39:14.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.20051194539249148, + "acc_stderr": 0.011700318050499361, + "acc_norm": 0.23976109215017063, + "acc_norm_stderr": 0.012476304127453956 + }, + "harness|ko_hellaswag|10": { + "acc": 0.280920135431189, + "acc_stderr": 0.004485300194072271, + "acc_norm": 0.3069109739095798, + "acc_norm_stderr": 0.00460269541675698 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.28654970760233917, + "acc_stderr": 0.034678266857038245, + "acc_norm": 0.28654970760233917, + "acc_norm_stderr": 0.034678266857038245 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.17475728155339806, + "acc_stderr": 0.03760178006026621, + "acc_norm": 0.17475728155339806, + "acc_norm_stderr": 0.03760178006026621 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.26309067688378035, + "acc_stderr": 0.015745497169049057, + "acc_norm": 0.26309067688378035, + "acc_norm_stderr": 0.015745497169049057 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2740740740740741, + "acc_stderr": 0.03853254836552003, + "acc_norm": 0.2740740740740741, + "acc_norm_stderr": 0.03853254836552003 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.19574468085106383, + "acc_stderr": 0.025937853139977148, + "acc_norm": 0.19574468085106383, + "acc_norm_stderr": 
0.025937853139977148 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.21084337349397592, + "acc_stderr": 0.031755547866299194, + "acc_norm": 0.21084337349397592, + "acc_norm_stderr": 0.031755547866299194 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2958199356913183, + "acc_stderr": 0.025922371788818777, + "acc_norm": 0.2958199356913183, + "acc_norm_stderr": 0.025922371788818777 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.2062780269058296, + "acc_stderr": 0.027157150479563824, + "acc_norm": 0.2062780269058296, + "acc_norm_stderr": 0.027157150479563824 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.22900763358778625, + "acc_stderr": 0.036853466317118506, + "acc_norm": 0.22900763358778625, + "acc_norm_stderr": 0.036853466317118506 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.17, + "acc_stderr": 0.0377525168068637, + "acc_norm": 0.17, + "acc_norm_stderr": 0.0377525168068637 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.26262626262626265, + "acc_stderr": 0.03135305009533084, + "acc_norm": 0.26262626262626265, + "acc_norm_stderr": 0.03135305009533084 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.296551724137931, + "acc_stderr": 0.03806142687309994, + "acc_norm": 0.296551724137931, + "acc_norm_stderr": 0.03806142687309994 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.1568627450980392, + "acc_stderr": 0.036186648199362466, + "acc_norm": 0.1568627450980392, + "acc_norm_stderr": 0.036186648199362466 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.25210084033613445, + "acc_stderr": 0.028205545033277726, + "acc_norm": 0.25210084033613445, + "acc_norm_stderr": 0.028205545033277726 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.26153846153846155, + "acc_stderr": 0.022282141204204416, + "acc_norm": 0.26153846153846155, + "acc_norm_stderr": 0.022282141204204416 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.32, + "acc_stderr": 
0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.19444444444444445, + "acc_stderr": 0.038260763248848646, + "acc_norm": 0.19444444444444445, + "acc_norm_stderr": 0.038260763248848646 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.28078817733990147, + "acc_stderr": 0.03161856335358611, + "acc_norm": 0.28078817733990147, + "acc_norm_stderr": 0.03161856335358611 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2645161290322581, + "acc_stderr": 0.02509189237885928, + "acc_norm": 0.2645161290322581, + "acc_norm_stderr": 0.02509189237885928 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.27350427350427353, + "acc_stderr": 0.029202540153431166, + "acc_norm": 0.27350427350427353, + "acc_norm_stderr": 0.029202540153431166 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.22264150943396227, + "acc_stderr": 0.02560423347089911, + "acc_norm": 0.22264150943396227, + "acc_norm_stderr": 0.02560423347089911 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.20909090909090908, + "acc_stderr": 0.03895091015724135, + "acc_norm": 0.20909090909090908, + "acc_norm_stderr": 0.03895091015724135 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.026962424325073838, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.026962424325073838 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + "acc_stderr": 0.03734535676787198, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.03734535676787198 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.24378109452736318, + "acc_stderr": 0.030360490154014652, + "acc_norm": 0.24378109452736318, + "acc_norm_stderr": 0.030360490154014652 + }, + 
"harness|ko_mmlu_college_medicine|5": { + "acc": 0.2832369942196532, + "acc_stderr": 0.03435568056047875, + "acc_norm": 0.2832369942196532, + "acc_norm_stderr": 0.03435568056047875 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.26455026455026454, + "acc_stderr": 0.02271746789770861, + "acc_norm": 0.26455026455026454, + "acc_norm_stderr": 0.02271746789770861 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2361111111111111, + "acc_stderr": 0.03551446610810826, + "acc_norm": 0.2361111111111111, + "acc_norm_stderr": 0.03551446610810826 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2630057803468208, + "acc_stderr": 0.023703099525258172, + "acc_norm": 0.2630057803468208, + "acc_norm_stderr": 0.023703099525258172 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2883435582822086, + "acc_stderr": 0.03559039531617342, + "acc_norm": 0.2883435582822086, + "acc_norm_stderr": 0.03559039531617342 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.02540719779889017, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.02540719779889017 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.24352331606217617, + "acc_stderr": 0.030975436386845426, + "acc_norm": 0.24352331606217617, + "acc_norm_stderr": 0.030975436386845426 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.03999423879281336, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 
0.03999423879281336 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.23669724770642203, + "acc_stderr": 0.01822407811729907, + "acc_norm": 0.23669724770642203, + "acc_norm_stderr": 0.01822407811729907 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.15873015873015872, + "acc_stderr": 0.032684540130117436, + "acc_norm": 0.15873015873015872, + "acc_norm_stderr": 0.032684540130117436 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.24836601307189543, + "acc_stderr": 0.024739981355113592, + "acc_norm": 0.24836601307189543, + "acc_norm_stderr": 0.024739981355113592 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.22, + "acc_stderr": 0.041633319989322674, + "acc_norm": 0.22, + "acc_norm_stderr": 0.041633319989322674 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.33884297520661155, + "acc_stderr": 0.0432076780753667, + "acc_norm": 0.33884297520661155, + "acc_norm_stderr": 0.0432076780753667 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3026315789473684, + "acc_stderr": 0.03738520676119667, + "acc_norm": 0.3026315789473684, + "acc_norm_stderr": 0.03738520676119667 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.25980392156862747, + "acc_stderr": 0.017740899509177788, + "acc_norm": 0.25980392156862747, + "acc_norm_stderr": 0.017740899509177788 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.26595744680851063, + "acc_stderr": 0.02635806569888059, + "acc_norm": 0.26595744680851063, + "acc_norm_stderr": 0.02635806569888059 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.24107142857142858, + "acc_stderr": 0.04059867246952689, + "acc_norm": 0.24107142857142858, + "acc_norm_stderr": 0.04059867246952689 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.30092592592592593, + "acc_stderr": 0.0312803908432988, + "acc_norm": 0.30092592592592593, + "acc_norm_stderr": 0.0312803908432988 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24692737430167597, + "acc_stderr": 
0.014422292204808852, + "acc_norm": 0.24692737430167597, + "acc_norm_stderr": 0.014422292204808852 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.1948529411764706, + "acc_stderr": 0.024060599423487424, + "acc_norm": 0.1948529411764706, + "acc_norm_stderr": 0.024060599423487424 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2530612244897959, + "acc_stderr": 0.02783302387139968, + "acc_norm": 0.2530612244897959, + "acc_norm_stderr": 0.02783302387139968 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.24472573839662448, + "acc_stderr": 0.027985699387036413, + "acc_norm": 0.24472573839662448, + "acc_norm_stderr": 0.027985699387036413 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2646675358539765, + "acc_stderr": 0.011267332992845528, + "acc_norm": 0.2646675358539765, + "acc_norm_stderr": 0.011267332992845528 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.030587591351604246, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.030587591351604246 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2727272727272727, + "acc_stderr": 0.03477691162163659, + "acc_norm": 0.2727272727272727, + "acc_norm_stderr": 0.03477691162163659 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.23011015911872704, + "mc1_stderr": 0.014734557959807763, + "mc2": 0.4651387560988257, + "mc2_stderr": 0.0170466746202686 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.23730814639905548, + "acc_stderr": 0.01462667783718623, + "acc_norm": 0.3246753246753247, + "acc_norm_stderr": 0.016098883939346467 + } + }, + "versions": { + "all": 0, + 
"harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + 
"harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "eclipsemint/kollama2-7b-v1.2", + "model_sha": "c69cbcd522d1a49ae1576342c027ceadc57de738", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/eclipsemint/kollama2-7b-v1.3/result_2023-11-05 03:47:02.json b/eclipsemint/kollama2-7b-v1.3/result_2023-11-05 03:47:02.json new file mode 100644 index 0000000000000000000000000000000000000000..51acf406eb716b398cfe676b5175ce4cbbf5ab83 --- /dev/null +++ b/eclipsemint/kollama2-7b-v1.3/result_2023-11-05 03:47:02.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.19539249146757678, + "acc_stderr": 0.01158690718995291, + "acc_norm": 0.24829351535836178, + "acc_norm_stderr": 0.012624912868089755 + }, + "harness|ko_hellaswag|10": { + "acc": 0.2832105158334993, + "acc_stderr": 0.0044963697421321076, + "acc_norm": 0.3134833698466441, + "acc_norm_stderr": 0.0046296088632722925 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.29239766081871343, + "acc_stderr": 0.034886477134579236, + "acc_norm": 0.29239766081871343, + "acc_norm_stderr": 0.034886477134579236 + }, + "harness|ko_mmlu_management|5": { + "acc": 
0.1650485436893204, + "acc_stderr": 0.03675668832233188, + "acc_norm": 0.1650485436893204, + "acc_norm_stderr": 0.03675668832233188 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2681992337164751, + "acc_stderr": 0.015842430835269438, + "acc_norm": 0.2681992337164751, + "acc_norm_stderr": 0.015842430835269438 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.03785714465066655, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.03785714465066655 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2170212765957447, + "acc_stderr": 0.026947483121496252, + "acc_norm": 0.2170212765957447, + "acc_norm_stderr": 0.026947483121496252 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.21686746987951808, + "acc_stderr": 0.03208284450356365, + "acc_norm": 0.21686746987951808, + "acc_norm_stderr": 0.03208284450356365 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2572347266881029, + "acc_stderr": 0.024826171289250888, + "acc_norm": 0.2572347266881029, + "acc_norm_stderr": 0.024826171289250888 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.2062780269058296, + "acc_stderr": 0.027157150479563824, + "acc_norm": 0.2062780269058296, + "acc_norm_stderr": 0.027157150479563824 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2366412213740458, + "acc_stderr": 0.03727673575596917, + "acc_norm": 0.2366412213740458, + "acc_norm_stderr": 0.03727673575596917 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.18, + "acc_stderr": 0.03861229196653694, + "acc_norm": 0.18, + "acc_norm_stderr": 0.03861229196653694 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.26262626262626265, + "acc_stderr": 0.031353050095330834, + "acc_norm": 0.26262626262626265, + "acc_norm_stderr": 0.031353050095330834 + }, + "harness|ko_mmlu_electrical_engineering|5": { 
+ "acc": 0.3310344827586207, + "acc_stderr": 0.03921545312467122, + "acc_norm": 0.3310344827586207, + "acc_norm_stderr": 0.03921545312467122 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.04023382273617747, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.04023382273617747 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.2815126050420168, + "acc_stderr": 0.029213549414372174, + "acc_norm": 0.2815126050420168, + "acc_norm_stderr": 0.029213549414372174 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2923076923076923, + "acc_stderr": 0.02306043838085774, + "acc_norm": 0.2923076923076923, + "acc_norm_stderr": 0.02306043838085774 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.18518518518518517, + "acc_stderr": 0.037552658650371835, + "acc_norm": 0.18518518518518517, + "acc_norm_stderr": 0.037552658650371835 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2660098522167488, + "acc_stderr": 0.031089826002937523, + "acc_norm": 0.2660098522167488, + "acc_norm_stderr": 0.031089826002937523 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2806451612903226, + "acc_stderr": 0.0255606047210229, + "acc_norm": 0.2806451612903226, + "acc_norm_stderr": 0.0255606047210229 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.25213675213675213, + "acc_stderr": 0.02844796547623102, + "acc_norm": 0.25213675213675213, + "acc_norm_stderr": 0.02844796547623102 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2339622641509434, + "acc_stderr": 0.02605529690115292, + "acc_norm": 0.2339622641509434, + "acc_norm_stderr": 0.02605529690115292 + }, 
+ "harness|ko_mmlu_public_relations|5": { + "acc": 0.2, + "acc_stderr": 0.038313051408846006, + "acc_norm": 0.2, + "acc_norm_stderr": 0.038313051408846006 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.02671924078371217, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.02671924078371217 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2781456953642384, + "acc_stderr": 0.03658603262763743, + "acc_norm": 0.2781456953642384, + "acc_norm_stderr": 0.03658603262763743 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.2736318407960199, + "acc_stderr": 0.031524391865554, + "acc_norm": 0.2736318407960199, + "acc_norm_stderr": 0.031524391865554 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3063583815028902, + "acc_stderr": 0.035149425512674394, + "acc_norm": 0.3063583815028902, + "acc_norm_stderr": 0.035149425512674394 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.02264421261552521, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.02264421261552521 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.24305555555555555, + "acc_stderr": 0.03586879280080342, + "acc_norm": 0.24305555555555555, + "acc_norm_stderr": 0.03586879280080342 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.19, + "acc_stderr": 0.03942772444036624, + "acc_norm": 0.19, + "acc_norm_stderr": 0.03942772444036624 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2976878612716763, + "acc_stderr": 0.024617055388677006, + "acc_norm": 0.2976878612716763, + "acc_norm_stderr": 0.024617055388677006 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2822085889570552, + "acc_stderr": 0.03536117886664743, + "acc_norm": 0.2822085889570552, + "acc_norm_stderr": 0.03536117886664743 + }, + 
"harness|ko_mmlu_prehistory|5": { + "acc": 0.30246913580246915, + "acc_stderr": 0.025557653981868034, + "acc_norm": 0.30246913580246915, + "acc_norm_stderr": 0.025557653981868034 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.3316062176165803, + "acc_stderr": 0.03397636541089116, + "acc_norm": 0.3316062176165803, + "acc_norm_stderr": 0.03397636541089116 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.22807017543859648, + "acc_stderr": 0.03947152782669415, + "acc_norm": 0.22807017543859648, + "acc_norm_stderr": 0.03947152782669415 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.23302752293577983, + "acc_stderr": 0.018125669180861507, + "acc_norm": 0.23302752293577983, + "acc_norm_stderr": 0.018125669180861507 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.15873015873015872, + "acc_stderr": 0.032684540130117436, + "acc_norm": 0.15873015873015872, + "acc_norm_stderr": 0.032684540130117436 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.25163398692810457, + "acc_stderr": 0.0248480182638752, + "acc_norm": 0.25163398692810457, + "acc_norm_stderr": 0.0248480182638752 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.371900826446281, + "acc_stderr": 0.04412015806624503, + "acc_norm": 0.371900826446281, + "acc_norm_stderr": 0.04412015806624503 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.27631578947368424, + "acc_stderr": 0.03639057569952924, + "acc_norm": 0.27631578947368424, + "acc_norm_stderr": 0.03639057569952924 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.27124183006535946, + "acc_stderr": 0.017986615304030305, + "acc_norm": 0.27124183006535946, + 
"acc_norm_stderr": 0.017986615304030305 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.26595744680851063, + "acc_stderr": 0.026358065698880592, + "acc_norm": 0.26595744680851063, + "acc_norm_stderr": 0.026358065698880592 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.23214285714285715, + "acc_stderr": 0.04007341809755807, + "acc_norm": 0.23214285714285715, + "acc_norm_stderr": 0.04007341809755807 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.25, + "acc_stderr": 0.029531221160930918, + "acc_norm": 0.25, + "acc_norm_stderr": 0.029531221160930918 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24692737430167597, + "acc_stderr": 0.014422292204808852, + "acc_norm": 0.24692737430167597, + "acc_norm_stderr": 0.014422292204808852 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.026799562024887653, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.026799562024887653 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.39591836734693875, + "acc_stderr": 0.03130802899065685, + "acc_norm": 0.39591836734693875, + "acc_norm_stderr": 0.03130802899065685 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.25316455696202533, + "acc_stderr": 0.028304657943035303, + "acc_norm": 0.25316455696202533, + "acc_norm_stderr": 0.028304657943035303 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2588005215123859, + "acc_stderr": 0.01118610904656461, + "acc_norm": 0.2588005215123859, + "acc_norm_stderr": 0.01118610904656461 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.25, + "acc_stderr": 
0.03039153369274154, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03039153369274154 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2545454545454545, + "acc_stderr": 0.03401506715249039, + "acc_norm": 0.2545454545454545, + "acc_norm_stderr": 0.03401506715249039 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.23378212974296206, + "mc1_stderr": 0.01481619599193158, + "mc2": 0.46005718929477757, + "mc2_stderr": 0.016990439061351184 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.24793388429752067, + "acc_stderr": 0.01484604496825225, + "acc_norm": 0.29161747343565525, + "acc_norm_stderr": 0.015626276690070242 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + 
"harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "eclipsemint/kollama2-7b-v1.3", + "model_sha": "ba1caccde94a38f8e099177229e71b93a9aac534", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/eclipsemint/kollama2-7b-v1/result_2023-11-01 09:12:02.json b/eclipsemint/kollama2-7b-v1/result_2023-11-01 09:12:02.json new file mode 100644 index 0000000000000000000000000000000000000000..42867a3f2c8e05e16e25dab50af7ad87a5d75b32 --- /dev/null +++ b/eclipsemint/kollama2-7b-v1/result_2023-11-01 09:12:02.json @@ 
-0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.1962457337883959, + "acc_stderr": 0.011606019881416282, + "acc_norm": 0.22781569965870307, + "acc_norm_stderr": 0.012256708602326902 + }, + "harness|ko_hellaswag|10": { + "acc": 0.28032264489145586, + "acc_stderr": 0.004482388821388948, + "acc_norm": 0.31009759012148974, + "acc_norm_stderr": 0.0046158803527997444 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.21052631578947367, + "acc_stderr": 0.0312678171466318, + "acc_norm": 0.21052631578947367, + "acc_norm_stderr": 0.0312678171466318 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2621359223300971, + "acc_stderr": 0.04354631077260595, + "acc_norm": 0.2621359223300971, + "acc_norm_stderr": 0.04354631077260595 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.28735632183908044, + "acc_stderr": 0.0161824107306827, + "acc_norm": 0.28735632183908044, + "acc_norm_stderr": 0.0161824107306827 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.23703703703703705, + "acc_stderr": 0.03673731683969506, + "acc_norm": 0.23703703703703705, + "acc_norm_stderr": 0.03673731683969506 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768077, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768077 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.32340425531914896, + "acc_stderr": 0.03057944277361034, + "acc_norm": 0.32340425531914896, + "acc_norm_stderr": 0.03057944277361034 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3253012048192771, + "acc_stderr": 0.03647168523683228, + "acc_norm": 0.3253012048192771, + "acc_norm_stderr": 0.03647168523683228 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2572347266881029, + "acc_stderr": 0.024826171289250888, + "acc_norm": 0.2572347266881029, + "acc_norm_stderr": 0.024826171289250888 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.37668161434977576, + "acc_stderr": 0.03252113489929189, + "acc_norm": 0.37668161434977576, + 
"acc_norm_stderr": 0.03252113489929189 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.22900763358778625, + "acc_stderr": 0.036853466317118506, + "acc_norm": 0.22900763358778625, + "acc_norm_stderr": 0.036853466317118506 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.21717171717171718, + "acc_stderr": 0.02937661648494563, + "acc_norm": 0.21717171717171718, + "acc_norm_stderr": 0.02937661648494563 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2206896551724138, + "acc_stderr": 0.03455930201924811, + "acc_norm": 0.2206896551724138, + "acc_norm_stderr": 0.03455930201924811 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.19607843137254902, + "acc_stderr": 0.03950581861179962, + "acc_norm": 0.19607843137254902, + "acc_norm_stderr": 0.03950581861179962 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.23109243697478993, + "acc_stderr": 0.027381406927868973, + "acc_norm": 0.23109243697478993, + "acc_norm_stderr": 0.027381406927868973 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3641025641025641, + "acc_stderr": 0.024396672985094778, + "acc_norm": 0.3641025641025641, + "acc_norm_stderr": 0.024396672985094778 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.2037037037037037, + "acc_stderr": 0.038935425188248475, + "acc_norm": 0.2037037037037037, + "acc_norm_stderr": 0.038935425188248475 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.270935960591133, + "acc_stderr": 0.031270907132976984, + 
"acc_norm": 0.270935960591133, + "acc_norm_stderr": 0.031270907132976984 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.25483870967741934, + "acc_stderr": 0.024790118459332208, + "acc_norm": 0.25483870967741934, + "acc_norm_stderr": 0.024790118459332208 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.19658119658119658, + "acc_stderr": 0.02603538609895129, + "acc_norm": 0.19658119658119658, + "acc_norm_stderr": 0.02603538609895129 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2679245283018868, + "acc_stderr": 0.027257260322494845, + "acc_norm": 0.2679245283018868, + "acc_norm_stderr": 0.027257260322494845 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.34545454545454546, + "acc_stderr": 0.04554619617541054, + "acc_norm": 0.34545454545454546, + "acc_norm_stderr": 0.04554619617541054 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.29259259259259257, + "acc_stderr": 0.027738969632176088, + "acc_norm": 0.29259259259259257, + "acc_norm_stderr": 0.027738969632176088 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.1986754966887417, + "acc_stderr": 0.03257847384436776, + "acc_norm": 0.1986754966887417, + "acc_norm_stderr": 0.03257847384436776 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.23880597014925373, + "acc_stderr": 0.030147775935409224, + "acc_norm": 0.23880597014925373, + "acc_norm_stderr": 0.030147775935409224 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2138728323699422, + "acc_stderr": 0.031265112061730424, + "acc_norm": 0.2138728323699422, + "acc_norm_stderr": 0.031265112061730424 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2566137566137566, + "acc_stderr": 0.022494510767503154, + "acc_norm": 0.2566137566137566, + "acc_norm_stderr": 0.022494510767503154 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2916666666666667, + "acc_stderr": 0.038009680605548574, + "acc_norm": 0.2916666666666667, + "acc_norm_stderr": 0.038009680605548574 + }, + 
"harness|ko_mmlu_college_chemistry|5": { + "acc": 0.22, + "acc_stderr": 0.041633319989322695, + "acc_norm": 0.22, + "acc_norm_stderr": 0.041633319989322695 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2514450867052023, + "acc_stderr": 0.02335736578587403, + "acc_norm": 0.2514450867052023, + "acc_norm_stderr": 0.02335736578587403 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.24539877300613497, + "acc_stderr": 0.03380939813943353, + "acc_norm": 0.24539877300613497, + "acc_norm_stderr": 0.03380939813943353 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2654320987654321, + "acc_stderr": 0.02456922360046085, + "acc_norm": 0.2654320987654321, + "acc_norm_stderr": 0.02456922360046085 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768077, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768077 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.27979274611398963, + "acc_stderr": 0.03239637046735703, + "acc_norm": 0.27979274611398963, + "acc_norm_stderr": 0.03239637046735703 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.041424397194893624, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.041424397194893624 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.23669724770642203, + "acc_stderr": 0.018224078117299095, + "acc_norm": 0.23669724770642203, + "acc_norm_stderr": 0.018224078117299095 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.20634920634920634, + "acc_stderr": 0.03619604524124252, + "acc_norm": 0.20634920634920634, + "acc_norm_stderr": 0.03619604524124252 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.22875816993464052, + "acc_stderr": 0.02405102973991225, + "acc_norm": 0.22875816993464052, + "acc_norm_stderr": 
0.02405102973991225 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.24793388429752067, + "acc_stderr": 0.039418975265163025, + "acc_norm": 0.24793388429752067, + "acc_norm_stderr": 0.039418975265163025 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.19078947368421054, + "acc_stderr": 0.03197565821032499, + "acc_norm": 0.19078947368421054, + "acc_norm_stderr": 0.03197565821032499 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.24673202614379086, + "acc_stderr": 0.0174408203674025, + "acc_norm": 0.24673202614379086, + "acc_norm_stderr": 0.0174408203674025 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2553191489361702, + "acc_stderr": 0.02601199293090201, + "acc_norm": 0.2553191489361702, + "acc_norm_stderr": 0.02601199293090201 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.04287858751340456, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.04287858751340456 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4305555555555556, + "acc_stderr": 0.03376922151252335, + "acc_norm": 0.4305555555555556, + "acc_norm_stderr": 0.03376922151252335 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2435754189944134, + "acc_stderr": 0.01435591196476786, + "acc_norm": 0.2435754189944134, + "acc_norm_stderr": 0.01435591196476786 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816507, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816507 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4264705882352941, + "acc_stderr": 0.030042615832714857, + "acc_norm": 
0.4264705882352941, + "acc_norm_stderr": 0.030042615832714857 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.27755102040816326, + "acc_stderr": 0.02866685779027465, + "acc_norm": 0.27755102040816326, + "acc_norm_stderr": 0.02866685779027465 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2616033755274262, + "acc_stderr": 0.028609516716994934, + "acc_norm": 0.2616033755274262, + "acc_norm_stderr": 0.028609516716994934 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.23989569752281617, + "acc_stderr": 0.010906282617981652, + "acc_norm": 0.23989569752281617, + "acc_norm_stderr": 0.010906282617981652 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.030587591351604246, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.030587591351604246 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2545454545454545, + "acc_stderr": 0.03401506715249039, + "acc_norm": 0.2545454545454545, + "acc_norm_stderr": 0.03401506715249039 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.25703794369645044, + "mc1_stderr": 0.015298077509485083, + "mc2": 0.44801118006268165, + "mc2_stderr": 0.016262139478608006 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.20188902007083825, + "acc_stderr": 0.013800753895777422, + "acc_norm": 0.2727272727272727, + "acc_norm_stderr": 0.015311853110300352 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + 
"harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + 
"harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "eclipsemint/kollama2-7b-v1", + "model_sha": "d3271305724d054f37807dae60c6c875d0092362", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/ehartford/dolphin-2.2.1-mistral-7b/result_2023-12-18 04:48:34.json b/ehartford/dolphin-2.2.1-mistral-7b/result_2023-12-18 04:48:34.json new file mode 100644 index 0000000000000000000000000000000000000000..221b2d5445caecd9224f0acce2e50720b7536547 --- /dev/null +++ b/ehartford/dolphin-2.2.1-mistral-7b/result_2023-12-18 04:48:34.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3430034129692833, + "acc_stderr": 0.013872423223718166, + "acc_norm": 0.39761092150170646, + "acc_norm_stderr": 0.014301752223279535 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3685520812587134, + "acc_stderr": 0.004814261966376846, + "acc_norm": 0.46484763991236805, + "acc_norm_stderr": 0.004977434505403359 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5029239766081871, + "acc_stderr": 0.03834759370936839, + "acc_norm": 0.5029239766081871, + "acc_norm_stderr": 0.03834759370936839 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6213592233009708, + "acc_stderr": 0.04802694698258975, + "acc_norm": 0.6213592233009708, + "acc_norm_stderr": 0.04802694698258975 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.46360153256704983, + "acc_stderr": 0.01783252407959326, + "acc_norm": 0.46360153256704983, + "acc_norm_stderr": 0.01783252407959326 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37777777777777777, + "acc_stderr": 0.04188307537595853, + "acc_norm": 0.37777777777777777, + "acc_norm_stderr": 0.04188307537595853 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + 
"acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3702127659574468, + "acc_stderr": 0.03156564682236784, + "acc_norm": 0.3702127659574468, + "acc_norm_stderr": 0.03156564682236784 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.40963855421686746, + "acc_stderr": 0.03828401115079022, + "acc_norm": 0.40963855421686746, + "acc_norm_stderr": 0.03828401115079022 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.47266881028938906, + "acc_stderr": 0.02835563356832818, + "acc_norm": 0.47266881028938906, + "acc_norm_stderr": 0.02835563356832818 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.45739910313901344, + "acc_stderr": 0.033435777055830646, + "acc_norm": 0.45739910313901344, + "acc_norm_stderr": 0.033435777055830646 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4580152671755725, + "acc_stderr": 0.04369802690578756, + "acc_norm": 0.4580152671755725, + "acc_norm_stderr": 0.04369802690578756 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5656565656565656, + "acc_stderr": 0.035315058793591834, + "acc_norm": 0.5656565656565656, + "acc_norm_stderr": 0.035315058793591834 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.45517241379310347, + "acc_stderr": 0.04149886942192117, + "acc_norm": 0.45517241379310347, + "acc_norm_stderr": 0.04149886942192117 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.18627450980392157, + "acc_stderr": 0.038739587141493524, + "acc_norm": 0.18627450980392157, + "acc_norm_stderr": 0.038739587141493524 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5126050420168067, + "acc_stderr": 0.03246816765752174, + "acc_norm": 0.5126050420168067, + "acc_norm_stderr": 0.03246816765752174 + }, + 
"harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.46153846153846156, + "acc_stderr": 0.025275892070240634, + "acc_norm": 0.46153846153846156, + "acc_norm_stderr": 0.025275892070240634 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.59, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.59, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.04803752235190193, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.04803752235190193 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3694581280788177, + "acc_stderr": 0.03395970381998575, + "acc_norm": 0.3694581280788177, + "acc_norm_stderr": 0.03395970381998575 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.42258064516129035, + "acc_stderr": 0.02810096472427264, + "acc_norm": 0.42258064516129035, + "acc_norm_stderr": 0.02810096472427264 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7478632478632479, + "acc_stderr": 0.028447965476231022, + "acc_norm": 0.7478632478632479, + "acc_norm_stderr": 0.028447965476231022 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.44150943396226416, + "acc_stderr": 0.030561590426731833, + "acc_norm": 0.44150943396226416, + "acc_norm_stderr": 0.030561590426731833 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5272727272727272, + "acc_stderr": 0.04782001791380061, + "acc_norm": 0.5272727272727272, + "acc_norm_stderr": 0.04782001791380061 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3111111111111111, + "acc_stderr": 0.028226446749683515, + "acc_norm": 0.3111111111111111, + "acc_norm_stderr": 0.028226446749683515 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.03757949922943343, + "acc_norm": 0.304635761589404, + 
"acc_norm_stderr": 0.03757949922943343 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6019900497512438, + "acc_stderr": 0.03461199429040014, + "acc_norm": 0.6019900497512438, + "acc_norm_stderr": 0.03461199429040014 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.34104046242774566, + "acc_stderr": 0.03614665424180826, + "acc_norm": 0.34104046242774566, + "acc_norm_stderr": 0.03614665424180826 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.335978835978836, + "acc_stderr": 0.024326310529149145, + "acc_norm": 0.335978835978836, + "acc_norm_stderr": 0.024326310529149145 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3402777777777778, + "acc_stderr": 0.03962135573486219, + "acc_norm": 0.3402777777777778, + "acc_norm_stderr": 0.03962135573486219 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.56, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.56, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5086705202312138, + "acc_stderr": 0.0269150473553698, + "acc_norm": 0.5086705202312138, + "acc_norm_stderr": 0.0269150473553698 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.49693251533742333, + "acc_stderr": 0.03928297078179663, + "acc_norm": 0.49693251533742333, + "acc_norm_stderr": 0.03928297078179663 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4567901234567901, + "acc_stderr": 0.027716661650194038, + "acc_norm": 0.4567901234567901, + "acc_norm_stderr": 0.027716661650194038 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.48186528497409326, + "acc_stderr": 0.036060650018329185, + "acc_norm": 
0.48186528497409326, + "acc_norm_stderr": 0.036060650018329185 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.04185774424022057, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.04185774424022057 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.4972477064220184, + "acc_stderr": 0.02143699835976532, + "acc_norm": 0.4972477064220184, + "acc_norm_stderr": 0.02143699835976532 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3968253968253968, + "acc_stderr": 0.04375888492727061, + "acc_norm": 0.3968253968253968, + "acc_norm_stderr": 0.04375888492727061 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4673202614379085, + "acc_stderr": 0.028568699752225882, + "acc_norm": 0.4673202614379085, + "acc_norm_stderr": 0.028568699752225882 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6694214876033058, + "acc_stderr": 0.04294340845212095, + "acc_norm": 0.6694214876033058, + "acc_norm_stderr": 0.04294340845212095 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.42105263157894735, + "acc_stderr": 0.04017901275981748, + "acc_norm": 0.42105263157894735, + "acc_norm_stderr": 0.04017901275981748 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3660130718954248, + "acc_stderr": 0.019488025745529658, + "acc_norm": 0.3660130718954248, + "acc_norm_stderr": 0.019488025745529658 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.36879432624113473, + "acc_stderr": 0.02878222756134725, + "acc_norm": 0.36879432624113473, + "acc_norm_stderr": 0.02878222756134725 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.36607142857142855, + "acc_stderr": 0.045723723587374296, + "acc_norm": 0.36607142857142855, + "acc_norm_stderr": 0.045723723587374296 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 
0.35648148148148145, + "acc_stderr": 0.032664783315272714, + "acc_norm": 0.35648148148148145, + "acc_norm_stderr": 0.032664783315272714 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.3016759776536313, + "acc_stderr": 0.015350767572220285, + "acc_norm": 0.3016759776536313, + "acc_norm_stderr": 0.015350767572220285 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.57, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.57, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.33455882352941174, + "acc_stderr": 0.028661996202335307, + "acc_norm": 0.33455882352941174, + "acc_norm_stderr": 0.028661996202335307 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.46530612244897956, + "acc_stderr": 0.03193207024425314, + "acc_norm": 0.46530612244897956, + "acc_norm_stderr": 0.03193207024425314 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.569620253164557, + "acc_stderr": 0.03223017195937598, + "acc_norm": 0.569620253164557, + "acc_norm_stderr": 0.03223017195937598 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.34419817470664926, + "acc_stderr": 0.012134433741002575, + "acc_norm": 0.34419817470664926, + "acc_norm_stderr": 0.012134433741002575 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4068627450980392, + "acc_stderr": 0.03447891136353382, + "acc_norm": 0.4068627450980392, + "acc_norm_stderr": 0.03447891136353382 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3939393939393939, + "acc_stderr": 0.03815494308688929, + "acc_norm": 0.3939393939393939, + "acc_norm_stderr": 0.03815494308688929 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.32313341493268055, + "mc1_stderr": 0.016371836286454607, + "mc2": 0.5137325149564673, + "mc2_stderr": 
0.015714111156826572 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4085005903187721, + "acc_stderr": 0.01690006287942712, + "acc_norm": 0.4462809917355372, + "acc_norm_stderr": 0.017090852631668332 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, 
+ "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "ehartford/dolphin-2.2.1-mistral-7b", + "model_sha": "001b48e9aebffb395c698af47b6b48364cc3cbe8", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/eldogbbhed/Peagle-9b/result_2024-05-17 15:01:58.json b/eldogbbhed/Peagle-9b/result_2024-05-17 15:01:58.json new file mode 100644 index 0000000000000000000000000000000000000000..6fe661ca1db7aa9876f3d460c6d0811db9dbfc93 --- /dev/null +++ b/eldogbbhed/Peagle-9b/result_2024-05-17 15:01:58.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.36945392491467577, + "acc_stderr": 0.014104578366491894, + "acc_norm": 0.44112627986348124, + "acc_norm_stderr": 0.014509747749064666 + }, + "harness|ko_hellaswag|10": { + "acc": 0.38637721569408484, + "acc_stderr": 0.004859236191579794, + "acc_norm": 0.5053774148575981, + "acc_norm_stderr": 0.004989492828168525 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 
0.5029239766081871, + "acc_stderr": 0.03834759370936839, + "acc_norm": 0.5029239766081871, + "acc_norm_stderr": 0.03834759370936839 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5922330097087378, + "acc_stderr": 0.04865777570410768, + "acc_norm": 0.5922330097087378, + "acc_norm_stderr": 0.04865777570410768 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4623243933588761, + "acc_stderr": 0.017829131764287194, + "acc_norm": 0.4623243933588761, + "acc_norm_stderr": 0.017829131764287194 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.32592592592592595, + "acc_stderr": 0.04049122041702506, + "acc_norm": 0.32592592592592595, + "acc_norm_stderr": 0.04049122041702506 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.42127659574468085, + "acc_stderr": 0.03227834510146267, + "acc_norm": 0.42127659574468085, + "acc_norm_stderr": 0.03227834510146267 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42168674698795183, + "acc_stderr": 0.03844453181770917, + "acc_norm": 0.42168674698795183, + "acc_norm_stderr": 0.03844453181770917 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4405144694533762, + "acc_stderr": 0.028196400574197422, + "acc_norm": 0.4405144694533762, + "acc_norm_stderr": 0.028196400574197422 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4349775784753363, + "acc_stderr": 0.03327283370271344, + "acc_norm": 0.4349775784753363, + "acc_norm_stderr": 0.03327283370271344 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.45038167938931295, + "acc_stderr": 0.04363643698524779, + "acc_norm": 0.45038167938931295, + "acc_norm_stderr": 0.04363643698524779 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 
0.5656565656565656, + "acc_stderr": 0.03531505879359183, + "acc_norm": 0.5656565656565656, + "acc_norm_stderr": 0.03531505879359183 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.45517241379310347, + "acc_stderr": 0.04149886942192117, + "acc_norm": 0.45517241379310347, + "acc_norm_stderr": 0.04149886942192117 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.043364327079931785, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.043364327079931785 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5042016806722689, + "acc_stderr": 0.0324773433444811, + "acc_norm": 0.5042016806722689, + "acc_norm_stderr": 0.0324773433444811 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.46923076923076923, + "acc_stderr": 0.025302958890850154, + "acc_norm": 0.46923076923076923, + "acc_norm_stderr": 0.025302958890850154 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.62, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.62, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5462962962962963, + "acc_stderr": 0.04812917324536823, + "acc_norm": 0.5462962962962963, + "acc_norm_stderr": 0.04812917324536823 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3694581280788177, + "acc_stderr": 0.03395970381998575, + "acc_norm": 0.3694581280788177, + "acc_norm_stderr": 0.03395970381998575 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4290322580645161, + "acc_stderr": 0.02815603653823321, + "acc_norm": 0.4290322580645161, + "acc_norm_stderr": 0.02815603653823321 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7649572649572649, + "acc_stderr": 0.027778835904935434, + "acc_norm": 0.7649572649572649, + "acc_norm_stderr": 0.027778835904935434 + }, + 
"harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4528301886792453, + "acc_stderr": 0.03063562795796183, + "acc_norm": 0.4528301886792453, + "acc_norm_stderr": 0.03063562795796183 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4909090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.4909090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.02911661760608302, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.02911661760608302 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2781456953642384, + "acc_stderr": 0.03658603262763744, + "acc_norm": 0.2781456953642384, + "acc_norm_stderr": 0.03658603262763744 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.582089552238806, + "acc_stderr": 0.034875586404620636, + "acc_norm": 0.582089552238806, + "acc_norm_stderr": 0.034875586404620636 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.42196531791907516, + "acc_stderr": 0.0376574669386515, + "acc_norm": 0.42196531791907516, + "acc_norm_stderr": 0.0376574669386515 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.373015873015873, + "acc_stderr": 0.02490699045899257, + "acc_norm": 0.373015873015873, + "acc_norm_stderr": 0.02490699045899257 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.375, + "acc_stderr": 0.04048439222695598, + "acc_norm": 0.375, + "acc_norm_stderr": 0.04048439222695598 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.56, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.56, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5086705202312138, + "acc_stderr": 0.026915047355369804, + "acc_norm": 0.5086705202312138, + "acc_norm_stderr": 0.026915047355369804 + }, + 
"harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4723926380368098, + "acc_stderr": 0.0392237829061099, + "acc_norm": 0.4723926380368098, + "acc_norm_stderr": 0.0392237829061099 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.45987654320987653, + "acc_stderr": 0.027731022753539274, + "acc_norm": 0.45987654320987653, + "acc_norm_stderr": 0.027731022753539274 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5025906735751295, + "acc_stderr": 0.03608390745384487, + "acc_norm": 0.5025906735751295, + "acc_norm_stderr": 0.03608390745384487 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.32456140350877194, + "acc_stderr": 0.04404556157374768, + "acc_norm": 0.32456140350877194, + "acc_norm_stderr": 0.04404556157374768 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5009174311926605, + "acc_stderr": 0.021437287056051215, + "acc_norm": 0.5009174311926605, + "acc_norm_stderr": 0.021437287056051215 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.04360314860077459, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.04360314860077459 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.46078431372549017, + "acc_stderr": 0.028541722692618874, + "acc_norm": 0.46078431372549017, + "acc_norm_stderr": 0.028541722692618874 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6033057851239669, + "acc_stderr": 0.044658697805310094, + "acc_norm": 0.6033057851239669, + "acc_norm_stderr": 0.044658697805310094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.45394736842105265, + "acc_stderr": 0.04051646342874141, + "acc_norm": 0.45394736842105265, + "acc_norm_stderr": 
0.04051646342874141 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.39869281045751637, + "acc_stderr": 0.019808281317449845, + "acc_norm": 0.39869281045751637, + "acc_norm_stderr": 0.019808281317449845 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.37943262411347517, + "acc_stderr": 0.0289473388516141, + "acc_norm": 0.37943262411347517, + "acc_norm_stderr": 0.0289473388516141 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.04697113923010213, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.04697113923010213 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.39814814814814814, + "acc_stderr": 0.03338473403207401, + "acc_norm": 0.39814814814814814, + "acc_norm_stderr": 0.03338473403207401 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.31843575418994413, + "acc_stderr": 0.015581008080360274, + "acc_norm": 0.31843575418994413, + "acc_norm_stderr": 0.015581008080360274 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.66, + "acc_stderr": 0.04760952285695237, + "acc_norm": 0.66, + "acc_norm_stderr": 0.04760952285695237 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3639705882352941, + "acc_stderr": 0.02922719246003203, + "acc_norm": 0.3639705882352941, + "acc_norm_stderr": 0.02922719246003203 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5836734693877551, + "acc_stderr": 0.03155782816556165, + "acc_norm": 0.5836734693877551, + "acc_norm_stderr": 0.03155782816556165 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5949367088607594, + "acc_stderr": 0.031955147413706725, + "acc_norm": 0.5949367088607594, + "acc_norm_stderr": 0.031955147413706725 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3305084745762712, + 
"acc_stderr": 0.012014142101842975, + "acc_norm": 0.3305084745762712, + "acc_norm_stderr": 0.012014142101842975 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4264705882352941, + "acc_stderr": 0.034711579079534254, + "acc_norm": 0.4264705882352941, + "acc_norm_stderr": 0.034711579079534254 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.46060606060606063, + "acc_stderr": 0.03892207016552013, + "acc_norm": 0.46060606060606063, + "acc_norm_stderr": 0.03892207016552013 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.39167686658506734, + "mc1_stderr": 0.01708779588176962, + "mc2": 0.5707746431476897, + "mc2_stderr": 0.016300775025842606 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3907910271546635, + "acc_stderr": 0.01677529846510826, + "acc_norm": 0.3990554899645809, + "acc_norm_stderr": 0.0168363772928493 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + 
"harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "eldogbbhed/Peagle-9b", + "model_sha": "26e8229dbfb6c12d70ea7be34699c8007ad75228", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/eliceai/openchat-mevo-0626/result_2024-06-26 08:38:43.json 
b/eliceai/openchat-mevo-0626/result_2024-06-26 08:38:43.json new file mode 100644 index 0000000000000000000000000000000000000000..79d4b45b25b0c1bcd918aa51faab875e20224ff0 --- /dev/null +++ b/eliceai/openchat-mevo-0626/result_2024-06-26 08:38:43.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4197952218430034, + "acc_stderr": 0.014422181226303028, + "acc_norm": 0.4735494880546075, + "acc_norm_stderr": 0.014590931358120169 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4048994224258116, + "acc_stderr": 0.004898693652043317, + "acc_norm": 0.5347540330611432, + "acc_norm_stderr": 0.004977713073899311 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5964912280701754, + "acc_stderr": 0.03762738699917057, + "acc_norm": 0.5964912280701754, + "acc_norm_stderr": 0.03762738699917057 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6699029126213593, + "acc_stderr": 0.0465614711001235, + "acc_norm": 0.6699029126213593, + "acc_norm_stderr": 0.0465614711001235 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.598978288633461, + "acc_stderr": 0.01752613315012457, + "acc_norm": 0.598978288633461, + "acc_norm_stderr": 0.01752613315012457 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.04244633238353229, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.04244633238353229 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847415, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847415 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.5063829787234042, + "acc_stderr": 0.03268335899936336, + "acc_norm": 0.5063829787234042, + "acc_norm_stderr": 0.03268335899936336 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.41566265060240964, + "acc_stderr": 0.038367221765980515, + "acc_norm": 0.41566265060240964, + "acc_norm_stderr": 0.038367221765980515 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5466237942122186, + "acc_stderr": 
0.02827435985489424, + "acc_norm": 0.5466237942122186, + "acc_norm_stderr": 0.02827435985489424 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5246636771300448, + "acc_stderr": 0.03351695167652628, + "acc_norm": 0.5246636771300448, + "acc_norm_stderr": 0.03351695167652628 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5877862595419847, + "acc_stderr": 0.04317171194870255, + "acc_norm": 0.5877862595419847, + "acc_norm_stderr": 0.04317171194870255 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.48, + "acc_stderr": 0.05021167315686779, + "acc_norm": 0.48, + "acc_norm_stderr": 0.05021167315686779 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6313131313131313, + "acc_stderr": 0.03437305501980619, + "acc_norm": 0.6313131313131313, + "acc_norm_stderr": 0.03437305501980619 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5310344827586206, + "acc_stderr": 0.04158632762097828, + "acc_norm": 0.5310344827586206, + "acc_norm_stderr": 0.04158632762097828 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.4019607843137255, + "acc_stderr": 0.048786087144669955, + "acc_norm": 0.4019607843137255, + "acc_norm_stderr": 0.048786087144669955 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5966386554621849, + "acc_stderr": 0.03186608121408832, + "acc_norm": 0.5966386554621849, + "acc_norm_stderr": 0.03186608121408832 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5512820512820513, + "acc_stderr": 0.02521731518484649, + "acc_norm": 0.5512820512820513, + "acc_norm_stderr": 0.02521731518484649 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.62, + "acc_stderr": 0.0487831731214563, + "acc_norm": 0.62, + "acc_norm_stderr": 0.0487831731214563 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110175, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110175 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6574074074074074, + 
"acc_stderr": 0.045879047413018105, + "acc_norm": 0.6574074074074074, + "acc_norm_stderr": 0.045879047413018105 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.43842364532019706, + "acc_stderr": 0.03491207857486518, + "acc_norm": 0.43842364532019706, + "acc_norm_stderr": 0.03491207857486518 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5903225806451613, + "acc_stderr": 0.02797605491534737, + "acc_norm": 0.5903225806451613, + "acc_norm_stderr": 0.02797605491534737 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7606837606837606, + "acc_stderr": 0.027951826808924333, + "acc_norm": 0.7606837606837606, + "acc_norm_stderr": 0.027951826808924333 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5509433962264151, + "acc_stderr": 0.030612730713641092, + "acc_norm": 0.5509433962264151, + "acc_norm_stderr": 0.030612730713641092 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5545454545454546, + "acc_stderr": 0.047605488214603246, + "acc_norm": 0.5545454545454546, + "acc_norm_stderr": 0.047605488214603246 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3592592592592593, + "acc_stderr": 0.029252905927251976, + "acc_norm": 0.3592592592592593, + "acc_norm_stderr": 0.029252905927251976 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3509933774834437, + "acc_stderr": 0.03896981964257374, + "acc_norm": 0.3509933774834437, + "acc_norm_stderr": 0.03896981964257374 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6865671641791045, + "acc_stderr": 0.032801882053486414, + "acc_norm": 0.6865671641791045, + "acc_norm_stderr": 0.032801882053486414 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.43352601156069365, + "acc_stderr": 0.037786210790920545, + "acc_norm": 0.43352601156069365, + "acc_norm_stderr": 0.037786210790920545 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.41798941798941797, + "acc_stderr": 0.02540255550326091, + "acc_norm": 0.41798941798941797, + 
"acc_norm_stderr": 0.02540255550326091 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5, + "acc_stderr": 0.04181210050035455, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04181210050035455 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.73, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.73, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5635838150289018, + "acc_stderr": 0.026700545424943684, + "acc_norm": 0.5635838150289018, + "acc_norm_stderr": 0.026700545424943684 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5153374233128835, + "acc_stderr": 0.03926522378708843, + "acc_norm": 0.5153374233128835, + "acc_norm_stderr": 0.03926522378708843 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5308641975308642, + "acc_stderr": 0.02776768960683393, + "acc_norm": 0.5308641975308642, + "acc_norm_stderr": 0.02776768960683393 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6528497409326425, + "acc_stderr": 0.03435696168361355, + "acc_norm": 0.6528497409326425, + "acc_norm_stderr": 0.03435696168361355 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.38596491228070173, + "acc_stderr": 0.045796394220704355, + "acc_norm": 0.38596491228070173, + "acc_norm_stderr": 0.045796394220704355 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6513761467889908, + "acc_stderr": 0.020431254090714328, + "acc_norm": 0.6513761467889908, + "acc_norm_stderr": 0.020431254090714328 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4126984126984127, + "acc_stderr": 0.04403438954768177, + "acc_norm": 0.4126984126984127, + 
"acc_norm_stderr": 0.04403438954768177 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5522875816993464, + "acc_stderr": 0.028472938478033526, + "acc_norm": 0.5522875816993464, + "acc_norm_stderr": 0.028472938478033526 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.62, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.62, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6694214876033058, + "acc_stderr": 0.04294340845212093, + "acc_norm": 0.6694214876033058, + "acc_norm_stderr": 0.04294340845212093 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5263157894736842, + "acc_stderr": 0.040633027314866725, + "acc_norm": 0.5263157894736842, + "acc_norm_stderr": 0.040633027314866725 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.45751633986928103, + "acc_stderr": 0.02015468571259088, + "acc_norm": 0.45751633986928103, + "acc_norm_stderr": 0.02015468571259088 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.34397163120567376, + "acc_stderr": 0.028338017428611327, + "acc_norm": 0.34397163120567376, + "acc_norm_stderr": 0.028338017428611327 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.48214285714285715, + "acc_stderr": 0.047427623612430116, + "acc_norm": 0.48214285714285715, + "acc_norm_stderr": 0.047427623612430116 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5046296296296297, + "acc_stderr": 0.03409825519163572, + "acc_norm": 0.5046296296296297, + "acc_norm_stderr": 0.03409825519163572 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.22681564245810057, + "acc_stderr": 0.014005843570897897, + "acc_norm": 0.22681564245810057, + "acc_norm_stderr": 0.014005843570897897 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.66, + "acc_stderr": 
0.04760952285695237, + "acc_norm": 0.66, + "acc_norm_stderr": 0.04760952285695237 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4742647058823529, + "acc_stderr": 0.030332578094555033, + "acc_norm": 0.4742647058823529, + "acc_norm_stderr": 0.030332578094555033 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5795918367346938, + "acc_stderr": 0.031601069934496004, + "acc_norm": 0.5795918367346938, + "acc_norm_stderr": 0.031601069934496004 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.030685820596610812, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.030685820596610812 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3774445893089961, + "acc_stderr": 0.012380680911165792, + "acc_norm": 0.3774445893089961, + "acc_norm_stderr": 0.012380680911165792 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5735294117647058, + "acc_stderr": 0.03471157907953426, + "acc_norm": 0.5735294117647058, + "acc_norm_stderr": 0.03471157907953426 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6484848484848484, + "acc_stderr": 0.037282069986826503, + "acc_norm": 0.6484848484848484, + "acc_norm_stderr": 0.037282069986826503 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.29865361077111385, + "mc1_stderr": 0.016021570613768545, + "mc2": 0.4726835303488545, + "mc2_stderr": 0.0157590837700279 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5182998819362455, + "acc_stderr": 0.01717883663917775, + "acc_norm": 0.5419126328217237, + "acc_norm_stderr": 0.017129852117911147 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + 
"harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 
1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "eliceai/openchat-mevo-0626", + "model_sha": "acd3d03350684acd30da1d67a6239d30e0ce56bd", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/er1123090/T3Q_SOLAR_DARETIES_v1.0/result_2024-05-23 01:33:02.json b/er1123090/T3Q_SOLAR_DARETIES_v1.0/result_2024-05-23 01:33:02.json new file mode 100644 index 0000000000000000000000000000000000000000..0e673921215f7a646b0c17e6ad828228aac69bf1 --- /dev/null +++ b/er1123090/T3Q_SOLAR_DARETIES_v1.0/result_2024-05-23 01:33:02.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.7389078498293515, + "acc_stderr": 0.012835523909473854, + "acc_norm": 0.7627986348122867, + "acc_norm_stderr": 0.012430399829260858 + }, + "harness|ko_hellaswag|10": { + "acc": 0.7040430193188608, + "acc_stderr": 0.004555388371756651, + "acc_norm": 0.8012348137821151, + "acc_norm_stderr": 0.003982553164086223 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.03615507630310936, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.03615507630310936 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.7184466019417476, + "acc_stderr": 0.044532548363264673, + "acc_norm": 0.7184466019417476, + "acc_norm_stderr": 0.044532548363264673 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6730523627075351, + "acc_stderr": 0.016774908180131495, + "acc_norm": 0.6730523627075351, + "acc_norm_stderr": 0.016774908180131495 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 
0.45185185185185184, + "acc_stderr": 0.042992689054808624, + "acc_norm": 0.45185185185185184, + "acc_norm_stderr": 0.042992689054808624 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.44680851063829785, + "acc_stderr": 0.032500536843658404, + "acc_norm": 0.44680851063829785, + "acc_norm_stderr": 0.032500536843658404 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.463855421686747, + "acc_stderr": 0.03882310850890594, + "acc_norm": 0.463855421686747, + "acc_norm_stderr": 0.03882310850890594 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6237942122186495, + "acc_stderr": 0.02751392568354943, + "acc_norm": 0.6237942122186495, + "acc_norm_stderr": 0.02751392568354943 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.6367713004484304, + "acc_stderr": 0.03227790442850499, + "acc_norm": 0.6367713004484304, + "acc_norm_stderr": 0.03227790442850499 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5572519083969466, + "acc_stderr": 0.04356447202665069, + "acc_norm": 0.5572519083969466, + "acc_norm_stderr": 0.04356447202665069 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.51, + "acc_stderr": 0.050241839379569095, + "acc_norm": 0.51, + "acc_norm_stderr": 0.050241839379569095 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7171717171717171, + "acc_stderr": 0.03208779558786752, + "acc_norm": 0.7171717171717171, + "acc_norm_stderr": 0.03208779558786752 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5517241379310345, + "acc_stderr": 0.041443118108781526, + "acc_norm": 0.5517241379310345, + "acc_norm_stderr": 0.041443118108781526 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.37254901960784315, + "acc_stderr": 0.04810840148082635, + "acc_norm": 0.37254901960784315, + "acc_norm_stderr": 0.04810840148082635 + }, + 
"harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6176470588235294, + "acc_stderr": 0.031566630992154156, + "acc_norm": 0.6176470588235294, + "acc_norm_stderr": 0.031566630992154156 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.6025641025641025, + "acc_stderr": 0.024811920017903836, + "acc_norm": 0.6025641025641025, + "acc_norm_stderr": 0.024811920017903836 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.67, + "acc_stderr": 0.047258156262526094, + "acc_norm": 0.67, + "acc_norm_stderr": 0.047258156262526094 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6759259259259259, + "acc_stderr": 0.04524596007030048, + "acc_norm": 0.6759259259259259, + "acc_norm_stderr": 0.04524596007030048 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.43842364532019706, + "acc_stderr": 0.03491207857486518, + "acc_norm": 0.43842364532019706, + "acc_norm_stderr": 0.03491207857486518 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6096774193548387, + "acc_stderr": 0.027751256636969576, + "acc_norm": 0.6096774193548387, + "acc_norm_stderr": 0.027751256636969576 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.8205128205128205, + "acc_stderr": 0.02514093595033543, + "acc_norm": 0.8205128205128205, + "acc_norm_stderr": 0.02514093595033543 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5584905660377358, + "acc_stderr": 0.030561590426731833, + "acc_norm": 0.5584905660377358, + "acc_norm_stderr": 0.030561590426731833 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5818181818181818, + "acc_stderr": 0.047245774057315726, + "acc_norm": 0.5818181818181818, + "acc_norm_stderr": 0.047245774057315726 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.34814814814814815, + "acc_stderr": 0.029045600290616255, + "acc_norm": 
0.34814814814814815, + "acc_norm_stderr": 0.029045600290616255 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33112582781456956, + "acc_stderr": 0.038425817186598696, + "acc_norm": 0.33112582781456956, + "acc_norm_stderr": 0.038425817186598696 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7412935323383084, + "acc_stderr": 0.030965903123573037, + "acc_norm": 0.7412935323383084, + "acc_norm_stderr": 0.030965903123573037 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5606936416184971, + "acc_stderr": 0.03784271932887467, + "acc_norm": 0.5606936416184971, + "acc_norm_stderr": 0.03784271932887467 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.4470899470899471, + "acc_stderr": 0.025606723995777025, + "acc_norm": 0.4470899470899471, + "acc_norm_stderr": 0.025606723995777025 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5833333333333334, + "acc_stderr": 0.04122728707651282, + "acc_norm": 0.5833333333333334, + "acc_norm_stderr": 0.04122728707651282 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.71, + "acc_stderr": 0.04560480215720683, + "acc_norm": 0.71, + "acc_norm_stderr": 0.04560480215720683 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5924855491329479, + "acc_stderr": 0.026454578146931505, + "acc_norm": 0.5924855491329479, + "acc_norm_stderr": 0.026454578146931505 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5828220858895705, + "acc_stderr": 0.038741028598180814, + "acc_norm": 0.5828220858895705, + "acc_norm_stderr": 0.038741028598180814 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6481481481481481, + "acc_stderr": 0.026571483480719974, + "acc_norm": 0.6481481481481481, + "acc_norm_stderr": 0.026571483480719974 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 
0.04793724854411019, + "acc_norm": 0.35, + "acc_norm_stderr": 0.04793724854411019 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7461139896373057, + "acc_stderr": 0.03141024780565318, + "acc_norm": 0.7461139896373057, + "acc_norm_stderr": 0.03141024780565318 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.45614035087719296, + "acc_stderr": 0.046854730419077895, + "acc_norm": 0.45614035087719296, + "acc_norm_stderr": 0.046854730419077895 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.7174311926605504, + "acc_stderr": 0.01930424349770715, + "acc_norm": 0.7174311926605504, + "acc_norm_stderr": 0.01930424349770715 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.36507936507936506, + "acc_stderr": 0.043062412591271526, + "acc_norm": 0.36507936507936506, + "acc_norm_stderr": 0.043062412591271526 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5816993464052288, + "acc_stderr": 0.028245134024387296, + "acc_norm": 0.5816993464052288, + "acc_norm_stderr": 0.028245134024387296 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.73, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.73, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7107438016528925, + "acc_stderr": 0.041391127276354626, + "acc_norm": 0.7107438016528925, + "acc_norm_stderr": 0.041391127276354626 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.625, + "acc_stderr": 0.039397364351956274, + "acc_norm": 0.625, + "acc_norm_stderr": 0.039397364351956274 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5866013071895425, + "acc_stderr": 0.01992211568278668, + "acc_norm": 0.5866013071895425, + "acc_norm_stderr": 0.01992211568278668 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.4326241134751773, + "acc_stderr": 0.02955545423677885, + "acc_norm": 0.4326241134751773, + "acc_norm_stderr": 0.02955545423677885 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 
0.4107142857142857, + "acc_stderr": 0.04669510663875191, + "acc_norm": 0.4107142857142857, + "acc_norm_stderr": 0.04669510663875191 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.49537037037037035, + "acc_stderr": 0.03409825519163572, + "acc_norm": 0.49537037037037035, + "acc_norm_stderr": 0.03409825519163572 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.35977653631284917, + "acc_stderr": 0.016051419760310267, + "acc_norm": 0.35977653631284917, + "acc_norm_stderr": 0.016051419760310267 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.68, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.68, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5183823529411765, + "acc_stderr": 0.030352303395351964, + "acc_norm": 0.5183823529411765, + "acc_norm_stderr": 0.030352303395351964 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6489795918367347, + "acc_stderr": 0.030555316755573644, + "acc_norm": 0.6489795918367347, + "acc_norm_stderr": 0.030555316755573644 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7215189873417721, + "acc_stderr": 0.02917868230484253, + "acc_norm": 0.7215189873417721, + "acc_norm_stderr": 0.02917868230484253 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.45241199478487615, + "acc_stderr": 0.012712265105889136, + "acc_norm": 0.45241199478487615, + "acc_norm_stderr": 0.012712265105889136 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6568627450980392, + "acc_stderr": 0.03332139944668086, + "acc_norm": 0.6568627450980392, + "acc_norm_stderr": 0.03332139944668086 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6181818181818182, + "acc_stderr": 0.037937131711656344, + "acc_norm": 0.6181818181818182, + 
"acc_norm_stderr": 0.037937131711656344 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.7368421052631579, + "mc1_stderr": 0.015415241740237012, + "mc2": 0.8226561346387515, + "mc2_stderr": 0.013164902884597915 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.45454545454545453, + "acc_stderr": 0.017119172208061504, + "acc_norm": 0.4923258559622196, + "acc_norm_stderr": 0.01718832921965428 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + 
"harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "er1123090/T3Q_SOLAR_DARETIES_v1.0", + "model_sha": "eb819fae96a21a317fec1bc58778f20603244a2e", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/er1123090/T3Q_SOLAR_SLERP_v1.0/result_2024-05-23 14:36:21.json b/er1123090/T3Q_SOLAR_SLERP_v1.0/result_2024-05-23 14:36:21.json new file mode 100644 index 0000000000000000000000000000000000000000..19b2c5e2bdcc81a2329d515cead035ef57bd04cb --- /dev/null +++ b/er1123090/T3Q_SOLAR_SLERP_v1.0/result_2024-05-23 14:36:21.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.7559726962457338, + "acc_stderr": 0.012551447627856255, + "acc_norm": 0.7832764505119454, + "acc_norm_stderr": 0.012040156713481189 + }, + 
"harness|ko_hellaswag|10": { + "acc": 0.7315275841465844, + "acc_stderr": 0.004422590262385078, + "acc_norm": 0.8222465644293966, + "acc_norm_stderr": 0.0038152372699611337 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6842105263157895, + "acc_stderr": 0.03565079670708311, + "acc_norm": 0.6842105263157895, + "acc_norm_stderr": 0.03565079670708311 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.7864077669902912, + "acc_stderr": 0.040580420156460344, + "acc_norm": 0.7864077669902912, + "acc_norm_stderr": 0.040580420156460344 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6871008939974457, + "acc_stderr": 0.01658093594030403, + "acc_norm": 0.6871008939974457, + "acc_norm_stderr": 0.01658093594030403 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.043163785995113245, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.043163785995113245 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4808510638297872, + "acc_stderr": 0.03266204299064677, + "acc_norm": 0.4808510638297872, + "acc_norm_stderr": 0.03266204299064677 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4819277108433735, + "acc_stderr": 0.038899512528272166, + "acc_norm": 0.4819277108433735, + "acc_norm_stderr": 0.038899512528272166 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6302250803858521, + "acc_stderr": 0.027417996705630998, + "acc_norm": 0.6302250803858521, + "acc_norm_stderr": 0.027417996705630998 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.6367713004484304, + "acc_stderr": 0.03227790442850499, + "acc_norm": 0.6367713004484304, + "acc_norm_stderr": 0.03227790442850499 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5725190839694656, + "acc_stderr": 0.04338920305792401, + "acc_norm": 0.5725190839694656, + "acc_norm_stderr": 0.04338920305792401 
+ }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.53, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.53, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7575757575757576, + "acc_stderr": 0.030532892233932036, + "acc_norm": 0.7575757575757576, + "acc_norm_stderr": 0.030532892233932036 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5103448275862069, + "acc_stderr": 0.04165774775728763, + "acc_norm": 0.5103448275862069, + "acc_norm_stderr": 0.04165774775728763 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04690650298201942, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04690650298201942 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6134453781512605, + "acc_stderr": 0.031631458075523776, + "acc_norm": 0.6134453781512605, + "acc_norm_stderr": 0.031631458075523776 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.6230769230769231, + "acc_stderr": 0.024570975364225995, + "acc_norm": 0.6230769230769231, + "acc_norm_stderr": 0.024570975364225995 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.69, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.69, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6574074074074074, + "acc_stderr": 0.04587904741301811, + "acc_norm": 0.6574074074074074, + "acc_norm_stderr": 0.04587904741301811 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4482758620689655, + "acc_stderr": 0.03499113137676744, + "acc_norm": 0.4482758620689655, + "acc_norm_stderr": 0.03499113137676744 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6193548387096774, + "acc_stderr": 0.02762171783290703, + "acc_norm": 0.6193548387096774, + 
"acc_norm_stderr": 0.02762171783290703 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.8290598290598291, + "acc_stderr": 0.024662496845209804, + "acc_norm": 0.8290598290598291, + "acc_norm_stderr": 0.024662496845209804 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5811320754716981, + "acc_stderr": 0.030365050829115208, + "acc_norm": 0.5811320754716981, + "acc_norm_stderr": 0.030365050829115208 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6272727272727273, + "acc_stderr": 0.04631381319425465, + "acc_norm": 0.6272727272727273, + "acc_norm_stderr": 0.04631381319425465 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.34444444444444444, + "acc_stderr": 0.028972648884844267, + "acc_norm": 0.34444444444444444, + "acc_norm_stderr": 0.028972648884844267 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3509933774834437, + "acc_stderr": 0.03896981964257375, + "acc_norm": 0.3509933774834437, + "acc_norm_stderr": 0.03896981964257375 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.736318407960199, + "acc_stderr": 0.03115715086935557, + "acc_norm": 0.736318407960199, + "acc_norm_stderr": 0.03115715086935557 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5491329479768786, + "acc_stderr": 0.03794012674697029, + "acc_norm": 0.5491329479768786, + "acc_norm_stderr": 0.03794012674697029 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.4470899470899471, + "acc_stderr": 0.025606723995777025, + "acc_norm": 0.4470899470899471, + "acc_norm_stderr": 0.025606723995777025 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.6319444444444444, + "acc_stderr": 0.040329990539607195, + "acc_norm": 0.6319444444444444, + "acc_norm_stderr": 0.040329990539607195 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.75, + "acc_stderr": 
0.04351941398892446, + "acc_norm": 0.75, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5924855491329479, + "acc_stderr": 0.026454578146931505, + "acc_norm": 0.5924855491329479, + "acc_norm_stderr": 0.026454578146931505 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.6134969325153374, + "acc_stderr": 0.03825825548848607, + "acc_norm": 0.6134969325153374, + "acc_norm_stderr": 0.03825825548848607 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6512345679012346, + "acc_stderr": 0.02651759772446501, + "acc_norm": 0.6512345679012346, + "acc_norm_stderr": 0.02651759772446501 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.772020725388601, + "acc_stderr": 0.03027690994517826, + "acc_norm": 0.772020725388601, + "acc_norm_stderr": 0.03027690994517826 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.47368421052631576, + "acc_stderr": 0.046970851366478626, + "acc_norm": 0.47368421052631576, + "acc_norm_stderr": 0.046970851366478626 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.7192660550458716, + "acc_stderr": 0.01926605504587161, + "acc_norm": 0.7192660550458716, + "acc_norm_stderr": 0.01926605504587161 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.0442626668137991, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.0442626668137991 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.6013071895424836, + "acc_stderr": 0.028036092273891776, + "acc_norm": 0.6013071895424836, + "acc_norm_stderr": 0.028036092273891776 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.73, + "acc_stderr": 0.04461960433384739, + "acc_norm": 0.73, + "acc_norm_stderr": 0.04461960433384739 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7024793388429752, + 
"acc_stderr": 0.04173349148083499, + "acc_norm": 0.7024793388429752, + "acc_norm_stderr": 0.04173349148083499 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.6381578947368421, + "acc_stderr": 0.03910525752849725, + "acc_norm": 0.6381578947368421, + "acc_norm_stderr": 0.03910525752849725 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5702614379084967, + "acc_stderr": 0.020027122784928554, + "acc_norm": 0.5702614379084967, + "acc_norm_stderr": 0.020027122784928554 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.4397163120567376, + "acc_stderr": 0.02960991207559411, + "acc_norm": 0.4397163120567376, + "acc_norm_stderr": 0.02960991207559411 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4375, + "acc_stderr": 0.04708567521880525, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.04708567521880525 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.49537037037037035, + "acc_stderr": 0.03409825519163572, + "acc_norm": 0.49537037037037035, + "acc_norm_stderr": 0.03409825519163572 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.3541899441340782, + "acc_stderr": 0.015995644947299232, + "acc_norm": 0.3541899441340782, + "acc_norm_stderr": 0.015995644947299232 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.71, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.71, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5110294117647058, + "acc_stderr": 0.030365446477275668, + "acc_norm": 0.5110294117647058, + "acc_norm_stderr": 0.030365446477275668 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6530612244897959, + "acc_stderr": 0.030472526026726496, + "acc_norm": 0.6530612244897959, + "acc_norm_stderr": 0.030472526026726496 + }, + 
"harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7383966244725738, + "acc_stderr": 0.028609516716994934, + "acc_norm": 0.7383966244725738, + "acc_norm_stderr": 0.028609516716994934 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.4439374185136897, + "acc_stderr": 0.012689708167787679, + "acc_norm": 0.4439374185136897, + "acc_norm_stderr": 0.012689708167787679 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6764705882352942, + "acc_stderr": 0.03283472056108561, + "acc_norm": 0.6764705882352942, + "acc_norm_stderr": 0.03283472056108561 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.03756335775187896, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.03756335775187896 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.7747858017135862, + "mc1_stderr": 0.014623240768023505, + "mc2": 0.8439812762223845, + "mc2_stderr": 0.012380276065639064 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5041322314049587, + "acc_stderr": 0.017189767032130817, + "acc_norm": 0.5230224321133412, + "acc_norm_stderr": 0.017172121546727634 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + 
"harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "er1123090/T3Q_SOLAR_SLERP_v1.0", + "model_sha": "b0f2de9e646961cb76101f4657dc6fb16367a03d", + "model_dtype": "torch.float16", + 
"lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/etri-xainlp/SOLAR-10.7B-merge-dpo/result_2024-03-04 01:34:09.json b/etri-xainlp/SOLAR-10.7B-merge-dpo/result_2024-03-04 01:34:09.json new file mode 100644 index 0000000000000000000000000000000000000000..9522e03add10c09f7ee6344a5dca14d87d2f653e --- /dev/null +++ b/etri-xainlp/SOLAR-10.7B-merge-dpo/result_2024-03-04 01:34:09.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.5938566552901023, + "acc_stderr": 0.014351656690097865, + "acc_norm": 0.6646757679180887, + "acc_norm_stderr": 0.013796182947785562 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3868751244771958, + "acc_stderr": 0.0048603930119746775, + "acc_norm": 0.4998008364867556, + "acc_norm_stderr": 0.004989781015595467 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5146198830409356, + "acc_stderr": 0.03833185275213026, + "acc_norm": 0.5146198830409356, + "acc_norm_stderr": 0.03833185275213026 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6699029126213593, + "acc_stderr": 0.046561471100123514, + "acc_norm": 0.6699029126213593, + "acc_norm_stderr": 0.046561471100123514 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6206896551724138, + "acc_stderr": 0.01735126811754445, + "acc_norm": 0.6206896551724138, + "acc_norm_stderr": 0.01735126811754445 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.42962962962962964, + "acc_stderr": 0.04276349494376598, + "acc_norm": 0.42962962962962964, + "acc_norm_stderr": 0.04276349494376598 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4723404255319149, + "acc_stderr": 0.03263597118409769, + "acc_norm": 0.4723404255319149, + "acc_norm_stderr": 0.03263597118409769 + }, + 
"harness|ko_mmlu_virology|5": { + "acc": 0.4457831325301205, + "acc_stderr": 0.03869543323472101, + "acc_norm": 0.4457831325301205, + "acc_norm_stderr": 0.03869543323472101 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5627009646302251, + "acc_stderr": 0.028173917761762906, + "acc_norm": 0.5627009646302251, + "acc_norm_stderr": 0.028173917761762906 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4977578475336323, + "acc_stderr": 0.03355746535223263, + "acc_norm": 0.4977578475336323, + "acc_norm_stderr": 0.03355746535223263 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5801526717557252, + "acc_stderr": 0.043285772152629715, + "acc_norm": 0.5801526717557252, + "acc_norm_stderr": 0.043285772152629715 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6717171717171717, + "acc_stderr": 0.03345678422756776, + "acc_norm": 0.6717171717171717, + "acc_norm_stderr": 0.03345678422756776 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4896551724137931, + "acc_stderr": 0.04165774775728763, + "acc_norm": 0.4896551724137931, + "acc_norm_stderr": 0.04165774775728763 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.39215686274509803, + "acc_stderr": 0.04858083574266345, + "acc_norm": 0.39215686274509803, + "acc_norm_stderr": 0.04858083574266345 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5966386554621849, + "acc_stderr": 0.03186608121408831, + "acc_norm": 0.5966386554621849, + "acc_norm_stderr": 0.03186608121408831 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5487179487179488, + "acc_stderr": 0.02523038123893484, + "acc_norm": 0.5487179487179488, + "acc_norm_stderr": 0.02523038123893484 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.62, + "acc_stderr": 0.04878317312145631, + "acc_norm": 0.62, + 
"acc_norm_stderr": 0.04878317312145631 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6296296296296297, + "acc_stderr": 0.0466840803302493, + "acc_norm": 0.6296296296296297, + "acc_norm_stderr": 0.0466840803302493 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4236453201970443, + "acc_stderr": 0.03476725747649037, + "acc_norm": 0.4236453201970443, + "acc_norm_stderr": 0.03476725747649037 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4935483870967742, + "acc_stderr": 0.028441638233540515, + "acc_norm": 0.4935483870967742, + "acc_norm_stderr": 0.028441638233540515 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7564102564102564, + "acc_stderr": 0.02812096650391439, + "acc_norm": 0.7564102564102564, + "acc_norm_stderr": 0.02812096650391439 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5169811320754717, + "acc_stderr": 0.030755120364119905, + "acc_norm": 0.5169811320754717, + "acc_norm_stderr": 0.030755120364119905 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6454545454545455, + "acc_stderr": 0.045820048415054174, + "acc_norm": 0.6454545454545455, + "acc_norm_stderr": 0.045820048415054174 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.34074074074074073, + "acc_stderr": 0.028897748741131143, + "acc_norm": 0.34074074074074073, + "acc_norm_stderr": 0.028897748741131143 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3708609271523179, + "acc_stderr": 0.03943966699183629, + "acc_norm": 0.3708609271523179, + "acc_norm_stderr": 0.03943966699183629 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6865671641791045, + "acc_stderr": 0.032801882053486435, + "acc_norm": 0.6865671641791045, + "acc_norm_stderr": 0.032801882053486435 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.44508670520231214, + "acc_stderr": 
0.03789401760283648, + "acc_norm": 0.44508670520231214, + "acc_norm_stderr": 0.03789401760283648 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3915343915343915, + "acc_stderr": 0.025138091388851105, + "acc_norm": 0.3915343915343915, + "acc_norm_stderr": 0.025138091388851105 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4583333333333333, + "acc_stderr": 0.04166666666666665, + "acc_norm": 0.4583333333333333, + "acc_norm_stderr": 0.04166666666666665 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.69, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.69, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5202312138728323, + "acc_stderr": 0.02689704999638286, + "acc_norm": 0.5202312138728323, + "acc_norm_stderr": 0.02689704999638286 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5214723926380368, + "acc_stderr": 0.03924746876751129, + "acc_norm": 0.5214723926380368, + "acc_norm_stderr": 0.03924746876751129 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5277777777777778, + "acc_stderr": 0.027777777777777797, + "acc_norm": 0.5277777777777778, + "acc_norm_stderr": 0.027777777777777797 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5906735751295337, + "acc_stderr": 0.03548608168860807, + "acc_norm": 0.5906735751295337, + "acc_norm_stderr": 0.03548608168860807 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.41228070175438597, + "acc_stderr": 0.04630653203366596, + "acc_norm": 0.41228070175438597, + "acc_norm_stderr": 0.04630653203366596 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.653211009174312, + 
"acc_stderr": 0.020406097104093027, + "acc_norm": 0.653211009174312, + "acc_norm_stderr": 0.020406097104093027 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.5238095238095238, + "acc_stderr": 0.04467062628403273, + "acc_norm": 0.5238095238095238, + "acc_norm_stderr": 0.04467062628403273 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.565359477124183, + "acc_stderr": 0.028384256704883037, + "acc_norm": 0.565359477124183, + "acc_norm_stderr": 0.028384256704883037 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.55, + "acc_stderr": 0.04999999999999999, + "acc_norm": 0.55, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6528925619834711, + "acc_stderr": 0.043457245702925335, + "acc_norm": 0.6528925619834711, + "acc_norm_stderr": 0.043457245702925335 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5460526315789473, + "acc_stderr": 0.04051646342874143, + "acc_norm": 0.5460526315789473, + "acc_norm_stderr": 0.04051646342874143 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.46405228758169936, + "acc_stderr": 0.020175488765484036, + "acc_norm": 0.46405228758169936, + "acc_norm_stderr": 0.020175488765484036 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.36524822695035464, + "acc_stderr": 0.028723863853281288, + "acc_norm": 0.36524822695035464, + "acc_norm_stderr": 0.028723863853281288 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.45535714285714285, + "acc_stderr": 0.04726835553719099, + "acc_norm": 0.45535714285714285, + "acc_norm_stderr": 0.04726835553719099 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5601851851851852, + "acc_stderr": 0.03385177976044812, + "acc_norm": 0.5601851851851852, + "acc_norm_stderr": 0.03385177976044812 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.3452513966480447, + "acc_stderr": 0.015901432608930354, + "acc_norm": 0.3452513966480447, + "acc_norm_stderr": 0.015901432608930354 + }, + 
"harness|ko_mmlu_college_computer_science|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.62, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.62, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5625, + "acc_stderr": 0.030134614954403924, + "acc_norm": 0.5625, + "acc_norm_stderr": 0.030134614954403924 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6122448979591837, + "acc_stderr": 0.031192230726795656, + "acc_norm": 0.6122448979591837, + "acc_norm_stderr": 0.031192230726795656 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5738396624472574, + "acc_stderr": 0.03219035703131774, + "acc_norm": 0.5738396624472574, + "acc_norm_stderr": 0.03219035703131774 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.42698826597131684, + "acc_stderr": 0.012633353557534418, + "acc_norm": 0.42698826597131684, + "acc_norm_stderr": 0.012633353557534418 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5245098039215687, + "acc_stderr": 0.03505093194348798, + "acc_norm": 0.5245098039215687, + "acc_norm_stderr": 0.03505093194348798 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5212121212121212, + "acc_stderr": 0.03900828913737301, + "acc_norm": 0.5212121212121212, + "acc_norm_stderr": 0.03900828913737301 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.5397796817625459, + "mc1_stderr": 0.01744801722396088, + "mc2": 0.6491369387548298, + "mc2_stderr": 0.015408798895197046 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4592680047225502, + "acc_stderr": 0.017133218276537677, + "acc_norm": 0.577331759149941, + "acc_norm_stderr": 0.016983506079577607 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + 
"harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + 
"harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "etri-xainlp/SOLAR-10.7B-merge-dpo", + "model_sha": "2461725b0b09d18fe8739f56d09717d4139341af", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/etri-xainlp/SOLAR-10.7B-merge-dpo_v1/result_2024-03-22 06:44:26.json b/etri-xainlp/SOLAR-10.7B-merge-dpo_v1/result_2024-03-22 06:44:26.json new file mode 100644 index 0000000000000000000000000000000000000000..cc2210507d36caeee609f84bd9b57fb409966942 --- /dev/null +++ b/etri-xainlp/SOLAR-10.7B-merge-dpo_v1/result_2024-03-22 06:44:26.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.5571672354948806, + "acc_stderr": 0.014515573873348918, + "acc_norm": 0.6271331058020477, + "acc_norm_stderr": 0.014131176760131167 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4398526190001992, + "acc_stderr": 0.004953546708512323, + "acc_norm": 0.5872336188010356, + "acc_norm_stderr": 0.004913253031155696 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.631578947368421, + "acc_stderr": 0.036996580176568775, + "acc_norm": 0.631578947368421, + "acc_norm_stderr": 0.036996580176568775 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6601941747572816, + "acc_stderr": 0.046897659372781335, + "acc_norm": 0.6601941747572816, + "acc_norm_stderr": 
0.046897659372781335 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6526181353767561, + "acc_stderr": 0.017026671748655714, + "acc_norm": 0.6526181353767561, + "acc_norm_stderr": 0.017026671748655714 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4148148148148148, + "acc_stderr": 0.04256193767901407, + "acc_norm": 0.4148148148148148, + "acc_norm_stderr": 0.04256193767901407 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.502127659574468, + "acc_stderr": 0.03268572658667492, + "acc_norm": 0.502127659574468, + "acc_norm_stderr": 0.03268572658667492 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4939759036144578, + "acc_stderr": 0.03892212195333047, + "acc_norm": 0.4939759036144578, + "acc_norm_stderr": 0.03892212195333047 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6141479099678456, + "acc_stderr": 0.027648149599751464, + "acc_norm": 0.6141479099678456, + "acc_norm_stderr": 0.027648149599751464 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5874439461883408, + "acc_stderr": 0.03304062175449297, + "acc_norm": 0.5874439461883408, + "acc_norm_stderr": 0.03304062175449297 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5954198473282443, + "acc_stderr": 0.043046937953806645, + "acc_norm": 0.5954198473282443, + "acc_norm_stderr": 0.043046937953806645 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.48, + "acc_stderr": 0.05021167315686779, + "acc_norm": 0.48, + "acc_norm_stderr": 0.05021167315686779 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7222222222222222, + "acc_stderr": 0.03191178226713546, + "acc_norm": 0.7222222222222222, + "acc_norm_stderr": 0.03191178226713546 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5448275862068965, + "acc_stderr": 0.04149886942192117, + "acc_norm": 0.5448275862068965, + "acc_norm_stderr": 
0.04149886942192117 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.04488482852329017, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.04488482852329017 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6638655462184874, + "acc_stderr": 0.03068473711513536, + "acc_norm": 0.6638655462184874, + "acc_norm_stderr": 0.03068473711513536 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.6230769230769231, + "acc_stderr": 0.024570975364225995, + "acc_norm": 0.6230769230769231, + "acc_norm_stderr": 0.024570975364225995 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.62, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.62, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.44, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.44, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6851851851851852, + "acc_stderr": 0.04489931073591312, + "acc_norm": 0.6851851851851852, + "acc_norm_stderr": 0.04489931073591312 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.47783251231527096, + "acc_stderr": 0.035145285621750094, + "acc_norm": 0.47783251231527096, + "acc_norm_stderr": 0.035145285621750094 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6258064516129033, + "acc_stderr": 0.0275289042998457, + "acc_norm": 0.6258064516129033, + "acc_norm_stderr": 0.0275289042998457 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7991452991452992, + "acc_stderr": 0.026246772946890474, + "acc_norm": 0.7991452991452992, + "acc_norm_stderr": 0.026246772946890474 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.539622641509434, + "acc_stderr": 0.030676096599389184, + "acc_norm": 0.539622641509434, + "acc_norm_stderr": 0.030676096599389184 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6, + "acc_stderr": 0.0469237132203465, + "acc_norm": 0.6, + 
"acc_norm_stderr": 0.0469237132203465 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.027840811495871923, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.027840811495871923 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3509933774834437, + "acc_stderr": 0.03896981964257374, + "acc_norm": 0.3509933774834437, + "acc_norm_stderr": 0.03896981964257374 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7412935323383084, + "acc_stderr": 0.030965903123573037, + "acc_norm": 0.7412935323383084, + "acc_norm_stderr": 0.030965903123573037 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.48554913294797686, + "acc_stderr": 0.03810871630454764, + "acc_norm": 0.48554913294797686, + "acc_norm_stderr": 0.03810871630454764 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.4126984126984127, + "acc_stderr": 0.025355741263055266, + "acc_norm": 0.4126984126984127, + "acc_norm_stderr": 0.025355741263055266 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5, + "acc_stderr": 0.04181210050035455, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04181210050035455 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.72, + "acc_stderr": 0.04512608598542124, + "acc_norm": 0.72, + "acc_norm_stderr": 0.04512608598542124 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.6184971098265896, + "acc_stderr": 0.0261521986197268, + "acc_norm": 0.6184971098265896, + "acc_norm_stderr": 0.0261521986197268 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.558282208588957, + "acc_stderr": 0.039015918258361836, + "acc_norm": 0.558282208588957, + "acc_norm_stderr": 0.039015918258361836 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5864197530864198, + "acc_stderr": 0.027402042040269962, + "acc_norm": 0.5864197530864198, + 
"acc_norm_stderr": 0.027402042040269962 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.689119170984456, + "acc_stderr": 0.03340361906276585, + "acc_norm": 0.689119170984456, + "acc_norm_stderr": 0.03340361906276585 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.4473684210526316, + "acc_stderr": 0.04677473004491199, + "acc_norm": 0.4473684210526316, + "acc_norm_stderr": 0.04677473004491199 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6972477064220184, + "acc_stderr": 0.019698711434756353, + "acc_norm": 0.6972477064220184, + "acc_norm_stderr": 0.019698711434756353 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.5, + "acc_stderr": 0.04472135954999579, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04472135954999579 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5980392156862745, + "acc_stderr": 0.028074158947600666, + "acc_norm": 0.5980392156862745, + "acc_norm_stderr": 0.028074158947600666 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.71900826446281, + "acc_stderr": 0.04103203830514511, + "acc_norm": 0.71900826446281, + "acc_norm_stderr": 0.04103203830514511 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5986842105263158, + "acc_stderr": 0.039889037033362836, + "acc_norm": 0.5986842105263158, + "acc_norm_stderr": 0.039889037033362836 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5196078431372549, + "acc_stderr": 0.020212274976302957, + "acc_norm": 0.5196078431372549, + "acc_norm_stderr": 0.020212274976302957 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.425531914893617, + "acc_stderr": 0.02949482760014437, + "acc_norm": 
0.425531914893617, + "acc_norm_stderr": 0.02949482760014437 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.04697113923010213, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.04697113923010213 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5740740740740741, + "acc_stderr": 0.033723432716530645, + "acc_norm": 0.5740740740740741, + "acc_norm_stderr": 0.033723432716530645 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.3094972067039106, + "acc_stderr": 0.01546116900237154, + "acc_norm": 0.3094972067039106, + "acc_norm_stderr": 0.01546116900237154 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.78, + "acc_stderr": 0.04163331998932261, + "acc_norm": 0.78, + "acc_norm_stderr": 0.04163331998932261 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5330882352941176, + "acc_stderr": 0.030306257722468317, + "acc_norm": 0.5330882352941176, + "acc_norm_stderr": 0.030306257722468317 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.636734693877551, + "acc_stderr": 0.030789051139030806, + "acc_norm": 0.636734693877551, + "acc_norm_stderr": 0.030789051139030806 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7383966244725738, + "acc_stderr": 0.028609516716994934, + "acc_norm": 0.7383966244725738, + "acc_norm_stderr": 0.028609516716994934 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.42242503259452413, + "acc_stderr": 0.012615600475734928, + "acc_norm": 0.42242503259452413, + "acc_norm_stderr": 0.012615600475734928 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6862745098039216, + "acc_stderr": 0.03256685484460388, + "acc_norm": 0.6862745098039216, + "acc_norm_stderr": 0.03256685484460388 + }, + "harness|ko_mmlu_high_school_european_history|5": 
{ + "acc": 0.6545454545454545, + "acc_stderr": 0.037131580674819135, + "acc_norm": 0.6545454545454545, + "acc_norm_stderr": 0.037131580674819135 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.5152998776009792, + "mc1_stderr": 0.017495304473187902, + "mc2": 0.6505497402208059, + "mc2_stderr": 0.015606899222015132 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4911452184179457, + "acc_stderr": 0.017187658199336736, + "acc_norm": 0.49940968122786306, + "acc_norm_stderr": 0.01719034212344866 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + 
"harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "etri-xainlp/SOLAR-10.7B-merge-dpo_v1", + "model_sha": "c0a8a173af635ea13db4d4327f3e4fb9458a7fe4", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/etri-xainlp/SOLAR-10.7B-sft-dpo-v1/result_2024-04-16 01:12:54.json b/etri-xainlp/SOLAR-10.7B-sft-dpo-v1/result_2024-04-16 01:12:54.json new file mode 100644 index 0000000000000000000000000000000000000000..845c5076465179e87eacb63acb097a77b627604f --- /dev/null +++ b/etri-xainlp/SOLAR-10.7B-sft-dpo-v1/result_2024-04-16 01:12:54.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.7030716723549488, + 
"acc_stderr": 0.013352025976725225, + "acc_norm": 0.7448805460750854, + "acc_norm_stderr": 0.012739038695202109 + }, + "harness|ko_hellaswag|10": { + "acc": 0.5685122485560645, + "acc_stderr": 0.0049427160919960945, + "acc_norm": 0.7251543517227644, + "acc_norm_stderr": 0.00445524075581159 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.7251461988304093, + "acc_stderr": 0.034240429246915824, + "acc_norm": 0.7251461988304093, + "acc_norm_stderr": 0.034240429246915824 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.7572815533980582, + "acc_stderr": 0.042450224863844935, + "acc_norm": 0.7572815533980582, + "acc_norm_stderr": 0.042450224863844935 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.7075351213282248, + "acc_stderr": 0.016267000684598645, + "acc_norm": 0.7075351213282248, + "acc_norm_stderr": 0.016267000684598645 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45925925925925926, + "acc_stderr": 0.04304979692464244, + "acc_norm": 0.45925925925925926, + "acc_norm_stderr": 0.04304979692464244 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.5276595744680851, + "acc_stderr": 0.03263597118409769, + "acc_norm": 0.5276595744680851, + "acc_norm_stderr": 0.03263597118409769 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4939759036144578, + "acc_stderr": 0.03892212195333047, + "acc_norm": 0.4939759036144578, + "acc_norm_stderr": 0.03892212195333047 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6270096463022508, + "acc_stderr": 0.0274666102131401, + "acc_norm": 0.6270096463022508, + "acc_norm_stderr": 0.0274666102131401 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.6547085201793722, + "acc_stderr": 0.03191100192835794, + "acc_norm": 0.6547085201793722, + "acc_norm_stderr": 0.03191100192835794 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.549618320610687, 
+ "acc_stderr": 0.04363643698524779, + "acc_norm": 0.549618320610687, + "acc_norm_stderr": 0.04363643698524779 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956914, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956914 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7727272727272727, + "acc_stderr": 0.029857515673386414, + "acc_norm": 0.7727272727272727, + "acc_norm_stderr": 0.029857515673386414 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5448275862068965, + "acc_stderr": 0.04149886942192118, + "acc_norm": 0.5448275862068965, + "acc_norm_stderr": 0.04149886942192118 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.04617034827006716, + "acc_norm": 0.3137254901960784, + "acc_norm_stderr": 0.04617034827006716 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6260504201680672, + "acc_stderr": 0.03142946637883708, + "acc_norm": 0.6260504201680672, + "acc_norm_stderr": 0.03142946637883708 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.617948717948718, + "acc_stderr": 0.024635549163908237, + "acc_norm": 0.617948717948718, + "acc_norm_stderr": 0.024635549163908237 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.7, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.7, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.7037037037037037, + "acc_stderr": 0.044143436668549335, + "acc_norm": 0.7037037037037037, + "acc_norm_stderr": 0.044143436668549335 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4876847290640394, + "acc_stderr": 0.035169204442208966, + "acc_norm": 0.4876847290640394, + "acc_norm_stderr": 0.035169204442208966 + }, + "harness|ko_mmlu_high_school_biology|5": { + 
"acc": 0.6290322580645161, + "acc_stderr": 0.027480541887953593, + "acc_norm": 0.6290322580645161, + "acc_norm_stderr": 0.027480541887953593 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.8333333333333334, + "acc_stderr": 0.02441494730454368, + "acc_norm": 0.8333333333333334, + "acc_norm_stderr": 0.02441494730454368 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5924528301886792, + "acc_stderr": 0.030242233800854494, + "acc_norm": 0.5924528301886792, + "acc_norm_stderr": 0.030242233800854494 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.046075820907199756, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.046075820907199756 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.36666666666666664, + "acc_stderr": 0.029381620726465076, + "acc_norm": 0.36666666666666664, + "acc_norm_stderr": 0.029381620726465076 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3576158940397351, + "acc_stderr": 0.03913453431177258, + "acc_norm": 0.3576158940397351, + "acc_norm_stderr": 0.03913453431177258 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.736318407960199, + "acc_stderr": 0.03115715086935557, + "acc_norm": 0.736318407960199, + "acc_norm_stderr": 0.03115715086935557 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5375722543352601, + "acc_stderr": 0.03801685104524458, + "acc_norm": 0.5375722543352601, + "acc_norm_stderr": 0.03801685104524458 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.025680564640056882, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.025680564640056882 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.6458333333333334, + "acc_stderr": 0.039994111357535424, + "acc_norm": 0.6458333333333334, + "acc_norm_stderr": 0.039994111357535424 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.38, + "acc_norm_stderr": 
0.04878317312145633 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.79, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.79, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.6184971098265896, + "acc_stderr": 0.0261521986197268, + "acc_norm": 0.6184971098265896, + "acc_norm_stderr": 0.0261521986197268 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.6073619631901841, + "acc_stderr": 0.038367409078310294, + "acc_norm": 0.6073619631901841, + "acc_norm_stderr": 0.038367409078310294 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6697530864197531, + "acc_stderr": 0.026168298456732846, + "acc_norm": 0.6697530864197531, + "acc_norm_stderr": 0.026168298456732846 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7512953367875648, + "acc_stderr": 0.031195840877700304, + "acc_norm": 0.7512953367875648, + "acc_norm_stderr": 0.031195840877700304 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.49122807017543857, + "acc_stderr": 0.047028804320496165, + "acc_norm": 0.49122807017543857, + "acc_norm_stderr": 0.047028804320496165 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.7504587155963303, + "acc_stderr": 0.018553897629501617, + "acc_norm": 0.7504587155963303, + "acc_norm_stderr": 0.018553897629501617 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.47619047619047616, + "acc_stderr": 0.04467062628403273, + "acc_norm": 0.47619047619047616, + "acc_norm_stderr": 0.04467062628403273 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.630718954248366, + "acc_stderr": 0.027634176689602656, + "acc_norm": 0.630718954248366, + "acc_norm_stderr": 0.027634176689602656 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.7, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.7, + 
"acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7272727272727273, + "acc_stderr": 0.04065578140908705, + "acc_norm": 0.7272727272727273, + "acc_norm_stderr": 0.04065578140908705 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.6644736842105263, + "acc_stderr": 0.03842498559395268, + "acc_norm": 0.6644736842105263, + "acc_norm_stderr": 0.03842498559395268 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5898692810457516, + "acc_stderr": 0.019898412717635892, + "acc_norm": 0.5898692810457516, + "acc_norm_stderr": 0.019898412717635892 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.4219858156028369, + "acc_stderr": 0.02946218923337059, + "acc_norm": 0.4219858156028369, + "acc_norm_stderr": 0.02946218923337059 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.41964285714285715, + "acc_stderr": 0.04684099321077106, + "acc_norm": 0.41964285714285715, + "acc_norm_stderr": 0.04684099321077106 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.49537037037037035, + "acc_stderr": 0.03409825519163572, + "acc_norm": 0.49537037037037035, + "acc_norm_stderr": 0.03409825519163572 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.38324022346368714, + "acc_stderr": 0.016260159604429128, + "acc_norm": 0.38324022346368714, + "acc_norm_stderr": 0.016260159604429128 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.53, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.53, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.7, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.7, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5330882352941176, + "acc_stderr": 0.03030625772246831, + "acc_norm": 0.5330882352941176, + "acc_norm_stderr": 0.03030625772246831 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6938775510204082, + 
"acc_stderr": 0.02950489645459597, + "acc_norm": 0.6938775510204082, + "acc_norm_stderr": 0.02950489645459597 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7468354430379747, + "acc_stderr": 0.0283046579430353, + "acc_norm": 0.7468354430379747, + "acc_norm_stderr": 0.0283046579430353 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.45241199478487615, + "acc_stderr": 0.012712265105889136, + "acc_norm": 0.45241199478487615, + "acc_norm_stderr": 0.012712265105889136 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6617647058823529, + "acc_stderr": 0.0332057461294543, + "acc_norm": 0.6617647058823529, + "acc_norm_stderr": 0.0332057461294543 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6424242424242425, + "acc_stderr": 0.03742597043806587, + "acc_norm": 0.6424242424242425, + "acc_norm_stderr": 0.03742597043806587 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.7270501835985312, + "mc1_stderr": 0.015594753632006535, + "mc2": 0.8103624224425455, + "mc2_stderr": 0.012889381056726101 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5301062573789846, + "acc_stderr": 0.017159163590170216, + "acc_norm": 0.5466351829988194, + "acc_norm_stderr": 0.017115418225226865 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + 
"harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + 
"model_name": "etri-xainlp/SOLAR-10.7B-sft-dpo-v1", + "model_sha": "03bc92ba77ea2467217c59f169003120a7484e33", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/etri-xainlp/kor-llama2-13b-dpo/result_2024-04-24 02:03:06.json b/etri-xainlp/kor-llama2-13b-dpo/result_2024-04-24 02:03:06.json new file mode 100644 index 0000000000000000000000000000000000000000..17138778c372418da7ef749f79f0d8df991e90cb --- /dev/null +++ b/etri-xainlp/kor-llama2-13b-dpo/result_2024-04-24 02:03:06.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2909556313993174, + "acc_stderr": 0.013273077865907583, + "acc_norm": 0.34726962457337884, + "acc_norm_stderr": 0.013913034529620444 + }, + "harness|ko_hellaswag|10": { + "acc": 0.28589922326229833, + "acc_stderr": 0.004509181919322832, + "acc_norm": 0.35909181437960563, + "acc_norm_stderr": 0.004787537385153022 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.391812865497076, + "acc_stderr": 0.03743979825926401, + "acc_norm": 0.391812865497076, + "acc_norm_stderr": 0.03743979825926401 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5242718446601942, + "acc_stderr": 0.04944901092973779, + "acc_norm": 0.5242718446601942, + "acc_norm_stderr": 0.04944901092973779 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4125159642401022, + "acc_stderr": 0.01760414910867194, + "acc_norm": 0.4125159642401022, + "acc_norm_stderr": 0.01760414910867194 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3851851851851852, + "acc_stderr": 0.042039210401562783, + "acc_norm": 0.3851851851851852, + "acc_norm_stderr": 0.042039210401562783 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 
0.28936170212765955, + "acc_stderr": 0.029644006577009618, + "acc_norm": 0.28936170212765955, + "acc_norm_stderr": 0.029644006577009618 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.29518072289156627, + "acc_stderr": 0.035509201856896294, + "acc_norm": 0.29518072289156627, + "acc_norm_stderr": 0.035509201856896294 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4180064308681672, + "acc_stderr": 0.02801365189199507, + "acc_norm": 0.4180064308681672, + "acc_norm_stderr": 0.02801365189199507 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.31390134529147984, + "acc_stderr": 0.031146796482972465, + "acc_norm": 0.31390134529147984, + "acc_norm_stderr": 0.031146796482972465 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.35877862595419846, + "acc_stderr": 0.04206739313864908, + "acc_norm": 0.35877862595419846, + "acc_norm_stderr": 0.04206739313864908 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.4292929292929293, + "acc_stderr": 0.03526552724601198, + "acc_norm": 0.4292929292929293, + "acc_norm_stderr": 0.03526552724601198 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4413793103448276, + "acc_stderr": 0.04137931034482758, + "acc_norm": 0.4413793103448276, + "acc_norm_stderr": 0.04137931034482758 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.04389869956808778, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.04389869956808778 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.42016806722689076, + "acc_stderr": 0.03206183783236152, + "acc_norm": 0.42016806722689076, + "acc_norm_stderr": 0.03206183783236152 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3717948717948718, + "acc_stderr": 0.024503472557110946, + "acc_norm": 0.3717948717948718, + "acc_norm_stderr": 
0.024503472557110946 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542126, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542126 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.048262172941398944, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.048262172941398944 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.33497536945812806, + "acc_stderr": 0.033208527423483104, + "acc_norm": 0.33497536945812806, + "acc_norm_stderr": 0.033208527423483104 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3903225806451613, + "acc_stderr": 0.027751256636969576, + "acc_norm": 0.3903225806451613, + "acc_norm_stderr": 0.027751256636969576 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5256410256410257, + "acc_stderr": 0.03271298896811159, + "acc_norm": 0.5256410256410257, + "acc_norm_stderr": 0.03271298896811159 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.39245283018867927, + "acc_stderr": 0.030052580579557838, + "acc_norm": 0.39245283018867927, + "acc_norm_stderr": 0.030052580579557838 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4, + "acc_stderr": 0.0469237132203465, + "acc_norm": 0.4, + "acc_norm_stderr": 0.0469237132203465 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3037037037037037, + "acc_stderr": 0.02803792996911499, + "acc_norm": 0.3037037037037037, + "acc_norm_stderr": 0.02803792996911499 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.03780445850526732, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.03780445850526732 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5422885572139303, + "acc_stderr": 0.035228658640995975, + "acc_norm": 0.5422885572139303, + "acc_norm_stderr": 
0.035228658640995975 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.32947976878612717, + "acc_stderr": 0.03583901754736412, + "acc_norm": 0.32947976878612717, + "acc_norm_stderr": 0.03583901754736412 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.29365079365079366, + "acc_stderr": 0.023456037383982026, + "acc_norm": 0.29365079365079366, + "acc_norm_stderr": 0.023456037383982026 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3194444444444444, + "acc_stderr": 0.038990736873573344, + "acc_norm": 0.3194444444444444, + "acc_norm_stderr": 0.038990736873573344 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.34, + "acc_stderr": 0.047609522856952344, + "acc_norm": 0.34, + "acc_norm_stderr": 0.047609522856952344 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4277456647398844, + "acc_stderr": 0.026636539741116065, + "acc_norm": 0.4277456647398844, + "acc_norm_stderr": 0.026636539741116065 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3312883435582822, + "acc_stderr": 0.03697983910025588, + "acc_norm": 0.3312883435582822, + "acc_norm_stderr": 0.03697983910025588 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.026869490744815257, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.026869490744815257 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.40932642487046633, + "acc_stderr": 0.03548608168860807, + "acc_norm": 0.40932642487046633, + "acc_norm_stderr": 0.03548608168860807 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.20175438596491227, + "acc_stderr": 0.037752050135836386, + "acc_norm": 
0.20175438596491227, + "acc_norm_stderr": 0.037752050135836386 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3522935779816514, + "acc_stderr": 0.020480568843998997, + "acc_norm": 0.3522935779816514, + "acc_norm_stderr": 0.020480568843998997 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3412698412698413, + "acc_stderr": 0.04240799327574925, + "acc_norm": 0.3412698412698413, + "acc_norm_stderr": 0.04240799327574925 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4019607843137255, + "acc_stderr": 0.028074158947600656, + "acc_norm": 0.4019607843137255, + "acc_norm_stderr": 0.028074158947600656 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6446280991735537, + "acc_stderr": 0.04369236326573981, + "acc_norm": 0.6446280991735537, + "acc_norm_stderr": 0.04369236326573981 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40131578947368424, + "acc_stderr": 0.03988903703336284, + "acc_norm": 0.40131578947368424, + "acc_norm_stderr": 0.03988903703336284 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.315359477124183, + "acc_stderr": 0.018798086284886887, + "acc_norm": 0.315359477124183, + "acc_norm_stderr": 0.018798086284886887 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3262411347517731, + "acc_stderr": 0.02796845304356317, + "acc_norm": 0.3262411347517731, + "acc_norm_stderr": 0.02796845304356317 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.19642857142857142, + "acc_stderr": 0.03770970049347019, + "acc_norm": 0.19642857142857142, + "acc_norm_stderr": 0.03770970049347019 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.375, + "acc_stderr": 0.033016908987210894, + "acc_norm": 0.375, + "acc_norm_stderr": 0.033016908987210894 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 
0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.33088235294117646, + "acc_stderr": 0.028582709753898428, + "acc_norm": 0.33088235294117646, + "acc_norm_stderr": 0.028582709753898428 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3183673469387755, + "acc_stderr": 0.02982253379398207, + "acc_norm": 0.3183673469387755, + "acc_norm_stderr": 0.02982253379398207 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.4092827004219409, + "acc_stderr": 0.032007041833595914, + "acc_norm": 0.4092827004219409, + "acc_norm_stderr": 0.032007041833595914 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3005215123859192, + "acc_stderr": 0.011709918883039117, + "acc_norm": 0.3005215123859192, + "acc_norm_stderr": 0.011709918883039117 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.0319800166011507, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.0319800166011507 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2909090909090909, + "acc_stderr": 0.03546563019624335, + "acc_norm": 0.2909090909090909, + "acc_norm_stderr": 0.03546563019624335 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.22766217870257038, + "mc1_stderr": 0.014679255032111068, + "mc2": 0.3846998228340343, + "mc2_stderr": 0.014896695453770783 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.22195985832349469, + "acc_stderr": 0.014287394616821165, + "acc_norm": 0.3400236127508855, + "acc_norm_stderr": 0.016286717220737677 + } + }, + "versions": { + "all": 0, + 
"harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + 
"harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "etri-xainlp/kor-llama2-13b-dpo", + "model_sha": "f64d753da71bac879d5e234e2cd1389b093c05ca", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/etri-xainlp/llama2-12.8b_lora-dpo_v1/result_2024-01-17 02:23:13.json b/etri-xainlp/llama2-12.8b_lora-dpo_v1/result_2024-01-17 02:23:13.json new file mode 100644 index 0000000000000000000000000000000000000000..b3a0ace37cfa6ae0bcedef08d0901d7e1147ca1a --- /dev/null +++ b/etri-xainlp/llama2-12.8b_lora-dpo_v1/result_2024-01-17 02:23:13.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3532423208191126, + "acc_stderr": 0.013967822714840053, + "acc_norm": 0.39761092150170646, + "acc_norm_stderr": 0.014301752223279533 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3646683927504481, + "acc_stderr": 0.004803533333364227, + "acc_norm": 0.47460665206134234, + "acc_norm_stderr": 0.004983342213776255 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.43859649122807015, + "acc_stderr": 0.038057975055904594, + "acc_norm": 0.43859649122807015, + "acc_norm_stderr": 0.038057975055904594 + }, + 
"harness|ko_mmlu_management|5": { + "acc": 0.5048543689320388, + "acc_stderr": 0.04950504382128921, + "acc_norm": 0.5048543689320388, + "acc_norm_stderr": 0.04950504382128921 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.48531289910600256, + "acc_stderr": 0.017872248024429115, + "acc_norm": 0.48531289910600256, + "acc_norm_stderr": 0.017872248024429115 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3111111111111111, + "acc_stderr": 0.03999262876617723, + "acc_norm": 0.3111111111111111, + "acc_norm_stderr": 0.03999262876617723 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4127659574468085, + "acc_stderr": 0.03218471141400351, + "acc_norm": 0.4127659574468085, + "acc_norm_stderr": 0.03218471141400351 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3614457831325301, + "acc_stderr": 0.0374005938202932, + "acc_norm": 0.3614457831325301, + "acc_norm_stderr": 0.0374005938202932 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.41479099678456594, + "acc_stderr": 0.027982680459759553, + "acc_norm": 0.41479099678456594, + "acc_norm_stderr": 0.027982680459759553 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4304932735426009, + "acc_stderr": 0.033231973029429394, + "acc_norm": 0.4304932735426009, + "acc_norm_stderr": 0.033231973029429394 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.45038167938931295, + "acc_stderr": 0.04363643698524779, + "acc_norm": 0.45038167938931295, + "acc_norm_stderr": 0.04363643698524779 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.4595959595959596, + "acc_stderr": 0.035507024651313425, + "acc_norm": 0.4595959595959596, + "acc_norm_stderr": 0.035507024651313425 + }, + 
"harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3793103448275862, + "acc_stderr": 0.040434618619167466, + "acc_norm": 0.3793103448275862, + "acc_norm_stderr": 0.040434618619167466 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.04533838195929776, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.04533838195929776 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.36134453781512604, + "acc_stderr": 0.031204691225150016, + "acc_norm": 0.36134453781512604, + "acc_norm_stderr": 0.031204691225150016 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3923076923076923, + "acc_stderr": 0.02475600038213094, + "acc_norm": 0.3923076923076923, + "acc_norm_stderr": 0.02475600038213094 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.04832853553437056, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.04832853553437056 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3645320197044335, + "acc_stderr": 0.0338640574606209, + "acc_norm": 0.3645320197044335, + "acc_norm_stderr": 0.0338640574606209 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4032258064516129, + "acc_stderr": 0.02790615082604114, + "acc_norm": 0.4032258064516129, + "acc_norm_stderr": 0.02790615082604114 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6239316239316239, + "acc_stderr": 0.03173393632969482, + "acc_norm": 0.6239316239316239, + "acc_norm_stderr": 0.03173393632969482 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4377358490566038, + "acc_stderr": 0.03053333843046751, + "acc_norm": 0.4377358490566038, + 
"acc_norm_stderr": 0.03053333843046751 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4909090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.4909090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24444444444444444, + "acc_stderr": 0.02620276653465215, + "acc_norm": 0.24444444444444444, + "acc_norm_stderr": 0.02620276653465215 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.271523178807947, + "acc_stderr": 0.03631329803969653, + "acc_norm": 0.271523178807947, + "acc_norm_stderr": 0.03631329803969653 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.527363184079602, + "acc_stderr": 0.035302355173346824, + "acc_norm": 0.527363184079602, + "acc_norm_stderr": 0.035302355173346824 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.34104046242774566, + "acc_stderr": 0.03614665424180826, + "acc_norm": 0.34104046242774566, + "acc_norm_stderr": 0.03614665424180826 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2751322751322751, + "acc_stderr": 0.023000086859068646, + "acc_norm": 0.2751322751322751, + "acc_norm_stderr": 0.023000086859068646 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3125, + "acc_stderr": 0.038760854559127644, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.038760854559127644 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.55, + "acc_stderr": 0.05, + "acc_norm": 0.55, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4479768786127168, + "acc_stderr": 0.026772990653361826, + "acc_norm": 0.4479768786127168, + "acc_norm_stderr": 0.026772990653361826 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.39263803680981596, + "acc_stderr": 0.03836740907831029, + "acc_norm": 0.39263803680981596, + "acc_norm_stderr": 
0.03836740907831029 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4104938271604938, + "acc_stderr": 0.027371350925124768, + "acc_norm": 0.4104938271604938, + "acc_norm_stderr": 0.027371350925124768 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.44559585492227977, + "acc_stderr": 0.0358701498607566, + "acc_norm": 0.44559585492227977, + "acc_norm_stderr": 0.0358701498607566 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.042270544512321984, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.042270544512321984 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.44770642201834865, + "acc_stderr": 0.021319754962425462, + "acc_norm": 0.44770642201834865, + "acc_norm_stderr": 0.021319754962425462 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3412698412698413, + "acc_stderr": 0.04240799327574924, + "acc_norm": 0.3412698412698413, + "acc_norm_stderr": 0.04240799327574924 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.39869281045751637, + "acc_stderr": 0.028036092273891776, + "acc_norm": 0.39869281045751637, + "acc_norm_stderr": 0.028036092273891776 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6611570247933884, + "acc_stderr": 0.043207678075366705, + "acc_norm": 0.6611570247933884, + "acc_norm_stderr": 0.043207678075366705 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40131578947368424, + "acc_stderr": 0.039889037033362836, + "acc_norm": 0.40131578947368424, + "acc_norm_stderr": 0.039889037033362836 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3284313725490196, + "acc_stderr": 0.018999707383162666, + "acc_norm": 
0.3284313725490196, + "acc_norm_stderr": 0.018999707383162666 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2907801418439716, + "acc_stderr": 0.027090664368353178, + "acc_norm": 0.2907801418439716, + "acc_norm_stderr": 0.027090664368353178 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.26785714285714285, + "acc_stderr": 0.04203277291467761, + "acc_norm": 0.26785714285714285, + "acc_norm_stderr": 0.04203277291467761 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.03114144782353604, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.03114144782353604 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3125, + "acc_stderr": 0.02815637344037142, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.02815637344037142 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.37142857142857144, + "acc_stderr": 0.030932858792789855, + "acc_norm": 0.37142857142857144, + "acc_norm_stderr": 0.030932858792789855 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.4388185654008439, + "acc_stderr": 0.032302649315470375, + "acc_norm": 0.4388185654008439, + "acc_norm_stderr": 0.032302649315470375 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.28748370273794005, + "acc_stderr": 0.011559337355708502, + "acc_norm": 0.28748370273794005, + "acc_norm_stderr": 0.011559337355708502 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 
0.3627450980392157, + "acc_stderr": 0.033744993563193555, + "acc_norm": 0.3627450980392157, + "acc_norm_stderr": 0.033744993563193555 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.42424242424242425, + "acc_stderr": 0.038592681420702615, + "acc_norm": 0.42424242424242425, + "acc_norm_stderr": 0.038592681420702615 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.26805385556915545, + "mc1_stderr": 0.015506204722834562, + "mc2": 0.4240028394155252, + "mc2_stderr": 0.015261226819032392 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.44037780401416765, + "acc_stderr": 0.017067699774312987, + "acc_norm": 0.5336481700118064, + "acc_norm_stderr": 0.017151384117131862 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + 
"harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "etri-xainlp/llama2-12.8b_lora-dpo_v1", + "model_sha": "4d96e9636e7048735ca610056a493ee92474a65a", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/etri-xainlp/llama2-13b-dpo-test/result_2023-12-18 08:27:20.json b/etri-xainlp/llama2-13b-dpo-test/result_2023-12-18 08:27:20.json new file mode 100644 index 0000000000000000000000000000000000000000..7588deb475eca5005260012ee4ac068e1eb9f428 --- /dev/null +++ 
b/etri-xainlp/llama2-13b-dpo-test/result_2023-12-18 08:27:20.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3395904436860068, + "acc_stderr": 0.01383903976282016, + "acc_norm": 0.39505119453924914, + "acc_norm_stderr": 0.014285898292938177 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3646683927504481, + "acc_stderr": 0.004803533333364228, + "acc_norm": 0.46932881896036643, + "acc_norm_stderr": 0.004980384575535392 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4327485380116959, + "acc_stderr": 0.03799978644370607, + "acc_norm": 0.4327485380116959, + "acc_norm_stderr": 0.03799978644370607 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4854368932038835, + "acc_stderr": 0.04948637324026637, + "acc_norm": 0.4854368932038835, + "acc_norm_stderr": 0.04948637324026637 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.017867695938429778, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.017867695938429778 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3851851851851852, + "acc_stderr": 0.042039210401562783, + "acc_norm": 0.3851851851851852, + "acc_norm_stderr": 0.042039210401562783 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421255, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421255 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.37872340425531914, + "acc_stderr": 0.03170995606040655, + "acc_norm": 0.37872340425531914, + "acc_norm_stderr": 0.03170995606040655 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3313253012048193, + "acc_stderr": 0.03664314777288087, + "acc_norm": 0.3313253012048193, + "acc_norm_stderr": 0.03664314777288087 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.45980707395498394, + "acc_stderr": 0.028306190403305696, + "acc_norm": 0.45980707395498394, + "acc_norm_stderr": 0.028306190403305696 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4349775784753363, + 
"acc_stderr": 0.033272833702713445, + "acc_norm": 0.4349775784753363, + "acc_norm_stderr": 0.033272833702713445 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4732824427480916, + "acc_stderr": 0.04379024936553894, + "acc_norm": 0.4732824427480916, + "acc_norm_stderr": 0.04379024936553894 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.4898989898989899, + "acc_stderr": 0.03561625488673745, + "acc_norm": 0.4898989898989899, + "acc_norm_stderr": 0.03561625488673745 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3931034482758621, + "acc_stderr": 0.040703290137070705, + "acc_norm": 0.3931034482758621, + "acc_norm_stderr": 0.040703290137070705 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.043364327079931785, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.043364327079931785 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3739495798319328, + "acc_stderr": 0.031429466378837076, + "acc_norm": 0.3739495798319328, + "acc_norm_stderr": 0.031429466378837076 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.37435897435897436, + "acc_stderr": 0.024537591572830524, + "acc_norm": 0.37435897435897436, + "acc_norm_stderr": 0.024537591572830524 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5185185185185185, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.5185185185185185, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + 
"acc": 0.39408866995073893, + "acc_stderr": 0.03438157967036545, + "acc_norm": 0.39408866995073893, + "acc_norm_stderr": 0.03438157967036545 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.44516129032258067, + "acc_stderr": 0.028272410186214906, + "acc_norm": 0.44516129032258067, + "acc_norm_stderr": 0.028272410186214906 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5897435897435898, + "acc_stderr": 0.03222414045241107, + "acc_norm": 0.5897435897435898, + "acc_norm_stderr": 0.03222414045241107 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.46037735849056605, + "acc_stderr": 0.030676096599389174, + "acc_norm": 0.46037735849056605, + "acc_norm_stderr": 0.030676096599389174 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4636363636363636, + "acc_stderr": 0.04776449162396197, + "acc_norm": 0.4636363636363636, + "acc_norm_stderr": 0.04776449162396197 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2518518518518518, + "acc_stderr": 0.026466117538959916, + "acc_norm": 0.2518518518518518, + "acc_norm_stderr": 0.026466117538959916 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2781456953642384, + "acc_stderr": 0.03658603262763743, + "acc_norm": 0.2781456953642384, + "acc_norm_stderr": 0.03658603262763743 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5621890547263682, + "acc_stderr": 0.0350808011219984, + "acc_norm": 0.5621890547263682, + "acc_norm_stderr": 0.0350808011219984 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3063583815028902, + "acc_stderr": 0.03514942551267439, + "acc_norm": 0.3063583815028902, + "acc_norm_stderr": 0.03514942551267439 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.29365079365079366, + "acc_stderr": 0.02345603738398203, + "acc_norm": 0.29365079365079366, + "acc_norm_stderr": 0.02345603738398203 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3263888888888889, + "acc_stderr": 0.03921067198982266, + "acc_norm": 0.3263888888888889, + 
"acc_norm_stderr": 0.03921067198982266 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.58, + "acc_stderr": 0.04960449637488584, + "acc_norm": 0.58, + "acc_norm_stderr": 0.04960449637488584 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.47398843930635837, + "acc_stderr": 0.026882643434022885, + "acc_norm": 0.47398843930635837, + "acc_norm_stderr": 0.026882643434022885 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.39263803680981596, + "acc_stderr": 0.03836740907831029, + "acc_norm": 0.39263803680981596, + "acc_norm_stderr": 0.03836740907831029 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4104938271604938, + "acc_stderr": 0.027371350925124764, + "acc_norm": 0.4104938271604938, + "acc_norm_stderr": 0.027371350925124764 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5129533678756477, + "acc_stderr": 0.036072280610477486, + "acc_norm": 0.5129533678756477, + "acc_norm_stderr": 0.036072280610477486 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2982456140350877, + "acc_stderr": 0.043036840335373173, + "acc_norm": 0.2982456140350877, + "acc_norm_stderr": 0.043036840335373173 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.43119266055045874, + "acc_stderr": 0.021233365030319563, + "acc_norm": 0.43119266055045874, + "acc_norm_stderr": 0.021233365030319563 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.29365079365079366, + "acc_stderr": 0.04073524322147125, + "acc_norm": 0.29365079365079366, + "acc_norm_stderr": 0.04073524322147125 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4150326797385621, + "acc_stderr": 0.028213504177824093, + "acc_norm": 
0.4150326797385621, + "acc_norm_stderr": 0.028213504177824093 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6446280991735537, + "acc_stderr": 0.0436923632657398, + "acc_norm": 0.6446280991735537, + "acc_norm_stderr": 0.0436923632657398 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.45394736842105265, + "acc_stderr": 0.04051646342874142, + "acc_norm": 0.45394736842105265, + "acc_norm_stderr": 0.04051646342874142 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.315359477124183, + "acc_stderr": 0.018798086284886887, + "acc_norm": 0.315359477124183, + "acc_norm_stderr": 0.018798086284886887 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2730496453900709, + "acc_stderr": 0.02657786094330786, + "acc_norm": 0.2730496453900709, + "acc_norm_stderr": 0.02657786094330786 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25, + "acc_stderr": 0.04109974682633932, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04109974682633932 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.38425925925925924, + "acc_stderr": 0.03317354514310742, + "acc_norm": 0.38425925925925924, + "acc_norm_stderr": 0.03317354514310742 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2446927374301676, + "acc_stderr": 0.014378169884098424, + "acc_norm": 0.2446927374301676, + "acc_norm_stderr": 0.014378169884098424 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3088235294117647, + "acc_stderr": 0.02806499816704009, + "acc_norm": 0.3088235294117647, + 
"acc_norm_stderr": 0.02806499816704009 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.363265306122449, + "acc_stderr": 0.030789051139030806, + "acc_norm": 0.363265306122449, + "acc_norm_stderr": 0.030789051139030806 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.4050632911392405, + "acc_stderr": 0.031955147413706725, + "acc_norm": 0.4050632911392405, + "acc_norm_stderr": 0.031955147413706725 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3050847457627119, + "acc_stderr": 0.01175993961808546, + "acc_norm": 0.3050847457627119, + "acc_norm_stderr": 0.01175993961808546 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.034107853389047184, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.034107853389047184 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.42424242424242425, + "acc_stderr": 0.038592681420702615, + "acc_norm": 0.42424242424242425, + "acc_norm_stderr": 0.038592681420702615 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.23133414932680538, + "mc1_stderr": 0.014761945174862677, + "mc2": 0.3995235998430406, + "mc2_stderr": 0.01495528755082175 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4805194805194805, + "acc_stderr": 0.017177301992342558, + "acc_norm": 0.5407319952774499, + "acc_norm_stderr": 0.017133218276537673 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + 
"harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + 
"harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "etri-xainlp/llama2-13b-dpo-test", + "model_sha": "de694283acd14414b7309a9874a3d033926b22a5", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/etri-xainlp/llama2-13b-lima-sft-dpo/result_2024-02-26 03:23:30.json b/etri-xainlp/llama2-13b-lima-sft-dpo/result_2024-02-26 03:23:30.json new file mode 100644 index 0000000000000000000000000000000000000000..f42bc681b7b39f79b937432b583ad4e3166a40e2 --- /dev/null +++ b/etri-xainlp/llama2-13b-lima-sft-dpo/result_2024-02-26 03:23:30.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3839590443686007, + "acc_stderr": 0.01421244498065189, + "acc_norm": 0.4325938566552901, + "acc_norm_stderr": 0.01447800569418253 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4051981676956781, + "acc_stderr": 0.004899270310557995, + "acc_norm": 0.5107548297151961, + "acc_norm_stderr": 0.0049886269781730915 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5672514619883041, + "acc_stderr": 0.03799978644370608, + "acc_norm": 0.5672514619883041, + "acc_norm_stderr": 0.03799978644370608 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5048543689320388, + "acc_stderr": 0.049505043821289195, + "acc_norm": 0.5048543689320388, + "acc_norm_stderr": 0.049505043821289195 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5057471264367817, + "acc_stderr": 0.017878782326129227, + "acc_norm": 0.5057471264367817, + "acc_norm_stderr": 0.017878782326129227 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4, + "acc_stderr": 0.04232073695151589, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04232073695151589 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 
0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3617021276595745, + "acc_stderr": 0.031410821975962386, + "acc_norm": 0.3617021276595745, + "acc_norm_stderr": 0.031410821975962386 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4036144578313253, + "acc_stderr": 0.03819486140758398, + "acc_norm": 0.4036144578313253, + "acc_norm_stderr": 0.03819486140758398 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4405144694533762, + "acc_stderr": 0.028196400574197422, + "acc_norm": 0.4405144694533762, + "acc_norm_stderr": 0.028196400574197422 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4349775784753363, + "acc_stderr": 0.033272833702713445, + "acc_norm": 0.4349775784753363, + "acc_norm_stderr": 0.033272833702713445 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.46564885496183206, + "acc_stderr": 0.043749285605997376, + "acc_norm": 0.46564885496183206, + "acc_norm_stderr": 0.043749285605997376 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.035402943770953675, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.035402943770953675 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4, + "acc_stderr": 0.04082482904638628, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04082482904638628 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.044405219061793254, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.044405219061793254 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.453781512605042, + "acc_stderr": 0.03233943468182088, + "acc_norm": 0.453781512605042, + "acc_norm_stderr": 0.03233943468182088 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 
0.45897435897435895, + "acc_stderr": 0.025265525491284295, + "acc_norm": 0.45897435897435895, + "acc_norm_stderr": 0.025265525491284295 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.41, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.41, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5370370370370371, + "acc_stderr": 0.04820403072760628, + "acc_norm": 0.5370370370370371, + "acc_norm_stderr": 0.04820403072760628 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3497536945812808, + "acc_stderr": 0.03355400904969565, + "acc_norm": 0.3497536945812808, + "acc_norm_stderr": 0.03355400904969565 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.43548387096774194, + "acc_stderr": 0.02820622559150274, + "acc_norm": 0.43548387096774194, + "acc_norm_stderr": 0.02820622559150274 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6324786324786325, + "acc_stderr": 0.031585391577456365, + "acc_norm": 0.6324786324786325, + "acc_norm_stderr": 0.031585391577456365 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.41132075471698115, + "acc_stderr": 0.0302850092590098, + "acc_norm": 0.41132075471698115, + "acc_norm_stderr": 0.0302850092590098 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4818181818181818, + "acc_stderr": 0.04785964010794916, + "acc_norm": 0.4818181818181818, + "acc_norm_stderr": 0.04785964010794916 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2814814814814815, + "acc_stderr": 0.027420019350945277, + "acc_norm": 0.2814814814814815, + "acc_norm_stderr": 0.027420019350945277 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.03757949922943343, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.03757949922943343 + }, + 
"harness|ko_mmlu_sociology|5": { + "acc": 0.5970149253731343, + "acc_stderr": 0.03468343295111126, + "acc_norm": 0.5970149253731343, + "acc_norm_stderr": 0.03468343295111126 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.44508670520231214, + "acc_stderr": 0.03789401760283648, + "acc_norm": 0.44508670520231214, + "acc_norm_stderr": 0.03789401760283648 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.29894179894179895, + "acc_stderr": 0.023577604791655816, + "acc_norm": 0.29894179894179895, + "acc_norm_stderr": 0.023577604791655816 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4097222222222222, + "acc_stderr": 0.04112490974670787, + "acc_norm": 0.4097222222222222, + "acc_norm_stderr": 0.04112490974670787 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939098, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939098 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.47109826589595377, + "acc_stderr": 0.026874085883518348, + "acc_norm": 0.47109826589595377, + "acc_norm_stderr": 0.026874085883518348 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4233128834355828, + "acc_stderr": 0.03881891213334384, + "acc_norm": 0.4233128834355828, + "acc_norm_stderr": 0.03881891213334384 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4351851851851852, + "acc_stderr": 0.027586006221607715, + "acc_norm": 0.4351851851851852, + "acc_norm_stderr": 0.027586006221607715 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5284974093264249, + "acc_stderr": 0.03602573571288442, + "acc_norm": 0.5284974093264249, + "acc_norm_stderr": 
0.03602573571288442 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.20175438596491227, + "acc_stderr": 0.037752050135836386, + "acc_norm": 0.20175438596491227, + "acc_norm_stderr": 0.037752050135836386 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5137614678899083, + "acc_stderr": 0.02142920208987408, + "acc_norm": 0.5137614678899083, + "acc_norm_stderr": 0.02142920208987408 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3412698412698413, + "acc_stderr": 0.042407993275749255, + "acc_norm": 0.3412698412698413, + "acc_norm_stderr": 0.042407993275749255 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5163398692810458, + "acc_stderr": 0.02861462475280544, + "acc_norm": 0.5163398692810458, + "acc_norm_stderr": 0.02861462475280544 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.41, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.41, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5619834710743802, + "acc_stderr": 0.04529146804435792, + "acc_norm": 0.5619834710743802, + "acc_norm_stderr": 0.04529146804435792 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.46710526315789475, + "acc_stderr": 0.040601270352363966, + "acc_norm": 0.46710526315789475, + "acc_norm_stderr": 0.040601270352363966 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.34477124183006536, + "acc_stderr": 0.019228322018696647, + "acc_norm": 0.34477124183006536, + "acc_norm_stderr": 0.019228322018696647 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2978723404255319, + "acc_stderr": 0.027281608344469414, + "acc_norm": 0.2978723404255319, + "acc_norm_stderr": 0.027281608344469414 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25, + "acc_stderr": 0.04109974682633932, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04109974682633932 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.37962962962962965, + "acc_stderr": 0.03309682581119035, + "acc_norm": 
0.37962962962962965, + "acc_norm_stderr": 0.03309682581119035 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.3139664804469274, + "acc_stderr": 0.015521923933523644, + "acc_norm": 0.3139664804469274, + "acc_norm_stderr": 0.015521923933523644 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.40808823529411764, + "acc_stderr": 0.029855261393483927, + "acc_norm": 0.40808823529411764, + "acc_norm_stderr": 0.029855261393483927 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.46122448979591835, + "acc_stderr": 0.03191282052669277, + "acc_norm": 0.46122448979591835, + "acc_norm_stderr": 0.03191282052669277 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5569620253164557, + "acc_stderr": 0.03233532777533484, + "acc_norm": 0.5569620253164557, + "acc_norm_stderr": 0.03233532777533484 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.32659713168187743, + "acc_stderr": 0.011977676704715999, + "acc_norm": 0.32659713168187743, + "acc_norm_stderr": 0.011977676704715999 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4803921568627451, + "acc_stderr": 0.03506612560524866, + "acc_norm": 0.4803921568627451, + "acc_norm_stderr": 0.03506612560524866 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5151515151515151, + "acc_stderr": 0.03902551007374448, + "acc_norm": 0.5151515151515151, + "acc_norm_stderr": 0.03902551007374448 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3023255813953488, + "mc1_stderr": 0.016077509266133022, + "mc2": 0.4513551866701914, + "mc2_stderr": 0.01635798343677223 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.36717827626918537, + 
"acc_stderr": 0.016572727807458592, + "acc_norm": 0.3837072018890201, + "acc_norm_stderr": 0.016718924637231822 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + 
"harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "etri-xainlp/llama2-13b-lima-sft-dpo", + "model_sha": "c07120cc874d98a95899dfb03806dc87634e9268", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/etri-xainlp/llama2-13b-sft-dpo/result_2024-01-30 01:09:26.json b/etri-xainlp/llama2-13b-sft-dpo/result_2024-01-30 01:09:26.json new file mode 100644 index 0000000000000000000000000000000000000000..c07e487b66d05aab6078d5af2b62807929a4ef30 --- /dev/null +++ b/etri-xainlp/llama2-13b-sft-dpo/result_2024-01-30 01:09:26.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.41552901023890787, + "acc_stderr": 0.014401366641216384, + "acc_norm": 0.4539249146757679, + "acc_norm_stderr": 0.014549221105171867 + }, + "harness|ko_hellaswag|10": { + "acc": 0.42471619199362676, + "acc_stderr": 0.004932896472460568, + "acc_norm": 0.5493925512846046, + "acc_norm_stderr": 0.004965375341643131 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.49122807017543857, + "acc_stderr": 0.03834234744164993, + 
"acc_norm": 0.49122807017543857, + "acc_norm_stderr": 0.03834234744164993 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.47572815533980584, + "acc_stderr": 0.049449010929737795, + "acc_norm": 0.47572815533980584, + "acc_norm_stderr": 0.049449010929737795 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5031928480204342, + "acc_stderr": 0.017879598945933085, + "acc_norm": 0.5031928480204342, + "acc_norm_stderr": 0.017879598945933085 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37777777777777777, + "acc_stderr": 0.04188307537595853, + "acc_norm": 0.37777777777777777, + "acc_norm_stderr": 0.04188307537595853 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4, + "acc_stderr": 0.032025630761017346, + "acc_norm": 0.4, + "acc_norm_stderr": 0.032025630761017346 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4457831325301205, + "acc_stderr": 0.038695433234721015, + "acc_norm": 0.4457831325301205, + "acc_norm_stderr": 0.038695433234721015 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5048231511254019, + "acc_stderr": 0.028396770444111298, + "acc_norm": 0.5048231511254019, + "acc_norm_stderr": 0.028396770444111298 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.452914798206278, + "acc_stderr": 0.03340867501923324, + "acc_norm": 0.452914798206278, + "acc_norm_stderr": 0.03340867501923324 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48854961832061067, + "acc_stderr": 0.043841400240780176, + "acc_norm": 0.48854961832061067, + "acc_norm_stderr": 0.043841400240780176 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.035476014940069384, + "acc_norm": 
0.5454545454545454, + "acc_norm_stderr": 0.035476014940069384 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.42758620689655175, + "acc_stderr": 0.04122737111370331, + "acc_norm": 0.42758620689655175, + "acc_norm_stderr": 0.04122737111370331 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.04488482852329017, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.04488482852329017 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.46218487394957986, + "acc_stderr": 0.032385469487589795, + "acc_norm": 0.46218487394957986, + "acc_norm_stderr": 0.032385469487589795 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4461538461538462, + "acc_stderr": 0.025203571773028337, + "acc_norm": 0.4461538461538462, + "acc_norm_stderr": 0.025203571773028337 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3891625615763547, + "acc_stderr": 0.034304624161038716, + "acc_norm": 0.3891625615763547, + "acc_norm_stderr": 0.034304624161038716 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4612903225806452, + "acc_stderr": 0.02835863485983693, + "acc_norm": 0.4612903225806452, + "acc_norm_stderr": 0.02835863485983693 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6752136752136753, + "acc_stderr": 0.03067902276549883, + "acc_norm": 0.6752136752136753, + "acc_norm_stderr": 0.03067902276549883 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4528301886792453, 
+ "acc_stderr": 0.03063562795796182, + "acc_norm": 0.4528301886792453, + "acc_norm_stderr": 0.03063562795796182 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5818181818181818, + "acc_stderr": 0.04724577405731572, + "acc_norm": 0.5818181818181818, + "acc_norm_stderr": 0.04724577405731572 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.337037037037037, + "acc_stderr": 0.028820884666253252, + "acc_norm": 0.337037037037037, + "acc_norm_stderr": 0.028820884666253252 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33112582781456956, + "acc_stderr": 0.038425817186598696, + "acc_norm": 0.33112582781456956, + "acc_norm_stderr": 0.038425817186598696 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6069651741293532, + "acc_stderr": 0.0345368246603156, + "acc_norm": 0.6069651741293532, + "acc_norm_stderr": 0.0345368246603156 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.43352601156069365, + "acc_stderr": 0.03778621079092055, + "acc_norm": 0.43352601156069365, + "acc_norm_stderr": 0.03778621079092055 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3253968253968254, + "acc_stderr": 0.02413015829976262, + "acc_norm": 0.3253968253968254, + "acc_norm_stderr": 0.02413015829976262 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.039420826399272135, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.039420826399272135 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.476878612716763, + "acc_stderr": 0.026890297881303128, + "acc_norm": 0.476878612716763, + "acc_norm_stderr": 0.026890297881303128 + }, + "harness|ko_mmlu_logical_fallacies|5": { + 
"acc": 0.43558282208588955, + "acc_stderr": 0.03895632464138937, + "acc_norm": 0.43558282208588955, + "acc_norm_stderr": 0.03895632464138937 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4537037037037037, + "acc_stderr": 0.0277012284685426, + "acc_norm": 0.4537037037037037, + "acc_norm_stderr": 0.0277012284685426 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5077720207253886, + "acc_stderr": 0.03608003225569654, + "acc_norm": 0.5077720207253886, + "acc_norm_stderr": 0.03608003225569654 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.040969851398436695, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.040969851398436695 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5211009174311927, + "acc_stderr": 0.021418224754264643, + "acc_norm": 0.5211009174311927, + "acc_norm_stderr": 0.021418224754264643 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.042163702135578345, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.042163702135578345 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.47058823529411764, + "acc_stderr": 0.028580341065138293, + "acc_norm": 0.47058823529411764, + "acc_norm_stderr": 0.028580341065138293 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.043913262867240704, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.043913262867240704 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4868421052631579, + "acc_stderr": 0.04067533136309173, + "acc_norm": 0.4868421052631579, + "acc_norm_stderr": 0.04067533136309173 + }, + 
"harness|ko_mmlu_professional_psychology|5": { + "acc": 0.380718954248366, + "acc_stderr": 0.01964380155792481, + "acc_norm": 0.380718954248366, + "acc_norm_stderr": 0.01964380155792481 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.30141843971631205, + "acc_stderr": 0.027374128882631157, + "acc_norm": 0.30141843971631205, + "acc_norm_stderr": 0.027374128882631157 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.1875, + "acc_stderr": 0.0370468111477387, + "acc_norm": 0.1875, + "acc_norm_stderr": 0.0370468111477387 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.032757734861009996, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.032757734861009996 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24581005586592178, + "acc_stderr": 0.014400296429225612, + "acc_norm": 0.24581005586592178, + "acc_norm_stderr": 0.014400296429225612 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.47, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.47, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.36764705882352944, + "acc_stderr": 0.02928941340940319, + "acc_norm": 0.36764705882352944, + "acc_norm_stderr": 0.02928941340940319 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5265306122448979, + "acc_stderr": 0.03196412734523272, + "acc_norm": 0.5265306122448979, + "acc_norm_stderr": 0.03196412734523272 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5569620253164557, + "acc_stderr": 0.03233532777533484, + "acc_norm": 0.5569620253164557, + "acc_norm_stderr": 0.03233532777533484 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.35071707953063885, + "acc_stderr": 0.01218777337074152, + "acc_norm": 
0.35071707953063885, + "acc_norm_stderr": 0.01218777337074152 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.47549019607843135, + "acc_stderr": 0.035050931943487976, + "acc_norm": 0.47549019607843135, + "acc_norm_stderr": 0.035050931943487976 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4909090909090909, + "acc_stderr": 0.03903698647748441, + "acc_norm": 0.4909090909090909, + "acc_norm_stderr": 0.03903698647748441 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.29865361077111385, + "mc1_stderr": 0.016021570613768545, + "mc2": 0.44977419920178113, + "mc2_stderr": 0.015650664629093206 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4734356552538371, + "acc_stderr": 0.017166075717577747, + "acc_norm": 0.5312868949232585, + "acc_norm_stderr": 0.017156666859785466 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "etri-xainlp/llama2-13b-sft-dpo", + "model_sha": "7d67d9e20e49e71a8a69fdd07f9be93a5856b841", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/etri-xainlp/llama2-ko-13b-instruct-v1.1/result_2023-11-26 05:24:47.json b/etri-xainlp/llama2-ko-13b-instruct-v1.1/result_2023-11-26 
05:24:47.json new file mode 100644 index 0000000000000000000000000000000000000000..87760816a80b067460f9c60aff07df6584b2d2df --- /dev/null +++ b/etri-xainlp/llama2-ko-13b-instruct-v1.1/result_2023-11-26 05:24:47.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3447098976109215, + "acc_stderr": 0.013888816286782112, + "acc_norm": 0.39761092150170646, + "acc_norm_stderr": 0.014301752223279531 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3676558454491137, + "acc_stderr": 0.004811815959388828, + "acc_norm": 0.46574387572196774, + "acc_norm_stderr": 0.004978056798794866 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.42105263157894735, + "acc_stderr": 0.037867207062342145, + "acc_norm": 0.42105263157894735, + "acc_norm_stderr": 0.037867207062342145 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4174757281553398, + "acc_stderr": 0.04882840548212238, + "acc_norm": 0.4174757281553398, + "acc_norm_stderr": 0.04882840548212238 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4661558109833972, + "acc_stderr": 0.017838956009136802, + "acc_norm": 0.4661558109833972, + "acc_norm_stderr": 0.017838956009136802 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.039446241625011175, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.039446241625011175 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.33191489361702126, + "acc_stderr": 0.030783736757745647, + "acc_norm": 0.33191489361702126, + "acc_norm_stderr": 0.030783736757745647 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3072289156626506, + "acc_stderr": 0.03591566797824665, + "acc_norm": 0.3072289156626506, + "acc_norm_stderr": 0.03591566797824665 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.40836012861736337, + "acc_stderr": 0.027917050748484627, + 
"acc_norm": 0.40836012861736337, + "acc_norm_stderr": 0.027917050748484627 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3901345291479821, + "acc_stderr": 0.03273766725459157, + "acc_norm": 0.3901345291479821, + "acc_norm_stderr": 0.03273766725459157 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4122137404580153, + "acc_stderr": 0.04317171194870255, + "acc_norm": 0.4122137404580153, + "acc_norm_stderr": 0.04317171194870255 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.43434343434343436, + "acc_stderr": 0.03531505879359183, + "acc_norm": 0.43434343434343436, + "acc_norm_stderr": 0.03531505879359183 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.27586206896551724, + "acc_stderr": 0.037245636197746325, + "acc_norm": 0.27586206896551724, + "acc_norm_stderr": 0.037245636197746325 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.042801058373643966, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.042801058373643966 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.37815126050420167, + "acc_stderr": 0.031499305777849054, + "acc_norm": 0.37815126050420167, + "acc_norm_stderr": 0.031499305777849054 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3641025641025641, + "acc_stderr": 0.024396672985094778, + "acc_norm": 0.3641025641025641, + "acc_norm_stderr": 0.024396672985094778 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.04820403072760626, + 
"acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.04820403072760626 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.270935960591133, + "acc_stderr": 0.031270907132976984, + "acc_norm": 0.270935960591133, + "acc_norm_stderr": 0.031270907132976984 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.36451612903225805, + "acc_stderr": 0.02737987122994324, + "acc_norm": 0.36451612903225805, + "acc_norm_stderr": 0.02737987122994324 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.594017094017094, + "acc_stderr": 0.03217180182641087, + "acc_norm": 0.594017094017094, + "acc_norm_stderr": 0.03217180182641087 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.33584905660377357, + "acc_stderr": 0.029067220146644823, + "acc_norm": 0.33584905660377357, + "acc_norm_stderr": 0.029067220146644823 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4909090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.4909090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.026962424325073838, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.026962424325073838 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33112582781456956, + "acc_stderr": 0.038425817186598696, + "acc_norm": 0.33112582781456956, + "acc_norm_stderr": 0.038425817186598696 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.4975124378109453, + "acc_stderr": 0.03535490150137289, + "acc_norm": 0.4975124378109453, + "acc_norm_stderr": 0.03535490150137289 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3179190751445087, + "acc_stderr": 0.03550683989165581, + "acc_norm": 0.3179190751445087, + "acc_norm_stderr": 0.03550683989165581 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.29894179894179895, + "acc_stderr": 0.02357760479165581, + "acc_norm": 0.29894179894179895, + "acc_norm_stderr": 0.02357760479165581 + }, + 
"harness|ko_mmlu_college_biology|5": { + "acc": 0.3541666666666667, + "acc_stderr": 0.039994111357535424, + "acc_norm": 0.3541666666666667, + "acc_norm_stderr": 0.039994111357535424 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.38439306358381503, + "acc_stderr": 0.026189666966272035, + "acc_norm": 0.38439306358381503, + "acc_norm_stderr": 0.026189666966272035 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3987730061349693, + "acc_stderr": 0.03847021420456023, + "acc_norm": 0.3987730061349693, + "acc_norm_stderr": 0.03847021420456023 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.02733954664066273, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.02733954664066273 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.47668393782383417, + "acc_stderr": 0.03604513672442206, + "acc_norm": 0.47668393782383417, + "acc_norm_stderr": 0.03604513672442206 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2894736842105263, + "acc_stderr": 0.04266339443159395, + "acc_norm": 0.2894736842105263, + "acc_norm_stderr": 0.04266339443159395 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.46972477064220186, + "acc_stderr": 0.021397988604936965, + "acc_norm": 0.46972477064220186, + "acc_norm_stderr": 0.021397988604936965 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30158730158730157, + "acc_stderr": 0.04104947269903394, + "acc_norm": 0.30158730158730157, + "acc_norm_stderr": 
0.04104947269903394 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3660130718954248, + "acc_stderr": 0.027582811415159596, + "acc_norm": 0.3660130718954248, + "acc_norm_stderr": 0.027582811415159596 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5371900826446281, + "acc_stderr": 0.04551711196104218, + "acc_norm": 0.5371900826446281, + "acc_norm_stderr": 0.04551711196104218 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.35526315789473684, + "acc_stderr": 0.03894734487013316, + "acc_norm": 0.35526315789473684, + "acc_norm_stderr": 0.03894734487013316 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.30718954248366015, + "acc_stderr": 0.01866335967146367, + "acc_norm": 0.30718954248366015, + "acc_norm_stderr": 0.01866335967146367 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.30141843971631205, + "acc_stderr": 0.02737412888263115, + "acc_norm": 0.30141843971631205, + "acc_norm_stderr": 0.02737412888263115 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.1875, + "acc_stderr": 0.0370468111477387, + "acc_norm": 0.1875, + "acc_norm_stderr": 0.0370468111477387 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.0316746870682898, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.0316746870682898 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24692737430167597, + "acc_stderr": 0.014422292204808852, + "acc_norm": 0.24692737430167597, + "acc_norm_stderr": 0.014422292204808852 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 
0.049236596391733084 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.35294117647058826, + "acc_stderr": 0.029029422815681407, + "acc_norm": 0.35294117647058826, + "acc_norm_stderr": 0.029029422815681407 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3673469387755102, + "acc_stderr": 0.030862144921087565, + "acc_norm": 0.3673469387755102, + "acc_norm_stderr": 0.030862144921087565 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5063291139240507, + "acc_stderr": 0.0325446201076786, + "acc_norm": 0.5063291139240507, + "acc_norm_stderr": 0.0325446201076786 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.35071707953063885, + "acc_stderr": 0.012187773370741523, + "acc_norm": 0.35071707953063885, + "acc_norm_stderr": 0.012187773370741523 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.0346022832723917, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.0346022832723917 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.503030303030303, + "acc_stderr": 0.03904272341431856, + "acc_norm": 0.503030303030303, + "acc_norm_stderr": 0.03904272341431856 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2594859241126071, + "mc1_stderr": 0.015345409485557956, + "mc2": 0.4195624279144106, + "mc2_stderr": 0.015536654449711767 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3541912632821724, + "acc_stderr": 0.016443175749214757, + "acc_norm": 0.39669421487603307, + "acc_norm_stderr": 0.016819438642971404 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + 
"harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + 
"harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "etri-xainlp/llama2-ko-13b-instruct-v1.1", + "model_sha": "159f0e387cfaff0e87278a95af4803ae9dd8b718", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/etri-xainlp/llama2-ko-13b-instruct-v1.2/result_2023-11-28 02:12:30.json b/etri-xainlp/llama2-ko-13b-instruct-v1.2/result_2023-11-28 02:12:30.json new file mode 100644 index 0000000000000000000000000000000000000000..8ea95a96c8ed3c9bbd1da10d68a82f3ea7c2c8f2 --- /dev/null +++ b/etri-xainlp/llama2-ko-13b-instruct-v1.2/result_2023-11-28 02:12:30.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3378839590443686, + "acc_stderr": 0.013822047922283509, + "acc_norm": 0.37542662116040953, + "acc_norm_stderr": 0.01415063143511173 + }, + "harness|ko_hellaswag|10": { + "acc": 0.36317466640111534, + "acc_stderr": 0.004799317209902019, + "acc_norm": 0.45867357100179246, + "acc_norm_stderr": 0.004972708369656541 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.34502923976608185, + "acc_stderr": 0.03645981377388806, + "acc_norm": 0.34502923976608185, + "acc_norm_stderr": 0.03645981377388806 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.44660194174757284, + "acc_stderr": 0.04922424153458933, + "acc_norm": 0.44660194174757284, + "acc_norm_stderr": 0.04922424153458933 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.47126436781609193, + "acc_stderr": 0.017850410794380166, + "acc_norm": 0.47126436781609193, + "acc_norm_stderr": 0.017850410794380166 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34814814814814815, + 
"acc_stderr": 0.041153246103369526, + "acc_norm": 0.34814814814814815, + "acc_norm_stderr": 0.041153246103369526 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.33617021276595743, + "acc_stderr": 0.030881618520676942, + "acc_norm": 0.33617021276595743, + "acc_norm_stderr": 0.030881618520676942 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.28313253012048195, + "acc_stderr": 0.0350729543137052, + "acc_norm": 0.28313253012048195, + "acc_norm_stderr": 0.0350729543137052 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3890675241157556, + "acc_stderr": 0.027690337536485376, + "acc_norm": 0.3890675241157556, + "acc_norm_stderr": 0.027690337536485376 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.452914798206278, + "acc_stderr": 0.03340867501923324, + "acc_norm": 0.452914798206278, + "acc_norm_stderr": 0.03340867501923324 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2900763358778626, + "acc_stderr": 0.03980066246467766, + "acc_norm": 0.2900763358778626, + "acc_norm_stderr": 0.03980066246467766 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.41414141414141414, + "acc_stderr": 0.03509438348879629, + "acc_norm": 0.41414141414141414, + "acc_norm_stderr": 0.03509438348879629 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.33793103448275863, + "acc_stderr": 0.0394170763206489, + "acc_norm": 0.33793103448275863, + "acc_norm_stderr": 0.0394170763206489 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.042801058373643966, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.042801058373643966 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + 
"acc": 0.40756302521008403, + "acc_stderr": 0.031918633744784645, + "acc_norm": 0.40756302521008403, + "acc_norm_stderr": 0.031918633744784645 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3923076923076923, + "acc_stderr": 0.024756000382130945, + "acc_norm": 0.3923076923076923, + "acc_norm_stderr": 0.024756000382130945 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.04820403072760626, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.04820403072760626 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.32019704433497537, + "acc_stderr": 0.032826493853041504, + "acc_norm": 0.32019704433497537, + "acc_norm_stderr": 0.032826493853041504 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.36451612903225805, + "acc_stderr": 0.027379871229943238, + "acc_norm": 0.36451612903225805, + "acc_norm_stderr": 0.027379871229943238 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5, + "acc_stderr": 0.03275608910402091, + "acc_norm": 0.5, + "acc_norm_stderr": 0.03275608910402091 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.35471698113207545, + "acc_stderr": 0.029445175328199586, + "acc_norm": 0.35471698113207545, + "acc_norm_stderr": 0.029445175328199586 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.43636363636363634, + "acc_stderr": 0.04750185058907297, + "acc_norm": 0.43636363636363634, + "acc_norm_stderr": 0.04750185058907297 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.026962424325073824, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.026962424325073824 + }, + 
"harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2582781456953642, + "acc_stderr": 0.035737053147634576, + "acc_norm": 0.2582781456953642, + "acc_norm_stderr": 0.035737053147634576 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.48756218905472637, + "acc_stderr": 0.0353443984853958, + "acc_norm": 0.48756218905472637, + "acc_norm_stderr": 0.0353443984853958 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.32947976878612717, + "acc_stderr": 0.03583901754736411, + "acc_norm": 0.32947976878612717, + "acc_norm_stderr": 0.03583901754736411 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.32275132275132273, + "acc_stderr": 0.024078943243597016, + "acc_norm": 0.32275132275132273, + "acc_norm_stderr": 0.024078943243597016 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.039420826399272135, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.039420826399272135 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.3439306358381503, + "acc_stderr": 0.025574123786546672, + "acc_norm": 0.3439306358381503, + "acc_norm_stderr": 0.025574123786546672 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.37423312883435583, + "acc_stderr": 0.038020681028996146, + "acc_norm": 0.37423312883435583, + "acc_norm_stderr": 0.038020681028996146 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.39814814814814814, + "acc_stderr": 0.02723741509459247, + "acc_norm": 0.39814814814814814, + "acc_norm_stderr": 0.02723741509459247 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + 
}, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.44041450777202074, + "acc_stderr": 0.035827245300360945, + "acc_norm": 0.44041450777202074, + "acc_norm_stderr": 0.035827245300360945 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.04049339297748141, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.04049339297748141 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.43853211009174314, + "acc_stderr": 0.02127471307395458, + "acc_norm": 0.43853211009174314, + "acc_norm_stderr": 0.02127471307395458 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2698412698412698, + "acc_stderr": 0.03970158273235173, + "acc_norm": 0.2698412698412698, + "acc_norm_stderr": 0.03970158273235173 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4150326797385621, + "acc_stderr": 0.028213504177824096, + "acc_norm": 0.4150326797385621, + "acc_norm_stderr": 0.028213504177824096 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956913, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956913 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.4793388429752066, + "acc_stderr": 0.04560456086387235, + "acc_norm": 0.4793388429752066, + "acc_norm_stderr": 0.04560456086387235 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3355263157894737, + "acc_stderr": 0.038424985593952694, + "acc_norm": 0.3355263157894737, + "acc_norm_stderr": 0.038424985593952694 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.32679738562091504, + "acc_stderr": 0.01897542792050721, + "acc_norm": 0.32679738562091504, + "acc_norm_stderr": 0.01897542792050721 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.26595744680851063, + "acc_stderr": 0.026358065698880592, + "acc_norm": 0.26595744680851063, + "acc_norm_stderr": 0.026358065698880592 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25892857142857145, + "acc_stderr": 0.04157751539865629, 
+ "acc_norm": 0.25892857142857145, + "acc_norm_stderr": 0.04157751539865629 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.33796296296296297, + "acc_stderr": 0.032259413526312945, + "acc_norm": 0.33796296296296297, + "acc_norm_stderr": 0.032259413526312945 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2569832402234637, + "acc_stderr": 0.014614465821966339, + "acc_norm": 0.2569832402234637, + "acc_norm_stderr": 0.014614465821966339 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.27941176470588236, + "acc_stderr": 0.02725720260611495, + "acc_norm": 0.27941176470588236, + "acc_norm_stderr": 0.02725720260611495 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4122448979591837, + "acc_stderr": 0.031512360446742806, + "acc_norm": 0.4122448979591837, + "acc_norm_stderr": 0.031512360446742806 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.48523206751054854, + "acc_stderr": 0.032533028078777386, + "acc_norm": 0.48523206751054854, + "acc_norm_stderr": 0.032533028078777386 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.30964797913950454, + "acc_stderr": 0.011808598262503316, + "acc_norm": 0.30964797913950454, + "acc_norm_stderr": 0.011808598262503316 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.03460228327239171, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.03460228327239171 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4, + "acc_stderr": 0.03825460278380026, + "acc_norm": 0.4, + "acc_norm_stderr": 0.03825460278380026 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 
0.24357405140758873, + "mc1_stderr": 0.015026354824910782, + "mc2": 0.39420490667593977, + "mc2_stderr": 0.015249702539058304 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.35537190082644626, + "acc_stderr": 0.01645549600031452, + "acc_norm": 0.3789846517119244, + "acc_norm_stderr": 0.01667926068422929 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "etri-xainlp/llama2-ko-13b-instruct-v1.2", + "model_sha": "3f79d4ea5fd24ad29521814ce0f8462a9f6828dd", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/etri-xainlp/llama2-ko-13b-instruct-v1/result_2023-10-30 03:35:54.json b/etri-xainlp/llama2-ko-13b-instruct-v1/result_2023-10-30 03:35:54.json new file mode 100644 index 0000000000000000000000000000000000000000..b882201a850ce514c6036117935eee568a30ed9c --- /dev/null +++ b/etri-xainlp/llama2-ko-13b-instruct-v1/result_2023-10-30 03:35:54.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4087030716723549, + "acc_stderr": 0.014365750345427006, + "acc_norm": 0.4445392491467577, + "acc_norm_stderr": 0.014521226405627072 + }, + "harness|ko_hellaswag|10": { + "acc": 
0.42053375821549493, + "acc_stderr": 0.00492635856449457, + "acc_norm": 0.5438159729137622, + "acc_norm_stderr": 0.004970585328297623 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.49122807017543857, + "acc_stderr": 0.03834234744164993, + "acc_norm": 0.49122807017543857, + "acc_norm_stderr": 0.03834234744164993 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.47572815533980584, + "acc_stderr": 0.049449010929737795, + "acc_norm": 0.47572815533980584, + "acc_norm_stderr": 0.049449010929737795 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5031928480204342, + "acc_stderr": 0.017879598945933085, + "acc_norm": 0.5031928480204342, + "acc_norm_stderr": 0.017879598945933085 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.362962962962963, + "acc_stderr": 0.041539484047424004, + "acc_norm": 0.362962962962963, + "acc_norm_stderr": 0.041539484047424004 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909284, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909284 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.37446808510638296, + "acc_stderr": 0.031639106653672915, + "acc_norm": 0.37446808510638296, + "acc_norm_stderr": 0.031639106653672915 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4457831325301205, + "acc_stderr": 0.038695433234721015, + "acc_norm": 0.4457831325301205, + "acc_norm_stderr": 0.038695433234721015 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5080385852090032, + "acc_stderr": 0.028394421370984538, + "acc_norm": 0.5080385852090032, + "acc_norm_stderr": 0.028394421370984538 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.45739910313901344, + "acc_stderr": 0.033435777055830646, + "acc_norm": 0.45739910313901344, + "acc_norm_stderr": 0.033435777055830646 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48091603053435117, + "acc_stderr": 0.04382094705550988, + "acc_norm": 0.48091603053435117, + "acc_norm_stderr": 0.04382094705550988 + }, + 
"harness|ko_mmlu_medical_genetics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.035476014940069384, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.035476014940069384 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4206896551724138, + "acc_stderr": 0.0411391498118926, + "acc_norm": 0.4206896551724138, + "acc_norm_stderr": 0.0411391498118926 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.04488482852329017, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.04488482852329017 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4495798319327731, + "acc_stderr": 0.03231293497137707, + "acc_norm": 0.4495798319327731, + "acc_norm_stderr": 0.03231293497137707 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.44358974358974357, + "acc_stderr": 0.025189149894764187, + "acc_norm": 0.44358974358974357, + "acc_norm_stderr": 0.025189149894764187 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5092592592592593, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.5092592592592593, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39901477832512317, + "acc_stderr": 0.03445487686264715, + "acc_norm": 0.39901477832512317, + "acc_norm_stderr": 0.03445487686264715 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4612903225806452, + "acc_stderr": 0.028358634859836928, + "acc_norm": 0.4612903225806452, + "acc_norm_stderr": 
0.028358634859836928 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.688034188034188, + "acc_stderr": 0.030351527323344944, + "acc_norm": 0.688034188034188, + "acc_norm_stderr": 0.030351527323344944 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4490566037735849, + "acc_stderr": 0.030612730713641095, + "acc_norm": 0.4490566037735849, + "acc_norm_stderr": 0.030612730713641095 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5727272727272728, + "acc_stderr": 0.047381987035454834, + "acc_norm": 0.5727272727272728, + "acc_norm_stderr": 0.047381987035454834 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.32592592592592595, + "acc_stderr": 0.02857834836547308, + "acc_norm": 0.32592592592592595, + "acc_norm_stderr": 0.02857834836547308 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.038020397601079024, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.038020397601079024 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5920398009950248, + "acc_stderr": 0.03475116365194092, + "acc_norm": 0.5920398009950248, + "acc_norm_stderr": 0.03475116365194092 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.44508670520231214, + "acc_stderr": 0.03789401760283647, + "acc_norm": 0.44508670520231214, + "acc_norm_stderr": 0.03789401760283647 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3253968253968254, + "acc_stderr": 0.02413015829976262, + "acc_norm": 0.3253968253968254, + "acc_norm_stderr": 0.02413015829976262 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3472222222222222, + "acc_stderr": 0.039812405437178615, + "acc_norm": 0.3472222222222222, + "acc_norm_stderr": 0.039812405437178615 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237102, + 
"acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.48554913294797686, + "acc_stderr": 0.026907849856282532, + "acc_norm": 0.48554913294797686, + "acc_norm_stderr": 0.026907849856282532 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.44171779141104295, + "acc_stderr": 0.03901591825836184, + "acc_norm": 0.44171779141104295, + "acc_norm_stderr": 0.03901591825836184 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.44135802469135804, + "acc_stderr": 0.027628737155668777, + "acc_norm": 0.44135802469135804, + "acc_norm_stderr": 0.027628737155668777 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5077720207253886, + "acc_stderr": 0.03608003225569654, + "acc_norm": 0.5077720207253886, + "acc_norm_stderr": 0.03608003225569654 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.042270544512321984, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.042270544512321984 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5247706422018349, + "acc_stderr": 0.021410999753635914, + "acc_norm": 0.5247706422018349, + "acc_norm_stderr": 0.021410999753635914 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.04285714285714281, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.04285714285714281 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4673202614379085, + "acc_stderr": 0.028568699752225875, + "acc_norm": 0.4673202614379085, + "acc_norm_stderr": 0.028568699752225875 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6528925619834711, + "acc_stderr": 
0.04345724570292534, + "acc_norm": 0.6528925619834711, + "acc_norm_stderr": 0.04345724570292534 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4868421052631579, + "acc_stderr": 0.04067533136309173, + "acc_norm": 0.4868421052631579, + "acc_norm_stderr": 0.04067533136309173 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.01965992249362335, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.01965992249362335 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2872340425531915, + "acc_stderr": 0.026992199173064356, + "acc_norm": 0.2872340425531915, + "acc_norm_stderr": 0.026992199173064356 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.24107142857142858, + "acc_stderr": 0.04059867246952683, + "acc_norm": 0.24107142857142858, + "acc_norm_stderr": 0.04059867246952683 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.38425925925925924, + "acc_stderr": 0.03317354514310742, + "acc_norm": 0.38425925925925924, + "acc_norm_stderr": 0.03317354514310742 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.25139664804469275, + "acc_stderr": 0.014508979453553974, + "acc_norm": 0.25139664804469275, + "acc_norm_stderr": 0.014508979453553974 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.36764705882352944, + "acc_stderr": 0.02928941340940319, + "acc_norm": 0.36764705882352944, + "acc_norm_stderr": 0.02928941340940319 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5306122448979592, + "acc_stderr": 0.031949171367580624, + "acc_norm": 0.5306122448979592, + "acc_norm_stderr": 0.031949171367580624 + }, + "harness|ko_mmlu_high_school_world_history|5": { 
+ "acc": 0.5569620253164557, + "acc_stderr": 0.03233532777533484, + "acc_norm": 0.5569620253164557, + "acc_norm_stderr": 0.03233532777533484 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.35071707953063885, + "acc_stderr": 0.012187773370741518, + "acc_norm": 0.35071707953063885, + "acc_norm_stderr": 0.012187773370741518 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.47058823529411764, + "acc_stderr": 0.03503235296367992, + "acc_norm": 0.47058823529411764, + "acc_norm_stderr": 0.03503235296367992 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4909090909090909, + "acc_stderr": 0.0390369864774844, + "acc_norm": 0.4909090909090909, + "acc_norm_stderr": 0.0390369864774844 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.28151774785801714, + "mc1_stderr": 0.01574402724825605, + "mc2": 0.44031892549959717, + "mc2_stderr": 0.015641862520853814 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4793388429752066, + "acc_stderr": 0.017175671279836446, + "acc_norm": 0.5324675324675324, + "acc_norm_stderr": 0.017154073716682868 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + 
"harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "etri-xainlp/llama2-ko-13b-instruct-v1", + "model_sha": "79d4bd9490cf7cc0015f950aeed3e5798c662ea2", + "model_dtype": "torch.float16", + "lighteval_sha": "", + 
"num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/etri-xainlp/llama2-ko-13b-instruct/result_2023-10-06 10:52:22.json b/etri-xainlp/llama2-ko-13b-instruct/result_2023-10-06 10:52:22.json new file mode 100644 index 0000000000000000000000000000000000000000..8952d6dbabbb4513b503253312c9a9dd4467b4f1 --- /dev/null +++ b/etri-xainlp/llama2-ko-13b-instruct/result_2023-10-06 10:52:22.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4087030716723549, + "acc_stderr": 0.014365750345427006, + "acc_norm": 0.44795221843003413, + "acc_norm_stderr": 0.01453201149821167 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4176458872734515, + "acc_stderr": 0.00492163264510238, + "acc_norm": 0.5456084445329615, + "acc_norm_stderr": 0.004968979259738337 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5029239766081871, + "acc_stderr": 0.03834759370936839, + "acc_norm": 0.5029239766081871, + "acc_norm_stderr": 0.03834759370936839 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5048543689320388, + "acc_stderr": 0.049505043821289195, + "acc_norm": 0.5048543689320388, + "acc_norm_stderr": 0.049505043821289195 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.47381864623243936, + "acc_stderr": 0.017855434554041982, + "acc_norm": 0.47381864623243936, + "acc_norm_stderr": 0.017855434554041982 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4148148148148148, + "acc_stderr": 0.04256193767901407, + "acc_norm": 0.4148148148148148, + "acc_norm_stderr": 0.04256193767901407 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3617021276595745, + "acc_stderr": 0.0314108219759624, + "acc_norm": 0.3617021276595745, + "acc_norm_stderr": 0.0314108219759624 + }, + "harness|ko_mmlu_virology|5": 
{ + "acc": 0.40963855421686746, + "acc_stderr": 0.03828401115079022, + "acc_norm": 0.40963855421686746, + "acc_norm_stderr": 0.03828401115079022 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4758842443729904, + "acc_stderr": 0.02836504154256457, + "acc_norm": 0.4758842443729904, + "acc_norm_stderr": 0.02836504154256457 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4125560538116592, + "acc_stderr": 0.03304062175449297, + "acc_norm": 0.4125560538116592, + "acc_norm_stderr": 0.03304062175449297 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5114503816793893, + "acc_stderr": 0.043841400240780176, + "acc_norm": 0.5114503816793893, + "acc_norm_stderr": 0.043841400240780176 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5, + "acc_stderr": 0.035623524993954825, + "acc_norm": 0.5, + "acc_norm_stderr": 0.035623524993954825 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4206896551724138, + "acc_stderr": 0.0411391498118926, + "acc_norm": 0.4206896551724138, + "acc_norm_stderr": 0.0411391498118926 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3235294117647059, + "acc_stderr": 0.046550104113196177, + "acc_norm": 0.3235294117647059, + "acc_norm_stderr": 0.046550104113196177 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.42436974789915966, + "acc_stderr": 0.032104790510157764, + "acc_norm": 0.42436974789915966, + "acc_norm_stderr": 0.032104790510157764 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.41794871794871796, + "acc_stderr": 0.025007329882461227, + "acc_norm": 0.41794871794871796, + "acc_norm_stderr": 0.025007329882461227 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.47, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.47, + "acc_norm_stderr": 0.050161355804659205 + }, + 
"harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.04820403072760627, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.04820403072760627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.35467980295566504, + "acc_stderr": 0.03366124489051448, + "acc_norm": 0.35467980295566504, + "acc_norm_stderr": 0.03366124489051448 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.47096774193548385, + "acc_stderr": 0.028396016402761005, + "acc_norm": 0.47096774193548385, + "acc_norm_stderr": 0.028396016402761005 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6495726495726496, + "acc_stderr": 0.0312561082442188, + "acc_norm": 0.6495726495726496, + "acc_norm_stderr": 0.0312561082442188 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4867924528301887, + "acc_stderr": 0.030762134874500476, + "acc_norm": 0.4867924528301887, + "acc_norm_stderr": 0.030762134874500476 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5272727272727272, + "acc_stderr": 0.04782001791380061, + "acc_norm": 0.5272727272727272, + "acc_norm_stderr": 0.04782001791380061 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.29259259259259257, + "acc_stderr": 0.02773896963217609, + "acc_norm": 0.29259259259259257, + "acc_norm_stderr": 0.02773896963217609 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.03780445850526732, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.03780445850526732 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.572139303482587, + "acc_stderr": 0.03498541988407795, + "acc_norm": 0.572139303482587, + "acc_norm_stderr": 0.03498541988407795 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.45664739884393063, + "acc_stderr": 0.03798106566014498, + "acc_norm": 
0.45664739884393063, + "acc_norm_stderr": 0.03798106566014498 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3253968253968254, + "acc_stderr": 0.02413015829976262, + "acc_norm": 0.3253968253968254, + "acc_norm_stderr": 0.02413015829976262 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3541666666666667, + "acc_stderr": 0.039994111357535424, + "acc_norm": 0.3541666666666667, + "acc_norm_stderr": 0.039994111357535424 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.62, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.62, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.43641618497109824, + "acc_stderr": 0.026700545424943684, + "acc_norm": 0.43641618497109824, + "acc_norm_stderr": 0.026700545424943684 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4171779141104294, + "acc_stderr": 0.038741028598180814, + "acc_norm": 0.4171779141104294, + "acc_norm_stderr": 0.038741028598180814 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4012345679012346, + "acc_stderr": 0.027272582849839796, + "acc_norm": 0.4012345679012346, + "acc_norm_stderr": 0.027272582849839796 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5077720207253886, + "acc_stderr": 0.036080032255696545, + "acc_norm": 0.5077720207253886, + "acc_norm_stderr": 0.036080032255696545 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.0404933929774814, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.0404933929774814 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5211009174311927, + "acc_stderr": 
0.021418224754264643, + "acc_norm": 0.5211009174311927, + "acc_norm_stderr": 0.021418224754264643 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3492063492063492, + "acc_stderr": 0.04263906892795133, + "acc_norm": 0.3492063492063492, + "acc_norm_stderr": 0.04263906892795133 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.46078431372549017, + "acc_stderr": 0.028541722692618874, + "acc_norm": 0.46078431372549017, + "acc_norm_stderr": 0.028541722692618874 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.44, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.44, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.043913262867240704, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.043913262867240704 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4605263157894737, + "acc_stderr": 0.04056242252249033, + "acc_norm": 0.4605263157894737, + "acc_norm_stderr": 0.04056242252249033 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3660130718954248, + "acc_stderr": 0.01948802574552967, + "acc_norm": 0.3660130718954248, + "acc_norm_stderr": 0.01948802574552967 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.30141843971631205, + "acc_stderr": 0.02737412888263115, + "acc_norm": 0.30141843971631205, + "acc_norm_stderr": 0.02737412888263115 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.20535714285714285, + "acc_stderr": 0.038342410214190714, + "acc_norm": 0.20535714285714285, + "acc_norm_stderr": 0.038342410214190714 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.38425925925925924, + "acc_stderr": 0.03317354514310742, + "acc_norm": 0.38425925925925924, + "acc_norm_stderr": 0.03317354514310742 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24022346368715083, + "acc_stderr": 0.014288343803925295, + "acc_norm": 0.24022346368715083, + "acc_norm_stderr": 0.014288343803925295 + }, + 
"harness|ko_mmlu_college_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4117647058823529, + "acc_stderr": 0.029896163033125474, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.029896163033125474 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5224489795918368, + "acc_stderr": 0.03197694118713672, + "acc_norm": 0.5224489795918368, + "acc_norm_stderr": 0.03197694118713672 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.459915611814346, + "acc_stderr": 0.03244246810187914, + "acc_norm": 0.459915611814346, + "acc_norm_stderr": 0.03244246810187914 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3135593220338983, + "acc_stderr": 0.011849234291459315, + "acc_norm": 0.3135593220338983, + "acc_norm_stderr": 0.011849234291459315 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.44607843137254904, + "acc_stderr": 0.03488845451304974, + "acc_norm": 0.44607843137254904, + "acc_norm_stderr": 0.03488845451304974 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4484848484848485, + "acc_stderr": 0.03883565977956929, + "acc_norm": 0.4484848484848485, + "acc_norm_stderr": 0.03883565977956929 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2778457772337821, + "mc1_stderr": 0.015680929364024623, + "mc2": 0.4417936176466885, + "mc2_stderr": 0.015776414620892073 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4085005903187721, + "acc_stderr": 0.01690006287942712, + "acc_norm": 0.4592680047225502, + "acc_norm_stderr": 0.01713321827653767 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, 
+ "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + 
"harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "etri-xainlp/llama2-ko-13b-instruct", + "model_sha": "5be30496ddc86d18eff1df9aab04e5c246fb2d86", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/etri-xainlp/llama3-8b-dpo_v1/result_2024-05-07 01:10:16.json b/etri-xainlp/llama3-8b-dpo_v1/result_2024-05-07 01:10:16.json new file mode 100644 index 0000000000000000000000000000000000000000..dcbe65941f5d0f5a51fce448dd0278b702972b52 --- /dev/null +++ b/etri-xainlp/llama3-8b-dpo_v1/result_2024-05-07 01:10:16.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3822525597269625, + "acc_stderr": 0.014200454049979279, + "acc_norm": 0.44283276450511944, + "acc_norm_stderr": 0.01451557387334891 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3948416650069707, + "acc_stderr": 0.004878176541703569, + "acc_norm": 0.5364469229237204, + "acc_norm_stderr": 0.004976507121076271 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6432748538011696, + "acc_stderr": 0.03674013002860954, + "acc_norm": 0.6432748538011696, + "acc_norm_stderr": 0.03674013002860954 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6699029126213593, + "acc_stderr": 0.04656147110012351, + "acc_norm": 0.6699029126213593, + "acc_norm_stderr": 0.04656147110012351 + }, + 
"harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5530012771392082, + "acc_stderr": 0.01777922523339422, + "acc_norm": 0.5530012771392082, + "acc_norm_stderr": 0.01777922523339422 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4, + "acc_stderr": 0.04232073695151589, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04232073695151589 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4340425531914894, + "acc_stderr": 0.032400380867927465, + "acc_norm": 0.4340425531914894, + "acc_norm_stderr": 0.032400380867927465 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39759036144578314, + "acc_stderr": 0.038099730845402184, + "acc_norm": 0.39759036144578314, + "acc_norm_stderr": 0.038099730845402184 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5691318327974276, + "acc_stderr": 0.028125340983972708, + "acc_norm": 0.5691318327974276, + "acc_norm_stderr": 0.028125340983972708 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.515695067264574, + "acc_stderr": 0.0335412657542081, + "acc_norm": 0.515695067264574, + "acc_norm_stderr": 0.0335412657542081 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5648854961832062, + "acc_stderr": 0.04348208051644858, + "acc_norm": 0.5648854961832062, + "acc_norm_stderr": 0.04348208051644858 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.48, + "acc_stderr": 0.05021167315686779, + "acc_norm": 0.48, + "acc_norm_stderr": 0.05021167315686779 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6414141414141414, + "acc_stderr": 0.03416903640391521, + "acc_norm": 0.6414141414141414, + "acc_norm_stderr": 0.03416903640391521 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5379310344827586, + "acc_stderr": 0.04154659671707548, + "acc_norm": 0.5379310344827586, + "acc_norm_stderr": 0.04154659671707548 + }, + 
"harness|ko_mmlu_college_physics|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.04488482852329017, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.04488482852329017 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5798319327731093, + "acc_stderr": 0.032061837832361516, + "acc_norm": 0.5798319327731093, + "acc_norm_stderr": 0.032061837832361516 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5230769230769231, + "acc_stderr": 0.025323990861736253, + "acc_norm": 0.5230769230769231, + "acc_norm_stderr": 0.025323990861736253 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.59, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.59, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5740740740740741, + "acc_stderr": 0.0478034362693679, + "acc_norm": 0.5740740740740741, + "acc_norm_stderr": 0.0478034362693679 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.45320197044334976, + "acc_stderr": 0.035025446508458714, + "acc_norm": 0.45320197044334976, + "acc_norm_stderr": 0.035025446508458714 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5516129032258065, + "acc_stderr": 0.028292056830112728, + "acc_norm": 0.5516129032258065, + "acc_norm_stderr": 0.028292056830112728 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7777777777777778, + "acc_stderr": 0.027236013946196673, + "acc_norm": 0.7777777777777778, + "acc_norm_stderr": 0.027236013946196673 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5283018867924528, + "acc_stderr": 0.030723535249006107, + "acc_norm": 0.5283018867924528, + "acc_norm_stderr": 0.030723535249006107 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5636363636363636, + "acc_stderr": 0.04750185058907297, + "acc_norm": 0.5636363636363636, + 
"acc_norm_stderr": 0.04750185058907297 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.02944316932303154, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.02944316932303154 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3708609271523179, + "acc_stderr": 0.03943966699183629, + "acc_norm": 0.3708609271523179, + "acc_norm_stderr": 0.03943966699183629 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6567164179104478, + "acc_stderr": 0.03357379665433431, + "acc_norm": 0.6567164179104478, + "acc_norm_stderr": 0.03357379665433431 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4161849710982659, + "acc_stderr": 0.037585177754049466, + "acc_norm": 0.4161849710982659, + "acc_norm_stderr": 0.037585177754049466 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.41005291005291006, + "acc_stderr": 0.025331202438944427, + "acc_norm": 0.41005291005291006, + "acc_norm_stderr": 0.025331202438944427 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4791666666666667, + "acc_stderr": 0.04177578950739993, + "acc_norm": 0.4791666666666667, + "acc_norm_stderr": 0.04177578950739993 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.44, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.44, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.68, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.68, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5549132947976878, + "acc_stderr": 0.02675625512966377, + "acc_norm": 0.5549132947976878, + "acc_norm_stderr": 0.02675625512966377 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4539877300613497, + "acc_stderr": 0.0391170190467718, + "acc_norm": 0.4539877300613497, + "acc_norm_stderr": 0.0391170190467718 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5432098765432098, + "acc_stderr": 0.027716661650194038, + 
"acc_norm": 0.5432098765432098, + "acc_norm_stderr": 0.027716661650194038 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6528497409326425, + "acc_stderr": 0.03435696168361355, + "acc_norm": 0.6528497409326425, + "acc_norm_stderr": 0.03435696168361355 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.38596491228070173, + "acc_stderr": 0.04579639422070435, + "acc_norm": 0.38596491228070173, + "acc_norm_stderr": 0.04579639422070435 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5853211009174312, + "acc_stderr": 0.021122903208602592, + "acc_norm": 0.5853211009174312, + "acc_norm_stderr": 0.021122903208602592 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3968253968253968, + "acc_stderr": 0.04375888492727061, + "acc_norm": 0.3968253968253968, + "acc_norm_stderr": 0.04375888492727061 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5359477124183006, + "acc_stderr": 0.028555827516528777, + "acc_norm": 0.5359477124183006, + "acc_norm_stderr": 0.028555827516528777 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7272727272727273, + "acc_stderr": 0.04065578140908705, + "acc_norm": 0.7272727272727273, + "acc_norm_stderr": 0.04065578140908705 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5460526315789473, + "acc_stderr": 0.04051646342874144, + "acc_norm": 0.5460526315789473, + "acc_norm_stderr": 0.04051646342874144 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.43790849673202614, + "acc_stderr": 0.020071257886886525, + "acc_norm": 0.43790849673202614, + "acc_norm_stderr": 0.020071257886886525 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 
0.35815602836879434, + "acc_stderr": 0.028602085862759415, + "acc_norm": 0.35815602836879434, + "acc_norm_stderr": 0.028602085862759415 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.38392857142857145, + "acc_stderr": 0.046161430750285455, + "acc_norm": 0.38392857142857145, + "acc_norm_stderr": 0.046161430750285455 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.03388857118502325, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.03388857118502325 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.35083798882681566, + "acc_stderr": 0.015961036675230963, + "acc_norm": 0.35083798882681566, + "acc_norm_stderr": 0.015961036675230963 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.0498887651569859, + "acc_norm": 0.44, + "acc_norm_stderr": 0.0498887651569859 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.62, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.62, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.375, + "acc_stderr": 0.029408372932278746, + "acc_norm": 0.375, + "acc_norm_stderr": 0.029408372932278746 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5346938775510204, + "acc_stderr": 0.03193207024425314, + "acc_norm": 0.5346938775510204, + "acc_norm_stderr": 0.03193207024425314 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6624472573839663, + "acc_stderr": 0.030781549102026216, + "acc_norm": 0.6624472573839663, + "acc_norm_stderr": 0.030781549102026216 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3494132985658409, + "acc_stderr": 0.012177306252786683, + "acc_norm": 0.3494132985658409, + "acc_norm_stderr": 0.012177306252786683 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5931372549019608, + "acc_stderr": 0.03447891136353382, + "acc_norm": 0.5931372549019608, + "acc_norm_stderr": 0.03447891136353382 + }, + 
"harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6545454545454545, + "acc_stderr": 0.037131580674819135, + "acc_norm": 0.6545454545454545, + "acc_norm_stderr": 0.037131580674819135 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24724602203182375, + "mc1_stderr": 0.015102404797359649, + "mc2": 0.4210397526293543, + "mc2_stderr": 0.015019234928723788 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.43919716646989376, + "acc_stderr": 0.017062775744780705, + "acc_norm": 0.5348288075560803, + "acc_norm_stderr": 0.017148598015747422 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + 
"harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "etri-xainlp/llama3-8b-dpo_v1", + "model_sha": "f815a104d94943c536cd39e112dbc0c0632efedd", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/etri-xainlp/polyglot-ko-12.8b-instruct/result_2023-10-05 00:56:42.json b/etri-xainlp/polyglot-ko-12.8b-instruct/result_2023-10-05 00:56:42.json new file mode 100644 index 0000000000000000000000000000000000000000..03f20af68dcbe26a6b13c81010a7bd34e33eaa20 --- /dev/null +++ b/etri-xainlp/polyglot-ko-12.8b-instruct/result_2023-10-05 00:56:42.json @@ -0,0 +1,444 @@ +{ + "results": { + 
"harness|ko_arc_challenge|25": { + "acc": 0.31143344709897613, + "acc_stderr": 0.013532472099850947, + "acc_norm": 0.3464163822525597, + "acc_norm_stderr": 0.013905011180063247 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4026090420235013, + "acc_stderr": 0.0048942100113032105, + "acc_norm": 0.5198167695678152, + "acc_norm_stderr": 0.004985860853427639 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.29239766081871343, + "acc_stderr": 0.03488647713457922, + "acc_norm": 0.29239766081871343, + "acc_norm_stderr": 0.03488647713457922 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.21359223300970873, + "acc_stderr": 0.040580420156460344, + "acc_norm": 0.21359223300970873, + "acc_norm_stderr": 0.040580420156460344 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2822477650063857, + "acc_stderr": 0.01609530296987857, + "acc_norm": 0.2822477650063857, + "acc_norm_stderr": 0.01609530296987857 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.03820169914517905, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.03820169914517905 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.23, + "acc_stderr": 0.042295258468165065, + "acc_norm": 0.23, + "acc_norm_stderr": 0.042295258468165065 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.28085106382978725, + "acc_stderr": 0.02937917046412483, + "acc_norm": 0.28085106382978725, + "acc_norm_stderr": 0.02937917046412483 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.2289156626506024, + "acc_stderr": 0.03270745277352477, + "acc_norm": 0.2289156626506024, + "acc_norm_stderr": 0.03270745277352477 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.21864951768488747, + "acc_stderr": 0.023475581417861113, + "acc_norm": 0.21864951768488747, + "acc_norm_stderr": 0.023475581417861113 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.26905829596412556, + "acc_stderr": 0.02976377940687498, + "acc_norm": 0.26905829596412556, + "acc_norm_stderr": 
0.02976377940687498 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2595419847328244, + "acc_stderr": 0.03844876139785271, + "acc_norm": 0.2595419847328244, + "acc_norm_stderr": 0.03844876139785271 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.24242424242424243, + "acc_stderr": 0.030532892233932036, + "acc_norm": 0.24242424242424243, + "acc_norm_stderr": 0.030532892233932036 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3103448275862069, + "acc_stderr": 0.03855289616378947, + "acc_norm": 0.3103448275862069, + "acc_norm_stderr": 0.03855289616378947 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.042801058373643966, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.042801058373643966 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.23109243697478993, + "acc_stderr": 0.027381406927868956, + "acc_norm": 0.23109243697478993, + "acc_norm_stderr": 0.027381406927868956 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.23846153846153847, + "acc_stderr": 0.021606294494647727, + "acc_norm": 0.23846153846153847, + "acc_norm_stderr": 0.021606294494647727 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.04236511258094632, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.04236511258094632 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2315270935960591, + "acc_stderr": 0.029678333141444465, + "acc_norm": 
0.2315270935960591, + "acc_norm_stderr": 0.029678333141444465 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.25483870967741934, + "acc_stderr": 0.024790118459332208, + "acc_norm": 0.25483870967741934, + "acc_norm_stderr": 0.024790118459332208 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.29914529914529914, + "acc_stderr": 0.029996951858349476, + "acc_norm": 0.29914529914529914, + "acc_norm_stderr": 0.029996951858349476 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2792452830188679, + "acc_stderr": 0.02761116340239972, + "acc_norm": 0.2792452830188679, + "acc_norm_stderr": 0.02761116340239972 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2545454545454545, + "acc_stderr": 0.041723430387053825, + "acc_norm": 0.2545454545454545, + "acc_norm_stderr": 0.041723430387053825 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24074074074074073, + "acc_stderr": 0.026067159222275794, + "acc_norm": 0.24074074074074073, + "acc_norm_stderr": 0.026067159222275794 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.24503311258278146, + "acc_stderr": 0.03511807571804723, + "acc_norm": 0.24503311258278146, + "acc_norm_stderr": 0.03511807571804723 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.22885572139303484, + "acc_stderr": 0.02970528405677244, + "acc_norm": 0.22885572139303484, + "acc_norm_stderr": 0.02970528405677244 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2543352601156069, + "acc_stderr": 0.0332055644308557, + "acc_norm": 0.2543352601156069, + "acc_norm_stderr": 0.0332055644308557 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2804232804232804, + "acc_stderr": 0.02313528797432563, + "acc_norm": 0.2804232804232804, + "acc_norm_stderr": 0.02313528797432563 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.25, + "acc_stderr": 0.03621034121889507, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03621034121889507 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.21, 
+ "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.3092485549132948, + "acc_stderr": 0.02488314057007176, + "acc_norm": 0.3092485549132948, + "acc_norm_stderr": 0.02488314057007176 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.25766871165644173, + "acc_stderr": 0.03436150827846917, + "acc_norm": 0.25766871165644173, + "acc_norm_stderr": 0.03436150827846917 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.29012345679012347, + "acc_stderr": 0.025251173936495026, + "acc_norm": 0.29012345679012347, + "acc_norm_stderr": 0.025251173936495026 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.21761658031088082, + "acc_stderr": 0.02977866303775296, + "acc_norm": 0.21761658031088082, + "acc_norm_stderr": 0.02977866303775296 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.21052631578947367, + "acc_stderr": 0.038351539543994194, + "acc_norm": 0.21052631578947367, + "acc_norm_stderr": 0.038351539543994194 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.20917431192660552, + "acc_stderr": 0.01743793717334323, + "acc_norm": 0.20917431192660552, + "acc_norm_stderr": 0.01743793717334323 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30158730158730157, + "acc_stderr": 0.04104947269903394, + "acc_norm": 0.30158730158730157, + "acc_norm_stderr": 0.04104947269903394 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2908496732026144, + "acc_stderr": 0.026004800363952113, + "acc_norm": 0.2908496732026144, + "acc_norm_stderr": 0.026004800363952113 + }, + "harness|ko_mmlu_business_ethics|5": { + 
"acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2644628099173554, + "acc_stderr": 0.040261875275912046, + "acc_norm": 0.2644628099173554, + "acc_norm_stderr": 0.040261875275912046 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.35526315789473684, + "acc_stderr": 0.03894734487013317, + "acc_norm": 0.35526315789473684, + "acc_norm_stderr": 0.03894734487013317 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2679738562091503, + "acc_stderr": 0.017917974069594726, + "acc_norm": 0.2679738562091503, + "acc_norm_stderr": 0.017917974069594726 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.23049645390070922, + "acc_stderr": 0.025123739226872416, + "acc_norm": 0.23049645390070922, + "acc_norm_stderr": 0.025123739226872416 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.29464285714285715, + "acc_stderr": 0.043270409325787296, + "acc_norm": 0.29464285714285715, + "acc_norm_stderr": 0.043270409325787296 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3472222222222222, + "acc_stderr": 0.03246887243637649, + "acc_norm": 0.3472222222222222, + "acc_norm_stderr": 0.03246887243637649 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23575418994413408, + "acc_stderr": 0.014196375686290804, + "acc_norm": 0.23575418994413408, + "acc_norm_stderr": 0.014196375686290804 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.35, + "acc_stderr": 0.04793724854411021, + "acc_norm": 0.35, + "acc_norm_stderr": 0.04793724854411021 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.28308823529411764, + "acc_stderr": 0.02736586113151381, + "acc_norm": 0.28308823529411764, + "acc_norm_stderr": 0.02736586113151381 + 
}, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.19591836734693877, + "acc_stderr": 0.025409301953225678, + "acc_norm": 0.19591836734693877, + "acc_norm_stderr": 0.025409301953225678 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.22362869198312235, + "acc_stderr": 0.027123298205229972, + "acc_norm": 0.22362869198312235, + "acc_norm_stderr": 0.027123298205229972 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.26401564537157757, + "acc_stderr": 0.011258435537723812, + "acc_norm": 0.26401564537157757, + "acc_norm_stderr": 0.011258435537723812 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.03132179803083291, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.03132179803083291 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.03453131801885416, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.03453131801885416 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.25458996328029376, + "mc1_stderr": 0.015250117079156472, + "mc2": 0.4202272328082401, + "mc2_stderr": 0.016142378134497877 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.30342384887839435, + "acc_stderr": 0.01580607271790957, + "acc_norm": 0.3447461629279811, + "acc_norm_stderr": 0.01634064990541869 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + 
"harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + 
"harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "etri-xainlp/polyglot-ko-12.8b-instruct", + "model_sha": "ec0113994052a77ef4741cf14d7a9af887b2e1d5", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/failspy/Codestral-22B-v0.1-abliterated-v3/result_2024-07-02 15:29:32.json b/failspy/Codestral-22B-v0.1-abliterated-v3/result_2024-07-02 15:29:32.json new file mode 100644 index 0000000000000000000000000000000000000000..b465017e46626edb815fe350ec51b24025729be8 --- /dev/null +++ b/failspy/Codestral-22B-v0.1-abliterated-v3/result_2024-07-02 15:29:32.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.43686006825938567, + "acc_stderr": 0.014494421584256534, + "acc_norm": 0.4991467576791809, + "acc_norm_stderr": 0.014611369529813276 + }, + "harness|ko_hellaswag|10": { + "acc": 0.43905596494722166, + "acc_stderr": 0.004952576863315226, + "acc_norm": 0.58743278231428, + "acc_norm_stderr": 0.004912900450370837 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.52046783625731, + "acc_stderr": 0.0383161053282193, + "acc_norm": 0.52046783625731, + "acc_norm_stderr": 0.0383161053282193 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.7281553398058253, + "acc_stderr": 0.044052680241409216, + "acc_norm": 0.7281553398058253, + "acc_norm_stderr": 0.044052680241409216 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.45977011494252873, + "acc_stderr": 0.017821994096933535, + "acc_norm": 0.45977011494252873, + "acc_norm_stderr": 0.017821994096933535 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.28888888888888886, + "acc_stderr": 0.0391545063041425, + "acc_norm": 0.28888888888888886, + "acc_norm_stderr": 0.0391545063041425 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, 
+ "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.35319148936170214, + "acc_stderr": 0.031245325202761926, + "acc_norm": 0.35319148936170214, + "acc_norm_stderr": 0.031245325202761926 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42771084337349397, + "acc_stderr": 0.03851597683718533, + "acc_norm": 0.42771084337349397, + "acc_norm_stderr": 0.03851597683718533 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.45980707395498394, + "acc_stderr": 0.028306190403305696, + "acc_norm": 0.45980707395498394, + "acc_norm_stderr": 0.028306190403305696 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4484304932735426, + "acc_stderr": 0.03337883736255099, + "acc_norm": 0.4484304932735426, + "acc_norm_stderr": 0.03337883736255099 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.44274809160305345, + "acc_stderr": 0.043564472026650695, + "acc_norm": 0.44274809160305345, + "acc_norm_stderr": 0.043564472026650695 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.601010101010101, + "acc_stderr": 0.03488901616852731, + "acc_norm": 0.601010101010101, + "acc_norm_stderr": 0.03488901616852731 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.35172413793103446, + "acc_stderr": 0.0397923663749741, + "acc_norm": 0.35172413793103446, + "acc_norm_stderr": 0.0397923663749741 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.30392156862745096, + "acc_stderr": 0.045766654032077615, + "acc_norm": 0.30392156862745096, + "acc_norm_stderr": 0.045766654032077615 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.49159663865546216, + "acc_stderr": 0.0324739027656967, + "acc_norm": 0.49159663865546216, + "acc_norm_stderr": 0.0324739027656967 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 
0.48205128205128206, + "acc_stderr": 0.02533466708095495, + "acc_norm": 0.48205128205128206, + "acc_norm_stderr": 0.02533466708095495 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5370370370370371, + "acc_stderr": 0.04820403072760627, + "acc_norm": 0.5370370370370371, + "acc_norm_stderr": 0.04820403072760627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4433497536945813, + "acc_stderr": 0.03495334582162933, + "acc_norm": 0.4433497536945813, + "acc_norm_stderr": 0.03495334582162933 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.44193548387096776, + "acc_stderr": 0.028251557906849738, + "acc_norm": 0.44193548387096776, + "acc_norm_stderr": 0.028251557906849738 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7435897435897436, + "acc_stderr": 0.02860595370200424, + "acc_norm": 0.7435897435897436, + "acc_norm_stderr": 0.02860595370200424 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.44528301886792454, + "acc_stderr": 0.030588052974270658, + "acc_norm": 0.44528301886792454, + "acc_norm_stderr": 0.030588052974270658 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5363636363636364, + "acc_stderr": 0.04776449162396197, + "acc_norm": 0.5363636363636364, + "acc_norm_stderr": 0.04776449162396197 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3296296296296296, + "acc_stderr": 0.028661201116524586, + "acc_norm": 0.3296296296296296, + "acc_norm_stderr": 0.028661201116524586 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2781456953642384, + "acc_stderr": 0.03658603262763743, + "acc_norm": 0.2781456953642384, + "acc_norm_stderr": 0.03658603262763743 + }, + 
"harness|ko_mmlu_sociology|5": { + "acc": 0.5771144278606966, + "acc_stderr": 0.034932317774212816, + "acc_norm": 0.5771144278606966, + "acc_norm_stderr": 0.034932317774212816 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.42196531791907516, + "acc_stderr": 0.0376574669386515, + "acc_norm": 0.42196531791907516, + "acc_norm_stderr": 0.0376574669386515 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.42063492063492064, + "acc_stderr": 0.025424835086924006, + "acc_norm": 0.42063492063492064, + "acc_norm_stderr": 0.025424835086924006 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4375, + "acc_stderr": 0.04148415739394154, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.04148415739394154 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.42485549132947975, + "acc_stderr": 0.026613350840261733, + "acc_norm": 0.42485549132947975, + "acc_norm_stderr": 0.026613350840261733 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5337423312883436, + "acc_stderr": 0.03919415545048411, + "acc_norm": 0.5337423312883436, + "acc_norm_stderr": 0.03919415545048411 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.42901234567901236, + "acc_stderr": 0.027538925613470867, + "acc_norm": 0.42901234567901236, + "acc_norm_stderr": 0.027538925613470867 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5544041450777202, + "acc_stderr": 0.03587014986075659, + "acc_norm": 0.5544041450777202, + "acc_norm_stderr": 0.03587014986075659 + }, + 
"harness|ko_mmlu_econometrics|5": { + "acc": 0.43859649122807015, + "acc_stderr": 0.04668000738510455, + "acc_norm": 0.43859649122807015, + "acc_norm_stderr": 0.04668000738510455 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.581651376146789, + "acc_stderr": 0.021149548596443885, + "acc_norm": 0.581651376146789, + "acc_norm_stderr": 0.021149548596443885 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.46825396825396826, + "acc_stderr": 0.04463112720677171, + "acc_norm": 0.46825396825396826, + "acc_norm_stderr": 0.04463112720677171 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.46405228758169936, + "acc_stderr": 0.028555827516528787, + "acc_norm": 0.46405228758169936, + "acc_norm_stderr": 0.028555827516528787 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5950413223140496, + "acc_stderr": 0.04481137755942469, + "acc_norm": 0.5950413223140496, + "acc_norm_stderr": 0.04481137755942469 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.47368421052631576, + "acc_stderr": 0.04063302731486671, + "acc_norm": 0.47368421052631576, + "acc_norm_stderr": 0.04063302731486671 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4150326797385621, + "acc_stderr": 0.01993362777685742, + "acc_norm": 0.4150326797385621, + "acc_norm_stderr": 0.01993362777685742 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3120567375886525, + "acc_stderr": 0.027640120545169924, + "acc_norm": 0.3120567375886525, + "acc_norm_stderr": 0.027640120545169924 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.38392857142857145, + "acc_stderr": 0.04616143075028547, + "acc_norm": 0.38392857142857145, + "acc_norm_stderr": 0.04616143075028547 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5509259259259259, + "acc_stderr": 0.03392238405321617, + "acc_norm": 
0.5509259259259259, + "acc_norm_stderr": 0.03392238405321617 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27039106145251396, + "acc_stderr": 0.01485499393801009, + "acc_norm": 0.27039106145251396, + "acc_norm_stderr": 0.01485499393801009 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.44, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.75, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.75, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.40808823529411764, + "acc_stderr": 0.029855261393483927, + "acc_norm": 0.40808823529411764, + "acc_norm_stderr": 0.029855261393483927 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6122448979591837, + "acc_stderr": 0.031192230726795656, + "acc_norm": 0.6122448979591837, + "acc_norm_stderr": 0.031192230726795656 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5949367088607594, + "acc_stderr": 0.03195514741370671, + "acc_norm": 0.5949367088607594, + "acc_norm_stderr": 0.03195514741370671 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.34810951760104303, + "acc_stderr": 0.012166738993698197, + "acc_norm": 0.34810951760104303, + "acc_norm_stderr": 0.012166738993698197 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5049019607843137, + "acc_stderr": 0.03509143375606787, + "acc_norm": 0.5049019607843137, + "acc_norm_stderr": 0.03509143375606787 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5696969696969697, + "acc_stderr": 0.03866225962879077, + "acc_norm": 0.5696969696969697, + "acc_norm_stderr": 0.03866225962879077 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.386780905752754, + "mc1_stderr": 0.01704885701051511, + "mc2": 0.5532242331163536, + "mc2_stderr": 0.015822049216251792 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.6387249114521841, 
+ "acc_stderr": 0.01651546302241203, + "acc_norm": 0.6469893742621016, + "acc_norm_stderr": 0.016430745982427157 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + 
"harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "failspy/Codestral-22B-v0.1-abliterated-v3", + "model_sha": "ce64ab19702acc673d6369f7bb018b2b1407e3c2", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/failspy/Meta-Llama-3-8B-Instruct-abliterated-v3/result_2024-05-28 05:33:42.json b/failspy/Meta-Llama-3-8B-Instruct-abliterated-v3/result_2024-05-28 05:33:42.json new file mode 100644 index 0000000000000000000000000000000000000000..69750ded6c07810726dcd99b8a7d4086eaf9946f --- /dev/null +++ b/failspy/Meta-Llama-3-8B-Instruct-abliterated-v3/result_2024-05-28 05:33:42.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3677474402730375, + "acc_stderr": 0.014090995618168482, + "acc_norm": 0.4325938566552901, + "acc_norm_stderr": 0.014478005694182533 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3575980880302729, + "acc_stderr": 0.004783133725599501, + "acc_norm": 0.46255725951005777, + "acc_norm_stderr": 0.004975770805464643 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 
0.5555555555555556, + "acc_stderr": 0.038110796698335316, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.038110796698335316 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5728155339805825, + "acc_stderr": 0.04897957737781168, + "acc_norm": 0.5728155339805825, + "acc_norm_stderr": 0.04897957737781168 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4521072796934866, + "acc_stderr": 0.017797751493865623, + "acc_norm": 0.4521072796934866, + "acc_norm_stderr": 0.017797751493865623 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3851851851851852, + "acc_stderr": 0.042039210401562783, + "acc_norm": 0.3851851851851852, + "acc_norm_stderr": 0.042039210401562783 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4808510638297872, + "acc_stderr": 0.032662042990646775, + "acc_norm": 0.4808510638297872, + "acc_norm_stderr": 0.032662042990646775 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3614457831325301, + "acc_stderr": 0.0374005938202932, + "acc_norm": 0.3614457831325301, + "acc_norm_stderr": 0.0374005938202932 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5176848874598071, + "acc_stderr": 0.02838032284907713, + "acc_norm": 0.5176848874598071, + "acc_norm_stderr": 0.02838032284907713 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5022421524663677, + "acc_stderr": 0.033557465352232634, + "acc_norm": 0.5022421524663677, + "acc_norm_stderr": 0.033557465352232634 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5038167938931297, + "acc_stderr": 0.04385162325601553, + "acc_norm": 0.5038167938931297, + "acc_norm_stderr": 0.04385162325601553 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.44, + "acc_stderr": 0.0498887651569859, + "acc_norm": 0.44, + "acc_norm_stderr": 0.0498887651569859 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 
0.5151515151515151, + "acc_stderr": 0.0356071651653106, + "acc_norm": 0.5151515151515151, + "acc_norm_stderr": 0.0356071651653106 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5793103448275863, + "acc_stderr": 0.0411391498118926, + "acc_norm": 0.5793103448275863, + "acc_norm_stderr": 0.0411391498118926 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.04533838195929778, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.04533838195929778 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5546218487394958, + "acc_stderr": 0.03228410626716391, + "acc_norm": 0.5546218487394958, + "acc_norm_stderr": 0.03228410626716391 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4897435897435897, + "acc_stderr": 0.025345672221942374, + "acc_norm": 0.4897435897435897, + "acc_norm_stderr": 0.025345672221942374 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.55, + "acc_stderr": 0.04999999999999999, + "acc_norm": 0.55, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6111111111111112, + "acc_stderr": 0.0471282125742677, + "acc_norm": 0.6111111111111112, + "acc_norm_stderr": 0.0471282125742677 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4729064039408867, + "acc_stderr": 0.03512819077876106, + "acc_norm": 0.4729064039408867, + "acc_norm_stderr": 0.03512819077876106 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5064516129032258, + "acc_stderr": 0.028441638233540505, + "acc_norm": 0.5064516129032258, + "acc_norm_stderr": 0.028441638233540505 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7564102564102564, + "acc_stderr": 0.028120966503914394, + "acc_norm": 0.7564102564102564, + "acc_norm_stderr": 0.028120966503914394 + }, + 
"harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5094339622641509, + "acc_stderr": 0.0307673947078081, + "acc_norm": 0.5094339622641509, + "acc_norm_stderr": 0.0307673947078081 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4818181818181818, + "acc_stderr": 0.04785964010794916, + "acc_norm": 0.4818181818181818, + "acc_norm_stderr": 0.04785964010794916 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.362962962962963, + "acc_stderr": 0.029318203645206865, + "acc_norm": 0.362962962962963, + "acc_norm_stderr": 0.029318203645206865 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.39072847682119205, + "acc_stderr": 0.03983798306659809, + "acc_norm": 0.39072847682119205, + "acc_norm_stderr": 0.03983798306659809 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6467661691542289, + "acc_stderr": 0.03379790611796777, + "acc_norm": 0.6467661691542289, + "acc_norm_stderr": 0.03379790611796777 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.44508670520231214, + "acc_stderr": 0.03789401760283647, + "acc_norm": 0.44508670520231214, + "acc_norm_stderr": 0.03789401760283647 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3835978835978836, + "acc_stderr": 0.025043757318520203, + "acc_norm": 0.3835978835978836, + "acc_norm_stderr": 0.025043757318520203 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4652777777777778, + "acc_stderr": 0.04171115858181618, + "acc_norm": 0.4652777777777778, + "acc_norm_stderr": 0.04171115858181618 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.68, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.68, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.523121387283237, + "acc_stderr": 0.026890297881303118, + "acc_norm": 0.523121387283237, + "acc_norm_stderr": 
0.026890297881303118 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.43558282208588955, + "acc_stderr": 0.03895632464138936, + "acc_norm": 0.43558282208588955, + "acc_norm_stderr": 0.03895632464138936 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5030864197530864, + "acc_stderr": 0.02782021415859437, + "acc_norm": 0.5030864197530864, + "acc_norm_stderr": 0.02782021415859437 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5544041450777202, + "acc_stderr": 0.03587014986075658, + "acc_norm": 0.5544041450777202, + "acc_norm_stderr": 0.03587014986075658 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.34210526315789475, + "acc_stderr": 0.04462917535336937, + "acc_norm": 0.34210526315789475, + "acc_norm_stderr": 0.04462917535336937 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5798165137614679, + "acc_stderr": 0.0211624200482735, + "acc_norm": 0.5798165137614679, + "acc_norm_stderr": 0.0211624200482735 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.044444444444444495, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.044444444444444495 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5130718954248366, + "acc_stderr": 0.028620130800700246, + "acc_norm": 0.5130718954248366, + "acc_norm_stderr": 0.028620130800700246 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7024793388429752, + "acc_stderr": 0.04173349148083498, + "acc_norm": 0.7024793388429752, + "acc_norm_stderr": 0.04173349148083498 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4868421052631579, + "acc_stderr": 0.040675331363091746, + "acc_norm": 0.4868421052631579, 
+ "acc_norm_stderr": 0.040675331363091746 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.02010258389588718, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.02010258389588718 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.36524822695035464, + "acc_stderr": 0.028723863853281278, + "acc_norm": 0.36524822695035464, + "acc_norm_stderr": 0.028723863853281278 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4732142857142857, + "acc_stderr": 0.047389751192741546, + "acc_norm": 0.4732142857142857, + "acc_norm_stderr": 0.047389751192741546 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4398148148148148, + "acc_stderr": 0.0338517797604481, + "acc_norm": 0.4398148148148148, + "acc_norm_stderr": 0.0338517797604481 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.3240223463687151, + "acc_stderr": 0.01565254249642113, + "acc_norm": 0.3240223463687151, + "acc_norm_stderr": 0.01565254249642113 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.69, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.69, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.375, + "acc_stderr": 0.029408372932278746, + "acc_norm": 0.375, + "acc_norm_stderr": 0.029408372932278746 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6285714285714286, + "acc_stderr": 0.03093285879278986, + "acc_norm": 0.6285714285714286, + "acc_norm_stderr": 0.03093285879278986 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6455696202531646, + "acc_stderr": 0.0311373042971858, + "acc_norm": 0.6455696202531646, + "acc_norm_stderr": 0.0311373042971858 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.36310299869621904, + "acc_stderr": 
0.012282264406018765, + "acc_norm": 0.36310299869621904, + "acc_norm_stderr": 0.012282264406018765 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5441176470588235, + "acc_stderr": 0.03495624522015476, + "acc_norm": 0.5441176470588235, + "acc_norm_stderr": 0.03495624522015476 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.03756335775187896, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.03756335775187896 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.30966952264381886, + "mc1_stderr": 0.0161857443551449, + "mc2": 0.4868072819342882, + "mc2_stderr": 0.015837279372252558 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4510035419126328, + "acc_stderr": 0.01710761885954935, + "acc_norm": 0.4982290436835891, + "acc_norm_stderr": 0.017190246276231867 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + 
"harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "failspy/Meta-Llama-3-8B-Instruct-abliterated-v3", + "model_sha": "85a25be002841fe738a5267b6806473f36f86715", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/failspy/Phi-3-medium-4k-instruct-abliterated-v3/result_2024-07-29 
22:35:04.json b/failspy/Phi-3-medium-4k-instruct-abliterated-v3/result_2024-07-29 22:35:04.json new file mode 100644 index 0000000000000000000000000000000000000000..e4b67c1507c5c371d4143e157e41e8d21991038f --- /dev/null +++ b/failspy/Phi-3-medium-4k-instruct-abliterated-v3/result_2024-07-29 22:35:04.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.33276450511945393, + "acc_stderr": 0.013769863046192305, + "acc_norm": 0.363481228668942, + "acc_norm_stderr": 0.014056207319068285 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3225453096992631, + "acc_stderr": 0.004664950168300709, + "acc_norm": 0.39693288189603665, + "acc_norm_stderr": 0.0048826194841666 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4853801169590643, + "acc_stderr": 0.038331852752130205, + "acc_norm": 0.4853801169590643, + "acc_norm_stderr": 0.038331852752130205 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6310679611650486, + "acc_stderr": 0.0477761518115674, + "acc_norm": 0.6310679611650486, + "acc_norm_stderr": 0.0477761518115674 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4355044699872286, + "acc_stderr": 0.017730589927926612, + "acc_norm": 0.4355044699872286, + "acc_norm_stderr": 0.017730589927926612 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37777777777777777, + "acc_stderr": 0.04188307537595852, + "acc_norm": 0.37777777777777777, + "acc_norm_stderr": 0.04188307537595852 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.46382978723404256, + "acc_stderr": 0.032600385118357715, + "acc_norm": 0.46382978723404256, + "acc_norm_stderr": 0.032600385118357715 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4759036144578313, + "acc_stderr": 0.03887971849597264, + "acc_norm": 0.4759036144578313, + "acc_norm_stderr": 0.03887971849597264 + }, + 
"harness|ko_mmlu_philosophy|5": { + "acc": 0.4115755627009646, + "acc_stderr": 0.027950481494401255, + "acc_norm": 0.4115755627009646, + "acc_norm_stderr": 0.027950481494401255 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4798206278026906, + "acc_stderr": 0.033530461674123, + "acc_norm": 0.4798206278026906, + "acc_norm_stderr": 0.033530461674123 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.46564885496183206, + "acc_stderr": 0.043749285605997376, + "acc_norm": 0.46564885496183206, + "acc_norm_stderr": 0.043749285605997376 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5202020202020202, + "acc_stderr": 0.03559443565563919, + "acc_norm": 0.5202020202020202, + "acc_norm_stderr": 0.03559443565563919 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4896551724137931, + "acc_stderr": 0.04165774775728763, + "acc_norm": 0.4896551724137931, + "acc_norm_stderr": 0.04165774775728763 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.04389869956808778, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.04389869956808778 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4789915966386555, + "acc_stderr": 0.03244980849990029, + "acc_norm": 0.4789915966386555, + "acc_norm_stderr": 0.03244980849990029 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4230769230769231, + "acc_stderr": 0.025049197876042335, + "acc_norm": 0.4230769230769231, + "acc_norm_stderr": 0.025049197876042335 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + 
}, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.45320197044334976, + "acc_stderr": 0.035025446508458714, + "acc_norm": 0.45320197044334976, + "acc_norm_stderr": 0.035025446508458714 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4935483870967742, + "acc_stderr": 0.02844163823354051, + "acc_norm": 0.4935483870967742, + "acc_norm_stderr": 0.02844163823354051 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.030882736974138656, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.030882736974138656 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.41509433962264153, + "acc_stderr": 0.030325945789286102, + "acc_norm": 0.41509433962264153, + "acc_norm_stderr": 0.030325945789286102 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.45454545454545453, + "acc_stderr": 0.04769300568972744, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.04769300568972744 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.4148148148148148, + "acc_stderr": 0.030039842454069283, + "acc_norm": 0.4148148148148148, + "acc_norm_stderr": 0.030039842454069283 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33112582781456956, + "acc_stderr": 0.038425817186598696, + "acc_norm": 0.33112582781456956, + "acc_norm_stderr": 0.038425817186598696 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5174129353233831, + "acc_stderr": 0.03533389234739245, + "acc_norm": 0.5174129353233831, + "acc_norm_stderr": 0.03533389234739245 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3988439306358382, + "acc_stderr": 0.037336266553835096, + "acc_norm": 0.3988439306358382, + "acc_norm_stderr": 0.037336266553835096 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.5873015873015873, + "acc_stderr": 0.02535574126305527, + 
"acc_norm": 0.5873015873015873, + "acc_norm_stderr": 0.02535574126305527 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3402777777777778, + "acc_stderr": 0.03962135573486219, + "acc_norm": 0.3402777777777778, + "acc_norm_stderr": 0.03962135573486219 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.3959537572254335, + "acc_stderr": 0.026329813341946243, + "acc_norm": 0.3959537572254335, + "acc_norm_stderr": 0.026329813341946243 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4294478527607362, + "acc_stderr": 0.03889066619112723, + "acc_norm": 0.4294478527607362, + "acc_norm_stderr": 0.03889066619112723 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4537037037037037, + "acc_stderr": 0.027701228468542602, + "acc_norm": 0.4537037037037037, + "acc_norm_stderr": 0.027701228468542602 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.43523316062176165, + "acc_stderr": 0.035780381650085846, + "acc_norm": 0.43523316062176165, + "acc_norm_stderr": 0.035780381650085846 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3157894736842105, + "acc_stderr": 0.043727482902780085, + "acc_norm": 0.3157894736842105, + "acc_norm_stderr": 0.043727482902780085 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.46972477064220186, + "acc_stderr": 0.021397988604936965, + "acc_norm": 0.46972477064220186, + "acc_norm_stderr": 0.021397988604936965 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.49206349206349204, + "acc_stderr": 
0.044715725362943486, + "acc_norm": 0.49206349206349204, + "acc_norm_stderr": 0.044715725362943486 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5196078431372549, + "acc_stderr": 0.028607893699576066, + "acc_norm": 0.5196078431372549, + "acc_norm_stderr": 0.028607893699576066 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6033057851239669, + "acc_stderr": 0.044658697805310094, + "acc_norm": 0.6033057851239669, + "acc_norm_stderr": 0.044658697805310094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4407894736842105, + "acc_stderr": 0.04040311062490435, + "acc_norm": 0.4407894736842105, + "acc_norm_stderr": 0.04040311062490435 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3627450980392157, + "acc_stderr": 0.01945076843250551, + "acc_norm": 0.3627450980392157, + "acc_norm_stderr": 0.01945076843250551 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.38652482269503546, + "acc_stderr": 0.02904919034254345, + "acc_norm": 0.38652482269503546, + "acc_norm_stderr": 0.02904919034254345 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4017857142857143, + "acc_stderr": 0.04653333146973646, + "acc_norm": 0.4017857142857143, + "acc_norm_stderr": 0.04653333146973646 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.03214952147802748, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.03214952147802748 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2849162011173184, + "acc_stderr": 0.015096222302469799, + "acc_norm": 0.2849162011173184, + "acc_norm_stderr": 0.015096222302469799 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + 
"harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.66, + "acc_stderr": 0.04760952285695237, + "acc_norm": 0.66, + "acc_norm_stderr": 0.04760952285695237 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.30514705882352944, + "acc_stderr": 0.027971541370170598, + "acc_norm": 0.30514705882352944, + "acc_norm_stderr": 0.027971541370170598 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.49387755102040815, + "acc_stderr": 0.03200682020163909, + "acc_norm": 0.49387755102040815, + "acc_norm_stderr": 0.03200682020163909 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.4936708860759494, + "acc_stderr": 0.03254462010767859, + "acc_norm": 0.4936708860759494, + "acc_norm_stderr": 0.03254462010767859 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.32333767926988266, + "acc_stderr": 0.011946565758447204, + "acc_norm": 0.32333767926988266, + "acc_norm_stderr": 0.011946565758447204 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.0346022832723917, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.0346022832723917 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4, + "acc_stderr": 0.03825460278380026, + "acc_norm": 0.4, + "acc_norm_stderr": 0.03825460278380026 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.21052631578947367, + "mc1_stderr": 0.0142717406459642, + "mc2": 0.35458851632243527, + "mc2_stderr": 0.015230521359958909 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.31286894923258557, + "acc_stderr": 0.01594101011830266, + "acc_norm": 0.3530106257378985, + "acc_norm_stderr": 0.016430745982427126 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 
1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + 
"harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "failspy/Phi-3-medium-4k-instruct-abliterated-v3", + "model_sha": "959b09eacf6cae85a8eb21b25e998addc89a367b", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/fearlessdots/WizardLM-2-7B-abliterated/result_2024-05-25 13:37:53.json b/fearlessdots/WizardLM-2-7B-abliterated/result_2024-05-25 13:37:53.json new file mode 100644 index 0000000000000000000000000000000000000000..1d3909cef0182b931e8cf4ba0e5c7113b8f3e058 --- /dev/null +++ b/fearlessdots/WizardLM-2-7B-abliterated/result_2024-05-25 13:37:53.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.35494880546075086, + "acc_stderr": 0.013983036904094095, + "acc_norm": 0.4112627986348123, + "acc_norm_stderr": 0.014379441068522082 + }, + "harness|ko_hellaswag|10": { + "acc": 0.38259310894244175, + "acc_stderr": 0.004850268986903353, + "acc_norm": 0.4810794662417845, + "acc_norm_stderr": 0.004986207581862929 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4678362573099415, + "acc_stderr": 0.038268824176603704, + "acc_norm": 0.4678362573099415, + "acc_norm_stderr": 0.038268824176603704 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5825242718446602, + "acc_stderr": 0.04882840548212238, + "acc_norm": 0.5825242718446602, + "acc_norm_stderr": 0.04882840548212238 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3946360153256705, + "acc_stderr": 0.017478464305911542, + "acc_norm": 0.3946360153256705, + "acc_norm_stderr": 
0.017478464305911542 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34074074074074073, + "acc_stderr": 0.040943762699967946, + "acc_norm": 0.34074074074074073, + "acc_norm_stderr": 0.040943762699967946 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3148936170212766, + "acc_stderr": 0.030363582197238167, + "acc_norm": 0.3148936170212766, + "acc_norm_stderr": 0.030363582197238167 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39156626506024095, + "acc_stderr": 0.037998574544796354, + "acc_norm": 0.39156626506024095, + "acc_norm_stderr": 0.037998574544796354 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4630225080385852, + "acc_stderr": 0.02832032583010591, + "acc_norm": 0.4630225080385852, + "acc_norm_stderr": 0.02832032583010591 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.43946188340807174, + "acc_stderr": 0.03331092511038179, + "acc_norm": 0.43946188340807174, + "acc_norm_stderr": 0.03331092511038179 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4198473282442748, + "acc_stderr": 0.04328577215262972, + "acc_norm": 0.4198473282442748, + "acc_norm_stderr": 0.04328577215262972 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.4898989898989899, + "acc_stderr": 0.035616254886737454, + "acc_norm": 0.4898989898989899, + "acc_norm_stderr": 0.035616254886737454 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.46206896551724136, + "acc_stderr": 0.041546596717075474, + "acc_norm": 0.46206896551724136, + "acc_norm_stderr": 0.041546596717075474 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.04158307533083286, + "acc_norm": 0.22549019607843138, + 
"acc_norm_stderr": 0.04158307533083286 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.49159663865546216, + "acc_stderr": 0.032473902765696686, + "acc_norm": 0.49159663865546216, + "acc_norm_stderr": 0.032473902765696686 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.43333333333333335, + "acc_stderr": 0.02512465352588513, + "acc_norm": 0.43333333333333335, + "acc_norm_stderr": 0.02512465352588513 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3448275862068966, + "acc_stderr": 0.03344283744280458, + "acc_norm": 0.3448275862068966, + "acc_norm_stderr": 0.03344283744280458 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4290322580645161, + "acc_stderr": 0.028156036538233217, + "acc_norm": 0.4290322580645161, + "acc_norm_stderr": 0.028156036538233217 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6794871794871795, + "acc_stderr": 0.030572811310299607, + "acc_norm": 0.6794871794871795, + "acc_norm_stderr": 0.030572811310299607 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4226415094339623, + "acc_stderr": 0.03040233144576954, + "acc_norm": 0.4226415094339623, + "acc_norm_stderr": 0.03040233144576954 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4727272727272727, + "acc_stderr": 0.04782001791380063, + "acc_norm": 0.4727272727272727, + "acc_norm_stderr": 0.04782001791380063 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2740740740740741, + "acc_stderr": 
0.027195934804085626, + "acc_norm": 0.2740740740740741, + "acc_norm_stderr": 0.027195934804085626 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.038020397601079024, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.038020397601079024 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6019900497512438, + "acc_stderr": 0.034611994290400135, + "acc_norm": 0.6019900497512438, + "acc_norm_stderr": 0.034611994290400135 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.36416184971098264, + "acc_stderr": 0.03669072477416907, + "acc_norm": 0.36416184971098264, + "acc_norm_stderr": 0.03669072477416907 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.335978835978836, + "acc_stderr": 0.024326310529149138, + "acc_norm": 0.335978835978836, + "acc_norm_stderr": 0.024326310529149138 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3263888888888889, + "acc_stderr": 0.03921067198982266, + "acc_norm": 0.3263888888888889, + "acc_norm_stderr": 0.03921067198982266 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956913, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956913 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5202312138728323, + "acc_stderr": 0.02689704999638286, + "acc_norm": 0.5202312138728323, + "acc_norm_stderr": 0.02689704999638286 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4601226993865031, + "acc_stderr": 0.0391585729143697, + "acc_norm": 0.4601226993865031, + "acc_norm_stderr": 0.0391585729143697 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.43209876543209874, + "acc_stderr": 0.027563010971606683, + "acc_norm": 0.43209876543209874, + "acc_norm_stderr": 0.027563010971606683 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + 
"acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.46632124352331605, + "acc_stderr": 0.036002440698671784, + "acc_norm": 0.46632124352331605, + "acc_norm_stderr": 0.036002440698671784 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.30701754385964913, + "acc_stderr": 0.0433913832257986, + "acc_norm": 0.30701754385964913, + "acc_norm_stderr": 0.0433913832257986 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.46788990825688076, + "acc_stderr": 0.021393071222680814, + "acc_norm": 0.46788990825688076, + "acc_norm_stderr": 0.021393071222680814 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.373015873015873, + "acc_stderr": 0.04325506042017086, + "acc_norm": 0.373015873015873, + "acc_norm_stderr": 0.04325506042017086 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.477124183006536, + "acc_stderr": 0.028599936776089786, + "acc_norm": 0.477124183006536, + "acc_norm_stderr": 0.028599936776089786 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.628099173553719, + "acc_stderr": 0.04412015806624504, + "acc_norm": 0.628099173553719, + "acc_norm_stderr": 0.04412015806624504 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3618421052631579, + "acc_stderr": 0.03910525752849724, + "acc_norm": 0.3618421052631579, + "acc_norm_stderr": 0.03910525752849724 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.01943177567703731, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.01943177567703731 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3617021276595745, + "acc_stderr": 0.02866382014719949, + "acc_norm": 0.3617021276595745, + "acc_norm_stderr": 0.02866382014719949 + }, + 
"harness|ko_mmlu_machine_learning|5": { + "acc": 0.4017857142857143, + "acc_stderr": 0.04653333146973646, + "acc_norm": 0.4017857142857143, + "acc_norm_stderr": 0.04653333146973646 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.33796296296296297, + "acc_stderr": 0.03225941352631295, + "acc_norm": 0.33796296296296297, + "acc_norm_stderr": 0.03225941352631295 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.26033519553072626, + "acc_stderr": 0.014676252009319482, + "acc_norm": 0.26033519553072626, + "acc_norm_stderr": 0.014676252009319482 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.57, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.57, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.29044117647058826, + "acc_stderr": 0.0275764686227405, + "acc_norm": 0.29044117647058826, + "acc_norm_stderr": 0.0275764686227405 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5265306122448979, + "acc_stderr": 0.03196412734523272, + "acc_norm": 0.5265306122448979, + "acc_norm_stderr": 0.03196412734523272 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5527426160337553, + "acc_stderr": 0.03236564251614193, + "acc_norm": 0.5527426160337553, + "acc_norm_stderr": 0.03236564251614193 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.32790091264667537, + "acc_stderr": 0.011989936640666546, + "acc_norm": 0.32790091264667537, + "acc_norm_stderr": 0.011989936640666546 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.03308611113236435, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.03308611113236435 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2727272727272727, + "acc_stderr": 0.0347769116216366, + 
"acc_norm": 0.2727272727272727, + "acc_norm_stderr": 0.0347769116216366 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.34761321909424725, + "mc1_stderr": 0.016670769188897306, + "mc2": 0.5461172108179434, + "mc2_stderr": 0.016374028430073344 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.40731995277449823, + "acc_stderr": 0.01689245669519127, + "acc_norm": 0.42384887839433294, + "acc_norm_stderr": 0.016989810834628253 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + 
"harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "fearlessdots/WizardLM-2-7B-abliterated", + "model_sha": "c329338a5d8371dd7c1bf965ab54487a1854673f", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/fiveflow/ATOMM-v0.1/result_2024-04-09 06:10:01.json b/fiveflow/ATOMM-v0.1/result_2024-04-09 06:10:01.json new file mode 100644 index 0000000000000000000000000000000000000000..947a4ed6016dd7ca4c72c1f66a3f5b92e95db32c --- /dev/null +++ b/fiveflow/ATOMM-v0.1/result_2024-04-09 06:10:01.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.32081911262798635, + "acc_stderr": 0.013640943091946526, + "acc_norm": 0.35580204778157, + "acc_norm_stderr": 0.013990571137918762 + }, + 
"harness|ko_hellaswag|10": { + "acc": 0.35590519816769567, + "acc_stderr": 0.004778081784542411, + "acc_norm": 0.4561840270862378, + "acc_norm_stderr": 0.0049705853282976204 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4093567251461988, + "acc_stderr": 0.037712831076265434, + "acc_norm": 0.4093567251461988, + "acc_norm_stderr": 0.037712831076265434 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.49514563106796117, + "acc_stderr": 0.049505043821289195, + "acc_norm": 0.49514563106796117, + "acc_norm_stderr": 0.049505043821289195 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4355044699872286, + "acc_stderr": 0.017730589927926605, + "acc_norm": 0.4355044699872286, + "acc_norm_stderr": 0.017730589927926605 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.362962962962963, + "acc_stderr": 0.04153948404742398, + "acc_norm": 0.362962962962963, + "acc_norm_stderr": 0.04153948404742398 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3446808510638298, + "acc_stderr": 0.031068985963122155, + "acc_norm": 0.3446808510638298, + "acc_norm_stderr": 0.031068985963122155 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4036144578313253, + "acc_stderr": 0.03819486140758397, + "acc_norm": 0.4036144578313253, + "acc_norm_stderr": 0.03819486140758397 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4405144694533762, + "acc_stderr": 0.028196400574197426, + "acc_norm": 0.4405144694533762, + "acc_norm_stderr": 0.028196400574197426 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4125560538116592, + "acc_stderr": 0.03304062175449297, + "acc_norm": 0.4125560538116592, + "acc_norm_stderr": 0.03304062175449297 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3893129770992366, + "acc_stderr": 0.04276486542814591, + "acc_norm": 0.3893129770992366, + "acc_norm_stderr": 
0.04276486542814591 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5, + "acc_stderr": 0.035623524993954825, + "acc_norm": 0.5, + "acc_norm_stderr": 0.035623524993954825 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.46206896551724136, + "acc_stderr": 0.041546596717075474, + "acc_norm": 0.46206896551724136, + "acc_norm_stderr": 0.041546596717075474 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237654, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237654 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.41596638655462187, + "acc_stderr": 0.03201650100739615, + "acc_norm": 0.41596638655462187, + "acc_norm_stderr": 0.03201650100739615 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.36666666666666664, + "acc_stderr": 0.02443301646605245, + "acc_norm": 0.36666666666666664, + "acc_norm_stderr": 0.02443301646605245 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5185185185185185, + "acc_stderr": 0.0483036602463533, + "acc_norm": 0.5185185185185185, + "acc_norm_stderr": 0.0483036602463533 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.35960591133004927, + "acc_stderr": 0.03376458246509567, + "acc_norm": 0.35960591133004927, + "acc_norm_stderr": 0.03376458246509567 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.38387096774193546, + "acc_stderr": 0.027666182075539638, + "acc_norm": 0.38387096774193546, + 
"acc_norm_stderr": 0.027666182075539638 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6068376068376068, + "acc_stderr": 0.03199957924651048, + "acc_norm": 0.6068376068376068, + "acc_norm_stderr": 0.03199957924651048 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4490566037735849, + "acc_stderr": 0.030612730713641095, + "acc_norm": 0.4490566037735849, + "acc_norm_stderr": 0.030612730713641095 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4636363636363636, + "acc_stderr": 0.04776449162396197, + "acc_norm": 0.4636363636363636, + "acc_norm_stderr": 0.04776449162396197 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3111111111111111, + "acc_stderr": 0.02822644674968352, + "acc_norm": 0.3111111111111111, + "acc_norm_stderr": 0.02822644674968352 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + "acc_stderr": 0.037345356767871984, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.037345356767871984 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5522388059701493, + "acc_stderr": 0.03516184772952167, + "acc_norm": 0.5522388059701493, + "acc_norm_stderr": 0.03516184772952167 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.37572254335260113, + "acc_stderr": 0.03692820767264867, + "acc_norm": 0.37572254335260113, + "acc_norm_stderr": 0.03692820767264867 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.291005291005291, + "acc_stderr": 0.023393826500484875, + "acc_norm": 0.291005291005291, + "acc_norm_stderr": 0.023393826500484875 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.037455547914624555, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.037455547914624555 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.58, + "acc_stderr": 
0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.43352601156069365, + "acc_stderr": 0.026680134761679214, + "acc_norm": 0.43352601156069365, + "acc_norm_stderr": 0.026680134761679214 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3987730061349693, + "acc_stderr": 0.038470214204560246, + "acc_norm": 0.3987730061349693, + "acc_norm_stderr": 0.038470214204560246 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.38580246913580246, + "acc_stderr": 0.027085401226132143, + "acc_norm": 0.38580246913580246, + "acc_norm_stderr": 0.027085401226132143 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.46632124352331605, + "acc_stderr": 0.036002440698671784, + "acc_norm": 0.46632124352331605, + "acc_norm_stderr": 0.036002440698671784 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04434600701584925, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04434600701584925 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.42018348623853213, + "acc_stderr": 0.02116242004827352, + "acc_norm": 0.42018348623853213, + "acc_norm_stderr": 0.02116242004827352 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.04285714285714281, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.04285714285714281 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4542483660130719, + "acc_stderr": 0.028509807802626567, + "acc_norm": 0.4542483660130719, + "acc_norm_stderr": 0.028509807802626567 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 
0.5785123966942148, + "acc_stderr": 0.04507732278775088, + "acc_norm": 0.5785123966942148, + "acc_norm_stderr": 0.04507732278775088 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3026315789473684, + "acc_stderr": 0.03738520676119669, + "acc_norm": 0.3026315789473684, + "acc_norm_stderr": 0.03738520676119669 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.29901960784313725, + "acc_stderr": 0.018521756215423024, + "acc_norm": 0.29901960784313725, + "acc_norm_stderr": 0.018521756215423024 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.30851063829787234, + "acc_stderr": 0.027553366165101362, + "acc_norm": 0.30851063829787234, + "acc_norm_stderr": 0.027553366165101362 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.042878587513404544, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.042878587513404544 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.03362277436608043, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.03362277436608043 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.3016759776536313, + "acc_stderr": 0.015350767572220286, + "acc_norm": 0.3016759776536313, + "acc_norm_stderr": 0.015350767572220286 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.34558823529411764, + "acc_stderr": 0.02888819310398864, + "acc_norm": 0.34558823529411764, + "acc_norm_stderr": 0.02888819310398864 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4489795918367347, + "acc_stderr": 0.0318421386668758, + "acc_norm": 0.4489795918367347, + "acc_norm_stderr": 0.0318421386668758 + }, + 
"harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.48945147679324896, + "acc_stderr": 0.032539983791662855, + "acc_norm": 0.48945147679324896, + "acc_norm_stderr": 0.032539983791662855 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2842242503259452, + "acc_stderr": 0.011519880596516074, + "acc_norm": 0.2842242503259452, + "acc_norm_stderr": 0.011519880596516074 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.30392156862745096, + "acc_stderr": 0.032282103870378935, + "acc_norm": 0.30392156862745096, + "acc_norm_stderr": 0.032282103870378935 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.30303030303030304, + "acc_stderr": 0.03588624800091708, + "acc_norm": 0.30303030303030304, + "acc_norm_stderr": 0.03588624800091708 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.29008567931456547, + "mc1_stderr": 0.01588623687420952, + "mc2": 0.46277872435725287, + "mc2_stderr": 0.015366930810987021 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.36835891381345925, + "acc_stderr": 0.016583858982639074, + "acc_norm": 0.45218417945690675, + "acc_norm_stderr": 0.017111567130916782 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + 
"harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "fiveflow/ATOMM-v0.1", + "model_sha": "a53e4f6a500767dfb96bd86aeb103bd6957068b2", + 
"model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/fiveflow/ATOMM-v0.18/result_2024-04-17 08:58:55.json b/fiveflow/ATOMM-v0.18/result_2024-04-17 08:58:55.json new file mode 100644 index 0000000000000000000000000000000000000000..2697232a0c1cdba8b742791ffcb17d23fe294dad --- /dev/null +++ b/fiveflow/ATOMM-v0.18/result_2024-04-17 08:58:55.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3412969283276451, + "acc_stderr": 0.01385583128749772, + "acc_norm": 0.38993174061433444, + "acc_norm_stderr": 0.014252959848892893 + }, + "harness|ko_hellaswag|10": { + "acc": 0.37562238597888864, + "acc_stderr": 0.0048329345291207955, + "acc_norm": 0.4881497709619598, + "acc_norm_stderr": 0.004988379805261158 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.52046783625731, + "acc_stderr": 0.0383161053282193, + "acc_norm": 0.52046783625731, + "acc_norm_stderr": 0.0383161053282193 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5436893203883495, + "acc_stderr": 0.049318019942204146, + "acc_norm": 0.5436893203883495, + "acc_norm_stderr": 0.049318019942204146 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.49680715197956576, + "acc_stderr": 0.017879598945933068, + "acc_norm": 0.49680715197956576, + "acc_norm_stderr": 0.017879598945933068 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.04171654161354543, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.04171654161354543 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3617021276595745, + "acc_stderr": 0.03141082197596239, + "acc_norm": 0.3617021276595745, + "acc_norm_stderr": 0.03141082197596239 + }, + 
"harness|ko_mmlu_virology|5": { + "acc": 0.39759036144578314, + "acc_stderr": 0.038099730845402184, + "acc_norm": 0.39759036144578314, + "acc_norm_stderr": 0.038099730845402184 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4662379421221865, + "acc_stderr": 0.028333277109562786, + "acc_norm": 0.4662379421221865, + "acc_norm_stderr": 0.028333277109562786 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.45739910313901344, + "acc_stderr": 0.033435777055830646, + "acc_norm": 0.45739910313901344, + "acc_norm_stderr": 0.033435777055830646 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.46564885496183206, + "acc_stderr": 0.043749285605997376, + "acc_norm": 0.46564885496183206, + "acc_norm_stderr": 0.043749285605997376 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5656565656565656, + "acc_stderr": 0.03531505879359183, + "acc_norm": 0.5656565656565656, + "acc_norm_stderr": 0.03531505879359183 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4827586206896552, + "acc_stderr": 0.04164188720169377, + "acc_norm": 0.4827586206896552, + "acc_norm_stderr": 0.04164188720169377 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.04617034827006717, + "acc_norm": 0.3137254901960784, + "acc_norm_stderr": 0.04617034827006717 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4831932773109244, + "acc_stderr": 0.03246013680375308, + "acc_norm": 0.4831932773109244, + "acc_norm_stderr": 0.03246013680375308 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4128205128205128, + "acc_stderr": 0.024962683564331817, + "acc_norm": 0.4128205128205128, + "acc_norm_stderr": 0.024962683564331817 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + 
"acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.04793724854411018, + "acc_norm": 0.35, + "acc_norm_stderr": 0.04793724854411018 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5370370370370371, + "acc_stderr": 0.04820403072760627, + "acc_norm": 0.5370370370370371, + "acc_norm_stderr": 0.04820403072760627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.03481904844438803, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.03481904844438803 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4612903225806452, + "acc_stderr": 0.028358634859836925, + "acc_norm": 0.4612903225806452, + "acc_norm_stderr": 0.028358634859836925 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6923076923076923, + "acc_stderr": 0.030236389942173085, + "acc_norm": 0.6923076923076923, + "acc_norm_stderr": 0.030236389942173085 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.45660377358490567, + "acc_stderr": 0.030656748696739428, + "acc_norm": 0.45660377358490567, + "acc_norm_stderr": 0.030656748696739428 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5272727272727272, + "acc_stderr": 0.04782001791380061, + "acc_norm": 0.5272727272727272, + "acc_norm_stderr": 0.04782001791380061 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.34814814814814815, + "acc_stderr": 0.029045600290616265, + "acc_norm": 0.34814814814814815, + "acc_norm_stderr": 0.029045600290616265 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.038020397601079024, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.038020397601079024 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5920398009950248, + "acc_stderr": 0.03475116365194092, + "acc_norm": 0.5920398009950248, + "acc_norm_stderr": 0.03475116365194092 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.34104046242774566, + 
"acc_stderr": 0.03614665424180826, + "acc_norm": 0.34104046242774566, + "acc_norm_stderr": 0.03614665424180826 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.36243386243386244, + "acc_stderr": 0.02475747390275206, + "acc_norm": 0.36243386243386244, + "acc_norm_stderr": 0.02475747390275206 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.040166600304512336, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.040166600304512336 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5289017341040463, + "acc_stderr": 0.026874085883518348, + "acc_norm": 0.5289017341040463, + "acc_norm_stderr": 0.026874085883518348 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.44785276073619634, + "acc_stderr": 0.03906947479456601, + "acc_norm": 0.44785276073619634, + "acc_norm_stderr": 0.03906947479456601 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.42592592592592593, + "acc_stderr": 0.027513747284379414, + "acc_norm": 0.42592592592592593, + "acc_norm_stderr": 0.027513747284379414 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.047609522856952344, + "acc_norm": 0.34, + "acc_norm_stderr": 0.047609522856952344 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5129533678756477, + "acc_stderr": 0.03607228061047749, + "acc_norm": 0.5129533678756477, + "acc_norm_stderr": 0.03607228061047749 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.32456140350877194, + "acc_stderr": 0.044045561573747685, + "acc_norm": 0.32456140350877194, + "acc_norm_stderr": 0.044045561573747685 + }, + "harness|ko_mmlu_high_school_psychology|5": { + 
"acc": 0.48256880733944957, + "acc_stderr": 0.02142429187185315, + "acc_norm": 0.48256880733944957, + "acc_norm_stderr": 0.02142429187185315 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.04360314860077459, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.04360314860077459 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4673202614379085, + "acc_stderr": 0.02856869975222588, + "acc_norm": 0.4673202614379085, + "acc_norm_stderr": 0.02856869975222588 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.043913262867240704, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.043913262867240704 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3881578947368421, + "acc_stderr": 0.03965842097512744, + "acc_norm": 0.3881578947368421, + "acc_norm_stderr": 0.03965842097512744 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.01972205893961806, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.01972205893961806 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.028121636040639882, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.028121636040639882 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.32142857142857145, + "acc_stderr": 0.0443280405529152, + "acc_norm": 0.32142857142857145, + "acc_norm_stderr": 0.0443280405529152 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3425925925925926, + "acc_stderr": 0.03236585252602158, + "acc_norm": 0.3425925925925926, + "acc_norm_stderr": 0.03236585252602158 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2871508379888268, + "acc_stderr": 0.015131608849963753, + "acc_norm": 0.2871508379888268, + "acc_norm_stderr": 0.015131608849963753 
+ }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.53, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.53, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3382352941176471, + "acc_stderr": 0.028739328513983572, + "acc_norm": 0.3382352941176471, + "acc_norm_stderr": 0.028739328513983572 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5265306122448979, + "acc_stderr": 0.03196412734523272, + "acc_norm": 0.5265306122448979, + "acc_norm_stderr": 0.03196412734523272 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5485232067510548, + "acc_stderr": 0.032393600173974704, + "acc_norm": 0.5485232067510548, + "acc_norm_stderr": 0.032393600173974704 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.31029986962190353, + "acc_stderr": 0.011815439293469825, + "acc_norm": 0.31029986962190353, + "acc_norm_stderr": 0.011815439293469825 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.36764705882352944, + "acc_stderr": 0.03384132045674118, + "acc_norm": 0.36764705882352944, + "acc_norm_stderr": 0.03384132045674118 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.43636363636363634, + "acc_stderr": 0.03872592983524754, + "acc_norm": 0.43636363636363634, + "acc_norm_stderr": 0.03872592983524754 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3023255813953488, + "mc1_stderr": 0.016077509266133033, + "mc2": 0.47764821240544986, + "mc2_stderr": 0.01561688253091318 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4085005903187721, + "acc_stderr": 0.016900062879427115, + "acc_norm": 0.46162927981109797, + "acc_norm_stderr": 0.017139660221845567 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + 
"harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + 
"harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "fiveflow/ATOMM-v0.18", + "model_sha": "7fed121a5d159afdbdbfea49a3c494c3d52829a6", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/fiveflow/KoLlama-3-8B-Instruct/result_2024-04-30 09:44:01.json b/fiveflow/KoLlama-3-8B-Instruct/result_2024-04-30 09:44:01.json new file mode 100644 index 0000000000000000000000000000000000000000..b5f8a32f32d26af58723e651bdc96d53adec4a03 --- /dev/null +++ b/fiveflow/KoLlama-3-8B-Instruct/result_2024-04-30 09:44:01.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.35494880546075086, + "acc_stderr": 0.01398303690409409, + "acc_norm": 0.41638225255972694, + "acc_norm_stderr": 0.014405618279436174 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3646683927504481, + "acc_stderr": 0.004803533333364227, + "acc_norm": 0.4731129257120096, + "acc_norm_stderr": 0.004982561815214123 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5789473684210527, + "acc_stderr": 0.03786720706234214, + "acc_norm": 0.5789473684210527, + "acc_norm_stderr": 0.03786720706234214 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5533980582524272, + "acc_stderr": 0.04922424153458933, + "acc_norm": 0.5533980582524272, + "acc_norm_stderr": 
0.04922424153458933 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4891443167305236, + "acc_stderr": 0.017875748840242418, + "acc_norm": 0.4891443167305236, + "acc_norm_stderr": 0.017875748840242418 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37777777777777777, + "acc_stderr": 0.04188307537595853, + "acc_norm": 0.37777777777777777, + "acc_norm_stderr": 0.04188307537595853 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.46382978723404256, + "acc_stderr": 0.032600385118357715, + "acc_norm": 0.46382978723404256, + "acc_norm_stderr": 0.032600385118357715 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.35542168674698793, + "acc_stderr": 0.03726214354322415, + "acc_norm": 0.35542168674698793, + "acc_norm_stderr": 0.03726214354322415 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5080385852090032, + "acc_stderr": 0.02839442137098453, + "acc_norm": 0.5080385852090032, + "acc_norm_stderr": 0.02839442137098453 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5022421524663677, + "acc_stderr": 0.03355746535223263, + "acc_norm": 0.5022421524663677, + "acc_norm_stderr": 0.03355746535223263 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5038167938931297, + "acc_stderr": 0.04385162325601553, + "acc_norm": 0.5038167938931297, + "acc_norm_stderr": 0.04385162325601553 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.52, + "acc_stderr": 0.05021167315686781, + "acc_norm": 0.52, + "acc_norm_stderr": 0.05021167315686781 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5353535353535354, + "acc_stderr": 0.03553436368828064, + "acc_norm": 0.5353535353535354, + "acc_norm_stderr": 0.03553436368828064 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5724137931034483, + "acc_stderr": 0.04122737111370332, + "acc_norm": 0.5724137931034483, + 
"acc_norm_stderr": 0.04122737111370332 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.04617034827006715, + "acc_norm": 0.3137254901960784, + "acc_norm_stderr": 0.04617034827006715 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.49159663865546216, + "acc_stderr": 0.0324739027656967, + "acc_norm": 0.49159663865546216, + "acc_norm_stderr": 0.0324739027656967 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.47692307692307695, + "acc_stderr": 0.025323990861736125, + "acc_norm": 0.47692307692307695, + "acc_norm_stderr": 0.025323990861736125 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6203703703703703, + "acc_stderr": 0.04691521224077742, + "acc_norm": 0.6203703703703703, + "acc_norm_stderr": 0.04691521224077742 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.034819048444388045, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.034819048444388045 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5258064516129032, + "acc_stderr": 0.02840609505765332, + "acc_norm": 0.5258064516129032, + "acc_norm_stderr": 0.02840609505765332 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7307692307692307, + "acc_stderr": 0.029058588303748845, + "acc_norm": 0.7307692307692307, + "acc_norm_stderr": 0.029058588303748845 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5018867924528302, + "acc_stderr": 0.030772653642075664, + "acc_norm": 0.5018867924528302, + "acc_norm_stderr": 0.030772653642075664 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5, + "acc_stderr": 0.04789131426105757, + 
"acc_norm": 0.5, + "acc_norm_stderr": 0.04789131426105757 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.36666666666666664, + "acc_stderr": 0.029381620726465076, + "acc_norm": 0.36666666666666664, + "acc_norm_stderr": 0.029381620726465076 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.36423841059602646, + "acc_stderr": 0.03929111781242742, + "acc_norm": 0.36423841059602646, + "acc_norm_stderr": 0.03929111781242742 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6368159203980099, + "acc_stderr": 0.03400598505599015, + "acc_norm": 0.6368159203980099, + "acc_norm_stderr": 0.03400598505599015 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.41040462427745666, + "acc_stderr": 0.03750757044895538, + "acc_norm": 0.41040462427745666, + "acc_norm_stderr": 0.03750757044895538 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.35978835978835977, + "acc_stderr": 0.024718075944129277, + "acc_norm": 0.35978835978835977, + "acc_norm_stderr": 0.024718075944129277 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.041553199555931467, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.041553199555931467 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.69, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.69, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.49710982658959535, + "acc_stderr": 0.02691864538323901, + "acc_norm": 0.49710982658959535, + "acc_norm_stderr": 0.02691864538323901 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4233128834355828, + "acc_stderr": 0.03881891213334382, + "acc_norm": 0.4233128834355828, + "acc_norm_stderr": 0.03881891213334382 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5154320987654321, + "acc_stderr": 
0.027807490044276215, + "acc_norm": 0.5154320987654321, + "acc_norm_stderr": 0.027807490044276215 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.538860103626943, + "acc_stderr": 0.035975244117345775, + "acc_norm": 0.538860103626943, + "acc_norm_stderr": 0.035975244117345775 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.35964912280701755, + "acc_stderr": 0.04514496132873633, + "acc_norm": 0.35964912280701755, + "acc_norm_stderr": 0.04514496132873633 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5266055045871559, + "acc_stderr": 0.021406952688151577, + "acc_norm": 0.5266055045871559, + "acc_norm_stderr": 0.021406952688151577 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4126984126984127, + "acc_stderr": 0.04403438954768177, + "acc_norm": 0.4126984126984127, + "acc_norm_stderr": 0.04403438954768177 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5032679738562091, + "acc_stderr": 0.028629305194003543, + "acc_norm": 0.5032679738562091, + "acc_norm_stderr": 0.028629305194003543 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.63, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.63, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6528925619834711, + "acc_stderr": 0.043457245702925355, + "acc_norm": 0.6528925619834711, + "acc_norm_stderr": 0.043457245702925355 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4473684210526316, + "acc_stderr": 0.04046336883978251, + "acc_norm": 0.4473684210526316, + "acc_norm_stderr": 0.04046336883978251 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4150326797385621, + "acc_stderr": 0.01993362777685742, + "acc_norm": 0.4150326797385621, + "acc_norm_stderr": 0.01993362777685742 + }, + "harness|ko_mmlu_professional_accounting|5": { + 
"acc": 0.35106382978723405, + "acc_stderr": 0.028473501272963764, + "acc_norm": 0.35106382978723405, + "acc_norm_stderr": 0.028473501272963764 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.45535714285714285, + "acc_stderr": 0.04726835553719099, + "acc_norm": 0.45535714285714285, + "acc_norm_stderr": 0.04726835553719099 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.38425925925925924, + "acc_stderr": 0.03317354514310742, + "acc_norm": 0.38425925925925924, + "acc_norm_stderr": 0.03317354514310742 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2770949720670391, + "acc_stderr": 0.014968772435812142, + "acc_norm": 0.2770949720670391, + "acc_norm_stderr": 0.014968772435812142 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.68, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.68, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3713235294117647, + "acc_stderr": 0.02934980313976587, + "acc_norm": 0.3713235294117647, + "acc_norm_stderr": 0.02934980313976587 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5836734693877551, + "acc_stderr": 0.03155782816556166, + "acc_norm": 0.5836734693877551, + "acc_norm_stderr": 0.03155782816556166 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6118143459915611, + "acc_stderr": 0.0317229500433233, + "acc_norm": 0.6118143459915611, + "acc_norm_stderr": 0.0317229500433233 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.35853976531942633, + "acc_stderr": 0.012248487319682744, + "acc_norm": 0.35853976531942633, + "acc_norm_stderr": 0.012248487319682744 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5441176470588235, + "acc_stderr": 0.03495624522015476, + "acc_norm": 0.5441176470588235, + 
"acc_norm_stderr": 0.03495624522015476 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6121212121212121, + "acc_stderr": 0.0380491365397101, + "acc_norm": 0.6121212121212121, + "acc_norm_stderr": 0.0380491365397101 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.32558139534883723, + "mc1_stderr": 0.016403989469907815, + "mc2": 0.49165229345930406, + "mc2_stderr": 0.01566561229038012 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.47461629279811096, + "acc_stderr": 0.017168187201429253, + "acc_norm": 0.51357733175915, + "acc_norm_stderr": 0.01718401506040146 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + 
"harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "fiveflow/KoLlama-3-8B-Instruct", + "model_sha": "65fdc2d56fa64066449254621572dad3b291b17e", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/fiveflow/KoSOLAR-10.7B-Instruct-v0.1/result_2024-02-21 06:21:06.json b/fiveflow/KoSOLAR-10.7B-Instruct-v0.1/result_2024-02-21 06:21:06.json new file mode 100644 index 0000000000000000000000000000000000000000..d74e9d266c51f7752f72441e59bd14b47d4b431b --- /dev/null +++ b/fiveflow/KoSOLAR-10.7B-Instruct-v0.1/result_2024-02-21 06:21:06.json @@ -0,0 +1,444 
@@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4539249146757679, + "acc_stderr": 0.01454922110517187, + "acc_norm": 0.507679180887372, + "acc_norm_stderr": 0.014609667440892577 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4567815176259709, + "acc_stderr": 0.004971106265046565, + "acc_norm": 0.6223859788886676, + "acc_norm_stderr": 0.004837995637638548 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6374269005847953, + "acc_stderr": 0.0368713061556206, + "acc_norm": 0.6374269005847953, + "acc_norm_stderr": 0.0368713061556206 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6893203883495146, + "acc_stderr": 0.04582124160161551, + "acc_norm": 0.6893203883495146, + "acc_norm_stderr": 0.04582124160161551 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6602809706257982, + "acc_stderr": 0.016936394114301635, + "acc_norm": 0.6602809706257982, + "acc_norm_stderr": 0.016936394114301635 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4888888888888889, + "acc_stderr": 0.04318275491977976, + "acc_norm": 0.4888888888888889, + "acc_norm_stderr": 0.04318275491977976 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421255, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421255 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.5063829787234042, + "acc_stderr": 0.03268335899936335, + "acc_norm": 0.5063829787234042, + "acc_norm_stderr": 0.03268335899936335 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4939759036144578, + "acc_stderr": 0.03892212195333047, + "acc_norm": 0.4939759036144578, + "acc_norm_stderr": 0.03892212195333047 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6045016077170418, + "acc_stderr": 0.027770918531427838, + "acc_norm": 0.6045016077170418, + "acc_norm_stderr": 0.027770918531427838 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5515695067264574, + "acc_stderr": 0.03337883736255098, + "acc_norm": 0.5515695067264574, + "acc_norm_stderr": 
0.03337883736255098 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6259541984732825, + "acc_stderr": 0.04243869242230524, + "acc_norm": 0.6259541984732825, + "acc_norm_stderr": 0.04243869242230524 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956914, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956914 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7070707070707071, + "acc_stderr": 0.032424979581788194, + "acc_norm": 0.7070707070707071, + "acc_norm_stderr": 0.032424979581788194 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5379310344827586, + "acc_stderr": 0.04154659671707548, + "acc_norm": 0.5379310344827586, + "acc_norm_stderr": 0.04154659671707548 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.04488482852329017, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.04488482852329017 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6008403361344538, + "acc_stderr": 0.031811100324139245, + "acc_norm": 0.6008403361344538, + "acc_norm_stderr": 0.031811100324139245 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5128205128205128, + "acc_stderr": 0.02534267129380724, + "acc_norm": 0.5128205128205128, + "acc_norm_stderr": 0.02534267129380724 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6111111111111112, + "acc_stderr": 0.0471282125742677, + "acc_norm": 0.6111111111111112, + "acc_norm_stderr": 0.0471282125742677 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4039408866995074, + "acc_stderr": 0.03452453903822039, + "acc_norm": 0.4039408866995074, + 
"acc_norm_stderr": 0.03452453903822039 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5967741935483871, + "acc_stderr": 0.027906150826041146, + "acc_norm": 0.5967741935483871, + "acc_norm_stderr": 0.027906150826041146 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.8076923076923077, + "acc_stderr": 0.02581923325648371, + "acc_norm": 0.8076923076923077, + "acc_norm_stderr": 0.02581923325648371 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5132075471698113, + "acc_stderr": 0.030762134874500476, + "acc_norm": 0.5132075471698113, + "acc_norm_stderr": 0.030762134874500476 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5636363636363636, + "acc_stderr": 0.04750185058907296, + "acc_norm": 0.5636363636363636, + "acc_norm_stderr": 0.04750185058907296 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3592592592592593, + "acc_stderr": 0.029252905927251976, + "acc_norm": 0.3592592592592593, + "acc_norm_stderr": 0.029252905927251976 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.36423841059602646, + "acc_stderr": 0.03929111781242742, + "acc_norm": 0.36423841059602646, + "acc_norm_stderr": 0.03929111781242742 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7064676616915423, + "acc_stderr": 0.03220024104534205, + "acc_norm": 0.7064676616915423, + "acc_norm_stderr": 0.03220024104534205 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5086705202312138, + "acc_stderr": 0.03811890988940412, + "acc_norm": 0.5086705202312138, + "acc_norm_stderr": 0.03811890988940412 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3994708994708995, + "acc_stderr": 0.025225450284067877, + "acc_norm": 0.3994708994708995, + "acc_norm_stderr": 0.025225450284067877 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5, + "acc_stderr": 0.04181210050035455, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04181210050035455 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.35, + "acc_stderr": 
0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.75, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.75, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.6127167630057804, + "acc_stderr": 0.026226158605124655, + "acc_norm": 0.6127167630057804, + "acc_norm_stderr": 0.026226158605124655 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5398773006134969, + "acc_stderr": 0.03915857291436972, + "acc_norm": 0.5398773006134969, + "acc_norm_stderr": 0.03915857291436972 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6172839506172839, + "acc_stderr": 0.027044538138402602, + "acc_norm": 0.6172839506172839, + "acc_norm_stderr": 0.027044538138402602 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7046632124352331, + "acc_stderr": 0.03292296639155141, + "acc_norm": 0.7046632124352331, + "acc_norm_stderr": 0.03292296639155141 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.4473684210526316, + "acc_stderr": 0.04677473004491199, + "acc_norm": 0.4473684210526316, + "acc_norm_stderr": 0.04677473004491199 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6605504587155964, + "acc_stderr": 0.020302109342662352, + "acc_norm": 0.6605504587155964, + "acc_norm_stderr": 0.020302109342662352 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.04360314860077459, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.04360314860077459 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5882352941176471, + "acc_stderr": 0.02818059632825929, + "acc_norm": 0.5882352941176471, + "acc_norm_stderr": 0.02818059632825929 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.6, + "acc_stderr": 
0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7024793388429752, + "acc_stderr": 0.04173349148083499, + "acc_norm": 0.7024793388429752, + "acc_norm_stderr": 0.04173349148083499 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5723684210526315, + "acc_stderr": 0.04026097083296563, + "acc_norm": 0.5723684210526315, + "acc_norm_stderr": 0.04026097083296563 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5, + "acc_stderr": 0.020227834851568375, + "acc_norm": 0.5, + "acc_norm_stderr": 0.020227834851568375 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.41843971631205673, + "acc_stderr": 0.02942799403941999, + "acc_norm": 0.41843971631205673, + "acc_norm_stderr": 0.02942799403941999 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.39285714285714285, + "acc_stderr": 0.04635550135609976, + "acc_norm": 0.39285714285714285, + "acc_norm_stderr": 0.04635550135609976 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5185185185185185, + "acc_stderr": 0.034076320938540516, + "acc_norm": 0.5185185185185185, + "acc_norm_stderr": 0.034076320938540516 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2201117318435754, + "acc_stderr": 0.013856994024227175, + "acc_norm": 0.2201117318435754, + "acc_norm_stderr": 0.013856994024227175 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.7, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.7, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.45588235294117646, + "acc_stderr": 0.030254372573976694, + "acc_norm": 0.45588235294117646, + "acc_norm_stderr": 0.030254372573976694 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 
0.6571428571428571, + "acc_stderr": 0.030387262919547717, + "acc_norm": 0.6571428571428571, + "acc_norm_stderr": 0.030387262919547717 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7510548523206751, + "acc_stderr": 0.028146970599422644, + "acc_norm": 0.7510548523206751, + "acc_norm_stderr": 0.028146970599422644 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3878748370273794, + "acc_stderr": 0.01244499830967562, + "acc_norm": 0.3878748370273794, + "acc_norm_stderr": 0.01244499830967562 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.7009803921568627, + "acc_stderr": 0.03213325717373618, + "acc_norm": 0.7009803921568627, + "acc_norm_stderr": 0.03213325717373618 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6909090909090909, + "acc_stderr": 0.036085410115739666, + "acc_norm": 0.6909090909090909, + "acc_norm_stderr": 0.036085410115739666 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3072215422276622, + "mc1_stderr": 0.016150201321323002, + "mc2": 0.46640283409910266, + "mc2_stderr": 0.015290498007665954 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5903187721369539, + "acc_stderr": 0.01690756819221948, + "acc_norm": 0.6174734356552538, + "acc_norm_stderr": 0.01670916538722883 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 
1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + 
"model_name": "fiveflow/KoSOLAR-10.7B-Instruct-v0.1", + "model_sha": "62517164e47ec7337a38d864f1450b3808f1624e", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/fiveflow/koMistral-v0.1-neftune/result_2023-11-20 14:32:29.json b/fiveflow/koMistral-v0.1-neftune/result_2023-11-20 14:32:29.json new file mode 100644 index 0000000000000000000000000000000000000000..1430ec9a13b6fb3dbffe901f1481d5c66fd9653b --- /dev/null +++ b/fiveflow/koMistral-v0.1-neftune/result_2023-11-20 14:32:29.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.1885665529010239, + "acc_stderr": 0.011430897647675832, + "acc_norm": 0.23208191126279865, + "acc_norm_stderr": 0.012336718284948856 + }, + "harness|ko_hellaswag|10": { + "acc": 0.2704640509858594, + "acc_stderr": 0.004432917403755054, + "acc_norm": 0.28589922326229833, + "acc_norm_stderr": 0.004509181919322858 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.3567251461988304, + "acc_stderr": 0.03674013002860954, + "acc_norm": 0.3567251461988304, + "acc_norm_stderr": 0.03674013002860954 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.1650485436893204, + "acc_stderr": 0.03675668832233188, + "acc_norm": 0.1650485436893204, + "acc_norm_stderr": 0.03675668832233188 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.26181353767560667, + "acc_stderr": 0.015720838678445273, + "acc_norm": 0.26181353767560667, + "acc_norm_stderr": 0.015720838678445273 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.22962962962962963, + "acc_stderr": 0.03633384414073465, + "acc_norm": 0.22962962962962963, + "acc_norm_stderr": 0.03633384414073465 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 
0.2170212765957447, + "acc_stderr": 0.026947483121496238, + "acc_norm": 0.2170212765957447, + "acc_norm_stderr": 0.026947483121496238 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.26506024096385544, + "acc_stderr": 0.03436024037944967, + "acc_norm": 0.26506024096385544, + "acc_norm_stderr": 0.03436024037944967 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2057877813504823, + "acc_stderr": 0.022961339906764244, + "acc_norm": 0.2057877813504823, + "acc_norm_stderr": 0.022961339906764244 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.2825112107623318, + "acc_stderr": 0.03021683101150876, + "acc_norm": 0.2825112107623318, + "acc_norm_stderr": 0.03021683101150876 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2748091603053435, + "acc_stderr": 0.03915345408847835, + "acc_norm": 0.2748091603053435, + "acc_norm_stderr": 0.03915345408847835 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.17676767676767677, + "acc_stderr": 0.027178752639044915, + "acc_norm": 0.17676767676767677, + "acc_norm_stderr": 0.027178752639044915 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2620689655172414, + "acc_stderr": 0.036646663372252565, + "acc_norm": 0.2620689655172414, + "acc_norm_stderr": 0.036646663372252565 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237654, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237654 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.026265024608275882, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.026265024608275882 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.20256410256410257, + "acc_stderr": 0.020377660970371383, + "acc_norm": 0.20256410256410257, + "acc_norm_stderr": 
0.020377660970371383 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.23, + "acc_stderr": 0.042295258468165065, + "acc_norm": 0.23, + "acc_norm_stderr": 0.042295258468165065 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.04236511258094633, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.04236511258094633 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2660098522167488, + "acc_stderr": 0.031089826002937523, + "acc_norm": 0.2660098522167488, + "acc_norm_stderr": 0.031089826002937523 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2161290322580645, + "acc_stderr": 0.02341529343356852, + "acc_norm": 0.2161290322580645, + "acc_norm_stderr": 0.02341529343356852 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2905982905982906, + "acc_stderr": 0.029745048572674057, + "acc_norm": 0.2905982905982906, + "acc_norm_stderr": 0.029745048572674057 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2339622641509434, + "acc_stderr": 0.02605529690115292, + "acc_norm": 0.2339622641509434, + "acc_norm_stderr": 0.02605529690115292 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.22727272727272727, + "acc_stderr": 0.040139645540727756, + "acc_norm": 0.22727272727272727, + "acc_norm_stderr": 0.040139645540727756 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.21851851851851853, + "acc_stderr": 0.025195752251823796, + "acc_norm": 0.21851851851851853, + "acc_norm_stderr": 0.025195752251823796 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2185430463576159, + "acc_stderr": 0.03374235550425694, + "acc_norm": 0.2185430463576159, + "acc_norm_stderr": 0.03374235550425694 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.24875621890547264, + "acc_stderr": 0.030567675938916707, + "acc_norm": 
0.24875621890547264, + "acc_norm_stderr": 0.030567675938916707 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2023121387283237, + "acc_stderr": 0.030631145539198816, + "acc_norm": 0.2023121387283237, + "acc_norm_stderr": 0.030631145539198816 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.21693121693121692, + "acc_stderr": 0.02122708244944504, + "acc_norm": 0.21693121693121692, + "acc_norm_stderr": 0.02122708244944504 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.25, + "acc_stderr": 0.03621034121889507, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03621034121889507 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.18, + "acc_stderr": 0.038612291966536955, + "acc_norm": 0.18, + "acc_norm_stderr": 0.038612291966536955 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.24855491329479767, + "acc_stderr": 0.023267528432100174, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 0.023267528432100174 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2147239263803681, + "acc_stderr": 0.032262193772867744, + "acc_norm": 0.2147239263803681, + "acc_norm_stderr": 0.032262193772867744 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.02313237623454334, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.02313237623454334 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.21, + "acc_stderr": 0.04093601807403326, + "acc_norm": 0.21, + "acc_norm_stderr": 0.04093601807403326 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.19170984455958548, + "acc_stderr": 0.028408953626245258, + "acc_norm": 0.19170984455958548, + "acc_norm_stderr": 0.028408953626245258 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.22807017543859648, + "acc_stderr": 0.03947152782669415, + "acc_norm": 
0.22807017543859648, + "acc_norm_stderr": 0.03947152782669415 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.1908256880733945, + "acc_stderr": 0.016847676400091105, + "acc_norm": 0.1908256880733945, + "acc_norm_stderr": 0.016847676400091105 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.24603174603174602, + "acc_stderr": 0.038522733649243156, + "acc_norm": 0.24603174603174602, + "acc_norm_stderr": 0.038522733649243156 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.02380518652488814, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.02380518652488814 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816505, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816505 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.24793388429752067, + "acc_stderr": 0.03941897526516303, + "acc_norm": 0.24793388429752067, + "acc_norm_stderr": 0.03941897526516303 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.19078947368421054, + "acc_stderr": 0.03197565821032501, + "acc_norm": 0.19078947368421054, + "acc_norm_stderr": 0.03197565821032501 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.25, + "acc_stderr": 0.01751781884501444, + "acc_norm": 0.25, + "acc_norm_stderr": 0.01751781884501444 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.23404255319148937, + "acc_stderr": 0.025257861359432414, + "acc_norm": 0.23404255319148937, + "acc_norm_stderr": 0.025257861359432414 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3125, + "acc_stderr": 0.043994650575715215, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.043994650575715215 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.1527777777777778, + "acc_stderr": 0.024536326026134238, + "acc_norm": 0.1527777777777778, + "acc_norm_stderr": 0.024536326026134238 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23798882681564246, + "acc_stderr": 
0.014242630070574892, + "acc_norm": 0.23798882681564246, + "acc_norm_stderr": 0.014242630070574892 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.19117647058823528, + "acc_stderr": 0.023886881922440355, + "acc_norm": 0.19117647058823528, + "acc_norm_stderr": 0.023886881922440355 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.18775510204081633, + "acc_stderr": 0.025000256039546198, + "acc_norm": 0.18775510204081633, + "acc_norm_stderr": 0.025000256039546198 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.270042194092827, + "acc_stderr": 0.028900721906293426, + "acc_norm": 0.270042194092827, + "acc_norm_stderr": 0.028900721906293426 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.24641460234680573, + "acc_stderr": 0.01100597139992723, + "acc_norm": 0.24641460234680573, + "acc_norm_stderr": 0.01100597139992723 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.25, + "acc_stderr": 0.03039153369274154, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03039153369274154 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03225078108306289, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03225078108306289 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2692778457772338, + "mc1_stderr": 0.015528566637087304, + "mc2": 0.46232982151436586, + "mc2_stderr": 0.01648243139543783 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.20425029515938606, + "acc_stderr": 0.013860675878176822, + "acc_norm": 0.4014167650531287, + "acc_norm_stderr": 0.01685290785872906 + } + }, + "versions": { + "all": 0, + 
"harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + 
"harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "fiveflow/koMistral-v0.1-neftune", + "model_sha": "44a5ba8db203f2982dfcb5c416a45c5b737b6898", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/fiveflow/kolong-llama-v0.1/result_2023-10-10 02:31:12.json b/fiveflow/kolong-llama-v0.1/result_2023-10-10 02:31:12.json new file mode 100644 index 0000000000000000000000000000000000000000..4d721ac9f7d617205d4c42edb7685e398502c733 --- /dev/null +++ b/fiveflow/kolong-llama-v0.1/result_2023-10-10 02:31:12.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.27474402730375425, + "acc_stderr": 0.013044617212771227, + "acc_norm": 0.32081911262798635, + "acc_norm_stderr": 0.013640943091946526 + }, + "harness|ko_hellaswag|10": { + "acc": 0.35660227046405096, + "acc_stderr": 0.0047801698733328435, + "acc_norm": 0.45717984465245964, + "acc_norm_stderr": 0.004971449552787173 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.3157894736842105, + "acc_stderr": 0.035650796707083106, + "acc_norm": 0.3157894736842105, + "acc_norm_stderr": 0.035650796707083106 + }, + "harness|ko_mmlu_management|5": { + "acc": 
0.2524271844660194, + "acc_stderr": 0.04301250399690878, + "acc_norm": 0.2524271844660194, + "acc_norm_stderr": 0.04301250399690878 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.30268199233716475, + "acc_stderr": 0.016428781581749367, + "acc_norm": 0.30268199233716475, + "acc_norm_stderr": 0.016428781581749367 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.31851851851851853, + "acc_stderr": 0.04024778401977111, + "acc_norm": 0.31851851851851853, + "acc_norm_stderr": 0.04024778401977111 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939101, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939101 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2680851063829787, + "acc_stderr": 0.028957342788342343, + "acc_norm": 0.2680851063829787, + "acc_norm_stderr": 0.028957342788342343 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.20481927710843373, + "acc_stderr": 0.03141784291663925, + "acc_norm": 0.20481927710843373, + "acc_norm_stderr": 0.03141784291663925 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3183279742765273, + "acc_stderr": 0.026457225067811018, + "acc_norm": 0.3183279742765273, + "acc_norm_stderr": 0.026457225067811018 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.28699551569506726, + "acc_stderr": 0.030360379710291954, + "acc_norm": 0.28699551569506726, + "acc_norm_stderr": 0.030360379710291954 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2900763358778626, + "acc_stderr": 0.03980066246467765, + "acc_norm": 0.2900763358778626, + "acc_norm_stderr": 0.03980066246467765 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.30808080808080807, + "acc_stderr": 0.03289477330098614, + "acc_norm": 0.30808080808080807, + "acc_norm_stderr": 0.03289477330098614 + }, + "harness|ko_mmlu_electrical_engineering|5": 
{ + "acc": 0.2620689655172414, + "acc_stderr": 0.03664666337225256, + "acc_norm": 0.2620689655172414, + "acc_norm_stderr": 0.03664666337225256 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.04158307533083286, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.04158307533083286 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.24369747899159663, + "acc_stderr": 0.027886828078380558, + "acc_norm": 0.24369747899159663, + "acc_norm_stderr": 0.027886828078380558 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2282051282051282, + "acc_stderr": 0.021278393863586282, + "acc_norm": 0.2282051282051282, + "acc_norm_stderr": 0.021278393863586282 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.24074074074074073, + "acc_stderr": 0.04133119440243838, + "acc_norm": 0.24074074074074073, + "acc_norm_stderr": 0.04133119440243838 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.22167487684729065, + "acc_stderr": 0.029225575892489607, + "acc_norm": 0.22167487684729065, + "acc_norm_stderr": 0.029225575892489607 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.267741935483871, + "acc_stderr": 0.02518900666021238, + "acc_norm": 0.267741935483871, + "acc_norm_stderr": 0.02518900666021238 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.29914529914529914, + "acc_stderr": 0.029996951858349476, + "acc_norm": 0.29914529914529914, + "acc_norm_stderr": 0.029996951858349476 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3018867924528302, + "acc_stderr": 0.028254200344438665, + "acc_norm": 0.3018867924528302, + "acc_norm_stderr": 
0.028254200344438665 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.32727272727272727, + "acc_stderr": 0.044942908662520896, + "acc_norm": 0.32727272727272727, + "acc_norm_stderr": 0.044942908662520896 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.026962424325073838, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.026962424325073838 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.03780445850526733, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.03780445850526733 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.263681592039801, + "acc_stderr": 0.03115715086935555, + "acc_norm": 0.263681592039801, + "acc_norm_stderr": 0.03115715086935555 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2543352601156069, + "acc_stderr": 0.0332055644308557, + "acc_norm": 0.2543352601156069, + "acc_norm_stderr": 0.0332055644308557 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2671957671957672, + "acc_stderr": 0.022789673145776575, + "acc_norm": 0.2671957671957672, + "acc_norm_stderr": 0.022789673145776575 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2708333333333333, + "acc_stderr": 0.03716177437566017, + "acc_norm": 0.2708333333333333, + "acc_norm_stderr": 0.03716177437566017 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2947976878612717, + "acc_stderr": 0.02454761779480383, + "acc_norm": 0.2947976878612717, + "acc_norm_stderr": 0.02454761779480383 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.26993865030674846, + "acc_stderr": 0.034878251684978906, + "acc_norm": 
0.26993865030674846, + "acc_norm_stderr": 0.034878251684978906 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2654320987654321, + "acc_stderr": 0.024569223600460845, + "acc_norm": 0.2654320987654321, + "acc_norm_stderr": 0.024569223600460845 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.25906735751295334, + "acc_stderr": 0.031618779179354094, + "acc_norm": 0.25906735751295334, + "acc_norm_stderr": 0.031618779179354094 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2982456140350877, + "acc_stderr": 0.04303684033537315, + "acc_norm": 0.2982456140350877, + "acc_norm_stderr": 0.04303684033537315 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.29541284403669726, + "acc_stderr": 0.019560619182976, + "acc_norm": 0.29541284403669726, + "acc_norm_stderr": 0.019560619182976 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.18253968253968253, + "acc_stderr": 0.034550710191021475, + "acc_norm": 0.18253968253968253, + "acc_norm_stderr": 0.034550710191021475 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3202614379084967, + "acc_stderr": 0.026716118380156837, + "acc_norm": 0.3202614379084967, + "acc_norm_stderr": 0.026716118380156837 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.19834710743801653, + "acc_stderr": 0.03640118271990945, + "acc_norm": 0.19834710743801653, + "acc_norm_stderr": 0.03640118271990945 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.034597776068105365, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.034597776068105365 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.28104575163398693, + 
"acc_stderr": 0.018185218954318075, + "acc_norm": 0.28104575163398693, + "acc_norm_stderr": 0.018185218954318075 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2872340425531915, + "acc_stderr": 0.026992199173064356, + "acc_norm": 0.2872340425531915, + "acc_norm_stderr": 0.026992199173064356 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2767857142857143, + "acc_stderr": 0.042466243366976256, + "acc_norm": 0.2767857142857143, + "acc_norm_stderr": 0.042466243366976256 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.24537037037037038, + "acc_stderr": 0.029346665094372944, + "acc_norm": 0.24537037037037038, + "acc_norm_stderr": 0.029346665094372944 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2547486033519553, + "acc_stderr": 0.014572650383409163, + "acc_norm": 0.2547486033519553, + "acc_norm_stderr": 0.014572650383409163 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3860294117647059, + "acc_stderr": 0.029573269134411127, + "acc_norm": 0.3860294117647059, + "acc_norm_stderr": 0.029573269134411127 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2693877551020408, + "acc_stderr": 0.02840125202902294, + "acc_norm": 0.2693877551020408, + "acc_norm_stderr": 0.02840125202902294 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.3459915611814346, + "acc_stderr": 0.03096481058878671, + "acc_norm": 0.3459915611814346, + "acc_norm_stderr": 0.03096481058878671 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.288135593220339, + "acc_stderr": 0.011567140661324561, + "acc_norm": 0.288135593220339, + "acc_norm_stderr": 0.011567140661324561 + 
}, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.28921568627450983, + "acc_stderr": 0.031822318676475544, + "acc_norm": 0.28921568627450983, + "acc_norm_stderr": 0.031822318676475544 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03225078108306289, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03225078108306289 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.21542227662178703, + "mc1_stderr": 0.01439190265242768, + "mc2": 0.37745653236553117, + "mc2_stderr": 0.015551417113340219 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.17355371900826447, + "acc_stderr": 0.013020842794398262, + "acc_norm": 0.2408500590318772, + "acc_norm_stderr": 0.014701172662583915 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + 
"harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "fiveflow/kolong-llama-v0.1", + "model_sha": "e9ed499df932c04d7d3106603136f469c2f57aaa", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/freewheelin/free-llama3-dpo-v0.2/result_2024-05-09 03:15:33.json b/freewheelin/free-llama3-dpo-v0.2/result_2024-05-09 03:15:33.json new file mode 100644 index 
0000000000000000000000000000000000000000..4c68ffe431aab185d67048dfe8416e1afbdb875c --- /dev/null +++ b/freewheelin/free-llama3-dpo-v0.2/result_2024-05-09 03:15:33.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.37542662116040953, + "acc_stderr": 0.014150631435111728, + "acc_norm": 0.44368600682593856, + "acc_norm_stderr": 0.014518421825670445 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3897629954192392, + "acc_stderr": 0.004866997110388194, + "acc_norm": 0.5230033857797252, + "acc_norm_stderr": 0.004984497871025245 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6608187134502924, + "acc_stderr": 0.036310534964889056, + "acc_norm": 0.6608187134502924, + "acc_norm_stderr": 0.036310534964889056 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.7378640776699029, + "acc_stderr": 0.04354631077260597, + "acc_norm": 0.7378640776699029, + "acc_norm_stderr": 0.04354631077260597 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5708812260536399, + "acc_stderr": 0.017699388483126785, + "acc_norm": 0.5708812260536399, + "acc_norm_stderr": 0.017699388483126785 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.04244633238353229, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.04244633238353229 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206824, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206824 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.48936170212765956, + "acc_stderr": 0.03267862331014063, + "acc_norm": 0.48936170212765956, + "acc_norm_stderr": 0.03267862331014063 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4819277108433735, + "acc_stderr": 0.038899512528272166, + "acc_norm": 0.4819277108433735, + "acc_norm_stderr": 0.038899512528272166 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5434083601286174, + "acc_stderr": 0.028290869054197604, + "acc_norm": 0.5434083601286174, + "acc_norm_stderr": 
0.028290869054197604 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.515695067264574, + "acc_stderr": 0.0335412657542081, + "acc_norm": 0.515695067264574, + "acc_norm_stderr": 0.0335412657542081 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5954198473282443, + "acc_stderr": 0.043046937953806645, + "acc_norm": 0.5954198473282443, + "acc_norm_stderr": 0.043046937953806645 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.45, + "acc_stderr": 0.04999999999999998, + "acc_norm": 0.45, + "acc_norm_stderr": 0.04999999999999998 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6919191919191919, + "acc_stderr": 0.03289477330098614, + "acc_norm": 0.6919191919191919, + "acc_norm_stderr": 0.03289477330098614 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5586206896551724, + "acc_stderr": 0.04137931034482757, + "acc_norm": 0.5586206896551724, + "acc_norm_stderr": 0.04137931034482757 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3235294117647059, + "acc_stderr": 0.04655010411319616, + "acc_norm": 0.3235294117647059, + "acc_norm_stderr": 0.04655010411319616 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5588235294117647, + "acc_stderr": 0.0322529423239964, + "acc_norm": 0.5588235294117647, + "acc_norm_stderr": 0.0322529423239964 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5256410256410257, + "acc_stderr": 0.025317649726448673, + "acc_norm": 0.5256410256410257, + "acc_norm_stderr": 0.025317649726448673 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6111111111111112, + "acc_stderr": 0.0471282125742677, + "acc_norm": 0.6111111111111112, + "acc_norm_stderr": 
0.0471282125742677 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.45320197044334976, + "acc_stderr": 0.035025446508458714, + "acc_norm": 0.45320197044334976, + "acc_norm_stderr": 0.035025446508458714 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5387096774193548, + "acc_stderr": 0.028358634859836942, + "acc_norm": 0.5387096774193548, + "acc_norm_stderr": 0.028358634859836942 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7692307692307693, + "acc_stderr": 0.027601921381417607, + "acc_norm": 0.7692307692307693, + "acc_norm_stderr": 0.027601921381417607 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.49433962264150944, + "acc_stderr": 0.03077090076385131, + "acc_norm": 0.49433962264150944, + "acc_norm_stderr": 0.03077090076385131 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5818181818181818, + "acc_stderr": 0.04724577405731572, + "acc_norm": 0.5818181818181818, + "acc_norm_stderr": 0.04724577405731572 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.02944316932303154, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.02944316932303154 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33774834437086093, + "acc_stderr": 0.038615575462551684, + "acc_norm": 0.33774834437086093, + "acc_norm_stderr": 0.038615575462551684 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6716417910447762, + "acc_stderr": 0.033206858897443244, + "acc_norm": 0.6716417910447762, + "acc_norm_stderr": 0.033206858897443244 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.44508670520231214, + "acc_stderr": 0.03789401760283648, + "acc_norm": 0.44508670520231214, + "acc_norm_stderr": 0.03789401760283648 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.36507936507936506, + "acc_stderr": 0.024796060602699958, + "acc_norm": 0.36507936507936506, + "acc_norm_stderr": 0.024796060602699958 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5, + 
"acc_stderr": 0.04181210050035455, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04181210050035455 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.72, + "acc_stderr": 0.045126085985421255, + "acc_norm": 0.72, + "acc_norm_stderr": 0.045126085985421255 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5780346820809249, + "acc_stderr": 0.02658923114217426, + "acc_norm": 0.5780346820809249, + "acc_norm_stderr": 0.02658923114217426 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.48466257668711654, + "acc_stderr": 0.039265223787088424, + "acc_norm": 0.48466257668711654, + "acc_norm_stderr": 0.039265223787088424 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5401234567901234, + "acc_stderr": 0.027731022753539274, + "acc_norm": 0.5401234567901234, + "acc_norm_stderr": 0.027731022753539274 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6476683937823834, + "acc_stderr": 0.03447478286414357, + "acc_norm": 0.6476683937823834, + "acc_norm_stderr": 0.03447478286414357 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3508771929824561, + "acc_stderr": 0.04489539350270698, + "acc_norm": 0.3508771929824561, + "acc_norm_stderr": 0.04489539350270698 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6, + "acc_stderr": 0.021004201260420075, + "acc_norm": 0.6, + "acc_norm_stderr": 0.021004201260420075 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.42063492063492064, + "acc_stderr": 0.04415438226743744, + "acc_norm": 0.42063492063492064, + "acc_norm_stderr": 0.04415438226743744 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.565359477124183, + "acc_stderr": 0.02838425670488304, + 
"acc_norm": 0.565359477124183, + "acc_norm_stderr": 0.02838425670488304 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.59, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.59, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.71900826446281, + "acc_stderr": 0.04103203830514511, + "acc_norm": 0.71900826446281, + "acc_norm_stderr": 0.04103203830514511 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5394736842105263, + "acc_stderr": 0.04056242252249036, + "acc_norm": 0.5394736842105263, + "acc_norm_stderr": 0.04056242252249036 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.020087362076702857, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.020087362076702857 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.028121636040639875, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.028121636040639875 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.44642857142857145, + "acc_stderr": 0.04718471485219589, + "acc_norm": 0.44642857142857145, + "acc_norm_stderr": 0.04718471485219589 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.03407632093854051, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.03407632093854051 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.288268156424581, + "acc_stderr": 0.01514913286020942, + "acc_norm": 0.288268156424581, + "acc_norm_stderr": 0.01514913286020942 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4338235294117647, + 
"acc_stderr": 0.03010563657001663, + "acc_norm": 0.4338235294117647, + "acc_norm_stderr": 0.03010563657001663 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5714285714285714, + "acc_stderr": 0.03168091161233882, + "acc_norm": 0.5714285714285714, + "acc_norm_stderr": 0.03168091161233882 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6751054852320675, + "acc_stderr": 0.030486039389105313, + "acc_norm": 0.6751054852320675, + "acc_norm_stderr": 0.030486039389105313 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3683181225554107, + "acc_stderr": 0.012319403369564644, + "acc_norm": 0.3683181225554107, + "acc_norm_stderr": 0.012319403369564644 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6078431372549019, + "acc_stderr": 0.03426712349247273, + "acc_norm": 0.6078431372549019, + "acc_norm_stderr": 0.03426712349247273 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6181818181818182, + "acc_stderr": 0.03793713171165633, + "acc_norm": 0.6181818181818182, + "acc_norm_stderr": 0.03793713171165633 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.27050183598531213, + "mc1_stderr": 0.015550778332842881, + "mc2": 0.44585727259274194, + "mc2_stderr": 0.015140086437435859 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.46635182998819363, + "acc_stderr": 0.017151384117131865, + "acc_norm": 0.5879574970484062, + "acc_norm_stderr": 0.01692227673852836 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + 
"harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + 
"harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "freewheelin/free-llama3-dpo-v0.2", + "model_sha": "2caf1189046172cce115824313971aee5a429df3", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/freewheelin/free-solar-dpo-v0.2/result_2024-03-19 12:35:20.json b/freewheelin/free-solar-dpo-v0.2/result_2024-03-19 12:35:20.json new file mode 100644 index 0000000000000000000000000000000000000000..6bdc9308ee68e69ccf1c13d52d711c75eccb8d95 --- /dev/null +++ b/freewheelin/free-solar-dpo-v0.2/result_2024-03-19 12:35:20.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.6843003412969283, + "acc_stderr": 0.013582571095815293, + "acc_norm": 0.735494880546075, + "acc_norm_stderr": 0.012889272949313366 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4609639514041028, + "acc_stderr": 0.004974551179483938, + "acc_norm": 0.6194981079466242, + "acc_norm_stderr": 0.004845180034271625 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.7251461988304093, + "acc_stderr": 0.03424042924691583, + "acc_norm": 0.7251461988304093, + "acc_norm_stderr": 0.03424042924691583 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.7184466019417476, + "acc_stderr": 0.04453254836326468, + "acc_norm": 0.7184466019417476, + "acc_norm_stderr": 0.04453254836326468 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.7049808429118773, + "acc_stderr": 0.016308363772932717, + "acc_norm": 0.7049808429118773, + "acc_norm_stderr": 0.016308363772932717 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.043097329010363554, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.043097329010363554 + }, + 
"harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.5234042553191489, + "acc_stderr": 0.03265019475033581, + "acc_norm": 0.5234042553191489, + "acc_norm_stderr": 0.03265019475033581 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.5120481927710844, + "acc_stderr": 0.038913644958358175, + "acc_norm": 0.5120481927710844, + "acc_norm_stderr": 0.038913644958358175 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6334405144694534, + "acc_stderr": 0.02736807824397163, + "acc_norm": 0.6334405144694534, + "acc_norm_stderr": 0.02736807824397163 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.6502242152466368, + "acc_stderr": 0.03200736719484503, + "acc_norm": 0.6502242152466368, + "acc_norm_stderr": 0.03200736719484503 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6030534351145038, + "acc_stderr": 0.04291135671009224, + "acc_norm": 0.6030534351145038, + "acc_norm_stderr": 0.04291135671009224 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956909, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956909 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7525252525252525, + "acc_stderr": 0.030746300742124505, + "acc_norm": 0.7525252525252525, + "acc_norm_stderr": 0.030746300742124505 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5586206896551724, + "acc_stderr": 0.04137931034482757, + "acc_norm": 0.5586206896551724, + "acc_norm_stderr": 0.04137931034482757 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.04488482852329017, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.04488482852329017 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.634453781512605, + "acc_stderr": 0.031282177063684614, + "acc_norm": 0.634453781512605, + "acc_norm_stderr": 
0.031282177063684614 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.6512820512820513, + "acc_stderr": 0.024162780284017724, + "acc_norm": 0.6512820512820513, + "acc_norm_stderr": 0.024162780284017724 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.65, + "acc_stderr": 0.04793724854411022, + "acc_norm": 0.65, + "acc_norm_stderr": 0.04793724854411022 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6759259259259259, + "acc_stderr": 0.04524596007030048, + "acc_norm": 0.6759259259259259, + "acc_norm_stderr": 0.04524596007030048 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4630541871921182, + "acc_stderr": 0.035083705204426656, + "acc_norm": 0.4630541871921182, + "acc_norm_stderr": 0.035083705204426656 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6193548387096774, + "acc_stderr": 0.027621717832907036, + "acc_norm": 0.6193548387096774, + "acc_norm_stderr": 0.027621717832907036 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.8247863247863247, + "acc_stderr": 0.02490443909891822, + "acc_norm": 0.8247863247863247, + "acc_norm_stderr": 0.02490443909891822 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5622641509433962, + "acc_stderr": 0.030533338430467516, + "acc_norm": 0.5622641509433962, + "acc_norm_stderr": 0.030533338430467516 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6818181818181818, + "acc_stderr": 0.044612721759105085, + "acc_norm": 0.6818181818181818, + "acc_norm_stderr": 0.044612721759105085 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.37777777777777777, + "acc_stderr": 0.02956070739246571, + "acc_norm": 0.37777777777777777, + "acc_norm_stderr": 0.02956070739246571 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3841059602649007, + "acc_stderr": 0.03971301814719197, + 
"acc_norm": 0.3841059602649007, + "acc_norm_stderr": 0.03971301814719197 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.746268656716418, + "acc_stderr": 0.03076944496729601, + "acc_norm": 0.746268656716418, + "acc_norm_stderr": 0.03076944496729601 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5549132947976878, + "acc_stderr": 0.03789401760283647, + "acc_norm": 0.5549132947976878, + "acc_norm_stderr": 0.03789401760283647 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.025487187147859372, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.025487187147859372 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5486111111111112, + "acc_stderr": 0.04161402398403279, + "acc_norm": 0.5486111111111112, + "acc_norm_stderr": 0.04161402398403279 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.81, + "acc_stderr": 0.03942772444036623, + "acc_norm": 0.81, + "acc_norm_stderr": 0.03942772444036623 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.615606936416185, + "acc_stderr": 0.026189666966272035, + "acc_norm": 0.615606936416185, + "acc_norm_stderr": 0.026189666966272035 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.6687116564417178, + "acc_stderr": 0.03697983910025588, + "acc_norm": 0.6687116564417178, + "acc_norm_stderr": 0.03697983910025588 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6450617283950617, + "acc_stderr": 0.026624152478845853, + "acc_norm": 0.6450617283950617, + "acc_norm_stderr": 0.026624152478845853 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7927461139896373, + "acc_stderr": 
0.02925282329180363, + "acc_norm": 0.7927461139896373, + "acc_norm_stderr": 0.02925282329180363 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.38596491228070173, + "acc_stderr": 0.04579639422070435, + "acc_norm": 0.38596491228070173, + "acc_norm_stderr": 0.04579639422070435 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.7431192660550459, + "acc_stderr": 0.018732492928342448, + "acc_norm": 0.7431192660550459, + "acc_norm_stderr": 0.018732492928342448 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.40476190476190477, + "acc_stderr": 0.043902592653775614, + "acc_norm": 0.40476190476190477, + "acc_norm_stderr": 0.043902592653775614 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.6274509803921569, + "acc_stderr": 0.02768418188330289, + "acc_norm": 0.6274509803921569, + "acc_norm_stderr": 0.02768418188330289 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.63, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.63, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7603305785123967, + "acc_stderr": 0.03896878985070416, + "acc_norm": 0.7603305785123967, + "acc_norm_stderr": 0.03896878985070416 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.6578947368421053, + "acc_stderr": 0.03860731599316091, + "acc_norm": 0.6578947368421053, + "acc_norm_stderr": 0.03860731599316091 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5898692810457516, + "acc_stderr": 0.019898412717635903, + "acc_norm": 0.5898692810457516, + "acc_norm_stderr": 0.019898412717635903 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.4148936170212766, + "acc_stderr": 0.0293922365846125, + "acc_norm": 0.4148936170212766, + "acc_norm_stderr": 0.0293922365846125 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4375, + "acc_stderr": 0.04708567521880525, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.04708567521880525 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 
0.5046296296296297, + "acc_stderr": 0.03409825519163572, + "acc_norm": 0.5046296296296297, + "acc_norm_stderr": 0.03409825519163572 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.31508379888268156, + "acc_stderr": 0.01553685085247364, + "acc_norm": 0.31508379888268156, + "acc_norm_stderr": 0.01553685085247364 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.69, + "acc_stderr": 0.046482319871173156, + "acc_norm": 0.69, + "acc_norm_stderr": 0.046482319871173156 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5147058823529411, + "acc_stderr": 0.03035969707904612, + "acc_norm": 0.5147058823529411, + "acc_norm_stderr": 0.03035969707904612 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6816326530612244, + "acc_stderr": 0.029822533793982038, + "acc_norm": 0.6816326530612244, + "acc_norm_stderr": 0.029822533793982038 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7552742616033755, + "acc_stderr": 0.02798569938703642, + "acc_norm": 0.7552742616033755, + "acc_norm_stderr": 0.02798569938703642 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.4315514993481095, + "acc_stderr": 0.0126500079994639, + "acc_norm": 0.4315514993481095, + "acc_norm_stderr": 0.0126500079994639 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6568627450980392, + "acc_stderr": 0.03332139944668086, + "acc_norm": 0.6568627450980392, + "acc_norm_stderr": 0.03332139944668086 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.03681050869161549, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.03681050869161549 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.5128518971848225, + "mc1_stderr": 0.017497717944299825, + "mc2": 0.6285777564864972, + "mc2_stderr": 0.01515796213143053 + }, 
+ "harness|ko_commongen_v2|2": { + "acc": 0.3955135773317591, + "acc_stderr": 0.016810815902206042, + "acc_norm": 0.41204250295159384, + "acc_norm_stderr": 0.01692227673852836 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + 
"harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "freewheelin/free-solar-dpo-v0.2", + "model_sha": "18bcf517b11275af43598162d032c8a9d7603fab", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/freewheelin/free-solar-dpo-v0.6/result_2024-04-04 01:54:40.json b/freewheelin/free-solar-dpo-v0.6/result_2024-04-04 01:54:40.json new file mode 100644 index 0000000000000000000000000000000000000000..c4dedb7bf8337402cea102b95a173013a11a1a80 --- /dev/null +++ b/freewheelin/free-solar-dpo-v0.6/result_2024-04-04 01:54:40.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.7005119453924915, + "acc_stderr": 0.013385021637313577, + "acc_norm": 0.7329351535836177, + "acc_norm_stderr": 0.012928933196496352 + }, + "harness|ko_hellaswag|10": { + "acc": 0.5230033857797252, + "acc_stderr": 0.004984497871025244, + "acc_norm": 0.6796454889464251, + "acc_norm_stderr": 0.004656591678606743 + }, + 
"harness|ko_mmlu_world_religions|5": { + "acc": 0.7309941520467836, + "acc_stderr": 0.0340105262010409, + "acc_norm": 0.7309941520467836, + "acc_norm_stderr": 0.0340105262010409 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.7378640776699029, + "acc_stderr": 0.043546310772605956, + "acc_norm": 0.7378640776699029, + "acc_norm_stderr": 0.043546310772605956 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.7088122605363985, + "acc_stderr": 0.016246087069701404, + "acc_norm": 0.7088122605363985, + "acc_norm_stderr": 0.016246087069701404 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.043097329010363554, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.043097329010363554 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.5361702127659574, + "acc_stderr": 0.0326003851183577, + "acc_norm": 0.5361702127659574, + "acc_norm_stderr": 0.0326003851183577 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.5, + "acc_stderr": 0.03892494720807614, + "acc_norm": 0.5, + "acc_norm_stderr": 0.03892494720807614 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6302250803858521, + "acc_stderr": 0.027417996705630998, + "acc_norm": 0.6302250803858521, + "acc_norm_stderr": 0.027417996705630998 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.6502242152466368, + "acc_stderr": 0.03200736719484503, + "acc_norm": 0.6502242152466368, + "acc_norm_stderr": 0.03200736719484503 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5877862595419847, + "acc_stderr": 0.04317171194870255, + "acc_norm": 0.5877862595419847, + "acc_norm_stderr": 0.04317171194870255 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.48, + "acc_stderr": 0.05021167315686779, + "acc_norm": 0.48, + "acc_norm_stderr": 0.05021167315686779 + }, + "harness|ko_mmlu_high_school_geography|5": { + 
"acc": 0.8080808080808081, + "acc_stderr": 0.028057791672989017, + "acc_norm": 0.8080808080808081, + "acc_norm_stderr": 0.028057791672989017 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5379310344827586, + "acc_stderr": 0.04154659671707548, + "acc_norm": 0.5379310344827586, + "acc_norm_stderr": 0.04154659671707548 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.04617034827006716, + "acc_norm": 0.3137254901960784, + "acc_norm_stderr": 0.04617034827006716 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6386554621848739, + "acc_stderr": 0.03120469122515002, + "acc_norm": 0.6386554621848739, + "acc_norm_stderr": 0.03120469122515002 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.6358974358974359, + "acc_stderr": 0.024396672985094753, + "acc_norm": 0.6358974358974359, + "acc_norm_stderr": 0.024396672985094753 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.69, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.69, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6481481481481481, + "acc_stderr": 0.046166311118017146, + "acc_norm": 0.6481481481481481, + "acc_norm_stderr": 0.046166311118017146 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.45320197044334976, + "acc_stderr": 0.035025446508458714, + "acc_norm": 0.45320197044334976, + "acc_norm_stderr": 0.035025446508458714 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6483870967741936, + "acc_stderr": 0.027162537826948458, + "acc_norm": 0.6483870967741936, + "acc_norm_stderr": 0.027162537826948458 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.8290598290598291, + "acc_stderr": 0.0246624968452098, + "acc_norm": 0.8290598290598291, + "acc_norm_stderr": 0.0246624968452098 + }, + 
"harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5811320754716981, + "acc_stderr": 0.030365050829115205, + "acc_norm": 0.5811320754716981, + "acc_norm_stderr": 0.030365050829115205 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6727272727272727, + "acc_stderr": 0.04494290866252091, + "acc_norm": 0.6727272727272727, + "acc_norm_stderr": 0.04494290866252091 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.37407407407407406, + "acc_stderr": 0.02950286112895529, + "acc_norm": 0.37407407407407406, + "acc_norm_stderr": 0.02950286112895529 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3443708609271523, + "acc_stderr": 0.038796870240733264, + "acc_norm": 0.3443708609271523, + "acc_norm_stderr": 0.038796870240733264 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7562189054726368, + "acc_stderr": 0.030360490154014645, + "acc_norm": 0.7562189054726368, + "acc_norm_stderr": 0.030360490154014645 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5375722543352601, + "acc_stderr": 0.03801685104524458, + "acc_norm": 0.5375722543352601, + "acc_norm_stderr": 0.03801685104524458 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.4603174603174603, + "acc_stderr": 0.0256700806369092, + "acc_norm": 0.4603174603174603, + "acc_norm_stderr": 0.0256700806369092 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.625, + "acc_stderr": 0.04048439222695598, + "acc_norm": 0.625, + "acc_norm_stderr": 0.04048439222695598 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.78, + "acc_stderr": 0.04163331998932263, + "acc_norm": 0.78, + "acc_norm_stderr": 0.04163331998932263 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5953757225433526, + "acc_stderr": 0.02642481659400985, + "acc_norm": 0.5953757225433526, + "acc_norm_stderr": 0.02642481659400985 + }, 
+ "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5950920245398773, + "acc_stderr": 0.038566721635489125, + "acc_norm": 0.5950920245398773, + "acc_norm_stderr": 0.038566721635489125 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6759259259259259, + "acc_stderr": 0.026041766202717156, + "acc_norm": 0.6759259259259259, + "acc_norm_stderr": 0.026041766202717156 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7668393782383419, + "acc_stderr": 0.03051611137147601, + "acc_norm": 0.7668393782383419, + "acc_norm_stderr": 0.03051611137147601 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.45614035087719296, + "acc_stderr": 0.046854730419077895, + "acc_norm": 0.45614035087719296, + "acc_norm_stderr": 0.046854730419077895 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.7522935779816514, + "acc_stderr": 0.018508143602547798, + "acc_norm": 0.7522935779816514, + "acc_norm_stderr": 0.018508143602547798 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4365079365079365, + "acc_stderr": 0.04435932892851466, + "acc_norm": 0.4365079365079365, + "acc_norm_stderr": 0.04435932892851466 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.6405228758169934, + "acc_stderr": 0.027475969910660952, + "acc_norm": 0.6405228758169934, + "acc_norm_stderr": 0.027475969910660952 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.67, + "acc_stderr": 0.04725815626252607, + "acc_norm": 0.67, + "acc_norm_stderr": 0.04725815626252607 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.71900826446281, + "acc_stderr": 0.04103203830514512, + "acc_norm": 0.71900826446281, + "acc_norm_stderr": 0.04103203830514512 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.6710526315789473, + "acc_stderr": 0.038234289699266046, + "acc_norm": 0.6710526315789473, + "acc_norm_stderr": 
0.038234289699266046 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5833333333333334, + "acc_stderr": 0.019944914136873586, + "acc_norm": 0.5833333333333334, + "acc_norm_stderr": 0.019944914136873586 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.41843971631205673, + "acc_stderr": 0.029427994039419994, + "acc_norm": 0.41843971631205673, + "acc_norm_stderr": 0.029427994039419994 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.04697113923010213, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.04697113923010213 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.49537037037037035, + "acc_stderr": 0.03409825519163572, + "acc_norm": 0.49537037037037035, + "acc_norm_stderr": 0.03409825519163572 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.41787709497206704, + "acc_stderr": 0.016495400635820084, + "acc_norm": 0.41787709497206704, + "acc_norm_stderr": 0.016495400635820084 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.7, + "acc_stderr": 0.04605661864718381, + "acc_norm": 0.7, + "acc_norm_stderr": 0.04605661864718381 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5551470588235294, + "acc_stderr": 0.030187532060329387, + "acc_norm": 0.5551470588235294, + "acc_norm_stderr": 0.030187532060329387 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6816326530612244, + "acc_stderr": 0.02982253379398204, + "acc_norm": 0.6816326530612244, + "acc_norm_stderr": 0.02982253379398204 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7257383966244726, + "acc_stderr": 0.029041333510598018, + "acc_norm": 0.7257383966244726, + "acc_norm_stderr": 0.029041333510598018 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.4439374185136897, + 
"acc_stderr": 0.012689708167787679, + "acc_norm": 0.4439374185136897, + "acc_norm_stderr": 0.012689708167787679 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6617647058823529, + "acc_stderr": 0.0332057461294543, + "acc_norm": 0.6617647058823529, + "acc_norm_stderr": 0.0332057461294543 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6303030303030303, + "acc_stderr": 0.03769430314512568, + "acc_norm": 0.6303030303030303, + "acc_norm_stderr": 0.03769430314512568 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.6572827417380661, + "mc1_stderr": 0.016614949385347026, + "mc2": 0.7571515869707225, + "mc2_stderr": 0.013803003444350486 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.48288075560802834, + "acc_stderr": 0.017180275246085626, + "acc_norm": 0.512396694214876, + "acc_norm_stderr": 0.01718506973267653 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + 
"harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "freewheelin/free-solar-dpo-v0.6", + "model_sha": "293131f478e2091ce9dff3912d37c1f047cce718", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/freewheelin/free-solar-dpo-v0.9/result_2024-04-18 21:40:19.json 
b/freewheelin/free-solar-dpo-v0.9/result_2024-04-18 21:40:19.json new file mode 100644 index 0000000000000000000000000000000000000000..0767af03038565b9d355e7d792abaaa6edd1b93c --- /dev/null +++ b/freewheelin/free-solar-dpo-v0.9/result_2024-04-18 21:40:19.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.7320819112627986, + "acc_stderr": 0.01294203019513643, + "acc_norm": 0.7627986348122867, + "acc_norm_stderr": 0.012430399829260854 + }, + "harness|ko_hellaswag|10": { + "acc": 0.6889065923122883, + "acc_stderr": 0.004619948037222894, + "acc_norm": 0.808105954989046, + "acc_norm_stderr": 0.003929854025800969 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6900584795321637, + "acc_stderr": 0.035469769593931624, + "acc_norm": 0.6900584795321637, + "acc_norm_stderr": 0.035469769593931624 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.7378640776699029, + "acc_stderr": 0.043546310772605956, + "acc_norm": 0.7378640776699029, + "acc_norm_stderr": 0.043546310772605956 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.685823754789272, + "acc_stderr": 0.01659929173588493, + "acc_norm": 0.685823754789272, + "acc_norm_stderr": 0.01659929173588493 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45185185185185184, + "acc_stderr": 0.04299268905480863, + "acc_norm": 0.45185185185185184, + "acc_norm_stderr": 0.04299268905480863 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4765957446808511, + "acc_stderr": 0.03265019475033583, + "acc_norm": 0.4765957446808511, + "acc_norm_stderr": 0.03265019475033583 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.463855421686747, + "acc_stderr": 0.03882310850890594, + "acc_norm": 0.463855421686747, + "acc_norm_stderr": 0.03882310850890594 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6334405144694534, + 
"acc_stderr": 0.027368078243971646, + "acc_norm": 0.6334405144694534, + "acc_norm_stderr": 0.027368078243971646 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.6457399103139013, + "acc_stderr": 0.032100621541349864, + "acc_norm": 0.6457399103139013, + "acc_norm_stderr": 0.032100621541349864 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.549618320610687, + "acc_stderr": 0.04363643698524779, + "acc_norm": 0.549618320610687, + "acc_norm_stderr": 0.04363643698524779 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.51, + "acc_stderr": 0.050241839379569095, + "acc_norm": 0.51, + "acc_norm_stderr": 0.050241839379569095 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7626262626262627, + "acc_stderr": 0.030313710538198906, + "acc_norm": 0.7626262626262627, + "acc_norm_stderr": 0.030313710538198906 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5241379310344828, + "acc_stderr": 0.0416180850350153, + "acc_norm": 0.5241379310344828, + "acc_norm_stderr": 0.0416180850350153 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3627450980392157, + "acc_stderr": 0.047840607041056527, + "acc_norm": 0.3627450980392157, + "acc_norm_stderr": 0.047840607041056527 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6260504201680672, + "acc_stderr": 0.03142946637883708, + "acc_norm": 0.6260504201680672, + "acc_norm_stderr": 0.03142946637883708 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.617948717948718, + "acc_stderr": 0.024635549163908237, + "acc_norm": 0.617948717948718, + "acc_norm_stderr": 0.024635549163908237 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.65, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.65, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 
0.6481481481481481, + "acc_stderr": 0.046166311118017146, + "acc_norm": 0.6481481481481481, + "acc_norm_stderr": 0.046166311118017146 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.46798029556650245, + "acc_stderr": 0.03510766597959217, + "acc_norm": 0.46798029556650245, + "acc_norm_stderr": 0.03510766597959217 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5935483870967742, + "acc_stderr": 0.027941727346256304, + "acc_norm": 0.5935483870967742, + "acc_norm_stderr": 0.027941727346256304 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.8376068376068376, + "acc_stderr": 0.024161618127987745, + "acc_norm": 0.8376068376068376, + "acc_norm_stderr": 0.024161618127987745 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5547169811320755, + "acc_stderr": 0.030588052974270655, + "acc_norm": 0.5547169811320755, + "acc_norm_stderr": 0.030588052974270655 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6090909090909091, + "acc_stderr": 0.04673752333670239, + "acc_norm": 0.6090909090909091, + "acc_norm_stderr": 0.04673752333670239 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.337037037037037, + "acc_stderr": 0.028820884666253255, + "acc_norm": 0.337037037037037, + "acc_norm_stderr": 0.028820884666253255 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3443708609271523, + "acc_stderr": 0.038796870240733264, + "acc_norm": 0.3443708609271523, + "acc_norm_stderr": 0.038796870240733264 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7164179104477612, + "acc_stderr": 0.03187187537919796, + "acc_norm": 0.7164179104477612, + "acc_norm_stderr": 0.03187187537919796 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5202312138728323, + "acc_stderr": 0.03809342081273957, + "acc_norm": 0.5202312138728323, + "acc_norm_stderr": 0.03809342081273957 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.4497354497354497, + "acc_stderr": 0.02562085704293665, + "acc_norm": 0.4497354497354497, + 
"acc_norm_stderr": 0.02562085704293665 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.6458333333333334, + "acc_stderr": 0.039994111357535424, + "acc_norm": 0.6458333333333334, + "acc_norm_stderr": 0.039994111357535424 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.71, + "acc_stderr": 0.04560480215720683, + "acc_norm": 0.71, + "acc_norm_stderr": 0.04560480215720683 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5635838150289018, + "acc_stderr": 0.026700545424943677, + "acc_norm": 0.5635838150289018, + "acc_norm_stderr": 0.026700545424943677 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.6012269938650306, + "acc_stderr": 0.03847021420456023, + "acc_norm": 0.6012269938650306, + "acc_norm_stderr": 0.03847021420456023 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.654320987654321, + "acc_stderr": 0.026462487777001862, + "acc_norm": 0.654320987654321, + "acc_norm_stderr": 0.026462487777001862 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.04793724854411018, + "acc_norm": 0.35, + "acc_norm_stderr": 0.04793724854411018 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7461139896373057, + "acc_stderr": 0.03141024780565318, + "acc_norm": 0.7461139896373057, + "acc_norm_stderr": 0.03141024780565318 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.45614035087719296, + "acc_stderr": 0.046854730419077895, + "acc_norm": 0.45614035087719296, + "acc_norm_stderr": 0.046854730419077895 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.7192660550458716, + "acc_stderr": 0.019266055045871606, + "acc_norm": 0.7192660550458716, + "acc_norm_stderr": 0.019266055045871606 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4126984126984127, + "acc_stderr": 0.04403438954768177, + "acc_norm": 
0.4126984126984127, + "acc_norm_stderr": 0.04403438954768177 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5751633986928104, + "acc_stderr": 0.02830457667314111, + "acc_norm": 0.5751633986928104, + "acc_norm_stderr": 0.02830457667314111 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.69, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.69, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7107438016528925, + "acc_stderr": 0.04139112727635463, + "acc_norm": 0.7107438016528925, + "acc_norm_stderr": 0.04139112727635463 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.6381578947368421, + "acc_stderr": 0.03910525752849724, + "acc_norm": 0.6381578947368421, + "acc_norm_stderr": 0.03910525752849724 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5735294117647058, + "acc_stderr": 0.020007912739359368, + "acc_norm": 0.5735294117647058, + "acc_norm_stderr": 0.020007912739359368 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.4326241134751773, + "acc_stderr": 0.02955545423677885, + "acc_norm": 0.4326241134751773, + "acc_norm_stderr": 0.02955545423677885 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4375, + "acc_stderr": 0.04708567521880525, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.04708567521880525 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.0340763209385405, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.0340763209385405 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.3318435754189944, + "acc_stderr": 0.015748421208187303, + "acc_norm": 0.3318435754189944, + "acc_norm_stderr": 0.015748421208187303 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.68, + "acc_stderr": 0.046882617226215034, + 
"acc_norm": 0.68, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4963235294117647, + "acc_stderr": 0.030372015885428195, + "acc_norm": 0.4963235294117647, + "acc_norm_stderr": 0.030372015885428195 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6653061224489796, + "acc_stderr": 0.030209235226242304, + "acc_norm": 0.6653061224489796, + "acc_norm_stderr": 0.030209235226242304 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7341772151898734, + "acc_stderr": 0.02875679962965834, + "acc_norm": 0.7341772151898734, + "acc_norm_stderr": 0.02875679962965834 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.44784876140808344, + "acc_stderr": 0.012700582404768228, + "acc_norm": 0.44784876140808344, + "acc_norm_stderr": 0.012700582404768228 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6813725490196079, + "acc_stderr": 0.032702871814820816, + "acc_norm": 0.6813725490196079, + "acc_norm_stderr": 0.032702871814820816 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6242424242424243, + "acc_stderr": 0.03781887353205983, + "acc_norm": 0.6242424242424243, + "acc_norm_stderr": 0.03781887353205983 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.7613219094247246, + "mc1_stderr": 0.014922629695456418, + "mc2": 0.8436020494803503, + "mc2_stderr": 0.012103168488091225 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5064935064935064, + "acc_stderr": 0.017188904359077314, + "acc_norm": 0.5419126328217237, + "acc_norm_stderr": 0.017129852117911147 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 
1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + 
"harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "freewheelin/free-solar-dpo-v0.9", + "model_sha": "7ce84ecc50b8a41b0c30d252031eb077c70b4fde", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/freewheelin/free-solar-evo-v0.10/result_2024-04-24 05:39:11.json b/freewheelin/free-solar-evo-v0.10/result_2024-04-24 05:39:11.json new file mode 100644 index 0000000000000000000000000000000000000000..02e9c7865359ad02f2efdb0305d0a51c0b92c5bf --- /dev/null +++ b/freewheelin/free-solar-evo-v0.10/result_2024-04-24 05:39:11.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.7312286689419796, + "acc_stderr": 0.012955065963710686, + "acc_norm": 0.7670648464163823, + "acc_norm_stderr": 0.01235250704261741 + }, + "harness|ko_hellaswag|10": { + "acc": 0.6978689504082852, + "acc_stderr": 0.004582433109636474, + "acc_norm": 0.8106950806612229, + "acc_norm_stderr": 0.003909500159884881 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6900584795321637, + "acc_stderr": 0.035469769593931624, + "acc_norm": 0.6900584795321637, + "acc_norm_stderr": 0.035469769593931624 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.7572815533980582, + "acc_stderr": 0.04245022486384495, + "acc_norm": 0.7572815533980582, + "acc_norm_stderr": 0.04245022486384495 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6934865900383141, + "acc_stderr": 0.01648695289304151, + "acc_norm": 0.6934865900383141, + "acc_norm_stderr": 0.01648695289304151 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4962962962962963, + 
"acc_stderr": 0.04319223625811331, + "acc_norm": 0.4962962962962963, + "acc_norm_stderr": 0.04319223625811331 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4808510638297872, + "acc_stderr": 0.032662042990646775, + "acc_norm": 0.4808510638297872, + "acc_norm_stderr": 0.032662042990646775 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.463855421686747, + "acc_stderr": 0.03882310850890594, + "acc_norm": 0.463855421686747, + "acc_norm_stderr": 0.03882310850890594 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6334405144694534, + "acc_stderr": 0.027368078243971646, + "acc_norm": 0.6334405144694534, + "acc_norm_stderr": 0.027368078243971646 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.6367713004484304, + "acc_stderr": 0.03227790442850499, + "acc_norm": 0.6367713004484304, + "acc_norm_stderr": 0.03227790442850499 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5572519083969466, + "acc_stderr": 0.04356447202665069, + "acc_norm": 0.5572519083969466, + "acc_norm_stderr": 0.04356447202665069 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.52, + "acc_stderr": 0.05021167315686781, + "acc_norm": 0.52, + "acc_norm_stderr": 0.05021167315686781 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7525252525252525, + "acc_stderr": 0.03074630074212451, + "acc_norm": 0.7525252525252525, + "acc_norm_stderr": 0.03074630074212451 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5241379310344828, + "acc_stderr": 0.0416180850350153, + "acc_norm": 0.5241379310344828, + "acc_norm_stderr": 0.0416180850350153 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04690650298201942, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04690650298201942 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 
0.6302521008403361, + "acc_stderr": 0.03135709599613591, + "acc_norm": 0.6302521008403361, + "acc_norm_stderr": 0.03135709599613591 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.6153846153846154, + "acc_stderr": 0.0246667449151872, + "acc_norm": 0.6153846153846154, + "acc_norm_stderr": 0.0246667449151872 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.69, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.69, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6481481481481481, + "acc_stderr": 0.046166311118017146, + "acc_norm": 0.6481481481481481, + "acc_norm_stderr": 0.046166311118017146 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.45320197044334976, + "acc_stderr": 0.035025446508458714, + "acc_norm": 0.45320197044334976, + "acc_norm_stderr": 0.035025446508458714 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6064516129032258, + "acc_stderr": 0.027791878753132264, + "acc_norm": 0.6064516129032258, + "acc_norm_stderr": 0.027791878753132264 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.8290598290598291, + "acc_stderr": 0.024662496845209804, + "acc_norm": 0.8290598290598291, + "acc_norm_stderr": 0.024662496845209804 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5773584905660377, + "acc_stderr": 0.03040233144576954, + "acc_norm": 0.5773584905660377, + "acc_norm_stderr": 0.03040233144576954 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6, + "acc_stderr": 0.0469237132203465, + "acc_norm": 0.6, + "acc_norm_stderr": 0.0469237132203465 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.02911661760608301, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.02911661760608301 + }, + "harness|ko_mmlu_high_school_physics|5": { + 
"acc": 0.33112582781456956, + "acc_stderr": 0.038425817186598696, + "acc_norm": 0.33112582781456956, + "acc_norm_stderr": 0.038425817186598696 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7064676616915423, + "acc_stderr": 0.032200241045342054, + "acc_norm": 0.7064676616915423, + "acc_norm_stderr": 0.032200241045342054 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5491329479768786, + "acc_stderr": 0.03794012674697029, + "acc_norm": 0.5491329479768786, + "acc_norm_stderr": 0.03794012674697029 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.4497354497354497, + "acc_stderr": 0.02562085704293665, + "acc_norm": 0.4497354497354497, + "acc_norm_stderr": 0.02562085704293665 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.6319444444444444, + "acc_stderr": 0.040329990539607195, + "acc_norm": 0.6319444444444444, + "acc_norm_stderr": 0.040329990539607195 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.74, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.74, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.6040462427745664, + "acc_stderr": 0.02632981334194625, + "acc_norm": 0.6040462427745664, + "acc_norm_stderr": 0.02632981334194625 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.6073619631901841, + "acc_stderr": 0.038367409078310294, + "acc_norm": 0.6073619631901841, + "acc_norm_stderr": 0.038367409078310294 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6574074074074074, + "acc_stderr": 0.026406145973625682, + "acc_norm": 0.6574074074074074, + "acc_norm_stderr": 0.026406145973625682 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + 
"harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7461139896373057, + "acc_stderr": 0.03141024780565318, + "acc_norm": 0.7461139896373057, + "acc_norm_stderr": 0.03141024780565318 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.4649122807017544, + "acc_stderr": 0.046920083813689104, + "acc_norm": 0.4649122807017544, + "acc_norm_stderr": 0.046920083813689104 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.7247706422018348, + "acc_stderr": 0.019149093743155196, + "acc_norm": 0.7247706422018348, + "acc_norm_stderr": 0.019149093743155196 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4126984126984127, + "acc_stderr": 0.04403438954768177, + "acc_norm": 0.4126984126984127, + "acc_norm_stderr": 0.04403438954768177 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5882352941176471, + "acc_stderr": 0.02818059632825929, + "acc_norm": 0.5882352941176471, + "acc_norm_stderr": 0.02818059632825929 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.7, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.7, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7107438016528925, + "acc_stderr": 0.04139112727635463, + "acc_norm": 0.7107438016528925, + "acc_norm_stderr": 0.04139112727635463 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.6447368421052632, + "acc_stderr": 0.03894734487013317, + "acc_norm": 0.6447368421052632, + "acc_norm_stderr": 0.03894734487013317 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5784313725490197, + "acc_stderr": 0.019977422600227477, + "acc_norm": 0.5784313725490197, + "acc_norm_stderr": 0.019977422600227477 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.4397163120567376, + "acc_stderr": 0.02960991207559411, + "acc_norm": 0.4397163120567376, + "acc_norm_stderr": 0.02960991207559411 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.45535714285714285, + "acc_stderr": 0.04726835553719099, + "acc_norm": 
0.45535714285714285, + "acc_norm_stderr": 0.04726835553719099 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.0340763209385405, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.0340763209385405 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.33854748603351953, + "acc_stderr": 0.01582670009648135, + "acc_norm": 0.33854748603351953, + "acc_norm_stderr": 0.01582670009648135 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.68, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.68, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5257352941176471, + "acc_stderr": 0.030332578094555026, + "acc_norm": 0.5257352941176471, + "acc_norm_stderr": 0.030332578094555026 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6857142857142857, + "acc_stderr": 0.029719329422417458, + "acc_norm": 0.6857142857142857, + "acc_norm_stderr": 0.029719329422417458 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7257383966244726, + "acc_stderr": 0.02904133351059802, + "acc_norm": 0.7257383966244726, + "acc_norm_stderr": 0.02904133351059802 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.44328552803129073, + "acc_stderr": 0.012687818419599917, + "acc_norm": 0.44328552803129073, + "acc_norm_stderr": 0.012687818419599917 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6715686274509803, + "acc_stderr": 0.032962451101722294, + "acc_norm": 0.6715686274509803, + "acc_norm_stderr": 0.032962451101722294 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6545454545454545, + "acc_stderr": 0.03713158067481913, + "acc_norm": 0.6545454545454545, + "acc_norm_stderr": 0.03713158067481913 + }, + "harness|ko_truthfulqa_mc|0": { + 
"mc1": 0.7723378212974297, + "mc1_stderr": 0.014679255032111068, + "mc2": 0.8398597140858357, + "mc2_stderr": 0.012345273176101335 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5064935064935064, + "acc_stderr": 0.01718890435907731, + "acc_norm": 0.525383707201889, + "acc_norm_stderr": 0.017168187201429253 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "freewheelin/free-solar-evo-v0.10", + "model_sha": "0b5382988869e500b811b0b7c1c792805a3b5b87", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/freewheelin/free-solar-evo-v0.11/result_2024-04-24 06:41:13.json b/freewheelin/free-solar-evo-v0.11/result_2024-04-24 06:41:13.json new file mode 100644 index 0000000000000000000000000000000000000000..d2867d56c48990dd74a1e4cac93d42796d808527 --- /dev/null +++ b/freewheelin/free-solar-evo-v0.11/result_2024-04-24 06:41:13.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.7372013651877133, + "acc_stderr": 0.01286252317535133, + "acc_norm": 0.7721843003412969, + "acc_norm_stderr": 0.012256708602326916 + }, + "harness|ko_hellaswag|10": { + "acc": 0.7042421828321052, + 
"acc_stderr": 0.004554499409290686, + "acc_norm": 0.8126867157936666, + "acc_norm_stderr": 0.0038936542666334058 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.695906432748538, + "acc_stderr": 0.03528211258245231, + "acc_norm": 0.695906432748538, + "acc_norm_stderr": 0.03528211258245231 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.7572815533980582, + "acc_stderr": 0.04245022486384495, + "acc_norm": 0.7572815533980582, + "acc_norm_stderr": 0.04245022486384495 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6896551724137931, + "acc_stderr": 0.01654378502604833, + "acc_norm": 0.6896551724137931, + "acc_norm_stderr": 0.01654378502604833 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45925925925925926, + "acc_stderr": 0.04304979692464243, + "acc_norm": 0.45925925925925926, + "acc_norm_stderr": 0.04304979692464243 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4851063829787234, + "acc_stderr": 0.03267151848924777, + "acc_norm": 0.4851063829787234, + "acc_norm_stderr": 0.03267151848924777 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.46987951807228917, + "acc_stderr": 0.03885425420866766, + "acc_norm": 0.46987951807228917, + "acc_norm_stderr": 0.03885425420866766 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6366559485530546, + "acc_stderr": 0.027316847674192717, + "acc_norm": 0.6366559485530546, + "acc_norm_stderr": 0.027316847674192717 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.6457399103139013, + "acc_stderr": 0.032100621541349864, + "acc_norm": 0.6457399103139013, + "acc_norm_stderr": 0.032100621541349864 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5572519083969466, + "acc_stderr": 0.04356447202665069, + "acc_norm": 0.5572519083969466, + "acc_norm_stderr": 0.04356447202665069 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.51, + 
"acc_stderr": 0.050241839379569095, + "acc_norm": 0.51, + "acc_norm_stderr": 0.050241839379569095 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7575757575757576, + "acc_stderr": 0.030532892233932036, + "acc_norm": 0.7575757575757576, + "acc_norm_stderr": 0.030532892233932036 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5172413793103449, + "acc_stderr": 0.04164188720169375, + "acc_norm": 0.5172413793103449, + "acc_norm_stderr": 0.04164188720169375 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3431372549019608, + "acc_stderr": 0.04724007352383887, + "acc_norm": 0.3431372549019608, + "acc_norm_stderr": 0.04724007352383887 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6050420168067226, + "acc_stderr": 0.03175367846096625, + "acc_norm": 0.6050420168067226, + "acc_norm_stderr": 0.03175367846096625 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.6076923076923076, + "acc_stderr": 0.02475600038213096, + "acc_norm": 0.6076923076923076, + "acc_norm_stderr": 0.02475600038213096 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.7, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.7, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6574074074074074, + "acc_stderr": 0.04587904741301812, + "acc_norm": 0.6574074074074074, + "acc_norm_stderr": 0.04587904741301812 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.46798029556650245, + "acc_stderr": 0.03510766597959217, + "acc_norm": 0.46798029556650245, + "acc_norm_stderr": 0.03510766597959217 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.603225806451613, + "acc_stderr": 0.027831231605767955, + "acc_norm": 0.603225806451613, + "acc_norm_stderr": 0.027831231605767955 + }, + "harness|ko_mmlu_marketing|5": { + 
"acc": 0.8290598290598291, + "acc_stderr": 0.024662496845209804, + "acc_norm": 0.8290598290598291, + "acc_norm_stderr": 0.024662496845209804 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5811320754716981, + "acc_stderr": 0.030365050829115205, + "acc_norm": 0.5811320754716981, + "acc_norm_stderr": 0.030365050829115205 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6, + "acc_stderr": 0.0469237132203465, + "acc_norm": 0.6, + "acc_norm_stderr": 0.0469237132203465 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.34814814814814815, + "acc_stderr": 0.029045600290616258, + "acc_norm": 0.34814814814814815, + "acc_norm_stderr": 0.029045600290616258 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33774834437086093, + "acc_stderr": 0.038615575462551684, + "acc_norm": 0.33774834437086093, + "acc_norm_stderr": 0.038615575462551684 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7064676616915423, + "acc_stderr": 0.032200241045342054, + "acc_norm": 0.7064676616915423, + "acc_norm_stderr": 0.032200241045342054 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5260115606936416, + "acc_stderr": 0.03807301726504511, + "acc_norm": 0.5260115606936416, + "acc_norm_stderr": 0.03807301726504511 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.4523809523809524, + "acc_stderr": 0.025634258115554965, + "acc_norm": 0.4523809523809524, + "acc_norm_stderr": 0.025634258115554965 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.6388888888888888, + "acc_stderr": 0.04016660030451233, + "acc_norm": 0.6388888888888888, + "acc_norm_stderr": 0.04016660030451233 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.73, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.73, + "acc_norm_stderr": 0.04461960433384741 + }, + 
"harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5895953757225434, + "acc_stderr": 0.026483392042098174, + "acc_norm": 0.5895953757225434, + "acc_norm_stderr": 0.026483392042098174 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.6012269938650306, + "acc_stderr": 0.03847021420456023, + "acc_norm": 0.6012269938650306, + "acc_norm_stderr": 0.03847021420456023 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6574074074074074, + "acc_stderr": 0.026406145973625682, + "acc_norm": 0.6574074074074074, + "acc_norm_stderr": 0.026406145973625682 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7616580310880829, + "acc_stderr": 0.030748905363909895, + "acc_norm": 0.7616580310880829, + "acc_norm_stderr": 0.030748905363909895 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.47368421052631576, + "acc_stderr": 0.046970851366478626, + "acc_norm": 0.47368421052631576, + "acc_norm_stderr": 0.046970851366478626 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.7247706422018348, + "acc_stderr": 0.019149093743155196, + "acc_norm": 0.7247706422018348, + "acc_norm_stderr": 0.019149093743155196 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.40476190476190477, + "acc_stderr": 0.04390259265377561, + "acc_norm": 0.40476190476190477, + "acc_norm_stderr": 0.04390259265377561 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5882352941176471, + "acc_stderr": 0.02818059632825929, + "acc_norm": 0.5882352941176471, + "acc_norm_stderr": 0.02818059632825929 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.7, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.7, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.71900826446281, + "acc_stderr": 0.04103203830514512, + "acc_norm": 0.71900826446281, + 
"acc_norm_stderr": 0.04103203830514512 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.6381578947368421, + "acc_stderr": 0.03910525752849724, + "acc_norm": 0.6381578947368421, + "acc_norm_stderr": 0.03910525752849724 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5816993464052288, + "acc_stderr": 0.019955975145835542, + "acc_norm": 0.5816993464052288, + "acc_norm_stderr": 0.019955975145835542 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.4397163120567376, + "acc_stderr": 0.02960991207559411, + "acc_norm": 0.4397163120567376, + "acc_norm_stderr": 0.02960991207559411 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4375, + "acc_stderr": 0.04708567521880525, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.04708567521880525 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4861111111111111, + "acc_stderr": 0.034086558679777494, + "acc_norm": 0.4861111111111111, + "acc_norm_stderr": 0.034086558679777494 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.34413407821229053, + "acc_stderr": 0.015889221313307094, + "acc_norm": 0.34413407821229053, + "acc_norm_stderr": 0.015889221313307094 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.7, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.7, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5036764705882353, + "acc_stderr": 0.030372015885428195, + "acc_norm": 0.5036764705882353, + "acc_norm_stderr": 0.030372015885428195 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6775510204081633, + "acc_stderr": 0.02992310056368391, + "acc_norm": 0.6775510204081633, + "acc_norm_stderr": 0.02992310056368391 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.729957805907173, + "acc_stderr": 
0.028900721906293426, + "acc_norm": 0.729957805907173, + "acc_norm_stderr": 0.028900721906293426 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.44654498044328556, + "acc_stderr": 0.012697046024399663, + "acc_norm": 0.44654498044328556, + "acc_norm_stderr": 0.012697046024399663 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6715686274509803, + "acc_stderr": 0.032962451101722294, + "acc_norm": 0.6715686274509803, + "acc_norm_stderr": 0.032962451101722294 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6484848484848484, + "acc_stderr": 0.037282069986826503, + "acc_norm": 0.6484848484848484, + "acc_norm_stderr": 0.037282069986826503 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.7662178702570379, + "mc1_stderr": 0.014816195991931584, + "mc2": 0.8381368987751295, + "mc2_stderr": 0.012433568065008772 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5029515938606848, + "acc_stderr": 0.017190054580194694, + "acc_norm": 0.526564344746163, + "acc_norm_stderr": 0.017166075717577747 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + 
"harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "freewheelin/free-solar-evo-v0.11", + "model_sha": "17fc24a557bd3c3836abc9f6a367c803cba0cccd", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + 
"override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/freewheelin/free-solar-evo-v0.13/result_2024-04-28 04:04:12.json b/freewheelin/free-solar-evo-v0.13/result_2024-04-28 04:04:12.json new file mode 100644 index 0000000000000000000000000000000000000000..ae4d9ee8f7bcf6461ce6d6f22a3048a4301b2fb2 --- /dev/null +++ b/freewheelin/free-solar-evo-v0.13/result_2024-04-28 04:04:12.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.7337883959044369, + "acc_stderr": 0.012915774781523214, + "acc_norm": 0.7670648464163823, + "acc_norm_stderr": 0.01235250704261741 + }, + "harness|ko_hellaswag|10": { + "acc": 0.7006572395937064, + "acc_stderr": 0.004570342034463261, + "acc_norm": 0.8108942441744672, + "acc_norm_stderr": 0.0039079230108405895 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.7017543859649122, + "acc_stderr": 0.035087719298245626, + "acc_norm": 0.7017543859649122, + "acc_norm_stderr": 0.035087719298245626 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.7572815533980582, + "acc_stderr": 0.04245022486384495, + "acc_norm": 0.7572815533980582, + "acc_norm_stderr": 0.04245022486384495 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6934865900383141, + "acc_stderr": 0.01648695289304151, + "acc_norm": 0.6934865900383141, + "acc_norm_stderr": 0.01648695289304151 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.5037037037037037, + "acc_stderr": 0.04319223625811331, + "acc_norm": 0.5037037037037037, + "acc_norm_stderr": 0.04319223625811331 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.48936170212765956, + "acc_stderr": 0.03267862331014063, + "acc_norm": 0.48936170212765956, + "acc_norm_stderr": 0.03267862331014063 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4578313253012048, + "acc_stderr": 
0.0387862677100236, + "acc_norm": 0.4578313253012048, + "acc_norm_stderr": 0.0387862677100236 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6366559485530546, + "acc_stderr": 0.027316847674192717, + "acc_norm": 0.6366559485530546, + "acc_norm_stderr": 0.027316847674192717 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.6322869955156951, + "acc_stderr": 0.03236198350928276, + "acc_norm": 0.6322869955156951, + "acc_norm_stderr": 0.03236198350928276 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5572519083969466, + "acc_stderr": 0.04356447202665069, + "acc_norm": 0.5572519083969466, + "acc_norm_stderr": 0.04356447202665069 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.53, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.53, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7626262626262627, + "acc_stderr": 0.030313710538198906, + "acc_norm": 0.7626262626262627, + "acc_norm_stderr": 0.030313710538198906 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5241379310344828, + "acc_stderr": 0.0416180850350153, + "acc_norm": 0.5241379310344828, + "acc_norm_stderr": 0.0416180850350153 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04690650298201942, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04690650298201942 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6260504201680672, + "acc_stderr": 0.03142946637883708, + "acc_norm": 0.6260504201680672, + "acc_norm_stderr": 0.03142946637883708 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.617948717948718, + "acc_stderr": 0.024635549163908237, + "acc_norm": 0.617948717948718, + "acc_norm_stderr": 0.024635549163908237 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.69, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.69, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.29, 
+ "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6481481481481481, + "acc_stderr": 0.046166311118017146, + "acc_norm": 0.6481481481481481, + "acc_norm_stderr": 0.046166311118017146 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.45320197044334976, + "acc_stderr": 0.035025446508458714, + "acc_norm": 0.45320197044334976, + "acc_norm_stderr": 0.035025446508458714 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6096774193548387, + "acc_stderr": 0.02775125663696958, + "acc_norm": 0.6096774193548387, + "acc_norm_stderr": 0.02775125663696958 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.8290598290598291, + "acc_stderr": 0.024662496845209804, + "acc_norm": 0.8290598290598291, + "acc_norm_stderr": 0.024662496845209804 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5735849056603773, + "acc_stderr": 0.030437794342983052, + "acc_norm": 0.5735849056603773, + "acc_norm_stderr": 0.030437794342983052 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6, + "acc_stderr": 0.0469237132203465, + "acc_norm": 0.6, + "acc_norm_stderr": 0.0469237132203465 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.029116617606083015, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.029116617606083015 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33112582781456956, + "acc_stderr": 0.038425817186598696, + "acc_norm": 0.33112582781456956, + "acc_norm_stderr": 0.038425817186598696 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7164179104477612, + "acc_stderr": 0.03187187537919795, + "acc_norm": 0.7164179104477612, + "acc_norm_stderr": 0.03187187537919795 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5317919075144508, + "acc_stderr": 0.03804749744364763, + "acc_norm": 0.5317919075144508, + "acc_norm_stderr": 0.03804749744364763 + }, + 
"harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.4523809523809524, + "acc_stderr": 0.025634258115554965, + "acc_norm": 0.4523809523809524, + "acc_norm_stderr": 0.025634258115554965 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.6319444444444444, + "acc_stderr": 0.040329990539607195, + "acc_norm": 0.6319444444444444, + "acc_norm_stderr": 0.040329990539607195 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.74, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.74, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.6069364161849711, + "acc_stderr": 0.026296227915613667, + "acc_norm": 0.6069364161849711, + "acc_norm_stderr": 0.026296227915613667 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.6134969325153374, + "acc_stderr": 0.03825825548848607, + "acc_norm": 0.6134969325153374, + "acc_norm_stderr": 0.03825825548848607 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6574074074074074, + "acc_stderr": 0.026406145973625682, + "acc_norm": 0.6574074074074074, + "acc_norm_stderr": 0.026406145973625682 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7564766839378239, + "acc_stderr": 0.030975436386845426, + "acc_norm": 0.7564766839378239, + "acc_norm_stderr": 0.030975436386845426 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.4649122807017544, + "acc_stderr": 0.046920083813689104, + "acc_norm": 0.4649122807017544, + "acc_norm_stderr": 0.046920083813689104 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.7247706422018348, + "acc_stderr": 0.019149093743155196, + "acc_norm": 0.7247706422018348, + "acc_norm_stderr": 
0.019149093743155196 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.40476190476190477, + "acc_stderr": 0.04390259265377561, + "acc_norm": 0.40476190476190477, + "acc_norm_stderr": 0.04390259265377561 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5980392156862745, + "acc_stderr": 0.028074158947600653, + "acc_norm": 0.5980392156862745, + "acc_norm_stderr": 0.028074158947600653 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.7, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.7, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7107438016528925, + "acc_stderr": 0.04139112727635463, + "acc_norm": 0.7107438016528925, + "acc_norm_stderr": 0.04139112727635463 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.6513157894736842, + "acc_stderr": 0.038781398887976104, + "acc_norm": 0.6513157894736842, + "acc_norm_stderr": 0.038781398887976104 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5833333333333334, + "acc_stderr": 0.019944914136873586, + "acc_norm": 0.5833333333333334, + "acc_norm_stderr": 0.019944914136873586 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.43617021276595747, + "acc_stderr": 0.029583452036284073, + "acc_norm": 0.43617021276595747, + "acc_norm_stderr": 0.029583452036284073 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4375, + "acc_stderr": 0.04708567521880525, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.04708567521880525 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4861111111111111, + "acc_stderr": 0.034086558679777494, + "acc_norm": 0.4861111111111111, + "acc_norm_stderr": 0.034086558679777494 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.34301675977653634, + "acc_stderr": 0.015876912673057735, + "acc_norm": 0.34301675977653634, + "acc_norm_stderr": 0.015876912673057735 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.48, + "acc_stderr": 0.05021167315686779, + "acc_norm": 0.48, + 
"acc_norm_stderr": 0.05021167315686779 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.69, + "acc_stderr": 0.046482319871173156, + "acc_norm": 0.69, + "acc_norm_stderr": 0.046482319871173156 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5294117647058824, + "acc_stderr": 0.030320243265004137, + "acc_norm": 0.5294117647058824, + "acc_norm_stderr": 0.030320243265004137 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6857142857142857, + "acc_stderr": 0.029719329422417458, + "acc_norm": 0.6857142857142857, + "acc_norm_stderr": 0.029719329422417458 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7257383966244726, + "acc_stderr": 0.02904133351059802, + "acc_norm": 0.7257383966244726, + "acc_norm_stderr": 0.02904133351059802 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.44328552803129073, + "acc_stderr": 0.012687818419599917, + "acc_norm": 0.44328552803129073, + "acc_norm_stderr": 0.012687818419599917 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6715686274509803, + "acc_stderr": 0.032962451101722294, + "acc_norm": 0.6715686274509803, + "acc_norm_stderr": 0.032962451101722294 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6424242424242425, + "acc_stderr": 0.03742597043806587, + "acc_norm": 0.6424242424242425, + "acc_norm_stderr": 0.03742597043806587 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.769889840881273, + "mc1_stderr": 0.014734557959807763, + "mc2": 0.8406147464886705, + "mc2_stderr": 0.012330654803878138 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5088547815820543, + "acc_stderr": 0.01718765819933674, + "acc_norm": 0.526564344746163, + "acc_norm_stderr": 0.017166075717577747 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + 
"harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + 
"harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "freewheelin/free-solar-evo-v0.13", + "model_sha": "2a7eb72f84c54898630f9db470eee0f936a64396", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/freewheelin/free-solar-instrunction-v0.1/result_2024-03-13 07:27:38.json b/freewheelin/free-solar-instrunction-v0.1/result_2024-03-13 07:27:38.json new file mode 100644 index 0000000000000000000000000000000000000000..7e61aa437f63966ed8177cb2408a0ec5375310fc --- /dev/null +++ b/freewheelin/free-solar-instrunction-v0.1/result_2024-03-13 07:27:38.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.42235494880546076, + "acc_stderr": 0.014434138713379977, + "acc_norm": 0.47696245733788395, + "acc_norm_stderr": 0.014595873205358273 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4475204142601075, + "acc_stderr": 0.004962220512548364, + "acc_norm": 0.6075482971519618, + "acc_norm_stderr": 0.0048729844929679975 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5789473684210527, + "acc_stderr": 0.037867207062342145, + "acc_norm": 0.5789473684210527, + "acc_norm_stderr": 0.037867207062342145 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6019417475728155, + "acc_stderr": 0.04846748253977238, + "acc_norm": 0.6019417475728155, + "acc_norm_stderr": 0.04846748253977238 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 
0.6079182630906769, + "acc_stderr": 0.01745852405014763, + "acc_norm": 0.6079182630906769, + "acc_norm_stderr": 0.01745852405014763 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.43703703703703706, + "acc_stderr": 0.04284958639753399, + "acc_norm": 0.43703703703703706, + "acc_norm_stderr": 0.04284958639753399 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4127659574468085, + "acc_stderr": 0.03218471141400351, + "acc_norm": 0.4127659574468085, + "acc_norm_stderr": 0.03218471141400351 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42771084337349397, + "acc_stderr": 0.03851597683718533, + "acc_norm": 0.42771084337349397, + "acc_norm_stderr": 0.03851597683718533 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5787781350482315, + "acc_stderr": 0.028043399858210628, + "acc_norm": 0.5787781350482315, + "acc_norm_stderr": 0.028043399858210628 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5067264573991032, + "acc_stderr": 0.033554765962343545, + "acc_norm": 0.5067264573991032, + "acc_norm_stderr": 0.033554765962343545 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5114503816793893, + "acc_stderr": 0.043841400240780176, + "acc_norm": 0.5114503816793893, + "acc_norm_stderr": 0.043841400240780176 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.41, + "acc_stderr": 0.04943110704237103, + "acc_norm": 0.41, + "acc_norm_stderr": 0.04943110704237103 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6868686868686869, + "acc_stderr": 0.03304205087813652, + "acc_norm": 0.6868686868686869, + "acc_norm_stderr": 0.03304205087813652 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.47586206896551725, + "acc_stderr": 0.041618085035015295, + "acc_norm": 0.47586206896551725, + "acc_norm_stderr": 0.041618085035015295 + }, + "harness|ko_mmlu_college_physics|5": { 
+ "acc": 0.3235294117647059, + "acc_stderr": 0.046550104113196177, + "acc_norm": 0.3235294117647059, + "acc_norm_stderr": 0.046550104113196177 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5672268907563025, + "acc_stderr": 0.032183581077426124, + "acc_norm": 0.5672268907563025, + "acc_norm_stderr": 0.032183581077426124 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5256410256410257, + "acc_stderr": 0.025317649726448666, + "acc_norm": 0.5256410256410257, + "acc_norm_stderr": 0.025317649726448666 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.59, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.59, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6203703703703703, + "acc_stderr": 0.04691521224077742, + "acc_norm": 0.6203703703703703, + "acc_norm_stderr": 0.04691521224077742 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.03465304488406795, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.03465304488406795 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5548387096774193, + "acc_stderr": 0.028272410186214906, + "acc_norm": 0.5548387096774193, + "acc_norm_stderr": 0.028272410186214906 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7264957264957265, + "acc_stderr": 0.02920254015343119, + "acc_norm": 0.7264957264957265, + "acc_norm_stderr": 0.02920254015343119 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5207547169811321, + "acc_stderr": 0.030746349975723463, + "acc_norm": 0.5207547169811321, + "acc_norm_stderr": 0.030746349975723463 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5363636363636364, + "acc_stderr": 0.04776449162396197, + "acc_norm": 0.5363636363636364, + "acc_norm_stderr": 0.04776449162396197 + 
}, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.36666666666666664, + "acc_stderr": 0.02938162072646507, + "acc_norm": 0.36666666666666664, + "acc_norm_stderr": 0.02938162072646507 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3841059602649007, + "acc_stderr": 0.03971301814719198, + "acc_norm": 0.3841059602649007, + "acc_norm_stderr": 0.03971301814719198 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6417910447761194, + "acc_stderr": 0.03390393042268815, + "acc_norm": 0.6417910447761194, + "acc_norm_stderr": 0.03390393042268815 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.43352601156069365, + "acc_stderr": 0.037786210790920545, + "acc_norm": 0.43352601156069365, + "acc_norm_stderr": 0.037786210790920545 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3994708994708995, + "acc_stderr": 0.025225450284067877, + "acc_norm": 0.3994708994708995, + "acc_norm_stderr": 0.025225450284067877 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4305555555555556, + "acc_stderr": 0.04140685639111502, + "acc_norm": 0.4305555555555556, + "acc_norm_stderr": 0.04140685639111502 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.67, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.67, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.523121387283237, + "acc_stderr": 0.026890297881303125, + "acc_norm": 0.523121387283237, + "acc_norm_stderr": 0.026890297881303125 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5153374233128835, + "acc_stderr": 0.03926522378708843, + "acc_norm": 0.5153374233128835, + "acc_norm_stderr": 0.03926522378708843 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5524691358024691, + "acc_stderr": 0.027667138569422708, + "acc_norm": 0.5524691358024691, + 
"acc_norm_stderr": 0.027667138569422708 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6632124352331606, + "acc_stderr": 0.034107802518361846, + "acc_norm": 0.6632124352331606, + "acc_norm_stderr": 0.034107802518361846 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04434600701584925, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04434600701584925 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6, + "acc_stderr": 0.021004201260420075, + "acc_norm": 0.6, + "acc_norm_stderr": 0.021004201260420075 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.04134913018303316, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.04134913018303316 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5326797385620915, + "acc_stderr": 0.02856869975222587, + "acc_norm": 0.5326797385620915, + "acc_norm_stderr": 0.02856869975222587 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.55, + "acc_stderr": 0.049999999999999996, + "acc_norm": 0.55, + "acc_norm_stderr": 0.049999999999999996 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6528925619834711, + "acc_stderr": 0.04345724570292534, + "acc_norm": 0.6528925619834711, + "acc_norm_stderr": 0.04345724570292534 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5131578947368421, + "acc_stderr": 0.04067533136309172, + "acc_norm": 0.5131578947368421, + "acc_norm_stderr": 0.04067533136309172 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4542483660130719, + "acc_stderr": 0.020142974553795195, + "acc_norm": 0.4542483660130719, + "acc_norm_stderr": 0.020142974553795195 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.36879432624113473, + "acc_stderr": 0.028782227561347247, + "acc_norm": 
0.36879432624113473, + "acc_norm_stderr": 0.028782227561347247 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.36607142857142855, + "acc_stderr": 0.04572372358737431, + "acc_norm": 0.36607142857142855, + "acc_norm_stderr": 0.04572372358737431 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.47685185185185186, + "acc_stderr": 0.034063153607115086, + "acc_norm": 0.47685185185185186, + "acc_norm_stderr": 0.034063153607115086 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2022346368715084, + "acc_stderr": 0.013433729483320993, + "acc_norm": 0.2022346368715084, + "acc_norm_stderr": 0.013433729483320993 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.67, + "acc_stderr": 0.047258156262526066, + "acc_norm": 0.67, + "acc_norm_stderr": 0.047258156262526066 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4485294117647059, + "acc_stderr": 0.030211479609121603, + "acc_norm": 0.4485294117647059, + "acc_norm_stderr": 0.030211479609121603 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.46530612244897956, + "acc_stderr": 0.03193207024425314, + "acc_norm": 0.46530612244897956, + "acc_norm_stderr": 0.03193207024425314 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.70042194092827, + "acc_stderr": 0.029818024749753095, + "acc_norm": 0.70042194092827, + "acc_norm_stderr": 0.029818024749753095 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.37157757496740546, + "acc_stderr": 0.012341828514528282, + "acc_norm": 0.37157757496740546, + "acc_norm_stderr": 0.012341828514528282 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5980392156862745, + "acc_stderr": 0.03441190023482465, + "acc_norm": 0.5980392156862745, + "acc_norm_stderr": 0.03441190023482465 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 
0.6606060606060606, + "acc_stderr": 0.03697442205031595, + "acc_norm": 0.6606060606060606, + "acc_norm_stderr": 0.03697442205031595 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.31701346389228885, + "mc1_stderr": 0.016289203374403403, + "mc2": 0.4767863869390881, + "mc2_stderr": 0.015689519123454735 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5242030696576151, + "acc_stderr": 0.017170202466520748, + "acc_norm": 0.5631641086186541, + "acc_norm_stderr": 0.01705263355985607 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + 
"harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "freewheelin/free-solar-instrunction-v0.1", + "model_sha": "97f4bafae041ca9bd04cd788cfd9a82a28843284", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/freewheelin/free-solar-instrunction-v0.2/result_2024-03-14 08:40:01.json b/freewheelin/free-solar-instrunction-v0.2/result_2024-03-14 08:40:01.json new file mode 100644 index 0000000000000000000000000000000000000000..410b703fd402d4264670bc3397c939f8631fdc6a --- /dev/null +++ b/freewheelin/free-solar-instrunction-v0.2/result_2024-03-14 08:40:01.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 
0.36177474402730375, + "acc_stderr": 0.014041957945038071, + "acc_norm": 0.41638225255972694, + "acc_norm_stderr": 0.014405618279436169 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3910575582553276, + "acc_stderr": 0.004869899297734549, + "acc_norm": 0.50318661621191, + "acc_norm_stderr": 0.004989680072717476 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.03615507630310936, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.03615507630310936 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5145631067961165, + "acc_stderr": 0.04948637324026637, + "acc_norm": 0.5145631067961165, + "acc_norm_stderr": 0.04948637324026637 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.45849297573435505, + "acc_stderr": 0.017818248603465554, + "acc_norm": 0.45849297573435505, + "acc_norm_stderr": 0.017818248603465554 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.32592592592592595, + "acc_stderr": 0.040491220417025055, + "acc_norm": 0.32592592592592595, + "acc_norm_stderr": 0.040491220417025055 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2765957446808511, + "acc_stderr": 0.02924188386962881, + "acc_norm": 0.2765957446808511, + "acc_norm_stderr": 0.02924188386962881 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3795180722891566, + "acc_stderr": 0.037777988227480165, + "acc_norm": 0.3795180722891566, + "acc_norm_stderr": 0.037777988227480165 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.47266881028938906, + "acc_stderr": 0.028355633568328174, + "acc_norm": 0.47266881028938906, + "acc_norm_stderr": 0.028355633568328174 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3632286995515695, + "acc_stderr": 0.03227790442850499, + "acc_norm": 0.3632286995515695, + "acc_norm_stderr": 0.03227790442850499 + }, + "harness|ko_mmlu_human_sexuality|5": { 
+ "acc": 0.3816793893129771, + "acc_stderr": 0.0426073515764456, + "acc_norm": 0.3816793893129771, + "acc_norm_stderr": 0.0426073515764456 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.4696969696969697, + "acc_stderr": 0.03555804051763929, + "acc_norm": 0.4696969696969697, + "acc_norm_stderr": 0.03555804051763929 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4206896551724138, + "acc_stderr": 0.0411391498118926, + "acc_norm": 0.4206896551724138, + "acc_norm_stderr": 0.0411391498118926 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.04488482852329017, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.04488482852329017 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4117647058823529, + "acc_stderr": 0.031968769891957786, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.031968769891957786 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.46153846153846156, + "acc_stderr": 0.025275892070240634, + "acc_norm": 0.46153846153846156, + "acc_norm_stderr": 0.025275892070240634 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.04803752235190193, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.04803752235190193 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3497536945812808, + "acc_stderr": 0.03355400904969565, + "acc_norm": 0.3497536945812808, + "acc_norm_stderr": 0.03355400904969565 + }, + 
"harness|ko_mmlu_high_school_biology|5": { + "acc": 0.44516129032258067, + "acc_stderr": 0.028272410186214906, + "acc_norm": 0.44516129032258067, + "acc_norm_stderr": 0.028272410186214906 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6410256410256411, + "acc_stderr": 0.03142616993791924, + "acc_norm": 0.6410256410256411, + "acc_norm_stderr": 0.03142616993791924 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.39245283018867927, + "acc_stderr": 0.03005258057955784, + "acc_norm": 0.39245283018867927, + "acc_norm_stderr": 0.03005258057955784 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4818181818181818, + "acc_stderr": 0.04785964010794918, + "acc_norm": 0.4818181818181818, + "acc_norm_stderr": 0.04785964010794918 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3111111111111111, + "acc_stderr": 0.028226446749683515, + "acc_norm": 0.3111111111111111, + "acc_norm_stderr": 0.028226446749683515 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.32450331125827814, + "acc_stderr": 0.038227469376587525, + "acc_norm": 0.32450331125827814, + "acc_norm_stderr": 0.038227469376587525 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.47761194029850745, + "acc_stderr": 0.03531987930208731, + "acc_norm": 0.47761194029850745, + "acc_norm_stderr": 0.03531987930208731 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4161849710982659, + "acc_stderr": 0.03758517775404948, + "acc_norm": 0.4161849710982659, + "acc_norm_stderr": 0.03758517775404948 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.02487081525105708, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.02487081525105708 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3680555555555556, + "acc_stderr": 0.0403299905396072, + "acc_norm": 0.3680555555555556, + "acc_norm_stderr": 0.0403299905396072 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + 
"acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.55, + "acc_stderr": 0.05, + "acc_norm": 0.55, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4797687861271676, + "acc_stderr": 0.026897049996382875, + "acc_norm": 0.4797687861271676, + "acc_norm_stderr": 0.026897049996382875 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.48466257668711654, + "acc_stderr": 0.039265223787088424, + "acc_norm": 0.48466257668711654, + "acc_norm_stderr": 0.039265223787088424 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.02743162372241502, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.02743162372241502 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.46632124352331605, + "acc_stderr": 0.036002440698671784, + "acc_norm": 0.46632124352331605, + "acc_norm_stderr": 0.036002440698671784 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.32456140350877194, + "acc_stderr": 0.04404556157374768, + "acc_norm": 0.32456140350877194, + "acc_norm_stderr": 0.04404556157374768 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.43669724770642204, + "acc_stderr": 0.021264820158714212, + "acc_norm": 0.43669724770642204, + "acc_norm_stderr": 0.021264820158714212 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.25396825396825395, + "acc_stderr": 0.038932596106046755, + "acc_norm": 0.25396825396825395, + "acc_norm_stderr": 0.038932596106046755 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4117647058823529, + "acc_stderr": 0.028180596328259293, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.028180596328259293 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 
0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5702479338842975, + "acc_stderr": 0.04519082021319774, + "acc_norm": 0.5702479338842975, + "acc_norm_stderr": 0.04519082021319774 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3881578947368421, + "acc_stderr": 0.03965842097512744, + "acc_norm": 0.3881578947368421, + "acc_norm_stderr": 0.03965842097512744 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.38562091503267976, + "acc_stderr": 0.019691459052354143, + "acc_norm": 0.38562091503267976, + "acc_norm_stderr": 0.019691459052354143 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3120567375886525, + "acc_stderr": 0.027640120545169934, + "acc_norm": 0.3120567375886525, + "acc_norm_stderr": 0.027640120545169934 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.04287858751340456, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.04287858751340456 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3472222222222222, + "acc_stderr": 0.032468872436376486, + "acc_norm": 0.3472222222222222, + "acc_norm_stderr": 0.032468872436376486 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2636871508379888, + "acc_stderr": 0.014736926383761968, + "acc_norm": 0.2636871508379888, + "acc_norm_stderr": 0.014736926383761968 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.53, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.53, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3897058823529412, + "acc_stderr": 0.029624663581159703, + "acc_norm": 0.3897058823529412, + "acc_norm_stderr": 0.029624663581159703 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 
0.37142857142857144, + "acc_stderr": 0.030932858792789848, + "acc_norm": 0.37142857142857144, + "acc_norm_stderr": 0.030932858792789848 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5316455696202531, + "acc_stderr": 0.032481974005110756, + "acc_norm": 0.5316455696202531, + "acc_norm_stderr": 0.032481974005110756 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.32790091264667537, + "acc_stderr": 0.011989936640666535, + "acc_norm": 0.32790091264667537, + "acc_norm_stderr": 0.011989936640666535 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4215686274509804, + "acc_stderr": 0.03465868196380758, + "acc_norm": 0.4215686274509804, + "acc_norm_stderr": 0.03465868196380758 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.44242424242424244, + "acc_stderr": 0.038783721137112745, + "acc_norm": 0.44242424242424244, + "acc_norm_stderr": 0.038783721137112745 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.31334149326805383, + "mc1_stderr": 0.016238065069059622, + "mc2": 0.4779576546158916, + "mc2_stderr": 0.015598802674230815 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3577331759149941, + "acc_stderr": 0.016479808935749976, + "acc_norm": 0.4002361275088548, + "acc_norm_stderr": 0.016844693510505056 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + 
"harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + 
"harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "freewheelin/free-solar-instrunction-v0.2", + "model_sha": "ed5712f70568bf2ce71b98f16e0e50ce85924d89", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/freewheelin/free-solar-instrunction-v0.3/result_2024-03-15 02:32:48.json b/freewheelin/free-solar-instrunction-v0.3/result_2024-03-15 02:32:48.json new file mode 100644 index 0000000000000000000000000000000000000000..fbeddf117accbd60fdfdcedcf16974c7761b9b09 --- /dev/null +++ b/freewheelin/free-solar-instrunction-v0.3/result_2024-03-15 02:32:48.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.36860068259385664, + "acc_stderr": 0.014097810678042189, + "acc_norm": 0.44112627986348124, + "acc_norm_stderr": 0.014509747749064664 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4071898028281219, + "acc_stderr": 0.0049030666397619545, + "acc_norm": 0.5368452499502091, + "acc_norm_stderr": 0.004976214989483504 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5087719298245614, + "acc_stderr": 0.038342347441649924, + "acc_norm": 0.5087719298245614, + "acc_norm_stderr": 0.038342347441649924 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.49514563106796117, + "acc_stderr": 0.04950504382128919, + "acc_norm": 0.49514563106796117, + "acc_norm_stderr": 0.04950504382128919 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5683269476372924, + "acc_stderr": 0.017712228939299798, + "acc_norm": 0.5683269476372924, + "acc_norm_stderr": 0.017712228939299798 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4740740740740741, + "acc_stderr": 0.04313531696750573, + "acc_norm": 0.4740740740740741, + "acc_norm_stderr": 0.04313531696750573 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.34, + "acc_stderr": 0.047609522856952365, + "acc_norm": 0.34, + 
"acc_norm_stderr": 0.047609522856952365 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.41702127659574467, + "acc_stderr": 0.03223276266711712, + "acc_norm": 0.41702127659574467, + "acc_norm_stderr": 0.03223276266711712 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39759036144578314, + "acc_stderr": 0.03809973084540219, + "acc_norm": 0.39759036144578314, + "acc_norm_stderr": 0.03809973084540219 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4790996784565916, + "acc_stderr": 0.028373270961069414, + "acc_norm": 0.4790996784565916, + "acc_norm_stderr": 0.028373270961069414 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4439461883408072, + "acc_stderr": 0.03334625674242728, + "acc_norm": 0.4439461883408072, + "acc_norm_stderr": 0.03334625674242728 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4580152671755725, + "acc_stderr": 0.04369802690578757, + "acc_norm": 0.4580152671755725, + "acc_norm_stderr": 0.04369802690578757 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6161616161616161, + "acc_stderr": 0.03464881675016338, + "acc_norm": 0.6161616161616161, + "acc_norm_stderr": 0.03464881675016338 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.04104269211806232, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.04104269211806232 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.04617034827006718, + "acc_norm": 0.3137254901960784, + "acc_norm_stderr": 0.04617034827006718 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.453781512605042, + "acc_stderr": 0.03233943468182088, + "acc_norm": 0.453781512605042, + "acc_norm_stderr": 0.03233943468182088 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.45897435897435895, + 
"acc_stderr": 0.025265525491284295, + "acc_norm": 0.45897435897435895, + "acc_norm_stderr": 0.025265525491284295 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.59, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.59, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5277777777777778, + "acc_stderr": 0.048262172941398944, + "acc_norm": 0.5277777777777778, + "acc_norm_stderr": 0.048262172941398944 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4039408866995074, + "acc_stderr": 0.0345245390382204, + "acc_norm": 0.4039408866995074, + "acc_norm_stderr": 0.0345245390382204 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4935483870967742, + "acc_stderr": 0.02844163823354051, + "acc_norm": 0.4935483870967742, + "acc_norm_stderr": 0.02844163823354051 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7478632478632479, + "acc_stderr": 0.02844796547623102, + "acc_norm": 0.7478632478632479, + "acc_norm_stderr": 0.02844796547623102 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.45660377358490567, + "acc_stderr": 0.03065674869673943, + "acc_norm": 0.45660377358490567, + "acc_norm_stderr": 0.03065674869673943 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4727272727272727, + "acc_stderr": 0.04782001791380063, + "acc_norm": 0.4727272727272727, + "acc_norm_stderr": 0.04782001791380063 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.028742040903948492, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.028742040903948492 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.03802039760107903, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.03802039760107903 + }, + "harness|ko_mmlu_sociology|5": { + 
"acc": 0.5522388059701493, + "acc_stderr": 0.03516184772952167, + "acc_norm": 0.5522388059701493, + "acc_norm_stderr": 0.03516184772952167 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.41040462427745666, + "acc_stderr": 0.03750757044895538, + "acc_norm": 0.41040462427745666, + "acc_norm_stderr": 0.03750757044895538 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.37566137566137564, + "acc_stderr": 0.024942368931159788, + "acc_norm": 0.37566137566137564, + "acc_norm_stderr": 0.024942368931159788 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4027777777777778, + "acc_stderr": 0.04101405519842426, + "acc_norm": 0.4027777777777778, + "acc_norm_stderr": 0.04101405519842426 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.68, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.68, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5346820809248555, + "acc_stderr": 0.026854257928258886, + "acc_norm": 0.5346820809248555, + "acc_norm_stderr": 0.026854257928258886 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.44785276073619634, + "acc_stderr": 0.03906947479456601, + "acc_norm": 0.44785276073619634, + "acc_norm_stderr": 0.03906947479456601 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5246913580246914, + "acc_stderr": 0.02778680093142745, + "acc_norm": 0.5246913580246914, + "acc_norm_stderr": 0.02778680093142745 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5595854922279793, + "acc_stderr": 0.03582724530036094, + "acc_norm": 0.5595854922279793, + "acc_norm_stderr": 0.03582724530036094 + }, + 
"harness|ko_mmlu_econometrics|5": { + "acc": 0.3684210526315789, + "acc_stderr": 0.04537815354939391, + "acc_norm": 0.3684210526315789, + "acc_norm_stderr": 0.04537815354939391 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5412844036697247, + "acc_stderr": 0.02136412253388169, + "acc_norm": 0.5412844036697247, + "acc_norm_stderr": 0.02136412253388169 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.0393253768039287, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.0393253768039287 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.43137254901960786, + "acc_stderr": 0.028358956313423556, + "acc_norm": 0.43137254901960786, + "acc_norm_stderr": 0.028358956313423556 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.043913262867240704, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.043913262867240704 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5131578947368421, + "acc_stderr": 0.04067533136309173, + "acc_norm": 0.5131578947368421, + "acc_norm_stderr": 0.04067533136309173 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.41013071895424835, + "acc_stderr": 0.019898412717635906, + "acc_norm": 0.41013071895424835, + "acc_norm_stderr": 0.019898412717635906 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.33687943262411346, + "acc_stderr": 0.02819553487396673, + "acc_norm": 0.33687943262411346, + "acc_norm_stderr": 0.02819553487396673 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.30357142857142855, + "acc_stderr": 0.04364226155841044, + "acc_norm": 0.30357142857142855, + "acc_norm_stderr": 0.04364226155841044 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4305555555555556, + "acc_stderr": 0.03376922151252335, + "acc_norm": 
0.4305555555555556, + "acc_norm_stderr": 0.03376922151252335 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.31843575418994413, + "acc_stderr": 0.015581008080360276, + "acc_norm": 0.31843575418994413, + "acc_norm_stderr": 0.015581008080360276 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.62, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.62, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4264705882352941, + "acc_stderr": 0.030042615832714867, + "acc_norm": 0.4264705882352941, + "acc_norm_stderr": 0.030042615832714867 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.46530612244897956, + "acc_stderr": 0.03193207024425314, + "acc_norm": 0.46530612244897956, + "acc_norm_stderr": 0.03193207024425314 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6455696202531646, + "acc_stderr": 0.0311373042971858, + "acc_norm": 0.6455696202531646, + "acc_norm_stderr": 0.0311373042971858 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.33833116036505867, + "acc_stderr": 0.012084265626344202, + "acc_norm": 0.33833116036505867, + "acc_norm_stderr": 0.012084265626344202 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.47549019607843135, + "acc_stderr": 0.03505093194348798, + "acc_norm": 0.47549019607843135, + "acc_norm_stderr": 0.03505093194348798 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5333333333333333, + "acc_stderr": 0.03895658065271847, + "acc_norm": 0.5333333333333333, + "acc_norm_stderr": 0.03895658065271847 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2594859241126071, + "mc1_stderr": 0.015345409485557956, + "mc2": 0.4100875153762214, + "mc2_stderr": 0.015334807992770368 + }, + "harness|ko_commongen_v2|2": { + "acc": 
0.4309327036599764, + "acc_stderr": 0.017025558196043136, + "acc_norm": 0.47107438016528924, + "acc_norm_stderr": 0.017161563949916345 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + 
"harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "freewheelin/free-solar-instrunction-v0.3", + "model_sha": "22b8c228c551ba2ebf16331887dc795140af41e5", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/freewheelin/free-solar-sft-v0.7/result_2024-04-04 01:54:50.json b/freewheelin/free-solar-sft-v0.7/result_2024-04-04 01:54:50.json new file mode 100644 index 0000000000000000000000000000000000000000..65a70a604942b3b6196f54e0fcc9acf73bacf1e2 --- /dev/null +++ b/freewheelin/free-solar-sft-v0.7/result_2024-04-04 01:54:50.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.7320819112627986, + "acc_stderr": 0.01294203019513643, + "acc_norm": 0.7619453924914675, + "acc_norm_stderr": 0.012445770028026208 + }, + "harness|ko_hellaswag|10": { + "acc": 0.6867157936666003, + "acc_stderr": 0.004628809258483525, + "acc_norm": 0.8077076279625572, + "acc_norm_stderr": 0.003932960974008063 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 
0.6900584795321637, + "acc_stderr": 0.035469769593931624, + "acc_norm": 0.6900584795321637, + "acc_norm_stderr": 0.035469769593931624 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.7378640776699029, + "acc_stderr": 0.043546310772605956, + "acc_norm": 0.7378640776699029, + "acc_norm_stderr": 0.043546310772605956 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.685823754789272, + "acc_stderr": 0.01659929173588493, + "acc_norm": 0.685823754789272, + "acc_norm_stderr": 0.01659929173588493 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.04292596718256981, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.04292596718256981 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4765957446808511, + "acc_stderr": 0.03265019475033583, + "acc_norm": 0.4765957446808511, + "acc_norm_stderr": 0.03265019475033583 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.463855421686747, + "acc_stderr": 0.03882310850890594, + "acc_norm": 0.463855421686747, + "acc_norm_stderr": 0.03882310850890594 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6334405144694534, + "acc_stderr": 0.027368078243971646, + "acc_norm": 0.6334405144694534, + "acc_norm_stderr": 0.027368078243971646 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.6412556053811659, + "acc_stderr": 0.032190792004199956, + "acc_norm": 0.6412556053811659, + "acc_norm_stderr": 0.032190792004199956 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.549618320610687, + "acc_stderr": 0.04363643698524779, + "acc_norm": 0.549618320610687, + "acc_norm_stderr": 0.04363643698524779 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.51, + "acc_stderr": 0.050241839379569095, + "acc_norm": 0.51, + "acc_norm_stderr": 0.050241839379569095 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 
0.7575757575757576, + "acc_stderr": 0.030532892233932036, + "acc_norm": 0.7575757575757576, + "acc_norm_stderr": 0.030532892233932036 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5241379310344828, + "acc_stderr": 0.0416180850350153, + "acc_norm": 0.5241379310344828, + "acc_norm_stderr": 0.0416180850350153 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3627450980392157, + "acc_stderr": 0.047840607041056527, + "acc_norm": 0.3627450980392157, + "acc_norm_stderr": 0.047840607041056527 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6260504201680672, + "acc_stderr": 0.03142946637883708, + "acc_norm": 0.6260504201680672, + "acc_norm_stderr": 0.03142946637883708 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.617948717948718, + "acc_stderr": 0.024635549163908237, + "acc_norm": 0.617948717948718, + "acc_norm_stderr": 0.024635549163908237 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.65, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.65, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6574074074074074, + "acc_stderr": 0.04587904741301812, + "acc_norm": 0.6574074074074074, + "acc_norm_stderr": 0.04587904741301812 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4729064039408867, + "acc_stderr": 0.03512819077876106, + "acc_norm": 0.4729064039408867, + "acc_norm_stderr": 0.03512819077876106 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6, + "acc_stderr": 0.02786932057166462, + "acc_norm": 0.6, + "acc_norm_stderr": 0.02786932057166462 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.8376068376068376, + "acc_stderr": 0.024161618127987745, + "acc_norm": 0.8376068376068376, + "acc_norm_stderr": 0.024161618127987745 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + 
"acc": 0.5547169811320755, + "acc_stderr": 0.030588052974270655, + "acc_norm": 0.5547169811320755, + "acc_norm_stderr": 0.030588052974270655 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6090909090909091, + "acc_stderr": 0.04673752333670239, + "acc_norm": 0.6090909090909091, + "acc_norm_stderr": 0.04673752333670239 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.337037037037037, + "acc_stderr": 0.028820884666253255, + "acc_norm": 0.337037037037037, + "acc_norm_stderr": 0.028820884666253255 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3443708609271523, + "acc_stderr": 0.038796870240733264, + "acc_norm": 0.3443708609271523, + "acc_norm_stderr": 0.038796870240733264 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7164179104477612, + "acc_stderr": 0.03187187537919796, + "acc_norm": 0.7164179104477612, + "acc_norm_stderr": 0.03187187537919796 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5260115606936416, + "acc_stderr": 0.038073017265045125, + "acc_norm": 0.5260115606936416, + "acc_norm_stderr": 0.038073017265045125 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.4497354497354497, + "acc_stderr": 0.02562085704293665, + "acc_norm": 0.4497354497354497, + "acc_norm_stderr": 0.02562085704293665 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.6458333333333334, + "acc_stderr": 0.039994111357535424, + "acc_norm": 0.6458333333333334, + "acc_norm_stderr": 0.039994111357535424 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.71, + "acc_stderr": 0.04560480215720683, + "acc_norm": 0.71, + "acc_norm_stderr": 0.04560480215720683 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5664739884393064, + "acc_stderr": 0.026680134761679217, + "acc_norm": 0.5664739884393064, + "acc_norm_stderr": 0.026680134761679217 + }, + 
"harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.6073619631901841, + "acc_stderr": 0.0383674090783103, + "acc_norm": 0.6073619631901841, + "acc_norm_stderr": 0.0383674090783103 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6574074074074074, + "acc_stderr": 0.026406145973625682, + "acc_norm": 0.6574074074074074, + "acc_norm_stderr": 0.026406145973625682 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.04793724854411018, + "acc_norm": 0.35, + "acc_norm_stderr": 0.04793724854411018 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7461139896373057, + "acc_stderr": 0.03141024780565318, + "acc_norm": 0.7461139896373057, + "acc_norm_stderr": 0.03141024780565318 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.45614035087719296, + "acc_stderr": 0.046854730419077895, + "acc_norm": 0.45614035087719296, + "acc_norm_stderr": 0.046854730419077895 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.7229357798165138, + "acc_stderr": 0.01918848259016954, + "acc_norm": 0.7229357798165138, + "acc_norm_stderr": 0.01918848259016954 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.40476190476190477, + "acc_stderr": 0.04390259265377561, + "acc_norm": 0.40476190476190477, + "acc_norm_stderr": 0.04390259265377561 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5751633986928104, + "acc_stderr": 0.02830457667314111, + "acc_norm": 0.5751633986928104, + "acc_norm_stderr": 0.02830457667314111 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.69, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.69, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7107438016528925, + "acc_stderr": 0.04139112727635463, + "acc_norm": 0.7107438016528925, + "acc_norm_stderr": 0.04139112727635463 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.6381578947368421, + "acc_stderr": 0.03910525752849724, + "acc_norm": 0.6381578947368421, + "acc_norm_stderr": 
0.03910525752849724 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5735294117647058, + "acc_stderr": 0.020007912739359368, + "acc_norm": 0.5735294117647058, + "acc_norm_stderr": 0.020007912739359368 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.42907801418439717, + "acc_stderr": 0.029525914302558562, + "acc_norm": 0.42907801418439717, + "acc_norm_stderr": 0.029525914302558562 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4375, + "acc_stderr": 0.04708567521880525, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.04708567521880525 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.0340763209385405, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.0340763209385405 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.33631284916201115, + "acc_stderr": 0.015801003729145897, + "acc_norm": 0.33631284916201115, + "acc_norm_stderr": 0.015801003729145897 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.67, + "acc_stderr": 0.04725815626252609, + "acc_norm": 0.67, + "acc_norm_stderr": 0.04725815626252609 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4889705882352941, + "acc_stderr": 0.030365446477275675, + "acc_norm": 0.4889705882352941, + "acc_norm_stderr": 0.030365446477275675 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6693877551020408, + "acc_stderr": 0.030116426296540617, + "acc_norm": 0.6693877551020408, + "acc_norm_stderr": 0.030116426296540617 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7341772151898734, + "acc_stderr": 0.02875679962965834, + "acc_norm": 0.7341772151898734, + "acc_norm_stderr": 0.02875679962965834 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.4491525423728814, + "acc_stderr": 
0.012704030518851474, + "acc_norm": 0.4491525423728814, + "acc_norm_stderr": 0.012704030518851474 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6813725490196079, + "acc_stderr": 0.032702871814820816, + "acc_norm": 0.6813725490196079, + "acc_norm_stderr": 0.032702871814820816 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6242424242424243, + "acc_stderr": 0.03781887353205983, + "acc_norm": 0.6242424242424243, + "acc_norm_stderr": 0.03781887353205983 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.7649938800489596, + "mc1_stderr": 0.014843061507731613, + "mc2": 0.8437494205785457, + "mc2_stderr": 0.012090072787421555 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5064935064935064, + "acc_stderr": 0.017188904359077314, + "acc_norm": 0.5395513577331759, + "acc_norm_stderr": 0.017136487626049846 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + 
"harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "freewheelin/free-solar-sft-v0.7", + "model_sha": "d69c10e6c98bf76c2b92323c317532173cd01a19", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/freewheelin/free-solar-slerp-v0.2/result_2024-03-12 14:56:59.json 
b/freewheelin/free-solar-slerp-v0.2/result_2024-03-12 14:56:59.json new file mode 100644 index 0000000000000000000000000000000000000000..1b2b42f43061939b5c2dd09ca5b10e346836fbf6 --- /dev/null +++ b/freewheelin/free-solar-slerp-v0.2/result_2024-03-12 14:56:59.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.5068259385665529, + "acc_stderr": 0.014610029151379813, + "acc_norm": 0.5648464163822525, + "acc_norm_stderr": 0.01448798619718605 + }, + "harness|ko_hellaswag|10": { + "acc": 0.49442342162915753, + "acc_stderr": 0.004989471055090963, + "acc_norm": 0.676956781517626, + "acc_norm_stderr": 0.004666833452796185 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5906432748538012, + "acc_stderr": 0.03771283107626545, + "acc_norm": 0.5906432748538012, + "acc_norm_stderr": 0.03771283107626545 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6310679611650486, + "acc_stderr": 0.0477761518115674, + "acc_norm": 0.6310679611650486, + "acc_norm_stderr": 0.0477761518115674 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.016857391247472542, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.016857391247472542 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45185185185185184, + "acc_stderr": 0.04299268905480863, + "acc_norm": 0.45185185185185184, + "acc_norm_stderr": 0.04299268905480863 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4765957446808511, + "acc_stderr": 0.03265019475033582, + "acc_norm": 0.4765957446808511, + "acc_norm_stderr": 0.03265019475033582 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4457831325301205, + "acc_stderr": 0.038695433234721015, + "acc_norm": 0.4457831325301205, + "acc_norm_stderr": 0.038695433234721015 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6366559485530546, + 
"acc_stderr": 0.027316847674192703, + "acc_norm": 0.6366559485530546, + "acc_norm_stderr": 0.027316847674192703 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5739910313901345, + "acc_stderr": 0.0331883328621728, + "acc_norm": 0.5739910313901345, + "acc_norm_stderr": 0.0331883328621728 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6259541984732825, + "acc_stderr": 0.042438692422305246, + "acc_norm": 0.6259541984732825, + "acc_norm_stderr": 0.042438692422305246 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7121212121212122, + "acc_stderr": 0.03225883512300993, + "acc_norm": 0.7121212121212122, + "acc_norm_stderr": 0.03225883512300993 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.43448275862068964, + "acc_stderr": 0.041307408795554966, + "acc_norm": 0.43448275862068964, + "acc_norm_stderr": 0.041307408795554966 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.37254901960784315, + "acc_stderr": 0.04810840148082636, + "acc_norm": 0.37254901960784315, + "acc_norm_stderr": 0.04810840148082636 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6218487394957983, + "acc_stderr": 0.031499305777849054, + "acc_norm": 0.6218487394957983, + "acc_norm_stderr": 0.031499305777849054 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5743589743589743, + "acc_stderr": 0.02506909438729652, + "acc_norm": 0.5743589743589743, + "acc_norm_stderr": 0.02506909438729652 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 
0.6759259259259259, + "acc_stderr": 0.04524596007030048, + "acc_norm": 0.6759259259259259, + "acc_norm_stderr": 0.04524596007030048 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3891625615763547, + "acc_stderr": 0.03430462416103872, + "acc_norm": 0.3891625615763547, + "acc_norm_stderr": 0.03430462416103872 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5741935483870968, + "acc_stderr": 0.028129112709165908, + "acc_norm": 0.5741935483870968, + "acc_norm_stderr": 0.028129112709165908 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7521367521367521, + "acc_stderr": 0.028286324075564424, + "acc_norm": 0.7521367521367521, + "acc_norm_stderr": 0.028286324075564424 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5320754716981132, + "acc_stderr": 0.030709486992556545, + "acc_norm": 0.5320754716981132, + "acc_norm_stderr": 0.030709486992556545 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6, + "acc_stderr": 0.0469237132203465, + "acc_norm": 0.6, + "acc_norm_stderr": 0.0469237132203465 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.34444444444444444, + "acc_stderr": 0.028972648884844267, + "acc_norm": 0.34444444444444444, + "acc_norm_stderr": 0.028972648884844267 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33112582781456956, + "acc_stderr": 0.038425817186598696, + "acc_norm": 0.33112582781456956, + "acc_norm_stderr": 0.038425817186598696 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6965174129353234, + "acc_stderr": 0.03251006816458619, + "acc_norm": 0.6965174129353234, + "acc_norm_stderr": 0.03251006816458619 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.48554913294797686, + "acc_stderr": 0.03810871630454764, + "acc_norm": 0.48554913294797686, + "acc_norm_stderr": 0.03810871630454764 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.41005291005291006, + "acc_stderr": 0.025331202438944447, + "acc_norm": 0.41005291005291006, + "acc_norm_stderr": 
0.025331202438944447 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5416666666666666, + "acc_stderr": 0.04166666666666665, + "acc_norm": 0.5416666666666666, + "acc_norm_stderr": 0.04166666666666665 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.66, + "acc_stderr": 0.04760952285695237, + "acc_norm": 0.66, + "acc_norm_stderr": 0.04760952285695237 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5635838150289018, + "acc_stderr": 0.026700545424943687, + "acc_norm": 0.5635838150289018, + "acc_norm_stderr": 0.026700545424943687 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5153374233128835, + "acc_stderr": 0.039265223787088445, + "acc_norm": 0.5153374233128835, + "acc_norm_stderr": 0.039265223787088445 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6203703703703703, + "acc_stderr": 0.02700252103451646, + "acc_norm": 0.6203703703703703, + "acc_norm_stderr": 0.02700252103451646 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7253886010362695, + "acc_stderr": 0.032210245080411544, + "acc_norm": 0.7253886010362695, + "acc_norm_stderr": 0.032210245080411544 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.5, + "acc_stderr": 0.047036043419179864, + "acc_norm": 0.5, + "acc_norm_stderr": 0.047036043419179864 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6862385321100918, + "acc_stderr": 0.01989472334146913, + "acc_norm": 0.6862385321100918, + "acc_norm_stderr": 0.01989472334146913 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.042857142857142816, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 
0.042857142857142816 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5882352941176471, + "acc_stderr": 0.028180596328259283, + "acc_norm": 0.5882352941176471, + "acc_norm_stderr": 0.028180596328259283 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6859504132231405, + "acc_stderr": 0.042369647530410184, + "acc_norm": 0.6859504132231405, + "acc_norm_stderr": 0.042369647530410184 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5855263157894737, + "acc_stderr": 0.04008973785779205, + "acc_norm": 0.5855263157894737, + "acc_norm_stderr": 0.04008973785779205 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5294117647058824, + "acc_stderr": 0.020192808271433788, + "acc_norm": 0.5294117647058824, + "acc_norm_stderr": 0.020192808271433788 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.37943262411347517, + "acc_stderr": 0.028947338851614105, + "acc_norm": 0.37943262411347517, + "acc_norm_stderr": 0.028947338851614105 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4375, + "acc_stderr": 0.04708567521880525, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.04708567521880525 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.49537037037037035, + "acc_stderr": 0.03409825519163572, + "acc_norm": 0.49537037037037035, + "acc_norm_stderr": 0.03409825519163572 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.3754189944134078, + "acc_stderr": 0.01619510424846353, + "acc_norm": 0.3754189944134078, + "acc_norm_stderr": 0.01619510424846353 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.65, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.65, + "acc_norm_stderr": 
0.0479372485441102 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5257352941176471, + "acc_stderr": 0.030332578094555026, + "acc_norm": 0.5257352941176471, + "acc_norm_stderr": 0.030332578094555026 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6, + "acc_stderr": 0.031362502409358936, + "acc_norm": 0.6, + "acc_norm_stderr": 0.031362502409358936 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.759493670886076, + "acc_stderr": 0.027820781981149678, + "acc_norm": 0.759493670886076, + "acc_norm_stderr": 0.027820781981149678 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.4361147327249022, + "acc_stderr": 0.012665568135455324, + "acc_norm": 0.4361147327249022, + "acc_norm_stderr": 0.012665568135455324 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6470588235294118, + "acc_stderr": 0.03354092437591519, + "acc_norm": 0.6470588235294118, + "acc_norm_stderr": 0.03354092437591519 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6909090909090909, + "acc_stderr": 0.036085410115739666, + "acc_norm": 0.6909090909090909, + "acc_norm_stderr": 0.036085410115739666 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.49571603427172584, + "mc1_stderr": 0.01750285857737129, + "mc2": 0.6617732403932902, + "mc2_stderr": 0.015772796260019418 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5938606847697757, + "acc_stderr": 0.016884749503191396, + "acc_norm": 0.5985832349468713, + "acc_norm_stderr": 0.01685290785872906 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + 
"harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + 
"harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "freewheelin/free-solar-slerp-v0.2", + "model_sha": "26feaa5f576d8a3c644e7f157e4001c99853a2a4", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/freewheelin/free-solar-slerp-v0.3/result_2024-03-13 02:05:35.json b/freewheelin/free-solar-slerp-v0.3/result_2024-03-13 02:05:35.json new file mode 100644 index 0000000000000000000000000000000000000000..25561eeaa827629e7caf2b33d6a0d48020bf312c --- /dev/null +++ b/freewheelin/free-solar-slerp-v0.3/result_2024-03-13 02:05:35.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.5093856655290102, + "acc_stderr": 0.014608816322065003, + "acc_norm": 0.5708191126279863, + "acc_norm_stderr": 0.014464085894870653 + }, + "harness|ko_hellaswag|10": { + "acc": 0.489344752041426, + "acc_stderr": 0.0049886482600100535, + "acc_norm": 0.6724756024696276, + "acc_norm_stderr": 0.004683511716552247 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5672514619883041, + "acc_stderr": 0.03799978644370607, + "acc_norm": 0.5672514619883041, + "acc_norm_stderr": 0.03799978644370607 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6407766990291263, + "acc_stderr": 0.04750458399041697, + "acc_norm": 0.6407766990291263, + "acc_norm_stderr": 0.04750458399041697 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.016857391247472542, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.016857391247472542 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45925925925925926, + "acc_stderr": 0.04304979692464244, + 
"acc_norm": 0.45925925925925926, + "acc_norm_stderr": 0.04304979692464244 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4595744680851064, + "acc_stderr": 0.032579014820998356, + "acc_norm": 0.4595744680851064, + "acc_norm_stderr": 0.032579014820998356 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4457831325301205, + "acc_stderr": 0.038695433234721015, + "acc_norm": 0.4457831325301205, + "acc_norm_stderr": 0.038695433234721015 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6527331189710611, + "acc_stderr": 0.027040745502307336, + "acc_norm": 0.6527331189710611, + "acc_norm_stderr": 0.027040745502307336 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5739910313901345, + "acc_stderr": 0.0331883328621728, + "acc_norm": 0.5739910313901345, + "acc_norm_stderr": 0.0331883328621728 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6259541984732825, + "acc_stderr": 0.042438692422305246, + "acc_norm": 0.6259541984732825, + "acc_norm_stderr": 0.042438692422305246 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956914, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956914 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7171717171717171, + "acc_stderr": 0.03208779558786752, + "acc_norm": 0.7171717171717171, + "acc_norm_stderr": 0.03208779558786752 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.04104269211806232, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.04104269211806232 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3627450980392157, + "acc_stderr": 0.04784060704105653, + "acc_norm": 0.3627450980392157, + "acc_norm_stderr": 0.04784060704105653 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6218487394957983, + 
"acc_stderr": 0.031499305777849054, + "acc_norm": 0.6218487394957983, + "acc_norm_stderr": 0.031499305777849054 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5743589743589743, + "acc_stderr": 0.025069094387296518, + "acc_norm": 0.5743589743589743, + "acc_norm_stderr": 0.025069094387296518 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.59, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.59, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6481481481481481, + "acc_stderr": 0.04616631111801714, + "acc_norm": 0.6481481481481481, + "acc_norm_stderr": 0.04616631111801714 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3842364532019704, + "acc_stderr": 0.034223985656575515, + "acc_norm": 0.3842364532019704, + "acc_norm_stderr": 0.034223985656575515 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5806451612903226, + "acc_stderr": 0.028071588901091828, + "acc_norm": 0.5806451612903226, + "acc_norm_stderr": 0.028071588901091828 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7564102564102564, + "acc_stderr": 0.02812096650391438, + "acc_norm": 0.7564102564102564, + "acc_norm_stderr": 0.02812096650391438 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5433962264150943, + "acc_stderr": 0.03065674869673944, + "acc_norm": 0.5433962264150943, + "acc_norm_stderr": 0.03065674869673944 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5818181818181818, + "acc_stderr": 0.04724577405731572, + "acc_norm": 0.5818181818181818, + "acc_norm_stderr": 0.04724577405731572 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.34074074074074073, + "acc_stderr": 0.02889774874113115, + "acc_norm": 0.34074074074074073, + "acc_norm_stderr": 0.02889774874113115 + }, + 
"harness|ko_mmlu_high_school_physics|5": { + "acc": 0.32450331125827814, + "acc_stderr": 0.03822746937658753, + "acc_norm": 0.32450331125827814, + "acc_norm_stderr": 0.03822746937658753 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7064676616915423, + "acc_stderr": 0.032200241045342054, + "acc_norm": 0.7064676616915423, + "acc_norm_stderr": 0.032200241045342054 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.48554913294797686, + "acc_stderr": 0.03810871630454764, + "acc_norm": 0.48554913294797686, + "acc_norm_stderr": 0.03810871630454764 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.41005291005291006, + "acc_stderr": 0.025331202438944447, + "acc_norm": 0.41005291005291006, + "acc_norm_stderr": 0.025331202438944447 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5277777777777778, + "acc_stderr": 0.04174752578923185, + "acc_norm": 0.5277777777777778, + "acc_norm_stderr": 0.04174752578923185 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.64, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.64, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5664739884393064, + "acc_stderr": 0.026680134761679217, + "acc_norm": 0.5664739884393064, + "acc_norm_stderr": 0.026680134761679217 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.50920245398773, + "acc_stderr": 0.03927705600787443, + "acc_norm": 0.50920245398773, + "acc_norm_stderr": 0.03927705600787443 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6234567901234568, + "acc_stderr": 0.026959344518747784, + "acc_norm": 0.6234567901234568, + "acc_norm_stderr": 0.026959344518747784 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + 
"harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7202072538860104, + "acc_stderr": 0.03239637046735703, + "acc_norm": 0.7202072538860104, + "acc_norm_stderr": 0.03239637046735703 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.4824561403508772, + "acc_stderr": 0.047007080335510376, + "acc_norm": 0.4824561403508772, + "acc_norm_stderr": 0.047007080335510376 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6954128440366972, + "acc_stderr": 0.019732299420354045, + "acc_norm": 0.6954128440366972, + "acc_norm_stderr": 0.019732299420354045 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.38095238095238093, + "acc_stderr": 0.043435254289490965, + "acc_norm": 0.38095238095238093, + "acc_norm_stderr": 0.043435254289490965 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5980392156862745, + "acc_stderr": 0.028074158947600656, + "acc_norm": 0.5980392156862745, + "acc_norm_stderr": 0.028074158947600656 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6942148760330579, + "acc_stderr": 0.04205953933884122, + "acc_norm": 0.6942148760330579, + "acc_norm_stderr": 0.04205953933884122 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5723684210526315, + "acc_stderr": 0.040260970832965634, + "acc_norm": 0.5723684210526315, + "acc_norm_stderr": 0.040260970832965634 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5196078431372549, + "acc_stderr": 0.020212274976302957, + "acc_norm": 0.5196078431372549, + "acc_norm_stderr": 0.020212274976302957 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.37943262411347517, + "acc_stderr": 0.028947338851614105, + "acc_norm": 0.37943262411347517, + "acc_norm_stderr": 0.028947338851614105 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.04697113923010213, + 
"acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.04697113923010213 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5185185185185185, + "acc_stderr": 0.03407632093854053, + "acc_norm": 0.5185185185185185, + "acc_norm_stderr": 0.03407632093854053 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.3687150837988827, + "acc_stderr": 0.016135759015030126, + "acc_norm": 0.3687150837988827, + "acc_norm_stderr": 0.016135759015030126 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.63, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.63, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5294117647058824, + "acc_stderr": 0.03032024326500413, + "acc_norm": 0.5294117647058824, + "acc_norm_stderr": 0.03032024326500413 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6, + "acc_stderr": 0.031362502409358936, + "acc_norm": 0.6, + "acc_norm_stderr": 0.031362502409358936 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7552742616033755, + "acc_stderr": 0.02798569938703641, + "acc_norm": 0.7552742616033755, + "acc_norm_stderr": 0.02798569938703641 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.4406779661016949, + "acc_stderr": 0.012680037994097055, + "acc_norm": 0.4406779661016949, + "acc_norm_stderr": 0.012680037994097055 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6519607843137255, + "acc_stderr": 0.03343311240488419, + "acc_norm": 0.6519607843137255, + "acc_norm_stderr": 0.03343311240488419 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.696969696969697, + "acc_stderr": 0.03588624800091708, + "acc_norm": 0.696969696969697, + "acc_norm_stderr": 0.03588624800091708 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.4981640146878825, + 
"mc1_stderr": 0.01750338304687702, + "mc2": 0.6604112859467058, + "mc2_stderr": 0.015771357646077157 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5879574970484062, + "acc_stderr": 0.016922276738528357, + "acc_norm": 0.5985832349468713, + "acc_norm_stderr": 0.01685290785872906 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + 
"harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "freewheelin/free-solar-slerp-v0.3", + "model_sha": "4128cb2b54a8c15433be46cd8413926732b1e521", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/freewheelin/komt-solar-slerp-v0.1/result_2024-03-12 07:24:02.json b/freewheelin/komt-solar-slerp-v0.1/result_2024-03-12 07:24:02.json new file mode 100644 index 0000000000000000000000000000000000000000..9ab1e8e4af03ceb8ad02fca6f526c049463c8229 --- /dev/null +++ b/freewheelin/komt-solar-slerp-v0.1/result_2024-03-12 07:24:02.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.5042662116040956, + "acc_stderr": 0.014610858923956952, + "acc_norm": 0.5614334470989761, + "acc_norm_stderr": 0.014500682618212865 + }, + "harness|ko_hellaswag|10": { + "acc": 0.5001991635132443, + "acc_stderr": 0.00498978101559546, + "acc_norm": 
0.6901015733917546, + "acc_norm_stderr": 0.004615063817741868 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6198830409356725, + "acc_stderr": 0.037229657413855394, + "acc_norm": 0.6198830409356725, + "acc_norm_stderr": 0.037229657413855394 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6504854368932039, + "acc_stderr": 0.047211885060971716, + "acc_norm": 0.6504854368932039, + "acc_norm_stderr": 0.047211885060971716 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6577266922094508, + "acc_stderr": 0.01696703176641363, + "acc_norm": 0.6577266922094508, + "acc_norm_stderr": 0.01696703176641363 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45185185185185184, + "acc_stderr": 0.04299268905480863, + "acc_norm": 0.45185185185185184, + "acc_norm_stderr": 0.04299268905480863 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4595744680851064, + "acc_stderr": 0.032579014820998356, + "acc_norm": 0.4595744680851064, + "acc_norm_stderr": 0.032579014820998356 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.43373493975903615, + "acc_stderr": 0.03858158940685515, + "acc_norm": 0.43373493975903615, + "acc_norm_stderr": 0.03858158940685515 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6141479099678456, + "acc_stderr": 0.027648149599751468, + "acc_norm": 0.6141479099678456, + "acc_norm_stderr": 0.027648149599751468 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5739910313901345, + "acc_stderr": 0.033188332862172806, + "acc_norm": 0.5739910313901345, + "acc_norm_stderr": 0.033188332862172806 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6412213740458015, + "acc_stderr": 0.04206739313864908, + "acc_norm": 0.6412213740458015, + "acc_norm_stderr": 0.04206739313864908 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.52, + "acc_stderr": 0.05021167315686781, + 
"acc_norm": 0.52, + "acc_norm_stderr": 0.05021167315686781 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.702020202020202, + "acc_stderr": 0.03258630383836556, + "acc_norm": 0.702020202020202, + "acc_norm_stderr": 0.03258630383836556 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4206896551724138, + "acc_stderr": 0.0411391498118926, + "acc_norm": 0.4206896551724138, + "acc_norm_stderr": 0.0411391498118926 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.4019607843137255, + "acc_stderr": 0.048786087144669955, + "acc_norm": 0.4019607843137255, + "acc_norm_stderr": 0.048786087144669955 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6218487394957983, + "acc_stderr": 0.031499305777849054, + "acc_norm": 0.6218487394957983, + "acc_norm_stderr": 0.031499305777849054 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5717948717948718, + "acc_stderr": 0.025088301454694827, + "acc_norm": 0.5717948717948718, + "acc_norm_stderr": 0.025088301454694827 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6018518518518519, + "acc_stderr": 0.04732332615978814, + "acc_norm": 0.6018518518518519, + "acc_norm_stderr": 0.04732332615978814 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4039408866995074, + "acc_stderr": 0.03452453903822039, + "acc_norm": 0.4039408866995074, + "acc_norm_stderr": 0.03452453903822039 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5903225806451613, + "acc_stderr": 0.02797605491534737, + "acc_norm": 0.5903225806451613, + "acc_norm_stderr": 0.02797605491534737 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7478632478632479, + "acc_stderr": 
0.02844796547623102, + "acc_norm": 0.7478632478632479, + "acc_norm_stderr": 0.02844796547623102 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5169811320754717, + "acc_stderr": 0.030755120364119898, + "acc_norm": 0.5169811320754717, + "acc_norm_stderr": 0.030755120364119898 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5818181818181818, + "acc_stderr": 0.04724577405731571, + "acc_norm": 0.5818181818181818, + "acc_norm_stderr": 0.04724577405731571 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.34814814814814815, + "acc_stderr": 0.029045600290616255, + "acc_norm": 0.34814814814814815, + "acc_norm_stderr": 0.029045600290616255 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.03757949922943343, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.03757949922943343 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6716417910447762, + "acc_stderr": 0.033206858897443244, + "acc_norm": 0.6716417910447762, + "acc_norm_stderr": 0.033206858897443244 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4913294797687861, + "acc_stderr": 0.038118909889404126, + "acc_norm": 0.4913294797687861, + "acc_norm_stderr": 0.038118909889404126 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.4312169312169312, + "acc_stderr": 0.025506481698138208, + "acc_norm": 0.4312169312169312, + "acc_norm_stderr": 0.025506481698138208 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5347222222222222, + "acc_stderr": 0.04171115858181618, + "acc_norm": 0.5347222222222222, + "acc_norm_stderr": 0.04171115858181618 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.68, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.68, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 
0.5289017341040463, + "acc_stderr": 0.026874085883518348, + "acc_norm": 0.5289017341040463, + "acc_norm_stderr": 0.026874085883518348 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5153374233128835, + "acc_stderr": 0.03926522378708843, + "acc_norm": 0.5153374233128835, + "acc_norm_stderr": 0.03926522378708843 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5895061728395061, + "acc_stderr": 0.027371350925124764, + "acc_norm": 0.5895061728395061, + "acc_norm_stderr": 0.027371350925124764 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7098445595854922, + "acc_stderr": 0.03275264467791516, + "acc_norm": 0.7098445595854922, + "acc_norm_stderr": 0.03275264467791516 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.47368421052631576, + "acc_stderr": 0.046970851366478626, + "acc_norm": 0.47368421052631576, + "acc_norm_stderr": 0.046970851366478626 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6825688073394496, + "acc_stderr": 0.0199571521984605, + "acc_norm": 0.6825688073394496, + "acc_norm_stderr": 0.0199571521984605 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.38095238095238093, + "acc_stderr": 0.043435254289490965, + "acc_norm": 0.38095238095238093, + "acc_norm_stderr": 0.043435254289490965 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5751633986928104, + "acc_stderr": 0.028304576673141107, + "acc_norm": 0.5751633986928104, + "acc_norm_stderr": 0.028304576673141107 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.59, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.59, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6942148760330579, + "acc_stderr": 0.04205953933884122, + "acc_norm": 0.6942148760330579, + "acc_norm_stderr": 0.04205953933884122 + }, + 
"harness|ko_mmlu_astronomy|5": { + "acc": 0.5855263157894737, + "acc_stderr": 0.040089737857792046, + "acc_norm": 0.5855263157894737, + "acc_norm_stderr": 0.040089737857792046 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5065359477124183, + "acc_stderr": 0.020226106567657807, + "acc_norm": 0.5065359477124183, + "acc_norm_stderr": 0.020226106567657807 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3475177304964539, + "acc_stderr": 0.028406627809590947, + "acc_norm": 0.3475177304964539, + "acc_norm_stderr": 0.028406627809590947 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.39285714285714285, + "acc_stderr": 0.04635550135609976, + "acc_norm": 0.39285714285714285, + "acc_norm_stderr": 0.04635550135609976 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5092592592592593, + "acc_stderr": 0.03409386946992699, + "acc_norm": 0.5092592592592593, + "acc_norm_stderr": 0.03409386946992699 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.39776536312849164, + "acc_stderr": 0.016369204971262978, + "acc_norm": 0.39776536312849164, + "acc_norm_stderr": 0.016369204971262978 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4889705882352941, + "acc_stderr": 0.030365446477275668, + "acc_norm": 0.4889705882352941, + "acc_norm_stderr": 0.030365446477275668 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6040816326530613, + "acc_stderr": 0.03130802899065686, + "acc_norm": 0.6040816326530613, + "acc_norm_stderr": 0.03130802899065686 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7468354430379747, + "acc_stderr": 0.02830465794303531, + 
"acc_norm": 0.7468354430379747, + "acc_norm_stderr": 0.02830465794303531 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.4406779661016949, + "acc_stderr": 0.012680037994097055, + "acc_norm": 0.4406779661016949, + "acc_norm_stderr": 0.012680037994097055 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6323529411764706, + "acc_stderr": 0.03384132045674118, + "acc_norm": 0.6323529411764706, + "acc_norm_stderr": 0.03384132045674118 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6787878787878788, + "acc_stderr": 0.03646204963253812, + "acc_norm": 0.6787878787878788, + "acc_norm_stderr": 0.03646204963253812 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.4908200734394125, + "mc1_stderr": 0.017500550724819746, + "mc2": 0.6705772574223984, + "mc2_stderr": 0.015718273421063033 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5478158205430933, + "acc_stderr": 0.017111567130916796, + "acc_norm": 0.5525383707201889, + "acc_norm_stderr": 0.01709519030150058 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + 
"harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "freewheelin/komt-solar-slerp-v0.1", + "model_sha": "51260983813f645d3916ca03d4098e831dc0b9ef", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + 
"override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/gamzadole/llama3_Commonsense_Reasoning/result_2024-07-10 11:38:23.json b/gamzadole/llama3_Commonsense_Reasoning/result_2024-07-10 11:38:23.json new file mode 100644 index 0000000000000000000000000000000000000000..0a15afffbdddf48d68ac63baf864fffff47069df --- /dev/null +++ b/gamzadole/llama3_Commonsense_Reasoning/result_2024-07-10 11:38:23.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.32764505119453924, + "acc_stderr": 0.01371584794071934, + "acc_norm": 0.35921501706484643, + "acc_norm_stderr": 0.014020224155839143 + }, + "harness|ko_hellaswag|10": { + "acc": 0.33827922724556864, + "acc_stderr": 0.004721571443354456, + "acc_norm": 0.42252539334793865, + "acc_norm_stderr": 0.004929517011508224 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.3742690058479532, + "acc_stderr": 0.037116011853894806, + "acc_norm": 0.3742690058479532, + "acc_norm_stderr": 0.037116011853894806 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2524271844660194, + "acc_stderr": 0.04301250399690879, + "acc_norm": 0.2524271844660194, + "acc_norm_stderr": 0.04301250399690879 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3767560664112388, + "acc_stderr": 0.017328292907303047, + "acc_norm": 0.3767560664112388, + "acc_norm_stderr": 0.017328292907303047 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34074074074074073, + "acc_stderr": 0.040943762699967946, + "acc_norm": 0.34074074074074073, + "acc_norm_stderr": 0.040943762699967946 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2851063829787234, + "acc_stderr": 0.029513196625539355, + "acc_norm": 0.2851063829787234, + "acc_norm_stderr": 0.029513196625539355 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3253012048192771, + 
"acc_stderr": 0.03647168523683229, + "acc_norm": 0.3253012048192771, + "acc_norm_stderr": 0.03647168523683229 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3890675241157556, + "acc_stderr": 0.027690337536485372, + "acc_norm": 0.3890675241157556, + "acc_norm_stderr": 0.027690337536485372 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.37668161434977576, + "acc_stderr": 0.032521134899291884, + "acc_norm": 0.37668161434977576, + "acc_norm_stderr": 0.032521134899291884 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.33587786259541985, + "acc_stderr": 0.041423137719966634, + "acc_norm": 0.33587786259541985, + "acc_norm_stderr": 0.041423137719966634 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.03191178226713549, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.03191178226713549 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3448275862068966, + "acc_stderr": 0.03960933549451207, + "acc_norm": 0.3448275862068966, + "acc_norm_stderr": 0.03960933549451207 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.19607843137254902, + "acc_stderr": 0.039505818611799616, + "acc_norm": 0.19607843137254902, + "acc_norm_stderr": 0.039505818611799616 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.31092436974789917, + "acc_stderr": 0.03006676158297792, + "acc_norm": 0.31092436974789917, + "acc_norm_stderr": 0.03006676158297792 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3076923076923077, + "acc_stderr": 0.023400928918310495, + "acc_norm": 0.3076923076923077, + "acc_norm_stderr": 0.023400928918310495 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + 
"harness|ko_mmlu_global_facts|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.047500773411999854, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.047500773411999854 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.27586206896551724, + "acc_stderr": 0.0314471258167824, + "acc_norm": 0.27586206896551724, + "acc_norm_stderr": 0.0314471258167824 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.33548387096774196, + "acc_stderr": 0.026860206444724356, + "acc_norm": 0.33548387096774196, + "acc_norm_stderr": 0.026860206444724356 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.49145299145299143, + "acc_stderr": 0.032751303000970296, + "acc_norm": 0.49145299145299143, + "acc_norm_stderr": 0.032751303000970296 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.32075471698113206, + "acc_stderr": 0.028727502957880274, + "acc_norm": 0.32075471698113206, + "acc_norm_stderr": 0.028727502957880274 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.3090909090909091, + "acc_stderr": 0.044262946482000985, + "acc_norm": 0.3090909090909091, + "acc_norm_stderr": 0.044262946482000985 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24814814814814815, + "acc_stderr": 0.0263357394040558, + "acc_norm": 0.24814814814814815, + "acc_norm_stderr": 0.0263357394040558 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.03780445850526732, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.03780445850526732 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.3681592039800995, + "acc_stderr": 0.03410410565495301, + "acc_norm": 0.3681592039800995, + "acc_norm_stderr": 0.03410410565495301 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2947976878612717, + "acc_stderr": 0.03476599607516478, + "acc_norm": 
0.2947976878612717, + "acc_norm_stderr": 0.03476599607516478 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2566137566137566, + "acc_stderr": 0.022494510767503154, + "acc_norm": 0.2566137566137566, + "acc_norm_stderr": 0.022494510767503154 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3194444444444444, + "acc_stderr": 0.03899073687357335, + "acc_norm": 0.3194444444444444, + "acc_norm_stderr": 0.03899073687357335 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.30057803468208094, + "acc_stderr": 0.0246853168672578, + "acc_norm": 0.30057803468208094, + "acc_norm_stderr": 0.0246853168672578 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.32515337423312884, + "acc_stderr": 0.0368035037128646, + "acc_norm": 0.32515337423312884, + "acc_norm_stderr": 0.0368035037128646 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.33024691358024694, + "acc_stderr": 0.026168298456732846, + "acc_norm": 0.33024691358024694, + "acc_norm_stderr": 0.026168298456732846 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.31088082901554404, + "acc_stderr": 0.03340361906276585, + "acc_norm": 0.31088082901554404, + "acc_norm_stderr": 0.03340361906276585 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.04227054451232199, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.04227054451232199 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.29724770642201837, + "acc_stderr": 
0.019595707224643547, + "acc_norm": 0.29724770642201837, + "acc_norm_stderr": 0.019595707224643547 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.16666666666666666, + "acc_stderr": 0.03333333333333337, + "acc_norm": 0.16666666666666666, + "acc_norm_stderr": 0.03333333333333337 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.34967320261437906, + "acc_stderr": 0.0273053080762747, + "acc_norm": 0.34967320261437906, + "acc_norm_stderr": 0.0273053080762747 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.4049586776859504, + "acc_stderr": 0.04481137755942469, + "acc_norm": 0.4049586776859504, + "acc_norm_stderr": 0.04481137755942469 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.21052631578947367, + "acc_stderr": 0.033176727875331574, + "acc_norm": 0.21052631578947367, + "acc_norm_stderr": 0.033176727875331574 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2973856209150327, + "acc_stderr": 0.018492596536396955, + "acc_norm": 0.2973856209150327, + "acc_norm_stderr": 0.018492596536396955 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2801418439716312, + "acc_stderr": 0.026789172351140242, + "acc_norm": 0.2801418439716312, + "acc_norm_stderr": 0.026789172351140242 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.36607142857142855, + "acc_stderr": 0.0457237235873743, + "acc_norm": 0.36607142857142855, + "acc_norm_stderr": 0.0457237235873743 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.16203703703703703, + "acc_stderr": 0.025130453652268455, + "acc_norm": 0.16203703703703703, + "acc_norm_stderr": 0.025130453652268455 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23128491620111732, + "acc_stderr": 0.0141022236231526, + "acc_norm": 0.23128491620111732, + "acc_norm_stderr": 0.0141022236231526 + }, + 
"harness|ko_mmlu_college_computer_science|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816508, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816508 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.2426470588235294, + "acc_stderr": 0.026040662474201285, + "acc_norm": 0.2426470588235294, + "acc_norm_stderr": 0.026040662474201285 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.21224489795918366, + "acc_stderr": 0.026176967197866764, + "acc_norm": 0.21224489795918366, + "acc_norm_stderr": 0.026176967197866764 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.42616033755274263, + "acc_stderr": 0.03219035703131774, + "acc_norm": 0.42616033755274263, + "acc_norm_stderr": 0.03219035703131774 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2620599739243807, + "acc_stderr": 0.011231552795890394, + "acc_norm": 0.2620599739243807, + "acc_norm_stderr": 0.011231552795890394 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.03198001660115072, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.03198001660115072 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2909090909090909, + "acc_stderr": 0.03546563019624336, + "acc_norm": 0.2909090909090909, + "acc_norm_stderr": 0.03546563019624336 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2913096695226438, + "mc1_stderr": 0.01590598704818483, + "mc2": 0.4632908955037508, + "mc2_stderr": 0.015567243322927053 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.30460448642266824, + "acc_stderr": 0.01582336727312938, + "acc_norm": 0.33412042502951594, + "acc_norm_stderr": 0.01621676330423969 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + 
"harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + 
"harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "gamzadole/llama3_Commonsense_Reasoning", + "model_sha": "0a1af624dc7d54edabb583ea666cf4e315b0eb6a", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/gamzadole/llama3_instruct_preview_alpaca_finetuning/result_2024-07-08 11:07:51.json b/gamzadole/llama3_instruct_preview_alpaca_finetuning/result_2024-07-08 11:07:51.json new file mode 100644 index 0000000000000000000000000000000000000000..48aab32466a3e24c32d158c7d3d173a2c3f9e11b --- /dev/null +++ b/gamzadole/llama3_instruct_preview_alpaca_finetuning/result_2024-07-08 11:07:51.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3302047781569966, + "acc_stderr": 0.013743085603760426, + "acc_norm": 0.35921501706484643, + "acc_norm_stderr": 0.014020224155839147 + }, + "harness|ko_hellaswag|10": { + "acc": 0.34405496912965544, + "acc_stderr": 0.004740882120999971, + "acc_norm": 0.4347739494124676, + "acc_norm_stderr": 0.004947141797384129 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.40350877192982454, + "acc_stderr": 0.03762738699917055, + "acc_norm": 0.40350877192982454, + "acc_norm_stderr": 0.03762738699917055 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.3300970873786408, + 
"acc_stderr": 0.0465614711001235, + "acc_norm": 0.3300970873786408, + "acc_norm_stderr": 0.0465614711001235 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3793103448275862, + "acc_stderr": 0.017351268117544453, + "acc_norm": 0.3793103448275862, + "acc_norm_stderr": 0.017351268117544453 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4, + "acc_stderr": 0.04232073695151589, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04232073695151589 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.33617021276595743, + "acc_stderr": 0.030881618520676942, + "acc_norm": 0.33617021276595743, + "acc_norm_stderr": 0.030881618520676942 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3373493975903614, + "acc_stderr": 0.03680783690727581, + "acc_norm": 0.3373493975903614, + "acc_norm_stderr": 0.03680783690727581 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.37942122186495175, + "acc_stderr": 0.027559949802347817, + "acc_norm": 0.37942122186495175, + "acc_norm_stderr": 0.027559949802347817 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.40358744394618834, + "acc_stderr": 0.03292802819330314, + "acc_norm": 0.40358744394618834, + "acc_norm_stderr": 0.03292802819330314 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3435114503816794, + "acc_stderr": 0.041649760719448786, + "acc_norm": 0.3435114503816794, + "acc_norm_stderr": 0.041649760719448786 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.03358618145732524, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.03358618145732524 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.42758620689655175, + "acc_stderr": 
0.041227371113703316, + "acc_norm": 0.42758620689655175, + "acc_norm_stderr": 0.041227371113703316 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237657, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237657 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.33613445378151263, + "acc_stderr": 0.03068473711513537, + "acc_norm": 0.33613445378151263, + "acc_norm_stderr": 0.03068473711513537 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3435897435897436, + "acc_stderr": 0.02407869658063547, + "acc_norm": 0.3435897435897436, + "acc_norm_stderr": 0.02407869658063547 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.47, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.47, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.047500773411999854, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.047500773411999854 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2955665024630542, + "acc_stderr": 0.032104944337514575, + "acc_norm": 0.2955665024630542, + "acc_norm_stderr": 0.032104944337514575 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3741935483870968, + "acc_stderr": 0.027528904299845783, + "acc_norm": 0.3741935483870968, + "acc_norm_stderr": 0.027528904299845783 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5042735042735043, + "acc_stderr": 0.03275489264382132, + "acc_norm": 0.5042735042735043, + "acc_norm_stderr": 0.03275489264382132 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3584905660377358, + "acc_stderr": 0.02951470358398175, + "acc_norm": 0.3584905660377358, + "acc_norm_stderr": 0.02951470358398175 + }, + "harness|ko_mmlu_public_relations|5": { + 
"acc": 0.34545454545454546, + "acc_stderr": 0.04554619617541054, + "acc_norm": 0.34545454545454546, + "acc_norm_stderr": 0.04554619617541054 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.026719240783712166, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.026719240783712166 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33112582781456956, + "acc_stderr": 0.038425817186598696, + "acc_norm": 0.33112582781456956, + "acc_norm_stderr": 0.038425817186598696 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.4079601990049751, + "acc_stderr": 0.034751163651940926, + "acc_norm": 0.4079601990049751, + "acc_norm_stderr": 0.034751163651940926 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.30057803468208094, + "acc_stderr": 0.0349610148119118, + "acc_norm": 0.30057803468208094, + "acc_norm_stderr": 0.0349610148119118 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2804232804232804, + "acc_stderr": 0.02313528797432562, + "acc_norm": 0.2804232804232804, + "acc_norm_stderr": 0.02313528797432562 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3263888888888889, + "acc_stderr": 0.03921067198982266, + "acc_norm": 0.3263888888888889, + "acc_norm_stderr": 0.03921067198982266 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816507, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816507 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.36416184971098264, + "acc_stderr": 0.025906632631016124, + "acc_norm": 0.36416184971098264, + "acc_norm_stderr": 0.025906632631016124 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3128834355828221, + "acc_stderr": 0.03642914578292406, + "acc_norm": 0.3128834355828221, + "acc_norm_stderr": 0.03642914578292406 + }, + 
"harness|ko_mmlu_prehistory|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.02657148348071997, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.02657148348071997 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.33678756476683935, + "acc_stderr": 0.03410780251836184, + "acc_norm": 0.33678756476683935, + "acc_norm_stderr": 0.03410780251836184 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.32456140350877194, + "acc_stderr": 0.04404556157374767, + "acc_norm": 0.32456140350877194, + "acc_norm_stderr": 0.04404556157374767 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3431192660550459, + "acc_stderr": 0.02035477773608604, + "acc_norm": 0.3431192660550459, + "acc_norm_stderr": 0.02035477773608604 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.20634920634920634, + "acc_stderr": 0.03619604524124249, + "acc_norm": 0.20634920634920634, + "acc_norm_stderr": 0.03619604524124249 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3660130718954248, + "acc_stderr": 0.027582811415159614, + "acc_norm": 0.3660130718954248, + "acc_norm_stderr": 0.027582811415159614 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.4380165289256198, + "acc_stderr": 0.045291468044357915, + "acc_norm": 0.4380165289256198, + "acc_norm_stderr": 0.045291468044357915 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.25, + "acc_stderr": 0.03523807393012047, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03523807393012047 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3300653594771242, + "acc_stderr": 0.019023726160724556, + "acc_norm": 0.3300653594771242, + "acc_norm_stderr": 0.019023726160724556 + 
}, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.30141843971631205, + "acc_stderr": 0.02737412888263115, + "acc_norm": 0.30141843971631205, + "acc_norm_stderr": 0.02737412888263115 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.04547960999764376, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.04547960999764376 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.19444444444444445, + "acc_stderr": 0.02699145450203673, + "acc_norm": 0.19444444444444445, + "acc_norm_stderr": 0.02699145450203673 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24692737430167597, + "acc_stderr": 0.01442229220480885, + "acc_norm": 0.24692737430167597, + "acc_norm_stderr": 0.01442229220480885 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036846, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036846 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.23161764705882354, + "acc_stderr": 0.025626533803777565, + "acc_norm": 0.23161764705882354, + "acc_norm_stderr": 0.025626533803777565 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2163265306122449, + "acc_stderr": 0.026358916334904035, + "acc_norm": 0.2163265306122449, + "acc_norm_stderr": 0.026358916334904035 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.4219409282700422, + "acc_stderr": 0.032148146302403695, + "acc_norm": 0.4219409282700422, + "acc_norm_stderr": 0.032148146302403695 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2894393741851369, + "acc_stderr": 0.011582659702210238, + "acc_norm": 0.2894393741851369, + "acc_norm_stderr": 0.011582659702210238 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.28431372549019607, + "acc_stderr": 
0.03166009679399812, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.03166009679399812 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3090909090909091, + "acc_stderr": 0.03608541011573967, + "acc_norm": 0.3090909090909091, + "acc_norm_stderr": 0.03608541011573967 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.29865361077111385, + "mc1_stderr": 0.016021570613768542, + "mc2": 0.47094263649993384, + "mc2_stderr": 0.015485309870663936 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.31995277449822906, + "acc_stderr": 0.01603715384028054, + "acc_norm": 0.3990554899645809, + "acc_norm_stderr": 0.016836377292849296 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 
1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "gamzadole/llama3_instruct_preview_alpaca_finetuning", + "model_sha": "ecb3f2d644f14b4cc73112f6d9265905d0eb68cc", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/gamzadole/llama3_instruct_tuning_without_pretraing/result_2024-07-07 11:30:11.json b/gamzadole/llama3_instruct_tuning_without_pretraing/result_2024-07-07 11:30:11.json new file mode 100644 index 0000000000000000000000000000000000000000..7f54b30d079ddccca7cc620206b8e9aadad9574c --- 
/dev/null +++ b/gamzadole/llama3_instruct_tuning_without_pretraing/result_2024-07-07 11:30:11.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.39505119453924914, + "acc_stderr": 0.014285898292938162, + "acc_norm": 0.46331058020477817, + "acc_norm_stderr": 0.014572000527756993 + }, + "harness|ko_hellaswag|10": { + "acc": 0.40938060147381, + "acc_stderr": 0.004907146229347553, + "acc_norm": 0.5544712208723361, + "acc_norm_stderr": 0.004960082528852438 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6257309941520468, + "acc_stderr": 0.037116011853894806, + "acc_norm": 0.6257309941520468, + "acc_norm_stderr": 0.037116011853894806 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6407766990291263, + "acc_stderr": 0.047504583990416946, + "acc_norm": 0.6407766990291263, + "acc_norm_stderr": 0.047504583990416946 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5606641123882503, + "acc_stderr": 0.017747874245683606, + "acc_norm": 0.5606641123882503, + "acc_norm_stderr": 0.017747874245683606 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.043097329010363554, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.043097329010363554 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4595744680851064, + "acc_stderr": 0.032579014820998356, + "acc_norm": 0.4595744680851064, + "acc_norm_stderr": 0.032579014820998356 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4457831325301205, + "acc_stderr": 0.038695433234721015, + "acc_norm": 0.4457831325301205, + "acc_norm_stderr": 0.038695433234721015 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4983922829581994, + "acc_stderr": 0.02839794490780661, + "acc_norm": 0.4983922829581994, + "acc_norm_stderr": 0.02839794490780661 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 
0.547085201793722, + "acc_stderr": 0.033408675019233246, + "acc_norm": 0.547085201793722, + "acc_norm_stderr": 0.033408675019233246 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5648854961832062, + "acc_stderr": 0.04348208051644858, + "acc_norm": 0.5648854961832062, + "acc_norm_stderr": 0.04348208051644858 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6262626262626263, + "acc_stderr": 0.034468977386593325, + "acc_norm": 0.6262626262626263, + "acc_norm_stderr": 0.034468977386593325 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5310344827586206, + "acc_stderr": 0.04158632762097828, + "acc_norm": 0.5310344827586206, + "acc_norm_stderr": 0.04158632762097828 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.30392156862745096, + "acc_stderr": 0.04576665403207762, + "acc_norm": 0.30392156862745096, + "acc_norm_stderr": 0.04576665403207762 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.47058823529411764, + "acc_stderr": 0.032422250271150074, + "acc_norm": 0.47058823529411764, + "acc_norm_stderr": 0.032422250271150074 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4641025641025641, + "acc_stderr": 0.025285585990017827, + "acc_norm": 0.4641025641025641, + "acc_norm_stderr": 0.025285585990017827 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5740740740740741, + "acc_stderr": 0.0478034362693679, + "acc_norm": 0.5740740740740741, + "acc_norm_stderr": 0.0478034362693679 + }, + 
"harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.35960591133004927, + "acc_stderr": 0.03376458246509566, + "acc_norm": 0.35960591133004927, + "acc_norm_stderr": 0.03376458246509566 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5129032258064516, + "acc_stderr": 0.02843453315268187, + "acc_norm": 0.5129032258064516, + "acc_norm_stderr": 0.02843453315268187 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7350427350427351, + "acc_stderr": 0.028911208802749472, + "acc_norm": 0.7350427350427351, + "acc_norm_stderr": 0.028911208802749472 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5056603773584906, + "acc_stderr": 0.030770900763851302, + "acc_norm": 0.5056603773584906, + "acc_norm_stderr": 0.030770900763851302 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.04769300568972744, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.04769300568972744 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2740740740740741, + "acc_stderr": 0.027195934804085626, + "acc_norm": 0.2740740740740741, + "acc_norm_stderr": 0.027195934804085626 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33112582781456956, + "acc_stderr": 0.038425817186598696, + "acc_norm": 0.33112582781456956, + "acc_norm_stderr": 0.038425817186598696 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6417910447761194, + "acc_stderr": 0.03390393042268815, + "acc_norm": 0.6417910447761194, + "acc_norm_stderr": 0.03390393042268815 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.42196531791907516, + "acc_stderr": 0.0376574669386515, + "acc_norm": 0.42196531791907516, + "acc_norm_stderr": 0.0376574669386515 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.32275132275132273, + "acc_stderr": 0.024078943243597016, + "acc_norm": 0.32275132275132273, + "acc_norm_stderr": 0.024078943243597016 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4236111111111111, + "acc_stderr": 
0.041321250197233685, + "acc_norm": 0.4236111111111111, + "acc_norm_stderr": 0.041321250197233685 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.31, + "acc_stderr": 0.046482319871173156, + "acc_norm": 0.31, + "acc_norm_stderr": 0.046482319871173156 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.68, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.68, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.523121387283237, + "acc_stderr": 0.026890297881303125, + "acc_norm": 0.523121387283237, + "acc_norm_stderr": 0.026890297881303125 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4785276073619632, + "acc_stderr": 0.0392474687675113, + "acc_norm": 0.4785276073619632, + "acc_norm_stderr": 0.0392474687675113 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5216049382716049, + "acc_stderr": 0.027794760105008736, + "acc_norm": 0.5216049382716049, + "acc_norm_stderr": 0.027794760105008736 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768077, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768077 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6010362694300518, + "acc_stderr": 0.03533999094065696, + "acc_norm": 0.6010362694300518, + "acc_norm_stderr": 0.03533999094065696 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.34210526315789475, + "acc_stderr": 0.044629175353369376, + "acc_norm": 0.34210526315789475, + "acc_norm_stderr": 0.044629175353369376 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5614678899082569, + "acc_stderr": 0.02127471307395457, + "acc_norm": 0.5614678899082569, + "acc_norm_stderr": 0.02127471307395457 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.04006168083848876, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.04006168083848876 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.46078431372549017, + 
"acc_stderr": 0.028541722692618874, + "acc_norm": 0.46078431372549017, + "acc_norm_stderr": 0.028541722692618874 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7024793388429752, + "acc_stderr": 0.04173349148083499, + "acc_norm": 0.7024793388429752, + "acc_norm_stderr": 0.04173349148083499 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.46710526315789475, + "acc_stderr": 0.040601270352363966, + "acc_norm": 0.46710526315789475, + "acc_norm_stderr": 0.040601270352363966 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.43137254901960786, + "acc_stderr": 0.02003639376835263, + "acc_norm": 0.43137254901960786, + "acc_norm_stderr": 0.02003639376835263 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.35106382978723405, + "acc_stderr": 0.028473501272963764, + "acc_norm": 0.35106382978723405, + "acc_norm_stderr": 0.028473501272963764 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.44642857142857145, + "acc_stderr": 0.047184714852195886, + "acc_norm": 0.44642857142857145, + "acc_norm_stderr": 0.047184714852195886 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2638888888888889, + "acc_stderr": 0.03005820270430985, + "acc_norm": 0.2638888888888889, + "acc_norm_stderr": 0.03005820270430985 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.22905027932960895, + "acc_stderr": 0.014054314935614553, + "acc_norm": 0.22905027932960895, + "acc_norm_stderr": 0.014054314935614553 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + 
"harness|ko_mmlu_professional_medicine|5": { + "acc": 0.33455882352941174, + "acc_stderr": 0.028661996202335307, + "acc_norm": 0.33455882352941174, + "acc_norm_stderr": 0.028661996202335307 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4897959183673469, + "acc_stderr": 0.03200255347893782, + "acc_norm": 0.4897959183673469, + "acc_norm_stderr": 0.03200255347893782 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.569620253164557, + "acc_stderr": 0.03223017195937598, + "acc_norm": 0.569620253164557, + "acc_norm_stderr": 0.03223017195937598 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.31747066492829207, + "acc_stderr": 0.01188889206880931, + "acc_norm": 0.31747066492829207, + "acc_norm_stderr": 0.01188889206880931 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.47058823529411764, + "acc_stderr": 0.03503235296367992, + "acc_norm": 0.47058823529411764, + "acc_norm_stderr": 0.03503235296367992 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5575757575757576, + "acc_stderr": 0.03878372113711275, + "acc_norm": 0.5575757575757576, + "acc_norm_stderr": 0.03878372113711275 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.27539779681762544, + "mc1_stderr": 0.015638135667775523, + "mc2": 0.4332190804159043, + "mc2_stderr": 0.015026071835560874 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4734356552538371, + "acc_stderr": 0.017166075717577747, + "acc_norm": 0.5442739079102715, + "acc_norm_stderr": 0.017122829143292655 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + 
"harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + 
"harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "gamzadole/llama3_instruct_tuning_without_pretraing", + "model_sha": "bbb53ae23ac0e9ffbe602762d39e7517e947f663", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/gangkongkong/llama-2-koen-13b-gangkk-alpaca-cosine-all-epoch3-merge/result_2023-11-01 15:54:56.json b/gangkongkong/llama-2-koen-13b-gangkk-alpaca-cosine-all-epoch3-merge/result_2023-11-01 15:54:56.json new file mode 100644 index 0000000000000000000000000000000000000000..1d545d86f2f06f459fe47f6f2b6679fb2a9969c2 --- /dev/null +++ b/gangkongkong/llama-2-koen-13b-gangkk-alpaca-cosine-all-epoch3-merge/result_2023-11-01 15:54:56.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3626279863481229, + "acc_stderr": 0.014049106564955005, + "acc_norm": 0.43686006825938567, + "acc_norm_stderr": 0.014494421584256512 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4130651264688309, + "acc_stderr": 0.0049137803474988756, + "acc_norm": 0.5571599283011353, + "acc_norm_stderr": 0.004957068377516513 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.47953216374269003, + "acc_stderr": 0.038316105328219316, + "acc_norm": 0.47953216374269003, + "acc_norm_stderr": 0.038316105328219316 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4368932038834951, + "acc_stderr": 0.04911147107365777, + "acc_norm": 0.4368932038834951, + "acc_norm_stderr": 0.04911147107365777 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.49680715197956576, + "acc_stderr": 0.017879598945933065, + "acc_norm": 0.49680715197956576, + "acc_norm_stderr": 
0.017879598945933065 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.42962962962962964, + "acc_stderr": 0.04276349494376598, + "acc_norm": 0.42962962962962964, + "acc_norm_stderr": 0.04276349494376598 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768077, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768077 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.40425531914893614, + "acc_stderr": 0.032081157507886836, + "acc_norm": 0.40425531914893614, + "acc_norm_stderr": 0.032081157507886836 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3855421686746988, + "acc_stderr": 0.03789134424611548, + "acc_norm": 0.3855421686746988, + "acc_norm_stderr": 0.03789134424611548 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.47266881028938906, + "acc_stderr": 0.02835563356832818, + "acc_norm": 0.47266881028938906, + "acc_norm_stderr": 0.02835563356832818 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4977578475336323, + "acc_stderr": 0.033557465352232634, + "acc_norm": 0.4977578475336323, + "acc_norm_stderr": 0.033557465352232634 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4351145038167939, + "acc_stderr": 0.04348208051644858, + "acc_norm": 0.4351145038167939, + "acc_norm_stderr": 0.04348208051644858 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5050505050505051, + "acc_stderr": 0.035621707606254015, + "acc_norm": 0.5050505050505051, + "acc_norm_stderr": 0.035621707606254015 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.31724137931034485, + "acc_stderr": 0.03878352372138621, + "acc_norm": 0.31724137931034485, + "acc_norm_stderr": 0.03878352372138621 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.041583075330832865, + "acc_norm": 0.22549019607843138, + 
"acc_norm_stderr": 0.041583075330832865 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3949579831932773, + "acc_stderr": 0.03175367846096625, + "acc_norm": 0.3949579831932773, + "acc_norm_stderr": 0.03175367846096625 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3769230769230769, + "acc_stderr": 0.024570975364225995, + "acc_norm": 0.3769230769230769, + "acc_norm_stderr": 0.024570975364225995 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.47, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.47, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5185185185185185, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.5185185185185185, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3448275862068966, + "acc_stderr": 0.03344283744280458, + "acc_norm": 0.3448275862068966, + "acc_norm_stderr": 0.03344283744280458 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4161290322580645, + "acc_stderr": 0.028040981380761547, + "acc_norm": 0.4161290322580645, + "acc_norm_stderr": 0.028040981380761547 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6153846153846154, + "acc_stderr": 0.03187195347942466, + "acc_norm": 0.6153846153846154, + "acc_norm_stderr": 0.03187195347942466 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.39622641509433965, + "acc_stderr": 0.030102793781791194, + "acc_norm": 0.39622641509433965, + "acc_norm_stderr": 0.030102793781791194 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.45454545454545453, + "acc_stderr": 0.04769300568972743, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.04769300568972743 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2740740740740741, + "acc_stderr": 
0.027195934804085626, + "acc_norm": 0.2740740740740741, + "acc_norm_stderr": 0.027195934804085626 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.271523178807947, + "acc_stderr": 0.03631329803969654, + "acc_norm": 0.271523178807947, + "acc_norm_stderr": 0.03631329803969654 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.4975124378109453, + "acc_stderr": 0.03535490150137288, + "acc_norm": 0.4975124378109453, + "acc_norm_stderr": 0.03535490150137288 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3236994219653179, + "acc_stderr": 0.03567603799639172, + "acc_norm": 0.3236994219653179, + "acc_norm_stderr": 0.03567603799639172 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.25132275132275134, + "acc_stderr": 0.022340482339643895, + "acc_norm": 0.25132275132275134, + "acc_norm_stderr": 0.022340482339643895 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.040166600304512336, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.040166600304512336 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.55, + "acc_stderr": 0.05, + "acc_norm": 0.55, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.47398843930635837, + "acc_stderr": 0.02688264343402289, + "acc_norm": 0.47398843930635837, + "acc_norm_stderr": 0.02688264343402289 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4110429447852761, + "acc_stderr": 0.038656978537853624, + "acc_norm": 0.4110429447852761, + "acc_norm_stderr": 0.038656978537853624 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4506172839506173, + "acc_stderr": 0.0276847214156562, + "acc_norm": 0.4506172839506173, + "acc_norm_stderr": 0.0276847214156562 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + 
"acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.44559585492227977, + "acc_stderr": 0.03587014986075659, + "acc_norm": 0.44559585492227977, + "acc_norm_stderr": 0.03587014986075659 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.04096985139843671, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.04096985139843671 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.48073394495412847, + "acc_stderr": 0.02142140298254889, + "acc_norm": 0.48073394495412847, + "acc_norm_stderr": 0.02142140298254889 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.24603174603174602, + "acc_stderr": 0.03852273364924317, + "acc_norm": 0.24603174603174602, + "acc_norm_stderr": 0.03852273364924317 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3627450980392157, + "acc_stderr": 0.027530078447110307, + "acc_norm": 0.3627450980392157, + "acc_norm_stderr": 0.027530078447110307 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.44, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.44, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5371900826446281, + "acc_stderr": 0.04551711196104218, + "acc_norm": 0.5371900826446281, + "acc_norm_stderr": 0.04551711196104218 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.35526315789473684, + "acc_stderr": 0.038947344870133176, + "acc_norm": 0.35526315789473684, + "acc_norm_stderr": 0.038947344870133176 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3660130718954248, + "acc_stderr": 0.019488025745529675, + "acc_norm": 0.3660130718954248, + "acc_norm_stderr": 0.019488025745529675 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3049645390070922, + "acc_stderr": 0.027464708442022135, + "acc_norm": 0.3049645390070922, + "acc_norm_stderr": 0.027464708442022135 + }, + "harness|ko_mmlu_machine_learning|5": { + 
"acc": 0.25, + "acc_stderr": 0.04109974682633932, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04109974682633932 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2361111111111111, + "acc_stderr": 0.028963702570791033, + "acc_norm": 0.2361111111111111, + "acc_norm_stderr": 0.028963702570791033 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3125, + "acc_stderr": 0.02815637344037142, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.02815637344037142 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.37551020408163266, + "acc_stderr": 0.031001209039894836, + "acc_norm": 0.37551020408163266, + "acc_norm_stderr": 0.031001209039894836 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5738396624472574, + "acc_stderr": 0.03219035703131774, + "acc_norm": 0.5738396624472574, + "acc_norm_stderr": 0.03219035703131774 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.28683181225554105, + "acc_stderr": 0.011551504781176933, + "acc_norm": 0.28683181225554105, + "acc_norm_stderr": 0.011551504781176933 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.0346022832723917, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.0346022832723917 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.47878787878787876, + "acc_stderr": 0.03900828913737302, + "acc_norm": 0.47878787878787876, + "acc_norm_stderr": 0.03900828913737302 + }, + 
"harness|ko_truthfulqa_mc|0": { + "mc1": 0.27539779681762544, + "mc1_stderr": 0.015638135667775523, + "mc2": 0.43540541386680215, + "mc2_stderr": 0.015086654503820634 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.43683589138134593, + "acc_stderr": 0.017052633559856076, + "acc_norm": 0.5324675324675324, + "acc_norm_stderr": 0.017154073716682868 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "gangkongkong/llama-2-koen-13b-gangkk-alpaca-cosine-all-epoch3-merge", + "model_sha": "b38c6d07fbdb2119f7c1ee28c1a764c305547aec", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/gangkongkong/llama-2-koen-13b-gangkk-alpaca-cosine-all-epoch3-merge/result_2023-11-01 16:20:48.json b/gangkongkong/llama-2-koen-13b-gangkk-alpaca-cosine-all-epoch3-merge/result_2023-11-01 16:20:48.json new file mode 100644 index 0000000000000000000000000000000000000000..1d545d86f2f06f459fe47f6f2b6679fb2a9969c2 --- /dev/null +++ b/gangkongkong/llama-2-koen-13b-gangkk-alpaca-cosine-all-epoch3-merge/result_2023-11-01 16:20:48.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3626279863481229, + "acc_stderr": 0.014049106564955005, + 
"acc_norm": 0.43686006825938567, + "acc_norm_stderr": 0.014494421584256512 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4130651264688309, + "acc_stderr": 0.0049137803474988756, + "acc_norm": 0.5571599283011353, + "acc_norm_stderr": 0.004957068377516513 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.47953216374269003, + "acc_stderr": 0.038316105328219316, + "acc_norm": 0.47953216374269003, + "acc_norm_stderr": 0.038316105328219316 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4368932038834951, + "acc_stderr": 0.04911147107365777, + "acc_norm": 0.4368932038834951, + "acc_norm_stderr": 0.04911147107365777 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.49680715197956576, + "acc_stderr": 0.017879598945933065, + "acc_norm": 0.49680715197956576, + "acc_norm_stderr": 0.017879598945933065 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.42962962962962964, + "acc_stderr": 0.04276349494376598, + "acc_norm": 0.42962962962962964, + "acc_norm_stderr": 0.04276349494376598 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768077, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768077 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.40425531914893614, + "acc_stderr": 0.032081157507886836, + "acc_norm": 0.40425531914893614, + "acc_norm_stderr": 0.032081157507886836 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3855421686746988, + "acc_stderr": 0.03789134424611548, + "acc_norm": 0.3855421686746988, + "acc_norm_stderr": 0.03789134424611548 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.47266881028938906, + "acc_stderr": 0.02835563356832818, + "acc_norm": 0.47266881028938906, + "acc_norm_stderr": 0.02835563356832818 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4977578475336323, + "acc_stderr": 0.033557465352232634, + "acc_norm": 0.4977578475336323, + "acc_norm_stderr": 0.033557465352232634 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4351145038167939, + "acc_stderr": 
0.04348208051644858, + "acc_norm": 0.4351145038167939, + "acc_norm_stderr": 0.04348208051644858 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5050505050505051, + "acc_stderr": 0.035621707606254015, + "acc_norm": 0.5050505050505051, + "acc_norm_stderr": 0.035621707606254015 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.31724137931034485, + "acc_stderr": 0.03878352372138621, + "acc_norm": 0.31724137931034485, + "acc_norm_stderr": 0.03878352372138621 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.041583075330832865, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.041583075330832865 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3949579831932773, + "acc_stderr": 0.03175367846096625, + "acc_norm": 0.3949579831932773, + "acc_norm_stderr": 0.03175367846096625 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3769230769230769, + "acc_stderr": 0.024570975364225995, + "acc_norm": 0.3769230769230769, + "acc_norm_stderr": 0.024570975364225995 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.47, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.47, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5185185185185185, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.5185185185185185, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3448275862068966, + "acc_stderr": 0.03344283744280458, + "acc_norm": 0.3448275862068966, + "acc_norm_stderr": 0.03344283744280458 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 
0.4161290322580645, + "acc_stderr": 0.028040981380761547, + "acc_norm": 0.4161290322580645, + "acc_norm_stderr": 0.028040981380761547 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6153846153846154, + "acc_stderr": 0.03187195347942466, + "acc_norm": 0.6153846153846154, + "acc_norm_stderr": 0.03187195347942466 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.39622641509433965, + "acc_stderr": 0.030102793781791194, + "acc_norm": 0.39622641509433965, + "acc_norm_stderr": 0.030102793781791194 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.45454545454545453, + "acc_stderr": 0.04769300568972743, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.04769300568972743 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2740740740740741, + "acc_stderr": 0.027195934804085626, + "acc_norm": 0.2740740740740741, + "acc_norm_stderr": 0.027195934804085626 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.271523178807947, + "acc_stderr": 0.03631329803969654, + "acc_norm": 0.271523178807947, + "acc_norm_stderr": 0.03631329803969654 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.4975124378109453, + "acc_stderr": 0.03535490150137288, + "acc_norm": 0.4975124378109453, + "acc_norm_stderr": 0.03535490150137288 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3236994219653179, + "acc_stderr": 0.03567603799639172, + "acc_norm": 0.3236994219653179, + "acc_norm_stderr": 0.03567603799639172 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.25132275132275134, + "acc_stderr": 0.022340482339643895, + "acc_norm": 0.25132275132275134, + "acc_norm_stderr": 0.022340482339643895 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.040166600304512336, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.040166600304512336 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 
0.04725815626252606 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.55, + "acc_stderr": 0.05, + "acc_norm": 0.55, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.47398843930635837, + "acc_stderr": 0.02688264343402289, + "acc_norm": 0.47398843930635837, + "acc_norm_stderr": 0.02688264343402289 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4110429447852761, + "acc_stderr": 0.038656978537853624, + "acc_norm": 0.4110429447852761, + "acc_norm_stderr": 0.038656978537853624 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4506172839506173, + "acc_stderr": 0.0276847214156562, + "acc_norm": 0.4506172839506173, + "acc_norm_stderr": 0.0276847214156562 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.44559585492227977, + "acc_stderr": 0.03587014986075659, + "acc_norm": 0.44559585492227977, + "acc_norm_stderr": 0.03587014986075659 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.04096985139843671, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.04096985139843671 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.48073394495412847, + "acc_stderr": 0.02142140298254889, + "acc_norm": 0.48073394495412847, + "acc_norm_stderr": 0.02142140298254889 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.24603174603174602, + "acc_stderr": 0.03852273364924317, + "acc_norm": 0.24603174603174602, + "acc_norm_stderr": 0.03852273364924317 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3627450980392157, + "acc_stderr": 0.027530078447110307, + "acc_norm": 0.3627450980392157, + "acc_norm_stderr": 0.027530078447110307 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.44, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.44, + "acc_norm_stderr": 0.049888765156985884 + }, + 
"harness|ko_mmlu_international_law|5": { + "acc": 0.5371900826446281, + "acc_stderr": 0.04551711196104218, + "acc_norm": 0.5371900826446281, + "acc_norm_stderr": 0.04551711196104218 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.35526315789473684, + "acc_stderr": 0.038947344870133176, + "acc_norm": 0.35526315789473684, + "acc_norm_stderr": 0.038947344870133176 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3660130718954248, + "acc_stderr": 0.019488025745529675, + "acc_norm": 0.3660130718954248, + "acc_norm_stderr": 0.019488025745529675 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3049645390070922, + "acc_stderr": 0.027464708442022135, + "acc_norm": 0.3049645390070922, + "acc_norm_stderr": 0.027464708442022135 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25, + "acc_stderr": 0.04109974682633932, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04109974682633932 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2361111111111111, + "acc_stderr": 0.028963702570791033, + "acc_norm": 0.2361111111111111, + "acc_norm_stderr": 0.028963702570791033 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3125, + "acc_stderr": 0.02815637344037142, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.02815637344037142 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.37551020408163266, + "acc_stderr": 0.031001209039894836, + "acc_norm": 0.37551020408163266, + "acc_norm_stderr": 
0.031001209039894836 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5738396624472574, + "acc_stderr": 0.03219035703131774, + "acc_norm": 0.5738396624472574, + "acc_norm_stderr": 0.03219035703131774 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.28683181225554105, + "acc_stderr": 0.011551504781176933, + "acc_norm": 0.28683181225554105, + "acc_norm_stderr": 0.011551504781176933 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.0346022832723917, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.0346022832723917 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.47878787878787876, + "acc_stderr": 0.03900828913737302, + "acc_norm": 0.47878787878787876, + "acc_norm_stderr": 0.03900828913737302 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.27539779681762544, + "mc1_stderr": 0.015638135667775523, + "mc2": 0.43540541386680215, + "mc2_stderr": 0.015086654503820634 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.43683589138134593, + "acc_stderr": 0.017052633559856076, + "acc_norm": 0.5324675324675324, + "acc_norm_stderr": 0.017154073716682868 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + 
"harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "gangkongkong/llama-2-koen-13b-gangkk-alpaca-cosine-all-epoch3-merge", + "model_sha": 
"b38c6d07fbdb2119f7c1ee28c1a764c305547aec", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/gangyeolkim/llama-3-chat/result_2024-06-10 06:44:28.json b/gangyeolkim/llama-3-chat/result_2024-06-10 06:44:28.json new file mode 100644 index 0000000000000000000000000000000000000000..07cd391c05c4bbe95ff13476a305884a7f3a8306 --- /dev/null +++ b/gangyeolkim/llama-3-chat/result_2024-06-10 06:44:28.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3967576791808874, + "acc_stderr": 0.014296513020180639, + "acc_norm": 0.4522184300341297, + "acc_norm_stderr": 0.014544519880633839 + }, + "harness|ko_hellaswag|10": { + "acc": 0.40669189404501094, + "acc_stderr": 0.004902125388002213, + "acc_norm": 0.5372435769766979, + "acc_norm_stderr": 0.00497591966511654 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6257309941520468, + "acc_stderr": 0.03711601185389481, + "acc_norm": 0.6257309941520468, + "acc_norm_stderr": 0.03711601185389481 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6601941747572816, + "acc_stderr": 0.046897659372781335, + "acc_norm": 0.6601941747572816, + "acc_norm_stderr": 0.046897659372781335 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5568326947637292, + "acc_stderr": 0.017764085035348414, + "acc_norm": 0.5568326947637292, + "acc_norm_stderr": 0.017764085035348414 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37777777777777777, + "acc_stderr": 0.04188307537595853, + "acc_norm": 0.37777777777777777, + "acc_norm_stderr": 0.04188307537595853 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4553191489361702, + "acc_stderr": 0.03255525359340355, + "acc_norm": 0.4553191489361702, + 
"acc_norm_stderr": 0.03255525359340355 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4457831325301205, + "acc_stderr": 0.03869543323472101, + "acc_norm": 0.4457831325301205, + "acc_norm_stderr": 0.03869543323472101 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5434083601286174, + "acc_stderr": 0.0282908690541976, + "acc_norm": 0.5434083601286174, + "acc_norm_stderr": 0.0282908690541976 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.484304932735426, + "acc_stderr": 0.0335412657542081, + "acc_norm": 0.484304932735426, + "acc_norm_stderr": 0.0335412657542081 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5419847328244275, + "acc_stderr": 0.04369802690578757, + "acc_norm": 0.5419847328244275, + "acc_norm_stderr": 0.04369802690578757 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956909, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956909 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6414141414141414, + "acc_stderr": 0.03416903640391521, + "acc_norm": 0.6414141414141414, + "acc_norm_stderr": 0.03416903640391521 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5862068965517241, + "acc_stderr": 0.041042692118062316, + "acc_norm": 0.5862068965517241, + "acc_norm_stderr": 0.041042692118062316 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.04488482852329017, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.04488482852329017 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.542016806722689, + "acc_stderr": 0.03236361111951941, + "acc_norm": 0.542016806722689, + "acc_norm_stderr": 0.03236361111951941 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5051282051282051, + "acc_stderr": 0.025349672906838643, + "acc_norm": 0.5051282051282051, + "acc_norm_stderr": 0.025349672906838643 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.63, + "acc_stderr": 0.048523658709390974, 
+ "acc_norm": 0.63, + "acc_norm_stderr": 0.048523658709390974 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6018518518518519, + "acc_stderr": 0.04732332615978814, + "acc_norm": 0.6018518518518519, + "acc_norm_stderr": 0.04732332615978814 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.43349753694581283, + "acc_stderr": 0.03486731727419872, + "acc_norm": 0.43349753694581283, + "acc_norm_stderr": 0.03486731727419872 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5451612903225806, + "acc_stderr": 0.028327743091561074, + "acc_norm": 0.5451612903225806, + "acc_norm_stderr": 0.028327743091561074 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7606837606837606, + "acc_stderr": 0.027951826808924333, + "acc_norm": 0.7606837606837606, + "acc_norm_stderr": 0.027951826808924333 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4867924528301887, + "acc_stderr": 0.030762134874500482, + "acc_norm": 0.4867924528301887, + "acc_norm_stderr": 0.030762134874500482 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5636363636363636, + "acc_stderr": 0.04750185058907296, + "acc_norm": 0.5636363636363636, + "acc_norm_stderr": 0.04750185058907296 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.34444444444444444, + "acc_stderr": 0.02897264888484427, + "acc_norm": 0.34444444444444444, + "acc_norm_stderr": 0.02897264888484427 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33112582781456956, + "acc_stderr": 0.038425817186598696, + "acc_norm": 0.33112582781456956, + "acc_norm_stderr": 0.038425817186598696 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6368159203980099, + "acc_stderr": 0.03400598505599015, + "acc_norm": 0.6368159203980099, + "acc_norm_stderr": 0.03400598505599015 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.44508670520231214, 
+ "acc_stderr": 0.03789401760283647, + "acc_norm": 0.44508670520231214, + "acc_norm_stderr": 0.03789401760283647 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3386243386243386, + "acc_stderr": 0.02437319786798306, + "acc_norm": 0.3386243386243386, + "acc_norm_stderr": 0.02437319786798306 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4513888888888889, + "acc_stderr": 0.041614023984032786, + "acc_norm": 0.4513888888888889, + "acc_norm_stderr": 0.041614023984032786 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.69, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.69, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5722543352601156, + "acc_stderr": 0.026636539741116082, + "acc_norm": 0.5722543352601156, + "acc_norm_stderr": 0.026636539741116082 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5153374233128835, + "acc_stderr": 0.03926522378708843, + "acc_norm": 0.5153374233128835, + "acc_norm_stderr": 0.03926522378708843 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5493827160493827, + "acc_stderr": 0.0276847214156562, + "acc_norm": 0.5493827160493827, + "acc_norm_stderr": 0.0276847214156562 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5803108808290155, + "acc_stderr": 0.035615873276858834, + "acc_norm": 0.5803108808290155, + "acc_norm_stderr": 0.035615873276858834 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.37719298245614036, + "acc_stderr": 0.04559522141958216, + "acc_norm": 0.37719298245614036, + "acc_norm_stderr": 0.04559522141958216 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 
0.5779816513761468, + "acc_stderr": 0.021174991407763175, + "acc_norm": 0.5779816513761468, + "acc_norm_stderr": 0.021174991407763175 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.40476190476190477, + "acc_stderr": 0.04390259265377561, + "acc_norm": 0.40476190476190477, + "acc_norm_stderr": 0.04390259265377561 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5392156862745098, + "acc_stderr": 0.028541722692618874, + "acc_norm": 0.5392156862745098, + "acc_norm_stderr": 0.028541722692618874 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7024793388429752, + "acc_stderr": 0.04173349148083498, + "acc_norm": 0.7024793388429752, + "acc_norm_stderr": 0.04173349148083498 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5197368421052632, + "acc_stderr": 0.04065771002562603, + "acc_norm": 0.5197368421052632, + "acc_norm_stderr": 0.04065771002562603 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4199346405228758, + "acc_stderr": 0.01996681117825648, + "acc_norm": 0.4199346405228758, + "acc_norm_stderr": 0.01996681117825648 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32269503546099293, + "acc_stderr": 0.02788913930053478, + "acc_norm": 0.32269503546099293, + "acc_norm_stderr": 0.02788913930053478 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.5178571428571429, + "acc_stderr": 0.04742762361243011, + "acc_norm": 0.5178571428571429, + "acc_norm_stderr": 0.04742762361243011 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.03400603625538272, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.03400603625538272 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24916201117318434, + "acc_stderr": 0.014465893829859938, + "acc_norm": 0.24916201117318434, + "acc_norm_stderr": 0.014465893829859938 
+ }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.68, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.68, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.40441176470588236, + "acc_stderr": 0.029812630701569743, + "acc_norm": 0.40441176470588236, + "acc_norm_stderr": 0.029812630701569743 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5510204081632653, + "acc_stderr": 0.03184213866687579, + "acc_norm": 0.5510204081632653, + "acc_norm_stderr": 0.03184213866687579 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6455696202531646, + "acc_stderr": 0.031137304297185798, + "acc_norm": 0.6455696202531646, + "acc_norm_stderr": 0.031137304297185798 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.363754889178618, + "acc_stderr": 0.012286991879902889, + "acc_norm": 0.363754889178618, + "acc_norm_stderr": 0.012286991879902889 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5490196078431373, + "acc_stderr": 0.034924061041636124, + "acc_norm": 0.5490196078431373, + "acc_norm_stderr": 0.034924061041636124 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6424242424242425, + "acc_stderr": 0.03742597043806587, + "acc_norm": 0.6424242424242425, + "acc_norm_stderr": 0.03742597043806587 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2802937576499388, + "mc1_stderr": 0.01572313952460875, + "mc2": 0.45463779850161695, + "mc2_stderr": 0.015298593614354405 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.40731995277449823, + "acc_stderr": 0.016892456695191266, + "acc_norm": 0.4852420306965762, + "acc_norm_stderr": 0.017182864434998564 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + 
"harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + 
"harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "gangyeolkim/llama-3-chat", + "model_sha": "6ecc67270ce3d0671fb08c556bff0fb3a5f4b171", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/garage-bAInd/Platypus2-13B/result_2023-10-10 06:53:53.json b/garage-bAInd/Platypus2-13B/result_2023-10-10 06:53:53.json new file mode 100644 index 0000000000000000000000000000000000000000..e3c979e57da9a5bd8e4ec02a09a9026861529917 --- /dev/null +++ b/garage-bAInd/Platypus2-13B/result_2023-10-10 06:53:53.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3370307167235495, + "acc_stderr": 0.01381347665290228, + "acc_norm": 0.38054607508532423, + "acc_norm_stderr": 0.014188277712349822 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3623780123481378, + "acc_stderr": 0.004797048154893968, + "acc_norm": 0.46883091017725553, + "acc_norm_stderr": 0.00498007670739243 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4327485380116959, + "acc_stderr": 0.03799978644370607, + "acc_norm": 0.4327485380116959, + "acc_norm_stderr": 0.03799978644370607 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.49514563106796117, + "acc_stderr": 0.049505043821289195, + "acc_norm": 0.49514563106796117, + "acc_norm_stderr": 
0.049505043821289195 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4878671775223499, + "acc_stderr": 0.017874698667491355, + "acc_norm": 0.4878671775223499, + "acc_norm_stderr": 0.017874698667491355 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3851851851851852, + "acc_stderr": 0.042039210401562783, + "acc_norm": 0.3851851851851852, + "acc_norm_stderr": 0.042039210401562783 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3872340425531915, + "acc_stderr": 0.03184389265339525, + "acc_norm": 0.3872340425531915, + "acc_norm_stderr": 0.03184389265339525 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3614457831325301, + "acc_stderr": 0.0374005938202932, + "acc_norm": 0.3614457831325301, + "acc_norm_stderr": 0.0374005938202932 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4887459807073955, + "acc_stderr": 0.028390897396863533, + "acc_norm": 0.4887459807073955, + "acc_norm_stderr": 0.028390897396863533 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4260089686098655, + "acc_stderr": 0.0331883328621728, + "acc_norm": 0.4260089686098655, + "acc_norm_stderr": 0.0331883328621728 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4732824427480916, + "acc_stderr": 0.04379024936553894, + "acc_norm": 0.4732824427480916, + "acc_norm_stderr": 0.04379024936553894 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.45454545454545453, + "acc_stderr": 0.03547601494006937, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.03547601494006937 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4068965517241379, + "acc_stderr": 0.040937939812662374, + "acc_norm": 0.4068965517241379, + "acc_norm_stderr": 
0.040937939812662374 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.04023382273617749, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.04023382273617749 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4495798319327731, + "acc_stderr": 0.03231293497137707, + "acc_norm": 0.4495798319327731, + "acc_norm_stderr": 0.03231293497137707 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4256410256410256, + "acc_stderr": 0.025069094387296546, + "acc_norm": 0.4256410256410256, + "acc_norm_stderr": 0.025069094387296546 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.44, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.44, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.35960591133004927, + "acc_stderr": 0.03376458246509567, + "acc_norm": 0.35960591133004927, + "acc_norm_stderr": 0.03376458246509567 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.47419354838709676, + "acc_stderr": 0.02840609505765332, + "acc_norm": 0.47419354838709676, + "acc_norm_stderr": 0.02840609505765332 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6153846153846154, + "acc_stderr": 0.03187195347942466, + "acc_norm": 0.6153846153846154, + "acc_norm_stderr": 0.03187195347942466 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4188679245283019, + "acc_stderr": 0.030365050829115215, + "acc_norm": 0.4188679245283019, + "acc_norm_stderr": 0.030365050829115215 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4818181818181818, + "acc_stderr": 0.04785964010794917, + 
"acc_norm": 0.4818181818181818, + "acc_norm_stderr": 0.04785964010794917 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.31851851851851853, + "acc_stderr": 0.028406533090608466, + "acc_norm": 0.31851851851851853, + "acc_norm_stderr": 0.028406533090608466 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + "acc_stderr": 0.03684881521389023, + "acc_norm": 0.2847682119205298, + "acc_norm_stderr": 0.03684881521389023 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5522388059701493, + "acc_stderr": 0.03516184772952166, + "acc_norm": 0.5522388059701493, + "acc_norm_stderr": 0.03516184772952166 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3468208092485549, + "acc_stderr": 0.036291466701596636, + "acc_norm": 0.3468208092485549, + "acc_norm_stderr": 0.036291466701596636 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30423280423280424, + "acc_stderr": 0.023695415009463084, + "acc_norm": 0.30423280423280424, + "acc_norm_stderr": 0.023695415009463084 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3680555555555556, + "acc_stderr": 0.04032999053960719, + "acc_norm": 0.3680555555555556, + "acc_norm_stderr": 0.04032999053960719 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.57, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.57, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.49421965317919075, + "acc_stderr": 0.02691729617914911, + "acc_norm": 0.49421965317919075, + "acc_norm_stderr": 0.02691729617914911 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4049079754601227, + "acc_stderr": 0.03856672163548913, + "acc_norm": 0.4049079754601227, + "acc_norm_stderr": 0.03856672163548913 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.42592592592592593, + 
"acc_stderr": 0.027513747284379417, + "acc_norm": 0.42592592592592593, + "acc_norm_stderr": 0.027513747284379417 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.533678756476684, + "acc_stderr": 0.03600244069867178, + "acc_norm": 0.533678756476684, + "acc_norm_stderr": 0.03600244069867178 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.04142439719489361, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.04142439719489361 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.46788990825688076, + "acc_stderr": 0.021393071222680804, + "acc_norm": 0.46788990825688076, + "acc_norm_stderr": 0.021393071222680804 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.36507936507936506, + "acc_stderr": 0.04306241259127153, + "acc_norm": 0.36507936507936506, + "acc_norm_stderr": 0.04306241259127153 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.40522875816993464, + "acc_stderr": 0.028110928492809075, + "acc_norm": 0.40522875816993464, + "acc_norm_stderr": 0.028110928492809075 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5537190082644629, + "acc_stderr": 0.04537935177947879, + "acc_norm": 0.5537190082644629, + "acc_norm_stderr": 0.04537935177947879 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4342105263157895, + "acc_stderr": 0.04033565667848319, + "acc_norm": 0.4342105263157895, + "acc_norm_stderr": 0.04033565667848319 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.36437908496732024, + "acc_stderr": 0.019469518221573702, + "acc_norm": 0.36437908496732024, + "acc_norm_stderr": 0.019469518221573702 + }, + 
"harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3120567375886525, + "acc_stderr": 0.02764012054516993, + "acc_norm": 0.3120567375886525, + "acc_norm_stderr": 0.02764012054516993 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25, + "acc_stderr": 0.04109974682633932, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04109974682633932 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3425925925925926, + "acc_stderr": 0.03236585252602157, + "acc_norm": 0.3425925925925926, + "acc_norm_stderr": 0.03236585252602157 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.26927374301675977, + "acc_stderr": 0.014835616582882601, + "acc_norm": 0.26927374301675977, + "acc_norm_stderr": 0.014835616582882601 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4117647058823529, + "acc_stderr": 0.029896163033125468, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.029896163033125468 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5061224489795918, + "acc_stderr": 0.03200682020163907, + "acc_norm": 0.5061224489795918, + "acc_norm_stderr": 0.03200682020163907 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5738396624472574, + "acc_stderr": 0.03219035703131774, + "acc_norm": 0.5738396624472574, + "acc_norm_stderr": 0.03219035703131774 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.34615384615384615, + "acc_stderr": 0.012150699768228568, + "acc_norm": 0.34615384615384615, + "acc_norm_stderr": 0.012150699768228568 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.44607843137254904, + "acc_stderr": 0.03488845451304974, + "acc_norm": 
0.44607843137254904, + "acc_norm_stderr": 0.03488845451304974 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4909090909090909, + "acc_stderr": 0.03903698647748441, + "acc_norm": 0.4909090909090909, + "acc_norm_stderr": 0.03903698647748441 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2766217870257038, + "mc1_stderr": 0.0156596057553269, + "mc2": 0.44247428746712286, + "mc2_stderr": 0.015350644205547385 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3612750885478158, + "acc_stderr": 0.016515463022412014, + "acc_norm": 0.4049586776859504, + "acc_norm_stderr": 0.016876941165045612 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + 
"harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "garage-bAInd/Platypus2-13B", + "model_sha": "0a474bc0e76203528db789f027f4d6cce2727cce", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/garage-bAInd/Platypus2-7B/result_2023-10-10 06:53:10.json b/garage-bAInd/Platypus2-7B/result_2023-10-10 06:53:10.json new file mode 100644 index 0000000000000000000000000000000000000000..4f841716e826c07b66d0fdc31141e728ae3e3936 --- /dev/null +++ b/garage-bAInd/Platypus2-7B/result_2023-10-10 06:53:10.json @@ -0,0 
+1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2858361774744027, + "acc_stderr": 0.013203196088537365, + "acc_norm": 0.3242320819112628, + "acc_norm_stderr": 0.013678810399518822 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3348934475204143, + "acc_stderr": 0.004709886644157085, + "acc_norm": 0.4153555068711412, + "acc_norm_stderr": 0.0049177611817401625 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.47368421052631576, + "acc_stderr": 0.038295098689947286, + "acc_norm": 0.47368421052631576, + "acc_norm_stderr": 0.038295098689947286 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.3883495145631068, + "acc_stderr": 0.0482572933735639, + "acc_norm": 0.3883495145631068, + "acc_norm_stderr": 0.0482572933735639 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.41762452107279696, + "acc_stderr": 0.01763563732695152, + "acc_norm": 0.41762452107279696, + "acc_norm_stderr": 0.01763563732695152 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.362962962962963, + "acc_stderr": 0.04153948404742399, + "acc_norm": 0.362962962962963, + "acc_norm_stderr": 0.04153948404742399 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2851063829787234, + "acc_stderr": 0.02951319662553935, + "acc_norm": 0.2851063829787234, + "acc_norm_stderr": 0.02951319662553935 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.29518072289156627, + "acc_stderr": 0.03550920185689629, + "acc_norm": 0.29518072289156627, + "acc_norm_stderr": 0.03550920185689629 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3858520900321543, + "acc_stderr": 0.027648149599751475, + "acc_norm": 0.3858520900321543, + "acc_norm_stderr": 0.027648149599751475 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3721973094170404, + "acc_stderr": 0.032443052830087304, + "acc_norm": 0.3721973094170404, + 
"acc_norm_stderr": 0.032443052830087304 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.366412213740458, + "acc_stderr": 0.04225875451969638, + "acc_norm": 0.366412213740458, + "acc_norm_stderr": 0.04225875451969638 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3434343434343434, + "acc_stderr": 0.033832012232444426, + "acc_norm": 0.3434343434343434, + "acc_norm_stderr": 0.033832012232444426 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.27586206896551724, + "acc_stderr": 0.037245636197746325, + "acc_norm": 0.27586206896551724, + "acc_norm_stderr": 0.037245636197746325 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.18627450980392157, + "acc_stderr": 0.038739587141493524, + "acc_norm": 0.18627450980392157, + "acc_norm_stderr": 0.038739587141493524 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3739495798319328, + "acc_stderr": 0.031429466378837076, + "acc_norm": 0.3739495798319328, + "acc_norm_stderr": 0.031429466378837076 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2948717948717949, + "acc_stderr": 0.02311936275823229, + "acc_norm": 0.2948717948717949, + "acc_norm_stderr": 0.02311936275823229 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5277777777777778, + "acc_stderr": 0.04826217294139894, + "acc_norm": 0.5277777777777778, + "acc_norm_stderr": 0.04826217294139894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.35960591133004927, + "acc_stderr": 0.03376458246509568, + 
"acc_norm": 0.35960591133004927, + "acc_norm_stderr": 0.03376458246509568 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.36451612903225805, + "acc_stderr": 0.027379871229943238, + "acc_norm": 0.36451612903225805, + "acc_norm_stderr": 0.027379871229943238 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5512820512820513, + "acc_stderr": 0.032583346493868806, + "acc_norm": 0.5512820512820513, + "acc_norm_stderr": 0.032583346493868806 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3132075471698113, + "acc_stderr": 0.02854479331905533, + "acc_norm": 0.3132075471698113, + "acc_norm_stderr": 0.02854479331905533 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.37272727272727274, + "acc_stderr": 0.04631381319425464, + "acc_norm": 0.37272727272727274, + "acc_norm_stderr": 0.04631381319425464 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.02696242432507383, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.02696242432507383 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.24503311258278146, + "acc_stderr": 0.03511807571804724, + "acc_norm": 0.24503311258278146, + "acc_norm_stderr": 0.03511807571804724 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.4626865671641791, + "acc_stderr": 0.035256751674679745, + "acc_norm": 0.4626865671641791, + "acc_norm_stderr": 0.035256751674679745 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.31213872832369943, + "acc_stderr": 0.03533133389323657, + "acc_norm": 0.31213872832369943, + "acc_norm_stderr": 0.03533133389323657 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.022644212615525218, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.022644212615525218 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.25, + "acc_stderr": 0.03621034121889507, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03621034121889507 + }, + "harness|ko_mmlu_college_chemistry|5": { + 
"acc": 0.24, + "acc_stderr": 0.04292346959909284, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909284 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4046242774566474, + "acc_stderr": 0.026424816594009845, + "acc_norm": 0.4046242774566474, + "acc_norm_stderr": 0.026424816594009845 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.37423312883435583, + "acc_stderr": 0.03802068102899616, + "acc_norm": 0.37423312883435583, + "acc_norm_stderr": 0.03802068102899616 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.38580246913580246, + "acc_stderr": 0.027085401226132143, + "acc_norm": 0.38580246913580246, + "acc_norm_stderr": 0.027085401226132143 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.43005181347150256, + "acc_stderr": 0.03572954333144808, + "acc_norm": 0.43005181347150256, + "acc_norm_stderr": 0.03572954333144808 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2982456140350877, + "acc_stderr": 0.04303684033537317, + "acc_norm": 0.2982456140350877, + "acc_norm_stderr": 0.04303684033537317 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3559633027522936, + "acc_stderr": 0.020528559278244218, + "acc_norm": 0.3559633027522936, + "acc_norm_stderr": 0.020528559278244218 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.042163702135578345, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.042163702135578345 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3300653594771242, + "acc_stderr": 0.026925654653615686, + "acc_norm": 0.3300653594771242, + "acc_norm_stderr": 0.026925654653615686 + }, + 
"harness|ko_mmlu_business_ethics|5": { + "acc": 0.42, + "acc_stderr": 0.04960449637488584, + "acc_norm": 0.42, + "acc_norm_stderr": 0.04960449637488584 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6033057851239669, + "acc_stderr": 0.044658697805310094, + "acc_norm": 0.6033057851239669, + "acc_norm_stderr": 0.044658697805310094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.34868421052631576, + "acc_stderr": 0.038781398887976104, + "acc_norm": 0.34868421052631576, + "acc_norm_stderr": 0.038781398887976104 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.35947712418300654, + "acc_stderr": 0.01941253924203216, + "acc_norm": 0.35947712418300654, + "acc_norm_stderr": 0.01941253924203216 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3191489361702128, + "acc_stderr": 0.0278079901413202, + "acc_norm": 0.3191489361702128, + "acc_norm_stderr": 0.0278079901413202 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3392857142857143, + "acc_stderr": 0.0449394906861354, + "acc_norm": 0.3392857142857143, + "acc_norm_stderr": 0.0449394906861354 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.030546745264953195, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.030546745264953195 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2446927374301676, + "acc_stderr": 0.01437816988409842, + "acc_norm": 0.2446927374301676, + "acc_norm_stderr": 0.01437816988409842 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.27205882352941174, + "acc_stderr": 0.027033041151681456, + "acc_norm": 0.27205882352941174, + 
"acc_norm_stderr": 0.027033041151681456 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.27755102040816326, + "acc_stderr": 0.02866685779027465, + "acc_norm": 0.27755102040816326, + "acc_norm_stderr": 0.02866685779027465 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.46835443037974683, + "acc_stderr": 0.03248197400511075, + "acc_norm": 0.46835443037974683, + "acc_norm_stderr": 0.03248197400511075 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.31486310299869624, + "acc_stderr": 0.011862561755715923, + "acc_norm": 0.31486310299869624, + "acc_norm_stderr": 0.011862561755715923 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.43137254901960786, + "acc_stderr": 0.03476099060501637, + "acc_norm": 0.43137254901960786, + "acc_norm_stderr": 0.03476099060501637 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.42424242424242425, + "acc_stderr": 0.038592681420702615, + "acc_norm": 0.42424242424242425, + "acc_norm_stderr": 0.038592681420702615 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2766217870257038, + "mc1_stderr": 0.01565960575532691, + "mc2": 0.4571739435072619, + "mc2_stderr": 0.015454282704862585 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.28689492325855964, + "acc_stderr": 0.015550809966781778, + "acc_norm": 0.34946871310507677, + "acc_norm_stderr": 0.016392797085769854 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + 
"harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + 
"harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "garage-bAInd/Platypus2-7B", + "model_sha": "c27aff7201e611f301c0e19f351cbe74b1a9f1f1", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/gemmathon/gemma-2b-ko-dev-pbc432/result_2024-03-25 07:56:24.json b/gemmathon/gemma-2b-ko-dev-pbc432/result_2024-03-25 07:56:24.json new file mode 100644 index 0000000000000000000000000000000000000000..ea6b37869df7d4ad5d17754eb468c5f793c515f7 --- /dev/null +++ b/gemmathon/gemma-2b-ko-dev-pbc432/result_2024-03-25 07:56:24.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2960750853242321, + "acc_stderr": 0.013340916085246261, + "acc_norm": 0.363481228668942, + "acc_norm_stderr": 0.014056207319068283 + }, + "harness|ko_hellaswag|10": { + "acc": 0.36068512248556067, + "acc_stderr": 0.0047921790525834425, + "acc_norm": 0.46086436964748057, + "acc_norm_stderr": 0.004974473255391264 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.21637426900584794, + "acc_stderr": 0.031581495393387324, + "acc_norm": 0.21637426900584794, + "acc_norm_stderr": 0.031581495393387324 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.22330097087378642, + "acc_stderr": 0.04123553189891431, + "acc_norm": 0.22330097087378642, + "acc_norm_stderr": 0.04123553189891431 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.27586206896551724, + "acc_stderr": 0.01598281477469563, + "acc_norm": 0.27586206896551724, + "acc_norm_stderr": 0.01598281477469563 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3111111111111111, + "acc_stderr": 0.03999262876617723, + "acc_norm": 0.3111111111111111, + "acc_norm_stderr": 0.03999262876617723 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.25, + 
"acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.251063829787234, + "acc_stderr": 0.02834696377716245, + "acc_norm": 0.251063829787234, + "acc_norm_stderr": 0.02834696377716245 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.25903614457831325, + "acc_stderr": 0.03410646614071857, + "acc_norm": 0.25903614457831325, + "acc_norm_stderr": 0.03410646614071857 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3215434083601286, + "acc_stderr": 0.026527724079528872, + "acc_norm": 0.3215434083601286, + "acc_norm_stderr": 0.026527724079528872 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.22869955156950672, + "acc_stderr": 0.028188240046929203, + "acc_norm": 0.22869955156950672, + "acc_norm_stderr": 0.028188240046929203 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3282442748091603, + "acc_stderr": 0.041184385658062976, + "acc_norm": 0.3282442748091603, + "acc_norm_stderr": 0.041184385658062976 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.24242424242424243, + "acc_stderr": 0.030532892233932026, + "acc_norm": 0.24242424242424243, + "acc_norm_stderr": 0.030532892233932026 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.32413793103448274, + "acc_stderr": 0.03900432069185555, + "acc_norm": 0.32413793103448274, + "acc_norm_stderr": 0.03900432069185555 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.04023382273617748, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.04023382273617748 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.23109243697478993, + "acc_stderr": 0.027381406927868966, + "acc_norm": 0.23109243697478993, + "acc_norm_stderr": 0.027381406927868966 + }, + 
"harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.022421273612923707, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.022421273612923707 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.16666666666666666, + "acc_stderr": 0.036028141763926456, + "acc_norm": 0.16666666666666666, + "acc_norm_stderr": 0.036028141763926456 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2512315270935961, + "acc_stderr": 0.030516530732694436, + "acc_norm": 0.2512315270935961, + "acc_norm_stderr": 0.030516530732694436 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.27741935483870966, + "acc_stderr": 0.025470196835900055, + "acc_norm": 0.27741935483870966, + "acc_norm_stderr": 0.025470196835900055 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2905982905982906, + "acc_stderr": 0.029745048572674057, + "acc_norm": 0.2905982905982906, + "acc_norm_stderr": 0.029745048572674057 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.19622641509433963, + "acc_stderr": 0.024442388131100817, + "acc_norm": 0.19622641509433963, + "acc_norm_stderr": 0.024442388131100817 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2545454545454545, + "acc_stderr": 0.04172343038705383, + "acc_norm": 0.2545454545454545, + "acc_norm_stderr": 0.04172343038705383 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.026719240783712166, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.026719240783712166 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2781456953642384, + "acc_stderr": 0.03658603262763743, + "acc_norm": 
0.2781456953642384, + "acc_norm_stderr": 0.03658603262763743 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.31343283582089554, + "acc_stderr": 0.03280188205348645, + "acc_norm": 0.31343283582089554, + "acc_norm_stderr": 0.03280188205348645 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2023121387283237, + "acc_stderr": 0.030631145539198823, + "acc_norm": 0.2023121387283237, + "acc_norm_stderr": 0.030631145539198823 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2830687830687831, + "acc_stderr": 0.023201392938194978, + "acc_norm": 0.2830687830687831, + "acc_norm_stderr": 0.023201392938194978 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2361111111111111, + "acc_stderr": 0.03551446610810826, + "acc_norm": 0.2361111111111111, + "acc_norm_stderr": 0.03551446610810826 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.30346820809248554, + "acc_stderr": 0.024752411960917202, + "acc_norm": 0.30346820809248554, + "acc_norm_stderr": 0.024752411960917202 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.24539877300613497, + "acc_stderr": 0.03380939813943354, + "acc_norm": 0.24539877300613497, + "acc_norm_stderr": 0.03380939813943354 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2932098765432099, + "acc_stderr": 0.025329888171900936, + "acc_norm": 0.2932098765432099, + "acc_norm_stderr": 0.025329888171900936 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.25906735751295334, + "acc_stderr": 0.031618779179354094, + 
"acc_norm": 0.25906735751295334, + "acc_norm_stderr": 0.031618779179354094 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.21929824561403508, + "acc_stderr": 0.03892431106518754, + "acc_norm": 0.21929824561403508, + "acc_norm_stderr": 0.03892431106518754 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.26972477064220185, + "acc_stderr": 0.01902848671111544, + "acc_norm": 0.26972477064220185, + "acc_norm_stderr": 0.01902848671111544 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.15873015873015872, + "acc_stderr": 0.03268454013011743, + "acc_norm": 0.15873015873015872, + "acc_norm_stderr": 0.03268454013011743 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.026090162504279042, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.026090162504279042 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2975206611570248, + "acc_stderr": 0.04173349148083497, + "acc_norm": 0.2975206611570248, + "acc_norm_stderr": 0.04173349148083497 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.03583496176361064, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.03583496176361064 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.21895424836601307, + "acc_stderr": 0.016729937565537523, + "acc_norm": 0.21895424836601307, + "acc_norm_stderr": 0.016729937565537523 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.22695035460992907, + "acc_stderr": 0.024987106365642976, + "acc_norm": 0.22695035460992907, + "acc_norm_stderr": 0.024987106365642976 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.23214285714285715, + "acc_stderr": 0.04007341809755807, + "acc_norm": 0.23214285714285715, + "acc_norm_stderr": 0.04007341809755807 + }, + "harness|ko_mmlu_high_school_statistics|5": { 
+ "acc": 0.2037037037037037, + "acc_stderr": 0.027467401804057986, + "acc_norm": 0.2037037037037037, + "acc_norm_stderr": 0.027467401804057986 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24581005586592178, + "acc_stderr": 0.014400296429225608, + "acc_norm": 0.24581005586592178, + "acc_norm_stderr": 0.014400296429225608 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932269, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932269 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.027678468642144693, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.027678468642144693 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.22857142857142856, + "acc_stderr": 0.02688214492230774, + "acc_norm": 0.22857142857142856, + "acc_norm_stderr": 0.02688214492230774 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.27848101265822783, + "acc_stderr": 0.029178682304842555, + "acc_norm": 0.27848101265822783, + "acc_norm_stderr": 0.029178682304842555 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.24445893089960888, + "acc_stderr": 0.010976425013113897, + "acc_norm": 0.24445893089960888, + "acc_norm_stderr": 0.010976425013113897 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.03058759135160425, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.03058759135160425 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2545454545454545, + "acc_stderr": 0.0340150671524904, + "acc_norm": 0.2545454545454545, + "acc_norm_stderr": 0.0340150671524904 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2484700122399021, + "mc1_stderr": 0.015127427096520705, + "mc2": 0.4016607777606808, + "mc2_stderr": 
0.014845926809175847 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2644628099173554, + "acc_stderr": 0.015163499477892408, + "acc_norm": 0.33884297520661155, + "acc_norm_stderr": 0.016272952997019124 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 
1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "gemmathon/gemma-2b-ko-dev-pbc432", + "model_sha": "35d87b5b339736c8754526c8507ba57a512da812", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/gemmathon/gemma-2b-ko-dev-pbmt192/result_2024-03-25 09:46:47.json b/gemmathon/gemma-2b-ko-dev-pbmt192/result_2024-03-25 09:46:47.json new file mode 100644 index 0000000000000000000000000000000000000000..9579d1f6207a2bd545bcc91447d9d5372ae9aa6b --- /dev/null +++ b/gemmathon/gemma-2b-ko-dev-pbmt192/result_2024-03-25 09:46:47.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.34215017064846415, + "acc_stderr": 0.01386415215917728, + "acc_norm": 0.4052901023890785, + "acc_norm_stderr": 0.01434686906022933 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3686516630153356, + "acc_stderr": 0.004814532642574656, + "acc_norm": 0.4689304919338777, + "acc_norm_stderr": 0.00498013867916104 + }, + 
"harness|ko_mmlu_world_religions|5": { + "acc": 0.18128654970760233, + "acc_stderr": 0.029547741687640024, + "acc_norm": 0.18128654970760233, + "acc_norm_stderr": 0.029547741687640024 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.3592233009708738, + "acc_stderr": 0.04750458399041693, + "acc_norm": 0.3592233009708738, + "acc_norm_stderr": 0.04750458399041693 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3001277139208174, + "acc_stderr": 0.016389249691317432, + "acc_norm": 0.3001277139208174, + "acc_norm_stderr": 0.016389249691317432 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34074074074074073, + "acc_stderr": 0.040943762699967946, + "acc_norm": 0.34074074074074073, + "acc_norm_stderr": 0.040943762699967946 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909284, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909284 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.25957446808510637, + "acc_stderr": 0.02865917937429232, + "acc_norm": 0.25957446808510637, + "acc_norm_stderr": 0.02865917937429232 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.30120481927710846, + "acc_stderr": 0.03571609230053481, + "acc_norm": 0.30120481927710846, + "acc_norm_stderr": 0.03571609230053481 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.33762057877813506, + "acc_stderr": 0.026858825879488544, + "acc_norm": 0.33762057877813506, + "acc_norm_stderr": 0.026858825879488544 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.21076233183856502, + "acc_stderr": 0.027373095500540193, + "acc_norm": 0.21076233183856502, + "acc_norm_stderr": 0.027373095500540193 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.25190839694656486, + "acc_stderr": 0.03807387116306085, + "acc_norm": 0.25190839694656486, + "acc_norm_stderr": 0.03807387116306085 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, 
+ "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.32323232323232326, + "acc_stderr": 0.03332299921070643, + "acc_norm": 0.32323232323232326, + "acc_norm_stderr": 0.03332299921070643 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2896551724137931, + "acc_stderr": 0.03780019230438015, + "acc_norm": 0.2896551724137931, + "acc_norm_stderr": 0.03780019230438015 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.14705882352941177, + "acc_stderr": 0.035240689515674495, + "acc_norm": 0.14705882352941177, + "acc_norm_stderr": 0.035240689515674495 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.029344572500634335, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.029344572500634335 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.31025641025641026, + "acc_stderr": 0.023454674889404288, + "acc_norm": 0.31025641025641026, + "acc_norm_stderr": 0.023454674889404288 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.24074074074074073, + "acc_stderr": 0.04133119440243839, + "acc_norm": 0.24074074074074073, + "acc_norm_stderr": 0.04133119440243839 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2561576354679803, + "acc_stderr": 0.030712730070982592, + "acc_norm": 0.2561576354679803, + "acc_norm_stderr": 0.030712730070982592 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3161290322580645, + "acc_stderr": 0.026450874489042757, + "acc_norm": 0.3161290322580645, + "acc_norm_stderr": 0.026450874489042757 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.029343114798094476, + "acc_norm": 
0.2777777777777778, + "acc_norm_stderr": 0.029343114798094476 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.22641509433962265, + "acc_stderr": 0.025757559893106727, + "acc_norm": 0.22641509433962265, + "acc_norm_stderr": 0.025757559893106727 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.3090909090909091, + "acc_stderr": 0.044262946482000985, + "acc_norm": 0.3090909090909091, + "acc_norm_stderr": 0.044262946482000985 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.026842057873833706, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.026842057873833706 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.03780445850526733, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.03780445850526733 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.36318407960199006, + "acc_stderr": 0.034005985055990146, + "acc_norm": 0.36318407960199006, + "acc_norm_stderr": 0.034005985055990146 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.26011560693641617, + "acc_stderr": 0.03345036916788991, + "acc_norm": 0.26011560693641617, + "acc_norm_stderr": 0.03345036916788991 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2751322751322751, + "acc_stderr": 0.02300008685906865, + "acc_norm": 0.2751322751322751, + "acc_norm_stderr": 0.02300008685906865 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2847222222222222, + "acc_stderr": 0.037738099906869355, + "acc_norm": 0.2847222222222222, + "acc_norm_stderr": 0.037738099906869355 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2832369942196532, + 
"acc_stderr": 0.02425790170532338, + "acc_norm": 0.2832369942196532, + "acc_norm_stderr": 0.02425790170532338 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2331288343558282, + "acc_stderr": 0.033220157957767414, + "acc_norm": 0.2331288343558282, + "acc_norm_stderr": 0.033220157957767414 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3425925925925926, + "acc_stderr": 0.02640614597362567, + "acc_norm": 0.3425925925925926, + "acc_norm_stderr": 0.02640614597362567 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542129, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542129 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.24352331606217617, + "acc_stderr": 0.03097543638684543, + "acc_norm": 0.24352331606217617, + "acc_norm_stderr": 0.03097543638684543 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.19298245614035087, + "acc_stderr": 0.03712454853721368, + "acc_norm": 0.19298245614035087, + "acc_norm_stderr": 0.03712454853721368 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3229357798165138, + "acc_stderr": 0.020048115923415325, + "acc_norm": 0.3229357798165138, + "acc_norm_stderr": 0.020048115923415325 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.21428571428571427, + "acc_stderr": 0.03670066451047181, + "acc_norm": 0.21428571428571427, + "acc_norm_stderr": 0.03670066451047181 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3366013071895425, + "acc_stderr": 0.027057974624494382, + "acc_norm": 0.3366013071895425, + "acc_norm_stderr": 0.027057974624494382 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.256198347107438, + "acc_stderr": 0.03984979653302871, + "acc_norm": 0.256198347107438, + "acc_norm_stderr": 0.03984979653302871 + }, + "harness|ko_mmlu_astronomy|5": { + 
"acc": 0.34210526315789475, + "acc_stderr": 0.03860731599316092, + "acc_norm": 0.34210526315789475, + "acc_norm_stderr": 0.03860731599316092 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.24183006535947713, + "acc_stderr": 0.017322789207784326, + "acc_norm": 0.24183006535947713, + "acc_norm_stderr": 0.017322789207784326 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2198581560283688, + "acc_stderr": 0.024706141070705477, + "acc_norm": 0.2198581560283688, + "acc_norm_stderr": 0.024706141070705477 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.24107142857142858, + "acc_stderr": 0.04059867246952687, + "acc_norm": 0.24107142857142858, + "acc_norm_stderr": 0.04059867246952687 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.37962962962962965, + "acc_stderr": 0.03309682581119035, + "acc_norm": 0.37962962962962965, + "acc_norm_stderr": 0.03309682581119035 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23575418994413408, + "acc_stderr": 0.014196375686290803, + "acc_norm": 0.23575418994413408, + "acc_norm_stderr": 0.014196375686290803 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.40808823529411764, + "acc_stderr": 0.029855261393483927, + "acc_norm": 0.40808823529411764, + "acc_norm_stderr": 0.029855261393483927 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.24081632653061225, + "acc_stderr": 0.027372942201788174, + "acc_norm": 0.24081632653061225, + "acc_norm_stderr": 0.027372942201788174 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.23628691983122363, + "acc_stderr": 0.027652153144159263, + "acc_norm": 
0.23628691983122363, + "acc_norm_stderr": 0.027652153144159263 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2288135593220339, + "acc_stderr": 0.010728759090375505, + "acc_norm": 0.2288135593220339, + "acc_norm_stderr": 0.010728759090375505 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.20098039215686275, + "acc_stderr": 0.028125972265654373, + "acc_norm": 0.20098039215686275, + "acc_norm_stderr": 0.028125972265654373 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2787878787878788, + "acc_stderr": 0.035014387062967806, + "acc_norm": 0.2787878787878788, + "acc_norm_stderr": 0.035014387062967806 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.26193390452876375, + "mc1_stderr": 0.015392118805015006, + "mc2": 0.4222174184184925, + "mc2_stderr": 0.015149588490094387 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.29043683589138136, + "acc_stderr": 0.01560760256981463, + "acc_norm": 0.3730814639905549, + "acc_norm_stderr": 0.01662731827513744 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + 
"harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "gemmathon/gemma-2b-ko-dev-pbmt192", + "model_sha": "856bd740a9c014e7dfc7233dbe6b9083f44ac8a5", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + 
"override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/gemmathon/gemma-2b-ko-v0/result_2024-04-05 05:01:14.json b/gemmathon/gemma-2b-ko-v0/result_2024-04-05 05:01:14.json new file mode 100644 index 0000000000000000000000000000000000000000..d2e46a448210c2f62dfe342ff67bc9c1bb3c6ee5 --- /dev/null +++ b/gemmathon/gemma-2b-ko-v0/result_2024-04-05 05:01:14.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.257679180887372, + "acc_stderr": 0.01278077056276842, + "acc_norm": 0.3122866894197952, + "acc_norm_stderr": 0.013542598541688064 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3407687711611233, + "acc_stderr": 0.004729990807895061, + "acc_norm": 0.4272057359091814, + "acc_norm_stderr": 0.004936616428922639 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.3216374269005848, + "acc_stderr": 0.03582529442573122, + "acc_norm": 0.3216374269005848, + "acc_norm_stderr": 0.03582529442573122 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.17475728155339806, + "acc_stderr": 0.037601780060266196, + "acc_norm": 0.17475728155339806, + "acc_norm_stderr": 0.037601780060266196 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.23754789272030652, + "acc_stderr": 0.015218733046150191, + "acc_norm": 0.23754789272030652, + "acc_norm_stderr": 0.015218733046150191 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.17777777777777778, + "acc_stderr": 0.03302789859901717, + "acc_norm": 0.17777777777777778, + "acc_norm_stderr": 0.03302789859901717 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932268, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932268 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.26382978723404255, + "acc_stderr": 0.028809989854102987, + "acc_norm": 0.26382978723404255, + "acc_norm_stderr": 0.028809989854102987 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.26506024096385544, + "acc_stderr": 0.03436024037944967, + 
"acc_norm": 0.26506024096385544, + "acc_norm_stderr": 0.03436024037944967 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.1864951768488746, + "acc_stderr": 0.022122439772480757, + "acc_norm": 0.1864951768488746, + "acc_norm_stderr": 0.022122439772480757 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.31390134529147984, + "acc_stderr": 0.031146796482972465, + "acc_norm": 0.31390134529147984, + "acc_norm_stderr": 0.031146796482972465 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2595419847328244, + "acc_stderr": 0.03844876139785271, + "acc_norm": 0.2595419847328244, + "acc_norm_stderr": 0.03844876139785271 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.17676767676767677, + "acc_stderr": 0.027178752639044915, + "acc_norm": 0.17676767676767677, + "acc_norm_stderr": 0.027178752639044915 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.23448275862068965, + "acc_stderr": 0.035306258743465914, + "acc_norm": 0.23448275862068965, + "acc_norm_stderr": 0.035306258743465914 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237654, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237654 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.23109243697478993, + "acc_stderr": 0.027381406927868966, + "acc_norm": 0.23109243697478993, + "acc_norm_stderr": 0.027381406927868966 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.21025641025641026, + "acc_stderr": 0.020660597485026928, + "acc_norm": 0.21025641025641026, + "acc_norm_stderr": 0.020660597485026928 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 
0.18, + "acc_stderr": 0.038612291966536955, + "acc_norm": 0.18, + "acc_norm_stderr": 0.038612291966536955 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.04236511258094633, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.04236511258094633 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.14285714285714285, + "acc_stderr": 0.024620785269489666, + "acc_norm": 0.14285714285714285, + "acc_norm_stderr": 0.024620785269489666 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2, + "acc_stderr": 0.022755204959542936, + "acc_norm": 0.2, + "acc_norm_stderr": 0.022755204959542936 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2905982905982906, + "acc_stderr": 0.029745048572674057, + "acc_norm": 0.2905982905982906, + "acc_norm_stderr": 0.029745048572674057 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.21509433962264152, + "acc_stderr": 0.025288394502891377, + "acc_norm": 0.21509433962264152, + "acc_norm_stderr": 0.025288394502891377 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03955932861795833, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03955932861795833 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2740740740740741, + "acc_stderr": 0.027195934804085622, + "acc_norm": 0.2740740740740741, + "acc_norm_stderr": 0.027195934804085622 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33112582781456956, + "acc_stderr": 0.038425817186598696, + "acc_norm": 0.33112582781456956, + "acc_norm_stderr": 0.038425817186598696 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.24378109452736318, + "acc_stderr": 0.030360490154014645, + "acc_norm": 0.24378109452736318, + "acc_norm_stderr": 0.030360490154014645 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.20809248554913296, + "acc_stderr": 0.030952890217749884, + "acc_norm": 0.20809248554913296, + "acc_norm_stderr": 0.030952890217749884 + }, + 
"harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.24074074074074073, + "acc_stderr": 0.02201908001221789, + "acc_norm": 0.24074074074074073, + "acc_norm_stderr": 0.02201908001221789 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2569444444444444, + "acc_stderr": 0.03653946969442099, + "acc_norm": 0.2569444444444444, + "acc_norm_stderr": 0.03653946969442099 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.2, + "acc_stderr": 0.040201512610368445, + "acc_norm": 0.2, + "acc_norm_stderr": 0.040201512610368445 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542129, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542129 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.24277456647398843, + "acc_stderr": 0.0230836585869842, + "acc_norm": 0.24277456647398843, + "acc_norm_stderr": 0.0230836585869842 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.22085889570552147, + "acc_stderr": 0.03259177392742178, + "acc_norm": 0.22085889570552147, + "acc_norm_stderr": 0.03259177392742178 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2623456790123457, + "acc_stderr": 0.02447722285613511, + "acc_norm": 0.2623456790123457, + "acc_norm_stderr": 0.02447722285613511 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.19689119170984457, + "acc_stderr": 0.028697873971860677, + "acc_norm": 0.19689119170984457, + "acc_norm_stderr": 0.028697873971860677 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.04227054451232199, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.04227054451232199 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.1926605504587156, + "acc_stderr": 0.016909276884936097, + "acc_norm": 0.1926605504587156, + "acc_norm_stderr": 
0.016909276884936097 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.20634920634920634, + "acc_stderr": 0.036196045241242515, + "acc_norm": 0.20634920634920634, + "acc_norm_stderr": 0.036196045241242515 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.21895424836601307, + "acc_stderr": 0.02367908986180772, + "acc_norm": 0.21895424836601307, + "acc_norm_stderr": 0.02367908986180772 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2396694214876033, + "acc_stderr": 0.038968789850704164, + "acc_norm": 0.2396694214876033, + "acc_norm_stderr": 0.038968789850704164 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.17763157894736842, + "acc_stderr": 0.03110318238312338, + "acc_norm": 0.17763157894736842, + "acc_norm_stderr": 0.03110318238312338 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.25, + "acc_stderr": 0.01751781884501444, + "acc_norm": 0.25, + "acc_norm_stderr": 0.01751781884501444 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.23404255319148937, + "acc_stderr": 0.025257861359432414, + "acc_norm": 0.23404255319148937, + "acc_norm_stderr": 0.025257861359432414 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3125, + "acc_stderr": 0.043994650575715215, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.043994650575715215 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4675925925925926, + "acc_stderr": 0.03402801581358966, + "acc_norm": 0.4675925925925926, + "acc_norm_stderr": 0.03402801581358966 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2335195530726257, + "acc_stderr": 0.01414957534897627, + "acc_norm": 0.2335195530726257, + "acc_norm_stderr": 0.01414957534897627 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 
0.046056618647183814 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720683, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720683 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4485294117647059, + "acc_stderr": 0.030211479609121593, + "acc_norm": 0.4485294117647059, + "acc_norm_stderr": 0.030211479609121593 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.20408163265306123, + "acc_stderr": 0.025801283475090496, + "acc_norm": 0.20408163265306123, + "acc_norm_stderr": 0.025801283475090496 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.26582278481012656, + "acc_stderr": 0.028756799629658332, + "acc_norm": 0.26582278481012656, + "acc_norm_stderr": 0.028756799629658332 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2457627118644068, + "acc_stderr": 0.010996156635142692, + "acc_norm": 0.2457627118644068, + "acc_norm_stderr": 0.010996156635142692 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.030964517926923393, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.030964517926923393 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03225078108306289, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03225078108306289 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2558139534883721, + "mc1_stderr": 0.015274176219283345, + "mc2": 0.4278860982053915, + "mc2_stderr": 0.014945872270488682 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.29161747343565525, + "acc_stderr": 0.015626276690070242, + "acc_norm": 0.4085005903187721, + "acc_norm_stderr": 0.01690006287942712 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + 
"harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + 
"harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "gemmathon/gemma-2b-ko-v0", + "model_sha": "9dc5d344054c7a96ef8aa49261892718ae4fbde3", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/gemmathon/gemma-pro-2.8b-ko-v0/result_2024-04-05 05:00:45.json b/gemmathon/gemma-pro-2.8b-ko-v0/result_2024-04-05 05:00:45.json new file mode 100644 index 0000000000000000000000000000000000000000..6388bd3f10cf5ec3ba2a15b7076e62a5da9ee8af --- /dev/null +++ b/gemmathon/gemma-pro-2.8b-ko-v0/result_2024-04-05 05:00:45.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2636518771331058, + "acc_stderr": 0.012875929151297066, + "acc_norm": 0.302901023890785, + "acc_norm_stderr": 0.013428241573185349 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3311093407687712, + "acc_stderr": 0.004696505101217406, + "acc_norm": 0.4111730730930094, + "acc_norm_stderr": 0.0049104091501354875 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.26900584795321636, + "acc_stderr": 0.0340105262010409, + "acc_norm": 0.26900584795321636, + "acc_norm_stderr": 0.0340105262010409 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.32038834951456313, + "acc_stderr": 0.0462028408228004, + "acc_norm": 0.32038834951456313, + "acc_norm_stderr": 0.0462028408228004 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3001277139208174, + "acc_stderr": 0.016389249691317425, + 
"acc_norm": 0.3001277139208174, + "acc_norm_stderr": 0.016389249691317425 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3925925925925926, + "acc_stderr": 0.04218506215368879, + "acc_norm": 0.3925925925925926, + "acc_norm_stderr": 0.04218506215368879 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768077, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768077 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.32340425531914896, + "acc_stderr": 0.030579442773610344, + "acc_norm": 0.32340425531914896, + "acc_norm_stderr": 0.030579442773610344 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.26506024096385544, + "acc_stderr": 0.03436024037944967, + "acc_norm": 0.26506024096385544, + "acc_norm_stderr": 0.03436024037944967 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3311897106109325, + "acc_stderr": 0.026730620728004927, + "acc_norm": 0.3311897106109325, + "acc_norm_stderr": 0.026730620728004927 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.2825112107623318, + "acc_stderr": 0.030216831011508752, + "acc_norm": 0.2825112107623318, + "acc_norm_stderr": 0.030216831011508752 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2595419847328244, + "acc_stderr": 0.03844876139785271, + "acc_norm": 0.2595419847328244, + "acc_norm_stderr": 0.03844876139785271 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816505, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816505 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.03191178226713547, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.03191178226713547 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2827586206896552, + "acc_stderr": 0.03752833958003336, + "acc_norm": 0.2827586206896552, + "acc_norm_stderr": 0.03752833958003336 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 
0.041583075330832865, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.041583075330832865 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.02865749128507197, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.02865749128507197 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2230769230769231, + "acc_stderr": 0.021107730127244, + "acc_norm": 0.2230769230769231, + "acc_norm_stderr": 0.021107730127244 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.044143436668549335, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.044143436668549335 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3645320197044335, + "acc_stderr": 0.0338640574606209, + "acc_norm": 0.3645320197044335, + "acc_norm_stderr": 0.0338640574606209 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2838709677419355, + "acc_stderr": 0.025649381063029254, + "acc_norm": 0.2838709677419355, + "acc_norm_stderr": 0.025649381063029254 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.358974358974359, + "acc_stderr": 0.031426169937919225, + "acc_norm": 0.358974358974359, + "acc_norm_stderr": 0.031426169937919225 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2490566037735849, + "acc_stderr": 0.026616482980501708, + "acc_norm": 0.2490566037735849, + "acc_norm_stderr": 0.026616482980501708 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.3090909090909091, + "acc_stderr": 0.044262946482000985, + "acc_norm": 0.3090909090909091, + "acc_norm_stderr": 0.044262946482000985 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + 
"acc": 0.24814814814814815, + "acc_stderr": 0.0263357394040558, + "acc_norm": 0.24814814814814815, + "acc_norm_stderr": 0.0263357394040558 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2251655629139073, + "acc_stderr": 0.03410435282008936, + "acc_norm": 0.2251655629139073, + "acc_norm_stderr": 0.03410435282008936 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.3034825870646766, + "acc_stderr": 0.03251006816458616, + "acc_norm": 0.3034825870646766, + "acc_norm_stderr": 0.03251006816458616 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.27167630057803466, + "acc_stderr": 0.03391750322321659, + "acc_norm": 0.27167630057803466, + "acc_norm_stderr": 0.03391750322321659 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.24338624338624337, + "acc_stderr": 0.022101128787415426, + "acc_norm": 0.24338624338624337, + "acc_norm_stderr": 0.022101128787415426 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2361111111111111, + "acc_stderr": 0.03551446610810826, + "acc_norm": 0.2361111111111111, + "acc_norm_stderr": 0.03551446610810826 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909282, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909282 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2543352601156069, + "acc_stderr": 0.023445826276545543, + "acc_norm": 0.2543352601156069, + "acc_norm_stderr": 0.023445826276545543 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.26380368098159507, + "acc_stderr": 0.03462419931615623, + "acc_norm": 0.26380368098159507, + "acc_norm_stderr": 0.03462419931615623 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.33024691358024694, + "acc_stderr": 0.026168298456732846, + "acc_norm": 0.33024691358024694, + "acc_norm_stderr": 0.026168298456732846 + }, + 
"harness|ko_mmlu_college_mathematics|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.21761658031088082, + "acc_stderr": 0.029778663037752954, + "acc_norm": 0.21761658031088082, + "acc_norm_stderr": 0.029778663037752954 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.04227054451232199, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.04227054451232199 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.24770642201834864, + "acc_stderr": 0.018508143602547815, + "acc_norm": 0.24770642201834864, + "acc_norm_stderr": 0.018508143602547815 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.16666666666666666, + "acc_stderr": 0.03333333333333337, + "acc_norm": 0.16666666666666666, + "acc_norm_stderr": 0.03333333333333337 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.026568921015457162, + "acc_norm": 0.3137254901960784, + "acc_norm_stderr": 0.026568921015457162 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.3305785123966942, + "acc_stderr": 0.04294340845212094, + "acc_norm": 0.3305785123966942, + "acc_norm_stderr": 0.04294340845212094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.034597776068105365, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.034597776068105365 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2761437908496732, + "acc_stderr": 0.018087276935663133, + "acc_norm": 0.2761437908496732, + "acc_norm_stderr": 0.018087276935663133 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.26595744680851063, + "acc_stderr": 0.026358065698880592, + "acc_norm": 
0.26595744680851063, + "acc_norm_stderr": 0.026358065698880592 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.26785714285714285, + "acc_stderr": 0.042032772914677614, + "acc_norm": 0.26785714285714285, + "acc_norm_stderr": 0.042032772914677614 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.22685185185185186, + "acc_stderr": 0.028561650102422273, + "acc_norm": 0.22685185185185186, + "acc_norm_stderr": 0.028561650102422273 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.16911764705882354, + "acc_stderr": 0.022770868010113007, + "acc_norm": 0.16911764705882354, + "acc_norm_stderr": 0.022770868010113007 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2571428571428571, + "acc_stderr": 0.027979823538744543, + "acc_norm": 0.2571428571428571, + "acc_norm_stderr": 0.027979823538744543 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.270042194092827, + "acc_stderr": 0.028900721906293426, + "acc_norm": 0.270042194092827, + "acc_norm_stderr": 0.028900721906293426 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.27509778357235987, + "acc_stderr": 0.011405443620996937, + "acc_norm": 0.27509778357235987, + "acc_norm_stderr": 0.011405443620996937 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.24019607843137256, + "acc_stderr": 0.02998373305591361, + "acc_norm": 0.24019607843137256, + "acc_norm_stderr": 0.02998373305591361 + }, + 
"harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.296969696969697, + "acc_stderr": 0.03567969772268047, + "acc_norm": 0.296969696969697, + "acc_norm_stderr": 0.03567969772268047 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.27050183598531213, + "mc1_stderr": 0.015550778332842881, + "mc2": 0.41513756388115247, + "mc2_stderr": 0.015091794875810036 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.24203069657615112, + "acc_stderr": 0.014725696750525338, + "acc_norm": 0.35064935064935066, + "acc_norm_stderr": 0.0164055569038933 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + 
"harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "gemmathon/gemma-pro-2.8b-ko-v0", + "model_sha": "350ca217de3f88bb143f293ce7ea4f4edf41ec81", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/gemmathon/gemma-pro-3.1b-ko-v0.1/result_2024-04-08 16:55:49.json b/gemmathon/gemma-pro-3.1b-ko-v0.1/result_2024-04-08 16:55:49.json new file mode 100644 index 0000000000000000000000000000000000000000..e190117fd87de3ff4427d975af059742f096914c --- /dev/null +++ b/gemmathon/gemma-pro-3.1b-ko-v0.1/result_2024-04-08 16:55:49.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { 
+ "acc": 0.2687713310580205, + "acc_stderr": 0.012955065963710675, + "acc_norm": 0.32081911262798635, + "acc_norm_stderr": 0.013640943091946531 + }, + "harness|ko_hellaswag|10": { + "acc": 0.32812188807010556, + "acc_stderr": 0.004685698752104813, + "acc_norm": 0.4032065325632344, + "acc_norm_stderr": 0.0048953903414456264 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.2573099415204678, + "acc_stderr": 0.03352799844161865, + "acc_norm": 0.2573099415204678, + "acc_norm_stderr": 0.03352799844161865 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.27184466019417475, + "acc_stderr": 0.044052680241409216, + "acc_norm": 0.27184466019417475, + "acc_norm_stderr": 0.044052680241409216 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3001277139208174, + "acc_stderr": 0.016389249691317425, + "acc_norm": 0.3001277139208174, + "acc_norm_stderr": 0.016389249691317425 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2740740740740741, + "acc_stderr": 0.03853254836552004, + "acc_norm": 0.2740740740740741, + "acc_norm_stderr": 0.03853254836552004 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768077, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768077 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.33191489361702126, + "acc_stderr": 0.030783736757745647, + "acc_norm": 0.33191489361702126, + "acc_norm_stderr": 0.030783736757745647 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3253012048192771, + "acc_stderr": 0.03647168523683229, + "acc_norm": 0.3253012048192771, + "acc_norm_stderr": 0.03647168523683229 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2829581993569132, + "acc_stderr": 0.025583062489984845, + "acc_norm": 0.2829581993569132, + "acc_norm_stderr": 0.025583062489984845 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.33183856502242154, + "acc_stderr": 0.03160295143776679, + "acc_norm": 0.33183856502242154, + "acc_norm_stderr": 0.03160295143776679 + }, + 
"harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2900763358778626, + "acc_stderr": 0.03980066246467765, + "acc_norm": 0.2900763358778626, + "acc_norm_stderr": 0.03980066246467765 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.25757575757575757, + "acc_stderr": 0.031156269519646836, + "acc_norm": 0.25757575757575757, + "acc_norm_stderr": 0.031156269519646836 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.27586206896551724, + "acc_stderr": 0.037245636197746325, + "acc_norm": 0.27586206896551724, + "acc_norm_stderr": 0.037245636197746325 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.18627450980392157, + "acc_stderr": 0.038739587141493524, + "acc_norm": 0.18627450980392157, + "acc_norm_stderr": 0.038739587141493524 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.226890756302521, + "acc_stderr": 0.027205371538279483, + "acc_norm": 0.226890756302521, + "acc_norm_stderr": 0.027205371538279483 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2282051282051282, + "acc_stderr": 0.02127839386358628, + "acc_norm": 0.2282051282051282, + "acc_norm_stderr": 0.02127839386358628 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.043300437496507416, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.043300437496507416 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.27586206896551724, + "acc_stderr": 0.031447125816782426, + "acc_norm": 0.27586206896551724, + 
"acc_norm_stderr": 0.031447125816782426 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.27419354838709675, + "acc_stderr": 0.025378139970885203, + "acc_norm": 0.27419354838709675, + "acc_norm_stderr": 0.025378139970885203 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.3076923076923077, + "acc_stderr": 0.03023638994217309, + "acc_norm": 0.3076923076923077, + "acc_norm_stderr": 0.03023638994217309 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2981132075471698, + "acc_stderr": 0.028152837942493868, + "acc_norm": 0.2981132075471698, + "acc_norm_stderr": 0.028152837942493868 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.34545454545454546, + "acc_stderr": 0.04554619617541054, + "acc_norm": 0.34545454545454546, + "acc_norm_stderr": 0.04554619617541054 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.02684205787383371, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.02684205787383371 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.03710185726119995, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.03710185726119995 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.31343283582089554, + "acc_stderr": 0.03280188205348644, + "acc_norm": 0.31343283582089554, + "acc_norm_stderr": 0.03280188205348644 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.20809248554913296, + "acc_stderr": 0.030952890217749895, + "acc_norm": 0.20809248554913296, + "acc_norm_stderr": 0.030952890217749895 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2698412698412698, + "acc_stderr": 0.022860838309232072, + "acc_norm": 0.2698412698412698, + "acc_norm_stderr": 0.022860838309232072 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.03745554791462457, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.03745554791462457 + }, + "harness|ko_mmlu_college_chemistry|5": { + 
"acc": 0.22, + "acc_stderr": 0.0416333199893227, + "acc_norm": 0.22, + "acc_norm_stderr": 0.0416333199893227 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2745664739884393, + "acc_stderr": 0.024027745155265016, + "acc_norm": 0.2745664739884393, + "acc_norm_stderr": 0.024027745155265016 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.22699386503067484, + "acc_stderr": 0.032910995786157686, + "acc_norm": 0.22699386503067484, + "acc_norm_stderr": 0.032910995786157686 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.30246913580246915, + "acc_stderr": 0.02555765398186803, + "acc_norm": 0.30246913580246915, + "acc_norm_stderr": 0.02555765398186803 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.22797927461139897, + "acc_stderr": 0.030276909945178256, + "acc_norm": 0.22797927461139897, + "acc_norm_stderr": 0.030276909945178256 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.042270544512321984, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.042270544512321984 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.23119266055045873, + "acc_stderr": 0.01807575024163315, + "acc_norm": 0.23119266055045873, + "acc_norm_stderr": 0.01807575024163315 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.19047619047619047, + "acc_stderr": 0.035122074123020534, + "acc_norm": 0.19047619047619047, + "acc_norm_stderr": 0.035122074123020534 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.25163398692810457, + "acc_stderr": 0.0248480182638752, + "acc_norm": 0.25163398692810457, + "acc_norm_stderr": 0.0248480182638752 + }, + 
"harness|ko_mmlu_business_ethics|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.3884297520661157, + "acc_stderr": 0.04449270350068382, + "acc_norm": 0.3884297520661157, + "acc_norm_stderr": 0.04449270350068382 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.21052631578947367, + "acc_stderr": 0.03317672787533157, + "acc_norm": 0.21052631578947367, + "acc_norm_stderr": 0.03317672787533157 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.27124183006535946, + "acc_stderr": 0.0179866153040303, + "acc_norm": 0.27124183006535946, + "acc_norm_stderr": 0.0179866153040303 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2553191489361702, + "acc_stderr": 0.026011992930902002, + "acc_norm": 0.2553191489361702, + "acc_norm_stderr": 0.026011992930902002 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.29464285714285715, + "acc_stderr": 0.043270409325787296, + "acc_norm": 0.29464285714285715, + "acc_norm_stderr": 0.043270409325787296 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.18518518518518517, + "acc_stderr": 0.026491914727355143, + "acc_norm": 0.18518518518518517, + "acc_norm_stderr": 0.026491914727355143 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.19, + "acc_stderr": 0.03942772444036623, + "acc_norm": 0.19, + "acc_norm_stderr": 0.03942772444036623 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.21691176470588236, + "acc_stderr": 0.025035845227711268, + "acc_norm": 0.21691176470588236, + 
"acc_norm_stderr": 0.025035845227711268 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.24897959183673468, + "acc_stderr": 0.027682979522960238, + "acc_norm": 0.24897959183673468, + "acc_norm_stderr": 0.027682979522960238 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2742616033755274, + "acc_stderr": 0.029041333510598025, + "acc_norm": 0.2742616033755274, + "acc_norm_stderr": 0.029041333510598025 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.27053455019556716, + "acc_stderr": 0.011345996743539252, + "acc_norm": 0.27053455019556716, + "acc_norm_stderr": 0.011345996743539252 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.0309645179269234, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.0309645179269234 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2787878787878788, + "acc_stderr": 0.035014387062967806, + "acc_norm": 0.2787878787878788, + "acc_norm_stderr": 0.035014387062967806 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2668298653610771, + "mc1_stderr": 0.01548369193923727, + "mc2": 0.409502544261879, + "mc2_stderr": 0.015251835396178985 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.23730814639905548, + "acc_stderr": 0.014626677837186226, + "acc_norm": 0.38961038961038963, + "acc_norm_stderr": 0.016766161671893497 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + 
"harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + 
"harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "gemmathon/gemma-pro-3.1b-ko-v0.1", + "model_sha": "d74c0e53f7f82fe528a5af1b8a2c23d7a5bed717", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/gemmathon/gemma-pro-3.1b-ko-v0.5/result_2024-04-09 09:07:12.json b/gemmathon/gemma-pro-3.1b-ko-v0.5/result_2024-04-09 09:07:12.json new file mode 100644 index 0000000000000000000000000000000000000000..61a9f13a5988f9c34194da96dffe3ce08e06fa57 --- /dev/null +++ b/gemmathon/gemma-pro-3.1b-ko-v0.5/result_2024-04-09 09:07:12.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.27047781569965873, + "acc_stderr": 0.012980954547659556, + "acc_norm": 0.3216723549488055, + "acc_norm_stderr": 0.013650488084494164 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3489344752041426, + "acc_stderr": 0.0047565909615765916, + "acc_norm": 0.4382593108942442, + "acc_norm_stderr": 0.004951594063272054 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.15789473684210525, + "acc_stderr": 0.027966785859160882, + "acc_norm": 0.15789473684210525, + "acc_norm_stderr": 0.027966785859160882 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2912621359223301, + "acc_stderr": 0.044986763205729224, + "acc_norm": 0.2912621359223301, + "acc_norm_stderr": 0.044986763205729224 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2988505747126437, + "acc_stderr": 0.016369256815093117, + "acc_norm": 0.2988505747126437, + "acc_norm_stderr": 0.016369256815093117 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34074074074074073, + "acc_stderr": 0.04094376269996794, + "acc_norm": 0.34074074074074073, + "acc_norm_stderr": 0.04094376269996794 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.24, + 
"acc_stderr": 0.04292346959909283, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.37872340425531914, + "acc_stderr": 0.031709956060406545, + "acc_norm": 0.37872340425531914, + "acc_norm_stderr": 0.031709956060406545 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.35542168674698793, + "acc_stderr": 0.03726214354322415, + "acc_norm": 0.35542168674698793, + "acc_norm_stderr": 0.03726214354322415 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3086816720257235, + "acc_stderr": 0.02623696588115325, + "acc_norm": 0.3086816720257235, + "acc_norm_stderr": 0.02623696588115325 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.2914798206278027, + "acc_stderr": 0.030500283176545906, + "acc_norm": 0.2914798206278027, + "acc_norm_stderr": 0.030500283176545906 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.40458015267175573, + "acc_stderr": 0.043046937953806645, + "acc_norm": 0.40458015267175573, + "acc_norm_stderr": 0.043046937953806645 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.029620227874790486, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.029620227874790486 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.30344827586206896, + "acc_stderr": 0.038312260488503336, + "acc_norm": 0.30344827586206896, + "acc_norm_stderr": 0.038312260488503336 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.16666666666666666, + "acc_stderr": 0.03708284662416542, + "acc_norm": 0.16666666666666666, + "acc_norm_stderr": 0.03708284662416542 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.25210084033613445, + "acc_stderr": 0.028205545033277723, + "acc_norm": 0.25210084033613445, + "acc_norm_stderr": 0.028205545033277723 + }, + 
"harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2923076923076923, + "acc_stderr": 0.02306043838085774, + "acc_norm": 0.2923076923076923, + "acc_norm_stderr": 0.02306043838085774 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.28703703703703703, + "acc_stderr": 0.043733130409147614, + "acc_norm": 0.28703703703703703, + "acc_norm_stderr": 0.043733130409147614 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.26108374384236455, + "acc_stderr": 0.03090379695211447, + "acc_norm": 0.26108374384236455, + "acc_norm_stderr": 0.03090379695211447 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.23870967741935484, + "acc_stderr": 0.024251071262208834, + "acc_norm": 0.23870967741935484, + "acc_norm_stderr": 0.024251071262208834 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2948717948717949, + "acc_stderr": 0.029872577708891148, + "acc_norm": 0.2948717948717949, + "acc_norm_stderr": 0.029872577708891148 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2679245283018868, + "acc_stderr": 0.027257260322494845, + "acc_norm": 0.2679245283018868, + "acc_norm_stderr": 0.027257260322494845 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.3090909090909091, + "acc_stderr": 0.044262946482000985, + "acc_norm": 0.3090909090909091, + "acc_norm_stderr": 0.044262946482000985 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.02671924078371217, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.02671924078371217 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2582781456953642, + "acc_stderr": 0.035737053147634576, + "acc_norm": 
0.2582781456953642, + "acc_norm_stderr": 0.035737053147634576 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.373134328358209, + "acc_stderr": 0.034198326081760065, + "acc_norm": 0.373134328358209, + "acc_norm_stderr": 0.034198326081760065 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2023121387283237, + "acc_stderr": 0.03063114553919882, + "acc_norm": 0.2023121387283237, + "acc_norm_stderr": 0.03063114553919882 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.24867724867724866, + "acc_stderr": 0.022261817692400175, + "acc_norm": 0.24867724867724866, + "acc_norm_stderr": 0.022261817692400175 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2986111111111111, + "acc_stderr": 0.03827052357950756, + "acc_norm": 0.2986111111111111, + "acc_norm_stderr": 0.03827052357950756 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036845, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036845 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2861271676300578, + "acc_stderr": 0.024332146779134114, + "acc_norm": 0.2861271676300578, + "acc_norm_stderr": 0.024332146779134114 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2331288343558282, + "acc_stderr": 0.03322015795776741, + "acc_norm": 0.2331288343558282, + "acc_norm_stderr": 0.03322015795776741 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3271604938271605, + "acc_stderr": 0.026105673861409818, + "acc_norm": 0.3271604938271605, + "acc_norm_stderr": 0.026105673861409818 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.18652849740932642, + "acc_stderr": 0.02811209121011745, + 
"acc_norm": 0.18652849740932642, + "acc_norm_stderr": 0.02811209121011745 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.21929824561403508, + "acc_stderr": 0.03892431106518753, + "acc_norm": 0.21929824561403508, + "acc_norm_stderr": 0.03892431106518753 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.27706422018348625, + "acc_stderr": 0.019188482590169535, + "acc_norm": 0.27706422018348625, + "acc_norm_stderr": 0.019188482590169535 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.20634920634920634, + "acc_stderr": 0.036196045241242494, + "acc_norm": 0.20634920634920634, + "acc_norm_stderr": 0.036196045241242494 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2973856209150327, + "acc_stderr": 0.026173908506718576, + "acc_norm": 0.2973856209150327, + "acc_norm_stderr": 0.026173908506718576 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.36363636363636365, + "acc_stderr": 0.04391326286724071, + "acc_norm": 0.36363636363636365, + "acc_norm_stderr": 0.04391326286724071 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.03459777606810535, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.03459777606810535 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.26633986928104575, + "acc_stderr": 0.017883188134667192, + "acc_norm": 0.26633986928104575, + "acc_norm_stderr": 0.017883188134667192 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.28368794326241137, + "acc_stderr": 0.026891709428343957, + "acc_norm": 0.28368794326241137, + "acc_norm_stderr": 0.026891709428343957 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25, + "acc_stderr": 0.04109974682633932, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04109974682633932 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 
0.2361111111111111, + "acc_stderr": 0.028963702570791016, + "acc_norm": 0.2361111111111111, + "acc_norm_stderr": 0.028963702570791016 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847415, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847415 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.27205882352941174, + "acc_stderr": 0.02703304115168146, + "acc_norm": 0.27205882352941174, + "acc_norm_stderr": 0.02703304115168146 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.1836734693877551, + "acc_stderr": 0.024789071332007636, + "acc_norm": 0.1836734693877551, + "acc_norm_stderr": 0.024789071332007636 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.030685820596610798, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.030685820596610798 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.24771838331160365, + "acc_stderr": 0.011025499291443733, + "acc_norm": 0.24771838331160365, + "acc_norm_stderr": 0.011025499291443733 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.22058823529411764, + "acc_stderr": 0.02910225438967408, + "acc_norm": 0.22058823529411764, + "acc_norm_stderr": 0.02910225438967408 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2606060606060606, + "acc_stderr": 0.03427743175816524, + "acc_norm": 0.2606060606060606, + "acc_norm_stderr": 0.03427743175816524 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2484700122399021, + "mc1_stderr": 0.015127427096520693, + "mc2": 0.4035786507693237, + "mc2_stderr": 
0.0149835296244634 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.24911452184179456, + "acc_stderr": 0.014869665316691266, + "acc_norm": 0.38488783943329397, + "acc_norm_stderr": 0.016728579701498665 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 
1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "gemmathon/gemma-pro-3.1b-ko-v0.5", + "model_sha": "04d10e1810a33f877460e1c8ae206daf525a5a3d", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/gemmathon/test-last/result_2024-03-24 08:32:53.json b/gemmathon/test-last/result_2024-03-24 08:32:53.json new file mode 100644 index 0000000000000000000000000000000000000000..02116f5e19e3b5e1a037af61d2b6568c7c0a524d --- /dev/null +++ b/gemmathon/test-last/result_2024-03-24 08:32:53.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2790102389078498, + "acc_stderr": 0.013106784883601346, + "acc_norm": 0.3464163822525597, + "acc_norm_stderr": 0.013905011180063246 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3510256920932085, + "acc_stderr": 0.004763155068744884, + "acc_norm": 0.45200159330810596, + "acc_norm_stderr": 0.00496673681101049 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 
0.3157894736842105, + "acc_stderr": 0.03565079670708311, + "acc_norm": 0.3157894736842105, + "acc_norm_stderr": 0.03565079670708311 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4174757281553398, + "acc_stderr": 0.048828405482122375, + "acc_norm": 0.4174757281553398, + "acc_norm_stderr": 0.048828405482122375 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.37420178799489145, + "acc_stderr": 0.017304805072252044, + "acc_norm": 0.37420178799489145, + "acc_norm_stderr": 0.017304805072252044 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.362962962962963, + "acc_stderr": 0.041539484047424004, + "acc_norm": 0.362962962962963, + "acc_norm_stderr": 0.041539484047424004 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.33191489361702126, + "acc_stderr": 0.030783736757745647, + "acc_norm": 0.33191489361702126, + "acc_norm_stderr": 0.030783736757745647 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3313253012048193, + "acc_stderr": 0.03664314777288088, + "acc_norm": 0.3313253012048193, + "acc_norm_stderr": 0.03664314777288088 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.40514469453376206, + "acc_stderr": 0.02788238379132595, + "acc_norm": 0.40514469453376206, + "acc_norm_stderr": 0.02788238379132595 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.2600896860986547, + "acc_stderr": 0.02944249558585747, + "acc_norm": 0.2600896860986547, + "acc_norm_stderr": 0.02944249558585747 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4122137404580153, + "acc_stderr": 0.04317171194870254, + "acc_norm": 0.4122137404580153, + "acc_norm_stderr": 0.04317171194870254 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 
0.37373737373737376, + "acc_stderr": 0.03446897738659333, + "acc_norm": 0.37373737373737376, + "acc_norm_stderr": 0.03446897738659333 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4068965517241379, + "acc_stderr": 0.04093793981266237, + "acc_norm": 0.4068965517241379, + "acc_norm_stderr": 0.04093793981266237 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.04023382273617747, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.04023382273617747 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3487394957983193, + "acc_stderr": 0.030956636328566545, + "acc_norm": 0.3487394957983193, + "acc_norm_stderr": 0.030956636328566545 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2794871794871795, + "acc_stderr": 0.022752388839776816, + "acc_norm": 0.2794871794871795, + "acc_norm_stderr": 0.022752388839776816 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04557239513497751, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04557239513497751 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.32019704433497537, + "acc_stderr": 0.03282649385304151, + "acc_norm": 0.32019704433497537, + "acc_norm_stderr": 0.03282649385304151 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3193548387096774, + "acc_stderr": 0.026522709674667765, + "acc_norm": 0.3193548387096774, + "acc_norm_stderr": 0.026522709674667765 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5128205128205128, + "acc_stderr": 0.032745319388423504, + "acc_norm": 0.5128205128205128, + "acc_norm_stderr": 0.032745319388423504 + }, + 
"harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.35471698113207545, + "acc_stderr": 0.029445175328199586, + "acc_norm": 0.35471698113207545, + "acc_norm_stderr": 0.029445175328199586 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.41818181818181815, + "acc_stderr": 0.04724577405731571, + "acc_norm": 0.41818181818181815, + "acc_norm_stderr": 0.04724577405731571 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.026962424325073828, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.026962424325073828 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2781456953642384, + "acc_stderr": 0.03658603262763744, + "acc_norm": 0.2781456953642384, + "acc_norm_stderr": 0.03658603262763744 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.4079601990049751, + "acc_stderr": 0.034751163651940926, + "acc_norm": 0.4079601990049751, + "acc_norm_stderr": 0.034751163651940926 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.24855491329479767, + "acc_stderr": 0.03295304696818318, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 0.03295304696818318 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.022569897074918417, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.022569897074918417 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2638888888888889, + "acc_stderr": 0.03685651095897532, + "acc_norm": 0.2638888888888889, + "acc_norm_stderr": 0.03685651095897532 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.3352601156069364, + "acc_stderr": 0.025416003773165552, + "acc_norm": 0.3352601156069364, + 
"acc_norm_stderr": 0.025416003773165552 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2883435582822086, + "acc_stderr": 0.035590395316173425, + "acc_norm": 0.2883435582822086, + "acc_norm_stderr": 0.035590395316173425 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.38271604938271603, + "acc_stderr": 0.02704453813840262, + "acc_norm": 0.38271604938271603, + "acc_norm_stderr": 0.02704453813840262 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.23316062176165803, + "acc_stderr": 0.030516111371476008, + "acc_norm": 0.23316062176165803, + "acc_norm_stderr": 0.030516111371476008 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.042270544512322, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.042270544512322 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3174311926605505, + "acc_stderr": 0.0199571521984605, + "acc_norm": 0.3174311926605505, + "acc_norm_stderr": 0.0199571521984605 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.18253968253968253, + "acc_stderr": 0.03455071019102148, + "acc_norm": 0.18253968253968253, + "acc_norm_stderr": 0.03455071019102148 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3660130718954248, + "acc_stderr": 0.027582811415159614, + "acc_norm": 0.3660130718954248, + "acc_norm_stderr": 0.027582811415159614 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.4462809917355372, + "acc_stderr": 0.0453793517794788, + "acc_norm": 0.4462809917355372, + "acc_norm_stderr": 0.0453793517794788 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3684210526315789, + "acc_stderr": 0.03925523381052932, + "acc_norm": 
0.3684210526315789, + "acc_norm_stderr": 0.03925523381052932 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2957516339869281, + "acc_stderr": 0.018463154132632817, + "acc_norm": 0.2957516339869281, + "acc_norm_stderr": 0.018463154132632817 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.29432624113475175, + "acc_stderr": 0.027187127011503803, + "acc_norm": 0.29432624113475175, + "acc_norm_stderr": 0.027187127011503803 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.36607142857142855, + "acc_stderr": 0.04572372358737431, + "acc_norm": 0.36607142857142855, + "acc_norm_stderr": 0.04572372358737431 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.22685185185185186, + "acc_stderr": 0.028561650102422273, + "acc_norm": 0.22685185185185186, + "acc_norm_stderr": 0.028561650102422273 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.17, + "acc_stderr": 0.03775251680686371, + "acc_norm": 0.17, + "acc_norm_stderr": 0.03775251680686371 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3125, + "acc_stderr": 0.02815637344037142, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.02815637344037142 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.40816326530612246, + "acc_stderr": 0.03146465712827424, + "acc_norm": 0.40816326530612246, + "acc_norm_stderr": 0.03146465712827424 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.29957805907172996, + "acc_stderr": 0.029818024749753102, + "acc_norm": 0.29957805907172996, + "acc_norm_stderr": 0.029818024749753102 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 
0.27444589308996087, + "acc_stderr": 0.011397043163078154, + "acc_norm": 0.27444589308996087, + "acc_norm_stderr": 0.011397043163078154 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.030190282453501947, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.030190282453501947 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.32727272727272727, + "acc_stderr": 0.03663974994391242, + "acc_norm": 0.32727272727272727, + "acc_norm_stderr": 0.03663974994391242 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.23745410036719705, + "mc1_stderr": 0.014896277441041867, + "mc2": 0.3872239218172927, + "mc2_stderr": 0.01477363934461748 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2514757969303424, + "acc_stderr": 0.014916462437232238, + "acc_norm": 0.3730814639905549, + "acc_norm_stderr": 0.01662731827513743 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + 
"harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "gemmathon/test-last", + "model_sha": "f058eb51ef31a577a52053c57fb3b70f2b6a7ac7", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/gemmathon/test-pb-v0/result_2024-03-24 02:37:58.json 
b/gemmathon/test-pb-v0/result_2024-03-24 02:37:58.json new file mode 100644 index 0000000000000000000000000000000000000000..8b352375d0f7450dae9090d743f6aa9bbd62967f --- /dev/null +++ b/gemmathon/test-pb-v0/result_2024-03-24 02:37:58.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.27474402730375425, + "acc_stderr": 0.013044617212771227, + "acc_norm": 0.34812286689419797, + "acc_norm_stderr": 0.013921008595179344 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3487353116908982, + "acc_stderr": 0.004755960559929154, + "acc_norm": 0.4431388169687313, + "acc_norm_stderr": 0.004957410545559411 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.29239766081871343, + "acc_stderr": 0.03488647713457923, + "acc_norm": 0.29239766081871343, + "acc_norm_stderr": 0.03488647713457923 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.30097087378640774, + "acc_stderr": 0.045416094465039476, + "acc_norm": 0.30097087378640774, + "acc_norm_stderr": 0.045416094465039476 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3397190293742018, + "acc_stderr": 0.01693639411430165, + "acc_norm": 0.3397190293742018, + "acc_norm_stderr": 0.01693639411430165 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.04171654161354544, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.04171654161354544 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3148936170212766, + "acc_stderr": 0.03036358219723816, + "acc_norm": 0.3148936170212766, + "acc_norm_stderr": 0.03036358219723816 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3253012048192771, + "acc_stderr": 0.03647168523683228, + "acc_norm": 0.3253012048192771, + "acc_norm_stderr": 0.03647168523683228 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3408360128617363, + "acc_stderr": 
0.026920841260776162, + "acc_norm": 0.3408360128617363, + "acc_norm_stderr": 0.026920841260776162 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3183856502242152, + "acc_stderr": 0.03126580522513713, + "acc_norm": 0.3183856502242152, + "acc_norm_stderr": 0.03126580522513713 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2748091603053435, + "acc_stderr": 0.039153454088478354, + "acc_norm": 0.2748091603053435, + "acc_norm_stderr": 0.039153454088478354 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3383838383838384, + "acc_stderr": 0.03371124142626303, + "acc_norm": 0.3383838383838384, + "acc_norm_stderr": 0.03371124142626303 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3310344827586207, + "acc_stderr": 0.039215453124671215, + "acc_norm": 0.3310344827586207, + "acc_norm_stderr": 0.039215453124671215 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.13725490196078433, + "acc_stderr": 0.03424084669891521, + "acc_norm": 0.13725490196078433, + "acc_norm_stderr": 0.03424084669891521 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.028657491285071963, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.028657491285071963 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.258974358974359, + "acc_stderr": 0.02221110681006167, + "acc_norm": 0.258974358974359, + "acc_norm_stderr": 0.02221110681006167 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932269, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932269 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3055555555555556, + 
"acc_stderr": 0.044531975073749834, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.044531975073749834 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3251231527093596, + "acc_stderr": 0.032957975663112704, + "acc_norm": 0.3251231527093596, + "acc_norm_stderr": 0.032957975663112704 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3, + "acc_stderr": 0.026069362295335134, + "acc_norm": 0.3, + "acc_norm_stderr": 0.026069362295335134 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.3803418803418803, + "acc_stderr": 0.03180425204384099, + "acc_norm": 0.3803418803418803, + "acc_norm_stderr": 0.03180425204384099 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2792452830188679, + "acc_stderr": 0.02761116340239972, + "acc_norm": 0.2792452830188679, + "acc_norm_stderr": 0.02761116340239972 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.3090909090909091, + "acc_stderr": 0.044262946482000985, + "acc_norm": 0.3090909090909091, + "acc_norm_stderr": 0.044262946482000985 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.02684205787383371, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.02684205787383371 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.03780445850526733, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.03780445850526733 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.3681592039800995, + "acc_stderr": 0.034104105654953025, + "acc_norm": 0.3681592039800995, + "acc_norm_stderr": 0.034104105654953025 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.26011560693641617, + "acc_stderr": 0.03345036916788992, + "acc_norm": 0.26011560693641617, + "acc_norm_stderr": 0.03345036916788992 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.24867724867724866, + "acc_stderr": 0.022261817692400175, + "acc_norm": 0.24867724867724866, + "acc_norm_stderr": 0.022261817692400175 + }, + 
"harness|ko_mmlu_college_biology|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.03745554791462457, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.03745554791462457 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.18, + "acc_stderr": 0.038612291966536955, + "acc_norm": 0.18, + "acc_norm_stderr": 0.038612291966536955 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2832369942196532, + "acc_stderr": 0.02425790170532337, + "acc_norm": 0.2832369942196532, + "acc_norm_stderr": 0.02425790170532337 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.26993865030674846, + "acc_stderr": 0.03487825168497892, + "acc_norm": 0.26993865030674846, + "acc_norm_stderr": 0.03487825168497892 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.345679012345679, + "acc_stderr": 0.026462487777001886, + "acc_norm": 0.345679012345679, + "acc_norm_stderr": 0.026462487777001886 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.24352331606217617, + "acc_stderr": 0.03097543638684543, + "acc_norm": 0.24352331606217617, + "acc_norm_stderr": 0.03097543638684543 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.042270544512321984, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.042270544512321984 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.27522935779816515, + "acc_stderr": 0.019149093743155196, + "acc_norm": 0.27522935779816515, + "acc_norm_stderr": 0.019149093743155196 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.20634920634920634, + "acc_stderr": 0.03619604524124248, + "acc_norm": 0.20634920634920634, + "acc_norm_stderr": 
0.03619604524124248 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.02656892101545714, + "acc_norm": 0.3137254901960784, + "acc_norm_stderr": 0.02656892101545714 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.3305785123966942, + "acc_stderr": 0.04294340845212094, + "acc_norm": 0.3305785123966942, + "acc_norm_stderr": 0.04294340845212094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.03459777606810538, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.03459777606810538 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.29901960784313725, + "acc_stderr": 0.018521756215423027, + "acc_norm": 0.29901960784313725, + "acc_norm_stderr": 0.018521756215423027 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2801418439716312, + "acc_stderr": 0.02678917235114025, + "acc_norm": 0.2801418439716312, + "acc_norm_stderr": 0.02678917235114025 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25, + "acc_stderr": 0.04109974682633932, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04109974682633932 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.19444444444444445, + "acc_stderr": 0.026991454502036733, + "acc_norm": 0.19444444444444445, + "acc_norm_stderr": 0.026991454502036733 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 
0.04408440022768079 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.20955882352941177, + "acc_stderr": 0.02472311040767706, + "acc_norm": 0.20955882352941177, + "acc_norm_stderr": 0.02472311040767706 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2979591836734694, + "acc_stderr": 0.029279567411065677, + "acc_norm": 0.2979591836734694, + "acc_norm_stderr": 0.029279567411065677 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.03068582059661081, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.03068582059661081 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.27640156453715775, + "acc_stderr": 0.011422153194553574, + "acc_norm": 0.27640156453715775, + "acc_norm_stderr": 0.011422153194553574 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.25980392156862747, + "acc_stderr": 0.030778554678693264, + "acc_norm": 0.25980392156862747, + "acc_norm_stderr": 0.030778554678693264 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2909090909090909, + "acc_stderr": 0.03546563019624336, + "acc_norm": 0.2909090909090909, + "acc_norm_stderr": 0.03546563019624336 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24969400244798043, + "mc1_stderr": 0.015152286907148125, + "mc2": 0.40764400427797826, + "mc2_stderr": 0.015277729147031637 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.26564344746162927, + "acc_stderr": 0.015185107107791253, + "acc_norm": 0.35182998819362454, + "acc_norm_stderr": 0.016418206451218054 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + 
"harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + 
"harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "gemmathon/test-pb-v0", + "model_sha": "661337a0751c964a38a5aa073353a6e339131095", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/gemmathon/test-tt-last/result_2024-03-24 08:33:21.json b/gemmathon/test-tt-last/result_2024-03-24 08:33:21.json new file mode 100644 index 0000000000000000000000000000000000000000..8df51721db7827b61f30fee02a776f97f0533032 --- /dev/null +++ b/gemmathon/test-tt-last/result_2024-03-24 08:33:21.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3148464163822526, + "acc_stderr": 0.013572657703084948, + "acc_norm": 0.35921501706484643, + "acc_norm_stderr": 0.014020224155839157 + }, + "harness|ko_hellaswag|10": { + "acc": 0.36207926707827126, + "acc_stderr": 0.00479619358493008, + "acc_norm": 0.4506074487153953, + "acc_norm_stderr": 0.004965375341643134 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.391812865497076, + "acc_stderr": 0.037439798259264016, + "acc_norm": 0.391812865497076, + "acc_norm_stderr": 0.037439798259264016 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.3106796116504854, + "acc_stderr": 0.04582124160161549, + "acc_norm": 0.3106796116504854, + "acc_norm_stderr": 0.04582124160161549 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.34227330779054915, + "acc_stderr": 0.016967031766413624, + "acc_norm": 0.34227330779054915, + "acc_norm_stderr": 0.016967031766413624 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.362962962962963, + "acc_stderr": 0.041539484047424, + 
"acc_norm": 0.362962962962963, + "acc_norm_stderr": 0.041539484047424 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.33617021276595743, + "acc_stderr": 0.030881618520676942, + "acc_norm": 0.33617021276595743, + "acc_norm_stderr": 0.030881618520676942 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3072289156626506, + "acc_stderr": 0.035915667978246635, + "acc_norm": 0.3072289156626506, + "acc_norm_stderr": 0.035915667978246635 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.33762057877813506, + "acc_stderr": 0.026858825879488558, + "acc_norm": 0.33762057877813506, + "acc_norm_stderr": 0.026858825879488558 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3004484304932735, + "acc_stderr": 0.030769352008229132, + "acc_norm": 0.3004484304932735, + "acc_norm_stderr": 0.030769352008229132 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3053435114503817, + "acc_stderr": 0.04039314978724561, + "acc_norm": 0.3053435114503817, + "acc_norm_stderr": 0.04039314978724561 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.2828282828282828, + "acc_stderr": 0.03208779558786751, + "acc_norm": 0.2828282828282828, + "acc_norm_stderr": 0.03208779558786751 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3931034482758621, + "acc_stderr": 0.040703290137070705, + "acc_norm": 0.3931034482758621, + "acc_norm_stderr": 0.040703290137070705 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.16666666666666666, + "acc_stderr": 0.03708284662416544, + "acc_norm": 0.16666666666666666, + "acc_norm_stderr": 0.03708284662416544 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3067226890756303, + 
"acc_stderr": 0.029953823891887044, + "acc_norm": 0.3067226890756303, + "acc_norm_stderr": 0.029953823891887044 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2717948717948718, + "acc_stderr": 0.022556551010132368, + "acc_norm": 0.2717948717948718, + "acc_norm_stderr": 0.022556551010132368 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3425925925925926, + "acc_stderr": 0.045879047413018105, + "acc_norm": 0.3425925925925926, + "acc_norm_stderr": 0.045879047413018105 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3497536945812808, + "acc_stderr": 0.03355400904969566, + "acc_norm": 0.3497536945812808, + "acc_norm_stderr": 0.03355400904969566 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2838709677419355, + "acc_stderr": 0.025649381063029265, + "acc_norm": 0.2838709677419355, + "acc_norm_stderr": 0.025649381063029265 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.47435897435897434, + "acc_stderr": 0.03271298896811158, + "acc_norm": 0.47435897435897434, + "acc_norm_stderr": 0.03271298896811158 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3018867924528302, + "acc_stderr": 0.028254200344438662, + "acc_norm": 0.3018867924528302, + "acc_norm_stderr": 0.028254200344438662 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2909090909090909, + "acc_stderr": 0.04350271442923243, + "acc_norm": 0.2909090909090909, + "acc_norm_stderr": 0.04350271442923243 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.02684205787383371, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.02684205787383371 + }, + 
"harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.037101857261199946, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.037101857261199946 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.32338308457711445, + "acc_stderr": 0.03307615947979033, + "acc_norm": 0.32338308457711445, + "acc_norm_stderr": 0.03307615947979033 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.24277456647398843, + "acc_stderr": 0.0326926380614177, + "acc_norm": 0.24277456647398843, + "acc_norm_stderr": 0.0326926380614177 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2751322751322751, + "acc_stderr": 0.023000086859068652, + "acc_norm": 0.2751322751322751, + "acc_norm_stderr": 0.023000086859068652 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.037455547914624576, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.037455547914624576 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909282, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909282 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.32947976878612717, + "acc_stderr": 0.025305258131879723, + "acc_norm": 0.32947976878612717, + "acc_norm_stderr": 0.025305258131879723 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3128834355828221, + "acc_stderr": 0.036429145782924055, + "acc_norm": 0.3128834355828221, + "acc_norm_stderr": 0.036429145782924055 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.026571483480719964, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.026571483480719964 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + 
}, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.24870466321243523, + "acc_stderr": 0.031195840877700304, + "acc_norm": 0.24870466321243523, + "acc_norm_stderr": 0.031195840877700304 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.22807017543859648, + "acc_stderr": 0.03947152782669415, + "acc_norm": 0.22807017543859648, + "acc_norm_stderr": 0.03947152782669415 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.28623853211009176, + "acc_stderr": 0.01937943662891997, + "acc_norm": 0.28623853211009176, + "acc_norm_stderr": 0.01937943662891997 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.31746031746031744, + "acc_stderr": 0.04163453031302859, + "acc_norm": 0.31746031746031744, + "acc_norm_stderr": 0.04163453031302859 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.02791405551046802, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.02791405551046802 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.35537190082644626, + "acc_stderr": 0.04369236326573981, + "acc_norm": 0.35537190082644626, + "acc_norm_stderr": 0.04369236326573981 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.2565789473684211, + "acc_stderr": 0.0355418036802569, + "acc_norm": 0.2565789473684211, + "acc_norm_stderr": 0.0355418036802569 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2761437908496732, + "acc_stderr": 0.018087276935663133, + "acc_norm": 0.2761437908496732, + "acc_norm_stderr": 0.018087276935663133 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2730496453900709, + "acc_stderr": 0.026577860943307854, + "acc_norm": 0.2730496453900709, + "acc_norm_stderr": 0.026577860943307854 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3392857142857143, + "acc_stderr": 0.04493949068613539, + 
"acc_norm": 0.3392857142857143, + "acc_norm_stderr": 0.04493949068613539 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.028353212866863438, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.028353212866863438 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24581005586592178, + "acc_stderr": 0.014400296429225608, + "acc_norm": 0.24581005586592178, + "acc_norm_stderr": 0.014400296429225608 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.16911764705882354, + "acc_stderr": 0.02277086801011301, + "acc_norm": 0.16911764705882354, + "acc_norm_stderr": 0.02277086801011301 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2653061224489796, + "acc_stderr": 0.028263889943784596, + "acc_norm": 0.2653061224489796, + "acc_norm_stderr": 0.028263889943784596 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2742616033755274, + "acc_stderr": 0.02904133351059804, + "acc_norm": 0.2742616033755274, + "acc_norm_stderr": 0.02904133351059804 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.28226857887874834, + "acc_stderr": 0.01149585217624195, + "acc_norm": 0.28226857887874834, + "acc_norm_stderr": 0.01149585217624195 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.25980392156862747, + "acc_stderr": 0.03077855467869326, + "acc_norm": 0.25980392156862747, + "acc_norm_stderr": 0.03077855467869326 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2787878787878788, + "acc_stderr": 0.035014387062967806, + "acc_norm": 0.2787878787878788, + "acc_norm_stderr": 0.035014387062967806 + }, + 
"harness|ko_truthfulqa_mc|0": { + "mc1": 0.24724602203182375, + "mc1_stderr": 0.015102404797359649, + "mc2": 0.3947640607794501, + "mc2_stderr": 0.015177971303835365 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2396694214876033, + "acc_stderr": 0.014676495332267253, + "acc_norm": 0.32113341204250295, + "acc_norm_stderr": 0.016052762579111573 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "gemmathon/test-tt-last", + "model_sha": "86402ced9c21cb2b8222fe95464afa669a2258c9", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/gemmathon/test-twk-last/result_2024-03-24 08:33:15.json b/gemmathon/test-twk-last/result_2024-03-24 08:33:15.json new file mode 100644 index 0000000000000000000000000000000000000000..874a5b99f2f4f7b6b1bf330159e9f0dcece153ba --- /dev/null +++ b/gemmathon/test-twk-last/result_2024-03-24 08:33:15.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2858361774744027, + "acc_stderr": 0.013203196088537365, + "acc_norm": 0.3455631399317406, + "acc_norm_stderr": 0.013896938461145687 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3474407488548098, + "acc_stderr": 0.004751840646730851, + 
"acc_norm": 0.4214299940250946, + "acc_norm_stderr": 0.004927790036726636 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.3742690058479532, + "acc_stderr": 0.03711601185389481, + "acc_norm": 0.3742690058479532, + "acc_norm_stderr": 0.03711601185389481 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4077669902912621, + "acc_stderr": 0.048657775704107696, + "acc_norm": 0.4077669902912621, + "acc_norm_stderr": 0.048657775704107696 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.33077905491698595, + "acc_stderr": 0.016824818462563746, + "acc_norm": 0.33077905491698595, + "acc_norm_stderr": 0.016824818462563746 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34074074074074073, + "acc_stderr": 0.04094376269996794, + "acc_norm": 0.34074074074074073, + "acc_norm_stderr": 0.04094376269996794 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.22, + "acc_stderr": 0.0416333199893227, + "acc_norm": 0.22, + "acc_norm_stderr": 0.0416333199893227 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3659574468085106, + "acc_stderr": 0.03148955829745531, + "acc_norm": 0.3659574468085106, + "acc_norm_stderr": 0.03148955829745531 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3433734939759036, + "acc_stderr": 0.03696584317010602, + "acc_norm": 0.3433734939759036, + "acc_norm_stderr": 0.03696584317010602 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3183279742765273, + "acc_stderr": 0.02645722506781102, + "acc_norm": 0.3183279742765273, + "acc_norm_stderr": 0.02645722506781102 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.336322869955157, + "acc_stderr": 0.031708824268455, + "acc_norm": 0.336322869955157, + "acc_norm_stderr": 0.031708824268455 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.45038167938931295, + "acc_stderr": 0.04363643698524779, + "acc_norm": 0.45038167938931295, + "acc_norm_stderr": 0.04363643698524779 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + 
"acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.30808080808080807, + "acc_stderr": 0.032894773300986155, + "acc_norm": 0.30808080808080807, + "acc_norm_stderr": 0.032894773300986155 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3724137931034483, + "acc_stderr": 0.040287315329475604, + "acc_norm": 0.3724137931034483, + "acc_norm_stderr": 0.040287315329475604 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.12745098039215685, + "acc_stderr": 0.03318224921942076, + "acc_norm": 0.12745098039215685, + "acc_norm_stderr": 0.03318224921942076 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.2773109243697479, + "acc_stderr": 0.029079374539480007, + "acc_norm": 0.2773109243697479, + "acc_norm_stderr": 0.029079374539480007 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2743589743589744, + "acc_stderr": 0.022622765767493214, + "acc_norm": 0.2743589743589744, + "acc_norm_stderr": 0.022622765767493214 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04557239513497751, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04557239513497751 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3497536945812808, + "acc_stderr": 0.03355400904969566, + "acc_norm": 0.3497536945812808, + "acc_norm_stderr": 0.03355400904969566 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3096774193548387, + "acc_stderr": 0.026302774983517418, + "acc_norm": 0.3096774193548387, + "acc_norm_stderr": 0.026302774983517418 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.42735042735042733, 
+ "acc_stderr": 0.03240847393516326, + "acc_norm": 0.42735042735042733, + "acc_norm_stderr": 0.03240847393516326 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.33962264150943394, + "acc_stderr": 0.029146904747798335, + "acc_norm": 0.33962264150943394, + "acc_norm_stderr": 0.029146904747798335 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.39090909090909093, + "acc_stderr": 0.04673752333670237, + "acc_norm": 0.39090909090909093, + "acc_norm_stderr": 0.04673752333670237 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24814814814814815, + "acc_stderr": 0.0263357394040558, + "acc_norm": 0.24814814814814815, + "acc_norm_stderr": 0.0263357394040558 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2582781456953642, + "acc_stderr": 0.035737053147634576, + "acc_norm": 0.2582781456953642, + "acc_norm_stderr": 0.035737053147634576 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.32338308457711445, + "acc_stderr": 0.03307615947979035, + "acc_norm": 0.32338308457711445, + "acc_norm_stderr": 0.03307615947979035 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.20809248554913296, + "acc_stderr": 0.030952890217749895, + "acc_norm": 0.20809248554913296, + "acc_norm_stderr": 0.030952890217749895 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2671957671957672, + "acc_stderr": 0.022789673145776564, + "acc_norm": 0.2671957671957672, + "acc_norm_stderr": 0.022789673145776564 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2916666666666667, + "acc_stderr": 0.03800968060554858, + "acc_norm": 0.2916666666666667, + "acc_norm_stderr": 0.03800968060554858 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.22, + "acc_stderr": 0.041633319989322695, + "acc_norm": 0.22, + "acc_norm_stderr": 0.041633319989322695 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + 
"harness|ko_mmlu_moral_disputes|5": { + "acc": 0.3236994219653179, + "acc_stderr": 0.025190181327608415, + "acc_norm": 0.3236994219653179, + "acc_norm_stderr": 0.025190181327608415 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.25766871165644173, + "acc_stderr": 0.03436150827846917, + "acc_norm": 0.25766871165644173, + "acc_norm_stderr": 0.03436150827846917 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.025630824975621337, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.025630824975621337 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.2538860103626943, + "acc_stderr": 0.03141024780565318, + "acc_norm": 0.2538860103626943, + "acc_norm_stderr": 0.03141024780565318 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.040969851398436716, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.040969851398436716 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.29357798165137616, + "acc_stderr": 0.019525151122639663, + "acc_norm": 0.29357798165137616, + "acc_norm_stderr": 0.019525151122639663 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.04134913018303316, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.04134913018303316 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3660130718954248, + "acc_stderr": 0.027582811415159607, + "acc_norm": 0.3660130718954248, + "acc_norm_stderr": 0.027582811415159607 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.32231404958677684, + "acc_stderr": 0.04266416363352168, + "acc_norm": 0.32231404958677684, + 
"acc_norm_stderr": 0.04266416363352168 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.24342105263157895, + "acc_stderr": 0.034923496688842384, + "acc_norm": 0.24342105263157895, + "acc_norm_stderr": 0.034923496688842384 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2826797385620915, + "acc_stderr": 0.018217269552053435, + "acc_norm": 0.2826797385620915, + "acc_norm_stderr": 0.018217269552053435 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.29432624113475175, + "acc_stderr": 0.027187127011503786, + "acc_norm": 0.29432624113475175, + "acc_norm_stderr": 0.027187127011503786 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3482142857142857, + "acc_stderr": 0.04521829902833587, + "acc_norm": 0.3482142857142857, + "acc_norm_stderr": 0.04521829902833587 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.20833333333333334, + "acc_stderr": 0.027696910713093936, + "acc_norm": 0.20833333333333334, + "acc_norm_stderr": 0.027696910713093936 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.24632352941176472, + "acc_stderr": 0.02617343857052, + "acc_norm": 0.24632352941176472, + "acc_norm_stderr": 0.02617343857052 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.22857142857142856, + "acc_stderr": 0.02688214492230775, + "acc_norm": 0.22857142857142856, + "acc_norm_stderr": 0.02688214492230775 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 
0.37130801687763715, + "acc_stderr": 0.0314506860074486, + "acc_norm": 0.37130801687763715, + "acc_norm_stderr": 0.0314506860074486 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2770534550195567, + "acc_stderr": 0.011430462443719671, + "acc_norm": 0.2770534550195567, + "acc_norm_stderr": 0.011430462443719671 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.29901960784313725, + "acc_stderr": 0.03213325717373616, + "acc_norm": 0.29901960784313725, + "acc_norm_stderr": 0.03213325717373616 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3090909090909091, + "acc_stderr": 0.03608541011573967, + "acc_norm": 0.3090909090909091, + "acc_norm_stderr": 0.03608541011573967 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2423500611995104, + "mc1_stderr": 0.01500067437357034, + "mc2": 0.3954337573631033, + "mc2_stderr": 0.01524123230342052 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2384887839433294, + "acc_stderr": 0.014651663985271578, + "acc_norm": 0.3187721369539551, + "acc_norm_stderr": 0.016021427055309578 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + 
"harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "gemmathon/test-twk-last", + "model_sha": "07a194a1f4127bb5ea9aad5854dfa5678ede0c6e", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + 
"num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/genne/eclectus1.1/result_2023-12-26 02:23:09.json b/genne/eclectus1.1/result_2023-12-26 02:23:09.json new file mode 100644 index 0000000000000000000000000000000000000000..5d34b77a93672390b184673d2646f3603cfcff05 --- /dev/null +++ b/genne/eclectus1.1/result_2023-12-26 02:23:09.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3583617747440273, + "acc_stderr": 0.014012883334859862, + "acc_norm": 0.3924914675767918, + "acc_norm_stderr": 0.014269634635670712 + }, + "harness|ko_hellaswag|10": { + "acc": 0.429097789285003, + "acc_stderr": 0.00493935814556132, + "acc_norm": 0.5464050985859391, + "acc_norm_stderr": 0.004968244611429389 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5087719298245614, + "acc_stderr": 0.038342347441649924, + "acc_norm": 0.5087719298245614, + "acc_norm_stderr": 0.038342347441649924 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5145631067961165, + "acc_stderr": 0.049486373240266356, + "acc_norm": 0.5145631067961165, + "acc_norm_stderr": 0.049486373240266356 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5747126436781609, + "acc_stderr": 0.017679225489431457, + "acc_norm": 0.5747126436781609, + "acc_norm_stderr": 0.017679225489431457 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4, + "acc_stderr": 0.04232073695151589, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04232073695151589 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.40425531914893614, + "acc_stderr": 0.03208115750788682, + "acc_norm": 0.40425531914893614, + "acc_norm_stderr": 0.03208115750788682 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4759036144578313, + "acc_stderr": 0.03887971849597264, + "acc_norm": 0.4759036144578313, + 
"acc_norm_stderr": 0.03887971849597264 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5627009646302251, + "acc_stderr": 0.028173917761762906, + "acc_norm": 0.5627009646302251, + "acc_norm_stderr": 0.028173917761762906 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5381165919282511, + "acc_stderr": 0.03346015011973228, + "acc_norm": 0.5381165919282511, + "acc_norm_stderr": 0.03346015011973228 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4351145038167939, + "acc_stderr": 0.04348208051644858, + "acc_norm": 0.4351145038167939, + "acc_norm_stderr": 0.04348208051644858 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.035402943770953675, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.035402943770953675 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.45517241379310347, + "acc_stderr": 0.04149886942192117, + "acc_norm": 0.45517241379310347, + "acc_norm_stderr": 0.04149886942192117 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.18627450980392157, + "acc_stderr": 0.03873958714149354, + "acc_norm": 0.18627450980392157, + "acc_norm_stderr": 0.03873958714149354 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.49159663865546216, + "acc_stderr": 0.03247390276569669, + "acc_norm": 0.49159663865546216, + "acc_norm_stderr": 0.03247390276569669 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4512820512820513, + "acc_stderr": 0.02523038123893483, + "acc_norm": 0.4512820512820513, + "acc_norm_stderr": 0.02523038123893483 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.55, + "acc_stderr": 0.049999999999999996, + "acc_norm": 0.55, + "acc_norm_stderr": 0.049999999999999996 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + 
"acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5740740740740741, + "acc_stderr": 0.0478034362693679, + "acc_norm": 0.5740740740740741, + "acc_norm_stderr": 0.0478034362693679 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4187192118226601, + "acc_stderr": 0.03471192860518468, + "acc_norm": 0.4187192118226601, + "acc_norm_stderr": 0.03471192860518468 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4935483870967742, + "acc_stderr": 0.02844163823354051, + "acc_norm": 0.4935483870967742, + "acc_norm_stderr": 0.02844163823354051 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6581196581196581, + "acc_stderr": 0.031075028526507748, + "acc_norm": 0.6581196581196581, + "acc_norm_stderr": 0.031075028526507748 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.42641509433962266, + "acc_stderr": 0.030437794342983052, + "acc_norm": 0.42641509433962266, + "acc_norm_stderr": 0.030437794342983052 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.04769300568972745, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.04769300568972745 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3037037037037037, + "acc_stderr": 0.02803792996911499, + "acc_norm": 0.3037037037037037, + "acc_norm_stderr": 0.02803792996911499 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3509933774834437, + "acc_stderr": 0.03896981964257375, + "acc_norm": 0.3509933774834437, + "acc_norm_stderr": 0.03896981964257375 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5771144278606966, + "acc_stderr": 0.034932317774212816, + "acc_norm": 0.5771144278606966, + "acc_norm_stderr": 0.034932317774212816 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4046242774566474, + "acc_stderr": 0.03742461193887249, + "acc_norm": 0.4046242774566474, + "acc_norm_stderr": 0.03742461193887249 + }, + "harness|ko_mmlu_elementary_mathematics|5": 
{ + "acc": 0.3492063492063492, + "acc_stderr": 0.024552292209342665, + "acc_norm": 0.3492063492063492, + "acc_norm_stderr": 0.024552292209342665 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3472222222222222, + "acc_stderr": 0.039812405437178615, + "acc_norm": 0.3472222222222222, + "acc_norm_stderr": 0.039812405437178615 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.34, + "acc_stderr": 0.047609522856952344, + "acc_norm": 0.34, + "acc_norm_stderr": 0.047609522856952344 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.65, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.65, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5028901734104047, + "acc_stderr": 0.02691864538323901, + "acc_norm": 0.5028901734104047, + "acc_norm_stderr": 0.02691864538323901 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4601226993865031, + "acc_stderr": 0.03915857291436971, + "acc_norm": 0.4601226993865031, + "acc_norm_stderr": 0.03915857291436971 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5493827160493827, + "acc_stderr": 0.027684721415656203, + "acc_norm": 0.5493827160493827, + "acc_norm_stderr": 0.027684721415656203 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5544041450777202, + "acc_stderr": 0.03587014986075659, + "acc_norm": 0.5544041450777202, + "acc_norm_stderr": 0.03587014986075659 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.04227054451232199, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.04227054451232199 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5321100917431193, + "acc_stderr": 0.021393071222680818, + "acc_norm": 0.5321100917431193, + "acc_norm_stderr": 0.021393071222680818 + }, + 
"harness|ko_mmlu_formal_logic|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.04040610178208841, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.04040610178208841 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5196078431372549, + "acc_stderr": 0.028607893699576066, + "acc_norm": 0.5196078431372549, + "acc_norm_stderr": 0.028607893699576066 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6446280991735537, + "acc_stderr": 0.0436923632657398, + "acc_norm": 0.6446280991735537, + "acc_norm_stderr": 0.0436923632657398 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5, + "acc_stderr": 0.04068942293855797, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04068942293855797 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4068627450980392, + "acc_stderr": 0.019873802005061177, + "acc_norm": 0.4068627450980392, + "acc_norm_stderr": 0.019873802005061177 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3120567375886525, + "acc_stderr": 0.02764012054516993, + "acc_norm": 0.3120567375886525, + "acc_norm_stderr": 0.02764012054516993 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.30357142857142855, + "acc_stderr": 0.04364226155841044, + "acc_norm": 0.30357142857142855, + "acc_norm_stderr": 0.04364226155841044 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3472222222222222, + "acc_stderr": 0.032468872436376486, + "acc_norm": 0.3472222222222222, + "acc_norm_stderr": 0.032468872436376486 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24692737430167597, + "acc_stderr": 0.014422292204808852, + "acc_norm": 0.24692737430167597, + "acc_norm_stderr": 0.014422292204808852 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + 
"harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.35294117647058826, + "acc_stderr": 0.029029422815681404, + "acc_norm": 0.35294117647058826, + "acc_norm_stderr": 0.029029422815681404 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.44081632653061226, + "acc_stderr": 0.03178419114175364, + "acc_norm": 0.44081632653061226, + "acc_norm_stderr": 0.03178419114175364 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5654008438818565, + "acc_stderr": 0.03226759995510145, + "acc_norm": 0.5654008438818565, + "acc_norm_stderr": 0.03226759995510145 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.38005215123859193, + "acc_stderr": 0.012397328205137803, + "acc_norm": 0.38005215123859193, + "acc_norm_stderr": 0.012397328205137803 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.46568627450980393, + "acc_stderr": 0.03501038327635896, + "acc_norm": 0.46568627450980393, + "acc_norm_stderr": 0.03501038327635896 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5696969696969697, + "acc_stderr": 0.038662259628790774, + "acc_norm": 0.5696969696969697, + "acc_norm_stderr": 0.038662259628790774 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2484700122399021, + "mc1_stderr": 0.015127427096520705, + "mc2": 0.41502275168372754, + "mc2_stderr": 0.016098527627583504 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3187721369539551, + "acc_stderr": 0.016021427055309574, + "acc_norm": 0.33530106257378983, + "acc_norm_stderr": 0.01623098123298982 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + 
"harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + 
"harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "genne/eclectus1.1", + "model_sha": "8afd6163d1bd3f27576f4879c2aac1b67771654f", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/genne/eclectus_1.1_dedup/result_2023-12-27 23:59:41.json b/genne/eclectus_1.1_dedup/result_2023-12-27 23:59:41.json new file mode 100644 index 0000000000000000000000000000000000000000..9329c1c498d30ec88261848288e5f80ea926b35a --- /dev/null +++ b/genne/eclectus_1.1_dedup/result_2023-12-27 23:59:41.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.44112627986348124, + "acc_stderr": 0.014509747749064663, + "acc_norm": 0.4812286689419795, + "acc_norm_stderr": 0.014601090150633964 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4441346345349532, + "acc_stderr": 0.004958537988993583, + "acc_norm": 0.5880302728540131, + "acc_norm_stderr": 0.004911837730582199 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5029239766081871, + "acc_stderr": 0.03834759370936839, + "acc_norm": 0.5029239766081871, + "acc_norm_stderr": 0.03834759370936839 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5728155339805825, + "acc_stderr": 0.048979577377811674, + "acc_norm": 0.5728155339805825, + "acc_norm_stderr": 0.048979577377811674 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5402298850574713, + "acc_stderr": 0.01782199409693354, + "acc_norm": 0.5402298850574713, + "acc_norm_stderr": 0.01782199409693354 + 
}, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4, + "acc_stderr": 0.04232073695151589, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04232073695151589 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847415, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847415 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39574468085106385, + "acc_stderr": 0.031967586978353627, + "acc_norm": 0.39574468085106385, + "acc_norm_stderr": 0.031967586978353627 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4578313253012048, + "acc_stderr": 0.03878626771002361, + "acc_norm": 0.4578313253012048, + "acc_norm_stderr": 0.03878626771002361 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5176848874598071, + "acc_stderr": 0.028380322849077138, + "acc_norm": 0.5176848874598071, + "acc_norm_stderr": 0.028380322849077138 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.48878923766816146, + "acc_stderr": 0.033549366530984746, + "acc_norm": 0.48878923766816146, + "acc_norm_stderr": 0.033549366530984746 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5114503816793893, + "acc_stderr": 0.04384140024078016, + "acc_norm": 0.5114503816793893, + "acc_norm_stderr": 0.04384140024078016 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5505050505050505, + "acc_stderr": 0.035441324919479704, + "acc_norm": 0.5505050505050505, + "acc_norm_stderr": 0.035441324919479704 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.496551724137931, + "acc_stderr": 0.041665675771015785, + "acc_norm": 0.496551724137931, + "acc_norm_stderr": 0.041665675771015785 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.04158307533083286, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.04158307533083286 + }, + 
"harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5546218487394958, + "acc_stderr": 0.03228410626716391, + "acc_norm": 0.5546218487394958, + "acc_norm_stderr": 0.03228410626716391 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.44871794871794873, + "acc_stderr": 0.025217315184846475, + "acc_norm": 0.44871794871794873, + "acc_norm_stderr": 0.025217315184846475 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.62, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.62, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5370370370370371, + "acc_stderr": 0.04820403072760627, + "acc_norm": 0.5370370370370371, + "acc_norm_stderr": 0.04820403072760627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3645320197044335, + "acc_stderr": 0.0338640574606209, + "acc_norm": 0.3645320197044335, + "acc_norm_stderr": 0.0338640574606209 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.47419354838709676, + "acc_stderr": 0.02840609505765332, + "acc_norm": 0.47419354838709676, + "acc_norm_stderr": 0.02840609505765332 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7094017094017094, + "acc_stderr": 0.029745048572674078, + "acc_norm": 0.7094017094017094, + "acc_norm_stderr": 0.029745048572674078 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4679245283018868, + "acc_stderr": 0.03070948699255655, + "acc_norm": 0.4679245283018868, + "acc_norm_stderr": 0.03070948699255655 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6, + "acc_stderr": 0.0469237132203465, + "acc_norm": 0.6, + "acc_norm_stderr": 0.0469237132203465 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.32592592592592595, + "acc_stderr": 0.028578348365473072, + "acc_norm": 0.32592592592592595, + "acc_norm_stderr": 
0.028578348365473072 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3841059602649007, + "acc_stderr": 0.03971301814719198, + "acc_norm": 0.3841059602649007, + "acc_norm_stderr": 0.03971301814719198 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6069651741293532, + "acc_stderr": 0.0345368246603156, + "acc_norm": 0.6069651741293532, + "acc_norm_stderr": 0.0345368246603156 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4393063583815029, + "acc_stderr": 0.037842719328874674, + "acc_norm": 0.4393063583815029, + "acc_norm_stderr": 0.037842719328874674 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.36772486772486773, + "acc_stderr": 0.02483383982556242, + "acc_norm": 0.36772486772486773, + "acc_norm_stderr": 0.02483383982556242 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3958333333333333, + "acc_stderr": 0.04089465449325583, + "acc_norm": 0.3958333333333333, + "acc_norm_stderr": 0.04089465449325583 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4682080924855491, + "acc_stderr": 0.026864624366756646, + "acc_norm": 0.4682080924855491, + "acc_norm_stderr": 0.026864624366756646 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4539877300613497, + "acc_stderr": 0.0391170190467718, + "acc_norm": 0.4539877300613497, + "acc_norm_stderr": 0.0391170190467718 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.027648477877413324, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.027648477877413324 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 
0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5647668393782384, + "acc_stderr": 0.03578038165008587, + "acc_norm": 0.5647668393782384, + "acc_norm_stderr": 0.03578038165008587 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.044346007015849245, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.044346007015849245 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6036697247706422, + "acc_stderr": 0.02097146994790053, + "acc_norm": 0.6036697247706422, + "acc_norm_stderr": 0.02097146994790053 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.25396825396825395, + "acc_stderr": 0.03893259610604673, + "acc_norm": 0.25396825396825395, + "acc_norm_stderr": 0.03893259610604673 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4738562091503268, + "acc_stderr": 0.028590752958852394, + "acc_norm": 0.4738562091503268, + "acc_norm_stderr": 0.028590752958852394 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6942148760330579, + "acc_stderr": 0.04205953933884122, + "acc_norm": 0.6942148760330579, + "acc_norm_stderr": 0.04205953933884122 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4868421052631579, + "acc_stderr": 0.04067533136309174, + "acc_norm": 0.4868421052631579, + "acc_norm_stderr": 0.04067533136309174 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3839869281045752, + "acc_stderr": 0.019675808135281525, + "acc_norm": 0.3839869281045752, + "acc_norm_stderr": 0.019675808135281525 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3617021276595745, + "acc_stderr": 0.028663820147199502, + "acc_norm": 0.3617021276595745, + "acc_norm_stderr": 0.028663820147199502 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3482142857142857, + "acc_stderr": 
0.04521829902833587, + "acc_norm": 0.3482142857142857, + "acc_norm_stderr": 0.04521829902833587 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3472222222222222, + "acc_stderr": 0.032468872436376486, + "acc_norm": 0.3472222222222222, + "acc_norm_stderr": 0.032468872436376486 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24804469273743016, + "acc_stderr": 0.014444157808261462, + "acc_norm": 0.24804469273743016, + "acc_norm_stderr": 0.014444157808261462 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.41911764705882354, + "acc_stderr": 0.02997280717046463, + "acc_norm": 0.41911764705882354, + "acc_norm_stderr": 0.02997280717046463 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5183673469387755, + "acc_stderr": 0.03198761546763128, + "acc_norm": 0.5183673469387755, + "acc_norm_stderr": 0.03198761546763128 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5569620253164557, + "acc_stderr": 0.032335327775334835, + "acc_norm": 0.5569620253164557, + "acc_norm_stderr": 0.032335327775334835 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3376792698826597, + "acc_stderr": 0.01207856377714555, + "acc_norm": 0.3376792698826597, + "acc_norm_stderr": 0.01207856377714555 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5049019607843137, + "acc_stderr": 0.03509143375606786, + "acc_norm": 0.5049019607843137, + "acc_norm_stderr": 0.03509143375606786 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5757575757575758, + "acc_stderr": 0.03859268142070263, + "acc_norm": 0.5757575757575758, + "acc_norm_stderr": 0.03859268142070263 + }, + 
"harness|ko_truthfulqa_mc|0": { + "mc1": 0.27539779681762544, + "mc1_stderr": 0.015638135667775527, + "mc2": 0.4309522207775758, + "mc2_stderr": 0.01552456574847187 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.42621015348288077, + "acc_stderr": 0.01700212260948926, + "acc_norm": 0.4734356552538371, + "acc_norm_stderr": 0.017166075717577747 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "genne/eclectus_1.1_dedup", + "model_sha": "799c6defbb95129a59d3b6f1d363c6a7b908ba0e", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/genne/eclectus_7b_1.1/result_2023-12-26 23:43:05.json b/genne/eclectus_7b_1.1/result_2023-12-26 23:43:05.json new file mode 100644 index 0000000000000000000000000000000000000000..e0b0c6d6918092cf2cf28bb5a111d22721e79a4b --- /dev/null +++ b/genne/eclectus_7b_1.1/result_2023-12-26 23:43:05.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.38822525597269625, + "acc_stderr": 0.014241614207414047, + "acc_norm": 0.44197952218430037, + "acc_norm_stderr": 0.014512682523128343 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4113722366062537, + "acc_stderr": 0.004910767540867421, + 
"acc_norm": 0.5376419040031866, + "acc_norm_stderr": 0.004975621147406101 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4269005847953216, + "acc_stderr": 0.03793620616529917, + "acc_norm": 0.4269005847953216, + "acc_norm_stderr": 0.03793620616529917 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.44660194174757284, + "acc_stderr": 0.04922424153458934, + "acc_norm": 0.44660194174757284, + "acc_norm_stderr": 0.04922424153458934 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.47381864623243936, + "acc_stderr": 0.017855434554041986, + "acc_norm": 0.47381864623243936, + "acc_norm_stderr": 0.017855434554041986 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.04171654161354544, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.04171654161354544 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.22, + "acc_stderr": 0.0416333199893227, + "acc_norm": 0.22, + "acc_norm_stderr": 0.0416333199893227 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3617021276595745, + "acc_stderr": 0.03141082197596241, + "acc_norm": 0.3617021276595745, + "acc_norm_stderr": 0.03141082197596241 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3795180722891566, + "acc_stderr": 0.037777988227480165, + "acc_norm": 0.3795180722891566, + "acc_norm_stderr": 0.037777988227480165 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.40836012861736337, + "acc_stderr": 0.027917050748484627, + "acc_norm": 0.40836012861736337, + "acc_norm_stderr": 0.027917050748484627 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4260089686098655, + "acc_stderr": 0.033188332862172806, + "acc_norm": 0.4260089686098655, + "acc_norm_stderr": 0.033188332862172806 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3816793893129771, + "acc_stderr": 0.0426073515764456, + "acc_norm": 0.3816793893129771, + "acc_norm_stderr": 0.0426073515764456 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.38, + "acc_stderr": 
0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5252525252525253, + "acc_stderr": 0.03557806245087314, + "acc_norm": 0.5252525252525253, + "acc_norm_stderr": 0.03557806245087314 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.43448275862068964, + "acc_stderr": 0.04130740879555497, + "acc_norm": 0.43448275862068964, + "acc_norm_stderr": 0.04130740879555497 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.04023382273617747, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.04023382273617747 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3865546218487395, + "acc_stderr": 0.03163145807552379, + "acc_norm": 0.3865546218487395, + "acc_norm_stderr": 0.03163145807552379 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4, + "acc_stderr": 0.02483881198803316, + "acc_norm": 0.4, + "acc_norm_stderr": 0.02483881198803316 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.0471282125742677, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.0471282125742677 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3793103448275862, + "acc_stderr": 0.03413963805906235, + "acc_norm": 0.3793103448275862, + "acc_norm_stderr": 0.03413963805906235 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.38387096774193546, + "acc_stderr": 0.027666182075539635, + "acc_norm": 0.38387096774193546, + "acc_norm_stderr": 0.027666182075539635 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5641025641025641, + 
"acc_stderr": 0.032485775115783995, + "acc_norm": 0.5641025641025641, + "acc_norm_stderr": 0.032485775115783995 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.39622641509433965, + "acc_stderr": 0.03010279378179119, + "acc_norm": 0.39622641509433965, + "acc_norm_stderr": 0.03010279378179119 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.43636363636363634, + "acc_stderr": 0.04750185058907296, + "acc_norm": 0.43636363636363634, + "acc_norm_stderr": 0.04750185058907296 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.026842057873833706, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.026842057873833706 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.03710185726119996, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.03710185726119996 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.4527363184079602, + "acc_stderr": 0.035197027175769155, + "acc_norm": 0.4527363184079602, + "acc_norm_stderr": 0.035197027175769155 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.37572254335260113, + "acc_stderr": 0.03692820767264867, + "acc_norm": 0.37572254335260113, + "acc_norm_stderr": 0.03692820767264867 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.32275132275132273, + "acc_stderr": 0.024078943243597016, + "acc_norm": 0.32275132275132273, + "acc_norm_stderr": 0.024078943243597016 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.375, + "acc_stderr": 0.04048439222695598, + "acc_norm": 0.375, + "acc_norm_stderr": 0.04048439222695598 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.34, + "acc_stderr": 0.047609522856952344, + "acc_norm": 0.34, + "acc_norm_stderr": 0.047609522856952344 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 
0.40173410404624277, + "acc_stderr": 0.026394104177643634, + "acc_norm": 0.40173410404624277, + "acc_norm_stderr": 0.026394104177643634 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.39263803680981596, + "acc_stderr": 0.03836740907831029, + "acc_norm": 0.39263803680981596, + "acc_norm_stderr": 0.03836740907831029 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.39197530864197533, + "acc_stderr": 0.02716368603827123, + "acc_norm": 0.39197530864197533, + "acc_norm_stderr": 0.02716368603827123 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.45595854922279794, + "acc_stderr": 0.03594413711272437, + "acc_norm": 0.45595854922279794, + "acc_norm_stderr": 0.03594413711272437 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.04142439719489361, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.04142439719489361 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.43302752293577984, + "acc_stderr": 0.021244146569074345, + "acc_norm": 0.43302752293577984, + "acc_norm_stderr": 0.021244146569074345 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.0393253768039287, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.0393253768039287 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.43790849673202614, + "acc_stderr": 0.028408302020332694, + "acc_norm": 0.43790849673202614, + "acc_norm_stderr": 0.028408302020332694 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.045454545454545484, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.045454545454545484 + }, + 
"harness|ko_mmlu_astronomy|5": { + "acc": 0.3618421052631579, + "acc_stderr": 0.03910525752849723, + "acc_norm": 0.3618421052631579, + "acc_norm_stderr": 0.03910525752849723 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.36437908496732024, + "acc_stderr": 0.019469518221573695, + "acc_norm": 0.36437908496732024, + "acc_norm_stderr": 0.019469518221573695 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2730496453900709, + "acc_stderr": 0.026577860943307854, + "acc_norm": 0.2730496453900709, + "acc_norm_stderr": 0.026577860943307854 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.22321428571428573, + "acc_stderr": 0.039523019677025116, + "acc_norm": 0.22321428571428573, + "acc_norm_stderr": 0.039523019677025116 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.032568505702936464, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.032568505702936464 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23687150837988827, + "acc_stderr": 0.014219570788103986, + "acc_norm": 0.23687150837988827, + "acc_norm_stderr": 0.014219570788103986 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.30514705882352944, + "acc_stderr": 0.027971541370170598, + "acc_norm": 0.30514705882352944, + "acc_norm_stderr": 0.027971541370170598 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.39591836734693875, + "acc_stderr": 0.03130802899065685, + "acc_norm": 0.39591836734693875, + "acc_norm_stderr": 0.03130802899065685 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.4641350210970464, + "acc_stderr": 0.03246338898055659, 
+ "acc_norm": 0.4641350210970464, + "acc_norm_stderr": 0.03246338898055659 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.30964797913950454, + "acc_stderr": 0.011808598262503318, + "acc_norm": 0.30964797913950454, + "acc_norm_stderr": 0.011808598262503318 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.3480392156862745, + "acc_stderr": 0.03343311240488418, + "acc_norm": 0.3480392156862745, + "acc_norm_stderr": 0.03343311240488418 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3696969696969697, + "acc_stderr": 0.03769430314512568, + "acc_norm": 0.3696969696969697, + "acc_norm_stderr": 0.03769430314512568 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2521419828641371, + "mc1_stderr": 0.01520152224629994, + "mc2": 0.40284336981653474, + "mc2_stderr": 0.015378724374521922 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3010625737898465, + "acc_stderr": 0.015771113299945454, + "acc_norm": 0.3364817001180638, + "acc_norm_stderr": 0.01624508529438656 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + 
"harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "genne/eclectus_7b_1.1", + "model_sha": "199e7a5119dd83ff2ca1a34dfb681bc89842517c", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 
1, + "max_samples": null + } +} \ No newline at end of file diff --git a/genne/electus_yiko_dpo/result_2023-12-27 23:34:02.json b/genne/electus_yiko_dpo/result_2023-12-27 23:34:02.json new file mode 100644 index 0000000000000000000000000000000000000000..27ecceb67e3493541ca97b3eab687340d26ac41c --- /dev/null +++ b/genne/electus_yiko_dpo/result_2023-12-27 23:34:02.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.34044368600682595, + "acc_stderr": 0.013847460518892973, + "acc_norm": 0.3924914675767918, + "acc_norm_stderr": 0.014269634635670709 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3815972913762199, + "acc_stderr": 0.004847857546957471, + "acc_norm": 0.5190201155148377, + "acc_norm_stderr": 0.0049861698499463055 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.03811079669833531, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.03811079669833531 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4174757281553398, + "acc_stderr": 0.04882840548212238, + "acc_norm": 0.4174757281553398, + "acc_norm_stderr": 0.04882840548212238 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4763729246487867, + "acc_stderr": 0.01785998976517645, + "acc_norm": 0.4763729246487867, + "acc_norm_stderr": 0.01785998976517645 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4222222222222222, + "acc_stderr": 0.04266763404099582, + "acc_norm": 0.4222222222222222, + "acc_norm_stderr": 0.04266763404099582 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206824, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206824 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3617021276595745, + "acc_stderr": 0.0314108219759624, + "acc_norm": 0.3617021276595745, + "acc_norm_stderr": 0.0314108219759624 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.40963855421686746, + "acc_stderr": 0.03828401115079022, + "acc_norm": 0.40963855421686746, + 
"acc_norm_stderr": 0.03828401115079022 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3954983922829582, + "acc_stderr": 0.027770918531427838, + "acc_norm": 0.3954983922829582, + "acc_norm_stderr": 0.027770918531427838 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.40358744394618834, + "acc_stderr": 0.03292802819330313, + "acc_norm": 0.40358744394618834, + "acc_norm_stderr": 0.03292802819330313 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.40458015267175573, + "acc_stderr": 0.043046937953806645, + "acc_norm": 0.40458015267175573, + "acc_norm_stderr": 0.043046937953806645 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.47474747474747475, + "acc_stderr": 0.035578062450873145, + "acc_norm": 0.47474747474747475, + "acc_norm_stderr": 0.035578062450873145 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3724137931034483, + "acc_stderr": 0.0402873153294756, + "acc_norm": 0.3724137931034483, + "acc_norm_stderr": 0.0402873153294756 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237656, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237656 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3487394957983193, + "acc_stderr": 0.030956636328566548, + "acc_norm": 0.3487394957983193, + "acc_norm_stderr": 0.030956636328566548 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.382051282051282, + "acc_stderr": 0.024635549163908227, + "acc_norm": 0.382051282051282, + "acc_norm_stderr": 0.024635549163908227 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + 
"acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.046166311118017125, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.046166311118017125 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.32019704433497537, + "acc_stderr": 0.03282649385304151, + "acc_norm": 0.32019704433497537, + "acc_norm_stderr": 0.03282649385304151 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3774193548387097, + "acc_stderr": 0.027575960723278246, + "acc_norm": 0.3774193548387097, + "acc_norm_stderr": 0.027575960723278246 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.47863247863247865, + "acc_stderr": 0.032726164476349545, + "acc_norm": 0.47863247863247865, + "acc_norm_stderr": 0.032726164476349545 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3660377358490566, + "acc_stderr": 0.02964781353936525, + "acc_norm": 0.3660377358490566, + "acc_norm_stderr": 0.02964781353936525 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.38181818181818183, + "acc_stderr": 0.046534298079135075, + "acc_norm": 0.38181818181818183, + "acc_norm_stderr": 0.046534298079135075 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.02684205787383371, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.02684205787383371 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.32450331125827814, + "acc_stderr": 0.03822746937658753, + "acc_norm": 0.32450331125827814, + "acc_norm_stderr": 0.03822746937658753 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.4527363184079602, + "acc_stderr": 0.035197027175769155, + "acc_norm": 0.4527363184079602, + "acc_norm_stderr": 0.035197027175769155 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.34104046242774566, + "acc_stderr": 0.036146654241808254, + "acc_norm": 0.34104046242774566, + "acc_norm_stderr": 0.036146654241808254 + }, + 
"harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.291005291005291, + "acc_stderr": 0.02339382650048488, + "acc_norm": 0.291005291005291, + "acc_norm_stderr": 0.02339382650048488 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2986111111111111, + "acc_stderr": 0.03827052357950756, + "acc_norm": 0.2986111111111111, + "acc_norm_stderr": 0.03827052357950756 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.3872832369942196, + "acc_stderr": 0.026226158605124655, + "acc_norm": 0.3872832369942196, + "acc_norm_stderr": 0.026226158605124655 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3558282208588957, + "acc_stderr": 0.03761521380046734, + "acc_norm": 0.3558282208588957, + "acc_norm_stderr": 0.03761521380046734 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4228395061728395, + "acc_stderr": 0.027487472980871598, + "acc_norm": 0.4228395061728395, + "acc_norm_stderr": 0.027487472980871598 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.40932642487046633, + "acc_stderr": 0.03548608168860806, + "acc_norm": 0.40932642487046633, + "acc_norm_stderr": 0.03548608168860806 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2982456140350877, + "acc_stderr": 0.04303684033537315, + "acc_norm": 0.2982456140350877, + "acc_norm_stderr": 0.04303684033537315 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.46238532110091746, + "acc_stderr": 0.02137657527439758, + "acc_norm": 0.46238532110091746, + "acc_norm_stderr": 
0.02137657527439758 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.21428571428571427, + "acc_stderr": 0.03670066451047182, + "acc_norm": 0.21428571428571427, + "acc_norm_stderr": 0.03670066451047182 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3660130718954248, + "acc_stderr": 0.027582811415159624, + "acc_norm": 0.3660130718954248, + "acc_norm_stderr": 0.027582811415159624 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.4462809917355372, + "acc_stderr": 0.0453793517794788, + "acc_norm": 0.4462809917355372, + "acc_norm_stderr": 0.0453793517794788 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4144736842105263, + "acc_stderr": 0.04008973785779206, + "acc_norm": 0.4144736842105263, + "acc_norm_stderr": 0.04008973785779206 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.35784313725490197, + "acc_stderr": 0.019393058402355442, + "acc_norm": 0.35784313725490197, + "acc_norm_stderr": 0.019393058402355442 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.34397163120567376, + "acc_stderr": 0.02833801742861131, + "acc_norm": 0.34397163120567376, + "acc_norm_stderr": 0.02833801742861131 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.042878587513404565, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.042878587513404565 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.30092592592592593, + "acc_stderr": 0.03128039084329882, + "acc_norm": 0.30092592592592593, + "acc_norm_stderr": 0.03128039084329882 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + 
"acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.34191176470588236, + "acc_stderr": 0.02881472242225417, + "acc_norm": 0.34191176470588236, + "acc_norm_stderr": 0.02881472242225417 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.27755102040816326, + "acc_stderr": 0.028666857790274645, + "acc_norm": 0.27755102040816326, + "acc_norm_stderr": 0.028666857790274645 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.510548523206751, + "acc_stderr": 0.032539983791662855, + "acc_norm": 0.510548523206751, + "acc_norm_stderr": 0.032539983791662855 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.27444589308996087, + "acc_stderr": 0.011397043163078154, + "acc_norm": 0.27444589308996087, + "acc_norm_stderr": 0.011397043163078154 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4019607843137255, + "acc_stderr": 0.034411900234824655, + "acc_norm": 0.4019607843137255, + "acc_norm_stderr": 0.034411900234824655 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3878787878787879, + "acc_stderr": 0.03804913653971011, + "acc_norm": 0.3878787878787879, + "acc_norm_stderr": 0.03804913653971011 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2962056303549572, + "mc1_stderr": 0.015983595101811392, + "mc2": 0.4351917744597652, + "mc2_stderr": 0.015624548959574044 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.32113341204250295, + "acc_stderr": 0.016052762579111576, + "acc_norm": 0.3624557260920897, + "acc_norm_stderr": 0.016527131240453713 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + 
"harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + 
"harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "genne/electus_yiko_dpo", + "model_sha": "e6917de3d9ace4fd400e4a463ca8c1605ac613c1", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/genne/kiwi_solar_merge_slerp/result_2023-12-28 04:04:24.json b/genne/kiwi_solar_merge_slerp/result_2023-12-28 04:04:24.json new file mode 100644 index 0000000000000000000000000000000000000000..649379aa8346b82a57d93304940fdea072a58976 --- /dev/null +++ b/genne/kiwi_solar_merge_slerp/result_2023-12-28 04:04:24.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4044368600682594, + "acc_stderr": 0.014342036483436177, + "acc_norm": 0.4880546075085324, + "acc_norm_stderr": 0.014607220340597171 + }, + "harness|ko_hellaswag|10": { + "acc": 0.41874128659629556, + "acc_stderr": 0.004923445627861518, + "acc_norm": 0.5595498904600678, + "acc_norm_stderr": 0.0049542655953734565 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5730994152046783, + "acc_stderr": 0.03793620616529916, + "acc_norm": 0.5730994152046783, + "acc_norm_stderr": 0.03793620616529916 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6504854368932039, + "acc_stderr": 0.047211885060971716, + "acc_norm": 0.6504854368932039, + "acc_norm_stderr": 0.047211885060971716 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6462324393358876, + 
"acc_stderr": 0.017098184708161896, + "acc_norm": 0.6462324393358876, + "acc_norm_stderr": 0.017098184708161896 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3925925925925926, + "acc_stderr": 0.04218506215368878, + "acc_norm": 0.3925925925925926, + "acc_norm_stderr": 0.04218506215368878 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.49361702127659574, + "acc_stderr": 0.03268335899936338, + "acc_norm": 0.49361702127659574, + "acc_norm_stderr": 0.03268335899936338 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.5, + "acc_stderr": 0.03892494720807614, + "acc_norm": 0.5, + "acc_norm_stderr": 0.03892494720807614 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6077170418006431, + "acc_stderr": 0.027731258647012, + "acc_norm": 0.6077170418006431, + "acc_norm_stderr": 0.027731258647012 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5560538116591929, + "acc_stderr": 0.03334625674242728, + "acc_norm": 0.5560538116591929, + "acc_norm_stderr": 0.03334625674242728 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6030534351145038, + "acc_stderr": 0.04291135671009224, + "acc_norm": 0.6030534351145038, + "acc_norm_stderr": 0.04291135671009224 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465918, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465918 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7171717171717171, + "acc_stderr": 0.03208779558786752, + "acc_norm": 0.7171717171717171, + "acc_norm_stderr": 0.03208779558786752 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5172413793103449, + "acc_stderr": 0.04164188720169375, + "acc_norm": 0.5172413793103449, + "acc_norm_stderr": 0.04164188720169375 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.38235294117647056, + "acc_stderr": 
0.04835503696107223, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.04835503696107223 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.634453781512605, + "acc_stderr": 0.031282177063684614, + "acc_norm": 0.634453781512605, + "acc_norm_stderr": 0.031282177063684614 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5923076923076923, + "acc_stderr": 0.024915243985987857, + "acc_norm": 0.5923076923076923, + "acc_norm_stderr": 0.024915243985987857 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.41, + "acc_stderr": 0.04943110704237101, + "acc_norm": 0.41, + "acc_norm_stderr": 0.04943110704237101 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6574074074074074, + "acc_stderr": 0.045879047413018105, + "acc_norm": 0.6574074074074074, + "acc_norm_stderr": 0.045879047413018105 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4039408866995074, + "acc_stderr": 0.0345245390382204, + "acc_norm": 0.4039408866995074, + "acc_norm_stderr": 0.0345245390382204 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6096774193548387, + "acc_stderr": 0.027751256636969583, + "acc_norm": 0.6096774193548387, + "acc_norm_stderr": 0.027751256636969583 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7863247863247863, + "acc_stderr": 0.02685345037700914, + "acc_norm": 0.7863247863247863, + "acc_norm_stderr": 0.02685345037700914 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5245283018867924, + "acc_stderr": 0.030735822206205615, + "acc_norm": 0.5245283018867924, + "acc_norm_stderr": 0.030735822206205615 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5636363636363636, + "acc_stderr": 0.04750185058907296, + "acc_norm": 0.5636363636363636, + "acc_norm_stderr": 0.04750185058907296 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + 
"acc": 0.37037037037037035, + "acc_stderr": 0.02944316932303154, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.02944316932303154 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3841059602649007, + "acc_stderr": 0.03971301814719198, + "acc_norm": 0.3841059602649007, + "acc_norm_stderr": 0.03971301814719198 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.681592039800995, + "acc_stderr": 0.03294118479054095, + "acc_norm": 0.681592039800995, + "acc_norm_stderr": 0.03294118479054095 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.49710982658959535, + "acc_stderr": 0.038124005659748335, + "acc_norm": 0.49710982658959535, + "acc_norm_stderr": 0.038124005659748335 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.43915343915343913, + "acc_stderr": 0.02555992055053101, + "acc_norm": 0.43915343915343913, + "acc_norm_stderr": 0.02555992055053101 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5347222222222222, + "acc_stderr": 0.04171115858181618, + "acc_norm": 0.5347222222222222, + "acc_norm_stderr": 0.04171115858181618 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.75, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.75, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.6213872832369942, + "acc_stderr": 0.02611374936131034, + "acc_norm": 0.6213872832369942, + "acc_norm_stderr": 0.02611374936131034 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.49079754601226994, + "acc_stderr": 0.039277056007874414, + "acc_norm": 0.49079754601226994, + "acc_norm_stderr": 0.039277056007874414 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5895061728395061, + "acc_stderr": 0.027371350925124768, + "acc_norm": 0.5895061728395061, + "acc_norm_stderr": 0.027371350925124768 + }, + 
"harness|ko_mmlu_college_mathematics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7253886010362695, + "acc_stderr": 0.03221024508041154, + "acc_norm": 0.7253886010362695, + "acc_norm_stderr": 0.03221024508041154 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.43859649122807015, + "acc_stderr": 0.04668000738510455, + "acc_norm": 0.43859649122807015, + "acc_norm_stderr": 0.04668000738510455 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6752293577981652, + "acc_stderr": 0.02007772910931033, + "acc_norm": 0.6752293577981652, + "acc_norm_stderr": 0.02007772910931033 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4523809523809524, + "acc_stderr": 0.044518079590553275, + "acc_norm": 0.4523809523809524, + "acc_norm_stderr": 0.044518079590553275 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5947712418300654, + "acc_stderr": 0.028110928492809075, + "acc_norm": 0.5947712418300654, + "acc_norm_stderr": 0.028110928492809075 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.53, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.53, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7355371900826446, + "acc_stderr": 0.04026187527591207, + "acc_norm": 0.7355371900826446, + "acc_norm_stderr": 0.04026187527591207 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5657894736842105, + "acc_stderr": 0.0403356566784832, + "acc_norm": 0.5657894736842105, + "acc_norm_stderr": 0.0403356566784832 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5130718954248366, + "acc_stderr": 0.020220920829626902, + "acc_norm": 0.5130718954248366, + "acc_norm_stderr": 0.020220920829626902 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.425531914893617, + "acc_stderr": 0.02949482760014437, + "acc_norm": 0.425531914893617, + 
"acc_norm_stderr": 0.02949482760014437 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.41964285714285715, + "acc_stderr": 0.04684099321077106, + "acc_norm": 0.41964285714285715, + "acc_norm_stderr": 0.04684099321077106 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5092592592592593, + "acc_stderr": 0.034093869469927006, + "acc_norm": 0.5092592592592593, + "acc_norm_stderr": 0.034093869469927006 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2558659217877095, + "acc_stderr": 0.014593620923210744, + "acc_norm": 0.2558659217877095, + "acc_norm_stderr": 0.014593620923210744 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.74, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.74, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4852941176470588, + "acc_stderr": 0.03035969707904612, + "acc_norm": 0.4852941176470588, + "acc_norm_stderr": 0.03035969707904612 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6244897959183674, + "acc_stderr": 0.031001209039894843, + "acc_norm": 0.6244897959183674, + "acc_norm_stderr": 0.031001209039894843 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7468354430379747, + "acc_stderr": 0.028304657943035307, + "acc_norm": 0.7468354430379747, + "acc_norm_stderr": 0.028304657943035307 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.41916558018252936, + "acc_stderr": 0.012602244505788222, + "acc_norm": 0.41916558018252936, + "acc_norm_stderr": 0.012602244505788222 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.696078431372549, + "acc_stderr": 0.032282103870378914, + "acc_norm": 0.696078431372549, + "acc_norm_stderr": 0.032282103870378914 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 
0.6787878787878788, + "acc_stderr": 0.03646204963253812, + "acc_norm": 0.6787878787878788, + "acc_norm_stderr": 0.03646204963253812 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.32068543451652387, + "mc1_stderr": 0.0163391703732809, + "mc2": 0.49040076769077373, + "mc2_stderr": 0.016166130098772675 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5608028335301063, + "acc_stderr": 0.017062775744780705, + "acc_norm": 0.5702479338842975, + "acc_norm_stderr": 0.017019847535972205 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + 
"harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "genne/kiwi_solar_merge_slerp", + "model_sha": "223abb74d9d41d83e6c39d8ceedf86d3e270b6e2", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/genne/kiwi_solar_merge_ties/result_2023-12-28 05:58:45.json b/genne/kiwi_solar_merge_ties/result_2023-12-28 05:58:45.json new file mode 100644 index 0000000000000000000000000000000000000000..c62a0ead703c9f5161d0a9c69166563203c8308e --- /dev/null +++ b/genne/kiwi_solar_merge_ties/result_2023-12-28 05:58:45.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4044368600682594, + "acc_stderr": 
0.014342036483436177, + "acc_norm": 0.4880546075085324, + "acc_norm_stderr": 0.014607220340597171 + }, + "harness|ko_hellaswag|10": { + "acc": 0.41874128659629556, + "acc_stderr": 0.004923445627861518, + "acc_norm": 0.5596494722166899, + "acc_norm_stderr": 0.004954146286513353 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5730994152046783, + "acc_stderr": 0.03793620616529916, + "acc_norm": 0.5730994152046783, + "acc_norm_stderr": 0.03793620616529916 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6504854368932039, + "acc_stderr": 0.047211885060971716, + "acc_norm": 0.6504854368932039, + "acc_norm_stderr": 0.047211885060971716 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6462324393358876, + "acc_stderr": 0.017098184708161896, + "acc_norm": 0.6462324393358876, + "acc_norm_stderr": 0.017098184708161896 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3925925925925926, + "acc_stderr": 0.04218506215368878, + "acc_norm": 0.3925925925925926, + "acc_norm_stderr": 0.04218506215368878 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.49361702127659574, + "acc_stderr": 0.03268335899936338, + "acc_norm": 0.49361702127659574, + "acc_norm_stderr": 0.03268335899936338 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.5, + "acc_stderr": 0.03892494720807614, + "acc_norm": 0.5, + "acc_norm_stderr": 0.03892494720807614 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6077170418006431, + "acc_stderr": 0.027731258647012, + "acc_norm": 0.6077170418006431, + "acc_norm_stderr": 0.027731258647012 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5560538116591929, + "acc_stderr": 0.03334625674242728, + "acc_norm": 0.5560538116591929, + "acc_norm_stderr": 0.03334625674242728 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6030534351145038, + "acc_stderr": 0.04291135671009224, + 
"acc_norm": 0.6030534351145038, + "acc_norm_stderr": 0.04291135671009224 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465918, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465918 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7171717171717171, + "acc_stderr": 0.03208779558786752, + "acc_norm": 0.7171717171717171, + "acc_norm_stderr": 0.03208779558786752 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5172413793103449, + "acc_stderr": 0.04164188720169375, + "acc_norm": 0.5172413793103449, + "acc_norm_stderr": 0.04164188720169375 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.04835503696107223, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.04835503696107223 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.634453781512605, + "acc_stderr": 0.031282177063684614, + "acc_norm": 0.634453781512605, + "acc_norm_stderr": 0.031282177063684614 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5923076923076923, + "acc_stderr": 0.024915243985987857, + "acc_norm": 0.5923076923076923, + "acc_norm_stderr": 0.024915243985987857 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.41, + "acc_stderr": 0.04943110704237101, + "acc_norm": 0.41, + "acc_norm_stderr": 0.04943110704237101 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6574074074074074, + "acc_stderr": 0.045879047413018105, + "acc_norm": 0.6574074074074074, + "acc_norm_stderr": 0.045879047413018105 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4039408866995074, + "acc_stderr": 0.0345245390382204, + "acc_norm": 0.4039408866995074, + "acc_norm_stderr": 0.0345245390382204 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6096774193548387, + "acc_stderr": 
0.027751256636969583, + "acc_norm": 0.6096774193548387, + "acc_norm_stderr": 0.027751256636969583 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7863247863247863, + "acc_stderr": 0.02685345037700914, + "acc_norm": 0.7863247863247863, + "acc_norm_stderr": 0.02685345037700914 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5245283018867924, + "acc_stderr": 0.030735822206205615, + "acc_norm": 0.5245283018867924, + "acc_norm_stderr": 0.030735822206205615 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5636363636363636, + "acc_stderr": 0.04750185058907296, + "acc_norm": 0.5636363636363636, + "acc_norm_stderr": 0.04750185058907296 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.02944316932303154, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.02944316932303154 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3841059602649007, + "acc_stderr": 0.03971301814719198, + "acc_norm": 0.3841059602649007, + "acc_norm_stderr": 0.03971301814719198 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.681592039800995, + "acc_stderr": 0.03294118479054095, + "acc_norm": 0.681592039800995, + "acc_norm_stderr": 0.03294118479054095 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.49710982658959535, + "acc_stderr": 0.038124005659748335, + "acc_norm": 0.49710982658959535, + "acc_norm_stderr": 0.038124005659748335 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.43915343915343913, + "acc_stderr": 0.02555992055053101, + "acc_norm": 0.43915343915343913, + "acc_norm_stderr": 0.02555992055053101 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5347222222222222, + "acc_stderr": 0.04171115858181618, + "acc_norm": 0.5347222222222222, + "acc_norm_stderr": 0.04171115858181618 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + 
"harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.75, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.75, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.6213872832369942, + "acc_stderr": 0.02611374936131034, + "acc_norm": 0.6213872832369942, + "acc_norm_stderr": 0.02611374936131034 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.49079754601226994, + "acc_stderr": 0.039277056007874414, + "acc_norm": 0.49079754601226994, + "acc_norm_stderr": 0.039277056007874414 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5895061728395061, + "acc_stderr": 0.027371350925124768, + "acc_norm": 0.5895061728395061, + "acc_norm_stderr": 0.027371350925124768 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7253886010362695, + "acc_stderr": 0.03221024508041154, + "acc_norm": 0.7253886010362695, + "acc_norm_stderr": 0.03221024508041154 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.43859649122807015, + "acc_stderr": 0.04668000738510455, + "acc_norm": 0.43859649122807015, + "acc_norm_stderr": 0.04668000738510455 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6752293577981652, + "acc_stderr": 0.02007772910931033, + "acc_norm": 0.6752293577981652, + "acc_norm_stderr": 0.02007772910931033 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4523809523809524, + "acc_stderr": 0.044518079590553275, + "acc_norm": 0.4523809523809524, + "acc_norm_stderr": 0.044518079590553275 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5947712418300654, + "acc_stderr": 0.028110928492809075, + "acc_norm": 0.5947712418300654, + "acc_norm_stderr": 0.028110928492809075 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.53, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.53, + "acc_norm_stderr": 0.05016135580465919 + }, 
+ "harness|ko_mmlu_international_law|5": { + "acc": 0.7355371900826446, + "acc_stderr": 0.04026187527591207, + "acc_norm": 0.7355371900826446, + "acc_norm_stderr": 0.04026187527591207 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5657894736842105, + "acc_stderr": 0.0403356566784832, + "acc_norm": 0.5657894736842105, + "acc_norm_stderr": 0.0403356566784832 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5130718954248366, + "acc_stderr": 0.020220920829626902, + "acc_norm": 0.5130718954248366, + "acc_norm_stderr": 0.020220920829626902 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.425531914893617, + "acc_stderr": 0.02949482760014437, + "acc_norm": 0.425531914893617, + "acc_norm_stderr": 0.02949482760014437 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.41964285714285715, + "acc_stderr": 0.04684099321077106, + "acc_norm": 0.41964285714285715, + "acc_norm_stderr": 0.04684099321077106 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5092592592592593, + "acc_stderr": 0.034093869469927006, + "acc_norm": 0.5092592592592593, + "acc_norm_stderr": 0.034093869469927006 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2558659217877095, + "acc_stderr": 0.014593620923210744, + "acc_norm": 0.2558659217877095, + "acc_norm_stderr": 0.014593620923210744 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.74, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.74, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4852941176470588, + "acc_stderr": 0.03035969707904612, + "acc_norm": 0.4852941176470588, + "acc_norm_stderr": 0.03035969707904612 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6244897959183674, + "acc_stderr": 0.031001209039894843, + "acc_norm": 
0.6244897959183674, + "acc_norm_stderr": 0.031001209039894843 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7468354430379747, + "acc_stderr": 0.028304657943035307, + "acc_norm": 0.7468354430379747, + "acc_norm_stderr": 0.028304657943035307 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.41916558018252936, + "acc_stderr": 0.012602244505788222, + "acc_norm": 0.41916558018252936, + "acc_norm_stderr": 0.012602244505788222 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.696078431372549, + "acc_stderr": 0.032282103870378914, + "acc_norm": 0.696078431372549, + "acc_norm_stderr": 0.032282103870378914 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6787878787878788, + "acc_stderr": 0.03646204963253812, + "acc_norm": 0.6787878787878788, + "acc_norm_stderr": 0.03646204963253812 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.32068543451652387, + "mc1_stderr": 0.0163391703732809, + "mc2": 0.49042655611434866, + "mc2_stderr": 0.016165929101778585 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5608028335301063, + "acc_stderr": 0.017062775744780705, + "acc_norm": 0.5702479338842975, + "acc_norm_stderr": 0.017019847535972205 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + 
"harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "genne/kiwi_solar_merge_ties", + "model_sha": 
"6112b7b551288ec773639693987d950a26f035db", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/genne/otter2.1/result_2023-11-10 01:58:53.json b/genne/otter2.1/result_2023-11-10 01:58:53.json new file mode 100644 index 0000000000000000000000000000000000000000..db3d66ac799f279bbe938836fe67892ce6c3b103 --- /dev/null +++ b/genne/otter2.1/result_2023-11-10 01:58:53.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.30119453924914674, + "acc_stderr": 0.013406741767847619, + "acc_norm": 0.34982935153583616, + "acc_norm_stderr": 0.013936809212158285 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3547102170882294, + "acc_stderr": 0.004774476498238616, + "acc_norm": 0.4565823541127266, + "acc_norm_stderr": 0.0049709334202319285 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.3742690058479532, + "acc_stderr": 0.03711601185389481, + "acc_norm": 0.3742690058479532, + "acc_norm_stderr": 0.03711601185389481 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2912621359223301, + "acc_stderr": 0.04498676320572924, + "acc_norm": 0.2912621359223301, + "acc_norm_stderr": 0.04498676320572924 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.35759897828863346, + "acc_stderr": 0.017139488998803284, + "acc_norm": 0.35759897828863346, + "acc_norm_stderr": 0.017139488998803284 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34074074074074073, + "acc_stderr": 0.040943762699967946, + "acc_norm": 0.34074074074074073, + "acc_norm_stderr": 0.040943762699967946 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421255, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421255 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2936170212765957, + "acc_stderr": 0.029771642712491227, + "acc_norm": 0.2936170212765957, + "acc_norm_stderr": 
0.029771642712491227 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3313253012048193, + "acc_stderr": 0.03664314777288086, + "acc_norm": 0.3313253012048193, + "acc_norm_stderr": 0.03664314777288086 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3311897106109325, + "acc_stderr": 0.026730620728004917, + "acc_norm": 0.3311897106109325, + "acc_norm_stderr": 0.026730620728004917 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.336322869955157, + "acc_stderr": 0.031708824268455, + "acc_norm": 0.336322869955157, + "acc_norm_stderr": 0.031708824268455 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.366412213740458, + "acc_stderr": 0.04225875451969638, + "acc_norm": 0.366412213740458, + "acc_norm_stderr": 0.04225875451969638 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.36363636363636365, + "acc_stderr": 0.03427308652999934, + "acc_norm": 0.36363636363636365, + "acc_norm_stderr": 0.03427308652999934 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.27586206896551724, + "acc_stderr": 0.037245636197746325, + "acc_norm": 0.27586206896551724, + "acc_norm_stderr": 0.037245636197746325 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.17647058823529413, + "acc_stderr": 0.03793281185307809, + "acc_norm": 0.17647058823529413, + "acc_norm_stderr": 0.03793281185307809 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.29831932773109243, + "acc_stderr": 0.029719142876342863, + "acc_norm": 0.29831932773109243, + "acc_norm_stderr": 0.029719142876342863 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.24871794871794872, + "acc_stderr": 0.0219169577092138, + "acc_norm": 0.24871794871794872, + "acc_norm_stderr": 0.0219169577092138 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + 
"acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.044531975073749834, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.044531975073749834 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2561576354679803, + "acc_stderr": 0.030712730070982592, + "acc_norm": 0.2561576354679803, + "acc_norm_stderr": 0.030712730070982592 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3064516129032258, + "acc_stderr": 0.026226485652553883, + "acc_norm": 0.3064516129032258, + "acc_norm_stderr": 0.026226485652553883 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.3418803418803419, + "acc_stderr": 0.03107502852650775, + "acc_norm": 0.3418803418803419, + "acc_norm_stderr": 0.03107502852650775 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3018867924528302, + "acc_stderr": 0.02825420034443866, + "acc_norm": 0.3018867924528302, + "acc_norm_stderr": 0.02825420034443866 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.3090909090909091, + "acc_stderr": 0.044262946482000985, + "acc_norm": 0.3090909090909091, + "acc_norm_stderr": 0.044262946482000985 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2518518518518518, + "acc_stderr": 0.026466117538959912, + "acc_norm": 0.2518518518518518, + "acc_norm_stderr": 0.026466117538959912 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2052980132450331, + "acc_stderr": 0.03297986648473834, + "acc_norm": 0.2052980132450331, + "acc_norm_stderr": 0.03297986648473834 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.31343283582089554, + "acc_stderr": 0.032801882053486414, + "acc_norm": 0.31343283582089554, + "acc_norm_stderr": 0.032801882053486414 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 
0.26011560693641617, + "acc_stderr": 0.033450369167889904, + "acc_norm": 0.26011560693641617, + "acc_norm_stderr": 0.033450369167889904 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.25132275132275134, + "acc_stderr": 0.022340482339643898, + "acc_norm": 0.25132275132275134, + "acc_norm_stderr": 0.022340482339643898 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2152777777777778, + "acc_stderr": 0.03437079344106135, + "acc_norm": 0.2152777777777778, + "acc_norm_stderr": 0.03437079344106135 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.15, + "acc_stderr": 0.03588702812826371, + "acc_norm": 0.15, + "acc_norm_stderr": 0.03588702812826371 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.31213872832369943, + "acc_stderr": 0.024946792225272314, + "acc_norm": 0.31213872832369943, + "acc_norm_stderr": 0.024946792225272314 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3006134969325153, + "acc_stderr": 0.0360251131880677, + "acc_norm": 0.3006134969325153, + "acc_norm_stderr": 0.0360251131880677 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.32407407407407407, + "acc_stderr": 0.02604176620271716, + "acc_norm": 0.32407407407407407, + "acc_norm_stderr": 0.02604176620271716 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.23834196891191708, + "acc_stderr": 0.030748905363909895, + "acc_norm": 0.23834196891191708, + "acc_norm_stderr": 0.030748905363909895 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.04049339297748142, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.04049339297748142 + }, + 
"harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.30091743119266057, + "acc_stderr": 0.019664751366802114, + "acc_norm": 0.30091743119266057, + "acc_norm_stderr": 0.019664751366802114 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.1984126984126984, + "acc_stderr": 0.03567016675276862, + "acc_norm": 0.1984126984126984, + "acc_norm_stderr": 0.03567016675276862 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3366013071895425, + "acc_stderr": 0.027057974624494382, + "acc_norm": 0.3366013071895425, + "acc_norm_stderr": 0.027057974624494382 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.22, + "acc_stderr": 0.0416333199893227, + "acc_norm": 0.22, + "acc_norm_stderr": 0.0416333199893227 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.38016528925619836, + "acc_stderr": 0.04431324501968431, + "acc_norm": 0.38016528925619836, + "acc_norm_stderr": 0.04431324501968431 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.03583496176361061, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.03583496176361061 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.28594771241830064, + "acc_stderr": 0.01828048507295467, + "acc_norm": 0.28594771241830064, + "acc_norm_stderr": 0.01828048507295467 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2765957446808511, + "acc_stderr": 0.02668456434046099, + "acc_norm": 0.2765957446808511, + "acc_norm_stderr": 0.02668456434046099 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3392857142857143, + "acc_stderr": 0.0449394906861354, + "acc_norm": 0.3392857142857143, + "acc_norm_stderr": 0.0449394906861354 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.24074074074074073, + "acc_stderr": 0.029157522184605607, + "acc_norm": 0.24074074074074073, + "acc_norm_stderr": 0.029157522184605607 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.25251396648044694, + "acc_stderr": 0.014530330201468636, + "acc_norm": 
0.25251396648044694, + "acc_norm_stderr": 0.014530330201468636 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.34191176470588236, + "acc_stderr": 0.028814722422254177, + "acc_norm": 0.34191176470588236, + "acc_norm_stderr": 0.028814722422254177 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.20816326530612245, + "acc_stderr": 0.02599111767281329, + "acc_norm": 0.20816326530612245, + "acc_norm_stderr": 0.02599111767281329 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.3459915611814346, + "acc_stderr": 0.030964810588786716, + "acc_norm": 0.3459915611814346, + "acc_norm_stderr": 0.030964810588786716 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.27640156453715775, + "acc_stderr": 0.011422153194553582, + "acc_norm": 0.27640156453715775, + "acc_norm_stderr": 0.011422153194553582 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.30392156862745096, + "acc_stderr": 0.03228210387037895, + "acc_norm": 0.30392156862745096, + "acc_norm_stderr": 0.03228210387037895 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.03681050869161548, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.03681050869161548 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2607099143206854, + "mc1_stderr": 0.015368841620766373, + "mc2": 0.40833060750474154, + "mc2_stderr": 0.015549016246770386 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.22668240850059032, + "acc_stderr": 0.014394701800505921, + "acc_norm": 0.2809917355371901, + "acc_norm_stderr": 0.015453559655458278 + } + }, + "versions": { + "all": 0, + 
"harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + 
"harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "genne/otter2.1", + "model_sha": "8f65311a3b97d2625b09d8d9cba853e791a52faf", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/genne/otter3.1.3.0/result_2023-11-10 02:56:11.json b/genne/otter3.1.3.0/result_2023-11-10 02:56:11.json new file mode 100644 index 0000000000000000000000000000000000000000..d297a8b33fdb230478afbfd18be05376e4949dfb --- /dev/null +++ b/genne/otter3.1.3.0/result_2023-11-10 02:56:11.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.19880546075085323, + "acc_stderr": 0.011662850198175534, + "acc_norm": 0.25, + "acc_norm_stderr": 0.012653835621466646 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3110934076877116, + "acc_stderr": 0.004619948037222912, + "acc_norm": 0.3563035251941844, + "acc_norm_stderr": 0.004779276329704028 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.3216374269005848, + "acc_stderr": 0.03582529442573122, + "acc_norm": 0.3216374269005848, + "acc_norm_stderr": 0.03582529442573122 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.17475728155339806, + "acc_stderr": 0.037601780060266196, + 
"acc_norm": 0.17475728155339806, + "acc_norm_stderr": 0.037601780060266196 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.23754789272030652, + "acc_stderr": 0.015218733046150191, + "acc_norm": 0.23754789272030652, + "acc_norm_stderr": 0.015218733046150191 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.18518518518518517, + "acc_stderr": 0.03355677216313142, + "acc_norm": 0.18518518518518517, + "acc_norm_stderr": 0.03355677216313142 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932268, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932268 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.26382978723404255, + "acc_stderr": 0.028809989854102987, + "acc_norm": 0.26382978723404255, + "acc_norm_stderr": 0.028809989854102987 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.28313253012048195, + "acc_stderr": 0.03507295431370518, + "acc_norm": 0.28313253012048195, + "acc_norm_stderr": 0.03507295431370518 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.1864951768488746, + "acc_stderr": 0.022122439772480757, + "acc_norm": 0.1864951768488746, + "acc_norm_stderr": 0.022122439772480757 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.31390134529147984, + "acc_stderr": 0.031146796482972465, + "acc_norm": 0.31390134529147984, + "acc_norm_stderr": 0.031146796482972465 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2595419847328244, + "acc_stderr": 0.03844876139785271, + "acc_norm": 0.2595419847328244, + "acc_norm_stderr": 0.03844876139785271 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.17676767676767677, + "acc_stderr": 0.027178752639044915, + "acc_norm": 0.17676767676767677, + "acc_norm_stderr": 0.027178752639044915 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2413793103448276, + "acc_stderr": 
0.03565998174135302, + "acc_norm": 0.2413793103448276, + "acc_norm_stderr": 0.03565998174135302 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237654, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237654 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.21008403361344538, + "acc_stderr": 0.026461398717471874, + "acc_norm": 0.21008403361344538, + "acc_norm_stderr": 0.026461398717471874 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2205128205128205, + "acc_stderr": 0.021020672680827912, + "acc_norm": 0.2205128205128205, + "acc_norm_stderr": 0.021020672680827912 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.18, + "acc_stderr": 0.038612291966536955, + "acc_norm": 0.18, + "acc_norm_stderr": 0.038612291966536955 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.04236511258094633, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.04236511258094633 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.15270935960591134, + "acc_stderr": 0.025308904539380627, + "acc_norm": 0.15270935960591134, + "acc_norm_stderr": 0.025308904539380627 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.1774193548387097, + "acc_stderr": 0.021732540689329276, + "acc_norm": 0.1774193548387097, + "acc_norm_stderr": 0.021732540689329276 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2905982905982906, + "acc_stderr": 0.029745048572674057, + "acc_norm": 0.2905982905982906, + "acc_norm_stderr": 0.029745048572674057 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.21509433962264152, + "acc_stderr": 0.025288394502891377, + "acc_norm": 0.21509433962264152, + "acc_norm_stderr": 0.025288394502891377 + }, + 
"harness|ko_mmlu_public_relations|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03955932861795833, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03955932861795833 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2111111111111111, + "acc_stderr": 0.024882116857655075, + "acc_norm": 0.2111111111111111, + "acc_norm_stderr": 0.024882116857655075 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.1986754966887417, + "acc_stderr": 0.03257847384436775, + "acc_norm": 0.1986754966887417, + "acc_norm_stderr": 0.03257847384436775 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.24378109452736318, + "acc_stderr": 0.030360490154014645, + "acc_norm": 0.24378109452736318, + "acc_norm_stderr": 0.030360490154014645 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.20809248554913296, + "acc_stderr": 0.030952890217749884, + "acc_norm": 0.20809248554913296, + "acc_norm_stderr": 0.030952890217749884 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.20899470899470898, + "acc_stderr": 0.020940481565334835, + "acc_norm": 0.20899470899470898, + "acc_norm_stderr": 0.020940481565334835 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2569444444444444, + "acc_stderr": 0.03653946969442099, + "acc_norm": 0.2569444444444444, + "acc_norm_stderr": 0.03653946969442099 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.2, + "acc_stderr": 0.040201512610368445, + "acc_norm": 0.2, + "acc_norm_stderr": 0.040201512610368445 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542129, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542129 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.24855491329479767, + "acc_stderr": 0.023267528432100174, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 0.023267528432100174 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.22085889570552147, + "acc_stderr": 0.03259177392742178, + "acc_norm": 0.22085889570552147, + 
"acc_norm_stderr": 0.03259177392742178 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.21604938271604937, + "acc_stderr": 0.022899162918445796, + "acc_norm": 0.21604938271604937, + "acc_norm_stderr": 0.022899162918445796 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.19689119170984457, + "acc_stderr": 0.028697873971860677, + "acc_norm": 0.19689119170984457, + "acc_norm_stderr": 0.028697873971860677 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.039994238792813365, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.039994238792813365 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.1926605504587156, + "acc_stderr": 0.016909276884936097, + "acc_norm": 0.1926605504587156, + "acc_norm_stderr": 0.016909276884936097 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.04040610178208841, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.04040610178208841 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.02392915551735129, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.02392915551735129 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2396694214876033, + "acc_stderr": 0.038968789850704164, + "acc_norm": 0.2396694214876033, + "acc_norm_stderr": 0.038968789850704164 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.17763157894736842, + "acc_stderr": 0.03110318238312338, + "acc_norm": 0.17763157894736842, + "acc_norm_stderr": 0.03110318238312338 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.25, + "acc_stderr": 0.01751781884501444, + 
"acc_norm": 0.25, + "acc_norm_stderr": 0.01751781884501444 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.23404255319148937, + "acc_stderr": 0.025257861359432414, + "acc_norm": 0.23404255319148937, + "acc_norm_stderr": 0.025257861359432414 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3125, + "acc_stderr": 0.043994650575715215, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.043994650575715215 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.1527777777777778, + "acc_stderr": 0.024536326026134238, + "acc_norm": 0.1527777777777778, + "acc_norm_stderr": 0.024536326026134238 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23798882681564246, + "acc_stderr": 0.014242630070574892, + "acc_norm": 0.23798882681564246, + "acc_norm_stderr": 0.014242630070574892 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.18382352941176472, + "acc_stderr": 0.02352924218519311, + "acc_norm": 0.18382352941176472, + "acc_norm_stderr": 0.02352924218519311 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.18775510204081633, + "acc_stderr": 0.025000256039546198, + "acc_norm": 0.18775510204081633, + "acc_norm_stderr": 0.025000256039546198 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.270042194092827, + "acc_stderr": 0.028900721906293426, + "acc_norm": 0.270042194092827, + "acc_norm_stderr": 0.028900721906293426 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2457627118644068, + "acc_stderr": 0.010996156635142692, + "acc_norm": 0.2457627118644068, + "acc_norm_stderr": 0.010996156635142692 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 
0.24509803921568626, + "acc_stderr": 0.030190282453501947, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.030190282453501947 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.24848484848484848, + "acc_stderr": 0.03374402644139405, + "acc_norm": 0.24848484848484848, + "acc_norm_stderr": 0.03374402644139405 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24357405140758873, + "mc1_stderr": 0.015026354824910782, + "mc2": 0.49959860935358086, + "mc2_stderr": 0.017034078108243818 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.24793388429752067, + "acc_stderr": 0.014846044968252247, + "acc_norm": 0.24793388429752067, + "acc_norm_stderr": 0.014846044968252247 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + 
"harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "genne/otter3.1.3.0", + "model_sha": "93f7d6bc33181cf6e444a0695e15c4a8859511d0", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/genne/otter3.1.3/result_2023-11-10 01:36:13.json b/genne/otter3.1.3/result_2023-11-10 01:36:13.json new file mode 100644 index 0000000000000000000000000000000000000000..4164e886b263742151e3a376a7ed8365de58aa23 --- /dev/null +++ b/genne/otter3.1.3/result_2023-11-10 
01:36:13.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.34215017064846415, + "acc_stderr": 0.01386415215917728, + "acc_norm": 0.40102389078498296, + "acc_norm_stderr": 0.014322255790719867 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3975303724357698, + "acc_stderr": 0.004883871774350596, + "acc_norm": 0.522903804023103, + "acc_norm_stderr": 0.004984543540932336 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.034462962170884265, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.034462962170884265 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.22330097087378642, + "acc_stderr": 0.04123553189891431, + "acc_norm": 0.22330097087378642, + "acc_norm_stderr": 0.04123553189891431 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3524904214559387, + "acc_stderr": 0.01708415024408137, + "acc_norm": 0.3524904214559387, + "acc_norm_stderr": 0.01708415024408137 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34074074074074073, + "acc_stderr": 0.040943762699967946, + "acc_norm": 0.34074074074074073, + "acc_norm_stderr": 0.040943762699967946 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3446808510638298, + "acc_stderr": 0.03106898596312215, + "acc_norm": 0.3446808510638298, + "acc_norm_stderr": 0.03106898596312215 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3072289156626506, + "acc_stderr": 0.035915667978246635, + "acc_norm": 0.3072289156626506, + "acc_norm_stderr": 0.035915667978246635 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3311897106109325, + "acc_stderr": 0.026730620728004917, + "acc_norm": 0.3311897106109325, + "acc_norm_stderr": 0.026730620728004917 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3273542600896861, + "acc_stderr": 0.03149384670994131, + "acc_norm": 
0.3273542600896861, + "acc_norm_stderr": 0.03149384670994131 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2595419847328244, + "acc_stderr": 0.03844876139785271, + "acc_norm": 0.2595419847328244, + "acc_norm_stderr": 0.03844876139785271 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909282, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909282 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.26262626262626265, + "acc_stderr": 0.031353050095330834, + "acc_norm": 0.26262626262626265, + "acc_norm_stderr": 0.031353050095330834 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.32413793103448274, + "acc_stderr": 0.03900432069185553, + "acc_norm": 0.32413793103448274, + "acc_norm_stderr": 0.03900432069185553 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.17647058823529413, + "acc_stderr": 0.037932811853078084, + "acc_norm": 0.17647058823529413, + "acc_norm_stderr": 0.037932811853078084 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.23949579831932774, + "acc_stderr": 0.027722065493361266, + "acc_norm": 0.23949579831932774, + "acc_norm_stderr": 0.027722065493361266 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2358974358974359, + "acc_stderr": 0.021525965407408726, + "acc_norm": 0.2358974358974359, + "acc_norm_stderr": 0.021525965407408726 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.044531975073749834, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.044531975073749834 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2660098522167488, + "acc_stderr": 
0.03108982600293752, + "acc_norm": 0.2660098522167488, + "acc_norm_stderr": 0.03108982600293752 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3, + "acc_stderr": 0.02606936229533513, + "acc_norm": 0.3, + "acc_norm_stderr": 0.02606936229533513 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.34615384615384615, + "acc_stderr": 0.0311669573672359, + "acc_norm": 0.34615384615384615, + "acc_norm_stderr": 0.0311669573672359 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.30943396226415093, + "acc_stderr": 0.028450154794118627, + "acc_norm": 0.30943396226415093, + "acc_norm_stderr": 0.028450154794118627 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.32727272727272727, + "acc_stderr": 0.04494290866252089, + "acc_norm": 0.32727272727272727, + "acc_norm_stderr": 0.04494290866252089 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.23333333333333334, + "acc_stderr": 0.025787874220959312, + "acc_norm": 0.23333333333333334, + "acc_norm_stderr": 0.025787874220959312 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.271523178807947, + "acc_stderr": 0.03631329803969653, + "acc_norm": 0.271523178807947, + "acc_norm_stderr": 0.03631329803969653 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.2537313432835821, + "acc_stderr": 0.030769444967296024, + "acc_norm": 0.2537313432835821, + "acc_norm_stderr": 0.030769444967296024 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.24277456647398843, + "acc_stderr": 0.0326926380614177, + "acc_norm": 0.24277456647398843, + "acc_norm_stderr": 0.0326926380614177 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.24074074074074073, + "acc_stderr": 0.022019080012217904, + "acc_norm": 0.24074074074074073, + "acc_norm_stderr": 0.022019080012217904 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.19444444444444445, + "acc_stderr": 0.03309615177059006, + "acc_norm": 0.19444444444444445, + "acc_norm_stderr": 0.03309615177059006 + }, + 
"harness|ko_mmlu_college_chemistry|5": { + "acc": 0.22, + "acc_stderr": 0.041633319989322695, + "acc_norm": 0.22, + "acc_norm_stderr": 0.041633319989322695 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2774566473988439, + "acc_stderr": 0.024105712607754307, + "acc_norm": 0.2774566473988439, + "acc_norm_stderr": 0.024105712607754307 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.25153374233128833, + "acc_stderr": 0.03408997886857529, + "acc_norm": 0.25153374233128833, + "acc_norm_stderr": 0.03408997886857529 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.33024691358024694, + "acc_stderr": 0.026168298456732846, + "acc_norm": 0.33024691358024694, + "acc_norm_stderr": 0.026168298456732846 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.29015544041450775, + "acc_stderr": 0.03275264467791515, + "acc_norm": 0.29015544041450775, + "acc_norm_stderr": 0.03275264467791515 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.04227054451232199, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.04227054451232199 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.30458715596330277, + "acc_stderr": 0.019732299420354038, + "acc_norm": 0.30458715596330277, + "acc_norm_stderr": 0.019732299420354038 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.15873015873015872, + "acc_stderr": 0.03268454013011745, + "acc_norm": 0.15873015873015872, + "acc_norm_stderr": 0.03268454013011745 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2581699346405229, + "acc_stderr": 0.025058503316958154, + "acc_norm": 0.2581699346405229, + "acc_norm_stderr": 
0.025058503316958154 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.34710743801652894, + "acc_stderr": 0.04345724570292534, + "acc_norm": 0.34710743801652894, + "acc_norm_stderr": 0.04345724570292534 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.29605263157894735, + "acc_stderr": 0.03715062154998904, + "acc_norm": 0.29605263157894735, + "acc_norm_stderr": 0.03715062154998904 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.27941176470588236, + "acc_stderr": 0.018152871051538812, + "acc_norm": 0.27941176470588236, + "acc_norm_stderr": 0.018152871051538812 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3049645390070922, + "acc_stderr": 0.027464708442022128, + "acc_norm": 0.3049645390070922, + "acc_norm_stderr": 0.027464708442022128 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.29464285714285715, + "acc_stderr": 0.043270409325787296, + "acc_norm": 0.29464285714285715, + "acc_norm_stderr": 0.043270409325787296 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.18055555555555555, + "acc_stderr": 0.02623287897149166, + "acc_norm": 0.18055555555555555, + "acc_norm_stderr": 0.02623287897149166 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.22, + "acc_stderr": 0.041633319989322695, + "acc_norm": 0.22, + "acc_norm_stderr": 0.041633319989322695 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.22794117647058823, + "acc_stderr": 0.025483081468029804, + 
"acc_norm": 0.22794117647058823, + "acc_norm_stderr": 0.025483081468029804 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.22448979591836735, + "acc_stderr": 0.026711430555538415, + "acc_norm": 0.22448979591836735, + "acc_norm_stderr": 0.026711430555538415 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.3037974683544304, + "acc_stderr": 0.029936696387138605, + "acc_norm": 0.3037974683544304, + "acc_norm_stderr": 0.029936696387138605 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.29986962190352023, + "acc_stderr": 0.011702660860193975, + "acc_norm": 0.29986962190352023, + "acc_norm_stderr": 0.011702660860193975 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.031321798030832904, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.031321798030832904 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3212121212121212, + "acc_stderr": 0.03646204963253813, + "acc_norm": 0.3212121212121212, + "acc_norm_stderr": 0.03646204963253813 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2215422276621787, + "mc1_stderr": 0.014537867601301139, + "mc2": 0.37108434422532566, + "mc2_stderr": 0.014734273077370515 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.24321133412042503, + "acc_stderr": 0.014750068360453266, + "acc_norm": 0.3435655253837072, + "acc_norm_stderr": 0.016327334806429138 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, 
+ "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + 
"harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "genne/otter3.1.3", + "model_sha": "10886a4299e7060b16236abf0c743599af49ce1e", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/genne/otter3.1.4_7b/result_2023-11-12 23:54:42.json b/genne/otter3.1.4_7b/result_2023-11-12 23:54:42.json new file mode 100644 index 0000000000000000000000000000000000000000..3caeeb824d688956ac03fc11e10ca5627b9abfac --- /dev/null +++ b/genne/otter3.1.4_7b/result_2023-11-12 23:54:42.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2568259385665529, + "acc_stderr": 0.0127669237941168, + "acc_norm": 0.31569965870307165, + "acc_norm_stderr": 0.01358257109581529 + }, + "harness|ko_hellaswag|10": { + "acc": 0.34554869547898825, + "acc_stderr": 0.0047457495387523176, + "acc_norm": 0.4391555467038439, + "acc_norm_stderr": 0.004952698802275645 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.36257309941520466, + "acc_stderr": 0.036871306155620606, + "acc_norm": 0.36257309941520466, + "acc_norm_stderr": 0.036871306155620606 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.24271844660194175, + "acc_stderr": 0.04245022486384495, + "acc_norm": 0.24271844660194175, + "acc_norm_stderr": 0.04245022486384495 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3767560664112388, + "acc_stderr": 0.01732829290730305, + "acc_norm": 0.3767560664112388, + "acc_norm_stderr": 0.01732829290730305 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.31851851851851853, + "acc_stderr": 0.040247784019771096, + "acc_norm": 0.31851851851851853, + "acc_norm_stderr": 0.040247784019771096 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.22, + "acc_stderr": 0.0416333199893227, + "acc_norm": 0.22, + 
"acc_norm_stderr": 0.0416333199893227 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.33617021276595743, + "acc_stderr": 0.030881618520676942, + "acc_norm": 0.33617021276595743, + "acc_norm_stderr": 0.030881618520676942 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.30120481927710846, + "acc_stderr": 0.0357160923005348, + "acc_norm": 0.30120481927710846, + "acc_norm_stderr": 0.0357160923005348 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.34726688102893893, + "acc_stderr": 0.027040745502307336, + "acc_norm": 0.34726688102893893, + "acc_norm_stderr": 0.027040745502307336 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.33183856502242154, + "acc_stderr": 0.03160295143776679, + "acc_norm": 0.33183856502242154, + "acc_norm_stderr": 0.03160295143776679 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3282442748091603, + "acc_stderr": 0.041184385658062976, + "acc_norm": 0.3282442748091603, + "acc_norm_stderr": 0.041184385658062976 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.21717171717171718, + "acc_stderr": 0.029376616484945637, + "acc_norm": 0.21717171717171718, + "acc_norm_stderr": 0.029376616484945637 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.30344827586206896, + "acc_stderr": 0.038312260488503336, + "acc_norm": 0.30344827586206896, + "acc_norm_stderr": 0.038312260488503336 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.04280105837364395, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.04280105837364395 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.23949579831932774, + "acc_stderr": 0.027722065493361245, + "acc_norm": 0.23949579831932774, + "acc_norm_stderr": 0.027722065493361245 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 
0.258974358974359, + "acc_stderr": 0.022211106810061675, + "acc_norm": 0.258974358974359, + "acc_norm_stderr": 0.022211106810061675 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036846, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036846 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.044531975073749834, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.044531975073749834 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.24630541871921183, + "acc_stderr": 0.030315099285617715, + "acc_norm": 0.24630541871921183, + "acc_norm_stderr": 0.030315099285617715 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2838709677419355, + "acc_stderr": 0.025649381063029254, + "acc_norm": 0.2838709677419355, + "acc_norm_stderr": 0.025649381063029254 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.36752136752136755, + "acc_stderr": 0.031585391577456365, + "acc_norm": 0.36752136752136755, + "acc_norm_stderr": 0.031585391577456365 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3018867924528302, + "acc_stderr": 0.02825420034443866, + "acc_norm": 0.3018867924528302, + "acc_norm_stderr": 0.02825420034443866 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.3090909090909091, + "acc_stderr": 0.044262946482000985, + "acc_norm": 0.3090909090909091, + "acc_norm_stderr": 0.044262946482000985 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2074074074074074, + "acc_stderr": 0.024720713193952158, + "acc_norm": 0.2074074074074074, + "acc_norm_stderr": 0.024720713193952158 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.1986754966887417, + "acc_stderr": 0.032578473844367774, + "acc_norm": 0.1986754966887417, + "acc_norm_stderr": 0.032578473844367774 + }, + 
"harness|ko_mmlu_sociology|5": { + "acc": 0.36318407960199006, + "acc_stderr": 0.034005985055990146, + "acc_norm": 0.36318407960199006, + "acc_norm_stderr": 0.034005985055990146 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.24855491329479767, + "acc_stderr": 0.03295304696818318, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 0.03295304696818318 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.24074074074074073, + "acc_stderr": 0.022019080012217883, + "acc_norm": 0.24074074074074073, + "acc_norm_stderr": 0.022019080012217883 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.19444444444444445, + "acc_stderr": 0.03309615177059006, + "acc_norm": 0.19444444444444445, + "acc_norm_stderr": 0.03309615177059006 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036845, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036845 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.30346820809248554, + "acc_stderr": 0.024752411960917205, + "acc_norm": 0.30346820809248554, + "acc_norm_stderr": 0.024752411960917205 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2883435582822086, + "acc_stderr": 0.035590395316173425, + "acc_norm": 0.2883435582822086, + "acc_norm_stderr": 0.035590395316173425 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.30864197530864196, + "acc_stderr": 0.025702640260603753, + "acc_norm": 0.30864197530864196, + "acc_norm_stderr": 0.025702640260603753 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.25906735751295334, + "acc_stderr": 0.031618779179354115, + "acc_norm": 0.25906735751295334, + "acc_norm_stderr": 
0.031618779179354115 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2894736842105263, + "acc_stderr": 0.04266339443159394, + "acc_norm": 0.2894736842105263, + "acc_norm_stderr": 0.04266339443159394 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.28440366972477066, + "acc_stderr": 0.019342036587702578, + "acc_norm": 0.28440366972477066, + "acc_norm_stderr": 0.019342036587702578 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.12698412698412698, + "acc_stderr": 0.02978041752268843, + "acc_norm": 0.12698412698412698, + "acc_norm_stderr": 0.02978041752268843 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.31699346405228757, + "acc_stderr": 0.02664327847450875, + "acc_norm": 0.31699346405228757, + "acc_norm_stderr": 0.02664327847450875 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206824, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206824 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.38016528925619836, + "acc_stderr": 0.04431324501968432, + "acc_norm": 0.38016528925619836, + "acc_norm_stderr": 0.04431324501968432 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.03583496176361063, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.03583496176361063 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3022875816993464, + "acc_stderr": 0.018579232711113877, + "acc_norm": 0.3022875816993464, + "acc_norm_stderr": 0.018579232711113877 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2907801418439716, + "acc_stderr": 0.027090664368353178, + "acc_norm": 0.2907801418439716, + "acc_norm_stderr": 0.027090664368353178 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.26785714285714285, + "acc_stderr": 0.04203277291467762, + "acc_norm": 0.26785714285714285, + "acc_norm_stderr": 0.04203277291467762 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.18518518518518517, + "acc_stderr": 
0.026491914727355143, + "acc_norm": 0.18518518518518517, + "acc_norm_stderr": 0.026491914727355143 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.17, + "acc_stderr": 0.0377525168068637, + "acc_norm": 0.17, + "acc_norm_stderr": 0.0377525168068637 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816508, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816508 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.22426470588235295, + "acc_stderr": 0.02533684856333237, + "acc_norm": 0.22426470588235295, + "acc_norm_stderr": 0.02533684856333237 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2693877551020408, + "acc_stderr": 0.02840125202902294, + "acc_norm": 0.2693877551020408, + "acc_norm_stderr": 0.02840125202902294 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.350210970464135, + "acc_stderr": 0.031052391937584353, + "acc_norm": 0.350210970464135, + "acc_norm_stderr": 0.031052391937584353 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2588005215123859, + "acc_stderr": 0.011186109046564611, + "acc_norm": 0.2588005215123859, + "acc_norm_stderr": 0.011186109046564611 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.030964517926923393, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.030964517926923393 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03225078108306289, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03225078108306289 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2484700122399021, + "mc1_stderr": 0.015127427096520688, + "mc2": 0.5016118824314271, + "mc2_stderr": 0.016997655364904746 + }, + "harness|ko_commongen_v2|2": 
{ + "acc": 0.2762691853600944, + "acc_stderr": 0.015373387500464469, + "acc_norm": 0.36835891381345925, + "acc_norm_stderr": 0.016583858982639074 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + 
"harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "genne/otter3.1.4_7b", + "model_sha": "40cfc23e4b84a0cd42d11320942985c48f75ed56", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/genne/otter3.1.4n_7B/result_2023-11-13 02:39:47.json b/genne/otter3.1.4n_7B/result_2023-11-13 02:39:47.json new file mode 100644 index 0000000000000000000000000000000000000000..fdb99f20c319c072d8816f43257d96eaba7c12bb --- /dev/null +++ b/genne/otter3.1.4n_7B/result_2023-11-13 02:39:47.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3412969283276451, + "acc_stderr": 0.01385583128749772, + "acc_norm": 0.4069965870307167, + "acc_norm_stderr": 0.01435639941800913 + }, + "harness|ko_hellaswag|10": { + "acc": 0.39762995419239194, + "acc_stderr": 0.00488407975043389, + "acc_norm": 0.5258912567217686, + "acc_norm_stderr": 0.004983087049281747 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.3216374269005848, + "acc_stderr": 0.03582529442573122, + "acc_norm": 
0.3216374269005848, + "acc_norm_stderr": 0.03582529442573122 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.23300970873786409, + "acc_stderr": 0.04185832598928315, + "acc_norm": 0.23300970873786409, + "acc_norm_stderr": 0.04185832598928315 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.34227330779054915, + "acc_stderr": 0.016967031766413624, + "acc_norm": 0.34227330779054915, + "acc_norm_stderr": 0.016967031766413624 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.04171654161354544, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.04171654161354544 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621503, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621503 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.35319148936170214, + "acc_stderr": 0.031245325202761926, + "acc_norm": 0.35319148936170214, + "acc_norm_stderr": 0.031245325202761926 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.30120481927710846, + "acc_stderr": 0.035716092300534796, + "acc_norm": 0.30120481927710846, + "acc_norm_stderr": 0.035716092300534796 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3729903536977492, + "acc_stderr": 0.027466610213140116, + "acc_norm": 0.3729903536977492, + "acc_norm_stderr": 0.027466610213140116 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.34080717488789236, + "acc_stderr": 0.0318114974705536, + "acc_norm": 0.34080717488789236, + "acc_norm_stderr": 0.0318114974705536 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.31297709923664124, + "acc_stderr": 0.04066962905677697, + "acc_norm": 0.31297709923664124, + "acc_norm_stderr": 0.04066962905677697 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.30303030303030304, + "acc_stderr": 0.03274287914026868, + 
"acc_norm": 0.30303030303030304, + "acc_norm_stderr": 0.03274287914026868 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.30344827586206896, + "acc_stderr": 0.038312260488503336, + "acc_norm": 0.30344827586206896, + "acc_norm_stderr": 0.038312260488503336 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.18627450980392157, + "acc_stderr": 0.03873958714149353, + "acc_norm": 0.18627450980392157, + "acc_norm_stderr": 0.03873958714149353 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.22268907563025211, + "acc_stderr": 0.027025433498882385, + "acc_norm": 0.22268907563025211, + "acc_norm_stderr": 0.027025433498882385 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.22564102564102564, + "acc_stderr": 0.02119363252514854, + "acc_norm": 0.22564102564102564, + "acc_norm_stderr": 0.02119363252514854 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.25, + "acc_stderr": 0.04186091791394607, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04186091791394607 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.270935960591133, + "acc_stderr": 0.03127090713297698, + "acc_norm": 0.270935960591133, + "acc_norm_stderr": 0.03127090713297698 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2645161290322581, + "acc_stderr": 0.02509189237885928, + "acc_norm": 0.2645161290322581, + "acc_norm_stderr": 0.02509189237885928 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.32051282051282054, + "acc_stderr": 0.030572811310299607, + "acc_norm": 0.32051282051282054, + "acc_norm_stderr": 0.030572811310299607 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2981132075471698, + "acc_stderr": 
0.02815283794249386, + "acc_norm": 0.2981132075471698, + "acc_norm_stderr": 0.02815283794249386 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.37272727272727274, + "acc_stderr": 0.04631381319425463, + "acc_norm": 0.37272727272727274, + "acc_norm_stderr": 0.04631381319425463 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.026719240783712166, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.026719240783712166 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.271523178807947, + "acc_stderr": 0.03631329803969653, + "acc_norm": 0.271523178807947, + "acc_norm_stderr": 0.03631329803969653 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.2935323383084577, + "acc_stderr": 0.03220024104534205, + "acc_norm": 0.2935323383084577, + "acc_norm_stderr": 0.03220024104534205 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.24855491329479767, + "acc_stderr": 0.03295304696818318, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 0.03295304696818318 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.25396825396825395, + "acc_stderr": 0.022418042891113946, + "acc_norm": 0.25396825396825395, + "acc_norm_stderr": 0.022418042891113946 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.22916666666666666, + "acc_stderr": 0.035146974678623884, + "acc_norm": 0.22916666666666666, + "acc_norm_stderr": 0.035146974678623884 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.22, + "acc_stderr": 0.041633319989322695, + "acc_norm": 0.22, + "acc_norm_stderr": 0.041633319989322695 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.28901734104046245, + "acc_stderr": 0.024405173935783234, + "acc_norm": 0.28901734104046245, + "acc_norm_stderr": 0.024405173935783234 + }, + "harness|ko_mmlu_logical_fallacies|5": { + 
"acc": 0.27607361963190186, + "acc_stderr": 0.0351238528370505, + "acc_norm": 0.27607361963190186, + "acc_norm_stderr": 0.0351238528370505 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2839506172839506, + "acc_stderr": 0.025089478523765134, + "acc_norm": 0.2839506172839506, + "acc_norm_stderr": 0.025089478523765134 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.26424870466321243, + "acc_stderr": 0.031821550509166484, + "acc_norm": 0.26424870466321243, + "acc_norm_stderr": 0.031821550509166484 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.04185774424022056, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.04185774424022056 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3229357798165138, + "acc_stderr": 0.020048115923415325, + "acc_norm": 0.3229357798165138, + "acc_norm_stderr": 0.020048115923415325 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.19047619047619047, + "acc_stderr": 0.035122074123020534, + "acc_norm": 0.19047619047619047, + "acc_norm_stderr": 0.035122074123020534 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.24836601307189543, + "acc_stderr": 0.02473998135511359, + "acc_norm": 0.24836601307189543, + "acc_norm_stderr": 0.02473998135511359 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.4297520661157025, + "acc_stderr": 0.04519082021319773, + "acc_norm": 0.4297520661157025, + "acc_norm_stderr": 0.04519082021319773 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.29605263157894735, + "acc_stderr": 0.03715062154998904, + "acc_norm": 0.29605263157894735, + "acc_norm_stderr": 0.03715062154998904 + }, + 
"harness|ko_mmlu_professional_psychology|5": { + "acc": 0.28921568627450983, + "acc_stderr": 0.018342529845275908, + "acc_norm": 0.28921568627450983, + "acc_norm_stderr": 0.018342529845275908 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2695035460992908, + "acc_stderr": 0.026469036818590627, + "acc_norm": 0.2695035460992908, + "acc_norm_stderr": 0.026469036818590627 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.26785714285714285, + "acc_stderr": 0.0420327729146776, + "acc_norm": 0.26785714285714285, + "acc_norm_stderr": 0.0420327729146776 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2175925925925926, + "acc_stderr": 0.028139689444859676, + "acc_norm": 0.2175925925925926, + "acc_norm_stderr": 0.028139689444859676 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.22058823529411764, + "acc_stderr": 0.02518778666022726, + "acc_norm": 0.22058823529411764, + "acc_norm_stderr": 0.02518778666022726 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2571428571428571, + "acc_stderr": 0.027979823538744543, + "acc_norm": 0.2571428571428571, + "acc_norm_stderr": 0.027979823538744543 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.37130801687763715, + "acc_stderr": 0.03145068600744859, + "acc_norm": 0.37130801687763715, + "acc_norm_stderr": 0.03145068600744859 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.29791395045632335, + "acc_stderr": 0.011680717340400035, + 
"acc_norm": 0.29791395045632335, + "acc_norm_stderr": 0.011680717340400035 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.27941176470588236, + "acc_stderr": 0.031493281045079556, + "acc_norm": 0.27941176470588236, + "acc_norm_stderr": 0.031493281045079556 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3212121212121212, + "acc_stderr": 0.036462049632538115, + "acc_norm": 0.3212121212121212, + "acc_norm_stderr": 0.036462049632538115 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2350061199510404, + "mc1_stderr": 0.014843061507731618, + "mc2": 0.38613822493621114, + "mc2_stderr": 0.014932812065930626 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.22668240850059032, + "acc_stderr": 0.014394701800505885, + "acc_norm": 0.33766233766233766, + "acc_norm_stderr": 0.016259075784754964 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + 
"harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "genne/otter3.1.4n_7B", + "model_sha": "d4c3b71520aa665560a6ebc06068d3b94da09dc1", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/genne/otter3.1.4n_7b/result_2023-11-12 23:45:56.json b/genne/otter3.1.4n_7b/result_2023-11-12 
23:45:56.json new file mode 100644 index 0000000000000000000000000000000000000000..5ce235e363c49eb712ffd8cc50e3c6ae57aa37f5 --- /dev/null +++ b/genne/otter3.1.4n_7b/result_2023-11-12 23:45:56.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3412969283276451, + "acc_stderr": 0.01385583128749772, + "acc_norm": 0.4069965870307167, + "acc_norm_stderr": 0.01435639941800913 + }, + "harness|ko_hellaswag|10": { + "acc": 0.39762995419239194, + "acc_stderr": 0.00488407975043389, + "acc_norm": 0.5259908384783908, + "acc_norm_stderr": 0.00498303542023571 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.3216374269005848, + "acc_stderr": 0.03582529442573122, + "acc_norm": 0.3216374269005848, + "acc_norm_stderr": 0.03582529442573122 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.23300970873786409, + "acc_stderr": 0.04185832598928315, + "acc_norm": 0.23300970873786409, + "acc_norm_stderr": 0.04185832598928315 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.34355044699872284, + "acc_stderr": 0.016982145632652466, + "acc_norm": 0.34355044699872284, + "acc_norm_stderr": 0.016982145632652466 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.04171654161354544, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.04171654161354544 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621503, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621503 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3574468085106383, + "acc_stderr": 0.03132941789476425, + "acc_norm": 0.3574468085106383, + "acc_norm_stderr": 0.03132941789476425 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.30120481927710846, + "acc_stderr": 0.035716092300534796, + "acc_norm": 0.30120481927710846, + "acc_norm_stderr": 0.035716092300534796 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3729903536977492, + "acc_stderr": 0.027466610213140116, + "acc_norm": 
0.3729903536977492, + "acc_norm_stderr": 0.027466610213140116 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.34080717488789236, + "acc_stderr": 0.0318114974705536, + "acc_norm": 0.34080717488789236, + "acc_norm_stderr": 0.0318114974705536 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.31297709923664124, + "acc_stderr": 0.04066962905677697, + "acc_norm": 0.31297709923664124, + "acc_norm_stderr": 0.04066962905677697 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.30303030303030304, + "acc_stderr": 0.03274287914026868, + "acc_norm": 0.30303030303030304, + "acc_norm_stderr": 0.03274287914026868 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.30344827586206896, + "acc_stderr": 0.038312260488503336, + "acc_norm": 0.30344827586206896, + "acc_norm_stderr": 0.038312260488503336 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.18627450980392157, + "acc_stderr": 0.03873958714149353, + "acc_norm": 0.18627450980392157, + "acc_norm_stderr": 0.03873958714149353 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.22268907563025211, + "acc_stderr": 0.027025433498882385, + "acc_norm": 0.22268907563025211, + "acc_norm_stderr": 0.027025433498882385 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.22564102564102564, + "acc_stderr": 0.02119363252514854, + "acc_norm": 0.22564102564102564, + "acc_norm_stderr": 0.02119363252514854 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.25, + "acc_stderr": 0.04186091791394607, + 
"acc_norm": 0.25, + "acc_norm_stderr": 0.04186091791394607 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.270935960591133, + "acc_stderr": 0.03127090713297698, + "acc_norm": 0.270935960591133, + "acc_norm_stderr": 0.03127090713297698 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2645161290322581, + "acc_stderr": 0.02509189237885928, + "acc_norm": 0.2645161290322581, + "acc_norm_stderr": 0.02509189237885928 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.32051282051282054, + "acc_stderr": 0.030572811310299607, + "acc_norm": 0.32051282051282054, + "acc_norm_stderr": 0.030572811310299607 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2981132075471698, + "acc_stderr": 0.02815283794249386, + "acc_norm": 0.2981132075471698, + "acc_norm_stderr": 0.02815283794249386 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.37272727272727274, + "acc_stderr": 0.04631381319425463, + "acc_norm": 0.37272727272727274, + "acc_norm_stderr": 0.04631381319425463 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.026719240783712166, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.026719240783712166 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2781456953642384, + "acc_stderr": 0.03658603262763743, + "acc_norm": 0.2781456953642384, + "acc_norm_stderr": 0.03658603262763743 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.2935323383084577, + "acc_stderr": 0.03220024104534205, + "acc_norm": 0.2935323383084577, + "acc_norm_stderr": 0.03220024104534205 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.24277456647398843, + "acc_stderr": 0.0326926380614177, + "acc_norm": 0.24277456647398843, + "acc_norm_stderr": 0.0326926380614177 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.25396825396825395, + "acc_stderr": 0.022418042891113946, + "acc_norm": 0.25396825396825395, + "acc_norm_stderr": 0.022418042891113946 + }, + 
"harness|ko_mmlu_college_biology|5": { + "acc": 0.22916666666666666, + "acc_stderr": 0.035146974678623884, + "acc_norm": 0.22916666666666666, + "acc_norm_stderr": 0.035146974678623884 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.22, + "acc_stderr": 0.041633319989322695, + "acc_norm": 0.22, + "acc_norm_stderr": 0.041633319989322695 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2861271676300578, + "acc_stderr": 0.02433214677913413, + "acc_norm": 0.2861271676300578, + "acc_norm_stderr": 0.02433214677913413 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.27607361963190186, + "acc_stderr": 0.0351238528370505, + "acc_norm": 0.27607361963190186, + "acc_norm_stderr": 0.0351238528370505 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2839506172839506, + "acc_stderr": 0.025089478523765134, + "acc_norm": 0.2839506172839506, + "acc_norm_stderr": 0.025089478523765134 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.26424870466321243, + "acc_stderr": 0.031821550509166484, + "acc_norm": 0.26424870466321243, + "acc_norm_stderr": 0.031821550509166484 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.04185774424022056, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.04185774424022056 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3247706422018349, + "acc_stderr": 0.02007772910931032, + "acc_norm": 0.3247706422018349, + "acc_norm_stderr": 0.02007772910931032 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.19047619047619047, + "acc_stderr": 0.035122074123020534, + "acc_norm": 0.19047619047619047, + "acc_norm_stderr": 
0.035122074123020534 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.24836601307189543, + "acc_stderr": 0.02473998135511359, + "acc_norm": 0.24836601307189543, + "acc_norm_stderr": 0.02473998135511359 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.4297520661157025, + "acc_stderr": 0.04519082021319773, + "acc_norm": 0.4297520661157025, + "acc_norm_stderr": 0.04519082021319773 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.29605263157894735, + "acc_stderr": 0.03715062154998904, + "acc_norm": 0.29605263157894735, + "acc_norm_stderr": 0.03715062154998904 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.28921568627450983, + "acc_stderr": 0.018342529845275908, + "acc_norm": 0.28921568627450983, + "acc_norm_stderr": 0.018342529845275908 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2695035460992908, + "acc_stderr": 0.026469036818590627, + "acc_norm": 0.2695035460992908, + "acc_norm_stderr": 0.026469036818590627 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.26785714285714285, + "acc_stderr": 0.0420327729146776, + "acc_norm": 0.26785714285714285, + "acc_norm_stderr": 0.0420327729146776 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.028353212866863445, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.028353212866863445 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, 
+ "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.21691176470588236, + "acc_stderr": 0.02503584522771126, + "acc_norm": 0.21691176470588236, + "acc_norm_stderr": 0.02503584522771126 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2530612244897959, + "acc_stderr": 0.02783302387139968, + "acc_norm": 0.2530612244897959, + "acc_norm_stderr": 0.02783302387139968 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.37130801687763715, + "acc_stderr": 0.03145068600744859, + "acc_norm": 0.37130801687763715, + "acc_norm_stderr": 0.03145068600744859 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2966101694915254, + "acc_stderr": 0.011665946586082847, + "acc_norm": 0.2966101694915254, + "acc_norm_stderr": 0.011665946586082847 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.27941176470588236, + "acc_stderr": 0.031493281045079556, + "acc_norm": 0.27941176470588236, + "acc_norm_stderr": 0.031493281045079556 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3212121212121212, + "acc_stderr": 0.036462049632538115, + "acc_norm": 0.3212121212121212, + "acc_norm_stderr": 0.036462049632538115 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2350061199510404, + "mc1_stderr": 0.014843061507731618, + "mc2": 0.386120854227026, + "mc2_stderr": 0.014932757143046258 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.22668240850059032, + "acc_stderr": 0.014394701800505885, + "acc_norm": 0.33766233766233766, + "acc_norm_stderr": 0.016259075784754964 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + 
"harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + 
"harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "genne/otter3.1.4n_7b", + "model_sha": "d4c3b71520aa665560a6ebc06068d3b94da09dc1", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/genne/otter3.1.6n_13b/result_2023-11-20 00:19:47.json b/genne/otter3.1.6n_13b/result_2023-11-20 00:19:47.json new file mode 100644 index 0000000000000000000000000000000000000000..22834e63e5b47744a7e9b5d3fc509eb515f26157 --- /dev/null +++ b/genne/otter3.1.6n_13b/result_2023-11-20 00:19:47.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.36689419795221845, + "acc_stderr": 0.014084133118104296, + "acc_norm": 0.4189419795221843, + "acc_norm_stderr": 0.01441810695363901 + }, + "harness|ko_hellaswag|10": { + "acc": 0.39892451702848036, + "acc_stderr": 0.004886764243204049, + "acc_norm": 0.5270862378012349, + "acc_norm_stderr": 0.004982454383162064 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.03811079669833531, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.03811079669833531 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.39805825242718446, + "acc_stderr": 0.04846748253977239, + "acc_norm": 0.39805825242718446, + "acc_norm_stderr": 0.04846748253977239 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.48659003831417624, + "acc_stderr": 0.017873531736510392, + "acc_norm": 0.48659003831417624, + "acc_norm_stderr": 0.017873531736510392 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4888888888888889, + "acc_stderr": 0.04318275491977976, + 
"acc_norm": 0.4888888888888889, + "acc_norm_stderr": 0.04318275491977976 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4127659574468085, + "acc_stderr": 0.03218471141400352, + "acc_norm": 0.4127659574468085, + "acc_norm_stderr": 0.03218471141400352 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42771084337349397, + "acc_stderr": 0.038515976837185335, + "acc_norm": 0.42771084337349397, + "acc_norm_stderr": 0.038515976837185335 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4662379421221865, + "acc_stderr": 0.02833327710956278, + "acc_norm": 0.4662379421221865, + "acc_norm_stderr": 0.02833327710956278 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4484304932735426, + "acc_stderr": 0.03337883736255098, + "acc_norm": 0.4484304932735426, + "acc_norm_stderr": 0.03337883736255098 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.42748091603053434, + "acc_stderr": 0.043389203057924, + "acc_norm": 0.42748091603053434, + "acc_norm_stderr": 0.043389203057924 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.4595959595959596, + "acc_stderr": 0.035507024651313425, + "acc_norm": 0.4595959595959596, + "acc_norm_stderr": 0.035507024651313425 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.35172413793103446, + "acc_stderr": 0.03979236637497411, + "acc_norm": 0.35172413793103446, + "acc_norm_stderr": 0.03979236637497411 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.044405219061793275, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.044405219061793275 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3739495798319328, + "acc_stderr": 
0.031429466378837076, + "acc_norm": 0.3739495798319328, + "acc_norm_stderr": 0.031429466378837076 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3923076923076923, + "acc_stderr": 0.024756000382130945, + "acc_norm": 0.3923076923076923, + "acc_norm_stderr": 0.024756000382130945 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.04766075165356461, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.04766075165356461 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3842364532019704, + "acc_stderr": 0.0342239856565755, + "acc_norm": 0.3842364532019704, + "acc_norm_stderr": 0.0342239856565755 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3580645161290323, + "acc_stderr": 0.02727389059430065, + "acc_norm": 0.3580645161290323, + "acc_norm_stderr": 0.02727389059430065 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.594017094017094, + "acc_stderr": 0.03217180182641086, + "acc_norm": 0.594017094017094, + "acc_norm_stderr": 0.03217180182641086 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.41509433962264153, + "acc_stderr": 0.030325945789286105, + "acc_norm": 0.41509433962264153, + "acc_norm_stderr": 0.030325945789286105 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.42727272727272725, + "acc_stderr": 0.04738198703545483, + "acc_norm": 0.42727272727272725, + "acc_norm_stderr": 0.04738198703545483 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.027940457136228416, + "acc_norm": 0.3, + "acc_norm_stderr": 0.027940457136228416 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.26490066225165565, + 
"acc_stderr": 0.03603038545360383, + "acc_norm": 0.26490066225165565, + "acc_norm_stderr": 0.03603038545360383 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5074626865671642, + "acc_stderr": 0.035351400842767194, + "acc_norm": 0.5074626865671642, + "acc_norm_stderr": 0.035351400842767194 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3468208092485549, + "acc_stderr": 0.03629146670159665, + "acc_norm": 0.3468208092485549, + "acc_norm_stderr": 0.03629146670159665 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30687830687830686, + "acc_stderr": 0.023752928712112126, + "acc_norm": 0.30687830687830686, + "acc_norm_stderr": 0.023752928712112126 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.375, + "acc_stderr": 0.04048439222695598, + "acc_norm": 0.375, + "acc_norm_stderr": 0.04048439222695598 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.430635838150289, + "acc_stderr": 0.026658800273672376, + "acc_norm": 0.430635838150289, + "acc_norm_stderr": 0.026658800273672376 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2822085889570552, + "acc_stderr": 0.03536117886664743, + "acc_norm": 0.2822085889570552, + "acc_norm_stderr": 0.03536117886664743 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.41975308641975306, + "acc_stderr": 0.027460099557005138, + "acc_norm": 0.41975308641975306, + "acc_norm_stderr": 0.027460099557005138 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.37823834196891193, + "acc_stderr": 
0.03499807276193339, + "acc_norm": 0.37823834196891193, + "acc_norm_stderr": 0.03499807276193339 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.21052631578947367, + "acc_stderr": 0.038351539543994194, + "acc_norm": 0.21052631578947367, + "acc_norm_stderr": 0.038351539543994194 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.48073394495412847, + "acc_stderr": 0.02142140298254889, + "acc_norm": 0.48073394495412847, + "acc_norm_stderr": 0.02142140298254889 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.21428571428571427, + "acc_stderr": 0.03670066451047182, + "acc_norm": 0.21428571428571427, + "acc_norm_stderr": 0.03670066451047182 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3790849673202614, + "acc_stderr": 0.027780141207023355, + "acc_norm": 0.3790849673202614, + "acc_norm_stderr": 0.027780141207023355 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5702479338842975, + "acc_stderr": 0.045190820213197744, + "acc_norm": 0.5702479338842975, + "acc_norm_stderr": 0.045190820213197744 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3092105263157895, + "acc_stderr": 0.037610708698674805, + "acc_norm": 0.3092105263157895, + "acc_norm_stderr": 0.037610708698674805 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3790849673202614, + "acc_stderr": 0.01962744474841224, + "acc_norm": 0.3790849673202614, + "acc_norm_stderr": 0.01962744474841224 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.29432624113475175, + "acc_stderr": 0.027187127011503796, + "acc_norm": 0.29432624113475175, + "acc_norm_stderr": 0.027187127011503796 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.39285714285714285, + "acc_stderr": 0.04635550135609976, + "acc_norm": 0.39285714285714285, + "acc_norm_stderr": 0.04635550135609976 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 
0.25462962962962965, + "acc_stderr": 0.029711275860005357, + "acc_norm": 0.25462962962962965, + "acc_norm_stderr": 0.029711275860005357 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939098, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939098 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.28308823529411764, + "acc_stderr": 0.02736586113151381, + "acc_norm": 0.28308823529411764, + "acc_norm_stderr": 0.02736586113151381 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.45714285714285713, + "acc_stderr": 0.03189141832421397, + "acc_norm": 0.45714285714285713, + "acc_norm_stderr": 0.03189141832421397 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.540084388185654, + "acc_stderr": 0.03244246810187913, + "acc_norm": 0.540084388185654, + "acc_norm_stderr": 0.03244246810187913 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2985658409387223, + "acc_stderr": 0.01168806014179422, + "acc_norm": 0.2985658409387223, + "acc_norm_stderr": 0.01168806014179422 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4215686274509804, + "acc_stderr": 0.03465868196380757, + "acc_norm": 0.4215686274509804, + "acc_norm_stderr": 0.03465868196380757 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4727272727272727, + "acc_stderr": 0.03898531605579419, + "acc_norm": 0.4727272727272727, + "acc_norm_stderr": 0.03898531605579419 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.25091799265605874, + "mc1_stderr": 0.015176985027707687, + "mc2": 0.40503923056987007, + "mc2_stderr": 0.01620658438032362 + 
}, + "harness|ko_commongen_v2|2": { + "acc": 0.31641086186540734, + "acc_stderr": 0.015989617951065474, + "acc_norm": 0.3754427390791027, + "acc_norm_stderr": 0.01664841158951109 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + 
"harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "genne/otter3.1.6n_13b", + "model_sha": "3b9d64a18215b8ef074d0aa79ad73bf273193068", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/genne/otter3.1.6n_13b_pre/result_2023-11-21 00:04:57.json b/genne/otter3.1.6n_13b_pre/result_2023-11-21 00:04:57.json new file mode 100644 index 0000000000000000000000000000000000000000..734b28d42bc4011f32b9196adfa9299c54d69753 --- /dev/null +++ b/genne/otter3.1.6n_13b_pre/result_2023-11-21 00:04:57.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4138225255972696, + "acc_stderr": 0.014392730009221007, + "acc_norm": 0.48293515358361777, + "acc_norm_stderr": 0.014602878388536598 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4402509460266879, + "acc_stderr": 0.004954026775425767, + "acc_norm": 0.5984863572993427, + "acc_norm_stderr": 0.004892026457294709 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 
0.4678362573099415, + "acc_stderr": 0.03826882417660369, + "acc_norm": 0.4678362573099415, + "acc_norm_stderr": 0.03826882417660369 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.46601941747572817, + "acc_stderr": 0.0493929144727348, + "acc_norm": 0.46601941747572817, + "acc_norm_stderr": 0.0493929144727348 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5108556832694764, + "acc_stderr": 0.01787574884024241, + "acc_norm": 0.5108556832694764, + "acc_norm_stderr": 0.01787574884024241 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.04292596718256981, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.04292596718256981 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421255, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421255 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4, + "acc_stderr": 0.03202563076101735, + "acc_norm": 0.4, + "acc_norm_stderr": 0.03202563076101735 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39156626506024095, + "acc_stderr": 0.03799857454479636, + "acc_norm": 0.39156626506024095, + "acc_norm_stderr": 0.03799857454479636 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4983922829581994, + "acc_stderr": 0.028397944907806612, + "acc_norm": 0.4983922829581994, + "acc_norm_stderr": 0.028397944907806612 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5022421524663677, + "acc_stderr": 0.033557465352232634, + "acc_norm": 0.5022421524663677, + "acc_norm_stderr": 0.033557465352232634 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.42748091603053434, + "acc_stderr": 0.04338920305792399, + "acc_norm": 0.42748091603053434, + "acc_norm_stderr": 0.04338920305792399 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5, + "acc_stderr": 
0.035623524993954825, + "acc_norm": 0.5, + "acc_norm_stderr": 0.035623524993954825 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3793103448275862, + "acc_stderr": 0.04043461861916747, + "acc_norm": 0.3793103448275862, + "acc_norm_stderr": 0.04043461861916747 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.042207736591714506, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.042207736591714506 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4117647058823529, + "acc_stderr": 0.03196876989195778, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.03196876989195778 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4205128205128205, + "acc_stderr": 0.025028610276710855, + "acc_norm": 0.4205128205128205, + "acc_norm_stderr": 0.025028610276710855 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.04832853553437056, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.04832853553437056 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3891625615763547, + "acc_stderr": 0.034304624161038716, + "acc_norm": 0.3891625615763547, + "acc_norm_stderr": 0.034304624161038716 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4290322580645161, + "acc_stderr": 0.02815603653823321, + "acc_norm": 0.4290322580645161, + "acc_norm_stderr": 0.02815603653823321 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6367521367521367, + "acc_stderr": 0.03150712523091264, + "acc_norm": 0.6367521367521367, + "acc_norm_stderr": 0.03150712523091264 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 
0.43018867924528303, + "acc_stderr": 0.030471445867183238, + "acc_norm": 0.43018867924528303, + "acc_norm_stderr": 0.030471445867183238 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.28888888888888886, + "acc_stderr": 0.027634907264178544, + "acc_norm": 0.28888888888888886, + "acc_norm_stderr": 0.027634907264178544 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.32450331125827814, + "acc_stderr": 0.03822746937658753, + "acc_norm": 0.32450331125827814, + "acc_norm_stderr": 0.03822746937658753 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5472636815920398, + "acc_stderr": 0.035197027175769155, + "acc_norm": 0.5472636815920398, + "acc_norm_stderr": 0.035197027175769155 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3699421965317919, + "acc_stderr": 0.03681229633394319, + "acc_norm": 0.3699421965317919, + "acc_norm_stderr": 0.03681229633394319 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30158730158730157, + "acc_stderr": 0.0236369759961018, + "acc_norm": 0.30158730158730157, + "acc_norm_stderr": 0.0236369759961018 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4027777777777778, + "acc_stderr": 0.04101405519842425, + "acc_norm": 0.4027777777777778, + "acc_norm_stderr": 0.04101405519842425 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.67, + "acc_stderr": 0.047258156262526066, + "acc_norm": 0.67, + "acc_norm_stderr": 0.047258156262526066 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.476878612716763, + "acc_stderr": 0.026890297881303125, + "acc_norm": 0.476878612716763, + "acc_norm_stderr": 0.026890297881303125 + }, + 
"harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3312883435582822, + "acc_stderr": 0.03697983910025588, + "acc_norm": 0.3312883435582822, + "acc_norm_stderr": 0.03697983910025588 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.027777777777777797, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.027777777777777797 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.45077720207253885, + "acc_stderr": 0.03590910952235524, + "acc_norm": 0.45077720207253885, + "acc_norm_stderr": 0.03590910952235524 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.040969851398436716, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.040969851398436716 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5174311926605505, + "acc_stderr": 0.021424291871853147, + "acc_norm": 0.5174311926605505, + "acc_norm_stderr": 0.021424291871853147 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.23809523809523808, + "acc_stderr": 0.03809523809523809, + "acc_norm": 0.23809523809523808, + "acc_norm_stderr": 0.03809523809523809 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4150326797385621, + "acc_stderr": 0.028213504177824093, + "acc_norm": 0.4150326797385621, + "acc_norm_stderr": 0.028213504177824093 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.44, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.44, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6033057851239669, + "acc_stderr": 0.044658697805310094, + "acc_norm": 0.6033057851239669, + "acc_norm_stderr": 0.044658697805310094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.39473684210526316, + "acc_stderr": 0.039777499346220734, + "acc_norm": 0.39473684210526316, + 
"acc_norm_stderr": 0.039777499346220734 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.019722058939618065, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.019722058939618065 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.30141843971631205, + "acc_stderr": 0.02737412888263115, + "acc_norm": 0.30141843971631205, + "acc_norm_stderr": 0.02737412888263115 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.30357142857142855, + "acc_stderr": 0.04364226155841044, + "acc_norm": 0.30357142857142855, + "acc_norm_stderr": 0.04364226155841044 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.22685185185185186, + "acc_stderr": 0.028561650102422283, + "acc_norm": 0.22685185185185186, + "acc_norm_stderr": 0.028561650102422283 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3235294117647059, + "acc_stderr": 0.028418208619406794, + "acc_norm": 0.3235294117647059, + "acc_norm_stderr": 0.028418208619406794 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.46122448979591835, + "acc_stderr": 0.03191282052669277, + "acc_norm": 0.46122448979591835, + "acc_norm_stderr": 0.03191282052669277 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6118143459915611, + "acc_stderr": 0.031722950043323296, + "acc_norm": 0.6118143459915611, + "acc_norm_stderr": 0.031722950043323296 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 
0.3076923076923077, + "acc_stderr": 0.011787910251664585, + "acc_norm": 0.3076923076923077, + "acc_norm_stderr": 0.011787910251664585 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.46078431372549017, + "acc_stderr": 0.03498501649369527, + "acc_norm": 0.46078431372549017, + "acc_norm_stderr": 0.03498501649369527 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5212121212121212, + "acc_stderr": 0.03900828913737302, + "acc_norm": 0.5212121212121212, + "acc_norm_stderr": 0.03900828913737302 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2594859241126071, + "mc1_stderr": 0.015345409485557961, + "mc2": 0.4153807034736448, + "mc2_stderr": 0.015135333189478775 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3105076741440378, + "acc_stderr": 0.01590800452876201, + "acc_norm": 0.4155844155844156, + "acc_norm_stderr": 0.016943586313076568 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + 
"harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "genne/otter3.1.6n_13b_pre", + "model_sha": "e9ec62fda916f9254d7fe6afe2f55404599a7fa1", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/ghost-x/ghost-7b-alpha/result_2024-04-23 
06:56:11.json b/ghost-x/ghost-7b-alpha/result_2024-04-23 06:56:11.json new file mode 100644 index 0000000000000000000000000000000000000000..dac9aa0533e6c0e0f216d58910d1a4ce8bda86aa --- /dev/null +++ b/ghost-x/ghost-7b-alpha/result_2024-04-23 06:56:11.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2525597269624573, + "acc_stderr": 0.012696728980207708, + "acc_norm": 0.30204778156996587, + "acc_norm_stderr": 0.013417519144716417 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3105954989046007, + "acc_stderr": 0.004617917316181452, + "acc_norm": 0.36207926707827126, + "acc_norm_stderr": 0.004796193584930071 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.03615507630310935, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.03615507630310935 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.3300970873786408, + "acc_stderr": 0.04656147110012351, + "acc_norm": 0.3300970873786408, + "acc_norm_stderr": 0.04656147110012351 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3486590038314176, + "acc_stderr": 0.017041243143490932, + "acc_norm": 0.3486590038314176, + "acc_norm_stderr": 0.017041243143490932 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.362962962962963, + "acc_stderr": 0.04153948404742398, + "acc_norm": 0.362962962962963, + "acc_norm_stderr": 0.04153948404742398 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3021276595744681, + "acc_stderr": 0.030017554471880554, + "acc_norm": 0.3021276595744681, + "acc_norm_stderr": 0.030017554471880554 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3674698795180723, + "acc_stderr": 0.03753267402120574, + "acc_norm": 0.3674698795180723, + "acc_norm_stderr": 0.03753267402120574 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3504823151125402, + 
"acc_stderr": 0.027098652621301754, + "acc_norm": 0.3504823151125402, + "acc_norm_stderr": 0.027098652621301754 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.37668161434977576, + "acc_stderr": 0.03252113489929188, + "acc_norm": 0.37668161434977576, + "acc_norm_stderr": 0.03252113489929188 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3282442748091603, + "acc_stderr": 0.041184385658062976, + "acc_norm": 0.3282442748091603, + "acc_norm_stderr": 0.041184385658062976 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.36363636363636365, + "acc_stderr": 0.034273086529999344, + "acc_norm": 0.36363636363636365, + "acc_norm_stderr": 0.034273086529999344 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3793103448275862, + "acc_stderr": 0.04043461861916747, + "acc_norm": 0.3793103448275862, + "acc_norm_stderr": 0.04043461861916747 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.18627450980392157, + "acc_stderr": 0.03873958714149352, + "acc_norm": 0.18627450980392157, + "acc_norm_stderr": 0.03873958714149352 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.36554621848739494, + "acc_stderr": 0.03128217706368461, + "acc_norm": 0.36554621848739494, + "acc_norm_stderr": 0.03128217706368461 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.32564102564102565, + "acc_stderr": 0.02375966576741229, + "acc_norm": 0.32564102564102565, + "acc_norm_stderr": 0.02375966576741229 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 
0.4074074074074074, + "acc_stderr": 0.04750077341199986, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.04750077341199986 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.30049261083743845, + "acc_stderr": 0.03225799476233486, + "acc_norm": 0.30049261083743845, + "acc_norm_stderr": 0.03225799476233486 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.34838709677419355, + "acc_stderr": 0.027104826328100944, + "acc_norm": 0.34838709677419355, + "acc_norm_stderr": 0.027104826328100944 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5256410256410257, + "acc_stderr": 0.03271298896811159, + "acc_norm": 0.5256410256410257, + "acc_norm_stderr": 0.03271298896811159 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2792452830188679, + "acc_stderr": 0.02761116340239972, + "acc_norm": 0.2792452830188679, + "acc_norm_stderr": 0.02761116340239972 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.32727272727272727, + "acc_stderr": 0.04494290866252089, + "acc_norm": 0.32727272727272727, + "acc_norm_stderr": 0.04494290866252089 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.27037037037037037, + "acc_stderr": 0.027080372815145658, + "acc_norm": 0.27037037037037037, + "acc_norm_stderr": 0.027080372815145658 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2781456953642384, + "acc_stderr": 0.03658603262763743, + "acc_norm": 0.2781456953642384, + "acc_norm_stderr": 0.03658603262763743 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.48756218905472637, + "acc_stderr": 0.0353443984853958, + "acc_norm": 0.48756218905472637, + "acc_norm_stderr": 0.0353443984853958 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.30057803468208094, + "acc_stderr": 0.0349610148119118, + "acc_norm": 0.30057803468208094, + "acc_norm_stderr": 0.0349610148119118 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.31746031746031744, + "acc_stderr": 0.02397386199899207, + "acc_norm": 0.31746031746031744, + 
"acc_norm_stderr": 0.02397386199899207 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.03745554791462457, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.03745554791462457 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.3670520231213873, + "acc_stderr": 0.02595005433765407, + "acc_norm": 0.3670520231213873, + "acc_norm_stderr": 0.02595005433765407 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3558282208588957, + "acc_stderr": 0.03761521380046734, + "acc_norm": 0.3558282208588957, + "acc_norm_stderr": 0.03761521380046734 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3487654320987654, + "acc_stderr": 0.02651759772446501, + "acc_norm": 0.3487654320987654, + "acc_norm_stderr": 0.02651759772446501 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.2849740932642487, + "acc_stderr": 0.0325771407770966, + "acc_norm": 0.2849740932642487, + "acc_norm_stderr": 0.0325771407770966 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.04227054451232199, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.04227054451232199 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3100917431192661, + "acc_stderr": 0.01983084968443975, + "acc_norm": 0.3100917431192661, + "acc_norm_stderr": 0.01983084968443975 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.29365079365079366, + "acc_stderr": 0.04073524322147126, + "acc_norm": 0.29365079365079366, + 
"acc_norm_stderr": 0.04073524322147126 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3758169934640523, + "acc_stderr": 0.02773283435336394, + "acc_norm": 0.3758169934640523, + "acc_norm_stderr": 0.02773283435336394 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.4793388429752066, + "acc_stderr": 0.04560456086387235, + "acc_norm": 0.4793388429752066, + "acc_norm_stderr": 0.04560456086387235 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.28289473684210525, + "acc_stderr": 0.03665349695640766, + "acc_norm": 0.28289473684210525, + "acc_norm_stderr": 0.03665349695640766 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3235294117647059, + "acc_stderr": 0.01892608291608339, + "acc_norm": 0.3235294117647059, + "acc_norm_stderr": 0.01892608291608339 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32269503546099293, + "acc_stderr": 0.027889139300534795, + "acc_norm": 0.32269503546099293, + "acc_norm_stderr": 0.027889139300534795 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2767857142857143, + "acc_stderr": 0.04246624336697624, + "acc_norm": 0.2767857142857143, + "acc_norm_stderr": 0.04246624336697624 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2824074074074074, + "acc_stderr": 0.030701372111510923, + "acc_norm": 0.2824074074074074, + "acc_norm_stderr": 0.030701372111510923 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145633, + 
"acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.21691176470588236, + "acc_stderr": 0.02503584522771125, + "acc_norm": 0.21691176470588236, + "acc_norm_stderr": 0.02503584522771125 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4816326530612245, + "acc_stderr": 0.03198761546763125, + "acc_norm": 0.4816326530612245, + "acc_norm_stderr": 0.03198761546763125 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.4177215189873418, + "acc_stderr": 0.032103530322412685, + "acc_norm": 0.4177215189873418, + "acc_norm_stderr": 0.032103530322412685 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2894393741851369, + "acc_stderr": 0.01158265970221023, + "acc_norm": 0.2894393741851369, + "acc_norm_stderr": 0.01158265970221023 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.0313217980308329, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.0313217980308329 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.21212121212121213, + "acc_stderr": 0.03192271569548299, + "acc_norm": 0.21212121212121213, + "acc_norm_stderr": 0.03192271569548299 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2741738066095471, + "mc1_stderr": 0.015616518497219374, + "mc2": 0.4586426097870682, + "mc2_stderr": 0.016163062173112325 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.24793388429752067, + "acc_stderr": 0.014846044968252254, + "acc_norm": 0.29988193624557263, + "acc_norm_stderr": 0.015753447615429454 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + 
"harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + 
"harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "ghost-x/ghost-7b-alpha", + "model_sha": "6b721bb35bbf0d8d7c95d92ac92685b5a16c1609", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/gihong99/Myrrh_solar_10.7b_1.0/result_2024-04-10 10:49:51.json b/gihong99/Myrrh_solar_10.7b_1.0/result_2024-04-10 10:49:51.json new file mode 100644 index 0000000000000000000000000000000000000000..6131bd497d8d3776241f837e73603ac96bc4c63c --- /dev/null +++ b/gihong99/Myrrh_solar_10.7b_1.0/result_2024-04-10 10:49:51.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.734641638225256, + "acc_stderr": 0.012902554762313966, + "acc_norm": 0.7764505119453925, + "acc_norm_stderr": 0.012174896631202605 + }, + "harness|ko_hellaswag|10": { + "acc": 0.6904999004182434, + "acc_stderr": 0.004613427745209498, + "acc_norm": 0.795857398924517, + "acc_norm_stderr": 0.00402249921076073 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6900584795321637, + "acc_stderr": 0.035469769593931624, + "acc_norm": 0.6900584795321637, + "acc_norm_stderr": 0.035469769593931624 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.7572815533980582, + "acc_stderr": 0.04245022486384495, + "acc_norm": 0.7572815533980582, + "acc_norm_stderr": 0.04245022486384495 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6781609195402298, + "acc_stderr": 0.0167063814150579, + "acc_norm": 0.6781609195402298, + "acc_norm_stderr": 0.0167063814150579 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.48148148148148145, + "acc_stderr": 
0.043163785995113245, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.043163785995113245 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.48936170212765956, + "acc_stderr": 0.03267862331014063, + "acc_norm": 0.48936170212765956, + "acc_norm_stderr": 0.03267862331014063 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4759036144578313, + "acc_stderr": 0.03887971849597264, + "acc_norm": 0.4759036144578313, + "acc_norm_stderr": 0.03887971849597264 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6334405144694534, + "acc_stderr": 0.02736807824397163, + "acc_norm": 0.6334405144694534, + "acc_norm_stderr": 0.02736807824397163 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.672645739910314, + "acc_stderr": 0.03149384670994131, + "acc_norm": 0.672645739910314, + "acc_norm_stderr": 0.03149384670994131 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5648854961832062, + "acc_stderr": 0.04348208051644858, + "acc_norm": 0.5648854961832062, + "acc_norm_stderr": 0.04348208051644858 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.53, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.53, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7424242424242424, + "acc_stderr": 0.031156269519646847, + "acc_norm": 0.7424242424242424, + "acc_norm_stderr": 0.031156269519646847 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5586206896551724, + "acc_stderr": 0.04137931034482757, + "acc_norm": 0.5586206896551724, + "acc_norm_stderr": 0.04137931034482757 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3431372549019608, + "acc_stderr": 0.04724007352383888, + "acc_norm": 0.3431372549019608, + "acc_norm_stderr": 0.04724007352383888 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 
0.6176470588235294, + "acc_stderr": 0.031566630992154156, + "acc_norm": 0.6176470588235294, + "acc_norm_stderr": 0.031566630992154156 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.6256410256410256, + "acc_stderr": 0.024537591572830496, + "acc_norm": 0.6256410256410256, + "acc_norm_stderr": 0.024537591572830496 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.65, + "acc_stderr": 0.04793724854411021, + "acc_norm": 0.65, + "acc_norm_stderr": 0.04793724854411021 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6481481481481481, + "acc_stderr": 0.04616631111801714, + "acc_norm": 0.6481481481481481, + "acc_norm_stderr": 0.04616631111801714 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.43349753694581283, + "acc_stderr": 0.03486731727419872, + "acc_norm": 0.43349753694581283, + "acc_norm_stderr": 0.03486731727419872 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6193548387096774, + "acc_stderr": 0.027621717832907036, + "acc_norm": 0.6193548387096774, + "acc_norm_stderr": 0.027621717832907036 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.8247863247863247, + "acc_stderr": 0.024904439098918214, + "acc_norm": 0.8247863247863247, + "acc_norm_stderr": 0.024904439098918214 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5924528301886792, + "acc_stderr": 0.030242233800854494, + "acc_norm": 0.5924528301886792, + "acc_norm_stderr": 0.030242233800854494 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6181818181818182, + "acc_stderr": 0.04653429807913507, + "acc_norm": 0.6181818181818182, + "acc_norm_stderr": 0.04653429807913507 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.36666666666666664, + "acc_stderr": 0.029381620726465073, + "acc_norm": 0.36666666666666664, + "acc_norm_stderr": 0.029381620726465073 + }, + 
"harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3509933774834437, + "acc_stderr": 0.03896981964257375, + "acc_norm": 0.3509933774834437, + "acc_norm_stderr": 0.03896981964257375 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7213930348258707, + "acc_stderr": 0.031700561834973086, + "acc_norm": 0.7213930348258707, + "acc_norm_stderr": 0.031700561834973086 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5664739884393064, + "acc_stderr": 0.03778621079092055, + "acc_norm": 0.5664739884393064, + "acc_norm_stderr": 0.03778621079092055 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.4523809523809524, + "acc_stderr": 0.02563425811555496, + "acc_norm": 0.4523809523809524, + "acc_norm_stderr": 0.02563425811555496 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.625, + "acc_stderr": 0.04048439222695598, + "acc_norm": 0.625, + "acc_norm_stderr": 0.04048439222695598 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.76, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.76, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.6184971098265896, + "acc_stderr": 0.0261521986197268, + "acc_norm": 0.6184971098265896, + "acc_norm_stderr": 0.0261521986197268 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5828220858895705, + "acc_stderr": 0.038741028598180814, + "acc_norm": 0.5828220858895705, + "acc_norm_stderr": 0.038741028598180814 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6450617283950617, + "acc_stderr": 0.026624152478845853, + "acc_norm": 0.6450617283950617, + "acc_norm_stderr": 0.026624152478845853 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + 
"harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7616580310880829, + "acc_stderr": 0.030748905363909895, + "acc_norm": 0.7616580310880829, + "acc_norm_stderr": 0.030748905363909895 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.47368421052631576, + "acc_stderr": 0.046970851366478626, + "acc_norm": 0.47368421052631576, + "acc_norm_stderr": 0.046970851366478626 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.7321100917431193, + "acc_stderr": 0.018987462257978652, + "acc_norm": 0.7321100917431193, + "acc_norm_stderr": 0.018987462257978652 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.0442626668137991, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.0442626668137991 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5915032679738562, + "acc_stderr": 0.028146405993096358, + "acc_norm": 0.5915032679738562, + "acc_norm_stderr": 0.028146405993096358 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.73, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.73, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7355371900826446, + "acc_stderr": 0.04026187527591207, + "acc_norm": 0.7355371900826446, + "acc_norm_stderr": 0.04026187527591207 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.618421052631579, + "acc_stderr": 0.03953173377749194, + "acc_norm": 0.618421052631579, + "acc_norm_stderr": 0.03953173377749194 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5784313725490197, + "acc_stderr": 0.019977422600227477, + "acc_norm": 0.5784313725490197, + "acc_norm_stderr": 0.019977422600227477 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.4219858156028369, + "acc_stderr": 0.02946218923337059, + "acc_norm": 0.4219858156028369, + "acc_norm_stderr": 0.02946218923337059 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4375, + "acc_stderr": 0.04708567521880525, + "acc_norm": 0.4375, + 
"acc_norm_stderr": 0.04708567521880525 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.47685185185185186, + "acc_stderr": 0.034063153607115086, + "acc_norm": 0.47685185185185186, + "acc_norm_stderr": 0.034063153607115086 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.3016759776536313, + "acc_stderr": 0.015350767572220285, + "acc_norm": 0.3016759776536313, + "acc_norm_stderr": 0.015350767572220285 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.69, + "acc_stderr": 0.046482319871173156, + "acc_norm": 0.69, + "acc_norm_stderr": 0.046482319871173156 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5294117647058824, + "acc_stderr": 0.030320243265004137, + "acc_norm": 0.5294117647058824, + "acc_norm_stderr": 0.030320243265004137 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6653061224489796, + "acc_stderr": 0.030209235226242304, + "acc_norm": 0.6653061224489796, + "acc_norm_stderr": 0.030209235226242304 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.70042194092827, + "acc_stderr": 0.029818024749753095, + "acc_norm": 0.70042194092827, + "acc_norm_stderr": 0.029818024749753095 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.4517601043024772, + "acc_stderr": 0.012710662233660245, + "acc_norm": 0.4517601043024772, + "acc_norm_stderr": 0.012710662233660245 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6617647058823529, + "acc_stderr": 0.0332057461294543, + "acc_norm": 0.6617647058823529, + "acc_norm_stderr": 0.0332057461294543 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6484848484848484, + "acc_stderr": 0.037282069986826503, + "acc_norm": 0.6484848484848484, + "acc_norm_stderr": 0.037282069986826503 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.7539779681762546, 
+ "mc1_stderr": 0.015077219200662595, + "mc2": 0.81437767576557, + "mc2_stderr": 0.013311821841355344 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5076741440377804, + "acc_stderr": 0.017188329219654276, + "acc_norm": 0.5348288075560803, + "acc_norm_stderr": 0.017148598015747422 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + 
"harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "gihong99/Myrrh_solar_10.7b_1.0", + "model_sha": "8804dc71d5084371c9b1b4933822f80fda72123e", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/giprime/OOM-13B_01/result_2024-02-05 03:34:05.json b/giprime/OOM-13B_01/result_2024-02-05 03:34:05.json new file mode 100644 index 0000000000000000000000000000000000000000..23f0bc04d4e3083581df6d81c15ce8aea216e852 --- /dev/null +++ b/giprime/OOM-13B_01/result_2024-02-05 03:34:05.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3796928327645051, + "acc_stderr": 0.014182119866974874, + "acc_norm": 0.447098976109215, + "acc_norm_stderr": 0.014529380160526842 + }, + "harness|ko_hellaswag|10": { + "acc": 0.42332204740091617, + "acc_stderr": 0.004930757390897346, + "acc_norm": 0.5732921728739295, + "acc_norm_stderr": 
0.004935882666250483 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5730994152046783, + "acc_stderr": 0.03793620616529916, + "acc_norm": 0.5730994152046783, + "acc_norm_stderr": 0.03793620616529916 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5631067961165048, + "acc_stderr": 0.04911147107365777, + "acc_norm": 0.5631067961165048, + "acc_norm_stderr": 0.04911147107365777 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5389527458492975, + "acc_stderr": 0.017825621793239006, + "acc_norm": 0.5389527458492975, + "acc_norm_stderr": 0.017825621793239006 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.043163785995113245, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.043163785995113245 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3702127659574468, + "acc_stderr": 0.03156564682236784, + "acc_norm": 0.3702127659574468, + "acc_norm_stderr": 0.03156564682236784 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.41566265060240964, + "acc_stderr": 0.03836722176598052, + "acc_norm": 0.41566265060240964, + "acc_norm_stderr": 0.03836722176598052 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.49517684887459806, + "acc_stderr": 0.028396770444111288, + "acc_norm": 0.49517684887459806, + "acc_norm_stderr": 0.028396770444111288 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.45739910313901344, + "acc_stderr": 0.03343577705583065, + "acc_norm": 0.45739910313901344, + "acc_norm_stderr": 0.03343577705583065 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5877862595419847, + "acc_stderr": 0.043171711948702556, + "acc_norm": 0.5877862595419847, + "acc_norm_stderr": 0.043171711948702556 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956913, + "acc_norm": 0.49, + "acc_norm_stderr": 
0.05024183937956913 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5757575757575758, + "acc_stderr": 0.035212249088415866, + "acc_norm": 0.5757575757575758, + "acc_norm_stderr": 0.035212249088415866 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.43448275862068964, + "acc_stderr": 0.041307408795554966, + "acc_norm": 0.43448275862068964, + "acc_norm_stderr": 0.041307408795554966 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.04158307533083286, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.04158307533083286 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4327731092436975, + "acc_stderr": 0.03218358107742613, + "acc_norm": 0.4327731092436975, + "acc_norm_stderr": 0.03218358107742613 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.38974358974358975, + "acc_stderr": 0.024726967886647074, + "acc_norm": 0.38974358974358975, + "acc_norm_stderr": 0.024726967886647074 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5092592592592593, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.5092592592592593, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4039408866995074, + "acc_stderr": 0.0345245390382204, + "acc_norm": 0.4039408866995074, + "acc_norm_stderr": 0.0345245390382204 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.45483870967741935, + "acc_stderr": 0.028327743091561077, + "acc_norm": 0.45483870967741935, + "acc_norm_stderr": 0.028327743091561077 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6623931623931624, + "acc_stderr": 0.030980296992618558, 
+ "acc_norm": 0.6623931623931624, + "acc_norm_stderr": 0.030980296992618558 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4188679245283019, + "acc_stderr": 0.030365050829115208, + "acc_norm": 0.4188679245283019, + "acc_norm_stderr": 0.030365050829115208 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5363636363636364, + "acc_stderr": 0.04776449162396197, + "acc_norm": 0.5363636363636364, + "acc_norm_stderr": 0.04776449162396197 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.28888888888888886, + "acc_stderr": 0.027634907264178544, + "acc_norm": 0.28888888888888886, + "acc_norm_stderr": 0.027634907264178544 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33774834437086093, + "acc_stderr": 0.038615575462551684, + "acc_norm": 0.33774834437086093, + "acc_norm_stderr": 0.038615575462551684 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6169154228855721, + "acc_stderr": 0.034375193373382504, + "acc_norm": 0.6169154228855721, + "acc_norm_stderr": 0.034375193373382504 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3930635838150289, + "acc_stderr": 0.0372424959581773, + "acc_norm": 0.3930635838150289, + "acc_norm_stderr": 0.0372424959581773 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.29894179894179895, + "acc_stderr": 0.023577604791655816, + "acc_norm": 0.29894179894179895, + "acc_norm_stderr": 0.023577604791655816 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.04122728707651282, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.04122728707651282 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.64, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.64, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5028901734104047, 
+ "acc_stderr": 0.02691864538323901, + "acc_norm": 0.5028901734104047, + "acc_norm_stderr": 0.02691864538323901 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4294478527607362, + "acc_stderr": 0.03889066619112722, + "acc_norm": 0.4294478527607362, + "acc_norm_stderr": 0.03889066619112722 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4351851851851852, + "acc_stderr": 0.027586006221607715, + "acc_norm": 0.4351851851851852, + "acc_norm_stderr": 0.027586006221607715 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5492227979274611, + "acc_stderr": 0.035909109522355244, + "acc_norm": 0.5492227979274611, + "acc_norm_stderr": 0.035909109522355244 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.21929824561403508, + "acc_stderr": 0.03892431106518754, + "acc_norm": 0.21929824561403508, + "acc_norm_stderr": 0.03892431106518754 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5541284403669725, + "acc_stderr": 0.02131133500970858, + "acc_norm": 0.5541284403669725, + "acc_norm_stderr": 0.02131133500970858 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3253968253968254, + "acc_stderr": 0.041905964388711366, + "acc_norm": 0.3253968253968254, + "acc_norm_stderr": 0.041905964388711366 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.45751633986928103, + "acc_stderr": 0.028526383452142635, + "acc_norm": 0.45751633986928103, + "acc_norm_stderr": 0.028526383452142635 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5785123966942148, + "acc_stderr": 0.04507732278775087, + "acc_norm": 0.5785123966942148, + "acc_norm_stderr": 0.04507732278775087 + }, + "harness|ko_mmlu_astronomy|5": { + 
"acc": 0.4144736842105263, + "acc_stderr": 0.04008973785779205, + "acc_norm": 0.4144736842105263, + "acc_norm_stderr": 0.04008973785779205 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3758169934640523, + "acc_stderr": 0.019594021136577457, + "acc_norm": 0.3758169934640523, + "acc_norm_stderr": 0.019594021136577457 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3617021276595745, + "acc_stderr": 0.0286638201471995, + "acc_norm": 0.3617021276595745, + "acc_norm_stderr": 0.0286638201471995 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25, + "acc_stderr": 0.04109974682633932, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04109974682633932 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2916666666666667, + "acc_stderr": 0.03099866630456053, + "acc_norm": 0.2916666666666667, + "acc_norm_stderr": 0.03099866630456053 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.35661764705882354, + "acc_stderr": 0.029097209568411955, + "acc_norm": 0.35661764705882354, + "acc_norm_stderr": 0.029097209568411955 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5183673469387755, + "acc_stderr": 0.03198761546763128, + "acc_norm": 0.5183673469387755, + "acc_norm_stderr": 0.03198761546763128 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6497890295358649, + "acc_stderr": 0.031052391937584346, + "acc_norm": 0.6497890295358649, + "acc_norm_stderr": 0.031052391937584346 + }, 
+ "harness|ko_mmlu_professional_law|5": { + "acc": 0.3239895697522816, + "acc_stderr": 0.011952840809646573, + "acc_norm": 0.3239895697522816, + "acc_norm_stderr": 0.011952840809646573 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5245098039215687, + "acc_stderr": 0.03505093194348798, + "acc_norm": 0.5245098039215687, + "acc_norm_stderr": 0.03505093194348798 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.593939393939394, + "acc_stderr": 0.03834816355401181, + "acc_norm": 0.593939393939394, + "acc_norm_stderr": 0.03834816355401181 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24112607099143207, + "mc1_stderr": 0.014974827279752329, + "mc2": 0.3907945555137752, + "mc2_stderr": 0.01485923517857711 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3907910271546635, + "acc_stderr": 0.01677529846510826, + "acc_norm": 0.4817001180637544, + "acc_norm_stderr": 0.01717883663917775 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + 
"harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "giprime/OOM-13B_01", + "model_sha": "171ab542cd41b44ed2d6bd64fb8fad8e063756f0", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git 
a/giprime/OOM-13B_02/result_2024-02-14 01:52:55.json b/giprime/OOM-13B_02/result_2024-02-14 01:52:55.json new file mode 100644 index 0000000000000000000000000000000000000000..3f8b5b412550c8e60151003a79c112095abcb2a2 --- /dev/null +++ b/giprime/OOM-13B_02/result_2024-02-14 01:52:55.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.19965870307167236, + "acc_stderr": 0.01168162575688867, + "acc_norm": 0.26706484641638223, + "acc_norm_stderr": 0.012928933196496349 + }, + "harness|ko_hellaswag|10": { + "acc": 0.252141007767377, + "acc_stderr": 0.004333543083293472, + "acc_norm": 0.25124477195777734, + "acc_norm_stderr": 0.00432842570099869 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.3216374269005848, + "acc_stderr": 0.03582529442573122, + "acc_norm": 0.3216374269005848, + "acc_norm_stderr": 0.03582529442573122 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.17475728155339806, + "acc_stderr": 0.037601780060266196, + "acc_norm": 0.17475728155339806, + "acc_norm_stderr": 0.037601780060266196 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.23754789272030652, + "acc_stderr": 0.015218733046150191, + "acc_norm": 0.23754789272030652, + "acc_norm_stderr": 0.015218733046150191 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.18518518518518517, + "acc_stderr": 0.03355677216313142, + "acc_norm": 0.18518518518518517, + "acc_norm_stderr": 0.03355677216313142 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932268, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932268 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.26382978723404255, + "acc_stderr": 0.028809989854102987, + "acc_norm": 0.26382978723404255, + "acc_norm_stderr": 0.028809989854102987 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.28313253012048195, + "acc_stderr": 0.03507295431370518, + "acc_norm": 0.28313253012048195, + "acc_norm_stderr": 0.03507295431370518 + }, + "harness|ko_mmlu_philosophy|5": { 
+ "acc": 0.1864951768488746, + "acc_stderr": 0.022122439772480757, + "acc_norm": 0.1864951768488746, + "acc_norm_stderr": 0.022122439772480757 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.31390134529147984, + "acc_stderr": 0.031146796482972465, + "acc_norm": 0.31390134529147984, + "acc_norm_stderr": 0.031146796482972465 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2595419847328244, + "acc_stderr": 0.03844876139785271, + "acc_norm": 0.2595419847328244, + "acc_norm_stderr": 0.03844876139785271 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.17676767676767677, + "acc_stderr": 0.027178752639044915, + "acc_norm": 0.17676767676767677, + "acc_norm_stderr": 0.027178752639044915 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2413793103448276, + "acc_stderr": 0.03565998174135302, + "acc_norm": 0.2413793103448276, + "acc_norm_stderr": 0.03565998174135302 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237654, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237654 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.21008403361344538, + "acc_stderr": 0.026461398717471874, + "acc_norm": 0.21008403361344538, + "acc_norm_stderr": 0.026461398717471874 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.20256410256410257, + "acc_stderr": 0.020377660970371393, + "acc_norm": 0.20256410256410257, + "acc_norm_stderr": 0.020377660970371393 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.18, + "acc_stderr": 0.038612291966536955, + "acc_norm": 0.18, + "acc_norm_stderr": 0.038612291966536955 + }, + 
"harness|ko_mmlu_jurisprudence|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.04236511258094633, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.04236511258094633 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.15270935960591134, + "acc_stderr": 0.025308904539380627, + "acc_norm": 0.15270935960591134, + "acc_norm_stderr": 0.025308904539380627 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.1774193548387097, + "acc_stderr": 0.021732540689329276, + "acc_norm": 0.1774193548387097, + "acc_norm_stderr": 0.021732540689329276 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2905982905982906, + "acc_stderr": 0.029745048572674057, + "acc_norm": 0.2905982905982906, + "acc_norm_stderr": 0.029745048572674057 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.21509433962264152, + "acc_stderr": 0.025288394502891377, + "acc_norm": 0.21509433962264152, + "acc_norm_stderr": 0.025288394502891377 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03955932861795833, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03955932861795833 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2111111111111111, + "acc_stderr": 0.024882116857655075, + "acc_norm": 0.2111111111111111, + "acc_norm_stderr": 0.024882116857655075 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.1986754966887417, + "acc_stderr": 0.03257847384436775, + "acc_norm": 0.1986754966887417, + "acc_norm_stderr": 0.03257847384436775 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.24378109452736318, + "acc_stderr": 0.030360490154014645, + "acc_norm": 0.24378109452736318, + "acc_norm_stderr": 0.030360490154014645 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.20809248554913296, + "acc_stderr": 0.030952890217749884, + "acc_norm": 0.20809248554913296, + "acc_norm_stderr": 0.030952890217749884 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.20899470899470898, + 
"acc_stderr": 0.020940481565334835, + "acc_norm": 0.20899470899470898, + "acc_norm_stderr": 0.020940481565334835 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2569444444444444, + "acc_stderr": 0.03653946969442099, + "acc_norm": 0.2569444444444444, + "acc_norm_stderr": 0.03653946969442099 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.2, + "acc_stderr": 0.040201512610368445, + "acc_norm": 0.2, + "acc_norm_stderr": 0.040201512610368445 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542129, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542129 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.24855491329479767, + "acc_stderr": 0.023267528432100174, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 0.023267528432100174 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.22085889570552147, + "acc_stderr": 0.03259177392742178, + "acc_norm": 0.22085889570552147, + "acc_norm_stderr": 0.03259177392742178 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.21604938271604937, + "acc_stderr": 0.022899162918445796, + "acc_norm": 0.21604938271604937, + "acc_norm_stderr": 0.022899162918445796 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.19689119170984457, + "acc_stderr": 0.028697873971860677, + "acc_norm": 0.19689119170984457, + "acc_norm_stderr": 0.028697873971860677 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.039994238792813365, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.039994238792813365 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.1926605504587156, + "acc_stderr": 0.016909276884936097, + "acc_norm": 0.1926605504587156, + "acc_norm_stderr": 0.016909276884936097 + }, + "harness|ko_mmlu_formal_logic|5": { + 
"acc": 0.2857142857142857, + "acc_stderr": 0.04040610178208841, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.04040610178208841 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.02392915551735129, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.02392915551735129 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2396694214876033, + "acc_stderr": 0.038968789850704164, + "acc_norm": 0.2396694214876033, + "acc_norm_stderr": 0.038968789850704164 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.17763157894736842, + "acc_stderr": 0.03110318238312338, + "acc_norm": 0.17763157894736842, + "acc_norm_stderr": 0.03110318238312338 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.25, + "acc_stderr": 0.01751781884501444, + "acc_norm": 0.25, + "acc_norm_stderr": 0.01751781884501444 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.23404255319148937, + "acc_stderr": 0.025257861359432414, + "acc_norm": 0.23404255319148937, + "acc_norm_stderr": 0.025257861359432414 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3125, + "acc_stderr": 0.043994650575715215, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.043994650575715215 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.1527777777777778, + "acc_stderr": 0.024536326026134238, + "acc_norm": 0.1527777777777778, + "acc_norm_stderr": 0.024536326026134238 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23798882681564246, + "acc_stderr": 0.014242630070574892, + "acc_norm": 0.23798882681564246, + "acc_norm_stderr": 0.014242630070574892 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_high_school_computer_science|5": { 
+ "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.18382352941176472, + "acc_stderr": 0.02352924218519311, + "acc_norm": 0.18382352941176472, + "acc_norm_stderr": 0.02352924218519311 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.18775510204081633, + "acc_stderr": 0.025000256039546198, + "acc_norm": 0.18775510204081633, + "acc_norm_stderr": 0.025000256039546198 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.270042194092827, + "acc_stderr": 0.028900721906293426, + "acc_norm": 0.270042194092827, + "acc_norm_stderr": 0.028900721906293426 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2457627118644068, + "acc_stderr": 0.010996156635142692, + "acc_norm": 0.2457627118644068, + "acc_norm_stderr": 0.010996156635142692 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.25, + "acc_stderr": 0.03039153369274154, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03039153369274154 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03225078108306289, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03225078108306289 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.25091799265605874, + "mc1_stderr": 0.015176985027707682, + "mc2": 0.5028255959149182, + "mc2_stderr": 0.016711422343385334 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.08264462809917356, + "acc_stderr": 0.00946653265971998, + "acc_norm": 0.4037780401416765, + "acc_norm_stderr": 0.01686903154029863 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + 
"harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 
1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "giprime/OOM-13B_02", + "model_sha": "1419bb32faa863c90d82e26ac37c5caeb2162c87", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/giprime/OOM-7B_01/result_2024-02-05 07:55:52.json b/giprime/OOM-7B_01/result_2024-02-05 07:55:52.json new file mode 100644 index 0000000000000000000000000000000000000000..e8997cca2a3e1636e7b581fec6a46d28b26ca85f --- /dev/null +++ b/giprime/OOM-7B_01/result_2024-02-05 07:55:52.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3370307167235495, + "acc_stderr": 0.013813476652902276, + "acc_norm": 0.4104095563139932, + "acc_norm_stderr": 0.014374922192642666 + }, + "harness|ko_hellaswag|10": { + "acc": 0.38458474407488547, + "acc_stderr": 0.004855027248398164, + "acc_norm": 0.5021907986456882, + "acc_norm_stderr": 0.004989733513319115 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.391812865497076, + "acc_stderr": 0.03743979825926401, + "acc_norm": 0.391812865497076, + "acc_norm_stderr": 0.03743979825926401 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4854368932038835, + "acc_stderr": 0.04948637324026637, + "acc_norm": 0.4854368932038835, + "acc_norm_stderr": 0.04948637324026637 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4763729246487867, + "acc_stderr": 0.017859989765176453, + "acc_norm": 0.4763729246487867, + "acc_norm_stderr": 0.017859989765176453 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.04292596718256981, + "acc_norm": 
0.4444444444444444, + "acc_norm_stderr": 0.04292596718256981 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39148936170212767, + "acc_stderr": 0.03190701242326812, + "acc_norm": 0.39148936170212767, + "acc_norm_stderr": 0.03190701242326812 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3433734939759036, + "acc_stderr": 0.03696584317010602, + "acc_norm": 0.3433734939759036, + "acc_norm_stderr": 0.03696584317010602 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.42765273311897106, + "acc_stderr": 0.028099240775809567, + "acc_norm": 0.42765273311897106, + "acc_norm_stderr": 0.028099240775809567 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.43946188340807174, + "acc_stderr": 0.03331092511038179, + "acc_norm": 0.43946188340807174, + "acc_norm_stderr": 0.03331092511038179 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.46564885496183206, + "acc_stderr": 0.043749285605997376, + "acc_norm": 0.46564885496183206, + "acc_norm_stderr": 0.043749285605997376 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3838383838383838, + "acc_stderr": 0.03464881675016339, + "acc_norm": 0.3838383838383838, + "acc_norm_stderr": 0.03464881675016339 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3724137931034483, + "acc_stderr": 0.0402873153294756, + "acc_norm": 0.3724137931034483, + "acc_norm_stderr": 0.0402873153294756 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.1568627450980392, + "acc_stderr": 0.036186648199362445, + "acc_norm": 0.1568627450980392, + "acc_norm_stderr": 0.036186648199362445 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3865546218487395, + "acc_stderr": 
0.0316314580755238, + "acc_norm": 0.3865546218487395, + "acc_norm_stderr": 0.0316314580755238 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.37435897435897436, + "acc_stderr": 0.02453759157283052, + "acc_norm": 0.37435897435897436, + "acc_norm_stderr": 0.02453759157283052 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.41, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.41, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.04750077341199985, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.04750077341199985 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3399014778325123, + "acc_stderr": 0.033327690684107895, + "acc_norm": 0.3399014778325123, + "acc_norm_stderr": 0.033327690684107895 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3935483870967742, + "acc_stderr": 0.02779187875313227, + "acc_norm": 0.3935483870967742, + "acc_norm_stderr": 0.02779187875313227 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5213675213675214, + "acc_stderr": 0.032726164476349545, + "acc_norm": 0.5213675213675214, + "acc_norm_stderr": 0.032726164476349545 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.35471698113207545, + "acc_stderr": 0.029445175328199586, + "acc_norm": 0.35471698113207545, + "acc_norm_stderr": 0.029445175328199586 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.39090909090909093, + "acc_stderr": 0.04673752333670237, + "acc_norm": 0.39090909090909093, + "acc_norm_stderr": 0.04673752333670237 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2851851851851852, + "acc_stderr": 0.027528599210340492, + "acc_norm": 0.2851851851851852, + "acc_norm_stderr": 0.027528599210340492 + }, + "harness|ko_mmlu_high_school_physics|5": { + 
"acc": 0.304635761589404, + "acc_stderr": 0.03757949922943342, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.03757949922943342 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5422885572139303, + "acc_stderr": 0.03522865864099598, + "acc_norm": 0.5422885572139303, + "acc_norm_stderr": 0.03522865864099598 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3468208092485549, + "acc_stderr": 0.036291466701596636, + "acc_norm": 0.3468208092485549, + "acc_norm_stderr": 0.036291466701596636 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.022644212615525218, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.022644212615525218 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3402777777777778, + "acc_stderr": 0.03962135573486219, + "acc_norm": 0.3402777777777778, + "acc_norm_stderr": 0.03962135573486219 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.38439306358381503, + "acc_stderr": 0.026189666966272035, + "acc_norm": 0.38439306358381503, + "acc_norm_stderr": 0.026189666966272035 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3312883435582822, + "acc_stderr": 0.03697983910025588, + "acc_norm": 0.3312883435582822, + "acc_norm_stderr": 0.03697983910025588 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3950617283950617, + "acc_stderr": 0.027201117666925657, + "acc_norm": 0.3950617283950617, + "acc_norm_stderr": 0.027201117666925657 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + 
"harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.47668393782383417, + "acc_stderr": 0.03604513672442207, + "acc_norm": 0.47668393782383417, + "acc_norm_stderr": 0.03604513672442207 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2894736842105263, + "acc_stderr": 0.04266339443159394, + "acc_norm": 0.2894736842105263, + "acc_norm_stderr": 0.04266339443159394 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.41100917431192663, + "acc_stderr": 0.02109505068727765, + "acc_norm": 0.41100917431192663, + "acc_norm_stderr": 0.02109505068727765 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.25396825396825395, + "acc_stderr": 0.03893259610604672, + "acc_norm": 0.25396825396825395, + "acc_norm_stderr": 0.03893259610604672 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.37254901960784315, + "acc_stderr": 0.027684181883302898, + "acc_norm": 0.37254901960784315, + "acc_norm_stderr": 0.027684181883302898 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5619834710743802, + "acc_stderr": 0.045291468044357915, + "acc_norm": 0.5619834710743802, + "acc_norm_stderr": 0.045291468044357915 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3684210526315789, + "acc_stderr": 0.03925523381052932, + "acc_norm": 0.3684210526315789, + "acc_norm_stderr": 0.03925523381052932 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3382352941176471, + "acc_stderr": 0.019139943748487046, + "acc_norm": 0.3382352941176471, + "acc_norm_stderr": 0.019139943748487046 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3191489361702128, + "acc_stderr": 0.027807990141320203, + "acc_norm": 0.3191489361702128, + "acc_norm_stderr": 0.027807990141320203 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.29464285714285715, + "acc_stderr": 0.043270409325787296, + 
"acc_norm": 0.29464285714285715, + "acc_norm_stderr": 0.043270409325787296 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.03054674526495316, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.03054674526495316 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2435754189944134, + "acc_stderr": 0.014355911964767857, + "acc_norm": 0.2435754189944134, + "acc_norm_stderr": 0.014355911964767857 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.41, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.39338235294117646, + "acc_stderr": 0.02967428828131118, + "acc_norm": 0.39338235294117646, + "acc_norm_stderr": 0.02967428828131118 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.31020408163265306, + "acc_stderr": 0.029613459872484378, + "acc_norm": 0.31020408163265306, + "acc_norm_stderr": 0.029613459872484378 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5316455696202531, + "acc_stderr": 0.03248197400511076, + "acc_norm": 0.5316455696202531, + "acc_norm_stderr": 0.03248197400511076 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2842242503259452, + "acc_stderr": 0.011519880596516078, + "acc_norm": 0.2842242503259452, + "acc_norm_stderr": 0.011519880596516078 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.3872549019607843, + "acc_stderr": 0.03418931233833343, + "acc_norm": 0.3872549019607843, + "acc_norm_stderr": 0.03418931233833343 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.38181818181818183, + "acc_stderr": 0.037937131711656344, + "acc_norm": 0.38181818181818183, + "acc_norm_stderr": 0.037937131711656344 + }, + 
"harness|ko_truthfulqa_mc|0": { + "mc1": 0.23133414932680538, + "mc1_stderr": 0.014761945174862665, + "mc2": 0.3870557388285068, + "mc2_stderr": 0.014691267682088107 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.31995277449822906, + "acc_stderr": 0.016037153840280517, + "acc_norm": 0.41912632821723733, + "acc_norm_stderr": 0.01696399501086279 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "giprime/OOM-7B_01", + "model_sha": "2712f3f9deb5b7708401b233afdf8bd1ffde7c5d", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/giprime/OOM-7B_02/result_2024-02-14 01:52:46.json b/giprime/OOM-7B_02/result_2024-02-14 01:52:46.json new file mode 100644 index 0000000000000000000000000000000000000000..14be7a6f41f4ce1a130e4b9675c6e61c65b6f731 --- /dev/null +++ b/giprime/OOM-7B_02/result_2024-02-14 01:52:46.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.19795221843003413, + "acc_stderr": 0.011643990971573391, + "acc_norm": 0.26109215017064846, + "acc_norm_stderr": 0.012835523909473848 + }, + "harness|ko_hellaswag|10": { + "acc": 0.2526389165504879, + "acc_stderr": 0.004336375492801786, + "acc_norm": 
0.2517426807408883, + "acc_norm_stderr": 0.004331271717773863 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.3216374269005848, + "acc_stderr": 0.03582529442573122, + "acc_norm": 0.3216374269005848, + "acc_norm_stderr": 0.03582529442573122 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.17475728155339806, + "acc_stderr": 0.037601780060266196, + "acc_norm": 0.17475728155339806, + "acc_norm_stderr": 0.037601780060266196 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.23754789272030652, + "acc_stderr": 0.015218733046150191, + "acc_norm": 0.23754789272030652, + "acc_norm_stderr": 0.015218733046150191 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.18518518518518517, + "acc_stderr": 0.03355677216313142, + "acc_norm": 0.18518518518518517, + "acc_norm_stderr": 0.03355677216313142 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932268, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932268 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.26382978723404255, + "acc_stderr": 0.028809989854102987, + "acc_norm": 0.26382978723404255, + "acc_norm_stderr": 0.028809989854102987 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.28313253012048195, + "acc_stderr": 0.03507295431370518, + "acc_norm": 0.28313253012048195, + "acc_norm_stderr": 0.03507295431370518 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.1864951768488746, + "acc_stderr": 0.022122439772480757, + "acc_norm": 0.1864951768488746, + "acc_norm_stderr": 0.022122439772480757 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.31390134529147984, + "acc_stderr": 0.031146796482972465, + "acc_norm": 0.31390134529147984, + "acc_norm_stderr": 0.031146796482972465 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2595419847328244, + "acc_stderr": 0.03844876139785271, + "acc_norm": 0.2595419847328244, + "acc_norm_stderr": 0.03844876139785271 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + 
"acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.17676767676767677, + "acc_stderr": 0.027178752639044915, + "acc_norm": 0.17676767676767677, + "acc_norm_stderr": 0.027178752639044915 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2413793103448276, + "acc_stderr": 0.03565998174135302, + "acc_norm": 0.2413793103448276, + "acc_norm_stderr": 0.03565998174135302 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237654, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237654 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.21008403361344538, + "acc_stderr": 0.026461398717471874, + "acc_norm": 0.21008403361344538, + "acc_norm_stderr": 0.026461398717471874 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.20256410256410257, + "acc_stderr": 0.020377660970371393, + "acc_norm": 0.20256410256410257, + "acc_norm_stderr": 0.020377660970371393 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.18, + "acc_stderr": 0.038612291966536955, + "acc_norm": 0.18, + "acc_norm_stderr": 0.038612291966536955 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.04236511258094633, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.04236511258094633 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.15270935960591134, + "acc_stderr": 0.025308904539380627, + "acc_norm": 0.15270935960591134, + "acc_norm_stderr": 0.025308904539380627 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.1774193548387097, + "acc_stderr": 0.021732540689329276, + "acc_norm": 0.1774193548387097, + "acc_norm_stderr": 0.021732540689329276 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 
0.2905982905982906, + "acc_stderr": 0.029745048572674057, + "acc_norm": 0.2905982905982906, + "acc_norm_stderr": 0.029745048572674057 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.21509433962264152, + "acc_stderr": 0.025288394502891377, + "acc_norm": 0.21509433962264152, + "acc_norm_stderr": 0.025288394502891377 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03955932861795833, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03955932861795833 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2111111111111111, + "acc_stderr": 0.024882116857655075, + "acc_norm": 0.2111111111111111, + "acc_norm_stderr": 0.024882116857655075 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.1986754966887417, + "acc_stderr": 0.03257847384436775, + "acc_norm": 0.1986754966887417, + "acc_norm_stderr": 0.03257847384436775 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.24378109452736318, + "acc_stderr": 0.030360490154014645, + "acc_norm": 0.24378109452736318, + "acc_norm_stderr": 0.030360490154014645 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.20809248554913296, + "acc_stderr": 0.030952890217749884, + "acc_norm": 0.20809248554913296, + "acc_norm_stderr": 0.030952890217749884 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.20899470899470898, + "acc_stderr": 0.020940481565334835, + "acc_norm": 0.20899470899470898, + "acc_norm_stderr": 0.020940481565334835 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2569444444444444, + "acc_stderr": 0.03653946969442099, + "acc_norm": 0.2569444444444444, + "acc_norm_stderr": 0.03653946969442099 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.2, + "acc_stderr": 0.040201512610368445, + "acc_norm": 0.2, + "acc_norm_stderr": 0.040201512610368445 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542129, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542129 + }, + 
"harness|ko_mmlu_moral_disputes|5": { + "acc": 0.24855491329479767, + "acc_stderr": 0.023267528432100174, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 0.023267528432100174 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.22085889570552147, + "acc_stderr": 0.03259177392742178, + "acc_norm": 0.22085889570552147, + "acc_norm_stderr": 0.03259177392742178 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.21604938271604937, + "acc_stderr": 0.022899162918445796, + "acc_norm": 0.21604938271604937, + "acc_norm_stderr": 0.022899162918445796 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.19689119170984457, + "acc_stderr": 0.028697873971860677, + "acc_norm": 0.19689119170984457, + "acc_norm_stderr": 0.028697873971860677 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.039994238792813365, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.039994238792813365 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.1926605504587156, + "acc_stderr": 0.016909276884936097, + "acc_norm": 0.1926605504587156, + "acc_norm_stderr": 0.016909276884936097 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.04040610178208841, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.04040610178208841 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.02392915551735129, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.02392915551735129 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2396694214876033, + "acc_stderr": 0.038968789850704164, + "acc_norm": 0.2396694214876033, + 
"acc_norm_stderr": 0.038968789850704164 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.17763157894736842, + "acc_stderr": 0.03110318238312338, + "acc_norm": 0.17763157894736842, + "acc_norm_stderr": 0.03110318238312338 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.25, + "acc_stderr": 0.01751781884501444, + "acc_norm": 0.25, + "acc_norm_stderr": 0.01751781884501444 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.23404255319148937, + "acc_stderr": 0.025257861359432414, + "acc_norm": 0.23404255319148937, + "acc_norm_stderr": 0.025257861359432414 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3125, + "acc_stderr": 0.043994650575715215, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.043994650575715215 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.1527777777777778, + "acc_stderr": 0.024536326026134238, + "acc_norm": 0.1527777777777778, + "acc_norm_stderr": 0.024536326026134238 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23798882681564246, + "acc_stderr": 0.014242630070574892, + "acc_norm": 0.23798882681564246, + "acc_norm_stderr": 0.014242630070574892 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.18382352941176472, + "acc_stderr": 0.02352924218519311, + "acc_norm": 0.18382352941176472, + "acc_norm_stderr": 0.02352924218519311 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.18775510204081633, + "acc_stderr": 0.025000256039546198, + "acc_norm": 0.18775510204081633, + "acc_norm_stderr": 0.025000256039546198 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.270042194092827, + "acc_stderr": 0.028900721906293426, + 
"acc_norm": 0.270042194092827, + "acc_norm_stderr": 0.028900721906293426 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2457627118644068, + "acc_stderr": 0.010996156635142692, + "acc_norm": 0.2457627118644068, + "acc_norm_stderr": 0.010996156635142692 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.25, + "acc_stderr": 0.03039153369274154, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03039153369274154 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03225078108306289, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03225078108306289 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24479804161566707, + "mc1_stderr": 0.015051869486715014, + "mc2": 0.49773833195625355, + "mc2_stderr": 0.016694529553080546 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.07910271546635184, + "acc_stderr": 0.009279319126009069, + "acc_norm": 0.40731995277449823, + "acc_norm_stderr": 0.01689245669519127 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + 
"harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "giprime/OOM-7B_02", + "model_sha": "b4dbe04837742447ed64811b52ffb703efe1c095", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at 
end of file diff --git a/giprime/OOM-SOLAR-10.7B_01/result_2024-02-19 05:42:43.json b/giprime/OOM-SOLAR-10.7B_01/result_2024-02-19 05:42:43.json new file mode 100644 index 0000000000000000000000000000000000000000..c7251e586ce70cca68f1d7bd0f21ad643ef58371 --- /dev/null +++ b/giprime/OOM-SOLAR-10.7B_01/result_2024-02-19 05:42:43.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.20819112627986347, + "acc_stderr": 0.011864866118448066, + "acc_norm": 0.2627986348122867, + "acc_norm_stderr": 0.012862523175351333 + }, + "harness|ko_hellaswag|10": { + "acc": 0.2673770165305716, + "acc_stderr": 0.00441686191910099, + "acc_norm": 0.28360884285998805, + "acc_norm_stderr": 0.004498280244494494 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.3216374269005848, + "acc_stderr": 0.03582529442573122, + "acc_norm": 0.3216374269005848, + "acc_norm_stderr": 0.03582529442573122 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.17475728155339806, + "acc_stderr": 0.037601780060266196, + "acc_norm": 0.17475728155339806, + "acc_norm_stderr": 0.037601780060266196 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.23754789272030652, + "acc_stderr": 0.015218733046150191, + "acc_norm": 0.23754789272030652, + "acc_norm_stderr": 0.015218733046150191 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.18518518518518517, + "acc_stderr": 0.03355677216313142, + "acc_norm": 0.18518518518518517, + "acc_norm_stderr": 0.03355677216313142 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932268, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932268 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2723404255319149, + "acc_stderr": 0.029101290698386687, + "acc_norm": 0.2723404255319149, + "acc_norm_stderr": 0.029101290698386687 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.27710843373493976, + "acc_stderr": 0.03484331592680588, + "acc_norm": 0.27710843373493976, + "acc_norm_stderr": 
0.03484331592680588 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.1864951768488746, + "acc_stderr": 0.022122439772480757, + "acc_norm": 0.1864951768488746, + "acc_norm_stderr": 0.022122439772480757 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.32286995515695066, + "acc_stderr": 0.031381476375754995, + "acc_norm": 0.32286995515695066, + "acc_norm_stderr": 0.031381476375754995 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.24427480916030533, + "acc_stderr": 0.037683359597287434, + "acc_norm": 0.24427480916030533, + "acc_norm_stderr": 0.037683359597287434 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.17676767676767677, + "acc_stderr": 0.027178752639044915, + "acc_norm": 0.17676767676767677, + "acc_norm_stderr": 0.027178752639044915 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2413793103448276, + "acc_stderr": 0.03565998174135302, + "acc_norm": 0.2413793103448276, + "acc_norm_stderr": 0.03565998174135302 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237654, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237654 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.21008403361344538, + "acc_stderr": 0.026461398717471874, + "acc_norm": 0.21008403361344538, + "acc_norm_stderr": 0.026461398717471874 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.022421273612923714, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.022421273612923714 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.18, + "acc_stderr": 0.038612291966536955, + "acc_norm": 
0.18, + "acc_norm_stderr": 0.038612291966536955 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.04236511258094633, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.04236511258094633 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.1724137931034483, + "acc_stderr": 0.026577672183036576, + "acc_norm": 0.1724137931034483, + "acc_norm_stderr": 0.026577672183036576 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.1870967741935484, + "acc_stderr": 0.02218571009225225, + "acc_norm": 0.1870967741935484, + "acc_norm_stderr": 0.02218571009225225 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.29914529914529914, + "acc_stderr": 0.02999695185834948, + "acc_norm": 0.29914529914529914, + "acc_norm_stderr": 0.02999695185834948 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2037735849056604, + "acc_stderr": 0.02479078450177541, + "acc_norm": 0.2037735849056604, + "acc_norm_stderr": 0.02479078450177541 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.16363636363636364, + "acc_stderr": 0.03543433054298678, + "acc_norm": 0.16363636363636364, + "acc_norm_stderr": 0.03543433054298678 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2111111111111111, + "acc_stderr": 0.024882116857655075, + "acc_norm": 0.2111111111111111, + "acc_norm_stderr": 0.024882116857655075 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2052980132450331, + "acc_stderr": 0.03297986648473835, + "acc_norm": 0.2052980132450331, + "acc_norm_stderr": 0.03297986648473835 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.24378109452736318, + "acc_stderr": 0.03036049015401465, + "acc_norm": 0.24378109452736318, + "acc_norm_stderr": 0.03036049015401465 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.20809248554913296, + "acc_stderr": 0.030952890217749884, + "acc_norm": 0.20809248554913296, + "acc_norm_stderr": 0.030952890217749884 + }, + "harness|ko_mmlu_elementary_mathematics|5": { 
+ "acc": 0.20899470899470898, + "acc_stderr": 0.020940481565334835, + "acc_norm": 0.20899470899470898, + "acc_norm_stderr": 0.020940481565334835 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.22916666666666666, + "acc_stderr": 0.035146974678623884, + "acc_norm": 0.22916666666666666, + "acc_norm_stderr": 0.035146974678623884 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.2, + "acc_stderr": 0.040201512610368445, + "acc_norm": 0.2, + "acc_norm_stderr": 0.040201512610368445 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.24855491329479767, + "acc_stderr": 0.023267528432100174, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 0.023267528432100174 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2147239263803681, + "acc_stderr": 0.032262193772867744, + "acc_norm": 0.2147239263803681, + "acc_norm_stderr": 0.032262193772867744 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.21604938271604937, + "acc_stderr": 0.022899162918445796, + "acc_norm": 0.21604938271604937, + "acc_norm_stderr": 0.022899162918445796 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036844, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036844 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.19689119170984457, + "acc_stderr": 0.02869787397186068, + "acc_norm": 0.19689119170984457, + "acc_norm_stderr": 0.02869787397186068 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.039994238792813365, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.039994238792813365 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.1926605504587156, + "acc_stderr": 0.016909276884936097, + "acc_norm": 0.1926605504587156, + "acc_norm_stderr": 0.016909276884936097 + }, + 
"harness|ko_mmlu_formal_logic|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.04040610178208841, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.04040610178208841 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.21895424836601307, + "acc_stderr": 0.02367908986180772, + "acc_norm": 0.21895424836601307, + "acc_norm_stderr": 0.02367908986180772 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2396694214876033, + "acc_stderr": 0.038968789850704164, + "acc_norm": 0.2396694214876033, + "acc_norm_stderr": 0.038968789850704164 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.17763157894736842, + "acc_stderr": 0.03110318238312338, + "acc_norm": 0.17763157894736842, + "acc_norm_stderr": 0.03110318238312338 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.25326797385620914, + "acc_stderr": 0.01759348689536683, + "acc_norm": 0.25326797385620914, + "acc_norm_stderr": 0.01759348689536683 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.23404255319148937, + "acc_stderr": 0.025257861359432414, + "acc_norm": 0.23404255319148937, + "acc_norm_stderr": 0.025257861359432414 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.30357142857142855, + "acc_stderr": 0.04364226155841044, + "acc_norm": 0.30357142857142855, + "acc_norm_stderr": 0.04364226155841044 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.1574074074074074, + "acc_stderr": 0.024837173518242394, + "acc_norm": 0.1574074074074074, + "acc_norm_stderr": 0.024837173518242394 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2446927374301676, + "acc_stderr": 0.01437816988409842, + "acc_norm": 0.2446927374301676, + "acc_norm_stderr": 0.01437816988409842 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + 
"acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.1801470588235294, + "acc_stderr": 0.023345163616544862, + "acc_norm": 0.1801470588235294, + "acc_norm_stderr": 0.023345163616544862 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.20816326530612245, + "acc_stderr": 0.025991117672813296, + "acc_norm": 0.20816326530612245, + "acc_norm_stderr": 0.025991117672813296 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.270042194092827, + "acc_stderr": 0.028900721906293426, + "acc_norm": 0.270042194092827, + "acc_norm_stderr": 0.028900721906293426 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.24771838331160365, + "acc_stderr": 0.011025499291443738, + "acc_norm": 0.24771838331160365, + "acc_norm_stderr": 0.011025499291443738 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.03019028245350195, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.03019028245350195 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03225078108306289, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03225078108306289 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2766217870257038, + "mc1_stderr": 0.015659605755326916, + "mc2": 0.48886745668609705, + "mc2_stderr": 0.016483226879092316 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2077922077922078, + "acc_stderr": 0.013949190397084033, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.017014038119297473 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, 
+ "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + 
"harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "giprime/OOM-SOLAR-10.7B_01", + "model_sha": "814d2d143f23714e8c734b21f5b112e8730e73f3", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/giprime/OOM-SOLAR-10.7B_02/result_2024-03-14 23:13:20.json b/giprime/OOM-SOLAR-10.7B_02/result_2024-03-14 23:13:20.json new file mode 100644 index 0000000000000000000000000000000000000000..db55d48dbcacf07237315ec5d8373c83b7a9d96a --- /dev/null +++ b/giprime/OOM-SOLAR-10.7B_02/result_2024-03-14 23:13:20.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.45051194539249145, + "acc_stderr": 0.014539646098471627, + "acc_norm": 0.507679180887372, + "acc_norm_stderr": 0.014609667440892581 + }, + "harness|ko_hellaswag|10": { + "acc": 0.43756223859788884, + "acc_stderr": 0.004950723480149757, + "acc_norm": 0.5980880302728541, + "acc_norm_stderr": 0.004892823415546545 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6432748538011696, + "acc_stderr": 0.03674013002860954, + "acc_norm": 0.6432748538011696, + "acc_norm_stderr": 0.03674013002860954 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6796116504854369, + "acc_stderr": 0.04620284082280041, + "acc_norm": 0.6796116504854369, + "acc_norm_stderr": 0.04620284082280041 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6832694763729247, + "acc_stderr": 0.016635566427712575, + 
"acc_norm": 0.6832694763729247, + "acc_norm_stderr": 0.016635566427712575 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45185185185185184, + "acc_stderr": 0.042992689054808624, + "acc_norm": 0.45185185185185184, + "acc_norm_stderr": 0.042992689054808624 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4723404255319149, + "acc_stderr": 0.03263597118409769, + "acc_norm": 0.4723404255319149, + "acc_norm_stderr": 0.03263597118409769 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4759036144578313, + "acc_stderr": 0.03887971849597264, + "acc_norm": 0.4759036144578313, + "acc_norm_stderr": 0.03887971849597264 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5401929260450161, + "acc_stderr": 0.028306190403305693, + "acc_norm": 0.5401929260450161, + "acc_norm_stderr": 0.028306190403305693 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.515695067264574, + "acc_stderr": 0.0335412657542081, + "acc_norm": 0.515695067264574, + "acc_norm_stderr": 0.0335412657542081 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6030534351145038, + "acc_stderr": 0.04291135671009224, + "acc_norm": 0.6030534351145038, + "acc_norm_stderr": 0.04291135671009224 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.55, + "acc_stderr": 0.05, + "acc_norm": 0.55, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6717171717171717, + "acc_stderr": 0.033456784227567773, + "acc_norm": 0.6717171717171717, + "acc_norm_stderr": 0.033456784227567773 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.45517241379310347, + "acc_stderr": 0.04149886942192117, + "acc_norm": 0.45517241379310347, + "acc_norm_stderr": 0.04149886942192117 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.04280105837364395, + "acc_norm": 
0.24509803921568626, + "acc_norm_stderr": 0.04280105837364395 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5210084033613446, + "acc_stderr": 0.03244980849990028, + "acc_norm": 0.5210084033613446, + "acc_norm_stderr": 0.03244980849990028 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5153846153846153, + "acc_stderr": 0.025339003010106494, + "acc_norm": 0.5153846153846153, + "acc_norm_stderr": 0.025339003010106494 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.55, + "acc_stderr": 0.049999999999999996, + "acc_norm": 0.55, + "acc_norm_stderr": 0.049999999999999996 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5833333333333334, + "acc_stderr": 0.04766075165356461, + "acc_norm": 0.5833333333333334, + "acc_norm_stderr": 0.04766075165356461 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.03465304488406796, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.03465304488406796 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.567741935483871, + "acc_stderr": 0.028181739720019403, + "acc_norm": 0.567741935483871, + "acc_norm_stderr": 0.028181739720019403 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7649572649572649, + "acc_stderr": 0.02777883590493543, + "acc_norm": 0.7649572649572649, + "acc_norm_stderr": 0.02777883590493543 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5132075471698113, + "acc_stderr": 0.030762134874500476, + "acc_norm": 0.5132075471698113, + "acc_norm_stderr": 0.030762134874500476 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6, + "acc_stderr": 0.0469237132203465, + "acc_norm": 0.6, + "acc_norm_stderr": 0.0469237132203465 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3592592592592593, + "acc_stderr": 0.02925290592725198, 
+ "acc_norm": 0.3592592592592593, + "acc_norm_stderr": 0.02925290592725198 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + "acc_stderr": 0.03734535676787198, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.03734535676787198 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6915422885572139, + "acc_stderr": 0.03265819588512697, + "acc_norm": 0.6915422885572139, + "acc_norm_stderr": 0.03265819588512697 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.41040462427745666, + "acc_stderr": 0.03750757044895538, + "acc_norm": 0.41040462427745666, + "acc_norm_stderr": 0.03750757044895538 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.36772486772486773, + "acc_stderr": 0.02483383982556243, + "acc_norm": 0.36772486772486773, + "acc_norm_stderr": 0.02483383982556243 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.04174752578923185, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.04174752578923185 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.73, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.73, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5115606936416185, + "acc_stderr": 0.026911898686377913, + "acc_norm": 0.5115606936416185, + "acc_norm_stderr": 0.026911898686377913 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5214723926380368, + "acc_stderr": 0.03924746876751129, + "acc_norm": 0.5214723926380368, + "acc_norm_stderr": 0.03924746876751129 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5432098765432098, + "acc_stderr": 0.027716661650194038, + "acc_norm": 0.5432098765432098, + "acc_norm_stderr": 0.027716661650194038 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.37, + "acc_stderr": 
0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6683937823834197, + "acc_stderr": 0.03397636541089118, + "acc_norm": 0.6683937823834197, + "acc_norm_stderr": 0.03397636541089118 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.4473684210526316, + "acc_stderr": 0.04677473004491199, + "acc_norm": 0.4473684210526316, + "acc_norm_stderr": 0.04677473004491199 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6678899082568808, + "acc_stderr": 0.02019268298542335, + "acc_norm": 0.6678899082568808, + "acc_norm_stderr": 0.02019268298542335 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.36507936507936506, + "acc_stderr": 0.04306241259127153, + "acc_norm": 0.36507936507936506, + "acc_norm_stderr": 0.04306241259127153 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5359477124183006, + "acc_stderr": 0.02855582751652878, + "acc_norm": 0.5359477124183006, + "acc_norm_stderr": 0.02855582751652878 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6694214876033058, + "acc_stderr": 0.04294340845212094, + "acc_norm": 0.6694214876033058, + "acc_norm_stderr": 0.04294340845212094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5460526315789473, + "acc_stderr": 0.04051646342874143, + "acc_norm": 0.5460526315789473, + "acc_norm_stderr": 0.04051646342874143 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.43790849673202614, + "acc_stderr": 0.020071257886886528, + "acc_norm": 0.43790849673202614, + "acc_norm_stderr": 0.020071257886886528 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.37943262411347517, + "acc_stderr": 0.028947338851614098, + "acc_norm": 0.37943262411347517, + "acc_norm_stderr": 0.028947338851614098 + }, + 
"harness|ko_mmlu_machine_learning|5": { + "acc": 0.36607142857142855, + "acc_stderr": 0.04572372358737431, + "acc_norm": 0.36607142857142855, + "acc_norm_stderr": 0.04572372358737431 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.39814814814814814, + "acc_stderr": 0.033384734032074016, + "acc_norm": 0.39814814814814814, + "acc_norm_stderr": 0.033384734032074016 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27262569832402234, + "acc_stderr": 0.014893391735249622, + "acc_norm": 0.27262569832402234, + "acc_norm_stderr": 0.014893391735249622 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.62, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.62, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4375, + "acc_stderr": 0.030134614954403924, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.030134614954403924 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5510204081632653, + "acc_stderr": 0.03184213866687579, + "acc_norm": 0.5510204081632653, + "acc_norm_stderr": 0.03184213866687579 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6962025316455697, + "acc_stderr": 0.029936696387138605, + "acc_norm": 0.6962025316455697, + "acc_norm_stderr": 0.029936696387138605 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3513689700130378, + "acc_stderr": 0.01219296945748403, + "acc_norm": 0.3513689700130378, + "acc_norm_stderr": 0.01219296945748403 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6029411764705882, + "acc_stderr": 0.0343413116471913, + "acc_norm": 0.6029411764705882, + "acc_norm_stderr": 0.0343413116471913 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6303030303030303, + "acc_stderr": 0.03769430314512568, + "acc_norm": 
0.6303030303030303, + "acc_norm_stderr": 0.03769430314512568 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.22766217870257038, + "mc1_stderr": 0.014679255032111068, + "mc2": 0.38031942972924243, + "mc2_stderr": 0.014619776869307915 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5348288075560803, + "acc_stderr": 0.017148598015747422, + "acc_norm": 0.5785123966942148, + "acc_norm_stderr": 0.016977101932601515 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + 
"harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "giprime/OOM-SOLAR-10.7B_02", + "model_sha": "280d03c88a620bbabef2b3f19fe8139f3f85f485", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/gl2een/llama2-13b-instruct-full-fintune/result_2023-11-24 03:45:54.json b/gl2een/llama2-13b-instruct-full-fintune/result_2023-11-24 03:45:54.json new file mode 100644 index 0000000000000000000000000000000000000000..35ae03c89d0dc14271f64d69063462cefad1b772 --- /dev/null +++ b/gl2een/llama2-13b-instruct-full-fintune/result_2023-11-24 03:45:54.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.37372013651877134, + "acc_stderr": 0.014137708601759091, + "acc_norm": 0.42235494880546076, + 
"acc_norm_stderr": 0.014434138713379991 + }, + "harness|ko_hellaswag|10": { + "acc": 0.40360485958972314, + "acc_stderr": 0.004896173035943316, + "acc_norm": 0.5403306114319857, + "acc_norm_stderr": 0.004973522582431206 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5321637426900585, + "acc_stderr": 0.03826882417660368, + "acc_norm": 0.5321637426900585, + "acc_norm_stderr": 0.03826882417660368 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.46601941747572817, + "acc_stderr": 0.0493929144727348, + "acc_norm": 0.46601941747572817, + "acc_norm_stderr": 0.0493929144727348 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5223499361430396, + "acc_stderr": 0.017862091778507855, + "acc_norm": 0.5223499361430396, + "acc_norm_stderr": 0.017862091778507855 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4222222222222222, + "acc_stderr": 0.04266763404099582, + "acc_norm": 0.4222222222222222, + "acc_norm_stderr": 0.04266763404099582 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3574468085106383, + "acc_stderr": 0.03132941789476425, + "acc_norm": 0.3574468085106383, + "acc_norm_stderr": 0.03132941789476425 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4036144578313253, + "acc_stderr": 0.03819486140758397, + "acc_norm": 0.4036144578313253, + "acc_norm_stderr": 0.03819486140758397 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4533762057877814, + "acc_stderr": 0.028274359854894265, + "acc_norm": 0.4533762057877814, + "acc_norm_stderr": 0.028274359854894265 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4304932735426009, + "acc_stderr": 0.033231973029429394, + "acc_norm": 0.4304932735426009, + "acc_norm_stderr": 0.033231973029429394 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48091603053435117, + "acc_stderr": 0.04382094705550989, + "acc_norm": 
0.48091603053435117, + "acc_norm_stderr": 0.04382094705550989 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.51010101010101, + "acc_stderr": 0.035616254886737454, + "acc_norm": 0.51010101010101, + "acc_norm_stderr": 0.035616254886737454 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4068965517241379, + "acc_stderr": 0.04093793981266237, + "acc_norm": 0.4068965517241379, + "acc_norm_stderr": 0.04093793981266237 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.04158307533083286, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.04158307533083286 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.36554621848739494, + "acc_stderr": 0.03128217706368461, + "acc_norm": 0.36554621848739494, + "acc_norm_stderr": 0.03128217706368461 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3923076923076923, + "acc_stderr": 0.02475600038213094, + "acc_norm": 0.3923076923076923, + "acc_norm_stderr": 0.02475600038213094 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39901477832512317, + "acc_stderr": 0.03445487686264716, + "acc_norm": 0.39901477832512317, + "acc_norm_stderr": 0.03445487686264716 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.41935483870967744, + "acc_stderr": 
0.028071588901091855, + "acc_norm": 0.41935483870967744, + "acc_norm_stderr": 0.028071588901091855 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6410256410256411, + "acc_stderr": 0.03142616993791923, + "acc_norm": 0.6410256410256411, + "acc_norm_stderr": 0.03142616993791923 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.45660377358490567, + "acc_stderr": 0.03065674869673943, + "acc_norm": 0.45660377358490567, + "acc_norm_stderr": 0.03065674869673943 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4727272727272727, + "acc_stderr": 0.04782001791380063, + "acc_norm": 0.4727272727272727, + "acc_norm_stderr": 0.04782001791380063 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2518518518518518, + "acc_stderr": 0.026466117538959916, + "acc_norm": 0.2518518518518518, + "acc_norm_stderr": 0.026466117538959916 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + "acc_stderr": 0.03684881521389023, + "acc_norm": 0.2847682119205298, + "acc_norm_stderr": 0.03684881521389023 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.48756218905472637, + "acc_stderr": 0.03534439848539579, + "acc_norm": 0.48756218905472637, + "acc_norm_stderr": 0.03534439848539579 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.35260115606936415, + "acc_stderr": 0.036430371689585496, + "acc_norm": 0.35260115606936415, + "acc_norm_stderr": 0.036430371689585496 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.023068188848261114, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.023068188848261114 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3402777777777778, + "acc_stderr": 0.039621355734862175, + "acc_norm": 0.3402777777777778, + "acc_norm_stderr": 0.039621355734862175 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + 
"harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4421965317919075, + "acc_stderr": 0.026738603643807403, + "acc_norm": 0.4421965317919075, + "acc_norm_stderr": 0.026738603643807403 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3803680981595092, + "acc_stderr": 0.03814269893261837, + "acc_norm": 0.3803680981595092, + "acc_norm_stderr": 0.03814269893261837 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.45987654320987653, + "acc_stderr": 0.02773102275353928, + "acc_norm": 0.45987654320987653, + "acc_norm_stderr": 0.02773102275353928 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.46632124352331605, + "acc_stderr": 0.03600244069867178, + "acc_norm": 0.46632124352331605, + "acc_norm_stderr": 0.03600244069867178 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.30701754385964913, + "acc_stderr": 0.0433913832257986, + "acc_norm": 0.30701754385964913, + "acc_norm_stderr": 0.0433913832257986 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.47339449541284406, + "acc_stderr": 0.021406952688151577, + "acc_norm": 0.47339449541284406, + "acc_norm_stderr": 0.021406952688151577 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.04006168083848877, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.04006168083848877 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4084967320261438, + "acc_stderr": 0.028146405993096358, + "acc_norm": 0.4084967320261438, + "acc_norm_stderr": 0.028146405993096358 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + 
"harness|ko_mmlu_international_law|5": { + "acc": 0.6033057851239669, + "acc_stderr": 0.04465869780531009, + "acc_norm": 0.6033057851239669, + "acc_norm_stderr": 0.04465869780531009 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40131578947368424, + "acc_stderr": 0.03988903703336284, + "acc_norm": 0.40131578947368424, + "acc_norm_stderr": 0.03988903703336284 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3349673202614379, + "acc_stderr": 0.01909422816700032, + "acc_norm": 0.3349673202614379, + "acc_norm_stderr": 0.01909422816700032 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3120567375886525, + "acc_stderr": 0.02764012054516993, + "acc_norm": 0.3120567375886525, + "acc_norm_stderr": 0.02764012054516993 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2767857142857143, + "acc_stderr": 0.042466243366976256, + "acc_norm": 0.2767857142857143, + "acc_norm_stderr": 0.042466243366976256 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.0316746870682898, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.0316746870682898 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.30514705882352944, + "acc_stderr": 0.0279715413701706, + "acc_norm": 0.30514705882352944, + "acc_norm_stderr": 0.0279715413701706 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.40816326530612246, + "acc_stderr": 0.03146465712827423, + "acc_norm": 
0.40816326530612246, + "acc_norm_stderr": 0.03146465712827423 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.4472573839662447, + "acc_stderr": 0.03236564251614192, + "acc_norm": 0.4472573839662447, + "acc_norm_stderr": 0.03236564251614192 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2848761408083442, + "acc_stderr": 0.011527830846369012, + "acc_norm": 0.2848761408083442, + "acc_norm_stderr": 0.011527830846369012 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4068627450980392, + "acc_stderr": 0.03447891136353382, + "acc_norm": 0.4068627450980392, + "acc_norm_stderr": 0.03447891136353382 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4, + "acc_stderr": 0.03825460278380026, + "acc_norm": 0.4, + "acc_norm_stderr": 0.03825460278380026 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2717258261933905, + "mc1_stderr": 0.015572840452875833, + "mc2": 0.43105567225485036, + "mc2_stderr": 0.015011642206271197 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.38961038961038963, + "acc_stderr": 0.0167661616718935, + "acc_norm": 0.45690672963400236, + "acc_norm_stderr": 0.017126389093086777 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + 
"harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "gl2een/llama2-13b-instruct-full-fintune", + "model_sha": 
"2a6e2457ca85d5810b55f2e90b36637f2ed4e695", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/gl2een/polyglot-ko-12.8b-instrcut-full-finetune2/result_2023-10-30 06:20:20.json b/gl2een/polyglot-ko-12.8b-instrcut-full-finetune2/result_2023-10-30 06:20:20.json new file mode 100644 index 0000000000000000000000000000000000000000..40e5cdacefe91d6195b658eb87ddb64d8cf112aa --- /dev/null +++ b/gl2een/polyglot-ko-12.8b-instrcut-full-finetune2/result_2023-10-30 06:20:20.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2858361774744027, + "acc_stderr": 0.013203196088537364, + "acc_norm": 0.33532423208191126, + "acc_norm_stderr": 0.013796182947785562 + }, + "harness|ko_hellaswag|10": { + "acc": 0.38926508663612824, + "acc_stderr": 0.004865871290143343, + "acc_norm": 0.5028878709420435, + "acc_norm_stderr": 0.004989698183207819 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.22807017543859648, + "acc_stderr": 0.03218093795602357, + "acc_norm": 0.22807017543859648, + "acc_norm_stderr": 0.03218093795602357 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.22330097087378642, + "acc_stderr": 0.04123553189891431, + "acc_norm": 0.22330097087378642, + "acc_norm_stderr": 0.04123553189891431 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.20561941251596424, + "acc_stderr": 0.014452500456785823, + "acc_norm": 0.20561941251596424, + "acc_norm_stderr": 0.014452500456785823 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.24444444444444444, + "acc_stderr": 0.037125378336148665, + "acc_norm": 0.24444444444444444, + "acc_norm_stderr": 0.037125378336148665 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 
0.20851063829787234, + "acc_stderr": 0.026556982117838746, + "acc_norm": 0.20851063829787234, + "acc_norm_stderr": 0.026556982117838746 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.16265060240963855, + "acc_stderr": 0.028730237892613787, + "acc_norm": 0.16265060240963855, + "acc_norm_stderr": 0.028730237892613787 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.27009646302250806, + "acc_stderr": 0.025218040373410622, + "acc_norm": 0.27009646302250806, + "acc_norm_stderr": 0.025218040373410622 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.09417040358744394, + "acc_stderr": 0.019602162350340513, + "acc_norm": 0.09417040358744394, + "acc_norm_stderr": 0.019602162350340513 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.26717557251908397, + "acc_stderr": 0.03880848301082396, + "acc_norm": 0.26717557251908397, + "acc_norm_stderr": 0.03880848301082396 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117317, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117317 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.37373737373737376, + "acc_stderr": 0.03446897738659333, + "acc_norm": 0.37373737373737376, + "acc_norm_stderr": 0.03446897738659333 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2413793103448276, + "acc_stderr": 0.03565998174135302, + "acc_norm": 0.2413793103448276, + "acc_norm_stderr": 0.03565998174135302 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3627450980392157, + "acc_stderr": 0.04784060704105653, + "acc_norm": 0.3627450980392157, + "acc_norm_stderr": 0.04784060704105653 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3445378151260504, + "acc_stderr": 0.030868682604121622, + "acc_norm": 0.3445378151260504, + "acc_norm_stderr": 0.030868682604121622 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3153846153846154, + "acc_stderr": 0.02355964698318995, + "acc_norm": 0.3153846153846154, + "acc_norm_stderr": 
0.02355964698318995 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932268, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932268 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.18, + "acc_stderr": 0.038612291966536955, + "acc_norm": 0.18, + "acc_norm_stderr": 0.038612291966536955 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.25, + "acc_stderr": 0.04186091791394607, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04186091791394607 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.22167487684729065, + "acc_stderr": 0.029225575892489614, + "acc_norm": 0.22167487684729065, + "acc_norm_stderr": 0.029225575892489614 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2806451612903226, + "acc_stderr": 0.025560604721022895, + "acc_norm": 0.2806451612903226, + "acc_norm_stderr": 0.025560604721022895 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.02723601394619666, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.02723601394619666 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2981132075471698, + "acc_stderr": 0.02815283794249386, + "acc_norm": 0.2981132075471698, + "acc_norm_stderr": 0.02815283794249386 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.23636363636363636, + "acc_stderr": 0.04069306319721376, + "acc_norm": 0.23636363636363636, + "acc_norm_stderr": 0.04069306319721376 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25555555555555554, + "acc_stderr": 0.026593939101844065, + "acc_norm": 0.25555555555555554, + "acc_norm_stderr": 0.026593939101844065 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33774834437086093, + "acc_stderr": 0.038615575462551684, + "acc_norm": 0.33774834437086093, + "acc_norm_stderr": 0.038615575462551684 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.26865671641791045, + "acc_stderr": 0.03134328358208954, + "acc_norm": 0.26865671641791045, + 
"acc_norm_stderr": 0.03134328358208954 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.27167630057803466, + "acc_stderr": 0.03391750322321659, + "acc_norm": 0.27167630057803466, + "acc_norm_stderr": 0.03391750322321659 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2566137566137566, + "acc_stderr": 0.022494510767503154, + "acc_norm": 0.2566137566137566, + "acc_norm_stderr": 0.022494510767503154 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.25, + "acc_stderr": 0.03621034121889507, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03621034121889507 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.24855491329479767, + "acc_stderr": 0.023267528432100174, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 0.023267528432100174 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2085889570552147, + "acc_stderr": 0.03192193448934726, + "acc_norm": 0.2085889570552147, + "acc_norm_stderr": 0.03192193448934726 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2191358024691358, + "acc_stderr": 0.023016705640262206, + "acc_norm": 0.2191358024691358, + "acc_norm_stderr": 0.023016705640262206 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.3160621761658031, + "acc_stderr": 0.033553973696861736, + "acc_norm": 0.3160621761658031, + "acc_norm_stderr": 0.033553973696861736 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.04227054451232199, + "acc_norm": 0.2807017543859649, + 
"acc_norm_stderr": 0.04227054451232199 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3412844036697248, + "acc_stderr": 0.020328612816592435, + "acc_norm": 0.3412844036697248, + "acc_norm_stderr": 0.020328612816592435 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.21428571428571427, + "acc_stderr": 0.03670066451047181, + "acc_norm": 0.21428571428571427, + "acc_norm_stderr": 0.03670066451047181 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.28104575163398693, + "acc_stderr": 0.025738854797818737, + "acc_norm": 0.28104575163398693, + "acc_norm_stderr": 0.025738854797818737 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036843, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036843 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.1652892561983471, + "acc_stderr": 0.033907806129727755, + "acc_norm": 0.1652892561983471, + "acc_norm_stderr": 0.033907806129727755 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.2565789473684211, + "acc_stderr": 0.0355418036802569, + "acc_norm": 0.2565789473684211, + "acc_norm_stderr": 0.0355418036802569 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.24019607843137256, + "acc_stderr": 0.01728276069516742, + "acc_norm": 0.24019607843137256, + "acc_norm_stderr": 0.01728276069516742 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.22340425531914893, + "acc_stderr": 0.02484792135806396, + "acc_norm": 0.22340425531914893, + "acc_norm_stderr": 0.02484792135806396 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.14285714285714285, + "acc_stderr": 0.033213611069662696, + "acc_norm": 0.14285714285714285, + "acc_norm_stderr": 0.033213611069662696 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4212962962962963, + "acc_stderr": 0.03367462138896078, + "acc_norm": 0.4212962962962963, + "acc_norm_stderr": 0.03367462138896078 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27150837988826815, + 
"acc_stderr": 0.014874252168095278, + "acc_norm": 0.27150837988826815, + "acc_norm_stderr": 0.014874252168095278 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.19, + "acc_stderr": 0.03942772444036625, + "acc_norm": 0.19, + "acc_norm_stderr": 0.03942772444036625 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4485294117647059, + "acc_stderr": 0.030211479609121593, + "acc_norm": 0.4485294117647059, + "acc_norm_stderr": 0.030211479609121593 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3510204081632653, + "acc_stderr": 0.030555316755573637, + "acc_norm": 0.3510204081632653, + "acc_norm_stderr": 0.030555316755573637 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2109704641350211, + "acc_stderr": 0.02655837250266192, + "acc_norm": 0.2109704641350211, + "acc_norm_stderr": 0.02655837250266192 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.24445893089960888, + "acc_stderr": 0.010976425013113892, + "acc_norm": 0.24445893089960888, + "acc_norm_stderr": 0.010976425013113892 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.22058823529411764, + "acc_stderr": 0.02910225438967409, + "acc_norm": 0.22058823529411764, + "acc_norm_stderr": 0.02910225438967409 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.21212121212121213, + "acc_stderr": 0.03192271569548298, + "acc_norm": 0.21212121212121213, + "acc_norm_stderr": 0.03192271569548298 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24724602203182375, + "mc1_stderr": 0.01510240479735965, + "mc2": 0.39935660614901936, + "mc2_stderr": 0.014712360794626336 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.33293978748524206, + "acc_stderr": 0.01620243120837379, + "acc_norm": 0.44037780401416765, + "acc_norm_stderr": 0.01706769977431299 + } + }, + 
"versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + 
"harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "gl2een/polyglot-ko-12.8b-instrcut-full-finetune2", + "model_sha": "ff5e25810aa9d6ca4bc65f7504dac285df05e907", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/gwonny/KoSOLAR-10.7B-QLoRA-NEFTune-kolon-v0.1/result_2024-03-22 01:07:50.json b/gwonny/KoSOLAR-10.7B-QLoRA-NEFTune-kolon-v0.1/result_2024-03-22 01:07:50.json new file mode 100644 index 0000000000000000000000000000000000000000..b848886f4b1d3950ca77131a2cc5ffb605747d1f --- /dev/null +++ b/gwonny/KoSOLAR-10.7B-QLoRA-NEFTune-kolon-v0.1/result_2024-03-22 01:07:50.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3720136518771331, + "acc_stderr": 0.014124597881844461, + "acc_norm": 0.42406143344709896, + "acc_norm_stderr": 0.014441889627464392 + }, + "harness|ko_hellaswag|10": { + "acc": 0.41854212308305117, + "acc_stderr": 0.00492311784974028, + "acc_norm": 0.5701055566620196, + "acc_norm_stderr": 0.004940490508240655 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.672514619883041, + "acc_stderr": 0.035993357714560276, + "acc_norm": 0.672514619883041, + "acc_norm_stderr": 
0.035993357714560276 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6601941747572816, + "acc_stderr": 0.046897659372781335, + "acc_norm": 0.6601941747572816, + "acc_norm_stderr": 0.046897659372781335 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.669220945083014, + "acc_stderr": 0.016824818462563746, + "acc_norm": 0.669220945083014, + "acc_norm_stderr": 0.016824818462563746 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.04292596718256981, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.04292596718256981 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.48936170212765956, + "acc_stderr": 0.03267862331014063, + "acc_norm": 0.48936170212765956, + "acc_norm_stderr": 0.03267862331014063 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4578313253012048, + "acc_stderr": 0.03878626771002361, + "acc_norm": 0.4578313253012048, + "acc_norm_stderr": 0.03878626771002361 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5819935691318328, + "acc_stderr": 0.028013651891995076, + "acc_norm": 0.5819935691318328, + "acc_norm_stderr": 0.028013651891995076 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.547085201793722, + "acc_stderr": 0.03340867501923324, + "acc_norm": 0.547085201793722, + "acc_norm_stderr": 0.03340867501923324 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5572519083969466, + "acc_stderr": 0.04356447202665069, + "acc_norm": 0.5572519083969466, + "acc_norm_stderr": 0.04356447202665069 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.48, + "acc_stderr": 0.05021167315686779, + "acc_norm": 0.48, + "acc_norm_stderr": 0.05021167315686779 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6919191919191919, + "acc_stderr": 0.032894773300986134, + "acc_norm": 0.6919191919191919, + "acc_norm_stderr": 
0.032894773300986134 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5172413793103449, + "acc_stderr": 0.04164188720169375, + "acc_norm": 0.5172413793103449, + "acc_norm_stderr": 0.04164188720169375 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.043364327079931785, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.043364327079931785 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5378151260504201, + "acc_stderr": 0.0323854694875898, + "acc_norm": 0.5378151260504201, + "acc_norm_stderr": 0.0323854694875898 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5230769230769231, + "acc_stderr": 0.02532399086173626, + "acc_norm": 0.5230769230769231, + "acc_norm_stderr": 0.02532399086173626 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.62, + "acc_stderr": 0.0487831731214563, + "acc_norm": 0.62, + "acc_norm_stderr": 0.0487831731214563 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5925925925925926, + "acc_stderr": 0.04750077341199984, + "acc_norm": 0.5925925925925926, + "acc_norm_stderr": 0.04750077341199984 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3645320197044335, + "acc_stderr": 0.0338640574606209, + "acc_norm": 0.3645320197044335, + "acc_norm_stderr": 0.0338640574606209 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5580645161290323, + "acc_stderr": 0.02825155790684974, + "acc_norm": 0.5580645161290323, + "acc_norm_stderr": 0.02825155790684974 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7863247863247863, + "acc_stderr": 0.026853450377009144, + "acc_norm": 0.7863247863247863, + "acc_norm_stderr": 0.026853450377009144 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5018867924528302, + "acc_stderr": 0.030772653642075664, + "acc_norm": 
0.5018867924528302, + "acc_norm_stderr": 0.030772653642075664 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5909090909090909, + "acc_stderr": 0.04709306978661895, + "acc_norm": 0.5909090909090909, + "acc_norm_stderr": 0.04709306978661895 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.32592592592592595, + "acc_stderr": 0.028578348365473072, + "acc_norm": 0.32592592592592595, + "acc_norm_stderr": 0.028578348365473072 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.36423841059602646, + "acc_stderr": 0.03929111781242741, + "acc_norm": 0.36423841059602646, + "acc_norm_stderr": 0.03929111781242741 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6965174129353234, + "acc_stderr": 0.03251006816458619, + "acc_norm": 0.6965174129353234, + "acc_norm_stderr": 0.03251006816458619 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4393063583815029, + "acc_stderr": 0.037842719328874674, + "acc_norm": 0.4393063583815029, + "acc_norm_stderr": 0.037842719328874674 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3253968253968254, + "acc_stderr": 0.02413015829976261, + "acc_norm": 0.3253968253968254, + "acc_norm_stderr": 0.02413015829976261 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.04174752578923185, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.04174752578923185 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.77, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.77, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5664739884393064, + "acc_stderr": 0.026680134761679217, + "acc_norm": 0.5664739884393064, + "acc_norm_stderr": 0.026680134761679217 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.558282208588957, + "acc_stderr": 
0.03901591825836184, + "acc_norm": 0.558282208588957, + "acc_norm_stderr": 0.03901591825836184 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5648148148148148, + "acc_stderr": 0.027586006221607708, + "acc_norm": 0.5648148148148148, + "acc_norm_stderr": 0.027586006221607708 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6787564766839378, + "acc_stderr": 0.033699508685490674, + "acc_norm": 0.6787564766839378, + "acc_norm_stderr": 0.033699508685490674 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3508771929824561, + "acc_stderr": 0.044895393502706986, + "acc_norm": 0.3508771929824561, + "acc_norm_stderr": 0.044895393502706986 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6293577981651376, + "acc_stderr": 0.020707458164352984, + "acc_norm": 0.6293577981651376, + "acc_norm_stderr": 0.020707458164352984 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04216370213557835, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04216370213557835 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5424836601307189, + "acc_stderr": 0.028526383452142638, + "acc_norm": 0.5424836601307189, + "acc_norm_stderr": 0.028526383452142638 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.58, + "acc_stderr": 0.04960449637488583, + "acc_norm": 0.58, + "acc_norm_stderr": 0.04960449637488583 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7024793388429752, + "acc_stderr": 0.04173349148083498, + "acc_norm": 0.7024793388429752, + "acc_norm_stderr": 0.04173349148083498 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5592105263157895, + "acc_stderr": 0.04040311062490436, + "acc_norm": 0.5592105263157895, + "acc_norm_stderr": 0.04040311062490436 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 
0.48366013071895425, + "acc_stderr": 0.020217030653186446, + "acc_norm": 0.48366013071895425, + "acc_norm_stderr": 0.020217030653186446 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3617021276595745, + "acc_stderr": 0.02866382014719949, + "acc_norm": 0.3617021276595745, + "acc_norm_stderr": 0.02866382014719949 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.375, + "acc_stderr": 0.04595091388086298, + "acc_norm": 0.375, + "acc_norm_stderr": 0.04595091388086298 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.03409386946992699, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.03409386946992699 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23575418994413408, + "acc_stderr": 0.014196375686290804, + "acc_norm": 0.23575418994413408, + "acc_norm_stderr": 0.014196375686290804 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.64, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.64, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.46691176470588236, + "acc_stderr": 0.03030625772246832, + "acc_norm": 0.46691176470588236, + "acc_norm_stderr": 0.03030625772246832 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4857142857142857, + "acc_stderr": 0.031996152328062875, + "acc_norm": 0.4857142857142857, + "acc_norm_stderr": 0.031996152328062875 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.70042194092827, + "acc_stderr": 0.029818024749753095, + "acc_norm": 0.70042194092827, + "acc_norm_stderr": 0.029818024749753095 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.37809647979139505, + "acc_stderr": 0.012384878406798097, + "acc_norm": 0.37809647979139505, + "acc_norm_stderr": 0.012384878406798097 + }, 
+ "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6176470588235294, + "acc_stderr": 0.03410785338904719, + "acc_norm": 0.6176470588235294, + "acc_norm_stderr": 0.03410785338904719 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6242424242424243, + "acc_stderr": 0.03781887353205982, + "acc_norm": 0.6242424242424243, + "acc_norm_stderr": 0.03781887353205982 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2533659730722154, + "mc1_stderr": 0.015225899340826809, + "mc2": 0.40326546289871124, + "mc2_stderr": 0.015108388302736994 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3612750885478158, + "acc_stderr": 0.01651546302241201, + "acc_norm": 0.448642266824085, + "acc_norm_stderr": 0.0170994305147258 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + 
"harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "gwonny/KoSOLAR-10.7B-QLoRA-NEFTune-kolon-v0.1", + "model_sha": "901ba26134c663b6a6f97b7b5d2e012d9cb3bc39", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/gwonny/KoSOLAR-10.7B-QLoRA-NEFTune-kolon-v2.0/result_2024-03-25 05:19:33.json b/gwonny/KoSOLAR-10.7B-QLoRA-NEFTune-kolon-v2.0/result_2024-03-25 05:19:33.json new file mode 100644 index 
0000000000000000000000000000000000000000..7bd74cdd3a35a910a6e507fda16e61ce38efd1a5 --- /dev/null +++ b/gwonny/KoSOLAR-10.7B-QLoRA-NEFTune-kolon-v2.0/result_2024-03-25 05:19:33.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.20819112627986347, + "acc_stderr": 0.011864866118448067, + "acc_norm": 0.27474402730375425, + "acc_norm_stderr": 0.013044617212771227 + }, + "harness|ko_hellaswag|10": { + "acc": 0.33718382792272455, + "acc_stderr": 0.004717820714968747, + "acc_norm": 0.4195379406492731, + "acc_norm_stderr": 0.004924748500639348 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.543859649122807, + "acc_stderr": 0.03820042586602966, + "acc_norm": 0.543859649122807, + "acc_norm_stderr": 0.03820042586602966 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6504854368932039, + "acc_stderr": 0.047211885060971716, + "acc_norm": 0.6504854368932039, + "acc_norm_stderr": 0.047211885060971716 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.565772669220945, + "acc_stderr": 0.01772458938967779, + "acc_norm": 0.565772669220945, + "acc_norm_stderr": 0.01772458938967779 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3925925925925926, + "acc_stderr": 0.042185062153688786, + "acc_norm": 0.3925925925925926, + "acc_norm_stderr": 0.042185062153688786 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.40425531914893614, + "acc_stderr": 0.03208115750788684, + "acc_norm": 0.40425531914893614, + "acc_norm_stderr": 0.03208115750788684 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.43373493975903615, + "acc_stderr": 0.03858158940685517, + "acc_norm": 0.43373493975903615, + "acc_norm_stderr": 0.03858158940685517 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5080385852090032, + "acc_stderr": 0.028394421370984538, + "acc_norm": 0.5080385852090032, + 
"acc_norm_stderr": 0.028394421370984538 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3901345291479821, + "acc_stderr": 0.032737667254591575, + "acc_norm": 0.3901345291479821, + "acc_norm_stderr": 0.032737667254591575 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4732824427480916, + "acc_stderr": 0.04379024936553894, + "acc_norm": 0.4732824427480916, + "acc_norm_stderr": 0.04379024936553894 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.41, + "acc_stderr": 0.04943110704237103, + "acc_norm": 0.41, + "acc_norm_stderr": 0.04943110704237103 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6111111111111112, + "acc_stderr": 0.0347327959083696, + "acc_norm": 0.6111111111111112, + "acc_norm_stderr": 0.0347327959083696 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4827586206896552, + "acc_stderr": 0.04164188720169377, + "acc_norm": 0.4827586206896552, + "acc_norm_stderr": 0.04164188720169377 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.044405219061793275, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.044405219061793275 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5084033613445378, + "acc_stderr": 0.0324739027656967, + "acc_norm": 0.5084033613445378, + "acc_norm_stderr": 0.0324739027656967 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.44871794871794873, + "acc_stderr": 0.02521731518484648, + "acc_norm": 0.44871794871794873, + "acc_norm_stderr": 0.02521731518484648 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.53, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.53, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.04830366024635331, + "acc_norm": 
0.48148148148148145, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3793103448275862, + "acc_stderr": 0.034139638059062345, + "acc_norm": 0.3793103448275862, + "acc_norm_stderr": 0.034139638059062345 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4645161290322581, + "acc_stderr": 0.028372287797962956, + "acc_norm": 0.4645161290322581, + "acc_norm_stderr": 0.028372287797962956 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7307692307692307, + "acc_stderr": 0.02905858830374884, + "acc_norm": 0.7307692307692307, + "acc_norm_stderr": 0.02905858830374884 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.45660377358490567, + "acc_stderr": 0.030656748696739428, + "acc_norm": 0.45660377358490567, + "acc_norm_stderr": 0.030656748696739428 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5, + "acc_stderr": 0.04789131426105757, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04789131426105757 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.02784081149587194, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.02784081149587194 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + "acc_stderr": 0.03684881521389023, + "acc_norm": 0.2847682119205298, + "acc_norm_stderr": 0.03684881521389023 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5870646766169154, + "acc_stderr": 0.03481520803367348, + "acc_norm": 0.5870646766169154, + "acc_norm_stderr": 0.03481520803367348 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.44508670520231214, + "acc_stderr": 0.037894017602836484, + "acc_norm": 0.44508670520231214, + "acc_norm_stderr": 0.037894017602836484 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.024278568024307706, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.024278568024307706 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 
0.4166666666666667, + "acc_stderr": 0.04122728707651283, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.04122728707651283 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.34, + "acc_stderr": 0.047609522856952344, + "acc_norm": 0.34, + "acc_norm_stderr": 0.047609522856952344 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.63, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.63, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.47398843930635837, + "acc_stderr": 0.026882643434022885, + "acc_norm": 0.47398843930635837, + "acc_norm_stderr": 0.026882643434022885 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.44171779141104295, + "acc_stderr": 0.039015918258361856, + "acc_norm": 0.44171779141104295, + "acc_norm_stderr": 0.039015918258361856 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5216049382716049, + "acc_stderr": 0.02779476010500873, + "acc_norm": 0.5216049382716049, + "acc_norm_stderr": 0.02779476010500873 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6424870466321243, + "acc_stderr": 0.03458816042181012, + "acc_norm": 0.6424870466321243, + "acc_norm_stderr": 0.03458816042181012 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.042270544512322004, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.042270544512322004 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5743119266055046, + "acc_stderr": 0.0211992359724708, + "acc_norm": 0.5743119266055046, + "acc_norm_stderr": 0.0211992359724708 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.03932537680392871, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.03932537680392871 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 
0.477124183006536, + "acc_stderr": 0.028599936776089782, + "acc_norm": 0.477124183006536, + "acc_norm_stderr": 0.028599936776089782 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5702479338842975, + "acc_stderr": 0.04519082021319772, + "acc_norm": 0.5702479338842975, + "acc_norm_stderr": 0.04519082021319772 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4276315789473684, + "acc_stderr": 0.04026097083296559, + "acc_norm": 0.4276315789473684, + "acc_norm_stderr": 0.04026097083296559 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3660130718954248, + "acc_stderr": 0.01948802574552966, + "acc_norm": 0.3660130718954248, + "acc_norm_stderr": 0.01948802574552966 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.30851063829787234, + "acc_stderr": 0.027553366165101366, + "acc_norm": 0.30851063829787234, + "acc_norm_stderr": 0.027553366165101366 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.29464285714285715, + "acc_stderr": 0.043270409325787296, + "acc_norm": 0.29464285714285715, + "acc_norm_stderr": 0.043270409325787296 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.37962962962962965, + "acc_stderr": 0.03309682581119035, + "acc_norm": 0.37962962962962965, + "acc_norm_stderr": 0.03309682581119035 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.3307262569832402, + "acc_stderr": 0.01573502625896612, + "acc_norm": 0.3307262569832402, + "acc_norm_stderr": 0.01573502625896612 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + 
"harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4485294117647059, + "acc_stderr": 0.030211479609121596, + "acc_norm": 0.4485294117647059, + "acc_norm_stderr": 0.030211479609121596 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.49387755102040815, + "acc_stderr": 0.0320068202016391, + "acc_norm": 0.49387755102040815, + "acc_norm_stderr": 0.0320068202016391 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6286919831223629, + "acc_stderr": 0.031450686007448596, + "acc_norm": 0.6286919831223629, + "acc_norm_stderr": 0.031450686007448596 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2848761408083442, + "acc_stderr": 0.01152783084636902, + "acc_norm": 0.2848761408083442, + "acc_norm_stderr": 0.01152783084636902 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5147058823529411, + "acc_stderr": 0.035077938347913236, + "acc_norm": 0.5147058823529411, + "acc_norm_stderr": 0.035077938347913236 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.0390369864774844, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.0390369864774844 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2668298653610771, + "mc1_stderr": 0.015483691939237269, + "mc2": 0.43970263150884004, + "mc2_stderr": 0.015283267129424875 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.24203069657615112, + "acc_stderr": 0.014725696750525326, + "acc_norm": 0.33884297520661155, + "acc_norm_stderr": 0.016272952997019124 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + 
"harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + 
"harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "gwonny/KoSOLAR-10.7B-QLoRA-NEFTune-kolon-v2.0", + "model_sha": "5983ce2635316259a8255623a2b3a7a6ca8aa161", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/gwonny/KoSOLAR-10.7B-kolon/result_2024-03-15 02:15:07.json b/gwonny/KoSOLAR-10.7B-kolon/result_2024-03-15 02:15:07.json new file mode 100644 index 0000000000000000000000000000000000000000..c1e5e6862b0f320f5caa301dd10f03626c5d7fad --- /dev/null +++ b/gwonny/KoSOLAR-10.7B-kolon/result_2024-03-15 02:15:07.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3720136518771331, + "acc_stderr": 0.014124597881844461, + "acc_norm": 0.4232081911262799, + "acc_norm_stderr": 0.014438036220848022 + }, + "harness|ko_hellaswag|10": { + "acc": 0.418442541326429, + "acc_stderr": 0.004922953651577687, + "acc_norm": 0.5702051384186417, + "acc_norm_stderr": 0.004940349676769328 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.036155076303109365, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.036155076303109365 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6699029126213593, + "acc_stderr": 0.0465614711001235, + "acc_norm": 0.6699029126213593, + "acc_norm_stderr": 0.0465614711001235 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.669220945083014, + "acc_stderr": 0.016824818462563746, + "acc_norm": 0.669220945083014, + "acc_norm_stderr": 0.016824818462563746 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.04292596718256981, + "acc_norm": 
0.4444444444444444, + "acc_norm_stderr": 0.04292596718256981 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.48936170212765956, + "acc_stderr": 0.03267862331014063, + "acc_norm": 0.48936170212765956, + "acc_norm_stderr": 0.03267862331014063 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4457831325301205, + "acc_stderr": 0.03869543323472101, + "acc_norm": 0.4457831325301205, + "acc_norm_stderr": 0.03869543323472101 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5787781350482315, + "acc_stderr": 0.028043399858210628, + "acc_norm": 0.5787781350482315, + "acc_norm_stderr": 0.028043399858210628 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5515695067264574, + "acc_stderr": 0.03337883736255098, + "acc_norm": 0.5515695067264574, + "acc_norm_stderr": 0.03337883736255098 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5572519083969466, + "acc_stderr": 0.04356447202665069, + "acc_norm": 0.5572519083969466, + "acc_norm_stderr": 0.04356447202665069 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.48, + "acc_stderr": 0.05021167315686779, + "acc_norm": 0.48, + "acc_norm_stderr": 0.05021167315686779 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6919191919191919, + "acc_stderr": 0.032894773300986134, + "acc_norm": 0.6919191919191919, + "acc_norm_stderr": 0.032894773300986134 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5172413793103449, + "acc_stderr": 0.04164188720169375, + "acc_norm": 0.5172413793103449, + "acc_norm_stderr": 0.04164188720169375 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.043364327079931785, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.043364327079931785 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5378151260504201, + "acc_stderr": 
0.0323854694875898, + "acc_norm": 0.5378151260504201, + "acc_norm_stderr": 0.0323854694875898 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5230769230769231, + "acc_stderr": 0.02532399086173626, + "acc_norm": 0.5230769230769231, + "acc_norm_stderr": 0.02532399086173626 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.62, + "acc_stderr": 0.0487831731214563, + "acc_norm": 0.62, + "acc_norm_stderr": 0.0487831731214563 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5925925925925926, + "acc_stderr": 0.04750077341199984, + "acc_norm": 0.5925925925925926, + "acc_norm_stderr": 0.04750077341199984 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3645320197044335, + "acc_stderr": 0.0338640574606209, + "acc_norm": 0.3645320197044335, + "acc_norm_stderr": 0.0338640574606209 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5580645161290323, + "acc_stderr": 0.02825155790684974, + "acc_norm": 0.5580645161290323, + "acc_norm_stderr": 0.02825155790684974 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7863247863247863, + "acc_stderr": 0.026853450377009144, + "acc_norm": 0.7863247863247863, + "acc_norm_stderr": 0.026853450377009144 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5018867924528302, + "acc_stderr": 0.030772653642075664, + "acc_norm": 0.5018867924528302, + "acc_norm_stderr": 0.030772653642075664 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5818181818181818, + "acc_stderr": 0.04724577405731572, + "acc_norm": 0.5818181818181818, + "acc_norm_stderr": 0.04724577405731572 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.32592592592592595, + "acc_stderr": 0.028578348365473072, + "acc_norm": 0.32592592592592595, + "acc_norm_stderr": 0.028578348365473072 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 
0.36423841059602646, + "acc_stderr": 0.03929111781242741, + "acc_norm": 0.36423841059602646, + "acc_norm_stderr": 0.03929111781242741 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6965174129353234, + "acc_stderr": 0.03251006816458619, + "acc_norm": 0.6965174129353234, + "acc_norm_stderr": 0.03251006816458619 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4393063583815029, + "acc_stderr": 0.037842719328874674, + "acc_norm": 0.4393063583815029, + "acc_norm_stderr": 0.037842719328874674 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.32275132275132273, + "acc_stderr": 0.024078943243597016, + "acc_norm": 0.32275132275132273, + "acc_norm_stderr": 0.024078943243597016 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4652777777777778, + "acc_stderr": 0.04171115858181618, + "acc_norm": 0.4652777777777778, + "acc_norm_stderr": 0.04171115858181618 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.76, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.76, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5664739884393064, + "acc_stderr": 0.026680134761679217, + "acc_norm": 0.5664739884393064, + "acc_norm_stderr": 0.026680134761679217 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.558282208588957, + "acc_stderr": 0.03901591825836184, + "acc_norm": 0.558282208588957, + "acc_norm_stderr": 0.03901591825836184 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5648148148148148, + "acc_stderr": 0.027586006221607708, + "acc_norm": 0.5648148148148148, + "acc_norm_stderr": 0.027586006221607708 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { 
+ "acc": 0.6839378238341969, + "acc_stderr": 0.033553973696861736, + "acc_norm": 0.6839378238341969, + "acc_norm_stderr": 0.033553973696861736 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3508771929824561, + "acc_stderr": 0.044895393502706986, + "acc_norm": 0.3508771929824561, + "acc_norm_stderr": 0.044895393502706986 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6293577981651376, + "acc_stderr": 0.020707458164352984, + "acc_norm": 0.6293577981651376, + "acc_norm_stderr": 0.020707458164352984 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04216370213557835, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04216370213557835 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5424836601307189, + "acc_stderr": 0.028526383452142638, + "acc_norm": 0.5424836601307189, + "acc_norm_stderr": 0.028526383452142638 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.57, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.57, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7024793388429752, + "acc_stderr": 0.04173349148083498, + "acc_norm": 0.7024793388429752, + "acc_norm_stderr": 0.04173349148083498 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5526315789473685, + "acc_stderr": 0.04046336883978249, + "acc_norm": 0.5526315789473685, + "acc_norm_stderr": 0.04046336883978249 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.48366013071895425, + "acc_stderr": 0.020217030653186446, + "acc_norm": 0.48366013071895425, + "acc_norm_stderr": 0.020217030653186446 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.36879432624113473, + "acc_stderr": 0.028782227561347243, + "acc_norm": 0.36879432624113473, + "acc_norm_stderr": 0.028782227561347243 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.36607142857142855, + "acc_stderr": 0.0457237235873743, + "acc_norm": 0.36607142857142855, + "acc_norm_stderr": 
0.0457237235873743 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.49537037037037035, + "acc_stderr": 0.03409825519163572, + "acc_norm": 0.49537037037037035, + "acc_norm_stderr": 0.03409825519163572 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23687150837988827, + "acc_stderr": 0.014219570788103986, + "acc_norm": 0.23687150837988827, + "acc_norm_stderr": 0.014219570788103986 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.64, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.64, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.46691176470588236, + "acc_stderr": 0.03030625772246832, + "acc_norm": 0.46691176470588236, + "acc_norm_stderr": 0.03030625772246832 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4857142857142857, + "acc_stderr": 0.031996152328062875, + "acc_norm": 0.4857142857142857, + "acc_norm_stderr": 0.031996152328062875 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.70042194092827, + "acc_stderr": 0.029818024749753095, + "acc_norm": 0.70042194092827, + "acc_norm_stderr": 0.029818024749753095 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.38005215123859193, + "acc_stderr": 0.012397328205137809, + "acc_norm": 0.38005215123859193, + "acc_norm_stderr": 0.012397328205137809 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6176470588235294, + "acc_stderr": 0.03410785338904719, + "acc_norm": 0.6176470588235294, + "acc_norm_stderr": 0.03410785338904719 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6242424242424243, + "acc_stderr": 0.03781887353205982, + "acc_norm": 0.6242424242424243, + "acc_norm_stderr": 0.03781887353205982 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2533659730722154, + "mc1_stderr": 
0.015225899340826809, + "mc2": 0.40323869513697297, + "mc2_stderr": 0.015107767257714202 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.358913813459268, + "acc_stderr": 0.016491802102999036, + "acc_norm": 0.4462809917355372, + "acc_norm_stderr": 0.017090852631668332 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + 
"harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "gwonny/KoSOLAR-10.7B-kolon", + "model_sha": "27fe16a49d99387596b83ab72b4b568a7d88c832", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/gwonny/Llama-3-8B-kolon-ITD-5-v1.0/result_2024-04-23 04:39:44.json b/gwonny/Llama-3-8B-kolon-ITD-5-v1.0/result_2024-04-23 04:39:44.json new file mode 100644 index 0000000000000000000000000000000000000000..a91aa7c7919818c73029717eacfd49ca623cdb87 --- /dev/null +++ b/gwonny/Llama-3-8B-kolon-ITD-5-v1.0/result_2024-04-23 04:39:44.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.37457337883959047, + "acc_stderr": 0.014144193471893456, + "acc_norm": 0.43686006825938567, + "acc_norm_stderr": 0.014494421584256517 + }, + "harness|ko_hellaswag|10": { + "acc": 0.38856801433977295, + "acc_stderr": 0.004864286176731828, + "acc_norm": 
0.5236008763194583, + "acc_norm_stderr": 0.004984219681732658 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6783625730994152, + "acc_stderr": 0.03582529442573122, + "acc_norm": 0.6783625730994152, + "acc_norm_stderr": 0.03582529442573122 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.7281553398058253, + "acc_stderr": 0.044052680241409216, + "acc_norm": 0.7281553398058253, + "acc_norm_stderr": 0.044052680241409216 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.565772669220945, + "acc_stderr": 0.01772458938967779, + "acc_norm": 0.565772669220945, + "acc_norm_stderr": 0.01772458938967779 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.04244633238353229, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.04244633238353229 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542124, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542124 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4808510638297872, + "acc_stderr": 0.032662042990646775, + "acc_norm": 0.4808510638297872, + "acc_norm_stderr": 0.032662042990646775 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4819277108433735, + "acc_stderr": 0.038899512528272166, + "acc_norm": 0.4819277108433735, + "acc_norm_stderr": 0.038899512528272166 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5562700964630225, + "acc_stderr": 0.028217683556652308, + "acc_norm": 0.5562700964630225, + "acc_norm_stderr": 0.028217683556652308 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5246636771300448, + "acc_stderr": 0.03351695167652628, + "acc_norm": 0.5246636771300448, + "acc_norm_stderr": 0.03351695167652628 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5648854961832062, + "acc_stderr": 0.04348208051644858, + "acc_norm": 0.5648854961832062, + "acc_norm_stderr": 0.04348208051644858 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 
0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6818181818181818, + "acc_stderr": 0.03318477333845331, + "acc_norm": 0.6818181818181818, + "acc_norm_stderr": 0.03318477333845331 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5586206896551724, + "acc_stderr": 0.04137931034482757, + "acc_norm": 0.5586206896551724, + "acc_norm_stderr": 0.04137931034482757 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.04533838195929777, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.04533838195929777 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5504201680672269, + "acc_stderr": 0.03231293497137707, + "acc_norm": 0.5504201680672269, + "acc_norm_stderr": 0.03231293497137707 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5307692307692308, + "acc_stderr": 0.025302958890850154, + "acc_norm": 0.5307692307692308, + "acc_norm_stderr": 0.025302958890850154 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.04793724854411018, + "acc_norm": 0.35, + "acc_norm_stderr": 0.04793724854411018 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6111111111111112, + "acc_stderr": 0.0471282125742677, + "acc_norm": 0.6111111111111112, + "acc_norm_stderr": 0.0471282125742677 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.43842364532019706, + "acc_stderr": 0.03491207857486518, + "acc_norm": 0.43842364532019706, + "acc_norm_stderr": 0.03491207857486518 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5387096774193548, + "acc_stderr": 0.028358634859836942, + "acc_norm": 0.5387096774193548, + "acc_norm_stderr": 0.028358634859836942 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7735042735042735, + "acc_stderr": 
0.027421007295392926, + "acc_norm": 0.7735042735042735, + "acc_norm_stderr": 0.027421007295392926 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.49433962264150944, + "acc_stderr": 0.030770900763851302, + "acc_norm": 0.49433962264150944, + "acc_norm_stderr": 0.030770900763851302 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5727272727272728, + "acc_stderr": 0.047381987035454834, + "acc_norm": 0.5727272727272728, + "acc_norm_stderr": 0.047381987035454834 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.4, + "acc_stderr": 0.029869605095316904, + "acc_norm": 0.4, + "acc_norm_stderr": 0.029869605095316904 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3708609271523179, + "acc_stderr": 0.03943966699183629, + "acc_norm": 0.3708609271523179, + "acc_norm_stderr": 0.03943966699183629 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.681592039800995, + "acc_stderr": 0.03294118479054096, + "acc_norm": 0.681592039800995, + "acc_norm_stderr": 0.03294118479054096 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.45664739884393063, + "acc_stderr": 0.03798106566014498, + "acc_norm": 0.45664739884393063, + "acc_norm_stderr": 0.03798106566014498 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.36772486772486773, + "acc_stderr": 0.024833839825562424, + "acc_norm": 0.36772486772486773, + "acc_norm_stderr": 0.024833839825562424 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5069444444444444, + "acc_stderr": 0.04180806750294938, + "acc_norm": 0.5069444444444444, + "acc_norm_stderr": 0.04180806750294938 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.71, + "acc_stderr": 0.04560480215720683, + "acc_norm": 0.71, + "acc_norm_stderr": 0.04560480215720683 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5780346820809249, + 
"acc_stderr": 0.02658923114217426, + "acc_norm": 0.5780346820809249, + "acc_norm_stderr": 0.02658923114217426 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.48466257668711654, + "acc_stderr": 0.039265223787088424, + "acc_norm": 0.48466257668711654, + "acc_norm_stderr": 0.039265223787088424 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5432098765432098, + "acc_stderr": 0.027716661650194038, + "acc_norm": 0.5432098765432098, + "acc_norm_stderr": 0.027716661650194038 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6424870466321243, + "acc_stderr": 0.034588160421810114, + "acc_norm": 0.6424870466321243, + "acc_norm_stderr": 0.034588160421810114 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3684210526315789, + "acc_stderr": 0.04537815354939391, + "acc_norm": 0.3684210526315789, + "acc_norm_stderr": 0.04537815354939391 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5981651376146789, + "acc_stderr": 0.02102010617299701, + "acc_norm": 0.5981651376146789, + "acc_norm_stderr": 0.02102010617299701 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.42063492063492064, + "acc_stderr": 0.04415438226743744, + "acc_norm": 0.42063492063492064, + "acc_norm_stderr": 0.04415438226743744 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5588235294117647, + "acc_stderr": 0.028431095444176636, + "acc_norm": 0.5588235294117647, + "acc_norm_stderr": 0.028431095444176636 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.71900826446281, + "acc_stderr": 0.04103203830514511, + "acc_norm": 0.71900826446281, + "acc_norm_stderr": 0.04103203830514511 + }, + "harness|ko_mmlu_astronomy|5": { + 
"acc": 0.5460526315789473, + "acc_stderr": 0.04051646342874144, + "acc_norm": 0.5460526315789473, + "acc_norm_stderr": 0.04051646342874144 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.44607843137254904, + "acc_stderr": 0.020109864547181357, + "acc_norm": 0.44607843137254904, + "acc_norm_stderr": 0.020109864547181357 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.028121636040639882, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.028121636040639882 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.45535714285714285, + "acc_stderr": 0.04726835553719099, + "acc_norm": 0.45535714285714285, + "acc_norm_stderr": 0.04726835553719099 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.034076320938540516, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.034076320938540516 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2871508379888268, + "acc_stderr": 0.015131608849963766, + "acc_norm": 0.2871508379888268, + "acc_norm_stderr": 0.015131608849963766 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.0498887651569859, + "acc_norm": 0.44, + "acc_norm_stderr": 0.0498887651569859 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.62, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.62, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4227941176470588, + "acc_stderr": 0.030008562845003483, + "acc_norm": 0.4227941176470588, + "acc_norm_stderr": 0.030008562845003483 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5714285714285714, + "acc_stderr": 0.03168091161233882, + "acc_norm": 0.5714285714285714, + "acc_norm_stderr": 0.03168091161233882 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6835443037974683, + "acc_stderr": 0.03027497488021897, + "acc_norm": 0.6835443037974683, + 
"acc_norm_stderr": 0.03027497488021897 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3670143415906128, + "acc_stderr": 0.012310264244842136, + "acc_norm": 0.3670143415906128, + "acc_norm_stderr": 0.012310264244842136 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6029411764705882, + "acc_stderr": 0.0343413116471913, + "acc_norm": 0.6029411764705882, + "acc_norm_stderr": 0.0343413116471913 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6242424242424243, + "acc_stderr": 0.03781887353205982, + "acc_norm": 0.6242424242424243, + "acc_norm_stderr": 0.03781887353205982 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2766217870257038, + "mc1_stderr": 0.01565960575532691, + "mc2": 0.4437004527611263, + "mc2_stderr": 0.015135652903384445 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4604486422668241, + "acc_stderr": 0.017136487626049846, + "acc_norm": 0.5962219598583235, + "acc_norm_stderr": 0.016869031540298632 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + 
"harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "gwonny/Llama-3-8B-kolon-ITD-5-v1.0", + "model_sha": "44c77f133f69316ebf59aeb8e5324aed8c6b9bec", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ 
No newline at end of file diff --git a/gwonny/llama-2-koen-13b-QLoRA-NEFTune-kolon-v0.1/result_2024-03-22 01:07:28.json b/gwonny/llama-2-koen-13b-QLoRA-NEFTune-kolon-v0.1/result_2024-03-22 01:07:28.json new file mode 100644 index 0000000000000000000000000000000000000000..b0e3a796a36e20b61454b180918a4efcbde26522 --- /dev/null +++ b/gwonny/llama-2-koen-13b-QLoRA-NEFTune-kolon-v0.1/result_2024-03-22 01:07:28.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3660409556313993, + "acc_stderr": 0.014077223108470139, + "acc_norm": 0.4351535836177474, + "acc_norm_stderr": 0.01448798619718605 + }, + "harness|ko_hellaswag|10": { + "acc": 0.41057558255327625, + "acc_stderr": 0.0049093289929150715, + "acc_norm": 0.5548695478988249, + "acc_norm_stderr": 0.004959645263390236 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.45614035087719296, + "acc_stderr": 0.03820042586602966, + "acc_norm": 0.45614035087719296, + "acc_norm_stderr": 0.03820042586602966 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.3786407766990291, + "acc_stderr": 0.04802694698258973, + "acc_norm": 0.3786407766990291, + "acc_norm_stderr": 0.04802694698258973 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5057471264367817, + "acc_stderr": 0.01787878232612923, + "acc_norm": 0.5057471264367817, + "acc_norm_stderr": 0.01787878232612923 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.5111111111111111, + "acc_stderr": 0.04318275491977976, + "acc_norm": 0.5111111111111111, + "acc_norm_stderr": 0.04318275491977976 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206824, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206824 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39148936170212767, + "acc_stderr": 0.03190701242326812, + "acc_norm": 0.39148936170212767, + "acc_norm_stderr": 0.03190701242326812 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39759036144578314, + "acc_stderr": 
0.038099730845402184, + "acc_norm": 0.39759036144578314, + "acc_norm_stderr": 0.038099730845402184 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4630225080385852, + "acc_stderr": 0.028320325830105908, + "acc_norm": 0.4630225080385852, + "acc_norm_stderr": 0.028320325830105908 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4618834080717489, + "acc_stderr": 0.033460150119732274, + "acc_norm": 0.4618834080717489, + "acc_norm_stderr": 0.033460150119732274 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3969465648854962, + "acc_stderr": 0.04291135671009224, + "acc_norm": 0.3969465648854962, + "acc_norm_stderr": 0.04291135671009224 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.4393939393939394, + "acc_stderr": 0.035360859475294805, + "acc_norm": 0.4393939393939394, + "acc_norm_stderr": 0.035360859475294805 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.38620689655172413, + "acc_stderr": 0.04057324734419035, + "acc_norm": 0.38620689655172413, + "acc_norm_stderr": 0.04057324734419035 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.041583075330832865, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.041583075330832865 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.41596638655462187, + "acc_stderr": 0.03201650100739615, + "acc_norm": 0.41596638655462187, + "acc_norm_stderr": 0.03201650100739615 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.382051282051282, + "acc_stderr": 0.024635549163908223, + "acc_norm": 0.382051282051282, + "acc_norm_stderr": 0.024635549163908223 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.47, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.47, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_global_facts|5": 
{ + "acc": 0.37, + "acc_stderr": 0.04852365870939098, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939098 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.42592592592592593, + "acc_stderr": 0.0478034362693679, + "acc_norm": 0.42592592592592593, + "acc_norm_stderr": 0.0478034362693679 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.35960591133004927, + "acc_stderr": 0.03376458246509567, + "acc_norm": 0.35960591133004927, + "acc_norm_stderr": 0.03376458246509567 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3935483870967742, + "acc_stderr": 0.02779187875313227, + "acc_norm": 0.3935483870967742, + "acc_norm_stderr": 0.02779187875313227 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6025641025641025, + "acc_stderr": 0.03205953453789293, + "acc_norm": 0.6025641025641025, + "acc_norm_stderr": 0.03205953453789293 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3886792452830189, + "acc_stderr": 0.030000485448675986, + "acc_norm": 0.3886792452830189, + "acc_norm_stderr": 0.030000485448675986 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.42727272727272725, + "acc_stderr": 0.04738198703545483, + "acc_norm": 0.42727272727272725, + "acc_norm_stderr": 0.04738198703545483 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.28888888888888886, + "acc_stderr": 0.027634907264178544, + "acc_norm": 0.28888888888888886, + "acc_norm_stderr": 0.027634907264178544 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.26490066225165565, + "acc_stderr": 0.03603038545360384, + "acc_norm": 0.26490066225165565, + "acc_norm_stderr": 0.03603038545360384 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.46766169154228854, + "acc_stderr": 0.03528131472933607, + "acc_norm": 0.46766169154228854, + "acc_norm_stderr": 0.03528131472933607 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3179190751445087, + "acc_stderr": 0.035506839891655796, + "acc_norm": 0.3179190751445087, + "acc_norm_stderr": 
0.035506839891655796 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.26455026455026454, + "acc_stderr": 0.022717467897708617, + "acc_norm": 0.26455026455026454, + "acc_norm_stderr": 0.022717467897708617 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3541666666666667, + "acc_stderr": 0.039994111357535424, + "acc_norm": 0.3541666666666667, + "acc_norm_stderr": 0.039994111357535424 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.56, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.56, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.41040462427745666, + "acc_stderr": 0.026483392042098177, + "acc_norm": 0.41040462427745666, + "acc_norm_stderr": 0.026483392042098177 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3496932515337423, + "acc_stderr": 0.037466683254700206, + "acc_norm": 0.3496932515337423, + "acc_norm_stderr": 0.037466683254700206 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.43209876543209874, + "acc_stderr": 0.02756301097160668, + "acc_norm": 0.43209876543209874, + "acc_norm_stderr": 0.02756301097160668 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.41450777202072536, + "acc_stderr": 0.03555300319557673, + "acc_norm": 0.41450777202072536, + "acc_norm_stderr": 0.03555300319557673 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.20175438596491227, + "acc_stderr": 0.03775205013583639, + "acc_norm": 0.20175438596491227, + "acc_norm_stderr": 0.03775205013583639 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.46422018348623856, + "acc_stderr": 0.021382364775701893, + "acc_norm": 
0.46422018348623856, + "acc_norm_stderr": 0.021382364775701893 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3253968253968254, + "acc_stderr": 0.04190596438871136, + "acc_norm": 0.3253968253968254, + "acc_norm_stderr": 0.04190596438871136 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.027826109307283686, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.027826109307283686 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5785123966942148, + "acc_stderr": 0.04507732278775089, + "acc_norm": 0.5785123966942148, + "acc_norm_stderr": 0.04507732278775089 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3026315789473684, + "acc_stderr": 0.037385206761196686, + "acc_norm": 0.3026315789473684, + "acc_norm_stderr": 0.037385206761196686 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.35294117647058826, + "acc_stderr": 0.019333142020797063, + "acc_norm": 0.35294117647058826, + "acc_norm_stderr": 0.019333142020797063 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2907801418439716, + "acc_stderr": 0.027090664368353178, + "acc_norm": 0.2907801418439716, + "acc_norm_stderr": 0.027090664368353178 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2767857142857143, + "acc_stderr": 0.04246624336697624, + "acc_norm": 0.2767857142857143, + "acc_norm_stderr": 0.04246624336697624 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3425925925925926, + "acc_stderr": 0.03236585252602158, + "acc_norm": 0.3425925925925926, + "acc_norm_stderr": 0.03236585252602158 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + 
"acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.375, + "acc_stderr": 0.029408372932278746, + "acc_norm": 0.375, + "acc_norm_stderr": 0.029408372932278746 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.47346938775510206, + "acc_stderr": 0.03196412734523272, + "acc_norm": 0.47346938775510206, + "acc_norm_stderr": 0.03196412734523272 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5822784810126582, + "acc_stderr": 0.032103530322412685, + "acc_norm": 0.5822784810126582, + "acc_norm_stderr": 0.032103530322412685 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.29139504563233376, + "acc_stderr": 0.011605720214257612, + "acc_norm": 0.29139504563233376, + "acc_norm_stderr": 0.011605720214257612 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.3431372549019608, + "acc_stderr": 0.033321399446680854, + "acc_norm": 0.3431372549019608, + "acc_norm_stderr": 0.033321399446680854 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.45454545454545453, + "acc_stderr": 0.038881769216741004, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.038881769216741004 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.23011015911872704, + "mc1_stderr": 0.014734557959807763, + "mc2": 0.3801847748030492, + "mc2_stderr": 0.014700936791269574 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4085005903187721, + "acc_stderr": 0.016900062879427115, + "acc_norm": 0.5218417945690673, + "acc_norm_stderr": 0.01717394447429438 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 
1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + 
"harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "gwonny/llama-2-koen-13b-QLoRA-NEFTune-kolon-v0.1", + "model_sha": "e12ff53ce8862efdccd30b302f3028d47e482880", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/gwonny/llama-2-koen-13b-QLoRA-NEFTune-kolon/result_2024-03-22 01:06:08.json b/gwonny/llama-2-koen-13b-QLoRA-NEFTune-kolon/result_2024-03-22 01:06:08.json new file mode 100644 index 0000000000000000000000000000000000000000..afc79dc74dfe895c3228b1525521168ff239ebed --- /dev/null +++ b/gwonny/llama-2-koen-13b-QLoRA-NEFTune-kolon/result_2024-03-22 01:06:08.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3660409556313993, + "acc_stderr": 0.014077223108470139, + "acc_norm": 0.4351535836177474, + "acc_norm_stderr": 0.01448798619718605 + }, + "harness|ko_hellaswag|10": { + "acc": 0.41057558255327625, + "acc_stderr": 0.0049093289929150715, + "acc_norm": 0.5548695478988249, + "acc_norm_stderr": 0.004959645263390236 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.45614035087719296, + "acc_stderr": 0.03820042586602966, + "acc_norm": 0.45614035087719296, + "acc_norm_stderr": 0.03820042586602966 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.3786407766990291, + "acc_stderr": 0.04802694698258973, + "acc_norm": 0.3786407766990291, + "acc_norm_stderr": 0.04802694698258973 + }, + "harness|ko_mmlu_miscellaneous|5": { + 
"acc": 0.5057471264367817, + "acc_stderr": 0.01787878232612923, + "acc_norm": 0.5057471264367817, + "acc_norm_stderr": 0.01787878232612923 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.5111111111111111, + "acc_stderr": 0.04318275491977976, + "acc_norm": 0.5111111111111111, + "acc_norm_stderr": 0.04318275491977976 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206824, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206824 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39148936170212767, + "acc_stderr": 0.03190701242326812, + "acc_norm": 0.39148936170212767, + "acc_norm_stderr": 0.03190701242326812 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39759036144578314, + "acc_stderr": 0.038099730845402184, + "acc_norm": 0.39759036144578314, + "acc_norm_stderr": 0.038099730845402184 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4630225080385852, + "acc_stderr": 0.028320325830105908, + "acc_norm": 0.4630225080385852, + "acc_norm_stderr": 0.028320325830105908 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4618834080717489, + "acc_stderr": 0.033460150119732274, + "acc_norm": 0.4618834080717489, + "acc_norm_stderr": 0.033460150119732274 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3969465648854962, + "acc_stderr": 0.04291135671009224, + "acc_norm": 0.3969465648854962, + "acc_norm_stderr": 0.04291135671009224 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.4393939393939394, + "acc_stderr": 0.035360859475294805, + "acc_norm": 0.4393939393939394, + "acc_norm_stderr": 0.035360859475294805 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.38620689655172413, + "acc_stderr": 0.04057324734419035, + "acc_norm": 0.38620689655172413, + "acc_norm_stderr": 0.04057324734419035 + }, + 
"harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.041583075330832865, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.041583075330832865 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.41596638655462187, + "acc_stderr": 0.03201650100739615, + "acc_norm": 0.41596638655462187, + "acc_norm_stderr": 0.03201650100739615 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.382051282051282, + "acc_stderr": 0.024635549163908223, + "acc_norm": 0.382051282051282, + "acc_norm_stderr": 0.024635549163908223 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.47, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.47, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939098, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939098 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.42592592592592593, + "acc_stderr": 0.0478034362693679, + "acc_norm": 0.42592592592592593, + "acc_norm_stderr": 0.0478034362693679 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.35960591133004927, + "acc_stderr": 0.03376458246509567, + "acc_norm": 0.35960591133004927, + "acc_norm_stderr": 0.03376458246509567 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3935483870967742, + "acc_stderr": 0.02779187875313227, + "acc_norm": 0.3935483870967742, + "acc_norm_stderr": 0.02779187875313227 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6025641025641025, + "acc_stderr": 0.03205953453789293, + "acc_norm": 0.6025641025641025, + "acc_norm_stderr": 0.03205953453789293 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3886792452830189, + "acc_stderr": 0.030000485448675986, + "acc_norm": 0.3886792452830189, + "acc_norm_stderr": 0.030000485448675986 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.42727272727272725, + "acc_stderr": 0.04738198703545483, + "acc_norm": 0.42727272727272725, + 
"acc_norm_stderr": 0.04738198703545483 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.28888888888888886, + "acc_stderr": 0.027634907264178544, + "acc_norm": 0.28888888888888886, + "acc_norm_stderr": 0.027634907264178544 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.26490066225165565, + "acc_stderr": 0.03603038545360384, + "acc_norm": 0.26490066225165565, + "acc_norm_stderr": 0.03603038545360384 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.46766169154228854, + "acc_stderr": 0.03528131472933607, + "acc_norm": 0.46766169154228854, + "acc_norm_stderr": 0.03528131472933607 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3179190751445087, + "acc_stderr": 0.035506839891655796, + "acc_norm": 0.3179190751445087, + "acc_norm_stderr": 0.035506839891655796 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.26455026455026454, + "acc_stderr": 0.022717467897708617, + "acc_norm": 0.26455026455026454, + "acc_norm_stderr": 0.022717467897708617 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3541666666666667, + "acc_stderr": 0.039994111357535424, + "acc_norm": 0.3541666666666667, + "acc_norm_stderr": 0.039994111357535424 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.56, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.56, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.41040462427745666, + "acc_stderr": 0.026483392042098177, + "acc_norm": 0.41040462427745666, + "acc_norm_stderr": 0.026483392042098177 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3496932515337423, + "acc_stderr": 0.037466683254700206, + "acc_norm": 0.3496932515337423, + "acc_norm_stderr": 0.037466683254700206 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.43209876543209874, + "acc_stderr": 
0.02756301097160668, + "acc_norm": 0.43209876543209874, + "acc_norm_stderr": 0.02756301097160668 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.41450777202072536, + "acc_stderr": 0.03555300319557673, + "acc_norm": 0.41450777202072536, + "acc_norm_stderr": 0.03555300319557673 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.20175438596491227, + "acc_stderr": 0.03775205013583639, + "acc_norm": 0.20175438596491227, + "acc_norm_stderr": 0.03775205013583639 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.46422018348623856, + "acc_stderr": 0.021382364775701893, + "acc_norm": 0.46422018348623856, + "acc_norm_stderr": 0.021382364775701893 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3253968253968254, + "acc_stderr": 0.04190596438871136, + "acc_norm": 0.3253968253968254, + "acc_norm_stderr": 0.04190596438871136 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.027826109307283686, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.027826109307283686 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5785123966942148, + "acc_stderr": 0.04507732278775089, + "acc_norm": 0.5785123966942148, + "acc_norm_stderr": 0.04507732278775089 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3026315789473684, + "acc_stderr": 0.037385206761196686, + "acc_norm": 0.3026315789473684, + "acc_norm_stderr": 0.037385206761196686 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.35294117647058826, + "acc_stderr": 0.019333142020797063, + "acc_norm": 0.35294117647058826, + "acc_norm_stderr": 0.019333142020797063 + }, + 
"harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2907801418439716, + "acc_stderr": 0.027090664368353178, + "acc_norm": 0.2907801418439716, + "acc_norm_stderr": 0.027090664368353178 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2767857142857143, + "acc_stderr": 0.04246624336697624, + "acc_norm": 0.2767857142857143, + "acc_norm_stderr": 0.04246624336697624 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3425925925925926, + "acc_stderr": 0.03236585252602158, + "acc_norm": 0.3425925925925926, + "acc_norm_stderr": 0.03236585252602158 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.375, + "acc_stderr": 0.029408372932278746, + "acc_norm": 0.375, + "acc_norm_stderr": 0.029408372932278746 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.47346938775510206, + "acc_stderr": 0.03196412734523272, + "acc_norm": 0.47346938775510206, + "acc_norm_stderr": 0.03196412734523272 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5822784810126582, + "acc_stderr": 0.032103530322412685, + "acc_norm": 0.5822784810126582, + "acc_norm_stderr": 0.032103530322412685 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.29139504563233376, + "acc_stderr": 0.011605720214257612, + "acc_norm": 0.29139504563233376, + "acc_norm_stderr": 0.011605720214257612 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.3431372549019608, + "acc_stderr": 0.033321399446680854, + "acc_norm": 0.3431372549019608, + "acc_norm_stderr": 
0.033321399446680854 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.45454545454545453, + "acc_stderr": 0.038881769216741004, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.038881769216741004 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.23011015911872704, + "mc1_stderr": 0.014734557959807763, + "mc2": 0.3801847748030492, + "mc2_stderr": 0.014700936791269574 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4085005903187721, + "acc_stderr": 0.016900062879427115, + "acc_norm": 0.5218417945690673, + "acc_norm_stderr": 0.01717394447429438 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + 
"harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "gwonny/llama-2-koen-13b-QLoRA-NEFTune-kolon", + "model_sha": "e12ff53ce8862efdccd30b302f3028d47e482880", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/gwonny/llama-2-koen-13b-kolon/result_2024-03-11 04:39:08.json b/gwonny/llama-2-koen-13b-kolon/result_2024-03-11 04:39:08.json new file mode 100644 index 0000000000000000000000000000000000000000..7cb941cbdf58171ac6ea8aab254b2269df7da14a --- /dev/null +++ b/gwonny/llama-2-koen-13b-kolon/result_2024-03-11 04:39:08.json @@ -0,0 +1,444 @@ +{ + 
"results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3660409556313993, + "acc_stderr": 0.014077223108470139, + "acc_norm": 0.43600682593856654, + "acc_norm_stderr": 0.014491225699230916 + }, + "harness|ko_hellaswag|10": { + "acc": 0.41047600079665403, + "acc_stderr": 0.004909148239488281, + "acc_norm": 0.5548695478988249, + "acc_norm_stderr": 0.004959645263390236 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4619883040935672, + "acc_stderr": 0.03823727092882307, + "acc_norm": 0.4619883040935672, + "acc_norm_stderr": 0.03823727092882307 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.3786407766990291, + "acc_stderr": 0.04802694698258973, + "acc_norm": 0.3786407766990291, + "acc_norm_stderr": 0.04802694698258973 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5070242656449553, + "acc_stderr": 0.017878199003432214, + "acc_norm": 0.5070242656449553, + "acc_norm_stderr": 0.017878199003432214 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.5037037037037037, + "acc_stderr": 0.043192236258113324, + "acc_norm": 0.5037037037037037, + "acc_norm_stderr": 0.043192236258113324 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206824, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206824 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39574468085106385, + "acc_stderr": 0.031967586978353627, + "acc_norm": 0.39574468085106385, + "acc_norm_stderr": 0.031967586978353627 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39759036144578314, + "acc_stderr": 0.038099730845402184, + "acc_norm": 0.39759036144578314, + "acc_norm_stderr": 0.038099730845402184 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.45980707395498394, + "acc_stderr": 0.028306190403305696, + "acc_norm": 0.45980707395498394, + "acc_norm_stderr": 0.028306190403305696 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4618834080717489, + "acc_stderr": 0.033460150119732274, + "acc_norm": 0.4618834080717489, + "acc_norm_stderr": 
0.033460150119732274 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3969465648854962, + "acc_stderr": 0.04291135671009224, + "acc_norm": 0.3969465648854962, + "acc_norm_stderr": 0.04291135671009224 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.4393939393939394, + "acc_stderr": 0.035360859475294805, + "acc_norm": 0.4393939393939394, + "acc_norm_stderr": 0.035360859475294805 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.38620689655172413, + "acc_stderr": 0.04057324734419035, + "acc_norm": 0.38620689655172413, + "acc_norm_stderr": 0.04057324734419035 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.041583075330832865, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.041583075330832865 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.41596638655462187, + "acc_stderr": 0.03201650100739615, + "acc_norm": 0.41596638655462187, + "acc_norm_stderr": 0.03201650100739615 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.37948717948717947, + "acc_stderr": 0.024603626924097417, + "acc_norm": 0.37948717948717947, + "acc_norm_stderr": 0.024603626924097417 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.47, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.47, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939098, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939098 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4351851851851852, + "acc_stderr": 0.04792898170907061, + "acc_norm": 0.4351851851851852, + "acc_norm_stderr": 0.04792898170907061 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.35960591133004927, + "acc_stderr": 0.03376458246509567, + "acc_norm": 
0.35960591133004927, + "acc_norm_stderr": 0.03376458246509567 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3967741935483871, + "acc_stderr": 0.02783123160576795, + "acc_norm": 0.3967741935483871, + "acc_norm_stderr": 0.02783123160576795 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6025641025641025, + "acc_stderr": 0.03205953453789293, + "acc_norm": 0.6025641025641025, + "acc_norm_stderr": 0.03205953453789293 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3886792452830189, + "acc_stderr": 0.030000485448675986, + "acc_norm": 0.3886792452830189, + "acc_norm_stderr": 0.030000485448675986 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.42727272727272725, + "acc_stderr": 0.04738198703545483, + "acc_norm": 0.42727272727272725, + "acc_norm_stderr": 0.04738198703545483 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.28888888888888886, + "acc_stderr": 0.027634907264178544, + "acc_norm": 0.28888888888888886, + "acc_norm_stderr": 0.027634907264178544 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.271523178807947, + "acc_stderr": 0.03631329803969653, + "acc_norm": 0.271523178807947, + "acc_norm_stderr": 0.03631329803969653 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.46766169154228854, + "acc_stderr": 0.03528131472933607, + "acc_norm": 0.46766169154228854, + "acc_norm_stderr": 0.03528131472933607 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.31213872832369943, + "acc_stderr": 0.035331333893236574, + "acc_norm": 0.31213872832369943, + "acc_norm_stderr": 0.035331333893236574 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.26455026455026454, + "acc_stderr": 0.022717467897708617, + "acc_norm": 0.26455026455026454, + "acc_norm_stderr": 0.022717467897708617 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3541666666666667, + "acc_stderr": 0.039994111357535424, + "acc_norm": 0.3541666666666667, + "acc_norm_stderr": 0.039994111357535424 + }, + 
"harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.56, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.56, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.41040462427745666, + "acc_stderr": 0.026483392042098177, + "acc_norm": 0.41040462427745666, + "acc_norm_stderr": 0.026483392042098177 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3496932515337423, + "acc_stderr": 0.037466683254700206, + "acc_norm": 0.3496932515337423, + "acc_norm_stderr": 0.037466683254700206 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.43209876543209874, + "acc_stderr": 0.02756301097160668, + "acc_norm": 0.43209876543209874, + "acc_norm_stderr": 0.02756301097160668 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.41450777202072536, + "acc_stderr": 0.03555300319557673, + "acc_norm": 0.41450777202072536, + "acc_norm_stderr": 0.03555300319557673 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.20175438596491227, + "acc_stderr": 0.03775205013583639, + "acc_norm": 0.20175438596491227, + "acc_norm_stderr": 0.03775205013583639 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.46422018348623856, + "acc_stderr": 0.021382364775701893, + "acc_norm": 0.46422018348623856, + "acc_norm_stderr": 0.021382364775701893 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3253968253968254, + "acc_stderr": 0.04190596438871136, + "acc_norm": 0.3253968253968254, + "acc_norm_stderr": 0.04190596438871136 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.38562091503267976, + "acc_stderr": 0.027870745278290317, + "acc_norm": 0.38562091503267976, + "acc_norm_stderr": 
0.027870745278290317 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5785123966942148, + "acc_stderr": 0.04507732278775089, + "acc_norm": 0.5785123966942148, + "acc_norm_stderr": 0.04507732278775089 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3026315789473684, + "acc_stderr": 0.037385206761196686, + "acc_norm": 0.3026315789473684, + "acc_norm_stderr": 0.037385206761196686 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.34967320261437906, + "acc_stderr": 0.01929196189506637, + "acc_norm": 0.34967320261437906, + "acc_norm_stderr": 0.01929196189506637 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2907801418439716, + "acc_stderr": 0.027090664368353178, + "acc_norm": 0.2907801418439716, + "acc_norm_stderr": 0.027090664368353178 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.04287858751340456, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.04287858751340456 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3425925925925926, + "acc_stderr": 0.03236585252602158, + "acc_norm": 0.3425925925925926, + "acc_norm_stderr": 0.03236585252602158 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3713235294117647, + "acc_stderr": 0.02934980313976587, + "acc_norm": 0.3713235294117647, + 
"acc_norm_stderr": 0.02934980313976587 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.47346938775510206, + "acc_stderr": 0.03196412734523272, + "acc_norm": 0.47346938775510206, + "acc_norm_stderr": 0.03196412734523272 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5822784810126582, + "acc_stderr": 0.032103530322412685, + "acc_norm": 0.5822784810126582, + "acc_norm_stderr": 0.032103530322412685 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2920469361147327, + "acc_stderr": 0.011613349136271808, + "acc_norm": 0.2920469361147327, + "acc_norm_stderr": 0.011613349136271808 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.3431372549019608, + "acc_stderr": 0.033321399446680854, + "acc_norm": 0.3431372549019608, + "acc_norm_stderr": 0.033321399446680854 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.45454545454545453, + "acc_stderr": 0.038881769216741004, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.038881769216741004 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.23011015911872704, + "mc1_stderr": 0.014734557959807763, + "mc2": 0.38024130663422817, + "mc2_stderr": 0.01470030189463877 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.40968122786304606, + "acc_stderr": 0.01690756819221947, + "acc_norm": 0.5242030696576151, + "acc_norm_stderr": 0.017170202466520748 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + 
"harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + 
"harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "gwonny/llama-2-koen-13b-kolon", + "model_sha": "838e67c00401eaedc0cca0041700bc5ba22d0b0b", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/gwonny/nox-solar-10.7b-v4-kolon-ITD-5-v1.0/result_2024-04-02 00:08:42.json b/gwonny/nox-solar-10.7b-v4-kolon-ITD-5-v1.0/result_2024-04-02 00:08:42.json new file mode 100644 index 0000000000000000000000000000000000000000..848a5de8b20adf42b697b63eed7715263c378641 --- /dev/null +++ b/gwonny/nox-solar-10.7b-v4-kolon-ITD-5-v1.0/result_2024-04-02 00:08:42.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.6979522184300341, + "acc_stderr": 0.013417519144716413, + "acc_norm": 0.7278156996587031, + "acc_norm_stderr": 0.013006600406423709 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4898426608245369, + "acc_stderr": 0.004988751698341145, + "acc_norm": 0.6473809998008365, + "acc_norm_stderr": 0.004768088918512184 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.7192982456140351, + "acc_stderr": 0.034462962170884265, + "acc_norm": 0.7192982456140351, + "acc_norm_stderr": 0.034462962170884265 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.7378640776699029, + "acc_stderr": 0.043546310772605956, + "acc_norm": 0.7378640776699029, + "acc_norm_stderr": 0.043546310772605956 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6973180076628352, + "acc_stderr": 0.016428781581749367, + "acc_norm": 0.6973180076628352, + "acc_norm_stderr": 0.016428781581749367 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.43703703703703706, + "acc_stderr": 0.04284958639753399, + "acc_norm": 0.43703703703703706, + "acc_norm_stderr": 0.04284958639753399 + }, + 
"harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939101, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939101 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.5106382978723404, + "acc_stderr": 0.03267862331014063, + "acc_norm": 0.5106382978723404, + "acc_norm_stderr": 0.03267862331014063 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.5180722891566265, + "acc_stderr": 0.03889951252827216, + "acc_norm": 0.5180722891566265, + "acc_norm_stderr": 0.03889951252827216 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6205787781350482, + "acc_stderr": 0.027559949802347824, + "acc_norm": 0.6205787781350482, + "acc_norm_stderr": 0.027559949802347824 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.6367713004484304, + "acc_stderr": 0.032277904428505, + "acc_norm": 0.6367713004484304, + "acc_norm_stderr": 0.032277904428505 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5877862595419847, + "acc_stderr": 0.04317171194870255, + "acc_norm": 0.5877862595419847, + "acc_norm_stderr": 0.04317171194870255 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.56, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.56, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7474747474747475, + "acc_stderr": 0.030954055470365907, + "acc_norm": 0.7474747474747475, + "acc_norm_stderr": 0.030954055470365907 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5448275862068965, + "acc_stderr": 0.04149886942192117, + "acc_norm": 0.5448275862068965, + "acc_norm_stderr": 0.04149886942192117 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3235294117647059, + "acc_stderr": 0.04655010411319617, + "acc_norm": 0.3235294117647059, + "acc_norm_stderr": 0.04655010411319617 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6176470588235294, + "acc_stderr": 0.031566630992154156, + "acc_norm": 0.6176470588235294, + "acc_norm_stderr": 
0.031566630992154156 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.6333333333333333, + "acc_stderr": 0.024433016466052466, + "acc_norm": 0.6333333333333333, + "acc_norm_stderr": 0.024433016466052466 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.71, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.71, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.04557239513497751, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.04557239513497751 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4433497536945813, + "acc_stderr": 0.03495334582162934, + "acc_norm": 0.4433497536945813, + "acc_norm_stderr": 0.03495334582162934 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6290322580645161, + "acc_stderr": 0.027480541887953593, + "acc_norm": 0.6290322580645161, + "acc_norm_stderr": 0.027480541887953593 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.8376068376068376, + "acc_stderr": 0.024161618127987745, + "acc_norm": 0.8376068376068376, + "acc_norm_stderr": 0.024161618127987745 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5886792452830188, + "acc_stderr": 0.0302850092590098, + "acc_norm": 0.5886792452830188, + "acc_norm_stderr": 0.0302850092590098 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6545454545454545, + "acc_stderr": 0.04554619617541053, + "acc_norm": 0.6545454545454545, + "acc_norm_stderr": 0.04554619617541053 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.37407407407407406, + "acc_stderr": 0.02950286112895529, + "acc_norm": 0.37407407407407406, + "acc_norm_stderr": 0.02950286112895529 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33112582781456956, + "acc_stderr": 0.038425817186598696, + "acc_norm": 
0.33112582781456956, + "acc_norm_stderr": 0.038425817186598696 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7313432835820896, + "acc_stderr": 0.03134328358208954, + "acc_norm": 0.7313432835820896, + "acc_norm_stderr": 0.03134328358208954 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5953757225433526, + "acc_stderr": 0.03742461193887248, + "acc_norm": 0.5953757225433526, + "acc_norm_stderr": 0.03742461193887248 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.4417989417989418, + "acc_stderr": 0.025576257061253833, + "acc_norm": 0.4417989417989418, + "acc_norm_stderr": 0.025576257061253833 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5833333333333334, + "acc_stderr": 0.04122728707651282, + "acc_norm": 0.5833333333333334, + "acc_norm_stderr": 0.04122728707651282 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.45, + "acc_stderr": 0.04999999999999999, + "acc_norm": 0.45, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.75, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.75, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.6098265895953757, + "acc_stderr": 0.026261677607806646, + "acc_norm": 0.6098265895953757, + "acc_norm_stderr": 0.026261677607806646 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.6503067484662577, + "acc_stderr": 0.03746668325470023, + "acc_norm": 0.6503067484662577, + "acc_norm_stderr": 0.03746668325470023 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6759259259259259, + "acc_stderr": 0.02604176620271716, + "acc_norm": 0.6759259259259259, + "acc_norm_stderr": 0.02604176620271716 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7564766839378239, + "acc_stderr": 0.030975436386845436, + 
"acc_norm": 0.7564766839378239, + "acc_norm_stderr": 0.030975436386845436 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.43859649122807015, + "acc_stderr": 0.04668000738510455, + "acc_norm": 0.43859649122807015, + "acc_norm_stderr": 0.04668000738510455 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.7467889908256881, + "acc_stderr": 0.018644073041375053, + "acc_norm": 0.7467889908256881, + "acc_norm_stderr": 0.018644073041375053 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.38095238095238093, + "acc_stderr": 0.043435254289490965, + "acc_norm": 0.38095238095238093, + "acc_norm_stderr": 0.043435254289490965 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.6176470588235294, + "acc_stderr": 0.027826109307283686, + "acc_norm": 0.6176470588235294, + "acc_norm_stderr": 0.027826109307283686 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.64, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.64, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7768595041322314, + "acc_stderr": 0.03800754475228733, + "acc_norm": 0.7768595041322314, + "acc_norm_stderr": 0.03800754475228733 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.6644736842105263, + "acc_stderr": 0.038424985593952674, + "acc_norm": 0.6644736842105263, + "acc_norm_stderr": 0.038424985593952674 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5735294117647058, + "acc_stderr": 0.02000791273935936, + "acc_norm": 0.5735294117647058, + "acc_norm_stderr": 0.02000791273935936 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.4432624113475177, + "acc_stderr": 0.029634838473766006, + "acc_norm": 0.4432624113475177, + "acc_norm_stderr": 0.029634838473766006 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.41964285714285715, + "acc_stderr": 0.04684099321077106, + "acc_norm": 0.41964285714285715, + "acc_norm_stderr": 0.04684099321077106 + }, + "harness|ko_mmlu_high_school_statistics|5": { + 
"acc": 0.5138888888888888, + "acc_stderr": 0.03408655867977748, + "acc_norm": 0.5138888888888888, + "acc_norm_stderr": 0.03408655867977748 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.311731843575419, + "acc_stderr": 0.015491756531894637, + "acc_norm": 0.311731843575419, + "acc_norm_stderr": 0.015491756531894637 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.56, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.56, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.69, + "acc_stderr": 0.046482319871173156, + "acc_norm": 0.69, + "acc_norm_stderr": 0.046482319871173156 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5735294117647058, + "acc_stderr": 0.03004261583271487, + "acc_norm": 0.5735294117647058, + "acc_norm_stderr": 0.03004261583271487 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.7061224489795919, + "acc_stderr": 0.029162738410249765, + "acc_norm": 0.7061224489795919, + "acc_norm_stderr": 0.029162738410249765 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6962025316455697, + "acc_stderr": 0.0299366963871386, + "acc_norm": 0.6962025316455697, + "acc_norm_stderr": 0.0299366963871386 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.44328552803129073, + "acc_stderr": 0.012687818419599917, + "acc_norm": 0.44328552803129073, + "acc_norm_stderr": 0.012687818419599917 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6568627450980392, + "acc_stderr": 0.03332139944668086, + "acc_norm": 0.6568627450980392, + "acc_norm_stderr": 0.03332139944668086 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6424242424242425, + "acc_stderr": 0.03742597043806587, + "acc_norm": 0.6424242424242425, + "acc_norm_stderr": 0.03742597043806587 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.6230110159118727, + "mc1_stderr": 0.01696551757893035, + "mc2": 0.7315639922345355, + "mc2_stderr": 
0.014069270046664476 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5336481700118064, + "acc_stderr": 0.017151384117131872, + "acc_norm": 0.5360094451003542, + "acc_norm_stderr": 0.017145715365486657 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 
1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "gwonny/nox-solar-10.7b-v4-kolon-ITD-5-v1.0", + "model_sha": "03318681c97eed53697cf18c54f014da4acf40e4", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/gwonny/nox-solar-10.7b-v4-kolon-ITD-5-v2.1/result_2024-04-11 04:07:18.json b/gwonny/nox-solar-10.7b-v4-kolon-ITD-5-v2.1/result_2024-04-11 04:07:18.json new file mode 100644 index 0000000000000000000000000000000000000000..4199a5a74fc0dca6a11d87b5aea4a933f99f1b37 --- /dev/null +++ b/gwonny/nox-solar-10.7b-v4-kolon-ITD-5-v2.1/result_2024-04-11 04:07:18.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.6919795221843004, + "acc_stderr": 0.01349142951729204, + "acc_norm": 0.7252559726962458, + "acc_norm_stderr": 0.013044617212771227 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4894443337980482, + "acc_stderr": 0.004988669343786964, + "acc_norm": 0.6469826727743477, + "acc_norm_stderr": 
0.004769313300470236 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.7134502923976608, + "acc_stderr": 0.03467826685703826, + "acc_norm": 0.7134502923976608, + "acc_norm_stderr": 0.03467826685703826 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.7572815533980582, + "acc_stderr": 0.04245022486384495, + "acc_norm": 0.7572815533980582, + "acc_norm_stderr": 0.04245022486384495 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.7011494252873564, + "acc_stderr": 0.01636925681509311, + "acc_norm": 0.7011494252873564, + "acc_norm_stderr": 0.01636925681509311 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45185185185185184, + "acc_stderr": 0.04299268905480863, + "acc_norm": 0.45185185185185184, + "acc_norm_stderr": 0.04299268905480863 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.39, + "acc_stderr": 0.049020713000019756, + "acc_norm": 0.39, + "acc_norm_stderr": 0.049020713000019756 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.5234042553191489, + "acc_stderr": 0.0326501947503358, + "acc_norm": 0.5234042553191489, + "acc_norm_stderr": 0.0326501947503358 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.5120481927710844, + "acc_stderr": 0.03891364495835817, + "acc_norm": 0.5120481927710844, + "acc_norm_stderr": 0.03891364495835817 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6237942122186495, + "acc_stderr": 0.027513925683549427, + "acc_norm": 0.6237942122186495, + "acc_norm_stderr": 0.027513925683549427 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.6457399103139013, + "acc_stderr": 0.03210062154134987, + "acc_norm": 0.6457399103139013, + "acc_norm_stderr": 0.03210062154134987 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5954198473282443, + "acc_stderr": 0.043046937953806645, + "acc_norm": 0.5954198473282443, + "acc_norm_stderr": 0.043046937953806645 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.56, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.56, + "acc_norm_stderr": 
0.049888765156985884 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7323232323232324, + "acc_stderr": 0.03154449888270285, + "acc_norm": 0.7323232323232324, + "acc_norm_stderr": 0.03154449888270285 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5448275862068965, + "acc_stderr": 0.04149886942192118, + "acc_norm": 0.5448275862068965, + "acc_norm_stderr": 0.04149886942192118 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3235294117647059, + "acc_stderr": 0.04655010411319617, + "acc_norm": 0.3235294117647059, + "acc_norm_stderr": 0.04655010411319617 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6260504201680672, + "acc_stderr": 0.03142946637883708, + "acc_norm": 0.6260504201680672, + "acc_norm_stderr": 0.03142946637883708 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.6205128205128205, + "acc_stderr": 0.024603626924097417, + "acc_norm": 0.6205128205128205, + "acc_norm_stderr": 0.024603626924097417 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.7, + "acc_stderr": 0.04605661864718381, + "acc_norm": 0.7, + "acc_norm_stderr": 0.04605661864718381 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.04557239513497751, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.04557239513497751 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.458128078817734, + "acc_stderr": 0.03505630140785741, + "acc_norm": 0.458128078817734, + "acc_norm_stderr": 0.03505630140785741 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.632258064516129, + "acc_stderr": 0.027430866579973467, + "acc_norm": 0.632258064516129, + "acc_norm_stderr": 0.027430866579973467 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.8376068376068376, + "acc_stderr": 0.024161618127987745, + "acc_norm": 
0.8376068376068376, + "acc_norm_stderr": 0.024161618127987745 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.6075471698113207, + "acc_stderr": 0.03005258057955784, + "acc_norm": 0.6075471698113207, + "acc_norm_stderr": 0.03005258057955784 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6818181818181818, + "acc_stderr": 0.04461272175910509, + "acc_norm": 0.6818181818181818, + "acc_norm_stderr": 0.04461272175910509 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.37407407407407406, + "acc_stderr": 0.02950286112895529, + "acc_norm": 0.37407407407407406, + "acc_norm_stderr": 0.02950286112895529 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3509933774834437, + "acc_stderr": 0.03896981964257374, + "acc_norm": 0.3509933774834437, + "acc_norm_stderr": 0.03896981964257374 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7213930348258707, + "acc_stderr": 0.03170056183497309, + "acc_norm": 0.7213930348258707, + "acc_norm_stderr": 0.03170056183497309 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.6011560693641619, + "acc_stderr": 0.0373362665538351, + "acc_norm": 0.6011560693641619, + "acc_norm_stderr": 0.0373362665538351 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.025487187147859372, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.025487187147859372 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.04155319955593146, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.04155319955593146 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.45, + "acc_stderr": 0.04999999999999999, + "acc_norm": 0.45, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.74, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.74, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.6098265895953757, + "acc_stderr": 
0.02626167760780665, + "acc_norm": 0.6098265895953757, + "acc_norm_stderr": 0.02626167760780665 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.6687116564417178, + "acc_stderr": 0.03697983910025588, + "acc_norm": 0.6687116564417178, + "acc_norm_stderr": 0.03697983910025588 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6728395061728395, + "acc_stderr": 0.026105673861409828, + "acc_norm": 0.6728395061728395, + "acc_norm_stderr": 0.026105673861409828 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.772020725388601, + "acc_stderr": 0.03027690994517826, + "acc_norm": 0.772020725388601, + "acc_norm_stderr": 0.03027690994517826 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.4473684210526316, + "acc_stderr": 0.046774730044912, + "acc_norm": 0.4473684210526316, + "acc_norm_stderr": 0.046774730044912 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.7467889908256881, + "acc_stderr": 0.01864407304137505, + "acc_norm": 0.7467889908256881, + "acc_norm_stderr": 0.01864407304137505 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.38095238095238093, + "acc_stderr": 0.043435254289490965, + "acc_norm": 0.38095238095238093, + "acc_norm_stderr": 0.043435254289490965 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.6143790849673203, + "acc_stderr": 0.027870745278290286, + "acc_norm": 0.6143790849673203, + "acc_norm_stderr": 0.027870745278290286 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.63, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.63, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.768595041322314, + "acc_stderr": 0.03849856098794088, + "acc_norm": 0.768595041322314, + "acc_norm_stderr": 0.03849856098794088 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.6644736842105263, + 
"acc_stderr": 0.038424985593952674, + "acc_norm": 0.6644736842105263, + "acc_norm_stderr": 0.038424985593952674 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.576797385620915, + "acc_stderr": 0.019987809769482067, + "acc_norm": 0.576797385620915, + "acc_norm_stderr": 0.019987809769482067 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.42907801418439717, + "acc_stderr": 0.02952591430255856, + "acc_norm": 0.42907801418439717, + "acc_norm_stderr": 0.02952591430255856 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4107142857142857, + "acc_stderr": 0.04669510663875192, + "acc_norm": 0.4107142857142857, + "acc_norm_stderr": 0.04669510663875192 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5046296296296297, + "acc_stderr": 0.03409825519163572, + "acc_norm": 0.5046296296296297, + "acc_norm_stderr": 0.03409825519163572 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.288268156424581, + "acc_stderr": 0.015149132860209436, + "acc_norm": 0.288268156424581, + "acc_norm_stderr": 0.015149132860209436 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.55, + "acc_stderr": 0.05, + "acc_norm": 0.55, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.68, + "acc_stderr": 0.04688261722621503, + "acc_norm": 0.68, + "acc_norm_stderr": 0.04688261722621503 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5772058823529411, + "acc_stderr": 0.03000856284500348, + "acc_norm": 0.5772058823529411, + "acc_norm_stderr": 0.03000856284500348 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6979591836734694, + "acc_stderr": 0.0293936093198798, + "acc_norm": 0.6979591836734694, + "acc_norm_stderr": 0.0293936093198798 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6962025316455697, + "acc_stderr": 0.029936696387138605, + "acc_norm": 0.6962025316455697, + "acc_norm_stderr": 0.029936696387138605 + }, + 
"harness|ko_mmlu_professional_law|5": { + "acc": 0.4380704041720991, + "acc_stderr": 0.012671902782567638, + "acc_norm": 0.4380704041720991, + "acc_norm_stderr": 0.012671902782567638 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6323529411764706, + "acc_stderr": 0.03384132045674118, + "acc_norm": 0.6323529411764706, + "acc_norm_stderr": 0.03384132045674118 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6424242424242425, + "acc_stderr": 0.03742597043806587, + "acc_norm": 0.6424242424242425, + "acc_norm_stderr": 0.03742597043806587 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.6254589963280294, + "mc1_stderr": 0.01694353512840532, + "mc2": 0.7304089453717603, + "mc2_stderr": 0.014079250684911783 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.538370720188902, + "acc_stderr": 0.01713966022184556, + "acc_norm": 0.5489964580873672, + "acc_norm_stderr": 0.017107618859549353 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + 
"harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "gwonny/nox-solar-10.7b-v4-kolon-ITD-5-v2.1", + "model_sha": "766b7c8fa66dc6048a2f5db3cdf2fa780bb9cc77", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file 
diff --git a/gwonny/nox-solar-10.7b-v4-kolon-ITD-5-v2.2/result_2024-04-11 00:38:30.json b/gwonny/nox-solar-10.7b-v4-kolon-ITD-5-v2.2/result_2024-04-11 00:38:30.json new file mode 100644 index 0000000000000000000000000000000000000000..4e123d6dd7d805aed431659986913580e7a5d335 --- /dev/null +++ b/gwonny/nox-solar-10.7b-v4-kolon-ITD-5-v2.2/result_2024-04-11 00:38:30.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.6919795221843004, + "acc_stderr": 0.01349142951729204, + "acc_norm": 0.726962457337884, + "acc_norm_stderr": 0.013019332762635739 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4900418243377813, + "acc_stderr": 0.004988791687322868, + "acc_norm": 0.6432981477793268, + "acc_norm_stderr": 0.004780467270911781 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.7309941520467836, + "acc_stderr": 0.0340105262010409, + "acc_norm": 0.7309941520467836, + "acc_norm_stderr": 0.0340105262010409 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.7475728155339806, + "acc_stderr": 0.04301250399690878, + "acc_norm": 0.7475728155339806, + "acc_norm_stderr": 0.04301250399690878 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.7049808429118773, + "acc_stderr": 0.016308363772932717, + "acc_norm": 0.7049808429118773, + "acc_norm_stderr": 0.016308363772932717 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.04292596718256981, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.04292596718256981 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.5191489361702127, + "acc_stderr": 0.0326620429906468, + "acc_norm": 0.5191489361702127, + "acc_norm_stderr": 0.0326620429906468 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.5180722891566265, + "acc_stderr": 0.03889951252827216, + "acc_norm": 0.5180722891566265, + 
"acc_norm_stderr": 0.03889951252827216 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.617363344051447, + "acc_stderr": 0.02760468902858199, + "acc_norm": 0.617363344051447, + "acc_norm_stderr": 0.02760468902858199 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.6547085201793722, + "acc_stderr": 0.03191100192835795, + "acc_norm": 0.6547085201793722, + "acc_norm_stderr": 0.03191100192835795 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5801526717557252, + "acc_stderr": 0.04328577215262972, + "acc_norm": 0.5801526717557252, + "acc_norm_stderr": 0.04328577215262972 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7474747474747475, + "acc_stderr": 0.030954055470365907, + "acc_norm": 0.7474747474747475, + "acc_norm_stderr": 0.030954055470365907 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5448275862068965, + "acc_stderr": 0.04149886942192117, + "acc_norm": 0.5448275862068965, + "acc_norm_stderr": 0.04149886942192117 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3431372549019608, + "acc_stderr": 0.047240073523838876, + "acc_norm": 0.3431372549019608, + "acc_norm_stderr": 0.047240073523838876 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6260504201680672, + "acc_stderr": 0.03142946637883708, + "acc_norm": 0.6260504201680672, + "acc_norm_stderr": 0.03142946637883708 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.617948717948718, + "acc_stderr": 0.024635549163908237, + "acc_norm": 0.617948717948718, + "acc_norm_stderr": 0.024635549163908237 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.72, + "acc_stderr": 0.045126085985421296, + "acc_norm": 0.72, + "acc_norm_stderr": 0.045126085985421296 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 
0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6481481481481481, + "acc_stderr": 0.046166311118017146, + "acc_norm": 0.6481481481481481, + "acc_norm_stderr": 0.046166311118017146 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.46798029556650245, + "acc_stderr": 0.03510766597959217, + "acc_norm": 0.46798029556650245, + "acc_norm_stderr": 0.03510766597959217 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6290322580645161, + "acc_stderr": 0.027480541887953593, + "acc_norm": 0.6290322580645161, + "acc_norm_stderr": 0.027480541887953593 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.8376068376068376, + "acc_stderr": 0.02416161812798774, + "acc_norm": 0.8376068376068376, + "acc_norm_stderr": 0.02416161812798774 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5886792452830188, + "acc_stderr": 0.030285009259009798, + "acc_norm": 0.5886792452830188, + "acc_norm_stderr": 0.030285009259009798 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6727272727272727, + "acc_stderr": 0.0449429086625209, + "acc_norm": 0.6727272727272727, + "acc_norm_stderr": 0.0449429086625209 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3814814814814815, + "acc_stderr": 0.029616718927497596, + "acc_norm": 0.3814814814814815, + "acc_norm_stderr": 0.029616718927497596 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33774834437086093, + "acc_stderr": 0.038615575462551684, + "acc_norm": 0.33774834437086093, + "acc_norm_stderr": 0.038615575462551684 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7263681592039801, + "acc_stderr": 0.03152439186555402, + "acc_norm": 0.7263681592039801, + "acc_norm_stderr": 0.03152439186555402 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5838150289017341, + "acc_stderr": 0.03758517775404947, + "acc_norm": 0.5838150289017341, + "acc_norm_stderr": 0.03758517775404947 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + 
"acc": 0.43386243386243384, + "acc_stderr": 0.025525034382474884, + "acc_norm": 0.43386243386243384, + "acc_norm_stderr": 0.025525034382474884 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5694444444444444, + "acc_stderr": 0.04140685639111502, + "acc_norm": 0.5694444444444444, + "acc_norm_stderr": 0.04140685639111502 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.77, + "acc_stderr": 0.042295258468165065, + "acc_norm": 0.77, + "acc_norm_stderr": 0.042295258468165065 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.6040462427745664, + "acc_stderr": 0.026329813341946243, + "acc_norm": 0.6040462427745664, + "acc_norm_stderr": 0.026329813341946243 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.6257668711656442, + "acc_stderr": 0.03802068102899615, + "acc_norm": 0.6257668711656442, + "acc_norm_stderr": 0.03802068102899615 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.02622964917882116, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.02622964917882116 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7564766839378239, + "acc_stderr": 0.030975436386845436, + "acc_norm": 0.7564766839378239, + "acc_norm_stderr": 0.030975436386845436 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.47368421052631576, + "acc_stderr": 0.046970851366478626, + "acc_norm": 0.47368421052631576, + "acc_norm_stderr": 0.046970851366478626 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.7431192660550459, + "acc_stderr": 0.018732492928342444, + "acc_norm": 0.7431192660550459, + "acc_norm_stderr": 0.018732492928342444 + }, + 
"harness|ko_mmlu_formal_logic|5": { + "acc": 0.40476190476190477, + "acc_stderr": 0.043902592653775614, + "acc_norm": 0.40476190476190477, + "acc_norm_stderr": 0.043902592653775614 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.6143790849673203, + "acc_stderr": 0.027870745278290286, + "acc_norm": 0.6143790849673203, + "acc_norm_stderr": 0.027870745278290286 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.64, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.64, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7603305785123967, + "acc_stderr": 0.038968789850704164, + "acc_norm": 0.7603305785123967, + "acc_norm_stderr": 0.038968789850704164 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.6710526315789473, + "acc_stderr": 0.03823428969926604, + "acc_norm": 0.6710526315789473, + "acc_norm_stderr": 0.03823428969926604 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.576797385620915, + "acc_stderr": 0.019987809769482067, + "acc_norm": 0.576797385620915, + "acc_norm_stderr": 0.019987809769482067 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.42907801418439717, + "acc_stderr": 0.02952591430255856, + "acc_norm": 0.42907801418439717, + "acc_norm_stderr": 0.02952591430255856 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4017857142857143, + "acc_stderr": 0.04653333146973646, + "acc_norm": 0.4017857142857143, + "acc_norm_stderr": 0.04653333146973646 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5046296296296297, + "acc_stderr": 0.03409825519163572, + "acc_norm": 0.5046296296296297, + "acc_norm_stderr": 0.03409825519163572 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.30502793296089387, + "acc_stderr": 0.015398723510916716, + "acc_norm": 0.30502793296089387, + "acc_norm_stderr": 0.015398723510916716 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.54, + 
"acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.7, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.7, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5441176470588235, + "acc_stderr": 0.030254372573976715, + "acc_norm": 0.5441176470588235, + "acc_norm_stderr": 0.030254372573976715 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6816326530612244, + "acc_stderr": 0.029822533793982045, + "acc_norm": 0.6816326530612244, + "acc_norm_stderr": 0.029822533793982045 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6962025316455697, + "acc_stderr": 0.029936696387138605, + "acc_norm": 0.6962025316455697, + "acc_norm_stderr": 0.029936696387138605 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.4361147327249022, + "acc_stderr": 0.012665568135455324, + "acc_norm": 0.4361147327249022, + "acc_norm_stderr": 0.012665568135455324 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6519607843137255, + "acc_stderr": 0.03343311240488418, + "acc_norm": 0.6519607843137255, + "acc_norm_stderr": 0.03343311240488418 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.03756335775187897, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.03756335775187897 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.6364749082007344, + "mc1_stderr": 0.016838862883965838, + "mc2": 0.7376857561946708, + "mc2_stderr": 0.01403150889486167 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5147579693034239, + "acc_stderr": 0.017182864434998564, + "acc_norm": 0.5301062573789846, + "acc_norm_stderr": 0.017159163590170216 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + 
"harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + 
"harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "gwonny/nox-solar-10.7b-v4-kolon-ITD-5-v2.2", + "model_sha": "4b0d8807344116c998e4c0c6d9f8ec8f6e0c2ef0", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/gwonny/nox-solar-10.7b-v4-kolon-all-10/result_2024-03-21 08:27:55.json b/gwonny/nox-solar-10.7b-v4-kolon-all-10/result_2024-03-21 08:27:55.json new file mode 100644 index 0000000000000000000000000000000000000000..d9140c2bf8d6fe350ce5a2387f12fea23ede07ce --- /dev/null +++ b/gwonny/nox-solar-10.7b-v4-kolon-all-10/result_2024-03-21 08:27:55.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.6706484641638225, + "acc_stderr": 0.013734057652635476, + "acc_norm": 0.712457337883959, + "acc_norm_stderr": 0.013226719056266134 + }, + "harness|ko_hellaswag|10": { + "acc": 0.46723760207130055, + "acc_stderr": 0.004979058078478701, + "acc_norm": 0.6167098187612029, + "acc_norm_stderr": 0.004851944170671266 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6374269005847953, + "acc_stderr": 0.0368713061556206, + "acc_norm": 0.6374269005847953, + "acc_norm_stderr": 0.0368713061556206 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6990291262135923, + "acc_stderr": 0.045416094465039476, + "acc_norm": 0.6990291262135923, + "acc_norm_stderr": 0.045416094465039476 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6679438058748404, 
+ "acc_stderr": 0.016841174655295714, + "acc_norm": 0.6679438058748404, + "acc_norm_stderr": 0.016841174655295714 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3925925925925926, + "acc_stderr": 0.042185062153688786, + "acc_norm": 0.3925925925925926, + "acc_norm_stderr": 0.042185062153688786 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4978723404255319, + "acc_stderr": 0.032685726586674915, + "acc_norm": 0.4978723404255319, + "acc_norm_stderr": 0.032685726586674915 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.5060240963855421, + "acc_stderr": 0.03892212195333045, + "acc_norm": 0.5060240963855421, + "acc_norm_stderr": 0.03892212195333045 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5401929260450161, + "acc_stderr": 0.028306190403305693, + "acc_norm": 0.5401929260450161, + "acc_norm_stderr": 0.028306190403305693 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5919282511210763, + "acc_stderr": 0.03298574607842822, + "acc_norm": 0.5919282511210763, + "acc_norm_stderr": 0.03298574607842822 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5954198473282443, + "acc_stderr": 0.043046937953806645, + "acc_norm": 0.5954198473282443, + "acc_norm_stderr": 0.043046937953806645 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956909, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956909 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7424242424242424, + "acc_stderr": 0.03115626951964684, + "acc_norm": 0.7424242424242424, + "acc_norm_stderr": 0.03115626951964684 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.496551724137931, + "acc_stderr": 0.04166567577101579, + "acc_norm": 0.496551724137931, + "acc_norm_stderr": 0.04166567577101579 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 
0.35294117647058826, + "acc_stderr": 0.04755129616062947, + "acc_norm": 0.35294117647058826, + "acc_norm_stderr": 0.04755129616062947 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5882352941176471, + "acc_stderr": 0.031968769891957786, + "acc_norm": 0.5882352941176471, + "acc_norm_stderr": 0.031968769891957786 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.6076923076923076, + "acc_stderr": 0.02475600038213096, + "acc_norm": 0.6076923076923076, + "acc_norm_stderr": 0.02475600038213096 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.58, + "acc_stderr": 0.04960449637488583, + "acc_norm": 0.58, + "acc_norm_stderr": 0.04960449637488583 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6111111111111112, + "acc_stderr": 0.04712821257426769, + "acc_norm": 0.6111111111111112, + "acc_norm_stderr": 0.04712821257426769 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4039408866995074, + "acc_stderr": 0.03452453903822039, + "acc_norm": 0.4039408866995074, + "acc_norm_stderr": 0.03452453903822039 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5838709677419355, + "acc_stderr": 0.028040981380761533, + "acc_norm": 0.5838709677419355, + "acc_norm_stderr": 0.028040981380761533 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7735042735042735, + "acc_stderr": 0.027421007295392926, + "acc_norm": 0.7735042735042735, + "acc_norm_stderr": 0.027421007295392926 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5811320754716981, + "acc_stderr": 0.030365050829115208, + "acc_norm": 0.5811320754716981, + "acc_norm_stderr": 0.030365050829115208 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5636363636363636, + "acc_stderr": 0.04750185058907296, + "acc_norm": 0.5636363636363636, + "acc_norm_stderr": 0.04750185058907296 + }, + 
"harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.34814814814814815, + "acc_stderr": 0.02904560029061626, + "acc_norm": 0.34814814814814815, + "acc_norm_stderr": 0.02904560029061626 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.37748344370860926, + "acc_stderr": 0.039580272311215706, + "acc_norm": 0.37748344370860926, + "acc_norm_stderr": 0.039580272311215706 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7164179104477612, + "acc_stderr": 0.03187187537919796, + "acc_norm": 0.7164179104477612, + "acc_norm_stderr": 0.03187187537919796 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5202312138728323, + "acc_stderr": 0.03809342081273957, + "acc_norm": 0.5202312138728323, + "acc_norm_stderr": 0.03809342081273957 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.025305906241590632, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.025305906241590632 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5416666666666666, + "acc_stderr": 0.04166666666666665, + "acc_norm": 0.5416666666666666, + "acc_norm_stderr": 0.04166666666666665 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.74, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.74, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.6011560693641619, + "acc_stderr": 0.026362437574546545, + "acc_norm": 0.6011560693641619, + "acc_norm_stderr": 0.026362437574546545 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.558282208588957, + "acc_stderr": 0.03901591825836184, + "acc_norm": 0.558282208588957, + "acc_norm_stderr": 0.03901591825836184 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6080246913580247, + "acc_stderr": 0.02716368603827115, + "acc_norm": 0.6080246913580247, + "acc_norm_stderr": 
0.02716368603827115 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.694300518134715, + "acc_stderr": 0.03324837939758159, + "acc_norm": 0.694300518134715, + "acc_norm_stderr": 0.03324837939758159 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.35964912280701755, + "acc_stderr": 0.045144961328736334, + "acc_norm": 0.35964912280701755, + "acc_norm_stderr": 0.045144961328736334 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.7045871559633028, + "acc_stderr": 0.019560619182976, + "acc_norm": 0.7045871559633028, + "acc_norm_stderr": 0.019560619182976 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.38095238095238093, + "acc_stderr": 0.043435254289490965, + "acc_norm": 0.38095238095238093, + "acc_norm_stderr": 0.043435254289490965 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5490196078431373, + "acc_stderr": 0.02849199358617156, + "acc_norm": 0.5490196078431373, + "acc_norm_stderr": 0.02849199358617156 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7272727272727273, + "acc_stderr": 0.04065578140908705, + "acc_norm": 0.7272727272727273, + "acc_norm_stderr": 0.04065578140908705 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5855263157894737, + "acc_stderr": 0.04008973785779206, + "acc_norm": 0.5855263157894737, + "acc_norm_stderr": 0.04008973785779206 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.511437908496732, + "acc_stderr": 0.02022254151561086, + "acc_norm": 0.511437908496732, + "acc_norm_stderr": 0.02022254151561086 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.42907801418439717, + "acc_stderr": 0.029525914302558562, + "acc_norm": 
0.42907801418439717, + "acc_norm_stderr": 0.029525914302558562 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4375, + "acc_stderr": 0.04708567521880525, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.04708567521880525 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5185185185185185, + "acc_stderr": 0.03407632093854052, + "acc_norm": 0.5185185185185185, + "acc_norm_stderr": 0.03407632093854052 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2759776536312849, + "acc_stderr": 0.014950103002475347, + "acc_norm": 0.2759776536312849, + "acc_norm_stderr": 0.014950103002475347 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.64, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.64, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4963235294117647, + "acc_stderr": 0.030372015885428188, + "acc_norm": 0.4963235294117647, + "acc_norm_stderr": 0.030372015885428188 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5918367346938775, + "acc_stderr": 0.03146465712827424, + "acc_norm": 0.5918367346938775, + "acc_norm_stderr": 0.03146465712827424 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.679324894514768, + "acc_stderr": 0.030381931949990403, + "acc_norm": 0.679324894514768, + "acc_norm_stderr": 0.030381931949990403 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.39113428943937417, + "acc_stderr": 0.012463861839982058, + "acc_norm": 0.39113428943937417, + "acc_norm_stderr": 0.012463861839982058 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5833333333333334, + "acc_stderr": 0.03460228327239172, + "acc_norm": 0.5833333333333334, + "acc_norm_stderr": 0.03460228327239172 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 
0.5636363636363636, + "acc_stderr": 0.03872592983524754, + "acc_norm": 0.5636363636363636, + "acc_norm_stderr": 0.03872592983524754 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.6070991432068543, + "mc1_stderr": 0.017097248285233065, + "mc2": 0.7325043939362884, + "mc2_stderr": 0.014527259353765072 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.45690672963400236, + "acc_stderr": 0.017126389093086784, + "acc_norm": 0.48760330578512395, + "acc_norm_stderr": 0.01718506973267654 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + 
"harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "gwonny/nox-solar-10.7b-v4-kolon-all-10", + "model_sha": "1668d880209b570df9bcad27c39f2f4d420c8c44", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/gwonny/nox-solar-10.7b-v4-kolon-all-5-v2.0/result_2024-04-01 02:41:25.json b/gwonny/nox-solar-10.7b-v4-kolon-all-5-v2.0/result_2024-04-01 02:41:25.json new file mode 100644 index 0000000000000000000000000000000000000000..99d19b44a620a360ac5f141921396c6d7f9fe482 --- /dev/null +++ b/gwonny/nox-solar-10.7b-v4-kolon-all-5-v2.0/result_2024-04-01 02:41:25.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + 
"acc": 0.6962457337883959, + "acc_stderr": 0.013438909184778764, + "acc_norm": 0.7431740614334471, + "acc_norm_stderr": 0.0127669237941168 + }, + "harness|ko_hellaswag|10": { + "acc": 0.49372634933280224, + "acc_stderr": 0.004989388613438808, + "acc_norm": 0.6580362477594105, + "acc_norm_stderr": 0.004733980470799208 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.7134502923976608, + "acc_stderr": 0.03467826685703826, + "acc_norm": 0.7134502923976608, + "acc_norm_stderr": 0.03467826685703826 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.7378640776699029, + "acc_stderr": 0.043546310772605956, + "acc_norm": 0.7378640776699029, + "acc_norm_stderr": 0.043546310772605956 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.7164750957854407, + "acc_stderr": 0.016117318166832297, + "acc_norm": 0.7164750957854407, + "acc_norm_stderr": 0.016117318166832297 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.043097329010363554, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.043097329010363554 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.5319148936170213, + "acc_stderr": 0.03261936918467381, + "acc_norm": 0.5319148936170213, + "acc_norm_stderr": 0.03261936918467381 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.5120481927710844, + "acc_stderr": 0.03891364495835817, + "acc_norm": 0.5120481927710844, + "acc_norm_stderr": 0.03891364495835817 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6366559485530546, + "acc_stderr": 0.027316847674192717, + "acc_norm": 0.6366559485530546, + "acc_norm_stderr": 0.027316847674192717 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.6233183856502242, + "acc_stderr": 0.032521134899291884, + "acc_norm": 0.6233183856502242, + "acc_norm_stderr": 0.032521134899291884 + }, + 
"harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6335877862595419, + "acc_stderr": 0.042258754519696386, + "acc_norm": 0.6335877862595419, + "acc_norm_stderr": 0.042258754519696386 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7626262626262627, + "acc_stderr": 0.030313710538198917, + "acc_norm": 0.7626262626262627, + "acc_norm_stderr": 0.030313710538198917 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5241379310344828, + "acc_stderr": 0.0416180850350153, + "acc_norm": 0.5241379310344828, + "acc_norm_stderr": 0.0416180850350153 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.04617034827006715, + "acc_norm": 0.3137254901960784, + "acc_norm_stderr": 0.04617034827006715 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6470588235294118, + "acc_stderr": 0.031041941304059288, + "acc_norm": 0.6470588235294118, + "acc_norm_stderr": 0.031041941304059288 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.6282051282051282, + "acc_stderr": 0.02450347255711092, + "acc_norm": 0.6282051282051282, + "acc_norm_stderr": 0.02450347255711092 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.69, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.69, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.04557239513497751, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.04557239513497751 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.45320197044334976, + "acc_stderr": 0.035025446508458714, + "acc_norm": 0.45320197044334976, + "acc_norm_stderr": 
0.035025446508458714 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.635483870967742, + "acc_stderr": 0.02737987122994325, + "acc_norm": 0.635483870967742, + "acc_norm_stderr": 0.02737987122994325 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.8290598290598291, + "acc_stderr": 0.024662496845209807, + "acc_norm": 0.8290598290598291, + "acc_norm_stderr": 0.024662496845209807 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5886792452830188, + "acc_stderr": 0.030285009259009794, + "acc_norm": 0.5886792452830188, + "acc_norm_stderr": 0.030285009259009794 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6727272727272727, + "acc_stderr": 0.044942908662520896, + "acc_norm": 0.6727272727272727, + "acc_norm_stderr": 0.044942908662520896 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3851851851851852, + "acc_stderr": 0.02967090612463089, + "acc_norm": 0.3851851851851852, + "acc_norm_stderr": 0.02967090612463089 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.038020397601079024, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.038020397601079024 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7263681592039801, + "acc_stderr": 0.03152439186555402, + "acc_norm": 0.7263681592039801, + "acc_norm_stderr": 0.03152439186555402 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5317919075144508, + "acc_stderr": 0.03804749744364763, + "acc_norm": 0.5317919075144508, + "acc_norm_stderr": 0.03804749744364763 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.4365079365079365, + "acc_stderr": 0.0255428468174005, + "acc_norm": 0.4365079365079365, + "acc_norm_stderr": 0.0255428468174005 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.04155319955593146, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.04155319955593146 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.43, + "acc_stderr": 
0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.8, + "acc_stderr": 0.04020151261036845, + "acc_norm": 0.8, + "acc_norm_stderr": 0.04020151261036845 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.6242774566473989, + "acc_stderr": 0.02607431485165708, + "acc_norm": 0.6242774566473989, + "acc_norm_stderr": 0.02607431485165708 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.656441717791411, + "acc_stderr": 0.03731133519673893, + "acc_norm": 0.656441717791411, + "acc_norm_stderr": 0.03731133519673893 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6697530864197531, + "acc_stderr": 0.026168298456732846, + "acc_norm": 0.6697530864197531, + "acc_norm_stderr": 0.026168298456732846 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7772020725388601, + "acc_stderr": 0.03003114797764154, + "acc_norm": 0.7772020725388601, + "acc_norm_stderr": 0.03003114797764154 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.4298245614035088, + "acc_stderr": 0.04657047260594964, + "acc_norm": 0.4298245614035088, + "acc_norm_stderr": 0.04657047260594964 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.744954128440367, + "acc_stderr": 0.018688500856535856, + "acc_norm": 0.744954128440367, + "acc_norm_stderr": 0.018688500856535856 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.044444444444444495, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.044444444444444495 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.6176470588235294, + "acc_stderr": 0.02782610930728369, + "acc_norm": 0.6176470588235294, + "acc_norm_stderr": 0.02782610930728369 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.64, + "acc_stderr": 
0.048241815132442176, + "acc_norm": 0.64, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7768595041322314, + "acc_stderr": 0.03800754475228733, + "acc_norm": 0.7768595041322314, + "acc_norm_stderr": 0.03800754475228733 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.631578947368421, + "acc_stderr": 0.03925523381052932, + "acc_norm": 0.631578947368421, + "acc_norm_stderr": 0.03925523381052932 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5898692810457516, + "acc_stderr": 0.019898412717635892, + "acc_norm": 0.5898692810457516, + "acc_norm_stderr": 0.019898412717635892 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.42907801418439717, + "acc_stderr": 0.029525914302558562, + "acc_norm": 0.42907801418439717, + "acc_norm_stderr": 0.029525914302558562 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.44642857142857145, + "acc_stderr": 0.04718471485219588, + "acc_norm": 0.44642857142857145, + "acc_norm_stderr": 0.04718471485219588 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5, + "acc_stderr": 0.034099716973523674, + "acc_norm": 0.5, + "acc_norm_stderr": 0.034099716973523674 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.3139664804469274, + "acc_stderr": 0.01552192393352364, + "acc_norm": 0.3139664804469274, + "acc_norm_stderr": 0.01552192393352364 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.53, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.53, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.68, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.68, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5588235294117647, + "acc_stderr": 0.03016191193076711, + "acc_norm": 0.5588235294117647, + "acc_norm_stderr": 0.03016191193076711 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 
0.6693877551020408, + "acc_stderr": 0.03011642629654062, + "acc_norm": 0.6693877551020408, + "acc_norm_stderr": 0.03011642629654062 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7130801687763713, + "acc_stderr": 0.029443773022594693, + "acc_norm": 0.7130801687763713, + "acc_norm_stderr": 0.029443773022594693 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.44328552803129073, + "acc_stderr": 0.012687818419599916, + "acc_norm": 0.44328552803129073, + "acc_norm_stderr": 0.012687818419599916 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6519607843137255, + "acc_stderr": 0.033433112404884176, + "acc_norm": 0.6519607843137255, + "acc_norm_stderr": 0.033433112404884176 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6545454545454545, + "acc_stderr": 0.037131580674819135, + "acc_norm": 0.6545454545454545, + "acc_norm_stderr": 0.037131580674819135 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.6242350061199511, + "mc1_stderr": 0.016954584060214287, + "mc2": 0.7258525968878685, + "mc2_stderr": 0.014046002210969856 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5053128689492326, + "acc_stderr": 0.01718938362722971, + "acc_norm": 0.5336481700118064, + "acc_norm_stderr": 0.017151384117131872 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + 
"harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + 
"harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "gwonny/nox-solar-10.7b-v4-kolon-all-5-v2.0", + "model_sha": "8a4753328508152fcf4982dfff007c63f2443181", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/gwonny/nox-solar-10.7b-v4-kolon-all-5-v3.0/result_2024-03-29 05:10:49.json b/gwonny/nox-solar-10.7b-v4-kolon-all-5-v3.0/result_2024-03-29 05:10:49.json new file mode 100644 index 0000000000000000000000000000000000000000..8acfeaef805838e7c988ce68da23553439b101da --- /dev/null +++ b/gwonny/nox-solar-10.7b-v4-kolon-all-5-v3.0/result_2024-03-29 05:10:49.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.6945392491467577, + "acc_stderr": 0.013460080478002507, + "acc_norm": 0.735494880546075, + "acc_norm_stderr": 0.012889272949313366 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4885480979884485, + "acc_stderr": 0.004988472459418033, + "acc_norm": 0.6522605058753237, + "acc_norm_stderr": 0.004752794829825048 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.7017543859649122, + "acc_stderr": 0.03508771929824564, + "acc_norm": 0.7017543859649122, + "acc_norm_stderr": 0.03508771929824564 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.7184466019417476, + "acc_stderr": 0.044532548363264673, + "acc_norm": 0.7184466019417476, + "acc_norm_stderr": 0.044532548363264673 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.7037037037037037, + "acc_stderr": 0.016328814422102052, + "acc_norm": 0.7037037037037037, + "acc_norm_stderr": 0.016328814422102052 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45185185185185184, + "acc_stderr": 0.04299268905480863, + "acc_norm": 0.45185185185185184, + "acc_norm_stderr": 0.04299268905480863 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + 
"acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.5446808510638298, + "acc_stderr": 0.03255525359340356, + "acc_norm": 0.5446808510638298, + "acc_norm_stderr": 0.03255525359340356 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.5120481927710844, + "acc_stderr": 0.03891364495835817, + "acc_norm": 0.5120481927710844, + "acc_norm_stderr": 0.03891364495835817 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6270096463022508, + "acc_stderr": 0.0274666102131401, + "acc_norm": 0.6270096463022508, + "acc_norm_stderr": 0.0274666102131401 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.6233183856502242, + "acc_stderr": 0.032521134899291884, + "acc_norm": 0.6233183856502242, + "acc_norm_stderr": 0.032521134899291884 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5954198473282443, + "acc_stderr": 0.043046937953806645, + "acc_norm": 0.5954198473282443, + "acc_norm_stderr": 0.043046937953806645 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7525252525252525, + "acc_stderr": 0.03074630074212451, + "acc_norm": 0.7525252525252525, + "acc_norm_stderr": 0.03074630074212451 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5448275862068965, + "acc_stderr": 0.04149886942192118, + "acc_norm": 0.5448275862068965, + "acc_norm_stderr": 0.04149886942192118 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.044405219061793275, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.044405219061793275 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6554621848739496, + "acc_stderr": 0.030868682604121626, + "acc_norm": 0.6554621848739496, + "acc_norm_stderr": 0.030868682604121626 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.6153846153846154, + 
"acc_stderr": 0.024666744915187198, + "acc_norm": 0.6153846153846154, + "acc_norm_stderr": 0.024666744915187198 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.66, + "acc_stderr": 0.04760952285695237, + "acc_norm": 0.66, + "acc_norm_stderr": 0.04760952285695237 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.04557239513497751, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.04557239513497751 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4482758620689655, + "acc_stderr": 0.03499113137676744, + "acc_norm": 0.4482758620689655, + "acc_norm_stderr": 0.03499113137676744 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6548387096774193, + "acc_stderr": 0.027045746573534323, + "acc_norm": 0.6548387096774193, + "acc_norm_stderr": 0.027045746573534323 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.8290598290598291, + "acc_stderr": 0.024662496845209807, + "acc_norm": 0.8290598290598291, + "acc_norm_stderr": 0.024662496845209807 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5886792452830188, + "acc_stderr": 0.030285009259009794, + "acc_norm": 0.5886792452830188, + "acc_norm_stderr": 0.030285009259009794 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6545454545454545, + "acc_stderr": 0.04554619617541054, + "acc_norm": 0.6545454545454545, + "acc_norm_stderr": 0.04554619617541054 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.36666666666666664, + "acc_stderr": 0.029381620726465083, + "acc_norm": 0.36666666666666664, + "acc_norm_stderr": 0.029381620726465083 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.03780445850526732, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.03780445850526732 + }, + "harness|ko_mmlu_sociology|5": { + 
"acc": 0.7164179104477612, + "acc_stderr": 0.03187187537919796, + "acc_norm": 0.7164179104477612, + "acc_norm_stderr": 0.03187187537919796 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5260115606936416, + "acc_stderr": 0.038073017265045125, + "acc_norm": 0.5260115606936416, + "acc_norm_stderr": 0.038073017265045125 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.41534391534391535, + "acc_stderr": 0.025379524910778398, + "acc_norm": 0.41534391534391535, + "acc_norm_stderr": 0.025379524910778398 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5416666666666666, + "acc_stderr": 0.04166666666666665, + "acc_norm": 0.5416666666666666, + "acc_norm_stderr": 0.04166666666666665 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.78, + "acc_stderr": 0.041633319989322626, + "acc_norm": 0.78, + "acc_norm_stderr": 0.041633319989322626 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.6271676300578035, + "acc_stderr": 0.02603389061357628, + "acc_norm": 0.6271676300578035, + "acc_norm_stderr": 0.02603389061357628 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.6441717791411042, + "acc_stderr": 0.03761521380046735, + "acc_norm": 0.6441717791411042, + "acc_norm_stderr": 0.03761521380046735 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6728395061728395, + "acc_stderr": 0.026105673861409825, + "acc_norm": 0.6728395061728395, + "acc_norm_stderr": 0.026105673861409825 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.772020725388601, + "acc_stderr": 0.030276909945178274, + "acc_norm": 0.772020725388601, + "acc_norm_stderr": 0.030276909945178274 + }, + 
"harness|ko_mmlu_econometrics|5": { + "acc": 0.40350877192982454, + "acc_stderr": 0.04615186962583704, + "acc_norm": 0.40350877192982454, + "acc_norm_stderr": 0.04615186962583704 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.7357798165137615, + "acc_stderr": 0.018904164171510213, + "acc_norm": 0.7357798165137615, + "acc_norm_stderr": 0.018904164171510213 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.42063492063492064, + "acc_stderr": 0.04415438226743744, + "acc_norm": 0.42063492063492064, + "acc_norm_stderr": 0.04415438226743744 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.6143790849673203, + "acc_stderr": 0.02787074527829028, + "acc_norm": 0.6143790849673203, + "acc_norm_stderr": 0.02787074527829028 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.66, + "acc_stderr": 0.04760952285695237, + "acc_norm": 0.66, + "acc_norm_stderr": 0.04760952285695237 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7933884297520661, + "acc_stderr": 0.03695980128098824, + "acc_norm": 0.7933884297520661, + "acc_norm_stderr": 0.03695980128098824 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.631578947368421, + "acc_stderr": 0.03925523381052932, + "acc_norm": 0.631578947368421, + "acc_norm_stderr": 0.03925523381052932 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5816993464052288, + "acc_stderr": 0.019955975145835542, + "acc_norm": 0.5816993464052288, + "acc_norm_stderr": 0.019955975145835542 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.41843971631205673, + "acc_stderr": 0.029427994039419994, + "acc_norm": 0.41843971631205673, + "acc_norm_stderr": 0.029427994039419994 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4375, + "acc_stderr": 0.04708567521880525, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.04708567521880525 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5138888888888888, + "acc_stderr": 0.03408655867977748, + "acc_norm": 0.5138888888888888, + 
"acc_norm_stderr": 0.03408655867977748 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.29608938547486036, + "acc_stderr": 0.01526867731760228, + "acc_norm": 0.29608938547486036, + "acc_norm_stderr": 0.01526867731760228 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.72, + "acc_stderr": 0.0451260859854213, + "acc_norm": 0.72, + "acc_norm_stderr": 0.0451260859854213 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5330882352941176, + "acc_stderr": 0.030306257722468317, + "acc_norm": 0.5330882352941176, + "acc_norm_stderr": 0.030306257722468317 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6489795918367347, + "acc_stderr": 0.030555316755573644, + "acc_norm": 0.6489795918367347, + "acc_norm_stderr": 0.030555316755573644 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7046413502109705, + "acc_stderr": 0.02969633871342288, + "acc_norm": 0.7046413502109705, + "acc_norm_stderr": 0.02969633871342288 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.43546284224250326, + "acc_stderr": 0.012663412101248349, + "acc_norm": 0.43546284224250326, + "acc_norm_stderr": 0.012663412101248349 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6470588235294118, + "acc_stderr": 0.033540924375915174, + "acc_norm": 0.6470588235294118, + "acc_norm_stderr": 0.033540924375915174 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6303030303030303, + "acc_stderr": 0.03769430314512568, + "acc_norm": 0.6303030303030303, + "acc_norm_stderr": 0.03769430314512568 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.6144430844553244, + "mc1_stderr": 0.017038839010591656, + "mc2": 0.7269460295023641, + "mc2_stderr": 0.014056040171216673 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5147579693034239, + "acc_stderr": 
0.017182864434998567, + "acc_norm": 0.5301062573789846, + "acc_norm_stderr": 0.017159163590170216 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + 
"harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "gwonny/nox-solar-10.7b-v4-kolon-all-5-v3.0", + "model_sha": "d648798a999095fbcb6c9998e5df3fdd82a02a7f", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/gwonny/nox-solar-10.7b-v4-kolon-all-5/result_2024-03-21 07:09:05.json b/gwonny/nox-solar-10.7b-v4-kolon-all-5/result_2024-03-21 07:09:05.json new file mode 100644 index 0000000000000000000000000000000000000000..b2b94e7cd2d1ebcd234841c87870de72c81b5f1d --- /dev/null +++ b/gwonny/nox-solar-10.7b-v4-kolon-all-5/result_2024-03-21 07:09:05.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.6783276450511946, + "acc_stderr": 0.013650488084494167, + "acc_norm": 0.7209897610921502, + "acc_norm_stderr": 0.01310678488360134 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4678350926110337, + "acc_stderr": 0.004979446038824757, + "acc_norm": 0.6189006174068911, + "acc_norm_stderr": 0.004846643735666532 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6608187134502924, + 
"acc_stderr": 0.036310534964889056, + "acc_norm": 0.6608187134502924, + "acc_norm_stderr": 0.036310534964889056 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.7087378640776699, + "acc_stderr": 0.04498676320572922, + "acc_norm": 0.7087378640776699, + "acc_norm_stderr": 0.04498676320572922 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6832694763729247, + "acc_stderr": 0.01663556642771259, + "acc_norm": 0.6832694763729247, + "acc_norm_stderr": 0.01663556642771259 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.04171654161354544, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.04171654161354544 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.5148936170212766, + "acc_stderr": 0.032671518489247764, + "acc_norm": 0.5148936170212766, + "acc_norm_stderr": 0.032671518489247764 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4939759036144578, + "acc_stderr": 0.03892212195333047, + "acc_norm": 0.4939759036144578, + "acc_norm_stderr": 0.03892212195333047 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5627009646302251, + "acc_stderr": 0.028173917761762906, + "acc_norm": 0.5627009646302251, + "acc_norm_stderr": 0.028173917761762906 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5964125560538116, + "acc_stderr": 0.032928028193303135, + "acc_norm": 0.5964125560538116, + "acc_norm_stderr": 0.032928028193303135 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6412213740458015, + "acc_stderr": 0.04206739313864908, + "acc_norm": 0.6412213740458015, + "acc_norm_stderr": 0.04206739313864908 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7222222222222222, + 
"acc_stderr": 0.03191178226713546, + "acc_norm": 0.7222222222222222, + "acc_norm_stderr": 0.03191178226713546 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5241379310344828, + "acc_stderr": 0.041618085035015295, + "acc_norm": 0.5241379310344828, + "acc_norm_stderr": 0.041618085035015295 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.35294117647058826, + "acc_stderr": 0.047551296160629475, + "acc_norm": 0.35294117647058826, + "acc_norm_stderr": 0.047551296160629475 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5966386554621849, + "acc_stderr": 0.031866081214088314, + "acc_norm": 0.5966386554621849, + "acc_norm_stderr": 0.031866081214088314 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.6, + "acc_stderr": 0.02483881198803317, + "acc_norm": 0.6, + "acc_norm_stderr": 0.02483881198803317 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6574074074074074, + "acc_stderr": 0.04587904741301812, + "acc_norm": 0.6574074074074074, + "acc_norm_stderr": 0.04587904741301812 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.43349753694581283, + "acc_stderr": 0.03486731727419872, + "acc_norm": 0.43349753694581283, + "acc_norm_stderr": 0.03486731727419872 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6, + "acc_stderr": 0.02786932057166462, + "acc_norm": 0.6, + "acc_norm_stderr": 0.02786932057166462 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7905982905982906, + "acc_stderr": 0.026655699653922758, + "acc_norm": 0.7905982905982906, + "acc_norm_stderr": 0.026655699653922758 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5735849056603773, + "acc_stderr": 
0.030437794342983045, + "acc_norm": 0.5735849056603773, + "acc_norm_stderr": 0.030437794342983045 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5818181818181818, + "acc_stderr": 0.04724577405731572, + "acc_norm": 0.5818181818181818, + "acc_norm_stderr": 0.04724577405731572 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.34814814814814815, + "acc_stderr": 0.02904560029061626, + "acc_norm": 0.34814814814814815, + "acc_norm_stderr": 0.02904560029061626 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3576158940397351, + "acc_stderr": 0.03913453431177258, + "acc_norm": 0.3576158940397351, + "acc_norm_stderr": 0.03913453431177258 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7263681592039801, + "acc_stderr": 0.03152439186555404, + "acc_norm": 0.7263681592039801, + "acc_norm_stderr": 0.03152439186555404 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5317919075144508, + "acc_stderr": 0.03804749744364764, + "acc_norm": 0.5317919075144508, + "acc_norm_stderr": 0.03804749744364764 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.025305906241590632, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.025305906241590632 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5, + "acc_stderr": 0.04181210050035455, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04181210050035455 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.78, + "acc_stderr": 0.041633319989322626, + "acc_norm": 0.78, + "acc_norm_stderr": 0.041633319989322626 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.6069364161849711, + "acc_stderr": 0.026296227915613674, + "acc_norm": 0.6069364161849711, + "acc_norm_stderr": 0.026296227915613674 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.588957055214724, + 
"acc_stderr": 0.038656978537853624, + "acc_norm": 0.588957055214724, + "acc_norm_stderr": 0.038656978537853624 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6358024691358025, + "acc_stderr": 0.02677492989972234, + "acc_norm": 0.6358024691358025, + "acc_norm_stderr": 0.02677492989972234 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.694300518134715, + "acc_stderr": 0.03324837939758159, + "acc_norm": 0.694300518134715, + "acc_norm_stderr": 0.03324837939758159 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.38596491228070173, + "acc_stderr": 0.045796394220704355, + "acc_norm": 0.38596491228070173, + "acc_norm_stderr": 0.045796394220704355 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6990825688073394, + "acc_stderr": 0.019664751366802114, + "acc_norm": 0.6990825688073394, + "acc_norm_stderr": 0.019664751366802114 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.40476190476190477, + "acc_stderr": 0.043902592653775614, + "acc_norm": 0.40476190476190477, + "acc_norm_stderr": 0.043902592653775614 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5718954248366013, + "acc_stderr": 0.028332397483664278, + "acc_norm": 0.5718954248366013, + "acc_norm_stderr": 0.028332397483664278 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.71900826446281, + "acc_stderr": 0.04103203830514512, + "acc_norm": 0.71900826446281, + "acc_norm_stderr": 0.04103203830514512 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.6052631578947368, + "acc_stderr": 0.039777499346220734, + "acc_norm": 0.6052631578947368, + "acc_norm_stderr": 0.039777499346220734 + }, + "harness|ko_mmlu_professional_psychology|5": { + 
"acc": 0.5245098039215687, + "acc_stderr": 0.020203517280261436, + "acc_norm": 0.5245098039215687, + "acc_norm_stderr": 0.020203517280261436 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.41134751773049644, + "acc_stderr": 0.02935491115994099, + "acc_norm": 0.41134751773049644, + "acc_norm_stderr": 0.02935491115994099 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.48214285714285715, + "acc_stderr": 0.047427623612430116, + "acc_norm": 0.48214285714285715, + "acc_norm_stderr": 0.047427623612430116 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5138888888888888, + "acc_stderr": 0.03408655867977748, + "acc_norm": 0.5138888888888888, + "acc_norm_stderr": 0.03408655867977748 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27262569832402234, + "acc_stderr": 0.01489339173524961, + "acc_norm": 0.27262569832402234, + "acc_norm_stderr": 0.01489339173524961 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.04999999999999999, + "acc_norm": 0.45, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.7, + "acc_stderr": 0.04605661864718381, + "acc_norm": 0.7, + "acc_norm_stderr": 0.04605661864718381 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5, + "acc_stderr": 0.030372836961539352, + "acc_norm": 0.5, + "acc_norm_stderr": 0.030372836961539352 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5959183673469388, + "acc_stderr": 0.031414708025865865, + "acc_norm": 0.5959183673469388, + "acc_norm_stderr": 0.031414708025865865 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6835443037974683, + "acc_stderr": 0.030274974880218974, + "acc_norm": 0.6835443037974683, + "acc_norm_stderr": 0.030274974880218974 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3983050847457627, + "acc_stderr": 0.012503310565166228, + "acc_norm": 0.3983050847457627, + "acc_norm_stderr": 0.012503310565166228 + }, 
+ "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5931372549019608, + "acc_stderr": 0.03447891136353382, + "acc_norm": 0.5931372549019608, + "acc_norm_stderr": 0.03447891136353382 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5757575757575758, + "acc_stderr": 0.038592681420702636, + "acc_norm": 0.5757575757575758, + "acc_norm_stderr": 0.038592681420702636 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.598531211750306, + "mc1_stderr": 0.017160273901693657, + "mc2": 0.7216874872395035, + "mc2_stderr": 0.014585843569251807 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4817001180637544, + "acc_stderr": 0.017178836639177752, + "acc_norm": 0.5171192443919717, + "acc_norm_stderr": 0.01718027524608563 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 
1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "gwonny/nox-solar-10.7b-v4-kolon-all-5", + "model_sha": "7740ff37d393bf9afdc30d2109da530506590091", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/haes95/POLAR-10.7B-HES-DPO-v0.1/result_2024-05-29 06:48:14.json b/haes95/POLAR-10.7B-HES-DPO-v0.1/result_2024-05-29 06:48:14.json new file mode 100644 index 
0000000000000000000000000000000000000000..3b57af8a296fd59148cf215eef66a992da135f71 --- /dev/null +++ b/haes95/POLAR-10.7B-HES-DPO-v0.1/result_2024-05-29 06:48:14.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.7568259385665529, + "acc_stderr": 0.012536554144587087, + "acc_norm": 0.7866894197952219, + "acc_norm_stderr": 0.011970971742326334 + }, + "harness|ko_hellaswag|10": { + "acc": 0.7290380402310297, + "acc_stderr": 0.004435481515909404, + "acc_norm": 0.8115913164708225, + "acc_norm_stderr": 0.003902389997738998 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.7251461988304093, + "acc_stderr": 0.034240429246915824, + "acc_norm": 0.7251461988304093, + "acc_norm_stderr": 0.034240429246915824 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.7572815533980582, + "acc_stderr": 0.04245022486384495, + "acc_norm": 0.7572815533980582, + "acc_norm_stderr": 0.04245022486384495 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.70242656449553, + "acc_stderr": 0.016349111912909425, + "acc_norm": 0.70242656449553, + "acc_norm_stderr": 0.016349111912909425 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4888888888888889, + "acc_stderr": 0.04318275491977976, + "acc_norm": 0.4888888888888889, + "acc_norm_stderr": 0.04318275491977976 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4978723404255319, + "acc_stderr": 0.03268572658667491, + "acc_norm": 0.4978723404255319, + "acc_norm_stderr": 0.03268572658667491 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4879518072289157, + "acc_stderr": 0.038913644958358196, + "acc_norm": 0.4879518072289157, + "acc_norm_stderr": 0.038913644958358196 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6334405144694534, + "acc_stderr": 0.02736807824397163, + "acc_norm": 0.6334405144694534, + "acc_norm_stderr": 
0.02736807824397163 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.6681614349775785, + "acc_stderr": 0.031602951437766785, + "acc_norm": 0.6681614349775785, + "acc_norm_stderr": 0.031602951437766785 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5801526717557252, + "acc_stderr": 0.04328577215262972, + "acc_norm": 0.5801526717557252, + "acc_norm_stderr": 0.04328577215262972 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.51, + "acc_stderr": 0.050241839379569095, + "acc_norm": 0.51, + "acc_norm_stderr": 0.050241839379569095 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7323232323232324, + "acc_stderr": 0.03154449888270286, + "acc_norm": 0.7323232323232324, + "acc_norm_stderr": 0.03154449888270286 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5241379310344828, + "acc_stderr": 0.041618085035015295, + "acc_norm": 0.5241379310344828, + "acc_norm_stderr": 0.041618085035015295 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3431372549019608, + "acc_stderr": 0.047240073523838876, + "acc_norm": 0.3431372549019608, + "acc_norm_stderr": 0.047240073523838876 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6260504201680672, + "acc_stderr": 0.031429466378837076, + "acc_norm": 0.6260504201680672, + "acc_norm_stderr": 0.031429466378837076 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.6307692307692307, + "acc_stderr": 0.02446861524147893, + "acc_norm": 0.6307692307692307, + "acc_norm_stderr": 0.02446861524147893 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.68, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.68, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.04605661864718381, + "acc_norm": 0.3, + "acc_norm_stderr": 0.04605661864718381 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6851851851851852, + "acc_stderr": 0.04489931073591312, + "acc_norm": 0.6851851851851852, + 
"acc_norm_stderr": 0.04489931073591312 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.034819048444388045, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.034819048444388045 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.635483870967742, + "acc_stderr": 0.027379871229943245, + "acc_norm": 0.635483870967742, + "acc_norm_stderr": 0.027379871229943245 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.8162393162393162, + "acc_stderr": 0.025372139671722933, + "acc_norm": 0.8162393162393162, + "acc_norm_stderr": 0.025372139671722933 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5811320754716981, + "acc_stderr": 0.03036505082911521, + "acc_norm": 0.5811320754716981, + "acc_norm_stderr": 0.03036505082911521 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.04607582090719976, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.04607582090719976 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.02944316932303154, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.02944316932303154 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3509933774834437, + "acc_stderr": 0.03896981964257375, + "acc_norm": 0.3509933774834437, + "acc_norm_stderr": 0.03896981964257375 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7313432835820896, + "acc_stderr": 0.03134328358208954, + "acc_norm": 0.7313432835820896, + "acc_norm_stderr": 0.03134328358208954 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5722543352601156, + "acc_stderr": 0.03772446857518027, + "acc_norm": 0.5722543352601156, + "acc_norm_stderr": 0.03772446857518027 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.4576719576719577, + "acc_stderr": 0.025658868862058322, + "acc_norm": 0.4576719576719577, + "acc_norm_stderr": 0.025658868862058322 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 
0.5902777777777778, + "acc_stderr": 0.04112490974670787, + "acc_norm": 0.5902777777777778, + "acc_norm_stderr": 0.04112490974670787 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.75, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.75, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.6040462427745664, + "acc_stderr": 0.02632981334194625, + "acc_norm": 0.6040462427745664, + "acc_norm_stderr": 0.02632981334194625 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.588957055214724, + "acc_stderr": 0.038656978537853624, + "acc_norm": 0.588957055214724, + "acc_norm_stderr": 0.038656978537853624 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6604938271604939, + "acc_stderr": 0.026348564412011624, + "acc_norm": 0.6604938271604939, + "acc_norm_stderr": 0.026348564412011624 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.047609522856952344, + "acc_norm": 0.34, + "acc_norm_stderr": 0.047609522856952344 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7564766839378239, + "acc_stderr": 0.030975436386845426, + "acc_norm": 0.7564766839378239, + "acc_norm_stderr": 0.030975436386845426 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.4824561403508772, + "acc_stderr": 0.04700708033551038, + "acc_norm": 0.4824561403508772, + "acc_norm_stderr": 0.04700708033551038 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.728440366972477, + "acc_stderr": 0.019069098363191452, + "acc_norm": 0.728440366972477, + "acc_norm_stderr": 0.019069098363191452 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3968253968253968, + "acc_stderr": 0.04375888492727062, + "acc_norm": 0.3968253968253968, + "acc_norm_stderr": 0.04375888492727062 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 
0.6437908496732027, + "acc_stderr": 0.027420477662629235, + "acc_norm": 0.6437908496732027, + "acc_norm_stderr": 0.027420477662629235 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.71, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.71, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7520661157024794, + "acc_stderr": 0.039418975265163025, + "acc_norm": 0.7520661157024794, + "acc_norm_stderr": 0.039418975265163025 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.6447368421052632, + "acc_stderr": 0.03894734487013317, + "acc_norm": 0.6447368421052632, + "acc_norm_stderr": 0.03894734487013317 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5833333333333334, + "acc_stderr": 0.01994491413687359, + "acc_norm": 0.5833333333333334, + "acc_norm_stderr": 0.01994491413687359 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.4397163120567376, + "acc_stderr": 0.02960991207559411, + "acc_norm": 0.4397163120567376, + "acc_norm_stderr": 0.02960991207559411 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.04697113923010213, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.04697113923010213 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5, + "acc_stderr": 0.034099716973523674, + "acc_norm": 0.5, + "acc_norm_stderr": 0.034099716973523674 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.358659217877095, + "acc_stderr": 0.01604045442616448, + "acc_norm": 0.358659217877095, + "acc_norm_stderr": 0.01604045442616448 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.71, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.71, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_professional_medicine|5": { + 
"acc": 0.5220588235294118, + "acc_stderr": 0.030343264224213528, + "acc_norm": 0.5220588235294118, + "acc_norm_stderr": 0.030343264224213528 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6408163265306123, + "acc_stderr": 0.03071356045510849, + "acc_norm": 0.6408163265306123, + "acc_norm_stderr": 0.03071356045510849 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7341772151898734, + "acc_stderr": 0.028756799629658335, + "acc_norm": 0.7341772151898734, + "acc_norm_stderr": 0.028756799629658335 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.4452411994784876, + "acc_stderr": 0.012693421303973294, + "acc_norm": 0.4452411994784876, + "acc_norm_stderr": 0.012693421303973294 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6568627450980392, + "acc_stderr": 0.03332139944668086, + "acc_norm": 0.6568627450980392, + "acc_norm_stderr": 0.03332139944668086 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6787878787878788, + "acc_stderr": 0.03646204963253812, + "acc_norm": 0.6787878787878788, + "acc_norm_stderr": 0.03646204963253812 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.7539779681762546, + "mc1_stderr": 0.0150772192006626, + "mc2": 0.8274430024876431, + "mc2_stderr": 0.012970293529776238 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4781582054309327, + "acc_stderr": 0.017173944474294378, + "acc_norm": 0.4982290436835891, + "acc_norm_stderr": 0.017190246276231863 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + 
"harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + 
"harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "haes95/POLAR-10.7B-HES-DPO-v0.1", + "model_sha": "5189a0cbc5f7e19cab9f1d875a850afe481f5ccc", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/haes95/cdlm-7-ko-nl2sql-v1.0/result_2024-04-18 23:46:09.json b/haes95/cdlm-7-ko-nl2sql-v1.0/result_2024-04-18 23:46:09.json new file mode 100644 index 0000000000000000000000000000000000000000..c9ac5513002fb4494e6e8b93ad25585df82b66de --- /dev/null +++ b/haes95/cdlm-7-ko-nl2sql-v1.0/result_2024-04-18 23:46:09.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.24914675767918087, + "acc_stderr": 0.012639407111926432, + "acc_norm": 0.3054607508532423, + "acc_norm_stderr": 0.013460080478002501 + }, + "harness|ko_hellaswag|10": { + "acc": 0.32085241983668594, + "acc_stderr": 0.004658501662277613, + "acc_norm": 0.3899621589324836, + "acc_norm_stderr": 0.00486744594527715 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.3157894736842105, + "acc_stderr": 0.035650796707083106, + "acc_norm": 0.3157894736842105, + "acc_norm_stderr": 0.035650796707083106 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.33980582524271846, + "acc_stderr": 0.046897659372781335, + "acc_norm": 0.33980582524271846, + "acc_norm_stderr": 0.046897659372781335 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3103448275862069, + "acc_stderr": 0.016543785026048315, + "acc_norm": 0.3103448275862069, + "acc_norm_stderr": 0.016543785026048315 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2814814814814815, + "acc_stderr": 0.03885004245800254, + "acc_norm": 0.2814814814814815, + "acc_norm_stderr": 
0.03885004245800254 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3276595744680851, + "acc_stderr": 0.030683020843231008, + "acc_norm": 0.3276595744680851, + "acc_norm_stderr": 0.030683020843231008 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.2891566265060241, + "acc_stderr": 0.03529486801511115, + "acc_norm": 0.2891566265060241, + "acc_norm_stderr": 0.03529486801511115 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.36977491961414793, + "acc_stderr": 0.027417996705630998, + "acc_norm": 0.36977491961414793, + "acc_norm_stderr": 0.027417996705630998 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.273542600896861, + "acc_stderr": 0.02991858670779883, + "acc_norm": 0.273542600896861, + "acc_norm_stderr": 0.02991858670779883 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3053435114503817, + "acc_stderr": 0.04039314978724561, + "acc_norm": 0.3053435114503817, + "acc_norm_stderr": 0.04039314978724561 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3787878787878788, + "acc_stderr": 0.03456088731993747, + "acc_norm": 0.3787878787878788, + "acc_norm_stderr": 0.03456088731993747 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3793103448275862, + "acc_stderr": 0.04043461861916747, + "acc_norm": 0.3793103448275862, + "acc_norm_stderr": 0.04043461861916747 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.04336432707993177, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.04336432707993177 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.33613445378151263, + "acc_stderr": 0.03068473711513536, + "acc_norm": 0.33613445378151263, 
+ "acc_norm_stderr": 0.03068473711513536 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.31025641025641026, + "acc_stderr": 0.023454674889404295, + "acc_norm": 0.31025641025641026, + "acc_norm_stderr": 0.023454674889404295 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.04643454608906275, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.04643454608906275 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.29064039408866993, + "acc_stderr": 0.031947400722655395, + "acc_norm": 0.29064039408866993, + "acc_norm_stderr": 0.031947400722655395 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.33548387096774196, + "acc_stderr": 0.026860206444724342, + "acc_norm": 0.33548387096774196, + "acc_norm_stderr": 0.026860206444724342 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5598290598290598, + "acc_stderr": 0.0325207417206305, + "acc_norm": 0.5598290598290598, + "acc_norm_stderr": 0.0325207417206305 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.30566037735849055, + "acc_stderr": 0.02835329807332267, + "acc_norm": 0.30566037735849055, + "acc_norm_stderr": 0.02835329807332267 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.37272727272727274, + "acc_stderr": 0.046313813194254635, + "acc_norm": 0.37272727272727274, + "acc_norm_stderr": 0.046313813194254635 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2740740740740741, + "acc_stderr": 0.027195934804085626, + "acc_norm": 0.2740740740740741, + "acc_norm_stderr": 0.027195934804085626 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 
0.03757949922943342, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.03757949922943342 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.3681592039800995, + "acc_stderr": 0.03410410565495301, + "acc_norm": 0.3681592039800995, + "acc_norm_stderr": 0.03410410565495301 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2543352601156069, + "acc_stderr": 0.0332055644308557, + "acc_norm": 0.2543352601156069, + "acc_norm_stderr": 0.0332055644308557 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.31746031746031744, + "acc_stderr": 0.02397386199899208, + "acc_norm": 0.31746031746031744, + "acc_norm_stderr": 0.02397386199899208 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.03852084696008534, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.03852084696008534 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.45, + "acc_stderr": 0.049999999999999996, + "acc_norm": 0.45, + "acc_norm_stderr": 0.049999999999999996 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.3208092485549133, + "acc_stderr": 0.0251310002336479, + "acc_norm": 0.3208092485549133, + "acc_norm_stderr": 0.0251310002336479 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3006134969325153, + "acc_stderr": 0.03602511318806771, + "acc_norm": 0.3006134969325153, + "acc_norm_stderr": 0.03602511318806771 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3487654320987654, + "acc_stderr": 0.02651759772446501, + "acc_norm": 0.3487654320987654, + "acc_norm_stderr": 0.02651759772446501 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720685, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720685 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.25906735751295334, + 
"acc_stderr": 0.031618779179354094, + "acc_norm": 0.25906735751295334, + "acc_norm_stderr": 0.031618779179354094 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.042270544512321984, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.042270544512321984 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3247706422018349, + "acc_stderr": 0.02007772910931032, + "acc_norm": 0.3247706422018349, + "acc_norm_stderr": 0.02007772910931032 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.23809523809523808, + "acc_stderr": 0.038095238095238106, + "acc_norm": 0.23809523809523808, + "acc_norm_stderr": 0.038095238095238106 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3464052287581699, + "acc_stderr": 0.027245613047215362, + "acc_norm": 0.3464052287581699, + "acc_norm_stderr": 0.027245613047215362 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.48760330578512395, + "acc_stderr": 0.045629515481807666, + "acc_norm": 0.48760330578512395, + "acc_norm_stderr": 0.045629515481807666 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.23026315789473684, + "acc_stderr": 0.03426059424403165, + "acc_norm": 0.23026315789473684, + "acc_norm_stderr": 0.03426059424403165 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2826797385620915, + "acc_stderr": 0.018217269552053442, + "acc_norm": 0.2826797385620915, + "acc_norm_stderr": 0.018217269552053442 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2872340425531915, + "acc_stderr": 0.026992199173064356, + "acc_norm": 0.2872340425531915, + "acc_norm_stderr": 0.026992199173064356 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.36607142857142855, + "acc_stderr": 0.0457237235873743, + "acc_norm": 0.36607142857142855, + "acc_norm_stderr": 0.0457237235873743 + }, + 
"harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2824074074074074, + "acc_stderr": 0.030701372111510927, + "acc_norm": 0.2824074074074074, + "acc_norm_stderr": 0.030701372111510927 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2346368715083799, + "acc_stderr": 0.014173044098303672, + "acc_norm": 0.2346368715083799, + "acc_norm_stderr": 0.014173044098303672 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.2867647058823529, + "acc_stderr": 0.027472274473233818, + "acc_norm": 0.2867647058823529, + "acc_norm_stderr": 0.027472274473233818 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3795918367346939, + "acc_stderr": 0.031067211262872478, + "acc_norm": 0.3795918367346939, + "acc_norm_stderr": 0.031067211262872478 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.3755274261603376, + "acc_stderr": 0.03152256243091156, + "acc_norm": 0.3755274261603376, + "acc_norm_stderr": 0.03152256243091156 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.26988265971316816, + "acc_stderr": 0.0113373810842504, + "acc_norm": 0.26988265971316816, + "acc_norm_stderr": 0.0113373810842504 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.3382352941176471, + "acc_stderr": 0.03320574612945431, + "acc_norm": 0.3382352941176471, + "acc_norm_stderr": 0.03320574612945431 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.296969696969697, + "acc_stderr": 0.03567969772268048, + "acc_norm": 0.296969696969697, + "acc_norm_stderr": 0.03567969772268048 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2864137086903305, + "mc1_stderr": 0.01582614243950234, + "mc2": 
0.46230445157252886, + "mc2_stderr": 0.015566152572159462 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2502951593860685, + "acc_stderr": 0.014893137573316869, + "acc_norm": 0.3317591499409681, + "acc_norm_stderr": 0.016187984642157316 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + 
"harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "haes95/cdlm-7-ko-nl2sql-v1.0", + "model_sha": "7b0a4f9a8a534c288e7f749c74ef2b1d4738fcb3", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/haoranxu/ALMA-13B-Pretrain/result_2024-05-15 16:36:20.json b/haoranxu/ALMA-13B-Pretrain/result_2024-05-15 16:36:20.json new file mode 100644 index 0000000000000000000000000000000000000000..f5e6b1c99ae3ac486fb536bf129b20c46dc08699 --- /dev/null +++ b/haoranxu/ALMA-13B-Pretrain/result_2024-05-15 16:36:20.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.28924914675767915, + "acc_stderr": 0.013250012579393441, + "acc_norm": 0.3174061433447099, + "acc_norm_stderr": 0.01360223908803817 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3348934475204143, + "acc_stderr": 0.004709886644157089, + "acc_norm": 0.41435968930491934, + "acc_norm_stderr": 0.0049160438384556636 + }, + 
"harness|ko_mmlu_world_religions|5": { + "acc": 0.4269005847953216, + "acc_stderr": 0.03793620616529917, + "acc_norm": 0.4269005847953216, + "acc_norm_stderr": 0.03793620616529917 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.3106796116504854, + "acc_stderr": 0.04582124160161549, + "acc_norm": 0.3106796116504854, + "acc_norm_stderr": 0.04582124160161549 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.384418901660281, + "acc_stderr": 0.01739568874281962, + "acc_norm": 0.384418901660281, + "acc_norm_stderr": 0.01739568874281962 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.039446241625011175, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.039446241625011175 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720683, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720683 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.33617021276595743, + "acc_stderr": 0.030881618520676942, + "acc_norm": 0.33617021276595743, + "acc_norm_stderr": 0.030881618520676942 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.28313253012048195, + "acc_stderr": 0.03507295431370519, + "acc_norm": 0.28313253012048195, + "acc_norm_stderr": 0.03507295431370519 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3729903536977492, + "acc_stderr": 0.027466610213140116, + "acc_norm": 0.3729903536977492, + "acc_norm_stderr": 0.027466610213140116 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.336322869955157, + "acc_stderr": 0.031708824268455, + "acc_norm": 0.336322869955157, + "acc_norm_stderr": 0.031708824268455 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.35877862595419846, + "acc_stderr": 0.04206739313864908, + "acc_norm": 0.35877862595419846, + "acc_norm_stderr": 0.04206739313864908 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + 
"harness|ko_mmlu_high_school_geography|5": { + "acc": 0.35858585858585856, + "acc_stderr": 0.0341690364039152, + "acc_norm": 0.35858585858585856, + "acc_norm_stderr": 0.0341690364039152 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.35172413793103446, + "acc_stderr": 0.0397923663749741, + "acc_norm": 0.35172413793103446, + "acc_norm_stderr": 0.0397923663749741 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.043364327079931785, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.043364327079931785 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.35294117647058826, + "acc_stderr": 0.031041941304059288, + "acc_norm": 0.35294117647058826, + "acc_norm_stderr": 0.031041941304059288 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.34102564102564104, + "acc_stderr": 0.02403548967633505, + "acc_norm": 0.34102564102564104, + "acc_norm_stderr": 0.02403548967633505 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.0466840803302493, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.0466840803302493 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.33497536945812806, + "acc_stderr": 0.033208527423483104, + "acc_norm": 0.33497536945812806, + "acc_norm_stderr": 0.033208527423483104 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.38387096774193546, + "acc_stderr": 0.027666182075539628, + "acc_norm": 0.38387096774193546, + "acc_norm_stderr": 0.027666182075539628 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5128205128205128, + "acc_stderr": 0.032745319388423504, + "acc_norm": 
0.5128205128205128, + "acc_norm_stderr": 0.032745319388423504 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3132075471698113, + "acc_stderr": 0.02854479331905533, + "acc_norm": 0.3132075471698113, + "acc_norm_stderr": 0.02854479331905533 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.39090909090909093, + "acc_stderr": 0.04673752333670238, + "acc_norm": 0.39090909090909093, + "acc_norm_stderr": 0.04673752333670238 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2518518518518518, + "acc_stderr": 0.026466117538959916, + "acc_norm": 0.2518518518518518, + "acc_norm_stderr": 0.026466117538959916 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.25165562913907286, + "acc_stderr": 0.035433042343899844, + "acc_norm": 0.25165562913907286, + "acc_norm_stderr": 0.035433042343899844 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.46766169154228854, + "acc_stderr": 0.03528131472933608, + "acc_norm": 0.46766169154228854, + "acc_norm_stderr": 0.03528131472933608 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3063583815028902, + "acc_stderr": 0.035149425512674394, + "acc_norm": 0.3063583815028902, + "acc_norm_stderr": 0.035149425512674394 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30158730158730157, + "acc_stderr": 0.023636975996101813, + "acc_norm": 0.30158730158730157, + "acc_norm_stderr": 0.023636975996101813 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.03745554791462457, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.03745554791462457 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.3583815028901734, + 
"acc_stderr": 0.025816756791584183, + "acc_norm": 0.3583815028901734, + "acc_norm_stderr": 0.025816756791584183 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3128834355828221, + "acc_stderr": 0.036429145782924034, + "acc_norm": 0.3128834355828221, + "acc_norm_stderr": 0.036429145782924034 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.02712511551316686, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.02712511551316686 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.32642487046632124, + "acc_stderr": 0.03384028621143295, + "acc_norm": 0.32642487046632124, + "acc_norm_stderr": 0.03384028621143295 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.039994238792813344, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.039994238792813344 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3119266055045872, + "acc_stderr": 0.019862967976707245, + "acc_norm": 0.3119266055045872, + "acc_norm_stderr": 0.019862967976707245 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.18253968253968253, + "acc_stderr": 0.03455071019102148, + "acc_norm": 0.18253968253968253, + "acc_norm_stderr": 0.03455071019102148 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.33986928104575165, + "acc_stderr": 0.027121956071388856, + "acc_norm": 0.33986928104575165, + "acc_norm_stderr": 0.027121956071388856 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.48760330578512395, + "acc_stderr": 0.04562951548180765, + "acc_norm": 0.48760330578512395, + "acc_norm_stderr": 0.04562951548180765 + }, + "harness|ko_mmlu_astronomy|5": 
{ + "acc": 0.32894736842105265, + "acc_stderr": 0.03823428969926605, + "acc_norm": 0.32894736842105265, + "acc_norm_stderr": 0.03823428969926605 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.28594771241830064, + "acc_stderr": 0.01828048507295467, + "acc_norm": 0.28594771241830064, + "acc_norm_stderr": 0.01828048507295467 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2765957446808511, + "acc_stderr": 0.026684564340460976, + "acc_norm": 0.2765957446808511, + "acc_norm_stderr": 0.026684564340460976 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.23214285714285715, + "acc_stderr": 0.040073418097558065, + "acc_norm": 0.23214285714285715, + "acc_norm_stderr": 0.040073418097558065 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2638888888888889, + "acc_stderr": 0.030058202704309846, + "acc_norm": 0.2638888888888889, + "acc_norm_stderr": 0.030058202704309846 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.27205882352941174, + "acc_stderr": 0.027033041151681456, + "acc_norm": 0.27205882352941174, + "acc_norm_stderr": 0.027033041151681456 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3469387755102041, + "acc_stderr": 0.03047252602672649, + "acc_norm": 0.3469387755102041, + "acc_norm_stderr": 0.03047252602672649 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.32489451476793246, + "acc_stderr": 0.03048603938910531, + "acc_norm": 0.32489451476793246, 
+ "acc_norm_stderr": 0.03048603938910531 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2588005215123859, + "acc_stderr": 0.011186109046564608, + "acc_norm": 0.2588005215123859, + "acc_norm_stderr": 0.011186109046564608 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.31862745098039214, + "acc_stderr": 0.032702871814820796, + "acc_norm": 0.31862745098039214, + "acc_norm_stderr": 0.032702871814820796 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3090909090909091, + "acc_stderr": 0.03608541011573967, + "acc_norm": 0.3090909090909091, + "acc_norm_stderr": 0.03608541011573967 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.27539779681762544, + "mc1_stderr": 0.015638135667775523, + "mc2": 0.4376394343341208, + "mc2_stderr": 0.015504984254453123 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2845336481700118, + "acc_stderr": 0.01551230165497177, + "acc_norm": 0.41440377804014167, + "acc_norm_stderr": 0.01693658338394363 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + 
"harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "haoranxu/ALMA-13B-Pretrain", + "model_sha": "b69ebad694274b929cfcf3db29dd7bb93d752e39", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No 
newline at end of file diff --git a/haoranxu/ALMA-13B-R/result_2024-05-16 16:06:11.json b/haoranxu/ALMA-13B-R/result_2024-05-16 16:06:11.json new file mode 100644 index 0000000000000000000000000000000000000000..90b06bbbb236811f001d641e93976c24e8060a3f --- /dev/null +++ b/haoranxu/ALMA-13B-R/result_2024-05-16 16:06:11.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.26535836177474403, + "acc_stderr": 0.012902554762313967, + "acc_norm": 0.3037542662116041, + "acc_norm_stderr": 0.013438909184778757 + }, + "harness|ko_hellaswag|10": { + "acc": 0.32742481577375027, + "acc_stderr": 0.004683146373232266, + "acc_norm": 0.4028082055367457, + "acc_norm_stderr": 0.004894604293405646 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.39766081871345027, + "acc_stderr": 0.0375363895576169, + "acc_norm": 0.39766081871345027, + "acc_norm_stderr": 0.0375363895576169 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.32038834951456313, + "acc_stderr": 0.046202840822800406, + "acc_norm": 0.32038834951456313, + "acc_norm_stderr": 0.046202840822800406 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.40357598978288634, + "acc_stderr": 0.017544332237926417, + "acc_norm": 0.40357598978288634, + "acc_norm_stderr": 0.017544332237926417 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3111111111111111, + "acc_stderr": 0.03999262876617723, + "acc_norm": 0.3111111111111111, + "acc_norm_stderr": 0.03999262876617723 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3404255319148936, + "acc_stderr": 0.03097669299853442, + "acc_norm": 0.3404255319148936, + "acc_norm_stderr": 0.03097669299853442 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3253012048192771, + "acc_stderr": 0.03647168523683228, + "acc_norm": 0.3253012048192771, + "acc_norm_stderr": 0.03647168523683228 + }, + 
"harness|ko_mmlu_philosophy|5": { + "acc": 0.3890675241157556, + "acc_stderr": 0.027690337536485376, + "acc_norm": 0.3890675241157556, + "acc_norm_stderr": 0.027690337536485376 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3991031390134529, + "acc_stderr": 0.032867453125679603, + "acc_norm": 0.3991031390134529, + "acc_norm_stderr": 0.032867453125679603 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4198473282442748, + "acc_stderr": 0.04328577215262972, + "acc_norm": 0.4198473282442748, + "acc_norm_stderr": 0.04328577215262972 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.32323232323232326, + "acc_stderr": 0.03332299921070645, + "acc_norm": 0.32323232323232326, + "acc_norm_stderr": 0.03332299921070645 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4, + "acc_stderr": 0.04082482904638628, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04082482904638628 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237654, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237654 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.31932773109243695, + "acc_stderr": 0.030283995525884396, + "acc_norm": 0.31932773109243695, + "acc_norm_stderr": 0.030283995525884396 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3487179487179487, + "acc_stderr": 0.02416278028401772, + "acc_norm": 0.3487179487179487, + "acc_norm_stderr": 0.02416278028401772 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + 
"harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.04643454608906275, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.04643454608906275 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3103448275862069, + "acc_stderr": 0.03255086769970103, + "acc_norm": 0.3103448275862069, + "acc_norm_stderr": 0.03255086769970103 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3709677419354839, + "acc_stderr": 0.027480541887953593, + "acc_norm": 0.3709677419354839, + "acc_norm_stderr": 0.027480541887953593 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.48717948717948717, + "acc_stderr": 0.032745319388423504, + "acc_norm": 0.48717948717948717, + "acc_norm_stderr": 0.032745319388423504 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2981132075471698, + "acc_stderr": 0.02815283794249386, + "acc_norm": 0.2981132075471698, + "acc_norm_stderr": 0.02815283794249386 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4, + "acc_stderr": 0.0469237132203465, + "acc_norm": 0.4, + "acc_norm_stderr": 0.0469237132203465 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25555555555555554, + "acc_stderr": 0.026593939101844054, + "acc_norm": 0.25555555555555554, + "acc_norm_stderr": 0.026593939101844054 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.03757949922943342, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.03757949922943342 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.3880597014925373, + "acc_stderr": 0.034457899643627506, + "acc_norm": 0.3880597014925373, + "acc_norm_stderr": 0.034457899643627506 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2543352601156069, + "acc_stderr": 0.0332055644308557, + "acc_norm": 0.2543352601156069, + "acc_norm_stderr": 0.0332055644308557 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.022569897074918424, + "acc_norm": 
0.25925925925925924, + "acc_norm_stderr": 0.022569897074918424 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2708333333333333, + "acc_stderr": 0.03716177437566017, + "acc_norm": 0.2708333333333333, + "acc_norm_stderr": 0.03716177437566017 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939098, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939098 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.36416184971098264, + "acc_stderr": 0.025906632631016127, + "acc_norm": 0.36416184971098264, + "acc_norm_stderr": 0.025906632631016127 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.31901840490797545, + "acc_stderr": 0.03661997551073836, + "acc_norm": 0.31901840490797545, + "acc_norm_stderr": 0.03661997551073836 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.33024691358024694, + "acc_stderr": 0.026168298456732846, + "acc_norm": 0.33024691358024694, + "acc_norm_stderr": 0.026168298456732846 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.3160621761658031, + "acc_stderr": 0.033553973696861736, + "acc_norm": 0.3160621761658031, + "acc_norm_stderr": 0.033553973696861736 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.042270544512321984, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.042270544512321984 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.30275229357798167, + "acc_stderr": 0.019698711434756357, + "acc_norm": 0.30275229357798167, + "acc_norm_stderr": 0.019698711434756357 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.21428571428571427, + "acc_stderr": 
0.03670066451047182, + "acc_norm": 0.21428571428571427, + "acc_norm_stderr": 0.03670066451047182 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.026992544339297236, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.026992544339297236 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5289256198347108, + "acc_stderr": 0.04556710331269498, + "acc_norm": 0.5289256198347108, + "acc_norm_stderr": 0.04556710331269498 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.25, + "acc_stderr": 0.03523807393012047, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03523807393012047 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.29248366013071897, + "acc_stderr": 0.01840341571010979, + "acc_norm": 0.29248366013071897, + "acc_norm_stderr": 0.01840341571010979 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2765957446808511, + "acc_stderr": 0.026684564340460997, + "acc_norm": 0.2765957446808511, + "acc_norm_stderr": 0.026684564340460997 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25, + "acc_stderr": 0.04109974682633932, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04109974682633932 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.25462962962962965, + "acc_stderr": 0.029711275860005344, + "acc_norm": 0.25462962962962965, + "acc_norm_stderr": 0.029711275860005344 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, 
+ "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3639705882352941, + "acc_stderr": 0.029227192460032025, + "acc_norm": 0.3639705882352941, + "acc_norm_stderr": 0.029227192460032025 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.24897959183673468, + "acc_stderr": 0.027682979522960234, + "acc_norm": 0.24897959183673468, + "acc_norm_stderr": 0.027682979522960234 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.29957805907172996, + "acc_stderr": 0.0298180247497531, + "acc_norm": 0.29957805907172996, + "acc_norm_stderr": 0.0298180247497531 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2457627118644068, + "acc_stderr": 0.010996156635142692, + "acc_norm": 0.2457627118644068, + "acc_norm_stderr": 0.010996156635142692 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.030190282453501954, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.030190282453501954 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2787878787878788, + "acc_stderr": 0.035014387062967806, + "acc_norm": 0.2787878787878788, + "acc_norm_stderr": 0.035014387062967806 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.27539779681762544, + "mc1_stderr": 0.015638135667775523, + "mc2": 0.43092237168707725, + "mc2_stderr": 0.015507565145148897 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2715466351829988, + "acc_stderr": 0.015291071117310378, + "acc_norm": 0.33530106257378983, + "acc_norm_stderr": 0.016230981232989813 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + 
"harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 
1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "haoranxu/ALMA-13B-R", + "model_sha": "f0a3613c5da62cbe85fb90ea348932ddfc022b22", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/haoranxu/ALMA-7B-R/result_2024-07-29 21:47:13.json b/haoranxu/ALMA-7B-R/result_2024-07-29 21:47:13.json new file mode 100644 index 0000000000000000000000000000000000000000..a7b97d80db9db06bc0ec0c387592e92b8083d26c --- /dev/null +++ b/haoranxu/ALMA-7B-R/result_2024-07-29 21:47:13.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.23378839590443687, + "acc_stderr": 0.012368225378507123, + "acc_norm": 0.26535836177474403, + "acc_norm_stderr": 0.012902554762313964 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3042222664807807, + "acc_stderr": 0.00459136985327653, + "acc_norm": 0.35789683330013944, + "acc_norm_stderr": 0.0047840184976797985 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.2573099415204678, + "acc_stderr": 0.03352799844161865, + "acc_norm": 0.2573099415204678, + "acc_norm_stderr": 0.03352799844161865 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.24271844660194175, + "acc_stderr": 0.04245022486384495, + "acc_norm": 0.24271844660194175, + "acc_norm_stderr": 0.04245022486384495 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.31417624521072796, + "acc_stderr": 0.01659929173588491, + "acc_norm": 0.31417624521072796, + "acc_norm_stderr": 0.01659929173588491 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3111111111111111, + "acc_stderr": 0.03999262876617722, + 
"acc_norm": 0.3111111111111111, + "acc_norm_stderr": 0.03999262876617722 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.33191489361702126, + "acc_stderr": 0.030783736757745657, + "acc_norm": 0.33191489361702126, + "acc_norm_stderr": 0.030783736757745657 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.2710843373493976, + "acc_stderr": 0.03460579907553026, + "acc_norm": 0.2710843373493976, + "acc_norm_stderr": 0.03460579907553026 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.33762057877813506, + "acc_stderr": 0.026858825879488554, + "acc_norm": 0.33762057877813506, + "acc_norm_stderr": 0.026858825879488554 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3273542600896861, + "acc_stderr": 0.031493846709941306, + "acc_norm": 0.3273542600896861, + "acc_norm_stderr": 0.031493846709941306 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2748091603053435, + "acc_stderr": 0.03915345408847835, + "acc_norm": 0.2748091603053435, + "acc_norm_stderr": 0.03915345408847835 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.2474747474747475, + "acc_stderr": 0.03074630074212451, + "acc_norm": 0.2474747474747475, + "acc_norm_stderr": 0.03074630074212451 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2620689655172414, + "acc_stderr": 0.036646663372252565, + "acc_norm": 0.2620689655172414, + "acc_norm_stderr": 0.036646663372252565 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.041583075330832865, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.041583075330832865 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.2773109243697479, + 
"acc_stderr": 0.029079374539480007, + "acc_norm": 0.2773109243697479, + "acc_norm_stderr": 0.029079374539480007 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2205128205128205, + "acc_stderr": 0.02102067268082791, + "acc_norm": 0.2205128205128205, + "acc_norm_stderr": 0.02102067268082791 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.044531975073749834, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.044531975073749834 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.29064039408866993, + "acc_stderr": 0.031947400722655395, + "acc_norm": 0.29064039408866993, + "acc_norm_stderr": 0.031947400722655395 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2967741935483871, + "acc_stderr": 0.02598850079241189, + "acc_norm": 0.2967741935483871, + "acc_norm_stderr": 0.02598850079241189 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.37606837606837606, + "acc_stderr": 0.03173393632969482, + "acc_norm": 0.37606837606837606, + "acc_norm_stderr": 0.03173393632969482 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.28679245283018867, + "acc_stderr": 0.027834912527544067, + "acc_norm": 0.28679245283018867, + "acc_norm_stderr": 0.027834912527544067 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.3, + "acc_stderr": 0.04389311454644286, + "acc_norm": 0.3, + "acc_norm_stderr": 0.04389311454644286 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.026719240783712173, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.026719240783712173 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 
0.2582781456953642, + "acc_stderr": 0.035737053147634576, + "acc_norm": 0.2582781456953642, + "acc_norm_stderr": 0.035737053147634576 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.23880597014925373, + "acc_stderr": 0.030147775935409224, + "acc_norm": 0.23880597014925373, + "acc_norm_stderr": 0.030147775935409224 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.21965317919075145, + "acc_stderr": 0.031568093627031744, + "acc_norm": 0.21965317919075145, + "acc_norm_stderr": 0.031568093627031744 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.24867724867724866, + "acc_stderr": 0.022261817692400175, + "acc_norm": 0.24867724867724866, + "acc_norm_stderr": 0.022261817692400175 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.22916666666666666, + "acc_stderr": 0.03514697467862388, + "acc_norm": 0.22916666666666666, + "acc_norm_stderr": 0.03514697467862388 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.28901734104046245, + "acc_stderr": 0.024405173935783234, + "acc_norm": 0.28901734104046245, + "acc_norm_stderr": 0.024405173935783234 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.25153374233128833, + "acc_stderr": 0.03408997886857529, + "acc_norm": 0.25153374233128833, + "acc_norm_stderr": 0.03408997886857529 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.28703703703703703, + "acc_stderr": 0.02517104191530968, + "acc_norm": 0.28703703703703703, + "acc_norm_stderr": 0.02517104191530968 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.23, + "acc_stderr": 0.042295258468165065, + "acc_norm": 0.23, + "acc_norm_stderr": 0.042295258468165065 + }, + 
"harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.2538860103626943, + "acc_stderr": 0.031410247805653164, + "acc_norm": 0.2538860103626943, + "acc_norm_stderr": 0.031410247805653164 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.04185774424022056, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.04185774424022056 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.24220183486238533, + "acc_stderr": 0.01836817630659862, + "acc_norm": 0.24220183486238533, + "acc_norm_stderr": 0.01836817630659862 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2698412698412698, + "acc_stderr": 0.03970158273235173, + "acc_norm": 0.2698412698412698, + "acc_norm_stderr": 0.03970158273235173 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2908496732026144, + "acc_stderr": 0.026004800363952113, + "acc_norm": 0.2908496732026144, + "acc_norm_stderr": 0.026004800363952113 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.4214876033057851, + "acc_stderr": 0.045077322787750944, + "acc_norm": 0.4214876033057851, + "acc_norm_stderr": 0.045077322787750944 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.18421052631578946, + "acc_stderr": 0.0315469804508223, + "acc_norm": 0.18421052631578946, + "acc_norm_stderr": 0.0315469804508223 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.24673202614379086, + "acc_stderr": 0.0174408203674025, + "acc_norm": 0.24673202614379086, + "acc_norm_stderr": 0.0174408203674025 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.25886524822695034, + "acc_stderr": 0.026129572527180844, + "acc_norm": 0.25886524822695034, + "acc_norm_stderr": 0.026129572527180844 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3125, + "acc_stderr": 0.043994650575715215, + "acc_norm": 0.3125, + 
"acc_norm_stderr": 0.043994650575715215 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.029886910547626943, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.029886910547626943 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.1801470588235294, + "acc_stderr": 0.02334516361654486, + "acc_norm": 0.1801470588235294, + "acc_norm_stderr": 0.02334516361654486 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.24489795918367346, + "acc_stderr": 0.027529637440174923, + "acc_norm": 0.24489795918367346, + "acc_norm_stderr": 0.027529637440174923 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.26582278481012656, + "acc_stderr": 0.02875679962965834, + "acc_norm": 0.26582278481012656, + "acc_norm_stderr": 0.02875679962965834 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2588005215123859, + "acc_stderr": 0.011186109046564613, + "acc_norm": 0.2588005215123859, + "acc_norm_stderr": 0.011186109046564613 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2696078431372549, + "acc_stderr": 0.03114557065948678, + "acc_norm": 0.2696078431372549, + "acc_norm_stderr": 0.03114557065948678 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2727272727272727, + "acc_stderr": 0.03477691162163659, + "acc_norm": 0.2727272727272727, + "acc_norm_stderr": 0.03477691162163659 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 
0.2741738066095471, + "mc1_stderr": 0.015616518497219374, + "mc2": 0.44803624915578977, + "mc2_stderr": 0.015848738653805608 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2550177095631641, + "acc_stderr": 0.014985559533428576, + "acc_norm": 0.35064935064935066, + "acc_norm_stderr": 0.0164055569038933 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "haoranxu/ALMA-7B-R", + "model_sha": "bb7e3cb4acd4211c7054949bc37366c752f75819", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/haoranxu/Llama-3-Instruct-8B-CPO-SimPO/result_2024-08-03 03:57:58.json b/haoranxu/Llama-3-Instruct-8B-CPO-SimPO/result_2024-08-03 03:57:58.json new file mode 100644 index 0000000000000000000000000000000000000000..500cd52cc60d15dd65266be0e5bf9161c57e65b1 --- /dev/null +++ b/haoranxu/Llama-3-Instruct-8B-CPO-SimPO/result_2024-08-03 03:57:58.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.39761092150170646, + "acc_stderr": 0.014301752223279538, + "acc_norm": 0.4658703071672355, + "acc_norm_stderr": 0.014577311315231102 + }, + "harness|ko_hellaswag|10": { + "acc": 
0.36825333598884685, + "acc_stderr": 0.004813448615404438, + "acc_norm": 0.48376817367058356, + "acc_norm_stderr": 0.004987151381091179 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5029239766081871, + "acc_stderr": 0.03834759370936839, + "acc_norm": 0.5029239766081871, + "acc_norm_stderr": 0.03834759370936839 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5922330097087378, + "acc_stderr": 0.04865777570410768, + "acc_norm": 0.5922330097087378, + "acc_norm_stderr": 0.04865777570410768 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4559386973180077, + "acc_stderr": 0.017810403925435363, + "acc_norm": 0.4559386973180077, + "acc_norm_stderr": 0.017810403925435363 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.362962962962963, + "acc_stderr": 0.041539484047424, + "acc_norm": 0.362962962962963, + "acc_norm_stderr": 0.041539484047424 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621503, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621503 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.46808510638297873, + "acc_stderr": 0.03261936918467382, + "acc_norm": 0.46808510638297873, + "acc_norm_stderr": 0.03261936918467382 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3614457831325301, + "acc_stderr": 0.0374005938202932, + "acc_norm": 0.3614457831325301, + "acc_norm_stderr": 0.0374005938202932 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5498392282958199, + "acc_stderr": 0.02825666072336018, + "acc_norm": 0.5498392282958199, + "acc_norm_stderr": 0.02825666072336018 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.49327354260089684, + "acc_stderr": 0.033554765962343545, + "acc_norm": 0.49327354260089684, + "acc_norm_stderr": 0.033554765962343545 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5190839694656488, + "acc_stderr": 0.043820947055509867, + "acc_norm": 0.5190839694656488, + "acc_norm_stderr": 0.043820947055509867 + }, + "harness|ko_mmlu_medical_genetics|5": { 
+ "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5, + "acc_stderr": 0.035623524993954825, + "acc_norm": 0.5, + "acc_norm_stderr": 0.035623524993954825 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.6068965517241379, + "acc_stderr": 0.040703290137070705, + "acc_norm": 0.6068965517241379, + "acc_norm_stderr": 0.040703290137070705 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04690650298201942, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04690650298201942 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5252100840336135, + "acc_stderr": 0.0324371805513741, + "acc_norm": 0.5252100840336135, + "acc_norm_stderr": 0.0324371805513741 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5051282051282051, + "acc_stderr": 0.025349672906838636, + "acc_norm": 0.5051282051282051, + "acc_norm_stderr": 0.025349672906838636 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.59, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.59, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6018518518518519, + "acc_stderr": 0.04732332615978814, + "acc_norm": 0.6018518518518519, + "acc_norm_stderr": 0.04732332615978814 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.49261083743842365, + "acc_stderr": 0.035176035403610084, + "acc_norm": 0.49261083743842365, + "acc_norm_stderr": 0.035176035403610084 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5419354838709678, + "acc_stderr": 0.028343787250540615, + "acc_norm": 0.5419354838709678, + "acc_norm_stderr": 0.028343787250540615 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 
0.7136752136752137, + "acc_stderr": 0.029614323690456648, + "acc_norm": 0.7136752136752137, + "acc_norm_stderr": 0.029614323690456648 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5207547169811321, + "acc_stderr": 0.030746349975723463, + "acc_norm": 0.5207547169811321, + "acc_norm_stderr": 0.030746349975723463 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.04769300568972744, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.04769300568972744 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.029116617606083015, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.029116617606083015 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3576158940397351, + "acc_stderr": 0.03913453431177258, + "acc_norm": 0.3576158940397351, + "acc_norm_stderr": 0.03913453431177258 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6467661691542289, + "acc_stderr": 0.03379790611796777, + "acc_norm": 0.6467661691542289, + "acc_norm_stderr": 0.03379790611796777 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4508670520231214, + "acc_stderr": 0.037940126746970296, + "acc_norm": 0.4508670520231214, + "acc_norm_stderr": 0.037940126746970296 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.4126984126984127, + "acc_stderr": 0.025355741263055277, + "acc_norm": 0.4126984126984127, + "acc_norm_stderr": 0.025355741263055277 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4375, + "acc_stderr": 0.04148415739394154, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.04148415739394154 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.72, + "acc_stderr": 0.045126085985421255, + "acc_norm": 0.72, + "acc_norm_stderr": 0.045126085985421255 + }, + 
"harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5289017341040463, + "acc_stderr": 0.02687408588351835, + "acc_norm": 0.5289017341040463, + "acc_norm_stderr": 0.02687408588351835 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.49693251533742333, + "acc_stderr": 0.03928297078179663, + "acc_norm": 0.49693251533742333, + "acc_norm_stderr": 0.03928297078179663 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5216049382716049, + "acc_stderr": 0.02779476010500873, + "acc_norm": 0.5216049382716049, + "acc_norm_stderr": 0.02779476010500873 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5906735751295337, + "acc_stderr": 0.03548608168860806, + "acc_norm": 0.5906735751295337, + "acc_norm_stderr": 0.03548608168860806 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.42105263157894735, + "acc_stderr": 0.046446020912223177, + "acc_norm": 0.42105263157894735, + "acc_norm_stderr": 0.046446020912223177 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.563302752293578, + "acc_stderr": 0.021264820158714205, + "acc_norm": 0.563302752293578, + "acc_norm_stderr": 0.021264820158714205 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4523809523809524, + "acc_stderr": 0.044518079590553275, + "acc_norm": 0.4523809523809524, + "acc_norm_stderr": 0.044518079590553275 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5130718954248366, + "acc_stderr": 0.028620130800700246, + "acc_norm": 0.5130718954248366, + "acc_norm_stderr": 0.028620130800700246 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7024793388429752, + "acc_stderr": 0.04173349148083499, + "acc_norm": 0.7024793388429752, + 
"acc_norm_stderr": 0.04173349148083499 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4868421052631579, + "acc_stderr": 0.040675331363091746, + "acc_norm": 0.4868421052631579, + "acc_norm_stderr": 0.040675331363091746 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.43137254901960786, + "acc_stderr": 0.02003639376835263, + "acc_norm": 0.43137254901960786, + "acc_norm_stderr": 0.02003639376835263 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.33687943262411346, + "acc_stderr": 0.02819553487396673, + "acc_norm": 0.33687943262411346, + "acc_norm_stderr": 0.02819553487396673 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.41964285714285715, + "acc_stderr": 0.04684099321077106, + "acc_norm": 0.41964285714285715, + "acc_norm_stderr": 0.04684099321077106 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.03350991604696044, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.03350991604696044 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.29497206703910617, + "acc_stderr": 0.015251931579208176, + "acc_norm": 0.29497206703910617, + "acc_norm_stderr": 0.015251931579208176 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.68, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.68, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.02952009569768776, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.02952009569768776 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5959183673469388, + "acc_stderr": 0.03141470802586589, + "acc_norm": 0.5959183673469388, + "acc_norm_stderr": 0.03141470802586589 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 
0.6455696202531646, + "acc_stderr": 0.0311373042971858, + "acc_norm": 0.6455696202531646, + "acc_norm_stderr": 0.0311373042971858 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3644067796610169, + "acc_stderr": 0.012291694983056477, + "acc_norm": 0.3644067796610169, + "acc_norm_stderr": 0.012291694983056477 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5588235294117647, + "acc_stderr": 0.034849415144292316, + "acc_norm": 0.5588235294117647, + "acc_norm_stderr": 0.034849415144292316 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.03756335775187896, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.03756335775187896 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.32802937576499386, + "mc1_stderr": 0.01643563293281505, + "mc2": 0.5112192870582224, + "mc2_stderr": 0.016020678314324396 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4982290436835891, + "acc_stderr": 0.017190246276231863, + "acc_norm": 0.5147579693034239, + "acc_norm_stderr": 0.017182864434998564 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + 
"harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "haoranxu/Llama-3-Instruct-8B-CPO-SimPO", + "model_sha": "3ca4b5c3a6395ff090e1039d55ac1f6120777302", + "model_dtype": "torch.float16", + "lighteval_sha": "", + 
"num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/hchung1017/linear-merge/result_2024-03-14 07:48:28.json b/hchung1017/linear-merge/result_2024-03-14 07:48:28.json new file mode 100644 index 0000000000000000000000000000000000000000..4af50b4ba4eef47247ad9b2b26d12745f6886712 --- /dev/null +++ b/hchung1017/linear-merge/result_2024-03-14 07:48:28.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.44197952218430037, + "acc_stderr": 0.014512682523128343, + "acc_norm": 0.5008532423208191, + "acc_norm_stderr": 0.014611369529813279 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4395538737303326, + "acc_stderr": 0.004953184534223987, + "acc_norm": 0.5971917944632543, + "acc_norm_stderr": 0.00489460429340565 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5730994152046783, + "acc_stderr": 0.03793620616529916, + "acc_norm": 0.5730994152046783, + "acc_norm_stderr": 0.03793620616529916 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6213592233009708, + "acc_stderr": 0.04802694698258973, + "acc_norm": 0.6213592233009708, + "acc_norm_stderr": 0.04802694698258973 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.632183908045977, + "acc_stderr": 0.01724382889184628, + "acc_norm": 0.632183908045977, + "acc_norm_stderr": 0.01724382889184628 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.04292596718256981, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.04292596718256981 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4425531914893617, + "acc_stderr": 0.03246956919789959, + "acc_norm": 0.4425531914893617, + "acc_norm_stderr": 0.03246956919789959 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.463855421686747, + 
"acc_stderr": 0.03882310850890594, + "acc_norm": 0.463855421686747, + "acc_norm_stderr": 0.03882310850890594 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5401929260450161, + "acc_stderr": 0.028306190403305696, + "acc_norm": 0.5401929260450161, + "acc_norm_stderr": 0.028306190403305696 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.547085201793722, + "acc_stderr": 0.03340867501923324, + "acc_norm": 0.547085201793722, + "acc_norm_stderr": 0.03340867501923324 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4732824427480916, + "acc_stderr": 0.04379024936553893, + "acc_norm": 0.4732824427480916, + "acc_norm_stderr": 0.04379024936553893 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.494949494949495, + "acc_stderr": 0.035621707606254015, + "acc_norm": 0.494949494949495, + "acc_norm_stderr": 0.035621707606254015 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4068965517241379, + "acc_stderr": 0.04093793981266237, + "acc_norm": 0.4068965517241379, + "acc_norm_stderr": 0.04093793981266237 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.042207736591714534, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.042207736591714534 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5336134453781513, + "acc_stderr": 0.03240501447690071, + "acc_norm": 0.5336134453781513, + "acc_norm_stderr": 0.03240501447690071 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5, + "acc_stderr": 0.02535100632816969, + "acc_norm": 0.5, + "acc_norm_stderr": 0.02535100632816969 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 
0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5925925925925926, + "acc_stderr": 0.04750077341199984, + "acc_norm": 0.5925925925925926, + "acc_norm_stderr": 0.04750077341199984 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4236453201970443, + "acc_stderr": 0.03476725747649037, + "acc_norm": 0.4236453201970443, + "acc_norm_stderr": 0.03476725747649037 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5516129032258065, + "acc_stderr": 0.02829205683011273, + "acc_norm": 0.5516129032258065, + "acc_norm_stderr": 0.02829205683011273 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7393162393162394, + "acc_stderr": 0.028760348956523414, + "acc_norm": 0.7393162393162394, + "acc_norm_stderr": 0.028760348956523414 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.49433962264150944, + "acc_stderr": 0.030770900763851316, + "acc_norm": 0.49433962264150944, + "acc_norm_stderr": 0.030770900763851316 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5818181818181818, + "acc_stderr": 0.04724577405731572, + "acc_norm": 0.5818181818181818, + "acc_norm_stderr": 0.04724577405731572 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.337037037037037, + "acc_stderr": 0.028820884666253255, + "acc_norm": 0.337037037037037, + "acc_norm_stderr": 0.028820884666253255 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.038020397601079024, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.038020397601079024 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5671641791044776, + "acc_stderr": 0.03503490923673282, + "acc_norm": 0.5671641791044776, + "acc_norm_stderr": 0.03503490923673282 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3699421965317919, + "acc_stderr": 0.03681229633394319, + "acc_norm": 0.3699421965317919, + "acc_norm_stderr": 0.03681229633394319 + }, + 
"harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.024594975128920935, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.024594975128920935 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5138888888888888, + "acc_stderr": 0.04179596617581, + "acc_norm": 0.5138888888888888, + "acc_norm_stderr": 0.04179596617581 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.62, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.62, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.48265895953757226, + "acc_stderr": 0.02690290045866664, + "acc_norm": 0.48265895953757226, + "acc_norm_stderr": 0.02690290045866664 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4601226993865031, + "acc_stderr": 0.03915857291436971, + "acc_norm": 0.4601226993865031, + "acc_norm_stderr": 0.03915857291436971 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5308641975308642, + "acc_stderr": 0.027767689606833932, + "acc_norm": 0.5308641975308642, + "acc_norm_stderr": 0.027767689606833932 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5854922279792746, + "acc_stderr": 0.03555300319557669, + "acc_norm": 0.5854922279792746, + "acc_norm_stderr": 0.03555300319557669 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3508771929824561, + "acc_stderr": 0.04489539350270697, + "acc_norm": 0.3508771929824561, + "acc_norm_stderr": 0.04489539350270697 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6495412844036698, + "acc_stderr": 0.020456077599824457, + "acc_norm": 0.6495412844036698, + "acc_norm_stderr": 
0.020456077599824457 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.36507936507936506, + "acc_stderr": 0.04306241259127153, + "acc_norm": 0.36507936507936506, + "acc_norm_stderr": 0.04306241259127153 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.477124183006536, + "acc_stderr": 0.028599936776089782, + "acc_norm": 0.477124183006536, + "acc_norm_stderr": 0.028599936776089782 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.043913262867240704, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.043913262867240704 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4934210526315789, + "acc_stderr": 0.040685900502249704, + "acc_norm": 0.4934210526315789, + "acc_norm_stderr": 0.040685900502249704 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3741830065359477, + "acc_stderr": 0.019576953122088857, + "acc_norm": 0.3741830065359477, + "acc_norm_stderr": 0.019576953122088857 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3546099290780142, + "acc_stderr": 0.028538650028878645, + "acc_norm": 0.3546099290780142, + "acc_norm_stderr": 0.028538650028878645 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.33035714285714285, + "acc_stderr": 0.04464285714285712, + "acc_norm": 0.33035714285714285, + "acc_norm_stderr": 0.04464285714285712 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.38425925925925924, + "acc_stderr": 0.03317354514310742, + "acc_norm": 0.38425925925925924, + "acc_norm_stderr": 0.03317354514310742 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.25251396648044694, + "acc_stderr": 0.014530330201468657, + "acc_norm": 0.25251396648044694, + "acc_norm_stderr": 0.014530330201468657 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, 
+ "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.57, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.57, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5110294117647058, + "acc_stderr": 0.030365446477275668, + "acc_norm": 0.5110294117647058, + "acc_norm_stderr": 0.030365446477275668 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.46938775510204084, + "acc_stderr": 0.031949171367580624, + "acc_norm": 0.46938775510204084, + "acc_norm_stderr": 0.031949171367580624 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.729957805907173, + "acc_stderr": 0.028900721906293426, + "acc_norm": 0.729957805907173, + "acc_norm_stderr": 0.028900721906293426 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.41134289439374183, + "acc_stderr": 0.012567882673803689, + "acc_norm": 0.41134289439374183, + "acc_norm_stderr": 0.012567882673803689 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5882352941176471, + "acc_stderr": 0.03454236585380609, + "acc_norm": 0.5882352941176471, + "acc_norm_stderr": 0.03454236585380609 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6424242424242425, + "acc_stderr": 0.03742597043806587, + "acc_norm": 0.6424242424242425, + "acc_norm_stderr": 0.03742597043806587 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2998776009791922, + "mc1_stderr": 0.016040352966713613, + "mc2": 0.4511652632717576, + "mc2_stderr": 0.015299783001739373 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4946871310507674, + "acc_stderr": 0.017189383627229687, + "acc_norm": 0.5926800472255017, + "acc_norm_stderr": 0.01689245669519127 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + 
"harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + 
"harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "hchung1017/linear-merge", + "model_sha": "6fae4b6f188a9c2bf066e949e70e69b66a1c6d3f", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/heavytail/kullm-mistral-S/result_2024-01-28 12:10:15.json b/heavytail/kullm-mistral-S/result_2024-01-28 12:10:15.json new file mode 100644 index 0000000000000000000000000000000000000000..d59d4c1d3244180ba16b04f7688cda856d4e5e4e --- /dev/null +++ b/heavytail/kullm-mistral-S/result_2024-01-28 12:10:15.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.5546075085324232, + "acc_stderr": 0.014523987638344072, + "acc_norm": 0.6092150170648464, + "acc_norm_stderr": 0.014258563880513778 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3036247759410476, + "acc_stderr": 0.004588827958775114, + "acc_norm": 0.3511252738498307, + "acc_norm_stderr": 0.00476346513903856 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.3508771929824561, + "acc_stderr": 0.03660298834049164, + "acc_norm": 0.3508771929824561, + "acc_norm_stderr": 0.03660298834049164 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2621359223300971, + "acc_stderr": 0.04354631077260595, + "acc_norm": 0.2621359223300971, + "acc_norm_stderr": 0.04354631077260595 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.36015325670498083, + "acc_stderr": 
0.017166362471369292, + "acc_norm": 0.36015325670498083, + "acc_norm_stderr": 0.017166362471369292 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.35555555555555557, + "acc_stderr": 0.04135176749720385, + "acc_norm": 0.35555555555555557, + "acc_norm_stderr": 0.04135176749720385 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2765957446808511, + "acc_stderr": 0.02924188386962881, + "acc_norm": 0.2765957446808511, + "acc_norm_stderr": 0.02924188386962881 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4036144578313253, + "acc_stderr": 0.03819486140758397, + "acc_norm": 0.4036144578313253, + "acc_norm_stderr": 0.03819486140758397 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3215434083601286, + "acc_stderr": 0.026527724079528872, + "acc_norm": 0.3215434083601286, + "acc_norm_stderr": 0.026527724079528872 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4304932735426009, + "acc_stderr": 0.0332319730294294, + "acc_norm": 0.4304932735426009, + "acc_norm_stderr": 0.0332319730294294 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3816793893129771, + "acc_stderr": 0.04260735157644559, + "acc_norm": 0.3816793893129771, + "acc_norm_stderr": 0.04260735157644559 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.2828282828282828, + "acc_stderr": 0.03208779558786753, + "acc_norm": 0.2828282828282828, + "acc_norm_stderr": 0.03208779558786753 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4068965517241379, + "acc_stderr": 0.04093793981266237, + "acc_norm": 0.4068965517241379, + "acc_norm_stderr": 0.04093793981266237 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + 
"acc_stderr": 0.04158307533083287, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.04158307533083287 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3067226890756303, + "acc_stderr": 0.02995382389188704, + "acc_norm": 0.3067226890756303, + "acc_norm_stderr": 0.02995382389188704 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2923076923076923, + "acc_stderr": 0.023060438380857726, + "acc_norm": 0.2923076923076923, + "acc_norm_stderr": 0.023060438380857726 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456344, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456344 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.26851851851851855, + "acc_stderr": 0.04284467968052191, + "acc_norm": 0.26851851851851855, + "acc_norm_stderr": 0.04284467968052191 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3103448275862069, + "acc_stderr": 0.03255086769970104, + "acc_norm": 0.3103448275862069, + "acc_norm_stderr": 0.03255086769970104 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2967741935483871, + "acc_stderr": 0.02598850079241189, + "acc_norm": 0.2967741935483871, + "acc_norm_stderr": 0.02598850079241189 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.4358974358974359, + "acc_stderr": 0.032485775115784, + "acc_norm": 0.4358974358974359, + "acc_norm_stderr": 0.032485775115784 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3169811320754717, + "acc_stderr": 0.028637235639800928, + "acc_norm": 0.3169811320754717, + "acc_norm_stderr": 0.028637235639800928 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.39090909090909093, + "acc_stderr": 0.04673752333670238, + "acc_norm": 0.39090909090909093, + "acc_norm_stderr": 0.04673752333670238 + }, + 
"harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3111111111111111, + "acc_stderr": 0.028226446749683515, + "acc_norm": 0.3111111111111111, + "acc_norm_stderr": 0.028226446749683515 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + "acc_stderr": 0.03684881521389023, + "acc_norm": 0.2847682119205298, + "acc_norm_stderr": 0.03684881521389023 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.40298507462686567, + "acc_stderr": 0.034683432951111266, + "acc_norm": 0.40298507462686567, + "acc_norm_stderr": 0.034683432951111266 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3468208092485549, + "acc_stderr": 0.036291466701596636, + "acc_norm": 0.3468208092485549, + "acc_norm_stderr": 0.036291466701596636 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.25396825396825395, + "acc_stderr": 0.02241804289111395, + "acc_norm": 0.25396825396825395, + "acc_norm_stderr": 0.02241804289111395 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2847222222222222, + "acc_stderr": 0.037738099906869355, + "acc_norm": 0.2847222222222222, + "acc_norm_stderr": 0.037738099906869355 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.37283236994219654, + "acc_stderr": 0.026033890613576284, + "acc_norm": 0.37283236994219654, + "acc_norm_stderr": 0.026033890613576284 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.27607361963190186, + "acc_stderr": 0.03512385283705051, + "acc_norm": 0.27607361963190186, + "acc_norm_stderr": 0.03512385283705051 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4012345679012346, + "acc_stderr": 0.02727258284983979, + "acc_norm": 0.4012345679012346, + 
"acc_norm_stderr": 0.02727258284983979 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.3160621761658031, + "acc_stderr": 0.03355397369686174, + "acc_norm": 0.3160621761658031, + "acc_norm_stderr": 0.03355397369686174 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.04185774424022056, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.04185774424022056 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.29357798165137616, + "acc_stderr": 0.019525151122639667, + "acc_norm": 0.29357798165137616, + "acc_norm_stderr": 0.019525151122639667 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.23015873015873015, + "acc_stderr": 0.03764950879790604, + "acc_norm": 0.23015873015873015, + "acc_norm_stderr": 0.03764950879790604 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.40522875816993464, + "acc_stderr": 0.028110928492809075, + "acc_norm": 0.40522875816993464, + "acc_norm_stderr": 0.028110928492809075 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.4132231404958678, + "acc_stderr": 0.04495087843548408, + "acc_norm": 0.4132231404958678, + "acc_norm_stderr": 0.04495087843548408 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3815789473684211, + "acc_stderr": 0.03953173377749194, + "acc_norm": 0.3815789473684211, + "acc_norm_stderr": 0.03953173377749194 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3202614379084967, + "acc_stderr": 0.018875682938069446, + "acc_norm": 0.3202614379084967, + "acc_norm_stderr": 0.018875682938069446 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3333333333333333, + "acc_stderr": 
0.02812163604063989, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.02812163604063989 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.23214285714285715, + "acc_stderr": 0.04007341809755807, + "acc_norm": 0.23214285714285715, + "acc_norm_stderr": 0.04007341809755807 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.41203703703703703, + "acc_stderr": 0.03356787758160835, + "acc_norm": 0.41203703703703703, + "acc_norm_stderr": 0.03356787758160835 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2435754189944134, + "acc_stderr": 0.01435591196476786, + "acc_norm": 0.2435754189944134, + "acc_norm_stderr": 0.01435591196476786 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3897058823529412, + "acc_stderr": 0.029624663581159703, + "acc_norm": 0.3897058823529412, + "acc_norm_stderr": 0.029624663581159703 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4448979591836735, + "acc_stderr": 0.031814251181977865, + "acc_norm": 0.4448979591836735, + "acc_norm_stderr": 0.031814251181977865 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2489451476793249, + "acc_stderr": 0.028146970599422644, + "acc_norm": 0.2489451476793249, + "acc_norm_stderr": 0.028146970599422644 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.31486310299869624, + "acc_stderr": 0.01186256175571594, + "acc_norm": 0.31486310299869624, + "acc_norm_stderr": 0.01186256175571594 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.31862745098039214, + "acc_stderr": 0.032702871814820796, + "acc_norm": 0.31862745098039214, + "acc_norm_stderr": 0.032702871814820796 + }, + 
"harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3151515151515151, + "acc_stderr": 0.0362773057502241, + "acc_norm": 0.3151515151515151, + "acc_norm_stderr": 0.0362773057502241 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.5532435740514076, + "mc1_stderr": 0.017403977522557144, + "mc2": 0.680298700277158, + "mc2_stderr": 0.014970122499037683 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3482880755608028, + "acc_stderr": 0.016379926739148037, + "acc_norm": 0.45336481700118064, + "acc_norm_stderr": 0.017115418225226862 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + 
"harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "heavytail/kullm-mistral-S", + "model_sha": "fb4afbe49cdb3f281d420d5f7b210f84704510bf", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/heavytail/kullm-mistral/result_2024-01-28 11:42:12.json b/heavytail/kullm-mistral/result_2024-01-28 11:42:12.json new file mode 100644 index 0000000000000000000000000000000000000000..631d5cc4575cb13faee3cedf0e1074e4c528ab78 --- /dev/null +++ b/heavytail/kullm-mistral/result_2024-01-28 11:42:12.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.5443686006825939, + 
"acc_stderr": 0.014553749939306863, + "acc_norm": 0.5981228668941979, + "acc_norm_stderr": 0.01432726861457827 + }, + "harness|ko_hellaswag|10": { + "acc": 0.28649671380203146, + "acc_stderr": 0.004512002459757947, + "acc_norm": 0.3240390360485959, + "acc_norm_stderr": 0.00467058188478118 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.29239766081871343, + "acc_stderr": 0.034886477134579215, + "acc_norm": 0.29239766081871343, + "acc_norm_stderr": 0.034886477134579215 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2524271844660194, + "acc_stderr": 0.04301250399690878, + "acc_norm": 0.2524271844660194, + "acc_norm_stderr": 0.04301250399690878 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.34610472541507026, + "acc_stderr": 0.01701196526641207, + "acc_norm": 0.34610472541507026, + "acc_norm_stderr": 0.01701196526641207 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.362962962962963, + "acc_stderr": 0.041539484047424, + "acc_norm": 0.362962962962963, + "acc_norm_stderr": 0.041539484047424 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.19148936170212766, + "acc_stderr": 0.02572214999263777, + "acc_norm": 0.19148936170212766, + "acc_norm_stderr": 0.02572214999263777 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3855421686746988, + "acc_stderr": 0.03789134424611549, + "acc_norm": 0.3855421686746988, + "acc_norm_stderr": 0.03789134424611549 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3022508038585209, + "acc_stderr": 0.02608270069539966, + "acc_norm": 0.3022508038585209, + "acc_norm_stderr": 0.02608270069539966 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.34977578475336324, + "acc_stderr": 0.03200736719484503, + "acc_norm": 0.34977578475336324, + "acc_norm_stderr": 0.03200736719484503 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2595419847328244, 
+ "acc_stderr": 0.03844876139785271, + "acc_norm": 0.2595419847328244, + "acc_norm_stderr": 0.03844876139785271 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.2676767676767677, + "acc_stderr": 0.03154449888270285, + "acc_norm": 0.2676767676767677, + "acc_norm_stderr": 0.03154449888270285 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3310344827586207, + "acc_stderr": 0.03921545312467122, + "acc_norm": 0.3310344827586207, + "acc_norm_stderr": 0.03921545312467122 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.18627450980392157, + "acc_stderr": 0.03873958714149352, + "acc_norm": 0.18627450980392157, + "acc_norm_stderr": 0.03873958714149352 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.21428571428571427, + "acc_stderr": 0.026653531596715484, + "acc_norm": 0.21428571428571427, + "acc_norm_stderr": 0.026653531596715484 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2128205128205128, + "acc_stderr": 0.020752423722128002, + "acc_norm": 0.2128205128205128, + "acc_norm_stderr": 0.020752423722128002 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.23148148148148148, + "acc_stderr": 0.04077494709252627, + "acc_norm": 0.23148148148148148, + "acc_norm_stderr": 0.04077494709252627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3054187192118227, + "acc_stderr": 0.03240661565868408, + "acc_norm": 0.3054187192118227, + "acc_norm_stderr": 0.03240661565868408 + }, + "harness|ko_mmlu_high_school_biology|5": { + 
"acc": 0.36129032258064514, + "acc_stderr": 0.027327548447957536, + "acc_norm": 0.36129032258064514, + "acc_norm_stderr": 0.027327548447957536 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.03255326307272485, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.03255326307272485 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.27547169811320754, + "acc_stderr": 0.027495663683724077, + "acc_norm": 0.27547169811320754, + "acc_norm_stderr": 0.027495663683724077 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.32727272727272727, + "acc_stderr": 0.0449429086625209, + "acc_norm": 0.32727272727272727, + "acc_norm_stderr": 0.0449429086625209 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.026962424325073838, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.026962424325073838 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2582781456953642, + "acc_stderr": 0.035737053147634576, + "acc_norm": 0.2582781456953642, + "acc_norm_stderr": 0.035737053147634576 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.3034825870646766, + "acc_stderr": 0.03251006816458619, + "acc_norm": 0.3034825870646766, + "acc_norm_stderr": 0.03251006816458619 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2254335260115607, + "acc_stderr": 0.03186209851641145, + "acc_norm": 0.2254335260115607, + "acc_norm_stderr": 0.03186209851641145 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.022644212615525218, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.022644212615525218 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.22916666666666666, + "acc_stderr": 0.03514697467862388, + "acc_norm": 0.22916666666666666, + "acc_norm_stderr": 0.03514697467862388 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932269, + "acc_norm": 0.22, + "acc_norm_stderr": 
0.04163331998932269 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.29190751445086704, + "acc_stderr": 0.02447699407624732, + "acc_norm": 0.29190751445086704, + "acc_norm_stderr": 0.02447699407624732 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3067484662576687, + "acc_stderr": 0.036230899157241474, + "acc_norm": 0.3067484662576687, + "acc_norm_stderr": 0.036230899157241474 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.026229649178821163, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.026229649178821163 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.2538860103626943, + "acc_stderr": 0.03141024780565318, + "acc_norm": 0.2538860103626943, + "acc_norm_stderr": 0.03141024780565318 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.04049339297748141, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.04049339297748141 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.25504587155963304, + "acc_stderr": 0.01868850085653583, + "acc_norm": 0.25504587155963304, + "acc_norm_stderr": 0.01868850085653583 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.15873015873015872, + "acc_stderr": 0.032684540130117436, + "acc_norm": 0.15873015873015872, + "acc_norm_stderr": 0.032684540130117436 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.02582916327275748, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.02582916327275748 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + 
"acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2892561983471074, + "acc_stderr": 0.04139112727635464, + "acc_norm": 0.2892561983471074, + "acc_norm_stderr": 0.04139112727635464 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.18421052631578946, + "acc_stderr": 0.0315469804508223, + "acc_norm": 0.18421052631578946, + "acc_norm_stderr": 0.0315469804508223 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3006535947712418, + "acc_stderr": 0.01855063450295296, + "acc_norm": 0.3006535947712418, + "acc_norm_stderr": 0.01855063450295296 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.30141843971631205, + "acc_stderr": 0.02737412888263115, + "acc_norm": 0.30141843971631205, + "acc_norm_stderr": 0.02737412888263115 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.26785714285714285, + "acc_stderr": 0.042032772914677614, + "acc_norm": 0.26785714285714285, + "acc_norm_stderr": 0.042032772914677614 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4212962962962963, + "acc_stderr": 0.03367462138896078, + "acc_norm": 0.4212962962962963, + "acc_norm_stderr": 0.03367462138896078 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2435754189944134, + "acc_stderr": 0.014355911964767867, + "acc_norm": 0.2435754189944134, + "acc_norm_stderr": 0.014355911964767867 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036845, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036845 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.43014705882352944, + "acc_stderr": 0.030074971917302875, + "acc_norm": 0.43014705882352944, + "acc_norm_stderr": 0.030074971917302875 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.33877551020408164, + 
"acc_stderr": 0.030299506562154185, + "acc_norm": 0.33877551020408164, + "acc_norm_stderr": 0.030299506562154185 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.27848101265822783, + "acc_stderr": 0.029178682304842555, + "acc_norm": 0.27848101265822783, + "acc_norm_stderr": 0.029178682304842555 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2953063885267275, + "acc_stderr": 0.011651061936208826, + "acc_norm": 0.2953063885267275, + "acc_norm_stderr": 0.011651061936208826 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.29901960784313725, + "acc_stderr": 0.03213325717373616, + "acc_norm": 0.29901960784313725, + "acc_norm_stderr": 0.03213325717373616 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.03453131801885415, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.03453131801885415 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.4834761321909425, + "mc1_stderr": 0.01749394019005773, + "mc2": 0.6063802185148195, + "mc2_stderr": 0.015877965041312773 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.29634002361275086, + "acc_stderr": 0.015699701628594232, + "acc_norm": 0.4002361275088548, + "acc_norm_stderr": 0.016844693510505063 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + 
"harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + 
"model_name": "heavytail/kullm-mistral", + "model_sha": "3c3af36e2013270ba186d99c5a8c68378cbcd072", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/heavytail/kullm-solar-S/result_2024-01-28 12:08:17.json b/heavytail/kullm-solar-S/result_2024-01-28 12:08:17.json new file mode 100644 index 0000000000000000000000000000000000000000..8def66878adc88eeacc3ca1cb45dbff4f9bdfde2 --- /dev/null +++ b/heavytail/kullm-solar-S/result_2024-01-28 12:08:17.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.6390784982935154, + "acc_stderr": 0.014034761386175458, + "acc_norm": 0.7013651877133106, + "acc_norm_stderr": 0.013374078615068754 + }, + "harness|ko_hellaswag|10": { + "acc": 0.35321649073889666, + "acc_stderr": 0.004769924131304647, + "acc_norm": 0.43537143995220073, + "acc_norm_stderr": 0.004947922692688834 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5146198830409356, + "acc_stderr": 0.03833185275213025, + "acc_norm": 0.5146198830409356, + "acc_norm_stderr": 0.03833185275213025 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5145631067961165, + "acc_stderr": 0.04948637324026637, + "acc_norm": 0.5145631067961165, + "acc_norm_stderr": 0.04948637324026637 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5683269476372924, + "acc_stderr": 0.017712228939299798, + "acc_norm": 0.5683269476372924, + "acc_norm_stderr": 0.017712228939299798 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4962962962962963, + "acc_stderr": 0.04319223625811331, + "acc_norm": 0.4962962962962963, + "acc_norm_stderr": 0.04319223625811331 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3702127659574468, + "acc_stderr": 
0.03156564682236784, + "acc_norm": 0.3702127659574468, + "acc_norm_stderr": 0.03156564682236784 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.5421686746987951, + "acc_stderr": 0.038786267710023595, + "acc_norm": 0.5421686746987951, + "acc_norm_stderr": 0.038786267710023595 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4919614147909968, + "acc_stderr": 0.02839442137098453, + "acc_norm": 0.4919614147909968, + "acc_norm_stderr": 0.02839442137098453 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5426008968609866, + "acc_stderr": 0.03343577705583065, + "acc_norm": 0.5426008968609866, + "acc_norm_stderr": 0.03343577705583065 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48091603053435117, + "acc_stderr": 0.04382094705550988, + "acc_norm": 0.48091603053435117, + "acc_norm_stderr": 0.04382094705550988 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5707070707070707, + "acc_stderr": 0.035265527246011986, + "acc_norm": 0.5707070707070707, + "acc_norm_stderr": 0.035265527246011986 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5241379310344828, + "acc_stderr": 0.0416180850350153, + "acc_norm": 0.5241379310344828, + "acc_norm_stderr": 0.0416180850350153 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.04835503696107223, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.04835503696107223 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.47478991596638653, + "acc_stderr": 0.0324371805513741, + "acc_norm": 0.47478991596638653, + "acc_norm_stderr": 0.0324371805513741 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.44871794871794873, + "acc_stderr": 0.02521731518484648, + "acc_norm": 0.44871794871794873, + "acc_norm_stderr": 0.02521731518484648 + }, + 
"harness|ko_mmlu_computer_security|5": { + "acc": 0.53, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.53, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.047609522856952365, + "acc_norm": 0.34, + "acc_norm_stderr": 0.047609522856952365 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.0471282125742677, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.0471282125742677 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4236453201970443, + "acc_stderr": 0.034767257476490364, + "acc_norm": 0.4236453201970443, + "acc_norm_stderr": 0.034767257476490364 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4645161290322581, + "acc_stderr": 0.028372287797962956, + "acc_norm": 0.4645161290322581, + "acc_norm_stderr": 0.028372287797962956 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6196581196581197, + "acc_stderr": 0.031804252043840985, + "acc_norm": 0.6196581196581197, + "acc_norm_stderr": 0.031804252043840985 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.42641509433962266, + "acc_stderr": 0.03043779434298305, + "acc_norm": 0.42641509433962266, + "acc_norm_stderr": 0.03043779434298305 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5545454545454546, + "acc_stderr": 0.047605488214603246, + "acc_norm": 0.5545454545454546, + "acc_norm_stderr": 0.047605488214603246 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3037037037037037, + "acc_stderr": 0.02803792996911499, + "acc_norm": 0.3037037037037037, + "acc_norm_stderr": 0.02803792996911499 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3509933774834437, + "acc_stderr": 0.03896981964257375, + "acc_norm": 0.3509933774834437, + "acc_norm_stderr": 0.03896981964257375 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5771144278606966, + "acc_stderr": 0.03493231777421282, + "acc_norm": 0.5771144278606966, + "acc_norm_stderr": 
0.03493231777421282 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.49710982658959535, + "acc_stderr": 0.03812400565974835, + "acc_norm": 0.49710982658959535, + "acc_norm_stderr": 0.03812400565974835 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3386243386243386, + "acc_stderr": 0.024373197867983067, + "acc_norm": 0.3386243386243386, + "acc_norm_stderr": 0.024373197867983067 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4027777777777778, + "acc_stderr": 0.04101405519842425, + "acc_norm": 0.4027777777777778, + "acc_norm_stderr": 0.04101405519842425 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.65, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.65, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4797687861271676, + "acc_stderr": 0.026897049996382875, + "acc_norm": 0.4797687861271676, + "acc_norm_stderr": 0.026897049996382875 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.49693251533742333, + "acc_stderr": 0.03928297078179663, + "acc_norm": 0.49693251533742333, + "acc_norm_stderr": 0.03928297078179663 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5617283950617284, + "acc_stderr": 0.02760791408740049, + "acc_norm": 0.5617283950617284, + "acc_norm_stderr": 0.02760791408740049 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5595854922279793, + "acc_stderr": 0.035827245300360945, + "acc_norm": 0.5595854922279793, + "acc_norm_stderr": 0.035827245300360945 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2894736842105263, + "acc_stderr": 0.04266339443159394, + "acc_norm": 0.2894736842105263, + 
"acc_norm_stderr": 0.04266339443159394 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5651376146788991, + "acc_stderr": 0.021254631465609266, + "acc_norm": 0.5651376146788991, + "acc_norm_stderr": 0.021254631465609266 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30158730158730157, + "acc_stderr": 0.04104947269903394, + "acc_norm": 0.30158730158730157, + "acc_norm_stderr": 0.04104947269903394 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5, + "acc_stderr": 0.028629916715693413, + "acc_norm": 0.5, + "acc_norm_stderr": 0.028629916715693413 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.53, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.53, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.512396694214876, + "acc_stderr": 0.045629515481807666, + "acc_norm": 0.512396694214876, + "acc_norm_stderr": 0.045629515481807666 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.45394736842105265, + "acc_stderr": 0.04051646342874141, + "acc_norm": 0.45394736842105265, + "acc_norm_stderr": 0.04051646342874141 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4526143790849673, + "acc_stderr": 0.020136790918492523, + "acc_norm": 0.4526143790849673, + "acc_norm_stderr": 0.020136790918492523 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.35815602836879434, + "acc_stderr": 0.028602085862759415, + "acc_norm": 0.35815602836879434, + "acc_norm_stderr": 0.028602085862759415 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.04547960999764376, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.04547960999764376 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.47685185185185186, + "acc_stderr": 0.03406315360711507, + "acc_norm": 0.47685185185185186, + "acc_norm_stderr": 0.03406315360711507 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2681564245810056, + "acc_stderr": 
0.014816119635316996, + "acc_norm": 0.2681564245810056, + "acc_norm_stderr": 0.014816119635316996 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.04960449637488584, + "acc_norm": 0.42, + "acc_norm_stderr": 0.04960449637488584 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5441176470588235, + "acc_stderr": 0.030254372573976715, + "acc_norm": 0.5441176470588235, + "acc_norm_stderr": 0.030254372573976715 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5346938775510204, + "acc_stderr": 0.03193207024425314, + "acc_norm": 0.5346938775510204, + "acc_norm_stderr": 0.03193207024425314 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.46835443037974683, + "acc_stderr": 0.03248197400511075, + "acc_norm": 0.46835443037974683, + "acc_norm_stderr": 0.03248197400511075 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.39374185136897, + "acc_stderr": 0.012478532272564423, + "acc_norm": 0.39374185136897, + "acc_norm_stderr": 0.012478532272564423 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.3627450980392157, + "acc_stderr": 0.03374499356319355, + "acc_norm": 0.3627450980392157, + "acc_norm_stderr": 0.03374499356319355 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.34545454545454546, + "acc_stderr": 0.037131580674819135, + "acc_norm": 0.34545454545454546, + "acc_norm_stderr": 0.037131580674819135 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.605875152998776, + "mc1_stderr": 0.01710658814070033, + "mc2": 0.7145811998930518, + "mc2_stderr": 0.01433447372434486 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3577331759149941, + "acc_stderr": 0.01647980893574998, + "acc_norm": 0.46635182998819363, + "acc_norm_stderr": 0.017151384117131865 + } + }, + "versions": { + "all": 0, + 
"harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + 
"harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "heavytail/kullm-solar-S", + "model_sha": "663e0a007364de6866d636d01e853544e26ae17a", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/heavytail/kullm-solar/result_2024-01-28 12:12:35.json b/heavytail/kullm-solar/result_2024-01-28 12:12:35.json new file mode 100644 index 0000000000000000000000000000000000000000..af07180748ed3efa983d8171bfdb23d1217e2516 --- /dev/null +++ b/heavytail/kullm-solar/result_2024-01-28 12:12:35.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.6168941979522184, + "acc_stderr": 0.014206472661672881, + "acc_norm": 0.6808873720136519, + "acc_norm_stderr": 0.0136216961191733 + }, + "harness|ko_hellaswag|10": { + "acc": 0.32951603266281615, + "acc_stderr": 0.004690768393854471, + "acc_norm": 0.40967934674367656, + "acc_norm_stderr": 0.00490769472793569 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.38011695906432746, + "acc_stderr": 0.03722965741385539, + "acc_norm": 0.38011695906432746, + "acc_norm_stderr": 0.03722965741385539 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4368932038834951, + "acc_stderr": 
0.049111471073657764, + "acc_norm": 0.4368932038834951, + "acc_norm_stderr": 0.049111471073657764 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5478927203065134, + "acc_stderr": 0.017797751493865636, + "acc_norm": 0.5478927203065134, + "acc_norm_stderr": 0.017797751493865636 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3925925925925926, + "acc_stderr": 0.04218506215368879, + "acc_norm": 0.3925925925925926, + "acc_norm_stderr": 0.04218506215368879 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.451063829787234, + "acc_stderr": 0.03252909619613197, + "acc_norm": 0.451063829787234, + "acc_norm_stderr": 0.03252909619613197 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4879518072289157, + "acc_stderr": 0.038913644958358196, + "acc_norm": 0.4879518072289157, + "acc_norm_stderr": 0.038913644958358196 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.41479099678456594, + "acc_stderr": 0.02798268045975956, + "acc_norm": 0.41479099678456594, + "acc_norm_stderr": 0.02798268045975956 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.515695067264574, + "acc_stderr": 0.0335412657542081, + "acc_norm": 0.515695067264574, + "acc_norm_stderr": 0.0335412657542081 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5114503816793893, + "acc_stderr": 0.043841400240780176, + "acc_norm": 0.5114503816793893, + "acc_norm_stderr": 0.043841400240780176 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5151515151515151, + "acc_stderr": 0.03560716516531061, + "acc_norm": 0.5151515151515151, + "acc_norm_stderr": 0.03560716516531061 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.45517241379310347, + 
"acc_stderr": 0.04149886942192118, + "acc_norm": 0.45517241379310347, + "acc_norm_stderr": 0.04149886942192118 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.4019607843137255, + "acc_stderr": 0.048786087144669955, + "acc_norm": 0.4019607843137255, + "acc_norm_stderr": 0.048786087144669955 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5294117647058824, + "acc_stderr": 0.03242225027115006, + "acc_norm": 0.5294117647058824, + "acc_norm_stderr": 0.03242225027115006 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4948717948717949, + "acc_stderr": 0.02534967290683866, + "acc_norm": 0.4948717948717949, + "acc_norm_stderr": 0.02534967290683866 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.62, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.62, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5277777777777778, + "acc_stderr": 0.048262172941398944, + "acc_norm": 0.5277777777777778, + "acc_norm_stderr": 0.048262172941398944 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3497536945812808, + "acc_stderr": 0.03355400904969565, + "acc_norm": 0.3497536945812808, + "acc_norm_stderr": 0.03355400904969565 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.45161290322580644, + "acc_stderr": 0.028310500348568385, + "acc_norm": 0.45161290322580644, + "acc_norm_stderr": 0.028310500348568385 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6837606837606838, + "acc_stderr": 0.030463656747340254, + "acc_norm": 0.6837606837606838, + "acc_norm_stderr": 0.030463656747340254 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4490566037735849, + "acc_stderr": 0.030612730713641092, + "acc_norm": 0.4490566037735849, + "acc_norm_stderr": 0.030612730713641092 + }, + 
"harness|ko_mmlu_public_relations|5": { + "acc": 0.6181818181818182, + "acc_stderr": 0.04653429807913507, + "acc_norm": 0.6181818181818182, + "acc_norm_stderr": 0.04653429807913507 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.027940457136228402, + "acc_norm": 0.3, + "acc_norm_stderr": 0.027940457136228402 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3841059602649007, + "acc_stderr": 0.03971301814719198, + "acc_norm": 0.3841059602649007, + "acc_norm_stderr": 0.03971301814719198 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5920398009950248, + "acc_stderr": 0.03475116365194092, + "acc_norm": 0.5920398009950248, + "acc_norm_stderr": 0.03475116365194092 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4624277456647399, + "acc_stderr": 0.0380168510452446, + "acc_norm": 0.4624277456647399, + "acc_norm_stderr": 0.0380168510452446 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3386243386243386, + "acc_stderr": 0.024373197867983067, + "acc_norm": 0.3386243386243386, + "acc_norm_stderr": 0.024373197867983067 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4236111111111111, + "acc_stderr": 0.041321250197233685, + "acc_norm": 0.4236111111111111, + "acc_norm_stderr": 0.041321250197233685 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.41040462427745666, + "acc_stderr": 0.02648339204209818, + "acc_norm": 0.41040462427745666, + "acc_norm_stderr": 0.02648339204209818 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3374233128834356, + "acc_stderr": 0.03714908409935575, + "acc_norm": 0.3374233128834356, + "acc_norm_stderr": 0.03714908409935575 + }, + 
"harness|ko_mmlu_prehistory|5": { + "acc": 0.4876543209876543, + "acc_stderr": 0.027812262269327228, + "acc_norm": 0.4876543209876543, + "acc_norm_stderr": 0.027812262269327228 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5129533678756477, + "acc_stderr": 0.03607228061047749, + "acc_norm": 0.5129533678756477, + "acc_norm_stderr": 0.03607228061047749 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3684210526315789, + "acc_stderr": 0.04537815354939391, + "acc_norm": 0.3684210526315789, + "acc_norm_stderr": 0.04537815354939391 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6091743119266055, + "acc_stderr": 0.02092005834611106, + "acc_norm": 0.6091743119266055, + "acc_norm_stderr": 0.02092005834611106 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.36507936507936506, + "acc_stderr": 0.043062412591271526, + "acc_norm": 0.36507936507936506, + "acc_norm_stderr": 0.043062412591271526 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.477124183006536, + "acc_stderr": 0.028599936776089786, + "acc_norm": 0.477124183006536, + "acc_norm_stderr": 0.028599936776089786 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.512396694214876, + "acc_stderr": 0.04562951548180765, + "acc_norm": 0.512396694214876, + "acc_norm_stderr": 0.04562951548180765 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3881578947368421, + "acc_stderr": 0.03965842097512744, + "acc_norm": 0.3881578947368421, + "acc_norm_stderr": 0.03965842097512744 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.020087362076702846, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 
0.020087362076702846 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.35106382978723405, + "acc_stderr": 0.028473501272963758, + "acc_norm": 0.35106382978723405, + "acc_norm_stderr": 0.028473501272963758 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.44642857142857145, + "acc_stderr": 0.04718471485219588, + "acc_norm": 0.44642857142857145, + "acc_norm_stderr": 0.04718471485219588 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5509259259259259, + "acc_stderr": 0.03392238405321616, + "acc_norm": 0.5509259259259259, + "acc_norm_stderr": 0.03392238405321616 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23910614525139665, + "acc_stderr": 0.014265554192331144, + "acc_norm": 0.23910614525139665, + "acc_norm_stderr": 0.014265554192331144 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5514705882352942, + "acc_stderr": 0.030211479609121596, + "acc_norm": 0.5514705882352942, + "acc_norm_stderr": 0.030211479609121596 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5265306122448979, + "acc_stderr": 0.03196412734523272, + "acc_norm": 0.5265306122448979, + "acc_norm_stderr": 0.03196412734523272 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.4050632911392405, + "acc_stderr": 0.031955147413706725, + "acc_norm": 0.4050632911392405, + "acc_norm_stderr": 0.031955147413706725 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.37222946544980445, + "acc_stderr": 0.012346241297204368, + "acc_norm": 0.37222946544980445, + "acc_norm_stderr": 0.012346241297204368 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.27941176470588236, + 
"acc_stderr": 0.031493281045079556, + "acc_norm": 0.27941176470588236, + "acc_norm_stderr": 0.031493281045079556 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3151515151515151, + "acc_stderr": 0.0362773057502241, + "acc_norm": 0.3151515151515151, + "acc_norm_stderr": 0.0362773057502241 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.5348837209302325, + "mc1_stderr": 0.017460849975873962, + "mc2": 0.6645815726883131, + "mc2_stderr": 0.015220316752175892 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4805194805194805, + "acc_stderr": 0.017177301992342547, + "acc_norm": 0.5974025974025974, + "acc_norm_stderr": 0.01686102048640777 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + 
"harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "heavytail/kullm-solar", + "model_sha": "ebad9b88d16b2c7be85aca6adbe253f33e3f1442", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/heegyu/1222-42dot-1.3B-Ko-CoT-Collection-2e-5/result_2023-12-23 05:39:38.json b/heegyu/1222-42dot-1.3B-Ko-CoT-Collection-2e-5/result_2023-12-23 05:39:38.json new file mode 100644 index 0000000000000000000000000000000000000000..20648c831350d58b211ba0fb40e623776b4ff718 --- 
/dev/null +++ b/heegyu/1222-42dot-1.3B-Ko-CoT-Collection-2e-5/result_2023-12-23 05:39:38.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.29266211604095566, + "acc_stderr": 0.013295916103619406, + "acc_norm": 0.34726962457337884, + "acc_norm_stderr": 0.013913034529620444 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3556064528978291, + "acc_stderr": 0.004777183508949815, + "acc_norm": 0.4439354710217088, + "acc_norm_stderr": 0.004958314114266502 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.2982456140350877, + "acc_stderr": 0.03508771929824565, + "acc_norm": 0.2982456140350877, + "acc_norm_stderr": 0.03508771929824565 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.21359223300970873, + "acc_stderr": 0.04058042015646034, + "acc_norm": 0.21359223300970873, + "acc_norm_stderr": 0.04058042015646034 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2554278416347382, + "acc_stderr": 0.015594955384455765, + "acc_norm": 0.2554278416347382, + "acc_norm_stderr": 0.015594955384455765 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34814814814814815, + "acc_stderr": 0.041153246103369526, + "acc_norm": 0.34814814814814815, + "acc_norm_stderr": 0.041153246103369526 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2765957446808511, + "acc_stderr": 0.029241883869628834, + "acc_norm": 0.2765957446808511, + "acc_norm_stderr": 0.029241883869628834 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.29518072289156627, + "acc_stderr": 0.0355092018568963, + "acc_norm": 0.29518072289156627, + "acc_norm_stderr": 0.0355092018568963 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.28938906752411575, + "acc_stderr": 0.025755865922632938, + "acc_norm": 0.28938906752411575, + "acc_norm_stderr": 0.025755865922632938 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 
0.20179372197309417, + "acc_stderr": 0.02693611191280227, + "acc_norm": 0.20179372197309417, + "acc_norm_stderr": 0.02693611191280227 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.24427480916030533, + "acc_stderr": 0.03768335959728744, + "acc_norm": 0.24427480916030533, + "acc_norm_stderr": 0.03768335959728744 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816505, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816505 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.26262626262626265, + "acc_stderr": 0.031353050095330855, + "acc_norm": 0.26262626262626265, + "acc_norm_stderr": 0.031353050095330855 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.27586206896551724, + "acc_stderr": 0.03724563619774632, + "acc_norm": 0.27586206896551724, + "acc_norm_stderr": 0.03724563619774632 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.19607843137254902, + "acc_stderr": 0.03950581861179962, + "acc_norm": 0.19607843137254902, + "acc_norm_stderr": 0.03950581861179962 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.27310924369747897, + "acc_stderr": 0.02894200404099817, + "acc_norm": 0.27310924369747897, + "acc_norm_stderr": 0.02894200404099817 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2923076923076923, + "acc_stderr": 0.023060438380857737, + "acc_norm": 0.2923076923076923, + "acc_norm_stderr": 0.023060438380857737 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.04414343666854933, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.04414343666854933 + }, + 
"harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.28078817733990147, + "acc_stderr": 0.03161856335358611, + "acc_norm": 0.28078817733990147, + "acc_norm_stderr": 0.03161856335358611 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.27419354838709675, + "acc_stderr": 0.025378139970885196, + "acc_norm": 0.27419354838709675, + "acc_norm_stderr": 0.025378139970885196 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.23504273504273504, + "acc_stderr": 0.02777883590493543, + "acc_norm": 0.23504273504273504, + "acc_norm_stderr": 0.02777883590493543 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2490566037735849, + "acc_stderr": 0.026616482980501715, + "acc_norm": 0.2490566037735849, + "acc_norm_stderr": 0.026616482980501715 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.3, + "acc_stderr": 0.04389311454644286, + "acc_norm": 0.3, + "acc_norm_stderr": 0.04389311454644286 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2851851851851852, + "acc_stderr": 0.02752859921034049, + "acc_norm": 0.2851851851851852, + "acc_norm_stderr": 0.02752859921034049 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.03757949922943343, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.03757949922943343 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.03333333333333333, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.03333333333333333 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2947976878612717, + "acc_stderr": 0.03476599607516478, + "acc_norm": 0.2947976878612717, + "acc_norm_stderr": 0.03476599607516478 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.23544973544973544, + "acc_stderr": 0.021851509822031708, + "acc_norm": 0.23544973544973544, + "acc_norm_stderr": 0.021851509822031708 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2569444444444444, + "acc_stderr": 0.03653946969442099, + "acc_norm": 
0.2569444444444444, + "acc_norm_stderr": 0.03653946969442099 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.25722543352601157, + "acc_stderr": 0.023532925431044283, + "acc_norm": 0.25722543352601157, + "acc_norm_stderr": 0.023532925431044283 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3067484662576687, + "acc_stderr": 0.036230899157241474, + "acc_norm": 0.3067484662576687, + "acc_norm_stderr": 0.036230899157241474 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.025630824975621334, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.025630824975621334 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.27979274611398963, + "acc_stderr": 0.03239637046735703, + "acc_norm": 0.27979274611398963, + "acc_norm_stderr": 0.03239637046735703 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.04142439719489362, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.04142439719489362 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.25504587155963304, + "acc_stderr": 0.01868850085653584, + "acc_norm": 0.25504587155963304, + "acc_norm_stderr": 0.01868850085653584 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.23015873015873015, + "acc_stderr": 0.03764950879790605, + "acc_norm": 0.23015873015873015, + "acc_norm_stderr": 0.03764950879790605 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.025553169991826517, + 
"acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.025553169991826517 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.16, + "acc_stderr": 0.036845294917747094, + "acc_norm": 0.16, + "acc_norm_stderr": 0.036845294917747094 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.33884297520661155, + "acc_stderr": 0.0432076780753667, + "acc_norm": 0.33884297520661155, + "acc_norm_stderr": 0.0432076780753667 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.29605263157894735, + "acc_stderr": 0.03715062154998905, + "acc_norm": 0.29605263157894735, + "acc_norm_stderr": 0.03715062154998905 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.017630827375148383, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.017630827375148383 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2375886524822695, + "acc_stderr": 0.025389512552729906, + "acc_norm": 0.2375886524822695, + "acc_norm_stderr": 0.025389512552729906 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.19642857142857142, + "acc_stderr": 0.03770970049347019, + "acc_norm": 0.19642857142857142, + "acc_norm_stderr": 0.03770970049347019 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.42592592592592593, + "acc_stderr": 0.03372343271653063, + "acc_norm": 0.42592592592592593, + "acc_norm_stderr": 0.03372343271653063 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2748603351955307, + "acc_stderr": 0.014931316703220513, + "acc_norm": 0.2748603351955307, + "acc_norm_stderr": 0.014931316703220513 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.22, + "acc_stderr": 0.041633319989322695, + "acc_norm": 0.22, + "acc_norm_stderr": 0.041633319989322695 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 
0.36764705882352944, + "acc_stderr": 0.029289413409403192, + "acc_norm": 0.36764705882352944, + "acc_norm_stderr": 0.029289413409403192 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.27755102040816326, + "acc_stderr": 0.028666857790274648, + "acc_norm": 0.27755102040816326, + "acc_norm_stderr": 0.028666857790274648 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.3206751054852321, + "acc_stderr": 0.03038193194999041, + "acc_norm": 0.3206751054852321, + "acc_norm_stderr": 0.03038193194999041 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.24511082138200782, + "acc_stderr": 0.010986307870045502, + "acc_norm": 0.24511082138200782, + "acc_norm_stderr": 0.010986307870045502 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.029771775228145638, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.029771775228145638 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.24242424242424243, + "acc_stderr": 0.03346409881055953, + "acc_norm": 0.24242424242424243, + "acc_norm_stderr": 0.03346409881055953 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.23990208078335373, + "mc1_stderr": 0.01494881267906214, + "mc2": 0.39153887048235514, + "mc2_stderr": 0.014723691720257542 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2255017709563164, + "acc_stderr": 0.01436812214953219, + "acc_norm": 0.3435655253837072, + "acc_norm_stderr": 0.01632733480642914 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + 
"harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + 
"harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "heegyu/1222-42dot-1.3B-Ko-CoT-Collection-2e-5", + "model_sha": "af8700ffcbcc7e3b1db1fb5688d42cbdc7201644", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/heegyu/42dot-1.3B-KOR-OpenOrca-Platypus-1e-5/result_2023-11-28 14:58:23.json b/heegyu/42dot-1.3B-KOR-OpenOrca-Platypus-1e-5/result_2023-11-28 14:58:23.json new file mode 100644 index 0000000000000000000000000000000000000000..d169a3c1fc3efb6724254cd1d68feb289cef62a2 --- /dev/null +++ b/heegyu/42dot-1.3B-KOR-OpenOrca-Platypus-1e-5/result_2023-11-28 14:58:23.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.28071672354948807, + "acc_stderr": 0.013131238126975583, + "acc_norm": 0.3361774744027304, + "acc_norm_stderr": 0.013804855026205758 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3580959968133838, + "acc_stderr": 0.004784607222774637, + "acc_norm": 0.45279824736108343, + "acc_norm_stderr": 0.004967497130451344 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.03377310252209196, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.03377310252209196 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.22330097087378642, + "acc_stderr": 0.04123553189891431, + "acc_norm": 0.22330097087378642, + "acc_norm_stderr": 0.04123553189891431 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2388250319284802, + "acc_stderr": 0.015246803197398698, + "acc_norm": 0.2388250319284802, + "acc_norm_stderr": 0.015246803197398698 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.24444444444444444, + "acc_stderr": 0.03712537833614866, + 
"acc_norm": 0.24444444444444444, + "acc_norm_stderr": 0.03712537833614866 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.26382978723404255, + "acc_stderr": 0.02880998985410297, + "acc_norm": 0.26382978723404255, + "acc_norm_stderr": 0.02880998985410297 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3192771084337349, + "acc_stderr": 0.036293353299478595, + "acc_norm": 0.3192771084337349, + "acc_norm_stderr": 0.036293353299478595 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.28938906752411575, + "acc_stderr": 0.025755865922632938, + "acc_norm": 0.28938906752411575, + "acc_norm_stderr": 0.025755865922632938 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.2242152466367713, + "acc_stderr": 0.027991534258519527, + "acc_norm": 0.2242152466367713, + "acc_norm_stderr": 0.027991534258519527 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3053435114503817, + "acc_stderr": 0.040393149787245626, + "acc_norm": 0.3053435114503817, + "acc_norm_stderr": 0.040393149787245626 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.30808080808080807, + "acc_stderr": 0.032894773300986155, + "acc_norm": 0.30808080808080807, + "acc_norm_stderr": 0.032894773300986155 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.20689655172413793, + "acc_stderr": 0.03375672449560554, + "acc_norm": 0.20689655172413793, + "acc_norm_stderr": 0.03375672449560554 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.04280105837364395, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.04280105837364395 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3235294117647059, + 
"acc_stderr": 0.030388353551886838, + "acc_norm": 0.3235294117647059, + "acc_norm_stderr": 0.030388353551886838 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.37435897435897436, + "acc_stderr": 0.024537591572830513, + "acc_norm": 0.37435897435897436, + "acc_norm_stderr": 0.024537591572830513 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.22, + "acc_stderr": 0.0416333199893227, + "acc_norm": 0.22, + "acc_norm_stderr": 0.0416333199893227 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.23148148148148148, + "acc_stderr": 0.04077494709252626, + "acc_norm": 0.23148148148148148, + "acc_norm_stderr": 0.04077494709252626 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2315270935960591, + "acc_stderr": 0.02967833314144445, + "acc_norm": 0.2315270935960591, + "acc_norm_stderr": 0.02967833314144445 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2870967741935484, + "acc_stderr": 0.025736542745594525, + "acc_norm": 0.2870967741935484, + "acc_norm_stderr": 0.025736542745594525 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.23076923076923078, + "acc_stderr": 0.02760192138141759, + "acc_norm": 0.23076923076923078, + "acc_norm_stderr": 0.02760192138141759 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2679245283018868, + "acc_stderr": 0.027257260322494845, + "acc_norm": 0.2679245283018868, + "acc_norm_stderr": 0.027257260322494845 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2727272727272727, + "acc_stderr": 0.04265792110940589, + "acc_norm": 0.2727272727272727, + "acc_norm_stderr": 0.04265792110940589 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.23333333333333334, + "acc_stderr": 0.025787874220959316, + "acc_norm": 0.23333333333333334, + "acc_norm_stderr": 0.025787874220959316 + }, + 
"harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33774834437086093, + "acc_stderr": 0.038615575462551684, + "acc_norm": 0.33774834437086093, + "acc_norm_stderr": 0.038615575462551684 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.3034825870646766, + "acc_stderr": 0.032510068164586174, + "acc_norm": 0.3034825870646766, + "acc_norm_stderr": 0.032510068164586174 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2774566473988439, + "acc_stderr": 0.03414014007044036, + "acc_norm": 0.2774566473988439, + "acc_norm_stderr": 0.03414014007044036 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.25396825396825395, + "acc_stderr": 0.022418042891113946, + "acc_norm": 0.25396825396825395, + "acc_norm_stderr": 0.022418042891113946 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.24305555555555555, + "acc_stderr": 0.0358687928008034, + "acc_norm": 0.24305555555555555, + "acc_norm_stderr": 0.0358687928008034 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.24566473988439305, + "acc_stderr": 0.023176298203992002, + "acc_norm": 0.24566473988439305, + "acc_norm_stderr": 0.023176298203992002 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2883435582822086, + "acc_stderr": 0.035590395316173425, + "acc_norm": 0.2883435582822086, + "acc_norm_stderr": 0.035590395316173425 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2808641975308642, + "acc_stderr": 0.02500646975579921, + "acc_norm": 0.2808641975308642, + "acc_norm_stderr": 0.02500646975579921 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + 
}, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.35751295336787564, + "acc_stderr": 0.034588160421810066, + "acc_norm": 0.35751295336787564, + "acc_norm_stderr": 0.034588160421810066 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2982456140350877, + "acc_stderr": 0.043036840335373173, + "acc_norm": 0.2982456140350877, + "acc_norm_stderr": 0.043036840335373173 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.30458715596330277, + "acc_stderr": 0.019732299420354038, + "acc_norm": 0.30458715596330277, + "acc_norm_stderr": 0.019732299420354038 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.04006168083848878, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.04006168083848878 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.02564686309713792, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.02564686309713792 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909281, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909281 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.371900826446281, + "acc_stderr": 0.04412015806624504, + "acc_norm": 0.371900826446281, + "acc_norm_stderr": 0.04412015806624504 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.23026315789473684, + "acc_stderr": 0.034260594244031654, + "acc_norm": 0.23026315789473684, + "acc_norm_stderr": 0.034260594244031654 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.27124183006535946, + "acc_stderr": 0.01798661530403031, + "acc_norm": 0.27124183006535946, + "acc_norm_stderr": 0.01798661530403031 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.25177304964539005, + "acc_stderr": 0.0258921511567094, + "acc_norm": 0.25177304964539005, + "acc_norm_stderr": 0.0258921511567094 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.23214285714285715, + "acc_stderr": 0.04007341809755806, + 
"acc_norm": 0.23214285714285715, + "acc_norm_stderr": 0.04007341809755806 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.03400603625538272, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.03400603625538272 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27262569832402234, + "acc_stderr": 0.014893391735249608, + "acc_norm": 0.27262569832402234, + "acc_norm_stderr": 0.014893391735249608 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.44485294117647056, + "acc_stderr": 0.030187532060329383, + "acc_norm": 0.44485294117647056, + "acc_norm_stderr": 0.030187532060329383 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.27755102040816326, + "acc_stderr": 0.02866685779027465, + "acc_norm": 0.27755102040816326, + "acc_norm_stderr": 0.02866685779027465 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.270042194092827, + "acc_stderr": 0.028900721906293423, + "acc_norm": 0.270042194092827, + "acc_norm_stderr": 0.028900721906293423 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.25358539765319427, + "acc_stderr": 0.011111715336101138, + "acc_norm": 0.25358539765319427, + "acc_norm_stderr": 0.011111715336101138 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.028867431449849313, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.028867431449849313 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.23636363636363636, + "acc_stderr": 0.03317505930009179, + "acc_norm": 0.23636363636363636, + "acc_norm_stderr": 0.03317505930009179 + }, + 
"harness|ko_truthfulqa_mc|0": { + "mc1": 0.2533659730722154, + "mc1_stderr": 0.015225899340826824, + "mc2": 0.406729238769047, + "mc2_stderr": 0.01490927480363471 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2408500590318772, + "acc_stderr": 0.01470117266258392, + "acc_norm": 0.36363636363636365, + "acc_norm_stderr": 0.01653869160332771 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "heegyu/42dot-1.3B-KOR-OpenOrca-Platypus-1e-5", + "model_sha": "56a4dde8ef71b89abad939d88ef1f23d12442ae6", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/heegyu/42dot_LLM-PLM-1.3B-mt-steps-50000/result_2023-10-29 14:09:52.json b/heegyu/42dot_LLM-PLM-1.3B-mt-steps-50000/result_2023-10-29 14:09:52.json new file mode 100644 index 0000000000000000000000000000000000000000..f2ea3a922909dfb9ec4f2a7d0da6c6143112ff86 --- /dev/null +++ b/heegyu/42dot_LLM-PLM-1.3B-mt-steps-50000/result_2023-10-29 14:09:52.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2636518771331058, + "acc_stderr": 0.012875929151297061, + "acc_norm": 0.3216723549488055, + "acc_norm_stderr": 0.013650488084494164 + }, + 
"harness|ko_hellaswag|10": { + "acc": 0.3345947022505477, + "acc_stderr": 0.004708842600177437, + "acc_norm": 0.41884086835291773, + "acc_norm_stderr": 0.004923609207861533 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.21052631578947367, + "acc_stderr": 0.031267817146631786, + "acc_norm": 0.21052631578947367, + "acc_norm_stderr": 0.031267817146631786 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.14563106796116504, + "acc_stderr": 0.03492606476623792, + "acc_norm": 0.14563106796116504, + "acc_norm_stderr": 0.03492606476623792 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.27330779054916987, + "acc_stderr": 0.015936681062628556, + "acc_norm": 0.27330779054916987, + "acc_norm_stderr": 0.015936681062628556 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34814814814814815, + "acc_stderr": 0.041153246103369526, + "acc_norm": 0.34814814814814815, + "acc_norm_stderr": 0.041153246103369526 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2, + "acc_stderr": 0.0261488180184245, + "acc_norm": 0.2, + "acc_norm_stderr": 0.0261488180184245 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.25301204819277107, + "acc_stderr": 0.033844291552331346, + "acc_norm": 0.25301204819277107, + "acc_norm_stderr": 0.033844291552331346 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2990353697749196, + "acc_stderr": 0.02600330111788514, + "acc_norm": 0.2990353697749196, + "acc_norm_stderr": 0.02600330111788514 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.2645739910313901, + "acc_stderr": 0.02960510321703834, + "acc_norm": 0.2645739910313901, + "acc_norm_stderr": 0.02960510321703834 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2748091603053435, + "acc_stderr": 0.03915345408847835, + "acc_norm": 0.2748091603053435, + "acc_norm_stderr": 0.03915345408847835 + }, + 
"harness|ko_mmlu_medical_genetics|5": { + "acc": 0.16, + "acc_stderr": 0.03684529491774708, + "acc_norm": 0.16, + "acc_norm_stderr": 0.03684529491774708 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.25757575757575757, + "acc_stderr": 0.031156269519646836, + "acc_norm": 0.25757575757575757, + "acc_norm_stderr": 0.031156269519646836 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2620689655172414, + "acc_stderr": 0.036646663372252565, + "acc_norm": 0.2620689655172414, + "acc_norm_stderr": 0.036646663372252565 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.17647058823529413, + "acc_stderr": 0.037932811853078084, + "acc_norm": 0.17647058823529413, + "acc_norm_stderr": 0.037932811853078084 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.21008403361344538, + "acc_stderr": 0.026461398717471874, + "acc_norm": 0.21008403361344538, + "acc_norm_stderr": 0.026461398717471874 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.23333333333333334, + "acc_stderr": 0.021444547301560486, + "acc_norm": 0.23333333333333334, + "acc_norm_stderr": 0.021444547301560486 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.3, + "acc_stderr": 0.04605661864718381, + "acc_norm": 0.3, + "acc_norm_stderr": 0.04605661864718381 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.2037037037037037, + "acc_stderr": 0.03893542518824847, + "acc_norm": 0.2037037037037037, + "acc_norm_stderr": 0.03893542518824847 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.21182266009852216, + "acc_stderr": 0.02874898368994107, + "acc_norm": 0.21182266009852216, + "acc_norm_stderr": 0.02874898368994107 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.25806451612903225, + "acc_stderr": 0.02489246917246283, + "acc_norm": 0.25806451612903225, + 
"acc_norm_stderr": 0.02489246917246283 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.1794871794871795, + "acc_stderr": 0.02514093595033545, + "acc_norm": 0.1794871794871795, + "acc_norm_stderr": 0.02514093595033545 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2, + "acc_stderr": 0.02461829819586651, + "acc_norm": 0.2, + "acc_norm_stderr": 0.02461829819586651 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03955932861795833, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03955932861795833 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.026842057873833706, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.026842057873833706 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.037101857261199946, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.037101857261199946 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.23880597014925373, + "acc_stderr": 0.030147775935409217, + "acc_norm": 0.23880597014925373, + "acc_norm_stderr": 0.030147775935409217 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.24277456647398843, + "acc_stderr": 0.0326926380614177, + "acc_norm": 0.24277456647398843, + "acc_norm_stderr": 0.0326926380614177 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.022569897074918424, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.022569897074918424 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2638888888888889, + "acc_stderr": 0.03685651095897532, + "acc_norm": 0.2638888888888889, + "acc_norm_stderr": 0.03685651095897532 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.23, + "acc_stderr": 0.042295258468165065, + "acc_norm": 0.23, + "acc_norm_stderr": 0.042295258468165065 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 
0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.24277456647398843, + "acc_stderr": 0.0230836585869842, + "acc_norm": 0.24277456647398843, + "acc_norm_stderr": 0.0230836585869842 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3006134969325153, + "acc_stderr": 0.03602511318806771, + "acc_norm": 0.3006134969325153, + "acc_norm_stderr": 0.03602511318806771 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.25, + "acc_stderr": 0.02409347123262133, + "acc_norm": 0.25, + "acc_norm_stderr": 0.02409347123262133 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.25906735751295334, + "acc_stderr": 0.031618779179354094, + "acc_norm": 0.25906735751295334, + "acc_norm_stderr": 0.031618779179354094 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.04049339297748141, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.04049339297748141 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.21834862385321102, + "acc_stderr": 0.017712600528722738, + "acc_norm": 0.21834862385321102, + "acc_norm_stderr": 0.017712600528722738 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.1746031746031746, + "acc_stderr": 0.033954900208561116, + "acc_norm": 0.1746031746031746, + "acc_norm_stderr": 0.033954900208561116 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.024630048979824775, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.024630048979824775 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.39669421487603307, + "acc_stderr": 0.04465869780531009, + "acc_norm": 
0.39669421487603307, + "acc_norm_stderr": 0.04465869780531009 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.18421052631578946, + "acc_stderr": 0.0315469804508223, + "acc_norm": 0.18421052631578946, + "acc_norm_stderr": 0.0315469804508223 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2434640522875817, + "acc_stderr": 0.017362473762146616, + "acc_norm": 0.2434640522875817, + "acc_norm_stderr": 0.017362473762146616 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.25886524822695034, + "acc_stderr": 0.02612957252718085, + "acc_norm": 0.25886524822695034, + "acc_norm_stderr": 0.02612957252718085 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.23214285714285715, + "acc_stderr": 0.04007341809755808, + "acc_norm": 0.23214285714285715, + "acc_norm_stderr": 0.04007341809755808 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.032568505702936464, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.032568505702936464 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27039106145251396, + "acc_stderr": 0.014854993938010083, + "acc_norm": 0.27039106145251396, + "acc_norm_stderr": 0.014854993938010083 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3492647058823529, + "acc_stderr": 0.02895975519682487, + "acc_norm": 0.3492647058823529, + "acc_norm_stderr": 0.02895975519682487 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2653061224489796, + "acc_stderr": 0.028263889943784593, + "acc_norm": 0.2653061224489796, + "acc_norm_stderr": 0.028263889943784593 + }, + "harness|ko_mmlu_high_school_world_history|5": { + 
"acc": 0.26582278481012656, + "acc_stderr": 0.02875679962965834, + "acc_norm": 0.26582278481012656, + "acc_norm_stderr": 0.02875679962965834 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.25945241199478486, + "acc_stderr": 0.0111952620763503, + "acc_norm": 0.25945241199478486, + "acc_norm_stderr": 0.0111952620763503 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.03096451792692341, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.03096451792692341 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.21212121212121213, + "acc_stderr": 0.031922715695483, + "acc_norm": 0.21212121212121213, + "acc_norm_stderr": 0.031922715695483 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2607099143206854, + "mc1_stderr": 0.015368841620766373, + "mc2": 0.40710844276153646, + "mc2_stderr": 0.014826228669308838 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2526564344746163, + "acc_stderr": 0.014939640598798442, + "acc_norm": 0.3116883116883117, + "acc_norm_stderr": 0.015924567607358345 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + 
"harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "heegyu/42dot_LLM-PLM-1.3B-mt-steps-50000", + "model_sha": "96228db523495871e1b856e0f29d82eb0efd9d2d", + "model_dtype": "torch.float16", + "lighteval_sha": "", + 
"num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/heegyu/42dot_LLM-PLM-1.3B-mt/result_2023-10-15 11:19:51.json b/heegyu/42dot_LLM-PLM-1.3B-mt/result_2023-10-15 11:19:51.json new file mode 100644 index 0000000000000000000000000000000000000000..cc10300a6b5890b0ace635ecc2df4620ad526850 --- /dev/null +++ b/heegyu/42dot_LLM-PLM-1.3B-mt/result_2023-10-15 11:19:51.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2175767918088737, + "acc_stderr": 0.012057262020972506, + "acc_norm": 0.2627986348122867, + "acc_norm_stderr": 0.012862523175351331 + }, + "harness|ko_hellaswag|10": { + "acc": 0.31447918741286596, + "acc_stderr": 0.004633592029065801, + "acc_norm": 0.37890858394742083, + "acc_norm_stderr": 0.004841238763529378 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.22807017543859648, + "acc_stderr": 0.03218093795602357, + "acc_norm": 0.22807017543859648, + "acc_norm_stderr": 0.03218093795602357 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.30097087378640774, + "acc_stderr": 0.045416094465039476, + "acc_norm": 0.30097087378640774, + "acc_norm_stderr": 0.045416094465039476 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.22349936143039592, + "acc_stderr": 0.01489723522945071, + "acc_norm": 0.22349936143039592, + "acc_norm_stderr": 0.01489723522945071 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.22962962962962963, + "acc_stderr": 0.036333844140734636, + "acc_norm": 0.22962962962962963, + "acc_norm_stderr": 0.036333844140734636 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2680851063829787, + "acc_stderr": 0.02895734278834235, + "acc_norm": 0.2680851063829787, + "acc_norm_stderr": 0.02895734278834235 + }, + "harness|ko_mmlu_virology|5": { + 
"acc": 0.2891566265060241, + "acc_stderr": 0.03529486801511116, + "acc_norm": 0.2891566265060241, + "acc_norm_stderr": 0.03529486801511116 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.24758842443729903, + "acc_stderr": 0.024513879973621967, + "acc_norm": 0.24758842443729903, + "acc_norm_stderr": 0.024513879973621967 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.19282511210762332, + "acc_stderr": 0.026478240960489365, + "acc_norm": 0.19282511210762332, + "acc_norm_stderr": 0.026478240960489365 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2595419847328244, + "acc_stderr": 0.03844876139785271, + "acc_norm": 0.2595419847328244, + "acc_norm_stderr": 0.03844876139785271 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.36363636363636365, + "acc_stderr": 0.03427308652999934, + "acc_norm": 0.36363636363636365, + "acc_norm_stderr": 0.03427308652999934 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.22758620689655173, + "acc_stderr": 0.03493950380131184, + "acc_norm": 0.22758620689655173, + "acc_norm_stderr": 0.03493950380131184 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237654, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237654 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.31932773109243695, + "acc_stderr": 0.0302839955258844, + "acc_norm": 0.31932773109243695, + "acc_norm_stderr": 0.0302839955258844 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3641025641025641, + "acc_stderr": 0.024396672985094778, + "acc_norm": 0.3641025641025641, + "acc_norm_stderr": 0.024396672985094778 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 
+ }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.16, + "acc_stderr": 0.0368452949177471, + "acc_norm": 0.16, + "acc_norm_stderr": 0.0368452949177471 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.24074074074074073, + "acc_stderr": 0.04133119440243838, + "acc_norm": 0.24074074074074073, + "acc_norm_stderr": 0.04133119440243838 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.28078817733990147, + "acc_stderr": 0.0316185633535861, + "acc_norm": 0.28078817733990147, + "acc_norm_stderr": 0.0316185633535861 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2903225806451613, + "acc_stderr": 0.02582210611941589, + "acc_norm": 0.2903225806451613, + "acc_norm_stderr": 0.02582210611941589 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.19658119658119658, + "acc_stderr": 0.02603538609895129, + "acc_norm": 0.19658119658119658, + "acc_norm_stderr": 0.02603538609895129 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.21509433962264152, + "acc_stderr": 0.02528839450289137, + "acc_norm": 0.21509433962264152, + "acc_norm_stderr": 0.02528839450289137 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.24545454545454545, + "acc_stderr": 0.041220665028782834, + "acc_norm": 0.24545454545454545, + "acc_norm_stderr": 0.041220665028782834 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2740740740740741, + "acc_stderr": 0.027195934804085622, + "acc_norm": 0.2740740740740741, + "acc_norm_stderr": 0.027195934804085622 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3509933774834437, + "acc_stderr": 0.03896981964257374, + "acc_norm": 0.3509933774834437, + "acc_norm_stderr": 0.03896981964257374 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.19402985074626866, + "acc_stderr": 0.02796267760476893, + "acc_norm": 0.19402985074626866, + "acc_norm_stderr": 0.02796267760476893 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.1791907514450867, + "acc_stderr": 0.02924251305906327, + "acc_norm": 
0.1791907514450867, + "acc_norm_stderr": 0.02924251305906327 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.24603174603174602, + "acc_stderr": 0.022182037202948368, + "acc_norm": 0.24603174603174602, + "acc_norm_stderr": 0.022182037202948368 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2569444444444444, + "acc_stderr": 0.03653946969442099, + "acc_norm": 0.2569444444444444, + "acc_norm_stderr": 0.03653946969442099 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.34, + "acc_stderr": 0.047609522856952365, + "acc_norm": 0.34, + "acc_norm_stderr": 0.047609522856952365 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.23699421965317918, + "acc_stderr": 0.02289408248992599, + "acc_norm": 0.23699421965317918, + "acc_norm_stderr": 0.02289408248992599 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.22085889570552147, + "acc_stderr": 0.03259177392742178, + "acc_norm": 0.22085889570552147, + "acc_norm_stderr": 0.03259177392742178 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.23148148148148148, + "acc_stderr": 0.023468429832451163, + "acc_norm": 0.23148148148148148, + "acc_norm_stderr": 0.023468429832451163 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.04605661864718381, + "acc_norm": 0.3, + "acc_norm_stderr": 0.04605661864718381 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.27461139896373055, + "acc_stderr": 0.032210245080411544, + "acc_norm": 0.27461139896373055, + "acc_norm_stderr": 0.032210245080411544 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.30701754385964913, + "acc_stderr": 0.043391383225798594, + "acc_norm": 0.30701754385964913, + "acc_norm_stderr": 0.043391383225798594 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3522935779816514, + "acc_stderr": 
0.020480568843999, + "acc_norm": 0.3522935779816514, + "acc_norm_stderr": 0.020480568843999 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.373015873015873, + "acc_stderr": 0.04325506042017086, + "acc_norm": 0.373015873015873, + "acc_norm_stderr": 0.04325506042017086 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.25163398692810457, + "acc_stderr": 0.024848018263875195, + "acc_norm": 0.25163398692810457, + "acc_norm_stderr": 0.024848018263875195 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.22, + "acc_stderr": 0.0416333199893227, + "acc_norm": 0.22, + "acc_norm_stderr": 0.0416333199893227 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.23140495867768596, + "acc_stderr": 0.03849856098794087, + "acc_norm": 0.23140495867768596, + "acc_norm_stderr": 0.03849856098794087 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.29605263157894735, + "acc_stderr": 0.037150621549989035, + "acc_norm": 0.29605263157894735, + "acc_norm_stderr": 0.037150621549989035 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.238562091503268, + "acc_stderr": 0.017242385828779606, + "acc_norm": 0.238562091503268, + "acc_norm_stderr": 0.017242385828779606 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.23049645390070922, + "acc_stderr": 0.025123739226872405, + "acc_norm": 0.23049645390070922, + "acc_norm_stderr": 0.025123739226872405 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.16071428571428573, + "acc_stderr": 0.034859460964757394, + "acc_norm": 0.16071428571428573, + "acc_norm_stderr": 0.034859460964757394 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.0340470532865388, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.0340470532865388 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27262569832402234, + "acc_stderr": 0.014893391735249608, + "acc_norm": 0.27262569832402234, + "acc_norm_stderr": 0.014893391735249608 + }, + 
"harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036846, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036846 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.44485294117647056, + "acc_stderr": 0.03018753206032938, + "acc_norm": 0.44485294117647056, + "acc_norm_stderr": 0.03018753206032938 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.22040816326530613, + "acc_stderr": 0.026537045312145315, + "acc_norm": 0.22040816326530613, + "acc_norm_stderr": 0.026537045312145315 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.22784810126582278, + "acc_stderr": 0.02730348459906942, + "acc_norm": 0.22784810126582278, + "acc_norm_stderr": 0.02730348459906942 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2457627118644068, + "acc_stderr": 0.01099615663514269, + "acc_norm": 0.2457627118644068, + "acc_norm_stderr": 0.01099615663514269 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.22058823529411764, + "acc_stderr": 0.0291022543896741, + "acc_norm": 0.22058823529411764, + "acc_norm_stderr": 0.0291022543896741 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2727272727272727, + "acc_stderr": 0.0347769116216366, + "acc_norm": 0.2727272727272727, + "acc_norm_stderr": 0.0347769116216366 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2484700122399021, + "mc1_stderr": 0.0151274270965207, + "mc2": 0.3837063373774927, + "mc2_stderr": 0.01511245687075564 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2668240850059032, + "acc_stderr": 0.015206575684565885, + "acc_norm": 0.30342384887839435, + "acc_norm_stderr": 0.01580607271790957 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + 
"harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + 
"harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "heegyu/42dot_LLM-PLM-1.3B-mt", + "model_sha": "b6ad84ea5edcb8b397824634213e7008f08fbd06", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/heegyu/AULM-5.8b-v0804-hf/result_2023-10-15 11:21:40.json b/heegyu/AULM-5.8b-v0804-hf/result_2023-10-15 11:21:40.json new file mode 100644 index 0000000000000000000000000000000000000000..5888b60da9bec97b2da0f5396b91dab07433d128 --- /dev/null +++ b/heegyu/AULM-5.8b-v0804-hf/result_2023-10-15 11:21:40.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2619453924914676, + "acc_stderr": 0.012849054826858114, + "acc_norm": 0.3302047781569966, + "acc_norm_stderr": 0.013743085603760427 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3604859589723163, + "acc_stderr": 0.004791601975612766, + "acc_norm": 0.45429197371041624, + "acc_norm_stderr": 0.004968888130290065 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.1871345029239766, + "acc_stderr": 0.029913127232368025, + "acc_norm": 0.1871345029239766, + "acc_norm_stderr": 0.029913127232368025 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.1941747572815534, + "acc_stderr": 0.03916667762822584, + "acc_norm": 0.1941747572815534, + "acc_norm_stderr": 0.03916667762822584 + }, + 
"harness|ko_mmlu_miscellaneous|5": { + "acc": 0.21966794380587484, + "acc_stderr": 0.014805384478371163, + "acc_norm": 0.21966794380587484, + "acc_norm_stderr": 0.014805384478371163 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2740740740740741, + "acc_stderr": 0.03853254836552003, + "acc_norm": 0.2740740740740741, + "acc_norm_stderr": 0.03853254836552003 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2127659574468085, + "acc_stderr": 0.026754391348039783, + "acc_norm": 0.2127659574468085, + "acc_norm_stderr": 0.026754391348039783 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.21686746987951808, + "acc_stderr": 0.03208284450356365, + "acc_norm": 0.21686746987951808, + "acc_norm_stderr": 0.03208284450356365 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.28938906752411575, + "acc_stderr": 0.02575586592263294, + "acc_norm": 0.28938906752411575, + "acc_norm_stderr": 0.02575586592263294 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.11659192825112108, + "acc_stderr": 0.02153963981624447, + "acc_norm": 0.11659192825112108, + "acc_norm_stderr": 0.02153963981624447 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.26717557251908397, + "acc_stderr": 0.03880848301082396, + "acc_norm": 0.26717557251908397, + "acc_norm_stderr": 0.03880848301082396 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.29797979797979796, + "acc_stderr": 0.032586303838365555, + "acc_norm": 0.29797979797979796, + "acc_norm_stderr": 0.032586303838365555 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2413793103448276, + "acc_stderr": 0.03565998174135302, + "acc_norm": 0.2413793103448276, + "acc_norm_stderr": 
0.03565998174135302 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.04220773659171452, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.04220773659171452 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.31512605042016806, + "acc_stderr": 0.030176808288974337, + "acc_norm": 0.31512605042016806, + "acc_norm_stderr": 0.030176808288974337 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.32564102564102565, + "acc_stderr": 0.02375966576741229, + "acc_norm": 0.32564102564102565, + "acc_norm_stderr": 0.02375966576741229 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036846, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036846 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.21296296296296297, + "acc_stderr": 0.03957835471980981, + "acc_norm": 0.21296296296296297, + "acc_norm_stderr": 0.03957835471980981 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.30049261083743845, + "acc_stderr": 0.03225799476233485, + "acc_norm": 0.30049261083743845, + "acc_norm_stderr": 0.03225799476233485 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3, + "acc_stderr": 0.026069362295335134, + "acc_norm": 0.3, + "acc_norm_stderr": 0.026069362295335134 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.19658119658119658, + "acc_stderr": 0.02603538609895129, + "acc_norm": 0.19658119658119658, + "acc_norm_stderr": 0.02603538609895129 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.27547169811320754, + "acc_stderr": 0.02749566368372406, + "acc_norm": 0.27547169811320754, + "acc_norm_stderr": 0.02749566368372406 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03955932861795833, + "acc_norm": 0.21818181818181817, + 
"acc_norm_stderr": 0.03955932861795833 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.22592592592592592, + "acc_stderr": 0.02549753263960954, + "acc_norm": 0.22592592592592592, + "acc_norm_stderr": 0.02549753263960954 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3509933774834437, + "acc_stderr": 0.03896981964257375, + "acc_norm": 0.3509933774834437, + "acc_norm_stderr": 0.03896981964257375 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.24875621890547264, + "acc_stderr": 0.030567675938916707, + "acc_norm": 0.24875621890547264, + "acc_norm_stderr": 0.030567675938916707 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2832369942196532, + "acc_stderr": 0.034355680560478746, + "acc_norm": 0.2832369942196532, + "acc_norm_stderr": 0.034355680560478746 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.23809523809523808, + "acc_stderr": 0.021935878081184756, + "acc_norm": 0.23809523809523808, + "acc_norm_stderr": 0.021935878081184756 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2847222222222222, + "acc_stderr": 0.03773809990686935, + "acc_norm": 0.2847222222222222, + "acc_norm_stderr": 0.03773809990686935 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720683, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720683 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2658959537572254, + "acc_stderr": 0.023786203255508297, + "acc_norm": 0.2658959537572254, + "acc_norm_stderr": 0.023786203255508297 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2392638036809816, + "acc_stderr": 0.03351953879521272, + "acc_norm": 0.2392638036809816, + "acc_norm_stderr": 0.03351953879521272 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.19753086419753085, + "acc_stderr": 0.02215288992789894, + 
"acc_norm": 0.19753086419753085, + "acc_norm_stderr": 0.02215288992789894 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.32124352331606215, + "acc_stderr": 0.033699508685490674, + "acc_norm": 0.32124352331606215, + "acc_norm_stderr": 0.033699508685490674 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.0414243971948936, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.0414243971948936 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3669724770642202, + "acc_stderr": 0.02066467565952053, + "acc_norm": 0.3669724770642202, + "acc_norm_stderr": 0.02066467565952053 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.29365079365079366, + "acc_stderr": 0.04073524322147126, + "acc_norm": 0.29365079365079366, + "acc_norm_stderr": 0.04073524322147126 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2973856209150327, + "acc_stderr": 0.026173908506718576, + "acc_norm": 0.2973856209150327, + "acc_norm_stderr": 0.026173908506718576 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036844, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036844 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.1322314049586777, + "acc_stderr": 0.030922788320445812, + "acc_norm": 0.1322314049586777, + "acc_norm_stderr": 0.030922788320445812 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.34210526315789475, + "acc_stderr": 0.03860731599316092, + "acc_norm": 0.34210526315789475, + "acc_norm_stderr": 0.03860731599316092 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2173202614379085, + "acc_stderr": 0.016684820929148598, + "acc_norm": 0.2173202614379085, + "acc_norm_stderr": 0.016684820929148598 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 
0.21631205673758866, + "acc_stderr": 0.024561720560562793, + "acc_norm": 0.21631205673758866, + "acc_norm_stderr": 0.024561720560562793 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.20535714285714285, + "acc_stderr": 0.03834241021419073, + "acc_norm": 0.20535714285714285, + "acc_norm_stderr": 0.03834241021419073 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4675925925925926, + "acc_stderr": 0.03402801581358966, + "acc_norm": 0.4675925925925926, + "acc_norm_stderr": 0.03402801581358966 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27262569832402234, + "acc_stderr": 0.014893391735249608, + "acc_norm": 0.27262569832402234, + "acc_norm_stderr": 0.014893391735249608 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.44485294117647056, + "acc_stderr": 0.030187532060329383, + "acc_norm": 0.44485294117647056, + "acc_norm_stderr": 0.030187532060329383 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.39591836734693875, + "acc_stderr": 0.03130802899065685, + "acc_norm": 0.39591836734693875, + "acc_norm_stderr": 0.03130802899065685 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.22362869198312235, + "acc_stderr": 0.027123298205229972, + "acc_norm": 0.22362869198312235, + "acc_norm_stderr": 0.027123298205229972 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.24967405475880053, + "acc_stderr": 0.011054538377832327, + "acc_norm": 0.24967405475880053, + "acc_norm_stderr": 0.011054538377832327 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.030587591351604246, + "acc_norm": 0.2549019607843137, + 
"acc_norm_stderr": 0.030587591351604246 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2787878787878788, + "acc_stderr": 0.03501438706296781, + "acc_norm": 0.2787878787878788, + "acc_norm_stderr": 0.03501438706296781 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.26193390452876375, + "mc1_stderr": 0.015392118805015016, + "mc2": 0.40797537743571977, + "mc2_stderr": 0.014976707161150397 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2975206611570248, + "acc_stderr": 0.01571774220508993, + "acc_norm": 0.3742621015348288, + "acc_norm_stderr": 0.01663791778979874 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + 
"harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "heegyu/AULM-5.8b-v0804-hf", + "model_sha": "ddcfd46cc8b42d7fb6ad822d97b6c30dfd3c028b", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/heegyu/LIMA-13b-hf/result_2023-10-15 11:18:07.json b/heegyu/LIMA-13b-hf/result_2023-10-15 11:18:07.json new file mode 100644 index 0000000000000000000000000000000000000000..dc8c89a261dfe210566fc34b7af9f6002f627b50 --- /dev/null +++ b/heegyu/LIMA-13b-hf/result_2023-10-15 11:18:07.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + 
"acc": 0.21331058020477817, + "acc_stderr": 0.011970971742326334, + "acc_norm": 0.26023890784982934, + "acc_norm_stderr": 0.012821930225112552 + }, + "harness|ko_hellaswag|10": { + "acc": 0.30302728540131446, + "acc_stderr": 0.004586276903267076, + "acc_norm": 0.3558056164110735, + "acc_norm_stderr": 0.00477778258481779 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.36257309941520466, + "acc_stderr": 0.036871306155620606, + "acc_norm": 0.36257309941520466, + "acc_norm_stderr": 0.036871306155620606 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.30097087378640774, + "acc_stderr": 0.045416094465039476, + "acc_norm": 0.30097087378640774, + "acc_norm_stderr": 0.045416094465039476 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.30268199233716475, + "acc_stderr": 0.01642878158174936, + "acc_norm": 0.30268199233716475, + "acc_norm_stderr": 0.01642878158174936 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.03820169914517905, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.03820169914517905 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.30638297872340425, + "acc_stderr": 0.03013590647851756, + "acc_norm": 0.30638297872340425, + "acc_norm_stderr": 0.03013590647851756 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.21686746987951808, + "acc_stderr": 0.03208284450356365, + "acc_norm": 0.21686746987951808, + "acc_norm_stderr": 0.03208284450356365 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3086816720257235, + "acc_stderr": 0.026236965881153266, + "acc_norm": 0.3086816720257235, + "acc_norm_stderr": 0.026236965881153266 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.31390134529147984, + "acc_stderr": 0.031146796482972465, + "acc_norm": 0.31390134529147984, + "acc_norm_stderr": 0.031146796482972465 + }, + 
"harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2748091603053435, + "acc_stderr": 0.039153454088478354, + "acc_norm": 0.2748091603053435, + "acc_norm_stderr": 0.039153454088478354 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.31, + "acc_stderr": 0.046482319871173156, + "acc_norm": 0.31, + "acc_norm_stderr": 0.046482319871173156 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.29292929292929293, + "acc_stderr": 0.03242497958178815, + "acc_norm": 0.29292929292929293, + "acc_norm_stderr": 0.03242497958178815 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3448275862068966, + "acc_stderr": 0.03960933549451209, + "acc_norm": 0.3448275862068966, + "acc_norm_stderr": 0.03960933549451209 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.040925639582376536, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.040925639582376536 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3277310924369748, + "acc_stderr": 0.03048991141767323, + "acc_norm": 0.3277310924369748, + "acc_norm_stderr": 0.03048991141767323 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3487179487179487, + "acc_stderr": 0.02416278028401772, + "acc_norm": 0.3487179487179487, + "acc_norm_stderr": 0.02416278028401772 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.23, + "acc_stderr": 0.042295258468165044, + "acc_norm": 0.23, + "acc_norm_stderr": 0.042295258468165044 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4537037037037037, + "acc_stderr": 0.04812917324536823, + "acc_norm": 0.4537037037037037, + "acc_norm_stderr": 0.04812917324536823 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.29064039408866993, + "acc_stderr": 0.0319474007226554, + "acc_norm": 0.29064039408866993, + "acc_norm_stderr": 
0.0319474007226554 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3096774193548387, + "acc_stderr": 0.026302774983517418, + "acc_norm": 0.3096774193548387, + "acc_norm_stderr": 0.026302774983517418 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.452991452991453, + "acc_stderr": 0.032610998730986204, + "acc_norm": 0.452991452991453, + "acc_norm_stderr": 0.032610998730986204 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2792452830188679, + "acc_stderr": 0.02761116340239972, + "acc_norm": 0.2792452830188679, + "acc_norm_stderr": 0.02761116340239972 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.32727272727272727, + "acc_stderr": 0.04494290866252088, + "acc_norm": 0.32727272727272727, + "acc_norm_stderr": 0.04494290866252088 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.21481481481481482, + "acc_stderr": 0.025040443877000693, + "acc_norm": 0.21481481481481482, + "acc_norm_stderr": 0.025040443877000693 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.26490066225165565, + "acc_stderr": 0.03603038545360385, + "acc_norm": 0.26490066225165565, + "acc_norm_stderr": 0.03603038545360385 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.34328358208955223, + "acc_stderr": 0.03357379665433431, + "acc_norm": 0.34328358208955223, + "acc_norm_stderr": 0.03357379665433431 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3063583815028902, + "acc_stderr": 0.03514942551267437, + "acc_norm": 0.3063583815028902, + "acc_norm_stderr": 0.03514942551267437 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.25132275132275134, + "acc_stderr": 0.022340482339643898, + "acc_norm": 0.25132275132275134, + "acc_norm_stderr": 0.022340482339643898 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2638888888888889, + "acc_stderr": 0.03685651095897532, + "acc_norm": 0.2638888888888889, + "acc_norm_stderr": 0.03685651095897532 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.24, + "acc_stderr": 
0.04292346959909283, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.3236994219653179, + "acc_stderr": 0.0251901813276084, + "acc_norm": 0.3236994219653179, + "acc_norm_stderr": 0.0251901813276084 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.27607361963190186, + "acc_stderr": 0.0351238528370505, + "acc_norm": 0.27607361963190186, + "acc_norm_stderr": 0.0351238528370505 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3271604938271605, + "acc_stderr": 0.026105673861409814, + "acc_norm": 0.3271604938271605, + "acc_norm_stderr": 0.026105673861409814 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.3160621761658031, + "acc_stderr": 0.033553973696861736, + "acc_norm": 0.3160621761658031, + "acc_norm_stderr": 0.033553973696861736 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.19298245614035087, + "acc_stderr": 0.037124548537213684, + "acc_norm": 0.19298245614035087, + "acc_norm_stderr": 0.037124548537213684 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.30825688073394497, + "acc_stderr": 0.01979836669836725, + "acc_norm": 0.30825688073394497, + "acc_norm_stderr": 0.01979836669836725 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.31746031746031744, + "acc_stderr": 0.04163453031302859, + "acc_norm": 0.31746031746031744, + "acc_norm_stderr": 0.04163453031302859 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.369281045751634, + "acc_stderr": 0.02763417668960266, + "acc_norm": 0.369281045751634, + "acc_norm_stderr": 0.02763417668960266 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.22, + "acc_stderr": 
0.041633319989322695, + "acc_norm": 0.22, + "acc_norm_stderr": 0.041633319989322695 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.4049586776859504, + "acc_stderr": 0.044811377559424694, + "acc_norm": 0.4049586776859504, + "acc_norm_stderr": 0.044811377559424694 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.035834961763610625, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.035834961763610625 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2875816993464052, + "acc_stderr": 0.018311653053648222, + "acc_norm": 0.2875816993464052, + "acc_norm_stderr": 0.018311653053648222 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.24113475177304963, + "acc_stderr": 0.025518731049537773, + "acc_norm": 0.24113475177304963, + "acc_norm_stderr": 0.025518731049537773 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.32142857142857145, + "acc_stderr": 0.044328040552915206, + "acc_norm": 0.32142857142857145, + "acc_norm_stderr": 0.044328040552915206 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.33796296296296297, + "acc_stderr": 0.03225941352631295, + "acc_norm": 0.33796296296296297, + "acc_norm_stderr": 0.03225941352631295 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.26033519553072626, + "acc_stderr": 0.014676252009319463, + "acc_norm": 0.26033519553072626, + "acc_norm_stderr": 0.014676252009319463 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.02576725201085595, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.02576725201085595 + }, + 
"harness|ko_mmlu_security_studies|5": { + "acc": 0.37142857142857144, + "acc_stderr": 0.030932858792789848, + "acc_norm": 0.37142857142857144, + "acc_norm_stderr": 0.030932858792789848 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.21940928270042195, + "acc_stderr": 0.026939106581553945, + "acc_norm": 0.21940928270042195, + "acc_norm_stderr": 0.026939106581553945 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.26792698826597133, + "acc_stderr": 0.011311347690633886, + "acc_norm": 0.26792698826597133, + "acc_norm_stderr": 0.011311347690633886 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.030190282453501943, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.030190282453501943 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2787878787878788, + "acc_stderr": 0.03501438706296781, + "acc_norm": 0.2787878787878788, + "acc_norm_stderr": 0.03501438706296781 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2717258261933905, + "mc1_stderr": 0.015572840452875835, + "mc2": 0.43296733660801473, + "mc2_stderr": 0.015927191551239974 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2255017709563164, + "acc_stderr": 0.01436812214953218, + "acc_norm": 0.30460448642266824, + "acc_norm_stderr": 0.01582336727312938 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + 
"harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + 
"harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "heegyu/LIMA-13b-hf", + "model_sha": "98faa74a9b41cbd9033904cd58420705936849eb", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/heegyu/LIMA2-7b-hf/result_2023-10-15 11:18:57.json b/heegyu/LIMA2-7b-hf/result_2023-10-15 11:18:57.json new file mode 100644 index 0000000000000000000000000000000000000000..30cc2bf12bc48e9112636b08b047003c600b77ce --- /dev/null +++ b/heegyu/LIMA2-7b-hf/result_2023-10-15 11:18:57.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.26706484641638223, + "acc_stderr": 0.01292893319649633, + "acc_norm": 0.3046075085324232, + "acc_norm_stderr": 0.013449522109932492 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3309101772555268, + "acc_stderr": 0.004695791340502858, + "acc_norm": 0.4010157339175463, + "acc_norm_stderr": 0.0048910255336330226 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.3684210526315789, + "acc_stderr": 0.036996580176568775, + "acc_norm": 0.3684210526315789, + "acc_norm_stderr": 0.036996580176568775 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.3883495145631068, + "acc_stderr": 0.0482572933735639, + "acc_norm": 0.3883495145631068, + "acc_norm_stderr": 0.0482572933735639 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2937420178799489, + "acc_stderr": 0.016287759388491675, + "acc_norm": 0.2937420178799489, + "acc_norm_stderr": 0.016287759388491675 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34814814814814815, + "acc_stderr": 0.041153246103369526, + "acc_norm": 0.34814814814814815, + "acc_norm_stderr": 0.041153246103369526 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720683, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720683 + }, + 
"harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2297872340425532, + "acc_stderr": 0.027501752944412424, + "acc_norm": 0.2297872340425532, + "acc_norm_stderr": 0.027501752944412424 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.24096385542168675, + "acc_stderr": 0.03329394119073532, + "acc_norm": 0.24096385542168675, + "acc_norm_stderr": 0.03329394119073532 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2829581993569132, + "acc_stderr": 0.02558306248998482, + "acc_norm": 0.2829581993569132, + "acc_norm_stderr": 0.02558306248998482 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.19730941704035873, + "acc_stderr": 0.02670985334496796, + "acc_norm": 0.19730941704035873, + "acc_norm_stderr": 0.02670985334496796 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3053435114503817, + "acc_stderr": 0.040393149787245605, + "acc_norm": 0.3053435114503817, + "acc_norm_stderr": 0.040393149787245605 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.35858585858585856, + "acc_stderr": 0.03416903640391521, + "acc_norm": 0.35858585858585856, + "acc_norm_stderr": 0.03416903640391521 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.25517241379310346, + "acc_stderr": 0.03632984052707842, + "acc_norm": 0.25517241379310346, + "acc_norm_stderr": 0.03632984052707842 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.04488482852329017, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.04488482852329017 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.27310924369747897, + "acc_stderr": 0.028942004040998167, + "acc_norm": 0.27310924369747897, + "acc_norm_stderr": 0.028942004040998167 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3153846153846154, + "acc_stderr": 0.023559646983189957, + 
"acc_norm": 0.3153846153846154, + "acc_norm_stderr": 0.023559646983189957 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.25, + "acc_stderr": 0.04186091791394607, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04186091791394607 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3054187192118227, + "acc_stderr": 0.03240661565868408, + "acc_norm": 0.3054187192118227, + "acc_norm_stderr": 0.03240661565868408 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.32903225806451614, + "acc_stderr": 0.02672949906834996, + "acc_norm": 0.32903225806451614, + "acc_norm_stderr": 0.02672949906834996 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.38461538461538464, + "acc_stderr": 0.03187195347942466, + "acc_norm": 0.38461538461538464, + "acc_norm_stderr": 0.03187195347942466 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2981132075471698, + "acc_stderr": 0.028152837942493857, + "acc_norm": 0.2981132075471698, + "acc_norm_stderr": 0.028152837942493857 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2909090909090909, + "acc_stderr": 0.04350271442923243, + "acc_norm": 0.2909090909090909, + "acc_norm_stderr": 0.04350271442923243 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2814814814814815, + "acc_stderr": 0.027420019350945273, + "acc_norm": 0.2814814814814815, + "acc_norm_stderr": 0.027420019350945273 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.03757949922943342, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.03757949922943342 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.36318407960199006, + "acc_stderr": 0.034005985055990146, + 
"acc_norm": 0.36318407960199006, + "acc_norm_stderr": 0.034005985055990146 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.26011560693641617, + "acc_stderr": 0.033450369167889904, + "acc_norm": 0.26011560693641617, + "acc_norm_stderr": 0.033450369167889904 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2830687830687831, + "acc_stderr": 0.02320139293819498, + "acc_norm": 0.2830687830687831, + "acc_norm_stderr": 0.02320139293819498 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3263888888888889, + "acc_stderr": 0.03921067198982266, + "acc_norm": 0.3263888888888889, + "acc_norm_stderr": 0.03921067198982266 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2976878612716763, + "acc_stderr": 0.024617055388677003, + "acc_norm": 0.2976878612716763, + "acc_norm_stderr": 0.024617055388677003 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3067484662576687, + "acc_stderr": 0.036230899157241474, + "acc_norm": 0.3067484662576687, + "acc_norm_stderr": 0.036230899157241474 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.33641975308641975, + "acc_stderr": 0.02628973494595293, + "acc_norm": 0.33641975308641975, + "acc_norm_stderr": 0.02628973494595293 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.32124352331606215, + "acc_stderr": 0.033699508685490674, + "acc_norm": 0.32124352331606215, + "acc_norm_stderr": 0.033699508685490674 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 
0.03999423879281336, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.03999423879281336 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.30091743119266057, + "acc_stderr": 0.019664751366802114, + "acc_norm": 0.30091743119266057, + "acc_norm_stderr": 0.019664751366802114 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.04134913018303316, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.04134913018303316 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3366013071895425, + "acc_stderr": 0.027057974624494382, + "acc_norm": 0.3366013071895425, + "acc_norm_stderr": 0.027057974624494382 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2809917355371901, + "acc_stderr": 0.041032038305145124, + "acc_norm": 0.2809917355371901, + "acc_norm_stderr": 0.041032038305145124 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.34210526315789475, + "acc_stderr": 0.03860731599316091, + "acc_norm": 0.34210526315789475, + "acc_norm_stderr": 0.03860731599316091 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2696078431372549, + "acc_stderr": 0.017952449196987866, + "acc_norm": 0.2696078431372549, + "acc_norm_stderr": 0.017952449196987866 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2801418439716312, + "acc_stderr": 0.026789172351140242, + "acc_norm": 0.2801418439716312, + "acc_norm_stderr": 0.026789172351140242 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.17857142857142858, + "acc_stderr": 0.036352091215778065, + "acc_norm": 0.17857142857142858, + "acc_norm_stderr": 0.036352091215778065 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.38425925925925924, + "acc_stderr": 0.03317354514310742, + "acc_norm": 0.38425925925925924, + "acc_norm_stderr": 0.03317354514310742 + }, + 
"harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24804469273743016, + "acc_stderr": 0.01444415780826145, + "acc_norm": 0.24804469273743016, + "acc_norm_stderr": 0.01444415780826145 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932267, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932267 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.16911764705882354, + "acc_stderr": 0.02277086801011303, + "acc_norm": 0.16911764705882354, + "acc_norm_stderr": 0.02277086801011303 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3306122448979592, + "acc_stderr": 0.03011642629654059, + "acc_norm": 0.3306122448979592, + "acc_norm_stderr": 0.03011642629654059 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2489451476793249, + "acc_stderr": 0.028146970599422644, + "acc_norm": 0.2489451476793249, + "acc_norm_stderr": 0.028146970599422644 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.26988265971316816, + "acc_stderr": 0.011337381084250404, + "acc_norm": 0.26988265971316816, + "acc_norm_stderr": 0.011337381084250404 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.0313217980308329, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.0313217980308329 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.03453131801885416, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.03453131801885416 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.27539779681762544, + "mc1_stderr": 0.015638135667775523, + "mc2": 0.45638880812290744, + "mc2_stderr": 0.01588078280533526 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.24557260920897284, + "acc_stderr": 0.014798357154972823, + "acc_norm": 
0.2987012987012987, + "acc_norm_stderr": 0.015735657391438285 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + 
"harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "heegyu/LIMA2-7b-hf", + "model_sha": "6a1aa59cb7624f059728840ce68b20b1070ebdcb", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/heegyu/Mistral-7B-v0.1-OKI-v20231124-1e-5/result_2023-11-27 11:21:57.json b/heegyu/Mistral-7B-v0.1-OKI-v20231124-1e-5/result_2023-11-27 11:21:57.json new file mode 100644 index 0000000000000000000000000000000000000000..7a1de2160ac2cb36c3c5404134d6d653e36a12ad --- /dev/null +++ b/heegyu/Mistral-7B-v0.1-OKI-v20231124-1e-5/result_2023-11-27 11:21:57.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.34897610921501704, + "acc_stderr": 0.013928933461382501, + "acc_norm": 0.39419795221843, + "acc_norm_stderr": 0.01428052266746732 + }, + "harness|ko_hellaswag|10": { + "acc": 0.37582154949213303, + "acc_stderr": 0.004833444556338624, + "acc_norm": 0.4905397331208923, + "acc_norm_stderr": 0.004988888194063274 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5146198830409356, + "acc_stderr": 0.038331852752130254, + "acc_norm": 
0.5146198830409356, + "acc_norm_stderr": 0.038331852752130254 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5922330097087378, + "acc_stderr": 0.048657775704107675, + "acc_norm": 0.5922330097087378, + "acc_norm_stderr": 0.048657775704107675 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4827586206896552, + "acc_stderr": 0.017869330154003705, + "acc_norm": 0.4827586206896552, + "acc_norm_stderr": 0.017869330154003705 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3851851851851852, + "acc_stderr": 0.042039210401562783, + "acc_norm": 0.3851851851851852, + "acc_norm_stderr": 0.042039210401562783 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932269, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932269 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4, + "acc_stderr": 0.03202563076101735, + "acc_norm": 0.4, + "acc_norm_stderr": 0.03202563076101735 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.40963855421686746, + "acc_stderr": 0.03828401115079021, + "acc_norm": 0.40963855421686746, + "acc_norm_stderr": 0.03828401115079021 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4887459807073955, + "acc_stderr": 0.028390897396863526, + "acc_norm": 0.4887459807073955, + "acc_norm_stderr": 0.028390897396863526 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4663677130044843, + "acc_stderr": 0.033481800170603065, + "acc_norm": 0.4663677130044843, + "acc_norm_stderr": 0.033481800170603065 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48854961832061067, + "acc_stderr": 0.043841400240780176, + "acc_norm": 0.48854961832061067, + "acc_norm_stderr": 0.043841400240780176 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.44, + "acc_stderr": 0.0498887651569859, + "acc_norm": 0.44, + "acc_norm_stderr": 0.0498887651569859 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5757575757575758, + "acc_stderr": 0.03521224908841586, + "acc_norm": 0.5757575757575758, + 
"acc_norm_stderr": 0.03521224908841586 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4896551724137931, + "acc_stderr": 0.04165774775728763, + "acc_norm": 0.4896551724137931, + "acc_norm_stderr": 0.04165774775728763 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.041583075330832865, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.041583075330832865 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5126050420168067, + "acc_stderr": 0.032468167657521745, + "acc_norm": 0.5126050420168067, + "acc_norm_stderr": 0.032468167657521745 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.46153846153846156, + "acc_stderr": 0.02527589207024063, + "acc_norm": 0.46153846153846156, + "acc_norm_stderr": 0.02527589207024063 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.458128078817734, + "acc_stderr": 0.03505630140785742, + "acc_norm": 0.458128078817734, + "acc_norm_stderr": 0.03505630140785742 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4774193548387097, + "acc_stderr": 0.028414985019707868, + "acc_norm": 0.4774193548387097, + "acc_norm_stderr": 0.028414985019707868 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7350427350427351, + "acc_stderr": 0.02891120880274947, + "acc_norm": 0.7350427350427351, + "acc_norm_stderr": 0.02891120880274947 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4528301886792453, + "acc_stderr": 0.030635627957961827, + "acc_norm": 
0.4528301886792453, + "acc_norm_stderr": 0.030635627957961827 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5545454545454546, + "acc_stderr": 0.047605488214603246, + "acc_norm": 0.5545454545454546, + "acc_norm_stderr": 0.047605488214603246 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.32592592592592595, + "acc_stderr": 0.02857834836547308, + "acc_norm": 0.32592592592592595, + "acc_norm_stderr": 0.02857834836547308 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.03780445850526732, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.03780445850526732 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6218905472636815, + "acc_stderr": 0.034288678487786564, + "acc_norm": 0.6218905472636815, + "acc_norm_stderr": 0.034288678487786564 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.41040462427745666, + "acc_stderr": 0.03750757044895537, + "acc_norm": 0.41040462427745666, + "acc_norm_stderr": 0.03750757044895537 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3492063492063492, + "acc_stderr": 0.024552292209342654, + "acc_norm": 0.3492063492063492, + "acc_norm_stderr": 0.024552292209342654 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.040166600304512336, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.040166600304512336 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.62, + "acc_stderr": 0.048783173121456344, + "acc_norm": 0.62, + "acc_norm_stderr": 0.048783173121456344 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5433526011560693, + "acc_stderr": 0.026817718130348913, + "acc_norm": 0.5433526011560693, + "acc_norm_stderr": 0.026817718130348913 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5398773006134969, + 
"acc_stderr": 0.039158572914369714, + "acc_norm": 0.5398773006134969, + "acc_norm_stderr": 0.039158572914369714 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.027744313443376536, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.027744313443376536 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.538860103626943, + "acc_stderr": 0.03597524411734578, + "acc_norm": 0.538860103626943, + "acc_norm_stderr": 0.03597524411734578 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.32456140350877194, + "acc_stderr": 0.04404556157374768, + "acc_norm": 0.32456140350877194, + "acc_norm_stderr": 0.04404556157374768 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.4972477064220184, + "acc_stderr": 0.02143699835976532, + "acc_norm": 0.4972477064220184, + "acc_norm_stderr": 0.02143699835976532 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.38095238095238093, + "acc_stderr": 0.04343525428949098, + "acc_norm": 0.38095238095238093, + "acc_norm_stderr": 0.04343525428949098 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5032679738562091, + "acc_stderr": 0.028629305194003543, + "acc_norm": 0.5032679738562091, + "acc_norm_stderr": 0.028629305194003543 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6942148760330579, + "acc_stderr": 0.04205953933884123, + "acc_norm": 0.6942148760330579, + "acc_norm_stderr": 0.04205953933884123 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4605263157894737, + "acc_stderr": 0.04056242252249033, + "acc_norm": 0.4605263157894737, + "acc_norm_stderr": 0.04056242252249033 + }, + "harness|ko_mmlu_professional_psychology|5": { + 
"acc": 0.39215686274509803, + "acc_stderr": 0.019751726508762633, + "acc_norm": 0.39215686274509803, + "acc_norm_stderr": 0.019751726508762633 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3723404255319149, + "acc_stderr": 0.02883892147125146, + "acc_norm": 0.3723404255319149, + "acc_norm_stderr": 0.02883892147125146 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.04697113923010212, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.04697113923010212 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4212962962962963, + "acc_stderr": 0.03367462138896078, + "acc_norm": 0.4212962962962963, + "acc_norm_stderr": 0.03367462138896078 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2871508379888268, + "acc_stderr": 0.015131608849963753, + "acc_norm": 0.2871508379888268, + "acc_norm_stderr": 0.015131608849963753 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.41544117647058826, + "acc_stderr": 0.029935342707877746, + "acc_norm": 0.41544117647058826, + "acc_norm_stderr": 0.029935342707877746 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5469387755102041, + "acc_stderr": 0.031867859300041275, + "acc_norm": 0.5469387755102041, + "acc_norm_stderr": 0.031867859300041275 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6160337552742616, + "acc_stderr": 0.031658678064106674, + "acc_norm": 0.6160337552742616, + "acc_norm_stderr": 0.031658678064106674 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3318122555410691, + "acc_stderr": 0.012026088259897634, + "acc_norm": 0.3318122555410691, + "acc_norm_stderr": 0.012026088259897634 + }, 
+ "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.47058823529411764, + "acc_stderr": 0.03503235296367992, + "acc_norm": 0.47058823529411764, + "acc_norm_stderr": 0.03503235296367992 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.03895658065271846, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.03895658065271846 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.27050183598531213, + "mc1_stderr": 0.015550778332842883, + "mc2": 0.45246749534262715, + "mc2_stderr": 0.015187331640958925 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.49940968122786306, + "acc_stderr": 0.01719034212344866, + "acc_norm": 0.5371900826446281, + "acc_norm_stderr": 0.0171427361176433 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 
1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "heegyu/Mistral-7B-v0.1-OKI-v20231124-1e-5", + "model_sha": "34992c8cda5bbebf1cfbf5d0d0ecc71dcbaa77e0", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/heegyu/WizardVicuna-3B-0719/result_2023-10-15 11:20:31.json b/heegyu/WizardVicuna-3B-0719/result_2023-10-15 11:20:31.json new file mode 100644 index 
0000000000000000000000000000000000000000..7853cf5b44b55ad1a96f4d98c9a42d9ec3ef8462 --- /dev/null +++ b/heegyu/WizardVicuna-3B-0719/result_2023-10-15 11:20:31.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.19112627986348124, + "acc_stderr": 0.011490055292778599, + "acc_norm": 0.22781569965870307, + "acc_norm_stderr": 0.012256708602326907 + }, + "harness|ko_hellaswag|10": { + "acc": 0.2822146982672774, + "acc_stderr": 0.004491574539441884, + "acc_norm": 0.30770762796255724, + "acc_norm_stderr": 0.004606015773125627 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.21052631578947367, + "acc_stderr": 0.0312678171466318, + "acc_norm": 0.21052631578947367, + "acc_norm_stderr": 0.0312678171466318 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2524271844660194, + "acc_stderr": 0.04301250399690877, + "acc_norm": 0.2524271844660194, + "acc_norm_stderr": 0.04301250399690877 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.24393358876117496, + "acc_stderr": 0.015357212665829479, + "acc_norm": 0.24393358876117496, + "acc_norm_stderr": 0.015357212665829479 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2518518518518518, + "acc_stderr": 0.03749850709174021, + "acc_norm": 0.2518518518518518, + "acc_norm_stderr": 0.03749850709174021 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932269, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932269 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.30638297872340425, + "acc_stderr": 0.030135906478517563, + "acc_norm": 0.30638297872340425, + "acc_norm_stderr": 0.030135906478517563 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3192771084337349, + "acc_stderr": 0.03629335329947859, + "acc_norm": 0.3192771084337349, + "acc_norm_stderr": 0.03629335329947859 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.24758842443729903, + "acc_stderr": 0.024513879973621963, + "acc_norm": 0.24758842443729903, + "acc_norm_stderr": 
0.024513879973621963 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.32286995515695066, + "acc_stderr": 0.031381476375754995, + "acc_norm": 0.32286995515695066, + "acc_norm_stderr": 0.031381476375754995 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2595419847328244, + "acc_stderr": 0.03844876139785271, + "acc_norm": 0.2595419847328244, + "acc_norm_stderr": 0.03844876139785271 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.1919191919191919, + "acc_stderr": 0.028057791672989017, + "acc_norm": 0.1919191919191919, + "acc_norm_stderr": 0.028057791672989017 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.27586206896551724, + "acc_stderr": 0.03724563619774633, + "acc_norm": 0.27586206896551724, + "acc_norm_stderr": 0.03724563619774633 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.17647058823529413, + "acc_stderr": 0.03793281185307809, + "acc_norm": 0.17647058823529413, + "acc_norm_stderr": 0.03793281185307809 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.226890756302521, + "acc_stderr": 0.027205371538279472, + "acc_norm": 0.226890756302521, + "acc_norm_stderr": 0.027205371538279472 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.20256410256410257, + "acc_stderr": 0.020377660970371383, + "acc_norm": 0.20256410256410257, + "acc_norm_stderr": 0.020377660970371383 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036845, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036845 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.040191074725573483, + "acc_norm": 0.2222222222222222, 
+ "acc_norm_stderr": 0.040191074725573483 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.27586206896551724, + "acc_stderr": 0.03144712581678243, + "acc_norm": 0.27586206896551724, + "acc_norm_stderr": 0.03144712581678243 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.22903225806451613, + "acc_stderr": 0.023904914311782658, + "acc_norm": 0.22903225806451613, + "acc_norm_stderr": 0.023904914311782658 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.28205128205128205, + "acc_stderr": 0.029480360549541194, + "acc_norm": 0.28205128205128205, + "acc_norm_stderr": 0.029480360549541194 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.22641509433962265, + "acc_stderr": 0.025757559893106737, + "acc_norm": 0.22641509433962265, + "acc_norm_stderr": 0.025757559893106737 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.22727272727272727, + "acc_stderr": 0.04013964554072775, + "acc_norm": 0.22727272727272727, + "acc_norm_stderr": 0.04013964554072775 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.02684205787383371, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.02684205787383371 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2052980132450331, + "acc_stderr": 0.03297986648473835, + "acc_norm": 0.2052980132450331, + "acc_norm_stderr": 0.03297986648473835 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.22885572139303484, + "acc_stderr": 0.02970528405677243, + "acc_norm": 0.22885572139303484, + "acc_norm_stderr": 0.02970528405677243 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2254335260115607, + "acc_stderr": 0.03186209851641144, + "acc_norm": 0.2254335260115607, + "acc_norm_stderr": 0.03186209851641144 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.20634920634920634, + "acc_stderr": 0.020842290930114665, + "acc_norm": 0.20634920634920634, + "acc_norm_stderr": 0.020842290930114665 + }, + 
"harness|ko_mmlu_college_biology|5": { + "acc": 0.2361111111111111, + "acc_stderr": 0.03551446610810826, + "acc_norm": 0.2361111111111111, + "acc_norm_stderr": 0.03551446610810826 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.19, + "acc_stderr": 0.03942772444036623, + "acc_norm": 0.19, + "acc_norm_stderr": 0.03942772444036623 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932269, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932269 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2514450867052023, + "acc_stderr": 0.02335736578587403, + "acc_norm": 0.2514450867052023, + "acc_norm_stderr": 0.02335736578587403 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2392638036809816, + "acc_stderr": 0.033519538795212696, + "acc_norm": 0.2392638036809816, + "acc_norm_stderr": 0.033519538795212696 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2654320987654321, + "acc_stderr": 0.02456922360046085, + "acc_norm": 0.2654320987654321, + "acc_norm_stderr": 0.02456922360046085 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.16580310880829016, + "acc_stderr": 0.026839845022314415, + "acc_norm": 0.16580310880829016, + "acc_norm_stderr": 0.026839845022314415 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.0404933929774814, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.0404933929774814 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.2036697247706422, + "acc_stderr": 0.017266742087630783, + "acc_norm": 0.2036697247706422, + "acc_norm_stderr": 0.017266742087630783 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.040061680838488774, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 
0.040061680838488774 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.22875816993464052, + "acc_stderr": 0.02405102973991225, + "acc_norm": 0.22875816993464052, + "acc_norm_stderr": 0.02405102973991225 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2975206611570248, + "acc_stderr": 0.041733491480835, + "acc_norm": 0.2975206611570248, + "acc_norm_stderr": 0.041733491480835 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.2236842105263158, + "acc_stderr": 0.03391160934343604, + "acc_norm": 0.2236842105263158, + "acc_norm_stderr": 0.03391160934343604 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.25163398692810457, + "acc_stderr": 0.01755581809132227, + "acc_norm": 0.25163398692810457, + "acc_norm_stderr": 0.01755581809132227 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.24113475177304963, + "acc_stderr": 0.02551873104953775, + "acc_norm": 0.24113475177304963, + "acc_norm_stderr": 0.02551873104953775 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3392857142857143, + "acc_stderr": 0.04493949068613539, + "acc_norm": 0.3392857142857143, + "acc_norm_stderr": 0.04493949068613539 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.1712962962962963, + "acc_stderr": 0.025695341643824688, + "acc_norm": 0.1712962962962963, + "acc_norm_stderr": 0.025695341643824688 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23128491620111732, + "acc_stderr": 0.01410222362315258, + "acc_norm": 0.23128491620111732, + "acc_norm_stderr": 0.01410222362315258 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.16, + "acc_stderr": 0.03684529491774708, + "acc_norm": 0.16, + "acc_norm_stderr": 0.03684529491774708 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + 
"acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.20220588235294118, + "acc_stderr": 0.02439819298665492, + "acc_norm": 0.20220588235294118, + "acc_norm_stderr": 0.02439819298665492 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.22857142857142856, + "acc_stderr": 0.026882144922307744, + "acc_norm": 0.22857142857142856, + "acc_norm_stderr": 0.026882144922307744 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.25738396624472576, + "acc_stderr": 0.02845882099146029, + "acc_norm": 0.25738396624472576, + "acc_norm_stderr": 0.02845882099146029 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2301173402868318, + "acc_stderr": 0.010750183177375559, + "acc_norm": 0.2301173402868318, + "acc_norm_stderr": 0.010750183177375559 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.25, + "acc_stderr": 0.03039153369274154, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03039153369274154 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.20606060606060606, + "acc_stderr": 0.0315841532404771, + "acc_norm": 0.20606060606060606, + "acc_norm_stderr": 0.0315841532404771 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2876376988984088, + "mc1_stderr": 0.015846315101394805, + "mc2": 0.4653887573676535, + "mc2_stderr": 0.01614389294463642 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.22077922077922077, + "acc_stderr": 0.014260152803540035, + "acc_norm": 0.27863046044864226, + "acc_norm_stderr": 0.015413739494345689 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + 
"harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + 
"harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "heegyu/WizardVicuna-3B-0719", + "model_sha": "66621ebc9e2fa15e4fe229dfbea725c916cb7c5e", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/heegyu/WizardVicuna-open-llama-3b-v2/result_2023-10-14 16:00:50.json b/heegyu/WizardVicuna-open-llama-3b-v2/result_2023-10-14 16:00:50.json new file mode 100644 index 0000000000000000000000000000000000000000..8ba997695f5af29a75b5478934ed1dcd72594916 --- /dev/null +++ b/heegyu/WizardVicuna-open-llama-3b-v2/result_2023-10-14 16:00:50.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.20563139931740615, + "acc_stderr": 0.011810745260742581, + "acc_norm": 0.257679180887372, + "acc_norm_stderr": 0.012780770562768414 + }, + "harness|ko_hellaswag|10": { + "acc": 0.27693686516630156, + "acc_stderr": 0.004465704810893538, + "acc_norm": 0.30611431985660226, + "acc_norm_stderr": 0.004599358920909526 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.34502923976608185, + "acc_stderr": 0.03645981377388807, + "acc_norm": 0.34502923976608185, + "acc_norm_stderr": 0.03645981377388807 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.23300970873786409, + "acc_stderr": 0.04185832598928315, + "acc_norm": 0.23300970873786409, + "acc_norm_stderr": 0.04185832598928315 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.24904214559386972, + "acc_stderr": 0.015464676163395983, + "acc_norm": 0.24904214559386972, + "acc_norm_stderr": 0.015464676163395983 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3037037037037037, + "acc_stderr": 
0.039725528847851375, + "acc_norm": 0.3037037037037037, + "acc_norm_stderr": 0.039725528847851375 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036846, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036846 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2680851063829787, + "acc_stderr": 0.02895734278834235, + "acc_norm": 0.2680851063829787, + "acc_norm_stderr": 0.02895734278834235 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.2289156626506024, + "acc_stderr": 0.03270745277352477, + "acc_norm": 0.2289156626506024, + "acc_norm_stderr": 0.03270745277352477 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.20257234726688103, + "acc_stderr": 0.022827317491059682, + "acc_norm": 0.20257234726688103, + "acc_norm_stderr": 0.022827317491059682 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.2825112107623318, + "acc_stderr": 0.030216831011508762, + "acc_norm": 0.2825112107623318, + "acc_norm_stderr": 0.030216831011508762 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2366412213740458, + "acc_stderr": 0.03727673575596918, + "acc_norm": 0.2366412213740458, + "acc_norm_stderr": 0.03727673575596918 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.23, + "acc_stderr": 0.042295258468165044, + "acc_norm": 0.23, + "acc_norm_stderr": 0.042295258468165044 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.21212121212121213, + "acc_stderr": 0.029126522834586825, + "acc_norm": 0.21212121212121213, + "acc_norm_stderr": 0.029126522834586825 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2689655172413793, + "acc_stderr": 0.03695183311650232, + "acc_norm": 0.2689655172413793, + "acc_norm_stderr": 0.03695183311650232 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.04220773659171452, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.04220773659171452 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 
0.23529411764705882, + "acc_stderr": 0.027553614467863773, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.027553614467863773 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2230769230769231, + "acc_stderr": 0.021107730127243998, + "acc_norm": 0.2230769230769231, + "acc_norm_stderr": 0.021107730127243998 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421296, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421296 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.2, + "acc_stderr": 0.040201512610368445, + "acc_norm": 0.2, + "acc_norm_stderr": 0.040201512610368445 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.044531975073749834, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.044531975073749834 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.28078817733990147, + "acc_stderr": 0.0316185633535861, + "acc_norm": 0.28078817733990147, + "acc_norm_stderr": 0.0316185633535861 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.22258064516129034, + "acc_stderr": 0.02366421667164251, + "acc_norm": 0.22258064516129034, + "acc_norm_stderr": 0.02366421667164251 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2905982905982906, + "acc_stderr": 0.029745048572674054, + "acc_norm": 0.2905982905982906, + "acc_norm_stderr": 0.029745048572674054 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.23018867924528302, + "acc_stderr": 0.025907897122408173, + "acc_norm": 0.23018867924528302, + "acc_norm_stderr": 0.025907897122408173 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.22727272727272727, + "acc_stderr": 0.040139645540727735, + "acc_norm": 0.22727272727272727, + "acc_norm_stderr": 0.040139645540727735 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24814814814814815, + "acc_stderr": 0.0263357394040558, + "acc_norm": 0.24814814814814815, + "acc_norm_stderr": 0.0263357394040558 + }, + 
"harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2781456953642384, + "acc_stderr": 0.03658603262763744, + "acc_norm": 0.2781456953642384, + "acc_norm_stderr": 0.03658603262763744 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.25870646766169153, + "acc_stderr": 0.030965903123573037, + "acc_norm": 0.25870646766169153, + "acc_norm_stderr": 0.030965903123573037 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.21965317919075145, + "acc_stderr": 0.031568093627031744, + "acc_norm": 0.21965317919075145, + "acc_norm_stderr": 0.031568093627031744 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.24867724867724866, + "acc_stderr": 0.022261817692400175, + "acc_norm": 0.24867724867724866, + "acc_norm_stderr": 0.022261817692400175 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2569444444444444, + "acc_stderr": 0.03653946969442099, + "acc_norm": 0.2569444444444444, + "acc_norm_stderr": 0.03653946969442099 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816507, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816507 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.28034682080924855, + "acc_stderr": 0.024182427496577622, + "acc_norm": 0.28034682080924855, + "acc_norm_stderr": 0.024182427496577622 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.25153374233128833, + "acc_stderr": 0.034089978868575295, + "acc_norm": 0.25153374233128833, + "acc_norm_stderr": 0.034089978868575295 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2808641975308642, + "acc_stderr": 0.025006469755799215, + "acc_norm": 0.2808641975308642, + "acc_norm_stderr": 0.025006469755799215 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909283 
+ }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.21243523316062177, + "acc_stderr": 0.02951928261681725, + "acc_norm": 0.21243523316062177, + "acc_norm_stderr": 0.02951928261681725 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.03999423879281336, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.03999423879281336 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.2036697247706422, + "acc_stderr": 0.01726674208763079, + "acc_norm": 0.2036697247706422, + "acc_norm_stderr": 0.01726674208763079 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.18253968253968253, + "acc_stderr": 0.03455071019102148, + "acc_norm": 0.18253968253968253, + "acc_norm_stderr": 0.03455071019102148 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.25163398692810457, + "acc_stderr": 0.0248480182638752, + "acc_norm": 0.25163398692810457, + "acc_norm_stderr": 0.0248480182638752 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542129, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542129 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.371900826446281, + "acc_stderr": 0.04412015806624504, + "acc_norm": 0.371900826446281, + "acc_norm_stderr": 0.04412015806624504 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.19736842105263158, + "acc_stderr": 0.03238981601699397, + "acc_norm": 0.19736842105263158, + "acc_norm_stderr": 0.03238981601699397 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2826797385620915, + "acc_stderr": 0.018217269552053435, + "acc_norm": 0.2826797385620915, + "acc_norm_stderr": 0.018217269552053435 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.26595744680851063, + "acc_stderr": 0.026358065698880592, + "acc_norm": 0.26595744680851063, + "acc_norm_stderr": 0.026358065698880592 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3125, + "acc_stderr": 0.043994650575715215, + "acc_norm": 
0.3125, + "acc_norm_stderr": 0.043994650575715215 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2824074074074074, + "acc_stderr": 0.030701372111510934, + "acc_norm": 0.2824074074074074, + "acc_norm_stderr": 0.030701372111510934 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2335195530726257, + "acc_stderr": 0.014149575348976264, + "acc_norm": 0.2335195530726257, + "acc_norm_stderr": 0.014149575348976264 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.22426470588235295, + "acc_stderr": 0.025336848563332372, + "acc_norm": 0.22426470588235295, + "acc_norm_stderr": 0.025336848563332372 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.23265306122448978, + "acc_stderr": 0.02704925791589618, + "acc_norm": 0.23265306122448978, + "acc_norm_stderr": 0.02704925791589618 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2489451476793249, + "acc_stderr": 0.028146970599422644, + "acc_norm": 0.2489451476793249, + "acc_norm_stderr": 0.028146970599422644 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2301173402868318, + "acc_stderr": 0.01075018317737556, + "acc_norm": 0.2301173402868318, + "acc_norm_stderr": 0.01075018317737556 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.23039215686274508, + "acc_stderr": 0.029554292605695063, + "acc_norm": 0.23039215686274508, + "acc_norm_stderr": 0.029554292605695063 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03225078108306289, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03225078108306289 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 
0.2839657282741738, + "mc1_stderr": 0.015785370858396718, + "mc2": 0.46188658792557263, + "mc2_stderr": 0.016386200757722597 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.21487603305785125, + "acc_stderr": 0.01412140552290331, + "acc_norm": 0.28807556080283353, + "acc_norm_stderr": 0.015569869674838374 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "heegyu/WizardVicuna-open-llama-3b-v2", + "model_sha": "0946550dfbf40d926d6ba816d0ca13e9c810fa72", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/heegyu/WizardVicuna2-13b-hf/result_2023-10-15 11:17:29.json b/heegyu/WizardVicuna2-13b-hf/result_2023-10-15 11:17:29.json new file mode 100644 index 0000000000000000000000000000000000000000..9b9449f46e8a157627f2cdfc02ec62742d956259 --- /dev/null +++ b/heegyu/WizardVicuna2-13b-hf/result_2023-10-15 11:17:29.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2551194539249147, + "acc_stderr": 0.012739038695202105, + "acc_norm": 0.3191126279863481, + "acc_norm_stderr": 0.013621696119173306 + }, + "harness|ko_hellaswag|10": { + "acc": 0.32812188807010556, + 
"acc_stderr": 0.0046856987521048075, + "acc_norm": 0.39225253933479387, + "acc_norm_stderr": 0.004872546302641858 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.40350877192982454, + "acc_stderr": 0.03762738699917055, + "acc_norm": 0.40350877192982454, + "acc_norm_stderr": 0.03762738699917055 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.3300970873786408, + "acc_stderr": 0.0465614711001235, + "acc_norm": 0.3300970873786408, + "acc_norm_stderr": 0.0465614711001235 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.017268607560005773, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.017268607560005773 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2518518518518518, + "acc_stderr": 0.03749850709174021, + "acc_norm": 0.2518518518518518, + "acc_norm_stderr": 0.03749850709174021 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.32340425531914896, + "acc_stderr": 0.030579442773610334, + "acc_norm": 0.32340425531914896, + "acc_norm_stderr": 0.030579442773610334 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3493975903614458, + "acc_stderr": 0.03711725190740749, + "acc_norm": 0.3493975903614458, + "acc_norm_stderr": 0.03711725190740749 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3408360128617363, + "acc_stderr": 0.026920841260776165, + "acc_norm": 0.3408360128617363, + "acc_norm_stderr": 0.026920841260776165 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.38565022421524664, + "acc_stderr": 0.03266842214289201, + "acc_norm": 0.38565022421524664, + "acc_norm_stderr": 0.03266842214289201 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3816793893129771, + "acc_stderr": 0.0426073515764456, + "acc_norm": 0.3816793893129771, + "acc_norm_stderr": 0.0426073515764456 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 
0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.35858585858585856, + "acc_stderr": 0.03416903640391521, + "acc_norm": 0.35858585858585856, + "acc_norm_stderr": 0.03416903640391521 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3793103448275862, + "acc_stderr": 0.04043461861916747, + "acc_norm": 0.3793103448275862, + "acc_norm_stderr": 0.04043461861916747 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.17647058823529413, + "acc_stderr": 0.03793281185307809, + "acc_norm": 0.17647058823529413, + "acc_norm_stderr": 0.03793281185307809 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3067226890756303, + "acc_stderr": 0.029953823891887048, + "acc_norm": 0.3067226890756303, + "acc_norm_stderr": 0.029953823891887048 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2948717948717949, + "acc_stderr": 0.023119362758232294, + "acc_norm": 0.2948717948717949, + "acc_norm_stderr": 0.023119362758232294 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.04616631111801714, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.04616631111801714 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2512315270935961, + "acc_stderr": 0.030516530732694436, + "acc_norm": 0.2512315270935961, + "acc_norm_stderr": 0.030516530732694436 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3580645161290323, + "acc_stderr": 0.027273890594300642, + "acc_norm": 0.3580645161290323, + "acc_norm_stderr": 0.027273890594300642 + }, + 
"harness|ko_mmlu_marketing|5": { + "acc": 0.49145299145299143, + "acc_stderr": 0.032751303000970296, + "acc_norm": 0.49145299145299143, + "acc_norm_stderr": 0.032751303000970296 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3471698113207547, + "acc_stderr": 0.029300101705549655, + "acc_norm": 0.3471698113207547, + "acc_norm_stderr": 0.029300101705549655 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4090909090909091, + "acc_stderr": 0.047093069786618966, + "acc_norm": 0.4090909090909091, + "acc_norm_stderr": 0.047093069786618966 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.02671924078371216, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.02671924078371216 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2185430463576159, + "acc_stderr": 0.03374235550425694, + "acc_norm": 0.2185430463576159, + "acc_norm_stderr": 0.03374235550425694 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.40298507462686567, + "acc_stderr": 0.034683432951111266, + "acc_norm": 0.40298507462686567, + "acc_norm_stderr": 0.034683432951111266 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2832369942196532, + "acc_stderr": 0.03435568056047873, + "acc_norm": 0.2832369942196532, + "acc_norm_stderr": 0.03435568056047873 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.24867724867724866, + "acc_stderr": 0.02226181769240016, + "acc_norm": 0.24867724867724866, + "acc_norm_stderr": 0.02226181769240016 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2638888888888889, + "acc_stderr": 0.03685651095897532, + "acc_norm": 0.2638888888888889, + "acc_norm_stderr": 0.03685651095897532 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956913, + "acc_norm": 0.49, + 
"acc_norm_stderr": 0.05024183937956913 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.3468208092485549, + "acc_stderr": 0.025624723994030457, + "acc_norm": 0.3468208092485549, + "acc_norm_stderr": 0.025624723994030457 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3619631901840491, + "acc_stderr": 0.037757007291414416, + "acc_norm": 0.3619631901840491, + "acc_norm_stderr": 0.037757007291414416 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.37962962962962965, + "acc_stderr": 0.02700252103451647, + "acc_norm": 0.37962962962962965, + "acc_norm_stderr": 0.02700252103451647 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.33678756476683935, + "acc_stderr": 0.034107802518361846, + "acc_norm": 0.33678756476683935, + "acc_norm_stderr": 0.034107802518361846 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.22807017543859648, + "acc_stderr": 0.03947152782669415, + "acc_norm": 0.22807017543859648, + "acc_norm_stderr": 0.03947152782669415 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3155963302752294, + "acc_stderr": 0.019926117513869662, + "acc_norm": 0.3155963302752294, + "acc_norm_stderr": 0.019926117513869662 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.037184890068181146, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.037184890068181146 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.30392156862745096, + "acc_stderr": 0.026336613469046637, + "acc_norm": 0.30392156862745096, + "acc_norm_stderr": 0.026336613469046637 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5537190082644629, + "acc_stderr": 
0.0453793517794788, + "acc_norm": 0.5537190082644629, + "acc_norm_stderr": 0.0453793517794788 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.2894736842105263, + "acc_stderr": 0.036906779861372814, + "acc_norm": 0.2894736842105263, + "acc_norm_stderr": 0.036906779861372814 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3006535947712418, + "acc_stderr": 0.018550634502952964, + "acc_norm": 0.3006535947712418, + "acc_norm_stderr": 0.018550634502952964 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.29432624113475175, + "acc_stderr": 0.02718712701150379, + "acc_norm": 0.29432624113475175, + "acc_norm_stderr": 0.02718712701150379 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2767857142857143, + "acc_stderr": 0.04246624336697624, + "acc_norm": 0.2767857142857143, + "acc_norm_stderr": 0.04246624336697624 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2175925925925926, + "acc_stderr": 0.02813968944485967, + "acc_norm": 0.2175925925925926, + "acc_norm_stderr": 0.02813968944485967 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.24632352941176472, + "acc_stderr": 0.02617343857052, + "acc_norm": 0.24632352941176472, + "acc_norm_stderr": 0.02617343857052 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3020408163265306, + "acc_stderr": 0.02939360931987982, + "acc_norm": 0.3020408163265306, + "acc_norm_stderr": 0.02939360931987982 + }, + 
"harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.3628691983122363, + "acc_stderr": 0.03129920825530213, + "acc_norm": 0.3628691983122363, + "acc_norm_stderr": 0.03129920825530213 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2646675358539765, + "acc_stderr": 0.011267332992845535, + "acc_norm": 0.2646675358539765, + "acc_norm_stderr": 0.011267332992845535 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.27941176470588236, + "acc_stderr": 0.03149328104507956, + "acc_norm": 0.27941176470588236, + "acc_norm_stderr": 0.03149328104507956 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.23636363636363636, + "acc_stderr": 0.033175059300091805, + "acc_norm": 0.23636363636363636, + "acc_norm_stderr": 0.033175059300091805 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2729498164014688, + "mc1_stderr": 0.015594753632006509, + "mc2": 0.44330415731488865, + "mc2_stderr": 0.015557823529945149 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2585596221959858, + "acc_stderr": 0.015053354438963988, + "acc_norm": 0.3482880755608028, + "acc_norm_stderr": 0.01637992673914804 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 
1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "heegyu/WizardVicuna2-13b-hf", + "model_sha": "6cfd95e2dcdb6996afa9eb5c63273a1a3524c6c6", + "model_dtype": "torch.float16", + 
"lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/heegyu/ajoublue-gpt2-base/result_2023-10-29 14:08:36.json b/heegyu/ajoublue-gpt2-base/result_2023-10-29 14:08:36.json new file mode 100644 index 0000000000000000000000000000000000000000..592d2abac725f8f8f6e35b8116937320b75d3e60 --- /dev/null +++ b/heegyu/ajoublue-gpt2-base/result_2023-10-29 14:08:36.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.1962457337883959, + "acc_stderr": 0.01160601988141628, + "acc_norm": 0.25, + "acc_norm_stderr": 0.012653835621466646 + }, + "harness|ko_hellaswag|10": { + "acc": 0.2848038239394543, + "acc_stderr": 0.004503985839041979, + "acc_norm": 0.31876120294762, + "acc_norm_stderr": 0.004650438781745302 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.3216374269005848, + "acc_stderr": 0.03582529442573122, + "acc_norm": 0.3216374269005848, + "acc_norm_stderr": 0.03582529442573122 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.17475728155339806, + "acc_stderr": 0.037601780060266196, + "acc_norm": 0.17475728155339806, + "acc_norm_stderr": 0.037601780060266196 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.24521072796934865, + "acc_stderr": 0.015384352284543936, + "acc_norm": 0.24521072796934865, + "acc_norm_stderr": 0.015384352284543936 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.22962962962962963, + "acc_stderr": 0.036333844140734636, + "acc_norm": 0.22962962962962963, + "acc_norm_stderr": 0.036333844140734636 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932268, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932268 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.25957446808510637, + "acc_stderr": 0.028659179374292326, + "acc_norm": 0.25957446808510637, + "acc_norm_stderr": 0.028659179374292326 + }, + "harness|ko_mmlu_virology|5": { + 
"acc": 0.27710843373493976, + "acc_stderr": 0.03484331592680588, + "acc_norm": 0.27710843373493976, + "acc_norm_stderr": 0.03484331592680588 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.22186495176848875, + "acc_stderr": 0.02359885829286305, + "acc_norm": 0.22186495176848875, + "acc_norm_stderr": 0.02359885829286305 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3004484304932735, + "acc_stderr": 0.030769352008229136, + "acc_norm": 0.3004484304932735, + "acc_norm_stderr": 0.030769352008229136 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2595419847328244, + "acc_stderr": 0.03844876139785271, + "acc_norm": 0.2595419847328244, + "acc_norm_stderr": 0.03844876139785271 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.30808080808080807, + "acc_stderr": 0.032894773300986155, + "acc_norm": 0.30808080808080807, + "acc_norm_stderr": 0.032894773300986155 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.22758620689655173, + "acc_stderr": 0.03493950380131184, + "acc_norm": 0.22758620689655173, + "acc_norm_stderr": 0.03493950380131184 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237654, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237654 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3487394957983193, + "acc_stderr": 0.030956636328566548, + "acc_norm": 0.3487394957983193, + "acc_norm_stderr": 0.030956636328566548 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3641025641025641, + "acc_stderr": 0.024396672985094778, + "acc_norm": 0.3641025641025641, + "acc_norm_stderr": 0.024396672985094778 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 
0.040936018074033256 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.22, + "acc_stderr": 0.041633319989322695, + "acc_norm": 0.22, + "acc_norm_stderr": 0.041633319989322695 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.28703703703703703, + "acc_stderr": 0.043733130409147614, + "acc_norm": 0.28703703703703703, + "acc_norm_stderr": 0.043733130409147614 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.21182266009852216, + "acc_stderr": 0.02874898368994106, + "acc_norm": 0.21182266009852216, + "acc_norm_stderr": 0.02874898368994106 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3161290322580645, + "acc_stderr": 0.026450874489042764, + "acc_norm": 0.3161290322580645, + "acc_norm_stderr": 0.026450874489042764 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.19658119658119658, + "acc_stderr": 0.02603538609895129, + "acc_norm": 0.19658119658119658, + "acc_norm_stderr": 0.02603538609895129 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2188679245283019, + "acc_stderr": 0.025447863825108597, + "acc_norm": 0.2188679245283019, + "acc_norm_stderr": 0.025447863825108597 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.22727272727272727, + "acc_stderr": 0.040139645540727735, + "acc_norm": 0.22727272727272727, + "acc_norm_stderr": 0.040139645540727735 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.02684205787383371, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.02684205787383371 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.03757949922943342, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.03757949922943342 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.20398009950248755, + "acc_stderr": 0.02849317624532607, + "acc_norm": 0.20398009950248755, + "acc_norm_stderr": 0.02849317624532607 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3352601156069364, + "acc_stderr": 
0.03599586301247078, + "acc_norm": 0.3352601156069364, + "acc_norm_stderr": 0.03599586301247078 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2566137566137566, + "acc_stderr": 0.022494510767503154, + "acc_norm": 0.2566137566137566, + "acc_norm_stderr": 0.022494510767503154 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2638888888888889, + "acc_stderr": 0.03685651095897532, + "acc_norm": 0.2638888888888889, + "acc_norm_stderr": 0.03685651095897532 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.24855491329479767, + "acc_stderr": 0.023267528432100174, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 0.023267528432100174 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2331288343558282, + "acc_stderr": 0.0332201579577674, + "acc_norm": 0.2331288343558282, + "acc_norm_stderr": 0.0332201579577674 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.21296296296296297, + "acc_stderr": 0.022779719088733396, + "acc_norm": 0.21296296296296297, + "acc_norm_stderr": 0.022779719088733396 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.36787564766839376, + "acc_stderr": 0.034801756684660366, + "acc_norm": 0.36787564766839376, + "acc_norm_stderr": 0.034801756684660366 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.03999423879281336, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.03999423879281336 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 
0.28440366972477066, + "acc_stderr": 0.0193420365877026, + "acc_norm": 0.28440366972477066, + "acc_norm_stderr": 0.0193420365877026 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.36507936507936506, + "acc_stderr": 0.04306241259127153, + "acc_norm": 0.36507936507936506, + "acc_norm_stderr": 0.04306241259127153 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2679738562091503, + "acc_stderr": 0.025360603796242564, + "acc_norm": 0.2679738562091503, + "acc_norm_stderr": 0.025360603796242564 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.35537190082644626, + "acc_stderr": 0.04369236326573981, + "acc_norm": 0.35537190082644626, + "acc_norm_stderr": 0.04369236326573981 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.25, + "acc_stderr": 0.03523807393012047, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03523807393012047 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2173202614379085, + "acc_stderr": 0.016684820929148598, + "acc_norm": 0.2173202614379085, + "acc_norm_stderr": 0.016684820929148598 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.24468085106382978, + "acc_stderr": 0.02564555362226673, + "acc_norm": 0.24468085106382978, + "acc_norm_stderr": 0.02564555362226673 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.16964285714285715, + "acc_stderr": 0.03562367850095391, + "acc_norm": 0.16964285714285715, + "acc_norm_stderr": 0.03562367850095391 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.0340470532865388, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.0340470532865388 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27262569832402234, + "acc_stderr": 0.014893391735249608, + "acc_norm": 0.27262569832402234, + "acc_norm_stderr": 0.014893391735249608 + }, + 
"harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4485294117647059, + "acc_stderr": 0.030211479609121593, + "acc_norm": 0.4485294117647059, + "acc_norm_stderr": 0.030211479609121593 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.363265306122449, + "acc_stderr": 0.030789051139030806, + "acc_norm": 0.363265306122449, + "acc_norm_stderr": 0.030789051139030806 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2869198312236287, + "acc_stderr": 0.029443773022594693, + "acc_norm": 0.2869198312236287, + "acc_norm_stderr": 0.029443773022594693 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.24511082138200782, + "acc_stderr": 0.01098630787004552, + "acc_norm": 0.24511082138200782, + "acc_norm_stderr": 0.01098630787004552 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.030587591351604246, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.030587591351604246 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03225078108306289, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03225078108306289 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.26193390452876375, + "mc1_stderr": 0.015392118805015008, + "mc2": 0.4679072270766072, + "mc2_stderr": 0.01544600714675648 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.29043683589138136, + "acc_stderr": 0.01560760256981463, + "acc_norm": 0.4037780401416765, + "acc_norm_stderr": 0.016869031540298625 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + 
"harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + 
"harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "heegyu/ajoublue-gpt2-base", + "model_sha": "528c5d0f568ed796e0d87064d72a1baf961a5485", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/heegyu/ajoublue-gpt2-medium/result_2023-10-29 14:08:46.json b/heegyu/ajoublue-gpt2-medium/result_2023-10-29 14:08:46.json new file mode 100644 index 0000000000000000000000000000000000000000..7c6c752ec988437594a22a0d062a604c87ce3bcd --- /dev/null +++ b/heegyu/ajoublue-gpt2-medium/result_2023-10-29 14:08:46.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.1885665529010239, + "acc_stderr": 0.011430897647675815, + "acc_norm": 0.25426621160409557, + "acc_norm_stderr": 0.012724999945157746 + }, + "harness|ko_hellaswag|10": { + "acc": 0.2983469428400717, + "acc_stderr": 0.004565974937793719, + "acc_norm": 0.33808006373232424, + "acc_norm_stderr": 0.004720891597174735 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.21052631578947367, + "acc_stderr": 0.031267817146631786, + "acc_norm": 0.21052631578947367, + "acc_norm_stderr": 0.031267817146631786 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.17475728155339806, + "acc_stderr": 0.037601780060266196, + "acc_norm": 0.17475728155339806, + "acc_norm_stderr": 
0.037601780060266196 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.26309067688378035, + "acc_stderr": 0.015745497169049053, + "acc_norm": 0.26309067688378035, + "acc_norm_stderr": 0.015745497169049053 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.22962962962962963, + "acc_stderr": 0.036333844140734636, + "acc_norm": 0.22962962962962963, + "acc_norm_stderr": 0.036333844140734636 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3148936170212766, + "acc_stderr": 0.030363582197238174, + "acc_norm": 0.3148936170212766, + "acc_norm_stderr": 0.030363582197238174 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.28313253012048195, + "acc_stderr": 0.03507295431370518, + "acc_norm": 0.28313253012048195, + "acc_norm_stderr": 0.03507295431370518 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2990353697749196, + "acc_stderr": 0.02600330111788514, + "acc_norm": 0.2990353697749196, + "acc_norm_stderr": 0.02600330111788514 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.336322869955157, + "acc_stderr": 0.031708824268455, + "acc_norm": 0.336322869955157, + "acc_norm_stderr": 0.031708824268455 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2595419847328244, + "acc_stderr": 0.03844876139785271, + "acc_norm": 0.2595419847328244, + "acc_norm_stderr": 0.03844876139785271 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.23737373737373738, + "acc_stderr": 0.03031371053819892, + "acc_norm": 0.23737373737373738, + "acc_norm_stderr": 0.03031371053819892 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.23448275862068965, + "acc_stderr": 0.035306258743465914, + "acc_norm": 0.23448275862068965, + 
"acc_norm_stderr": 0.035306258743465914 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237654, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237654 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.35294117647058826, + "acc_stderr": 0.031041941304059285, + "acc_norm": 0.35294117647058826, + "acc_norm_stderr": 0.031041941304059285 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.36666666666666664, + "acc_stderr": 0.024433016466052452, + "acc_norm": 0.36666666666666664, + "acc_norm_stderr": 0.024433016466052452 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.22, + "acc_stderr": 0.041633319989322695, + "acc_norm": 0.22, + "acc_norm_stderr": 0.041633319989322695 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.043300437496507437, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.043300437496507437 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3054187192118227, + "acc_stderr": 0.03240661565868408, + "acc_norm": 0.3054187192118227, + "acc_norm_stderr": 0.03240661565868408 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.26129032258064516, + "acc_stderr": 0.024993053397764815, + "acc_norm": 0.26129032258064516, + "acc_norm_stderr": 0.024993053397764815 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.19230769230769232, + "acc_stderr": 0.025819233256483727, + "acc_norm": 0.19230769230769232, + "acc_norm_stderr": 0.025819233256483727 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.26037735849056604, + "acc_stderr": 0.027008766090708094, + "acc_norm": 0.26037735849056604, + "acc_norm_stderr": 0.027008766090708094 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.20909090909090908, + "acc_stderr": 
0.03895091015724135, + "acc_norm": 0.20909090909090908, + "acc_norm_stderr": 0.03895091015724135 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2740740740740741, + "acc_stderr": 0.027195934804085622, + "acc_norm": 0.2740740740740741, + "acc_norm_stderr": 0.027195934804085622 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2185430463576159, + "acc_stderr": 0.03374235550425694, + "acc_norm": 0.2185430463576159, + "acc_norm_stderr": 0.03374235550425694 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.23880597014925373, + "acc_stderr": 0.030147775935409224, + "acc_norm": 0.23880597014925373, + "acc_norm_stderr": 0.030147775935409224 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.1907514450867052, + "acc_stderr": 0.029957851329869327, + "acc_norm": 0.1907514450867052, + "acc_norm_stderr": 0.029957851329869327 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2566137566137566, + "acc_stderr": 0.022494510767503154, + "acc_norm": 0.2566137566137566, + "acc_norm_stderr": 0.022494510767503154 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.03476590104304134, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.03476590104304134 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.26011560693641617, + "acc_stderr": 0.02361867831006937, + "acc_norm": 0.26011560693641617, + "acc_norm_stderr": 0.02361867831006937 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2147239263803681, + "acc_stderr": 0.03226219377286775, + "acc_norm": 0.2147239263803681, + "acc_norm_stderr": 0.03226219377286775 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 
0.23765432098765432, + "acc_stderr": 0.02368359183700856, + "acc_norm": 0.23765432098765432, + "acc_norm_stderr": 0.02368359183700856 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.2849740932642487, + "acc_stderr": 0.0325771407770966, + "acc_norm": 0.2849740932642487, + "acc_norm_stderr": 0.0325771407770966 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.04049339297748141, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.04049339297748141 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.24403669724770644, + "acc_stderr": 0.018415286351416402, + "acc_norm": 0.24403669724770644, + "acc_norm_stderr": 0.018415286351416402 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.19047619047619047, + "acc_stderr": 0.03512207412302054, + "acc_norm": 0.19047619047619047, + "acc_norm_stderr": 0.03512207412302054 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.023929155517351287, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.023929155517351287 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.30578512396694213, + "acc_stderr": 0.04205953933884125, + "acc_norm": 0.30578512396694213, + "acc_norm_stderr": 0.04205953933884125 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.17763157894736842, + "acc_stderr": 0.03110318238312338, + "acc_norm": 0.17763157894736842, + "acc_norm_stderr": 0.03110318238312338 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.24183006535947713, + "acc_stderr": 0.017322789207784326, + "acc_norm": 0.24183006535947713, + "acc_norm_stderr": 0.017322789207784326 + }, + 
"harness|ko_mmlu_professional_accounting|5": { + "acc": 0.24822695035460993, + "acc_stderr": 0.02577001564429038, + "acc_norm": 0.24822695035460993, + "acc_norm_stderr": 0.02577001564429038 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25892857142857145, + "acc_stderr": 0.04157751539865629, + "acc_norm": 0.25892857142857145, + "acc_norm_stderr": 0.04157751539865629 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.0340470532865388, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.0340470532865388 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23798882681564246, + "acc_stderr": 0.014242630070574892, + "acc_norm": 0.23798882681564246, + "acc_norm_stderr": 0.014242630070574892 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4485294117647059, + "acc_stderr": 0.030211479609121593, + "acc_norm": 0.4485294117647059, + "acc_norm_stderr": 0.030211479609121593 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.17142857142857143, + "acc_stderr": 0.024127463462650146, + "acc_norm": 0.17142857142857143, + "acc_norm_stderr": 0.024127463462650146 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2616033755274262, + "acc_stderr": 0.028609516716994934, + "acc_norm": 0.2616033755274262, + "acc_norm_stderr": 0.028609516716994934 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.24445893089960888, + "acc_stderr": 0.010976425013113895, + "acc_norm": 0.24445893089960888, + "acc_norm_stderr": 0.010976425013113895 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.24019607843137256, + "acc_stderr": 
0.02998373305591361, + "acc_norm": 0.24019607843137256, + "acc_norm_stderr": 0.02998373305591361 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03225078108306289, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03225078108306289 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2521419828641371, + "mc1_stderr": 0.015201522246299956, + "mc2": 0.4441202464488538, + "mc2_stderr": 0.015239234605842715 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.31759149940968123, + "acc_stderr": 0.01600558187622931, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.017014038119297463 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + 
"harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "heegyu/ajoublue-gpt2-medium", + "model_sha": "97f502306274301f8406956b485f868a8f416e85", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/heegyu/koalpaca-355m/result_2023-10-15 11:22:22.json b/heegyu/koalpaca-355m/result_2023-10-15 11:22:22.json new file mode 100644 index 0000000000000000000000000000000000000000..2cbcba41d248bdc5fb796b410360020e8aa90e0e --- /dev/null +++ 
b/heegyu/koalpaca-355m/result_2023-10-15 11:22:22.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.21075085324232082, + "acc_stderr": 0.011918271754852189, + "acc_norm": 0.2687713310580205, + "acc_norm_stderr": 0.01295506596371068 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3009360685122486, + "acc_stderr": 0.004577275844432453, + "acc_norm": 0.3458474407488548, + "acc_norm_stderr": 0.004746716805735747 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.03188578017686398, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.03188578017686398 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.3786407766990291, + "acc_stderr": 0.048026946982589726, + "acc_norm": 0.3786407766990291, + "acc_norm_stderr": 0.048026946982589726 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.27330779054916987, + "acc_stderr": 0.015936681062628556, + "acc_norm": 0.27330779054916987, + "acc_norm_stderr": 0.015936681062628556 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.21481481481481482, + "acc_stderr": 0.03547854198560826, + "acc_norm": 0.21481481481481482, + "acc_norm_stderr": 0.03547854198560826 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.34893617021276596, + "acc_stderr": 0.03115852213135778, + "acc_norm": 0.34893617021276596, + "acc_norm_stderr": 0.03115852213135778 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.25301204819277107, + "acc_stderr": 0.03384429155233137, + "acc_norm": 0.25301204819277107, + "acc_norm_stderr": 0.03384429155233137 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.26366559485530544, + "acc_stderr": 0.025025538500532338, + "acc_norm": 0.26366559485530544, + "acc_norm_stderr": 0.025025538500532338 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.19730941704035873, + "acc_stderr": 
0.02670985334496796, + "acc_norm": 0.19730941704035873, + "acc_norm_stderr": 0.02670985334496796 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.22900763358778625, + "acc_stderr": 0.036853466317118506, + "acc_norm": 0.22900763358778625, + "acc_norm_stderr": 0.036853466317118506 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.25757575757575757, + "acc_stderr": 0.031156269519646836, + "acc_norm": 0.25757575757575757, + "acc_norm_stderr": 0.031156269519646836 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.20689655172413793, + "acc_stderr": 0.03375672449560554, + "acc_norm": 0.20689655172413793, + "acc_norm_stderr": 0.03375672449560554 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.04220773659171453, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.04220773659171453 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.35294117647058826, + "acc_stderr": 0.031041941304059288, + "acc_norm": 0.35294117647058826, + "acc_norm_stderr": 0.031041941304059288 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2923076923076923, + "acc_stderr": 0.023060438380857726, + "acc_norm": 0.2923076923076923, + "acc_norm_stderr": 0.023060438380857726 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.24074074074074073, + "acc_stderr": 0.041331194402438376, + "acc_norm": 0.24074074074074073, + "acc_norm_stderr": 0.041331194402438376 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + 
"acc": 0.29064039408866993, + "acc_stderr": 0.0319474007226554, + "acc_norm": 0.29064039408866993, + "acc_norm_stderr": 0.0319474007226554 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3161290322580645, + "acc_stderr": 0.026450874489042764, + "acc_norm": 0.3161290322580645, + "acc_norm_stderr": 0.026450874489042764 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.28205128205128205, + "acc_stderr": 0.02948036054954119, + "acc_norm": 0.28205128205128205, + "acc_norm_stderr": 0.02948036054954119 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.24528301886792453, + "acc_stderr": 0.0264803571798957, + "acc_norm": 0.24528301886792453, + "acc_norm_stderr": 0.0264803571798957 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03955932861795833, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03955932861795833 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3111111111111111, + "acc_stderr": 0.028226446749683515, + "acc_norm": 0.3111111111111111, + "acc_norm_stderr": 0.028226446749683515 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.32450331125827814, + "acc_stderr": 0.03822746937658754, + "acc_norm": 0.32450331125827814, + "acc_norm_stderr": 0.03822746937658754 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.22885572139303484, + "acc_stderr": 0.029705284056772436, + "acc_norm": 0.22885572139303484, + "acc_norm_stderr": 0.029705284056772436 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.20809248554913296, + "acc_stderr": 0.030952890217749884, + "acc_norm": 0.20809248554913296, + "acc_norm_stderr": 0.030952890217749884 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2275132275132275, + "acc_stderr": 0.021591269407823764, + "acc_norm": 0.2275132275132275, + "acc_norm_stderr": 0.021591269407823764 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.03476590104304134, + "acc_norm": 
0.2222222222222222, + "acc_norm_stderr": 0.03476590104304134 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.23699421965317918, + "acc_stderr": 0.02289408248992599, + "acc_norm": 0.23699421965317918, + "acc_norm_stderr": 0.02289408248992599 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.25153374233128833, + "acc_stderr": 0.034089978868575295, + "acc_norm": 0.25153374233128833, + "acc_norm_stderr": 0.034089978868575295 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.25617283950617287, + "acc_stderr": 0.0242885336377261, + "acc_norm": 0.25617283950617287, + "acc_norm_stderr": 0.0242885336377261 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932269, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932269 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.18652849740932642, + "acc_stderr": 0.02811209121011746, + "acc_norm": 0.18652849740932642, + "acc_norm_stderr": 0.02811209121011746 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.04185774424022056, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.04185774424022056 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.21467889908256882, + "acc_stderr": 0.017604304149256483, + "acc_norm": 0.21467889908256882, + "acc_norm_stderr": 0.017604304149256483 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.037184890068181146, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.037184890068181146 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2973856209150327, + "acc_stderr": 0.02617390850671858, + 
"acc_norm": 0.2973856209150327, + "acc_norm_stderr": 0.02617390850671858 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2809917355371901, + "acc_stderr": 0.04103203830514511, + "acc_norm": 0.2809917355371901, + "acc_norm_stderr": 0.04103203830514511 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.16447368421052633, + "acc_stderr": 0.030167533468632726, + "acc_norm": 0.16447368421052633, + "acc_norm_stderr": 0.030167533468632726 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.017401816711427653, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.017401816711427653 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.22695035460992907, + "acc_stderr": 0.024987106365642973, + "acc_norm": 0.22695035460992907, + "acc_norm_stderr": 0.024987106365642973 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2767857142857143, + "acc_stderr": 0.04246624336697625, + "acc_norm": 0.2767857142857143, + "acc_norm_stderr": 0.04246624336697625 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4583333333333333, + "acc_stderr": 0.033981108902946366, + "acc_norm": 0.4583333333333333, + "acc_norm_stderr": 0.033981108902946366 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23798882681564246, + "acc_stderr": 0.014242630070574892, + "acc_norm": 0.23798882681564246, + "acc_norm_stderr": 0.014242630070574892 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 
0.4522058823529412, + "acc_stderr": 0.030233758551596452, + "acc_norm": 0.4522058823529412, + "acc_norm_stderr": 0.030233758551596452 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.1836734693877551, + "acc_stderr": 0.024789071332007633, + "acc_norm": 0.1836734693877551, + "acc_norm_stderr": 0.024789071332007633 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2616033755274262, + "acc_stderr": 0.028609516716994934, + "acc_norm": 0.2616033755274262, + "acc_norm_stderr": 0.028609516716994934 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.242503259452412, + "acc_stderr": 0.01094657096634878, + "acc_norm": 0.242503259452412, + "acc_norm_stderr": 0.01094657096634878 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.25, + "acc_stderr": 0.03039153369274154, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03039153369274154 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03225078108306289, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03225078108306289 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.23623011015911874, + "mc1_stderr": 0.014869755015871098, + "mc2": 0.428122521678851, + "mc2_stderr": 0.015366900048399064 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.358913813459268, + "acc_stderr": 0.01649180210299904, + "acc_norm": 0.43565525383707204, + "acc_norm_stderr": 0.01704741522947634 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + 
"harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + 
"harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "heegyu/koalpaca-355m", + "model_sha": "a1f4b5022e95bd808e2375dd3ed4c9bfbb64df32", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/heegyu/kodialogpt-v1/result_2023-10-15 11:16:23.json b/heegyu/kodialogpt-v1/result_2023-10-15 11:16:23.json new file mode 100644 index 0000000000000000000000000000000000000000..b197f44b866788bf853e0759e01b848bd1fa9f09 --- /dev/null +++ b/heegyu/kodialogpt-v1/result_2023-10-15 11:16:23.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.19283276450511946, + "acc_stderr": 0.011529055465663324, + "acc_norm": 0.23122866894197952, + "acc_norm_stderr": 0.012320858834772274 + }, + "harness|ko_hellaswag|10": { + "acc": 0.25652260505875324, + "acc_stderr": 0.004358210689442262, + "acc_norm": 0.2560246962756423, + "acc_norm_stderr": 0.004355436696716298 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.21052631578947367, + "acc_stderr": 0.0312678171466318, + "acc_norm": 0.21052631578947367, + "acc_norm_stderr": 0.0312678171466318 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.1941747572815534, + "acc_stderr": 0.03916667762822582, + "acc_norm": 0.1941747572815534, + "acc_norm_stderr": 0.03916667762822582 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2835249042145594, + "acc_stderr": 0.016117318166832283, + "acc_norm": 0.2835249042145594, + "acc_norm_stderr": 0.016117318166832283 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.31851851851851853, + "acc_stderr": 0.04024778401977111, + "acc_norm": 0.31851851851851853, + "acc_norm_stderr": 0.04024778401977111 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + 
"acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.20425531914893616, + "acc_stderr": 0.02635515841334941, + "acc_norm": 0.20425531914893616, + "acc_norm_stderr": 0.02635515841334941 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.2710843373493976, + "acc_stderr": 0.03460579907553027, + "acc_norm": 0.2710843373493976, + "acc_norm_stderr": 0.03460579907553027 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2990353697749196, + "acc_stderr": 0.02600330111788514, + "acc_norm": 0.2990353697749196, + "acc_norm_stderr": 0.02600330111788514 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.24663677130044842, + "acc_stderr": 0.028930413120910877, + "acc_norm": 0.24663677130044842, + "acc_norm_stderr": 0.028930413120910877 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.20610687022900764, + "acc_stderr": 0.03547771004159462, + "acc_norm": 0.20610687022900764, + "acc_norm_stderr": 0.03547771004159462 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.23232323232323232, + "acc_stderr": 0.030088629490217483, + "acc_norm": 0.23232323232323232, + "acc_norm_stderr": 0.030088629490217483 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.296551724137931, + "acc_stderr": 0.03806142687309994, + "acc_norm": 0.296551724137931, + "acc_norm_stderr": 0.03806142687309994 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.043364327079931785, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.043364327079931785 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.2184873949579832, + "acc_stderr": 0.02684151432295893, + "acc_norm": 0.2184873949579832, + "acc_norm_stderr": 0.02684151432295893 + }, + 
"harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2128205128205128, + "acc_stderr": 0.020752423722128002, + "acc_norm": 0.2128205128205128, + "acc_norm_stderr": 0.020752423722128002 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.25, + "acc_stderr": 0.04186091791394607, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04186091791394607 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2955665024630542, + "acc_stderr": 0.032104944337514575, + "acc_norm": 0.2955665024630542, + "acc_norm_stderr": 0.032104944337514575 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.25161290322580643, + "acc_stderr": 0.024685979286239956, + "acc_norm": 0.25161290322580643, + "acc_norm_stderr": 0.024685979286239956 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.24358974358974358, + "acc_stderr": 0.028120966503914404, + "acc_norm": 0.24358974358974358, + "acc_norm_stderr": 0.028120966503914404 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.21509433962264152, + "acc_stderr": 0.025288394502891363, + "acc_norm": 0.21509433962264152, + "acc_norm_stderr": 0.025288394502891363 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03955932861795833, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03955932861795833 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.026962424325073838, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.026962424325073838 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + "acc_stderr": 0.03684881521389023, + "acc_norm": 0.2847682119205298, + "acc_norm_stderr": 
0.03684881521389023 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.24378109452736318, + "acc_stderr": 0.030360490154014666, + "acc_norm": 0.24378109452736318, + "acc_norm_stderr": 0.030360490154014666 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2832369942196532, + "acc_stderr": 0.034355680560478746, + "acc_norm": 0.2832369942196532, + "acc_norm_stderr": 0.034355680560478746 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2566137566137566, + "acc_stderr": 0.022494510767503154, + "acc_norm": 0.2566137566137566, + "acc_norm_stderr": 0.022494510767503154 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.25, + "acc_stderr": 0.03621034121889507, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03621034121889507 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680814, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680814 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2745664739884393, + "acc_stderr": 0.02402774515526503, + "acc_norm": 0.2745664739884393, + "acc_norm_stderr": 0.02402774515526503 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2883435582822086, + "acc_stderr": 0.035590395316173425, + "acc_norm": 0.2883435582822086, + "acc_norm_stderr": 0.035590395316173425 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.30246913580246915, + "acc_stderr": 0.025557653981868038, + "acc_norm": 0.30246913580246915, + "acc_norm_stderr": 0.025557653981868038 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.22797927461139897, + "acc_stderr": 0.030276909945178256, + "acc_norm": 0.22797927461139897, + "acc_norm_stderr": 
0.030276909945178256 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.03999423879281336, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.03999423879281336 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.23302752293577983, + "acc_stderr": 0.0181256691808615, + "acc_norm": 0.23302752293577983, + "acc_norm_stderr": 0.0181256691808615 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.1984126984126984, + "acc_stderr": 0.03567016675276865, + "acc_norm": 0.1984126984126984, + "acc_norm_stderr": 0.03567016675276865 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.27124183006535946, + "acc_stderr": 0.025457756696667878, + "acc_norm": 0.27124183006535946, + "acc_norm_stderr": 0.025457756696667878 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.38016528925619836, + "acc_stderr": 0.04431324501968432, + "acc_norm": 0.38016528925619836, + "acc_norm_stderr": 0.04431324501968432 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.21052631578947367, + "acc_stderr": 0.033176727875331574, + "acc_norm": 0.21052631578947367, + "acc_norm_stderr": 0.033176727875331574 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.23366013071895425, + "acc_stderr": 0.017119158496044506, + "acc_norm": 0.23366013071895425, + "acc_norm_stderr": 0.017119158496044506 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2730496453900709, + "acc_stderr": 0.026577860943307854, + "acc_norm": 0.2730496453900709, + "acc_norm_stderr": 0.026577860943307854 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.23214285714285715, + "acc_stderr": 0.04007341809755807, + "acc_norm": 0.23214285714285715, + "acc_norm_stderr": 0.04007341809755807 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.21296296296296297, + "acc_stderr": 
0.027920963147993662, + "acc_norm": 0.21296296296296297, + "acc_norm_stderr": 0.027920963147993662 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24692737430167597, + "acc_stderr": 0.014422292204808852, + "acc_norm": 0.24692737430167597, + "acc_norm_stderr": 0.014422292204808852 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.19852941176470587, + "acc_stderr": 0.02423101337054111, + "acc_norm": 0.19852941176470587, + "acc_norm_stderr": 0.02423101337054111 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.24081632653061225, + "acc_stderr": 0.027372942201788167, + "acc_norm": 0.24081632653061225, + "acc_norm_stderr": 0.027372942201788167 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2742616033755274, + "acc_stderr": 0.029041333510598018, + "acc_norm": 0.2742616033755274, + "acc_norm_stderr": 0.029041333510598018 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.27053455019556716, + "acc_stderr": 0.011345996743539265, + "acc_norm": 0.27053455019556716, + "acc_norm_stderr": 0.011345996743539265 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.028379449451588667, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.028379449451588667 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2787878787878788, + "acc_stderr": 0.03501438706296781, + "acc_norm": 0.2787878787878788, + "acc_norm_stderr": 0.03501438706296781 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2717258261933905, + "mc1_stderr": 0.015572840452875833, + "mc2": 0.5203988868301895, + "mc2_stderr": 0.016282877106771964 + }, + 
"harness|ko_commongen_v2|2": { + "acc": 0.16646989374262103, + "acc_stderr": 0.01280687925641312, + "acc_norm": 0.36835891381345925, + "acc_norm_stderr": 0.01658385898263907 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + 
"harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "heegyu/kodialogpt-v1", + "model_sha": "f8b2ddbf8feed75a3e4b8b9de8b17b37efb4d5e0", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/heegyu/kogpt-j-350m/result_2023-10-29 14:08:59.json b/heegyu/kogpt-j-350m/result_2023-10-29 14:08:59.json new file mode 100644 index 0000000000000000000000000000000000000000..02ce13259303bd6723e3d234d00911672510aeb4 --- /dev/null +++ b/heegyu/kogpt-j-350m/result_2023-10-29 14:08:59.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.197098976109215, + "acc_stderr": 0.01162504766988062, + "acc_norm": 0.25426621160409557, + "acc_norm_stderr": 0.01272499994515774 + }, + "harness|ko_hellaswag|10": { + "acc": 0.29864568810993825, + "acc_stderr": 0.00456728777570055, + "acc_norm": 0.3429595698068114, + "acc_norm_stderr": 0.0047372796910361975 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.2807017543859649, + "acc_stderr": 
0.03446296217088427, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.03446296217088427 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.17475728155339806, + "acc_stderr": 0.037601780060266196, + "acc_norm": 0.17475728155339806, + "acc_norm_stderr": 0.037601780060266196 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2656449553001277, + "acc_stderr": 0.015794302487888722, + "acc_norm": 0.2656449553001277, + "acc_norm_stderr": 0.015794302487888722 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.22962962962962963, + "acc_stderr": 0.036333844140734636, + "acc_norm": 0.22962962962962963, + "acc_norm_stderr": 0.036333844140734636 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421255, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421255 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2170212765957447, + "acc_stderr": 0.026947483121496238, + "acc_norm": 0.2170212765957447, + "acc_norm_stderr": 0.026947483121496238 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.28313253012048195, + "acc_stderr": 0.03507295431370518, + "acc_norm": 0.28313253012048195, + "acc_norm_stderr": 0.03507295431370518 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2958199356913183, + "acc_stderr": 0.025922371788818777, + "acc_norm": 0.2958199356913183, + "acc_norm_stderr": 0.025922371788818777 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.37668161434977576, + "acc_stderr": 0.03252113489929189, + "acc_norm": 0.37668161434977576, + "acc_norm_stderr": 0.03252113489929189 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2748091603053435, + "acc_stderr": 0.039153454088478354, + "acc_norm": 0.2748091603053435, + "acc_norm_stderr": 0.039153454088478354 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.21717171717171718, + 
"acc_stderr": 0.02937661648494563, + "acc_norm": 0.21717171717171718, + "acc_norm_stderr": 0.02937661648494563 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.22758620689655173, + "acc_stderr": 0.03493950380131184, + "acc_norm": 0.22758620689655173, + "acc_norm_stderr": 0.03493950380131184 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237654, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237654 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.21008403361344538, + "acc_stderr": 0.026461398717471874, + "acc_norm": 0.21008403361344538, + "acc_norm_stderr": 0.026461398717471874 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3487179487179487, + "acc_stderr": 0.024162780284017724, + "acc_norm": 0.3487179487179487, + "acc_norm_stderr": 0.024162780284017724 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.040191074725573483, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.040191074725573483 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.29064039408866993, + "acc_stderr": 0.0319474007226554, + "acc_norm": 0.29064039408866993, + "acc_norm_stderr": 0.0319474007226554 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.23870967741935484, + "acc_stderr": 0.02425107126220884, + "acc_norm": 0.23870967741935484, + "acc_norm_stderr": 0.02425107126220884 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.23076923076923078, + "acc_stderr": 0.027601921381417583, + "acc_norm": 0.23076923076923078, + "acc_norm_stderr": 0.027601921381417583 + }, + 
"harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2679245283018868, + "acc_stderr": 0.027257260322494845, + "acc_norm": 0.2679245283018868, + "acc_norm_stderr": 0.027257260322494845 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.20909090909090908, + "acc_stderr": 0.03895091015724135, + "acc_norm": 0.20909090909090908, + "acc_norm_stderr": 0.03895091015724135 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.026842057873833706, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.026842057873833706 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.1986754966887417, + "acc_stderr": 0.03257847384436776, + "acc_norm": 0.1986754966887417, + "acc_norm_stderr": 0.03257847384436776 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.23880597014925373, + "acc_stderr": 0.030147775935409224, + "acc_norm": 0.23880597014925373, + "acc_norm_stderr": 0.030147775935409224 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2254335260115607, + "acc_stderr": 0.03186209851641143, + "acc_norm": 0.2254335260115607, + "acc_norm_stderr": 0.03186209851641143 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2566137566137566, + "acc_stderr": 0.022494510767503154, + "acc_norm": 0.2566137566137566, + "acc_norm_stderr": 0.022494510767503154 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.03476590104304134, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.03476590104304134 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036844, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036844 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.24855491329479767, + "acc_stderr": 0.023267528432100174, + "acc_norm": 0.24855491329479767, + 
"acc_norm_stderr": 0.023267528432100174 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.22085889570552147, + "acc_stderr": 0.03259177392742178, + "acc_norm": 0.22085889570552147, + "acc_norm_stderr": 0.03259177392742178 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2191358024691358, + "acc_stderr": 0.023016705640262185, + "acc_norm": 0.2191358024691358, + "acc_norm_stderr": 0.023016705640262185 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.22, + "acc_stderr": 0.0416333199893227, + "acc_norm": 0.22, + "acc_norm_stderr": 0.0416333199893227 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.21243523316062177, + "acc_stderr": 0.02951928261681727, + "acc_norm": 0.21243523316062177, + "acc_norm_stderr": 0.02951928261681727 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.04049339297748141, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.04049339297748141 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.23669724770642203, + "acc_stderr": 0.018224078117299095, + "acc_norm": 0.23669724770642203, + "acc_norm_stderr": 0.018224078117299095 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.15873015873015872, + "acc_stderr": 0.03268454013011744, + "acc_norm": 0.15873015873015872, + "acc_norm_stderr": 0.03268454013011744 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.024288619466046112, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.024288619466046112 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.24793388429752067, + "acc_stderr": 0.039418975265163025, + "acc_norm": 0.24793388429752067, + "acc_norm_stderr": 0.039418975265163025 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.17763157894736842, + "acc_stderr": 0.03110318238312338, 
+ "acc_norm": 0.17763157894736842, + "acc_norm_stderr": 0.03110318238312338 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.22712418300653595, + "acc_stderr": 0.01694985327921237, + "acc_norm": 0.22712418300653595, + "acc_norm_stderr": 0.01694985327921237 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2553191489361702, + "acc_stderr": 0.026011992930902006, + "acc_norm": 0.2553191489361702, + "acc_norm_stderr": 0.026011992930902006 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.24107142857142858, + "acc_stderr": 0.04059867246952689, + "acc_norm": 0.24107142857142858, + "acc_norm_stderr": 0.04059867246952689 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.0340470532865388, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.0340470532865388 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24692737430167597, + "acc_stderr": 0.014422292204808852, + "acc_norm": 0.24692737430167597, + "acc_norm_stderr": 0.014422292204808852 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4485294117647059, + "acc_stderr": 0.030211479609121593, + "acc_norm": 0.4485294117647059, + "acc_norm_stderr": 0.030211479609121593 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.17142857142857143, + "acc_stderr": 0.024127463462650146, + "acc_norm": 0.17142857142857143, + "acc_norm_stderr": 0.024127463462650146 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2616033755274262, + "acc_stderr": 0.028609516716994934, + "acc_norm": 0.2616033755274262, + "acc_norm_stderr": 0.028609516716994934 + }, + 
"harness|ko_mmlu_professional_law|5": { + "acc": 0.242503259452412, + "acc_stderr": 0.010946570966348787, + "acc_norm": 0.242503259452412, + "acc_norm_stderr": 0.010946570966348787 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.20098039215686275, + "acc_stderr": 0.028125972265654373, + "acc_norm": 0.20098039215686275, + "acc_norm_stderr": 0.028125972265654373 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03225078108306289, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03225078108306289 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2607099143206854, + "mc1_stderr": 0.015368841620766379, + "mc2": 0.4573493572313282, + "mc2_stderr": 0.015233367138630003 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.29043683589138136, + "acc_stderr": 0.01560760256981463, + "acc_norm": 0.4214876033057851, + "acc_norm_stderr": 0.016977101932601532 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + 
"harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "heegyu/kogpt-j-350m", + "model_sha": "4020a790a09b76074102be8fc9013d58bcdaf385", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git 
a/heegyu/kogpt-j-base/result_2023-10-29 14:09:11.json b/heegyu/kogpt-j-base/result_2023-10-29 14:09:11.json new file mode 100644 index 0000000000000000000000000000000000000000..c1333a6b2e24b4e1a4540455370ca39bc001e69d --- /dev/null +++ b/heegyu/kogpt-j-base/result_2023-10-29 14:09:11.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.20563139931740615, + "acc_stderr": 0.011810745260742581, + "acc_norm": 0.24573378839590443, + "acc_norm_stderr": 0.012581033453730107 + }, + "harness|ko_hellaswag|10": { + "acc": 0.28540131447918743, + "acc_stderr": 0.0045068240943332985, + "acc_norm": 0.3209520015933081, + "acc_norm_stderr": 0.004658882929099508 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.30409356725146197, + "acc_stderr": 0.03528211258245233, + "acc_norm": 0.30409356725146197, + "acc_norm_stderr": 0.03528211258245233 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.17475728155339806, + "acc_stderr": 0.037601780060266196, + "acc_norm": 0.17475728155339806, + "acc_norm_stderr": 0.037601780060266196 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.26181353767560667, + "acc_stderr": 0.015720838678445266, + "acc_norm": 0.26181353767560667, + "acc_norm_stderr": 0.015720838678445266 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3037037037037037, + "acc_stderr": 0.039725528847851375, + "acc_norm": 0.3037037037037037, + "acc_norm_stderr": 0.039725528847851375 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.22127659574468084, + "acc_stderr": 0.02713634960242405, + "acc_norm": 0.22127659574468084, + "acc_norm_stderr": 0.02713634960242405 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.2891566265060241, + "acc_stderr": 0.035294868015111155, + "acc_norm": 0.2891566265060241, + "acc_norm_stderr": 0.035294868015111155 + }, + 
"harness|ko_mmlu_philosophy|5": { + "acc": 0.2733118971061093, + "acc_stderr": 0.025311765975426115, + "acc_norm": 0.2733118971061093, + "acc_norm_stderr": 0.025311765975426115 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.242152466367713, + "acc_stderr": 0.028751392398694755, + "acc_norm": 0.242152466367713, + "acc_norm_stderr": 0.028751392398694755 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2900763358778626, + "acc_stderr": 0.03980066246467766, + "acc_norm": 0.2900763358778626, + "acc_norm_stderr": 0.03980066246467766 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.19696969696969696, + "acc_stderr": 0.02833560973246335, + "acc_norm": 0.19696969696969696, + "acc_norm_stderr": 0.02833560973246335 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2, + "acc_stderr": 0.0333333333333333, + "acc_norm": 0.2, + "acc_norm_stderr": 0.0333333333333333 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.04158307533083286, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.04158307533083286 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.28991596638655465, + "acc_stderr": 0.029472485833136098, + "acc_norm": 0.28991596638655465, + "acc_norm_stderr": 0.029472485833136098 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.34615384615384615, + "acc_stderr": 0.024121125416941187, + "acc_norm": 0.34615384615384615, + "acc_norm_stderr": 0.024121125416941187 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036844, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036844 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + 
"harness|ko_mmlu_jurisprudence|5": { + "acc": 0.25, + "acc_stderr": 0.04186091791394607, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04186091791394607 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.18719211822660098, + "acc_stderr": 0.027444924966882618, + "acc_norm": 0.18719211822660098, + "acc_norm_stderr": 0.027444924966882618 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2645161290322581, + "acc_stderr": 0.02509189237885928, + "acc_norm": 0.2645161290322581, + "acc_norm_stderr": 0.02509189237885928 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.19658119658119658, + "acc_stderr": 0.02603538609895129, + "acc_norm": 0.19658119658119658, + "acc_norm_stderr": 0.02603538609895129 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2, + "acc_stderr": 0.024618298195866507, + "acc_norm": 0.2, + "acc_norm_stderr": 0.024618298195866507 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03955932861795833, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03955932861795833 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.02794045713622841, + "acc_norm": 0.3, + "acc_norm_stderr": 0.02794045713622841 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33112582781456956, + "acc_stderr": 0.038425817186598696, + "acc_norm": 0.33112582781456956, + "acc_norm_stderr": 0.038425817186598696 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.24875621890547264, + "acc_stderr": 0.030567675938916707, + "acc_norm": 0.24875621890547264, + "acc_norm_stderr": 0.030567675938916707 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3179190751445087, + "acc_stderr": 0.03550683989165581, + "acc_norm": 0.3179190751445087, + "acc_norm_stderr": 0.03550683989165581 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.022644212615525218, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 
0.022644212615525218 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.22916666666666666, + "acc_stderr": 0.03514697467862388, + "acc_norm": 0.22916666666666666, + "acc_norm_stderr": 0.03514697467862388 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909282, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909282 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2543352601156069, + "acc_stderr": 0.023445826276545543, + "acc_norm": 0.2543352601156069, + "acc_norm_stderr": 0.023445826276545543 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.22699386503067484, + "acc_stderr": 0.032910995786157714, + "acc_norm": 0.22699386503067484, + "acc_norm_stderr": 0.032910995786157714 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2345679012345679, + "acc_stderr": 0.023576881744005723, + "acc_norm": 0.2345679012345679, + "acc_norm_stderr": 0.023576881744005723 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421296, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421296 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.2694300518134715, + "acc_stderr": 0.032018671228777947, + "acc_norm": 0.2694300518134715, + "acc_norm_stderr": 0.032018671228777947 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.21929824561403508, + "acc_stderr": 0.03892431106518754, + "acc_norm": 0.21929824561403508, + "acc_norm_stderr": 0.03892431106518754 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.23853211009174313, + "acc_stderr": 0.01827257581023186, + "acc_norm": 0.23853211009174313, + "acc_norm_stderr": 0.01827257581023186 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.23015873015873015, + "acc_stderr": 0.03764950879790606, + "acc_norm": 0.23015873015873015, 
+ "acc_norm_stderr": 0.03764950879790606 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.21241830065359477, + "acc_stderr": 0.02342037547829613, + "acc_norm": 0.21241830065359477, + "acc_norm_stderr": 0.02342037547829613 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.24793388429752067, + "acc_stderr": 0.03941897526516303, + "acc_norm": 0.24793388429752067, + "acc_norm_stderr": 0.03941897526516303 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.17763157894736842, + "acc_stderr": 0.03110318238312338, + "acc_norm": 0.17763157894736842, + "acc_norm_stderr": 0.03110318238312338 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.24836601307189543, + "acc_stderr": 0.01747948700136476, + "acc_norm": 0.24836601307189543, + "acc_norm_stderr": 0.01747948700136476 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.24468085106382978, + "acc_stderr": 0.025645553622266726, + "acc_norm": 0.24468085106382978, + "acc_norm_stderr": 0.025645553622266726 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.17857142857142858, + "acc_stderr": 0.036352091215778065, + "acc_norm": 0.17857142857142858, + "acc_norm_stderr": 0.036352091215778065 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4305555555555556, + "acc_stderr": 0.03376922151252336, + "acc_norm": 0.4305555555555556, + "acc_norm_stderr": 0.03376922151252336 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24916201117318434, + "acc_stderr": 0.014465893829859919, + "acc_norm": 0.24916201117318434, + "acc_norm_stderr": 0.014465893829859919 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.27, + "acc_stderr": 
0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.41544117647058826, + "acc_stderr": 0.02993534270787775, + "acc_norm": 0.41544117647058826, + "acc_norm_stderr": 0.02993534270787775 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2, + "acc_stderr": 0.02560737598657916, + "acc_norm": 0.2, + "acc_norm_stderr": 0.02560737598657916 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.270042194092827, + "acc_stderr": 0.028900721906293426, + "acc_norm": 0.270042194092827, + "acc_norm_stderr": 0.028900721906293426 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.242503259452412, + "acc_stderr": 0.010946570966348783, + "acc_norm": 0.242503259452412, + "acc_norm_stderr": 0.010946570966348783 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.03132179803083292, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.03132179803083292 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.24242424242424243, + "acc_stderr": 0.033464098810559534, + "acc_norm": 0.24242424242424243, + "acc_norm_stderr": 0.033464098810559534 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2533659730722154, + "mc1_stderr": 0.01522589934082682, + "mc2": 0.4666916578437702, + "mc2_stderr": 0.015201094715829425 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3412042502951594, + "acc_stderr": 0.016300368742137302, + "acc_norm": 0.48760330578512395, + "acc_norm_stderr": 0.017185069732676517 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + 
"harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + 
"harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "heegyu/kogpt-j-base", + "model_sha": "212ebff345958e108fc47ae0daa892328ca6ece2", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/heegyu/llama-2-ko-7b-chat/result_2023-09-27 06:17:19.json b/heegyu/llama-2-ko-7b-chat/result_2023-09-27 06:17:19.json new file mode 100644 index 0000000000000000000000000000000000000000..b7b6bdff530fee2ae4e5cbc3f394dbf0030c2050 --- /dev/null +++ b/heegyu/llama-2-ko-7b-chat/result_2023-09-27 06:17:19.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.30802047781569963, + "acc_stderr": 0.013491429517292038, + "acc_norm": 0.34726962457337884, + "acc_norm_stderr": 0.013913034529620439 + }, + "harness|ko_hellaswag|10": { + "acc": 0.37004580760804623, + "acc_stderr": 0.004818298991012552, + "acc_norm": 0.47231627165903206, + "acc_norm_stderr": 0.004982127315605219 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.26900584795321636, + "acc_stderr": 0.03401052620104088, + "acc_norm": 0.26900584795321636, + "acc_norm_stderr": 0.03401052620104088 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2621359223300971, + "acc_stderr": 0.043546310772605956, + "acc_norm": 0.2621359223300971, + "acc_norm_stderr": 0.043546310772605956 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2988505747126437, + "acc_stderr": 0.016369256815093127, + "acc_norm": 0.2988505747126437, + "acc_norm_stderr": 0.016369256815093127 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3111111111111111, + "acc_stderr": 
0.03999262876617724, + "acc_norm": 0.3111111111111111, + "acc_norm_stderr": 0.03999262876617724 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.225531914893617, + "acc_stderr": 0.02732107841738753, + "acc_norm": 0.225531914893617, + "acc_norm_stderr": 0.02732107841738753 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.27710843373493976, + "acc_stderr": 0.034843315926805875, + "acc_norm": 0.27710843373493976, + "acc_norm_stderr": 0.034843315926805875 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.33762057877813506, + "acc_stderr": 0.02685882587948855, + "acc_norm": 0.33762057877813506, + "acc_norm_stderr": 0.02685882587948855 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.2645739910313901, + "acc_stderr": 0.029605103217038332, + "acc_norm": 0.2645739910313901, + "acc_norm_stderr": 0.029605103217038332 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.25190839694656486, + "acc_stderr": 0.038073871163060866, + "acc_norm": 0.25190839694656486, + "acc_norm_stderr": 0.038073871163060866 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932269, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932269 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.03191178226713547, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.03191178226713547 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.30344827586206896, + "acc_stderr": 0.03831226048850333, + "acc_norm": 0.30344827586206896, + "acc_norm_stderr": 0.03831226048850333 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.042207736591714506, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.042207736591714506 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 
0.21428571428571427, + "acc_stderr": 0.02665353159671549, + "acc_norm": 0.21428571428571427, + "acc_norm_stderr": 0.02665353159671549 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.21794871794871795, + "acc_stderr": 0.02093244577446317, + "acc_norm": 0.21794871794871795, + "acc_norm_stderr": 0.02093244577446317 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768077, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768077 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.046166311118017125, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.046166311118017125 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.270935960591133, + "acc_stderr": 0.03127090713297698, + "acc_norm": 0.270935960591133, + "acc_norm_stderr": 0.03127090713297698 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.267741935483871, + "acc_stderr": 0.025189006660212385, + "acc_norm": 0.267741935483871, + "acc_norm_stderr": 0.025189006660212385 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.3076923076923077, + "acc_stderr": 0.030236389942173106, + "acc_norm": 0.3076923076923077, + "acc_norm_stderr": 0.030236389942173106 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.27169811320754716, + "acc_stderr": 0.027377706624670713, + "acc_norm": 0.27169811320754716, + "acc_norm_stderr": 0.027377706624670713 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2545454545454545, + "acc_stderr": 0.041723430387053825, + "acc_norm": 0.2545454545454545, + "acc_norm_stderr": 0.041723430387053825 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.026719240783712166, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.026719240783712166 + }, + 
"harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2781456953642384, + "acc_stderr": 0.036586032627637426, + "acc_norm": 0.2781456953642384, + "acc_norm_stderr": 0.036586032627637426 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.3482587064676617, + "acc_stderr": 0.033687874661154596, + "acc_norm": 0.3482587064676617, + "acc_norm_stderr": 0.033687874661154596 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.24855491329479767, + "acc_stderr": 0.03295304696818317, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 0.03295304696818317 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.24867724867724866, + "acc_stderr": 0.022261817692400175, + "acc_norm": 0.24867724867724866, + "acc_norm_stderr": 0.022261817692400175 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2638888888888889, + "acc_stderr": 0.03685651095897532, + "acc_norm": 0.2638888888888889, + "acc_norm_stderr": 0.03685651095897532 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.18, + "acc_stderr": 0.038612291966536955, + "acc_norm": 0.18, + "acc_norm_stderr": 0.038612291966536955 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.30057803468208094, + "acc_stderr": 0.024685316867257806, + "acc_norm": 0.30057803468208094, + "acc_norm_stderr": 0.024685316867257806 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3067484662576687, + "acc_stderr": 0.036230899157241474, + "acc_norm": 0.3067484662576687, + "acc_norm_stderr": 0.036230899157241474 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.02563082497562135, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.02563082497562135 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, 
+ "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.24352331606217617, + "acc_stderr": 0.030975436386845436, + "acc_norm": 0.24352331606217617, + "acc_norm_stderr": 0.030975436386845436 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.04049339297748141, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.04049339297748141 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.29541284403669726, + "acc_stderr": 0.019560619182976, + "acc_norm": 0.29541284403669726, + "acc_norm_stderr": 0.019560619182976 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.14285714285714285, + "acc_stderr": 0.0312984318574381, + "acc_norm": 0.14285714285714285, + "acc_norm_stderr": 0.0312984318574381 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.30392156862745096, + "acc_stderr": 0.026336613469046644, + "acc_norm": 0.30392156862745096, + "acc_norm_stderr": 0.026336613469046644 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816508, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816508 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.4462809917355372, + "acc_stderr": 0.0453793517794788, + "acc_norm": 0.4462809917355372, + "acc_norm_stderr": 0.0453793517794788 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3355263157894737, + "acc_stderr": 0.038424985593952694, + "acc_norm": 0.3355263157894737, + "acc_norm_stderr": 0.038424985593952694 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3022875816993464, + "acc_stderr": 0.018579232711113877, + "acc_norm": 0.3022875816993464, + "acc_norm_stderr": 0.018579232711113877 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2907801418439716, + "acc_stderr": 0.027090664368353178, + "acc_norm": 0.2907801418439716, + "acc_norm_stderr": 0.027090664368353178 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.26785714285714285, + "acc_stderr": 0.0420327729146776, + 
"acc_norm": 0.26785714285714285, + "acc_norm_stderr": 0.0420327729146776 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2037037037037037, + "acc_stderr": 0.02746740180405799, + "acc_norm": 0.2037037037037037, + "acc_norm_stderr": 0.02746740180405799 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.17279411764705882, + "acc_stderr": 0.022966067585581788, + "acc_norm": 0.17279411764705882, + "acc_norm_stderr": 0.022966067585581788 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2163265306122449, + "acc_stderr": 0.026358916334904028, + "acc_norm": 0.2163265306122449, + "acc_norm_stderr": 0.026358916334904028 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.3080168776371308, + "acc_stderr": 0.030052389335605695, + "acc_norm": 0.3080168776371308, + "acc_norm_stderr": 0.030052389335605695 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.26140808344198174, + "acc_stderr": 0.011222528169771312, + "acc_norm": 0.26140808344198174, + "acc_norm_stderr": 0.011222528169771312 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.29901960784313725, + "acc_stderr": 0.03213325717373618, + "acc_norm": 0.29901960784313725, + "acc_norm_stderr": 0.03213325717373618 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3393939393939394, + "acc_stderr": 0.03697442205031595, + "acc_norm": 0.3393939393939394, + "acc_norm_stderr": 0.03697442205031595 + }, + 
"harness|ko_truthfulqa_mc|0": { + "mc1": 0.23745410036719705, + "mc1_stderr": 0.014896277441041867, + "mc2": 0.3946101299678252, + "mc2_stderr": 0.01496139592173614 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.19008264462809918, + "acc_stderr": 0.013489827742736766, + "acc_norm": 0.2892561983471074, + "acc_norm_stderr": 0.015588800386053557 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "heegyu/llama-2-ko-7b-chat", + "model_sha": "98096a3f4d095e42ba10daec38ad329d9576f4cd", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/heegyu/llama-2-koen-13b-OKI-v20231124-1e-5/result_2023-11-28 06:34:03.json b/heegyu/llama-2-koen-13b-OKI-v20231124-1e-5/result_2023-11-28 06:34:03.json new file mode 100644 index 0000000000000000000000000000000000000000..6166c7a8998d9a87d53ca8945cab7e3e8c280aa3 --- /dev/null +++ b/heegyu/llama-2-koen-13b-OKI-v20231124-1e-5/result_2023-11-28 06:34:03.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.386518771331058, + "acc_stderr": 0.01423008476191047, + "acc_norm": 0.44795221843003413, + "acc_norm_stderr": 0.014532011498211667 + }, + "harness|ko_hellaswag|10": { + "acc": 
0.42620991834295957, + "acc_stderr": 0.004935143791573814, + "acc_norm": 0.5728938458474407, + "acc_norm_stderr": 0.004936470085238486 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4678362573099415, + "acc_stderr": 0.03826882417660369, + "acc_norm": 0.4678362573099415, + "acc_norm_stderr": 0.03826882417660369 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4174757281553398, + "acc_stderr": 0.04882840548212238, + "acc_norm": 0.4174757281553398, + "acc_norm_stderr": 0.04882840548212238 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5376756066411239, + "acc_stderr": 0.017829131764287184, + "acc_norm": 0.5376756066411239, + "acc_norm_stderr": 0.017829131764287184 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4962962962962963, + "acc_stderr": 0.043192236258113303, + "acc_norm": 0.4962962962962963, + "acc_norm_stderr": 0.043192236258113303 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39148936170212767, + "acc_stderr": 0.03190701242326812, + "acc_norm": 0.39148936170212767, + "acc_norm_stderr": 0.03190701242326812 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42771084337349397, + "acc_stderr": 0.03851597683718533, + "acc_norm": 0.42771084337349397, + "acc_norm_stderr": 0.03851597683718533 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.48231511254019294, + "acc_stderr": 0.02838032284907713, + "acc_norm": 0.48231511254019294, + "acc_norm_stderr": 0.02838032284907713 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5112107623318386, + "acc_stderr": 0.033549366530984746, + "acc_norm": 0.5112107623318386, + "acc_norm_stderr": 0.033549366530984746 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4961832061068702, + "acc_stderr": 0.04385162325601553, + "acc_norm": 0.4961832061068702, + "acc_norm_stderr": 0.04385162325601553 + }, + 
"harness|ko_mmlu_medical_genetics|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.47474747474747475, + "acc_stderr": 0.035578062450873145, + "acc_norm": 0.47474747474747475, + "acc_norm_stderr": 0.035578062450873145 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4482758620689655, + "acc_stderr": 0.04144311810878151, + "acc_norm": 0.4482758620689655, + "acc_norm_stderr": 0.04144311810878151 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.19607843137254902, + "acc_stderr": 0.03950581861179962, + "acc_norm": 0.19607843137254902, + "acc_norm_stderr": 0.03950581861179962 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.41596638655462187, + "acc_stderr": 0.03201650100739615, + "acc_norm": 0.41596638655462187, + "acc_norm_stderr": 0.03201650100739615 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4153846153846154, + "acc_stderr": 0.024985354923102318, + "acc_norm": 0.4153846153846154, + "acc_norm_stderr": 0.024985354923102318 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39408866995073893, + "acc_stderr": 0.03438157967036545, + "acc_norm": 0.39408866995073893, + "acc_norm_stderr": 0.03438157967036545 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.43870967741935485, + "acc_stderr": 0.028229497320317216, + "acc_norm": 0.43870967741935485, + 
"acc_norm_stderr": 0.028229497320317216 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6239316239316239, + "acc_stderr": 0.03173393632969482, + "acc_norm": 0.6239316239316239, + "acc_norm_stderr": 0.03173393632969482 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.41132075471698115, + "acc_stderr": 0.030285009259009798, + "acc_norm": 0.41132075471698115, + "acc_norm_stderr": 0.030285009259009798 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4727272727272727, + "acc_stderr": 0.04782001791380063, + "acc_norm": 0.4727272727272727, + "acc_norm_stderr": 0.04782001791380063 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.02671924078371216, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.02671924078371216 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2781456953642384, + "acc_stderr": 0.03658603262763743, + "acc_norm": 0.2781456953642384, + "acc_norm_stderr": 0.03658603262763743 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.582089552238806, + "acc_stderr": 0.034875586404620636, + "acc_norm": 0.582089552238806, + "acc_norm_stderr": 0.034875586404620636 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.36416184971098264, + "acc_stderr": 0.03669072477416906, + "acc_norm": 0.36416184971098264, + "acc_norm_stderr": 0.03669072477416906 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.31216931216931215, + "acc_stderr": 0.023865206836972602, + "acc_norm": 0.31216931216931215, + "acc_norm_stderr": 0.023865206836972602 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3680555555555556, + "acc_stderr": 0.04032999053960717, + "acc_norm": 0.3680555555555556, + "acc_norm_stderr": 0.04032999053960717 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.65, + "acc_stderr": 
0.047937248544110196, + "acc_norm": 0.65, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5086705202312138, + "acc_stderr": 0.0269150473553698, + "acc_norm": 0.5086705202312138, + "acc_norm_stderr": 0.0269150473553698 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3619631901840491, + "acc_stderr": 0.037757007291414416, + "acc_norm": 0.3619631901840491, + "acc_norm_stderr": 0.037757007291414416 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4567901234567901, + "acc_stderr": 0.027716661650194038, + "acc_norm": 0.4567901234567901, + "acc_norm_stderr": 0.027716661650194038 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.44559585492227977, + "acc_stderr": 0.03587014986075658, + "acc_norm": 0.44559585492227977, + "acc_norm_stderr": 0.03587014986075658 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.040969851398436716, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.040969851398436716 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5009174311926605, + "acc_stderr": 0.021437287056051215, + "acc_norm": 0.5009174311926605, + "acc_norm_stderr": 0.021437287056051215 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30158730158730157, + "acc_stderr": 0.041049472699033945, + "acc_norm": 0.30158730158730157, + "acc_norm_stderr": 0.041049472699033945 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.40522875816993464, + "acc_stderr": 0.028110928492809075, + "acc_norm": 0.40522875816993464, + "acc_norm_stderr": 0.028110928492809075 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 
0.6115702479338843, + "acc_stderr": 0.04449270350068382, + "acc_norm": 0.6115702479338843, + "acc_norm_stderr": 0.04449270350068382 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4144736842105263, + "acc_stderr": 0.04008973785779207, + "acc_norm": 0.4144736842105263, + "acc_norm_stderr": 0.04008973785779207 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.36764705882352944, + "acc_stderr": 0.019506291693954854, + "acc_norm": 0.36764705882352944, + "acc_norm_stderr": 0.019506291693954854 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.30851063829787234, + "acc_stderr": 0.027553366165101362, + "acc_norm": 0.30851063829787234, + "acc_norm_stderr": 0.027553366165101362 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25892857142857145, + "acc_stderr": 0.041577515398656284, + "acc_norm": 0.25892857142857145, + "acc_norm_stderr": 0.041577515398656284 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.03114144782353603, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.03114144782353603 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.47, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.47, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3860294117647059, + "acc_stderr": 0.029573269134411124, + "acc_norm": 0.3860294117647059, + "acc_norm_stderr": 0.029573269134411124 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4489795918367347, + "acc_stderr": 0.0318421386668758, + "acc_norm": 0.4489795918367347, + "acc_norm_stderr": 
0.0318421386668758 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5611814345991561, + "acc_stderr": 0.032302649315470375, + "acc_norm": 0.5611814345991561, + "acc_norm_stderr": 0.032302649315470375 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3044328552803129, + "acc_stderr": 0.011752877592597579, + "acc_norm": 0.3044328552803129, + "acc_norm_stderr": 0.011752877592597579 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.43137254901960786, + "acc_stderr": 0.03476099060501637, + "acc_norm": 0.43137254901960786, + "acc_norm_stderr": 0.03476099060501637 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4909090909090909, + "acc_stderr": 0.0390369864774844, + "acc_norm": 0.4909090909090909, + "acc_norm_stderr": 0.0390369864774844 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.27050183598531213, + "mc1_stderr": 0.015550778332842885, + "mc2": 0.4422768606291578, + "mc2_stderr": 0.01506617476188256 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.49940968122786306, + "acc_stderr": 0.01719034212344866, + "acc_norm": 0.5631641086186541, + "acc_norm_stderr": 0.01705263355985607 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + 
"harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "heegyu/llama-2-koen-13b-OKI-v20231124-1e-5", + "model_sha": 
"8a9a4c042bebc53d1e3fee972cb49752ddabda95", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/heegyu/polyglot-ko-1.3b-chat/result_2023-10-14 15:59:35.json b/heegyu/polyglot-ko-1.3b-chat/result_2023-10-14 15:59:35.json new file mode 100644 index 0000000000000000000000000000000000000000..e50702301673ba18f5ea33470f95222714b44b14 --- /dev/null +++ b/heegyu/polyglot-ko-1.3b-chat/result_2023-10-14 15:59:35.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2295221843003413, + "acc_stderr": 0.012288926760890793, + "acc_norm": 0.27559726962457337, + "acc_norm_stderr": 0.013057169655761838 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3253335988846843, + "acc_stderr": 0.004675418774314241, + "acc_norm": 0.3995220075682135, + "acc_norm_stderr": 0.004887991225950282 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.27485380116959063, + "acc_stderr": 0.03424042924691583, + "acc_norm": 0.27485380116959063, + "acc_norm_stderr": 0.03424042924691583 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.3106796116504854, + "acc_stderr": 0.04582124160161551, + "acc_norm": 0.3106796116504854, + "acc_norm_stderr": 0.04582124160161551 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2784163473818646, + "acc_stderr": 0.016028295188992455, + "acc_norm": 0.2784163473818646, + "acc_norm_stderr": 0.016028295188992455 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.03591444084196969, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.03591444084196969 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036843, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036843 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.20851063829787234, + "acc_stderr": 0.026556982117838752, + "acc_norm": 
0.20851063829787234, + "acc_norm_stderr": 0.026556982117838752 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.20481927710843373, + "acc_stderr": 0.031417842916639245, + "acc_norm": 0.20481927710843373, + "acc_norm_stderr": 0.031417842916639245 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.24758842443729903, + "acc_stderr": 0.024513879973621967, + "acc_norm": 0.24758842443729903, + "acc_norm_stderr": 0.024513879973621967 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.21076233183856502, + "acc_stderr": 0.027373095500540193, + "acc_norm": 0.21076233183856502, + "acc_norm_stderr": 0.027373095500540193 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.21374045801526717, + "acc_stderr": 0.035954616117746904, + "acc_norm": 0.21374045801526717, + "acc_norm_stderr": 0.035954616117746904 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.19, + "acc_stderr": 0.03942772444036623, + "acc_norm": 0.19, + "acc_norm_stderr": 0.03942772444036623 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.2474747474747475, + "acc_stderr": 0.030746300742124498, + "acc_norm": 0.2474747474747475, + "acc_norm_stderr": 0.030746300742124498 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.296551724137931, + "acc_stderr": 0.03806142687309994, + "acc_norm": 0.296551724137931, + "acc_norm_stderr": 0.03806142687309994 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237656, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237656 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.33613445378151263, + "acc_stderr": 0.03068473711513537, + "acc_norm": 0.33613445378151263, + "acc_norm_stderr": 0.03068473711513537 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.31025641025641026, + "acc_stderr": 0.02345467488940429, + "acc_norm": 0.31025641025641026, + "acc_norm_stderr": 0.02345467488940429 + }, + "harness|ko_mmlu_computer_security|5": { + 
"acc": 0.22, + "acc_stderr": 0.04163331998932269, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932269 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.21296296296296297, + "acc_stderr": 0.03957835471980981, + "acc_norm": 0.21296296296296297, + "acc_norm_stderr": 0.03957835471980981 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.28078817733990147, + "acc_stderr": 0.031618563353586114, + "acc_norm": 0.28078817733990147, + "acc_norm_stderr": 0.031618563353586114 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3, + "acc_stderr": 0.026069362295335137, + "acc_norm": 0.3, + "acc_norm_stderr": 0.026069362295335137 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.027236013946196687, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.027236013946196687 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2528301886792453, + "acc_stderr": 0.026749899771241235, + "acc_norm": 0.2528301886792453, + "acc_norm_stderr": 0.026749899771241235 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2545454545454545, + "acc_stderr": 0.041723430387053825, + "acc_norm": 0.2545454545454545, + "acc_norm_stderr": 0.041723430387053825 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3074074074074074, + "acc_stderr": 0.028133252578815635, + "acc_norm": 0.3074074074074074, + "acc_norm_stderr": 0.028133252578815635 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2582781456953642, + "acc_stderr": 0.035737053147634576, + "acc_norm": 0.2582781456953642, + "acc_norm_stderr": 0.035737053147634576 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.27860696517412936, + "acc_stderr": 0.031700561834973086, + "acc_norm": 0.27860696517412936, + "acc_norm_stderr": 0.031700561834973086 + }, + "harness|ko_mmlu_college_medicine|5": 
{ + "acc": 0.30057803468208094, + "acc_stderr": 0.03496101481191179, + "acc_norm": 0.30057803468208094, + "acc_norm_stderr": 0.03496101481191179 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2724867724867725, + "acc_stderr": 0.022930973071633345, + "acc_norm": 0.2724867724867725, + "acc_norm_stderr": 0.022930973071633345 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2361111111111111, + "acc_stderr": 0.03551446610810826, + "acc_norm": 0.2361111111111111, + "acc_norm_stderr": 0.03551446610810826 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.21098265895953758, + "acc_stderr": 0.021966309947043117, + "acc_norm": 0.21098265895953758, + "acc_norm_stderr": 0.021966309947043117 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.294478527607362, + "acc_stderr": 0.03581165790474082, + "acc_norm": 0.294478527607362, + "acc_norm_stderr": 0.03581165790474082 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.25308641975308643, + "acc_stderr": 0.024191808600713002, + "acc_norm": 0.25308641975308643, + "acc_norm_stderr": 0.024191808600713002 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.36787564766839376, + "acc_stderr": 0.034801756684660366, + "acc_norm": 0.36787564766839376, + "acc_norm_stderr": 0.034801756684660366 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.039994238792813365, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.039994238792813365 + }, + 
"harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.20550458715596331, + "acc_stderr": 0.017324352325016015, + "acc_norm": 0.20550458715596331, + "acc_norm_stderr": 0.017324352325016015 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.20634920634920634, + "acc_stderr": 0.0361960452412425, + "acc_norm": 0.20634920634920634, + "acc_norm_stderr": 0.0361960452412425 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.02526169121972948, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.02526169121972948 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.19, + "acc_stderr": 0.039427724440366234, + "acc_norm": 0.19, + "acc_norm_stderr": 0.039427724440366234 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2644628099173554, + "acc_stderr": 0.04026187527591206, + "acc_norm": 0.2644628099173554, + "acc_norm_stderr": 0.04026187527591206 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.17105263157894737, + "acc_stderr": 0.03064360707167709, + "acc_norm": 0.17105263157894737, + "acc_norm_stderr": 0.03064360707167709 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.20261437908496732, + "acc_stderr": 0.01626105528374612, + "acc_norm": 0.20261437908496732, + "acc_norm_stderr": 0.01626105528374612 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.30141843971631205, + "acc_stderr": 0.027374128882631157, + "acc_norm": 0.30141843971631205, + "acc_norm_stderr": 0.027374128882631157 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.22321428571428573, + "acc_stderr": 0.03952301967702511, + "acc_norm": 0.22321428571428573, + "acc_norm_stderr": 0.03952301967702511 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.03388857118502325, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.03388857118502325 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2737430167597765, + "acc_stderr": 0.014912413096372432, + "acc_norm": 
0.2737430167597765, + "acc_norm_stderr": 0.014912413096372432 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542129, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542129 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.41544117647058826, + "acc_stderr": 0.02993534270787775, + "acc_norm": 0.41544117647058826, + "acc_norm_stderr": 0.02993534270787775 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.20816326530612245, + "acc_stderr": 0.025991117672813292, + "acc_norm": 0.20816326530612245, + "acc_norm_stderr": 0.025991117672813292 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2616033755274262, + "acc_stderr": 0.028609516716994927, + "acc_norm": 0.2616033755274262, + "acc_norm_stderr": 0.028609516716994927 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2379400260756193, + "acc_stderr": 0.010875700787694243, + "acc_norm": 0.2379400260756193, + "acc_norm_stderr": 0.010875700787694243 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.030190282453501933, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.030190282453501933 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2606060606060606, + "acc_stderr": 0.034277431758165236, + "acc_norm": 0.2606060606060606, + "acc_norm_stderr": 0.034277431758165236 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.23990208078335373, + "mc1_stderr": 0.014948812679062137, + "mc2": 0.4105215346532836, + "mc2_stderr": 0.015140606421446082 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.29043683589138136, + "acc_stderr": 0.01560760256981463, + "acc_norm": 0.3447461629279811, + "acc_norm_stderr": 0.016340649905418697 + } + }, + "versions": { + "all": 0, + 
"harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + 
"harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "heegyu/polyglot-ko-1.3b-chat", + "model_sha": "156656e44a70bc0905777f682f16237758d16b16", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/heegyu/polyglot-ko-3.8b-chat/result_2023-10-14 16:00:21.json b/heegyu/polyglot-ko-3.8b-chat/result_2023-10-14 16:00:21.json new file mode 100644 index 0000000000000000000000000000000000000000..eb9ffc2771dcbbaf45242beb1ecbc240ab06d886 --- /dev/null +++ b/heegyu/polyglot-ko-3.8b-chat/result_2023-10-14 16:00:21.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2568259385665529, + "acc_stderr": 0.012766923794116801, + "acc_norm": 0.30887372013651876, + "acc_norm_stderr": 0.013501770929344003 + }, + "harness|ko_hellaswag|10": { + "acc": 0.35172276438956385, + "acc_stderr": 0.004765320784902128, + "acc_norm": 0.4396534554869548, + "acc_norm_stderr": 0.004953305461311753 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.30994152046783624, + "acc_stderr": 0.03546976959393161, + "acc_norm": 0.30994152046783624, + "acc_norm_stderr": 0.03546976959393161 + }, + "harness|ko_mmlu_management|5": { + "acc": 
0.23300970873786409, + "acc_stderr": 0.04185832598928315, + "acc_norm": 0.23300970873786409, + "acc_norm_stderr": 0.04185832598928315 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2656449553001277, + "acc_stderr": 0.01579430248788873, + "acc_norm": 0.2656449553001277, + "acc_norm_stderr": 0.01579430248788873 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2518518518518518, + "acc_stderr": 0.03749850709174022, + "acc_norm": 0.2518518518518518, + "acc_norm_stderr": 0.03749850709174022 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.20851063829787234, + "acc_stderr": 0.02655698211783875, + "acc_norm": 0.20851063829787234, + "acc_norm_stderr": 0.02655698211783875 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.2289156626506024, + "acc_stderr": 0.03270745277352477, + "acc_norm": 0.2289156626506024, + "acc_norm_stderr": 0.03270745277352477 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.27009646302250806, + "acc_stderr": 0.025218040373410622, + "acc_norm": 0.27009646302250806, + "acc_norm_stderr": 0.025218040373410622 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.2062780269058296, + "acc_stderr": 0.027157150479563824, + "acc_norm": 0.2062780269058296, + "acc_norm_stderr": 0.027157150479563824 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.22900763358778625, + "acc_stderr": 0.036853466317118506, + "acc_norm": 0.22900763358778625, + "acc_norm_stderr": 0.036853466317118506 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909282, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909282 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.23232323232323232, + "acc_stderr": 0.030088629490217487, + "acc_norm": 0.23232323232323232, + "acc_norm_stderr": 0.030088629490217487 + }, + 
"harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.30344827586206896, + "acc_stderr": 0.03831226048850333, + "acc_norm": 0.30344827586206896, + "acc_norm_stderr": 0.03831226048850333 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.041583075330832865, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.041583075330832865 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.24369747899159663, + "acc_stderr": 0.027886828078380558, + "acc_norm": 0.24369747899159663, + "acc_norm_stderr": 0.027886828078380558 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2717948717948718, + "acc_stderr": 0.022556551010132354, + "acc_norm": 0.2717948717948718, + "acc_norm_stderr": 0.022556551010132354 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542129, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542129 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.040191074725573483, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.040191074725573483 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.31527093596059114, + "acc_stderr": 0.03269080871970186, + "acc_norm": 0.31527093596059114, + "acc_norm_stderr": 0.03269080871970186 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.26129032258064516, + "acc_stderr": 0.024993053397764833, + "acc_norm": 0.26129032258064516, + "acc_norm_stderr": 0.024993053397764833 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.23931623931623933, + "acc_stderr": 0.027951826808924333, + "acc_norm": 0.23931623931623933, + "acc_norm_stderr": 0.027951826808924333 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.22641509433962265, + "acc_stderr": 0.025757559893106734, + "acc_norm": 
0.22641509433962265, + "acc_norm_stderr": 0.025757559893106734 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.17272727272727273, + "acc_stderr": 0.03620691833929217, + "acc_norm": 0.17272727272727273, + "acc_norm_stderr": 0.03620691833929217 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.27037037037037037, + "acc_stderr": 0.027080372815145668, + "acc_norm": 0.27037037037037037, + "acc_norm_stderr": 0.027080372815145668 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.25165562913907286, + "acc_stderr": 0.03543304234389985, + "acc_norm": 0.25165562913907286, + "acc_norm_stderr": 0.03543304234389985 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.2537313432835821, + "acc_stderr": 0.03076944496729602, + "acc_norm": 0.2537313432835821, + "acc_norm_stderr": 0.03076944496729602 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.23121387283236994, + "acc_stderr": 0.03214737302029468, + "acc_norm": 0.23121387283236994, + "acc_norm_stderr": 0.03214737302029468 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.26455026455026454, + "acc_stderr": 0.022717467897708607, + "acc_norm": 0.26455026455026454, + "acc_norm_stderr": 0.022717467897708607 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2708333333333333, + "acc_stderr": 0.03716177437566016, + "acc_norm": 0.2708333333333333, + "acc_norm_stderr": 0.03716177437566016 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036845, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036845 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2630057803468208, + "acc_stderr": 0.023703099525258158, + "acc_norm": 0.2630057803468208, + "acc_norm_stderr": 0.023703099525258158 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.27607361963190186, + 
"acc_stderr": 0.0351238528370505, + "acc_norm": 0.27607361963190186, + "acc_norm_stderr": 0.0351238528370505 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.024922001168886338, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.024922001168886338 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.34196891191709844, + "acc_stderr": 0.03423465100104283, + "acc_norm": 0.34196891191709844, + "acc_norm_stderr": 0.03423465100104283 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.03999423879281336, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.03999423879281336 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.24220183486238533, + "acc_stderr": 0.01836817630659862, + "acc_norm": 0.24220183486238533, + "acc_norm_stderr": 0.01836817630659862 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2698412698412698, + "acc_stderr": 0.03970158273235173, + "acc_norm": 0.2698412698412698, + "acc_norm_stderr": 0.03970158273235173 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.24836601307189543, + "acc_stderr": 0.02473998135511359, + "acc_norm": 0.24836601307189543, + "acc_norm_stderr": 0.02473998135511359 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.371900826446281, + "acc_stderr": 0.04412015806624504, + "acc_norm": 0.371900826446281, + "acc_norm_stderr": 0.04412015806624504 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.29605263157894735, + "acc_stderr": 0.03715062154998904, + "acc_norm": 0.29605263157894735, + "acc_norm_stderr": 0.03715062154998904 + }, + "harness|ko_mmlu_professional_psychology|5": { + 
"acc": 0.26633986928104575, + "acc_stderr": 0.017883188134667192, + "acc_norm": 0.26633986928104575, + "acc_norm_stderr": 0.017883188134667192 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2730496453900709, + "acc_stderr": 0.026577860943307854, + "acc_norm": 0.2730496453900709, + "acc_norm_stderr": 0.026577860943307854 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.23214285714285715, + "acc_stderr": 0.04007341809755808, + "acc_norm": 0.23214285714285715, + "acc_norm_stderr": 0.04007341809755808 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.03324708911809117, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.03324708911809117 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24804469273743016, + "acc_stderr": 0.014444157808261453, + "acc_norm": 0.24804469273743016, + "acc_norm_stderr": 0.014444157808261453 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.26838235294117646, + "acc_stderr": 0.02691748122437723, + "acc_norm": 0.26838235294117646, + "acc_norm_stderr": 0.02691748122437723 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2979591836734694, + "acc_stderr": 0.029279567411065684, + "acc_norm": 0.2979591836734694, + "acc_norm_stderr": 0.029279567411065684 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.26582278481012656, + "acc_stderr": 0.028756799629658332, + "acc_norm": 0.26582278481012656, + "acc_norm_stderr": 0.028756799629658332 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.25945241199478486, + "acc_stderr": 0.011195262076350309, + "acc_norm": 0.25945241199478486, + 
"acc_norm_stderr": 0.011195262076350309 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.24019607843137256, + "acc_stderr": 0.02998373305591361, + "acc_norm": 0.24019607843137256, + "acc_norm_stderr": 0.02998373305591361 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.03453131801885416, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.03453131801885416 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.26438188494492043, + "mc1_stderr": 0.015438211119522517, + "mc2": 0.42818983286182555, + "mc2_stderr": 0.015309048799107149 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.282172373081464, + "acc_stderr": 0.01547327158398843, + "acc_norm": 0.3412042502951594, + "acc_norm_stderr": 0.016300368742137306 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "heegyu/polyglot-ko-3.8b-chat", + "model_sha": "0e8739e22d15d44f6196fb281895856a0372564a", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/heegyu/polyglot-ko-5.8b-chat/result_2023-10-14 16:01:20.json b/heegyu/polyglot-ko-5.8b-chat/result_2023-10-14 16:01:20.json new file mode 
100644 index 0000000000000000000000000000000000000000..8768999ef3aad47f2e8bb54b795000ac3b19deef --- /dev/null +++ b/heegyu/polyglot-ko-5.8b-chat/result_2023-10-14 16:01:20.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2815699658703072, + "acc_stderr": 0.013143376735009007, + "acc_norm": 0.3165529010238908, + "acc_norm_stderr": 0.01359243151906808 + }, + "harness|ko_hellaswag|10": { + "acc": 0.35899223262298346, + "acc_stderr": 0.004787245377967104, + "acc_norm": 0.4522007568213503, + "acc_norm_stderr": 0.004966928094797578 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.23391812865497075, + "acc_stderr": 0.03246721765117827, + "acc_norm": 0.23391812865497075, + "acc_norm_stderr": 0.03246721765117827 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.1553398058252427, + "acc_stderr": 0.03586594738573974, + "acc_norm": 0.1553398058252427, + "acc_norm_stderr": 0.03586594738573974 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.29757343550446996, + "acc_stderr": 0.016349111912909418, + "acc_norm": 0.29757343550446996, + "acc_norm_stderr": 0.016349111912909418 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2074074074074074, + "acc_stderr": 0.03502553170678316, + "acc_norm": 0.2074074074074074, + "acc_norm_stderr": 0.03502553170678316 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3021276595744681, + "acc_stderr": 0.030017554471880557, + "acc_norm": 0.3021276595744681, + "acc_norm_stderr": 0.030017554471880557 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3132530120481928, + "acc_stderr": 0.036108050180310235, + "acc_norm": 0.3132530120481928, + "acc_norm_stderr": 0.036108050180310235 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.26366559485530544, + "acc_stderr": 0.02502553850053234, + "acc_norm": 0.26366559485530544, + 
"acc_norm_stderr": 0.02502553850053234 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3542600896860987, + "acc_stderr": 0.03210062154134988, + "acc_norm": 0.3542600896860987, + "acc_norm_stderr": 0.03210062154134988 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2595419847328244, + "acc_stderr": 0.03844876139785271, + "acc_norm": 0.2595419847328244, + "acc_norm_stderr": 0.03844876139785271 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.23232323232323232, + "acc_stderr": 0.03008862949021749, + "acc_norm": 0.23232323232323232, + "acc_norm_stderr": 0.03008862949021749 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.22758620689655173, + "acc_stderr": 0.03493950380131184, + "acc_norm": 0.22758620689655173, + "acc_norm_stderr": 0.03493950380131184 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.04023382273617747, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.04023382273617747 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.29831932773109243, + "acc_stderr": 0.029719142876342853, + "acc_norm": 0.29831932773109243, + "acc_norm_stderr": 0.029719142876342853 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.32564102564102565, + "acc_stderr": 0.02375966576741229, + "acc_norm": 0.32564102564102565, + "acc_norm_stderr": 0.02375966576741229 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909284, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909284 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.24074074074074073, + "acc_stderr": 0.04133119440243839, + "acc_norm": 
0.24074074074074073, + "acc_norm_stderr": 0.04133119440243839 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2413793103448276, + "acc_stderr": 0.030108330718011625, + "acc_norm": 0.2413793103448276, + "acc_norm_stderr": 0.030108330718011625 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3064516129032258, + "acc_stderr": 0.026226485652553883, + "acc_norm": 0.3064516129032258, + "acc_norm_stderr": 0.026226485652553883 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2564102564102564, + "acc_stderr": 0.02860595370200425, + "acc_norm": 0.2564102564102564, + "acc_norm_stderr": 0.02860595370200425 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2943396226415094, + "acc_stderr": 0.028049186315695245, + "acc_norm": 0.2943396226415094, + "acc_norm_stderr": 0.028049186315695245 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2, + "acc_stderr": 0.038313051408846, + "acc_norm": 0.2, + "acc_norm_stderr": 0.038313051408846 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2851851851851852, + "acc_stderr": 0.027528599210340492, + "acc_norm": 0.2851851851851852, + "acc_norm_stderr": 0.027528599210340492 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2185430463576159, + "acc_stderr": 0.03374235550425694, + "acc_norm": 0.2185430463576159, + "acc_norm_stderr": 0.03374235550425694 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.27860696517412936, + "acc_stderr": 0.031700561834973086, + "acc_norm": 0.27860696517412936, + "acc_norm_stderr": 0.031700561834973086 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2023121387283237, + "acc_stderr": 0.030631145539198823, + "acc_norm": 0.2023121387283237, + "acc_norm_stderr": 0.030631145539198823 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2698412698412698, + "acc_stderr": 0.022860838309232072, + "acc_norm": 0.2698412698412698, + "acc_norm_stderr": 0.022860838309232072 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 
0.2638888888888889, + "acc_stderr": 0.03685651095897532, + "acc_norm": 0.2638888888888889, + "acc_norm_stderr": 0.03685651095897532 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909282, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909282 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.20520231213872833, + "acc_stderr": 0.021742519835276284, + "acc_norm": 0.20520231213872833, + "acc_norm_stderr": 0.021742519835276284 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.25153374233128833, + "acc_stderr": 0.03408997886857529, + "acc_norm": 0.25153374233128833, + "acc_norm_stderr": 0.03408997886857529 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.025407197798890162, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.025407197798890162 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.26424870466321243, + "acc_stderr": 0.03182155050916646, + "acc_norm": 0.26424870466321243, + "acc_norm_stderr": 0.03182155050916646 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.0414243971948936, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.0414243971948936 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.26055045871559634, + "acc_stderr": 0.01881918203485007, + "acc_norm": 0.26055045871559634, + "acc_norm_stderr": 0.01881918203485007 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.23015873015873015, + "acc_stderr": 0.03764950879790606, + "acc_norm": 0.23015873015873015, + "acc_norm_stderr": 0.03764950879790606 + }, + "harness|ko_mmlu_nutrition|5": { + 
"acc": 0.238562091503268, + "acc_stderr": 0.024404394928087873, + "acc_norm": 0.238562091503268, + "acc_norm_stderr": 0.024404394928087873 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.256198347107438, + "acc_stderr": 0.039849796533028704, + "acc_norm": 0.256198347107438, + "acc_norm_stderr": 0.039849796533028704 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.24342105263157895, + "acc_stderr": 0.034923496688842384, + "acc_norm": 0.24342105263157895, + "acc_norm_stderr": 0.034923496688842384 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.01690661592728815, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.01690661592728815 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2553191489361702, + "acc_stderr": 0.026011992930902013, + "acc_norm": 0.2553191489361702, + "acc_norm_stderr": 0.026011992930902013 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3392857142857143, + "acc_stderr": 0.044939490686135404, + "acc_norm": 0.3392857142857143, + "acc_norm_stderr": 0.044939490686135404 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.03256850570293646, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.03256850570293646 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24022346368715083, + "acc_stderr": 0.014288343803925295, + "acc_norm": 0.24022346368715083, + "acc_norm_stderr": 0.014288343803925295 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816505, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816505 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + 
"harness|ko_mmlu_professional_medicine|5": { + "acc": 0.2867647058823529, + "acc_stderr": 0.02747227447323382, + "acc_norm": 0.2867647058823529, + "acc_norm_stderr": 0.02747227447323382 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3551020408163265, + "acc_stderr": 0.03063565515038764, + "acc_norm": 0.3551020408163265, + "acc_norm_stderr": 0.03063565515038764 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.29957805907172996, + "acc_stderr": 0.029818024749753102, + "acc_norm": 0.29957805907172996, + "acc_norm_stderr": 0.029818024749753102 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.25488917861799215, + "acc_stderr": 0.011130509812662979, + "acc_norm": 0.25488917861799215, + "acc_norm_stderr": 0.011130509812662979 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.25980392156862747, + "acc_stderr": 0.030778554678693285, + "acc_norm": 0.25980392156862747, + "acc_norm_stderr": 0.030778554678693285 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2727272727272727, + "acc_stderr": 0.0347769116216366, + "acc_norm": 0.2727272727272727, + "acc_norm_stderr": 0.0347769116216366 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.25458996328029376, + "mc1_stderr": 0.015250117079156475, + "mc2": 0.4027649410811347, + "mc2_stderr": 0.014993381048704797 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.282172373081464, + "acc_stderr": 0.01547327158398843, + "acc_norm": 0.3412042502951594, + "acc_norm_stderr": 0.0163003687421373 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + 
"harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + 
"harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "heegyu/polyglot-ko-5.8b-chat", + "model_sha": "58d274dbd13bd1829a6bd17d90c493bd9039564f", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/heegyu/zephyr-7b-beta-KOR-OpenOrca-Platypus-1e-5/result_2023-11-27 11:04:41.json b/heegyu/zephyr-7b-beta-KOR-OpenOrca-Platypus-1e-5/result_2023-11-27 11:04:41.json new file mode 100644 index 0000000000000000000000000000000000000000..881b0a988b42f89e340a92bd4e7c278f5d95c009 --- /dev/null +++ b/heegyu/zephyr-7b-beta-KOR-OpenOrca-Platypus-1e-5/result_2023-11-27 11:04:41.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3242320819112628, + "acc_stderr": 0.01367881039951882, + "acc_norm": 0.37372013651877134, + "acc_norm_stderr": 0.014137708601759079 + }, + "harness|ko_hellaswag|10": { + "acc": 0.36675960963951404, + "acc_stderr": 0.004809352075008939, + "acc_norm": 0.47171878111929894, + "acc_norm_stderr": 0.004981793089848261 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.43859649122807015, + "acc_stderr": 0.038057975055904594, + "acc_norm": 0.43859649122807015, + "acc_norm_stderr": 0.038057975055904594 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5631067961165048, + "acc_stderr": 0.049111471073657764, + "acc_norm": 0.5631067961165048, + "acc_norm_stderr": 0.049111471073657764 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4648786717752235, + "acc_stderr": 0.017835798806290642, + "acc_norm": 0.4648786717752235, + "acc_norm_stderr": 0.017835798806290642 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 
0.3111111111111111, + "acc_stderr": 0.039992628766177235, + "acc_norm": 0.3111111111111111, + "acc_norm_stderr": 0.039992628766177235 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4, + "acc_stderr": 0.03202563076101737, + "acc_norm": 0.4, + "acc_norm_stderr": 0.03202563076101737 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39156626506024095, + "acc_stderr": 0.037998574544796354, + "acc_norm": 0.39156626506024095, + "acc_norm_stderr": 0.037998574544796354 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4630225080385852, + "acc_stderr": 0.028320325830105915, + "acc_norm": 0.4630225080385852, + "acc_norm_stderr": 0.028320325830105915 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4170403587443946, + "acc_stderr": 0.03309266936071722, + "acc_norm": 0.4170403587443946, + "acc_norm_stderr": 0.03309266936071722 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.40458015267175573, + "acc_stderr": 0.043046937953806645, + "acc_norm": 0.40458015267175573, + "acc_norm_stderr": 0.043046937953806645 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.03547601494006938, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.03547601494006938 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4206896551724138, + "acc_stderr": 0.0411391498118926, + "acc_norm": 0.4206896551724138, + "acc_norm_stderr": 0.0411391498118926 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.04488482852329017, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.04488482852329017 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 
0.5126050420168067, + "acc_stderr": 0.03246816765752174, + "acc_norm": 0.5126050420168067, + "acc_norm_stderr": 0.03246816765752174 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.44358974358974357, + "acc_stderr": 0.025189149894764194, + "acc_norm": 0.44358974358974357, + "acc_norm_stderr": 0.025189149894764194 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.04820403072760627, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.04820403072760627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39901477832512317, + "acc_stderr": 0.03445487686264715, + "acc_norm": 0.39901477832512317, + "acc_norm_stderr": 0.03445487686264715 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.44193548387096776, + "acc_stderr": 0.02825155790684973, + "acc_norm": 0.44193548387096776, + "acc_norm_stderr": 0.02825155790684973 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6581196581196581, + "acc_stderr": 0.031075028526507748, + "acc_norm": 0.6581196581196581, + "acc_norm_stderr": 0.031075028526507748 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4377358490566038, + "acc_stderr": 0.03053333843046751, + "acc_norm": 0.4377358490566038, + "acc_norm_stderr": 0.03053333843046751 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.44545454545454544, + "acc_stderr": 0.04760548821460325, + "acc_norm": 0.44545454545454544, + "acc_norm_stderr": 0.04760548821460325 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.31851851851851853, + "acc_stderr": 0.028406533090608463, + "acc_norm": 0.31851851851851853, + "acc_norm_stderr": 0.028406533090608463 + }, + 
"harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.03757949922943343, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.03757949922943343 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5323383084577115, + "acc_stderr": 0.03528131472933606, + "acc_norm": 0.5323383084577115, + "acc_norm_stderr": 0.03528131472933606 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4161849710982659, + "acc_stderr": 0.03758517775404947, + "acc_norm": 0.4161849710982659, + "acc_norm_stderr": 0.03758517775404947 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3412698412698413, + "acc_stderr": 0.024419234966819074, + "acc_norm": 0.3412698412698413, + "acc_norm_stderr": 0.024419234966819074 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2986111111111111, + "acc_stderr": 0.03827052357950756, + "acc_norm": 0.2986111111111111, + "acc_norm_stderr": 0.03827052357950756 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.56, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.56, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.45375722543352603, + "acc_stderr": 0.026803720583206188, + "acc_norm": 0.45375722543352603, + "acc_norm_stderr": 0.026803720583206188 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.43558282208588955, + "acc_stderr": 0.03895632464138937, + "acc_norm": 0.43558282208588955, + "acc_norm_stderr": 0.03895632464138937 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4382716049382716, + "acc_stderr": 0.027607914087400473, + "acc_norm": 0.4382716049382716, + "acc_norm_stderr": 0.027607914087400473 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + 
"harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.47668393782383417, + "acc_stderr": 0.03604513672442205, + "acc_norm": 0.47668393782383417, + "acc_norm_stderr": 0.03604513672442205 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3157894736842105, + "acc_stderr": 0.04372748290278008, + "acc_norm": 0.3157894736842105, + "acc_norm_stderr": 0.04372748290278008 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.47339449541284406, + "acc_stderr": 0.021406952688151588, + "acc_norm": 0.47339449541284406, + "acc_norm_stderr": 0.021406952688151588 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.04360314860077459, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.04360314860077459 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.46078431372549017, + "acc_stderr": 0.028541722692618874, + "acc_norm": 0.46078431372549017, + "acc_norm_stderr": 0.028541722692618874 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.44, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.44, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5537190082644629, + "acc_stderr": 0.0453793517794788, + "acc_norm": 0.5537190082644629, + "acc_norm_stderr": 0.0453793517794788 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4276315789473684, + "acc_stderr": 0.04026097083296558, + "acc_norm": 0.4276315789473684, + "acc_norm_stderr": 0.04026097083296558 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.34967320261437906, + "acc_stderr": 0.01929196189506638, + "acc_norm": 0.34967320261437906, + "acc_norm_stderr": 0.01929196189506638 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3475177304964539, + "acc_stderr": 0.028406627809590947, + "acc_norm": 0.3475177304964539, + "acc_norm_stderr": 0.028406627809590947 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.26785714285714285, + "acc_stderr": 0.04203277291467763, + 
"acc_norm": 0.26785714285714285, + "acc_norm_stderr": 0.04203277291467763 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.39351851851851855, + "acc_stderr": 0.03331747876370312, + "acc_norm": 0.39351851851851855, + "acc_norm_stderr": 0.03331747876370312 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.29720670391061454, + "acc_stderr": 0.015285313353641595, + "acc_norm": 0.29720670391061454, + "acc_norm_stderr": 0.015285313353641595 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.57, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.57, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.41911764705882354, + "acc_stderr": 0.029972807170464622, + "acc_norm": 0.41911764705882354, + "acc_norm_stderr": 0.029972807170464622 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4897959183673469, + "acc_stderr": 0.032002553478937816, + "acc_norm": 0.4897959183673469, + "acc_norm_stderr": 0.032002553478937816 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5232067510548524, + "acc_stderr": 0.032512152011410174, + "acc_norm": 0.5232067510548524, + "acc_norm_stderr": 0.032512152011410174 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3155149934810952, + "acc_stderr": 0.011869184843058638, + "acc_norm": 0.3155149934810952, + "acc_norm_stderr": 0.011869184843058638 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.03460228327239171, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.03460228327239171 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3696969696969697, + "acc_stderr": 0.03769430314512567, + "acc_norm": 0.3696969696969697, + "acc_norm_stderr": 0.03769430314512567 + }, + 
"harness|ko_truthfulqa_mc|0": { + "mc1": 0.2962056303549572, + "mc1_stderr": 0.01598359510181139, + "mc2": 0.4845124425990411, + "mc2_stderr": 0.01549474022798638 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4155844155844156, + "acc_stderr": 0.016943586313076568, + "acc_norm": 0.44510035419126326, + "acc_norm_stderr": 0.017086417431005467 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "heegyu/zephyr-7b-beta-KOR-OpenOrca-Platypus-1e-5", + "model_sha": "e2a745cc691255a55c3880b49b374d27305faac3", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/hkss/hk-SOLAR-10.7B-v1.1/result_2024-03-20 08:02:14.json b/hkss/hk-SOLAR-10.7B-v1.1/result_2024-03-20 08:02:14.json new file mode 100644 index 0000000000000000000000000000000000000000..66caa12224673002772af58b9c53518c235a80a8 --- /dev/null +++ b/hkss/hk-SOLAR-10.7B-v1.1/result_2024-03-20 08:02:14.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.46928327645051193, + "acc_stderr": 0.014583792546304037, + "acc_norm": 0.5324232081911263, + "acc_norm_stderr": 0.01458063756999543 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4642501493726349, + 
"acc_stderr": 0.0049770106704365505, + "acc_norm": 0.6421031666998606, + "acc_norm_stderr": 0.004784018497679798 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.7192982456140351, + "acc_stderr": 0.034462962170884265, + "acc_norm": 0.7192982456140351, + "acc_norm_stderr": 0.034462962170884265 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6990291262135923, + "acc_stderr": 0.04541609446503947, + "acc_norm": 0.6990291262135923, + "acc_norm_stderr": 0.04541609446503947 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.7266922094508301, + "acc_stderr": 0.015936681062628556, + "acc_norm": 0.7266922094508301, + "acc_norm_stderr": 0.015936681062628556 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45185185185185184, + "acc_stderr": 0.042992689054808624, + "acc_norm": 0.45185185185185184, + "acc_norm_stderr": 0.042992689054808624 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.5404255319148936, + "acc_stderr": 0.03257901482099834, + "acc_norm": 0.5404255319148936, + "acc_norm_stderr": 0.03257901482099834 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.5180722891566265, + "acc_stderr": 0.03889951252827216, + "acc_norm": 0.5180722891566265, + "acc_norm_stderr": 0.03889951252827216 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6141479099678456, + "acc_stderr": 0.027648149599751464, + "acc_norm": 0.6141479099678456, + "acc_norm_stderr": 0.027648149599751464 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5829596412556054, + "acc_stderr": 0.03309266936071721, + "acc_norm": 0.5829596412556054, + "acc_norm_stderr": 0.03309266936071721 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6412213740458015, + "acc_stderr": 0.04206739313864908, + "acc_norm": 0.6412213740458015, + "acc_norm_stderr": 0.04206739313864908 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.46, 
+ "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7424242424242424, + "acc_stderr": 0.031156269519646857, + "acc_norm": 0.7424242424242424, + "acc_norm_stderr": 0.031156269519646857 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5103448275862069, + "acc_stderr": 0.04165774775728763, + "acc_norm": 0.5103448275862069, + "acc_norm_stderr": 0.04165774775728763 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3431372549019608, + "acc_stderr": 0.04724007352383887, + "acc_norm": 0.3431372549019608, + "acc_norm_stderr": 0.04724007352383887 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6512605042016807, + "acc_stderr": 0.030956636328566548, + "acc_norm": 0.6512605042016807, + "acc_norm_stderr": 0.030956636328566548 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5615384615384615, + "acc_stderr": 0.025158266016868613, + "acc_norm": 0.5615384615384615, + "acc_norm_stderr": 0.025158266016868613 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.59, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.59, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6111111111111112, + "acc_stderr": 0.0471282125742677, + "acc_norm": 0.6111111111111112, + "acc_norm_stderr": 0.0471282125742677 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.45320197044334976, + "acc_stderr": 0.03502544650845872, + "acc_norm": 0.45320197044334976, + "acc_norm_stderr": 0.03502544650845872 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6290322580645161, + "acc_stderr": 0.02748054188795359, + "acc_norm": 0.6290322580645161, + "acc_norm_stderr": 0.02748054188795359 + }, + "harness|ko_mmlu_marketing|5": 
{ + "acc": 0.782051282051282, + "acc_stderr": 0.02704685763071666, + "acc_norm": 0.782051282051282, + "acc_norm_stderr": 0.02704685763071666 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5811320754716981, + "acc_stderr": 0.030365050829115208, + "acc_norm": 0.5811320754716981, + "acc_norm_stderr": 0.030365050829115208 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6, + "acc_stderr": 0.0469237132203465, + "acc_norm": 0.6, + "acc_norm_stderr": 0.0469237132203465 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.337037037037037, + "acc_stderr": 0.028820884666253255, + "acc_norm": 0.337037037037037, + "acc_norm_stderr": 0.028820884666253255 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3576158940397351, + "acc_stderr": 0.03913453431177258, + "acc_norm": 0.3576158940397351, + "acc_norm_stderr": 0.03913453431177258 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7213930348258707, + "acc_stderr": 0.031700561834973086, + "acc_norm": 0.7213930348258707, + "acc_norm_stderr": 0.031700561834973086 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5260115606936416, + "acc_stderr": 0.03807301726504513, + "acc_norm": 0.5260115606936416, + "acc_norm_stderr": 0.03807301726504513 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3783068783068783, + "acc_stderr": 0.024976954053155254, + "acc_norm": 0.3783068783068783, + "acc_norm_stderr": 0.024976954053155254 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5763888888888888, + "acc_stderr": 0.041321250197233685, + "acc_norm": 0.5763888888888888, + "acc_norm_stderr": 0.041321250197233685 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.81, + "acc_stderr": 0.03942772444036623, + "acc_norm": 0.81, + "acc_norm_stderr": 0.03942772444036623 + }, + "harness|ko_mmlu_moral_disputes|5": { + 
"acc": 0.615606936416185, + "acc_stderr": 0.026189666966272035, + "acc_norm": 0.615606936416185, + "acc_norm_stderr": 0.026189666966272035 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5521472392638037, + "acc_stderr": 0.03906947479456607, + "acc_norm": 0.5521472392638037, + "acc_norm_stderr": 0.03906947479456607 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6388888888888888, + "acc_stderr": 0.02672586880910079, + "acc_norm": 0.6388888888888888, + "acc_norm_stderr": 0.02672586880910079 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7305699481865285, + "acc_stderr": 0.032018671228777947, + "acc_norm": 0.7305699481865285, + "acc_norm_stderr": 0.032018671228777947 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.41228070175438597, + "acc_stderr": 0.04630653203366596, + "acc_norm": 0.41228070175438597, + "acc_norm_stderr": 0.04630653203366596 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.708256880733945, + "acc_stderr": 0.019489300968876532, + "acc_norm": 0.708256880733945, + "acc_norm_stderr": 0.019489300968876532 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.42063492063492064, + "acc_stderr": 0.04415438226743743, + "acc_norm": 0.42063492063492064, + "acc_norm_stderr": 0.04415438226743743 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.6503267973856209, + "acc_stderr": 0.0273053080762747, + "acc_norm": 0.6503267973856209, + "acc_norm_stderr": 0.0273053080762747 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.59, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.59, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6942148760330579, + "acc_stderr": 0.042059539338841226, + "acc_norm": 0.6942148760330579, + "acc_norm_stderr": 0.042059539338841226 + }, + 
"harness|ko_mmlu_astronomy|5": { + "acc": 0.625, + "acc_stderr": 0.039397364351956274, + "acc_norm": 0.625, + "acc_norm_stderr": 0.039397364351956274 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5179738562091504, + "acc_stderr": 0.020214761037872404, + "acc_norm": 0.5179738562091504, + "acc_norm_stderr": 0.020214761037872404 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3900709219858156, + "acc_stderr": 0.029097675599463926, + "acc_norm": 0.3900709219858156, + "acc_norm_stderr": 0.029097675599463926 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.375, + "acc_stderr": 0.04595091388086298, + "acc_norm": 0.375, + "acc_norm_stderr": 0.04595091388086298 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5277777777777778, + "acc_stderr": 0.0340470532865388, + "acc_norm": 0.5277777777777778, + "acc_norm_stderr": 0.0340470532865388 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2122905027932961, + "acc_stderr": 0.013676644685831728, + "acc_norm": 0.2122905027932961, + "acc_norm_stderr": 0.013676644685831728 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.66, + "acc_stderr": 0.04760952285695238, + "acc_norm": 0.66, + "acc_norm_stderr": 0.04760952285695238 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5441176470588235, + "acc_stderr": 0.030254372573976715, + "acc_norm": 0.5441176470588235, + "acc_norm_stderr": 0.030254372573976715 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6775510204081633, + "acc_stderr": 0.029923100563683913, + "acc_norm": 0.6775510204081633, + "acc_norm_stderr": 0.029923100563683913 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.759493670886076, + "acc_stderr": 0.027820781981149678, + "acc_norm": 0.759493670886076, + "acc_norm_stderr": 
0.027820781981149678 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.40352020860495436, + "acc_stderr": 0.012530241301193188, + "acc_norm": 0.40352020860495436, + "acc_norm_stderr": 0.012530241301193188 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.7107843137254902, + "acc_stderr": 0.03182231867647553, + "acc_norm": 0.7107843137254902, + "acc_norm_stderr": 0.03182231867647553 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.7090909090909091, + "acc_stderr": 0.03546563019624336, + "acc_norm": 0.7090909090909091, + "acc_norm_stderr": 0.03546563019624336 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.30599755201958384, + "mc1_stderr": 0.016132229728155062, + "mc2": 0.45078285529254825, + "mc2_stderr": 0.015218767234402358 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.6080283353010626, + "acc_stderr": 0.01678433211942408, + "acc_norm": 0.6257378984651711, + "acc_norm_stderr": 0.016637917789798746 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, 
+ "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "hkss/hk-SOLAR-10.7B-v1.1", + "model_sha": "bfaeb58a9a483b53f6822577d8fd77660f395cd0", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git 
a/hkss/hk-SOLAR-10.7B-v1.2/result_2024-03-20 08:02:53.json b/hkss/hk-SOLAR-10.7B-v1.2/result_2024-03-20 08:02:53.json new file mode 100644 index 0000000000000000000000000000000000000000..67a91e81519ce7baca3729cc67f670e32c0021fe --- /dev/null +++ b/hkss/hk-SOLAR-10.7B-v1.2/result_2024-03-20 08:02:53.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4718430034129693, + "acc_stderr": 0.014588204105102203, + "acc_norm": 0.5324232081911263, + "acc_norm_stderr": 0.01458063756999543 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4627564230233021, + "acc_stderr": 0.004975919665116535, + "acc_norm": 0.6424019119697272, + "acc_norm_stderr": 0.004783133725599501 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.7076023391812866, + "acc_stderr": 0.03488647713457922, + "acc_norm": 0.7076023391812866, + "acc_norm_stderr": 0.03488647713457922 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6990291262135923, + "acc_stderr": 0.04541609446503947, + "acc_norm": 0.6990291262135923, + "acc_norm_stderr": 0.04541609446503947 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.7241379310344828, + "acc_stderr": 0.015982814774695632, + "acc_norm": 0.7241379310344828, + "acc_norm_stderr": 0.015982814774695632 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45925925925925926, + "acc_stderr": 0.04304979692464244, + "acc_norm": 0.45925925925925926, + "acc_norm_stderr": 0.04304979692464244 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.5404255319148936, + "acc_stderr": 0.03257901482099834, + "acc_norm": 0.5404255319148936, + "acc_norm_stderr": 0.03257901482099834 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.5180722891566265, + "acc_stderr": 0.03889951252827216, + "acc_norm": 0.5180722891566265, + "acc_norm_stderr": 0.03889951252827216 + }, + 
"harness|ko_mmlu_philosophy|5": { + "acc": 0.6141479099678456, + "acc_stderr": 0.027648149599751464, + "acc_norm": 0.6141479099678456, + "acc_norm_stderr": 0.027648149599751464 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5695067264573991, + "acc_stderr": 0.033231973029429394, + "acc_norm": 0.5695067264573991, + "acc_norm_stderr": 0.033231973029429394 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6183206106870229, + "acc_stderr": 0.04260735157644561, + "acc_norm": 0.6183206106870229, + "acc_norm_stderr": 0.04260735157644561 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7424242424242424, + "acc_stderr": 0.031156269519646857, + "acc_norm": 0.7424242424242424, + "acc_norm_stderr": 0.031156269519646857 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5241379310344828, + "acc_stderr": 0.0416180850350153, + "acc_norm": 0.5241379310344828, + "acc_norm_stderr": 0.0416180850350153 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3431372549019608, + "acc_stderr": 0.04724007352383887, + "acc_norm": 0.3431372549019608, + "acc_norm_stderr": 0.04724007352383887 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6638655462184874, + "acc_stderr": 0.03068473711513536, + "acc_norm": 0.6638655462184874, + "acc_norm_stderr": 0.03068473711513536 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5615384615384615, + "acc_stderr": 0.025158266016868616, + "acc_norm": 0.5615384615384615, + "acc_norm_stderr": 0.025158266016868616 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.59, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.59, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 
+ }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6203703703703703, + "acc_stderr": 0.04691521224077742, + "acc_norm": 0.6203703703703703, + "acc_norm_stderr": 0.04691521224077742 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4482758620689655, + "acc_stderr": 0.034991131376767445, + "acc_norm": 0.4482758620689655, + "acc_norm_stderr": 0.034991131376767445 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6258064516129033, + "acc_stderr": 0.027528904299845704, + "acc_norm": 0.6258064516129033, + "acc_norm_stderr": 0.027528904299845704 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7735042735042735, + "acc_stderr": 0.027421007295392933, + "acc_norm": 0.7735042735042735, + "acc_norm_stderr": 0.027421007295392933 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5584905660377358, + "acc_stderr": 0.030561590426731837, + "acc_norm": 0.5584905660377358, + "acc_norm_stderr": 0.030561590426731837 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6181818181818182, + "acc_stderr": 0.046534298079135075, + "acc_norm": 0.6181818181818182, + "acc_norm_stderr": 0.046534298079135075 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.34814814814814815, + "acc_stderr": 0.029045600290616258, + "acc_norm": 0.34814814814814815, + "acc_norm_stderr": 0.029045600290616258 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.36423841059602646, + "acc_stderr": 0.03929111781242742, + "acc_norm": 0.36423841059602646, + "acc_norm_stderr": 0.03929111781242742 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.736318407960199, + "acc_stderr": 0.03115715086935557, + "acc_norm": 0.736318407960199, + "acc_norm_stderr": 0.03115715086935557 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5144508670520231, + "acc_stderr": 0.03810871630454764, + "acc_norm": 0.5144508670520231, + "acc_norm_stderr": 0.03810871630454764 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.38095238095238093, + "acc_stderr": 
0.0250107491161376, + "acc_norm": 0.38095238095238093, + "acc_norm_stderr": 0.0250107491161376 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5763888888888888, + "acc_stderr": 0.041321250197233685, + "acc_norm": 0.5763888888888888, + "acc_norm_stderr": 0.041321250197233685 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.8, + "acc_stderr": 0.040201512610368445, + "acc_norm": 0.8, + "acc_norm_stderr": 0.040201512610368445 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.6127167630057804, + "acc_stderr": 0.026226158605124655, + "acc_norm": 0.6127167630057804, + "acc_norm_stderr": 0.026226158605124655 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5644171779141104, + "acc_stderr": 0.03895632464138937, + "acc_norm": 0.5644171779141104, + "acc_norm_stderr": 0.03895632464138937 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6419753086419753, + "acc_stderr": 0.02667561192603709, + "acc_norm": 0.6419753086419753, + "acc_norm_stderr": 0.02667561192603709 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7305699481865285, + "acc_stderr": 0.032018671228777947, + "acc_norm": 0.7305699481865285, + "acc_norm_stderr": 0.032018671228777947 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.41228070175438597, + "acc_stderr": 0.04630653203366596, + "acc_norm": 0.41228070175438597, + "acc_norm_stderr": 0.04630653203366596 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.708256880733945, + "acc_stderr": 0.019489300968876532, + "acc_norm": 0.708256880733945, + "acc_norm_stderr": 0.019489300968876532 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.42063492063492064, + 
"acc_stderr": 0.04415438226743743, + "acc_norm": 0.42063492063492064, + "acc_norm_stderr": 0.04415438226743743 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.6568627450980392, + "acc_stderr": 0.027184498909941616, + "acc_norm": 0.6568627450980392, + "acc_norm_stderr": 0.027184498909941616 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6776859504132231, + "acc_stderr": 0.042664163633521685, + "acc_norm": 0.6776859504132231, + "acc_norm_stderr": 0.042664163633521685 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5986842105263158, + "acc_stderr": 0.039889037033362836, + "acc_norm": 0.5986842105263158, + "acc_norm_stderr": 0.039889037033362836 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5277777777777778, + "acc_stderr": 0.0201965949335412, + "acc_norm": 0.5277777777777778, + "acc_norm_stderr": 0.0201965949335412 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.38652482269503546, + "acc_stderr": 0.02904919034254346, + "acc_norm": 0.38652482269503546, + "acc_norm_stderr": 0.02904919034254346 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4017857142857143, + "acc_stderr": 0.04653333146973647, + "acc_norm": 0.4017857142857143, + "acc_norm_stderr": 0.04653333146973647 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5092592592592593, + "acc_stderr": 0.034093869469927006, + "acc_norm": 0.5092592592592593, + "acc_norm_stderr": 0.034093869469927006 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2122905027932961, + "acc_stderr": 0.01367664468583173, + "acc_norm": 0.2122905027932961, + "acc_norm_stderr": 0.01367664468583173 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + 
"harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.69, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.69, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5551470588235294, + "acc_stderr": 0.030187532060329383, + "acc_norm": 0.5551470588235294, + "acc_norm_stderr": 0.030187532060329383 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6775510204081633, + "acc_stderr": 0.029923100563683913, + "acc_norm": 0.6775510204081633, + "acc_norm_stderr": 0.029923100563683913 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7637130801687764, + "acc_stderr": 0.02765215314415926, + "acc_norm": 0.7637130801687764, + "acc_norm_stderr": 0.02765215314415926 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.4048239895697523, + "acc_stderr": 0.012536743830953986, + "acc_norm": 0.4048239895697523, + "acc_norm_stderr": 0.012536743830953986 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.7058823529411765, + "acc_stderr": 0.03198001660115071, + "acc_norm": 0.7058823529411765, + "acc_norm_stderr": 0.03198001660115071 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.7090909090909091, + "acc_stderr": 0.03546563019624336, + "acc_norm": 0.7090909090909091, + "acc_norm_stderr": 0.03546563019624336 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3072215422276622, + "mc1_stderr": 0.016150201321322995, + "mc2": 0.44734805674975225, + "mc2_stderr": 0.015172933885140617 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5879574970484062, + "acc_stderr": 0.01692227673852836, + "acc_norm": 0.6139315230224321, + "acc_norm_stderr": 0.01673813076032175 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + 
"harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + 
"harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "hkss/hk-SOLAR-10.7B-v1.2", + "model_sha": "30aef7a1b44621c3c8532787247075875747d279", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/hkss/hk-SOLAR-10.7B-v1.4/result_2024-03-21 06:40:54.json b/hkss/hk-SOLAR-10.7B-v1.4/result_2024-03-21 06:40:54.json new file mode 100644 index 0000000000000000000000000000000000000000..61fce566fa1d9feaddd205afd3def77c9ef0f890 --- /dev/null +++ b/hkss/hk-SOLAR-10.7B-v1.4/result_2024-03-21 06:40:54.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.6988054607508533, + "acc_stderr": 0.013406741767847634, + "acc_norm": 0.7389078498293515, + "acc_norm_stderr": 0.01283552390947385 + }, + "harness|ko_hellaswag|10": { + "acc": 0.568213503286198, + "acc_stderr": 0.004943127583290915, + "acc_norm": 0.7216689902409879, + "acc_norm_stderr": 0.004472613148508924 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.7309941520467836, + "acc_stderr": 0.03401052620104089, + "acc_norm": 0.7309941520467836, + "acc_norm_stderr": 0.03401052620104089 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.7378640776699029, + "acc_stderr": 0.043546310772605956, + "acc_norm": 0.7378640776699029, + "acc_norm_stderr": 0.043546310772605956 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.7037037037037037, + "acc_stderr": 0.016328814422102052, + "acc_norm": 0.7037037037037037, + "acc_norm_stderr": 
0.016328814422102052 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45925925925925926, + "acc_stderr": 0.04304979692464244, + "acc_norm": 0.45925925925925926, + "acc_norm_stderr": 0.04304979692464244 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.5234042553191489, + "acc_stderr": 0.0326501947503358, + "acc_norm": 0.5234042553191489, + "acc_norm_stderr": 0.0326501947503358 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.5, + "acc_stderr": 0.03892494720807614, + "acc_norm": 0.5, + "acc_norm_stderr": 0.03892494720807614 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6559485530546624, + "acc_stderr": 0.026981478043648043, + "acc_norm": 0.6559485530546624, + "acc_norm_stderr": 0.026981478043648043 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.6367713004484304, + "acc_stderr": 0.03227790442850499, + "acc_norm": 0.6367713004484304, + "acc_norm_stderr": 0.03227790442850499 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5725190839694656, + "acc_stderr": 0.043389203057924, + "acc_norm": 0.5725190839694656, + "acc_norm_stderr": 0.043389203057924 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956914, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956914 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.797979797979798, + "acc_stderr": 0.028606204289229876, + "acc_norm": 0.797979797979798, + "acc_norm_stderr": 0.028606204289229876 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5724137931034483, + "acc_stderr": 0.041227371113703316, + "acc_norm": 0.5724137931034483, + "acc_norm_stderr": 0.041227371113703316 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.04617034827006716, + "acc_norm": 0.3137254901960784, + "acc_norm_stderr": 0.04617034827006716 + }, + 
"harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6302521008403361, + "acc_stderr": 0.03135709599613591, + "acc_norm": 0.6302521008403361, + "acc_norm_stderr": 0.03135709599613591 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.6410256410256411, + "acc_stderr": 0.02432173848460235, + "acc_norm": 0.6410256410256411, + "acc_norm_stderr": 0.02432173848460235 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.71, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.71, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.04557239513497751, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.04557239513497751 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4827586206896552, + "acc_stderr": 0.035158955511656986, + "acc_norm": 0.4827586206896552, + "acc_norm_stderr": 0.035158955511656986 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6387096774193548, + "acc_stderr": 0.027327548447957546, + "acc_norm": 0.6387096774193548, + "acc_norm_stderr": 0.027327548447957546 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.8333333333333334, + "acc_stderr": 0.024414947304543688, + "acc_norm": 0.8333333333333334, + "acc_norm_stderr": 0.024414947304543688 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5735849056603773, + "acc_stderr": 0.030437794342983052, + "acc_norm": 0.5735849056603773, + "acc_norm_stderr": 0.030437794342983052 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6272727272727273, + "acc_stderr": 0.046313813194254656, + "acc_norm": 0.6272727272727273, + "acc_norm_stderr": 0.046313813194254656 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3814814814814815, + "acc_stderr": 0.02961671892749759, + "acc_norm": 0.3814814814814815, 
+ "acc_norm_stderr": 0.02961671892749759 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3509933774834437, + "acc_stderr": 0.03896981964257374, + "acc_norm": 0.3509933774834437, + "acc_norm_stderr": 0.03896981964257374 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7412935323383084, + "acc_stderr": 0.030965903123573037, + "acc_norm": 0.7412935323383084, + "acc_norm_stderr": 0.030965903123573037 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5375722543352601, + "acc_stderr": 0.03801685104524458, + "acc_norm": 0.5375722543352601, + "acc_norm_stderr": 0.03801685104524458 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.02568056464005688, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.02568056464005688 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.6527777777777778, + "acc_stderr": 0.039812405437178615, + "acc_norm": 0.6527777777777778, + "acc_norm_stderr": 0.039812405437178615 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.78, + "acc_stderr": 0.04163331998932263, + "acc_norm": 0.78, + "acc_norm_stderr": 0.04163331998932263 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.6098265895953757, + "acc_stderr": 0.02626167760780665, + "acc_norm": 0.6098265895953757, + "acc_norm_stderr": 0.02626167760780665 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.6012269938650306, + "acc_stderr": 0.03847021420456024, + "acc_norm": 0.6012269938650306, + "acc_norm_stderr": 0.03847021420456024 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6728395061728395, + "acc_stderr": 0.026105673861409825, + "acc_norm": 0.6728395061728395, + "acc_norm_stderr": 0.026105673861409825 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + 
"acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7616580310880829, + "acc_stderr": 0.030748905363909895, + "acc_norm": 0.7616580310880829, + "acc_norm_stderr": 0.030748905363909895 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.45614035087719296, + "acc_stderr": 0.046854730419077895, + "acc_norm": 0.45614035087719296, + "acc_norm_stderr": 0.046854730419077895 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.7486238532110092, + "acc_stderr": 0.01859920636028741, + "acc_norm": 0.7486238532110092, + "acc_norm_stderr": 0.01859920636028741 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.044444444444444495, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.044444444444444495 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.6339869281045751, + "acc_stderr": 0.02758281141515961, + "acc_norm": 0.6339869281045751, + "acc_norm_stderr": 0.02758281141515961 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.7, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.7, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.71900826446281, + "acc_stderr": 0.04103203830514512, + "acc_norm": 0.71900826446281, + "acc_norm_stderr": 0.04103203830514512 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.6644736842105263, + "acc_stderr": 0.03842498559395268, + "acc_norm": 0.6644736842105263, + "acc_norm_stderr": 0.03842498559395268 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5849673202614379, + "acc_stderr": 0.01993362777685742, + "acc_norm": 0.5849673202614379, + "acc_norm_stderr": 0.01993362777685742 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3971631205673759, + "acc_stderr": 0.029189805673587102, + "acc_norm": 0.3971631205673759, + "acc_norm_stderr": 0.029189805673587102 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.41964285714285715, + 
"acc_stderr": 0.04684099321077106, + "acc_norm": 0.41964285714285715, + "acc_norm_stderr": 0.04684099321077106 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.49537037037037035, + "acc_stderr": 0.03409825519163572, + "acc_norm": 0.49537037037037035, + "acc_norm_stderr": 0.03409825519163572 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.41675977653631285, + "acc_stderr": 0.01648913496243895, + "acc_norm": 0.41675977653631285, + "acc_norm_stderr": 0.01648913496243895 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.7, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.7, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5514705882352942, + "acc_stderr": 0.030211479609121596, + "acc_norm": 0.5514705882352942, + "acc_norm_stderr": 0.030211479609121596 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6816326530612244, + "acc_stderr": 0.02982253379398204, + "acc_norm": 0.6816326530612244, + "acc_norm_stderr": 0.02982253379398204 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.759493670886076, + "acc_stderr": 0.02782078198114968, + "acc_norm": 0.759493670886076, + "acc_norm_stderr": 0.02782078198114968 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.44589308996088656, + "acc_stderr": 0.012695244711379781, + "acc_norm": 0.44589308996088656, + "acc_norm_stderr": 0.012695244711379781 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6617647058823529, + "acc_stderr": 0.0332057461294543, + "acc_norm": 0.6617647058823529, + "acc_norm_stderr": 0.0332057461294543 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6424242424242425, + "acc_stderr": 0.03742597043806587, + "acc_norm": 0.6424242424242425, + "acc_norm_stderr": 0.03742597043806587 + 
}, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.6976744186046512, + "mc1_stderr": 0.016077509266133022, + "mc2": 0.7952172830277647, + "mc2_stderr": 0.013332334182266132 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5371900826446281, + "acc_stderr": 0.0171427361176433, + "acc_norm": 0.5560802833530106, + "acc_norm_stderr": 0.017081884623542543 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "hkss/hk-SOLAR-10.7B-v1.4", + "model_sha": "39ecd413e5c8a823e27f02bf312a0234b3bdc781", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/hkss/hk-SOLAR-10.7B-v2.0/result_2024-04-01 06:11:21.json b/hkss/hk-SOLAR-10.7B-v2.0/result_2024-04-01 06:11:21.json new file mode 100644 index 0000000000000000000000000000000000000000..00cdd9f5e86e65288cce65531a33584763e6fc83 --- /dev/null +++ b/hkss/hk-SOLAR-10.7B-v2.0/result_2024-04-01 06:11:21.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.46928327645051193, + "acc_stderr": 0.014583792546304037, + "acc_norm": 0.5324232081911263, + "acc_norm_stderr": 0.01458063756999543 + }, + "harness|ko_hellaswag|10": { + "acc": 0.46464847639912366, + "acc_stderr": 
0.004977294024778005, + "acc_norm": 0.6421031666998606, + "acc_norm_stderr": 0.004784018497679798 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.7192982456140351, + "acc_stderr": 0.034462962170884265, + "acc_norm": 0.7192982456140351, + "acc_norm_stderr": 0.034462962170884265 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6990291262135923, + "acc_stderr": 0.04541609446503947, + "acc_norm": 0.6990291262135923, + "acc_norm_stderr": 0.04541609446503947 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.7254150702426565, + "acc_stderr": 0.01595982993308406, + "acc_norm": 0.7254150702426565, + "acc_norm_stderr": 0.01595982993308406 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45185185185185184, + "acc_stderr": 0.042992689054808624, + "acc_norm": 0.45185185185185184, + "acc_norm_stderr": 0.042992689054808624 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.5446808510638298, + "acc_stderr": 0.032555253593403555, + "acc_norm": 0.5446808510638298, + "acc_norm_stderr": 0.032555253593403555 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.5180722891566265, + "acc_stderr": 0.03889951252827216, + "acc_norm": 0.5180722891566265, + "acc_norm_stderr": 0.03889951252827216 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6141479099678456, + "acc_stderr": 0.027648149599751464, + "acc_norm": 0.6141479099678456, + "acc_norm_stderr": 0.027648149599751464 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5829596412556054, + "acc_stderr": 0.03309266936071721, + "acc_norm": 0.5829596412556054, + "acc_norm_stderr": 0.03309266936071721 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6412213740458015, + "acc_stderr": 0.04206739313864908, + "acc_norm": 0.6412213740458015, + "acc_norm_stderr": 0.04206739313864908 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.46, + 
"acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7424242424242424, + "acc_stderr": 0.031156269519646857, + "acc_norm": 0.7424242424242424, + "acc_norm_stderr": 0.031156269519646857 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5172413793103449, + "acc_stderr": 0.04164188720169375, + "acc_norm": 0.5172413793103449, + "acc_norm_stderr": 0.04164188720169375 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3431372549019608, + "acc_stderr": 0.04724007352383887, + "acc_norm": 0.3431372549019608, + "acc_norm_stderr": 0.04724007352383887 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6512605042016807, + "acc_stderr": 0.030956636328566548, + "acc_norm": 0.6512605042016807, + "acc_norm_stderr": 0.030956636328566548 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.558974358974359, + "acc_stderr": 0.02517404838400071, + "acc_norm": 0.558974358974359, + "acc_norm_stderr": 0.02517404838400071 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.59, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.59, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6111111111111112, + "acc_stderr": 0.0471282125742677, + "acc_norm": 0.6111111111111112, + "acc_norm_stderr": 0.0471282125742677 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.45320197044334976, + "acc_stderr": 0.03502544650845872, + "acc_norm": 0.45320197044334976, + "acc_norm_stderr": 0.03502544650845872 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.632258064516129, + "acc_stderr": 0.027430866579973467, + "acc_norm": 0.632258064516129, + "acc_norm_stderr": 0.027430866579973467 + }, + "harness|ko_mmlu_marketing|5": { + 
"acc": 0.782051282051282, + "acc_stderr": 0.02704685763071666, + "acc_norm": 0.782051282051282, + "acc_norm_stderr": 0.02704685763071666 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5811320754716981, + "acc_stderr": 0.030365050829115208, + "acc_norm": 0.5811320754716981, + "acc_norm_stderr": 0.030365050829115208 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6, + "acc_stderr": 0.0469237132203465, + "acc_norm": 0.6, + "acc_norm_stderr": 0.0469237132203465 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.34444444444444444, + "acc_stderr": 0.028972648884844267, + "acc_norm": 0.34444444444444444, + "acc_norm_stderr": 0.028972648884844267 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3576158940397351, + "acc_stderr": 0.03913453431177258, + "acc_norm": 0.3576158940397351, + "acc_norm_stderr": 0.03913453431177258 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7213930348258707, + "acc_stderr": 0.031700561834973086, + "acc_norm": 0.7213930348258707, + "acc_norm_stderr": 0.031700561834973086 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5260115606936416, + "acc_stderr": 0.03807301726504513, + "acc_norm": 0.5260115606936416, + "acc_norm_stderr": 0.03807301726504513 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3783068783068783, + "acc_stderr": 0.024976954053155247, + "acc_norm": 0.3783068783068783, + "acc_norm_stderr": 0.024976954053155247 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5763888888888888, + "acc_stderr": 0.041321250197233685, + "acc_norm": 0.5763888888888888, + "acc_norm_stderr": 0.041321250197233685 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.8, + "acc_stderr": 0.040201512610368445, + "acc_norm": 0.8, + "acc_norm_stderr": 0.040201512610368445 + }, + "harness|ko_mmlu_moral_disputes|5": { + 
"acc": 0.615606936416185, + "acc_stderr": 0.026189666966272035, + "acc_norm": 0.615606936416185, + "acc_norm_stderr": 0.026189666966272035 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5521472392638037, + "acc_stderr": 0.03906947479456607, + "acc_norm": 0.5521472392638037, + "acc_norm_stderr": 0.03906947479456607 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6358024691358025, + "acc_stderr": 0.026774929899722324, + "acc_norm": 0.6358024691358025, + "acc_norm_stderr": 0.026774929899722324 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7305699481865285, + "acc_stderr": 0.032018671228777947, + "acc_norm": 0.7305699481865285, + "acc_norm_stderr": 0.032018671228777947 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.42105263157894735, + "acc_stderr": 0.04644602091222317, + "acc_norm": 0.42105263157894735, + "acc_norm_stderr": 0.04644602091222317 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.708256880733945, + "acc_stderr": 0.019489300968876532, + "acc_norm": 0.708256880733945, + "acc_norm_stderr": 0.019489300968876532 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.42063492063492064, + "acc_stderr": 0.04415438226743743, + "acc_norm": 0.42063492063492064, + "acc_norm_stderr": 0.04415438226743743 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.6535947712418301, + "acc_stderr": 0.02724561304721536, + "acc_norm": 0.6535947712418301, + "acc_norm_stderr": 0.02724561304721536 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.59, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.59, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6942148760330579, + "acc_stderr": 0.042059539338841226, + "acc_norm": 0.6942148760330579, + "acc_norm_stderr": 0.042059539338841226 + }, + 
"harness|ko_mmlu_astronomy|5": { + "acc": 0.625, + "acc_stderr": 0.039397364351956274, + "acc_norm": 0.625, + "acc_norm_stderr": 0.039397364351956274 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5179738562091504, + "acc_stderr": 0.020214761037872404, + "acc_norm": 0.5179738562091504, + "acc_norm_stderr": 0.020214761037872404 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.39361702127659576, + "acc_stderr": 0.029144544781596147, + "acc_norm": 0.39361702127659576, + "acc_norm_stderr": 0.029144544781596147 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.375, + "acc_stderr": 0.04595091388086298, + "acc_norm": 0.375, + "acc_norm_stderr": 0.04595091388086298 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5277777777777778, + "acc_stderr": 0.0340470532865388, + "acc_norm": 0.5277777777777778, + "acc_norm_stderr": 0.0340470532865388 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.21340782122905028, + "acc_stderr": 0.013702859932196098, + "acc_norm": 0.21340782122905028, + "acc_norm_stderr": 0.013702859932196098 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.67, + "acc_stderr": 0.047258156262526066, + "acc_norm": 0.67, + "acc_norm_stderr": 0.047258156262526066 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5477941176470589, + "acc_stderr": 0.03023375855159644, + "acc_norm": 0.5477941176470589, + "acc_norm_stderr": 0.03023375855159644 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6775510204081633, + "acc_stderr": 0.029923100563683913, + "acc_norm": 0.6775510204081633, + "acc_norm_stderr": 0.029923100563683913 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.759493670886076, + "acc_stderr": 0.027820781981149678, + "acc_norm": 0.759493670886076, + "acc_norm_stderr": 
0.027820781981149678 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.40352020860495436, + "acc_stderr": 0.012530241301193188, + "acc_norm": 0.40352020860495436, + "acc_norm_stderr": 0.012530241301193188 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.7107843137254902, + "acc_stderr": 0.03182231867647553, + "acc_norm": 0.7107843137254902, + "acc_norm_stderr": 0.03182231867647553 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.7090909090909091, + "acc_stderr": 0.03546563019624336, + "acc_norm": 0.7090909090909091, + "acc_norm_stderr": 0.03546563019624336 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.30599755201958384, + "mc1_stderr": 0.016132229728155062, + "mc2": 0.45102270748767687, + "mc2_stderr": 0.015222644625077442 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.6068476977567887, + "acc_stderr": 0.01679326280128707, + "acc_norm": 0.6257378984651711, + "acc_norm_stderr": 0.016637917789798746 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, 
+ "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "hkss/hk-SOLAR-10.7B-v2.0", + "model_sha": "7aaca023cfc19803a6bfb97260ccfa269e800642", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git 
a/hometax/llama-8b-base/result_2024-07-10 14:23:00.json b/hometax/llama-8b-base/result_2024-07-10 14:23:00.json new file mode 100644 index 0000000000000000000000000000000000000000..9b33f52df1d47fc6b9b5430e6b6cd45fd98754e3 --- /dev/null +++ b/hometax/llama-8b-base/result_2024-07-10 14:23:00.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.37457337883959047, + "acc_stderr": 0.014144193471893452, + "acc_norm": 0.42918088737201365, + "acc_norm_stderr": 0.014464085894870657 + }, + "harness|ko_hellaswag|10": { + "acc": 0.39145588528181635, + "acc_stderr": 0.004870785036708282, + "acc_norm": 0.527185819557857, + "acc_norm_stderr": 0.004982400368939668 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.03615507630310935, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.03615507630310935 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.7475728155339806, + "acc_stderr": 0.043012503996908764, + "acc_norm": 0.7475728155339806, + "acc_norm_stderr": 0.043012503996908764 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5696040868454662, + "acc_stderr": 0.01770586877629239, + "acc_norm": 0.5696040868454662, + "acc_norm_stderr": 0.01770586877629239 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4, + "acc_stderr": 0.04232073695151589, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04232073695151589 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542124, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542124 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.49361702127659574, + "acc_stderr": 0.03268335899936337, + "acc_norm": 0.49361702127659574, + "acc_norm_stderr": 0.03268335899936337 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4819277108433735, + "acc_stderr": 0.038899512528272166, + "acc_norm": 0.4819277108433735, + "acc_norm_stderr": 0.038899512528272166 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5401929260450161, 
+ "acc_stderr": 0.028306190403305693, + "acc_norm": 0.5401929260450161, + "acc_norm_stderr": 0.028306190403305693 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5336322869955157, + "acc_stderr": 0.033481800170603065, + "acc_norm": 0.5336322869955157, + "acc_norm_stderr": 0.033481800170603065 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5419847328244275, + "acc_stderr": 0.04369802690578757, + "acc_norm": 0.5419847328244275, + "acc_norm_stderr": 0.04369802690578757 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.44, + "acc_stderr": 0.0498887651569859, + "acc_norm": 0.44, + "acc_norm_stderr": 0.0498887651569859 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6919191919191919, + "acc_stderr": 0.03289477330098614, + "acc_norm": 0.6919191919191919, + "acc_norm_stderr": 0.03289477330098614 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5793103448275863, + "acc_stderr": 0.0411391498118926, + "acc_norm": 0.5793103448275863, + "acc_norm_stderr": 0.0411391498118926 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3431372549019608, + "acc_stderr": 0.04724007352383887, + "acc_norm": 0.3431372549019608, + "acc_norm_stderr": 0.04724007352383887 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5588235294117647, + "acc_stderr": 0.03225294232399639, + "acc_norm": 0.5588235294117647, + "acc_norm_stderr": 0.03225294232399639 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5230769230769231, + "acc_stderr": 0.025323990861736246, + "acc_norm": 0.5230769230769231, + "acc_norm_stderr": 0.025323990861736246 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 
0.6111111111111112, + "acc_stderr": 0.0471282125742677, + "acc_norm": 0.6111111111111112, + "acc_norm_stderr": 0.0471282125742677 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.458128078817734, + "acc_stderr": 0.03505630140785741, + "acc_norm": 0.458128078817734, + "acc_norm_stderr": 0.03505630140785741 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5387096774193548, + "acc_stderr": 0.028358634859836945, + "acc_norm": 0.5387096774193548, + "acc_norm_stderr": 0.028358634859836945 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7606837606837606, + "acc_stderr": 0.027951826808924333, + "acc_norm": 0.7606837606837606, + "acc_norm_stderr": 0.027951826808924333 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.49433962264150944, + "acc_stderr": 0.030770900763851302, + "acc_norm": 0.49433962264150944, + "acc_norm_stderr": 0.030770900763851302 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5818181818181818, + "acc_stderr": 0.04724577405731572, + "acc_norm": 0.5818181818181818, + "acc_norm_stderr": 0.04724577405731572 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.029723278961476668, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.029723278961476668 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3973509933774834, + "acc_stderr": 0.03995524007681681, + "acc_norm": 0.3973509933774834, + "acc_norm_stderr": 0.03995524007681681 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6517412935323383, + "acc_stderr": 0.033687874661154596, + "acc_norm": 0.6517412935323383, + "acc_norm_stderr": 0.033687874661154596 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4624277456647399, + "acc_stderr": 0.0380168510452446, + "acc_norm": 0.4624277456647399, + "acc_norm_stderr": 0.0380168510452446 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.37566137566137564, + "acc_stderr": 0.02494236893115978, + "acc_norm": 0.37566137566137564, + 
"acc_norm_stderr": 0.02494236893115978 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5069444444444444, + "acc_stderr": 0.04180806750294938, + "acc_norm": 0.5069444444444444, + "acc_norm_stderr": 0.04180806750294938 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.71, + "acc_stderr": 0.04560480215720683, + "acc_norm": 0.71, + "acc_norm_stderr": 0.04560480215720683 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.569364161849711, + "acc_stderr": 0.026658800273672376, + "acc_norm": 0.569364161849711, + "acc_norm_stderr": 0.026658800273672376 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.48466257668711654, + "acc_stderr": 0.039265223787088424, + "acc_norm": 0.48466257668711654, + "acc_norm_stderr": 0.039265223787088424 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5339506172839507, + "acc_stderr": 0.027756535257347663, + "acc_norm": 0.5339506172839507, + "acc_norm_stderr": 0.027756535257347663 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6321243523316062, + "acc_stderr": 0.034801756684660366, + "acc_norm": 0.6321243523316062, + "acc_norm_stderr": 0.034801756684660366 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.38596491228070173, + "acc_stderr": 0.04579639422070435, + "acc_norm": 0.38596491228070173, + "acc_norm_stderr": 0.04579639422070435 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5889908256880734, + "acc_stderr": 0.02109505068727765, + "acc_norm": 0.5889908256880734, + "acc_norm_stderr": 0.02109505068727765 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.42063492063492064, + "acc_stderr": 0.04415438226743744, + "acc_norm": 
0.42063492063492064, + "acc_norm_stderr": 0.04415438226743744 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5620915032679739, + "acc_stderr": 0.02840830202033269, + "acc_norm": 0.5620915032679739, + "acc_norm_stderr": 0.02840830202033269 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7272727272727273, + "acc_stderr": 0.040655781409087044, + "acc_norm": 0.7272727272727273, + "acc_norm_stderr": 0.040655781409087044 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5526315789473685, + "acc_stderr": 0.0404633688397825, + "acc_norm": 0.5526315789473685, + "acc_norm_stderr": 0.0404633688397825 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.45751633986928103, + "acc_stderr": 0.020154685712590888, + "acc_norm": 0.45751633986928103, + "acc_norm_stderr": 0.020154685712590888 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32978723404255317, + "acc_stderr": 0.028045946942042398, + "acc_norm": 0.32978723404255317, + "acc_norm_stderr": 0.028045946942042398 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.48214285714285715, + "acc_stderr": 0.047427623612430116, + "acc_norm": 0.48214285714285715, + "acc_norm_stderr": 0.047427623612430116 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4861111111111111, + "acc_stderr": 0.03408655867977749, + "acc_norm": 0.4861111111111111, + "acc_norm_stderr": 0.03408655867977749 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.3139664804469274, + "acc_stderr": 0.015521923933523647, + "acc_norm": 0.3139664804469274, + "acc_norm_stderr": 0.015521923933523647 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.63, + 
"acc_stderr": 0.048523658709390974, + "acc_norm": 0.63, + "acc_norm_stderr": 0.048523658709390974 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4338235294117647, + "acc_stderr": 0.03010563657001663, + "acc_norm": 0.4338235294117647, + "acc_norm_stderr": 0.03010563657001663 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5836734693877551, + "acc_stderr": 0.03155782816556165, + "acc_norm": 0.5836734693877551, + "acc_norm_stderr": 0.03155782816556165 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.679324894514768, + "acc_stderr": 0.030381931949990417, + "acc_norm": 0.679324894514768, + "acc_norm_stderr": 0.030381931949990417 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3683181225554107, + "acc_stderr": 0.012319403369564644, + "acc_norm": 0.3683181225554107, + "acc_norm_stderr": 0.012319403369564644 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5980392156862745, + "acc_stderr": 0.03441190023482465, + "acc_norm": 0.5980392156862745, + "acc_norm_stderr": 0.03441190023482465 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6303030303030303, + "acc_stderr": 0.03769430314512569, + "acc_norm": 0.6303030303030303, + "acc_norm_stderr": 0.03769430314512569 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2717258261933905, + "mc1_stderr": 0.015572840452875833, + "mc2": 0.42645500375193074, + "mc2_stderr": 0.014956153456701296 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4722550177095632, + "acc_stderr": 0.017163867979456016, + "acc_norm": 0.6103896103896104, + "acc_norm_stderr": 0.01676616167189351 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + 
"harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 
1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "hometax/llama-8b-base", + "model_sha": "39474e30843e3d8109ee86a42422765d57992b3e", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/hometax/llama3-8b-merge/result_2024-07-25 04:04:31.json b/hometax/llama3-8b-merge/result_2024-07-25 04:04:31.json new file mode 100644 index 0000000000000000000000000000000000000000..fa558fe5681452b61eb9d15b7a5028bc4fd716ec --- /dev/null +++ b/hometax/llama3-8b-merge/result_2024-07-25 04:04:31.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.19112627986348124, + "acc_stderr": 0.011490055292778594, + "acc_norm": 0.2593856655290102, + "acc_norm_stderr": 0.012808273573927102 + }, + "harness|ko_hellaswag|10": { + "acc": 0.2593108942441745, + "acc_stderr": 0.004373608212561026, + "acc_norm": 0.2638916550487951, + "acc_norm_stderr": 0.004398404992933858 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.3216374269005848, + "acc_stderr": 0.03582529442573122, + "acc_norm": 0.3216374269005848, + "acc_norm_stderr": 0.03582529442573122 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.17475728155339806, + "acc_stderr": 0.037601780060266196, + "acc_norm": 0.17475728155339806, + "acc_norm_stderr": 0.037601780060266196 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.23754789272030652, + "acc_stderr": 0.015218733046150191, + "acc_norm": 0.23754789272030652, + "acc_norm_stderr": 0.015218733046150191 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.18518518518518517, + "acc_stderr": 
0.03355677216313142, + "acc_norm": 0.18518518518518517, + "acc_norm_stderr": 0.03355677216313142 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932268, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932268 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.26382978723404255, + "acc_stderr": 0.028809989854102987, + "acc_norm": 0.26382978723404255, + "acc_norm_stderr": 0.028809989854102987 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.28313253012048195, + "acc_stderr": 0.03507295431370518, + "acc_norm": 0.28313253012048195, + "acc_norm_stderr": 0.03507295431370518 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.1832797427652733, + "acc_stderr": 0.02197419884826581, + "acc_norm": 0.1832797427652733, + "acc_norm_stderr": 0.02197419884826581 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.31390134529147984, + "acc_stderr": 0.031146796482972465, + "acc_norm": 0.31390134529147984, + "acc_norm_stderr": 0.031146796482972465 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2595419847328244, + "acc_stderr": 0.03844876139785271, + "acc_norm": 0.2595419847328244, + "acc_norm_stderr": 0.03844876139785271 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.17676767676767677, + "acc_stderr": 0.027178752639044915, + "acc_norm": 0.17676767676767677, + "acc_norm_stderr": 0.027178752639044915 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2413793103448276, + "acc_stderr": 0.03565998174135302, + "acc_norm": 0.2413793103448276, + "acc_norm_stderr": 0.03565998174135302 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237654, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237654 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 
0.21008403361344538, + "acc_stderr": 0.026461398717471874, + "acc_norm": 0.21008403361344538, + "acc_norm_stderr": 0.026461398717471874 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.20256410256410257, + "acc_stderr": 0.020377660970371393, + "acc_norm": 0.20256410256410257, + "acc_norm_stderr": 0.020377660970371393 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.18, + "acc_stderr": 0.038612291966536955, + "acc_norm": 0.18, + "acc_norm_stderr": 0.038612291966536955 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.04236511258094633, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.04236511258094633 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.15270935960591134, + "acc_stderr": 0.025308904539380627, + "acc_norm": 0.15270935960591134, + "acc_norm_stderr": 0.025308904539380627 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.1774193548387097, + "acc_stderr": 0.021732540689329276, + "acc_norm": 0.1774193548387097, + "acc_norm_stderr": 0.021732540689329276 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2905982905982906, + "acc_stderr": 0.029745048572674057, + "acc_norm": 0.2905982905982906, + "acc_norm_stderr": 0.029745048572674057 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.21509433962264152, + "acc_stderr": 0.025288394502891377, + "acc_norm": 0.21509433962264152, + "acc_norm_stderr": 0.025288394502891377 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.22727272727272727, + "acc_stderr": 0.040139645540727756, + "acc_norm": 0.22727272727272727, + "acc_norm_stderr": 0.040139645540727756 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2111111111111111, + "acc_stderr": 0.024882116857655075, + "acc_norm": 0.2111111111111111, + "acc_norm_stderr": 
0.024882116857655075 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.1986754966887417, + "acc_stderr": 0.03257847384436775, + "acc_norm": 0.1986754966887417, + "acc_norm_stderr": 0.03257847384436775 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.24378109452736318, + "acc_stderr": 0.030360490154014645, + "acc_norm": 0.24378109452736318, + "acc_norm_stderr": 0.030360490154014645 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.20809248554913296, + "acc_stderr": 0.030952890217749884, + "acc_norm": 0.20809248554913296, + "acc_norm_stderr": 0.030952890217749884 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.20899470899470898, + "acc_stderr": 0.020940481565334835, + "acc_norm": 0.20899470899470898, + "acc_norm_stderr": 0.020940481565334835 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2569444444444444, + "acc_stderr": 0.03653946969442099, + "acc_norm": 0.2569444444444444, + "acc_norm_stderr": 0.03653946969442099 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.2, + "acc_stderr": 0.040201512610368445, + "acc_norm": 0.2, + "acc_norm_stderr": 0.040201512610368445 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542129, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542129 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.24855491329479767, + "acc_stderr": 0.023267528432100174, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 0.023267528432100174 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.22085889570552147, + "acc_stderr": 0.03259177392742178, + "acc_norm": 0.22085889570552147, + "acc_norm_stderr": 0.03259177392742178 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.21604938271604937, + "acc_stderr": 0.022899162918445796, + "acc_norm": 0.21604938271604937, + "acc_norm_stderr": 0.022899162918445796 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + 
"acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.19689119170984457, + "acc_stderr": 0.028697873971860677, + "acc_norm": 0.19689119170984457, + "acc_norm_stderr": 0.028697873971860677 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.039994238792813365, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.039994238792813365 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.1926605504587156, + "acc_stderr": 0.016909276884936097, + "acc_norm": 0.1926605504587156, + "acc_norm_stderr": 0.016909276884936097 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.04040610178208841, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.04040610178208841 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.02392915551735129, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.02392915551735129 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2396694214876033, + "acc_stderr": 0.038968789850704164, + "acc_norm": 0.2396694214876033, + "acc_norm_stderr": 0.038968789850704164 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.17763157894736842, + "acc_stderr": 0.03110318238312338, + "acc_norm": 0.17763157894736842, + "acc_norm_stderr": 0.03110318238312338 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.25, + "acc_stderr": 0.01751781884501444, + "acc_norm": 0.25, + "acc_norm_stderr": 0.01751781884501444 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.23404255319148937, + "acc_stderr": 0.025257861359432414, + "acc_norm": 0.23404255319148937, + "acc_norm_stderr": 0.025257861359432414 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3125, + "acc_stderr": 
0.043994650575715215, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.043994650575715215 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.1527777777777778, + "acc_stderr": 0.024536326026134238, + "acc_norm": 0.1527777777777778, + "acc_norm_stderr": 0.024536326026134238 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24022346368715083, + "acc_stderr": 0.014288343803925314, + "acc_norm": 0.24022346368715083, + "acc_norm_stderr": 0.014288343803925314 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.18382352941176472, + "acc_stderr": 0.02352924218519311, + "acc_norm": 0.18382352941176472, + "acc_norm_stderr": 0.02352924218519311 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.18775510204081633, + "acc_stderr": 0.025000256039546198, + "acc_norm": 0.18775510204081633, + "acc_norm_stderr": 0.025000256039546198 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.270042194092827, + "acc_stderr": 0.028900721906293426, + "acc_norm": 0.270042194092827, + "acc_norm_stderr": 0.028900721906293426 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2457627118644068, + "acc_stderr": 0.010996156635142692, + "acc_norm": 0.2457627118644068, + "acc_norm_stderr": 0.010996156635142692 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.25, + "acc_stderr": 0.03039153369274154, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03039153369274154 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03225078108306289, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03225078108306289 + }, + "harness|ko_truthfulqa_mc|0": { + 
"mc1": 0.2582619339045288, + "mc1_stderr": 0.015321821688476202, + "mc2": 0.5146662873885536, + "mc2_stderr": 0.016809757736381388 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.12514757969303425, + "acc_stderr": 0.011376101146401418, + "acc_norm": 0.42621015348288077, + "acc_norm_stderr": 0.01700212260948926 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "hometax/llama3-8b-merge", + "model_sha": "54e465e8738c456f19db7f0baba159d93511c407", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/hometax/llama3-8b-phase1_2/result_2024-07-23 02:00:19.json b/hometax/llama3-8b-phase1_2/result_2024-07-23 02:00:19.json new file mode 100644 index 0000000000000000000000000000000000000000..d1b149f27892c2c2ae883f01dded974b4f2d227d --- /dev/null +++ b/hometax/llama3-8b-phase1_2/result_2024-07-23 02:00:19.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2090443686006826, + "acc_stderr": 0.011882746987406453, + "acc_norm": 0.2551194539249147, + "acc_norm_stderr": 0.012739038695202105 + }, + "harness|ko_hellaswag|10": { + "acc": 0.252141007767377, + "acc_stderr": 
0.004333543083293473, + "acc_norm": 0.25184226249751046, + "acc_norm_stderr": 0.004331840012787853 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.29239766081871343, + "acc_stderr": 0.034886477134579236, + "acc_norm": 0.29239766081871343, + "acc_norm_stderr": 0.034886477134579236 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.1941747572815534, + "acc_stderr": 0.03916667762822584, + "acc_norm": 0.1941747572815534, + "acc_norm_stderr": 0.03916667762822584 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2707535121328225, + "acc_stderr": 0.015889888362560486, + "acc_norm": 0.2707535121328225, + "acc_norm_stderr": 0.015889888362560486 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04072314811876837, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04072314811876837 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.20425531914893616, + "acc_stderr": 0.02635515841334941, + "acc_norm": 0.20425531914893616, + "acc_norm_stderr": 0.02635515841334941 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.20481927710843373, + "acc_stderr": 0.03141784291663926, + "acc_norm": 0.20481927710843373, + "acc_norm_stderr": 0.03141784291663926 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2990353697749196, + "acc_stderr": 0.02600330111788514, + "acc_norm": 0.2990353697749196, + "acc_norm_stderr": 0.02600330111788514 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.20179372197309417, + "acc_stderr": 0.02693611191280227, + "acc_norm": 0.20179372197309417, + "acc_norm_stderr": 0.02693611191280227 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.22900763358778625, + "acc_stderr": 0.036853466317118506, + "acc_norm": 0.22900763358778625, + "acc_norm_stderr": 0.036853466317118506 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.2, + 
"acc_stderr": 0.04020151261036845, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036845 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.25252525252525254, + "acc_stderr": 0.030954055470365907, + "acc_norm": 0.25252525252525254, + "acc_norm_stderr": 0.030954055470365907 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.296551724137931, + "acc_stderr": 0.03806142687309994, + "acc_norm": 0.296551724137931, + "acc_norm_stderr": 0.03806142687309994 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237657, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237657 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.21008403361344538, + "acc_stderr": 0.026461398717471874, + "acc_norm": 0.21008403361344538, + "acc_norm_stderr": 0.026461398717471874 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2128205128205128, + "acc_stderr": 0.020752423722128002, + "acc_norm": 0.2128205128205128, + "acc_norm_stderr": 0.020752423722128002 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.23148148148148148, + "acc_stderr": 0.04077494709252627, + "acc_norm": 0.23148148148148148, + "acc_norm_stderr": 0.04077494709252627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2955665024630542, + "acc_stderr": 0.032104944337514575, + "acc_norm": 0.2955665024630542, + "acc_norm_stderr": 0.032104944337514575 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.25161290322580643, + "acc_stderr": 0.024685979286239956, + "acc_norm": 0.25161290322580643, + "acc_norm_stderr": 0.024685979286239956 + }, + 
"harness|ko_mmlu_marketing|5": { + "acc": 0.2564102564102564, + "acc_stderr": 0.028605953702004253, + "acc_norm": 0.2564102564102564, + "acc_norm_stderr": 0.028605953702004253 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2188679245283019, + "acc_stderr": 0.025447863825108597, + "acc_norm": 0.2188679245283019, + "acc_norm_stderr": 0.025447863825108597 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.20909090909090908, + "acc_stderr": 0.03895091015724135, + "acc_norm": 0.20909090909090908, + "acc_norm_stderr": 0.03895091015724135 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.026842057873833706, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.026842057873833706 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.271523178807947, + "acc_stderr": 0.03631329803969653, + "acc_norm": 0.271523178807947, + "acc_norm_stderr": 0.03631329803969653 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.24875621890547264, + "acc_stderr": 0.030567675938916714, + "acc_norm": 0.24875621890547264, + "acc_norm_stderr": 0.030567675938916714 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.24855491329479767, + "acc_stderr": 0.03295304696818317, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 0.03295304696818317 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2671957671957672, + "acc_stderr": 0.022789673145776575, + "acc_norm": 0.2671957671957672, + "acc_norm_stderr": 0.022789673145776575 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2569444444444444, + "acc_stderr": 0.03653946969442099, + "acc_norm": 0.2569444444444444, + "acc_norm_stderr": 0.03653946969442099 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.18, + "acc_stderr": 0.03861229196653695, + "acc_norm": 0.18, + "acc_norm_stderr": 0.03861229196653695 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + 
"acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.29190751445086704, + "acc_stderr": 0.02447699407624734, + "acc_norm": 0.29190751445086704, + "acc_norm_stderr": 0.02447699407624734 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3006134969325153, + "acc_stderr": 0.03602511318806771, + "acc_norm": 0.3006134969325153, + "acc_norm_stderr": 0.03602511318806771 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2932098765432099, + "acc_stderr": 0.025329888171900926, + "acc_norm": 0.2932098765432099, + "acc_norm_stderr": 0.025329888171900926 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.22797927461139897, + "acc_stderr": 0.030276909945178256, + "acc_norm": 0.22797927461139897, + "acc_norm_stderr": 0.030276909945178256 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.04049339297748141, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.04049339297748141 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.22201834862385322, + "acc_stderr": 0.017818849564796617, + "acc_norm": 0.22201834862385322, + "acc_norm_stderr": 0.017818849564796617 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.15079365079365079, + "acc_stderr": 0.03200686497287392, + "acc_norm": 0.15079365079365079, + "acc_norm_stderr": 0.03200686497287392 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.25163398692810457, + "acc_stderr": 0.0248480182638752, + "acc_norm": 0.25163398692810457, + "acc_norm_stderr": 0.0248480182638752 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.371900826446281, + "acc_stderr": 0.04412015806624503, + 
"acc_norm": 0.371900826446281, + "acc_norm_stderr": 0.04412015806624503 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3026315789473684, + "acc_stderr": 0.03738520676119668, + "acc_norm": 0.3026315789473684, + "acc_norm_stderr": 0.03738520676119668 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2761437908496732, + "acc_stderr": 0.018087276935663133, + "acc_norm": 0.2761437908496732, + "acc_norm_stderr": 0.018087276935663133 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2695035460992908, + "acc_stderr": 0.026469036818590634, + "acc_norm": 0.2695035460992908, + "acc_norm_stderr": 0.026469036818590634 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.24107142857142858, + "acc_stderr": 0.04059867246952689, + "acc_norm": 0.24107142857142858, + "acc_norm_stderr": 0.04059867246952689 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.21296296296296297, + "acc_stderr": 0.027920963147993662, + "acc_norm": 0.21296296296296297, + "acc_norm_stderr": 0.027920963147993662 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24692737430167597, + "acc_stderr": 0.014422292204808852, + "acc_norm": 0.24692737430167597, + "acc_norm_stderr": 0.014422292204808852 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.16544117647058823, + "acc_stderr": 0.02257177102549475, + "acc_norm": 0.16544117647058823, + "acc_norm_stderr": 0.02257177102549475 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.24081632653061225, + "acc_stderr": 0.027372942201788167, + "acc_norm": 0.24081632653061225, + "acc_norm_stderr": 0.027372942201788167 + }, + 
"harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.26582278481012656, + "acc_stderr": 0.028756799629658332, + "acc_norm": 0.26582278481012656, + "acc_norm_stderr": 0.028756799629658332 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.27053455019556716, + "acc_stderr": 0.011345996743539265, + "acc_norm": 0.27053455019556716, + "acc_norm_stderr": 0.011345996743539265 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.25980392156862747, + "acc_stderr": 0.030778554678693264, + "acc_norm": 0.25980392156862747, + "acc_norm_stderr": 0.030778554678693264 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.28484848484848485, + "acc_stderr": 0.035243908445117836, + "acc_norm": 0.28484848484848485, + "acc_norm_stderr": 0.035243908445117836 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.25703794369645044, + "mc1_stderr": 0.01529807750948508, + "mc2": 0.5144535793324322, + "mc2_stderr": 0.017006514178845196 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.09681227863046045, + "acc_stderr": 0.010166443512074711, + "acc_norm": 0.48642266824085006, + "acc_norm_stderr": 0.01718401506040145 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + 
"harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "hometax/llama3-8b-phase1_2", + "model_sha": "51db97102b4c0ddc127f9f26c21c29cbe313393f", + 
"model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/hometax/llama3-8b-sft-r16-lora-deepspeed_2/result_2024-07-08 10:27:11.json b/hometax/llama3-8b-sft-r16-lora-deepspeed_2/result_2024-07-08 10:27:11.json new file mode 100644 index 0000000000000000000000000000000000000000..d4b53fc52be28adf1a914b0f5bfe492592a743e7 --- /dev/null +++ b/hometax/llama3-8b-sft-r16-lora-deepspeed_2/result_2024-07-08 10:27:11.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.36945392491467577, + "acc_stderr": 0.014104578366491892, + "acc_norm": 0.43686006825938567, + "acc_norm_stderr": 0.014494421584256519 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3655646285600478, + "acc_stderr": 0.004806039039008956, + "acc_norm": 0.4823740290778729, + "acc_norm_stderr": 0.004986680048438313 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5672514619883041, + "acc_stderr": 0.03799978644370607, + "acc_norm": 0.5672514619883041, + "acc_norm_stderr": 0.03799978644370607 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6601941747572816, + "acc_stderr": 0.04689765937278134, + "acc_norm": 0.6601941747572816, + "acc_norm_stderr": 0.04689765937278134 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.47126436781609193, + "acc_stderr": 0.017850410794380166, + "acc_norm": 0.47126436781609193, + "acc_norm_stderr": 0.017850410794380166 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3925925925925926, + "acc_stderr": 0.042185062153688786, + "acc_norm": 0.3925925925925926, + "acc_norm_stderr": 0.042185062153688786 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847415, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847415 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.5063829787234042, + "acc_stderr": 0.03268335899936336, + "acc_norm": 
0.5063829787234042, + "acc_norm_stderr": 0.03268335899936336 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4036144578313253, + "acc_stderr": 0.03819486140758398, + "acc_norm": 0.4036144578313253, + "acc_norm_stderr": 0.03819486140758398 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5659163987138264, + "acc_stderr": 0.028150232244535608, + "acc_norm": 0.5659163987138264, + "acc_norm_stderr": 0.028150232244535608 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4977578475336323, + "acc_stderr": 0.033557465352232634, + "acc_norm": 0.4977578475336323, + "acc_norm_stderr": 0.033557465352232634 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5267175572519084, + "acc_stderr": 0.04379024936553894, + "acc_norm": 0.5267175572519084, + "acc_norm_stderr": 0.04379024936553894 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.03547601494006939, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.03547601494006939 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5862068965517241, + "acc_stderr": 0.04104269211806231, + "acc_norm": 0.5862068965517241, + "acc_norm_stderr": 0.04104269211806231 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.35294117647058826, + "acc_stderr": 0.047551296160629454, + "acc_norm": 0.35294117647058826, + "acc_norm_stderr": 0.047551296160629454 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5252100840336135, + "acc_stderr": 0.0324371805513741, + "acc_norm": 0.5252100840336135, + "acc_norm_stderr": 0.0324371805513741 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5128205128205128, + "acc_stderr": 0.025342671293807247, + "acc_norm": 0.5128205128205128, + "acc_norm_stderr": 0.025342671293807247 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.61, + 
"acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6574074074074074, + "acc_stderr": 0.045879047413018105, + "acc_norm": 0.6574074074074074, + "acc_norm_stderr": 0.045879047413018105 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.458128078817734, + "acc_stderr": 0.03505630140785741, + "acc_norm": 0.458128078817734, + "acc_norm_stderr": 0.03505630140785741 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5516129032258065, + "acc_stderr": 0.02829205683011273, + "acc_norm": 0.5516129032258065, + "acc_norm_stderr": 0.02829205683011273 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7606837606837606, + "acc_stderr": 0.027951826808924336, + "acc_norm": 0.7606837606837606, + "acc_norm_stderr": 0.027951826808924336 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5094339622641509, + "acc_stderr": 0.0307673947078081, + "acc_norm": 0.5094339622641509, + "acc_norm_stderr": 0.0307673947078081 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5, + "acc_stderr": 0.04789131426105757, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04789131426105757 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3851851851851852, + "acc_stderr": 0.029670906124630882, + "acc_norm": 0.3851851851851852, + "acc_norm_stderr": 0.029670906124630882 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.39072847682119205, + "acc_stderr": 0.039837983066598075, + "acc_norm": 0.39072847682119205, + "acc_norm_stderr": 0.039837983066598075 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.681592039800995, + "acc_stderr": 0.03294118479054095, + "acc_norm": 0.681592039800995, + "acc_norm_stderr": 0.03294118479054095 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4393063583815029, + 
"acc_stderr": 0.03784271932887467, + "acc_norm": 0.4393063583815029, + "acc_norm_stderr": 0.03784271932887467 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.38095238095238093, + "acc_stderr": 0.02501074911613759, + "acc_norm": 0.38095238095238093, + "acc_norm_stderr": 0.02501074911613759 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4652777777777778, + "acc_stderr": 0.04171115858181618, + "acc_norm": 0.4652777777777778, + "acc_norm_stderr": 0.04171115858181618 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.69, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.69, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5491329479768786, + "acc_stderr": 0.026788811931562757, + "acc_norm": 0.5491329479768786, + "acc_norm_stderr": 0.026788811931562757 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.48466257668711654, + "acc_stderr": 0.03926522378708843, + "acc_norm": 0.48466257668711654, + "acc_norm_stderr": 0.03926522378708843 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5123456790123457, + "acc_stderr": 0.027812262269327242, + "acc_norm": 0.5123456790123457, + "acc_norm_stderr": 0.027812262269327242 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5440414507772021, + "acc_stderr": 0.035944137112724366, + "acc_norm": 0.5440414507772021, + "acc_norm_stderr": 0.035944137112724366 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3684210526315789, + "acc_stderr": 0.04537815354939391, + "acc_norm": 0.3684210526315789, + "acc_norm_stderr": 0.04537815354939391 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 
0.5908256880733945, + "acc_stderr": 0.02108067026443373, + "acc_norm": 0.5908256880733945, + "acc_norm_stderr": 0.02108067026443373 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.0442626668137991, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.0442626668137991 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.02845263998508801, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.02845263998508801 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6694214876033058, + "acc_stderr": 0.04294340845212095, + "acc_norm": 0.6694214876033058, + "acc_norm_stderr": 0.04294340845212095 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5, + "acc_stderr": 0.04068942293855797, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04068942293855797 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.44281045751633985, + "acc_stderr": 0.020095083154577354, + "acc_norm": 0.44281045751633985, + "acc_norm_stderr": 0.020095083154577354 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3617021276595745, + "acc_stderr": 0.028663820147199492, + "acc_norm": 0.3617021276595745, + "acc_norm_stderr": 0.028663820147199492 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.45535714285714285, + "acc_stderr": 0.04726835553719099, + "acc_norm": 0.45535714285714285, + "acc_norm_stderr": 0.04726835553719099 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4583333333333333, + "acc_stderr": 0.03398110890294636, + "acc_norm": 0.4583333333333333, + "acc_norm_stderr": 0.03398110890294636 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2759776536312849, + "acc_stderr": 0.014950103002475356, + "acc_norm": 0.2759776536312849, + "acc_norm_stderr": 0.014950103002475356 + }, + 
"harness|ko_mmlu_college_computer_science|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.66, + "acc_stderr": 0.04760952285695237, + "acc_norm": 0.66, + "acc_norm_stderr": 0.04760952285695237 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.40441176470588236, + "acc_stderr": 0.029812630701569743, + "acc_norm": 0.40441176470588236, + "acc_norm_stderr": 0.029812630701569743 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6285714285714286, + "acc_stderr": 0.03093285879278986, + "acc_norm": 0.6285714285714286, + "acc_norm_stderr": 0.03093285879278986 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6624472573839663, + "acc_stderr": 0.03078154910202622, + "acc_norm": 0.6624472573839663, + "acc_norm_stderr": 0.03078154910202622 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.37809647979139505, + "acc_stderr": 0.012384878406798095, + "acc_norm": 0.37809647979139505, + "acc_norm_stderr": 0.012384878406798095 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5588235294117647, + "acc_stderr": 0.034849415144292316, + "acc_norm": 0.5588235294117647, + "acc_norm_stderr": 0.034849415144292316 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6606060606060606, + "acc_stderr": 0.03697442205031595, + "acc_norm": 0.6606060606060606, + "acc_norm_stderr": 0.03697442205031595 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3108935128518972, + "mc1_stderr": 0.016203316673559696, + "mc2": 0.4787622901747856, + "mc2_stderr": 0.015558684156011079 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.43683589138134593, + "acc_stderr": 0.01705263355985607, + "acc_norm": 0.48288075560802834, + "acc_norm_stderr": 0.017180275246085633 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + 
"harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + 
"harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "hometax/llama3-8b-sft-r16-lora-deepspeed_2", + "model_sha": "f872ee84b7ae452deb113a4003adb4adee08a6f2", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/hometax/llama3-8b-sft-r32-lora-deepspeed_2/result_2024-07-08 11:12:11.json b/hometax/llama3-8b-sft-r32-lora-deepspeed_2/result_2024-07-08 11:12:11.json new file mode 100644 index 0000000000000000000000000000000000000000..1d55300f7e9a2ae4cec97e389236d943fc2811b3 --- /dev/null +++ b/hometax/llama3-8b-sft-r32-lora-deepspeed_2/result_2024-07-08 11:12:11.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3703071672354949, + "acc_stderr": 0.01411129875167495, + "acc_norm": 0.43430034129692835, + "acc_norm_stderr": 0.014484703048857352 + }, + "harness|ko_hellaswag|10": { + "acc": 0.36456881099382593, + "acc_stderr": 0.004803253812881046, + "acc_norm": 0.481876120294762, + "acc_norm_stderr": 0.0049865022969311805 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5672514619883041, + "acc_stderr": 0.03799978644370607, + "acc_norm": 0.5672514619883041, + "acc_norm_stderr": 0.03799978644370607 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6504854368932039, + "acc_stderr": 0.04721188506097172, 
+ "acc_norm": 0.6504854368932039, + "acc_norm_stderr": 0.04721188506097172 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.017867695938429778, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.017867695938429778 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3925925925925926, + "acc_stderr": 0.042185062153688786, + "acc_norm": 0.3925925925925926, + "acc_norm_stderr": 0.042185062153688786 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.5106382978723404, + "acc_stderr": 0.03267862331014063, + "acc_norm": 0.5106382978723404, + "acc_norm_stderr": 0.03267862331014063 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4036144578313253, + "acc_stderr": 0.03819486140758398, + "acc_norm": 0.4036144578313253, + "acc_norm_stderr": 0.03819486140758398 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5627009646302251, + "acc_stderr": 0.028173917761762906, + "acc_norm": 0.5627009646302251, + "acc_norm_stderr": 0.028173917761762906 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.49327354260089684, + "acc_stderr": 0.033554765962343545, + "acc_norm": 0.49327354260089684, + "acc_norm_stderr": 0.033554765962343545 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5343511450381679, + "acc_stderr": 0.04374928560599738, + "acc_norm": 0.5343511450381679, + "acc_norm_stderr": 0.04374928560599738 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5353535353535354, + "acc_stderr": 0.03553436368828064, + "acc_norm": 0.5353535353535354, + "acc_norm_stderr": 0.03553436368828064 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5793103448275863, + "acc_stderr": 
0.0411391498118926, + "acc_norm": 0.5793103448275863, + "acc_norm_stderr": 0.0411391498118926 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.35294117647058826, + "acc_stderr": 0.047551296160629454, + "acc_norm": 0.35294117647058826, + "acc_norm_stderr": 0.047551296160629454 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5252100840336135, + "acc_stderr": 0.0324371805513741, + "acc_norm": 0.5252100840336135, + "acc_norm_stderr": 0.0324371805513741 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5256410256410257, + "acc_stderr": 0.02531764972644867, + "acc_norm": 0.5256410256410257, + "acc_norm_stderr": 0.02531764972644867 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939098, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939098 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6574074074074074, + "acc_stderr": 0.045879047413018105, + "acc_norm": 0.6574074074074074, + "acc_norm_stderr": 0.045879047413018105 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.45320197044334976, + "acc_stderr": 0.035025446508458714, + "acc_norm": 0.45320197044334976, + "acc_norm_stderr": 0.035025446508458714 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5516129032258065, + "acc_stderr": 0.028292056830112728, + "acc_norm": 0.5516129032258065, + "acc_norm_stderr": 0.028292056830112728 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7692307692307693, + "acc_stderr": 0.027601921381417618, + "acc_norm": 0.7692307692307693, + "acc_norm_stderr": 0.027601921381417618 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5132075471698113, + "acc_stderr": 0.030762134874500476, + "acc_norm": 0.5132075471698113, + "acc_norm_stderr": 0.030762134874500476 + }, + "harness|ko_mmlu_public_relations|5": { + 
"acc": 0.5, + "acc_stderr": 0.04789131426105757, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04789131426105757 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3962962962962963, + "acc_stderr": 0.029822619458534, + "acc_norm": 0.3962962962962963, + "acc_norm_stderr": 0.029822619458534 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.39072847682119205, + "acc_stderr": 0.039837983066598075, + "acc_norm": 0.39072847682119205, + "acc_norm_stderr": 0.039837983066598075 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.681592039800995, + "acc_stderr": 0.03294118479054095, + "acc_norm": 0.681592039800995, + "acc_norm_stderr": 0.03294118479054095 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4393063583815029, + "acc_stderr": 0.03784271932887467, + "acc_norm": 0.4393063583815029, + "acc_norm_stderr": 0.03784271932887467 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3941798941798942, + "acc_stderr": 0.025167982333894143, + "acc_norm": 0.3941798941798942, + "acc_norm_stderr": 0.025167982333894143 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4652777777777778, + "acc_stderr": 0.04171115858181618, + "acc_norm": 0.4652777777777778, + "acc_norm_stderr": 0.04171115858181618 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.7, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.7, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5520231213872833, + "acc_stderr": 0.026772990653361823, + "acc_norm": 0.5520231213872833, + "acc_norm_stderr": 0.026772990653361823 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4601226993865031, + "acc_stderr": 0.0391585729143697, + "acc_norm": 0.4601226993865031, + "acc_norm_stderr": 0.0391585729143697 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 
0.5123456790123457, + "acc_stderr": 0.027812262269327242, + "acc_norm": 0.5123456790123457, + "acc_norm_stderr": 0.027812262269327242 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5492227979274611, + "acc_stderr": 0.035909109522355244, + "acc_norm": 0.5492227979274611, + "acc_norm_stderr": 0.035909109522355244 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.35964912280701755, + "acc_stderr": 0.04514496132873633, + "acc_norm": 0.35964912280701755, + "acc_norm_stderr": 0.04514496132873633 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5908256880733945, + "acc_stderr": 0.02108067026443373, + "acc_norm": 0.5908256880733945, + "acc_norm_stderr": 0.02108067026443373 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.42063492063492064, + "acc_stderr": 0.04415438226743744, + "acc_norm": 0.42063492063492064, + "acc_norm_stderr": 0.04415438226743744 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5588235294117647, + "acc_stderr": 0.028431095444176643, + "acc_norm": 0.5588235294117647, + "acc_norm_stderr": 0.028431095444176643 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6694214876033058, + "acc_stderr": 0.04294340845212095, + "acc_norm": 0.6694214876033058, + "acc_norm_stderr": 0.04294340845212095 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5, + "acc_stderr": 0.04068942293855797, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04068942293855797 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.020087362076702853, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.020087362076702853 + }, + 
"harness|ko_mmlu_professional_accounting|5": { + "acc": 0.35815602836879434, + "acc_stderr": 0.028602085862759415, + "acc_norm": 0.35815602836879434, + "acc_norm_stderr": 0.028602085862759415 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4642857142857143, + "acc_stderr": 0.04733667890053756, + "acc_norm": 0.4642857142857143, + "acc_norm_stderr": 0.04733667890053756 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4537037037037037, + "acc_stderr": 0.033953227263757976, + "acc_norm": 0.4537037037037037, + "acc_norm_stderr": 0.033953227263757976 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27932960893854747, + "acc_stderr": 0.015005762446786159, + "acc_norm": 0.27932960893854747, + "acc_norm_stderr": 0.015005762446786159 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.48, + "acc_stderr": 0.05021167315686779, + "acc_norm": 0.48, + "acc_norm_stderr": 0.05021167315686779 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.65, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.65, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.40441176470588236, + "acc_stderr": 0.029812630701569743, + "acc_norm": 0.40441176470588236, + "acc_norm_stderr": 0.029812630701569743 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6285714285714286, + "acc_stderr": 0.03093285879278986, + "acc_norm": 0.6285714285714286, + "acc_norm_stderr": 0.03093285879278986 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6624472573839663, + "acc_stderr": 0.03078154910202622, + "acc_norm": 0.6624472573839663, + "acc_norm_stderr": 0.03078154910202622 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.38396349413298564, + "acc_stderr": 0.01242158783313423, + "acc_norm": 0.38396349413298564, + "acc_norm_stderr": 0.01242158783313423 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5686274509803921, + "acc_stderr": 0.03476099060501635, + 
"acc_norm": 0.5686274509803921, + "acc_norm_stderr": 0.03476099060501635 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.036810508691615486, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.036810508691615486 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3108935128518972, + "mc1_stderr": 0.016203316673559696, + "mc2": 0.47940533103336175, + "mc2_stderr": 0.015553492130672565 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.44037780401416765, + "acc_stderr": 0.01706769977431297, + "acc_norm": 0.4852420306965762, + "acc_norm_stderr": 0.017182864434998557 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + 
"harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "hometax/llama3-8b-sft-r32-lora-deepspeed_2", + "model_sha": "28dc1b11970a9694c988f48c2cc101812e055d51", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/hometax/llama3-8b-sft_3-1/result_2024-07-15 07:23:07.json b/hometax/llama3-8b-sft_3-1/result_2024-07-15 07:23:07.json new file mode 100644 index 0000000000000000000000000000000000000000..97383c69d74303014acb909854dc26112c42d50b --- /dev/null +++ b/hometax/llama3-8b-sft_3-1/result_2024-07-15 
07:23:07.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.37542662116040953, + "acc_stderr": 0.014150631435111726, + "acc_norm": 0.4300341296928328, + "acc_norm_stderr": 0.014467631559138 + }, + "harness|ko_hellaswag|10": { + "acc": 0.39165504879506075, + "acc_stderr": 0.004871226629346399, + "acc_norm": 0.527185819557857, + "acc_norm_stderr": 0.0049824003689396676 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.03615507630310935, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.03615507630310935 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.7475728155339806, + "acc_stderr": 0.043012503996908764, + "acc_norm": 0.7475728155339806, + "acc_norm_stderr": 0.043012503996908764 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5696040868454662, + "acc_stderr": 0.01770586877629239, + "acc_norm": 0.5696040868454662, + "acc_norm_stderr": 0.01770586877629239 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4, + "acc_stderr": 0.04232073695151589, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04232073695151589 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542124, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542124 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.49361702127659574, + "acc_stderr": 0.03268335899936337, + "acc_norm": 0.49361702127659574, + "acc_norm_stderr": 0.03268335899936337 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4819277108433735, + "acc_stderr": 0.038899512528272166, + "acc_norm": 0.4819277108433735, + "acc_norm_stderr": 0.038899512528272166 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5401929260450161, + "acc_stderr": 0.028306190403305693, + "acc_norm": 0.5401929260450161, + "acc_norm_stderr": 0.028306190403305693 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5291479820627802, + "acc_stderr": 0.03350073248773404, + "acc_norm": 0.5291479820627802, + "acc_norm_stderr": 
0.03350073248773404 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5419847328244275, + "acc_stderr": 0.04369802690578757, + "acc_norm": 0.5419847328244275, + "acc_norm_stderr": 0.04369802690578757 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.44, + "acc_stderr": 0.0498887651569859, + "acc_norm": 0.44, + "acc_norm_stderr": 0.0498887651569859 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.696969696969697, + "acc_stderr": 0.03274287914026869, + "acc_norm": 0.696969696969697, + "acc_norm_stderr": 0.03274287914026869 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5793103448275863, + "acc_stderr": 0.0411391498118926, + "acc_norm": 0.5793103448275863, + "acc_norm_stderr": 0.0411391498118926 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3235294117647059, + "acc_stderr": 0.046550104113196156, + "acc_norm": 0.3235294117647059, + "acc_norm_stderr": 0.046550104113196156 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5546218487394958, + "acc_stderr": 0.03228410626716391, + "acc_norm": 0.5546218487394958, + "acc_norm_stderr": 0.03228410626716391 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5230769230769231, + "acc_stderr": 0.025323990861736246, + "acc_norm": 0.5230769230769231, + "acc_norm_stderr": 0.025323990861736246 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6018518518518519, + "acc_stderr": 0.04732332615978814, + "acc_norm": 0.6018518518518519, + "acc_norm_stderr": 0.04732332615978814 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.458128078817734, + "acc_stderr": 0.03505630140785741, + "acc_norm": 0.458128078817734, + 
"acc_norm_stderr": 0.03505630140785741 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.535483870967742, + "acc_stderr": 0.02837228779796294, + "acc_norm": 0.535483870967742, + "acc_norm_stderr": 0.02837228779796294 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7606837606837606, + "acc_stderr": 0.027951826808924333, + "acc_norm": 0.7606837606837606, + "acc_norm_stderr": 0.027951826808924333 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4867924528301887, + "acc_stderr": 0.030762134874500482, + "acc_norm": 0.4867924528301887, + "acc_norm_stderr": 0.030762134874500482 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5818181818181818, + "acc_stderr": 0.04724577405731572, + "acc_norm": 0.5818181818181818, + "acc_norm_stderr": 0.04724577405731572 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3925925925925926, + "acc_stderr": 0.02977384701253297, + "acc_norm": 0.3925925925925926, + "acc_norm_stderr": 0.02977384701253297 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.39072847682119205, + "acc_stderr": 0.03983798306659808, + "acc_norm": 0.39072847682119205, + "acc_norm_stderr": 0.03983798306659808 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6517412935323383, + "acc_stderr": 0.033687874661154596, + "acc_norm": 0.6517412935323383, + "acc_norm_stderr": 0.033687874661154596 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4624277456647399, + "acc_stderr": 0.0380168510452446, + "acc_norm": 0.4624277456647399, + "acc_norm_stderr": 0.0380168510452446 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.373015873015873, + "acc_stderr": 0.02490699045899257, + "acc_norm": 0.373015873015873, + "acc_norm_stderr": 0.02490699045899257 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5138888888888888, + "acc_stderr": 0.041795966175810016, + "acc_norm": 0.5138888888888888, + "acc_norm_stderr": 0.041795966175810016 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.42, + 
"acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.7, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.7, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.569364161849711, + "acc_stderr": 0.026658800273672376, + "acc_norm": 0.569364161849711, + "acc_norm_stderr": 0.026658800273672376 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.48466257668711654, + "acc_stderr": 0.039265223787088424, + "acc_norm": 0.48466257668711654, + "acc_norm_stderr": 0.039265223787088424 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5339506172839507, + "acc_stderr": 0.027756535257347663, + "acc_norm": 0.5339506172839507, + "acc_norm_stderr": 0.027756535257347663 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6269430051813472, + "acc_stderr": 0.03490205592048574, + "acc_norm": 0.6269430051813472, + "acc_norm_stderr": 0.03490205592048574 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.38596491228070173, + "acc_stderr": 0.04579639422070435, + "acc_norm": 0.38596491228070173, + "acc_norm_stderr": 0.04579639422070435 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5889908256880734, + "acc_stderr": 0.02109505068727765, + "acc_norm": 0.5889908256880734, + "acc_norm_stderr": 0.02109505068727765 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.42063492063492064, + "acc_stderr": 0.04415438226743744, + "acc_norm": 0.42063492063492064, + "acc_norm_stderr": 0.04415438226743744 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5588235294117647, + "acc_stderr": 0.028431095444176643, + "acc_norm": 0.5588235294117647, + "acc_norm_stderr": 0.028431095444176643 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 
0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7272727272727273, + "acc_stderr": 0.040655781409087044, + "acc_norm": 0.7272727272727273, + "acc_norm_stderr": 0.040655781409087044 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5526315789473685, + "acc_stderr": 0.0404633688397825, + "acc_norm": 0.5526315789473685, + "acc_norm_stderr": 0.0404633688397825 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4542483660130719, + "acc_stderr": 0.020142974553795198, + "acc_norm": 0.4542483660130719, + "acc_norm_stderr": 0.020142974553795198 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32978723404255317, + "acc_stderr": 0.028045946942042398, + "acc_norm": 0.32978723404255317, + "acc_norm_stderr": 0.028045946942042398 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.48214285714285715, + "acc_stderr": 0.047427623612430116, + "acc_norm": 0.48214285714285715, + "acc_norm_stderr": 0.047427623612430116 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4861111111111111, + "acc_stderr": 0.03408655867977749, + "acc_norm": 0.4861111111111111, + "acc_norm_stderr": 0.03408655867977749 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.3128491620111732, + "acc_stderr": 0.015506892594647274, + "acc_norm": 0.3128491620111732, + "acc_norm_stderr": 0.015506892594647274 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.63, + "acc_stderr": 0.048523658709390974, + "acc_norm": 0.63, + "acc_norm_stderr": 0.048523658709390974 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4375, + "acc_stderr": 0.030134614954403924, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.030134614954403924 + }, + 
"harness|ko_mmlu_security_studies|5": { + "acc": 0.5836734693877551, + "acc_stderr": 0.03155782816556165, + "acc_norm": 0.5836734693877551, + "acc_norm_stderr": 0.03155782816556165 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6751054852320675, + "acc_stderr": 0.030486039389105313, + "acc_norm": 0.6751054852320675, + "acc_norm_stderr": 0.030486039389105313 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.36766623207301175, + "acc_stderr": 0.012314845910071707, + "acc_norm": 0.36766623207301175, + "acc_norm_stderr": 0.012314845910071707 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5980392156862745, + "acc_stderr": 0.03441190023482465, + "acc_norm": 0.5980392156862745, + "acc_norm_stderr": 0.03441190023482465 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6242424242424243, + "acc_stderr": 0.03781887353205983, + "acc_norm": 0.6242424242424243, + "acc_norm_stderr": 0.03781887353205983 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2692778457772338, + "mc1_stderr": 0.015528566637087305, + "mc2": 0.42639018277004337, + "mc2_stderr": 0.01495183635571153 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4734356552538371, + "acc_stderr": 0.017166075717577747, + "acc_norm": 0.6127508854781583, + "acc_norm_stderr": 0.016747577991642792 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, 
+ "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + 
"harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "hometax/llama3-8b-sft_3-1", + "model_sha": "5ef1a4e9434472a06c4c2f07c9f666ee1e95cf11", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/hometax/llama3-8b-sft_4/result_2024-07-15 02:18:39.json b/hometax/llama3-8b-sft_4/result_2024-07-15 02:18:39.json new file mode 100644 index 0000000000000000000000000000000000000000..7f1c07c75558d1a71aa215bb516c77017d19b937 --- /dev/null +++ b/hometax/llama3-8b-sft_4/result_2024-07-15 02:18:39.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.37627986348122866, + "acc_stderr": 0.014157022555407163, + "acc_norm": 0.42918088737201365, + "acc_norm_stderr": 0.014464085894870657 + }, + "harness|ko_hellaswag|10": { + "acc": 0.39185421230830514, + "acc_stderr": 0.004871667371060541, + "acc_norm": 0.5272854013144792, + "acc_norm_stderr": 0.004982346155911128 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.03615507630310935, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.03615507630310935 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.7475728155339806, + "acc_stderr": 0.043012503996908764, + "acc_norm": 0.7475728155339806, + "acc_norm_stderr": 0.043012503996908764 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5708812260536399, + "acc_stderr": 0.017699388483126785, + "acc_norm": 0.5708812260536399, + "acc_norm_stderr": 0.017699388483126785 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4, + "acc_stderr": 0.04232073695151589, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04232073695151589 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542124, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542124 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 
0.49361702127659574, + "acc_stderr": 0.03268335899936337, + "acc_norm": 0.49361702127659574, + "acc_norm_stderr": 0.03268335899936337 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4819277108433735, + "acc_stderr": 0.038899512528272166, + "acc_norm": 0.4819277108433735, + "acc_norm_stderr": 0.038899512528272166 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5434083601286174, + "acc_stderr": 0.028290869054197604, + "acc_norm": 0.5434083601286174, + "acc_norm_stderr": 0.028290869054197604 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5336322869955157, + "acc_stderr": 0.033481800170603065, + "acc_norm": 0.5336322869955157, + "acc_norm_stderr": 0.033481800170603065 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5419847328244275, + "acc_stderr": 0.04369802690578757, + "acc_norm": 0.5419847328244275, + "acc_norm_stderr": 0.04369802690578757 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.44, + "acc_stderr": 0.0498887651569859, + "acc_norm": 0.44, + "acc_norm_stderr": 0.0498887651569859 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6919191919191919, + "acc_stderr": 0.03289477330098614, + "acc_norm": 0.6919191919191919, + "acc_norm_stderr": 0.03289477330098614 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5724137931034483, + "acc_stderr": 0.04122737111370333, + "acc_norm": 0.5724137931034483, + "acc_norm_stderr": 0.04122737111370333 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3235294117647059, + "acc_stderr": 0.046550104113196156, + "acc_norm": 0.3235294117647059, + "acc_norm_stderr": 0.046550104113196156 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5588235294117647, + "acc_stderr": 0.03225294232399639, + "acc_norm": 0.5588235294117647, + "acc_norm_stderr": 0.03225294232399639 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5230769230769231, + "acc_stderr": 0.025323990861736246, + "acc_norm": 0.5230769230769231, + "acc_norm_stderr": 
0.025323990861736246 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6018518518518519, + "acc_stderr": 0.04732332615978814, + "acc_norm": 0.6018518518518519, + "acc_norm_stderr": 0.04732332615978814 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4630541871921182, + "acc_stderr": 0.035083705204426656, + "acc_norm": 0.4630541871921182, + "acc_norm_stderr": 0.035083705204426656 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5387096774193548, + "acc_stderr": 0.028358634859836945, + "acc_norm": 0.5387096774193548, + "acc_norm_stderr": 0.028358634859836945 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7606837606837606, + "acc_stderr": 0.027951826808924333, + "acc_norm": 0.7606837606837606, + "acc_norm_stderr": 0.027951826808924333 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.49056603773584906, + "acc_stderr": 0.030767394707808086, + "acc_norm": 0.49056603773584906, + "acc_norm_stderr": 0.030767394707808086 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5818181818181818, + "acc_stderr": 0.04724577405731572, + "acc_norm": 0.5818181818181818, + "acc_norm_stderr": 0.04724577405731572 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3851851851851852, + "acc_stderr": 0.029670906124630882, + "acc_norm": 0.3851851851851852, + "acc_norm_stderr": 0.029670906124630882 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.39072847682119205, + "acc_stderr": 0.03983798306659808, + "acc_norm": 0.39072847682119205, + "acc_norm_stderr": 0.03983798306659808 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6517412935323383, + "acc_stderr": 0.033687874661154596, + "acc_norm": 
0.6517412935323383, + "acc_norm_stderr": 0.033687874661154596 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4682080924855491, + "acc_stderr": 0.03804749744364764, + "acc_norm": 0.4682080924855491, + "acc_norm_stderr": 0.03804749744364764 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.37566137566137564, + "acc_stderr": 0.02494236893115978, + "acc_norm": 0.37566137566137564, + "acc_norm_stderr": 0.02494236893115978 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5069444444444444, + "acc_stderr": 0.04180806750294938, + "acc_norm": 0.5069444444444444, + "acc_norm_stderr": 0.04180806750294938 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.71, + "acc_stderr": 0.04560480215720683, + "acc_norm": 0.71, + "acc_norm_stderr": 0.04560480215720683 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.569364161849711, + "acc_stderr": 0.026658800273672376, + "acc_norm": 0.569364161849711, + "acc_norm_stderr": 0.026658800273672376 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4723926380368098, + "acc_stderr": 0.039223782906109894, + "acc_norm": 0.4723926380368098, + "acc_norm_stderr": 0.039223782906109894 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5339506172839507, + "acc_stderr": 0.027756535257347663, + "acc_norm": 0.5339506172839507, + "acc_norm_stderr": 0.027756535257347663 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6269430051813472, + "acc_stderr": 0.03490205592048574, + "acc_norm": 0.6269430051813472, + "acc_norm_stderr": 0.03490205592048574 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.38596491228070173, + "acc_stderr": 0.04579639422070435, + 
"acc_norm": 0.38596491228070173, + "acc_norm_stderr": 0.04579639422070435 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5926605504587156, + "acc_stderr": 0.021065986244412898, + "acc_norm": 0.5926605504587156, + "acc_norm_stderr": 0.021065986244412898 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.42063492063492064, + "acc_stderr": 0.04415438226743744, + "acc_norm": 0.42063492063492064, + "acc_norm_stderr": 0.04415438226743744 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5588235294117647, + "acc_stderr": 0.028431095444176643, + "acc_norm": 0.5588235294117647, + "acc_norm_stderr": 0.028431095444176643 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7272727272727273, + "acc_stderr": 0.040655781409087044, + "acc_norm": 0.7272727272727273, + "acc_norm_stderr": 0.040655781409087044 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5592105263157895, + "acc_stderr": 0.04040311062490436, + "acc_norm": 0.5592105263157895, + "acc_norm_stderr": 0.04040311062490436 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.45751633986928103, + "acc_stderr": 0.020154685712590888, + "acc_norm": 0.45751633986928103, + "acc_norm_stderr": 0.020154685712590888 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32978723404255317, + "acc_stderr": 0.028045946942042398, + "acc_norm": 0.32978723404255317, + "acc_norm_stderr": 0.028045946942042398 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.48214285714285715, + "acc_stderr": 0.047427623612430116, + "acc_norm": 0.48214285714285715, + "acc_norm_stderr": 0.047427623612430116 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.47685185185185186, + "acc_stderr": 0.03406315360711507, + "acc_norm": 0.47685185185185186, + "acc_norm_stderr": 0.03406315360711507 + }, + "harness|ko_mmlu_moral_scenarios|5": { 
+ "acc": 0.30837988826815643, + "acc_stderr": 0.01544571691099887, + "acc_norm": 0.30837988826815643, + "acc_norm_stderr": 0.01544571691099887 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.63, + "acc_stderr": 0.048523658709390974, + "acc_norm": 0.63, + "acc_norm_stderr": 0.048523658709390974 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4375, + "acc_stderr": 0.030134614954403924, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.030134614954403924 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5918367346938775, + "acc_stderr": 0.03146465712827424, + "acc_norm": 0.5918367346938775, + "acc_norm_stderr": 0.03146465712827424 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6751054852320675, + "acc_stderr": 0.03048603938910531, + "acc_norm": 0.6751054852320675, + "acc_norm_stderr": 0.03048603938910531 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3683181225554107, + "acc_stderr": 0.012319403369564644, + "acc_norm": 0.3683181225554107, + "acc_norm_stderr": 0.012319403369564644 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5980392156862745, + "acc_stderr": 0.03441190023482465, + "acc_norm": 0.5980392156862745, + "acc_norm_stderr": 0.03441190023482465 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6242424242424243, + "acc_stderr": 0.03781887353205983, + "acc_norm": 0.6242424242424243, + "acc_norm_stderr": 0.03781887353205983 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.27050183598531213, + "mc1_stderr": 0.015550778332842881, + "mc2": 0.4265293509670618, + "mc2_stderr": 0.014953967257464005 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.47107438016528924, + "acc_stderr": 0.017161563949916348, + "acc_norm": 0.6139315230224321, + "acc_norm_stderr": 0.01673813076032175 + } + }, + 
"versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + 
"harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "hometax/llama3-8b-sft_4", + "model_sha": "f6adf26e98694fcd179ae775c07147a6b1da0d90", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/hometax/llama3-pre1-pre2-ds-lora3-mergkit/result_2024-07-31 06:41:05.json b/hometax/llama3-pre1-pre2-ds-lora3-mergkit/result_2024-07-31 06:41:05.json new file mode 100644 index 0000000000000000000000000000000000000000..77f7580afaa40d32a6aa7d7cd7e59b28b9694ab6 --- /dev/null +++ b/hometax/llama3-pre1-pre2-ds-lora3-mergkit/result_2024-07-31 06:41:05.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3515358361774744, + "acc_stderr": 0.013952413699600938, + "acc_norm": 0.42235494880546076, + "acc_norm_stderr": 0.014434138713379984 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3318064130651265, + "acc_stderr": 0.004698995789478821, + "acc_norm": 0.4060944035052778, + "acc_norm_stderr": 0.004900988997414232 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6257309941520468, + "acc_stderr": 0.03711601185389481, + "acc_norm": 0.6257309941520468, + "acc_norm_stderr": 0.03711601185389481 + }, + 
"harness|ko_mmlu_management|5": { + "acc": 0.6407766990291263, + "acc_stderr": 0.04750458399041695, + "acc_norm": 0.6407766990291263, + "acc_norm_stderr": 0.04750458399041695 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5389527458492975, + "acc_stderr": 0.017825621793239012, + "acc_norm": 0.5389527458492975, + "acc_norm_stderr": 0.017825621793239012 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3925925925925926, + "acc_stderr": 0.042185062153688786, + "acc_norm": 0.3925925925925926, + "acc_norm_stderr": 0.042185062153688786 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4595744680851064, + "acc_stderr": 0.032579014820998356, + "acc_norm": 0.4595744680851064, + "acc_norm_stderr": 0.032579014820998356 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.40963855421686746, + "acc_stderr": 0.03828401115079022, + "acc_norm": 0.40963855421686746, + "acc_norm_stderr": 0.03828401115079022 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5466237942122186, + "acc_stderr": 0.02827435985489424, + "acc_norm": 0.5466237942122186, + "acc_norm_stderr": 0.02827435985489424 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5022421524663677, + "acc_stderr": 0.03355746535223264, + "acc_norm": 0.5022421524663677, + "acc_norm_stderr": 0.03355746535223264 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5267175572519084, + "acc_stderr": 0.04379024936553894, + "acc_norm": 0.5267175572519084, + "acc_norm_stderr": 0.04379024936553894 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5505050505050505, + "acc_stderr": 0.035441324919479704, + "acc_norm": 0.5505050505050505, + "acc_norm_stderr": 0.035441324919479704 + }, + 
"harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.6, + "acc_stderr": 0.040824829046386284, + "acc_norm": 0.6, + "acc_norm_stderr": 0.040824829046386284 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.35294117647058826, + "acc_stderr": 0.047551296160629475, + "acc_norm": 0.35294117647058826, + "acc_norm_stderr": 0.047551296160629475 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.542016806722689, + "acc_stderr": 0.03236361111951941, + "acc_norm": 0.542016806722689, + "acc_norm_stderr": 0.03236361111951941 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5102564102564102, + "acc_stderr": 0.025345672221942374, + "acc_norm": 0.5102564102564102, + "acc_norm_stderr": 0.025345672221942374 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6111111111111112, + "acc_stderr": 0.0471282125742677, + "acc_norm": 0.6111111111111112, + "acc_norm_stderr": 0.0471282125742677 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.45320197044334976, + "acc_stderr": 0.035025446508458714, + "acc_norm": 0.45320197044334976, + "acc_norm_stderr": 0.035025446508458714 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5258064516129032, + "acc_stderr": 0.028406095057653326, + "acc_norm": 0.5258064516129032, + "acc_norm_stderr": 0.028406095057653326 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7435897435897436, + "acc_stderr": 0.028605953702004243, + "acc_norm": 0.7435897435897436, + "acc_norm_stderr": 0.028605953702004243 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5018867924528302, + "acc_stderr": 0.030772653642075664, + "acc_norm": 0.5018867924528302, + "acc_norm_stderr": 
0.030772653642075664 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5909090909090909, + "acc_stderr": 0.04709306978661895, + "acc_norm": 0.5909090909090909, + "acc_norm_stderr": 0.04709306978661895 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.362962962962963, + "acc_stderr": 0.02931820364520686, + "acc_norm": 0.362962962962963, + "acc_norm_stderr": 0.02931820364520686 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.4105960264900662, + "acc_stderr": 0.04016689594849929, + "acc_norm": 0.4105960264900662, + "acc_norm_stderr": 0.04016689594849929 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6865671641791045, + "acc_stderr": 0.03280188205348643, + "acc_norm": 0.6865671641791045, + "acc_norm_stderr": 0.03280188205348643 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.45664739884393063, + "acc_stderr": 0.03798106566014498, + "acc_norm": 0.45664739884393063, + "acc_norm_stderr": 0.03798106566014498 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3783068783068783, + "acc_stderr": 0.024976954053155247, + "acc_norm": 0.3783068783068783, + "acc_norm_stderr": 0.024976954053155247 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4305555555555556, + "acc_stderr": 0.04140685639111502, + "acc_norm": 0.4305555555555556, + "acc_norm_stderr": 0.04140685639111502 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.41, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.41, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.74, + "acc_stderr": 0.04408440022768077, + "acc_norm": 0.74, + "acc_norm_stderr": 0.04408440022768077 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5260115606936416, + "acc_stderr": 0.02688264343402289, + "acc_norm": 0.5260115606936416, + "acc_norm_stderr": 0.02688264343402289 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4601226993865031, + "acc_stderr": 0.03915857291436971, + "acc_norm": 
0.4601226993865031, + "acc_norm_stderr": 0.03915857291436971 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5432098765432098, + "acc_stderr": 0.02771666165019404, + "acc_norm": 0.5432098765432098, + "acc_norm_stderr": 0.02771666165019404 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5906735751295337, + "acc_stderr": 0.03548608168860806, + "acc_norm": 0.5906735751295337, + "acc_norm_stderr": 0.03548608168860806 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.38596491228070173, + "acc_stderr": 0.045796394220704334, + "acc_norm": 0.38596491228070173, + "acc_norm_stderr": 0.045796394220704334 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5944954128440367, + "acc_stderr": 0.021050997991896837, + "acc_norm": 0.5944954128440367, + "acc_norm_stderr": 0.021050997991896837 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3968253968253968, + "acc_stderr": 0.04375888492727061, + "acc_norm": 0.3968253968253968, + "acc_norm_stderr": 0.04375888492727061 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.545751633986928, + "acc_stderr": 0.02850980780262659, + "acc_norm": 0.545751633986928, + "acc_norm_stderr": 0.02850980780262659 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7272727272727273, + "acc_stderr": 0.040655781409087044, + "acc_norm": 0.7272727272727273, + "acc_norm_stderr": 0.040655781409087044 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5263157894736842, + "acc_stderr": 0.040633027314866725, + "acc_norm": 0.5263157894736842, + "acc_norm_stderr": 0.040633027314866725 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4624183006535948, + "acc_stderr": 
0.020170614974969768, + "acc_norm": 0.4624183006535948, + "acc_norm_stderr": 0.020170614974969768 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32269503546099293, + "acc_stderr": 0.02788913930053478, + "acc_norm": 0.32269503546099293, + "acc_norm_stderr": 0.02788913930053478 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4732142857142857, + "acc_stderr": 0.047389751192741546, + "acc_norm": 0.4732142857142857, + "acc_norm_stderr": 0.047389751192741546 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4537037037037037, + "acc_stderr": 0.03395322726375798, + "acc_norm": 0.4537037037037037, + "acc_norm_stderr": 0.03395322726375798 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2581005586592179, + "acc_stderr": 0.014635185616527836, + "acc_norm": 0.2581005586592179, + "acc_norm_stderr": 0.014635185616527836 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.62, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.62, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3860294117647059, + "acc_stderr": 0.029573269134411124, + "acc_norm": 0.3860294117647059, + "acc_norm_stderr": 0.029573269134411124 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6285714285714286, + "acc_stderr": 0.030932858792789862, + "acc_norm": 0.6285714285714286, + "acc_norm_stderr": 0.030932858792789862 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6540084388185654, + "acc_stderr": 0.03096481058878671, + "acc_norm": 0.6540084388185654, + "acc_norm_stderr": 0.03096481058878671 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.36897001303780963, + "acc_stderr": 0.01232393665017486, + "acc_norm": 0.36897001303780963, + "acc_norm_stderr": 0.01232393665017486 + }, + 
"harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6029411764705882, + "acc_stderr": 0.03434131164719129, + "acc_norm": 0.6029411764705882, + "acc_norm_stderr": 0.03434131164719129 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6303030303030303, + "acc_stderr": 0.03769430314512569, + "acc_norm": 0.6303030303030303, + "acc_norm_stderr": 0.03769430314512569 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.27906976744186046, + "mc1_stderr": 0.015702107090627877, + "mc2": 0.4416491045073314, + "mc2_stderr": 0.016117449778989282 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.48288075560802834, + "acc_stderr": 0.017180275246085622, + "acc_norm": 0.5619834710743802, + "acc_norm_stderr": 0.01705775370216029 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, 
+ "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "hometax/llama3-pre1-pre2-ds-lora3-mergkit", + "model_sha": "b0bfef63559a60981c8eb43c67523292fe6f5a69", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/hometax/nox-dpo/result_2024-07-09 00:02:28.json b/hometax/nox-dpo/result_2024-07-09 00:02:28.json new file mode 100644 index 0000000000000000000000000000000000000000..d99b419d3d28c48494d1a0f952c64d22dd407e7f 
--- /dev/null +++ b/hometax/nox-dpo/result_2024-07-09 00:02:28.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.6936860068259386, + "acc_stderr": 0.013470584417276513, + "acc_norm": 0.734641638225256, + "acc_norm_stderr": 0.01290255476231397 + }, + "harness|ko_hellaswag|10": { + "acc": 0.5207130053774148, + "acc_stderr": 0.00498549805519036, + "acc_norm": 0.6740689105755826, + "acc_norm_stderr": 0.004677637463391414 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.7426900584795322, + "acc_stderr": 0.03352799844161865, + "acc_norm": 0.7426900584795322, + "acc_norm_stderr": 0.03352799844161865 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.7281553398058253, + "acc_stderr": 0.044052680241409216, + "acc_norm": 0.7281553398058253, + "acc_norm_stderr": 0.044052680241409216 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.7330779054916986, + "acc_stderr": 0.015818450894777587, + "acc_norm": 0.7330779054916986, + "acc_norm_stderr": 0.015818450894777587 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.04292596718256981, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.04292596718256981 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.5319148936170213, + "acc_stderr": 0.03261936918467381, + "acc_norm": 0.5319148936170213, + "acc_norm_stderr": 0.03261936918467381 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.5481927710843374, + "acc_stderr": 0.03874371556587953, + "acc_norm": 0.5481927710843374, + "acc_norm_stderr": 0.03874371556587953 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6495176848874598, + "acc_stderr": 0.02709865262130175, + "acc_norm": 0.6495176848874598, + "acc_norm_stderr": 0.02709865262130175 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.672645739910314, + "acc_stderr": 
0.03149384670994131, + "acc_norm": 0.672645739910314, + "acc_norm_stderr": 0.03149384670994131 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6412213740458015, + "acc_stderr": 0.04206739313864908, + "acc_norm": 0.6412213740458015, + "acc_norm_stderr": 0.04206739313864908 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.56, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.56, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7676767676767676, + "acc_stderr": 0.030088629490217487, + "acc_norm": 0.7676767676767676, + "acc_norm_stderr": 0.030088629490217487 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5517241379310345, + "acc_stderr": 0.04144311810878152, + "acc_norm": 0.5517241379310345, + "acc_norm_stderr": 0.04144311810878152 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04690650298201942, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04690650298201942 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6638655462184874, + "acc_stderr": 0.03068473711513536, + "acc_norm": 0.6638655462184874, + "acc_norm_stderr": 0.03068473711513536 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.6307692307692307, + "acc_stderr": 0.024468615241478933, + "acc_norm": 0.6307692307692307, + "acc_norm_stderr": 0.024468615241478933 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.7, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.7, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6574074074074074, + "acc_stderr": 0.04587904741301812, + "acc_norm": 0.6574074074074074, + "acc_norm_stderr": 0.04587904741301812 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 
0.4630541871921182, + "acc_stderr": 0.035083705204426656, + "acc_norm": 0.4630541871921182, + "acc_norm_stderr": 0.035083705204426656 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6387096774193548, + "acc_stderr": 0.027327548447957553, + "acc_norm": 0.6387096774193548, + "acc_norm_stderr": 0.027327548447957553 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.8504273504273504, + "acc_stderr": 0.023365051491753722, + "acc_norm": 0.8504273504273504, + "acc_norm_stderr": 0.023365051491753722 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.6226415094339622, + "acc_stderr": 0.029832808114796005, + "acc_norm": 0.6226415094339622, + "acc_norm_stderr": 0.029832808114796005 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6454545454545455, + "acc_stderr": 0.045820048415054174, + "acc_norm": 0.6454545454545455, + "acc_norm_stderr": 0.045820048415054174 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.029958249250082114, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.029958249250082114 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3708609271523179, + "acc_stderr": 0.03943966699183629, + "acc_norm": 0.3708609271523179, + "acc_norm_stderr": 0.03943966699183629 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7263681592039801, + "acc_stderr": 0.03152439186555402, + "acc_norm": 0.7263681592039801, + "acc_norm_stderr": 0.03152439186555402 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5433526011560693, + "acc_stderr": 0.03798106566014498, + "acc_norm": 0.5433526011560693, + "acc_norm_stderr": 0.03798106566014498 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.4497354497354497, + "acc_stderr": 0.02562085704293665, + "acc_norm": 0.4497354497354497, + "acc_norm_stderr": 0.02562085704293665 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5972222222222222, + "acc_stderr": 0.04101405519842426, + "acc_norm": 0.5972222222222222, + 
"acc_norm_stderr": 0.04101405519842426 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.8, + "acc_stderr": 0.04020151261036845, + "acc_norm": 0.8, + "acc_norm_stderr": 0.04020151261036845 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.6502890173410405, + "acc_stderr": 0.02567428145653102, + "acc_norm": 0.6502890173410405, + "acc_norm_stderr": 0.02567428145653102 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.6380368098159509, + "acc_stderr": 0.037757007291414416, + "acc_norm": 0.6380368098159509, + "acc_norm_stderr": 0.037757007291414416 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6512345679012346, + "acc_stderr": 0.02651759772446501, + "acc_norm": 0.6512345679012346, + "acc_norm_stderr": 0.02651759772446501 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7564766839378239, + "acc_stderr": 0.03097543638684543, + "acc_norm": 0.7564766839378239, + "acc_norm_stderr": 0.03097543638684543 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.39473684210526316, + "acc_stderr": 0.045981880578165414, + "acc_norm": 0.39473684210526316, + "acc_norm_stderr": 0.045981880578165414 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.7798165137614679, + "acc_stderr": 0.017765978652327583, + "acc_norm": 0.7798165137614679, + "acc_norm_stderr": 0.017765978652327583 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.04360314860077459, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.04360314860077459 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.026992544339297233, + "acc_norm": 0.6666666666666666, 
+ "acc_norm_stderr": 0.026992544339297233 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.743801652892562, + "acc_stderr": 0.03984979653302873, + "acc_norm": 0.743801652892562, + "acc_norm_stderr": 0.03984979653302873 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.6381578947368421, + "acc_stderr": 0.03910525752849724, + "acc_norm": 0.6381578947368421, + "acc_norm_stderr": 0.03910525752849724 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5849673202614379, + "acc_stderr": 0.01993362777685742, + "acc_norm": 0.5849673202614379, + "acc_norm_stderr": 0.01993362777685742 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.4326241134751773, + "acc_stderr": 0.029555454236778855, + "acc_norm": 0.4326241134751773, + "acc_norm_stderr": 0.029555454236778855 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4017857142857143, + "acc_stderr": 0.04653333146973646, + "acc_norm": 0.4017857142857143, + "acc_norm_stderr": 0.04653333146973646 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5231481481481481, + "acc_stderr": 0.034063153607115065, + "acc_norm": 0.5231481481481481, + "acc_norm_stderr": 0.034063153607115065 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.3843575418994413, + "acc_stderr": 0.0162690886639594, + "acc_norm": 0.3843575418994413, + "acc_norm_stderr": 0.0162690886639594 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.72, + "acc_stderr": 0.045126085985421296, + "acc_norm": 0.72, + "acc_norm_stderr": 0.045126085985421296 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5882352941176471, + "acc_stderr": 0.029896163033125474, + 
"acc_norm": 0.5882352941176471, + "acc_norm_stderr": 0.029896163033125474 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6326530612244898, + "acc_stderr": 0.03086214492108757, + "acc_norm": 0.6326530612244898, + "acc_norm_stderr": 0.03086214492108757 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7552742616033755, + "acc_stderr": 0.027985699387036416, + "acc_norm": 0.7552742616033755, + "acc_norm_stderr": 0.027985699387036416 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.4517601043024772, + "acc_stderr": 0.012710662233660245, + "acc_norm": 0.4517601043024772, + "acc_norm_stderr": 0.012710662233660245 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6715686274509803, + "acc_stderr": 0.03296245110172229, + "acc_norm": 0.6715686274509803, + "acc_norm_stderr": 0.03296245110172229 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6606060606060606, + "acc_stderr": 0.03697442205031595, + "acc_norm": 0.6606060606060606, + "acc_norm_stderr": 0.03697442205031595 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.6193390452876377, + "mc1_stderr": 0.01699762787190791, + "mc2": 0.7192521748057265, + "mc2_stderr": 0.014376321164097795 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5194805194805194, + "acc_stderr": 0.017177301992342544, + "acc_norm": 0.5548996458087367, + "acc_norm_stderr": 0.017086417431005464 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + 
"harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + 
"harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "hometax/nox-dpo", + "model_sha": "1e19b4b5f95373b8a567252e778b6255c5f151e5", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/hometax/shooting_red/result_2024-07-05 04:27:40.json b/hometax/shooting_red/result_2024-07-05 04:27:40.json new file mode 100644 index 0000000000000000000000000000000000000000..eaa196cca0fb709f29faf8c1c3195fde2bbdeea7 --- /dev/null +++ b/hometax/shooting_red/result_2024-07-05 04:27:40.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.39761092150170646, + "acc_stderr": 0.014301752223279535, + "acc_norm": 0.44197952218430037, + "acc_norm_stderr": 0.014512682523128345 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3800039832702649, + "acc_stderr": 0.004843954338451435, + "acc_norm": 0.5033857797251543, + "acc_norm_stderr": 0.0049896670093726465 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.49122807017543857, + "acc_stderr": 0.038342347441649924, + "acc_norm": 0.49122807017543857, + "acc_norm_stderr": 0.038342347441649924 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6310679611650486, + "acc_stderr": 0.0477761518115674, + "acc_norm": 0.6310679611650486, + "acc_norm_stderr": 0.0477761518115674 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.48659003831417624, + "acc_stderr": 0.017873531736510385, + "acc_norm": 0.48659003831417624, + "acc_norm_stderr": 0.017873531736510385 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.04171654161354544, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.04171654161354544 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720683, + "acc_norm": 0.29, 
+ "acc_norm_stderr": 0.04560480215720683 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4851063829787234, + "acc_stderr": 0.03267151848924777, + "acc_norm": 0.4851063829787234, + "acc_norm_stderr": 0.03267151848924777 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.45180722891566266, + "acc_stderr": 0.03874371556587953, + "acc_norm": 0.45180722891566266, + "acc_norm_stderr": 0.03874371556587953 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5305466237942122, + "acc_stderr": 0.028345045864840625, + "acc_norm": 0.5305466237942122, + "acc_norm_stderr": 0.028345045864840625 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.47533632286995514, + "acc_stderr": 0.033516951676526276, + "acc_norm": 0.47533632286995514, + "acc_norm_stderr": 0.033516951676526276 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.46564885496183206, + "acc_stderr": 0.04374928560599738, + "acc_norm": 0.46564885496183206, + "acc_norm_stderr": 0.04374928560599738 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5606060606060606, + "acc_stderr": 0.035360859475294805, + "acc_norm": 0.5606060606060606, + "acc_norm_stderr": 0.035360859475294805 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5793103448275863, + "acc_stderr": 0.0411391498118926, + "acc_norm": 0.5793103448275863, + "acc_norm_stderr": 0.0411391498118926 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3235294117647059, + "acc_stderr": 0.04655010411319616, + "acc_norm": 0.3235294117647059, + "acc_norm_stderr": 0.04655010411319616 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5462184873949579, + "acc_stderr": 0.032339434681820885, + "acc_norm": 0.5462184873949579, + "acc_norm_stderr": 0.032339434681820885 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5076923076923077, + 
"acc_stderr": 0.025348006031534743, + "acc_norm": 0.5076923076923077, + "acc_norm_stderr": 0.025348006031534743 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.57, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.57, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6481481481481481, + "acc_stderr": 0.04616631111801714, + "acc_norm": 0.6481481481481481, + "acc_norm_stderr": 0.04616631111801714 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.43842364532019706, + "acc_stderr": 0.03491207857486519, + "acc_norm": 0.43842364532019706, + "acc_norm_stderr": 0.03491207857486519 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5064516129032258, + "acc_stderr": 0.02844163823354051, + "acc_norm": 0.5064516129032258, + "acc_norm_stderr": 0.02844163823354051 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.717948717948718, + "acc_stderr": 0.029480360549541194, + "acc_norm": 0.717948717948718, + "acc_norm_stderr": 0.029480360549541194 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4867924528301887, + "acc_stderr": 0.030762134874500482, + "acc_norm": 0.4867924528301887, + "acc_norm_stderr": 0.030762134874500482 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4818181818181818, + "acc_stderr": 0.04785964010794916, + "acc_norm": 0.4818181818181818, + "acc_norm_stderr": 0.04785964010794916 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3925925925925926, + "acc_stderr": 0.02977384701253297, + "acc_norm": 0.3925925925925926, + "acc_norm_stderr": 0.02977384701253297 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.32450331125827814, + "acc_stderr": 0.038227469376587525, + "acc_norm": 0.32450331125827814, + "acc_norm_stderr": 0.038227469376587525 + }, + "harness|ko_mmlu_sociology|5": { + 
"acc": 0.6666666666666666, + "acc_stderr": 0.03333333333333333, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.03333333333333333 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.44508670520231214, + "acc_stderr": 0.03789401760283647, + "acc_norm": 0.44508670520231214, + "acc_norm_stderr": 0.03789401760283647 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3862433862433862, + "acc_stderr": 0.02507598176760168, + "acc_norm": 0.3862433862433862, + "acc_norm_stderr": 0.02507598176760168 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.041227287076512825, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.041227287076512825 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237101, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237101 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5202312138728323, + "acc_stderr": 0.026897049996382868, + "acc_norm": 0.5202312138728323, + "acc_norm_stderr": 0.026897049996382868 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4723926380368098, + "acc_stderr": 0.039223782906109894, + "acc_norm": 0.4723926380368098, + "acc_norm_stderr": 0.039223782906109894 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5308641975308642, + "acc_stderr": 0.02776768960683392, + "acc_norm": 0.5308641975308642, + "acc_norm_stderr": 0.02776768960683392 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5544041450777202, + "acc_stderr": 0.0358701498607566, + "acc_norm": 0.5544041450777202, + "acc_norm_stderr": 0.0358701498607566 + }, + 
"harness|ko_mmlu_econometrics|5": { + "acc": 0.34210526315789475, + "acc_stderr": 0.04462917535336937, + "acc_norm": 0.34210526315789475, + "acc_norm_stderr": 0.04462917535336937 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5761467889908257, + "acc_stderr": 0.021187263209087533, + "acc_norm": 0.5761467889908257, + "acc_norm_stderr": 0.021187263209087533 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.04360314860077459, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.04360314860077459 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5392156862745098, + "acc_stderr": 0.028541722692618874, + "acc_norm": 0.5392156862745098, + "acc_norm_stderr": 0.028541722692618874 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6198347107438017, + "acc_stderr": 0.04431324501968431, + "acc_norm": 0.6198347107438017, + "acc_norm_stderr": 0.04431324501968431 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5197368421052632, + "acc_stderr": 0.04065771002562603, + "acc_norm": 0.5197368421052632, + "acc_norm_stderr": 0.04065771002562603 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4068627450980392, + "acc_stderr": 0.01987380200506117, + "acc_norm": 0.4068627450980392, + "acc_norm_stderr": 0.01987380200506117 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3546099290780142, + "acc_stderr": 0.028538650028878638, + "acc_norm": 0.3546099290780142, + "acc_norm_stderr": 0.028538650028878638 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4375, + "acc_stderr": 0.04708567521880525, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.04708567521880525 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.41203703703703703, + "acc_stderr": 0.03356787758160834, + "acc_norm": 0.41203703703703703, + 
"acc_norm_stderr": 0.03356787758160834 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.30837988826815643, + "acc_stderr": 0.015445716910998877, + "acc_norm": 0.30837988826815643, + "acc_norm_stderr": 0.015445716910998877 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.69, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.69, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4007352941176471, + "acc_stderr": 0.02976826352893311, + "acc_norm": 0.4007352941176471, + "acc_norm_stderr": 0.02976826352893311 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6, + "acc_stderr": 0.03136250240935895, + "acc_norm": 0.6, + "acc_norm_stderr": 0.03136250240935895 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6497890295358649, + "acc_stderr": 0.03105239193758435, + "acc_norm": 0.6497890295358649, + "acc_norm_stderr": 0.03105239193758435 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3559322033898305, + "acc_stderr": 0.01222864553727757, + "acc_norm": 0.3559322033898305, + "acc_norm_stderr": 0.01222864553727757 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5833333333333334, + "acc_stderr": 0.03460228327239171, + "acc_norm": 0.5833333333333334, + "acc_norm_stderr": 0.03460228327239171 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6424242424242425, + "acc_stderr": 0.03742597043806587, + "acc_norm": 0.6424242424242425, + "acc_norm_stderr": 0.03742597043806587 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2839657282741738, + "mc1_stderr": 0.015785370858396708, + "mc2": 0.44273457369835273, + "mc2_stderr": 0.015288497161255953 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3435655253837072, + "acc_stderr": 0.01632733480642914, + "acc_norm": 
0.3730814639905549, + "acc_norm_stderr": 0.016627318275137432 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + 
"harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "hometax/shooting_red", + "model_sha": "f8c6b0e4d5b1b5f708ada481489735e516236038", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/hongzoh/Yi-Ko-6B_Open-Platypus/result_2023-12-29 06:19:40.json b/hongzoh/Yi-Ko-6B_Open-Platypus/result_2023-12-29 06:19:40.json new file mode 100644 index 0000000000000000000000000000000000000000..9bb75b721a5d9aa027a28cbd046cce7acf623dfa --- /dev/null +++ b/hongzoh/Yi-Ko-6B_Open-Platypus/result_2023-12-29 06:19:40.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3361774744027304, + "acc_stderr": 0.013804855026205763, + "acc_norm": 0.4044368600682594, + "acc_norm_stderr": 0.014342036483436174 + }, + "harness|ko_hellaswag|10": { + "acc": 0.392850029874527, + "acc_stderr": 0.004873858323840787, + "acc_norm": 0.5269866560446126, + "acc_norm_stderr": 0.00498250819858427 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.47368421052631576, + "acc_stderr": 0.038295098689947286, + "acc_norm": 0.47368421052631576, + "acc_norm_stderr": 
0.038295098689947286 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5631067961165048, + "acc_stderr": 0.04911147107365777, + "acc_norm": 0.5631067961165048, + "acc_norm_stderr": 0.04911147107365777 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5325670498084292, + "acc_stderr": 0.017841995750520867, + "acc_norm": 0.5325670498084292, + "acc_norm_stderr": 0.017841995750520867 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.043163785995113245, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.043163785995113245 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39574468085106385, + "acc_stderr": 0.03196758697835362, + "acc_norm": 0.39574468085106385, + "acc_norm_stderr": 0.03196758697835362 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42771084337349397, + "acc_stderr": 0.03851597683718533, + "acc_norm": 0.42771084337349397, + "acc_norm_stderr": 0.03851597683718533 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5080385852090032, + "acc_stderr": 0.028394421370984545, + "acc_norm": 0.5080385852090032, + "acc_norm_stderr": 0.028394421370984545 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.452914798206278, + "acc_stderr": 0.03340867501923324, + "acc_norm": 0.452914798206278, + "acc_norm_stderr": 0.03340867501923324 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5190839694656488, + "acc_stderr": 0.043820947055509867, + "acc_norm": 0.5190839694656488, + "acc_norm_stderr": 0.043820947055509867 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6060606060606061, + "acc_stderr": 0.034812853382329645, + "acc_norm": 0.6060606060606061, + "acc_norm_stderr": 
0.034812853382329645 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4206896551724138, + "acc_stderr": 0.0411391498118926, + "acc_norm": 0.4206896551724138, + "acc_norm_stderr": 0.0411391498118926 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.04280105837364395, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.04280105837364395 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.48739495798319327, + "acc_stderr": 0.03246816765752174, + "acc_norm": 0.48739495798319327, + "acc_norm_stderr": 0.03246816765752174 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.44871794871794873, + "acc_stderr": 0.025217315184846482, + "acc_norm": 0.44871794871794873, + "acc_norm_stderr": 0.025217315184846482 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4187192118226601, + "acc_stderr": 0.03471192860518468, + "acc_norm": 0.4187192118226601, + "acc_norm_stderr": 0.03471192860518468 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4645161290322581, + "acc_stderr": 0.028372287797962956, + "acc_norm": 0.4645161290322581, + "acc_norm_stderr": 0.028372287797962956 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6752136752136753, + "acc_stderr": 0.03067902276549883, + "acc_norm": 0.6752136752136753, + "acc_norm_stderr": 0.03067902276549883 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.47924528301886793, + "acc_stderr": 0.030746349975723463, + 
"acc_norm": 0.47924528301886793, + "acc_norm_stderr": 0.030746349975723463 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.45454545454545453, + "acc_stderr": 0.04769300568972743, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.04769300568972743 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.32222222222222224, + "acc_stderr": 0.028493465091028597, + "acc_norm": 0.32222222222222224, + "acc_norm_stderr": 0.028493465091028597 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.038020397601079024, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.038020397601079024 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.582089552238806, + "acc_stderr": 0.034875586404620636, + "acc_norm": 0.582089552238806, + "acc_norm_stderr": 0.034875586404620636 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3815028901734104, + "acc_stderr": 0.03703851193099521, + "acc_norm": 0.3815028901734104, + "acc_norm_stderr": 0.03703851193099521 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.024594975128920938, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.024594975128920938 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4097222222222222, + "acc_stderr": 0.041124909746707884, + "acc_norm": 0.4097222222222222, + "acc_norm_stderr": 0.041124909746707884 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.56, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.56, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4624277456647399, + "acc_stderr": 0.026842985519615375, + "acc_norm": 0.4624277456647399, + "acc_norm_stderr": 0.026842985519615375 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 
0.49079754601226994, + "acc_stderr": 0.039277056007874414, + "acc_norm": 0.49079754601226994, + "acc_norm_stderr": 0.039277056007874414 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.49691358024691357, + "acc_stderr": 0.02782021415859437, + "acc_norm": 0.49691358024691357, + "acc_norm_stderr": 0.02782021415859437 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.616580310880829, + "acc_stderr": 0.03508984236295342, + "acc_norm": 0.616580310880829, + "acc_norm_stderr": 0.03508984236295342 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04434600701584925, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04434600701584925 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6146788990825688, + "acc_stderr": 0.02086585085279411, + "acc_norm": 0.6146788990825688, + "acc_norm_stderr": 0.02086585085279411 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3253968253968254, + "acc_stderr": 0.041905964388711366, + "acc_norm": 0.3253968253968254, + "acc_norm_stderr": 0.041905964388711366 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4738562091503268, + "acc_stderr": 0.028590752958852394, + "acc_norm": 0.4738562091503268, + "acc_norm_stderr": 0.028590752958852394 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5619834710743802, + "acc_stderr": 0.04529146804435792, + "acc_norm": 0.5619834710743802, + "acc_norm_stderr": 0.04529146804435792 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.48026315789473684, + "acc_stderr": 0.040657710025626036, + "acc_norm": 0.48026315789473684, + "acc_norm_stderr": 0.040657710025626036 + }, + 
"harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4068627450980392, + "acc_stderr": 0.019873802005061173, + "acc_norm": 0.4068627450980392, + "acc_norm_stderr": 0.019873802005061173 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.028121636040639886, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.028121636040639886 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.29464285714285715, + "acc_stderr": 0.0432704093257873, + "acc_norm": 0.29464285714285715, + "acc_norm_stderr": 0.0432704093257873 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.32407407407407407, + "acc_stderr": 0.03191923445686186, + "acc_norm": 0.32407407407407407, + "acc_norm_stderr": 0.03191923445686186 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.33854748603351953, + "acc_stderr": 0.01582670009648135, + "acc_norm": 0.33854748603351953, + "acc_norm_stderr": 0.01582670009648135 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.40441176470588236, + "acc_stderr": 0.029812630701569743, + "acc_norm": 0.40441176470588236, + "acc_norm_stderr": 0.029812630701569743 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3469387755102041, + "acc_stderr": 0.030472526026726492, + "acc_norm": 0.3469387755102041, + "acc_norm_stderr": 0.030472526026726492 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6075949367088608, + "acc_stderr": 0.03178471874564729, + "acc_norm": 0.6075949367088608, + "acc_norm_stderr": 0.03178471874564729 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.318122555410691, + "acc_stderr": 0.011895407281104095, 
+ "acc_norm": 0.318122555410691, + "acc_norm_stderr": 0.011895407281104095 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5147058823529411, + "acc_stderr": 0.03507793834791324, + "acc_norm": 0.5147058823529411, + "acc_norm_stderr": 0.03507793834791324 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5393939393939394, + "acc_stderr": 0.03892207016552012, + "acc_norm": 0.5393939393939394, + "acc_norm_stderr": 0.03892207016552012 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2778457772337821, + "mc1_stderr": 0.015680929364024633, + "mc2": 0.4177084248121788, + "mc2_stderr": 0.014895946713066507 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4946871310507674, + "acc_stderr": 0.01718938362722969, + "acc_norm": 0.5702479338842975, + "acc_norm_stderr": 0.017019847535972205 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "hongzoh/Yi-Ko-6B_Open-Platypus", + "model_sha": "3096318116f2d7da1bd293ad21888ac91ec5cc46", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/huggyllama/llama-13b/result_2023-09-27 04:58:53.json b/huggyllama/llama-13b/result_2023-09-27 04:58:53.json new file mode 100644 index 
0000000000000000000000000000000000000000..952a5c4fb8f5a10f58d9fb8fd6cb163caf39ac1c --- /dev/null +++ b/huggyllama/llama-13b/result_2023-09-27 04:58:53.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2098976109215017, + "acc_stderr": 0.01190054874804745, + "acc_norm": 0.2593856655290102, + "acc_norm_stderr": 0.012808273573927092 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3014339772953595, + "acc_stderr": 0.004579429184835869, + "acc_norm": 0.3571001792471619, + "acc_norm_stderr": 0.004781654610857135 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.036155076303109344, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.036155076303109344 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2815533980582524, + "acc_stderr": 0.04453254836326467, + "acc_norm": 0.2815533980582524, + "acc_norm_stderr": 0.04453254836326467 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3065134099616858, + "acc_stderr": 0.016486952893041515, + "acc_norm": 0.3065134099616858, + "acc_norm_stderr": 0.016486952893041515 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2814814814814815, + "acc_stderr": 0.03885004245800254, + "acc_norm": 0.2814814814814815, + "acc_norm_stderr": 0.03885004245800254 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3148936170212766, + "acc_stderr": 0.03036358219723816, + "acc_norm": 0.3148936170212766, + "acc_norm_stderr": 0.03036358219723816 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.28313253012048195, + "acc_stderr": 0.03507295431370519, + "acc_norm": 0.28313253012048195, + "acc_norm_stderr": 0.03507295431370519 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3536977491961415, + "acc_stderr": 0.02715520810320086, + "acc_norm": 0.3536977491961415, + "acc_norm_stderr": 
0.02715520810320086 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.27802690582959644, + "acc_stderr": 0.03006958487449405, + "acc_norm": 0.27802690582959644, + "acc_norm_stderr": 0.03006958487449405 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.31297709923664124, + "acc_stderr": 0.04066962905677697, + "acc_norm": 0.31297709923664124, + "acc_norm_stderr": 0.04066962905677697 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.29797979797979796, + "acc_stderr": 0.03258630383836556, + "acc_norm": 0.29797979797979796, + "acc_norm_stderr": 0.03258630383836556 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3310344827586207, + "acc_stderr": 0.03921545312467121, + "acc_norm": 0.3310344827586207, + "acc_norm_stderr": 0.03921545312467121 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.04158307533083286, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.04158307533083286 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3235294117647059, + "acc_stderr": 0.030388353551886838, + "acc_norm": 0.3235294117647059, + "acc_norm_stderr": 0.030388353551886838 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.33076923076923076, + "acc_stderr": 0.0238547956809711, + "acc_norm": 0.33076923076923076, + "acc_norm_stderr": 0.0238547956809711 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.39814814814814814, + "acc_stderr": 0.04732332615978814, + "acc_norm": 0.39814814814814814, + 
"acc_norm_stderr": 0.04732332615978814 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2660098522167488, + "acc_stderr": 0.031089826002937523, + "acc_norm": 0.2660098522167488, + "acc_norm_stderr": 0.031089826002937523 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.33548387096774196, + "acc_stderr": 0.02686020644472434, + "acc_norm": 0.33548387096774196, + "acc_norm_stderr": 0.02686020644472434 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.46153846153846156, + "acc_stderr": 0.03265903381186195, + "acc_norm": 0.46153846153846156, + "acc_norm_stderr": 0.03265903381186195 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.27169811320754716, + "acc_stderr": 0.027377706624670716, + "acc_norm": 0.27169811320754716, + "acc_norm_stderr": 0.027377706624670716 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.3, + "acc_stderr": 0.04389311454644286, + "acc_norm": 0.3, + "acc_norm_stderr": 0.04389311454644286 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.21851851851851853, + "acc_stderr": 0.025195752251823793, + "acc_norm": 0.21851851851851853, + "acc_norm_stderr": 0.025195752251823793 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2582781456953642, + "acc_stderr": 0.035737053147634576, + "acc_norm": 0.2582781456953642, + "acc_norm_stderr": 0.035737053147634576 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.3283582089552239, + "acc_stderr": 0.033206858897443244, + "acc_norm": 0.3283582089552239, + "acc_norm_stderr": 0.033206858897443244 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.27167630057803466, + "acc_stderr": 0.03391750322321659, + "acc_norm": 0.27167630057803466, + "acc_norm_stderr": 0.03391750322321659 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.23544973544973544, + "acc_stderr": 0.021851509822031705, + "acc_norm": 0.23544973544973544, + "acc_norm_stderr": 0.021851509822031705 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2777777777777778, 
+ "acc_stderr": 0.03745554791462457, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.03745554791462457 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.35260115606936415, + "acc_stderr": 0.025722802200895817, + "acc_norm": 0.35260115606936415, + "acc_norm_stderr": 0.025722802200895817 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2883435582822086, + "acc_stderr": 0.035590395316173425, + "acc_norm": 0.2883435582822086, + "acc_norm_stderr": 0.035590395316173425 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.30864197530864196, + "acc_stderr": 0.025702640260603753, + "acc_norm": 0.30864197530864196, + "acc_norm_stderr": 0.025702640260603753 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.3316062176165803, + "acc_stderr": 0.03397636541089117, + "acc_norm": 0.3316062176165803, + "acc_norm_stderr": 0.03397636541089117 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.22807017543859648, + "acc_stderr": 0.03947152782669415, + "acc_norm": 0.22807017543859648, + "acc_norm_stderr": 0.03947152782669415 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.29724770642201837, + "acc_stderr": 0.019595707224643533, + "acc_norm": 0.29724770642201837, + "acc_norm_stderr": 0.019595707224643533 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.042163702135578345, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.042163702135578345 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 
0.3790849673202614, + "acc_stderr": 0.02778014120702335, + "acc_norm": 0.3790849673202614, + "acc_norm_stderr": 0.02778014120702335 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.45454545454545453, + "acc_stderr": 0.04545454545454546, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.04545454545454546 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.2894736842105263, + "acc_stderr": 0.03690677986137282, + "acc_norm": 0.2894736842105263, + "acc_norm_stderr": 0.03690677986137282 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2826797385620915, + "acc_stderr": 0.018217269552053446, + "acc_norm": 0.2826797385620915, + "acc_norm_stderr": 0.018217269552053446 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.26595744680851063, + "acc_stderr": 0.026358065698880585, + "acc_norm": 0.26595744680851063, + "acc_norm_stderr": 0.026358065698880585 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.04547960999764376, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.04547960999764376 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3287037037037037, + "acc_stderr": 0.032036140846700596, + "acc_norm": 0.3287037037037037, + "acc_norm_stderr": 0.032036140846700596 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27150837988826815, + "acc_stderr": 0.014874252168095278, + "acc_norm": 0.27150837988826815, + "acc_norm_stderr": 0.014874252168095278 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + 
"harness|ko_mmlu_professional_medicine|5": { + "acc": 0.2536764705882353, + "acc_stderr": 0.026431329870789513, + "acc_norm": 0.2536764705882353, + "acc_norm_stderr": 0.026431329870789513 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.44081632653061226, + "acc_stderr": 0.03178419114175363, + "acc_norm": 0.44081632653061226, + "acc_norm_stderr": 0.03178419114175363 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.29535864978902954, + "acc_stderr": 0.029696338713422893, + "acc_norm": 0.29535864978902954, + "acc_norm_stderr": 0.029696338713422893 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.26140808344198174, + "acc_stderr": 0.011222528169771312, + "acc_norm": 0.26140808344198174, + "acc_norm_stderr": 0.011222528169771312 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.031980016601150706, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.031980016601150706 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.30303030303030304, + "acc_stderr": 0.03588624800091709, + "acc_norm": 0.30303030303030304, + "acc_norm_stderr": 0.03588624800091709 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.27050183598531213, + "mc1_stderr": 0.01555077833284288, + "mc2": 0.43560981343267496, + "mc2_stderr": 0.01587676917939091 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.22077922077922077, + "acc_stderr": 0.014260152803540045, + "acc_norm": 0.3435655253837072, + "acc_norm_stderr": 0.016327334806429145 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + 
"harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + 
"harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "huggyllama/llama-13b", + "model_sha": "bf57045473f207bb1de1ed035ace226f4d9f9bba", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/huggyllama/llama-7b/result_2023-09-28 00:26:14.json b/huggyllama/llama-7b/result_2023-09-28 00:26:14.json new file mode 100644 index 0000000000000000000000000000000000000000..f72099bdea5e97287e4049d569e0ee0fb62f8af3 --- /dev/null +++ b/huggyllama/llama-7b/result_2023-09-28 00:26:14.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2175767918088737, + "acc_stderr": 0.012057262020972508, + "acc_norm": 0.2525597269624573, + "acc_norm_stderr": 0.012696728980207708 + }, + "harness|ko_hellaswag|10": { + "acc": 0.29197371041625175, + "acc_stderr": 0.004537410615572941, + "acc_norm": 0.3343955387373033, + "acc_norm_stderr": 0.004708145393411397 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.0330140594698725, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.0330140594698725 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.21359223300970873, + "acc_stderr": 0.040580420156460344, + "acc_norm": 0.21359223300970873, + "acc_norm_stderr": 0.040580420156460344 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2554278416347382, + "acc_stderr": 0.01559495538445577, + "acc_norm": 0.2554278416347382, + "acc_norm_stderr": 0.01559495538445577 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.28888888888888886, + "acc_stderr": 0.0391545063041425, + "acc_norm": 0.28888888888888886, + 
"acc_norm_stderr": 0.0391545063041425 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2680851063829787, + "acc_stderr": 0.02895734278834235, + "acc_norm": 0.2680851063829787, + "acc_norm_stderr": 0.02895734278834235 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3373493975903614, + "acc_stderr": 0.0368078369072758, + "acc_norm": 0.3373493975903614, + "acc_norm_stderr": 0.0368078369072758 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2347266881028939, + "acc_stderr": 0.024071805887677048, + "acc_norm": 0.2347266881028939, + "acc_norm_stderr": 0.024071805887677048 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.19282511210762332, + "acc_stderr": 0.026478240960489365, + "acc_norm": 0.19282511210762332, + "acc_norm_stderr": 0.026478240960489365 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.26717557251908397, + "acc_stderr": 0.038808483010823944, + "acc_norm": 0.26717557251908397, + "acc_norm_stderr": 0.038808483010823944 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3484848484848485, + "acc_stderr": 0.033948539651564025, + "acc_norm": 0.3484848484848485, + "acc_norm_stderr": 0.033948539651564025 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.23448275862068965, + "acc_stderr": 0.035306258743465914, + "acc_norm": 0.23448275862068965, + "acc_norm_stderr": 0.035306258743465914 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.17647058823529413, + "acc_stderr": 0.03793281185307811, + "acc_norm": 0.17647058823529413, + "acc_norm_stderr": 0.03793281185307811 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.23949579831932774, + "acc_stderr": 0.027722065493361266, + 
"acc_norm": 0.23949579831932774, + "acc_norm_stderr": 0.027722065493361266 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.23846153846153847, + "acc_stderr": 0.021606294494647727, + "acc_norm": 0.23846153846153847, + "acc_norm_stderr": 0.021606294494647727 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.17, + "acc_stderr": 0.03775251680686371, + "acc_norm": 0.17, + "acc_norm_stderr": 0.03775251680686371 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.32407407407407407, + "acc_stderr": 0.04524596007030049, + "acc_norm": 0.32407407407407407, + "acc_norm_stderr": 0.04524596007030049 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.0317852971064275, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.0317852971064275 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.27419354838709675, + "acc_stderr": 0.025378139970885193, + "acc_norm": 0.27419354838709675, + "acc_norm_stderr": 0.025378139970885193 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2948717948717949, + "acc_stderr": 0.029872577708891172, + "acc_norm": 0.2948717948717949, + "acc_norm_stderr": 0.029872577708891172 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.27169811320754716, + "acc_stderr": 0.027377706624670713, + "acc_norm": 0.27169811320754716, + "acc_norm_stderr": 0.027377706624670713 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2636363636363636, + "acc_stderr": 0.04220224692971987, + "acc_norm": 0.2636363636363636, + "acc_norm_stderr": 0.04220224692971987 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.22592592592592592, + "acc_stderr": 0.025497532639609546, + "acc_norm": 0.22592592592592592, + "acc_norm_stderr": 0.025497532639609546 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 
0.271523178807947, + "acc_stderr": 0.036313298039696525, + "acc_norm": 0.271523178807947, + "acc_norm_stderr": 0.036313298039696525 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.24875621890547264, + "acc_stderr": 0.030567675938916718, + "acc_norm": 0.24875621890547264, + "acc_norm_stderr": 0.030567675938916718 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.24855491329479767, + "acc_stderr": 0.03295304696818318, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 0.03295304696818318 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.26455026455026454, + "acc_stderr": 0.022717467897708607, + "acc_norm": 0.26455026455026454, + "acc_norm_stderr": 0.022717467897708607 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.03852084696008534, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.03852084696008534 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2832369942196532, + "acc_stderr": 0.024257901705323374, + "acc_norm": 0.2832369942196532, + "acc_norm_stderr": 0.024257901705323374 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.31901840490797545, + "acc_stderr": 0.03661997551073836, + "acc_norm": 0.31901840490797545, + "acc_norm_stderr": 0.03661997551073836 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.29012345679012347, + "acc_stderr": 0.025251173936495033, + "acc_norm": 0.29012345679012347, + "acc_norm_stderr": 0.025251173936495033 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909283 + }, + 
"harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.23834196891191708, + "acc_stderr": 0.030748905363909906, + "acc_norm": 0.23834196891191708, + "acc_norm_stderr": 0.030748905363909906 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2982456140350877, + "acc_stderr": 0.04303684033537316, + "acc_norm": 0.2982456140350877, + "acc_norm_stderr": 0.04303684033537316 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.326605504587156, + "acc_stderr": 0.020106990889937303, + "acc_norm": 0.326605504587156, + "acc_norm_stderr": 0.020106990889937303 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.20634920634920634, + "acc_stderr": 0.036196045241242494, + "acc_norm": 0.20634920634920634, + "acc_norm_stderr": 0.036196045241242494 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2908496732026144, + "acc_stderr": 0.026004800363952113, + "acc_norm": 0.2908496732026144, + "acc_norm_stderr": 0.026004800363952113 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.4049586776859504, + "acc_stderr": 0.044811377559424694, + "acc_norm": 0.4049586776859504, + "acc_norm_stderr": 0.044811377559424694 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.2565789473684211, + "acc_stderr": 0.0355418036802569, + "acc_norm": 0.2565789473684211, + "acc_norm_stderr": 0.0355418036802569 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3104575163398693, + "acc_stderr": 0.018718067052623227, + "acc_norm": 0.3104575163398693, + "acc_norm_stderr": 0.018718067052623227 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.25886524822695034, + "acc_stderr": 0.026129572527180844, + "acc_norm": 0.25886524822695034, + "acc_norm_stderr": 0.026129572527180844 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.16964285714285715, + "acc_stderr": 0.0356236785009539, + 
"acc_norm": 0.16964285714285715, + "acc_norm_stderr": 0.0356236785009539 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4212962962962963, + "acc_stderr": 0.03367462138896078, + "acc_norm": 0.4212962962962963, + "acc_norm_stderr": 0.03367462138896078 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.15, + "acc_stderr": 0.03588702812826371, + "acc_norm": 0.15, + "acc_norm_stderr": 0.03588702812826371 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.22426470588235295, + "acc_stderr": 0.025336848563332372, + "acc_norm": 0.22426470588235295, + "acc_norm_stderr": 0.025336848563332372 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.24081632653061225, + "acc_stderr": 0.027372942201788163, + "acc_norm": 0.24081632653061225, + "acc_norm_stderr": 0.027372942201788163 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.27848101265822783, + "acc_stderr": 0.029178682304842555, + "acc_norm": 0.27848101265822783, + "acc_norm_stderr": 0.029178682304842555 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2542372881355932, + "acc_stderr": 0.011121129007840685, + "acc_norm": 0.2542372881355932, + "acc_norm_stderr": 0.011121129007840685 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.23039215686274508, + "acc_stderr": 0.02955429260569506, + "acc_norm": 0.23039215686274508, + "acc_norm_stderr": 0.02955429260569506 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.24242424242424243, + "acc_stderr": 0.03346409881055953, + "acc_norm": 0.24242424242424243, + "acc_norm_stderr": 0.03346409881055953 + }, + 
"harness|ko_truthfulqa_mc|0": { + "mc1": 0.2668298653610771, + "mc1_stderr": 0.015483691939237272, + "mc2": 0.4405577919486417, + "mc2_stderr": 0.01601590664012013 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.27390791027154665, + "acc_stderr": 0.015332499474791022, + "acc_norm": 0.3707201889020071, + "acc_norm_stderr": 0.0166058012892126 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "huggyllama/llama-7b", + "model_sha": "8416d3fefb0cb3ff5775a7b13c1692d10ff1aa16", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/huiwonLee/sft_v4_reverse_lora_8bit_v1/result_2024-04-09 07:27:52.json b/huiwonLee/sft_v4_reverse_lora_8bit_v1/result_2024-04-09 07:27:52.json new file mode 100644 index 0000000000000000000000000000000000000000..e98f16323d7bef2b10db5dfda89600d0912e2646 --- /dev/null +++ b/huiwonLee/sft_v4_reverse_lora_8bit_v1/result_2024-04-09 07:27:52.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.431740614334471, + "acc_stderr": 0.01447459142719621, + "acc_norm": 0.5418088737201365, + "acc_norm_stderr": 0.014560220308714698 + }, + "harness|ko_hellaswag|10": { + "acc": 0.30920135431189005, + 
"acc_stderr": 0.004612198061600094, + "acc_norm": 0.4070902210714997, + "acc_norm_stderr": 0.004902878806733036 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6257309941520468, + "acc_stderr": 0.03711601185389481, + "acc_norm": 0.6257309941520468, + "acc_norm_stderr": 0.03711601185389481 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.46601941747572817, + "acc_stderr": 0.04939291447273481, + "acc_norm": 0.46601941747572817, + "acc_norm_stderr": 0.04939291447273481 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5900383141762452, + "acc_stderr": 0.017587672312336062, + "acc_norm": 0.5900383141762452, + "acc_norm_stderr": 0.017587672312336062 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45925925925925926, + "acc_stderr": 0.04304979692464244, + "acc_norm": 0.45925925925925926, + "acc_norm_stderr": 0.04304979692464244 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.451063829787234, + "acc_stderr": 0.032529096196131965, + "acc_norm": 0.451063829787234, + "acc_norm_stderr": 0.032529096196131965 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.45180722891566266, + "acc_stderr": 0.03874371556587953, + "acc_norm": 0.45180722891566266, + "acc_norm_stderr": 0.03874371556587953 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4437299035369775, + "acc_stderr": 0.028217683556652315, + "acc_norm": 0.4437299035369775, + "acc_norm_stderr": 0.028217683556652315 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.48878923766816146, + "acc_stderr": 0.033549366530984746, + "acc_norm": 0.48878923766816146, + "acc_norm_stderr": 0.033549366530984746 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4961832061068702, + "acc_stderr": 0.043851623256015534, + "acc_norm": 0.4961832061068702, + "acc_norm_stderr": 0.043851623256015534 + }, + "harness|ko_mmlu_medical_genetics|5": { + 
"acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.4595959595959596, + "acc_stderr": 0.03550702465131342, + "acc_norm": 0.4595959595959596, + "acc_norm_stderr": 0.03550702465131342 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4896551724137931, + "acc_stderr": 0.04165774775728763, + "acc_norm": 0.4896551724137931, + "acc_norm_stderr": 0.04165774775728763 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.04023382273617747, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.04023382273617747 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.40336134453781514, + "acc_stderr": 0.03186608121408831, + "acc_norm": 0.40336134453781514, + "acc_norm_stderr": 0.03186608121408831 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.382051282051282, + "acc_stderr": 0.024635549163908217, + "acc_norm": 0.382051282051282, + "acc_norm_stderr": 0.024635549163908217 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.53, + "acc_stderr": 0.05016135580465918, + "acc_norm": 0.53, + "acc_norm_stderr": 0.05016135580465918 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.04668408033024932, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.04668408033024932 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3103448275862069, + "acc_stderr": 0.03255086769970103, + "acc_norm": 0.3103448275862069, + "acc_norm_stderr": 0.03255086769970103 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4, + "acc_stderr": 0.027869320571664618, + "acc_norm": 0.4, + "acc_norm_stderr": 0.027869320571664618 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 
0.6068376068376068, + "acc_stderr": 0.03199957924651048, + "acc_norm": 0.6068376068376068, + "acc_norm_stderr": 0.03199957924651048 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3622641509433962, + "acc_stderr": 0.029582245128384296, + "acc_norm": 0.3622641509433962, + "acc_norm_stderr": 0.029582245128384296 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.44545454545454544, + "acc_stderr": 0.047605488214603246, + "acc_norm": 0.44545454545454544, + "acc_norm_stderr": 0.047605488214603246 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3037037037037037, + "acc_stderr": 0.02803792996911498, + "acc_norm": 0.3037037037037037, + "acc_norm_stderr": 0.02803792996911498 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.23178807947019867, + "acc_stderr": 0.034454062719870546, + "acc_norm": 0.23178807947019867, + "acc_norm_stderr": 0.034454062719870546 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.46766169154228854, + "acc_stderr": 0.03528131472933607, + "acc_norm": 0.46766169154228854, + "acc_norm_stderr": 0.03528131472933607 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4161849710982659, + "acc_stderr": 0.037585177754049466, + "acc_norm": 0.4161849710982659, + "acc_norm_stderr": 0.037585177754049466 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3201058201058201, + "acc_stderr": 0.0240268463928735, + "acc_norm": 0.3201058201058201, + "acc_norm_stderr": 0.0240268463928735 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3263888888888889, + "acc_stderr": 0.03921067198982266, + "acc_norm": 0.3263888888888889, + "acc_norm_stderr": 0.03921067198982266 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + 
"harness|ko_mmlu_moral_disputes|5": { + "acc": 0.3554913294797688, + "acc_stderr": 0.025770292082977264, + "acc_norm": 0.3554913294797688, + "acc_norm_stderr": 0.025770292082977264 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3987730061349693, + "acc_stderr": 0.03847021420456023, + "acc_norm": 0.3987730061349693, + "acc_norm_stderr": 0.03847021420456023 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.02764847787741332, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.02764847787741332 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.40414507772020725, + "acc_stderr": 0.035415085788840193, + "acc_norm": 0.40414507772020725, + "acc_norm_stderr": 0.035415085788840193 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2894736842105263, + "acc_stderr": 0.04266339443159394, + "acc_norm": 0.2894736842105263, + "acc_norm_stderr": 0.04266339443159394 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5211009174311927, + "acc_stderr": 0.02141822475426464, + "acc_norm": 0.5211009174311927, + "acc_norm_stderr": 0.02141822475426464 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.25396825396825395, + "acc_stderr": 0.038932596106046734, + "acc_norm": 0.25396825396825395, + "acc_norm_stderr": 0.038932596106046734 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4150326797385621, + "acc_stderr": 0.028213504177824106, + "acc_norm": 0.4150326797385621, + "acc_norm_stderr": 0.028213504177824106 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.47107438016528924, + "acc_stderr": 0.04556710331269498, + "acc_norm": 0.47107438016528924, + 
"acc_norm_stderr": 0.04556710331269498 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.47368421052631576, + "acc_stderr": 0.04063302731486671, + "acc_norm": 0.47368421052631576, + "acc_norm_stderr": 0.04063302731486671 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3300653594771242, + "acc_stderr": 0.01902372616072456, + "acc_norm": 0.3300653594771242, + "acc_norm_stderr": 0.01902372616072456 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.31560283687943264, + "acc_stderr": 0.02772498944950931, + "acc_norm": 0.31560283687943264, + "acc_norm_stderr": 0.02772498944950931 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.04547960999764376, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.04547960999764376 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.32407407407407407, + "acc_stderr": 0.03191923445686185, + "acc_norm": 0.32407407407407407, + "acc_norm_stderr": 0.03191923445686185 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.47, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.47, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3786764705882353, + "acc_stderr": 0.02946513363977613, + "acc_norm": 0.3786764705882353, + "acc_norm_stderr": 0.02946513363977613 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2530612244897959, + "acc_stderr": 0.02783302387139968, + "acc_norm": 0.2530612244897959, + "acc_norm_stderr": 0.02783302387139968 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.43037974683544306, + 
"acc_stderr": 0.03223017195937598, + "acc_norm": 0.43037974683544306, + "acc_norm_stderr": 0.03223017195937598 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2835723598435463, + "acc_stderr": 0.011511900775968325, + "acc_norm": 0.2835723598435463, + "acc_norm_stderr": 0.011511900775968325 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.36764705882352944, + "acc_stderr": 0.03384132045674118, + "acc_norm": 0.36764705882352944, + "acc_norm_stderr": 0.03384132045674118 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3515151515151515, + "acc_stderr": 0.0372820699868265, + "acc_norm": 0.3515151515151515, + "acc_norm_stderr": 0.0372820699868265 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.36964504283965727, + "mc1_stderr": 0.01689818070697389, + "mc2": 0.5471767154414605, + "mc2_stderr": 0.01571979816002145 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3305785123966942, + "acc_stderr": 0.016173423298845697, + "acc_norm": 0.4462809917355372, + "acc_norm_stderr": 0.017090852631668336 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + 
"harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "huiwonLee/sft_v4_reverse_lora_8bit_v1", + "model_sha": "6dcc84f7c40d0b93f5eacb2e5f4c6ecfb0a16bc8", + "model_dtype": "torch.float16", + "lighteval_sha": "", + 
"num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/hwanhe/Big_Minirecord02/result_2023-11-19 23:35:28.json b/hwanhe/Big_Minirecord02/result_2023-11-19 23:35:28.json new file mode 100644 index 0000000000000000000000000000000000000000..26c256d55a68728f806d816817c46290cac05b18 --- /dev/null +++ b/hwanhe/Big_Minirecord02/result_2023-11-19 23:35:28.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3873720136518771, + "acc_stderr": 0.01423587248790987, + "acc_norm": 0.43430034129692835, + "acc_norm_stderr": 0.014484703048857355 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3999203345947023, + "acc_stderr": 0.0048888050031030755, + "acc_norm": 0.5129456283608843, + "acc_norm_stderr": 0.004988108663179773 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5380116959064327, + "acc_stderr": 0.038237270928823064, + "acc_norm": 0.5380116959064327, + "acc_norm_stderr": 0.038237270928823064 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5631067961165048, + "acc_stderr": 0.049111471073657764, + "acc_norm": 0.5631067961165048, + "acc_norm_stderr": 0.049111471073657764 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4878671775223499, + "acc_stderr": 0.017874698667491355, + "acc_norm": 0.4878671775223499, + "acc_norm_stderr": 0.017874698667491355 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4222222222222222, + "acc_stderr": 0.04266763404099582, + "acc_norm": 0.4222222222222222, + "acc_norm_stderr": 0.04266763404099582 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.43829787234042555, + "acc_stderr": 0.032436186361081025, + "acc_norm": 0.43829787234042555, + "acc_norm_stderr": 0.032436186361081025 + }, + "harness|ko_mmlu_virology|5": { + "acc": 
0.40963855421686746, + "acc_stderr": 0.03828401115079022, + "acc_norm": 0.40963855421686746, + "acc_norm_stderr": 0.03828401115079022 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5209003215434084, + "acc_stderr": 0.02837327096106942, + "acc_norm": 0.5209003215434084, + "acc_norm_stderr": 0.02837327096106942 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4125560538116592, + "acc_stderr": 0.03304062175449296, + "acc_norm": 0.4125560538116592, + "acc_norm_stderr": 0.03304062175449296 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4732824427480916, + "acc_stderr": 0.04379024936553894, + "acc_norm": 0.4732824427480916, + "acc_norm_stderr": 0.04379024936553894 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5151515151515151, + "acc_stderr": 0.0356071651653106, + "acc_norm": 0.5151515151515151, + "acc_norm_stderr": 0.0356071651653106 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.45517241379310347, + "acc_stderr": 0.04149886942192118, + "acc_norm": 0.45517241379310347, + "acc_norm_stderr": 0.04149886942192118 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.04533838195929776, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.04533838195929776 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.453781512605042, + "acc_stderr": 0.03233943468182088, + "acc_norm": 0.453781512605042, + "acc_norm_stderr": 0.03233943468182088 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.46923076923076923, + "acc_stderr": 0.02530295889085015, + "acc_norm": 0.46923076923076923, + "acc_norm_stderr": 0.02530295889085015 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956913, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956913 + }, + 
"harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3842364532019704, + "acc_stderr": 0.034223985656575515, + "acc_norm": 0.3842364532019704, + "acc_norm_stderr": 0.034223985656575515 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.49032258064516127, + "acc_stderr": 0.028438677998909558, + "acc_norm": 0.49032258064516127, + "acc_norm_stderr": 0.028438677998909558 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.030882736974138656, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.030882736974138656 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5018867924528302, + "acc_stderr": 0.030772653642075664, + "acc_norm": 0.5018867924528302, + "acc_norm_stderr": 0.030772653642075664 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.028317533496066475, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.028317533496066475 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.03802039760107903, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.03802039760107903 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6417910447761194, + "acc_stderr": 0.03390393042268815, + "acc_norm": 0.6417910447761194, + "acc_norm_stderr": 0.03390393042268815 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.44508670520231214, + "acc_stderr": 0.03789401760283647, + "acc_norm": 0.44508670520231214, + "acc_norm_stderr": 
0.03789401760283647 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3386243386243386, + "acc_stderr": 0.024373197867983056, + "acc_norm": 0.3386243386243386, + "acc_norm_stderr": 0.024373197867983056 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3402777777777778, + "acc_stderr": 0.03962135573486219, + "acc_norm": 0.3402777777777778, + "acc_norm_stderr": 0.03962135573486219 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237101, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237101 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.49710982658959535, + "acc_stderr": 0.026918645383239004, + "acc_norm": 0.49710982658959535, + "acc_norm_stderr": 0.026918645383239004 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4662576687116564, + "acc_stderr": 0.03919415545048411, + "acc_norm": 0.4662576687116564, + "acc_norm_stderr": 0.03919415545048411 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.027801656212323667, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.027801656212323667 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.533678756476684, + "acc_stderr": 0.036002440698671784, + "acc_norm": 0.533678756476684, + "acc_norm_stderr": 0.036002440698671784 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.32456140350877194, + "acc_stderr": 0.04404556157374768, + "acc_norm": 0.32456140350877194, + "acc_norm_stderr": 0.04404556157374768 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5082568807339449, + "acc_stderr": 0.021434399918214334, + "acc_norm": 
0.5082568807339449, + "acc_norm_stderr": 0.021434399918214334 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3253968253968254, + "acc_stderr": 0.04190596438871136, + "acc_norm": 0.3253968253968254, + "acc_norm_stderr": 0.04190596438871136 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.47058823529411764, + "acc_stderr": 0.028580341065138282, + "acc_norm": 0.47058823529411764, + "acc_norm_stderr": 0.028580341065138282 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.48, + "acc_stderr": 0.05021167315686781, + "acc_norm": 0.48, + "acc_norm_stderr": 0.05021167315686781 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6611570247933884, + "acc_stderr": 0.0432076780753667, + "acc_norm": 0.6611570247933884, + "acc_norm_stderr": 0.0432076780753667 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40131578947368424, + "acc_stderr": 0.03988903703336284, + "acc_norm": 0.40131578947368424, + "acc_norm_stderr": 0.03988903703336284 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3627450980392157, + "acc_stderr": 0.01945076843250551, + "acc_norm": 0.3627450980392157, + "acc_norm_stderr": 0.01945076843250551 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3617021276595745, + "acc_stderr": 0.028663820147199492, + "acc_norm": 0.3617021276595745, + "acc_norm_stderr": 0.028663820147199492 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.44642857142857145, + "acc_stderr": 0.047184714852195886, + "acc_norm": 0.44642857142857145, + "acc_norm_stderr": 0.047184714852195886 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.38425925925925924, + "acc_stderr": 0.03317354514310742, + "acc_norm": 0.38425925925925924, + "acc_norm_stderr": 0.03317354514310742 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.29832402234636873, + "acc_stderr": 0.01530184004512928, + "acc_norm": 0.29832402234636873, + "acc_norm_stderr": 0.01530184004512928 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.4, + 
"acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.56, + "acc_stderr": 0.0498887651569859, + "acc_norm": 0.56, + "acc_norm_stderr": 0.0498887651569859 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3860294117647059, + "acc_stderr": 0.029573269134411124, + "acc_norm": 0.3860294117647059, + "acc_norm_stderr": 0.029573269134411124 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4816326530612245, + "acc_stderr": 0.03198761546763125, + "acc_norm": 0.4816326530612245, + "acc_norm_stderr": 0.03198761546763125 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.569620253164557, + "acc_stderr": 0.032230171959375976, + "acc_norm": 0.569620253164557, + "acc_norm_stderr": 0.032230171959375976 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.30834419817470665, + "acc_stderr": 0.011794833789715329, + "acc_norm": 0.30834419817470665, + "acc_norm_stderr": 0.011794833789715329 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4950980392156863, + "acc_stderr": 0.03509143375606786, + "acc_norm": 0.4950980392156863, + "acc_norm_stderr": 0.03509143375606786 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4484848484848485, + "acc_stderr": 0.038835659779569286, + "acc_norm": 0.4484848484848485, + "acc_norm_stderr": 0.038835659779569286 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2827417380660955, + "mc1_stderr": 0.015764770836777308, + "mc2": 0.42843883250666265, + "mc2_stderr": 0.015479949381497765 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.41912632821723733, + "acc_stderr": 0.016963995010862792, + "acc_norm": 0.4793388429752066, + "acc_norm_stderr": 0.01717567127983645 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + 
"harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 
1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "hwanhe/Big_Minirecord02", + "model_sha": "32f6a2427781870bac71410f3b68407d4db6ce0d", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/hwanhe/Mistral_sum_test01/result_2023-11-07 02:58:51.json b/hwanhe/Mistral_sum_test01/result_2023-11-07 02:58:51.json new file mode 100644 index 0000000000000000000000000000000000000000..9c9fb675d8d032fc6696187a6e12463c2e442369 --- /dev/null +++ b/hwanhe/Mistral_sum_test01/result_2023-11-07 02:58:51.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.27303754266211605, + "acc_stderr": 0.013019332762635734, + "acc_norm": 0.3046075085324232, + "acc_norm_stderr": 0.01344952210993249 + }, + "harness|ko_hellaswag|10": { + "acc": 0.31935869348735313, + "acc_stderr": 0.004652753439460146, + "acc_norm": 0.3890659231228839, + "acc_norm_stderr": 0.004865419468213886 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4502923976608187, + "acc_stderr": 0.03815827365913235, + "acc_norm": 0.4502923976608187, + "acc_norm_stderr": 0.03815827365913235 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.44660194174757284, + "acc_stderr": 0.04922424153458933, + "acc_norm": 0.44660194174757284, + "acc_norm_stderr": 0.04922424153458933 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.38697318007662834, + 
"acc_stderr": 0.017417138059440153, + "acc_norm": 0.38697318007662834, + "acc_norm_stderr": 0.017417138059440153 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.362962962962963, + "acc_stderr": 0.041539484047424, + "acc_norm": 0.362962962962963, + "acc_norm_stderr": 0.041539484047424 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2680851063829787, + "acc_stderr": 0.028957342788342347, + "acc_norm": 0.2680851063829787, + "acc_norm_stderr": 0.028957342788342347 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3433734939759036, + "acc_stderr": 0.03696584317010601, + "acc_norm": 0.3433734939759036, + "acc_norm_stderr": 0.03696584317010601 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.39228295819935693, + "acc_stderr": 0.027731258647012, + "acc_norm": 0.39228295819935693, + "acc_norm_stderr": 0.027731258647012 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.31390134529147984, + "acc_stderr": 0.031146796482972465, + "acc_norm": 0.31390134529147984, + "acc_norm_stderr": 0.031146796482972465 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3435114503816794, + "acc_stderr": 0.041649760719448786, + "acc_norm": 0.3435114503816794, + "acc_norm_stderr": 0.041649760719448786 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.4292929292929293, + "acc_stderr": 0.03526552724601198, + "acc_norm": 0.4292929292929293, + "acc_norm_stderr": 0.03526552724601198 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.503448275862069, + "acc_stderr": 0.04166567577101579, + "acc_norm": 0.503448275862069, + "acc_norm_stderr": 0.04166567577101579 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2647058823529412, + 
"acc_stderr": 0.04389869956808778, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.04389869956808778 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.36134453781512604, + "acc_stderr": 0.03120469122515002, + "acc_norm": 0.36134453781512604, + "acc_norm_stderr": 0.03120469122515002 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3717948717948718, + "acc_stderr": 0.024503472557110946, + "acc_norm": 0.3717948717948718, + "acc_norm_stderr": 0.024503472557110946 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.39814814814814814, + "acc_stderr": 0.04732332615978814, + "acc_norm": 0.39814814814814814, + "acc_norm_stderr": 0.04732332615978814 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39408866995073893, + "acc_stderr": 0.034381579670365446, + "acc_norm": 0.39408866995073893, + "acc_norm_stderr": 0.034381579670365446 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3870967741935484, + "acc_stderr": 0.02770935967503249, + "acc_norm": 0.3870967741935484, + "acc_norm_stderr": 0.02770935967503249 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6068376068376068, + "acc_stderr": 0.03199957924651047, + "acc_norm": 0.6068376068376068, + "acc_norm_stderr": 0.03199957924651047 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3622641509433962, + "acc_stderr": 0.0295822451283843, + "acc_norm": 0.3622641509433962, + "acc_norm_stderr": 0.0295822451283843 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.36363636363636365, + "acc_stderr": 0.04607582090719976, + "acc_norm": 0.36363636363636365, + "acc_norm_stderr": 0.04607582090719976 + }, + 
"harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.27037037037037037, + "acc_stderr": 0.027080372815145665, + "acc_norm": 0.27037037037037037, + "acc_norm_stderr": 0.027080372815145665 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.037101857261199946, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.037101857261199946 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.4925373134328358, + "acc_stderr": 0.03535140084276719, + "acc_norm": 0.4925373134328358, + "acc_norm_stderr": 0.03535140084276719 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3352601156069364, + "acc_stderr": 0.03599586301247077, + "acc_norm": 0.3352601156069364, + "acc_norm_stderr": 0.03599586301247077 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.024278568024307695, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.024278568024307695 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3263888888888889, + "acc_stderr": 0.03921067198982266, + "acc_norm": 0.3263888888888889, + "acc_norm_stderr": 0.03921067198982266 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237101, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237101 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.42485549132947975, + "acc_stderr": 0.026613350840261746, + "acc_norm": 0.42485549132947975, + "acc_norm_stderr": 0.026613350840261746 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3619631901840491, + "acc_stderr": 0.037757007291414416, + "acc_norm": 0.3619631901840491, + "acc_norm_stderr": 0.037757007291414416 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.02712511551316687, + "acc_norm": 0.3888888888888889, + 
"acc_norm_stderr": 0.02712511551316687 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.3626943005181347, + "acc_stderr": 0.03469713791704372, + "acc_norm": 0.3626943005181347, + "acc_norm_stderr": 0.03469713791704372 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3157894736842105, + "acc_stderr": 0.04372748290278007, + "acc_norm": 0.3157894736842105, + "acc_norm_stderr": 0.04372748290278007 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3467889908256881, + "acc_stderr": 0.020406097104093027, + "acc_norm": 0.3467889908256881, + "acc_norm_stderr": 0.020406097104093027 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30158730158730157, + "acc_stderr": 0.04104947269903394, + "acc_norm": 0.30158730158730157, + "acc_norm_stderr": 0.04104947269903394 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.42810457516339867, + "acc_stderr": 0.028332397483664274, + "acc_norm": 0.42810457516339867, + "acc_norm_stderr": 0.028332397483664274 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5206611570247934, + "acc_stderr": 0.04560456086387235, + "acc_norm": 0.5206611570247934, + "acc_norm_stderr": 0.04560456086387235 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.34210526315789475, + "acc_stderr": 0.03860731599316091, + "acc_norm": 0.34210526315789475, + "acc_norm_stderr": 0.03860731599316091 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3415032679738562, + "acc_stderr": 0.019184639328092487, + "acc_norm": 0.3415032679738562, + "acc_norm_stderr": 0.019184639328092487 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3404255319148936, + "acc_stderr": 
0.02826765748265015, + "acc_norm": 0.3404255319148936, + "acc_norm_stderr": 0.02826765748265015 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.04547960999764376, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.04547960999764376 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.032568505702936464, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.032568505702936464 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2737430167597765, + "acc_stderr": 0.014912413096372434, + "acc_norm": 0.2737430167597765, + "acc_norm_stderr": 0.014912413096372434 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.53, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.53, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.39705882352941174, + "acc_stderr": 0.02972215209928006, + "acc_norm": 0.39705882352941174, + "acc_norm_stderr": 0.02972215209928006 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.35918367346938773, + "acc_stderr": 0.030713560455108493, + "acc_norm": 0.35918367346938773, + "acc_norm_stderr": 0.030713560455108493 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.4430379746835443, + "acc_stderr": 0.032335327775334835, + "acc_norm": 0.4430379746835443, + "acc_norm_stderr": 0.032335327775334835 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2796610169491525, + "acc_stderr": 0.011463397393861957, + "acc_norm": 0.2796610169491525, + "acc_norm_stderr": 0.011463397393861957 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.25980392156862747, + "acc_stderr": 0.03077855467869326, + "acc_norm": 0.25980392156862747, + "acc_norm_stderr": 0.03077855467869326 + }, + 
"harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3515151515151515, + "acc_stderr": 0.0372820699868265, + "acc_norm": 0.3515151515151515, + "acc_norm_stderr": 0.0372820699868265 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2827417380660955, + "mc1_stderr": 0.015764770836777308, + "mc2": 0.4365926945057216, + "mc2_stderr": 0.01582664283045154 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3778040141676505, + "acc_stderr": 0.016669082840694967, + "acc_norm": 0.41440377804014167, + "acc_norm_stderr": 0.01693658338394362 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + 
"harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "hwanhe/Mistral_sum_test01", + "model_sha": "de97843340ab3e732f4ba05ecd22727d76b6c628", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/hwanhe/Mistral_test01/result_2023-10-30 07:55:50.json b/hwanhe/Mistral_test01/result_2023-10-30 07:55:50.json new file mode 100644 index 0000000000000000000000000000000000000000..ea30798c6425786a00c2c77b86d755ecc1f31f0f --- /dev/null +++ b/hwanhe/Mistral_test01/result_2023-10-30 07:55:50.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3361774744027304, + 
"acc_stderr": 0.013804855026205765, + "acc_norm": 0.3822525597269625, + "acc_norm_stderr": 0.014200454049979295 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3701453893646684, + "acc_stderr": 0.004818566366066918, + "acc_norm": 0.4788886675960964, + "acc_norm_stderr": 0.004985331652408345 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4619883040935672, + "acc_stderr": 0.03823727092882307, + "acc_norm": 0.4619883040935672, + "acc_norm_stderr": 0.03823727092882307 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5436893203883495, + "acc_stderr": 0.04931801994220416, + "acc_norm": 0.5436893203883495, + "acc_norm_stderr": 0.04931801994220416 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4840357598978289, + "acc_stderr": 0.017870847506081717, + "acc_norm": 0.4840357598978289, + "acc_norm_stderr": 0.017870847506081717 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.042446332383532286, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.042446332383532286 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39574468085106385, + "acc_stderr": 0.031967586978353627, + "acc_norm": 0.39574468085106385, + "acc_norm_stderr": 0.031967586978353627 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42771084337349397, + "acc_stderr": 0.03851597683718533, + "acc_norm": 0.42771084337349397, + "acc_norm_stderr": 0.03851597683718533 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4533762057877814, + "acc_stderr": 0.028274359854894255, + "acc_norm": 0.4533762057877814, + "acc_norm_stderr": 0.028274359854894255 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4798206278026906, + "acc_stderr": 0.033530461674123, + "acc_norm": 0.4798206278026906, + "acc_norm_stderr": 0.033530461674123 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 
0.46564885496183206, + "acc_stderr": 0.04374928560599738, + "acc_norm": 0.46564885496183206, + "acc_norm_stderr": 0.04374928560599738 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5656565656565656, + "acc_stderr": 0.03531505879359183, + "acc_norm": 0.5656565656565656, + "acc_norm_stderr": 0.03531505879359183 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.45517241379310347, + "acc_stderr": 0.04149886942192117, + "acc_norm": 0.45517241379310347, + "acc_norm_stderr": 0.04149886942192117 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.042801058373643966, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.042801058373643966 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4957983193277311, + "acc_stderr": 0.03247734334448111, + "acc_norm": 0.4957983193277311, + "acc_norm_stderr": 0.03247734334448111 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4897435897435897, + "acc_stderr": 0.025345672221942374, + "acc_norm": 0.4897435897435897, + "acc_norm_stderr": 0.025345672221942374 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.59, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.59, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5277777777777778, + "acc_stderr": 0.04826217294139894, + "acc_norm": 0.5277777777777778, + "acc_norm_stderr": 0.04826217294139894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.43349753694581283, + "acc_stderr": 0.034867317274198714, + "acc_norm": 0.43349753694581283, + "acc_norm_stderr": 0.034867317274198714 + }, + 
"harness|ko_mmlu_high_school_biology|5": { + "acc": 0.47096774193548385, + "acc_stderr": 0.028396016402761008, + "acc_norm": 0.47096774193548385, + "acc_norm_stderr": 0.028396016402761008 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7264957264957265, + "acc_stderr": 0.02920254015343119, + "acc_norm": 0.7264957264957265, + "acc_norm_stderr": 0.02920254015343119 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.46037735849056605, + "acc_stderr": 0.030676096599389174, + "acc_norm": 0.46037735849056605, + "acc_norm_stderr": 0.030676096599389174 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.32222222222222224, + "acc_stderr": 0.028493465091028593, + "acc_norm": 0.32222222222222224, + "acc_norm_stderr": 0.028493465091028593 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.037101857261199946, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.037101857261199946 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6119402985074627, + "acc_stderr": 0.03445789964362749, + "acc_norm": 0.6119402985074627, + "acc_norm_stderr": 0.03445789964362749 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3699421965317919, + "acc_stderr": 0.03681229633394319, + "acc_norm": 0.3699421965317919, + "acc_norm_stderr": 0.03681229633394319 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3835978835978836, + "acc_stderr": 0.0250437573185202, + "acc_norm": 0.3835978835978836, + "acc_norm_stderr": 0.0250437573185202 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3819444444444444, + "acc_stderr": 0.040629907841466674, + "acc_norm": 0.3819444444444444, + "acc_norm_stderr": 0.040629907841466674 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + 
"acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.58, + "acc_stderr": 0.04960449637488584, + "acc_norm": 0.58, + "acc_norm_stderr": 0.04960449637488584 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5433526011560693, + "acc_stderr": 0.02681771813034892, + "acc_norm": 0.5433526011560693, + "acc_norm_stderr": 0.02681771813034892 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.49079754601226994, + "acc_stderr": 0.039277056007874414, + "acc_norm": 0.49079754601226994, + "acc_norm_stderr": 0.039277056007874414 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4691358024691358, + "acc_stderr": 0.027767689606833946, + "acc_norm": 0.4691358024691358, + "acc_norm_stderr": 0.027767689606833946 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5025906735751295, + "acc_stderr": 0.03608390745384487, + "acc_norm": 0.5025906735751295, + "acc_norm_stderr": 0.03608390745384487 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2982456140350877, + "acc_stderr": 0.043036840335373173, + "acc_norm": 0.2982456140350877, + "acc_norm_stderr": 0.043036840335373173 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.45688073394495415, + "acc_stderr": 0.02135745878522621, + "acc_norm": 0.45688073394495415, + "acc_norm_stderr": 0.02135745878522621 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.36507936507936506, + "acc_stderr": 0.04306241259127153, + "acc_norm": 0.36507936507936506, + "acc_norm_stderr": 0.04306241259127153 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.49019607843137253, + "acc_stderr": 0.028624412550167965, + "acc_norm": 0.49019607843137253, + "acc_norm_stderr": 0.028624412550167965 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.51, + "acc_stderr": 
0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7024793388429752, + "acc_stderr": 0.04173349148083499, + "acc_norm": 0.7024793388429752, + "acc_norm_stderr": 0.04173349148083499 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.375, + "acc_stderr": 0.039397364351956274, + "acc_norm": 0.375, + "acc_norm_stderr": 0.039397364351956274 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4019607843137255, + "acc_stderr": 0.019835176484375387, + "acc_norm": 0.4019607843137255, + "acc_norm_stderr": 0.019835176484375387 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.375886524822695, + "acc_stderr": 0.028893955412115892, + "acc_norm": 0.375886524822695, + "acc_norm_stderr": 0.028893955412115892 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4107142857142857, + "acc_stderr": 0.04669510663875191, + "acc_norm": 0.4107142857142857, + "acc_norm_stderr": 0.04669510663875191 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.35648148148148145, + "acc_stderr": 0.032664783315272714, + "acc_norm": 0.35648148148148145, + "acc_norm_stderr": 0.032664783315272714 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2636871508379888, + "acc_stderr": 0.01473692638376197, + "acc_norm": 0.2636871508379888, + "acc_norm_stderr": 0.01473692638376197 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.63, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.63, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3639705882352941, + "acc_stderr": 0.029227192460032025, + "acc_norm": 0.3639705882352941, + "acc_norm_stderr": 0.029227192460032025 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.47346938775510206, 
+ "acc_stderr": 0.03196412734523272, + "acc_norm": 0.47346938775510206, + "acc_norm_stderr": 0.03196412734523272 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5611814345991561, + "acc_stderr": 0.032302649315470375, + "acc_norm": 0.5611814345991561, + "acc_norm_stderr": 0.032302649315470375 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3409387222946545, + "acc_stderr": 0.01210681720306721, + "acc_norm": 0.3409387222946545, + "acc_norm_stderr": 0.01210681720306721 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.47058823529411764, + "acc_stderr": 0.03503235296367992, + "acc_norm": 0.47058823529411764, + "acc_norm_stderr": 0.03503235296367992 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.45454545454545453, + "acc_stderr": 0.03888176921674099, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.03888176921674099 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2876376988984088, + "mc1_stderr": 0.015846315101394812, + "mc2": 0.46466595944130523, + "mc2_stderr": 0.015564409326931861 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4781582054309327, + "acc_stderr": 0.017173944474294375, + "acc_norm": 0.5147579693034239, + "acc_norm_stderr": 0.017182864434998564 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + 
"harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + 
"model_name": "hwanhe/Mistral_test01", + "model_sha": "cda9c485214eb8845c47321ef32126ce6622707d", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/hwanhe/Mistral_test02/result_2023-10-31 10:01:31.json b/hwanhe/Mistral_test02/result_2023-10-31 10:01:31.json new file mode 100644 index 0000000000000000000000000000000000000000..a756a2e0ace9c1ce1a4068b68db5bd6c2f6e6359 --- /dev/null +++ b/hwanhe/Mistral_test02/result_2023-10-31 10:01:31.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.38310580204778155, + "acc_stderr": 0.014206472661672883, + "acc_norm": 0.4197952218430034, + "acc_norm_stderr": 0.014422181226303022 + }, + "harness|ko_hellaswag|10": { + "acc": 0.39454291973710415, + "acc_stderr": 0.004877534215987088, + "acc_norm": 0.5135431189006174, + "acc_norm_stderr": 0.004987950663406535 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4619883040935672, + "acc_stderr": 0.03823727092882307, + "acc_norm": 0.4619883040935672, + "acc_norm_stderr": 0.03823727092882307 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5728155339805825, + "acc_stderr": 0.04897957737781168, + "acc_norm": 0.5728155339805825, + "acc_norm_stderr": 0.04897957737781168 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.48659003831417624, + "acc_stderr": 0.01787353173651041, + "acc_norm": 0.48659003831417624, + "acc_norm_stderr": 0.01787353173651041 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.04171654161354543, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.04171654161354543 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3659574468085106, + "acc_stderr": 
0.031489558297455304, + "acc_norm": 0.3659574468085106, + "acc_norm_stderr": 0.031489558297455304 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3795180722891566, + "acc_stderr": 0.037777988227480165, + "acc_norm": 0.3795180722891566, + "acc_norm_stderr": 0.037777988227480165 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4758842443729904, + "acc_stderr": 0.02836504154256457, + "acc_norm": 0.4758842443729904, + "acc_norm_stderr": 0.02836504154256457 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4798206278026906, + "acc_stderr": 0.033530461674123, + "acc_norm": 0.4798206278026906, + "acc_norm_stderr": 0.033530461674123 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3816793893129771, + "acc_stderr": 0.0426073515764456, + "acc_norm": 0.3816793893129771, + "acc_norm_stderr": 0.0426073515764456 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5303030303030303, + "acc_stderr": 0.03555804051763929, + "acc_norm": 0.5303030303030303, + "acc_norm_stderr": 0.03555804051763929 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4827586206896552, + "acc_stderr": 0.04164188720169377, + "acc_norm": 0.4827586206896552, + "acc_norm_stderr": 0.04164188720169377 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237657, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237657 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.49159663865546216, + "acc_stderr": 0.03247390276569669, + "acc_norm": 0.49159663865546216, + "acc_norm_stderr": 0.03247390276569669 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4025641025641026, + "acc_stderr": 0.024864995159767762, + "acc_norm": 0.4025641025641026, + "acc_norm_stderr": 0.024864995159767762 + }, + 
"harness|ko_mmlu_computer_security|5": { + "acc": 0.59, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.59, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.048262172941398944, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.048262172941398944 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4433497536945813, + "acc_stderr": 0.03495334582162933, + "acc_norm": 0.4433497536945813, + "acc_norm_stderr": 0.03495334582162933 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.44516129032258067, + "acc_stderr": 0.02827241018621491, + "acc_norm": 0.44516129032258067, + "acc_norm_stderr": 0.02827241018621491 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.717948717948718, + "acc_stderr": 0.029480360549541194, + "acc_norm": 0.717948717948718, + "acc_norm_stderr": 0.029480360549541194 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4490566037735849, + "acc_stderr": 0.030612730713641095, + "acc_norm": 0.4490566037735849, + "acc_norm_stderr": 0.030612730713641095 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5, + "acc_stderr": 0.04789131426105757, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04789131426105757 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.32592592592592595, + "acc_stderr": 0.02857834836547307, + "acc_norm": 0.32592592592592595, + "acc_norm_stderr": 0.02857834836547307 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.03780445850526733, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.03780445850526733 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5671641791044776, + "acc_stderr": 0.03503490923673281, + "acc_norm": 0.5671641791044776, + "acc_norm_stderr": 0.03503490923673281 + }, + 
"harness|ko_mmlu_college_medicine|5": { + "acc": 0.3468208092485549, + "acc_stderr": 0.03629146670159663, + "acc_norm": 0.3468208092485549, + "acc_norm_stderr": 0.03629146670159663 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.025305906241590632, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.025305906241590632 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3194444444444444, + "acc_stderr": 0.038990736873573344, + "acc_norm": 0.3194444444444444, + "acc_norm_stderr": 0.038990736873573344 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.55, + "acc_stderr": 0.05, + "acc_norm": 0.55, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5086705202312138, + "acc_stderr": 0.0269150473553698, + "acc_norm": 0.5086705202312138, + "acc_norm_stderr": 0.0269150473553698 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4723926380368098, + "acc_stderr": 0.039223782906109894, + "acc_norm": 0.4723926380368098, + "acc_norm_stderr": 0.039223782906109894 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.45987654320987653, + "acc_stderr": 0.02773102275353928, + "acc_norm": 0.45987654320987653, + "acc_norm_stderr": 0.02773102275353928 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5181347150259067, + "acc_stderr": 0.036060650018329185, + "acc_norm": 0.5181347150259067, + "acc_norm_stderr": 0.036060650018329185 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04434600701584925, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04434600701584925 + }, + 
"harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.45321100917431195, + "acc_stderr": 0.021343255165546037, + "acc_norm": 0.45321100917431195, + "acc_norm_stderr": 0.021343255165546037 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3253968253968254, + "acc_stderr": 0.041905964388711366, + "acc_norm": 0.3253968253968254, + "acc_norm_stderr": 0.041905964388711366 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.46405228758169936, + "acc_stderr": 0.028555827516528787, + "acc_norm": 0.46405228758169936, + "acc_norm_stderr": 0.028555827516528787 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6694214876033058, + "acc_stderr": 0.04294340845212094, + "acc_norm": 0.6694214876033058, + "acc_norm_stderr": 0.04294340845212094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.375, + "acc_stderr": 0.039397364351956274, + "acc_norm": 0.375, + "acc_norm_stderr": 0.039397364351956274 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3937908496732026, + "acc_stderr": 0.01976621199107306, + "acc_norm": 0.3937908496732026, + "acc_norm_stderr": 0.01976621199107306 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3404255319148936, + "acc_stderr": 0.028267657482650147, + "acc_norm": 0.3404255319148936, + "acc_norm_stderr": 0.028267657482650147 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3392857142857143, + "acc_stderr": 0.04493949068613539, + "acc_norm": 0.3392857142857143, + "acc_norm_stderr": 0.04493949068613539 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.032568505702936464, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.032568505702936464 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.22569832402234638, + "acc_stderr": 0.013981395058455057, + "acc_norm": 0.22569832402234638, + 
"acc_norm_stderr": 0.013981395058455057 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.2867647058823529, + "acc_stderr": 0.027472274473233818, + "acc_norm": 0.2867647058823529, + "acc_norm_stderr": 0.027472274473233818 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4489795918367347, + "acc_stderr": 0.03184213866687579, + "acc_norm": 0.4489795918367347, + "acc_norm_stderr": 0.03184213866687579 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5864978902953587, + "acc_stderr": 0.03205649904851858, + "acc_norm": 0.5864978902953587, + "acc_norm_stderr": 0.03205649904851858 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3396349413298566, + "acc_stderr": 0.012095592506931974, + "acc_norm": 0.3396349413298566, + "acc_norm_stderr": 0.012095592506931974 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4362745098039216, + "acc_stderr": 0.03480693138457039, + "acc_norm": 0.4362745098039216, + "acc_norm_stderr": 0.03480693138457039 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4909090909090909, + "acc_stderr": 0.03903698647748441, + "acc_norm": 0.4909090909090909, + "acc_norm_stderr": 0.03903698647748441 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2827417380660955, + "mc1_stderr": 0.015764770836777308, + "mc2": 0.4624714536105945, + "mc2_stderr": 0.015555617186203954 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.48288075560802834, + "acc_stderr": 0.017180275246085622, + "acc_norm": 0.5560802833530106, + "acc_norm_stderr": 0.017081884623542543 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + 
"harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + 
"harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "hwanhe/Mistral_test02", + "model_sha": "28ec5016b4e828b0ba127543e9e2931a587a0652", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/hwanhe/Mistral_test03/result_2023-11-01 23:53:49.json b/hwanhe/Mistral_test03/result_2023-11-01 23:53:49.json new file mode 100644 index 0000000000000000000000000000000000000000..9d29cab639cd845190dff451c8fd2e6d1945bc7c --- /dev/null +++ b/hwanhe/Mistral_test03/result_2023-11-01 23:53:49.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.37542662116040953, + "acc_stderr": 0.014150631435111728, + "acc_norm": 0.42406143344709896, + "acc_norm_stderr": 0.014441889627464392 + }, + "harness|ko_hellaswag|10": { + "acc": 0.39165504879506075, + "acc_stderr": 0.0048712266293464, + "acc_norm": 0.5067715594503087, + "acc_norm_stderr": 0.004989323787413519 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.49122807017543857, + "acc_stderr": 0.038342347441649924, + "acc_norm": 0.49122807017543857, + "acc_norm_stderr": 0.038342347441649924 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5436893203883495, + "acc_stderr": 0.049318019942204146, + "acc_norm": 
0.5436893203883495, + "acc_norm_stderr": 0.049318019942204146 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.48659003831417624, + "acc_stderr": 0.017873531736510396, + "acc_norm": 0.48659003831417624, + "acc_norm_stderr": 0.017873531736510396 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3851851851851852, + "acc_stderr": 0.042039210401562783, + "acc_norm": 0.3851851851851852, + "acc_norm_stderr": 0.042039210401562783 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4127659574468085, + "acc_stderr": 0.03218471141400351, + "acc_norm": 0.4127659574468085, + "acc_norm_stderr": 0.03218471141400351 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.40963855421686746, + "acc_stderr": 0.03828401115079022, + "acc_norm": 0.40963855421686746, + "acc_norm_stderr": 0.03828401115079022 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4983922829581994, + "acc_stderr": 0.02839794490780661, + "acc_norm": 0.4983922829581994, + "acc_norm_stderr": 0.02839794490780661 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.45739910313901344, + "acc_stderr": 0.033435777055830646, + "acc_norm": 0.45739910313901344, + "acc_norm_stderr": 0.033435777055830646 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.40458015267175573, + "acc_stderr": 0.043046937953806645, + "acc_norm": 0.40458015267175573, + "acc_norm_stderr": 0.043046937953806645 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5404040404040404, + "acc_stderr": 0.035507024651313425, + "acc_norm": 0.5404040404040404, + "acc_norm_stderr": 0.035507024651313425 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4827586206896552, + "acc_stderr": 0.04164188720169377, 
+ "acc_norm": 0.4827586206896552, + "acc_norm_stderr": 0.04164188720169377 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.04389869956808778, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.04389869956808778 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5210084033613446, + "acc_stderr": 0.032449808499900284, + "acc_norm": 0.5210084033613446, + "acc_norm_stderr": 0.032449808499900284 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4794871794871795, + "acc_stderr": 0.025329663163489943, + "acc_norm": 0.4794871794871795, + "acc_norm_stderr": 0.025329663163489943 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.048262172941398944, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.048262172941398944 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.03481904844438803, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.03481904844438803 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.43548387096774194, + "acc_stderr": 0.028206225591502737, + "acc_norm": 0.43548387096774194, + "acc_norm_stderr": 0.028206225591502737 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7094017094017094, + "acc_stderr": 0.029745048572674064, + "acc_norm": 0.7094017094017094, + "acc_norm_stderr": 0.029745048572674064 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.44528301886792454, + "acc_stderr": 0.030588052974270658, + "acc_norm": 0.44528301886792454, + "acc_norm_stderr": 0.030588052974270658 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 
0.509090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.31851851851851853, + "acc_stderr": 0.02840653309060846, + "acc_norm": 0.31851851851851853, + "acc_norm_stderr": 0.02840653309060846 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.26490066225165565, + "acc_stderr": 0.036030385453603854, + "acc_norm": 0.26490066225165565, + "acc_norm_stderr": 0.036030385453603854 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6368159203980099, + "acc_stderr": 0.034005985055990146, + "acc_norm": 0.6368159203980099, + "acc_norm_stderr": 0.034005985055990146 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4046242774566474, + "acc_stderr": 0.03742461193887248, + "acc_norm": 0.4046242774566474, + "acc_norm_stderr": 0.03742461193887248 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3783068783068783, + "acc_stderr": 0.024976954053155247, + "acc_norm": 0.3783068783068783, + "acc_norm_stderr": 0.024976954053155247 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3125, + "acc_stderr": 0.038760854559127644, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.038760854559127644 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.57, + "acc_stderr": 0.04975698519562427, + "acc_norm": 0.57, + "acc_norm_stderr": 0.04975698519562427 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5173410404624278, + "acc_stderr": 0.026902900458666654, + "acc_norm": 0.5173410404624278, + "acc_norm_stderr": 0.026902900458666654 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4723926380368098, + "acc_stderr": 0.039223782906109894, + "acc_norm": 0.4723926380368098, + "acc_norm_stderr": 0.039223782906109894 + }, + "harness|ko_mmlu_prehistory|5": { + 
"acc": 0.4567901234567901, + "acc_stderr": 0.027716661650194038, + "acc_norm": 0.4567901234567901, + "acc_norm_stderr": 0.027716661650194038 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5181347150259067, + "acc_stderr": 0.036060650018329185, + "acc_norm": 0.5181347150259067, + "acc_norm_stderr": 0.036060650018329185 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.34210526315789475, + "acc_stderr": 0.04462917535336937, + "acc_norm": 0.34210526315789475, + "acc_norm_stderr": 0.04462917535336937 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5064220183486239, + "acc_stderr": 0.02143555482001308, + "acc_norm": 0.5064220183486239, + "acc_norm_stderr": 0.02143555482001308 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.04285714285714281, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.04285714285714281 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.49673202614379086, + "acc_stderr": 0.028629305194003533, + "acc_norm": 0.49673202614379086, + "acc_norm_stderr": 0.028629305194003533 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6859504132231405, + "acc_stderr": 0.04236964753041018, + "acc_norm": 0.6859504132231405, + "acc_norm_stderr": 0.04236964753041018 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40131578947368424, + "acc_stderr": 0.03988903703336284, + "acc_norm": 0.40131578947368424, + "acc_norm_stderr": 0.03988903703336284 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4133986928104575, + "acc_stderr": 0.019922115682786682, + "acc_norm": 0.4133986928104575, + "acc_norm_stderr": 0.019922115682786682 + }, 
+ "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3546099290780142, + "acc_stderr": 0.02853865002887864, + "acc_norm": 0.3546099290780142, + "acc_norm_stderr": 0.02853865002887864 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.48214285714285715, + "acc_stderr": 0.047427623612430116, + "acc_norm": 0.48214285714285715, + "acc_norm_stderr": 0.047427623612430116 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3287037037037037, + "acc_stderr": 0.03203614084670058, + "acc_norm": 0.3287037037037037, + "acc_norm_stderr": 0.03203614084670058 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24022346368715083, + "acc_stderr": 0.014288343803925312, + "acc_norm": 0.24022346368715083, + "acc_norm_stderr": 0.014288343803925312 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.31985294117647056, + "acc_stderr": 0.028332959514031225, + "acc_norm": 0.31985294117647056, + "acc_norm_stderr": 0.028332959514031225 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.46938775510204084, + "acc_stderr": 0.031949171367580624, + "acc_norm": 0.46938775510204084, + "acc_norm_stderr": 0.031949171367580624 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5780590717299579, + "acc_stderr": 0.0321481463024037, + "acc_norm": 0.5780590717299579, + "acc_norm_stderr": 0.0321481463024037 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.32920469361147325, + "acc_stderr": 0.012002091666902312, + "acc_norm": 0.32920469361147325, + "acc_norm_stderr": 0.012002091666902312 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5196078431372549, + "acc_stderr": 
0.03506612560524866, + "acc_norm": 0.5196078431372549, + "acc_norm_stderr": 0.03506612560524866 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.46060606060606063, + "acc_stderr": 0.03892207016552013, + "acc_norm": 0.46060606060606063, + "acc_norm_stderr": 0.03892207016552013 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.28151774785801714, + "mc1_stderr": 0.01574402724825605, + "mc2": 0.4568705456080681, + "mc2_stderr": 0.01559510037840762 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.48760330578512395, + "acc_stderr": 0.017185069732676528, + "acc_norm": 0.5619834710743802, + "acc_norm_stderr": 0.01705775370216029 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 
1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "hwanhe/Mistral_test03", + "model_sha": "0fbb09941fd9a175f92d61159081a8cbd5428061", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/hwanhe/Mistral_test04/result_2023-11-05 22:29:58.json b/hwanhe/Mistral_test04/result_2023-11-05 22:29:58.json new file mode 100644 index 0000000000000000000000000000000000000000..7e0e97ff2bec2f1f2386e96a976cf44390c5aa57 --- /dev/null +++ b/hwanhe/Mistral_test04/result_2023-11-05 22:29:58.json @@ -0,0 +1,444 @@ +{ + 
"results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3890784982935154, + "acc_stderr": 0.014247309976045607, + "acc_norm": 0.43856655290102387, + "acc_norm_stderr": 0.014500682618212862 + }, + "harness|ko_hellaswag|10": { + "acc": 0.39543915554670384, + "acc_stderr": 0.004879455474663812, + "acc_norm": 0.514937263493328, + "acc_norm_stderr": 0.0049875542559818554 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4619883040935672, + "acc_stderr": 0.03823727092882307, + "acc_norm": 0.4619883040935672, + "acc_norm_stderr": 0.03823727092882307 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5533980582524272, + "acc_stderr": 0.04922424153458934, + "acc_norm": 0.5533980582524272, + "acc_norm_stderr": 0.04922424153458934 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4878671775223499, + "acc_stderr": 0.017874698667491355, + "acc_norm": 0.4878671775223499, + "acc_norm_stderr": 0.017874698667491355 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.04244633238353229, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.04244633238353229 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.42127659574468085, + "acc_stderr": 0.03227834510146268, + "acc_norm": 0.42127659574468085, + "acc_norm_stderr": 0.03227834510146268 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3795180722891566, + "acc_stderr": 0.037777988227480165, + "acc_norm": 0.3795180722891566, + "acc_norm_stderr": 0.037777988227480165 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.48231511254019294, + "acc_stderr": 0.02838032284907713, + "acc_norm": 0.48231511254019294, + "acc_norm_stderr": 0.02838032284907713 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4663677130044843, + "acc_stderr": 0.033481800170603065, + "acc_norm": 0.4663677130044843, + "acc_norm_stderr": 
0.033481800170603065 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.45038167938931295, + "acc_stderr": 0.04363643698524779, + "acc_norm": 0.45038167938931295, + "acc_norm_stderr": 0.04363643698524779 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5707070707070707, + "acc_stderr": 0.035265527246011986, + "acc_norm": 0.5707070707070707, + "acc_norm_stderr": 0.035265527246011986 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4689655172413793, + "acc_stderr": 0.04158632762097828, + "acc_norm": 0.4689655172413793, + "acc_norm_stderr": 0.04158632762097828 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.04023382273617747, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.04023382273617747 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4579831932773109, + "acc_stderr": 0.03236361111951941, + "acc_norm": 0.4579831932773109, + "acc_norm_stderr": 0.03236361111951941 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4564102564102564, + "acc_stderr": 0.025254485424799605, + "acc_norm": 0.4564102564102564, + "acc_norm_stderr": 0.025254485424799605 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.58, + "acc_stderr": 0.04960449637488583, + "acc_norm": 0.58, + "acc_norm_stderr": 0.04960449637488583 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.048262172941398944, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.048262172941398944 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3694581280788177, + "acc_stderr": 0.03395970381998574, + "acc_norm": 
0.3694581280788177, + "acc_norm_stderr": 0.03395970381998574 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4838709677419355, + "acc_stderr": 0.028429203176724555, + "acc_norm": 0.4838709677419355, + "acc_norm_stderr": 0.028429203176724555 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7307692307692307, + "acc_stderr": 0.029058588303748845, + "acc_norm": 0.7307692307692307, + "acc_norm_stderr": 0.029058588303748845 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.47924528301886793, + "acc_stderr": 0.030746349975723463, + "acc_norm": 0.47924528301886793, + "acc_norm_stderr": 0.030746349975723463 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4909090909090909, + "acc_stderr": 0.0478833976870286, + "acc_norm": 0.4909090909090909, + "acc_norm_stderr": 0.0478833976870286 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3074074074074074, + "acc_stderr": 0.028133252578815635, + "acc_norm": 0.3074074074074074, + "acc_norm_stderr": 0.028133252578815635 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.26490066225165565, + "acc_stderr": 0.03603038545360385, + "acc_norm": 0.26490066225165565, + "acc_norm_stderr": 0.03603038545360385 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6318407960199005, + "acc_stderr": 0.03410410565495302, + "acc_norm": 0.6318407960199005, + "acc_norm_stderr": 0.03410410565495302 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.37572254335260113, + "acc_stderr": 0.03692820767264867, + "acc_norm": 0.37572254335260113, + "acc_norm_stderr": 0.03692820767264867 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.373015873015873, + "acc_stderr": 0.02490699045899257, + "acc_norm": 0.373015873015873, + "acc_norm_stderr": 0.02490699045899257 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3194444444444444, + "acc_stderr": 0.038990736873573344, + "acc_norm": 0.3194444444444444, + "acc_norm_stderr": 0.038990736873573344 + }, + 
"harness|ko_mmlu_college_chemistry|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.58, + "acc_stderr": 0.04960449637488584, + "acc_norm": 0.58, + "acc_norm_stderr": 0.04960449637488584 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5, + "acc_stderr": 0.026919095102908273, + "acc_norm": 0.5, + "acc_norm_stderr": 0.026919095102908273 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5153374233128835, + "acc_stderr": 0.039265223787088424, + "acc_norm": 0.5153374233128835, + "acc_norm_stderr": 0.039265223787088424 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4660493827160494, + "acc_stderr": 0.027756535257347666, + "acc_norm": 0.4660493827160494, + "acc_norm_stderr": 0.027756535257347666 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.538860103626943, + "acc_stderr": 0.035975244117345775, + "acc_norm": 0.538860103626943, + "acc_norm_stderr": 0.035975244117345775 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2982456140350877, + "acc_stderr": 0.04303684033537315, + "acc_norm": 0.2982456140350877, + "acc_norm_stderr": 0.04303684033537315 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.47889908256880737, + "acc_stderr": 0.021418224754264643, + "acc_norm": 0.47889908256880737, + "acc_norm_stderr": 0.021418224754264643 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.29365079365079366, + "acc_stderr": 0.04073524322147126, + "acc_norm": 0.29365079365079366, + "acc_norm_stderr": 0.04073524322147126 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5065359477124183, + "acc_stderr": 0.028627470550556047, + "acc_norm": 0.5065359477124183, + "acc_norm_stderr": 0.028627470550556047 + }, + 
"harness|ko_mmlu_business_ethics|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6528925619834711, + "acc_stderr": 0.043457245702925335, + "acc_norm": 0.6528925619834711, + "acc_norm_stderr": 0.043457245702925335 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.42105263157894735, + "acc_stderr": 0.04017901275981748, + "acc_norm": 0.42105263157894735, + "acc_norm_stderr": 0.04017901275981748 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.380718954248366, + "acc_stderr": 0.01964380155792481, + "acc_norm": 0.380718954248366, + "acc_norm_stderr": 0.01964380155792481 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3262411347517731, + "acc_stderr": 0.027968453043563168, + "acc_norm": 0.3262411347517731, + "acc_norm_stderr": 0.027968453043563168 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.38392857142857145, + "acc_stderr": 0.04616143075028547, + "acc_norm": 0.38392857142857145, + "acc_norm_stderr": 0.04616143075028547 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.031415546294025445, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.031415546294025445 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23575418994413408, + "acc_stderr": 0.014196375686290804, + "acc_norm": 0.23575418994413408, + "acc_norm_stderr": 0.014196375686290804 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.64, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.64, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.34558823529411764, + "acc_stderr": 0.028888193103988633, + "acc_norm": 0.34558823529411764, + 
"acc_norm_stderr": 0.028888193103988633 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4816326530612245, + "acc_stderr": 0.03198761546763126, + "acc_norm": 0.4816326530612245, + "acc_norm_stderr": 0.03198761546763126 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5991561181434599, + "acc_stderr": 0.031900803894732356, + "acc_norm": 0.5991561181434599, + "acc_norm_stderr": 0.031900803894732356 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3285528031290743, + "acc_stderr": 0.011996027247502922, + "acc_norm": 0.3285528031290743, + "acc_norm_stderr": 0.011996027247502922 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4852941176470588, + "acc_stderr": 0.035077938347913236, + "acc_norm": 0.4852941176470588, + "acc_norm_stderr": 0.035077938347913236 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4484848484848485, + "acc_stderr": 0.03883565977956929, + "acc_norm": 0.4484848484848485, + "acc_norm_stderr": 0.03883565977956929 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2974296205630355, + "mc1_stderr": 0.01600265148736101, + "mc2": 0.4599450727674709, + "mc2_stderr": 0.015606224187062706 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.48406139315230223, + "acc_stderr": 0.017181617837190195, + "acc_norm": 0.5608028335301063, + "acc_norm_stderr": 0.017062775744780705 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + 
"harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + 
"harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "hwanhe/Mistral_test04", + "model_sha": "6d205df368f10311a3220229fafc0dcf0668e446", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/hwkwon/S-SOLAR-10.7B-SFT-v1.2/result_2024-03-10 06:56:10.json b/hwkwon/S-SOLAR-10.7B-SFT-v1.2/result_2024-03-10 06:56:10.json new file mode 100644 index 0000000000000000000000000000000000000000..9d7e9b65bf2602ec1604082500504bb81fce13d2 --- /dev/null +++ b/hwkwon/S-SOLAR-10.7B-SFT-v1.2/result_2024-03-10 06:56:10.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.5025597269624573, + "acc_stderr": 0.014611199329843788, + "acc_norm": 0.5614334470989761, + "acc_norm_stderr": 0.014500682618212865 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4949213304122685, + "acc_stderr": 0.0049895240030924356, + "acc_norm": 0.6827325234017128, + "acc_norm_stderr": 0.004644613601104162 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6023391812865497, + "acc_stderr": 0.0375363895576169, + "acc_norm": 0.6023391812865497, + "acc_norm_stderr": 0.0375363895576169 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6504854368932039, + "acc_stderr": 0.047211885060971716, + "acc_norm": 0.6504854368932039, + "acc_norm_stderr": 0.047211885060971716 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6551724137931034, + "acc_stderr": 0.016997123346113446, + "acc_norm": 0.6551724137931034, + "acc_norm_stderr": 0.016997123346113446 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45185185185185184, + "acc_stderr": 0.04299268905480863, + "acc_norm": 0.45185185185185184, + "acc_norm_stderr": 0.04299268905480863 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 
0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4553191489361702, + "acc_stderr": 0.03255525359340355, + "acc_norm": 0.4553191489361702, + "acc_norm_stderr": 0.03255525359340355 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.463855421686747, + "acc_stderr": 0.03882310850890594, + "acc_norm": 0.463855421686747, + "acc_norm_stderr": 0.03882310850890594 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6109324758842444, + "acc_stderr": 0.027690337536485376, + "acc_norm": 0.6109324758842444, + "acc_norm_stderr": 0.027690337536485376 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5739910313901345, + "acc_stderr": 0.033188332862172806, + "acc_norm": 0.5739910313901345, + "acc_norm_stderr": 0.033188332862172806 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6335877862595419, + "acc_stderr": 0.04225875451969637, + "acc_norm": 0.6335877862595419, + "acc_norm_stderr": 0.04225875451969637 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7070707070707071, + "acc_stderr": 0.032424979581788166, + "acc_norm": 0.7070707070707071, + "acc_norm_stderr": 0.032424979581788166 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4413793103448276, + "acc_stderr": 0.04137931034482758, + "acc_norm": 0.4413793103448276, + "acc_norm_stderr": 0.04137931034482758 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.04835503696107223, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.04835503696107223 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6218487394957983, + "acc_stderr": 0.031499305777849054, + "acc_norm": 0.6218487394957983, + "acc_norm_stderr": 0.031499305777849054 + }, + 
"harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5717948717948718, + "acc_stderr": 0.025088301454694827, + "acc_norm": 0.5717948717948718, + "acc_norm_stderr": 0.025088301454694827 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.59, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.59, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6018518518518519, + "acc_stderr": 0.04732332615978814, + "acc_norm": 0.6018518518518519, + "acc_norm_stderr": 0.04732332615978814 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39901477832512317, + "acc_stderr": 0.034454876862647144, + "acc_norm": 0.39901477832512317, + "acc_norm_stderr": 0.034454876862647144 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5935483870967742, + "acc_stderr": 0.027941727346256304, + "acc_norm": 0.5935483870967742, + "acc_norm_stderr": 0.027941727346256304 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7521367521367521, + "acc_stderr": 0.028286324075564424, + "acc_norm": 0.7521367521367521, + "acc_norm_stderr": 0.028286324075564424 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5132075471698113, + "acc_stderr": 0.030762134874500476, + "acc_norm": 0.5132075471698113, + "acc_norm_stderr": 0.030762134874500476 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5818181818181818, + "acc_stderr": 0.04724577405731571, + "acc_norm": 0.5818181818181818, + "acc_norm_stderr": 0.04724577405731571 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.337037037037037, + "acc_stderr": 0.028820884666253255, + "acc_norm": 0.337037037037037, + "acc_norm_stderr": 0.028820884666253255 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.03802039760107903, + "acc_norm": 0.31788079470198677, + 
"acc_norm_stderr": 0.03802039760107903 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6716417910447762, + "acc_stderr": 0.033206858897443244, + "acc_norm": 0.6716417910447762, + "acc_norm_stderr": 0.033206858897443244 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4797687861271676, + "acc_stderr": 0.03809342081273957, + "acc_norm": 0.4797687861271676, + "acc_norm_stderr": 0.03809342081273957 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.42063492063492064, + "acc_stderr": 0.025424835086924, + "acc_norm": 0.42063492063492064, + "acc_norm_stderr": 0.025424835086924 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5277777777777778, + "acc_stderr": 0.04174752578923185, + "acc_norm": 0.5277777777777778, + "acc_norm_stderr": 0.04174752578923185 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.68, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.68, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5202312138728323, + "acc_stderr": 0.026897049996382868, + "acc_norm": 0.5202312138728323, + "acc_norm_stderr": 0.026897049996382868 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.50920245398773, + "acc_stderr": 0.03927705600787443, + "acc_norm": 0.50920245398773, + "acc_norm_stderr": 0.03927705600787443 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5925925925925926, + "acc_stderr": 0.027339546640662737, + "acc_norm": 0.5925925925925926, + "acc_norm_stderr": 0.027339546640662737 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7046632124352331, + "acc_stderr": 0.032922966391551414, + "acc_norm": 
0.7046632124352331, + "acc_norm_stderr": 0.032922966391551414 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.47368421052631576, + "acc_stderr": 0.046970851366478626, + "acc_norm": 0.47368421052631576, + "acc_norm_stderr": 0.046970851366478626 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6788990825688074, + "acc_stderr": 0.020018149772733747, + "acc_norm": 0.6788990825688074, + "acc_norm_stderr": 0.020018149772733747 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.373015873015873, + "acc_stderr": 0.04325506042017086, + "acc_norm": 0.373015873015873, + "acc_norm_stderr": 0.04325506042017086 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5718954248366013, + "acc_stderr": 0.028332397483664278, + "acc_norm": 0.5718954248366013, + "acc_norm_stderr": 0.028332397483664278 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6942148760330579, + "acc_stderr": 0.04205953933884122, + "acc_norm": 0.6942148760330579, + "acc_norm_stderr": 0.04205953933884122 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5855263157894737, + "acc_stderr": 0.040089737857792046, + "acc_norm": 0.5855263157894737, + "acc_norm_stderr": 0.040089737857792046 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5179738562091504, + "acc_stderr": 0.020214761037872397, + "acc_norm": 0.5179738562091504, + "acc_norm_stderr": 0.020214761037872397 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3475177304964539, + "acc_stderr": 0.028406627809590947, + "acc_norm": 0.3475177304964539, + "acc_norm_stderr": 0.028406627809590947 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.39285714285714285, + "acc_stderr": 0.04635550135609976, + "acc_norm": 0.39285714285714285, + "acc_norm_stderr": 0.04635550135609976 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 
0.5138888888888888, + "acc_stderr": 0.03408655867977748, + "acc_norm": 0.5138888888888888, + "acc_norm_stderr": 0.03408655867977748 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.39106145251396646, + "acc_stderr": 0.016320763763808383, + "acc_norm": 0.39106145251396646, + "acc_norm_stderr": 0.016320763763808383 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5073529411764706, + "acc_stderr": 0.030369552523902173, + "acc_norm": 0.5073529411764706, + "acc_norm_stderr": 0.030369552523902173 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6081632653061224, + "acc_stderr": 0.031251275910891656, + "acc_norm": 0.6081632653061224, + "acc_norm_stderr": 0.031251275910891656 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7510548523206751, + "acc_stderr": 0.028146970599422644, + "acc_norm": 0.7510548523206751, + "acc_norm_stderr": 0.028146970599422644 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.43415906127770537, + "acc_stderr": 0.01265903323706725, + "acc_norm": 0.43415906127770537, + "acc_norm_stderr": 0.01265903323706725 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6176470588235294, + "acc_stderr": 0.034107853389047205, + "acc_norm": 0.6176470588235294, + "acc_norm_stderr": 0.034107853389047205 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6727272727272727, + "acc_stderr": 0.03663974994391242, + "acc_norm": 0.6727272727272727, + "acc_norm_stderr": 0.03663974994391242 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.48959608323133413, + "mc1_stderr": 0.017499711430249264, + "mc2": 0.6666401623197502, + "mc2_stderr": 
0.015744622543755443 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5360094451003542, + "acc_stderr": 0.017145715365486654, + "acc_norm": 0.5442739079102715, + "acc_norm_stderr": 0.017122829143292644 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 
1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "hwkwon/S-SOLAR-10.7B-SFT-v1.2", + "model_sha": "227b52c95c5babc169629bc113bd0c98e871251b", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/hwkwon/S-SOLAR-10.7B-SFT-v1.3/result_2024-03-18 04:05:27.json b/hwkwon/S-SOLAR-10.7B-SFT-v1.3/result_2024-03-18 04:05:27.json new file mode 100644 index 0000000000000000000000000000000000000000..c1c9756bfcd27a7eec8cd2d6b8e055827fed7b74 --- /dev/null +++ b/hwkwon/S-SOLAR-10.7B-SFT-v1.3/result_2024-03-18 04:05:27.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.46757679180887374, + "acc_stderr": 0.014580637569995421, + "acc_norm": 0.5298634812286689, + "acc_norm_stderr": 0.014585305840007102 + }, + "harness|ko_hellaswag|10": { + "acc": 0.46116311491734713, + "acc_stderr": 0.00497470642843429, + "acc_norm": 0.6393148775144394, + "acc_norm_stderr": 0.004792179052583446 + }, + "harness|ko_mmlu_world_religions|5": 
{ + "acc": 0.7076023391812866, + "acc_stderr": 0.03488647713457922, + "acc_norm": 0.7076023391812866, + "acc_norm_stderr": 0.03488647713457922 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6796116504854369, + "acc_stderr": 0.04620284082280042, + "acc_norm": 0.6796116504854369, + "acc_norm_stderr": 0.04620284082280042 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.7305236270753512, + "acc_stderr": 0.01586624307321502, + "acc_norm": 0.7305236270753512, + "acc_norm_stderr": 0.01586624307321502 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4740740740740741, + "acc_stderr": 0.04313531696750574, + "acc_norm": 0.4740740740740741, + "acc_norm_stderr": 0.04313531696750574 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.5191489361702127, + "acc_stderr": 0.0326620429906468, + "acc_norm": 0.5191489361702127, + "acc_norm_stderr": 0.0326620429906468 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.5120481927710844, + "acc_stderr": 0.03891364495835817, + "acc_norm": 0.5120481927710844, + "acc_norm_stderr": 0.03891364495835817 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6109324758842444, + "acc_stderr": 0.027690337536485372, + "acc_norm": 0.6109324758842444, + "acc_norm_stderr": 0.027690337536485372 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5605381165919282, + "acc_stderr": 0.03331092511038179, + "acc_norm": 0.5605381165919282, + "acc_norm_stderr": 0.03331092511038179 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6259541984732825, + "acc_stderr": 0.04243869242230523, + "acc_norm": 0.6259541984732825, + "acc_norm_stderr": 0.04243869242230523 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 
0.7474747474747475, + "acc_stderr": 0.03095405547036592, + "acc_norm": 0.7474747474747475, + "acc_norm_stderr": 0.03095405547036592 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5103448275862069, + "acc_stderr": 0.04165774775728762, + "acc_norm": 0.5103448275862069, + "acc_norm_stderr": 0.04165774775728762 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.35294117647058826, + "acc_stderr": 0.04755129616062946, + "acc_norm": 0.35294117647058826, + "acc_norm_stderr": 0.04755129616062946 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6428571428571429, + "acc_stderr": 0.031124619309328177, + "acc_norm": 0.6428571428571429, + "acc_norm_stderr": 0.031124619309328177 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.558974358974359, + "acc_stderr": 0.025174048384000714, + "acc_norm": 0.558974358974359, + "acc_norm_stderr": 0.025174048384000714 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.59, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.59, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6296296296296297, + "acc_stderr": 0.04668408033024931, + "acc_norm": 0.6296296296296297, + "acc_norm_stderr": 0.04668408033024931 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.43349753694581283, + "acc_stderr": 0.03486731727419872, + "acc_norm": 0.43349753694581283, + "acc_norm_stderr": 0.03486731727419872 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6193548387096774, + "acc_stderr": 0.027621717832907036, + "acc_norm": 0.6193548387096774, + "acc_norm_stderr": 0.027621717832907036 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7777777777777778, + "acc_stderr": 0.027236013946196673, + "acc_norm": 0.7777777777777778, + "acc_norm_stderr": 0.027236013946196673 + }, + 
"harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5509433962264151, + "acc_stderr": 0.030612730713641092, + "acc_norm": 0.5509433962264151, + "acc_norm_stderr": 0.030612730713641092 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6, + "acc_stderr": 0.0469237132203465, + "acc_norm": 0.6, + "acc_norm_stderr": 0.0469237132203465 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.32592592592592595, + "acc_stderr": 0.028578348365473072, + "acc_norm": 0.32592592592592595, + "acc_norm_stderr": 0.028578348365473072 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3576158940397351, + "acc_stderr": 0.03913453431177258, + "acc_norm": 0.3576158940397351, + "acc_norm_stderr": 0.03913453431177258 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.746268656716418, + "acc_stderr": 0.030769444967296014, + "acc_norm": 0.746268656716418, + "acc_norm_stderr": 0.030769444967296014 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5086705202312138, + "acc_stderr": 0.03811890988940412, + "acc_norm": 0.5086705202312138, + "acc_norm_stderr": 0.03811890988940412 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.025107425481137275, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.025107425481137275 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5833333333333334, + "acc_stderr": 0.04122728707651282, + "acc_norm": 0.5833333333333334, + "acc_norm_stderr": 0.04122728707651282 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.8, + "acc_stderr": 0.040201512610368445, + "acc_norm": 0.8, + "acc_norm_stderr": 0.040201512610368445 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.6098265895953757, + "acc_stderr": 0.02626167760780665, + "acc_norm": 0.6098265895953757, + "acc_norm_stderr": 0.02626167760780665 + }, + 
"harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5705521472392638, + "acc_stderr": 0.03889066619112723, + "acc_norm": 0.5705521472392638, + "acc_norm_stderr": 0.03889066619112723 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6234567901234568, + "acc_stderr": 0.026959344518747787, + "acc_norm": 0.6234567901234568, + "acc_norm_stderr": 0.026959344518747787 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7305699481865285, + "acc_stderr": 0.03201867122877794, + "acc_norm": 0.7305699481865285, + "acc_norm_stderr": 0.03201867122877794 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.42105263157894735, + "acc_stderr": 0.04644602091222317, + "acc_norm": 0.42105263157894735, + "acc_norm_stderr": 0.04644602091222317 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.710091743119266, + "acc_stderr": 0.019453066609201597, + "acc_norm": 0.710091743119266, + "acc_norm_stderr": 0.019453066609201597 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3968253968253968, + "acc_stderr": 0.04375888492727061, + "acc_norm": 0.3968253968253968, + "acc_norm_stderr": 0.04375888492727061 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.6568627450980392, + "acc_stderr": 0.02718449890994161, + "acc_norm": 0.6568627450980392, + "acc_norm_stderr": 0.02718449890994161 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6776859504132231, + "acc_stderr": 0.042664163633521685, + "acc_norm": 0.6776859504132231, + "acc_norm_stderr": 0.042664163633521685 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5921052631578947, + "acc_stderr": 0.039993097127774734, + "acc_norm": 0.5921052631578947, + "acc_norm_stderr": 
0.039993097127774734 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5326797385620915, + "acc_stderr": 0.020184583359102202, + "acc_norm": 0.5326797385620915, + "acc_norm_stderr": 0.020184583359102202 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.36879432624113473, + "acc_stderr": 0.028782227561347243, + "acc_norm": 0.36879432624113473, + "acc_norm_stderr": 0.028782227561347243 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4017857142857143, + "acc_stderr": 0.04653333146973647, + "acc_norm": 0.4017857142857143, + "acc_norm_stderr": 0.04653333146973647 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5138888888888888, + "acc_stderr": 0.03408655867977748, + "acc_norm": 0.5138888888888888, + "acc_norm_stderr": 0.03408655867977748 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.21675977653631284, + "acc_stderr": 0.013780598486443366, + "acc_norm": 0.21675977653631284, + "acc_norm_stderr": 0.013780598486443366 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.0498887651569859, + "acc_norm": 0.44, + "acc_norm_stderr": 0.0498887651569859 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.68, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.68, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5551470588235294, + "acc_stderr": 0.030187532060329383, + "acc_norm": 0.5551470588235294, + "acc_norm_stderr": 0.030187532060329383 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6653061224489796, + "acc_stderr": 0.030209235226242304, + "acc_norm": 0.6653061224489796, + "acc_norm_stderr": 0.030209235226242304 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7510548523206751, + "acc_stderr": 0.028146970599422644, + "acc_norm": 0.7510548523206751, + "acc_norm_stderr": 0.028146970599422644 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.40547588005215124, + 
"acc_stderr": 0.012539960672377204, + "acc_norm": 0.40547588005215124, + "acc_norm_stderr": 0.012539960672377204 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.7058823529411765, + "acc_stderr": 0.03198001660115071, + "acc_norm": 0.7058823529411765, + "acc_norm_stderr": 0.03198001660115071 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.7212121212121212, + "acc_stderr": 0.03501438706296781, + "acc_norm": 0.7212121212121212, + "acc_norm_stderr": 0.03501438706296781 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2974296205630355, + "mc1_stderr": 0.016002651487361005, + "mc2": 0.43783880108557244, + "mc2_stderr": 0.015035818918172005 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.564344746162928, + "acc_stderr": 0.01704741522947632, + "acc_norm": 0.5855962219598583, + "acc_norm_stderr": 0.016936583383943604 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + 
"harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "hwkwon/S-SOLAR-10.7B-SFT-v1.3", + "model_sha": "00da388a7676492c4bc6823600442a7cda42843d", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/hwkwon/S-SOLAR-10.7B-v1.0/result_2024-03-02 01:32:53.json 
b/hwkwon/S-SOLAR-10.7B-v1.0/result_2024-03-02 01:32:53.json new file mode 100644 index 0000000000000000000000000000000000000000..8cc362202afd4d06482496572fd3ee10f88d5cad --- /dev/null +++ b/hwkwon/S-SOLAR-10.7B-v1.0/result_2024-03-02 01:32:53.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.44197952218430037, + "acc_stderr": 0.014512682523128343, + "acc_norm": 0.4974402730375427, + "acc_norm_stderr": 0.014611199329843774 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4508066122286397, + "acc_stderr": 0.004965572246803859, + "acc_norm": 0.6176060545708026, + "acc_norm_stderr": 0.004849788423944373 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.631578947368421, + "acc_stderr": 0.036996580176568775, + "acc_norm": 0.631578947368421, + "acc_norm_stderr": 0.036996580176568775 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6504854368932039, + "acc_stderr": 0.04721188506097172, + "acc_norm": 0.6504854368932039, + "acc_norm_stderr": 0.04721188506097172 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6832694763729247, + "acc_stderr": 0.016635566427712585, + "acc_norm": 0.6832694763729247, + "acc_norm_stderr": 0.016635566427712585 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.043163785995113245, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.043163785995113245 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206824, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206824 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4808510638297872, + "acc_stderr": 0.032662042990646775, + "acc_norm": 0.4808510638297872, + "acc_norm_stderr": 0.032662042990646775 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.46987951807228917, + "acc_stderr": 0.03885425420866767, + "acc_norm": 0.46987951807228917, + "acc_norm_stderr": 0.03885425420866767 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.639871382636656, + 
"acc_stderr": 0.027264297599804015, + "acc_norm": 0.639871382636656, + "acc_norm_stderr": 0.027264297599804015 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5426008968609866, + "acc_stderr": 0.033435777055830646, + "acc_norm": 0.5426008968609866, + "acc_norm_stderr": 0.033435777055830646 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6335877862595419, + "acc_stderr": 0.04225875451969639, + "acc_norm": 0.6335877862595419, + "acc_norm_stderr": 0.04225875451969639 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.45, + "acc_stderr": 0.049999999999999996, + "acc_norm": 0.45, + "acc_norm_stderr": 0.049999999999999996 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7575757575757576, + "acc_stderr": 0.030532892233932036, + "acc_norm": 0.7575757575757576, + "acc_norm_stderr": 0.030532892233932036 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5448275862068965, + "acc_stderr": 0.04149886942192118, + "acc_norm": 0.5448275862068965, + "acc_norm_stderr": 0.04149886942192118 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.04488482852329017, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.04488482852329017 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5966386554621849, + "acc_stderr": 0.031866081214088314, + "acc_norm": 0.5966386554621849, + "acc_norm_stderr": 0.031866081214088314 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5307692307692308, + "acc_stderr": 0.025302958890850158, + "acc_norm": 0.5307692307692308, + "acc_norm_stderr": 0.025302958890850158 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.59, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.59, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 
0.6296296296296297, + "acc_stderr": 0.04668408033024932, + "acc_norm": 0.6296296296296297, + "acc_norm_stderr": 0.04668408033024932 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4039408866995074, + "acc_stderr": 0.03452453903822039, + "acc_norm": 0.4039408866995074, + "acc_norm_stderr": 0.03452453903822039 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5806451612903226, + "acc_stderr": 0.028071588901091824, + "acc_norm": 0.5806451612903226, + "acc_norm_stderr": 0.028071588901091824 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7905982905982906, + "acc_stderr": 0.02665569965392276, + "acc_norm": 0.7905982905982906, + "acc_norm_stderr": 0.02665569965392276 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5320754716981132, + "acc_stderr": 0.030709486992556545, + "acc_norm": 0.5320754716981132, + "acc_norm_stderr": 0.030709486992556545 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6090909090909091, + "acc_stderr": 0.0467375233367024, + "acc_norm": 0.6090909090909091, + "acc_norm_stderr": 0.0467375233367024 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.029116617606083015, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.029116617606083015 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3443708609271523, + "acc_stderr": 0.038796870240733264, + "acc_norm": 0.3443708609271523, + "acc_norm_stderr": 0.038796870240733264 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7114427860696517, + "acc_stderr": 0.03203841040213321, + "acc_norm": 0.7114427860696517, + "acc_norm_stderr": 0.03203841040213321 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5202312138728323, + "acc_stderr": 0.03809342081273956, + "acc_norm": 0.5202312138728323, + "acc_norm_stderr": 0.03809342081273956 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3862433862433862, + "acc_stderr": 0.025075981767601688, + "acc_norm": 0.3862433862433862, + 
"acc_norm_stderr": 0.025075981767601688 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5138888888888888, + "acc_stderr": 0.041795966175810016, + "acc_norm": 0.5138888888888888, + "acc_norm_stderr": 0.041795966175810016 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.77, + "acc_stderr": 0.04229525846816508, + "acc_norm": 0.77, + "acc_norm_stderr": 0.04229525846816508 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.6184971098265896, + "acc_stderr": 0.0261521986197268, + "acc_norm": 0.6184971098265896, + "acc_norm_stderr": 0.0261521986197268 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.558282208588957, + "acc_stderr": 0.039015918258361836, + "acc_norm": 0.558282208588957, + "acc_norm_stderr": 0.039015918258361836 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.595679012345679, + "acc_stderr": 0.027306625297327688, + "acc_norm": 0.595679012345679, + "acc_norm_stderr": 0.027306625297327688 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.694300518134715, + "acc_stderr": 0.033248379397581594, + "acc_norm": 0.694300518134715, + "acc_norm_stderr": 0.033248379397581594 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.4649122807017544, + "acc_stderr": 0.046920083813689104, + "acc_norm": 0.4649122807017544, + "acc_norm_stderr": 0.046920083813689104 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6642201834862386, + "acc_stderr": 0.020248081396752937, + "acc_norm": 0.6642201834862386, + "acc_norm_stderr": 0.020248081396752937 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.04426266681379909, + "acc_norm": 
0.42857142857142855, + "acc_norm_stderr": 0.04426266681379909 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.6176470588235294, + "acc_stderr": 0.02782610930728369, + "acc_norm": 0.6176470588235294, + "acc_norm_stderr": 0.02782610930728369 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.59, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.59, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6942148760330579, + "acc_stderr": 0.04205953933884124, + "acc_norm": 0.6942148760330579, + "acc_norm_stderr": 0.04205953933884124 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.6118421052631579, + "acc_stderr": 0.03965842097512744, + "acc_norm": 0.6118421052631579, + "acc_norm_stderr": 0.03965842097512744 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5179738562091504, + "acc_stderr": 0.020214761037872404, + "acc_norm": 0.5179738562091504, + "acc_norm_stderr": 0.020214761037872404 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.4078014184397163, + "acc_stderr": 0.029316011776343555, + "acc_norm": 0.4078014184397163, + "acc_norm_stderr": 0.029316011776343555 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.39285714285714285, + "acc_stderr": 0.04635550135609976, + "acc_norm": 0.39285714285714285, + "acc_norm_stderr": 0.04635550135609976 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5046296296296297, + "acc_stderr": 0.03409825519163572, + "acc_norm": 0.5046296296296297, + "acc_norm_stderr": 0.03409825519163572 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2346368715083799, + "acc_stderr": 0.014173044098303679, + "acc_norm": 0.2346368715083799, + "acc_norm_stderr": 0.014173044098303679 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.049999999999999996, + "acc_norm": 0.45, + "acc_norm_stderr": 0.049999999999999996 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.67, + 
"acc_stderr": 0.04725815626252607, + "acc_norm": 0.67, + "acc_norm_stderr": 0.04725815626252607 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5073529411764706, + "acc_stderr": 0.030369552523902173, + "acc_norm": 0.5073529411764706, + "acc_norm_stderr": 0.030369552523902173 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6816326530612244, + "acc_stderr": 0.029822533793982038, + "acc_norm": 0.6816326530612244, + "acc_norm_stderr": 0.029822533793982038 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7637130801687764, + "acc_stderr": 0.027652153144159256, + "acc_norm": 0.7637130801687764, + "acc_norm_stderr": 0.027652153144159256 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3859191655801825, + "acc_stderr": 0.012433398911476138, + "acc_norm": 0.3859191655801825, + "acc_norm_stderr": 0.012433398911476138 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6862745098039216, + "acc_stderr": 0.032566854844603886, + "acc_norm": 0.6862745098039216, + "acc_norm_stderr": 0.032566854844603886 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6848484848484848, + "acc_stderr": 0.0362773057502241, + "acc_norm": 0.6848484848484848, + "acc_norm_stderr": 0.0362773057502241 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.29498164014687883, + "mc1_stderr": 0.015964400965589667, + "mc2": 0.4531657371589615, + "mc2_stderr": 0.015265378530730787 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5348288075560803, + "acc_stderr": 0.017148598015747425, + "acc_norm": 0.564344746162928, + "acc_norm_stderr": 0.01704741522947631 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + 
"harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 
1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "hwkwon/S-SOLAR-10.7B-v1.0", + "model_sha": "ed919c0e41f156fcd5dd1f5ec77402e0a3f18ba4", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/hwkwon/S-SOLAR-10.7B-v1.1/result_2024-03-06 08:51:27.json b/hwkwon/S-SOLAR-10.7B-v1.1/result_2024-03-06 08:51:27.json new file mode 100644 index 0000000000000000000000000000000000000000..ca24f84fdd85af32c1aa5f1d4ffd7bc382bd0303 --- /dev/null +++ b/hwkwon/S-SOLAR-10.7B-v1.1/result_2024-03-06 08:51:27.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4658703071672355, + "acc_stderr": 0.014577311315231102, + "acc_norm": 0.5204778156996587, + "acc_norm_stderr": 0.014599131353035002 + }, + "harness|ko_hellaswag|10": { + "acc": 0.46026687910774744, + "acc_stderr": 0.004974001515580963, + "acc_norm": 0.632244572794264, + "acc_norm_stderr": 0.0048120886202771655 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.695906432748538, + "acc_stderr": 0.0352821125824523, + "acc_norm": 0.695906432748538, + "acc_norm_stderr": 0.0352821125824523 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6699029126213593, + "acc_stderr": 0.046561471100123514, + "acc_norm": 0.6699029126213593, + "acc_norm_stderr": 0.046561471100123514 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.7279693486590039, + "acc_stderr": 0.01591336744750054, + "acc_norm": 0.7279693486590039, + "acc_norm_stderr": 0.01591336744750054 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.5037037037037037, + "acc_stderr": 
0.043192236258113324, + "acc_norm": 0.5037037037037037, + "acc_norm_stderr": 0.043192236258113324 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.5191489361702127, + "acc_stderr": 0.032662042990646796, + "acc_norm": 0.5191489361702127, + "acc_norm_stderr": 0.032662042990646796 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.5120481927710844, + "acc_stderr": 0.03891364495835817, + "acc_norm": 0.5120481927710844, + "acc_norm_stderr": 0.03891364495835817 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6109324758842444, + "acc_stderr": 0.027690337536485372, + "acc_norm": 0.6109324758842444, + "acc_norm_stderr": 0.027690337536485372 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5650224215246636, + "acc_stderr": 0.033272833702713445, + "acc_norm": 0.5650224215246636, + "acc_norm_stderr": 0.033272833702713445 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6412213740458015, + "acc_stderr": 0.04206739313864908, + "acc_norm": 0.6412213740458015, + "acc_norm_stderr": 0.04206739313864908 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.48, + "acc_stderr": 0.05021167315686779, + "acc_norm": 0.48, + "acc_norm_stderr": 0.05021167315686779 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7676767676767676, + "acc_stderr": 0.030088629490217483, + "acc_norm": 0.7676767676767676, + "acc_norm_stderr": 0.030088629490217483 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4896551724137931, + "acc_stderr": 0.04165774775728763, + "acc_norm": 0.4896551724137931, + "acc_norm_stderr": 0.04165774775728763 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.30392156862745096, + "acc_stderr": 0.045766654032077615, + "acc_norm": 0.30392156862745096, + "acc_norm_stderr": 0.045766654032077615 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 
0.6218487394957983, + "acc_stderr": 0.03149930577784906, + "acc_norm": 0.6218487394957983, + "acc_norm_stderr": 0.03149930577784906 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5538461538461539, + "acc_stderr": 0.025203571773028323, + "acc_norm": 0.5538461538461539, + "acc_norm_stderr": 0.025203571773028323 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.58, + "acc_stderr": 0.04960449637488583, + "acc_norm": 0.58, + "acc_norm_stderr": 0.04960449637488583 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6481481481481481, + "acc_stderr": 0.04616631111801713, + "acc_norm": 0.6481481481481481, + "acc_norm_stderr": 0.04616631111801713 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.45320197044334976, + "acc_stderr": 0.03502544650845872, + "acc_norm": 0.45320197044334976, + "acc_norm_stderr": 0.03502544650845872 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6258064516129033, + "acc_stderr": 0.027528904299845704, + "acc_norm": 0.6258064516129033, + "acc_norm_stderr": 0.027528904299845704 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7905982905982906, + "acc_stderr": 0.02665569965392276, + "acc_norm": 0.7905982905982906, + "acc_norm_stderr": 0.02665569965392276 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5773584905660377, + "acc_stderr": 0.03040233144576954, + "acc_norm": 0.5773584905660377, + "acc_norm_stderr": 0.03040233144576954 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5909090909090909, + "acc_stderr": 0.04709306978661895, + "acc_norm": 0.5909090909090909, + "acc_norm_stderr": 0.04709306978661895 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.02911661760608301, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.02911661760608301 + }, + 
"harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3973509933774834, + "acc_stderr": 0.03995524007681681, + "acc_norm": 0.3973509933774834, + "acc_norm_stderr": 0.03995524007681681 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7562189054726368, + "acc_stderr": 0.03036049015401464, + "acc_norm": 0.7562189054726368, + "acc_norm_stderr": 0.03036049015401464 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5375722543352601, + "acc_stderr": 0.03801685104524458, + "acc_norm": 0.5375722543352601, + "acc_norm_stderr": 0.03801685104524458 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.025107425481137282, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.025107425481137282 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5486111111111112, + "acc_stderr": 0.04161402398403279, + "acc_norm": 0.5486111111111112, + "acc_norm_stderr": 0.04161402398403279 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.8, + "acc_stderr": 0.040201512610368445, + "acc_norm": 0.8, + "acc_norm_stderr": 0.040201512610368445 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5867052023121387, + "acc_stderr": 0.026511261369409237, + "acc_norm": 0.5867052023121387, + "acc_norm_stderr": 0.026511261369409237 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5950920245398773, + "acc_stderr": 0.038566721635489125, + "acc_norm": 0.5950920245398773, + "acc_norm_stderr": 0.038566721635489125 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6327160493827161, + "acc_stderr": 0.026822801759507894, + "acc_norm": 0.6327160493827161, + "acc_norm_stderr": 0.026822801759507894 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.04793724854411021, + "acc_norm": 0.35, + "acc_norm_stderr": 0.04793724854411021 + }, + 
"harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7564766839378239, + "acc_stderr": 0.030975436386845436, + "acc_norm": 0.7564766839378239, + "acc_norm_stderr": 0.030975436386845436 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.40350877192982454, + "acc_stderr": 0.04615186962583704, + "acc_norm": 0.40350877192982454, + "acc_norm_stderr": 0.04615186962583704 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.7339449541284404, + "acc_stderr": 0.018946022322225593, + "acc_norm": 0.7339449541284404, + "acc_norm_stderr": 0.018946022322225593 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3968253968253968, + "acc_stderr": 0.04375888492727061, + "acc_norm": 0.3968253968253968, + "acc_norm_stderr": 0.04375888492727061 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.6503267973856209, + "acc_stderr": 0.0273053080762747, + "acc_norm": 0.6503267973856209, + "acc_norm_stderr": 0.0273053080762747 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.59, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.59, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7272727272727273, + "acc_stderr": 0.040655781409087044, + "acc_norm": 0.7272727272727273, + "acc_norm_stderr": 0.040655781409087044 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.6052631578947368, + "acc_stderr": 0.039777499346220734, + "acc_norm": 0.6052631578947368, + "acc_norm_stderr": 0.039777499346220734 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5326797385620915, + "acc_stderr": 0.020184583359102202, + "acc_norm": 0.5326797385620915, + "acc_norm_stderr": 0.020184583359102202 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.35106382978723405, + "acc_stderr": 0.028473501272963768, + "acc_norm": 0.35106382978723405, + "acc_norm_stderr": 0.028473501272963768 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.36607142857142855, + "acc_stderr": 0.04572372358737431, + 
"acc_norm": 0.36607142857142855, + "acc_norm_stderr": 0.04572372358737431 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5231481481481481, + "acc_stderr": 0.034063153607115065, + "acc_norm": 0.5231481481481481, + "acc_norm_stderr": 0.034063153607115065 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.22681564245810057, + "acc_stderr": 0.014005843570897887, + "acc_norm": 0.22681564245810057, + "acc_norm_stderr": 0.014005843570897887 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.049999999999999996, + "acc_norm": 0.45, + "acc_norm_stderr": 0.049999999999999996 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.67, + "acc_stderr": 0.04725815626252607, + "acc_norm": 0.67, + "acc_norm_stderr": 0.04725815626252607 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5441176470588235, + "acc_stderr": 0.030254372573976715, + "acc_norm": 0.5441176470588235, + "acc_norm_stderr": 0.030254372573976715 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6612244897959184, + "acc_stderr": 0.030299506562154188, + "acc_norm": 0.6612244897959184, + "acc_norm_stderr": 0.030299506562154188 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7426160337552743, + "acc_stderr": 0.028458820991460302, + "acc_norm": 0.7426160337552743, + "acc_norm_stderr": 0.028458820991460302 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3963494132985658, + "acc_stderr": 0.01249283045209522, + "acc_norm": 0.3963494132985658, + "acc_norm_stderr": 0.01249283045209522 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.7205882352941176, + "acc_stderr": 0.031493281045079556, + "acc_norm": 0.7205882352941176, + "acc_norm_stderr": 0.031493281045079556 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.7090909090909091, + "acc_stderr": 0.03546563019624336, + "acc_norm": 0.7090909090909091, + "acc_norm_stderr": 0.03546563019624336 + }, + 
"harness|ko_truthfulqa_mc|0": { + "mc1": 0.28518971848225216, + "mc1_stderr": 0.015805827874454895, + "mc2": 0.4405398660196526, + "mc2_stderr": 0.015146550050198923 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5289256198347108, + "acc_stderr": 0.017161563949916348, + "acc_norm": 0.5678866587957497, + "acc_norm_stderr": 0.017031170198851742 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "hwkwon/S-SOLAR-10.7B-v1.1", + "model_sha": "9a5dcda876eb3b5ef089456c9687e99e4fd8888b", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/hwkwon/S-SOLAR-10.7B-v1.2/result_2024-03-06 09:05:29.json b/hwkwon/S-SOLAR-10.7B-v1.2/result_2024-03-06 09:05:29.json new file mode 100644 index 0000000000000000000000000000000000000000..a96540b575e5e9c9d6afef6f2d58ba2289a3dd5e --- /dev/null +++ b/hwkwon/S-SOLAR-10.7B-v1.2/result_2024-03-06 09:05:29.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4667235494880546, + "acc_stderr": 0.01457899585960581, + "acc_norm": 0.5213310580204779, + "acc_norm_stderr": 0.014598087973127104 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4607647878908584, + "acc_stderr": 
0.004974395131539589, + "acc_norm": 0.6313483369846644, + "acc_norm_stderr": 0.0048145326425746635 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.7017543859649122, + "acc_stderr": 0.035087719298245626, + "acc_norm": 0.7017543859649122, + "acc_norm_stderr": 0.035087719298245626 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6699029126213593, + "acc_stderr": 0.046561471100123514, + "acc_norm": 0.6699029126213593, + "acc_norm_stderr": 0.046561471100123514 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.7318007662835249, + "acc_stderr": 0.01584243083526947, + "acc_norm": 0.7318007662835249, + "acc_norm_stderr": 0.01584243083526947 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4962962962962963, + "acc_stderr": 0.043192236258113303, + "acc_norm": 0.4962962962962963, + "acc_norm_stderr": 0.043192236258113303 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.5191489361702127, + "acc_stderr": 0.032662042990646796, + "acc_norm": 0.5191489361702127, + "acc_norm_stderr": 0.032662042990646796 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.5120481927710844, + "acc_stderr": 0.03891364495835817, + "acc_norm": 0.5120481927710844, + "acc_norm_stderr": 0.03891364495835817 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.617363344051447, + "acc_stderr": 0.02760468902858199, + "acc_norm": 0.617363344051447, + "acc_norm_stderr": 0.02760468902858199 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.57847533632287, + "acc_stderr": 0.03314190222110658, + "acc_norm": 0.57847533632287, + "acc_norm_stderr": 0.03314190222110658 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.648854961832061, + "acc_stderr": 0.0418644516301375, + "acc_norm": 0.648854961832061, + "acc_norm_stderr": 0.0418644516301375 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.46, + "acc_stderr": 
0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7676767676767676, + "acc_stderr": 0.030088629490217483, + "acc_norm": 0.7676767676767676, + "acc_norm_stderr": 0.030088629490217483 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4896551724137931, + "acc_stderr": 0.04165774775728763, + "acc_norm": 0.4896551724137931, + "acc_norm_stderr": 0.04165774775728763 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.30392156862745096, + "acc_stderr": 0.045766654032077615, + "acc_norm": 0.30392156862745096, + "acc_norm_stderr": 0.045766654032077615 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.634453781512605, + "acc_stderr": 0.03128217706368462, + "acc_norm": 0.634453781512605, + "acc_norm_stderr": 0.03128217706368462 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5487179487179488, + "acc_stderr": 0.025230381238934844, + "acc_norm": 0.5487179487179488, + "acc_norm_stderr": 0.025230381238934844 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.58, + "acc_stderr": 0.04960449637488583, + "acc_norm": 0.58, + "acc_norm_stderr": 0.04960449637488583 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6481481481481481, + "acc_stderr": 0.04616631111801713, + "acc_norm": 0.6481481481481481, + "acc_norm_stderr": 0.04616631111801713 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.45320197044334976, + "acc_stderr": 0.03502544650845872, + "acc_norm": 0.45320197044334976, + "acc_norm_stderr": 0.03502544650845872 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6290322580645161, + "acc_stderr": 0.027480541887953593, + "acc_norm": 0.6290322580645161, + "acc_norm_stderr": 0.027480541887953593 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 
0.7863247863247863, + "acc_stderr": 0.02685345037700914, + "acc_norm": 0.7863247863247863, + "acc_norm_stderr": 0.02685345037700914 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5773584905660377, + "acc_stderr": 0.03040233144576954, + "acc_norm": 0.5773584905660377, + "acc_norm_stderr": 0.03040233144576954 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6, + "acc_stderr": 0.0469237132203465, + "acc_norm": 0.6, + "acc_norm_stderr": 0.0469237132203465 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.02911661760608301, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.02911661760608301 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3973509933774834, + "acc_stderr": 0.03995524007681681, + "acc_norm": 0.3973509933774834, + "acc_norm_stderr": 0.03995524007681681 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7512437810945274, + "acc_stderr": 0.030567675938916714, + "acc_norm": 0.7512437810945274, + "acc_norm_stderr": 0.030567675938916714 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5317919075144508, + "acc_stderr": 0.03804749744364764, + "acc_norm": 0.5317919075144508, + "acc_norm_stderr": 0.03804749744364764 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.38095238095238093, + "acc_stderr": 0.025010749116137602, + "acc_norm": 0.38095238095238093, + "acc_norm_stderr": 0.025010749116137602 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5416666666666666, + "acc_stderr": 0.04166666666666665, + "acc_norm": 0.5416666666666666, + "acc_norm_stderr": 0.04166666666666665 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.8, + "acc_stderr": 0.040201512610368445, + "acc_norm": 0.8, + "acc_norm_stderr": 0.040201512610368445 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 
0.5924855491329479, + "acc_stderr": 0.026454578146931508, + "acc_norm": 0.5924855491329479, + "acc_norm_stderr": 0.026454578146931508 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5950920245398773, + "acc_stderr": 0.038566721635489125, + "acc_norm": 0.5950920245398773, + "acc_norm_stderr": 0.038566721635489125 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6327160493827161, + "acc_stderr": 0.026822801759507894, + "acc_norm": 0.6327160493827161, + "acc_norm_stderr": 0.026822801759507894 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7564766839378239, + "acc_stderr": 0.030975436386845436, + "acc_norm": 0.7564766839378239, + "acc_norm_stderr": 0.030975436386845436 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.41228070175438597, + "acc_stderr": 0.04630653203366596, + "acc_norm": 0.41228070175438597, + "acc_norm_stderr": 0.04630653203366596 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.7321100917431193, + "acc_stderr": 0.01898746225797865, + "acc_norm": 0.7321100917431193, + "acc_norm_stderr": 0.01898746225797865 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3968253968253968, + "acc_stderr": 0.04375888492727061, + "acc_norm": 0.3968253968253968, + "acc_norm_stderr": 0.04375888492727061 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.6503267973856209, + "acc_stderr": 0.0273053080762747, + "acc_norm": 0.6503267973856209, + "acc_norm_stderr": 0.0273053080762747 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.59, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.59, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7272727272727273, + "acc_stderr": 0.040655781409087044, + "acc_norm": 0.7272727272727273, + "acc_norm_stderr": 0.040655781409087044 + }, + 
"harness|ko_mmlu_astronomy|5": { + "acc": 0.6052631578947368, + "acc_stderr": 0.039777499346220734, + "acc_norm": 0.6052631578947368, + "acc_norm_stderr": 0.039777499346220734 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5343137254901961, + "acc_stderr": 0.020180144843307296, + "acc_norm": 0.5343137254901961, + "acc_norm_stderr": 0.020180144843307296 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3617021276595745, + "acc_stderr": 0.0286638201471995, + "acc_norm": 0.3617021276595745, + "acc_norm_stderr": 0.0286638201471995 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.04547960999764376, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.04547960999764376 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5277777777777778, + "acc_stderr": 0.0340470532865388, + "acc_norm": 0.5277777777777778, + "acc_norm_stderr": 0.0340470532865388 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.22569832402234638, + "acc_stderr": 0.013981395058455057, + "acc_norm": 0.22569832402234638, + "acc_norm_stderr": 0.013981395058455057 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.049999999999999996, + "acc_norm": 0.45, + "acc_norm_stderr": 0.049999999999999996 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.68, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.68, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5477941176470589, + "acc_stderr": 0.030233758551596445, + "acc_norm": 0.5477941176470589, + "acc_norm_stderr": 0.030233758551596445 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6612244897959184, + "acc_stderr": 0.030299506562154188, + "acc_norm": 0.6612244897959184, + "acc_norm_stderr": 0.030299506562154188 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7383966244725738, + "acc_stderr": 0.028609516716994934, + 
"acc_norm": 0.7383966244725738, + "acc_norm_stderr": 0.028609516716994934 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3963494132985658, + "acc_stderr": 0.01249283045209522, + "acc_norm": 0.3963494132985658, + "acc_norm_stderr": 0.01249283045209522 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.7156862745098039, + "acc_stderr": 0.03166009679399812, + "acc_norm": 0.7156862745098039, + "acc_norm_stderr": 0.03166009679399812 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.703030303030303, + "acc_stderr": 0.035679697722680474, + "acc_norm": 0.703030303030303, + "acc_norm_stderr": 0.035679697722680474 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.28886168910648713, + "mc1_stderr": 0.015866346401384304, + "mc2": 0.44135383206283857, + "mc2_stderr": 0.015152842003159752 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5336481700118064, + "acc_stderr": 0.017151384117131865, + "acc_norm": 0.5667060212514758, + "acc_norm_stderr": 0.017036683641893098 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + 
"harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "hwkwon/S-SOLAR-10.7B-v1.2", + "model_sha": "cc7a7de94081a7ed989d886592e92b5f3718ac4a", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + 
"override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/hwkwon/S-SOLAR-10.7B-v1.3/result_2024-03-08 12:10:53.json b/hwkwon/S-SOLAR-10.7B-v1.3/result_2024-03-08 12:10:53.json new file mode 100644 index 0000000000000000000000000000000000000000..f61ef831e5901e9ae3338bba942536051adf54f1 --- /dev/null +++ b/hwkwon/S-SOLAR-10.7B-v1.3/result_2024-03-08 12:10:53.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.46928327645051193, + "acc_stderr": 0.014583792546304037, + "acc_norm": 0.5273037542662116, + "acc_norm_stderr": 0.014589589101985993 + }, + "harness|ko_hellaswag|10": { + "acc": 0.45937064329814775, + "acc_stderr": 0.0049732804177055135, + "acc_norm": 0.632742481577375, + "acc_norm_stderr": 0.004810723108378213 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.7076023391812866, + "acc_stderr": 0.03488647713457922, + "acc_norm": 0.7076023391812866, + "acc_norm_stderr": 0.03488647713457922 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.7184466019417476, + "acc_stderr": 0.044532548363264673, + "acc_norm": 0.7184466019417476, + "acc_norm_stderr": 0.044532548363264673 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.719029374201788, + "acc_stderr": 0.016073127851221263, + "acc_norm": 0.719029374201788, + "acc_norm_stderr": 0.016073127851221263 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4888888888888889, + "acc_stderr": 0.04318275491977976, + "acc_norm": 0.4888888888888889, + "acc_norm_stderr": 0.04318275491977976 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421255, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421255 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.5404255319148936, + "acc_stderr": 0.032579014820998335, + "acc_norm": 0.5404255319148936, + "acc_norm_stderr": 0.032579014820998335 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.5301204819277109, + "acc_stderr": 0.03885425420866767, + 
"acc_norm": 0.5301204819277109, + "acc_norm_stderr": 0.03885425420866767 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.617363344051447, + "acc_stderr": 0.02760468902858199, + "acc_norm": 0.617363344051447, + "acc_norm_stderr": 0.02760468902858199 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5695067264573991, + "acc_stderr": 0.0332319730294294, + "acc_norm": 0.5695067264573991, + "acc_norm_stderr": 0.0332319730294294 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6335877862595419, + "acc_stderr": 0.04225875451969639, + "acc_norm": 0.6335877862595419, + "acc_norm_stderr": 0.04225875451969639 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7575757575757576, + "acc_stderr": 0.030532892233932036, + "acc_norm": 0.7575757575757576, + "acc_norm_stderr": 0.030532892233932036 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.503448275862069, + "acc_stderr": 0.04166567577101579, + "acc_norm": 0.503448275862069, + "acc_norm_stderr": 0.04166567577101579 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3235294117647059, + "acc_stderr": 0.046550104113196177, + "acc_norm": 0.3235294117647059, + "acc_norm_stderr": 0.046550104113196177 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6050420168067226, + "acc_stderr": 0.031753678460966245, + "acc_norm": 0.6050420168067226, + "acc_norm_stderr": 0.031753678460966245 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5384615384615384, + "acc_stderr": 0.025275892070240655, + "acc_norm": 0.5384615384615384, + "acc_norm_stderr": 0.025275892070240655 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.57, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.57, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 
0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6388888888888888, + "acc_stderr": 0.04643454608906275, + "acc_norm": 0.6388888888888888, + "acc_norm_stderr": 0.04643454608906275 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.43349753694581283, + "acc_stderr": 0.034867317274198714, + "acc_norm": 0.43349753694581283, + "acc_norm_stderr": 0.034867317274198714 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6258064516129033, + "acc_stderr": 0.027528904299845704, + "acc_norm": 0.6258064516129033, + "acc_norm_stderr": 0.027528904299845704 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7991452991452992, + "acc_stderr": 0.026246772946890477, + "acc_norm": 0.7991452991452992, + "acc_norm_stderr": 0.026246772946890477 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.569811320754717, + "acc_stderr": 0.030471445867183238, + "acc_norm": 0.569811320754717, + "acc_norm_stderr": 0.030471445867183238 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6272727272727273, + "acc_stderr": 0.04631381319425465, + "acc_norm": 0.6272727272727273, + "acc_norm_stderr": 0.04631381319425465 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.028742040903948492, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.028742040903948492 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3708609271523179, + "acc_stderr": 0.03943966699183629, + "acc_norm": 0.3708609271523179, + "acc_norm_stderr": 0.03943966699183629 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.746268656716418, + "acc_stderr": 0.030769444967296014, + "acc_norm": 0.746268656716418, + "acc_norm_stderr": 0.030769444967296014 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5260115606936416, + "acc_stderr": 0.03807301726504513, + "acc_norm": 0.5260115606936416, + "acc_norm_stderr": 0.03807301726504513 + }, + 
"harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3915343915343915, + "acc_stderr": 0.025138091388851112, + "acc_norm": 0.3915343915343915, + "acc_norm_stderr": 0.025138091388851112 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5763888888888888, + "acc_stderr": 0.0413212501972337, + "acc_norm": 0.5763888888888888, + "acc_norm_stderr": 0.0413212501972337 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.81, + "acc_stderr": 0.03942772444036623, + "acc_norm": 0.81, + "acc_norm_stderr": 0.03942772444036623 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.6242774566473989, + "acc_stderr": 0.02607431485165708, + "acc_norm": 0.6242774566473989, + "acc_norm_stderr": 0.02607431485165708 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5766871165644172, + "acc_stderr": 0.03881891213334385, + "acc_norm": 0.5766871165644172, + "acc_norm_stderr": 0.03881891213334385 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6358024691358025, + "acc_stderr": 0.026774929899722327, + "acc_norm": 0.6358024691358025, + "acc_norm_stderr": 0.026774929899722327 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7409326424870466, + "acc_stderr": 0.031618779179354094, + "acc_norm": 0.7409326424870466, + "acc_norm_stderr": 0.031618779179354094 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.45614035087719296, + "acc_stderr": 0.046854730419077895, + "acc_norm": 0.45614035087719296, + "acc_norm_stderr": 0.046854730419077895 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.7339449541284404, + "acc_stderr": 0.018946022322225593, + "acc_norm": 0.7339449541284404, + "acc_norm_stderr": 
0.018946022322225593 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.40476190476190477, + "acc_stderr": 0.043902592653775614, + "acc_norm": 0.40476190476190477, + "acc_norm_stderr": 0.043902592653775614 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.6339869281045751, + "acc_stderr": 0.027582811415159614, + "acc_norm": 0.6339869281045751, + "acc_norm_stderr": 0.027582811415159614 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.61, + "acc_stderr": 0.049020713000019756, + "acc_norm": 0.61, + "acc_norm_stderr": 0.049020713000019756 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7355371900826446, + "acc_stderr": 0.04026187527591207, + "acc_norm": 0.7355371900826446, + "acc_norm_stderr": 0.04026187527591207 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.6118421052631579, + "acc_stderr": 0.03965842097512744, + "acc_norm": 0.6118421052631579, + "acc_norm_stderr": 0.03965842097512744 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5375816993464052, + "acc_stderr": 0.02017061497496977, + "acc_norm": 0.5375816993464052, + "acc_norm_stderr": 0.02017061497496977 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3617021276595745, + "acc_stderr": 0.028663820147199502, + "acc_norm": 0.3617021276595745, + "acc_norm_stderr": 0.028663820147199502 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4017857142857143, + "acc_stderr": 0.04653333146973647, + "acc_norm": 0.4017857142857143, + "acc_norm_stderr": 0.04653333146973647 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5231481481481481, + "acc_stderr": 0.034063153607115065, + "acc_norm": 0.5231481481481481, + "acc_norm_stderr": 0.034063153607115065 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.21564245810055865, + "acc_stderr": 0.013754835975482348, + "acc_norm": 0.21564245810055865, + "acc_norm_stderr": 0.013754835975482348 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + 
"acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.69, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.69, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5257352941176471, + "acc_stderr": 0.030332578094555033, + "acc_norm": 0.5257352941176471, + "acc_norm_stderr": 0.030332578094555033 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6530612244897959, + "acc_stderr": 0.0304725260267265, + "acc_norm": 0.6530612244897959, + "acc_norm_stderr": 0.0304725260267265 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7341772151898734, + "acc_stderr": 0.028756799629658335, + "acc_norm": 0.7341772151898734, + "acc_norm_stderr": 0.028756799629658335 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.4048239895697523, + "acc_stderr": 0.012536743830953994, + "acc_norm": 0.4048239895697523, + "acc_norm_stderr": 0.012536743830953994 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.7058823529411765, + "acc_stderr": 0.03198001660115072, + "acc_norm": 0.7058823529411765, + "acc_norm_stderr": 0.03198001660115072 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.696969696969697, + "acc_stderr": 0.03588624800091709, + "acc_norm": 0.696969696969697, + "acc_norm_stderr": 0.03588624800091709 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2827417380660955, + "mc1_stderr": 0.01576477083677731, + "mc2": 0.4336409997692196, + "mc2_stderr": 0.0150540919260045 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5466351829988194, + "acc_stderr": 0.01711541822522687, + "acc_norm": 0.5702479338842975, + "acc_norm_stderr": 0.017019847535972202 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, 
+ "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + 
"harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "hwkwon/S-SOLAR-10.7B-v1.3", + "model_sha": "fb0982ec3e1b301258c60d65c80afcb88f9f5415", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/hwkwon/S-SOLAR-10.7B-v1.4/result_2024-03-21 06:06:00.json b/hwkwon/S-SOLAR-10.7B-v1.4/result_2024-03-21 06:06:00.json new file mode 100644 index 0000000000000000000000000000000000000000..0a4a762b6073d57894b9bb0028dbdeeb03494531 --- /dev/null +++ b/hwkwon/S-SOLAR-10.7B-v1.4/result_2024-03-21 06:06:00.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.697098976109215, + "acc_stderr": 0.013428241573185349, + "acc_norm": 0.7389078498293515, + "acc_norm_stderr": 0.01283552390947385 + }, + "harness|ko_hellaswag|10": { + "acc": 0.5690101573391755, + "acc_stderr": 0.004942026200279571, + "acc_norm": 0.722266480780721, + "acc_norm_stderr": 0.004469659042824782 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.7309941520467836, + "acc_stderr": 0.03401052620104089, + "acc_norm": 0.7309941520467836, + "acc_norm_stderr": 0.03401052620104089 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.7378640776699029, + "acc_stderr": 0.043546310772605956, + "acc_norm": 0.7378640776699029, + "acc_norm_stderr": 0.043546310772605956 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.70242656449553, + "acc_stderr": 0.01634911191290942, + "acc_norm": 
0.70242656449553, + "acc_norm_stderr": 0.01634911191290942 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.043097329010363554, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.043097329010363554 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.5234042553191489, + "acc_stderr": 0.0326501947503358, + "acc_norm": 0.5234042553191489, + "acc_norm_stderr": 0.0326501947503358 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.5, + "acc_stderr": 0.03892494720807614, + "acc_norm": 0.5, + "acc_norm_stderr": 0.03892494720807614 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6559485530546624, + "acc_stderr": 0.026981478043648043, + "acc_norm": 0.6559485530546624, + "acc_norm_stderr": 0.026981478043648043 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.6367713004484304, + "acc_stderr": 0.03227790442850499, + "acc_norm": 0.6367713004484304, + "acc_norm_stderr": 0.03227790442850499 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5725190839694656, + "acc_stderr": 0.043389203057924, + "acc_norm": 0.5725190839694656, + "acc_norm_stderr": 0.043389203057924 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.797979797979798, + "acc_stderr": 0.028606204289229876, + "acc_norm": 0.797979797979798, + "acc_norm_stderr": 0.028606204289229876 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5724137931034483, + "acc_stderr": 0.041227371113703316, + "acc_norm": 0.5724137931034483, + "acc_norm_stderr": 0.041227371113703316 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.30392156862745096, + "acc_stderr": 0.045766654032077615, + "acc_norm": 0.30392156862745096, + 
"acc_norm_stderr": 0.045766654032077615 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6302521008403361, + "acc_stderr": 0.03135709599613591, + "acc_norm": 0.6302521008403361, + "acc_norm_stderr": 0.03135709599613591 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.6410256410256411, + "acc_stderr": 0.02432173848460235, + "acc_norm": 0.6410256410256411, + "acc_norm_stderr": 0.02432173848460235 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.71, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.71, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.04557239513497751, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.04557239513497751 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4827586206896552, + "acc_stderr": 0.035158955511656986, + "acc_norm": 0.4827586206896552, + "acc_norm_stderr": 0.035158955511656986 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.635483870967742, + "acc_stderr": 0.02737987122994325, + "acc_norm": 0.635483870967742, + "acc_norm_stderr": 0.02737987122994325 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.8333333333333334, + "acc_stderr": 0.024414947304543688, + "acc_norm": 0.8333333333333334, + "acc_norm_stderr": 0.024414947304543688 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5735849056603773, + "acc_stderr": 0.030437794342983052, + "acc_norm": 0.5735849056603773, + "acc_norm_stderr": 0.030437794342983052 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6272727272727273, + "acc_stderr": 0.046313813194254656, + "acc_norm": 0.6272727272727273, + "acc_norm_stderr": 0.046313813194254656 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3851851851851852, + "acc_stderr": 
0.029670906124630882, + "acc_norm": 0.3851851851851852, + "acc_norm_stderr": 0.029670906124630882 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3509933774834437, + "acc_stderr": 0.03896981964257374, + "acc_norm": 0.3509933774834437, + "acc_norm_stderr": 0.03896981964257374 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7412935323383084, + "acc_stderr": 0.030965903123573037, + "acc_norm": 0.7412935323383084, + "acc_norm_stderr": 0.030965903123573037 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5375722543352601, + "acc_stderr": 0.03801685104524458, + "acc_norm": 0.5375722543352601, + "acc_norm_stderr": 0.03801685104524458 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.4656084656084656, + "acc_stderr": 0.025690321762493855, + "acc_norm": 0.4656084656084656, + "acc_norm_stderr": 0.025690321762493855 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.6527777777777778, + "acc_stderr": 0.039812405437178615, + "acc_norm": 0.6527777777777778, + "acc_norm_stderr": 0.039812405437178615 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.78, + "acc_stderr": 0.04163331998932263, + "acc_norm": 0.78, + "acc_norm_stderr": 0.04163331998932263 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.6098265895953757, + "acc_stderr": 0.02626167760780665, + "acc_norm": 0.6098265895953757, + "acc_norm_stderr": 0.02626167760780665 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.6012269938650306, + "acc_stderr": 0.03847021420456024, + "acc_norm": 0.6012269938650306, + "acc_norm_stderr": 0.03847021420456024 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6728395061728395, + "acc_stderr": 0.026105673861409825, + "acc_norm": 0.6728395061728395, + "acc_norm_stderr": 0.026105673861409825 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + 
"acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7616580310880829, + "acc_stderr": 0.030748905363909895, + "acc_norm": 0.7616580310880829, + "acc_norm_stderr": 0.030748905363909895 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.45614035087719296, + "acc_stderr": 0.046854730419077895, + "acc_norm": 0.45614035087719296, + "acc_norm_stderr": 0.046854730419077895 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.7467889908256881, + "acc_stderr": 0.01864407304137505, + "acc_norm": 0.7467889908256881, + "acc_norm_stderr": 0.01864407304137505 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.044444444444444495, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.044444444444444495 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.6372549019607843, + "acc_stderr": 0.02753007844711031, + "acc_norm": 0.6372549019607843, + "acc_norm_stderr": 0.02753007844711031 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.69, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.69, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.71900826446281, + "acc_stderr": 0.04103203830514512, + "acc_norm": 0.71900826446281, + "acc_norm_stderr": 0.04103203830514512 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.6644736842105263, + "acc_stderr": 0.03842498559395268, + "acc_norm": 0.6644736842105263, + "acc_norm_stderr": 0.03842498559395268 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5849673202614379, + "acc_stderr": 0.01993362777685742, + "acc_norm": 0.5849673202614379, + "acc_norm_stderr": 0.01993362777685742 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3971631205673759, + "acc_stderr": 0.029189805673587102, + "acc_norm": 0.3971631205673759, + "acc_norm_stderr": 0.029189805673587102 + }, + 
"harness|ko_mmlu_machine_learning|5": { + "acc": 0.41964285714285715, + "acc_stderr": 0.04684099321077106, + "acc_norm": 0.41964285714285715, + "acc_norm_stderr": 0.04684099321077106 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4861111111111111, + "acc_stderr": 0.034086558679777494, + "acc_norm": 0.4861111111111111, + "acc_norm_stderr": 0.034086558679777494 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.4145251396648045, + "acc_stderr": 0.016476342210254, + "acc_norm": 0.4145251396648045, + "acc_norm_stderr": 0.016476342210254 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.7, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.7, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5477941176470589, + "acc_stderr": 0.03023375855159644, + "acc_norm": 0.5477941176470589, + "acc_norm_stderr": 0.03023375855159644 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6816326530612244, + "acc_stderr": 0.02982253379398204, + "acc_norm": 0.6816326530612244, + "acc_norm_stderr": 0.02982253379398204 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7552742616033755, + "acc_stderr": 0.027985699387036416, + "acc_norm": 0.7552742616033755, + "acc_norm_stderr": 0.027985699387036416 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.44589308996088656, + "acc_stderr": 0.012695244711379781, + "acc_norm": 0.44589308996088656, + "acc_norm_stderr": 0.012695244711379781 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6617647058823529, + "acc_stderr": 0.0332057461294543, + "acc_norm": 0.6617647058823529, + "acc_norm_stderr": 0.0332057461294543 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6424242424242425, + "acc_stderr": 0.03742597043806587, + 
"acc_norm": 0.6424242424242425, + "acc_norm_stderr": 0.03742597043806587 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.6988984088127295, + "mc1_stderr": 0.0160589990261006, + "mc2": 0.7957239210066106, + "mc2_stderr": 0.013319893277389111 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5395513577331759, + "acc_stderr": 0.017136487626049846, + "acc_norm": 0.5560802833530106, + "acc_norm_stderr": 0.017081884623542543 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + 
"harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "hwkwon/S-SOLAR-10.7B-v1.4", + "model_sha": "9a1bb664b9a76a88ef4222332f3bbc62acded72b", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/hwkwon/S-SOLAR-10.7B-v1.5/result_2024-03-29 05:35:13.json b/hwkwon/S-SOLAR-10.7B-v1.5/result_2024-03-29 05:35:13.json new file mode 100644 index 0000000000000000000000000000000000000000..d355fc82d4fa9d89379abebbacb0b0fd0c94af3d --- /dev/null +++ b/hwkwon/S-SOLAR-10.7B-v1.5/result_2024-03-29 05:35:13.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.7329351535836177, + "acc_stderr": 0.01292893319649635, + "acc_norm": 0.7627986348122867, + "acc_norm_stderr": 0.012430399829260854 + }, + 
"harness|ko_hellaswag|10": { + "acc": 0.6912965544712208, + "acc_stderr": 0.00461014357555347, + "acc_norm": 0.8085042820155347, + "acc_norm_stderr": 0.003926740595179811 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6900584795321637, + "acc_stderr": 0.035469769593931624, + "acc_norm": 0.6900584795321637, + "acc_norm_stderr": 0.035469769593931624 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.7378640776699029, + "acc_stderr": 0.043546310772605956, + "acc_norm": 0.7378640776699029, + "acc_norm_stderr": 0.043546310772605956 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.685823754789272, + "acc_stderr": 0.01659929173588493, + "acc_norm": 0.685823754789272, + "acc_norm_stderr": 0.01659929173588493 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45185185185185184, + "acc_stderr": 0.04299268905480863, + "acc_norm": 0.45185185185185184, + "acc_norm_stderr": 0.04299268905480863 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4765957446808511, + "acc_stderr": 0.03265019475033583, + "acc_norm": 0.4765957446808511, + "acc_norm_stderr": 0.03265019475033583 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.463855421686747, + "acc_stderr": 0.03882310850890594, + "acc_norm": 0.463855421686747, + "acc_norm_stderr": 0.03882310850890594 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6334405144694534, + "acc_stderr": 0.027368078243971646, + "acc_norm": 0.6334405144694534, + "acc_norm_stderr": 0.027368078243971646 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.6457399103139013, + "acc_stderr": 0.032100621541349864, + "acc_norm": 0.6457399103139013, + "acc_norm_stderr": 0.032100621541349864 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.549618320610687, + "acc_stderr": 0.04363643698524779, + "acc_norm": 0.549618320610687, + "acc_norm_stderr": 0.04363643698524779 + }, + 
"harness|ko_mmlu_medical_genetics|5": { + "acc": 0.51, + "acc_stderr": 0.050241839379569095, + "acc_norm": 0.51, + "acc_norm_stderr": 0.050241839379569095 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7626262626262627, + "acc_stderr": 0.030313710538198906, + "acc_norm": 0.7626262626262627, + "acc_norm_stderr": 0.030313710538198906 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5310344827586206, + "acc_stderr": 0.04158632762097828, + "acc_norm": 0.5310344827586206, + "acc_norm_stderr": 0.04158632762097828 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3627450980392157, + "acc_stderr": 0.047840607041056527, + "acc_norm": 0.3627450980392157, + "acc_norm_stderr": 0.047840607041056527 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6260504201680672, + "acc_stderr": 0.03142946637883708, + "acc_norm": 0.6260504201680672, + "acc_norm_stderr": 0.03142946637883708 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.617948717948718, + "acc_stderr": 0.024635549163908237, + "acc_norm": 0.617948717948718, + "acc_norm_stderr": 0.024635549163908237 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.65, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.65, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6388888888888888, + "acc_stderr": 0.04643454608906275, + "acc_norm": 0.6388888888888888, + "acc_norm_stderr": 0.04643454608906275 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4729064039408867, + "acc_stderr": 0.03512819077876106, + "acc_norm": 0.4729064039408867, + "acc_norm_stderr": 0.03512819077876106 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5967741935483871, + "acc_stderr": 0.027906150826041146, + "acc_norm": 0.5967741935483871, + "acc_norm_stderr": 
0.027906150826041146 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.8376068376068376, + "acc_stderr": 0.024161618127987745, + "acc_norm": 0.8376068376068376, + "acc_norm_stderr": 0.024161618127987745 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5509433962264151, + "acc_stderr": 0.030612730713641092, + "acc_norm": 0.5509433962264151, + "acc_norm_stderr": 0.030612730713641092 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6, + "acc_stderr": 0.0469237132203465, + "acc_norm": 0.6, + "acc_norm_stderr": 0.0469237132203465 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.028742040903948496, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.028742040903948496 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33774834437086093, + "acc_stderr": 0.038615575462551684, + "acc_norm": 0.33774834437086093, + "acc_norm_stderr": 0.038615575462551684 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7164179104477612, + "acc_stderr": 0.03187187537919796, + "acc_norm": 0.7164179104477612, + "acc_norm_stderr": 0.03187187537919796 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5144508670520231, + "acc_stderr": 0.03810871630454764, + "acc_norm": 0.5144508670520231, + "acc_norm_stderr": 0.03810871630454764 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.4497354497354497, + "acc_stderr": 0.02562085704293665, + "acc_norm": 0.4497354497354497, + "acc_norm_stderr": 0.02562085704293665 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.6388888888888888, + "acc_stderr": 0.04016660030451233, + "acc_norm": 0.6388888888888888, + "acc_norm_stderr": 0.04016660030451233 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.71, + "acc_stderr": 0.04560480215720683, + "acc_norm": 0.71, + "acc_norm_stderr": 
0.04560480215720683 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5664739884393064, + "acc_stderr": 0.026680134761679217, + "acc_norm": 0.5664739884393064, + "acc_norm_stderr": 0.026680134761679217 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.6073619631901841, + "acc_stderr": 0.0383674090783103, + "acc_norm": 0.6073619631901841, + "acc_norm_stderr": 0.0383674090783103 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.654320987654321, + "acc_stderr": 0.026462487777001862, + "acc_norm": 0.654320987654321, + "acc_norm_stderr": 0.026462487777001862 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7461139896373057, + "acc_stderr": 0.03141024780565318, + "acc_norm": 0.7461139896373057, + "acc_norm_stderr": 0.03141024780565318 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.45614035087719296, + "acc_stderr": 0.046854730419077895, + "acc_norm": 0.45614035087719296, + "acc_norm_stderr": 0.046854730419077895 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.7174311926605504, + "acc_stderr": 0.01930424349770715, + "acc_norm": 0.7174311926605504, + "acc_norm_stderr": 0.01930424349770715 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4126984126984127, + "acc_stderr": 0.04403438954768177, + "acc_norm": 0.4126984126984127, + "acc_norm_stderr": 0.04403438954768177 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5751633986928104, + "acc_stderr": 0.02830457667314111, + "acc_norm": 0.5751633986928104, + "acc_norm_stderr": 0.02830457667314111 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.69, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.69, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6942148760330579, + "acc_stderr": 0.04205953933884123, + "acc_norm": 
0.6942148760330579, + "acc_norm_stderr": 0.04205953933884123 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.6381578947368421, + "acc_stderr": 0.03910525752849724, + "acc_norm": 0.6381578947368421, + "acc_norm_stderr": 0.03910525752849724 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5702614379084967, + "acc_stderr": 0.020027122784928554, + "acc_norm": 0.5702614379084967, + "acc_norm_stderr": 0.020027122784928554 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.4326241134751773, + "acc_stderr": 0.02955545423677885, + "acc_norm": 0.4326241134751773, + "acc_norm_stderr": 0.02955545423677885 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4375, + "acc_stderr": 0.04708567521880525, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.04708567521880525 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.0340763209385405, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.0340763209385405 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.3329608938547486, + "acc_stderr": 0.015761716178397563, + "acc_norm": 0.3329608938547486, + "acc_norm_stderr": 0.015761716178397563 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.68, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.68, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.49264705882352944, + "acc_stderr": 0.030369552523902173, + "acc_norm": 0.49264705882352944, + "acc_norm_stderr": 0.030369552523902173 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6653061224489796, + "acc_stderr": 0.030209235226242304, + "acc_norm": 0.6653061224489796, + "acc_norm_stderr": 0.030209235226242304 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7383966244725738, + 
"acc_stderr": 0.028609516716994934, + "acc_norm": 0.7383966244725738, + "acc_norm_stderr": 0.028609516716994934 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.44589308996088656, + "acc_stderr": 0.012695244711379781, + "acc_norm": 0.44589308996088656, + "acc_norm_stderr": 0.012695244711379781 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6813725490196079, + "acc_stderr": 0.032702871814820816, + "acc_norm": 0.6813725490196079, + "acc_norm_stderr": 0.032702871814820816 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6242424242424243, + "acc_stderr": 0.03781887353205983, + "acc_norm": 0.6242424242424243, + "acc_norm_stderr": 0.03781887353205983 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.762545899632803, + "mc1_stderr": 0.014896277441041822, + "mc2": 0.843259298813541, + "mc2_stderr": 0.012118553781604036 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5064935064935064, + "acc_stderr": 0.017188904359077314, + "acc_norm": 0.5419126328217237, + "acc_norm_stderr": 0.017129852117911147 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + 
"harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "hwkwon/S-SOLAR-10.7B-v1.5", + "model_sha": "6dbacbb7436fa677e4c3b4258c6fa68161613df6", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, 
+ "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/hyeogi/SOLAR-10.7B-dpo-v0.1/result_2023-12-31 07:40:27.json b/hyeogi/SOLAR-10.7B-dpo-v0.1/result_2023-12-31 07:40:27.json new file mode 100644 index 0000000000000000000000000000000000000000..950ffb6c6d1ea4d8f521b5d9ccc4734a9ae0c6e3 --- /dev/null +++ b/hyeogi/SOLAR-10.7B-dpo-v0.1/result_2023-12-31 07:40:27.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4283276450511945, + "acc_stderr": 0.01446049636759902, + "acc_norm": 0.47952218430034127, + "acc_norm_stderr": 0.014599131353035017 + }, + "harness|ko_hellaswag|10": { + "acc": 0.43696474805815577, + "acc_stderr": 0.004949969363017665, + "acc_norm": 0.594901414060944, + "acc_norm_stderr": 0.004899078300184254 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5964912280701754, + "acc_stderr": 0.03762738699917057, + "acc_norm": 0.5964912280701754, + "acc_norm_stderr": 0.03762738699917057 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6019417475728155, + "acc_stderr": 0.04846748253977238, + "acc_norm": 0.6019417475728155, + "acc_norm_stderr": 0.04846748253977238 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6232439335887612, + "acc_stderr": 0.01732829290730306, + "acc_norm": 0.6232439335887612, + "acc_norm_stderr": 0.01732829290730306 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.04292596718256981, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.04292596718256981 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.18, + "acc_stderr": 0.038612291966536955, + "acc_norm": 0.18, + "acc_norm_stderr": 0.038612291966536955 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4595744680851064, + "acc_stderr": 0.032579014820998356, + "acc_norm": 0.4595744680851064, + "acc_norm_stderr": 0.032579014820998356 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4819277108433735, + "acc_stderr": 
0.038899512528272166, + "acc_norm": 0.4819277108433735, + "acc_norm_stderr": 0.038899512528272166 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6077170418006431, + "acc_stderr": 0.02773125864701199, + "acc_norm": 0.6077170418006431, + "acc_norm_stderr": 0.02773125864701199 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5426008968609866, + "acc_stderr": 0.033435777055830646, + "acc_norm": 0.5426008968609866, + "acc_norm_stderr": 0.033435777055830646 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5877862595419847, + "acc_stderr": 0.04317171194870254, + "acc_norm": 0.5877862595419847, + "acc_norm_stderr": 0.04317171194870254 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465918, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465918 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7070707070707071, + "acc_stderr": 0.03242497958178817, + "acc_norm": 0.7070707070707071, + "acc_norm_stderr": 0.03242497958178817 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.46206896551724136, + "acc_stderr": 0.04154659671707548, + "acc_norm": 0.46206896551724136, + "acc_norm_stderr": 0.04154659671707548 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.043364327079931785, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.043364327079931785 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5840336134453782, + "acc_stderr": 0.03201650100739611, + "acc_norm": 0.5840336134453782, + "acc_norm_stderr": 0.03201650100739611 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5256410256410257, + "acc_stderr": 0.02531764972644868, + "acc_norm": 0.5256410256410257, + "acc_norm_stderr": 0.02531764972644868 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.59, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.59, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_global_facts|5": { + 
"acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6111111111111112, + "acc_stderr": 0.04712821257426769, + "acc_norm": 0.6111111111111112, + "acc_norm_stderr": 0.04712821257426769 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3645320197044335, + "acc_stderr": 0.0338640574606209, + "acc_norm": 0.3645320197044335, + "acc_norm_stderr": 0.0338640574606209 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5548387096774193, + "acc_stderr": 0.028272410186214906, + "acc_norm": 0.5548387096774193, + "acc_norm_stderr": 0.028272410186214906 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7863247863247863, + "acc_stderr": 0.02685345037700915, + "acc_norm": 0.7863247863247863, + "acc_norm_stderr": 0.02685345037700915 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.539622641509434, + "acc_stderr": 0.030676096599389188, + "acc_norm": 0.539622641509434, + "acc_norm_stderr": 0.030676096599389188 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5727272727272728, + "acc_stderr": 0.047381987035454834, + "acc_norm": 0.5727272727272728, + "acc_norm_stderr": 0.047381987035454834 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2851851851851852, + "acc_stderr": 0.027528599210340492, + "acc_norm": 0.2851851851851852, + "acc_norm_stderr": 0.027528599210340492 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33774834437086093, + "acc_stderr": 0.038615575462551684, + "acc_norm": 0.33774834437086093, + "acc_norm_stderr": 0.038615575462551684 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7263681592039801, + "acc_stderr": 0.031524391865554044, + "acc_norm": 0.7263681592039801, + "acc_norm_stderr": 0.031524391865554044 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.47398843930635837, + "acc_stderr": 0.03807301726504511, + "acc_norm": 0.47398843930635837, + "acc_norm_stderr": 
0.03807301726504511 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.42063492063492064, + "acc_stderr": 0.025424835086924006, + "acc_norm": 0.42063492063492064, + "acc_norm_stderr": 0.025424835086924006 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.041553199555931467, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.041553199555931467 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.67, + "acc_stderr": 0.04725815626252607, + "acc_norm": 0.67, + "acc_norm_stderr": 0.04725815626252607 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5404624277456648, + "acc_stderr": 0.026830805998952243, + "acc_norm": 0.5404624277456648, + "acc_norm_stderr": 0.026830805998952243 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5030674846625767, + "acc_stderr": 0.03928297078179663, + "acc_norm": 0.5030674846625767, + "acc_norm_stderr": 0.03928297078179663 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5740740740740741, + "acc_stderr": 0.027513747284379424, + "acc_norm": 0.5740740740740741, + "acc_norm_stderr": 0.027513747284379424 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421296, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421296 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6735751295336787, + "acc_stderr": 0.033840286211432945, + "acc_norm": 0.6735751295336787, + "acc_norm_stderr": 0.033840286211432945 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.45614035087719296, + "acc_stderr": 0.04685473041907789, + "acc_norm": 0.45614035087719296, + "acc_norm_stderr": 0.04685473041907789 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.636697247706422, + "acc_stderr": 0.020620603919625804, + "acc_norm": 
0.636697247706422, + "acc_norm_stderr": 0.020620603919625804 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.42063492063492064, + "acc_stderr": 0.04415438226743744, + "acc_norm": 0.42063492063492064, + "acc_norm_stderr": 0.04415438226743744 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5686274509803921, + "acc_stderr": 0.028358956313423545, + "acc_norm": 0.5686274509803921, + "acc_norm_stderr": 0.028358956313423545 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7024793388429752, + "acc_stderr": 0.04173349148083499, + "acc_norm": 0.7024793388429752, + "acc_norm_stderr": 0.04173349148083499 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4868421052631579, + "acc_stderr": 0.04067533136309173, + "acc_norm": 0.4868421052631579, + "acc_norm_stderr": 0.04067533136309173 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.47875816993464054, + "acc_stderr": 0.020209572388600244, + "acc_norm": 0.47875816993464054, + "acc_norm_stderr": 0.020209572388600244 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.37943262411347517, + "acc_stderr": 0.028947338851614105, + "acc_norm": 0.37943262411347517, + "acc_norm_stderr": 0.028947338851614105 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.44642857142857145, + "acc_stderr": 0.047184714852195886, + "acc_norm": 0.44642857142857145, + "acc_norm_stderr": 0.047184714852195886 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5324074074074074, + "acc_stderr": 0.03402801581358966, + "acc_norm": 0.5324074074074074, + "acc_norm_stderr": 0.03402801581358966 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2871508379888268, + "acc_stderr": 0.01513160884996375, + "acc_norm": 0.2871508379888268, + "acc_norm_stderr": 0.01513160884996375 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.44, + 
"acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.63, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.63, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4889705882352941, + "acc_stderr": 0.03036544647727568, + "acc_norm": 0.4889705882352941, + "acc_norm_stderr": 0.03036544647727568 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5591836734693878, + "acc_stderr": 0.03178419114175363, + "acc_norm": 0.5591836734693878, + "acc_norm_stderr": 0.03178419114175363 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7215189873417721, + "acc_stderr": 0.029178682304842534, + "acc_norm": 0.7215189873417721, + "acc_norm_stderr": 0.029178682304842534 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.39374185136897, + "acc_stderr": 0.012478532272564439, + "acc_norm": 0.39374185136897, + "acc_norm_stderr": 0.012478532272564439 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6274509803921569, + "acc_stderr": 0.033933885849584046, + "acc_norm": 0.6274509803921569, + "acc_norm_stderr": 0.033933885849584046 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6303030303030303, + "acc_stderr": 0.037694303145125695, + "acc_norm": 0.6303030303030303, + "acc_norm_stderr": 0.037694303145125695 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.4320685434516524, + "mc1_stderr": 0.01734120239498825, + "mc2": 0.6097452426860501, + "mc2_stderr": 0.015682133857043576 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5832349468713105, + "acc_stderr": 0.016950489146108826, + "acc_norm": 0.6174734356552538, + "acc_norm_stderr": 0.016709165387228837 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + 
"harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 
1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "hyeogi/SOLAR-10.7B-dpo-v0.1", + "model_sha": "8e657d79a30c8030a2e5bebfc60425e6a849a5bc", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/hyeogi/SOLAR-10.7B-dpo-v1/result_2024-01-10 00:44:41.json b/hyeogi/SOLAR-10.7B-dpo-v1/result_2024-01-10 00:44:41.json new file mode 100644 index 0000000000000000000000000000000000000000..1bd02f1bedfc5959f87c203bce46ecbb59733eb3 --- /dev/null +++ b/hyeogi/SOLAR-10.7B-dpo-v1/result_2024-01-10 00:44:41.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.454778156996587, + "acc_stderr": 0.014551507060836353, + "acc_norm": 0.515358361774744, + "acc_norm_stderr": 0.014604496129394911 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4433379804819757, + "acc_stderr": 0.00495763764842647, + "acc_norm": 0.607149970125473, + "acc_norm_stderr": 0.004873858323840783 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6374269005847953, + "acc_stderr": 0.036871306155620606, + "acc_norm": 0.6374269005847953, + "acc_norm_stderr": 0.036871306155620606 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6116504854368932, + "acc_stderr": 0.0482572933735639, + "acc_norm": 0.6116504854368932, + "acc_norm_stderr": 0.0482572933735639 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6819923371647509, + 
"acc_stderr": 0.016653486275615404, + "acc_norm": 0.6819923371647509, + "acc_norm_stderr": 0.016653486275615404 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.043163785995113245, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.043163785995113245 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.46808510638297873, + "acc_stderr": 0.03261936918467382, + "acc_norm": 0.46808510638297873, + "acc_norm_stderr": 0.03261936918467382 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.463855421686747, + "acc_stderr": 0.03882310850890594, + "acc_norm": 0.463855421686747, + "acc_norm_stderr": 0.03882310850890594 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5819935691318328, + "acc_stderr": 0.028013651891995076, + "acc_norm": 0.5819935691318328, + "acc_norm_stderr": 0.028013651891995076 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5336322869955157, + "acc_stderr": 0.033481800170603065, + "acc_norm": 0.5336322869955157, + "acc_norm_stderr": 0.033481800170603065 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5038167938931297, + "acc_stderr": 0.043851623256015534, + "acc_norm": 0.5038167938931297, + "acc_norm_stderr": 0.043851623256015534 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6767676767676768, + "acc_stderr": 0.03332299921070646, + "acc_norm": 0.6767676767676768, + "acc_norm_stderr": 0.03332299921070646 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4068965517241379, + "acc_stderr": 0.040937939812662374, + "acc_norm": 0.4068965517241379, + "acc_norm_stderr": 0.040937939812662374 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 
0.2647058823529412, + "acc_stderr": 0.04389869956808778, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.04389869956808778 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4831932773109244, + "acc_stderr": 0.03246013680375308, + "acc_norm": 0.4831932773109244, + "acc_norm_stderr": 0.03246013680375308 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5128205128205128, + "acc_stderr": 0.02534267129380724, + "acc_norm": 0.5128205128205128, + "acc_norm_stderr": 0.02534267129380724 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.59, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.59, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6018518518518519, + "acc_stderr": 0.04732332615978813, + "acc_norm": 0.6018518518518519, + "acc_norm_stderr": 0.04732332615978813 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3842364532019704, + "acc_stderr": 0.0342239856565755, + "acc_norm": 0.3842364532019704, + "acc_norm_stderr": 0.0342239856565755 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5870967741935483, + "acc_stderr": 0.028009138125400387, + "acc_norm": 0.5870967741935483, + "acc_norm_stderr": 0.028009138125400387 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7307692307692307, + "acc_stderr": 0.029058588303748842, + "acc_norm": 0.7307692307692307, + "acc_norm_stderr": 0.029058588303748842 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5433962264150943, + "acc_stderr": 0.030656748696739438, + "acc_norm": 0.5433962264150943, + "acc_norm_stderr": 0.030656748696739438 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6, + "acc_stderr": 0.0469237132203465, + "acc_norm": 0.6, + "acc_norm_stderr": 0.0469237132203465 + }, + "harness|ko_mmlu_high_school_mathematics|5": 
{ + "acc": 0.31851851851851853, + "acc_stderr": 0.02840653309060846, + "acc_norm": 0.31851851851851853, + "acc_norm_stderr": 0.02840653309060846 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3443708609271523, + "acc_stderr": 0.038796870240733264, + "acc_norm": 0.3443708609271523, + "acc_norm_stderr": 0.038796870240733264 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6616915422885572, + "acc_stderr": 0.03345563070339191, + "acc_norm": 0.6616915422885572, + "acc_norm_stderr": 0.03345563070339191 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4277456647398844, + "acc_stderr": 0.03772446857518027, + "acc_norm": 0.4277456647398844, + "acc_norm_stderr": 0.03772446857518027 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.328042328042328, + "acc_stderr": 0.02418049716437691, + "acc_norm": 0.328042328042328, + "acc_norm_stderr": 0.02418049716437691 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4791666666666667, + "acc_stderr": 0.04177578950739993, + "acc_norm": 0.4791666666666667, + "acc_norm_stderr": 0.04177578950739993 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.67, + "acc_stderr": 0.04725815626252607, + "acc_norm": 0.67, + "acc_norm_stderr": 0.04725815626252607 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5375722543352601, + "acc_stderr": 0.02684298551961537, + "acc_norm": 0.5375722543352601, + "acc_norm_stderr": 0.02684298551961537 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.48466257668711654, + "acc_stderr": 0.039265223787088445, + "acc_norm": 0.48466257668711654, + "acc_norm_stderr": 0.039265223787088445 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5277777777777778, + "acc_stderr": 0.027777777777777804, + "acc_norm": 0.5277777777777778, + "acc_norm_stderr": 0.027777777777777804 + }, + 
"harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6632124352331606, + "acc_stderr": 0.03410780251836184, + "acc_norm": 0.6632124352331606, + "acc_norm_stderr": 0.03410780251836184 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3508771929824561, + "acc_stderr": 0.044895393502706986, + "acc_norm": 0.3508771929824561, + "acc_norm_stderr": 0.044895393502706986 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6256880733944954, + "acc_stderr": 0.02074895940898832, + "acc_norm": 0.6256880733944954, + "acc_norm_stderr": 0.02074895940898832 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04216370213557835, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04216370213557835 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.477124183006536, + "acc_stderr": 0.028599936776089782, + "acc_norm": 0.477124183006536, + "acc_norm_stderr": 0.028599936776089782 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6611570247933884, + "acc_stderr": 0.043207678075366705, + "acc_norm": 0.6611570247933884, + "acc_norm_stderr": 0.043207678075366705 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5328947368421053, + "acc_stderr": 0.040601270352363966, + "acc_norm": 0.5328947368421053, + "acc_norm_stderr": 0.040601270352363966 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.45751633986928103, + "acc_stderr": 0.020154685712590895, + "acc_norm": 0.45751633986928103, + "acc_norm_stderr": 0.020154685712590895 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.35106382978723405, + "acc_stderr": 0.028473501272963764, + "acc_norm": 0.35106382978723405, 
+ "acc_norm_stderr": 0.028473501272963764 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.36607142857142855, + "acc_stderr": 0.04572372358737431, + "acc_norm": 0.36607142857142855, + "acc_norm_stderr": 0.04572372358737431 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.03400603625538272, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.03400603625538272 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.3318435754189944, + "acc_stderr": 0.015748421208187306, + "acc_norm": 0.3318435754189944, + "acc_norm_stderr": 0.015748421208187306 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.04999999999999999, + "acc_norm": 0.45, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.59, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.59, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.45588235294117646, + "acc_stderr": 0.030254372573976698, + "acc_norm": 0.45588235294117646, + "acc_norm_stderr": 0.030254372573976698 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.45714285714285713, + "acc_stderr": 0.03189141832421398, + "acc_norm": 0.45714285714285713, + "acc_norm_stderr": 0.03189141832421398 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7088607594936709, + "acc_stderr": 0.02957160106575337, + "acc_norm": 0.7088607594936709, + "acc_norm_stderr": 0.02957160106575337 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3546284224250326, + "acc_stderr": 0.01221857643909018, + "acc_norm": 0.3546284224250326, + "acc_norm_stderr": 0.01221857643909018 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6372549019607843, + "acc_stderr": 0.03374499356319355, + "acc_norm": 0.6372549019607843, + "acc_norm_stderr": 0.03374499356319355 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 
0.6484848484848484, + "acc_stderr": 0.037282069986826503, + "acc_norm": 0.6484848484848484, + "acc_norm_stderr": 0.037282069986826503 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.397796817625459, + "mc1_stderr": 0.01713393424855963, + "mc2": 0.5798336413360368, + "mc2_stderr": 0.015574576460707772 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.6115702479338843, + "acc_stderr": 0.01675692157106942, + "acc_norm": 0.6611570247933884, + "acc_norm_stderr": 0.01627295299701914 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + 
"harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "hyeogi/SOLAR-10.7B-dpo-v1", + "model_sha": "d0159f5516ab6b4fb0c5f7e44e02c8b2c092fca0", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/hyeogi/SOLAR-10.7B-v1.1/result_2024-02-01 00:10:08.json b/hyeogi/SOLAR-10.7B-v1.1/result_2024-02-01 00:10:08.json new file mode 100644 index 0000000000000000000000000000000000000000..2b1a5740d3e9fa088071bd6dad4ffe0ede8ddb83 --- /dev/null +++ b/hyeogi/SOLAR-10.7B-v1.1/result_2024-02-01 00:10:08.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4709897610921502, + "acc_stderr": 0.014586776355294314, + 
"acc_norm": 0.5366894197952219, + "acc_norm_stderr": 0.01457200052775699 + }, + "harness|ko_hellaswag|10": { + "acc": 0.43945429197371044, + "acc_stderr": 0.004953063404791457, + "acc_norm": 0.608743278231428, + "acc_norm_stderr": 0.004870342592915047 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6374269005847953, + "acc_stderr": 0.0368713061556206, + "acc_norm": 0.6374269005847953, + "acc_norm_stderr": 0.0368713061556206 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5728155339805825, + "acc_stderr": 0.04897957737781168, + "acc_norm": 0.5728155339805825, + "acc_norm_stderr": 0.04897957737781168 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6551724137931034, + "acc_stderr": 0.016997123346113443, + "acc_norm": 0.6551724137931034, + "acc_norm_stderr": 0.016997123346113443 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.043163785995113245, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.043163785995113245 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.46382978723404256, + "acc_stderr": 0.032600385118357715, + "acc_norm": 0.46382978723404256, + "acc_norm_stderr": 0.032600385118357715 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.45180722891566266, + "acc_stderr": 0.03874371556587953, + "acc_norm": 0.45180722891566266, + "acc_norm_stderr": 0.03874371556587953 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6334405144694534, + "acc_stderr": 0.02736807824397163, + "acc_norm": 0.6334405144694534, + "acc_norm_stderr": 0.02736807824397163 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5695067264573991, + "acc_stderr": 0.033231973029429394, + "acc_norm": 0.5695067264573991, + "acc_norm_stderr": 0.033231973029429394 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6030534351145038, + "acc_stderr": 
0.04291135671009224, + "acc_norm": 0.6030534351145038, + "acc_norm_stderr": 0.04291135671009224 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.48, + "acc_stderr": 0.05021167315686779, + "acc_norm": 0.48, + "acc_norm_stderr": 0.05021167315686779 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7272727272727273, + "acc_stderr": 0.03173071239071724, + "acc_norm": 0.7272727272727273, + "acc_norm_stderr": 0.03173071239071724 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5103448275862069, + "acc_stderr": 0.04165774775728763, + "acc_norm": 0.5103448275862069, + "acc_norm_stderr": 0.04165774775728763 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.042801058373643966, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.042801058373643966 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5798319327731093, + "acc_stderr": 0.03206183783236152, + "acc_norm": 0.5798319327731093, + "acc_norm_stderr": 0.03206183783236152 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5128205128205128, + "acc_stderr": 0.02534267129380724, + "acc_norm": 0.5128205128205128, + "acc_norm_stderr": 0.02534267129380724 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.57, + "acc_stderr": 0.04975698519562429, + "acc_norm": 0.57, + "acc_norm_stderr": 0.04975698519562429 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6296296296296297, + "acc_stderr": 0.04668408033024931, + "acc_norm": 0.6296296296296297, + "acc_norm_stderr": 0.04668408033024931 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4236453201970443, + "acc_stderr": 0.03476725747649037, + "acc_norm": 0.4236453201970443, + "acc_norm_stderr": 0.03476725747649037 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 
0.5612903225806452, + "acc_stderr": 0.02822949732031721, + "acc_norm": 0.5612903225806452, + "acc_norm_stderr": 0.02822949732031721 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.782051282051282, + "acc_stderr": 0.02704685763071667, + "acc_norm": 0.782051282051282, + "acc_norm_stderr": 0.02704685763071667 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5509433962264151, + "acc_stderr": 0.030612730713641092, + "acc_norm": 0.5509433962264151, + "acc_norm_stderr": 0.030612730713641092 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5636363636363636, + "acc_stderr": 0.04750185058907296, + "acc_norm": 0.5636363636363636, + "acc_norm_stderr": 0.04750185058907296 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.32222222222222224, + "acc_stderr": 0.028493465091028593, + "acc_norm": 0.32222222222222224, + "acc_norm_stderr": 0.028493465091028593 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.03780445850526733, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.03780445850526733 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7313432835820896, + "acc_stderr": 0.03134328358208954, + "acc_norm": 0.7313432835820896, + "acc_norm_stderr": 0.03134328358208954 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.47398843930635837, + "acc_stderr": 0.03807301726504511, + "acc_norm": 0.47398843930635837, + "acc_norm_stderr": 0.03807301726504511 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3862433862433862, + "acc_stderr": 0.025075981767601688, + "acc_norm": 0.3862433862433862, + "acc_norm_stderr": 0.025075981767601688 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5277777777777778, + "acc_stderr": 0.04174752578923185, + "acc_norm": 0.5277777777777778, + "acc_norm_stderr": 0.04174752578923185 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + 
}, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.75, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.75, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.6011560693641619, + "acc_stderr": 0.02636243757454654, + "acc_norm": 0.6011560693641619, + "acc_norm_stderr": 0.02636243757454654 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5337423312883436, + "acc_stderr": 0.039194155450484096, + "acc_norm": 0.5337423312883436, + "acc_norm_stderr": 0.039194155450484096 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6049382716049383, + "acc_stderr": 0.027201117666925654, + "acc_norm": 0.6049382716049383, + "acc_norm_stderr": 0.027201117666925654 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7046632124352331, + "acc_stderr": 0.03292296639155141, + "acc_norm": 0.7046632124352331, + "acc_norm_stderr": 0.03292296639155141 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.37719298245614036, + "acc_stderr": 0.04559522141958216, + "acc_norm": 0.37719298245614036, + "acc_norm_stderr": 0.04559522141958216 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6587155963302752, + "acc_stderr": 0.02032861281659244, + "acc_norm": 0.6587155963302752, + "acc_norm_stderr": 0.02032861281659244 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.42063492063492064, + "acc_stderr": 0.04415438226743744, + "acc_norm": 0.42063492063492064, + "acc_norm_stderr": 0.04415438226743744 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5784313725490197, + "acc_stderr": 0.02827549015679145, + "acc_norm": 0.5784313725490197, + "acc_norm_stderr": 0.02827549015679145 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + 
}, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7107438016528925, + "acc_stderr": 0.041391127276354626, + "acc_norm": 0.7107438016528925, + "acc_norm_stderr": 0.041391127276354626 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5460526315789473, + "acc_stderr": 0.04051646342874143, + "acc_norm": 0.5460526315789473, + "acc_norm_stderr": 0.04051646342874143 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5245098039215687, + "acc_stderr": 0.020203517280261447, + "acc_norm": 0.5245098039215687, + "acc_norm_stderr": 0.020203517280261447 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.39361702127659576, + "acc_stderr": 0.029144544781596143, + "acc_norm": 0.39361702127659576, + "acc_norm_stderr": 0.029144544781596143 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.375, + "acc_stderr": 0.04595091388086298, + "acc_norm": 0.375, + "acc_norm_stderr": 0.04595091388086298 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.49537037037037035, + "acc_stderr": 0.03409825519163572, + "acc_norm": 0.49537037037037035, + "acc_norm_stderr": 0.03409825519163572 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.3240223463687151, + "acc_stderr": 0.015652542496421125, + "acc_norm": 0.3240223463687151, + "acc_norm_stderr": 0.015652542496421125 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.64, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.64, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.47794117647058826, + "acc_stderr": 0.03034326422421352, + "acc_norm": 0.47794117647058826, + "acc_norm_stderr": 0.03034326422421352 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6571428571428571, + "acc_stderr": 0.030387262919547728, + "acc_norm": 0.6571428571428571, + 
"acc_norm_stderr": 0.030387262919547728 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7763713080168776, + "acc_stderr": 0.027123298205229966, + "acc_norm": 0.7763713080168776, + "acc_norm_stderr": 0.027123298205229966 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.41134289439374183, + "acc_stderr": 0.012567882673803689, + "acc_norm": 0.41134289439374183, + "acc_norm_stderr": 0.012567882673803689 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6715686274509803, + "acc_stderr": 0.03296245110172229, + "acc_norm": 0.6715686274509803, + "acc_norm_stderr": 0.03296245110172229 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.7272727272727273, + "acc_stderr": 0.0347769116216366, + "acc_norm": 0.7272727272727273, + "acc_norm_stderr": 0.0347769116216366 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.39412484700122397, + "mc1_stderr": 0.017106588140700322, + "mc2": 0.5671596078505355, + "mc2_stderr": 0.0155796255292381 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5360094451003542, + "acc_stderr": 0.017145715365486664, + "acc_norm": 0.5608028335301063, + "acc_norm_stderr": 0.017062775744780708 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + 
"harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "hyeogi/SOLAR-10.7B-v1.1", + "model_sha": 
"4c98961487612994dff393a1931e6b401eabd6aa", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/hyeogi/SOLAR-10.7B-v1.2/result_2024-02-01 02:08:15.json b/hyeogi/SOLAR-10.7B-v1.2/result_2024-02-01 02:08:15.json new file mode 100644 index 0000000000000000000000000000000000000000..71a77ec1689d4232de3b1169408bd62afc302a20 --- /dev/null +++ b/hyeogi/SOLAR-10.7B-v1.2/result_2024-02-01 02:08:15.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.46757679180887374, + "acc_stderr": 0.01458063756999542, + "acc_norm": 0.5204778156996587, + "acc_norm_stderr": 0.014599131353035004 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4547898824935272, + "acc_stderr": 0.004969341773423516, + "acc_norm": 0.6200955984863573, + "acc_norm_stderr": 0.004843708550386536 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6432748538011696, + "acc_stderr": 0.03674013002860954, + "acc_norm": 0.6432748538011696, + "acc_norm_stderr": 0.03674013002860954 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6504854368932039, + "acc_stderr": 0.047211885060971716, + "acc_norm": 0.6504854368932039, + "acc_norm_stderr": 0.047211885060971716 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.016857391247472542, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.016857391247472542 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.043163785995113245, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.043163785995113245 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421255, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421255 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.5148936170212766, + "acc_stderr": 0.032671518489247764, + "acc_norm": 0.5148936170212766, 
+ "acc_norm_stderr": 0.032671518489247764 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.46987951807228917, + "acc_stderr": 0.03885425420866767, + "acc_norm": 0.46987951807228917, + "acc_norm_stderr": 0.03885425420866767 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6430868167202572, + "acc_stderr": 0.027210420375934023, + "acc_norm": 0.6430868167202572, + "acc_norm_stderr": 0.027210420375934023 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.57847533632287, + "acc_stderr": 0.03314190222110658, + "acc_norm": 0.57847533632287, + "acc_norm_stderr": 0.03314190222110658 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6412213740458015, + "acc_stderr": 0.04206739313864908, + "acc_norm": 0.6412213740458015, + "acc_norm_stderr": 0.04206739313864908 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7373737373737373, + "acc_stderr": 0.03135305009533087, + "acc_norm": 0.7373737373737373, + "acc_norm_stderr": 0.03135305009533087 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5103448275862069, + "acc_stderr": 0.04165774775728763, + "acc_norm": 0.5103448275862069, + "acc_norm_stderr": 0.04165774775728763 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.04488482852329017, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.04488482852329017 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6050420168067226, + "acc_stderr": 0.03175367846096625, + "acc_norm": 0.6050420168067226, + "acc_norm_stderr": 0.03175367846096625 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5333333333333333, + "acc_stderr": 0.02529460802398645, + "acc_norm": 0.5333333333333333, + "acc_norm_stderr": 0.02529460802398645 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.6, + "acc_stderr": 
0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6481481481481481, + "acc_stderr": 0.04616631111801714, + "acc_norm": 0.6481481481481481, + "acc_norm_stderr": 0.04616631111801714 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4482758620689655, + "acc_stderr": 0.034991131376767445, + "acc_norm": 0.4482758620689655, + "acc_norm_stderr": 0.034991131376767445 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.603225806451613, + "acc_stderr": 0.027831231605767948, + "acc_norm": 0.603225806451613, + "acc_norm_stderr": 0.027831231605767948 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7905982905982906, + "acc_stderr": 0.026655699653922754, + "acc_norm": 0.7905982905982906, + "acc_norm_stderr": 0.026655699653922754 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5433962264150943, + "acc_stderr": 0.030656748696739435, + "acc_norm": 0.5433962264150943, + "acc_norm_stderr": 0.030656748696739435 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6272727272727273, + "acc_stderr": 0.04631381319425465, + "acc_norm": 0.6272727272727273, + "acc_norm_stderr": 0.04631381319425465 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3111111111111111, + "acc_stderr": 0.02822644674968352, + "acc_norm": 0.3111111111111111, + "acc_norm_stderr": 0.02822644674968352 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3443708609271523, + "acc_stderr": 0.03879687024073327, + "acc_norm": 0.3443708609271523, + "acc_norm_stderr": 0.03879687024073327 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7164179104477612, + "acc_stderr": 0.03187187537919796, + "acc_norm": 0.7164179104477612, + "acc_norm_stderr": 0.03187187537919796 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 
0.5028901734104047, + "acc_stderr": 0.038124005659748335, + "acc_norm": 0.5028901734104047, + "acc_norm_stderr": 0.038124005659748335 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.4021164021164021, + "acc_stderr": 0.025253032554997685, + "acc_norm": 0.4021164021164021, + "acc_norm_stderr": 0.025253032554997685 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5, + "acc_stderr": 0.04181210050035455, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04181210050035455 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.75, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.75, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.6329479768786127, + "acc_stderr": 0.02595005433765407, + "acc_norm": 0.6329479768786127, + "acc_norm_stderr": 0.02595005433765407 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5766871165644172, + "acc_stderr": 0.03881891213334383, + "acc_norm": 0.5766871165644172, + "acc_norm_stderr": 0.03881891213334383 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6049382716049383, + "acc_stderr": 0.027201117666925647, + "acc_norm": 0.6049382716049383, + "acc_norm_stderr": 0.027201117666925647 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7046632124352331, + "acc_stderr": 0.03292296639155141, + "acc_norm": 0.7046632124352331, + "acc_norm_stderr": 0.03292296639155141 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.4298245614035088, + "acc_stderr": 0.04657047260594964, + "acc_norm": 0.4298245614035088, + "acc_norm_stderr": 0.04657047260594964 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 
0.6752293577981652, + "acc_stderr": 0.020077729109310327, + "acc_norm": 0.6752293577981652, + "acc_norm_stderr": 0.020077729109310327 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.04360314860077459, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.04360314860077459 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.6078431372549019, + "acc_stderr": 0.027956046165424516, + "acc_norm": 0.6078431372549019, + "acc_norm_stderr": 0.027956046165424516 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7355371900826446, + "acc_stderr": 0.04026187527591206, + "acc_norm": 0.7355371900826446, + "acc_norm_stderr": 0.04026187527591206 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5789473684210527, + "acc_stderr": 0.040179012759817494, + "acc_norm": 0.5789473684210527, + "acc_norm_stderr": 0.040179012759817494 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5130718954248366, + "acc_stderr": 0.02022092082962691, + "acc_norm": 0.5130718954248366, + "acc_norm_stderr": 0.02022092082962691 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.39361702127659576, + "acc_stderr": 0.029144544781596143, + "acc_norm": 0.39361702127659576, + "acc_norm_stderr": 0.029144544781596143 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.375, + "acc_stderr": 0.04595091388086298, + "acc_norm": 0.375, + "acc_norm_stderr": 0.04595091388086298 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5370370370370371, + "acc_stderr": 0.03400603625538272, + "acc_norm": 0.5370370370370371, + "acc_norm_stderr": 0.03400603625538272 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2245810055865922, + "acc_stderr": 0.01395680366654464, + "acc_norm": 0.2245810055865922, + "acc_norm_stderr": 0.01395680366654464 + }, + 
"harness|ko_mmlu_college_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.7, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.7, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4852941176470588, + "acc_stderr": 0.03035969707904612, + "acc_norm": 0.4852941176470588, + "acc_norm_stderr": 0.03035969707904612 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6857142857142857, + "acc_stderr": 0.02971932942241746, + "acc_norm": 0.6857142857142857, + "acc_norm_stderr": 0.02971932942241746 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7383966244725738, + "acc_stderr": 0.028609516716994934, + "acc_norm": 0.7383966244725738, + "acc_norm_stderr": 0.028609516716994934 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3924380704041721, + "acc_stderr": 0.012471243669229104, + "acc_norm": 0.3924380704041721, + "acc_norm_stderr": 0.012471243669229104 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6813725490196079, + "acc_stderr": 0.03270287181482081, + "acc_norm": 0.6813725490196079, + "acc_norm_stderr": 0.03270287181482081 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.703030303030303, + "acc_stderr": 0.035679697722680474, + "acc_norm": 0.703030303030303, + "acc_norm_stderr": 0.035679697722680474 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3157894736842105, + "mc1_stderr": 0.016272287957916926, + "mc2": 0.46589098446210675, + "mc2_stderr": 0.01523764607105186 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5584415584415584, + "acc_stderr": 0.017072525875563106, + "acc_norm": 0.5985832349468713, + "acc_norm_stderr": 0.01685290785872906 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + 
"harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + 
"harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "hyeogi/SOLAR-10.7B-v1.2", + "model_sha": "91cffe5b0dcb98efe0ea7d98a79bdb14ae98ca21", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/hyeogi/SOLAR-10.7B-v1.3/result_2024-02-09 06:02:57.json b/hyeogi/SOLAR-10.7B-v1.3/result_2024-02-09 06:02:57.json new file mode 100644 index 0000000000000000000000000000000000000000..76daa01d546fa2f88abdccac4a839b403ada8e57 --- /dev/null +++ b/hyeogi/SOLAR-10.7B-v1.3/result_2024-02-09 06:02:57.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4803754266211604, + "acc_stderr": 0.014600132075947089, + "acc_norm": 0.5332764505119454, + "acc_norm_stderr": 0.014578995859605808 + }, + "harness|ko_hellaswag|10": { + "acc": 0.46325433180641307, + "acc_stderr": 0.004976288321681822, + "acc_norm": 0.6293567018522207, + "acc_norm_stderr": 0.004819899945342492 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6608187134502924, + "acc_stderr": 0.036310534964889056, + "acc_norm": 0.6608187134502924, + "acc_norm_stderr": 0.036310534964889056 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6504854368932039, + "acc_stderr": 0.047211885060971716, + "acc_norm": 0.6504854368932039, + "acc_norm_stderr": 0.047211885060971716 + }, + 
"harness|ko_mmlu_miscellaneous|5": { + "acc": 0.685823754789272, + "acc_stderr": 0.016599291735884925, + "acc_norm": 0.685823754789272, + "acc_norm_stderr": 0.016599291735884925 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4888888888888889, + "acc_stderr": 0.04318275491977976, + "acc_norm": 0.4888888888888889, + "acc_norm_stderr": 0.04318275491977976 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.5148936170212766, + "acc_stderr": 0.032671518489247764, + "acc_norm": 0.5148936170212766, + "acc_norm_stderr": 0.032671518489247764 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4939759036144578, + "acc_stderr": 0.03892212195333047, + "acc_norm": 0.4939759036144578, + "acc_norm_stderr": 0.03892212195333047 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6012861736334405, + "acc_stderr": 0.0278093225857745, + "acc_norm": 0.6012861736334405, + "acc_norm_stderr": 0.0278093225857745 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.6412556053811659, + "acc_stderr": 0.032190792004199956, + "acc_norm": 0.6412556053811659, + "acc_norm_stderr": 0.032190792004199956 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6183206106870229, + "acc_stderr": 0.04260735157644561, + "acc_norm": 0.6183206106870229, + "acc_norm_stderr": 0.04260735157644561 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7525252525252525, + "acc_stderr": 0.03074630074212451, + "acc_norm": 0.7525252525252525, + "acc_norm_stderr": 0.03074630074212451 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.496551724137931, + "acc_stderr": 0.041665675771015785, + "acc_norm": 0.496551724137931, + "acc_norm_stderr": 0.041665675771015785 + }, + 
"harness|ko_mmlu_college_physics|5": { + "acc": 0.3235294117647059, + "acc_stderr": 0.046550104113196177, + "acc_norm": 0.3235294117647059, + "acc_norm_stderr": 0.046550104113196177 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5966386554621849, + "acc_stderr": 0.03186608121408832, + "acc_norm": 0.5966386554621849, + "acc_norm_stderr": 0.03186608121408832 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5230769230769231, + "acc_stderr": 0.02532399086173626, + "acc_norm": 0.5230769230769231, + "acc_norm_stderr": 0.02532399086173626 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.64, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.64, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6759259259259259, + "acc_stderr": 0.045245960070300476, + "acc_norm": 0.6759259259259259, + "acc_norm_stderr": 0.045245960070300476 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4088669950738916, + "acc_stderr": 0.03459058815883233, + "acc_norm": 0.4088669950738916, + "acc_norm_stderr": 0.03459058815883233 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6129032258064516, + "acc_stderr": 0.02770935967503249, + "acc_norm": 0.6129032258064516, + "acc_norm_stderr": 0.02770935967503249 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.8205128205128205, + "acc_stderr": 0.02514093595033544, + "acc_norm": 0.8205128205128205, + "acc_norm_stderr": 0.02514093595033544 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5471698113207547, + "acc_stderr": 0.030635627957961827, + "acc_norm": 0.5471698113207547, + "acc_norm_stderr": 0.030635627957961827 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6272727272727273, + "acc_stderr": 0.04631381319425465, + "acc_norm": 0.6272727272727273, + 
"acc_norm_stderr": 0.04631381319425465 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3296296296296296, + "acc_stderr": 0.02866120111652458, + "acc_norm": 0.3296296296296296, + "acc_norm_stderr": 0.02866120111652458 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3509933774834437, + "acc_stderr": 0.03896981964257374, + "acc_norm": 0.3509933774834437, + "acc_norm_stderr": 0.03896981964257374 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7263681592039801, + "acc_stderr": 0.03152439186555402, + "acc_norm": 0.7263681592039801, + "acc_norm_stderr": 0.03152439186555402 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4624277456647399, + "acc_stderr": 0.0380168510452446, + "acc_norm": 0.4624277456647399, + "acc_norm_stderr": 0.0380168510452446 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.02510742548113727, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.02510742548113727 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5208333333333334, + "acc_stderr": 0.041775789507399935, + "acc_norm": 0.5208333333333334, + "acc_norm_stderr": 0.041775789507399935 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.74, + "acc_stderr": 0.04408440022768077, + "acc_norm": 0.74, + "acc_norm_stderr": 0.04408440022768077 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.6358381502890174, + "acc_stderr": 0.025906632631016124, + "acc_norm": 0.6358381502890174, + "acc_norm_stderr": 0.025906632631016124 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5828220858895705, + "acc_stderr": 0.038741028598180814, + "acc_norm": 0.5828220858895705, + "acc_norm_stderr": 0.038741028598180814 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5987654320987654, + "acc_stderr": 0.027272582849839792, + "acc_norm": 
0.5987654320987654, + "acc_norm_stderr": 0.027272582849839792 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7202072538860104, + "acc_stderr": 0.03239637046735703, + "acc_norm": 0.7202072538860104, + "acc_norm_stderr": 0.03239637046735703 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.4473684210526316, + "acc_stderr": 0.04677473004491199, + "acc_norm": 0.4473684210526316, + "acc_norm_stderr": 0.04677473004491199 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.7027522935779816, + "acc_stderr": 0.019595707224643547, + "acc_norm": 0.7027522935779816, + "acc_norm_stderr": 0.019595707224643547 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4523809523809524, + "acc_stderr": 0.044518079590553275, + "acc_norm": 0.4523809523809524, + "acc_norm_stderr": 0.044518079590553275 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.6013071895424836, + "acc_stderr": 0.028036092273891765, + "acc_norm": 0.6013071895424836, + "acc_norm_stderr": 0.028036092273891765 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.71900826446281, + "acc_stderr": 0.04103203830514511, + "acc_norm": 0.71900826446281, + "acc_norm_stderr": 0.04103203830514511 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5789473684210527, + "acc_stderr": 0.040179012759817494, + "acc_norm": 0.5789473684210527, + "acc_norm_stderr": 0.040179012759817494 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5081699346405228, + "acc_stderr": 0.020225134343057265, + "acc_norm": 0.5081699346405228, + "acc_norm_stderr": 0.020225134343057265 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.41134751773049644, + 
"acc_stderr": 0.02935491115994099, + "acc_norm": 0.41134751773049644, + "acc_norm_stderr": 0.02935491115994099 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4107142857142857, + "acc_stderr": 0.04669510663875191, + "acc_norm": 0.4107142857142857, + "acc_norm_stderr": 0.04669510663875191 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.47685185185185186, + "acc_stderr": 0.034063153607115086, + "acc_norm": 0.47685185185185186, + "acc_norm_stderr": 0.034063153607115086 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.18212290502793296, + "acc_stderr": 0.012907958130579976, + "acc_norm": 0.18212290502793296, + "acc_norm_stderr": 0.012907958130579976 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.71, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.71, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5257352941176471, + "acc_stderr": 0.03033257809455504, + "acc_norm": 0.5257352941176471, + "acc_norm_stderr": 0.03033257809455504 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6857142857142857, + "acc_stderr": 0.029719329422417454, + "acc_norm": 0.6857142857142857, + "acc_norm_stderr": 0.029719329422417454 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7763713080168776, + "acc_stderr": 0.027123298205229966, + "acc_norm": 0.7763713080168776, + "acc_norm_stderr": 0.027123298205229966 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.39308996088657105, + "acc_stderr": 0.012474899613873955, + "acc_norm": 0.39308996088657105, + "acc_norm_stderr": 0.012474899613873955 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.7549019607843137, + "acc_stderr": 0.03019028245350195, + "acc_norm": 0.7549019607843137, + "acc_norm_stderr": 0.03019028245350195 
+ }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.7212121212121212, + "acc_stderr": 0.03501438706296781, + "acc_norm": 0.7212121212121212, + "acc_norm_stderr": 0.03501438706296781 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3353733170134639, + "mc1_stderr": 0.016527534039668987, + "mc2": 0.4920811792727815, + "mc2_stderr": 0.015483638661592148 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4923258559622196, + "acc_stderr": 0.017188329219654283, + "acc_norm": 0.5159386068476978, + "acc_norm_stderr": 0.0171816178371902 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + 
"harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "hyeogi/SOLAR-10.7B-v1.3", + "model_sha": "c5ce9d3793c0d1c26f3be7d4ad410c027a76c40b", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/hyeogi/SOLAR-10.7B-v1.4/result_2024-03-12 06:22:22.json b/hyeogi/SOLAR-10.7B-v1.4/result_2024-03-12 06:22:22.json new file mode 100644 index 0000000000000000000000000000000000000000..4c194d7d4de86a16c2560677b8b3c9d38dc34bb8 --- /dev/null +++ b/hyeogi/SOLAR-10.7B-v1.4/result_2024-03-12 06:22:22.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.48378839590443684, + 
"acc_stderr": 0.014603708567414952, + "acc_norm": 0.5520477815699659, + "acc_norm_stderr": 0.014532011498211672 + }, + "harness|ko_hellaswag|10": { + "acc": 0.46415056761601275, + "acc_stderr": 0.004976939333240072, + "acc_norm": 0.6320454092810197, + "acc_norm_stderr": 0.004812633280078254 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.7076023391812866, + "acc_stderr": 0.03488647713457922, + "acc_norm": 0.7076023391812866, + "acc_norm_stderr": 0.03488647713457922 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.7281553398058253, + "acc_stderr": 0.044052680241409216, + "acc_norm": 0.7281553398058253, + "acc_norm_stderr": 0.044052680241409216 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.7266922094508301, + "acc_stderr": 0.015936681062628556, + "acc_norm": 0.7266922094508301, + "acc_norm_stderr": 0.015936681062628556 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.5259259259259259, + "acc_stderr": 0.04313531696750575, + "acc_norm": 0.5259259259259259, + "acc_norm_stderr": 0.04313531696750575 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542124, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542124 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.5574468085106383, + "acc_stderr": 0.03246956919789957, + "acc_norm": 0.5574468085106383, + "acc_norm_stderr": 0.03246956919789957 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.536144578313253, + "acc_stderr": 0.03882310850890594, + "acc_norm": 0.536144578313253, + "acc_norm_stderr": 0.03882310850890594 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6688102893890675, + "acc_stderr": 0.026730620728004917, + "acc_norm": 0.6688102893890675, + "acc_norm_stderr": 0.026730620728004917 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.6233183856502242, + "acc_stderr": 0.03252113489929188, + "acc_norm": 0.6233183856502242, + "acc_norm_stderr": 0.03252113489929188 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6412213740458015, 
+ "acc_stderr": 0.04206739313864908, + "acc_norm": 0.6412213740458015, + "acc_norm_stderr": 0.04206739313864908 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956913, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956913 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.797979797979798, + "acc_stderr": 0.028606204289229872, + "acc_norm": 0.797979797979798, + "acc_norm_stderr": 0.028606204289229872 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5724137931034483, + "acc_stderr": 0.041227371113703316, + "acc_norm": 0.5724137931034483, + "acc_norm_stderr": 0.041227371113703316 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.35294117647058826, + "acc_stderr": 0.04755129616062946, + "acc_norm": 0.35294117647058826, + "acc_norm_stderr": 0.04755129616062946 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6848739495798319, + "acc_stderr": 0.03017680828897434, + "acc_norm": 0.6848739495798319, + "acc_norm_stderr": 0.03017680828897434 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5871794871794872, + "acc_stderr": 0.024962683564331782, + "acc_norm": 0.5871794871794872, + "acc_norm_stderr": 0.024962683564331782 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.65, + "acc_stderr": 0.04793724854411021, + "acc_norm": 0.65, + "acc_norm_stderr": 0.04793724854411021 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6574074074074074, + "acc_stderr": 0.04587904741301811, + "acc_norm": 0.6574074074074074, + "acc_norm_stderr": 0.04587904741301811 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39901477832512317, + "acc_stderr": 0.03445487686264716, + "acc_norm": 0.39901477832512317, + "acc_norm_stderr": 0.03445487686264716 + }, + "harness|ko_mmlu_high_school_biology|5": { + 
"acc": 0.7032258064516129, + "acc_stderr": 0.0259885007924119, + "acc_norm": 0.7032258064516129, + "acc_norm_stderr": 0.0259885007924119 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.8247863247863247, + "acc_stderr": 0.024904439098918214, + "acc_norm": 0.8247863247863247, + "acc_norm_stderr": 0.024904439098918214 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5849056603773585, + "acc_stderr": 0.03032594578928611, + "acc_norm": 0.5849056603773585, + "acc_norm_stderr": 0.03032594578928611 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6636363636363637, + "acc_stderr": 0.04525393596302506, + "acc_norm": 0.6636363636363637, + "acc_norm_stderr": 0.04525393596302506 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.337037037037037, + "acc_stderr": 0.028820884666253255, + "acc_norm": 0.337037037037037, + "acc_norm_stderr": 0.028820884666253255 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3576158940397351, + "acc_stderr": 0.03913453431177258, + "acc_norm": 0.3576158940397351, + "acc_norm_stderr": 0.03913453431177258 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7412935323383084, + "acc_stderr": 0.030965903123573033, + "acc_norm": 0.7412935323383084, + "acc_norm_stderr": 0.030965903123573033 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5491329479768786, + "acc_stderr": 0.037940126746970296, + "acc_norm": 0.5491329479768786, + "acc_norm_stderr": 0.037940126746970296 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.40476190476190477, + "acc_stderr": 0.025279850397404897, + "acc_norm": 0.40476190476190477, + "acc_norm_stderr": 0.025279850397404897 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5972222222222222, + "acc_stderr": 0.04101405519842426, + "acc_norm": 0.5972222222222222, + "acc_norm_stderr": 0.04101405519842426 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 
0.04852365870939099 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.79, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.79, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.6329479768786127, + "acc_stderr": 0.02595005433765407, + "acc_norm": 0.6329479768786127, + "acc_norm_stderr": 0.02595005433765407 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.6012269938650306, + "acc_stderr": 0.03847021420456023, + "acc_norm": 0.6012269938650306, + "acc_norm_stderr": 0.03847021420456023 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6450617283950617, + "acc_stderr": 0.02662415247884585, + "acc_norm": 0.6450617283950617, + "acc_norm_stderr": 0.02662415247884585 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.8082901554404145, + "acc_stderr": 0.02840895362624528, + "acc_norm": 0.8082901554404145, + "acc_norm_stderr": 0.02840895362624528 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.4824561403508772, + "acc_stderr": 0.04700708033551038, + "acc_norm": 0.4824561403508772, + "acc_norm_stderr": 0.04700708033551038 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.7541284403669725, + "acc_stderr": 0.018461940968708457, + "acc_norm": 0.7541284403669725, + "acc_norm_stderr": 0.018461940968708457 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.42063492063492064, + "acc_stderr": 0.04415438226743744, + "acc_norm": 0.42063492063492064, + "acc_norm_stderr": 0.04415438226743744 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.6437908496732027, + "acc_stderr": 0.027420477662629245, + "acc_norm": 0.6437908496732027, + "acc_norm_stderr": 0.027420477662629245 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 
0.04902071300001975 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7768595041322314, + "acc_stderr": 0.03800754475228733, + "acc_norm": 0.7768595041322314, + "acc_norm_stderr": 0.03800754475228733 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.6447368421052632, + "acc_stderr": 0.038947344870133176, + "acc_norm": 0.6447368421052632, + "acc_norm_stderr": 0.038947344870133176 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.565359477124183, + "acc_stderr": 0.02005426920072646, + "acc_norm": 0.565359477124183, + "acc_norm_stderr": 0.02005426920072646 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.4078014184397163, + "acc_stderr": 0.02931601177634356, + "acc_norm": 0.4078014184397163, + "acc_norm_stderr": 0.02931601177634356 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4017857142857143, + "acc_stderr": 0.04653333146973646, + "acc_norm": 0.4017857142857143, + "acc_norm_stderr": 0.04653333146973646 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5092592592592593, + "acc_stderr": 0.034093869469927006, + "acc_norm": 0.5092592592592593, + "acc_norm_stderr": 0.034093869469927006 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2, + "acc_stderr": 0.013378001241813074, + "acc_norm": 0.2, + "acc_norm_stderr": 0.013378001241813074 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.73, + "acc_stderr": 0.044619604333847415, + "acc_norm": 0.73, + "acc_norm_stderr": 0.044619604333847415 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5477941176470589, + "acc_stderr": 0.03023375855159645, + "acc_norm": 0.5477941176470589, + "acc_norm_stderr": 0.03023375855159645 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.7346938775510204, + "acc_stderr": 0.028263889943784606, + "acc_norm": 
0.7346938775510204, + "acc_norm_stderr": 0.028263889943784606 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7848101265822784, + "acc_stderr": 0.02675082699467615, + "acc_norm": 0.7848101265822784, + "acc_norm_stderr": 0.02675082699467615 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.4074315514993481, + "acc_stderr": 0.01254947371421222, + "acc_norm": 0.4074315514993481, + "acc_norm_stderr": 0.01254947371421222 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.7647058823529411, + "acc_stderr": 0.029771775228145628, + "acc_norm": 0.7647058823529411, + "acc_norm_stderr": 0.029771775228145628 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.7515151515151515, + "acc_stderr": 0.03374402644139405, + "acc_norm": 0.7515151515151515, + "acc_norm_stderr": 0.03374402644139405 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.32068543451652387, + "mc1_stderr": 0.016339170373280906, + "mc2": 0.46588911930346544, + "mc2_stderr": 0.015381504238533388 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5017709563164109, + "acc_stderr": 0.017190246276231863, + "acc_norm": 0.5206611570247934, + "acc_norm_stderr": 0.017175671279836446 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + 
"harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "hyeogi/SOLAR-10.7B-v1.4", + "model_sha": 
"31998932ea0f460e5befed423406fcc6e2261463", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/hyeogi/SOLAR-10.7B-v1.5/result_2024-03-13 02:14:34.json b/hyeogi/SOLAR-10.7B-v1.5/result_2024-03-13 02:14:34.json new file mode 100644 index 0000000000000000000000000000000000000000..061355a8a2b5c586da9049600b4caa56adaa1256 --- /dev/null +++ b/hyeogi/SOLAR-10.7B-v1.5/result_2024-03-13 02:14:34.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.5034129692832765, + "acc_stderr": 0.014611050403244077, + "acc_norm": 0.5716723549488054, + "acc_norm_stderr": 0.01446049636759901 + }, + "harness|ko_hellaswag|10": { + "acc": 0.45379406492730534, + "acc_stderr": 0.0049684294763450085, + "acc_norm": 0.6213901613224457, + "acc_norm_stderr": 0.004840493603166207 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.7017543859649122, + "acc_stderr": 0.03508771929824565, + "acc_norm": 0.7017543859649122, + "acc_norm_stderr": 0.03508771929824565 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6990291262135923, + "acc_stderr": 0.045416094465039476, + "acc_norm": 0.6990291262135923, + "acc_norm_stderr": 0.045416094465039476 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.7266922094508301, + "acc_stderr": 0.01593668106262856, + "acc_norm": 0.7266922094508301, + "acc_norm_stderr": 0.01593668106262856 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.5333333333333333, + "acc_stderr": 0.043097329010363554, + "acc_norm": 0.5333333333333333, + "acc_norm_stderr": 0.043097329010363554 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.574468085106383, + "acc_stderr": 0.0323214691622447, + "acc_norm": 0.574468085106383, + 
"acc_norm_stderr": 0.0323214691622447 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.5180722891566265, + "acc_stderr": 0.038899512528272166, + "acc_norm": 0.5180722891566265, + "acc_norm_stderr": 0.038899512528272166 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6237942122186495, + "acc_stderr": 0.02751392568354943, + "acc_norm": 0.6237942122186495, + "acc_norm_stderr": 0.02751392568354943 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.6322869955156951, + "acc_stderr": 0.032361983509282745, + "acc_norm": 0.6322869955156951, + "acc_norm_stderr": 0.032361983509282745 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5801526717557252, + "acc_stderr": 0.04328577215262972, + "acc_norm": 0.5801526717557252, + "acc_norm_stderr": 0.04328577215262972 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7777777777777778, + "acc_stderr": 0.02962022787479048, + "acc_norm": 0.7777777777777778, + "acc_norm_stderr": 0.02962022787479048 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5103448275862069, + "acc_stderr": 0.04165774775728763, + "acc_norm": 0.5103448275862069, + "acc_norm_stderr": 0.04165774775728763 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.04533838195929776, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.04533838195929776 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6176470588235294, + "acc_stderr": 0.031566630992154156, + "acc_norm": 0.6176470588235294, + "acc_norm_stderr": 0.031566630992154156 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5743589743589743, + "acc_stderr": 0.025069094387296525, + "acc_norm": 0.5743589743589743, + "acc_norm_stderr": 0.025069094387296525 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.58, + "acc_stderr": 
0.04960449637488583, + "acc_norm": 0.58, + "acc_norm_stderr": 0.04960449637488583 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.04557239513497751, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.04557239513497751 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3793103448275862, + "acc_stderr": 0.03413963805906235, + "acc_norm": 0.3793103448275862, + "acc_norm_stderr": 0.03413963805906235 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6709677419354839, + "acc_stderr": 0.026729499068349958, + "acc_norm": 0.6709677419354839, + "acc_norm_stderr": 0.026729499068349958 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.8162393162393162, + "acc_stderr": 0.025372139671722933, + "acc_norm": 0.8162393162393162, + "acc_norm_stderr": 0.025372139671722933 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5773584905660377, + "acc_stderr": 0.03040233144576954, + "acc_norm": 0.5773584905660377, + "acc_norm_stderr": 0.03040233144576954 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6181818181818182, + "acc_stderr": 0.046534298079135075, + "acc_norm": 0.6181818181818182, + "acc_norm_stderr": 0.046534298079135075 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.32222222222222224, + "acc_stderr": 0.028493465091028593, + "acc_norm": 0.32222222222222224, + "acc_norm_stderr": 0.028493465091028593 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3841059602649007, + "acc_stderr": 0.03971301814719197, + "acc_norm": 0.3841059602649007, + "acc_norm_stderr": 0.03971301814719197 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.746268656716418, + "acc_stderr": 0.030769444967296018, + "acc_norm": 0.746268656716418, + "acc_norm_stderr": 0.030769444967296018 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 
0.5491329479768786, + "acc_stderr": 0.03794012674697029, + "acc_norm": 0.5491329479768786, + "acc_norm_stderr": 0.03794012674697029 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.455026455026455, + "acc_stderr": 0.02564692836104939, + "acc_norm": 0.455026455026455, + "acc_norm_stderr": 0.02564692836104939 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.04155319955593146, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.04155319955593146 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.72, + "acc_stderr": 0.04512608598542126, + "acc_norm": 0.72, + "acc_norm_stderr": 0.04512608598542126 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.6069364161849711, + "acc_stderr": 0.026296227915613674, + "acc_norm": 0.6069364161849711, + "acc_norm_stderr": 0.026296227915613674 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5766871165644172, + "acc_stderr": 0.03881891213334385, + "acc_norm": 0.5766871165644172, + "acc_norm_stderr": 0.03881891213334385 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6388888888888888, + "acc_stderr": 0.026725868809100786, + "acc_norm": 0.6388888888888888, + "acc_norm_stderr": 0.026725868809100786 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7616580310880829, + "acc_stderr": 0.030748905363909895, + "acc_norm": 0.7616580310880829, + "acc_norm_stderr": 0.030748905363909895 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.45614035087719296, + "acc_stderr": 0.046854730419077895, + "acc_norm": 0.45614035087719296, + "acc_norm_stderr": 0.046854730419077895 + }, + 
"harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.7412844036697248, + "acc_stderr": 0.01877605231961962, + "acc_norm": 0.7412844036697248, + "acc_norm_stderr": 0.01877605231961962 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4365079365079365, + "acc_stderr": 0.044359328928514664, + "acc_norm": 0.4365079365079365, + "acc_norm_stderr": 0.044359328928514664 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.6176470588235294, + "acc_stderr": 0.027826109307283693, + "acc_norm": 0.6176470588235294, + "acc_norm_stderr": 0.027826109307283693 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.68, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.68, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.71900826446281, + "acc_stderr": 0.04103203830514511, + "acc_norm": 0.71900826446281, + "acc_norm_stderr": 0.04103203830514511 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.618421052631579, + "acc_stderr": 0.03953173377749194, + "acc_norm": 0.618421052631579, + "acc_norm_stderr": 0.03953173377749194 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5473856209150327, + "acc_stderr": 0.020136790918492523, + "acc_norm": 0.5473856209150327, + "acc_norm_stderr": 0.020136790918492523 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.4397163120567376, + "acc_stderr": 0.02960991207559411, + "acc_norm": 0.4397163120567376, + "acc_norm_stderr": 0.02960991207559411 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4375, + "acc_stderr": 0.04708567521880525, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.04708567521880525 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5092592592592593, + "acc_stderr": 0.034093869469927006, + "acc_norm": 0.5092592592592593, + "acc_norm_stderr": 0.034093869469927006 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.3642458100558659, + "acc_stderr": 0.016094338768474593, + "acc_norm": 0.3642458100558659, + "acc_norm_stderr": 
0.016094338768474593 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.04999999999999999, + "acc_norm": 0.45, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.75, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.75, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5551470588235294, + "acc_stderr": 0.030187532060329387, + "acc_norm": 0.5551470588235294, + "acc_norm_stderr": 0.030187532060329387 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6938775510204082, + "acc_stderr": 0.02950489645459596, + "acc_norm": 0.6938775510204082, + "acc_norm_stderr": 0.02950489645459596 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7679324894514767, + "acc_stderr": 0.027479744550808517, + "acc_norm": 0.7679324894514767, + "acc_norm_stderr": 0.027479744550808517 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.42503259452411996, + "acc_stderr": 0.012625879884891993, + "acc_norm": 0.42503259452411996, + "acc_norm_stderr": 0.012625879884891993 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.7058823529411765, + "acc_stderr": 0.03198001660115071, + "acc_norm": 0.7058823529411765, + "acc_norm_stderr": 0.03198001660115071 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.7212121212121212, + "acc_stderr": 0.03501438706296781, + "acc_norm": 0.7212121212121212, + "acc_norm_stderr": 0.03501438706296781 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.48592411260709917, + "mc1_stderr": 0.0174965637170428, + "mc2": 0.6541003093368067, + "mc2_stderr": 0.015200374408770171 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.551357733175915, + "acc_stderr": 0.01709943051472579, + "acc_norm": 0.5678866587957497, + "acc_norm_stderr": 0.017031170198851746 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + 
"harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + 
"harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "hyeogi/SOLAR-10.7B-v1.5", + "model_sha": "ca02b93e91b2007b7fb18affb28d7e869513e1df", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/hyeogi/SOLAR-10.7B-v1.6/result_2024-03-20 00:56:54.json b/hyeogi/SOLAR-10.7B-v1.6/result_2024-03-20 00:56:54.json new file mode 100644 index 0000000000000000000000000000000000000000..cadf4b5223e2ffdba6097e4b56ece8c9eed64422 --- /dev/null +++ b/hyeogi/SOLAR-10.7B-v1.6/result_2024-03-20 00:56:54.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.44795221843003413, + "acc_stderr": 0.014532011498211669, + "acc_norm": 0.49829351535836175, + "acc_norm_stderr": 0.014611305705056983 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4629555865365465, + "acc_stderr": 0.004976067726432572, + "acc_norm": 0.6338378809002191, + "acc_norm_stderr": 0.004807699539973423 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.7134502923976608, + "acc_stderr": 0.03467826685703826, + "acc_norm": 0.7134502923976608, + "acc_norm_stderr": 0.03467826685703826 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.7281553398058253, + "acc_stderr": 0.044052680241409216, + "acc_norm": 0.7281553398058253, + "acc_norm_stderr": 
0.044052680241409216 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.7484035759897829, + "acc_stderr": 0.0155173223655296, + "acc_norm": 0.7484035759897829, + "acc_norm_stderr": 0.0155173223655296 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4962962962962963, + "acc_stderr": 0.043192236258113303, + "acc_norm": 0.4962962962962963, + "acc_norm_stderr": 0.043192236258113303 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.5659574468085107, + "acc_stderr": 0.03240038086792747, + "acc_norm": 0.5659574468085107, + "acc_norm_stderr": 0.03240038086792747 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.536144578313253, + "acc_stderr": 0.03882310850890594, + "acc_norm": 0.536144578313253, + "acc_norm_stderr": 0.03882310850890594 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6591639871382636, + "acc_stderr": 0.02692084126077616, + "acc_norm": 0.6591639871382636, + "acc_norm_stderr": 0.02692084126077616 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.6322869955156951, + "acc_stderr": 0.03236198350928275, + "acc_norm": 0.6322869955156951, + "acc_norm_stderr": 0.03236198350928275 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6946564885496184, + "acc_stderr": 0.04039314978724562, + "acc_norm": 0.6946564885496184, + "acc_norm_stderr": 0.04039314978724562 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.8181818181818182, + "acc_stderr": 0.027479603010538787, + "acc_norm": 0.8181818181818182, + "acc_norm_stderr": 0.027479603010538787 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5655172413793104, + "acc_stderr": 0.04130740879555498, + "acc_norm": 0.5655172413793104, + "acc_norm_stderr": 
0.04130740879555498 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.35294117647058826, + "acc_stderr": 0.04755129616062946, + "acc_norm": 0.35294117647058826, + "acc_norm_stderr": 0.04755129616062946 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6764705882352942, + "acc_stderr": 0.030388353551886786, + "acc_norm": 0.6764705882352942, + "acc_norm_stderr": 0.030388353551886786 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.6230769230769231, + "acc_stderr": 0.024570975364225995, + "acc_norm": 0.6230769230769231, + "acc_norm_stderr": 0.024570975364225995 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.71, + "acc_stderr": 0.04560480215720685, + "acc_norm": 0.71, + "acc_norm_stderr": 0.04560480215720685 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6851851851851852, + "acc_stderr": 0.04489931073591312, + "acc_norm": 0.6851851851851852, + "acc_norm_stderr": 0.04489931073591312 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.5024630541871922, + "acc_stderr": 0.03517945038691063, + "acc_norm": 0.5024630541871922, + "acc_norm_stderr": 0.03517945038691063 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.7096774193548387, + "acc_stderr": 0.025822106119415888, + "acc_norm": 0.7096774193548387, + "acc_norm_stderr": 0.025822106119415888 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.8290598290598291, + "acc_stderr": 0.0246624968452098, + "acc_norm": 0.8290598290598291, + "acc_norm_stderr": 0.0246624968452098 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.6075471698113207, + "acc_stderr": 0.03005258057955784, + "acc_norm": 0.6075471698113207, + "acc_norm_stderr": 0.03005258057955784 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6545454545454545, + "acc_stderr": 0.04554619617541053, + "acc_norm": 
0.6545454545454545, + "acc_norm_stderr": 0.04554619617541053 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.02944316932303154, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.02944316932303154 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3576158940397351, + "acc_stderr": 0.03913453431177258, + "acc_norm": 0.3576158940397351, + "acc_norm_stderr": 0.03913453431177258 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.736318407960199, + "acc_stderr": 0.031157150869355568, + "acc_norm": 0.736318407960199, + "acc_norm_stderr": 0.031157150869355568 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5433526011560693, + "acc_stderr": 0.03798106566014498, + "acc_norm": 0.5433526011560693, + "acc_norm_stderr": 0.03798106566014498 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.4312169312169312, + "acc_stderr": 0.025506481698138215, + "acc_norm": 0.4312169312169312, + "acc_norm_stderr": 0.025506481698138215 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.6319444444444444, + "acc_stderr": 0.040329990539607175, + "acc_norm": 0.6319444444444444, + "acc_norm_stderr": 0.040329990539607175 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.8, + "acc_stderr": 0.04020151261036845, + "acc_norm": 0.8, + "acc_norm_stderr": 0.04020151261036845 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.6473988439306358, + "acc_stderr": 0.025722802200895806, + "acc_norm": 0.6473988439306358, + "acc_norm_stderr": 0.025722802200895806 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5828220858895705, + "acc_stderr": 0.03874102859818083, + "acc_norm": 0.5828220858895705, + "acc_norm_stderr": 0.03874102859818083 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.654320987654321, + "acc_stderr": 
0.02646248777700187, + "acc_norm": 0.654320987654321, + "acc_norm_stderr": 0.02646248777700187 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.8134715025906736, + "acc_stderr": 0.02811209121011747, + "acc_norm": 0.8134715025906736, + "acc_norm_stderr": 0.02811209121011747 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.5, + "acc_stderr": 0.047036043419179864, + "acc_norm": 0.5, + "acc_norm_stderr": 0.047036043419179864 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.7596330275229358, + "acc_stderr": 0.01832060732096407, + "acc_norm": 0.7596330275229358, + "acc_norm_stderr": 0.01832060732096407 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.0442626668137991, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.0442626668137991 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.6437908496732027, + "acc_stderr": 0.027420477662629235, + "acc_norm": 0.6437908496732027, + "acc_norm_stderr": 0.027420477662629235 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.66, + "acc_stderr": 0.04760952285695238, + "acc_norm": 0.66, + "acc_norm_stderr": 0.04760952285695238 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7272727272727273, + "acc_stderr": 0.040655781409087044, + "acc_norm": 0.7272727272727273, + "acc_norm_stderr": 0.040655781409087044 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.6447368421052632, + "acc_stderr": 0.038947344870133176, + "acc_norm": 0.6447368421052632, + "acc_norm_stderr": 0.038947344870133176 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5735294117647058, + "acc_stderr": 0.020007912739359368, + "acc_norm": 0.5735294117647058, + "acc_norm_stderr": 0.020007912739359368 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.425531914893617, + 
"acc_stderr": 0.02949482760014437, + "acc_norm": 0.425531914893617, + "acc_norm_stderr": 0.02949482760014437 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4017857142857143, + "acc_stderr": 0.04653333146973646, + "acc_norm": 0.4017857142857143, + "acc_norm_stderr": 0.04653333146973646 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5231481481481481, + "acc_stderr": 0.03406315360711507, + "acc_norm": 0.5231481481481481, + "acc_norm_stderr": 0.03406315360711507 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2223463687150838, + "acc_stderr": 0.013907189208156883, + "acc_norm": 0.2223463687150838, + "acc_norm_stderr": 0.013907189208156883 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.53, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.53, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.71, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.71, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5514705882352942, + "acc_stderr": 0.030211479609121593, + "acc_norm": 0.5514705882352942, + "acc_norm_stderr": 0.030211479609121593 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6489795918367347, + "acc_stderr": 0.03055531675557364, + "acc_norm": 0.6489795918367347, + "acc_norm_stderr": 0.03055531675557364 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7974683544303798, + "acc_stderr": 0.026160568246601467, + "acc_norm": 0.7974683544303798, + "acc_norm_stderr": 0.026160568246601467 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.44132985658409385, + "acc_stderr": 0.01268201633564668, + "acc_norm": 0.44132985658409385, + "acc_norm_stderr": 0.01268201633564668 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.7352941176470589, + "acc_stderr": 0.03096451792692339, + "acc_norm": 0.7352941176470589, + "acc_norm_stderr": 0.03096451792692339 + }, + 
"harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.7454545454545455, + "acc_stderr": 0.03401506715249039, + "acc_norm": 0.7454545454545455, + "acc_norm_stderr": 0.03401506715249039 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2582619339045288, + "mc1_stderr": 0.015321821688476197, + "mc2": 0.38752302764119984, + "mc2_stderr": 0.014726517317051517 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.46871310507674147, + "acc_stderr": 0.017156666859785463, + "acc_norm": 0.4923258559622196, + "acc_norm_stderr": 0.01718832921965428 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + 
"harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "hyeogi/SOLAR-10.7B-v1.6", + "model_sha": "707029c955d824a87d142fc6afef27a197e480f1", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/hyeogi/Yi-6b-dpo-v0.1/result_2023-12-05 03:43:34.json b/hyeogi/Yi-6b-dpo-v0.1/result_2023-12-05 03:43:34.json new file mode 100644 index 0000000000000000000000000000000000000000..e8a1455917626834ad619144ad8195be13a4c293 --- /dev/null +++ b/hyeogi/Yi-6b-dpo-v0.1/result_2023-12-05 03:43:34.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3447098976109215, + 
"acc_stderr": 0.013888816286782112, + "acc_norm": 0.4129692832764505, + "acc_norm_stderr": 0.014388344935398326 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3943437562238598, + "acc_stderr": 0.004877104939356235, + "acc_norm": 0.5223063134833699, + "acc_norm_stderr": 0.004984813391016212 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.52046783625731, + "acc_stderr": 0.0383161053282193, + "acc_norm": 0.52046783625731, + "acc_norm_stderr": 0.0383161053282193 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5825242718446602, + "acc_stderr": 0.04882840548212238, + "acc_norm": 0.5825242718446602, + "acc_norm_stderr": 0.04882840548212238 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5440613026819924, + "acc_stderr": 0.01781040392543536, + "acc_norm": 0.5440613026819924, + "acc_norm_stderr": 0.01781040392543536 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45185185185185184, + "acc_stderr": 0.042992689054808624, + "acc_norm": 0.45185185185185184, + "acc_norm_stderr": 0.042992689054808624 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720683, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720683 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3446808510638298, + "acc_stderr": 0.03106898596312215, + "acc_norm": 0.3446808510638298, + "acc_norm_stderr": 0.03106898596312215 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3493975903614458, + "acc_stderr": 0.0371172519074075, + "acc_norm": 0.3493975903614458, + "acc_norm_stderr": 0.0371172519074075 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5273311897106109, + "acc_stderr": 0.028355633568328188, + "acc_norm": 0.5273311897106109, + "acc_norm_stderr": 0.028355633568328188 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.515695067264574, + "acc_stderr": 0.0335412657542081, + "acc_norm": 0.515695067264574, + "acc_norm_stderr": 0.0335412657542081 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48854961832061067, + 
"acc_stderr": 0.043841400240780176, + "acc_norm": 0.48854961832061067, + "acc_norm_stderr": 0.043841400240780176 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.601010101010101, + "acc_stderr": 0.0348890161685273, + "acc_norm": 0.601010101010101, + "acc_norm_stderr": 0.0348890161685273 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.496551724137931, + "acc_stderr": 0.041665675771015785, + "acc_norm": 0.496551724137931, + "acc_norm_stderr": 0.041665675771015785 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.04533838195929776, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.04533838195929776 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5504201680672269, + "acc_stderr": 0.03231293497137707, + "acc_norm": 0.5504201680672269, + "acc_norm_stderr": 0.03231293497137707 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4846153846153846, + "acc_stderr": 0.025339003010106522, + "acc_norm": 0.4846153846153846, + "acc_norm_stderr": 0.025339003010106522 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.59, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.59, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.03481904844438804, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.03481904844438804 + }, + "harness|ko_mmlu_high_school_biology|5": { + 
"acc": 0.49032258064516127, + "acc_stderr": 0.02843867799890955, + "acc_norm": 0.49032258064516127, + "acc_norm_stderr": 0.02843867799890955 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6923076923076923, + "acc_stderr": 0.030236389942173095, + "acc_norm": 0.6923076923076923, + "acc_norm_stderr": 0.030236389942173095 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.49056603773584906, + "acc_stderr": 0.0307673947078081, + "acc_norm": 0.49056603773584906, + "acc_norm_stderr": 0.0307673947078081 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.04769300568972744, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.04769300568972744 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.29259259259259257, + "acc_stderr": 0.02773896963217609, + "acc_norm": 0.29259259259259257, + "acc_norm_stderr": 0.02773896963217609 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.36423841059602646, + "acc_stderr": 0.03929111781242741, + "acc_norm": 0.36423841059602646, + "acc_norm_stderr": 0.03929111781242741 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6467661691542289, + "acc_stderr": 0.03379790611796777, + "acc_norm": 0.6467661691542289, + "acc_norm_stderr": 0.03379790611796777 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.47398843930635837, + "acc_stderr": 0.03807301726504513, + "acc_norm": 0.47398843930635837, + "acc_norm_stderr": 0.03807301726504513 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.31216931216931215, + "acc_stderr": 0.02386520683697259, + "acc_norm": 0.31216931216931215, + "acc_norm_stderr": 0.02386520683697259 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.04122728707651282, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.04122728707651282 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 
0.04648231987117316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.476878612716763, + "acc_stderr": 0.026890297881303125, + "acc_norm": 0.476878612716763, + "acc_norm_stderr": 0.026890297881303125 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4171779141104294, + "acc_stderr": 0.0387410285981808, + "acc_norm": 0.4171779141104294, + "acc_norm_stderr": 0.0387410285981808 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5030864197530864, + "acc_stderr": 0.02782021415859437, + "acc_norm": 0.5030864197530864, + "acc_norm_stderr": 0.02782021415859437 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5751295336787565, + "acc_stderr": 0.0356747133521254, + "acc_norm": 0.5751295336787565, + "acc_norm_stderr": 0.0356747133521254 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2982456140350877, + "acc_stderr": 0.043036840335373173, + "acc_norm": 0.2982456140350877, + "acc_norm_stderr": 0.043036840335373173 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6, + "acc_stderr": 0.021004201260420075, + "acc_norm": 0.6, + "acc_norm_stderr": 0.021004201260420075 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.31746031746031744, + "acc_stderr": 0.041634530313028585, + "acc_norm": 0.31746031746031744, + "acc_norm_stderr": 0.041634530313028585 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5065359477124183, + "acc_stderr": 0.028627470550556044, + "acc_norm": 0.5065359477124183, + "acc_norm_stderr": 0.028627470550556044 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.47, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.47, + "acc_norm_stderr": 0.050161355804659205 + }, + 
"harness|ko_mmlu_international_law|5": { + "acc": 0.6198347107438017, + "acc_stderr": 0.04431324501968431, + "acc_norm": 0.6198347107438017, + "acc_norm_stderr": 0.04431324501968431 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5, + "acc_stderr": 0.04068942293855797, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04068942293855797 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3839869281045752, + "acc_stderr": 0.019675808135281515, + "acc_norm": 0.3839869281045752, + "acc_norm_stderr": 0.019675808135281515 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.35815602836879434, + "acc_stderr": 0.02860208586275942, + "acc_norm": 0.35815602836879434, + "acc_norm_stderr": 0.02860208586275942 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.23214285714285715, + "acc_stderr": 0.04007341809755807, + "acc_norm": 0.23214285714285715, + "acc_norm_stderr": 0.04007341809755807 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.39351851851851855, + "acc_stderr": 0.03331747876370312, + "acc_norm": 0.39351851851851855, + "acc_norm_stderr": 0.03331747876370312 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.26927374301675977, + "acc_stderr": 0.014835616582882622, + "acc_norm": 0.26927374301675977, + "acc_norm_stderr": 0.014835616582882622 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4742647058823529, + "acc_stderr": 0.03033257809455504, + "acc_norm": 0.4742647058823529, + "acc_norm_stderr": 0.03033257809455504 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.42448979591836733, + "acc_stderr": 0.03164209487942941, + "acc_norm": 0.42448979591836733, + 
"acc_norm_stderr": 0.03164209487942941 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6160337552742616, + "acc_stderr": 0.031658678064106674, + "acc_norm": 0.6160337552742616, + "acc_norm_stderr": 0.031658678064106674 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3089960886571056, + "acc_stderr": 0.011801729777239249, + "acc_norm": 0.3089960886571056, + "acc_norm_stderr": 0.011801729777239249 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5245098039215687, + "acc_stderr": 0.03505093194348798, + "acc_norm": 0.5245098039215687, + "acc_norm_stderr": 0.03505093194348798 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5393939393939394, + "acc_stderr": 0.03892207016552012, + "acc_norm": 0.5393939393939394, + "acc_norm_stderr": 0.03892207016552012 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3598531211750306, + "mc1_stderr": 0.016801860466677126, + "mc2": 0.5402612898523886, + "mc2_stderr": 0.01538434298166149 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.6033057851239669, + "acc_stderr": 0.016819438642971404, + "acc_norm": 0.6399055489964581, + "acc_norm_stderr": 0.01650368672044008 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 
1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "hyeogi/Yi-6b-dpo-v0.1", + "model_sha": "2e263aec3b4b3fa27baa420ce98448d4b3644632", + 
"model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/hyeogi/Yi-6b-dpo-v0.2/result_2023-12-08 12:58:33.json b/hyeogi/Yi-6b-dpo-v0.2/result_2023-12-08 12:58:33.json new file mode 100644 index 0000000000000000000000000000000000000000..8623c94d57d2a1364168a50b622e30b90748183e --- /dev/null +++ b/hyeogi/Yi-6b-dpo-v0.2/result_2023-12-08 12:58:33.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.35238907849829354, + "acc_stderr": 0.013960142600598682, + "acc_norm": 0.41723549488054607, + "acc_norm_stderr": 0.01440982551840308 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3968333001394145, + "acc_stderr": 0.004882410029935438, + "acc_norm": 0.5295757817167894, + "acc_norm_stderr": 0.004981044370530806 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5087719298245614, + "acc_stderr": 0.038342347441649924, + "acc_norm": 0.5087719298245614, + "acc_norm_stderr": 0.038342347441649924 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5922330097087378, + "acc_stderr": 0.04865777570410769, + "acc_norm": 0.5922330097087378, + "acc_norm_stderr": 0.04865777570410769 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5696040868454662, + "acc_stderr": 0.01770586877629239, + "acc_norm": 0.5696040868454662, + "acc_norm_stderr": 0.01770586877629239 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.04309732901036354, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.04309732901036354 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3702127659574468, + "acc_stderr": 0.031565646822367836, + "acc_norm": 0.3702127659574468, + "acc_norm_stderr": 0.031565646822367836 + }, + 
"harness|ko_mmlu_virology|5": { + "acc": 0.3855421686746988, + "acc_stderr": 0.037891344246115496, + "acc_norm": 0.3855421686746988, + "acc_norm_stderr": 0.037891344246115496 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5273311897106109, + "acc_stderr": 0.028355633568328188, + "acc_norm": 0.5273311897106109, + "acc_norm_stderr": 0.028355633568328188 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5022421524663677, + "acc_stderr": 0.03355746535223264, + "acc_norm": 0.5022421524663677, + "acc_norm_stderr": 0.03355746535223264 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5648854961832062, + "acc_stderr": 0.04348208051644858, + "acc_norm": 0.5648854961832062, + "acc_norm_stderr": 0.04348208051644858 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6161616161616161, + "acc_stderr": 0.03464881675016338, + "acc_norm": 0.6161616161616161, + "acc_norm_stderr": 0.03464881675016338 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5172413793103449, + "acc_stderr": 0.04164188720169375, + "acc_norm": 0.5172413793103449, + "acc_norm_stderr": 0.04164188720169375 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.044405219061793275, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.044405219061793275 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5462184873949579, + "acc_stderr": 0.03233943468182088, + "acc_norm": 0.5462184873949579, + "acc_norm_stderr": 0.03233943468182088 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.46153846153846156, + "acc_stderr": 0.025275892070240634, + "acc_norm": 0.46153846153846156, + "acc_norm_stderr": 0.025275892070240634 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.57, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.57, + 
"acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.04820403072760626, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.04820403072760626 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4236453201970443, + "acc_stderr": 0.03476725747649037, + "acc_norm": 0.4236453201970443, + "acc_norm_stderr": 0.03476725747649037 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5, + "acc_stderr": 0.028444006199428714, + "acc_norm": 0.5, + "acc_norm_stderr": 0.028444006199428714 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7264957264957265, + "acc_stderr": 0.02920254015343118, + "acc_norm": 0.7264957264957265, + "acc_norm_stderr": 0.02920254015343118 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.49056603773584906, + "acc_stderr": 0.0307673947078081, + "acc_norm": 0.49056603773584906, + "acc_norm_stderr": 0.0307673947078081 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5363636363636364, + "acc_stderr": 0.04776449162396197, + "acc_norm": 0.5363636363636364, + "acc_norm_stderr": 0.04776449162396197 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2740740740740741, + "acc_stderr": 0.027195934804085626, + "acc_norm": 0.2740740740740741, + "acc_norm_stderr": 0.027195934804085626 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33774834437086093, + "acc_stderr": 0.038615575462551684, + "acc_norm": 0.33774834437086093, + "acc_norm_stderr": 0.038615575462551684 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6268656716417911, + "acc_stderr": 0.034198326081760065, + "acc_norm": 0.6268656716417911, + "acc_norm_stderr": 0.034198326081760065 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.44508670520231214, + "acc_stderr": 0.03789401760283648, + 
"acc_norm": 0.44508670520231214, + "acc_norm_stderr": 0.03789401760283648 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3544973544973545, + "acc_stderr": 0.024636830602841997, + "acc_norm": 0.3544973544973545, + "acc_norm_stderr": 0.024636830602841997 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4513888888888889, + "acc_stderr": 0.041614023984032786, + "acc_norm": 0.4513888888888889, + "acc_norm_stderr": 0.041614023984032786 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237101, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237101 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.523121387283237, + "acc_stderr": 0.026890297881303125, + "acc_norm": 0.523121387283237, + "acc_norm_stderr": 0.026890297881303125 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.44785276073619634, + "acc_stderr": 0.03906947479456601, + "acc_norm": 0.44785276073619634, + "acc_norm_stderr": 0.03906947479456601 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4845679012345679, + "acc_stderr": 0.027807490044276198, + "acc_norm": 0.4845679012345679, + "acc_norm_stderr": 0.027807490044276198 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5906735751295337, + "acc_stderr": 0.03548608168860807, + "acc_norm": 0.5906735751295337, + "acc_norm_stderr": 0.03548608168860807 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3157894736842105, + "acc_stderr": 0.043727482902780085, + "acc_norm": 0.3157894736842105, + "acc_norm_stderr": 0.043727482902780085 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6110091743119266, + "acc_stderr": 
0.020902300887392866, + "acc_norm": 0.6110091743119266, + "acc_norm_stderr": 0.020902300887392866 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30158730158730157, + "acc_stderr": 0.041049472699033945, + "acc_norm": 0.30158730158730157, + "acc_norm_stderr": 0.041049472699033945 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5032679738562091, + "acc_stderr": 0.028629305194003543, + "acc_norm": 0.5032679738562091, + "acc_norm_stderr": 0.028629305194003543 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6528925619834711, + "acc_stderr": 0.04345724570292534, + "acc_norm": 0.6528925619834711, + "acc_norm_stderr": 0.04345724570292534 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5263157894736842, + "acc_stderr": 0.04063302731486671, + "acc_norm": 0.5263157894736842, + "acc_norm_stderr": 0.04063302731486671 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.42320261437908496, + "acc_stderr": 0.019987809769482064, + "acc_norm": 0.42320261437908496, + "acc_norm_stderr": 0.019987809769482064 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.36524822695035464, + "acc_stderr": 0.028723863853281285, + "acc_norm": 0.36524822695035464, + "acc_norm_stderr": 0.028723863853281285 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25892857142857145, + "acc_stderr": 0.04157751539865629, + "acc_norm": 0.25892857142857145, + "acc_norm_stderr": 0.04157751539865629 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4027777777777778, + "acc_stderr": 0.03344887382997866, + "acc_norm": 0.4027777777777778, + "acc_norm_stderr": 0.03344887382997866 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2759776536312849, + "acc_stderr": 0.014950103002475353, + "acc_norm": 0.2759776536312849, + "acc_norm_stderr": 0.014950103002475353 + }, + 
"harness|ko_mmlu_college_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5, + "acc_stderr": 0.030372836961539352, + "acc_norm": 0.5, + "acc_norm_stderr": 0.030372836961539352 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4489795918367347, + "acc_stderr": 0.03184213866687579, + "acc_norm": 0.4489795918367347, + "acc_norm_stderr": 0.03184213866687579 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6497890295358649, + "acc_stderr": 0.031052391937584346, + "acc_norm": 0.6497890295358649, + "acc_norm_stderr": 0.031052391937584346 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3213820078226858, + "acc_stderr": 0.011927581352265076, + "acc_norm": 0.3213820078226858, + "acc_norm_stderr": 0.011927581352265076 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.553921568627451, + "acc_stderr": 0.034888454513049734, + "acc_norm": 0.553921568627451, + "acc_norm_stderr": 0.034888454513049734 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.593939393939394, + "acc_stderr": 0.03834816355401181, + "acc_norm": 0.593939393939394, + "acc_norm_stderr": 0.03834816355401181 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3427172582619339, + "mc1_stderr": 0.016614949385347046, + "mc2": 0.5237635137263473, + "mc2_stderr": 0.015260079405506066 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.6694214876033058, + "acc_stderr": 0.016173423298845694, + "acc_norm": 0.6942148760330579, + "acc_norm_stderr": 0.0158405389325341 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + 
"harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 
1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "hyeogi/Yi-6b-dpo-v0.2", + "model_sha": "cfe3e81342b6bccf706170f85d0357f7017572fd", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/hyeogi/Yi-6b-dpo-v0.3/result_2023-12-15 20:36:23.json b/hyeogi/Yi-6b-dpo-v0.3/result_2023-12-15 20:36:23.json new file mode 100644 index 0000000000000000000000000000000000000000..aeba9282d6842cad865757f62e3f26ec650668be --- /dev/null +++ b/hyeogi/Yi-6b-dpo-v0.3/result_2023-12-15 20:36:23.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.37627986348122866, + "acc_stderr": 0.014157022555407163, + "acc_norm": 0.4249146757679181, + "acc_norm_stderr": 0.014445698968520769 + }, + "harness|ko_hellaswag|10": { + "acc": 0.39464250149372637, + "acc_stderr": 0.004877748536428437, + "acc_norm": 0.5307707627962557, + "acc_norm_stderr": 0.0049803234000310795 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4678362573099415, + "acc_stderr": 0.03826882417660369, + "acc_norm": 0.4678362573099415, + "acc_norm_stderr": 0.03826882417660369 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5922330097087378, + "acc_stderr": 0.04865777570410769, + "acc_norm": 0.5922330097087378, + "acc_norm_stderr": 0.04865777570410769 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5312899106002554, + "acc_stderr": 
0.017844918090468547, + "acc_norm": 0.5312899106002554, + "acc_norm_stderr": 0.017844918090468547 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.043163785995113245, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.043163785995113245 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.37872340425531914, + "acc_stderr": 0.03170995606040655, + "acc_norm": 0.37872340425531914, + "acc_norm_stderr": 0.03170995606040655 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39156626506024095, + "acc_stderr": 0.037998574544796354, + "acc_norm": 0.39156626506024095, + "acc_norm_stderr": 0.037998574544796354 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4790996784565916, + "acc_stderr": 0.028373270961069414, + "acc_norm": 0.4790996784565916, + "acc_norm_stderr": 0.028373270961069414 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.484304932735426, + "acc_stderr": 0.0335412657542081, + "acc_norm": 0.484304932735426, + "acc_norm_stderr": 0.0335412657542081 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5648854961832062, + "acc_stderr": 0.04348208051644858, + "acc_norm": 0.5648854961832062, + "acc_norm_stderr": 0.04348208051644858 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956913, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956913 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6111111111111112, + "acc_stderr": 0.0347327959083696, + "acc_norm": 0.6111111111111112, + "acc_norm_stderr": 0.0347327959083696 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5172413793103449, + "acc_stderr": 0.04164188720169375, + "acc_norm": 0.5172413793103449, + "acc_norm_stderr": 0.04164188720169375 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + 
"acc_stderr": 0.04092563958237653, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237653 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.47478991596638653, + "acc_stderr": 0.03243718055137411, + "acc_norm": 0.47478991596638653, + "acc_norm_stderr": 0.03243718055137411 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4897435897435897, + "acc_stderr": 0.025345672221942374, + "acc_norm": 0.4897435897435897, + "acc_norm_stderr": 0.025345672221942374 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.37438423645320196, + "acc_stderr": 0.03405155380561952, + "acc_norm": 0.37438423645320196, + "acc_norm_stderr": 0.03405155380561952 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.49032258064516127, + "acc_stderr": 0.02843867799890955, + "acc_norm": 0.49032258064516127, + "acc_norm_stderr": 0.02843867799890955 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6709401709401709, + "acc_stderr": 0.03078232157768818, + "acc_norm": 0.6709401709401709, + "acc_norm_stderr": 0.03078232157768818 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5207547169811321, + "acc_stderr": 0.030746349975723463, + "acc_norm": 0.5207547169811321, + "acc_norm_stderr": 0.030746349975723463 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5363636363636364, + "acc_stderr": 0.04776449162396197, + "acc_norm": 0.5363636363636364, + "acc_norm_stderr": 0.04776449162396197 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 
0.28888888888888886, + "acc_stderr": 0.027634907264178544, + "acc_norm": 0.28888888888888886, + "acc_norm_stderr": 0.027634907264178544 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33774834437086093, + "acc_stderr": 0.038615575462551684, + "acc_norm": 0.33774834437086093, + "acc_norm_stderr": 0.038615575462551684 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6119402985074627, + "acc_stderr": 0.0344578996436275, + "acc_norm": 0.6119402985074627, + "acc_norm_stderr": 0.0344578996436275 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4682080924855491, + "acc_stderr": 0.03804749744364764, + "acc_norm": 0.4682080924855491, + "acc_norm_stderr": 0.03804749744364764 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.328042328042328, + "acc_stderr": 0.024180497164376886, + "acc_norm": 0.328042328042328, + "acc_norm_stderr": 0.024180497164376886 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.04155319955593146, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.04155319955593146 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.53, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.53, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5144508670520231, + "acc_stderr": 0.02690784985628254, + "acc_norm": 0.5144508670520231, + "acc_norm_stderr": 0.02690784985628254 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4171779141104294, + "acc_stderr": 0.0387410285981808, + "acc_norm": 0.4171779141104294, + "acc_norm_stderr": 0.0387410285981808 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4845679012345679, + "acc_stderr": 0.0278074900442762, + "acc_norm": 0.4845679012345679, + "acc_norm_stderr": 0.0278074900442762 + }, + 
"harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5595854922279793, + "acc_stderr": 0.035827245300360945, + "acc_norm": 0.5595854922279793, + "acc_norm_stderr": 0.035827245300360945 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.34210526315789475, + "acc_stderr": 0.04462917535336937, + "acc_norm": 0.34210526315789475, + "acc_norm_stderr": 0.04462917535336937 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5981651376146789, + "acc_stderr": 0.021020106172997006, + "acc_norm": 0.5981651376146789, + "acc_norm_stderr": 0.021020106172997006 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04216370213557836, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04216370213557836 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.47058823529411764, + "acc_stderr": 0.02858034106513829, + "acc_norm": 0.47058823529411764, + "acc_norm_stderr": 0.02858034106513829 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6033057851239669, + "acc_stderr": 0.044658697805310094, + "acc_norm": 0.6033057851239669, + "acc_norm_stderr": 0.044658697805310094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.506578947368421, + "acc_stderr": 0.040685900502249704, + "acc_norm": 0.506578947368421, + "acc_norm_stderr": 0.040685900502249704 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4133986928104575, + "acc_stderr": 0.01992211568278668, + "acc_norm": 0.4133986928104575, + "acc_norm_stderr": 0.01992211568278668 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.375886524822695, + "acc_stderr": 0.028893955412115882, + "acc_norm": 0.375886524822695, + 
"acc_norm_stderr": 0.028893955412115882 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.29464285714285715, + "acc_stderr": 0.0432704093257873, + "acc_norm": 0.29464285714285715, + "acc_norm_stderr": 0.0432704093257873 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4212962962962963, + "acc_stderr": 0.033674621388960775, + "acc_norm": 0.4212962962962963, + "acc_norm_stderr": 0.033674621388960775 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.31731843575418994, + "acc_stderr": 0.01556639263005703, + "acc_norm": 0.31731843575418994, + "acc_norm_stderr": 0.01556639263005703 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.48161764705882354, + "acc_stderr": 0.03035230339535196, + "acc_norm": 0.48161764705882354, + "acc_norm_stderr": 0.03035230339535196 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.43673469387755104, + "acc_stderr": 0.031751952375833226, + "acc_norm": 0.43673469387755104, + "acc_norm_stderr": 0.031751952375833226 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5907172995780591, + "acc_stderr": 0.03200704183359591, + "acc_norm": 0.5907172995780591, + "acc_norm_stderr": 0.03200704183359591 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.34224250325945244, + "acc_stderr": 0.01211793999870587, + "acc_norm": 0.34224250325945244, + "acc_norm_stderr": 0.01211793999870587 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5392156862745098, + "acc_stderr": 0.03498501649369527, + "acc_norm": 0.5392156862745098, + "acc_norm_stderr": 0.03498501649369527 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 
0.5575757575757576, + "acc_stderr": 0.03878372113711275, + "acc_norm": 0.5575757575757576, + "acc_norm_stderr": 0.03878372113711275 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.35862913096695226, + "mc1_stderr": 0.016789289499502025, + "mc2": 0.5302526106032021, + "mc2_stderr": 0.01564565803995267 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.6292798110979929, + "acc_stderr": 0.01660580128921261, + "acc_norm": 0.6646989374262101, + "acc_norm_stderr": 0.016230981232989813 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + 
"harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "hyeogi/Yi-6b-dpo-v0.3", + "model_sha": "590fef1d72c0bc3b406410739707b3247ede2cdb", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/hyeogi/Yi-6b-dpo-v0.4/result_2023-12-31 07:40:09.json b/hyeogi/Yi-6b-dpo-v0.4/result_2023-12-31 07:40:09.json new file mode 100644 index 0000000000000000000000000000000000000000..94ec1215de4453174c3d51441abd0da7cdc7cb63 --- /dev/null +++ b/hyeogi/Yi-6b-dpo-v0.4/result_2023-12-31 07:40:09.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3626279863481229, + "acc_stderr": 0.01404910656495501, + "acc_norm": 
0.4274744027303754, + "acc_norm_stderr": 0.014456862944650647 + }, + "harness|ko_hellaswag|10": { + "acc": 0.39822744473212507, + "acc_stderr": 0.004885323175701674, + "acc_norm": 0.5345548695478988, + "acc_norm_stderr": 0.0049778511619043946 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.49122807017543857, + "acc_stderr": 0.038342347441649924, + "acc_norm": 0.49122807017543857, + "acc_norm_stderr": 0.038342347441649924 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5631067961165048, + "acc_stderr": 0.04911147107365777, + "acc_norm": 0.5631067961165048, + "acc_norm_stderr": 0.04911147107365777 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5721583652618135, + "acc_stderr": 0.017692787927803728, + "acc_norm": 0.5721583652618135, + "acc_norm_stderr": 0.017692787927803728 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.043163785995113245, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.043163785995113245 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3574468085106383, + "acc_stderr": 0.03132941789476425, + "acc_norm": 0.3574468085106383, + "acc_norm_stderr": 0.03132941789476425 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.41566265060240964, + "acc_stderr": 0.038367221765980515, + "acc_norm": 0.41566265060240964, + "acc_norm_stderr": 0.038367221765980515 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4983922829581994, + "acc_stderr": 0.02839794490780661, + "acc_norm": 0.4983922829581994, + "acc_norm_stderr": 0.02839794490780661 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.47533632286995514, + "acc_stderr": 0.033516951676526276, + "acc_norm": 0.47533632286995514, + "acc_norm_stderr": 0.033516951676526276 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.46564885496183206, + "acc_stderr": 
0.043749285605997376, + "acc_norm": 0.46564885496183206, + "acc_norm_stderr": 0.043749285605997376 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6161616161616161, + "acc_stderr": 0.03464881675016337, + "acc_norm": 0.6161616161616161, + "acc_norm_stderr": 0.03464881675016337 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.496551724137931, + "acc_stderr": 0.04166567577101579, + "acc_norm": 0.496551724137931, + "acc_norm_stderr": 0.04166567577101579 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.04220773659171453, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.04220773659171453 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.47478991596638653, + "acc_stderr": 0.03243718055137411, + "acc_norm": 0.47478991596638653, + "acc_norm_stderr": 0.03243718055137411 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.47435897435897434, + "acc_stderr": 0.025317649726448652, + "acc_norm": 0.47435897435897434, + "acc_norm_stderr": 0.025317649726448652 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.35960591133004927, + "acc_stderr": 0.03376458246509568, + "acc_norm": 0.35960591133004927, + "acc_norm_stderr": 0.03376458246509568 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.45806451612903226, + 
"acc_stderr": 0.028343787250540632, + "acc_norm": 0.45806451612903226, + "acc_norm_stderr": 0.028343787250540632 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6965811965811965, + "acc_stderr": 0.03011821010694265, + "acc_norm": 0.6965811965811965, + "acc_norm_stderr": 0.03011821010694265 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4981132075471698, + "acc_stderr": 0.030772653642075664, + "acc_norm": 0.4981132075471698, + "acc_norm_stderr": 0.030772653642075664 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5545454545454546, + "acc_stderr": 0.047605488214603246, + "acc_norm": 0.5545454545454546, + "acc_norm_stderr": 0.047605488214603246 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2814814814814815, + "acc_stderr": 0.027420019350945277, + "acc_norm": 0.2814814814814815, + "acc_norm_stderr": 0.027420019350945277 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33112582781456956, + "acc_stderr": 0.038425817186598696, + "acc_norm": 0.33112582781456956, + "acc_norm_stderr": 0.038425817186598696 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5771144278606966, + "acc_stderr": 0.034932317774212816, + "acc_norm": 0.5771144278606966, + "acc_norm_stderr": 0.034932317774212816 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4277456647398844, + "acc_stderr": 0.03772446857518027, + "acc_norm": 0.4277456647398844, + "acc_norm_stderr": 0.03772446857518027 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.34656084656084657, + "acc_stderr": 0.024508777521028424, + "acc_norm": 0.34656084656084657, + "acc_norm_stderr": 0.024508777521028424 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4027777777777778, + "acc_stderr": 0.04101405519842426, + "acc_norm": 0.4027777777777778, + "acc_norm_stderr": 0.04101405519842426 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + 
"harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.476878612716763, + "acc_stderr": 0.026890297881303125, + "acc_norm": 0.476878612716763, + "acc_norm_stderr": 0.026890297881303125 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.44785276073619634, + "acc_stderr": 0.03906947479456601, + "acc_norm": 0.44785276073619634, + "acc_norm_stderr": 0.03906947479456601 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.47530864197530864, + "acc_stderr": 0.02778680093142745, + "acc_norm": 0.47530864197530864, + "acc_norm_stderr": 0.02778680093142745 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.047609522856952365, + "acc_norm": 0.34, + "acc_norm_stderr": 0.047609522856952365 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5803108808290155, + "acc_stderr": 0.035615873276858834, + "acc_norm": 0.5803108808290155, + "acc_norm_stderr": 0.035615873276858834 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3157894736842105, + "acc_stderr": 0.04372748290278008, + "acc_norm": 0.3157894736842105, + "acc_norm_stderr": 0.04372748290278008 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6220183486238532, + "acc_stderr": 0.020789187066728117, + "acc_norm": 0.6220183486238532, + "acc_norm_stderr": 0.020789187066728117 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.0393253768039287, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.0393253768039287 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4477124183006536, + "acc_stderr": 0.02847293847803353, + "acc_norm": 0.4477124183006536, + "acc_norm_stderr": 0.02847293847803353 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620333 + }, + 
"harness|ko_mmlu_international_law|5": { + "acc": 0.6942148760330579, + "acc_stderr": 0.04205953933884123, + "acc_norm": 0.6942148760330579, + "acc_norm_stderr": 0.04205953933884123 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.48026315789473684, + "acc_stderr": 0.040657710025626036, + "acc_norm": 0.48026315789473684, + "acc_norm_stderr": 0.040657710025626036 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4019607843137255, + "acc_stderr": 0.01983517648437538, + "acc_norm": 0.4019607843137255, + "acc_norm_stderr": 0.01983517648437538 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3546099290780142, + "acc_stderr": 0.02853865002887864, + "acc_norm": 0.3546099290780142, + "acc_norm_stderr": 0.02853865002887864 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.29464285714285715, + "acc_stderr": 0.043270409325787317, + "acc_norm": 0.29464285714285715, + "acc_norm_stderr": 0.043270409325787317 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.33796296296296297, + "acc_stderr": 0.03225941352631295, + "acc_norm": 0.33796296296296297, + "acc_norm_stderr": 0.03225941352631295 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2737430167597765, + "acc_stderr": 0.014912413096372435, + "acc_norm": 0.2737430167597765, + "acc_norm_stderr": 0.014912413096372435 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4264705882352941, + "acc_stderr": 0.03004261583271486, + "acc_norm": 0.4264705882352941, + "acc_norm_stderr": 0.03004261583271486 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.363265306122449, + "acc_stderr": 0.030789051139030806, + "acc_norm": 
0.363265306122449, + "acc_norm_stderr": 0.030789051139030806 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6244725738396625, + "acc_stderr": 0.03152256243091156, + "acc_norm": 0.6244725738396625, + "acc_norm_stderr": 0.03152256243091156 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.32920469361147325, + "acc_stderr": 0.01200209166690231, + "acc_norm": 0.32920469361147325, + "acc_norm_stderr": 0.01200209166690231 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5245098039215687, + "acc_stderr": 0.03505093194348798, + "acc_norm": 0.5245098039215687, + "acc_norm_stderr": 0.03505093194348798 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5696969696969697, + "acc_stderr": 0.038662259628790774, + "acc_norm": 0.5696969696969697, + "acc_norm_stderr": 0.038662259628790774 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3659730722154223, + "mc1_stderr": 0.01686294168408836, + "mc2": 0.5390386024707369, + "mc2_stderr": 0.015472303689441313 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5820543093270366, + "acc_stderr": 0.01695729200527971, + "acc_norm": 0.6245572609208973, + "acc_norm_stderr": 0.01664841158951108 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + 
"harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "hyeogi/Yi-6b-dpo-v0.4", + "model_sha": 
"8a267a46b55cedb026233d7f41db9dd97dca2c36", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/hyeogi/Yi-6b-v0.3/result_2023-12-08 04:28:25.json b/hyeogi/Yi-6b-v0.3/result_2023-12-08 04:28:25.json new file mode 100644 index 0000000000000000000000000000000000000000..cc0bbabcfcfa82aebb2e73e67e62d0f5cae356e0 --- /dev/null +++ b/hyeogi/Yi-6b-v0.3/result_2023-12-08 04:28:25.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3583617747440273, + "acc_stderr": 0.014012883334859854, + "acc_norm": 0.4283276450511945, + "acc_norm_stderr": 0.014460496367599027 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3972316271659032, + "acc_stderr": 0.004883246579496662, + "acc_norm": 0.5323640709022107, + "acc_norm_stderr": 0.004979317515432522 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5146198830409356, + "acc_stderr": 0.038331852752130254, + "acc_norm": 0.5146198830409356, + "acc_norm_stderr": 0.038331852752130254 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5728155339805825, + "acc_stderr": 0.04897957737781168, + "acc_norm": 0.5728155339805825, + "acc_norm_stderr": 0.04897957737781168 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5504469987228607, + "acc_stderr": 0.017788725283507337, + "acc_norm": 0.5504469987228607, + "acc_norm_stderr": 0.017788725283507337 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45925925925925926, + "acc_stderr": 0.04304979692464243, + "acc_norm": 0.45925925925925926, + "acc_norm_stderr": 0.04304979692464243 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4297872340425532, + "acc_stderr": 0.03236214467715564, + "acc_norm": 0.4297872340425532, + "acc_norm_stderr": 
0.03236214467715564 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39759036144578314, + "acc_stderr": 0.03809973084540219, + "acc_norm": 0.39759036144578314, + "acc_norm_stderr": 0.03809973084540219 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5337620578778135, + "acc_stderr": 0.028333277109562793, + "acc_norm": 0.5337620578778135, + "acc_norm_stderr": 0.028333277109562793 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.515695067264574, + "acc_stderr": 0.0335412657542081, + "acc_norm": 0.515695067264574, + "acc_norm_stderr": 0.0335412657542081 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5572519083969466, + "acc_stderr": 0.04356447202665069, + "acc_norm": 0.5572519083969466, + "acc_norm_stderr": 0.04356447202665069 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.03427308652999936, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.03427308652999936 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5379310344827586, + "acc_stderr": 0.04154659671707548, + "acc_norm": 0.5379310344827586, + "acc_norm_stderr": 0.04154659671707548 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.04533838195929775, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.04533838195929775 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.48739495798319327, + "acc_stderr": 0.03246816765752173, + "acc_norm": 0.48739495798319327, + "acc_norm_stderr": 0.03246816765752173 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.48205128205128206, + "acc_stderr": 0.025334667080954953, + "acc_norm": 0.48205128205128206, + "acc_norm_stderr": 0.025334667080954953 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + 
"acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.04832853553437056, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.04832853553437056 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.43842364532019706, + "acc_stderr": 0.03491207857486518, + "acc_norm": 0.43842364532019706, + "acc_norm_stderr": 0.03491207857486518 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5258064516129032, + "acc_stderr": 0.028406095057653333, + "acc_norm": 0.5258064516129032, + "acc_norm_stderr": 0.028406095057653333 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7521367521367521, + "acc_stderr": 0.0282863240755644, + "acc_norm": 0.7521367521367521, + "acc_norm_stderr": 0.0282863240755644 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5433962264150943, + "acc_stderr": 0.030656748696739435, + "acc_norm": 0.5433962264150943, + "acc_norm_stderr": 0.030656748696739435 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5272727272727272, + "acc_stderr": 0.04782001791380061, + "acc_norm": 0.5272727272727272, + "acc_norm_stderr": 0.04782001791380061 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.31851851851851853, + "acc_stderr": 0.028406533090608463, + "acc_norm": 0.31851851851851853, + "acc_norm_stderr": 0.028406533090608463 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3576158940397351, + "acc_stderr": 0.03913453431177258, + "acc_norm": 0.3576158940397351, + "acc_norm_stderr": 0.03913453431177258 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6567164179104478, + "acc_stderr": 0.03357379665433431, + "acc_norm": 0.6567164179104478, + "acc_norm_stderr": 0.03357379665433431 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.44508670520231214, 
+ "acc_stderr": 0.03789401760283648, + "acc_norm": 0.44508670520231214, + "acc_norm_stderr": 0.03789401760283648 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.35978835978835977, + "acc_stderr": 0.024718075944129277, + "acc_norm": 0.35978835978835977, + "acc_norm_stderr": 0.024718075944129277 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4513888888888889, + "acc_stderr": 0.041614023984032786, + "acc_norm": 0.4513888888888889, + "acc_norm_stderr": 0.041614023984032786 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5, + "acc_stderr": 0.026919095102908273, + "acc_norm": 0.5, + "acc_norm_stderr": 0.026919095102908273 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.43558282208588955, + "acc_stderr": 0.03895632464138937, + "acc_norm": 0.43558282208588955, + "acc_norm_stderr": 0.03895632464138937 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.027815973433878014, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.027815973433878014 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5906735751295337, + "acc_stderr": 0.03548608168860806, + "acc_norm": 0.5906735751295337, + "acc_norm_stderr": 0.03548608168860806 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04434600701584925, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04434600701584925 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6458715596330276, + 
"acc_stderr": 0.020504729013829104, + "acc_norm": 0.6458715596330276, + "acc_norm_stderr": 0.020504729013829104 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30158730158730157, + "acc_stderr": 0.041049472699033945, + "acc_norm": 0.30158730158730157, + "acc_norm_stderr": 0.041049472699033945 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5228758169934641, + "acc_stderr": 0.028599936776089782, + "acc_norm": 0.5228758169934641, + "acc_norm_stderr": 0.028599936776089782 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.55, + "acc_stderr": 0.05, + "acc_norm": 0.55, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.043913262867240704, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.043913262867240704 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4934210526315789, + "acc_stderr": 0.040685900502249704, + "acc_norm": 0.4934210526315789, + "acc_norm_stderr": 0.040685900502249704 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4035947712418301, + "acc_stderr": 0.01984828016840116, + "acc_norm": 0.4035947712418301, + "acc_norm_stderr": 0.01984828016840116 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.33687943262411346, + "acc_stderr": 0.02819553487396673, + "acc_norm": 0.33687943262411346, + "acc_norm_stderr": 0.02819553487396673 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.30357142857142855, + "acc_stderr": 0.04364226155841044, + "acc_norm": 0.30357142857142855, + "acc_norm_stderr": 0.04364226155841044 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.032568505702936464, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.032568505702936464 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2446927374301676, + "acc_stderr": 0.014378169884098424, + "acc_norm": 0.2446927374301676, + "acc_norm_stderr": 0.014378169884098424 + }, + 
"harness|ko_mmlu_college_computer_science|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.47058823529411764, + "acc_stderr": 0.030320243265004137, + "acc_norm": 0.47058823529411764, + "acc_norm_stderr": 0.030320243265004137 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.49387755102040815, + "acc_stderr": 0.032006820201639086, + "acc_norm": 0.49387755102040815, + "acc_norm_stderr": 0.032006820201639086 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6244725738396625, + "acc_stderr": 0.03152256243091156, + "acc_norm": 0.6244725738396625, + "acc_norm_stderr": 0.03152256243091156 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3305084745762712, + "acc_stderr": 0.012014142101842974, + "acc_norm": 0.3305084745762712, + "acc_norm_stderr": 0.012014142101842974 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5098039215686274, + "acc_stderr": 0.03508637358630572, + "acc_norm": 0.5098039215686274, + "acc_norm_stderr": 0.03508637358630572 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5818181818181818, + "acc_stderr": 0.03851716319398393, + "acc_norm": 0.5818181818181818, + "acc_norm_stderr": 0.03851716319398393 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2582619339045288, + "mc1_stderr": 0.01532182168847619, + "mc2": 0.4048349906269079, + "mc2_stderr": 0.014771877117522413 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5690672963400236, + "acc_stderr": 0.017025558196043136, + "acc_norm": 0.602125147579693, + "acc_norm_stderr": 0.01682795905473339 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + 
"harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + 
"harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "hyeogi/Yi-6b-v0.3", + "model_sha": "754fd3466db3c4713f86ad61a0eabec2aeaa3c57", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/hyeogi/Yi-9b-v1/result_2024-01-11 06:28:42.json b/hyeogi/Yi-9b-v1/result_2024-01-11 06:28:42.json new file mode 100644 index 0000000000000000000000000000000000000000..3cf319b1fc325ad5168ddf67d6e4994723d6d895 --- /dev/null +++ b/hyeogi/Yi-9b-v1/result_2024-01-11 06:28:42.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3532423208191126, + "acc_stderr": 0.013967822714840053, + "acc_norm": 0.4035836177474403, + "acc_norm_stderr": 0.014337158914268438 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3895638319059948, + "acc_stderr": 0.00486654742235557, + "acc_norm": 0.5292770364469229, + "acc_norm_stderr": 0.004981220135882328 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5087719298245614, + "acc_stderr": 0.038342347441649924, + "acc_norm": 0.5087719298245614, + "acc_norm_stderr": 0.038342347441649924 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5728155339805825, + "acc_stderr": 0.04897957737781168, + "acc_norm": 0.5728155339805825, + "acc_norm_stderr": 0.04897957737781168 + }, + 
"harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5402298850574713, + "acc_stderr": 0.01782199409693354, + "acc_norm": 0.5402298850574713, + "acc_norm_stderr": 0.01782199409693354 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4222222222222222, + "acc_stderr": 0.04266763404099582, + "acc_norm": 0.4222222222222222, + "acc_norm_stderr": 0.04266763404099582 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.42127659574468085, + "acc_stderr": 0.03227834510146268, + "acc_norm": 0.42127659574468085, + "acc_norm_stderr": 0.03227834510146268 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39156626506024095, + "acc_stderr": 0.037998574544796354, + "acc_norm": 0.39156626506024095, + "acc_norm_stderr": 0.037998574544796354 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5337620578778135, + "acc_stderr": 0.028333277109562783, + "acc_norm": 0.5337620578778135, + "acc_norm_stderr": 0.028333277109562783 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5067264573991032, + "acc_stderr": 0.033554765962343545, + "acc_norm": 0.5067264573991032, + "acc_norm_stderr": 0.033554765962343545 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5343511450381679, + "acc_stderr": 0.04374928560599738, + "acc_norm": 0.5343511450381679, + "acc_norm_stderr": 0.04374928560599738 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6464646464646465, + "acc_stderr": 0.03406086723547155, + "acc_norm": 0.6464646464646465, + "acc_norm_stderr": 0.03406086723547155 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4689655172413793, + "acc_stderr": 0.04158632762097828, + "acc_norm": 0.4689655172413793, + "acc_norm_stderr": 0.04158632762097828 + 
}, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.043364327079931764, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.043364327079931764 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5084033613445378, + "acc_stderr": 0.03247390276569669, + "acc_norm": 0.5084033613445378, + "acc_norm_stderr": 0.03247390276569669 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.025294608023986483, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.025294608023986483 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4187192118226601, + "acc_stderr": 0.03471192860518468, + "acc_norm": 0.4187192118226601, + "acc_norm_stderr": 0.03471192860518468 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4806451612903226, + "acc_stderr": 0.0284226874043121, + "acc_norm": 0.4806451612903226, + "acc_norm_stderr": 0.0284226874043121 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7051282051282052, + "acc_stderr": 0.029872577708891176, + "acc_norm": 0.7051282051282052, + "acc_norm_stderr": 0.029872577708891176 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5132075471698113, + "acc_stderr": 0.030762134874500482, + "acc_norm": 0.5132075471698113, + "acc_norm_stderr": 0.030762134874500482 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5, + "acc_stderr": 0.04789131426105757, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04789131426105757 + }, + 
"harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.32592592592592595, + "acc_stderr": 0.028578348365473065, + "acc_norm": 0.32592592592592595, + "acc_norm_stderr": 0.028578348365473065 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + "acc_stderr": 0.037345356767871984, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.037345356767871984 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6467661691542289, + "acc_stderr": 0.03379790611796777, + "acc_norm": 0.6467661691542289, + "acc_norm_stderr": 0.03379790611796777 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4393063583815029, + "acc_stderr": 0.037842719328874674, + "acc_norm": 0.4393063583815029, + "acc_norm_stderr": 0.037842719328874674 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3439153439153439, + "acc_stderr": 0.024464426625596433, + "acc_norm": 0.3439153439153439, + "acc_norm_stderr": 0.024464426625596433 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4236111111111111, + "acc_stderr": 0.04132125019723367, + "acc_norm": 0.4236111111111111, + "acc_norm_stderr": 0.04132125019723367 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.63, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.63, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.49421965317919075, + "acc_stderr": 0.02691729617914911, + "acc_norm": 0.49421965317919075, + "acc_norm_stderr": 0.02691729617914911 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4294478527607362, + "acc_stderr": 0.03889066619112722, + "acc_norm": 0.4294478527607362, + "acc_norm_stderr": 0.03889066619112722 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5030864197530864, + "acc_stderr": 0.027820214158594363, + "acc_norm": 0.5030864197530864, + "acc_norm_stderr": 
0.027820214158594363 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5854922279792746, + "acc_stderr": 0.03555300319557669, + "acc_norm": 0.5854922279792746, + "acc_norm_stderr": 0.03555300319557669 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.30701754385964913, + "acc_stderr": 0.043391383225798594, + "acc_norm": 0.30701754385964913, + "acc_norm_stderr": 0.043391383225798594 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6238532110091743, + "acc_stderr": 0.020769231968205078, + "acc_norm": 0.6238532110091743, + "acc_norm_stderr": 0.020769231968205078 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3412698412698413, + "acc_stderr": 0.042407993275749234, + "acc_norm": 0.3412698412698413, + "acc_norm_stderr": 0.042407993275749234 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4869281045751634, + "acc_stderr": 0.028620130800700246, + "acc_norm": 0.4869281045751634, + "acc_norm_stderr": 0.028620130800700246 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5785123966942148, + "acc_stderr": 0.04507732278775087, + "acc_norm": 0.5785123966942148, + "acc_norm_stderr": 0.04507732278775087 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.46710526315789475, + "acc_stderr": 0.04060127035236395, + "acc_norm": 0.46710526315789475, + "acc_norm_stderr": 0.04060127035236395 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3937908496732026, + "acc_stderr": 0.01976621199107307, + "acc_norm": 0.3937908496732026, + "acc_norm_stderr": 0.01976621199107307 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.33687943262411346, + "acc_stderr": 0.02819553487396673, + 
"acc_norm": 0.33687943262411346, + "acc_norm_stderr": 0.02819553487396673 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3392857142857143, + "acc_stderr": 0.04493949068613539, + "acc_norm": 0.3392857142857143, + "acc_norm_stderr": 0.04493949068613539 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.03293377139415191, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.03293377139415191 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.3396648044692737, + "acc_stderr": 0.01583940040621249, + "acc_norm": 0.3396648044692737, + "acc_norm_stderr": 0.01583940040621249 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4522058823529412, + "acc_stderr": 0.03023375855159646, + "acc_norm": 0.4522058823529412, + "acc_norm_stderr": 0.03023375855159646 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4326530612244898, + "acc_stderr": 0.03171752824062664, + "acc_norm": 0.4326530612244898, + "acc_norm_stderr": 0.03171752824062664 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5991561181434599, + "acc_stderr": 0.031900803894732356, + "acc_norm": 0.5991561181434599, + "acc_norm_stderr": 0.031900803894732356 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3226857887874837, + "acc_stderr": 0.011940264193195986, + "acc_norm": 0.3226857887874837, + "acc_norm_stderr": 0.011940264193195986 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5343137254901961, + "acc_stderr": 0.03501038327635897, + "acc_norm": 0.5343137254901961, + "acc_norm_stderr": 0.03501038327635897 + }, + 
"harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5151515151515151, + "acc_stderr": 0.03902551007374448, + "acc_norm": 0.5151515151515151, + "acc_norm_stderr": 0.03902551007374448 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.35006119951040393, + "mc1_stderr": 0.016697949420151032, + "mc2": 0.5158756745116321, + "mc2_stderr": 0.015481536132196987 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5832349468713105, + "acc_stderr": 0.016950489146108815, + "acc_norm": 0.6328217237308147, + "acc_norm_stderr": 0.016572727807458606 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + 
"harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "hyeogi/Yi-9b-v1", + "model_sha": "c07e8b7a3279e1d786894b4b4553dc4126ced722", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/hyeogi/llama2-70b-v0.1/result_2023-12-05 09:36:11.json b/hyeogi/llama2-70b-v0.1/result_2023-12-05 09:36:11.json new file mode 100644 index 0000000000000000000000000000000000000000..2cc5396b8a96bd084ab85f71a99c25da656828bd --- /dev/null +++ b/hyeogi/llama2-70b-v0.1/result_2023-12-05 09:36:11.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.35409556313993173, + "acc_stderr": 
0.01397545412275656, + "acc_norm": 0.4197952218430034, + "acc_norm_stderr": 0.014422181226303022 + }, + "harness|ko_hellaswag|10": { + "acc": 0.37442740489942244, + "acc_stderr": 0.004829856058603586, + "acc_norm": 0.4869547898824935, + "acc_norm_stderr": 0.004988082825213275 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5087719298245614, + "acc_stderr": 0.038342347441649924, + "acc_norm": 0.5087719298245614, + "acc_norm_stderr": 0.038342347441649924 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4174757281553398, + "acc_stderr": 0.048828405482122375, + "acc_norm": 0.4174757281553398, + "acc_norm_stderr": 0.048828405482122375 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5044699872286079, + "acc_stderr": 0.017879248970584353, + "acc_norm": 0.5044699872286079, + "acc_norm_stderr": 0.017879248970584353 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.0424463323835323, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.0424463323835323 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39574468085106385, + "acc_stderr": 0.03196758697835363, + "acc_norm": 0.39574468085106385, + "acc_norm_stderr": 0.03196758697835363 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4397590361445783, + "acc_stderr": 0.03864139923699122, + "acc_norm": 0.4397590361445783, + "acc_norm_stderr": 0.03864139923699122 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4919614147909968, + "acc_stderr": 0.028394421370984545, + "acc_norm": 0.4919614147909968, + "acc_norm_stderr": 0.028394421370984545 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.547085201793722, + "acc_stderr": 0.03340867501923324, + "acc_norm": 0.547085201793722, + "acc_norm_stderr": 0.03340867501923324 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4732824427480916, + 
"acc_stderr": 0.04379024936553894, + "acc_norm": 0.4732824427480916, + "acc_norm_stderr": 0.04379024936553894 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5959595959595959, + "acc_stderr": 0.034961309720561266, + "acc_norm": 0.5959595959595959, + "acc_norm_stderr": 0.034961309720561266 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.42758620689655175, + "acc_stderr": 0.041227371113703316, + "acc_norm": 0.42758620689655175, + "acc_norm_stderr": 0.041227371113703316 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.043898699568087764, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.043898699568087764 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4789915966386555, + "acc_stderr": 0.03244980849990029, + "acc_norm": 0.4789915966386555, + "acc_norm_stderr": 0.03244980849990029 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4564102564102564, + "acc_stderr": 0.02525448542479961, + "acc_norm": 0.4564102564102564, + "acc_norm_stderr": 0.02525448542479961 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39901477832512317, + "acc_stderr": 0.03445487686264715, + "acc_norm": 0.39901477832512317, + "acc_norm_stderr": 0.03445487686264715 + }, + "harness|ko_mmlu_high_school_biology|5": 
{ + "acc": 0.4483870967741935, + "acc_stderr": 0.028292056830112735, + "acc_norm": 0.4483870967741935, + "acc_norm_stderr": 0.028292056830112735 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.688034188034188, + "acc_stderr": 0.030351527323344948, + "acc_norm": 0.688034188034188, + "acc_norm_stderr": 0.030351527323344948 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.43018867924528303, + "acc_stderr": 0.030471445867183238, + "acc_norm": 0.43018867924528303, + "acc_norm_stderr": 0.030471445867183238 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5272727272727272, + "acc_stderr": 0.04782001791380061, + "acc_norm": 0.5272727272727272, + "acc_norm_stderr": 0.04782001791380061 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.29259259259259257, + "acc_stderr": 0.02773896963217609, + "acc_norm": 0.29259259259259257, + "acc_norm_stderr": 0.02773896963217609 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3708609271523179, + "acc_stderr": 0.039439666991836285, + "acc_norm": 0.3708609271523179, + "acc_norm_stderr": 0.039439666991836285 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5920398009950248, + "acc_stderr": 0.03475116365194092, + "acc_norm": 0.5920398009950248, + "acc_norm_stderr": 0.03475116365194092 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3815028901734104, + "acc_stderr": 0.03703851193099521, + "acc_norm": 0.3815028901734104, + "acc_norm_stderr": 0.03703851193099521 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3412698412698413, + "acc_stderr": 0.024419234966819067, + "acc_norm": 0.3412698412698413, + "acc_norm_stderr": 0.024419234966819067 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4236111111111111, + "acc_stderr": 0.041321250197233685, + "acc_norm": 0.4236111111111111, + "acc_norm_stderr": 0.041321250197233685 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 
0.047258156262526045 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237101, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237101 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.523121387283237, + "acc_stderr": 0.02689029788130311, + "acc_norm": 0.523121387283237, + "acc_norm_stderr": 0.02689029788130311 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4662576687116564, + "acc_stderr": 0.039194155450484096, + "acc_norm": 0.4662576687116564, + "acc_norm_stderr": 0.039194155450484096 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4660493827160494, + "acc_stderr": 0.02775653525734767, + "acc_norm": 0.4660493827160494, + "acc_norm_stderr": 0.02775653525734767 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.046482319871173156, + "acc_norm": 0.31, + "acc_norm_stderr": 0.046482319871173156 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5906735751295337, + "acc_stderr": 0.03548608168860806, + "acc_norm": 0.5906735751295337, + "acc_norm_stderr": 0.03548608168860806 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2982456140350877, + "acc_stderr": 0.04303684033537316, + "acc_norm": 0.2982456140350877, + "acc_norm_stderr": 0.04303684033537316 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5192660550458715, + "acc_stderr": 0.02142140298254888, + "acc_norm": 0.5192660550458715, + "acc_norm_stderr": 0.02142140298254888 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.0442626668137991, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.0442626668137991 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.43137254901960786, + "acc_stderr": 0.02835895631342354, + "acc_norm": 0.43137254901960786, + "acc_norm_stderr": 0.02835895631342354 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 
0.050251890762960605 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6694214876033058, + "acc_stderr": 0.04294340845212095, + "acc_norm": 0.6694214876033058, + "acc_norm_stderr": 0.04294340845212095 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3815789473684211, + "acc_stderr": 0.03953173377749194, + "acc_norm": 0.3815789473684211, + "acc_norm_stderr": 0.03953173377749194 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.40522875816993464, + "acc_stderr": 0.019861155193829163, + "acc_norm": 0.40522875816993464, + "acc_norm_stderr": 0.019861155193829163 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3404255319148936, + "acc_stderr": 0.028267657482650154, + "acc_norm": 0.3404255319148936, + "acc_norm_stderr": 0.028267657482650154 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25892857142857145, + "acc_stderr": 0.041577515398656284, + "acc_norm": 0.25892857142857145, + "acc_norm_stderr": 0.041577515398656284 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3287037037037037, + "acc_stderr": 0.032036140846700596, + "acc_norm": 0.3287037037037037, + "acc_norm_stderr": 0.032036140846700596 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24804469273743016, + "acc_stderr": 0.014444157808261452, + "acc_norm": 0.24804469273743016, + "acc_norm_stderr": 0.014444157808261452 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3492647058823529, + "acc_stderr": 0.028959755196824866, + "acc_norm": 0.3492647058823529, + "acc_norm_stderr": 0.028959755196824866 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5428571428571428, + "acc_stderr": 
0.03189141832421396, + "acc_norm": 0.5428571428571428, + "acc_norm_stderr": 0.03189141832421396 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6286919831223629, + "acc_stderr": 0.03145068600744859, + "acc_norm": 0.6286919831223629, + "acc_norm_stderr": 0.03145068600744859 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.36310299869621904, + "acc_stderr": 0.012282264406018758, + "acc_norm": 0.36310299869621904, + "acc_norm_stderr": 0.012282264406018758 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5735294117647058, + "acc_stderr": 0.03471157907953426, + "acc_norm": 0.5735294117647058, + "acc_norm_stderr": 0.03471157907953426 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5212121212121212, + "acc_stderr": 0.03900828913737302, + "acc_norm": 0.5212121212121212, + "acc_norm_stderr": 0.03900828913737302 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2692778457772338, + "mc1_stderr": 0.015528566637087307, + "mc2": 0.44642268659301343, + "mc2_stderr": 0.015149376929354377 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4309327036599764, + "acc_stderr": 0.017025558196043136, + "acc_norm": 0.4805194805194805, + "acc_norm_stderr": 0.017177301992342558 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + 
"harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + 
"model_name": "hyeogi/llama2-70b-v0.1", + "model_sha": "0b83d3a9260e4adfd644b52d593e8a93d6698aa0", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/hyeogi/open-llama2-7b-dpo-v0.1/result_2023-12-16 20:16:49.json b/hyeogi/open-llama2-7b-dpo-v0.1/result_2023-12-16 20:16:49.json new file mode 100644 index 0000000000000000000000000000000000000000..93f1146eaadc395e13bcd2fc1f82b04add24d339 --- /dev/null +++ b/hyeogi/open-llama2-7b-dpo-v0.1/result_2023-12-16 20:16:49.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.31143344709897613, + "acc_stderr": 0.013532472099850947, + "acc_norm": 0.3856655290102389, + "acc_norm_stderr": 0.01422425097325718 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3815972913762199, + "acc_stderr": 0.004847857546957469, + "acc_norm": 0.4929296952798247, + "acc_norm_stderr": 0.004989282516055395 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.21052631578947367, + "acc_stderr": 0.0312678171466318, + "acc_norm": 0.21052631578947367, + "acc_norm_stderr": 0.0312678171466318 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2524271844660194, + "acc_stderr": 0.04301250399690878, + "acc_norm": 0.2524271844660194, + "acc_norm_stderr": 0.04301250399690878 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3269476372924649, + "acc_stderr": 0.016774908180131467, + "acc_norm": 0.3269476372924649, + "acc_norm_stderr": 0.016774908180131467 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.32592592592592595, + "acc_stderr": 0.040491220417025055, + "acc_norm": 0.32592592592592595, + "acc_norm_stderr": 0.040491220417025055 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206824, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206824 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 
0.33617021276595743, + "acc_stderr": 0.030881618520676942, + "acc_norm": 0.33617021276595743, + "acc_norm_stderr": 0.030881618520676942 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.29518072289156627, + "acc_stderr": 0.0355092018568963, + "acc_norm": 0.29518072289156627, + "acc_norm_stderr": 0.0355092018568963 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2765273311897106, + "acc_stderr": 0.025403832978179604, + "acc_norm": 0.2765273311897106, + "acc_norm_stderr": 0.025403832978179604 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3811659192825112, + "acc_stderr": 0.03259625118416827, + "acc_norm": 0.3811659192825112, + "acc_norm_stderr": 0.03259625118416827 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.33587786259541985, + "acc_stderr": 0.04142313771996664, + "acc_norm": 0.33587786259541985, + "acc_norm_stderr": 0.04142313771996664 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768077, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768077 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.2676767676767677, + "acc_stderr": 0.03154449888270285, + "acc_norm": 0.2676767676767677, + "acc_norm_stderr": 0.03154449888270285 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.21379310344827587, + "acc_stderr": 0.03416520447747549, + "acc_norm": 0.21379310344827587, + "acc_norm_stderr": 0.03416520447747549 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.19607843137254902, + "acc_stderr": 0.03950581861179962, + "acc_norm": 0.19607843137254902, + "acc_norm_stderr": 0.03950581861179962 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.24369747899159663, + "acc_stderr": 0.02788682807838057, + "acc_norm": 0.24369747899159663, + "acc_norm_stderr": 0.02788682807838057 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2, + "acc_stderr": 0.020280805062535722, + "acc_norm": 0.2, + "acc_norm_stderr": 0.020280805062535722 + }, + 
"harness|ko_mmlu_computer_security|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816507, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816507 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.047128212574267705, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.047128212574267705 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.27586206896551724, + "acc_stderr": 0.03144712581678243, + "acc_norm": 0.27586206896551724, + "acc_norm_stderr": 0.03144712581678243 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2870967741935484, + "acc_stderr": 0.025736542745594528, + "acc_norm": 0.2870967741935484, + "acc_norm_stderr": 0.025736542745594528 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.33760683760683763, + "acc_stderr": 0.030980296992618558, + "acc_norm": 0.33760683760683763, + "acc_norm_stderr": 0.030980296992618558 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2792452830188679, + "acc_stderr": 0.027611163402399715, + "acc_norm": 0.2792452830188679, + "acc_norm_stderr": 0.027611163402399715 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.34545454545454546, + "acc_stderr": 0.04554619617541054, + "acc_norm": 0.34545454545454546, + "acc_norm_stderr": 0.04554619617541054 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.22962962962962963, + "acc_stderr": 0.02564410863926762, + "acc_norm": 0.22962962962962963, + "acc_norm_stderr": 0.02564410863926762 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.23841059602649006, + "acc_stderr": 0.0347918557259966, + "acc_norm": 0.23841059602649006, + "acc_norm_stderr": 0.0347918557259966 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.26865671641791045, + "acc_stderr": 0.03134328358208954, + "acc_norm": 0.26865671641791045, + 
"acc_norm_stderr": 0.03134328358208954 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.19653179190751446, + "acc_stderr": 0.030299574664788147, + "acc_norm": 0.19653179190751446, + "acc_norm_stderr": 0.030299574664788147 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.24603174603174602, + "acc_stderr": 0.02218203720294837, + "acc_norm": 0.24603174603174602, + "acc_norm_stderr": 0.02218203720294837 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2708333333333333, + "acc_stderr": 0.03716177437566017, + "acc_norm": 0.2708333333333333, + "acc_norm_stderr": 0.03716177437566017 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.2, + "acc_stderr": 0.040201512610368445, + "acc_norm": 0.2, + "acc_norm_stderr": 0.040201512610368445 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2832369942196532, + "acc_stderr": 0.024257901705323374, + "acc_norm": 0.2832369942196532, + "acc_norm_stderr": 0.024257901705323374 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3128834355828221, + "acc_stderr": 0.03642914578292405, + "acc_norm": 0.3128834355828221, + "acc_norm_stderr": 0.03642914578292405 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.27469135802469136, + "acc_stderr": 0.024836057868294674, + "acc_norm": 0.27469135802469136, + "acc_norm_stderr": 0.024836057868294674 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.24870466321243523, + "acc_stderr": 0.031195840877700304, + "acc_norm": 0.24870466321243523, + "acc_norm_stderr": 0.031195840877700304 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.0414243971948936, + "acc_norm": 
0.2631578947368421, + "acc_norm_stderr": 0.0414243971948936 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.26972477064220185, + "acc_stderr": 0.01902848671111544, + "acc_norm": 0.26972477064220185, + "acc_norm_stderr": 0.01902848671111544 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.21428571428571427, + "acc_stderr": 0.03670066451047182, + "acc_norm": 0.21428571428571427, + "acc_norm_stderr": 0.03670066451047182 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2679738562091503, + "acc_stderr": 0.025360603796242557, + "acc_norm": 0.2679738562091503, + "acc_norm_stderr": 0.025360603796242557 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.38016528925619836, + "acc_stderr": 0.04431324501968432, + "acc_norm": 0.38016528925619836, + "acc_norm_stderr": 0.04431324501968432 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.17763157894736842, + "acc_stderr": 0.03110318238312337, + "acc_norm": 0.17763157894736842, + "acc_norm_stderr": 0.03110318238312337 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.26633986928104575, + "acc_stderr": 0.0178831881346672, + "acc_norm": 0.26633986928104575, + "acc_norm_stderr": 0.0178831881346672 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2765957446808511, + "acc_stderr": 0.026684564340460983, + "acc_norm": 0.2765957446808511, + "acc_norm_stderr": 0.026684564340460983 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.26785714285714285, + "acc_stderr": 0.04203277291467762, + "acc_norm": 0.26785714285714285, + "acc_norm_stderr": 0.04203277291467762 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2037037037037037, + "acc_stderr": 0.027467401804058014, + "acc_norm": 0.2037037037037037, + "acc_norm_stderr": 0.027467401804058014 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 
0.2748603351955307, + "acc_stderr": 0.01493131670322051, + "acc_norm": 0.2748603351955307, + "acc_norm_stderr": 0.01493131670322051 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.19, + "acc_stderr": 0.03942772444036623, + "acc_norm": 0.19, + "acc_norm_stderr": 0.03942772444036623 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.21, + "acc_stderr": 0.04093601807403325, + "acc_norm": 0.21, + "acc_norm_stderr": 0.04093601807403325 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.27941176470588236, + "acc_stderr": 0.027257202606114948, + "acc_norm": 0.27941176470588236, + "acc_norm_stderr": 0.027257202606114948 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2653061224489796, + "acc_stderr": 0.028263889943784606, + "acc_norm": 0.2653061224489796, + "acc_norm_stderr": 0.028263889943784606 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.3206751054852321, + "acc_stderr": 0.03038193194999041, + "acc_norm": 0.3206751054852321, + "acc_norm_stderr": 0.03038193194999041 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.24445893089960888, + "acc_stderr": 0.010976425013113893, + "acc_norm": 0.24445893089960888, + "acc_norm_stderr": 0.010976425013113893 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.25980392156862747, + "acc_stderr": 0.030778554678693247, + "acc_norm": 0.25980392156862747, + "acc_norm_stderr": 0.030778554678693247 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.28484848484848485, + "acc_stderr": 0.03524390844511784, + "acc_norm": 0.28484848484848485, + "acc_norm_stderr": 0.03524390844511784 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.33414932680538556, + "mc1_stderr": 0.016512530677150517, + "mc2": 0.5147595886551624, + "mc2_stderr": 0.015288691882001125 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.30342384887839435, + "acc_stderr": 0.01580607271790957, + "acc_norm": 0.4002361275088548, + "acc_norm_stderr": 
0.01684469351050505 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + 
"harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "hyeogi/open-llama2-7b-dpo-v0.1", + "model_sha": "54f59971e5f2a15c7cd8baff05b20e1c469283f9", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/hyeogi/open-llama2-7b-v0.1/result_2023-12-15 23:35:02.json b/hyeogi/open-llama2-7b-v0.1/result_2023-12-15 23:35:02.json new file mode 100644 index 0000000000000000000000000000000000000000..4de78176351df0f6d458d9587ee57c32b1756cb4 --- /dev/null +++ b/hyeogi/open-llama2-7b-v0.1/result_2023-12-15 23:35:02.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3242320819112628, + "acc_stderr": 0.013678810399518822, + "acc_norm": 0.3967576791808874, + "acc_norm_stderr": 0.014296513020180644 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3833897629954192, + "acc_stderr": 0.00485218262127426, + "acc_norm": 0.5002987452698665, + "acc_norm_stderr": 0.004989780520782245 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.30409356725146197, + "acc_stderr": 0.03528211258245231, + "acc_norm": 0.30409356725146197, + "acc_norm_stderr": 0.03528211258245231 + }, + 
"harness|ko_mmlu_management|5": { + "acc": 0.2524271844660194, + "acc_stderr": 0.04301250399690878, + "acc_norm": 0.2524271844660194, + "acc_norm_stderr": 0.04301250399690878 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3116219667943806, + "acc_stderr": 0.016562433867284176, + "acc_norm": 0.3116219667943806, + "acc_norm_stderr": 0.016562433867284176 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.32592592592592595, + "acc_stderr": 0.04049122041702506, + "acc_norm": 0.32592592592592595, + "acc_norm_stderr": 0.04049122041702506 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542124, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542124 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3446808510638298, + "acc_stderr": 0.03106898596312215, + "acc_norm": 0.3446808510638298, + "acc_norm_stderr": 0.03106898596312215 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.25903614457831325, + "acc_stderr": 0.03410646614071855, + "acc_norm": 0.25903614457831325, + "acc_norm_stderr": 0.03410646614071855 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2861736334405145, + "acc_stderr": 0.025670259242188957, + "acc_norm": 0.2861736334405145, + "acc_norm_stderr": 0.025670259242188957 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4260089686098655, + "acc_stderr": 0.03318833286217282, + "acc_norm": 0.4260089686098655, + "acc_norm_stderr": 0.03318833286217282 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.33587786259541985, + "acc_stderr": 0.041423137719966634, + "acc_norm": 0.33587786259541985, + "acc_norm_stderr": 0.041423137719966634 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384739, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384739 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.2676767676767677, + "acc_stderr": 0.03154449888270286, + "acc_norm": 0.2676767676767677, + "acc_norm_stderr": 0.03154449888270286 + }, + 
"harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.23448275862068965, + "acc_stderr": 0.035306258743465914, + "acc_norm": 0.23448275862068965, + "acc_norm_stderr": 0.035306258743465914 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.041583075330832865, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.041583075330832865 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.24369747899159663, + "acc_stderr": 0.027886828078380572, + "acc_norm": 0.24369747899159663, + "acc_norm_stderr": 0.027886828078380572 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.23846153846153847, + "acc_stderr": 0.021606294494647727, + "acc_norm": 0.23846153846153847, + "acc_norm_stderr": 0.021606294494647727 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04557239513497751, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04557239513497751 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2561576354679803, + "acc_stderr": 0.030712730070982592, + "acc_norm": 0.2561576354679803, + "acc_norm_stderr": 0.030712730070982592 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.29354838709677417, + "acc_stderr": 0.0259060870213193, + "acc_norm": 0.29354838709677417, + "acc_norm_stderr": 0.0259060870213193 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.32905982905982906, + "acc_stderr": 0.03078232157768816, + "acc_norm": 0.32905982905982906, + "acc_norm_stderr": 0.03078232157768816 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.28679245283018867, + "acc_stderr": 0.027834912527544074, + "acc_norm": 
0.28679245283018867, + "acc_norm_stderr": 0.027834912527544074 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.33636363636363636, + "acc_stderr": 0.04525393596302506, + "acc_norm": 0.33636363636363636, + "acc_norm_stderr": 0.04525393596302506 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.02684205787383371, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.02684205787383371 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.23178807947019867, + "acc_stderr": 0.034454062719870546, + "acc_norm": 0.23178807947019867, + "acc_norm_stderr": 0.034454062719870546 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.2835820895522388, + "acc_stderr": 0.03187187537919796, + "acc_norm": 0.2835820895522388, + "acc_norm_stderr": 0.03187187537919796 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2138728323699422, + "acc_stderr": 0.03126511206173042, + "acc_norm": 0.2138728323699422, + "acc_norm_stderr": 0.03126511206173042 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.25132275132275134, + "acc_stderr": 0.022340482339643895, + "acc_norm": 0.25132275132275134, + "acc_norm_stderr": 0.022340482339643895 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2638888888888889, + "acc_stderr": 0.03685651095897532, + "acc_norm": 0.2638888888888889, + "acc_norm_stderr": 0.03685651095897532 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.18, + "acc_stderr": 0.038612291966536955, + "acc_norm": 0.18, + "acc_norm_stderr": 0.038612291966536955 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2774566473988439, + "acc_stderr": 0.024105712607754307, + "acc_norm": 0.2774566473988439, + "acc_norm_stderr": 0.024105712607754307 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2883435582822086, + 
"acc_stderr": 0.035590395316173425, + "acc_norm": 0.2883435582822086, + "acc_norm_stderr": 0.035590395316173425 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.025842248700902168, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.025842248700902168 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.23316062176165803, + "acc_stderr": 0.03051611137147601, + "acc_norm": 0.23316062176165803, + "acc_norm_stderr": 0.03051611137147601 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.03999423879281336, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.03999423879281336 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.25321100917431194, + "acc_stderr": 0.018644073041375043, + "acc_norm": 0.25321100917431194, + "acc_norm_stderr": 0.018644073041375043 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.20634920634920634, + "acc_stderr": 0.036196045241242494, + "acc_norm": 0.20634920634920634, + "acc_norm_stderr": 0.036196045241242494 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.26143790849673204, + "acc_stderr": 0.025160998214292456, + "acc_norm": 0.26143790849673204, + "acc_norm_stderr": 0.025160998214292456 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145634, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145634 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.33884297520661155, + "acc_stderr": 0.043207678075366705, + "acc_norm": 0.33884297520661155, + "acc_norm_stderr": 0.043207678075366705 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.21710526315789475, + "acc_stderr": 0.03355045304882924, + "acc_norm": 0.21710526315789475, + "acc_norm_stderr": 0.03355045304882924 + }, + 
"harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3104575163398693, + "acc_stderr": 0.018718067052623216, + "acc_norm": 0.3104575163398693, + "acc_norm_stderr": 0.018718067052623216 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.25886524822695034, + "acc_stderr": 0.026129572527180848, + "acc_norm": 0.25886524822695034, + "acc_norm_stderr": 0.026129572527180848 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.21428571428571427, + "acc_stderr": 0.03894641120044792, + "acc_norm": 0.21428571428571427, + "acc_norm_stderr": 0.03894641120044792 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.16203703703703703, + "acc_stderr": 0.025130453652268455, + "acc_norm": 0.16203703703703703, + "acc_norm_stderr": 0.025130453652268455 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24022346368715083, + "acc_stderr": 0.014288343803925295, + "acc_norm": 0.24022346368715083, + "acc_norm_stderr": 0.014288343803925295 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.20220588235294118, + "acc_stderr": 0.024398192986654924, + "acc_norm": 0.20220588235294118, + "acc_norm_stderr": 0.024398192986654924 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.31020408163265306, + "acc_stderr": 0.029613459872484378, + "acc_norm": 0.31020408163265306, + "acc_norm_stderr": 0.029613459872484378 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.32489451476793246, + "acc_stderr": 0.030486039389105313, + "acc_norm": 0.32489451476793246, + "acc_norm_stderr": 0.030486039389105313 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.24511082138200782, + "acc_stderr": 
0.01098630787004552, + "acc_norm": 0.24511082138200782, + "acc_norm_stderr": 0.01098630787004552 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.031321798030832904, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.031321798030832904 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.23636363636363636, + "acc_stderr": 0.03317505930009179, + "acc_norm": 0.23636363636363636, + "acc_norm_stderr": 0.03317505930009179 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.23990208078335373, + "mc1_stderr": 0.014948812679062137, + "mc2": 0.4045967942290401, + "mc2_stderr": 0.014756891632525398 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.29988193624557263, + "acc_stderr": 0.01575344761542946, + "acc_norm": 0.4380165289256198, + "acc_norm_stderr": 0.01705775370216028 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + 
"harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "hyeogi/open-llama2-7b-v0.1", + "model_sha": "0f2714d91a830c5a89ba9f54ed4cc8ba147fafb1", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/hyokwan/hkcode-solar-youtube-merged/result_2024-06-06 06:07:10.json 
b/hyokwan/hkcode-solar-youtube-merged/result_2024-06-06 06:07:10.json new file mode 100644 index 0000000000000000000000000000000000000000..d249dd78a4c2d630629ffd55b31d5d55b1f279a6 --- /dev/null +++ b/hyokwan/hkcode-solar-youtube-merged/result_2024-06-06 06:07:10.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.386518771331058, + "acc_stderr": 0.014230084761910467, + "acc_norm": 0.44283276450511944, + "acc_norm_stderr": 0.014515573873348888 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3932483569010157, + "acc_stderr": 0.004874728756528209, + "acc_norm": 0.5068711412069309, + "acc_norm_stderr": 0.004989310228276122 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5380116959064327, + "acc_stderr": 0.03823727092882307, + "acc_norm": 0.5380116959064327, + "acc_norm_stderr": 0.03823727092882307 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5922330097087378, + "acc_stderr": 0.04865777570410769, + "acc_norm": 0.5922330097087378, + "acc_norm_stderr": 0.04865777570410769 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5491698595146871, + "acc_stderr": 0.017793297572699027, + "acc_norm": 0.5491698595146871, + "acc_norm_stderr": 0.017793297572699027 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4, + "acc_stderr": 0.04232073695151589, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04232073695151589 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542125, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542125 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.43829787234042555, + "acc_stderr": 0.03243618636108102, + "acc_norm": 0.43829787234042555, + "acc_norm_stderr": 0.03243618636108102 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42771084337349397, + "acc_stderr": 0.03851597683718533, + "acc_norm": 0.42771084337349397, + "acc_norm_stderr": 0.03851597683718533 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5401929260450161, + "acc_stderr": 
0.028306190403305693, + "acc_norm": 0.5401929260450161, + "acc_norm_stderr": 0.028306190403305693 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4977578475336323, + "acc_stderr": 0.033557465352232634, + "acc_norm": 0.4977578475336323, + "acc_norm_stderr": 0.033557465352232634 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5877862595419847, + "acc_stderr": 0.043171711948702556, + "acc_norm": 0.5877862595419847, + "acc_norm_stderr": 0.043171711948702556 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6414141414141414, + "acc_stderr": 0.034169036403915214, + "acc_norm": 0.6414141414141414, + "acc_norm_stderr": 0.034169036403915214 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5310344827586206, + "acc_stderr": 0.04158632762097828, + "acc_norm": 0.5310344827586206, + "acc_norm_stderr": 0.04158632762097828 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.04389869956808778, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.04389869956808778 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5336134453781513, + "acc_stderr": 0.03240501447690071, + "acc_norm": 0.5336134453781513, + "acc_norm_stderr": 0.03240501447690071 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.48205128205128206, + "acc_stderr": 0.025334667080954953, + "acc_norm": 0.48205128205128206, + "acc_norm_stderr": 0.025334667080954953 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5833333333333334, 
+ "acc_stderr": 0.04766075165356462, + "acc_norm": 0.5833333333333334, + "acc_norm_stderr": 0.04766075165356462 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.33004926108374383, + "acc_stderr": 0.03308530426228258, + "acc_norm": 0.33004926108374383, + "acc_norm_stderr": 0.03308530426228258 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5258064516129032, + "acc_stderr": 0.028406095057653326, + "acc_norm": 0.5258064516129032, + "acc_norm_stderr": 0.028406095057653326 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7307692307692307, + "acc_stderr": 0.029058588303748845, + "acc_norm": 0.7307692307692307, + "acc_norm_stderr": 0.029058588303748845 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.46037735849056605, + "acc_stderr": 0.030676096599389174, + "acc_norm": 0.46037735849056605, + "acc_norm_stderr": 0.030676096599389174 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5636363636363636, + "acc_stderr": 0.04750185058907296, + "acc_norm": 0.5636363636363636, + "acc_norm_stderr": 0.04750185058907296 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.32222222222222224, + "acc_stderr": 0.028493465091028593, + "acc_norm": 0.32222222222222224, + "acc_norm_stderr": 0.028493465091028593 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.03757949922943342, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.03757949922943342 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6417910447761194, + "acc_stderr": 0.03390393042268814, + "acc_norm": 0.6417910447761194, + "acc_norm_stderr": 0.03390393042268814 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4393063583815029, + "acc_stderr": 0.03784271932887467, + "acc_norm": 0.4393063583815029, + "acc_norm_stderr": 0.03784271932887467 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.36243386243386244, + "acc_stderr": 0.024757473902752042, + "acc_norm": 0.36243386243386244, + "acc_norm_stderr": 
0.024757473902752042 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4652777777777778, + "acc_stderr": 0.04171115858181618, + "acc_norm": 0.4652777777777778, + "acc_norm_stderr": 0.04171115858181618 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.65, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.65, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5635838150289018, + "acc_stderr": 0.026700545424943684, + "acc_norm": 0.5635838150289018, + "acc_norm_stderr": 0.026700545424943684 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4662576687116564, + "acc_stderr": 0.039194155450484096, + "acc_norm": 0.4662576687116564, + "acc_norm_stderr": 0.039194155450484096 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5185185185185185, + "acc_stderr": 0.027801656212323667, + "acc_norm": 0.5185185185185185, + "acc_norm_stderr": 0.027801656212323667 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6062176165803109, + "acc_stderr": 0.035260770955482405, + "acc_norm": 0.6062176165803109, + "acc_norm_stderr": 0.035260770955482405 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.43859649122807015, + "acc_stderr": 0.04668000738510455, + "acc_norm": 0.43859649122807015, + "acc_norm_stderr": 0.04668000738510455 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5981651376146789, + "acc_stderr": 0.02102010617299701, + "acc_norm": 0.5981651376146789, + "acc_norm_stderr": 0.02102010617299701 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.46825396825396826, + "acc_stderr": 0.04463112720677172, + "acc_norm": 0.46825396825396826, + 
"acc_norm_stderr": 0.04463112720677172 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5163398692810458, + "acc_stderr": 0.028614624752805434, + "acc_norm": 0.5163398692810458, + "acc_norm_stderr": 0.028614624752805434 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6859504132231405, + "acc_stderr": 0.042369647530410184, + "acc_norm": 0.6859504132231405, + "acc_norm_stderr": 0.042369647530410184 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.48026315789473684, + "acc_stderr": 0.040657710025626036, + "acc_norm": 0.48026315789473684, + "acc_norm_stderr": 0.040657710025626036 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4362745098039216, + "acc_stderr": 0.020062874243539128, + "acc_norm": 0.4362745098039216, + "acc_norm_stderr": 0.020062874243539128 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.37943262411347517, + "acc_stderr": 0.028947338851614105, + "acc_norm": 0.37943262411347517, + "acc_norm_stderr": 0.028947338851614105 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3482142857142857, + "acc_stderr": 0.045218299028335865, + "acc_norm": 0.3482142857142857, + "acc_norm_stderr": 0.045218299028335865 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.033622774366080424, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.033622774366080424 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.32849162011173183, + "acc_stderr": 0.015707935398496457, + "acc_norm": 0.32849162011173183, + "acc_norm_stderr": 0.015707935398496457 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.63, + "acc_stderr": 
0.04852365870939099, + "acc_norm": 0.63, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.43014705882352944, + "acc_stderr": 0.030074971917302875, + "acc_norm": 0.43014705882352944, + "acc_norm_stderr": 0.030074971917302875 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6040816326530613, + "acc_stderr": 0.03130802899065686, + "acc_norm": 0.6040816326530613, + "acc_norm_stderr": 0.03130802899065686 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6835443037974683, + "acc_stderr": 0.030274974880218977, + "acc_norm": 0.6835443037974683, + "acc_norm_stderr": 0.030274974880218977 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.38461538461538464, + "acc_stderr": 0.01242554841630295, + "acc_norm": 0.38461538461538464, + "acc_norm_stderr": 0.01242554841630295 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5784313725490197, + "acc_stderr": 0.03465868196380761, + "acc_norm": 0.5784313725490197, + "acc_norm_stderr": 0.03465868196380761 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5393939393939394, + "acc_stderr": 0.03892207016552012, + "acc_norm": 0.5393939393939394, + "acc_norm_stderr": 0.03892207016552012 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2839657282741738, + "mc1_stderr": 0.0157853708583967, + "mc2": 0.45979474803002673, + "mc2_stderr": 0.016387035242144212 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5206611570247934, + "acc_stderr": 0.01717567127983645, + "acc_norm": 0.5206611570247934, + "acc_norm_stderr": 0.01717567127983645 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + 
"harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 
1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "hyokwan/hkcode-solar-youtube-merged", + "model_sha": "6a42277fa95e23dc86131739b9598844d18d5e38", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/hyokwan/hkcode_llama3_8b/result_2024-06-25 13:20:48.json b/hyokwan/hkcode_llama3_8b/result_2024-06-25 13:20:48.json new file mode 100644 index 0000000000000000000000000000000000000000..f0bfa9f63fdeed9c772f55d59faaba1abc8dae12 --- /dev/null +++ b/hyokwan/hkcode_llama3_8b/result_2024-06-25 13:20:48.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.32081911262798635, + "acc_stderr": 0.01364094309194653, + "acc_norm": 0.3771331058020478, + "acc_norm_stderr": 0.014163366896192582 + }, + "harness|ko_hellaswag|10": { + "acc": 0.335291774546903, + "acc_stderr": 0.00471127540813841, + "acc_norm": 0.41585341565425216, + "acc_norm_stderr": 0.004918612098944034 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.47368421052631576, + "acc_stderr": 0.038295098689947286, + "acc_norm": 0.47368421052631576, + "acc_norm_stderr": 0.038295098689947286 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5242718446601942, + "acc_stderr": 0.049449010929737795, + "acc_norm": 0.5242718446601942, + "acc_norm_stderr": 0.049449010929737795 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.39208173690932313, + "acc_stderr": 0.017458524050147643, + "acc_norm": 0.39208173690932313, + "acc_norm_stderr": 0.017458524050147643 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3333333333333333, + 
"acc_stderr": 0.04072314811876837, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04072314811876837 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4297872340425532, + "acc_stderr": 0.03236214467715564, + "acc_norm": 0.4297872340425532, + "acc_norm_stderr": 0.03236214467715564 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3795180722891566, + "acc_stderr": 0.037777988227480165, + "acc_norm": 0.3795180722891566, + "acc_norm_stderr": 0.037777988227480165 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4919614147909968, + "acc_stderr": 0.028394421370984548, + "acc_norm": 0.4919614147909968, + "acc_norm_stderr": 0.028394421370984548 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4349775784753363, + "acc_stderr": 0.033272833702713445, + "acc_norm": 0.4349775784753363, + "acc_norm_stderr": 0.033272833702713445 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.45038167938931295, + "acc_stderr": 0.04363643698524779, + "acc_norm": 0.45038167938931295, + "acc_norm_stderr": 0.04363643698524779 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.43434343434343436, + "acc_stderr": 0.03531505879359183, + "acc_norm": 0.43434343434343436, + "acc_norm_stderr": 0.03531505879359183 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5655172413793104, + "acc_stderr": 0.041307408795554966, + "acc_norm": 0.5655172413793104, + "acc_norm_stderr": 0.041307408795554966 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.042207736591714506, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.042207736591714506 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + 
"acc": 0.4789915966386555, + "acc_stderr": 0.03244980849990029, + "acc_norm": 0.4789915966386555, + "acc_norm_stderr": 0.03244980849990029 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.46153846153846156, + "acc_stderr": 0.025275892070240634, + "acc_norm": 0.46153846153846156, + "acc_norm_stderr": 0.025275892070240634 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.41, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.41, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6018518518518519, + "acc_stderr": 0.04732332615978813, + "acc_norm": 0.6018518518518519, + "acc_norm_stderr": 0.04732332615978813 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.45320197044334976, + "acc_stderr": 0.03502544650845872, + "acc_norm": 0.45320197044334976, + "acc_norm_stderr": 0.03502544650845872 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5032258064516129, + "acc_stderr": 0.02844341422643831, + "acc_norm": 0.5032258064516129, + "acc_norm_stderr": 0.02844341422643831 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6623931623931624, + "acc_stderr": 0.030980296992618554, + "acc_norm": 0.6623931623931624, + "acc_norm_stderr": 0.030980296992618554 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.49433962264150944, + "acc_stderr": 0.03077090076385131, + "acc_norm": 0.49433962264150944, + "acc_norm_stderr": 0.03077090076385131 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4, + "acc_stderr": 0.0469237132203465, + "acc_norm": 0.4, + "acc_norm_stderr": 0.0469237132203465 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.028317533496066475, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.028317533496066475 + }, + 
"harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.03757949922943342, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.03757949922943342 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5621890547263682, + "acc_stderr": 0.0350808011219984, + "acc_norm": 0.5621890547263682, + "acc_norm_stderr": 0.0350808011219984 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4161849710982659, + "acc_stderr": 0.03758517775404947, + "acc_norm": 0.4161849710982659, + "acc_norm_stderr": 0.03758517775404947 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3915343915343915, + "acc_stderr": 0.02513809138885111, + "acc_norm": 0.3915343915343915, + "acc_norm_stderr": 0.02513809138885111 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3541666666666667, + "acc_stderr": 0.039994111357535424, + "acc_norm": 0.3541666666666667, + "acc_norm_stderr": 0.039994111357535424 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.65, + "acc_stderr": 0.04793724854411019, + "acc_norm": 0.65, + "acc_norm_stderr": 0.04793724854411019 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.45664739884393063, + "acc_stderr": 0.02681771813034892, + "acc_norm": 0.45664739884393063, + "acc_norm_stderr": 0.02681771813034892 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3803680981595092, + "acc_stderr": 0.03814269893261835, + "acc_norm": 0.3803680981595092, + "acc_norm_stderr": 0.03814269893261835 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5092592592592593, + "acc_stderr": 0.027815973433878014, + "acc_norm": 0.5092592592592593, + "acc_norm_stderr": 0.027815973433878014 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.047609522856952344, + "acc_norm": 0.34, + "acc_norm_stderr": 0.047609522856952344 + }, + 
"harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.41450777202072536, + "acc_stderr": 0.03555300319557672, + "acc_norm": 0.41450777202072536, + "acc_norm_stderr": 0.03555300319557672 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3157894736842105, + "acc_stderr": 0.04372748290278007, + "acc_norm": 0.3157894736842105, + "acc_norm_stderr": 0.04372748290278007 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5192660550458715, + "acc_stderr": 0.021421402982548892, + "acc_norm": 0.5192660550458715, + "acc_norm_stderr": 0.021421402982548892 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.0442626668137991, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.0442626668137991 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5130718954248366, + "acc_stderr": 0.028620130800700246, + "acc_norm": 0.5130718954248366, + "acc_norm_stderr": 0.028620130800700246 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.55, + "acc_stderr": 0.04999999999999999, + "acc_norm": 0.55, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6694214876033058, + "acc_stderr": 0.04294340845212094, + "acc_norm": 0.6694214876033058, + "acc_norm_stderr": 0.04294340845212094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.46710526315789475, + "acc_stderr": 0.04060127035236397, + "acc_norm": 0.46710526315789475, + "acc_norm_stderr": 0.04060127035236397 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3545751633986928, + "acc_stderr": 0.019353360547553704, + "acc_norm": 0.3545751633986928, + "acc_norm_stderr": 0.019353360547553704 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3049645390070922, + "acc_stderr": 0.02746470844202213, + "acc_norm": 0.3049645390070922, + "acc_norm_stderr": 0.02746470844202213 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.38392857142857145, + "acc_stderr": 0.04616143075028546, + "acc_norm": 
0.38392857142857145, + "acc_norm_stderr": 0.04616143075028546 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.03350991604696044, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.03350991604696044 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2849162011173184, + "acc_stderr": 0.015096222302469802, + "acc_norm": 0.2849162011173184, + "acc_norm_stderr": 0.015096222302469802 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.63, + "acc_stderr": 0.04852365870939098, + "acc_norm": 0.63, + "acc_norm_stderr": 0.04852365870939098 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3639705882352941, + "acc_stderr": 0.029227192460032025, + "acc_norm": 0.3639705882352941, + "acc_norm_stderr": 0.029227192460032025 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5265306122448979, + "acc_stderr": 0.03196412734523272, + "acc_norm": 0.5265306122448979, + "acc_norm_stderr": 0.03196412734523272 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5527426160337553, + "acc_stderr": 0.03236564251614192, + "acc_norm": 0.5527426160337553, + "acc_norm_stderr": 0.03236564251614192 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3259452411994785, + "acc_stderr": 0.01197150729498278, + "acc_norm": 0.3259452411994785, + "acc_norm_stderr": 0.01197150729498278 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.47058823529411764, + "acc_stderr": 0.03503235296367992, + "acc_norm": 0.47058823529411764, + "acc_norm_stderr": 0.03503235296367992 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5272727272727272, + "acc_stderr": 0.03898531605579419, + "acc_norm": 0.5272727272727272, + "acc_norm_stderr": 0.03898531605579419 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 
0.3072215422276622, + "mc1_stderr": 0.016150201321323006, + "mc2": 0.48425032270827056, + "mc2_stderr": 0.016273013374347334 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.448642266824085, + "acc_stderr": 0.017099430514725785, + "acc_norm": 0.47461629279811096, + "acc_norm_stderr": 0.01716818720142925 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "hyokwan/hkcode_llama3_8b", + "model_sha": "76e057d17da4a4ac6f6157b8f3749bcfb706ce8a", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/hyokwan/hkcode_solar_10.7b/result_2024-06-25 13:18:18.json b/hyokwan/hkcode_solar_10.7b/result_2024-06-25 13:18:18.json new file mode 100644 index 0000000000000000000000000000000000000000..d0b076d1b205f222d529a246bec853656a624355 --- /dev/null +++ b/hyokwan/hkcode_solar_10.7b/result_2024-06-25 13:18:18.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.34982935153583616, + "acc_stderr": 0.013936809212158298, + "acc_norm": 0.40187713310580203, + "acc_norm_stderr": 0.014327268614578276 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3805018920533758, + "acc_stderr": 
0.004845180034271624, + "acc_norm": 0.48297151961760604, + "acc_norm_stderr": 0.004986886806565646 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.52046783625731, + "acc_stderr": 0.038316105328219295, + "acc_norm": 0.52046783625731, + "acc_norm_stderr": 0.038316105328219295 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6213592233009708, + "acc_stderr": 0.04802694698258973, + "acc_norm": 0.6213592233009708, + "acc_norm_stderr": 0.04802694698258973 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5466155810983397, + "acc_stderr": 0.01780208713585031, + "acc_norm": 0.5466155810983397, + "acc_norm_stderr": 0.01780208713585031 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.04244633238353229, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.04244633238353229 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421255, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421255 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4808510638297872, + "acc_stderr": 0.032662042990646775, + "acc_norm": 0.4808510638297872, + "acc_norm_stderr": 0.032662042990646775 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.463855421686747, + "acc_stderr": 0.03882310850890594, + "acc_norm": 0.463855421686747, + "acc_norm_stderr": 0.03882310850890594 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5369774919614148, + "acc_stderr": 0.028320325830105908, + "acc_norm": 0.5369774919614148, + "acc_norm_stderr": 0.028320325830105908 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5022421524663677, + "acc_stderr": 0.033557465352232634, + "acc_norm": 0.5022421524663677, + "acc_norm_stderr": 0.033557465352232634 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48091603053435117, + "acc_stderr": 0.04382094705550988, + "acc_norm": 0.48091603053435117, + "acc_norm_stderr": 0.04382094705550988 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.44, + "acc_stderr": 
0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6161616161616161, + "acc_stderr": 0.03464881675016338, + "acc_norm": 0.6161616161616161, + "acc_norm_stderr": 0.03464881675016338 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.503448275862069, + "acc_stderr": 0.04166567577101579, + "acc_norm": 0.503448275862069, + "acc_norm_stderr": 0.04166567577101579 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3235294117647059, + "acc_stderr": 0.046550104113196177, + "acc_norm": 0.3235294117647059, + "acc_norm_stderr": 0.046550104113196177 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5630252100840336, + "acc_stderr": 0.03221943636566196, + "acc_norm": 0.5630252100840336, + "acc_norm_stderr": 0.03221943636566196 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.45897435897435895, + "acc_stderr": 0.025265525491284295, + "acc_norm": 0.45897435897435895, + "acc_norm_stderr": 0.025265525491284295 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6018518518518519, + "acc_stderr": 0.04732332615978814, + "acc_norm": 0.6018518518518519, + "acc_norm_stderr": 0.04732332615978814 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3793103448275862, + "acc_stderr": 0.034139638059062345, + "acc_norm": 0.3793103448275862, + "acc_norm_stderr": 0.034139638059062345 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5096774193548387, + "acc_stderr": 0.028438677998909558, + "acc_norm": 0.5096774193548387, + "acc_norm_stderr": 0.028438677998909558 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 
0.7350427350427351, + "acc_stderr": 0.028911208802749465, + "acc_norm": 0.7350427350427351, + "acc_norm_stderr": 0.028911208802749465 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.44150943396226416, + "acc_stderr": 0.030561590426731833, + "acc_norm": 0.44150943396226416, + "acc_norm_stderr": 0.030561590426731833 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5545454545454546, + "acc_stderr": 0.047605488214603246, + "acc_norm": 0.5545454545454546, + "acc_norm_stderr": 0.047605488214603246 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.027940457136228402, + "acc_norm": 0.3, + "acc_norm_stderr": 0.027940457136228402 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.03757949922943342, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.03757949922943342 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6517412935323383, + "acc_stderr": 0.033687874661154596, + "acc_norm": 0.6517412935323383, + "acc_norm_stderr": 0.033687874661154596 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4161849710982659, + "acc_stderr": 0.03758517775404947, + "acc_norm": 0.4161849710982659, + "acc_norm_stderr": 0.03758517775404947 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.024594975128920945, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.024594975128920945 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3819444444444444, + "acc_stderr": 0.040629907841466674, + "acc_norm": 0.3819444444444444, + "acc_norm_stderr": 0.040629907841466674 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.68, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.68, + "acc_norm_stderr": 0.046882617226215034 + }, + 
"harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5606936416184971, + "acc_stderr": 0.026720034380514995, + "acc_norm": 0.5606936416184971, + "acc_norm_stderr": 0.026720034380514995 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5153374233128835, + "acc_stderr": 0.039265223787088424, + "acc_norm": 0.5153374233128835, + "acc_norm_stderr": 0.039265223787088424 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5092592592592593, + "acc_stderr": 0.027815973433878014, + "acc_norm": 0.5092592592592593, + "acc_norm_stderr": 0.027815973433878014 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5854922279792746, + "acc_stderr": 0.035553003195576686, + "acc_norm": 0.5854922279792746, + "acc_norm_stderr": 0.035553003195576686 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.45614035087719296, + "acc_stderr": 0.046854730419077895, + "acc_norm": 0.45614035087719296, + "acc_norm_stderr": 0.046854730419077895 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5467889908256881, + "acc_stderr": 0.021343255165546037, + "acc_norm": 0.5467889908256881, + "acc_norm_stderr": 0.021343255165546037 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4126984126984127, + "acc_stderr": 0.04403438954768177, + "acc_norm": 0.4126984126984127, + "acc_norm_stderr": 0.04403438954768177 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.48366013071895425, + "acc_stderr": 0.028614624752805413, + "acc_norm": 0.48366013071895425, + "acc_norm_stderr": 0.028614624752805413 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.53, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.53, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6611570247933884, + "acc_stderr": 0.0432076780753667, + "acc_norm": 0.6611570247933884, + 
"acc_norm_stderr": 0.0432076780753667 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5131578947368421, + "acc_stderr": 0.04067533136309173, + "acc_norm": 0.5131578947368421, + "acc_norm_stderr": 0.04067533136309173 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4297385620915033, + "acc_stderr": 0.020027122784928533, + "acc_norm": 0.4297385620915033, + "acc_norm_stderr": 0.020027122784928533 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.35106382978723405, + "acc_stderr": 0.028473501272963764, + "acc_norm": 0.35106382978723405, + "acc_norm_stderr": 0.028473501272963764 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.45535714285714285, + "acc_stderr": 0.04726835553719099, + "acc_norm": 0.45535714285714285, + "acc_norm_stderr": 0.04726835553719099 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4027777777777778, + "acc_stderr": 0.03344887382997866, + "acc_norm": 0.4027777777777778, + "acc_norm_stderr": 0.03344887382997866 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.19329608938547485, + "acc_stderr": 0.013206868561343232, + "acc_norm": 0.19329608938547485, + "acc_norm_stderr": 0.013206868561343232 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3786764705882353, + "acc_stderr": 0.029465133639776125, + "acc_norm": 0.3786764705882353, + "acc_norm_stderr": 0.029465133639776125 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5755102040816327, + "acc_stderr": 0.03164209487942942, + "acc_norm": 0.5755102040816327, + "acc_norm_stderr": 0.03164209487942942 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6371308016877637, 
+ "acc_stderr": 0.03129920825530213, + "acc_norm": 0.6371308016877637, + "acc_norm_stderr": 0.03129920825530213 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3833116036505867, + "acc_stderr": 0.012417603662901188, + "acc_norm": 0.3833116036505867, + "acc_norm_stderr": 0.012417603662901188 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5343137254901961, + "acc_stderr": 0.03501038327635897, + "acc_norm": 0.5343137254901961, + "acc_norm_stderr": 0.03501038327635897 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5333333333333333, + "acc_stderr": 0.03895658065271847, + "acc_norm": 0.5333333333333333, + "acc_norm_stderr": 0.03895658065271847 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.26560587515299877, + "mc1_stderr": 0.015461027627253597, + "mc2": 0.43828541660153714, + "mc2_stderr": 0.01634151225389109 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4592680047225502, + "acc_stderr": 0.017133218276537673, + "acc_norm": 0.46871310507674147, + "acc_norm_stderr": 0.017156666859785463 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + 
"harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "hyokwan/hkcode_solar_10.7b", + "model_sha": "e43d3af9ef3ce5c77db91288dac6fb1f15987f4b", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 
0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/hyunjae/polyglot-ko-3.8b-total/result_2024-01-30 08:02:48.json b/hyunjae/polyglot-ko-3.8b-total/result_2024-01-30 08:02:48.json new file mode 100644 index 0000000000000000000000000000000000000000..dfb7c1b35881d26229adfa6dc4de2df700854aff --- /dev/null +++ b/hyunjae/polyglot-ko-3.8b-total/result_2024-01-30 08:02:48.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2764505119453925, + "acc_stderr": 0.013069662474252428, + "acc_norm": 0.3438566552901024, + "acc_norm_stderr": 0.013880644570156224 + }, + "harness|ko_hellaswag|10": { + "acc": 0.37661820354511055, + "acc_stderr": 0.0048354759576109295, + "acc_norm": 0.4778928500298745, + "acc_norm_stderr": 0.0049849017528463945 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.3216374269005848, + "acc_stderr": 0.03582529442573122, + "acc_norm": 0.3216374269005848, + "acc_norm_stderr": 0.03582529442573122 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.3300970873786408, + "acc_stderr": 0.04656147110012351, + "acc_norm": 0.3300970873786408, + "acc_norm_stderr": 0.04656147110012351 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2822477650063857, + "acc_stderr": 0.01609530296987853, + "acc_norm": 0.2822477650063857, + "acc_norm_stderr": 0.01609530296987853 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2740740740740741, + "acc_stderr": 0.03853254836552003, + "acc_norm": 0.2740740740740741, + "acc_norm_stderr": 0.03853254836552003 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2723404255319149, + "acc_stderr": 0.029101290698386705, + "acc_norm": 0.2723404255319149, + "acc_norm_stderr": 0.029101290698386705 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3132530120481928, 
+ "acc_stderr": 0.036108050180310235, + "acc_norm": 0.3132530120481928, + "acc_norm_stderr": 0.036108050180310235 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2733118971061093, + "acc_stderr": 0.02531176597542612, + "acc_norm": 0.2733118971061093, + "acc_norm_stderr": 0.02531176597542612 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.273542600896861, + "acc_stderr": 0.029918586707798834, + "acc_norm": 0.273542600896861, + "acc_norm_stderr": 0.029918586707798834 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.37404580152671757, + "acc_stderr": 0.042438692422305246, + "acc_norm": 0.37404580152671757, + "acc_norm_stderr": 0.042438692422305246 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3686868686868687, + "acc_stderr": 0.034373055019806184, + "acc_norm": 0.3686868686868687, + "acc_norm_stderr": 0.034373055019806184 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.31724137931034485, + "acc_stderr": 0.03878352372138623, + "acc_norm": 0.31724137931034485, + "acc_norm_stderr": 0.03878352372138623 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.17647058823529413, + "acc_stderr": 0.03793281185307809, + "acc_norm": 0.17647058823529413, + "acc_norm_stderr": 0.03793281185307809 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3277310924369748, + "acc_stderr": 0.03048991141767323, + "acc_norm": 0.3277310924369748, + "acc_norm_stderr": 0.03048991141767323 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.358974358974359, + "acc_stderr": 0.024321738484602354, + "acc_norm": 0.358974358974359, + "acc_norm_stderr": 0.024321738484602354 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + 
"harness|ko_mmlu_global_facts|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.04616631111801714, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.04616631111801714 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3054187192118227, + "acc_stderr": 0.03240661565868408, + "acc_norm": 0.3054187192118227, + "acc_norm_stderr": 0.03240661565868408 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3419354838709677, + "acc_stderr": 0.02698528957655274, + "acc_norm": 0.3419354838709677, + "acc_norm_stderr": 0.02698528957655274 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.41025641025641024, + "acc_stderr": 0.03222414045241106, + "acc_norm": 0.41025641025641024, + "acc_norm_stderr": 0.03222414045241106 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3886792452830189, + "acc_stderr": 0.030000485448675986, + "acc_norm": 0.3886792452830189, + "acc_norm_stderr": 0.030000485448675986 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.24545454545454545, + "acc_stderr": 0.041220665028782834, + "acc_norm": 0.24545454545454545, + "acc_norm_stderr": 0.041220665028782834 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2111111111111111, + "acc_stderr": 0.024882116857655075, + "acc_norm": 0.2111111111111111, + "acc_norm_stderr": 0.024882116857655075 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.271523178807947, + "acc_stderr": 0.03631329803969653, + "acc_norm": 0.271523178807947, + "acc_norm_stderr": 0.03631329803969653 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.4228855721393035, + "acc_stderr": 0.034932317774212816, + "acc_norm": 0.4228855721393035, + "acc_norm_stderr": 0.034932317774212816 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3063583815028902, + "acc_stderr": 0.03514942551267437, + "acc_norm": 
0.3063583815028902, + "acc_norm_stderr": 0.03514942551267437 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.02256989707491841, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.02256989707491841 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2916666666666667, + "acc_stderr": 0.038009680605548594, + "acc_norm": 0.2916666666666667, + "acc_norm_stderr": 0.038009680605548594 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.32947976878612717, + "acc_stderr": 0.025305258131879716, + "acc_norm": 0.32947976878612717, + "acc_norm_stderr": 0.025305258131879716 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.25766871165644173, + "acc_stderr": 0.03436150827846917, + "acc_norm": 0.25766871165644173, + "acc_norm_stderr": 0.03436150827846917 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.32098765432098764, + "acc_stderr": 0.025976566010862744, + "acc_norm": 0.32098765432098764, + "acc_norm_stderr": 0.025976566010862744 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384739, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384739 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.38341968911917096, + "acc_stderr": 0.03508984236295342, + "acc_norm": 0.38341968911917096, + "acc_norm_stderr": 0.03508984236295342 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3157894736842105, + "acc_stderr": 0.04372748290278007, + "acc_norm": 0.3157894736842105, + "acc_norm_stderr": 0.04372748290278007 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.344954128440367, + "acc_stderr": 
0.020380605405066962, + "acc_norm": 0.344954128440367, + "acc_norm_stderr": 0.020380605405066962 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.23015873015873015, + "acc_stderr": 0.03764950879790607, + "acc_norm": 0.23015873015873015, + "acc_norm_stderr": 0.03764950879790607 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.027826109307283683, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.027826109307283683 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2892561983471074, + "acc_stderr": 0.041391127276354626, + "acc_norm": 0.2892561983471074, + "acc_norm_stderr": 0.041391127276354626 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.29605263157894735, + "acc_stderr": 0.03715062154998904, + "acc_norm": 0.29605263157894735, + "acc_norm_stderr": 0.03715062154998904 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.018054027458815194, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.018054027458815194 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.30141843971631205, + "acc_stderr": 0.02737412888263115, + "acc_norm": 0.30141843971631205, + "acc_norm_stderr": 0.02737412888263115 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25892857142857145, + "acc_stderr": 0.041577515398656284, + "acc_norm": 0.25892857142857145, + "acc_norm_stderr": 0.041577515398656284 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.35648148148148145, + "acc_stderr": 0.032664783315272714, + "acc_norm": 0.35648148148148145, + "acc_norm_stderr": 0.032664783315272714 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2670391061452514, + "acc_stderr": 0.014796502622562557, + "acc_norm": 0.2670391061452514, + "acc_norm_stderr": 0.014796502622562557 + }, + 
"harness|ko_mmlu_college_computer_science|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.19, + "acc_stderr": 0.03942772444036623, + "acc_norm": 0.19, + "acc_norm_stderr": 0.03942772444036623 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3786764705882353, + "acc_stderr": 0.029465133639776125, + "acc_norm": 0.3786764705882353, + "acc_norm_stderr": 0.029465133639776125 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.39183673469387753, + "acc_stderr": 0.03125127591089165, + "acc_norm": 0.39183673469387753, + "acc_norm_stderr": 0.03125127591089165 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.4092827004219409, + "acc_stderr": 0.032007041833595914, + "acc_norm": 0.4092827004219409, + "acc_norm_stderr": 0.032007041833595914 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2770534550195567, + "acc_stderr": 0.011430462443719674, + "acc_norm": 0.2770534550195567, + "acc_norm_stderr": 0.011430462443719674 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.3382352941176471, + "acc_stderr": 0.03320574612945431, + "acc_norm": 0.3382352941176471, + "acc_norm_stderr": 0.03320574612945431 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.38181818181818183, + "acc_stderr": 0.037937131711656344, + "acc_norm": 0.38181818181818183, + "acc_norm_stderr": 0.037937131711656344 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2558139534883721, + "mc1_stderr": 0.015274176219283344, + "mc2": 0.4187113040924125, + "mc2_stderr": 0.015493444513670155 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3364817001180638, + "acc_stderr": 0.01624508529438656, + "acc_norm": 0.44155844155844154, + "acc_norm_stderr": 0.0170725258755631 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 
1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + 
"harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "hyunjae/polyglot-ko-3.8b-total", + "model_sha": "25414de9f517d2e821fd39f04586f77994fdc92c", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/hyunseoki/ko-en-llama2-13b/result_2023-10-02 15:05:47.json b/hyunseoki/ko-en-llama2-13b/result_2023-10-02 15:05:47.json new file mode 100644 index 0000000000000000000000000000000000000000..b86f5e56b299d60e1c277a5726a69607e0633354 --- /dev/null +++ b/hyunseoki/ko-en-llama2-13b/result_2023-10-02 15:05:47.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3660409556313993, + "acc_stderr": 0.01407722310847014, + "acc_norm": 0.42150170648464164, + "acc_norm_stderr": 0.014430197069326021 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4063931487751444, + "acc_stderr": 0.004901558132335531, + "acc_norm": 0.5423222465644294, + "acc_norm_stderr": 0.004971874159777693 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5672514619883041, + "acc_stderr": 0.03799978644370607, + "acc_norm": 0.5672514619883041, + "acc_norm_stderr": 0.03799978644370607 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.49514563106796117, + "acc_stderr": 0.049505043821289195, + "acc_norm": 0.49514563106796117, + "acc_norm_stderr": 0.049505043821289195 + }, + 
"harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4827586206896552, + "acc_stderr": 0.017869330154003705, + "acc_norm": 0.4827586206896552, + "acc_norm_stderr": 0.017869330154003705 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37777777777777777, + "acc_stderr": 0.04188307537595853, + "acc_norm": 0.37777777777777777, + "acc_norm_stderr": 0.04188307537595853 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206824, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206824 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3702127659574468, + "acc_stderr": 0.03156564682236784, + "acc_norm": 0.3702127659574468, + "acc_norm_stderr": 0.03156564682236784 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.40963855421686746, + "acc_stderr": 0.038284011150790206, + "acc_norm": 0.40963855421686746, + "acc_norm_stderr": 0.038284011150790206 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.47266881028938906, + "acc_stderr": 0.02835563356832818, + "acc_norm": 0.47266881028938906, + "acc_norm_stderr": 0.02835563356832818 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.34977578475336324, + "acc_stderr": 0.03200736719484503, + "acc_norm": 0.34977578475336324, + "acc_norm_stderr": 0.03200736719484503 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48854961832061067, + "acc_stderr": 0.04384140024078016, + "acc_norm": 0.48854961832061067, + "acc_norm_stderr": 0.04384140024078016 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5252525252525253, + "acc_stderr": 0.035578062450873145, + "acc_norm": 0.5252525252525253, + "acc_norm_stderr": 0.035578062450873145 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4206896551724138, + "acc_stderr": 0.0411391498118926, + "acc_norm": 0.4206896551724138, + "acc_norm_stderr": 
0.0411391498118926 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.18627450980392157, + "acc_stderr": 0.03873958714149351, + "acc_norm": 0.18627450980392157, + "acc_norm_stderr": 0.03873958714149351 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.42436974789915966, + "acc_stderr": 0.032104790510157764, + "acc_norm": 0.42436974789915966, + "acc_norm_stderr": 0.032104790510157764 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4, + "acc_stderr": 0.02483881198803315, + "acc_norm": 0.4, + "acc_norm_stderr": 0.02483881198803315 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.42592592592592593, + "acc_stderr": 0.0478034362693679, + "acc_norm": 0.42592592592592593, + "acc_norm_stderr": 0.0478034362693679 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3694581280788177, + "acc_stderr": 0.03395970381998575, + "acc_norm": 0.3694581280788177, + "acc_norm_stderr": 0.03395970381998575 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.41935483870967744, + "acc_stderr": 0.028071588901091852, + "acc_norm": 0.41935483870967744, + "acc_norm_stderr": 0.028071588901091852 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6196581196581197, + "acc_stderr": 0.03180425204384099, + "acc_norm": 0.6196581196581197, + "acc_norm_stderr": 0.03180425204384099 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4641509433962264, + "acc_stderr": 0.030693675018458003, + "acc_norm": 0.4641509433962264, + "acc_norm_stderr": 0.030693675018458003 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4, + "acc_stderr": 0.0469237132203465, + "acc_norm": 0.4, + "acc_norm_stderr": 0.0469237132203465 + 
}, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.27037037037037037, + "acc_stderr": 0.027080372815145647, + "acc_norm": 0.27037037037037037, + "acc_norm_stderr": 0.027080372815145647 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2781456953642384, + "acc_stderr": 0.03658603262763743, + "acc_norm": 0.2781456953642384, + "acc_norm_stderr": 0.03658603262763743 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5124378109452736, + "acc_stderr": 0.03534439848539579, + "acc_norm": 0.5124378109452736, + "acc_norm_stderr": 0.03534439848539579 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.34104046242774566, + "acc_stderr": 0.03614665424180826, + "acc_norm": 0.34104046242774566, + "acc_norm_stderr": 0.03614665424180826 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2698412698412698, + "acc_stderr": 0.022860838309232072, + "acc_norm": 0.2698412698412698, + "acc_norm_stderr": 0.022860838309232072 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.04016660030451233, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.04016660030451233 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956913, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956913 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4190751445086705, + "acc_stderr": 0.02656417811142262, + "acc_norm": 0.4190751445086705, + "acc_norm_stderr": 0.02656417811142262 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.36809815950920244, + "acc_stderr": 0.03789213935838396, + "acc_norm": 0.36809815950920244, + "acc_norm_stderr": 0.03789213935838396 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4567901234567901, + "acc_stderr": 0.02771666165019404, + "acc_norm": 0.4567901234567901, + 
"acc_norm_stderr": 0.02771666165019404 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.42487046632124353, + "acc_stderr": 0.0356747133521254, + "acc_norm": 0.42487046632124353, + "acc_norm_stderr": 0.0356747133521254 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2894736842105263, + "acc_stderr": 0.04266339443159394, + "acc_norm": 0.2894736842105263, + "acc_norm_stderr": 0.04266339443159394 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.46972477064220186, + "acc_stderr": 0.021397988604936965, + "acc_norm": 0.46972477064220186, + "acc_norm_stderr": 0.021397988604936965 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2698412698412698, + "acc_stderr": 0.03970158273235172, + "acc_norm": 0.2698412698412698, + "acc_norm_stderr": 0.03970158273235172 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.42810457516339867, + "acc_stderr": 0.028332397483664274, + "acc_norm": 0.42810457516339867, + "acc_norm_stderr": 0.028332397483664274 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5619834710743802, + "acc_stderr": 0.04529146804435792, + "acc_norm": 0.5619834710743802, + "acc_norm_stderr": 0.04529146804435792 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4342105263157895, + "acc_stderr": 0.0403356566784832, + "acc_norm": 0.4342105263157895, + "acc_norm_stderr": 0.0403356566784832 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3006535947712418, + "acc_stderr": 0.018550634502952964, + "acc_norm": 0.3006535947712418, + "acc_norm_stderr": 0.018550634502952964 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3120567375886525, + "acc_stderr": 
0.027640120545169938, + "acc_norm": 0.3120567375886525, + "acc_norm_stderr": 0.027640120545169938 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.20535714285714285, + "acc_stderr": 0.03834241021419073, + "acc_norm": 0.20535714285714285, + "acc_norm_stderr": 0.03834241021419073 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.37962962962962965, + "acc_stderr": 0.03309682581119035, + "acc_norm": 0.37962962962962965, + "acc_norm_stderr": 0.03309682581119035 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.2610294117647059, + "acc_stderr": 0.02667925227010311, + "acc_norm": 0.2610294117647059, + "acc_norm_stderr": 0.02667925227010311 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.46122448979591835, + "acc_stderr": 0.03191282052669277, + "acc_norm": 0.46122448979591835, + "acc_norm_stderr": 0.03191282052669277 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.47257383966244726, + "acc_stderr": 0.03249822718301303, + "acc_norm": 0.47257383966244726, + "acc_norm_stderr": 0.03249822718301303 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3005215123859192, + "acc_stderr": 0.011709918883039119, + "acc_norm": 0.3005215123859192, + "acc_norm_stderr": 0.011709918883039119 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.03410785338904719, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.03410785338904719 + }, + 
"harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4484848484848485, + "acc_stderr": 0.03883565977956929, + "acc_norm": 0.4484848484848485, + "acc_norm_stderr": 0.03883565977956929 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24724602203182375, + "mc1_stderr": 0.015102404797359649, + "mc2": 0.40740955216969593, + "mc2_stderr": 0.01489940591651966 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4557260920897285, + "acc_stderr": 0.017122829143292648, + "acc_norm": 0.5194805194805194, + "acc_norm_stderr": 0.017177301992342544 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + 
"harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "hyunseoki/ko-en-llama2-13b", + "model_sha": "2768cf6f955b65868ccbb20658e2cc444b2f3be9", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/hyunseoki/ko-ref-llama2-13b/result_2023-10-04 08:17:01.json b/hyunseoki/ko-ref-llama2-13b/result_2023-10-04 08:17:01.json new file mode 100644 index 0000000000000000000000000000000000000000..f35699e65cd1bb3a181457205a6a21bd1e03570e --- /dev/null +++ b/hyunseoki/ko-ref-llama2-13b/result_2023-10-04 08:17:01.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 
0.37457337883959047, + "acc_stderr": 0.014144193471893456, + "acc_norm": 0.43600682593856654, + "acc_norm_stderr": 0.014491225699230916 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3990240987851026, + "acc_stderr": 0.004886969266944274, + "acc_norm": 0.5257916749651463, + "acc_norm_stderr": 0.00498313847960438 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4619883040935672, + "acc_stderr": 0.03823727092882307, + "acc_norm": 0.4619883040935672, + "acc_norm_stderr": 0.03823727092882307 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2621359223300971, + "acc_stderr": 0.04354631077260595, + "acc_norm": 0.2621359223300971, + "acc_norm_stderr": 0.04354631077260595 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.40229885057471265, + "acc_stderr": 0.01753529452906895, + "acc_norm": 0.40229885057471265, + "acc_norm_stderr": 0.01753529452906895 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.362962962962963, + "acc_stderr": 0.041539484047424, + "acc_norm": 0.362962962962963, + "acc_norm_stderr": 0.041539484047424 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3276595744680851, + "acc_stderr": 0.030683020843231008, + "acc_norm": 0.3276595744680851, + "acc_norm_stderr": 0.030683020843231008 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.2710843373493976, + "acc_stderr": 0.03460579907553026, + "acc_norm": 0.2710843373493976, + "acc_norm_stderr": 0.03460579907553026 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3440514469453376, + "acc_stderr": 0.026981478043648022, + "acc_norm": 0.3440514469453376, + "acc_norm_stderr": 0.026981478043648022 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.38565022421524664, + "acc_stderr": 0.03266842214289201, + "acc_norm": 0.38565022421524664, + "acc_norm_stderr": 0.03266842214289201 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 
0.31297709923664124, + "acc_stderr": 0.04066962905677697, + "acc_norm": 0.31297709923664124, + "acc_norm_stderr": 0.04066962905677697 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3282828282828283, + "acc_stderr": 0.03345678422756777, + "acc_norm": 0.3282828282828283, + "acc_norm_stderr": 0.03345678422756777 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3103448275862069, + "acc_stderr": 0.03855289616378948, + "acc_norm": 0.3103448275862069, + "acc_norm_stderr": 0.03855289616378948 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.17647058823529413, + "acc_stderr": 0.03793281185307811, + "acc_norm": 0.17647058823529413, + "acc_norm_stderr": 0.03793281185307811 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.029597329730978103, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.029597329730978103 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2717948717948718, + "acc_stderr": 0.022556551010132354, + "acc_norm": 0.2717948717948718, + "acc_norm_stderr": 0.022556551010132354 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.046166311118017125, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.046166311118017125 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.03178529710642749, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.03178529710642749 + }, + 
"harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3096774193548387, + "acc_stderr": 0.026302774983517418, + "acc_norm": 0.3096774193548387, + "acc_norm_stderr": 0.026302774983517418 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.41452991452991456, + "acc_stderr": 0.03227396567623778, + "acc_norm": 0.41452991452991456, + "acc_norm_stderr": 0.03227396567623778 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2943396226415094, + "acc_stderr": 0.028049186315695238, + "acc_norm": 0.2943396226415094, + "acc_norm_stderr": 0.028049186315695238 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2727272727272727, + "acc_stderr": 0.04265792110940588, + "acc_norm": 0.2727272727272727, + "acc_norm_stderr": 0.04265792110940588 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2740740740740741, + "acc_stderr": 0.027195934804085622, + "acc_norm": 0.2740740740740741, + "acc_norm_stderr": 0.027195934804085622 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2781456953642384, + "acc_stderr": 0.03658603262763743, + "acc_norm": 0.2781456953642384, + "acc_norm_stderr": 0.03658603262763743 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.373134328358209, + "acc_stderr": 0.034198326081760065, + "acc_norm": 0.373134328358209, + "acc_norm_stderr": 0.034198326081760065 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2543352601156069, + "acc_stderr": 0.0332055644308557, + "acc_norm": 0.2543352601156069, + "acc_norm_stderr": 0.0332055644308557 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30423280423280424, + "acc_stderr": 0.023695415009463084, + "acc_norm": 0.30423280423280424, + "acc_norm_stderr": 0.023695415009463084 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2708333333333333, + "acc_stderr": 0.03716177437566017, + "acc_norm": 0.2708333333333333, + "acc_norm_stderr": 0.03716177437566017 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.19, + "acc_stderr": 0.03942772444036624, + 
"acc_norm": 0.19, + "acc_norm_stderr": 0.03942772444036624 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.35260115606936415, + "acc_stderr": 0.025722802200895803, + "acc_norm": 0.35260115606936415, + "acc_norm_stderr": 0.025722802200895803 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.294478527607362, + "acc_stderr": 0.03581165790474082, + "acc_norm": 0.294478527607362, + "acc_norm_stderr": 0.03581165790474082 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.32407407407407407, + "acc_stderr": 0.026041766202717163, + "acc_norm": 0.32407407407407407, + "acc_norm_stderr": 0.026041766202717163 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.2694300518134715, + "acc_stderr": 0.032018671228777947, + "acc_norm": 0.2694300518134715, + "acc_norm_stderr": 0.032018671228777947 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.19298245614035087, + "acc_stderr": 0.03712454853721368, + "acc_norm": 0.19298245614035087, + "acc_norm_stderr": 0.03712454853721368 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.326605504587156, + "acc_stderr": 0.020106990889937306, + "acc_norm": 0.326605504587156, + "acc_norm_stderr": 0.020106990889937306 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.11904761904761904, + "acc_stderr": 0.028965535858562975, + "acc_norm": 0.11904761904761904, + "acc_norm_stderr": 0.028965535858562975 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.30392156862745096, + "acc_stderr": 0.026336613469046626, + "acc_norm": 0.30392156862745096, + "acc_norm_stderr": 0.026336613469046626 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.31, + "acc_stderr": 
0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.4380165289256198, + "acc_stderr": 0.045291468044357915, + "acc_norm": 0.4380165289256198, + "acc_norm_stderr": 0.045291468044357915 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.29605263157894735, + "acc_stderr": 0.03715062154998905, + "acc_norm": 0.29605263157894735, + "acc_norm_stderr": 0.03715062154998905 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.29248366013071897, + "acc_stderr": 0.018403415710109783, + "acc_norm": 0.29248366013071897, + "acc_norm_stderr": 0.018403415710109783 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.25177304964539005, + "acc_stderr": 0.0258921511567094, + "acc_norm": 0.25177304964539005, + "acc_norm_stderr": 0.0258921511567094 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.23214285714285715, + "acc_stderr": 0.04007341809755808, + "acc_norm": 0.23214285714285715, + "acc_norm_stderr": 0.04007341809755808 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.23148148148148148, + "acc_stderr": 0.028765111718046937, + "acc_norm": 0.23148148148148148, + "acc_norm_stderr": 0.028765111718046937 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.18382352941176472, + "acc_stderr": 0.02352924218519311, + "acc_norm": 0.18382352941176472, + "acc_norm_stderr": 0.02352924218519311 + }, + 
"harness|ko_mmlu_security_studies|5": { + "acc": 0.2897959183673469, + "acc_stderr": 0.02904308868330433, + "acc_norm": 0.2897959183673469, + "acc_norm_stderr": 0.02904308868330433 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.3881856540084388, + "acc_stderr": 0.03172295004332329, + "acc_norm": 0.3881856540084388, + "acc_norm_stderr": 0.03172295004332329 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2985658409387223, + "acc_stderr": 0.011688060141794228, + "acc_norm": 0.2985658409387223, + "acc_norm_stderr": 0.011688060141794228 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.03308611113236436, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.03308611113236436 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.37575757575757573, + "acc_stderr": 0.037818873532059816, + "acc_norm": 0.37575757575757573, + "acc_norm_stderr": 0.037818873532059816 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.25091799265605874, + "mc1_stderr": 0.015176985027707679, + "mc2": 0.4089327594647445, + "mc2_stderr": 0.01512159542972759 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.34710743801652894, + "acc_stderr": 0.016366945603281276, + "acc_norm": 0.4498229043683589, + "acc_norm_stderr": 0.017103573343825715 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, 
+ "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + 
"harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "hyunseoki/ko-ref-llama2-13b", + "model_sha": "c5d09631c88ab5012b48187ecd90ae773cd4bbd9", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/hyunseoki/ko-ref-llama2-7b/result_2023-10-04 08:18:22.json b/hyunseoki/ko-ref-llama2-7b/result_2023-10-04 08:18:22.json new file mode 100644 index 0000000000000000000000000000000000000000..4dab19767e32f21b991084a1cfef8b87fce4533d --- /dev/null +++ b/hyunseoki/ko-ref-llama2-7b/result_2023-10-04 08:18:22.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.33447098976109213, + "acc_stderr": 0.013787460322441387, + "acc_norm": 0.3848122866894198, + "acc_norm_stderr": 0.0142183710652511 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3836885082652858, + "acc_stderr": 0.0048528966817367606, + "acc_norm": 0.4970125473013344, + "acc_norm_stderr": 0.004989692344313999 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.29239766081871343, + "acc_stderr": 0.03488647713457922, + "acc_norm": 0.29239766081871343, + "acc_norm_stderr": 0.03488647713457922 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.21359223300970873, + "acc_stderr": 0.04058042015646034, + "acc_norm": 0.21359223300970873, + "acc_norm_stderr": 0.04058042015646034 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.31417624521072796, + "acc_stderr": 0.01659929173588491, + "acc_norm": 0.31417624521072796, + "acc_norm_stderr": 0.01659929173588491 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.04171654161354543, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.04171654161354543 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + 
"harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.35319148936170214, + "acc_stderr": 0.031245325202761926, + "acc_norm": 0.35319148936170214, + "acc_norm_stderr": 0.031245325202761926 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.28313253012048195, + "acc_stderr": 0.03507295431370519, + "acc_norm": 0.28313253012048195, + "acc_norm_stderr": 0.03507295431370519 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2958199356913183, + "acc_stderr": 0.025922371788818784, + "acc_norm": 0.2958199356913183, + "acc_norm_stderr": 0.025922371788818784 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.30493273542600896, + "acc_stderr": 0.030898610882477515, + "acc_norm": 0.30493273542600896, + "acc_norm_stderr": 0.030898610882477515 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2900763358778626, + "acc_stderr": 0.03980066246467765, + "acc_norm": 0.2900763358778626, + "acc_norm_stderr": 0.03980066246467765 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.29797979797979796, + "acc_stderr": 0.03258630383836554, + "acc_norm": 0.29797979797979796, + "acc_norm_stderr": 0.03258630383836554 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3724137931034483, + "acc_stderr": 0.0402873153294756, + "acc_norm": 0.3724137931034483, + "acc_norm_stderr": 0.0402873153294756 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.04336432707993179, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.04336432707993179 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.31512605042016806, + "acc_stderr": 0.030176808288974337, + "acc_norm": 0.31512605042016806, + "acc_norm_stderr": 0.030176808288974337 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.23076923076923078, + "acc_stderr": 0.02136202772522273, + 
"acc_norm": 0.23076923076923078, + "acc_norm_stderr": 0.02136202772522273 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.17, + "acc_stderr": 0.0377525168068637, + "acc_norm": 0.17, + "acc_norm_stderr": 0.0377525168068637 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.23148148148148148, + "acc_stderr": 0.04077494709252626, + "acc_norm": 0.23148148148148148, + "acc_norm_stderr": 0.04077494709252626 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2413793103448276, + "acc_stderr": 0.030108330718011625, + "acc_norm": 0.2413793103448276, + "acc_norm_stderr": 0.030108330718011625 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3193548387096774, + "acc_stderr": 0.02652270967466777, + "acc_norm": 0.3193548387096774, + "acc_norm_stderr": 0.02652270967466777 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.031937057262002924, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.031937057262002924 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.22264150943396227, + "acc_stderr": 0.02560423347089909, + "acc_norm": 0.22264150943396227, + "acc_norm_stderr": 0.02560423347089909 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.24545454545454545, + "acc_stderr": 0.041220665028782834, + "acc_norm": 0.24545454545454545, + "acc_norm_stderr": 0.041220665028782834 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.29259259259259257, + "acc_stderr": 0.02773896963217609, + "acc_norm": 0.29259259259259257, + "acc_norm_stderr": 0.02773896963217609 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2582781456953642, + "acc_stderr": 0.035737053147634576, + "acc_norm": 0.2582781456953642, + "acc_norm_stderr": 0.035737053147634576 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.31840796019900497, + 
"acc_stderr": 0.032941184790540944, + "acc_norm": 0.31840796019900497, + "acc_norm_stderr": 0.032941184790540944 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2832369942196532, + "acc_stderr": 0.03435568056047876, + "acc_norm": 0.2832369942196532, + "acc_norm_stderr": 0.03435568056047876 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2804232804232804, + "acc_stderr": 0.02313528797432563, + "acc_norm": 0.2804232804232804, + "acc_norm_stderr": 0.02313528797432563 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.25, + "acc_stderr": 0.03621034121889507, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03621034121889507 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036846, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036846 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720683, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720683 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.30346820809248554, + "acc_stderr": 0.02475241196091721, + "acc_norm": 0.30346820809248554, + "acc_norm_stderr": 0.02475241196091721 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3006134969325153, + "acc_stderr": 0.03602511318806771, + "acc_norm": 0.3006134969325153, + "acc_norm_stderr": 0.03602511318806771 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3117283950617284, + "acc_stderr": 0.025773111169630443, + "acc_norm": 0.3117283950617284, + "acc_norm_stderr": 0.025773111169630443 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.22797927461139897, + "acc_stderr": 0.030276909945178256, + "acc_norm": 0.22797927461139897, + "acc_norm_stderr": 0.030276909945178256 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.21052631578947367, + "acc_stderr": 
0.038351539543994194, + "acc_norm": 0.21052631578947367, + "acc_norm_stderr": 0.038351539543994194 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.24403669724770644, + "acc_stderr": 0.018415286351416416, + "acc_norm": 0.24403669724770644, + "acc_norm_stderr": 0.018415286351416416 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.03718489006818114, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.03718489006818114 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3104575163398693, + "acc_stderr": 0.02649303322514589, + "acc_norm": 0.3104575163398693, + "acc_norm_stderr": 0.02649303322514589 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.371900826446281, + "acc_stderr": 0.044120158066245044, + "acc_norm": 0.371900826446281, + "acc_norm_stderr": 0.044120158066245044 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.34210526315789475, + "acc_stderr": 0.03860731599316091, + "acc_norm": 0.34210526315789475, + "acc_norm_stderr": 0.03860731599316091 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.26143790849673204, + "acc_stderr": 0.017776947157528037, + "acc_norm": 0.26143790849673204, + "acc_norm_stderr": 0.017776947157528037 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.26595744680851063, + "acc_stderr": 0.026358065698880592, + "acc_norm": 0.26595744680851063, + "acc_norm_stderr": 0.026358065698880592 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.21428571428571427, + "acc_stderr": 0.03894641120044792, + "acc_norm": 0.21428571428571427, + "acc_norm_stderr": 0.03894641120044792 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.030546745264953185, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.030546745264953185 + }, + 
"harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27262569832402234, + "acc_stderr": 0.014893391735249608, + "acc_norm": 0.27262569832402234, + "acc_norm_stderr": 0.014893391735249608 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421296, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421296 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.16176470588235295, + "acc_stderr": 0.02236867256288675, + "acc_norm": 0.16176470588235295, + "acc_norm_stderr": 0.02236867256288675 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2693877551020408, + "acc_stderr": 0.02840125202902294, + "acc_norm": 0.2693877551020408, + "acc_norm_stderr": 0.02840125202902294 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.26582278481012656, + "acc_stderr": 0.028756799629658332, + "acc_norm": 0.26582278481012656, + "acc_norm_stderr": 0.028756799629658332 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2620599739243807, + "acc_stderr": 0.011231552795890394, + "acc_norm": 0.2620599739243807, + "acc_norm_stderr": 0.011231552795890394 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.25980392156862747, + "acc_stderr": 0.030778554678693264, + "acc_norm": 0.25980392156862747, + "acc_norm_stderr": 0.030778554678693264 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3151515151515151, + "acc_stderr": 0.0362773057502241, + "acc_norm": 0.3151515151515151, + "acc_norm_stderr": 0.0362773057502241 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24724602203182375, + "mc1_stderr": 0.015102404797359649, + "mc2": 0.3953129040998704, + "mc2_stderr": 0.015062425593708578 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.29279811097992914, + "acc_stderr": 0.015644823205401337, + "acc_norm": 
0.4037780401416765, + "acc_norm_stderr": 0.016869031540298632 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + 
"harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "hyunseoki/ko-ref-llama2-7b", + "model_sha": "1ee08c79ae7393473754b77e82b1472ef63d5dd2", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/iRASC/BioLlama-Ko-8B/result_2024-06-06 07:29:54.json b/iRASC/BioLlama-Ko-8B/result_2024-06-06 07:29:54.json new file mode 100644 index 0000000000000000000000000000000000000000..4e6fd36752fb785dc0583e0a46a030999ade5707 --- /dev/null +++ b/iRASC/BioLlama-Ko-8B/result_2024-06-06 07:29:54.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.42662116040955633, + "acc_stderr": 0.014453185592920293, + "acc_norm": 0.4726962457337884, + "acc_norm_stderr": 0.014589589101985994 + }, + "harness|ko_hellaswag|10": { + "acc": 0.42690699063931487, + "acc_stderr": 0.004936176784631954, + "acc_norm": 0.566620195180243, + "acc_norm_stderr": 0.0049452912700724315 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6842105263157895, + "acc_stderr": 0.03565079670708311, + "acc_norm": 0.6842105263157895, + "acc_norm_stderr": 0.03565079670708311 + }, + 
"harness|ko_mmlu_management|5": { + "acc": 0.6601941747572816, + "acc_stderr": 0.046897659372781335, + "acc_norm": 0.6601941747572816, + "acc_norm_stderr": 0.046897659372781335 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6015325670498084, + "acc_stderr": 0.01750743860277742, + "acc_norm": 0.6015325670498084, + "acc_norm_stderr": 0.01750743860277742 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.43703703703703706, + "acc_stderr": 0.04284958639753399, + "acc_norm": 0.43703703703703706, + "acc_norm_stderr": 0.04284958639753399 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.5531914893617021, + "acc_stderr": 0.0325005368436584, + "acc_norm": 0.5531914893617021, + "acc_norm_stderr": 0.0325005368436584 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4759036144578313, + "acc_stderr": 0.03887971849597264, + "acc_norm": 0.4759036144578313, + "acc_norm_stderr": 0.03887971849597264 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6012861736334405, + "acc_stderr": 0.027809322585774496, + "acc_norm": 0.6012861736334405, + "acc_norm_stderr": 0.027809322585774496 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5560538116591929, + "acc_stderr": 0.03334625674242728, + "acc_norm": 0.5560538116591929, + "acc_norm_stderr": 0.03334625674242728 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6183206106870229, + "acc_stderr": 0.04260735157644559, + "acc_norm": 0.6183206106870229, + "acc_norm_stderr": 0.04260735157644559 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6818181818181818, + "acc_stderr": 0.03318477333845331, + "acc_norm": 0.6818181818181818, + "acc_norm_stderr": 0.03318477333845331 + }, + 
"harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5862068965517241, + "acc_stderr": 0.04104269211806232, + "acc_norm": 0.5862068965517241, + "acc_norm_stderr": 0.04104269211806232 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3235294117647059, + "acc_stderr": 0.046550104113196177, + "acc_norm": 0.3235294117647059, + "acc_norm_stderr": 0.046550104113196177 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5798319327731093, + "acc_stderr": 0.03206183783236152, + "acc_norm": 0.5798319327731093, + "acc_norm_stderr": 0.03206183783236152 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5717948717948718, + "acc_stderr": 0.025088301454694827, + "acc_norm": 0.5717948717948718, + "acc_norm_stderr": 0.025088301454694827 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.59, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.59, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6203703703703703, + "acc_stderr": 0.04691521224077742, + "acc_norm": 0.6203703703703703, + "acc_norm_stderr": 0.04691521224077742 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.458128078817734, + "acc_stderr": 0.03505630140785741, + "acc_norm": 0.458128078817734, + "acc_norm_stderr": 0.03505630140785741 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5741935483870968, + "acc_stderr": 0.028129112709165908, + "acc_norm": 0.5741935483870968, + "acc_norm_stderr": 0.028129112709165908 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7692307692307693, + "acc_stderr": 0.027601921381417607, + "acc_norm": 0.7692307692307693, + "acc_norm_stderr": 0.027601921381417607 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5886792452830188, + "acc_stderr": 0.03028500925900978, + "acc_norm": 0.5886792452830188, + 
"acc_norm_stderr": 0.03028500925900978 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6454545454545455, + "acc_stderr": 0.04582004841505417, + "acc_norm": 0.6454545454545455, + "acc_norm_stderr": 0.04582004841505417 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3814814814814815, + "acc_stderr": 0.029616718927497593, + "acc_norm": 0.3814814814814815, + "acc_norm_stderr": 0.029616718927497593 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3973509933774834, + "acc_stderr": 0.03995524007681681, + "acc_norm": 0.3973509933774834, + "acc_norm_stderr": 0.03995524007681681 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6716417910447762, + "acc_stderr": 0.033206858897443244, + "acc_norm": 0.6716417910447762, + "acc_norm_stderr": 0.033206858897443244 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5491329479768786, + "acc_stderr": 0.03794012674697028, + "acc_norm": 0.5491329479768786, + "acc_norm_stderr": 0.03794012674697028 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.36243386243386244, + "acc_stderr": 0.024757473902752052, + "acc_norm": 0.36243386243386244, + "acc_norm_stderr": 0.024757473902752052 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5138888888888888, + "acc_stderr": 0.04179596617581, + "acc_norm": 0.5138888888888888, + "acc_norm_stderr": 0.04179596617581 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.78, + "acc_stderr": 0.041633319989322626, + "acc_norm": 0.78, + "acc_norm_stderr": 0.041633319989322626 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5780346820809249, + "acc_stderr": 0.02658923114217426, + "acc_norm": 0.5780346820809249, + "acc_norm_stderr": 0.02658923114217426 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.50920245398773, + "acc_stderr": 0.03927705600787443, + 
"acc_norm": 0.50920245398773, + "acc_norm_stderr": 0.03927705600787443 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5679012345679012, + "acc_stderr": 0.027563010971606672, + "acc_norm": 0.5679012345679012, + "acc_norm_stderr": 0.027563010971606672 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6269430051813472, + "acc_stderr": 0.03490205592048574, + "acc_norm": 0.6269430051813472, + "acc_norm_stderr": 0.03490205592048574 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.40350877192982454, + "acc_stderr": 0.04615186962583703, + "acc_norm": 0.40350877192982454, + "acc_norm_stderr": 0.04615186962583703 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6403669724770642, + "acc_stderr": 0.020575234660123783, + "acc_norm": 0.6403669724770642, + "acc_norm_stderr": 0.020575234660123783 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.044444444444444495, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.044444444444444495 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.6078431372549019, + "acc_stderr": 0.02795604616542451, + "acc_norm": 0.6078431372549019, + "acc_norm_stderr": 0.02795604616542451 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7024793388429752, + "acc_stderr": 0.04173349148083499, + "acc_norm": 0.7024793388429752, + "acc_norm_stderr": 0.04173349148083499 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.6118421052631579, + "acc_stderr": 0.03965842097512744, + "acc_norm": 0.6118421052631579, + "acc_norm_stderr": 0.03965842097512744 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4869281045751634, + 
"acc_stderr": 0.020220920829626916, + "acc_norm": 0.4869281045751634, + "acc_norm_stderr": 0.020220920829626916 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.36879432624113473, + "acc_stderr": 0.028782227561347243, + "acc_norm": 0.36879432624113473, + "acc_norm_stderr": 0.028782227561347243 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.5178571428571429, + "acc_stderr": 0.04742762361243011, + "acc_norm": 0.5178571428571429, + "acc_norm_stderr": 0.04742762361243011 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.47685185185185186, + "acc_stderr": 0.03406315360711507, + "acc_norm": 0.47685185185185186, + "acc_norm_stderr": 0.03406315360711507 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.25027932960893856, + "acc_stderr": 0.01448750085285043, + "acc_norm": 0.25027932960893856, + "acc_norm_stderr": 0.01448750085285043 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.67, + "acc_stderr": 0.04725815626252611, + "acc_norm": 0.67, + "acc_norm_stderr": 0.04725815626252611 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.46691176470588236, + "acc_stderr": 0.030306257722468317, + "acc_norm": 0.46691176470588236, + "acc_norm_stderr": 0.030306257722468317 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6326530612244898, + "acc_stderr": 0.030862144921087572, + "acc_norm": 0.6326530612244898, + "acc_norm_stderr": 0.030862144921087572 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6962025316455697, + "acc_stderr": 0.029936696387138608, + "acc_norm": 0.6962025316455697, + "acc_norm_stderr": 0.029936696387138608 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.37614080834419816, + "acc_stderr": 0.012372214430599817, + "acc_norm": 0.37614080834419816, + "acc_norm_stderr": 
0.012372214430599817 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6127450980392157, + "acc_stderr": 0.03418931233833344, + "acc_norm": 0.6127450980392157, + "acc_norm_stderr": 0.03418931233833344 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.7333333333333333, + "acc_stderr": 0.03453131801885416, + "acc_norm": 0.7333333333333333, + "acc_norm_stderr": 0.03453131801885416 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3084455324357405, + "mc1_stderr": 0.01616803938315687, + "mc2": 0.46407926473271655, + "mc2_stderr": 0.015697311993787478 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.48406139315230223, + "acc_stderr": 0.017181617837190195, + "acc_norm": 0.5100354191263282, + "acc_norm_stderr": 0.017186891286894053 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + 
"harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "iRASC/BioLlama-Ko-8B", + "model_sha": "5dfe497fe39c03db721f94247a4210139501b12b", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/ibivibiv/llama-3-nectar-dpo-8B/result_2024-07-02 14:15:33.json b/ibivibiv/llama-3-nectar-dpo-8B/result_2024-07-02 14:15:33.json new file mode 100644 index 
0000000000000000000000000000000000000000..56bae8c72a6c6510880a9b6743646568a37a4150 --- /dev/null +++ b/ibivibiv/llama-3-nectar-dpo-8B/result_2024-07-02 14:15:33.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3771331058020478, + "acc_stderr": 0.014163366896192598, + "acc_norm": 0.43600682593856654, + "acc_norm_stderr": 0.01449122569923092 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3594901414060944, + "acc_stderr": 0.004788703173474754, + "acc_norm": 0.46723760207130055, + "acc_norm_stderr": 0.004979058078478702 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5380116959064327, + "acc_stderr": 0.03823727092882307, + "acc_norm": 0.5380116959064327, + "acc_norm_stderr": 0.03823727092882307 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5631067961165048, + "acc_stderr": 0.04911147107365777, + "acc_norm": 0.5631067961165048, + "acc_norm_stderr": 0.04911147107365777 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4482758620689655, + "acc_stderr": 0.017784034534992457, + "acc_norm": 0.4482758620689655, + "acc_norm_stderr": 0.017784034534992457 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.35555555555555557, + "acc_stderr": 0.04135176749720386, + "acc_norm": 0.35555555555555557, + "acc_norm_stderr": 0.04135176749720386 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4978723404255319, + "acc_stderr": 0.032685726586674915, + "acc_norm": 0.4978723404255319, + "acc_norm_stderr": 0.032685726586674915 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3795180722891566, + "acc_stderr": 0.037777988227480165, + "acc_norm": 0.3795180722891566, + "acc_norm_stderr": 0.037777988227480165 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5273311897106109, + "acc_stderr": 0.028355633568328174, + "acc_norm": 0.5273311897106109, + "acc_norm_stderr": 
0.028355633568328174 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5022421524663677, + "acc_stderr": 0.033557465352232634, + "acc_norm": 0.5022421524663677, + "acc_norm_stderr": 0.033557465352232634 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5114503816793893, + "acc_stderr": 0.043841400240780176, + "acc_norm": 0.5114503816793893, + "acc_norm_stderr": 0.043841400240780176 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.51010101010101, + "acc_stderr": 0.035616254886737454, + "acc_norm": 0.51010101010101, + "acc_norm_stderr": 0.035616254886737454 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5724137931034483, + "acc_stderr": 0.04122737111370333, + "acc_norm": 0.5724137931034483, + "acc_norm_stderr": 0.04122737111370333 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.30392156862745096, + "acc_stderr": 0.04576665403207761, + "acc_norm": 0.30392156862745096, + "acc_norm_stderr": 0.04576665403207761 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5336134453781513, + "acc_stderr": 0.03240501447690071, + "acc_norm": 0.5336134453781513, + "acc_norm_stderr": 0.03240501447690071 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5, + "acc_stderr": 0.02535100632816969, + "acc_norm": 0.5, + "acc_norm_stderr": 0.02535100632816969 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.58, + "acc_stderr": 0.04960449637488583, + "acc_norm": 0.58, + "acc_norm_stderr": 0.04960449637488583 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6018518518518519, + "acc_stderr": 0.04732332615978814, + "acc_norm": 0.6018518518518519, + "acc_norm_stderr": 0.04732332615978814 
+ }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.458128078817734, + "acc_stderr": 0.03505630140785741, + "acc_norm": 0.458128078817734, + "acc_norm_stderr": 0.03505630140785741 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5258064516129032, + "acc_stderr": 0.028406095057653326, + "acc_norm": 0.5258064516129032, + "acc_norm_stderr": 0.028406095057653326 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7564102564102564, + "acc_stderr": 0.02812096650391439, + "acc_norm": 0.7564102564102564, + "acc_norm_stderr": 0.02812096650391439 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5283018867924528, + "acc_stderr": 0.030723535249006107, + "acc_norm": 0.5283018867924528, + "acc_norm_stderr": 0.030723535249006107 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5, + "acc_stderr": 0.04789131426105757, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04789131426105757 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.35555555555555557, + "acc_stderr": 0.0291857149498574, + "acc_norm": 0.35555555555555557, + "acc_norm_stderr": 0.0291857149498574 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.36423841059602646, + "acc_stderr": 0.03929111781242742, + "acc_norm": 0.36423841059602646, + "acc_norm_stderr": 0.03929111781242742 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6517412935323383, + "acc_stderr": 0.033687874661154596, + "acc_norm": 0.6517412935323383, + "acc_norm_stderr": 0.033687874661154596 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.43352601156069365, + "acc_stderr": 0.037786210790920545, + "acc_norm": 0.43352601156069365, + "acc_norm_stderr": 0.037786210790920545 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3862433862433862, + "acc_stderr": 0.025075981767601677, + "acc_norm": 0.3862433862433862, + "acc_norm_stderr": 0.025075981767601677 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4652777777777778, + "acc_stderr": 0.04171115858181618, + "acc_norm": 
0.4652777777777778, + "acc_norm_stderr": 0.04171115858181618 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.7, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.7, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5375722543352601, + "acc_stderr": 0.026842985519615375, + "acc_norm": 0.5375722543352601, + "acc_norm_stderr": 0.026842985519615375 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4294478527607362, + "acc_stderr": 0.038890666191127216, + "acc_norm": 0.4294478527607362, + "acc_norm_stderr": 0.038890666191127216 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5216049382716049, + "acc_stderr": 0.027794760105008736, + "acc_norm": 0.5216049382716049, + "acc_norm_stderr": 0.027794760105008736 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.047609522856952344, + "acc_norm": 0.34, + "acc_norm_stderr": 0.047609522856952344 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5647668393782384, + "acc_stderr": 0.03578038165008585, + "acc_norm": 0.5647668393782384, + "acc_norm_stderr": 0.03578038165008585 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.39473684210526316, + "acc_stderr": 0.04598188057816542, + "acc_norm": 0.39473684210526316, + "acc_norm_stderr": 0.04598188057816542 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5724770642201835, + "acc_stderr": 0.021210910204300434, + "acc_norm": 0.5724770642201835, + "acc_norm_stderr": 0.021210910204300434 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.044444444444444495, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.044444444444444495 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5163398692810458, + "acc_stderr": 0.028614624752805434, + 
"acc_norm": 0.5163398692810458, + "acc_norm_stderr": 0.028614624752805434 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.62, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.62, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7024793388429752, + "acc_stderr": 0.04173349148083498, + "acc_norm": 0.7024793388429752, + "acc_norm_stderr": 0.04173349148083498 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.45394736842105265, + "acc_stderr": 0.040516463428741406, + "acc_norm": 0.45394736842105265, + "acc_norm_stderr": 0.040516463428741406 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.020102583895887184, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.020102583895887184 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.35815602836879434, + "acc_stderr": 0.028602085862759415, + "acc_norm": 0.35815602836879434, + "acc_norm_stderr": 0.028602085862759415 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.44642857142857145, + "acc_stderr": 0.047184714852195886, + "acc_norm": 0.44642857142857145, + "acc_norm_stderr": 0.047184714852195886 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.39814814814814814, + "acc_stderr": 0.03338473403207401, + "acc_norm": 0.39814814814814814, + "acc_norm_stderr": 0.03338473403207401 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.3016759776536313, + "acc_stderr": 0.015350767572220285, + "acc_norm": 0.3016759776536313, + "acc_norm_stderr": 0.015350767572220285 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.04999999999999999, + "acc_norm": 0.45, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.69, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.69, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 
0.3713235294117647, + "acc_stderr": 0.02934980313976587, + "acc_norm": 0.3713235294117647, + "acc_norm_stderr": 0.02934980313976587 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6285714285714286, + "acc_stderr": 0.03093285879278986, + "acc_norm": 0.6285714285714286, + "acc_norm_stderr": 0.03093285879278986 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6371308016877637, + "acc_stderr": 0.03129920825530213, + "acc_norm": 0.6371308016877637, + "acc_norm_stderr": 0.03129920825530213 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3670143415906128, + "acc_stderr": 0.01231026424484213, + "acc_norm": 0.3670143415906128, + "acc_norm_stderr": 0.01231026424484213 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5441176470588235, + "acc_stderr": 0.03495624522015476, + "acc_norm": 0.5441176470588235, + "acc_norm_stderr": 0.03495624522015476 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6545454545454545, + "acc_stderr": 0.03713158067481913, + "acc_norm": 0.6545454545454545, + "acc_norm_stderr": 0.03713158067481913 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3108935128518972, + "mc1_stderr": 0.016203316673559696, + "mc2": 0.4862562047183603, + "mc2_stderr": 0.015878773208401586 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4852420306965762, + "acc_stderr": 0.017182864434998564, + "acc_norm": 0.5053128689492326, + "acc_norm_stderr": 0.01718938362722971 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + 
"harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + 
"harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "ibivibiv/llama-3-nectar-dpo-8B", + "model_sha": "55b6eda126756b92fe5cbd09ca2a8ce245e4f491", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/ifuseok/ft-solar-10.7b-v2.1-dpo/result_2024-01-15 08:08:39.json b/ifuseok/ft-solar-10.7b-v2.1-dpo/result_2024-01-15 08:08:39.json new file mode 100644 index 0000000000000000000000000000000000000000..119939e7fdaaca8f126dc3a85782c4fc24def44a --- /dev/null +++ b/ifuseok/ft-solar-10.7b-v2.1-dpo/result_2024-01-15 08:08:39.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4257679180887372, + "acc_stderr": 0.014449464278868807, + "acc_norm": 0.46501706484641636, + "acc_norm_stderr": 0.01457558392201967 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4441346345349532, + "acc_stderr": 0.0049585379889935804, + "acc_norm": 0.5836486755626369, + "acc_norm_stderr": 0.004919457850104255 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6549707602339181, + "acc_stderr": 0.036459813773888065, + "acc_norm": 0.6549707602339181, + "acc_norm_stderr": 0.036459813773888065 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6504854368932039, + "acc_stderr": 0.047211885060971716, + "acc_norm": 0.6504854368932039, + "acc_norm_stderr": 0.047211885060971716 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6296296296296297, + "acc_stderr": 0.01726860756000579, + "acc_norm": 0.6296296296296297, + "acc_norm_stderr": 0.01726860756000579 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.42962962962962964, + "acc_stderr": 0.04276349494376599, + "acc_norm": 0.42962962962962964, + "acc_norm_stderr": 0.04276349494376599 + }, + 
"harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.49361702127659574, + "acc_stderr": 0.03268335899936337, + "acc_norm": 0.49361702127659574, + "acc_norm_stderr": 0.03268335899936337 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.46987951807228917, + "acc_stderr": 0.03885425420866767, + "acc_norm": 0.46987951807228917, + "acc_norm_stderr": 0.03885425420866767 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6045016077170418, + "acc_stderr": 0.027770918531427838, + "acc_norm": 0.6045016077170418, + "acc_norm_stderr": 0.027770918531427838 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5381165919282511, + "acc_stderr": 0.03346015011973228, + "acc_norm": 0.5381165919282511, + "acc_norm_stderr": 0.03346015011973228 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6106870229007634, + "acc_stderr": 0.042764865428145914, + "acc_norm": 0.6106870229007634, + "acc_norm_stderr": 0.042764865428145914 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.56, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.56, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6767676767676768, + "acc_stderr": 0.033322999210706444, + "acc_norm": 0.6767676767676768, + "acc_norm_stderr": 0.033322999210706444 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5103448275862069, + "acc_stderr": 0.041657747757287644, + "acc_norm": 0.5103448275862069, + "acc_norm_stderr": 0.041657747757287644 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.04488482852329017, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.04488482852329017 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6050420168067226, + "acc_stderr": 0.031753678460966245, + "acc_norm": 0.6050420168067226, + 
"acc_norm_stderr": 0.031753678460966245 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4846153846153846, + "acc_stderr": 0.025339003010106532, + "acc_norm": 0.4846153846153846, + "acc_norm_stderr": 0.025339003010106532 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6018518518518519, + "acc_stderr": 0.04732332615978814, + "acc_norm": 0.6018518518518519, + "acc_norm_stderr": 0.04732332615978814 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4039408866995074, + "acc_stderr": 0.034524539038220406, + "acc_norm": 0.4039408866995074, + "acc_norm_stderr": 0.034524539038220406 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5580645161290323, + "acc_stderr": 0.028251557906849745, + "acc_norm": 0.5580645161290323, + "acc_norm_stderr": 0.028251557906849745 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.782051282051282, + "acc_stderr": 0.027046857630716663, + "acc_norm": 0.782051282051282, + "acc_norm_stderr": 0.027046857630716663 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5547169811320755, + "acc_stderr": 0.030588052974270655, + "acc_norm": 0.5547169811320755, + "acc_norm_stderr": 0.030588052974270655 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6, + "acc_stderr": 0.0469237132203465, + "acc_norm": 0.6, + "acc_norm_stderr": 0.0469237132203465 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.28888888888888886, + "acc_stderr": 0.027634907264178544, + "acc_norm": 0.28888888888888886, + "acc_norm_stderr": 0.027634907264178544 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + "acc_stderr": 0.03684881521389023, + "acc_norm": 
0.2847682119205298, + "acc_norm_stderr": 0.03684881521389023 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.03333333333333333, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.03333333333333333 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4913294797687861, + "acc_stderr": 0.03811890988940412, + "acc_norm": 0.4913294797687861, + "acc_norm_stderr": 0.03811890988940412 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.025305906241590636, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.025305906241590636 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5347222222222222, + "acc_stderr": 0.04171115858181618, + "acc_norm": 0.5347222222222222, + "acc_norm_stderr": 0.04171115858181618 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.68, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.68, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5722543352601156, + "acc_stderr": 0.026636539741116082, + "acc_norm": 0.5722543352601156, + "acc_norm_stderr": 0.026636539741116082 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.49693251533742333, + "acc_stderr": 0.03928297078179663, + "acc_norm": 0.49693251533742333, + "acc_norm_stderr": 0.03928297078179663 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6358024691358025, + "acc_stderr": 0.026774929899722334, + "acc_norm": 0.6358024691358025, + "acc_norm_stderr": 0.026774929899722334 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7098445595854922, + "acc_stderr": 0.03275264467791516, + 
"acc_norm": 0.7098445595854922, + "acc_norm_stderr": 0.03275264467791516 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.4298245614035088, + "acc_stderr": 0.04657047260594964, + "acc_norm": 0.4298245614035088, + "acc_norm_stderr": 0.04657047260594964 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.671559633027523, + "acc_stderr": 0.020135902797298398, + "acc_norm": 0.671559633027523, + "acc_norm_stderr": 0.020135902797298398 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.0442626668137991, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.0442626668137991 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.565359477124183, + "acc_stderr": 0.028384256704883037, + "acc_norm": 0.565359477124183, + "acc_norm_stderr": 0.028384256704883037 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6942148760330579, + "acc_stderr": 0.04205953933884124, + "acc_norm": 0.6942148760330579, + "acc_norm_stderr": 0.04205953933884124 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5789473684210527, + "acc_stderr": 0.040179012759817494, + "acc_norm": 0.5789473684210527, + "acc_norm_stderr": 0.040179012759817494 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5098039215686274, + "acc_stderr": 0.02022394600507431, + "acc_norm": 0.5098039215686274, + "acc_norm_stderr": 0.02022394600507431 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.35106382978723405, + "acc_stderr": 0.028473501272963764, + "acc_norm": 0.35106382978723405, + "acc_norm_stderr": 0.028473501272963764 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4017857142857143, + "acc_stderr": 0.04653333146973646, + "acc_norm": 0.4017857142857143, + "acc_norm_stderr": 0.04653333146973646 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4675925925925926, + "acc_stderr": 
0.03402801581358966, + "acc_norm": 0.4675925925925926, + "acc_norm_stderr": 0.03402801581358966 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2558659217877095, + "acc_stderr": 0.014593620923210752, + "acc_norm": 0.2558659217877095, + "acc_norm_stderr": 0.014593620923210752 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.66, + "acc_stderr": 0.04760952285695237, + "acc_norm": 0.66, + "acc_norm_stderr": 0.04760952285695237 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.49264705882352944, + "acc_stderr": 0.030369552523902173, + "acc_norm": 0.49264705882352944, + "acc_norm_stderr": 0.030369552523902173 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5306122448979592, + "acc_stderr": 0.031949171367580624, + "acc_norm": 0.5306122448979592, + "acc_norm_stderr": 0.031949171367580624 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7215189873417721, + "acc_stderr": 0.029178682304842538, + "acc_norm": 0.7215189873417721, + "acc_norm_stderr": 0.029178682304842538 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.363754889178618, + "acc_stderr": 0.012286991879902889, + "acc_norm": 0.363754889178618, + "acc_norm_stderr": 0.012286991879902889 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6813725490196079, + "acc_stderr": 0.03270287181482082, + "acc_norm": 0.6813725490196079, + "acc_norm_stderr": 0.03270287181482082 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6787878787878788, + "acc_stderr": 0.03646204963253814, + "acc_norm": 0.6787878787878788, + "acc_norm_stderr": 0.03646204963253814 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.29865361077111385, + "mc1_stderr": 0.016021570613768545, + "mc2": 0.4673510067203589, + "mc2_stderr": 0.016048008898188526 + }, + 
"harness|ko_commongen_v2|2": { + "acc": 0.5218417945690673, + "acc_stderr": 0.017173944474294378, + "acc_norm": 0.5678866587957497, + "acc_norm_stderr": 0.01703117019885175 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + 
"harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "ifuseok/ft-solar-10.7b-v2.1-dpo", + "model_sha": "eb2f7dab8f36e97240891ef789ff17fd082745a1", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/ifuseok/sft-solar-10.7b-v1.1/result_2024-01-05 08:37:57.json b/ifuseok/sft-solar-10.7b-v1.1/result_2024-01-05 08:37:57.json new file mode 100644 index 0000000000000000000000000000000000000000..549f0f0c2379d3315b0a723db87ffb101f2394dc --- /dev/null +++ b/ifuseok/sft-solar-10.7b-v1.1/result_2024-01-05 08:37:57.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4283276450511945, + "acc_stderr": 0.01446049636759902, + "acc_norm": 0.46245733788395904, + "acc_norm_stderr": 0.014570144495075571 + }, + "harness|ko_hellaswag|10": { + "acc": 0.44144592710615416, + "acc_stderr": 0.00495544756469404, + "acc_norm": 0.5806612228639714, + "acc_norm_stderr": 0.004924424018073683 + }, + "harness|ko_mmlu_world_religions|5": { + 
"acc": 0.6549707602339181, + "acc_stderr": 0.036459813773888065, + "acc_norm": 0.6549707602339181, + "acc_norm_stderr": 0.036459813773888065 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6504854368932039, + "acc_stderr": 0.047211885060971716, + "acc_norm": 0.6504854368932039, + "acc_norm_stderr": 0.047211885060971716 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6309067688378033, + "acc_stderr": 0.01725628310912462, + "acc_norm": 0.6309067688378033, + "acc_norm_stderr": 0.01725628310912462 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4148148148148148, + "acc_stderr": 0.04256193767901407, + "acc_norm": 0.4148148148148148, + "acc_norm_stderr": 0.04256193767901407 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4851063829787234, + "acc_stderr": 0.03267151848924777, + "acc_norm": 0.4851063829787234, + "acc_norm_stderr": 0.03267151848924777 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4578313253012048, + "acc_stderr": 0.03878626771002361, + "acc_norm": 0.4578313253012048, + "acc_norm_stderr": 0.03878626771002361 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5884244372990354, + "acc_stderr": 0.027950481494401266, + "acc_norm": 0.5884244372990354, + "acc_norm_stderr": 0.027950481494401266 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5426008968609866, + "acc_stderr": 0.033435777055830646, + "acc_norm": 0.5426008968609866, + "acc_norm_stderr": 0.033435777055830646 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6183206106870229, + "acc_stderr": 0.04260735157644561, + "acc_norm": 0.6183206106870229, + "acc_norm_stderr": 0.04260735157644561 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.55, + "acc_stderr": 0.05, + "acc_norm": 0.55, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6717171717171717, + 
"acc_stderr": 0.03345678422756777, + "acc_norm": 0.6717171717171717, + "acc_norm_stderr": 0.03345678422756777 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.503448275862069, + "acc_stderr": 0.041665675771015785, + "acc_norm": 0.503448275862069, + "acc_norm_stderr": 0.041665675771015785 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.04389869956808778, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.04389869956808778 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6008403361344538, + "acc_stderr": 0.03181110032413925, + "acc_norm": 0.6008403361344538, + "acc_norm_stderr": 0.03181110032413925 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.49230769230769234, + "acc_stderr": 0.025348006031534805, + "acc_norm": 0.49230769230769234, + "acc_norm_stderr": 0.025348006031534805 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.41, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.41, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5925925925925926, + "acc_stderr": 0.04750077341199985, + "acc_norm": 0.5925925925925926, + "acc_norm_stderr": 0.04750077341199985 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39901477832512317, + "acc_stderr": 0.03445487686264716, + "acc_norm": 0.39901477832512317, + "acc_norm_stderr": 0.03445487686264716 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5548387096774193, + "acc_stderr": 0.028272410186214906, + "acc_norm": 0.5548387096774193, + "acc_norm_stderr": 0.028272410186214906 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7905982905982906, + "acc_stderr": 0.026655699653922754, + "acc_norm": 0.7905982905982906, + "acc_norm_stderr": 0.026655699653922754 + }, + 
"harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5433962264150943, + "acc_stderr": 0.030656748696739435, + "acc_norm": 0.5433962264150943, + "acc_norm_stderr": 0.030656748696739435 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6, + "acc_stderr": 0.0469237132203465, + "acc_norm": 0.6, + "acc_norm_stderr": 0.0469237132203465 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.027840811495871934, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.027840811495871934 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + "acc_stderr": 0.03684881521389023, + "acc_norm": 0.2847682119205298, + "acc_norm_stderr": 0.03684881521389023 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.03333333333333333, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.03333333333333333 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.48554913294797686, + "acc_stderr": 0.03810871630454764, + "acc_norm": 0.48554913294797686, + "acc_norm_stderr": 0.03810871630454764 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.41005291005291006, + "acc_stderr": 0.02533120243894444, + "acc_norm": 0.41005291005291006, + "acc_norm_stderr": 0.02533120243894444 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5347222222222222, + "acc_stderr": 0.04171115858181618, + "acc_norm": 0.5347222222222222, + "acc_norm_stderr": 0.04171115858181618 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.68, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.68, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5780346820809249, + "acc_stderr": 0.02658923114217426, + "acc_norm": 0.5780346820809249, + "acc_norm_stderr": 0.02658923114217426 + }, + 
"harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5030674846625767, + "acc_stderr": 0.03928297078179663, + "acc_norm": 0.5030674846625767, + "acc_norm_stderr": 0.03928297078179663 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6296296296296297, + "acc_stderr": 0.02686949074481525, + "acc_norm": 0.6296296296296297, + "acc_norm_stderr": 0.02686949074481525 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7253886010362695, + "acc_stderr": 0.032210245080411544, + "acc_norm": 0.7253886010362695, + "acc_norm_stderr": 0.032210245080411544 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.42105263157894735, + "acc_stderr": 0.046446020912223177, + "acc_norm": 0.42105263157894735, + "acc_norm_stderr": 0.046446020912223177 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6642201834862386, + "acc_stderr": 0.020248081396752934, + "acc_norm": 0.6642201834862386, + "acc_norm_stderr": 0.020248081396752934 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4365079365079365, + "acc_stderr": 0.044359328928514664, + "acc_norm": 0.4365079365079365, + "acc_norm_stderr": 0.044359328928514664 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.565359477124183, + "acc_stderr": 0.028384256704883037, + "acc_norm": 0.565359477124183, + "acc_norm_stderr": 0.028384256704883037 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6694214876033058, + "acc_stderr": 0.04294340845212094, + "acc_norm": 0.6694214876033058, + "acc_norm_stderr": 0.04294340845212094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5789473684210527, + "acc_stderr": 0.040179012759817494, + "acc_norm": 0.5789473684210527, + "acc_norm_stderr": 0.040179012759817494 + }, + 
"harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5065359477124183, + "acc_stderr": 0.020226106567657803, + "acc_norm": 0.5065359477124183, + "acc_norm_stderr": 0.020226106567657803 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3475177304964539, + "acc_stderr": 0.028406627809590947, + "acc_norm": 0.3475177304964539, + "acc_norm_stderr": 0.028406627809590947 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4017857142857143, + "acc_stderr": 0.04653333146973647, + "acc_norm": 0.4017857142857143, + "acc_norm_stderr": 0.04653333146973647 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.44907407407407407, + "acc_stderr": 0.033922384053216174, + "acc_norm": 0.44907407407407407, + "acc_norm_stderr": 0.033922384053216174 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2581005586592179, + "acc_stderr": 0.014635185616527819, + "acc_norm": 0.2581005586592179, + "acc_norm_stderr": 0.014635185616527819 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.63, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.63, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4889705882352941, + "acc_stderr": 0.030365446477275675, + "acc_norm": 0.4889705882352941, + "acc_norm_stderr": 0.030365446477275675 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5306122448979592, + "acc_stderr": 0.031949171367580624, + "acc_norm": 0.5306122448979592, + "acc_norm_stderr": 0.031949171367580624 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7172995780590717, + "acc_stderr": 0.029312814153955934, + "acc_norm": 0.7172995780590717, + "acc_norm_stderr": 0.029312814153955934 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.36766623207301175, + "acc_stderr": 
0.0123148459100717, + "acc_norm": 0.36766623207301175, + "acc_norm_stderr": 0.0123148459100717 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.696078431372549, + "acc_stderr": 0.032282103870378914, + "acc_norm": 0.696078431372549, + "acc_norm_stderr": 0.032282103870378914 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6787878787878788, + "acc_stderr": 0.03646204963253814, + "acc_norm": 0.6787878787878788, + "acc_norm_stderr": 0.03646204963253814 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.29253365973072215, + "mc1_stderr": 0.015925597445286165, + "mc2": 0.46596896991950454, + "mc2_stderr": 0.01613428576228491 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5159386068476978, + "acc_stderr": 0.017181617837190195, + "acc_norm": 0.5608028335301063, + "acc_norm_stderr": 0.0170627757447807 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + 
"harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "ifuseok/sft-solar-10.7b-v1.1", + "model_sha": "dbfe377fca86bb8b7ae609cbf00eb4cf1a46e622", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/ifuseok/sft-solar-10.7b-v1/result_2024-01-04 09:44:33.json 
b/ifuseok/sft-solar-10.7b-v1/result_2024-01-04 09:44:33.json new file mode 100644 index 0000000000000000000000000000000000000000..1425393669a73c3154f452b52473ce684d297698 --- /dev/null +++ b/ifuseok/sft-solar-10.7b-v1/result_2024-01-04 09:44:33.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4283276450511945, + "acc_stderr": 0.014460496367599017, + "acc_norm": 0.4718430034129693, + "acc_norm_stderr": 0.0145882041051022 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4312885879306911, + "acc_stderr": 0.0049424407463284845, + "acc_norm": 0.5688109938259311, + "acc_norm_stderr": 0.004942302768002097 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.631578947368421, + "acc_stderr": 0.036996580176568775, + "acc_norm": 0.631578947368421, + "acc_norm_stderr": 0.036996580176568775 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6407766990291263, + "acc_stderr": 0.04750458399041697, + "acc_norm": 0.6407766990291263, + "acc_norm_stderr": 0.04750458399041697 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6232439335887612, + "acc_stderr": 0.017328292907303065, + "acc_norm": 0.6232439335887612, + "acc_norm_stderr": 0.017328292907303065 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.043097329010363554, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.043097329010363554 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720683, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720683 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4553191489361702, + "acc_stderr": 0.03255525359340354, + "acc_norm": 0.4553191489361702, + "acc_norm_stderr": 0.03255525359340354 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.5120481927710844, + "acc_stderr": 0.038913644958358175, + "acc_norm": 0.5120481927710844, + "acc_norm_stderr": 0.038913644958358175 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.617363344051447, + "acc_stderr": 
0.027604689028582, + "acc_norm": 0.617363344051447, + "acc_norm_stderr": 0.027604689028582 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5605381165919282, + "acc_stderr": 0.03331092511038179, + "acc_norm": 0.5605381165919282, + "acc_norm_stderr": 0.03331092511038179 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6335877862595419, + "acc_stderr": 0.04225875451969639, + "acc_norm": 0.6335877862595419, + "acc_norm_stderr": 0.04225875451969639 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6616161616161617, + "acc_stderr": 0.03371124142626303, + "acc_norm": 0.6616161616161617, + "acc_norm_stderr": 0.03371124142626303 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5241379310344828, + "acc_stderr": 0.04161808503501528, + "acc_norm": 0.5241379310344828, + "acc_norm_stderr": 0.04161808503501528 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.30392156862745096, + "acc_stderr": 0.04576665403207762, + "acc_norm": 0.30392156862745096, + "acc_norm_stderr": 0.04576665403207762 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5840336134453782, + "acc_stderr": 0.03201650100739611, + "acc_norm": 0.5840336134453782, + "acc_norm_stderr": 0.03201650100739611 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5205128205128206, + "acc_stderr": 0.02532966316348994, + "acc_norm": 0.5205128205128206, + "acc_norm_stderr": 0.02532966316348994 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.52, + "acc_stderr": 0.05021167315686779, + "acc_norm": 0.52, + "acc_norm_stderr": 0.05021167315686779 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5925925925925926, + "acc_stderr": 
0.04750077341199984, + "acc_norm": 0.5925925925925926, + "acc_norm_stderr": 0.04750077341199984 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39408866995073893, + "acc_stderr": 0.03438157967036545, + "acc_norm": 0.39408866995073893, + "acc_norm_stderr": 0.03438157967036545 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5258064516129032, + "acc_stderr": 0.028406095057653326, + "acc_norm": 0.5258064516129032, + "acc_norm_stderr": 0.028406095057653326 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7606837606837606, + "acc_stderr": 0.027951826808924333, + "acc_norm": 0.7606837606837606, + "acc_norm_stderr": 0.027951826808924333 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5320754716981132, + "acc_stderr": 0.030709486992556545, + "acc_norm": 0.5320754716981132, + "acc_norm_stderr": 0.030709486992556545 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6545454545454545, + "acc_stderr": 0.04554619617541054, + "acc_norm": 0.6545454545454545, + "acc_norm_stderr": 0.04554619617541054 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3296296296296296, + "acc_stderr": 0.028661201116524572, + "acc_norm": 0.3296296296296296, + "acc_norm_stderr": 0.028661201116524572 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.03780445850526733, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.03780445850526733 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6467661691542289, + "acc_stderr": 0.03379790611796777, + "acc_norm": 0.6467661691542289, + "acc_norm_stderr": 0.03379790611796777 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.47398843930635837, + "acc_stderr": 0.03807301726504511, + "acc_norm": 0.47398843930635837, + "acc_norm_stderr": 0.03807301726504511 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.36243386243386244, + "acc_stderr": 0.024757473902752045, + "acc_norm": 0.36243386243386244, + "acc_norm_stderr": 
0.024757473902752045 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5347222222222222, + "acc_stderr": 0.04171115858181618, + "acc_norm": 0.5347222222222222, + "acc_norm_stderr": 0.04171115858181618 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.68, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.68, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5722543352601156, + "acc_stderr": 0.02663653974111608, + "acc_norm": 0.5722543352601156, + "acc_norm_stderr": 0.02663653974111608 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.49693251533742333, + "acc_stderr": 0.03928297078179663, + "acc_norm": 0.49693251533742333, + "acc_norm_stderr": 0.03928297078179663 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6018518518518519, + "acc_stderr": 0.027237415094592474, + "acc_norm": 0.6018518518518519, + "acc_norm_stderr": 0.027237415094592474 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.689119170984456, + "acc_stderr": 0.03340361906276586, + "acc_norm": 0.689119170984456, + "acc_norm_stderr": 0.03340361906276586 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.38596491228070173, + "acc_stderr": 0.045796394220704355, + "acc_norm": 0.38596491228070173, + "acc_norm_stderr": 0.045796394220704355 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6678899082568808, + "acc_stderr": 0.02019268298542334, + "acc_norm": 0.6678899082568808, + "acc_norm_stderr": 0.02019268298542334 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3968253968253968, + "acc_stderr": 0.04375888492727062, + "acc_norm": 0.3968253968253968, + 
"acc_norm_stderr": 0.04375888492727062 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5849673202614379, + "acc_stderr": 0.028213504177824093, + "acc_norm": 0.5849673202614379, + "acc_norm_stderr": 0.028213504177824093 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6611570247933884, + "acc_stderr": 0.043207678075366684, + "acc_norm": 0.6611570247933884, + "acc_norm_stderr": 0.043207678075366684 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5592105263157895, + "acc_stderr": 0.04040311062490437, + "acc_norm": 0.5592105263157895, + "acc_norm_stderr": 0.04040311062490437 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.49673202614379086, + "acc_stderr": 0.020227402794434867, + "acc_norm": 0.49673202614379086, + "acc_norm_stderr": 0.020227402794434867 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.4219858156028369, + "acc_stderr": 0.029462189233370593, + "acc_norm": 0.4219858156028369, + "acc_norm_stderr": 0.029462189233370593 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.375, + "acc_stderr": 0.04595091388086298, + "acc_norm": 0.375, + "acc_norm_stderr": 0.04595091388086298 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4212962962962963, + "acc_stderr": 0.033674621388960775, + "acc_norm": 0.4212962962962963, + "acc_norm_stderr": 0.033674621388960775 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.22905027932960895, + "acc_stderr": 0.014054314935614562, + "acc_norm": 0.22905027932960895, + "acc_norm_stderr": 0.014054314935614562 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + 
"acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.45955882352941174, + "acc_stderr": 0.03027332507734575, + "acc_norm": 0.45955882352941174, + "acc_norm_stderr": 0.03027332507734575 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5387755102040817, + "acc_stderr": 0.03191282052669278, + "acc_norm": 0.5387755102040817, + "acc_norm_stderr": 0.03191282052669278 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7088607594936709, + "acc_stderr": 0.029571601065753374, + "acc_norm": 0.7088607594936709, + "acc_norm_stderr": 0.029571601065753374 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3833116036505867, + "acc_stderr": 0.012417603662901188, + "acc_norm": 0.3833116036505867, + "acc_norm_stderr": 0.012417603662901188 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6519607843137255, + "acc_stderr": 0.033433112404884176, + "acc_norm": 0.6519607843137255, + "acc_norm_stderr": 0.033433112404884176 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6727272727272727, + "acc_stderr": 0.03663974994391241, + "acc_norm": 0.6727272727272727, + "acc_norm_stderr": 0.03663974994391241 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2717258261933905, + "mc1_stderr": 0.015572840452875828, + "mc2": 0.45420680883972225, + "mc2_stderr": 0.016035988620921905 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5371900826446281, + "acc_stderr": 0.0171427361176433, + "acc_norm": 0.5726092089728453, + "acc_norm_stderr": 0.017008129844823153 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + 
"harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + 
"harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "ifuseok/sft-solar-10.7b-v1", + "model_sha": "92639ef51b354e0a761486dfa7f4d65100bacc69", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/ifuseok/sft-solar-10.7b-v2.1/result_2024-01-13 15:57:43.json b/ifuseok/sft-solar-10.7b-v2.1/result_2024-01-13 15:57:43.json new file mode 100644 index 0000000000000000000000000000000000000000..3598c4731ad60c9df5dcb5e4e005043fe433f830 --- /dev/null +++ b/ifuseok/sft-solar-10.7b-v2.1/result_2024-01-13 15:57:43.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.22696245733788395, + "acc_stderr": 0.01224049153613286, + "acc_norm": 0.22696245733788395, + "acc_norm_stderr": 0.01224049153613286 + }, + "harness|ko_hellaswag|10": { + "acc": 0.2504481179047998, + "acc_stderr": 0.004323856300539177, + "acc_norm": 0.2504481179047998, + "acc_norm_stderr": 0.004323856300539177 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.3216374269005848, + "acc_stderr": 0.03582529442573122, + "acc_norm": 0.3216374269005848, + "acc_norm_stderr": 0.03582529442573122 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.17475728155339806, + "acc_stderr": 0.037601780060266196, + "acc_norm": 0.17475728155339806, + "acc_norm_stderr": 0.037601780060266196 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.23754789272030652, + "acc_stderr": 0.015218733046150191, + "acc_norm": 0.23754789272030652, + "acc_norm_stderr": 0.015218733046150191 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.18518518518518517, + 
"acc_stderr": 0.03355677216313142, + "acc_norm": 0.18518518518518517, + "acc_norm_stderr": 0.03355677216313142 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932268, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932268 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.26382978723404255, + "acc_stderr": 0.028809989854102987, + "acc_norm": 0.26382978723404255, + "acc_norm_stderr": 0.028809989854102987 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.28313253012048195, + "acc_stderr": 0.03507295431370518, + "acc_norm": 0.28313253012048195, + "acc_norm_stderr": 0.03507295431370518 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.1864951768488746, + "acc_stderr": 0.022122439772480757, + "acc_norm": 0.1864951768488746, + "acc_norm_stderr": 0.022122439772480757 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.31390134529147984, + "acc_stderr": 0.031146796482972465, + "acc_norm": 0.31390134529147984, + "acc_norm_stderr": 0.031146796482972465 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2595419847328244, + "acc_stderr": 0.03844876139785271, + "acc_norm": 0.2595419847328244, + "acc_norm_stderr": 0.03844876139785271 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.17676767676767677, + "acc_stderr": 0.027178752639044915, + "acc_norm": 0.17676767676767677, + "acc_norm_stderr": 0.027178752639044915 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2413793103448276, + "acc_stderr": 0.03565998174135302, + "acc_norm": 0.2413793103448276, + "acc_norm_stderr": 0.03565998174135302 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237654, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237654 + }, + "harness|ko_mmlu_high_school_microeconomics|5": 
{ + "acc": 0.21008403361344538, + "acc_stderr": 0.026461398717471874, + "acc_norm": 0.21008403361344538, + "acc_norm_stderr": 0.026461398717471874 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.20256410256410257, + "acc_stderr": 0.020377660970371393, + "acc_norm": 0.20256410256410257, + "acc_norm_stderr": 0.020377660970371393 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.18, + "acc_stderr": 0.038612291966536955, + "acc_norm": 0.18, + "acc_norm_stderr": 0.038612291966536955 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.04236511258094633, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.04236511258094633 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.15270935960591134, + "acc_stderr": 0.025308904539380627, + "acc_norm": 0.15270935960591134, + "acc_norm_stderr": 0.025308904539380627 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.1774193548387097, + "acc_stderr": 0.021732540689329276, + "acc_norm": 0.1774193548387097, + "acc_norm_stderr": 0.021732540689329276 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2905982905982906, + "acc_stderr": 0.029745048572674057, + "acc_norm": 0.2905982905982906, + "acc_norm_stderr": 0.029745048572674057 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.21509433962264152, + "acc_stderr": 0.025288394502891377, + "acc_norm": 0.21509433962264152, + "acc_norm_stderr": 0.025288394502891377 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03955932861795833, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03955932861795833 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2111111111111111, + "acc_stderr": 0.024882116857655075, + "acc_norm": 0.2111111111111111, + "acc_norm_stderr": 
0.024882116857655075 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.1986754966887417, + "acc_stderr": 0.03257847384436775, + "acc_norm": 0.1986754966887417, + "acc_norm_stderr": 0.03257847384436775 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.24378109452736318, + "acc_stderr": 0.030360490154014645, + "acc_norm": 0.24378109452736318, + "acc_norm_stderr": 0.030360490154014645 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.20809248554913296, + "acc_stderr": 0.030952890217749884, + "acc_norm": 0.20809248554913296, + "acc_norm_stderr": 0.030952890217749884 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.20899470899470898, + "acc_stderr": 0.020940481565334835, + "acc_norm": 0.20899470899470898, + "acc_norm_stderr": 0.020940481565334835 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2569444444444444, + "acc_stderr": 0.03653946969442099, + "acc_norm": 0.2569444444444444, + "acc_norm_stderr": 0.03653946969442099 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.2, + "acc_stderr": 0.040201512610368445, + "acc_norm": 0.2, + "acc_norm_stderr": 0.040201512610368445 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542129, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542129 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.24855491329479767, + "acc_stderr": 0.023267528432100174, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 0.023267528432100174 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.22085889570552147, + "acc_stderr": 0.03259177392742178, + "acc_norm": 0.22085889570552147, + "acc_norm_stderr": 0.03259177392742178 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.21604938271604937, + "acc_stderr": 0.022899162918445796, + "acc_norm": 0.21604938271604937, + "acc_norm_stderr": 0.022899162918445796 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + 
"acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.19689119170984457, + "acc_stderr": 0.028697873971860677, + "acc_norm": 0.19689119170984457, + "acc_norm_stderr": 0.028697873971860677 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.039994238792813365, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.039994238792813365 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.1926605504587156, + "acc_stderr": 0.016909276884936097, + "acc_norm": 0.1926605504587156, + "acc_norm_stderr": 0.016909276884936097 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.04040610178208841, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.04040610178208841 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.02392915551735129, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.02392915551735129 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2396694214876033, + "acc_stderr": 0.038968789850704164, + "acc_norm": 0.2396694214876033, + "acc_norm_stderr": 0.038968789850704164 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.17763157894736842, + "acc_stderr": 0.03110318238312338, + "acc_norm": 0.17763157894736842, + "acc_norm_stderr": 0.03110318238312338 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.25, + "acc_stderr": 0.01751781884501444, + "acc_norm": 0.25, + "acc_norm_stderr": 0.01751781884501444 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.23404255319148937, + "acc_stderr": 0.025257861359432414, + "acc_norm": 0.23404255319148937, + "acc_norm_stderr": 0.025257861359432414 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3125, + "acc_stderr": 
0.043994650575715215, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.043994650575715215 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.1527777777777778, + "acc_stderr": 0.024536326026134238, + "acc_norm": 0.1527777777777778, + "acc_norm_stderr": 0.024536326026134238 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23798882681564246, + "acc_stderr": 0.014242630070574892, + "acc_norm": 0.23798882681564246, + "acc_norm_stderr": 0.014242630070574892 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.18382352941176472, + "acc_stderr": 0.02352924218519311, + "acc_norm": 0.18382352941176472, + "acc_norm_stderr": 0.02352924218519311 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.18775510204081633, + "acc_stderr": 0.025000256039546198, + "acc_norm": 0.18775510204081633, + "acc_norm_stderr": 0.025000256039546198 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.270042194092827, + "acc_stderr": 0.028900721906293426, + "acc_norm": 0.270042194092827, + "acc_norm_stderr": 0.028900721906293426 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2457627118644068, + "acc_stderr": 0.010996156635142692, + "acc_norm": 0.2457627118644068, + "acc_norm_stderr": 0.010996156635142692 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.25, + "acc_stderr": 0.03039153369274154, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03039153369274154 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03225078108306289, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03225078108306289 + }, + "harness|ko_truthfulqa_mc|0": { + 
"mc1": 1.0, + "mc1_stderr": 0.0, + "mc2": NaN, + "mc2_stderr": NaN + }, + "harness|ko_commongen_v2|2": { + "acc": 0.24793388429752067, + "acc_stderr": 0.014846044968252247, + "acc_norm": 0.24793388429752067, + "acc_norm_stderr": 0.014846044968252247 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, 
+ "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "ifuseok/sft-solar-10.7b-v2.1", + "model_sha": "2e3cb1351b9d54b14470dca306c09cc532bec0de", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/ifuseok/yi-ko-playtus-instruct-v0.1/result_2023-12-20 04:49:59.json b/ifuseok/yi-ko-playtus-instruct-v0.1/result_2023-12-20 04:49:59.json new file mode 100644 index 0000000000000000000000000000000000000000..96b4b79966141d42d6f96a1e7310c0fc5324141e --- /dev/null +++ b/ifuseok/yi-ko-playtus-instruct-v0.1/result_2023-12-20 04:49:59.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3122866894197952, + "acc_stderr": 0.013542598541688065, + "acc_norm": 0.3703071672354949, + "acc_norm_stderr": 0.01411129875167495 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3737303326030671, + "acc_stderr": 0.004828045774734903, + "acc_norm": 0.4780920135431189, + "acc_norm_stderr": 
0.00498498932064813 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4093567251461988, + "acc_stderr": 0.03771283107626545, + "acc_norm": 0.4093567251461988, + "acc_norm_stderr": 0.03771283107626545 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.49514563106796117, + "acc_stderr": 0.049505043821289195, + "acc_norm": 0.49514563106796117, + "acc_norm_stderr": 0.049505043821289195 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.44061302681992337, + "acc_stderr": 0.01775339697390849, + "acc_norm": 0.44061302681992337, + "acc_norm_stderr": 0.01775339697390849 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.043097329010363554, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.043097329010363554 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3404255319148936, + "acc_stderr": 0.030976692998534422, + "acc_norm": 0.3404255319148936, + "acc_norm_stderr": 0.030976692998534422 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3132530120481928, + "acc_stderr": 0.036108050180310235, + "acc_norm": 0.3132530120481928, + "acc_norm_stderr": 0.036108050180310235 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.43086816720257237, + "acc_stderr": 0.028125340983972714, + "acc_norm": 0.43086816720257237, + "acc_norm_stderr": 0.028125340983972714 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.34080717488789236, + "acc_stderr": 0.03181149747055358, + "acc_norm": 0.34080717488789236, + "acc_norm_stderr": 0.03181149747055358 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4198473282442748, + "acc_stderr": 0.043285772152629715, + "acc_norm": 0.4198473282442748, + "acc_norm_stderr": 0.043285772152629715 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 
0.0479372485441102 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5050505050505051, + "acc_stderr": 0.035621707606254015, + "acc_norm": 0.5050505050505051, + "acc_norm_stderr": 0.035621707606254015 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3724137931034483, + "acc_stderr": 0.0402873153294756, + "acc_norm": 0.3724137931034483, + "acc_norm_stderr": 0.0402873153294756 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.04220773659171452, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.04220773659171452 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.39915966386554624, + "acc_stderr": 0.03181110032413926, + "acc_norm": 0.39915966386554624, + "acc_norm_stderr": 0.03181110032413926 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3769230769230769, + "acc_stderr": 0.024570975364225995, + "acc_norm": 0.3769230769230769, + "acc_norm_stderr": 0.024570975364225995 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.53, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.53, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.04793724854411018, + "acc_norm": 0.35, + "acc_norm_stderr": 0.04793724854411018 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5462962962962963, + "acc_stderr": 0.04812917324536823, + "acc_norm": 0.5462962962962963, + "acc_norm_stderr": 0.04812917324536823 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3793103448275862, + "acc_stderr": 0.034139638059062345, + "acc_norm": 0.3793103448275862, + "acc_norm_stderr": 0.034139638059062345 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.36451612903225805, + "acc_stderr": 0.02737987122994325, + "acc_norm": 0.36451612903225805, + "acc_norm_stderr": 0.02737987122994325 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5897435897435898, + "acc_stderr": 0.032224140452411065, + 
"acc_norm": 0.5897435897435898, + "acc_norm_stderr": 0.032224140452411065 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.37735849056603776, + "acc_stderr": 0.02983280811479601, + "acc_norm": 0.37735849056603776, + "acc_norm_stderr": 0.02983280811479601 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.38181818181818183, + "acc_stderr": 0.046534298079135075, + "acc_norm": 0.38181818181818183, + "acc_norm_stderr": 0.046534298079135075 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2814814814814815, + "acc_stderr": 0.027420019350945266, + "acc_norm": 0.2814814814814815, + "acc_norm_stderr": 0.027420019350945266 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.037101857261199946, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.037101857261199946 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.4975124378109453, + "acc_stderr": 0.03535490150137289, + "acc_norm": 0.4975124378109453, + "acc_norm_stderr": 0.03535490150137289 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.31213872832369943, + "acc_stderr": 0.03533133389323657, + "acc_norm": 0.31213872832369943, + "acc_norm_stderr": 0.03533133389323657 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30687830687830686, + "acc_stderr": 0.02375292871211213, + "acc_norm": 0.30687830687830686, + "acc_norm_stderr": 0.02375292871211213 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3263888888888889, + "acc_stderr": 0.03921067198982266, + "acc_norm": 0.3263888888888889, + "acc_norm_stderr": 0.03921067198982266 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.55, + "acc_stderr": 0.05, + "acc_norm": 0.55, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.41329479768786126, + "acc_stderr": 
0.02651126136940924, + "acc_norm": 0.41329479768786126, + "acc_norm_stderr": 0.02651126136940924 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3619631901840491, + "acc_stderr": 0.037757007291414416, + "acc_norm": 0.3619631901840491, + "acc_norm_stderr": 0.037757007291414416 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4012345679012346, + "acc_stderr": 0.027272582849839792, + "acc_norm": 0.4012345679012346, + "acc_norm_stderr": 0.027272582849839792 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.47150259067357514, + "acc_stderr": 0.03602573571288441, + "acc_norm": 0.47150259067357514, + "acc_norm_stderr": 0.03602573571288441 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.040969851398436695, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.040969851398436695 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.47522935779816516, + "acc_stderr": 0.021410999753635914, + "acc_norm": 0.47522935779816516, + "acc_norm_stderr": 0.021410999753635914 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.21428571428571427, + "acc_stderr": 0.03670066451047181, + "acc_norm": 0.21428571428571427, + "acc_norm_stderr": 0.03670066451047181 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.39869281045751637, + "acc_stderr": 0.028036092273891765, + "acc_norm": 0.39869281045751637, + "acc_norm_stderr": 0.028036092273891765 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5702479338842975, + "acc_stderr": 0.04519082021319772, + "acc_norm": 0.5702479338842975, + "acc_norm_stderr": 0.04519082021319772 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 
0.375, + "acc_stderr": 0.039397364351956274, + "acc_norm": 0.375, + "acc_norm_stderr": 0.039397364351956274 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.35130718954248363, + "acc_stderr": 0.01931267606578656, + "acc_norm": 0.35130718954248363, + "acc_norm_stderr": 0.01931267606578656 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.29432624113475175, + "acc_stderr": 0.027187127011503803, + "acc_norm": 0.29432624113475175, + "acc_norm_stderr": 0.027187127011503803 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.23214285714285715, + "acc_stderr": 0.04007341809755806, + "acc_norm": 0.23214285714285715, + "acc_norm_stderr": 0.04007341809755806 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.28703703703703703, + "acc_stderr": 0.030851992993257013, + "acc_norm": 0.28703703703703703, + "acc_norm_stderr": 0.030851992993257013 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2547486033519553, + "acc_stderr": 0.01457265038340917, + "acc_norm": 0.2547486033519553, + "acc_norm_stderr": 0.01457265038340917 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.2977941176470588, + "acc_stderr": 0.027778298701545443, + "acc_norm": 0.2977941176470588, + "acc_norm_stderr": 0.027778298701545443 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.40816326530612246, + "acc_stderr": 0.03146465712827424, + "acc_norm": 0.40816326530612246, + "acc_norm_stderr": 0.03146465712827424 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.540084388185654, + "acc_stderr": 0.03244246810187913, + "acc_norm": 0.540084388185654, + "acc_norm_stderr": 
0.03244246810187913 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3161668839634941, + "acc_stderr": 0.01187578089438658, + "acc_norm": 0.3161668839634941, + "acc_norm_stderr": 0.01187578089438658 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4264705882352941, + "acc_stderr": 0.034711579079534254, + "acc_norm": 0.4264705882352941, + "acc_norm_stderr": 0.034711579079534254 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4909090909090909, + "acc_stderr": 0.0390369864774844, + "acc_norm": 0.4909090909090909, + "acc_norm_stderr": 0.0390369864774844 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2729498164014688, + "mc1_stderr": 0.015594753632006504, + "mc2": 0.43992652953593975, + "mc2_stderr": 0.015968344564232163 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.39669421487603307, + "acc_stderr": 0.016819438642971408, + "acc_norm": 0.44391971664698937, + "acc_norm_stderr": 0.017081884623542546 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + 
"harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "ifuseok/yi-ko-playtus-instruct-v0.1", + "model_sha": "a35b7d833cb87ca41d7380da9411df17e3c4052e", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff 
--git a/ifuseok/yi-ko-playtus-instruct-v0.2/result_2023-12-27 08:08:48.json b/ifuseok/yi-ko-playtus-instruct-v0.2/result_2023-12-27 08:08:48.json new file mode 100644 index 0000000000000000000000000000000000000000..b4d211fb92742e25c6623a0552db35410e43aeba --- /dev/null +++ b/ifuseok/yi-ko-playtus-instruct-v0.2/result_2023-12-27 08:08:48.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3430034129692833, + "acc_stderr": 0.013872423223718167, + "acc_norm": 0.41552901023890787, + "acc_norm_stderr": 0.014401366641216391 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3979286994622585, + "acc_stderr": 0.0048847024124560965, + "acc_norm": 0.5294761999601673, + "acc_norm_stderr": 0.004981103157940433 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5087719298245614, + "acc_stderr": 0.038342347441649924, + "acc_norm": 0.5087719298245614, + "acc_norm_stderr": 0.038342347441649924 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5728155339805825, + "acc_stderr": 0.04897957737781168, + "acc_norm": 0.5728155339805825, + "acc_norm_stderr": 0.04897957737781168 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5593869731800766, + "acc_stderr": 0.017753396973908486, + "acc_norm": 0.5593869731800766, + "acc_norm_stderr": 0.017753396973908486 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.42962962962962964, + "acc_stderr": 0.04276349494376598, + "acc_norm": 0.42962962962962964, + "acc_norm_stderr": 0.04276349494376598 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.37446808510638296, + "acc_stderr": 0.03163910665367291, + "acc_norm": 0.37446808510638296, + "acc_norm_stderr": 0.03163910665367291 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3855421686746988, + "acc_stderr": 0.03789134424611549, + "acc_norm": 0.3855421686746988, + "acc_norm_stderr": 
0.03789134424611549 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4855305466237942, + "acc_stderr": 0.028386198084177673, + "acc_norm": 0.4855305466237942, + "acc_norm_stderr": 0.028386198084177673 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.484304932735426, + "acc_stderr": 0.0335412657542081, + "acc_norm": 0.484304932735426, + "acc_norm_stderr": 0.0335412657542081 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.46564885496183206, + "acc_stderr": 0.043749285605997376, + "acc_norm": 0.46564885496183206, + "acc_norm_stderr": 0.043749285605997376 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6161616161616161, + "acc_stderr": 0.03464881675016338, + "acc_norm": 0.6161616161616161, + "acc_norm_stderr": 0.03464881675016338 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.47586206896551725, + "acc_stderr": 0.041618085035015295, + "acc_norm": 0.47586206896551725, + "acc_norm_stderr": 0.041618085035015295 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.04440521906179327, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.04440521906179327 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.032145368597886394, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.032145368597886394 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.45384615384615384, + "acc_stderr": 0.025242770987126177, + "acc_norm": 0.45384615384615384, + "acc_norm_stderr": 0.025242770987126177 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.57, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.57, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + 
"acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5185185185185185, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.5185185185185185, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3891625615763547, + "acc_stderr": 0.034304624161038716, + "acc_norm": 0.3891625615763547, + "acc_norm_stderr": 0.034304624161038716 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4870967741935484, + "acc_stderr": 0.02843453315268184, + "acc_norm": 0.4870967741935484, + "acc_norm_stderr": 0.02843453315268184 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6965811965811965, + "acc_stderr": 0.03011821010694266, + "acc_norm": 0.6965811965811965, + "acc_norm_stderr": 0.03011821010694266 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5132075471698113, + "acc_stderr": 0.030762134874500482, + "acc_norm": 0.5132075471698113, + "acc_norm_stderr": 0.030762134874500482 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5181818181818182, + "acc_stderr": 0.04785964010794916, + "acc_norm": 0.5181818181818182, + "acc_norm_stderr": 0.04785964010794916 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.28888888888888886, + "acc_stderr": 0.027634907264178544, + "acc_norm": 0.28888888888888886, + "acc_norm_stderr": 0.027634907264178544 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.32450331125827814, + "acc_stderr": 0.038227469376587525, + "acc_norm": 0.32450331125827814, + "acc_norm_stderr": 0.038227469376587525 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5621890547263682, + "acc_stderr": 0.0350808011219984, + "acc_norm": 0.5621890547263682, + "acc_norm_stderr": 0.0350808011219984 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.36416184971098264, + "acc_stderr": 0.03669072477416906, + "acc_norm": 0.36416184971098264, + "acc_norm_stderr": 0.03669072477416906 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 
0.34656084656084657, + "acc_stderr": 0.024508777521028424, + "acc_norm": 0.34656084656084657, + "acc_norm_stderr": 0.024508777521028424 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4097222222222222, + "acc_stderr": 0.04112490974670787, + "acc_norm": 0.4097222222222222, + "acc_norm_stderr": 0.04112490974670787 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.430635838150289, + "acc_stderr": 0.026658800273672376, + "acc_norm": 0.430635838150289, + "acc_norm_stderr": 0.026658800273672376 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.38650306748466257, + "acc_stderr": 0.03825825548848608, + "acc_norm": 0.38650306748466257, + "acc_norm_stderr": 0.03825825548848608 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.027777777777777804, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.027777777777777804 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.538860103626943, + "acc_stderr": 0.035975244117345775, + "acc_norm": 0.538860103626943, + "acc_norm_stderr": 0.035975244117345775 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3684210526315789, + "acc_stderr": 0.04537815354939391, + "acc_norm": 0.3684210526315789, + "acc_norm_stderr": 0.04537815354939391 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5871559633027523, + "acc_stderr": 0.021109128133413913, + "acc_norm": 0.5871559633027523, + "acc_norm_stderr": 0.021109128133413913 + }, + 
"harness|ko_mmlu_formal_logic|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.04040610178208839, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.04040610178208839 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4542483660130719, + "acc_stderr": 0.02850980780262656, + "acc_norm": 0.4542483660130719, + "acc_norm_stderr": 0.02850980780262656 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.043913262867240704, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.043913262867240704 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40789473684210525, + "acc_stderr": 0.03999309712777473, + "acc_norm": 0.40789473684210525, + "acc_norm_stderr": 0.03999309712777473 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3790849673202614, + "acc_stderr": 0.01962744474841224, + "acc_norm": 0.3790849673202614, + "acc_norm_stderr": 0.01962744474841224 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.02812163604063988, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.02812163604063988 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.04547960999764376, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.04547960999764376 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.33796296296296297, + "acc_stderr": 0.03225941352631295, + "acc_norm": 0.33796296296296297, + "acc_norm_stderr": 0.03225941352631295 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23910614525139665, + "acc_stderr": 0.014265554192331161, + "acc_norm": 0.23910614525139665, + "acc_norm_stderr": 0.014265554192331161 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + 
"acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3786764705882353, + "acc_stderr": 0.02946513363977613, + "acc_norm": 0.3786764705882353, + "acc_norm_stderr": 0.02946513363977613 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.39183673469387753, + "acc_stderr": 0.03125127591089165, + "acc_norm": 0.39183673469387753, + "acc_norm_stderr": 0.03125127591089165 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6075949367088608, + "acc_stderr": 0.03178471874564729, + "acc_norm": 0.6075949367088608, + "acc_norm_stderr": 0.03178471874564729 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.32529335071707954, + "acc_stderr": 0.011965311536571528, + "acc_norm": 0.32529335071707954, + "acc_norm_stderr": 0.011965311536571528 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5049019607843137, + "acc_stderr": 0.03509143375606786, + "acc_norm": 0.5049019607843137, + "acc_norm_stderr": 0.03509143375606786 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5696969696969697, + "acc_stderr": 0.038662259628790774, + "acc_norm": 0.5696969696969697, + "acc_norm_stderr": 0.038662259628790774 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2692778457772338, + "mc1_stderr": 0.015528566637087302, + "mc2": 0.41178231606045385, + "mc2_stderr": 0.014882012342473607 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5324675324675324, + "acc_stderr": 0.017154073716682868, + "acc_norm": 0.602125147579693, + "acc_norm_stderr": 0.01682795905473339 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + 
"harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + 
"harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "ifuseok/yi-ko-playtus-instruct-v0.2", + "model_sha": "2682d6473035aabe0a0fc7a2ab035659225a1d36", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/igig98/ppo2/result_2023-10-29 13:20:18.json b/igig98/ppo2/result_2023-10-29 13:20:18.json new file mode 100644 index 0000000000000000000000000000000000000000..0a13d7a8b69cce47a7d15f600c5edcde705ad20b --- /dev/null +++ b/igig98/ppo2/result_2023-10-29 13:20:18.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2909556313993174, + "acc_stderr": 0.013273077865907573, + "acc_norm": 0.3447098976109215, + "acc_norm_stderr": 0.013888816286782112 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3908583947420832, + "acc_stderr": 0.00486945515093382, + "acc_norm": 0.5073690499900418, + "acc_norm_stderr": 0.004989239462835228 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.29239766081871343, + "acc_stderr": 0.03488647713457923, + "acc_norm": 0.29239766081871343, + "acc_norm_stderr": 0.03488647713457923 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.18446601941747573, + "acc_stderr": 0.03840423627288276, + "acc_norm": 0.18446601941747573, + "acc_norm_stderr": 0.03840423627288276 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2554278416347382, + "acc_stderr": 0.015594955384455765, + "acc_norm": 0.2554278416347382, + 
"acc_norm_stderr": 0.015594955384455765 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34814814814814815, + "acc_stderr": 0.041153246103369526, + "acc_norm": 0.34814814814814815, + "acc_norm_stderr": 0.041153246103369526 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939098, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939098 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.225531914893617, + "acc_stderr": 0.02732107841738753, + "acc_norm": 0.225531914893617, + "acc_norm_stderr": 0.02732107841738753 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.18072289156626506, + "acc_stderr": 0.029955737855810138, + "acc_norm": 0.18072289156626506, + "acc_norm_stderr": 0.029955737855810138 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.31189710610932475, + "acc_stderr": 0.02631185807185416, + "acc_norm": 0.31189710610932475, + "acc_norm_stderr": 0.02631185807185416 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.16143497757847533, + "acc_stderr": 0.02469395789912846, + "acc_norm": 0.16143497757847533, + "acc_norm_stderr": 0.02469395789912846 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.25190839694656486, + "acc_stderr": 0.038073871163060866, + "acc_norm": 0.25190839694656486, + "acc_norm_stderr": 0.038073871163060866 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816505, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816505 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.2828282828282828, + "acc_stderr": 0.03208779558786751, + "acc_norm": 0.2828282828282828, + "acc_norm_stderr": 0.03208779558786751 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2827586206896552, + "acc_stderr": 0.03752833958003337, + "acc_norm": 0.2827586206896552, + "acc_norm_stderr": 0.03752833958003337 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.04158307533083286, + "acc_norm": 
0.22549019607843138, + "acc_norm_stderr": 0.04158307533083286 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.21428571428571427, + "acc_stderr": 0.026653531596715477, + "acc_norm": 0.21428571428571427, + "acc_norm_stderr": 0.026653531596715477 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2358974358974359, + "acc_stderr": 0.021525965407408726, + "acc_norm": 0.2358974358974359, + "acc_norm_stderr": 0.021525965407408726 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932269, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932269 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.25, + "acc_stderr": 0.04186091791394607, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04186091791394607 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2413793103448276, + "acc_stderr": 0.030108330718011625, + "acc_norm": 0.2413793103448276, + "acc_norm_stderr": 0.030108330718011625 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.25806451612903225, + "acc_stderr": 0.024892469172462826, + "acc_norm": 0.25806451612903225, + "acc_norm_stderr": 0.024892469172462826 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.23504273504273504, + "acc_stderr": 0.027778835904935437, + "acc_norm": 0.23504273504273504, + "acc_norm_stderr": 0.027778835904935437 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.24528301886792453, + "acc_stderr": 0.0264803571798957, + "acc_norm": 0.24528301886792453, + "acc_norm_stderr": 0.0264803571798957 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2, + "acc_stderr": 0.03831305140884601, + "acc_norm": 0.2, + "acc_norm_stderr": 0.03831305140884601 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2518518518518518, + "acc_stderr": 0.026466117538959912, + "acc_norm": 
0.2518518518518518, + "acc_norm_stderr": 0.026466117538959912 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.038020397601079024, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.038020397601079024 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.208955223880597, + "acc_stderr": 0.028748298931728658, + "acc_norm": 0.208955223880597, + "acc_norm_stderr": 0.028748298931728658 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2658959537572254, + "acc_stderr": 0.033687629322594295, + "acc_norm": 0.2658959537572254, + "acc_norm_stderr": 0.033687629322594295 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.23544973544973544, + "acc_stderr": 0.021851509822031715, + "acc_norm": 0.23544973544973544, + "acc_norm_stderr": 0.021851509822031715 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2916666666666667, + "acc_stderr": 0.03800968060554858, + "acc_norm": 0.2916666666666667, + "acc_norm_stderr": 0.03800968060554858 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036846, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036846 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.23699421965317918, + "acc_stderr": 0.02289408248992599, + "acc_norm": 0.23699421965317918, + "acc_norm_stderr": 0.02289408248992599 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3067484662576687, + "acc_stderr": 0.03623089915724148, + "acc_norm": 0.3067484662576687, + "acc_norm_stderr": 0.03623089915724148 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3117283950617284, + "acc_stderr": 0.025773111169630433, + "acc_norm": 0.3117283950617284, + "acc_norm_stderr": 0.025773111169630433 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, 
+ "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.23316062176165803, + "acc_stderr": 0.03051611137147602, + "acc_norm": 0.23316062176165803, + "acc_norm_stderr": 0.03051611137147602 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.22807017543859648, + "acc_stderr": 0.03947152782669415, + "acc_norm": 0.22807017543859648, + "acc_norm_stderr": 0.03947152782669415 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.24220183486238533, + "acc_stderr": 0.01836817630659862, + "acc_norm": 0.24220183486238533, + "acc_norm_stderr": 0.01836817630659862 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.16666666666666666, + "acc_stderr": 0.03333333333333338, + "acc_norm": 0.16666666666666666, + "acc_norm_stderr": 0.03333333333333338 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.238562091503268, + "acc_stderr": 0.024404394928087873, + "acc_norm": 0.238562091503268, + "acc_norm_stderr": 0.024404394928087873 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.22, + "acc_stderr": 0.041633319989322674, + "acc_norm": 0.22, + "acc_norm_stderr": 0.041633319989322674 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.30578512396694213, + "acc_stderr": 0.04205953933884123, + "acc_norm": 0.30578512396694213, + "acc_norm_stderr": 0.04205953933884123 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3026315789473684, + "acc_stderr": 0.03738520676119667, + "acc_norm": 0.3026315789473684, + "acc_norm_stderr": 0.03738520676119667 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.28104575163398693, + "acc_stderr": 0.018185218954318082, + "acc_norm": 0.28104575163398693, + "acc_norm_stderr": 0.018185218954318082 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.23404255319148937, + "acc_stderr": 0.025257861359432407, + "acc_norm": 0.23404255319148937, + "acc_norm_stderr": 0.025257861359432407 + }, + "harness|ko_mmlu_machine_learning|5": { + 
"acc": 0.21428571428571427, + "acc_stderr": 0.03894641120044793, + "acc_norm": 0.21428571428571427, + "acc_norm_stderr": 0.03894641120044793 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.03275773486100998, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.03275773486100998 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27150837988826815, + "acc_stderr": 0.014874252168095278, + "acc_norm": 0.27150837988826815, + "acc_norm_stderr": 0.014874252168095278 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.375, + "acc_stderr": 0.029408372932278746, + "acc_norm": 0.375, + "acc_norm_stderr": 0.029408372932278746 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.24897959183673468, + "acc_stderr": 0.027682979522960234, + "acc_norm": 0.24897959183673468, + "acc_norm_stderr": 0.027682979522960234 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2869198312236287, + "acc_stderr": 0.029443773022594693, + "acc_norm": 0.2869198312236287, + "acc_norm_stderr": 0.029443773022594693 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.27053455019556716, + "acc_stderr": 0.011345996743539264, + "acc_norm": 0.27053455019556716, + "acc_norm_stderr": 0.011345996743539264 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.29901960784313725, + "acc_stderr": 0.03213325717373618, + "acc_norm": 0.29901960784313725, + "acc_norm_stderr": 0.03213325717373618 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2606060606060606, + "acc_stderr": 0.03427743175816524, + "acc_norm": 0.2606060606060606, + "acc_norm_stderr": 
0.03427743175816524 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2717258261933905, + "mc1_stderr": 0.015572840452875833, + "mc2": 0.4229362575464193, + "mc2_stderr": 0.015023014923371594 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3022432113341204, + "acc_stderr": 0.015788654863022375, + "acc_norm": 0.3447461629279811, + "acc_norm_stderr": 0.016340649905418697 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + 
"harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "igig98/ppo2", + "model_sha": "12b21b4455bffbaea9811bddb74ceb8cb6cc5f8c", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/ihopper/I-SOLAR-10.7B-dpo-sft-v1.0/result_2024-04-18 05:54:45.json b/ihopper/I-SOLAR-10.7B-dpo-sft-v1.0/result_2024-04-18 05:54:45.json new file mode 100644 index 0000000000000000000000000000000000000000..46835a46bb889ef139c0e4c57f503c3bc1fa5f84 --- /dev/null +++ b/ihopper/I-SOLAR-10.7B-dpo-sft-v1.0/result_2024-04-18 05:54:45.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.7278156996587031, + "acc_stderr": 0.013006600406423709, + "acc_norm": 0.757679180887372, + "acc_norm_stderr": 0.01252159329580012 + }, + "harness|ko_hellaswag|10": { + 
"acc": 0.7027484564827724, + "acc_stderr": 0.004561141293448459, + "acc_norm": 0.8167695678151763, + "acc_norm_stderr": 0.0038606469988972836 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6900584795321637, + "acc_stderr": 0.035469769593931624, + "acc_norm": 0.6900584795321637, + "acc_norm_stderr": 0.035469769593931624 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6893203883495146, + "acc_stderr": 0.0458212416016155, + "acc_norm": 0.6893203883495146, + "acc_norm_stderr": 0.0458212416016155 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6743295019157088, + "acc_stderr": 0.016757989458549682, + "acc_norm": 0.6743295019157088, + "acc_norm_stderr": 0.016757989458549682 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.043163785995113245, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.043163785995113245 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4723404255319149, + "acc_stderr": 0.03263597118409769, + "acc_norm": 0.4723404255319149, + "acc_norm_stderr": 0.03263597118409769 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.463855421686747, + "acc_stderr": 0.03882310850890594, + "acc_norm": 0.463855421686747, + "acc_norm_stderr": 0.03882310850890594 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6141479099678456, + "acc_stderr": 0.02764814959975146, + "acc_norm": 0.6141479099678456, + "acc_norm_stderr": 0.02764814959975146 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.6367713004484304, + "acc_stderr": 0.032277904428505, + "acc_norm": 0.6367713004484304, + "acc_norm_stderr": 0.032277904428505 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5725190839694656, + "acc_stderr": 0.04338920305792401, + "acc_norm": 0.5725190839694656, + "acc_norm_stderr": 0.04338920305792401 + }, + 
"harness|ko_mmlu_medical_genetics|5": { + "acc": 0.53, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.53, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7171717171717171, + "acc_stderr": 0.032087795587867514, + "acc_norm": 0.7171717171717171, + "acc_norm_stderr": 0.032087795587867514 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4827586206896552, + "acc_stderr": 0.041641887201693775, + "acc_norm": 0.4827586206896552, + "acc_norm_stderr": 0.041641887201693775 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.37254901960784315, + "acc_stderr": 0.04810840148082635, + "acc_norm": 0.37254901960784315, + "acc_norm_stderr": 0.04810840148082635 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6134453781512605, + "acc_stderr": 0.031631458075523776, + "acc_norm": 0.6134453781512605, + "acc_norm_stderr": 0.031631458075523776 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.6025641025641025, + "acc_stderr": 0.024811920017903832, + "acc_norm": 0.6025641025641025, + "acc_norm_stderr": 0.024811920017903832 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.65, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.65, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542129, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542129 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.04557239513497751, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.04557239513497751 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4630541871921182, + "acc_stderr": 0.035083705204426656, + "acc_norm": 0.4630541871921182, + "acc_norm_stderr": 0.035083705204426656 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5741935483870968, + "acc_stderr": 0.028129112709165908, + "acc_norm": 0.5741935483870968, + 
"acc_norm_stderr": 0.028129112709165908 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7991452991452992, + "acc_stderr": 0.026246772946890484, + "acc_norm": 0.7991452991452992, + "acc_norm_stderr": 0.026246772946890484 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5471698113207547, + "acc_stderr": 0.03063562795796182, + "acc_norm": 0.5471698113207547, + "acc_norm_stderr": 0.03063562795796182 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6272727272727273, + "acc_stderr": 0.04631381319425465, + "acc_norm": 0.6272727272727273, + "acc_norm_stderr": 0.04631381319425465 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.34444444444444444, + "acc_stderr": 0.028972648884844267, + "acc_norm": 0.34444444444444444, + "acc_norm_stderr": 0.028972648884844267 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3443708609271523, + "acc_stderr": 0.038796870240733264, + "acc_norm": 0.3443708609271523, + "acc_norm_stderr": 0.038796870240733264 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7164179104477612, + "acc_stderr": 0.03187187537919796, + "acc_norm": 0.7164179104477612, + "acc_norm_stderr": 0.03187187537919796 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5375722543352601, + "acc_stderr": 0.0380168510452446, + "acc_norm": 0.5375722543352601, + "acc_norm_stderr": 0.0380168510452446 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.4417989417989418, + "acc_stderr": 0.025576257061253833, + "acc_norm": 0.4417989417989418, + "acc_norm_stderr": 0.025576257061253833 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5763888888888888, + "acc_stderr": 0.04132125019723369, + "acc_norm": 0.5763888888888888, + "acc_norm_stderr": 0.04132125019723369 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.75, + "acc_stderr": 
0.04351941398892446, + "acc_norm": 0.75, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.6011560693641619, + "acc_stderr": 0.02636243757454654, + "acc_norm": 0.6011560693641619, + "acc_norm_stderr": 0.02636243757454654 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5398773006134969, + "acc_stderr": 0.039158572914369714, + "acc_norm": 0.5398773006134969, + "acc_norm_stderr": 0.039158572914369714 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6358024691358025, + "acc_stderr": 0.026774929899722345, + "acc_norm": 0.6358024691358025, + "acc_norm_stderr": 0.026774929899722345 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7564766839378239, + "acc_stderr": 0.03097543638684543, + "acc_norm": 0.7564766839378239, + "acc_norm_stderr": 0.03097543638684543 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.4649122807017544, + "acc_stderr": 0.046920083813689104, + "acc_norm": 0.4649122807017544, + "acc_norm_stderr": 0.046920083813689104 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.689908256880734, + "acc_stderr": 0.01983084968443975, + "acc_norm": 0.689908256880734, + "acc_norm_stderr": 0.01983084968443975 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4603174603174603, + "acc_stderr": 0.04458029125470973, + "acc_norm": 0.4603174603174603, + "acc_norm_stderr": 0.04458029125470973 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5588235294117647, + "acc_stderr": 0.028431095444176636, + "acc_norm": 0.5588235294117647, + "acc_norm_stderr": 0.028431095444176636 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.7, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.7, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7024793388429752, + 
"acc_stderr": 0.041733491480834994, + "acc_norm": 0.7024793388429752, + "acc_norm_stderr": 0.041733491480834994 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5986842105263158, + "acc_stderr": 0.03988903703336284, + "acc_norm": 0.5986842105263158, + "acc_norm_stderr": 0.03988903703336284 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5702614379084967, + "acc_stderr": 0.020027122784928558, + "acc_norm": 0.5702614379084967, + "acc_norm_stderr": 0.020027122784928558 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.4148936170212766, + "acc_stderr": 0.0293922365846125, + "acc_norm": 0.4148936170212766, + "acc_norm_stderr": 0.0293922365846125 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.41964285714285715, + "acc_stderr": 0.04684099321077106, + "acc_norm": 0.41964285714285715, + "acc_norm_stderr": 0.04684099321077106 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4537037037037037, + "acc_stderr": 0.033953227263757976, + "acc_norm": 0.4537037037037037, + "acc_norm_stderr": 0.033953227263757976 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2871508379888268, + "acc_stderr": 0.015131608849963752, + "acc_norm": 0.2871508379888268, + "acc_norm_stderr": 0.015131608849963752 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.69, + "acc_stderr": 0.046482319871173156, + "acc_norm": 0.69, + "acc_norm_stderr": 0.046482319871173156 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5220588235294118, + "acc_stderr": 0.03034326422421352, + "acc_norm": 0.5220588235294118, + "acc_norm_stderr": 0.03034326422421352 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6285714285714286, + "acc_stderr": 0.03093285879278986, + "acc_norm": 0.6285714285714286, + "acc_norm_stderr": 0.03093285879278986 + }, + 
"harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7088607594936709, + "acc_stderr": 0.029571601065753374, + "acc_norm": 0.7088607594936709, + "acc_norm_stderr": 0.029571601065753374 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.42959582790091266, + "acc_stderr": 0.012643004623790215, + "acc_norm": 0.42959582790091266, + "acc_norm_stderr": 0.012643004623790215 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6911764705882353, + "acc_stderr": 0.03242661719827218, + "acc_norm": 0.6911764705882353, + "acc_norm_stderr": 0.03242661719827218 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6, + "acc_stderr": 0.038254602783800246, + "acc_norm": 0.6, + "acc_norm_stderr": 0.038254602783800246 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.762545899632803, + "mc1_stderr": 0.01489627744104182, + "mc2": 0.8381116116755583, + "mc2_stderr": 0.01236997467680779 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.500590318772137, + "acc_stderr": 0.017190342123448586, + "acc_norm": 0.5230224321133412, + "acc_norm_stderr": 0.017172121546727634 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + 
"harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "ihopper/I-SOLAR-10.7B-dpo-sft-v1.0", + "model_sha": "c13a2cdd7149ef8063499634a15f44eef5e3bb4e", + "model_dtype": "torch.float16", + 
"lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/ihopper/I-SOLAR-10.7B-sft-v1.0/result_2024-05-16 08:20:42.json b/ihopper/I-SOLAR-10.7B-sft-v1.0/result_2024-05-16 08:20:42.json new file mode 100644 index 0000000000000000000000000000000000000000..8769de0ec7fa7b4165866002ad5dc13b8e763bca --- /dev/null +++ b/ihopper/I-SOLAR-10.7B-sft-v1.0/result_2024-05-16 08:20:42.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.7312286689419796, + "acc_stderr": 0.012955065963710682, + "acc_norm": 0.7696245733788396, + "acc_norm_stderr": 0.01230492841874761 + }, + "harness|ko_hellaswag|10": { + "acc": 0.5748854809798845, + "acc_stderr": 0.004933500261683602, + "acc_norm": 0.7308305118502291, + "acc_norm_stderr": 0.004426217654917972 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.7017543859649122, + "acc_stderr": 0.03508771929824564, + "acc_norm": 0.7017543859649122, + "acc_norm_stderr": 0.03508771929824564 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.7378640776699029, + "acc_stderr": 0.04354631077260597, + "acc_norm": 0.7378640776699029, + "acc_norm_stderr": 0.04354631077260597 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.7279693486590039, + "acc_stderr": 0.01591336744750054, + "acc_norm": 0.7279693486590039, + "acc_norm_stderr": 0.01591336744750054 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4962962962962963, + "acc_stderr": 0.04319223625811331, + "acc_norm": 0.4962962962962963, + "acc_norm_stderr": 0.04319223625811331 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.5148936170212766, + "acc_stderr": 0.032671518489247764, + "acc_norm": 0.5148936170212766, + "acc_norm_stderr": 0.032671518489247764 + }, + 
"harness|ko_mmlu_virology|5": { + "acc": 0.5120481927710844, + "acc_stderr": 0.038913644958358175, + "acc_norm": 0.5120481927710844, + "acc_norm_stderr": 0.038913644958358175 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.662379421221865, + "acc_stderr": 0.026858825879488558, + "acc_norm": 0.662379421221865, + "acc_norm_stderr": 0.026858825879488558 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.6771300448430493, + "acc_stderr": 0.03138147637575499, + "acc_norm": 0.6771300448430493, + "acc_norm_stderr": 0.03138147637575499 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5954198473282443, + "acc_stderr": 0.043046937953806645, + "acc_norm": 0.5954198473282443, + "acc_norm_stderr": 0.043046937953806645 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956914, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956914 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7777777777777778, + "acc_stderr": 0.029620227874790465, + "acc_norm": 0.7777777777777778, + "acc_norm_stderr": 0.029620227874790465 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5448275862068965, + "acc_stderr": 0.04149886942192118, + "acc_norm": 0.5448275862068965, + "acc_norm_stderr": 0.04149886942192118 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04690650298201942, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04690650298201942 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.634453781512605, + "acc_stderr": 0.031282177063684614, + "acc_norm": 0.634453781512605, + "acc_norm_stderr": 0.031282177063684614 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.6435897435897436, + "acc_stderr": 0.02428314052946731, + "acc_norm": 0.6435897435897436, + "acc_norm_stderr": 0.02428314052946731 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.69, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.69, + 
"acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6574074074074074, + "acc_stderr": 0.04587904741301812, + "acc_norm": 0.6574074074074074, + "acc_norm_stderr": 0.04587904741301812 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.47783251231527096, + "acc_stderr": 0.035145285621750094, + "acc_norm": 0.47783251231527096, + "acc_norm_stderr": 0.035145285621750094 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.632258064516129, + "acc_stderr": 0.027430866579973463, + "acc_norm": 0.632258064516129, + "acc_norm_stderr": 0.027430866579973463 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.8418803418803419, + "acc_stderr": 0.02390232554956041, + "acc_norm": 0.8418803418803419, + "acc_norm_stderr": 0.02390232554956041 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5735849056603773, + "acc_stderr": 0.03043779434298305, + "acc_norm": 0.5735849056603773, + "acc_norm_stderr": 0.03043779434298305 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6454545454545455, + "acc_stderr": 0.045820048415054174, + "acc_norm": 0.6454545454545455, + "acc_norm_stderr": 0.045820048415054174 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.37777777777777777, + "acc_stderr": 0.02956070739246571, + "acc_norm": 0.37777777777777777, + "acc_norm_stderr": 0.02956070739246571 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.37748344370860926, + "acc_stderr": 0.0395802723112157, + "acc_norm": 0.37748344370860926, + "acc_norm_stderr": 0.0395802723112157 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7263681592039801, + "acc_stderr": 0.03152439186555403, + "acc_norm": 0.7263681592039801, + "acc_norm_stderr": 0.03152439186555403 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5491329479768786, + "acc_stderr": 
0.03794012674697029, + "acc_norm": 0.5491329479768786, + "acc_norm_stderr": 0.03794012674697029 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.4576719576719577, + "acc_stderr": 0.02565886886205832, + "acc_norm": 0.4576719576719577, + "acc_norm_stderr": 0.02565886886205832 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5972222222222222, + "acc_stderr": 0.04101405519842426, + "acc_norm": 0.5972222222222222, + "acc_norm_stderr": 0.04101405519842426 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.78, + "acc_stderr": 0.04163331998932263, + "acc_norm": 0.78, + "acc_norm_stderr": 0.04163331998932263 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.6184971098265896, + "acc_stderr": 0.026152198619726803, + "acc_norm": 0.6184971098265896, + "acc_norm_stderr": 0.026152198619726803 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.6073619631901841, + "acc_stderr": 0.0383674090783103, + "acc_norm": 0.6073619631901841, + "acc_norm_stderr": 0.0383674090783103 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6851851851851852, + "acc_stderr": 0.02584224870090217, + "acc_norm": 0.6851851851851852, + "acc_norm_stderr": 0.02584224870090217 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7512953367875648, + "acc_stderr": 0.031195840877700304, + "acc_norm": 0.7512953367875648, + "acc_norm_stderr": 0.031195840877700304 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.47368421052631576, + "acc_stderr": 0.046970851366478626, + "acc_norm": 0.47368421052631576, + "acc_norm_stderr": 0.046970851366478626 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.7596330275229358, 
+ "acc_stderr": 0.01832060732096407, + "acc_norm": 0.7596330275229358, + "acc_norm_stderr": 0.01832060732096407 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4365079365079365, + "acc_stderr": 0.04435932892851466, + "acc_norm": 0.4365079365079365, + "acc_norm_stderr": 0.04435932892851466 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.6470588235294118, + "acc_stderr": 0.027363593284684972, + "acc_norm": 0.6470588235294118, + "acc_norm_stderr": 0.027363593284684972 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.72, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.72, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7355371900826446, + "acc_stderr": 0.04026187527591207, + "acc_norm": 0.7355371900826446, + "acc_norm_stderr": 0.04026187527591207 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.6578947368421053, + "acc_stderr": 0.038607315993160904, + "acc_norm": 0.6578947368421053, + "acc_norm_stderr": 0.038607315993160904 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5947712418300654, + "acc_stderr": 0.019861155193829163, + "acc_norm": 0.5947712418300654, + "acc_norm_stderr": 0.019861155193829163 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.4397163120567376, + "acc_stderr": 0.029609912075594116, + "acc_norm": 0.4397163120567376, + "acc_norm_stderr": 0.029609912075594116 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.44642857142857145, + "acc_stderr": 0.04718471485219588, + "acc_norm": 0.44642857142857145, + "acc_norm_stderr": 0.04718471485219588 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5231481481481481, + "acc_stderr": 0.034063153607115065, + "acc_norm": 0.5231481481481481, + "acc_norm_stderr": 0.034063153607115065 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.44692737430167595, + "acc_stderr": 0.016628030039647614, + "acc_norm": 0.44692737430167595, + "acc_norm_stderr": 0.016628030039647614 + }, + 
"harness|ko_mmlu_college_computer_science|5": { + "acc": 0.55, + "acc_stderr": 0.05, + "acc_norm": 0.55, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.72, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.72, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5404411764705882, + "acc_stderr": 0.030273325077345755, + "acc_norm": 0.5404411764705882, + "acc_norm_stderr": 0.030273325077345755 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6693877551020408, + "acc_stderr": 0.03011642629654062, + "acc_norm": 0.6693877551020408, + "acc_norm_stderr": 0.03011642629654062 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7637130801687764, + "acc_stderr": 0.02765215314415927, + "acc_norm": 0.7637130801687764, + "acc_norm_stderr": 0.02765215314415927 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.44328552803129073, + "acc_stderr": 0.012687818419599914, + "acc_norm": 0.44328552803129073, + "acc_norm_stderr": 0.012687818419599914 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6764705882352942, + "acc_stderr": 0.03283472056108561, + "acc_norm": 0.6764705882352942, + "acc_norm_stderr": 0.03283472056108561 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.036810508691615486, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.036810508691615486 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.7417380660954712, + "mc1_stderr": 0.015321821688476185, + "mc2": 0.815328688586994, + "mc2_stderr": 0.012960756154797257 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5053128689492326, + "acc_stderr": 0.01718938362722971, + "acc_norm": 0.5230224321133412, + "acc_norm_stderr": 0.017172121546727634 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + 
"harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + 
"harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "ihopper/I-SOLAR-10.7B-sft-v1.0", + "model_sha": "553bb2e7a6b0b43571e3ccda218ad8aebba4a0c9", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/ihopper/ko-Llama-3-8B-Instruct-sft-v1.0/result_2024-05-13 03:02:19.json b/ihopper/ko-Llama-3-8B-Instruct-sft-v1.0/result_2024-05-13 03:02:19.json new file mode 100644 index 0000000000000000000000000000000000000000..daff0ce1124cc0eb781a134643312eab2c7840e2 --- /dev/null +++ b/ihopper/ko-Llama-3-8B-Instruct-sft-v1.0/result_2024-05-13 03:02:19.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.36945392491467577, + "acc_stderr": 0.014104578366491888, + "acc_norm": 0.4249146757679181, + "acc_norm_stderr": 0.01444569896852077 + }, + "harness|ko_hellaswag|10": { + "acc": 0.39294961163114916, + "acc_stderr": 0.004874076250521578, + "acc_norm": 0.5235012945628361, + "acc_norm_stderr": 0.004984266543053128 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.52046783625731, + "acc_stderr": 0.038316105328219316, + "acc_norm": 0.52046783625731, + "acc_norm_stderr": 0.038316105328219316 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5145631067961165, + "acc_stderr": 0.04948637324026637, + "acc_norm": 0.5145631067961165, + "acc_norm_stderr": 
0.04948637324026637 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4355044699872286, + "acc_stderr": 0.017730589927926598, + "acc_norm": 0.4355044699872286, + "acc_norm_stderr": 0.017730589927926598 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4, + "acc_stderr": 0.04232073695151589, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04232073695151589 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39148936170212767, + "acc_stderr": 0.03190701242326812, + "acc_norm": 0.39148936170212767, + "acc_norm_stderr": 0.03190701242326812 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3433734939759036, + "acc_stderr": 0.03696584317010601, + "acc_norm": 0.3433734939759036, + "acc_norm_stderr": 0.03696584317010601 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.48231511254019294, + "acc_stderr": 0.02838032284907713, + "acc_norm": 0.48231511254019294, + "acc_norm_stderr": 0.02838032284907713 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.47533632286995514, + "acc_stderr": 0.03351695167652628, + "acc_norm": 0.47533632286995514, + "acc_norm_stderr": 0.03351695167652628 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5114503816793893, + "acc_stderr": 0.04384140024078016, + "acc_norm": 0.5114503816793893, + "acc_norm_stderr": 0.04384140024078016 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5151515151515151, + "acc_stderr": 0.0356071651653106, + "acc_norm": 0.5151515151515151, + "acc_norm_stderr": 0.0356071651653106 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5241379310344828, + "acc_stderr": 0.0416180850350153, + "acc_norm": 0.5241379310344828, + "acc_norm_stderr": 0.0416180850350153 + }, + 
"harness|ko_mmlu_college_physics|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.04533838195929776, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.04533838195929776 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5252100840336135, + "acc_stderr": 0.03243718055137411, + "acc_norm": 0.5252100840336135, + "acc_norm_stderr": 0.03243718055137411 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.47692307692307695, + "acc_stderr": 0.025323990861736125, + "acc_norm": 0.47692307692307695, + "acc_norm_stderr": 0.025323990861736125 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3793103448275862, + "acc_stderr": 0.034139638059062345, + "acc_norm": 0.3793103448275862, + "acc_norm_stderr": 0.034139638059062345 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.36451612903225805, + "acc_stderr": 0.02737987122994325, + "acc_norm": 0.36451612903225805, + "acc_norm_stderr": 0.02737987122994325 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7350427350427351, + "acc_stderr": 0.028911208802749472, + "acc_norm": 0.7350427350427351, + "acc_norm_stderr": 0.028911208802749472 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4641509433962264, + "acc_stderr": 0.030693675018458003, + "acc_norm": 0.4641509433962264, + "acc_norm_stderr": 0.030693675018458003 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4909090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.4909090909090909, + "acc_norm_stderr": 
0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.337037037037037, + "acc_stderr": 0.028820884666253255, + "acc_norm": 0.337037037037037, + "acc_norm_stderr": 0.028820884666253255 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3708609271523179, + "acc_stderr": 0.03943966699183629, + "acc_norm": 0.3708609271523179, + "acc_norm_stderr": 0.03943966699183629 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6417910447761194, + "acc_stderr": 0.03390393042268814, + "acc_norm": 0.6417910447761194, + "acc_norm_stderr": 0.03390393042268814 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4277456647398844, + "acc_stderr": 0.03772446857518026, + "acc_norm": 0.4277456647398844, + "acc_norm_stderr": 0.03772446857518026 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3544973544973545, + "acc_stderr": 0.024636830602841997, + "acc_norm": 0.3544973544973545, + "acc_norm_stderr": 0.024636830602841997 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4097222222222222, + "acc_stderr": 0.04112490974670788, + "acc_norm": 0.4097222222222222, + "acc_norm_stderr": 0.04112490974670788 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.56, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.56, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.546242774566474, + "acc_stderr": 0.02680372058320619, + "acc_norm": 0.546242774566474, + "acc_norm_stderr": 0.02680372058320619 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4233128834355828, + "acc_stderr": 0.038818912133343826, + "acc_norm": 0.4233128834355828, + "acc_norm_stderr": 0.038818912133343826 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5, + "acc_stderr": 0.02782074420373286, + "acc_norm": 0.5, + "acc_norm_stderr": 
0.02782074420373286 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5906735751295337, + "acc_stderr": 0.03548608168860806, + "acc_norm": 0.5906735751295337, + "acc_norm_stderr": 0.03548608168860806 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.30701754385964913, + "acc_stderr": 0.0433913832257986, + "acc_norm": 0.30701754385964913, + "acc_norm_stderr": 0.0433913832257986 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.581651376146789, + "acc_stderr": 0.021149548596443878, + "acc_norm": 0.581651376146789, + "acc_norm_stderr": 0.021149548596443878 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04216370213557835, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04216370213557835 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.49673202614379086, + "acc_stderr": 0.02862930519400354, + "acc_norm": 0.49673202614379086, + "acc_norm_stderr": 0.02862930519400354 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6611570247933884, + "acc_stderr": 0.04320767807536669, + "acc_norm": 0.6611570247933884, + "acc_norm_stderr": 0.04320767807536669 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5263157894736842, + "acc_stderr": 0.040633027314866725, + "acc_norm": 0.5263157894736842, + "acc_norm_stderr": 0.040633027314866725 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.40032679738562094, + "acc_stderr": 0.01982184368827177, + "acc_norm": 0.40032679738562094, + "acc_norm_stderr": 0.01982184368827177 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3191489361702128, + "acc_stderr": 0.027807990141320203, + "acc_norm": 
0.3191489361702128, + "acc_norm_stderr": 0.027807990141320203 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.44642857142857145, + "acc_stderr": 0.047184714852195886, + "acc_norm": 0.44642857142857145, + "acc_norm_stderr": 0.047184714852195886 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.33796296296296297, + "acc_stderr": 0.03225941352631295, + "acc_norm": 0.33796296296296297, + "acc_norm_stderr": 0.03225941352631295 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.25027932960893856, + "acc_stderr": 0.01448750085285041, + "acc_norm": 0.25027932960893856, + "acc_norm_stderr": 0.01448750085285041 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3897058823529412, + "acc_stderr": 0.029624663581159685, + "acc_norm": 0.3897058823529412, + "acc_norm_stderr": 0.029624663581159685 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5387755102040817, + "acc_stderr": 0.03191282052669278, + "acc_norm": 0.5387755102040817, + "acc_norm_stderr": 0.03191282052669278 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6033755274261603, + "acc_stderr": 0.03184399873811224, + "acc_norm": 0.6033755274261603, + "acc_norm_stderr": 0.03184399873811224 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3663624511082138, + "acc_stderr": 0.012305658346838444, + "acc_norm": 0.3663624511082138, + "acc_norm_stderr": 0.012305658346838444 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.46568627450980393, + "acc_stderr": 0.03501038327635897, + "acc_norm": 0.46568627450980393, + "acc_norm_stderr": 0.03501038327635897 + }, + 
"harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5818181818181818, + "acc_stderr": 0.03851716319398393, + "acc_norm": 0.5818181818181818, + "acc_norm_stderr": 0.03851716319398393 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.26438188494492043, + "mc1_stderr": 0.015438211119522509, + "mc2": 0.4129971395038817, + "mc2_stderr": 0.015149379790573684 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3246753246753247, + "acc_stderr": 0.01609888393934646, + "acc_norm": 0.42502951593860683, + "acc_norm_stderr": 0.016996016308362887 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + 
"harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "ihopper/ko-Llama-3-8B-Instruct-sft-v1.0", + "model_sha": "e1fa98a7f781fa54884d1dd13801788c3dcaf0d7", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/ihopper/ko-gemma-7b-sft-dpo-v1.0/result_2024-04-04 06:28:18.json b/ihopper/ko-gemma-7b-sft-dpo-v1.0/result_2024-04-04 06:28:18.json new file mode 100644 index 0000000000000000000000000000000000000000..3a301b163cefa6c31e8fa5dbe8e6415790cbb53d --- /dev/null +++ b/ihopper/ko-gemma-7b-sft-dpo-v1.0/result_2024-04-04 06:28:18.json @@ -0,0 +1,444 @@ +{ + "results": { + 
"harness|ko_arc_challenge|25": { + "acc": 0.20392491467576793, + "acc_stderr": 0.011774262478702247, + "acc_norm": 0.24829351535836178, + "acc_norm_stderr": 0.012624912868089764 + }, + "harness|ko_hellaswag|10": { + "acc": 0.2560246962756423, + "acc_stderr": 0.004355436696716298, + "acc_norm": 0.2560246962756423, + "acc_norm_stderr": 0.004355436696716298 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.21052631578947367, + "acc_stderr": 0.0312678171466318, + "acc_norm": 0.21052631578947367, + "acc_norm_stderr": 0.0312678171466318 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.3592233009708738, + "acc_stderr": 0.04750458399041693, + "acc_norm": 0.3592233009708738, + "acc_norm_stderr": 0.04750458399041693 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2886334610472541, + "acc_stderr": 0.016203792703197797, + "acc_norm": 0.2886334610472541, + "acc_norm_stderr": 0.016203792703197797 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04072314811876837, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04072314811876837 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.26382978723404255, + "acc_stderr": 0.028809989854102987, + "acc_norm": 0.26382978723404255, + "acc_norm_stderr": 0.028809989854102987 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.26506024096385544, + "acc_stderr": 0.03436024037944966, + "acc_norm": 0.26506024096385544, + "acc_norm_stderr": 0.03436024037944966 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.26366559485530544, + "acc_stderr": 0.02502553850053234, + "acc_norm": 0.26366559485530544, + "acc_norm_stderr": 0.02502553850053234 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.20179372197309417, + "acc_stderr": 0.02693611191280226, + "acc_norm": 0.20179372197309417, + "acc_norm_stderr": 0.02693611191280226 + 
}, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.26717557251908397, + "acc_stderr": 0.03880848301082394, + "acc_norm": 0.26717557251908397, + "acc_norm_stderr": 0.03880848301082394 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909284, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909284 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.23232323232323232, + "acc_stderr": 0.030088629490217483, + "acc_norm": 0.23232323232323232, + "acc_norm_stderr": 0.030088629490217483 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2413793103448276, + "acc_stderr": 0.03565998174135302, + "acc_norm": 0.2413793103448276, + "acc_norm_stderr": 0.03565998174135302 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237654, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237654 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.28991596638655465, + "acc_stderr": 0.029472485833136084, + "acc_norm": 0.28991596638655465, + "acc_norm_stderr": 0.029472485833136084 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2794871794871795, + "acc_stderr": 0.02275238883977683, + "acc_norm": 0.2794871794871795, + "acc_norm_stderr": 0.02275238883977683 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.2037037037037037, + "acc_stderr": 0.03893542518824847, + "acc_norm": 0.2037037037037037, + "acc_norm_stderr": 0.03893542518824847 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.29064039408866993, + "acc_stderr": 0.0319474007226554, + "acc_norm": 0.29064039408866993, + 
"acc_norm_stderr": 0.0319474007226554 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2838709677419355, + "acc_stderr": 0.02564938106302926, + "acc_norm": 0.2838709677419355, + "acc_norm_stderr": 0.02564938106302926 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.23931623931623933, + "acc_stderr": 0.027951826808924333, + "acc_norm": 0.23931623931623933, + "acc_norm_stderr": 0.027951826808924333 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2339622641509434, + "acc_stderr": 0.02605529690115292, + "acc_norm": 0.2339622641509434, + "acc_norm_stderr": 0.02605529690115292 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.16363636363636364, + "acc_stderr": 0.03543433054298677, + "acc_norm": 0.16363636363636364, + "acc_norm_stderr": 0.03543433054298677 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.026842057873833706, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.026842057873833706 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33112582781456956, + "acc_stderr": 0.038425817186598696, + "acc_norm": 0.33112582781456956, + "acc_norm_stderr": 0.038425817186598696 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.22885572139303484, + "acc_stderr": 0.029705284056772426, + "acc_norm": 0.22885572139303484, + "acc_norm_stderr": 0.029705284056772426 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.34104046242774566, + "acc_stderr": 0.036146654241808254, + "acc_norm": 0.34104046242774566, + "acc_norm_stderr": 0.036146654241808254 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2566137566137566, + "acc_stderr": 0.022494510767503154, + "acc_norm": 0.2566137566137566, + "acc_norm_stderr": 0.022494510767503154 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.20833333333333334, + "acc_stderr": 0.03396116205845335, + "acc_norm": 0.20833333333333334, + "acc_norm_stderr": 0.03396116205845335 + }, + "harness|ko_mmlu_college_chemistry|5": { + 
"acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.21676300578034682, + "acc_stderr": 0.02218347766841286, + "acc_norm": 0.21676300578034682, + "acc_norm_stderr": 0.02218347766841286 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3128834355828221, + "acc_stderr": 0.036429145782924055, + "acc_norm": 0.3128834355828221, + "acc_norm_stderr": 0.036429145782924055 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2345679012345679, + "acc_stderr": 0.02357688174400572, + "acc_norm": 0.2345679012345679, + "acc_norm_stderr": 0.02357688174400572 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909282, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909282 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.23316062176165803, + "acc_stderr": 0.03051611137147602, + "acc_norm": 0.23316062176165803, + "acc_norm_stderr": 0.03051611137147602 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.20175438596491227, + "acc_stderr": 0.03775205013583638, + "acc_norm": 0.20175438596491227, + "acc_norm_stderr": 0.03775205013583638 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.24954128440366974, + "acc_stderr": 0.01855389762950162, + "acc_norm": 0.24954128440366974, + "acc_norm_stderr": 0.01855389762950162 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.04006168083848877, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.04006168083848877 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.21895424836601307, + "acc_stderr": 0.02367908986180772, + "acc_norm": 0.21895424836601307, + "acc_norm_stderr": 0.02367908986180772 + }, + "harness|ko_mmlu_business_ethics|5": 
{ + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2727272727272727, + "acc_stderr": 0.04065578140908705, + "acc_norm": 0.2727272727272727, + "acc_norm_stderr": 0.04065578140908705 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.17763157894736842, + "acc_stderr": 0.03110318238312338, + "acc_norm": 0.17763157894736842, + "acc_norm_stderr": 0.03110318238312338 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2173202614379085, + "acc_stderr": 0.016684820929148598, + "acc_norm": 0.2173202614379085, + "acc_norm_stderr": 0.016684820929148598 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.23404255319148937, + "acc_stderr": 0.025257861359432414, + "acc_norm": 0.23404255319148937, + "acc_norm_stderr": 0.025257861359432414 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2767857142857143, + "acc_stderr": 0.042466243366976256, + "acc_norm": 0.2767857142857143, + "acc_norm_stderr": 0.042466243366976256 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.0340470532865388, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.0340470532865388 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2536312849162011, + "acc_stderr": 0.014551553659369923, + "acc_norm": 0.2536312849162011, + "acc_norm_stderr": 0.014551553659369923 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.26838235294117646, + "acc_stderr": 0.026917481224377232, + "acc_norm": 0.26838235294117646, + "acc_norm_stderr": 0.026917481224377232 + }, + 
"harness|ko_mmlu_security_studies|5": { + "acc": 0.20816326530612245, + "acc_stderr": 0.025991117672813292, + "acc_norm": 0.20816326530612245, + "acc_norm_stderr": 0.025991117672813292 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2616033755274262, + "acc_stderr": 0.028609516716994934, + "acc_norm": 0.2616033755274262, + "acc_norm_stderr": 0.028609516716994934 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2457627118644068, + "acc_stderr": 0.010996156635142692, + "acc_norm": 0.2457627118644068, + "acc_norm_stderr": 0.010996156635142692 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.25, + "acc_stderr": 0.03039153369274154, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03039153369274154 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03225078108306289, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03225078108306289 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2729498164014688, + "mc1_stderr": 0.015594753632006518, + "mc2": 0.4769711918408213, + "mc2_stderr": 0.016714610613318 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.1487603305785124, + "acc_stderr": 0.012234446131035063, + "acc_norm": 0.4002361275088548, + "acc_norm_stderr": 0.016844693510505052 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + 
"harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + 
"harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "ihopper/ko-gemma-7b-sft-dpo-v1.0", + "model_sha": "5e44b81d775e62d7de5c73ad884ba9d05b6d98bc", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/iknow-lab/AULM-12.8b-v0/result_2023-10-14 16:04:08.json b/iknow-lab/AULM-12.8b-v0/result_2023-10-14 16:04:08.json new file mode 100644 index 0000000000000000000000000000000000000000..aef4fe6831e4031ca6caf81f34ba52fe9888152b --- /dev/null +++ b/iknow-lab/AULM-12.8b-v0/result_2023-10-14 16:04:08.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.26706484641638223, + "acc_stderr": 0.012928933196496337, + "acc_norm": 0.3310580204778157, + "acc_norm_stderr": 0.013752062419817836 + }, + "harness|ko_hellaswag|10": { + "acc": 0.37134037044413465, + "acc_stderr": 0.004821757734156723, + "acc_norm": 0.47470623381796456, + "acc_norm_stderr": 0.004983392650570962 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.27485380116959063, + "acc_stderr": 0.03424042924691584, + "acc_norm": 0.27485380116959063, + "acc_norm_stderr": 0.03424042924691584 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.22330097087378642, + "acc_stderr": 0.04123553189891431, + "acc_norm": 0.22330097087378642, + "acc_norm_stderr": 0.04123553189891431 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2413793103448276, + "acc_stderr": 0.0153023801235421, + "acc_norm": 0.2413793103448276, + "acc_norm_stderr": 0.0153023801235421 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.03785714465066654, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.03785714465066654 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.23, + "acc_stderr": 0.042295258468165044, + "acc_norm": 0.23, + "acc_norm_stderr": 0.042295258468165044 + }, + 
"harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2, + "acc_stderr": 0.026148818018424513, + "acc_norm": 0.2, + "acc_norm_stderr": 0.026148818018424513 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.24096385542168675, + "acc_stderr": 0.03329394119073529, + "acc_norm": 0.24096385542168675, + "acc_norm_stderr": 0.03329394119073529 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.21864951768488747, + "acc_stderr": 0.02347558141786111, + "acc_norm": 0.21864951768488747, + "acc_norm_stderr": 0.02347558141786111 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3004484304932735, + "acc_stderr": 0.030769352008229136, + "acc_norm": 0.3004484304932735, + "acc_norm_stderr": 0.030769352008229136 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.24427480916030533, + "acc_stderr": 0.037683359597287434, + "acc_norm": 0.24427480916030533, + "acc_norm_stderr": 0.037683359597287434 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.29292929292929293, + "acc_stderr": 0.03242497958178817, + "acc_norm": 0.29292929292929293, + "acc_norm_stderr": 0.03242497958178817 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.25517241379310346, + "acc_stderr": 0.03632984052707842, + "acc_norm": 0.25517241379310346, + "acc_norm_stderr": 0.03632984052707842 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.044405219061793275, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.044405219061793275 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.25630252100840334, + "acc_stderr": 0.028359620870533953, + "acc_norm": 0.25630252100840334, + "acc_norm_stderr": 0.028359620870533953 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2128205128205128, + "acc_stderr": 0.020752423722128002, + "acc_norm": 
0.2128205128205128, + "acc_norm_stderr": 0.020752423722128002 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.19, + "acc_stderr": 0.039427724440366234, + "acc_norm": 0.19, + "acc_norm_stderr": 0.039427724440366234 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.040191074725573483, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.040191074725573483 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.30049261083743845, + "acc_stderr": 0.03225799476233483, + "acc_norm": 0.30049261083743845, + "acc_norm_stderr": 0.03225799476233483 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.24193548387096775, + "acc_stderr": 0.024362599693031093, + "acc_norm": 0.24193548387096775, + "acc_norm_stderr": 0.024362599693031093 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.02934311479809445, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.02934311479809445 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2490566037735849, + "acc_stderr": 0.026616482980501704, + "acc_norm": 0.2490566037735849, + "acc_norm_stderr": 0.026616482980501704 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2636363636363636, + "acc_stderr": 0.04220224692971987, + "acc_norm": 0.2636363636363636, + "acc_norm_stderr": 0.04220224692971987 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.23703703703703705, + "acc_stderr": 0.025928876132766114, + "acc_norm": 0.23703703703703705, + "acc_norm_stderr": 0.025928876132766114 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.25165562913907286, + "acc_stderr": 0.03543304234389985, + "acc_norm": 0.25165562913907286, + "acc_norm_stderr": 0.03543304234389985 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.21393034825870647, + "acc_stderr": 
0.028996909693328927, + "acc_norm": 0.21393034825870647, + "acc_norm_stderr": 0.028996909693328927 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2023121387283237, + "acc_stderr": 0.03063114553919882, + "acc_norm": 0.2023121387283237, + "acc_norm_stderr": 0.03063114553919882 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.23015873015873015, + "acc_stderr": 0.021679219663693138, + "acc_norm": 0.23015873015873015, + "acc_norm_stderr": 0.021679219663693138 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.25, + "acc_stderr": 0.03621034121889507, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03621034121889507 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909284, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909284 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421296, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421296 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.26011560693641617, + "acc_stderr": 0.023618678310069363, + "acc_norm": 0.26011560693641617, + "acc_norm_stderr": 0.023618678310069363 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.24539877300613497, + "acc_stderr": 0.03380939813943354, + "acc_norm": 0.24539877300613497, + "acc_norm_stderr": 0.03380939813943354 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.26851851851851855, + "acc_stderr": 0.02465968518596729, + "acc_norm": 0.26851851851851855, + "acc_norm_stderr": 0.02465968518596729 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909282, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909282 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.21243523316062177, + "acc_stderr": 0.029519282616817244, + "acc_norm": 0.21243523316062177, + "acc_norm_stderr": 0.029519282616817244 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 
0.040969851398436716, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.040969851398436716 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.25688073394495414, + "acc_stderr": 0.01873249292834247, + "acc_norm": 0.25688073394495414, + "acc_norm_stderr": 0.01873249292834247 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.16666666666666666, + "acc_stderr": 0.03333333333333338, + "acc_norm": 0.16666666666666666, + "acc_norm_stderr": 0.03333333333333338 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.02495418432487991, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.02495418432487991 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2396694214876033, + "acc_stderr": 0.038968789850704164, + "acc_norm": 0.2396694214876033, + "acc_norm_stderr": 0.038968789850704164 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.18421052631578946, + "acc_stderr": 0.0315469804508223, + "acc_norm": 0.18421052631578946, + "acc_norm_stderr": 0.0315469804508223 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.24836601307189543, + "acc_stderr": 0.017479487001364764, + "acc_norm": 0.24836601307189543, + "acc_norm_stderr": 0.017479487001364764 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.24113475177304963, + "acc_stderr": 0.02551873104953777, + "acc_norm": 0.24113475177304963, + "acc_norm_stderr": 0.02551873104953777 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25892857142857145, + "acc_stderr": 0.04157751539865629, + "acc_norm": 0.25892857142857145, + "acc_norm_stderr": 0.04157751539865629 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.19907407407407407, + "acc_stderr": 0.027232298462690218, + "acc_norm": 0.19907407407407407, + "acc_norm_stderr": 0.027232298462690218 + }, + 
"harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2547486033519553, + "acc_stderr": 0.014572650383409162, + "acc_norm": 0.2547486033519553, + "acc_norm_stderr": 0.014572650383409162 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.34191176470588236, + "acc_stderr": 0.028814722422254184, + "acc_norm": 0.34191176470588236, + "acc_norm_stderr": 0.028814722422254184 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.21224489795918366, + "acc_stderr": 0.026176967197866767, + "acc_norm": 0.21224489795918366, + "acc_norm_stderr": 0.026176967197866767 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2616033755274262, + "acc_stderr": 0.028609516716994934, + "acc_norm": 0.2616033755274262, + "acc_norm_stderr": 0.028609516716994934 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.25097783572359844, + "acc_stderr": 0.011073730299187234, + "acc_norm": 0.25097783572359844, + "acc_norm_stderr": 0.011073730299187234 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2696078431372549, + "acc_stderr": 0.031145570659486782, + "acc_norm": 0.2696078431372549, + "acc_norm_stderr": 0.031145570659486782 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2909090909090909, + "acc_stderr": 0.03546563019624335, + "acc_norm": 0.2909090909090909, + "acc_norm_stderr": 0.03546563019624335 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.27539779681762544, + "mc1_stderr": 0.015638135667775523, + "mc2": 0.4336773026110262, + "mc2_stderr": 0.01517918566270363 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.29515938606847697, + "acc_stderr": 0.015681535229192186, + "acc_norm": 
0.3659976387249115, + "acc_norm_stderr": 0.016561489664895696 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + 
"harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "iknow-lab/AULM-12.8b-v0", + "model_sha": "daeca40346ba44b1fbb6939cc635adf467fa6cab", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/ingeol/ppo_test/result_2023-10-16 23:46:09.json b/ingeol/ppo_test/result_2023-10-16 23:46:09.json new file mode 100644 index 0000000000000000000000000000000000000000..20db94820abbd896657d47bbd439814cc1da115e --- /dev/null +++ b/ingeol/ppo_test/result_2023-10-16 23:46:09.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.29266211604095566, + "acc_stderr": 0.013295916103619404, + "acc_norm": 0.3438566552901024, + "acc_norm_stderr": 0.013880644570156213 + }, + "harness|ko_hellaswag|10": { + "acc": 0.39026090420235016, + "acc_stderr": 0.004868117598481941, + "acc_norm": 0.5064728141804421, + "acc_norm_stderr": 0.00498936327695524 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.2982456140350877, + "acc_stderr": 0.03508771929824564, + "acc_norm": 0.2982456140350877, + "acc_norm_stderr": 0.03508771929824564 + }, + 
"harness|ko_mmlu_management|5": { + "acc": 0.18446601941747573, + "acc_stderr": 0.03840423627288276, + "acc_norm": 0.18446601941747573, + "acc_norm_stderr": 0.03840423627288276 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.25287356321839083, + "acc_stderr": 0.015543377313719681, + "acc_norm": 0.25287356321839083, + "acc_norm_stderr": 0.015543377313719681 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.35555555555555557, + "acc_stderr": 0.04135176749720386, + "acc_norm": 0.35555555555555557, + "acc_norm_stderr": 0.04135176749720386 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.20425531914893616, + "acc_stderr": 0.026355158413349407, + "acc_norm": 0.20425531914893616, + "acc_norm_stderr": 0.026355158413349407 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.18072289156626506, + "acc_stderr": 0.02995573785581014, + "acc_norm": 0.18072289156626506, + "acc_norm_stderr": 0.02995573785581014 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3279742765273312, + "acc_stderr": 0.026664410886937613, + "acc_norm": 0.3279742765273312, + "acc_norm_stderr": 0.026664410886937613 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.16591928251121077, + "acc_stderr": 0.024967553196547157, + "acc_norm": 0.16591928251121077, + "acc_norm_stderr": 0.024967553196547157 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.26717557251908397, + "acc_stderr": 0.03880848301082396, + "acc_norm": 0.26717557251908397, + "acc_norm_stderr": 0.03880848301082396 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816505, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816505 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.03191178226713549, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.03191178226713549 + 
}, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.30344827586206896, + "acc_stderr": 0.038312260488503336, + "acc_norm": 0.30344827586206896, + "acc_norm_stderr": 0.038312260488503336 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.041583075330832865, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.041583075330832865 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.22268907563025211, + "acc_stderr": 0.027025433498882378, + "acc_norm": 0.22268907563025211, + "acc_norm_stderr": 0.027025433498882378 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2282051282051282, + "acc_stderr": 0.02127839386358628, + "acc_norm": 0.2282051282051282, + "acc_norm_stderr": 0.02127839386358628 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.23, + "acc_stderr": 0.042295258468165044, + "acc_norm": 0.23, + "acc_norm_stderr": 0.042295258468165044 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.23148148148148148, + "acc_stderr": 0.04077494709252627, + "acc_norm": 0.23148148148148148, + "acc_norm_stderr": 0.04077494709252627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.24630541871921183, + "acc_stderr": 0.030315099285617732, + "acc_norm": 0.24630541871921183, + "acc_norm_stderr": 0.030315099285617732 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2709677419354839, + "acc_stderr": 0.025284416114900156, + "acc_norm": 0.2709677419354839, + "acc_norm_stderr": 0.025284416114900156 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.23931623931623933, + "acc_stderr": 0.027951826808924333, + "acc_norm": 0.23931623931623933, + "acc_norm_stderr": 0.027951826808924333 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2641509433962264, + "acc_stderr": 0.027134291628741695, + "acc_norm": 
0.2641509433962264, + "acc_norm_stderr": 0.027134291628741695 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.19090909090909092, + "acc_stderr": 0.03764425585984927, + "acc_norm": 0.19090909090909092, + "acc_norm_stderr": 0.03764425585984927 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24814814814814815, + "acc_stderr": 0.0263357394040558, + "acc_norm": 0.24814814814814815, + "acc_norm_stderr": 0.0263357394040558 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.271523178807947, + "acc_stderr": 0.03631329803969654, + "acc_norm": 0.271523178807947, + "acc_norm_stderr": 0.03631329803969654 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.21890547263681592, + "acc_stderr": 0.029239174636647, + "acc_norm": 0.21890547263681592, + "acc_norm_stderr": 0.029239174636647 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.26011560693641617, + "acc_stderr": 0.03345036916788991, + "acc_norm": 0.26011560693641617, + "acc_norm_stderr": 0.03345036916788991 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.23809523809523808, + "acc_stderr": 0.021935878081184766, + "acc_norm": 0.23809523809523808, + "acc_norm_stderr": 0.021935878081184766 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.25, + "acc_stderr": 0.03621034121889507, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03621034121889507 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.19, + "acc_stderr": 0.03942772444036624, + "acc_norm": 0.19, + "acc_norm_stderr": 0.03942772444036624 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2514450867052023, + "acc_stderr": 0.023357365785874037, + "acc_norm": 0.2514450867052023, + "acc_norm_stderr": 0.023357365785874037 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3067484662576687, + "acc_stderr": 0.03623089915724148, + 
"acc_norm": 0.3067484662576687, + "acc_norm_stderr": 0.03623089915724148 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.30864197530864196, + "acc_stderr": 0.02570264026060376, + "acc_norm": 0.30864197530864196, + "acc_norm_stderr": 0.02570264026060376 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.23316062176165803, + "acc_stderr": 0.03051611137147602, + "acc_norm": 0.23316062176165803, + "acc_norm_stderr": 0.03051611137147602 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.03999423879281336, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.03999423879281336 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.23302752293577983, + "acc_stderr": 0.0181256691808615, + "acc_norm": 0.23302752293577983, + "acc_norm_stderr": 0.0181256691808615 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.16666666666666666, + "acc_stderr": 0.03333333333333338, + "acc_norm": 0.16666666666666666, + "acc_norm_stderr": 0.03333333333333338 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.24836601307189543, + "acc_stderr": 0.02473998135511359, + "acc_norm": 0.24836601307189543, + "acc_norm_stderr": 0.02473998135511359 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.22, + "acc_stderr": 0.041633319989322674, + "acc_norm": 0.22, + "acc_norm_stderr": 0.041633319989322674 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2975206611570248, + "acc_stderr": 0.04173349148083499, + "acc_norm": 0.2975206611570248, + "acc_norm_stderr": 0.04173349148083499 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3026315789473684, + "acc_stderr": 0.03738520676119667, + "acc_norm": 0.3026315789473684, + "acc_norm_stderr": 0.03738520676119667 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2761437908496732, + 
"acc_stderr": 0.018087276935663133, + "acc_norm": 0.2761437908496732, + "acc_norm_stderr": 0.018087276935663133 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.24468085106382978, + "acc_stderr": 0.025645553622266736, + "acc_norm": 0.24468085106382978, + "acc_norm_stderr": 0.025645553622266736 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.21428571428571427, + "acc_stderr": 0.03894641120044793, + "acc_norm": 0.21428571428571427, + "acc_norm_stderr": 0.03894641120044793 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.03141554629402545, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.03141554629402545 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2659217877094972, + "acc_stderr": 0.014776765066438895, + "acc_norm": 0.2659217877094972, + "acc_norm_stderr": 0.014776765066438895 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3272058823529412, + "acc_stderr": 0.028501452860396563, + "acc_norm": 0.3272058823529412, + "acc_norm_stderr": 0.028501452860396563 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.24489795918367346, + "acc_stderr": 0.027529637440174934, + "acc_norm": 0.24489795918367346, + "acc_norm_stderr": 0.027529637440174934 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.270042194092827, + "acc_stderr": 0.028900721906293426, + "acc_norm": 0.270042194092827, + "acc_norm_stderr": 0.028900721906293426 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.27835723598435463, + "acc_stderr": 0.011446990197380982, + "acc_norm": 0.27835723598435463, + "acc_norm_stderr": 
0.011446990197380982 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.03132179803083292, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.03132179803083292 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2606060606060606, + "acc_stderr": 0.03427743175816524, + "acc_norm": 0.2606060606060606, + "acc_norm_stderr": 0.03427743175816524 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.27050183598531213, + "mc1_stderr": 0.015550778332842883, + "mc2": 0.4208363898748992, + "mc2_stderr": 0.014946599322770709 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.30578512396694213, + "acc_stderr": 0.0158405389325341, + "acc_norm": 0.36481700118063753, + "acc_norm_stderr": 0.016550144337046595 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + 
"harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "ingeol/ppo_test", + "model_sha": "ec1c89b180c1eb383c5a348b4d113733c3e8e238", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/ingeol/ppo_test/result_2023-10-16 23:46:16.json b/ingeol/ppo_test/result_2023-10-16 23:46:16.json new file mode 100644 index 
0000000000000000000000000000000000000000..20db94820abbd896657d47bbd439814cc1da115e --- /dev/null +++ b/ingeol/ppo_test/result_2023-10-16 23:46:16.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.29266211604095566, + "acc_stderr": 0.013295916103619404, + "acc_norm": 0.3438566552901024, + "acc_norm_stderr": 0.013880644570156213 + }, + "harness|ko_hellaswag|10": { + "acc": 0.39026090420235016, + "acc_stderr": 0.004868117598481941, + "acc_norm": 0.5064728141804421, + "acc_norm_stderr": 0.00498936327695524 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.2982456140350877, + "acc_stderr": 0.03508771929824564, + "acc_norm": 0.2982456140350877, + "acc_norm_stderr": 0.03508771929824564 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.18446601941747573, + "acc_stderr": 0.03840423627288276, + "acc_norm": 0.18446601941747573, + "acc_norm_stderr": 0.03840423627288276 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.25287356321839083, + "acc_stderr": 0.015543377313719681, + "acc_norm": 0.25287356321839083, + "acc_norm_stderr": 0.015543377313719681 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.35555555555555557, + "acc_stderr": 0.04135176749720386, + "acc_norm": 0.35555555555555557, + "acc_norm_stderr": 0.04135176749720386 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.20425531914893616, + "acc_stderr": 0.026355158413349407, + "acc_norm": 0.20425531914893616, + "acc_norm_stderr": 0.026355158413349407 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.18072289156626506, + "acc_stderr": 0.02995573785581014, + "acc_norm": 0.18072289156626506, + "acc_norm_stderr": 0.02995573785581014 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3279742765273312, + "acc_stderr": 0.026664410886937613, + "acc_norm": 0.3279742765273312, + "acc_norm_stderr": 
0.026664410886937613 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.16591928251121077, + "acc_stderr": 0.024967553196547157, + "acc_norm": 0.16591928251121077, + "acc_norm_stderr": 0.024967553196547157 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.26717557251908397, + "acc_stderr": 0.03880848301082396, + "acc_norm": 0.26717557251908397, + "acc_norm_stderr": 0.03880848301082396 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816505, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816505 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.03191178226713549, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.03191178226713549 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.30344827586206896, + "acc_stderr": 0.038312260488503336, + "acc_norm": 0.30344827586206896, + "acc_norm_stderr": 0.038312260488503336 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.041583075330832865, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.041583075330832865 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.22268907563025211, + "acc_stderr": 0.027025433498882378, + "acc_norm": 0.22268907563025211, + "acc_norm_stderr": 0.027025433498882378 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2282051282051282, + "acc_stderr": 0.02127839386358628, + "acc_norm": 0.2282051282051282, + "acc_norm_stderr": 0.02127839386358628 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.23, + "acc_stderr": 0.042295258468165044, + "acc_norm": 0.23, + "acc_norm_stderr": 0.042295258468165044 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.23148148148148148, + "acc_stderr": 0.04077494709252627, + "acc_norm": 
0.23148148148148148, + "acc_norm_stderr": 0.04077494709252627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.24630541871921183, + "acc_stderr": 0.030315099285617732, + "acc_norm": 0.24630541871921183, + "acc_norm_stderr": 0.030315099285617732 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2709677419354839, + "acc_stderr": 0.025284416114900156, + "acc_norm": 0.2709677419354839, + "acc_norm_stderr": 0.025284416114900156 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.23931623931623933, + "acc_stderr": 0.027951826808924333, + "acc_norm": 0.23931623931623933, + "acc_norm_stderr": 0.027951826808924333 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2641509433962264, + "acc_stderr": 0.027134291628741695, + "acc_norm": 0.2641509433962264, + "acc_norm_stderr": 0.027134291628741695 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.19090909090909092, + "acc_stderr": 0.03764425585984927, + "acc_norm": 0.19090909090909092, + "acc_norm_stderr": 0.03764425585984927 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24814814814814815, + "acc_stderr": 0.0263357394040558, + "acc_norm": 0.24814814814814815, + "acc_norm_stderr": 0.0263357394040558 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.271523178807947, + "acc_stderr": 0.03631329803969654, + "acc_norm": 0.271523178807947, + "acc_norm_stderr": 0.03631329803969654 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.21890547263681592, + "acc_stderr": 0.029239174636647, + "acc_norm": 0.21890547263681592, + "acc_norm_stderr": 0.029239174636647 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.26011560693641617, + "acc_stderr": 0.03345036916788991, + "acc_norm": 0.26011560693641617, + "acc_norm_stderr": 0.03345036916788991 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.23809523809523808, + "acc_stderr": 0.021935878081184766, + "acc_norm": 0.23809523809523808, + "acc_norm_stderr": 0.021935878081184766 + }, + 
"harness|ko_mmlu_college_biology|5": { + "acc": 0.25, + "acc_stderr": 0.03621034121889507, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03621034121889507 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.19, + "acc_stderr": 0.03942772444036624, + "acc_norm": 0.19, + "acc_norm_stderr": 0.03942772444036624 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2514450867052023, + "acc_stderr": 0.023357365785874037, + "acc_norm": 0.2514450867052023, + "acc_norm_stderr": 0.023357365785874037 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3067484662576687, + "acc_stderr": 0.03623089915724148, + "acc_norm": 0.3067484662576687, + "acc_norm_stderr": 0.03623089915724148 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.30864197530864196, + "acc_stderr": 0.02570264026060376, + "acc_norm": 0.30864197530864196, + "acc_norm_stderr": 0.02570264026060376 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.23316062176165803, + "acc_stderr": 0.03051611137147602, + "acc_norm": 0.23316062176165803, + "acc_norm_stderr": 0.03051611137147602 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.03999423879281336, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.03999423879281336 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.23302752293577983, + "acc_stderr": 0.0181256691808615, + "acc_norm": 0.23302752293577983, + "acc_norm_stderr": 0.0181256691808615 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.16666666666666666, + "acc_stderr": 0.03333333333333338, + "acc_norm": 0.16666666666666666, + "acc_norm_stderr": 0.03333333333333338 + }, + 
"harness|ko_mmlu_nutrition|5": { + "acc": 0.24836601307189543, + "acc_stderr": 0.02473998135511359, + "acc_norm": 0.24836601307189543, + "acc_norm_stderr": 0.02473998135511359 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.22, + "acc_stderr": 0.041633319989322674, + "acc_norm": 0.22, + "acc_norm_stderr": 0.041633319989322674 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2975206611570248, + "acc_stderr": 0.04173349148083499, + "acc_norm": 0.2975206611570248, + "acc_norm_stderr": 0.04173349148083499 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3026315789473684, + "acc_stderr": 0.03738520676119667, + "acc_norm": 0.3026315789473684, + "acc_norm_stderr": 0.03738520676119667 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2761437908496732, + "acc_stderr": 0.018087276935663133, + "acc_norm": 0.2761437908496732, + "acc_norm_stderr": 0.018087276935663133 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.24468085106382978, + "acc_stderr": 0.025645553622266736, + "acc_norm": 0.24468085106382978, + "acc_norm_stderr": 0.025645553622266736 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.21428571428571427, + "acc_stderr": 0.03894641120044793, + "acc_norm": 0.21428571428571427, + "acc_norm_stderr": 0.03894641120044793 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.03141554629402545, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.03141554629402545 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2659217877094972, + "acc_stderr": 0.014776765066438895, + "acc_norm": 0.2659217877094972, + "acc_norm_stderr": 0.014776765066438895 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 
0.045604802157206845 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3272058823529412, + "acc_stderr": 0.028501452860396563, + "acc_norm": 0.3272058823529412, + "acc_norm_stderr": 0.028501452860396563 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.24489795918367346, + "acc_stderr": 0.027529637440174934, + "acc_norm": 0.24489795918367346, + "acc_norm_stderr": 0.027529637440174934 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.270042194092827, + "acc_stderr": 0.028900721906293426, + "acc_norm": 0.270042194092827, + "acc_norm_stderr": 0.028900721906293426 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.27835723598435463, + "acc_stderr": 0.011446990197380982, + "acc_norm": 0.27835723598435463, + "acc_norm_stderr": 0.011446990197380982 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.03132179803083292, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.03132179803083292 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2606060606060606, + "acc_stderr": 0.03427743175816524, + "acc_norm": 0.2606060606060606, + "acc_norm_stderr": 0.03427743175816524 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.27050183598531213, + "mc1_stderr": 0.015550778332842883, + "mc2": 0.4208363898748992, + "mc2_stderr": 0.014946599322770709 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.30578512396694213, + "acc_stderr": 0.0158405389325341, + "acc_norm": 0.36481700118063753, + "acc_norm_stderr": 0.016550144337046595 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 
1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + 
"harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "ingeol/ppo_test", + "model_sha": "ec1c89b180c1eb383c5a348b4d113733c3e8e238", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/ingeol/sft_merged/result_2023-10-15 17:37:34.json b/ingeol/sft_merged/result_2023-10-15 17:37:34.json new file mode 100644 index 0000000000000000000000000000000000000000..378acc6761a5ebbb338fb8a0ef7741ac9ee1c0a1 --- /dev/null +++ b/ingeol/sft_merged/result_2023-10-15 17:37:34.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2977815699658703, + "acc_stderr": 0.01336308010724449, + "acc_norm": 0.3395904436860068, + "acc_norm_stderr": 0.013839039762820167 + }, + "harness|ko_hellaswag|10": { + "acc": 0.39055964947221666, + "acc_stderr": 0.004868787333436579, + "acc_norm": 0.5038836885082653, + "acc_norm_stderr": 0.004989630887066195 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.30994152046783624, + "acc_stderr": 0.03546976959393161, + "acc_norm": 0.30994152046783624, + "acc_norm_stderr": 0.03546976959393161 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.18446601941747573, + "acc_stderr": 0.03840423627288276, + "acc_norm": 0.18446601941747573, + "acc_norm_stderr": 0.03840423627288276 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2515964240102171, + "acc_stderr": 0.015517322365529631, + "acc_norm": 0.2515964240102171, + "acc_norm_stderr": 0.015517322365529631 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.35555555555555557, + "acc_stderr": 0.04135176749720386, + "acc_norm": 0.35555555555555557, + "acc_norm_stderr": 
0.04135176749720386 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2170212765957447, + "acc_stderr": 0.026947483121496245, + "acc_norm": 0.2170212765957447, + "acc_norm_stderr": 0.026947483121496245 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.21084337349397592, + "acc_stderr": 0.0317555478662992, + "acc_norm": 0.21084337349397592, + "acc_norm_stderr": 0.0317555478662992 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3215434083601286, + "acc_stderr": 0.026527724079528872, + "acc_norm": 0.3215434083601286, + "acc_norm_stderr": 0.026527724079528872 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.15695067264573992, + "acc_stderr": 0.02441358717490739, + "acc_norm": 0.15695067264573992, + "acc_norm_stderr": 0.02441358717490739 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2366412213740458, + "acc_stderr": 0.037276735755969174, + "acc_norm": 0.2366412213740458, + "acc_norm_stderr": 0.037276735755969174 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.22, + "acc_stderr": 0.041633319989322695, + "acc_norm": 0.22, + "acc_norm_stderr": 0.041633319989322695 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.03191178226713549, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.03191178226713549 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2896551724137931, + "acc_stderr": 0.03780019230438015, + "acc_norm": 0.2896551724137931, + "acc_norm_stderr": 0.03780019230438015 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237656, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237656 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.23109243697478993, + "acc_stderr": 0.027381406927868966, + "acc_norm": 
0.23109243697478993, + "acc_norm_stderr": 0.027381406927868966 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2282051282051282, + "acc_stderr": 0.02127839386358628, + "acc_norm": 0.2282051282051282, + "acc_norm_stderr": 0.02127839386358628 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.23, + "acc_stderr": 0.042295258468165044, + "acc_norm": 0.23, + "acc_norm_stderr": 0.042295258468165044 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.23148148148148148, + "acc_stderr": 0.04077494709252627, + "acc_norm": 0.23148148148148148, + "acc_norm_stderr": 0.04077494709252627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2660098522167488, + "acc_stderr": 0.03108982600293752, + "acc_norm": 0.2660098522167488, + "acc_norm_stderr": 0.03108982600293752 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.25161290322580643, + "acc_stderr": 0.02468597928623997, + "acc_norm": 0.25161290322580643, + "acc_norm_stderr": 0.02468597928623997 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.24786324786324787, + "acc_stderr": 0.028286324075564386, + "acc_norm": 0.24786324786324787, + "acc_norm_stderr": 0.028286324075564386 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2490566037735849, + "acc_stderr": 0.026616482980501715, + "acc_norm": 0.2490566037735849, + "acc_norm_stderr": 0.026616482980501715 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03955932861795833, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03955932861795833 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25555555555555554, + "acc_stderr": 0.02659393910184408, + "acc_norm": 0.25555555555555554, + "acc_norm_stderr": 0.02659393910184408 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.26490066225165565, + 
"acc_stderr": 0.03603038545360383, + "acc_norm": 0.26490066225165565, + "acc_norm_stderr": 0.03603038545360383 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.21890547263681592, + "acc_stderr": 0.029239174636647, + "acc_norm": 0.21890547263681592, + "acc_norm_stderr": 0.029239174636647 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2832369942196532, + "acc_stderr": 0.03435568056047873, + "acc_norm": 0.2832369942196532, + "acc_norm_stderr": 0.03435568056047873 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.23809523809523808, + "acc_stderr": 0.021935878081184766, + "acc_norm": 0.23809523809523808, + "acc_norm_stderr": 0.021935878081184766 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2569444444444444, + "acc_stderr": 0.03653946969442099, + "acc_norm": 0.2569444444444444, + "acc_norm_stderr": 0.03653946969442099 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.19, + "acc_stderr": 0.03942772444036624, + "acc_norm": 0.19, + "acc_norm_stderr": 0.03942772444036624 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.23410404624277456, + "acc_stderr": 0.022797110278071138, + "acc_norm": 0.23410404624277456, + "acc_norm_stderr": 0.022797110278071138 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3067484662576687, + "acc_stderr": 0.03623089915724148, + "acc_norm": 0.3067484662576687, + "acc_norm_stderr": 0.03623089915724148 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.30864197530864196, + "acc_stderr": 0.02570264026060376, + "acc_norm": 0.30864197530864196, + "acc_norm_stderr": 0.02570264026060376 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 
0.24352331606217617, + "acc_stderr": 0.030975436386845426, + "acc_norm": 0.24352331606217617, + "acc_norm_stderr": 0.030975436386845426 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.040969851398436716, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.040969851398436716 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.23486238532110093, + "acc_stderr": 0.018175110510343588, + "acc_norm": 0.23486238532110093, + "acc_norm_stderr": 0.018175110510343588 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.16666666666666666, + "acc_stderr": 0.03333333333333338, + "acc_norm": 0.16666666666666666, + "acc_norm_stderr": 0.03333333333333338 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.23202614379084968, + "acc_stderr": 0.024170840879341016, + "acc_norm": 0.23202614379084968, + "acc_norm_stderr": 0.024170840879341016 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.22, + "acc_stderr": 0.041633319989322674, + "acc_norm": 0.22, + "acc_norm_stderr": 0.041633319989322674 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.30578512396694213, + "acc_stderr": 0.04205953933884124, + "acc_norm": 0.30578512396694213, + "acc_norm_stderr": 0.04205953933884124 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3157894736842105, + "acc_stderr": 0.03782728980865469, + "acc_norm": 0.3157894736842105, + "acc_norm_stderr": 0.03782728980865469 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.28104575163398693, + "acc_stderr": 0.018185218954318082, + "acc_norm": 0.28104575163398693, + "acc_norm_stderr": 0.018185218954318082 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.23404255319148937, + "acc_stderr": 0.025257861359432407, + "acc_norm": 0.23404255319148937, + "acc_norm_stderr": 0.025257861359432407 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.21428571428571427, + "acc_stderr": 0.03894641120044793, + "acc_norm": 0.21428571428571427, + "acc_norm_stderr": 
0.03894641120044793 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.30092592592592593, + "acc_stderr": 0.031280390843298804, + "acc_norm": 0.30092592592592593, + "acc_norm_stderr": 0.031280390843298804 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2536312849162011, + "acc_stderr": 0.014551553659369922, + "acc_norm": 0.2536312849162011, + "acc_norm_stderr": 0.014551553659369922 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.33088235294117646, + "acc_stderr": 0.028582709753898445, + "acc_norm": 0.33088235294117646, + "acc_norm_stderr": 0.028582709753898445 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2612244897959184, + "acc_stderr": 0.028123429335142783, + "acc_norm": 0.2612244897959184, + "acc_norm_stderr": 0.028123429335142783 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.26582278481012656, + "acc_stderr": 0.028756799629658335, + "acc_norm": 0.26582278481012656, + "acc_norm_stderr": 0.028756799629658335 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.27053455019556716, + "acc_stderr": 0.011345996743539265, + "acc_norm": 0.27053455019556716, + "acc_norm_stderr": 0.011345996743539265 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.27941176470588236, + "acc_stderr": 0.031493281045079556, + "acc_norm": 0.27941176470588236, + "acc_norm_stderr": 0.031493281045079556 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.24242424242424243, + "acc_stderr": 0.033464098810559534, + "acc_norm": 0.24242424242424243, + "acc_norm_stderr": 0.033464098810559534 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 
0.2692778457772338, + "mc1_stderr": 0.015528566637087298, + "mc2": 0.4211117529867161, + "mc2_stderr": 0.014959536407311791 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.31286894923258557, + "acc_stderr": 0.015941010118302658, + "acc_norm": 0.3754427390791027, + "acc_norm_stderr": 0.016648411589511098 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "ingeol/sft_merged", + "model_sha": "a958e5054c1935e86f418c797825ebccb9e7fd89", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/ingeol/sft_merged_660/result_2023-10-15 23:44:57.json b/ingeol/sft_merged_660/result_2023-10-15 23:44:57.json new file mode 100644 index 0000000000000000000000000000000000000000..8b41a5d3aabbaca36068da0b0bac7cc85fd85c71 --- /dev/null +++ b/ingeol/sft_merged_660/result_2023-10-15 23:44:57.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2858361774744027, + "acc_stderr": 0.013203196088537364, + "acc_norm": 0.33532423208191126, + "acc_norm_stderr": 0.013796182947785564 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3910575582553276, + "acc_stderr": 0.00486989929773455, + 
"acc_norm": 0.5030870344552878, + "acc_norm_stderr": 0.004989686307484551 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.26900584795321636, + "acc_stderr": 0.03401052620104088, + "acc_norm": 0.26900584795321636, + "acc_norm_stderr": 0.03401052620104088 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.1941747572815534, + "acc_stderr": 0.03916667762822584, + "acc_norm": 0.1941747572815534, + "acc_norm_stderr": 0.03916667762822584 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.23371647509578544, + "acc_stderr": 0.015133383278988832, + "acc_norm": 0.23371647509578544, + "acc_norm_stderr": 0.015133383278988832 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3037037037037037, + "acc_stderr": 0.039725528847851375, + "acc_norm": 0.3037037037037037, + "acc_norm_stderr": 0.039725528847851375 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384739, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384739 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.24680851063829787, + "acc_stderr": 0.02818544130123409, + "acc_norm": 0.24680851063829787, + "acc_norm_stderr": 0.02818544130123409 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.21686746987951808, + "acc_stderr": 0.03208284450356365, + "acc_norm": 0.21686746987951808, + "acc_norm_stderr": 0.03208284450356365 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3215434083601286, + "acc_stderr": 0.026527724079528872, + "acc_norm": 0.3215434083601286, + "acc_norm_stderr": 0.026527724079528872 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.15695067264573992, + "acc_stderr": 0.024413587174907405, + "acc_norm": 0.15695067264573992, + "acc_norm_stderr": 0.024413587174907405 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.24427480916030533, + "acc_stderr": 0.037683359597287434, + "acc_norm": 0.24427480916030533, + "acc_norm_stderr": 0.037683359597287434 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.27, + "acc_stderr": 
0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.29797979797979796, + "acc_stderr": 0.03258630383836555, + "acc_norm": 0.29797979797979796, + "acc_norm_stderr": 0.03258630383836555 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2896551724137931, + "acc_stderr": 0.037800192304380156, + "acc_norm": 0.2896551724137931, + "acc_norm_stderr": 0.037800192304380156 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.04220773659171452, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.04220773659171452 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.21008403361344538, + "acc_stderr": 0.026461398717471874, + "acc_norm": 0.21008403361344538, + "acc_norm_stderr": 0.026461398717471874 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.24102564102564103, + "acc_stderr": 0.021685546665333188, + "acc_norm": 0.24102564102564103, + "acc_norm_stderr": 0.021685546665333188 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542129, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542129 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.18, + "acc_stderr": 0.03861229196653695, + "acc_norm": 0.18, + "acc_norm_stderr": 0.03861229196653695 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.24074074074074073, + "acc_stderr": 0.04133119440243839, + "acc_norm": 0.24074074074074073, + "acc_norm_stderr": 0.04133119440243839 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.270935960591133, + "acc_stderr": 0.03127090713297698, + "acc_norm": 0.270935960591133, + "acc_norm_stderr": 0.03127090713297698 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.25161290322580643, + "acc_stderr": 0.024685979286239973, + "acc_norm": 0.25161290322580643, + "acc_norm_stderr": 0.024685979286239973 + }, + "harness|ko_mmlu_marketing|5": { + 
"acc": 0.24786324786324787, + "acc_stderr": 0.028286324075564393, + "acc_norm": 0.24786324786324787, + "acc_norm_stderr": 0.028286324075564393 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2528301886792453, + "acc_stderr": 0.02674989977124124, + "acc_norm": 0.2528301886792453, + "acc_norm_stderr": 0.02674989977124124 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.20909090909090908, + "acc_stderr": 0.038950910157241364, + "acc_norm": 0.20909090909090908, + "acc_norm_stderr": 0.038950910157241364 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25555555555555554, + "acc_stderr": 0.02659393910184408, + "acc_norm": 0.25555555555555554, + "acc_norm_stderr": 0.02659393910184408 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.271523178807947, + "acc_stderr": 0.03631329803969653, + "acc_norm": 0.271523178807947, + "acc_norm_stderr": 0.03631329803969653 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.23383084577114427, + "acc_stderr": 0.029929415408348384, + "acc_norm": 0.23383084577114427, + "acc_norm_stderr": 0.029929415408348384 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2832369942196532, + "acc_stderr": 0.03435568056047873, + "acc_norm": 0.2832369942196532, + "acc_norm_stderr": 0.03435568056047873 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2275132275132275, + "acc_stderr": 0.021591269407823778, + "acc_norm": 0.2275132275132275, + "acc_norm_stderr": 0.021591269407823778 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3125, + "acc_stderr": 0.038760854559127644, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.038760854559127644 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816508, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816508 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + 
"harness|ko_mmlu_moral_disputes|5": { + "acc": 0.23410404624277456, + "acc_stderr": 0.022797110278071145, + "acc_norm": 0.23410404624277456, + "acc_norm_stderr": 0.022797110278071145 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3006134969325153, + "acc_stderr": 0.03602511318806771, + "acc_norm": 0.3006134969325153, + "acc_norm_stderr": 0.03602511318806771 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.02584224870090218, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.02584224870090218 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.23316062176165803, + "acc_stderr": 0.03051611137147601, + "acc_norm": 0.23316062176165803, + "acc_norm_stderr": 0.03051611137147601 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.22807017543859648, + "acc_stderr": 0.03947152782669415, + "acc_norm": 0.22807017543859648, + "acc_norm_stderr": 0.03947152782669415 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.26788990825688075, + "acc_stderr": 0.018987462257978652, + "acc_norm": 0.26788990825688075, + "acc_norm_stderr": 0.018987462257978652 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.03718489006818115, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.03718489006818115 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.023805186524888142, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.023805186524888142 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2975206611570248, + "acc_stderr": 0.04173349148083499, + "acc_norm": 0.2975206611570248, + 
"acc_norm_stderr": 0.04173349148083499 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.19736842105263158, + "acc_stderr": 0.03238981601699397, + "acc_norm": 0.19736842105263158, + "acc_norm_stderr": 0.03238981601699397 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.017630827375148383, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.017630827375148383 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.24113475177304963, + "acc_stderr": 0.025518731049537766, + "acc_norm": 0.24113475177304963, + "acc_norm_stderr": 0.025518731049537766 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.22321428571428573, + "acc_stderr": 0.03952301967702511, + "acc_norm": 0.22321428571428573, + "acc_norm_stderr": 0.03952301967702511 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.38425925925925924, + "acc_stderr": 0.03317354514310742, + "acc_norm": 0.38425925925925924, + "acc_norm_stderr": 0.03317354514310742 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27150837988826815, + "acc_stderr": 0.014874252168095278, + "acc_norm": 0.27150837988826815, + "acc_norm_stderr": 0.014874252168095278 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.40441176470588236, + "acc_stderr": 0.02981263070156974, + "acc_norm": 0.40441176470588236, + "acc_norm_stderr": 0.02981263070156974 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2612244897959184, + "acc_stderr": 0.02812342933514278, + "acc_norm": 0.2612244897959184, + "acc_norm_stderr": 0.02812342933514278 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 
0.2616033755274262, + "acc_stderr": 0.028609516716994934, + "acc_norm": 0.2616033755274262, + "acc_norm_stderr": 0.028609516716994934 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2692307692307692, + "acc_stderr": 0.011328734403140332, + "acc_norm": 0.2692307692307692, + "acc_norm_stderr": 0.011328734403140332 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.27941176470588236, + "acc_stderr": 0.031493281045079556, + "acc_norm": 0.27941176470588236, + "acc_norm_stderr": 0.031493281045079556 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.23636363636363636, + "acc_stderr": 0.03317505930009179, + "acc_norm": 0.23636363636363636, + "acc_norm_stderr": 0.03317505930009179 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2802937576499388, + "mc1_stderr": 0.015723139524608753, + "mc2": 0.42256277632208605, + "mc2_stderr": 0.014988663316140667 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.30342384887839435, + "acc_stderr": 0.015806072717909573, + "acc_norm": 0.3884297520661157, + "acc_norm_stderr": 0.016756921571069415 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + 
"harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "ingeol/sft_merged_660", + "model_sha": "2426d1b6f2940a808b68c578e0fafdab1a515707", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + 
"num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/inoutro/phi2-ko-instruction-tune/result_2024-03-25 01:59:45.json b/inoutro/phi2-ko-instruction-tune/result_2024-03-25 01:59:45.json new file mode 100644 index 0000000000000000000000000000000000000000..8dc2139c754b030fcdadadef6f8eb49908284d97 --- /dev/null +++ b/inoutro/phi2-ko-instruction-tune/result_2024-03-25 01:59:45.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.27474402730375425, + "acc_stderr": 0.013044617212771227, + "acc_norm": 0.31569965870307165, + "acc_norm_stderr": 0.013582571095815291 + }, + "harness|ko_hellaswag|10": { + "acc": 0.31905994821748657, + "acc_stderr": 0.0046515972099930945, + "acc_norm": 0.37054371639115713, + "acc_norm_stderr": 0.004819633668832542 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.2573099415204678, + "acc_stderr": 0.03352799844161865, + "acc_norm": 0.2573099415204678, + "acc_norm_stderr": 0.03352799844161865 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2524271844660194, + "acc_stderr": 0.04301250399690878, + "acc_norm": 0.2524271844660194, + "acc_norm_stderr": 0.04301250399690878 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.28991060025542786, + "acc_stderr": 0.01622501794477098, + "acc_norm": 0.28991060025542786, + "acc_norm_stderr": 0.01622501794477098 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.038201699145179055, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.038201699145179055 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.28936170212765955, + "acc_stderr": 0.02964400657700962, + "acc_norm": 0.28936170212765955, + "acc_norm_stderr": 0.02964400657700962 + }, + "harness|ko_mmlu_virology|5": { + "acc": 
0.30120481927710846, + "acc_stderr": 0.03571609230053481, + "acc_norm": 0.30120481927710846, + "acc_norm_stderr": 0.03571609230053481 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.33762057877813506, + "acc_stderr": 0.02685882587948855, + "acc_norm": 0.33762057877813506, + "acc_norm_stderr": 0.02685882587948855 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.25112107623318386, + "acc_stderr": 0.02910522083322462, + "acc_norm": 0.25112107623318386, + "acc_norm_stderr": 0.02910522083322462 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2900763358778626, + "acc_stderr": 0.03980066246467766, + "acc_norm": 0.2900763358778626, + "acc_norm_stderr": 0.03980066246467766 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.22, + "acc_stderr": 0.041633319989322695, + "acc_norm": 0.22, + "acc_norm_stderr": 0.041633319989322695 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.031911782267135466, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.031911782267135466 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2482758620689655, + "acc_stderr": 0.03600105692727772, + "acc_norm": 0.2482758620689655, + "acc_norm_stderr": 0.03600105692727772 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.19607843137254902, + "acc_stderr": 0.03950581861179962, + "acc_norm": 0.19607843137254902, + "acc_norm_stderr": 0.03950581861179962 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.027553614467863818, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.027553614467863818 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.24615384615384617, + "acc_stderr": 0.021840866990423077, + "acc_norm": 0.24615384615384617, + "acc_norm_stderr": 0.021840866990423077 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, 
+ "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.23148148148148148, + "acc_stderr": 0.04077494709252627, + "acc_norm": 0.23148148148148148, + "acc_norm_stderr": 0.04077494709252627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2512315270935961, + "acc_stderr": 0.030516530732694436, + "acc_norm": 0.2512315270935961, + "acc_norm_stderr": 0.030516530732694436 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.26129032258064516, + "acc_stderr": 0.024993053397764815, + "acc_norm": 0.26129032258064516, + "acc_norm_stderr": 0.024993053397764815 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.33760683760683763, + "acc_stderr": 0.030980296992618558, + "acc_norm": 0.33760683760683763, + "acc_norm_stderr": 0.030980296992618558 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.30566037735849055, + "acc_stderr": 0.028353298073322663, + "acc_norm": 0.30566037735849055, + "acc_norm_stderr": 0.028353298073322663 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2545454545454545, + "acc_stderr": 0.04172343038705383, + "acc_norm": 0.2545454545454545, + "acc_norm_stderr": 0.04172343038705383 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24444444444444444, + "acc_stderr": 0.02620276653465215, + "acc_norm": 0.24444444444444444, + "acc_norm_stderr": 0.02620276653465215 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2052980132450331, + "acc_stderr": 0.03297986648473836, + "acc_norm": 0.2052980132450331, + "acc_norm_stderr": 0.03297986648473836 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.2885572139303483, + "acc_stderr": 0.03203841040213321, + "acc_norm": 0.2885572139303483, + "acc_norm_stderr": 0.03203841040213321 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.23121387283236994, + "acc_stderr": 0.0321473730202947, + "acc_norm": 
0.23121387283236994, + "acc_norm_stderr": 0.0321473730202947 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.22486772486772486, + "acc_stderr": 0.02150209607822914, + "acc_norm": 0.22486772486772486, + "acc_norm_stderr": 0.02150209607822914 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.24305555555555555, + "acc_stderr": 0.03586879280080341, + "acc_norm": 0.24305555555555555, + "acc_norm_stderr": 0.03586879280080341 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932269, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932269 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.23, + "acc_stderr": 0.042295258468165044, + "acc_norm": 0.23, + "acc_norm_stderr": 0.042295258468165044 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.25722543352601157, + "acc_stderr": 0.023532925431044283, + "acc_norm": 0.25722543352601157, + "acc_norm_stderr": 0.023532925431044283 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.22699386503067484, + "acc_stderr": 0.032910995786157686, + "acc_norm": 0.22699386503067484, + "acc_norm_stderr": 0.032910995786157686 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.02492200116888633, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.02492200116888633 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.21243523316062177, + "acc_stderr": 0.029519282616817254, + "acc_norm": 0.21243523316062177, + "acc_norm_stderr": 0.029519282616817254 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.20175438596491227, + "acc_stderr": 0.037752050135836386, + "acc_norm": 0.20175438596491227, + "acc_norm_stderr": 0.037752050135836386 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.24954128440366974, + "acc_stderr": 
0.018553897629501617, + "acc_norm": 0.24954128440366974, + "acc_norm_stderr": 0.018553897629501617 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.19047619047619047, + "acc_stderr": 0.035122074123020534, + "acc_norm": 0.19047619047619047, + "acc_norm_stderr": 0.035122074123020534 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.24836601307189543, + "acc_stderr": 0.02473998135511359, + "acc_norm": 0.24836601307189543, + "acc_norm_stderr": 0.02473998135511359 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.36363636363636365, + "acc_stderr": 0.043913262867240704, + "acc_norm": 0.36363636363636365, + "acc_norm_stderr": 0.043913262867240704 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.034597776068105365, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.034597776068105365 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.26633986928104575, + "acc_stderr": 0.01788318813466721, + "acc_norm": 0.26633986928104575, + "acc_norm_stderr": 0.01788318813466721 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2624113475177305, + "acc_stderr": 0.02624492034984301, + "acc_norm": 0.2624113475177305, + "acc_norm_stderr": 0.02624492034984301 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.23214285714285715, + "acc_stderr": 0.04007341809755805, + "acc_norm": 0.23214285714285715, + "acc_norm_stderr": 0.04007341809755805 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.1574074074074074, + "acc_stderr": 0.024837173518242384, + "acc_norm": 0.1574074074074074, + "acc_norm_stderr": 0.024837173518242384 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24692737430167597, + "acc_stderr": 0.014422292204808852, + "acc_norm": 0.24692737430167597, + "acc_norm_stderr": 0.014422292204808852 + }, + 
"harness|ko_mmlu_college_computer_science|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909282, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909282 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.2426470588235294, + "acc_stderr": 0.02604066247420126, + "acc_norm": 0.2426470588235294, + "acc_norm_stderr": 0.02604066247420126 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2, + "acc_stderr": 0.025607375986579153, + "acc_norm": 0.2, + "acc_norm_stderr": 0.025607375986579153 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.3037974683544304, + "acc_stderr": 0.0299366963871386, + "acc_norm": 0.3037974683544304, + "acc_norm_stderr": 0.0299366963871386 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2503259452411995, + "acc_stderr": 0.011064151027165436, + "acc_norm": 0.2503259452411995, + "acc_norm_stderr": 0.011064151027165436 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.22058823529411764, + "acc_stderr": 0.029102254389674082, + "acc_norm": 0.22058823529411764, + "acc_norm_stderr": 0.029102254389674082 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.24242424242424243, + "acc_stderr": 0.03346409881055953, + "acc_norm": 0.24242424242424243, + "acc_norm_stderr": 0.03346409881055953 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2386780905752754, + "mc1_stderr": 0.014922629695456418, + "mc2": 0.4130255185853868, + "mc2_stderr": 0.015705343509271937 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2798110979929162, + "acc_stderr": 0.015433715795427764, + "acc_norm": 0.3152302243211334, + "acc_norm_stderr": 0.015973534923794476 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + 
"harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + 
"harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "inoutro/phi2-ko-instruction-tune", + "model_sha": "68c7efa442b43d896363b9a0ba2cf05784ea5e41", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/instructkr/ko-storywriter-nano/result_2024-01-18 08:11:26.json b/instructkr/ko-storywriter-nano/result_2024-01-18 08:11:26.json new file mode 100644 index 0000000000000000000000000000000000000000..a84d4f04cac3df8c54739154eb6f8ee2b2edae64 --- /dev/null +++ b/instructkr/ko-storywriter-nano/result_2024-01-18 08:11:26.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.32593856655290104, + "acc_stderr": 0.013697432466693237, + "acc_norm": 0.40102389078498296, + "acc_norm_stderr": 0.014322255790719867 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3919537940649273, + "acc_stderr": 0.004871887422893585, + "acc_norm": 0.5185222067317268, + "acc_norm_stderr": 0.004986356526063971 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4619883040935672, + "acc_stderr": 0.03823727092882307, + "acc_norm": 0.4619883040935672, + "acc_norm_stderr": 0.03823727092882307 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.46601941747572817, + "acc_stderr": 0.04939291447273481, + "acc_norm": 0.46601941747572817, + "acc_norm_stderr": 0.04939291447273481 + }, + 
"harness|ko_mmlu_miscellaneous|5": { + "acc": 0.45338441890166026, + "acc_stderr": 0.017802087135850297, + "acc_norm": 0.45338441890166026, + "acc_norm_stderr": 0.017802087135850297 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.04309732901036354, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.04309732901036354 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.31063829787234043, + "acc_stderr": 0.03025123757921317, + "acc_norm": 0.31063829787234043, + "acc_norm_stderr": 0.03025123757921317 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39759036144578314, + "acc_stderr": 0.03809973084540219, + "acc_norm": 0.39759036144578314, + "acc_norm_stderr": 0.03809973084540219 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.47266881028938906, + "acc_stderr": 0.028355633568328174, + "acc_norm": 0.47266881028938906, + "acc_norm_stderr": 0.028355633568328174 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4080717488789238, + "acc_stderr": 0.03298574607842821, + "acc_norm": 0.4080717488789238, + "acc_norm_stderr": 0.03298574607842821 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4961832061068702, + "acc_stderr": 0.043851623256015534, + "acc_norm": 0.4961832061068702, + "acc_norm_stderr": 0.043851623256015534 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5050505050505051, + "acc_stderr": 0.035621707606254015, + "acc_norm": 0.5050505050505051, + "acc_norm_stderr": 0.035621707606254015 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.45517241379310347, + "acc_stderr": 0.04149886942192117, + "acc_norm": 0.45517241379310347, + "acc_norm_stderr": 
0.04149886942192117 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.042801058373643966, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.042801058373643966 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3949579831932773, + "acc_stderr": 0.03175367846096625, + "acc_norm": 0.3949579831932773, + "acc_norm_stderr": 0.03175367846096625 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.43333333333333335, + "acc_stderr": 0.025124653525885134, + "acc_norm": 0.43333333333333335, + "acc_norm_stderr": 0.025124653525885134 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.0471282125742677, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.0471282125742677 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3497536945812808, + "acc_stderr": 0.03355400904969566, + "acc_norm": 0.3497536945812808, + "acc_norm_stderr": 0.03355400904969566 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4096774193548387, + "acc_stderr": 0.027976054915347354, + "acc_norm": 0.4096774193548387, + "acc_norm_stderr": 0.027976054915347354 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6153846153846154, + "acc_stderr": 0.03187195347942466, + "acc_norm": 0.6153846153846154, + "acc_norm_stderr": 0.03187195347942466 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.41509433962264153, + "acc_stderr": 0.03032594578928611, + "acc_norm": 0.41509433962264153, + "acc_norm_stderr": 0.03032594578928611 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4090909090909091, + "acc_stderr": 0.04709306978661895, + "acc_norm": 
0.4090909090909091, + "acc_norm_stderr": 0.04709306978661895 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.027309140588230193, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.027309140588230193 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + "acc_stderr": 0.03734535676787198, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.03734535676787198 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5422885572139303, + "acc_stderr": 0.03522865864099597, + "acc_norm": 0.5422885572139303, + "acc_norm_stderr": 0.03522865864099597 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.37572254335260113, + "acc_stderr": 0.03692820767264867, + "acc_norm": 0.37572254335260113, + "acc_norm_stderr": 0.03692820767264867 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.28835978835978837, + "acc_stderr": 0.0233306540545359, + "acc_norm": 0.28835978835978837, + "acc_norm_stderr": 0.0233306540545359 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.375, + "acc_stderr": 0.04048439222695598, + "acc_norm": 0.375, + "acc_norm_stderr": 0.04048439222695598 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237101, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237101 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.42196531791907516, + "acc_stderr": 0.026589231142174256, + "acc_norm": 0.42196531791907516, + "acc_norm_stderr": 0.026589231142174256 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3558282208588957, + "acc_stderr": 0.03761521380046734, + "acc_norm": 0.3558282208588957, + "acc_norm_stderr": 0.03761521380046734 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.42592592592592593, + "acc_stderr": 0.02751374728437942, + "acc_norm": 
0.42592592592592593, + "acc_norm_stderr": 0.02751374728437942 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.48186528497409326, + "acc_stderr": 0.036060650018329185, + "acc_norm": 0.48186528497409326, + "acc_norm_stderr": 0.036060650018329185 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2894736842105263, + "acc_stderr": 0.04266339443159394, + "acc_norm": 0.2894736842105263, + "acc_norm_stderr": 0.04266339443159394 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5266055045871559, + "acc_stderr": 0.021406952688151588, + "acc_norm": 0.5266055045871559, + "acc_norm_stderr": 0.021406952688151588 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.19047619047619047, + "acc_stderr": 0.035122074123020514, + "acc_norm": 0.19047619047619047, + "acc_norm_stderr": 0.035122074123020514 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.434640522875817, + "acc_stderr": 0.028384256704883034, + "acc_norm": 0.434640522875817, + "acc_norm_stderr": 0.028384256704883034 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5702479338842975, + "acc_stderr": 0.04519082021319772, + "acc_norm": 0.5702479338842975, + "acc_norm_stderr": 0.04519082021319772 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.45394736842105265, + "acc_stderr": 0.04051646342874142, + "acc_norm": 0.45394736842105265, + "acc_norm_stderr": 0.04051646342874142 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.35784313725490197, + "acc_stderr": 0.019393058402355435, + "acc_norm": 0.35784313725490197, + "acc_norm_stderr": 0.019393058402355435 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3049645390070922, 
+ "acc_stderr": 0.02746470844202213, + "acc_norm": 0.3049645390070922, + "acc_norm_stderr": 0.02746470844202213 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.26785714285714285, + "acc_stderr": 0.0420327729146776, + "acc_norm": 0.26785714285714285, + "acc_norm_stderr": 0.0420327729146776 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.33796296296296297, + "acc_stderr": 0.03225941352631295, + "acc_norm": 0.33796296296296297, + "acc_norm_stderr": 0.03225941352631295 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2435754189944134, + "acc_stderr": 0.014355911964767865, + "acc_norm": 0.2435754189944134, + "acc_norm_stderr": 0.014355911964767865 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.27205882352941174, + "acc_stderr": 0.027033041151681456, + "acc_norm": 0.27205882352941174, + "acc_norm_stderr": 0.027033041151681456 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.35918367346938773, + "acc_stderr": 0.030713560455108493, + "acc_norm": 0.35918367346938773, + "acc_norm_stderr": 0.030713560455108493 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5611814345991561, + "acc_stderr": 0.032302649315470375, + "acc_norm": 0.5611814345991561, + "acc_norm_stderr": 0.032302649315470375 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3109517601043025, + "acc_stderr": 0.011822252917799205, + "acc_norm": 0.3109517601043025, + "acc_norm_stderr": 0.011822252917799205 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.3872549019607843, + "acc_stderr": 0.03418931233833344, + "acc_norm": 0.3872549019607843, + "acc_norm_stderr": 0.03418931233833344 + 
}, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.46060606060606063, + "acc_stderr": 0.03892207016552013, + "acc_norm": 0.46060606060606063, + "acc_norm_stderr": 0.03892207016552013 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24724602203182375, + "mc1_stderr": 0.015102404797359649, + "mc2": 0.4003888710124389, + "mc2_stderr": 0.014753156814351433 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4769775678866588, + "acc_stderr": 0.017172121546727637, + "acc_norm": 0.5560802833530106, + "acc_norm_stderr": 0.017081884623542546 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + 
"harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "instructkr/ko-storywriter-nano", + "model_sha": "34808533d98b6003791387631ab00ef781530e58", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/inswave/AISquare-Instruct-llama2-koen-13b-v0.9.10/result_2023-12-02 00:04:03.json b/inswave/AISquare-Instruct-llama2-koen-13b-v0.9.10/result_2023-12-02 00:04:03.json new file mode 100644 index 0000000000000000000000000000000000000000..c6eda4afd416e7b0f9fe837e8ef61e72e6fdeae7 --- /dev/null +++ b/inswave/AISquare-Instruct-llama2-koen-13b-v0.9.10/result_2023-12-02 00:04:03.json @@ -0,0 +1,444 @@ +{ 
+ "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3890784982935154, + "acc_stderr": 0.014247309976045607, + "acc_norm": 0.454778156996587, + "acc_norm_stderr": 0.014551507060836355 + }, + "harness|ko_hellaswag|10": { + "acc": 0.42541326428998205, + "acc_stderr": 0.0049339509533808945, + "acc_norm": 0.5722963553077076, + "acc_norm_stderr": 0.004937345081868089 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.47953216374269003, + "acc_stderr": 0.038316105328219316, + "acc_norm": 0.47953216374269003, + "acc_norm_stderr": 0.038316105328219316 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.47572815533980584, + "acc_stderr": 0.049449010929737795, + "acc_norm": 0.47572815533980584, + "acc_norm_stderr": 0.049449010929737795 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5146871008939975, + "acc_stderr": 0.017872248024429122, + "acc_norm": 0.5146871008939975, + "acc_norm_stderr": 0.017872248024429122 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.043163785995113245, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.043163785995113245 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421255, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421255 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3617021276595745, + "acc_stderr": 0.0314108219759624, + "acc_norm": 0.3617021276595745, + "acc_norm_stderr": 0.0314108219759624 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42168674698795183, + "acc_stderr": 0.038444531817709175, + "acc_norm": 0.42168674698795183, + "acc_norm_stderr": 0.038444531817709175 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.44694533762057875, + "acc_stderr": 0.028237769422085328, + "acc_norm": 0.44694533762057875, + "acc_norm_stderr": 0.028237769422085328 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4977578475336323, + "acc_stderr": 0.033557465352232634, + "acc_norm": 0.4977578475336323, + 
"acc_norm_stderr": 0.033557465352232634 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4198473282442748, + "acc_stderr": 0.043285772152629715, + "acc_norm": 0.4198473282442748, + "acc_norm_stderr": 0.043285772152629715 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5, + "acc_stderr": 0.035623524993954825, + "acc_norm": 0.5, + "acc_norm_stderr": 0.035623524993954825 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.38620689655172413, + "acc_stderr": 0.04057324734419035, + "acc_norm": 0.38620689655172413, + "acc_norm_stderr": 0.04057324734419035 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.18627450980392157, + "acc_stderr": 0.038739587141493524, + "acc_norm": 0.18627450980392157, + "acc_norm_stderr": 0.038739587141493524 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.03156663099215416, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.03156663099215416 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4025641025641026, + "acc_stderr": 0.024864995159767766, + "acc_norm": 0.4025641025641026, + "acc_norm_stderr": 0.024864995159767766 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3497536945812808, + "acc_stderr": 0.03355400904969567, + "acc_norm": 0.3497536945812808, + "acc_norm_stderr": 
0.03355400904969567 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.42258064516129035, + "acc_stderr": 0.02810096472427264, + "acc_norm": 0.42258064516129035, + "acc_norm_stderr": 0.02810096472427264 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6025641025641025, + "acc_stderr": 0.03205953453789293, + "acc_norm": 0.6025641025641025, + "acc_norm_stderr": 0.03205953453789293 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3886792452830189, + "acc_stderr": 0.03000048544867599, + "acc_norm": 0.3886792452830189, + "acc_norm_stderr": 0.03000048544867599 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.44545454545454544, + "acc_stderr": 0.047605488214603246, + "acc_norm": 0.44545454545454544, + "acc_norm_stderr": 0.047605488214603246 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24444444444444444, + "acc_stderr": 0.026202766534652148, + "acc_norm": 0.24444444444444444, + "acc_norm_stderr": 0.026202766534652148 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.26490066225165565, + "acc_stderr": 0.03603038545360384, + "acc_norm": 0.26490066225165565, + "acc_norm_stderr": 0.03603038545360384 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5174129353233831, + "acc_stderr": 0.03533389234739245, + "acc_norm": 0.5174129353233831, + "acc_norm_stderr": 0.03533389234739245 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3699421965317919, + "acc_stderr": 0.03681229633394319, + "acc_norm": 0.3699421965317919, + "acc_norm_stderr": 0.03681229633394319 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.291005291005291, + "acc_stderr": 0.02339382650048488, + "acc_norm": 0.291005291005291, + "acc_norm_stderr": 0.02339382650048488 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4027777777777778, + "acc_stderr": 0.04101405519842425, + "acc_norm": 0.4027777777777778, + "acc_norm_stderr": 0.04101405519842425 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 
0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4884393063583815, + "acc_stderr": 0.02691189868637793, + "acc_norm": 0.4884393063583815, + "acc_norm_stderr": 0.02691189868637793 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.43558282208588955, + "acc_stderr": 0.03895632464138937, + "acc_norm": 0.43558282208588955, + "acc_norm_stderr": 0.03895632464138937 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.02764847787741332, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.02764847787741332 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.46113989637305697, + "acc_stderr": 0.03597524411734578, + "acc_norm": 0.46113989637305697, + "acc_norm_stderr": 0.03597524411734578 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.039994238792813344, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.039994238792813344 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.48440366972477067, + "acc_stderr": 0.02142689153920805, + "acc_norm": 0.48440366972477067, + "acc_norm_stderr": 0.02142689153920805 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.03718489006818115, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.03718489006818115 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.39869281045751637, + "acc_stderr": 0.028036092273891776, + "acc_norm": 0.39869281045751637, + "acc_norm_stderr": 0.028036092273891776 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.38, + 
"acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.628099173553719, + "acc_stderr": 0.04412015806624504, + "acc_norm": 0.628099173553719, + "acc_norm_stderr": 0.04412015806624504 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3881578947368421, + "acc_stderr": 0.03965842097512744, + "acc_norm": 0.3881578947368421, + "acc_norm_stderr": 0.03965842097512744 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3627450980392157, + "acc_stderr": 0.019450768432505518, + "acc_norm": 0.3627450980392157, + "acc_norm_stderr": 0.019450768432505518 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2872340425531915, + "acc_stderr": 0.026992199173064356, + "acc_norm": 0.2872340425531915, + "acc_norm_stderr": 0.026992199173064356 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25, + "acc_stderr": 0.04109974682633932, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04109974682633932 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.25462962962962965, + "acc_stderr": 0.029711275860005344, + "acc_norm": 0.25462962962962965, + "acc_norm_stderr": 0.029711275860005344 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.47, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.47, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.375, + "acc_stderr": 0.029408372932278746, + "acc_norm": 0.375, + "acc_norm_stderr": 0.029408372932278746 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.37551020408163266, + 
"acc_stderr": 0.03100120903989484, + "acc_norm": 0.37551020408163266, + "acc_norm_stderr": 0.03100120903989484 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.620253164556962, + "acc_stderr": 0.031591887529658504, + "acc_norm": 0.620253164556962, + "acc_norm_stderr": 0.031591887529658504 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.30638852672750977, + "acc_stderr": 0.011773980329380708, + "acc_norm": 0.30638852672750977, + "acc_norm_stderr": 0.011773980329380708 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.034602283272391704, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.034602283272391704 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5333333333333333, + "acc_stderr": 0.03895658065271846, + "acc_norm": 0.5333333333333333, + "acc_norm_stderr": 0.03895658065271846 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24479804161566707, + "mc1_stderr": 0.015051869486715008, + "mc2": 0.3934309385509067, + "mc2_stderr": 0.014616103785255416 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.47461629279811096, + "acc_stderr": 0.01716818720142925, + "acc_norm": 0.5572609208972845, + "acc_norm_stderr": 0.01707725413155622 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + 
"harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + 
"model_name": "inswave/AISquare-Instruct-llama2-koen-13b-v0.9.10", + "model_sha": "4108403445d56ccc6adb1f1c4e3d4a9e50f1e95f", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/inswave/AISquare-Instruct-llama2-koen-13b-v0.9.11/result_2023-12-02 00:04:29.json b/inswave/AISquare-Instruct-llama2-koen-13b-v0.9.11/result_2023-12-02 00:04:29.json new file mode 100644 index 0000000000000000000000000000000000000000..1db123263727b67f02fc97e408f5e1918adae8b5 --- /dev/null +++ b/inswave/AISquare-Instruct-llama2-koen-13b-v0.9.11/result_2023-12-02 00:04:29.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3856655290102389, + "acc_stderr": 0.014224250973257184, + "acc_norm": 0.45733788395904434, + "acc_norm_stderr": 0.014558106543924065 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4122684724158534, + "acc_stderr": 0.0049123700239130175, + "acc_norm": 0.5567616012746465, + "acc_norm_stderr": 0.004957524197900413 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.49707602339181284, + "acc_stderr": 0.03834759370936839, + "acc_norm": 0.49707602339181284, + "acc_norm_stderr": 0.03834759370936839 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5242718446601942, + "acc_stderr": 0.049449010929737795, + "acc_norm": 0.5242718446601942, + "acc_norm_stderr": 0.049449010929737795 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5402298850574713, + "acc_stderr": 0.01782199409693354, + "acc_norm": 0.5402298850574713, + "acc_norm_stderr": 0.01782199409693354 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.42962962962962964, + "acc_stderr": 0.04276349494376599, + "acc_norm": 0.42962962962962964, + "acc_norm_stderr": 0.04276349494376599 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 
0.04351941398892446 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4, + "acc_stderr": 0.03202563076101737, + "acc_norm": 0.4, + "acc_norm_stderr": 0.03202563076101737 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4397590361445783, + "acc_stderr": 0.03864139923699122, + "acc_norm": 0.4397590361445783, + "acc_norm_stderr": 0.03864139923699122 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5016077170418006, + "acc_stderr": 0.02839794490780661, + "acc_norm": 0.5016077170418006, + "acc_norm_stderr": 0.02839794490780661 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5426008968609866, + "acc_stderr": 0.033435777055830646, + "acc_norm": 0.5426008968609866, + "acc_norm_stderr": 0.033435777055830646 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4961832061068702, + "acc_stderr": 0.043851623256015534, + "acc_norm": 0.4961832061068702, + "acc_norm_stderr": 0.043851623256015534 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.45, + "acc_stderr": 0.049999999999999996, + "acc_norm": 0.45, + "acc_norm_stderr": 0.049999999999999996 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5656565656565656, + "acc_stderr": 0.03531505879359182, + "acc_norm": 0.5656565656565656, + "acc_norm_stderr": 0.03531505879359182 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.46206896551724136, + "acc_stderr": 0.041546596717075474, + "acc_norm": 0.46206896551724136, + "acc_norm_stderr": 0.041546596717075474 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.04280105837364396, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.04280105837364396 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4369747899159664, + "acc_stderr": 0.032219436365661956, + "acc_norm": 0.4369747899159664, + "acc_norm_stderr": 0.032219436365661956 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.44358974358974357, + "acc_stderr": 0.025189149894764194, + "acc_norm": 
0.44358974358974357, + "acc_norm_stderr": 0.025189149894764194 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.55, + "acc_stderr": 0.05, + "acc_norm": 0.55, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.048262172941398944, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.048262172941398944 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.35467980295566504, + "acc_stderr": 0.033661244890514495, + "acc_norm": 0.35467980295566504, + "acc_norm_stderr": 0.033661244890514495 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.432258064516129, + "acc_stderr": 0.028181739720019413, + "acc_norm": 0.432258064516129, + "acc_norm_stderr": 0.028181739720019413 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6153846153846154, + "acc_stderr": 0.03187195347942466, + "acc_norm": 0.6153846153846154, + "acc_norm_stderr": 0.03187195347942466 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4641509433962264, + "acc_stderr": 0.030693675018458006, + "acc_norm": 0.4641509433962264, + "acc_norm_stderr": 0.030693675018458006 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5272727272727272, + "acc_stderr": 0.0478200179138006, + "acc_norm": 0.5272727272727272, + "acc_norm_stderr": 0.0478200179138006 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2518518518518518, + "acc_stderr": 0.026466117538959916, + "acc_norm": 0.2518518518518518, + "acc_norm_stderr": 0.026466117538959916 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.26490066225165565, + "acc_stderr": 0.03603038545360384, + "acc_norm": 0.26490066225165565, + "acc_norm_stderr": 0.03603038545360384 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5422885572139303, + "acc_stderr": 0.035228658640995975, + "acc_norm": 
0.5422885572139303, + "acc_norm_stderr": 0.035228658640995975 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3930635838150289, + "acc_stderr": 0.03724249595817731, + "acc_norm": 0.3930635838150289, + "acc_norm_stderr": 0.03724249595817731 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2698412698412698, + "acc_stderr": 0.022860838309232072, + "acc_norm": 0.2698412698412698, + "acc_norm_stderr": 0.022860838309232072 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3680555555555556, + "acc_stderr": 0.040329990539607195, + "acc_norm": 0.3680555555555556, + "acc_norm_stderr": 0.040329990539607195 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.35, + "acc_stderr": 0.04793724854411021, + "acc_norm": 0.35, + "acc_norm_stderr": 0.04793724854411021 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.48554913294797686, + "acc_stderr": 0.02690784985628254, + "acc_norm": 0.48554913294797686, + "acc_norm_stderr": 0.02690784985628254 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.49079754601226994, + "acc_stderr": 0.03927705600787443, + "acc_norm": 0.49079754601226994, + "acc_norm_stderr": 0.03927705600787443 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.027777777777777804, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.027777777777777804 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.533678756476684, + "acc_stderr": 0.036002440698671784, + "acc_norm": 0.533678756476684, + "acc_norm_stderr": 0.036002440698671784 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.04142439719489359, 
+ "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.04142439719489359 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5302752293577981, + "acc_stderr": 0.021397988604936965, + "acc_norm": 0.5302752293577981, + "acc_norm_stderr": 0.021397988604936965 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.04134913018303316, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.04134913018303316 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.40522875816993464, + "acc_stderr": 0.028110928492809075, + "acc_norm": 0.40522875816993464, + "acc_norm_stderr": 0.028110928492809075 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6033057851239669, + "acc_stderr": 0.044658697805310094, + "acc_norm": 0.6033057851239669, + "acc_norm_stderr": 0.044658697805310094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3815789473684211, + "acc_stderr": 0.03953173377749194, + "acc_norm": 0.3815789473684211, + "acc_norm_stderr": 0.03953173377749194 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.37254901960784315, + "acc_stderr": 0.019559646809215923, + "acc_norm": 0.37254901960784315, + "acc_norm_stderr": 0.019559646809215923 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3191489361702128, + "acc_stderr": 0.0278079901413202, + "acc_norm": 0.3191489361702128, + "acc_norm_stderr": 0.0278079901413202 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.21428571428571427, + "acc_stderr": 0.038946411200447915, + "acc_norm": 0.21428571428571427, + "acc_norm_stderr": 0.038946411200447915 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3194444444444444, + "acc_stderr": 0.031798763421768524, + "acc_norm": 0.3194444444444444, + "acc_norm_stderr": 0.031798763421768524 + }, + "harness|ko_mmlu_moral_scenarios|5": { 
+ "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4338235294117647, + "acc_stderr": 0.030105636570016643, + "acc_norm": 0.4338235294117647, + "acc_norm_stderr": 0.030105636570016643 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.46122448979591835, + "acc_stderr": 0.03191282052669277, + "acc_norm": 0.46122448979591835, + "acc_norm_stderr": 0.03191282052669277 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6033755274261603, + "acc_stderr": 0.03184399873811225, + "acc_norm": 0.6033755274261603, + "acc_norm_stderr": 0.03184399873811225 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.32659713168187743, + "acc_stderr": 0.011977676704715993, + "acc_norm": 0.32659713168187743, + "acc_norm_stderr": 0.011977676704715993 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4803921568627451, + "acc_stderr": 0.03506612560524866, + "acc_norm": 0.4803921568627451, + "acc_norm_stderr": 0.03506612560524866 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4909090909090909, + "acc_stderr": 0.03903698647748441, + "acc_norm": 0.4909090909090909, + "acc_norm_stderr": 0.03903698647748441 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2607099143206854, + "mc1_stderr": 0.015368841620766372, + "mc2": 0.424161773343599, + "mc2_stderr": 0.01477730596837744 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.44037780401416765, + "acc_stderr": 0.01706769977431298, + "acc_norm": 0.5631641086186541, + "acc_norm_stderr": 0.017052633559856065 
+ } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + 
"harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "inswave/AISquare-Instruct-llama2-koen-13b-v0.9.11", + "model_sha": "11119fbc9382e06b75e210a028fc72307551a508", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/inswave/AISquare-Instruct-llama2-koen-13b-v0.9.12/result_2023-12-02 00:04:34.json b/inswave/AISquare-Instruct-llama2-koen-13b-v0.9.12/result_2023-12-02 00:04:34.json new file mode 100644 index 0000000000000000000000000000000000000000..2968aa7e74af7f7cb6d912cdef150d641f6c51b6 --- /dev/null +++ b/inswave/AISquare-Instruct-llama2-koen-13b-v0.9.12/result_2023-12-02 00:04:34.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.38822525597269625, + "acc_stderr": 0.01424161420741405, + "acc_norm": 0.4539249146757679, + "acc_norm_stderr": 0.01454922110517187 + }, + "harness|ko_hellaswag|10": { + "acc": 0.416849233220474, + "acc_stderr": 0.004920298437884909, + "acc_norm": 0.5608444532961562, + "acc_norm_stderr": 0.004952698802275644 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.47953216374269003, + "acc_stderr": 0.0383161053282193, + "acc_norm": 0.47953216374269003, + "acc_norm_stderr": 
0.0383161053282193 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.47572815533980584, + "acc_stderr": 0.049449010929737795, + "acc_norm": 0.47572815533980584, + "acc_norm_stderr": 0.049449010929737795 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5185185185185185, + "acc_stderr": 0.017867695938429774, + "acc_norm": 0.5185185185185185, + "acc_norm_stderr": 0.017867695938429774 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.42962962962962964, + "acc_stderr": 0.04276349494376599, + "acc_norm": 0.42962962962962964, + "acc_norm_stderr": 0.04276349494376599 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421255, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421255 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4085106382978723, + "acc_stderr": 0.03213418026701576, + "acc_norm": 0.4085106382978723, + "acc_norm_stderr": 0.03213418026701576 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.40963855421686746, + "acc_stderr": 0.038284011150790206, + "acc_norm": 0.40963855421686746, + "acc_norm_stderr": 0.038284011150790206 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.48231511254019294, + "acc_stderr": 0.02838032284907713, + "acc_norm": 0.48231511254019294, + "acc_norm_stderr": 0.02838032284907713 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5201793721973094, + "acc_stderr": 0.033530461674123, + "acc_norm": 0.5201793721973094, + "acc_norm_stderr": 0.033530461674123 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.46564885496183206, + "acc_stderr": 0.043749285605997376, + "acc_norm": 0.46564885496183206, + "acc_norm_stderr": 0.043749285605997376 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5656565656565656, + "acc_stderr": 0.03531505879359183, + "acc_norm": 0.5656565656565656, + "acc_norm_stderr": 
0.03531505879359183 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3724137931034483, + "acc_stderr": 0.040287315329475604, + "acc_norm": 0.3724137931034483, + "acc_norm_stderr": 0.040287315329475604 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.044405219061793275, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.044405219061793275 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.42016806722689076, + "acc_stderr": 0.03206183783236153, + "acc_norm": 0.42016806722689076, + "acc_norm_stderr": 0.03206183783236153 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3974358974358974, + "acc_stderr": 0.024811920017903836, + "acc_norm": 0.3974358974358974, + "acc_norm_stderr": 0.024811920017903836 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39408866995073893, + "acc_stderr": 0.034381579670365446, + "acc_norm": 0.39408866995073893, + "acc_norm_stderr": 0.034381579670365446 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.42258064516129035, + "acc_stderr": 0.02810096472427264, + "acc_norm": 0.42258064516129035, + "acc_norm_stderr": 0.02810096472427264 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6324786324786325, + "acc_stderr": 0.031585391577456365, + "acc_norm": 0.6324786324786325, + "acc_norm_stderr": 0.031585391577456365 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4339622641509434, + "acc_stderr": 0.0305032920133426, + "acc_norm": 0.4339622641509434, 
+ "acc_norm_stderr": 0.0305032920133426 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.0478833976870286, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.0478833976870286 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24814814814814815, + "acc_stderr": 0.0263357394040558, + "acc_norm": 0.24814814814814815, + "acc_norm_stderr": 0.0263357394040558 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.26490066225165565, + "acc_stderr": 0.03603038545360385, + "acc_norm": 0.26490066225165565, + "acc_norm_stderr": 0.03603038545360385 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.4975124378109453, + "acc_stderr": 0.03535490150137289, + "acc_norm": 0.4975124378109453, + "acc_norm_stderr": 0.03535490150137289 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.41040462427745666, + "acc_stderr": 0.03750757044895538, + "acc_norm": 0.41040462427745666, + "acc_norm_stderr": 0.03750757044895538 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.022569897074918407, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.022569897074918407 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3472222222222222, + "acc_stderr": 0.039812405437178615, + "acc_norm": 0.3472222222222222, + "acc_norm_stderr": 0.039812405437178615 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.56, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.56, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4653179190751445, + "acc_stderr": 0.026854257928258882, + "acc_norm": 0.4653179190751445, + "acc_norm_stderr": 0.026854257928258882 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5214723926380368, + "acc_stderr": 0.03924746876751129, + 
"acc_norm": 0.5214723926380368, + "acc_norm_stderr": 0.03924746876751129 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4567901234567901, + "acc_stderr": 0.027716661650194038, + "acc_norm": 0.4567901234567901, + "acc_norm_stderr": 0.027716661650194038 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5233160621761658, + "acc_stderr": 0.03604513672442202, + "acc_norm": 0.5233160621761658, + "acc_norm_stderr": 0.03604513672442202 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.041857744240220575, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.041857744240220575 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5229357798165137, + "acc_stderr": 0.0214147570581755, + "acc_norm": 0.5229357798165137, + "acc_norm_stderr": 0.0214147570581755 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.039325376803928724, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.039325376803928724 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.38562091503267976, + "acc_stderr": 0.027870745278290313, + "acc_norm": 0.38562091503267976, + "acc_norm_stderr": 0.027870745278290313 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5702479338842975, + "acc_stderr": 0.04519082021319773, + "acc_norm": 0.5702479338842975, + "acc_norm_stderr": 0.04519082021319773 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.34868421052631576, + "acc_stderr": 0.0387813988879761, + "acc_norm": 0.34868421052631576, + "acc_norm_stderr": 0.0387813988879761 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.37254901960784315, + 
"acc_stderr": 0.019559646809215927, + "acc_norm": 0.37254901960784315, + "acc_norm_stderr": 0.019559646809215927 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32269503546099293, + "acc_stderr": 0.027889139300534792, + "acc_norm": 0.32269503546099293, + "acc_norm_stderr": 0.027889139300534792 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.20535714285714285, + "acc_stderr": 0.038342410214190714, + "acc_norm": 0.20535714285714285, + "acc_norm_stderr": 0.038342410214190714 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.03114144782353604, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.03114144782353604 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.41911764705882354, + "acc_stderr": 0.029972807170464626, + "acc_norm": 0.41911764705882354, + "acc_norm_stderr": 0.029972807170464626 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.40816326530612246, + "acc_stderr": 0.03146465712827423, + "acc_norm": 0.40816326530612246, + "acc_norm_stderr": 0.03146465712827423 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5907172995780591, + "acc_stderr": 0.032007041833595914, + "acc_norm": 0.5907172995780591, + "acc_norm_stderr": 0.032007041833595914 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3194263363754889, + "acc_stderr": 0.011908357176756153, + "acc_norm": 0.3194263363754889, + "acc_norm_stderr": 0.011908357176756153 + }, + 
"harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.46078431372549017, + "acc_stderr": 0.03498501649369527, + "acc_norm": 0.46078431372549017, + "acc_norm_stderr": 0.03498501649369527 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.49696969696969695, + "acc_stderr": 0.03904272341431857, + "acc_norm": 0.49696969696969695, + "acc_norm_stderr": 0.03904272341431857 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2717258261933905, + "mc1_stderr": 0.01557284045287583, + "mc2": 0.4373212814432078, + "mc2_stderr": 0.014894808317447994 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.45218417945690675, + "acc_stderr": 0.017111567130916785, + "acc_norm": 0.5702479338842975, + "acc_norm_stderr": 0.017019847535972202 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 
1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "inswave/AISquare-Instruct-llama2-koen-13b-v0.9.12", + "model_sha": "98b594a5b23d281b5d562d9bca39cdb7bbcd5bed", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/inswave/AISquare-Instruct-llama2-koen-13b-v0.9.13/result_2023-12-02 00:09:38.json b/inswave/AISquare-Instruct-llama2-koen-13b-v0.9.13/result_2023-12-02 00:09:38.json new file mode 100644 index 
0000000000000000000000000000000000000000..6c9d85bea389fcc456a48bd46f3cfd87ce199f36 --- /dev/null +++ b/inswave/AISquare-Instruct-llama2-koen-13b-v0.9.13/result_2023-12-02 00:09:38.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.40187713310580203, + "acc_stderr": 0.014327268614578278, + "acc_norm": 0.45733788395904434, + "acc_norm_stderr": 0.014558106543924058 + }, + "harness|ko_hellaswag|10": { + "acc": 0.42491535550687115, + "acc_stderr": 0.004933198776700267, + "acc_norm": 0.5699063931487751, + "acc_norm_stderr": 0.004940771559475496 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5263157894736842, + "acc_stderr": 0.03829509868994727, + "acc_norm": 0.5263157894736842, + "acc_norm_stderr": 0.03829509868994727 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5145631067961165, + "acc_stderr": 0.04948637324026637, + "acc_norm": 0.5145631067961165, + "acc_norm_stderr": 0.04948637324026637 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5351213282247765, + "acc_stderr": 0.017835798806290642, + "acc_norm": 0.5351213282247765, + "acc_norm_stderr": 0.017835798806290642 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4888888888888889, + "acc_stderr": 0.04318275491977976, + "acc_norm": 0.4888888888888889, + "acc_norm_stderr": 0.04318275491977976 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4297872340425532, + "acc_stderr": 0.03236214467715564, + "acc_norm": 0.4297872340425532, + "acc_norm_stderr": 0.03236214467715564 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.40963855421686746, + "acc_stderr": 0.038284011150790206, + "acc_norm": 0.40963855421686746, + "acc_norm_stderr": 0.038284011150790206 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4855305466237942, + "acc_stderr": 0.028386198084177673, + "acc_norm": 0.4855305466237942, + 
"acc_norm_stderr": 0.028386198084177673 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5381165919282511, + "acc_stderr": 0.033460150119732274, + "acc_norm": 0.5381165919282511, + "acc_norm_stderr": 0.033460150119732274 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.40458015267175573, + "acc_stderr": 0.043046937953806645, + "acc_norm": 0.40458015267175573, + "acc_norm_stderr": 0.043046937953806645 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.41, + "acc_stderr": 0.04943110704237103, + "acc_norm": 0.41, + "acc_norm_stderr": 0.04943110704237103 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5404040404040404, + "acc_stderr": 0.035507024651313425, + "acc_norm": 0.5404040404040404, + "acc_norm_stderr": 0.035507024651313425 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4, + "acc_stderr": 0.04082482904638628, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04082482904638628 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.04336432707993177, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.04336432707993177 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.0322529423239964, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.0322529423239964 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4153846153846154, + "acc_stderr": 0.02498535492310231, + "acc_norm": 0.4153846153846154, + "acc_norm_stderr": 0.02498535492310231 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.55, + "acc_stderr": 0.049999999999999996, + "acc_norm": 0.55, + "acc_norm_stderr": 0.049999999999999996 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5185185185185185, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.5185185185185185, + "acc_norm_stderr": 
0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39408866995073893, + "acc_stderr": 0.034381579670365446, + "acc_norm": 0.39408866995073893, + "acc_norm_stderr": 0.034381579670365446 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.45806451612903226, + "acc_stderr": 0.028343787250540636, + "acc_norm": 0.45806451612903226, + "acc_norm_stderr": 0.028343787250540636 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6495726495726496, + "acc_stderr": 0.03125610824421881, + "acc_norm": 0.6495726495726496, + "acc_norm_stderr": 0.03125610824421881 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.41509433962264153, + "acc_stderr": 0.030325945789286105, + "acc_norm": 0.41509433962264153, + "acc_norm_stderr": 0.030325945789286105 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5, + "acc_stderr": 0.04789131426105757, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04789131426105757 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24074074074074073, + "acc_stderr": 0.026067159222275798, + "acc_norm": 0.24074074074074073, + "acc_norm_stderr": 0.026067159222275798 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.038020397601079024, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.038020397601079024 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5323383084577115, + "acc_stderr": 0.03528131472933607, + "acc_norm": 0.5323383084577115, + "acc_norm_stderr": 0.03528131472933607 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4161849710982659, + "acc_stderr": 0.037585177754049466, + "acc_norm": 0.4161849710982659, + "acc_norm_stderr": 0.037585177754049466 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.28835978835978837, + "acc_stderr": 0.0233306540545359, + "acc_norm": 0.28835978835978837, + "acc_norm_stderr": 0.0233306540545359 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3819444444444444, + "acc_stderr": 
0.040629907841466674, + "acc_norm": 0.3819444444444444, + "acc_norm_stderr": 0.040629907841466674 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.63, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.63, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5173410404624278, + "acc_stderr": 0.026902900458666647, + "acc_norm": 0.5173410404624278, + "acc_norm_stderr": 0.026902900458666647 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4723926380368098, + "acc_stderr": 0.039223782906109894, + "acc_norm": 0.4723926380368098, + "acc_norm_stderr": 0.039223782906109894 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.49382716049382713, + "acc_stderr": 0.02781862396258329, + "acc_norm": 0.49382716049382713, + "acc_norm_stderr": 0.02781862396258329 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5233160621761658, + "acc_stderr": 0.036045136724422014, + "acc_norm": 0.5233160621761658, + "acc_norm_stderr": 0.036045136724422014 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.04049339297748142, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.04049339297748142 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5376146788990825, + "acc_stderr": 0.021376575274397576, + "acc_norm": 0.5376146788990825, + "acc_norm_stderr": 0.021376575274397576 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30158730158730157, + "acc_stderr": 0.04104947269903394, + "acc_norm": 0.30158730158730157, + "acc_norm_stderr": 0.04104947269903394 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3954248366013072, + 
"acc_stderr": 0.02799672318063146, + "acc_norm": 0.3954248366013072, + "acc_norm_stderr": 0.02799672318063146 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6528925619834711, + "acc_stderr": 0.04345724570292534, + "acc_norm": 0.6528925619834711, + "acc_norm_stderr": 0.04345724570292534 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.375, + "acc_stderr": 0.039397364351956274, + "acc_norm": 0.375, + "acc_norm_stderr": 0.039397364351956274 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3790849673202614, + "acc_stderr": 0.019627444748412243, + "acc_norm": 0.3790849673202614, + "acc_norm_stderr": 0.019627444748412243 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.33687943262411346, + "acc_stderr": 0.028195534873966727, + "acc_norm": 0.33687943262411346, + "acc_norm_stderr": 0.028195534873966727 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2767857142857143, + "acc_stderr": 0.04246624336697624, + "acc_norm": 0.2767857142857143, + "acc_norm_stderr": 0.04246624336697624 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.32407407407407407, + "acc_stderr": 0.03191923445686186, + "acc_norm": 0.32407407407407407, + "acc_norm_stderr": 0.03191923445686186 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 
0.39338235294117646, + "acc_stderr": 0.02967428828131118, + "acc_norm": 0.39338235294117646, + "acc_norm_stderr": 0.02967428828131118 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4448979591836735, + "acc_stderr": 0.031814251181977865, + "acc_norm": 0.4448979591836735, + "acc_norm_stderr": 0.031814251181977865 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6286919831223629, + "acc_stderr": 0.03145068600744858, + "acc_norm": 0.6286919831223629, + "acc_norm_stderr": 0.03145068600744858 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3409387222946545, + "acc_stderr": 0.01210681720306721, + "acc_norm": 0.3409387222946545, + "acc_norm_stderr": 0.01210681720306721 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5490196078431373, + "acc_stderr": 0.034924061041636124, + "acc_norm": 0.5490196078431373, + "acc_norm_stderr": 0.034924061041636124 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5212121212121212, + "acc_stderr": 0.03900828913737302, + "acc_norm": 0.5212121212121212, + "acc_norm_stderr": 0.03900828913737302 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2533659730722154, + "mc1_stderr": 0.015225899340826828, + "mc2": 0.4144614046981997, + "mc2_stderr": 0.014804144568912091 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4887839433293979, + "acc_stderr": 0.017186028469489283, + "acc_norm": 0.5525383707201889, + "acc_norm_stderr": 0.01709519030150058 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + 
"harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + 
"harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "inswave/AISquare-Instruct-llama2-koen-13b-v0.9.13", + "model_sha": "957a247ca02f1ff29ed75871a13f4f46f9672b62", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/inswave/AISquare-Instruct-llama2-koen-13b-v0.9.21/result_2023-12-19 01:54:11.json b/inswave/AISquare-Instruct-llama2-koen-13b-v0.9.21/result_2023-12-19 01:54:11.json new file mode 100644 index 0000000000000000000000000000000000000000..79e286d627305029d80486cf3680b452cd97c044 --- /dev/null +++ b/inswave/AISquare-Instruct-llama2-koen-13b-v0.9.21/result_2023-12-19 01:54:11.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.40273037542662116, + "acc_stderr": 0.014332236306790152, + "acc_norm": 0.4684300341296928, + "acc_norm_stderr": 0.01458223646086698 + }, + "harness|ko_hellaswag|10": { + "acc": 0.420035849432384, + "acc_stderr": 0.004925556104679419, + "acc_norm": 0.5699063931487751, + "acc_norm_stderr": 0.0049407715594755 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.49707602339181284, + "acc_stderr": 0.03834759370936839, + "acc_norm": 0.49707602339181284, + "acc_norm_stderr": 0.03834759370936839 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5145631067961165, + "acc_stderr": 0.049486373240266356, + "acc_norm": 0.5145631067961165, + "acc_norm_stderr": 0.049486373240266356 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5389527458492975, + "acc_stderr": 0.017825621793239006, + "acc_norm": 0.5389527458492975, + "acc_norm_stderr": 0.017825621793239006 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.04244633238353229, + "acc_norm": 
0.4074074074074074, + "acc_norm_stderr": 0.04244633238353229 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.42127659574468085, + "acc_stderr": 0.03227834510146268, + "acc_norm": 0.42127659574468085, + "acc_norm_stderr": 0.03227834510146268 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.41566265060240964, + "acc_stderr": 0.03836722176598053, + "acc_norm": 0.41566265060240964, + "acc_norm_stderr": 0.03836722176598053 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5048231511254019, + "acc_stderr": 0.028396770444111298, + "acc_norm": 0.5048231511254019, + "acc_norm_stderr": 0.028396770444111298 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.49327354260089684, + "acc_stderr": 0.03355476596234355, + "acc_norm": 0.49327354260089684, + "acc_norm_stderr": 0.03355476596234355 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4732824427480916, + "acc_stderr": 0.04379024936553894, + "acc_norm": 0.4732824427480916, + "acc_norm_stderr": 0.04379024936553894 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5404040404040404, + "acc_stderr": 0.035507024651313425, + "acc_norm": 0.5404040404040404, + "acc_norm_stderr": 0.035507024651313425 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4068965517241379, + "acc_stderr": 0.04093793981266237, + "acc_norm": 0.4068965517241379, + "acc_norm_stderr": 0.04093793981266237 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.04280105837364396, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.04280105837364396 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.42016806722689076, + "acc_stderr": 
0.03206183783236152, + "acc_norm": 0.42016806722689076, + "acc_norm_stderr": 0.03206183783236152 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4564102564102564, + "acc_stderr": 0.02525448542479961, + "acc_norm": 0.4564102564102564, + "acc_norm_stderr": 0.02525448542479961 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5277777777777778, + "acc_stderr": 0.04826217294139894, + "acc_norm": 0.5277777777777778, + "acc_norm_stderr": 0.04826217294139894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39901477832512317, + "acc_stderr": 0.03445487686264715, + "acc_norm": 0.39901477832512317, + "acc_norm_stderr": 0.03445487686264715 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.44193548387096776, + "acc_stderr": 0.028251557906849734, + "acc_norm": 0.44193548387096776, + "acc_norm_stderr": 0.028251557906849734 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6068376068376068, + "acc_stderr": 0.03199957924651048, + "acc_norm": 0.6068376068376068, + "acc_norm_stderr": 0.03199957924651048 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4641509433962264, + "acc_stderr": 0.030693675018458003, + "acc_norm": 0.4641509433962264, + "acc_norm_stderr": 0.030693675018458003 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5272727272727272, + "acc_stderr": 0.04782001791380061, + "acc_norm": 0.5272727272727272, + "acc_norm_stderr": 0.04782001791380061 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24444444444444444, + "acc_stderr": 0.026202766534652148, + "acc_norm": 0.24444444444444444, + "acc_norm_stderr": 0.026202766534652148 + }, + "harness|ko_mmlu_high_school_physics|5": { 
+ "acc": 0.2781456953642384, + "acc_stderr": 0.036586032627637426, + "acc_norm": 0.2781456953642384, + "acc_norm_stderr": 0.036586032627637426 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5174129353233831, + "acc_stderr": 0.03533389234739245, + "acc_norm": 0.5174129353233831, + "acc_norm_stderr": 0.03533389234739245 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4046242774566474, + "acc_stderr": 0.0374246119388725, + "acc_norm": 0.4046242774566474, + "acc_norm_stderr": 0.0374246119388725 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.26455026455026454, + "acc_stderr": 0.022717467897708617, + "acc_norm": 0.26455026455026454, + "acc_norm_stderr": 0.022717467897708617 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.375, + "acc_stderr": 0.04048439222695598, + "acc_norm": 0.375, + "acc_norm_stderr": 0.04048439222695598 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.58, + "acc_stderr": 0.04960449637488584, + "acc_norm": 0.58, + "acc_norm_stderr": 0.04960449637488584 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4884393063583815, + "acc_stderr": 0.026911898686377906, + "acc_norm": 0.4884393063583815, + "acc_norm_stderr": 0.026911898686377906 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5153374233128835, + "acc_stderr": 0.039265223787088424, + "acc_norm": 0.5153374233128835, + "acc_norm_stderr": 0.039265223787088424 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.02774431344337654, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.02774431344337654 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 
0.5544041450777202, + "acc_stderr": 0.0358701498607566, + "acc_norm": 0.5544041450777202, + "acc_norm_stderr": 0.0358701498607566 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.0414243971948936, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.0414243971948936 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5339449541284403, + "acc_stderr": 0.021387863350353985, + "acc_norm": 0.5339449541284403, + "acc_norm_stderr": 0.021387863350353985 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30158730158730157, + "acc_stderr": 0.04104947269903394, + "acc_norm": 0.30158730158730157, + "acc_norm_stderr": 0.04104947269903394 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4215686274509804, + "acc_stderr": 0.028275490156791434, + "acc_norm": 0.4215686274509804, + "acc_norm_stderr": 0.028275490156791434 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5950413223140496, + "acc_stderr": 0.04481137755942469, + "acc_norm": 0.5950413223140496, + "acc_norm_stderr": 0.04481137755942469 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3684210526315789, + "acc_stderr": 0.03925523381052932, + "acc_norm": 0.3684210526315789, + "acc_norm_stderr": 0.03925523381052932 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3758169934640523, + "acc_stderr": 0.019594021136577454, + "acc_norm": 0.3758169934640523, + "acc_norm_stderr": 0.019594021136577454 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32978723404255317, + "acc_stderr": 0.028045946942042398, + "acc_norm": 0.32978723404255317, + "acc_norm_stderr": 0.028045946942042398 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.22321428571428573, + "acc_stderr": 0.039523019677025116, + "acc_norm": 0.22321428571428573, + "acc_norm_stderr": 0.039523019677025116 + 
}, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2916666666666667, + "acc_stderr": 0.030998666304560534, + "acc_norm": 0.2916666666666667, + "acc_norm_stderr": 0.030998666304560534 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.43014705882352944, + "acc_stderr": 0.030074971917302875, + "acc_norm": 0.43014705882352944, + "acc_norm_stderr": 0.030074971917302875 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.49795918367346936, + "acc_stderr": 0.0320089533497105, + "acc_norm": 0.49795918367346936, + "acc_norm_stderr": 0.0320089533497105 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6033755274261603, + "acc_stderr": 0.03184399873811225, + "acc_norm": 0.6033755274261603, + "acc_norm_stderr": 0.03184399873811225 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.333116036505867, + "acc_stderr": 0.01203793045151205, + "acc_norm": 0.333116036505867, + "acc_norm_stderr": 0.01203793045151205 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.44607843137254904, + "acc_stderr": 0.03488845451304974, + "acc_norm": 0.44607843137254904, + "acc_norm_stderr": 0.03488845451304974 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.0390369864774844, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.0390369864774844 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.25458996328029376, + "mc1_stderr": 0.015250117079156472, + "mc2": 
0.40602040127616007, + "mc2_stderr": 0.014779225998703538 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.45336481700118064, + "acc_stderr": 0.01711541822522687, + "acc_norm": 0.5596221959858324, + "acc_norm_stderr": 0.017067699774312984 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + 
"harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "inswave/AISquare-Instruct-llama2-koen-13b-v0.9.21", + "model_sha": "0f7564ff085f7eedbefdd13b0e390b0eca29f9d9", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/inswave/AISquare-Instruct-llama2-koen-13b-v0.9.22/result_2023-12-18 04:57:44.json b/inswave/AISquare-Instruct-llama2-koen-13b-v0.9.22/result_2023-12-18 04:57:44.json new file mode 100644 index 0000000000000000000000000000000000000000..479409ec3b86eccc35b2e444a75be70f4b43db04 --- /dev/null +++ b/inswave/AISquare-Instruct-llama2-koen-13b-v0.9.22/result_2023-12-18 04:57:44.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.42406143344709896, + "acc_stderr": 0.014441889627464396, + "acc_norm": 0.48890784982935154, + "acc_norm_stderr": 0.014607794914013041 + }, + "harness|ko_hellaswag|10": { + "acc": 0.44722166899024096, + "acc_stderr": 
0.00496190494917139, + "acc_norm": 0.5941047600079665, + "acc_norm_stderr": 0.004900608529778609 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.543859649122807, + "acc_stderr": 0.03820042586602967, + "acc_norm": 0.543859649122807, + "acc_norm_stderr": 0.03820042586602967 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5242718446601942, + "acc_stderr": 0.049449010929737795, + "acc_norm": 0.5242718446601942, + "acc_norm_stderr": 0.049449010929737795 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5402298850574713, + "acc_stderr": 0.01782199409693354, + "acc_norm": 0.5402298850574713, + "acc_norm_stderr": 0.01782199409693354 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.042925967182569816, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.042925967182569816 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.04605661864718381, + "acc_norm": 0.3, + "acc_norm_stderr": 0.04605661864718381 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3659574468085106, + "acc_stderr": 0.031489558297455304, + "acc_norm": 0.3659574468085106, + "acc_norm_stderr": 0.031489558297455304 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39759036144578314, + "acc_stderr": 0.038099730845402184, + "acc_norm": 0.39759036144578314, + "acc_norm_stderr": 0.038099730845402184 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5337620578778135, + "acc_stderr": 0.02833327710956279, + "acc_norm": 0.5337620578778135, + "acc_norm_stderr": 0.02833327710956279 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4484304932735426, + "acc_stderr": 0.033378837362550984, + "acc_norm": 0.4484304932735426, + "acc_norm_stderr": 0.033378837362550984 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48091603053435117, + "acc_stderr": 0.04382094705550989, + "acc_norm": 0.48091603053435117, + "acc_norm_stderr": 0.04382094705550989 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.37, + "acc_stderr": 
0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5353535353535354, + "acc_stderr": 0.03553436368828065, + "acc_norm": 0.5353535353535354, + "acc_norm_stderr": 0.03553436368828065 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4482758620689655, + "acc_stderr": 0.04144311810878151, + "acc_norm": 0.4482758620689655, + "acc_norm_stderr": 0.04144311810878151 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.043364327079931785, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.043364327079931785 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4327731092436975, + "acc_stderr": 0.03218358107742613, + "acc_norm": 0.4327731092436975, + "acc_norm_stderr": 0.03218358107742613 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.44871794871794873, + "acc_stderr": 0.025217315184846482, + "acc_norm": 0.44871794871794873, + "acc_norm_stderr": 0.025217315184846482 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5092592592592593, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.5092592592592593, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4039408866995074, + "acc_stderr": 0.0345245390382204, + "acc_norm": 0.4039408866995074, + "acc_norm_stderr": 0.0345245390382204 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4612903225806452, + "acc_stderr": 0.028358634859836928, + "acc_norm": 0.4612903225806452, + "acc_norm_stderr": 0.028358634859836928 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 
0.6153846153846154, + "acc_stderr": 0.03187195347942466, + "acc_norm": 0.6153846153846154, + "acc_norm_stderr": 0.03187195347942466 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.46037735849056605, + "acc_stderr": 0.030676096599389184, + "acc_norm": 0.46037735849056605, + "acc_norm_stderr": 0.030676096599389184 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4727272727272727, + "acc_stderr": 0.04782001791380063, + "acc_norm": 0.4727272727272727, + "acc_norm_stderr": 0.04782001791380063 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.027309140588230193, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.027309140588230193 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3443708609271523, + "acc_stderr": 0.038796870240733264, + "acc_norm": 0.3443708609271523, + "acc_norm_stderr": 0.038796870240733264 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6169154228855721, + "acc_stderr": 0.0343751933733825, + "acc_norm": 0.6169154228855721, + "acc_norm_stderr": 0.0343751933733825 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3872832369942196, + "acc_stderr": 0.03714325906302064, + "acc_norm": 0.3872832369942196, + "acc_norm_stderr": 0.03714325906302064 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.29894179894179895, + "acc_stderr": 0.023577604791655812, + "acc_norm": 0.29894179894179895, + "acc_norm_stderr": 0.023577604791655812 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.039420826399272135, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.039420826399272135 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.7, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.7, + "acc_norm_stderr": 0.046056618647183814 + }, + 
"harness|ko_mmlu_moral_disputes|5": { + "acc": 0.49710982658959535, + "acc_stderr": 0.02691864538323901, + "acc_norm": 0.49710982658959535, + "acc_norm_stderr": 0.02691864538323901 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.50920245398773, + "acc_stderr": 0.03927705600787443, + "acc_norm": 0.50920245398773, + "acc_norm_stderr": 0.03927705600787443 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5061728395061729, + "acc_stderr": 0.027818623962583295, + "acc_norm": 0.5061728395061729, + "acc_norm_stderr": 0.027818623962583295 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5958549222797928, + "acc_stderr": 0.0354150857888402, + "acc_norm": 0.5958549222797928, + "acc_norm_stderr": 0.0354150857888402 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.039994238792813344, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.039994238792813344 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5688073394495413, + "acc_stderr": 0.021233365030319563, + "acc_norm": 0.5688073394495413, + "acc_norm_stderr": 0.021233365030319563 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.0393253768039287, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.0393253768039287 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4542483660130719, + "acc_stderr": 0.028509807802626567, + "acc_norm": 0.4542483660130719, + "acc_norm_stderr": 0.028509807802626567 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6694214876033058, + "acc_stderr": 0.04294340845212093, + "acc_norm": 0.6694214876033058, + "acc_norm_stderr": 
0.04294340845212093 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40131578947368424, + "acc_stderr": 0.03988903703336285, + "acc_norm": 0.40131578947368424, + "acc_norm_stderr": 0.03988903703336285 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.40522875816993464, + "acc_stderr": 0.019861155193829163, + "acc_norm": 0.40522875816993464, + "acc_norm_stderr": 0.019861155193829163 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3262411347517731, + "acc_stderr": 0.027968453043563168, + "acc_norm": 0.3262411347517731, + "acc_norm_stderr": 0.027968453043563168 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.24107142857142858, + "acc_stderr": 0.04059867246952688, + "acc_norm": 0.24107142857142858, + "acc_norm_stderr": 0.04059867246952688 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.03324708911809117, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.03324708911809117 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2547486033519553, + "acc_stderr": 0.01457265038340916, + "acc_norm": 0.2547486033519553, + "acc_norm_stderr": 0.01457265038340916 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.34191176470588236, + "acc_stderr": 0.028814722422254167, + "acc_norm": 0.34191176470588236, + "acc_norm_stderr": 0.028814722422254167 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5387755102040817, + "acc_stderr": 0.03191282052669278, + "acc_norm": 0.5387755102040817, + "acc_norm_stderr": 0.03191282052669278 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6413502109704642, + "acc_stderr": 
0.031219569445301854, + "acc_norm": 0.6413502109704642, + "acc_norm_stderr": 0.031219569445301854 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3494132985658409, + "acc_stderr": 0.012177306252786683, + "acc_norm": 0.3494132985658409, + "acc_norm_stderr": 0.012177306252786683 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4803921568627451, + "acc_stderr": 0.03506612560524866, + "acc_norm": 0.4803921568627451, + "acc_norm_stderr": 0.03506612560524866 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5878787878787879, + "acc_stderr": 0.03843566993588717, + "acc_norm": 0.5878787878787879, + "acc_norm_stderr": 0.03843566993588717 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.33414932680538556, + "mc1_stderr": 0.016512530677150517, + "mc2": 0.5007120389559494, + "mc2_stderr": 0.015469844656182272 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.46871310507674147, + "acc_stderr": 0.01715666685978546, + "acc_norm": 0.5371900826446281, + "acc_norm_stderr": 0.0171427361176433 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + 
"harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "inswave/AISquare-Instruct-llama2-koen-13b-v0.9.22", + "model_sha": "d097a2107108c56e1e64d56df2650ad1005f15a6", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 
1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/inswave/AISquare-Instruct-llama2-koen-13b-v0.9.23/result_2023-12-18 06:20:23.json b/inswave/AISquare-Instruct-llama2-koen-13b-v0.9.23/result_2023-12-18 06:20:23.json new file mode 100644 index 0000000000000000000000000000000000000000..d1d51d3ac72ef9adaec04c52fcd828a23545ec50 --- /dev/null +++ b/inswave/AISquare-Instruct-llama2-koen-13b-v0.9.23/result_2023-12-18 06:20:23.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.41467576791808874, + "acc_stderr": 0.014397070564409172, + "acc_norm": 0.4735494880546075, + "acc_norm_stderr": 0.014590931358120163 + }, + "harness|ko_hellaswag|10": { + "acc": 0.44503087034455285, + "acc_stderr": 0.004959535443170612, + "acc_norm": 0.6004779924317865, + "acc_norm_stderr": 0.004887991225950278 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5497076023391813, + "acc_stderr": 0.038158273659132366, + "acc_norm": 0.5497076023391813, + "acc_norm_stderr": 0.038158273659132366 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5048543689320388, + "acc_stderr": 0.049505043821289195, + "acc_norm": 0.5048543689320388, + "acc_norm_stderr": 0.049505043821289195 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5632183908045977, + "acc_stderr": 0.01773647083780069, + "acc_norm": 0.5632183908045977, + "acc_norm_stderr": 0.01773647083780069 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.5111111111111111, + "acc_stderr": 0.04318275491977976, + "acc_norm": 0.5111111111111111, + "acc_norm_stderr": 0.04318275491977976 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768077, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768077 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3872340425531915, + "acc_stderr": 0.03184389265339526, + "acc_norm": 0.3872340425531915, + "acc_norm_stderr": 0.03184389265339526 + }, + "harness|ko_mmlu_virology|5": { 
+ "acc": 0.42771084337349397, + "acc_stderr": 0.038515976837185335, + "acc_norm": 0.42771084337349397, + "acc_norm_stderr": 0.038515976837185335 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5369774919614148, + "acc_stderr": 0.028320325830105915, + "acc_norm": 0.5369774919614148, + "acc_norm_stderr": 0.028320325830105915 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5112107623318386, + "acc_stderr": 0.033549366530984746, + "acc_norm": 0.5112107623318386, + "acc_norm_stderr": 0.033549366530984746 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48854961832061067, + "acc_stderr": 0.043841400240780176, + "acc_norm": 0.48854961832061067, + "acc_norm_stderr": 0.043841400240780176 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562429, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562429 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5606060606060606, + "acc_stderr": 0.035360859475294805, + "acc_norm": 0.5606060606060606, + "acc_norm_stderr": 0.035360859475294805 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.04104269211806231, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.04104269211806231 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.04023382273617748, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.04023382273617748 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.48739495798319327, + "acc_stderr": 0.03246816765752174, + "acc_norm": 0.48739495798319327, + "acc_norm_stderr": 0.03246816765752174 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4307692307692308, + "acc_stderr": 0.02510682066053975, + "acc_norm": 0.4307692307692308, + "acc_norm_stderr": 0.02510682066053975 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.53, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.53, + "acc_norm_stderr": 
0.050161355804659205 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4039408866995074, + "acc_stderr": 0.0345245390382204, + "acc_norm": 0.4039408866995074, + "acc_norm_stderr": 0.0345245390382204 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.45806451612903226, + "acc_stderr": 0.028343787250540636, + "acc_norm": 0.45806451612903226, + "acc_norm_stderr": 0.028343787250540636 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6581196581196581, + "acc_stderr": 0.03107502852650776, + "acc_norm": 0.6581196581196581, + "acc_norm_stderr": 0.03107502852650776 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.41132075471698115, + "acc_stderr": 0.030285009259009794, + "acc_norm": 0.41132075471698115, + "acc_norm_stderr": 0.030285009259009794 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4818181818181818, + "acc_stderr": 0.04785964010794916, + "acc_norm": 0.4818181818181818, + "acc_norm_stderr": 0.04785964010794916 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.027309140588230182, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.027309140588230182 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33774834437086093, + "acc_stderr": 0.038615575462551684, + "acc_norm": 0.33774834437086093, + "acc_norm_stderr": 0.038615575462551684 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6019900497512438, + "acc_stderr": 0.034611994290400135, + "acc_norm": 0.6019900497512438, + "acc_norm_stderr": 0.034611994290400135 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3988439306358382, + "acc_stderr": 0.03733626655383509, + "acc_norm": 
0.3988439306358382, + "acc_norm_stderr": 0.03733626655383509 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30158730158730157, + "acc_stderr": 0.023636975996101806, + "acc_norm": 0.30158730158730157, + "acc_norm_stderr": 0.023636975996101806 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.04076663253918567, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.04076663253918567 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.68, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.68, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5578034682080925, + "acc_stderr": 0.0267386036438074, + "acc_norm": 0.5578034682080925, + "acc_norm_stderr": 0.0267386036438074 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5337423312883436, + "acc_stderr": 0.03919415545048411, + "acc_norm": 0.5337423312883436, + "acc_norm_stderr": 0.03919415545048411 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5246913580246914, + "acc_stderr": 0.02778680093142745, + "acc_norm": 0.5246913580246914, + "acc_norm_stderr": 0.02778680093142745 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5544041450777202, + "acc_stderr": 0.035870149860756595, + "acc_norm": 0.5544041450777202, + "acc_norm_stderr": 0.035870149860756595 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.04049339297748142, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.04049339297748142 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5798165137614679, + "acc_stderr": 0.02116242004827351, 
+ "acc_norm": 0.5798165137614679, + "acc_norm_stderr": 0.02116242004827351 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.04134913018303316, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.04134913018303316 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.42810457516339867, + "acc_stderr": 0.028332397483664274, + "acc_norm": 0.42810457516339867, + "acc_norm_stderr": 0.028332397483664274 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6446280991735537, + "acc_stderr": 0.0436923632657398, + "acc_norm": 0.6446280991735537, + "acc_norm_stderr": 0.0436923632657398 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3881578947368421, + "acc_stderr": 0.03965842097512744, + "acc_norm": 0.3881578947368421, + "acc_norm_stderr": 0.03965842097512744 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4133986928104575, + "acc_stderr": 0.01992211568278667, + "acc_norm": 0.4133986928104575, + "acc_norm_stderr": 0.01992211568278667 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.35815602836879434, + "acc_stderr": 0.028602085862759412, + "acc_norm": 0.35815602836879434, + "acc_norm_stderr": 0.028602085862759412 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.30357142857142855, + "acc_stderr": 0.04364226155841044, + "acc_norm": 0.30357142857142855, + "acc_norm_stderr": 0.04364226155841044 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3194444444444444, + "acc_stderr": 0.03179876342176851, + "acc_norm": 0.3194444444444444, + "acc_norm_stderr": 0.03179876342176851 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24134078212290502, + "acc_stderr": 0.014310999547961441, + "acc_norm": 0.24134078212290502, + "acc_norm_stderr": 0.014310999547961441 + }, + "harness|ko_mmlu_college_computer_science|5": { + 
"acc": 0.38, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3272058823529412, + "acc_stderr": 0.028501452860396587, + "acc_norm": 0.3272058823529412, + "acc_norm_stderr": 0.028501452860396587 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5224489795918368, + "acc_stderr": 0.03197694118713671, + "acc_norm": 0.5224489795918368, + "acc_norm_stderr": 0.03197694118713671 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6708860759493671, + "acc_stderr": 0.030587326294702358, + "acc_norm": 0.6708860759493671, + "acc_norm_stderr": 0.030587326294702358 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.35723598435462844, + "acc_stderr": 0.012238615750316496, + "acc_norm": 0.35723598435462844, + "acc_norm_stderr": 0.012238615750316496 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5392156862745098, + "acc_stderr": 0.03498501649369527, + "acc_norm": 0.5392156862745098, + "acc_norm_stderr": 0.03498501649369527 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6303030303030303, + "acc_stderr": 0.03769430314512568, + "acc_norm": 0.6303030303030303, + "acc_norm_stderr": 0.03769430314512568 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3084455324357405, + "mc1_stderr": 0.01616803938315687, + "mc2": 0.47413164678266917, + "mc2_stderr": 0.015081026411493382 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4923258559622196, + "acc_stderr": 0.01718832921965428, + "acc_norm": 0.5584415584415584, + "acc_norm_stderr": 0.017072525875563106 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + 
"harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 
1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "inswave/AISquare-Instruct-llama2-koen-13b-v0.9.23", + "model_sha": "d67bdf521bd71da0aa1c5070c4f0dc89e988344c", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/inswave/AISquare-Instruct-llama2-koen-13b-v0.9.24/result_2023-12-18 23:59:33.json b/inswave/AISquare-Instruct-llama2-koen-13b-v0.9.24/result_2023-12-18 23:59:33.json new file mode 100644 index 0000000000000000000000000000000000000000..4fe52bc529cdbf84585734f1bc8f1dce5be1851a --- /dev/null +++ b/inswave/AISquare-Instruct-llama2-koen-13b-v0.9.24/result_2023-12-18 23:59:33.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.42406143344709896, + "acc_stderr": 0.014441889627464396, + "acc_norm": 0.4880546075085324, + "acc_norm_stderr": 0.014607220340597171 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4508066122286397, + "acc_stderr": 0.00496557224680386, + "acc_norm": 0.6041625174268074, + "acc_norm_stderr": 0.004880303863138504 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5497076023391813, + "acc_stderr": 0.038158273659132366, + "acc_norm": 0.5497076023391813, + "acc_norm_stderr": 0.038158273659132366 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5242718446601942, + "acc_stderr": 0.049449010929737795, + "acc_norm": 0.5242718446601942, + "acc_norm_stderr": 
0.049449010929737795 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5402298850574713, + "acc_stderr": 0.01782199409693354, + "acc_norm": 0.5402298850574713, + "acc_norm_stderr": 0.01782199409693354 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45185185185185184, + "acc_stderr": 0.042992689054808624, + "acc_norm": 0.45185185185185184, + "acc_norm_stderr": 0.042992689054808624 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.04605661864718381, + "acc_norm": 0.3, + "acc_norm_stderr": 0.04605661864718381 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.37446808510638296, + "acc_stderr": 0.031639106653672915, + "acc_norm": 0.37446808510638296, + "acc_norm_stderr": 0.031639106653672915 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39759036144578314, + "acc_stderr": 0.038099730845402184, + "acc_norm": 0.39759036144578314, + "acc_norm_stderr": 0.038099730845402184 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5241157556270096, + "acc_stderr": 0.028365041542564577, + "acc_norm": 0.5241157556270096, + "acc_norm_stderr": 0.028365041542564577 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.43946188340807174, + "acc_stderr": 0.03331092511038179, + "acc_norm": 0.43946188340807174, + "acc_norm_stderr": 0.03331092511038179 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4580152671755725, + "acc_stderr": 0.04369802690578756, + "acc_norm": 0.4580152671755725, + "acc_norm_stderr": 0.04369802690578756 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.494949494949495, + "acc_stderr": 0.035621707606254015, + "acc_norm": 0.494949494949495, + "acc_norm_stderr": 0.035621707606254015 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4482758620689655, + "acc_stderr": 0.04144311810878151, + "acc_norm": 0.4482758620689655, + 
"acc_norm_stderr": 0.04144311810878151 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.042801058373643966, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.042801058373643966 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.42436974789915966, + "acc_stderr": 0.032104790510157764, + "acc_norm": 0.42436974789915966, + "acc_norm_stderr": 0.032104790510157764 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4256410256410256, + "acc_stderr": 0.02506909438729654, + "acc_norm": 0.4256410256410256, + "acc_norm_stderr": 0.02506909438729654 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3842364532019704, + "acc_stderr": 0.0342239856565755, + "acc_norm": 0.3842364532019704, + "acc_norm_stderr": 0.0342239856565755 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4645161290322581, + "acc_stderr": 0.028372287797962956, + "acc_norm": 0.4645161290322581, + "acc_norm_stderr": 0.028372287797962956 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6196581196581197, + "acc_stderr": 0.03180425204384099, + "acc_norm": 0.6196581196581197, + "acc_norm_stderr": 0.03180425204384099 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4490566037735849, + "acc_stderr": 0.030612730713641095, + "acc_norm": 0.4490566037735849, + "acc_norm_stderr": 0.030612730713641095 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4818181818181818, + "acc_stderr": 
0.04785964010794916, + "acc_norm": 0.4818181818181818, + "acc_norm_stderr": 0.04785964010794916 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.29259259259259257, + "acc_stderr": 0.027738969632176088, + "acc_norm": 0.29259259259259257, + "acc_norm_stderr": 0.027738969632176088 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.03802039760107903, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.03802039760107903 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5970149253731343, + "acc_stderr": 0.034683432951111266, + "acc_norm": 0.5970149253731343, + "acc_norm_stderr": 0.034683432951111266 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3872832369942196, + "acc_stderr": 0.03714325906302064, + "acc_norm": 0.3872832369942196, + "acc_norm_stderr": 0.03714325906302064 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.023919984164047736, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.023919984164047736 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3472222222222222, + "acc_stderr": 0.03981240543717861, + "acc_norm": 0.3472222222222222, + "acc_norm_stderr": 0.03981240543717861 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.68, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.68, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5173410404624278, + "acc_stderr": 0.026902900458666647, + "acc_norm": 0.5173410404624278, + "acc_norm_stderr": 0.026902900458666647 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5398773006134969, + "acc_stderr": 0.039158572914369714, + "acc_norm": 0.5398773006134969, + "acc_norm_stderr": 0.039158572914369714 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5, 
+ "acc_stderr": 0.02782074420373286, + "acc_norm": 0.5, + "acc_norm_stderr": 0.02782074420373286 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5544041450777202, + "acc_stderr": 0.0358701498607566, + "acc_norm": 0.5544041450777202, + "acc_norm_stderr": 0.0358701498607566 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.22807017543859648, + "acc_stderr": 0.03947152782669415, + "acc_norm": 0.22807017543859648, + "acc_norm_stderr": 0.03947152782669415 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5688073394495413, + "acc_stderr": 0.021233365030319563, + "acc_norm": 0.5688073394495413, + "acc_norm_stderr": 0.021233365030319563 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.037184890068181146, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.037184890068181146 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.028452639985088006, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.028452639985088006 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6694214876033058, + "acc_stderr": 0.04294340845212094, + "acc_norm": 0.6694214876033058, + "acc_norm_stderr": 0.04294340845212094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40789473684210525, + "acc_stderr": 0.03999309712777471, + "acc_norm": 0.40789473684210525, + "acc_norm_stderr": 0.03999309712777471 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.39869281045751637, + "acc_stderr": 0.019808281317449845, + "acc_norm": 0.39869281045751637, + "acc_norm_stderr": 0.019808281317449845 + }, + "harness|ko_mmlu_professional_accounting|5": { 
+ "acc": 0.3262411347517731, + "acc_stderr": 0.027968453043563168, + "acc_norm": 0.3262411347517731, + "acc_norm_stderr": 0.027968453043563168 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.04287858751340456, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.04287858751340456 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3287037037037037, + "acc_stderr": 0.03203614084670058, + "acc_norm": 0.3287037037037037, + "acc_norm_stderr": 0.03203614084670058 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2435754189944134, + "acc_stderr": 0.014355911964767867, + "acc_norm": 0.2435754189944134, + "acc_norm_stderr": 0.014355911964767867 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.2977941176470588, + "acc_stderr": 0.027778298701545443, + "acc_norm": 0.2977941176470588, + "acc_norm_stderr": 0.027778298701545443 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5428571428571428, + "acc_stderr": 0.03189141832421396, + "acc_norm": 0.5428571428571428, + "acc_norm_stderr": 0.03189141832421396 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6708860759493671, + "acc_stderr": 0.03058732629470236, + "acc_norm": 0.6708860759493671, + "acc_norm_stderr": 0.03058732629470236 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.35071707953063885, + "acc_stderr": 0.012187773370741515, + "acc_norm": 0.35071707953063885, + "acc_norm_stderr": 0.012187773370741515 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.46568627450980393, + "acc_stderr": 0.03501038327635897, + "acc_norm": 0.46568627450980393, + "acc_norm_stderr": 
0.03501038327635897 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5696969696969697, + "acc_stderr": 0.03866225962879077, + "acc_norm": 0.5696969696969697, + "acc_norm_stderr": 0.03866225962879077 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.35006119951040393, + "mc1_stderr": 0.01669794942015103, + "mc2": 0.5095614732032704, + "mc2_stderr": 0.01563779443660761 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4852420306965762, + "acc_stderr": 0.017182864434998567, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.017119172208061504 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + 
"harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "inswave/AISquare-Instruct-llama2-koen-13b-v0.9.24", + "model_sha": "e6d5f42930c2e1d2310474735a5358c546f767ce", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/inswave/AISquare-Instruct-llama2-koen-13b-v0.9.25/result_2023-12-19 02:06:59.json b/inswave/AISquare-Instruct-llama2-koen-13b-v0.9.25/result_2023-12-19 02:06:59.json new file mode 100644 index 0000000000000000000000000000000000000000..0c6d57612f70907d66bb596a4be92fe71b50b708 --- /dev/null +++ 
b/inswave/AISquare-Instruct-llama2-koen-13b-v0.9.25/result_2023-12-19 02:06:59.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.41723549488054607, + "acc_stderr": 0.014409825518403079, + "acc_norm": 0.48464163822525597, + "acc_norm_stderr": 0.014604496129394915 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4494124676359291, + "acc_stderr": 0.004964177035221415, + "acc_norm": 0.6041625174268074, + "acc_norm_stderr": 0.004880303863138502 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5730994152046783, + "acc_stderr": 0.03793620616529916, + "acc_norm": 0.5730994152046783, + "acc_norm_stderr": 0.03793620616529916 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5242718446601942, + "acc_stderr": 0.049449010929737795, + "acc_norm": 0.5242718446601942, + "acc_norm_stderr": 0.049449010929737795 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5683269476372924, + "acc_stderr": 0.017712228939299798, + "acc_norm": 0.5683269476372924, + "acc_norm_stderr": 0.017712228939299798 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.5111111111111111, + "acc_stderr": 0.04318275491977976, + "acc_norm": 0.5111111111111111, + "acc_norm_stderr": 0.04318275491977976 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4, + "acc_stderr": 0.03202563076101735, + "acc_norm": 0.4, + "acc_norm_stderr": 0.03202563076101735 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.43373493975903615, + "acc_stderr": 0.03858158940685515, + "acc_norm": 0.43373493975903615, + "acc_norm_stderr": 0.03858158940685515 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5337620578778135, + "acc_stderr": 0.02833327710956279, + "acc_norm": 0.5337620578778135, + "acc_norm_stderr": 0.02833327710956279 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5112107623318386, + "acc_stderr": 
0.033549366530984746, + "acc_norm": 0.5112107623318386, + "acc_norm_stderr": 0.033549366530984746 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4732824427480916, + "acc_stderr": 0.04379024936553894, + "acc_norm": 0.4732824427480916, + "acc_norm_stderr": 0.04379024936553894 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562429, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562429 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5656565656565656, + "acc_stderr": 0.03531505879359182, + "acc_norm": 0.5656565656565656, + "acc_norm_stderr": 0.03531505879359182 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4, + "acc_stderr": 0.04082482904638627, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04082482904638627 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.18627450980392157, + "acc_stderr": 0.03873958714149352, + "acc_norm": 0.18627450980392157, + "acc_norm_stderr": 0.03873958714149352 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4789915966386555, + "acc_stderr": 0.032449808499900284, + "acc_norm": 0.4789915966386555, + "acc_norm_stderr": 0.032449808499900284 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4256410256410256, + "acc_stderr": 0.02506909438729654, + "acc_norm": 0.4256410256410256, + "acc_norm_stderr": 0.02506909438729654 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5277777777777778, + "acc_stderr": 0.04826217294139894, + "acc_norm": 0.5277777777777778, + "acc_norm_stderr": 0.04826217294139894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39901477832512317, + "acc_stderr": 
0.03445487686264716, + "acc_norm": 0.39901477832512317, + "acc_norm_stderr": 0.03445487686264716 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.45161290322580644, + "acc_stderr": 0.028310500348568392, + "acc_norm": 0.45161290322580644, + "acc_norm_stderr": 0.028310500348568392 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.030882736974138653, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.030882736974138653 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4188679245283019, + "acc_stderr": 0.030365050829115208, + "acc_norm": 0.4188679245283019, + "acc_norm_stderr": 0.030365050829115208 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5181818181818182, + "acc_stderr": 0.04785964010794916, + "acc_norm": 0.5181818181818182, + "acc_norm_stderr": 0.04785964010794916 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2740740740740741, + "acc_stderr": 0.027195934804085622, + "acc_norm": 0.2740740740740741, + "acc_norm_stderr": 0.027195934804085622 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.03757949922943343, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.03757949922943343 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5870646766169154, + "acc_stderr": 0.03481520803367348, + "acc_norm": 0.5870646766169154, + "acc_norm_stderr": 0.03481520803367348 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3988439306358382, + "acc_stderr": 0.03733626655383509, + "acc_norm": 0.3988439306358382, + "acc_norm_stderr": 0.03733626655383509 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.023517294335963286, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.023517294335963286 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3819444444444444, + "acc_stderr": 0.040629907841466674, + "acc_norm": 0.3819444444444444, + "acc_norm_stderr": 0.040629907841466674 + }, 
+ "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.68, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.68, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5722543352601156, + "acc_stderr": 0.02663653974111609, + "acc_norm": 0.5722543352601156, + "acc_norm_stderr": 0.02663653974111609 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5460122699386503, + "acc_stderr": 0.0391170190467718, + "acc_norm": 0.5460122699386503, + "acc_norm_stderr": 0.0391170190467718 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5123456790123457, + "acc_stderr": 0.027812262269327242, + "acc_norm": 0.5123456790123457, + "acc_norm_stderr": 0.027812262269327242 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5544041450777202, + "acc_stderr": 0.035870149860756595, + "acc_norm": 0.5544041450777202, + "acc_norm_stderr": 0.035870149860756595 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.040969851398436716, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.040969851398436716 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5853211009174312, + "acc_stderr": 0.021122903208602592, + "acc_norm": 0.5853211009174312, + "acc_norm_stderr": 0.021122903208602592 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.04134913018303316, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.04134913018303316 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.43790849673202614, + "acc_stderr": 0.02840830202033269, + "acc_norm": 0.43790849673202614, + "acc_norm_stderr": 0.02840830202033269 
+ }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6611570247933884, + "acc_stderr": 0.04320767807536669, + "acc_norm": 0.6611570247933884, + "acc_norm_stderr": 0.04320767807536669 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40789473684210525, + "acc_stderr": 0.03999309712777473, + "acc_norm": 0.40789473684210525, + "acc_norm_stderr": 0.03999309712777473 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4084967320261438, + "acc_stderr": 0.01988622103750188, + "acc_norm": 0.4084967320261438, + "acc_norm_stderr": 0.01988622103750188 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.028121636040639882, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.028121636040639882 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3125, + "acc_stderr": 0.043994650575715215, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.043994650575715215 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3194444444444444, + "acc_stderr": 0.03179876342176851, + "acc_norm": 0.3194444444444444, + "acc_norm_stderr": 0.03179876342176851 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24022346368715083, + "acc_stderr": 0.014288343803925295, + "acc_norm": 0.24022346368715083, + "acc_norm_stderr": 0.014288343803925295 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3492647058823529, + "acc_stderr": 0.028959755196824845, + "acc_norm": 0.3492647058823529, + "acc_norm_stderr": 
0.028959755196824845 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5224489795918368, + "acc_stderr": 0.03197694118713671, + "acc_norm": 0.5224489795918368, + "acc_norm_stderr": 0.03197694118713671 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.03068582059661081, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.03068582059661081 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3474576271186441, + "acc_stderr": 0.012161417729749806, + "acc_norm": 0.3474576271186441, + "acc_norm_stderr": 0.012161417729749806 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5490196078431373, + "acc_stderr": 0.03492406104163613, + "acc_norm": 0.5490196078431373, + "acc_norm_stderr": 0.03492406104163613 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6121212121212121, + "acc_stderr": 0.03804913653971011, + "acc_norm": 0.6121212121212121, + "acc_norm_stderr": 0.03804913653971011 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.31701346389228885, + "mc1_stderr": 0.016289203374403392, + "mc2": 0.4867878475809202, + "mc2_stderr": 0.015343742215624163 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.46989374262101535, + "acc_stderr": 0.017159163590170216, + "acc_norm": 0.5395513577331759, + "acc_norm_stderr": 0.017136487626049846 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + 
"harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + 
"harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "inswave/AISquare-Instruct-llama2-koen-13b-v0.9.25", + "model_sha": "677d2c24efb0f3d0568944dfde58795cbb21b16b", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/inswave/AISquare-Instruct-llama2-koen-13b-v0.9.3/result_2023-11-30 11:57:48.json b/inswave/AISquare-Instruct-llama2-koen-13b-v0.9.3/result_2023-11-30 11:57:48.json new file mode 100644 index 0000000000000000000000000000000000000000..3c4ae177764c7c1bfc44b01a88ebe0a716623f2a --- /dev/null +++ b/inswave/AISquare-Instruct-llama2-koen-13b-v0.9.3/result_2023-11-30 11:57:48.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3728668941979522, + "acc_stderr": 0.014131176760131174, + "acc_norm": 0.4462457337883959, + "acc_norm_stderr": 0.014526705548539982 + }, + "harness|ko_hellaswag|10": { + "acc": 0.41794463254331804, + "acc_stderr": 0.004922129568919583, + "acc_norm": 0.5683130850428202, + "acc_norm_stderr": 0.004942990623131124 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5029239766081871, + "acc_stderr": 0.03834759370936839, + "acc_norm": 0.5029239766081871, + "acc_norm_stderr": 0.03834759370936839 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4368932038834951, + "acc_stderr": 0.04911147107365777, + "acc_norm": 0.4368932038834951, + "acc_norm_stderr": 0.04911147107365777 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.51213282247765, + "acc_stderr": 0.017874698667491338, + "acc_norm": 0.51213282247765, + "acc_norm_stderr": 0.017874698667491338 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45925925925925926, + "acc_stderr": 0.04304979692464243, + "acc_norm": 0.45925925925925926, + "acc_norm_stderr": 0.04304979692464243 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + 
"acc_stderr": 0.045126085985421255, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421255 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39148936170212767, + "acc_stderr": 0.03190701242326812, + "acc_norm": 0.39148936170212767, + "acc_norm_stderr": 0.03190701242326812 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42168674698795183, + "acc_stderr": 0.03844453181770917, + "acc_norm": 0.42168674698795183, + "acc_norm_stderr": 0.03844453181770917 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.45016077170418006, + "acc_stderr": 0.02825666072336018, + "acc_norm": 0.45016077170418006, + "acc_norm_stderr": 0.02825666072336018 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4977578475336323, + "acc_stderr": 0.03355746535223264, + "acc_norm": 0.4977578475336323, + "acc_norm_stderr": 0.03355746535223264 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4122137404580153, + "acc_stderr": 0.043171711948702535, + "acc_norm": 0.4122137404580153, + "acc_norm_stderr": 0.043171711948702535 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.51010101010101, + "acc_stderr": 0.035616254886737454, + "acc_norm": 0.51010101010101, + "acc_norm_stderr": 0.035616254886737454 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3931034482758621, + "acc_stderr": 0.0407032901370707, + "acc_norm": 0.3931034482758621, + "acc_norm_stderr": 0.0407032901370707 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.041583075330832865, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.041583075330832865 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.032145368597886394, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.032145368597886394 + }, + 
"harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.43333333333333335, + "acc_stderr": 0.025124653525885138, + "acc_norm": 0.43333333333333335, + "acc_norm_stderr": 0.025124653525885138 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.048262172941398944, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.048262172941398944 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.03465304488406796, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.03465304488406796 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4290322580645161, + "acc_stderr": 0.02815603653823321, + "acc_norm": 0.4290322580645161, + "acc_norm_stderr": 0.02815603653823321 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6153846153846154, + "acc_stderr": 0.03187195347942466, + "acc_norm": 0.6153846153846154, + "acc_norm_stderr": 0.03187195347942466 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4037735849056604, + "acc_stderr": 0.030197611600197953, + "acc_norm": 0.4037735849056604, + "acc_norm_stderr": 0.030197611600197953 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.45454545454545453, + "acc_stderr": 0.04769300568972743, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.04769300568972743 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2740740740740741, + "acc_stderr": 0.02719593480408563, + "acc_norm": 0.2740740740740741, + "acc_norm_stderr": 0.02719593480408563 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.03757949922943342, + "acc_norm": 0.304635761589404, + 
"acc_norm_stderr": 0.03757949922943342 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5422885572139303, + "acc_stderr": 0.035228658640995975, + "acc_norm": 0.5422885572139303, + "acc_norm_stderr": 0.035228658640995975 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3468208092485549, + "acc_stderr": 0.03629146670159665, + "acc_norm": 0.3468208092485549, + "acc_norm_stderr": 0.03629146670159665 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2751322751322751, + "acc_stderr": 0.023000086859068642, + "acc_norm": 0.2751322751322751, + "acc_norm_stderr": 0.023000086859068642 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.04076663253918567, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.04076663253918567 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4653179190751445, + "acc_stderr": 0.026854257928258875, + "acc_norm": 0.4653179190751445, + "acc_norm_stderr": 0.026854257928258875 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4539877300613497, + "acc_stderr": 0.0391170190467718, + "acc_norm": 0.4539877300613497, + "acc_norm_stderr": 0.0391170190467718 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4382716049382716, + "acc_stderr": 0.027607914087400473, + "acc_norm": 0.4382716049382716, + "acc_norm_stderr": 0.027607914087400473 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.45595854922279794, + "acc_stderr": 0.03594413711272437, + "acc_norm": 
0.45595854922279794, + "acc_norm_stderr": 0.03594413711272437 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.040969851398436695, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.040969851398436695 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5045871559633027, + "acc_stderr": 0.021436420955529428, + "acc_norm": 0.5045871559633027, + "acc_norm_stderr": 0.021436420955529428 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.29365079365079366, + "acc_stderr": 0.04073524322147125, + "acc_norm": 0.29365079365079366, + "acc_norm_stderr": 0.04073524322147125 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.39215686274509803, + "acc_stderr": 0.027956046165424506, + "acc_norm": 0.39215686274509803, + "acc_norm_stderr": 0.027956046165424506 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5950413223140496, + "acc_stderr": 0.04481137755942469, + "acc_norm": 0.5950413223140496, + "acc_norm_stderr": 0.04481137755942469 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.39473684210526316, + "acc_stderr": 0.03977749934622074, + "acc_norm": 0.39473684210526316, + "acc_norm_stderr": 0.03977749934622074 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.35784313725490197, + "acc_stderr": 0.01939305840235544, + "acc_norm": 0.35784313725490197, + "acc_norm_stderr": 0.01939305840235544 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32269503546099293, + "acc_stderr": 0.02788913930053479, + "acc_norm": 0.32269503546099293, + "acc_norm_stderr": 0.02788913930053479 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.23214285714285715, + "acc_stderr": 0.04007341809755806, + "acc_norm": 0.23214285714285715, + "acc_norm_stderr": 0.04007341809755806 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 
0.32407407407407407, + "acc_stderr": 0.03191923445686185, + "acc_norm": 0.32407407407407407, + "acc_norm_stderr": 0.03191923445686185 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.029520095697687758, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.029520095697687758 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.46122448979591835, + "acc_stderr": 0.031912820526692774, + "acc_norm": 0.46122448979591835, + "acc_norm_stderr": 0.031912820526692774 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5569620253164557, + "acc_stderr": 0.032335327775334835, + "acc_norm": 0.5569620253164557, + "acc_norm_stderr": 0.032335327775334835 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3161668839634941, + "acc_stderr": 0.011875780894386578, + "acc_norm": 0.3161668839634941, + "acc_norm_stderr": 0.011875780894386578 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.03460228327239171, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.03460228327239171 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5151515151515151, + "acc_stderr": 0.03902551007374448, + "acc_norm": 0.5151515151515151, + "acc_norm_stderr": 0.03902551007374448 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.25703794369645044, + "mc1_stderr": 0.015298077509485083, + "mc2": 0.3961332596017898, + "mc2_stderr": 
0.01470715664536681 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4946871310507674, + "acc_stderr": 0.017189383627229687, + "acc_norm": 0.5596221959858324, + "acc_norm_stderr": 0.01706769977431297 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, 
+ "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "inswave/AISquare-Instruct-llama2-koen-13b-v0.9.3", + "model_sha": "edba15648cad18bb50c8f586e984742dfa7609e1", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/inswave/AISquare-Instruct-llama2-koen-13b-v0.9.4/result_2023-11-30 13:50:44.json b/inswave/AISquare-Instruct-llama2-koen-13b-v0.9.4/result_2023-11-30 13:50:44.json new file mode 100644 index 0000000000000000000000000000000000000000..49969c198b62a1e3ba9c8faf5b955ebcec08396d --- /dev/null +++ b/inswave/AISquare-Instruct-llama2-koen-13b-v0.9.4/result_2023-11-30 13:50:44.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3839590443686007, + "acc_stderr": 0.014212444980651889, + "acc_norm": 0.4402730375426621, + "acc_norm_stderr": 0.014506769524804243 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4217287392949612, + "acc_stderr": 0.004928263494616731, + "acc_norm": 0.5674168492332204, + 
"acc_norm_stderr": 0.00494421593702139 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4269005847953216, + "acc_stderr": 0.03793620616529917, + "acc_norm": 0.4269005847953216, + "acc_norm_stderr": 0.03793620616529917 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.3883495145631068, + "acc_stderr": 0.0482572933735639, + "acc_norm": 0.3883495145631068, + "acc_norm_stderr": 0.0482572933735639 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.49680715197956576, + "acc_stderr": 0.017879598945933068, + "acc_norm": 0.49680715197956576, + "acc_norm_stderr": 0.017879598945933068 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4740740740740741, + "acc_stderr": 0.04313531696750573, + "acc_norm": 0.4740740740740741, + "acc_norm_stderr": 0.04313531696750573 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.34893617021276596, + "acc_stderr": 0.031158522131357773, + "acc_norm": 0.34893617021276596, + "acc_norm_stderr": 0.031158522131357773 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.40963855421686746, + "acc_stderr": 0.038284011150790206, + "acc_norm": 0.40963855421686746, + "acc_norm_stderr": 0.038284011150790206 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4405144694533762, + "acc_stderr": 0.028196400574197422, + "acc_norm": 0.4405144694533762, + "acc_norm_stderr": 0.028196400574197422 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4484304932735426, + "acc_stderr": 0.033378837362550984, + "acc_norm": 0.4484304932735426, + "acc_norm_stderr": 0.033378837362550984 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.44274809160305345, + "acc_stderr": 0.04356447202665069, + "acc_norm": 0.44274809160305345, + "acc_norm_stderr": 0.04356447202665069 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + 
"acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.42424242424242425, + "acc_stderr": 0.03521224908841583, + "acc_norm": 0.42424242424242425, + "acc_norm_stderr": 0.03521224908841583 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.36551724137931035, + "acc_stderr": 0.04013124195424385, + "acc_norm": 0.36551724137931035, + "acc_norm_stderr": 0.04013124195424385 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237655, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237655 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.03156663099215416, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.03156663099215416 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3769230769230769, + "acc_stderr": 0.024570975364225995, + "acc_norm": 0.3769230769230769, + "acc_norm_stderr": 0.024570975364225995 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3251231527093596, + "acc_stderr": 0.03295797566311271, + "acc_norm": 0.3251231527093596, + "acc_norm_stderr": 0.03295797566311271 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.43548387096774194, + "acc_stderr": 0.02820622559150274, + "acc_norm": 0.43548387096774194, + "acc_norm_stderr": 0.02820622559150274 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5811965811965812, + "acc_stderr": 
0.03232128912157792, + "acc_norm": 0.5811965811965812, + "acc_norm_stderr": 0.03232128912157792 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3886792452830189, + "acc_stderr": 0.03000048544867599, + "acc_norm": 0.3886792452830189, + "acc_norm_stderr": 0.03000048544867599 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5, + "acc_stderr": 0.04789131426105757, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04789131426105757 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24814814814814815, + "acc_stderr": 0.0263357394040558, + "acc_norm": 0.24814814814814815, + "acc_norm_stderr": 0.0263357394040558 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.037101857261199946, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.037101857261199946 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.48756218905472637, + "acc_stderr": 0.03534439848539579, + "acc_norm": 0.48756218905472637, + "acc_norm_stderr": 0.03534439848539579 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3236994219653179, + "acc_stderr": 0.03567603799639172, + "acc_norm": 0.3236994219653179, + "acc_norm_stderr": 0.03567603799639172 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.28835978835978837, + "acc_stderr": 0.0233306540545359, + "acc_norm": 0.28835978835978837, + "acc_norm_stderr": 0.0233306540545359 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3680555555555556, + "acc_stderr": 0.04032999053960718, + "acc_norm": 0.3680555555555556, + "acc_norm_stderr": 0.04032999053960718 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.57, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.57, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4595375722543353, + "acc_stderr": 
0.026830805998952243, + "acc_norm": 0.4595375722543353, + "acc_norm_stderr": 0.026830805998952243 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.38650306748466257, + "acc_stderr": 0.038258255488486076, + "acc_norm": 0.38650306748466257, + "acc_norm_stderr": 0.038258255488486076 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.027339546640662737, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.027339546640662737 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.40414507772020725, + "acc_stderr": 0.03541508578884019, + "acc_norm": 0.40414507772020725, + "acc_norm_stderr": 0.03541508578884019 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.041424397194893624, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.041424397194893624 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.44770642201834865, + "acc_stderr": 0.021319754962425462, + "acc_norm": 0.44770642201834865, + "acc_norm_stderr": 0.021319754962425462 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.20634920634920634, + "acc_stderr": 0.0361960452412425, + "acc_norm": 0.20634920634920634, + "acc_norm_stderr": 0.0361960452412425 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.41830065359477125, + "acc_stderr": 0.02824513402438729, + "acc_norm": 0.41830065359477125, + "acc_norm_stderr": 0.02824513402438729 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5867768595041323, + "acc_stderr": 0.04495087843548408, + "acc_norm": 0.5867768595041323, + "acc_norm_stderr": 0.04495087843548408 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 
0.3355263157894737, + "acc_stderr": 0.038424985593952694, + "acc_norm": 0.3355263157894737, + "acc_norm_stderr": 0.038424985593952694 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.35294117647058826, + "acc_stderr": 0.01933314202079706, + "acc_norm": 0.35294117647058826, + "acc_norm_stderr": 0.01933314202079706 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2872340425531915, + "acc_stderr": 0.026992199173064356, + "acc_norm": 0.2872340425531915, + "acc_norm_stderr": 0.026992199173064356 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.23214285714285715, + "acc_stderr": 0.04007341809755806, + "acc_norm": 0.23214285714285715, + "acc_norm_stderr": 0.04007341809755806 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.25, + "acc_stderr": 0.029531221160930918, + "acc_norm": 0.25, + "acc_norm_stderr": 0.029531221160930918 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.47, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.47, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3382352941176471, + "acc_stderr": 0.028739328513983583, + "acc_norm": 0.3382352941176471, + "acc_norm_stderr": 0.028739328513983583 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.37551020408163266, + "acc_stderr": 0.03100120903989484, + "acc_norm": 0.37551020408163266, + "acc_norm_stderr": 0.03100120903989484 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5907172995780591, + "acc_stderr": 0.03200704183359591, + "acc_norm": 0.5907172995780591, + "acc_norm_stderr": 0.03200704183359591 + 
}, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.303129074315515, + "acc_stderr": 0.011738669951254296, + "acc_norm": 0.303129074315515, + "acc_norm_stderr": 0.011738669951254296 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4215686274509804, + "acc_stderr": 0.03465868196380757, + "acc_norm": 0.4215686274509804, + "acc_norm_stderr": 0.03465868196380757 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5272727272727272, + "acc_stderr": 0.03898531605579418, + "acc_norm": 0.5272727272727272, + "acc_norm_stderr": 0.03898531605579418 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24969400244798043, + "mc1_stderr": 0.015152286907148125, + "mc2": 0.4115946865899359, + "mc2_stderr": 0.014692840096098678 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.512396694214876, + "acc_stderr": 0.01718506973267654, + "acc_norm": 0.577331759149941, + "acc_norm_stderr": 0.016983506079577607 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + 
"harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "inswave/AISquare-Instruct-llama2-koen-13b-v0.9.4", + "model_sha": "b750a1bafd65119569927ea34d464a6c707a433a", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of 
file diff --git a/inswave/AISquare-Instruct-llama2-koen-13b-v0.9.5/result_2023-11-30 16:16:06.json b/inswave/AISquare-Instruct-llama2-koen-13b-v0.9.5/result_2023-11-30 16:16:06.json new file mode 100644 index 0000000000000000000000000000000000000000..69a7aef17fcc9e1fef571de4beea3c0c3cfda087 --- /dev/null +++ b/inswave/AISquare-Instruct-llama2-koen-13b-v0.9.5/result_2023-11-30 16:16:06.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.39334470989761094, + "acc_stderr": 0.01427510146569302, + "acc_norm": 0.4590443686006826, + "acc_norm_stderr": 0.01456229107360123 + }, + "harness|ko_hellaswag|10": { + "acc": 0.43995220075682134, + "acc_stderr": 0.0049536670286543846, + "acc_norm": 0.5958972316271659, + "acc_norm_stderr": 0.004897146690596255 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4502923976608187, + "acc_stderr": 0.038158273659132366, + "acc_norm": 0.4502923976608187, + "acc_norm_stderr": 0.038158273659132366 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.36893203883495146, + "acc_stderr": 0.04777615181156739, + "acc_norm": 0.36893203883495146, + "acc_norm_stderr": 0.04777615181156739 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4942528735632184, + "acc_stderr": 0.017878782326129234, + "acc_norm": 0.4942528735632184, + "acc_norm_stderr": 0.017878782326129234 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.43703703703703706, + "acc_stderr": 0.04284958639753399, + "acc_norm": 0.43703703703703706, + "acc_norm_stderr": 0.04284958639753399 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421255, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421255 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39148936170212767, + "acc_stderr": 0.03190701242326812, + "acc_norm": 0.39148936170212767, + "acc_norm_stderr": 0.03190701242326812 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4036144578313253, + "acc_stderr": 0.03819486140758397, + 
"acc_norm": 0.4036144578313253, + "acc_norm_stderr": 0.03819486140758397 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.45016077170418006, + "acc_stderr": 0.02825666072336018, + "acc_norm": 0.45016077170418006, + "acc_norm_stderr": 0.02825666072336018 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4663677130044843, + "acc_stderr": 0.033481800170603065, + "acc_norm": 0.4663677130044843, + "acc_norm_stderr": 0.033481800170603065 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3816793893129771, + "acc_stderr": 0.04260735157644559, + "acc_norm": 0.3816793893129771, + "acc_norm_stderr": 0.04260735157644559 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.4898989898989899, + "acc_stderr": 0.035616254886737454, + "acc_norm": 0.4898989898989899, + "acc_norm_stderr": 0.035616254886737454 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3310344827586207, + "acc_stderr": 0.039215453124671215, + "acc_norm": 0.3310344827586207, + "acc_norm_stderr": 0.039215453124671215 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.18627450980392157, + "acc_stderr": 0.038739587141493524, + "acc_norm": 0.18627450980392157, + "acc_norm_stderr": 0.038739587141493524 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3949579831932773, + "acc_stderr": 0.031753678460966245, + "acc_norm": 0.3949579831932773, + "acc_norm_stderr": 0.031753678460966245 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.37948717948717947, + "acc_stderr": 0.024603626924097417, + "acc_norm": 0.37948717948717947, + "acc_norm_stderr": 0.024603626924097417 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + 
"acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.04820403072760627, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.04820403072760627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3842364532019704, + "acc_stderr": 0.0342239856565755, + "acc_norm": 0.3842364532019704, + "acc_norm_stderr": 0.0342239856565755 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3967741935483871, + "acc_stderr": 0.02783123160576794, + "acc_norm": 0.3967741935483871, + "acc_norm_stderr": 0.02783123160576794 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5256410256410257, + "acc_stderr": 0.03271298896811159, + "acc_norm": 0.5256410256410257, + "acc_norm_stderr": 0.03271298896811159 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.39622641509433965, + "acc_stderr": 0.030102793781791194, + "acc_norm": 0.39622641509433965, + "acc_norm_stderr": 0.030102793781791194 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.43636363636363634, + "acc_stderr": 0.04750185058907297, + "acc_norm": 0.43636363636363634, + "acc_norm_stderr": 0.04750185058907297 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.02684205787383371, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.02684205787383371 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.03780445850526733, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.03780445850526733 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.48258706467661694, + "acc_stderr": 0.03533389234739245, + "acc_norm": 0.48258706467661694, + "acc_norm_stderr": 0.03533389234739245 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3352601156069364, + "acc_stderr": 0.03599586301247078, + "acc_norm": 0.3352601156069364, + "acc_norm_stderr": 0.03599586301247078 + }, + 
"harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2751322751322751, + "acc_stderr": 0.023000086859068642, + "acc_norm": 0.2751322751322751, + "acc_norm_stderr": 0.023000086859068642 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3541666666666667, + "acc_stderr": 0.039994111357535424, + "acc_norm": 0.3541666666666667, + "acc_norm_stderr": 0.039994111357535424 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.41040462427745666, + "acc_stderr": 0.026483392042098177, + "acc_norm": 0.41040462427745666, + "acc_norm_stderr": 0.026483392042098177 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.37423312883435583, + "acc_stderr": 0.03802068102899616, + "acc_norm": 0.37423312883435583, + "acc_norm_stderr": 0.03802068102899616 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.41975308641975306, + "acc_stderr": 0.027460099557005135, + "acc_norm": 0.41975308641975306, + "acc_norm_stderr": 0.027460099557005135 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.44041450777202074, + "acc_stderr": 0.03582724530036094, + "acc_norm": 0.44041450777202074, + "acc_norm_stderr": 0.03582724530036094 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.041424397194893596, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.041424397194893596 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.47522935779816516, + "acc_stderr": 0.02141099975363592, + "acc_norm": 0.47522935779816516, + 
"acc_norm_stderr": 0.02141099975363592 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.23015873015873015, + "acc_stderr": 0.037649508797906066, + "acc_norm": 0.23015873015873015, + "acc_norm_stderr": 0.037649508797906066 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.027826109307283693, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.027826109307283693 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.47, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.47, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5702479338842975, + "acc_stderr": 0.04519082021319773, + "acc_norm": 0.5702479338842975, + "acc_norm_stderr": 0.04519082021319773 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.375, + "acc_stderr": 0.039397364351956274, + "acc_norm": 0.375, + "acc_norm_stderr": 0.039397364351956274 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3709150326797386, + "acc_stderr": 0.019542101564854125, + "acc_norm": 0.3709150326797386, + "acc_norm_stderr": 0.019542101564854125 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3049645390070922, + "acc_stderr": 0.027464708442022135, + "acc_norm": 0.3049645390070922, + "acc_norm_stderr": 0.027464708442022135 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25, + "acc_stderr": 0.04109974682633932, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04109974682633932 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.27314814814814814, + "acc_stderr": 0.030388051301678116, + "acc_norm": 0.27314814814814814, + "acc_norm_stderr": 0.030388051301678116 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + 
"acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.04960449637488584, + "acc_norm": 0.42, + "acc_norm_stderr": 0.04960449637488584 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.35294117647058826, + "acc_stderr": 0.029029422815681404, + "acc_norm": 0.35294117647058826, + "acc_norm_stderr": 0.029029422815681404 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.39183673469387753, + "acc_stderr": 0.031251275910891656, + "acc_norm": 0.39183673469387753, + "acc_norm_stderr": 0.031251275910891656 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5147679324894515, + "acc_stderr": 0.032533028078777386, + "acc_norm": 0.5147679324894515, + "acc_norm_stderr": 0.032533028078777386 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.28748370273794005, + "acc_stderr": 0.011559337355708509, + "acc_norm": 0.28748370273794005, + "acc_norm_stderr": 0.011559337355708509 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.39705882352941174, + "acc_stderr": 0.03434131164719129, + "acc_norm": 0.39705882352941174, + "acc_norm_stderr": 0.03434131164719129 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4484848484848485, + "acc_stderr": 0.038835659779569286, + "acc_norm": 0.4484848484848485, + "acc_norm_stderr": 0.038835659779569286 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.28886168910648713, + "mc1_stderr": 0.015866346401384304, + "mc2": 0.4538096352988952, + "mc2_stderr": 0.015290893328767008 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4344746162927981, + "acc_stderr": 0.017042098620824935, + "acc_norm": 0.5206611570247934, + "acc_norm_stderr": 0.017175671279836446 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 
1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + 
"harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "inswave/AISquare-Instruct-llama2-koen-13b-v0.9.5", + "model_sha": "4f3438e97f69f93269a2f78e6678647d45dd0e47", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/inswave/AISquare-Instruct-llama2-koen-13b-v0.9.8/result_2023-12-01 15:29:09.json b/inswave/AISquare-Instruct-llama2-koen-13b-v0.9.8/result_2023-12-01 15:29:09.json new file mode 100644 index 0000000000000000000000000000000000000000..7445ba35d961d3baaad39aab9589d5e68e95303b --- /dev/null +++ b/inswave/AISquare-Instruct-llama2-koen-13b-v0.9.8/result_2023-12-01 15:29:09.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.38054607508532423, + "acc_stderr": 0.014188277712349812, + "acc_norm": 0.4513651877133106, + "acc_norm_stderr": 0.014542104569955262 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4138617805218084, + "acc_stderr": 0.0049151774069562575, + "acc_norm": 0.5646285600477993, + "acc_norm_stderr": 0.00494792269268884 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4678362573099415, + "acc_stderr": 0.038268824176603704, + "acc_norm": 0.4678362573099415, + "acc_norm_stderr": 0.038268824176603704 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.3786407766990291, + "acc_stderr": 0.04802694698258975, + "acc_norm": 0.3786407766990291, + "acc_norm_stderr": 0.04802694698258975 + }, + 
"harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5236270753512133, + "acc_stderr": 0.017859989765176453, + "acc_norm": 0.5236270753512133, + "acc_norm_stderr": 0.017859989765176453 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.43703703703703706, + "acc_stderr": 0.04284958639753399, + "acc_norm": 0.43703703703703706, + "acc_norm_stderr": 0.04284958639753399 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421255, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421255 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.42127659574468085, + "acc_stderr": 0.03227834510146268, + "acc_norm": 0.42127659574468085, + "acc_norm_stderr": 0.03227834510146268 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.41566265060240964, + "acc_stderr": 0.03836722176598052, + "acc_norm": 0.41566265060240964, + "acc_norm_stderr": 0.03836722176598052 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4533762057877814, + "acc_stderr": 0.02827435985489425, + "acc_norm": 0.4533762057877814, + "acc_norm_stderr": 0.02827435985489425 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.47085201793721976, + "acc_stderr": 0.03350073248773403, + "acc_norm": 0.47085201793721976, + "acc_norm_stderr": 0.03350073248773403 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.45038167938931295, + "acc_stderr": 0.04363643698524779, + "acc_norm": 0.45038167938931295, + "acc_norm_stderr": 0.04363643698524779 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5656565656565656, + "acc_stderr": 0.03531505879359183, + "acc_norm": 0.5656565656565656, + "acc_norm_stderr": 0.03531505879359183 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.04104269211806232, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 
0.04104269211806232 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.044405219061793275, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.044405219061793275 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4369747899159664, + "acc_stderr": 0.03221943636566196, + "acc_norm": 0.4369747899159664, + "acc_norm_stderr": 0.03221943636566196 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4358974358974359, + "acc_stderr": 0.025141801511177505, + "acc_norm": 0.4358974358974359, + "acc_norm_stderr": 0.025141801511177505 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3448275862068966, + "acc_stderr": 0.033442837442804574, + "acc_norm": 0.3448275862068966, + "acc_norm_stderr": 0.033442837442804574 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4161290322580645, + "acc_stderr": 0.02804098138076155, + "acc_norm": 0.4161290322580645, + "acc_norm_stderr": 0.02804098138076155 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6324786324786325, + "acc_stderr": 0.031585391577456365, + "acc_norm": 0.6324786324786325, + "acc_norm_stderr": 0.031585391577456365 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4490566037735849, + "acc_stderr": 0.030612730713641095, + "acc_norm": 0.4490566037735849, + "acc_norm_stderr": 0.030612730713641095 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.44545454545454544, + "acc_stderr": 0.047605488214603246, + "acc_norm": 0.44545454545454544, + 
"acc_norm_stderr": 0.047605488214603246 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24444444444444444, + "acc_stderr": 0.026202766534652148, + "acc_norm": 0.24444444444444444, + "acc_norm_stderr": 0.026202766534652148 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + "acc_stderr": 0.03734535676787198, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.03734535676787198 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.4975124378109453, + "acc_stderr": 0.03535490150137289, + "acc_norm": 0.4975124378109453, + "acc_norm_stderr": 0.03535490150137289 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3583815028901734, + "acc_stderr": 0.03656343653353159, + "acc_norm": 0.3583815028901734, + "acc_norm_stderr": 0.03656343653353159 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.291005291005291, + "acc_stderr": 0.02339382650048487, + "acc_norm": 0.291005291005291, + "acc_norm_stderr": 0.02339382650048487 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3541666666666667, + "acc_stderr": 0.039994111357535424, + "acc_norm": 0.3541666666666667, + "acc_norm_stderr": 0.039994111357535424 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.63, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.63, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4479768786127168, + "acc_stderr": 0.026772990653361823, + "acc_norm": 0.4479768786127168, + "acc_norm_stderr": 0.026772990653361823 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.36809815950920244, + "acc_stderr": 0.03789213935838396, + "acc_norm": 0.36809815950920244, + "acc_norm_stderr": 0.03789213935838396 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.42901234567901236, + "acc_stderr": 0.027538925613470863, + "acc_norm": 
0.42901234567901236, + "acc_norm_stderr": 0.027538925613470863 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.41968911917098445, + "acc_stderr": 0.035615873276858834, + "acc_norm": 0.41968911917098445, + "acc_norm_stderr": 0.035615873276858834 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.04142439719489358, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.04142439719489358 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.47339449541284406, + "acc_stderr": 0.021406952688151577, + "acc_norm": 0.47339449541284406, + "acc_norm_stderr": 0.021406952688151577 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3412698412698413, + "acc_stderr": 0.04240799327574925, + "acc_norm": 0.3412698412698413, + "acc_norm_stderr": 0.04240799327574925 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.369281045751634, + "acc_stderr": 0.02763417668960266, + "acc_norm": 0.369281045751634, + "acc_norm_stderr": 0.02763417668960266 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5950413223140496, + "acc_stderr": 0.04481137755942469, + "acc_norm": 0.5950413223140496, + "acc_norm_stderr": 0.04481137755942469 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.34210526315789475, + "acc_stderr": 0.03860731599316092, + "acc_norm": 0.34210526315789475, + "acc_norm_stderr": 0.03860731599316092 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.33986928104575165, + "acc_stderr": 0.019162418588623553, + "acc_norm": 0.33986928104575165, + "acc_norm_stderr": 0.019162418588623553 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.29432624113475175, + 
"acc_stderr": 0.027187127011503796, + "acc_norm": 0.29432624113475175, + "acc_norm_stderr": 0.027187127011503796 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.24107142857142858, + "acc_stderr": 0.040598672469526864, + "acc_norm": 0.24107142857142858, + "acc_norm_stderr": 0.040598672469526864 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.35648148148148145, + "acc_stderr": 0.032664783315272714, + "acc_norm": 0.35648148148148145, + "acc_norm_stderr": 0.032664783315272714 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4117647058823529, + "acc_stderr": 0.02989616303312548, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.02989616303312548 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4775510204081633, + "acc_stderr": 0.03197694118713673, + "acc_norm": 0.4775510204081633, + "acc_norm_stderr": 0.03197694118713673 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5443037974683544, + "acc_stderr": 0.03241920684693335, + "acc_norm": 0.5443037974683544, + "acc_norm_stderr": 0.03241920684693335 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2940026075619296, + "acc_stderr": 0.011636062953698607, + "acc_norm": 0.2940026075619296, + "acc_norm_stderr": 0.011636062953698607 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.034602283272391704, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.034602283272391704 + 
}, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4484848484848485, + "acc_stderr": 0.03883565977956929, + "acc_norm": 0.4484848484848485, + "acc_norm_stderr": 0.03883565977956929 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2692778457772338, + "mc1_stderr": 0.015528566637087297, + "mc2": 0.4221992902902898, + "mc2_stderr": 0.014789127497911234 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.42266824085005905, + "acc_stderr": 0.016983506079577604, + "acc_norm": 0.5430932703659976, + "acc_norm_stderr": 0.017126389093086784 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + 
"harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "inswave/AISquare-Instruct-llama2-koen-13b-v0.9.8", + "model_sha": "101fcd5b704a9994471805741f9da3f7f4959088", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/inswave/AISquare-Instruct-llama2-koen-13b-v0.9.9/result_2023-12-01 15:26:25.json b/inswave/AISquare-Instruct-llama2-koen-13b-v0.9.9/result_2023-12-01 15:26:25.json new file mode 100644 index 0000000000000000000000000000000000000000..b07403e951fa83c14f77c31d8c2faddf33599838 --- /dev/null +++ b/inswave/AISquare-Instruct-llama2-koen-13b-v0.9.9/result_2023-12-01 15:26:25.json @@ 
-0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.363481228668942, + "acc_stderr": 0.014056207319068287, + "acc_norm": 0.43430034129692835, + "acc_norm_stderr": 0.014484703048857359 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4124676359290978, + "acc_stderr": 0.004912723848944788, + "acc_norm": 0.5605457080262896, + "acc_norm_stderr": 0.004953063404791439 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4269005847953216, + "acc_stderr": 0.03793620616529917, + "acc_norm": 0.4269005847953216, + "acc_norm_stderr": 0.03793620616529917 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.3592233009708738, + "acc_stderr": 0.04750458399041694, + "acc_norm": 0.3592233009708738, + "acc_norm_stderr": 0.04750458399041694 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5057471264367817, + "acc_stderr": 0.01787878232612923, + "acc_norm": 0.5057471264367817, + "acc_norm_stderr": 0.01787878232612923 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45925925925925926, + "acc_stderr": 0.04304979692464243, + "acc_norm": 0.45925925925925926, + "acc_norm_stderr": 0.04304979692464243 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.37872340425531914, + "acc_stderr": 0.03170995606040655, + "acc_norm": 0.37872340425531914, + "acc_norm_stderr": 0.03170995606040655 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4036144578313253, + "acc_stderr": 0.03819486140758397, + "acc_norm": 0.4036144578313253, + "acc_norm_stderr": 0.03819486140758397 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.43086816720257237, + "acc_stderr": 0.028125340983972714, + "acc_norm": 0.43086816720257237, + "acc_norm_stderr": 0.028125340983972714 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5022421524663677, + "acc_stderr": 0.03355746535223263, + "acc_norm": 0.5022421524663677, + 
"acc_norm_stderr": 0.03355746535223263 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4580152671755725, + "acc_stderr": 0.04369802690578756, + "acc_norm": 0.4580152671755725, + "acc_norm_stderr": 0.04369802690578756 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.4494949494949495, + "acc_stderr": 0.03544132491947969, + "acc_norm": 0.4494949494949495, + "acc_norm_stderr": 0.03544132491947969 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4206896551724138, + "acc_stderr": 0.0411391498118926, + "acc_norm": 0.4206896551724138, + "acc_norm_stderr": 0.0411391498118926 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237655, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237655 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.03156663099215416, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.03156663099215416 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4025641025641026, + "acc_stderr": 0.024864995159767766, + "acc_norm": 0.4025641025641026, + "acc_norm_stderr": 0.024864995159767766 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.04820403072760627, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.04820403072760627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3497536945812808, + "acc_stderr": 0.03355400904969567, + "acc_norm": 
0.3497536945812808, + "acc_norm_stderr": 0.03355400904969567 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3709677419354839, + "acc_stderr": 0.027480541887953593, + "acc_norm": 0.3709677419354839, + "acc_norm_stderr": 0.027480541887953593 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6068376068376068, + "acc_stderr": 0.03199957924651048, + "acc_norm": 0.6068376068376068, + "acc_norm_stderr": 0.03199957924651048 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3886792452830189, + "acc_stderr": 0.030000485448675986, + "acc_norm": 0.3886792452830189, + "acc_norm_stderr": 0.030000485448675986 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.45454545454545453, + "acc_stderr": 0.04769300568972744, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.04769300568972744 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24814814814814815, + "acc_stderr": 0.0263357394040558, + "acc_norm": 0.24814814814814815, + "acc_norm_stderr": 0.0263357394040558 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.25165562913907286, + "acc_stderr": 0.035433042343899844, + "acc_norm": 0.25165562913907286, + "acc_norm_stderr": 0.035433042343899844 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.48756218905472637, + "acc_stderr": 0.0353443984853958, + "acc_norm": 0.48756218905472637, + "acc_norm_stderr": 0.0353443984853958 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3179190751445087, + "acc_stderr": 0.0355068398916558, + "acc_norm": 0.3179190751445087, + "acc_norm_stderr": 0.0355068398916558 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2671957671957672, + "acc_stderr": 0.022789673145776575, + "acc_norm": 0.2671957671957672, + "acc_norm_stderr": 0.022789673145776575 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.039420826399272135, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.039420826399272135 + }, + 
"harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.53, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.53, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.45375722543352603, + "acc_stderr": 0.026803720583206177, + "acc_norm": 0.45375722543352603, + "acc_norm_stderr": 0.026803720583206177 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.36809815950920244, + "acc_stderr": 0.03789213935838396, + "acc_norm": 0.36809815950920244, + "acc_norm_stderr": 0.03789213935838396 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4104938271604938, + "acc_stderr": 0.027371350925124768, + "acc_norm": 0.4104938271604938, + "acc_norm_stderr": 0.027371350925124768 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.39896373056994816, + "acc_stderr": 0.03533999094065696, + "acc_norm": 0.39896373056994816, + "acc_norm_stderr": 0.03533999094065696 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.30701754385964913, + "acc_stderr": 0.0433913832257986, + "acc_norm": 0.30701754385964913, + "acc_norm_stderr": 0.0433913832257986 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.43302752293577984, + "acc_stderr": 0.021244146569074338, + "acc_norm": 0.43302752293577984, + "acc_norm_stderr": 0.021244146569074338 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2698412698412698, + "acc_stderr": 0.03970158273235173, + "acc_norm": 0.2698412698412698, + "acc_norm_stderr": 0.03970158273235173 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.37254901960784315, + "acc_stderr": 0.027684181883302898, + "acc_norm": 0.37254901960784315, + "acc_norm_stderr": 
0.027684181883302898 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5950413223140496, + "acc_stderr": 0.04481137755942469, + "acc_norm": 0.5950413223140496, + "acc_norm_stderr": 0.04481137755942469 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3157894736842105, + "acc_stderr": 0.0378272898086547, + "acc_norm": 0.3157894736842105, + "acc_norm_stderr": 0.0378272898086547 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3480392156862745, + "acc_stderr": 0.019270998708223977, + "acc_norm": 0.3480392156862745, + "acc_norm_stderr": 0.019270998708223977 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.31560283687943264, + "acc_stderr": 0.027724989449509314, + "acc_norm": 0.31560283687943264, + "acc_norm_stderr": 0.027724989449509314 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.29464285714285715, + "acc_stderr": 0.0432704093257873, + "acc_norm": 0.29464285714285715, + "acc_norm_stderr": 0.0432704093257873 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.029886910547626978, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.029886910547626978 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3897058823529412, + "acc_stderr": 0.02962466358115969, + "acc_norm": 
0.3897058823529412, + "acc_norm_stderr": 0.02962466358115969 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.43673469387755104, + "acc_stderr": 0.031751952375833226, + "acc_norm": 0.43673469387755104, + "acc_norm_stderr": 0.031751952375833226 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5527426160337553, + "acc_stderr": 0.03236564251614192, + "acc_norm": 0.5527426160337553, + "acc_norm_stderr": 0.03236564251614192 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.28552803129074317, + "acc_stderr": 0.01153575158666565, + "acc_norm": 0.28552803129074317, + "acc_norm_stderr": 0.01153575158666565 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.37745098039215685, + "acc_stderr": 0.03402272044340705, + "acc_norm": 0.37745098039215685, + "acc_norm_stderr": 0.03402272044340705 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.42424242424242425, + "acc_stderr": 0.038592681420702615, + "acc_norm": 0.42424242424242425, + "acc_norm_stderr": 0.038592681420702615 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2607099143206854, + "mc1_stderr": 0.015368841620766373, + "mc2": 0.41064069519153584, + "mc2_stderr": 0.014727550409349975 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.42384887839433294, + "acc_stderr": 0.01698981083462825, + "acc_norm": 0.538370720188902, + "acc_norm_stderr": 0.01713966022184555 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + 
"harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + 
"harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "inswave/AISquare-Instruct-llama2-koen-13b-v0.9.9", + "model_sha": "7d87974397be753ca5759d09c0688cc126becb31", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/inswave/AISquare-Instruct-yi-ko-6b-v0.9.16/result_2023-12-12 06:49:39.json b/inswave/AISquare-Instruct-yi-ko-6b-v0.9.16/result_2023-12-12 06:49:39.json new file mode 100644 index 0000000000000000000000000000000000000000..000116332a65b1ed06f5664a61df558f2c2049d4 --- /dev/null +++ b/inswave/AISquare-Instruct-yi-ko-6b-v0.9.16/result_2023-12-12 06:49:39.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.36945392491467577, + "acc_stderr": 0.014104578366491887, + "acc_norm": 0.43686006825938567, + "acc_norm_stderr": 0.014494421584256524 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4115714001194981, + "acc_stderr": 0.0049111251010646425, + "acc_norm": 0.5487950607448715, + "acc_norm_stderr": 0.004965963647210317 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5263157894736842, + "acc_stderr": 0.03829509868994727, + "acc_norm": 0.5263157894736842, + "acc_norm_stderr": 0.03829509868994727 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5533980582524272, + "acc_stderr": 0.04922424153458933, + "acc_norm": 0.5533980582524272, + "acc_norm_stderr": 0.04922424153458933 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.565772669220945, + "acc_stderr": 0.01772458938967779, + "acc_norm": 0.565772669220945, + "acc_norm_stderr": 0.01772458938967779 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.04316378599511326, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.04316378599511326 + }, + 
"harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847415, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847415 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.41702127659574467, + "acc_stderr": 0.03223276266711712, + "acc_norm": 0.41702127659574467, + "acc_norm_stderr": 0.03223276266711712 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4036144578313253, + "acc_stderr": 0.03819486140758397, + "acc_norm": 0.4036144578313253, + "acc_norm_stderr": 0.03819486140758397 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.49517684887459806, + "acc_stderr": 0.028396770444111298, + "acc_norm": 0.49517684887459806, + "acc_norm_stderr": 0.028396770444111298 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5067264573991032, + "acc_stderr": 0.03355476596234354, + "acc_norm": 0.5067264573991032, + "acc_norm_stderr": 0.03355476596234354 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5190839694656488, + "acc_stderr": 0.04382094705550988, + "acc_norm": 0.5190839694656488, + "acc_norm_stderr": 0.04382094705550988 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562429, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562429 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6161616161616161, + "acc_stderr": 0.03464881675016337, + "acc_norm": 0.6161616161616161, + "acc_norm_stderr": 0.03464881675016337 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4896551724137931, + "acc_stderr": 0.04165774775728763, + "acc_norm": 0.4896551724137931, + "acc_norm_stderr": 0.04165774775728763 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.04440521906179327, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.04440521906179327 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.47058823529411764, + "acc_stderr": 0.03242225027115007, + "acc_norm": 0.47058823529411764, + "acc_norm_stderr": 
0.03242225027115007 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.47435897435897434, + "acc_stderr": 0.02531764972644865, + "acc_norm": 0.47435897435897434, + "acc_norm_stderr": 0.02531764972644865 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4039408866995074, + "acc_stderr": 0.0345245390382204, + "acc_norm": 0.4039408866995074, + "acc_norm_stderr": 0.0345245390382204 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4870967741935484, + "acc_stderr": 0.028434533152681848, + "acc_norm": 0.4870967741935484, + "acc_norm_stderr": 0.028434533152681848 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7136752136752137, + "acc_stderr": 0.029614323690456648, + "acc_norm": 0.7136752136752137, + "acc_norm_stderr": 0.029614323690456648 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5018867924528302, + "acc_stderr": 0.03077265364207567, + "acc_norm": 0.5018867924528302, + "acc_norm_stderr": 0.03077265364207567 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5636363636363636, + "acc_stderr": 0.04750185058907297, + "acc_norm": 0.5636363636363636, + "acc_norm_stderr": 0.04750185058907297 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.34814814814814815, + "acc_stderr": 0.029045600290616258, + "acc_norm": 0.34814814814814815, + "acc_norm_stderr": 0.029045600290616258 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.038020397601079024, + 
"acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.038020397601079024 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6218905472636815, + "acc_stderr": 0.03428867848778658, + "acc_norm": 0.6218905472636815, + "acc_norm_stderr": 0.03428867848778658 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.44508670520231214, + "acc_stderr": 0.03789401760283648, + "acc_norm": 0.44508670520231214, + "acc_norm_stderr": 0.03789401760283648 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.31746031746031744, + "acc_stderr": 0.02397386199899207, + "acc_norm": 0.31746031746031744, + "acc_norm_stderr": 0.02397386199899207 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4305555555555556, + "acc_stderr": 0.04140685639111503, + "acc_norm": 0.4305555555555556, + "acc_norm_stderr": 0.04140685639111503 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.63, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.63, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5173410404624278, + "acc_stderr": 0.026902900458666647, + "acc_norm": 0.5173410404624278, + "acc_norm_stderr": 0.026902900458666647 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4110429447852761, + "acc_stderr": 0.038656978537853624, + "acc_norm": 0.4110429447852761, + "acc_norm_stderr": 0.038656978537853624 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5030864197530864, + "acc_stderr": 0.02782021415859437, + "acc_norm": 0.5030864197530864, + "acc_norm_stderr": 0.02782021415859437 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5906735751295337, + "acc_stderr": 
0.03548608168860806, + "acc_norm": 0.5906735751295337, + "acc_norm_stderr": 0.03548608168860806 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04434600701584925, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04434600701584925 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6, + "acc_stderr": 0.021004201260420078, + "acc_norm": 0.6, + "acc_norm_stderr": 0.021004201260420078 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.25396825396825395, + "acc_stderr": 0.03893259610604674, + "acc_norm": 0.25396825396825395, + "acc_norm_stderr": 0.03893259610604674 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5130718954248366, + "acc_stderr": 0.028620130800700246, + "acc_norm": 0.5130718954248366, + "acc_norm_stderr": 0.028620130800700246 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.043913262867240704, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.043913262867240704 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.47368421052631576, + "acc_stderr": 0.04063302731486671, + "acc_norm": 0.47368421052631576, + "acc_norm_stderr": 0.04063302731486671 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4150326797385621, + "acc_stderr": 0.019933627776857428, + "acc_norm": 0.4150326797385621, + "acc_norm_stderr": 0.019933627776857428 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3404255319148936, + "acc_stderr": 0.02826765748265014, + "acc_norm": 0.3404255319148936, + "acc_norm_stderr": 0.02826765748265014 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3125, + "acc_stderr": 0.043994650575715215, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.043994650575715215 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3194444444444444, + 
"acc_stderr": 0.03179876342176852, + "acc_norm": 0.3194444444444444, + "acc_norm_stderr": 0.03179876342176852 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24134078212290502, + "acc_stderr": 0.014310999547961443, + "acc_norm": 0.24134078212290502, + "acc_norm_stderr": 0.014310999547961443 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.53, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.53, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.43014705882352944, + "acc_stderr": 0.030074971917302875, + "acc_norm": 0.43014705882352944, + "acc_norm_stderr": 0.030074971917302875 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4448979591836735, + "acc_stderr": 0.031814251181977865, + "acc_norm": 0.4448979591836735, + "acc_norm_stderr": 0.031814251181977865 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.620253164556962, + "acc_stderr": 0.0315918875296585, + "acc_norm": 0.620253164556962, + "acc_norm_stderr": 0.0315918875296585 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.32920469361147325, + "acc_stderr": 0.012002091666902307, + "acc_norm": 0.32920469361147325, + "acc_norm_stderr": 0.012002091666902307 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5343137254901961, + "acc_stderr": 0.03501038327635897, + "acc_norm": 0.5343137254901961, + "acc_norm_stderr": 0.03501038327635897 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5818181818181818, + "acc_stderr": 0.038517163193983926, + "acc_norm": 0.5818181818181818, + "acc_norm_stderr": 0.038517163193983926 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2827417380660955, + "mc1_stderr": 0.015764770836777308, + "mc2": 0.4369384111906916, + "mc2_stderr": 0.01525557244220662 + }, + 
"harness|ko_commongen_v2|2": { + "acc": 0.5360094451003542, + "acc_stderr": 0.01714571536548667, + "acc_norm": 0.5726092089728453, + "acc_norm_stderr": 0.017008129844823156 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + 
"harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "inswave/AISquare-Instruct-yi-ko-6b-v0.9.16", + "model_sha": "cbec29938730e1d3ac36c931bd5b1ee275d3dae0", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/inswave/AISquare-Instruct-yi-ko-6b-v0.9.26/result_2023-12-21 01:20:21.json b/inswave/AISquare-Instruct-yi-ko-6b-v0.9.26/result_2023-12-21 01:20:21.json new file mode 100644 index 0000000000000000000000000000000000000000..b210c4c83d06bae9b36456860b2275983a065835 --- /dev/null +++ b/inswave/AISquare-Instruct-yi-ko-6b-v0.9.26/result_2023-12-21 01:20:21.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.36177474402730375, + "acc_stderr": 0.014041957945038078, + "acc_norm": 0.4308873720136519, + "acc_norm_stderr": 0.014471133392642468 + }, + "harness|ko_hellaswag|10": { + "acc": 0.40798645688109936, + "acc_stderr": 0.004904561795919, + "acc_norm": 0.5443138816968731, + "acc_norm_stderr": 0.004970145708187995 
+ }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5146198830409356, + "acc_stderr": 0.038331852752130254, + "acc_norm": 0.5146198830409356, + "acc_norm_stderr": 0.038331852752130254 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5631067961165048, + "acc_stderr": 0.04911147107365777, + "acc_norm": 0.5631067961165048, + "acc_norm_stderr": 0.04911147107365777 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5849297573435505, + "acc_stderr": 0.01762013700365528, + "acc_norm": 0.5849297573435505, + "acc_norm_stderr": 0.01762013700365528 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45925925925925926, + "acc_stderr": 0.04304979692464244, + "acc_norm": 0.45925925925925926, + "acc_norm_stderr": 0.04304979692464244 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4297872340425532, + "acc_stderr": 0.03236214467715564, + "acc_norm": 0.4297872340425532, + "acc_norm_stderr": 0.03236214467715564 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4036144578313253, + "acc_stderr": 0.03819486140758397, + "acc_norm": 0.4036144578313253, + "acc_norm_stderr": 0.03819486140758397 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5209003215434084, + "acc_stderr": 0.028373270961069414, + "acc_norm": 0.5209003215434084, + "acc_norm_stderr": 0.028373270961069414 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.484304932735426, + "acc_stderr": 0.0335412657542081, + "acc_norm": 0.484304932735426, + "acc_norm_stderr": 0.0335412657542081 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5190839694656488, + "acc_stderr": 0.043820947055509867, + "acc_norm": 0.5190839694656488, + "acc_norm_stderr": 0.043820947055509867 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.45, + "acc_stderr": 0.04999999999999999, + "acc_norm": 0.45, + "acc_norm_stderr": 0.04999999999999999 + }, + 
"harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6414141414141414, + "acc_stderr": 0.03416903640391521, + "acc_norm": 0.6414141414141414, + "acc_norm_stderr": 0.03416903640391521 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4896551724137931, + "acc_stderr": 0.04165774775728763, + "acc_norm": 0.4896551724137931, + "acc_norm_stderr": 0.04165774775728763 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.04617034827006717, + "acc_norm": 0.3137254901960784, + "acc_norm_stderr": 0.04617034827006717 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5084033613445378, + "acc_stderr": 0.03247390276569669, + "acc_norm": 0.5084033613445378, + "acc_norm_stderr": 0.03247390276569669 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4717948717948718, + "acc_stderr": 0.025310639254933907, + "acc_norm": 0.4717948717948718, + "acc_norm_stderr": 0.025310639254933907 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.55, + "acc_stderr": 0.049999999999999996, + "acc_norm": 0.55, + "acc_norm_stderr": 0.049999999999999996 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.43349753694581283, + "acc_stderr": 0.034867317274198714, + "acc_norm": 0.43349753694581283, + "acc_norm_stderr": 0.034867317274198714 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4838709677419355, + "acc_stderr": 0.028429203176724555, + "acc_norm": 0.4838709677419355, + "acc_norm_stderr": 0.028429203176724555 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6965811965811965, + "acc_stderr": 0.030118210106942656, + "acc_norm": 
0.6965811965811965, + "acc_norm_stderr": 0.030118210106942656 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5169811320754717, + "acc_stderr": 0.030755120364119905, + "acc_norm": 0.5169811320754717, + "acc_norm_stderr": 0.030755120364119905 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5545454545454546, + "acc_stderr": 0.047605488214603246, + "acc_norm": 0.5545454545454546, + "acc_norm_stderr": 0.047605488214603246 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.028742040903948492, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.028742040903948492 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.038020397601079024, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.038020397601079024 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6169154228855721, + "acc_stderr": 0.0343751933733825, + "acc_norm": 0.6169154228855721, + "acc_norm_stderr": 0.0343751933733825 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.43352601156069365, + "acc_stderr": 0.03778621079092055, + "acc_norm": 0.43352601156069365, + "acc_norm_stderr": 0.03778621079092055 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3306878306878307, + "acc_stderr": 0.024229965298425086, + "acc_norm": 0.3306878306878307, + "acc_norm_stderr": 0.024229965298425086 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4305555555555556, + "acc_stderr": 0.04140685639111503, + "acc_norm": 0.4305555555555556, + "acc_norm_stderr": 0.04140685639111503 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.64, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.64, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.546242774566474, + "acc_stderr": 
0.02680372058320617, + "acc_norm": 0.546242774566474, + "acc_norm_stderr": 0.02680372058320617 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4785276073619632, + "acc_stderr": 0.0392474687675113, + "acc_norm": 0.4785276073619632, + "acc_norm_stderr": 0.0392474687675113 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.47530864197530864, + "acc_stderr": 0.02778680093142745, + "acc_norm": 0.47530864197530864, + "acc_norm_stderr": 0.02778680093142745 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6113989637305699, + "acc_stderr": 0.03517739796373132, + "acc_norm": 0.6113989637305699, + "acc_norm_stderr": 0.03517739796373132 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.30701754385964913, + "acc_stderr": 0.0433913832257986, + "acc_norm": 0.30701754385964913, + "acc_norm_stderr": 0.0433913832257986 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6055045871559633, + "acc_stderr": 0.020954642108587492, + "acc_norm": 0.6055045871559633, + "acc_norm_stderr": 0.020954642108587492 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.040061680838488774, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.040061680838488774 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4803921568627451, + "acc_stderr": 0.028607893699576063, + "acc_norm": 0.4803921568627451, + "acc_norm_stderr": 0.028607893699576063 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.57, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.57, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6528925619834711, + "acc_stderr": 0.043457245702925335, + "acc_norm": 0.6528925619834711, + "acc_norm_stderr": 0.043457245702925335 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 
0.48026315789473684, + "acc_stderr": 0.04065771002562603, + "acc_norm": 0.48026315789473684, + "acc_norm_stderr": 0.04065771002562603 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.40032679738562094, + "acc_stderr": 0.019821843688271765, + "acc_norm": 0.40032679738562094, + "acc_norm_stderr": 0.019821843688271765 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32269503546099293, + "acc_stderr": 0.02788913930053478, + "acc_norm": 0.32269503546099293, + "acc_norm_stderr": 0.02788913930053478 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.32142857142857145, + "acc_stderr": 0.04432804055291517, + "acc_norm": 0.32142857142857145, + "acc_norm_stderr": 0.04432804055291517 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.031674687068289804, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.031674687068289804 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.26145251396648045, + "acc_stderr": 0.014696599650364545, + "acc_norm": 0.26145251396648045, + "acc_norm_stderr": 0.014696599650364545 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4485294117647059, + "acc_stderr": 0.030211479609121596, + "acc_norm": 0.4485294117647059, + "acc_norm_stderr": 0.030211479609121596 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.45714285714285713, + "acc_stderr": 0.03189141832421397, + "acc_norm": 0.45714285714285713, + "acc_norm_stderr": 0.03189141832421397 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6413502109704642, + "acc_stderr": 0.031219569445301847, + "acc_norm": 0.6413502109704642, + 
"acc_norm_stderr": 0.031219569445301847 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.32985658409387225, + "acc_stderr": 0.012008129938540476, + "acc_norm": 0.32985658409387225, + "acc_norm_stderr": 0.012008129938540476 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5490196078431373, + "acc_stderr": 0.034924061041636124, + "acc_norm": 0.5490196078431373, + "acc_norm_stderr": 0.034924061041636124 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5818181818181818, + "acc_stderr": 0.038517163193983926, + "acc_norm": 0.5818181818181818, + "acc_norm_stderr": 0.038517163193983926 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.32313341493268055, + "mc1_stderr": 0.016371836286454604, + "mc2": 0.4670658990793913, + "mc2_stderr": 0.01522338794267629 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5631641086186541, + "acc_stderr": 0.017052633559856065, + "acc_norm": 0.6033057851239669, + "acc_norm_stderr": 0.0168194386429714 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + 
"harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "inswave/AISquare-Instruct-yi-ko-6b-v0.9.26", + "model_sha": "3e0b1aaecaf0b1ca18382f799245a65f79177a21", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null 
+ } +} \ No newline at end of file diff --git a/invalid-coder/Sakura-SOLAR-Instruct-CarbonVillain-en-10.7B-v2-slerp/result_2024-05-16 06:09:15.json b/invalid-coder/Sakura-SOLAR-Instruct-CarbonVillain-en-10.7B-v2-slerp/result_2024-05-16 06:09:15.json new file mode 100644 index 0000000000000000000000000000000000000000..3be03f91ad038b3ee6ad4fd6bd95c094dbf9ac19 --- /dev/null +++ b/invalid-coder/Sakura-SOLAR-Instruct-CarbonVillain-en-10.7B-v2-slerp/result_2024-05-16 06:09:15.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3856655290102389, + "acc_stderr": 0.014224250973257187, + "acc_norm": 0.4786689419795222, + "acc_norm_stderr": 0.01459808797312711 + }, + "harness|ko_hellaswag|10": { + "acc": 0.40151364270065726, + "acc_stderr": 0.004892026457294714, + "acc_norm": 0.5388368850826528, + "acc_norm_stderr": 0.004974706428434288 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5614035087719298, + "acc_stderr": 0.0380579750559046, + "acc_norm": 0.5614035087719298, + "acc_norm_stderr": 0.0380579750559046 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6310679611650486, + "acc_stderr": 0.0477761518115674, + "acc_norm": 0.6310679611650486, + "acc_norm_stderr": 0.0477761518115674 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5925925925925926, + "acc_stderr": 0.01757070523925659, + "acc_norm": 0.5925925925925926, + "acc_norm_stderr": 0.01757070523925659 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37777777777777777, + "acc_stderr": 0.04188307537595853, + "acc_norm": 0.37777777777777777, + "acc_norm_stderr": 0.04188307537595853 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.42127659574468085, + "acc_stderr": 0.03227834510146268, + "acc_norm": 0.42127659574468085, + "acc_norm_stderr": 0.03227834510146268 + }, + "harness|ko_mmlu_virology|5": { + 
"acc": 0.42771084337349397, + "acc_stderr": 0.038515976837185335, + "acc_norm": 0.42771084337349397, + "acc_norm_stderr": 0.038515976837185335 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5627009646302251, + "acc_stderr": 0.028173917761762906, + "acc_norm": 0.5627009646302251, + "acc_norm_stderr": 0.028173917761762906 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5201793721973094, + "acc_stderr": 0.033530461674123005, + "acc_norm": 0.5201793721973094, + "acc_norm_stderr": 0.033530461674123005 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5877862595419847, + "acc_stderr": 0.04317171194870255, + "acc_norm": 0.5877862595419847, + "acc_norm_stderr": 0.04317171194870255 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6767676767676768, + "acc_stderr": 0.033322999210706444, + "acc_norm": 0.6767676767676768, + "acc_norm_stderr": 0.033322999210706444 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4827586206896552, + "acc_stderr": 0.041641887201693775, + "acc_norm": 0.4827586206896552, + "acc_norm_stderr": 0.041641887201693775 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3431372549019608, + "acc_stderr": 0.04724007352383888, + "acc_norm": 0.3431372549019608, + "acc_norm_stderr": 0.04724007352383888 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5546218487394958, + "acc_stderr": 0.03228410626716391, + "acc_norm": 0.5546218487394958, + "acc_norm_stderr": 0.03228410626716391 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5153846153846153, + "acc_stderr": 0.025339003010106505, + "acc_norm": 0.5153846153846153, + "acc_norm_stderr": 0.025339003010106505 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + 
}, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6018518518518519, + "acc_stderr": 0.04732332615978814, + "acc_norm": 0.6018518518518519, + "acc_norm_stderr": 0.04732332615978814 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3891625615763547, + "acc_stderr": 0.034304624161038716, + "acc_norm": 0.3891625615763547, + "acc_norm_stderr": 0.034304624161038716 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5290322580645161, + "acc_stderr": 0.028396016402761, + "acc_norm": 0.5290322580645161, + "acc_norm_stderr": 0.028396016402761 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7606837606837606, + "acc_stderr": 0.027951826808924333, + "acc_norm": 0.7606837606837606, + "acc_norm_stderr": 0.027951826808924333 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4716981132075472, + "acc_stderr": 0.030723535249006107, + "acc_norm": 0.4716981132075472, + "acc_norm_stderr": 0.030723535249006107 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5363636363636364, + "acc_stderr": 0.04776449162396197, + "acc_norm": 0.5363636363636364, + "acc_norm_stderr": 0.04776449162396197 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3037037037037037, + "acc_stderr": 0.02803792996911499, + "acc_norm": 0.3037037037037037, + "acc_norm_stderr": 0.02803792996911499 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.03780445850526732, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.03780445850526732 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.681592039800995, + "acc_stderr": 0.03294118479054095, + "acc_norm": 0.681592039800995, + "acc_norm_stderr": 0.03294118479054095 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4508670520231214, + "acc_stderr": 0.03794012674697029, + "acc_norm": 0.4508670520231214, 
+ "acc_norm_stderr": 0.03794012674697029 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.025107425481137282, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.025107425481137282 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5, + "acc_stderr": 0.04181210050035455, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04181210050035455 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.67, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.67, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5722543352601156, + "acc_stderr": 0.02663653974111609, + "acc_norm": 0.5722543352601156, + "acc_norm_stderr": 0.02663653974111609 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5398773006134969, + "acc_stderr": 0.03915857291436972, + "acc_norm": 0.5398773006134969, + "acc_norm_stderr": 0.03915857291436972 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5925925925925926, + "acc_stderr": 0.027339546640662727, + "acc_norm": 0.5925925925925926, + "acc_norm_stderr": 0.027339546640662727 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.689119170984456, + "acc_stderr": 0.03340361906276586, + "acc_norm": 0.689119170984456, + "acc_norm_stderr": 0.03340361906276586 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.42105263157894735, + "acc_stderr": 0.046446020912223177, + "acc_norm": 0.42105263157894735, + "acc_norm_stderr": 0.046446020912223177 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6275229357798165, + "acc_stderr": 0.0207283684576385, + "acc_norm": 0.6275229357798165, + 
"acc_norm_stderr": 0.0207283684576385 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.0442626668137991, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.0442626668137991 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5196078431372549, + "acc_stderr": 0.028607893699576066, + "acc_norm": 0.5196078431372549, + "acc_norm_stderr": 0.028607893699576066 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.71900826446281, + "acc_stderr": 0.04103203830514511, + "acc_norm": 0.71900826446281, + "acc_norm_stderr": 0.04103203830514511 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5460526315789473, + "acc_stderr": 0.04051646342874143, + "acc_norm": 0.5460526315789473, + "acc_norm_stderr": 0.04051646342874143 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.477124183006536, + "acc_stderr": 0.020206653187884786, + "acc_norm": 0.477124183006536, + "acc_norm_stderr": 0.020206653187884786 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.36524822695035464, + "acc_stderr": 0.028723863853281288, + "acc_norm": 0.36524822695035464, + "acc_norm_stderr": 0.028723863853281288 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.39285714285714285, + "acc_stderr": 0.04635550135609976, + "acc_norm": 0.39285714285714285, + "acc_norm_stderr": 0.04635550135609976 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4398148148148148, + "acc_stderr": 0.03385177976044809, + "acc_norm": 0.4398148148148148, + "acc_norm_stderr": 0.03385177976044809 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.33743016759776534, + "acc_stderr": 0.015813901283913048, + "acc_norm": 0.33743016759776534, + "acc_norm_stderr": 0.015813901283913048 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.05, + 
"acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.68, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.68, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5036764705882353, + "acc_stderr": 0.0303720158854282, + "acc_norm": 0.5036764705882353, + "acc_norm_stderr": 0.0303720158854282 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6326530612244898, + "acc_stderr": 0.03086214492108755, + "acc_norm": 0.6326530612244898, + "acc_norm_stderr": 0.03086214492108755 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6708860759493671, + "acc_stderr": 0.030587326294702368, + "acc_norm": 0.6708860759493671, + "acc_norm_stderr": 0.030587326294702368 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.39113428943937417, + "acc_stderr": 0.01246386183998206, + "acc_norm": 0.39113428943937417, + "acc_norm_stderr": 0.01246386183998206 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5882352941176471, + "acc_stderr": 0.03454236585380609, + "acc_norm": 0.5882352941176471, + "acc_norm_stderr": 0.03454236585380609 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6181818181818182, + "acc_stderr": 0.03793713171165633, + "acc_norm": 0.6181818181818182, + "acc_norm_stderr": 0.03793713171165633 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3659730722154223, + "mc1_stderr": 0.01686294168408835, + "mc2": 0.5281543081904363, + "mc2_stderr": 0.016461053601800883 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4899645808736718, + "acc_stderr": 0.01718689128689406, + "acc_norm": 0.4911452184179457, + "acc_norm_stderr": 0.01718765819933674 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + 
"harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + 
"harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "invalid-coder/Sakura-SOLAR-Instruct-CarbonVillain-en-10.7B-v2-slerp", + "model_sha": "39a1c76ddb5fa3a82c5b4071121d2e4866a25300", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/jaekwanyda/Yi-Ko-6B_KO_Open-Platypus/result_2023-12-29 06:19:39.json b/jaekwanyda/Yi-Ko-6B_KO_Open-Platypus/result_2023-12-29 06:19:39.json new file mode 100644 index 0000000000000000000000000000000000000000..7f6c96fcbba07c0c855294bea38d3342be670e92 --- /dev/null +++ b/jaekwanyda/Yi-Ko-6B_KO_Open-Platypus/result_2023-12-29 06:19:39.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3395904436860068, + "acc_stderr": 0.013839039762820167, + "acc_norm": 0.40017064846416384, + "acc_norm_stderr": 0.014317197787809174 + }, + "harness|ko_hellaswag|10": { + "acc": 0.39543915554670384, + "acc_stderr": 0.004879455474663811, + "acc_norm": 0.530870344552878, + "acc_norm_stderr": 0.0049802620254724775 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.47953216374269003, + "acc_stderr": 0.038316105328219316, + "acc_norm": 0.47953216374269003, + "acc_norm_stderr": 0.038316105328219316 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5339805825242718, + "acc_stderr": 0.0493929144727348, + "acc_norm": 0.5339805825242718, + "acc_norm_stderr": 0.0493929144727348 + }, + "harness|ko_mmlu_miscellaneous|5": { + 
"acc": 0.5402298850574713, + "acc_stderr": 0.017821994096933535, + "acc_norm": 0.5402298850574713, + "acc_norm_stderr": 0.017821994096933535 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.5333333333333333, + "acc_stderr": 0.043097329010363554, + "acc_norm": 0.5333333333333333, + "acc_norm_stderr": 0.043097329010363554 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.34, + "acc_stderr": 0.047609522856952365, + "acc_norm": 0.34, + "acc_norm_stderr": 0.047609522856952365 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3829787234042553, + "acc_stderr": 0.03177821250236922, + "acc_norm": 0.3829787234042553, + "acc_norm_stderr": 0.03177821250236922 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4036144578313253, + "acc_stderr": 0.03819486140758397, + "acc_norm": 0.4036144578313253, + "acc_norm_stderr": 0.03819486140758397 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5305466237942122, + "acc_stderr": 0.02834504586484062, + "acc_norm": 0.5305466237942122, + "acc_norm_stderr": 0.02834504586484062 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4439461883408072, + "acc_stderr": 0.03334625674242728, + "acc_norm": 0.4439461883408072, + "acc_norm_stderr": 0.03334625674242728 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5267175572519084, + "acc_stderr": 0.04379024936553894, + "acc_norm": 0.5267175572519084, + "acc_norm_stderr": 0.04379024936553894 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.42, + "acc_stderr": 0.04960449637488583, + "acc_norm": 0.42, + "acc_norm_stderr": 0.04960449637488583 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6262626262626263, + "acc_stderr": 0.034468977386593325, + "acc_norm": 0.6262626262626263, + "acc_norm_stderr": 0.034468977386593325 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4827586206896552, + "acc_stderr": 0.04164188720169377, + "acc_norm": 0.4827586206896552, + "acc_norm_stderr": 0.04164188720169377 + }, + "harness|ko_mmlu_college_physics|5": 
{ + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237655, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237655 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.453781512605042, + "acc_stderr": 0.03233943468182087, + "acc_norm": 0.453781512605042, + "acc_norm_stderr": 0.03233943468182087 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.41794871794871796, + "acc_stderr": 0.025007329882461227, + "acc_norm": 0.41794871794871796, + "acc_norm_stderr": 0.025007329882461227 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.04826217294139894, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.04826217294139894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3448275862068966, + "acc_stderr": 0.033442837442804574, + "acc_norm": 0.3448275862068966, + "acc_norm_stderr": 0.033442837442804574 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4806451612903226, + "acc_stderr": 0.0284226874043121, + "acc_norm": 0.4806451612903226, + "acc_norm_stderr": 0.0284226874043121 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6452991452991453, + "acc_stderr": 0.03134250486245402, + "acc_norm": 0.6452991452991453, + "acc_norm_stderr": 0.03134250486245402 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4679245283018868, + "acc_stderr": 0.030709486992556552, + "acc_norm": 0.4679245283018868, + "acc_norm_stderr": 0.030709486992556552 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4727272727272727, + "acc_stderr": 0.04782001791380063, + "acc_norm": 0.4727272727272727, + "acc_norm_stderr": 0.04782001791380063 + }, + 
"harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.337037037037037, + "acc_stderr": 0.028820884666253255, + "acc_norm": 0.337037037037037, + "acc_norm_stderr": 0.028820884666253255 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.32450331125827814, + "acc_stderr": 0.03822746937658753, + "acc_norm": 0.32450331125827814, + "acc_norm_stderr": 0.03822746937658753 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6119402985074627, + "acc_stderr": 0.034457899643627485, + "acc_norm": 0.6119402985074627, + "acc_norm_stderr": 0.034457899643627485 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3583815028901734, + "acc_stderr": 0.036563436533531585, + "acc_norm": 0.3583815028901734, + "acc_norm_stderr": 0.036563436533531585 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3201058201058201, + "acc_stderr": 0.024026846392873502, + "acc_norm": 0.3201058201058201, + "acc_norm_stderr": 0.024026846392873502 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4027777777777778, + "acc_stderr": 0.04101405519842426, + "acc_norm": 0.4027777777777778, + "acc_norm_stderr": 0.04101405519842426 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4190751445086705, + "acc_stderr": 0.026564178111422622, + "acc_norm": 0.4190751445086705, + "acc_norm_stderr": 0.026564178111422622 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4723926380368098, + "acc_stderr": 0.039223782906109894, + "acc_norm": 0.4723926380368098, + "acc_norm_stderr": 0.039223782906109894 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.45987654320987653, + "acc_stderr": 0.027731022753539284, + "acc_norm": 0.45987654320987653, + 
"acc_norm_stderr": 0.027731022753539284 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5492227979274611, + "acc_stderr": 0.035909109522355244, + "acc_norm": 0.5492227979274611, + "acc_norm_stderr": 0.035909109522355244 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.34210526315789475, + "acc_stderr": 0.04462917535336937, + "acc_norm": 0.34210526315789475, + "acc_norm_stderr": 0.04462917535336937 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5724770642201835, + "acc_stderr": 0.021210910204300434, + "acc_norm": 0.5724770642201835, + "acc_norm_stderr": 0.021210910204300434 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.24603174603174602, + "acc_stderr": 0.03852273364924316, + "acc_norm": 0.24603174603174602, + "acc_norm_stderr": 0.03852273364924316 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.46405228758169936, + "acc_stderr": 0.028555827516528784, + "acc_norm": 0.46405228758169936, + "acc_norm_stderr": 0.028555827516528784 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5537190082644629, + "acc_stderr": 0.0453793517794788, + "acc_norm": 0.5537190082644629, + "acc_norm_stderr": 0.0453793517794788 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4934210526315789, + "acc_stderr": 0.040685900502249704, + "acc_norm": 0.4934210526315789, + "acc_norm_stderr": 0.040685900502249704 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4068627450980392, + "acc_stderr": 0.019873802005061173, + "acc_norm": 0.4068627450980392, + "acc_norm_stderr": 0.019873802005061173 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3049645390070922, + "acc_stderr": 
0.027464708442022128, + "acc_norm": 0.3049645390070922, + "acc_norm_stderr": 0.027464708442022128 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.26785714285714285, + "acc_stderr": 0.04203277291467761, + "acc_norm": 0.26785714285714285, + "acc_norm_stderr": 0.04203277291467761 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.375, + "acc_stderr": 0.033016908987210894, + "acc_norm": 0.375, + "acc_norm_stderr": 0.033016908987210894 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2558659217877095, + "acc_stderr": 0.014593620923210746, + "acc_norm": 0.2558659217877095, + "acc_norm_stderr": 0.014593620923210746 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.39338235294117646, + "acc_stderr": 0.02967428828131118, + "acc_norm": 0.39338235294117646, + "acc_norm_stderr": 0.02967428828131118 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3224489795918367, + "acc_stderr": 0.029923100563683906, + "acc_norm": 0.3224489795918367, + "acc_norm_stderr": 0.029923100563683906 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6033755274261603, + "acc_stderr": 0.03184399873811225, + "acc_norm": 0.6033755274261603, + "acc_norm_stderr": 0.03184399873811225 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3122555410691004, + "acc_stderr": 0.01183579813568318, + "acc_norm": 0.3122555410691004, + "acc_norm_stderr": 0.01183579813568318 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4950980392156863, + "acc_stderr": 0.03509143375606787, + "acc_norm": 0.4950980392156863, + "acc_norm_stderr": 0.03509143375606787 + }, + 
"harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5575757575757576, + "acc_stderr": 0.03878372113711275, + "acc_norm": 0.5575757575757576, + "acc_norm_stderr": 0.03878372113711275 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.26438188494492043, + "mc1_stderr": 0.015438211119522512, + "mc2": 0.4100825053117308, + "mc2_stderr": 0.014781636083926547 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4923258559622196, + "acc_stderr": 0.017188329219654287, + "acc_norm": 0.5962219598583235, + "acc_norm_stderr": 0.01686903154029863 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + 
"harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "jaekwanyda/Yi-Ko-6B_KO_Open-Platypus", + "model_sha": "0e85d36838b09082b433d619c93744245219e9bf", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/jb723/3B_test_model/result_2023-11-21 05:17:41.json b/jb723/3B_test_model/result_2023-11-21 05:17:41.json new file mode 100644 index 0000000000000000000000000000000000000000..53e1c4df046e07347fb72e7f800ca39c58f41479 --- /dev/null +++ b/jb723/3B_test_model/result_2023-11-21 05:17:41.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.19368600682593856, + 
"acc_stderr": 0.01154842540997854, + "acc_norm": 0.2627986348122867, + "acc_norm_stderr": 0.012862523175351333 + }, + "harness|ko_hellaswag|10": { + "acc": 0.2524397530372436, + "acc_stderr": 0.0043352434344868275, + "acc_norm": 0.26030671181039633, + "acc_norm_stderr": 0.004379051357024134 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.2046783625730994, + "acc_stderr": 0.03094445977853321, + "acc_norm": 0.2046783625730994, + "acc_norm_stderr": 0.03094445977853321 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.17475728155339806, + "acc_stderr": 0.037601780060266196, + "acc_norm": 0.17475728155339806, + "acc_norm_stderr": 0.037601780060266196 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.28735632183908044, + "acc_stderr": 0.0161824107306827, + "acc_norm": 0.28735632183908044, + "acc_norm_stderr": 0.0161824107306827 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.22962962962962963, + "acc_stderr": 0.036333844140734636, + "acc_norm": 0.22962962962962963, + "acc_norm_stderr": 0.036333844140734636 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206824, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206824 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2723404255319149, + "acc_stderr": 0.029101290698386687, + "acc_norm": 0.2723404255319149, + "acc_norm_stderr": 0.029101290698386687 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.28313253012048195, + "acc_stderr": 0.03507295431370518, + "acc_norm": 0.28313253012048195, + "acc_norm_stderr": 0.03507295431370518 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.21864951768488747, + "acc_stderr": 0.023475581417861106, + "acc_norm": 0.21864951768488747, + "acc_norm_stderr": 0.023475581417861106 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.36771300448430494, + "acc_stderr": 0.032361983509282745, + "acc_norm": 0.36771300448430494, + "acc_norm_stderr": 0.032361983509282745 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 
0.2595419847328244, + "acc_stderr": 0.03844876139785271, + "acc_norm": 0.2595419847328244, + "acc_norm_stderr": 0.03844876139785271 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542129, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542129 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.21717171717171718, + "acc_stderr": 0.02937661648494563, + "acc_norm": 0.21717171717171718, + "acc_norm_stderr": 0.02937661648494563 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.22758620689655173, + "acc_stderr": 0.03493950380131184, + "acc_norm": 0.22758620689655173, + "acc_norm_stderr": 0.03493950380131184 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.04158307533083286, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.04158307533083286 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.027553614467863807, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.027553614467863807 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.258974358974359, + "acc_stderr": 0.02221110681006167, + "acc_norm": 0.258974358974359, + "acc_norm_stderr": 0.02221110681006167 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.22, + "acc_stderr": 0.041633319989322695, + "acc_norm": 0.22, + "acc_norm_stderr": 0.041633319989322695 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.18, + "acc_stderr": 0.038612291966536955, + "acc_norm": 0.18, + "acc_norm_stderr": 0.038612291966536955 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.21296296296296297, + "acc_stderr": 0.03957835471980981, + "acc_norm": 0.21296296296296297, + "acc_norm_stderr": 0.03957835471980981 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.22660098522167488, + "acc_stderr": 0.02945486383529295, + "acc_norm": 0.22660098522167488, + "acc_norm_stderr": 0.02945486383529295 + }, + 
"harness|ko_mmlu_high_school_biology|5": { + "acc": 0.24838709677419354, + "acc_stderr": 0.024580028921480996, + "acc_norm": 0.24838709677419354, + "acc_norm_stderr": 0.024580028921480996 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.20085470085470086, + "acc_stderr": 0.02624677294689048, + "acc_norm": 0.20085470085470086, + "acc_norm_stderr": 0.02624677294689048 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2528301886792453, + "acc_stderr": 0.02674989977124124, + "acc_norm": 0.2528301886792453, + "acc_norm_stderr": 0.02674989977124124 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.19090909090909092, + "acc_stderr": 0.03764425585984926, + "acc_norm": 0.19090909090909092, + "acc_norm_stderr": 0.03764425585984926 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3037037037037037, + "acc_stderr": 0.028037929969114986, + "acc_norm": 0.3037037037037037, + "acc_norm_stderr": 0.028037929969114986 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.19205298013245034, + "acc_stderr": 0.032162984205936135, + "acc_norm": 0.19205298013245034, + "acc_norm_stderr": 0.032162984205936135 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.23880597014925373, + "acc_stderr": 0.030147775935409224, + "acc_norm": 0.23880597014925373, + "acc_norm_stderr": 0.030147775935409224 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.23121387283236994, + "acc_stderr": 0.0321473730202947, + "acc_norm": 0.23121387283236994, + "acc_norm_stderr": 0.0321473730202947 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2566137566137566, + "acc_stderr": 0.022494510767503154, + "acc_norm": 0.2566137566137566, + "acc_norm_stderr": 0.022494510767503154 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.03476590104304134, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.03476590104304134 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909283, 
+ "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2630057803468208, + "acc_stderr": 0.023703099525258172, + "acc_norm": 0.2630057803468208, + "acc_norm_stderr": 0.023703099525258172 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.25766871165644173, + "acc_stderr": 0.03436150827846917, + "acc_norm": 0.25766871165644173, + "acc_norm_stderr": 0.03436150827846917 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.24382716049382716, + "acc_stderr": 0.023891879541959614, + "acc_norm": 0.24382716049382716, + "acc_norm_stderr": 0.023891879541959614 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.22, + "acc_stderr": 0.041633319989322695, + "acc_norm": 0.22, + "acc_norm_stderr": 0.041633319989322695 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.23316062176165803, + "acc_stderr": 0.030516111371476008, + "acc_norm": 0.23316062176165803, + "acc_norm_stderr": 0.030516111371476008 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.22807017543859648, + "acc_stderr": 0.03947152782669415, + "acc_norm": 0.22807017543859648, + "acc_norm_stderr": 0.03947152782669415 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.23669724770642203, + "acc_stderr": 0.018224078117299095, + "acc_norm": 0.23669724770642203, + "acc_norm_stderr": 0.018224078117299095 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.15079365079365079, + "acc_stderr": 0.03200686497287392, + "acc_norm": 0.15079365079365079, + "acc_norm_stderr": 0.03200686497287392 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.23202614379084968, + "acc_stderr": 0.024170840879341016, + "acc_norm": 0.23202614379084968, + "acc_norm_stderr": 0.024170840879341016 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.29, + "acc_stderr": 
0.04560480215720683, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720683 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2396694214876033, + "acc_stderr": 0.038968789850704164, + "acc_norm": 0.2396694214876033, + "acc_norm_stderr": 0.038968789850704164 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.17763157894736842, + "acc_stderr": 0.03110318238312338, + "acc_norm": 0.17763157894736842, + "acc_norm_stderr": 0.03110318238312338 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.21895424836601307, + "acc_stderr": 0.016729937565537537, + "acc_norm": 0.21895424836601307, + "acc_norm_stderr": 0.016729937565537537 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2553191489361702, + "acc_stderr": 0.02601199293090201, + "acc_norm": 0.2553191489361702, + "acc_norm_stderr": 0.02601199293090201 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.04287858751340456, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.04287858751340456 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.0340470532865388, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.0340470532865388 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2245810055865922, + "acc_stderr": 0.01395680366654464, + "acc_norm": 0.2245810055865922, + "acc_norm_stderr": 0.01395680366654464 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4485294117647059, + "acc_stderr": 0.030211479609121593, + "acc_norm": 0.4485294117647059, + "acc_norm_stderr": 0.030211479609121593 + }, + "harness|ko_mmlu_security_studies|5": 
{ + "acc": 0.24489795918367346, + "acc_stderr": 0.027529637440174934, + "acc_norm": 0.24489795918367346, + "acc_norm_stderr": 0.027529637440174934 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2616033755274262, + "acc_stderr": 0.028609516716994934, + "acc_norm": 0.2616033755274262, + "acc_norm_stderr": 0.028609516716994934 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2457627118644068, + "acc_stderr": 0.010996156635142692, + "acc_norm": 0.2457627118644068, + "acc_norm_stderr": 0.010996156635142692 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.24019607843137256, + "acc_stderr": 0.02998373305591361, + "acc_norm": 0.24019607843137256, + "acc_norm_stderr": 0.02998373305591361 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.22424242424242424, + "acc_stderr": 0.032568666616811015, + "acc_norm": 0.22424242424242424, + "acc_norm_stderr": 0.032568666616811015 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24969400244798043, + "mc1_stderr": 0.015152286907148125, + "mc2": 0.5049262297308551, + "mc2_stderr": 0.01678411384401745 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.08736717827626919, + "acc_stderr": 0.009708162004168805, + "acc_norm": 0.2833530106257379, + "acc_norm_stderr": 0.015492852084597239 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + 
"harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + 
"harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "jb723/3B_test_model", + "model_sha": "1a19c136fddd374cf68262eeb647d42f36626495", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/jb723/LLaMA2-en-ko-7B-model/result_2023-10-26 04:11:34.json b/jb723/LLaMA2-en-ko-7B-model/result_2023-10-26 04:11:34.json new file mode 100644 index 0000000000000000000000000000000000000000..7fcc671852bd04a432237153b5ab61c8140b7b12 --- /dev/null +++ b/jb723/LLaMA2-en-ko-7B-model/result_2023-10-26 04:11:34.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.20051194539249148, + "acc_stderr": 0.011700318050499354, + "acc_norm": 0.24573378839590443, + "acc_norm_stderr": 0.012581033453730099 + }, + "harness|ko_hellaswag|10": { + "acc": 0.2593108942441745, + "acc_stderr": 0.004373608212561024, + "acc_norm": 0.2818163712407887, + "acc_norm_stderr": 0.004489648865080873 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.3157894736842105, + "acc_stderr": 0.03565079670708311, + "acc_norm": 0.3157894736842105, + "acc_norm_stderr": 0.03565079670708311 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.21359223300970873, + "acc_stderr": 0.04058042015646034, + "acc_norm": 0.21359223300970873, + "acc_norm_stderr": 0.04058042015646034 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2796934865900383, + "acc_stderr": 0.016050792148036536, + "acc_norm": 0.2796934865900383, + "acc_norm_stderr": 0.016050792148036536 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2, + "acc_stderr": 0.034554737023254366, + "acc_norm": 0.2, + "acc_norm_stderr": 0.034554737023254366 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.22, + "acc_stderr": 0.0416333199893227, + "acc_norm": 0.22, + "acc_norm_stderr": 0.0416333199893227 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 
0.31063829787234043, + "acc_stderr": 0.030251237579213167, + "acc_norm": 0.31063829787234043, + "acc_norm_stderr": 0.030251237579213167 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.28313253012048195, + "acc_stderr": 0.03507295431370519, + "acc_norm": 0.28313253012048195, + "acc_norm_stderr": 0.03507295431370519 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3054662379421222, + "acc_stderr": 0.026160584450140485, + "acc_norm": 0.3054662379421222, + "acc_norm_stderr": 0.026160584450140485 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.2825112107623318, + "acc_stderr": 0.03021683101150877, + "acc_norm": 0.2825112107623318, + "acc_norm_stderr": 0.03021683101150877 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.24427480916030533, + "acc_stderr": 0.037683359597287434, + "acc_norm": 0.24427480916030533, + "acc_norm_stderr": 0.037683359597287434 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.25757575757575757, + "acc_stderr": 0.031156269519646836, + "acc_norm": 0.25757575757575757, + "acc_norm_stderr": 0.031156269519646836 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.32413793103448274, + "acc_stderr": 0.03900432069185554, + "acc_norm": 0.32413793103448274, + "acc_norm_stderr": 0.03900432069185554 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.16666666666666666, + "acc_stderr": 0.03708284662416542, + "acc_norm": 0.16666666666666666, + "acc_norm_stderr": 0.03708284662416542 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.2773109243697479, + "acc_stderr": 0.029079374539480007, + "acc_norm": 0.2773109243697479, + "acc_norm_stderr": 0.029079374539480007 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.258974358974359, + "acc_stderr": 0.022211106810061672, + "acc_norm": 0.258974358974359, + "acc_norm_stderr": 
0.022211106810061672 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.044143436668549335, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.044143436668549335 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.24630541871921183, + "acc_stderr": 0.030315099285617722, + "acc_norm": 0.24630541871921183, + "acc_norm_stderr": 0.030315099285617722 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2838709677419355, + "acc_stderr": 0.025649381063029265, + "acc_norm": 0.2838709677419355, + "acc_norm_stderr": 0.025649381063029265 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.3034188034188034, + "acc_stderr": 0.030118210106942662, + "acc_norm": 0.3034188034188034, + "acc_norm_stderr": 0.030118210106942662 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2943396226415094, + "acc_stderr": 0.02804918631569525, + "acc_norm": 0.2943396226415094, + "acc_norm_stderr": 0.02804918631569525 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2909090909090909, + "acc_stderr": 0.04350271442923243, + "acc_norm": 0.2909090909090909, + "acc_norm_stderr": 0.04350271442923243 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2814814814814815, + "acc_stderr": 0.027420019350945266, + "acc_norm": 0.2814814814814815, + "acc_norm_stderr": 0.027420019350945266 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + "acc_stderr": 0.03684881521389024, + "acc_norm": 0.2847682119205298, + "acc_norm_stderr": 0.03684881521389024 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.31840796019900497, + "acc_stderr": 0.032941184790540944, + "acc_norm": 
0.31840796019900497, + "acc_norm_stderr": 0.032941184790540944 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2254335260115607, + "acc_stderr": 0.03186209851641143, + "acc_norm": 0.2254335260115607, + "acc_norm_stderr": 0.03186209851641143 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.25396825396825395, + "acc_stderr": 0.022418042891113946, + "acc_norm": 0.25396825396825395, + "acc_norm_stderr": 0.022418042891113946 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.25, + "acc_stderr": 0.03621034121889507, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03621034121889507 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036846, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036846 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2976878612716763, + "acc_stderr": 0.024617055388677003, + "acc_norm": 0.2976878612716763, + "acc_norm_stderr": 0.024617055388677003 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3067484662576687, + "acc_stderr": 0.03623089915724146, + "acc_norm": 0.3067484662576687, + "acc_norm_stderr": 0.03623089915724146 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.25617283950617287, + "acc_stderr": 0.0242885336377261, + "acc_norm": 0.25617283950617287, + "acc_norm_stderr": 0.0242885336377261 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816507, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816507 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.29015544041450775, + "acc_stderr": 0.03275264467791516, + "acc_norm": 0.29015544041450775, + "acc_norm_stderr": 0.03275264467791516 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.039994238792813344, + "acc_norm": 
0.23684210526315788, + "acc_norm_stderr": 0.039994238792813344 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.23669724770642203, + "acc_stderr": 0.018224078117299102, + "acc_norm": 0.23669724770642203, + "acc_norm_stderr": 0.018224078117299102 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.039325376803928704, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.039325376803928704 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.02564686309713791, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.02564686309713791 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.3305785123966942, + "acc_stderr": 0.04294340845212094, + "acc_norm": 0.3305785123966942, + "acc_norm_stderr": 0.04294340845212094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.17763157894736842, + "acc_stderr": 0.03110318238312337, + "acc_norm": 0.17763157894736842, + "acc_norm_stderr": 0.03110318238312337 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.27124183006535946, + "acc_stderr": 0.017986615304030312, + "acc_norm": 0.27124183006535946, + "acc_norm_stderr": 0.017986615304030312 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.28368794326241137, + "acc_stderr": 0.026891709428343957, + "acc_norm": 0.28368794326241137, + "acc_norm_stderr": 0.026891709428343957 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.26785714285714285, + "acc_stderr": 0.04203277291467764, + "acc_norm": 0.26785714285714285, + "acc_norm_stderr": 0.04203277291467764 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.028353212866863434, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.028353212866863434 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 
0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.2610294117647059, + "acc_stderr": 0.026679252270103124, + "acc_norm": 0.2610294117647059, + "acc_norm_stderr": 0.026679252270103124 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.23673469387755103, + "acc_stderr": 0.027212835884073146, + "acc_norm": 0.23673469387755103, + "acc_norm_stderr": 0.027212835884073146 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.26582278481012656, + "acc_stderr": 0.028756799629658342, + "acc_norm": 0.26582278481012656, + "acc_norm_stderr": 0.028756799629658342 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.24641460234680573, + "acc_stderr": 0.011005971399927227, + "acc_norm": 0.24641460234680573, + "acc_norm_stderr": 0.011005971399927227 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.22058823529411764, + "acc_stderr": 0.02910225438967409, + "acc_norm": 0.22058823529411764, + "acc_norm_stderr": 0.02910225438967409 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.22424242424242424, + "acc_stderr": 0.032568666616811015, + "acc_norm": 0.22424242424242424, + "acc_norm_stderr": 0.032568666616811015 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.23378212974296206, + "mc1_stderr": 0.014816195991931586, + "mc2": 0.4292237253037698, + "mc2_stderr": 0.016355958546968995 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.11452184179456906, + "acc_stderr": 0.010948330698808925, + "acc_norm": 0.1959858323494687, + "acc_norm_stderr": 
0.013647685567768858 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + 
"harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "jb723/LLaMA2-en-ko-7B-model", + "model_sha": "24e455bbf4039f360a37833583c335582d2c6030", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/jb723/LLaMA2_crosslingual_transfer_1/result_2023-10-26 05:14:36.json b/jb723/LLaMA2_crosslingual_transfer_1/result_2023-10-26 05:14:36.json new file mode 100644 index 0000000000000000000000000000000000000000..b8859f59612c4c8e7b9bf1fc7899f6e6e6d6b596 --- /dev/null +++ b/jb723/LLaMA2_crosslingual_transfer_1/result_2023-10-26 05:14:36.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2226962457337884, + "acc_stderr": 0.012158314774829931, + "acc_norm": 0.2687713310580205, + "acc_norm_stderr": 0.012955065963710695 + }, + "harness|ko_hellaswag|10": { + "acc": 0.2657837084246166, + "acc_stderr": 0.004408468107262734, + "acc_norm": 0.2920732921728739, + "acc_norm_stderr": 0.004537865171414028 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.38011695906432746, + "acc_stderr": 0.03722965741385539, + "acc_norm": 0.38011695906432746, + "acc_norm_stderr": 0.03722965741385539 + 
}, + "harness|ko_mmlu_management|5": { + "acc": 0.20388349514563106, + "acc_stderr": 0.0398913985953177, + "acc_norm": 0.20388349514563106, + "acc_norm_stderr": 0.0398913985953177 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.280970625798212, + "acc_stderr": 0.016073127851221232, + "acc_norm": 0.280970625798212, + "acc_norm_stderr": 0.016073127851221232 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.18518518518518517, + "acc_stderr": 0.0335567721631314, + "acc_norm": 0.18518518518518517, + "acc_norm_stderr": 0.0335567721631314 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932268, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932268 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2680851063829787, + "acc_stderr": 0.02895734278834235, + "acc_norm": 0.2680851063829787, + "acc_norm_stderr": 0.02895734278834235 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.27710843373493976, + "acc_stderr": 0.03484331592680588, + "acc_norm": 0.27710843373493976, + "acc_norm_stderr": 0.03484331592680588 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2315112540192926, + "acc_stderr": 0.023956532766639133, + "acc_norm": 0.2315112540192926, + "acc_norm_stderr": 0.023956532766639133 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3811659192825112, + "acc_stderr": 0.03259625118416827, + "acc_norm": 0.3811659192825112, + "acc_norm_stderr": 0.03259625118416827 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3282442748091603, + "acc_stderr": 0.04118438565806298, + "acc_norm": 0.3282442748091603, + "acc_norm_stderr": 0.04118438565806298 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.21717171717171718, + "acc_stderr": 0.02937661648494563, + "acc_norm": 0.21717171717171718, + "acc_norm_stderr": 0.02937661648494563 + }, + 
"harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.25517241379310346, + "acc_stderr": 0.03632984052707842, + "acc_norm": 0.25517241379310346, + "acc_norm_stderr": 0.03632984052707842 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.19607843137254902, + "acc_stderr": 0.03950581861179964, + "acc_norm": 0.19607843137254902, + "acc_norm_stderr": 0.03950581861179964 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.2689075630252101, + "acc_stderr": 0.028801392193631276, + "acc_norm": 0.2689075630252101, + "acc_norm_stderr": 0.028801392193631276 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.22564102564102564, + "acc_stderr": 0.021193632525148533, + "acc_norm": 0.22564102564102564, + "acc_norm_stderr": 0.021193632525148533 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.21, + "acc_stderr": 0.04093601807403326, + "acc_norm": 0.21, + "acc_norm_stderr": 0.04093601807403326 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.26851851851851855, + "acc_stderr": 0.04284467968052191, + "acc_norm": 0.26851851851851855, + "acc_norm_stderr": 0.04284467968052191 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.18719211822660098, + "acc_stderr": 0.027444924966882618, + "acc_norm": 0.18719211822660098, + "acc_norm_stderr": 0.027444924966882618 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2709677419354839, + "acc_stderr": 0.02528441611490016, + "acc_norm": 0.2709677419354839, + "acc_norm_stderr": 0.02528441611490016 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.37606837606837606, + "acc_stderr": 0.03173393632969482, + "acc_norm": 0.37606837606837606, + "acc_norm_stderr": 0.03173393632969482 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2339622641509434, + "acc_stderr": 0.02605529690115292, + "acc_norm": 
0.2339622641509434, + "acc_norm_stderr": 0.02605529690115292 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.24545454545454545, + "acc_stderr": 0.041220665028782855, + "acc_norm": 0.24545454545454545, + "acc_norm_stderr": 0.041220665028782855 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.1962962962962963, + "acc_stderr": 0.024217421327417162, + "acc_norm": 0.1962962962962963, + "acc_norm_stderr": 0.024217421327417162 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.23178807947019867, + "acc_stderr": 0.03445406271987054, + "acc_norm": 0.23178807947019867, + "acc_norm_stderr": 0.03445406271987054 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.3383084577114428, + "acc_stderr": 0.03345563070339191, + "acc_norm": 0.3383084577114428, + "acc_norm_stderr": 0.03345563070339191 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.1907514450867052, + "acc_stderr": 0.029957851329869327, + "acc_norm": 0.1907514450867052, + "acc_norm_stderr": 0.029957851329869327 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2037037037037037, + "acc_stderr": 0.020742740560122666, + "acc_norm": 0.2037037037037037, + "acc_norm_stderr": 0.020742740560122666 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2638888888888889, + "acc_stderr": 0.03685651095897532, + "acc_norm": 0.2638888888888889, + "acc_norm_stderr": 0.03685651095897532 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.19, + "acc_stderr": 0.03942772444036623, + "acc_norm": 0.19, + "acc_norm_stderr": 0.03942772444036623 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2976878612716763, + "acc_stderr": 0.024617055388676985, + "acc_norm": 0.2976878612716763, + "acc_norm_stderr": 0.024617055388676985 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2392638036809816, + 
"acc_stderr": 0.033519538795212696, + "acc_norm": 0.2392638036809816, + "acc_norm_stderr": 0.033519538795212696 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.27469135802469136, + "acc_stderr": 0.024836057868294677, + "acc_norm": 0.27469135802469136, + "acc_norm_stderr": 0.024836057868294677 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.24352331606217617, + "acc_stderr": 0.030975436386845415, + "acc_norm": 0.24352331606217617, + "acc_norm_stderr": 0.030975436386845415 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.21052631578947367, + "acc_stderr": 0.038351539543994194, + "acc_norm": 0.21052631578947367, + "acc_norm_stderr": 0.038351539543994194 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.22385321100917432, + "acc_stderr": 0.017871217767790215, + "acc_norm": 0.22385321100917432, + "acc_norm_stderr": 0.017871217767790215 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.0404061017820884, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.0404061017820884 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.24836601307189543, + "acc_stderr": 0.02473998135511359, + "acc_norm": 0.24836601307189543, + "acc_norm_stderr": 0.02473998135511359 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2809917355371901, + "acc_stderr": 0.04103203830514512, + "acc_norm": 0.2809917355371901, + "acc_norm_stderr": 0.04103203830514512 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.21710526315789475, + "acc_stderr": 0.03355045304882924, + "acc_norm": 0.21710526315789475, + "acc_norm_stderr": 0.03355045304882924 + }, + 
"harness|ko_mmlu_professional_psychology|5": { + "acc": 0.26143790849673204, + "acc_stderr": 0.017776947157528044, + "acc_norm": 0.26143790849673204, + "acc_norm_stderr": 0.017776947157528044 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.24822695035460993, + "acc_stderr": 0.025770015644290392, + "acc_norm": 0.24822695035460993, + "acc_norm_stderr": 0.025770015644290392 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3125, + "acc_stderr": 0.043994650575715215, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.043994650575715215 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2037037037037037, + "acc_stderr": 0.02746740180405799, + "acc_norm": 0.2037037037037037, + "acc_norm_stderr": 0.02746740180405799 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23798882681564246, + "acc_stderr": 0.014242630070574892, + "acc_norm": 0.23798882681564246, + "acc_norm_stderr": 0.014242630070574892 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.20955882352941177, + "acc_stderr": 0.02472311040767704, + "acc_norm": 0.20955882352941177, + "acc_norm_stderr": 0.02472311040767704 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2571428571428571, + "acc_stderr": 0.02797982353874455, + "acc_norm": 0.2571428571428571, + "acc_norm_stderr": 0.02797982353874455 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.31645569620253167, + "acc_stderr": 0.03027497488021897, + "acc_norm": 0.31645569620253167, + "acc_norm_stderr": 0.03027497488021897 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.24902216427640156, + "acc_stderr": 0.01104489226404077, + "acc_norm": 
0.24902216427640156, + "acc_norm_stderr": 0.01104489226404077 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.20098039215686275, + "acc_stderr": 0.028125972265654362, + "acc_norm": 0.20098039215686275, + "acc_norm_stderr": 0.028125972265654362 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.23030303030303031, + "acc_stderr": 0.03287666758603488, + "acc_norm": 0.23030303030303031, + "acc_norm_stderr": 0.03287666758603488 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24357405140758873, + "mc1_stderr": 0.015026354824910782, + "mc2": 0.49334428566474076, + "mc2_stderr": 0.016873715132849066 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.08146399055489964, + "acc_stderr": 0.009404717441946268, + "acc_norm": 0.32113341204250295, + "acc_norm_stderr": 0.016052762579111562 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "jb723/LLaMA2_crosslingual_transfer_1", + "model_sha": "ece29b636ef0b0c4b6d945ed66e97510b3ad6b0a", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/jb723/cross_lingual_epoch2/result_2023-10-26 12:25:31.json b/jb723/cross_lingual_epoch2/result_2023-10-26 12:25:31.json new file 
mode 100644 index 0000000000000000000000000000000000000000..93698f8438aeb4967cde29429905b3fcce1cd766 --- /dev/null +++ b/jb723/cross_lingual_epoch2/result_2023-10-26 12:25:31.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2226962457337884, + "acc_stderr": 0.012158314774829928, + "acc_norm": 0.2841296928327645, + "acc_norm_stderr": 0.013179442447653887 + }, + "harness|ko_hellaswag|10": { + "acc": 0.26628161720772753, + "acc_stderr": 0.004411099046251013, + "acc_norm": 0.29107747460665206, + "acc_norm_stderr": 0.004533307758521328 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.03615507630310935, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.03615507630310935 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.20388349514563106, + "acc_stderr": 0.039891398595317706, + "acc_norm": 0.20388349514563106, + "acc_norm_stderr": 0.039891398595317706 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3205619412515964, + "acc_stderr": 0.016688893310803775, + "acc_norm": 0.3205619412515964, + "acc_norm_stderr": 0.016688893310803775 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.03944624162501116, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.03944624162501116 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2851063829787234, + "acc_stderr": 0.029513196625539355, + "acc_norm": 0.2851063829787234, + "acc_norm_stderr": 0.029513196625539355 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.27710843373493976, + "acc_stderr": 0.034843315926805875, + "acc_norm": 0.27710843373493976, + "acc_norm_stderr": 0.034843315926805875 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3504823151125402, + "acc_stderr": 0.027098652621301757, + "acc_norm": 0.3504823151125402, + 
"acc_norm_stderr": 0.027098652621301757 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3632286995515695, + "acc_stderr": 0.032277904428505, + "acc_norm": 0.3632286995515695, + "acc_norm_stderr": 0.032277904428505 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3053435114503817, + "acc_stderr": 0.0403931497872456, + "acc_norm": 0.3053435114503817, + "acc_norm_stderr": 0.0403931497872456 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421296, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421296 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.2878787878787879, + "acc_stderr": 0.03225883512300993, + "acc_norm": 0.2878787878787879, + "acc_norm_stderr": 0.03225883512300993 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.30344827586206896, + "acc_stderr": 0.038312260488503336, + "acc_norm": 0.30344827586206896, + "acc_norm_stderr": 0.038312260488503336 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.04023382273617746, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.04023382273617746 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3235294117647059, + "acc_stderr": 0.03038835355188685, + "acc_norm": 0.3235294117647059, + "acc_norm_stderr": 0.03038835355188685 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2153846153846154, + "acc_stderr": 0.020843034557462878, + "acc_norm": 0.2153846153846154, + "acc_norm_stderr": 0.020843034557462878 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.04414343666854932, + "acc_norm": 
0.2962962962962963, + "acc_norm_stderr": 0.04414343666854932 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.26108374384236455, + "acc_stderr": 0.030903796952114475, + "acc_norm": 0.26108374384236455, + "acc_norm_stderr": 0.030903796952114475 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3, + "acc_stderr": 0.026069362295335134, + "acc_norm": 0.3, + "acc_norm_stderr": 0.026069362295335134 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.43162393162393164, + "acc_stderr": 0.0324483553531149, + "acc_norm": 0.43162393162393164, + "acc_norm_stderr": 0.0324483553531149 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.26037735849056604, + "acc_stderr": 0.027008766090708076, + "acc_norm": 0.26037735849056604, + "acc_norm_stderr": 0.027008766090708076 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.3181818181818182, + "acc_stderr": 0.04461272175910509, + "acc_norm": 0.3181818181818182, + "acc_norm_stderr": 0.04461272175910509 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24444444444444444, + "acc_stderr": 0.02620276653465215, + "acc_norm": 0.24444444444444444, + "acc_norm_stderr": 0.02620276653465215 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + "acc_stderr": 0.03734535676787198, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.03734535676787198 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.36318407960199006, + "acc_stderr": 0.034005985055990146, + "acc_norm": 0.36318407960199006, + "acc_norm_stderr": 0.034005985055990146 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.24277456647398843, + "acc_stderr": 0.0326926380614177, + "acc_norm": 0.24277456647398843, + "acc_norm_stderr": 0.0326926380614177 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.25396825396825395, + "acc_stderr": 0.022418042891113942, + "acc_norm": 0.25396825396825395, + "acc_norm_stderr": 0.022418042891113942 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 
0.3541666666666667, + "acc_stderr": 0.039994111357535424, + "acc_norm": 0.3541666666666667, + "acc_norm_stderr": 0.039994111357535424 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036845, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036845 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.315028901734104, + "acc_stderr": 0.025009313790069706, + "acc_norm": 0.315028901734104, + "acc_norm_stderr": 0.025009313790069706 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.294478527607362, + "acc_stderr": 0.03581165790474082, + "acc_norm": 0.294478527607362, + "acc_norm_stderr": 0.03581165790474082 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.33024691358024694, + "acc_stderr": 0.026168298456732846, + "acc_norm": 0.33024691358024694, + "acc_norm_stderr": 0.026168298456732846 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.22797927461139897, + "acc_stderr": 0.03027690994517826, + "acc_norm": 0.22797927461139897, + "acc_norm_stderr": 0.03027690994517826 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2894736842105263, + "acc_stderr": 0.04266339443159394, + "acc_norm": 0.2894736842105263, + "acc_norm_stderr": 0.04266339443159394 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.28256880733944956, + "acc_stderr": 0.01930424349770715, + "acc_norm": 0.28256880733944956, + "acc_norm_stderr": 0.01930424349770715 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.25396825396825395, + "acc_stderr": 0.03893259610604673, + "acc_norm": 0.25396825396825395, + "acc_norm_stderr": 0.03893259610604673 + }, + "harness|ko_mmlu_nutrition|5": { + 
"acc": 0.30392156862745096, + "acc_stderr": 0.026336613469046637, + "acc_norm": 0.30392156862745096, + "acc_norm_stderr": 0.026336613469046637 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.34, + "acc_stderr": 0.047609522856952344, + "acc_norm": 0.34, + "acc_norm_stderr": 0.047609522856952344 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.32231404958677684, + "acc_stderr": 0.04266416363352168, + "acc_norm": 0.32231404958677684, + "acc_norm_stderr": 0.04266416363352168 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.27631578947368424, + "acc_stderr": 0.03639057569952924, + "acc_norm": 0.27631578947368424, + "acc_norm_stderr": 0.03639057569952924 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.30718954248366015, + "acc_stderr": 0.01866335967146366, + "acc_norm": 0.30718954248366015, + "acc_norm_stderr": 0.01866335967146366 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.25177304964539005, + "acc_stderr": 0.0258921511567094, + "acc_norm": 0.25177304964539005, + "acc_norm_stderr": 0.0258921511567094 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.26785714285714285, + "acc_stderr": 0.04203277291467764, + "acc_norm": 0.26785714285714285, + "acc_norm_stderr": 0.04203277291467764 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.18518518518518517, + "acc_stderr": 0.026491914727355154, + "acc_norm": 0.18518518518518517, + "acc_norm_stderr": 0.026491914727355154 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2681564245810056, + "acc_stderr": 0.014816119635317005, + "acc_norm": 0.2681564245810056, + "acc_norm_stderr": 0.014816119635317005 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932269, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932269 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + 
"harness|ko_mmlu_professional_medicine|5": { + "acc": 0.1948529411764706, + "acc_stderr": 0.024060599423487424, + "acc_norm": 0.1948529411764706, + "acc_norm_stderr": 0.024060599423487424 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3306122448979592, + "acc_stderr": 0.030116426296540585, + "acc_norm": 0.3306122448979592, + "acc_norm_stderr": 0.030116426296540585 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2911392405063291, + "acc_stderr": 0.029571601065753374, + "acc_norm": 0.2911392405063291, + "acc_norm_stderr": 0.029571601065753374 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.24902216427640156, + "acc_stderr": 0.01104489226404077, + "acc_norm": 0.24902216427640156, + "acc_norm_stderr": 0.01104489226404077 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.18627450980392157, + "acc_stderr": 0.027325470966716305, + "acc_norm": 0.18627450980392157, + "acc_norm_stderr": 0.027325470966716305 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2787878787878788, + "acc_stderr": 0.03501438706296781, + "acc_norm": 0.2787878787878788, + "acc_norm_stderr": 0.03501438706296781 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.23255813953488372, + "mc1_stderr": 0.014789157531080522, + "mc2": 0.494893188252647, + "mc2_stderr": 0.016817822778795313 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.09681227863046045, + "acc_stderr": 0.010166443512074711, + "acc_norm": 0.3612750885478158, + "acc_norm_stderr": 0.016515463022411997 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + 
"harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + 
"harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "jb723/cross_lingual_epoch2", + "model_sha": "aa1654ae948febe0f7cf3e27d5f81a8df7a58118", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/jb723/llama2-ko-7B-model/result_2023-09-27 11:00:22.json b/jb723/llama2-ko-7B-model/result_2023-09-27 11:00:22.json new file mode 100644 index 0000000000000000000000000000000000000000..4c70ff55d7f3259fbfa4284535dab5e62f5f0475 --- /dev/null +++ b/jb723/llama2-ko-7B-model/result_2023-09-27 11:00:22.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2226962457337884, + "acc_stderr": 0.012158314774829948, + "acc_norm": 0.2627986348122867, + "acc_norm_stderr": 0.012862523175351331 + }, + "harness|ko_hellaswag|10": { + "acc": 0.2726548496315475, + "acc_stderr": 0.004444146875436292, + "acc_norm": 0.29635530770762797, + "acc_norm_stderr": 0.004557163175885563 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.2982456140350877, + "acc_stderr": 0.03508771929824561, + "acc_norm": 0.2982456140350877, + "acc_norm_stderr": 0.03508771929824561 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.3106796116504854, + "acc_stderr": 0.04582124160161549, + "acc_norm": 0.3106796116504854, + "acc_norm_stderr": 0.04582124160161549 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2848020434227331, + "acc_stderr": 0.016139174096522553, + "acc_norm": 0.2848020434227331, + "acc_norm_stderr": 0.016139174096522553 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.03785714465066654, + "acc_norm": 0.25925925925925924, 
+ "acc_norm_stderr": 0.03785714465066654 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3276595744680851, + "acc_stderr": 0.030683020843231008, + "acc_norm": 0.3276595744680851, + "acc_norm_stderr": 0.030683020843231008 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.2891566265060241, + "acc_stderr": 0.03529486801511115, + "acc_norm": 0.2891566265060241, + "acc_norm_stderr": 0.03529486801511115 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3440514469453376, + "acc_stderr": 0.026981478043648026, + "acc_norm": 0.3440514469453376, + "acc_norm_stderr": 0.026981478043648026 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.34977578475336324, + "acc_stderr": 0.03200736719484503, + "acc_norm": 0.34977578475336324, + "acc_norm_stderr": 0.03200736719484503 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2366412213740458, + "acc_stderr": 0.037276735755969195, + "acc_norm": 0.2366412213740458, + "acc_norm_stderr": 0.037276735755969195 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.22727272727272727, + "acc_stderr": 0.029857515673386417, + "acc_norm": 0.22727272727272727, + "acc_norm_stderr": 0.029857515673386417 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.31724137931034485, + "acc_stderr": 0.038783523721386215, + "acc_norm": 0.31724137931034485, + "acc_norm_stderr": 0.038783523721386215 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.17647058823529413, + "acc_stderr": 0.03793281185307811, + "acc_norm": 0.17647058823529413, + "acc_norm_stderr": 0.03793281185307811 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.2689075630252101, + "acc_stderr": 0.028801392193631276, 
+ "acc_norm": 0.2689075630252101, + "acc_norm_stderr": 0.028801392193631276 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2512820512820513, + "acc_stderr": 0.021992016662370568, + "acc_norm": 0.2512820512820513, + "acc_norm_stderr": 0.021992016662370568 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.04643454608906275, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.04643454608906275 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.26108374384236455, + "acc_stderr": 0.030903796952114454, + "acc_norm": 0.26108374384236455, + "acc_norm_stderr": 0.030903796952114454 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2709677419354839, + "acc_stderr": 0.02528441611490016, + "acc_norm": 0.2709677419354839, + "acc_norm_stderr": 0.02528441611490016 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.3418803418803419, + "acc_stderr": 0.03107502852650775, + "acc_norm": 0.3418803418803419, + "acc_norm_stderr": 0.03107502852650775 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2943396226415094, + "acc_stderr": 0.028049186315695248, + "acc_norm": 0.2943396226415094, + "acc_norm_stderr": 0.028049186315695248 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.3181818181818182, + "acc_stderr": 0.044612721759105065, + "acc_norm": 0.3181818181818182, + "acc_norm_stderr": 0.044612721759105065 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.027840811495871927, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.027840811495871927 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 
0.2582781456953642, + "acc_stderr": 0.035737053147634576, + "acc_norm": 0.2582781456953642, + "acc_norm_stderr": 0.035737053147634576 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.38308457711442784, + "acc_stderr": 0.034375193373382504, + "acc_norm": 0.38308457711442784, + "acc_norm_stderr": 0.034375193373382504 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.24855491329479767, + "acc_stderr": 0.03295304696818318, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 0.03295304696818318 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2724867724867725, + "acc_stderr": 0.022930973071633345, + "acc_norm": 0.2724867724867725, + "acc_norm_stderr": 0.022930973071633345 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.25, + "acc_stderr": 0.03621034121889507, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03621034121889507 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932269, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932269 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.315028901734104, + "acc_stderr": 0.025009313790069692, + "acc_norm": 0.315028901734104, + "acc_norm_stderr": 0.025009313790069692 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3006134969325153, + "acc_stderr": 0.03602511318806771, + "acc_norm": 0.3006134969325153, + "acc_norm_stderr": 0.03602511318806771 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.30864197530864196, + "acc_stderr": 0.02570264026060375, + "acc_norm": 0.30864197530864196, + "acc_norm_stderr": 0.02570264026060375 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 
0.3160621761658031, + "acc_stderr": 0.03355397369686173, + "acc_norm": 0.3160621761658031, + "acc_norm_stderr": 0.03355397369686173 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.041857744240220575, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.041857744240220575 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.23853211009174313, + "acc_stderr": 0.01827257581023186, + "acc_norm": 0.23853211009174313, + "acc_norm_stderr": 0.01827257581023186 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.04040610178208841, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.04040610178208841 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2908496732026144, + "acc_stderr": 0.026004800363952113, + "acc_norm": 0.2908496732026144, + "acc_norm_stderr": 0.026004800363952113 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816508, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816508 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.512396694214876, + "acc_stderr": 0.04562951548180765, + "acc_norm": 0.512396694214876, + "acc_norm_stderr": 0.04562951548180765 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.2236842105263158, + "acc_stderr": 0.03391160934343604, + "acc_norm": 0.2236842105263158, + "acc_norm_stderr": 0.03391160934343604 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2696078431372549, + "acc_stderr": 0.017952449196987866, + "acc_norm": 0.2696078431372549, + "acc_norm_stderr": 0.017952449196987866 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.26595744680851063, + "acc_stderr": 0.026358065698880585, + "acc_norm": 0.26595744680851063, + "acc_norm_stderr": 0.026358065698880585 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.29464285714285715, + "acc_stderr": 0.043270409325787296, + "acc_norm": 0.29464285714285715, + "acc_norm_stderr": 0.043270409325787296 + 
}, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.19907407407407407, + "acc_stderr": 0.027232298462690218, + "acc_norm": 0.19907407407407407, + "acc_norm_stderr": 0.027232298462690218 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.22058823529411764, + "acc_stderr": 0.02518778666022727, + "acc_norm": 0.22058823529411764, + "acc_norm_stderr": 0.02518778666022727 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.20408163265306123, + "acc_stderr": 0.025801283475090496, + "acc_norm": 0.20408163265306123, + "acc_norm_stderr": 0.025801283475090496 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.3037974683544304, + "acc_stderr": 0.029936696387138598, + "acc_norm": 0.3037974683544304, + "acc_norm_stderr": 0.029936696387138598 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.24837027379400262, + "acc_stderr": 0.011035212598034494, + "acc_norm": 0.24837027379400262, + "acc_norm_stderr": 0.011035212598034494 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.24019607843137256, + "acc_stderr": 0.02998373305591362, + "acc_norm": 0.24019607843137256, + "acc_norm_stderr": 0.02998373305591362 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.24242424242424243, + "acc_stderr": 0.033464098810559534, + "acc_norm": 0.24242424242424243, + "acc_norm_stderr": 0.033464098810559534 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2594859241126071, + "mc1_stderr": 
0.015345409485557966, + "mc2": 0.43443146146429873, + "mc2_stderr": 0.01580310882533787 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.11452184179456906, + "acc_stderr": 0.010948330698808925, + "acc_norm": 0.1959858323494687, + "acc_norm_stderr": 0.013647685567768858 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + 
"harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "jb723/llama2-ko-7B-model", + "model_sha": "24e455bbf4039f360a37833583c335582d2c6030", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/jcwee0873/llama3-8b-cv-swap-v0.1/result_2024-05-16 09:02:10.json b/jcwee0873/llama3-8b-cv-swap-v0.1/result_2024-05-16 09:02:10.json new file mode 100644 index 0000000000000000000000000000000000000000..592f42650de4e9034cd4c6b241f44c2848e5090a --- /dev/null +++ b/jcwee0873/llama3-8b-cv-swap-v0.1/result_2024-05-16 09:02:10.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3609215017064846, + "acc_stderr": 0.014034761386175452, + "acc_norm": 0.43430034129692835, + "acc_norm_stderr": 0.01448470304885736 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3732324238199562, + "acc_stderr": 0.00482674616083019, + "acc_norm": 0.47301334395538736, 
+ "acc_norm_stderr": 0.004982508198584264 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4502923976608187, + "acc_stderr": 0.03815827365913235, + "acc_norm": 0.4502923976608187, + "acc_norm_stderr": 0.03815827365913235 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5436893203883495, + "acc_stderr": 0.04931801994220416, + "acc_norm": 0.5436893203883495, + "acc_norm_stderr": 0.04931801994220416 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.438058748403576, + "acc_stderr": 0.017742232238257227, + "acc_norm": 0.438058748403576, + "acc_norm_stderr": 0.017742232238257227 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4222222222222222, + "acc_stderr": 0.04266763404099582, + "acc_norm": 0.4222222222222222, + "acc_norm_stderr": 0.04266763404099582 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.46382978723404256, + "acc_stderr": 0.03260038511835771, + "acc_norm": 0.46382978723404256, + "acc_norm_stderr": 0.03260038511835771 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4036144578313253, + "acc_stderr": 0.03819486140758397, + "acc_norm": 0.4036144578313253, + "acc_norm_stderr": 0.03819486140758397 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4533762057877814, + "acc_stderr": 0.02827435985489424, + "acc_norm": 0.4533762057877814, + "acc_norm_stderr": 0.02827435985489424 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4170403587443946, + "acc_stderr": 0.03309266936071721, + "acc_norm": 0.4170403587443946, + "acc_norm_stderr": 0.03309266936071721 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5038167938931297, + "acc_stderr": 0.043851623256015534, + "acc_norm": 0.5038167938931297, + "acc_norm_stderr": 0.043851623256015534 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 
0.05009082659620332 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.46464646464646464, + "acc_stderr": 0.035534363688280626, + "acc_norm": 0.46464646464646464, + "acc_norm_stderr": 0.035534363688280626 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.47586206896551725, + "acc_stderr": 0.041618085035015295, + "acc_norm": 0.47586206896551725, + "acc_norm_stderr": 0.041618085035015295 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.04533838195929776, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.04533838195929776 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.49159663865546216, + "acc_stderr": 0.03247390276569669, + "acc_norm": 0.49159663865546216, + "acc_norm_stderr": 0.03247390276569669 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.44358974358974357, + "acc_stderr": 0.025189149894764187, + "acc_norm": 0.44358974358974357, + "acc_norm_stderr": 0.025189149894764187 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5740740740740741, + "acc_stderr": 0.047803436269367894, + "acc_norm": 0.5740740740740741, + "acc_norm_stderr": 0.047803436269367894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3645320197044335, + "acc_stderr": 0.0338640574606209, + "acc_norm": 0.3645320197044335, + "acc_norm_stderr": 0.0338640574606209 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.47419354838709676, + "acc_stderr": 0.028406095057653326, + "acc_norm": 0.47419354838709676, + "acc_norm_stderr": 0.028406095057653326 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6666666666666666, + "acc_stderr": 
0.030882736974138646, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.030882736974138646 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4830188679245283, + "acc_stderr": 0.030755120364119898, + "acc_norm": 0.4830188679245283, + "acc_norm_stderr": 0.030755120364119898 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.41818181818181815, + "acc_stderr": 0.04724577405731571, + "acc_norm": 0.41818181818181815, + "acc_norm_stderr": 0.04724577405731571 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.337037037037037, + "acc_stderr": 0.028820884666253252, + "acc_norm": 0.337037037037037, + "acc_norm_stderr": 0.028820884666253252 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3443708609271523, + "acc_stderr": 0.03879687024073327, + "acc_norm": 0.3443708609271523, + "acc_norm_stderr": 0.03879687024073327 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5671641791044776, + "acc_stderr": 0.0350349092367328, + "acc_norm": 0.5671641791044776, + "acc_norm_stderr": 0.0350349092367328 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4161849710982659, + "acc_stderr": 0.03758517775404947, + "acc_norm": 0.4161849710982659, + "acc_norm_stderr": 0.03758517775404947 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3492063492063492, + "acc_stderr": 0.024552292209342654, + "acc_norm": 0.3492063492063492, + "acc_norm_stderr": 0.024552292209342654 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3541666666666667, + "acc_stderr": 0.039994111357535424, + "acc_norm": 0.3541666666666667, + "acc_norm_stderr": 0.039994111357535424 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 
0.4653179190751445, + "acc_stderr": 0.026854257928258893, + "acc_norm": 0.4653179190751445, + "acc_norm_stderr": 0.026854257928258893 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4539877300613497, + "acc_stderr": 0.0391170190467718, + "acc_norm": 0.4539877300613497, + "acc_norm_stderr": 0.0391170190467718 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.44135802469135804, + "acc_stderr": 0.027628737155668777, + "acc_norm": 0.44135802469135804, + "acc_norm_stderr": 0.027628737155668777 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.49740932642487046, + "acc_stderr": 0.03608390745384487, + "acc_norm": 0.49740932642487046, + "acc_norm_stderr": 0.03608390745384487 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2982456140350877, + "acc_stderr": 0.043036840335373173, + "acc_norm": 0.2982456140350877, + "acc_norm_stderr": 0.043036840335373173 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.45137614678899085, + "acc_stderr": 0.02133571471126879, + "acc_norm": 0.45137614678899085, + "acc_norm_stderr": 0.02133571471126879 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.373015873015873, + "acc_stderr": 0.04325506042017087, + "acc_norm": 0.373015873015873, + "acc_norm_stderr": 0.04325506042017087 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5359477124183006, + "acc_stderr": 0.028555827516528784, + "acc_norm": 0.5359477124183006, + "acc_norm_stderr": 0.028555827516528784 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5950413223140496, + "acc_stderr": 0.044811377559424694, + "acc_norm": 0.5950413223140496, + "acc_norm_stderr": 0.044811377559424694 + }, + 
"harness|ko_mmlu_astronomy|5": { + "acc": 0.39473684210526316, + "acc_stderr": 0.03977749934622074, + "acc_norm": 0.39473684210526316, + "acc_norm_stderr": 0.03977749934622074 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3562091503267974, + "acc_stderr": 0.019373332420724514, + "acc_norm": 0.3562091503267974, + "acc_norm_stderr": 0.019373332420724514 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3120567375886525, + "acc_stderr": 0.02764012054516993, + "acc_norm": 0.3120567375886525, + "acc_norm_stderr": 0.02764012054516993 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25892857142857145, + "acc_stderr": 0.04157751539865629, + "acc_norm": 0.25892857142857145, + "acc_norm_stderr": 0.04157751539865629 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.33796296296296297, + "acc_stderr": 0.03225941352631296, + "acc_norm": 0.33796296296296297, + "acc_norm_stderr": 0.03225941352631296 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2581005586592179, + "acc_stderr": 0.014635185616527826, + "acc_norm": 0.2581005586592179, + "acc_norm_stderr": 0.014635185616527826 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.35294117647058826, + "acc_stderr": 0.029029422815681404, + "acc_norm": 0.35294117647058826, + "acc_norm_stderr": 0.029029422815681404 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5102040816326531, + "acc_stderr": 0.03200255347893783, + "acc_norm": 0.5102040816326531, + "acc_norm_stderr": 0.03200255347893783 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5021097046413502, + "acc_stderr": 0.032546938018020076, + "acc_norm": 0.5021097046413502, + 
"acc_norm_stderr": 0.032546938018020076 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.34876140808344197, + "acc_stderr": 0.012172035157127116, + "acc_norm": 0.34876140808344197, + "acc_norm_stderr": 0.012172035157127116 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.43137254901960786, + "acc_stderr": 0.03476099060501637, + "acc_norm": 0.43137254901960786, + "acc_norm_stderr": 0.03476099060501637 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.49696969696969695, + "acc_stderr": 0.03904272341431857, + "acc_norm": 0.49696969696969695, + "acc_norm_stderr": 0.03904272341431857 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2668298653610771, + "mc1_stderr": 0.015483691939237265, + "mc2": 0.4576878615093378, + "mc2_stderr": 0.01625662558382081 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2939787485242031, + "acc_stderr": 0.015663242569091112, + "acc_norm": 0.3270365997638725, + "acc_norm_stderr": 0.01612904748545703 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + 
"harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "jcwee0873/llama3-8b-cv-swap-v0.1", + "model_sha": "fa9449e720feb488a02c3114afd03643749325dc", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ 
No newline at end of file diff --git a/jeonsworld/CarbonVillain-10.7B-v1/result_2023-12-31 06:47:29.json b/jeonsworld/CarbonVillain-10.7B-v1/result_2023-12-31 06:47:29.json new file mode 100644 index 0000000000000000000000000000000000000000..df38298049cc197c9e31a3c4bc4ac3e4bfeb7b3a --- /dev/null +++ b/jeonsworld/CarbonVillain-10.7B-v1/result_2023-12-31 06:47:29.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4564846416382253, + "acc_stderr": 0.01455594976049644, + "acc_norm": 0.4991467576791809, + "acc_norm_stderr": 0.014611369529813279 + }, + "harness|ko_hellaswag|10": { + "acc": 0.44592710615415254, + "acc_stderr": 0.004960516570284905, + "acc_norm": 0.6064528978291177, + "acc_norm_stderr": 0.004875379352079818 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6374269005847953, + "acc_stderr": 0.0368713061556206, + "acc_norm": 0.6374269005847953, + "acc_norm_stderr": 0.0368713061556206 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.7184466019417476, + "acc_stderr": 0.04453254836326468, + "acc_norm": 0.7184466019417476, + "acc_norm_stderr": 0.04453254836326468 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6819923371647509, + "acc_stderr": 0.016653486275615418, + "acc_norm": 0.6819923371647509, + "acc_norm_stderr": 0.016653486275615418 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.042925967182569816, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.042925967182569816 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.5191489361702127, + "acc_stderr": 0.0326620429906468, + "acc_norm": 0.5191489361702127, + "acc_norm_stderr": 0.0326620429906468 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4819277108433735, + "acc_stderr": 0.038899512528272166, + "acc_norm": 0.4819277108433735, + 
"acc_norm_stderr": 0.038899512528272166 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6109324758842444, + "acc_stderr": 0.027690337536485372, + "acc_norm": 0.6109324758842444, + "acc_norm_stderr": 0.027690337536485372 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.57847533632287, + "acc_stderr": 0.03314190222110658, + "acc_norm": 0.57847533632287, + "acc_norm_stderr": 0.03314190222110658 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5801526717557252, + "acc_stderr": 0.043285772152629735, + "acc_norm": 0.5801526717557252, + "acc_norm_stderr": 0.043285772152629735 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465918, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465918 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7575757575757576, + "acc_stderr": 0.030532892233932036, + "acc_norm": 0.7575757575757576, + "acc_norm_stderr": 0.030532892233932036 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5241379310344828, + "acc_stderr": 0.041618085035015295, + "acc_norm": 0.5241379310344828, + "acc_norm_stderr": 0.041618085035015295 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.35294117647058826, + "acc_stderr": 0.04755129616062947, + "acc_norm": 0.35294117647058826, + "acc_norm_stderr": 0.04755129616062947 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6512605042016807, + "acc_stderr": 0.030956636328566545, + "acc_norm": 0.6512605042016807, + "acc_norm_stderr": 0.030956636328566545 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5743589743589743, + "acc_stderr": 0.025069094387296525, + "acc_norm": 0.5743589743589743, + "acc_norm_stderr": 0.025069094387296525 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.59, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.59, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + 
"acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.04557239513497751, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.04557239513497751 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.03465304488406796, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.03465304488406796 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6193548387096774, + "acc_stderr": 0.02762171783290703, + "acc_norm": 0.6193548387096774, + "acc_norm_stderr": 0.02762171783290703 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7948717948717948, + "acc_stderr": 0.02645350805404033, + "acc_norm": 0.7948717948717948, + "acc_norm_stderr": 0.02645350805404033 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5433962264150943, + "acc_stderr": 0.030656748696739435, + "acc_norm": 0.5433962264150943, + "acc_norm_stderr": 0.030656748696739435 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6, + "acc_stderr": 0.0469237132203465, + "acc_norm": 0.6, + "acc_norm_stderr": 0.0469237132203465 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.37777777777777777, + "acc_stderr": 0.029560707392465715, + "acc_norm": 0.37777777777777777, + "acc_norm_stderr": 0.029560707392465715 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.37748344370860926, + "acc_stderr": 0.039580272311215706, + "acc_norm": 0.37748344370860926, + "acc_norm_stderr": 0.039580272311215706 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6915422885572139, + "acc_stderr": 0.03265819588512697, + "acc_norm": 0.6915422885572139, + "acc_norm_stderr": 0.03265819588512697 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5028901734104047, + "acc_stderr": 0.038124005659748335, + "acc_norm": 0.5028901734104047, + "acc_norm_stderr": 0.038124005659748335 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 
0.4470899470899471, + "acc_stderr": 0.025606723995777025, + "acc_norm": 0.4470899470899471, + "acc_norm_stderr": 0.025606723995777025 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.04155319955593146, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.04155319955593146 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.73, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.73, + "acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.6127167630057804, + "acc_stderr": 0.02622615860512465, + "acc_norm": 0.6127167630057804, + "acc_norm_stderr": 0.02622615860512465 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5030674846625767, + "acc_stderr": 0.03928297078179663, + "acc_norm": 0.5030674846625767, + "acc_norm_stderr": 0.03928297078179663 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5987654320987654, + "acc_stderr": 0.027272582849839792, + "acc_norm": 0.5987654320987654, + "acc_norm_stderr": 0.027272582849839792 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7202072538860104, + "acc_stderr": 0.03239637046735704, + "acc_norm": 0.7202072538860104, + "acc_norm_stderr": 0.03239637046735704 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.49122807017543857, + "acc_stderr": 0.047028804320496165, + "acc_norm": 0.49122807017543857, + "acc_norm_stderr": 0.047028804320496165 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.689908256880734, + "acc_stderr": 0.019830849684439752, + "acc_norm": 0.689908256880734, + "acc_norm_stderr": 0.019830849684439752 + }, + "harness|ko_mmlu_formal_logic|5": { + 
"acc": 0.42857142857142855, + "acc_stderr": 0.0442626668137991, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.0442626668137991 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5751633986928104, + "acc_stderr": 0.028304576673141107, + "acc_norm": 0.5751633986928104, + "acc_norm_stderr": 0.028304576673141107 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7355371900826446, + "acc_stderr": 0.04026187527591206, + "acc_norm": 0.7355371900826446, + "acc_norm_stderr": 0.04026187527591206 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5986842105263158, + "acc_stderr": 0.039889037033362836, + "acc_norm": 0.5986842105263158, + "acc_norm_stderr": 0.039889037033362836 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5294117647058824, + "acc_stderr": 0.020192808271433788, + "acc_norm": 0.5294117647058824, + "acc_norm_stderr": 0.020192808271433788 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.43617021276595747, + "acc_stderr": 0.029583452036284062, + "acc_norm": 0.43617021276595747, + "acc_norm_stderr": 0.029583452036284062 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.04697113923010213, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.04697113923010213 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5, + "acc_stderr": 0.034099716973523674, + "acc_norm": 0.5, + "acc_norm_stderr": 0.034099716973523674 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.19553072625698323, + "acc_stderr": 0.013264579220945098, + "acc_norm": 0.19553072625698323, + "acc_norm_stderr": 0.013264579220945098 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + 
"harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.71, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.71, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5183823529411765, + "acc_stderr": 0.03035230339535196, + "acc_norm": 0.5183823529411765, + "acc_norm_stderr": 0.03035230339535196 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5755102040816327, + "acc_stderr": 0.031642094879429414, + "acc_norm": 0.5755102040816327, + "acc_norm_stderr": 0.031642094879429414 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7679324894514767, + "acc_stderr": 0.027479744550808517, + "acc_norm": 0.7679324894514767, + "acc_norm_stderr": 0.027479744550808517 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.4152542372881356, + "acc_stderr": 0.012585471793400665, + "acc_norm": 0.4152542372881356, + "acc_norm_stderr": 0.012585471793400665 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6617647058823529, + "acc_stderr": 0.03320574612945431, + "acc_norm": 0.6617647058823529, + "acc_norm_stderr": 0.03320574612945431 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6787878787878788, + "acc_stderr": 0.036462049632538136, + "acc_norm": 0.6787878787878788, + "acc_norm_stderr": 0.036462049632538136 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3182374541003672, + "mc1_stderr": 0.016305988648920605, + "mc2": 0.4821580350888159, + "mc2_stderr": 0.01537715862983969 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5855962219598583, + "acc_stderr": 0.016936583383943608, + "acc_norm": 0.628099173553719, + "acc_norm_stderr": 0.016616612843224948 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + 
"harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + 
"harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "jeonsworld/CarbonVillain-10.7B-v1", + "model_sha": "f016ba7ef7a51ce15b334176f25df87104af655f", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/jeonsworld/CarbonVillain-10.7B-v2/result_2024-01-01 06:55:32.json b/jeonsworld/CarbonVillain-10.7B-v2/result_2024-01-01 06:55:32.json new file mode 100644 index 0000000000000000000000000000000000000000..59cc7a245d50a8fef5d76d51b4b5b9f58213cdf9 --- /dev/null +++ b/jeonsworld/CarbonVillain-10.7B-v2/result_2024-01-01 06:55:32.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.43600682593856654, + "acc_stderr": 0.014491225699230916, + "acc_norm": 0.48464163822525597, + "acc_norm_stderr": 0.014604496129394906 + }, + "harness|ko_hellaswag|10": { + "acc": 0.42959569806811393, + "acc_stderr": 0.004940067402031032, + "acc_norm": 0.5977892850029874, + "acc_norm_stderr": 0.004893418929918279 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5730994152046783, + "acc_stderr": 0.03793620616529916, + "acc_norm": 0.5730994152046783, + "acc_norm_stderr": 0.03793620616529916 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6601941747572816, + "acc_stderr": 0.04689765937278133, + "acc_norm": 0.6601941747572816, + "acc_norm_stderr": 0.04689765937278133 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6538952745849298, + "acc_stderr": 0.017011965266412077, + "acc_norm": 0.6538952745849298, 
+ "acc_norm_stderr": 0.017011965266412077 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.043163785995113245, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.043163785995113245 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4808510638297872, + "acc_stderr": 0.032662042990646775, + "acc_norm": 0.4808510638297872, + "acc_norm_stderr": 0.032662042990646775 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4759036144578313, + "acc_stderr": 0.03887971849597264, + "acc_norm": 0.4759036144578313, + "acc_norm_stderr": 0.03887971849597264 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6205787781350482, + "acc_stderr": 0.027559949802347817, + "acc_norm": 0.6205787781350482, + "acc_norm_stderr": 0.027559949802347817 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5874439461883408, + "acc_stderr": 0.03304062175449297, + "acc_norm": 0.5874439461883408, + "acc_norm_stderr": 0.03304062175449297 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5801526717557252, + "acc_stderr": 0.04328577215262972, + "acc_norm": 0.5801526717557252, + "acc_norm_stderr": 0.04328577215262972 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956914, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956914 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7070707070707071, + "acc_stderr": 0.03242497958178818, + "acc_norm": 0.7070707070707071, + "acc_norm_stderr": 0.03242497958178818 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4482758620689655, + "acc_stderr": 0.04144311810878151, + "acc_norm": 0.4482758620689655, + "acc_norm_stderr": 0.04144311810878151 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3235294117647059, + "acc_stderr": 0.046550104113196177, + "acc_norm": 
0.3235294117647059, + "acc_norm_stderr": 0.046550104113196177 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6218487394957983, + "acc_stderr": 0.03149930577784906, + "acc_norm": 0.6218487394957983, + "acc_norm_stderr": 0.03149930577784906 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5794871794871795, + "acc_stderr": 0.025028610276710866, + "acc_norm": 0.5794871794871795, + "acc_norm_stderr": 0.025028610276710866 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.59, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.59, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6111111111111112, + "acc_stderr": 0.0471282125742677, + "acc_norm": 0.6111111111111112, + "acc_norm_stderr": 0.0471282125742677 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3891625615763547, + "acc_stderr": 0.034304624161038716, + "acc_norm": 0.3891625615763547, + "acc_norm_stderr": 0.034304624161038716 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.603225806451613, + "acc_stderr": 0.027831231605767934, + "acc_norm": 0.603225806451613, + "acc_norm_stderr": 0.027831231605767934 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7863247863247863, + "acc_stderr": 0.026853450377009144, + "acc_norm": 0.7863247863247863, + "acc_norm_stderr": 0.026853450377009144 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5283018867924528, + "acc_stderr": 0.0307235352490061, + "acc_norm": 0.5283018867924528, + "acc_norm_stderr": 0.0307235352490061 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5909090909090909, + "acc_stderr": 0.047093069786618945, + "acc_norm": 0.5909090909090909, + "acc_norm_stderr": 0.047093069786618945 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.34074074074074073, + 
"acc_stderr": 0.028897748741131147, + "acc_norm": 0.34074074074074073, + "acc_norm_stderr": 0.028897748741131147 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33112582781456956, + "acc_stderr": 0.038425817186598696, + "acc_norm": 0.33112582781456956, + "acc_norm_stderr": 0.038425817186598696 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7213930348258707, + "acc_stderr": 0.03170056183497309, + "acc_norm": 0.7213930348258707, + "acc_norm_stderr": 0.03170056183497309 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5144508670520231, + "acc_stderr": 0.03810871630454764, + "acc_norm": 0.5144508670520231, + "acc_norm_stderr": 0.03810871630454764 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.4126984126984127, + "acc_stderr": 0.02535574126305526, + "acc_norm": 0.4126984126984127, + "acc_norm_stderr": 0.02535574126305526 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4652777777777778, + "acc_stderr": 0.04171115858181618, + "acc_norm": 0.4652777777777778, + "acc_norm_stderr": 0.04171115858181618 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.63, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.63, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5809248554913294, + "acc_stderr": 0.026564178111422622, + "acc_norm": 0.5809248554913294, + "acc_norm_stderr": 0.026564178111422622 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5153374233128835, + "acc_stderr": 0.039265223787088445, + "acc_norm": 0.5153374233128835, + "acc_norm_stderr": 0.039265223787088445 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6080246913580247, + "acc_stderr": 0.027163686038271146, + "acc_norm": 0.6080246913580247, + "acc_norm_stderr": 0.027163686038271146 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 
0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6994818652849741, + "acc_stderr": 0.0330881859441575, + "acc_norm": 0.6994818652849741, + "acc_norm_stderr": 0.0330881859441575 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.4473684210526316, + "acc_stderr": 0.046774730044912, + "acc_norm": 0.4473684210526316, + "acc_norm_stderr": 0.046774730044912 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6825688073394496, + "acc_stderr": 0.0199571521984605, + "acc_norm": 0.6825688073394496, + "acc_norm_stderr": 0.0199571521984605 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.0442626668137991, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.0442626668137991 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5882352941176471, + "acc_stderr": 0.02818059632825929, + "acc_norm": 0.5882352941176471, + "acc_norm_stderr": 0.02818059632825929 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.63, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.63, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7355371900826446, + "acc_stderr": 0.04026187527591206, + "acc_norm": 0.7355371900826446, + "acc_norm_stderr": 0.04026187527591206 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5197368421052632, + "acc_stderr": 0.040657710025626036, + "acc_norm": 0.5197368421052632, + "acc_norm_stderr": 0.040657710025626036 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.511437908496732, + "acc_stderr": 0.020222541515610856, + "acc_norm": 0.511437908496732, + "acc_norm_stderr": 0.020222541515610856 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.42907801418439717, + "acc_stderr": 0.02952591430255855, + "acc_norm": 0.42907801418439717, + "acc_norm_stderr": 0.02952591430255855 + }, + 
"harness|ko_mmlu_machine_learning|5": { + "acc": 0.38392857142857145, + "acc_stderr": 0.04616143075028546, + "acc_norm": 0.38392857142857145, + "acc_norm_stderr": 0.04616143075028546 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5509259259259259, + "acc_stderr": 0.03392238405321616, + "acc_norm": 0.5509259259259259, + "acc_norm_stderr": 0.03392238405321616 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2759776536312849, + "acc_stderr": 0.014950103002475363, + "acc_norm": 0.2759776536312849, + "acc_norm_stderr": 0.014950103002475363 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.63, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.63, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5073529411764706, + "acc_stderr": 0.030369552523902173, + "acc_norm": 0.5073529411764706, + "acc_norm_stderr": 0.030369552523902173 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5836734693877551, + "acc_stderr": 0.031557828165561644, + "acc_norm": 0.5836734693877551, + "acc_norm_stderr": 0.031557828165561644 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7552742616033755, + "acc_stderr": 0.02798569938703641, + "acc_norm": 0.7552742616033755, + "acc_norm_stderr": 0.02798569938703641 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.4152542372881356, + "acc_stderr": 0.012585471793400664, + "acc_norm": 0.4152542372881356, + "acc_norm_stderr": 0.012585471793400664 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6617647058823529, + "acc_stderr": 0.03320574612945431, + "acc_norm": 0.6617647058823529, + "acc_norm_stderr": 0.03320574612945431 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.703030303030303, + "acc_stderr": 0.03567969772268047, + 
"acc_norm": 0.703030303030303, + "acc_norm_stderr": 0.03567969772268047 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3635250917992656, + "mc1_stderr": 0.01683886288396582, + "mc2": 0.5546986100176308, + "mc2_stderr": 0.015665918238726685 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.6115702479338843, + "acc_stderr": 0.016756921571069422, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.01653869160332771 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + 
"harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "jeonsworld/CarbonVillain-10.7B-v2", + "model_sha": "3f28bd25b0d05681c5641304ca60dc8857b81b7f", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/jeonsworld/CarbonVillain-10.7B-v3/result_2024-01-01 07:12:09.json b/jeonsworld/CarbonVillain-10.7B-v3/result_2024-01-01 07:12:09.json new file mode 100644 index 0000000000000000000000000000000000000000..d67d60d0d0f012f71e6ad7a320e3d898a1c6d4bb --- /dev/null +++ b/jeonsworld/CarbonVillain-10.7B-v3/result_2024-01-01 07:12:09.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4232081911262799, + "acc_stderr": 0.01443803622084803, + "acc_norm": 0.4803754266211604, + "acc_norm_stderr": 
0.014600132075947096 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4291973710416252, + "acc_stderr": 0.004939500404882175, + "acc_norm": 0.5976897032463653, + "acc_norm_stderr": 0.0048936170149752965 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5906432748538012, + "acc_stderr": 0.03771283107626544, + "acc_norm": 0.5906432748538012, + "acc_norm_stderr": 0.03771283107626544 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6796116504854369, + "acc_stderr": 0.04620284082280042, + "acc_norm": 0.6796116504854369, + "acc_norm_stderr": 0.04620284082280042 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6526181353767561, + "acc_stderr": 0.01702667174865573, + "acc_norm": 0.6526181353767561, + "acc_norm_stderr": 0.01702667174865573 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4888888888888889, + "acc_stderr": 0.04318275491977976, + "acc_norm": 0.4888888888888889, + "acc_norm_stderr": 0.04318275491977976 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.22, + "acc_stderr": 0.0416333199893227, + "acc_norm": 0.22, + "acc_norm_stderr": 0.0416333199893227 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.48936170212765956, + "acc_stderr": 0.03267862331014063, + "acc_norm": 0.48936170212765956, + "acc_norm_stderr": 0.03267862331014063 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.46987951807228917, + "acc_stderr": 0.03885425420866766, + "acc_norm": 0.46987951807228917, + "acc_norm_stderr": 0.03885425420866766 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6205787781350482, + "acc_stderr": 0.027559949802347817, + "acc_norm": 0.6205787781350482, + "acc_norm_stderr": 0.027559949802347817 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5605381165919282, + "acc_stderr": 0.03331092511038179, + "acc_norm": 0.5605381165919282, + "acc_norm_stderr": 0.03331092511038179 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6183206106870229, + "acc_stderr": 0.0426073515764456, + "acc_norm": 0.6183206106870229, + "acc_norm_stderr": 
0.0426073515764456 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956914, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956914 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7171717171717171, + "acc_stderr": 0.03208779558786753, + "acc_norm": 0.7171717171717171, + "acc_norm_stderr": 0.03208779558786753 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.47586206896551725, + "acc_stderr": 0.0416180850350153, + "acc_norm": 0.47586206896551725, + "acc_norm_stderr": 0.0416180850350153 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3235294117647059, + "acc_stderr": 0.046550104113196177, + "acc_norm": 0.3235294117647059, + "acc_norm_stderr": 0.046550104113196177 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6092436974789915, + "acc_stderr": 0.031693802357129965, + "acc_norm": 0.6092436974789915, + "acc_norm_stderr": 0.031693802357129965 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5666666666666667, + "acc_stderr": 0.025124653525885096, + "acc_norm": 0.5666666666666667, + "acc_norm_stderr": 0.025124653525885096 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.57, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.57, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6111111111111112, + "acc_stderr": 0.0471282125742677, + "acc_norm": 0.6111111111111112, + "acc_norm_stderr": 0.0471282125742677 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39901477832512317, + "acc_stderr": 0.03445487686264716, + "acc_norm": 0.39901477832512317, + "acc_norm_stderr": 0.03445487686264716 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6064516129032258, + "acc_stderr": 0.027791878753132274, + "acc_norm": 
0.6064516129032258, + "acc_norm_stderr": 0.027791878753132274 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7905982905982906, + "acc_stderr": 0.026655699653922765, + "acc_norm": 0.7905982905982906, + "acc_norm_stderr": 0.026655699653922765 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.539622641509434, + "acc_stderr": 0.030676096599389184, + "acc_norm": 0.539622641509434, + "acc_norm_stderr": 0.030676096599389184 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6, + "acc_stderr": 0.0469237132203465, + "acc_norm": 0.6, + "acc_norm_stderr": 0.0469237132203465 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.34814814814814815, + "acc_stderr": 0.029045600290616258, + "acc_norm": 0.34814814814814815, + "acc_norm_stderr": 0.029045600290616258 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.03802039760107903, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.03802039760107903 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7313432835820896, + "acc_stderr": 0.031343283582089536, + "acc_norm": 0.7313432835820896, + "acc_norm_stderr": 0.031343283582089536 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4913294797687861, + "acc_stderr": 0.03811890988940412, + "acc_norm": 0.4913294797687861, + "acc_norm_stderr": 0.03811890988940412 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.41798941798941797, + "acc_stderr": 0.02540255550326091, + "acc_norm": 0.41798941798941797, + "acc_norm_stderr": 0.02540255550326091 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4861111111111111, + "acc_stderr": 0.04179596617581, + "acc_norm": 0.4861111111111111, + "acc_norm_stderr": 0.04179596617581 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.7, + "acc_stderr": 0.046056618647183814, + 
"acc_norm": 0.7, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5751445086705202, + "acc_stderr": 0.02661335084026174, + "acc_norm": 0.5751445086705202, + "acc_norm_stderr": 0.02661335084026174 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5030674846625767, + "acc_stderr": 0.03928297078179663, + "acc_norm": 0.5030674846625767, + "acc_norm_stderr": 0.03928297078179663 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6080246913580247, + "acc_stderr": 0.027163686038271146, + "acc_norm": 0.6080246913580247, + "acc_norm_stderr": 0.027163686038271146 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7512953367875648, + "acc_stderr": 0.031195840877700304, + "acc_norm": 0.7512953367875648, + "acc_norm_stderr": 0.031195840877700304 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.4473684210526316, + "acc_stderr": 0.046774730044912, + "acc_norm": 0.4473684210526316, + "acc_norm_stderr": 0.046774730044912 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6642201834862386, + "acc_stderr": 0.02024808139675293, + "acc_norm": 0.6642201834862386, + "acc_norm_stderr": 0.02024808139675293 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4523809523809524, + "acc_stderr": 0.044518079590553275, + "acc_norm": 0.4523809523809524, + "acc_norm_stderr": 0.044518079590553275 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5588235294117647, + "acc_stderr": 0.028431095444176643, + "acc_norm": 0.5588235294117647, + "acc_norm_stderr": 0.028431095444176643 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.61, + "acc_stderr": 0.049020713000019756, + "acc_norm": 0.61, + "acc_norm_stderr": 0.049020713000019756 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7272727272727273, + "acc_stderr": 
0.040655781409087044, + "acc_norm": 0.7272727272727273, + "acc_norm_stderr": 0.040655781409087044 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5328947368421053, + "acc_stderr": 0.040601270352363966, + "acc_norm": 0.5328947368421053, + "acc_norm_stderr": 0.040601270352363966 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5179738562091504, + "acc_stderr": 0.020214761037872397, + "acc_norm": 0.5179738562091504, + "acc_norm_stderr": 0.020214761037872397 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.4219858156028369, + "acc_stderr": 0.0294621892333706, + "acc_norm": 0.4219858156028369, + "acc_norm_stderr": 0.0294621892333706 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4017857142857143, + "acc_stderr": 0.04653333146973647, + "acc_norm": 0.4017857142857143, + "acc_norm_stderr": 0.04653333146973647 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5601851851851852, + "acc_stderr": 0.03385177976044812, + "acc_norm": 0.5601851851851852, + "acc_norm_stderr": 0.03385177976044812 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27262569832402234, + "acc_stderr": 0.014893391735249612, + "acc_norm": 0.27262569832402234, + "acc_norm_stderr": 0.014893391735249612 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.63, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.63, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5330882352941176, + "acc_stderr": 0.03030625772246831, + "acc_norm": 0.5330882352941176, + "acc_norm_stderr": 0.03030625772246831 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5755102040816327, + "acc_stderr": 0.031642094879429414, + "acc_norm": 0.5755102040816327, + "acc_norm_stderr": 0.031642094879429414 + }, + 
"harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.759493670886076, + "acc_stderr": 0.027820781981149675, + "acc_norm": 0.759493670886076, + "acc_norm_stderr": 0.027820781981149675 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.41003911342894395, + "acc_stderr": 0.012561837621962026, + "acc_norm": 0.41003911342894395, + "acc_norm_stderr": 0.012561837621962026 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6715686274509803, + "acc_stderr": 0.03296245110172229, + "acc_norm": 0.6715686274509803, + "acc_norm_stderr": 0.03296245110172229 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.696969696969697, + "acc_stderr": 0.03588624800091709, + "acc_norm": 0.696969696969697, + "acc_norm_stderr": 0.03588624800091709 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3684210526315789, + "mc1_stderr": 0.016886551261046042, + "mc2": 0.557243824973016, + "mc2_stderr": 0.015708758883428212 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.6092089728453365, + "acc_stderr": 0.016775298465108265, + "acc_norm": 0.6316410861865407, + "acc_norm_stderr": 0.016583858982639085 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + 
"harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "jeonsworld/CarbonVillain-10.7B-v3", + "model_sha": "e642a7ff449cee44fa2d8e30355e3f0e2e61443b", + "model_dtype": "torch.float16", + 
"lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/jeonsworld/CarbonVillain-13B-v1/result_2023-12-28 20:02:14.json b/jeonsworld/CarbonVillain-13B-v1/result_2023-12-28 20:02:14.json new file mode 100644 index 0000000000000000000000000000000000000000..ffde1ee29ed84249c7a59afe42f44bfb0d216e45 --- /dev/null +++ b/jeonsworld/CarbonVillain-13B-v1/result_2023-12-28 20:02:14.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4232081911262799, + "acc_stderr": 0.01443803622084803, + "acc_norm": 0.48378839590443684, + "acc_norm_stderr": 0.014603708567414933 + }, + "harness|ko_hellaswag|10": { + "acc": 0.45200159330810596, + "acc_stderr": 0.00496673681101049, + "acc_norm": 0.6045608444532962, + "acc_norm_stderr": 0.004879455474663812 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5614035087719298, + "acc_stderr": 0.038057975055904594, + "acc_norm": 0.5614035087719298, + "acc_norm_stderr": 0.038057975055904594 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5339805825242718, + "acc_stderr": 0.04939291447273481, + "acc_norm": 0.5339805825242718, + "acc_norm_stderr": 0.04939291447273481 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5683269476372924, + "acc_stderr": 0.017712228939299798, + "acc_norm": 0.5683269476372924, + "acc_norm_stderr": 0.017712228939299798 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.42962962962962964, + "acc_stderr": 0.042763494943765974, + "acc_norm": 0.42962962962962964, + "acc_norm_stderr": 0.042763494943765974 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4, + "acc_stderr": 0.03202563076101735, + "acc_norm": 0.4, + "acc_norm_stderr": 0.03202563076101735 + }, + "harness|ko_mmlu_virology|5": { + 
"acc": 0.42771084337349397, + "acc_stderr": 0.038515976837185335, + "acc_norm": 0.42771084337349397, + "acc_norm_stderr": 0.038515976837185335 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5305466237942122, + "acc_stderr": 0.028345045864840622, + "acc_norm": 0.5305466237942122, + "acc_norm_stderr": 0.028345045864840622 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4798206278026906, + "acc_stderr": 0.033530461674123, + "acc_norm": 0.4798206278026906, + "acc_norm_stderr": 0.033530461674123 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.46564885496183206, + "acc_stderr": 0.04374928560599738, + "acc_norm": 0.46564885496183206, + "acc_norm_stderr": 0.04374928560599738 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.44, + "acc_stderr": 0.0498887651569859, + "acc_norm": 0.44, + "acc_norm_stderr": 0.0498887651569859 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5353535353535354, + "acc_stderr": 0.03553436368828064, + "acc_norm": 0.5353535353535354, + "acc_norm_stderr": 0.03553436368828064 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.43448275862068964, + "acc_stderr": 0.041307408795554966, + "acc_norm": 0.43448275862068964, + "acc_norm_stderr": 0.041307408795554966 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.042801058373643966, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.042801058373643966 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.46218487394957986, + "acc_stderr": 0.032385469487589795, + "acc_norm": 0.46218487394957986, + "acc_norm_stderr": 0.032385469487589795 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.45384615384615384, + "acc_stderr": 0.025242770987126174, + "acc_norm": 0.45384615384615384, + "acc_norm_stderr": 0.025242770987126174 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956913, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956913 
+ }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5185185185185185, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.5185185185185185, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39408866995073893, + "acc_stderr": 0.03438157967036543, + "acc_norm": 0.39408866995073893, + "acc_norm_stderr": 0.03438157967036543 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4483870967741935, + "acc_stderr": 0.02829205683011273, + "acc_norm": 0.4483870967741935, + "acc_norm_stderr": 0.02829205683011273 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6623931623931624, + "acc_stderr": 0.030980296992618554, + "acc_norm": 0.6623931623931624, + "acc_norm_stderr": 0.030980296992618554 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.44150943396226416, + "acc_stderr": 0.03056159042673183, + "acc_norm": 0.44150943396226416, + "acc_norm_stderr": 0.03056159042673183 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4818181818181818, + "acc_stderr": 0.04785964010794916, + "acc_norm": 0.4818181818181818, + "acc_norm_stderr": 0.04785964010794916 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.29259259259259257, + "acc_stderr": 0.02773896963217609, + "acc_norm": 0.29259259259259257, + "acc_norm_stderr": 0.02773896963217609 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.32450331125827814, + "acc_stderr": 0.038227469376587525, + "acc_norm": 0.32450331125827814, + "acc_norm_stderr": 0.038227469376587525 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.582089552238806, + "acc_stderr": 0.034875586404620636, + "acc_norm": 0.582089552238806, + "acc_norm_stderr": 0.034875586404620636 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4046242774566474, + "acc_stderr": 0.0374246119388725, + "acc_norm": 
0.4046242774566474, + "acc_norm_stderr": 0.0374246119388725 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.29894179894179895, + "acc_stderr": 0.023577604791655816, + "acc_norm": 0.29894179894179895, + "acc_norm_stderr": 0.023577604791655816 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3958333333333333, + "acc_stderr": 0.04089465449325583, + "acc_norm": 0.3958333333333333, + "acc_norm_stderr": 0.04089465449325583 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.69, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.69, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5433526011560693, + "acc_stderr": 0.026817718130348923, + "acc_norm": 0.5433526011560693, + "acc_norm_stderr": 0.026817718130348923 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5521472392638037, + "acc_stderr": 0.03906947479456607, + "acc_norm": 0.5521472392638037, + "acc_norm_stderr": 0.03906947479456607 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5185185185185185, + "acc_stderr": 0.027801656212323667, + "acc_norm": 0.5185185185185185, + "acc_norm_stderr": 0.027801656212323667 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5854922279792746, + "acc_stderr": 0.035553003195576686, + "acc_norm": 0.5854922279792746, + "acc_norm_stderr": 0.035553003195576686 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.04049339297748142, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.04049339297748142 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5834862385321101, + "acc_stderr": 
0.02113637650403087, + "acc_norm": 0.5834862385321101, + "acc_norm_stderr": 0.02113637650403087 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.04006168083848878, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.04006168083848878 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4542483660130719, + "acc_stderr": 0.028509807802626567, + "acc_norm": 0.4542483660130719, + "acc_norm_stderr": 0.028509807802626567 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6859504132231405, + "acc_stderr": 0.042369647530410184, + "acc_norm": 0.6859504132231405, + "acc_norm_stderr": 0.042369647530410184 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3815789473684211, + "acc_stderr": 0.03953173377749194, + "acc_norm": 0.3815789473684211, + "acc_norm_stderr": 0.03953173377749194 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4150326797385621, + "acc_stderr": 0.01993362777685742, + "acc_norm": 0.4150326797385621, + "acc_norm_stderr": 0.01993362777685742 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32978723404255317, + "acc_stderr": 0.02804594694204239, + "acc_norm": 0.32978723404255317, + "acc_norm_stderr": 0.02804594694204239 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3125, + "acc_stderr": 0.043994650575715215, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.043994650575715215 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3425925925925926, + "acc_stderr": 0.032365852526021574, + "acc_norm": 0.3425925925925926, + "acc_norm_stderr": 0.032365852526021574 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2581005586592179, + "acc_stderr": 0.014635185616527836, + "acc_norm": 0.2581005586592179, + "acc_norm_stderr": 0.014635185616527836 + }, + "harness|ko_mmlu_college_computer_science|5": { + 
"acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.34191176470588236, + "acc_stderr": 0.028814722422254163, + "acc_norm": 0.34191176470588236, + "acc_norm_stderr": 0.028814722422254163 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5510204081632653, + "acc_stderr": 0.03184213866687578, + "acc_norm": 0.5510204081632653, + "acc_norm_stderr": 0.03184213866687578 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.030685820596610805, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.030685820596610805 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.36571056062581486, + "acc_stderr": 0.01230102818884057, + "acc_norm": 0.36571056062581486, + "acc_norm_stderr": 0.01230102818884057 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5196078431372549, + "acc_stderr": 0.03506612560524866, + "acc_norm": 0.5196078431372549, + "acc_norm_stderr": 0.03506612560524866 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5878787878787879, + "acc_stderr": 0.03843566993588717, + "acc_norm": 0.5878787878787879, + "acc_norm_stderr": 0.03843566993588717 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.33659730722154224, + "mc1_stderr": 0.016542412809494873, + "mc2": 0.5085732867841173, + "mc2_stderr": 0.015520250860491847 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4887839433293979, + "acc_stderr": 0.017186028469489287, + "acc_norm": 0.5442739079102715, + "acc_norm_stderr": 0.017122829143292658 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + 
"harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 
1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "jeonsworld/CarbonVillain-13B-v1", + "model_sha": "3ddeca5a6993bdb8f4a456f7e0db598b0841d87e", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/jeonsworld/CarbonVillain-en-10.7B-v4/result_2024-04-26 07:29:19.json b/jeonsworld/CarbonVillain-en-10.7B-v4/result_2024-04-26 07:29:19.json new file mode 100644 index 0000000000000000000000000000000000000000..5a95517f467c4a83e0aa294e1eccffe630bf72e4 --- /dev/null +++ b/jeonsworld/CarbonVillain-en-10.7B-v4/result_2024-04-26 07:29:19.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3856655290102389, + "acc_stderr": 0.014224250973257187, + "acc_norm": 0.4786689419795222, + "acc_norm_stderr": 0.01459808797312711 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4017128062139016, + "acc_stderr": 0.004892425356375712, + "acc_norm": 0.5397331208922526, + "acc_norm_stderr": 0.004974001515580955 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.038110796698335316, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.038110796698335316 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6407766990291263, + "acc_stderr": 0.047504583990416974, + "acc_norm": 0.6407766990291263, + "acc_norm_stderr": 0.047504583990416974 + }, + 
"harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5913154533844189, + "acc_stderr": 0.017579250148153373, + "acc_norm": 0.5913154533844189, + "acc_norm_stderr": 0.017579250148153373 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37777777777777777, + "acc_stderr": 0.04188307537595853, + "acc_norm": 0.37777777777777777, + "acc_norm_stderr": 0.04188307537595853 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.42127659574468085, + "acc_stderr": 0.03227834510146268, + "acc_norm": 0.42127659574468085, + "acc_norm_stderr": 0.03227834510146268 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42771084337349397, + "acc_stderr": 0.038515976837185335, + "acc_norm": 0.42771084337349397, + "acc_norm_stderr": 0.038515976837185335 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5691318327974276, + "acc_stderr": 0.028125340983972708, + "acc_norm": 0.5691318327974276, + "acc_norm_stderr": 0.028125340983972708 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5201793721973094, + "acc_stderr": 0.033530461674123005, + "acc_norm": 0.5201793721973094, + "acc_norm_stderr": 0.033530461674123005 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5877862595419847, + "acc_stderr": 0.04317171194870255, + "acc_norm": 0.5877862595419847, + "acc_norm_stderr": 0.04317171194870255 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6717171717171717, + "acc_stderr": 0.03345678422756776, + "acc_norm": 0.6717171717171717, + "acc_norm_stderr": 0.03345678422756776 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4689655172413793, + "acc_stderr": 0.04158632762097828, + "acc_norm": 0.4689655172413793, + "acc_norm_stderr": 0.04158632762097828 
+ }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04690650298201942, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04690650298201942 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5546218487394958, + "acc_stderr": 0.03228410626716391, + "acc_norm": 0.5546218487394958, + "acc_norm_stderr": 0.03228410626716391 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5153846153846153, + "acc_stderr": 0.025339003010106505, + "acc_norm": 0.5153846153846153, + "acc_norm_stderr": 0.025339003010106505 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.57, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.57, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6018518518518519, + "acc_stderr": 0.04732332615978814, + "acc_norm": 0.6018518518518519, + "acc_norm_stderr": 0.04732332615978814 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3842364532019704, + "acc_stderr": 0.0342239856565755, + "acc_norm": 0.3842364532019704, + "acc_norm_stderr": 0.0342239856565755 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.532258064516129, + "acc_stderr": 0.028384747788813332, + "acc_norm": 0.532258064516129, + "acc_norm_stderr": 0.028384747788813332 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7649572649572649, + "acc_stderr": 0.02777883590493543, + "acc_norm": 0.7649572649572649, + "acc_norm_stderr": 0.02777883590493543 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4716981132075472, + "acc_stderr": 0.030723535249006107, + "acc_norm": 0.4716981132075472, + "acc_norm_stderr": 0.030723535249006107 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5363636363636364, + "acc_stderr": 0.04776449162396197, + "acc_norm": 0.5363636363636364, + 
"acc_norm_stderr": 0.04776449162396197 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3074074074074074, + "acc_stderr": 0.028133252578815635, + "acc_norm": 0.3074074074074074, + "acc_norm_stderr": 0.028133252578815635 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.038020397601079024, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.038020397601079024 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6865671641791045, + "acc_stderr": 0.032801882053486414, + "acc_norm": 0.6865671641791045, + "acc_norm_stderr": 0.032801882053486414 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.45664739884393063, + "acc_stderr": 0.03798106566014498, + "acc_norm": 0.45664739884393063, + "acc_norm_stderr": 0.03798106566014498 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.025107425481137282, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.025107425481137282 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5, + "acc_stderr": 0.04181210050035455, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04181210050035455 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.68, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.68, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5722543352601156, + "acc_stderr": 0.02663653974111609, + "acc_norm": 0.5722543352601156, + "acc_norm_stderr": 0.02663653974111609 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5337423312883436, + "acc_stderr": 0.039194155450484096, + "acc_norm": 0.5337423312883436, + "acc_norm_stderr": 0.039194155450484096 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5925925925925926, + "acc_stderr": 0.027339546640662727, + "acc_norm": 
0.5925925925925926, + "acc_norm_stderr": 0.027339546640662727 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.689119170984456, + "acc_stderr": 0.03340361906276586, + "acc_norm": 0.689119170984456, + "acc_norm_stderr": 0.03340361906276586 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.42105263157894735, + "acc_stderr": 0.046446020912223177, + "acc_norm": 0.42105263157894735, + "acc_norm_stderr": 0.046446020912223177 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6275229357798165, + "acc_stderr": 0.0207283684576385, + "acc_norm": 0.6275229357798165, + "acc_norm_stderr": 0.0207283684576385 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4365079365079365, + "acc_stderr": 0.04435932892851466, + "acc_norm": 0.4365079365079365, + "acc_norm_stderr": 0.04435932892851466 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5228758169934641, + "acc_stderr": 0.028599936776089782, + "acc_norm": 0.5228758169934641, + "acc_norm_stderr": 0.028599936776089782 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.71900826446281, + "acc_stderr": 0.04103203830514511, + "acc_norm": 0.71900826446281, + "acc_norm_stderr": 0.04103203830514511 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5460526315789473, + "acc_stderr": 0.04051646342874143, + "acc_norm": 0.5460526315789473, + "acc_norm_stderr": 0.04051646342874143 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.477124183006536, + "acc_stderr": 0.020206653187884786, + "acc_norm": 0.477124183006536, + "acc_norm_stderr": 0.020206653187884786 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.36879432624113473, + "acc_stderr": 
0.028782227561347237, + "acc_norm": 0.36879432624113473, + "acc_norm_stderr": 0.028782227561347237 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.39285714285714285, + "acc_stderr": 0.04635550135609976, + "acc_norm": 0.39285714285714285, + "acc_norm_stderr": 0.04635550135609976 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.44907407407407407, + "acc_stderr": 0.033922384053216174, + "acc_norm": 0.44907407407407407, + "acc_norm_stderr": 0.033922384053216174 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.33743016759776534, + "acc_stderr": 0.015813901283913048, + "acc_norm": 0.33743016759776534, + "acc_norm_stderr": 0.015813901283913048 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.68, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.68, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5, + "acc_stderr": 0.030372836961539352, + "acc_norm": 0.5, + "acc_norm_stderr": 0.030372836961539352 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6326530612244898, + "acc_stderr": 0.030862144921087555, + "acc_norm": 0.6326530612244898, + "acc_norm_stderr": 0.030862144921087555 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6708860759493671, + "acc_stderr": 0.030587326294702368, + "acc_norm": 0.6708860759493671, + "acc_norm_stderr": 0.030587326294702368 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3878748370273794, + "acc_stderr": 0.012444998309675628, + "acc_norm": 0.3878748370273794, + "acc_norm_stderr": 0.012444998309675628 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5882352941176471, + "acc_stderr": 0.03454236585380609, + "acc_norm": 0.5882352941176471, + "acc_norm_stderr": 0.03454236585380609 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 
0.6242424242424243, + "acc_stderr": 0.03781887353205983, + "acc_norm": 0.6242424242424243, + "acc_norm_stderr": 0.03781887353205983 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3671970624235006, + "mc1_stderr": 0.016874805001453178, + "mc2": 0.5278829949349568, + "mc2_stderr": 0.0164660176069498 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4887839433293979, + "acc_stderr": 0.01718602846948929, + "acc_norm": 0.4911452184179457, + "acc_norm_stderr": 0.01718765819933674 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 
1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "jeonsworld/CarbonVillain-en-10.7B-v4", + "model_sha": "57d6ad4d705d336aba228356683d9f221507440a", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/jhflow/komt-mistral7b-kor-orca-lora/result_2023-10-28 07:47:54.json b/jhflow/komt-mistral7b-kor-orca-lora/result_2023-10-28 07:47:54.json new file mode 100644 index 0000000000000000000000000000000000000000..68cf1f07617c63fd062e8eb9b0c158269cd95bc3 --- /dev/null +++ b/jhflow/komt-mistral7b-kor-orca-lora/result_2023-10-28 07:47:54.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3302047781569966, + "acc_stderr": 
0.013743085603760424, + "acc_norm": 0.3720136518771331, + "acc_norm_stderr": 0.014124597881844461 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3610834495120494, + "acc_stderr": 0.004793330525656211, + "acc_norm": 0.4630551682931687, + "acc_norm_stderr": 0.004976141457736879 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.391812865497076, + "acc_stderr": 0.037439798259263996, + "acc_norm": 0.391812865497076, + "acc_norm_stderr": 0.037439798259263996 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.47572815533980584, + "acc_stderr": 0.04944901092973781, + "acc_norm": 0.47572815533980584, + "acc_norm_stderr": 0.04944901092973781 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.46360153256704983, + "acc_stderr": 0.01783252407959326, + "acc_norm": 0.46360153256704983, + "acc_norm_stderr": 0.01783252407959326 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37777777777777777, + "acc_stderr": 0.04188307537595853, + "acc_norm": 0.37777777777777777, + "acc_norm_stderr": 0.04188307537595853 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621502, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621502 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.34893617021276596, + "acc_stderr": 0.031158522131357766, + "acc_norm": 0.34893617021276596, + "acc_norm_stderr": 0.031158522131357766 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3493975903614458, + "acc_stderr": 0.0371172519074075, + "acc_norm": 0.3493975903614458, + "acc_norm_stderr": 0.0371172519074075 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.47266881028938906, + "acc_stderr": 0.02835563356832818, + "acc_norm": 0.47266881028938906, + "acc_norm_stderr": 0.02835563356832818 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4618834080717489, + "acc_stderr": 0.033460150119732274, + "acc_norm": 0.4618834080717489, + "acc_norm_stderr": 0.033460150119732274 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4198473282442748, + 
"acc_stderr": 0.04328577215262972, + "acc_norm": 0.4198473282442748, + "acc_norm_stderr": 0.04328577215262972 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.51010101010101, + "acc_stderr": 0.035616254886737454, + "acc_norm": 0.51010101010101, + "acc_norm_stderr": 0.035616254886737454 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.43448275862068964, + "acc_stderr": 0.04130740879555497, + "acc_norm": 0.43448275862068964, + "acc_norm_stderr": 0.04130740879555497 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.04533838195929776, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.04533838195929776 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4369747899159664, + "acc_stderr": 0.03221943636566196, + "acc_norm": 0.4369747899159664, + "acc_norm_stderr": 0.03221943636566196 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.38974358974358975, + "acc_stderr": 0.024726967886647078, + "acc_norm": 0.38974358974358975, + "acc_norm_stderr": 0.024726967886647078 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.55, + "acc_stderr": 0.04999999999999999, + "acc_norm": 0.55, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.04820403072760627, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.04820403072760627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4039408866995074, + "acc_stderr": 0.034524539038220385, + "acc_norm": 0.4039408866995074, + "acc_norm_stderr": 0.034524539038220385 + }, + "harness|ko_mmlu_high_school_biology|5": { + 
"acc": 0.4129032258064516, + "acc_stderr": 0.028009138125400387, + "acc_norm": 0.4129032258064516, + "acc_norm_stderr": 0.028009138125400387 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6324786324786325, + "acc_stderr": 0.03158539157745636, + "acc_norm": 0.6324786324786325, + "acc_norm_stderr": 0.03158539157745636 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4641509433962264, + "acc_stderr": 0.030693675018458003, + "acc_norm": 0.4641509433962264, + "acc_norm_stderr": 0.030693675018458003 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.42727272727272725, + "acc_stderr": 0.047381987035454834, + "acc_norm": 0.42727272727272725, + "acc_norm_stderr": 0.047381987035454834 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.31851851851851853, + "acc_stderr": 0.02840653309060846, + "acc_norm": 0.31851851851851853, + "acc_norm_stderr": 0.02840653309060846 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3841059602649007, + "acc_stderr": 0.03971301814719198, + "acc_norm": 0.3841059602649007, + "acc_norm_stderr": 0.03971301814719198 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5572139303482587, + "acc_stderr": 0.03512310964123935, + "acc_norm": 0.5572139303482587, + "acc_norm_stderr": 0.03512310964123935 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.35260115606936415, + "acc_stderr": 0.03643037168958548, + "acc_norm": 0.35260115606936415, + "acc_norm_stderr": 0.03643037168958548 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30423280423280424, + "acc_stderr": 0.023695415009463084, + "acc_norm": 0.30423280423280424, + "acc_norm_stderr": 0.023695415009463084 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3680555555555556, + "acc_stderr": 0.04032999053960718, + "acc_norm": 0.3680555555555556, + "acc_norm_stderr": 0.04032999053960718 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 
0.04824181513244218 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4913294797687861, + "acc_stderr": 0.026915047355369804, + "acc_norm": 0.4913294797687861, + "acc_norm_stderr": 0.026915047355369804 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4171779141104294, + "acc_stderr": 0.03874102859818083, + "acc_norm": 0.4171779141104294, + "acc_norm_stderr": 0.03874102859818083 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.39197530864197533, + "acc_stderr": 0.02716368603827124, + "acc_norm": 0.39197530864197533, + "acc_norm_stderr": 0.02716368603827124 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5181347150259067, + "acc_stderr": 0.03606065001832919, + "acc_norm": 0.5181347150259067, + "acc_norm_stderr": 0.03606065001832919 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.34210526315789475, + "acc_stderr": 0.044629175353369376, + "acc_norm": 0.34210526315789475, + "acc_norm_stderr": 0.044629175353369376 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.46238532110091746, + "acc_stderr": 0.021376575274397576, + "acc_norm": 0.46238532110091746, + "acc_norm_stderr": 0.021376575274397576 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3253968253968254, + "acc_stderr": 0.041905964388711366, + "acc_norm": 0.3253968253968254, + "acc_norm_stderr": 0.041905964388711366 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4738562091503268, + "acc_stderr": 0.028590752958852387, + "acc_norm": 0.4738562091503268, + "acc_norm_stderr": 0.028590752958852387 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + 
"acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6115702479338843, + "acc_stderr": 0.04449270350068382, + "acc_norm": 0.6115702479338843, + "acc_norm_stderr": 0.04449270350068382 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3684210526315789, + "acc_stderr": 0.03925523381052932, + "acc_norm": 0.3684210526315789, + "acc_norm_stderr": 0.03925523381052932 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.35947712418300654, + "acc_stderr": 0.01941253924203216, + "acc_norm": 0.35947712418300654, + "acc_norm_stderr": 0.01941253924203216 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3262411347517731, + "acc_stderr": 0.02796845304356317, + "acc_norm": 0.3262411347517731, + "acc_norm_stderr": 0.02796845304356317 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3125, + "acc_stderr": 0.043994650575715215, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.043994650575715215 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.38425925925925924, + "acc_stderr": 0.03317354514310742, + "acc_norm": 0.38425925925925924, + "acc_norm_stderr": 0.03317354514310742 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.28044692737430166, + "acc_stderr": 0.015024083883322895, + "acc_norm": 0.28044692737430166, + "acc_norm_stderr": 0.015024083883322895 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.35294117647058826, + "acc_stderr": 0.0290294228156814, + "acc_norm": 0.35294117647058826, + "acc_norm_stderr": 0.0290294228156814 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4775510204081633, + "acc_stderr": 
0.03197694118713673, + "acc_norm": 0.4775510204081633, + "acc_norm_stderr": 0.03197694118713673 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5232067510548524, + "acc_stderr": 0.032512152011410174, + "acc_norm": 0.5232067510548524, + "acc_norm_stderr": 0.032512152011410174 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3285528031290743, + "acc_stderr": 0.011996027247502927, + "acc_norm": 0.3285528031290743, + "acc_norm_stderr": 0.011996027247502927 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.3431372549019608, + "acc_stderr": 0.03332139944668086, + "acc_norm": 0.3431372549019608, + "acc_norm_stderr": 0.03332139944668086 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.41818181818181815, + "acc_stderr": 0.03851716319398393, + "acc_norm": 0.41818181818181815, + "acc_norm_stderr": 0.03851716319398393 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.26805385556915545, + "mc1_stderr": 0.015506204722834562, + "mc2": 0.4512199737148749, + "mc2_stderr": 0.015325712009535085 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4474616292798111, + "acc_stderr": 0.01709519030150058, + "acc_norm": 0.51357733175915, + "acc_norm_stderr": 0.017184015060401455 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + 
"harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + 
"model_name": "jhflow/komt-mistral7b-kor-orca-lora", + "model_sha": "16c036d4e96674aa4210dfce64482dbc155b6b44", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/jhflow/mistral7b-lora-multi-turn-v2/result_2023-11-02 00:49:20.json b/jhflow/mistral7b-lora-multi-turn-v2/result_2023-11-02 00:49:20.json new file mode 100644 index 0000000000000000000000000000000000000000..114451e285d0cfe9b6b37ec41c365894325c329c --- /dev/null +++ b/jhflow/mistral7b-lora-multi-turn-v2/result_2023-11-02 00:49:20.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3293515358361775, + "acc_stderr": 0.013734057652635474, + "acc_norm": 0.38993174061433444, + "acc_norm_stderr": 0.014252959848892889 + }, + "harness|ko_hellaswag|10": { + "acc": 0.36715793666600277, + "acc_stderr": 0.004810449343572393, + "acc_norm": 0.4765982871937861, + "acc_norm_stderr": 0.004984313205791441 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.49122807017543857, + "acc_stderr": 0.038342347441649924, + "acc_norm": 0.49122807017543857, + "acc_norm_stderr": 0.038342347441649924 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6116504854368932, + "acc_stderr": 0.04825729337356389, + "acc_norm": 0.6116504854368932, + "acc_norm_stderr": 0.04825729337356389 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4763729246487867, + "acc_stderr": 0.01785998976517645, + "acc_norm": 0.4763729246487867, + "acc_norm_stderr": 0.01785998976517645 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37777777777777777, + "acc_stderr": 0.04188307537595853, + "acc_norm": 0.37777777777777777, + "acc_norm_stderr": 0.04188307537595853 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + 
"harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3829787234042553, + "acc_stderr": 0.03177821250236922, + "acc_norm": 0.3829787234042553, + "acc_norm_stderr": 0.03177821250236922 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4397590361445783, + "acc_stderr": 0.03864139923699121, + "acc_norm": 0.4397590361445783, + "acc_norm_stderr": 0.03864139923699121 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5016077170418006, + "acc_stderr": 0.02839794490780661, + "acc_norm": 0.5016077170418006, + "acc_norm_stderr": 0.02839794490780661 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4304932735426009, + "acc_stderr": 0.033231973029429394, + "acc_norm": 0.4304932735426009, + "acc_norm_stderr": 0.033231973029429394 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.46564885496183206, + "acc_stderr": 0.04374928560599738, + "acc_norm": 0.46564885496183206, + "acc_norm_stderr": 0.04374928560599738 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5808080808080808, + "acc_stderr": 0.035155207286704175, + "acc_norm": 0.5808080808080808, + "acc_norm_stderr": 0.035155207286704175 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.46206896551724136, + "acc_stderr": 0.04154659671707546, + "acc_norm": 0.46206896551724136, + "acc_norm_stderr": 0.04154659671707546 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.04158307533083286, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.04158307533083286 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5210084033613446, + "acc_stderr": 0.03244980849990029, + "acc_norm": 0.5210084033613446, + "acc_norm_stderr": 0.03244980849990029 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4230769230769231, + "acc_stderr": 0.025049197876042335, + "acc_norm": 
0.4230769230769231, + "acc_norm_stderr": 0.025049197876042335 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5370370370370371, + "acc_stderr": 0.04820403072760627, + "acc_norm": 0.5370370370370371, + "acc_norm_stderr": 0.04820403072760627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.37438423645320196, + "acc_stderr": 0.03405155380561952, + "acc_norm": 0.37438423645320196, + "acc_norm_stderr": 0.03405155380561952 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.44516129032258067, + "acc_stderr": 0.02827241018621491, + "acc_norm": 0.44516129032258067, + "acc_norm_stderr": 0.02827241018621491 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7264957264957265, + "acc_stderr": 0.029202540153431187, + "acc_norm": 0.7264957264957265, + "acc_norm_stderr": 0.029202540153431187 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4339622641509434, + "acc_stderr": 0.030503292013342592, + "acc_norm": 0.4339622641509434, + "acc_norm_stderr": 0.030503292013342592 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5, + "acc_stderr": 0.04789131426105757, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04789131426105757 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3111111111111111, + "acc_stderr": 0.028226446749683512, + "acc_norm": 0.3111111111111111, + "acc_norm_stderr": 0.028226446749683512 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33774834437086093, + "acc_stderr": 0.03861557546255169, + "acc_norm": 0.33774834437086093, + "acc_norm_stderr": 0.03861557546255169 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6069651741293532, + "acc_stderr": 0.0345368246603156, + "acc_norm": 
0.6069651741293532, + "acc_norm_stderr": 0.0345368246603156 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.37572254335260113, + "acc_stderr": 0.03692820767264867, + "acc_norm": 0.37572254335260113, + "acc_norm_stderr": 0.03692820767264867 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3994708994708995, + "acc_stderr": 0.025225450284067877, + "acc_norm": 0.3994708994708995, + "acc_norm_stderr": 0.025225450284067877 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3263888888888889, + "acc_stderr": 0.03921067198982266, + "acc_norm": 0.3263888888888889, + "acc_norm_stderr": 0.03921067198982266 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5115606936416185, + "acc_stderr": 0.026911898686377927, + "acc_norm": 0.5115606936416185, + "acc_norm_stderr": 0.026911898686377927 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4723926380368098, + "acc_stderr": 0.0392237829061099, + "acc_norm": 0.4723926380368098, + "acc_norm_stderr": 0.0392237829061099 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4876543209876543, + "acc_stderr": 0.027812262269327242, + "acc_norm": 0.4876543209876543, + "acc_norm_stderr": 0.027812262269327242 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5181347150259067, + "acc_stderr": 0.036060650018329185, + "acc_norm": 0.5181347150259067, + "acc_norm_stderr": 0.036060650018329185 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.0414243971948936, + 
"acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.0414243971948936 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.48073394495412847, + "acc_stderr": 0.02142140298254888, + "acc_norm": 0.48073394495412847, + "acc_norm_stderr": 0.02142140298254888 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4126984126984127, + "acc_stderr": 0.04403438954768177, + "acc_norm": 0.4126984126984127, + "acc_norm_stderr": 0.04403438954768177 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5228758169934641, + "acc_stderr": 0.028599936776089768, + "acc_norm": 0.5228758169934641, + "acc_norm_stderr": 0.028599936776089768 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6611570247933884, + "acc_stderr": 0.04320767807536669, + "acc_norm": 0.6611570247933884, + "acc_norm_stderr": 0.04320767807536669 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4407894736842105, + "acc_stderr": 0.04040311062490435, + "acc_norm": 0.4407894736842105, + "acc_norm_stderr": 0.04040311062490435 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.39869281045751637, + "acc_stderr": 0.01980828131744985, + "acc_norm": 0.39869281045751637, + "acc_norm_stderr": 0.01980828131744985 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.33687943262411346, + "acc_stderr": 0.028195534873966727, + "acc_norm": 0.33687943262411346, + "acc_norm_stderr": 0.028195534873966727 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4017857142857143, + "acc_stderr": 0.04653333146973646, + "acc_norm": 0.4017857142857143, + "acc_norm_stderr": 0.04653333146973646 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.03350991604696043, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.03350991604696043 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 
0.20782122905027933, + "acc_stderr": 0.013570248325081347, + "acc_norm": 0.20782122905027933, + "acc_norm_stderr": 0.013570248325081347 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.04999999999999999, + "acc_norm": 0.45, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4117647058823529, + "acc_stderr": 0.029896163033125468, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.029896163033125468 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5020408163265306, + "acc_stderr": 0.0320089533497105, + "acc_norm": 0.5020408163265306, + "acc_norm_stderr": 0.0320089533497105 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5780590717299579, + "acc_stderr": 0.0321481463024037, + "acc_norm": 0.5780590717299579, + "acc_norm_stderr": 0.0321481463024037 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.34615384615384615, + "acc_stderr": 0.012150699768228575, + "acc_norm": 0.34615384615384615, + "acc_norm_stderr": 0.012150699768228575 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.3872549019607843, + "acc_stderr": 0.03418931233833343, + "acc_norm": 0.3872549019607843, + "acc_norm_stderr": 0.03418931233833343 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.43636363636363634, + "acc_stderr": 0.03872592983524753, + "acc_norm": 0.43636363636363634, + "acc_norm_stderr": 0.03872592983524753 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.28518971848225216, + "mc1_stderr": 0.015805827874454892, + "mc2": 0.45851375159014823, + "mc2_stderr": 0.015537179333977727 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4805194805194805, + "acc_stderr": 0.017177301992342558, + "acc_norm": 0.5206611570247934, + "acc_norm_stderr": 0.017175671279836446 + } 
+ }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + 
"harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "jhflow/mistral7b-lora-multi-turn-v2", + "model_sha": "a425082361b06134ccebef1b5f841c2edd27f644", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/jhflow/mistral7b-lora-multi-turn-v3/result_2023-11-06 01:57:51.json b/jhflow/mistral7b-lora-multi-turn-v3/result_2023-11-06 01:57:51.json new file mode 100644 index 0000000000000000000000000000000000000000..a50c934dcafbe1315abc46eae3533c1278e432c3 --- /dev/null +++ b/jhflow/mistral7b-lora-multi-turn-v3/result_2023-11-06 01:57:51.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3412969283276451, + "acc_stderr": 0.013855831287497717, + "acc_norm": 0.39419795221843, + "acc_norm_stderr": 0.014280522667467316 + }, + "harness|ko_hellaswag|10": { + "acc": 0.371539533957379, + "acc_stderr": 0.0048222865563052175, + "acc_norm": 0.48088030272854015, + "acc_norm_stderr": 0.004986131919673968 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.47953216374269003, + "acc_stderr": 0.038316105328219316, + "acc_norm": 0.47953216374269003, + "acc_norm_stderr": 0.038316105328219316 + }, + 
"harness|ko_mmlu_management|5": { + "acc": 0.5825242718446602, + "acc_stderr": 0.048828405482122375, + "acc_norm": 0.5825242718446602, + "acc_norm_stderr": 0.048828405482122375 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4725415070242657, + "acc_stderr": 0.017852981266633955, + "acc_norm": 0.4725415070242657, + "acc_norm_stderr": 0.017852981266633955 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37777777777777777, + "acc_stderr": 0.04188307537595853, + "acc_norm": 0.37777777777777777, + "acc_norm_stderr": 0.04188307537595853 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4, + "acc_stderr": 0.03202563076101735, + "acc_norm": 0.4, + "acc_norm_stderr": 0.03202563076101735 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42168674698795183, + "acc_stderr": 0.03844453181770917, + "acc_norm": 0.42168674698795183, + "acc_norm_stderr": 0.03844453181770917 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4180064308681672, + "acc_stderr": 0.028013651891995072, + "acc_norm": 0.4180064308681672, + "acc_norm_stderr": 0.028013651891995072 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4484304932735426, + "acc_stderr": 0.033378837362550984, + "acc_norm": 0.4484304932735426, + "acc_norm_stderr": 0.033378837362550984 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.42748091603053434, + "acc_stderr": 0.04338920305792401, + "acc_norm": 0.42748091603053434, + "acc_norm_stderr": 0.04338920305792401 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.03547601494006938, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.03547601494006938 + }, + 
"harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.46206896551724136, + "acc_stderr": 0.041546596717075474, + "acc_norm": 0.46206896551724136, + "acc_norm_stderr": 0.041546596717075474 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.04389869956808778, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.04389869956808778 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4957983193277311, + "acc_stderr": 0.03247734334448111, + "acc_norm": 0.4957983193277311, + "acc_norm_stderr": 0.03247734334448111 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4641025641025641, + "acc_stderr": 0.025285585990017838, + "acc_norm": 0.4641025641025641, + "acc_norm_stderr": 0.025285585990017838 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.63, + "acc_stderr": 0.04852365870939098, + "acc_norm": 0.63, + "acc_norm_stderr": 0.04852365870939098 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.048262172941398944, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.048262172941398944 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3891625615763547, + "acc_stderr": 0.03430462416103872, + "acc_norm": 0.3891625615763547, + "acc_norm_stderr": 0.03430462416103872 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4645161290322581, + "acc_stderr": 0.028372287797962952, + "acc_norm": 0.4645161290322581, + "acc_norm_stderr": 0.028372287797962952 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7264957264957265, + "acc_stderr": 0.029202540153431194, + "acc_norm": 0.7264957264957265, + "acc_norm_stderr": 0.029202540153431194 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4490566037735849, + "acc_stderr": 0.030612730713641095, + "acc_norm": 0.4490566037735849, 
+ "acc_norm_stderr": 0.030612730713641095 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4909090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.4909090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.027309140588230186, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.027309140588230186 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2185430463576159, + "acc_stderr": 0.033742355504256936, + "acc_norm": 0.2185430463576159, + "acc_norm_stderr": 0.033742355504256936 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6119402985074627, + "acc_stderr": 0.03445789964362749, + "acc_norm": 0.6119402985074627, + "acc_norm_stderr": 0.03445789964362749 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3583815028901734, + "acc_stderr": 0.03656343653353159, + "acc_norm": 0.3583815028901734, + "acc_norm_stderr": 0.03656343653353159 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.35978835978835977, + "acc_stderr": 0.024718075944129274, + "acc_norm": 0.35978835978835977, + "acc_norm_stderr": 0.024718075944129274 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3819444444444444, + "acc_stderr": 0.040629907841466674, + "acc_norm": 0.3819444444444444, + "acc_norm_stderr": 0.040629907841466674 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.62, + "acc_stderr": 0.04878317312145634, + "acc_norm": 0.62, + "acc_norm_stderr": 0.04878317312145634 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5404624277456648, + "acc_stderr": 0.02683080599895223, + "acc_norm": 0.5404624277456648, + "acc_norm_stderr": 0.02683080599895223 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.50920245398773, + "acc_stderr": 0.03927705600787443, + 
"acc_norm": 0.50920245398773, + "acc_norm_stderr": 0.03927705600787443 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4382716049382716, + "acc_stderr": 0.027607914087400473, + "acc_norm": 0.4382716049382716, + "acc_norm_stderr": 0.027607914087400473 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5181347150259067, + "acc_stderr": 0.036060650018329185, + "acc_norm": 0.5181347150259067, + "acc_norm_stderr": 0.036060650018329185 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.041857744240220575, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.041857744240220575 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.47706422018348627, + "acc_stderr": 0.021414757058175502, + "acc_norm": 0.47706422018348627, + "acc_norm_stderr": 0.021414757058175502 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.04134913018303316, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.04134913018303316 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4477124183006536, + "acc_stderr": 0.028472938478033526, + "acc_norm": 0.4477124183006536, + "acc_norm_stderr": 0.028472938478033526 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6859504132231405, + "acc_stderr": 0.04236964753041018, + "acc_norm": 0.6859504132231405, + "acc_norm_stderr": 0.04236964753041018 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4144736842105263, + "acc_stderr": 0.04008973785779206, + "acc_norm": 0.4144736842105263, + "acc_norm_stderr": 0.04008973785779206 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.39052287581699346, + 
"acc_stderr": 0.019737008998094597, + "acc_norm": 0.39052287581699346, + "acc_norm_stderr": 0.019737008998094597 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3404255319148936, + "acc_stderr": 0.028267657482650144, + "acc_norm": 0.3404255319148936, + "acc_norm_stderr": 0.028267657482650144 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4375, + "acc_stderr": 0.04708567521880525, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.04708567521880525 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.031141447823536044, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.031141447823536044 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2547486033519553, + "acc_stderr": 0.014572650383409151, + "acc_norm": 0.2547486033519553, + "acc_norm_stderr": 0.014572650383409151 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.58, + "acc_stderr": 0.04960449637488583, + "acc_norm": 0.58, + "acc_norm_stderr": 0.04960449637488583 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.33455882352941174, + "acc_stderr": 0.028661996202335307, + "acc_norm": 0.33455882352941174, + "acc_norm_stderr": 0.028661996202335307 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.43673469387755104, + "acc_stderr": 0.03175195237583322, + "acc_norm": 0.43673469387755104, + "acc_norm_stderr": 0.03175195237583322 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6329113924050633, + "acc_stderr": 0.031376240725616185, + "acc_norm": 0.6329113924050633, + "acc_norm_stderr": 0.031376240725616185 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3546284224250326, + "acc_stderr": 0.012218576439090167, + "acc_norm": 0.3546284224250326, + "acc_norm_stderr": 0.012218576439090167 + }, + 
"harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.03460228327239171, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.03460228327239171 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.41818181818181815, + "acc_stderr": 0.03851716319398396, + "acc_norm": 0.41818181818181815, + "acc_norm_stderr": 0.03851716319398396 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.27906976744186046, + "mc1_stderr": 0.015702107090627887, + "mc2": 0.4728929290392366, + "mc2_stderr": 0.01563566589182946 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4155844155844156, + "acc_stderr": 0.016943586313076568, + "acc_norm": 0.46989374262101535, + "acc_norm_stderr": 0.017159163590170216 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 
1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "jhflow/mistral7b-lora-multi-turn-v3", + "model_sha": "6ff6149ce4b66cbd5acb5e9683c44c50aae2ccd7", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/jhflow/mistral7b-lora-multiturn-v4/result_2023-12-03 11:37:39.json b/jhflow/mistral7b-lora-multiturn-v4/result_2023-12-03 11:37:39.json new file mode 100644 index 
0000000000000000000000000000000000000000..1dd83057be7931a405c872560d789fa51379a850 --- /dev/null +++ b/jhflow/mistral7b-lora-multiturn-v4/result_2023-12-03 11:37:39.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4121160409556314, + "acc_stderr": 0.014383915302225395, + "acc_norm": 0.4761092150170648, + "acc_norm_stderr": 0.014594701798071654 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4041027683728341, + "acc_stderr": 0.004897146690596254, + "acc_norm": 0.5364469229237204, + "acc_norm_stderr": 0.004976507121076267 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5847953216374269, + "acc_stderr": 0.037792759455032014, + "acc_norm": 0.5847953216374269, + "acc_norm_stderr": 0.037792759455032014 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6019417475728155, + "acc_stderr": 0.04846748253977238, + "acc_norm": 0.6019417475728155, + "acc_norm_stderr": 0.04846748253977238 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.578544061302682, + "acc_stderr": 0.017657976412654854, + "acc_norm": 0.578544061302682, + "acc_norm_stderr": 0.017657976412654854 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34074074074074073, + "acc_stderr": 0.04094376269996794, + "acc_norm": 0.34074074074074073, + "acc_norm_stderr": 0.04094376269996794 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.46382978723404256, + "acc_stderr": 0.032600385118357715, + "acc_norm": 0.46382978723404256, + "acc_norm_stderr": 0.032600385118357715 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.43373493975903615, + "acc_stderr": 0.03858158940685515, + "acc_norm": 0.43373493975903615, + "acc_norm_stderr": 0.03858158940685515 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5080385852090032, + "acc_stderr": 0.028394421370984538, + "acc_norm": 0.5080385852090032, + "acc_norm_stderr": 
0.028394421370984538 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5381165919282511, + "acc_stderr": 0.03346015011973228, + "acc_norm": 0.5381165919282511, + "acc_norm_stderr": 0.03346015011973228 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4580152671755725, + "acc_stderr": 0.04369802690578757, + "acc_norm": 0.4580152671755725, + "acc_norm_stderr": 0.04369802690578757 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6060606060606061, + "acc_stderr": 0.03481285338232963, + "acc_norm": 0.6060606060606061, + "acc_norm_stderr": 0.03481285338232963 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.42758620689655175, + "acc_stderr": 0.041227371113703316, + "acc_norm": 0.42758620689655175, + "acc_norm_stderr": 0.041227371113703316 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.042207736591714506, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.042207736591714506 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5336134453781513, + "acc_stderr": 0.03240501447690071, + "acc_norm": 0.5336134453781513, + "acc_norm_stderr": 0.03240501447690071 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5051282051282051, + "acc_stderr": 0.025349672906838636, + "acc_norm": 0.5051282051282051, + "acc_norm_stderr": 0.025349672906838636 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.64, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.64, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 
0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4088669950738916, + "acc_stderr": 0.034590588158832314, + "acc_norm": 0.4088669950738916, + "acc_norm_stderr": 0.034590588158832314 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4935483870967742, + "acc_stderr": 0.02844163823354051, + "acc_norm": 0.4935483870967742, + "acc_norm_stderr": 0.02844163823354051 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7435897435897436, + "acc_stderr": 0.028605953702004257, + "acc_norm": 0.7435897435897436, + "acc_norm_stderr": 0.028605953702004257 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5283018867924528, + "acc_stderr": 0.0307235352490061, + "acc_norm": 0.5283018867924528, + "acc_norm_stderr": 0.0307235352490061 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.04769300568972744, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.04769300568972744 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3111111111111111, + "acc_stderr": 0.028226446749683512, + "acc_norm": 0.3111111111111111, + "acc_norm_stderr": 0.028226446749683512 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.03710185726119994, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.03710185726119994 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6368159203980099, + "acc_stderr": 0.03400598505599015, + "acc_norm": 0.6368159203980099, + "acc_norm_stderr": 0.03400598505599015 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3930635838150289, + "acc_stderr": 0.0372424959581773, + "acc_norm": 0.3930635838150289, + "acc_norm_stderr": 0.0372424959581773 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3862433862433862, + "acc_stderr": 0.02507598176760168, + "acc_norm": 0.3862433862433862, + "acc_norm_stderr": 0.02507598176760168 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4097222222222222, + 
"acc_stderr": 0.04112490974670788, + "acc_norm": 0.4097222222222222, + "acc_norm_stderr": 0.04112490974670788 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.66, + "acc_stderr": 0.04760952285695237, + "acc_norm": 0.66, + "acc_norm_stderr": 0.04760952285695237 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5173410404624278, + "acc_stderr": 0.026902900458666647, + "acc_norm": 0.5173410404624278, + "acc_norm_stderr": 0.026902900458666647 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.48466257668711654, + "acc_stderr": 0.03926522378708843, + "acc_norm": 0.48466257668711654, + "acc_norm_stderr": 0.03926522378708843 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.49691358024691357, + "acc_stderr": 0.027820214158594377, + "acc_norm": 0.49691358024691357, + "acc_norm_stderr": 0.027820214158594377 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5440414507772021, + "acc_stderr": 0.03594413711272436, + "acc_norm": 0.5440414507772021, + "acc_norm_stderr": 0.03594413711272436 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.044346007015849245, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.044346007015849245 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5284403669724771, + "acc_stderr": 0.021402615697348044, + "acc_norm": 0.5284403669724771, + "acc_norm_stderr": 0.021402615697348044 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.04360314860077459, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.04360314860077459 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 
0.47058823529411764, + "acc_stderr": 0.02858034106513829, + "acc_norm": 0.47058823529411764, + "acc_norm_stderr": 0.02858034106513829 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.53, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.53, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6942148760330579, + "acc_stderr": 0.04205953933884124, + "acc_norm": 0.6942148760330579, + "acc_norm_stderr": 0.04205953933884124 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4934210526315789, + "acc_stderr": 0.040685900502249704, + "acc_norm": 0.4934210526315789, + "acc_norm_stderr": 0.040685900502249704 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4150326797385621, + "acc_stderr": 0.01993362777685742, + "acc_norm": 0.4150326797385621, + "acc_norm_stderr": 0.01993362777685742 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.02812163604063989, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.02812163604063989 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.48214285714285715, + "acc_stderr": 0.047427623612430116, + "acc_norm": 0.48214285714285715, + "acc_norm_stderr": 0.047427623612430116 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.375, + "acc_stderr": 0.033016908987210894, + "acc_norm": 0.375, + "acc_norm_stderr": 0.033016908987210894 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.19664804469273742, + "acc_stderr": 0.01329318302745465, + "acc_norm": 0.19664804469273742, + "acc_norm_stderr": 0.01329318302745465 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_professional_medicine|5": { 
+ "acc": 0.3860294117647059, + "acc_stderr": 0.029573269134411124, + "acc_norm": 0.3860294117647059, + "acc_norm_stderr": 0.029573269134411124 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4857142857142857, + "acc_stderr": 0.03199615232806287, + "acc_norm": 0.4857142857142857, + "acc_norm_stderr": 0.03199615232806287 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.030685820596610812, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.030685820596610812 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3428943937418514, + "acc_stderr": 0.012123463271585892, + "acc_norm": 0.3428943937418514, + "acc_norm_stderr": 0.012123463271585892 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4950980392156863, + "acc_stderr": 0.035091433756067866, + "acc_norm": 0.4950980392156863, + "acc_norm_stderr": 0.035091433756067866 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.503030303030303, + "acc_stderr": 0.03904272341431857, + "acc_norm": 0.503030303030303, + "acc_norm_stderr": 0.03904272341431857 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.32558139534883723, + "mc1_stderr": 0.01640398946990781, + "mc2": 0.4915099229946796, + "mc2_stderr": 0.015373142456080352 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4769775678866588, + "acc_stderr": 0.017172121546727634, + "acc_norm": 0.51357733175915, + "acc_norm_stderr": 0.017184015060401455 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + 
"harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + 
"harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "jhflow/mistral7b-lora-multiturn-v4", + "model_sha": "6e9923c239780e00a982e5a212ab70aa5b19c071", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/jhflow/yi-ko-6b-dpo-further/result_2023-12-20 08:31:30.json b/jhflow/yi-ko-6b-dpo-further/result_2023-12-20 08:31:30.json new file mode 100644 index 0000000000000000000000000000000000000000..0a9bdfbcf3ef40007b46fd5f363e3bc7490611ee --- /dev/null +++ b/jhflow/yi-ko-6b-dpo-further/result_2023-12-20 08:31:30.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.34982935153583616, + "acc_stderr": 0.013936809212158287, + "acc_norm": 0.4197952218430034, + "acc_norm_stderr": 0.014422181226303026 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4018123879705238, + "acc_stderr": 0.004892624490937208, + "acc_norm": 0.5337582154949213, + "acc_norm_stderr": 0.004978395540514387 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5380116959064327, + "acc_stderr": 0.03823727092882307, + "acc_norm": 0.5380116959064327, + "acc_norm_stderr": 0.03823727092882307 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5922330097087378, + "acc_stderr": 0.04865777570410769, + "acc_norm": 0.5922330097087378, + "acc_norm_stderr": 0.04865777570410769 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5696040868454662, + "acc_stderr": 0.01770586877629239, + "acc_norm": 0.5696040868454662, + "acc_norm_stderr": 0.01770586877629239 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45925925925925926, + "acc_stderr": 0.04304979692464244, + "acc_norm": 0.45925925925925926, + "acc_norm_stderr": 
0.04304979692464244 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4085106382978723, + "acc_stderr": 0.03213418026701576, + "acc_norm": 0.4085106382978723, + "acc_norm_stderr": 0.03213418026701576 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3855421686746988, + "acc_stderr": 0.037891344246115496, + "acc_norm": 0.3855421686746988, + "acc_norm_stderr": 0.037891344246115496 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5273311897106109, + "acc_stderr": 0.02835563356832818, + "acc_norm": 0.5273311897106109, + "acc_norm_stderr": 0.02835563356832818 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.47533632286995514, + "acc_stderr": 0.03351695167652628, + "acc_norm": 0.47533632286995514, + "acc_norm_stderr": 0.03351695167652628 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4732824427480916, + "acc_stderr": 0.04379024936553894, + "acc_norm": 0.4732824427480916, + "acc_norm_stderr": 0.04379024936553894 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.034273086529999365, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.034273086529999365 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.47586206896551725, + "acc_stderr": 0.041618085035015295, + "acc_norm": 0.47586206896551725, + "acc_norm_stderr": 0.041618085035015295 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.04488482852329017, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.04488482852329017 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.46638655462184875, + "acc_stderr": 0.03240501447690071, + "acc_norm": 
0.46638655462184875, + "acc_norm_stderr": 0.03240501447690071 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.47435897435897434, + "acc_stderr": 0.025317649726448652, + "acc_norm": 0.47435897435897434, + "acc_norm_stderr": 0.025317649726448652 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.62, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.62, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5092592592592593, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.5092592592592593, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39901477832512317, + "acc_stderr": 0.034454876862647144, + "acc_norm": 0.39901477832512317, + "acc_norm_stderr": 0.034454876862647144 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5161290322580645, + "acc_stderr": 0.028429203176724555, + "acc_norm": 0.5161290322580645, + "acc_norm_stderr": 0.028429203176724555 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6965811965811965, + "acc_stderr": 0.030118210106942645, + "acc_norm": 0.6965811965811965, + "acc_norm_stderr": 0.030118210106942645 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5094339622641509, + "acc_stderr": 0.030767394707808093, + "acc_norm": 0.5094339622641509, + "acc_norm_stderr": 0.030767394707808093 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5181818181818182, + "acc_stderr": 0.04785964010794916, + "acc_norm": 0.5181818181818182, + "acc_norm_stderr": 0.04785964010794916 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3111111111111111, + "acc_stderr": 0.02822644674968352, + "acc_norm": 0.3111111111111111, + "acc_norm_stderr": 0.02822644674968352 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + 
"acc_stderr": 0.03780445850526733, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.03780445850526733 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.582089552238806, + "acc_stderr": 0.034875586404620636, + "acc_norm": 0.582089552238806, + "acc_norm_stderr": 0.034875586404620636 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.44508670520231214, + "acc_stderr": 0.03789401760283647, + "acc_norm": 0.44508670520231214, + "acc_norm_stderr": 0.03789401760283647 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.024278568024307706, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.024278568024307706 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3958333333333333, + "acc_stderr": 0.04089465449325582, + "acc_norm": 0.3958333333333333, + "acc_norm_stderr": 0.04089465449325582 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.63, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.63, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5115606936416185, + "acc_stderr": 0.026911898686377927, + "acc_norm": 0.5115606936416185, + "acc_norm_stderr": 0.026911898686377927 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.48466257668711654, + "acc_stderr": 0.03926522378708843, + "acc_norm": 0.48466257668711654, + "acc_norm_stderr": 0.03926522378708843 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.49382716049382713, + "acc_stderr": 0.027818623962583302, + "acc_norm": 0.49382716049382713, + "acc_norm_stderr": 0.027818623962583302 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 
0.6113989637305699, + "acc_stderr": 0.03517739796373132, + "acc_norm": 0.6113989637305699, + "acc_norm_stderr": 0.03517739796373132 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.37719298245614036, + "acc_stderr": 0.04559522141958215, + "acc_norm": 0.37719298245614036, + "acc_norm_stderr": 0.04559522141958215 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6091743119266055, + "acc_stderr": 0.020920058346111065, + "acc_norm": 0.6091743119266055, + "acc_norm_stderr": 0.020920058346111065 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.0404061017820884, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.0404061017820884 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4738562091503268, + "acc_stderr": 0.028590752958852394, + "acc_norm": 0.4738562091503268, + "acc_norm_stderr": 0.028590752958852394 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6611570247933884, + "acc_stderr": 0.043207678075366705, + "acc_norm": 0.6611570247933884, + "acc_norm_stderr": 0.043207678075366705 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.48026315789473684, + "acc_stderr": 0.04065771002562605, + "acc_norm": 0.48026315789473684, + "acc_norm_stderr": 0.04065771002562605 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.40032679738562094, + "acc_stderr": 0.01982184368827177, + "acc_norm": 0.40032679738562094, + "acc_norm_stderr": 0.01982184368827177 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3475177304964539, + "acc_stderr": 0.028406627809590947, + "acc_norm": 0.3475177304964539, + "acc_norm_stderr": 0.028406627809590947 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.32142857142857145, + "acc_stderr": 0.044328040552915185, + "acc_norm": 0.32142857142857145, + "acc_norm_stderr": 0.044328040552915185 
+ }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.03214952147802749, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.03214952147802749 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2547486033519553, + "acc_stderr": 0.014572650383409163, + "acc_norm": 0.2547486033519553, + "acc_norm_stderr": 0.014572650383409163 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956913, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956913 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4338235294117647, + "acc_stderr": 0.030105636570016643, + "acc_norm": 0.4338235294117647, + "acc_norm_stderr": 0.030105636570016643 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4, + "acc_stderr": 0.03136250240935893, + "acc_norm": 0.4, + "acc_norm_stderr": 0.03136250240935893 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.620253164556962, + "acc_stderr": 0.0315918875296585, + "acc_norm": 0.620253164556962, + "acc_norm_stderr": 0.0315918875296585 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3272490221642764, + "acc_stderr": 0.011983819806464752, + "acc_norm": 0.3272490221642764, + "acc_norm_stderr": 0.011983819806464752 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.553921568627451, + "acc_stderr": 0.034888454513049734, + "acc_norm": 0.553921568627451, + "acc_norm_stderr": 0.034888454513049734 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5757575757575758, + "acc_stderr": 0.038592681420702636, + "acc_norm": 0.5757575757575758, + "acc_norm_stderr": 0.038592681420702636 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.28886168910648713, + "mc1_stderr": 0.015866346401384304, + "mc2": 0.4470960631487972, + 
"mc2_stderr": 0.014964323216719578 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5301062573789846, + "acc_stderr": 0.01715916359017022, + "acc_norm": 0.6080283353010626, + "acc_norm_stderr": 0.016784332119424084 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + 
"harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "jhflow/yi-ko-6b-dpo-further", + "model_sha": "152a5039537a1898c6a352619dfb0740176c3965", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/jhflow/yi-ko-6b-lora-v1/result_2023-12-05 01:06:53.json b/jhflow/yi-ko-6b-lora-v1/result_2023-12-05 01:06:53.json new file mode 100644 index 0000000000000000000000000000000000000000..5081caf4bd59816cc95fdfbc4b203c5158bd7dfc --- /dev/null +++ b/jhflow/yi-ko-6b-lora-v1/result_2023-12-05 01:06:53.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.34812286689419797, + "acc_stderr": 0.013921008595179349, + "acc_norm": 0.4138225255972696, + "acc_norm_stderr": 0.014392730009221009 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4023102967536347, + "acc_stderr": 0.004893617014975309, + "acc_norm": 0.5377414857598088, + "acc_norm_stderr": 0.0049755460189506735 + }, + 
"harness|ko_mmlu_world_religions|5": { + "acc": 0.52046783625731, + "acc_stderr": 0.0383161053282193, + "acc_norm": 0.52046783625731, + "acc_norm_stderr": 0.0383161053282193 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5728155339805825, + "acc_stderr": 0.04897957737781168, + "acc_norm": 0.5728155339805825, + "acc_norm_stderr": 0.04897957737781168 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5606641123882503, + "acc_stderr": 0.017747874245683606, + "acc_norm": 0.5606641123882503, + "acc_norm_stderr": 0.017747874245683606 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.04316378599511326, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.04316378599511326 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.40425531914893614, + "acc_stderr": 0.03208115750788684, + "acc_norm": 0.40425531914893614, + "acc_norm_stderr": 0.03208115750788684 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39759036144578314, + "acc_stderr": 0.03809973084540219, + "acc_norm": 0.39759036144578314, + "acc_norm_stderr": 0.03809973084540219 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5176848874598071, + "acc_stderr": 0.02838032284907713, + "acc_norm": 0.5176848874598071, + "acc_norm_stderr": 0.02838032284907713 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.484304932735426, + "acc_stderr": 0.0335412657542081, + "acc_norm": 0.484304932735426, + "acc_norm_stderr": 0.0335412657542081 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4732824427480916, + "acc_stderr": 0.04379024936553894, + "acc_norm": 0.4732824427480916, + "acc_norm_stderr": 0.04379024936553894 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + 
"harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6161616161616161, + "acc_stderr": 0.03464881675016337, + "acc_norm": 0.6161616161616161, + "acc_norm_stderr": 0.03464881675016337 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.496551724137931, + "acc_stderr": 0.041665675771015785, + "acc_norm": 0.496551724137931, + "acc_norm_stderr": 0.041665675771015785 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.043898699568087785, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.043898699568087785 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4957983193277311, + "acc_stderr": 0.0324773433444811, + "acc_norm": 0.4957983193277311, + "acc_norm_stderr": 0.0324773433444811 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4717948717948718, + "acc_stderr": 0.025310639254933903, + "acc_norm": 0.4717948717948718, + "acc_norm_stderr": 0.025310639254933903 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4187192118226601, + "acc_stderr": 0.03471192860518468, + "acc_norm": 0.4187192118226601, + "acc_norm_stderr": 0.03471192860518468 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5193548387096775, + "acc_stderr": 0.028422687404312117, + "acc_norm": 0.5193548387096775, + "acc_norm_stderr": 0.028422687404312117 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6965811965811965, + "acc_stderr": 0.03011821010694265, + "acc_norm": 0.6965811965811965, + "acc_norm_stderr": 
0.03011821010694265 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5018867924528302, + "acc_stderr": 0.030772653642075664, + "acc_norm": 0.5018867924528302, + "acc_norm_stderr": 0.030772653642075664 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5181818181818182, + "acc_stderr": 0.04785964010794916, + "acc_norm": 0.5181818181818182, + "acc_norm_stderr": 0.04785964010794916 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.28888888888888886, + "acc_stderr": 0.027634907264178544, + "acc_norm": 0.28888888888888886, + "acc_norm_stderr": 0.027634907264178544 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3509933774834437, + "acc_stderr": 0.03896981964257375, + "acc_norm": 0.3509933774834437, + "acc_norm_stderr": 0.03896981964257375 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5870646766169154, + "acc_stderr": 0.03481520803367348, + "acc_norm": 0.5870646766169154, + "acc_norm_stderr": 0.03481520803367348 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4277456647398844, + "acc_stderr": 0.03772446857518026, + "acc_norm": 0.4277456647398844, + "acc_norm_stderr": 0.03772446857518026 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.328042328042328, + "acc_stderr": 0.024180497164376896, + "acc_norm": 0.328042328042328, + "acc_norm_stderr": 0.024180497164376896 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4305555555555556, + "acc_stderr": 0.04140685639111503, + "acc_norm": 0.4305555555555556, + "acc_norm_stderr": 0.04140685639111503 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4913294797687861, + "acc_stderr": 0.0269150473553698, + "acc_norm": 
0.4913294797687861, + "acc_norm_stderr": 0.0269150473553698 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4785276073619632, + "acc_stderr": 0.0392474687675113, + "acc_norm": 0.4785276073619632, + "acc_norm_stderr": 0.0392474687675113 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.49691358024691357, + "acc_stderr": 0.027820214158594377, + "acc_norm": 0.49691358024691357, + "acc_norm_stderr": 0.027820214158594377 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5699481865284974, + "acc_stderr": 0.03572954333144808, + "acc_norm": 0.5699481865284974, + "acc_norm_stderr": 0.03572954333144808 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.35964912280701755, + "acc_stderr": 0.04514496132873633, + "acc_norm": 0.35964912280701755, + "acc_norm_stderr": 0.04514496132873633 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5944954128440367, + "acc_stderr": 0.02105099799189684, + "acc_norm": 0.5944954128440367, + "acc_norm_stderr": 0.02105099799189684 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.25396825396825395, + "acc_stderr": 0.03893259610604674, + "acc_norm": 0.25396825396825395, + "acc_norm_stderr": 0.03893259610604674 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.49019607843137253, + "acc_stderr": 0.028624412550167958, + "acc_norm": 0.49019607843137253, + "acc_norm_stderr": 0.028624412550167958 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6694214876033058, + "acc_stderr": 0.04294340845212093, + "acc_norm": 0.6694214876033058, + "acc_norm_stderr": 0.04294340845212093 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.48026315789473684, + "acc_stderr": 
0.04065771002562605, + "acc_norm": 0.48026315789473684, + "acc_norm_stderr": 0.04065771002562605 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4133986928104575, + "acc_stderr": 0.019922115682786682, + "acc_norm": 0.4133986928104575, + "acc_norm_stderr": 0.019922115682786682 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.31560283687943264, + "acc_stderr": 0.027724989449509314, + "acc_norm": 0.31560283687943264, + "acc_norm_stderr": 0.027724989449509314 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.33035714285714285, + "acc_stderr": 0.044642857142857116, + "acc_norm": 0.33035714285714285, + "acc_norm_stderr": 0.044642857142857116 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.32407407407407407, + "acc_stderr": 0.03191923445686186, + "acc_norm": 0.32407407407407407, + "acc_norm_stderr": 0.03191923445686186 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24134078212290502, + "acc_stderr": 0.014310999547961443, + "acc_norm": 0.24134078212290502, + "acc_norm_stderr": 0.014310999547961443 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956913, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956913 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4117647058823529, + "acc_stderr": 0.029896163033125478, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.029896163033125478 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.37551020408163266, + "acc_stderr": 0.031001209039894836, + "acc_norm": 0.37551020408163266, + "acc_norm_stderr": 0.031001209039894836 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6286919831223629, + "acc_stderr": 0.03145068600744859, + "acc_norm": 0.6286919831223629, + "acc_norm_stderr": 0.03145068600744859 + 
}, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3194263363754889, + "acc_stderr": 0.01190835717675616, + "acc_norm": 0.3194263363754889, + "acc_norm_stderr": 0.01190835717675616 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5392156862745098, + "acc_stderr": 0.03498501649369527, + "acc_norm": 0.5392156862745098, + "acc_norm_stderr": 0.03498501649369527 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5575757575757576, + "acc_stderr": 0.038783721137112745, + "acc_norm": 0.5575757575757576, + "acc_norm_stderr": 0.038783721137112745 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2802937576499388, + "mc1_stderr": 0.015723139524608742, + "mc2": 0.4320582855204373, + "mc2_stderr": 0.014839195488728087 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5076741440377804, + "acc_stderr": 0.017188329219654273, + "acc_norm": 0.564344746162928, + "acc_norm_stderr": 0.017047415229476316 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + 
"harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "jhflow/yi-ko-6b-lora-v1", + "model_sha": "d987b8419e44ab180e843b39fb75d24b2530ffd7", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git 
a/jieunhan/TEST_MODEL/result_2024-04-17 15:01:58.json b/jieunhan/TEST_MODEL/result_2024-04-17 15:01:58.json new file mode 100644 index 0000000000000000000000000000000000000000..cbceb127db90313ab9ca7a46d2778d7be7f8f5ba --- /dev/null +++ b/jieunhan/TEST_MODEL/result_2024-04-17 15:01:58.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4709897610921502, + "acc_stderr": 0.014586776355294314, + "acc_norm": 0.5324232081911263, + "acc_norm_stderr": 0.01458063756999543 + }, + "harness|ko_hellaswag|10": { + "acc": 0.463752240589524, + "acc_stderr": 0.00497665198975765, + "acc_norm": 0.6360286795459071, + "acc_norm_stderr": 0.004801572028920799 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.7134502923976608, + "acc_stderr": 0.03467826685703826, + "acc_norm": 0.7134502923976608, + "acc_norm_stderr": 0.03467826685703826 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6990291262135923, + "acc_stderr": 0.045416094465039476, + "acc_norm": 0.6990291262135923, + "acc_norm_stderr": 0.045416094465039476 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.722860791826309, + "acc_stderr": 0.016005636294122435, + "acc_norm": 0.722860791826309, + "acc_norm_stderr": 0.016005636294122435 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.043097329010363554, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.043097329010363554 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.5404255319148936, + "acc_stderr": 0.03257901482099834, + "acc_norm": 0.5404255319148936, + "acc_norm_stderr": 0.03257901482099834 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.5120481927710844, + "acc_stderr": 0.03891364495835817, + "acc_norm": 0.5120481927710844, + "acc_norm_stderr": 0.03891364495835817 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 
0.617363344051447, + "acc_stderr": 0.027604689028581986, + "acc_norm": 0.617363344051447, + "acc_norm_stderr": 0.027604689028581986 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5650224215246636, + "acc_stderr": 0.033272833702713445, + "acc_norm": 0.5650224215246636, + "acc_norm_stderr": 0.033272833702713445 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6259541984732825, + "acc_stderr": 0.04243869242230523, + "acc_norm": 0.6259541984732825, + "acc_norm_stderr": 0.04243869242230523 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7575757575757576, + "acc_stderr": 0.030532892233932046, + "acc_norm": 0.7575757575757576, + "acc_norm_stderr": 0.030532892233932046 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5379310344827586, + "acc_stderr": 0.04154659671707548, + "acc_norm": 0.5379310344827586, + "acc_norm_stderr": 0.04154659671707548 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3235294117647059, + "acc_stderr": 0.046550104113196156, + "acc_norm": 0.3235294117647059, + "acc_norm_stderr": 0.046550104113196156 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6512605042016807, + "acc_stderr": 0.030956636328566548, + "acc_norm": 0.6512605042016807, + "acc_norm_stderr": 0.030956636328566548 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.558974358974359, + "acc_stderr": 0.025174048384000718, + "acc_norm": 0.558974358974359, + "acc_norm_stderr": 0.025174048384000718 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.63, + "acc_stderr": 0.04852365870939098, + "acc_norm": 0.63, + "acc_norm_stderr": 0.04852365870939098 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_jurisprudence|5": { + 
"acc": 0.6388888888888888, + "acc_stderr": 0.04643454608906275, + "acc_norm": 0.6388888888888888, + "acc_norm_stderr": 0.04643454608906275 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4630541871921182, + "acc_stderr": 0.035083705204426656, + "acc_norm": 0.4630541871921182, + "acc_norm_stderr": 0.035083705204426656 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6516129032258065, + "acc_stderr": 0.027104826328100944, + "acc_norm": 0.6516129032258065, + "acc_norm_stderr": 0.027104826328100944 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.782051282051282, + "acc_stderr": 0.02704685763071666, + "acc_norm": 0.782051282051282, + "acc_norm_stderr": 0.02704685763071666 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5735849056603773, + "acc_stderr": 0.030437794342983052, + "acc_norm": 0.5735849056603773, + "acc_norm_stderr": 0.030437794342983052 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5909090909090909, + "acc_stderr": 0.04709306978661895, + "acc_norm": 0.5909090909090909, + "acc_norm_stderr": 0.04709306978661895 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.37407407407407406, + "acc_stderr": 0.02950286112895529, + "acc_norm": 0.37407407407407406, + "acc_norm_stderr": 0.02950286112895529 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3841059602649007, + "acc_stderr": 0.03971301814719198, + "acc_norm": 0.3841059602649007, + "acc_norm_stderr": 0.03971301814719198 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7263681592039801, + "acc_stderr": 0.03152439186555404, + "acc_norm": 0.7263681592039801, + "acc_norm_stderr": 0.03152439186555404 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5317919075144508, + "acc_stderr": 0.03804749744364764, + "acc_norm": 0.5317919075144508, + "acc_norm_stderr": 0.03804749744364764 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3862433862433862, + "acc_stderr": 0.025075981767601684, + "acc_norm": 0.3862433862433862, + 
"acc_norm_stderr": 0.025075981767601684 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5486111111111112, + "acc_stderr": 0.04161402398403279, + "acc_norm": 0.5486111111111112, + "acc_norm_stderr": 0.04161402398403279 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.79, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.79, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.6127167630057804, + "acc_stderr": 0.026226158605124655, + "acc_norm": 0.6127167630057804, + "acc_norm_stderr": 0.026226158605124655 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5705521472392638, + "acc_stderr": 0.03889066619112722, + "acc_norm": 0.5705521472392638, + "acc_norm_stderr": 0.03889066619112722 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6358024691358025, + "acc_stderr": 0.02677492989972232, + "acc_norm": 0.6358024691358025, + "acc_norm_stderr": 0.02677492989972232 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7253886010362695, + "acc_stderr": 0.03221024508041154, + "acc_norm": 0.7253886010362695, + "acc_norm_stderr": 0.03221024508041154 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.4473684210526316, + "acc_stderr": 0.04677473004491199, + "acc_norm": 0.4473684210526316, + "acc_norm_stderr": 0.04677473004491199 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.7155963302752294, + "acc_stderr": 0.0193420365877026, + "acc_norm": 0.7155963302752294, + "acc_norm_stderr": 0.0193420365877026 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.42063492063492064, + "acc_stderr": 0.04415438226743743, + "acc_norm": 
0.42063492063492064, + "acc_norm_stderr": 0.04415438226743743 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.6437908496732027, + "acc_stderr": 0.027420477662629245, + "acc_norm": 0.6437908496732027, + "acc_norm_stderr": 0.027420477662629245 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.59, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.59, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.71900826446281, + "acc_stderr": 0.04103203830514512, + "acc_norm": 0.71900826446281, + "acc_norm_stderr": 0.04103203830514512 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.631578947368421, + "acc_stderr": 0.03925523381052932, + "acc_norm": 0.631578947368421, + "acc_norm_stderr": 0.03925523381052932 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5065359477124183, + "acc_stderr": 0.020226106567657803, + "acc_norm": 0.5065359477124183, + "acc_norm_stderr": 0.020226106567657803 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3723404255319149, + "acc_stderr": 0.02883892147125146, + "acc_norm": 0.3723404255319149, + "acc_norm_stderr": 0.02883892147125146 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.39285714285714285, + "acc_stderr": 0.04635550135609976, + "acc_norm": 0.39285714285714285, + "acc_norm_stderr": 0.04635550135609976 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5324074074074074, + "acc_stderr": 0.03402801581358966, + "acc_norm": 0.5324074074074074, + "acc_norm_stderr": 0.03402801581358966 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.21564245810055865, + "acc_stderr": 0.01375483597548234, + "acc_norm": 0.21564245810055865, + "acc_norm_stderr": 0.01375483597548234 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.65, + "acc_stderr": 
0.0479372485441102, + "acc_norm": 0.65, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5294117647058824, + "acc_stderr": 0.03032024326500413, + "acc_norm": 0.5294117647058824, + "acc_norm_stderr": 0.03032024326500413 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.689795918367347, + "acc_stderr": 0.02961345987248438, + "acc_norm": 0.689795918367347, + "acc_norm_stderr": 0.02961345987248438 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7468354430379747, + "acc_stderr": 0.0283046579430353, + "acc_norm": 0.7468354430379747, + "acc_norm_stderr": 0.0283046579430353 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.41134289439374183, + "acc_stderr": 0.012567882673803682, + "acc_norm": 0.41134289439374183, + "acc_norm_stderr": 0.012567882673803682 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.7058823529411765, + "acc_stderr": 0.03198001660115071, + "acc_norm": 0.7058823529411765, + "acc_norm_stderr": 0.03198001660115071 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.7151515151515152, + "acc_stderr": 0.035243908445117815, + "acc_norm": 0.7151515151515152, + "acc_norm_stderr": 0.035243908445117815 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2876376988984088, + "mc1_stderr": 0.015846315101394812, + "mc2": 0.4519409957800782, + "mc2_stderr": 0.015416455478996633 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5950413223140496, + "acc_stderr": 0.016876941165045612, + "acc_norm": 0.6233766233766234, + "acc_norm_stderr": 0.01665879987405198 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + 
"harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 
1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "jieunhan/TEST_MODEL", + "model_sha": "648bad40edb235772c27d76c896f563f4f1b6eba", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/jieunhan/solar-merge-v1.0/result_2024-04-20 06:19:44.json b/jieunhan/solar-merge-v1.0/result_2024-04-20 06:19:44.json new file mode 100644 index 0000000000000000000000000000000000000000..5965a37af80998b92227c285c3be5f5d550b917c --- /dev/null +++ b/jieunhan/solar-merge-v1.0/result_2024-04-20 06:19:44.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.591296928327645, + "acc_stderr": 0.014365750345427006, + "acc_norm": 0.6646757679180887, + "acc_norm_stderr": 0.013796182947785562 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4012148974307907, + "acc_stderr": 0.004891426533390624, + "acc_norm": 0.5218084047002589, + "acc_norm_stderr": 0.004985032806802436 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5847953216374269, + "acc_stderr": 0.037792759455032014, + "acc_norm": 0.5847953216374269, + "acc_norm_stderr": 0.037792759455032014 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5631067961165048, + "acc_stderr": 0.049111471073657764, + "acc_norm": 0.5631067961165048, + "acc_norm_stderr": 0.049111471073657764 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.632183908045977, + "acc_stderr": 0.01724382889184629, + "acc_norm": 0.632183908045977, + "acc_norm_stderr": 0.01724382889184629 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4148148148148148, + "acc_stderr": 
0.04256193767901407, + "acc_norm": 0.4148148148148148, + "acc_norm_stderr": 0.04256193767901407 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.49361702127659574, + "acc_stderr": 0.03268335899936338, + "acc_norm": 0.49361702127659574, + "acc_norm_stderr": 0.03268335899936338 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.46987951807228917, + "acc_stderr": 0.03885425420866766, + "acc_norm": 0.46987951807228917, + "acc_norm_stderr": 0.03885425420866766 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6270096463022508, + "acc_stderr": 0.027466610213140095, + "acc_norm": 0.6270096463022508, + "acc_norm_stderr": 0.027466610213140095 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5739910313901345, + "acc_stderr": 0.033188332862172806, + "acc_norm": 0.5739910313901345, + "acc_norm_stderr": 0.033188332862172806 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5572519083969466, + "acc_stderr": 0.043564472026650695, + "acc_norm": 0.5572519083969466, + "acc_norm_stderr": 0.043564472026650695 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6212121212121212, + "acc_stderr": 0.03456088731993747, + "acc_norm": 0.6212121212121212, + "acc_norm_stderr": 0.03456088731993747 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4827586206896552, + "acc_stderr": 0.041641887201693775, + "acc_norm": 0.4827586206896552, + "acc_norm_stderr": 0.041641887201693775 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.04835503696107223, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.04835503696107223 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 
0.6386554621848739, + "acc_stderr": 0.031204691225150023, + "acc_norm": 0.6386554621848739, + "acc_norm_stderr": 0.031204691225150023 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5692307692307692, + "acc_stderr": 0.025106820660539757, + "acc_norm": 0.5692307692307692, + "acc_norm_stderr": 0.025106820660539757 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.59, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.59, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.41, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.41, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5740740740740741, + "acc_stderr": 0.0478034362693679, + "acc_norm": 0.5740740740740741, + "acc_norm_stderr": 0.0478034362693679 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4236453201970443, + "acc_stderr": 0.034767257476490364, + "acc_norm": 0.4236453201970443, + "acc_norm_stderr": 0.034767257476490364 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5580645161290323, + "acc_stderr": 0.02825155790684975, + "acc_norm": 0.5580645161290323, + "acc_norm_stderr": 0.02825155790684975 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7478632478632479, + "acc_stderr": 0.02844796547623101, + "acc_norm": 0.7478632478632479, + "acc_norm_stderr": 0.02844796547623101 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5169811320754717, + "acc_stderr": 0.030755120364119898, + "acc_norm": 0.5169811320754717, + "acc_norm_stderr": 0.030755120364119898 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5545454545454546, + "acc_stderr": 0.047605488214603246, + "acc_norm": 0.5545454545454546, + "acc_norm_stderr": 0.047605488214603246 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3592592592592593, + "acc_stderr": 0.02925290592725198, + "acc_norm": 0.3592592592592593, + "acc_norm_stderr": 0.02925290592725198 + }, + 
"harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3841059602649007, + "acc_stderr": 0.03971301814719198, + "acc_norm": 0.3841059602649007, + "acc_norm_stderr": 0.03971301814719198 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7512437810945274, + "acc_stderr": 0.03056767593891672, + "acc_norm": 0.7512437810945274, + "acc_norm_stderr": 0.03056767593891672 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4393063583815029, + "acc_stderr": 0.037842719328874674, + "acc_norm": 0.4393063583815029, + "acc_norm_stderr": 0.037842719328874674 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.02510742548113729, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.02510742548113729 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4652777777777778, + "acc_stderr": 0.04171115858181618, + "acc_norm": 0.4652777777777778, + "acc_norm_stderr": 0.04171115858181618 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.71, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.71, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.546242774566474, + "acc_stderr": 0.026803720583206177, + "acc_norm": 0.546242774566474, + "acc_norm_stderr": 0.026803720583206177 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5460122699386503, + "acc_stderr": 0.0391170190467718, + "acc_norm": 0.5460122699386503, + "acc_norm_stderr": 0.0391170190467718 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5061728395061729, + "acc_stderr": 0.02781862396258329, + "acc_norm": 0.5061728395061729, + "acc_norm_stderr": 0.02781862396258329 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + 
"harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6787564766839378, + "acc_stderr": 0.033699508685490674, + "acc_norm": 0.6787564766839378, + "acc_norm_stderr": 0.033699508685490674 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.40350877192982454, + "acc_stderr": 0.04615186962583704, + "acc_norm": 0.40350877192982454, + "acc_norm_stderr": 0.04615186962583704 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6330275229357798, + "acc_stderr": 0.020664675659520522, + "acc_norm": 0.6330275229357798, + "acc_norm_stderr": 0.020664675659520522 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.47619047619047616, + "acc_stderr": 0.04467062628403273, + "acc_norm": 0.47619047619047616, + "acc_norm_stderr": 0.04467062628403273 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5849673202614379, + "acc_stderr": 0.0282135041778241, + "acc_norm": 0.5849673202614379, + "acc_norm_stderr": 0.0282135041778241 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.63, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.63, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6694214876033058, + "acc_stderr": 0.04294340845212092, + "acc_norm": 0.6694214876033058, + "acc_norm_stderr": 0.04294340845212092 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5328947368421053, + "acc_stderr": 0.040601270352363966, + "acc_norm": 0.5328947368421053, + "acc_norm_stderr": 0.040601270352363966 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5081699346405228, + "acc_stderr": 0.02022513434305726, + "acc_norm": 0.5081699346405228, + "acc_norm_stderr": 0.02022513434305726 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.40070921985815605, + "acc_stderr": 0.029233465745573086, + "acc_norm": 0.40070921985815605, + "acc_norm_stderr": 0.029233465745573086 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.49107142857142855, + "acc_stderr": 0.04745033255489123, + "acc_norm": 
0.49107142857142855, + "acc_norm_stderr": 0.04745033255489123 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5324074074074074, + "acc_stderr": 0.03402801581358966, + "acc_norm": 0.5324074074074074, + "acc_norm_stderr": 0.03402801581358966 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.28044692737430166, + "acc_stderr": 0.01502408388332289, + "acc_norm": 0.28044692737430166, + "acc_norm_stderr": 0.01502408388332289 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.72, + "acc_stderr": 0.045126085985421296, + "acc_norm": 0.72, + "acc_norm_stderr": 0.045126085985421296 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5330882352941176, + "acc_stderr": 0.030306257722468317, + "acc_norm": 0.5330882352941176, + "acc_norm_stderr": 0.030306257722468317 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6489795918367347, + "acc_stderr": 0.030555316755573644, + "acc_norm": 0.6489795918367347, + "acc_norm_stderr": 0.030555316755573644 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6329113924050633, + "acc_stderr": 0.031376240725616185, + "acc_norm": 0.6329113924050633, + "acc_norm_stderr": 0.031376240725616185 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.41199478487614083, + "acc_stderr": 0.012570871032146063, + "acc_norm": 0.41199478487614083, + "acc_norm_stderr": 0.012570871032146063 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5686274509803921, + "acc_stderr": 0.03476099060501636, + "acc_norm": 0.5686274509803921, + "acc_norm_stderr": 0.03476099060501636 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6060606060606061, + "acc_stderr": 0.03815494308688931, + "acc_norm": 0.6060606060606061, + "acc_norm_stderr": 0.03815494308688931 + }, + "harness|ko_truthfulqa_mc|0": { + 
"mc1": 0.5312117503059975, + "mc1_stderr": 0.017469364874577547, + "mc2": 0.6416511208946742, + "mc2_stderr": 0.015366606949742696 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.448642266824085, + "acc_stderr": 0.017099430514725792, + "acc_norm": 0.5726092089728453, + "acc_norm_stderr": 0.017008129844823156 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "jieunhan/solar-merge-v1.0", + "model_sha": "88bd309c18b4a5083967c3bbd3333a30eaa9c494", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/jieunhan/solar_merge_test_1-1/result_2024-04-23 01:16:40.json b/jieunhan/solar_merge_test_1-1/result_2024-04-23 01:16:40.json new file mode 100644 index 0000000000000000000000000000000000000000..fc9b96c588643702b9c90abc372dbda62a15c8e0 --- /dev/null +++ b/jieunhan/solar_merge_test_1-1/result_2024-04-23 01:16:40.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4513651877133106, + "acc_stderr": 0.014542104569955264, + "acc_norm": 0.5110921501706485, + "acc_norm_stderr": 0.014607794914013053 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4462258514240191, + "acc_stderr": 
0.004960839986099527, + "acc_norm": 0.6097390957976498, + "acc_norm_stderr": 0.0048681175984819445 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6432748538011696, + "acc_stderr": 0.03674013002860954, + "acc_norm": 0.6432748538011696, + "acc_norm_stderr": 0.03674013002860954 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6699029126213593, + "acc_stderr": 0.0465614711001235, + "acc_norm": 0.6699029126213593, + "acc_norm_stderr": 0.0465614711001235 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6947637292464879, + "acc_stderr": 0.01646771194763513, + "acc_norm": 0.6947637292464879, + "acc_norm_stderr": 0.01646771194763513 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.043163785995113245, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.043163785995113245 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.48936170212765956, + "acc_stderr": 0.03267862331014063, + "acc_norm": 0.48936170212765956, + "acc_norm_stderr": 0.03267862331014063 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.463855421686747, + "acc_stderr": 0.038823108508905954, + "acc_norm": 0.463855421686747, + "acc_norm_stderr": 0.038823108508905954 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5787781350482315, + "acc_stderr": 0.028043399858210628, + "acc_norm": 0.5787781350482315, + "acc_norm_stderr": 0.028043399858210628 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5695067264573991, + "acc_stderr": 0.033231973029429394, + "acc_norm": 0.5695067264573991, + "acc_norm_stderr": 0.033231973029429394 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5648854961832062, + "acc_stderr": 0.04348208051644858, + "acc_norm": 0.5648854961832062, + "acc_norm_stderr": 0.04348208051644858 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.46, + 
"acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6717171717171717, + "acc_stderr": 0.03345678422756777, + "acc_norm": 0.6717171717171717, + "acc_norm_stderr": 0.03345678422756777 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4896551724137931, + "acc_stderr": 0.04165774775728763, + "acc_norm": 0.4896551724137931, + "acc_norm_stderr": 0.04165774775728763 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.04280105837364395, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.04280105837364395 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.47478991596638653, + "acc_stderr": 0.03243718055137411, + "acc_norm": 0.47478991596638653, + "acc_norm_stderr": 0.03243718055137411 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5435897435897435, + "acc_stderr": 0.025254485424799595, + "acc_norm": 0.5435897435897435, + "acc_norm_stderr": 0.025254485424799595 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.63, + "acc_stderr": 0.04852365870939098, + "acc_norm": 0.63, + "acc_norm_stderr": 0.04852365870939098 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5740740740740741, + "acc_stderr": 0.0478034362693679, + "acc_norm": 0.5740740740740741, + "acc_norm_stderr": 0.0478034362693679 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3793103448275862, + "acc_stderr": 0.03413963805906235, + "acc_norm": 0.3793103448275862, + "acc_norm_stderr": 0.03413963805906235 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.603225806451613, + "acc_stderr": 0.027831231605767958, + "acc_norm": 0.603225806451613, + "acc_norm_stderr": 0.027831231605767958 + }, + "harness|ko_mmlu_marketing|5": { + 
"acc": 0.7692307692307693, + "acc_stderr": 0.027601921381417614, + "acc_norm": 0.7692307692307693, + "acc_norm_stderr": 0.027601921381417614 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5283018867924528, + "acc_stderr": 0.0307235352490061, + "acc_norm": 0.5283018867924528, + "acc_norm_stderr": 0.0307235352490061 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5727272727272728, + "acc_stderr": 0.047381987035454834, + "acc_norm": 0.5727272727272728, + "acc_norm_stderr": 0.047381987035454834 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3074074074074074, + "acc_stderr": 0.028133252578815646, + "acc_norm": 0.3074074074074074, + "acc_norm_stderr": 0.028133252578815646 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.32450331125827814, + "acc_stderr": 0.03822746937658752, + "acc_norm": 0.32450331125827814, + "acc_norm_stderr": 0.03822746937658752 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6915422885572139, + "acc_stderr": 0.032658195885126966, + "acc_norm": 0.6915422885572139, + "acc_norm_stderr": 0.032658195885126966 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4277456647398844, + "acc_stderr": 0.03772446857518027, + "acc_norm": 0.4277456647398844, + "acc_norm_stderr": 0.03772446857518027 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3253968253968254, + "acc_stderr": 0.024130158299762602, + "acc_norm": 0.3253968253968254, + "acc_norm_stderr": 0.024130158299762602 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4791666666666667, + "acc_stderr": 0.041775789507399935, + "acc_norm": 0.4791666666666667, + "acc_norm_stderr": 0.041775789507399935 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.66, + "acc_stderr": 0.04760952285695238, + "acc_norm": 0.66, + "acc_norm_stderr": 0.04760952285695238 + }, + 
"harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5433526011560693, + "acc_stderr": 0.026817718130348927, + "acc_norm": 0.5433526011560693, + "acc_norm_stderr": 0.026817718130348927 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.50920245398773, + "acc_stderr": 0.03927705600787443, + "acc_norm": 0.50920245398773, + "acc_norm_stderr": 0.03927705600787443 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5339506172839507, + "acc_stderr": 0.027756535257347666, + "acc_norm": 0.5339506172839507, + "acc_norm_stderr": 0.027756535257347666 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6424870466321243, + "acc_stderr": 0.034588160421810114, + "acc_norm": 0.6424870466321243, + "acc_norm_stderr": 0.034588160421810114 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3508771929824561, + "acc_stderr": 0.044895393502706986, + "acc_norm": 0.3508771929824561, + "acc_norm_stderr": 0.044895393502706986 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6311926605504588, + "acc_stderr": 0.02068622756072954, + "acc_norm": 0.6311926605504588, + "acc_norm_stderr": 0.02068622756072954 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04216370213557835, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04216370213557835 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5196078431372549, + "acc_stderr": 0.028607893699576063, + "acc_norm": 0.5196078431372549, + "acc_norm_stderr": 0.028607893699576063 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.52, + "acc_stderr": 0.05021167315686779, + "acc_norm": 0.52, + "acc_norm_stderr": 0.05021167315686779 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6942148760330579, + "acc_stderr": 0.04205953933884122, + "acc_norm": 0.6942148760330579, + "acc_norm_stderr": 
0.04205953933884122 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5460526315789473, + "acc_stderr": 0.04051646342874143, + "acc_norm": 0.5460526315789473, + "acc_norm_stderr": 0.04051646342874143 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.45751633986928103, + "acc_stderr": 0.020154685712590898, + "acc_norm": 0.45751633986928103, + "acc_norm_stderr": 0.020154685712590898 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3617021276595745, + "acc_stderr": 0.028663820147199495, + "acc_norm": 0.3617021276595745, + "acc_norm_stderr": 0.028663820147199495 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.33035714285714285, + "acc_stderr": 0.04464285714285713, + "acc_norm": 0.33035714285714285, + "acc_norm_stderr": 0.04464285714285713 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.37962962962962965, + "acc_stderr": 0.03309682581119035, + "acc_norm": 0.37962962962962965, + "acc_norm_stderr": 0.03309682581119035 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2636871508379888, + "acc_stderr": 0.014736926383761974, + "acc_norm": 0.2636871508379888, + "acc_norm_stderr": 0.014736926383761974 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562429, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562429 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.41911764705882354, + "acc_stderr": 0.029972807170464626, + "acc_norm": 0.41911764705882354, + "acc_norm_stderr": 0.029972807170464626 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5387755102040817, + "acc_stderr": 0.03191282052669279, + "acc_norm": 0.5387755102040817, + "acc_norm_stderr": 0.03191282052669279 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7215189873417721, + "acc_stderr": 
0.029178682304842548, + "acc_norm": 0.7215189873417721, + "acc_norm_stderr": 0.029178682304842548 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3533246414602347, + "acc_stderr": 0.012208408211082428, + "acc_norm": 0.3533246414602347, + "acc_norm_stderr": 0.012208408211082428 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6225490196078431, + "acc_stderr": 0.03402272044340703, + "acc_norm": 0.6225490196078431, + "acc_norm_stderr": 0.03402272044340703 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6303030303030303, + "acc_stderr": 0.03769430314512568, + "acc_norm": 0.6303030303030303, + "acc_norm_stderr": 0.03769430314512568 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.32313341493268055, + "mc1_stderr": 0.016371836286454607, + "mc2": 0.4906161022311745, + "mc2_stderr": 0.015269275021545079 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.6233766233766234, + "acc_stderr": 0.016658799874051982, + "acc_norm": 0.6481700118063755, + "acc_norm_stderr": 0.016418206451218054 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + 
"harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "jieunhan/solar_merge_test_1-1", + "model_sha": "aeee795c1ceecafc38153b00ea926ed3a77a4545", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + 
"override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/jieunhan/solar_merge_test_1/result_2024-04-21 05:19:06.json b/jieunhan/solar_merge_test_1/result_2024-04-21 05:19:06.json new file mode 100644 index 0000000000000000000000000000000000000000..5f4df3046cd8eec040c056372b84c17c36672a90 --- /dev/null +++ b/jieunhan/solar_merge_test_1/result_2024-04-21 05:19:06.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.45051194539249145, + "acc_stderr": 0.014539646098471627, + "acc_norm": 0.5102389078498294, + "acc_norm_stderr": 0.014608326906285015 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4462258514240191, + "acc_stderr": 0.0049608399860995266, + "acc_norm": 0.6096395140410277, + "acc_norm_stderr": 0.00486834105656622 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6374269005847953, + "acc_stderr": 0.036871306155620606, + "acc_norm": 0.6374269005847953, + "acc_norm_stderr": 0.036871306155620606 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6699029126213593, + "acc_stderr": 0.0465614711001235, + "acc_norm": 0.6699029126213593, + "acc_norm_stderr": 0.0465614711001235 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6960408684546615, + "acc_stderr": 0.01644832168676904, + "acc_norm": 0.6960408684546615, + "acc_norm_stderr": 0.01644832168676904 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.043163785995113245, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.043163785995113245 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4851063829787234, + "acc_stderr": 0.032671518489247764, + "acc_norm": 0.4851063829787234, + "acc_norm_stderr": 0.032671518489247764 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.463855421686747, + "acc_stderr": 0.038823108508905954, + 
"acc_norm": 0.463855421686747, + "acc_norm_stderr": 0.038823108508905954 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5787781350482315, + "acc_stderr": 0.028043399858210628, + "acc_norm": 0.5787781350482315, + "acc_norm_stderr": 0.028043399858210628 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5695067264573991, + "acc_stderr": 0.033231973029429394, + "acc_norm": 0.5695067264573991, + "acc_norm_stderr": 0.033231973029429394 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.549618320610687, + "acc_stderr": 0.04363643698524779, + "acc_norm": 0.549618320610687, + "acc_norm_stderr": 0.04363643698524779 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6717171717171717, + "acc_stderr": 0.03345678422756777, + "acc_norm": 0.6717171717171717, + "acc_norm_stderr": 0.03345678422756777 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4896551724137931, + "acc_stderr": 0.04165774775728763, + "acc_norm": 0.4896551724137931, + "acc_norm_stderr": 0.04165774775728763 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.04280105837364395, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.04280105837364395 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.46638655462184875, + "acc_stderr": 0.03240501447690071, + "acc_norm": 0.46638655462184875, + "acc_norm_stderr": 0.03240501447690071 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5461538461538461, + "acc_stderr": 0.025242770987126194, + "acc_norm": 0.5461538461538461, + "acc_norm_stderr": 0.025242770987126194 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.63, + "acc_stderr": 0.04852365870939098, + "acc_norm": 0.63, + "acc_norm_stderr": 0.04852365870939098 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 
0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5925925925925926, + "acc_stderr": 0.04750077341199984, + "acc_norm": 0.5925925925925926, + "acc_norm_stderr": 0.04750077341199984 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3842364532019704, + "acc_stderr": 0.0342239856565755, + "acc_norm": 0.3842364532019704, + "acc_norm_stderr": 0.0342239856565755 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6, + "acc_stderr": 0.027869320571664625, + "acc_norm": 0.6, + "acc_norm_stderr": 0.027869320571664625 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7649572649572649, + "acc_stderr": 0.027778835904935427, + "acc_norm": 0.7649572649572649, + "acc_norm_stderr": 0.027778835904935427 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5320754716981132, + "acc_stderr": 0.030709486992556545, + "acc_norm": 0.5320754716981132, + "acc_norm_stderr": 0.030709486992556545 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.04769300568972743, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.04769300568972743 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3074074074074074, + "acc_stderr": 0.028133252578815646, + "acc_norm": 0.3074074074074074, + "acc_norm_stderr": 0.028133252578815646 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.32450331125827814, + "acc_stderr": 0.03822746937658752, + "acc_norm": 0.32450331125827814, + "acc_norm_stderr": 0.03822746937658752 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6915422885572139, + "acc_stderr": 0.03265819588512696, + "acc_norm": 0.6915422885572139, + "acc_norm_stderr": 0.03265819588512696 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.43352601156069365, + "acc_stderr": 0.037786210790920545, + "acc_norm": 0.43352601156069365, + "acc_norm_stderr": 0.037786210790920545 + }, + "harness|ko_mmlu_elementary_mathematics|5": 
{ + "acc": 0.335978835978836, + "acc_stderr": 0.024326310529149135, + "acc_norm": 0.335978835978836, + "acc_norm_stderr": 0.024326310529149135 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4861111111111111, + "acc_stderr": 0.04179596617581, + "acc_norm": 0.4861111111111111, + "acc_norm_stderr": 0.04179596617581 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.65, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.65, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5317919075144508, + "acc_stderr": 0.026864624366756643, + "acc_norm": 0.5317919075144508, + "acc_norm_stderr": 0.026864624366756643 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5214723926380368, + "acc_stderr": 0.03924746876751129, + "acc_norm": 0.5214723926380368, + "acc_norm_stderr": 0.03924746876751129 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5308641975308642, + "acc_stderr": 0.027767689606833932, + "acc_norm": 0.5308641975308642, + "acc_norm_stderr": 0.027767689606833932 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6424870466321243, + "acc_stderr": 0.034588160421810114, + "acc_norm": 0.6424870466321243, + "acc_norm_stderr": 0.034588160421810114 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3684210526315789, + "acc_stderr": 0.04537815354939391, + "acc_norm": 0.3684210526315789, + "acc_norm_stderr": 0.04537815354939391 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.634862385321101, + "acc_stderr": 0.020642801454383998, + "acc_norm": 0.634862385321101, + "acc_norm_stderr": 0.020642801454383998 + }, + "harness|ko_mmlu_formal_logic|5": { + 
"acc": 0.3253968253968254, + "acc_stderr": 0.041905964388711366, + "acc_norm": 0.3253968253968254, + "acc_norm_stderr": 0.041905964388711366 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5228758169934641, + "acc_stderr": 0.028599936776089786, + "acc_norm": 0.5228758169934641, + "acc_norm_stderr": 0.028599936776089786 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.52, + "acc_stderr": 0.05021167315686779, + "acc_norm": 0.52, + "acc_norm_stderr": 0.05021167315686779 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6942148760330579, + "acc_stderr": 0.04205953933884122, + "acc_norm": 0.6942148760330579, + "acc_norm_stderr": 0.04205953933884122 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5526315789473685, + "acc_stderr": 0.04046336883978251, + "acc_norm": 0.5526315789473685, + "acc_norm_stderr": 0.04046336883978251 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.46078431372549017, + "acc_stderr": 0.020165523313907894, + "acc_norm": 0.46078431372549017, + "acc_norm_stderr": 0.020165523313907894 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.36879432624113473, + "acc_stderr": 0.028782227561347247, + "acc_norm": 0.36879432624113473, + "acc_norm_stderr": 0.028782227561347247 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.32142857142857145, + "acc_stderr": 0.04432804055291519, + "acc_norm": 0.32142857142857145, + "acc_norm_stderr": 0.04432804055291519 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.36574074074074076, + "acc_stderr": 0.032847388576472056, + "acc_norm": 0.36574074074074076, + "acc_norm_stderr": 0.032847388576472056 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.26256983240223464, + "acc_stderr": 0.014716824273017756, + "acc_norm": 0.26256983240223464, + "acc_norm_stderr": 0.014716824273017756 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.0498887651569859, + "acc_norm": 0.44, + "acc_norm_stderr": 0.0498887651569859 + 
}, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.41911764705882354, + "acc_stderr": 0.029972807170464626, + "acc_norm": 0.41911764705882354, + "acc_norm_stderr": 0.029972807170464626 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5428571428571428, + "acc_stderr": 0.03189141832421395, + "acc_norm": 0.5428571428571428, + "acc_norm_stderr": 0.03189141832421395 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.729957805907173, + "acc_stderr": 0.028900721906293426, + "acc_norm": 0.729957805907173, + "acc_norm_stderr": 0.028900721906293426 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3500651890482399, + "acc_stderr": 0.012182552313215177, + "acc_norm": 0.3500651890482399, + "acc_norm_stderr": 0.012182552313215177 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6176470588235294, + "acc_stderr": 0.0341078533890472, + "acc_norm": 0.6176470588235294, + "acc_norm_stderr": 0.0341078533890472 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6303030303030303, + "acc_stderr": 0.03769430314512568, + "acc_norm": 0.6303030303030303, + "acc_norm_stderr": 0.03769430314512568 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.31946144430844553, + "mc1_stderr": 0.0163226441829605, + "mc2": 0.48741596883652144, + "mc2_stderr": 0.015258517557454455 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.6186540731995277, + "acc_stderr": 0.016699301768828074, + "acc_norm": 0.6458087367178277, + "acc_norm_stderr": 0.01644317574921476 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + 
"harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + 
"harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "jieunhan/solar_merge_test_1", + "model_sha": "b73a2285620305998417c57e5c484b6452bedaab", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/jieunhan/solar_merge_test_3/result_2024-04-23 04:36:33.json b/jieunhan/solar_merge_test_3/result_2024-04-23 04:36:33.json new file mode 100644 index 0000000000000000000000000000000000000000..766fbaf35fedd37c5ea1e3aaa868524ace9ac933 --- /dev/null +++ b/jieunhan/solar_merge_test_3/result_2024-04-23 04:36:33.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4616040955631399, + "acc_stderr": 0.01456824555029636, + "acc_norm": 0.5255972696245734, + "acc_norm_stderr": 0.014592230885298967 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4691296554471221, + "acc_stderr": 0.004980262025472485, + "acc_norm": 0.6399123680541725, + "acc_norm_stderr": 0.004790445139186363 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6549707602339181, + "acc_stderr": 0.036459813773888065, + "acc_norm": 0.6549707602339181, + "acc_norm_stderr": 0.036459813773888065 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6504854368932039, + "acc_stderr": 0.047211885060971716, + "acc_norm": 0.6504854368932039, + "acc_norm_stderr": 0.047211885060971716 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6768837803320562, + "acc_stderr": 0.016723726512343044, + "acc_norm": 0.6768837803320562, + "acc_norm_stderr": 
0.016723726512343044 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.043163785995113245, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.043163785995113245 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.5063829787234042, + "acc_stderr": 0.03268335899936335, + "acc_norm": 0.5063829787234042, + "acc_norm_stderr": 0.03268335899936335 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.5, + "acc_stderr": 0.03892494720807614, + "acc_norm": 0.5, + "acc_norm_stderr": 0.03892494720807614 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6270096463022508, + "acc_stderr": 0.0274666102131401, + "acc_norm": 0.6270096463022508, + "acc_norm_stderr": 0.0274666102131401 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.6322869955156951, + "acc_stderr": 0.03236198350928275, + "acc_norm": 0.6322869955156951, + "acc_norm_stderr": 0.03236198350928275 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.648854961832061, + "acc_stderr": 0.0418644516301375, + "acc_norm": 0.648854961832061, + "acc_norm_stderr": 0.0418644516301375 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7272727272727273, + "acc_stderr": 0.031730712390717244, + "acc_norm": 0.7272727272727273, + "acc_norm_stderr": 0.031730712390717244 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5241379310344828, + "acc_stderr": 0.041618085035015295, + "acc_norm": 0.5241379310344828, + "acc_norm_stderr": 0.041618085035015295 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.30392156862745096, + "acc_stderr": 0.045766654032077615, + "acc_norm": 0.30392156862745096, + "acc_norm_stderr": 0.045766654032077615 + }, + 
"harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6092436974789915, + "acc_stderr": 0.031693802357129965, + "acc_norm": 0.6092436974789915, + "acc_norm_stderr": 0.031693802357129965 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5230769230769231, + "acc_stderr": 0.02532399086173626, + "acc_norm": 0.5230769230769231, + "acc_norm_stderr": 0.02532399086173626 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.62, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.62, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.04557239513497751, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.04557239513497751 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.034819048444388045, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.034819048444388045 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6, + "acc_stderr": 0.02786932057166462, + "acc_norm": 0.6, + "acc_norm_stderr": 0.02786932057166462 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.8076923076923077, + "acc_stderr": 0.025819233256483706, + "acc_norm": 0.8076923076923077, + "acc_norm_stderr": 0.025819233256483706 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5622641509433962, + "acc_stderr": 0.030533338430467516, + "acc_norm": 0.5622641509433962, + "acc_norm_stderr": 0.030533338430467516 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6, + "acc_stderr": 0.0469237132203465, + "acc_norm": 0.6, + "acc_norm_stderr": 0.0469237132203465 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3111111111111111, + "acc_stderr": 0.02822644674968352, + "acc_norm": 0.3111111111111111, + "acc_norm_stderr": 0.02822644674968352 + }, + 
"harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3576158940397351, + "acc_stderr": 0.03913453431177258, + "acc_norm": 0.3576158940397351, + "acc_norm_stderr": 0.03913453431177258 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7562189054726368, + "acc_stderr": 0.03036049015401465, + "acc_norm": 0.7562189054726368, + "acc_norm_stderr": 0.03036049015401465 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4913294797687861, + "acc_stderr": 0.03811890988940412, + "acc_norm": 0.4913294797687861, + "acc_norm_stderr": 0.03811890988940412 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.4126984126984127, + "acc_stderr": 0.025355741263055256, + "acc_norm": 0.4126984126984127, + "acc_norm_stderr": 0.025355741263055256 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5625, + "acc_stderr": 0.04148415739394154, + "acc_norm": 0.5625, + "acc_norm_stderr": 0.04148415739394154 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.77, + "acc_stderr": 0.04229525846816508, + "acc_norm": 0.77, + "acc_norm_stderr": 0.04229525846816508 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.6329479768786127, + "acc_stderr": 0.02595005433765407, + "acc_norm": 0.6329479768786127, + "acc_norm_stderr": 0.02595005433765407 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5398773006134969, + "acc_stderr": 0.03915857291436972, + "acc_norm": 0.5398773006134969, + "acc_norm_stderr": 0.03915857291436972 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6265432098765432, + "acc_stderr": 0.026915003011380154, + "acc_norm": 0.6265432098765432, + "acc_norm_stderr": 0.026915003011380154 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + 
"harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.689119170984456, + "acc_stderr": 0.03340361906276586, + "acc_norm": 0.689119170984456, + "acc_norm_stderr": 0.03340361906276586 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.4298245614035088, + "acc_stderr": 0.04657047260594964, + "acc_norm": 0.4298245614035088, + "acc_norm_stderr": 0.04657047260594964 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.673394495412844, + "acc_stderr": 0.020106990889937303, + "acc_norm": 0.673394495412844, + "acc_norm_stderr": 0.020106990889937303 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3968253968253968, + "acc_stderr": 0.04375888492727061, + "acc_norm": 0.3968253968253968, + "acc_norm_stderr": 0.04375888492727061 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.6013071895424836, + "acc_stderr": 0.028036092273891765, + "acc_norm": 0.6013071895424836, + "acc_norm_stderr": 0.028036092273891765 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.63, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.63, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7272727272727273, + "acc_stderr": 0.04065578140908705, + "acc_norm": 0.7272727272727273, + "acc_norm_stderr": 0.04065578140908705 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5789473684210527, + "acc_stderr": 0.040179012759817494, + "acc_norm": 0.5789473684210527, + "acc_norm_stderr": 0.040179012759817494 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5228758169934641, + "acc_stderr": 0.020206653187884786, + "acc_norm": 0.5228758169934641, + "acc_norm_stderr": 0.020206653187884786 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3971631205673759, + "acc_stderr": 0.029189805673587095, + "acc_norm": 0.3971631205673759, + "acc_norm_stderr": 0.029189805673587095 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.38392857142857145, + "acc_stderr": 0.046161430750285455, + "acc_norm": 
0.38392857142857145, + "acc_norm_stderr": 0.046161430750285455 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4861111111111111, + "acc_stderr": 0.034086558679777494, + "acc_norm": 0.4861111111111111, + "acc_norm_stderr": 0.034086558679777494 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.22793296089385476, + "acc_stderr": 0.014030149950805097, + "acc_norm": 0.22793296089385476, + "acc_norm_stderr": 0.014030149950805097 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.71, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.71, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4852941176470588, + "acc_stderr": 0.03035969707904612, + "acc_norm": 0.4852941176470588, + "acc_norm_stderr": 0.03035969707904612 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6816326530612244, + "acc_stderr": 0.02982253379398204, + "acc_norm": 0.6816326530612244, + "acc_norm_stderr": 0.02982253379398204 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7510548523206751, + "acc_stderr": 0.028146970599422644, + "acc_norm": 0.7510548523206751, + "acc_norm_stderr": 0.028146970599422644 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3970013037809648, + "acc_stderr": 0.012496346982909554, + "acc_norm": 0.3970013037809648, + "acc_norm_stderr": 0.012496346982909554 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.7450980392156863, + "acc_stderr": 0.030587591351604236, + "acc_norm": 0.7450980392156863, + "acc_norm_stderr": 0.030587591351604236 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.7151515151515152, + "acc_stderr": 0.0352439084451178, + "acc_norm": 0.7151515151515152, + "acc_norm_stderr": 0.0352439084451178 + }, + "harness|ko_truthfulqa_mc|0": { + 
"mc1": 0.32558139534883723, + "mc1_stderr": 0.016403989469907815, + "mc2": 0.4873577873695966, + "mc2_stderr": 0.015413866500140939 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5796930342384888, + "acc_stderr": 0.01697059828117771, + "acc_norm": 0.602125147579693, + "acc_norm_stderr": 0.016827959054733395 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "jieunhan/solar_merge_test_3", + "model_sha": "6b271d2c43350ef0e8b786f7f2aa6f9e6c68a303", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/jin05102518/Astral-7B-0.5Epoch-Test/result_2023-11-03 02:11:10.json b/jin05102518/Astral-7B-0.5Epoch-Test/result_2023-11-03 02:11:10.json new file mode 100644 index 0000000000000000000000000000000000000000..815d14e7d4f2470144c0e99800a505add14dd174 --- /dev/null +++ b/jin05102518/Astral-7B-0.5Epoch-Test/result_2023-11-03 02:11:10.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.21416382252559726, + "acc_stderr": 0.011988383205966482, + "acc_norm": 0.25170648464163825, + "acc_norm_stderr": 0.01268249633404296 + }, + "harness|ko_hellaswag|10": { + "acc": 
0.273451503684525, + "acc_stderr": 0.004448196648383001, + "acc_norm": 0.29635530770762797, + "acc_norm_stderr": 0.004557163175885562 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.3391812865497076, + "acc_stderr": 0.036310534964889056, + "acc_norm": 0.3391812865497076, + "acc_norm_stderr": 0.036310534964889056 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2912621359223301, + "acc_stderr": 0.04498676320572924, + "acc_norm": 0.2912621359223301, + "acc_norm_stderr": 0.04498676320572924 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2720306513409962, + "acc_stderr": 0.01591336744750051, + "acc_norm": 0.2720306513409962, + "acc_norm_stderr": 0.01591336744750051 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.23703703703703705, + "acc_stderr": 0.03673731683969506, + "acc_norm": 0.23703703703703705, + "acc_norm_stderr": 0.03673731683969506 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.28085106382978725, + "acc_stderr": 0.02937917046412481, + "acc_norm": 0.28085106382978725, + "acc_norm_stderr": 0.02937917046412481 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.23493975903614459, + "acc_stderr": 0.03300533186128922, + "acc_norm": 0.23493975903614459, + "acc_norm_stderr": 0.03300533186128922 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2347266881028939, + "acc_stderr": 0.024071805887677045, + "acc_norm": 0.2347266881028939, + "acc_norm_stderr": 0.024071805887677045 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.30493273542600896, + "acc_stderr": 0.03089861088247751, + "acc_norm": 0.30493273542600896, + "acc_norm_stderr": 0.03089861088247751 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2824427480916031, + "acc_stderr": 0.03948406125768361, + "acc_norm": 0.2824427480916031, + "acc_norm_stderr": 0.03948406125768361 + }, + 
"harness|ko_mmlu_medical_genetics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.20707070707070707, + "acc_stderr": 0.028869778460267042, + "acc_norm": 0.20707070707070707, + "acc_norm_stderr": 0.028869778460267042 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.30344827586206896, + "acc_stderr": 0.038312260488503336, + "acc_norm": 0.30344827586206896, + "acc_norm_stderr": 0.038312260488503336 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.04158307533083286, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.04158307533083286 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3277310924369748, + "acc_stderr": 0.03048991141767323, + "acc_norm": 0.3277310924369748, + "acc_norm_stderr": 0.03048991141767323 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2794871794871795, + "acc_stderr": 0.022752388839776823, + "acc_norm": 0.2794871794871795, + "acc_norm_stderr": 0.022752388839776823 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939098, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939098 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.04489931073591311, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.04489931073591311 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.26108374384236455, + "acc_stderr": 0.030903796952114482, + "acc_norm": 0.26108374384236455, + "acc_norm_stderr": 0.030903796952114482 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3096774193548387, + "acc_stderr": 0.026302774983517418, + "acc_norm": 0.3096774193548387, + 
"acc_norm_stderr": 0.026302774983517418 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.36752136752136755, + "acc_stderr": 0.03158539157745635, + "acc_norm": 0.36752136752136755, + "acc_norm_stderr": 0.03158539157745635 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.26037735849056604, + "acc_stderr": 0.027008766090708083, + "acc_norm": 0.26037735849056604, + "acc_norm_stderr": 0.027008766090708083 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2818181818181818, + "acc_stderr": 0.04309118709946459, + "acc_norm": 0.2818181818181818, + "acc_norm_stderr": 0.04309118709946459 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25555555555555554, + "acc_stderr": 0.026593939101844072, + "acc_norm": 0.25555555555555554, + "acc_norm_stderr": 0.026593939101844072 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2582781456953642, + "acc_stderr": 0.035737053147634576, + "acc_norm": 0.2582781456953642, + "acc_norm_stderr": 0.035737053147634576 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.31343283582089554, + "acc_stderr": 0.032801882053486414, + "acc_norm": 0.31343283582089554, + "acc_norm_stderr": 0.032801882053486414 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.26011560693641617, + "acc_stderr": 0.03345036916788991, + "acc_norm": 0.26011560693641617, + "acc_norm_stderr": 0.03345036916788991 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.02306818884826111, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.02306818884826111 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.22916666666666666, + "acc_stderr": 0.03514697467862388, + "acc_norm": 0.22916666666666666, + "acc_norm_stderr": 0.03514697467862388 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.43, + "acc_stderr": 
0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.30346820809248554, + "acc_stderr": 0.02475241196091722, + "acc_norm": 0.30346820809248554, + "acc_norm_stderr": 0.02475241196091722 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.294478527607362, + "acc_stderr": 0.03581165790474082, + "acc_norm": 0.294478527607362, + "acc_norm_stderr": 0.03581165790474082 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2808641975308642, + "acc_stderr": 0.025006469755799208, + "acc_norm": 0.2808641975308642, + "acc_norm_stderr": 0.025006469755799208 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.29533678756476683, + "acc_stderr": 0.032922966391551414, + "acc_norm": 0.29533678756476683, + "acc_norm_stderr": 0.032922966391551414 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.18421052631578946, + "acc_stderr": 0.03646758875075566, + "acc_norm": 0.18421052631578946, + "acc_norm_stderr": 0.03646758875075566 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.25137614678899084, + "acc_stderr": 0.01859920636028741, + "acc_norm": 0.25137614678899084, + "acc_norm_stderr": 0.01859920636028741 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.040061680838488795, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.040061680838488795 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.35294117647058826, + "acc_stderr": 0.02736359328468494, + "acc_norm": 0.35294117647058826, + "acc_norm_stderr": 0.02736359328468494 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 
0.33884297520661155, + "acc_stderr": 0.043207678075366705, + "acc_norm": 0.33884297520661155, + "acc_norm_stderr": 0.043207678075366705 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.03459777606810538, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.03459777606810538 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.018054027458815194, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.018054027458815194 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.28368794326241137, + "acc_stderr": 0.026891709428343957, + "acc_norm": 0.28368794326241137, + "acc_norm_stderr": 0.026891709428343957 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.04287858751340456, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.04287858751340456 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2638888888888889, + "acc_stderr": 0.030058202704309846, + "acc_norm": 0.2638888888888889, + "acc_norm_stderr": 0.030058202704309846 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23798882681564246, + "acc_stderr": 0.014242630070574892, + "acc_norm": 0.23798882681564246, + "acc_norm_stderr": 0.014242630070574892 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.33088235294117646, + "acc_stderr": 0.028582709753898435, + "acc_norm": 0.33088235294117646, + "acc_norm_stderr": 0.028582709753898435 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.24081632653061225, + "acc_stderr": 0.02737294220178816, + "acc_norm": 0.24081632653061225, + 
"acc_norm_stderr": 0.02737294220178816 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.3291139240506329, + "acc_stderr": 0.03058732629470236, + "acc_norm": 0.3291139240506329, + "acc_norm_stderr": 0.03058732629470236 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2666232073011734, + "acc_stderr": 0.011293836031612142, + "acc_norm": 0.2666232073011734, + "acc_norm_stderr": 0.011293836031612142 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.03256685484460389, + "acc_norm": 0.3137254901960784, + "acc_norm_stderr": 0.03256685484460389 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2606060606060606, + "acc_stderr": 0.034277431758165236, + "acc_norm": 0.2606060606060606, + "acc_norm_stderr": 0.034277431758165236 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24112607099143207, + "mc1_stderr": 0.014974827279752322, + "mc2": 0.45037975897741206, + "mc2_stderr": 0.015986987451663295 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.179456906729634, + "acc_stderr": 0.013193062031400433, + "acc_norm": 0.2644628099173554, + "acc_norm_stderr": 0.015163499477892412 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + 
"harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "jin05102518/Astral-7B-0.5Epoch-Test", + 
"model_sha": "cf77310443930dfb98bc55603555822c98af0309", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/jin05102518/Astral-7B-1.0Epoch-Instruct-v0.05/result_2023-10-22 13:07:21.json b/jin05102518/Astral-7B-1.0Epoch-Instruct-v0.05/result_2023-10-22 13:07:21.json new file mode 100644 index 0000000000000000000000000000000000000000..b8d8f7c59fa8353cdf052a487a3d8a6add7f64c9 --- /dev/null +++ b/jin05102518/Astral-7B-1.0Epoch-Instruct-v0.05/result_2023-10-22 13:07:21.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3148464163822526, + "acc_stderr": 0.01357265770308495, + "acc_norm": 0.37542662116040953, + "acc_norm_stderr": 0.014150631435111726 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3571997610037841, + "acc_stderr": 0.004781950883460504, + "acc_norm": 0.4569806811392153, + "acc_norm_stderr": 0.004971278309204196 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.45614035087719296, + "acc_stderr": 0.03820042586602967, + "acc_norm": 0.45614035087719296, + "acc_norm_stderr": 0.03820042586602967 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6310679611650486, + "acc_stderr": 0.0477761518115674, + "acc_norm": 0.6310679611650486, + "acc_norm_stderr": 0.0477761518115674 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4342273307790549, + "acc_stderr": 0.017724589389677785, + "acc_norm": 0.4342273307790549, + "acc_norm_stderr": 0.017724589389677785 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34814814814814815, + "acc_stderr": 0.041153246103369526, + "acc_norm": 0.34814814814814815, + "acc_norm_stderr": 0.041153246103369526 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 
0.32340425531914896, + "acc_stderr": 0.030579442773610334, + "acc_norm": 0.32340425531914896, + "acc_norm_stderr": 0.030579442773610334 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.43373493975903615, + "acc_stderr": 0.03858158940685515, + "acc_norm": 0.43373493975903615, + "acc_norm_stderr": 0.03858158940685515 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4694533762057878, + "acc_stderr": 0.02834504586484068, + "acc_norm": 0.4694533762057878, + "acc_norm_stderr": 0.02834504586484068 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.39461883408071746, + "acc_stderr": 0.03280400504755292, + "acc_norm": 0.39461883408071746, + "acc_norm_stderr": 0.03280400504755292 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4122137404580153, + "acc_stderr": 0.04317171194870254, + "acc_norm": 0.4122137404580153, + "acc_norm_stderr": 0.04317171194870254 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.47474747474747475, + "acc_stderr": 0.035578062450873145, + "acc_norm": 0.47474747474747475, + "acc_norm_stderr": 0.035578062450873145 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.47586206896551725, + "acc_stderr": 0.041618085035015295, + "acc_norm": 0.47586206896551725, + "acc_norm_stderr": 0.041618085035015295 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3235294117647059, + "acc_stderr": 0.046550104113196177, + "acc_norm": 0.3235294117647059, + "acc_norm_stderr": 0.046550104113196177 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4831932773109244, + "acc_stderr": 0.03246013680375308, + "acc_norm": 0.4831932773109244, + "acc_norm_stderr": 0.03246013680375308 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4307692307692308, + "acc_stderr": 0.025106820660539746, + "acc_norm": 0.4307692307692308, + "acc_norm_stderr": 
0.025106820660539746 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5462962962962963, + "acc_stderr": 0.04812917324536823, + "acc_norm": 0.5462962962962963, + "acc_norm_stderr": 0.04812917324536823 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4187192118226601, + "acc_stderr": 0.03471192860518468, + "acc_norm": 0.4187192118226601, + "acc_norm_stderr": 0.03471192860518468 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.45161290322580644, + "acc_stderr": 0.02831050034856839, + "acc_norm": 0.45161290322580644, + "acc_norm_stderr": 0.02831050034856839 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6752136752136753, + "acc_stderr": 0.03067902276549883, + "acc_norm": 0.6752136752136753, + "acc_norm_stderr": 0.03067902276549883 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.41132075471698115, + "acc_stderr": 0.030285009259009805, + "acc_norm": 0.41132075471698115, + "acc_norm_stderr": 0.030285009259009805 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4727272727272727, + "acc_stderr": 0.04782001791380063, + "acc_norm": 0.4727272727272727, + "acc_norm_stderr": 0.04782001791380063 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.31851851851851853, + "acc_stderr": 0.028406533090608463, + "acc_norm": 0.31851851851851853, + "acc_norm_stderr": 0.028406533090608463 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + "acc_stderr": 0.037345356767871984, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.037345356767871984 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5771144278606966, + "acc_stderr": 0.034932317774212816, + "acc_norm": 
0.5771144278606966, + "acc_norm_stderr": 0.034932317774212816 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3815028901734104, + "acc_stderr": 0.037038511930995215, + "acc_norm": 0.3815028901734104, + "acc_norm_stderr": 0.037038511930995215 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.36243386243386244, + "acc_stderr": 0.02475747390275205, + "acc_norm": 0.36243386243386244, + "acc_norm_stderr": 0.02475747390275205 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2986111111111111, + "acc_stderr": 0.03827052357950756, + "acc_norm": 0.2986111111111111, + "acc_norm_stderr": 0.03827052357950756 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.56, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.56, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4797687861271676, + "acc_stderr": 0.026897049996382875, + "acc_norm": 0.4797687861271676, + "acc_norm_stderr": 0.026897049996382875 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4785276073619632, + "acc_stderr": 0.03924746876751129, + "acc_norm": 0.4785276073619632, + "acc_norm_stderr": 0.03924746876751129 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4382716049382716, + "acc_stderr": 0.027607914087400473, + "acc_norm": 0.4382716049382716, + "acc_norm_stderr": 0.027607914087400473 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720685, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720685 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.49740932642487046, + "acc_stderr": 0.03608390745384488, + "acc_norm": 0.49740932642487046, + "acc_norm_stderr": 0.03608390745384488 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2894736842105263, + "acc_stderr": 
0.04266339443159394, + "acc_norm": 0.2894736842105263, + "acc_norm_stderr": 0.04266339443159394 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.47889908256880737, + "acc_stderr": 0.021418224754264643, + "acc_norm": 0.47889908256880737, + "acc_norm_stderr": 0.021418224754264643 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3492063492063492, + "acc_stderr": 0.04263906892795132, + "acc_norm": 0.3492063492063492, + "acc_norm_stderr": 0.04263906892795132 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.45751633986928103, + "acc_stderr": 0.028526383452142624, + "acc_norm": 0.45751633986928103, + "acc_norm_stderr": 0.028526383452142624 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6198347107438017, + "acc_stderr": 0.04431324501968431, + "acc_norm": 0.6198347107438017, + "acc_norm_stderr": 0.04431324501968431 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4473684210526316, + "acc_stderr": 0.040463368839782514, + "acc_norm": 0.4473684210526316, + "acc_norm_stderr": 0.040463368839782514 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.01943177567703731, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.01943177567703731 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32978723404255317, + "acc_stderr": 0.028045946942042405, + "acc_norm": 0.32978723404255317, + "acc_norm_stderr": 0.028045946942042405 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.30357142857142855, + "acc_stderr": 0.04364226155841044, + "acc_norm": 0.30357142857142855, + "acc_norm_stderr": 0.04364226155841044 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4212962962962963, + "acc_stderr": 0.03367462138896078, + "acc_norm": 0.4212962962962963, + "acc_norm_stderr": 0.03367462138896078 + }, + 
"harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.33631284916201115, + "acc_stderr": 0.015801003729145908, + "acc_norm": 0.33631284916201115, + "acc_norm_stderr": 0.015801003729145908 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.53, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.53, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.39338235294117646, + "acc_stderr": 0.02967428828131118, + "acc_norm": 0.39338235294117646, + "acc_norm_stderr": 0.02967428828131118 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5061224489795918, + "acc_stderr": 0.03200682020163906, + "acc_norm": 0.5061224489795918, + "acc_norm_stderr": 0.03200682020163906 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5358649789029536, + "acc_stderr": 0.03246338898055659, + "acc_norm": 0.5358649789029536, + "acc_norm_stderr": 0.03246338898055659 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.31877444589308995, + "acc_stderr": 0.011901895635786084, + "acc_norm": 0.31877444589308995, + "acc_norm_stderr": 0.011901895635786084 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.45588235294117646, + "acc_stderr": 0.03495624522015474, + "acc_norm": 0.45588235294117646, + "acc_norm_stderr": 0.03495624522015474 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.45454545454545453, + "acc_stderr": 0.03888176921674099, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.03888176921674099 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.30354957160342716, + "mc1_stderr": 0.016095884155386854, + "mc2": 0.4745826617149022, + "mc2_stderr": 0.015464604846827046 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.38016528925619836, + "acc_stderr": 0.0166893335969801, + "acc_norm": 
0.4155844155844156, + "acc_norm_stderr": 0.01694358631307657 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + 
"harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "jin05102518/Astral-7B-1.0Epoch-Instruct-v0.05", + "model_sha": "fb04a8d5574256eefe4faa1783874384c88eea9b", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/jin05102518/Astral-7B-1.0Epoch-Instruct-v0.06/result_2023-11-02 15:48:21.json b/jin05102518/Astral-7B-1.0Epoch-Instruct-v0.06/result_2023-11-02 15:48:21.json new file mode 100644 index 0000000000000000000000000000000000000000..593dc97fe3e01ce6a5d0a9844e3bcbe5dbe7fdce --- /dev/null +++ b/jin05102518/Astral-7B-1.0Epoch-Instruct-v0.06/result_2023-11-02 15:48:21.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3720136518771331, + "acc_stderr": 0.014124597881844463, + "acc_norm": 0.42150170648464164, + "acc_norm_stderr": 0.014430197069326023 + }, + "harness|ko_hellaswag|10": { + "acc": 0.38398725353515234, + "acc_stderr": 0.004853608805843877, + "acc_norm": 0.5012945628360884, + "acc_norm_stderr": 0.0049897646867388306 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.49707602339181284, + "acc_stderr": 
0.03834759370936839, + "acc_norm": 0.49707602339181284, + "acc_norm_stderr": 0.03834759370936839 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6213592233009708, + "acc_stderr": 0.048026946982589726, + "acc_norm": 0.6213592233009708, + "acc_norm_stderr": 0.048026946982589726 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.51213282247765, + "acc_stderr": 0.017874698667491338, + "acc_norm": 0.51213282247765, + "acc_norm_stderr": 0.017874698667491338 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3851851851851852, + "acc_stderr": 0.042039210401562783, + "acc_norm": 0.3851851851851852, + "acc_norm_stderr": 0.042039210401562783 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.42127659574468085, + "acc_stderr": 0.03227834510146268, + "acc_norm": 0.42127659574468085, + "acc_norm_stderr": 0.03227834510146268 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.43373493975903615, + "acc_stderr": 0.03858158940685516, + "acc_norm": 0.43373493975903615, + "acc_norm_stderr": 0.03858158940685516 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4790996784565916, + "acc_stderr": 0.028373270961069414, + "acc_norm": 0.4790996784565916, + "acc_norm_stderr": 0.028373270961069414 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.49327354260089684, + "acc_stderr": 0.03355476596234354, + "acc_norm": 0.49327354260089684, + "acc_norm_stderr": 0.03355476596234354 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5038167938931297, + "acc_stderr": 0.043851623256015534, + "acc_norm": 0.5038167938931297, + "acc_norm_stderr": 0.043851623256015534 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5252525252525253, + "acc_stderr": 
0.03557806245087314, + "acc_norm": 0.5252525252525253, + "acc_norm_stderr": 0.03557806245087314 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.46206896551724136, + "acc_stderr": 0.04154659671707548, + "acc_norm": 0.46206896551724136, + "acc_norm_stderr": 0.04154659671707548 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237654, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237654 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4789915966386555, + "acc_stderr": 0.0324498084999003, + "acc_norm": 0.4789915966386555, + "acc_norm_stderr": 0.0324498084999003 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4128205128205128, + "acc_stderr": 0.02496268356433182, + "acc_norm": 0.4128205128205128, + "acc_norm_stderr": 0.02496268356433182 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.53, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.53, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4187192118226601, + "acc_stderr": 0.03471192860518468, + "acc_norm": 0.4187192118226601, + "acc_norm_stderr": 0.03471192860518468 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.45806451612903226, + "acc_stderr": 0.028343787250540636, + "acc_norm": 0.45806451612903226, + "acc_norm_stderr": 0.028343787250540636 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7094017094017094, + "acc_stderr": 0.029745048572674064, + "acc_norm": 0.7094017094017094, + "acc_norm_stderr": 0.029745048572674064 + }, + "harness|ko_mmlu_clinical_knowledge|5": { 
+ "acc": 0.4679245283018868, + "acc_stderr": 0.03070948699255655, + "acc_norm": 0.4679245283018868, + "acc_norm_stderr": 0.03070948699255655 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.36666666666666664, + "acc_stderr": 0.029381620726465076, + "acc_norm": 0.36666666666666664, + "acc_norm_stderr": 0.029381620726465076 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + "acc_stderr": 0.037345356767871984, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.037345356767871984 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5970149253731343, + "acc_stderr": 0.034683432951111266, + "acc_norm": 0.5970149253731343, + "acc_norm_stderr": 0.034683432951111266 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.34104046242774566, + "acc_stderr": 0.036146654241808254, + "acc_norm": 0.34104046242774566, + "acc_norm_stderr": 0.036146654241808254 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3386243386243386, + "acc_stderr": 0.02437319786798305, + "acc_norm": 0.3386243386243386, + "acc_norm_stderr": 0.02437319786798305 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3541666666666667, + "acc_stderr": 0.039994111357535424, + "acc_norm": 0.3541666666666667, + "acc_norm_stderr": 0.039994111357535424 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237101, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237101 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5260115606936416, + "acc_stderr": 0.026882643434022895, + "acc_norm": 0.5260115606936416, + "acc_norm_stderr": 0.026882643434022895 + }, + 
"harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4785276073619632, + "acc_stderr": 0.03924746876751129, + "acc_norm": 0.4785276073619632, + "acc_norm_stderr": 0.03924746876751129 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4691358024691358, + "acc_stderr": 0.027767689606833942, + "acc_norm": 0.4691358024691358, + "acc_norm_stderr": 0.027767689606833942 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.04793724854411021, + "acc_norm": 0.35, + "acc_norm_stderr": 0.04793724854411021 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5492227979274611, + "acc_stderr": 0.03590910952235524, + "acc_norm": 0.5492227979274611, + "acc_norm_stderr": 0.03590910952235524 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.040969851398436695, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.040969851398436695 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.46605504587155966, + "acc_stderr": 0.021387863350354, + "acc_norm": 0.46605504587155966, + "acc_norm_stderr": 0.021387863350354 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.31746031746031744, + "acc_stderr": 0.0416345303130286, + "acc_norm": 0.31746031746031744, + "acc_norm_stderr": 0.0416345303130286 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4803921568627451, + "acc_stderr": 0.028607893699576063, + "acc_norm": 0.4803921568627451, + "acc_norm_stderr": 0.028607893699576063 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6446280991735537, + "acc_stderr": 0.0436923632657398, + "acc_norm": 0.6446280991735537, + "acc_norm_stderr": 0.0436923632657398 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.45394736842105265, + "acc_stderr": 0.04051646342874141, + "acc_norm": 0.45394736842105265, + "acc_norm_stderr": 
0.04051646342874141 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3872549019607843, + "acc_stderr": 0.019706875804085637, + "acc_norm": 0.3872549019607843, + "acc_norm_stderr": 0.019706875804085637 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.35815602836879434, + "acc_stderr": 0.028602085862759412, + "acc_norm": 0.35815602836879434, + "acc_norm_stderr": 0.028602085862759412 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4107142857142857, + "acc_stderr": 0.04669510663875191, + "acc_norm": 0.4107142857142857, + "acc_norm_stderr": 0.04669510663875191 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.375, + "acc_stderr": 0.033016908987210894, + "acc_norm": 0.375, + "acc_norm_stderr": 0.033016908987210894 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2122905027932961, + "acc_stderr": 0.013676644685831728, + "acc_norm": 0.2122905027932961, + "acc_norm_stderr": 0.013676644685831728 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.35661764705882354, + "acc_stderr": 0.029097209568411952, + "acc_norm": 0.35661764705882354, + "acc_norm_stderr": 0.029097209568411952 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3795918367346939, + "acc_stderr": 0.03106721126287249, + "acc_norm": 0.3795918367346939, + "acc_norm_stderr": 0.03106721126287249 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6033755274261603, + "acc_stderr": 0.031843998738112236, + "acc_norm": 0.6033755274261603, + "acc_norm_stderr": 0.031843998738112236 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3409387222946545, + "acc_stderr": 0.01210681720306721, 
+ "acc_norm": 0.3409387222946545, + "acc_norm_stderr": 0.01210681720306721 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4852941176470588, + "acc_stderr": 0.035077938347913236, + "acc_norm": 0.4852941176470588, + "acc_norm_stderr": 0.035077938347913236 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4484848484848485, + "acc_stderr": 0.03883565977956929, + "acc_norm": 0.4484848484848485, + "acc_norm_stderr": 0.03883565977956929 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2864137086903305, + "mc1_stderr": 0.015826142439502332, + "mc2": 0.4460519958175022, + "mc2_stderr": 0.015200803602621195 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.34946871310507677, + "acc_stderr": 0.016392797085769843, + "acc_norm": 0.3907910271546635, + "acc_norm_stderr": 0.01677529846510825 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 
1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "jin05102518/Astral-7B-1.0Epoch-Instruct-v0.06", + "model_sha": "ff04d583e74c05644558288bcbbec86f701fd5d9", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/jin05102518/Astral-7B-Instruct-v0.01/result_2023-10-13 16:06:56.json 
b/jin05102518/Astral-7B-Instruct-v0.01/result_2023-10-13 16:06:56.json new file mode 100644 index 0000000000000000000000000000000000000000..9aafe20bd47563f3be8595795e165f52e3db5055 --- /dev/null +++ b/jin05102518/Astral-7B-Instruct-v0.01/result_2023-10-13 16:06:56.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3199658703071672, + "acc_stderr": 0.013631345807016196, + "acc_norm": 0.38310580204778155, + "acc_norm_stderr": 0.014206472661672876 + }, + "harness|ko_hellaswag|10": { + "acc": 0.36875124477195775, + "acc_stderr": 0.004814803098436803, + "acc_norm": 0.4794861581358295, + "acc_norm_stderr": 0.0049855800659464565 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.45614035087719296, + "acc_stderr": 0.03820042586602967, + "acc_norm": 0.45614035087719296, + "acc_norm_stderr": 0.03820042586602967 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4563106796116505, + "acc_stderr": 0.049318019942204146, + "acc_norm": 0.4563106796116505, + "acc_norm_stderr": 0.049318019942204146 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.454661558109834, + "acc_stderr": 0.017806304585052602, + "acc_norm": 0.454661558109834, + "acc_norm_stderr": 0.017806304585052602 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3111111111111111, + "acc_stderr": 0.039992628766177214, + "acc_norm": 0.3111111111111111, + "acc_norm_stderr": 0.039992628766177214 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3829787234042553, + "acc_stderr": 0.03177821250236922, + "acc_norm": 0.3829787234042553, + "acc_norm_stderr": 0.03177821250236922 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.35542168674698793, + "acc_stderr": 0.03726214354322415, + "acc_norm": 0.35542168674698793, + "acc_norm_stderr": 0.03726214354322415 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 
0.4565916398713826, + "acc_stderr": 0.028290869054197604, + "acc_norm": 0.4565916398713826, + "acc_norm_stderr": 0.028290869054197604 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3901345291479821, + "acc_stderr": 0.032737667254591575, + "acc_norm": 0.3901345291479821, + "acc_norm_stderr": 0.032737667254591575 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.46564885496183206, + "acc_stderr": 0.04374928560599738, + "acc_norm": 0.46564885496183206, + "acc_norm_stderr": 0.04374928560599738 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621503, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621503 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.4696969696969697, + "acc_stderr": 0.03555804051763929, + "acc_norm": 0.4696969696969697, + "acc_norm_stderr": 0.03555804051763929 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4206896551724138, + "acc_stderr": 0.0411391498118926, + "acc_norm": 0.4206896551724138, + "acc_norm_stderr": 0.0411391498118926 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.0438986995680878, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.0438986995680878 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4369747899159664, + "acc_stderr": 0.032219436365661956, + "acc_norm": 0.4369747899159664, + "acc_norm_stderr": 0.032219436365661956 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.37435897435897436, + "acc_stderr": 0.02453759157283053, + "acc_norm": 0.37435897435897436, + "acc_norm_stderr": 0.02453759157283053 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_jurisprudence|5": { 
+ "acc": 0.4537037037037037, + "acc_stderr": 0.04812917324536823, + "acc_norm": 0.4537037037037037, + "acc_norm_stderr": 0.04812917324536823 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3842364532019704, + "acc_stderr": 0.0342239856565755, + "acc_norm": 0.3842364532019704, + "acc_norm_stderr": 0.0342239856565755 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4290322580645161, + "acc_stderr": 0.02815603653823321, + "acc_norm": 0.4290322580645161, + "acc_norm_stderr": 0.02815603653823321 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5769230769230769, + "acc_stderr": 0.032366121762202014, + "acc_norm": 0.5769230769230769, + "acc_norm_stderr": 0.032366121762202014 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4226415094339623, + "acc_stderr": 0.03040233144576954, + "acc_norm": 0.4226415094339623, + "acc_norm_stderr": 0.03040233144576954 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.027840811495871923, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.027840811495871923 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.03802039760107903, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.03802039760107903 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5472636815920398, + "acc_stderr": 0.03519702717576915, + "acc_norm": 0.5472636815920398, + "acc_norm_stderr": 0.03519702717576915 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3468208092485549, + "acc_stderr": 0.036291466701596636, + "acc_norm": 0.3468208092485549, + "acc_norm_stderr": 0.036291466701596636 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3915343915343915, + "acc_stderr": 0.025138091388851102, + "acc_norm": 0.3915343915343915, + 
"acc_norm_stderr": 0.025138091388851102 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2986111111111111, + "acc_stderr": 0.03827052357950756, + "acc_norm": 0.2986111111111111, + "acc_norm_stderr": 0.03827052357950756 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.53, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.53, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.430635838150289, + "acc_stderr": 0.02665880027367238, + "acc_norm": 0.430635838150289, + "acc_norm_stderr": 0.02665880027367238 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4049079754601227, + "acc_stderr": 0.03856672163548913, + "acc_norm": 0.4049079754601227, + "acc_norm_stderr": 0.03856672163548913 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4506172839506173, + "acc_stderr": 0.027684721415656203, + "acc_norm": 0.4506172839506173, + "acc_norm_stderr": 0.027684721415656203 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.39378238341968913, + "acc_stderr": 0.03526077095548237, + "acc_norm": 0.39378238341968913, + "acc_norm_stderr": 0.03526077095548237 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.040969851398436695, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.040969851398436695 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.42385321100917434, + "acc_stderr": 0.02118726320908752, + "acc_norm": 0.42385321100917434, + "acc_norm_stderr": 0.02118726320908752 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.04134913018303316, + "acc_norm": 
0.30952380952380953, + "acc_norm_stderr": 0.04134913018303316 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.45098039215686275, + "acc_stderr": 0.02849199358617157, + "acc_norm": 0.45098039215686275, + "acc_norm_stderr": 0.02849199358617157 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5785123966942148, + "acc_stderr": 0.045077322787750874, + "acc_norm": 0.5785123966942148, + "acc_norm_stderr": 0.045077322787750874 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40789473684210525, + "acc_stderr": 0.03999309712777471, + "acc_norm": 0.40789473684210525, + "acc_norm_stderr": 0.03999309712777471 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3284313725490196, + "acc_stderr": 0.018999707383162662, + "acc_norm": 0.3284313725490196, + "acc_norm_stderr": 0.018999707383162662 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.34397163120567376, + "acc_stderr": 0.028338017428611313, + "acc_norm": 0.34397163120567376, + "acc_norm_stderr": 0.028338017428611313 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3125, + "acc_stderr": 0.043994650575715215, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.043994650575715215 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.36574074074074076, + "acc_stderr": 0.03284738857647207, + "acc_norm": 0.36574074074074076, + "acc_norm_stderr": 0.03284738857647207 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24804469273743016, + "acc_stderr": 0.014444157808261453, + "acc_norm": 0.24804469273743016, + "acc_norm_stderr": 0.014444157808261453 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.59, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.59, 
+ "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3713235294117647, + "acc_stderr": 0.02934980313976587, + "acc_norm": 0.3713235294117647, + "acc_norm_stderr": 0.02934980313976587 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4122448979591837, + "acc_stderr": 0.03151236044674281, + "acc_norm": 0.4122448979591837, + "acc_norm_stderr": 0.03151236044674281 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5147679324894515, + "acc_stderr": 0.032533028078777386, + "acc_norm": 0.5147679324894515, + "acc_norm_stderr": 0.032533028078777386 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.29465449804432853, + "acc_stderr": 0.011643576764069548, + "acc_norm": 0.29465449804432853, + "acc_norm_stderr": 0.011643576764069548 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.03410785338904719, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.03410785338904719 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3515151515151515, + "acc_stderr": 0.0372820699868265, + "acc_norm": 0.3515151515151515, + "acc_norm_stderr": 0.0372820699868265 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2582619339045288, + "mc1_stderr": 0.015321821688476199, + "mc2": 0.4322824441345256, + "mc2_stderr": 0.015763267859642997 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3730814639905549, + "acc_stderr": 0.016627318275137432, + "acc_norm": 0.40613931523022434, + "acc_norm_stderr": 0.01688474950319139 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + 
"harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + 
"harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "jin05102518/Astral-7B-Instruct-v0.01", + "model_sha": "095682dac7dc303e13f3c4135333e5c78db5afbf", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/jingyeom/KoSoLAR-10.7B-v0.2_1.3_dedup/result_2024-01-29 05:01:45.json b/jingyeom/KoSoLAR-10.7B-v0.2_1.3_dedup/result_2024-01-29 05:01:45.json new file mode 100644 index 0000000000000000000000000000000000000000..e1cc37c44227f4838a0d3fa2e2b00cd8e4096f09 --- /dev/null +++ b/jingyeom/KoSoLAR-10.7B-v0.2_1.3_dedup/result_2024-01-29 05:01:45.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4709897610921502, + "acc_stderr": 0.014586776355294317, + "acc_norm": 0.5307167235494881, + "acc_norm_stderr": 0.014583792546304037 + }, + "harness|ko_hellaswag|10": { + "acc": 0.46415056761601275, + "acc_stderr": 0.004976939333240074, + "acc_norm": 0.630551682931687, + "acc_norm_stderr": 0.004816690123209745 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.695906432748538, + "acc_stderr": 0.03528211258245231, + "acc_norm": 0.695906432748538, + "acc_norm_stderr": 0.03528211258245231 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6213592233009708, + "acc_stderr": 0.04802694698258973, + "acc_norm": 0.6213592233009708, + "acc_norm_stderr": 0.04802694698258973 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6922094508301405, + "acc_stderr": 0.016506045045155626, + "acc_norm": 0.6922094508301405, + "acc_norm_stderr": 0.016506045045155626 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 
0.43703703703703706, + "acc_stderr": 0.04284958639753399, + "acc_norm": 0.43703703703703706, + "acc_norm_stderr": 0.04284958639753399 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4851063829787234, + "acc_stderr": 0.03267151848924777, + "acc_norm": 0.4851063829787234, + "acc_norm_stderr": 0.03267151848924777 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4819277108433735, + "acc_stderr": 0.038899512528272166, + "acc_norm": 0.4819277108433735, + "acc_norm_stderr": 0.038899512528272166 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5980707395498392, + "acc_stderr": 0.027846476005930477, + "acc_norm": 0.5980707395498392, + "acc_norm_stderr": 0.027846476005930477 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.6278026905829597, + "acc_stderr": 0.03244305283008731, + "acc_norm": 0.6278026905829597, + "acc_norm_stderr": 0.03244305283008731 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6106870229007634, + "acc_stderr": 0.042764865428145914, + "acc_norm": 0.6106870229007634, + "acc_norm_stderr": 0.042764865428145914 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956914, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956914 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7171717171717171, + "acc_stderr": 0.03208779558786752, + "acc_norm": 0.7171717171717171, + "acc_norm_stderr": 0.03208779558786752 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5379310344827586, + "acc_stderr": 0.04154659671707548, + "acc_norm": 0.5379310344827586, + "acc_norm_stderr": 0.04154659671707548 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.04220773659171453, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.04220773659171453 + }, + 
"harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6218487394957983, + "acc_stderr": 0.03149930577784906, + "acc_norm": 0.6218487394957983, + "acc_norm_stderr": 0.03149930577784906 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.541025641025641, + "acc_stderr": 0.025265525491284295, + "acc_norm": 0.541025641025641, + "acc_norm_stderr": 0.025265525491284295 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6111111111111112, + "acc_stderr": 0.04712821257426769, + "acc_norm": 0.6111111111111112, + "acc_norm_stderr": 0.04712821257426769 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.43349753694581283, + "acc_stderr": 0.034867317274198714, + "acc_norm": 0.43349753694581283, + "acc_norm_stderr": 0.034867317274198714 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.567741935483871, + "acc_stderr": 0.028181739720019403, + "acc_norm": 0.567741935483871, + "acc_norm_stderr": 0.028181739720019403 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7905982905982906, + "acc_stderr": 0.026655699653922754, + "acc_norm": 0.7905982905982906, + "acc_norm_stderr": 0.026655699653922754 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5056603773584906, + "acc_stderr": 0.030770900763851295, + "acc_norm": 0.5056603773584906, + "acc_norm_stderr": 0.030770900763851295 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6272727272727273, + "acc_stderr": 0.04631381319425464, + "acc_norm": 0.6272727272727273, + "acc_norm_stderr": 0.04631381319425464 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3074074074074074, + "acc_stderr": 0.028133252578815635, + "acc_norm": 0.3074074074074074, + 
"acc_norm_stderr": 0.028133252578815635 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + "acc_stderr": 0.037345356767871984, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.037345356767871984 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7512437810945274, + "acc_stderr": 0.030567675938916714, + "acc_norm": 0.7512437810945274, + "acc_norm_stderr": 0.030567675938916714 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4913294797687861, + "acc_stderr": 0.03811890988940412, + "acc_norm": 0.4913294797687861, + "acc_norm_stderr": 0.03811890988940412 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3835978835978836, + "acc_stderr": 0.02504375731852019, + "acc_norm": 0.3835978835978836, + "acc_norm_stderr": 0.02504375731852019 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5694444444444444, + "acc_stderr": 0.04140685639111503, + "acc_norm": 0.5694444444444444, + "acc_norm_stderr": 0.04140685639111503 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.74, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.74, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.638728323699422, + "acc_stderr": 0.025862201852277913, + "acc_norm": 0.638728323699422, + "acc_norm_stderr": 0.025862201852277913 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5521472392638037, + "acc_stderr": 0.03906947479456607, + "acc_norm": 0.5521472392638037, + "acc_norm_stderr": 0.03906947479456607 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6080246913580247, + "acc_stderr": 0.02716368603827115, + "acc_norm": 0.6080246913580247, + "acc_norm_stderr": 0.02716368603827115 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + 
"acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7305699481865285, + "acc_stderr": 0.03201867122877794, + "acc_norm": 0.7305699481865285, + "acc_norm_stderr": 0.03201867122877794 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.43859649122807015, + "acc_stderr": 0.04668000738510455, + "acc_norm": 0.43859649122807015, + "acc_norm_stderr": 0.04668000738510455 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6770642201834862, + "acc_stderr": 0.020048115923415336, + "acc_norm": 0.6770642201834862, + "acc_norm_stderr": 0.020048115923415336 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.40476190476190477, + "acc_stderr": 0.04390259265377562, + "acc_norm": 0.40476190476190477, + "acc_norm_stderr": 0.04390259265377562 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5784313725490197, + "acc_stderr": 0.028275490156791455, + "acc_norm": 0.5784313725490197, + "acc_norm_stderr": 0.028275490156791455 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.57, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.57, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.71900826446281, + "acc_stderr": 0.04103203830514511, + "acc_norm": 0.71900826446281, + "acc_norm_stderr": 0.04103203830514511 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.625, + "acc_stderr": 0.039397364351956274, + "acc_norm": 0.625, + "acc_norm_stderr": 0.039397364351956274 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5408496732026143, + "acc_stderr": 0.020160213617222516, + "acc_norm": 0.5408496732026143, + "acc_norm_stderr": 0.020160213617222516 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.36879432624113473, + "acc_stderr": 0.028782227561347247, + "acc_norm": 0.36879432624113473, + "acc_norm_stderr": 0.028782227561347247 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.38392857142857145, + "acc_stderr": 
0.046161430750285455, + "acc_norm": 0.38392857142857145, + "acc_norm_stderr": 0.046161430750285455 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.44907407407407407, + "acc_stderr": 0.033922384053216174, + "acc_norm": 0.44907407407407407, + "acc_norm_stderr": 0.033922384053216174 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23128491620111732, + "acc_stderr": 0.014102223623152594, + "acc_norm": 0.23128491620111732, + "acc_norm_stderr": 0.014102223623152594 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.71, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.71, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4852941176470588, + "acc_stderr": 0.03035969707904612, + "acc_norm": 0.4852941176470588, + "acc_norm_stderr": 0.03035969707904612 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5959183673469388, + "acc_stderr": 0.03141470802586587, + "acc_norm": 0.5959183673469388, + "acc_norm_stderr": 0.03141470802586587 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7552742616033755, + "acc_stderr": 0.027985699387036423, + "acc_norm": 0.7552742616033755, + "acc_norm_stderr": 0.027985699387036423 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.4061277705345502, + "acc_stderr": 0.012543154588412927, + "acc_norm": 0.4061277705345502, + "acc_norm_stderr": 0.012543154588412927 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.7156862745098039, + "acc_stderr": 0.03166009679399814, + "acc_norm": 0.7156862745098039, + "acc_norm_stderr": 0.03166009679399814 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.7333333333333333, + "acc_stderr": 0.03453131801885416, + "acc_norm": 0.7333333333333333, + "acc_norm_stderr": 0.03453131801885416 + }, 
+ "harness|ko_truthfulqa_mc|0": { + "mc1": 0.31334149326805383, + "mc1_stderr": 0.01623806506905961, + "mc2": 0.4626160763433255, + "mc2_stderr": 0.015567508333810372 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.58913813459268, + "acc_stderr": 0.01691497276784105, + "acc_norm": 0.6245572609208973, + "acc_norm_stderr": 0.016648411589511088 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "jingyeom/KoSoLAR-10.7B-v0.2_1.3_dedup", + "model_sha": "df5c63764f04e2d5863724ce9723d6cad2451e42", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/jingyeom/KoSoLAR-10.7B-v0.2_1.3_dedup_p/result_2024-01-29 05:05:25.json b/jingyeom/KoSoLAR-10.7B-v0.2_1.3_dedup_p/result_2024-01-29 05:05:25.json new file mode 100644 index 0000000000000000000000000000000000000000..a18989f21870b02a5fe670824347020354ebfc11 --- /dev/null +++ b/jingyeom/KoSoLAR-10.7B-v0.2_1.3_dedup_p/result_2024-01-29 05:05:25.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4709897610921502, + "acc_stderr": 0.014586776355294317, + "acc_norm": 0.5307167235494881, + "acc_norm_stderr": 0.014583792546304037 + }, + "harness|ko_hellaswag|10": { + 
"acc": 0.46415056761601275, + "acc_stderr": 0.004976939333240074, + "acc_norm": 0.630551682931687, + "acc_norm_stderr": 0.004816690123209745 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.695906432748538, + "acc_stderr": 0.03528211258245231, + "acc_norm": 0.695906432748538, + "acc_norm_stderr": 0.03528211258245231 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6213592233009708, + "acc_stderr": 0.04802694698258973, + "acc_norm": 0.6213592233009708, + "acc_norm_stderr": 0.04802694698258973 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6922094508301405, + "acc_stderr": 0.016506045045155626, + "acc_norm": 0.6922094508301405, + "acc_norm_stderr": 0.016506045045155626 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.43703703703703706, + "acc_stderr": 0.04284958639753399, + "acc_norm": 0.43703703703703706, + "acc_norm_stderr": 0.04284958639753399 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4851063829787234, + "acc_stderr": 0.03267151848924777, + "acc_norm": 0.4851063829787234, + "acc_norm_stderr": 0.03267151848924777 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4819277108433735, + "acc_stderr": 0.038899512528272166, + "acc_norm": 0.4819277108433735, + "acc_norm_stderr": 0.038899512528272166 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5980707395498392, + "acc_stderr": 0.027846476005930477, + "acc_norm": 0.5980707395498392, + "acc_norm_stderr": 0.027846476005930477 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.6278026905829597, + "acc_stderr": 0.03244305283008731, + "acc_norm": 0.6278026905829597, + "acc_norm_stderr": 0.03244305283008731 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6106870229007634, + "acc_stderr": 0.042764865428145914, + "acc_norm": 0.6106870229007634, + "acc_norm_stderr": 0.042764865428145914 + }, + 
"harness|ko_mmlu_medical_genetics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956914, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956914 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7171717171717171, + "acc_stderr": 0.03208779558786752, + "acc_norm": 0.7171717171717171, + "acc_norm_stderr": 0.03208779558786752 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5379310344827586, + "acc_stderr": 0.04154659671707548, + "acc_norm": 0.5379310344827586, + "acc_norm_stderr": 0.04154659671707548 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.04220773659171453, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.04220773659171453 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6218487394957983, + "acc_stderr": 0.03149930577784906, + "acc_norm": 0.6218487394957983, + "acc_norm_stderr": 0.03149930577784906 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.541025641025641, + "acc_stderr": 0.025265525491284295, + "acc_norm": 0.541025641025641, + "acc_norm_stderr": 0.025265525491284295 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6111111111111112, + "acc_stderr": 0.04712821257426769, + "acc_norm": 0.6111111111111112, + "acc_norm_stderr": 0.04712821257426769 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.43349753694581283, + "acc_stderr": 0.034867317274198714, + "acc_norm": 0.43349753694581283, + "acc_norm_stderr": 0.034867317274198714 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.567741935483871, + "acc_stderr": 0.028181739720019403, + "acc_norm": 0.567741935483871, + "acc_norm_stderr": 
0.028181739720019403 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7905982905982906, + "acc_stderr": 0.026655699653922754, + "acc_norm": 0.7905982905982906, + "acc_norm_stderr": 0.026655699653922754 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5056603773584906, + "acc_stderr": 0.030770900763851295, + "acc_norm": 0.5056603773584906, + "acc_norm_stderr": 0.030770900763851295 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6272727272727273, + "acc_stderr": 0.04631381319425464, + "acc_norm": 0.6272727272727273, + "acc_norm_stderr": 0.04631381319425464 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3074074074074074, + "acc_stderr": 0.028133252578815635, + "acc_norm": 0.3074074074074074, + "acc_norm_stderr": 0.028133252578815635 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + "acc_stderr": 0.037345356767871984, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.037345356767871984 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7512437810945274, + "acc_stderr": 0.030567675938916714, + "acc_norm": 0.7512437810945274, + "acc_norm_stderr": 0.030567675938916714 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4913294797687861, + "acc_stderr": 0.03811890988940412, + "acc_norm": 0.4913294797687861, + "acc_norm_stderr": 0.03811890988940412 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3835978835978836, + "acc_stderr": 0.02504375731852019, + "acc_norm": 0.3835978835978836, + "acc_norm_stderr": 0.02504375731852019 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5694444444444444, + "acc_stderr": 0.04140685639111503, + "acc_norm": 0.5694444444444444, + "acc_norm_stderr": 0.04140685639111503 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.74, + "acc_stderr": 0.04408440022768078, + 
"acc_norm": 0.74, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.638728323699422, + "acc_stderr": 0.025862201852277913, + "acc_norm": 0.638728323699422, + "acc_norm_stderr": 0.025862201852277913 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5521472392638037, + "acc_stderr": 0.03906947479456607, + "acc_norm": 0.5521472392638037, + "acc_norm_stderr": 0.03906947479456607 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6080246913580247, + "acc_stderr": 0.02716368603827115, + "acc_norm": 0.6080246913580247, + "acc_norm_stderr": 0.02716368603827115 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7305699481865285, + "acc_stderr": 0.03201867122877794, + "acc_norm": 0.7305699481865285, + "acc_norm_stderr": 0.03201867122877794 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.43859649122807015, + "acc_stderr": 0.04668000738510455, + "acc_norm": 0.43859649122807015, + "acc_norm_stderr": 0.04668000738510455 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6770642201834862, + "acc_stderr": 0.020048115923415336, + "acc_norm": 0.6770642201834862, + "acc_norm_stderr": 0.020048115923415336 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.40476190476190477, + "acc_stderr": 0.04390259265377562, + "acc_norm": 0.40476190476190477, + "acc_norm_stderr": 0.04390259265377562 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5784313725490197, + "acc_stderr": 0.028275490156791455, + "acc_norm": 0.5784313725490197, + "acc_norm_stderr": 0.028275490156791455 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.57, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.57, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.71900826446281, + "acc_stderr": 
0.04103203830514511, + "acc_norm": 0.71900826446281, + "acc_norm_stderr": 0.04103203830514511 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.625, + "acc_stderr": 0.039397364351956274, + "acc_norm": 0.625, + "acc_norm_stderr": 0.039397364351956274 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5408496732026143, + "acc_stderr": 0.020160213617222516, + "acc_norm": 0.5408496732026143, + "acc_norm_stderr": 0.020160213617222516 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.36879432624113473, + "acc_stderr": 0.028782227561347247, + "acc_norm": 0.36879432624113473, + "acc_norm_stderr": 0.028782227561347247 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.38392857142857145, + "acc_stderr": 0.046161430750285455, + "acc_norm": 0.38392857142857145, + "acc_norm_stderr": 0.046161430750285455 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.44907407407407407, + "acc_stderr": 0.033922384053216174, + "acc_norm": 0.44907407407407407, + "acc_norm_stderr": 0.033922384053216174 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23128491620111732, + "acc_stderr": 0.014102223623152594, + "acc_norm": 0.23128491620111732, + "acc_norm_stderr": 0.014102223623152594 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.71, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.71, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4852941176470588, + "acc_stderr": 0.03035969707904612, + "acc_norm": 0.4852941176470588, + "acc_norm_stderr": 0.03035969707904612 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5959183673469388, + "acc_stderr": 0.03141470802586587, + "acc_norm": 0.5959183673469388, + "acc_norm_stderr": 0.03141470802586587 + }, + 
"harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7552742616033755, + "acc_stderr": 0.027985699387036423, + "acc_norm": 0.7552742616033755, + "acc_norm_stderr": 0.027985699387036423 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.4061277705345502, + "acc_stderr": 0.012543154588412927, + "acc_norm": 0.4061277705345502, + "acc_norm_stderr": 0.012543154588412927 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.7156862745098039, + "acc_stderr": 0.03166009679399814, + "acc_norm": 0.7156862745098039, + "acc_norm_stderr": 0.03166009679399814 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.7333333333333333, + "acc_stderr": 0.03453131801885416, + "acc_norm": 0.7333333333333333, + "acc_norm_stderr": 0.03453131801885416 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.31334149326805383, + "mc1_stderr": 0.01623806506905961, + "mc2": 0.4626160763433255, + "mc2_stderr": 0.015567508333810372 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.58913813459268, + "acc_stderr": 0.01691497276784105, + "acc_norm": 0.6245572609208973, + "acc_norm_stderr": 0.016648411589511088 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + 
"harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "jingyeom/KoSoLAR-10.7B-v0.2_1.3_dedup_p", + "model_sha": "df5c63764f04e2d5863724ce9723d6cad2451e42", + "model_dtype": 
"torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/jingyeom/KoSoLAR-10.7B-v0.2_1.4_dedup_1/result_2024-02-01 05:18:04.json b/jingyeom/KoSoLAR-10.7B-v0.2_1.4_dedup_1/result_2024-02-01 05:18:04.json new file mode 100644 index 0000000000000000000000000000000000000000..f43561f6f2127657f331f4b2ef17089d15cbad1c --- /dev/null +++ b/jingyeom/KoSoLAR-10.7B-v0.2_1.4_dedup_1/result_2024-02-01 05:18:04.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.43856655290102387, + "acc_stderr": 0.014500682618212864, + "acc_norm": 0.5042662116040956, + "acc_norm_stderr": 0.014610858923956952 + }, + "harness|ko_hellaswag|10": { + "acc": 0.45628360884286, + "acc_stderr": 0.004970672651595841, + "acc_norm": 0.6199960167297351, + "acc_norm_stderr": 0.004843954338451449 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6900584795321637, + "acc_stderr": 0.035469769593931624, + "acc_norm": 0.6900584795321637, + "acc_norm_stderr": 0.035469769593931624 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6699029126213593, + "acc_stderr": 0.04656147110012351, + "acc_norm": 0.6699029126213593, + "acc_norm_stderr": 0.04656147110012351 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6909323116219668, + "acc_stderr": 0.016524988919702187, + "acc_norm": 0.6909323116219668, + "acc_norm_stderr": 0.016524988919702187 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4222222222222222, + "acc_stderr": 0.04266763404099582, + "acc_norm": 0.4222222222222222, + "acc_norm_stderr": 0.04266763404099582 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768077, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768077 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4723404255319149, + "acc_stderr": 0.03263597118409769, + "acc_norm": 0.4723404255319149, + "acc_norm_stderr": 
0.03263597118409769 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.5060240963855421, + "acc_stderr": 0.038922121953330446, + "acc_norm": 0.5060240963855421, + "acc_norm_stderr": 0.038922121953330446 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6237942122186495, + "acc_stderr": 0.027513925683549427, + "acc_norm": 0.6237942122186495, + "acc_norm_stderr": 0.027513925683549427 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.6098654708520179, + "acc_stderr": 0.03273766725459157, + "acc_norm": 0.6098654708520179, + "acc_norm_stderr": 0.03273766725459157 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5725190839694656, + "acc_stderr": 0.04338920305792401, + "acc_norm": 0.5725190839694656, + "acc_norm_stderr": 0.04338920305792401 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7272727272727273, + "acc_stderr": 0.03173071239071724, + "acc_norm": 0.7272727272727273, + "acc_norm_stderr": 0.03173071239071724 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4896551724137931, + "acc_stderr": 0.041657747757287644, + "acc_norm": 0.4896551724137931, + "acc_norm_stderr": 0.041657747757287644 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.30392156862745096, + "acc_stderr": 0.045766654032077615, + "acc_norm": 0.30392156862745096, + "acc_norm_stderr": 0.045766654032077615 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5882352941176471, + "acc_stderr": 0.03196876989195779, + "acc_norm": 0.5882352941176471, + "acc_norm_stderr": 0.03196876989195779 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5564102564102564, + "acc_stderr": 0.025189149894764222, + "acc_norm": 0.5564102564102564, + "acc_norm_stderr": 0.025189149894764222 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + 
"acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6296296296296297, + "acc_stderr": 0.04668408033024931, + "acc_norm": 0.6296296296296297, + "acc_norm_stderr": 0.04668408033024931 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3842364532019704, + "acc_stderr": 0.0342239856565755, + "acc_norm": 0.3842364532019704, + "acc_norm_stderr": 0.0342239856565755 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6096774193548387, + "acc_stderr": 0.02775125663696959, + "acc_norm": 0.6096774193548387, + "acc_norm_stderr": 0.02775125663696959 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7905982905982906, + "acc_stderr": 0.026655699653922754, + "acc_norm": 0.7905982905982906, + "acc_norm_stderr": 0.026655699653922754 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5622641509433962, + "acc_stderr": 0.030533338430467516, + "acc_norm": 0.5622641509433962, + "acc_norm_stderr": 0.030533338430467516 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6181818181818182, + "acc_stderr": 0.04653429807913507, + "acc_norm": 0.6181818181818182, + "acc_norm_stderr": 0.04653429807913507 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.34074074074074073, + "acc_stderr": 0.028897748741131133, + "acc_norm": 0.34074074074074073, + "acc_norm_stderr": 0.028897748741131133 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.03802039760107903, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.03802039760107903 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7164179104477612, + "acc_stderr": 0.031871875379197966, + "acc_norm": 0.7164179104477612, + "acc_norm_stderr": 0.031871875379197966 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4797687861271676, + 
"acc_stderr": 0.03809342081273957, + "acc_norm": 0.4797687861271676, + "acc_norm_stderr": 0.03809342081273957 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.373015873015873, + "acc_stderr": 0.02490699045899257, + "acc_norm": 0.373015873015873, + "acc_norm_stderr": 0.02490699045899257 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.04174752578923185, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.04174752578923185 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.76, + "acc_stderr": 0.04292346959909284, + "acc_norm": 0.76, + "acc_norm_stderr": 0.04292346959909284 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5809248554913294, + "acc_stderr": 0.026564178111422622, + "acc_norm": 0.5809248554913294, + "acc_norm_stderr": 0.026564178111422622 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5030674846625767, + "acc_stderr": 0.03928297078179662, + "acc_norm": 0.5030674846625767, + "acc_norm_stderr": 0.03928297078179662 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6265432098765432, + "acc_stderr": 0.026915003011380154, + "acc_norm": 0.6265432098765432, + "acc_norm_stderr": 0.026915003011380154 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7253886010362695, + "acc_stderr": 0.032210245080411544, + "acc_norm": 0.7253886010362695, + "acc_norm_stderr": 0.032210245080411544 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.42105263157894735, + "acc_stderr": 0.046446020912223177, + "acc_norm": 0.42105263157894735, + "acc_norm_stderr": 0.046446020912223177 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 
0.6752293577981652, + "acc_stderr": 0.020077729109310324, + "acc_norm": 0.6752293577981652, + "acc_norm_stderr": 0.020077729109310324 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3968253968253968, + "acc_stderr": 0.04375888492727062, + "acc_norm": 0.3968253968253968, + "acc_norm_stderr": 0.04375888492727062 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5915032679738562, + "acc_stderr": 0.028146405993096358, + "acc_norm": 0.5915032679738562, + "acc_norm_stderr": 0.028146405993096358 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.62, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.62, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7107438016528925, + "acc_stderr": 0.04139112727635464, + "acc_norm": 0.7107438016528925, + "acc_norm_stderr": 0.04139112727635464 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.6118421052631579, + "acc_stderr": 0.03965842097512744, + "acc_norm": 0.6118421052631579, + "acc_norm_stderr": 0.03965842097512744 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4869281045751634, + "acc_stderr": 0.020220920829626916, + "acc_norm": 0.4869281045751634, + "acc_norm_stderr": 0.020220920829626916 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3971631205673759, + "acc_stderr": 0.0291898056735871, + "acc_norm": 0.3971631205673759, + "acc_norm_stderr": 0.0291898056735871 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.38392857142857145, + "acc_stderr": 0.046161430750285455, + "acc_norm": 0.38392857142857145, + "acc_norm_stderr": 0.046161430750285455 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.03409386946992699, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.03409386946992699 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2569832402234637, + "acc_stderr": 0.014614465821966351, + "acc_norm": 0.2569832402234637, + "acc_norm_stderr": 0.014614465821966351 
+ }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.66, + "acc_stderr": 0.04760952285695237, + "acc_norm": 0.66, + "acc_norm_stderr": 0.04760952285695237 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.49264705882352944, + "acc_stderr": 0.030369552523902173, + "acc_norm": 0.49264705882352944, + "acc_norm_stderr": 0.030369552523902173 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6163265306122448, + "acc_stderr": 0.031130880396235943, + "acc_norm": 0.6163265306122448, + "acc_norm_stderr": 0.031130880396235943 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7468354430379747, + "acc_stderr": 0.028304657943035313, + "acc_norm": 0.7468354430379747, + "acc_norm_stderr": 0.028304657943035313 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.39374185136897, + "acc_stderr": 0.012478532272564433, + "acc_norm": 0.39374185136897, + "acc_norm_stderr": 0.012478532272564433 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.7205882352941176, + "acc_stderr": 0.03149328104507955, + "acc_norm": 0.7205882352941176, + "acc_norm_stderr": 0.03149328104507955 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.7090909090909091, + "acc_stderr": 0.03546563019624335, + "acc_norm": 0.7090909090909091, + "acc_norm_stderr": 0.03546563019624335 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2484700122399021, + "mc1_stderr": 0.015127427096520693, + "mc2": 0.3888999524970837, + "mc2_stderr": 0.014738222524932119 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.42621015348288077, + "acc_stderr": 0.01700212260948926, + "acc_norm": 0.4817001180637544, + "acc_norm_stderr": 0.01717883663917774 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + 
"harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + 
"harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "jingyeom/KoSoLAR-10.7B-v0.2_1.4_dedup_1", + "model_sha": "a72ed114d5e660e86bc0018e8faf06c17bf40fce", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/jingyeom/SOLAR_KO_1.3_deup/result_2024-01-16 08:09:11.json b/jingyeom/SOLAR_KO_1.3_deup/result_2024-01-16 08:09:11.json new file mode 100644 index 0000000000000000000000000000000000000000..147261b2ff8d2704ed5e45754e44c8f6e0a67a62 --- /dev/null +++ b/jingyeom/SOLAR_KO_1.3_deup/result_2024-01-16 08:09:11.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4513651877133106, + "acc_stderr": 0.014542104569955265, + "acc_norm": 0.5264505119453925, + "acc_norm_stderr": 0.014590931358120172 + }, + "harness|ko_hellaswag|10": { + "acc": 0.44851623182632944, + "acc_stderr": 0.004963259311700556, + "acc_norm": 0.6092411870145389, + "acc_norm_stderr": 0.004869232758103326 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6608187134502924, + "acc_stderr": 0.036310534964889056, + "acc_norm": 0.6608187134502924, + "acc_norm_stderr": 0.036310534964889056 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6893203883495146, + "acc_stderr": 0.0458212416016155, + "acc_norm": 0.6893203883495146, + 
"acc_norm_stderr": 0.0458212416016155 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6998722860791826, + "acc_stderr": 0.016389249691317418, + "acc_norm": 0.6998722860791826, + "acc_norm_stderr": 0.016389249691317418 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.04316378599511324, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.04316378599511324 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621503, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621503 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4851063829787234, + "acc_stderr": 0.03267151848924777, + "acc_norm": 0.4851063829787234, + "acc_norm_stderr": 0.03267151848924777 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4819277108433735, + "acc_stderr": 0.03889951252827216, + "acc_norm": 0.4819277108433735, + "acc_norm_stderr": 0.03889951252827216 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5659163987138264, + "acc_stderr": 0.0281502322445356, + "acc_norm": 0.5659163987138264, + "acc_norm_stderr": 0.0281502322445356 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5650224215246636, + "acc_stderr": 0.033272833702713445, + "acc_norm": 0.5650224215246636, + "acc_norm_stderr": 0.033272833702713445 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5572519083969466, + "acc_stderr": 0.043564472026650695, + "acc_norm": 0.5572519083969466, + "acc_norm_stderr": 0.043564472026650695 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.56, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.56, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6717171717171717, + "acc_stderr": 0.03345678422756777, + "acc_norm": 0.6717171717171717, + "acc_norm_stderr": 0.03345678422756777 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4413793103448276, + "acc_stderr": 0.04137931034482758, + "acc_norm": 0.4413793103448276, + 
"acc_norm_stderr": 0.04137931034482758 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.043364327079931785, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.043364327079931785 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5126050420168067, + "acc_stderr": 0.032468167657521745, + "acc_norm": 0.5126050420168067, + "acc_norm_stderr": 0.032468167657521745 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5128205128205128, + "acc_stderr": 0.02534267129380725, + "acc_norm": 0.5128205128205128, + "acc_norm_stderr": 0.02534267129380725 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.58, + "acc_stderr": 0.04960449637488583, + "acc_norm": 0.58, + "acc_norm_stderr": 0.04960449637488583 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5462962962962963, + "acc_stderr": 0.04812917324536823, + "acc_norm": 0.5462962962962963, + "acc_norm_stderr": 0.04812917324536823 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4039408866995074, + "acc_stderr": 0.03452453903822039, + "acc_norm": 0.4039408866995074, + "acc_norm_stderr": 0.03452453903822039 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6096774193548387, + "acc_stderr": 0.027751256636969587, + "acc_norm": 0.6096774193548387, + "acc_norm_stderr": 0.027751256636969587 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7948717948717948, + "acc_stderr": 0.026453508054040325, + "acc_norm": 0.7948717948717948, + "acc_norm_stderr": 0.026453508054040325 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5207547169811321, + "acc_stderr": 0.030746349975723463, + "acc_norm": 0.5207547169811321, + "acc_norm_stderr": 0.030746349975723463 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5818181818181818, + "acc_stderr": 
0.04724577405731572, + "acc_norm": 0.5818181818181818, + "acc_norm_stderr": 0.04724577405731572 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.02944316932303154, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.02944316932303154 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3708609271523179, + "acc_stderr": 0.03943966699183629, + "acc_norm": 0.3708609271523179, + "acc_norm_stderr": 0.03943966699183629 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.03333333333333333, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.03333333333333333 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.42196531791907516, + "acc_stderr": 0.03765746693865149, + "acc_norm": 0.42196531791907516, + "acc_norm_stderr": 0.03765746693865149 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3253968253968254, + "acc_stderr": 0.024130158299762613, + "acc_norm": 0.3253968253968254, + "acc_norm_stderr": 0.024130158299762613 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5208333333333334, + "acc_stderr": 0.041775789507399935, + "acc_norm": 0.5208333333333334, + "acc_norm_stderr": 0.041775789507399935 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.68, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.68, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4797687861271676, + "acc_stderr": 0.026897049996382868, + "acc_norm": 0.4797687861271676, + "acc_norm_stderr": 0.026897049996382868 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.49079754601226994, + "acc_stderr": 0.03927705600787443, + "acc_norm": 0.49079754601226994, + "acc_norm_stderr": 0.03927705600787443 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 
0.5648148148148148, + "acc_stderr": 0.0275860062216077, + "acc_norm": 0.5648148148148148, + "acc_norm_stderr": 0.0275860062216077 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6735751295336787, + "acc_stderr": 0.033840286211432945, + "acc_norm": 0.6735751295336787, + "acc_norm_stderr": 0.033840286211432945 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3157894736842105, + "acc_stderr": 0.04372748290278008, + "acc_norm": 0.3157894736842105, + "acc_norm_stderr": 0.04372748290278008 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6495412844036698, + "acc_stderr": 0.020456077599824457, + "acc_norm": 0.6495412844036698, + "acc_norm_stderr": 0.020456077599824457 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.36507936507936506, + "acc_stderr": 0.04306241259127155, + "acc_norm": 0.36507936507936506, + "acc_norm_stderr": 0.04306241259127155 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5882352941176471, + "acc_stderr": 0.02818059632825929, + "acc_norm": 0.5882352941176471, + "acc_norm_stderr": 0.02818059632825929 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6776859504132231, + "acc_stderr": 0.042664163633521685, + "acc_norm": 0.6776859504132231, + "acc_norm_stderr": 0.042664163633521685 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5657894736842105, + "acc_stderr": 0.0403356566784832, + "acc_norm": 0.5657894736842105, + "acc_norm_stderr": 0.0403356566784832 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.46078431372549017, + "acc_stderr": 0.0201655233139079, + "acc_norm": 0.46078431372549017, + "acc_norm_stderr": 0.0201655233139079 + }, + 
"harness|ko_mmlu_professional_accounting|5": { + "acc": 0.36524822695035464, + "acc_stderr": 0.028723863853281285, + "acc_norm": 0.36524822695035464, + "acc_norm_stderr": 0.028723863853281285 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.36607142857142855, + "acc_stderr": 0.04572372358737431, + "acc_norm": 0.36607142857142855, + "acc_norm_stderr": 0.04572372358737431 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.03362277436608044, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.03362277436608044 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.21452513966480447, + "acc_stderr": 0.013728923407828868, + "acc_norm": 0.21452513966480447, + "acc_norm_stderr": 0.013728923407828868 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.030161911930767102, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.030161911930767102 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6204081632653061, + "acc_stderr": 0.031067211262872468, + "acc_norm": 0.6204081632653061, + "acc_norm_stderr": 0.031067211262872468 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7257383966244726, + "acc_stderr": 0.029041333510598035, + "acc_norm": 0.7257383966244726, + "acc_norm_stderr": 0.029041333510598035 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3683181225554107, + "acc_stderr": 0.012319403369564642, + "acc_norm": 0.3683181225554107, + "acc_norm_stderr": 0.012319403369564642 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6176470588235294, + "acc_stderr": 
0.034107853389047205, + "acc_norm": 0.6176470588235294, + "acc_norm_stderr": 0.034107853389047205 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5818181818181818, + "acc_stderr": 0.03851716319398395, + "acc_norm": 0.5818181818181818, + "acc_norm_stderr": 0.03851716319398395 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.28518971848225216, + "mc1_stderr": 0.015805827874454895, + "mc2": 0.45139654928954936, + "mc2_stderr": 0.015165689791286727 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5655253837072018, + "acc_stderr": 0.017042098620824935, + "acc_norm": 0.5855962219598583, + "acc_norm_stderr": 0.01693658338394361 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 
1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "jingyeom/SOLAR_KO_1.3_deup", + "model_sha": "3218e4304fe55ec950347c96018f14f60baca25d", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/jingyeom/Yi-ko-1.1-dedup/result_2023-12-26 01:45:47.json b/jingyeom/Yi-ko-1.1-dedup/result_2023-12-26 01:45:47.json new file mode 100644 index 0000000000000000000000000000000000000000..d9aa7ea8a8c317c2d7780116dff7ab04c13e2c4e --- /dev/null +++ b/jingyeom/Yi-ko-1.1-dedup/result_2023-12-26 01:45:47.json @@ -0,0 
+1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3779863481228669, + "acc_stderr": 0.014169664520303096, + "acc_norm": 0.4445392491467577, + "acc_norm_stderr": 0.014521226405627079 + }, + "harness|ko_hellaswag|10": { + "acc": 0.40689105755825533, + "acc_stderr": 0.004902502514738602, + "acc_norm": 0.5487950607448715, + "acc_norm_stderr": 0.0049659636472103134 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5263157894736842, + "acc_stderr": 0.038295098689947286, + "acc_norm": 0.5263157894736842, + "acc_norm_stderr": 0.038295098689947286 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5631067961165048, + "acc_stderr": 0.04911147107365777, + "acc_norm": 0.5631067961165048, + "acc_norm_stderr": 0.04911147107365777 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5696040868454662, + "acc_stderr": 0.017705868776292398, + "acc_norm": 0.5696040868454662, + "acc_norm_stderr": 0.017705868776292398 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.5037037037037037, + "acc_stderr": 0.043192236258113324, + "acc_norm": 0.5037037037037037, + "acc_norm_stderr": 0.043192236258113324 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4127659574468085, + "acc_stderr": 0.03218471141400351, + "acc_norm": 0.4127659574468085, + "acc_norm_stderr": 0.03218471141400351 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42771084337349397, + "acc_stderr": 0.03851597683718533, + "acc_norm": 0.42771084337349397, + "acc_norm_stderr": 0.03851597683718533 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5048231511254019, + "acc_stderr": 0.028396770444111298, + "acc_norm": 0.5048231511254019, + "acc_norm_stderr": 0.028396770444111298 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4663677130044843, + "acc_stderr": 0.033481800170603065, + "acc_norm": 0.4663677130044843, + 
"acc_norm_stderr": 0.033481800170603065 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5114503816793893, + "acc_stderr": 0.043841400240780176, + "acc_norm": 0.5114503816793893, + "acc_norm_stderr": 0.043841400240780176 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6464646464646465, + "acc_stderr": 0.03406086723547153, + "acc_norm": 0.6464646464646465, + "acc_norm_stderr": 0.03406086723547153 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5172413793103449, + "acc_stderr": 0.04164188720169375, + "acc_norm": 0.5172413793103449, + "acc_norm_stderr": 0.04164188720169375 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.04336432707993178, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.04336432707993178 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4789915966386555, + "acc_stderr": 0.0324498084999003, + "acc_norm": 0.4789915966386555, + "acc_norm_stderr": 0.0324498084999003 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.47435897435897434, + "acc_stderr": 0.025317649726448652, + "acc_norm": 0.47435897435897434, + "acc_norm_stderr": 0.025317649726448652 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5185185185185185, + "acc_stderr": 0.0483036602463533, + "acc_norm": 0.5185185185185185, + "acc_norm_stderr": 0.0483036602463533 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.03465304488406796, + "acc_norm": 
0.41379310344827586, + "acc_norm_stderr": 0.03465304488406796 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5032258064516129, + "acc_stderr": 0.02844341422643831, + "acc_norm": 0.5032258064516129, + "acc_norm_stderr": 0.02844341422643831 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.717948717948718, + "acc_stderr": 0.029480360549541194, + "acc_norm": 0.717948717948718, + "acc_norm_stderr": 0.029480360549541194 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.45660377358490567, + "acc_stderr": 0.03065674869673943, + "acc_norm": 0.45660377358490567, + "acc_norm_stderr": 0.03065674869673943 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5181818181818182, + "acc_stderr": 0.04785964010794916, + "acc_norm": 0.5181818181818182, + "acc_norm_stderr": 0.04785964010794916 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3074074074074074, + "acc_stderr": 0.028133252578815635, + "acc_norm": 0.3074074074074074, + "acc_norm_stderr": 0.028133252578815635 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33112582781456956, + "acc_stderr": 0.038425817186598696, + "acc_norm": 0.33112582781456956, + "acc_norm_stderr": 0.038425817186598696 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6019900497512438, + "acc_stderr": 0.034611994290400135, + "acc_norm": 0.6019900497512438, + "acc_norm_stderr": 0.034611994290400135 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3872832369942196, + "acc_stderr": 0.03714325906302064, + "acc_norm": 0.3872832369942196, + "acc_norm_stderr": 0.03714325906302064 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30687830687830686, + "acc_stderr": 0.02375292871211214, + "acc_norm": 0.30687830687830686, + "acc_norm_stderr": 0.02375292871211214 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4305555555555556, + "acc_stderr": 0.04140685639111503, + "acc_norm": 0.4305555555555556, + "acc_norm_stderr": 0.04140685639111503 + }, + 
"harness|ko_mmlu_college_chemistry|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.62, + "acc_stderr": 0.04878317312145634, + "acc_norm": 0.62, + "acc_norm_stderr": 0.04878317312145634 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5086705202312138, + "acc_stderr": 0.026915047355369804, + "acc_norm": 0.5086705202312138, + "acc_norm_stderr": 0.026915047355369804 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4294478527607362, + "acc_stderr": 0.038890666191127216, + "acc_norm": 0.4294478527607362, + "acc_norm_stderr": 0.038890666191127216 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.49691358024691357, + "acc_stderr": 0.027820214158594377, + "acc_norm": 0.49691358024691357, + "acc_norm_stderr": 0.027820214158594377 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5595854922279793, + "acc_stderr": 0.035827245300360945, + "acc_norm": 0.5595854922279793, + "acc_norm_stderr": 0.035827245300360945 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3508771929824561, + "acc_stderr": 0.044895393502706986, + "acc_norm": 0.3508771929824561, + "acc_norm_stderr": 0.044895393502706986 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5944954128440367, + "acc_stderr": 0.02105099799189684, + "acc_norm": 0.5944954128440367, + "acc_norm_stderr": 0.02105099799189684 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2698412698412698, + "acc_stderr": 0.03970158273235173, + "acc_norm": 0.2698412698412698, + "acc_norm_stderr": 0.03970158273235173 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5163398692810458, + "acc_stderr": 0.028614624752805434, + "acc_norm": 0.5163398692810458, + "acc_norm_stderr": 0.028614624752805434 
+ }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.53, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.53, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6528925619834711, + "acc_stderr": 0.04345724570292534, + "acc_norm": 0.6528925619834711, + "acc_norm_stderr": 0.04345724570292534 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.46710526315789475, + "acc_stderr": 0.040601270352363966, + "acc_norm": 0.46710526315789475, + "acc_norm_stderr": 0.040601270352363966 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4150326797385621, + "acc_stderr": 0.01993362777685742, + "acc_norm": 0.4150326797385621, + "acc_norm_stderr": 0.01993362777685742 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32269503546099293, + "acc_stderr": 0.027889139300534778, + "acc_norm": 0.32269503546099293, + "acc_norm_stderr": 0.027889139300534778 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.04547960999764376, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.04547960999764376 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3287037037037037, + "acc_stderr": 0.032036140846700596, + "acc_norm": 0.3287037037037037, + "acc_norm_stderr": 0.032036140846700596 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23798882681564246, + "acc_stderr": 0.0142426300705749, + "acc_norm": 0.23798882681564246, + "acc_norm_stderr": 0.0142426300705749 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4007352941176471, + "acc_stderr": 0.029768263528933105, + "acc_norm": 0.4007352941176471, + 
"acc_norm_stderr": 0.029768263528933105 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.42448979591836733, + "acc_stderr": 0.031642094879429414, + "acc_norm": 0.42448979591836733, + "acc_norm_stderr": 0.031642094879429414 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6244725738396625, + "acc_stderr": 0.03152256243091157, + "acc_norm": 0.6244725738396625, + "acc_norm_stderr": 0.03152256243091157 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.31421121251629724, + "acc_stderr": 0.011855911587048224, + "acc_norm": 0.31421121251629724, + "acc_norm_stderr": 0.011855911587048224 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.47549019607843135, + "acc_stderr": 0.035050931943487976, + "acc_norm": 0.47549019607843135, + "acc_norm_stderr": 0.035050931943487976 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5636363636363636, + "acc_stderr": 0.03872592983524754, + "acc_norm": 0.5636363636363636, + "acc_norm_stderr": 0.03872592983524754 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2778457772337821, + "mc1_stderr": 0.015680929364024633, + "mc2": 0.4181648648839536, + "mc2_stderr": 0.014998383674803386 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4911452184179457, + "acc_stderr": 0.017187658199336736, + "acc_norm": 0.5206611570247934, + "acc_norm_stderr": 0.017175671279836446 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + 
"harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + 
"harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "jingyeom/Yi-ko-1.1-dedup", + "model_sha": "9303f60af4c70b680d7391ab54b0b6d267862954", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/jingyeom/Yi-ko-1.1/result_2023-12-26 01:57:05.json b/jingyeom/Yi-ko-1.1/result_2023-12-26 01:57:05.json new file mode 100644 index 0000000000000000000000000000000000000000..152f4a3ad9f7d4dcfe1fac144d965ecbfac9b5a9 --- /dev/null +++ b/jingyeom/Yi-ko-1.1/result_2023-12-26 01:57:05.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3703071672354949, + "acc_stderr": 0.014111298751674948, + "acc_norm": 0.4402730375426621, + "acc_norm_stderr": 0.014506769524804234 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4159529974108743, + "acc_stderr": 0.004918781662373929, + "acc_norm": 0.5526787492531369, + "acc_norm_stderr": 0.0049620103382263464 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5087719298245614, + "acc_stderr": 0.03834234744164993, + "acc_norm": 0.5087719298245614, + "acc_norm_stderr": 0.03834234744164993 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5242718446601942, + "acc_stderr": 0.049449010929737795, + "acc_norm": 0.5242718446601942, + "acc_norm_stderr": 0.049449010929737795 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5530012771392082, + "acc_stderr": 0.017779225233394216, + "acc_norm": 0.5530012771392082, + "acc_norm_stderr": 0.017779225233394216 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.5259259259259259, + "acc_stderr": 0.04313531696750575, + "acc_norm": 0.5259259259259259, + "acc_norm_stderr": 0.04313531696750575 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + 
"acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.35319148936170214, + "acc_stderr": 0.031245325202761926, + "acc_norm": 0.35319148936170214, + "acc_norm_stderr": 0.031245325202761926 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.40963855421686746, + "acc_stderr": 0.038284011150790206, + "acc_norm": 0.40963855421686746, + "acc_norm_stderr": 0.038284011150790206 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5176848874598071, + "acc_stderr": 0.02838032284907713, + "acc_norm": 0.5176848874598071, + "acc_norm_stderr": 0.02838032284907713 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.48878923766816146, + "acc_stderr": 0.033549366530984746, + "acc_norm": 0.48878923766816146, + "acc_norm_stderr": 0.033549366530984746 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5572519083969466, + "acc_stderr": 0.04356447202665069, + "acc_norm": 0.5572519083969466, + "acc_norm_stderr": 0.04356447202665069 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6212121212121212, + "acc_stderr": 0.03456088731993747, + "acc_norm": 0.6212121212121212, + "acc_norm_stderr": 0.03456088731993747 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.503448275862069, + "acc_stderr": 0.04166567577101579, + "acc_norm": 0.503448275862069, + "acc_norm_stderr": 0.04166567577101579 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.04389869956808778, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.04389869956808778 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4957983193277311, + "acc_stderr": 0.0324773433444811, + "acc_norm": 0.4957983193277311, + "acc_norm_stderr": 0.0324773433444811 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4666666666666667, + 
"acc_stderr": 0.02529460802398648, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.02529460802398648 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3891625615763547, + "acc_stderr": 0.034304624161038716, + "acc_norm": 0.3891625615763547, + "acc_norm_stderr": 0.034304624161038716 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4645161290322581, + "acc_stderr": 0.028372287797962952, + "acc_norm": 0.4645161290322581, + "acc_norm_stderr": 0.028372287797962952 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6752136752136753, + "acc_stderr": 0.03067902276549883, + "acc_norm": 0.6752136752136753, + "acc_norm_stderr": 0.03067902276549883 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4339622641509434, + "acc_stderr": 0.030503292013342596, + "acc_norm": 0.4339622641509434, + "acc_norm_stderr": 0.030503292013342596 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.04769300568972745, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.04769300568972745 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3111111111111111, + "acc_stderr": 0.028226446749683515, + "acc_norm": 0.3111111111111111, + "acc_norm_stderr": 0.028226446749683515 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3443708609271523, + "acc_stderr": 0.038796870240733264, + "acc_norm": 0.3443708609271523, + "acc_norm_stderr": 0.038796870240733264 + }, + "harness|ko_mmlu_sociology|5": { + 
"acc": 0.6019900497512438, + "acc_stderr": 0.034611994290400135, + "acc_norm": 0.6019900497512438, + "acc_norm_stderr": 0.034611994290400135 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.36416184971098264, + "acc_stderr": 0.03669072477416907, + "acc_norm": 0.36416184971098264, + "acc_norm_stderr": 0.03669072477416907 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.291005291005291, + "acc_stderr": 0.02339382650048487, + "acc_norm": 0.291005291005291, + "acc_norm_stderr": 0.02339382650048487 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4375, + "acc_stderr": 0.04148415739394154, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.04148415739394154 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.62, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.62, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4624277456647399, + "acc_stderr": 0.026842985519615375, + "acc_norm": 0.4624277456647399, + "acc_norm_stderr": 0.026842985519615375 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.44785276073619634, + "acc_stderr": 0.03906947479456601, + "acc_norm": 0.44785276073619634, + "acc_norm_stderr": 0.03906947479456601 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5092592592592593, + "acc_stderr": 0.027815973433878014, + "acc_norm": 0.5092592592592593, + "acc_norm_stderr": 0.027815973433878014 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5233160621761658, + "acc_stderr": 0.03604513672442202, + "acc_norm": 0.5233160621761658, + "acc_norm_stderr": 0.03604513672442202 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 
0.34210526315789475, + "acc_stderr": 0.04462917535336936, + "acc_norm": 0.34210526315789475, + "acc_norm_stderr": 0.04462917535336936 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5908256880733945, + "acc_stderr": 0.02108067026443373, + "acc_norm": 0.5908256880733945, + "acc_norm_stderr": 0.02108067026443373 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.31746031746031744, + "acc_stderr": 0.04163453031302859, + "acc_norm": 0.31746031746031744, + "acc_norm_stderr": 0.04163453031302859 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.48366013071895425, + "acc_stderr": 0.028614624752805413, + "acc_norm": 0.48366013071895425, + "acc_norm_stderr": 0.028614624752805413 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5950413223140496, + "acc_stderr": 0.04481137755942469, + "acc_norm": 0.5950413223140496, + "acc_norm_stderr": 0.04481137755942469 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5, + "acc_stderr": 0.04068942293855797, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04068942293855797 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.41830065359477125, + "acc_stderr": 0.019955975145835542, + "acc_norm": 0.41830065359477125, + "acc_norm_stderr": 0.019955975145835542 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32269503546099293, + "acc_stderr": 0.027889139300534778, + "acc_norm": 0.32269503546099293, + "acc_norm_stderr": 0.027889139300534778 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.30357142857142855, + "acc_stderr": 0.043642261558410445, + "acc_norm": 0.30357142857142855, + "acc_norm_stderr": 0.043642261558410445 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3425925925925926, + "acc_stderr": 0.03236585252602157, + "acc_norm": 0.3425925925925926, + "acc_norm_stderr": 0.03236585252602157 + }, + 
"harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27039106145251396, + "acc_stderr": 0.014854993938010066, + "acc_norm": 0.27039106145251396, + "acc_norm_stderr": 0.014854993938010066 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.52, + "acc_stderr": 0.05021167315686779, + "acc_norm": 0.52, + "acc_norm_stderr": 0.05021167315686779 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3639705882352941, + "acc_stderr": 0.029227192460032025, + "acc_norm": 0.3639705882352941, + "acc_norm_stderr": 0.029227192460032025 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.47346938775510206, + "acc_stderr": 0.03196412734523272, + "acc_norm": 0.47346938775510206, + "acc_norm_stderr": 0.03196412734523272 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6329113924050633, + "acc_stderr": 0.03137624072561619, + "acc_norm": 0.6329113924050633, + "acc_norm_stderr": 0.03137624072561619 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.33376792698826596, + "acc_stderr": 0.01204381265584614, + "acc_norm": 0.33376792698826596, + "acc_norm_stderr": 0.01204381265584614 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.47058823529411764, + "acc_stderr": 0.03503235296367992, + "acc_norm": 0.47058823529411764, + "acc_norm_stderr": 0.03503235296367992 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6242424242424243, + "acc_stderr": 0.03781887353205982, + "acc_norm": 0.6242424242424243, + "acc_norm_stderr": 0.03781887353205982 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2729498164014688, + "mc1_stderr": 0.015594753632006514, + "mc2": 0.42276488773202564, + "mc2_stderr": 0.015556879375012815 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4639905548996458, + "acc_stderr": 0.01714571536548666, + "acc_norm": 
0.4769775678866588, + "acc_norm_stderr": 0.017172121546727637 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + 
"harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "jingyeom/Yi-ko-1.1", + "model_sha": "81f5744d1fbdc6467e8df25ceff6f2f0cfaa06e1", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/jingyeom/Yi-ko-1.2/result_2023-12-28 05:50:10.json b/jingyeom/Yi-ko-1.2/result_2023-12-28 05:50:10.json new file mode 100644 index 0000000000000000000000000000000000000000..49f78778d7441a9b42da06e9e5cf15d9fa39935c --- /dev/null +++ b/jingyeom/Yi-ko-1.2/result_2023-12-28 05:50:10.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.37627986348122866, + "acc_stderr": 0.014157022555407163, + "acc_norm": 0.439419795221843, + "acc_norm_stderr": 0.014503747823580122 + }, + "harness|ko_hellaswag|10": { + "acc": 0.41884086835291773, + "acc_stderr": 0.00492360920786154, + "acc_norm": 0.5478988249352719, + "acc_norm_stderr": 0.004966832553245044 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.0381107966983353, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.0381107966983353 + }, + 
"harness|ko_mmlu_management|5": { + "acc": 0.5922330097087378, + "acc_stderr": 0.048657775704107696, + "acc_norm": 0.5922330097087378, + "acc_norm_stderr": 0.048657775704107696 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5478927203065134, + "acc_stderr": 0.017797751493865626, + "acc_norm": 0.5478927203065134, + "acc_norm_stderr": 0.017797751493865626 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.043163785995113245, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.043163785995113245 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720683, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720683 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4085106382978723, + "acc_stderr": 0.03213418026701576, + "acc_norm": 0.4085106382978723, + "acc_norm_stderr": 0.03213418026701576 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39759036144578314, + "acc_stderr": 0.038099730845402184, + "acc_norm": 0.39759036144578314, + "acc_norm_stderr": 0.038099730845402184 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4887459807073955, + "acc_stderr": 0.028390897396863526, + "acc_norm": 0.4887459807073955, + "acc_norm_stderr": 0.028390897396863526 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5201793721973094, + "acc_stderr": 0.033530461674123, + "acc_norm": 0.5201793721973094, + "acc_norm_stderr": 0.033530461674123 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5267175572519084, + "acc_stderr": 0.04379024936553894, + "acc_norm": 0.5267175572519084, + "acc_norm_stderr": 0.04379024936553894 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6060606060606061, + "acc_stderr": 0.03481285338232964, + "acc_norm": 0.6060606060606061, + "acc_norm_stderr": 0.03481285338232964 + }, + 
"harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.503448275862069, + "acc_stderr": 0.041665675771015785, + "acc_norm": 0.503448275862069, + "acc_norm_stderr": 0.041665675771015785 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.04220773659171452, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.04220773659171452 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.453781512605042, + "acc_stderr": 0.03233943468182088, + "acc_norm": 0.453781512605042, + "acc_norm_stderr": 0.03233943468182088 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.441025641025641, + "acc_stderr": 0.02517404838400078, + "acc_norm": 0.441025641025641, + "acc_norm_stderr": 0.02517404838400078 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.048262172941398944, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.048262172941398944 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39408866995073893, + "acc_stderr": 0.034381579670365446, + "acc_norm": 0.39408866995073893, + "acc_norm_stderr": 0.034381579670365446 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.45161290322580644, + "acc_stderr": 0.02831050034856839, + "acc_norm": 0.45161290322580644, + "acc_norm_stderr": 0.02831050034856839 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7008547008547008, + "acc_stderr": 0.02999695185834948, + "acc_norm": 0.7008547008547008, + "acc_norm_stderr": 0.02999695185834948 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4528301886792453, + "acc_stderr": 0.03063562795796182, + "acc_norm": 0.4528301886792453, + 
"acc_norm_stderr": 0.03063562795796182 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5181818181818182, + "acc_stderr": 0.04785964010794916, + "acc_norm": 0.5181818181818182, + "acc_norm_stderr": 0.04785964010794916 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.34074074074074073, + "acc_stderr": 0.02889774874113114, + "acc_norm": 0.34074074074074073, + "acc_norm_stderr": 0.02889774874113114 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.038020397601079024, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.038020397601079024 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.572139303482587, + "acc_stderr": 0.03498541988407795, + "acc_norm": 0.572139303482587, + "acc_norm_stderr": 0.03498541988407795 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3468208092485549, + "acc_stderr": 0.03629146670159664, + "acc_norm": 0.3468208092485549, + "acc_norm_stderr": 0.03629146670159664 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3253968253968254, + "acc_stderr": 0.024130158299762616, + "acc_norm": 0.3253968253968254, + "acc_norm_stderr": 0.024130158299762616 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3958333333333333, + "acc_stderr": 0.04089465449325582, + "acc_norm": 0.3958333333333333, + "acc_norm_stderr": 0.04089465449325582 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4913294797687861, + "acc_stderr": 0.0269150473553698, + "acc_norm": 0.4913294797687861, + "acc_norm_stderr": 0.0269150473553698 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3987730061349693, + "acc_stderr": 0.038470214204560246, + 
"acc_norm": 0.3987730061349693, + "acc_norm_stderr": 0.038470214204560246 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.44753086419753085, + "acc_stderr": 0.02766713856942271, + "acc_norm": 0.44753086419753085, + "acc_norm_stderr": 0.02766713856942271 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5181347150259067, + "acc_stderr": 0.03606065001832919, + "acc_norm": 0.5181347150259067, + "acc_norm_stderr": 0.03606065001832919 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04434600701584925, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04434600701584925 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.581651376146789, + "acc_stderr": 0.02114954859644388, + "acc_norm": 0.581651376146789, + "acc_norm_stderr": 0.02114954859644388 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.03932537680392871, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.03932537680392871 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.48366013071895425, + "acc_stderr": 0.028614624752805413, + "acc_norm": 0.48366013071895425, + "acc_norm_stderr": 0.028614624752805413 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5785123966942148, + "acc_stderr": 0.04507732278775088, + "acc_norm": 0.5785123966942148, + "acc_norm_stderr": 0.04507732278775088 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4868421052631579, + "acc_stderr": 0.04067533136309173, + "acc_norm": 0.4868421052631579, + "acc_norm_stderr": 0.04067533136309173 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.43790849673202614, + "acc_stderr": 0.020071257886886528, + 
"acc_norm": 0.43790849673202614, + "acc_norm_stderr": 0.020071257886886528 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3475177304964539, + "acc_stderr": 0.028406627809590947, + "acc_norm": 0.3475177304964539, + "acc_norm_stderr": 0.028406627809590947 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.33035714285714285, + "acc_stderr": 0.044642857142857116, + "acc_norm": 0.33035714285714285, + "acc_norm_stderr": 0.044642857142857116 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.33796296296296297, + "acc_stderr": 0.03225941352631295, + "acc_norm": 0.33796296296296297, + "acc_norm_stderr": 0.03225941352631295 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24022346368715083, + "acc_stderr": 0.01428834380392529, + "acc_norm": 0.24022346368715083, + "acc_norm_stderr": 0.01428834380392529 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.55, + "acc_stderr": 0.05, + "acc_norm": 0.55, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.39338235294117646, + "acc_stderr": 0.029674288281311172, + "acc_norm": 0.39338235294117646, + "acc_norm_stderr": 0.029674288281311172 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4530612244897959, + "acc_stderr": 0.03186785930004129, + "acc_norm": 0.4530612244897959, + "acc_norm_stderr": 0.03186785930004129 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6075949367088608, + "acc_stderr": 0.03178471874564729, + "acc_norm": 0.6075949367088608, + "acc_norm_stderr": 0.03178471874564729 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3324641460234681, + "acc_stderr": 0.012032022332260518, + "acc_norm": 0.3324641460234681, + "acc_norm_stderr": 0.012032022332260518 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 
0.49019607843137253, + "acc_stderr": 0.03508637358630573, + "acc_norm": 0.49019607843137253, + "acc_norm_stderr": 0.03508637358630573 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6181818181818182, + "acc_stderr": 0.03793713171165635, + "acc_norm": 0.6181818181818182, + "acc_norm_stderr": 0.03793713171165635 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.26193390452876375, + "mc1_stderr": 0.015392118805015008, + "mc2": 0.40740897953830707, + "mc2_stderr": 0.015409125138870454 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4899645808736718, + "acc_stderr": 0.017186891286894043, + "acc_norm": 0.5100354191263282, + "acc_norm_stderr": 0.017186891286894063 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + 
"harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "jingyeom/Yi-ko-1.2", + "model_sha": "7b0cdd9271915ce57a6f6da93e1830f7210b8a24", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/jingyeom/Yi-ko_3_1_7/result_2023-12-21 09:24:47.json b/jingyeom/Yi-ko_3_1_7/result_2023-12-21 09:24:47.json new file mode 100644 index 0000000000000000000000000000000000000000..e6082a7691a0f9706c9cffa956a722c4c6bf8a18 --- /dev/null +++ 
b/jingyeom/Yi-ko_3_1_7/result_2023-12-21 09:24:47.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.371160409556314, + "acc_stderr": 0.014117971901142815, + "acc_norm": 0.4274744027303754, + "acc_norm_stderr": 0.014456862944650652 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4147580163314081, + "acc_stderr": 0.004916733258140296, + "acc_norm": 0.5525791674965146, + "acc_norm_stderr": 0.004962115526014293 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4327485380116959, + "acc_stderr": 0.03799978644370608, + "acc_norm": 0.4327485380116959, + "acc_norm_stderr": 0.03799978644370608 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.49514563106796117, + "acc_stderr": 0.049505043821289195, + "acc_norm": 0.49514563106796117, + "acc_norm_stderr": 0.049505043821289195 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5287356321839081, + "acc_stderr": 0.017850410794380173, + "acc_norm": 0.5287356321839081, + "acc_norm_stderr": 0.017850410794380173 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45185185185185184, + "acc_stderr": 0.042992689054808624, + "acc_norm": 0.45185185185185184, + "acc_norm_stderr": 0.042992689054808624 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39148936170212767, + "acc_stderr": 0.03190701242326812, + "acc_norm": 0.39148936170212767, + "acc_norm_stderr": 0.03190701242326812 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3373493975903614, + "acc_stderr": 0.03680783690727581, + "acc_norm": 0.3373493975903614, + "acc_norm_stderr": 0.03680783690727581 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4630225080385852, + "acc_stderr": 0.028320325830105915, + "acc_norm": 0.4630225080385852, + "acc_norm_stderr": 0.028320325830105915 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4618834080717489, + "acc_stderr": 
0.03346015011973228, + "acc_norm": 0.4618834080717489, + "acc_norm_stderr": 0.03346015011973228 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5572519083969466, + "acc_stderr": 0.04356447202665069, + "acc_norm": 0.5572519083969466, + "acc_norm_stderr": 0.04356447202665069 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.601010101010101, + "acc_stderr": 0.03488901616852729, + "acc_norm": 0.601010101010101, + "acc_norm_stderr": 0.03488901616852729 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4689655172413793, + "acc_stderr": 0.04158632762097828, + "acc_norm": 0.4689655172413793, + "acc_norm_stderr": 0.04158632762097828 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.04220773659171453, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.04220773659171453 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5504201680672269, + "acc_stderr": 0.03231293497137707, + "acc_norm": 0.5504201680672269, + "acc_norm_stderr": 0.03231293497137707 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.46923076923076923, + "acc_stderr": 0.025302958890850154, + "acc_norm": 0.46923076923076923, + "acc_norm_stderr": 0.025302958890850154 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4537037037037037, + "acc_stderr": 0.04812917324536823, + "acc_norm": 0.4537037037037037, + "acc_norm_stderr": 0.04812917324536823 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 
0.3793103448275862, + "acc_stderr": 0.034139638059062345, + "acc_norm": 0.3793103448275862, + "acc_norm_stderr": 0.034139638059062345 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4806451612903226, + "acc_stderr": 0.0284226874043121, + "acc_norm": 0.4806451612903226, + "acc_norm_stderr": 0.0284226874043121 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6837606837606838, + "acc_stderr": 0.03046365674734026, + "acc_norm": 0.6837606837606838, + "acc_norm_stderr": 0.03046365674734026 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.45660377358490567, + "acc_stderr": 0.03065674869673943, + "acc_norm": 0.45660377358490567, + "acc_norm_stderr": 0.03065674869673943 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4818181818181818, + "acc_stderr": 0.04785964010794916, + "acc_norm": 0.4818181818181818, + "acc_norm_stderr": 0.04785964010794916 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3592592592592593, + "acc_stderr": 0.029252905927251976, + "acc_norm": 0.3592592592592593, + "acc_norm_stderr": 0.029252905927251976 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.03802039760107903, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.03802039760107903 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6517412935323383, + "acc_stderr": 0.033687874661154596, + "acc_norm": 0.6517412935323383, + "acc_norm_stderr": 0.033687874661154596 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.41040462427745666, + "acc_stderr": 0.03750757044895538, + "acc_norm": 0.41040462427745666, + "acc_norm_stderr": 0.03750757044895538 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.02467786284133278, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.02467786284133278 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4236111111111111, + "acc_stderr": 0.041321250197233685, + "acc_norm": 0.4236111111111111, + 
"acc_norm_stderr": 0.041321250197233685 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.62, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.62, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4913294797687861, + "acc_stderr": 0.026915047355369804, + "acc_norm": 0.4913294797687861, + "acc_norm_stderr": 0.026915047355369804 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4049079754601227, + "acc_stderr": 0.03856672163548914, + "acc_norm": 0.4049079754601227, + "acc_norm_stderr": 0.03856672163548914 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5092592592592593, + "acc_stderr": 0.027815973433878014, + "acc_norm": 0.5092592592592593, + "acc_norm_stderr": 0.027815973433878014 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5544041450777202, + "acc_stderr": 0.0358701498607566, + "acc_norm": 0.5544041450777202, + "acc_norm_stderr": 0.0358701498607566 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04434600701584925, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04434600701584925 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6091743119266055, + "acc_stderr": 0.020920058346111065, + "acc_norm": 0.6091743119266055, + "acc_norm_stderr": 0.020920058346111065 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.040061680838488774, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.040061680838488774 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4803921568627451, + "acc_stderr": 0.028607893699576063, + "acc_norm": 0.4803921568627451, + 
"acc_norm_stderr": 0.028607893699576063 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5867768595041323, + "acc_stderr": 0.04495087843548408, + "acc_norm": 0.5867768595041323, + "acc_norm_stderr": 0.04495087843548408 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.48026315789473684, + "acc_stderr": 0.040657710025626036, + "acc_norm": 0.48026315789473684, + "acc_norm_stderr": 0.040657710025626036 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4084967320261438, + "acc_stderr": 0.01988622103750187, + "acc_norm": 0.4084967320261438, + "acc_norm_stderr": 0.01988622103750187 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3404255319148936, + "acc_stderr": 0.028267657482650144, + "acc_norm": 0.3404255319148936, + "acc_norm_stderr": 0.028267657482650144 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2767857142857143, + "acc_stderr": 0.042466243366976235, + "acc_norm": 0.2767857142857143, + "acc_norm_stderr": 0.042466243366976235 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.32407407407407407, + "acc_stderr": 0.03191923445686186, + "acc_norm": 0.32407407407407407, + "acc_norm_stderr": 0.03191923445686186 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24692737430167597, + "acc_stderr": 0.014422292204808852, + "acc_norm": 0.24692737430167597, + "acc_norm_stderr": 0.014422292204808852 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.39338235294117646, + "acc_stderr": 
0.029674288281311172, + "acc_norm": 0.39338235294117646, + "acc_norm_stderr": 0.029674288281311172 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4204081632653061, + "acc_stderr": 0.031601069934496046, + "acc_norm": 0.4204081632653061, + "acc_norm_stderr": 0.031601069934496046 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5822784810126582, + "acc_stderr": 0.032103530322412685, + "acc_norm": 0.5822784810126582, + "acc_norm_stderr": 0.032103530322412685 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3344198174706649, + "acc_stderr": 0.012049668983214941, + "acc_norm": 0.3344198174706649, + "acc_norm_stderr": 0.012049668983214941 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.47549019607843135, + "acc_stderr": 0.035050931943487976, + "acc_norm": 0.47549019607843135, + "acc_norm_stderr": 0.035050931943487976 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.03903698647748441, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.03903698647748441 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2778457772337821, + "mc1_stderr": 0.015680929364024626, + "mc2": 0.43705454925575216, + "mc2_stderr": 0.015879980235191146 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4604486422668241, + "acc_stderr": 0.017136487626049846, + "acc_norm": 0.47107438016528924, + "acc_norm_stderr": 0.017161563949916348 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + 
"harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + 
"harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "jingyeom/Yi-ko_3_1_7", + "model_sha": "e05783de0d8576cfcf5c14e8e66ae1349c03e4ad", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/jingyeom/mistral_ko_all_inst/result_2023-12-18 11:26:46.json b/jingyeom/mistral_ko_all_inst/result_2023-12-18 11:26:46.json new file mode 100644 index 0000000000000000000000000000000000000000..89eb1126575b1d07e19ca158b267faef140a8513 --- /dev/null +++ b/jingyeom/mistral_ko_all_inst/result_2023-12-18 11:26:46.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.32764505119453924, + "acc_stderr": 0.01371584794071934, + "acc_norm": 0.36860068259385664, + "acc_norm_stderr": 0.0140978106780422 + }, + "harness|ko_hellaswag|10": { + "acc": 0.36715793666600277, + "acc_stderr": 0.004810449343572393, + "acc_norm": 0.47988448516231824, + "acc_norm_stderr": 0.004985741706385723 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.47953216374269003, + "acc_stderr": 0.038316105328219316, + "acc_norm": 0.47953216374269003, + "acc_norm_stderr": 0.038316105328219316 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5533980582524272, + "acc_stderr": 0.04922424153458934, + "acc_norm": 0.5533980582524272, + "acc_norm_stderr": 0.04922424153458934 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4725415070242657, + "acc_stderr": 0.01785298126663395, + "acc_norm": 0.4725415070242657, + "acc_norm_stderr": 0.01785298126663395 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3851851851851852, + "acc_stderr": 0.042039210401562783, + "acc_norm": 0.3851851851851852, + "acc_norm_stderr": 0.042039210401562783 + }, + "harness|ko_mmlu_abstract_algebra|5": 
{ + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.35319148936170214, + "acc_stderr": 0.03124532520276193, + "acc_norm": 0.35319148936170214, + "acc_norm_stderr": 0.03124532520276193 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39759036144578314, + "acc_stderr": 0.03809973084540219, + "acc_norm": 0.39759036144578314, + "acc_norm_stderr": 0.03809973084540219 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4983922829581994, + "acc_stderr": 0.02839794490780661, + "acc_norm": 0.4983922829581994, + "acc_norm_stderr": 0.02839794490780661 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4439461883408072, + "acc_stderr": 0.03334625674242728, + "acc_norm": 0.4439461883408072, + "acc_norm_stderr": 0.03334625674242728 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4351145038167939, + "acc_stderr": 0.04348208051644858, + "acc_norm": 0.4351145038167939, + "acc_norm_stderr": 0.04348208051644858 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.51010101010101, + "acc_stderr": 0.035616254886737454, + "acc_norm": 0.51010101010101, + "acc_norm_stderr": 0.035616254886737454 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4689655172413793, + "acc_stderr": 0.04158632762097828, + "acc_norm": 0.4689655172413793, + "acc_norm_stderr": 0.04158632762097828 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.04488482852329017, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.04488482852329017 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5084033613445378, + "acc_stderr": 0.03247390276569669, + "acc_norm": 0.5084033613445378, + "acc_norm_stderr": 0.03247390276569669 + }, + 
"harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4512820512820513, + "acc_stderr": 0.025230381238934833, + "acc_norm": 0.4512820512820513, + "acc_norm_stderr": 0.025230381238934833 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.63, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.63, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5370370370370371, + "acc_stderr": 0.04820403072760627, + "acc_norm": 0.5370370370370371, + "acc_norm_stderr": 0.04820403072760627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4482758620689655, + "acc_stderr": 0.034991131376767445, + "acc_norm": 0.4482758620689655, + "acc_norm_stderr": 0.034991131376767445 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.46774193548387094, + "acc_stderr": 0.028384747788813322, + "acc_norm": 0.46774193548387094, + "acc_norm_stderr": 0.028384747788813322 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7136752136752137, + "acc_stderr": 0.029614323690456648, + "acc_norm": 0.7136752136752137, + "acc_norm_stderr": 0.029614323690456648 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3886792452830189, + "acc_stderr": 0.030000485448675986, + "acc_norm": 0.3886792452830189, + "acc_norm_stderr": 0.030000485448675986 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4909090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.4909090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.34814814814814815, + "acc_stderr": 0.029045600290616258, + "acc_norm": 0.34814814814814815, + "acc_norm_stderr": 0.029045600290616258 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + "acc_stderr": 0.037345356767871984, + "acc_norm": 0.2980132450331126, + 
"acc_norm_stderr": 0.037345356767871984 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6069651741293532, + "acc_stderr": 0.0345368246603156, + "acc_norm": 0.6069651741293532, + "acc_norm_stderr": 0.0345368246603156 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3815028901734104, + "acc_stderr": 0.037038511930995194, + "acc_norm": 0.3815028901734104, + "acc_norm_stderr": 0.037038511930995194 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.02467786284133278, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.02467786284133278 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3402777777777778, + "acc_stderr": 0.03962135573486219, + "acc_norm": 0.3402777777777778, + "acc_norm_stderr": 0.03962135573486219 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.64, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.64, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4913294797687861, + "acc_stderr": 0.02691504735536981, + "acc_norm": 0.4913294797687861, + "acc_norm_stderr": 0.02691504735536981 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4785276073619632, + "acc_stderr": 0.03924746876751129, + "acc_norm": 0.4785276073619632, + "acc_norm_stderr": 0.03924746876751129 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.44135802469135804, + "acc_stderr": 0.027628737155668784, + "acc_norm": 0.44135802469135804, + "acc_norm_stderr": 0.027628737155668784 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.48186528497409326, + "acc_stderr": 0.036060650018329185, + "acc_norm": 
0.48186528497409326, + "acc_norm_stderr": 0.036060650018329185 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3508771929824561, + "acc_stderr": 0.044895393502706986, + "acc_norm": 0.3508771929824561, + "acc_norm_stderr": 0.044895393502706986 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.48073394495412847, + "acc_stderr": 0.021421402982548878, + "acc_norm": 0.48073394495412847, + "acc_norm_stderr": 0.021421402982548878 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3253968253968254, + "acc_stderr": 0.04190596438871136, + "acc_norm": 0.3253968253968254, + "acc_norm_stderr": 0.04190596438871136 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.47058823529411764, + "acc_stderr": 0.028580341065138282, + "acc_norm": 0.47058823529411764, + "acc_norm_stderr": 0.028580341065138282 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.628099173553719, + "acc_stderr": 0.04412015806624504, + "acc_norm": 0.628099173553719, + "acc_norm_stderr": 0.04412015806624504 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4276315789473684, + "acc_stderr": 0.04026097083296558, + "acc_norm": 0.4276315789473684, + "acc_norm_stderr": 0.04026097083296558 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3839869281045752, + "acc_stderr": 0.01967580813528151, + "acc_norm": 0.3839869281045752, + "acc_norm_stderr": 0.01967580813528151 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.35106382978723405, + "acc_stderr": 0.028473501272963775, + "acc_norm": 0.35106382978723405, + "acc_norm_stderr": 0.028473501272963775 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.44642857142857145, + "acc_stderr": 0.04718471485219588, + "acc_norm": 0.44642857142857145, + "acc_norm_stderr": 0.04718471485219588 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 
0.41203703703703703, + "acc_stderr": 0.03356787758160835, + "acc_norm": 0.41203703703703703, + "acc_norm_stderr": 0.03356787758160835 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.26145251396648045, + "acc_stderr": 0.014696599650364548, + "acc_norm": 0.26145251396648045, + "acc_norm_stderr": 0.014696599650364548 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.41, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3272058823529412, + "acc_stderr": 0.02850145286039657, + "acc_norm": 0.3272058823529412, + "acc_norm_stderr": 0.02850145286039657 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4448979591836735, + "acc_stderr": 0.031814251181977865, + "acc_norm": 0.4448979591836735, + "acc_norm_stderr": 0.031814251181977865 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5358649789029536, + "acc_stderr": 0.03246338898055659, + "acc_norm": 0.5358649789029536, + "acc_norm_stderr": 0.03246338898055659 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.32985658409387225, + "acc_stderr": 0.012008129938540476, + "acc_norm": 0.32985658409387225, + "acc_norm_stderr": 0.012008129938540476 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4117647058823529, + "acc_stderr": 0.0345423658538061, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.0345423658538061 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4484848484848485, + "acc_stderr": 0.03883565977956929, + "acc_norm": 0.4484848484848485, + "acc_norm_stderr": 0.03883565977956929 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2839657282741738, + "mc1_stderr": 0.015785370858396708, + "mc2": 0.4665680569692903, + "mc2_stderr": 0.01556157417629164 + 
}, + "harness|ko_commongen_v2|2": { + "acc": 0.42739079102715466, + "acc_stderr": 0.017008129844823156, + "acc_norm": 0.4781582054309327, + "acc_norm_stderr": 0.01717394447429438 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + 
"harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "jingyeom/mistral_ko_all_inst", + "model_sha": "5782059fd899a370fc0e92a7d18501fd75f0b725", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/jingyeom/penguin3.1.6n_7b/result_2023-11-16 00:30:20.json b/jingyeom/penguin3.1.6n_7b/result_2023-11-16 00:30:20.json new file mode 100644 index 0000000000000000000000000000000000000000..c579500152b6d662c9a5649e90173dae4a7474d5 --- /dev/null +++ b/jingyeom/penguin3.1.6n_7b/result_2023-11-16 00:30:20.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3378839590443686, + "acc_stderr": 0.013822047922283509, + "acc_norm": 0.386518771331058, + "acc_norm_stderr": 0.01423008476191048 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3723361880103565, + "acc_stderr": 0.004824393076826635, + "acc_norm": 0.47211710814578767, + "acc_norm_stderr": 0.0049820167024459605 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 
0.42105263157894735, + "acc_stderr": 0.037867207062342145, + "acc_norm": 0.42105263157894735, + "acc_norm_stderr": 0.037867207062342145 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.46601941747572817, + "acc_stderr": 0.0493929144727348, + "acc_norm": 0.46601941747572817, + "acc_norm_stderr": 0.0493929144727348 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4393358876117497, + "acc_stderr": 0.01774787424568361, + "acc_norm": 0.4393358876117497, + "acc_norm_stderr": 0.01774787424568361 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4222222222222222, + "acc_stderr": 0.042667634040995814, + "acc_norm": 0.4222222222222222, + "acc_norm_stderr": 0.042667634040995814 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847415, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847415 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.35319148936170214, + "acc_stderr": 0.03124532520276193, + "acc_norm": 0.35319148936170214, + "acc_norm_stderr": 0.03124532520276193 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.40963855421686746, + "acc_stderr": 0.038284011150790206, + "acc_norm": 0.40963855421686746, + "acc_norm_stderr": 0.038284011150790206 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.44694533762057875, + "acc_stderr": 0.02823776942208534, + "acc_norm": 0.44694533762057875, + "acc_norm_stderr": 0.02823776942208534 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4439461883408072, + "acc_stderr": 0.03334625674242728, + "acc_norm": 0.4439461883408072, + "acc_norm_stderr": 0.03334625674242728 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.46564885496183206, + "acc_stderr": 0.04374928560599738, + "acc_norm": 0.46564885496183206, + "acc_norm_stderr": 0.04374928560599738 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 
0.4898989898989899, + "acc_stderr": 0.035616254886737454, + "acc_norm": 0.4898989898989899, + "acc_norm_stderr": 0.035616254886737454 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.496551724137931, + "acc_stderr": 0.041665675771015785, + "acc_norm": 0.496551724137931, + "acc_norm_stderr": 0.041665675771015785 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.19607843137254902, + "acc_stderr": 0.039505818611799616, + "acc_norm": 0.19607843137254902, + "acc_norm_stderr": 0.039505818611799616 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3697478991596639, + "acc_stderr": 0.031357095996135904, + "acc_norm": 0.3697478991596639, + "acc_norm_stderr": 0.031357095996135904 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.36666666666666664, + "acc_stderr": 0.024433016466052452, + "acc_norm": 0.36666666666666664, + "acc_norm_stderr": 0.024433016466052452 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.56, + "acc_stderr": 0.049888765156985905, + "acc_norm": 0.56, + "acc_norm_stderr": 0.049888765156985905 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.04820403072760627, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.04820403072760627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3891625615763547, + "acc_stderr": 0.034304624161038716, + "acc_norm": 0.3891625615763547, + "acc_norm_stderr": 0.034304624161038716 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3935483870967742, + "acc_stderr": 0.027791878753132274, + "acc_norm": 0.3935483870967742, + "acc_norm_stderr": 0.027791878753132274 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6367521367521367, + "acc_stderr": 0.03150712523091264, + "acc_norm": 0.6367521367521367, + "acc_norm_stderr": 0.03150712523091264 + }, + 
"harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.37358490566037733, + "acc_stderr": 0.02977308271331987, + "acc_norm": 0.37358490566037733, + "acc_norm_stderr": 0.02977308271331987 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.41818181818181815, + "acc_stderr": 0.04724577405731572, + "acc_norm": 0.41818181818181815, + "acc_norm_stderr": 0.04724577405731572 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3111111111111111, + "acc_stderr": 0.02822644674968352, + "acc_norm": 0.3111111111111111, + "acc_norm_stderr": 0.02822644674968352 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.037101857261199946, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.037101857261199946 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5572139303482587, + "acc_stderr": 0.03512310964123937, + "acc_norm": 0.5572139303482587, + "acc_norm_stderr": 0.03512310964123937 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3699421965317919, + "acc_stderr": 0.03681229633394319, + "acc_norm": 0.3699421965317919, + "acc_norm_stderr": 0.03681229633394319 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3306878306878307, + "acc_stderr": 0.024229965298425082, + "acc_norm": 0.3306878306878307, + "acc_norm_stderr": 0.024229965298425082 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3680555555555556, + "acc_stderr": 0.04032999053960718, + "acc_norm": 0.3680555555555556, + "acc_norm_stderr": 0.04032999053960718 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.55, + "acc_stderr": 0.05, + "acc_norm": 0.55, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4913294797687861, + "acc_stderr": 0.026915047355369804, + "acc_norm": 0.4913294797687861, + "acc_norm_stderr": 0.026915047355369804 + }, 
+ "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.34355828220858897, + "acc_stderr": 0.037311335196738925, + "acc_norm": 0.34355828220858897, + "acc_norm_stderr": 0.037311335196738925 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4351851851851852, + "acc_stderr": 0.027586006221607708, + "acc_norm": 0.4351851851851852, + "acc_norm_stderr": 0.027586006221607708 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.38341968911917096, + "acc_stderr": 0.03508984236295341, + "acc_norm": 0.38341968911917096, + "acc_norm_stderr": 0.03508984236295341 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.30701754385964913, + "acc_stderr": 0.0433913832257986, + "acc_norm": 0.30701754385964913, + "acc_norm_stderr": 0.0433913832257986 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.4, + "acc_stderr": 0.021004201260420075, + "acc_norm": 0.4, + "acc_norm_stderr": 0.021004201260420075 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30158730158730157, + "acc_stderr": 0.041049472699033945, + "acc_norm": 0.30158730158730157, + "acc_norm_stderr": 0.041049472699033945 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.38562091503267976, + "acc_stderr": 0.027870745278290324, + "acc_norm": 0.38562091503267976, + "acc_norm_stderr": 0.027870745278290324 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5206611570247934, + "acc_stderr": 0.04560456086387235, + "acc_norm": 0.5206611570247934, + "acc_norm_stderr": 0.04560456086387235 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.39473684210526316, + "acc_stderr": 0.039777499346220734, + "acc_norm": 0.39473684210526316, + "acc_norm_stderr": 
0.039777499346220734 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3415032679738562, + "acc_stderr": 0.01918463932809249, + "acc_norm": 0.3415032679738562, + "acc_norm_stderr": 0.01918463932809249 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32978723404255317, + "acc_stderr": 0.028045946942042398, + "acc_norm": 0.32978723404255317, + "acc_norm_stderr": 0.028045946942042398 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.30357142857142855, + "acc_stderr": 0.04364226155841044, + "acc_norm": 0.30357142857142855, + "acc_norm_stderr": 0.04364226155841044 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.39814814814814814, + "acc_stderr": 0.033384734032074016, + "acc_norm": 0.39814814814814814, + "acc_norm_stderr": 0.033384734032074016 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.29720670391061454, + "acc_stderr": 0.015285313353641597, + "acc_norm": 0.29720670391061454, + "acc_norm_stderr": 0.015285313353641597 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939098, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939098 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.2977941176470588, + "acc_stderr": 0.02777829870154544, + "acc_norm": 0.2977941176470588, + "acc_norm_stderr": 0.02777829870154544 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.363265306122449, + "acc_stderr": 0.03078905113903081, + "acc_norm": 0.363265306122449, + "acc_norm_stderr": 0.03078905113903081 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.48945147679324896, + "acc_stderr": 0.032539983791662855, + "acc_norm": 0.48945147679324896, + "acc_norm_stderr": 0.032539983791662855 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3050847457627119, + 
"acc_stderr": 0.011759939618085457, + "acc_norm": 0.3050847457627119, + "acc_norm_stderr": 0.011759939618085457 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.3382352941176471, + "acc_stderr": 0.03320574612945431, + "acc_norm": 0.3382352941176471, + "acc_norm_stderr": 0.03320574612945431 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3939393939393939, + "acc_stderr": 0.038154943086889305, + "acc_norm": 0.3939393939393939, + "acc_norm_stderr": 0.038154943086889305 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2668298653610771, + "mc1_stderr": 0.015483691939237269, + "mc2": 0.4285943154065853, + "mc2_stderr": 0.01581116786098837 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2585596221959858, + "acc_stderr": 0.015053354438963981, + "acc_norm": 0.2939787485242031, + "acc_norm_stderr": 0.01566324256909113 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + 
"harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "jingyeom/penguin3.1.6n_7b", + "model_sha": "8e94b41ee3cf66c8448deb70f194e2c5c3c066c2", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/jingyeom/seal3.1.3_ia3/result_2023-11-15 00:26:15.json 
b/jingyeom/seal3.1.3_ia3/result_2023-11-15 00:26:15.json new file mode 100644 index 0000000000000000000000000000000000000000..f63e44d3d6678620079655a6e9467f98146a5fcb --- /dev/null +++ b/jingyeom/seal3.1.3_ia3/result_2023-11-15 00:26:15.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.38054607508532423, + "acc_stderr": 0.014188277712349814, + "acc_norm": 0.4522184300341297, + "acc_norm_stderr": 0.014544519880633832 + }, + "harness|ko_hellaswag|10": { + "acc": 0.41784505078669587, + "acc_stderr": 0.0049219641338740195, + "acc_norm": 0.5613423620792671, + "acc_norm_stderr": 0.0049520870831289065 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.47368421052631576, + "acc_stderr": 0.038295098689947286, + "acc_norm": 0.47368421052631576, + "acc_norm_stderr": 0.038295098689947286 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.3883495145631068, + "acc_stderr": 0.04825729337356389, + "acc_norm": 0.3883495145631068, + "acc_norm_stderr": 0.04825729337356389 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.49169859514687103, + "acc_stderr": 0.017877498991072008, + "acc_norm": 0.49169859514687103, + "acc_norm_stderr": 0.017877498991072008 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.42962962962962964, + "acc_stderr": 0.04276349494376598, + "acc_norm": 0.42962962962962964, + "acc_norm_stderr": 0.04276349494376598 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3872340425531915, + "acc_stderr": 0.03184389265339526, + "acc_norm": 0.3872340425531915, + "acc_norm_stderr": 0.03184389265339526 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42168674698795183, + "acc_stderr": 0.03844453181770917, + "acc_norm": 0.42168674698795183, + "acc_norm_stderr": 0.03844453181770917 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.44694533762057875, + 
"acc_stderr": 0.028237769422085328, + "acc_norm": 0.44694533762057875, + "acc_norm_stderr": 0.028237769422085328 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5201793721973094, + "acc_stderr": 0.033530461674123, + "acc_norm": 0.5201793721973094, + "acc_norm_stderr": 0.033530461674123 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.44274809160305345, + "acc_stderr": 0.043564472026650695, + "acc_norm": 0.44274809160305345, + "acc_norm_stderr": 0.043564472026650695 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.4898989898989899, + "acc_stderr": 0.035616254886737454, + "acc_norm": 0.4898989898989899, + "acc_norm_stderr": 0.035616254886737454 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.38620689655172413, + "acc_stderr": 0.04057324734419034, + "acc_norm": 0.38620689655172413, + "acc_norm_stderr": 0.04057324734419034 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.040925639582376536, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.040925639582376536 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.40336134453781514, + "acc_stderr": 0.031866081214088314, + "acc_norm": 0.40336134453781514, + "acc_norm_stderr": 0.031866081214088314 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3923076923076923, + "acc_stderr": 0.024756000382130945, + "acc_norm": 0.3923076923076923, + "acc_norm_stderr": 0.024756000382130945 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_jurisprudence|5": { + 
"acc": 0.48148148148148145, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3891625615763547, + "acc_stderr": 0.0343046241610387, + "acc_norm": 0.3891625615763547, + "acc_norm_stderr": 0.0343046241610387 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3903225806451613, + "acc_stderr": 0.027751256636969573, + "acc_norm": 0.3903225806451613, + "acc_norm_stderr": 0.027751256636969573 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6282051282051282, + "acc_stderr": 0.031660988918880785, + "acc_norm": 0.6282051282051282, + "acc_norm_stderr": 0.031660988918880785 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.39622641509433965, + "acc_stderr": 0.030102793781791194, + "acc_norm": 0.39622641509433965, + "acc_norm_stderr": 0.030102793781791194 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.44545454545454544, + "acc_stderr": 0.047605488214603246, + "acc_norm": 0.44545454545454544, + "acc_norm_stderr": 0.047605488214603246 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.23703703703703705, + "acc_stderr": 0.02592887613276612, + "acc_norm": 0.23703703703703705, + "acc_norm_stderr": 0.02592887613276612 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2582781456953642, + "acc_stderr": 0.035737053147634576, + "acc_norm": 0.2582781456953642, + "acc_norm_stderr": 0.035737053147634576 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.527363184079602, + "acc_stderr": 0.035302355173346824, + "acc_norm": 0.527363184079602, + "acc_norm_stderr": 0.035302355173346824 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.32947976878612717, + "acc_stderr": 0.03583901754736412, + "acc_norm": 0.32947976878612717, + "acc_norm_stderr": 0.03583901754736412 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.28835978835978837, + "acc_stderr": 0.023330654054535892, + "acc_norm": 
0.28835978835978837, + "acc_norm_stderr": 0.023330654054535892 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3680555555555556, + "acc_stderr": 0.04032999053960718, + "acc_norm": 0.3680555555555556, + "acc_norm_stderr": 0.04032999053960718 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.35, + "acc_stderr": 0.04793724854411021, + "acc_norm": 0.35, + "acc_norm_stderr": 0.04793724854411021 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.64, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.64, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4393063583815029, + "acc_stderr": 0.02672003438051499, + "acc_norm": 0.4393063583815029, + "acc_norm_stderr": 0.02672003438051499 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.36809815950920244, + "acc_stderr": 0.03789213935838396, + "acc_norm": 0.36809815950920244, + "acc_norm_stderr": 0.03789213935838396 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.44135802469135804, + "acc_stderr": 0.027628737155668767, + "acc_norm": 0.44135802469135804, + "acc_norm_stderr": 0.027628737155668767 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.45077720207253885, + "acc_stderr": 0.035909109522355244, + "acc_norm": 0.45077720207253885, + "acc_norm_stderr": 0.035909109522355244 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.039994238792813365, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.039994238792813365 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.44403669724770645, + "acc_stderr": 0.021302621211654525, + "acc_norm": 0.44403669724770645, + "acc_norm_stderr": 0.021302621211654525 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30158730158730157, + "acc_stderr": 
0.04104947269903394, + "acc_norm": 0.30158730158730157, + "acc_norm_stderr": 0.04104947269903394 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3660130718954248, + "acc_stderr": 0.02758281141515961, + "acc_norm": 0.3660130718954248, + "acc_norm_stderr": 0.02758281141515961 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.45, + "acc_stderr": 0.049999999999999996, + "acc_norm": 0.45, + "acc_norm_stderr": 0.049999999999999996 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5619834710743802, + "acc_stderr": 0.045291468044357915, + "acc_norm": 0.5619834710743802, + "acc_norm_stderr": 0.045291468044357915 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3355263157894737, + "acc_stderr": 0.038424985593952694, + "acc_norm": 0.3355263157894737, + "acc_norm_stderr": 0.038424985593952694 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3431372549019608, + "acc_stderr": 0.019206606848825362, + "acc_norm": 0.3431372549019608, + "acc_norm_stderr": 0.019206606848825362 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3191489361702128, + "acc_stderr": 0.027807990141320196, + "acc_norm": 0.3191489361702128, + "acc_norm_stderr": 0.027807990141320196 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2767857142857143, + "acc_stderr": 0.042466243366976235, + "acc_norm": 0.2767857142857143, + "acc_norm_stderr": 0.042466243366976235 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.30092592592592593, + "acc_stderr": 0.03128039084329882, + "acc_norm": 0.30092592592592593, + "acc_norm_stderr": 0.03128039084329882 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + 
"harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3786764705882353, + "acc_stderr": 0.029465133639776125, + "acc_norm": 0.3786764705882353, + "acc_norm_stderr": 0.029465133639776125 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.46122448979591835, + "acc_stderr": 0.03191282052669277, + "acc_norm": 0.46122448979591835, + "acc_norm_stderr": 0.03191282052669277 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5780590717299579, + "acc_stderr": 0.032148146302403695, + "acc_norm": 0.5780590717299579, + "acc_norm_stderr": 0.032148146302403695 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.29595827900912647, + "acc_stderr": 0.011658518525277054, + "acc_norm": 0.29595827900912647, + "acc_norm_stderr": 0.011658518525277054 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4019607843137255, + "acc_stderr": 0.034411900234824655, + "acc_norm": 0.4019607843137255, + "acc_norm_stderr": 0.034411900234824655 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.038956580652718446, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.038956580652718446 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2668298653610771, + "mc1_stderr": 0.015483691939237269, + "mc2": 0.4123953965733462, + "mc2_stderr": 0.014907127171846607 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.45690672963400236, + "acc_stderr": 0.017126389093086784, + "acc_norm": 0.5667060212514758, + "acc_norm_stderr": 0.017036683641893098 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + 
"harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + 
"harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "jingyeom/seal3.1.3_ia3", + "model_sha": "9321128d80a13a06c07569136884d5c1bfa6fa66", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/jingyeom/seal3.1.6_ia3/result_2023-11-18 12:51:41.json b/jingyeom/seal3.1.6_ia3/result_2023-11-18 12:51:41.json new file mode 100644 index 0000000000000000000000000000000000000000..637ec5c0f2f5853dee75fcb30783e2fe66cd65fa --- /dev/null +++ b/jingyeom/seal3.1.6_ia3/result_2023-11-18 12:51:41.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3822525597269625, + "acc_stderr": 0.014200454049979277, + "acc_norm": 0.4522184300341297, + "acc_norm_stderr": 0.014544519880633837 + }, + "harness|ko_hellaswag|10": { + "acc": 0.41943835889265085, + "acc_stderr": 0.004924586362301654, + "acc_norm": 0.5659231228838877, + "acc_norm_stderr": 0.004946221512145287 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.52046783625731, + "acc_stderr": 0.038316105328219316, + "acc_norm": 0.52046783625731, + "acc_norm_stderr": 0.038316105328219316 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.44660194174757284, + "acc_stderr": 0.04922424153458934, + "acc_norm": 0.44660194174757284, + "acc_norm_stderr": 0.04922424153458934 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5274584929757343, + "acc_stderr": 0.017852981266633938, + "acc_norm": 0.5274584929757343, + "acc_norm_stderr": 0.017852981266633938 + 
}, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.5037037037037037, + "acc_stderr": 0.043192236258113324, + "acc_norm": 0.5037037037037037, + "acc_norm_stderr": 0.043192236258113324 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768077, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768077 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3829787234042553, + "acc_stderr": 0.03177821250236922, + "acc_norm": 0.3829787234042553, + "acc_norm_stderr": 0.03177821250236922 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.43373493975903615, + "acc_stderr": 0.03858158940685515, + "acc_norm": 0.43373493975903615, + "acc_norm_stderr": 0.03858158940685515 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5080385852090032, + "acc_stderr": 0.028394421370984538, + "acc_norm": 0.5080385852090032, + "acc_norm_stderr": 0.028394421370984538 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.47533632286995514, + "acc_stderr": 0.03351695167652628, + "acc_norm": 0.47533632286995514, + "acc_norm_stderr": 0.03351695167652628 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4351145038167939, + "acc_stderr": 0.04348208051644858, + "acc_norm": 0.4351145038167939, + "acc_norm_stderr": 0.04348208051644858 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5, + "acc_stderr": 0.035623524993954825, + "acc_norm": 0.5, + "acc_norm_stderr": 0.035623524993954825 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.42758620689655175, + "acc_stderr": 0.041227371113703316, + "acc_norm": 0.42758620689655175, + "acc_norm_stderr": 0.041227371113703316 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.17647058823529413, + "acc_stderr": 0.03793281185307809, + "acc_norm": 0.17647058823529413, + "acc_norm_stderr": 0.03793281185307809 + }, + 
"harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.40756302521008403, + "acc_stderr": 0.03191863374478465, + "acc_norm": 0.40756302521008403, + "acc_norm_stderr": 0.03191863374478465 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.41025641025641024, + "acc_stderr": 0.024939313906940774, + "acc_norm": 0.41025641025641024, + "acc_norm_stderr": 0.024939313906940774 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.048262172941398944, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.048262172941398944 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39408866995073893, + "acc_stderr": 0.034381579670365446, + "acc_norm": 0.39408866995073893, + "acc_norm_stderr": 0.034381579670365446 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.432258064516129, + "acc_stderr": 0.028181739720019416, + "acc_norm": 0.432258064516129, + "acc_norm_stderr": 0.028181739720019416 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6111111111111112, + "acc_stderr": 0.031937057262002924, + "acc_norm": 0.6111111111111112, + "acc_norm_stderr": 0.031937057262002924 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.42641509433962266, + "acc_stderr": 0.03043779434298305, + "acc_norm": 0.42641509433962266, + "acc_norm_stderr": 0.03043779434298305 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.44545454545454544, + "acc_stderr": 0.047605488214603246, + "acc_norm": 0.44545454545454544, + "acc_norm_stderr": 0.047605488214603246 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2518518518518518, + "acc_stderr": 0.02646611753895992, + "acc_norm": 
0.2518518518518518, + "acc_norm_stderr": 0.02646611753895992 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.03710185726119995, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.03710185726119995 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5522388059701493, + "acc_stderr": 0.03516184772952167, + "acc_norm": 0.5522388059701493, + "acc_norm_stderr": 0.03516184772952167 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3583815028901734, + "acc_stderr": 0.0365634365335316, + "acc_norm": 0.3583815028901734, + "acc_norm_stderr": 0.0365634365335316 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.29894179894179895, + "acc_stderr": 0.023577604791655816, + "acc_norm": 0.29894179894179895, + "acc_norm_stderr": 0.023577604791655816 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3680555555555556, + "acc_stderr": 0.04032999053960718, + "acc_norm": 0.3680555555555556, + "acc_norm_stderr": 0.04032999053960718 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.66, + "acc_stderr": 0.04760952285695237, + "acc_norm": 0.66, + "acc_norm_stderr": 0.04760952285695237 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.49421965317919075, + "acc_stderr": 0.026917296179149123, + "acc_norm": 0.49421965317919075, + "acc_norm_stderr": 0.026917296179149123 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3987730061349693, + "acc_stderr": 0.03847021420456023, + "acc_norm": 0.3987730061349693, + "acc_norm_stderr": 0.03847021420456023 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.027777777777777804, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.027777777777777804 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + 
"acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.45077720207253885, + "acc_stderr": 0.035909109522355244, + "acc_norm": 0.45077720207253885, + "acc_norm_stderr": 0.035909109522355244 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.03999423879281336, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.03999423879281336 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5009174311926605, + "acc_stderr": 0.021437287056051215, + "acc_norm": 0.5009174311926605, + "acc_norm_stderr": 0.021437287056051215 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.0393253768039287, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.0393253768039287 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.39869281045751637, + "acc_stderr": 0.02803609227389177, + "acc_norm": 0.39869281045751637, + "acc_norm_stderr": 0.02803609227389177 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.42, + "acc_stderr": 0.04960449637488584, + "acc_norm": 0.42, + "acc_norm_stderr": 0.04960449637488584 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6115702479338843, + "acc_stderr": 0.04449270350068382, + "acc_norm": 0.6115702479338843, + "acc_norm_stderr": 0.04449270350068382 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4144736842105263, + "acc_stderr": 0.04008973785779206, + "acc_norm": 0.4144736842105263, + "acc_norm_stderr": 0.04008973785779206 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3545751633986928, + "acc_stderr": 0.019353360547553707, + "acc_norm": 0.3545751633986928, + "acc_norm_stderr": 0.019353360547553707 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.30141843971631205, + "acc_stderr": 0.02737412888263115, + "acc_norm": 0.30141843971631205, + "acc_norm_stderr": 0.02737412888263115 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 
0.23214285714285715, + "acc_stderr": 0.04007341809755806, + "acc_norm": 0.23214285714285715, + "acc_norm_stderr": 0.04007341809755806 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.029886910547626974, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.029886910547626974 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.33455882352941174, + "acc_stderr": 0.02866199620233531, + "acc_norm": 0.33455882352941174, + "acc_norm_stderr": 0.02866199620233531 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.49387755102040815, + "acc_stderr": 0.03200682020163909, + "acc_norm": 0.49387755102040815, + "acc_norm_stderr": 0.03200682020163909 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5822784810126582, + "acc_stderr": 0.032103530322412685, + "acc_norm": 0.5822784810126582, + "acc_norm_stderr": 0.032103530322412685 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.31029986962190353, + "acc_stderr": 0.011815439293469829, + "acc_norm": 0.31029986962190353, + "acc_norm_stderr": 0.011815439293469829 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4803921568627451, + "acc_stderr": 0.03506612560524866, + "acc_norm": 0.4803921568627451, + "acc_norm_stderr": 0.03506612560524866 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5393939393939394, + "acc_stderr": 0.03892207016552012, + "acc_norm": 0.5393939393939394, + 
"acc_norm_stderr": 0.03892207016552012 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2607099143206854, + "mc1_stderr": 0.015368841620766373, + "mc2": 0.40532931367786407, + "mc2_stderr": 0.014837785058995488 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4675324675324675, + "acc_stderr": 0.01715407371668286, + "acc_norm": 0.5548996458087367, + "acc_norm_stderr": 0.017086417431005464 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + 
"harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "jingyeom/seal3.1.6_ia3", + "model_sha": "d9e866e20d75db50cb1648ffc9b87a27761a13cc", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/jingyeom/seal3.1.6n_7b/result_2023-11-16 00:30:31.json b/jingyeom/seal3.1.6n_7b/result_2023-11-16 00:30:31.json new file mode 100644 index 0000000000000000000000000000000000000000..9aab1e5196070df4a433ed426c1d281f601560f6 --- /dev/null +++ b/jingyeom/seal3.1.6n_7b/result_2023-11-16 00:30:31.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3310580204778157, + "acc_stderr": 0.013752062419817829, + "acc_norm": 0.4035836177474403, + "acc_norm_stderr": 0.014337158914268447 + }, + "harness|ko_hellaswag|10": { + "acc": 
0.40111531567416847, + "acc_stderr": 0.004891226138578063, + "acc_norm": 0.5273849830711014, + "acc_norm_stderr": 0.004982291744069922 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.27485380116959063, + "acc_stderr": 0.034240429246915824, + "acc_norm": 0.27485380116959063, + "acc_norm_stderr": 0.034240429246915824 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.22330097087378642, + "acc_stderr": 0.04123553189891431, + "acc_norm": 0.22330097087378642, + "acc_norm_stderr": 0.04123553189891431 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.34355044699872284, + "acc_stderr": 0.016982145632652462, + "acc_norm": 0.34355044699872284, + "acc_norm_stderr": 0.016982145632652462 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34814814814814815, + "acc_stderr": 0.041153246103369526, + "acc_norm": 0.34814814814814815, + "acc_norm_stderr": 0.041153246103369526 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.34, + "acc_stderr": 0.047609522856952365, + "acc_norm": 0.34, + "acc_norm_stderr": 0.047609522856952365 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.31063829787234043, + "acc_stderr": 0.03025123757921317, + "acc_norm": 0.31063829787234043, + "acc_norm_stderr": 0.03025123757921317 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3132530120481928, + "acc_stderr": 0.03610805018031023, + "acc_norm": 0.3132530120481928, + "acc_norm_stderr": 0.03610805018031023 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.33440514469453375, + "acc_stderr": 0.02679542232789395, + "acc_norm": 0.33440514469453375, + "acc_norm_stderr": 0.02679542232789395 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.28699551569506726, + "acc_stderr": 0.030360379710291947, + "acc_norm": 0.28699551569506726, + "acc_norm_stderr": 0.030360379710291947 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3435114503816794, + "acc_stderr": 0.041649760719448786, + "acc_norm": 0.3435114503816794, + "acc_norm_stderr": 0.041649760719448786 + }, + 
"harness|ko_mmlu_medical_genetics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.35353535353535354, + "acc_stderr": 0.03406086723547153, + "acc_norm": 0.35353535353535354, + "acc_norm_stderr": 0.03406086723547153 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.33793103448275863, + "acc_stderr": 0.0394170763206489, + "acc_norm": 0.33793103448275863, + "acc_norm_stderr": 0.0394170763206489 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.17647058823529413, + "acc_stderr": 0.0379328118530781, + "acc_norm": 0.17647058823529413, + "acc_norm_stderr": 0.0379328118530781 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.25630252100840334, + "acc_stderr": 0.02835962087053395, + "acc_norm": 0.25630252100840334, + "acc_norm_stderr": 0.02835962087053395 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2512820512820513, + "acc_stderr": 0.02199201666237056, + "acc_norm": 0.2512820512820513, + "acc_norm_stderr": 0.02199201666237056 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.04489931073591312, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.04489931073591312 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.28078817733990147, + "acc_stderr": 0.03161856335358609, + "acc_norm": 0.28078817733990147, + "acc_norm_stderr": 0.03161856335358609 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2709677419354839, + "acc_stderr": 0.025284416114900156, + "acc_norm": 0.2709677419354839, + 
"acc_norm_stderr": 0.025284416114900156 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.3504273504273504, + "acc_stderr": 0.03125610824421881, + "acc_norm": 0.3504273504273504, + "acc_norm_stderr": 0.03125610824421881 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.27547169811320754, + "acc_stderr": 0.027495663683724057, + "acc_norm": 0.27547169811320754, + "acc_norm_stderr": 0.027495663683724057 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.34545454545454546, + "acc_stderr": 0.04554619617541054, + "acc_norm": 0.34545454545454546, + "acc_norm_stderr": 0.04554619617541054 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2518518518518518, + "acc_stderr": 0.026466117538959916, + "acc_norm": 0.2518518518518518, + "acc_norm_stderr": 0.026466117538959916 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2582781456953642, + "acc_stderr": 0.035737053147634576, + "acc_norm": 0.2582781456953642, + "acc_norm_stderr": 0.035737053147634576 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.263681592039801, + "acc_stderr": 0.031157150869355582, + "acc_norm": 0.263681592039801, + "acc_norm_stderr": 0.031157150869355582 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2138728323699422, + "acc_stderr": 0.03126511206173043, + "acc_norm": 0.2138728323699422, + "acc_norm_stderr": 0.03126511206173043 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2698412698412698, + "acc_stderr": 0.022860838309232072, + "acc_norm": 0.2698412698412698, + "acc_norm_stderr": 0.022860838309232072 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.1875, + "acc_stderr": 0.032639560491693344, + "acc_norm": 0.1875, + "acc_norm_stderr": 0.032639560491693344 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.19, + "acc_stderr": 0.039427724440366234, + "acc_norm": 0.19, + "acc_norm_stderr": 0.039427724440366234 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + 
"acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2745664739884393, + "acc_stderr": 0.02402774515526502, + "acc_norm": 0.2745664739884393, + "acc_norm_stderr": 0.02402774515526502 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.25766871165644173, + "acc_stderr": 0.03436150827846917, + "acc_norm": 0.25766871165644173, + "acc_norm_stderr": 0.03436150827846917 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.32407407407407407, + "acc_stderr": 0.02604176620271716, + "acc_norm": 0.32407407407407407, + "acc_norm_stderr": 0.02604176620271716 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.21761658031088082, + "acc_stderr": 0.029778663037752954, + "acc_norm": 0.21761658031088082, + "acc_norm_stderr": 0.029778663037752954 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.04049339297748141, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.04049339297748141 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3247706422018349, + "acc_stderr": 0.02007772910931033, + "acc_norm": 0.3247706422018349, + "acc_norm_stderr": 0.02007772910931033 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.12698412698412698, + "acc_stderr": 0.02978041752268843, + "acc_norm": 0.12698412698412698, + "acc_norm_stderr": 0.02978041752268843 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.27124183006535946, + "acc_stderr": 0.025457756696667874, + "acc_norm": 0.27124183006535946, + "acc_norm_stderr": 0.025457756696667874 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.3884297520661157, + "acc_stderr": 
0.04449270350068382, + "acc_norm": 0.3884297520661157, + "acc_norm_stderr": 0.04449270350068382 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3157894736842105, + "acc_stderr": 0.037827289808654706, + "acc_norm": 0.3157894736842105, + "acc_norm_stderr": 0.037827289808654706 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.018433427649401892, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.018433427649401892 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.25886524822695034, + "acc_stderr": 0.026129572527180848, + "acc_norm": 0.25886524822695034, + "acc_norm_stderr": 0.026129572527180848 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.23214285714285715, + "acc_stderr": 0.04007341809755806, + "acc_norm": 0.23214285714285715, + "acc_norm_stderr": 0.04007341809755806 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.028353212866863448, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.028353212866863448 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.027678468642144703, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.027678468642144703 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.24897959183673468, + "acc_stderr": 0.02768297952296023, + "acc_norm": 0.24897959183673468, + "acc_norm_stderr": 0.02768297952296023 + }, + 
"harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.34177215189873417, + "acc_stderr": 0.030874537537553617, + "acc_norm": 0.34177215189873417, + "acc_norm_stderr": 0.030874537537553617 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2790091264667536, + "acc_stderr": 0.011455208832803545, + "acc_norm": 0.2790091264667536, + "acc_norm_stderr": 0.011455208832803545 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.031321798030832904, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.031321798030832904 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3151515151515151, + "acc_stderr": 0.0362773057502241, + "acc_norm": 0.3151515151515151, + "acc_norm_stderr": 0.0362773057502241 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2386780905752754, + "mc1_stderr": 0.014922629695456418, + "mc2": 0.3886319562727079, + "mc2_stderr": 0.014918898169695423 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.27744982290436837, + "acc_stderr": 0.015393630236605973, + "acc_norm": 0.39787485242030696, + "acc_norm_stderr": 0.016827959054733388 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + 
"harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "jingyeom/seal3.1.6n_7b", + "model_sha": "637f6e9182b916de57bac9d80d166b4c7e961670", + 
"model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/jingyeom/seal_all_13b/result_2023-12-04 11:39:35.json b/jingyeom/seal_all_13b/result_2023-12-04 11:39:35.json new file mode 100644 index 0000000000000000000000000000000000000000..eadeca57230ad946167fc736a3c0a41b12ffb18c --- /dev/null +++ b/jingyeom/seal_all_13b/result_2023-12-04 11:39:35.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.40955631399317405, + "acc_stderr": 0.014370358632472434, + "acc_norm": 0.4616040955631399, + "acc_norm_stderr": 0.01456824555029636 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4246166102370046, + "acc_stderr": 0.004932745013072715, + "acc_norm": 0.5771758613821948, + "acc_norm_stderr": 0.004929983692795069 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.49122807017543857, + "acc_stderr": 0.038342347441649924, + "acc_norm": 0.49122807017543857, + "acc_norm_stderr": 0.038342347441649924 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4563106796116505, + "acc_stderr": 0.049318019942204146, + "acc_norm": 0.4563106796116505, + "acc_norm_stderr": 0.049318019942204146 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5287356321839081, + "acc_stderr": 0.017850410794380173, + "acc_norm": 0.5287356321839081, + "acc_norm_stderr": 0.017850410794380173 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4740740740740741, + "acc_stderr": 0.04313531696750574, + "acc_norm": 0.4740740740740741, + "acc_norm_stderr": 0.04313531696750574 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3404255319148936, + "acc_stderr": 0.030976692998534436, + "acc_norm": 0.3404255319148936, + "acc_norm_stderr": 0.030976692998534436 + }, + 
"harness|ko_mmlu_virology|5": { + "acc": 0.3855421686746988, + "acc_stderr": 0.037891344246115476, + "acc_norm": 0.3855421686746988, + "acc_norm_stderr": 0.037891344246115476 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4887459807073955, + "acc_stderr": 0.028390897396863533, + "acc_norm": 0.4887459807073955, + "acc_norm_stderr": 0.028390897396863533 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4798206278026906, + "acc_stderr": 0.033530461674123, + "acc_norm": 0.4798206278026906, + "acc_norm_stderr": 0.033530461674123 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4351145038167939, + "acc_stderr": 0.04348208051644858, + "acc_norm": 0.4351145038167939, + "acc_norm_stderr": 0.04348208051644858 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.494949494949495, + "acc_stderr": 0.035621707606254015, + "acc_norm": 0.494949494949495, + "acc_norm_stderr": 0.035621707606254015 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4068965517241379, + "acc_stderr": 0.04093793981266237, + "acc_norm": 0.4068965517241379, + "acc_norm_stderr": 0.04093793981266237 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.041583075330832865, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.041583075330832865 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.44537815126050423, + "acc_stderr": 0.0322841062671639, + "acc_norm": 0.44537815126050423, + "acc_norm_stderr": 0.0322841062671639 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.41794871794871796, + "acc_stderr": 0.025007329882461227, + "acc_norm": 0.41794871794871796, + "acc_norm_stderr": 0.025007329882461227 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.49, + 
"acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4088669950738916, + "acc_stderr": 0.03459058815883232, + "acc_norm": 0.4088669950738916, + "acc_norm_stderr": 0.03459058815883232 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.46774193548387094, + "acc_stderr": 0.02838474778881333, + "acc_norm": 0.46774193548387094, + "acc_norm_stderr": 0.02838474778881333 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6581196581196581, + "acc_stderr": 0.031075028526507755, + "acc_norm": 0.6581196581196581, + "acc_norm_stderr": 0.031075028526507755 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4037735849056604, + "acc_stderr": 0.03019761160019795, + "acc_norm": 0.4037735849056604, + "acc_norm_stderr": 0.03019761160019795 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4636363636363636, + "acc_stderr": 0.047764491623961985, + "acc_norm": 0.4636363636363636, + "acc_norm_stderr": 0.047764491623961985 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.026719240783712152, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.026719240783712152 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.03802039760107903, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.03802039760107903 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5572139303482587, + "acc_stderr": 0.03512310964123935, + "acc_norm": 0.5572139303482587, + "acc_norm_stderr": 0.03512310964123935 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3815028901734104, + "acc_stderr": 
0.037038511930995215, + "acc_norm": 0.3815028901734104, + "acc_norm_stderr": 0.037038511930995215 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.023068188848261117, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.023068188848261117 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3125, + "acc_stderr": 0.038760854559127644, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.038760854559127644 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.64, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.64, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.476878612716763, + "acc_stderr": 0.026890297881303125, + "acc_norm": 0.476878612716763, + "acc_norm_stderr": 0.026890297881303125 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.38650306748466257, + "acc_stderr": 0.038258255488486076, + "acc_norm": 0.38650306748466257, + "acc_norm_stderr": 0.038258255488486076 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.45987654320987653, + "acc_stderr": 0.027731022753539274, + "acc_norm": 0.45987654320987653, + "acc_norm_stderr": 0.027731022753539274 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542129, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542129 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.45595854922279794, + "acc_stderr": 0.03594413711272437, + "acc_norm": 0.45595854922279794, + "acc_norm_stderr": 0.03594413711272437 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.22807017543859648, + "acc_stderr": 0.03947152782669415, + "acc_norm": 0.22807017543859648, + "acc_norm_stderr": 0.03947152782669415 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5192660550458715, + 
"acc_stderr": 0.02142140298254888, + "acc_norm": 0.5192660550458715, + "acc_norm_stderr": 0.02142140298254888 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.04006168083848878, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.04006168083848878 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.41830065359477125, + "acc_stderr": 0.028245134024387292, + "acc_norm": 0.41830065359477125, + "acc_norm_stderr": 0.028245134024387292 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5867768595041323, + "acc_stderr": 0.04495087843548408, + "acc_norm": 0.5867768595041323, + "acc_norm_stderr": 0.04495087843548408 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.34868421052631576, + "acc_stderr": 0.038781398887976104, + "acc_norm": 0.34868421052631576, + "acc_norm_stderr": 0.038781398887976104 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.36764705882352944, + "acc_stderr": 0.019506291693954854, + "acc_norm": 0.36764705882352944, + "acc_norm_stderr": 0.019506291693954854 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3120567375886525, + "acc_stderr": 0.027640120545169934, + "acc_norm": 0.3120567375886525, + "acc_norm_stderr": 0.027640120545169934 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2767857142857143, + "acc_stderr": 0.042466243366976235, + "acc_norm": 0.2767857142857143, + "acc_norm_stderr": 0.042466243366976235 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3194444444444444, + "acc_stderr": 0.0317987634217685, + "acc_norm": 0.3194444444444444, + "acc_norm_stderr": 0.0317987634217685 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + 
"harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3272058823529412, + "acc_stderr": 0.028501452860396584, + "acc_norm": 0.3272058823529412, + "acc_norm_stderr": 0.028501452860396584 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5183673469387755, + "acc_stderr": 0.03198761546763128, + "acc_norm": 0.5183673469387755, + "acc_norm_stderr": 0.03198761546763128 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5485232067510548, + "acc_stderr": 0.032393600173974704, + "acc_norm": 0.5485232067510548, + "acc_norm_stderr": 0.032393600173974704 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.30182529335071706, + "acc_stderr": 0.011724350518105888, + "acc_norm": 0.30182529335071706, + "acc_norm_stderr": 0.011724350518105888 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.44607843137254904, + "acc_stderr": 0.03488845451304974, + "acc_norm": 0.44607843137254904, + "acc_norm_stderr": 0.03488845451304974 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5151515151515151, + "acc_stderr": 0.03902551007374448, + "acc_norm": 0.5151515151515151, + "acc_norm_stderr": 0.03902551007374448 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.22276621787025705, + "mc1_stderr": 0.014566506961396756, + "mc2": 0.3670812629713066, + "mc2_stderr": 0.014598520460295474 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.31995277449822906, + "acc_stderr": 0.016037153840280552, + "acc_norm": 0.48288075560802834, + "acc_norm_stderr": 0.017180275246085626 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + 
"harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + 
"harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "jingyeom/seal_all_13b", + "model_sha": "3ad11ce5786df519f300ed28237e4337112ef01e", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/jingyeom/seal_all_7b/result_2023-12-04 06:52:30.json b/jingyeom/seal_all_7b/result_2023-12-04 06:52:30.json new file mode 100644 index 0000000000000000000000000000000000000000..efee47cff8526f7740a2820d85ddd243660c8e09 --- /dev/null +++ b/jingyeom/seal_all_7b/result_2023-12-04 06:52:30.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.33447098976109213, + "acc_stderr": 0.01378746032244138, + "acc_norm": 0.3924914675767918, + "acc_norm_stderr": 0.014269634635670726 + }, + "harness|ko_hellaswag|10": { + "acc": 0.39075881298546106, + "acc_stderr": 0.004869232758103322, + "acc_norm": 0.5157339175463055, + "acc_norm_stderr": 0.004987310297290272 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.29239766081871343, + "acc_stderr": 0.03488647713457923, + "acc_norm": 0.29239766081871343, + "acc_norm_stderr": 0.03488647713457923 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.22330097087378642, + "acc_stderr": 0.04123553189891431, + "acc_norm": 0.22330097087378642, + "acc_norm_stderr": 0.04123553189891431 + }, + 
"harness|ko_mmlu_miscellaneous|5": { + "acc": 0.34738186462324394, + "acc_stderr": 0.01702667174865573, + "acc_norm": 0.34738186462324394, + "acc_norm_stderr": 0.01702667174865573 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3851851851851852, + "acc_stderr": 0.042039210401562783, + "acc_norm": 0.3851851851851852, + "acc_norm_stderr": 0.042039210401562783 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.046482319871173156, + "acc_norm": 0.31, + "acc_norm_stderr": 0.046482319871173156 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.33191489361702126, + "acc_stderr": 0.030783736757745653, + "acc_norm": 0.33191489361702126, + "acc_norm_stderr": 0.030783736757745653 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.24096385542168675, + "acc_stderr": 0.0332939411907353, + "acc_norm": 0.24096385542168675, + "acc_norm_stderr": 0.0332939411907353 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.31511254019292606, + "acc_stderr": 0.026385273703464492, + "acc_norm": 0.31511254019292606, + "acc_norm_stderr": 0.026385273703464492 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.27802690582959644, + "acc_stderr": 0.030069584874494043, + "acc_norm": 0.27802690582959644, + "acc_norm_stderr": 0.030069584874494043 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3053435114503817, + "acc_stderr": 0.04039314978724561, + "acc_norm": 0.3053435114503817, + "acc_norm_stderr": 0.04039314978724561 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.23, + "acc_stderr": 0.042295258468165085, + "acc_norm": 0.23, + "acc_norm_stderr": 0.042295258468165085 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.26262626262626265, + "acc_stderr": 0.03135305009533084, + "acc_norm": 0.26262626262626265, + "acc_norm_stderr": 0.03135305009533084 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3310344827586207, + "acc_stderr": 0.039215453124671215, + "acc_norm": 0.3310344827586207, + "acc_norm_stderr": 
0.039215453124671215 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.14705882352941177, + "acc_stderr": 0.035240689515674495, + "acc_norm": 0.14705882352941177, + "acc_norm_stderr": 0.035240689515674495 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.23109243697478993, + "acc_stderr": 0.027381406927868973, + "acc_norm": 0.23109243697478993, + "acc_norm_stderr": 0.027381406927868973 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.22564102564102564, + "acc_stderr": 0.021193632525148522, + "acc_norm": 0.22564102564102564, + "acc_norm_stderr": 0.021193632525148522 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.046166311118017125, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.046166311118017125 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.27586206896551724, + "acc_stderr": 0.031447125816782426, + "acc_norm": 0.27586206896551724, + "acc_norm_stderr": 0.031447125816782426 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2806451612903226, + "acc_stderr": 0.025560604721022884, + "acc_norm": 0.2806451612903226, + "acc_norm_stderr": 0.025560604721022884 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.3717948717948718, + "acc_stderr": 0.03166098891888078, + "acc_norm": 0.3717948717948718, + "acc_norm_stderr": 0.03166098891888078 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.28679245283018867, + "acc_stderr": 0.027834912527544074, + "acc_norm": 0.28679245283018867, + "acc_norm_stderr": 0.027834912527544074 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.3, + "acc_stderr": 0.04389311454644286, + "acc_norm": 
0.3, + "acc_norm_stderr": 0.04389311454644286 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.026962424325073838, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.026962424325073838 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + "acc_stderr": 0.037345356767871984, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.037345356767871984 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.2835820895522388, + "acc_stderr": 0.03187187537919796, + "acc_norm": 0.2835820895522388, + "acc_norm_stderr": 0.03187187537919796 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.24855491329479767, + "acc_stderr": 0.03295304696818318, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 0.03295304696818318 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.26455026455026454, + "acc_stderr": 0.022717467897708607, + "acc_norm": 0.26455026455026454, + "acc_norm_stderr": 0.022717467897708607 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2361111111111111, + "acc_stderr": 0.03551446610810826, + "acc_norm": 0.2361111111111111, + "acc_norm_stderr": 0.03551446610810826 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.18, + "acc_stderr": 0.03861229196653694, + "acc_norm": 0.18, + "acc_norm_stderr": 0.03861229196653694 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384739, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384739 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.3179190751445087, + "acc_stderr": 0.025070713719153193, + "acc_norm": 0.3179190751445087, + "acc_norm_stderr": 0.025070713719153193 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.27607361963190186, + "acc_stderr": 0.0351238528370505, + "acc_norm": 0.27607361963190186, + "acc_norm_stderr": 0.0351238528370505 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2932098765432099, + "acc_stderr": 
0.025329888171900926, + "acc_norm": 0.2932098765432099, + "acc_norm_stderr": 0.025329888171900926 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.24870466321243523, + "acc_stderr": 0.03119584087770029, + "acc_norm": 0.24870466321243523, + "acc_norm_stderr": 0.03119584087770029 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.22807017543859648, + "acc_stderr": 0.03947152782669415, + "acc_norm": 0.22807017543859648, + "acc_norm_stderr": 0.03947152782669415 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.28807339449541286, + "acc_stderr": 0.01941644589263602, + "acc_norm": 0.28807339449541286, + "acc_norm_stderr": 0.01941644589263602 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.1349206349206349, + "acc_stderr": 0.030557101589417515, + "acc_norm": 0.1349206349206349, + "acc_norm_stderr": 0.030557101589417515 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.024954184324879905, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.024954184324879905 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847415, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847415 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.35537190082644626, + "acc_stderr": 0.04369236326573981, + "acc_norm": 0.35537190082644626, + "acc_norm_stderr": 0.04369236326573981 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3026315789473684, + "acc_stderr": 0.03738520676119668, + "acc_norm": 0.3026315789473684, + "acc_norm_stderr": 0.03738520676119668 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2826797385620915, + "acc_stderr": 0.018217269552053435, + "acc_norm": 0.2826797385620915, + "acc_norm_stderr": 0.018217269552053435 + }, + 
"harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2695035460992908, + "acc_stderr": 0.026469036818590638, + "acc_norm": 0.2695035460992908, + "acc_norm_stderr": 0.026469036818590638 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.04287858751340456, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.04287858751340456 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.19444444444444445, + "acc_stderr": 0.026991454502036733, + "acc_norm": 0.19444444444444445, + "acc_norm_stderr": 0.026991454502036733 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.1801470588235294, + "acc_stderr": 0.023345163616544855, + "acc_norm": 0.1801470588235294, + "acc_norm_stderr": 0.023345163616544855 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.20816326530612245, + "acc_stderr": 0.02599111767281329, + "acc_norm": 0.20816326530612245, + "acc_norm_stderr": 0.02599111767281329 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.3206751054852321, + "acc_stderr": 0.03038193194999041, + "acc_norm": 0.3206751054852321, + "acc_norm_stderr": 0.03038193194999041 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2796610169491525, + "acc_stderr": 0.011463397393861947, + "acc_norm": 0.2796610169491525, + "acc_norm_stderr": 0.011463397393861947 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.24019607843137256, + "acc_stderr": 0.02998373305591362, + 
"acc_norm": 0.24019607843137256, + "acc_norm_stderr": 0.02998373305591362 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.296969696969697, + "acc_stderr": 0.03567969772268049, + "acc_norm": 0.296969696969697, + "acc_norm_stderr": 0.03567969772268049 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.21542227662178703, + "mc1_stderr": 0.014391902652427685, + "mc2": 0.3511785481321989, + "mc2_stderr": 0.014738072611274355 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.24793388429752067, + "acc_stderr": 0.014846044968252247, + "acc_norm": 0.35537190082644626, + "acc_norm_stderr": 0.01645549600031453 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + 
"harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "jingyeom/seal_all_7b", + "model_sha": "d30240cec9e23bf26a7843a4b99e55a5f5c1d99d", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/jingyeom/zephyr_all_7b/result_2023-12-05 04:35:39.json b/jingyeom/zephyr_all_7b/result_2023-12-05 04:35:39.json new file mode 100644 index 0000000000000000000000000000000000000000..9ebeec923f8c5d694b8cae081979deb7b009cd1c --- /dev/null +++ b/jingyeom/zephyr_all_7b/result_2023-12-05 04:35:39.json @@ -0,0 +1,444 @@ +{ + 
"results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2986348122866894, + "acc_stderr": 0.01337407861506875, + "acc_norm": 0.3455631399317406, + "acc_norm_stderr": 0.013896938461145687 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3601872137024497, + "acc_stderr": 0.004790734683704582, + "acc_norm": 0.4553873730332603, + "acc_norm_stderr": 0.004969879532843085 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.391812865497076, + "acc_stderr": 0.03743979825926399, + "acc_norm": 0.391812865497076, + "acc_norm_stderr": 0.03743979825926399 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4368932038834951, + "acc_stderr": 0.04911147107365777, + "acc_norm": 0.4368932038834951, + "acc_norm_stderr": 0.04911147107365777 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.41762452107279696, + "acc_stderr": 0.017635637326951534, + "acc_norm": 0.41762452107279696, + "acc_norm_stderr": 0.017635637326951534 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.35555555555555557, + "acc_stderr": 0.04135176749720386, + "acc_norm": 0.35555555555555557, + "acc_norm_stderr": 0.04135176749720386 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909284, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909284 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.32340425531914896, + "acc_stderr": 0.030579442773610337, + "acc_norm": 0.32340425531914896, + "acc_norm_stderr": 0.030579442773610337 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3614457831325301, + "acc_stderr": 0.0374005938202932, + "acc_norm": 0.3614457831325301, + "acc_norm_stderr": 0.0374005938202932 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.40514469453376206, + "acc_stderr": 0.027882383791325953, + "acc_norm": 0.40514469453376206, + "acc_norm_stderr": 0.027882383791325953 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3094170403587444, + "acc_stderr": 0.031024411740572234, + "acc_norm": 0.3094170403587444, + "acc_norm_stderr": 
0.031024411740572234 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.35877862595419846, + "acc_stderr": 0.04206739313864908, + "acc_norm": 0.35877862595419846, + "acc_norm_stderr": 0.04206739313864908 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.4292929292929293, + "acc_stderr": 0.035265527246011986, + "acc_norm": 0.4292929292929293, + "acc_norm_stderr": 0.035265527246011986 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.46206896551724136, + "acc_stderr": 0.04154659671707546, + "acc_norm": 0.46206896551724136, + "acc_norm_stderr": 0.04154659671707546 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.04336432707993178, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.04336432707993178 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.41596638655462187, + "acc_stderr": 0.03201650100739615, + "acc_norm": 0.41596638655462187, + "acc_norm_stderr": 0.03201650100739615 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4282051282051282, + "acc_stderr": 0.02508830145469484, + "acc_norm": 0.4282051282051282, + "acc_norm_stderr": 0.02508830145469484 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956913, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956913 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.32019704433497537, + "acc_stderr": 0.032826493853041504, + "acc_norm": 
0.32019704433497537, + "acc_norm_stderr": 0.032826493853041504 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4032258064516129, + "acc_stderr": 0.02790615082604114, + "acc_norm": 0.4032258064516129, + "acc_norm_stderr": 0.02790615082604114 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6025641025641025, + "acc_stderr": 0.03205953453789293, + "acc_norm": 0.6025641025641025, + "acc_norm_stderr": 0.03205953453789293 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.41132075471698115, + "acc_stderr": 0.0302850092590098, + "acc_norm": 0.41132075471698115, + "acc_norm_stderr": 0.0302850092590098 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4, + "acc_stderr": 0.0469237132203465, + "acc_norm": 0.4, + "acc_norm_stderr": 0.0469237132203465 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3037037037037037, + "acc_stderr": 0.028037929969114986, + "acc_norm": 0.3037037037037037, + "acc_norm_stderr": 0.028037929969114986 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2781456953642384, + "acc_stderr": 0.036586032627637426, + "acc_norm": 0.2781456953642384, + "acc_norm_stderr": 0.036586032627637426 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.472636815920398, + "acc_stderr": 0.03530235517334683, + "acc_norm": 0.472636815920398, + "acc_norm_stderr": 0.03530235517334683 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3352601156069364, + "acc_stderr": 0.035995863012470784, + "acc_norm": 0.3352601156069364, + "acc_norm_stderr": 0.035995863012470784 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.024594975128920938, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.024594975128920938 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.03745554791462457, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.03745554791462457 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.34, + 
"acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.55, + "acc_stderr": 0.05, + "acc_norm": 0.55, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.37572254335260113, + "acc_stderr": 0.026074314851657083, + "acc_norm": 0.37572254335260113, + "acc_norm_stderr": 0.026074314851657083 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3803680981595092, + "acc_stderr": 0.038142698932618374, + "acc_norm": 0.3803680981595092, + "acc_norm_stderr": 0.038142698932618374 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3950617283950617, + "acc_stderr": 0.027201117666925657, + "acc_norm": 0.3950617283950617, + "acc_norm_stderr": 0.027201117666925657 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.04793724854411022, + "acc_norm": 0.35, + "acc_norm_stderr": 0.04793724854411022 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.34196891191709844, + "acc_stderr": 0.03423465100104283, + "acc_norm": 0.34196891191709844, + "acc_norm_stderr": 0.03423465100104283 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.04227054451232199, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.04227054451232199 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.44036697247706424, + "acc_stderr": 0.02128431062376154, + "acc_norm": 0.44036697247706424, + "acc_norm_stderr": 0.02128431062376154 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.041349130183033156, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.041349130183033156 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.42483660130718953, + "acc_stderr": 0.028304576673141114, + "acc_norm": 0.42483660130718953, + "acc_norm_stderr": 0.028304576673141114 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.38, + "acc_stderr": 
0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.4462809917355372, + "acc_stderr": 0.0453793517794788, + "acc_norm": 0.4462809917355372, + "acc_norm_stderr": 0.0453793517794788 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3815789473684211, + "acc_stderr": 0.03953173377749194, + "acc_norm": 0.3815789473684211, + "acc_norm_stderr": 0.03953173377749194 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2957516339869281, + "acc_stderr": 0.018463154132632813, + "acc_norm": 0.2957516339869281, + "acc_norm_stderr": 0.018463154132632813 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2978723404255319, + "acc_stderr": 0.027281608344469414, + "acc_norm": 0.2978723404255319, + "acc_norm_stderr": 0.027281608344469414 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25892857142857145, + "acc_stderr": 0.041577515398656284, + "acc_norm": 0.25892857142857145, + "acc_norm_stderr": 0.041577515398656284 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.03350991604696043, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.03350991604696043 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2994413407821229, + "acc_stderr": 0.015318257745976708, + "acc_norm": 0.2994413407821229, + "acc_norm_stderr": 0.015318257745976708 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4338235294117647, + "acc_stderr": 0.03010563657001664, + "acc_norm": 0.4338235294117647, + "acc_norm_stderr": 0.03010563657001664 + }, + "harness|ko_mmlu_security_studies|5": 
{ + "acc": 0.32653061224489793, + "acc_stderr": 0.03002105623844029, + "acc_norm": 0.32653061224489793, + "acc_norm_stderr": 0.03002105623844029 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.43037974683544306, + "acc_stderr": 0.032230171959375976, + "acc_norm": 0.43037974683544306, + "acc_norm_stderr": 0.032230171959375976 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.28096479791395046, + "acc_stderr": 0.011479684550077692, + "acc_norm": 0.28096479791395046, + "acc_norm_stderr": 0.011479684550077692 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.3480392156862745, + "acc_stderr": 0.03343311240488419, + "acc_norm": 0.3480392156862745, + "acc_norm_stderr": 0.03343311240488419 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3575757575757576, + "acc_stderr": 0.037425970438065864, + "acc_norm": 0.3575757575757576, + "acc_norm_stderr": 0.037425970438065864 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2864137086903305, + "mc1_stderr": 0.015826142439502342, + "mc2": 0.4515315923147205, + "mc2_stderr": 0.01639872227117553 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.282172373081464, + "acc_stderr": 0.01547327158398843, + "acc_norm": 0.29515938606847697, + "acc_norm_stderr": 0.01568153522919219 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + 
"harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + 
"harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "jingyeom/zephyr_all_7b", + "model_sha": "bedb59500978ac3964aa9aaf9bd9c73c74ee025d", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/jiwoochris/ko-llama2-13b-n1/result_2023-10-29 14:25:55.json b/jiwoochris/ko-llama2-13b-n1/result_2023-10-29 14:25:55.json new file mode 100644 index 0000000000000000000000000000000000000000..6d6daf4181ffd23349cce76f5a628d53a1ac7686 --- /dev/null +++ b/jiwoochris/ko-llama2-13b-n1/result_2023-10-29 14:25:55.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.39419795221843, + "acc_stderr": 0.01428052266746733, + "acc_norm": 0.44368600682593856, + "acc_norm_stderr": 0.01451842182567044 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4219279028082055, + "acc_stderr": 0.004928578106026375, + "acc_norm": 0.5553674566819359, + "acc_norm_stderr": 0.004959094146471523 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5497076023391813, + "acc_stderr": 0.03815827365913236, + "acc_norm": 0.5497076023391813, + "acc_norm_stderr": 0.03815827365913236 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5533980582524272, + "acc_stderr": 0.04922424153458934, + "acc_norm": 0.5533980582524272, + "acc_norm_stderr": 0.04922424153458934 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5031928480204342, + "acc_stderr": 0.017879598945933085, + "acc_norm": 0.5031928480204342, + "acc_norm_stderr": 0.017879598945933085 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45185185185185184, + "acc_stderr": 0.04299268905480864, + "acc_norm": 0.45185185185185184, + "acc_norm_stderr": 0.04299268905480864 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + 
"harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.34893617021276596, + "acc_stderr": 0.03115852213135778, + "acc_norm": 0.34893617021276596, + "acc_norm_stderr": 0.03115852213135778 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42168674698795183, + "acc_stderr": 0.03844453181770917, + "acc_norm": 0.42168674698795183, + "acc_norm_stderr": 0.03844453181770917 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4533762057877814, + "acc_stderr": 0.02827435985489426, + "acc_norm": 0.4533762057877814, + "acc_norm_stderr": 0.02827435985489426 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.34977578475336324, + "acc_stderr": 0.03200736719484503, + "acc_norm": 0.34977578475336324, + "acc_norm_stderr": 0.03200736719484503 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5190839694656488, + "acc_stderr": 0.04382094705550988, + "acc_norm": 0.5190839694656488, + "acc_norm_stderr": 0.04382094705550988 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5707070707070707, + "acc_stderr": 0.035265527246011986, + "acc_norm": 0.5707070707070707, + "acc_norm_stderr": 0.035265527246011986 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4206896551724138, + "acc_stderr": 0.0411391498118926, + "acc_norm": 0.4206896551724138, + "acc_norm_stderr": 0.0411391498118926 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237655, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237655 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.42436974789915966, + "acc_stderr": 0.03210479051015776, + "acc_norm": 0.42436974789915966, + "acc_norm_stderr": 0.03210479051015776 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4025641025641026, + "acc_stderr": 0.024864995159767776, + "acc_norm": 
0.4025641025641026, + "acc_norm_stderr": 0.024864995159767776 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.47, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.47, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4351851851851852, + "acc_stderr": 0.04792898170907061, + "acc_norm": 0.4351851851851852, + "acc_norm_stderr": 0.04792898170907061 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3645320197044335, + "acc_stderr": 0.033864057460620905, + "acc_norm": 0.3645320197044335, + "acc_norm_stderr": 0.033864057460620905 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.45161290322580644, + "acc_stderr": 0.028310500348568385, + "acc_norm": 0.45161290322580644, + "acc_norm_stderr": 0.028310500348568385 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6495726495726496, + "acc_stderr": 0.031256108244218796, + "acc_norm": 0.6495726495726496, + "acc_norm_stderr": 0.031256108244218796 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.47547169811320755, + "acc_stderr": 0.030735822206205608, + "acc_norm": 0.47547169811320755, + "acc_norm_stderr": 0.030735822206205608 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4818181818181818, + "acc_stderr": 0.04785964010794916, + "acc_norm": 0.4818181818181818, + "acc_norm_stderr": 0.04785964010794916 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.026842057873833713, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.026842057873833713 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + "acc_stderr": 0.037345356767871984, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.037345356767871984 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6169154228855721, + "acc_stderr": 
0.034375193373382504, + "acc_norm": 0.6169154228855721, + "acc_norm_stderr": 0.034375193373382504 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3815028901734104, + "acc_stderr": 0.03703851193099522, + "acc_norm": 0.3815028901734104, + "acc_norm_stderr": 0.03703851193099522 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.023919984164047736, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.023919984164047736 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3541666666666667, + "acc_stderr": 0.039994111357535424, + "acc_norm": 0.3541666666666667, + "acc_norm_stderr": 0.039994111357535424 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4190751445086705, + "acc_stderr": 0.02656417811142262, + "acc_norm": 0.4190751445086705, + "acc_norm_stderr": 0.02656417811142262 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4110429447852761, + "acc_stderr": 0.038656978537853624, + "acc_norm": 0.4110429447852761, + "acc_norm_stderr": 0.038656978537853624 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4537037037037037, + "acc_stderr": 0.0277012284685426, + "acc_norm": 0.4537037037037037, + "acc_norm_stderr": 0.0277012284685426 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.49222797927461137, + "acc_stderr": 0.03608003225569653, + "acc_norm": 0.49222797927461137, + "acc_norm_stderr": 0.03608003225569653 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + 
"acc_stderr": 0.040969851398436716, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.040969851398436716 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.48807339449541287, + "acc_stderr": 0.021431223617362233, + "acc_norm": 0.48807339449541287, + "acc_norm_stderr": 0.021431223617362233 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30158730158730157, + "acc_stderr": 0.041049472699033945, + "acc_norm": 0.30158730158730157, + "acc_norm_stderr": 0.041049472699033945 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4477124183006536, + "acc_stderr": 0.028472938478033522, + "acc_norm": 0.4477124183006536, + "acc_norm_stderr": 0.028472938478033522 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5785123966942148, + "acc_stderr": 0.04507732278775087, + "acc_norm": 0.5785123966942148, + "acc_norm_stderr": 0.04507732278775087 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.46710526315789475, + "acc_stderr": 0.040601270352363966, + "acc_norm": 0.46710526315789475, + "acc_norm_stderr": 0.040601270352363966 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.31862745098039214, + "acc_stderr": 0.01885008469646872, + "acc_norm": 0.31862745098039214, + "acc_norm_stderr": 0.01885008469646872 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3191489361702128, + "acc_stderr": 0.027807990141320203, + "acc_norm": 0.3191489361702128, + "acc_norm_stderr": 0.027807990141320203 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.21428571428571427, + "acc_stderr": 0.038946411200447915, + "acc_norm": 0.21428571428571427, + "acc_norm_stderr": 0.038946411200447915 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3194444444444444, + "acc_stderr": 0.03179876342176852, + "acc_norm": 0.3194444444444444, + "acc_norm_stderr": 0.03179876342176852 + }, 
+ "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.02679956202488769, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.02679956202488769 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5061224489795918, + "acc_stderr": 0.032006820201639065, + "acc_norm": 0.5061224489795918, + "acc_norm_stderr": 0.032006820201639065 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5063291139240507, + "acc_stderr": 0.0325446201076786, + "acc_norm": 0.5063291139240507, + "acc_norm_stderr": 0.0325446201076786 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.29791395045632335, + "acc_stderr": 0.011680717340400031, + "acc_norm": 0.29791395045632335, + "acc_norm_stderr": 0.011680717340400031 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4117647058823529, + "acc_stderr": 0.034542365853806094, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.034542365853806094 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5151515151515151, + "acc_stderr": 0.03902551007374448, + "acc_norm": 0.5151515151515151, + "acc_norm_stderr": 0.03902551007374448 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2998776009791922, + "mc1_stderr": 0.016040352966713613, + "mc2": 0.4716545169998464, + "mc2_stderr": 0.015260622885261386 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5147579693034239, + "acc_stderr": 0.01718286443499856, + "acc_norm": 0.5489964580873672, + "acc_norm_stderr": 
0.017107618859549346 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + 
"harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "jiwoochris/ko-llama2-13b-n1", + "model_sha": "12d3e6b98013770e4f8c8d37ff76d6c9134b39be", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/jiwoochris/ko-llama2-13b-v4/result_2023-10-22 15:44:08.json b/jiwoochris/ko-llama2-13b-v4/result_2023-10-22 15:44:08.json new file mode 100644 index 0000000000000000000000000000000000000000..afb4ed0a8ec55654db30fdb94c460e1978bd3660 --- /dev/null +++ b/jiwoochris/ko-llama2-13b-v4/result_2023-10-22 15:44:08.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.40017064846416384, + "acc_stderr": 0.014317197787809167, + "acc_norm": 0.4539249146757679, + "acc_norm_stderr": 0.014549221105171864 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4224258115913165, + "acc_stderr": 0.004929361040558258, + "acc_norm": 0.5571599283011353, + "acc_norm_stderr": 0.004957068377516512 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5087719298245614, + "acc_stderr": 0.038342347441649924, + "acc_norm": 0.5087719298245614, + "acc_norm_stderr": 0.038342347441649924 + }, + 
"harness|ko_mmlu_management|5": { + "acc": 0.5048543689320388, + "acc_stderr": 0.04950504382128921, + "acc_norm": 0.5048543689320388, + "acc_norm_stderr": 0.04950504382128921 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.49169859514687103, + "acc_stderr": 0.017877498991072008, + "acc_norm": 0.49169859514687103, + "acc_norm_stderr": 0.017877498991072008 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4148148148148148, + "acc_stderr": 0.04256193767901407, + "acc_norm": 0.4148148148148148, + "acc_norm_stderr": 0.04256193767901407 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.34893617021276596, + "acc_stderr": 0.03115852213135778, + "acc_norm": 0.34893617021276596, + "acc_norm_stderr": 0.03115852213135778 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42168674698795183, + "acc_stderr": 0.038444531817709175, + "acc_norm": 0.42168674698795183, + "acc_norm_stderr": 0.038444531817709175 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4662379421221865, + "acc_stderr": 0.02833327710956278, + "acc_norm": 0.4662379421221865, + "acc_norm_stderr": 0.02833327710956278 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3721973094170404, + "acc_stderr": 0.03244305283008732, + "acc_norm": 0.3721973094170404, + "acc_norm_stderr": 0.03244305283008732 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4961832061068702, + "acc_stderr": 0.043851623256015534, + "acc_norm": 0.4961832061068702, + "acc_norm_stderr": 0.043851623256015534 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5707070707070707, + "acc_stderr": 0.035265527246011986, + "acc_norm": 0.5707070707070707, + "acc_norm_stderr": 0.035265527246011986 + }, + 
"harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.42758620689655175, + "acc_stderr": 0.041227371113703316, + "acc_norm": 0.42758620689655175, + "acc_norm_stderr": 0.041227371113703316 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.04220773659171452, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.04220773659171452 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.42436974789915966, + "acc_stderr": 0.03210479051015776, + "acc_norm": 0.42436974789915966, + "acc_norm_stderr": 0.03210479051015776 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.39487179487179486, + "acc_stderr": 0.02478431694215636, + "acc_norm": 0.39487179487179486, + "acc_norm_stderr": 0.02478431694215636 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.04832853553437056, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.04832853553437056 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3793103448275862, + "acc_stderr": 0.034139638059062345, + "acc_norm": 0.3793103448275862, + "acc_norm_stderr": 0.034139638059062345 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4258064516129032, + "acc_stderr": 0.028129112709165894, + "acc_norm": 0.4258064516129032, + "acc_norm_stderr": 0.028129112709165894 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6538461538461539, + "acc_stderr": 0.031166957367235903, + "acc_norm": 0.6538461538461539, + "acc_norm_stderr": 0.031166957367235903 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.46037735849056605, + "acc_stderr": 0.03067609659938918, + "acc_norm": 
0.46037735849056605, + "acc_norm_stderr": 0.03067609659938918 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4909090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.4909090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.02684205787383371, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.02684205787383371 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.037101857261199946, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.037101857261199946 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6019900497512438, + "acc_stderr": 0.034611994290400135, + "acc_norm": 0.6019900497512438, + "acc_norm_stderr": 0.034611994290400135 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.37572254335260113, + "acc_stderr": 0.03692820767264867, + "acc_norm": 0.37572254335260113, + "acc_norm_stderr": 0.03692820767264867 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.328042328042328, + "acc_stderr": 0.024180497164376882, + "acc_norm": 0.328042328042328, + "acc_norm_stderr": 0.024180497164376882 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3472222222222222, + "acc_stderr": 0.039812405437178615, + "acc_norm": 0.3472222222222222, + "acc_norm_stderr": 0.039812405437178615 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237101, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237101 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4421965317919075, + "acc_stderr": 0.026738603643807403, + "acc_norm": 0.4421965317919075, + "acc_norm_stderr": 0.026738603643807403 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.44171779141104295, + 
"acc_stderr": 0.039015918258361836, + "acc_norm": 0.44171779141104295, + "acc_norm_stderr": 0.039015918258361836 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4506172839506173, + "acc_stderr": 0.027684721415656203, + "acc_norm": 0.4506172839506173, + "acc_norm_stderr": 0.027684721415656203 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.47668393782383417, + "acc_stderr": 0.03604513672442207, + "acc_norm": 0.47668393782383417, + "acc_norm_stderr": 0.03604513672442207 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.040969851398436716, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.040969851398436716 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.48623853211009177, + "acc_stderr": 0.02142920208987408, + "acc_norm": 0.48623853211009177, + "acc_norm_stderr": 0.02142920208987408 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.04134913018303316, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.04134913018303316 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.028431095444176643, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.028431095444176643 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5950413223140496, + "acc_stderr": 0.04481137755942469, + "acc_norm": 0.5950413223140496, + "acc_norm_stderr": 0.04481137755942469 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4473684210526316, + "acc_stderr": 0.040463368839782514, + "acc_norm": 0.4473684210526316, + "acc_norm_stderr": 0.040463368839782514 + }, + 
"harness|ko_mmlu_professional_psychology|5": { + "acc": 0.32679738562091504, + "acc_stderr": 0.018975427920507205, + "acc_norm": 0.32679738562091504, + "acc_norm_stderr": 0.018975427920507205 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.028121636040639875, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.028121636040639875 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.20535714285714285, + "acc_stderr": 0.038342410214190735, + "acc_norm": 0.20535714285714285, + "acc_norm_stderr": 0.038342410214190735 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.30092592592592593, + "acc_stderr": 0.031280390843298825, + "acc_norm": 0.30092592592592593, + "acc_norm_stderr": 0.031280390843298825 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.026799562024887688, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.026799562024887688 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.49387755102040815, + "acc_stderr": 0.032006820201639086, + "acc_norm": 0.49387755102040815, + "acc_norm_stderr": 0.032006820201639086 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5316455696202531, + "acc_stderr": 0.032481974005110756, + "acc_norm": 0.5316455696202531, + "acc_norm_stderr": 0.032481974005110756 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3089960886571056, + "acc_stderr": 
0.011801729777239249, + "acc_norm": 0.3089960886571056, + "acc_norm_stderr": 0.011801729777239249 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4117647058823529, + "acc_stderr": 0.03454236585380609, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.03454236585380609 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.49696969696969695, + "acc_stderr": 0.03904272341431857, + "acc_norm": 0.49696969696969695, + "acc_norm_stderr": 0.03904272341431857 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.29498164014687883, + "mc1_stderr": 0.015964400965589674, + "mc2": 0.4713625301918517, + "mc2_stderr": 0.015403994277020416 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4946871310507674, + "acc_stderr": 0.01718938362722969, + "acc_norm": 0.5525383707201889, + "acc_norm_stderr": 0.01709519030150058 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + 
"harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "jiwoochris/ko-llama2-13b-v4", + "model_sha": "a3773012adb9e13b9bd9b15634dfaeb18718c24d", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/jiwoochris/ko-llama2-13b-v5/result_2023-10-24 16:25:31.json 
b/jiwoochris/ko-llama2-13b-v5/result_2023-10-24 16:25:31.json new file mode 100644 index 0000000000000000000000000000000000000000..fff32d4a288d182ab0fa138dca1accb4c7410a77 --- /dev/null +++ b/jiwoochris/ko-llama2-13b-v5/result_2023-10-24 16:25:31.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.40273037542662116, + "acc_stderr": 0.014332236306790154, + "acc_norm": 0.45051194539249145, + "acc_norm_stderr": 0.014539646098471627 + }, + "harness|ko_hellaswag|10": { + "acc": 0.42123083051185023, + "acc_stderr": 0.004927473370720142, + "acc_norm": 0.5584544911372237, + "acc_norm_stderr": 0.004955564650016176 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5321637426900585, + "acc_stderr": 0.03826882417660369, + "acc_norm": 0.5321637426900585, + "acc_norm_stderr": 0.03826882417660369 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5048543689320388, + "acc_stderr": 0.049505043821289195, + "acc_norm": 0.5048543689320388, + "acc_norm_stderr": 0.049505043821289195 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.49936143039591313, + "acc_stderr": 0.017879948914431665, + "acc_norm": 0.49936143039591313, + "acc_norm_stderr": 0.017879948914431665 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.042446332383532286, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.042446332383532286 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.35319148936170214, + "acc_stderr": 0.03124532520276193, + "acc_norm": 0.35319148936170214, + "acc_norm_stderr": 0.03124532520276193 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.43373493975903615, + "acc_stderr": 0.03858158940685515, + "acc_norm": 0.43373493975903615, + "acc_norm_stderr": 0.03858158940685515 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.47266881028938906, + 
"acc_stderr": 0.028355633568328188, + "acc_norm": 0.47266881028938906, + "acc_norm_stderr": 0.028355633568328188 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3811659192825112, + "acc_stderr": 0.03259625118416828, + "acc_norm": 0.3811659192825112, + "acc_norm_stderr": 0.03259625118416828 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4961832061068702, + "acc_stderr": 0.043851623256015534, + "acc_norm": 0.4961832061068702, + "acc_norm_stderr": 0.043851623256015534 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5656565656565656, + "acc_stderr": 0.035315058793591834, + "acc_norm": 0.5656565656565656, + "acc_norm_stderr": 0.035315058793591834 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4206896551724138, + "acc_stderr": 0.0411391498118926, + "acc_norm": 0.4206896551724138, + "acc_norm_stderr": 0.0411391498118926 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.04158307533083286, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.04158307533083286 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.41596638655462187, + "acc_stderr": 0.03201650100739615, + "acc_norm": 0.41596638655462187, + "acc_norm_stderr": 0.03201650100739615 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3923076923076923, + "acc_stderr": 0.02475600038213094, + "acc_norm": 0.3923076923076923, + "acc_norm_stderr": 0.02475600038213094 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 
0.4722222222222222, + "acc_stderr": 0.04826217294139894, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.04826217294139894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3793103448275862, + "acc_stderr": 0.034139638059062345, + "acc_norm": 0.3793103448275862, + "acc_norm_stderr": 0.034139638059062345 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.44516129032258067, + "acc_stderr": 0.028272410186214906, + "acc_norm": 0.44516129032258067, + "acc_norm_stderr": 0.028272410186214906 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6581196581196581, + "acc_stderr": 0.031075028526507748, + "acc_norm": 0.6581196581196581, + "acc_norm_stderr": 0.031075028526507748 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.46037735849056605, + "acc_stderr": 0.030676096599389184, + "acc_norm": 0.46037735849056605, + "acc_norm_stderr": 0.030676096599389184 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.0478833976870286, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.0478833976870286 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.02671924078371216, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.02671924078371216 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.03757949922943343, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.03757949922943343 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5970149253731343, + "acc_stderr": 0.034683432951111266, + "acc_norm": 0.5970149253731343, + "acc_norm_stderr": 0.034683432951111266 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3930635838150289, + "acc_stderr": 0.03724249595817731, + "acc_norm": 0.3930635838150289, + "acc_norm_stderr": 0.03724249595817731 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.328042328042328, + "acc_stderr": 0.02418049716437689, + "acc_norm": 0.328042328042328, + 
"acc_norm_stderr": 0.02418049716437689 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3402777777777778, + "acc_stderr": 0.03962135573486219, + "acc_norm": 0.3402777777777778, + "acc_norm_stderr": 0.03962135573486219 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237101, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237101 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4479768786127168, + "acc_stderr": 0.02677299065336183, + "acc_norm": 0.4479768786127168, + "acc_norm_stderr": 0.02677299065336183 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4294478527607362, + "acc_stderr": 0.038890666191127216, + "acc_norm": 0.4294478527607362, + "acc_norm_stderr": 0.038890666191127216 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.027744313443376536, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.027744313443376536 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.45595854922279794, + "acc_stderr": 0.03594413711272436, + "acc_norm": 0.45595854922279794, + "acc_norm_stderr": 0.03594413711272436 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.04227054451232199, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.04227054451232199 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.48256880733944957, + "acc_stderr": 0.02142429187185315, + "acc_norm": 0.48256880733944957, + "acc_norm_stderr": 0.02142429187185315 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30158730158730157, + "acc_stderr": 0.041049472699033945, + "acc_norm": 
0.30158730158730157, + "acc_norm_stderr": 0.041049472699033945 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.43137254901960786, + "acc_stderr": 0.028358956313423552, + "acc_norm": 0.43137254901960786, + "acc_norm_stderr": 0.028358956313423552 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6033057851239669, + "acc_stderr": 0.044658697805310094, + "acc_norm": 0.6033057851239669, + "acc_norm_stderr": 0.044658697805310094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4407894736842105, + "acc_stderr": 0.04040311062490436, + "acc_norm": 0.4407894736842105, + "acc_norm_stderr": 0.04040311062490436 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3300653594771242, + "acc_stderr": 0.019023726160724553, + "acc_norm": 0.3300653594771242, + "acc_norm_stderr": 0.019023726160724553 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.028121636040639872, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.028121636040639872 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.21428571428571427, + "acc_stderr": 0.038946411200447915, + "acc_norm": 0.21428571428571427, + "acc_norm_stderr": 0.038946411200447915 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.30092592592592593, + "acc_stderr": 0.03128039084329882, + "acc_norm": 0.30092592592592593, + "acc_norm_stderr": 0.03128039084329882 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.47, + 
"acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.28308823529411764, + "acc_stderr": 0.0273658611315138, + "acc_norm": 0.28308823529411764, + "acc_norm_stderr": 0.0273658611315138 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.49795918367346936, + "acc_stderr": 0.0320089533497105, + "acc_norm": 0.49795918367346936, + "acc_norm_stderr": 0.0320089533497105 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5189873417721519, + "acc_stderr": 0.03252375148090447, + "acc_norm": 0.5189873417721519, + "acc_norm_stderr": 0.03252375148090447 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3116036505867014, + "acc_stderr": 0.011829039182849645, + "acc_norm": 0.3116036505867014, + "acc_norm_stderr": 0.011829039182849645 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4068627450980392, + "acc_stderr": 0.03447891136353382, + "acc_norm": 0.4068627450980392, + "acc_norm_stderr": 0.03447891136353382 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.48484848484848486, + "acc_stderr": 0.03902551007374449, + "acc_norm": 0.48484848484848486, + "acc_norm_stderr": 0.03902551007374449 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.28886168910648713, + "mc1_stderr": 0.015866346401384304, + "mc2": 0.4663029303420103, + "mc2_stderr": 0.015238838193243256 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5100354191263282, + "acc_stderr": 0.017186891286894056, + "acc_norm": 0.5572609208972845, + "acc_norm_stderr": 0.017077254131556224 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + 
"harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 
1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "jiwoochris/ko-llama2-13b-v5", + "model_sha": "28f20014bc519440b6c16a65adf6545c1c9687b6", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/jiwoochris/ko-llama2-13b-v6/result_2023-10-28 13:18:01.json b/jiwoochris/ko-llama2-13b-v6/result_2023-10-28 13:18:01.json new file mode 100644 index 0000000000000000000000000000000000000000..12677716637fc1a18660b5f0090ed3424a335675 --- /dev/null +++ b/jiwoochris/ko-llama2-13b-v6/result_2023-10-28 13:18:01.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3916382252559727, + "acc_stderr": 0.014264122124938215, + "acc_norm": 0.45733788395904434, + "acc_norm_stderr": 0.014558106543924067 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4204341764588727, + "acc_stderr": 0.004926198483948701, + "acc_norm": 0.5538737303326031, + "acc_norm_stderr": 0.004960732382255241 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5321637426900585, + "acc_stderr": 0.03826882417660368, + "acc_norm": 0.5321637426900585, + "acc_norm_stderr": 0.03826882417660368 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5339805825242718, + "acc_stderr": 0.04939291447273481, + "acc_norm": 0.5339805825242718, + "acc_norm_stderr": 0.04939291447273481 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4955300127713921, + "acc_stderr": 0.017879248970584388, + "acc_norm": 0.4955300127713921, + "acc_norm_stderr": 0.017879248970584388 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4148148148148148, + 
"acc_stderr": 0.042561937679014075, + "acc_norm": 0.4148148148148148, + "acc_norm_stderr": 0.042561937679014075 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.34, + "acc_stderr": 0.047609522856952365, + "acc_norm": 0.34, + "acc_norm_stderr": 0.047609522856952365 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3446808510638298, + "acc_stderr": 0.03106898596312215, + "acc_norm": 0.3446808510638298, + "acc_norm_stderr": 0.03106898596312215 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42771084337349397, + "acc_stderr": 0.038515976837185335, + "acc_norm": 0.42771084337349397, + "acc_norm_stderr": 0.038515976837185335 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.45980707395498394, + "acc_stderr": 0.028306190403305696, + "acc_norm": 0.45980707395498394, + "acc_norm_stderr": 0.028306190403305696 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3901345291479821, + "acc_stderr": 0.03273766725459157, + "acc_norm": 0.3901345291479821, + "acc_norm_stderr": 0.03273766725459157 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48854961832061067, + "acc_stderr": 0.043841400240780176, + "acc_norm": 0.48854961832061067, + "acc_norm_stderr": 0.043841400240780176 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5606060606060606, + "acc_stderr": 0.035360859475294805, + "acc_norm": 0.5606060606060606, + "acc_norm_stderr": 0.035360859475294805 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.04104269211806232, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.04104269211806232 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.04280105837364396, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.04280105837364396 + }, + 
"harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.03214536859788639, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.03214536859788639 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.37948717948717947, + "acc_stderr": 0.024603626924097417, + "acc_norm": 0.37948717948717947, + "acc_norm_stderr": 0.024603626924097417 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4537037037037037, + "acc_stderr": 0.04812917324536823, + "acc_norm": 0.4537037037037037, + "acc_norm_stderr": 0.04812917324536823 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3842364532019704, + "acc_stderr": 0.034223985656575515, + "acc_norm": 0.3842364532019704, + "acc_norm_stderr": 0.034223985656575515 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.43870967741935485, + "acc_stderr": 0.028229497320317216, + "acc_norm": 0.43870967741935485, + "acc_norm_stderr": 0.028229497320317216 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6452991452991453, + "acc_stderr": 0.03134250486245402, + "acc_norm": 0.6452991452991453, + "acc_norm_stderr": 0.03134250486245402 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.44150943396226416, + "acc_stderr": 0.030561590426731837, + "acc_norm": 0.44150943396226416, + "acc_norm_stderr": 0.030561590426731837 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.0478833976870286, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.0478833976870286 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24814814814814815, + "acc_stderr": 0.0263357394040558, + "acc_norm": 
0.24814814814814815, + "acc_norm_stderr": 0.0263357394040558 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.03757949922943343, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.03757949922943343 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.572139303482587, + "acc_stderr": 0.03498541988407795, + "acc_norm": 0.572139303482587, + "acc_norm_stderr": 0.03498541988407795 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3699421965317919, + "acc_stderr": 0.03681229633394319, + "acc_norm": 0.3699421965317919, + "acc_norm_stderr": 0.03681229633394319 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3201058201058201, + "acc_stderr": 0.024026846392873506, + "acc_norm": 0.3201058201058201, + "acc_norm_stderr": 0.024026846392873506 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.040166600304512336, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.040166600304512336 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.63, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.63, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4479768786127168, + "acc_stderr": 0.026772990653361826, + "acc_norm": 0.4479768786127168, + "acc_norm_stderr": 0.026772990653361826 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4294478527607362, + "acc_stderr": 0.038890666191127216, + "acc_norm": 0.4294478527607362, + "acc_norm_stderr": 0.038890666191127216 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4537037037037037, + "acc_stderr": 0.0277012284685426, + "acc_norm": 0.4537037037037037, + "acc_norm_stderr": 0.0277012284685426 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 
0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.45595854922279794, + "acc_stderr": 0.035944137112724366, + "acc_norm": 0.45595854922279794, + "acc_norm_stderr": 0.035944137112724366 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.04142439719489361, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.04142439719489361 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.47155963302752296, + "acc_stderr": 0.021402615697348047, + "acc_norm": 0.47155963302752296, + "acc_norm_stderr": 0.021402615697348047 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30158730158730157, + "acc_stderr": 0.041049472699033945, + "acc_norm": 0.30158730158730157, + "acc_norm_stderr": 0.041049472699033945 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.43137254901960786, + "acc_stderr": 0.028358956313423552, + "acc_norm": 0.43137254901960786, + "acc_norm_stderr": 0.028358956313423552 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6033057851239669, + "acc_stderr": 0.044658697805310094, + "acc_norm": 0.6033057851239669, + "acc_norm_stderr": 0.044658697805310094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4407894736842105, + "acc_stderr": 0.04040311062490436, + "acc_norm": 0.4407894736842105, + "acc_norm_stderr": 0.04040311062490436 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3349673202614379, + "acc_stderr": 0.01909422816700032, + "acc_norm": 0.3349673202614379, + "acc_norm_stderr": 0.01909422816700032 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3404255319148936, + "acc_stderr": 0.02826765748265014, + "acc_norm": 0.3404255319148936, + "acc_norm_stderr": 0.02826765748265014 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 
0.20535714285714285, + "acc_stderr": 0.038342410214190735, + "acc_norm": 0.20535714285714285, + "acc_norm_stderr": 0.038342410214190735 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.03141554629402543, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.03141554629402543 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.27941176470588236, + "acc_stderr": 0.02725720260611495, + "acc_norm": 0.27941176470588236, + "acc_norm_stderr": 0.02725720260611495 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.49387755102040815, + "acc_stderr": 0.032006820201639086, + "acc_norm": 0.49387755102040815, + "acc_norm_stderr": 0.032006820201639086 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5232067510548524, + "acc_stderr": 0.03251215201141018, + "acc_norm": 0.5232067510548524, + "acc_norm_stderr": 0.03251215201141018 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3076923076923077, + "acc_stderr": 0.011787910251664587, + "acc_norm": 0.3076923076923077, + "acc_norm_stderr": 0.011787910251664587 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.43137254901960786, + "acc_stderr": 0.03476099060501636, + "acc_norm": 0.43137254901960786, + "acc_norm_stderr": 0.03476099060501636 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5151515151515151, + "acc_stderr": 0.03902551007374448, + "acc_norm": 0.5151515151515151, + 
"acc_norm_stderr": 0.03902551007374448 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.29008567931456547, + "mc1_stderr": 0.01588623687420952, + "mc2": 0.4626627507088543, + "mc2_stderr": 0.015202563658823371 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.51357733175915, + "acc_stderr": 0.01718401506040145, + "acc_norm": 0.5442739079102715, + "acc_norm_stderr": 0.01712282914329265 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + 
"harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "jiwoochris/ko-llama2-13b-v6", + "model_sha": "2606639bb18ca27586615693d937d41d1a756391", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/jiwoochris/ko-llama2-v1/result_2023-10-21 08:55:17.json b/jiwoochris/ko-llama2-v1/result_2023-10-21 08:55:17.json new file mode 100644 index 0000000000000000000000000000000000000000..88f2ca51e279159b37153ebfb989b7d458f455b6 --- /dev/null +++ b/jiwoochris/ko-llama2-v1/result_2023-10-21 08:55:17.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.39419795221843, + "acc_stderr": 0.01428052266746733, + "acc_norm": 0.454778156996587, + "acc_norm_stderr": 0.014551507060836355 + }, + "harness|ko_hellaswag|10": { + "acc": 
0.4217287392949612, + "acc_stderr": 0.004928263494616727, + "acc_norm": 0.5544712208723361, + "acc_norm_stderr": 0.004960082528852438 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.543859649122807, + "acc_stderr": 0.03820042586602967, + "acc_norm": 0.543859649122807, + "acc_norm_stderr": 0.03820042586602967 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5436893203883495, + "acc_stderr": 0.049318019942204146, + "acc_norm": 0.5436893203883495, + "acc_norm_stderr": 0.049318019942204146 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.49936143039591313, + "acc_stderr": 0.017879948914431662, + "acc_norm": 0.49936143039591313, + "acc_norm_stderr": 0.017879948914431662 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.042446332383532286, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.042446332383532286 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3659574468085106, + "acc_stderr": 0.0314895582974553, + "acc_norm": 0.3659574468085106, + "acc_norm_stderr": 0.0314895582974553 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42771084337349397, + "acc_stderr": 0.03851597683718533, + "acc_norm": 0.42771084337349397, + "acc_norm_stderr": 0.03851597683718533 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4630225080385852, + "acc_stderr": 0.028320325830105915, + "acc_norm": 0.4630225080385852, + "acc_norm_stderr": 0.028320325830105915 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.35874439461883406, + "acc_stderr": 0.03219079200419995, + "acc_norm": 0.35874439461883406, + "acc_norm_stderr": 0.03219079200419995 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5038167938931297, + "acc_stderr": 0.043851623256015534, + "acc_norm": 0.5038167938931297, + "acc_norm_stderr": 0.043851623256015534 + }, + 
"harness|ko_mmlu_medical_genetics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5909090909090909, + "acc_stderr": 0.03502975799413007, + "acc_norm": 0.5909090909090909, + "acc_norm_stderr": 0.03502975799413007 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4068965517241379, + "acc_stderr": 0.040937939812662374, + "acc_norm": 0.4068965517241379, + "acc_norm_stderr": 0.040937939812662374 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.042801058373643966, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.042801058373643966 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.42436974789915966, + "acc_stderr": 0.03210479051015776, + "acc_norm": 0.42436974789915966, + "acc_norm_stderr": 0.03210479051015776 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.39487179487179486, + "acc_stderr": 0.02478431694215636, + "acc_norm": 0.39487179487179486, + "acc_norm_stderr": 0.02478431694215636 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.04803752235190193, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.04803752235190193 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3399014778325123, + "acc_stderr": 0.033327690684107895, + "acc_norm": 0.3399014778325123, + "acc_norm_stderr": 0.033327690684107895 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.432258064516129, + "acc_stderr": 0.028181739720019416, + "acc_norm": 0.432258064516129, + 
"acc_norm_stderr": 0.028181739720019416 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6581196581196581, + "acc_stderr": 0.031075028526507748, + "acc_norm": 0.6581196581196581, + "acc_norm_stderr": 0.031075028526507748 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4716981132075472, + "acc_stderr": 0.030723535249006107, + "acc_norm": 0.4716981132075472, + "acc_norm_stderr": 0.030723535249006107 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4818181818181818, + "acc_stderr": 0.04785964010794916, + "acc_norm": 0.4818181818181818, + "acc_norm_stderr": 0.04785964010794916 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2518518518518518, + "acc_stderr": 0.02646611753895992, + "acc_norm": 0.2518518518518518, + "acc_norm_stderr": 0.02646611753895992 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.03710185726119995, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.03710185726119995 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5970149253731343, + "acc_stderr": 0.034683432951111266, + "acc_norm": 0.5970149253731343, + "acc_norm_stderr": 0.034683432951111266 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3583815028901734, + "acc_stderr": 0.0365634365335316, + "acc_norm": 0.3583815028901734, + "acc_norm_stderr": 0.0365634365335316 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.023809523809523864, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.023809523809523864 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3402777777777778, + "acc_stderr": 0.03962135573486219, + "acc_norm": 0.3402777777777778, + "acc_norm_stderr": 0.03962135573486219 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.64, + "acc_stderr": 
0.048241815132442176, + "acc_norm": 0.64, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4479768786127168, + "acc_stderr": 0.026772990653361826, + "acc_norm": 0.4479768786127168, + "acc_norm_stderr": 0.026772990653361826 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4171779141104294, + "acc_stderr": 0.038741028598180814, + "acc_norm": 0.4171779141104294, + "acc_norm_stderr": 0.038741028598180814 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4537037037037037, + "acc_stderr": 0.0277012284685426, + "acc_norm": 0.4537037037037037, + "acc_norm_stderr": 0.0277012284685426 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.46632124352331605, + "acc_stderr": 0.03600244069867178, + "acc_norm": 0.46632124352331605, + "acc_norm_stderr": 0.03600244069867178 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.04142439719489361, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.04142439719489361 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.4990825688073395, + "acc_stderr": 0.021437287056051215, + "acc_norm": 0.4990825688073395, + "acc_norm_stderr": 0.021437287056051215 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.04134913018303316, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.04134913018303316 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4477124183006536, + "acc_stderr": 0.028472938478033522, + "acc_norm": 0.4477124183006536, + "acc_norm_stderr": 0.028472938478033522 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145634, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145634 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5867768595041323, 
+ "acc_stderr": 0.04495087843548408, + "acc_norm": 0.5867768595041323, + "acc_norm_stderr": 0.04495087843548408 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4407894736842105, + "acc_stderr": 0.04040311062490436, + "acc_norm": 0.4407894736842105, + "acc_norm_stderr": 0.04040311062490436 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.33169934640522875, + "acc_stderr": 0.01904748523936038, + "acc_norm": 0.33169934640522875, + "acc_norm_stderr": 0.01904748523936038 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.02812163604063988, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.02812163604063988 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.20535714285714285, + "acc_stderr": 0.03834241021419073, + "acc_norm": 0.20535714285714285, + "acc_norm_stderr": 0.03834241021419073 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2824074074074074, + "acc_stderr": 0.03070137211151092, + "acc_norm": 0.2824074074074074, + "acc_norm_stderr": 0.03070137211151092 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.27205882352941174, + "acc_stderr": 0.027033041151681456, + "acc_norm": 0.27205882352941174, + "acc_norm_stderr": 0.027033041151681456 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4775510204081633, + "acc_stderr": 0.031976941187136725, + "acc_norm": 0.4775510204081633, + "acc_norm_stderr": 0.031976941187136725 + }, + 
"harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5189873417721519, + "acc_stderr": 0.03252375148090447, + "acc_norm": 0.5189873417721519, + "acc_norm_stderr": 0.03252375148090447 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2966101694915254, + "acc_stderr": 0.011665946586082854, + "acc_norm": 0.2966101694915254, + "acc_norm_stderr": 0.011665946586082854 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4215686274509804, + "acc_stderr": 0.03465868196380757, + "acc_norm": 0.4215686274509804, + "acc_norm_stderr": 0.03465868196380757 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.0390369864774844, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.0390369864774844 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.30599755201958384, + "mc1_stderr": 0.016132229728155062, + "mc2": 0.4746429594651757, + "mc2_stderr": 0.01531218992321956 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5206611570247934, + "acc_stderr": 0.017175671279836446, + "acc_norm": 0.5678866587957497, + "acc_norm_stderr": 0.017031170198851742 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + 
"harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "jiwoochris/ko-llama2-v1", + "model_sha": "4253098940413125f8f0847038c076d42e5b2c59", + "model_dtype": "torch.float16", + 
"lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/jiwoochris/ko-llama2-v2/result_2023-10-21 08:55:35.json b/jiwoochris/ko-llama2-v2/result_2023-10-21 08:55:35.json new file mode 100644 index 0000000000000000000000000000000000000000..54de9bb1e0b8f317ba408adc4ed746e2f1614c8e --- /dev/null +++ b/jiwoochris/ko-llama2-v2/result_2023-10-21 08:55:35.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3626279863481229, + "acc_stderr": 0.014049106564955, + "acc_norm": 0.40187713310580203, + "acc_norm_stderr": 0.014327268614578273 + }, + "harness|ko_hellaswag|10": { + "acc": 0.41276638119896436, + "acc_stderr": 0.004913253031155685, + "acc_norm": 0.5246962756423024, + "acc_norm_stderr": 0.004983691099110914 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5380116959064327, + "acc_stderr": 0.03823727092882307, + "acc_norm": 0.5380116959064327, + "acc_norm_stderr": 0.03823727092882307 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5339805825242718, + "acc_stderr": 0.04939291447273481, + "acc_norm": 0.5339805825242718, + "acc_norm_stderr": 0.04939291447273481 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4482758620689655, + "acc_stderr": 0.01778403453499246, + "acc_norm": 0.4482758620689655, + "acc_norm_stderr": 0.01778403453499246 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.35555555555555557, + "acc_stderr": 0.04135176749720386, + "acc_norm": 0.35555555555555557, + "acc_norm_stderr": 0.04135176749720386 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4085106382978723, + "acc_stderr": 0.03213418026701576, + "acc_norm": 0.4085106382978723, + "acc_norm_stderr": 0.03213418026701576 + }, + "harness|ko_mmlu_virology|5": { + "acc": 
0.3674698795180723, + "acc_stderr": 0.03753267402120574, + "acc_norm": 0.3674698795180723, + "acc_norm_stderr": 0.03753267402120574 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.41479099678456594, + "acc_stderr": 0.02798268045975956, + "acc_norm": 0.41479099678456594, + "acc_norm_stderr": 0.02798268045975956 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3721973094170404, + "acc_stderr": 0.03244305283008731, + "acc_norm": 0.3721973094170404, + "acc_norm_stderr": 0.03244305283008731 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.46564885496183206, + "acc_stderr": 0.043749285605997376, + "acc_norm": 0.46564885496183206, + "acc_norm_stderr": 0.043749285605997376 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5505050505050505, + "acc_stderr": 0.0354413249194797, + "acc_norm": 0.5505050505050505, + "acc_norm_stderr": 0.0354413249194797 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.38620689655172413, + "acc_stderr": 0.04057324734419035, + "acc_norm": 0.38620689655172413, + "acc_norm_stderr": 0.04057324734419035 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.04158307533083286, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.04158307533083286 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4495798319327731, + "acc_stderr": 0.03231293497137707, + "acc_norm": 0.4495798319327731, + "acc_norm_stderr": 0.03231293497137707 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4153846153846154, + "acc_stderr": 0.024985354923102315, + "acc_norm": 0.4153846153846154, + "acc_norm_stderr": 0.024985354923102315 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + 
"harness|ko_mmlu_global_facts|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.04766075165356461, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.04766075165356461 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.33497536945812806, + "acc_stderr": 0.033208527423483104, + "acc_norm": 0.33497536945812806, + "acc_norm_stderr": 0.033208527423483104 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4129032258064516, + "acc_stderr": 0.028009138125400387, + "acc_norm": 0.4129032258064516, + "acc_norm_stderr": 0.028009138125400387 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6538461538461539, + "acc_stderr": 0.031166957367235897, + "acc_norm": 0.6538461538461539, + "acc_norm_stderr": 0.031166957367235897 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.44528301886792454, + "acc_stderr": 0.030588052974270658, + "acc_norm": 0.44528301886792454, + "acc_norm_stderr": 0.030588052974270658 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4727272727272727, + "acc_stderr": 0.04782001791380063, + "acc_norm": 0.4727272727272727, + "acc_norm_stderr": 0.04782001791380063 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.27037037037037037, + "acc_stderr": 0.027080372815145654, + "acc_norm": 0.27037037037037037, + "acc_norm_stderr": 0.027080372815145654 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.03780445850526733, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.03780445850526733 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5522388059701493, + "acc_stderr": 0.03516184772952166, + "acc_norm": 0.5522388059701493, + "acc_norm_stderr": 0.03516184772952166 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.37572254335260113, + "acc_stderr": 0.036928207672648664, + "acc_norm": 
0.37572254335260113, + "acc_norm_stderr": 0.036928207672648664 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.29894179894179895, + "acc_stderr": 0.02357760479165582, + "acc_norm": 0.29894179894179895, + "acc_norm_stderr": 0.02357760479165582 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3402777777777778, + "acc_stderr": 0.039621355734862175, + "acc_norm": 0.3402777777777778, + "acc_norm_stderr": 0.039621355734862175 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.55, + "acc_stderr": 0.05, + "acc_norm": 0.55, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4161849710982659, + "acc_stderr": 0.026538189104705484, + "acc_norm": 0.4161849710982659, + "acc_norm_stderr": 0.026538189104705484 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.44785276073619634, + "acc_stderr": 0.03906947479456601, + "acc_norm": 0.44785276073619634, + "acc_norm_stderr": 0.03906947479456601 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.027339546640662724, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.027339546640662724 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.44041450777202074, + "acc_stderr": 0.035827245300360945, + "acc_norm": 0.44041450777202074, + "acc_norm_stderr": 0.035827245300360945 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.21052631578947367, + "acc_stderr": 0.038351539543994194, + "acc_norm": 0.21052631578947367, + "acc_norm_stderr": 0.038351539543994194 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.4954128440366973, + "acc_stderr": 0.021436420955529424, + "acc_norm": 
0.4954128440366973, + "acc_norm_stderr": 0.021436420955529424 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3492063492063492, + "acc_stderr": 0.04263906892795133, + "acc_norm": 0.3492063492063492, + "acc_norm_stderr": 0.04263906892795133 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.47058823529411764, + "acc_stderr": 0.028580341065138286, + "acc_norm": 0.47058823529411764, + "acc_norm_stderr": 0.028580341065138286 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6528925619834711, + "acc_stderr": 0.043457245702925335, + "acc_norm": 0.6528925619834711, + "acc_norm_stderr": 0.043457245702925335 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4407894736842105, + "acc_stderr": 0.04040311062490436, + "acc_norm": 0.4407894736842105, + "acc_norm_stderr": 0.04040311062490436 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.29901960784313725, + "acc_stderr": 0.018521756215423027, + "acc_norm": 0.29901960784313725, + "acc_norm_stderr": 0.018521756215423027 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.31560283687943264, + "acc_stderr": 0.027724989449509314, + "acc_norm": 0.31560283687943264, + "acc_norm_stderr": 0.027724989449509314 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.15178571428571427, + "acc_stderr": 0.034057028381856924, + "acc_norm": 0.15178571428571427, + "acc_norm_stderr": 0.034057028381856924 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.39814814814814814, + "acc_stderr": 0.033384734032074016, + "acc_norm": 0.39814814814814814, + "acc_norm_stderr": 0.033384734032074016 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24692737430167597, + "acc_stderr": 0.014422292204808852, + "acc_norm": 0.24692737430167597, + "acc_norm_stderr": 0.014422292204808852 + }, + "harness|ko_mmlu_college_computer_science|5": { + 
"acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3235294117647059, + "acc_stderr": 0.02841820861940679, + "acc_norm": 0.3235294117647059, + "acc_norm_stderr": 0.02841820861940679 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.46122448979591835, + "acc_stderr": 0.031912820526692774, + "acc_norm": 0.46122448979591835, + "acc_norm_stderr": 0.031912820526692774 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5232067510548524, + "acc_stderr": 0.032512152011410174, + "acc_norm": 0.5232067510548524, + "acc_norm_stderr": 0.032512152011410174 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3070404172099087, + "acc_stderr": 0.011780959114513778, + "acc_norm": 0.3070404172099087, + "acc_norm_stderr": 0.011780959114513778 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4068627450980392, + "acc_stderr": 0.03447891136353382, + "acc_norm": 0.4068627450980392, + "acc_norm_stderr": 0.03447891136353382 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.46060606060606063, + "acc_stderr": 0.03892207016552013, + "acc_norm": 0.46060606060606063, + "acc_norm_stderr": 0.03892207016552013 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.29498164014687883, + "mc1_stderr": 0.015964400965589674, + "mc2": 0.47296551445370655, + "mc2_stderr": 0.016489115600580966 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5631641086186541, + "acc_stderr": 0.01705263355985607, + "acc_norm": 0.5855962219598583, + "acc_norm_stderr": 0.016936583383943615 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + 
"harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 
1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "jiwoochris/ko-llama2-v2", + "model_sha": "bfe6a2095cc43e82103cbdff36721810ef4057e3", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/jiwoochris/ko-llama2-v3/result_2023-10-21 15:59:59.json b/jiwoochris/ko-llama2-v3/result_2023-10-21 15:59:59.json new file mode 100644 index 0000000000000000000000000000000000000000..0ef8bf12fb98ef853cf37ee57322b477166c83a1 --- /dev/null +++ b/jiwoochris/ko-llama2-v3/result_2023-10-21 15:59:59.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.39505119453924914, + "acc_stderr": 0.014285898292938169, + "acc_norm": 0.45051194539249145, + "acc_norm_stderr": 0.014539646098471625 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4231228838876718, + "acc_stderr": 0.0049304485271466575, + "acc_norm": 0.5584544911372237, + "acc_norm_stderr": 0.004955564650016177 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5321637426900585, + "acc_stderr": 0.03826882417660369, + "acc_norm": 0.5321637426900585, + "acc_norm_stderr": 0.03826882417660369 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5145631067961165, + "acc_stderr": 0.04948637324026637, + "acc_norm": 0.5145631067961165, + "acc_norm_stderr": 0.04948637324026637 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.49936143039591313, + 
"acc_stderr": 0.017879948914431665, + "acc_norm": 0.49936143039591313, + "acc_norm_stderr": 0.017879948914431665 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.42962962962962964, + "acc_stderr": 0.04276349494376599, + "acc_norm": 0.42962962962962964, + "acc_norm_stderr": 0.04276349494376599 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3574468085106383, + "acc_stderr": 0.03132941789476425, + "acc_norm": 0.3574468085106383, + "acc_norm_stderr": 0.03132941789476425 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42168674698795183, + "acc_stderr": 0.03844453181770917, + "acc_norm": 0.42168674698795183, + "acc_norm_stderr": 0.03844453181770917 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4694533762057878, + "acc_stderr": 0.02834504586484068, + "acc_norm": 0.4694533762057878, + "acc_norm_stderr": 0.02834504586484068 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.37668161434977576, + "acc_stderr": 0.03252113489929187, + "acc_norm": 0.37668161434977576, + "acc_norm_stderr": 0.03252113489929187 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48854961832061067, + "acc_stderr": 0.043841400240780176, + "acc_norm": 0.48854961832061067, + "acc_norm_stderr": 0.043841400240780176 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5757575757575758, + "acc_stderr": 0.03521224908841586, + "acc_norm": 0.5757575757575758, + "acc_norm_stderr": 0.03521224908841586 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.43448275862068964, + "acc_stderr": 0.04130740879555498, + "acc_norm": 0.43448275862068964, + "acc_norm_stderr": 0.04130740879555498 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 
0.24509803921568626, + "acc_stderr": 0.042801058373643966, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.042801058373643966 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4369747899159664, + "acc_stderr": 0.03221943636566196, + "acc_norm": 0.4369747899159664, + "acc_norm_stderr": 0.03221943636566196 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4076923076923077, + "acc_stderr": 0.02491524398598784, + "acc_norm": 0.4076923076923077, + "acc_norm_stderr": 0.02491524398598784 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.37438423645320196, + "acc_stderr": 0.03405155380561952, + "acc_norm": 0.37438423645320196, + "acc_norm_stderr": 0.03405155380561952 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.45806451612903226, + "acc_stderr": 0.028343787250540636, + "acc_norm": 0.45806451612903226, + "acc_norm_stderr": 0.028343787250540636 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6367521367521367, + "acc_stderr": 0.03150712523091264, + "acc_norm": 0.6367521367521367, + "acc_norm_stderr": 0.03150712523091264 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4339622641509434, + "acc_stderr": 0.03050329201334259, + "acc_norm": 0.4339622641509434, + "acc_norm_stderr": 0.03050329201334259 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + 
"harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2814814814814815, + "acc_stderr": 0.02742001935094527, + "acc_norm": 0.2814814814814815, + "acc_norm_stderr": 0.02742001935094527 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.03757949922943343, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.03757949922943343 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6019900497512438, + "acc_stderr": 0.034611994290400135, + "acc_norm": 0.6019900497512438, + "acc_norm_stderr": 0.034611994290400135 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.36416184971098264, + "acc_stderr": 0.03669072477416906, + "acc_norm": 0.36416184971098264, + "acc_norm_stderr": 0.03669072477416906 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.023919984164047736, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.023919984164047736 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.040166600304512336, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.040166600304512336 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4393063583815029, + "acc_stderr": 0.026720034380514995, + "acc_norm": 0.4393063583815029, + "acc_norm_stderr": 0.026720034380514995 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4233128834355828, + "acc_stderr": 0.03881891213334383, + "acc_norm": 0.4233128834355828, + "acc_norm_stderr": 0.03881891213334383 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4567901234567901, + "acc_stderr": 0.027716661650194045, + "acc_norm": 0.4567901234567901, + "acc_norm_stderr": 
0.027716661650194045 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.46113989637305697, + "acc_stderr": 0.03597524411734578, + "acc_norm": 0.46113989637305697, + "acc_norm_stderr": 0.03597524411734578 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.04142439719489361, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.04142439719489361 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.4954128440366973, + "acc_stderr": 0.021436420955529424, + "acc_norm": 0.4954128440366973, + "acc_norm_stderr": 0.021436420955529424 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.04134913018303316, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.04134913018303316 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.028431095444176647, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.028431095444176647 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5950413223140496, + "acc_stderr": 0.04481137755942469, + "acc_norm": 0.5950413223140496, + "acc_norm_stderr": 0.04481137755942469 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4276315789473684, + "acc_stderr": 0.04026097083296558, + "acc_norm": 0.4276315789473684, + "acc_norm_stderr": 0.04026097083296558 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.018771683893528186, + "acc_norm": 0.3137254901960784, + "acc_norm_stderr": 0.018771683893528186 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.028121636040639872, + 
"acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.028121636040639872 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.23214285714285715, + "acc_stderr": 0.04007341809755806, + "acc_norm": 0.23214285714285715, + "acc_norm_stderr": 0.04007341809755806 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.03114144782353604, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.03114144782353604 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.28308823529411764, + "acc_stderr": 0.0273658611315138, + "acc_norm": 0.28308823529411764, + "acc_norm_stderr": 0.0273658611315138 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5061224489795918, + "acc_stderr": 0.032006820201639065, + "acc_norm": 0.5061224489795918, + "acc_norm_stderr": 0.032006820201639065 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5063291139240507, + "acc_stderr": 0.0325446201076786, + "acc_norm": 0.5063291139240507, + "acc_norm_stderr": 0.0325446201076786 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3089960886571056, + "acc_stderr": 0.011801729777239246, + "acc_norm": 0.3089960886571056, + "acc_norm_stderr": 0.011801729777239246 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4117647058823529, + "acc_stderr": 0.03454236585380609, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.03454236585380609 + }, + 
"harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5151515151515151, + "acc_stderr": 0.03902551007374448, + "acc_norm": 0.5151515151515151, + "acc_norm_stderr": 0.03902551007374448 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.29498164014687883, + "mc1_stderr": 0.015964400965589678, + "mc2": 0.46187837195291875, + "mc2_stderr": 0.015227305019069102 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5041322314049587, + "acc_stderr": 0.01718976703213082, + "acc_norm": 0.5572609208972845, + "acc_norm_stderr": 0.017077254131556224 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + 
"harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "jiwoochris/ko-llama2-v3", + "model_sha": "277462786fe73ea1b6f50d5e45ee1be5854611a1", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/jiwoochris/llama2_cot-13b-v2/result_2023-11-15 06:20:20.json b/jiwoochris/llama2_cot-13b-v2/result_2023-11-15 06:20:20.json new file mode 100644 index 0000000000000000000000000000000000000000..a29c13327e3ddce6ab09f8396253134a7efd27d9 --- /dev/null +++ b/jiwoochris/llama2_cot-13b-v2/result_2023-11-15 06:20:20.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 
0.3916382252559727, + "acc_stderr": 0.014264122124938217, + "acc_norm": 0.4453924914675768, + "acc_norm_stderr": 0.014523987638344076 + }, + "harness|ko_hellaswag|10": { + "acc": 0.42123083051185023, + "acc_stderr": 0.004927473370720142, + "acc_norm": 0.5585540728938458, + "acc_norm_stderr": 0.0049554475646940545 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5497076023391813, + "acc_stderr": 0.038158273659132366, + "acc_norm": 0.5497076023391813, + "acc_norm_stderr": 0.038158273659132366 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5631067961165048, + "acc_stderr": 0.049111471073657764, + "acc_norm": 0.5631067961165048, + "acc_norm_stderr": 0.049111471073657764 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5031928480204342, + "acc_stderr": 0.017879598945933082, + "acc_norm": 0.5031928480204342, + "acc_norm_stderr": 0.017879598945933082 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.042446332383532286, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.042446332383532286 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.34, + "acc_stderr": 0.047609522856952365, + "acc_norm": 0.34, + "acc_norm_stderr": 0.047609522856952365 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3446808510638298, + "acc_stderr": 0.03106898596312215, + "acc_norm": 0.3446808510638298, + "acc_norm_stderr": 0.03106898596312215 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42168674698795183, + "acc_stderr": 0.03844453181770917, + "acc_norm": 0.42168674698795183, + "acc_norm_stderr": 0.03844453181770917 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4758842443729904, + "acc_stderr": 0.028365041542564577, + "acc_norm": 0.4758842443729904, + "acc_norm_stderr": 0.028365041542564577 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3632286995515695, + "acc_stderr": 0.03227790442850499, + "acc_norm": 0.3632286995515695, + "acc_norm_stderr": 0.03227790442850499 + }, + 
"harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5038167938931297, + "acc_stderr": 0.043851623256015534, + "acc_norm": 0.5038167938931297, + "acc_norm_stderr": 0.043851623256015534 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5757575757575758, + "acc_stderr": 0.035212249088415866, + "acc_norm": 0.5757575757575758, + "acc_norm_stderr": 0.035212249088415866 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4206896551724138, + "acc_stderr": 0.0411391498118926, + "acc_norm": 0.4206896551724138, + "acc_norm_stderr": 0.0411391498118926 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237655, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237655 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.03214536859788639, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.03214536859788639 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4, + "acc_stderr": 0.02483881198803315, + "acc_norm": 0.4, + "acc_norm_stderr": 0.02483881198803315 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.04826217294139894, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.04826217294139894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3694581280788177, + "acc_stderr": 0.03395970381998574, + "acc_norm": 0.3694581280788177, + "acc_norm_stderr": 0.03395970381998574 + }, + "harness|ko_mmlu_high_school_biology|5": 
{ + "acc": 0.45161290322580644, + "acc_stderr": 0.028310500348568385, + "acc_norm": 0.45161290322580644, + "acc_norm_stderr": 0.028310500348568385 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6410256410256411, + "acc_stderr": 0.03142616993791923, + "acc_norm": 0.6410256410256411, + "acc_norm_stderr": 0.03142616993791923 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.45660377358490567, + "acc_stderr": 0.03065674869673943, + "acc_norm": 0.45660377358490567, + "acc_norm_stderr": 0.03065674869673943 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.44545454545454544, + "acc_stderr": 0.047605488214603246, + "acc_norm": 0.44545454545454544, + "acc_norm_stderr": 0.047605488214603246 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.02684205787383371, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.02684205787383371 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.03802039760107903, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.03802039760107903 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.582089552238806, + "acc_stderr": 0.034875586404620636, + "acc_norm": 0.582089552238806, + "acc_norm_stderr": 0.034875586404620636 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3815028901734104, + "acc_stderr": 0.03703851193099521, + "acc_norm": 0.3815028901734104, + "acc_norm_stderr": 0.03703851193099521 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30687830687830686, + "acc_stderr": 0.023752928712112126, + "acc_norm": 0.30687830687830686, + "acc_norm_stderr": 0.023752928712112126 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3472222222222222, + "acc_stderr": 0.039812405437178615, + "acc_norm": 0.3472222222222222, + "acc_norm_stderr": 0.039812405437178615 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 
0.047937248544110196 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.41329479768786126, + "acc_stderr": 0.02651126136940924, + "acc_norm": 0.41329479768786126, + "acc_norm_stderr": 0.02651126136940924 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4294478527607362, + "acc_stderr": 0.038890666191127216, + "acc_norm": 0.4294478527607362, + "acc_norm_stderr": 0.038890666191127216 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4660493827160494, + "acc_stderr": 0.02775653525734767, + "acc_norm": 0.4660493827160494, + "acc_norm_stderr": 0.02775653525734767 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.46113989637305697, + "acc_stderr": 0.03597524411734578, + "acc_norm": 0.46113989637305697, + "acc_norm_stderr": 0.03597524411734578 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.04185774424022057, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.04185774424022057 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.4972477064220184, + "acc_stderr": 0.02143699835976532, + "acc_norm": 0.4972477064220184, + "acc_norm_stderr": 0.02143699835976532 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30158730158730157, + "acc_stderr": 0.041049472699033945, + "acc_norm": 0.30158730158730157, + "acc_norm_stderr": 0.041049472699033945 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.028431095444176643, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.028431095444176643 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + 
"acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5867768595041323, + "acc_stderr": 0.04495087843548408, + "acc_norm": 0.5867768595041323, + "acc_norm_stderr": 0.04495087843548408 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4407894736842105, + "acc_stderr": 0.04040311062490436, + "acc_norm": 0.4407894736842105, + "acc_norm_stderr": 0.04040311062490436 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.018771683893528183, + "acc_norm": 0.3137254901960784, + "acc_norm_stderr": 0.018771683893528183 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.028121636040639872, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.028121636040639872 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.23214285714285715, + "acc_stderr": 0.04007341809755806, + "acc_norm": 0.23214285714285715, + "acc_norm_stderr": 0.04007341809755806 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.031415546294025445, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.031415546294025445 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.2757352941176471, + "acc_stderr": 0.02714627193662517, + "acc_norm": 0.2757352941176471, + "acc_norm_stderr": 0.02714627193662517 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.49387755102040815, + 
"acc_stderr": 0.032006820201639086, + "acc_norm": 0.49387755102040815, + "acc_norm_stderr": 0.032006820201639086 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5232067510548524, + "acc_stderr": 0.032512152011410174, + "acc_norm": 0.5232067510548524, + "acc_norm_stderr": 0.032512152011410174 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2966101694915254, + "acc_stderr": 0.011665946586082849, + "acc_norm": 0.2966101694915254, + "acc_norm_stderr": 0.011665946586082849 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.03410785338904719, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.03410785338904719 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.47878787878787876, + "acc_stderr": 0.03900828913737302, + "acc_norm": 0.47878787878787876, + "acc_norm_stderr": 0.03900828913737302 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2974296205630355, + "mc1_stderr": 0.01600265148736101, + "mc2": 0.4675588535923083, + "mc2_stderr": 0.01519834664408876 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4923258559622196, + "acc_stderr": 0.01718832921965428, + "acc_norm": 0.5230224321133412, + "acc_norm_stderr": 0.01717212154672763 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + 
"harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + 
"model_name": "jiwoochris/llama2_cot-13b-v2", + "model_sha": "7aa89f6afcd8a48eb30f8bc8a6013fb7eee105b0", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/jiwoochris/llama2_tmt-13b-v1/result_2023-11-02 08:59:31.json b/jiwoochris/llama2_tmt-13b-v1/result_2023-11-02 08:59:31.json new file mode 100644 index 0000000000000000000000000000000000000000..abfe5a2236cc82c10056c5ba6877c5f079155fe6 --- /dev/null +++ b/jiwoochris/llama2_tmt-13b-v1/result_2023-11-02 08:59:31.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.38054607508532423, + "acc_stderr": 0.01418827771234981, + "acc_norm": 0.42918088737201365, + "acc_norm_stderr": 0.014464085894870657 + }, + "harness|ko_hellaswag|10": { + "acc": 0.42342162915753834, + "acc_stderr": 0.004930911515084777, + "acc_norm": 0.5576578370842462, + "acc_norm_stderr": 0.0049564940598648966 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5614035087719298, + "acc_stderr": 0.038057975055904594, + "acc_norm": 0.5614035087719298, + "acc_norm_stderr": 0.038057975055904594 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5048543689320388, + "acc_stderr": 0.049505043821289195, + "acc_norm": 0.5048543689320388, + "acc_norm_stderr": 0.049505043821289195 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.49680715197956576, + "acc_stderr": 0.017879598945933068, + "acc_norm": 0.49680715197956576, + "acc_norm_stderr": 0.017879598945933068 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37777777777777777, + "acc_stderr": 0.04188307537595853, + "acc_norm": 0.37777777777777777, + "acc_norm_stderr": 0.04188307537595853 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621503, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621503 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 
0.35319148936170214, + "acc_stderr": 0.03124532520276193, + "acc_norm": 0.35319148936170214, + "acc_norm_stderr": 0.03124532520276193 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.40963855421686746, + "acc_stderr": 0.038284011150790206, + "acc_norm": 0.40963855421686746, + "acc_norm_stderr": 0.038284011150790206 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.45980707395498394, + "acc_stderr": 0.028306190403305696, + "acc_norm": 0.45980707395498394, + "acc_norm_stderr": 0.028306190403305696 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3811659192825112, + "acc_stderr": 0.03259625118416828, + "acc_norm": 0.3811659192825112, + "acc_norm_stderr": 0.03259625118416828 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48854961832061067, + "acc_stderr": 0.043841400240780176, + "acc_norm": 0.48854961832061067, + "acc_norm_stderr": 0.043841400240780176 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5656565656565656, + "acc_stderr": 0.03531505879359183, + "acc_norm": 0.5656565656565656, + "acc_norm_stderr": 0.03531505879359183 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4413793103448276, + "acc_stderr": 0.04137931034482758, + "acc_norm": 0.4413793103448276, + "acc_norm_stderr": 0.04137931034482758 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.042801058373643966, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.042801058373643966 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.40336134453781514, + "acc_stderr": 0.031866081214088314, + "acc_norm": 0.40336134453781514, + "acc_norm_stderr": 0.031866081214088314 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4, + "acc_stderr": 0.024838811988033154, + "acc_norm": 0.4, + "acc_norm_stderr": 0.024838811988033154 + }, + 
"harness|ko_mmlu_computer_security|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4351851851851852, + "acc_stderr": 0.04792898170907062, + "acc_norm": 0.4351851851851852, + "acc_norm_stderr": 0.04792898170907062 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3793103448275862, + "acc_stderr": 0.034139638059062345, + "acc_norm": 0.3793103448275862, + "acc_norm_stderr": 0.034139638059062345 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4258064516129032, + "acc_stderr": 0.028129112709165894, + "acc_norm": 0.4258064516129032, + "acc_norm_stderr": 0.028129112709165894 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6282051282051282, + "acc_stderr": 0.031660988918880785, + "acc_norm": 0.6282051282051282, + "acc_norm_stderr": 0.031660988918880785 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.47547169811320755, + "acc_stderr": 0.030735822206205608, + "acc_norm": 0.47547169811320755, + "acc_norm_stderr": 0.030735822206205608 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.45454545454545453, + "acc_stderr": 0.04769300568972743, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.04769300568972743 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.02671924078371216, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.02671924078371216 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.03780445850526733, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.03780445850526733 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5373134328358209, + "acc_stderr": 0.035256751674679745, + "acc_norm": 0.5373134328358209, + 
"acc_norm_stderr": 0.035256751674679745 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3468208092485549, + "acc_stderr": 0.036291466701596636, + "acc_norm": 0.3468208092485549, + "acc_norm_stderr": 0.036291466701596636 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.023266512213730554, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.023266512213730554 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3263888888888889, + "acc_stderr": 0.03921067198982266, + "acc_norm": 0.3263888888888889, + "acc_norm_stderr": 0.03921067198982266 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.42485549132947975, + "acc_stderr": 0.026613350840261743, + "acc_norm": 0.42485549132947975, + "acc_norm_stderr": 0.026613350840261743 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4110429447852761, + "acc_stderr": 0.038656978537853624, + "acc_norm": 0.4110429447852761, + "acc_norm_stderr": 0.038656978537853624 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4567901234567901, + "acc_stderr": 0.027716661650194045, + "acc_norm": 0.4567901234567901, + "acc_norm_stderr": 0.027716661650194045 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.40932642487046633, + "acc_stderr": 0.03548608168860806, + "acc_norm": 0.40932642487046633, + "acc_norm_stderr": 0.03548608168860806 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.040969851398436716, + "acc_norm": 
0.2543859649122807, + "acc_norm_stderr": 0.040969851398436716 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.46788990825688076, + "acc_stderr": 0.021393071222680814, + "acc_norm": 0.46788990825688076, + "acc_norm_stderr": 0.021393071222680814 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.31746031746031744, + "acc_stderr": 0.0416345303130286, + "acc_norm": 0.31746031746031744, + "acc_norm_stderr": 0.0416345303130286 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4117647058823529, + "acc_stderr": 0.02818059632825929, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.02818059632825929 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5867768595041323, + "acc_stderr": 0.04495087843548408, + "acc_norm": 0.5867768595041323, + "acc_norm_stderr": 0.04495087843548408 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4605263157894737, + "acc_stderr": 0.04056242252249032, + "acc_norm": 0.4605263157894737, + "acc_norm_stderr": 0.04056242252249032 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.30718954248366015, + "acc_stderr": 0.01866335967146367, + "acc_norm": 0.30718954248366015, + "acc_norm_stderr": 0.01866335967146367 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3049645390070922, + "acc_stderr": 0.027464708442022125, + "acc_norm": 0.3049645390070922, + "acc_norm_stderr": 0.027464708442022125 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.15178571428571427, + "acc_stderr": 0.03405702838185692, + "acc_norm": 0.15178571428571427, + "acc_norm_stderr": 0.03405702838185692 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.031141447823536048, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.031141447823536048 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 
0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.25735294117647056, + "acc_stderr": 0.02655651947004153, + "acc_norm": 0.25735294117647056, + "acc_norm_stderr": 0.02655651947004153 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4775510204081633, + "acc_stderr": 0.03197694118713672, + "acc_norm": 0.4775510204081633, + "acc_norm_stderr": 0.03197694118713672 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.4978902953586498, + "acc_stderr": 0.03254693801802007, + "acc_norm": 0.4978902953586498, + "acc_norm_stderr": 0.03254693801802007 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2920469361147327, + "acc_stderr": 0.011613349136271815, + "acc_norm": 0.2920469361147327, + "acc_norm_stderr": 0.011613349136271815 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4019607843137255, + "acc_stderr": 0.03441190023482465, + "acc_norm": 0.4019607843137255, + "acc_norm_stderr": 0.03441190023482465 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.48484848484848486, + "acc_stderr": 0.03902551007374449, + "acc_norm": 0.48484848484848486, + "acc_norm_stderr": 0.03902551007374449 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.29008567931456547, + "mc1_stderr": 0.01588623687420952, + "mc2": 0.471591282984116, + "mc2_stderr": 0.015363705899479043 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4675324675324675, + "acc_stderr": 0.017154073716682865, + "acc_norm": 0.5100354191263282, + "acc_norm_stderr": 0.01718689128689406 + } + }, 
+ "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + 
"harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "jiwoochris/llama2_tmt-13b-v1", + "model_sha": "19ab331bb4323f92cc3e5b17e3a7c2d629d5aa01", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/jiwoochris/llama2_tmt-13b-v2/result_2023-11-07 12:17:35.json b/jiwoochris/llama2_tmt-13b-v2/result_2023-11-07 12:17:35.json new file mode 100644 index 0000000000000000000000000000000000000000..c546ba62476599fbecfd028f1ba371f1087c1f1f --- /dev/null +++ b/jiwoochris/llama2_tmt-13b-v2/result_2023-11-07 12:17:35.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3771331058020478, + "acc_stderr": 0.014163366896192608, + "acc_norm": 0.4325938566552901, + "acc_norm_stderr": 0.014478005694182531 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4217287392949612, + "acc_stderr": 0.004928263494616727, + "acc_norm": 0.5551682931686915, + "acc_norm_stderr": 0.004959315198011164 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5497076023391813, + "acc_stderr": 0.038158273659132366, + "acc_norm": 0.5497076023391813, + "acc_norm_stderr": 0.038158273659132366 + }, + "harness|ko_mmlu_management|5": { + "acc": 
0.5339805825242718, + "acc_stderr": 0.04939291447273481, + "acc_norm": 0.5339805825242718, + "acc_norm_stderr": 0.04939291447273481 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4942528735632184, + "acc_stderr": 0.01787878232612924, + "acc_norm": 0.4942528735632184, + "acc_norm_stderr": 0.01787878232612924 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3851851851851852, + "acc_stderr": 0.042039210401562783, + "acc_norm": 0.3851851851851852, + "acc_norm_stderr": 0.042039210401562783 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621503, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621503 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3574468085106383, + "acc_stderr": 0.03132941789476425, + "acc_norm": 0.3574468085106383, + "acc_norm_stderr": 0.03132941789476425 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.43373493975903615, + "acc_stderr": 0.03858158940685515, + "acc_norm": 0.43373493975903615, + "acc_norm_stderr": 0.03858158940685515 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4630225080385852, + "acc_stderr": 0.028320325830105915, + "acc_norm": 0.4630225080385852, + "acc_norm_stderr": 0.028320325830105915 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.38565022421524664, + "acc_stderr": 0.03266842214289201, + "acc_norm": 0.38565022421524664, + "acc_norm_stderr": 0.03266842214289201 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4961832061068702, + "acc_stderr": 0.043851623256015534, + "acc_norm": 0.4961832061068702, + "acc_norm_stderr": 0.043851623256015534 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5757575757575758, + "acc_stderr": 0.035212249088415866, + "acc_norm": 0.5757575757575758, + "acc_norm_stderr": 0.035212249088415866 + }, + "harness|ko_mmlu_electrical_engineering|5": { + 
"acc": 0.42758620689655175, + "acc_stderr": 0.04122737111370332, + "acc_norm": 0.42758620689655175, + "acc_norm_stderr": 0.04122737111370332 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.042801058373643966, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.042801058373643966 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.40756302521008403, + "acc_stderr": 0.03191863374478466, + "acc_norm": 0.40756302521008403, + "acc_norm_stderr": 0.03191863374478466 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4, + "acc_stderr": 0.024838811988033158, + "acc_norm": 0.4, + "acc_norm_stderr": 0.024838811988033158 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4351851851851852, + "acc_stderr": 0.04792898170907062, + "acc_norm": 0.4351851851851852, + "acc_norm_stderr": 0.04792898170907062 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39408866995073893, + "acc_stderr": 0.034381579670365446, + "acc_norm": 0.39408866995073893, + "acc_norm_stderr": 0.034381579670365446 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4290322580645161, + "acc_stderr": 0.02815603653823321, + "acc_norm": 0.4290322580645161, + "acc_norm_stderr": 0.02815603653823321 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6239316239316239, + "acc_stderr": 0.03173393632969481, + "acc_norm": 0.6239316239316239, + "acc_norm_stderr": 0.03173393632969481 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4716981132075472, + "acc_stderr": 0.030723535249006107, + "acc_norm": 0.4716981132075472, + "acc_norm_stderr": 0.030723535249006107 + }, + 
"harness|ko_mmlu_public_relations|5": { + "acc": 0.4818181818181818, + "acc_stderr": 0.04785964010794916, + "acc_norm": 0.4818181818181818, + "acc_norm_stderr": 0.04785964010794916 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2518518518518518, + "acc_stderr": 0.02646611753895992, + "acc_norm": 0.2518518518518518, + "acc_norm_stderr": 0.02646611753895992 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + "acc_stderr": 0.037345356767871984, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.037345356767871984 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5671641791044776, + "acc_stderr": 0.03503490923673282, + "acc_norm": 0.5671641791044776, + "acc_norm_stderr": 0.03503490923673282 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3583815028901734, + "acc_stderr": 0.0365634365335316, + "acc_norm": 0.3583815028901734, + "acc_norm_stderr": 0.0365634365335316 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.29894179894179895, + "acc_stderr": 0.023577604791655795, + "acc_norm": 0.29894179894179895, + "acc_norm_stderr": 0.023577604791655795 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3472222222222222, + "acc_stderr": 0.039812405437178615, + "acc_norm": 0.3472222222222222, + "acc_norm_stderr": 0.039812405437178615 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4393063583815029, + "acc_stderr": 0.026720034380514995, + "acc_norm": 0.4393063583815029, + "acc_norm_stderr": 0.026720034380514995 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3987730061349693, + "acc_stderr": 0.03847021420456024, + "acc_norm": 0.3987730061349693, + 
"acc_norm_stderr": 0.03847021420456024 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4506172839506173, + "acc_stderr": 0.0276847214156562, + "acc_norm": 0.4506172839506173, + "acc_norm_stderr": 0.0276847214156562 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.39378238341968913, + "acc_stderr": 0.03526077095548237, + "acc_norm": 0.39378238341968913, + "acc_norm_stderr": 0.03526077095548237 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.04049339297748142, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.04049339297748142 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.47339449541284406, + "acc_stderr": 0.02140695268815158, + "acc_norm": 0.47339449541284406, + "acc_norm_stderr": 0.02140695268815158 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.31746031746031744, + "acc_stderr": 0.04163453031302859, + "acc_norm": 0.31746031746031744, + "acc_norm_stderr": 0.04163453031302859 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4215686274509804, + "acc_stderr": 0.02827549015679143, + "acc_norm": 0.4215686274509804, + "acc_norm_stderr": 0.02827549015679143 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5785123966942148, + "acc_stderr": 0.04507732278775087, + "acc_norm": 0.5785123966942148, + "acc_norm_stderr": 0.04507732278775087 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4473684210526316, + "acc_stderr": 0.040463368839782514, + "acc_norm": 0.4473684210526316, + "acc_norm_stderr": 0.040463368839782514 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.31209150326797386, + "acc_stderr": 0.01874501120127766, + 
"acc_norm": 0.31209150326797386, + "acc_norm_stderr": 0.01874501120127766 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.31560283687943264, + "acc_stderr": 0.027724989449509314, + "acc_norm": 0.31560283687943264, + "acc_norm_stderr": 0.027724989449509314 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.16071428571428573, + "acc_stderr": 0.03485946096475741, + "acc_norm": 0.16071428571428573, + "acc_norm_stderr": 0.03485946096475741 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3425925925925926, + "acc_stderr": 0.032365852526021574, + "acc_norm": 0.3425925925925926, + "acc_norm_stderr": 0.032365852526021574 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.2610294117647059, + "acc_stderr": 0.026679252270103117, + "acc_norm": 0.2610294117647059, + "acc_norm_stderr": 0.026679252270103117 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4775510204081633, + "acc_stderr": 0.03197694118713672, + "acc_norm": 0.4775510204081633, + "acc_norm_stderr": 0.03197694118713672 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.48945147679324896, + "acc_stderr": 0.032539983791662855, + "acc_norm": 0.48945147679324896, + "acc_norm_stderr": 0.032539983791662855 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2894393741851369, + "acc_stderr": 0.011582659702210236, + "acc_norm": 0.2894393741851369, + "acc_norm_stderr": 0.011582659702210236 + }, + 
"harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4117647058823529, + "acc_stderr": 0.03454236585380609, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.03454236585380609 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.47878787878787876, + "acc_stderr": 0.03900828913737302, + "acc_norm": 0.47878787878787876, + "acc_norm_stderr": 0.03900828913737302 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.29253365973072215, + "mc1_stderr": 0.015925597445286165, + "mc2": 0.4683789011362333, + "mc2_stderr": 0.0153748011102314 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.49940968122786306, + "acc_stderr": 0.01719034212344866, + "acc_norm": 0.5395513577331759, + "acc_norm_stderr": 0.01713648762604985 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, 
+ "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "jiwoochris/llama2_tmt-13b-v2", + "model_sha": "430d81fb7292f40407b87f989603131bf16f085a", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/jiwoochris/polyglot_350/result_2023-10-19 07:42:25.json b/jiwoochris/polyglot_350/result_2023-10-19 07:42:25.json new file mode 100644 index 
0000000000000000000000000000000000000000..a0fa2a7beb7e74b2c8c3a3c81e1924d0046a1a1c --- /dev/null +++ b/jiwoochris/polyglot_350/result_2023-10-19 07:42:25.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.28498293515358364, + "acc_stderr": 0.013191348179838792, + "acc_norm": 0.3174061433447099, + "acc_norm_stderr": 0.01360223908803817 + }, + "harness|ko_hellaswag|10": { + "acc": 0.371539533957379, + "acc_stderr": 0.004822286556305217, + "acc_norm": 0.4738099980083649, + "acc_norm_stderr": 0.004982931565945953 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.18128654970760233, + "acc_stderr": 0.029547741687640024, + "acc_norm": 0.18128654970760233, + "acc_norm_stderr": 0.029547741687640024 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.36893203883495146, + "acc_stderr": 0.047776151811567386, + "acc_norm": 0.36893203883495146, + "acc_norm_stderr": 0.047776151811567386 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.20689655172413793, + "acc_stderr": 0.014485656041669173, + "acc_norm": 0.20689655172413793, + "acc_norm_stderr": 0.014485656041669173 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.24444444444444444, + "acc_stderr": 0.037125378336148665, + "acc_norm": 0.24444444444444444, + "acc_norm_stderr": 0.037125378336148665 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.19, + "acc_stderr": 0.03942772444036623, + "acc_norm": 0.19, + "acc_norm_stderr": 0.03942772444036623 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.20851063829787234, + "acc_stderr": 0.026556982117838746, + "acc_norm": 0.20851063829787234, + "acc_norm_stderr": 0.026556982117838746 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.2891566265060241, + "acc_stderr": 0.03529486801511115, + "acc_norm": 0.2891566265060241, + "acc_norm_stderr": 0.03529486801511115 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2379421221864952, + "acc_stderr": 0.024185150647818707, + "acc_norm": 0.2379421221864952, + "acc_norm_stderr": 
0.024185150647818707 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.15246636771300448, + "acc_stderr": 0.024126204813252863, + "acc_norm": 0.15246636771300448, + "acc_norm_stderr": 0.024126204813252863 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2748091603053435, + "acc_stderr": 0.03915345408847836, + "acc_norm": 0.2748091603053435, + "acc_norm_stderr": 0.03915345408847836 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3181818181818182, + "acc_stderr": 0.033184773338453315, + "acc_norm": 0.3181818181818182, + "acc_norm_stderr": 0.033184773338453315 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2413793103448276, + "acc_stderr": 0.03565998174135303, + "acc_norm": 0.2413793103448276, + "acc_norm_stderr": 0.03565998174135303 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.35294117647058826, + "acc_stderr": 0.04755129616062947, + "acc_norm": 0.35294117647058826, + "acc_norm_stderr": 0.04755129616062947 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3445378151260504, + "acc_stderr": 0.03086868260412162, + "acc_norm": 0.3445378151260504, + "acc_norm_stderr": 0.03086868260412162 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.36153846153846153, + "acc_stderr": 0.024359581465396983, + "acc_norm": 0.36153846153846153, + "acc_norm_stderr": 0.024359581465396983 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.18, + "acc_stderr": 0.038612291966536955, + "acc_norm": 0.18, + "acc_norm_stderr": 0.038612291966536955 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.2037037037037037, + "acc_stderr": 0.038935425188248475, + "acc_norm": 0.2037037037037037, + 
"acc_norm_stderr": 0.038935425188248475 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.28078817733990147, + "acc_stderr": 0.0316185633535861, + "acc_norm": 0.28078817733990147, + "acc_norm_stderr": 0.0316185633535861 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3096774193548387, + "acc_stderr": 0.026302774983517418, + "acc_norm": 0.3096774193548387, + "acc_norm_stderr": 0.026302774983517418 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.21794871794871795, + "acc_stderr": 0.02704685763071668, + "acc_norm": 0.21794871794871795, + "acc_norm_stderr": 0.02704685763071668 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2981132075471698, + "acc_stderr": 0.028152837942493864, + "acc_norm": 0.2981132075471698, + "acc_norm_stderr": 0.028152837942493864 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.24545454545454545, + "acc_stderr": 0.04122066502878284, + "acc_norm": 0.24545454545454545, + "acc_norm_stderr": 0.04122066502878284 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.026719240783712166, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.026719240783712166 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.03802039760107903, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.03802039760107903 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.263681592039801, + "acc_stderr": 0.03115715086935557, + "acc_norm": 0.263681592039801, + "acc_norm_stderr": 0.03115715086935557 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.32947976878612717, + "acc_stderr": 0.03583901754736412, + "acc_norm": 0.32947976878612717, + "acc_norm_stderr": 0.03583901754736412 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2671957671957672, + "acc_stderr": 0.02278967314577656, + "acc_norm": 0.2671957671957672, + "acc_norm_stderr": 0.02278967314577656 + }, + "harness|ko_mmlu_college_biology|5": { + 
"acc": 0.2638888888888889, + "acc_stderr": 0.03685651095897532, + "acc_norm": 0.2638888888888889, + "acc_norm_stderr": 0.03685651095897532 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2138728323699422, + "acc_stderr": 0.022075709251757173, + "acc_norm": 0.2138728323699422, + "acc_norm_stderr": 0.022075709251757173 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2392638036809816, + "acc_stderr": 0.03351953879521271, + "acc_norm": 0.2392638036809816, + "acc_norm_stderr": 0.03351953879521271 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.23765432098765432, + "acc_stderr": 0.023683591837008553, + "acc_norm": 0.23765432098765432, + "acc_norm_stderr": 0.023683591837008553 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421296, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421296 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.3626943005181347, + "acc_stderr": 0.03469713791704372, + "acc_norm": 0.3626943005181347, + "acc_norm_stderr": 0.03469713791704372 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.040493392977481404, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.040493392977481404 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3541284403669725, + "acc_stderr": 0.020504729013829104, + "acc_norm": 0.3541284403669725, + "acc_norm_stderr": 0.020504729013829104 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.36507936507936506, + "acc_stderr": 0.04306241259127153, + "acc_norm": 0.36507936507936506, + "acc_norm_stderr": 0.04306241259127153 + }, + 
"harness|ko_mmlu_nutrition|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.02582916327275748, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.02582916327275748 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.19, + "acc_stderr": 0.03942772444036622, + "acc_norm": 0.19, + "acc_norm_stderr": 0.03942772444036622 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.14049586776859505, + "acc_stderr": 0.031722334260021585, + "acc_norm": 0.14049586776859505, + "acc_norm_stderr": 0.031722334260021585 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3157894736842105, + "acc_stderr": 0.0378272898086547, + "acc_norm": 0.3157894736842105, + "acc_norm_stderr": 0.0378272898086547 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2173202614379085, + "acc_stderr": 0.016684820929148598, + "acc_norm": 0.2173202614379085, + "acc_norm_stderr": 0.016684820929148598 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2553191489361702, + "acc_stderr": 0.026011992930902013, + "acc_norm": 0.2553191489361702, + "acc_norm_stderr": 0.026011992930902013 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.16071428571428573, + "acc_stderr": 0.034859460964757394, + "acc_norm": 0.16071428571428573, + "acc_norm_stderr": 0.034859460964757394 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4537037037037037, + "acc_stderr": 0.033953227263757976, + "acc_norm": 0.4537037037037037, + "acc_norm_stderr": 0.033953227263757976 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27262569832402234, + "acc_stderr": 0.014893391735249608, + "acc_norm": 0.27262569832402234, + "acc_norm_stderr": 0.014893391735249608 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036846, + "acc_norm": 0.2, + "acc_norm_stderr": 
0.04020151261036846 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.030161911930767102, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.030161911930767102 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4, + "acc_stderr": 0.03136250240935892, + "acc_norm": 0.4, + "acc_norm_stderr": 0.03136250240935892 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.21940928270042195, + "acc_stderr": 0.026939106581553945, + "acc_norm": 0.21940928270042195, + "acc_norm_stderr": 0.026939106581553945 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2438070404172099, + "acc_stderr": 0.010966507972178475, + "acc_norm": 0.2438070404172099, + "acc_norm_stderr": 0.010966507972178475 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.03019028245350194, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.03019028245350194 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2545454545454545, + "acc_stderr": 0.03401506715249039, + "acc_norm": 0.2545454545454545, + "acc_norm_stderr": 0.03401506715249039 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.26560587515299877, + "mc1_stderr": 0.015461027627253595, + "mc2": 0.40641374284087445, + "mc2_stderr": 0.014952562897051682 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2857142857142857, + "acc_stderr": 0.015531620786986736, + "acc_norm": 0.3246753246753247, + "acc_norm_stderr": 0.016098883939346463 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + 
"harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + 
"harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "jiwoochris/polyglot_350", + "model_sha": "9fb5a66197344b0ec71467e384620bd610668339", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/jjhsnail0822/danube-ko-1.8b-base/result_2024-06-19 13:13:58.json b/jjhsnail0822/danube-ko-1.8b-base/result_2024-06-19 13:13:58.json new file mode 100644 index 0000000000000000000000000000000000000000..79ec7a70caaeca636fd674bdfa52515e1172f8eb --- /dev/null +++ b/jjhsnail0822/danube-ko-1.8b-base/result_2024-06-19 13:13:58.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2645051194539249, + "acc_stderr": 0.012889272949313368, + "acc_norm": 0.3174061433447099, + "acc_norm_stderr": 0.01360223908803817 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3481378211511651, + "acc_stderr": 0.004754063867700179, + "acc_norm": 0.4444333798048198, + "acc_norm_stderr": 0.0049588722884421465 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.18128654970760233, + "acc_stderr": 0.029547741687640024, + "acc_norm": 0.18128654970760233, + "acc_norm_stderr": 0.029547741687640024 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.39805825242718446, + "acc_stderr": 0.04846748253977239, + "acc_norm": 0.39805825242718446, + "acc_norm_stderr": 0.04846748253977239 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.22988505747126436, + "acc_stderr": 0.015046301846691814, + "acc_norm": 0.22988505747126436, + "acc_norm_stderr": 0.015046301846691814 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.23703703703703705, + "acc_stderr": 0.03673731683969506, + 
"acc_norm": 0.23703703703703705, + "acc_norm_stderr": 0.03673731683969506 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932269, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932269 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.225531914893617, + "acc_stderr": 0.027321078417387533, + "acc_norm": 0.225531914893617, + "acc_norm_stderr": 0.027321078417387533 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.21686746987951808, + "acc_stderr": 0.03208284450356365, + "acc_norm": 0.21686746987951808, + "acc_norm_stderr": 0.03208284450356365 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.24437299035369775, + "acc_stderr": 0.02440616209466889, + "acc_norm": 0.24437299035369775, + "acc_norm_stderr": 0.02440616209466889 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.18385650224215247, + "acc_stderr": 0.025998379092356517, + "acc_norm": 0.18385650224215247, + "acc_norm_stderr": 0.025998379092356517 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2824427480916031, + "acc_stderr": 0.03948406125768361, + "acc_norm": 0.2824427480916031, + "acc_norm_stderr": 0.03948406125768361 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3484848484848485, + "acc_stderr": 0.033948539651564025, + "acc_norm": 0.3484848484848485, + "acc_norm_stderr": 0.033948539651564025 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2413793103448276, + "acc_stderr": 0.03565998174135302, + "acc_norm": 0.2413793103448276, + "acc_norm_stderr": 0.03565998174135302 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.37254901960784315, + "acc_stderr": 0.048108401480826346, + "acc_norm": 0.37254901960784315, + "acc_norm_stderr": 0.048108401480826346 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3487394957983193, + 
"acc_stderr": 0.030956636328566548, + "acc_norm": 0.3487394957983193, + "acc_norm_stderr": 0.030956636328566548 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.36666666666666664, + "acc_stderr": 0.024433016466052452, + "acc_norm": 0.36666666666666664, + "acc_norm_stderr": 0.024433016466052452 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.18, + "acc_stderr": 0.03861229196653694, + "acc_norm": 0.18, + "acc_norm_stderr": 0.03861229196653694 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.21296296296296297, + "acc_stderr": 0.03957835471980981, + "acc_norm": 0.21296296296296297, + "acc_norm_stderr": 0.03957835471980981 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.270935960591133, + "acc_stderr": 0.031270907132976984, + "acc_norm": 0.270935960591133, + "acc_norm_stderr": 0.031270907132976984 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3161290322580645, + "acc_stderr": 0.026450874489042764, + "acc_norm": 0.3161290322580645, + "acc_norm_stderr": 0.026450874489042764 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.18803418803418803, + "acc_stderr": 0.02559819368665224, + "acc_norm": 0.18803418803418803, + "acc_norm_stderr": 0.02559819368665224 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.28679245283018867, + "acc_stderr": 0.027834912527544067, + "acc_norm": 0.28679245283018867, + "acc_norm_stderr": 0.027834912527544067 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.22727272727272727, + "acc_stderr": 0.040139645540727735, + "acc_norm": 0.22727272727272727, + "acc_norm_stderr": 0.040139645540727735 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.02684205787383371, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.02684205787383371 + }, + 
"harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33112582781456956, + "acc_stderr": 0.038425817186598696, + "acc_norm": 0.33112582781456956, + "acc_norm_stderr": 0.038425817186598696 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.3283582089552239, + "acc_stderr": 0.033206858897443244, + "acc_norm": 0.3283582089552239, + "acc_norm_stderr": 0.033206858897443244 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.36416184971098264, + "acc_stderr": 0.03669072477416906, + "acc_norm": 0.36416184971098264, + "acc_norm_stderr": 0.03669072477416906 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.02256989707491842, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.02256989707491842 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2708333333333333, + "acc_stderr": 0.03716177437566017, + "acc_norm": 0.2708333333333333, + "acc_norm_stderr": 0.03716177437566017 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.23410404624277456, + "acc_stderr": 0.02279711027807114, + "acc_norm": 0.23410404624277456, + "acc_norm_stderr": 0.02279711027807114 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2331288343558282, + "acc_stderr": 0.0332201579577674, + "acc_norm": 0.2331288343558282, + "acc_norm_stderr": 0.0332201579577674 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.24382716049382716, + "acc_stderr": 0.023891879541959614, + "acc_norm": 0.24382716049382716, + "acc_norm_stderr": 0.023891879541959614 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, 
+ "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.36787564766839376, + "acc_stderr": 0.034801756684660366, + "acc_norm": 0.36787564766839376, + "acc_norm_stderr": 0.034801756684660366 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.039994238792813365, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.039994238792813365 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3467889908256881, + "acc_stderr": 0.02040609710409303, + "acc_norm": 0.3467889908256881, + "acc_norm_stderr": 0.02040609710409303 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.373015873015873, + "acc_stderr": 0.04325506042017086, + "acc_norm": 0.373015873015873, + "acc_norm_stderr": 0.04325506042017086 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3006535947712418, + "acc_stderr": 0.02625605383571896, + "acc_norm": 0.3006535947712418, + "acc_norm_stderr": 0.02625605383571896 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.32231404958677684, + "acc_stderr": 0.04266416363352168, + "acc_norm": 0.32231404958677684, + "acc_norm_stderr": 0.04266416363352168 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.34210526315789475, + "acc_stderr": 0.03860731599316091, + "acc_norm": 0.34210526315789475, + "acc_norm_stderr": 0.03860731599316091 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.016639319350313264, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.016639319350313264 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.23049645390070922, + "acc_stderr": 0.025123739226872402, + "acc_norm": 0.23049645390070922, + "acc_norm_stderr": 0.025123739226872402 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.16071428571428573, + "acc_stderr": 0.034859460964757394, 
+ "acc_norm": 0.16071428571428573, + "acc_norm_stderr": 0.034859460964757394 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.0340470532865388, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.0340470532865388 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27262569832402234, + "acc_stderr": 0.014893391735249608, + "acc_norm": 0.27262569832402234, + "acc_norm_stderr": 0.014893391735249608 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.19, + "acc_stderr": 0.03942772444036624, + "acc_norm": 0.19, + "acc_norm_stderr": 0.03942772444036624 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4522058823529412, + "acc_stderr": 0.030233758551596452, + "acc_norm": 0.4522058823529412, + "acc_norm_stderr": 0.030233758551596452 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4122448979591837, + "acc_stderr": 0.03151236044674281, + "acc_norm": 0.4122448979591837, + "acc_norm_stderr": 0.03151236044674281 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.20253164556962025, + "acc_stderr": 0.026160568246601464, + "acc_norm": 0.20253164556962025, + "acc_norm_stderr": 0.026160568246601464 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2438070404172099, + "acc_stderr": 0.010966507972178475, + "acc_norm": 0.2438070404172099, + "acc_norm_stderr": 0.010966507972178475 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.030587591351604243, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.030587591351604243 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2787878787878788, + "acc_stderr": 0.03501438706296781, + "acc_norm": 0.2787878787878788, + "acc_norm_stderr": 0.03501438706296781 + }, + 
"harness|ko_truthfulqa_mc|0": { + "mc1": 0.26193390452876375, + "mc1_stderr": 0.01539211880501501, + "mc2": 0.416250228687046, + "mc2_stderr": 0.015084923631840903 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.24321133412042503, + "acc_stderr": 0.014750068360453266, + "acc_norm": 0.3270365997638725, + "acc_norm_stderr": 0.016129047485457022 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "jjhsnail0822/danube-ko-1.8b-base", + "model_sha": "09cd401b361ea6c145f7d2db59b1108007b944fe", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/jjourney1125/M-SOLAR-10.7B-v1.0/result_2023-12-25 04:59:35.json b/jjourney1125/M-SOLAR-10.7B-v1.0/result_2023-12-25 04:59:35.json new file mode 100644 index 0000000000000000000000000000000000000000..222213d0d0b12301fa5f2c5f670893a17f13100d --- /dev/null +++ b/jjourney1125/M-SOLAR-10.7B-v1.0/result_2023-12-25 04:59:35.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4598976109215017, + "acc_stderr": 0.014564318856924848, + "acc_norm": 0.49573378839590443, + "acc_norm_stderr": 0.014610858923956952 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4403505277833101, + 
"acc_stderr": 0.004954146286513353, + "acc_norm": 0.6011750647281418, + "acc_norm_stderr": 0.004886559008754985 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6198830409356725, + "acc_stderr": 0.03722965741385539, + "acc_norm": 0.6198830409356725, + "acc_norm_stderr": 0.03722965741385539 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6990291262135923, + "acc_stderr": 0.045416094465039476, + "acc_norm": 0.6990291262135923, + "acc_norm_stderr": 0.045416094465039476 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6538952745849298, + "acc_stderr": 0.01701196526641208, + "acc_norm": 0.6538952745849298, + "acc_norm_stderr": 0.01701196526641208 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4888888888888889, + "acc_stderr": 0.04318275491977976, + "acc_norm": 0.4888888888888889, + "acc_norm_stderr": 0.04318275491977976 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.5191489361702127, + "acc_stderr": 0.032662042990646796, + "acc_norm": 0.5191489361702127, + "acc_norm_stderr": 0.032662042990646796 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4819277108433735, + "acc_stderr": 0.038899512528272166, + "acc_norm": 0.4819277108433735, + "acc_norm_stderr": 0.038899512528272166 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.594855305466238, + "acc_stderr": 0.02788238379132595, + "acc_norm": 0.594855305466238, + "acc_norm_stderr": 0.02788238379132595 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5739910313901345, + "acc_stderr": 0.033188332862172806, + "acc_norm": 0.5739910313901345, + "acc_norm_stderr": 0.033188332862172806 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5877862595419847, + "acc_stderr": 0.043171711948702556, + "acc_norm": 0.5877862595419847, + "acc_norm_stderr": 0.043171711948702556 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.49, 
+ "acc_stderr": 0.05024183937956914, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956914 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7171717171717171, + "acc_stderr": 0.03208779558786753, + "acc_norm": 0.7171717171717171, + "acc_norm_stderr": 0.03208779558786753 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5448275862068965, + "acc_stderr": 0.04149886942192118, + "acc_norm": 0.5448275862068965, + "acc_norm_stderr": 0.04149886942192118 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.39215686274509803, + "acc_stderr": 0.04858083574266345, + "acc_norm": 0.39215686274509803, + "acc_norm_stderr": 0.04858083574266345 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6512605042016807, + "acc_stderr": 0.030956636328566548, + "acc_norm": 0.6512605042016807, + "acc_norm_stderr": 0.030956636328566548 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5666666666666667, + "acc_stderr": 0.025124653525885103, + "acc_norm": 0.5666666666666667, + "acc_norm_stderr": 0.025124653525885103 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.047609522856952344, + "acc_norm": 0.34, + "acc_norm_stderr": 0.047609522856952344 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6481481481481481, + "acc_stderr": 0.04616631111801713, + "acc_norm": 0.6481481481481481, + "acc_norm_stderr": 0.04616631111801713 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4039408866995074, + "acc_stderr": 0.0345245390382204, + "acc_norm": 0.4039408866995074, + "acc_norm_stderr": 0.0345245390382204 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6, + "acc_stderr": 0.02786932057166462, + "acc_norm": 0.6, + "acc_norm_stderr": 0.02786932057166462 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7991452991452992, 
+ "acc_stderr": 0.026246772946890488, + "acc_norm": 0.7991452991452992, + "acc_norm_stderr": 0.026246772946890488 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5622641509433962, + "acc_stderr": 0.03053333843046752, + "acc_norm": 0.5622641509433962, + "acc_norm_stderr": 0.03053333843046752 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5909090909090909, + "acc_stderr": 0.04709306978661895, + "acc_norm": 0.5909090909090909, + "acc_norm_stderr": 0.04709306978661895 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.36666666666666664, + "acc_stderr": 0.029381620726465073, + "acc_norm": 0.36666666666666664, + "acc_norm_stderr": 0.029381620726465073 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.41721854304635764, + "acc_stderr": 0.04026141497634612, + "acc_norm": 0.41721854304635764, + "acc_norm_stderr": 0.04026141497634612 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6467661691542289, + "acc_stderr": 0.03379790611796777, + "acc_norm": 0.6467661691542289, + "acc_norm_stderr": 0.03379790611796777 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5144508670520231, + "acc_stderr": 0.03810871630454764, + "acc_norm": 0.5144508670520231, + "acc_norm_stderr": 0.03810871630454764 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.4603174603174603, + "acc_stderr": 0.025670080636909193, + "acc_norm": 0.4603174603174603, + "acc_norm_stderr": 0.025670080636909193 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5277777777777778, + "acc_stderr": 0.04174752578923185, + "acc_norm": 0.5277777777777778, + "acc_norm_stderr": 0.04174752578923185 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.73, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.73, + "acc_norm_stderr": 0.04461960433384741 + }, + 
"harness|ko_mmlu_moral_disputes|5": { + "acc": 0.6127167630057804, + "acc_stderr": 0.026226158605124655, + "acc_norm": 0.6127167630057804, + "acc_norm_stderr": 0.026226158605124655 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.49693251533742333, + "acc_stderr": 0.03928297078179662, + "acc_norm": 0.49693251533742333, + "acc_norm_stderr": 0.03928297078179662 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.595679012345679, + "acc_stderr": 0.027306625297327688, + "acc_norm": 0.595679012345679, + "acc_norm_stderr": 0.027306625297327688 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7305699481865285, + "acc_stderr": 0.032018671228777947, + "acc_norm": 0.7305699481865285, + "acc_norm_stderr": 0.032018671228777947 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.4649122807017544, + "acc_stderr": 0.046920083813689104, + "acc_norm": 0.4649122807017544, + "acc_norm_stderr": 0.046920083813689104 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.689908256880734, + "acc_stderr": 0.01983084968443975, + "acc_norm": 0.689908256880734, + "acc_norm_stderr": 0.01983084968443975 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4126984126984127, + "acc_stderr": 0.04403438954768177, + "acc_norm": 0.4126984126984127, + "acc_norm_stderr": 0.04403438954768177 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.565359477124183, + "acc_stderr": 0.028384256704883037, + "acc_norm": 0.565359477124183, + "acc_norm_stderr": 0.028384256704883037 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.71900826446281, + "acc_stderr": 0.04103203830514511, + "acc_norm": 0.71900826446281, + "acc_norm_stderr": 
0.04103203830514511 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5921052631578947, + "acc_stderr": 0.039993097127774734, + "acc_norm": 0.5921052631578947, + "acc_norm_stderr": 0.039993097127774734 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5261437908496732, + "acc_stderr": 0.020200164564804588, + "acc_norm": 0.5261437908496732, + "acc_norm_stderr": 0.020200164564804588 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3971631205673759, + "acc_stderr": 0.029189805673587105, + "acc_norm": 0.3971631205673759, + "acc_norm_stderr": 0.029189805673587105 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4375, + "acc_stderr": 0.04708567521880525, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.04708567521880525 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5138888888888888, + "acc_stderr": 0.03408655867977748, + "acc_norm": 0.5138888888888888, + "acc_norm_stderr": 0.03408655867977748 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.21899441340782122, + "acc_stderr": 0.01383167668730321, + "acc_norm": 0.21899441340782122, + "acc_norm_stderr": 0.01383167668730321 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956913, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956913 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.71, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.71, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4963235294117647, + "acc_stderr": 0.030372015885428188, + "acc_norm": 0.4963235294117647, + "acc_norm_stderr": 0.030372015885428188 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5755102040816327, + "acc_stderr": 0.03164209487942942, + "acc_norm": 0.5755102040816327, + "acc_norm_stderr": 0.03164209487942942 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7679324894514767, + "acc_stderr": 0.02747974455080851, + 
"acc_norm": 0.7679324894514767, + "acc_norm_stderr": 0.02747974455080851 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.4015645371577575, + "acc_stderr": 0.012520315120147132, + "acc_norm": 0.4015645371577575, + "acc_norm_stderr": 0.012520315120147132 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6568627450980392, + "acc_stderr": 0.03332139944668086, + "acc_norm": 0.6568627450980392, + "acc_norm_stderr": 0.03332139944668086 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6727272727272727, + "acc_stderr": 0.03663974994391242, + "acc_norm": 0.6727272727272727, + "acc_norm_stderr": 0.03663974994391242 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3292533659730722, + "mc1_stderr": 0.016451264440068242, + "mc2": 0.49232422202678827, + "mc2_stderr": 0.0154380637666786 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5749704840613932, + "acc_stderr": 0.016996016308362887, + "acc_norm": 0.6221959858323495, + "acc_norm_stderr": 0.01666908284069498 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + 
"harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "jjourney1125/M-SOLAR-10.7B-v1.0", + "model_sha": "744f2fd6625d908aa47f5adeaa127fd661198054", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + 
"override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/jjourney1125/llama2-13b-v1/result_2023-12-22 04:35:39.json b/jjourney1125/llama2-13b-v1/result_2023-12-22 04:35:39.json new file mode 100644 index 0000000000000000000000000000000000000000..1a63504dd7ea8333bcb21511d7e3972751251a61 --- /dev/null +++ b/jjourney1125/llama2-13b-v1/result_2023-12-22 04:35:39.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.38993174061433444, + "acc_stderr": 0.014252959848892894, + "acc_norm": 0.4308873720136519, + "acc_norm_stderr": 0.01447113339264247 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4091814379605656, + "acc_stderr": 0.004906779523192673, + "acc_norm": 0.5419239195379406, + "acc_norm_stderr": 0.004972210244020565 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.47953216374269003, + "acc_stderr": 0.0383161053282193, + "acc_norm": 0.47953216374269003, + "acc_norm_stderr": 0.0383161053282193 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5728155339805825, + "acc_stderr": 0.04897957737781168, + "acc_norm": 0.5728155339805825, + "acc_norm_stderr": 0.04897957737781168 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5287356321839081, + "acc_stderr": 0.01785041079438017, + "acc_norm": 0.5287356321839081, + "acc_norm_stderr": 0.01785041079438017 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04072314811876837, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04072314811876837 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3702127659574468, + "acc_stderr": 0.031565646822367836, + "acc_norm": 0.3702127659574468, + "acc_norm_stderr": 0.031565646822367836 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39759036144578314, + "acc_stderr": 0.038099730845402184, + 
"acc_norm": 0.39759036144578314, + "acc_norm_stderr": 0.038099730845402184 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4790996784565916, + "acc_stderr": 0.028373270961069414, + "acc_norm": 0.4790996784565916, + "acc_norm_stderr": 0.028373270961069414 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.49327354260089684, + "acc_stderr": 0.03355476596234354, + "acc_norm": 0.49327354260089684, + "acc_norm_stderr": 0.03355476596234354 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5419847328244275, + "acc_stderr": 0.04369802690578756, + "acc_norm": 0.5419847328244275, + "acc_norm_stderr": 0.04369802690578756 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.035476014940069384, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.035476014940069384 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3931034482758621, + "acc_stderr": 0.040703290137070705, + "acc_norm": 0.3931034482758621, + "acc_norm_stderr": 0.040703290137070705 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.30392156862745096, + "acc_stderr": 0.045766654032077615, + "acc_norm": 0.30392156862745096, + "acc_norm_stderr": 0.045766654032077615 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5126050420168067, + "acc_stderr": 0.032468167657521745, + "acc_norm": 0.5126050420168067, + "acc_norm_stderr": 0.032468167657521745 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4307692307692308, + "acc_stderr": 0.02510682066053975, + "acc_norm": 0.4307692307692308, + "acc_norm_stderr": 0.02510682066053975 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.39, + 
"acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.048262172941398944, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.048262172941398944 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.35960591133004927, + "acc_stderr": 0.033764582465095665, + "acc_norm": 0.35960591133004927, + "acc_norm_stderr": 0.033764582465095665 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4806451612903226, + "acc_stderr": 0.0284226874043121, + "acc_norm": 0.4806451612903226, + "acc_norm_stderr": 0.0284226874043121 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6410256410256411, + "acc_stderr": 0.03142616993791924, + "acc_norm": 0.6410256410256411, + "acc_norm_stderr": 0.03142616993791924 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4830188679245283, + "acc_stderr": 0.030755120364119905, + "acc_norm": 0.4830188679245283, + "acc_norm_stderr": 0.030755120364119905 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.45454545454545453, + "acc_stderr": 0.04769300568972744, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.04769300568972744 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2851851851851852, + "acc_stderr": 0.027528599210340492, + "acc_norm": 0.2851851851851852, + "acc_norm_stderr": 0.027528599210340492 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3509933774834437, + "acc_stderr": 0.03896981964257375, + "acc_norm": 0.3509933774834437, + "acc_norm_stderr": 0.03896981964257375 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6019900497512438, + "acc_stderr": 0.034611994290400135, + "acc_norm": 0.6019900497512438, + "acc_norm_stderr": 0.034611994290400135 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3988439306358382, + "acc_stderr": 0.03733626655383509, + "acc_norm": 0.3988439306358382, + "acc_norm_stderr": 0.03733626655383509 + }, + 
"harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.29365079365079366, + "acc_stderr": 0.023456037383982026, + "acc_norm": 0.29365079365079366, + "acc_norm_stderr": 0.023456037383982026 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3472222222222222, + "acc_stderr": 0.03981240543717861, + "acc_norm": 0.3472222222222222, + "acc_norm_stderr": 0.03981240543717861 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.57, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.57, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.49710982658959535, + "acc_stderr": 0.02691864538323901, + "acc_norm": 0.49710982658959535, + "acc_norm_stderr": 0.02691864538323901 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4785276073619632, + "acc_stderr": 0.03924746876751129, + "acc_norm": 0.4785276073619632, + "acc_norm_stderr": 0.03924746876751129 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.49691358024691357, + "acc_stderr": 0.027820214158594377, + "acc_norm": 0.49691358024691357, + "acc_norm_stderr": 0.027820214158594377 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.533678756476684, + "acc_stderr": 0.036002440698671784, + "acc_norm": 0.533678756476684, + "acc_norm_stderr": 0.036002440698671784 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.03999423879281335, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.03999423879281335 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5357798165137615, + "acc_stderr": 0.021382364775701906, + "acc_norm": 0.5357798165137615, + "acc_norm_stderr": 
0.021382364775701906 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.23809523809523808, + "acc_stderr": 0.03809523809523811, + "acc_norm": 0.23809523809523808, + "acc_norm_stderr": 0.03809523809523811 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.49019607843137253, + "acc_stderr": 0.02862441255016795, + "acc_norm": 0.49019607843137253, + "acc_norm_stderr": 0.02862441255016795 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6446280991735537, + "acc_stderr": 0.04369236326573981, + "acc_norm": 0.6446280991735537, + "acc_norm_stderr": 0.04369236326573981 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.47368421052631576, + "acc_stderr": 0.040633027314866704, + "acc_norm": 0.47368421052631576, + "acc_norm_stderr": 0.040633027314866704 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.019659922493623343, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.019659922493623343 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32269503546099293, + "acc_stderr": 0.02788913930053478, + "acc_norm": 0.32269503546099293, + "acc_norm_stderr": 0.02788913930053478 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25, + "acc_stderr": 0.04109974682633932, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04109974682633932 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.03256850570293647, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.03256850570293647 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.28156424581005585, + "acc_stderr": 0.015042290171866113, + "acc_norm": 0.28156424581005585, + "acc_norm_stderr": 0.015042290171866113 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + 
"acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.36764705882352944, + "acc_stderr": 0.029289413409403192, + "acc_norm": 0.36764705882352944, + "acc_norm_stderr": 0.029289413409403192 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.47346938775510206, + "acc_stderr": 0.03196412734523272, + "acc_norm": 0.47346938775510206, + "acc_norm_stderr": 0.03196412734523272 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6244725738396625, + "acc_stderr": 0.03152256243091157, + "acc_norm": 0.6244725738396625, + "acc_norm_stderr": 0.03152256243091157 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3135593220338983, + "acc_stderr": 0.01184923429145932, + "acc_norm": 0.3135593220338983, + "acc_norm_stderr": 0.01184923429145932 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.47058823529411764, + "acc_stderr": 0.03503235296367993, + "acc_norm": 0.47058823529411764, + "acc_norm_stderr": 0.03503235296367993 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.503030303030303, + "acc_stderr": 0.03904272341431856, + "acc_norm": 0.503030303030303, + "acc_norm_stderr": 0.03904272341431856 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.29253365973072215, + "mc1_stderr": 0.015925597445286165, + "mc2": 0.44709065910683965, + "mc2_stderr": 0.015327918263132853 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5430932703659976, + "acc_stderr": 0.017126389093086777, + "acc_norm": 0.5985832349468713, + "acc_norm_stderr": 0.01685290785872906 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + 
"harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + 
"harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "jjourney1125/llama2-13b-v1", + "model_sha": "202ead34796d321bc87053c81cc88026f8cac022", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/jjourney1125/llama2-dev/result_2023-12-16 13:31:05.json b/jjourney1125/llama2-dev/result_2023-12-16 13:31:05.json new file mode 100644 index 0000000000000000000000000000000000000000..0bdc6a8d489672f4ff1399c7674bbded8a757227 --- /dev/null +++ b/jjourney1125/llama2-dev/result_2023-12-16 13:31:05.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.32593856655290104, + "acc_stderr": 0.013697432466693246, + "acc_norm": 0.38139931740614336, + "acc_norm_stderr": 0.014194389086685272 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3543118900617407, + "acc_stderr": 0.0047732675101127406, + "acc_norm": 0.4435371439952201, + "acc_norm_stderr": 0.004957863944093121 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.45614035087719296, + "acc_stderr": 0.03820042586602966, + "acc_norm": 0.45614035087719296, + "acc_norm_stderr": 0.03820042586602966 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5339805825242718, + "acc_stderr": 0.04939291447273481, + "acc_norm": 0.5339805825242718, + "acc_norm_stderr": 0.04939291447273481 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.48020434227330777, + "acc_stderr": 0.017865944827291615, + "acc_norm": 
0.48020434227330777, + "acc_norm_stderr": 0.017865944827291615 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.32592592592592595, + "acc_stderr": 0.04049122041702506, + "acc_norm": 0.32592592592592595, + "acc_norm_stderr": 0.04049122041702506 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847415, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847415 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4297872340425532, + "acc_stderr": 0.03236214467715564, + "acc_norm": 0.4297872340425532, + "acc_norm_stderr": 0.03236214467715564 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3795180722891566, + "acc_stderr": 0.037777988227480165, + "acc_norm": 0.3795180722891566, + "acc_norm_stderr": 0.037777988227480165 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.43729903536977494, + "acc_stderr": 0.02817391776176287, + "acc_norm": 0.43729903536977494, + "acc_norm_stderr": 0.02817391776176287 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4170403587443946, + "acc_stderr": 0.03309266936071721, + "acc_norm": 0.4170403587443946, + "acc_norm_stderr": 0.03309266936071721 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4198473282442748, + "acc_stderr": 0.04328577215262972, + "acc_norm": 0.4198473282442748, + "acc_norm_stderr": 0.04328577215262972 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.48484848484848486, + "acc_stderr": 0.03560716516531061, + "acc_norm": 0.48484848484848486, + "acc_norm_stderr": 0.03560716516531061 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.35172413793103446, + "acc_stderr": 0.03979236637497411, + "acc_norm": 0.35172413793103446, + "acc_norm_stderr": 0.03979236637497411 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.04220773659171453, + 
"acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.04220773659171453 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4327731092436975, + "acc_stderr": 0.032183581077426124, + "acc_norm": 0.4327731092436975, + "acc_norm_stderr": 0.032183581077426124 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3923076923076923, + "acc_stderr": 0.02475600038213094, + "acc_norm": 0.3923076923076923, + "acc_norm_stderr": 0.02475600038213094 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5370370370370371, + "acc_stderr": 0.04820403072760628, + "acc_norm": 0.5370370370370371, + "acc_norm_stderr": 0.04820403072760628 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.29064039408866993, + "acc_stderr": 0.031947400722655395, + "acc_norm": 0.29064039408866993, + "acc_norm_stderr": 0.031947400722655395 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.432258064516129, + "acc_stderr": 0.028181739720019416, + "acc_norm": 0.432258064516129, + "acc_norm_stderr": 0.028181739720019416 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6196581196581197, + "acc_stderr": 0.03180425204384099, + "acc_norm": 0.6196581196581197, + "acc_norm_stderr": 0.03180425204384099 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4528301886792453, + "acc_stderr": 0.030635627957961827, + "acc_norm": 0.4528301886792453, + "acc_norm_stderr": 0.030635627957961827 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.04769300568972743, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.04769300568972743 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 
0.2851851851851852, + "acc_stderr": 0.027528599210340492, + "acc_norm": 0.2851851851851852, + "acc_norm_stderr": 0.027528599210340492 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2582781456953642, + "acc_stderr": 0.035737053147634576, + "acc_norm": 0.2582781456953642, + "acc_norm_stderr": 0.035737053147634576 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5472636815920398, + "acc_stderr": 0.03519702717576915, + "acc_norm": 0.5472636815920398, + "acc_norm_stderr": 0.03519702717576915 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.36416184971098264, + "acc_stderr": 0.03669072477416907, + "acc_norm": 0.36416184971098264, + "acc_norm_stderr": 0.03669072477416907 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.02306818884826111, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.02306818884826111 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.040166600304512336, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.040166600304512336 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542129, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542129 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.55, + "acc_stderr": 0.05, + "acc_norm": 0.55, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5028901734104047, + "acc_stderr": 0.02691864538323901, + "acc_norm": 0.5028901734104047, + "acc_norm_stderr": 0.02691864538323901 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.36809815950920244, + "acc_stderr": 0.03789213935838396, + "acc_norm": 0.36809815950920244, + "acc_norm_stderr": 0.03789213935838396 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4660493827160494, + "acc_stderr": 0.027756535257347663, + "acc_norm": 0.4660493827160494, + "acc_norm_stderr": 0.027756535257347663 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, 
+ "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.45595854922279794, + "acc_stderr": 0.03594413711272438, + "acc_norm": 0.45595854922279794, + "acc_norm_stderr": 0.03594413711272438 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.21929824561403508, + "acc_stderr": 0.03892431106518753, + "acc_norm": 0.21929824561403508, + "acc_norm_stderr": 0.03892431106518753 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.44954128440366975, + "acc_stderr": 0.021327881417823373, + "acc_norm": 0.44954128440366975, + "acc_norm_stderr": 0.021327881417823373 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3253968253968254, + "acc_stderr": 0.041905964388711366, + "acc_norm": 0.3253968253968254, + "acc_norm_stderr": 0.041905964388711366 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4117647058823529, + "acc_stderr": 0.028180596328259287, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.028180596328259287 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.043913262867240704, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.043913262867240704 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.35526315789473684, + "acc_stderr": 0.038947344870133176, + "acc_norm": 0.35526315789473684, + "acc_norm_stderr": 0.038947344870133176 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3349673202614379, + "acc_stderr": 0.01909422816700031, + "acc_norm": 0.3349673202614379, + "acc_norm_stderr": 0.01909422816700031 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.02812163604063989, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.02812163604063989 + }, + 
"harness|ko_mmlu_machine_learning|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.04287858751340456, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.04287858751340456 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.03214952147802747, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.03214952147802747 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.25921787709497207, + "acc_stderr": 0.014655780837497722, + "acc_norm": 0.25921787709497207, + "acc_norm_stderr": 0.014655780837497722 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.34191176470588236, + "acc_stderr": 0.02881472242225417, + "acc_norm": 0.34191176470588236, + "acc_norm_stderr": 0.02881472242225417 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4448979591836735, + "acc_stderr": 0.031814251181977865, + "acc_norm": 0.4448979591836735, + "acc_norm_stderr": 0.031814251181977865 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5822784810126582, + "acc_stderr": 0.032103530322412685, + "acc_norm": 0.5822784810126582, + "acc_norm_stderr": 0.032103530322412685 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.363754889178618, + "acc_stderr": 0.012286991879902887, + "acc_norm": 0.363754889178618, + "acc_norm_stderr": 0.012286991879902887 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5, + "acc_stderr": 0.03509312031717982, + "acc_norm": 0.5, + "acc_norm_stderr": 0.03509312031717982 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5151515151515151, + "acc_stderr": 0.03902551007374448, + "acc_norm": 
0.5151515151515151, + "acc_norm_stderr": 0.03902551007374448 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2839657282741738, + "mc1_stderr": 0.015785370858396708, + "mc2": 0.444838685797901, + "mc2_stderr": 0.015532530203119514 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3742621015348288, + "acc_stderr": 0.016637917789798735, + "acc_norm": 0.4179456906729634, + "acc_norm_stderr": 0.016957292005279723 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + 
"harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "jjourney1125/llama2-dev", + "model_sha": "66931bf246639e144dcd1e8b255a2222e210e2f0", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/jojo0217/ChatSKKU5.8B/result_2023-09-27 14:33:28.json b/jojo0217/ChatSKKU5.8B/result_2023-09-27 14:33:28.json new file mode 100644 index 0000000000000000000000000000000000000000..b7adbdeaed2eec53b2edde597bb693f434653094 --- /dev/null +++ b/jojo0217/ChatSKKU5.8B/result_2023-09-27 14:33:28.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.28924914675767915, + "acc_stderr": 0.013250012579393443, + "acc_norm": 0.3293515358361775, + "acc_norm_stderr": 0.013734057652635474 + }, + 
"harness|ko_hellaswag|10": { + "acc": 0.3754232224656443, + "acc_stderr": 0.004832423630593185, + "acc_norm": 0.48028281218880703, + "acc_norm_stderr": 0.004985900172317694 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.17543859649122806, + "acc_stderr": 0.029170885500727665, + "acc_norm": 0.17543859649122806, + "acc_norm_stderr": 0.029170885500727665 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.36893203883495146, + "acc_stderr": 0.04777615181156739, + "acc_norm": 0.36893203883495146, + "acc_norm_stderr": 0.04777615181156739 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.20434227330779056, + "acc_stderr": 0.014419123980931904, + "acc_norm": 0.20434227330779056, + "acc_norm_stderr": 0.014419123980931904 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.22962962962962963, + "acc_stderr": 0.036333844140734636, + "acc_norm": 0.22962962962962963, + "acc_norm_stderr": 0.036333844140734636 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.17, + "acc_stderr": 0.03775251680686371, + "acc_norm": 0.17, + "acc_norm_stderr": 0.03775251680686371 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.20851063829787234, + "acc_stderr": 0.026556982117838746, + "acc_norm": 0.20851063829787234, + "acc_norm_stderr": 0.026556982117838746 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.1927710843373494, + "acc_stderr": 0.030709824050565264, + "acc_norm": 0.1927710843373494, + "acc_norm_stderr": 0.030709824050565264 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.24115755627009647, + "acc_stderr": 0.024296594034763426, + "acc_norm": 0.24115755627009647, + "acc_norm_stderr": 0.024296594034763426 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.09865470852017937, + "acc_stderr": 0.020013729184919227, + "acc_norm": 0.09865470852017937, + "acc_norm_stderr": 0.020013729184919227 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2824427480916031, + "acc_stderr": 0.03948406125768361, + "acc_norm": 0.2824427480916031, + "acc_norm_stderr": 
0.03948406125768361 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3383838383838384, + "acc_stderr": 0.033711241426263014, + "acc_norm": 0.3383838383838384, + "acc_norm_stderr": 0.033711241426263014 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.23448275862068965, + "acc_stderr": 0.035306258743465914, + "acc_norm": 0.23448275862068965, + "acc_norm_stderr": 0.035306258743465914 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3627450980392157, + "acc_stderr": 0.04784060704105653, + "acc_norm": 0.3627450980392157, + "acc_norm_stderr": 0.04784060704105653 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3487394957983193, + "acc_stderr": 0.030956636328566548, + "acc_norm": 0.3487394957983193, + "acc_norm_stderr": 0.030956636328566548 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.36923076923076925, + "acc_stderr": 0.02446861524147891, + "acc_norm": 0.36923076923076925, + "acc_norm_stderr": 0.02446861524147891 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.18, + "acc_stderr": 0.03861229196653694, + "acc_norm": 0.18, + "acc_norm_stderr": 0.03861229196653694 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.19, + "acc_stderr": 0.03942772444036623, + "acc_norm": 0.19, + "acc_norm_stderr": 0.03942772444036623 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.21296296296296297, + "acc_stderr": 0.03957835471980981, + "acc_norm": 0.21296296296296297, + "acc_norm_stderr": 0.03957835471980981 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.27586206896551724, + "acc_stderr": 0.031447125816782426, + "acc_norm": 0.27586206896551724, + "acc_norm_stderr": 0.031447125816782426 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3161290322580645, + "acc_stderr": 0.026450874489042764, + "acc_norm": 
0.3161290322580645, + "acc_norm_stderr": 0.026450874489042764 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.19658119658119658, + "acc_stderr": 0.02603538609895129, + "acc_norm": 0.19658119658119658, + "acc_norm_stderr": 0.02603538609895129 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2981132075471698, + "acc_stderr": 0.028152837942493864, + "acc_norm": 0.2981132075471698, + "acc_norm_stderr": 0.028152837942493864 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03955932861795833, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03955932861795833 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25555555555555554, + "acc_stderr": 0.02659393910184408, + "acc_norm": 0.25555555555555554, + "acc_norm_stderr": 0.02659393910184408 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33774834437086093, + "acc_stderr": 0.03861557546255168, + "acc_norm": 0.33774834437086093, + "acc_norm_stderr": 0.03861557546255168 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.2736318407960199, + "acc_stderr": 0.031524391865554016, + "acc_norm": 0.2736318407960199, + "acc_norm_stderr": 0.031524391865554016 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3352601156069364, + "acc_stderr": 0.03599586301247078, + "acc_norm": 0.3352601156069364, + "acc_norm_stderr": 0.03599586301247078 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.24074074074074073, + "acc_stderr": 0.02201908001221789, + "acc_norm": 0.24074074074074073, + "acc_norm_stderr": 0.02201908001221789 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2708333333333333, + "acc_stderr": 0.03716177437566017, + "acc_norm": 0.2708333333333333, + "acc_norm_stderr": 0.03716177437566017 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.26, + 
"acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2138728323699422, + "acc_stderr": 0.022075709251757173, + "acc_norm": 0.2138728323699422, + "acc_norm_stderr": 0.022075709251757173 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2331288343558282, + "acc_stderr": 0.0332201579577674, + "acc_norm": 0.2331288343558282, + "acc_norm_stderr": 0.0332201579577674 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2191358024691358, + "acc_stderr": 0.023016705640262206, + "acc_norm": 0.2191358024691358, + "acc_norm_stderr": 0.023016705640262206 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.36787564766839376, + "acc_stderr": 0.034801756684660366, + "acc_norm": 0.36787564766839376, + "acc_norm_stderr": 0.034801756684660366 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.040493392977481404, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.040493392977481404 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3504587155963303, + "acc_stderr": 0.02045607759982446, + "acc_norm": 0.3504587155963303, + "acc_norm_stderr": 0.02045607759982446 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3412698412698413, + "acc_stderr": 0.04240799327574924, + "acc_norm": 0.3412698412698413, + "acc_norm_stderr": 0.04240799327574924 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2973856209150327, + "acc_stderr": 0.026173908506718576, + "acc_norm": 0.2973856209150327, + "acc_norm_stderr": 0.026173908506718576 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 
0.21487603305785125, + "acc_stderr": 0.037494924487096966, + "acc_norm": 0.21487603305785125, + "acc_norm_stderr": 0.037494924487096966 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3355263157894737, + "acc_stderr": 0.03842498559395268, + "acc_norm": 0.3355263157894737, + "acc_norm_stderr": 0.03842498559395268 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2173202614379085, + "acc_stderr": 0.016684820929148598, + "acc_norm": 0.2173202614379085, + "acc_norm_stderr": 0.016684820929148598 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.24113475177304963, + "acc_stderr": 0.02551873104953776, + "acc_norm": 0.24113475177304963, + "acc_norm_stderr": 0.02551873104953776 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.17857142857142858, + "acc_stderr": 0.036352091215778065, + "acc_norm": 0.17857142857142858, + "acc_norm_stderr": 0.036352091215778065 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.0340470532865388, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.0340470532865388 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27262569832402234, + "acc_stderr": 0.014893391735249608, + "acc_norm": 0.27262569832402234, + "acc_norm_stderr": 0.014893391735249608 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4485294117647059, + "acc_stderr": 0.030211479609121593, + "acc_norm": 0.4485294117647059, + "acc_norm_stderr": 0.030211479609121593 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4, + "acc_stderr": 0.03136250240935892, + "acc_norm": 0.4, + "acc_norm_stderr": 0.03136250240935892 + }, + 
"harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.20253164556962025, + "acc_stderr": 0.026160568246601464, + "acc_norm": 0.20253164556962025, + "acc_norm_stderr": 0.026160568246601464 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.242503259452412, + "acc_stderr": 0.010946570966348775, + "acc_norm": 0.242503259452412, + "acc_norm_stderr": 0.010946570966348775 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.030587591351604246, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.030587591351604246 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2606060606060606, + "acc_stderr": 0.03427743175816524, + "acc_norm": 0.2606060606060606, + "acc_norm_stderr": 0.03427743175816524 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2692778457772338, + "mc1_stderr": 0.015528566637087298, + "mc2": 0.41570723548070315, + "mc2_stderr": 0.014870707305351522 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.26564344746162927, + "acc_stderr": 0.015185107107791248, + "acc_norm": 0.3435655253837072, + "acc_norm_stderr": 0.016327334806429138 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 
1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "jojo0217/ChatSKKU5.8B", + "model_sha": "f10a62ae90265032a2fc60b9484a501028b539bb", + "model_dtype": "torch.float16", + 
"lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/jondurbin/bagel-8b-v1.0/result_2024-05-18 14:17:55.json b/jondurbin/bagel-8b-v1.0/result_2024-05-18 14:17:55.json new file mode 100644 index 0000000000000000000000000000000000000000..f0d9ff9a84aed4067d67e52c657733efddf93a33 --- /dev/null +++ b/jondurbin/bagel-8b-v1.0/result_2024-05-18 14:17:55.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.40187713310580203, + "acc_stderr": 0.014327268614578276, + "acc_norm": 0.4539249146757679, + "acc_norm_stderr": 0.014549221105171864 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3988249352718582, + "acc_stderr": 0.004886559008754986, + "acc_norm": 0.5319657438757219, + "acc_norm_stderr": 0.00497957376557586 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6608187134502924, + "acc_stderr": 0.03631053496488905, + "acc_norm": 0.6608187134502924, + "acc_norm_stderr": 0.03631053496488905 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.7378640776699029, + "acc_stderr": 0.043546310772605956, + "acc_norm": 0.7378640776699029, + "acc_norm_stderr": 0.043546310772605956 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5747126436781609, + "acc_stderr": 0.017679225489431457, + "acc_norm": 0.5747126436781609, + "acc_norm_stderr": 0.017679225489431457 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4222222222222222, + "acc_stderr": 0.04266763404099582, + "acc_norm": 0.4222222222222222, + "acc_norm_stderr": 0.04266763404099582 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206824, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206824 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4978723404255319, + "acc_stderr": 0.032685726586674915, + "acc_norm": 0.4978723404255319, + "acc_norm_stderr": 0.032685726586674915 + }, + "harness|ko_mmlu_virology|5": { + 
"acc": 0.4578313253012048, + "acc_stderr": 0.03878626771002361, + "acc_norm": 0.4578313253012048, + "acc_norm_stderr": 0.03878626771002361 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5755627009646302, + "acc_stderr": 0.028071928247946205, + "acc_norm": 0.5755627009646302, + "acc_norm_stderr": 0.028071928247946205 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5426008968609866, + "acc_stderr": 0.033435777055830646, + "acc_norm": 0.5426008968609866, + "acc_norm_stderr": 0.033435777055830646 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5572519083969466, + "acc_stderr": 0.043564472026650695, + "acc_norm": 0.5572519083969466, + "acc_norm_stderr": 0.043564472026650695 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956913, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956913 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6717171717171717, + "acc_stderr": 0.03345678422756777, + "acc_norm": 0.6717171717171717, + "acc_norm_stderr": 0.03345678422756777 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4689655172413793, + "acc_stderr": 0.04158632762097828, + "acc_norm": 0.4689655172413793, + "acc_norm_stderr": 0.04158632762097828 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.35294117647058826, + "acc_stderr": 0.04755129616062947, + "acc_norm": 0.35294117647058826, + "acc_norm_stderr": 0.04755129616062947 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.592436974789916, + "acc_stderr": 0.03191863374478466, + "acc_norm": 0.592436974789916, + "acc_norm_stderr": 0.03191863374478466 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.517948717948718, + "acc_stderr": 0.025334667080954897, + "acc_norm": 0.517948717948718, + "acc_norm_stderr": 0.025334667080954897 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.65, + "acc_stderr": 0.04793724854411021, + "acc_norm": 0.65, + "acc_norm_stderr": 0.04793724854411021 + }, + 
"harness|ko_mmlu_global_facts|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939098, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939098 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5925925925925926, + "acc_stderr": 0.047500773411999854, + "acc_norm": 0.5925925925925926, + "acc_norm_stderr": 0.047500773411999854 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.35467980295566504, + "acc_stderr": 0.033661244890514495, + "acc_norm": 0.35467980295566504, + "acc_norm_stderr": 0.033661244890514495 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5645161290322581, + "acc_stderr": 0.02820622559150274, + "acc_norm": 0.5645161290322581, + "acc_norm_stderr": 0.02820622559150274 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7735042735042735, + "acc_stderr": 0.027421007295392923, + "acc_norm": 0.7735042735042735, + "acc_norm_stderr": 0.027421007295392923 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5358490566037736, + "acc_stderr": 0.030693675018458006, + "acc_norm": 0.5358490566037736, + "acc_norm_stderr": 0.030693675018458006 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6, + "acc_stderr": 0.0469237132203465, + "acc_norm": 0.6, + "acc_norm_stderr": 0.0469237132203465 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3592592592592593, + "acc_stderr": 0.029252905927251976, + "acc_norm": 0.3592592592592593, + "acc_norm_stderr": 0.029252905927251976 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.32450331125827814, + "acc_stderr": 0.03822746937658752, + "acc_norm": 0.32450331125827814, + "acc_norm_stderr": 0.03822746937658752 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6766169154228856, + "acc_stderr": 0.03307615947979035, + "acc_norm": 0.6766169154228856, + "acc_norm_stderr": 0.03307615947979035 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4508670520231214, + "acc_stderr": 0.03794012674697029, + "acc_norm": 0.4508670520231214, + "acc_norm_stderr": 
0.03794012674697029 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3994708994708995, + "acc_stderr": 0.02522545028406788, + "acc_norm": 0.3994708994708995, + "acc_norm_stderr": 0.02522545028406788 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5069444444444444, + "acc_stderr": 0.04180806750294938, + "acc_norm": 0.5069444444444444, + "acc_norm_stderr": 0.04180806750294938 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.41, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.41, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.72, + "acc_stderr": 0.045126085985421255, + "acc_norm": 0.72, + "acc_norm_stderr": 0.045126085985421255 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5404624277456648, + "acc_stderr": 0.026830805998952233, + "acc_norm": 0.5404624277456648, + "acc_norm_stderr": 0.026830805998952233 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.49079754601226994, + "acc_stderr": 0.039277056007874414, + "acc_norm": 0.49079754601226994, + "acc_norm_stderr": 0.039277056007874414 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5246913580246914, + "acc_stderr": 0.02778680093142745, + "acc_norm": 0.5246913580246914, + "acc_norm_stderr": 0.02778680093142745 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6476683937823834, + "acc_stderr": 0.03447478286414357, + "acc_norm": 0.6476683937823834, + "acc_norm_stderr": 0.03447478286414357 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.41228070175438597, + "acc_stderr": 0.046306532033665956, + "acc_norm": 0.41228070175438597, + "acc_norm_stderr": 0.046306532033665956 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6256880733944954, + "acc_stderr": 0.020748959408988313, + "acc_norm": 0.6256880733944954, 
+ "acc_norm_stderr": 0.020748959408988313 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.47619047619047616, + "acc_stderr": 0.04467062628403273, + "acc_norm": 0.47619047619047616, + "acc_norm_stderr": 0.04467062628403273 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5588235294117647, + "acc_stderr": 0.028431095444176643, + "acc_norm": 0.5588235294117647, + "acc_norm_stderr": 0.028431095444176643 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.62, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.62, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7107438016528925, + "acc_stderr": 0.04139112727635463, + "acc_norm": 0.7107438016528925, + "acc_norm_stderr": 0.04139112727635463 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5723684210526315, + "acc_stderr": 0.04026097083296561, + "acc_norm": 0.5723684210526315, + "acc_norm_stderr": 0.04026097083296561 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.46078431372549017, + "acc_stderr": 0.0201655233139079, + "acc_norm": 0.46078431372549017, + "acc_norm_stderr": 0.0201655233139079 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3900709219858156, + "acc_stderr": 0.029097675599463926, + "acc_norm": 0.3900709219858156, + "acc_norm_stderr": 0.029097675599463926 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.5, + "acc_stderr": 0.04745789978762494, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04745789978762494 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.03407632093854051, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.03407632093854051 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24692737430167597, + "acc_stderr": 0.014422292204808852, + "acc_norm": 0.24692737430167597, + "acc_norm_stderr": 0.014422292204808852 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 
0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.62, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.62, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.40441176470588236, + "acc_stderr": 0.029812630701569743, + "acc_norm": 0.40441176470588236, + "acc_norm_stderr": 0.029812630701569743 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6530612244897959, + "acc_stderr": 0.0304725260267265, + "acc_norm": 0.6530612244897959, + "acc_norm_stderr": 0.0304725260267265 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7088607594936709, + "acc_stderr": 0.029571601065753374, + "acc_norm": 0.7088607594936709, + "acc_norm_stderr": 0.029571601065753374 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.37353324641460234, + "acc_stderr": 0.012354994823515274, + "acc_norm": 0.37353324641460234, + "acc_norm_stderr": 0.012354994823515274 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6078431372549019, + "acc_stderr": 0.03426712349247273, + "acc_norm": 0.6078431372549019, + "acc_norm_stderr": 0.03426712349247273 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.696969696969697, + "acc_stderr": 0.035886248000917075, + "acc_norm": 0.696969696969697, + "acc_norm_stderr": 0.035886248000917075 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.31946144430844553, + "mc1_stderr": 0.0163226441829605, + "mc2": 0.4864551567320093, + "mc2_stderr": 0.015567897712861186 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.48406139315230223, + "acc_stderr": 0.017181617837190195, + "acc_norm": 0.4935064935064935, + "acc_norm_stderr": 0.017188904359077307 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, 
+ "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + 
"harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "jondurbin/bagel-8b-v1.0", + "model_sha": "b7f4d46d8a935623a47c9d6b3845443f20413a5f", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/jsfs11/MixtureofMerges-MoE-4x7b-v4/result_2024-05-15 17:58:52.json b/jsfs11/MixtureofMerges-MoE-4x7b-v4/result_2024-05-15 17:58:52.json new file mode 100644 index 0000000000000000000000000000000000000000..793142e85a83274a10a39fb00861072f8c541074 --- /dev/null +++ b/jsfs11/MixtureofMerges-MoE-4x7b-v4/result_2024-05-15 17:58:52.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.386518771331058, + "acc_stderr": 0.014230084761910476, + "acc_norm": 0.4496587030716723, + "acc_norm_stderr": 0.014537144444284738 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3964349731129257, + "acc_stderr": 0.004881570100014375, + "acc_norm": 0.525592511451902, + "acc_norm_stderr": 0.004983240744101385 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.47368421052631576, + "acc_stderr": 0.038295098689947286, + "acc_norm": 0.47368421052631576, + "acc_norm_stderr": 0.038295098689947286 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6019417475728155, + "acc_stderr": 0.04846748253977238, + "acc_norm": 0.6019417475728155, + "acc_norm_stderr": 0.04846748253977238 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.45849297573435505, + "acc_stderr": 
0.017818248603465554, + "acc_norm": 0.45849297573435505, + "acc_norm_stderr": 0.017818248603465554 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.04171654161354543, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.04171654161354543 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.42127659574468085, + "acc_stderr": 0.03227834510146268, + "acc_norm": 0.42127659574468085, + "acc_norm_stderr": 0.03227834510146268 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.35542168674698793, + "acc_stderr": 0.03726214354322415, + "acc_norm": 0.35542168674698793, + "acc_norm_stderr": 0.03726214354322415 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.42443729903536975, + "acc_stderr": 0.028071928247946205, + "acc_norm": 0.42443729903536975, + "acc_norm_stderr": 0.028071928247946205 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4484304932735426, + "acc_stderr": 0.03337883736255099, + "acc_norm": 0.4484304932735426, + "acc_norm_stderr": 0.03337883736255099 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.42748091603053434, + "acc_stderr": 0.043389203057924, + "acc_norm": 0.42748091603053434, + "acc_norm_stderr": 0.043389203057924 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5606060606060606, + "acc_stderr": 0.0353608594752948, + "acc_norm": 0.5606060606060606, + "acc_norm_stderr": 0.0353608594752948 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4482758620689655, + "acc_stderr": 0.041443118108781506, + "acc_norm": 0.4482758620689655, + "acc_norm_stderr": 0.041443118108781506 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + 
"acc_stderr": 0.04158307533083286, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.04158307533083286 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5210084033613446, + "acc_stderr": 0.03244980849990029, + "acc_norm": 0.5210084033613446, + "acc_norm_stderr": 0.03244980849990029 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4794871794871795, + "acc_stderr": 0.025329663163489943, + "acc_norm": 0.4794871794871795, + "acc_norm_stderr": 0.025329663163489943 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.63, + "acc_stderr": 0.04852365870939098, + "acc_norm": 0.63, + "acc_norm_stderr": 0.04852365870939098 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5185185185185185, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.5185185185185185, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3891625615763547, + "acc_stderr": 0.03430462416103872, + "acc_norm": 0.3891625615763547, + "acc_norm_stderr": 0.03430462416103872 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.43870967741935485, + "acc_stderr": 0.028229497320317216, + "acc_norm": 0.43870967741935485, + "acc_norm_stderr": 0.028229497320317216 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7435897435897436, + "acc_stderr": 0.02860595370200425, + "acc_norm": 0.7435897435897436, + "acc_norm_stderr": 0.02860595370200425 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4528301886792453, + "acc_stderr": 0.030635627957961827, + "acc_norm": 0.4528301886792453, + "acc_norm_stderr": 0.030635627957961827 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4727272727272727, + "acc_stderr": 0.04782001791380063, + "acc_norm": 0.4727272727272727, + "acc_norm_stderr": 0.04782001791380063 + }, + 
"harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.32592592592592595, + "acc_stderr": 0.028578348365473075, + "acc_norm": 0.32592592592592595, + "acc_norm_stderr": 0.028578348365473075 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2781456953642384, + "acc_stderr": 0.03658603262763743, + "acc_norm": 0.2781456953642384, + "acc_norm_stderr": 0.03658603262763743 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5870646766169154, + "acc_stderr": 0.03481520803367348, + "acc_norm": 0.5870646766169154, + "acc_norm_stderr": 0.03481520803367348 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.42196531791907516, + "acc_stderr": 0.03765746693865151, + "acc_norm": 0.42196531791907516, + "acc_norm_stderr": 0.03765746693865151 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.38095238095238093, + "acc_stderr": 0.025010749116137595, + "acc_norm": 0.38095238095238093, + "acc_norm_stderr": 0.025010749116137595 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3541666666666667, + "acc_stderr": 0.039994111357535424, + "acc_norm": 0.3541666666666667, + "acc_norm_stderr": 0.039994111357535424 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.56, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.56, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5, + "acc_stderr": 0.026919095102908273, + "acc_norm": 0.5, + "acc_norm_stderr": 0.026919095102908273 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4785276073619632, + "acc_stderr": 0.0392474687675113, + "acc_norm": 0.4785276073619632, + "acc_norm_stderr": 0.0392474687675113 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.44753086419753085, + "acc_stderr": 0.02766713856942271, + "acc_norm": 0.44753086419753085, + "acc_norm_stderr": 0.02766713856942271 + }, 
+ "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.49740932642487046, + "acc_stderr": 0.03608390745384488, + "acc_norm": 0.49740932642487046, + "acc_norm_stderr": 0.03608390745384488 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3157894736842105, + "acc_stderr": 0.04372748290278008, + "acc_norm": 0.3157894736842105, + "acc_norm_stderr": 0.04372748290278008 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5137614678899083, + "acc_stderr": 0.02142920208987408, + "acc_norm": 0.5137614678899083, + "acc_norm_stderr": 0.02142920208987408 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.0442626668137991, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.0442626668137991 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.46078431372549017, + "acc_stderr": 0.028541722692618874, + "acc_norm": 0.46078431372549017, + "acc_norm_stderr": 0.028541722692618874 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5867768595041323, + "acc_stderr": 0.04495087843548408, + "acc_norm": 0.5867768595041323, + "acc_norm_stderr": 0.04495087843548408 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3684210526315789, + "acc_stderr": 0.03925523381052932, + "acc_norm": 0.3684210526315789, + "acc_norm_stderr": 0.03925523381052932 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.380718954248366, + "acc_stderr": 0.019643801557924806, + "acc_norm": 0.380718954248366, + "acc_norm_stderr": 0.019643801557924806 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3723404255319149, + "acc_stderr": 0.028838921471251455, + "acc_norm": 0.3723404255319149, + 
"acc_norm_stderr": 0.028838921471251455 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.375, + "acc_stderr": 0.04595091388086298, + "acc_norm": 0.375, + "acc_norm_stderr": 0.04595091388086298 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.033509916046960436, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.033509916046960436 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.28044692737430166, + "acc_stderr": 0.01502408388332288, + "acc_norm": 0.28044692737430166, + "acc_norm_stderr": 0.01502408388332288 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.65, + "acc_stderr": 0.04793724854411021, + "acc_norm": 0.65, + "acc_norm_stderr": 0.04793724854411021 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3125, + "acc_stderr": 0.02815637344037142, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.02815637344037142 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5346938775510204, + "acc_stderr": 0.03193207024425314, + "acc_norm": 0.5346938775510204, + "acc_norm_stderr": 0.03193207024425314 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5949367088607594, + "acc_stderr": 0.031955147413706725, + "acc_norm": 0.5949367088607594, + "acc_norm_stderr": 0.031955147413706725 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3396349413298566, + "acc_stderr": 0.01209559250693197, + "acc_norm": 0.3396349413298566, + "acc_norm_stderr": 0.01209559250693197 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4803921568627451, + "acc_stderr": 0.03506612560524866, + "acc_norm": 0.4803921568627451, + "acc_norm_stderr": 0.03506612560524866 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4727272727272727, + "acc_stderr": 0.03898531605579419, + 
"acc_norm": 0.4727272727272727, + "acc_norm_stderr": 0.03898531605579419 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.4222766217870257, + "mc1_stderr": 0.017290733254248167, + "mc2": 0.5852787587557416, + "mc2_stderr": 0.016216851828083707 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4085005903187721, + "acc_stderr": 0.01690006287942712, + "acc_norm": 0.4167650531286895, + "acc_norm_stderr": 0.016950489146108822 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + 
"harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "jsfs11/MixtureofMerges-MoE-4x7b-v4", + "model_sha": "2b98406f20a874184dbffb5ed24e1f4b5063ec4b", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/jsfs11/MixtureofMerges-MoE-4x7b-v5/result_2024-06-17 02:44:46.json b/jsfs11/MixtureofMerges-MoE-4x7b-v5/result_2024-06-17 02:44:46.json new file mode 100644 index 0000000000000000000000000000000000000000..c36dfe32bd91661a69404bb38d318f8ff029d34e --- /dev/null +++ b/jsfs11/MixtureofMerges-MoE-4x7b-v5/result_2024-06-17 02:44:46.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3848122866894198, + "acc_stderr": 0.014218371065251104, + "acc_norm": 0.4539249146757679, + 
"acc_norm_stderr": 0.01454922110517187 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3966341366261701, + "acc_stderr": 0.0048819904876289105, + "acc_norm": 0.5239992033459471, + "acc_norm_stderr": 0.004984030250507296 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4853801169590643, + "acc_stderr": 0.038331852752130205, + "acc_norm": 0.4853801169590643, + "acc_norm_stderr": 0.038331852752130205 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6310679611650486, + "acc_stderr": 0.0477761518115674, + "acc_norm": 0.6310679611650486, + "acc_norm_stderr": 0.0477761518115674 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.47509578544061304, + "acc_stderr": 0.01785777070490102, + "acc_norm": 0.47509578544061304, + "acc_norm_stderr": 0.01785777070490102 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04072314811876837, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04072314811876837 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3872340425531915, + "acc_stderr": 0.03184389265339526, + "acc_norm": 0.3872340425531915, + "acc_norm_stderr": 0.03184389265339526 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39156626506024095, + "acc_stderr": 0.037998574544796354, + "acc_norm": 0.39156626506024095, + "acc_norm_stderr": 0.037998574544796354 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4437299035369775, + "acc_stderr": 0.02821768355665231, + "acc_norm": 0.4437299035369775, + "acc_norm_stderr": 0.02821768355665231 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4260089686098655, + "acc_stderr": 0.0331883328621728, + "acc_norm": 0.4260089686098655, + "acc_norm_stderr": 0.0331883328621728 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4351145038167939, + "acc_stderr": 0.04348208051644858, + "acc_norm": 0.4351145038167939, + 
"acc_norm_stderr": 0.04348208051644858 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5606060606060606, + "acc_stderr": 0.0353608594752948, + "acc_norm": 0.5606060606060606, + "acc_norm_stderr": 0.0353608594752948 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.47586206896551725, + "acc_stderr": 0.041618085035015295, + "acc_norm": 0.47586206896551725, + "acc_norm_stderr": 0.041618085035015295 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.04280105837364396, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.04280105837364396 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5210084033613446, + "acc_stderr": 0.03244980849990029, + "acc_norm": 0.5210084033613446, + "acc_norm_stderr": 0.03244980849990029 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.46923076923076923, + "acc_stderr": 0.025302958890850154, + "acc_norm": 0.46923076923076923, + "acc_norm_stderr": 0.025302958890850154 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.65, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.65, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5370370370370371, + "acc_stderr": 0.04820403072760627, + "acc_norm": 0.5370370370370371, + "acc_norm_stderr": 0.04820403072760627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3842364532019704, + "acc_stderr": 0.034223985656575515, + "acc_norm": 0.3842364532019704, + "acc_norm_stderr": 0.034223985656575515 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.43870967741935485, + "acc_stderr": 0.028229497320317216, + 
"acc_norm": 0.43870967741935485, + "acc_norm_stderr": 0.028229497320317216 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7606837606837606, + "acc_stderr": 0.027951826808924333, + "acc_norm": 0.7606837606837606, + "acc_norm_stderr": 0.027951826808924333 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4679245283018868, + "acc_stderr": 0.03070948699255654, + "acc_norm": 0.4679245283018868, + "acc_norm_stderr": 0.03070948699255654 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4909090909090909, + "acc_stderr": 0.04788339768702862, + "acc_norm": 0.4909090909090909, + "acc_norm_stderr": 0.04788339768702862 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.028742040903948492, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.028742040903948492 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2582781456953642, + "acc_stderr": 0.035737053147634576, + "acc_norm": 0.2582781456953642, + "acc_norm_stderr": 0.035737053147634576 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6069651741293532, + "acc_stderr": 0.0345368246603156, + "acc_norm": 0.6069651741293532, + "acc_norm_stderr": 0.0345368246603156 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4161849710982659, + "acc_stderr": 0.03758517775404947, + "acc_norm": 0.4161849710982659, + "acc_norm_stderr": 0.03758517775404947 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.373015873015873, + "acc_stderr": 0.02490699045899257, + "acc_norm": 0.373015873015873, + "acc_norm_stderr": 0.02490699045899257 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3541666666666667, + "acc_stderr": 0.039994111357535424, + "acc_norm": 0.3541666666666667, + "acc_norm_stderr": 0.039994111357535424 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.53, + 
"acc_stderr": 0.050161355804659205, + "acc_norm": 0.53, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5057803468208093, + "acc_stderr": 0.026917296179149116, + "acc_norm": 0.5057803468208093, + "acc_norm_stderr": 0.026917296179149116 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5276073619631901, + "acc_stderr": 0.0392237829061099, + "acc_norm": 0.5276073619631901, + "acc_norm_stderr": 0.0392237829061099 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4537037037037037, + "acc_stderr": 0.027701228468542602, + "acc_norm": 0.4537037037037037, + "acc_norm_stderr": 0.027701228468542602 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.047609522856952344, + "acc_norm": 0.34, + "acc_norm_stderr": 0.047609522856952344 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5025906735751295, + "acc_stderr": 0.03608390745384486, + "acc_norm": 0.5025906735751295, + "acc_norm_stderr": 0.03608390745384486 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04434600701584925, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04434600701584925 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5137614678899083, + "acc_stderr": 0.02142920208987408, + "acc_norm": 0.5137614678899083, + "acc_norm_stderr": 0.02142920208987408 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4365079365079365, + "acc_stderr": 0.044359328928514664, + "acc_norm": 0.4365079365079365, + "acc_norm_stderr": 0.044359328928514664 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.49019607843137253, + "acc_stderr": 0.028624412550167965, + "acc_norm": 0.49019607843137253, + "acc_norm_stderr": 0.028624412550167965 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 
0.6033057851239669, + "acc_stderr": 0.044658697805310094, + "acc_norm": 0.6033057851239669, + "acc_norm_stderr": 0.044658697805310094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4144736842105263, + "acc_stderr": 0.04008973785779206, + "acc_norm": 0.4144736842105263, + "acc_norm_stderr": 0.04008973785779206 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.39215686274509803, + "acc_stderr": 0.01975172650876263, + "acc_norm": 0.39215686274509803, + "acc_norm_stderr": 0.01975172650876263 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.38652482269503546, + "acc_stderr": 0.029049190342543458, + "acc_norm": 0.38652482269503546, + "acc_norm_stderr": 0.029049190342543458 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4017857142857143, + "acc_stderr": 0.04653333146973646, + "acc_norm": 0.4017857142857143, + "acc_norm_stderr": 0.04653333146973646 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.39814814814814814, + "acc_stderr": 0.033384734032074016, + "acc_norm": 0.39814814814814814, + "acc_norm_stderr": 0.033384734032074016 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2849162011173184, + "acc_stderr": 0.015096222302469802, + "acc_norm": 0.2849162011173184, + "acc_norm_stderr": 0.015096222302469802 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.66, + "acc_stderr": 0.04760952285695237, + "acc_norm": 0.66, + "acc_norm_stderr": 0.04760952285695237 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.33455882352941174, + "acc_stderr": 0.028661996202335307, + "acc_norm": 0.33455882352941174, + "acc_norm_stderr": 0.028661996202335307 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5510204081632653, + "acc_stderr": 0.03184213866687579, + "acc_norm": 0.5510204081632653, + "acc_norm_stderr": 
0.03184213866687579 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5907172995780591, + "acc_stderr": 0.03200704183359591, + "acc_norm": 0.5907172995780591, + "acc_norm_stderr": 0.03200704183359591 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.34028683181225555, + "acc_stderr": 0.012101217610223806, + "acc_norm": 0.34028683181225555, + "acc_norm_stderr": 0.012101217610223806 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5049019607843137, + "acc_stderr": 0.03509143375606786, + "acc_norm": 0.5049019607843137, + "acc_norm_stderr": 0.03509143375606786 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4909090909090909, + "acc_stderr": 0.0390369864774844, + "acc_norm": 0.4909090909090909, + "acc_norm_stderr": 0.0390369864774844 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.41615667074663404, + "mc1_stderr": 0.01725565750290304, + "mc2": 0.5774622369944382, + "mc2_stderr": 0.016360547495483713 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.43211334120425027, + "acc_stderr": 0.01703117019885175, + "acc_norm": 0.43919716646989376, + "acc_norm_stderr": 0.017062775744780705 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + 
"harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "jsfs11/MixtureofMerges-MoE-4x7b-v5", + "model_sha": 
"c1b5ce7144b966062df7627d2482a59e0df3757c", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/juengsi/DT-EQ-SOLAR-10.7B-v0.1/result_2024-03-31 16:07:14.json b/juengsi/DT-EQ-SOLAR-10.7B-v0.1/result_2024-03-31 16:07:14.json new file mode 100644 index 0000000000000000000000000000000000000000..1d988daedab50c4a3621c966340ca8eedf7cc2ee --- /dev/null +++ b/juengsi/DT-EQ-SOLAR-10.7B-v0.1/result_2024-03-31 16:07:14.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.18771331058020477, + "acc_stderr": 0.011411001314155124, + "acc_norm": 0.2354948805460751, + "acc_norm_stderr": 0.012399451855004759 + }, + "harness|ko_hellaswag|10": { + "acc": 0.2506472814180442, + "acc_stderr": 0.004325000473328607, + "acc_norm": 0.24875522804222266, + "acc_norm_stderr": 0.004314081608624646 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.2982456140350877, + "acc_stderr": 0.03508771929824563, + "acc_norm": 0.2982456140350877, + "acc_norm_stderr": 0.03508771929824563 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.1941747572815534, + "acc_stderr": 0.03916667762822582, + "acc_norm": 0.1941747572815534, + "acc_norm_stderr": 0.03916667762822582 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.20051085568326948, + "acc_stderr": 0.014317653708594209, + "acc_norm": 0.20051085568326948, + "acc_norm_stderr": 0.014317653708594209 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.22962962962962963, + "acc_stderr": 0.036333844140734636, + "acc_norm": 0.22962962962962963, + "acc_norm_stderr": 0.036333844140734636 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.20851063829787234, + "acc_stderr": 0.026556982117838746, + 
"acc_norm": 0.20851063829787234, + "acc_norm_stderr": 0.026556982117838746 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.16265060240963855, + "acc_stderr": 0.0287302378926138, + "acc_norm": 0.16265060240963855, + "acc_norm_stderr": 0.0287302378926138 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.24115755627009647, + "acc_stderr": 0.024296594034763426, + "acc_norm": 0.24115755627009647, + "acc_norm_stderr": 0.024296594034763426 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.13004484304932734, + "acc_stderr": 0.022574519424174877, + "acc_norm": 0.13004484304932734, + "acc_norm_stderr": 0.022574519424174877 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.26717557251908397, + "acc_stderr": 0.03880848301082395, + "acc_norm": 0.26717557251908397, + "acc_norm_stderr": 0.03880848301082395 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909284, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909284 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3484848484848485, + "acc_stderr": 0.033948539651564025, + "acc_norm": 0.3484848484848485, + "acc_norm_stderr": 0.033948539651564025 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2413793103448276, + "acc_stderr": 0.03565998174135302, + "acc_norm": 0.2413793103448276, + "acc_norm_stderr": 0.03565998174135302 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.044405219061793275, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.044405219061793275 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3487394957983193, + "acc_stderr": 0.030956636328566548, + "acc_norm": 0.3487394957983193, + "acc_norm_stderr": 0.030956636328566548 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3641025641025641, + "acc_stderr": 0.024396672985094778, + "acc_norm": 0.3641025641025641, + "acc_norm_stderr": 0.024396672985094778 + }, + 
"harness|ko_mmlu_computer_security|5": { + "acc": 0.18, + "acc_stderr": 0.03861229196653694, + "acc_norm": 0.18, + "acc_norm_stderr": 0.03861229196653694 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.21296296296296297, + "acc_stderr": 0.03957835471980981, + "acc_norm": 0.21296296296296297, + "acc_norm_stderr": 0.03957835471980981 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2561576354679803, + "acc_stderr": 0.0307127300709826, + "acc_norm": 0.2561576354679803, + "acc_norm_stderr": 0.0307127300709826 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3161290322580645, + "acc_stderr": 0.026450874489042764, + "acc_norm": 0.3161290322580645, + "acc_norm_stderr": 0.026450874489042764 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.19658119658119658, + "acc_stderr": 0.02603538609895129, + "acc_norm": 0.19658119658119658, + "acc_norm_stderr": 0.02603538609895129 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.32452830188679244, + "acc_stderr": 0.028815615713432115, + "acc_norm": 0.32452830188679244, + "acc_norm_stderr": 0.028815615713432115 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.22727272727272727, + "acc_stderr": 0.040139645540727735, + "acc_norm": 0.22727272727272727, + "acc_norm_stderr": 0.040139645540727735 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.02684205787383371, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.02684205787383371 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33112582781456956, + "acc_stderr": 0.038425817186598696, + "acc_norm": 0.33112582781456956, + "acc_norm_stderr": 0.038425817186598696 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.23880597014925373, + "acc_stderr": 0.030147775935409224, + "acc_norm": 0.23880597014925373, + 
"acc_norm_stderr": 0.030147775935409224 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3063583815028902, + "acc_stderr": 0.035149425512674394, + "acc_norm": 0.3063583815028902, + "acc_norm_stderr": 0.035149425512674394 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.23544973544973544, + "acc_stderr": 0.021851509822031708, + "acc_norm": 0.23544973544973544, + "acc_norm_stderr": 0.021851509822031708 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2638888888888889, + "acc_stderr": 0.03685651095897532, + "acc_norm": 0.2638888888888889, + "acc_norm_stderr": 0.03685651095897532 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.24277456647398843, + "acc_stderr": 0.0230836585869842, + "acc_norm": 0.24277456647398843, + "acc_norm_stderr": 0.0230836585869842 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2147239263803681, + "acc_stderr": 0.032262193772867744, + "acc_norm": 0.2147239263803681, + "acc_norm_stderr": 0.032262193772867744 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2345679012345679, + "acc_stderr": 0.02357688174400572, + "acc_norm": 0.2345679012345679, + "acc_norm_stderr": 0.02357688174400572 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.36787564766839376, + "acc_stderr": 0.034801756684660366, + "acc_norm": 0.36787564766839376, + "acc_norm_stderr": 0.034801756684660366 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.040969851398436695, + "acc_norm": 
0.2543859649122807, + "acc_norm_stderr": 0.040969851398436695 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.24770642201834864, + "acc_stderr": 0.018508143602547822, + "acc_norm": 0.24770642201834864, + "acc_norm_stderr": 0.018508143602547822 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.36507936507936506, + "acc_stderr": 0.04306241259127153, + "acc_norm": 0.36507936507936506, + "acc_norm_stderr": 0.04306241259127153 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2908496732026144, + "acc_stderr": 0.026004800363952113, + "acc_norm": 0.2908496732026144, + "acc_norm_stderr": 0.026004800363952113 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.24793388429752067, + "acc_stderr": 0.039418975265163025, + "acc_norm": 0.24793388429752067, + "acc_norm_stderr": 0.039418975265163025 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.34210526315789475, + "acc_stderr": 0.03860731599316091, + "acc_norm": 0.34210526315789475, + "acc_norm_stderr": 0.03860731599316091 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2173202614379085, + "acc_stderr": 0.016684820929148598, + "acc_norm": 0.2173202614379085, + "acc_norm_stderr": 0.016684820929148598 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.25177304964539005, + "acc_stderr": 0.0258921511567094, + "acc_norm": 0.25177304964539005, + "acc_norm_stderr": 0.0258921511567094 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.16071428571428573, + "acc_stderr": 0.034859460964757394, + "acc_norm": 0.16071428571428573, + "acc_norm_stderr": 0.034859460964757394 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.0340470532865388, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.0340470532865388 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 
0.2446927374301676, + "acc_stderr": 0.014378169884098424, + "acc_norm": 0.2446927374301676, + "acc_norm_stderr": 0.014378169884098424 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.19, + "acc_stderr": 0.03942772444036624, + "acc_norm": 0.19, + "acc_norm_stderr": 0.03942772444036624 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4485294117647059, + "acc_stderr": 0.030211479609121593, + "acc_norm": 0.4485294117647059, + "acc_norm_stderr": 0.030211479609121593 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.39183673469387753, + "acc_stderr": 0.03125127591089165, + "acc_norm": 0.39183673469387753, + "acc_norm_stderr": 0.03125127591089165 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2616033755274262, + "acc_stderr": 0.028609516716994934, + "acc_norm": 0.2616033755274262, + "acc_norm_stderr": 0.028609516716994934 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.23468057366362452, + "acc_stderr": 0.010824026872449351, + "acc_norm": 0.23468057366362452, + "acc_norm_stderr": 0.010824026872449351 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.029771775228145638, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.029771775228145638 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03225078108306289, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03225078108306289 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2533659730722154, + "mc1_stderr": 0.015225899340826842, + "mc2": 0.4899767196172315, + "mc2_stderr": 0.01730408828542341 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.09327036599763873, + "acc_stderr": 0.00999828619027673, + "acc_norm": 0.2727272727272727, + "acc_norm_stderr": 
0.015311853110300352 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + 
"harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "juengsi/DT-EQ-SOLAR-10.7B-v0.1", + "model_sha": "c3d8a9603973e5a4163ce602de68e49973f41570", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/juengsi/DT-SL-MLP-SOLAR-10.7B-v0.1/result_2024-03-31 23:08:54.json b/juengsi/DT-SL-MLP-SOLAR-10.7B-v0.1/result_2024-03-31 23:08:54.json new file mode 100644 index 0000000000000000000000000000000000000000..669fab9f6ff8eced7fa5ed80e73597dd9058db56 --- /dev/null +++ b/juengsi/DT-SL-MLP-SOLAR-10.7B-v0.1/result_2024-03-31 23:08:54.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.18003412969283278, + "acc_stderr": 0.011227856729050021, + "acc_norm": 0.2098976109215017, + "acc_norm_stderr": 0.011900548748047437 + }, + "harness|ko_hellaswag|10": { + "acc": 0.2765385381398128, + "acc_stderr": 0.004463721071319088, + "acc_norm": 0.3042222664807807, + "acc_norm_stderr": 0.004591369853276522 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.031885780176863984, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.031885780176863984 + }, 
+ "harness|ko_mmlu_management|5": { + "acc": 0.2621359223300971, + "acc_stderr": 0.04354631077260595, + "acc_norm": 0.2621359223300971, + "acc_norm_stderr": 0.04354631077260595 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3205619412515964, + "acc_stderr": 0.01668889331080377, + "acc_norm": 0.3205619412515964, + "acc_norm_stderr": 0.01668889331080377 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04072314811876837, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04072314811876837 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3276595744680851, + "acc_stderr": 0.030683020843231, + "acc_norm": 0.3276595744680851, + "acc_norm_stderr": 0.030683020843231 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3313253012048193, + "acc_stderr": 0.03664314777288086, + "acc_norm": 0.3313253012048193, + "acc_norm_stderr": 0.03664314777288086 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.34726688102893893, + "acc_stderr": 0.027040745502307336, + "acc_norm": 0.34726688102893893, + "acc_norm_stderr": 0.027040745502307336 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.35874439461883406, + "acc_stderr": 0.03219079200419997, + "acc_norm": 0.35874439461883406, + "acc_norm_stderr": 0.03219079200419997 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.25190839694656486, + "acc_stderr": 0.03807387116306085, + "acc_norm": 0.25190839694656486, + "acc_norm_stderr": 0.03807387116306085 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.25252525252525254, + "acc_stderr": 0.030954055470365904, + "acc_norm": 0.25252525252525254, + "acc_norm_stderr": 0.030954055470365904 + }, + 
"harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2620689655172414, + "acc_stderr": 0.036646663372252565, + "acc_norm": 0.2620689655172414, + "acc_norm_stderr": 0.036646663372252565 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.18627450980392157, + "acc_stderr": 0.038739587141493524, + "acc_norm": 0.18627450980392157, + "acc_norm_stderr": 0.038739587141493524 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.028657491285071966, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.028657491285071966 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.24615384615384617, + "acc_stderr": 0.021840866990423077, + "acc_norm": 0.24615384615384617, + "acc_norm_stderr": 0.021840866990423077 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3425925925925926, + "acc_stderr": 0.045879047413018105, + "acc_norm": 0.3425925925925926, + "acc_norm_stderr": 0.045879047413018105 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.28078817733990147, + "acc_stderr": 0.031618563353586086, + "acc_norm": 0.28078817733990147, + "acc_norm_stderr": 0.031618563353586086 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.25161290322580643, + "acc_stderr": 0.02468597928623997, + "acc_norm": 0.25161290322580643, + "acc_norm_stderr": 0.02468597928623997 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.4358974358974359, + "acc_stderr": 0.032485775115784, + "acc_norm": 0.4358974358974359, + "acc_norm_stderr": 0.032485775115784 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2943396226415094, + "acc_stderr": 0.028049186315695248, + "acc_norm": 
0.2943396226415094, + "acc_norm_stderr": 0.028049186315695248 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.3090909090909091, + "acc_stderr": 0.044262946482000985, + "acc_norm": 0.3090909090909091, + "acc_norm_stderr": 0.044262946482000985 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.02730914058823018, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.02730914058823018 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.19205298013245034, + "acc_stderr": 0.032162984205936114, + "acc_norm": 0.19205298013245034, + "acc_norm_stderr": 0.032162984205936114 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.23880597014925373, + "acc_stderr": 0.030147775935409224, + "acc_norm": 0.23880597014925373, + "acc_norm_stderr": 0.030147775935409224 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.24855491329479767, + "acc_stderr": 0.03295304696818318, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 0.03295304696818318 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.02306818884826111, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.02306818884826111 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.24305555555555555, + "acc_stderr": 0.03586879280080343, + "acc_norm": 0.24305555555555555, + "acc_norm_stderr": 0.03586879280080343 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.26011560693641617, + "acc_stderr": 0.023618678310069363, + "acc_norm": 0.26011560693641617, + "acc_norm_stderr": 0.023618678310069363 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.26380368098159507, + 
"acc_stderr": 0.03462419931615624, + "acc_norm": 0.26380368098159507, + "acc_norm_stderr": 0.03462419931615624 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.29012345679012347, + "acc_stderr": 0.025251173936495033, + "acc_norm": 0.29012345679012347, + "acc_norm_stderr": 0.025251173936495033 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.26424870466321243, + "acc_stderr": 0.03182155050916648, + "acc_norm": 0.26424870466321243, + "acc_norm_stderr": 0.03182155050916648 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.30701754385964913, + "acc_stderr": 0.04339138322579859, + "acc_norm": 0.30701754385964913, + "acc_norm_stderr": 0.04339138322579859 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.27889908256880735, + "acc_stderr": 0.01922746887646351, + "acc_norm": 0.27889908256880735, + "acc_norm_stderr": 0.01922746887646351 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.1984126984126984, + "acc_stderr": 0.03567016675276862, + "acc_norm": 0.1984126984126984, + "acc_norm_stderr": 0.03567016675276862 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.24836601307189543, + "acc_stderr": 0.024739981355113592, + "acc_norm": 0.24836601307189543, + "acc_norm_stderr": 0.024739981355113592 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.4049586776859504, + "acc_stderr": 0.04481137755942469, + "acc_norm": 0.4049586776859504, + "acc_norm_stderr": 0.04481137755942469 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.21052631578947367, + "acc_stderr": 0.03317672787533157, + "acc_norm": 0.21052631578947367, + "acc_norm_stderr": 0.03317672787533157 + }, + 
"harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2957516339869281, + "acc_stderr": 0.018463154132632806, + "acc_norm": 0.2957516339869281, + "acc_norm_stderr": 0.018463154132632806 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.25177304964539005, + "acc_stderr": 0.0258921511567094, + "acc_norm": 0.25177304964539005, + "acc_norm_stderr": 0.0258921511567094 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.23214285714285715, + "acc_stderr": 0.04007341809755807, + "acc_norm": 0.23214285714285715, + "acc_norm_stderr": 0.04007341809755807 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.24074074074074073, + "acc_stderr": 0.029157522184605586, + "acc_norm": 0.24074074074074073, + "acc_norm_stderr": 0.029157522184605586 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.18, + "acc_stderr": 0.038612291966536934, + "acc_norm": 0.18, + "acc_norm_stderr": 0.038612291966536934 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.024562204314142317, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.024562204314142317 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2163265306122449, + "acc_stderr": 0.026358916334904017, + "acc_norm": 0.2163265306122449, + "acc_norm_stderr": 0.026358916334904017 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2742616033755274, + "acc_stderr": 0.029041333510598035, + "acc_norm": 0.2742616033755274, + "acc_norm_stderr": 0.029041333510598035 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2588005215123859, + "acc_stderr": 
0.011186109046564608, + "acc_norm": 0.2588005215123859, + "acc_norm_stderr": 0.011186109046564608 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.24019607843137256, + "acc_stderr": 0.02998373305591362, + "acc_norm": 0.24019607843137256, + "acc_norm_stderr": 0.02998373305591362 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.24848484848484848, + "acc_stderr": 0.03374402644139404, + "acc_norm": 0.24848484848484848, + "acc_norm_stderr": 0.03374402644139404 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24357405140758873, + "mc1_stderr": 0.015026354824910782, + "mc2": 0.41420479962606566, + "mc2_stderr": 0.01677493225522166 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.1770956316410862, + "acc_stderr": 0.01312482213747454, + "acc_norm": 0.29634002361275086, + "acc_norm_stderr": 0.015699701628594232 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + 
"harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "juengsi/DT-SL-MLP-SOLAR-10.7B-v0.1", + "model_sha": "005a86a686e9b271a6aee30e652c245277a60e7b", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/juengsi/DT-SL-SOLAR-10.7B-v0.1/result_2024-03-31 17:29:20.json 
b/juengsi/DT-SL-SOLAR-10.7B-v0.1/result_2024-03-31 17:29:20.json new file mode 100644 index 0000000000000000000000000000000000000000..3f7d927f05afbd9f0190757dcecb4e723b979a94 --- /dev/null +++ b/juengsi/DT-SL-SOLAR-10.7B-v0.1/result_2024-03-31 17:29:20.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.18515358361774745, + "acc_stderr": 0.011350774438389699, + "acc_norm": 0.257679180887372, + "acc_norm_stderr": 0.012780770562768402 + }, + "harness|ko_hellaswag|10": { + "acc": 0.25104560844453294, + "acc_stderr": 0.004327285172596085, + "acc_norm": 0.2502489543915555, + "acc_norm_stderr": 0.004322710911026374 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.22807017543859648, + "acc_stderr": 0.03218093795602357, + "acc_norm": 0.22807017543859648, + "acc_norm_stderr": 0.03218093795602357 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.18446601941747573, + "acc_stderr": 0.03840423627288276, + "acc_norm": 0.18446601941747573, + "acc_norm_stderr": 0.03840423627288276 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2681992337164751, + "acc_stderr": 0.015842430835269438, + "acc_norm": 0.2681992337164751, + "acc_norm_stderr": 0.015842430835269438 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2740740740740741, + "acc_stderr": 0.03853254836552003, + "acc_norm": 0.2740740740740741, + "acc_norm_stderr": 0.03853254836552003 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.20425531914893616, + "acc_stderr": 0.02635515841334941, + "acc_norm": 0.20425531914893616, + "acc_norm_stderr": 0.02635515841334941 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.1746987951807229, + "acc_stderr": 0.02956032621125683, + "acc_norm": 0.1746987951807229, + "acc_norm_stderr": 0.02956032621125683 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2990353697749196, + 
"acc_stderr": 0.02600330111788514, + "acc_norm": 0.2990353697749196, + "acc_norm_stderr": 0.02600330111788514 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.2062780269058296, + "acc_stderr": 0.027157150479563824, + "acc_norm": 0.2062780269058296, + "acc_norm_stderr": 0.027157150479563824 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2595419847328244, + "acc_stderr": 0.03844876139785271, + "acc_norm": 0.2595419847328244, + "acc_norm_stderr": 0.03844876139785271 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909284, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909284 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.35858585858585856, + "acc_stderr": 0.03416903640391521, + "acc_norm": 0.35858585858585856, + "acc_norm_stderr": 0.03416903640391521 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2413793103448276, + "acc_stderr": 0.03565998174135302, + "acc_norm": 0.2413793103448276, + "acc_norm_stderr": 0.03565998174135302 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3627450980392157, + "acc_stderr": 0.04784060704105653, + "acc_norm": 0.3627450980392157, + "acc_norm_stderr": 0.04784060704105653 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3403361344537815, + "acc_stderr": 0.03077805742293167, + "acc_norm": 0.3403361344537815, + "acc_norm_stderr": 0.03077805742293167 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.34102564102564104, + "acc_stderr": 0.024035489676335065, + "acc_norm": 0.34102564102564104, + "acc_norm_stderr": 0.024035489676335065 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.18, + "acc_stderr": 0.03861229196653694, + "acc_norm": 0.18, + "acc_norm_stderr": 0.03861229196653694 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.18, + "acc_stderr": 0.038612291966536955, + "acc_norm": 0.18, + "acc_norm_stderr": 0.038612291966536955 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 
0.21296296296296297, + "acc_stderr": 0.03957835471980981, + "acc_norm": 0.21296296296296297, + "acc_norm_stderr": 0.03957835471980981 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.33497536945812806, + "acc_stderr": 0.033208527423483104, + "acc_norm": 0.33497536945812806, + "acc_norm_stderr": 0.033208527423483104 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3161290322580645, + "acc_stderr": 0.026450874489042764, + "acc_norm": 0.3161290322580645, + "acc_norm_stderr": 0.026450874489042764 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.19658119658119658, + "acc_stderr": 0.02603538609895129, + "acc_norm": 0.19658119658119658, + "acc_norm_stderr": 0.02603538609895129 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3283018867924528, + "acc_stderr": 0.028901593612411784, + "acc_norm": 0.3283018867924528, + "acc_norm_stderr": 0.028901593612411784 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.22727272727272727, + "acc_stderr": 0.040139645540727735, + "acc_norm": 0.22727272727272727, + "acc_norm_stderr": 0.040139645540727735 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.026719240783712166, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.026719240783712166 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33112582781456956, + "acc_stderr": 0.038425817186598696, + "acc_norm": 0.33112582781456956, + "acc_norm_stderr": 0.038425817186598696 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.21393034825870647, + "acc_stderr": 0.028996909693328923, + "acc_norm": 0.21393034825870647, + "acc_norm_stderr": 0.028996909693328923 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2947976878612717, + "acc_stderr": 0.034765996075164785, + "acc_norm": 0.2947976878612717, + "acc_norm_stderr": 0.034765996075164785 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.21957671957671956, + "acc_stderr": 0.021320018599770355, + "acc_norm": 
0.21957671957671956, + "acc_norm_stderr": 0.021320018599770355 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2638888888888889, + "acc_stderr": 0.03685651095897532, + "acc_norm": 0.2638888888888889, + "acc_norm_stderr": 0.03685651095897532 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.24855491329479767, + "acc_stderr": 0.023267528432100174, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 0.023267528432100174 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.26380368098159507, + "acc_stderr": 0.03462419931615624, + "acc_norm": 0.26380368098159507, + "acc_norm_stderr": 0.03462419931615624 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.22530864197530864, + "acc_stderr": 0.02324620264781975, + "acc_norm": 0.22530864197530864, + "acc_norm_stderr": 0.02324620264781975 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.36787564766839376, + "acc_stderr": 0.034801756684660366, + "acc_norm": 0.36787564766839376, + "acc_norm_stderr": 0.034801756684660366 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.04185774424022057, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.04185774424022057 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3137614678899083, + "acc_stderr": 0.01989472334146915, + "acc_norm": 0.3137614678899083, + "acc_norm_stderr": 0.01989472334146915 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.24603174603174602, + "acc_stderr": 0.038522733649243156, 
+ "acc_norm": 0.24603174603174602, + "acc_norm_stderr": 0.038522733649243156 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.02564686309713791, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.02564686309713791 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2809917355371901, + "acc_stderr": 0.041032038305145124, + "acc_norm": 0.2809917355371901, + "acc_norm_stderr": 0.041032038305145124 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.34868421052631576, + "acc_stderr": 0.03878139888797611, + "acc_norm": 0.34868421052631576, + "acc_norm_stderr": 0.03878139888797611 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.016819028375736386, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.016819028375736386 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.24822695035460993, + "acc_stderr": 0.025770015644290392, + "acc_norm": 0.24822695035460993, + "acc_norm_stderr": 0.025770015644290392 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.16071428571428573, + "acc_stderr": 0.034859460964757394, + "acc_norm": 0.16071428571428573, + "acc_norm_stderr": 0.034859460964757394 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.0340470532865388, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.0340470532865388 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27262569832402234, + "acc_stderr": 0.014893391735249608, + "acc_norm": 0.27262569832402234, + "acc_norm_stderr": 0.014893391735249608 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 
0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4485294117647059, + "acc_stderr": 0.030211479609121593, + "acc_norm": 0.4485294117647059, + "acc_norm_stderr": 0.030211479609121593 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.23673469387755103, + "acc_stderr": 0.027212835884073167, + "acc_norm": 0.23673469387755103, + "acc_norm_stderr": 0.027212835884073167 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.1940928270042194, + "acc_stderr": 0.02574490253229093, + "acc_norm": 0.1940928270042194, + "acc_norm_stderr": 0.02574490253229093 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.23728813559322035, + "acc_stderr": 0.010865436690780293, + "acc_norm": 0.23728813559322035, + "acc_norm_stderr": 0.010865436690780293 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.25, + "acc_stderr": 0.03039153369274154, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03039153369274154 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.23030303030303031, + "acc_stderr": 0.03287666758603489, + "acc_norm": 0.23030303030303031, + "acc_norm_stderr": 0.03287666758603489 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2386780905752754, + "mc1_stderr": 0.014922629695456418, + "mc2": 0.4754933180206516, + "mc2_stderr": 0.01722274853736126 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.09681227863046045, + "acc_stderr": 0.010166443512074711, + "acc_norm": 0.2396694214876033, + "acc_norm_stderr": 0.014676495332267253 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + 
"harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 
1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "juengsi/DT-SL-SOLAR-10.7B-v0.1", + "model_sha": "7e9e1ed6244772fb3fce7f9659d3b3b2d8c9e07e", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/juengsi/EVO-SOLAR-10.7B-v0.1/result_2024-03-31 23:25:13.json b/juengsi/EVO-SOLAR-10.7B-v0.1/result_2024-03-31 23:25:13.json new file mode 100644 index 0000000000000000000000000000000000000000..9a4bce8839054c7cbd325685ff86edf211ab7f27 --- /dev/null +++ b/juengsi/EVO-SOLAR-10.7B-v0.1/result_2024-03-31 23:25:13.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.19368600682593856, + "acc_stderr": 0.01154842540997854, + "acc_norm": 0.24061433447098976, + "acc_norm_stderr": 0.012491468532390578 + }, + "harness|ko_hellaswag|10": { + "acc": 0.2509460266879108, + "acc_stderr": 0.004326714453266739, + "acc_norm": 0.25413264289982074, + "acc_norm_stderr": 0.004344827546976549 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.21637426900584794, + "acc_stderr": 0.031581495393387345, + "acc_norm": 0.21637426900584794, + "acc_norm_stderr": 0.031581495393387345 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.17475728155339806, + "acc_stderr": 0.037601780060266196, + "acc_norm": 0.17475728155339806, + "acc_norm_stderr": 0.037601780060266196 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2669220945083014, + "acc_stderr": 0.01581845089477756, + "acc_norm": 0.2669220945083014, + "acc_norm_stderr": 0.01581845089477756 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.31851851851851853, 
+ "acc_stderr": 0.040247784019771096, + "acc_norm": 0.31851851851851853, + "acc_norm_stderr": 0.040247784019771096 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768077, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768077 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2297872340425532, + "acc_stderr": 0.02750175294441242, + "acc_norm": 0.2297872340425532, + "acc_norm_stderr": 0.02750175294441242 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3192771084337349, + "acc_stderr": 0.03629335329947859, + "acc_norm": 0.3192771084337349, + "acc_norm_stderr": 0.03629335329947859 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2990353697749196, + "acc_stderr": 0.02600330111788514, + "acc_norm": 0.2990353697749196, + "acc_norm_stderr": 0.02600330111788514 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.2062780269058296, + "acc_stderr": 0.027157150479563824, + "acc_norm": 0.2062780269058296, + "acc_norm_stderr": 0.027157150479563824 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.24427480916030533, + "acc_stderr": 0.03768335959728745, + "acc_norm": 0.24427480916030533, + "acc_norm_stderr": 0.03768335959728745 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932268, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932268 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.1717171717171717, + "acc_stderr": 0.026869716187429903, + "acc_norm": 0.1717171717171717, + "acc_norm_stderr": 0.026869716187429903 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2206896551724138, + "acc_stderr": 0.03455930201924811, + "acc_norm": 0.2206896551724138, + "acc_norm_stderr": 0.03455930201924811 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.04280105837364395, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.04280105837364395 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + 
"acc": 0.21008403361344538, + "acc_stderr": 0.026461398717471874, + "acc_norm": 0.21008403361344538, + "acc_norm_stderr": 0.026461398717471874 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2128205128205128, + "acc_stderr": 0.020752423722128002, + "acc_norm": 0.2128205128205128, + "acc_norm_stderr": 0.020752423722128002 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.21296296296296297, + "acc_stderr": 0.03957835471980981, + "acc_norm": 0.21296296296296297, + "acc_norm_stderr": 0.03957835471980981 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.28078817733990147, + "acc_stderr": 0.0316185633535861, + "acc_norm": 0.28078817733990147, + "acc_norm_stderr": 0.0316185633535861 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3161290322580645, + "acc_stderr": 0.026450874489042764, + "acc_norm": 0.3161290322580645, + "acc_norm_stderr": 0.026450874489042764 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.02934311479809445, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.02934311479809445 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2679245283018868, + "acc_stderr": 0.027257260322494845, + "acc_norm": 0.2679245283018868, + "acc_norm_stderr": 0.027257260322494845 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.19090909090909092, + "acc_stderr": 0.03764425585984927, + "acc_norm": 0.19090909090909092, + "acc_norm_stderr": 0.03764425585984927 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.02684205787383371, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.02684205787383371 + }, 
+ "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.1986754966887417, + "acc_stderr": 0.03257847384436776, + "acc_norm": 0.1986754966887417, + "acc_norm_stderr": 0.03257847384436776 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.23880597014925373, + "acc_stderr": 0.030147775935409224, + "acc_norm": 0.23880597014925373, + "acc_norm_stderr": 0.030147775935409224 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.24277456647398843, + "acc_stderr": 0.0326926380614177, + "acc_norm": 0.24277456647398843, + "acc_norm_stderr": 0.0326926380614177 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2566137566137566, + "acc_stderr": 0.022494510767503154, + "acc_norm": 0.2566137566137566, + "acc_norm_stderr": 0.022494510767503154 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.22916666666666666, + "acc_stderr": 0.035146974678623884, + "acc_norm": 0.22916666666666666, + "acc_norm_stderr": 0.035146974678623884 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816505, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816505 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.24855491329479767, + "acc_stderr": 0.023267528432100174, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 0.023267528432100174 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3006134969325153, + "acc_stderr": 0.03602511318806771, + "acc_norm": 0.3006134969325153, + "acc_norm_stderr": 0.03602511318806771 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.24382716049382716, + "acc_stderr": 0.02389187954195961, + "acc_norm": 0.24382716049382716, + "acc_norm_stderr": 0.02389187954195961 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.22, + "acc_stderr": 0.041633319989322695, + "acc_norm": 0.22, + "acc_norm_stderr": 0.041633319989322695 + 
}, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.22797927461139897, + "acc_stderr": 0.030276909945178256, + "acc_norm": 0.22797927461139897, + "acc_norm_stderr": 0.030276909945178256 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.04049339297748141, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.04049339297748141 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.23119266055045873, + "acc_stderr": 0.018075750241633146, + "acc_norm": 0.23119266055045873, + "acc_norm_stderr": 0.018075750241633146 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.15079365079365079, + "acc_stderr": 0.03200686497287392, + "acc_norm": 0.15079365079365079, + "acc_norm_stderr": 0.03200686497287392 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.24183006535947713, + "acc_stderr": 0.024518195641879334, + "acc_norm": 0.24183006535947713, + "acc_norm_stderr": 0.024518195641879334 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2727272727272727, + "acc_stderr": 0.04065578140908705, + "acc_norm": 0.2727272727272727, + "acc_norm_stderr": 0.04065578140908705 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.17763157894736842, + "acc_stderr": 0.03110318238312338, + "acc_norm": 0.17763157894736842, + "acc_norm_stderr": 0.03110318238312338 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.017630827375148383, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.017630827375148383 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2553191489361702, + "acc_stderr": 0.02601199293090201, + "acc_norm": 0.2553191489361702, + "acc_norm_stderr": 0.02601199293090201 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.30357142857142855, + "acc_stderr": 
0.04364226155841044, + "acc_norm": 0.30357142857142855, + "acc_norm_stderr": 0.04364226155841044 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.0340470532865388, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.0340470532865388 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2435754189944134, + "acc_stderr": 0.014355911964767867, + "acc_norm": 0.2435754189944134, + "acc_norm_stderr": 0.014355911964767867 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4485294117647059, + "acc_stderr": 0.030211479609121593, + "acc_norm": 0.4485294117647059, + "acc_norm_stderr": 0.030211479609121593 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.24897959183673468, + "acc_stderr": 0.027682979522960227, + "acc_norm": 0.24897959183673468, + "acc_norm_stderr": 0.027682979522960227 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.25738396624472576, + "acc_stderr": 0.0284588209914603, + "acc_norm": 0.25738396624472576, + "acc_norm_stderr": 0.0284588209914603 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.242503259452412, + "acc_stderr": 0.010946570966348773, + "acc_norm": 0.242503259452412, + "acc_norm_stderr": 0.010946570966348773 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.028867431449849313, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.028867431449849313 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2606060606060606, + "acc_stderr": 0.03427743175816524, + "acc_norm": 0.2606060606060606, + "acc_norm_stderr": 0.03427743175816524 + }, + 
"harness|ko_truthfulqa_mc|0": { + "mc1": 0.26560587515299877, + "mc1_stderr": 0.015461027627253597, + "mc2": 0.49987899831937804, + "mc2_stderr": 0.017187640008138803 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.10035419126328217, + "acc_stderr": 0.010330428987816106, + "acc_norm": 0.2538370720188902, + "acc_norm_stderr": 0.01496267273977 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "juengsi/EVO-SOLAR-10.7B-v0.1", + "model_sha": "3648bbaf80c3b997228120fab4c6644375372dc8", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/julleong/illuni-llama-2-ko-7b-test/result_2024-03-07 08:55:25.json b/julleong/illuni-llama-2-ko-7b-test/result_2024-03-07 08:55:25.json new file mode 100644 index 0000000000000000000000000000000000000000..75ee89f0c1745d2b01c8b9d50ec4659f94803c3a --- /dev/null +++ b/julleong/illuni-llama-2-ko-7b-test/result_2024-03-07 08:55:25.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.32593856655290104, + "acc_stderr": 0.01369743246669324, + "acc_norm": 0.38054607508532423, + "acc_norm_stderr": 0.014188277712349815 + }, + "harness|ko_hellaswag|10": { + "acc": 
0.38548097988448515, + "acc_stderr": 0.004857140410776752, + "acc_norm": 0.4848635729934276, + "acc_norm_stderr": 0.004987494455523726 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.3508771929824561, + "acc_stderr": 0.03660298834049162, + "acc_norm": 0.3508771929824561, + "acc_norm_stderr": 0.03660298834049162 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.23300970873786409, + "acc_stderr": 0.04185832598928315, + "acc_norm": 0.23300970873786409, + "acc_norm_stderr": 0.04185832598928315 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3499361430395913, + "acc_stderr": 0.01705567979715043, + "acc_norm": 0.3499361430395913, + "acc_norm_stderr": 0.01705567979715043 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.04171654161354544, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.04171654161354544 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621503, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621503 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3659574468085106, + "acc_stderr": 0.0314895582974553, + "acc_norm": 0.3659574468085106, + "acc_norm_stderr": 0.0314895582974553 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3132530120481928, + "acc_stderr": 0.036108050180310235, + "acc_norm": 0.3132530120481928, + "acc_norm_stderr": 0.036108050180310235 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.34726688102893893, + "acc_stderr": 0.027040745502307336, + "acc_norm": 0.34726688102893893, + "acc_norm_stderr": 0.027040745502307336 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.35874439461883406, + "acc_stderr": 0.03219079200419996, + "acc_norm": 0.35874439461883406, + "acc_norm_stderr": 0.03219079200419996 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3511450381679389, + "acc_stderr": 0.04186445163013751, + "acc_norm": 0.3511450381679389, + "acc_norm_stderr": 0.04186445163013751 + }, + 
"harness|ko_mmlu_medical_genetics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.29292929292929293, + "acc_stderr": 0.03242497958178817, + "acc_norm": 0.29292929292929293, + "acc_norm_stderr": 0.03242497958178817 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3103448275862069, + "acc_stderr": 0.03855289616378948, + "acc_norm": 0.3103448275862069, + "acc_norm_stderr": 0.03855289616378948 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.14705882352941177, + "acc_stderr": 0.03524068951567449, + "acc_norm": 0.14705882352941177, + "acc_norm_stderr": 0.03524068951567449 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.027553614467863818, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.027553614467863818 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2282051282051282, + "acc_stderr": 0.02127839386358628, + "acc_norm": 0.2282051282051282, + "acc_norm_stderr": 0.02127839386358628 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.044531975073749834, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.044531975073749834 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.26108374384236455, + "acc_stderr": 0.030903796952114475, + "acc_norm": 0.26108374384236455, + "acc_norm_stderr": 0.030903796952114475 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.267741935483871, + "acc_stderr": 0.025189006660212385, + "acc_norm": 0.267741935483871, + 
"acc_norm_stderr": 0.025189006660212385 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.34615384615384615, + "acc_stderr": 0.0311669573672359, + "acc_norm": 0.34615384615384615, + "acc_norm_stderr": 0.0311669573672359 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2792452830188679, + "acc_stderr": 0.027611163402399715, + "acc_norm": 0.2792452830188679, + "acc_norm_stderr": 0.027611163402399715 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2636363636363636, + "acc_stderr": 0.04220224692971987, + "acc_norm": 0.2636363636363636, + "acc_norm_stderr": 0.04220224692971987 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24814814814814815, + "acc_stderr": 0.0263357394040558, + "acc_norm": 0.24814814814814815, + "acc_norm_stderr": 0.0263357394040558 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + "acc_stderr": 0.03684881521389023, + "acc_norm": 0.2847682119205298, + "acc_norm_stderr": 0.03684881521389023 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.32338308457711445, + "acc_stderr": 0.03307615947979035, + "acc_norm": 0.32338308457711445, + "acc_norm_stderr": 0.03307615947979035 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.20809248554913296, + "acc_stderr": 0.0309528902177499, + "acc_norm": 0.20809248554913296, + "acc_norm_stderr": 0.0309528902177499 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.25396825396825395, + "acc_stderr": 0.022418042891113946, + "acc_norm": 0.25396825396825395, + "acc_norm_stderr": 0.022418042891113946 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.18055555555555555, + "acc_stderr": 0.03216600808802269, + "acc_norm": 0.18055555555555555, + "acc_norm_stderr": 0.03216600808802269 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816505, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816505 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.34, + "acc_stderr": 
0.047609522856952365, + "acc_norm": 0.34, + "acc_norm_stderr": 0.047609522856952365 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.3179190751445087, + "acc_stderr": 0.025070713719153193, + "acc_norm": 0.3179190751445087, + "acc_norm_stderr": 0.025070713719153193 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3006134969325153, + "acc_stderr": 0.0360251131880677, + "acc_norm": 0.3006134969325153, + "acc_norm_stderr": 0.0360251131880677 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3271604938271605, + "acc_stderr": 0.026105673861409825, + "acc_norm": 0.3271604938271605, + "acc_norm_stderr": 0.026105673861409825 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.2538860103626943, + "acc_stderr": 0.03141024780565318, + "acc_norm": 0.2538860103626943, + "acc_norm_stderr": 0.03141024780565318 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.20175438596491227, + "acc_stderr": 0.037752050135836386, + "acc_norm": 0.20175438596491227, + "acc_norm_stderr": 0.037752050135836386 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3302752293577982, + "acc_stderr": 0.020164466336342977, + "acc_norm": 0.3302752293577982, + "acc_norm_stderr": 0.020164466336342977 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.19047619047619047, + "acc_stderr": 0.035122074123020534, + "acc_norm": 0.19047619047619047, + "acc_norm_stderr": 0.035122074123020534 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.34967320261437906, + "acc_stderr": 0.0273053080762747, + "acc_norm": 0.34967320261437906, + "acc_norm_stderr": 0.0273053080762747 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 
0.4214876033057851, + "acc_stderr": 0.04507732278775094, + "acc_norm": 0.4214876033057851, + "acc_norm_stderr": 0.04507732278775094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.34210526315789475, + "acc_stderr": 0.03860731599316093, + "acc_norm": 0.34210526315789475, + "acc_norm_stderr": 0.03860731599316093 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2957516339869281, + "acc_stderr": 0.018463154132632813, + "acc_norm": 0.2957516339869281, + "acc_norm_stderr": 0.018463154132632813 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.26595744680851063, + "acc_stderr": 0.026358065698880592, + "acc_norm": 0.26595744680851063, + "acc_norm_stderr": 0.026358065698880592 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3482142857142857, + "acc_stderr": 0.04521829902833585, + "acc_norm": 0.3482142857142857, + "acc_norm_stderr": 0.04521829902833585 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.21296296296296297, + "acc_stderr": 0.02792096314799366, + "acc_norm": 0.21296296296296297, + "acc_norm_stderr": 0.02792096314799366 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2435754189944134, + "acc_stderr": 0.01435591196476786, + "acc_norm": 0.2435754189944134, + "acc_norm_stderr": 0.01435591196476786 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.23, + "acc_stderr": 0.042295258468165065, + "acc_norm": 0.23, + "acc_norm_stderr": 0.042295258468165065 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.29044117647058826, + "acc_stderr": 0.027576468622740512, + "acc_norm": 0.29044117647058826, + "acc_norm_stderr": 0.027576468622740512 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2653061224489796, + "acc_stderr": 0.0282638899437846, + "acc_norm": 0.2653061224489796, + "acc_norm_stderr": 
0.0282638899437846 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.3459915611814346, + "acc_stderr": 0.030964810588786713, + "acc_norm": 0.3459915611814346, + "acc_norm_stderr": 0.030964810588786713 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.27509778357235987, + "acc_stderr": 0.011405443620996943, + "acc_norm": 0.27509778357235987, + "acc_norm_stderr": 0.011405443620996943 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.25980392156862747, + "acc_stderr": 0.030778554678693268, + "acc_norm": 0.25980392156862747, + "acc_norm_stderr": 0.030778554678693268 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3090909090909091, + "acc_stderr": 0.03608541011573967, + "acc_norm": 0.3090909090909091, + "acc_norm_stderr": 0.03608541011573967 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2521419828641371, + "mc1_stderr": 0.01520152224629995, + "mc2": 0.3993620915229942, + "mc2_stderr": 0.015816294888855918 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.26092089728453366, + "acc_stderr": 0.015097836279964204, + "acc_norm": 0.345926800472255, + "acc_norm_stderr": 0.016353853414347575 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + 
"harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "julleong/illuni-llama-2-ko-7b-test", + "model_sha": 
"104fac91a859164fd379c96814788090bbe22e76", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/junga/hjys_LLM_final/result_2024-02-13 04:20:22.json b/junga/hjys_LLM_final/result_2024-02-13 04:20:22.json new file mode 100644 index 0000000000000000000000000000000000000000..9abe411b3b5fe249914e31d930b1b8d0db94ea91 --- /dev/null +++ b/junga/hjys_LLM_final/result_2024-02-13 04:20:22.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2764505119453925, + "acc_stderr": 0.013069662474252428, + "acc_norm": 0.35494880546075086, + "acc_norm_stderr": 0.013983036904094095 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3611830312686716, + "acc_stderr": 0.004793617835645066, + "acc_norm": 0.46046604262099183, + "acc_norm_stderr": 0.0049741595613426925 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.32748538011695905, + "acc_stderr": 0.035993357714560276, + "acc_norm": 0.32748538011695905, + "acc_norm_stderr": 0.035993357714560276 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.1262135922330097, + "acc_stderr": 0.03288180278808629, + "acc_norm": 0.1262135922330097, + "acc_norm_stderr": 0.03288180278808629 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.23754789272030652, + "acc_stderr": 0.015218733046150193, + "acc_norm": 0.23754789272030652, + "acc_norm_stderr": 0.015218733046150193 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.22962962962962963, + "acc_stderr": 0.03633384414073466, + "acc_norm": 0.22962962962962963, + "acc_norm_stderr": 0.03633384414073466 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932268, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932268 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2680851063829787, + "acc_stderr": 0.02895734278834235, + "acc_norm": 0.2680851063829787, + 
"acc_norm_stderr": 0.02895734278834235 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.27710843373493976, + "acc_stderr": 0.03484331592680588, + "acc_norm": 0.27710843373493976, + "acc_norm_stderr": 0.03484331592680588 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2829581993569132, + "acc_stderr": 0.02558306248998483, + "acc_norm": 0.2829581993569132, + "acc_norm_stderr": 0.02558306248998483 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.2645739910313901, + "acc_stderr": 0.029605103217038336, + "acc_norm": 0.2645739910313901, + "acc_norm_stderr": 0.029605103217038336 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2366412213740458, + "acc_stderr": 0.03727673575596918, + "acc_norm": 0.2366412213740458, + "acc_norm_stderr": 0.03727673575596918 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.20707070707070707, + "acc_stderr": 0.02886977846026705, + "acc_norm": 0.20707070707070707, + "acc_norm_stderr": 0.02886977846026705 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2413793103448276, + "acc_stderr": 0.03565998174135302, + "acc_norm": 0.2413793103448276, + "acc_norm_stderr": 0.03565998174135302 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.18627450980392157, + "acc_stderr": 0.03873958714149354, + "acc_norm": 0.18627450980392157, + "acc_norm_stderr": 0.03873958714149354 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.23109243697478993, + "acc_stderr": 0.027381406927868966, + "acc_norm": 0.23109243697478993, + "acc_norm_stderr": 0.027381406927868966 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.23333333333333334, + "acc_stderr": 0.021444547301560472, + "acc_norm": 0.23333333333333334, + "acc_norm_stderr": 0.021444547301560472 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.31, + "acc_stderr": 
0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.15, + "acc_stderr": 0.035887028128263714, + "acc_norm": 0.15, + "acc_norm_stderr": 0.035887028128263714 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.26851851851851855, + "acc_stderr": 0.04284467968052191, + "acc_norm": 0.26851851851851855, + "acc_norm_stderr": 0.04284467968052191 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.16748768472906403, + "acc_stderr": 0.026273086047535407, + "acc_norm": 0.16748768472906403, + "acc_norm_stderr": 0.026273086047535407 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2838709677419355, + "acc_stderr": 0.025649381063029258, + "acc_norm": 0.2838709677419355, + "acc_norm_stderr": 0.025649381063029258 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.3076923076923077, + "acc_stderr": 0.030236389942173095, + "acc_norm": 0.3076923076923077, + "acc_norm_stderr": 0.030236389942173095 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.21132075471698114, + "acc_stderr": 0.025125766484827852, + "acc_norm": 0.21132075471698114, + "acc_norm_stderr": 0.025125766484827852 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03955932861795833, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03955932861795833 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24814814814814815, + "acc_stderr": 0.0263357394040558, + "acc_norm": 0.24814814814814815, + "acc_norm_stderr": 0.0263357394040558 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.24503311258278146, + "acc_stderr": 0.035118075718047245, + "acc_norm": 0.24503311258278146, + "acc_norm_stderr": 0.035118075718047245 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.2537313432835821, + "acc_stderr": 0.03076944496729601, + "acc_norm": 0.2537313432835821, + "acc_norm_stderr": 0.03076944496729601 + }, + "harness|ko_mmlu_college_medicine|5": 
{ + "acc": 0.2023121387283237, + "acc_stderr": 0.030631145539198813, + "acc_norm": 0.2023121387283237, + "acc_norm_stderr": 0.030631145539198813 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.23544973544973544, + "acc_stderr": 0.02185150982203171, + "acc_norm": 0.23544973544973544, + "acc_norm_stderr": 0.02185150982203171 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.03476590104304134, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.03476590104304134 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720683, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720683 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542129, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542129 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.23410404624277456, + "acc_stderr": 0.02279711027807113, + "acc_norm": 0.23410404624277456, + "acc_norm_stderr": 0.02279711027807113 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2392638036809816, + "acc_stderr": 0.0335195387952127, + "acc_norm": 0.2392638036809816, + "acc_norm_stderr": 0.0335195387952127 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.29012345679012347, + "acc_stderr": 0.025251173936495026, + "acc_norm": 0.29012345679012347, + "acc_norm_stderr": 0.025251173936495026 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.19689119170984457, + "acc_stderr": 0.028697873971860695, + "acc_norm": 0.19689119170984457, + "acc_norm_stderr": 0.028697873971860695 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2894736842105263, + "acc_stderr": 0.04266339443159394, + "acc_norm": 0.2894736842105263, + "acc_norm_stderr": 0.04266339443159394 + }, + 
"harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.21100917431192662, + "acc_stderr": 0.017493922404112648, + "acc_norm": 0.21100917431192662, + "acc_norm_stderr": 0.017493922404112648 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.31746031746031744, + "acc_stderr": 0.04163453031302859, + "acc_norm": 0.31746031746031744, + "acc_norm_stderr": 0.04163453031302859 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.024954184324879905, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.024954184324879905 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.33884297520661155, + "acc_stderr": 0.043207678075366726, + "acc_norm": 0.33884297520661155, + "acc_norm_stderr": 0.043207678075366726 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.21052631578947367, + "acc_stderr": 0.03317672787533157, + "acc_norm": 0.21052631578947367, + "acc_norm_stderr": 0.03317672787533157 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2581699346405229, + "acc_stderr": 0.017704531653250075, + "acc_norm": 0.2581699346405229, + "acc_norm_stderr": 0.017704531653250075 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.24468085106382978, + "acc_stderr": 0.025645553622266733, + "acc_norm": 0.24468085106382978, + "acc_norm_stderr": 0.025645553622266733 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.042878587513404544, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.042878587513404544 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.39351851851851855, + "acc_stderr": 0.03331747876370312, + "acc_norm": 0.39351851851851855, + "acc_norm_stderr": 0.03331747876370312 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27262569832402234, + "acc_stderr": 0.014893391735249608, + 
"acc_norm": 0.27262569832402234, + "acc_norm_stderr": 0.014893391735249608 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.18, + "acc_stderr": 0.03861229196653696, + "acc_norm": 0.18, + "acc_norm_stderr": 0.03861229196653696 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.27205882352941174, + "acc_stderr": 0.02703304115168146, + "acc_norm": 0.27205882352941174, + "acc_norm_stderr": 0.02703304115168146 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.20408163265306123, + "acc_stderr": 0.025801283475090492, + "acc_norm": 0.20408163265306123, + "acc_norm_stderr": 0.025801283475090492 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.28270042194092826, + "acc_stderr": 0.02931281415395591, + "acc_norm": 0.28270042194092826, + "acc_norm_stderr": 0.02931281415395591 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2529335071707953, + "acc_stderr": 0.011102268713839987, + "acc_norm": 0.2529335071707953, + "acc_norm_stderr": 0.011102268713839987 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.25, + "acc_stderr": 0.03039153369274154, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03039153369274154 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.22424242424242424, + "acc_stderr": 0.032568666616811015, + "acc_norm": 0.22424242424242424, + "acc_norm_stderr": 0.032568666616811015 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2839657282741738, + "mc1_stderr": 0.015785370858396708, + "mc2": 0.44230178151794036, + "mc2_stderr": 0.015356825692053043 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2408500590318772, + "acc_stderr": 0.014701172662583908, + "acc_norm": 0.371900826446281, + "acc_norm_stderr": 0.016616612843224937 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + 
"harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + 
"harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "junga/hjys_LLM_final", + "model_sha": "6651c951cec355c4a1ab904464ea8cc30ada89d2", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/jungyuko/DAVinCI-42dot_LLM-PLM-1.3B-v0.61/result_2024-01-23 14:40:02.json b/jungyuko/DAVinCI-42dot_LLM-PLM-1.3B-v0.61/result_2024-01-23 14:40:02.json new file mode 100644 index 0000000000000000000000000000000000000000..7bff4be0967f62a864a18427c4401668a079d087 --- /dev/null +++ b/jungyuko/DAVinCI-42dot_LLM-PLM-1.3B-v0.61/result_2024-01-23 14:40:02.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2696245733788396, + "acc_stderr": 0.01296804068686916, + "acc_norm": 0.32764505119453924, + "acc_norm_stderr": 0.01371584794071934 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3571001792471619, + "acc_stderr": 0.004781654610857131, + "acc_norm": 0.45558653654650466, + "acc_norm_stderr": 0.004970057183367326 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.2573099415204678, + "acc_stderr": 0.03352799844161865, + "acc_norm": 0.2573099415204678, + "acc_norm_stderr": 0.03352799844161865 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.24271844660194175, + 
"acc_stderr": 0.04245022486384493, + "acc_norm": 0.24271844660194175, + "acc_norm_stderr": 0.04245022486384493 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.24010217113665389, + "acc_stderr": 0.015274685213734193, + "acc_norm": 0.24010217113665389, + "acc_norm_stderr": 0.015274685213734193 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2074074074074074, + "acc_stderr": 0.03502553170678318, + "acc_norm": 0.2074074074074074, + "acc_norm_stderr": 0.03502553170678318 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.31063829787234043, + "acc_stderr": 0.030251237579213167, + "acc_norm": 0.31063829787234043, + "acc_norm_stderr": 0.030251237579213167 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3072289156626506, + "acc_stderr": 0.03591566797824664, + "acc_norm": 0.3072289156626506, + "acc_norm_stderr": 0.03591566797824664 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2379421221864952, + "acc_stderr": 0.024185150647818707, + "acc_norm": 0.2379421221864952, + "acc_norm_stderr": 0.024185150647818707 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3094170403587444, + "acc_stderr": 0.031024411740572196, + "acc_norm": 0.3094170403587444, + "acc_norm_stderr": 0.031024411740572196 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.26717557251908397, + "acc_stderr": 0.03880848301082397, + "acc_norm": 0.26717557251908397, + "acc_norm_stderr": 0.03880848301082397 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.23737373737373738, + "acc_stderr": 0.03031371053819889, + "acc_norm": 0.23737373737373738, + "acc_norm_stderr": 0.03031371053819889 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 
0.20689655172413793, + "acc_stderr": 0.03375672449560554, + "acc_norm": 0.20689655172413793, + "acc_norm_stderr": 0.03375672449560554 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.04023382273617747, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.04023382273617747 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.27310924369747897, + "acc_stderr": 0.02894200404099817, + "acc_norm": 0.27310924369747897, + "acc_norm_stderr": 0.02894200404099817 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.0224212736129237, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.0224212736129237 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.16666666666666666, + "acc_stderr": 0.036028141763926456, + "acc_norm": 0.16666666666666666, + "acc_norm_stderr": 0.036028141763926456 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3103448275862069, + "acc_stderr": 0.032550867699701024, + "acc_norm": 0.3103448275862069, + "acc_norm_stderr": 0.032550867699701024 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.267741935483871, + "acc_stderr": 0.025189006660212385, + "acc_norm": 0.267741935483871, + "acc_norm_stderr": 0.025189006660212385 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2948717948717949, + "acc_stderr": 0.029872577708891148, + "acc_norm": 0.2948717948717949, + "acc_norm_stderr": 0.029872577708891148 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.24150943396226415, + "acc_stderr": 0.026341480371118355, + "acc_norm": 0.24150943396226415, + "acc_norm_stderr": 0.026341480371118355 + }, 
+ "harness|ko_mmlu_public_relations|5": { + "acc": 0.2636363636363636, + "acc_stderr": 0.04220224692971987, + "acc_norm": 0.2636363636363636, + "acc_norm_stderr": 0.04220224692971987 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.22592592592592592, + "acc_stderr": 0.025497532639609546, + "acc_norm": 0.22592592592592592, + "acc_norm_stderr": 0.025497532639609546 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3708609271523179, + "acc_stderr": 0.03943966699183629, + "acc_norm": 0.3708609271523179, + "acc_norm_stderr": 0.03943966699183629 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.263681592039801, + "acc_stderr": 0.03115715086935558, + "acc_norm": 0.263681592039801, + "acc_norm_stderr": 0.03115715086935558 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.24855491329479767, + "acc_stderr": 0.03295304696818318, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 0.03295304696818318 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2751322751322751, + "acc_stderr": 0.02300008685906864, + "acc_norm": 0.2751322751322751, + "acc_norm_stderr": 0.02300008685906864 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.25, + "acc_stderr": 0.03621034121889507, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03621034121889507 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.23121387283236994, + "acc_stderr": 0.022698657167855713, + "acc_norm": 0.23121387283236994, + "acc_norm_stderr": 0.022698657167855713 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.22699386503067484, + "acc_stderr": 0.03291099578615771, + "acc_norm": 0.22699386503067484, + "acc_norm_stderr": 0.03291099578615771 
+ }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.28703703703703703, + "acc_stderr": 0.025171041915309684, + "acc_norm": 0.28703703703703703, + "acc_norm_stderr": 0.025171041915309684 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.30569948186528495, + "acc_stderr": 0.03324837939758159, + "acc_norm": 0.30569948186528495, + "acc_norm_stderr": 0.03324837939758159 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.03999423879281335, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.03999423879281335 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.23119266055045873, + "acc_stderr": 0.01807575024163315, + "acc_norm": 0.23119266055045873, + "acc_norm_stderr": 0.01807575024163315 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.29365079365079366, + "acc_stderr": 0.04073524322147126, + "acc_norm": 0.29365079365079366, + "acc_norm_stderr": 0.04073524322147126 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.026090162504279025, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.026090162504279025 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.34710743801652894, + "acc_stderr": 0.043457245702925335, + "acc_norm": 0.34710743801652894, + "acc_norm_stderr": 0.043457245702925335 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.17763157894736842, + "acc_stderr": 0.031103182383123377, + "acc_norm": 0.17763157894736842, + "acc_norm_stderr": 0.031103182383123377 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2826797385620915, + "acc_stderr": 0.018217269552053442, + "acc_norm": 
0.2826797385620915, + "acc_norm_stderr": 0.018217269552053442 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2801418439716312, + "acc_stderr": 0.02678917235114024, + "acc_norm": 0.2801418439716312, + "acc_norm_stderr": 0.02678917235114024 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25892857142857145, + "acc_stderr": 0.04157751539865629, + "acc_norm": 0.25892857142857145, + "acc_norm_stderr": 0.04157751539865629 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.38425925925925924, + "acc_stderr": 0.03317354514310742, + "acc_norm": 0.38425925925925924, + "acc_norm_stderr": 0.03317354514310742 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27262569832402234, + "acc_stderr": 0.014893391735249608, + "acc_norm": 0.27262569832402234, + "acc_norm_stderr": 0.014893391735249608 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4227941176470588, + "acc_stderr": 0.030008562845003476, + "acc_norm": 0.4227941176470588, + "acc_norm_stderr": 0.030008562845003476 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2530612244897959, + "acc_stderr": 0.027833023871399683, + "acc_norm": 0.2530612244897959, + "acc_norm_stderr": 0.027833023871399683 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.23628691983122363, + "acc_stderr": 0.02765215314415928, + "acc_norm": 0.23628691983122363, + "acc_norm_stderr": 0.02765215314415928 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.25097783572359844, + "acc_stderr": 0.011073730299187236, + "acc_norm": 0.25097783572359844, + "acc_norm_stderr": 0.011073730299187236 + }, + 
"harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.03096451792692339, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.03096451792692339 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2606060606060606, + "acc_stderr": 0.03427743175816524, + "acc_norm": 0.2606060606060606, + "acc_norm_stderr": 0.03427743175816524 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2607099143206854, + "mc1_stderr": 0.015368841620766372, + "mc2": 0.4063338563893229, + "mc2_stderr": 0.014987872967631773 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.24675324675324675, + "acc_stderr": 0.014822275820015253, + "acc_norm": 0.33884297520661155, + "acc_norm_stderr": 0.01627295299701913 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, 
+ "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "jungyuko/DAVinCI-42dot_LLM-PLM-1.3B-v0.61", + "model_sha": "a4f22cda4fdce2555a1aafd91bc5151d4aec453e", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/jungyuko/DAVinCI-42dot_LLM-PLM-1.3B-v0.63/result_2024-01-24 15:55:12.json b/jungyuko/DAVinCI-42dot_LLM-PLM-1.3B-v0.63/result_2024-01-24 15:55:12.json new file mode 100644 index 
0000000000000000000000000000000000000000..c98d6c288c7631c1a3b6c88a9d986d44efbbb7ca --- /dev/null +++ b/jungyuko/DAVinCI-42dot_LLM-PLM-1.3B-v0.63/result_2024-01-24 15:55:12.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2713310580204778, + "acc_stderr": 0.012993807727545784, + "acc_norm": 0.32593856655290104, + "acc_norm_stderr": 0.013697432466693235 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3545110535749851, + "acc_stderr": 0.004773872456201071, + "acc_norm": 0.4519020115514838, + "acc_norm_stderr": 0.004966640868083861 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.28654970760233917, + "acc_stderr": 0.03467826685703826, + "acc_norm": 0.28654970760233917, + "acc_norm_stderr": 0.03467826685703826 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.1650485436893204, + "acc_stderr": 0.03675668832233188, + "acc_norm": 0.1650485436893204, + "acc_norm_stderr": 0.03675668832233188 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.23627075351213284, + "acc_stderr": 0.015190473717037484, + "acc_norm": 0.23627075351213284, + "acc_norm_stderr": 0.015190473717037484 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2740740740740741, + "acc_stderr": 0.03853254836552003, + "acc_norm": 0.2740740740740741, + "acc_norm_stderr": 0.03853254836552003 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.28085106382978725, + "acc_stderr": 0.029379170464124815, + "acc_norm": 0.28085106382978725, + "acc_norm_stderr": 0.029379170464124815 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.27710843373493976, + "acc_stderr": 0.03484331592680587, + "acc_norm": 0.27710843373493976, + "acc_norm_stderr": 0.03484331592680587 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.28938906752411575, + "acc_stderr": 0.025755865922632945, + "acc_norm": 0.28938906752411575, + 
"acc_norm_stderr": 0.025755865922632945 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.2556053811659193, + "acc_stderr": 0.029275891003969927, + "acc_norm": 0.2556053811659193, + "acc_norm_stderr": 0.029275891003969927 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.29770992366412213, + "acc_stderr": 0.04010358942462203, + "acc_norm": 0.29770992366412213, + "acc_norm_stderr": 0.04010358942462203 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.18, + "acc_stderr": 0.03861229196653697, + "acc_norm": 0.18, + "acc_norm_stderr": 0.03861229196653697 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.2474747474747475, + "acc_stderr": 0.03074630074212451, + "acc_norm": 0.2474747474747475, + "acc_norm_stderr": 0.03074630074212451 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.25517241379310346, + "acc_stderr": 0.03632984052707842, + "acc_norm": 0.25517241379310346, + "acc_norm_stderr": 0.03632984052707842 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.19607843137254902, + "acc_stderr": 0.03950581861179964, + "acc_norm": 0.19607843137254902, + "acc_norm_stderr": 0.03950581861179964 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.027553614467863804, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.027553614467863804 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.23846153846153847, + "acc_stderr": 0.021606294494647727, + "acc_norm": 0.23846153846153847, + "acc_norm_stderr": 0.021606294494647727 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.047609522856952365, + "acc_norm": 0.34, + "acc_norm_stderr": 0.047609522856952365 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.26851851851851855, + "acc_stderr": 0.04284467968052191, + 
"acc_norm": 0.26851851851851855, + "acc_norm_stderr": 0.04284467968052191 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2512315270935961, + "acc_stderr": 0.030516530732694433, + "acc_norm": 0.2512315270935961, + "acc_norm_stderr": 0.030516530732694433 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.27419354838709675, + "acc_stderr": 0.025378139970885196, + "acc_norm": 0.27419354838709675, + "acc_norm_stderr": 0.025378139970885196 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2905982905982906, + "acc_stderr": 0.029745048572674036, + "acc_norm": 0.2905982905982906, + "acc_norm_stderr": 0.029745048572674036 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.22264150943396227, + "acc_stderr": 0.02560423347089909, + "acc_norm": 0.22264150943396227, + "acc_norm_stderr": 0.02560423347089909 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.24545454545454545, + "acc_stderr": 0.04122066502878285, + "acc_norm": 0.24545454545454545, + "acc_norm_stderr": 0.04122066502878285 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.026842057873833706, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.026842057873833706 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3443708609271523, + "acc_stderr": 0.03879687024073327, + "acc_norm": 0.3443708609271523, + "acc_norm_stderr": 0.03879687024073327 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.25870646766169153, + "acc_stderr": 0.03096590312357302, + "acc_norm": 0.25870646766169153, + "acc_norm_stderr": 0.03096590312357302 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.23121387283236994, + "acc_stderr": 0.03214737302029468, + "acc_norm": 0.23121387283236994, + "acc_norm_stderr": 0.03214737302029468 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.24074074074074073, + "acc_stderr": 0.0220190800122179, + "acc_norm": 0.24074074074074073, + "acc_norm_stderr": 0.0220190800122179 + }, + 
"harness|ko_mmlu_college_biology|5": { + "acc": 0.2986111111111111, + "acc_stderr": 0.03827052357950756, + "acc_norm": 0.2986111111111111, + "acc_norm_stderr": 0.03827052357950756 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.23, + "acc_stderr": 0.042295258468165044, + "acc_norm": 0.23, + "acc_norm_stderr": 0.042295258468165044 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2745664739884393, + "acc_stderr": 0.02402774515526503, + "acc_norm": 0.2745664739884393, + "acc_norm_stderr": 0.02402774515526503 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.25153374233128833, + "acc_stderr": 0.034089978868575295, + "acc_norm": 0.25153374233128833, + "acc_norm_stderr": 0.034089978868575295 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.29012345679012347, + "acc_stderr": 0.025251173936495012, + "acc_norm": 0.29012345679012347, + "acc_norm_stderr": 0.025251173936495012 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.3005181347150259, + "acc_stderr": 0.03308818594415752, + "acc_norm": 0.3005181347150259, + "acc_norm_stderr": 0.03308818594415752 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.04049339297748142, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.04049339297748142 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.26972477064220185, + "acc_stderr": 0.019028486711115445, + "acc_norm": 0.26972477064220185, + "acc_norm_stderr": 0.019028486711115445 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.18253968253968253, + "acc_stderr": 0.03455071019102148, + "acc_norm": 0.18253968253968253, + "acc_norm_stderr": 
0.03455071019102148 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.30392156862745096, + "acc_stderr": 0.026336613469046637, + "acc_norm": 0.30392156862745096, + "acc_norm_stderr": 0.026336613469046637 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542129, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542129 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.36363636363636365, + "acc_stderr": 0.04391326286724071, + "acc_norm": 0.36363636363636365, + "acc_norm_stderr": 0.04391326286724071 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.23026315789473684, + "acc_stderr": 0.03426059424403165, + "acc_norm": 0.23026315789473684, + "acc_norm_stderr": 0.03426059424403165 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.018054027458815198, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.018054027458815198 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2872340425531915, + "acc_stderr": 0.026992199173064356, + "acc_norm": 0.2872340425531915, + "acc_norm_stderr": 0.026992199173064356 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.24107142857142858, + "acc_stderr": 0.04059867246952689, + "acc_norm": 0.24107142857142858, + "acc_norm_stderr": 0.04059867246952689 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.03114144782353604, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.03114144782353604 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2748603351955307, + "acc_stderr": 0.014931316703220513, + "acc_norm": 0.2748603351955307, + "acc_norm_stderr": 0.014931316703220513 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 
0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4007352941176471, + "acc_stderr": 0.029768263528933105, + "acc_norm": 0.4007352941176471, + "acc_norm_stderr": 0.029768263528933105 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.23265306122448978, + "acc_stderr": 0.02704925791589618, + "acc_norm": 0.23265306122448978, + "acc_norm_stderr": 0.02704925791589618 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.270042194092827, + "acc_stderr": 0.028900721906293423, + "acc_norm": 0.270042194092827, + "acc_norm_stderr": 0.028900721906293423 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2711864406779661, + "acc_stderr": 0.011354581451622985, + "acc_norm": 0.2711864406779661, + "acc_norm_stderr": 0.011354581451622985 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.29901960784313725, + "acc_stderr": 0.03213325717373616, + "acc_norm": 0.29901960784313725, + "acc_norm_stderr": 0.03213325717373616 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2727272727272727, + "acc_stderr": 0.03477691162163659, + "acc_norm": 0.2727272727272727, + "acc_norm_stderr": 0.03477691162163659 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2631578947368421, + "mc1_stderr": 0.01541524174023703, + "mc2": 0.413658396645113, + "mc2_stderr": 0.014807568817012682 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2502951593860685, + "acc_stderr": 0.014893137573316869, + "acc_norm": 0.358913813459268, + "acc_norm_stderr": 0.01649180210299904 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + 
"harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + 
"harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "jungyuko/DAVinCI-42dot_LLM-PLM-1.3B-v0.63", + "model_sha": "1bb73ac2dd8f697ec7ad94a502207798518ecdb1", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/jungyuko/DAVinCI-42dot_LLM-PLM-1.3B-v0.71/result_2024-01-28 13:26:19.json b/jungyuko/DAVinCI-42dot_LLM-PLM-1.3B-v0.71/result_2024-01-28 13:26:19.json new file mode 100644 index 0000000000000000000000000000000000000000..a88a26fbf62c8ed1421ed82d7ec27f4ebb61b43a --- /dev/null +++ b/jungyuko/DAVinCI-42dot_LLM-PLM-1.3B-v0.71/result_2024-01-28 13:26:19.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.26535836177474403, + "acc_stderr": 0.01290255476231397, + "acc_norm": 0.3174061433447099, + "acc_norm_stderr": 0.01360223908803817 + }, + "harness|ko_hellaswag|10": { + "acc": 0.35550687114120694, + "acc_stderr": 0.004776883632722612, + "acc_norm": 0.4522007568213503, + "acc_norm_stderr": 0.004966928094797576 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.30409356725146197, + "acc_stderr": 0.03528211258245231, + "acc_norm": 0.30409356725146197, + "acc_norm_stderr": 0.03528211258245231 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.1941747572815534, + "acc_stderr": 0.03916667762822586, + "acc_norm": 0.1941747572815534, + "acc_norm_stderr": 0.03916667762822586 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2413793103448276, + "acc_stderr": 0.015302380123542082, + "acc_norm": 0.2413793103448276, + "acc_norm_stderr": 0.015302380123542082 + }, + 
"harness|ko_mmlu_anatomy|5": { + "acc": 0.23703703703703705, + "acc_stderr": 0.03673731683969506, + "acc_norm": 0.23703703703703705, + "acc_norm_stderr": 0.03673731683969506 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3276595744680851, + "acc_stderr": 0.030683020843230997, + "acc_norm": 0.3276595744680851, + "acc_norm_stderr": 0.030683020843230997 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.30120481927710846, + "acc_stderr": 0.035716092300534796, + "acc_norm": 0.30120481927710846, + "acc_norm_stderr": 0.035716092300534796 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2765273311897106, + "acc_stderr": 0.025403832978179604, + "acc_norm": 0.2765273311897106, + "acc_norm_stderr": 0.025403832978179604 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.27802690582959644, + "acc_stderr": 0.030069584874494043, + "acc_norm": 0.27802690582959644, + "acc_norm_stderr": 0.030069584874494043 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2900763358778626, + "acc_stderr": 0.03980066246467765, + "acc_norm": 0.2900763358778626, + "acc_norm_stderr": 0.03980066246467765 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768077, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768077 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.23232323232323232, + "acc_stderr": 0.030088629490217483, + "acc_norm": 0.23232323232323232, + "acc_norm_stderr": 0.030088629490217483 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.21379310344827587, + "acc_stderr": 0.03416520447747549, + "acc_norm": 0.21379310344827587, + "acc_norm_stderr": 0.03416520447747549 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.17647058823529413, + "acc_stderr": 0.03793281185307809, + "acc_norm": 0.17647058823529413, + "acc_norm_stderr": 
0.03793281185307809 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.23109243697478993, + "acc_stderr": 0.027381406927868966, + "acc_norm": 0.23109243697478993, + "acc_norm_stderr": 0.027381406927868966 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.24871794871794872, + "acc_stderr": 0.0219169577092138, + "acc_norm": 0.24871794871794872, + "acc_norm_stderr": 0.0219169577092138 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.18, + "acc_stderr": 0.03861229196653694, + "acc_norm": 0.18, + "acc_norm_stderr": 0.03861229196653694 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.2037037037037037, + "acc_stderr": 0.03893542518824847, + "acc_norm": 0.2037037037037037, + "acc_norm_stderr": 0.03893542518824847 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.28078817733990147, + "acc_stderr": 0.03161856335358611, + "acc_norm": 0.28078817733990147, + "acc_norm_stderr": 0.03161856335358611 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.24838709677419354, + "acc_stderr": 0.024580028921481003, + "acc_norm": 0.24838709677419354, + "acc_norm_stderr": 0.024580028921481003 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.32051282051282054, + "acc_stderr": 0.030572811310299604, + "acc_norm": 0.32051282051282054, + "acc_norm_stderr": 0.030572811310299604 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.25660377358490566, + "acc_stderr": 0.026880647889051996, + "acc_norm": 0.25660377358490566, + "acc_norm_stderr": 0.026880647889051996 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2636363636363636, + "acc_stderr": 0.04220224692971987, + "acc_norm": 0.2636363636363636, + "acc_norm_stderr": 0.04220224692971987 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.29259259259259257, + "acc_stderr": 0.027738969632176088, 
+ "acc_norm": 0.29259259259259257, + "acc_norm_stderr": 0.027738969632176088 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3443708609271523, + "acc_stderr": 0.03879687024073327, + "acc_norm": 0.3443708609271523, + "acc_norm_stderr": 0.03879687024073327 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.2885572139303483, + "acc_stderr": 0.03203841040213321, + "acc_norm": 0.2885572139303483, + "acc_norm_stderr": 0.03203841040213321 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.27167630057803466, + "acc_stderr": 0.03391750322321658, + "acc_norm": 0.27167630057803466, + "acc_norm_stderr": 0.03391750322321658 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.24603174603174602, + "acc_stderr": 0.022182037202948368, + "acc_norm": 0.24603174603174602, + "acc_norm_stderr": 0.022182037202948368 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2708333333333333, + "acc_stderr": 0.03716177437566017, + "acc_norm": 0.2708333333333333, + "acc_norm_stderr": 0.03716177437566017 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.27167630057803466, + "acc_stderr": 0.02394851290546836, + "acc_norm": 0.27167630057803466, + "acc_norm_stderr": 0.02394851290546836 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2883435582822086, + "acc_stderr": 0.03559039531617342, + "acc_norm": 0.2883435582822086, + "acc_norm_stderr": 0.03559039531617342 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2839506172839506, + "acc_stderr": 0.025089478523765127, + "acc_norm": 0.2839506172839506, + "acc_norm_stderr": 0.025089478523765127 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 
0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.29533678756476683, + "acc_stderr": 0.0329229663915514, + "acc_norm": 0.29533678756476683, + "acc_norm_stderr": 0.0329229663915514 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.03999423879281336, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.03999423879281336 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.27522935779816515, + "acc_stderr": 0.019149093743155196, + "acc_norm": 0.27522935779816515, + "acc_norm_stderr": 0.019149093743155196 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.21428571428571427, + "acc_stderr": 0.03670066451047181, + "acc_norm": 0.21428571428571427, + "acc_norm_stderr": 0.03670066451047181 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3006535947712418, + "acc_stderr": 0.02625605383571896, + "acc_norm": 0.3006535947712418, + "acc_norm_stderr": 0.02625605383571896 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.3884297520661157, + "acc_stderr": 0.04449270350068382, + "acc_norm": 0.3884297520661157, + "acc_norm_stderr": 0.04449270350068382 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.2236842105263158, + "acc_stderr": 0.03391160934343603, + "acc_norm": 0.2236842105263158, + "acc_norm_stderr": 0.03391160934343603 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.28594771241830064, + "acc_stderr": 0.018280485072954683, + "acc_norm": 0.28594771241830064, + "acc_norm_stderr": 0.018280485072954683 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.25177304964539005, + "acc_stderr": 0.0258921511567094, + "acc_norm": 0.25177304964539005, + "acc_norm_stderr": 0.0258921511567094 + }, + 
"harness|ko_mmlu_machine_learning|5": { + "acc": 0.19642857142857142, + "acc_stderr": 0.03770970049347019, + "acc_norm": 0.19642857142857142, + "acc_norm_stderr": 0.03770970049347019 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.39351851851851855, + "acc_stderr": 0.03331747876370312, + "acc_norm": 0.39351851851851855, + "acc_norm_stderr": 0.03331747876370312 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27262569832402234, + "acc_stderr": 0.014893391735249608, + "acc_norm": 0.27262569832402234, + "acc_norm_stderr": 0.014893391735249608 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.43014705882352944, + "acc_stderr": 0.030074971917302875, + "acc_norm": 0.43014705882352944, + "acc_norm_stderr": 0.030074971917302875 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2897959183673469, + "acc_stderr": 0.029043088683304335, + "acc_norm": 0.2897959183673469, + "acc_norm_stderr": 0.029043088683304335 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.25738396624472576, + "acc_stderr": 0.028458820991460295, + "acc_norm": 0.25738396624472576, + "acc_norm_stderr": 0.028458820991460295 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2692307692307692, + "acc_stderr": 0.011328734403140304, + "acc_norm": 0.2692307692307692, + "acc_norm_stderr": 0.011328734403140304 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.03132179803083291, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.03132179803083291 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2787878787878788, + "acc_stderr": 
0.035014387062967806, + "acc_norm": 0.2787878787878788, + "acc_norm_stderr": 0.035014387062967806 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2631578947368421, + "mc1_stderr": 0.01541524174023703, + "mc2": 0.41459624116461463, + "mc2_stderr": 0.014809698767517197 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.24557260920897284, + "acc_stderr": 0.01479835715497281, + "acc_norm": 0.3364817001180638, + "acc_norm_stderr": 0.016245085294386556 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + 
"harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "jungyuko/DAVinCI-42dot_LLM-PLM-1.3B-v0.71", + "model_sha": "62068a9e435e26733512d760b76c717786d286f3", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/jungyuko/DAVinCI-42dot_LLM-PLM-1.3B-v0.73/result_2024-01-29 11:32:09.json b/jungyuko/DAVinCI-42dot_LLM-PLM-1.3B-v0.73/result_2024-01-29 11:32:09.json new file mode 100644 index 0000000000000000000000000000000000000000..bea45d270455f01b50e710081866588fc958b1c5 --- /dev/null +++ b/jungyuko/DAVinCI-42dot_LLM-PLM-1.3B-v0.73/result_2024-01-29 11:32:09.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2764505119453925, + "acc_stderr": 
0.013069662474252428, + "acc_norm": 0.3302047781569966, + "acc_norm_stderr": 0.013743085603760433 + }, + "harness|ko_hellaswag|10": { + "acc": 0.35540728938458477, + "acc_stderr": 0.004776583530909569, + "acc_norm": 0.4532961561441944, + "acc_norm_stderr": 0.004967965810199987 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.23976608187134502, + "acc_stderr": 0.03274485211946956, + "acc_norm": 0.23976608187134502, + "acc_norm_stderr": 0.03274485211946956 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.27184466019417475, + "acc_stderr": 0.044052680241409216, + "acc_norm": 0.27184466019417475, + "acc_norm_stderr": 0.044052680241409216 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2567049808429119, + "acc_stderr": 0.015620480263064533, + "acc_norm": 0.2567049808429119, + "acc_norm_stderr": 0.015620480263064533 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2740740740740741, + "acc_stderr": 0.03853254836552004, + "acc_norm": 0.2740740740740741, + "acc_norm_stderr": 0.03853254836552004 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3191489361702128, + "acc_stderr": 0.030472973363380042, + "acc_norm": 0.3191489361702128, + "acc_norm_stderr": 0.030472973363380042 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.30120481927710846, + "acc_stderr": 0.035716092300534796, + "acc_norm": 0.30120481927710846, + "acc_norm_stderr": 0.035716092300534796 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2733118971061093, + "acc_stderr": 0.02531176597542612, + "acc_norm": 0.2733118971061093, + "acc_norm_stderr": 0.02531176597542612 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.273542600896861, + "acc_stderr": 0.029918586707798813, + "acc_norm": 0.273542600896861, + "acc_norm_stderr": 0.029918586707798813 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3511450381679389, + 
"acc_stderr": 0.0418644516301375, + "acc_norm": 0.3511450381679389, + "acc_norm_stderr": 0.0418644516301375 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.23232323232323232, + "acc_stderr": 0.030088629490217483, + "acc_norm": 0.23232323232323232, + "acc_norm_stderr": 0.030088629490217483 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2413793103448276, + "acc_stderr": 0.03565998174135302, + "acc_norm": 0.2413793103448276, + "acc_norm_stderr": 0.03565998174135302 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.17647058823529413, + "acc_stderr": 0.03793281185307809, + "acc_norm": 0.17647058823529413, + "acc_norm_stderr": 0.03793281185307809 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.24369747899159663, + "acc_stderr": 0.027886828078380565, + "acc_norm": 0.24369747899159663, + "acc_norm_stderr": 0.027886828078380565 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.23076923076923078, + "acc_stderr": 0.02136202772522273, + "acc_norm": 0.23076923076923078, + "acc_norm_stderr": 0.02136202772522273 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.044143436668549335, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.044143436668549335 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2955665024630542, + "acc_stderr": 0.032104944337514575, + "acc_norm": 0.2955665024630542, + "acc_norm_stderr": 0.032104944337514575 + }, + "harness|ko_mmlu_high_school_biology|5": 
{ + "acc": 0.25483870967741934, + "acc_stderr": 0.024790118459332208, + "acc_norm": 0.25483870967741934, + "acc_norm_stderr": 0.024790118459332208 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.32051282051282054, + "acc_stderr": 0.030572811310299604, + "acc_norm": 0.32051282051282054, + "acc_norm_stderr": 0.030572811310299604 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.23773584905660378, + "acc_stderr": 0.02619980880756193, + "acc_norm": 0.23773584905660378, + "acc_norm_stderr": 0.02619980880756193 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2636363636363636, + "acc_stderr": 0.04220224692971987, + "acc_norm": 0.2636363636363636, + "acc_norm_stderr": 0.04220224692971987 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24444444444444444, + "acc_stderr": 0.02620276653465215, + "acc_norm": 0.24444444444444444, + "acc_norm_stderr": 0.02620276653465215 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.03757949922943342, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.03757949922943342 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.3034825870646766, + "acc_stderr": 0.032510068164586195, + "acc_norm": 0.3034825870646766, + "acc_norm_stderr": 0.032510068164586195 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2543352601156069, + "acc_stderr": 0.0332055644308557, + "acc_norm": 0.2543352601156069, + "acc_norm_stderr": 0.0332055644308557 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2328042328042328, + "acc_stderr": 0.021765961672154527, + "acc_norm": 0.2328042328042328, + "acc_norm_stderr": 0.021765961672154527 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2361111111111111, + "acc_stderr": 0.03551446610810826, + "acc_norm": 0.2361111111111111, + "acc_norm_stderr": 0.03551446610810826 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 
0.0440844002276808 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.28901734104046245, + "acc_stderr": 0.024405173935783238, + "acc_norm": 0.28901734104046245, + "acc_norm_stderr": 0.024405173935783238 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3006134969325153, + "acc_stderr": 0.03602511318806771, + "acc_norm": 0.3006134969325153, + "acc_norm_stderr": 0.03602511318806771 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.30864197530864196, + "acc_stderr": 0.025702640260603756, + "acc_norm": 0.30864197530864196, + "acc_norm_stderr": 0.025702640260603756 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.2694300518134715, + "acc_stderr": 0.03201867122877794, + "acc_norm": 0.2694300518134715, + "acc_norm_stderr": 0.03201867122877794 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.03999423879281335, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.03999423879281335 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.24770642201834864, + "acc_stderr": 0.01850814360254781, + "acc_norm": 0.24770642201834864, + "acc_norm_stderr": 0.01850814360254781 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.23809523809523808, + "acc_stderr": 0.03809523809523812, + "acc_norm": 0.23809523809523808, + "acc_norm_stderr": 0.03809523809523812 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.02564686309713791, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.02564686309713791 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + 
"acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.38016528925619836, + "acc_stderr": 0.04431324501968431, + "acc_norm": 0.38016528925619836, + "acc_norm_stderr": 0.04431324501968431 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.17763157894736842, + "acc_stderr": 0.03110318238312338, + "acc_norm": 0.17763157894736842, + "acc_norm_stderr": 0.03110318238312338 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2957516339869281, + "acc_stderr": 0.0184631541326328, + "acc_norm": 0.2957516339869281, + "acc_norm_stderr": 0.0184631541326328 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.26595744680851063, + "acc_stderr": 0.026358065698880585, + "acc_norm": 0.26595744680851063, + "acc_norm_stderr": 0.026358065698880585 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.22321428571428573, + "acc_stderr": 0.03952301967702511, + "acc_norm": 0.22321428571428573, + "acc_norm_stderr": 0.03952301967702511 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.27314814814814814, + "acc_stderr": 0.030388051301678116, + "acc_norm": 0.27314814814814814, + "acc_norm_stderr": 0.030388051301678116 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27262569832402234, + "acc_stderr": 0.014893391735249608, + "acc_norm": 0.27262569832402234, + "acc_norm_stderr": 0.014893391735249608 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4227941176470588, + "acc_stderr": 0.030008562845003476, + "acc_norm": 0.4227941176470588, + "acc_norm_stderr": 0.030008562845003476 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2571428571428571, 
+ "acc_stderr": 0.02797982353874455, + "acc_norm": 0.2571428571428571, + "acc_norm_stderr": 0.02797982353874455 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2742616033755274, + "acc_stderr": 0.029041333510598025, + "acc_norm": 0.2742616033755274, + "acc_norm_stderr": 0.029041333510598025 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2620599739243807, + "acc_stderr": 0.011231552795890394, + "acc_norm": 0.2620599739243807, + "acc_norm_stderr": 0.011231552795890394 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.03198001660115071, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.03198001660115071 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.24242424242424243, + "acc_stderr": 0.03346409881055953, + "acc_norm": 0.24242424242424243, + "acc_norm_stderr": 0.03346409881055953 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.28518971848225216, + "mc1_stderr": 0.015805827874454895, + "mc2": 0.4355435185360611, + "mc2_stderr": 0.014910386218502153 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2396694214876033, + "acc_stderr": 0.014676495332267253, + "acc_norm": 0.31641086186540734, + "acc_norm_stderr": 0.015989617951065477 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + 
"harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + 
"model_name": "jungyuko/DAVinCI-42dot_LLM-PLM-1.3B-v0.73", + "model_sha": "7333fb8f19309c49506590b2a7559e858d597150", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/jungyuko/DAVinCI-42dot_LLM-PLM-1.3B-v1.1/result_2024-02-15 05:12:45.json b/jungyuko/DAVinCI-42dot_LLM-PLM-1.3B-v1.1/result_2024-02-15 05:12:45.json new file mode 100644 index 0000000000000000000000000000000000000000..6eaccaa29d974cf54008bfb74a6d83ec7f931bd0 --- /dev/null +++ b/jungyuko/DAVinCI-42dot_LLM-PLM-1.3B-v1.1/result_2024-02-15 05:12:45.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.27303754266211605, + "acc_stderr": 0.013019332762635727, + "acc_norm": 0.3387372013651877, + "acc_norm_stderr": 0.013830568927974332 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3650667197769369, + "acc_stderr": 0.004804649197163699, + "acc_norm": 0.47122087233618803, + "acc_norm_stderr": 0.004981509099276349 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.23976608187134502, + "acc_stderr": 0.03274485211946956, + "acc_norm": 0.23976608187134502, + "acc_norm_stderr": 0.03274485211946956 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2621359223300971, + "acc_stderr": 0.043546310772605956, + "acc_norm": 0.2621359223300971, + "acc_norm_stderr": 0.043546310772605956 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2413793103448276, + "acc_stderr": 0.015302380123542082, + "acc_norm": 0.2413793103448276, + "acc_norm_stderr": 0.015302380123542082 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.23703703703703705, + "acc_stderr": 0.03673731683969506, + "acc_norm": 0.23703703703703705, + "acc_norm_stderr": 0.03673731683969506 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + 
"harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3148936170212766, + "acc_stderr": 0.03036358219723816, + "acc_norm": 0.3148936170212766, + "acc_norm_stderr": 0.03036358219723816 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3313253012048193, + "acc_stderr": 0.03664314777288085, + "acc_norm": 0.3313253012048193, + "acc_norm_stderr": 0.03664314777288085 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2379421221864952, + "acc_stderr": 0.024185150647818707, + "acc_norm": 0.2379421221864952, + "acc_norm_stderr": 0.024185150647818707 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.18834080717488788, + "acc_stderr": 0.026241132996407256, + "acc_norm": 0.18834080717488788, + "acc_norm_stderr": 0.026241132996407256 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.33587786259541985, + "acc_stderr": 0.041423137719966634, + "acc_norm": 0.33587786259541985, + "acc_norm_stderr": 0.041423137719966634 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.03358618145732522, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.03358618145732522 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2689655172413793, + "acc_stderr": 0.036951833116502325, + "acc_norm": 0.2689655172413793, + "acc_norm_stderr": 0.036951833116502325 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.043898699568087785, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.043898699568087785 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3403361344537815, + "acc_stderr": 0.030778057422931666, + "acc_norm": 0.3403361344537815, + "acc_norm_stderr": 0.030778057422931666 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3435897435897436, + "acc_stderr": 0.024078696580635474, + 
"acc_norm": 0.3435897435897436, + "acc_norm_stderr": 0.024078696580635474 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.22, + "acc_stderr": 0.041633319989322674, + "acc_norm": 0.22, + "acc_norm_stderr": 0.041633319989322674 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.24074074074074073, + "acc_stderr": 0.04133119440243839, + "acc_norm": 0.24074074074074073, + "acc_norm_stderr": 0.04133119440243839 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.270935960591133, + "acc_stderr": 0.031270907132976984, + "acc_norm": 0.270935960591133, + "acc_norm_stderr": 0.031270907132976984 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3096774193548387, + "acc_stderr": 0.026302774983517414, + "acc_norm": 0.3096774193548387, + "acc_norm_stderr": 0.026302774983517414 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2264957264957265, + "acc_stderr": 0.027421007295392912, + "acc_norm": 0.2264957264957265, + "acc_norm_stderr": 0.027421007295392912 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2943396226415094, + "acc_stderr": 0.028049186315695248, + "acc_norm": 0.2943396226415094, + "acc_norm_stderr": 0.028049186315695248 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2909090909090909, + "acc_stderr": 0.04350271442923243, + "acc_norm": 0.2909090909090909, + "acc_norm_stderr": 0.04350271442923243 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.28888888888888886, + "acc_stderr": 0.027634907264178544, + "acc_norm": 0.28888888888888886, + "acc_norm_stderr": 0.027634907264178544 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3443708609271523, + "acc_stderr": 0.03879687024073327, + "acc_norm": 0.3443708609271523, + "acc_norm_stderr": 0.03879687024073327 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.3482587064676617, + 
"acc_stderr": 0.033687874661154596, + "acc_norm": 0.3482587064676617, + "acc_norm_stderr": 0.033687874661154596 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2832369942196532, + "acc_stderr": 0.03435568056047875, + "acc_norm": 0.2832369942196532, + "acc_norm_stderr": 0.03435568056047875 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2671957671957672, + "acc_stderr": 0.02278967314577657, + "acc_norm": 0.2671957671957672, + "acc_norm_stderr": 0.02278967314577657 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.25, + "acc_stderr": 0.03621034121889507, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03621034121889507 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720683, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720683 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.21965317919075145, + "acc_stderr": 0.0222896388526179, + "acc_norm": 0.21965317919075145, + "acc_norm_stderr": 0.0222896388526179 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.294478527607362, + "acc_stderr": 0.03581165790474082, + "acc_norm": 0.294478527607362, + "acc_norm_stderr": 0.03581165790474082 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2654320987654321, + "acc_stderr": 0.02456922360046085, + "acc_norm": 0.2654320987654321, + "acc_norm_stderr": 0.02456922360046085 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.35751295336787564, + "acc_stderr": 0.03458816042181005, + "acc_norm": 0.35751295336787564, + "acc_norm_stderr": 0.03458816042181005 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2982456140350877, + "acc_stderr": 
0.043036840335373173, + "acc_norm": 0.2982456140350877, + "acc_norm_stderr": 0.043036840335373173 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3247706422018349, + "acc_stderr": 0.02007772910931033, + "acc_norm": 0.3247706422018349, + "acc_norm_stderr": 0.02007772910931033 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.04134913018303316, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.04134913018303316 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.02656892101545715, + "acc_norm": 0.3137254901960784, + "acc_norm_stderr": 0.02656892101545715 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.16, + "acc_stderr": 0.03684529491774711, + "acc_norm": 0.16, + "acc_norm_stderr": 0.03684529491774711 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.36363636363636365, + "acc_stderr": 0.04391326286724071, + "acc_norm": 0.36363636363636365, + "acc_norm_stderr": 0.04391326286724071 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.21052631578947367, + "acc_stderr": 0.03317672787533158, + "acc_norm": 0.21052631578947367, + "acc_norm_stderr": 0.03317672787533158 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.01716058723504635, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.01716058723504635 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2801418439716312, + "acc_stderr": 0.026789172351140242, + "acc_norm": 0.2801418439716312, + "acc_norm_stderr": 0.026789172351140242 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.1875, + "acc_stderr": 0.0370468111477387, + "acc_norm": 0.1875, + "acc_norm_stderr": 0.0370468111477387 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.03388857118502326, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.03388857118502326 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 
0.27262569832402234, + "acc_stderr": 0.014893391735249608, + "acc_norm": 0.27262569832402234, + "acc_norm_stderr": 0.014893391735249608 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036844, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036844 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4338235294117647, + "acc_stderr": 0.030105636570016636, + "acc_norm": 0.4338235294117647, + "acc_norm_stderr": 0.030105636570016636 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3183673469387755, + "acc_stderr": 0.029822533793982066, + "acc_norm": 0.3183673469387755, + "acc_norm_stderr": 0.029822533793982066 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2320675105485232, + "acc_stderr": 0.02747974455080852, + "acc_norm": 0.2320675105485232, + "acc_norm_stderr": 0.02747974455080852 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.24119947848761408, + "acc_stderr": 0.01092649610203495, + "acc_norm": 0.24119947848761408, + "acc_norm_stderr": 0.01092649610203495 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2107843137254902, + "acc_stderr": 0.02862654791243738, + "acc_norm": 0.2107843137254902, + "acc_norm_stderr": 0.02862654791243738 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2727272727272727, + "acc_stderr": 0.0347769116216366, + "acc_norm": 0.2727272727272727, + "acc_norm_stderr": 0.0347769116216366 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.25091799265605874, + "mc1_stderr": 0.015176985027707693, + "mc2": 0.4130495656901895, + "mc2_stderr": 0.014863494570793674 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.26564344746162927, + "acc_stderr": 0.015185107107791246, + "acc_norm": 0.33412042502951594, + "acc_norm_stderr": 0.016216763304239695 + } + 
}, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + 
"harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "jungyuko/DAVinCI-42dot_LLM-PLM-1.3B-v1.1", + "model_sha": "d1eb4e228223508eb3fdf6cd7c483fb4765dac36", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/jungyuko/DAVinCI-42dot_LLM-PLM-1.3B-v1.12/result_2024-02-21 01:57:00.json b/jungyuko/DAVinCI-42dot_LLM-PLM-1.3B-v1.12/result_2024-02-21 01:57:00.json new file mode 100644 index 0000000000000000000000000000000000000000..f200fde09f1db83e0704fff78f620d2125c342ef --- /dev/null +++ b/jungyuko/DAVinCI-42dot_LLM-PLM-1.3B-v1.12/result_2024-02-21 01:57:00.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.27303754266211605, + "acc_stderr": 0.01301933276263573, + "acc_norm": 0.32764505119453924, + "acc_norm_stderr": 0.01371584794071934 + }, + "harness|ko_hellaswag|10": { + "acc": 0.36008763194582755, + "acc_stderr": 0.004790445139186364, + "acc_norm": 0.46395140410276836, + "acc_norm_stderr": 0.004976796060456438 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.2046783625730994, + "acc_stderr": 0.030944459778533214, + "acc_norm": 0.2046783625730994, + "acc_norm_stderr": 0.030944459778533214 + }, + 
"harness|ko_mmlu_management|5": { + "acc": 0.21359223300970873, + "acc_stderr": 0.04058042015646034, + "acc_norm": 0.21359223300970873, + "acc_norm_stderr": 0.04058042015646034 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.26053639846743293, + "acc_stderr": 0.01569600856380709, + "acc_norm": 0.26053639846743293, + "acc_norm_stderr": 0.01569600856380709 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2740740740740741, + "acc_stderr": 0.03853254836552003, + "acc_norm": 0.2740740740740741, + "acc_norm_stderr": 0.03853254836552003 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2978723404255319, + "acc_stderr": 0.02989614568209546, + "acc_norm": 0.2978723404255319, + "acc_norm_stderr": 0.02989614568209546 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.29518072289156627, + "acc_stderr": 0.0355092018568963, + "acc_norm": 0.29518072289156627, + "acc_norm_stderr": 0.0355092018568963 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2540192926045016, + "acc_stderr": 0.02472386150477169, + "acc_norm": 0.2540192926045016, + "acc_norm_stderr": 0.02472386150477169 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.2556053811659193, + "acc_stderr": 0.029275891003969923, + "acc_norm": 0.2556053811659193, + "acc_norm_stderr": 0.029275891003969923 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2900763358778626, + "acc_stderr": 0.03980066246467765, + "acc_norm": 0.2900763358778626, + "acc_norm_stderr": 0.03980066246467765 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.25757575757575757, + "acc_stderr": 0.03115626951964684, + "acc_norm": 0.25757575757575757, + "acc_norm_stderr": 0.03115626951964684 + }, + 
"harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.25517241379310346, + "acc_stderr": 0.03632984052707842, + "acc_norm": 0.25517241379310346, + "acc_norm_stderr": 0.03632984052707842 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.044405219061793254, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.044405219061793254 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.2773109243697479, + "acc_stderr": 0.02907937453948001, + "acc_norm": 0.2773109243697479, + "acc_norm_stderr": 0.02907937453948001 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.26153846153846155, + "acc_stderr": 0.022282141204204423, + "acc_norm": 0.26153846153846155, + "acc_norm_stderr": 0.022282141204204423 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.28703703703703703, + "acc_stderr": 0.043733130409147614, + "acc_norm": 0.28703703703703703, + "acc_norm_stderr": 0.043733130409147614 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2413793103448276, + "acc_stderr": 0.030108330718011625, + "acc_norm": 0.2413793103448276, + "acc_norm_stderr": 0.030108330718011625 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2870967741935484, + "acc_stderr": 0.025736542745594525, + "acc_norm": 0.2870967741935484, + "acc_norm_stderr": 0.025736542745594525 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.27350427350427353, + "acc_stderr": 0.029202540153431177, + "acc_norm": 0.27350427350427353, + "acc_norm_stderr": 0.029202540153431177 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2339622641509434, + "acc_stderr": 0.02605529690115292, + "acc_norm": 
0.2339622641509434, + "acc_norm_stderr": 0.02605529690115292 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.3090909090909091, + "acc_stderr": 0.044262946482000985, + "acc_norm": 0.3090909090909091, + "acc_norm_stderr": 0.044262946482000985 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25555555555555554, + "acc_stderr": 0.026593939101844082, + "acc_norm": 0.25555555555555554, + "acc_norm_stderr": 0.026593939101844082 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3509933774834437, + "acc_stderr": 0.03896981964257375, + "acc_norm": 0.3509933774834437, + "acc_norm_stderr": 0.03896981964257375 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.24378109452736318, + "acc_stderr": 0.030360490154014638, + "acc_norm": 0.24378109452736318, + "acc_norm_stderr": 0.030360490154014638 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.19653179190751446, + "acc_stderr": 0.030299574664788137, + "acc_norm": 0.19653179190751446, + "acc_norm_stderr": 0.030299574664788137 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.25396825396825395, + "acc_stderr": 0.022418042891113946, + "acc_norm": 0.25396825396825395, + "acc_norm_stderr": 0.022418042891113946 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2986111111111111, + "acc_stderr": 0.03827052357950756, + "acc_norm": 0.2986111111111111, + "acc_norm_stderr": 0.03827052357950756 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909284, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909284 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.21965317919075145, + "acc_stderr": 0.022289638852617904, + "acc_norm": 0.21965317919075145, + "acc_norm_stderr": 0.022289638852617904 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.26993865030674846, + 
"acc_stderr": 0.03487825168497892, + "acc_norm": 0.26993865030674846, + "acc_norm_stderr": 0.03487825168497892 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.02492200116888633, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.02492200116888633 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.25906735751295334, + "acc_stderr": 0.031618779179354094, + "acc_norm": 0.25906735751295334, + "acc_norm_stderr": 0.031618779179354094 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.042270544512322, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.042270544512322 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.25321100917431194, + "acc_stderr": 0.018644073041375046, + "acc_norm": 0.25321100917431194, + "acc_norm_stderr": 0.018644073041375046 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.29365079365079366, + "acc_stderr": 0.040735243221471255, + "acc_norm": 0.29365079365079366, + "acc_norm_stderr": 0.040735243221471255 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.025829163272757468, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.025829163272757468 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.4049586776859504, + "acc_stderr": 0.044811377559424694, + "acc_norm": 0.4049586776859504, + "acc_norm_stderr": 0.044811377559424694 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.19736842105263158, + "acc_stderr": 0.03238981601699397, + "acc_norm": 0.19736842105263158, + "acc_norm_stderr": 0.03238981601699397 + }, + 
"harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2434640522875817, + "acc_stderr": 0.017362473762146634, + "acc_norm": 0.2434640522875817, + "acc_norm_stderr": 0.017362473762146634 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.26595744680851063, + "acc_stderr": 0.026358065698880582, + "acc_norm": 0.26595744680851063, + "acc_norm_stderr": 0.026358065698880582 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.24107142857142858, + "acc_stderr": 0.04059867246952687, + "acc_norm": 0.24107142857142858, + "acc_norm_stderr": 0.04059867246952687 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.0316746870682898, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.0316746870682898 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27262569832402234, + "acc_stderr": 0.014893391735249608, + "acc_norm": 0.27262569832402234, + "acc_norm_stderr": 0.014893391735249608 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816505, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816505 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4338235294117647, + "acc_stderr": 0.030105636570016636, + "acc_norm": 0.4338235294117647, + "acc_norm_stderr": 0.030105636570016636 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.22448979591836735, + "acc_stderr": 0.026711430555538405, + "acc_norm": 0.22448979591836735, + "acc_norm_stderr": 0.026711430555538405 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.3291139240506329, + "acc_stderr": 0.03058732629470237, + "acc_norm": 0.3291139240506329, + "acc_norm_stderr": 0.03058732629470237 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.258148631029987, + "acc_stderr": 0.011176923719313394, + 
"acc_norm": 0.258148631029987, + "acc_norm_stderr": 0.011176923719313394 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.03132179803083292, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.03132179803083292 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3151515151515151, + "acc_stderr": 0.0362773057502241, + "acc_norm": 0.3151515151515151, + "acc_norm_stderr": 0.0362773057502241 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2460220318237454, + "mc1_stderr": 0.015077219200662574, + "mc2": 0.4033435595764378, + "mc2_stderr": 0.014622286711088409 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2680047225501771, + "acc_stderr": 0.015227905796335147, + "acc_norm": 0.358913813459268, + "acc_norm_stderr": 0.016491802102999036 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "jungyuko/DAVinCI-42dot_LLM-PLM-1.3B-v1.12", + "model_sha": "b3daf615e1a87d2a3301c5ce454b75b9b0a741e9", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/jungyuko/DAVinCI-42dot_LLM-PLM-1.3B-v1.2/result_2024-02-27 01:50:28.json 
b/jungyuko/DAVinCI-42dot_LLM-PLM-1.3B-v1.2/result_2024-02-27 01:50:28.json new file mode 100644 index 0000000000000000000000000000000000000000..70dac39e00c8d94bc768e6139a8f10e54fe83803 --- /dev/null +++ b/jungyuko/DAVinCI-42dot_LLM-PLM-1.3B-v1.2/result_2024-02-27 01:50:28.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.28242320819112626, + "acc_stderr": 0.013155456884097218, + "acc_norm": 0.3267918088737201, + "acc_norm_stderr": 0.01370666597558734 + }, + "harness|ko_hellaswag|10": { + "acc": 0.35968930491933876, + "acc_stderr": 0.004789284723955851, + "acc_norm": 0.4592710615415256, + "acc_norm_stderr": 0.004973199296339979 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.30409356725146197, + "acc_stderr": 0.03528211258245232, + "acc_norm": 0.30409356725146197, + "acc_norm_stderr": 0.03528211258245232 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.21359223300970873, + "acc_stderr": 0.04058042015646035, + "acc_norm": 0.21359223300970873, + "acc_norm_stderr": 0.04058042015646035 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2260536398467433, + "acc_stderr": 0.014957458504335825, + "acc_norm": 0.2260536398467433, + "acc_norm_stderr": 0.014957458504335825 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2074074074074074, + "acc_stderr": 0.03502553170678317, + "acc_norm": 0.2074074074074074, + "acc_norm_stderr": 0.03502553170678317 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542129, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542129 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3148936170212766, + "acc_stderr": 0.030363582197238167, + "acc_norm": 0.3148936170212766, + "acc_norm_stderr": 0.030363582197238167 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.25903614457831325, + "acc_stderr": 0.034106466140718564, + "acc_norm": 0.25903614457831325, + "acc_norm_stderr": 0.034106466140718564 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 
0.2733118971061093, + "acc_stderr": 0.02531176597542612, + "acc_norm": 0.2733118971061093, + "acc_norm_stderr": 0.02531176597542612 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.16591928251121077, + "acc_stderr": 0.02496755319654715, + "acc_norm": 0.16591928251121077, + "acc_norm_stderr": 0.02496755319654715 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2366412213740458, + "acc_stderr": 0.037276735755969174, + "acc_norm": 0.2366412213740458, + "acc_norm_stderr": 0.037276735755969174 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.03191178226713547, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.03191178226713547 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.22758620689655173, + "acc_stderr": 0.03493950380131184, + "acc_norm": 0.22758620689655173, + "acc_norm_stderr": 0.03493950380131184 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.04617034827006718, + "acc_norm": 0.3137254901960784, + "acc_norm_stderr": 0.04617034827006718 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.029344572500634335, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.029344572500634335 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2641025641025641, + "acc_stderr": 0.02235219373745327, + "acc_norm": 0.2641025641025641, + "acc_norm_stderr": 0.02235219373745327 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932269, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932269 + }, + 
"harness|ko_mmlu_jurisprudence|5": { + "acc": 0.23148148148148148, + "acc_stderr": 0.04077494709252628, + "acc_norm": 0.23148148148148148, + "acc_norm_stderr": 0.04077494709252628 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.20689655172413793, + "acc_stderr": 0.02850137816789395, + "acc_norm": 0.20689655172413793, + "acc_norm_stderr": 0.02850137816789395 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2870967741935484, + "acc_stderr": 0.025736542745594528, + "acc_norm": 0.2870967741935484, + "acc_norm_stderr": 0.025736542745594528 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.24358974358974358, + "acc_stderr": 0.028120966503914414, + "acc_norm": 0.24358974358974358, + "acc_norm_stderr": 0.028120966503914414 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2943396226415094, + "acc_stderr": 0.028049186315695248, + "acc_norm": 0.2943396226415094, + "acc_norm_stderr": 0.028049186315695248 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.23636363636363636, + "acc_stderr": 0.040693063197213754, + "acc_norm": 0.23636363636363636, + "acc_norm_stderr": 0.040693063197213754 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.29259259259259257, + "acc_stderr": 0.02773896963217609, + "acc_norm": 0.29259259259259257, + "acc_norm_stderr": 0.02773896963217609 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33774834437086093, + "acc_stderr": 0.038615575462551684, + "acc_norm": 0.33774834437086093, + "acc_norm_stderr": 0.038615575462551684 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.2935323383084577, + "acc_stderr": 0.032200241045342054, + "acc_norm": 0.2935323383084577, + "acc_norm_stderr": 0.032200241045342054 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.21965317919075145, + "acc_stderr": 0.03156809362703174, + "acc_norm": 0.21965317919075145, + "acc_norm_stderr": 0.03156809362703174 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.25925925925925924, + 
"acc_stderr": 0.02256989707491842, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.02256989707491842 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2638888888888889, + "acc_stderr": 0.03685651095897532, + "acc_norm": 0.2638888888888889, + "acc_norm_stderr": 0.03685651095897532 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909284, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909284 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.22832369942196531, + "acc_stderr": 0.02259870380432162, + "acc_norm": 0.22832369942196531, + "acc_norm_stderr": 0.02259870380432162 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.26380368098159507, + "acc_stderr": 0.034624199316156234, + "acc_norm": 0.26380368098159507, + "acc_norm_stderr": 0.034624199316156234 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.25, + "acc_stderr": 0.02409347123262133, + "acc_norm": 0.25, + "acc_norm_stderr": 0.02409347123262133 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.31088082901554404, + "acc_stderr": 0.03340361906276588, + "acc_norm": 0.31088082901554404, + "acc_norm_stderr": 0.03340361906276588 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2894736842105263, + "acc_stderr": 0.04266339443159394, + "acc_norm": 0.2894736842105263, + "acc_norm_stderr": 0.04266339443159394 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.27522935779816515, + "acc_stderr": 0.0191490937431552, + "acc_norm": 0.27522935779816515, + "acc_norm_stderr": 0.0191490937431552 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3412698412698413, + "acc_stderr": 
0.04240799327574924, + "acc_norm": 0.3412698412698413, + "acc_norm_stderr": 0.04240799327574924 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.27124183006535946, + "acc_stderr": 0.02545775669666786, + "acc_norm": 0.27124183006535946, + "acc_norm_stderr": 0.02545775669666786 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.22, + "acc_stderr": 0.041633319989322695, + "acc_norm": 0.22, + "acc_norm_stderr": 0.041633319989322695 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.35537190082644626, + "acc_stderr": 0.04369236326573981, + "acc_norm": 0.35537190082644626, + "acc_norm_stderr": 0.04369236326573981 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.21052631578947367, + "acc_stderr": 0.03317672787533158, + "acc_norm": 0.21052631578947367, + "acc_norm_stderr": 0.03317672787533158 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.27941176470588236, + "acc_stderr": 0.018152871051538802, + "acc_norm": 0.27941176470588236, + "acc_norm_stderr": 0.018152871051538802 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.24468085106382978, + "acc_stderr": 0.02564555362226673, + "acc_norm": 0.24468085106382978, + "acc_norm_stderr": 0.02564555362226673 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.23214285714285715, + "acc_stderr": 0.040073418097558065, + "acc_norm": 0.23214285714285715, + "acc_norm_stderr": 0.040073418097558065 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.26851851851851855, + "acc_stderr": 0.030225226160012404, + "acc_norm": 0.26851851851851855, + "acc_norm_stderr": 0.030225226160012404 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27262569832402234, + "acc_stderr": 0.014893391735249608, + "acc_norm": 0.27262569832402234, + "acc_norm_stderr": 0.014893391735249608 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + 
"harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.44485294117647056, + "acc_stderr": 0.030187532060329383, + "acc_norm": 0.44485294117647056, + "acc_norm_stderr": 0.030187532060329383 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.24081632653061225, + "acc_stderr": 0.027372942201788153, + "acc_norm": 0.24081632653061225, + "acc_norm_stderr": 0.027372942201788153 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2320675105485232, + "acc_stderr": 0.02747974455080852, + "acc_norm": 0.2320675105485232, + "acc_norm_stderr": 0.02747974455080852 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2692307692307692, + "acc_stderr": 0.011328734403140316, + "acc_norm": 0.2692307692307692, + "acc_norm_stderr": 0.011328734403140316 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.030190282453501954, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.030190282453501954 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.23030303030303031, + "acc_stderr": 0.03287666758603488, + "acc_norm": 0.23030303030303031, + "acc_norm_stderr": 0.03287666758603488 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.30354957160342716, + "mc1_stderr": 0.016095884155386847, + "mc2": 0.45251819811100263, + "mc2_stderr": 0.014805394473577047 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.26210153482880755, + "acc_stderr": 0.015119864670254151, + "acc_norm": 0.41440377804014167, + "acc_norm_stderr": 0.016936583383943632 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 
1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + 
"harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "jungyuko/DAVinCI-42dot_LLM-PLM-1.3B-v1.2", + "model_sha": "ad70c3b4272d5d249d50b24c47a4ca696602577e", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/jungyuko/DAVinCI-42dot_LLM-PLM-1.3B-v1.4/result_2024-03-05 02:14:22.json b/jungyuko/DAVinCI-42dot_LLM-PLM-1.3B-v1.4/result_2024-03-05 02:14:22.json new file mode 100644 index 0000000000000000000000000000000000000000..551b83ca54b6f911357fdc7081f7ae82a38ddf70 --- /dev/null +++ b/jungyuko/DAVinCI-42dot_LLM-PLM-1.3B-v1.4/result_2024-03-05 02:14:22.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2858361774744027, + "acc_stderr": 0.013203196088537364, + "acc_norm": 0.3438566552901024, + "acc_norm_stderr": 0.013880644570156203 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3652658832901812, + "acc_stderr": 0.004805205798724566, + "acc_norm": 0.4667396932881896, + "acc_norm_stderr": 0.004978729300074891 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.03188578017686398, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.03188578017686398 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.22330097087378642, + "acc_stderr": 0.04123553189891431, + "acc_norm": 0.22330097087378642, + "acc_norm_stderr": 0.04123553189891431 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2707535121328225, + "acc_stderr": 0.015889888362560486, + 
"acc_norm": 0.2707535121328225, + "acc_norm_stderr": 0.015889888362560486 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2740740740740741, + "acc_stderr": 0.03853254836552003, + "acc_norm": 0.2740740740740741, + "acc_norm_stderr": 0.03853254836552003 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2723404255319149, + "acc_stderr": 0.0291012906983867, + "acc_norm": 0.2723404255319149, + "acc_norm_stderr": 0.0291012906983867 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3132530120481928, + "acc_stderr": 0.03610805018031023, + "acc_norm": 0.3132530120481928, + "acc_norm_stderr": 0.03610805018031023 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.26688102893890675, + "acc_stderr": 0.025122637608816646, + "acc_norm": 0.26688102893890675, + "acc_norm_stderr": 0.025122637608816646 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.2242152466367713, + "acc_stderr": 0.027991534258519534, + "acc_norm": 0.2242152466367713, + "acc_norm_stderr": 0.027991534258519534 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2900763358778626, + "acc_stderr": 0.03980066246467765, + "acc_norm": 0.2900763358778626, + "acc_norm_stderr": 0.03980066246467765 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.02962022787479048, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.02962022787479048 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.296551724137931, + "acc_stderr": 0.03806142687309993, + "acc_norm": 0.296551724137931, + "acc_norm_stderr": 0.03806142687309993 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.27450980392156865, + "acc_stderr": 
0.04440521906179327, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.04440521906179327 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.28991596638655465, + "acc_stderr": 0.029472485833136098, + "acc_norm": 0.28991596638655465, + "acc_norm_stderr": 0.029472485833136098 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.23076923076923078, + "acc_stderr": 0.02136202772522273, + "acc_norm": 0.23076923076923078, + "acc_norm_stderr": 0.02136202772522273 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816505, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816505 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542129, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542129 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.042365112580946315, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.042365112580946315 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2561576354679803, + "acc_stderr": 0.0307127300709826, + "acc_norm": 0.2561576354679803, + "acc_norm_stderr": 0.0307127300709826 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2967741935483871, + "acc_stderr": 0.025988500792411898, + "acc_norm": 0.2967741935483871, + "acc_norm_stderr": 0.025988500792411898 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.26495726495726496, + "acc_stderr": 0.028911208802749472, + "acc_norm": 0.26495726495726496, + "acc_norm_stderr": 0.028911208802749472 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2490566037735849, + "acc_stderr": 0.0266164829805017, + "acc_norm": 0.2490566037735849, + "acc_norm_stderr": 0.0266164829805017 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2818181818181818, + "acc_stderr": 0.043091187099464585, + "acc_norm": 0.2818181818181818, + "acc_norm_stderr": 0.043091187099464585 + }, + 
"harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24444444444444444, + "acc_stderr": 0.026202766534652148, + "acc_norm": 0.24444444444444444, + "acc_norm_stderr": 0.026202766534652148 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3576158940397351, + "acc_stderr": 0.03913453431177258, + "acc_norm": 0.3576158940397351, + "acc_norm_stderr": 0.03913453431177258 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.24378109452736318, + "acc_stderr": 0.03036049015401465, + "acc_norm": 0.24378109452736318, + "acc_norm_stderr": 0.03036049015401465 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.19653179190751446, + "acc_stderr": 0.030299574664788147, + "acc_norm": 0.19653179190751446, + "acc_norm_stderr": 0.030299574664788147 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.25396825396825395, + "acc_stderr": 0.022418042891113946, + "acc_norm": 0.25396825396825395, + "acc_norm_stderr": 0.022418042891113946 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.22916666666666666, + "acc_stderr": 0.035146974678623884, + "acc_norm": 0.22916666666666666, + "acc_norm_stderr": 0.035146974678623884 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2658959537572254, + "acc_stderr": 0.023786203255508277, + "acc_norm": 0.2658959537572254, + "acc_norm_stderr": 0.023786203255508277 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3006134969325153, + "acc_stderr": 0.03602511318806771, + "acc_norm": 0.3006134969325153, + "acc_norm_stderr": 0.03602511318806771 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.27469135802469136, + "acc_stderr": 0.024836057868294677, + "acc_norm": 0.27469135802469136, + 
"acc_norm_stderr": 0.024836057868294677 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.3316062176165803, + "acc_stderr": 0.03397636541089116, + "acc_norm": 0.3316062176165803, + "acc_norm_stderr": 0.03397636541089116 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.041424397194893624, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.041424397194893624 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.25321100917431194, + "acc_stderr": 0.018644073041375046, + "acc_norm": 0.25321100917431194, + "acc_norm_stderr": 0.018644073041375046 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.0393253768039287, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.0393253768039287 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.30718954248366015, + "acc_stderr": 0.026415601914389006, + "acc_norm": 0.30718954248366015, + "acc_norm_stderr": 0.026415601914389006 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909282, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909282 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.371900826446281, + "acc_stderr": 0.044120158066245044, + "acc_norm": 0.371900826446281, + "acc_norm_stderr": 0.044120158066245044 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.19736842105263158, + "acc_stderr": 0.03238981601699397, + "acc_norm": 0.19736842105263158, + "acc_norm_stderr": 0.03238981601699397 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2630718954248366, + "acc_stderr": 0.017812676542320657, + "acc_norm": 0.2630718954248366, + "acc_norm_stderr": 0.017812676542320657 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2695035460992908, + "acc_stderr": 
0.026469036818590634, + "acc_norm": 0.2695035460992908, + "acc_norm_stderr": 0.026469036818590634 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.20535714285714285, + "acc_stderr": 0.03834241021419073, + "acc_norm": 0.20535714285714285, + "acc_norm_stderr": 0.03834241021419073 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4305555555555556, + "acc_stderr": 0.03376922151252336, + "acc_norm": 0.4305555555555556, + "acc_norm_stderr": 0.03376922151252336 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27262569832402234, + "acc_stderr": 0.014893391735249608, + "acc_norm": 0.27262569832402234, + "acc_norm_stderr": 0.014893391735249608 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720683, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720683 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4264705882352941, + "acc_stderr": 0.030042615832714864, + "acc_norm": 0.4264705882352941, + "acc_norm_stderr": 0.030042615832714864 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.27755102040816326, + "acc_stderr": 0.028666857790274655, + "acc_norm": 0.27755102040816326, + "acc_norm_stderr": 0.028666857790274655 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.31223628691983124, + "acc_stderr": 0.030165137867847004, + "acc_norm": 0.31223628691983124, + "acc_norm_stderr": 0.030165137867847004 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2627118644067797, + "acc_stderr": 0.011240545514995667, + "acc_norm": 0.2627118644067797, + "acc_norm_stderr": 0.011240545514995667 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.030190282453501954, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.030190282453501954 + }, + 
"harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.28484848484848485, + "acc_stderr": 0.035243908445117836, + "acc_norm": 0.28484848484848485, + "acc_norm_stderr": 0.035243908445117836 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.30966952264381886, + "mc1_stderr": 0.016185744355144912, + "mc2": 0.4428976595831895, + "mc2_stderr": 0.014947262095052873 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2632821723730815, + "acc_stderr": 0.0151417521995732, + "acc_norm": 0.3022432113341204, + "acc_norm_stderr": 0.015788654863022375 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + 
"harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "jungyuko/DAVinCI-42dot_LLM-PLM-1.3B-v1.4", + "model_sha": "41052056ae6cbbee32475d910c28232a7da55ee4", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/jungyuko/DAVinCI-42dot_LLM-PLM-1.3B-v1.5.3/result_2024-03-06 07:40:52.json b/jungyuko/DAVinCI-42dot_LLM-PLM-1.3B-v1.5.3/result_2024-03-06 07:40:52.json new file mode 100644 index 0000000000000000000000000000000000000000..423fb298888df7fd973c50aa93ae69970387023b --- /dev/null +++ b/jungyuko/DAVinCI-42dot_LLM-PLM-1.3B-v1.5.3/result_2024-03-06 07:40:52.json @@ -0,0 +1,444 @@ +{ + 
"results": { + "harness|ko_arc_challenge|25": { + "acc": 0.27559726962457337, + "acc_stderr": 0.01305716965576184, + "acc_norm": 0.33276450511945393, + "acc_norm_stderr": 0.013769863046192305 + }, + "harness|ko_hellaswag|10": { + "acc": 0.36496713802031466, + "acc_stderr": 0.004804370563856226, + "acc_norm": 0.465345548695479, + "acc_norm_stderr": 0.004977782217582459 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.29239766081871343, + "acc_stderr": 0.03488647713457923, + "acc_norm": 0.29239766081871343, + "acc_norm_stderr": 0.03488647713457923 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.1650485436893204, + "acc_stderr": 0.036756688322331886, + "acc_norm": 0.1650485436893204, + "acc_norm_stderr": 0.036756688322331886 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.24265644955300128, + "acc_stderr": 0.015329888940899879, + "acc_norm": 0.24265644955300128, + "acc_norm_stderr": 0.015329888940899879 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.03785714465066653, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.03785714465066653 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.31063829787234043, + "acc_stderr": 0.03025123757921317, + "acc_norm": 0.31063829787234043, + "acc_norm_stderr": 0.03025123757921317 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.30120481927710846, + "acc_stderr": 0.035716092300534796, + "acc_norm": 0.30120481927710846, + "acc_norm_stderr": 0.035716092300534796 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.26688102893890675, + "acc_stderr": 0.025122637608816646, + "acc_norm": 0.26688102893890675, + "acc_norm_stderr": 0.025122637608816646 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.23318385650224216, + "acc_stderr": 0.028380391147094716, + "acc_norm": 0.23318385650224216, + 
"acc_norm_stderr": 0.028380391147094716 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.25190839694656486, + "acc_stderr": 0.03807387116306086, + "acc_norm": 0.25190839694656486, + "acc_norm_stderr": 0.03807387116306086 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909282, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909282 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.20202020202020202, + "acc_stderr": 0.028606204289229872, + "acc_norm": 0.20202020202020202, + "acc_norm_stderr": 0.028606204289229872 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.25517241379310346, + "acc_stderr": 0.03632984052707842, + "acc_norm": 0.25517241379310346, + "acc_norm_stderr": 0.03632984052707842 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.04280105837364395, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.04280105837364395 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.028657491285071977, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.028657491285071977 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2128205128205128, + "acc_stderr": 0.020752423722128006, + "acc_norm": 0.2128205128205128, + "acc_norm_stderr": 0.020752423722128006 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816505, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816505 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720683, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720683 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.28703703703703703, + "acc_stderr": 0.043733130409147614, + "acc_norm": 0.28703703703703703, + "acc_norm_stderr": 0.043733130409147614 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.24630541871921183, + "acc_stderr": 0.03031509928561773, + 
"acc_norm": 0.24630541871921183, + "acc_norm_stderr": 0.03031509928561773 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.26129032258064516, + "acc_stderr": 0.024993053397764815, + "acc_norm": 0.26129032258064516, + "acc_norm_stderr": 0.024993053397764815 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2905982905982906, + "acc_stderr": 0.029745048572674043, + "acc_norm": 0.2905982905982906, + "acc_norm_stderr": 0.029745048572674043 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2339622641509434, + "acc_stderr": 0.02605529690115292, + "acc_norm": 0.2339622641509434, + "acc_norm_stderr": 0.02605529690115292 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2727272727272727, + "acc_stderr": 0.04265792110940588, + "acc_norm": 0.2727272727272727, + "acc_norm_stderr": 0.04265792110940588 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24074074074074073, + "acc_stderr": 0.026067159222275805, + "acc_norm": 0.24074074074074073, + "acc_norm_stderr": 0.026067159222275805 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3576158940397351, + "acc_stderr": 0.03913453431177258, + "acc_norm": 0.3576158940397351, + "acc_norm_stderr": 0.03913453431177258 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.03333333333333333, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.03333333333333333 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2138728323699422, + "acc_stderr": 0.03126511206173043, + "acc_norm": 0.2138728323699422, + "acc_norm_stderr": 0.03126511206173043 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.023266512213730557, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.023266512213730557 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2152777777777778, + "acc_stderr": 0.03437079344106135, + "acc_norm": 0.2152777777777778, + "acc_norm_stderr": 0.03437079344106135 + }, + 
"harness|ko_mmlu_college_chemistry|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816503, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816503 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2543352601156069, + "acc_stderr": 0.02344582627654554, + "acc_norm": 0.2543352601156069, + "acc_norm_stderr": 0.02344582627654554 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2822085889570552, + "acc_stderr": 0.03536117886664743, + "acc_norm": 0.2822085889570552, + "acc_norm_stderr": 0.03536117886664743 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2932098765432099, + "acc_stderr": 0.025329888171900926, + "acc_norm": 0.2932098765432099, + "acc_norm_stderr": 0.025329888171900926 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.21243523316062177, + "acc_stderr": 0.029519282616817247, + "acc_norm": 0.21243523316062177, + "acc_norm_stderr": 0.029519282616817247 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2894736842105263, + "acc_stderr": 0.04266339443159394, + "acc_norm": 0.2894736842105263, + "acc_norm_stderr": 0.04266339443159394 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.25688073394495414, + "acc_stderr": 0.01873249292834246, + "acc_norm": 0.25688073394495414, + "acc_norm_stderr": 0.01873249292834246 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.040406101782088394, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.040406101782088394 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.28104575163398693, + "acc_stderr": 0.025738854797818712, + "acc_norm": 0.28104575163398693, + "acc_norm_stderr": 
0.025738854797818712 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.371900826446281, + "acc_stderr": 0.044120158066245044, + "acc_norm": 0.371900826446281, + "acc_norm_stderr": 0.044120158066245044 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.21052631578947367, + "acc_stderr": 0.033176727875331574, + "acc_norm": 0.21052631578947367, + "acc_norm_stderr": 0.033176727875331574 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.272875816993464, + "acc_stderr": 0.01802047414839358, + "acc_norm": 0.272875816993464, + "acc_norm_stderr": 0.01802047414839358 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2730496453900709, + "acc_stderr": 0.026577860943307857, + "acc_norm": 0.2730496453900709, + "acc_norm_stderr": 0.026577860943307857 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.26785714285714285, + "acc_stderr": 0.04203277291467761, + "acc_norm": 0.26785714285714285, + "acc_norm_stderr": 0.04203277291467761 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.25, + "acc_stderr": 0.029531221160930918, + "acc_norm": 0.25, + "acc_norm_stderr": 0.029531221160930918 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27262569832402234, + "acc_stderr": 0.014893391735249608, + "acc_norm": 0.27262569832402234, + "acc_norm_stderr": 0.014893391735249608 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.39705882352941174, + "acc_stderr": 0.029722152099280065, + "acc_norm": 0.39705882352941174, + 
"acc_norm_stderr": 0.029722152099280065 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2693877551020408, + "acc_stderr": 0.02840125202902294, + "acc_norm": 0.2693877551020408, + "acc_norm_stderr": 0.02840125202902294 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.27848101265822783, + "acc_stderr": 0.02917868230484256, + "acc_norm": 0.27848101265822783, + "acc_norm_stderr": 0.02917868230484256 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2405475880052151, + "acc_stderr": 0.010916406735478947, + "acc_norm": 0.2405475880052151, + "acc_norm_stderr": 0.010916406735478947 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.24019607843137256, + "acc_stderr": 0.02998373305591361, + "acc_norm": 0.24019607843137256, + "acc_norm_stderr": 0.02998373305591361 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2545454545454545, + "acc_stderr": 0.0340150671524904, + "acc_norm": 0.2545454545454545, + "acc_norm_stderr": 0.0340150671524904 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3011015911872705, + "mc1_stderr": 0.016058999026100612, + "mc2": 0.44880648999860945, + "mc2_stderr": 0.014904840052367791 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.25737898465171194, + "acc_stderr": 0.015030899730346766, + "acc_norm": 0.3246753246753247, + "acc_norm_stderr": 0.01609888393934646 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 
1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + 
"harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "jungyuko/DAVinCI-42dot_LLM-PLM-1.3B-v1.5.3", + "model_sha": "0b7e6a0c2d9e3054aede6d76e520a5fa65c138f5", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/jungyuko/DAVinCI-Yi-Ko-6B-v0.61-ff-e1/result_2024-01-24 08:24:44.json b/jungyuko/DAVinCI-Yi-Ko-6B-v0.61-ff-e1/result_2024-01-24 08:24:44.json new file mode 100644 index 0000000000000000000000000000000000000000..0d23b03a594a62767a552bc1b1015ddbf8c9efdf --- /dev/null +++ b/jungyuko/DAVinCI-Yi-Ko-6B-v0.61-ff-e1/result_2024-01-24 08:24:44.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.34897610921501704, + "acc_stderr": 0.013928933461382506, + "acc_norm": 0.40784982935153585, + "acc_norm_stderr": 0.014361097288449698 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3979286994622585, + "acc_stderr": 0.0048847024124560965, + "acc_norm": 0.5332603067118104, + "acc_norm_stderr": 0.004978729300074891 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5146198830409356, + "acc_stderr": 0.038331852752130254, + "acc_norm": 0.5146198830409356, + "acc_norm_stderr": 0.038331852752130254 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5922330097087378, + "acc_stderr": 0.04865777570410769, + "acc_norm": 0.5922330097087378, + "acc_norm_stderr": 0.04865777570410769 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5683269476372924, + "acc_stderr": 0.01771222893929979, + "acc_norm": 0.5683269476372924, + "acc_norm_stderr": 0.01771222893929979 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4962962962962963, + "acc_stderr": 0.043192236258113303, + "acc_norm": 0.4962962962962963, + "acc_norm_stderr": 0.043192236258113303 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206824, 
+ "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206824 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39148936170212767, + "acc_stderr": 0.03190701242326812, + "acc_norm": 0.39148936170212767, + "acc_norm_stderr": 0.03190701242326812 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3674698795180723, + "acc_stderr": 0.03753267402120574, + "acc_norm": 0.3674698795180723, + "acc_norm_stderr": 0.03753267402120574 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5112540192926045, + "acc_stderr": 0.028390897396863533, + "acc_norm": 0.5112540192926045, + "acc_norm_stderr": 0.028390897396863533 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5201793721973094, + "acc_stderr": 0.033530461674123, + "acc_norm": 0.5201793721973094, + "acc_norm_stderr": 0.033530461674123 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.549618320610687, + "acc_stderr": 0.04363643698524779, + "acc_norm": 0.549618320610687, + "acc_norm_stderr": 0.04363643698524779 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5404040404040404, + "acc_stderr": 0.035507024651313425, + "acc_norm": 0.5404040404040404, + "acc_norm_stderr": 0.035507024651313425 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4689655172413793, + "acc_stderr": 0.04158632762097828, + "acc_norm": 0.4689655172413793, + "acc_norm_stderr": 0.04158632762097828 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.04023382273617746, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.04023382273617746 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4579831932773109, + "acc_stderr": 0.03236361111951941, + "acc_norm": 0.4579831932773109, + "acc_norm_stderr": 0.03236361111951941 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 
0.4230769230769231, + "acc_stderr": 0.025049197876042338, + "acc_norm": 0.4230769230769231, + "acc_norm_stderr": 0.025049197876042338 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5092592592592593, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.5092592592592593, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3645320197044335, + "acc_stderr": 0.0338640574606209, + "acc_norm": 0.3645320197044335, + "acc_norm_stderr": 0.0338640574606209 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4290322580645161, + "acc_stderr": 0.02815603653823321, + "acc_norm": 0.4290322580645161, + "acc_norm_stderr": 0.02815603653823321 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6965811965811965, + "acc_stderr": 0.030118210106942652, + "acc_norm": 0.6965811965811965, + "acc_norm_stderr": 0.030118210106942652 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.42641509433962266, + "acc_stderr": 0.030437794342983052, + "acc_norm": 0.42641509433962266, + "acc_norm_stderr": 0.030437794342983052 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5, + "acc_stderr": 0.04789131426105757, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04789131426105757 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.027940457136228405, + "acc_norm": 0.3, + "acc_norm_stderr": 0.027940457136228405 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.26490066225165565, + "acc_stderr": 0.03603038545360383, + "acc_norm": 0.26490066225165565, + "acc_norm_stderr": 0.03603038545360383 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5621890547263682, + "acc_stderr": 
0.0350808011219984, + "acc_norm": 0.5621890547263682, + "acc_norm_stderr": 0.0350808011219984 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.36416184971098264, + "acc_stderr": 0.03669072477416907, + "acc_norm": 0.36416184971098264, + "acc_norm_stderr": 0.03669072477416907 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3253968253968254, + "acc_stderr": 0.02413015829976261, + "acc_norm": 0.3253968253968254, + "acc_norm_stderr": 0.02413015829976261 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4236111111111111, + "acc_stderr": 0.041321250197233685, + "acc_norm": 0.4236111111111111, + "acc_norm_stderr": 0.041321250197233685 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5057803468208093, + "acc_stderr": 0.02691729617914911, + "acc_norm": 0.5057803468208093, + "acc_norm_stderr": 0.02691729617914911 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4539877300613497, + "acc_stderr": 0.0391170190467718, + "acc_norm": 0.4539877300613497, + "acc_norm_stderr": 0.0391170190467718 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4567901234567901, + "acc_stderr": 0.027716661650194038, + "acc_norm": 0.4567901234567901, + "acc_norm_stderr": 0.027716661650194038 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.538860103626943, + "acc_stderr": 0.03597524411734578, + "acc_norm": 0.538860103626943, + "acc_norm_stderr": 0.03597524411734578 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3157894736842105, + 
"acc_stderr": 0.04372748290278007, + "acc_norm": 0.3157894736842105, + "acc_norm_stderr": 0.04372748290278007 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5926605504587156, + "acc_stderr": 0.021065986244412874, + "acc_norm": 0.5926605504587156, + "acc_norm_stderr": 0.021065986244412874 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.29365079365079366, + "acc_stderr": 0.040735243221471255, + "acc_norm": 0.29365079365079366, + "acc_norm_stderr": 0.040735243221471255 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4150326797385621, + "acc_stderr": 0.028213504177824093, + "acc_norm": 0.4150326797385621, + "acc_norm_stderr": 0.028213504177824093 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6446280991735537, + "acc_stderr": 0.0436923632657398, + "acc_norm": 0.6446280991735537, + "acc_norm_stderr": 0.0436923632657398 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4342105263157895, + "acc_stderr": 0.04033565667848319, + "acc_norm": 0.4342105263157895, + "acc_norm_stderr": 0.04033565667848319 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.41013071895424835, + "acc_stderr": 0.019898412717635896, + "acc_norm": 0.41013071895424835, + "acc_norm_stderr": 0.019898412717635896 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3475177304964539, + "acc_stderr": 0.02840662780959095, + "acc_norm": 0.3475177304964539, + "acc_norm_stderr": 0.02840662780959095 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.38392857142857145, + "acc_stderr": 0.046161430750285455, + "acc_norm": 0.38392857142857145, + "acc_norm_stderr": 0.046161430750285455 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.25462962962962965, + "acc_stderr": 0.029711275860005326, + "acc_norm": 0.25462962962962965, + "acc_norm_stderr": 0.029711275860005326 + }, + 
"harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2223463687150838, + "acc_stderr": 0.013907189208156881, + "acc_norm": 0.2223463687150838, + "acc_norm_stderr": 0.013907189208156881 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956913, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956913 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.34558823529411764, + "acc_stderr": 0.028888193103988647, + "acc_norm": 0.34558823529411764, + "acc_norm_stderr": 0.028888193103988647 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3795918367346939, + "acc_stderr": 0.031067211262872485, + "acc_norm": 0.3795918367346939, + "acc_norm_stderr": 0.031067211262872485 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6160337552742616, + "acc_stderr": 0.031658678064106674, + "acc_norm": 0.6160337552742616, + "acc_norm_stderr": 0.031658678064106674 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3305084745762712, + "acc_stderr": 0.012014142101842972, + "acc_norm": 0.3305084745762712, + "acc_norm_stderr": 0.012014142101842972 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.47549019607843135, + "acc_stderr": 0.03505093194348798, + "acc_norm": 0.47549019607843135, + "acc_norm_stderr": 0.03505093194348798 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.03903698647748441, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.03903698647748441 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2533659730722154, + "mc1_stderr": 0.015225899340826837, + "mc2": 0.41906862548761753, + "mc2_stderr": 0.014974350977362081 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4037780401416765, + "acc_stderr": 0.01686903154029863, + "acc_norm": 
0.46162927981109797, + "acc_norm_stderr": 0.017139660221845553 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + 
"harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "jungyuko/DAVinCI-Yi-Ko-6B-v0.61-ff-e1", + "model_sha": "92d30d6daf6fcfc78fcf0ef0490a8c049bca1ce1", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/jungyuko/DAVinCI-Yi-Ko-6B-v0.8/result_2024-01-31 12:25:35.json b/jungyuko/DAVinCI-Yi-Ko-6B-v0.8/result_2024-01-31 12:25:35.json new file mode 100644 index 0000000000000000000000000000000000000000..d4590417e7079af31a2dd24180e624020bf5e658 --- /dev/null +++ b/jungyuko/DAVinCI-Yi-Ko-6B-v0.8/result_2024-01-31 12:25:35.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.35580204778157, + "acc_stderr": 0.013990571137918758, + "acc_norm": 0.41467576791808874, + "acc_norm_stderr": 0.014397070564409174 + }, + "harness|ko_hellaswag|10": { + "acc": 0.39892451702848036, + "acc_stderr": 0.004886764243204056, + "acc_norm": 0.5360485958972316, + "acc_norm_stderr": 0.004976796060456436 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5321637426900585, + "acc_stderr": 0.038268824176603704, + "acc_norm": 0.5321637426900585, + 
"acc_norm_stderr": 0.038268824176603704 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5533980582524272, + "acc_stderr": 0.04922424153458933, + "acc_norm": 0.5533980582524272, + "acc_norm_stderr": 0.04922424153458933 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5606641123882503, + "acc_stderr": 0.017747874245683602, + "acc_norm": 0.5606641123882503, + "acc_norm_stderr": 0.017747874245683602 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.43703703703703706, + "acc_stderr": 0.04284958639753399, + "acc_norm": 0.43703703703703706, + "acc_norm_stderr": 0.04284958639753399 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39574468085106385, + "acc_stderr": 0.031967586978353627, + "acc_norm": 0.39574468085106385, + "acc_norm_stderr": 0.031967586978353627 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3674698795180723, + "acc_stderr": 0.03753267402120574, + "acc_norm": 0.3674698795180723, + "acc_norm_stderr": 0.03753267402120574 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.48231511254019294, + "acc_stderr": 0.02838032284907713, + "acc_norm": 0.48231511254019294, + "acc_norm_stderr": 0.02838032284907713 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.48878923766816146, + "acc_stderr": 0.033549366530984746, + "acc_norm": 0.48878923766816146, + "acc_norm_stderr": 0.033549366530984746 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5190839694656488, + "acc_stderr": 0.04382094705550988, + "acc_norm": 0.5190839694656488, + "acc_norm_stderr": 0.04382094705550988 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6161616161616161, + "acc_stderr": 0.034648816750163375, + "acc_norm": 0.6161616161616161, + 
"acc_norm_stderr": 0.034648816750163375 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4413793103448276, + "acc_stderr": 0.04137931034482758, + "acc_norm": 0.4413793103448276, + "acc_norm_stderr": 0.04137931034482758 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.042207736591714534, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.042207736591714534 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5168067226890757, + "acc_stderr": 0.03246013680375308, + "acc_norm": 0.5168067226890757, + "acc_norm_stderr": 0.03246013680375308 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4358974358974359, + "acc_stderr": 0.025141801511177498, + "acc_norm": 0.4358974358974359, + "acc_norm_stderr": 0.025141801511177498 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.63, + "acc_stderr": 0.04852365870939098, + "acc_norm": 0.63, + "acc_norm_stderr": 0.04852365870939098 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3497536945812808, + "acc_stderr": 0.03355400904969566, + "acc_norm": 0.3497536945812808, + "acc_norm_stderr": 0.03355400904969566 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.44516129032258067, + "acc_stderr": 0.028272410186214906, + "acc_norm": 0.44516129032258067, + "acc_norm_stderr": 0.028272410186214906 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7094017094017094, + "acc_stderr": 0.029745048572674064, + "acc_norm": 0.7094017094017094, + "acc_norm_stderr": 0.029745048572674064 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.47924528301886793, + "acc_stderr": 
0.030746349975723463, + "acc_norm": 0.47924528301886793, + "acc_norm_stderr": 0.030746349975723463 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5, + "acc_stderr": 0.04789131426105757, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04789131426105757 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3074074074074074, + "acc_stderr": 0.028133252578815635, + "acc_norm": 0.3074074074074074, + "acc_norm_stderr": 0.028133252578815635 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.03757949922943343, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.03757949922943343 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5572139303482587, + "acc_stderr": 0.03512310964123936, + "acc_norm": 0.5572139303482587, + "acc_norm_stderr": 0.03512310964123936 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.37572254335260113, + "acc_stderr": 0.03692820767264867, + "acc_norm": 0.37572254335260113, + "acc_norm_stderr": 0.03692820767264867 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.31216931216931215, + "acc_stderr": 0.023865206836972602, + "acc_norm": 0.31216931216931215, + "acc_norm_stderr": 0.023865206836972602 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4583333333333333, + "acc_stderr": 0.04166666666666666, + "acc_norm": 0.4583333333333333, + "acc_norm_stderr": 0.04166666666666666 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.53, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.53, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.48554913294797686, + "acc_stderr": 0.026907849856282542, + "acc_norm": 0.48554913294797686, + "acc_norm_stderr": 0.026907849856282542 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4601226993865031, + 
"acc_stderr": 0.0391585729143697, + "acc_norm": 0.4601226993865031, + "acc_norm_stderr": 0.0391585729143697 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.44753086419753085, + "acc_stderr": 0.027667138569422715, + "acc_norm": 0.44753086419753085, + "acc_norm_stderr": 0.027667138569422715 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.04605661864718381, + "acc_norm": 0.3, + "acc_norm_stderr": 0.04605661864718381 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5803108808290155, + "acc_stderr": 0.035615873276858834, + "acc_norm": 0.5803108808290155, + "acc_norm_stderr": 0.035615873276858834 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04434600701584926, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04434600701584926 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5981651376146789, + "acc_stderr": 0.02102010617299701, + "acc_norm": 0.5981651376146789, + "acc_norm_stderr": 0.02102010617299701 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.36507936507936506, + "acc_stderr": 0.04306241259127152, + "acc_norm": 0.36507936507936506, + "acc_norm_stderr": 0.04306241259127152 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.42483660130718953, + "acc_stderr": 0.028304576673141114, + "acc_norm": 0.42483660130718953, + "acc_norm_stderr": 0.028304576673141114 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6446280991735537, + "acc_stderr": 0.0436923632657398, + "acc_norm": 0.6446280991735537, + "acc_norm_stderr": 0.0436923632657398 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4605263157894737, + "acc_stderr": 0.04056242252249033, + "acc_norm": 0.4605263157894737, + "acc_norm_stderr": 0.04056242252249033 + }, + "harness|ko_mmlu_professional_psychology|5": { + 
"acc": 0.4068627450980392, + "acc_stderr": 0.019873802005061173, + "acc_norm": 0.4068627450980392, + "acc_norm_stderr": 0.019873802005061173 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3475177304964539, + "acc_stderr": 0.028406627809590947, + "acc_norm": 0.3475177304964539, + "acc_norm_stderr": 0.028406627809590947 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.33035714285714285, + "acc_stderr": 0.04464285714285714, + "acc_norm": 0.33035714285714285, + "acc_norm_stderr": 0.04464285714285714 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3101851851851852, + "acc_stderr": 0.03154696285656628, + "acc_norm": 0.3101851851851852, + "acc_norm_stderr": 0.03154696285656628 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2435754189944134, + "acc_stderr": 0.014355911964767864, + "acc_norm": 0.2435754189944134, + "acc_norm_stderr": 0.014355911964767864 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.41544117647058826, + "acc_stderr": 0.029935342707877753, + "acc_norm": 0.41544117647058826, + "acc_norm_stderr": 0.029935342707877753 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3795918367346939, + "acc_stderr": 0.031067211262872478, + "acc_norm": 0.3795918367346939, + "acc_norm_stderr": 0.031067211262872478 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.569620253164557, + "acc_stderr": 0.03223017195937599, + "acc_norm": 0.569620253164557, + "acc_norm_stderr": 0.03223017195937599 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.32920469361147325, + "acc_stderr": 0.01200209166690232, + "acc_norm": 0.32920469361147325, + 
"acc_norm_stderr": 0.01200209166690232 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5245098039215687, + "acc_stderr": 0.035050931943487976, + "acc_norm": 0.5245098039215687, + "acc_norm_stderr": 0.035050931943487976 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5151515151515151, + "acc_stderr": 0.03902551007374448, + "acc_norm": 0.5151515151515151, + "acc_norm_stderr": 0.03902551007374448 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3390452876376989, + "mc1_stderr": 0.016571797910626622, + "mc2": 0.48010278029225234, + "mc2_stderr": 0.014971977362403386 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3837072018890201, + "acc_stderr": 0.016718924637231826, + "acc_norm": 0.42502951593860683, + "acc_norm_stderr": 0.016996016308362887 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "jungyuko/DAVinCI-Yi-Ko-6B-v0.8", + "model_sha": "06b4548612428aeca595206117cf7cd8d04ae58e", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/jungyuko/DAVinCI-Yi-Ko-6B-v1.1/result_2024-02-16 09:04:51.json b/jungyuko/DAVinCI-Yi-Ko-6B-v1.1/result_2024-02-16 09:04:51.json new file 
mode 100644 index 0000000000000000000000000000000000000000..8c69d4b8e1dc9392199181df9905c47ced6fea00 --- /dev/null +++ b/jungyuko/DAVinCI-Yi-Ko-6B-v1.1/result_2024-02-16 09:04:51.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3848122866894198, + "acc_stderr": 0.014218371065251091, + "acc_norm": 0.4308873720136519, + "acc_norm_stderr": 0.014471133392642463 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4193387771360287, + "acc_stderr": 0.004924424018073674, + "acc_norm": 0.5540728938458475, + "acc_norm_stderr": 0.004960516570284905 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5087719298245614, + "acc_stderr": 0.03834234744164993, + "acc_norm": 0.5087719298245614, + "acc_norm_stderr": 0.03834234744164993 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5533980582524272, + "acc_stderr": 0.04922424153458933, + "acc_norm": 0.5533980582524272, + "acc_norm_stderr": 0.04922424153458933 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.01776925058353325, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.01776925058353325 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4222222222222222, + "acc_stderr": 0.04266763404099582, + "acc_norm": 0.4222222222222222, + "acc_norm_stderr": 0.04266763404099582 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768077, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768077 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.451063829787234, + "acc_stderr": 0.03252909619613197, + "acc_norm": 0.451063829787234, + "acc_norm_stderr": 0.03252909619613197 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.37349397590361444, + "acc_stderr": 0.03765845117168863, + "acc_norm": 0.37349397590361444, + "acc_norm_stderr": 0.03765845117168863 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5209003215434084, + "acc_stderr": 0.028373270961069414, + "acc_norm": 0.5209003215434084, + 
"acc_norm_stderr": 0.028373270961069414 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4663677130044843, + "acc_stderr": 0.033481800170603065, + "acc_norm": 0.4663677130044843, + "acc_norm_stderr": 0.033481800170603065 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5572519083969466, + "acc_stderr": 0.04356447202665069, + "acc_norm": 0.5572519083969466, + "acc_norm_stderr": 0.04356447202665069 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6515151515151515, + "acc_stderr": 0.03394853965156402, + "acc_norm": 0.6515151515151515, + "acc_norm_stderr": 0.03394853965156402 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.496551724137931, + "acc_stderr": 0.041665675771015785, + "acc_norm": 0.496551724137931, + "acc_norm_stderr": 0.041665675771015785 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.17647058823529413, + "acc_stderr": 0.037932811853078084, + "acc_norm": 0.17647058823529413, + "acc_norm_stderr": 0.037932811853078084 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5126050420168067, + "acc_stderr": 0.032468167657521745, + "acc_norm": 0.5126050420168067, + "acc_norm_stderr": 0.032468167657521745 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5025641025641026, + "acc_stderr": 0.025350672979412188, + "acc_norm": 0.5025641025641026, + "acc_norm_stderr": 0.025350672979412188 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.65, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.65, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 
0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3694581280788177, + "acc_stderr": 0.03395970381998575, + "acc_norm": 0.3694581280788177, + "acc_norm_stderr": 0.03395970381998575 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4774193548387097, + "acc_stderr": 0.028414985019707868, + "acc_norm": 0.4774193548387097, + "acc_norm_stderr": 0.028414985019707868 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.03088273697413865, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.03088273697413865 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4867924528301887, + "acc_stderr": 0.030762134874500476, + "acc_norm": 0.4867924528301887, + "acc_norm_stderr": 0.030762134874500476 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5363636363636364, + "acc_stderr": 0.04776449162396197, + "acc_norm": 0.5363636363636364, + "acc_norm_stderr": 0.04776449162396197 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.31851851851851853, + "acc_stderr": 0.02840653309060846, + "acc_norm": 0.31851851851851853, + "acc_norm_stderr": 0.02840653309060846 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.32450331125827814, + "acc_stderr": 0.038227469376587525, + "acc_norm": 0.32450331125827814, + "acc_norm_stderr": 0.038227469376587525 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5920398009950248, + "acc_stderr": 0.03475116365194092, + "acc_norm": 0.5920398009950248, + "acc_norm_stderr": 0.03475116365194092 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.42196531791907516, + "acc_stderr": 0.03765746693865149, + "acc_norm": 0.42196531791907516, + "acc_norm_stderr": 0.03765746693865149 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.328042328042328, + "acc_stderr": 0.024180497164376886, + "acc_norm": 0.328042328042328, + "acc_norm_stderr": 0.024180497164376886 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4513888888888889, 
+ "acc_stderr": 0.041614023984032786, + "acc_norm": 0.4513888888888889, + "acc_norm_stderr": 0.041614023984032786 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4797687861271676, + "acc_stderr": 0.026897049996382868, + "acc_norm": 0.4797687861271676, + "acc_norm_stderr": 0.026897049996382868 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4601226993865031, + "acc_stderr": 0.03915857291436971, + "acc_norm": 0.4601226993865031, + "acc_norm_stderr": 0.03915857291436971 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.027777777777777804, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.027777777777777804 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542129, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542129 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5595854922279793, + "acc_stderr": 0.035827245300360945, + "acc_norm": 0.5595854922279793, + "acc_norm_stderr": 0.035827245300360945 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.39473684210526316, + "acc_stderr": 0.04598188057816542, + "acc_norm": 0.39473684210526316, + "acc_norm_stderr": 0.04598188057816542 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5963302752293578, + "acc_stderr": 0.021035704856574966, + "acc_norm": 0.5963302752293578, + "acc_norm_stderr": 0.021035704856574966 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.0404061017820884, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.0404061017820884 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 
0.4673202614379085, + "acc_stderr": 0.028568699752225864, + "acc_norm": 0.4673202614379085, + "acc_norm_stderr": 0.028568699752225864 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.55, + "acc_stderr": 0.05, + "acc_norm": 0.55, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6198347107438017, + "acc_stderr": 0.044313245019684304, + "acc_norm": 0.6198347107438017, + "acc_norm_stderr": 0.044313245019684304 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.48026315789473684, + "acc_stderr": 0.04065771002562605, + "acc_norm": 0.48026315789473684, + "acc_norm_stderr": 0.04065771002562605 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.44607843137254904, + "acc_stderr": 0.02010986454718136, + "acc_norm": 0.44607843137254904, + "acc_norm_stderr": 0.02010986454718136 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3617021276595745, + "acc_stderr": 0.0286638201471995, + "acc_norm": 0.3617021276595745, + "acc_norm_stderr": 0.0286638201471995 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3482142857142857, + "acc_stderr": 0.04521829902833587, + "acc_norm": 0.3482142857142857, + "acc_norm_stderr": 0.04521829902833587 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.35648148148148145, + "acc_stderr": 0.03266478331527272, + "acc_norm": 0.35648148148148145, + "acc_norm_stderr": 0.03266478331527272 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2346368715083799, + "acc_stderr": 0.014173044098303656, + "acc_norm": 0.2346368715083799, + "acc_norm_stderr": 0.014173044098303656 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_professional_medicine|5": { + 
"acc": 0.4227941176470588, + "acc_stderr": 0.030008562845003476, + "acc_norm": 0.4227941176470588, + "acc_norm_stderr": 0.030008562845003476 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4, + "acc_stderr": 0.031362502409358936, + "acc_norm": 0.4, + "acc_norm_stderr": 0.031362502409358936 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5949367088607594, + "acc_stderr": 0.03195514741370671, + "acc_norm": 0.5949367088607594, + "acc_norm_stderr": 0.03195514741370671 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.34159061277705344, + "acc_stderr": 0.012112391320842845, + "acc_norm": 0.34159061277705344, + "acc_norm_stderr": 0.012112391320842845 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5098039215686274, + "acc_stderr": 0.035086373586305716, + "acc_norm": 0.5098039215686274, + "acc_norm_stderr": 0.035086373586305716 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5818181818181818, + "acc_stderr": 0.03851716319398393, + "acc_norm": 0.5818181818181818, + "acc_norm_stderr": 0.03851716319398393 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.29253365973072215, + "mc1_stderr": 0.015925597445286165, + "mc2": 0.44207228567941503, + "mc2_stderr": 0.01561642018391544 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.577331759149941, + "acc_stderr": 0.016983506079577604, + "acc_norm": 0.5749704840613932, + "acc_norm_stderr": 0.01699601630836289 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + 
"harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + 
"harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "jungyuko/DAVinCI-Yi-Ko-6B-v1.1", + "model_sha": "0321fe83e20bf9d116e14368fc024c20a23cdae5", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/juungwon/Llama-3-cs-LoRA/result_2024-07-04 23:29:26.json b/juungwon/Llama-3-cs-LoRA/result_2024-07-04 23:29:26.json new file mode 100644 index 0000000000000000000000000000000000000000..001618ad58ea152f685d8efa7f83b2019e92183d --- /dev/null +++ b/juungwon/Llama-3-cs-LoRA/result_2024-07-04 23:29:26.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.41723549488054607, + "acc_stderr": 0.014409825518403077, + "acc_norm": 0.4735494880546075, + "acc_norm_stderr": 0.01459093135812017 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4192391953794065, + "acc_stderr": 0.004924261467934419, + "acc_norm": 0.5710017924716192, + "acc_norm_stderr": 0.004939215682191771 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5730994152046783, + "acc_stderr": 0.03793620616529917, + "acc_norm": 0.5730994152046783, + "acc_norm_stderr": 0.03793620616529917 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5922330097087378, + "acc_stderr": 0.04865777570410768, + "acc_norm": 0.5922330097087378, + "acc_norm_stderr": 0.04865777570410768 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5887611749680716, + "acc_stderr": 0.017595971908056566, + "acc_norm": 0.5887611749680716, + "acc_norm_stderr": 0.017595971908056566 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45925925925925926, + "acc_stderr": 0.04304979692464245, + "acc_norm": 0.45925925925925926, + "acc_norm_stderr": 0.04304979692464245 + }, + "harness|ko_mmlu_abstract_algebra|5": { + 
"acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4425531914893617, + "acc_stderr": 0.03246956919789958, + "acc_norm": 0.4425531914893617, + "acc_norm_stderr": 0.03246956919789958 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4819277108433735, + "acc_stderr": 0.038899512528272166, + "acc_norm": 0.4819277108433735, + "acc_norm_stderr": 0.038899512528272166 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.572347266881029, + "acc_stderr": 0.02809924077580956, + "acc_norm": 0.572347266881029, + "acc_norm_stderr": 0.02809924077580956 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5336322869955157, + "acc_stderr": 0.033481800170603065, + "acc_norm": 0.5336322869955157, + "acc_norm_stderr": 0.033481800170603065 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5038167938931297, + "acc_stderr": 0.043851623256015534, + "acc_norm": 0.5038167938931297, + "acc_norm_stderr": 0.043851623256015534 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956914, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956914 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6565656565656566, + "acc_stderr": 0.03383201223244443, + "acc_norm": 0.6565656565656566, + "acc_norm_stderr": 0.03383201223244443 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5448275862068965, + "acc_stderr": 0.04149886942192117, + "acc_norm": 0.5448275862068965, + "acc_norm_stderr": 0.04149886942192117 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237655, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237655 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5210084033613446, + "acc_stderr": 0.032449808499900284, + "acc_norm": 0.5210084033613446, + "acc_norm_stderr": 0.032449808499900284 + }, + 
"harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.48205128205128206, + "acc_stderr": 0.025334667080954963, + "acc_norm": 0.48205128205128206, + "acc_norm_stderr": 0.025334667080954963 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6018518518518519, + "acc_stderr": 0.04732332615978813, + "acc_norm": 0.6018518518518519, + "acc_norm_stderr": 0.04732332615978813 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39901477832512317, + "acc_stderr": 0.034454876862647144, + "acc_norm": 0.39901477832512317, + "acc_norm_stderr": 0.034454876862647144 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5548387096774193, + "acc_stderr": 0.028272410186214906, + "acc_norm": 0.5548387096774193, + "acc_norm_stderr": 0.028272410186214906 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7222222222222222, + "acc_stderr": 0.02934311479809446, + "acc_norm": 0.7222222222222222, + "acc_norm_stderr": 0.02934311479809446 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5018867924528302, + "acc_stderr": 0.030772653642075657, + "acc_norm": 0.5018867924528302, + "acc_norm_stderr": 0.030772653642075657 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4636363636363636, + "acc_stderr": 0.047764491623961985, + "acc_norm": 0.4636363636363636, + "acc_norm_stderr": 0.047764491623961985 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.34444444444444444, + "acc_stderr": 0.02897264888484427, + "acc_norm": 0.34444444444444444, + "acc_norm_stderr": 0.02897264888484427 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33112582781456956, + "acc_stderr": 0.038425817186598696, + "acc_norm": 0.33112582781456956, + 
"acc_norm_stderr": 0.038425817186598696 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6766169154228856, + "acc_stderr": 0.03307615947979035, + "acc_norm": 0.6766169154228856, + "acc_norm_stderr": 0.03307615947979035 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4161849710982659, + "acc_stderr": 0.037585177754049466, + "acc_norm": 0.4161849710982659, + "acc_norm_stderr": 0.037585177754049466 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.31216931216931215, + "acc_stderr": 0.023865206836972602, + "acc_norm": 0.31216931216931215, + "acc_norm_stderr": 0.023865206836972602 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4652777777777778, + "acc_stderr": 0.04171115858181618, + "acc_norm": 0.4652777777777778, + "acc_norm_stderr": 0.04171115858181618 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.41, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.41, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.7, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.7, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5260115606936416, + "acc_stderr": 0.02688264343402289, + "acc_norm": 0.5260115606936416, + "acc_norm_stderr": 0.02688264343402289 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5214723926380368, + "acc_stderr": 0.03924746876751129, + "acc_norm": 0.5214723926380368, + "acc_norm_stderr": 0.03924746876751129 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5123456790123457, + "acc_stderr": 0.027812262269327242, + "acc_norm": 0.5123456790123457, + "acc_norm_stderr": 0.027812262269327242 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6062176165803109, + "acc_stderr": 0.035260770955482405, + "acc_norm": 
0.6062176165803109, + "acc_norm_stderr": 0.035260770955482405 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.32456140350877194, + "acc_stderr": 0.04404556157374767, + "acc_norm": 0.32456140350877194, + "acc_norm_stderr": 0.04404556157374767 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6623853211009174, + "acc_stderr": 0.020275265986638903, + "acc_norm": 0.6623853211009174, + "acc_norm_stderr": 0.020275265986638903 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.29365079365079366, + "acc_stderr": 0.04073524322147125, + "acc_norm": 0.29365079365079366, + "acc_norm_stderr": 0.04073524322147125 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5163398692810458, + "acc_stderr": 0.028614624752805427, + "acc_norm": 0.5163398692810458, + "acc_norm_stderr": 0.028614624752805427 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.47, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.47, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6528925619834711, + "acc_stderr": 0.04345724570292534, + "acc_norm": 0.6528925619834711, + "acc_norm_stderr": 0.04345724570292534 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4605263157894737, + "acc_stderr": 0.04056242252249032, + "acc_norm": 0.4605263157894737, + "acc_norm_stderr": 0.04056242252249032 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4542483660130719, + "acc_stderr": 0.0201429745537952, + "acc_norm": 0.4542483660130719, + "acc_norm_stderr": 0.0201429745537952 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.35106382978723405, + "acc_stderr": 0.02847350127296377, + "acc_norm": 0.35106382978723405, + "acc_norm_stderr": 0.02847350127296377 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4107142857142857, + "acc_stderr": 0.04669510663875191, + "acc_norm": 0.4107142857142857, + "acc_norm_stderr": 0.04669510663875191 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 
0.4027777777777778, + "acc_stderr": 0.03344887382997866, + "acc_norm": 0.4027777777777778, + "acc_norm_stderr": 0.03344887382997866 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2335195530726257, + "acc_stderr": 0.014149575348976274, + "acc_norm": 0.2335195530726257, + "acc_norm_stderr": 0.014149575348976274 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.47058823529411764, + "acc_stderr": 0.030320243265004137, + "acc_norm": 0.47058823529411764, + "acc_norm_stderr": 0.030320243265004137 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.46122448979591835, + "acc_stderr": 0.031912820526692774, + "acc_norm": 0.46122448979591835, + "acc_norm_stderr": 0.031912820526692774 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6582278481012658, + "acc_stderr": 0.030874537537553617, + "acc_norm": 0.6582278481012658, + "acc_norm_stderr": 0.030874537537553617 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3435462842242503, + "acc_stderr": 0.01212896117419016, + "acc_norm": 0.3435462842242503, + "acc_norm_stderr": 0.01212896117419016 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5784313725490197, + "acc_stderr": 0.03465868196380762, + "acc_norm": 0.5784313725490197, + "acc_norm_stderr": 0.03465868196380762 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6, + "acc_stderr": 0.03825460278380027, + "acc_norm": 0.6, + "acc_norm_stderr": 0.03825460278380027 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2864137086903305, + "mc1_stderr": 0.015826142439502332, + "mc2": 0.4527694253509443, + "mc2_stderr": 0.015176448499189582 + }, + 
"harness|ko_commongen_v2|2": { + "acc": 0.43919716646989376, + "acc_stderr": 0.017062775744780705, + "acc_norm": 0.4734356552538371, + "acc_norm_stderr": 0.017166075717577747 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + 
"harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "juungwon/Llama-3-cs-LoRA", + "model_sha": "cc91e425b5d48b824aa309a5f5dcdf4db35fea6d", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/juungwon/Llama-3-instruction-constructionsafety/result_2024-07-03 03:38:51.json b/juungwon/Llama-3-instruction-constructionsafety/result_2024-07-03 03:38:51.json new file mode 100644 index 0000000000000000000000000000000000000000..c9eb4b877243510d9bd9c73472aa047700aa76a1 --- /dev/null +++ b/juungwon/Llama-3-instruction-constructionsafety/result_2024-07-03 03:38:51.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.43686006825938567, + "acc_stderr": 0.014494421584256525, + "acc_norm": 0.47696245733788395, + "acc_norm_stderr": 0.014595873205358267 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4230233021310496, + "acc_stderr": 0.004930293787545617, + "acc_norm": 0.5730930093606851, + "acc_norm_stderr": 0.004936176784631952 
+ }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5672514619883041, + "acc_stderr": 0.03799978644370607, + "acc_norm": 0.5672514619883041, + "acc_norm_stderr": 0.03799978644370607 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5436893203883495, + "acc_stderr": 0.049318019942204146, + "acc_norm": 0.5436893203883495, + "acc_norm_stderr": 0.049318019942204146 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6155810983397191, + "acc_stderr": 0.01739568874281962, + "acc_norm": 0.6155810983397191, + "acc_norm_stderr": 0.01739568874281962 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.43703703703703706, + "acc_stderr": 0.04284958639753398, + "acc_norm": 0.43703703703703706, + "acc_norm_stderr": 0.04284958639753398 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.46808510638297873, + "acc_stderr": 0.03261936918467381, + "acc_norm": 0.46808510638297873, + "acc_norm_stderr": 0.03261936918467381 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4879518072289157, + "acc_stderr": 0.038913644958358196, + "acc_norm": 0.4879518072289157, + "acc_norm_stderr": 0.038913644958358196 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5594855305466238, + "acc_stderr": 0.028196400574197422, + "acc_norm": 0.5594855305466238, + "acc_norm_stderr": 0.028196400574197422 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5291479820627802, + "acc_stderr": 0.03350073248773404, + "acc_norm": 0.5291479820627802, + "acc_norm_stderr": 0.03350073248773404 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5572519083969466, + "acc_stderr": 0.0435644720266507, + "acc_norm": 0.5572519083969466, + "acc_norm_stderr": 0.0435644720266507 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956914, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956914 + }, + 
"harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6060606060606061, + "acc_stderr": 0.03481285338232963, + "acc_norm": 0.6060606060606061, + "acc_norm_stderr": 0.03481285338232963 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.47586206896551725, + "acc_stderr": 0.0416180850350153, + "acc_norm": 0.47586206896551725, + "acc_norm_stderr": 0.0416180850350153 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.04158307533083286, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.04158307533083286 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5252100840336135, + "acc_stderr": 0.03243718055137411, + "acc_norm": 0.5252100840336135, + "acc_norm_stderr": 0.03243718055137411 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4846153846153846, + "acc_stderr": 0.02533900301010653, + "acc_norm": 0.4846153846153846, + "acc_norm_stderr": 0.02533900301010653 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5925925925925926, + "acc_stderr": 0.04750077341199984, + "acc_norm": 0.5925925925925926, + "acc_norm_stderr": 0.04750077341199984 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.37438423645320196, + "acc_stderr": 0.03405155380561952, + "acc_norm": 0.37438423645320196, + "acc_norm_stderr": 0.03405155380561952 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5451612903225806, + "acc_stderr": 0.02832774309156108, + "acc_norm": 0.5451612903225806, + "acc_norm_stderr": 0.02832774309156108 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7051282051282052, + "acc_stderr": 0.029872577708891176, + "acc_norm": 0.7051282051282052, + 
"acc_norm_stderr": 0.029872577708891176 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4867924528301887, + "acc_stderr": 0.030762134874500482, + "acc_norm": 0.4867924528301887, + "acc_norm_stderr": 0.030762134874500482 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.34444444444444444, + "acc_stderr": 0.02897264888484427, + "acc_norm": 0.34444444444444444, + "acc_norm_stderr": 0.02897264888484427 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3708609271523179, + "acc_stderr": 0.03943966699183629, + "acc_norm": 0.3708609271523179, + "acc_norm_stderr": 0.03943966699183629 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6766169154228856, + "acc_stderr": 0.03307615947979033, + "acc_norm": 0.6766169154228856, + "acc_norm_stderr": 0.03307615947979033 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.42196531791907516, + "acc_stderr": 0.0376574669386515, + "acc_norm": 0.42196531791907516, + "acc_norm_stderr": 0.0376574669386515 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.023517294335963286, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.023517294335963286 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.041553199555931467, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.041553199555931467 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.7, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.7, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5260115606936416, + "acc_stderr": 0.026882643434022885, + 
"acc_norm": 0.5260115606936416, + "acc_norm_stderr": 0.026882643434022885 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5214723926380368, + "acc_stderr": 0.03924746876751129, + "acc_norm": 0.5214723926380368, + "acc_norm_stderr": 0.03924746876751129 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5216049382716049, + "acc_stderr": 0.027794760105008722, + "acc_norm": 0.5216049382716049, + "acc_norm_stderr": 0.027794760105008722 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6062176165803109, + "acc_stderr": 0.035260770955482405, + "acc_norm": 0.6062176165803109, + "acc_norm_stderr": 0.035260770955482405 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2982456140350877, + "acc_stderr": 0.04303684033537315, + "acc_norm": 0.2982456140350877, + "acc_norm_stderr": 0.04303684033537315 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6330275229357798, + "acc_stderr": 0.020664675659520536, + "acc_norm": 0.6330275229357798, + "acc_norm_stderr": 0.020664675659520536 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3253968253968254, + "acc_stderr": 0.04190596438871136, + "acc_norm": 0.3253968253968254, + "acc_norm_stderr": 0.04190596438871136 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5196078431372549, + "acc_stderr": 0.028607893699576073, + "acc_norm": 0.5196078431372549, + "acc_norm_stderr": 0.028607893699576073 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6033057851239669, + "acc_stderr": 0.04465869780531009, + "acc_norm": 0.6033057851239669, + "acc_norm_stderr": 0.04465869780531009 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4407894736842105, + "acc_stderr": 0.04040311062490435, + 
"acc_norm": 0.4407894736842105, + "acc_norm_stderr": 0.04040311062490435 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4526143790849673, + "acc_stderr": 0.020136790918492537, + "acc_norm": 0.4526143790849673, + "acc_norm_stderr": 0.020136790918492537 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32978723404255317, + "acc_stderr": 0.028045946942042398, + "acc_norm": 0.32978723404255317, + "acc_norm_stderr": 0.028045946942042398 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4107142857142857, + "acc_stderr": 0.04669510663875192, + "acc_norm": 0.4107142857142857, + "acc_norm_stderr": 0.04669510663875192 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4675925925925926, + "acc_stderr": 0.03402801581358966, + "acc_norm": 0.4675925925925926, + "acc_norm_stderr": 0.03402801581358966 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.22905027932960895, + "acc_stderr": 0.014054314935614555, + "acc_norm": 0.22905027932960895, + "acc_norm_stderr": 0.014054314935614555 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.47058823529411764, + "acc_stderr": 0.030320243265004137, + "acc_norm": 0.47058823529411764, + "acc_norm_stderr": 0.030320243265004137 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.45714285714285713, + "acc_stderr": 0.031891418324213966, + "acc_norm": 0.45714285714285713, + "acc_norm_stderr": 0.031891418324213966 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.0306858205966108, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.0306858205966108 + }, + 
"harness|ko_mmlu_professional_law|5": { + "acc": 0.3494132985658409, + "acc_stderr": 0.012177306252786691, + "acc_norm": 0.3494132985658409, + "acc_norm_stderr": 0.012177306252786691 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5882352941176471, + "acc_stderr": 0.034542365853806094, + "acc_norm": 0.5882352941176471, + "acc_norm_stderr": 0.034542365853806094 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6787878787878788, + "acc_stderr": 0.03646204963253812, + "acc_norm": 0.6787878787878788, + "acc_norm_stderr": 0.03646204963253812 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2729498164014688, + "mc1_stderr": 0.015594753632006509, + "mc2": 0.41826742046296295, + "mc2_stderr": 0.015049874966181587 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.45808736717827625, + "acc_stderr": 0.017129852117911147, + "acc_norm": 0.48406139315230223, + "acc_norm_stderr": 0.017181617837190195 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + 
"harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "juungwon/Llama-3-instruction-constructionsafety", + "model_sha": "4bf598f804cff72c13c2de98ac96727753911739", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of 
file diff --git a/jwkweon/CUBOX-SOLAR-10.7B-DPO-v0.1/result_2024-02-22 05:35:31.json b/jwkweon/CUBOX-SOLAR-10.7B-DPO-v0.1/result_2024-02-22 05:35:31.json new file mode 100644 index 0000000000000000000000000000000000000000..f265978a1ebf35a1220bafc51190b65cab818dee --- /dev/null +++ b/jwkweon/CUBOX-SOLAR-10.7B-DPO-v0.1/result_2024-02-22 05:35:31.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.49573378839590443, + "acc_stderr": 0.014610858923956952, + "acc_norm": 0.5571672354948806, + "acc_norm_stderr": 0.01451557387334891 + }, + "harness|ko_hellaswag|10": { + "acc": 0.45717984465245964, + "acc_stderr": 0.004971449552787172, + "acc_norm": 0.6417048396733719, + "acc_norm_stderr": 0.0047851950498891595 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6198830409356725, + "acc_stderr": 0.03722965741385539, + "acc_norm": 0.6198830409356725, + "acc_norm_stderr": 0.03722965741385539 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6699029126213593, + "acc_stderr": 0.0465614711001235, + "acc_norm": 0.6699029126213593, + "acc_norm_stderr": 0.0465614711001235 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6781609195402298, + "acc_stderr": 0.01670638141505791, + "acc_norm": 0.6781609195402298, + "acc_norm_stderr": 0.01670638141505791 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4888888888888889, + "acc_stderr": 0.04318275491977976, + "acc_norm": 0.4888888888888889, + "acc_norm_stderr": 0.04318275491977976 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4765957446808511, + "acc_stderr": 0.03265019475033583, + "acc_norm": 0.4765957446808511, + "acc_norm_stderr": 0.03265019475033583 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42771084337349397, + "acc_stderr": 0.03851597683718533, + "acc_norm": 0.42771084337349397, + "acc_norm_stderr": 
0.03851597683718533 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5691318327974276, + "acc_stderr": 0.028125340983972714, + "acc_norm": 0.5691318327974276, + "acc_norm_stderr": 0.028125340983972714 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5874439461883408, + "acc_stderr": 0.03304062175449297, + "acc_norm": 0.5874439461883408, + "acc_norm_stderr": 0.03304062175449297 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5343511450381679, + "acc_stderr": 0.04374928560599738, + "acc_norm": 0.5343511450381679, + "acc_norm_stderr": 0.04374928560599738 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6767676767676768, + "acc_stderr": 0.033322999210706444, + "acc_norm": 0.6767676767676768, + "acc_norm_stderr": 0.033322999210706444 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.45517241379310347, + "acc_stderr": 0.04149886942192117, + "acc_norm": 0.45517241379310347, + "acc_norm_stderr": 0.04149886942192117 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.043898699568087785, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.043898699568087785 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.47058823529411764, + "acc_stderr": 0.032422250271150074, + "acc_norm": 0.47058823529411764, + "acc_norm_stderr": 0.032422250271150074 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5205128205128206, + "acc_stderr": 0.02532966316348994, + "acc_norm": 0.5205128205128206, + "acc_norm_stderr": 0.02532966316348994 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.63, + "acc_stderr": 0.04852365870939098, + "acc_norm": 0.63, + "acc_norm_stderr": 0.04852365870939098 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + 
"acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5925925925925926, + "acc_stderr": 0.04750077341199984, + "acc_norm": 0.5925925925925926, + "acc_norm_stderr": 0.04750077341199984 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3694581280788177, + "acc_stderr": 0.033959703819985726, + "acc_norm": 0.3694581280788177, + "acc_norm_stderr": 0.033959703819985726 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6064516129032258, + "acc_stderr": 0.027791878753132264, + "acc_norm": 0.6064516129032258, + "acc_norm_stderr": 0.027791878753132264 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7863247863247863, + "acc_stderr": 0.02685345037700915, + "acc_norm": 0.7863247863247863, + "acc_norm_stderr": 0.02685345037700915 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.539622641509434, + "acc_stderr": 0.030676096599389177, + "acc_norm": 0.539622641509434, + "acc_norm_stderr": 0.030676096599389177 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5545454545454546, + "acc_stderr": 0.047605488214603246, + "acc_norm": 0.5545454545454546, + "acc_norm_stderr": 0.047605488214603246 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3074074074074074, + "acc_stderr": 0.028133252578815646, + "acc_norm": 0.3074074074074074, + "acc_norm_stderr": 0.028133252578815646 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3443708609271523, + "acc_stderr": 0.038796870240733264, + "acc_norm": 0.3443708609271523, + "acc_norm_stderr": 0.038796870240733264 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.681592039800995, + "acc_stderr": 0.03294118479054096, + "acc_norm": 0.681592039800995, + "acc_norm_stderr": 0.03294118479054096 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4277456647398844, + "acc_stderr": 0.037724468575180276, + "acc_norm": 0.4277456647398844, + "acc_norm_stderr": 0.037724468575180276 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 
0.3915343915343915, + "acc_stderr": 0.025138091388851116, + "acc_norm": 0.3915343915343915, + "acc_norm_stderr": 0.025138091388851116 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4930555555555556, + "acc_stderr": 0.041808067502949374, + "acc_norm": 0.4930555555555556, + "acc_norm_stderr": 0.041808067502949374 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.7, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.7, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5086705202312138, + "acc_stderr": 0.026915047355369804, + "acc_norm": 0.5086705202312138, + "acc_norm_stderr": 0.026915047355369804 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.49079754601226994, + "acc_stderr": 0.039277056007874414, + "acc_norm": 0.49079754601226994, + "acc_norm_stderr": 0.039277056007874414 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5493827160493827, + "acc_stderr": 0.027684721415656192, + "acc_norm": 0.5493827160493827, + "acc_norm_stderr": 0.027684721415656192 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6373056994818653, + "acc_stderr": 0.034697137917043715, + "acc_norm": 0.6373056994818653, + "acc_norm_stderr": 0.034697137917043715 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3508771929824561, + "acc_stderr": 0.044895393502706986, + "acc_norm": 0.3508771929824561, + "acc_norm_stderr": 0.044895393502706986 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6403669724770642, + "acc_stderr": 0.020575234660123787, + "acc_norm": 0.6403669724770642, + "acc_norm_stderr": 0.020575234660123787 + }, + 
"harness|ko_mmlu_formal_logic|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.04285714285714281, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.04285714285714281 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5294117647058824, + "acc_stderr": 0.028580341065138293, + "acc_norm": 0.5294117647058824, + "acc_norm_stderr": 0.028580341065138293 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.55, + "acc_stderr": 0.04999999999999999, + "acc_norm": 0.55, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6859504132231405, + "acc_stderr": 0.04236964753041019, + "acc_norm": 0.6859504132231405, + "acc_norm_stderr": 0.04236964753041019 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5657894736842105, + "acc_stderr": 0.04033565667848319, + "acc_norm": 0.5657894736842105, + "acc_norm_stderr": 0.04033565667848319 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.45588235294117646, + "acc_stderr": 0.020148939420415738, + "acc_norm": 0.45588235294117646, + "acc_norm_stderr": 0.020148939420415738 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32978723404255317, + "acc_stderr": 0.028045946942042398, + "acc_norm": 0.32978723404255317, + "acc_norm_stderr": 0.028045946942042398 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.04287858751340455, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.04287858751340455 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.375, + "acc_stderr": 0.033016908987210894, + "acc_norm": 0.375, + "acc_norm_stderr": 0.033016908987210894 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2446927374301676, + "acc_stderr": 0.014378169884098402, + "acc_norm": 0.2446927374301676, + "acc_norm_stderr": 0.014378169884098402 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.049999999999999996, + "acc_norm": 0.45, + "acc_norm_stderr": 
0.049999999999999996 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.65, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.65, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4117647058823529, + "acc_stderr": 0.02989616303312548, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.02989616303312548 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5918367346938775, + "acc_stderr": 0.03146465712827424, + "acc_norm": 0.5918367346938775, + "acc_norm_stderr": 0.03146465712827424 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7172995780590717, + "acc_stderr": 0.029312814153955924, + "acc_norm": 0.7172995780590717, + "acc_norm_stderr": 0.029312814153955924 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.34485006518904826, + "acc_stderr": 0.012139881006287065, + "acc_norm": 0.34485006518904826, + "acc_norm_stderr": 0.012139881006287065 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6764705882352942, + "acc_stderr": 0.03283472056108561, + "acc_norm": 0.6764705882352942, + "acc_norm_stderr": 0.03283472056108561 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6545454545454545, + "acc_stderr": 0.03713158067481913, + "acc_norm": 0.6545454545454545, + "acc_norm_stderr": 0.03713158067481913 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3317013463892289, + "mc1_stderr": 0.01648214881024146, + "mc2": 0.5067332653158748, + "mc2_stderr": 0.015901847594011375 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5867768595041323, + "acc_stderr": 0.016929480234495226, + "acc_norm": 0.5950413223140496, + "acc_norm_stderr": 0.01687694116504561 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + 
"harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + 
"harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "jwkweon/CUBOX-SOLAR-10.7B-DPO-v0.1", + "model_sha": "22e07d1e30734fece4cd454ad3164f87f10cd3e1", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/jwkweon/CUBOX-SOLAR-DPO-v0.2/result_2024-04-02 02:05:03.json b/jwkweon/CUBOX-SOLAR-DPO-v0.2/result_2024-04-02 02:05:03.json new file mode 100644 index 0000000000000000000000000000000000000000..66056d5ae1a92d6c043569e787d1b1260d4db649 --- /dev/null +++ b/jwkweon/CUBOX-SOLAR-DPO-v0.2/result_2024-04-02 02:05:03.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.7312286689419796, + "acc_stderr": 0.012955065963710682, + "acc_norm": 0.7773037542662116, + "acc_norm_stderr": 0.012158314774829931 + }, + "harness|ko_hellaswag|10": { + "acc": 0.6620195180242979, + "acc_stderr": 0.004720551323547144, + "acc_norm": 0.798546106353316, + "acc_norm_stderr": 0.004002665957282785 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.695906432748538, + "acc_stderr": 0.035282112582452306, + "acc_norm": 0.695906432748538, + "acc_norm_stderr": 0.035282112582452306 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.7281553398058253, + "acc_stderr": 0.044052680241409216, + "acc_norm": 0.7281553398058253, + "acc_norm_stderr": 0.044052680241409216 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.669220945083014, + "acc_stderr": 0.01682481846256376, 
+ "acc_norm": 0.669220945083014, + "acc_norm_stderr": 0.01682481846256376 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.04292596718256981, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.04292596718256981 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.48936170212765956, + "acc_stderr": 0.03267862331014063, + "acc_norm": 0.48936170212765956, + "acc_norm_stderr": 0.03267862331014063 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4819277108433735, + "acc_stderr": 0.038899512528272166, + "acc_norm": 0.4819277108433735, + "acc_norm_stderr": 0.038899512528272166 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6237942122186495, + "acc_stderr": 0.02751392568354943, + "acc_norm": 0.6237942122186495, + "acc_norm_stderr": 0.02751392568354943 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.6278026905829597, + "acc_stderr": 0.03244305283008731, + "acc_norm": 0.6278026905829597, + "acc_norm_stderr": 0.03244305283008731 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5648854961832062, + "acc_stderr": 0.04348208051644858, + "acc_norm": 0.5648854961832062, + "acc_norm_stderr": 0.04348208051644858 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.51, + "acc_stderr": 0.050241839379569095, + "acc_norm": 0.51, + "acc_norm_stderr": 0.050241839379569095 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7575757575757576, + "acc_stderr": 0.030532892233932036, + "acc_norm": 0.7575757575757576, + "acc_norm_stderr": 0.030532892233932036 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5310344827586206, + "acc_stderr": 0.04158632762097828, + "acc_norm": 0.5310344827586206, + "acc_norm_stderr": 0.04158632762097828 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3431372549019608, + "acc_stderr": 
0.04724007352383888, + "acc_norm": 0.3431372549019608, + "acc_norm_stderr": 0.04724007352383888 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6176470588235294, + "acc_stderr": 0.031566630992154156, + "acc_norm": 0.6176470588235294, + "acc_norm_stderr": 0.031566630992154156 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.6102564102564103, + "acc_stderr": 0.02472696788664708, + "acc_norm": 0.6102564102564103, + "acc_norm_stderr": 0.02472696788664708 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.67, + "acc_stderr": 0.047258156262526094, + "acc_norm": 0.67, + "acc_norm_stderr": 0.047258156262526094 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6574074074074074, + "acc_stderr": 0.04587904741301812, + "acc_norm": 0.6574074074074074, + "acc_norm_stderr": 0.04587904741301812 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.46798029556650245, + "acc_stderr": 0.03510766597959217, + "acc_norm": 0.46798029556650245, + "acc_norm_stderr": 0.03510766597959217 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5967741935483871, + "acc_stderr": 0.027906150826041146, + "acc_norm": 0.5967741935483871, + "acc_norm_stderr": 0.027906150826041146 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.8205128205128205, + "acc_stderr": 0.02514093595033544, + "acc_norm": 0.8205128205128205, + "acc_norm_stderr": 0.02514093595033544 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5547169811320755, + "acc_stderr": 0.030588052974270655, + "acc_norm": 0.5547169811320755, + "acc_norm_stderr": 0.030588052974270655 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6090909090909091, + "acc_stderr": 0.04673752333670239, + "acc_norm": 0.6090909090909091, + "acc_norm_stderr": 0.04673752333670239 + }, + 
"harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.34074074074074073, + "acc_stderr": 0.028897748741131147, + "acc_norm": 0.34074074074074073, + "acc_norm_stderr": 0.028897748741131147 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33774834437086093, + "acc_stderr": 0.038615575462551684, + "acc_norm": 0.33774834437086093, + "acc_norm_stderr": 0.038615575462551684 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7263681592039801, + "acc_stderr": 0.03152439186555402, + "acc_norm": 0.7263681592039801, + "acc_norm_stderr": 0.03152439186555402 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5086705202312138, + "acc_stderr": 0.038118909889404126, + "acc_norm": 0.5086705202312138, + "acc_norm_stderr": 0.038118909889404126 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.4470899470899471, + "acc_stderr": 0.025606723995777025, + "acc_norm": 0.4470899470899471, + "acc_norm_stderr": 0.025606723995777025 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.625, + "acc_stderr": 0.04048439222695598, + "acc_norm": 0.625, + "acc_norm_stderr": 0.04048439222695598 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.72, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.72, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5838150289017341, + "acc_stderr": 0.02653818910470548, + "acc_norm": 0.5838150289017341, + "acc_norm_stderr": 0.02653818910470548 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5950920245398773, + "acc_stderr": 0.038566721635489125, + "acc_norm": 0.5950920245398773, + "acc_norm_stderr": 0.038566721635489125 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6512345679012346, + "acc_stderr": 0.02651759772446501, + "acc_norm": 0.6512345679012346, + "acc_norm_stderr": 0.02651759772446501 + 
}, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7823834196891192, + "acc_stderr": 0.029778663037752954, + "acc_norm": 0.7823834196891192, + "acc_norm_stderr": 0.029778663037752954 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.49122807017543857, + "acc_stderr": 0.047028804320496165, + "acc_norm": 0.49122807017543857, + "acc_norm_stderr": 0.047028804320496165 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.726605504587156, + "acc_stderr": 0.019109299846098278, + "acc_norm": 0.726605504587156, + "acc_norm_stderr": 0.019109299846098278 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4126984126984127, + "acc_stderr": 0.04403438954768177, + "acc_norm": 0.4126984126984127, + "acc_norm_stderr": 0.04403438954768177 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5718954248366013, + "acc_stderr": 0.028332397483664278, + "acc_norm": 0.5718954248366013, + "acc_norm_stderr": 0.028332397483664278 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.69, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.69, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6859504132231405, + "acc_stderr": 0.042369647530410184, + "acc_norm": 0.6859504132231405, + "acc_norm_stderr": 0.042369647530410184 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.6447368421052632, + "acc_stderr": 0.038947344870133176, + "acc_norm": 0.6447368421052632, + "acc_norm_stderr": 0.038947344870133176 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5604575163398693, + "acc_stderr": 0.02007942040808792, + "acc_norm": 0.5604575163398693, + "acc_norm_stderr": 0.02007942040808792 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3971631205673759, + "acc_stderr": 0.029189805673587095, + "acc_norm": 
0.3971631205673759, + "acc_norm_stderr": 0.029189805673587095 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.41964285714285715, + "acc_stderr": 0.04684099321077106, + "acc_norm": 0.41964285714285715, + "acc_norm_stderr": 0.04684099321077106 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.0340763209385405, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.0340763209385405 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.3307262569832402, + "acc_stderr": 0.01573502625896612, + "acc_norm": 0.3307262569832402, + "acc_norm_stderr": 0.01573502625896612 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.69, + "acc_stderr": 0.046482319871173156, + "acc_norm": 0.69, + "acc_norm_stderr": 0.046482319871173156 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5110294117647058, + "acc_stderr": 0.030365446477275675, + "acc_norm": 0.5110294117647058, + "acc_norm_stderr": 0.030365446477275675 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6653061224489796, + "acc_stderr": 0.030209235226242304, + "acc_norm": 0.6653061224489796, + "acc_norm_stderr": 0.030209235226242304 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7215189873417721, + "acc_stderr": 0.029178682304842544, + "acc_norm": 0.7215189873417721, + "acc_norm_stderr": 0.029178682304842544 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.44589308996088656, + "acc_stderr": 0.01269524471137978, + "acc_norm": 0.44589308996088656, + "acc_norm_stderr": 0.01269524471137978 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6862745098039216, + "acc_stderr": 0.03256685484460387, + "acc_norm": 0.6862745098039216, + "acc_norm_stderr": 0.03256685484460387 + }, + "harness|ko_mmlu_high_school_european_history|5": 
{ + "acc": 0.6303030303030303, + "acc_stderr": 0.03769430314512568, + "acc_norm": 0.6303030303030303, + "acc_norm_stderr": 0.03769430314512568 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.7319461444308446, + "mc1_stderr": 0.015506204722834382, + "mc2": 0.8313736699851659, + "mc2_stderr": 0.012649408684893977 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4805194805194805, + "acc_stderr": 0.017177301992342544, + "acc_norm": 0.5029515938606848, + "acc_norm_stderr": 0.017190054580194698 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + 
"harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "jwkweon/CUBOX-SOLAR-DPO-v0.2", + "model_sha": "7e96f3740629b7ac35fb54d545a8bd9e7768f7ed", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/jwkweon/CUBOX-SOLAR-DPO-v0.3/result_2024-04-02 02:05:12.json b/jwkweon/CUBOX-SOLAR-DPO-v0.3/result_2024-04-02 02:05:12.json new file mode 100644 index 0000000000000000000000000000000000000000..5fc0cf0948a811751359b5831e536c06b9a5c633 --- /dev/null +++ b/jwkweon/CUBOX-SOLAR-DPO-v0.3/result_2024-04-02 02:05:12.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.7363481228668942, + "acc_stderr": 
0.012875929151297058, + "acc_norm": 0.7747440273037542, + "acc_norm_stderr": 0.012207839995407305 + }, + "harness|ko_hellaswag|10": { + "acc": 0.6863174666401115, + "acc_stderr": 0.004630407476835182, + "acc_norm": 0.8095000995817566, + "acc_norm_stderr": 0.0039189285565904815 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6842105263157895, + "acc_stderr": 0.03565079670708311, + "acc_norm": 0.6842105263157895, + "acc_norm_stderr": 0.03565079670708311 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.7572815533980582, + "acc_stderr": 0.04245022486384495, + "acc_norm": 0.7572815533980582, + "acc_norm_stderr": 0.04245022486384495 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.016857391247472542, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.016857391247472542 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4740740740740741, + "acc_stderr": 0.04313531696750574, + "acc_norm": 0.4740740740740741, + "acc_norm_stderr": 0.04313531696750574 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4808510638297872, + "acc_stderr": 0.03266204299064677, + "acc_norm": 0.4808510638297872, + "acc_norm_stderr": 0.03266204299064677 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4879518072289157, + "acc_stderr": 0.0389136449583582, + "acc_norm": 0.4879518072289157, + "acc_norm_stderr": 0.0389136449583582 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6270096463022508, + "acc_stderr": 0.0274666102131401, + "acc_norm": 0.6270096463022508, + "acc_norm_stderr": 0.0274666102131401 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.6278026905829597, + "acc_stderr": 0.03244305283008731, + "acc_norm": 0.6278026905829597, + "acc_norm_stderr": 0.03244305283008731 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5648854961832062, + "acc_stderr": 
0.04348208051644858, + "acc_norm": 0.5648854961832062, + "acc_norm_stderr": 0.04348208051644858 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7525252525252525, + "acc_stderr": 0.03074630074212451, + "acc_norm": 0.7525252525252525, + "acc_norm_stderr": 0.03074630074212451 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5379310344827586, + "acc_stderr": 0.04154659671707548, + "acc_norm": 0.5379310344827586, + "acc_norm_stderr": 0.04154659671707548 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3627450980392157, + "acc_stderr": 0.047840607041056527, + "acc_norm": 0.3627450980392157, + "acc_norm_stderr": 0.047840607041056527 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6134453781512605, + "acc_stderr": 0.03163145807552378, + "acc_norm": 0.6134453781512605, + "acc_norm_stderr": 0.03163145807552378 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.6256410256410256, + "acc_stderr": 0.024537591572830496, + "acc_norm": 0.6256410256410256, + "acc_norm_stderr": 0.024537591572830496 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.67, + "acc_stderr": 0.047258156262526094, + "acc_norm": 0.67, + "acc_norm_stderr": 0.047258156262526094 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542129, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542129 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6388888888888888, + "acc_stderr": 0.04643454608906275, + "acc_norm": 0.6388888888888888, + "acc_norm_stderr": 0.04643454608906275 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4630541871921182, + "acc_stderr": 0.035083705204426656, + "acc_norm": 0.4630541871921182, + "acc_norm_stderr": 0.035083705204426656 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 
0.5935483870967742, + "acc_stderr": 0.027941727346256304, + "acc_norm": 0.5935483870967742, + "acc_norm_stderr": 0.027941727346256304 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.8205128205128205, + "acc_stderr": 0.025140935950335435, + "acc_norm": 0.8205128205128205, + "acc_norm_stderr": 0.025140935950335435 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5811320754716981, + "acc_stderr": 0.030365050829115205, + "acc_norm": 0.5811320754716981, + "acc_norm_stderr": 0.030365050829115205 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6, + "acc_stderr": 0.0469237132203465, + "acc_norm": 0.6, + "acc_norm_stderr": 0.0469237132203465 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3592592592592593, + "acc_stderr": 0.02925290592725198, + "acc_norm": 0.3592592592592593, + "acc_norm_stderr": 0.02925290592725198 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3576158940397351, + "acc_stderr": 0.03913453431177258, + "acc_norm": 0.3576158940397351, + "acc_norm_stderr": 0.03913453431177258 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7114427860696517, + "acc_stderr": 0.03203841040213321, + "acc_norm": 0.7114427860696517, + "acc_norm_stderr": 0.03203841040213321 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5260115606936416, + "acc_stderr": 0.03807301726504511, + "acc_norm": 0.5260115606936416, + "acc_norm_stderr": 0.03807301726504511 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.025591857761382182, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.025591857761382182 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.6041666666666666, + "acc_stderr": 0.04089465449325582, + "acc_norm": 0.6041666666666666, + "acc_norm_stderr": 0.04089465449325582 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + 
"harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.75, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.75, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.6040462427745664, + "acc_stderr": 0.026329813341946243, + "acc_norm": 0.6040462427745664, + "acc_norm_stderr": 0.026329813341946243 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.6073619631901841, + "acc_stderr": 0.038367409078310294, + "acc_norm": 0.6073619631901841, + "acc_norm_stderr": 0.038367409078310294 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6790123456790124, + "acc_stderr": 0.025976566010862737, + "acc_norm": 0.6790123456790124, + "acc_norm_stderr": 0.025976566010862737 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7668393782383419, + "acc_stderr": 0.030516111371476005, + "acc_norm": 0.7668393782383419, + "acc_norm_stderr": 0.030516111371476005 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.49122807017543857, + "acc_stderr": 0.04702880432049615, + "acc_norm": 0.49122807017543857, + "acc_norm_stderr": 0.04702880432049615 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.7211009174311926, + "acc_stderr": 0.019227468876463524, + "acc_norm": 0.7211009174311926, + "acc_norm_stderr": 0.019227468876463524 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.044444444444444495, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.044444444444444495 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.6143790849673203, + "acc_stderr": 0.027870745278290282, + "acc_norm": 0.6143790849673203, + "acc_norm_stderr": 0.027870745278290282 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.66, + "acc_stderr": 0.04760952285695237, + "acc_norm": 0.66, + "acc_norm_stderr": 
0.04760952285695237 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7272727272727273, + "acc_stderr": 0.04065578140908705, + "acc_norm": 0.7272727272727273, + "acc_norm_stderr": 0.04065578140908705 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.6447368421052632, + "acc_stderr": 0.038947344870133176, + "acc_norm": 0.6447368421052632, + "acc_norm_stderr": 0.038947344870133176 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5718954248366013, + "acc_stderr": 0.020017629214213097, + "acc_norm": 0.5718954248366013, + "acc_norm_stderr": 0.020017629214213097 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.4078014184397163, + "acc_stderr": 0.02931601177634356, + "acc_norm": 0.4078014184397163, + "acc_norm_stderr": 0.02931601177634356 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.44642857142857145, + "acc_stderr": 0.047184714852195886, + "acc_norm": 0.44642857142857145, + "acc_norm_stderr": 0.047184714852195886 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4861111111111111, + "acc_stderr": 0.034086558679777494, + "acc_norm": 0.4861111111111111, + "acc_norm_stderr": 0.034086558679777494 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.358659217877095, + "acc_stderr": 0.01604045442616447, + "acc_norm": 0.358659217877095, + "acc_norm_stderr": 0.01604045442616447 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.67, + "acc_stderr": 0.047258156262526094, + "acc_norm": 0.67, + "acc_norm_stderr": 0.047258156262526094 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5441176470588235, + "acc_stderr": 0.030254372573976722, + "acc_norm": 0.5441176470588235, + "acc_norm_stderr": 0.030254372573976722 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6612244897959184, + "acc_stderr": 
0.030299506562154188, + "acc_norm": 0.6612244897959184, + "acc_norm_stderr": 0.030299506562154188 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.729957805907173, + "acc_stderr": 0.028900721906293426, + "acc_norm": 0.729957805907173, + "acc_norm_stderr": 0.028900721906293426 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.4498044328552803, + "acc_stderr": 0.0127057214985651, + "acc_norm": 0.4498044328552803, + "acc_norm_stderr": 0.0127057214985651 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.696078431372549, + "acc_stderr": 0.032282103870378914, + "acc_norm": 0.696078431372549, + "acc_norm_stderr": 0.032282103870378914 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.593939393939394, + "acc_stderr": 0.03834816355401181, + "acc_norm": 0.593939393939394, + "acc_norm_stderr": 0.03834816355401181 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.7564259485924113, + "mc1_stderr": 0.015026354824910782, + "mc2": 0.8337228201162233, + "mc2_stderr": 0.012658216629478166 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.47107438016528924, + "acc_stderr": 0.01716156394991635, + "acc_norm": 0.4911452184179457, + "acc_norm_stderr": 0.017187658199336736 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + 
"harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + 
"model_name": "jwkweon/CUBOX-SOLAR-DPO-v0.3", + "model_sha": "aa6416e01e3c61116fef4706a2f554003dabb948", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/jylee420/gemma-2b-data-std-v0/result_2024-03-17 03:49:52.json b/jylee420/gemma-2b-data-std-v0/result_2024-03-17 03:49:52.json new file mode 100644 index 0000000000000000000000000000000000000000..5cfb19cd033a2c4771f52ea2b8abe5b245e60ed7 --- /dev/null +++ b/jylee420/gemma-2b-data-std-v0/result_2024-03-17 03:49:52.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.24829351535836178, + "acc_stderr": 0.012624912868089783, + "acc_norm": 0.2935153583617747, + "acc_norm_stderr": 0.013307250444941115 + }, + "harness|ko_hellaswag|10": { + "acc": 0.31318462457677754, + "acc_stderr": 0.004628409084218762, + "acc_norm": 0.36885082652858, + "acc_norm_stderr": 0.0048150733340005985 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.30994152046783624, + "acc_stderr": 0.03546976959393163, + "acc_norm": 0.30994152046783624, + "acc_norm_stderr": 0.03546976959393163 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2912621359223301, + "acc_stderr": 0.044986763205729224, + "acc_norm": 0.2912621359223301, + "acc_norm_stderr": 0.044986763205729224 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.280970625798212, + "acc_stderr": 0.016073127851221232, + "acc_norm": 0.280970625798212, + "acc_norm_stderr": 0.016073127851221232 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.038201699145179055, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.038201699145179055 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 
0.3872340425531915, + "acc_stderr": 0.03184389265339526, + "acc_norm": 0.3872340425531915, + "acc_norm_stderr": 0.03184389265339526 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3313253012048193, + "acc_stderr": 0.03664314777288087, + "acc_norm": 0.3313253012048193, + "acc_norm_stderr": 0.03664314777288087 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3183279742765273, + "acc_stderr": 0.026457225067811025, + "acc_norm": 0.3183279742765273, + "acc_norm_stderr": 0.026457225067811025 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.31390134529147984, + "acc_stderr": 0.031146796482972465, + "acc_norm": 0.31390134529147984, + "acc_norm_stderr": 0.031146796482972465 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.31297709923664124, + "acc_stderr": 0.04066962905677697, + "acc_norm": 0.31297709923664124, + "acc_norm_stderr": 0.04066962905677697 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.26262626262626265, + "acc_stderr": 0.03135305009533085, + "acc_norm": 0.26262626262626265, + "acc_norm_stderr": 0.03135305009533085 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3931034482758621, + "acc_stderr": 0.0407032901370707, + "acc_norm": 0.3931034482758621, + "acc_norm_stderr": 0.0407032901370707 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.17647058823529413, + "acc_stderr": 0.03793281185307809, + "acc_norm": 0.17647058823529413, + "acc_norm_stderr": 0.03793281185307809 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3697478991596639, + "acc_stderr": 0.031357095996135904, + "acc_norm": 0.3697478991596639, + "acc_norm_stderr": 0.031357095996135904 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.29743589743589743, + "acc_stderr": 0.02317740813146592, + "acc_norm": 0.29743589743589743, + "acc_norm_stderr": 
0.02317740813146592 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04557239513497752, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04557239513497752 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.27586206896551724, + "acc_stderr": 0.031447125816782426, + "acc_norm": 0.27586206896551724, + "acc_norm_stderr": 0.031447125816782426 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.29354838709677417, + "acc_stderr": 0.025906087021319295, + "acc_norm": 0.29354838709677417, + "acc_norm_stderr": 0.025906087021319295 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.44017094017094016, + "acc_stderr": 0.032520741720630506, + "acc_norm": 0.44017094017094016, + "acc_norm_stderr": 0.032520741720630506 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.35094339622641507, + "acc_stderr": 0.029373646253234686, + "acc_norm": 0.35094339622641507, + "acc_norm_stderr": 0.029373646253234686 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.33636363636363636, + "acc_stderr": 0.04525393596302506, + "acc_norm": 0.33636363636363636, + "acc_norm_stderr": 0.04525393596302506 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.29259259259259257, + "acc_stderr": 0.027738969632176088, + "acc_norm": 0.29259259259259257, + "acc_norm_stderr": 0.027738969632176088 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2781456953642384, + "acc_stderr": 0.03658603262763743, + "acc_norm": 0.2781456953642384, + "acc_norm_stderr": 0.03658603262763743 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.3681592039800995, + "acc_stderr": 0.03410410565495301, + "acc_norm": 
0.3681592039800995, + "acc_norm_stderr": 0.03410410565495301 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2658959537572254, + "acc_stderr": 0.033687629322594295, + "acc_norm": 0.2658959537572254, + "acc_norm_stderr": 0.033687629322594295 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.28835978835978837, + "acc_stderr": 0.023330654054535886, + "acc_norm": 0.28835978835978837, + "acc_norm_stderr": 0.023330654054535886 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2847222222222222, + "acc_stderr": 0.037738099906869355, + "acc_norm": 0.2847222222222222, + "acc_norm_stderr": 0.037738099906869355 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.315028901734104, + "acc_stderr": 0.025009313790069713, + "acc_norm": 0.315028901734104, + "acc_norm_stderr": 0.025009313790069713 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.26993865030674846, + "acc_stderr": 0.034878251684978906, + "acc_norm": 0.26993865030674846, + "acc_norm_stderr": 0.034878251684978906 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3271604938271605, + "acc_stderr": 0.026105673861409818, + "acc_norm": 0.3271604938271605, + "acc_norm_stderr": 0.026105673861409818 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.24870466321243523, + "acc_stderr": 0.031195840877700304, + "acc_norm": 0.24870466321243523, + "acc_norm_stderr": 0.031195840877700304 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 
0.04049339297748141, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.04049339297748141 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3174311926605505, + "acc_stderr": 0.019957152198460504, + "acc_norm": 0.3174311926605505, + "acc_norm_stderr": 0.019957152198460504 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.04006168083848877, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.04006168083848877 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.38562091503267976, + "acc_stderr": 0.027870745278290327, + "acc_norm": 0.38562091503267976, + "acc_norm_stderr": 0.027870745278290327 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421255, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421255 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.4049586776859504, + "acc_stderr": 0.044811377559424694, + "acc_norm": 0.4049586776859504, + "acc_norm_stderr": 0.044811377559424694 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.26973684210526316, + "acc_stderr": 0.03611780560284898, + "acc_norm": 0.26973684210526316, + "acc_norm_stderr": 0.03611780560284898 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2761437908496732, + "acc_stderr": 0.018087276935663137, + "acc_norm": 0.2761437908496732, + "acc_norm_stderr": 0.018087276935663137 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2695035460992908, + "acc_stderr": 0.026469036818590627, + "acc_norm": 0.2695035460992908, + "acc_norm_stderr": 0.026469036818590627 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.30357142857142855, + "acc_stderr": 0.04364226155841044, + "acc_norm": 0.30357142857142855, + "acc_norm_stderr": 0.04364226155841044 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.028353212866863434, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.028353212866863434 + }, + 
"harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2446927374301676, + "acc_stderr": 0.014378169884098424, + "acc_norm": 0.2446927374301676, + "acc_norm_stderr": 0.014378169884098424 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.375, + "acc_stderr": 0.029408372932278746, + "acc_norm": 0.375, + "acc_norm_stderr": 0.029408372932278746 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.39183673469387753, + "acc_stderr": 0.03125127591089165, + "acc_norm": 0.39183673469387753, + "acc_norm_stderr": 0.03125127591089165 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.29535864978902954, + "acc_stderr": 0.02969633871342289, + "acc_norm": 0.29535864978902954, + "acc_norm_stderr": 0.02969633871342289 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.28292046936114734, + "acc_stderr": 0.011503891323188976, + "acc_norm": 0.28292046936114734, + "acc_norm_stderr": 0.011503891323188976 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2696078431372549, + "acc_stderr": 0.03114557065948678, + "acc_norm": 0.2696078431372549, + "acc_norm_stderr": 0.03114557065948678 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3212121212121212, + "acc_stderr": 0.036462049632538115, + "acc_norm": 0.3212121212121212, + "acc_norm_stderr": 0.036462049632538115 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2778457772337821, + "mc1_stderr": 0.015680929364024626, + "mc2": 0.45249267433918944, + "mc2_stderr": 0.016019280038075977 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.21723730814639905, + "acc_stderr": 0.014177416034265046, + "acc_norm": 0.2798110979929162, + 
"acc_norm_stderr": 0.015433715795427745 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, 
+ "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "jylee420/gemma-2b-data-std-v0", + "model_sha": "0428cfb3662ae9affe752d0c1d5b019365fabbae", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/jylee420/gemma-2b-data-std/result_2024-03-17 03:49:42.json b/jylee420/gemma-2b-data-std/result_2024-03-17 03:49:42.json new file mode 100644 index 0000000000000000000000000000000000000000..bdf4c2dcddb8ad96686afba9f1122866ef8e4aa4 --- /dev/null +++ b/jylee420/gemma-2b-data-std/result_2024-03-17 03:49:42.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2363481228668942, + "acc_stderr": 0.012414960524301829, + "acc_norm": 0.28071672354948807, + "acc_norm_stderr": 0.013131238126975578 + }, + "harness|ko_hellaswag|10": { + "acc": 0.302230631348337, + "acc_stderr": 0.004582861219020889, + "acc_norm": 0.35261900019916353, + "acc_norm_stderr": 0.004768088918512185 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.03188578017686398, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.03188578017686398 + }, + 
"harness|ko_mmlu_management|5": { + "acc": 0.1650485436893204, + "acc_stderr": 0.036756688322331886, + "acc_norm": 0.1650485436893204, + "acc_norm_stderr": 0.036756688322331886 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2681992337164751, + "acc_stderr": 0.015842430835269435, + "acc_norm": 0.2681992337164751, + "acc_norm_stderr": 0.015842430835269435 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34814814814814815, + "acc_stderr": 0.041153246103369526, + "acc_norm": 0.34814814814814815, + "acc_norm_stderr": 0.041153246103369526 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3191489361702128, + "acc_stderr": 0.030472973363380045, + "acc_norm": 0.3191489361702128, + "acc_norm_stderr": 0.030472973363380045 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.1927710843373494, + "acc_stderr": 0.030709824050565274, + "acc_norm": 0.1927710843373494, + "acc_norm_stderr": 0.030709824050565274 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3054662379421222, + "acc_stderr": 0.026160584450140478, + "acc_norm": 0.3054662379421222, + "acc_norm_stderr": 0.026160584450140478 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.20179372197309417, + "acc_stderr": 0.02693611191280227, + "acc_norm": 0.20179372197309417, + "acc_norm_stderr": 0.02693611191280227 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.29770992366412213, + "acc_stderr": 0.04010358942462202, + "acc_norm": 0.29770992366412213, + "acc_norm_stderr": 0.04010358942462202 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.26262626262626265, + "acc_stderr": 0.03135305009533084, + "acc_norm": 0.26262626262626265, + "acc_norm_stderr": 0.03135305009533084 + }, 
+ "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3310344827586207, + "acc_stderr": 0.039215453124671215, + "acc_norm": 0.3310344827586207, + "acc_norm_stderr": 0.039215453124671215 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237657, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237657 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3025210084033613, + "acc_stderr": 0.029837962388291926, + "acc_norm": 0.3025210084033613, + "acc_norm_stderr": 0.029837962388291926 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.28205128205128205, + "acc_stderr": 0.0228158130988966, + "acc_norm": 0.28205128205128205, + "acc_norm_stderr": 0.0228158130988966 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.04453197507374984, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.04453197507374984 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3103448275862069, + "acc_stderr": 0.03255086769970103, + "acc_norm": 0.3103448275862069, + "acc_norm_stderr": 0.03255086769970103 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2967741935483871, + "acc_stderr": 0.025988500792411894, + "acc_norm": 0.2967741935483871, + "acc_norm_stderr": 0.025988500792411894 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.31196581196581197, + "acc_stderr": 0.030351527323344958, + "acc_norm": 0.31196581196581197, + "acc_norm_stderr": 0.030351527323344958 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.25660377358490566, + "acc_stderr": 0.026880647889051975, + "acc_norm": 
0.25660377358490566, + "acc_norm_stderr": 0.026880647889051975 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.22727272727272727, + "acc_stderr": 0.04013964554072776, + "acc_norm": 0.22727272727272727, + "acc_norm_stderr": 0.04013964554072776 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.22962962962962963, + "acc_stderr": 0.02564410863926762, + "acc_norm": 0.22962962962962963, + "acc_norm_stderr": 0.02564410863926762 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2582781456953642, + "acc_stderr": 0.035737053147634576, + "acc_norm": 0.2582781456953642, + "acc_norm_stderr": 0.035737053147634576 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.2885572139303483, + "acc_stderr": 0.032038410402133226, + "acc_norm": 0.2885572139303483, + "acc_norm_stderr": 0.032038410402133226 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.24855491329479767, + "acc_stderr": 0.03295304696818317, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 0.03295304696818317 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2698412698412698, + "acc_stderr": 0.022860838309232072, + "acc_norm": 0.2698412698412698, + "acc_norm_stderr": 0.022860838309232072 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.24305555555555555, + "acc_stderr": 0.03586879280080341, + "acc_norm": 0.24305555555555555, + "acc_norm_stderr": 0.03586879280080341 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.26878612716763006, + "acc_stderr": 0.0238680032625001, + "acc_norm": 0.26878612716763006, + "acc_norm_stderr": 0.0238680032625001 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3006134969325153, + 
"acc_stderr": 0.03602511318806771, + "acc_norm": 0.3006134969325153, + "acc_norm_stderr": 0.03602511318806771 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.026229649178821167, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.026229649178821167 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.23316062176165803, + "acc_stderr": 0.03051611137147601, + "acc_norm": 0.23316062176165803, + "acc_norm_stderr": 0.03051611137147601 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.20175438596491227, + "acc_stderr": 0.037752050135836386, + "acc_norm": 0.20175438596491227, + "acc_norm_stderr": 0.037752050135836386 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.22201834862385322, + "acc_stderr": 0.017818849564796617, + "acc_norm": 0.22201834862385322, + "acc_norm_stderr": 0.017818849564796617 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.21428571428571427, + "acc_stderr": 0.03670066451047181, + "acc_norm": 0.21428571428571427, + "acc_norm_stderr": 0.03670066451047181 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2679738562091503, + "acc_stderr": 0.025360603796242557, + "acc_norm": 0.2679738562091503, + "acc_norm_stderr": 0.025360603796242557 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.4049586776859504, + "acc_stderr": 0.044811377559424694, + "acc_norm": 0.4049586776859504, + "acc_norm_stderr": 0.044811377559424694 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.035834961763610625, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.035834961763610625 + }, + 
"harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2761437908496732, + "acc_stderr": 0.018087276935663133, + "acc_norm": 0.2761437908496732, + "acc_norm_stderr": 0.018087276935663133 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2730496453900709, + "acc_stderr": 0.026577860943307857, + "acc_norm": 0.2730496453900709, + "acc_norm_stderr": 0.026577860943307857 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3125, + "acc_stderr": 0.043994650575715215, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.043994650575715215 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.02835321286686344, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.02835321286686344 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.25027932960893856, + "acc_stderr": 0.014487500852850426, + "acc_norm": 0.25027932960893856, + "acc_norm_stderr": 0.014487500852850426 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.1801470588235294, + "acc_stderr": 0.02334516361654486, + "acc_norm": 0.1801470588235294, + "acc_norm_stderr": 0.02334516361654486 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.24081632653061225, + "acc_stderr": 0.027372942201788163, + "acc_norm": 0.24081632653061225, + "acc_norm_stderr": 0.027372942201788163 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2742616033755274, + "acc_stderr": 0.029041333510598028, + "acc_norm": 0.2742616033755274, + "acc_norm_stderr": 0.029041333510598028 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.28683181225554105, + "acc_stderr": 0.011551504781176933, + "acc_norm": 
0.28683181225554105, + "acc_norm_stderr": 0.011551504781176933 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.03096451792692341, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.03096451792692341 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2909090909090909, + "acc_stderr": 0.03546563019624335, + "acc_norm": 0.2909090909090909, + "acc_norm_stderr": 0.03546563019624335 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.28151774785801714, + "mc1_stderr": 0.01574402724825605, + "mc2": 0.46790855279560106, + "mc2_stderr": 0.0161930339013724 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.17355371900826447, + "acc_stderr": 0.01302084279439823, + "acc_norm": 0.23376623376623376, + "acc_norm_stderr": 0.014550782587103128 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "jylee420/gemma-2b-data-std", + "model_sha": "8514b865649969a5e1acdbff5d098694269c69ab", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/jyoung105/KoR-Orca-Platypus-13B-neft/result_2023-10-23 17:13:22.json b/jyoung105/KoR-Orca-Platypus-13B-neft/result_2023-10-23 17:13:22.json 
new file mode 100644 index 0000000000000000000000000000000000000000..4774e48739fb62dd429d8fdcaf537b41b1aa5268 --- /dev/null +++ b/jyoung105/KoR-Orca-Platypus-13B-neft/result_2023-10-23 17:13:22.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3626279863481229, + "acc_stderr": 0.014049106564955005, + "acc_norm": 0.40955631399317405, + "acc_norm_stderr": 0.014370358632472451 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4001194981079466, + "acc_stderr": 0.004889210628907952, + "acc_norm": 0.5324636526588329, + "acc_norm_stderr": 0.0049792529549773125 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5087719298245614, + "acc_stderr": 0.038342347441649924, + "acc_norm": 0.5087719298245614, + "acc_norm_stderr": 0.038342347441649924 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.3786407766990291, + "acc_stderr": 0.048026946982589726, + "acc_norm": 0.3786407766990291, + "acc_norm_stderr": 0.048026946982589726 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4955300127713921, + "acc_stderr": 0.017879248970584388, + "acc_norm": 0.4955300127713921, + "acc_norm_stderr": 0.017879248970584388 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4, + "acc_stderr": 0.04232073695151589, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04232073695151589 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.33617021276595743, + "acc_stderr": 0.030881618520676942, + "acc_norm": 0.33617021276595743, + "acc_norm_stderr": 0.030881618520676942 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3192771084337349, + "acc_stderr": 0.0362933532994786, + "acc_norm": 0.3192771084337349, + "acc_norm_stderr": 0.0362933532994786 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4758842443729904, + "acc_stderr": 0.02836504154256457, + "acc_norm": 0.4758842443729904, + "acc_norm_stderr": 
0.02836504154256457 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.37668161434977576, + "acc_stderr": 0.03252113489929187, + "acc_norm": 0.37668161434977576, + "acc_norm_stderr": 0.03252113489929187 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.45038167938931295, + "acc_stderr": 0.04363643698524779, + "acc_norm": 0.45038167938931295, + "acc_norm_stderr": 0.04363643698524779 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.37373737373737376, + "acc_stderr": 0.034468977386593325, + "acc_norm": 0.37373737373737376, + "acc_norm_stderr": 0.034468977386593325 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.04104269211806232, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.04104269211806232 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237654, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237654 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3697478991596639, + "acc_stderr": 0.031357095996135904, + "acc_norm": 0.3697478991596639, + "acc_norm_stderr": 0.031357095996135904 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.38461538461538464, + "acc_stderr": 0.024666744915187236, + "acc_norm": 0.38461538461538464, + "acc_norm_stderr": 0.024666744915187236 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.04793724854411018, + "acc_norm": 0.35, + "acc_norm_stderr": 0.04793724854411018 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.04803752235190192, + "acc_norm": 0.4444444444444444, 
+ "acc_norm_stderr": 0.04803752235190192 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3103448275862069, + "acc_stderr": 0.03255086769970103, + "acc_norm": 0.3103448275862069, + "acc_norm_stderr": 0.03255086769970103 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4483870967741935, + "acc_stderr": 0.02829205683011274, + "acc_norm": 0.4483870967741935, + "acc_norm_stderr": 0.02829205683011274 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5854700854700855, + "acc_stderr": 0.03227396567623779, + "acc_norm": 0.5854700854700855, + "acc_norm_stderr": 0.03227396567623779 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.46037735849056605, + "acc_stderr": 0.030676096599389174, + "acc_norm": 0.46037735849056605, + "acc_norm_stderr": 0.030676096599389174 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4909090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.4909090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.026719240783712173, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.026719240783712173 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2582781456953642, + "acc_stderr": 0.035737053147634576, + "acc_norm": 0.2582781456953642, + "acc_norm_stderr": 0.035737053147634576 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5124378109452736, + "acc_stderr": 0.035344398485395785, + "acc_norm": 0.5124378109452736, + "acc_norm_stderr": 0.035344398485395785 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.36416184971098264, + "acc_stderr": 0.03669072477416906, + "acc_norm": 0.36416184971098264, + "acc_norm_stderr": 0.03669072477416906 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.29365079365079366, + "acc_stderr": 0.023456037383982026, + "acc_norm": 0.29365079365079366, + "acc_norm_stderr": 0.023456037383982026 + }, + "harness|ko_mmlu_college_biology|5": { + 
"acc": 0.3333333333333333, + "acc_stderr": 0.039420826399272135, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.039420826399272135 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4161849710982659, + "acc_stderr": 0.026538189104705488, + "acc_norm": 0.4161849710982659, + "acc_norm_stderr": 0.026538189104705488 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.37423312883435583, + "acc_stderr": 0.03802068102899616, + "acc_norm": 0.37423312883435583, + "acc_norm_stderr": 0.03802068102899616 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.44135802469135804, + "acc_stderr": 0.027628737155668777, + "acc_norm": 0.44135802469135804, + "acc_norm_stderr": 0.027628737155668777 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.45077720207253885, + "acc_stderr": 0.03590910952235524, + "acc_norm": 0.45077720207253885, + "acc_norm_stderr": 0.03590910952235524 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.03999423879281336, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.03999423879281336 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.43486238532110094, + "acc_stderr": 0.02125463146560928, + "acc_norm": 0.43486238532110094, + "acc_norm_stderr": 0.02125463146560928 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.04006168083848878, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.04006168083848878 + }, + 
"harness|ko_mmlu_nutrition|5": { + "acc": 0.3954248366013072, + "acc_stderr": 0.027996723180631455, + "acc_norm": 0.3954248366013072, + "acc_norm_stderr": 0.027996723180631455 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.44, + "acc_stderr": 0.0498887651569859, + "acc_norm": 0.44, + "acc_norm_stderr": 0.0498887651569859 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5785123966942148, + "acc_stderr": 0.04507732278775087, + "acc_norm": 0.5785123966942148, + "acc_norm_stderr": 0.04507732278775087 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40131578947368424, + "acc_stderr": 0.03988903703336284, + "acc_norm": 0.40131578947368424, + "acc_norm_stderr": 0.03988903703336284 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3415032679738562, + "acc_stderr": 0.019184639328092484, + "acc_norm": 0.3415032679738562, + "acc_norm_stderr": 0.019184639328092484 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32269503546099293, + "acc_stderr": 0.027889139300534774, + "acc_norm": 0.32269503546099293, + "acc_norm_stderr": 0.027889139300534774 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25, + "acc_stderr": 0.04109974682633932, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04109974682633932 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.25, + "acc_stderr": 0.029531221160930918, + "acc_norm": 0.25, + "acc_norm_stderr": 0.029531221160930918 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + 
"harness|ko_mmlu_professional_medicine|5": { + "acc": 0.25, + "acc_stderr": 0.026303648393696036, + "acc_norm": 0.25, + "acc_norm_stderr": 0.026303648393696036 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3836734693877551, + "acc_stderr": 0.03113088039623593, + "acc_norm": 0.3836734693877551, + "acc_norm_stderr": 0.03113088039623593 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.540084388185654, + "acc_stderr": 0.03244246810187913, + "acc_norm": 0.540084388185654, + "acc_norm_stderr": 0.03244246810187913 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.31486310299869624, + "acc_stderr": 0.011862561755715931, + "acc_norm": 0.31486310299869624, + "acc_norm_stderr": 0.011862561755715931 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4215686274509804, + "acc_stderr": 0.03465868196380758, + "acc_norm": 0.4215686274509804, + "acc_norm_stderr": 0.03465868196380758 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.038956580652718446, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.038956580652718446 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2913096695226438, + "mc1_stderr": 0.015905987048184828, + "mc2": 0.45413657999042506, + "mc2_stderr": 0.015074046336424325 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3990554899645809, + "acc_stderr": 0.016836377292849296, + "acc_norm": 0.51357733175915, + "acc_norm_stderr": 0.01718401506040146 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + 
"harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + 
"harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "jyoung105/KoR-Orca-Platypus-13B-neft", + "model_sha": "a02ee5b06d952c0dc23f5868d59778638696ebfd", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/jyoung105/ko-platypus2-collective-13b/result_2023-10-14 09:02:59.json b/jyoung105/ko-platypus2-collective-13b/result_2023-10-14 09:02:59.json new file mode 100644 index 0000000000000000000000000000000000000000..a3ae825ed3857faec885fd37f5ce521d00f57798 --- /dev/null +++ b/jyoung105/ko-platypus2-collective-13b/result_2023-10-14 09:02:59.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3856655290102389, + "acc_stderr": 0.014224250973257182, + "acc_norm": 0.44283276450511944, + "acc_norm_stderr": 0.014515573873348906 + }, + "harness|ko_hellaswag|10": { + "acc": 0.40798645688109936, + "acc_stderr": 0.004904561795919, + "acc_norm": 0.5428201553475404, + "acc_norm_stderr": 0.004971449552787176 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5263157894736842, + "acc_stderr": 0.03829509868994727, + "acc_norm": 0.5263157894736842, + "acc_norm_stderr": 0.03829509868994727 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.49514563106796117, + "acc_stderr": 0.049505043821289195, + "acc_norm": 0.49514563106796117, + "acc_norm_stderr": 0.049505043821289195 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5261813537675607, + "acc_stderr": 0.017855434554042, + "acc_norm": 0.5261813537675607, + "acc_norm_stderr": 0.017855434554042 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.04292596718256981, + "acc_norm": 0.4444444444444444, + 
"acc_norm_stderr": 0.04292596718256981 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3574468085106383, + "acc_stderr": 0.03132941789476425, + "acc_norm": 0.3574468085106383, + "acc_norm_stderr": 0.03132941789476425 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3674698795180723, + "acc_stderr": 0.03753267402120574, + "acc_norm": 0.3674698795180723, + "acc_norm_stderr": 0.03753267402120574 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4533762057877814, + "acc_stderr": 0.02827435985489424, + "acc_norm": 0.4533762057877814, + "acc_norm_stderr": 0.02827435985489424 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.42152466367713004, + "acc_stderr": 0.03314190222110655, + "acc_norm": 0.42152466367713004, + "acc_norm_stderr": 0.03314190222110655 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48854961832061067, + "acc_stderr": 0.04384140024078016, + "acc_norm": 0.48854961832061067, + "acc_norm_stderr": 0.04384140024078016 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.4797979797979798, + "acc_stderr": 0.03559443565563919, + "acc_norm": 0.4797979797979798, + "acc_norm_stderr": 0.03559443565563919 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4482758620689655, + "acc_stderr": 0.04144311810878151, + "acc_norm": 0.4482758620689655, + "acc_norm_stderr": 0.04144311810878151 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.04533838195929776, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.04533838195929776 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.42016806722689076, + "acc_stderr": 0.032061837832361516, + 
"acc_norm": 0.42016806722689076, + "acc_norm_stderr": 0.032061837832361516 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4230769230769231, + "acc_stderr": 0.025049197876042328, + "acc_norm": 0.4230769230769231, + "acc_norm_stderr": 0.025049197876042328 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.53, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.53, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5092592592592593, + "acc_stderr": 0.04832853553437056, + "acc_norm": 0.5092592592592593, + "acc_norm_stderr": 0.04832853553437056 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3793103448275862, + "acc_stderr": 0.034139638059062345, + "acc_norm": 0.3793103448275862, + "acc_norm_stderr": 0.034139638059062345 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.44516129032258067, + "acc_stderr": 0.028272410186214906, + "acc_norm": 0.44516129032258067, + "acc_norm_stderr": 0.028272410186214906 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6068376068376068, + "acc_stderr": 0.03199957924651048, + "acc_norm": 0.6068376068376068, + "acc_norm_stderr": 0.03199957924651048 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4377358490566038, + "acc_stderr": 0.03053333843046751, + "acc_norm": 0.4377358490566038, + "acc_norm_stderr": 0.03053333843046751 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5727272727272728, + "acc_stderr": 0.04738198703545483, + "acc_norm": 0.5727272727272728, + "acc_norm_stderr": 0.04738198703545483 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2814814814814815, + "acc_stderr": 0.027420019350945273, + "acc_norm": 0.2814814814814815, + "acc_norm_stderr": 0.027420019350945273 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 
0.33774834437086093, + "acc_stderr": 0.038615575462551684, + "acc_norm": 0.33774834437086093, + "acc_norm_stderr": 0.038615575462551684 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.572139303482587, + "acc_stderr": 0.03498541988407795, + "acc_norm": 0.572139303482587, + "acc_norm_stderr": 0.03498541988407795 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3872832369942196, + "acc_stderr": 0.03714325906302065, + "acc_norm": 0.3872832369942196, + "acc_norm_stderr": 0.03714325906302065 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.29365079365079366, + "acc_stderr": 0.023456037383982026, + "acc_norm": 0.29365079365079366, + "acc_norm_stderr": 0.023456037383982026 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.040166600304512336, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.040166600304512336 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.62, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.62, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4797687861271676, + "acc_stderr": 0.026897049996382875, + "acc_norm": 0.4797687861271676, + "acc_norm_stderr": 0.026897049996382875 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4171779141104294, + "acc_stderr": 0.038741028598180814, + "acc_norm": 0.4171779141104294, + "acc_norm_stderr": 0.038741028598180814 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.027744313443376536, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.027744313443376536 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + 
"harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.49740932642487046, + "acc_stderr": 0.03608390745384488, + "acc_norm": 0.49740932642487046, + "acc_norm_stderr": 0.03608390745384488 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.03999423879281336, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.03999423879281336 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5339449541284403, + "acc_stderr": 0.021387863350353985, + "acc_norm": 0.5339449541284403, + "acc_norm_stderr": 0.021387863350353985 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3253968253968254, + "acc_stderr": 0.04190596438871136, + "acc_norm": 0.3253968253968254, + "acc_norm_stderr": 0.04190596438871136 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.028431095444176643, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.028431095444176643 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6198347107438017, + "acc_stderr": 0.04431324501968432, + "acc_norm": 0.6198347107438017, + "acc_norm_stderr": 0.04431324501968432 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4407894736842105, + "acc_stderr": 0.04040311062490437, + "acc_norm": 0.4407894736842105, + "acc_norm_stderr": 0.04040311062490437 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.34967320261437906, + "acc_stderr": 0.019291961895066382, + "acc_norm": 0.34967320261437906, + "acc_norm_stderr": 0.019291961895066382 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3404255319148936, + "acc_stderr": 0.02826765748265014, + "acc_norm": 0.3404255319148936, + "acc_norm_stderr": 0.02826765748265014 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.23214285714285715, + "acc_stderr": 0.04007341809755806, + 
"acc_norm": 0.23214285714285715, + "acc_norm_stderr": 0.04007341809755806 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.38425925925925924, + "acc_stderr": 0.03317354514310742, + "acc_norm": 0.38425925925925924, + "acc_norm_stderr": 0.03317354514310742 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2547486033519553, + "acc_stderr": 0.014572650383409146, + "acc_norm": 0.2547486033519553, + "acc_norm_stderr": 0.014572650383409146 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4264705882352941, + "acc_stderr": 0.030042615832714864, + "acc_norm": 0.4264705882352941, + "acc_norm_stderr": 0.030042615832714864 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.46530612244897956, + "acc_stderr": 0.03193207024425314, + "acc_norm": 0.46530612244897956, + "acc_norm_stderr": 0.03193207024425314 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5738396624472574, + "acc_stderr": 0.03219035703131774, + "acc_norm": 0.5738396624472574, + "acc_norm_stderr": 0.03219035703131774 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3728813559322034, + "acc_stderr": 0.012350630058333362, + "acc_norm": 0.3728813559322034, + "acc_norm_stderr": 0.012350630058333362 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.46078431372549017, + "acc_stderr": 0.03498501649369527, + "acc_norm": 0.46078431372549017, + "acc_norm_stderr": 0.03498501649369527 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.03903698647748441, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.03903698647748441 + }, + 
"harness|ko_truthfulqa_mc|0": { + "mc1": 0.2729498164014688, + "mc1_stderr": 0.015594753632006514, + "mc2": 0.4442744883801461, + "mc2_stderr": 0.015229595169585636 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.37662337662337664, + "acc_stderr": 0.01665879987405197, + "acc_norm": 0.42739079102715466, + "acc_norm_stderr": 0.01700812984482316 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "jyoung105/ko-platypus2-collective-13b", + "model_sha": "a42bdc7082f08920ee23b5ed9946aa81008de332", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/jyoung105/ko-platypus2-collective-13b_v1.1/result_2023-10-20 02:08:26.json b/jyoung105/ko-platypus2-collective-13b_v1.1/result_2023-10-20 02:08:26.json new file mode 100644 index 0000000000000000000000000000000000000000..ae7762a74656e1664b3ee943f8e935558f239c7b --- /dev/null +++ b/jyoung105/ko-platypus2-collective-13b_v1.1/result_2023-10-20 02:08:26.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.38054607508532423, + "acc_stderr": 0.014188277712349812, + "acc_norm": 0.4453924914675768, + "acc_norm_stderr": 0.014523987638344078 + }, + 
"harness|ko_hellaswag|10": { + "acc": 0.4084843656642103, + "acc_stderr": 0.0049054894940050746, + "acc_norm": 0.5414260107548298, + "acc_norm_stderr": 0.0049726258487026555 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5263157894736842, + "acc_stderr": 0.03829509868994727, + "acc_norm": 0.5263157894736842, + "acc_norm_stderr": 0.03829509868994727 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.49514563106796117, + "acc_stderr": 0.049505043821289195, + "acc_norm": 0.49514563106796117, + "acc_norm_stderr": 0.049505043821289195 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5223499361430396, + "acc_stderr": 0.017862091778507855, + "acc_norm": 0.5223499361430396, + "acc_norm_stderr": 0.017862091778507855 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.42962962962962964, + "acc_stderr": 0.04276349494376599, + "acc_norm": 0.42962962962962964, + "acc_norm_stderr": 0.04276349494376599 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3574468085106383, + "acc_stderr": 0.03132941789476425, + "acc_norm": 0.3574468085106383, + "acc_norm_stderr": 0.03132941789476425 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3493975903614458, + "acc_stderr": 0.037117251907407486, + "acc_norm": 0.3493975903614458, + "acc_norm_stderr": 0.037117251907407486 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4662379421221865, + "acc_stderr": 0.028333277109562804, + "acc_norm": 0.4662379421221865, + "acc_norm_stderr": 0.028333277109562804 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4080717488789238, + "acc_stderr": 0.03298574607842821, + "acc_norm": 0.4080717488789238, + "acc_norm_stderr": 0.03298574607842821 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48091603053435117, + "acc_stderr": 0.04382094705550989, + "acc_norm": 0.48091603053435117, + "acc_norm_stderr": 
0.04382094705550989 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.48484848484848486, + "acc_stderr": 0.0356071651653106, + "acc_norm": 0.48484848484848486, + "acc_norm_stderr": 0.0356071651653106 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.45517241379310347, + "acc_stderr": 0.04149886942192117, + "acc_norm": 0.45517241379310347, + "acc_norm_stderr": 0.04149886942192117 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.30392156862745096, + "acc_stderr": 0.045766654032077636, + "acc_norm": 0.30392156862745096, + "acc_norm_stderr": 0.045766654032077636 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.42016806722689076, + "acc_stderr": 0.032061837832361516, + "acc_norm": 0.42016806722689076, + "acc_norm_stderr": 0.032061837832361516 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4282051282051282, + "acc_stderr": 0.02508830145469484, + "acc_norm": 0.4282051282051282, + "acc_norm_stderr": 0.02508830145469484 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.52, + "acc_stderr": 0.05021167315686779, + "acc_norm": 0.52, + "acc_norm_stderr": 0.05021167315686779 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3694581280788177, + "acc_stderr": 0.03395970381998575, + "acc_norm": 0.3694581280788177, + "acc_norm_stderr": 0.03395970381998575 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.45483870967741935, + "acc_stderr": 0.02832774309156106, + "acc_norm": 0.45483870967741935, + "acc_norm_stderr": 
0.02832774309156106 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.594017094017094, + "acc_stderr": 0.03217180182641087, + "acc_norm": 0.594017094017094, + "acc_norm_stderr": 0.03217180182641087 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.44528301886792454, + "acc_stderr": 0.030588052974270655, + "acc_norm": 0.44528301886792454, + "acc_norm_stderr": 0.030588052974270655 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5363636363636364, + "acc_stderr": 0.047764491623961985, + "acc_norm": 0.5363636363636364, + "acc_norm_stderr": 0.047764491623961985 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2814814814814815, + "acc_stderr": 0.027420019350945277, + "acc_norm": 0.2814814814814815, + "acc_norm_stderr": 0.027420019350945277 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.038020397601079024, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.038020397601079024 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5771144278606966, + "acc_stderr": 0.034932317774212816, + "acc_norm": 0.5771144278606966, + "acc_norm_stderr": 0.034932317774212816 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3988439306358382, + "acc_stderr": 0.03733626655383509, + "acc_norm": 0.3988439306358382, + "acc_norm_stderr": 0.03733626655383509 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.29365079365079366, + "acc_stderr": 0.023456037383982026, + "acc_norm": 0.29365079365079366, + "acc_norm_stderr": 0.023456037383982026 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3680555555555556, + "acc_stderr": 0.040329990539607195, + "acc_norm": 0.3680555555555556, + "acc_norm_stderr": 0.040329990539607195 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.62, + "acc_stderr": 0.04878317312145632, + 
"acc_norm": 0.62, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4682080924855491, + "acc_stderr": 0.026864624366756643, + "acc_norm": 0.4682080924855491, + "acc_norm_stderr": 0.026864624366756643 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4294478527607362, + "acc_stderr": 0.038890666191127216, + "acc_norm": 0.4294478527607362, + "acc_norm_stderr": 0.038890666191127216 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4567901234567901, + "acc_stderr": 0.027716661650194048, + "acc_norm": 0.4567901234567901, + "acc_norm_stderr": 0.027716661650194048 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5077720207253886, + "acc_stderr": 0.03608003225569653, + "acc_norm": 0.5077720207253886, + "acc_norm_stderr": 0.03608003225569653 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.22807017543859648, + "acc_stderr": 0.03947152782669415, + "acc_norm": 0.22807017543859648, + "acc_norm_stderr": 0.03947152782669415 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5339449541284403, + "acc_stderr": 0.021387863350353982, + "acc_norm": 0.5339449541284403, + "acc_norm_stderr": 0.021387863350353982 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.04134913018303316, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.04134913018303316 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.43790849673202614, + "acc_stderr": 0.028408302020332687, + "acc_norm": 0.43790849673202614, + "acc_norm_stderr": 0.028408302020332687 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6363636363636364, + "acc_stderr": 
0.043913262867240704, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.043913262867240704 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4342105263157895, + "acc_stderr": 0.040335656678483184, + "acc_norm": 0.4342105263157895, + "acc_norm_stderr": 0.040335656678483184 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3545751633986928, + "acc_stderr": 0.019353360547553704, + "acc_norm": 0.3545751633986928, + "acc_norm_stderr": 0.019353360547553704 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3475177304964539, + "acc_stderr": 0.028406627809590947, + "acc_norm": 0.3475177304964539, + "acc_norm_stderr": 0.028406627809590947 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25892857142857145, + "acc_stderr": 0.041577515398656284, + "acc_norm": 0.25892857142857145, + "acc_norm_stderr": 0.041577515398656284 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.03293377139415191, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.03293377139415191 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2659217877094972, + "acc_stderr": 0.014776765066438885, + "acc_norm": 0.2659217877094972, + "acc_norm_stderr": 0.014776765066438885 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.41, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4227941176470588, + "acc_stderr": 0.030008562845003476, + "acc_norm": 0.4227941176470588, + "acc_norm_stderr": 0.030008562845003476 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4775510204081633, + "acc_stderr": 0.031976941187136725, + "acc_norm": 0.4775510204081633, + "acc_norm_stderr": 0.031976941187136725 + }, + 
"harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5780590717299579, + "acc_stderr": 0.032148146302403695, + "acc_norm": 0.5780590717299579, + "acc_norm_stderr": 0.032148146302403695 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.37222946544980445, + "acc_stderr": 0.012346241297204368, + "acc_norm": 0.37222946544980445, + "acc_norm_stderr": 0.012346241297204368 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.034849415144292316, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.034849415144292316 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4909090909090909, + "acc_stderr": 0.03903698647748441, + "acc_norm": 0.4909090909090909, + "acc_norm_stderr": 0.03903698647748441 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.29008567931456547, + "mc1_stderr": 0.01588623687420952, + "mc2": 0.4598250500026081, + "mc2_stderr": 0.01526561900775728 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3730814639905549, + "acc_stderr": 0.016627318275137432, + "acc_norm": 0.40731995277449823, + "acc_norm_stderr": 0.01689245669519127 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 
1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "jyoung105/ko-platypus2-collective-13b_v1.1", + "model_sha": "ccb5e5262ad155e445b27ca11c6bb946f56fc4d1", + "model_dtype": 
"torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/kaist-ai/prometheus-13b-v1.0/result_2023-11-20 14:31:31.json b/kaist-ai/prometheus-13b-v1.0/result_2023-11-20 14:31:31.json new file mode 100644 index 0000000000000000000000000000000000000000..210f4bbf0f582cfd74015c011b21440a2e3c9f34 --- /dev/null +++ b/kaist-ai/prometheus-13b-v1.0/result_2023-11-20 14:31:31.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.28668941979522183, + "acc_stderr": 0.013214986329274757, + "acc_norm": 0.32593856655290104, + "acc_norm_stderr": 0.013697432466693242 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3300139414459271, + "acc_stderr": 0.00469256765596177, + "acc_norm": 0.40021907986456884, + "acc_norm_stderr": 0.00488941312620877 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.391812865497076, + "acc_stderr": 0.03743979825926401, + "acc_norm": 0.391812865497076, + "acc_norm_stderr": 0.03743979825926401 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.42718446601941745, + "acc_stderr": 0.04897957737781169, + "acc_norm": 0.42718446601941745, + "acc_norm_stderr": 0.04897957737781169 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.41507024265644954, + "acc_stderr": 0.017620137003655268, + "acc_norm": 0.41507024265644954, + "acc_norm_stderr": 0.017620137003655268 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.31851851851851853, + "acc_stderr": 0.040247784019771096, + "acc_norm": 0.31851851851851853, + "acc_norm_stderr": 0.040247784019771096 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.32340425531914896, + "acc_stderr": 0.030579442773610334, + "acc_norm": 0.32340425531914896, + "acc_norm_stderr": 0.030579442773610334 + 
}, + "harness|ko_mmlu_virology|5": { + "acc": 0.3132530120481928, + "acc_stderr": 0.03610805018031022, + "acc_norm": 0.3132530120481928, + "acc_norm_stderr": 0.03610805018031022 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3954983922829582, + "acc_stderr": 0.027770918531427838, + "acc_norm": 0.3954983922829582, + "acc_norm_stderr": 0.027770918531427838 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3183856502242152, + "acc_stderr": 0.03126580522513713, + "acc_norm": 0.3183856502242152, + "acc_norm_stderr": 0.03126580522513713 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4580152671755725, + "acc_stderr": 0.04369802690578756, + "acc_norm": 0.4580152671755725, + "acc_norm_stderr": 0.04369802690578756 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.4292929292929293, + "acc_stderr": 0.03526552724601198, + "acc_norm": 0.4292929292929293, + "acc_norm_stderr": 0.03526552724601198 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3793103448275862, + "acc_stderr": 0.04043461861916747, + "acc_norm": 0.3793103448275862, + "acc_norm_stderr": 0.04043461861916747 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.04023382273617746, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.04023382273617746 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3487394957983193, + "acc_stderr": 0.030956636328566548, + "acc_norm": 0.3487394957983193, + "acc_norm_stderr": 0.030956636328566548 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3641025641025641, + "acc_stderr": 0.024396672985094778, + "acc_norm": 0.3641025641025641, + "acc_norm_stderr": 0.024396672985094778 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.43, + 
"acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.04750077341199986, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.04750077341199986 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3842364532019704, + "acc_stderr": 0.03422398565657551, + "acc_norm": 0.3842364532019704, + "acc_norm_stderr": 0.03422398565657551 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4258064516129032, + "acc_stderr": 0.028129112709165897, + "acc_norm": 0.4258064516129032, + "acc_norm_stderr": 0.028129112709165897 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5769230769230769, + "acc_stderr": 0.032366121762202014, + "acc_norm": 0.5769230769230769, + "acc_norm_stderr": 0.032366121762202014 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.39622641509433965, + "acc_stderr": 0.03010279378179119, + "acc_norm": 0.39622641509433965, + "acc_norm_stderr": 0.03010279378179119 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4636363636363636, + "acc_stderr": 0.047764491623961985, + "acc_norm": 0.4636363636363636, + "acc_norm_stderr": 0.047764491623961985 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.026842057873833706, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.026842057873833706 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.24503311258278146, + "acc_stderr": 0.03511807571804724, + "acc_norm": 0.24503311258278146, + "acc_norm_stderr": 0.03511807571804724 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.527363184079602, + "acc_stderr": 0.035302355173346824, + "acc_norm": 0.527363184079602, + "acc_norm_stderr": 0.035302355173346824 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.35260115606936415, + "acc_stderr": 
0.036430371689585475, + "acc_norm": 0.35260115606936415, + "acc_norm_stderr": 0.036430371689585475 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.28835978835978837, + "acc_stderr": 0.0233306540545359, + "acc_norm": 0.28835978835978837, + "acc_norm_stderr": 0.0233306540545359 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3263888888888889, + "acc_stderr": 0.03921067198982266, + "acc_norm": 0.3263888888888889, + "acc_norm_stderr": 0.03921067198982266 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816507, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816507 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237101, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237101 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.41040462427745666, + "acc_stderr": 0.026483392042098177, + "acc_norm": 0.41040462427745666, + "acc_norm_stderr": 0.026483392042098177 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3312883435582822, + "acc_stderr": 0.03697983910025588, + "acc_norm": 0.3312883435582822, + "acc_norm_stderr": 0.03697983910025588 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.02622964917882116, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.02622964917882116 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.43005181347150256, + "acc_stderr": 0.03572954333144808, + "acc_norm": 0.43005181347150256, + "acc_norm_stderr": 0.03572954333144808 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.04227054451232199, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.04227054451232199 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 
0.3412844036697248, + "acc_stderr": 0.020328612816592435, + "acc_norm": 0.3412844036697248, + "acc_norm_stderr": 0.020328612816592435 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.04040610178208841, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.04040610178208841 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3758169934640523, + "acc_stderr": 0.027732834353363944, + "acc_norm": 0.3758169934640523, + "acc_norm_stderr": 0.027732834353363944 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5950413223140496, + "acc_stderr": 0.04481137755942469, + "acc_norm": 0.5950413223140496, + "acc_norm_stderr": 0.04481137755942469 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3881578947368421, + "acc_stderr": 0.03965842097512744, + "acc_norm": 0.3881578947368421, + "acc_norm_stderr": 0.03965842097512744 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2875816993464052, + "acc_stderr": 0.018311653053648222, + "acc_norm": 0.2875816993464052, + "acc_norm_stderr": 0.018311653053648222 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3120567375886525, + "acc_stderr": 0.027640120545169938, + "acc_norm": 0.3120567375886525, + "acc_norm_stderr": 0.027640120545169938 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3392857142857143, + "acc_stderr": 0.0449394906861354, + "acc_norm": 0.3392857142857143, + "acc_norm_stderr": 0.0449394906861354 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.25, + "acc_stderr": 0.029531221160930918, + "acc_norm": 0.25, + "acc_norm_stderr": 0.029531221160930918 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2435754189944134, + "acc_stderr": 0.01435591196476786, + "acc_norm": 0.2435754189944134, + "acc_norm_stderr": 0.01435591196476786 + }, + 
"harness|ko_mmlu_college_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.26838235294117646, + "acc_stderr": 0.02691748122437725, + "acc_norm": 0.26838235294117646, + "acc_norm_stderr": 0.02691748122437725 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.42448979591836733, + "acc_stderr": 0.03164209487942941, + "acc_norm": 0.42448979591836733, + "acc_norm_stderr": 0.03164209487942941 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.37130801687763715, + "acc_stderr": 0.031450686007448596, + "acc_norm": 0.37130801687763715, + "acc_norm_stderr": 0.031450686007448596 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.31421121251629724, + "acc_stderr": 0.011855911587048231, + "acc_norm": 0.31421121251629724, + "acc_norm_stderr": 0.011855911587048231 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.3088235294117647, + "acc_stderr": 0.03242661719827218, + "acc_norm": 0.3088235294117647, + "acc_norm_stderr": 0.03242661719827218 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.34545454545454546, + "acc_stderr": 0.03713158067481913, + "acc_norm": 0.34545454545454546, + "acc_norm_stderr": 0.03713158067481913 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2974296205630355, + "mc1_stderr": 0.016002651487361005, + "mc2": 0.4511958734873742, + "mc2_stderr": 0.016021768655782184 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3612750885478158, + "acc_stderr": 0.01651546302241201, + "acc_norm": 0.39433293978748524, + "acc_norm_stderr": 0.016802090674893206 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + 
"harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + 
"harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "kaist-ai/prometheus-13b-v1.0", + "model_sha": "9088377314f91af4b48940e09a0c76d0878f5020", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/kaist-ai/selfee-13b-delta/result_2023-11-20 14:29:53.json b/kaist-ai/selfee-13b-delta/result_2023-11-20 14:29:53.json new file mode 100644 index 0000000000000000000000000000000000000000..a2c5544cdd30dca2dddbc6037214f2ea38e05c67 --- /dev/null +++ b/kaist-ai/selfee-13b-delta/result_2023-11-20 14:29:53.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2098976109215017, + "acc_stderr": 0.011900548748047452, + "acc_norm": 0.2568259385665529, + "acc_norm_stderr": 0.012766923794116796 + }, + "harness|ko_hellaswag|10": { + "acc": 0.2517426807408883, + "acc_stderr": 0.004331271717773851, + "acc_norm": 0.24307906791475803, + "acc_norm_stderr": 0.004280658234718773 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.21052631578947367, + "acc_stderr": 0.0312678171466318, + "acc_norm": 0.21052631578947367, + "acc_norm_stderr": 0.0312678171466318 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2524271844660194, + "acc_stderr": 0.04301250399690877, + "acc_norm": 0.2524271844660194, + "acc_norm_stderr": 
0.04301250399690877 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.28735632183908044, + "acc_stderr": 0.0161824107306827, + "acc_norm": 0.28735632183908044, + "acc_norm_stderr": 0.0161824107306827 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2518518518518518, + "acc_stderr": 0.03749850709174021, + "acc_norm": 0.2518518518518518, + "acc_norm_stderr": 0.03749850709174021 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768077, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768077 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.32340425531914896, + "acc_stderr": 0.03057944277361034, + "acc_norm": 0.32340425531914896, + "acc_norm_stderr": 0.03057944277361034 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3192771084337349, + "acc_stderr": 0.03629335329947859, + "acc_norm": 0.3192771084337349, + "acc_norm_stderr": 0.03629335329947859 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2733118971061093, + "acc_stderr": 0.025311765975426115, + "acc_norm": 0.2733118971061093, + "acc_norm_stderr": 0.025311765975426115 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.37668161434977576, + "acc_stderr": 0.03252113489929189, + "acc_norm": 0.37668161434977576, + "acc_norm_stderr": 0.03252113489929189 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.22900763358778625, + "acc_stderr": 0.036853466317118506, + "acc_norm": 0.22900763358778625, + "acc_norm_stderr": 0.036853466317118506 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768077, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768077 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.21717171717171718, + "acc_stderr": 0.02937661648494563, + "acc_norm": 0.21717171717171718, + "acc_norm_stderr": 0.02937661648494563 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2206896551724138, + "acc_stderr": 0.03455930201924811, + "acc_norm": 0.2206896551724138, + 
"acc_norm_stderr": 0.03455930201924811 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.19607843137254902, + "acc_stderr": 0.03950581861179962, + "acc_norm": 0.19607843137254902, + "acc_norm_stderr": 0.03950581861179962 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.23109243697478993, + "acc_stderr": 0.027381406927868973, + "acc_norm": 0.23109243697478993, + "acc_norm_stderr": 0.027381406927868973 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2205128205128205, + "acc_stderr": 0.021020672680827912, + "acc_norm": 0.2205128205128205, + "acc_norm_stderr": 0.021020672680827912 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.044143436668549335, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.044143436668549335 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.270935960591133, + "acc_stderr": 0.031270907132976984, + "acc_norm": 0.270935960591133, + "acc_norm_stderr": 0.031270907132976984 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.25483870967741934, + "acc_stderr": 0.024790118459332208, + "acc_norm": 0.25483870967741934, + "acc_norm_stderr": 0.024790118459332208 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2564102564102564, + "acc_stderr": 0.02860595370200424, + "acc_norm": 0.2564102564102564, + "acc_norm_stderr": 0.02860595370200424 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2679245283018868, + "acc_stderr": 0.027257260322494845, + "acc_norm": 0.2679245283018868, + "acc_norm_stderr": 0.027257260322494845 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.34545454545454546, + "acc_stderr": 
0.04554619617541054, + "acc_norm": 0.34545454545454546, + "acc_norm_stderr": 0.04554619617541054 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.02684205787383371, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.02684205787383371 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.1986754966887417, + "acc_stderr": 0.03257847384436776, + "acc_norm": 0.1986754966887417, + "acc_norm_stderr": 0.03257847384436776 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.23880597014925373, + "acc_stderr": 0.030147775935409224, + "acc_norm": 0.23880597014925373, + "acc_norm_stderr": 0.030147775935409224 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.20809248554913296, + "acc_stderr": 0.030952890217749895, + "acc_norm": 0.20809248554913296, + "acc_norm_stderr": 0.030952890217749895 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2566137566137566, + "acc_stderr": 0.022494510767503154, + "acc_norm": 0.2566137566137566, + "acc_norm_stderr": 0.022494510767503154 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.03476590104304134, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.03476590104304134 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.24566473988439305, + "acc_stderr": 0.02317629820399201, + "acc_norm": 0.24566473988439305, + "acc_norm_stderr": 0.02317629820399201 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.24539877300613497, + "acc_stderr": 0.03380939813943353, + "acc_norm": 0.24539877300613497, + "acc_norm_stderr": 0.03380939813943353 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 
0.2654320987654321, + "acc_stderr": 0.02456922360046085, + "acc_norm": 0.2654320987654321, + "acc_norm_stderr": 0.02456922360046085 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.20725388601036268, + "acc_stderr": 0.029252823291803627, + "acc_norm": 0.20725388601036268, + "acc_norm_stderr": 0.029252823291803627 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.042270544512321984, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.042270544512321984 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.23669724770642203, + "acc_stderr": 0.018224078117299095, + "acc_norm": 0.23669724770642203, + "acc_norm_stderr": 0.018224078117299095 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.1984126984126984, + "acc_stderr": 0.03567016675276862, + "acc_norm": 0.1984126984126984, + "acc_norm_stderr": 0.03567016675276862 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.22875816993464052, + "acc_stderr": 0.02405102973991225, + "acc_norm": 0.22875816993464052, + "acc_norm_stderr": 0.02405102973991225 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.24793388429752067, + "acc_stderr": 0.039418975265163025, + "acc_norm": 0.24793388429752067, + "acc_norm_stderr": 0.039418975265163025 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.18421052631578946, + "acc_stderr": 0.031546980450822305, + "acc_norm": 0.18421052631578946, + "acc_norm_stderr": 0.031546980450822305 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2565359477124183, + "acc_stderr": 0.017667841612378984, + "acc_norm": 0.2565359477124183, + "acc_norm_stderr": 0.017667841612378984 + }, + 
"harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2553191489361702, + "acc_stderr": 0.02601199293090201, + "acc_norm": 0.2553191489361702, + "acc_norm_stderr": 0.02601199293090201 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.04287858751340456, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.04287858751340456 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.16203703703703703, + "acc_stderr": 0.025130453652268455, + "acc_norm": 0.16203703703703703, + "acc_norm_stderr": 0.025130453652268455 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.15, + "acc_stderr": 0.0358870281282637, + "acc_norm": 0.15, + "acc_norm_stderr": 0.0358870281282637 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816507, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816507 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.20220588235294118, + "acc_stderr": 0.02439819298665492, + "acc_norm": 0.20220588235294118, + "acc_norm_stderr": 0.02439819298665492 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.17142857142857143, + "acc_stderr": 0.024127463462650146, + "acc_norm": 0.17142857142857143, + "acc_norm_stderr": 0.024127463462650146 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2616033755274262, + "acc_stderr": 0.028609516716994934, + "acc_norm": 0.2616033755274262, + "acc_norm_stderr": 0.028609516716994934 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2392438070404172, + "acc_stderr": 0.010896123652676653, + "acc_norm": 0.2392438070404172, + "acc_norm_stderr": 0.010896123652676653 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.029771775228145638, + 
"acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.029771775228145638 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.24242424242424243, + "acc_stderr": 0.033464098810559534, + "acc_norm": 0.24242424242424243, + "acc_norm_stderr": 0.033464098810559534 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.22766217870257038, + "mc1_stderr": 0.014679255032111066, + "mc2": NaN, + "mc2_stderr": NaN + }, + "harness|ko_commongen_v2|2": { + "acc": 0.07674144037780402, + "acc_stderr": 0.009151482698827047, + "acc_norm": 0.1959858323494687, + "acc_norm_stderr": 0.013647685567768866 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + 
"harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "kaist-ai/selfee-13b-delta", + "model_sha": "d3d65ca5e956a520c65bbdf9cf060f8d88b3a687", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/kakaobrain/kogpt/result_2023-11-07 09:38:44.json b/kakaobrain/kogpt/result_2023-11-07 09:38:44.json new file mode 100644 index 0000000000000000000000000000000000000000..d3f041c4b14dd005cbc78e2a1e925b707ea3eccd --- /dev/null +++ b/kakaobrain/kogpt/result_2023-11-07 09:38:44.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 
0.2568259385665529, + "acc_stderr": 0.0127669237941168, + "acc_norm": 0.3225255972696246, + "acc_norm_stderr": 0.013659980894277366 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3487353116908982, + "acc_stderr": 0.004755960559929155, + "acc_norm": 0.4329814777932683, + "acc_norm_stderr": 0.004944755230598386 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.25146198830409355, + "acc_stderr": 0.033275044238468436, + "acc_norm": 0.25146198830409355, + "acc_norm_stderr": 0.033275044238468436 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.1941747572815534, + "acc_stderr": 0.03916667762822585, + "acc_norm": 0.1941747572815534, + "acc_norm_stderr": 0.03916667762822585 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.26053639846743293, + "acc_stderr": 0.01569600856380709, + "acc_norm": 0.26053639846743293, + "acc_norm_stderr": 0.01569600856380709 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.31851851851851853, + "acc_stderr": 0.04024778401977111, + "acc_norm": 0.31851851851851853, + "acc_norm_stderr": 0.04024778401977111 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.25957446808510637, + "acc_stderr": 0.028659179374292323, + "acc_norm": 0.25957446808510637, + "acc_norm_stderr": 0.028659179374292323 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.2710843373493976, + "acc_stderr": 0.03460579907553029, + "acc_norm": 0.2710843373493976, + "acc_norm_stderr": 0.03460579907553029 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.24758842443729903, + "acc_stderr": 0.024513879973621967, + "acc_norm": 0.24758842443729903, + "acc_norm_stderr": 0.024513879973621967 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.28699551569506726, + "acc_stderr": 0.030360379710291954, + "acc_norm": 0.28699551569506726, + "acc_norm_stderr": 0.030360379710291954 + }, + 
"harness|ko_mmlu_human_sexuality|5": { + "acc": 0.26717557251908397, + "acc_stderr": 0.03880848301082396, + "acc_norm": 0.26717557251908397, + "acc_norm_stderr": 0.03880848301082396 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.29797979797979796, + "acc_stderr": 0.03258630383836555, + "acc_norm": 0.29797979797979796, + "acc_norm_stderr": 0.03258630383836555 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.23448275862068965, + "acc_stderr": 0.035306258743465914, + "acc_norm": 0.23448275862068965, + "acc_norm_stderr": 0.035306258743465914 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.043898699568087764, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.043898699568087764 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3487394957983193, + "acc_stderr": 0.030956636328566548, + "acc_norm": 0.3487394957983193, + "acc_norm_stderr": 0.030956636328566548 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.023901157979402527, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.023901157979402527 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384739, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384739 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.18, + "acc_stderr": 0.038612291966536955, + "acc_norm": 0.18, + "acc_norm_stderr": 0.038612291966536955 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.21296296296296297, + "acc_stderr": 0.03957835471980981, + "acc_norm": 0.21296296296296297, + "acc_norm_stderr": 0.03957835471980981 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.21674876847290642, + "acc_stderr": 0.028990331252516235, + "acc_norm": 0.21674876847290642, + 
"acc_norm_stderr": 0.028990331252516235 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2903225806451613, + "acc_stderr": 0.025822106119415898, + "acc_norm": 0.2903225806451613, + "acc_norm_stderr": 0.025822106119415898 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.02934311479809446, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.02934311479809446 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.23018867924528302, + "acc_stderr": 0.02590789712240817, + "acc_norm": 0.23018867924528302, + "acc_norm_stderr": 0.02590789712240817 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.20909090909090908, + "acc_stderr": 0.03895091015724138, + "acc_norm": 0.20909090909090908, + "acc_norm_stderr": 0.03895091015724138 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25555555555555554, + "acc_stderr": 0.02659393910184408, + "acc_norm": 0.25555555555555554, + "acc_norm_stderr": 0.02659393910184408 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.271523178807947, + "acc_stderr": 0.036313298039696545, + "acc_norm": 0.271523178807947, + "acc_norm_stderr": 0.036313298039696545 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.2885572139303483, + "acc_stderr": 0.03203841040213322, + "acc_norm": 0.2885572139303483, + "acc_norm_stderr": 0.03203841040213322 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.24277456647398843, + "acc_stderr": 0.0326926380614177, + "acc_norm": 0.24277456647398843, + "acc_norm_stderr": 0.0326926380614177 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.21693121693121692, + "acc_stderr": 0.021227082449445045, + "acc_norm": 0.21693121693121692, + "acc_norm_stderr": 0.021227082449445045 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.03852084696008534, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.03852084696008534 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 
0.26, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.22254335260115607, + "acc_stderr": 0.02239421566194282, + "acc_norm": 0.22254335260115607, + "acc_norm_stderr": 0.02239421566194282 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2883435582822086, + "acc_stderr": 0.03559039531617342, + "acc_norm": 0.2883435582822086, + "acc_norm_stderr": 0.03559039531617342 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.26851851851851855, + "acc_stderr": 0.024659685185967284, + "acc_norm": 0.26851851851851855, + "acc_norm_stderr": 0.024659685185967284 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.32642487046632124, + "acc_stderr": 0.033840286211432945, + "acc_norm": 0.32642487046632124, + "acc_norm_stderr": 0.033840286211432945 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.0404933929774814, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.0404933929774814 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.25871559633027524, + "acc_stderr": 0.01877605231961962, + "acc_norm": 0.25871559633027524, + "acc_norm_stderr": 0.01877605231961962 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.31746031746031744, + "acc_stderr": 0.04163453031302859, + "acc_norm": 0.31746031746031744, + "acc_norm_stderr": 0.04163453031302859 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.28104575163398693, + "acc_stderr": 0.02573885479781874, + "acc_norm": 0.28104575163398693, + "acc_norm_stderr": 0.02573885479781874 + }, + "harness|ko_mmlu_business_ethics|5": { + 
"acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.24793388429752067, + "acc_stderr": 0.03941897526516303, + "acc_norm": 0.24793388429752067, + "acc_norm_stderr": 0.03941897526516303 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.19078947368421054, + "acc_stderr": 0.031975658210325, + "acc_norm": 0.19078947368421054, + "acc_norm_stderr": 0.031975658210325 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.24019607843137256, + "acc_stderr": 0.017282760695167404, + "acc_norm": 0.24019607843137256, + "acc_norm_stderr": 0.017282760695167404 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.24822695035460993, + "acc_stderr": 0.025770015644290403, + "acc_norm": 0.24822695035460993, + "acc_norm_stderr": 0.025770015644290403 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.16964285714285715, + "acc_stderr": 0.0356236785009539, + "acc_norm": 0.16964285714285715, + "acc_norm_stderr": 0.0356236785009539 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4583333333333333, + "acc_stderr": 0.03398110890294636, + "acc_norm": 0.4583333333333333, + "acc_norm_stderr": 0.03398110890294636 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.26145251396648045, + "acc_stderr": 0.014696599650364552, + "acc_norm": 0.26145251396648045, + "acc_norm_stderr": 0.014696599650364552 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.35294117647058826, + "acc_stderr": 0.0290294228156814, + "acc_norm": 0.35294117647058826, + "acc_norm_stderr": 0.0290294228156814 + }, + 
"harness|ko_mmlu_security_studies|5": { + "acc": 0.24081632653061225, + "acc_stderr": 0.027372942201788167, + "acc_norm": 0.24081632653061225, + "acc_norm_stderr": 0.027372942201788167 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.270042194092827, + "acc_stderr": 0.028900721906293426, + "acc_norm": 0.270042194092827, + "acc_norm_stderr": 0.028900721906293426 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.24641460234680573, + "acc_stderr": 0.011005971399927227, + "acc_norm": 0.24641460234680573, + "acc_norm_stderr": 0.011005971399927227 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.030190282453501943, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.030190282453501943 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.24242424242424243, + "acc_stderr": 0.03346409881055953, + "acc_norm": 0.24242424242424243, + "acc_norm_stderr": 0.03346409881055953 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2692778457772338, + "mc1_stderr": 0.015528566637087298, + "mc2": 0.42031551863421324, + "mc2_stderr": 0.01497080980038926 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3246753246753247, + "acc_stderr": 0.016098883939346453, + "acc_norm": 0.39315230224321135, + "acc_norm_stderr": 0.01679326280128708 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + 
"harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + 
"harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "kakaobrain/kogpt", + "model_sha": "9abbe61d9e8d51adc4986e7725a1851e2264d4ff", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/kakaomacao/Llama-3-Open-Ko-8B-Instruct-seongi-KU/result_2024-07-11 07:45:54.json b/kakaomacao/Llama-3-Open-Ko-8B-Instruct-seongi-KU/result_2024-07-11 07:45:54.json new file mode 100644 index 0000000000000000000000000000000000000000..0a111491058f4340b0ba16503a425032be3d8fb4 --- /dev/null +++ b/kakaomacao/Llama-3-Open-Ko-8B-Instruct-seongi-KU/result_2024-07-11 07:45:54.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.32081911262798635, + "acc_stderr": 0.013640943091946531, + "acc_norm": 0.35494880546075086, + "acc_norm_stderr": 0.013983036904094097 + }, + "harness|ko_hellaswag|10": { + "acc": 0.33578968333001397, + "acc_stderr": 0.004713006072807708, + "acc_norm": 0.4208325034853615, + "acc_norm_stderr": 0.004926837572202165 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.42105263157894735, + "acc_stderr": 0.03786720706234215, + "acc_norm": 0.42105263157894735, + "acc_norm_stderr": 0.03786720706234215 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.33980582524271846, + "acc_stderr": 0.04689765937278135, + "acc_norm": 0.33980582524271846, + "acc_norm_stderr": 0.04689765937278135 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.36270753512132825, + "acc_stderr": 0.017192708674602306, + "acc_norm": 0.36270753512132825, + "acc_norm_stderr": 0.017192708674602306 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4222222222222222, + "acc_stderr": 0.042667634040995814, + "acc_norm": 0.4222222222222222, + "acc_norm_stderr": 0.042667634040995814 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 
0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3617021276595745, + "acc_stderr": 0.03141082197596239, + "acc_norm": 0.3617021276595745, + "acc_norm_stderr": 0.03141082197596239 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3433734939759036, + "acc_stderr": 0.036965843170106004, + "acc_norm": 0.3433734939759036, + "acc_norm_stderr": 0.036965843170106004 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3858520900321543, + "acc_stderr": 0.027648149599751468, + "acc_norm": 0.3858520900321543, + "acc_norm_stderr": 0.027648149599751468 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.35874439461883406, + "acc_stderr": 0.03219079200419996, + "acc_norm": 0.35874439461883406, + "acc_norm_stderr": 0.03219079200419996 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.37404580152671757, + "acc_stderr": 0.04243869242230524, + "acc_norm": 0.37404580152671757, + "acc_norm_stderr": 0.04243869242230524 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.2676767676767677, + "acc_stderr": 0.031544498882702866, + "acc_norm": 0.2676767676767677, + "acc_norm_stderr": 0.031544498882702866 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4, + "acc_stderr": 0.04082482904638628, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04082482904638628 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.04158307533083286, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.04158307533083286 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.31512605042016806, + "acc_stderr": 0.03017680828897434, + "acc_norm": 0.31512605042016806, + "acc_norm_stderr": 0.03017680828897434 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 
0.30512820512820515, + "acc_stderr": 0.023346335293325887, + "acc_norm": 0.30512820512820515, + "acc_norm_stderr": 0.023346335293325887 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.047500773411999854, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.047500773411999854 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.26108374384236455, + "acc_stderr": 0.030903796952114482, + "acc_norm": 0.26108374384236455, + "acc_norm_stderr": 0.030903796952114482 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.36774193548387096, + "acc_stderr": 0.027430866579973467, + "acc_norm": 0.36774193548387096, + "acc_norm_stderr": 0.027430866579973467 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.47435897435897434, + "acc_stderr": 0.03271298896811159, + "acc_norm": 0.47435897435897434, + "acc_norm_stderr": 0.03271298896811159 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3018867924528302, + "acc_stderr": 0.02825420034443867, + "acc_norm": 0.3018867924528302, + "acc_norm_stderr": 0.02825420034443867 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.35454545454545455, + "acc_stderr": 0.04582004841505417, + "acc_norm": 0.35454545454545455, + "acc_norm_stderr": 0.04582004841505417 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.026719240783712163, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.026719240783712163 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.03710185726119994, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.03710185726119994 + }, + 
"harness|ko_mmlu_sociology|5": { + "acc": 0.39303482587064675, + "acc_stderr": 0.0345368246603156, + "acc_norm": 0.39303482587064675, + "acc_norm_stderr": 0.0345368246603156 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2947976878612717, + "acc_stderr": 0.03476599607516478, + "acc_norm": 0.2947976878612717, + "acc_norm_stderr": 0.03476599607516478 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2698412698412698, + "acc_stderr": 0.022860838309232072, + "acc_norm": 0.2698412698412698, + "acc_norm_stderr": 0.022860838309232072 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3263888888888889, + "acc_stderr": 0.03921067198982266, + "acc_norm": 0.3263888888888889, + "acc_norm_stderr": 0.03921067198982266 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.19, + "acc_stderr": 0.039427724440366234, + "acc_norm": 0.19, + "acc_norm_stderr": 0.039427724440366234 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.33815028901734107, + "acc_stderr": 0.02546977014940017, + "acc_norm": 0.33815028901734107, + "acc_norm_stderr": 0.02546977014940017 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3128834355828221, + "acc_stderr": 0.036429145782924055, + "acc_norm": 0.3128834355828221, + "acc_norm_stderr": 0.036429145782924055 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.36419753086419754, + "acc_stderr": 0.026774929899722317, + "acc_norm": 0.36419753086419754, + "acc_norm_stderr": 0.026774929899722317 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.3005181347150259, + "acc_stderr": 0.033088185944157494, + "acc_norm": 0.3005181347150259, + "acc_norm_stderr": 
0.033088185944157494 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.04227054451232199, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.04227054451232199 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.30458715596330277, + "acc_stderr": 0.019732299420354038, + "acc_norm": 0.30458715596330277, + "acc_norm_stderr": 0.019732299420354038 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.18253968253968253, + "acc_stderr": 0.03455071019102147, + "acc_norm": 0.18253968253968253, + "acc_norm_stderr": 0.03455071019102147 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.026568921015457155, + "acc_norm": 0.3137254901960784, + "acc_norm_stderr": 0.026568921015457155 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.42, + "acc_stderr": 0.04960449637488584, + "acc_norm": 0.42, + "acc_norm_stderr": 0.04960449637488584 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.4628099173553719, + "acc_stderr": 0.04551711196104218, + "acc_norm": 0.4628099173553719, + "acc_norm_stderr": 0.04551711196104218 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.23026315789473684, + "acc_stderr": 0.03426059424403165, + "acc_norm": 0.23026315789473684, + "acc_norm_stderr": 0.03426059424403165 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.018433427649401903, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.018433427649401903 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2872340425531915, + "acc_stderr": 0.026992199173064356, + "acc_norm": 0.2872340425531915, + "acc_norm_stderr": 0.026992199173064356 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.36607142857142855, + "acc_stderr": 0.04572372358737431, + "acc_norm": 0.36607142857142855, + "acc_norm_stderr": 0.04572372358737431 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.16203703703703703, + "acc_stderr": 
0.02513045365226846, + "acc_norm": 0.16203703703703703, + "acc_norm_stderr": 0.02513045365226846 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2324022346368715, + "acc_stderr": 0.014125968754673385, + "acc_norm": 0.2324022346368715, + "acc_norm_stderr": 0.014125968754673385 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.23161764705882354, + "acc_stderr": 0.025626533803777565, + "acc_norm": 0.23161764705882354, + "acc_norm_stderr": 0.025626533803777565 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2163265306122449, + "acc_stderr": 0.02635891633490405, + "acc_norm": 0.2163265306122449, + "acc_norm_stderr": 0.02635891633490405 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.3924050632911392, + "acc_stderr": 0.0317847187456473, + "acc_norm": 0.3924050632911392, + "acc_norm_stderr": 0.0317847187456473 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.27249022164276404, + "acc_stderr": 0.011371658294311526, + "acc_norm": 0.27249022164276404, + "acc_norm_stderr": 0.011371658294311526 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.27941176470588236, + "acc_stderr": 0.03149328104507955, + "acc_norm": 0.27941176470588236, + "acc_norm_stderr": 0.03149328104507955 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.296969696969697, + "acc_stderr": 0.03567969772268048, + "acc_norm": 0.296969696969697, + "acc_norm_stderr": 0.03567969772268048 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.29008567931456547, + "mc1_stderr": 0.01588623687420952, + "mc2": 0.4676698023765017, + "mc2_stderr": 0.015518069774655164 + }, + 
"harness|ko_commongen_v2|2": { + "acc": 0.3022432113341204, + "acc_stderr": 0.01578865486302238, + "acc_norm": 0.38134592680047225, + "acc_norm_stderr": 0.016699301768828077 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + 
"harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "kakaomacao/Llama-3-Open-Ko-8B-Instruct-seongi-KU", + "model_sha": "bf3fdd4c608a86a1a37621f55295c4235c5151ea", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/kakaomacao/Llama-3-Open-Ko-8B-Instruct-seongi-ver0710/result_2024-07-10 09:31:56.json b/kakaomacao/Llama-3-Open-Ko-8B-Instruct-seongi-ver0710/result_2024-07-10 09:31:56.json new file mode 100644 index 0000000000000000000000000000000000000000..e5bab6db0ea4a1fd46e907d5e528ab7230f52335 --- /dev/null +++ b/kakaomacao/Llama-3-Open-Ko-8B-Instruct-seongi-ver0710/result_2024-07-10 09:31:56.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3412969283276451, + "acc_stderr": 0.013855831287497723, + "acc_norm": 0.38139931740614336, + "acc_norm_stderr": 0.01419438908668526 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3483369846644095, + "acc_stderr": 0.0047546970133549625, + "acc_norm": 0.44373630750846443, 
+ "acc_norm_stderr": 0.004958089432669991 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.43859649122807015, + "acc_stderr": 0.03805797505590459, + "acc_norm": 0.43859649122807015, + "acc_norm_stderr": 0.03805797505590459 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.3106796116504854, + "acc_stderr": 0.04582124160161551, + "acc_norm": 0.3106796116504854, + "acc_norm_stderr": 0.04582124160161551 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.017612204084663772, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.017612204084663772 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.42962962962962964, + "acc_stderr": 0.04276349494376599, + "acc_norm": 0.42962962962962964, + "acc_norm_stderr": 0.04276349494376599 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720683, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720683 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39574468085106385, + "acc_stderr": 0.03196758697835363, + "acc_norm": 0.39574468085106385, + "acc_norm_stderr": 0.03196758697835363 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3614457831325301, + "acc_stderr": 0.037400593820293204, + "acc_norm": 0.3614457831325301, + "acc_norm_stderr": 0.037400593820293204 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.37942122186495175, + "acc_stderr": 0.027559949802347824, + "acc_norm": 0.37942122186495175, + "acc_norm_stderr": 0.027559949802347824 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.42152466367713004, + "acc_stderr": 0.033141902221106585, + "acc_norm": 0.42152466367713004, + "acc_norm_stderr": 0.033141902221106585 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3893129770992366, + "acc_stderr": 0.04276486542814591, + "acc_norm": 0.3893129770992366, + "acc_norm_stderr": 0.04276486542814591 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + 
"acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3282828282828283, + "acc_stderr": 0.033456784227567773, + "acc_norm": 0.3282828282828283, + "acc_norm_stderr": 0.033456784227567773 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.42758620689655175, + "acc_stderr": 0.04122737111370331, + "acc_norm": 0.42758620689655175, + "acc_norm_stderr": 0.04122737111370331 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.19607843137254902, + "acc_stderr": 0.03950581861179961, + "acc_norm": 0.19607843137254902, + "acc_norm_stderr": 0.03950581861179961 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3487394957983193, + "acc_stderr": 0.030956636328566545, + "acc_norm": 0.3487394957983193, + "acc_norm_stderr": 0.030956636328566545 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3282051282051282, + "acc_stderr": 0.023807633198657262, + "acc_norm": 0.3282051282051282, + "acc_norm_stderr": 0.023807633198657262 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.04803752235190192, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.04803752235190192 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3103448275862069, + "acc_stderr": 0.032550867699701044, + "acc_norm": 0.3103448275862069, + "acc_norm_stderr": 0.032550867699701044 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3935483870967742, + "acc_stderr": 0.027791878753132274, + "acc_norm": 0.3935483870967742, + "acc_norm_stderr": 0.027791878753132274 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5042735042735043, + "acc_stderr": 
0.03275489264382132, + "acc_norm": 0.5042735042735043, + "acc_norm_stderr": 0.03275489264382132 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.37735849056603776, + "acc_stderr": 0.029832808114796005, + "acc_norm": 0.37735849056603776, + "acc_norm_stderr": 0.029832808114796005 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4090909090909091, + "acc_stderr": 0.04709306978661896, + "acc_norm": 0.4090909090909091, + "acc_norm_stderr": 0.04709306978661896 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2814814814814815, + "acc_stderr": 0.027420019350945273, + "acc_norm": 0.2814814814814815, + "acc_norm_stderr": 0.027420019350945273 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2781456953642384, + "acc_stderr": 0.03658603262763743, + "acc_norm": 0.2781456953642384, + "acc_norm_stderr": 0.03658603262763743 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.40298507462686567, + "acc_stderr": 0.034683432951111266, + "acc_norm": 0.40298507462686567, + "acc_norm_stderr": 0.034683432951111266 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2658959537572254, + "acc_stderr": 0.033687629322594295, + "acc_norm": 0.2658959537572254, + "acc_norm_stderr": 0.033687629322594295 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2751322751322751, + "acc_stderr": 0.023000086859068642, + "acc_norm": 0.2751322751322751, + "acc_norm_stderr": 0.023000086859068642 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3125, + "acc_stderr": 0.038760854559127644, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.038760854559127644 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939098, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939098 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.3439306358381503, + 
"acc_stderr": 0.025574123786546672, + "acc_norm": 0.3439306358381503, + "acc_norm_stderr": 0.025574123786546672 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.294478527607362, + "acc_stderr": 0.03581165790474082, + "acc_norm": 0.294478527607362, + "acc_norm_stderr": 0.03581165790474082 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3549382716049383, + "acc_stderr": 0.02662415247884585, + "acc_norm": 0.3549382716049383, + "acc_norm_stderr": 0.02662415247884585 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.29533678756476683, + "acc_stderr": 0.03292296639155142, + "acc_norm": 0.29533678756476683, + "acc_norm_stderr": 0.03292296639155142 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.03999423879281336, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.03999423879281336 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3467889908256881, + "acc_stderr": 0.020406097104093027, + "acc_norm": 0.3467889908256881, + "acc_norm_stderr": 0.020406097104093027 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.24603174603174602, + "acc_stderr": 0.03852273364924318, + "acc_norm": 0.24603174603174602, + "acc_norm_stderr": 0.03852273364924318 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3627450980392157, + "acc_stderr": 0.027530078447110307, + "acc_norm": 0.3627450980392157, + "acc_norm_stderr": 0.027530078447110307 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.45454545454545453, + "acc_stderr": 0.04545454545454546, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.04545454545454546 + }, + "harness|ko_mmlu_astronomy|5": { + 
"acc": 0.19078947368421054, + "acc_stderr": 0.031975658210325, + "acc_norm": 0.19078947368421054, + "acc_norm_stderr": 0.031975658210325 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.31699346405228757, + "acc_stderr": 0.018824219512706214, + "acc_norm": 0.31699346405228757, + "acc_norm_stderr": 0.018824219512706214 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3191489361702128, + "acc_stderr": 0.027807990141320182, + "acc_norm": 0.3191489361702128, + "acc_norm_stderr": 0.027807990141320182 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.375, + "acc_stderr": 0.04595091388086298, + "acc_norm": 0.375, + "acc_norm_stderr": 0.04595091388086298 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.19907407407407407, + "acc_stderr": 0.027232298462690232, + "acc_norm": 0.19907407407407407, + "acc_norm_stderr": 0.027232298462690232 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2446927374301676, + "acc_stderr": 0.014378169884098416, + "acc_norm": 0.2446927374301676, + "acc_norm_stderr": 0.014378169884098416 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.2426470588235294, + "acc_stderr": 0.026040662474201275, + "acc_norm": 0.2426470588235294, + "acc_norm_stderr": 0.026040662474201275 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2, + "acc_stderr": 0.02560737598657916, + "acc_norm": 0.2, + "acc_norm_stderr": 0.02560737598657916 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.4641350210970464, + "acc_stderr": 0.03246338898055659, + "acc_norm": 0.4641350210970464, + "acc_norm_stderr": 0.03246338898055659 + }, + 
"harness|ko_mmlu_professional_law|5": { + "acc": 0.27444589308996087, + "acc_stderr": 0.011397043163078154, + "acc_norm": 0.27444589308996087, + "acc_norm_stderr": 0.011397043163078154 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.3431372549019608, + "acc_stderr": 0.03332139944668086, + "acc_norm": 0.3431372549019608, + "acc_norm_stderr": 0.03332139944668086 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3090909090909091, + "acc_stderr": 0.036085410115739666, + "acc_norm": 0.3090909090909091, + "acc_norm_stderr": 0.036085410115739666 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3219094247246022, + "mc1_stderr": 0.016355567611960387, + "mc2": 0.4802366364672857, + "mc2_stderr": 0.01548928972794085 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.36363636363636365, + "acc_stderr": 0.016538691603327712, + "acc_norm": 0.4427390791027155, + "acc_norm_stderr": 0.01707725413155622 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + 
"harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "kakaomacao/Llama-3-Open-Ko-8B-Instruct-seongi-ver0710", + "model_sha": "e639acd726b40cfe29d71455b4c1580a4ae5e16b", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at 
end of file diff --git a/kakaomacao/Llama-3-Open-Ko-8B-Instruct-seongi-ver0710/result_2024-07-10 09:50:03.json b/kakaomacao/Llama-3-Open-Ko-8B-Instruct-seongi-ver0710/result_2024-07-10 09:50:03.json new file mode 100644 index 0000000000000000000000000000000000000000..e5bab6db0ea4a1fd46e907d5e528ab7230f52335 --- /dev/null +++ b/kakaomacao/Llama-3-Open-Ko-8B-Instruct-seongi-ver0710/result_2024-07-10 09:50:03.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3412969283276451, + "acc_stderr": 0.013855831287497723, + "acc_norm": 0.38139931740614336, + "acc_norm_stderr": 0.01419438908668526 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3483369846644095, + "acc_stderr": 0.0047546970133549625, + "acc_norm": 0.44373630750846443, + "acc_norm_stderr": 0.004958089432669991 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.43859649122807015, + "acc_stderr": 0.03805797505590459, + "acc_norm": 0.43859649122807015, + "acc_norm_stderr": 0.03805797505590459 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.3106796116504854, + "acc_stderr": 0.04582124160161551, + "acc_norm": 0.3106796116504854, + "acc_norm_stderr": 0.04582124160161551 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.017612204084663772, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.017612204084663772 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.42962962962962964, + "acc_stderr": 0.04276349494376599, + "acc_norm": 0.42962962962962964, + "acc_norm_stderr": 0.04276349494376599 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720683, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720683 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39574468085106385, + "acc_stderr": 0.03196758697835363, + "acc_norm": 0.39574468085106385, + "acc_norm_stderr": 0.03196758697835363 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3614457831325301, + "acc_stderr": 
0.037400593820293204, + "acc_norm": 0.3614457831325301, + "acc_norm_stderr": 0.037400593820293204 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.37942122186495175, + "acc_stderr": 0.027559949802347824, + "acc_norm": 0.37942122186495175, + "acc_norm_stderr": 0.027559949802347824 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.42152466367713004, + "acc_stderr": 0.033141902221106585, + "acc_norm": 0.42152466367713004, + "acc_norm_stderr": 0.033141902221106585 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3893129770992366, + "acc_stderr": 0.04276486542814591, + "acc_norm": 0.3893129770992366, + "acc_norm_stderr": 0.04276486542814591 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3282828282828283, + "acc_stderr": 0.033456784227567773, + "acc_norm": 0.3282828282828283, + "acc_norm_stderr": 0.033456784227567773 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.42758620689655175, + "acc_stderr": 0.04122737111370331, + "acc_norm": 0.42758620689655175, + "acc_norm_stderr": 0.04122737111370331 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.19607843137254902, + "acc_stderr": 0.03950581861179961, + "acc_norm": 0.19607843137254902, + "acc_norm_stderr": 0.03950581861179961 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3487394957983193, + "acc_stderr": 0.030956636328566545, + "acc_norm": 0.3487394957983193, + "acc_norm_stderr": 0.030956636328566545 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3282051282051282, + "acc_stderr": 0.023807633198657262, + "acc_norm": 0.3282051282051282, + "acc_norm_stderr": 0.023807633198657262 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_global_facts|5": 
{ + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.04803752235190192, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.04803752235190192 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3103448275862069, + "acc_stderr": 0.032550867699701044, + "acc_norm": 0.3103448275862069, + "acc_norm_stderr": 0.032550867699701044 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3935483870967742, + "acc_stderr": 0.027791878753132274, + "acc_norm": 0.3935483870967742, + "acc_norm_stderr": 0.027791878753132274 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5042735042735043, + "acc_stderr": 0.03275489264382132, + "acc_norm": 0.5042735042735043, + "acc_norm_stderr": 0.03275489264382132 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.37735849056603776, + "acc_stderr": 0.029832808114796005, + "acc_norm": 0.37735849056603776, + "acc_norm_stderr": 0.029832808114796005 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4090909090909091, + "acc_stderr": 0.04709306978661896, + "acc_norm": 0.4090909090909091, + "acc_norm_stderr": 0.04709306978661896 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2814814814814815, + "acc_stderr": 0.027420019350945273, + "acc_norm": 0.2814814814814815, + "acc_norm_stderr": 0.027420019350945273 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2781456953642384, + "acc_stderr": 0.03658603262763743, + "acc_norm": 0.2781456953642384, + "acc_norm_stderr": 0.03658603262763743 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.40298507462686567, + "acc_stderr": 0.034683432951111266, + "acc_norm": 0.40298507462686567, + "acc_norm_stderr": 0.034683432951111266 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2658959537572254, + "acc_stderr": 0.033687629322594295, + "acc_norm": 0.2658959537572254, + "acc_norm_stderr": 
0.033687629322594295 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2751322751322751, + "acc_stderr": 0.023000086859068642, + "acc_norm": 0.2751322751322751, + "acc_norm_stderr": 0.023000086859068642 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3125, + "acc_stderr": 0.038760854559127644, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.038760854559127644 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939098, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939098 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.3439306358381503, + "acc_stderr": 0.025574123786546672, + "acc_norm": 0.3439306358381503, + "acc_norm_stderr": 0.025574123786546672 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.294478527607362, + "acc_stderr": 0.03581165790474082, + "acc_norm": 0.294478527607362, + "acc_norm_stderr": 0.03581165790474082 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3549382716049383, + "acc_stderr": 0.02662415247884585, + "acc_norm": 0.3549382716049383, + "acc_norm_stderr": 0.02662415247884585 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.29533678756476683, + "acc_stderr": 0.03292296639155142, + "acc_norm": 0.29533678756476683, + "acc_norm_stderr": 0.03292296639155142 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.03999423879281336, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.03999423879281336 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3467889908256881, + "acc_stderr": 0.020406097104093027, + "acc_norm": 0.3467889908256881, + "acc_norm_stderr": 
0.020406097104093027 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.24603174603174602, + "acc_stderr": 0.03852273364924318, + "acc_norm": 0.24603174603174602, + "acc_norm_stderr": 0.03852273364924318 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3627450980392157, + "acc_stderr": 0.027530078447110307, + "acc_norm": 0.3627450980392157, + "acc_norm_stderr": 0.027530078447110307 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.45454545454545453, + "acc_stderr": 0.04545454545454546, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.04545454545454546 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.19078947368421054, + "acc_stderr": 0.031975658210325, + "acc_norm": 0.19078947368421054, + "acc_norm_stderr": 0.031975658210325 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.31699346405228757, + "acc_stderr": 0.018824219512706214, + "acc_norm": 0.31699346405228757, + "acc_norm_stderr": 0.018824219512706214 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3191489361702128, + "acc_stderr": 0.027807990141320182, + "acc_norm": 0.3191489361702128, + "acc_norm_stderr": 0.027807990141320182 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.375, + "acc_stderr": 0.04595091388086298, + "acc_norm": 0.375, + "acc_norm_stderr": 0.04595091388086298 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.19907407407407407, + "acc_stderr": 0.027232298462690232, + "acc_norm": 0.19907407407407407, + "acc_norm_stderr": 0.027232298462690232 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2446927374301676, + "acc_stderr": 0.014378169884098416, + "acc_norm": 0.2446927374301676, + "acc_norm_stderr": 0.014378169884098416 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + 
"acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.2426470588235294, + "acc_stderr": 0.026040662474201275, + "acc_norm": 0.2426470588235294, + "acc_norm_stderr": 0.026040662474201275 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2, + "acc_stderr": 0.02560737598657916, + "acc_norm": 0.2, + "acc_norm_stderr": 0.02560737598657916 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.4641350210970464, + "acc_stderr": 0.03246338898055659, + "acc_norm": 0.4641350210970464, + "acc_norm_stderr": 0.03246338898055659 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.27444589308996087, + "acc_stderr": 0.011397043163078154, + "acc_norm": 0.27444589308996087, + "acc_norm_stderr": 0.011397043163078154 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.3431372549019608, + "acc_stderr": 0.03332139944668086, + "acc_norm": 0.3431372549019608, + "acc_norm_stderr": 0.03332139944668086 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3090909090909091, + "acc_stderr": 0.036085410115739666, + "acc_norm": 0.3090909090909091, + "acc_norm_stderr": 0.036085410115739666 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3219094247246022, + "mc1_stderr": 0.016355567611960387, + "mc2": 0.4802366364672857, + "mc2_stderr": 0.01548928972794085 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.36363636363636365, + "acc_stderr": 0.016538691603327712, + "acc_norm": 0.4427390791027155, + "acc_norm_stderr": 0.01707725413155622 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 
1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + 
"harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "kakaomacao/Llama-3-Open-Ko-8B-Instruct-seongi-ver0710", + "model_sha": "e639acd726b40cfe29d71455b4c1580a4ae5e16b", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/kekmodel/StopCarbon-10.7B-v5/result_2024-05-16 06:06:45.json b/kekmodel/StopCarbon-10.7B-v5/result_2024-05-16 06:06:45.json new file mode 100644 index 0000000000000000000000000000000000000000..65eb214a34a6ec493628ff94c9a47d79bdd1ec18 --- /dev/null +++ b/kekmodel/StopCarbon-10.7B-v5/result_2024-05-16 06:06:45.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.386518771331058, + "acc_stderr": 0.014230084761910467, + "acc_norm": 0.4761092150170648, + "acc_norm_stderr": 0.014594701798071654 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4018123879705238, + "acc_stderr": 0.00489262449093721, + "acc_norm": 0.5388368850826528, + "acc_norm_stderr": 0.004974706428434288 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5614035087719298, + "acc_stderr": 0.038057975055904594, + "acc_norm": 0.5614035087719298, + "acc_norm_stderr": 0.038057975055904594 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6213592233009708, + "acc_stderr": 0.04802694698258974, + "acc_norm": 0.6213592233009708, + "acc_norm_stderr": 0.04802694698258974 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5887611749680716, + "acc_stderr": 0.017595971908056573, + "acc_norm": 
0.5887611749680716, + "acc_norm_stderr": 0.017595971908056573 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.04171654161354544, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.04171654161354544 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.41702127659574467, + "acc_stderr": 0.03223276266711712, + "acc_norm": 0.41702127659574467, + "acc_norm_stderr": 0.03223276266711712 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42771084337349397, + "acc_stderr": 0.038515976837185335, + "acc_norm": 0.42771084337349397, + "acc_norm_stderr": 0.038515976837185335 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5755627009646302, + "acc_stderr": 0.028071928247946205, + "acc_norm": 0.5755627009646302, + "acc_norm_stderr": 0.028071928247946205 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5246636771300448, + "acc_stderr": 0.03351695167652628, + "acc_norm": 0.5246636771300448, + "acc_norm_stderr": 0.03351695167652628 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5877862595419847, + "acc_stderr": 0.04317171194870255, + "acc_norm": 0.5877862595419847, + "acc_norm_stderr": 0.04317171194870255 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6767676767676768, + "acc_stderr": 0.033322999210706444, + "acc_norm": 0.6767676767676768, + "acc_norm_stderr": 0.033322999210706444 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4896551724137931, + "acc_stderr": 0.04165774775728763, + "acc_norm": 0.4896551724137931, + "acc_norm_stderr": 0.04165774775728763 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04690650298201942, + 
"acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04690650298201942 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5504201680672269, + "acc_stderr": 0.03231293497137707, + "acc_norm": 0.5504201680672269, + "acc_norm_stderr": 0.03231293497137707 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5205128205128206, + "acc_stderr": 0.02532966316348994, + "acc_norm": 0.5205128205128206, + "acc_norm_stderr": 0.02532966316348994 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.55, + "acc_stderr": 0.04999999999999999, + "acc_norm": 0.55, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5833333333333334, + "acc_stderr": 0.04766075165356462, + "acc_norm": 0.5833333333333334, + "acc_norm_stderr": 0.04766075165356462 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3793103448275862, + "acc_stderr": 0.034139638059062345, + "acc_norm": 0.3793103448275862, + "acc_norm_stderr": 0.034139638059062345 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5258064516129032, + "acc_stderr": 0.028406095057653326, + "acc_norm": 0.5258064516129032, + "acc_norm_stderr": 0.028406095057653326 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7692307692307693, + "acc_stderr": 0.027601921381417618, + "acc_norm": 0.7692307692307693, + "acc_norm_stderr": 0.027601921381417618 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.47547169811320755, + "acc_stderr": 0.030735822206205608, + "acc_norm": 0.47547169811320755, + "acc_norm_stderr": 0.030735822206205608 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5363636363636364, + "acc_stderr": 0.04776449162396197, + "acc_norm": 0.5363636363636364, + "acc_norm_stderr": 0.04776449162396197 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 
0.3037037037037037, + "acc_stderr": 0.02803792996911499, + "acc_norm": 0.3037037037037037, + "acc_norm_stderr": 0.02803792996911499 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.03780445850526732, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.03780445850526732 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6865671641791045, + "acc_stderr": 0.032801882053486414, + "acc_norm": 0.6865671641791045, + "acc_norm_stderr": 0.032801882053486414 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.45664739884393063, + "acc_stderr": 0.03798106566014498, + "acc_norm": 0.45664739884393063, + "acc_norm_stderr": 0.03798106566014498 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3862433862433862, + "acc_stderr": 0.025075981767601684, + "acc_norm": 0.3862433862433862, + "acc_norm_stderr": 0.025075981767601684 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4791666666666667, + "acc_stderr": 0.041775789507399935, + "acc_norm": 0.4791666666666667, + "acc_norm_stderr": 0.041775789507399935 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.68, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.68, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5606936416184971, + "acc_stderr": 0.026720034380514998, + "acc_norm": 0.5606936416184971, + "acc_norm_stderr": 0.026720034380514998 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5398773006134969, + "acc_stderr": 0.03915857291436972, + "acc_norm": 0.5398773006134969, + "acc_norm_stderr": 0.03915857291436972 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5802469135802469, + "acc_stderr": 0.027460099557005135, + "acc_norm": 0.5802469135802469, + "acc_norm_stderr": 0.027460099557005135 + }, + 
"harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6839378238341969, + "acc_stderr": 0.033553973696861736, + "acc_norm": 0.6839378238341969, + "acc_norm_stderr": 0.033553973696861736 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.42105263157894735, + "acc_stderr": 0.046446020912223177, + "acc_norm": 0.42105263157894735, + "acc_norm_stderr": 0.046446020912223177 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6256880733944954, + "acc_stderr": 0.02074895940898831, + "acc_norm": 0.6256880733944954, + "acc_norm_stderr": 0.02074895940898831 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4126984126984127, + "acc_stderr": 0.04403438954768177, + "acc_norm": 0.4126984126984127, + "acc_norm_stderr": 0.04403438954768177 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5163398692810458, + "acc_stderr": 0.028614624752805434, + "acc_norm": 0.5163398692810458, + "acc_norm_stderr": 0.028614624752805434 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7024793388429752, + "acc_stderr": 0.04173349148083499, + "acc_norm": 0.7024793388429752, + "acc_norm_stderr": 0.04173349148083499 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5526315789473685, + "acc_stderr": 0.0404633688397825, + "acc_norm": 0.5526315789473685, + "acc_norm_stderr": 0.0404633688397825 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4803921568627451, + "acc_stderr": 0.020212274976302957, + "acc_norm": 0.4803921568627451, + "acc_norm_stderr": 0.020212274976302957 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.36524822695035464, + "acc_stderr": 0.028723863853281288, + "acc_norm": 0.36524822695035464, + 
"acc_norm_stderr": 0.028723863853281288 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.39285714285714285, + "acc_stderr": 0.04635550135609976, + "acc_norm": 0.39285714285714285, + "acc_norm_stderr": 0.04635550135609976 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.44907407407407407, + "acc_stderr": 0.033922384053216174, + "acc_norm": 0.44907407407407407, + "acc_norm_stderr": 0.033922384053216174 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.32849162011173183, + "acc_stderr": 0.015707935398496457, + "acc_norm": 0.32849162011173183, + "acc_norm_stderr": 0.015707935398496457 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.68, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.68, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5, + "acc_stderr": 0.030372836961539352, + "acc_norm": 0.5, + "acc_norm_stderr": 0.030372836961539352 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6285714285714286, + "acc_stderr": 0.030932858792789848, + "acc_norm": 0.6285714285714286, + "acc_norm_stderr": 0.030932858792789848 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.0306858205966108, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.0306858205966108 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3924380704041721, + "acc_stderr": 0.0124712436692291, + "acc_norm": 0.3924380704041721, + "acc_norm_stderr": 0.0124712436692291 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5882352941176471, + "acc_stderr": 0.03454236585380609, + "acc_norm": 0.5882352941176471, + "acc_norm_stderr": 0.03454236585380609 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6121212121212121, + "acc_stderr": 0.03804913653971009, + "acc_norm": 
0.6121212121212121, + "acc_norm_stderr": 0.03804913653971009 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3708690330477356, + "mc1_stderr": 0.016909693580248804, + "mc2": 0.5289150190434356, + "mc2_stderr": 0.0164378931326531 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4852420306965762, + "acc_stderr": 0.017182864434998557, + "acc_norm": 0.48642266824085006, + "acc_norm_stderr": 0.01718401506040145 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + 
"harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "kekmodel/StopCarbon-10.7B-v5", + "model_sha": "7d59819dce2439f6c83b4f5c21a68aa882ff5ac9", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/kekmodel/StopCarbon-ko-10.7B-v2/result_2024-01-01 07:43:57.json b/kekmodel/StopCarbon-ko-10.7B-v2/result_2024-01-01 07:43:57.json new file mode 100644 index 0000000000000000000000000000000000000000..efb7837821491df3e3bc2e602145e1e4a28ddbf0 --- /dev/null +++ b/kekmodel/StopCarbon-ko-10.7B-v2/result_2024-01-01 07:43:57.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.42406143344709896, + "acc_stderr": 0.014441889627464396, + "acc_norm": 0.47952218430034127, + "acc_norm_stderr": 
0.014599131353035016 + }, + "harness|ko_hellaswag|10": { + "acc": 0.42929695279824737, + "acc_stderr": 0.0049396424601725756, + "acc_norm": 0.5973909579764987, + "acc_norm_stderr": 0.004894210011303203 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5847953216374269, + "acc_stderr": 0.03779275945503201, + "acc_norm": 0.5847953216374269, + "acc_norm_stderr": 0.03779275945503201 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6796116504854369, + "acc_stderr": 0.04620284082280042, + "acc_norm": 0.6796116504854369, + "acc_norm_stderr": 0.04620284082280042 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6526181353767561, + "acc_stderr": 0.01702667174865573, + "acc_norm": 0.6526181353767561, + "acc_norm_stderr": 0.01702667174865573 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4888888888888889, + "acc_stderr": 0.04318275491977976, + "acc_norm": 0.4888888888888889, + "acc_norm_stderr": 0.04318275491977976 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.22, + "acc_stderr": 0.0416333199893227, + "acc_norm": 0.22, + "acc_norm_stderr": 0.0416333199893227 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.49361702127659574, + "acc_stderr": 0.03268335899936337, + "acc_norm": 0.49361702127659574, + "acc_norm_stderr": 0.03268335899936337 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.46987951807228917, + "acc_stderr": 0.03885425420866766, + "acc_norm": 0.46987951807228917, + "acc_norm_stderr": 0.03885425420866766 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6205787781350482, + "acc_stderr": 0.027559949802347817, + "acc_norm": 0.6205787781350482, + "acc_norm_stderr": 0.027559949802347817 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5605381165919282, + "acc_stderr": 0.03331092511038179, + "acc_norm": 0.5605381165919282, + "acc_norm_stderr": 0.03331092511038179 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6183206106870229, + "acc_stderr": 0.0426073515764456, + "acc_norm": 0.6183206106870229, + "acc_norm_stderr": 
0.0426073515764456 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956914, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956914 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7171717171717171, + "acc_stderr": 0.03208779558786753, + "acc_norm": 0.7171717171717171, + "acc_norm_stderr": 0.03208779558786753 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.47586206896551725, + "acc_stderr": 0.0416180850350153, + "acc_norm": 0.47586206896551725, + "acc_norm_stderr": 0.0416180850350153 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.04617034827006718, + "acc_norm": 0.3137254901960784, + "acc_norm_stderr": 0.04617034827006718 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6092436974789915, + "acc_stderr": 0.031693802357129965, + "acc_norm": 0.6092436974789915, + "acc_norm_stderr": 0.031693802357129965 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5692307692307692, + "acc_stderr": 0.02510682066053976, + "acc_norm": 0.5692307692307692, + "acc_norm_stderr": 0.02510682066053976 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.57, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.57, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6111111111111112, + "acc_stderr": 0.0471282125742677, + "acc_norm": 0.6111111111111112, + "acc_norm_stderr": 0.0471282125742677 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39901477832512317, + "acc_stderr": 0.03445487686264716, + "acc_norm": 0.39901477832512317, + "acc_norm_stderr": 0.03445487686264716 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6064516129032258, + "acc_stderr": 0.027791878753132274, + "acc_norm": 0.6064516129032258, 
+ "acc_norm_stderr": 0.027791878753132274 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7905982905982906, + "acc_stderr": 0.026655699653922765, + "acc_norm": 0.7905982905982906, + "acc_norm_stderr": 0.026655699653922765 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.539622641509434, + "acc_stderr": 0.030676096599389184, + "acc_norm": 0.539622641509434, + "acc_norm_stderr": 0.030676096599389184 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6, + "acc_stderr": 0.0469237132203465, + "acc_norm": 0.6, + "acc_norm_stderr": 0.0469237132203465 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.34444444444444444, + "acc_stderr": 0.028972648884844274, + "acc_norm": 0.34444444444444444, + "acc_norm_stderr": 0.028972648884844274 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.03802039760107903, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.03802039760107903 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7313432835820896, + "acc_stderr": 0.031343283582089536, + "acc_norm": 0.7313432835820896, + "acc_norm_stderr": 0.031343283582089536 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.49710982658959535, + "acc_stderr": 0.038124005659748335, + "acc_norm": 0.49710982658959535, + "acc_norm_stderr": 0.038124005659748335 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.41798941798941797, + "acc_stderr": 0.02540255550326091, + "acc_norm": 0.41798941798941797, + "acc_norm_stderr": 0.02540255550326091 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4791666666666667, + "acc_stderr": 0.04177578950739993, + "acc_norm": 0.4791666666666667, + "acc_norm_stderr": 0.04177578950739993 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.7, + "acc_stderr": 0.046056618647183814, + "acc_norm": 
0.7, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5751445086705202, + "acc_stderr": 0.02661335084026174, + "acc_norm": 0.5751445086705202, + "acc_norm_stderr": 0.02661335084026174 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5030674846625767, + "acc_stderr": 0.03928297078179663, + "acc_norm": 0.5030674846625767, + "acc_norm_stderr": 0.03928297078179663 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6049382716049383, + "acc_stderr": 0.027201117666925657, + "acc_norm": 0.6049382716049383, + "acc_norm_stderr": 0.027201117666925657 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7512953367875648, + "acc_stderr": 0.031195840877700304, + "acc_norm": 0.7512953367875648, + "acc_norm_stderr": 0.031195840877700304 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.45614035087719296, + "acc_stderr": 0.04685473041907789, + "acc_norm": 0.45614035087719296, + "acc_norm_stderr": 0.04685473041907789 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6642201834862386, + "acc_stderr": 0.02024808139675293, + "acc_norm": 0.6642201834862386, + "acc_norm_stderr": 0.02024808139675293 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4523809523809524, + "acc_stderr": 0.044518079590553275, + "acc_norm": 0.4523809523809524, + "acc_norm_stderr": 0.044518079590553275 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.028452639985088006, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.028452639985088006 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.61, + "acc_stderr": 0.049020713000019756, + "acc_norm": 0.61, + "acc_norm_stderr": 0.049020713000019756 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7272727272727273, + "acc_stderr": 
0.040655781409087044, + "acc_norm": 0.7272727272727273, + "acc_norm_stderr": 0.040655781409087044 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5263157894736842, + "acc_stderr": 0.040633027314866725, + "acc_norm": 0.5263157894736842, + "acc_norm_stderr": 0.040633027314866725 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5179738562091504, + "acc_stderr": 0.020214761037872397, + "acc_norm": 0.5179738562091504, + "acc_norm_stderr": 0.020214761037872397 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.4219858156028369, + "acc_stderr": 0.0294621892333706, + "acc_norm": 0.4219858156028369, + "acc_norm_stderr": 0.0294621892333706 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4017857142857143, + "acc_stderr": 0.04653333146973647, + "acc_norm": 0.4017857142857143, + "acc_norm_stderr": 0.04653333146973647 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5601851851851852, + "acc_stderr": 0.03385177976044812, + "acc_norm": 0.5601851851851852, + "acc_norm_stderr": 0.03385177976044812 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27150837988826815, + "acc_stderr": 0.014874252168095268, + "acc_norm": 0.27150837988826815, + "acc_norm_stderr": 0.014874252168095268 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.64, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.64, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5330882352941176, + "acc_stderr": 0.03030625772246831, + "acc_norm": 0.5330882352941176, + "acc_norm_stderr": 0.03030625772246831 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5755102040816327, + "acc_stderr": 0.031642094879429414, + "acc_norm": 0.5755102040816327, + "acc_norm_stderr": 0.031642094879429414 + }, + 
"harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.759493670886076, + "acc_stderr": 0.027820781981149675, + "acc_norm": 0.759493670886076, + "acc_norm_stderr": 0.027820781981149675 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.41003911342894395, + "acc_stderr": 0.012561837621962026, + "acc_norm": 0.41003911342894395, + "acc_norm_stderr": 0.012561837621962026 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6715686274509803, + "acc_stderr": 0.03296245110172229, + "acc_norm": 0.6715686274509803, + "acc_norm_stderr": 0.03296245110172229 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6909090909090909, + "acc_stderr": 0.036085410115739666, + "acc_norm": 0.6909090909090909, + "acc_norm_stderr": 0.036085410115739666 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3684210526315789, + "mc1_stderr": 0.016886551261046042, + "mc2": 0.557073075337194, + "mc2_stderr": 0.015709423858506075 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.6092089728453365, + "acc_stderr": 0.016775298465108265, + "acc_norm": 0.6328217237308147, + "acc_norm_stderr": 0.016572727807458613 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, 
+ "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "kekmodel/StopCarbon-ko-10.7B-v2", + "model_sha": "958e326e8dcc11ce99e96a30b39e457609cdaac9", + "model_dtype": "torch.float16", + 
"lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/kekmodel/StopCarbon-ko-10.7B-v3/result_2024-01-01 08:32:03.json b/kekmodel/StopCarbon-ko-10.7B-v3/result_2024-01-01 08:32:03.json new file mode 100644 index 0000000000000000000000000000000000000000..e8b387bfda2125d82f96db43a1746623c53ec284 --- /dev/null +++ b/kekmodel/StopCarbon-ko-10.7B-v3/result_2024-01-01 08:32:03.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.42406143344709896, + "acc_stderr": 0.014441889627464396, + "acc_norm": 0.47952218430034127, + "acc_norm_stderr": 0.014599131353035016 + }, + "harness|ko_hellaswag|10": { + "acc": 0.42929695279824737, + "acc_stderr": 0.0049396424601725756, + "acc_norm": 0.5973909579764987, + "acc_norm_stderr": 0.004894210011303203 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5847953216374269, + "acc_stderr": 0.03779275945503201, + "acc_norm": 0.5847953216374269, + "acc_norm_stderr": 0.03779275945503201 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6796116504854369, + "acc_stderr": 0.04620284082280042, + "acc_norm": 0.6796116504854369, + "acc_norm_stderr": 0.04620284082280042 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6526181353767561, + "acc_stderr": 0.01702667174865573, + "acc_norm": 0.6526181353767561, + "acc_norm_stderr": 0.01702667174865573 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4888888888888889, + "acc_stderr": 0.04318275491977976, + "acc_norm": 0.4888888888888889, + "acc_norm_stderr": 0.04318275491977976 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.22, + "acc_stderr": 0.0416333199893227, + "acc_norm": 0.22, + "acc_norm_stderr": 0.0416333199893227 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.49361702127659574, + "acc_stderr": 0.03268335899936337, + "acc_norm": 0.49361702127659574, + "acc_norm_stderr": 0.03268335899936337 + }, + 
"harness|ko_mmlu_virology|5": { + "acc": 0.46987951807228917, + "acc_stderr": 0.03885425420866766, + "acc_norm": 0.46987951807228917, + "acc_norm_stderr": 0.03885425420866766 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6205787781350482, + "acc_stderr": 0.027559949802347817, + "acc_norm": 0.6205787781350482, + "acc_norm_stderr": 0.027559949802347817 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5605381165919282, + "acc_stderr": 0.03331092511038179, + "acc_norm": 0.5605381165919282, + "acc_norm_stderr": 0.03331092511038179 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6183206106870229, + "acc_stderr": 0.0426073515764456, + "acc_norm": 0.6183206106870229, + "acc_norm_stderr": 0.0426073515764456 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956914, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956914 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7171717171717171, + "acc_stderr": 0.03208779558786753, + "acc_norm": 0.7171717171717171, + "acc_norm_stderr": 0.03208779558786753 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.47586206896551725, + "acc_stderr": 0.0416180850350153, + "acc_norm": 0.47586206896551725, + "acc_norm_stderr": 0.0416180850350153 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.04617034827006718, + "acc_norm": 0.3137254901960784, + "acc_norm_stderr": 0.04617034827006718 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6092436974789915, + "acc_stderr": 0.031693802357129965, + "acc_norm": 0.6092436974789915, + "acc_norm_stderr": 0.031693802357129965 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5692307692307692, + "acc_stderr": 0.02510682066053976, + "acc_norm": 0.5692307692307692, + "acc_norm_stderr": 0.02510682066053976 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.57, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.57, + 
"acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6111111111111112, + "acc_stderr": 0.0471282125742677, + "acc_norm": 0.6111111111111112, + "acc_norm_stderr": 0.0471282125742677 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39901477832512317, + "acc_stderr": 0.03445487686264716, + "acc_norm": 0.39901477832512317, + "acc_norm_stderr": 0.03445487686264716 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6064516129032258, + "acc_stderr": 0.027791878753132274, + "acc_norm": 0.6064516129032258, + "acc_norm_stderr": 0.027791878753132274 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7905982905982906, + "acc_stderr": 0.026655699653922765, + "acc_norm": 0.7905982905982906, + "acc_norm_stderr": 0.026655699653922765 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.539622641509434, + "acc_stderr": 0.030676096599389184, + "acc_norm": 0.539622641509434, + "acc_norm_stderr": 0.030676096599389184 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6, + "acc_stderr": 0.0469237132203465, + "acc_norm": 0.6, + "acc_norm_stderr": 0.0469237132203465 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.34444444444444444, + "acc_stderr": 0.028972648884844274, + "acc_norm": 0.34444444444444444, + "acc_norm_stderr": 0.028972648884844274 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.03802039760107903, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.03802039760107903 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7313432835820896, + "acc_stderr": 0.031343283582089536, + "acc_norm": 0.7313432835820896, + "acc_norm_stderr": 0.031343283582089536 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.49710982658959535, + "acc_stderr": 0.038124005659748335, + "acc_norm": 
0.49710982658959535, + "acc_norm_stderr": 0.038124005659748335 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.41798941798941797, + "acc_stderr": 0.02540255550326091, + "acc_norm": 0.41798941798941797, + "acc_norm_stderr": 0.02540255550326091 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4791666666666667, + "acc_stderr": 0.04177578950739993, + "acc_norm": 0.4791666666666667, + "acc_norm_stderr": 0.04177578950739993 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.7, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.7, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5751445086705202, + "acc_stderr": 0.02661335084026174, + "acc_norm": 0.5751445086705202, + "acc_norm_stderr": 0.02661335084026174 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5030674846625767, + "acc_stderr": 0.03928297078179663, + "acc_norm": 0.5030674846625767, + "acc_norm_stderr": 0.03928297078179663 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6049382716049383, + "acc_stderr": 0.027201117666925657, + "acc_norm": 0.6049382716049383, + "acc_norm_stderr": 0.027201117666925657 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7512953367875648, + "acc_stderr": 0.031195840877700304, + "acc_norm": 0.7512953367875648, + "acc_norm_stderr": 0.031195840877700304 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.45614035087719296, + "acc_stderr": 0.04685473041907789, + "acc_norm": 0.45614035087719296, + "acc_norm_stderr": 0.04685473041907789 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6642201834862386, + "acc_stderr": 
0.02024808139675293, + "acc_norm": 0.6642201834862386, + "acc_norm_stderr": 0.02024808139675293 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4523809523809524, + "acc_stderr": 0.044518079590553275, + "acc_norm": 0.4523809523809524, + "acc_norm_stderr": 0.044518079590553275 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.028452639985088006, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.028452639985088006 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.61, + "acc_stderr": 0.049020713000019756, + "acc_norm": 0.61, + "acc_norm_stderr": 0.049020713000019756 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7272727272727273, + "acc_stderr": 0.040655781409087044, + "acc_norm": 0.7272727272727273, + "acc_norm_stderr": 0.040655781409087044 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5263157894736842, + "acc_stderr": 0.040633027314866725, + "acc_norm": 0.5263157894736842, + "acc_norm_stderr": 0.040633027314866725 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5179738562091504, + "acc_stderr": 0.020214761037872397, + "acc_norm": 0.5179738562091504, + "acc_norm_stderr": 0.020214761037872397 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.4219858156028369, + "acc_stderr": 0.0294621892333706, + "acc_norm": 0.4219858156028369, + "acc_norm_stderr": 0.0294621892333706 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4017857142857143, + "acc_stderr": 0.04653333146973647, + "acc_norm": 0.4017857142857143, + "acc_norm_stderr": 0.04653333146973647 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5601851851851852, + "acc_stderr": 0.03385177976044812, + "acc_norm": 0.5601851851851852, + "acc_norm_stderr": 0.03385177976044812 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27150837988826815, + "acc_stderr": 0.014874252168095268, + "acc_norm": 0.27150837988826815, + "acc_norm_stderr": 0.014874252168095268 + }, + 
"harness|ko_mmlu_college_computer_science|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.64, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.64, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5330882352941176, + "acc_stderr": 0.03030625772246831, + "acc_norm": 0.5330882352941176, + "acc_norm_stderr": 0.03030625772246831 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5755102040816327, + "acc_stderr": 0.031642094879429414, + "acc_norm": 0.5755102040816327, + "acc_norm_stderr": 0.031642094879429414 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.759493670886076, + "acc_stderr": 0.027820781981149675, + "acc_norm": 0.759493670886076, + "acc_norm_stderr": 0.027820781981149675 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.41003911342894395, + "acc_stderr": 0.012561837621962026, + "acc_norm": 0.41003911342894395, + "acc_norm_stderr": 0.012561837621962026 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6715686274509803, + "acc_stderr": 0.03296245110172229, + "acc_norm": 0.6715686274509803, + "acc_norm_stderr": 0.03296245110172229 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6909090909090909, + "acc_stderr": 0.036085410115739666, + "acc_norm": 0.6909090909090909, + "acc_norm_stderr": 0.036085410115739666 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3684210526315789, + "mc1_stderr": 0.016886551261046042, + "mc2": 0.557073075337194, + "mc2_stderr": 0.015709423858506075 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.6092089728453365, + "acc_stderr": 0.016775298465108265, + "acc_norm": 0.6328217237308147, + "acc_norm_stderr": 0.016572727807458613 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 
1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + 
"harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "kekmodel/StopCarbon-ko-10.7B-v3", + "model_sha": "6935d1679c1d14c6c89693ecd8a84296473a9d9a", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/kevin009/llamaRAGdrama/result_2024-05-30 16:43:13.json b/kevin009/llamaRAGdrama/result_2024-05-30 16:43:13.json new file mode 100644 index 0000000000000000000000000000000000000000..6e4a832e81ef4c7d1dca7fd2ad82d3c44be87e46 --- /dev/null +++ b/kevin009/llamaRAGdrama/result_2024-05-30 16:43:13.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.39590443686006827, + "acc_stderr": 0.014291228393536585, + "acc_norm": 0.4564846416382253, + "acc_norm_stderr": 0.014555949760496437 + }, + "harness|ko_hellaswag|10": { + "acc": 0.39713204540928104, + "acc_stderr": 0.004883037758919961, + "acc_norm": 0.5210117506472814, + "acc_norm_stderr": 0.004985373550775105 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.47953216374269003, + "acc_stderr": 0.038316105328219316, + "acc_norm": 0.47953216374269003, + "acc_norm_stderr": 0.038316105328219316 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6019417475728155, + "acc_stderr": 0.04846748253977238, + "acc_norm": 0.6019417475728155, + "acc_norm_stderr": 0.04846748253977238 + }, + 
"harness|ko_mmlu_miscellaneous|5": { + "acc": 0.45849297573435505, + "acc_stderr": 0.017818248603465554, + "acc_norm": 0.45849297573435505, + "acc_norm_stderr": 0.017818248603465554 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.362962962962963, + "acc_stderr": 0.041539484047424, + "acc_norm": 0.362962962962963, + "acc_norm_stderr": 0.041539484047424 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.41702127659574467, + "acc_stderr": 0.032232762667117124, + "acc_norm": 0.41702127659574467, + "acc_norm_stderr": 0.032232762667117124 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3674698795180723, + "acc_stderr": 0.03753267402120574, + "acc_norm": 0.3674698795180723, + "acc_norm_stderr": 0.03753267402120574 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.43729903536977494, + "acc_stderr": 0.02817391776176287, + "acc_norm": 0.43729903536977494, + "acc_norm_stderr": 0.02817391776176287 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.47085201793721976, + "acc_stderr": 0.03350073248773404, + "acc_norm": 0.47085201793721976, + "acc_norm_stderr": 0.03350073248773404 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4122137404580153, + "acc_stderr": 0.04317171194870255, + "acc_norm": 0.4122137404580153, + "acc_norm_stderr": 0.04317171194870255 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5505050505050505, + "acc_stderr": 0.035441324919479704, + "acc_norm": 0.5505050505050505, + "acc_norm_stderr": 0.035441324919479704 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4896551724137931, + "acc_stderr": 0.041657747757287644, + "acc_norm": 0.4896551724137931, + "acc_norm_stderr": 0.041657747757287644 + 
}, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.04220773659171453, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.04220773659171453 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5504201680672269, + "acc_stderr": 0.03231293497137707, + "acc_norm": 0.5504201680672269, + "acc_norm_stderr": 0.03231293497137707 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.44358974358974357, + "acc_stderr": 0.025189149894764187, + "acc_norm": 0.44358974358974357, + "acc_norm_stderr": 0.025189149894764187 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.62, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.62, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5092592592592593, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.5092592592592593, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3842364532019704, + "acc_stderr": 0.03422398565657551, + "acc_norm": 0.3842364532019704, + "acc_norm_stderr": 0.03422398565657551 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.41935483870967744, + "acc_stderr": 0.028071588901091852, + "acc_norm": 0.41935483870967744, + "acc_norm_stderr": 0.028071588901091852 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7478632478632479, + "acc_stderr": 0.02844796547623102, + "acc_norm": 0.7478632478632479, + "acc_norm_stderr": 0.02844796547623102 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.44150943396226416, + "acc_stderr": 0.030561590426731837, + "acc_norm": 0.44150943396226416, + "acc_norm_stderr": 0.030561590426731837 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5545454545454546, + "acc_stderr": 0.047605488214603246, + "acc_norm": 
0.5545454545454546, + "acc_norm_stderr": 0.047605488214603246 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3296296296296296, + "acc_stderr": 0.028661201116524572, + "acc_norm": 0.3296296296296296, + "acc_norm_stderr": 0.028661201116524572 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + "acc_stderr": 0.03684881521389023, + "acc_norm": 0.2847682119205298, + "acc_norm_stderr": 0.03684881521389023 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5373134328358209, + "acc_stderr": 0.03525675167467974, + "acc_norm": 0.5373134328358209, + "acc_norm_stderr": 0.03525675167467974 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4277456647398844, + "acc_stderr": 0.03772446857518026, + "acc_norm": 0.4277456647398844, + "acc_norm_stderr": 0.03772446857518026 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3994708994708995, + "acc_stderr": 0.025225450284067877, + "acc_norm": 0.3994708994708995, + "acc_norm_stderr": 0.025225450284067877 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.375, + "acc_stderr": 0.04048439222695598, + "acc_norm": 0.375, + "acc_norm_stderr": 0.04048439222695598 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.55, + "acc_stderr": 0.05000000000000001, + "acc_norm": 0.55, + "acc_norm_stderr": 0.05000000000000001 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.49421965317919075, + "acc_stderr": 0.026917296179149116, + "acc_norm": 0.49421965317919075, + "acc_norm_stderr": 0.026917296179149116 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.49693251533742333, + "acc_stderr": 0.03928297078179663, + "acc_norm": 0.49693251533742333, + "acc_norm_stderr": 0.03928297078179663 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.44135802469135804, + "acc_stderr": 0.027628737155668777, + 
"acc_norm": 0.44135802469135804, + "acc_norm_stderr": 0.027628737155668777 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.48704663212435234, + "acc_stderr": 0.036072280610477486, + "acc_norm": 0.48704663212435234, + "acc_norm_stderr": 0.036072280610477486 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.30701754385964913, + "acc_stderr": 0.043391383225798594, + "acc_norm": 0.30701754385964913, + "acc_norm_stderr": 0.043391383225798594 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5192660550458715, + "acc_stderr": 0.02142140298254888, + "acc_norm": 0.5192660550458715, + "acc_norm_stderr": 0.02142140298254888 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4523809523809524, + "acc_stderr": 0.044518079590553275, + "acc_norm": 0.4523809523809524, + "acc_norm_stderr": 0.044518079590553275 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4673202614379085, + "acc_stderr": 0.028568699752225882, + "acc_norm": 0.4673202614379085, + "acc_norm_stderr": 0.028568699752225882 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6446280991735537, + "acc_stderr": 0.0436923632657398, + "acc_norm": 0.6446280991735537, + "acc_norm_stderr": 0.0436923632657398 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.42105263157894735, + "acc_stderr": 0.04017901275981748, + "acc_norm": 0.42105263157894735, + "acc_norm_stderr": 0.04017901275981748 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.019722058939618068, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.019722058939618068 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 
0.3723404255319149, + "acc_stderr": 0.02883892147125146, + "acc_norm": 0.3723404255319149, + "acc_norm_stderr": 0.02883892147125146 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.41964285714285715, + "acc_stderr": 0.046840993210771065, + "acc_norm": 0.41964285714285715, + "acc_norm_stderr": 0.046840993210771065 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.033247089118091176, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.033247089118091176 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2581005586592179, + "acc_stderr": 0.014635185616527824, + "acc_norm": 0.2581005586592179, + "acc_norm_stderr": 0.014635185616527824 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.67, + "acc_stderr": 0.04725815626252611, + "acc_norm": 0.67, + "acc_norm_stderr": 0.04725815626252611 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.31985294117647056, + "acc_stderr": 0.02833295951403122, + "acc_norm": 0.31985294117647056, + "acc_norm_stderr": 0.02833295951403122 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5142857142857142, + "acc_stderr": 0.03199615232806286, + "acc_norm": 0.5142857142857142, + "acc_norm_stderr": 0.03199615232806286 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5780590717299579, + "acc_stderr": 0.032148146302403695, + "acc_norm": 0.5780590717299579, + "acc_norm_stderr": 0.032148146302403695 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3546284224250326, + "acc_stderr": 0.012218576439090162, + "acc_norm": 0.3546284224250326, + "acc_norm_stderr": 0.012218576439090162 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.45588235294117646, + "acc_stderr": 0.03495624522015474, + "acc_norm": 0.45588235294117646, + "acc_norm_stderr": 
0.03495624522015474 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4303030303030303, + "acc_stderr": 0.03866225962879077, + "acc_norm": 0.4303030303030303, + "acc_norm_stderr": 0.03866225962879077 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.38555691554467564, + "mc1_stderr": 0.017038839010591684, + "mc2": 0.5540829043994184, + "mc2_stderr": 0.01636624159389132 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4167650531286895, + "acc_stderr": 0.016950489146108822, + "acc_norm": 0.4214876033057851, + "acc_norm_stderr": 0.01697710193260152 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + 
"harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "kevin009/llamaRAGdrama", + "model_sha": "8c103ca8fa6dd9a8d3dab81b319408095e9a1ad8", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/kfkas/Llama-2-ko-7b-Chat/result_2023-09-27 05:00:55.json b/kfkas/Llama-2-ko-7b-Chat/result_2023-09-27 05:00:55.json new file mode 100644 index 0000000000000000000000000000000000000000..517a58da1c8600f851d65243c7fcecd91d6206c6 --- /dev/null +++ b/kfkas/Llama-2-ko-7b-Chat/result_2023-09-27 05:00:55.json @@ -0,0 +1,444 @@ +{ + "results": { + 
"harness|ko_arc_challenge|25": { + "acc": 0.32593856655290104, + "acc_stderr": 0.013697432466693242, + "acc_norm": 0.3839590443686007, + "acc_norm_stderr": 0.01421244498065189 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3875721967735511, + "acc_stderr": 0.004862003566798545, + "acc_norm": 0.504779924317865, + "acc_norm_stderr": 0.004989553396413091 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.033773102522091945, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.033773102522091945 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.32038834951456313, + "acc_stderr": 0.0462028408228004, + "acc_norm": 0.32038834951456313, + "acc_norm_stderr": 0.0462028408228004 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3499361430395913, + "acc_stderr": 0.017055679797150423, + "acc_norm": 0.3499361430395913, + "acc_norm_stderr": 0.017055679797150423 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.31851851851851853, + "acc_stderr": 0.0402477840197711, + "acc_norm": 0.31851851851851853, + "acc_norm_stderr": 0.0402477840197711 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.34893617021276596, + "acc_stderr": 0.03115852213135778, + "acc_norm": 0.34893617021276596, + "acc_norm_stderr": 0.03115852213135778 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3855421686746988, + "acc_stderr": 0.037891344246115496, + "acc_norm": 0.3855421686746988, + "acc_norm_stderr": 0.037891344246115496 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2958199356913183, + "acc_stderr": 0.02592237178881879, + "acc_norm": 0.2958199356913183, + "acc_norm_stderr": 0.02592237178881879 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3721973094170404, + "acc_stderr": 0.032443052830087304, + "acc_norm": 0.3721973094170404, + "acc_norm_stderr": 0.032443052830087304 + }, + 
"harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3511450381679389, + "acc_stderr": 0.04186445163013751, + "acc_norm": 0.3511450381679389, + "acc_norm_stderr": 0.04186445163013751 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3787878787878788, + "acc_stderr": 0.03456088731993747, + "acc_norm": 0.3787878787878788, + "acc_norm_stderr": 0.03456088731993747 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.27586206896551724, + "acc_stderr": 0.03724563619774632, + "acc_norm": 0.27586206896551724, + "acc_norm_stderr": 0.03724563619774632 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.04023382273617746, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.04023382273617746 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.028657491285071966, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.028657491285071966 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2743589743589744, + "acc_stderr": 0.022622765767493197, + "acc_norm": 0.2743589743589744, + "acc_norm_stderr": 0.022622765767493197 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.044143436668549335, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.044143436668549335 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3054187192118227, + "acc_stderr": 0.03240661565868408, + "acc_norm": 0.3054187192118227, + "acc_norm_stderr": 
0.03240661565868408 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3193548387096774, + "acc_stderr": 0.02652270967466777, + "acc_norm": 0.3193548387096774, + "acc_norm_stderr": 0.02652270967466777 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.030882736974138653, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.030882736974138653 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.32075471698113206, + "acc_stderr": 0.028727502957880267, + "acc_norm": 0.32075471698113206, + "acc_norm_stderr": 0.028727502957880267 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.32727272727272727, + "acc_stderr": 0.04494290866252088, + "acc_norm": 0.32727272727272727, + "acc_norm_stderr": 0.04494290866252088 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.22592592592592592, + "acc_stderr": 0.02549753263960955, + "acc_norm": 0.22592592592592592, + "acc_norm_stderr": 0.02549753263960955 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.03710185726119995, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.03710185726119995 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.263681592039801, + "acc_stderr": 0.03115715086935557, + "acc_norm": 0.263681592039801, + "acc_norm_stderr": 0.03115715086935557 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.24277456647398843, + "acc_stderr": 0.0326926380614177, + "acc_norm": 0.24277456647398843, + "acc_norm_stderr": 0.0326926380614177 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.25396825396825395, + "acc_stderr": 0.02241804289111394, + "acc_norm": 0.25396825396825395, + "acc_norm_stderr": 0.02241804289111394 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.19444444444444445, + "acc_stderr": 0.03309615177059006, + "acc_norm": 0.19444444444444445, + "acc_norm_stderr": 0.03309615177059006 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.26, + "acc_stderr": 
0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2976878612716763, + "acc_stderr": 0.024617055388677003, + "acc_norm": 0.2976878612716763, + "acc_norm_stderr": 0.024617055388677003 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.26993865030674846, + "acc_stderr": 0.034878251684978906, + "acc_norm": 0.26993865030674846, + "acc_norm_stderr": 0.034878251684978906 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.30246913580246915, + "acc_stderr": 0.025557653981868052, + "acc_norm": 0.30246913580246915, + "acc_norm_stderr": 0.025557653981868052 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.23316062176165803, + "acc_stderr": 0.030516111371476008, + "acc_norm": 0.23316062176165803, + "acc_norm_stderr": 0.030516111371476008 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.041857744240220575, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.041857744240220575 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3688073394495413, + "acc_stderr": 0.02068622756072955, + "acc_norm": 0.3688073394495413, + "acc_norm_stderr": 0.02068622756072955 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.25396825396825395, + "acc_stderr": 0.03893259610604672, + "acc_norm": 0.25396825396825395, + "acc_norm_stderr": 0.03893259610604672 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.33986928104575165, + "acc_stderr": 0.027121956071388852, + "acc_norm": 0.33986928104575165, + "acc_norm_stderr": 0.027121956071388852 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.28, + 
"acc_stderr": 0.04512608598542126, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542126 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.34710743801652894, + "acc_stderr": 0.04345724570292534, + "acc_norm": 0.34710743801652894, + "acc_norm_stderr": 0.04345724570292534 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3223684210526316, + "acc_stderr": 0.038035102483515854, + "acc_norm": 0.3223684210526316, + "acc_norm_stderr": 0.038035102483515854 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.017630827375148383, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.017630827375148383 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.26595744680851063, + "acc_stderr": 0.026358065698880592, + "acc_norm": 0.26595744680851063, + "acc_norm_stderr": 0.026358065698880592 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.30357142857142855, + "acc_stderr": 0.04364226155841044, + "acc_norm": 0.30357142857142855, + "acc_norm_stderr": 0.04364226155841044 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.28703703703703703, + "acc_stderr": 0.030851992993257017, + "acc_norm": 0.28703703703703703, + "acc_norm_stderr": 0.030851992993257017 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932269, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932269 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4375, + "acc_stderr": 0.030134614954403924, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.030134614954403924 + }, + "harness|ko_mmlu_security_studies|5": { 
+ "acc": 0.27755102040816326, + "acc_stderr": 0.02866685779027465, + "acc_norm": 0.27755102040816326, + "acc_norm_stderr": 0.02866685779027465 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.3291139240506329, + "acc_stderr": 0.030587326294702368, + "acc_norm": 0.3291139240506329, + "acc_norm_stderr": 0.030587326294702368 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.26988265971316816, + "acc_stderr": 0.011337381084250411, + "acc_norm": 0.26988265971316816, + "acc_norm_stderr": 0.011337381084250411 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.030587591351604246, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.030587591351604246 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2606060606060606, + "acc_stderr": 0.034277431758165236, + "acc_norm": 0.2606060606060606, + "acc_norm_stderr": 0.034277431758165236 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2350061199510404, + "mc1_stderr": 0.014843061507731613, + "mc2": 0.3670922997204656, + "mc2_stderr": 0.014677148528936845 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2361275088547816, + "acc_stderr": 0.014601536093324388, + "acc_norm": 0.3116883116883117, + "acc_norm_stderr": 0.01592456760735833 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + 
"harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + 
"harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "kfkas/Llama-2-ko-7b-Chat", + "model_sha": "6d94c8e5b34fb09e80601548761a8dbd54bc0bba", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/kfkas/llama-2-koen-13b-SFT-LoRA-4bit-re/result_2023-12-03 08:29:51.json b/kfkas/llama-2-koen-13b-SFT-LoRA-4bit-re/result_2023-12-03 08:29:51.json new file mode 100644 index 0000000000000000000000000000000000000000..31c0cb89c64b9ea679cc2ec41663a1c898084b9c --- /dev/null +++ b/kfkas/llama-2-koen-13b-SFT-LoRA-4bit-re/result_2023-12-03 08:29:51.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3779863481228669, + "acc_stderr": 0.014169664520303096, + "acc_norm": 0.4300341296928328, + "acc_norm_stderr": 0.014467631559137998 + }, + "harness|ko_hellaswag|10": { + "acc": 0.41884086835291773, + "acc_stderr": 0.004923609207861537, + "acc_norm": 0.5644293965345548, + "acc_norm_stderr": 0.00494818136702496 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4619883040935672, + "acc_stderr": 0.03823727092882307, + "acc_norm": 0.4619883040935672, + "acc_norm_stderr": 0.03823727092882307 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.39805825242718446, + "acc_stderr": 0.048467482539772386, + "acc_norm": 0.39805825242718446, + "acc_norm_stderr": 0.048467482539772386 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5261813537675607, + "acc_stderr": 0.017855434554041996, + "acc_norm": 0.5261813537675607, + "acc_norm_stderr": 0.017855434554041996 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.43703703703703706, + "acc_stderr": 0.04284958639753399, + "acc_norm": 0.43703703703703706, + "acc_norm_stderr": 0.04284958639753399 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 
0.0446196043338474 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3829787234042553, + "acc_stderr": 0.03177821250236922, + "acc_norm": 0.3829787234042553, + "acc_norm_stderr": 0.03177821250236922 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.41566265060240964, + "acc_stderr": 0.038367221765980515, + "acc_norm": 0.41566265060240964, + "acc_norm_stderr": 0.038367221765980515 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4630225080385852, + "acc_stderr": 0.028320325830105908, + "acc_norm": 0.4630225080385852, + "acc_norm_stderr": 0.028320325830105908 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.515695067264574, + "acc_stderr": 0.0335412657542081, + "acc_norm": 0.515695067264574, + "acc_norm_stderr": 0.0335412657542081 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.45038167938931295, + "acc_stderr": 0.04363643698524779, + "acc_norm": 0.45038167938931295, + "acc_norm_stderr": 0.04363643698524779 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.4494949494949495, + "acc_stderr": 0.035441324919479704, + "acc_norm": 0.4494949494949495, + "acc_norm_stderr": 0.035441324919479704 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3724137931034483, + "acc_stderr": 0.0402873153294756, + "acc_norm": 0.3724137931034483, + "acc_norm_stderr": 0.0402873153294756 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.18627450980392157, + "acc_stderr": 0.038739587141493524, + "acc_norm": 0.18627450980392157, + "acc_norm_stderr": 0.038739587141493524 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3487394957983193, + "acc_stderr": 0.030956636328566545, + "acc_norm": 0.3487394957983193, + "acc_norm_stderr": 0.030956636328566545 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.36153846153846153, + "acc_stderr": 
0.02435958146539698, + "acc_norm": 0.36153846153846153, + "acc_norm_stderr": 0.02435958146539698 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4537037037037037, + "acc_stderr": 0.04812917324536823, + "acc_norm": 0.4537037037037037, + "acc_norm_stderr": 0.04812917324536823 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.33497536945812806, + "acc_stderr": 0.0332085274234831, + "acc_norm": 0.33497536945812806, + "acc_norm_stderr": 0.0332085274234831 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4032258064516129, + "acc_stderr": 0.027906150826041143, + "acc_norm": 0.4032258064516129, + "acc_norm_stderr": 0.027906150826041143 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5598290598290598, + "acc_stderr": 0.032520741720630506, + "acc_norm": 0.5598290598290598, + "acc_norm_stderr": 0.032520741720630506 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.39245283018867927, + "acc_stderr": 0.03005258057955784, + "acc_norm": 0.39245283018867927, + "acc_norm_stderr": 0.03005258057955784 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.43636363636363634, + "acc_stderr": 0.04750185058907297, + "acc_norm": 0.43636363636363634, + "acc_norm_stderr": 0.04750185058907297 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.29259259259259257, + "acc_stderr": 0.02773896963217609, + "acc_norm": 0.29259259259259257, + "acc_norm_stderr": 0.02773896963217609 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.25165562913907286, + "acc_stderr": 0.035433042343899844, + "acc_norm": 0.25165562913907286, + "acc_norm_stderr": 0.035433042343899844 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 
0.47761194029850745, + "acc_stderr": 0.03531987930208731, + "acc_norm": 0.47761194029850745, + "acc_norm_stderr": 0.03531987930208731 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.35260115606936415, + "acc_stderr": 0.036430371689585496, + "acc_norm": 0.35260115606936415, + "acc_norm_stderr": 0.036430371689585496 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.023266512213730575, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.023266512213730575 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.040166600304512336, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.040166600304512336 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4190751445086705, + "acc_stderr": 0.026564178111422615, + "acc_norm": 0.4190751445086705, + "acc_norm_stderr": 0.026564178111422615 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.38650306748466257, + "acc_stderr": 0.03825825548848607, + "acc_norm": 0.38650306748466257, + "acc_norm_stderr": 0.03825825548848607 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.43209876543209874, + "acc_stderr": 0.02756301097160668, + "acc_norm": 0.43209876543209874, + "acc_norm_stderr": 0.02756301097160668 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.44041450777202074, + "acc_stderr": 0.035827245300360945, + "acc_norm": 0.44041450777202074, + "acc_norm_stderr": 0.035827245300360945 + }, + 
"harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.04185774424022057, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.04185774424022057 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.44036697247706424, + "acc_stderr": 0.02128431062376155, + "acc_norm": 0.44036697247706424, + "acc_norm_stderr": 0.02128431062376155 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.24603174603174602, + "acc_stderr": 0.03852273364924316, + "acc_norm": 0.24603174603174602, + "acc_norm_stderr": 0.03852273364924316 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.39215686274509803, + "acc_stderr": 0.027956046165424516, + "acc_norm": 0.39215686274509803, + "acc_norm_stderr": 0.027956046165424516 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5867768595041323, + "acc_stderr": 0.04495087843548408, + "acc_norm": 0.5867768595041323, + "acc_norm_stderr": 0.04495087843548408 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.34210526315789475, + "acc_stderr": 0.03860731599316092, + "acc_norm": 0.34210526315789475, + "acc_norm_stderr": 0.03860731599316092 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.369281045751634, + "acc_stderr": 0.01952431674486634, + "acc_norm": 0.369281045751634, + "acc_norm_stderr": 0.01952431674486634 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.30851063829787234, + "acc_stderr": 0.027553366165101366, + "acc_norm": 0.30851063829787234, + "acc_norm_stderr": 0.027553366165101366 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.22321428571428573, + "acc_stderr": 0.039523019677025116, + "acc_norm": 0.22321428571428573, + "acc_norm_stderr": 0.039523019677025116 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2361111111111111, + "acc_stderr": 0.02896370257079103, + "acc_norm": 
0.2361111111111111, + "acc_norm_stderr": 0.02896370257079103 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3492647058823529, + "acc_stderr": 0.02895975519682485, + "acc_norm": 0.3492647058823529, + "acc_norm_stderr": 0.02895975519682485 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2979591836734694, + "acc_stderr": 0.029279567411065674, + "acc_norm": 0.2979591836734694, + "acc_norm_stderr": 0.029279567411065674 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5569620253164557, + "acc_stderr": 0.03233532777533484, + "acc_norm": 0.5569620253164557, + "acc_norm_stderr": 0.03233532777533484 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2953063885267275, + "acc_stderr": 0.011651061936208828, + "acc_norm": 0.2953063885267275, + "acc_norm_stderr": 0.011651061936208828 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.03410785338904718, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.03410785338904718 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.039036986477484416, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.039036986477484416 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2876376988984088, + "mc1_stderr": 0.015846315101394816, + "mc2": 0.4378033526867641, + "mc2_stderr": 0.014971475297462514 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4592680047225502, + "acc_stderr": 0.017133218276537673, 
+ "acc_norm": 0.5336481700118064, + "acc_norm_stderr": 0.017151384117131862 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + 
"harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "kfkas/llama-2-koen-13b-SFT-LoRA-4bit-re", + "model_sha": "c7993d169e2a2b9b7615212d1684daa76c4cb7e7", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/kfkas/llama-2-koen-13b-SFT-LoRA-4bit/result_2023-11-30 12:50:03.json b/kfkas/llama-2-koen-13b-SFT-LoRA-4bit/result_2023-11-30 12:50:03.json new file mode 100644 index 0000000000000000000000000000000000000000..53075e4e16b4cd56fa22704ac72f3bfe856a850d --- /dev/null +++ b/kfkas/llama-2-koen-13b-SFT-LoRA-4bit/result_2023-11-30 12:50:03.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.37542662116040953, + "acc_stderr": 0.01415063143511173, + "acc_norm": 0.4300341296928328, + "acc_norm_stderr": 0.014467631559137998 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4176458872734515, + "acc_stderr": 0.004921632645102382, + "acc_norm": 0.5644293965345548, + "acc_norm_stderr": 0.00494818136702496 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5263157894736842, + "acc_stderr": 0.038295098689947266, + "acc_norm": 
0.5263157894736842, + "acc_norm_stderr": 0.038295098689947266 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4174757281553398, + "acc_stderr": 0.048828405482122375, + "acc_norm": 0.4174757281553398, + "acc_norm_stderr": 0.048828405482122375 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5325670498084292, + "acc_stderr": 0.017841995750520874, + "acc_norm": 0.5325670498084292, + "acc_norm_stderr": 0.017841995750520874 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.043097329010363554, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.043097329010363554 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.42127659574468085, + "acc_stderr": 0.03227834510146268, + "acc_norm": 0.42127659574468085, + "acc_norm_stderr": 0.03227834510146268 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42168674698795183, + "acc_stderr": 0.038444531817709175, + "acc_norm": 0.42168674698795183, + "acc_norm_stderr": 0.038444531817709175 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4758842443729904, + "acc_stderr": 0.028365041542564577, + "acc_norm": 0.4758842443729904, + "acc_norm_stderr": 0.028365041542564577 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5246636771300448, + "acc_stderr": 0.03351695167652628, + "acc_norm": 0.5246636771300448, + "acc_norm_stderr": 0.03351695167652628 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48091603053435117, + "acc_stderr": 0.04382094705550988, + "acc_norm": 0.48091603053435117, + "acc_norm_stderr": 0.04382094705550988 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.45454545454545453, + "acc_stderr": 0.03547601494006937, + "acc_norm": 
0.45454545454545453, + "acc_norm_stderr": 0.03547601494006937 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4068965517241379, + "acc_stderr": 0.04093793981266237, + "acc_norm": 0.4068965517241379, + "acc_norm_stderr": 0.04093793981266237 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.17647058823529413, + "acc_stderr": 0.037932811853078105, + "acc_norm": 0.17647058823529413, + "acc_norm_stderr": 0.037932811853078105 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4369747899159664, + "acc_stderr": 0.032219436365661956, + "acc_norm": 0.4369747899159664, + "acc_norm_stderr": 0.032219436365661956 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.37948717948717947, + "acc_stderr": 0.024603626924097417, + "acc_norm": 0.37948717948717947, + "acc_norm_stderr": 0.024603626924097417 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.42592592592592593, + "acc_stderr": 0.0478034362693679, + "acc_norm": 0.42592592592592593, + "acc_norm_stderr": 0.0478034362693679 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3645320197044335, + "acc_stderr": 0.0338640574606209, + "acc_norm": 0.3645320197044335, + "acc_norm_stderr": 0.0338640574606209 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3967741935483871, + "acc_stderr": 0.027831231605767955, + "acc_norm": 0.3967741935483871, + "acc_norm_stderr": 0.027831231605767955 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5683760683760684, + "acc_stderr": 0.0324483553531149, + "acc_norm": 0.5683760683760684, + "acc_norm_stderr": 0.0324483553531149 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.37735849056603776, + 
"acc_stderr": 0.029832808114796005, + "acc_norm": 0.37735849056603776, + "acc_norm_stderr": 0.029832808114796005 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5, + "acc_stderr": 0.04789131426105757, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04789131426105757 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2851851851851852, + "acc_stderr": 0.027528599210340496, + "acc_norm": 0.2851851851851852, + "acc_norm_stderr": 0.027528599210340496 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.037101857261199946, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.037101857261199946 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5074626865671642, + "acc_stderr": 0.03535140084276719, + "acc_norm": 0.5074626865671642, + "acc_norm_stderr": 0.03535140084276719 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.37572254335260113, + "acc_stderr": 0.03692820767264867, + "acc_norm": 0.37572254335260113, + "acc_norm_stderr": 0.03692820767264867 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.023517294335963286, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.023517294335963286 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.040166600304512336, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.040166600304512336 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4884393063583815, + "acc_stderr": 0.026911898686377927, + "acc_norm": 0.4884393063583815, + "acc_norm_stderr": 0.026911898686377927 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 
0.4601226993865031, + "acc_stderr": 0.0391585729143697, + "acc_norm": 0.4601226993865031, + "acc_norm_stderr": 0.0391585729143697 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.45987654320987653, + "acc_stderr": 0.02773102275353928, + "acc_norm": 0.45987654320987653, + "acc_norm_stderr": 0.02773102275353928 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.49740932642487046, + "acc_stderr": 0.03608390745384487, + "acc_norm": 0.49740932642487046, + "acc_norm_stderr": 0.03608390745384487 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.21052631578947367, + "acc_stderr": 0.03835153954399419, + "acc_norm": 0.21052631578947367, + "acc_norm_stderr": 0.03835153954399419 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.48440366972477067, + "acc_stderr": 0.02142689153920805, + "acc_norm": 0.48440366972477067, + "acc_norm_stderr": 0.02142689153920805 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30158730158730157, + "acc_stderr": 0.04104947269903394, + "acc_norm": 0.30158730158730157, + "acc_norm_stderr": 0.04104947269903394 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.40522875816993464, + "acc_stderr": 0.02811092849280907, + "acc_norm": 0.40522875816993464, + "acc_norm_stderr": 0.02811092849280907 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6198347107438017, + "acc_stderr": 0.04431324501968431, + "acc_norm": 0.6198347107438017, + "acc_norm_stderr": 0.04431324501968431 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.34210526315789475, + "acc_stderr": 0.03860731599316092, + "acc_norm": 0.34210526315789475, + "acc_norm_stderr": 0.03860731599316092 + }, + 
"harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3741830065359477, + "acc_stderr": 0.019576953122088844, + "acc_norm": 0.3741830065359477, + "acc_norm_stderr": 0.019576953122088844 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2730496453900709, + "acc_stderr": 0.026577860943307854, + "acc_norm": 0.2730496453900709, + "acc_norm_stderr": 0.026577860943307854 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2767857142857143, + "acc_stderr": 0.04246624336697624, + "acc_norm": 0.2767857142857143, + "acc_norm_stderr": 0.04246624336697624 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.25462962962962965, + "acc_stderr": 0.029711275860005344, + "acc_norm": 0.25462962962962965, + "acc_norm_stderr": 0.029711275860005344 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.04960449637488584, + "acc_norm": 0.42, + "acc_norm_stderr": 0.04960449637488584 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3602941176470588, + "acc_stderr": 0.029163128570670736, + "acc_norm": 0.3602941176470588, + "acc_norm_stderr": 0.029163128570670736 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4163265306122449, + "acc_stderr": 0.03155782816556164, + "acc_norm": 0.4163265306122449, + "acc_norm_stderr": 0.03155782816556164 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5738396624472574, + "acc_stderr": 0.03219035703131774, + "acc_norm": 0.5738396624472574, + "acc_norm_stderr": 0.03219035703131774 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3076923076923077, + "acc_stderr": 0.011787910251664587, + 
"acc_norm": 0.3076923076923077, + "acc_norm_stderr": 0.011787910251664587 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.45098039215686275, + "acc_stderr": 0.03492406104163614, + "acc_norm": 0.45098039215686275, + "acc_norm_stderr": 0.03492406104163614 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5333333333333333, + "acc_stderr": 0.03895658065271846, + "acc_norm": 0.5333333333333333, + "acc_norm_stderr": 0.03895658065271846 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2533659730722154, + "mc1_stderr": 0.015225899340826824, + "mc2": 0.41284235007226594, + "mc2_stderr": 0.014696062981475876 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.44037780401416765, + "acc_stderr": 0.017067699774312984, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.017119172208061504 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 
1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "kfkas/llama-2-koen-13b-SFT-LoRA-4bit", + "model_sha": "e87f9090850cc807f722327cf888243761183090", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/kfkas/my_test_LLM/result_2023-11-29 09:44:36.json b/kfkas/my_test_LLM/result_2023-11-29 09:44:36.json new file mode 100644 
index 0000000000000000000000000000000000000000..7330634c056e9605a1ed12169d502b3b5cea8abe --- /dev/null +++ b/kfkas/my_test_LLM/result_2023-11-29 09:44:36.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3839590443686007, + "acc_stderr": 0.01421244498065189, + "acc_norm": 0.44368600682593856, + "acc_norm_stderr": 0.01451842182567044 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4206333399721171, + "acc_stderr": 0.004926518439372262, + "acc_norm": 0.5655247958573989, + "acc_norm_stderr": 0.004946748608271349 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.45614035087719296, + "acc_stderr": 0.03820042586602966, + "acc_norm": 0.45614035087719296, + "acc_norm_stderr": 0.03820042586602966 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.47572815533980584, + "acc_stderr": 0.049449010929737795, + "acc_norm": 0.47572815533980584, + "acc_norm_stderr": 0.049449010929737795 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5376756066411239, + "acc_stderr": 0.01782913176428718, + "acc_norm": 0.5376756066411239, + "acc_norm_stderr": 0.01782913176428718 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4222222222222222, + "acc_stderr": 0.042667634040995814, + "acc_norm": 0.4222222222222222, + "acc_norm_stderr": 0.042667634040995814 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206824, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206824 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4340425531914894, + "acc_stderr": 0.03240038086792747, + "acc_norm": 0.4340425531914894, + "acc_norm_stderr": 0.03240038086792747 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39156626506024095, + "acc_stderr": 0.037998574544796375, + "acc_norm": 0.39156626506024095, + "acc_norm_stderr": 0.037998574544796375 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4855305466237942, + "acc_stderr": 0.028386198084177673, + "acc_norm": 0.4855305466237942, + "acc_norm_stderr": 
0.028386198084177673 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5022421524663677, + "acc_stderr": 0.03355746535223263, + "acc_norm": 0.5022421524663677, + "acc_norm_stderr": 0.03355746535223263 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.45038167938931295, + "acc_stderr": 0.04363643698524779, + "acc_norm": 0.45038167938931295, + "acc_norm_stderr": 0.04363643698524779 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5606060606060606, + "acc_stderr": 0.035360859475294805, + "acc_norm": 0.5606060606060606, + "acc_norm_stderr": 0.035360859475294805 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3586206896551724, + "acc_stderr": 0.039966295748767186, + "acc_norm": 0.3586206896551724, + "acc_norm_stderr": 0.039966295748767186 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237654, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237654 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4369747899159664, + "acc_stderr": 0.032219436365661956, + "acc_norm": 0.4369747899159664, + "acc_norm_stderr": 0.032219436365661956 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.39487179487179486, + "acc_stderr": 0.02478431694215636, + "acc_norm": 0.39487179487179486, + "acc_norm_stderr": 0.02478431694215636 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 
0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4187192118226601, + "acc_stderr": 0.03471192860518468, + "acc_norm": 0.4187192118226601, + "acc_norm_stderr": 0.03471192860518468 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.46774193548387094, + "acc_stderr": 0.028384747788813326, + "acc_norm": 0.46774193548387094, + "acc_norm_stderr": 0.028384747788813326 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6581196581196581, + "acc_stderr": 0.03107502852650775, + "acc_norm": 0.6581196581196581, + "acc_norm_stderr": 0.03107502852650775 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4226415094339623, + "acc_stderr": 0.03040233144576954, + "acc_norm": 0.4226415094339623, + "acc_norm_stderr": 0.03040233144576954 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4818181818181818, + "acc_stderr": 0.04785964010794917, + "acc_norm": 0.4818181818181818, + "acc_norm_stderr": 0.04785964010794917 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25555555555555554, + "acc_stderr": 0.026593939101844065, + "acc_norm": 0.25555555555555554, + "acc_norm_stderr": 0.026593939101844065 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.03710185726119994, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.03710185726119994 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5970149253731343, + "acc_stderr": 0.034683432951111266, + "acc_norm": 0.5970149253731343, + "acc_norm_stderr": 0.034683432951111266 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3988439306358382, + "acc_stderr": 0.03733626655383509, + "acc_norm": 0.3988439306358382, + "acc_norm_stderr": 0.03733626655383509 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.022644212615525218, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.022644212615525218 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3680555555555556, 
+ "acc_stderr": 0.040329990539607195, + "acc_norm": 0.3680555555555556, + "acc_norm_stderr": 0.040329990539607195 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.64, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.64, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4393063583815029, + "acc_stderr": 0.026720034380514995, + "acc_norm": 0.4393063583815029, + "acc_norm_stderr": 0.026720034380514995 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.50920245398773, + "acc_stderr": 0.03927705600787443, + "acc_norm": 0.50920245398773, + "acc_norm_stderr": 0.03927705600787443 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4351851851851852, + "acc_stderr": 0.027586006221607704, + "acc_norm": 0.4351851851851852, + "acc_norm_stderr": 0.027586006221607704 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5129533678756477, + "acc_stderr": 0.0360722806104775, + "acc_norm": 0.5129533678756477, + "acc_norm_stderr": 0.0360722806104775 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.04142439719489359, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.04142439719489359 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5284403669724771, + "acc_stderr": 0.02140261569734804, + "acc_norm": 0.5284403669724771, + "acc_norm_stderr": 0.02140261569734804 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.040061680838488774, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.040061680838488774 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3888888888888889, + 
"acc_stderr": 0.027914055510467998, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.027914055510467998 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6033057851239669, + "acc_stderr": 0.044658697805310094, + "acc_norm": 0.6033057851239669, + "acc_norm_stderr": 0.044658697805310094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3881578947368421, + "acc_stderr": 0.03965842097512744, + "acc_norm": 0.3881578947368421, + "acc_norm_stderr": 0.03965842097512744 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3790849673202614, + "acc_stderr": 0.01962744474841224, + "acc_norm": 0.3790849673202614, + "acc_norm_stderr": 0.01962744474841224 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3120567375886525, + "acc_stderr": 0.02764012054516993, + "acc_norm": 0.3120567375886525, + "acc_norm_stderr": 0.02764012054516993 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.23214285714285715, + "acc_stderr": 0.04007341809755805, + "acc_norm": 0.23214285714285715, + "acc_norm_stderr": 0.04007341809755805 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.33796296296296297, + "acc_stderr": 0.032259413526312945, + "acc_norm": 0.33796296296296297, + "acc_norm_stderr": 0.032259413526312945 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_professional_medicine|5": 
{ + "acc": 0.4338235294117647, + "acc_stderr": 0.03010563657001664, + "acc_norm": 0.4338235294117647, + "acc_norm_stderr": 0.03010563657001664 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.44081632653061226, + "acc_stderr": 0.03178419114175363, + "acc_norm": 0.44081632653061226, + "acc_norm_stderr": 0.03178419114175363 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6033755274261603, + "acc_stderr": 0.03184399873811225, + "acc_norm": 0.6033755274261603, + "acc_norm_stderr": 0.03184399873811225 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3350717079530639, + "acc_stderr": 0.012055499471330371, + "acc_norm": 0.3350717079530639, + "acc_norm_stderr": 0.012055499471330371 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.46568627450980393, + "acc_stderr": 0.03501038327635897, + "acc_norm": 0.46568627450980393, + "acc_norm_stderr": 0.03501038327635897 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.49696969696969695, + "acc_stderr": 0.03904272341431857, + "acc_norm": 0.49696969696969695, + "acc_norm_stderr": 0.03904272341431857 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2778457772337821, + "mc1_stderr": 0.015680929364024633, + "mc2": 0.45020017328432316, + "mc2_stderr": 0.014998933368831563 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4781582054309327, + "acc_stderr": 0.017173944474294375, + "acc_norm": 0.5808736717827627, + "acc_norm_stderr": 0.016963995010862796 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + 
"harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + 
"harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "kfkas/my_test_LLM", + "model_sha": "5457da887248f0209222ef3507e367f0b01cec9a", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/kfkas/yi-ko-SFT-LoRA/result_2023-12-04 02:30:47.json b/kfkas/yi-ko-SFT-LoRA/result_2023-12-04 02:30:47.json new file mode 100644 index 0000000000000000000000000000000000000000..b02311c765090529c5f215f5c4e18b8639591450 --- /dev/null +++ b/kfkas/yi-ko-SFT-LoRA/result_2023-12-04 02:30:47.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3464163822525597, + "acc_stderr": 0.013905011180063246, + "acc_norm": 0.4104095563139932, + "acc_norm_stderr": 0.014374922192642664 + }, + "harness|ko_hellaswag|10": { + "acc": 0.398725353515236, + "acc_stderr": 0.004886353563571854, + "acc_norm": 0.5309699263095001, + "acc_norm_stderr": 0.004980200451851671 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5087719298245614, + "acc_stderr": 0.038342347441649924, + "acc_norm": 0.5087719298245614, + "acc_norm_stderr": 0.038342347441649924 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5339805825242718, + "acc_stderr": 0.04939291447273481, + "acc_norm": 0.5339805825242718, + "acc_norm_stderr": 0.04939291447273481 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5670498084291188, + "acc_stderr": 0.017718469101513982, + "acc_norm": 0.5670498084291188, + "acc_norm_stderr": 0.017718469101513982 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4740740740740741, + "acc_stderr": 0.04313531696750574, + "acc_norm": 0.4740740740740741, + "acc_norm_stderr": 0.04313531696750574 + }, + 
"harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.425531914893617, + "acc_stderr": 0.03232146916224469, + "acc_norm": 0.425531914893617, + "acc_norm_stderr": 0.03232146916224469 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3855421686746988, + "acc_stderr": 0.037891344246115496, + "acc_norm": 0.3855421686746988, + "acc_norm_stderr": 0.037891344246115496 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5080385852090032, + "acc_stderr": 0.028394421370984545, + "acc_norm": 0.5080385852090032, + "acc_norm_stderr": 0.028394421370984545 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.45739910313901344, + "acc_stderr": 0.033435777055830646, + "acc_norm": 0.45739910313901344, + "acc_norm_stderr": 0.033435777055830646 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5114503816793893, + "acc_stderr": 0.043841400240780176, + "acc_norm": 0.5114503816793893, + "acc_norm_stderr": 0.043841400240780176 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6161616161616161, + "acc_stderr": 0.03464881675016337, + "acc_norm": 0.6161616161616161, + "acc_norm_stderr": 0.03464881675016337 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4827586206896552, + "acc_stderr": 0.04164188720169377, + "acc_norm": 0.4827586206896552, + "acc_norm_stderr": 0.04164188720169377 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.04280105837364395, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.04280105837364395 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.453781512605042, + "acc_stderr": 0.03233943468182088, + "acc_norm": 0.453781512605042, + "acc_norm_stderr": 
0.03233943468182088 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.44358974358974357, + "acc_stderr": 0.025189149894764194, + "acc_norm": 0.44358974358974357, + "acc_norm_stderr": 0.025189149894764194 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5185185185185185, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.5185185185185185, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.35467980295566504, + "acc_stderr": 0.03366124489051449, + "acc_norm": 0.35467980295566504, + "acc_norm_stderr": 0.03366124489051449 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.47096774193548385, + "acc_stderr": 0.028396016402761008, + "acc_norm": 0.47096774193548385, + "acc_norm_stderr": 0.028396016402761008 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6709401709401709, + "acc_stderr": 0.03078232157768817, + "acc_norm": 0.6709401709401709, + "acc_norm_stderr": 0.03078232157768817 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5056603773584906, + "acc_stderr": 0.030770900763851302, + "acc_norm": 0.5056603773584906, + "acc_norm_stderr": 0.030770900763851302 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5181818181818182, + "acc_stderr": 0.04785964010794916, + "acc_norm": 0.5181818181818182, + "acc_norm_stderr": 0.04785964010794916 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.28888888888888886, + "acc_stderr": 0.027634907264178544, + "acc_norm": 0.28888888888888886, + "acc_norm_stderr": 0.027634907264178544 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.41721854304635764, + "acc_stderr": 0.04026141497634611, + 
"acc_norm": 0.41721854304635764, + "acc_norm_stderr": 0.04026141497634611 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5671641791044776, + "acc_stderr": 0.0350349092367328, + "acc_norm": 0.5671641791044776, + "acc_norm_stderr": 0.0350349092367328 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.37572254335260113, + "acc_stderr": 0.03692820767264867, + "acc_norm": 0.37572254335260113, + "acc_norm_stderr": 0.03692820767264867 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.34656084656084657, + "acc_stderr": 0.024508777521028424, + "acc_norm": 0.34656084656084657, + "acc_norm_stderr": 0.024508777521028424 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4097222222222222, + "acc_stderr": 0.041124909746707884, + "acc_norm": 0.4097222222222222, + "acc_norm_stderr": 0.041124909746707884 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.62, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.62, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5057803468208093, + "acc_stderr": 0.02691729617914911, + "acc_norm": 0.5057803468208093, + "acc_norm_stderr": 0.02691729617914911 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4723926380368098, + "acc_stderr": 0.039223782906109894, + "acc_norm": 0.4723926380368098, + "acc_norm_stderr": 0.039223782906109894 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4783950617283951, + "acc_stderr": 0.027794760105008746, + "acc_norm": 0.4783950617283951, + "acc_norm_stderr": 0.027794760105008746 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5854922279792746, + "acc_stderr": 
0.03555300319557669, + "acc_norm": 0.5854922279792746, + "acc_norm_stderr": 0.03555300319557669 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3157894736842105, + "acc_stderr": 0.04372748290278008, + "acc_norm": 0.3157894736842105, + "acc_norm_stderr": 0.04372748290278008 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5981651376146789, + "acc_stderr": 0.02102010617299701, + "acc_norm": 0.5981651376146789, + "acc_norm_stderr": 0.02102010617299701 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30158730158730157, + "acc_stderr": 0.04104947269903394, + "acc_norm": 0.30158730158730157, + "acc_norm_stderr": 0.04104947269903394 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4542483660130719, + "acc_stderr": 0.028509807802626564, + "acc_norm": 0.4542483660130719, + "acc_norm_stderr": 0.028509807802626564 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956913, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956913 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6859504132231405, + "acc_stderr": 0.042369647530410184, + "acc_norm": 0.6859504132231405, + "acc_norm_stderr": 0.042369647530410184 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.45394736842105265, + "acc_stderr": 0.04051646342874141, + "acc_norm": 0.45394736842105265, + "acc_norm_stderr": 0.04051646342874141 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4019607843137255, + "acc_stderr": 0.019835176484375383, + "acc_norm": 0.4019607843137255, + "acc_norm_stderr": 0.019835176484375383 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.34397163120567376, + "acc_stderr": 0.02833801742861131, + "acc_norm": 0.34397163120567376, + "acc_norm_stderr": 0.02833801742861131 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25892857142857145, + "acc_stderr": 0.041577515398656284, + "acc_norm": 0.25892857142857145, + "acc_norm_stderr": 0.041577515398656284 + }, + 
"harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.03114144782353604, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.03114144782353604 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.25251396648044694, + "acc_stderr": 0.014530330201468636, + "acc_norm": 0.25251396648044694, + "acc_norm_stderr": 0.014530330201468636 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.47, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.47, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.39705882352941174, + "acc_stderr": 0.02972215209928006, + "acc_norm": 0.39705882352941174, + "acc_norm_stderr": 0.02972215209928006 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4204081632653061, + "acc_stderr": 0.03160106993449604, + "acc_norm": 0.4204081632653061, + "acc_norm_stderr": 0.03160106993449604 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6033755274261603, + "acc_stderr": 0.03184399873811225, + "acc_norm": 0.6033755274261603, + "acc_norm_stderr": 0.03184399873811225 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3324641460234681, + "acc_stderr": 0.01203202233226052, + "acc_norm": 0.3324641460234681, + "acc_norm_stderr": 0.01203202233226052 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5098039215686274, + "acc_stderr": 0.03508637358630572, + "acc_norm": 0.5098039215686274, + "acc_norm_stderr": 0.03508637358630572 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5757575757575758, + "acc_stderr": 0.038592681420702636, + "acc_norm": 0.5757575757575758, + "acc_norm_stderr": 0.038592681420702636 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2692778457772338, + "mc1_stderr": 0.015528566637087305, + 
"mc2": 0.40810248555867834, + "mc2_stderr": 0.014955020241849247 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5088547815820543, + "acc_stderr": 0.01718765819933674, + "acc_norm": 0.6009445100354192, + "acc_norm_stderr": 0.016836377292849307 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + 
"harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "kfkas/yi-ko-SFT-LoRA", + "model_sha": "8d8dffd8fd95b885d46d337d4d2a46319b76dde7", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/khKim/mytest/result_2024-05-27 02:46:07.json b/khKim/mytest/result_2024-05-27 02:46:07.json new file mode 100644 index 0000000000000000000000000000000000000000..07d891f1cc27f7bbea65d3110f473595518807fc --- /dev/null +++ b/khKim/mytest/result_2024-05-27 02:46:07.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.40273037542662116, + "acc_stderr": 0.014332236306790149, + "acc_norm": 0.447098976109215, + "acc_norm_stderr": 0.014529380160526843 + }, + "harness|ko_hellaswag|10": { + "acc": 0.38568014339772955, + "acc_stderr": 0.004857607641160631, + "acc_norm": 0.5084644493128859, + "acc_norm_stderr": 0.004989066355449556 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 
0.5146198830409356, + "acc_stderr": 0.038331852752130254, + "acc_norm": 0.5146198830409356, + "acc_norm_stderr": 0.038331852752130254 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6407766990291263, + "acc_stderr": 0.04750458399041696, + "acc_norm": 0.6407766990291263, + "acc_norm_stderr": 0.04750458399041696 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4840357598978289, + "acc_stderr": 0.01787084750608172, + "acc_norm": 0.4840357598978289, + "acc_norm_stderr": 0.01787084750608172 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4, + "acc_stderr": 0.04232073695151589, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04232073695151589 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39148936170212767, + "acc_stderr": 0.03190701242326812, + "acc_norm": 0.39148936170212767, + "acc_norm_stderr": 0.03190701242326812 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39759036144578314, + "acc_stderr": 0.038099730845402184, + "acc_norm": 0.39759036144578314, + "acc_norm_stderr": 0.038099730845402184 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4340836012861736, + "acc_stderr": 0.028150232244535594, + "acc_norm": 0.4340836012861736, + "acc_norm_stderr": 0.028150232244535594 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4798206278026906, + "acc_stderr": 0.033530461674123, + "acc_norm": 0.4798206278026906, + "acc_norm_stderr": 0.033530461674123 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3435114503816794, + "acc_stderr": 0.041649760719448786, + "acc_norm": 0.3435114503816794, + "acc_norm_stderr": 0.041649760719448786 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.45, + "acc_stderr": 0.04999999999999998, + "acc_norm": 0.45, + "acc_norm_stderr": 0.04999999999999998 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.47474747474747475, + "acc_stderr": 
0.035578062450873145, + "acc_norm": 0.47474747474747475, + "acc_norm_stderr": 0.035578062450873145 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.041042692118062316, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.041042692118062316 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.19607843137254902, + "acc_stderr": 0.039505818611799616, + "acc_norm": 0.19607843137254902, + "acc_norm_stderr": 0.039505818611799616 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.453781512605042, + "acc_stderr": 0.03233943468182087, + "acc_norm": 0.453781512605042, + "acc_norm_stderr": 0.03233943468182087 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4230769230769231, + "acc_stderr": 0.025049197876042335, + "acc_norm": 0.4230769230769231, + "acc_norm_stderr": 0.025049197876042335 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39901477832512317, + "acc_stderr": 0.03445487686264716, + "acc_norm": 0.39901477832512317, + "acc_norm_stderr": 0.03445487686264716 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4612903225806452, + "acc_stderr": 0.028358634859836918, + "acc_norm": 0.4612903225806452, + "acc_norm_stderr": 0.028358634859836918 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7136752136752137, + "acc_stderr": 0.029614323690456648, + "acc_norm": 0.7136752136752137, + "acc_norm_stderr": 0.029614323690456648 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 
0.46037735849056605, + "acc_stderr": 0.03067609659938918, + "acc_norm": 0.46037735849056605, + "acc_norm_stderr": 0.03067609659938918 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3111111111111111, + "acc_stderr": 0.028226446749683512, + "acc_norm": 0.3111111111111111, + "acc_norm_stderr": 0.028226446749683512 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3443708609271523, + "acc_stderr": 0.038796870240733264, + "acc_norm": 0.3443708609271523, + "acc_norm_stderr": 0.038796870240733264 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5970149253731343, + "acc_stderr": 0.034683432951111266, + "acc_norm": 0.5970149253731343, + "acc_norm_stderr": 0.034683432951111266 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3699421965317919, + "acc_stderr": 0.036812296333943194, + "acc_norm": 0.3699421965317919, + "acc_norm_stderr": 0.036812296333943194 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3306878306878307, + "acc_stderr": 0.024229965298425086, + "acc_norm": 0.3306878306878307, + "acc_norm_stderr": 0.024229965298425086 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.04076663253918567, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.04076663253918567 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4624277456647399, + "acc_stderr": 0.026842985519615375, + "acc_norm": 0.4624277456647399, + "acc_norm_stderr": 0.026842985519615375 + }, + 
"harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.31901840490797545, + "acc_stderr": 0.03661997551073836, + "acc_norm": 0.31901840490797545, + "acc_norm_stderr": 0.03661997551073836 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.41358024691358025, + "acc_stderr": 0.027402042040269955, + "acc_norm": 0.41358024691358025, + "acc_norm_stderr": 0.027402042040269955 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5284974093264249, + "acc_stderr": 0.036025735712884414, + "acc_norm": 0.5284974093264249, + "acc_norm_stderr": 0.036025735712884414 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.04227054451232199, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.04227054451232199 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.4972477064220184, + "acc_stderr": 0.02143699835976532, + "acc_norm": 0.4972477064220184, + "acc_norm_stderr": 0.02143699835976532 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.04134913018303316, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.04134913018303316 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4215686274509804, + "acc_stderr": 0.028275490156791438, + "acc_norm": 0.4215686274509804, + "acc_norm_stderr": 0.028275490156791438 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6694214876033058, + "acc_stderr": 0.04294340845212093, + "acc_norm": 0.6694214876033058, + "acc_norm_stderr": 0.04294340845212093 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4144736842105263, + "acc_stderr": 0.04008973785779206, + "acc_norm": 0.4144736842105263, + "acc_norm_stderr": 
0.04008973785779206 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4084967320261438, + "acc_stderr": 0.01988622103750187, + "acc_norm": 0.4084967320261438, + "acc_norm_stderr": 0.01988622103750187 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32269503546099293, + "acc_stderr": 0.02788913930053478, + "acc_norm": 0.32269503546099293, + "acc_norm_stderr": 0.02788913930053478 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.04547960999764376, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.04547960999764376 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.28703703703703703, + "acc_stderr": 0.030851992993257013, + "acc_norm": 0.28703703703703703, + "acc_norm_stderr": 0.030851992993257013 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.26033519553072626, + "acc_stderr": 0.014676252009319464, + "acc_norm": 0.26033519553072626, + "acc_norm_stderr": 0.014676252009319464 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.2867647058823529, + "acc_stderr": 0.02747227447323382, + "acc_norm": 0.2867647058823529, + "acc_norm_stderr": 0.02747227447323382 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.37551020408163266, + "acc_stderr": 0.03100120903989484, + "acc_norm": 0.37551020408163266, + "acc_norm_stderr": 0.03100120903989484 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5443037974683544, + "acc_stderr": 0.03241920684693334, + "acc_norm": 0.5443037974683544, + "acc_norm_stderr": 0.03241920684693334 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.29465449804432853, + 
"acc_stderr": 0.011643576764069552, + "acc_norm": 0.29465449804432853, + "acc_norm_stderr": 0.011643576764069552 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.35294117647058826, + "acc_stderr": 0.03354092437591519, + "acc_norm": 0.35294117647058826, + "acc_norm_stderr": 0.03354092437591519 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3575757575757576, + "acc_stderr": 0.037425970438065864, + "acc_norm": 0.3575757575757576, + "acc_norm_stderr": 0.037425970438065864 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.38310893512851896, + "mc1_stderr": 0.01701846167938986, + "mc2": 0.5406794388419982, + "mc2_stderr": 0.01529094529182362 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3955135773317591, + "acc_stderr": 0.016810815902206046, + "acc_norm": 0.4332939787485242, + "acc_norm_stderr": 0.0170366836418931 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + 
"harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "khKim/mytest", + "model_sha": "a89c6837fd08ee9ceca04467a4ea04337177f63e", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/kihoonlee/STOCK_SOLAR-10.7B/result_2024-06-14 02:07:26.json 
b/kihoonlee/STOCK_SOLAR-10.7B/result_2024-06-14 02:07:26.json new file mode 100644 index 0000000000000000000000000000000000000000..0bc52c1bdb9d37a6c6c82ad963094a0abc9dc96c --- /dev/null +++ b/kihoonlee/STOCK_SOLAR-10.7B/result_2024-06-14 02:07:26.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.7568259385665529, + "acc_stderr": 0.012536554144587089, + "acc_norm": 0.7901023890784983, + "acc_norm_stderr": 0.011900548748047449 + }, + "harness|ko_hellaswag|10": { + "acc": 0.7310296753634734, + "acc_stderr": 0.004425182676353251, + "acc_norm": 0.8147779326827326, + "acc_norm_stderr": 0.0038768367094611897 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.7017543859649122, + "acc_stderr": 0.03508771929824564, + "acc_norm": 0.7017543859649122, + "acc_norm_stderr": 0.03508771929824564 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.7669902912621359, + "acc_stderr": 0.04185832598928315, + "acc_norm": 0.7669902912621359, + "acc_norm_stderr": 0.04185832598928315 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6934865900383141, + "acc_stderr": 0.01648695289304151, + "acc_norm": 0.6934865900383141, + "acc_norm_stderr": 0.01648695289304151 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.5111111111111111, + "acc_stderr": 0.04318275491977976, + "acc_norm": 0.5111111111111111, + "acc_norm_stderr": 0.04318275491977976 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4808510638297872, + "acc_stderr": 0.03266204299064677, + "acc_norm": 0.4808510638297872, + "acc_norm_stderr": 0.03266204299064677 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4819277108433735, + "acc_stderr": 0.03889951252827216, + "acc_norm": 0.4819277108433735, + "acc_norm_stderr": 0.03889951252827216 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.639871382636656, + "acc_stderr": 
0.02726429759980401, + "acc_norm": 0.639871382636656, + "acc_norm_stderr": 0.02726429759980401 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.6681614349775785, + "acc_stderr": 0.031602951437766785, + "acc_norm": 0.6681614349775785, + "acc_norm_stderr": 0.031602951437766785 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5801526717557252, + "acc_stderr": 0.04328577215262973, + "acc_norm": 0.5801526717557252, + "acc_norm_stderr": 0.04328577215262973 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7424242424242424, + "acc_stderr": 0.031156269519646847, + "acc_norm": 0.7424242424242424, + "acc_norm_stderr": 0.031156269519646847 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5310344827586206, + "acc_stderr": 0.04158632762097828, + "acc_norm": 0.5310344827586206, + "acc_norm_stderr": 0.04158632762097828 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.04617034827006717, + "acc_norm": 0.3137254901960784, + "acc_norm_stderr": 0.04617034827006717 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6218487394957983, + "acc_stderr": 0.03149930577784906, + "acc_norm": 0.6218487394957983, + "acc_norm_stderr": 0.03149930577784906 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.6205128205128205, + "acc_stderr": 0.024603626924097417, + "acc_norm": 0.6205128205128205, + "acc_norm_stderr": 0.024603626924097417 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.66, + "acc_stderr": 0.04760952285695237, + "acc_norm": 0.66, + "acc_norm_stderr": 0.04760952285695237 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6944444444444444, + 
"acc_stderr": 0.04453197507374984, + "acc_norm": 0.6944444444444444, + "acc_norm_stderr": 0.04453197507374984 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4236453201970443, + "acc_stderr": 0.03476725747649038, + "acc_norm": 0.4236453201970443, + "acc_norm_stderr": 0.03476725747649038 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6387096774193548, + "acc_stderr": 0.02732754844795755, + "acc_norm": 0.6387096774193548, + "acc_norm_stderr": 0.02732754844795755 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.8162393162393162, + "acc_stderr": 0.025372139671722933, + "acc_norm": 0.8162393162393162, + "acc_norm_stderr": 0.025372139671722933 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5622641509433962, + "acc_stderr": 0.030533338430467516, + "acc_norm": 0.5622641509433962, + "acc_norm_stderr": 0.030533338430467516 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.04607582090719976, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.04607582090719976 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.35555555555555557, + "acc_stderr": 0.029185714949857406, + "acc_norm": 0.35555555555555557, + "acc_norm_stderr": 0.029185714949857406 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3443708609271523, + "acc_stderr": 0.038796870240733264, + "acc_norm": 0.3443708609271523, + "acc_norm_stderr": 0.038796870240733264 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.736318407960199, + "acc_stderr": 0.03115715086935557, + "acc_norm": 0.736318407960199, + "acc_norm_stderr": 0.03115715086935557 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5664739884393064, + "acc_stderr": 0.03778621079092055, + "acc_norm": 0.5664739884393064, + "acc_norm_stderr": 0.03778621079092055 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.4417989417989418, + "acc_stderr": 0.025576257061253833, + "acc_norm": 0.4417989417989418, + "acc_norm_stderr": 
0.025576257061253833 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5902777777777778, + "acc_stderr": 0.04112490974670787, + "acc_norm": 0.5902777777777778, + "acc_norm_stderr": 0.04112490974670787 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.76, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.76, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.6069364161849711, + "acc_stderr": 0.026296227915613663, + "acc_norm": 0.6069364161849711, + "acc_norm_stderr": 0.026296227915613663 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5766871165644172, + "acc_stderr": 0.03881891213334384, + "acc_norm": 0.5766871165644172, + "acc_norm_stderr": 0.03881891213334384 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6728395061728395, + "acc_stderr": 0.026105673861409825, + "acc_norm": 0.6728395061728395, + "acc_norm_stderr": 0.026105673861409825 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.047609522856952344, + "acc_norm": 0.34, + "acc_norm_stderr": 0.047609522856952344 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7616580310880829, + "acc_stderr": 0.03074890536390989, + "acc_norm": 0.7616580310880829, + "acc_norm_stderr": 0.03074890536390989 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.47368421052631576, + "acc_stderr": 0.046970851366478626, + "acc_norm": 0.47368421052631576, + "acc_norm_stderr": 0.046970851366478626 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.7174311926605504, + "acc_stderr": 0.01930424349770715, + "acc_norm": 0.7174311926605504, + "acc_norm_stderr": 0.01930424349770715 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.0442626668137991, + "acc_norm": 0.42857142857142855, + 
"acc_norm_stderr": 0.0442626668137991 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.6241830065359477, + "acc_stderr": 0.02773283435336394, + "acc_norm": 0.6241830065359477, + "acc_norm_stderr": 0.02773283435336394 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.71, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.71, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7520661157024794, + "acc_stderr": 0.039418975265163025, + "acc_norm": 0.7520661157024794, + "acc_norm_stderr": 0.039418975265163025 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.6447368421052632, + "acc_stderr": 0.03894734487013316, + "acc_norm": 0.6447368421052632, + "acc_norm_stderr": 0.03894734487013316 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5816993464052288, + "acc_stderr": 0.019955975145835542, + "acc_norm": 0.5816993464052288, + "acc_norm_stderr": 0.019955975145835542 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.4432624113475177, + "acc_stderr": 0.029634838473766006, + "acc_norm": 0.4432624113475177, + "acc_norm_stderr": 0.029634838473766006 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.41964285714285715, + "acc_stderr": 0.04684099321077106, + "acc_norm": 0.41964285714285715, + "acc_norm_stderr": 0.04684099321077106 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5138888888888888, + "acc_stderr": 0.03408655867977747, + "acc_norm": 0.5138888888888888, + "acc_norm_stderr": 0.03408655867977747 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.3564245810055866, + "acc_stderr": 0.016018239710513412, + "acc_norm": 0.3564245810055866, + "acc_norm_stderr": 0.016018239710513412 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.71, + "acc_stderr": 0.045604802157206845, 
+ "acc_norm": 0.71, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5257352941176471, + "acc_stderr": 0.030332578094555033, + "acc_norm": 0.5257352941176471, + "acc_norm_stderr": 0.030332578094555033 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.636734693877551, + "acc_stderr": 0.030789051139030806, + "acc_norm": 0.636734693877551, + "acc_norm_stderr": 0.030789051139030806 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7257383966244726, + "acc_stderr": 0.029041333510598025, + "acc_norm": 0.7257383966244726, + "acc_norm_stderr": 0.029041333510598025 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.4491525423728814, + "acc_stderr": 0.012704030518851472, + "acc_norm": 0.4491525423728814, + "acc_norm_stderr": 0.012704030518851472 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6519607843137255, + "acc_stderr": 0.03343311240488418, + "acc_norm": 0.6519607843137255, + "acc_norm_stderr": 0.03343311240488418 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6606060606060606, + "acc_stderr": 0.03697442205031595, + "acc_norm": 0.6606060606060606, + "acc_norm_stderr": 0.03697442205031595 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.7674418604651163, + "mc1_stderr": 0.014789157531080501, + "mc2": 0.831609352622251, + "mc2_stderr": 0.012855554552905074 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5029515938606848, + "acc_stderr": 0.017190054580194698, + "acc_norm": 0.5289256198347108, + "acc_norm_stderr": 0.017161563949916345 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + 
"harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + 
"harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "kihoonlee/STOCK_SOLAR-10.7B", + "model_sha": "eb6f7b35eef73df76209c443271af8f05ee69a1b", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/kihoonlee/stock-solar-10.7b-v1/result_2024-05-16 04:25:33.json b/kihoonlee/stock-solar-10.7b-v1/result_2024-05-16 04:25:33.json new file mode 100644 index 0000000000000000000000000000000000000000..6e4c127f2c301186f423116c3cf2e4a0cf4bd502 --- /dev/null +++ b/kihoonlee/stock-solar-10.7b-v1/result_2024-05-16 04:25:33.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.7568259385665529, + "acc_stderr": 0.012536554144587089, + "acc_norm": 0.7909556313993175, + "acc_norm_stderr": 0.011882746987406467 + }, + "harness|ko_hellaswag|10": { + "acc": 0.7312288388767177, + "acc_stderr": 0.0044241465627461165, + "acc_norm": 0.8148775144393547, + "acc_norm_stderr": 0.0038760312505449206 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.7017543859649122, + "acc_stderr": 0.03508771929824564, + "acc_norm": 0.7017543859649122, + "acc_norm_stderr": 0.03508771929824564 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.7669902912621359, + "acc_stderr": 0.04185832598928315, + "acc_norm": 0.7669902912621359, + "acc_norm_stderr": 0.04185832598928315 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6922094508301405, + "acc_stderr": 0.016506045045155637, + "acc_norm": 0.6922094508301405, + "acc_norm_stderr": 0.016506045045155637 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.5111111111111111, + 
"acc_stderr": 0.04318275491977976, + "acc_norm": 0.5111111111111111, + "acc_norm_stderr": 0.04318275491977976 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4808510638297872, + "acc_stderr": 0.03266204299064677, + "acc_norm": 0.4808510638297872, + "acc_norm_stderr": 0.03266204299064677 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4819277108433735, + "acc_stderr": 0.03889951252827216, + "acc_norm": 0.4819277108433735, + "acc_norm_stderr": 0.03889951252827216 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6366559485530546, + "acc_stderr": 0.027316847674192717, + "acc_norm": 0.6366559485530546, + "acc_norm_stderr": 0.027316847674192717 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.6681614349775785, + "acc_stderr": 0.031602951437766785, + "acc_norm": 0.6681614349775785, + "acc_norm_stderr": 0.031602951437766785 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5801526717557252, + "acc_stderr": 0.04328577215262973, + "acc_norm": 0.5801526717557252, + "acc_norm_stderr": 0.04328577215262973 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7424242424242424, + "acc_stderr": 0.031156269519646847, + "acc_norm": 0.7424242424242424, + "acc_norm_stderr": 0.031156269519646847 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5310344827586206, + "acc_stderr": 0.04158632762097828, + "acc_norm": 0.5310344827586206, + "acc_norm_stderr": 0.04158632762097828 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3235294117647059, + "acc_stderr": 0.04655010411319617, + "acc_norm": 0.3235294117647059, + "acc_norm_stderr": 0.04655010411319617 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 
0.6218487394957983, + "acc_stderr": 0.03149930577784906, + "acc_norm": 0.6218487394957983, + "acc_norm_stderr": 0.03149930577784906 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.6205128205128205, + "acc_stderr": 0.024603626924097417, + "acc_norm": 0.6205128205128205, + "acc_norm_stderr": 0.024603626924097417 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.66, + "acc_stderr": 0.04760952285695237, + "acc_norm": 0.66, + "acc_norm_stderr": 0.04760952285695237 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6944444444444444, + "acc_stderr": 0.04453197507374984, + "acc_norm": 0.6944444444444444, + "acc_norm_stderr": 0.04453197507374984 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4236453201970443, + "acc_stderr": 0.03476725747649038, + "acc_norm": 0.4236453201970443, + "acc_norm_stderr": 0.03476725747649038 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6387096774193548, + "acc_stderr": 0.02732754844795755, + "acc_norm": 0.6387096774193548, + "acc_norm_stderr": 0.02732754844795755 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.8162393162393162, + "acc_stderr": 0.025372139671722933, + "acc_norm": 0.8162393162393162, + "acc_norm_stderr": 0.025372139671722933 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5622641509433962, + "acc_stderr": 0.030533338430467516, + "acc_norm": 0.5622641509433962, + "acc_norm_stderr": 0.030533338430467516 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.04607582090719976, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.04607582090719976 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3592592592592593, + "acc_stderr": 0.029252905927251976, + "acc_norm": 0.3592592592592593, + "acc_norm_stderr": 0.029252905927251976 + }, + 
"harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3443708609271523, + "acc_stderr": 0.038796870240733264, + "acc_norm": 0.3443708609271523, + "acc_norm_stderr": 0.038796870240733264 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.736318407960199, + "acc_stderr": 0.03115715086935557, + "acc_norm": 0.736318407960199, + "acc_norm_stderr": 0.03115715086935557 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5664739884393064, + "acc_stderr": 0.03778621079092055, + "acc_norm": 0.5664739884393064, + "acc_norm_stderr": 0.03778621079092055 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.4417989417989418, + "acc_stderr": 0.025576257061253833, + "acc_norm": 0.4417989417989418, + "acc_norm_stderr": 0.025576257061253833 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5902777777777778, + "acc_stderr": 0.04112490974670787, + "acc_norm": 0.5902777777777778, + "acc_norm_stderr": 0.04112490974670787 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.76, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.76, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.6069364161849711, + "acc_stderr": 0.026296227915613663, + "acc_norm": 0.6069364161849711, + "acc_norm_stderr": 0.026296227915613663 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5766871165644172, + "acc_stderr": 0.03881891213334384, + "acc_norm": 0.5766871165644172, + "acc_norm_stderr": 0.03881891213334384 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6728395061728395, + "acc_stderr": 0.026105673861409825, + "acc_norm": 0.6728395061728395, + "acc_norm_stderr": 0.026105673861409825 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.047609522856952344, + "acc_norm": 0.34, + "acc_norm_stderr": 0.047609522856952344 + }, + 
"harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7668393782383419, + "acc_stderr": 0.030516111371476008, + "acc_norm": 0.7668393782383419, + "acc_norm_stderr": 0.030516111371476008 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.47368421052631576, + "acc_stderr": 0.046970851366478626, + "acc_norm": 0.47368421052631576, + "acc_norm_stderr": 0.046970851366478626 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.7174311926605504, + "acc_stderr": 0.01930424349770715, + "acc_norm": 0.7174311926605504, + "acc_norm_stderr": 0.01930424349770715 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.0442626668137991, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.0442626668137991 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.6241830065359477, + "acc_stderr": 0.02773283435336394, + "acc_norm": 0.6241830065359477, + "acc_norm_stderr": 0.02773283435336394 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.71, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.71, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7520661157024794, + "acc_stderr": 0.039418975265163025, + "acc_norm": 0.7520661157024794, + "acc_norm_stderr": 0.039418975265163025 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.6447368421052632, + "acc_stderr": 0.03894734487013316, + "acc_norm": 0.6447368421052632, + "acc_norm_stderr": 0.03894734487013316 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5816993464052288, + "acc_stderr": 0.019955975145835542, + "acc_norm": 0.5816993464052288, + "acc_norm_stderr": 0.019955975145835542 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.4432624113475177, + "acc_stderr": 0.029634838473766006, + "acc_norm": 0.4432624113475177, + "acc_norm_stderr": 0.029634838473766006 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.41964285714285715, + "acc_stderr": 0.04684099321077106, + 
"acc_norm": 0.41964285714285715, + "acc_norm_stderr": 0.04684099321077106 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5092592592592593, + "acc_stderr": 0.034093869469927006, + "acc_norm": 0.5092592592592593, + "acc_norm_stderr": 0.034093869469927006 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.358659217877095, + "acc_stderr": 0.016040454426164478, + "acc_norm": 0.358659217877095, + "acc_norm_stderr": 0.016040454426164478 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.71, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.71, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5257352941176471, + "acc_stderr": 0.030332578094555033, + "acc_norm": 0.5257352941176471, + "acc_norm_stderr": 0.030332578094555033 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.636734693877551, + "acc_stderr": 0.030789051139030806, + "acc_norm": 0.636734693877551, + "acc_norm_stderr": 0.030789051139030806 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7257383966244726, + "acc_stderr": 0.029041333510598025, + "acc_norm": 0.7257383966244726, + "acc_norm_stderr": 0.029041333510598025 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.4491525423728814, + "acc_stderr": 0.012704030518851472, + "acc_norm": 0.4491525423728814, + "acc_norm_stderr": 0.012704030518851472 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6519607843137255, + "acc_stderr": 0.03343311240488418, + "acc_norm": 0.6519607843137255, + "acc_norm_stderr": 0.03343311240488418 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6606060606060606, + "acc_stderr": 0.03697442205031595, + "acc_norm": 0.6606060606060606, + "acc_norm_stderr": 0.03697442205031595 + }, + 
"harness|ko_truthfulqa_mc|0": { + "mc1": 0.7674418604651163, + "mc1_stderr": 0.014789157531080501, + "mc2": 0.8316057505832035, + "mc2_stderr": 0.012854315513557197 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5029515938606848, + "acc_stderr": 0.017190054580194694, + "acc_norm": 0.526564344746163, + "acc_norm_stderr": 0.017166075717577747 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "kihoonlee/stock-solar-10.7b-v1", + "model_sha": "db6ee3279fbb669f167ba8100f7d22c5fc9c4188", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/kihoonlee/stock-solar-10b/result_2024-05-08 00:15:38.json b/kihoonlee/stock-solar-10b/result_2024-05-08 00:15:38.json new file mode 100644 index 0000000000000000000000000000000000000000..80d7e6dc252ef3b5c35eb7492509c4c1b5aafa16 --- /dev/null +++ b/kihoonlee/stock-solar-10b/result_2024-05-08 00:15:38.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.7517064846416383, + "acc_stderr": 0.012624912868089753, + "acc_norm": 0.7832764505119454, + "acc_norm_stderr": 0.012040156713481189 + }, + "harness|ko_hellaswag|10": { + "acc": 0.7184823740290779, + "acc_stderr": 
0.0044882017566425965, + "acc_norm": 0.8105954989046007, + "acc_norm_stderr": 0.00391028811701514 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6900584795321637, + "acc_stderr": 0.035469769593931624, + "acc_norm": 0.6900584795321637, + "acc_norm_stderr": 0.035469769593931624 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.7766990291262136, + "acc_stderr": 0.04123553189891431, + "acc_norm": 0.7766990291262136, + "acc_norm_stderr": 0.04123553189891431 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6934865900383141, + "acc_stderr": 0.01648695289304151, + "acc_norm": 0.6934865900383141, + "acc_norm_stderr": 0.01648695289304151 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.5037037037037037, + "acc_stderr": 0.043192236258113324, + "acc_norm": 0.5037037037037037, + "acc_norm_stderr": 0.043192236258113324 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.35, + "acc_stderr": 0.04793724854411019, + "acc_norm": 0.35, + "acc_norm_stderr": 0.04793724854411019 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.502127659574468, + "acc_stderr": 0.03268572658667493, + "acc_norm": 0.502127659574468, + "acc_norm_stderr": 0.03268572658667493 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4819277108433735, + "acc_stderr": 0.03889951252827216, + "acc_norm": 0.4819277108433735, + "acc_norm_stderr": 0.03889951252827216 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6495176848874598, + "acc_stderr": 0.027098652621301744, + "acc_norm": 0.6495176848874598, + "acc_norm_stderr": 0.027098652621301744 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.6502242152466368, + "acc_stderr": 0.03200736719484503, + "acc_norm": 0.6502242152466368, + "acc_norm_stderr": 0.03200736719484503 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5801526717557252, + "acc_stderr": 0.04328577215262972, + "acc_norm": 0.5801526717557252, + "acc_norm_stderr": 0.04328577215262972 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.49, + "acc_stderr": 
0.05024183937956914, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956914 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7323232323232324, + "acc_stderr": 0.03154449888270286, + "acc_norm": 0.7323232323232324, + "acc_norm_stderr": 0.03154449888270286 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5103448275862069, + "acc_stderr": 0.04165774775728762, + "acc_norm": 0.5103448275862069, + "acc_norm_stderr": 0.04165774775728762 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3431372549019608, + "acc_stderr": 0.04724007352383888, + "acc_norm": 0.3431372549019608, + "acc_norm_stderr": 0.04724007352383888 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6302521008403361, + "acc_stderr": 0.03135709599613591, + "acc_norm": 0.6302521008403361, + "acc_norm_stderr": 0.03135709599613591 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.6282051282051282, + "acc_stderr": 0.02450347255711092, + "acc_norm": 0.6282051282051282, + "acc_norm_stderr": 0.02450347255711092 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.69, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.69, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.04605661864718381, + "acc_norm": 0.3, + "acc_norm_stderr": 0.04605661864718381 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6851851851851852, + "acc_stderr": 0.04489931073591312, + "acc_norm": 0.6851851851851852, + "acc_norm_stderr": 0.04489931073591312 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.034819048444388045, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.034819048444388045 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6290322580645161, + "acc_stderr": 0.027480541887953593, + "acc_norm": 0.6290322580645161, + "acc_norm_stderr": 0.027480541887953593 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 
0.8162393162393162, + "acc_stderr": 0.025372139671722933, + "acc_norm": 0.8162393162393162, + "acc_norm_stderr": 0.025372139671722933 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5886792452830188, + "acc_stderr": 0.030285009259009798, + "acc_norm": 0.5886792452830188, + "acc_norm_stderr": 0.030285009259009798 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.04607582090719976, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.04607582090719976 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.35555555555555557, + "acc_stderr": 0.029185714949857403, + "acc_norm": 0.35555555555555557, + "acc_norm_stderr": 0.029185714949857403 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33774834437086093, + "acc_stderr": 0.038615575462551684, + "acc_norm": 0.33774834437086093, + "acc_norm_stderr": 0.038615575462551684 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7313432835820896, + "acc_stderr": 0.031343283582089536, + "acc_norm": 0.7313432835820896, + "acc_norm_stderr": 0.031343283582089536 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5664739884393064, + "acc_stderr": 0.03778621079092055, + "acc_norm": 0.5664739884393064, + "acc_norm_stderr": 0.03778621079092055 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.43915343915343913, + "acc_stderr": 0.02555992055053101, + "acc_norm": 0.43915343915343913, + "acc_norm_stderr": 0.02555992055053101 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.6111111111111112, + "acc_stderr": 0.04076663253918567, + "acc_norm": 0.6111111111111112, + "acc_norm_stderr": 0.04076663253918567 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.77, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.77, + "acc_norm_stderr": 0.04229525846816506 + }, + 
"harness|ko_mmlu_moral_disputes|5": { + "acc": 0.6098265895953757, + "acc_stderr": 0.02626167760780665, + "acc_norm": 0.6098265895953757, + "acc_norm_stderr": 0.02626167760780665 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5766871165644172, + "acc_stderr": 0.03881891213334384, + "acc_norm": 0.5766871165644172, + "acc_norm_stderr": 0.03881891213334384 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6697530864197531, + "acc_stderr": 0.026168298456732846, + "acc_norm": 0.6697530864197531, + "acc_norm_stderr": 0.026168298456732846 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.04793724854411019, + "acc_norm": 0.35, + "acc_norm_stderr": 0.04793724854411019 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7564766839378239, + "acc_stderr": 0.030975436386845426, + "acc_norm": 0.7564766839378239, + "acc_norm_stderr": 0.030975436386845426 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.47368421052631576, + "acc_stderr": 0.046970851366478626, + "acc_norm": 0.47368421052631576, + "acc_norm_stderr": 0.046970851366478626 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.7229357798165138, + "acc_stderr": 0.01918848259016954, + "acc_norm": 0.7229357798165138, + "acc_norm_stderr": 0.01918848259016954 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4126984126984127, + "acc_stderr": 0.04403438954768177, + "acc_norm": 0.4126984126984127, + "acc_norm_stderr": 0.04403438954768177 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.6111111111111112, + "acc_stderr": 0.027914055510467998, + "acc_norm": 0.6111111111111112, + "acc_norm_stderr": 0.027914055510467998 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.73, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.73, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.768595041322314, + "acc_stderr": 0.03849856098794088, + "acc_norm": 0.768595041322314, + 
"acc_norm_stderr": 0.03849856098794088 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.6381578947368421, + "acc_stderr": 0.03910525752849725, + "acc_norm": 0.6381578947368421, + "acc_norm_stderr": 0.03910525752849725 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5915032679738562, + "acc_stderr": 0.019886221037501865, + "acc_norm": 0.5915032679738562, + "acc_norm_stderr": 0.019886221037501865 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.4432624113475177, + "acc_stderr": 0.029634838473766006, + "acc_norm": 0.4432624113475177, + "acc_norm_stderr": 0.029634838473766006 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.41964285714285715, + "acc_stderr": 0.04684099321077106, + "acc_norm": 0.41964285714285715, + "acc_norm_stderr": 0.04684099321077106 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5046296296296297, + "acc_stderr": 0.03409825519163572, + "acc_norm": 0.5046296296296297, + "acc_norm_stderr": 0.03409825519163572 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.3553072625698324, + "acc_stderr": 0.01600698993480319, + "acc_norm": 0.3553072625698324, + "acc_norm_stderr": 0.01600698993480319 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.72, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.72, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5220588235294118, + "acc_stderr": 0.03034326422421352, + "acc_norm": 0.5220588235294118, + "acc_norm_stderr": 0.03034326422421352 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.636734693877551, + "acc_stderr": 0.030789051139030802, + "acc_norm": 0.636734693877551, + "acc_norm_stderr": 0.030789051139030802 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7172995780590717, + 
"acc_stderr": 0.029312814153955924, + "acc_norm": 0.7172995780590717, + "acc_norm_stderr": 0.029312814153955924 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.4452411994784876, + "acc_stderr": 0.012693421303973294, + "acc_norm": 0.4452411994784876, + "acc_norm_stderr": 0.012693421303973294 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.033086111132364364, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.033086111132364364 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6606060606060606, + "acc_stderr": 0.03697442205031595, + "acc_norm": 0.6606060606060606, + "acc_norm_stderr": 0.03697442205031595 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.7613219094247246, + "mc1_stderr": 0.014922629695456416, + "mc2": 0.8243270043105309, + "mc2_stderr": 0.013025812401942408 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.512396694214876, + "acc_stderr": 0.01718506973267654, + "acc_norm": 0.5419126328217237, + "acc_norm_stderr": 0.017129852117911144 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + 
"harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "kihoonlee/stock-solar-10b", + "model_sha": "d0fd28f9c50dec8e908fbf0424b6b9bc4a780222", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, 
+ "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/kimdeokgi/ko-pt-model-test1/result_2024-06-05 01:21:11.json b/kimdeokgi/ko-pt-model-test1/result_2024-06-05 01:21:11.json new file mode 100644 index 0000000000000000000000000000000000000000..d918721191760989caa3655f62b9f7f08cdcec65 --- /dev/null +++ b/kimdeokgi/ko-pt-model-test1/result_2024-06-05 01:21:11.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4104095563139932, + "acc_stderr": 0.014374922192642666, + "acc_norm": 0.4598976109215017, + "acc_norm_stderr": 0.01456431885692485 + }, + "harness|ko_hellaswag|10": { + "acc": 0.41047600079665403, + "acc_stderr": 0.004909148239488292, + "acc_norm": 0.5488946425014938, + "acc_norm_stderr": 0.004965866098318175 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5730994152046783, + "acc_stderr": 0.03793620616529917, + "acc_norm": 0.5730994152046783, + "acc_norm_stderr": 0.03793620616529917 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6407766990291263, + "acc_stderr": 0.04750458399041697, + "acc_norm": 0.6407766990291263, + "acc_norm_stderr": 0.04750458399041697 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5440613026819924, + "acc_stderr": 0.017810403925435345, + "acc_norm": 0.5440613026819924, + "acc_norm_stderr": 0.017810403925435345 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45925925925925926, + "acc_stderr": 0.04304979692464244, + "acc_norm": 0.45925925925925926, + "acc_norm_stderr": 0.04304979692464244 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.46808510638297873, + "acc_stderr": 0.03261936918467382, + "acc_norm": 0.46808510638297873, + "acc_norm_stderr": 0.03261936918467382 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4457831325301205, + "acc_stderr": 
0.038695433234721015, + "acc_norm": 0.4457831325301205, + "acc_norm_stderr": 0.038695433234721015 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5241157556270096, + "acc_stderr": 0.028365041542564584, + "acc_norm": 0.5241157556270096, + "acc_norm_stderr": 0.028365041542564584 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.452914798206278, + "acc_stderr": 0.03340867501923324, + "acc_norm": 0.452914798206278, + "acc_norm_stderr": 0.03340867501923324 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5877862595419847, + "acc_stderr": 0.04317171194870254, + "acc_norm": 0.5877862595419847, + "acc_norm_stderr": 0.04317171194870254 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6313131313131313, + "acc_stderr": 0.034373055019806184, + "acc_norm": 0.6313131313131313, + "acc_norm_stderr": 0.034373055019806184 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5517241379310345, + "acc_stderr": 0.04144311810878152, + "acc_norm": 0.5517241379310345, + "acc_norm_stderr": 0.04144311810878152 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.043364327079931785, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.043364327079931785 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4957983193277311, + "acc_stderr": 0.0324773433444811, + "acc_norm": 0.4957983193277311, + "acc_norm_stderr": 0.0324773433444811 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.48205128205128206, + "acc_stderr": 0.02533466708095497, + "acc_norm": 0.48205128205128206, + "acc_norm_stderr": 0.02533466708095497 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 
0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.04803752235190192, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.04803752235190192 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4187192118226601, + "acc_stderr": 0.034711928605184676, + "acc_norm": 0.4187192118226601, + "acc_norm_stderr": 0.034711928605184676 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4774193548387097, + "acc_stderr": 0.028414985019707868, + "acc_norm": 0.4774193548387097, + "acc_norm_stderr": 0.028414985019707868 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7350427350427351, + "acc_stderr": 0.028911208802749472, + "acc_norm": 0.7350427350427351, + "acc_norm_stderr": 0.028911208802749472 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.46037735849056605, + "acc_stderr": 0.03067609659938918, + "acc_norm": 0.46037735849056605, + "acc_norm_stderr": 0.03067609659938918 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5727272727272728, + "acc_stderr": 0.04738198703545483, + "acc_norm": 0.5727272727272728, + "acc_norm_stderr": 0.04738198703545483 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3296296296296296, + "acc_stderr": 0.02866120111652459, + "acc_norm": 0.3296296296296296, + "acc_norm_stderr": 0.02866120111652459 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2781456953642384, + "acc_stderr": 0.03658603262763744, + "acc_norm": 0.2781456953642384, + "acc_norm_stderr": 0.03658603262763744 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6567164179104478, + "acc_stderr": 0.03357379665433431, + "acc_norm": 0.6567164179104478, + "acc_norm_stderr": 0.03357379665433431 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.43352601156069365, + "acc_stderr": 0.03778621079092056, + "acc_norm": 0.43352601156069365, + "acc_norm_stderr": 0.03778621079092056 + 
}, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.328042328042328, + "acc_stderr": 0.024180497164376907, + "acc_norm": 0.328042328042328, + "acc_norm_stderr": 0.024180497164376907 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4305555555555556, + "acc_stderr": 0.041406856391115014, + "acc_norm": 0.4305555555555556, + "acc_norm_stderr": 0.041406856391115014 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.67, + "acc_stderr": 0.047258156262526066, + "acc_norm": 0.67, + "acc_norm_stderr": 0.047258156262526066 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5375722543352601, + "acc_stderr": 0.026842985519615375, + "acc_norm": 0.5375722543352601, + "acc_norm_stderr": 0.026842985519615375 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5030674846625767, + "acc_stderr": 0.03928297078179663, + "acc_norm": 0.5030674846625767, + "acc_norm_stderr": 0.03928297078179663 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.47530864197530864, + "acc_stderr": 0.02778680093142745, + "acc_norm": 0.47530864197530864, + "acc_norm_stderr": 0.02778680093142745 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5906735751295337, + "acc_stderr": 0.03548608168860806, + "acc_norm": 0.5906735751295337, + "acc_norm_stderr": 0.03548608168860806 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.04227054451232199, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.04227054451232199 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5889908256880734, + "acc_stderr": 0.02109505068727765, + "acc_norm": 0.5889908256880734, + "acc_norm_stderr": 
0.02109505068727765 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.042163702135578345, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.042163702135578345 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5294117647058824, + "acc_stderr": 0.028580341065138303, + "acc_norm": 0.5294117647058824, + "acc_norm_stderr": 0.028580341065138303 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6611570247933884, + "acc_stderr": 0.04320767807536669, + "acc_norm": 0.6611570247933884, + "acc_norm_stderr": 0.04320767807536669 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.506578947368421, + "acc_stderr": 0.040685900502249704, + "acc_norm": 0.506578947368421, + "acc_norm_stderr": 0.040685900502249704 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4264705882352941, + "acc_stderr": 0.02000791273935936, + "acc_norm": 0.4264705882352941, + "acc_norm_stderr": 0.02000791273935936 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.35815602836879434, + "acc_stderr": 0.028602085862759415, + "acc_norm": 0.35815602836879434, + "acc_norm_stderr": 0.028602085862759415 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.39285714285714285, + "acc_stderr": 0.04635550135609976, + "acc_norm": 0.39285714285714285, + "acc_norm_stderr": 0.04635550135609976 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.375, + "acc_stderr": 0.033016908987210894, + "acc_norm": 0.375, + "acc_norm_stderr": 0.033016908987210894 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.3016759776536313, + "acc_stderr": 0.015350767572220285, + "acc_norm": 0.3016759776536313, + "acc_norm_stderr": 0.015350767572220285 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.04960449637488583, + "acc_norm": 0.42, + 
"acc_norm_stderr": 0.04960449637488583 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.39705882352941174, + "acc_stderr": 0.029722152099280055, + "acc_norm": 0.39705882352941174, + "acc_norm_stderr": 0.029722152099280055 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5591836734693878, + "acc_stderr": 0.03178419114175363, + "acc_norm": 0.5591836734693878, + "acc_norm_stderr": 0.03178419114175363 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6329113924050633, + "acc_stderr": 0.03137624072561619, + "acc_norm": 0.6329113924050633, + "acc_norm_stderr": 0.03137624072561619 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3428943937418514, + "acc_stderr": 0.012123463271585897, + "acc_norm": 0.3428943937418514, + "acc_norm_stderr": 0.012123463271585897 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5490196078431373, + "acc_stderr": 0.034924061041636124, + "acc_norm": 0.5490196078431373, + "acc_norm_stderr": 0.034924061041636124 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5878787878787879, + "acc_stderr": 0.03843566993588717, + "acc_norm": 0.5878787878787879, + "acc_norm_stderr": 0.03843566993588717 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.29253365973072215, + "mc1_stderr": 0.015925597445286165, + "mc2": 0.44782372526014474, + "mc2_stderr": 0.015380769346979286 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4946871310507674, + "acc_stderr": 0.01718938362722969, + "acc_norm": 0.5997638724911453, + "acc_norm_stderr": 0.01684469351050505 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + 
"harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + 
"harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "kimdeokgi/ko-pt-model-test1", + "model_sha": "17bf0b4d8bd7b7c67f7703dbc3c631a1505da2d1", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/kimjaewon/open-llama-2-ko-7b-kullm/result_2024-01-02 05:49:39.json b/kimjaewon/open-llama-2-ko-7b-kullm/result_2024-01-02 05:49:39.json new file mode 100644 index 0000000000000000000000000000000000000000..9181dc6596ba8fb1b7704b52a5b348ffb7be2612 --- /dev/null +++ b/kimjaewon/open-llama-2-ko-7b-kullm/result_2024-01-02 05:49:39.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.25170648464163825, + "acc_stderr": 0.012682496334042967, + "acc_norm": 0.302901023890785, + "acc_norm_stderr": 0.013428241573185349 + }, + "harness|ko_hellaswag|10": { + "acc": 0.31428002389962156, + "acc_stderr": 0.0046327973752897725, + "acc_norm": 0.37502489543915557, + "acc_norm_stderr": 0.004831399218500231 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.29239766081871343, + "acc_stderr": 0.034886477134579236, + "acc_norm": 0.29239766081871343, + "acc_norm_stderr": 0.034886477134579236 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.1941747572815534, + "acc_stderr": 0.03916667762822584, + "acc_norm": 0.1941747572815534, + "acc_norm_stderr": 0.03916667762822584 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2707535121328225, + "acc_stderr": 
0.015889888362560486, + "acc_norm": 0.2707535121328225, + "acc_norm_stderr": 0.015889888362560486 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04072314811876837, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04072314811876837 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.20425531914893616, + "acc_stderr": 0.02635515841334941, + "acc_norm": 0.20425531914893616, + "acc_norm_stderr": 0.02635515841334941 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.21686746987951808, + "acc_stderr": 0.03208284450356365, + "acc_norm": 0.21686746987951808, + "acc_norm_stderr": 0.03208284450356365 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2990353697749196, + "acc_stderr": 0.02600330111788514, + "acc_norm": 0.2990353697749196, + "acc_norm_stderr": 0.02600330111788514 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.2062780269058296, + "acc_stderr": 0.027157150479563824, + "acc_norm": 0.2062780269058296, + "acc_norm_stderr": 0.027157150479563824 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2366412213740458, + "acc_stderr": 0.03727673575596917, + "acc_norm": 0.2366412213740458, + "acc_norm_stderr": 0.03727673575596917 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036845, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036845 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.25252525252525254, + "acc_stderr": 0.030954055470365907, + "acc_norm": 0.25252525252525254, + "acc_norm_stderr": 0.030954055470365907 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2827586206896552, + "acc_stderr": 0.03752833958003337, + "acc_norm": 0.2827586206896552, + "acc_norm_stderr": 0.03752833958003337 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + 
"acc_stderr": 0.04092563958237656, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237656 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.21008403361344538, + "acc_stderr": 0.026461398717471874, + "acc_norm": 0.21008403361344538, + "acc_norm_stderr": 0.026461398717471874 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2128205128205128, + "acc_stderr": 0.020752423722128002, + "acc_norm": 0.2128205128205128, + "acc_norm_stderr": 0.020752423722128002 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.23148148148148148, + "acc_stderr": 0.04077494709252627, + "acc_norm": 0.23148148148148148, + "acc_norm_stderr": 0.04077494709252627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3054187192118227, + "acc_stderr": 0.03240661565868408, + "acc_norm": 0.3054187192118227, + "acc_norm_stderr": 0.03240661565868408 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.25161290322580643, + "acc_stderr": 0.024685979286239956, + "acc_norm": 0.25161290322580643, + "acc_norm_stderr": 0.024685979286239956 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2564102564102564, + "acc_stderr": 0.028605953702004253, + "acc_norm": 0.2564102564102564, + "acc_norm_stderr": 0.028605953702004253 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2188679245283019, + "acc_stderr": 0.025447863825108597, + "acc_norm": 0.2188679245283019, + "acc_norm_stderr": 0.025447863825108597 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.20909090909090908, + "acc_stderr": 0.03895091015724135, + "acc_norm": 0.20909090909090908, + "acc_norm_stderr": 0.03895091015724135 + }, + 
"harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.026842057873833706, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.026842057873833706 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.271523178807947, + "acc_stderr": 0.03631329803969653, + "acc_norm": 0.271523178807947, + "acc_norm_stderr": 0.03631329803969653 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.24875621890547264, + "acc_stderr": 0.030567675938916714, + "acc_norm": 0.24875621890547264, + "acc_norm_stderr": 0.030567675938916714 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.24277456647398843, + "acc_stderr": 0.0326926380614177, + "acc_norm": 0.24277456647398843, + "acc_norm_stderr": 0.0326926380614177 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2671957671957672, + "acc_stderr": 0.02278967314577657, + "acc_norm": 0.2671957671957672, + "acc_norm_stderr": 0.02278967314577657 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2569444444444444, + "acc_stderr": 0.03653946969442099, + "acc_norm": 0.2569444444444444, + "acc_norm_stderr": 0.03653946969442099 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.19, + "acc_stderr": 0.039427724440366234, + "acc_norm": 0.19, + "acc_norm_stderr": 0.039427724440366234 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.28901734104046245, + "acc_stderr": 0.024405173935783234, + "acc_norm": 0.28901734104046245, + "acc_norm_stderr": 0.024405173935783234 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3006134969325153, + "acc_stderr": 0.03602511318806771, + "acc_norm": 0.3006134969325153, + "acc_norm_stderr": 0.03602511318806771 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2932098765432099, + "acc_stderr": 0.025329888171900926, + "acc_norm": 0.2932098765432099, + 
"acc_norm_stderr": 0.025329888171900926 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.22797927461139897, + "acc_stderr": 0.030276909945178256, + "acc_norm": 0.22797927461139897, + "acc_norm_stderr": 0.030276909945178256 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.04049339297748141, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.04049339297748141 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.22201834862385322, + "acc_stderr": 0.017818849564796617, + "acc_norm": 0.22201834862385322, + "acc_norm_stderr": 0.017818849564796617 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.15079365079365079, + "acc_stderr": 0.03200686497287392, + "acc_norm": 0.15079365079365079, + "acc_norm_stderr": 0.03200686497287392 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.25163398692810457, + "acc_stderr": 0.0248480182638752, + "acc_norm": 0.25163398692810457, + "acc_norm_stderr": 0.0248480182638752 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.22, + "acc_stderr": 0.041633319989322674, + "acc_norm": 0.22, + "acc_norm_stderr": 0.041633319989322674 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.371900826446281, + "acc_stderr": 0.04412015806624503, + "acc_norm": 0.371900826446281, + "acc_norm_stderr": 0.04412015806624503 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3026315789473684, + "acc_stderr": 0.03738520676119668, + "acc_norm": 0.3026315789473684, + "acc_norm_stderr": 0.03738520676119668 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2761437908496732, + "acc_stderr": 0.018087276935663133, + "acc_norm": 0.2761437908496732, + "acc_norm_stderr": 0.018087276935663133 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2695035460992908, + "acc_stderr": 
0.026469036818590634, + "acc_norm": 0.2695035460992908, + "acc_norm_stderr": 0.026469036818590634 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25, + "acc_stderr": 0.04109974682633932, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04109974682633932 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.21296296296296297, + "acc_stderr": 0.027920963147993662, + "acc_norm": 0.21296296296296297, + "acc_norm_stderr": 0.027920963147993662 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24692737430167597, + "acc_stderr": 0.014422292204808852, + "acc_norm": 0.24692737430167597, + "acc_norm_stderr": 0.014422292204808852 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.16544117647058823, + "acc_stderr": 0.02257177102549475, + "acc_norm": 0.16544117647058823, + "acc_norm_stderr": 0.02257177102549475 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.24081632653061225, + "acc_stderr": 0.027372942201788167, + "acc_norm": 0.24081632653061225, + "acc_norm_stderr": 0.027372942201788167 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.26582278481012656, + "acc_stderr": 0.028756799629658332, + "acc_norm": 0.26582278481012656, + "acc_norm_stderr": 0.028756799629658332 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.26988265971316816, + "acc_stderr": 0.011337381084250397, + "acc_norm": 0.26988265971316816, + "acc_norm_stderr": 0.011337381084250397 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.22058823529411764, + "acc_stderr": 0.029102254389674082, + "acc_norm": 0.22058823529411764, + "acc_norm_stderr": 0.029102254389674082 + }, + 
"harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.296969696969697, + "acc_stderr": 0.03567969772268049, + "acc_norm": 0.296969696969697, + "acc_norm_stderr": 0.03567969772268049 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.25458996328029376, + "mc1_stderr": 0.015250117079156463, + "mc2": 0.42110592575615813, + "mc2_stderr": 0.015553936818344535 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.30814639905548996, + "acc_stderr": 0.015874515156298393, + "acc_norm": 0.43565525383707204, + "acc_norm_stderr": 0.017047415229476344 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + 
"harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "kimjaewon/open-llama-2-ko-7b-kullm", + "model_sha": "1016ff0f1ed12d46e82bbc4945e5e58b4e1aa0d0", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/kimwooglae/AISquare-Instruct-SOLAR-10.7b-v0.5.31/result_2024-01-22 00:34:59.json b/kimwooglae/AISquare-Instruct-SOLAR-10.7b-v0.5.31/result_2024-01-22 00:34:59.json new file mode 100644 index 0000000000000000000000000000000000000000..341504c4582fc945f9569f2231b1646a7c4149ad --- /dev/null +++ b/kimwooglae/AISquare-Instruct-SOLAR-10.7b-v0.5.31/result_2024-01-22 00:34:59.json @@ -0,0 +1,444 @@ 
+{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3174061433447099, + "acc_stderr": 0.01360223908803817, + "acc_norm": 0.37542662116040953, + "acc_norm_stderr": 0.01415063143511173 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3655646285600478, + "acc_stderr": 0.004806039039008963, + "acc_norm": 0.48157737502489545, + "acc_norm_stderr": 0.0049863932662691625 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5029239766081871, + "acc_stderr": 0.03834759370936839, + "acc_norm": 0.5029239766081871, + "acc_norm_stderr": 0.03834759370936839 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.42718446601941745, + "acc_stderr": 0.04897957737781168, + "acc_norm": 0.42718446601941745, + "acc_norm_stderr": 0.04897957737781168 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5006385696040868, + "acc_stderr": 0.01787994891443169, + "acc_norm": 0.5006385696040868, + "acc_norm_stderr": 0.01787994891443169 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.43703703703703706, + "acc_stderr": 0.04284958639753399, + "acc_norm": 0.43703703703703706, + "acc_norm_stderr": 0.04284958639753399 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.34893617021276596, + "acc_stderr": 0.03115852213135778, + "acc_norm": 0.34893617021276596, + "acc_norm_stderr": 0.03115852213135778 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4036144578313253, + "acc_stderr": 0.03819486140758398, + "acc_norm": 0.4036144578313253, + "acc_norm_stderr": 0.03819486140758398 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4180064308681672, + "acc_stderr": 0.02801365189199507, + "acc_norm": 0.4180064308681672, + "acc_norm_stderr": 0.02801365189199507 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4663677130044843, + "acc_stderr": 0.03348180017060306, + "acc_norm": 0.4663677130044843, + "acc_norm_stderr": 
0.03348180017060306 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4122137404580153, + "acc_stderr": 0.04317171194870254, + "acc_norm": 0.4122137404580153, + "acc_norm_stderr": 0.04317171194870254 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3939393939393939, + "acc_stderr": 0.03481285338232963, + "acc_norm": 0.3939393939393939, + "acc_norm_stderr": 0.03481285338232963 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.43448275862068964, + "acc_stderr": 0.04130740879555497, + "acc_norm": 0.43448275862068964, + "acc_norm_stderr": 0.04130740879555497 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.17647058823529413, + "acc_stderr": 0.03793281185307809, + "acc_norm": 0.17647058823529413, + "acc_norm_stderr": 0.03793281185307809 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3949579831932773, + "acc_stderr": 0.031753678460966245, + "acc_norm": 0.3949579831932773, + "acc_norm_stderr": 0.031753678460966245 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3564102564102564, + "acc_stderr": 0.024283140529467298, + "acc_norm": 0.3564102564102564, + "acc_norm_stderr": 0.024283140529467298 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.52, + "acc_stderr": 0.05021167315686779, + "acc_norm": 0.52, + "acc_norm_stderr": 0.05021167315686779 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.04826217294139894, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.04826217294139894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.29064039408866993, + "acc_stderr": 0.0319474007226554, + "acc_norm": 0.29064039408866993, 
+ "acc_norm_stderr": 0.0319474007226554 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3870967741935484, + "acc_stderr": 0.027709359675032488, + "acc_norm": 0.3870967741935484, + "acc_norm_stderr": 0.027709359675032488 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6068376068376068, + "acc_stderr": 0.03199957924651048, + "acc_norm": 0.6068376068376068, + "acc_norm_stderr": 0.03199957924651048 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.37358490566037733, + "acc_stderr": 0.029773082713319875, + "acc_norm": 0.37358490566037733, + "acc_norm_stderr": 0.029773082713319875 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.42727272727272725, + "acc_stderr": 0.04738198703545483, + "acc_norm": 0.42727272727272725, + "acc_norm_stderr": 0.04738198703545483 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3296296296296296, + "acc_stderr": 0.028661201116524575, + "acc_norm": 0.3296296296296296, + "acc_norm_stderr": 0.028661201116524575 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.25165562913907286, + "acc_stderr": 0.035433042343899844, + "acc_norm": 0.25165562913907286, + "acc_norm_stderr": 0.035433042343899844 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5124378109452736, + "acc_stderr": 0.0353443984853958, + "acc_norm": 0.5124378109452736, + "acc_norm_stderr": 0.0353443984853958 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.32947976878612717, + "acc_stderr": 0.03583901754736412, + "acc_norm": 0.32947976878612717, + "acc_norm_stderr": 0.03583901754736412 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.38095238095238093, + "acc_stderr": 0.025010749116137602, + "acc_norm": 0.38095238095238093, + "acc_norm_stderr": 0.025010749116137602 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3958333333333333, + "acc_stderr": 0.04089465449325583, + "acc_norm": 0.3958333333333333, + "acc_norm_stderr": 0.04089465449325583 + }, + "harness|ko_mmlu_college_chemistry|5": { + 
"acc": 0.23, + "acc_stderr": 0.04229525846816507, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816507 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.476878612716763, + "acc_stderr": 0.026890297881303118, + "acc_norm": 0.476878612716763, + "acc_norm_stderr": 0.026890297881303118 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4294478527607362, + "acc_stderr": 0.038890666191127216, + "acc_norm": 0.4294478527607362, + "acc_norm_stderr": 0.038890666191127216 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4506172839506173, + "acc_stderr": 0.02768472141565621, + "acc_norm": 0.4506172839506173, + "acc_norm_stderr": 0.02768472141565621 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.42487046632124353, + "acc_stderr": 0.035674713352125395, + "acc_norm": 0.42487046632124353, + "acc_norm_stderr": 0.035674713352125395 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.34210526315789475, + "acc_stderr": 0.04462917535336937, + "acc_norm": 0.34210526315789475, + "acc_norm_stderr": 0.04462917535336937 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3908256880733945, + "acc_stderr": 0.02092005834611106, + "acc_norm": 0.3908256880733945, + "acc_norm_stderr": 0.02092005834611106 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3253968253968254, + "acc_stderr": 0.041905964388711366, + "acc_norm": 0.3253968253968254, + "acc_norm_stderr": 0.041905964388711366 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.39869281045751637, + "acc_stderr": 0.028036092273891762, + "acc_norm": 0.39869281045751637, + "acc_norm_stderr": 0.028036092273891762 + }, + "harness|ko_mmlu_business_ethics|5": 
{ + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.043913262867240704, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.043913262867240704 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3618421052631579, + "acc_stderr": 0.039105257528497236, + "acc_norm": 0.3618421052631579, + "acc_norm_stderr": 0.039105257528497236 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.39705882352941174, + "acc_stderr": 0.019794488900024106, + "acc_norm": 0.39705882352941174, + "acc_norm_stderr": 0.019794488900024106 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.35815602836879434, + "acc_stderr": 0.02860208586275941, + "acc_norm": 0.35815602836879434, + "acc_norm_stderr": 0.02860208586275941 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.41964285714285715, + "acc_stderr": 0.04684099321077106, + "acc_norm": 0.41964285714285715, + "acc_norm_stderr": 0.04684099321077106 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2361111111111111, + "acc_stderr": 0.028963702570791033, + "acc_norm": 0.2361111111111111, + "acc_norm_stderr": 0.028963702570791033 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.25139664804469275, + "acc_stderr": 0.014508979453553974, + "acc_norm": 0.25139664804469275, + "acc_norm_stderr": 0.014508979453553974 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.52, + "acc_stderr": 0.05021167315686779, + "acc_norm": 0.52, + "acc_norm_stderr": 0.05021167315686779 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.27941176470588236, + "acc_stderr": 0.027257202606114944, + "acc_norm": 0.27941176470588236, + "acc_norm_stderr": 
0.027257202606114944 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.30612244897959184, + "acc_stderr": 0.029504896454595968, + "acc_norm": 0.30612244897959184, + "acc_norm_stderr": 0.029504896454595968 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5611814345991561, + "acc_stderr": 0.032302649315470375, + "acc_norm": 0.5611814345991561, + "acc_norm_stderr": 0.032302649315470375 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3344198174706649, + "acc_stderr": 0.012049668983214943, + "acc_norm": 0.3344198174706649, + "acc_norm_stderr": 0.012049668983214943 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.47549019607843135, + "acc_stderr": 0.035050931943487976, + "acc_norm": 0.47549019607843135, + "acc_norm_stderr": 0.035050931943487976 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4484848484848485, + "acc_stderr": 0.038835659779569286, + "acc_norm": 0.4484848484848485, + "acc_norm_stderr": 0.038835659779569286 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2802937576499388, + "mc1_stderr": 0.015723139524608735, + "mc2": 0.4379552746293795, + "mc2_stderr": 0.016153334478802912 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3860684769775679, + "acc_stderr": 0.016738130760321747, + "acc_norm": 0.47107438016528924, + "acc_norm_stderr": 0.01716156394991635 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + 
"harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + 
"harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "kimwooglae/AISquare-Instruct-SOLAR-10.7b-v0.5.31", + "model_sha": "a8ef130719aa323afa1fec4ce4ebb9236a1d57a0", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/kimwooglae/AISquare-Instruct-SOLAR-10.7b-v0.5.32/result_2024-01-22 03:18:43.json b/kimwooglae/AISquare-Instruct-SOLAR-10.7b-v0.5.32/result_2024-01-22 03:18:43.json new file mode 100644 index 0000000000000000000000000000000000000000..e9c41b058913083573f9a001acb6ce8da927dc27 --- /dev/null +++ b/kimwooglae/AISquare-Instruct-SOLAR-10.7b-v0.5.32/result_2024-01-22 03:18:43.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3165529010238908, + "acc_stderr": 0.013592431519068084, + "acc_norm": 0.37627986348122866, + "acc_norm_stderr": 0.014157022555407158 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3676558454491137, + "acc_stderr": 0.00481181595938883, + "acc_norm": 0.4814777932682733, + "acc_norm_stderr": 0.004986356526063971 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5087719298245614, + "acc_stderr": 0.03834234744164993, + "acc_norm": 0.5087719298245614, + "acc_norm_stderr": 0.03834234744164993 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5631067961165048, + "acc_stderr": 0.04911147107365777, + "acc_norm": 0.5631067961165048, + "acc_norm_stderr": 0.04911147107365777 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5644955300127714, + "acc_stderr": 0.01773058992792658, + "acc_norm": 0.5644955300127714, + "acc_norm_stderr": 0.01773058992792658 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.43703703703703706, + "acc_stderr": 0.04284958639753399, + "acc_norm": 0.43703703703703706, + "acc_norm_stderr": 0.04284958639753399 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.34, + 
"acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3702127659574468, + "acc_stderr": 0.031565646822367836, + "acc_norm": 0.3702127659574468, + "acc_norm_stderr": 0.031565646822367836 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39156626506024095, + "acc_stderr": 0.037998574544796354, + "acc_norm": 0.39156626506024095, + "acc_norm_stderr": 0.037998574544796354 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5337620578778135, + "acc_stderr": 0.028333277109562807, + "acc_norm": 0.5337620578778135, + "acc_norm_stderr": 0.028333277109562807 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.484304932735426, + "acc_stderr": 0.0335412657542081, + "acc_norm": 0.484304932735426, + "acc_norm_stderr": 0.0335412657542081 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.45038167938931295, + "acc_stderr": 0.04363643698524779, + "acc_norm": 0.45038167938931295, + "acc_norm_stderr": 0.04363643698524779 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5909090909090909, + "acc_stderr": 0.03502975799413007, + "acc_norm": 0.5909090909090909, + "acc_norm_stderr": 0.03502975799413007 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4206896551724138, + "acc_stderr": 0.0411391498118926, + "acc_norm": 0.4206896551724138, + "acc_norm_stderr": 0.0411391498118926 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.04389869956808778, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.04389869956808778 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.48739495798319327, + "acc_stderr": 0.03246816765752174, + "acc_norm": 0.48739495798319327, + "acc_norm_stderr": 0.03246816765752174 + }, + 
"harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.025294608023986472, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.025294608023986472 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5648148148148148, + "acc_stderr": 0.04792898170907062, + "acc_norm": 0.5648148148148148, + "acc_norm_stderr": 0.04792898170907062 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4187192118226601, + "acc_stderr": 0.03471192860518468, + "acc_norm": 0.4187192118226601, + "acc_norm_stderr": 0.03471192860518468 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5161290322580645, + "acc_stderr": 0.028429203176724555, + "acc_norm": 0.5161290322580645, + "acc_norm_stderr": 0.028429203176724555 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7435897435897436, + "acc_stderr": 0.028605953702004236, + "acc_norm": 0.7435897435897436, + "acc_norm_stderr": 0.028605953702004236 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4490566037735849, + "acc_stderr": 0.030612730713641092, + "acc_norm": 0.4490566037735849, + "acc_norm_stderr": 0.030612730713641092 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4636363636363636, + "acc_stderr": 0.047764491623961985, + "acc_norm": 0.4636363636363636, + "acc_norm_stderr": 0.047764491623961985 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3037037037037037, + "acc_stderr": 0.028037929969115, + "acc_norm": 0.3037037037037037, + "acc_norm_stderr": 0.028037929969115 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3576158940397351, + "acc_stderr": 0.03913453431177258, + "acc_norm": 0.3576158940397351, + 
"acc_norm_stderr": 0.03913453431177258 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5621890547263682, + "acc_stderr": 0.0350808011219984, + "acc_norm": 0.5621890547263682, + "acc_norm_stderr": 0.0350808011219984 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4913294797687861, + "acc_stderr": 0.038118909889404126, + "acc_norm": 0.4913294797687861, + "acc_norm_stderr": 0.038118909889404126 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.38095238095238093, + "acc_stderr": 0.025010749116137602, + "acc_norm": 0.38095238095238093, + "acc_norm_stderr": 0.025010749116137602 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4375, + "acc_stderr": 0.04148415739394154, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.04148415739394154 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.49710982658959535, + "acc_stderr": 0.026918645383239004, + "acc_norm": 0.49710982658959535, + "acc_norm_stderr": 0.026918645383239004 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5214723926380368, + "acc_stderr": 0.0392474687675113, + "acc_norm": 0.5214723926380368, + "acc_norm_stderr": 0.0392474687675113 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.027815973433878014, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.027815973433878014 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6269430051813472, + "acc_stderr": 0.03490205592048574, + "acc_norm": 0.6269430051813472, + 
"acc_norm_stderr": 0.03490205592048574 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3157894736842105, + "acc_stderr": 0.04372748290278008, + "acc_norm": 0.3157894736842105, + "acc_norm_stderr": 0.04372748290278008 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5688073394495413, + "acc_stderr": 0.021233365030319563, + "acc_norm": 0.5688073394495413, + "acc_norm_stderr": 0.021233365030319563 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.373015873015873, + "acc_stderr": 0.04325506042017086, + "acc_norm": 0.373015873015873, + "acc_norm_stderr": 0.04325506042017086 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4542483660130719, + "acc_stderr": 0.02850980780262657, + "acc_norm": 0.4542483660130719, + "acc_norm_stderr": 0.02850980780262657 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6859504132231405, + "acc_stderr": 0.042369647530410184, + "acc_norm": 0.6859504132231405, + "acc_norm_stderr": 0.042369647530410184 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5328947368421053, + "acc_stderr": 0.04060127035236397, + "acc_norm": 0.5328947368421053, + "acc_norm_stderr": 0.04060127035236397 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4068627450980392, + "acc_stderr": 0.019873802005061177, + "acc_norm": 0.4068627450980392, + "acc_norm_stderr": 0.019873802005061177 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.36524822695035464, + "acc_stderr": 0.028723863853281285, + "acc_norm": 0.36524822695035464, + "acc_norm_stderr": 0.028723863853281285 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.33035714285714285, + "acc_stderr": 0.04464285714285712, + "acc_norm": 0.33035714285714285, + "acc_norm_stderr": 0.04464285714285712 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.41203703703703703, + "acc_stderr": 
0.03356787758160834, + "acc_norm": 0.41203703703703703, + "acc_norm_stderr": 0.03356787758160834 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.3005586592178771, + "acc_stderr": 0.015334566806251167, + "acc_norm": 0.3005586592178771, + "acc_norm_stderr": 0.015334566806251167 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.63, + "acc_stderr": 0.048523658709390974, + "acc_norm": 0.63, + "acc_norm_stderr": 0.048523658709390974 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.41911764705882354, + "acc_stderr": 0.02997280717046463, + "acc_norm": 0.41911764705882354, + "acc_norm_stderr": 0.02997280717046463 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.40816326530612246, + "acc_stderr": 0.03146465712827423, + "acc_norm": 0.40816326530612246, + "acc_norm_stderr": 0.03146465712827423 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6919831223628692, + "acc_stderr": 0.0300523893356057, + "acc_norm": 0.6919831223628692, + "acc_norm_stderr": 0.0300523893356057 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.38070404172099087, + "acc_stderr": 0.012401430654645891, + "acc_norm": 0.38070404172099087, + "acc_norm_stderr": 0.012401430654645891 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5343137254901961, + "acc_stderr": 0.03501038327635897, + "acc_norm": 0.5343137254901961, + "acc_norm_stderr": 0.03501038327635897 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6424242424242425, + "acc_stderr": 0.037425970438065864, + "acc_norm": 0.6424242424242425, + "acc_norm_stderr": 0.037425970438065864 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.28518971848225216, + "mc1_stderr": 0.015805827874454895, + "mc2": 0.4443555458524062, + "mc2_stderr": 0.016274797238327463 + }, + 
"harness|ko_commongen_v2|2": { + "acc": 0.4887839433293979, + "acc_stderr": 0.017186028469489283, + "acc_norm": 0.5159386068476978, + "acc_norm_stderr": 0.017181617837190195 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + 
"harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "kimwooglae/AISquare-Instruct-SOLAR-10.7b-v0.5.32", + "model_sha": "2d978ca8513d3863d945e59a3569f59773618dc3", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/kimwooglae/WebSquareAI-Instruct-KoSOLAR-10.7b-v0.5.34/result_2024-01-23 15:54:38.json b/kimwooglae/WebSquareAI-Instruct-KoSOLAR-10.7b-v0.5.34/result_2024-01-23 15:54:38.json new file mode 100644 index 0000000000000000000000000000000000000000..cde53ff79c341084892b7f53afb9b36d61a59829 --- /dev/null +++ b/kimwooglae/WebSquareAI-Instruct-KoSOLAR-10.7b-v0.5.34/result_2024-01-23 15:54:38.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.43430034129692835, + "acc_stderr": 0.014484703048857364, + "acc_norm": 0.5307167235494881, + "acc_norm_stderr": 0.01458379254630404 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4473212507468632, + "acc_stderr": 0.004962010338226348, + "acc_norm": 0.6261700856403107, + 
"acc_norm_stderr": 0.004828305041904401 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6432748538011696, + "acc_stderr": 0.036740130028609534, + "acc_norm": 0.6432748538011696, + "acc_norm_stderr": 0.036740130028609534 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6504854368932039, + "acc_stderr": 0.047211885060971716, + "acc_norm": 0.6504854368932039, + "acc_norm_stderr": 0.047211885060971716 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6781609195402298, + "acc_stderr": 0.01670638141505791, + "acc_norm": 0.6781609195402298, + "acc_norm_stderr": 0.01670638141505791 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.43703703703703706, + "acc_stderr": 0.04284958639753398, + "acc_norm": 0.43703703703703706, + "acc_norm_stderr": 0.04284958639753398 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.548936170212766, + "acc_stderr": 0.03252909619613197, + "acc_norm": 0.548936170212766, + "acc_norm_stderr": 0.03252909619613197 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4759036144578313, + "acc_stderr": 0.03887971849597264, + "acc_norm": 0.4759036144578313, + "acc_norm_stderr": 0.03887971849597264 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6430868167202572, + "acc_stderr": 0.027210420375934023, + "acc_norm": 0.6430868167202572, + "acc_norm_stderr": 0.027210420375934023 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5874439461883408, + "acc_stderr": 0.03304062175449297, + "acc_norm": 0.5874439461883408, + "acc_norm_stderr": 0.03304062175449297 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6946564885496184, + "acc_stderr": 0.04039314978724561, + "acc_norm": 0.6946564885496184, + "acc_norm_stderr": 0.04039314978724561 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.51, + "acc_stderr": 0.050241839379569095, + "acc_norm": 0.51, + 
"acc_norm_stderr": 0.050241839379569095 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.696969696969697, + "acc_stderr": 0.03274287914026868, + "acc_norm": 0.696969696969697, + "acc_norm_stderr": 0.03274287914026868 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5103448275862069, + "acc_stderr": 0.04165774775728763, + "acc_norm": 0.5103448275862069, + "acc_norm_stderr": 0.04165774775728763 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.37254901960784315, + "acc_stderr": 0.04810840148082635, + "acc_norm": 0.37254901960784315, + "acc_norm_stderr": 0.04810840148082635 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6092436974789915, + "acc_stderr": 0.03169380235712997, + "acc_norm": 0.6092436974789915, + "acc_norm_stderr": 0.03169380235712997 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5897435897435898, + "acc_stderr": 0.0249393139069408, + "acc_norm": 0.5897435897435898, + "acc_norm_stderr": 0.0249393139069408 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.55, + "acc_stderr": 0.04999999999999999, + "acc_norm": 0.55, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6296296296296297, + "acc_stderr": 0.04668408033024931, + "acc_norm": 0.6296296296296297, + "acc_norm_stderr": 0.04668408033024931 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3891625615763547, + "acc_stderr": 0.03430462416103872, + "acc_norm": 0.3891625615763547, + "acc_norm_stderr": 0.03430462416103872 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6225806451612903, + "acc_stderr": 0.027575960723278236, + "acc_norm": 0.6225806451612903, + "acc_norm_stderr": 0.027575960723278236 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.782051282051282, + "acc_stderr": 
0.027046857630716657, + "acc_norm": 0.782051282051282, + "acc_norm_stderr": 0.027046857630716657 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5849056603773585, + "acc_stderr": 0.030325945789286105, + "acc_norm": 0.5849056603773585, + "acc_norm_stderr": 0.030325945789286105 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5727272727272728, + "acc_stderr": 0.04738198703545483, + "acc_norm": 0.5727272727272728, + "acc_norm_stderr": 0.04738198703545483 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3074074074074074, + "acc_stderr": 0.028133252578815632, + "acc_norm": 0.3074074074074074, + "acc_norm_stderr": 0.028133252578815632 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.39072847682119205, + "acc_stderr": 0.039837983066598075, + "acc_norm": 0.39072847682119205, + "acc_norm_stderr": 0.039837983066598075 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7064676616915423, + "acc_stderr": 0.032200241045342054, + "acc_norm": 0.7064676616915423, + "acc_norm_stderr": 0.032200241045342054 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5375722543352601, + "acc_stderr": 0.03801685104524458, + "acc_norm": 0.5375722543352601, + "acc_norm_stderr": 0.03801685104524458 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.4312169312169312, + "acc_stderr": 0.02550648169813821, + "acc_norm": 0.4312169312169312, + "acc_norm_stderr": 0.02550648169813821 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5486111111111112, + "acc_stderr": 0.04161402398403279, + "acc_norm": 0.5486111111111112, + "acc_norm_stderr": 0.04161402398403279 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.67, + "acc_stderr": 0.04725815626252607, + "acc_norm": 0.67, + "acc_norm_stderr": 0.04725815626252607 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 
0.6040462427745664, + "acc_stderr": 0.02632981334194624, + "acc_norm": 0.6040462427745664, + "acc_norm_stderr": 0.02632981334194624 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5214723926380368, + "acc_stderr": 0.03924746876751129, + "acc_norm": 0.5214723926380368, + "acc_norm_stderr": 0.03924746876751129 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6481481481481481, + "acc_stderr": 0.026571483480719967, + "acc_norm": 0.6481481481481481, + "acc_norm_stderr": 0.026571483480719967 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7616580310880829, + "acc_stderr": 0.030748905363909895, + "acc_norm": 0.7616580310880829, + "acc_norm_stderr": 0.030748905363909895 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.4473684210526316, + "acc_stderr": 0.04677473004491199, + "acc_norm": 0.4473684210526316, + "acc_norm_stderr": 0.04677473004491199 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.7155963302752294, + "acc_stderr": 0.0193420365877026, + "acc_norm": 0.7155963302752294, + "acc_norm_stderr": 0.0193420365877026 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.47619047619047616, + "acc_stderr": 0.04467062628403273, + "acc_norm": 0.47619047619047616, + "acc_norm_stderr": 0.04467062628403273 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.6111111111111112, + "acc_stderr": 0.027914055510468008, + "acc_norm": 0.6111111111111112, + "acc_norm_stderr": 0.027914055510468008 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.64, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.64, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.743801652892562, + "acc_stderr": 0.03984979653302871, + "acc_norm": 0.743801652892562, + "acc_norm_stderr": 0.03984979653302871 + }, + 
"harness|ko_mmlu_astronomy|5": { + "acc": 0.5460526315789473, + "acc_stderr": 0.04051646342874143, + "acc_norm": 0.5460526315789473, + "acc_norm_stderr": 0.04051646342874143 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5196078431372549, + "acc_stderr": 0.020212274976302964, + "acc_norm": 0.5196078431372549, + "acc_norm_stderr": 0.020212274976302964 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3900709219858156, + "acc_stderr": 0.029097675599463926, + "acc_norm": 0.3900709219858156, + "acc_norm_stderr": 0.029097675599463926 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.41964285714285715, + "acc_stderr": 0.04684099321077106, + "acc_norm": 0.41964285714285715, + "acc_norm_stderr": 0.04684099321077106 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.49537037037037035, + "acc_stderr": 0.03409825519163572, + "acc_norm": 0.49537037037037035, + "acc_norm_stderr": 0.03409825519163572 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.37318435754189944, + "acc_stderr": 0.016175692013381947, + "acc_norm": 0.37318435754189944, + "acc_norm_stderr": 0.016175692013381947 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.7, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.7, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5036764705882353, + "acc_stderr": 0.0303720158854282, + "acc_norm": 0.5036764705882353, + "acc_norm_stderr": 0.0303720158854282 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6122448979591837, + "acc_stderr": 0.031192230726795656, + "acc_norm": 0.6122448979591837, + "acc_norm_stderr": 0.031192230726795656 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7510548523206751, + "acc_stderr": 0.028146970599422644, + 
"acc_norm": 0.7510548523206751, + "acc_norm_stderr": 0.028146970599422644 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.44002607561929596, + "acc_stderr": 0.012678037478574511, + "acc_norm": 0.44002607561929596, + "acc_norm_stderr": 0.012678037478574511 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6813725490196079, + "acc_stderr": 0.03270287181482082, + "acc_norm": 0.6813725490196079, + "acc_norm_stderr": 0.03270287181482082 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.7151515151515152, + "acc_stderr": 0.0352439084451178, + "acc_norm": 0.7151515151515152, + "acc_norm_stderr": 0.0352439084451178 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.4259485924112607, + "mc1_stderr": 0.017310471904076544, + "mc2": 0.596431879193073, + "mc2_stderr": 0.015948950013319772 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5702479338842975, + "acc_stderr": 0.017019847535972212, + "acc_norm": 0.5808736717827627, + "acc_norm_stderr": 0.016963995010862792 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + 
"harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "kimwooglae/WebSquareAI-Instruct-KoSOLAR-10.7b-v0.5.34", + "model_sha": "1bdfa772d590b9bebe1bb28e5efdbcce4bbc1c33", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + 
"num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/kimwooglae/WebSquareAI-Instruct-llama-3-8B-v0.5.37/result_2024-04-24 15:36:44.json b/kimwooglae/WebSquareAI-Instruct-llama-3-8B-v0.5.37/result_2024-04-24 15:36:44.json new file mode 100644 index 0000000000000000000000000000000000000000..cbab5988047f10bb35f89e14e9370293854497b5 --- /dev/null +++ b/kimwooglae/WebSquareAI-Instruct-llama-3-8B-v0.5.37/result_2024-04-24 15:36:44.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3822525597269625, + "acc_stderr": 0.014200454049979282, + "acc_norm": 0.43856655290102387, + "acc_norm_stderr": 0.014500682618212862 + }, + "harness|ko_hellaswag|10": { + "acc": 0.37930691097390956, + "acc_stderr": 0.004842229276915339, + "acc_norm": 0.4978092013543119, + "acc_norm_stderr": 0.004989733513319105 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5146198830409356, + "acc_stderr": 0.038331852752130254, + "acc_norm": 0.5146198830409356, + "acc_norm_stderr": 0.038331852752130254 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5922330097087378, + "acc_stderr": 0.04865777570410768, + "acc_norm": 0.5922330097087378, + "acc_norm_stderr": 0.04865777570410768 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4610472541507024, + "acc_stderr": 0.017825621793239026, + "acc_norm": 0.4610472541507024, + "acc_norm_stderr": 0.017825621793239026 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34814814814814815, + "acc_stderr": 0.041153246103369526, + "acc_norm": 0.34814814814814815, + "acc_norm_stderr": 0.041153246103369526 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.5063829787234042, + "acc_stderr": 0.03268335899936336, + "acc_norm": 0.5063829787234042, + "acc_norm_stderr": 0.03268335899936336 + }, + 
"harness|ko_mmlu_virology|5": { + "acc": 0.42168674698795183, + "acc_stderr": 0.03844453181770917, + "acc_norm": 0.42168674698795183, + "acc_norm_stderr": 0.03844453181770917 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5209003215434084, + "acc_stderr": 0.028373270961069414, + "acc_norm": 0.5209003215434084, + "acc_norm_stderr": 0.028373270961069414 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.484304932735426, + "acc_stderr": 0.0335412657542081, + "acc_norm": 0.484304932735426, + "acc_norm_stderr": 0.0335412657542081 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4961832061068702, + "acc_stderr": 0.043851623256015534, + "acc_norm": 0.4961832061068702, + "acc_norm_stderr": 0.043851623256015534 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.48, + "acc_stderr": 0.05021167315686779, + "acc_norm": 0.48, + "acc_norm_stderr": 0.05021167315686779 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5202020202020202, + "acc_stderr": 0.035594435655639196, + "acc_norm": 0.5202020202020202, + "acc_norm_stderr": 0.035594435655639196 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4413793103448276, + "acc_stderr": 0.041379310344827586, + "acc_norm": 0.4413793103448276, + "acc_norm_stderr": 0.041379310344827586 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04690650298201943, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04690650298201943 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5672268907563025, + "acc_stderr": 0.032183581077426124, + "acc_norm": 0.5672268907563025, + "acc_norm_stderr": 0.032183581077426124 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5076923076923077, + "acc_stderr": 0.025348006031534743, + "acc_norm": 0.5076923076923077, + "acc_norm_stderr": 0.025348006031534743 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.56, + "acc_stderr": 0.0498887651569859, + "acc_norm": 0.56, + 
"acc_norm_stderr": 0.0498887651569859 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6018518518518519, + "acc_stderr": 0.04732332615978814, + "acc_norm": 0.6018518518518519, + "acc_norm_stderr": 0.04732332615978814 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3842364532019704, + "acc_stderr": 0.034223985656575494, + "acc_norm": 0.3842364532019704, + "acc_norm_stderr": 0.034223985656575494 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.49032258064516127, + "acc_stderr": 0.028438677998909558, + "acc_norm": 0.49032258064516127, + "acc_norm_stderr": 0.028438677998909558 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7777777777777778, + "acc_stderr": 0.027236013946196673, + "acc_norm": 0.7777777777777778, + "acc_norm_stderr": 0.027236013946196673 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4679245283018868, + "acc_stderr": 0.030709486992556545, + "acc_norm": 0.4679245283018868, + "acc_norm_stderr": 0.030709486992556545 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5181818181818182, + "acc_stderr": 0.04785964010794916, + "acc_norm": 0.5181818181818182, + "acc_norm_stderr": 0.04785964010794916 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.34814814814814815, + "acc_stderr": 0.029045600290616258, + "acc_norm": 0.34814814814814815, + "acc_norm_stderr": 0.029045600290616258 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.03757949922943343, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.03757949922943343 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.03333333333333334, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.03333333333333334 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.44508670520231214, + "acc_stderr": 
0.03789401760283647, + "acc_norm": 0.44508670520231214, + "acc_norm_stderr": 0.03789401760283647 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.373015873015873, + "acc_stderr": 0.02490699045899257, + "acc_norm": 0.373015873015873, + "acc_norm_stderr": 0.02490699045899257 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4652777777777778, + "acc_stderr": 0.04171115858181618, + "acc_norm": 0.4652777777777778, + "acc_norm_stderr": 0.04171115858181618 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.64, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.64, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5144508670520231, + "acc_stderr": 0.026907849856282542, + "acc_norm": 0.5144508670520231, + "acc_norm_stderr": 0.026907849856282542 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.49079754601226994, + "acc_stderr": 0.039277056007874414, + "acc_norm": 0.49079754601226994, + "acc_norm_stderr": 0.039277056007874414 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5462962962962963, + "acc_stderr": 0.027701228468542602, + "acc_norm": 0.5462962962962963, + "acc_norm_stderr": 0.027701228468542602 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.533678756476684, + "acc_stderr": 0.03600244069867179, + "acc_norm": 0.533678756476684, + "acc_norm_stderr": 0.03600244069867179 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.34210526315789475, + "acc_stderr": 0.04462917535336937, + "acc_norm": 0.34210526315789475, + "acc_norm_stderr": 0.04462917535336937 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 
0.5981651376146789, + "acc_stderr": 0.02102010617299701, + "acc_norm": 0.5981651376146789, + "acc_norm_stderr": 0.02102010617299701 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.04360314860077459, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.04360314860077459 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5196078431372549, + "acc_stderr": 0.028607893699576073, + "acc_norm": 0.5196078431372549, + "acc_norm_stderr": 0.028607893699576073 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6694214876033058, + "acc_stderr": 0.042943408452120954, + "acc_norm": 0.6694214876033058, + "acc_norm_stderr": 0.042943408452120954 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.46710526315789475, + "acc_stderr": 0.040601270352363966, + "acc_norm": 0.46710526315789475, + "acc_norm_stderr": 0.040601270352363966 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.434640522875817, + "acc_stderr": 0.020054269200726452, + "acc_norm": 0.434640522875817, + "acc_norm_stderr": 0.020054269200726452 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.375886524822695, + "acc_stderr": 0.02889395541211588, + "acc_norm": 0.375886524822695, + "acc_norm_stderr": 0.02889395541211588 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.5, + "acc_stderr": 0.04745789978762494, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04745789978762494 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.42592592592592593, + "acc_stderr": 0.033723432716530624, + "acc_norm": 0.42592592592592593, + "acc_norm_stderr": 0.033723432716530624 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27932960893854747, + "acc_stderr": 0.015005762446786166, + "acc_norm": 0.27932960893854747, + "acc_norm_stderr": 0.015005762446786166 + }, + 
"harness|ko_mmlu_college_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.65, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.65, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4007352941176471, + "acc_stderr": 0.029768263528933105, + "acc_norm": 0.4007352941176471, + "acc_norm_stderr": 0.029768263528933105 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5795918367346938, + "acc_stderr": 0.03160106993449601, + "acc_norm": 0.5795918367346938, + "acc_norm_stderr": 0.03160106993449601 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6540084388185654, + "acc_stderr": 0.03096481058878671, + "acc_norm": 0.6540084388185654, + "acc_norm_stderr": 0.03096481058878671 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.363754889178618, + "acc_stderr": 0.012286991879902889, + "acc_norm": 0.363754889178618, + "acc_norm_stderr": 0.012286991879902889 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5490196078431373, + "acc_stderr": 0.034924061041636124, + "acc_norm": 0.5490196078431373, + "acc_norm_stderr": 0.034924061041636124 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6606060606060606, + "acc_stderr": 0.03697442205031595, + "acc_norm": 0.6606060606060606, + "acc_norm_stderr": 0.03697442205031595 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.26560587515299877, + "mc1_stderr": 0.015461027627253597, + "mc2": 0.4354629062725224, + "mc2_stderr": 0.015266127378272255 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.44391971664698937, + "acc_stderr": 0.017081884623542543, + "acc_norm": 0.4946871310507674, + "acc_norm_stderr": 0.017189383627229687 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, 
+ "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + 
"harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "kimwooglae/WebSquareAI-Instruct-llama-3-8B-v0.5.37", + "model_sha": "df423e960edf574e364043cb978494410b6bd192", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/kimwooglae/WebSquareAI-Instruct-llama-3-8B-v0.5.39/result_2024-04-24 15:40:27.json b/kimwooglae/WebSquareAI-Instruct-llama-3-8B-v0.5.39/result_2024-04-24 15:40:27.json new file mode 100644 index 0000000000000000000000000000000000000000..99ef87d93f226ffb21a62c0dc1ddbb4338e1a377 --- /dev/null +++ b/kimwooglae/WebSquareAI-Instruct-llama-3-8B-v0.5.39/result_2024-04-24 15:40:27.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.36860068259385664, + "acc_stderr": 0.014097810678042194, + "acc_norm": 0.4189419795221843, + "acc_norm_stderr": 0.014418106953639011 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3770165305715993, + "acc_stderr": 0.004836486437527272, + "acc_norm": 0.4790878311093408, + "acc_norm_stderr": 0.0049854152506909125 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.543859649122807, + "acc_stderr": 0.038200425866029654, + "acc_norm": 0.543859649122807, + "acc_norm_stderr": 0.038200425866029654 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5728155339805825, + "acc_stderr": 0.04897957737781168, + 
"acc_norm": 0.5728155339805825, + "acc_norm_stderr": 0.04897957737781168 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4661558109833972, + "acc_stderr": 0.017838956009136802, + "acc_norm": 0.4661558109833972, + "acc_norm_stderr": 0.017838956009136802 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3851851851851852, + "acc_stderr": 0.042039210401562783, + "acc_norm": 0.3851851851851852, + "acc_norm_stderr": 0.042039210401562783 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621503, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621503 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.48936170212765956, + "acc_stderr": 0.03267862331014063, + "acc_norm": 0.48936170212765956, + "acc_norm_stderr": 0.03267862331014063 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3795180722891566, + "acc_stderr": 0.037777988227480165, + "acc_norm": 0.3795180722891566, + "acc_norm_stderr": 0.037777988227480165 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5305466237942122, + "acc_stderr": 0.028345045864840625, + "acc_norm": 0.5305466237942122, + "acc_norm_stderr": 0.028345045864840625 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5201793721973094, + "acc_stderr": 0.033530461674123, + "acc_norm": 0.5201793721973094, + "acc_norm_stderr": 0.033530461674123 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.45038167938931295, + "acc_stderr": 0.04363643698524779, + "acc_norm": 0.45038167938931295, + "acc_norm_stderr": 0.04363643698524779 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.494949494949495, + "acc_stderr": 0.035621707606254015, + "acc_norm": 0.494949494949495, + "acc_norm_stderr": 0.035621707606254015 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5448275862068965, + "acc_stderr": 0.04149886942192118, 
+ "acc_norm": 0.5448275862068965, + "acc_norm_stderr": 0.04149886942192118 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.30392156862745096, + "acc_stderr": 0.04576665403207761, + "acc_norm": 0.30392156862745096, + "acc_norm_stderr": 0.04576665403207761 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.542016806722689, + "acc_stderr": 0.03236361111951941, + "acc_norm": 0.542016806722689, + "acc_norm_stderr": 0.03236361111951941 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.49230769230769234, + "acc_stderr": 0.0253480060315348, + "acc_norm": 0.49230769230769234, + "acc_norm_stderr": 0.0253480060315348 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6018518518518519, + "acc_stderr": 0.04732332615978814, + "acc_norm": 0.6018518518518519, + "acc_norm_stderr": 0.04732332615978814 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4187192118226601, + "acc_stderr": 0.034711928605184676, + "acc_norm": 0.4187192118226601, + "acc_norm_stderr": 0.034711928605184676 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5129032258064516, + "acc_stderr": 0.02843453315268186, + "acc_norm": 0.5129032258064516, + "acc_norm_stderr": 0.02843453315268186 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7008547008547008, + "acc_stderr": 0.02999695185834948, + "acc_norm": 0.7008547008547008, + "acc_norm_stderr": 0.02999695185834948 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.49433962264150944, + "acc_stderr": 0.03077090076385131, + "acc_norm": 0.49433962264150944, + "acc_norm_stderr": 0.03077090076385131 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.41818181818181815, + 
"acc_stderr": 0.0472457740573157, + "acc_norm": 0.41818181818181815, + "acc_norm_stderr": 0.0472457740573157 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.362962962962963, + "acc_stderr": 0.02931820364520686, + "acc_norm": 0.362962962962963, + "acc_norm_stderr": 0.02931820364520686 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33112582781456956, + "acc_stderr": 0.038425817186598696, + "acc_norm": 0.33112582781456956, + "acc_norm_stderr": 0.038425817186598696 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6517412935323383, + "acc_stderr": 0.033687874661154596, + "acc_norm": 0.6517412935323383, + "acc_norm_stderr": 0.033687874661154596 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4508670520231214, + "acc_stderr": 0.037940126746970296, + "acc_norm": 0.4508670520231214, + "acc_norm_stderr": 0.037940126746970296 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.35978835978835977, + "acc_stderr": 0.024718075944129277, + "acc_norm": 0.35978835978835977, + "acc_norm_stderr": 0.024718075944129277 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5208333333333334, + "acc_stderr": 0.041775789507399935, + "acc_norm": 0.5208333333333334, + "acc_norm_stderr": 0.041775789507399935 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.67, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.67, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5028901734104047, + "acc_stderr": 0.02691864538323901, + "acc_norm": 0.5028901734104047, + "acc_norm_stderr": 0.02691864538323901 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.44171779141104295, + "acc_stderr": 0.03901591825836183, + "acc_norm": 0.44171779141104295, + "acc_norm_stderr": 0.03901591825836183 + }, + "harness|ko_mmlu_prehistory|5": { 
+ "acc": 0.5370370370370371, + "acc_stderr": 0.02774431344337654, + "acc_norm": 0.5370370370370371, + "acc_norm_stderr": 0.02774431344337654 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5595854922279793, + "acc_stderr": 0.035827245300360945, + "acc_norm": 0.5595854922279793, + "acc_norm_stderr": 0.035827245300360945 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.41228070175438597, + "acc_stderr": 0.046306532033665956, + "acc_norm": 0.41228070175438597, + "acc_norm_stderr": 0.046306532033665956 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5779816513761468, + "acc_stderr": 0.021174991407763175, + "acc_norm": 0.5779816513761468, + "acc_norm_stderr": 0.021174991407763175 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.04360314860077459, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.04360314860077459 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5522875816993464, + "acc_stderr": 0.02847293847803353, + "acc_norm": 0.5522875816993464, + "acc_norm_stderr": 0.02847293847803353 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.59, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.59, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7024793388429752, + "acc_stderr": 0.04173349148083498, + "acc_norm": 0.7024793388429752, + "acc_norm_stderr": 0.04173349148083498 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4934210526315789, + "acc_stderr": 0.040685900502249704, + "acc_norm": 0.4934210526315789, + "acc_norm_stderr": 0.040685900502249704 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.44281045751633985, + "acc_stderr": 0.02009508315457735, + "acc_norm": 0.44281045751633985, + "acc_norm_stderr": 0.02009508315457735 + }, 
+ "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.34397163120567376, + "acc_stderr": 0.028338017428611327, + "acc_norm": 0.34397163120567376, + "acc_norm_stderr": 0.028338017428611327 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.5, + "acc_stderr": 0.04745789978762494, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04745789978762494 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.39351851851851855, + "acc_stderr": 0.03331747876370312, + "acc_norm": 0.39351851851851855, + "acc_norm_stderr": 0.03331747876370312 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2636871508379888, + "acc_stderr": 0.014736926383761994, + "acc_norm": 0.2636871508379888, + "acc_norm_stderr": 0.014736926383761994 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.48, + "acc_stderr": 0.05021167315686779, + "acc_norm": 0.48, + "acc_norm_stderr": 0.05021167315686779 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.65, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.65, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.39705882352941174, + "acc_stderr": 0.029722152099280065, + "acc_norm": 0.39705882352941174, + "acc_norm_stderr": 0.029722152099280065 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6204081632653061, + "acc_stderr": 0.03106721126287246, + "acc_norm": 0.6204081632653061, + "acc_norm_stderr": 0.03106721126287246 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6497890295358649, + "acc_stderr": 0.031052391937584346, + "acc_norm": 0.6497890295358649, + "acc_norm_stderr": 0.031052391937584346 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.36897001303780963, + "acc_stderr": 0.012323936650174859, + "acc_norm": 0.36897001303780963, + "acc_norm_stderr": 0.012323936650174859 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.553921568627451, + "acc_stderr": 0.034888454513049734, + "acc_norm": 
0.553921568627451, + "acc_norm_stderr": 0.034888454513049734 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6606060606060606, + "acc_stderr": 0.03697442205031595, + "acc_norm": 0.6606060606060606, + "acc_norm_stderr": 0.03697442205031595 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2937576499388005, + "mc1_stderr": 0.015945068581236614, + "mc2": 0.48784579403408673, + "mc2_stderr": 0.01640941192094201 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4380165289256198, + "acc_stderr": 0.017057753702160287, + "acc_norm": 0.45808736717827625, + "acc_norm_stderr": 0.017129852117911147 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + 
"harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "kimwooglae/WebSquareAI-Instruct-llama-3-8B-v0.5.39", + "model_sha": "3482b74da7b41a944ed1afe64411d4b0d9a42cb0", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/kiyoonyoo/ko-en-trans-platypus-13b-v2/result_2023-10-20 01:21:48.json b/kiyoonyoo/ko-en-trans-platypus-13b-v2/result_2023-10-20 01:21:48.json new file mode 100644 index 0000000000000000000000000000000000000000..6653ce0b4ec9f7ba5299f280e3f9e27bb2630c67 --- /dev/null +++ 
b/kiyoonyoo/ko-en-trans-platypus-13b-v2/result_2023-10-20 01:21:48.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3660409556313993, + "acc_stderr": 0.01407722310847014, + "acc_norm": 0.42150170648464164, + "acc_norm_stderr": 0.014430197069326021 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4064927305317666, + "acc_stderr": 0.004901747426331732, + "acc_norm": 0.5430193188607847, + "acc_norm_stderr": 0.004971278309204197 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5614035087719298, + "acc_stderr": 0.038057975055904594, + "acc_norm": 0.5614035087719298, + "acc_norm_stderr": 0.038057975055904594 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4854368932038835, + "acc_stderr": 0.04948637324026637, + "acc_norm": 0.4854368932038835, + "acc_norm_stderr": 0.04948637324026637 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.48020434227330777, + "acc_stderr": 0.017865944827291615, + "acc_norm": 0.48020434227330777, + "acc_norm_stderr": 0.017865944827291615 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37777777777777777, + "acc_stderr": 0.04188307537595853, + "acc_norm": 0.37777777777777777, + "acc_norm_stderr": 0.04188307537595853 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.37446808510638296, + "acc_stderr": 0.031639106653672915, + "acc_norm": 0.37446808510638296, + "acc_norm_stderr": 0.031639106653672915 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.40963855421686746, + "acc_stderr": 0.038284011150790206, + "acc_norm": 0.40963855421686746, + "acc_norm_stderr": 0.038284011150790206 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4694533762057878, + "acc_stderr": 0.028345045864840684, + "acc_norm": 0.4694533762057878, + "acc_norm_stderr": 0.028345045864840684 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3542600896860987, 
+ "acc_stderr": 0.032100621541349864, + "acc_norm": 0.3542600896860987, + "acc_norm_stderr": 0.032100621541349864 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48854961832061067, + "acc_stderr": 0.04384140024078016, + "acc_norm": 0.48854961832061067, + "acc_norm_stderr": 0.04384140024078016 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5252525252525253, + "acc_stderr": 0.035578062450873145, + "acc_norm": 0.5252525252525253, + "acc_norm_stderr": 0.035578062450873145 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4206896551724138, + "acc_stderr": 0.0411391498118926, + "acc_norm": 0.4206896551724138, + "acc_norm_stderr": 0.0411391498118926 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.18627450980392157, + "acc_stderr": 0.03873958714149351, + "acc_norm": 0.18627450980392157, + "acc_norm_stderr": 0.03873958714149351 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.42436974789915966, + "acc_stderr": 0.032104790510157764, + "acc_norm": 0.42436974789915966, + "acc_norm_stderr": 0.032104790510157764 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4076923076923077, + "acc_stderr": 0.02491524398598784, + "acc_norm": 0.4076923076923077, + "acc_norm_stderr": 0.02491524398598784 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.42592592592592593, + "acc_stderr": 0.0478034362693679, + "acc_norm": 0.42592592592592593, + "acc_norm_stderr": 0.0478034362693679 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + 
"acc": 0.3694581280788177, + "acc_stderr": 0.03395970381998575, + "acc_norm": 0.3694581280788177, + "acc_norm_stderr": 0.03395970381998575 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4096774193548387, + "acc_stderr": 0.027976054915347354, + "acc_norm": 0.4096774193548387, + "acc_norm_stderr": 0.027976054915347354 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6239316239316239, + "acc_stderr": 0.03173393632969481, + "acc_norm": 0.6239316239316239, + "acc_norm_stderr": 0.03173393632969481 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4641509433962264, + "acc_stderr": 0.030693675018458003, + "acc_norm": 0.4641509433962264, + "acc_norm_stderr": 0.030693675018458003 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.41818181818181815, + "acc_stderr": 0.04724577405731572, + "acc_norm": 0.41818181818181815, + "acc_norm_stderr": 0.04724577405731572 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2740740740740741, + "acc_stderr": 0.027195934804085626, + "acc_norm": 0.2740740740740741, + "acc_norm_stderr": 0.027195934804085626 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + "acc_stderr": 0.03684881521389023, + "acc_norm": 0.2847682119205298, + "acc_norm_stderr": 0.03684881521389023 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5124378109452736, + "acc_stderr": 0.03534439848539579, + "acc_norm": 0.5124378109452736, + "acc_norm_stderr": 0.03534439848539579 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.34104046242774566, + "acc_stderr": 0.03614665424180826, + "acc_norm": 0.34104046242774566, + "acc_norm_stderr": 0.03614665424180826 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2751322751322751, + "acc_stderr": 0.023000086859068642, + "acc_norm": 0.2751322751322751, + "acc_norm_stderr": 0.023000086859068642 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.04016660030451233, + "acc_norm": 0.3611111111111111, + 
"acc_norm_stderr": 0.04016660030451233 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4190751445086705, + "acc_stderr": 0.02656417811142262, + "acc_norm": 0.4190751445086705, + "acc_norm_stderr": 0.02656417811142262 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.36809815950920244, + "acc_stderr": 0.03789213935838396, + "acc_norm": 0.36809815950920244, + "acc_norm_stderr": 0.03789213935838396 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4537037037037037, + "acc_stderr": 0.0277012284685426, + "acc_norm": 0.4537037037037037, + "acc_norm_stderr": 0.0277012284685426 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.42487046632124353, + "acc_stderr": 0.0356747133521254, + "acc_norm": 0.42487046632124353, + "acc_norm_stderr": 0.0356747133521254 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.04227054451232199, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.04227054451232199 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.46788990825688076, + "acc_stderr": 0.021393071222680814, + "acc_norm": 0.46788990825688076, + "acc_norm_stderr": 0.021393071222680814 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2698412698412698, + "acc_stderr": 0.03970158273235172, + "acc_norm": 0.2698412698412698, + "acc_norm_stderr": 0.03970158273235172 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4215686274509804, + "acc_stderr": 0.02827549015679143, + "acc_norm": 0.4215686274509804, + 
"acc_norm_stderr": 0.02827549015679143 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5619834710743802, + "acc_stderr": 0.04529146804435792, + "acc_norm": 0.5619834710743802, + "acc_norm_stderr": 0.04529146804435792 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4342105263157895, + "acc_stderr": 0.0403356566784832, + "acc_norm": 0.4342105263157895, + "acc_norm_stderr": 0.0403356566784832 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.29901960784313725, + "acc_stderr": 0.018521756215423027, + "acc_norm": 0.29901960784313725, + "acc_norm_stderr": 0.018521756215423027 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.31560283687943264, + "acc_stderr": 0.027724989449509314, + "acc_norm": 0.31560283687943264, + "acc_norm_stderr": 0.027724989449509314 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.20535714285714285, + "acc_stderr": 0.03834241021419073, + "acc_norm": 0.20535714285714285, + "acc_norm_stderr": 0.03834241021419073 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.37962962962962965, + "acc_stderr": 0.03309682581119035, + "acc_norm": 0.37962962962962965, + "acc_norm_stderr": 0.03309682581119035 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.2610294117647059, + "acc_stderr": 0.02667925227010311, + 
"acc_norm": 0.2610294117647059, + "acc_norm_stderr": 0.02667925227010311 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.45714285714285713, + "acc_stderr": 0.03189141832421397, + "acc_norm": 0.45714285714285713, + "acc_norm_stderr": 0.03189141832421397 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.47257383966244726, + "acc_stderr": 0.03249822718301303, + "acc_norm": 0.47257383966244726, + "acc_norm_stderr": 0.03249822718301303 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3011734028683181, + "acc_stderr": 0.011717148751648435, + "acc_norm": 0.3011734028683181, + "acc_norm_stderr": 0.011717148751648435 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.03410785338904719, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.03410785338904719 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.45454545454545453, + "acc_stderr": 0.038881769216741004, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.038881769216741004 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24724602203182375, + "mc1_stderr": 0.015102404797359649, + "mc2": 0.40735838259254725, + "mc2_stderr": 0.0148987552825206 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4557260920897285, + "acc_stderr": 0.017122829143292648, + "acc_norm": 0.5194805194805194, + "acc_norm_stderr": 0.017177301992342544 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + 
"harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + 
"harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "kiyoonyoo/ko-en-trans-platypus-13b-v2", + "model_sha": "d050d876d84bdce99f417f180479586cf0fe8a86", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/kiyoonyoo/ko-en-trans-platypus-13b-v3/result_2023-10-22 06:24:03.json b/kiyoonyoo/ko-en-trans-platypus-13b-v3/result_2023-10-22 06:24:03.json new file mode 100644 index 0000000000000000000000000000000000000000..41adb4f98b5507787c4cde57d0d7c8e08609d0ca --- /dev/null +++ b/kiyoonyoo/ko-en-trans-platypus-13b-v3/result_2023-10-22 06:24:03.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.37457337883959047, + "acc_stderr": 0.014144193471893456, + "acc_norm": 0.43430034129692835, + "acc_norm_stderr": 0.014484703048857355 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4070902210714997, + "acc_stderr": 0.004902878806733046, + "acc_norm": 0.5408285202150966, + "acc_norm_stderr": 0.004973117975062488 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5380116959064327, + "acc_stderr": 0.03823727092882307, + "acc_norm": 0.5380116959064327, + "acc_norm_stderr": 0.03823727092882307 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5533980582524272, + "acc_stderr": 0.04922424153458935, + "acc_norm": 0.5533980582524272, + "acc_norm_stderr": 0.04922424153458935 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5197956577266922, + "acc_stderr": 0.017865944827291633, + "acc_norm": 0.5197956577266922, + "acc_norm_stderr": 0.017865944827291633 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.43703703703703706, + "acc_stderr": 0.04284958639753399, + "acc_norm": 0.43703703703703706, + "acc_norm_stderr": 0.04284958639753399 + }, + "harness|ko_mmlu_abstract_algebra|5": { + 
"acc": 0.29, + "acc_stderr": 0.045604802157206824, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206824 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3446808510638298, + "acc_stderr": 0.03106898596312215, + "acc_norm": 0.3446808510638298, + "acc_norm_stderr": 0.03106898596312215 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.40963855421686746, + "acc_stderr": 0.038284011150790206, + "acc_norm": 0.40963855421686746, + "acc_norm_stderr": 0.038284011150790206 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5241157556270096, + "acc_stderr": 0.028365041542564584, + "acc_norm": 0.5241157556270096, + "acc_norm_stderr": 0.028365041542564584 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4170403587443946, + "acc_stderr": 0.03309266936071721, + "acc_norm": 0.4170403587443946, + "acc_norm_stderr": 0.03309266936071721 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5114503816793893, + "acc_stderr": 0.043841400240780176, + "acc_norm": 0.5114503816793893, + "acc_norm_stderr": 0.043841400240780176 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.035476014940069384, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.035476014940069384 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.47586206896551725, + "acc_stderr": 0.041618085035015295, + "acc_norm": 0.47586206896551725, + "acc_norm_stderr": 0.041618085035015295 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.18627450980392157, + "acc_stderr": 0.03873958714149352, + "acc_norm": 0.18627450980392157, + "acc_norm_stderr": 0.03873958714149352 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.42016806722689076, + "acc_stderr": 0.03206183783236153, + "acc_norm": 0.42016806722689076, + "acc_norm_stderr": 0.03206183783236153 + }, + 
"harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4153846153846154, + "acc_stderr": 0.024985354923102318, + "acc_norm": 0.4153846153846154, + "acc_norm_stderr": 0.024985354923102318 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.47, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.47, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3497536945812808, + "acc_stderr": 0.03355400904969566, + "acc_norm": 0.3497536945812808, + "acc_norm_stderr": 0.03355400904969566 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4612903225806452, + "acc_stderr": 0.028358634859836914, + "acc_norm": 0.4612903225806452, + "acc_norm_stderr": 0.028358634859836914 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6495726495726496, + "acc_stderr": 0.0312561082442188, + "acc_norm": 0.6495726495726496, + "acc_norm_stderr": 0.0312561082442188 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4528301886792453, + "acc_stderr": 0.030635627957961823, + "acc_norm": 0.4528301886792453, + "acc_norm_stderr": 0.030635627957961823 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4909090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.4909090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.29259259259259257, + "acc_stderr": 0.027738969632176095, + "acc_norm": 0.29259259259259257, + "acc_norm_stderr": 0.027738969632176095 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.038020397601079024, + "acc_norm": 0.31788079470198677, + 
"acc_norm_stderr": 0.038020397601079024 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.572139303482587, + "acc_stderr": 0.03498541988407795, + "acc_norm": 0.572139303482587, + "acc_norm_stderr": 0.03498541988407795 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4046242774566474, + "acc_stderr": 0.03742461193887248, + "acc_norm": 0.4046242774566474, + "acc_norm_stderr": 0.03742461193887248 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30687830687830686, + "acc_stderr": 0.023752928712112126, + "acc_norm": 0.30687830687830686, + "acc_norm_stderr": 0.023752928712112126 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3958333333333333, + "acc_stderr": 0.04089465449325583, + "acc_norm": 0.3958333333333333, + "acc_norm_stderr": 0.04089465449325583 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.7, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.7, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.48265895953757226, + "acc_stderr": 0.02690290045866664, + "acc_norm": 0.48265895953757226, + "acc_norm_stderr": 0.02690290045866664 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.39263803680981596, + "acc_stderr": 0.03836740907831029, + "acc_norm": 0.39263803680981596, + "acc_norm_stderr": 0.03836740907831029 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.027801656212323667, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.027801656212323667 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5595854922279793, + "acc_stderr": 0.03582724530036094, + "acc_norm": 
0.5595854922279793, + "acc_norm_stderr": 0.03582724530036094 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.21929824561403508, + "acc_stderr": 0.03892431106518753, + "acc_norm": 0.21929824561403508, + "acc_norm_stderr": 0.03892431106518753 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5394495412844037, + "acc_stderr": 0.021370494609995096, + "acc_norm": 0.5394495412844037, + "acc_norm_stderr": 0.021370494609995096 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3492063492063492, + "acc_stderr": 0.04263906892795133, + "acc_norm": 0.3492063492063492, + "acc_norm_stderr": 0.04263906892795133 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4542483660130719, + "acc_stderr": 0.028509807802626567, + "acc_norm": 0.4542483660130719, + "acc_norm_stderr": 0.028509807802626567 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.043913262867240704, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.043913262867240704 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4407894736842105, + "acc_stderr": 0.040403110624904356, + "acc_norm": 0.4407894736842105, + "acc_norm_stderr": 0.040403110624904356 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3480392156862745, + "acc_stderr": 0.019270998708223974, + "acc_norm": 0.3480392156862745, + "acc_norm_stderr": 0.019270998708223974 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.35815602836879434, + "acc_stderr": 0.02860208586275942, + "acc_norm": 0.35815602836879434, + "acc_norm_stderr": 0.02860208586275942 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.24107142857142858, + "acc_stderr": 0.04059867246952688, + "acc_norm": 0.24107142857142858, + "acc_norm_stderr": 0.04059867246952688 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 
0.2916666666666667, + "acc_stderr": 0.03099866630456053, + "acc_norm": 0.2916666666666667, + "acc_norm_stderr": 0.03099866630456053 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24022346368715083, + "acc_stderr": 0.014288343803925295, + "acc_norm": 0.24022346368715083, + "acc_norm_stderr": 0.014288343803925295 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3639705882352941, + "acc_stderr": 0.02922719246003203, + "acc_norm": 0.3639705882352941, + "acc_norm_stderr": 0.02922719246003203 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4775510204081633, + "acc_stderr": 0.03197694118713673, + "acc_norm": 0.4775510204081633, + "acc_norm_stderr": 0.03197694118713673 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5949367088607594, + "acc_stderr": 0.031955147413706725, + "acc_norm": 0.5949367088607594, + "acc_norm_stderr": 0.031955147413706725 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.35919165580182527, + "acc_stderr": 0.012253386187584245, + "acc_norm": 0.35919165580182527, + "acc_norm_stderr": 0.012253386187584245 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5049019607843137, + "acc_stderr": 0.03509143375606786, + "acc_norm": 0.5049019607843137, + "acc_norm_stderr": 0.03509143375606786 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.503030303030303, + "acc_stderr": 0.03904272341431856, + "acc_norm": 0.503030303030303, + "acc_norm_stderr": 0.03904272341431856 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.27539779681762544, + "mc1_stderr": 0.015638135667775523, + "mc2": 0.4457475184349363, + "mc2_stderr": 0.015091782961916999 
+ }, + "harness|ko_commongen_v2|2": { + "acc": 0.38134592680047225, + "acc_stderr": 0.016699301768828074, + "acc_norm": 0.40613931523022434, + "acc_norm_stderr": 0.016884749503191392 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + 
"harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "kiyoonyoo/ko-en-trans-platypus-13b-v3", + "model_sha": "3c27d710886ff8b6a9fcf321fae0e2f76eaeafa3", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/kiyoonyoo/ko-en-trans-platypus-13b/result_2023-10-18 00:15:42.json b/kiyoonyoo/ko-en-trans-platypus-13b/result_2023-10-18 00:15:42.json new file mode 100644 index 0000000000000000000000000000000000000000..9b68f6e1c61525954b9517e5fd8e7e2743ca1c51 --- /dev/null +++ b/kiyoonyoo/ko-en-trans-platypus-13b/result_2023-10-18 00:15:42.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3660409556313993, + "acc_stderr": 0.01407722310847014, + "acc_norm": 0.42150170648464164, + "acc_norm_stderr": 0.014430197069326021 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4064927305317666, + "acc_stderr": 0.004901747426331732, + "acc_norm": 0.5430193188607847, + "acc_norm_stderr": 0.004971278309204197 + }, + 
"harness|ko_mmlu_world_religions|5": { + "acc": 0.5614035087719298, + "acc_stderr": 0.038057975055904594, + "acc_norm": 0.5614035087719298, + "acc_norm_stderr": 0.038057975055904594 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4854368932038835, + "acc_stderr": 0.04948637324026637, + "acc_norm": 0.4854368932038835, + "acc_norm_stderr": 0.04948637324026637 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.48020434227330777, + "acc_stderr": 0.017865944827291615, + "acc_norm": 0.48020434227330777, + "acc_norm_stderr": 0.017865944827291615 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37777777777777777, + "acc_stderr": 0.04188307537595853, + "acc_norm": 0.37777777777777777, + "acc_norm_stderr": 0.04188307537595853 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.37446808510638296, + "acc_stderr": 0.031639106653672915, + "acc_norm": 0.37446808510638296, + "acc_norm_stderr": 0.031639106653672915 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.40963855421686746, + "acc_stderr": 0.038284011150790206, + "acc_norm": 0.40963855421686746, + "acc_norm_stderr": 0.038284011150790206 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4694533762057878, + "acc_stderr": 0.028345045864840684, + "acc_norm": 0.4694533762057878, + "acc_norm_stderr": 0.028345045864840684 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3542600896860987, + "acc_stderr": 0.032100621541349864, + "acc_norm": 0.3542600896860987, + "acc_norm_stderr": 0.032100621541349864 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48854961832061067, + "acc_stderr": 0.04384140024078016, + "acc_norm": 0.48854961832061067, + "acc_norm_stderr": 0.04384140024078016 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + 
"harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5252525252525253, + "acc_stderr": 0.035578062450873145, + "acc_norm": 0.5252525252525253, + "acc_norm_stderr": 0.035578062450873145 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4206896551724138, + "acc_stderr": 0.0411391498118926, + "acc_norm": 0.4206896551724138, + "acc_norm_stderr": 0.0411391498118926 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.18627450980392157, + "acc_stderr": 0.03873958714149351, + "acc_norm": 0.18627450980392157, + "acc_norm_stderr": 0.03873958714149351 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.42436974789915966, + "acc_stderr": 0.032104790510157764, + "acc_norm": 0.42436974789915966, + "acc_norm_stderr": 0.032104790510157764 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4076923076923077, + "acc_stderr": 0.02491524398598784, + "acc_norm": 0.4076923076923077, + "acc_norm_stderr": 0.02491524398598784 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.42592592592592593, + "acc_stderr": 0.0478034362693679, + "acc_norm": 0.42592592592592593, + "acc_norm_stderr": 0.0478034362693679 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3694581280788177, + "acc_stderr": 0.03395970381998575, + "acc_norm": 0.3694581280788177, + "acc_norm_stderr": 0.03395970381998575 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4096774193548387, + "acc_stderr": 0.027976054915347354, + "acc_norm": 0.4096774193548387, + "acc_norm_stderr": 0.027976054915347354 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6239316239316239, + "acc_stderr": 0.03173393632969481, + "acc_norm": 0.6239316239316239, + 
"acc_norm_stderr": 0.03173393632969481 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4641509433962264, + "acc_stderr": 0.030693675018458003, + "acc_norm": 0.4641509433962264, + "acc_norm_stderr": 0.030693675018458003 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.41818181818181815, + "acc_stderr": 0.04724577405731572, + "acc_norm": 0.41818181818181815, + "acc_norm_stderr": 0.04724577405731572 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2740740740740741, + "acc_stderr": 0.027195934804085626, + "acc_norm": 0.2740740740740741, + "acc_norm_stderr": 0.027195934804085626 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + "acc_stderr": 0.03684881521389023, + "acc_norm": 0.2847682119205298, + "acc_norm_stderr": 0.03684881521389023 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5124378109452736, + "acc_stderr": 0.03534439848539579, + "acc_norm": 0.5124378109452736, + "acc_norm_stderr": 0.03534439848539579 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.34104046242774566, + "acc_stderr": 0.03614665424180826, + "acc_norm": 0.34104046242774566, + "acc_norm_stderr": 0.03614665424180826 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2751322751322751, + "acc_stderr": 0.023000086859068642, + "acc_norm": 0.2751322751322751, + "acc_norm_stderr": 0.023000086859068642 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.04016660030451233, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.04016660030451233 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4190751445086705, + "acc_stderr": 0.02656417811142262, + 
"acc_norm": 0.4190751445086705, + "acc_norm_stderr": 0.02656417811142262 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.36809815950920244, + "acc_stderr": 0.03789213935838396, + "acc_norm": 0.36809815950920244, + "acc_norm_stderr": 0.03789213935838396 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4537037037037037, + "acc_stderr": 0.0277012284685426, + "acc_norm": 0.4537037037037037, + "acc_norm_stderr": 0.0277012284685426 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.42487046632124353, + "acc_stderr": 0.0356747133521254, + "acc_norm": 0.42487046632124353, + "acc_norm_stderr": 0.0356747133521254 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.04227054451232199, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.04227054451232199 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.46788990825688076, + "acc_stderr": 0.021393071222680814, + "acc_norm": 0.46788990825688076, + "acc_norm_stderr": 0.021393071222680814 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2698412698412698, + "acc_stderr": 0.03970158273235172, + "acc_norm": 0.2698412698412698, + "acc_norm_stderr": 0.03970158273235172 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4215686274509804, + "acc_stderr": 0.02827549015679143, + "acc_norm": 0.4215686274509804, + "acc_norm_stderr": 0.02827549015679143 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5619834710743802, + "acc_stderr": 0.04529146804435792, + "acc_norm": 0.5619834710743802, + "acc_norm_stderr": 0.04529146804435792 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4342105263157895, + "acc_stderr": 
0.0403356566784832, + "acc_norm": 0.4342105263157895, + "acc_norm_stderr": 0.0403356566784832 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.29901960784313725, + "acc_stderr": 0.018521756215423027, + "acc_norm": 0.29901960784313725, + "acc_norm_stderr": 0.018521756215423027 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.31560283687943264, + "acc_stderr": 0.027724989449509314, + "acc_norm": 0.31560283687943264, + "acc_norm_stderr": 0.027724989449509314 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.20535714285714285, + "acc_stderr": 0.03834241021419073, + "acc_norm": 0.20535714285714285, + "acc_norm_stderr": 0.03834241021419073 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.37962962962962965, + "acc_stderr": 0.03309682581119035, + "acc_norm": 0.37962962962962965, + "acc_norm_stderr": 0.03309682581119035 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.2610294117647059, + "acc_stderr": 0.02667925227010311, + "acc_norm": 0.2610294117647059, + "acc_norm_stderr": 0.02667925227010311 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.45714285714285713, + "acc_stderr": 0.03189141832421397, + "acc_norm": 0.45714285714285713, + "acc_norm_stderr": 0.03189141832421397 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.47257383966244726, + "acc_stderr": 0.03249822718301303, + "acc_norm": 0.47257383966244726, + "acc_norm_stderr": 0.03249822718301303 + }, + 
"harness|ko_mmlu_professional_law|5": { + "acc": 0.3011734028683181, + "acc_stderr": 0.011717148751648435, + "acc_norm": 0.3011734028683181, + "acc_norm_stderr": 0.011717148751648435 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.03410785338904719, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.03410785338904719 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.45454545454545453, + "acc_stderr": 0.038881769216741004, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.038881769216741004 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24724602203182375, + "mc1_stderr": 0.015102404797359649, + "mc2": 0.40735838259254725, + "mc2_stderr": 0.0148987552825206 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4557260920897285, + "acc_stderr": 0.017122829143292648, + "acc_norm": 0.5194805194805194, + "acc_norm_stderr": 0.017177301992342544 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + 
"harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "kiyoonyoo/ko-en-trans-platypus-13b", + "model_sha": "a211ce8adabfe436e59735081efe813176a88e7b", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff 
--git a/kiyoonyoo/ko-platypus-13b-control/result_2023-10-17 01:13:41.json b/kiyoonyoo/ko-platypus-13b-control/result_2023-10-17 01:13:41.json new file mode 100644 index 0000000000000000000000000000000000000000..c163d0068d7075a4ba12ba81709cd73b3b543a74 --- /dev/null +++ b/kiyoonyoo/ko-platypus-13b-control/result_2023-10-17 01:13:41.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.38822525597269625, + "acc_stderr": 0.01424161420741405, + "acc_norm": 0.4283276450511945, + "acc_norm_stderr": 0.01446049636759902 + }, + "harness|ko_hellaswag|10": { + "acc": 0.40360485958972314, + "acc_stderr": 0.004896173035943316, + "acc_norm": 0.5388368850826528, + "acc_norm_stderr": 0.0049747064284342835 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5672514619883041, + "acc_stderr": 0.03799978644370607, + "acc_norm": 0.5672514619883041, + "acc_norm_stderr": 0.03799978644370607 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5145631067961165, + "acc_stderr": 0.04948637324026637, + "acc_norm": 0.5145631067961165, + "acc_norm_stderr": 0.04948637324026637 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5300127713920817, + "acc_stderr": 0.01784772308664907, + "acc_norm": 0.5300127713920817, + "acc_norm_stderr": 0.01784772308664907 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4222222222222222, + "acc_stderr": 0.04266763404099582, + "acc_norm": 0.4222222222222222, + "acc_norm_stderr": 0.04266763404099582 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.33191489361702126, + "acc_stderr": 0.030783736757745667, + "acc_norm": 0.33191489361702126, + "acc_norm_stderr": 0.030783736757745667 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.35542168674698793, + "acc_stderr": 0.03726214354322415, + "acc_norm": 0.35542168674698793, + "acc_norm_stderr": 
0.03726214354322415 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5144694533762058, + "acc_stderr": 0.02838619808417768, + "acc_norm": 0.5144694533762058, + "acc_norm_stderr": 0.02838619808417768 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.43946188340807174, + "acc_stderr": 0.03331092511038179, + "acc_norm": 0.43946188340807174, + "acc_norm_stderr": 0.03331092511038179 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5114503816793893, + "acc_stderr": 0.043841400240780176, + "acc_norm": 0.5114503816793893, + "acc_norm_stderr": 0.043841400240780176 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5, + "acc_stderr": 0.035623524993954825, + "acc_norm": 0.5, + "acc_norm_stderr": 0.035623524993954825 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.38620689655172413, + "acc_stderr": 0.04057324734419034, + "acc_norm": 0.38620689655172413, + "acc_norm_stderr": 0.04057324734419034 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.04158307533083286, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.04158307533083286 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.453781512605042, + "acc_stderr": 0.032339434681820885, + "acc_norm": 0.453781512605042, + "acc_norm_stderr": 0.032339434681820885 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.44358974358974357, + "acc_stderr": 0.02518914989476419, + "acc_norm": 0.44358974358974357, + "acc_norm_stderr": 0.02518914989476419 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 
0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5092592592592593, + "acc_stderr": 0.04832853553437056, + "acc_norm": 0.5092592592592593, + "acc_norm_stderr": 0.04832853553437056 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3645320197044335, + "acc_stderr": 0.0338640574606209, + "acc_norm": 0.3645320197044335, + "acc_norm_stderr": 0.0338640574606209 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4612903225806452, + "acc_stderr": 0.028358634859836918, + "acc_norm": 0.4612903225806452, + "acc_norm_stderr": 0.028358634859836918 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6410256410256411, + "acc_stderr": 0.03142616993791924, + "acc_norm": 0.6410256410256411, + "acc_norm_stderr": 0.03142616993791924 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4, + "acc_stderr": 0.030151134457776292, + "acc_norm": 0.4, + "acc_norm_stderr": 0.030151134457776292 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5181818181818182, + "acc_stderr": 0.04785964010794916, + "acc_norm": 0.5181818181818182, + "acc_norm_stderr": 0.04785964010794916 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.02794045713622841, + "acc_norm": 0.3, + "acc_norm_stderr": 0.02794045713622841 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2781456953642384, + "acc_stderr": 0.03658603262763743, + "acc_norm": 0.2781456953642384, + "acc_norm_stderr": 0.03658603262763743 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5671641791044776, + "acc_stderr": 0.03503490923673282, + "acc_norm": 0.5671641791044776, + "acc_norm_stderr": 0.03503490923673282 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.31213872832369943, + "acc_stderr": 0.035331333893236574, + "acc_norm": 0.31213872832369943, + "acc_norm_stderr": 0.035331333893236574 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2698412698412698, + "acc_stderr": 0.022860838309232072, + "acc_norm": 
0.2698412698412698, + "acc_norm_stderr": 0.022860838309232072 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4513888888888889, + "acc_stderr": 0.04161402398403279, + "acc_norm": 0.4513888888888889, + "acc_norm_stderr": 0.04161402398403279 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.63, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.63, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4913294797687861, + "acc_stderr": 0.026915047355369804, + "acc_norm": 0.4913294797687861, + "acc_norm_stderr": 0.026915047355369804 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.44171779141104295, + "acc_stderr": 0.03901591825836184, + "acc_norm": 0.44171779141104295, + "acc_norm_stderr": 0.03901591825836184 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.42592592592592593, + "acc_stderr": 0.027513747284379414, + "acc_norm": 0.42592592592592593, + "acc_norm_stderr": 0.027513747284379414 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.538860103626943, + "acc_stderr": 0.03597524411734578, + "acc_norm": 0.538860103626943, + "acc_norm_stderr": 0.03597524411734578 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.22807017543859648, + "acc_stderr": 0.03947152782669415, + "acc_norm": 0.22807017543859648, + "acc_norm_stderr": 0.03947152782669415 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5339449541284403, + "acc_stderr": 0.02138786335035399, + "acc_norm": 0.5339449541284403, + "acc_norm_stderr": 0.02138786335035399 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3492063492063492, + "acc_stderr": 0.04263906892795133, + 
"acc_norm": 0.3492063492063492, + "acc_norm_stderr": 0.04263906892795133 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.43790849673202614, + "acc_stderr": 0.02840830202033269, + "acc_norm": 0.43790849673202614, + "acc_norm_stderr": 0.02840830202033269 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6446280991735537, + "acc_stderr": 0.0436923632657398, + "acc_norm": 0.6446280991735537, + "acc_norm_stderr": 0.0436923632657398 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4407894736842105, + "acc_stderr": 0.04040311062490436, + "acc_norm": 0.4407894736842105, + "acc_norm_stderr": 0.04040311062490436 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3627450980392157, + "acc_stderr": 0.019450768432505518, + "acc_norm": 0.3627450980392157, + "acc_norm_stderr": 0.019450768432505518 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.31560283687943264, + "acc_stderr": 0.027724989449509317, + "acc_norm": 0.31560283687943264, + "acc_norm_stderr": 0.027724989449509317 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.24107142857142858, + "acc_stderr": 0.04059867246952687, + "acc_norm": 0.24107142857142858, + "acc_norm_stderr": 0.04059867246952687 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.35648148148148145, + "acc_stderr": 0.032664783315272714, + "acc_norm": 0.35648148148148145, + "acc_norm_stderr": 0.032664783315272714 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24692737430167597, + "acc_stderr": 0.014422292204808852, + "acc_norm": 0.24692737430167597, + "acc_norm_stderr": 0.014422292204808852 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.46, + "acc_stderr": 
0.05009082659620333, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3713235294117647, + "acc_stderr": 0.02934980313976587, + "acc_norm": 0.3713235294117647, + "acc_norm_stderr": 0.02934980313976587 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.40816326530612246, + "acc_stderr": 0.03146465712827424, + "acc_norm": 0.40816326530612246, + "acc_norm_stderr": 0.03146465712827424 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6329113924050633, + "acc_stderr": 0.03137624072561619, + "acc_norm": 0.6329113924050633, + "acc_norm_stderr": 0.03137624072561619 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.37614080834419816, + "acc_stderr": 0.012372214430599819, + "acc_norm": 0.37614080834419816, + "acc_norm_stderr": 0.012372214430599819 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4852941176470588, + "acc_stderr": 0.03507793834791324, + "acc_norm": 0.4852941176470588, + "acc_norm_stderr": 0.03507793834791324 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5333333333333333, + "acc_stderr": 0.03895658065271847, + "acc_norm": 0.5333333333333333, + "acc_norm_stderr": 0.03895658065271847 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.27539779681762544, + "mc1_stderr": 0.015638135667775523, + "mc2": 0.4343237644069022, + "mc2_stderr": 0.015029108040608447 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3872491145218418, + "acc_stderr": 0.016747577991642792, + "acc_norm": 0.4628099173553719, + "acc_norm_stderr": 0.0171427361176433 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + 
"harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 
1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "kiyoonyoo/ko-platypus-13b-control", + "model_sha": "6cdc49b0713c6d4ad656fe98f5be7eccb1d8b4ef", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/kmyoon/mzllm-solar-10.7B/result_2024-02-08 14:28:21.json b/kmyoon/mzllm-solar-10.7B/result_2024-02-08 14:28:21.json new file mode 100644 index 0000000000000000000000000000000000000000..611db1a1faef073878525fe6def761e3cb3cbf83 --- /dev/null +++ b/kmyoon/mzllm-solar-10.7B/result_2024-02-08 14:28:21.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2030716723549488, + "acc_stderr": 0.011755899303705582, + "acc_norm": 0.2568259385665529, + "acc_norm_stderr": 0.012766923794116801 + }, + "harness|ko_hellaswag|10": { + "acc": 0.2861979685321649, + "acc_stderr": 0.004510593395289893, + "acc_norm": 0.31617207727544316, + "acc_norm_stderr": 0.004640306719628064 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.03615507630310933, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.03615507630310933 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.17475728155339806, + "acc_stderr": 0.037601780060266196, + "acc_norm": 0.17475728155339806, + "acc_norm_stderr": 0.037601780060266196 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.26309067688378035, + "acc_stderr": 0.015745497169049046, + "acc_norm": 0.26309067688378035, + "acc_norm_stderr": 0.015745497169049046 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2962962962962963, + 
"acc_stderr": 0.03944624162501116, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.03944624162501116 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.22, + "acc_stderr": 0.0416333199893227, + "acc_norm": 0.22, + "acc_norm_stderr": 0.0416333199893227 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.24680851063829787, + "acc_stderr": 0.028185441301234123, + "acc_norm": 0.24680851063829787, + "acc_norm_stderr": 0.028185441301234123 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.29518072289156627, + "acc_stderr": 0.03550920185689631, + "acc_norm": 0.29518072289156627, + "acc_norm_stderr": 0.03550920185689631 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.1864951768488746, + "acc_stderr": 0.022122439772480757, + "acc_norm": 0.1864951768488746, + "acc_norm_stderr": 0.022122439772480757 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.31390134529147984, + "acc_stderr": 0.031146796482972465, + "acc_norm": 0.31390134529147984, + "acc_norm_stderr": 0.031146796482972465 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2595419847328244, + "acc_stderr": 0.03844876139785271, + "acc_norm": 0.2595419847328244, + "acc_norm_stderr": 0.03844876139785271 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.18181818181818182, + "acc_stderr": 0.027479603010538797, + "acc_norm": 0.18181818181818182, + "acc_norm_stderr": 0.027479603010538797 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.22758620689655173, + "acc_stderr": 0.03493950380131184, + "acc_norm": 0.22758620689655173, + "acc_norm_stderr": 0.03493950380131184 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.19607843137254902, + "acc_stderr": 0.03950581861179963, + "acc_norm": 0.19607843137254902, + "acc_norm_stderr": 0.03950581861179963 + }, + "harness|ko_mmlu_high_school_microeconomics|5": 
{ + "acc": 0.24369747899159663, + "acc_stderr": 0.027886828078380554, + "acc_norm": 0.24369747899159663, + "acc_norm_stderr": 0.027886828078380554 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2128205128205128, + "acc_stderr": 0.020752423722128002, + "acc_norm": 0.2128205128205128, + "acc_norm_stderr": 0.020752423722128002 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421255, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421255 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.17, + "acc_stderr": 0.03775251680686371, + "acc_norm": 0.17, + "acc_norm_stderr": 0.03775251680686371 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.04236511258094633, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.04236511258094633 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2413793103448276, + "acc_stderr": 0.03010833071801162, + "acc_norm": 0.2413793103448276, + "acc_norm_stderr": 0.03010833071801162 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2709677419354839, + "acc_stderr": 0.025284416114900156, + "acc_norm": 0.2709677419354839, + "acc_norm_stderr": 0.025284416114900156 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.21794871794871795, + "acc_stderr": 0.02704685763071667, + "acc_norm": 0.21794871794871795, + "acc_norm_stderr": 0.02704685763071667 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.22264150943396227, + "acc_stderr": 0.025604233470899095, + "acc_norm": 0.22264150943396227, + "acc_norm_stderr": 0.025604233470899095 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03955932861795833, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03955932861795833 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.23703703703703705, + "acc_stderr": 0.025928876132766118, + "acc_norm": 0.23703703703703705, + "acc_norm_stderr": 
0.025928876132766118 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2185430463576159, + "acc_stderr": 0.03374235550425694, + "acc_norm": 0.2185430463576159, + "acc_norm_stderr": 0.03374235550425694 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.21393034825870647, + "acc_stderr": 0.028996909693328923, + "acc_norm": 0.21393034825870647, + "acc_norm_stderr": 0.028996909693328923 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2254335260115607, + "acc_stderr": 0.03186209851641143, + "acc_norm": 0.2254335260115607, + "acc_norm_stderr": 0.03186209851641143 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2566137566137566, + "acc_stderr": 0.022494510767503154, + "acc_norm": 0.2566137566137566, + "acc_norm_stderr": 0.022494510767503154 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.22916666666666666, + "acc_stderr": 0.03514697467862388, + "acc_norm": 0.22916666666666666, + "acc_norm_stderr": 0.03514697467862388 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.24855491329479767, + "acc_stderr": 0.023267528432100174, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 0.023267528432100174 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2147239263803681, + "acc_stderr": 0.03226219377286774, + "acc_norm": 0.2147239263803681, + "acc_norm_stderr": 0.03226219377286774 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.023132376234543353, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.023132376234543353 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.24, + "acc_norm_stderr": 
0.04292346959909283 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.20725388601036268, + "acc_stderr": 0.02925282329180363, + "acc_norm": 0.20725388601036268, + "acc_norm_stderr": 0.02925282329180363 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.04049339297748141, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.04049339297748141 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.1944954128440367, + "acc_stderr": 0.016970289090458054, + "acc_norm": 0.1944954128440367, + "acc_norm_stderr": 0.016970289090458054 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.15873015873015872, + "acc_stderr": 0.03268454013011745, + "acc_norm": 0.15873015873015872, + "acc_norm_stderr": 0.03268454013011745 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.1830065359477124, + "acc_stderr": 0.02214076751288096, + "acc_norm": 0.1830065359477124, + "acc_norm_stderr": 0.02214076751288096 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.256198347107438, + "acc_stderr": 0.03984979653302872, + "acc_norm": 0.256198347107438, + "acc_norm_stderr": 0.03984979653302872 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.19078947368421054, + "acc_stderr": 0.031975658210325, + "acc_norm": 0.19078947368421054, + "acc_norm_stderr": 0.031975658210325 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.23366013071895425, + "acc_stderr": 0.017119158496044506, + "acc_norm": 0.23366013071895425, + "acc_norm_stderr": 0.017119158496044506 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.22695035460992907, + "acc_stderr": 0.024987106365642973, + "acc_norm": 0.22695035460992907, + "acc_norm_stderr": 0.024987106365642973 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.1875, + "acc_stderr": 
0.0370468111477387, + "acc_norm": 0.1875, + "acc_norm_stderr": 0.0370468111477387 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2916666666666667, + "acc_stderr": 0.030998666304560534, + "acc_norm": 0.2916666666666667, + "acc_norm_stderr": 0.030998666304560534 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24134078212290502, + "acc_stderr": 0.014310999547961459, + "acc_norm": 0.24134078212290502, + "acc_norm_stderr": 0.014310999547961459 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.23161764705882354, + "acc_stderr": 0.025626533803777565, + "acc_norm": 0.23161764705882354, + "acc_norm_stderr": 0.025626533803777565 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.24081632653061225, + "acc_stderr": 0.027372942201788163, + "acc_norm": 0.24081632653061225, + "acc_norm_stderr": 0.027372942201788163 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.23628691983122363, + "acc_stderr": 0.02765215314415927, + "acc_norm": 0.23628691983122363, + "acc_norm_stderr": 0.02765215314415927 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.24511082138200782, + "acc_stderr": 0.01098630787004552, + "acc_norm": 0.24511082138200782, + "acc_norm_stderr": 0.01098630787004552 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.030964517926923403, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.030964517926923403 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03225078108306289, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03225078108306289 + }, + 
"harness|ko_truthfulqa_mc|0": { + "mc1": 0.2778457772337821, + "mc1_stderr": 0.015680929364024637, + "mc2": 0.47485915543844903, + "mc2_stderr": 0.01615839147593516 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.269185360094451, + "acc_stderr": 0.015249098024144526, + "acc_norm": 0.3789846517119244, + "acc_norm_stderr": 0.016679260684229286 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "kmyoon/mzllm-solar-10.7B", + "model_sha": "d136205c8edaf507528bfd8f0b59590f726a4aea", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/knlp/KS-SOLAR-10.7B-v0.1/result_2024-02-14 08:13:17.json b/knlp/KS-SOLAR-10.7B-v0.1/result_2024-02-14 08:13:17.json new file mode 100644 index 0000000000000000000000000000000000000000..66a18b98444c85c4c8eb213228e2331822a7043a --- /dev/null +++ b/knlp/KS-SOLAR-10.7B-v0.1/result_2024-02-14 08:13:17.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3148464163822526, + "acc_stderr": 0.013572657703084948, + "acc_norm": 0.37627986348122866, + "acc_norm_stderr": 0.014157022555407161 + }, + "harness|ko_hellaswag|10": { + "acc": 0.36207926707827126, + "acc_stderr": 
0.004796193584930079, + "acc_norm": 0.47400916152160927, + "acc_norm_stderr": 0.004983035420235714 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.49707602339181284, + "acc_stderr": 0.03834759370936839, + "acc_norm": 0.49707602339181284, + "acc_norm_stderr": 0.03834759370936839 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5339805825242718, + "acc_stderr": 0.0493929144727348, + "acc_norm": 0.5339805825242718, + "acc_norm_stderr": 0.0493929144727348 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5427841634738186, + "acc_stderr": 0.01781438523853445, + "acc_norm": 0.5427841634738186, + "acc_norm_stderr": 0.01781438523853445 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34814814814814815, + "acc_stderr": 0.041153246103369526, + "acc_norm": 0.34814814814814815, + "acc_norm_stderr": 0.041153246103369526 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39574468085106385, + "acc_stderr": 0.03196758697835363, + "acc_norm": 0.39574468085106385, + "acc_norm_stderr": 0.03196758697835363 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.41566265060240964, + "acc_stderr": 0.038367221765980515, + "acc_norm": 0.41566265060240964, + "acc_norm_stderr": 0.038367221765980515 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5048231511254019, + "acc_stderr": 0.028396770444111298, + "acc_norm": 0.5048231511254019, + "acc_norm_stderr": 0.028396770444111298 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5022421524663677, + "acc_stderr": 0.033557465352232634, + "acc_norm": 0.5022421524663677, + "acc_norm_stderr": 0.033557465352232634 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3893129770992366, + "acc_stderr": 0.04276486542814591, + "acc_norm": 0.3893129770992366, + "acc_norm_stderr": 0.04276486542814591 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.36, + 
"acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5757575757575758, + "acc_stderr": 0.03521224908841585, + "acc_norm": 0.5757575757575758, + "acc_norm_stderr": 0.03521224908841585 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.36551724137931035, + "acc_stderr": 0.04013124195424386, + "acc_norm": 0.36551724137931035, + "acc_norm_stderr": 0.04013124195424386 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237655, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237655 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.032252942323996406, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.032252942323996406 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.45897435897435895, + "acc_stderr": 0.025265525491284295, + "acc_norm": 0.45897435897435895, + "acc_norm_stderr": 0.025265525491284295 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.04793724854411019, + "acc_norm": 0.35, + "acc_norm_stderr": 0.04793724854411019 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5092592592592593, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.5092592592592593, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3793103448275862, + "acc_stderr": 0.034139638059062345, + "acc_norm": 0.3793103448275862, + "acc_norm_stderr": 0.034139638059062345 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.49032258064516127, + "acc_stderr": 0.028438677998909558, + "acc_norm": 0.49032258064516127, + "acc_norm_stderr": 0.028438677998909558 + }, + 
"harness|ko_mmlu_marketing|5": { + "acc": 0.7307692307692307, + "acc_stderr": 0.029058588303748845, + "acc_norm": 0.7307692307692307, + "acc_norm_stderr": 0.029058588303748845 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4377358490566038, + "acc_stderr": 0.030533338430467516, + "acc_norm": 0.4377358490566038, + "acc_norm_stderr": 0.030533338430467516 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5545454545454546, + "acc_stderr": 0.04760548821460325, + "acc_norm": 0.5545454545454546, + "acc_norm_stderr": 0.04760548821460325 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3074074074074074, + "acc_stderr": 0.028133252578815646, + "acc_norm": 0.3074074074074074, + "acc_norm_stderr": 0.028133252578815646 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.03780445850526733, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.03780445850526733 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5472636815920398, + "acc_stderr": 0.035197027175769155, + "acc_norm": 0.5472636815920398, + "acc_norm_stderr": 0.035197027175769155 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3872832369942196, + "acc_stderr": 0.03714325906302065, + "acc_norm": 0.3872832369942196, + "acc_norm_stderr": 0.03714325906302065 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.36507936507936506, + "acc_stderr": 0.024796060602699947, + "acc_norm": 0.36507936507936506, + "acc_norm_stderr": 0.024796060602699947 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.375, + "acc_stderr": 0.04048439222695598, + "acc_norm": 0.375, + "acc_norm_stderr": 0.04048439222695598 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 
+ }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.48554913294797686, + "acc_stderr": 0.02690784985628254, + "acc_norm": 0.48554913294797686, + "acc_norm_stderr": 0.02690784985628254 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4539877300613497, + "acc_stderr": 0.0391170190467718, + "acc_norm": 0.4539877300613497, + "acc_norm_stderr": 0.0391170190467718 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5030864197530864, + "acc_stderr": 0.027820214158594377, + "acc_norm": 0.5030864197530864, + "acc_norm_stderr": 0.027820214158594377 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5544041450777202, + "acc_stderr": 0.035870149860756595, + "acc_norm": 0.5544041450777202, + "acc_norm_stderr": 0.035870149860756595 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.39473684210526316, + "acc_stderr": 0.045981880578165414, + "acc_norm": 0.39473684210526316, + "acc_norm_stderr": 0.045981880578165414 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5302752293577981, + "acc_stderr": 0.021397988604936965, + "acc_norm": 0.5302752293577981, + "acc_norm_stderr": 0.021397988604936965 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3412698412698413, + "acc_stderr": 0.04240799327574924, + "acc_norm": 0.3412698412698413, + "acc_norm_stderr": 0.04240799327574924 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.028431095444176647, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.028431095444176647 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.47, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.47, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6859504132231405, + "acc_stderr": 0.042369647530410184, + "acc_norm": 0.6859504132231405, + 
"acc_norm_stderr": 0.042369647530410184 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4407894736842105, + "acc_stderr": 0.04040311062490436, + "acc_norm": 0.4407894736842105, + "acc_norm_stderr": 0.04040311062490436 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.41830065359477125, + "acc_stderr": 0.019955975145835542, + "acc_norm": 0.41830065359477125, + "acc_norm_stderr": 0.019955975145835542 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.028121636040639865, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.028121636040639865 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.39285714285714285, + "acc_stderr": 0.04635550135609976, + "acc_norm": 0.39285714285714285, + "acc_norm_stderr": 0.04635550135609976 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.03293377139415191, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.03293377139415191 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.293854748603352, + "acc_stderr": 0.015235075776719616, + "acc_norm": 0.293854748603352, + "acc_norm_stderr": 0.015235075776719616 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.58, + "acc_stderr": 0.04960449637488583, + "acc_norm": 0.58, + "acc_norm_stderr": 0.04960449637488583 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3125, + "acc_stderr": 0.02815637344037142, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.02815637344037142 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.40408163265306124, + "acc_stderr": 0.0314147080258659, + "acc_norm": 0.40408163265306124, + "acc_norm_stderr": 0.0314147080258659 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6118143459915611, + "acc_stderr": 
0.03172295004332329, + "acc_norm": 0.6118143459915611, + "acc_norm_stderr": 0.03172295004332329 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3878748370273794, + "acc_stderr": 0.012444998309675616, + "acc_norm": 0.3878748370273794, + "acc_norm_stderr": 0.012444998309675616 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5294117647058824, + "acc_stderr": 0.03503235296367993, + "acc_norm": 0.5294117647058824, + "acc_norm_stderr": 0.03503235296367993 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5818181818181818, + "acc_stderr": 0.03851716319398394, + "acc_norm": 0.5818181818181818, + "acc_norm_stderr": 0.03851716319398394 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2717258261933905, + "mc1_stderr": 0.015572840452875833, + "mc2": 0.4367172851380593, + "mc2_stderr": 0.016347790443755714 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3837072018890201, + "acc_stderr": 0.01671892463723182, + "acc_norm": 0.4085005903187721, + "acc_norm_stderr": 0.016900062879427115 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + 
"harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "knlp/KS-SOLAR-10.7B-v0.1", + "model_sha": "6db04dbc54551ff57c802f511dc8da4c37e6861b", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + 
"override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/kodonho/Solar-OrcaDPO-Solar-Instruct-SLERP/result_2024-01-22 00:41:51.json b/kodonho/Solar-OrcaDPO-Solar-Instruct-SLERP/result_2024-01-22 00:41:51.json new file mode 100644 index 0000000000000000000000000000000000000000..a652ea04ad10cd49dd9877978a2b838d7711e596 --- /dev/null +++ b/kodonho/Solar-OrcaDPO-Solar-Instruct-SLERP/result_2024-01-22 00:41:51.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3796928327645051, + "acc_stderr": 0.014182119866974874, + "acc_norm": 0.4684300341296928, + "acc_norm_stderr": 0.014582236460866978 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3994224258115913, + "acc_stderr": 0.004887787255353492, + "acc_norm": 0.5333598884684326, + "acc_norm_stderr": 0.004978662946687277 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5614035087719298, + "acc_stderr": 0.038057975055904594, + "acc_norm": 0.5614035087719298, + "acc_norm_stderr": 0.038057975055904594 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6310679611650486, + "acc_stderr": 0.0477761518115674, + "acc_norm": 0.6310679611650486, + "acc_norm_stderr": 0.0477761518115674 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5874840357598978, + "acc_stderr": 0.017604149108671918, + "acc_norm": 0.5874840357598978, + "acc_norm_stderr": 0.017604149108671918 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.362962962962963, + "acc_stderr": 0.041539484047424004, + "acc_norm": 0.362962962962963, + "acc_norm_stderr": 0.041539484047424004 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.41702127659574467, + "acc_stderr": 0.03223276266711712, + "acc_norm": 0.41702127659574467, + "acc_norm_stderr": 0.03223276266711712 + }, + "harness|ko_mmlu_virology|5": { + "acc": 
0.42168674698795183, + "acc_stderr": 0.038444531817709175, + "acc_norm": 0.42168674698795183, + "acc_norm_stderr": 0.038444531817709175 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5627009646302251, + "acc_stderr": 0.028173917761762906, + "acc_norm": 0.5627009646302251, + "acc_norm_stderr": 0.028173917761762906 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5291479820627802, + "acc_stderr": 0.03350073248773404, + "acc_norm": 0.5291479820627802, + "acc_norm_stderr": 0.03350073248773404 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5572519083969466, + "acc_stderr": 0.043564472026650695, + "acc_norm": 0.5572519083969466, + "acc_norm_stderr": 0.043564472026650695 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6818181818181818, + "acc_stderr": 0.03318477333845331, + "acc_norm": 0.6818181818181818, + "acc_norm_stderr": 0.03318477333845331 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.47586206896551725, + "acc_stderr": 0.0416180850350153, + "acc_norm": 0.47586206896551725, + "acc_norm_stderr": 0.0416180850350153 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3235294117647059, + "acc_stderr": 0.04655010411319616, + "acc_norm": 0.3235294117647059, + "acc_norm_stderr": 0.04655010411319616 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5630252100840336, + "acc_stderr": 0.03221943636566197, + "acc_norm": 0.5630252100840336, + "acc_norm_stderr": 0.03221943636566197 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5205128205128206, + "acc_stderr": 0.02532966316348994, + "acc_norm": 0.5205128205128206, + "acc_norm_stderr": 0.02532966316348994 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + 
"harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6203703703703703, + "acc_stderr": 0.04691521224077742, + "acc_norm": 0.6203703703703703, + "acc_norm_stderr": 0.04691521224077742 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.37438423645320196, + "acc_stderr": 0.03405155380561952, + "acc_norm": 0.37438423645320196, + "acc_norm_stderr": 0.03405155380561952 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5064516129032258, + "acc_stderr": 0.02844163823354051, + "acc_norm": 0.5064516129032258, + "acc_norm_stderr": 0.02844163823354051 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7564102564102564, + "acc_stderr": 0.02812096650391439, + "acc_norm": 0.7564102564102564, + "acc_norm_stderr": 0.02812096650391439 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.46037735849056605, + "acc_stderr": 0.030676096599389177, + "acc_norm": 0.46037735849056605, + "acc_norm_stderr": 0.030676096599389177 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5181818181818182, + "acc_stderr": 0.04785964010794916, + "acc_norm": 0.5181818181818182, + "acc_norm_stderr": 0.04785964010794916 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.32592592592592595, + "acc_stderr": 0.02857834836547308, + "acc_norm": 0.32592592592592595, + "acc_norm_stderr": 0.02857834836547308 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.038020397601079024, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.038020397601079024 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.033333333333333326, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.033333333333333326 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.47398843930635837, + "acc_stderr": 0.03807301726504511, + "acc_norm": 
0.47398843930635837, + "acc_norm_stderr": 0.03807301726504511 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.37566137566137564, + "acc_stderr": 0.024942368931159788, + "acc_norm": 0.37566137566137564, + "acc_norm_stderr": 0.024942368931159788 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4652777777777778, + "acc_stderr": 0.04171115858181618, + "acc_norm": 0.4652777777777778, + "acc_norm_stderr": 0.04171115858181618 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.69, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.69, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5722543352601156, + "acc_stderr": 0.026636539741116093, + "acc_norm": 0.5722543352601156, + "acc_norm_stderr": 0.026636539741116093 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5337423312883436, + "acc_stderr": 0.039194155450484096, + "acc_norm": 0.5337423312883436, + "acc_norm_stderr": 0.039194155450484096 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5648148148148148, + "acc_stderr": 0.0275860062216077, + "acc_norm": 0.5648148148148148, + "acc_norm_stderr": 0.0275860062216077 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6735751295336787, + "acc_stderr": 0.033840286211432945, + "acc_norm": 0.6735751295336787, + "acc_norm_stderr": 0.033840286211432945 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.4298245614035088, + "acc_stderr": 0.04657047260594964, + "acc_norm": 0.4298245614035088, + "acc_norm_stderr": 0.04657047260594964 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6165137614678899, + "acc_stderr": 
0.020847156641915977, + "acc_norm": 0.6165137614678899, + "acc_norm_stderr": 0.020847156641915977 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.40476190476190477, + "acc_stderr": 0.0439025926537756, + "acc_norm": 0.40476190476190477, + "acc_norm_stderr": 0.0439025926537756 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5294117647058824, + "acc_stderr": 0.028580341065138296, + "acc_norm": 0.5294117647058824, + "acc_norm_stderr": 0.028580341065138296 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.55, + "acc_stderr": 0.05, + "acc_norm": 0.55, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.71900826446281, + "acc_stderr": 0.04103203830514511, + "acc_norm": 0.71900826446281, + "acc_norm_stderr": 0.04103203830514511 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5328947368421053, + "acc_stderr": 0.040601270352363966, + "acc_norm": 0.5328947368421053, + "acc_norm_stderr": 0.040601270352363966 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4803921568627451, + "acc_stderr": 0.020212274976302954, + "acc_norm": 0.4803921568627451, + "acc_norm_stderr": 0.020212274976302954 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.37943262411347517, + "acc_stderr": 0.02894733885161411, + "acc_norm": 0.37943262411347517, + "acc_norm_stderr": 0.02894733885161411 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.39285714285714285, + "acc_stderr": 0.04635550135609976, + "acc_norm": 0.39285714285714285, + "acc_norm_stderr": 0.04635550135609976 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4583333333333333, + "acc_stderr": 0.03398110890294636, + "acc_norm": 0.4583333333333333, + "acc_norm_stderr": 0.03398110890294636 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.31620111731843575, + "acc_stderr": 0.015551673652172556, + "acc_norm": 0.31620111731843575, + "acc_norm_stderr": 0.015551673652172556 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 
0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.7, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.7, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4742647058823529, + "acc_stderr": 0.030332578094555033, + "acc_norm": 0.4742647058823529, + "acc_norm_stderr": 0.030332578094555033 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6122448979591837, + "acc_stderr": 0.031192230726795656, + "acc_norm": 0.6122448979591837, + "acc_norm_stderr": 0.031192230726795656 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.030685820596610798, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.030685820596610798 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3859191655801825, + "acc_stderr": 0.012433398911476141, + "acc_norm": 0.3859191655801825, + "acc_norm_stderr": 0.012433398911476141 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5882352941176471, + "acc_stderr": 0.034542365853806094, + "acc_norm": 0.5882352941176471, + "acc_norm_stderr": 0.034542365853806094 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6060606060606061, + "acc_stderr": 0.03815494308688929, + "acc_norm": 0.6060606060606061, + "acc_norm_stderr": 0.03815494308688929 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3733170134638923, + "mc1_stderr": 0.016932370557570627, + "mc2": 0.5352077685090756, + "mc2_stderr": 0.016487184018491113 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4817001180637544, + "acc_stderr": 0.017178836639177762, + "acc_norm": 0.4817001180637544, + "acc_norm_stderr": 0.017178836639177762 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + 
"harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 
1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "kodonho/Solar-OrcaDPO-Solar-Instruct-SLERP", + "model_sha": "c1323ff9e1fd44dbadfabd793b341d7f20814068", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/kodonho/SolarM-SakuraSolar-SLERP/result_2024-01-12 02:13:11.json b/kodonho/SolarM-SakuraSolar-SLERP/result_2024-01-12 02:13:11.json new file mode 100644 index 0000000000000000000000000000000000000000..d628e32abe71fbedd12b2a8e37102e29bea24916 --- /dev/null +++ b/kodonho/SolarM-SakuraSolar-SLERP/result_2024-01-12 02:13:11.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.38822525597269625, + "acc_stderr": 0.014241614207414053, + "acc_norm": 0.4812286689419795, + "acc_norm_stderr": 0.014601090150633964 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4017128062139016, + "acc_stderr": 0.004892425356375709, + "acc_norm": 0.5400318661621191, + "acc_norm_stderr": 0.004973762948302803 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5614035087719298, + "acc_stderr": 0.0380579750559046, + "acc_norm": 0.5614035087719298, + "acc_norm_stderr": 0.0380579750559046 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6310679611650486, + "acc_stderr": 0.0477761518115674, + "acc_norm": 0.6310679611650486, + "acc_norm_stderr": 0.0477761518115674 + }, + "harness|ko_mmlu_miscellaneous|5": { + 
"acc": 0.5938697318007663, + "acc_stderr": 0.017562037406478933, + "acc_norm": 0.5938697318007663, + "acc_norm_stderr": 0.017562037406478933 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37777777777777777, + "acc_stderr": 0.04188307537595853, + "acc_norm": 0.37777777777777777, + "acc_norm_stderr": 0.04188307537595853 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4127659574468085, + "acc_stderr": 0.03218471141400351, + "acc_norm": 0.4127659574468085, + "acc_norm_stderr": 0.03218471141400351 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42771084337349397, + "acc_stderr": 0.038515976837185335, + "acc_norm": 0.42771084337349397, + "acc_norm_stderr": 0.038515976837185335 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5691318327974276, + "acc_stderr": 0.02812534098397271, + "acc_norm": 0.5691318327974276, + "acc_norm_stderr": 0.02812534098397271 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5201793721973094, + "acc_stderr": 0.033530461674123005, + "acc_norm": 0.5201793721973094, + "acc_norm_stderr": 0.033530461674123005 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5877862595419847, + "acc_stderr": 0.04317171194870255, + "acc_norm": 0.5877862595419847, + "acc_norm_stderr": 0.04317171194870255 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6767676767676768, + "acc_stderr": 0.033322999210706444, + "acc_norm": 0.6767676767676768, + "acc_norm_stderr": 0.033322999210706444 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.47586206896551725, + "acc_stderr": 0.0416180850350153, + "acc_norm": 0.47586206896551725, + "acc_norm_stderr": 0.0416180850350153 + }, + 
"harness|ko_mmlu_college_physics|5": { + "acc": 0.3431372549019608, + "acc_stderr": 0.04724007352383888, + "acc_norm": 0.3431372549019608, + "acc_norm_stderr": 0.04724007352383888 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5588235294117647, + "acc_stderr": 0.0322529423239964, + "acc_norm": 0.5588235294117647, + "acc_norm_stderr": 0.0322529423239964 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5102564102564102, + "acc_stderr": 0.025345672221942374, + "acc_norm": 0.5102564102564102, + "acc_norm_stderr": 0.025345672221942374 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.57, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.57, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6018518518518519, + "acc_stderr": 0.04732332615978814, + "acc_norm": 0.6018518518518519, + "acc_norm_stderr": 0.04732332615978814 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3842364532019704, + "acc_stderr": 0.0342239856565755, + "acc_norm": 0.3842364532019704, + "acc_norm_stderr": 0.0342239856565755 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5258064516129032, + "acc_stderr": 0.028406095057653326, + "acc_norm": 0.5258064516129032, + "acc_norm_stderr": 0.028406095057653326 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7692307692307693, + "acc_stderr": 0.027601921381417618, + "acc_norm": 0.7692307692307693, + "acc_norm_stderr": 0.027601921381417618 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.47547169811320755, + "acc_stderr": 0.030735822206205608, + "acc_norm": 0.47547169811320755, + "acc_norm_stderr": 0.030735822206205608 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5363636363636364, + "acc_stderr": 0.04776449162396197, + "acc_norm": 0.5363636363636364, + 
"acc_norm_stderr": 0.04776449162396197 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3037037037037037, + "acc_stderr": 0.02803792996911499, + "acc_norm": 0.3037037037037037, + "acc_norm_stderr": 0.02803792996911499 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.038020397601079024, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.038020397601079024 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.681592039800995, + "acc_stderr": 0.03294118479054095, + "acc_norm": 0.681592039800995, + "acc_norm_stderr": 0.03294118479054095 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4508670520231214, + "acc_stderr": 0.03794012674697029, + "acc_norm": 0.4508670520231214, + "acc_norm_stderr": 0.03794012674697029 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.025107425481137282, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.025107425481137282 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5069444444444444, + "acc_stderr": 0.04180806750294938, + "acc_norm": 0.5069444444444444, + "acc_norm_stderr": 0.04180806750294938 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.67, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.67, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.569364161849711, + "acc_stderr": 0.026658800273672376, + "acc_norm": 0.569364161849711, + "acc_norm_stderr": 0.026658800273672376 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5398773006134969, + "acc_stderr": 0.03915857291436972, + "acc_norm": 0.5398773006134969, + "acc_norm_stderr": 0.03915857291436972 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.595679012345679, + "acc_stderr": 0.02730662529732768, + "acc_norm": 
0.595679012345679, + "acc_norm_stderr": 0.02730662529732768 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.689119170984456, + "acc_stderr": 0.03340361906276586, + "acc_norm": 0.689119170984456, + "acc_norm_stderr": 0.03340361906276586 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.4298245614035088, + "acc_stderr": 0.04657047260594964, + "acc_norm": 0.4298245614035088, + "acc_norm_stderr": 0.04657047260594964 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6256880733944954, + "acc_stderr": 0.020748959408988313, + "acc_norm": 0.6256880733944954, + "acc_norm_stderr": 0.020748959408988313 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.0442626668137991, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.0442626668137991 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5228758169934641, + "acc_stderr": 0.028599936776089782, + "acc_norm": 0.5228758169934641, + "acc_norm_stderr": 0.028599936776089782 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.57, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.57, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7272727272727273, + "acc_stderr": 0.04065578140908705, + "acc_norm": 0.7272727272727273, + "acc_norm_stderr": 0.04065578140908705 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5460526315789473, + "acc_stderr": 0.04051646342874143, + "acc_norm": 0.5460526315789473, + "acc_norm_stderr": 0.04051646342874143 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.47549019607843135, + "acc_stderr": 0.020203517280261436, + "acc_norm": 0.47549019607843135, + "acc_norm_stderr": 0.020203517280261436 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.36879432624113473, + 
"acc_stderr": 0.028782227561347233, + "acc_norm": 0.36879432624113473, + "acc_norm_stderr": 0.028782227561347233 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.39285714285714285, + "acc_stderr": 0.04635550135609976, + "acc_norm": 0.39285714285714285, + "acc_norm_stderr": 0.04635550135609976 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4398148148148148, + "acc_stderr": 0.03385177976044809, + "acc_norm": 0.4398148148148148, + "acc_norm_stderr": 0.03385177976044809 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.3340782122905028, + "acc_stderr": 0.015774911422381625, + "acc_norm": 0.3340782122905028, + "acc_norm_stderr": 0.015774911422381625 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.68, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.68, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5, + "acc_stderr": 0.030372836961539352, + "acc_norm": 0.5, + "acc_norm_stderr": 0.030372836961539352 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6326530612244898, + "acc_stderr": 0.030862144921087555, + "acc_norm": 0.6326530612244898, + "acc_norm_stderr": 0.030862144921087555 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6708860759493671, + "acc_stderr": 0.030587326294702368, + "acc_norm": 0.6708860759493671, + "acc_norm_stderr": 0.030587326294702368 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.39048239895697523, + "acc_stderr": 0.012460135913945068, + "acc_norm": 0.39048239895697523, + "acc_norm_stderr": 0.012460135913945068 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5931372549019608, + "acc_stderr": 0.03447891136353382, + "acc_norm": 0.5931372549019608, + "acc_norm_stderr": 0.03447891136353382 + }, + "harness|ko_mmlu_high_school_european_history|5": { + 
"acc": 0.6242424242424243, + "acc_stderr": 0.03781887353205983, + "acc_norm": 0.6242424242424243, + "acc_norm_stderr": 0.03781887353205983 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3708690330477356, + "mc1_stderr": 0.016909693580248804, + "mc2": 0.5312670028928019, + "mc2_stderr": 0.016467087260091346 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.48760330578512395, + "acc_stderr": 0.017185069732676538, + "acc_norm": 0.4899645808736718, + "acc_norm_stderr": 0.01718689128689406 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + 
"harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "kodonho/SolarM-SakuraSolar-SLERP", + "model_sha": "46577d94b927e71780a5e70d200d3586b53438a0", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/kodonho/llama2-chat-koalpaca/result_2024-01-07 05:59:12.json b/kodonho/llama2-chat-koalpaca/result_2024-01-07 05:59:12.json new file mode 100644 index 0000000000000000000000000000000000000000..cd1bab9badb655e38f84ae32aafeb22278e70963 --- /dev/null +++ b/kodonho/llama2-chat-koalpaca/result_2024-01-07 05:59:12.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.25853242320819114, + "acc_stderr": 
0.012794553754288679, + "acc_norm": 0.3046075085324232, + "acc_norm_stderr": 0.013449522109932492 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3233419637522406, + "acc_stderr": 0.004667960519938638, + "acc_norm": 0.39145588528181635, + "acc_norm_stderr": 0.004870785036708286 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.39766081871345027, + "acc_stderr": 0.0375363895576169, + "acc_norm": 0.39766081871345027, + "acc_norm_stderr": 0.0375363895576169 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2912621359223301, + "acc_stderr": 0.044986763205729224, + "acc_norm": 0.2912621359223301, + "acc_norm_stderr": 0.044986763205729224 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3818646232439336, + "acc_stderr": 0.017373732736677583, + "acc_norm": 0.3818646232439336, + "acc_norm_stderr": 0.017373732736677583 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04072314811876837, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04072314811876837 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.32340425531914896, + "acc_stderr": 0.030579442773610337, + "acc_norm": 0.32340425531914896, + "acc_norm_stderr": 0.030579442773610337 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.2891566265060241, + "acc_stderr": 0.03529486801511115, + "acc_norm": 0.2891566265060241, + "acc_norm_stderr": 0.03529486801511115 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3504823151125402, + "acc_stderr": 0.02709865262130175, + "acc_norm": 0.3504823151125402, + "acc_norm_stderr": 0.02709865262130175 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3901345291479821, + "acc_stderr": 0.03273766725459156, + "acc_norm": 0.3901345291479821, + "acc_norm_stderr": 0.03273766725459156 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2824427480916031, + 
"acc_stderr": 0.03948406125768361, + "acc_norm": 0.2824427480916031, + "acc_norm_stderr": 0.03948406125768361 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.32323232323232326, + "acc_stderr": 0.033322999210706444, + "acc_norm": 0.32323232323232326, + "acc_norm_stderr": 0.033322999210706444 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.30344827586206896, + "acc_stderr": 0.038312260488503336, + "acc_norm": 0.30344827586206896, + "acc_norm_stderr": 0.038312260488503336 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.19607843137254902, + "acc_stderr": 0.03950581861179963, + "acc_norm": 0.19607843137254902, + "acc_norm_stderr": 0.03950581861179963 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.29831932773109243, + "acc_stderr": 0.02971914287634286, + "acc_norm": 0.29831932773109243, + "acc_norm_stderr": 0.02971914287634286 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.26153846153846155, + "acc_stderr": 0.022282141204204426, + "acc_norm": 0.26153846153846155, + "acc_norm_stderr": 0.022282141204204426 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.37962962962962965, + "acc_stderr": 0.04691521224077742, + "acc_norm": 0.37962962962962965, + "acc_norm_stderr": 0.04691521224077742 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.30049261083743845, + "acc_stderr": 0.03225799476233485, + "acc_norm": 0.30049261083743845, + "acc_norm_stderr": 0.03225799476233485 + }, + 
"harness|ko_mmlu_high_school_biology|5": { + "acc": 0.31290322580645163, + "acc_stderr": 0.02637756702864586, + "acc_norm": 0.31290322580645163, + "acc_norm_stderr": 0.02637756702864586 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.452991452991453, + "acc_stderr": 0.0326109987309862, + "acc_norm": 0.452991452991453, + "acc_norm_stderr": 0.0326109987309862 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3132075471698113, + "acc_stderr": 0.02854479331905533, + "acc_norm": 0.3132075471698113, + "acc_norm_stderr": 0.02854479331905533 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4, + "acc_stderr": 0.0469237132203465, + "acc_norm": 0.4, + "acc_norm_stderr": 0.0469237132203465 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.29259259259259257, + "acc_stderr": 0.02773896963217609, + "acc_norm": 0.29259259259259257, + "acc_norm_stderr": 0.02773896963217609 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.03757949922943342, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.03757949922943342 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.4527363184079602, + "acc_stderr": 0.03519702717576915, + "acc_norm": 0.4527363184079602, + "acc_norm_stderr": 0.03519702717576915 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.26011560693641617, + "acc_stderr": 0.03345036916788991, + "acc_norm": 0.26011560693641617, + "acc_norm_stderr": 0.03345036916788991 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.023266512213730575, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.023266512213730575 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2361111111111111, + "acc_stderr": 0.03551446610810826, + "acc_norm": 0.2361111111111111, + "acc_norm_stderr": 0.03551446610810826 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 
0.040936018074033256 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.32947976878612717, + "acc_stderr": 0.025305258131879716, + "acc_norm": 0.32947976878612717, + "acc_norm_stderr": 0.025305258131879716 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2883435582822086, + "acc_stderr": 0.035590395316173425, + "acc_norm": 0.2883435582822086, + "acc_norm_stderr": 0.035590395316173425 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.38580246913580246, + "acc_stderr": 0.027085401226132143, + "acc_norm": 0.38580246913580246, + "acc_norm_stderr": 0.027085401226132143 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.2849740932642487, + "acc_stderr": 0.03257714077709662, + "acc_norm": 0.2849740932642487, + "acc_norm_stderr": 0.03257714077709662 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2982456140350877, + "acc_stderr": 0.04303684033537315, + "acc_norm": 0.2982456140350877, + "acc_norm_stderr": 0.04303684033537315 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.27889908256880735, + "acc_stderr": 0.019227468876463514, + "acc_norm": 0.27889908256880735, + "acc_norm_stderr": 0.019227468876463514 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.037184890068181146, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.037184890068181146 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3300653594771242, + "acc_stderr": 0.026925654653615686, + "acc_norm": 0.3300653594771242, + "acc_norm_stderr": 0.026925654653615686 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + 
"acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5950413223140496, + "acc_stderr": 0.04481137755942469, + "acc_norm": 0.5950413223140496, + "acc_norm_stderr": 0.04481137755942469 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.28289473684210525, + "acc_stderr": 0.03665349695640767, + "acc_norm": 0.28289473684210525, + "acc_norm_stderr": 0.03665349695640767 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.32516339869281047, + "acc_stderr": 0.018950886770806315, + "acc_norm": 0.32516339869281047, + "acc_norm_stderr": 0.018950886770806315 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2907801418439716, + "acc_stderr": 0.027090664368353178, + "acc_norm": 0.2907801418439716, + "acc_norm_stderr": 0.027090664368353178 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.30357142857142855, + "acc_stderr": 0.04364226155841044, + "acc_norm": 0.30357142857142855, + "acc_norm_stderr": 0.04364226155841044 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.30092592592592593, + "acc_stderr": 0.0312803908432988, + "acc_norm": 0.30092592592592593, + "acc_norm_stderr": 0.0312803908432988 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.025767252010855966, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.025767252010855966 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3142857142857143, + 
"acc_stderr": 0.029719329422417458, + "acc_norm": 0.3142857142857143, + "acc_norm_stderr": 0.029719329422417458 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2869198312236287, + "acc_stderr": 0.02944377302259469, + "acc_norm": 0.2869198312236287, + "acc_norm_stderr": 0.02944377302259469 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.24511082138200782, + "acc_stderr": 0.010986307870045507, + "acc_norm": 0.24511082138200782, + "acc_norm_stderr": 0.010986307870045507 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.03166009679399812, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.03166009679399812 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.23636363636363636, + "acc_stderr": 0.03317505930009179, + "acc_norm": 0.23636363636363636, + "acc_norm_stderr": 0.03317505930009179 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3023255813953488, + "mc1_stderr": 0.016077509266133033, + "mc2": 0.48093353604874856, + "mc2_stderr": 0.015655555450496628 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2668240850059032, + "acc_stderr": 0.015206575684565895, + "acc_norm": 0.34946871310507677, + "acc_norm_stderr": 0.01639279708576984 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + 
"harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + 
"model_name": "kodonho/llama2-chat-koalpaca", + "model_sha": "4a71ba04ef569a02282c3c40989d656fef6d602f", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/kody0525/KOpen-platypus-polyglot-ko-12.8b/result_2024-03-20 03:37:52.json b/kody0525/KOpen-platypus-polyglot-ko-12.8b/result_2024-03-20 03:37:52.json new file mode 100644 index 0000000000000000000000000000000000000000..c2b401b26bc44b76548ad088617529b6eb1abd2d --- /dev/null +++ b/kody0525/KOpen-platypus-polyglot-ko-12.8b/result_2024-03-20 03:37:52.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.29266211604095566, + "acc_stderr": 0.013295916103619406, + "acc_norm": 0.35238907849829354, + "acc_norm_stderr": 0.013960142600598682 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3884684325831508, + "acc_stderr": 0.004864058877626281, + "acc_norm": 0.5044811790479984, + "acc_norm_stderr": 0.0049895810081632104 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.29239766081871343, + "acc_stderr": 0.03488647713457923, + "acc_norm": 0.29239766081871343, + "acc_norm_stderr": 0.03488647713457923 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.20388349514563106, + "acc_stderr": 0.039891398595317706, + "acc_norm": 0.20388349514563106, + "acc_norm_stderr": 0.039891398595317706 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.26947637292464877, + "acc_stderr": 0.015866243073215058, + "acc_norm": 0.26947637292464877, + "acc_norm_stderr": 0.015866243073215058 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.32592592592592595, + "acc_stderr": 0.040491220417025055, + "acc_norm": 0.32592592592592595, + "acc_norm_stderr": 0.040491220417025055 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + 
"harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.20425531914893616, + "acc_stderr": 0.02635515841334941, + "acc_norm": 0.20425531914893616, + "acc_norm_stderr": 0.02635515841334941 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.18674698795180722, + "acc_stderr": 0.030338749144500576, + "acc_norm": 0.18674698795180722, + "acc_norm_stderr": 0.030338749144500576 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2990353697749196, + "acc_stderr": 0.02600330111788514, + "acc_norm": 0.2990353697749196, + "acc_norm_stderr": 0.02600330111788514 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.19730941704035873, + "acc_stderr": 0.02670985334496796, + "acc_norm": 0.19730941704035873, + "acc_norm_stderr": 0.02670985334496796 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2595419847328244, + "acc_stderr": 0.03844876139785271, + "acc_norm": 0.2595419847328244, + "acc_norm_stderr": 0.03844876139785271 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036845, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036845 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.2828282828282828, + "acc_stderr": 0.0320877955878675, + "acc_norm": 0.2828282828282828, + "acc_norm_stderr": 0.0320877955878675 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.296551724137931, + "acc_stderr": 0.03806142687309994, + "acc_norm": 0.296551724137931, + "acc_norm_stderr": 0.03806142687309994 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.040233822736177476, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.040233822736177476 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.23949579831932774, + "acc_stderr": 0.027722065493361266, + "acc_norm": 0.23949579831932774, + "acc_norm_stderr": 0.027722065493361266 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.23076923076923078, + "acc_stderr": 0.02136202772522272, + "acc_norm": 
0.23076923076923078, + "acc_norm_stderr": 0.02136202772522272 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.040191074725573483, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.040191074725573483 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.27586206896551724, + "acc_stderr": 0.03144712581678242, + "acc_norm": 0.27586206896551724, + "acc_norm_stderr": 0.03144712581678242 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.25483870967741934, + "acc_stderr": 0.024790118459332208, + "acc_norm": 0.25483870967741934, + "acc_norm_stderr": 0.024790118459332208 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2606837606837607, + "acc_stderr": 0.028760348956523414, + "acc_norm": 0.2606837606837607, + "acc_norm_stderr": 0.028760348956523414 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.25660377358490566, + "acc_stderr": 0.026880647889051996, + "acc_norm": 0.25660377358490566, + "acc_norm_stderr": 0.026880647889051996 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.20909090909090908, + "acc_stderr": 0.03895091015724135, + "acc_norm": 0.20909090909090908, + "acc_norm_stderr": 0.03895091015724135 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.026842057873833706, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.026842057873833706 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2781456953642384, + "acc_stderr": 0.03658603262763743, + "acc_norm": 0.2781456953642384, + "acc_norm_stderr": 0.03658603262763743 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.2537313432835821, + "acc_stderr": 
0.03076944496729602, + "acc_norm": 0.2537313432835821, + "acc_norm_stderr": 0.03076944496729602 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2543352601156069, + "acc_stderr": 0.0332055644308557, + "acc_norm": 0.2543352601156069, + "acc_norm_stderr": 0.0332055644308557 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2830687830687831, + "acc_stderr": 0.023201392938194978, + "acc_norm": 0.2830687830687831, + "acc_norm_stderr": 0.023201392938194978 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2638888888888889, + "acc_stderr": 0.03685651095897532, + "acc_norm": 0.2638888888888889, + "acc_norm_stderr": 0.03685651095897532 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.17, + "acc_stderr": 0.0377525168068637, + "acc_norm": 0.17, + "acc_norm_stderr": 0.0377525168068637 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2774566473988439, + "acc_stderr": 0.024105712607754307, + "acc_norm": 0.2774566473988439, + "acc_norm_stderr": 0.024105712607754307 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2883435582822086, + "acc_stderr": 0.035590395316173425, + "acc_norm": 0.2883435582822086, + "acc_norm_stderr": 0.035590395316173425 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.025407197798890165, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.025407197798890165 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.22, + "acc_stderr": 0.041633319989322695, + "acc_norm": 0.22, + "acc_norm_stderr": 0.041633319989322695 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.24870466321243523, + "acc_stderr": 0.031195840877700286, + "acc_norm": 0.24870466321243523, + "acc_norm_stderr": 0.031195840877700286 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + 
"acc_stderr": 0.041424397194893624, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.041424397194893624 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.22935779816513763, + "acc_stderr": 0.018025349724618684, + "acc_norm": 0.22935779816513763, + "acc_norm_stderr": 0.018025349724618684 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.15873015873015872, + "acc_stderr": 0.03268454013011744, + "acc_norm": 0.15873015873015872, + "acc_norm_stderr": 0.03268454013011744 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.02555316999182651, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.02555316999182651 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.34710743801652894, + "acc_stderr": 0.04345724570292534, + "acc_norm": 0.34710743801652894, + "acc_norm_stderr": 0.04345724570292534 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3157894736842105, + "acc_stderr": 0.03782728980865469, + "acc_norm": 0.3157894736842105, + "acc_norm_stderr": 0.03782728980865469 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2875816993464052, + "acc_stderr": 0.018311653053648222, + "acc_norm": 0.2875816993464052, + "acc_norm_stderr": 0.018311653053648222 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2553191489361702, + "acc_stderr": 0.026011992930902006, + "acc_norm": 0.2553191489361702, + "acc_norm_stderr": 0.026011992930902006 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25, + "acc_stderr": 0.04109974682633932, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04109974682633932 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.25462962962962965, + "acc_stderr": 0.029711275860005344, + "acc_norm": 0.25462962962962965, + "acc_norm_stderr": 0.029711275860005344 + }, + 
"harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24692737430167597, + "acc_stderr": 0.014422292204808852, + "acc_norm": 0.24692737430167597, + "acc_norm_stderr": 0.014422292204808852 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.21323529411764705, + "acc_stderr": 0.024880971512294257, + "acc_norm": 0.21323529411764705, + "acc_norm_stderr": 0.024880971512294257 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.23673469387755103, + "acc_stderr": 0.027212835884073163, + "acc_norm": 0.23673469387755103, + "acc_norm_stderr": 0.027212835884073163 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2616033755274262, + "acc_stderr": 0.028609516716994934, + "acc_norm": 0.2616033755274262, + "acc_norm_stderr": 0.028609516716994934 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.24511082138200782, + "acc_stderr": 0.010986307870045522, + "acc_norm": 0.24511082138200782, + "acc_norm_stderr": 0.010986307870045522 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.031321798030832904, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.031321798030832904 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2545454545454545, + "acc_stderr": 0.03401506715249039, + "acc_norm": 0.2545454545454545, + "acc_norm_stderr": 0.03401506715249039 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2215422276621787, + "mc1_stderr": 0.014537867601301145, + "mc2": 0.3725803165154559, + "mc2_stderr": 0.014378870570691916 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.33293978748524206, + "acc_stderr": 0.016202431208373797, + "acc_norm": 
0.4167650531286895, + "acc_norm_stderr": 0.01695048914610883 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + 
"harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "kody0525/KOpen-platypus-polyglot-ko-12.8b", + "model_sha": "4cb0fe4f69a5ce9e4005e922a440a82b61e3909d", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/krevas/LDCC-Instruct-Llama-2-ko-13B-v4.1.12/result_2023-10-19 23:41:08.json b/krevas/LDCC-Instruct-Llama-2-ko-13B-v4.1.12/result_2023-10-19 23:41:08.json new file mode 100644 index 0000000000000000000000000000000000000000..6ecbe81f82c405e0f188f76862f91ce7b2a39cc3 --- /dev/null +++ b/krevas/LDCC-Instruct-Llama-2-ko-13B-v4.1.12/result_2023-10-19 23:41:08.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.39505119453924914, + "acc_stderr": 0.014285898292938167, + "acc_norm": 0.45307167235494883, + "acc_norm_stderr": 0.014546892052005628 + }, + "harness|ko_hellaswag|10": { + "acc": 0.40450109539932283, + "acc_stderr": 0.004897921845492103, + "acc_norm": 0.5380402310296754, + "acc_norm_stderr": 0.004975319435777095 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.40350877192982454, + "acc_stderr": 
0.03762738699917056, + "acc_norm": 0.40350877192982454, + "acc_norm_stderr": 0.03762738699917056 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2912621359223301, + "acc_stderr": 0.044986763205729245, + "acc_norm": 0.2912621359223301, + "acc_norm_stderr": 0.044986763205729245 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.016857391247472552, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.016857391247472552 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.35555555555555557, + "acc_stderr": 0.04135176749720386, + "acc_norm": 0.35555555555555557, + "acc_norm_stderr": 0.04135176749720386 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2425531914893617, + "acc_stderr": 0.028020226271200217, + "acc_norm": 0.2425531914893617, + "acc_norm_stderr": 0.028020226271200217 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.28313253012048195, + "acc_stderr": 0.03507295431370518, + "acc_norm": 0.28313253012048195, + "acc_norm_stderr": 0.03507295431370518 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3762057877813505, + "acc_stderr": 0.027513925683549427, + "acc_norm": 0.3762057877813505, + "acc_norm_stderr": 0.027513925683549427 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.336322869955157, + "acc_stderr": 0.031708824268455, + "acc_norm": 0.336322869955157, + "acc_norm_stderr": 0.031708824268455 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3282442748091603, + "acc_stderr": 0.041184385658062976, + "acc_norm": 0.3282442748091603, + "acc_norm_stderr": 0.041184385658062976 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.3, + "acc_stderr": 0.04605661864718381, + "acc_norm": 0.3, + "acc_norm_stderr": 0.04605661864718381 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.2828282828282828, + "acc_stderr": 
0.03208779558786751, + "acc_norm": 0.2828282828282828, + "acc_norm_stderr": 0.03208779558786751 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2620689655172414, + "acc_stderr": 0.036646663372252565, + "acc_norm": 0.2620689655172414, + "acc_norm_stderr": 0.036646663372252565 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.18627450980392157, + "acc_stderr": 0.03873958714149352, + "acc_norm": 0.18627450980392157, + "acc_norm_stderr": 0.03873958714149352 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.25630252100840334, + "acc_stderr": 0.028359620870533953, + "acc_norm": 0.25630252100840334, + "acc_norm_stderr": 0.028359620870533953 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.24615384615384617, + "acc_stderr": 0.021840866990423088, + "acc_norm": 0.24615384615384617, + "acc_norm_stderr": 0.021840866990423088 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.04668408033024931, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.04668408033024931 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3103448275862069, + "acc_stderr": 0.03255086769970103, + "acc_norm": 0.3103448275862069, + "acc_norm_stderr": 0.03255086769970103 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3387096774193548, + "acc_stderr": 0.02692344605930286, + "acc_norm": 0.3387096774193548, + "acc_norm_stderr": 0.02692344605930286 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.03255326307272486, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.03255326307272486 + }, + 
"harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2490566037735849, + "acc_stderr": 0.02661648298050172, + "acc_norm": 0.2490566037735849, + "acc_norm_stderr": 0.02661648298050172 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2818181818181818, + "acc_stderr": 0.04309118709946458, + "acc_norm": 0.2818181818181818, + "acc_norm_stderr": 0.04309118709946458 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.026962424325073838, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.026962424325073838 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + "acc_stderr": 0.03684881521389023, + "acc_norm": 0.2847682119205298, + "acc_norm_stderr": 0.03684881521389023 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.46766169154228854, + "acc_stderr": 0.035281314729336065, + "acc_norm": 0.46766169154228854, + "acc_norm_stderr": 0.035281314729336065 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2832369942196532, + "acc_stderr": 0.034355680560478746, + "acc_norm": 0.2832369942196532, + "acc_norm_stderr": 0.034355680560478746 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.02326651221373057, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.02326651221373057 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3263888888888889, + "acc_stderr": 0.03921067198982266, + "acc_norm": 0.3263888888888889, + "acc_norm_stderr": 0.03921067198982266 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.19, + "acc_stderr": 0.03942772444036624, + "acc_norm": 0.19, + "acc_norm_stderr": 0.03942772444036624 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.3901734104046243, + "acc_stderr": 0.026261677607806636, + "acc_norm": 0.3901734104046243, + 
"acc_norm_stderr": 0.026261677607806636 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3496932515337423, + "acc_stderr": 0.037466683254700206, + "acc_norm": 0.3496932515337423, + "acc_norm_stderr": 0.037466683254700206 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3425925925925926, + "acc_stderr": 0.02640614597362566, + "acc_norm": 0.3425925925925926, + "acc_norm_stderr": 0.02640614597362566 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.32124352331606215, + "acc_stderr": 0.033699508685490674, + "acc_norm": 0.32124352331606215, + "acc_norm_stderr": 0.033699508685490674 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.04049339297748141, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.04049339297748141 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.30825688073394497, + "acc_stderr": 0.019798366698367268, + "acc_norm": 0.30825688073394497, + "acc_norm_stderr": 0.019798366698367268 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.21428571428571427, + "acc_stderr": 0.03670066451047181, + "acc_norm": 0.21428571428571427, + "acc_norm_stderr": 0.03670066451047181 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3202614379084967, + "acc_stderr": 0.026716118380156827, + "acc_norm": 0.3202614379084967, + "acc_norm_stderr": 0.026716118380156827 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5702479338842975, + "acc_stderr": 0.04519082021319771, + "acc_norm": 0.5702479338842975, + "acc_norm_stderr": 0.04519082021319771 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3092105263157895, + "acc_stderr": 0.03761070869867479, + 
"acc_norm": 0.3092105263157895, + "acc_norm_stderr": 0.03761070869867479 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.31862745098039214, + "acc_stderr": 0.01885008469646872, + "acc_norm": 0.31862745098039214, + "acc_norm_stderr": 0.01885008469646872 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2765957446808511, + "acc_stderr": 0.026684564340461, + "acc_norm": 0.2765957446808511, + "acc_norm_stderr": 0.026684564340461 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.19642857142857142, + "acc_stderr": 0.037709700493470194, + "acc_norm": 0.19642857142857142, + "acc_norm_stderr": 0.037709700493470194 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.24537037037037038, + "acc_stderr": 0.029346665094372948, + "acc_norm": 0.24537037037037038, + "acc_norm_stderr": 0.029346665094372948 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2324022346368715, + "acc_stderr": 0.014125968754673385, + "acc_norm": 0.2324022346368715, + "acc_norm_stderr": 0.014125968754673385 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.20955882352941177, + "acc_stderr": 0.02472311040767705, + "acc_norm": 0.20955882352941177, + "acc_norm_stderr": 0.02472311040767705 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2653061224489796, + "acc_stderr": 0.028263889943784617, + "acc_norm": 0.2653061224489796, + "acc_norm_stderr": 0.028263889943784617 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.4472573839662447, + "acc_stderr": 0.03236564251614192, + "acc_norm": 0.4472573839662447, + "acc_norm_stderr": 0.03236564251614192 + }, + 
"harness|ko_mmlu_professional_law|5": { + "acc": 0.30182529335071706, + "acc_stderr": 0.01172435051810589, + "acc_norm": 0.30182529335071706, + "acc_norm_stderr": 0.01172435051810589 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.3627450980392157, + "acc_stderr": 0.03374499356319354, + "acc_norm": 0.3627450980392157, + "acc_norm_stderr": 0.03374499356319354 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4, + "acc_stderr": 0.03825460278380026, + "acc_norm": 0.4, + "acc_norm_stderr": 0.03825460278380026 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3072215422276622, + "mc1_stderr": 0.016150201321323002, + "mc2": 0.47008540499028884, + "mc2_stderr": 0.015171096468571796 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.40968122786304606, + "acc_stderr": 0.016907568192219474, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.017119172208061504 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + 
"harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "krevas/LDCC-Instruct-Llama-2-ko-13B-v4.1.12", + "model_sha": "26d17aadd76e28b7226c206d1e5517b703b540fb", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git 
a/krevas/LDCC-Instruct-Llama-2-ko-13B-v4.1.14/result_2023-10-22 01:18:39.json b/krevas/LDCC-Instruct-Llama-2-ko-13B-v4.1.14/result_2023-10-22 01:18:39.json new file mode 100644 index 0000000000000000000000000000000000000000..3747e35f1a232e1b590ddb57f95746c4ff315ec3 --- /dev/null +++ b/krevas/LDCC-Instruct-Llama-2-ko-13B-v4.1.14/result_2023-10-22 01:18:39.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3779863481228669, + "acc_stderr": 0.014169664520303096, + "acc_norm": 0.4325938566552901, + "acc_norm_stderr": 0.014478005694182531 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4039036048595897, + "acc_stderr": 0.004896757857022551, + "acc_norm": 0.5393347938657638, + "acc_norm_stderr": 0.004974316807920405 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5087719298245614, + "acc_stderr": 0.038342347441649924, + "acc_norm": 0.5087719298245614, + "acc_norm_stderr": 0.038342347441649924 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.44660194174757284, + "acc_stderr": 0.049224241534589326, + "acc_norm": 0.44660194174757284, + "acc_norm_stderr": 0.049224241534589326 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4495530012771392, + "acc_stderr": 0.017788725283507337, + "acc_norm": 0.4495530012771392, + "acc_norm_stderr": 0.017788725283507337 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3925925925925926, + "acc_stderr": 0.042185062153688786, + "acc_norm": 0.3925925925925926, + "acc_norm_stderr": 0.042185062153688786 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816505, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816505 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.33617021276595743, + "acc_stderr": 0.030881618520676942, + "acc_norm": 0.33617021276595743, + "acc_norm_stderr": 0.030881618520676942 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3433734939759036, + "acc_stderr": 0.03696584317010601, + "acc_norm": 0.3433734939759036, + 
"acc_norm_stderr": 0.03696584317010601 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4694533762057878, + "acc_stderr": 0.02834504586484068, + "acc_norm": 0.4694533762057878, + "acc_norm_stderr": 0.02834504586484068 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3632286995515695, + "acc_stderr": 0.03227790442850499, + "acc_norm": 0.3632286995515695, + "acc_norm_stderr": 0.03227790442850499 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.44274809160305345, + "acc_stderr": 0.04356447202665069, + "acc_norm": 0.44274809160305345, + "acc_norm_stderr": 0.04356447202665069 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621502, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621502 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.51010101010101, + "acc_stderr": 0.035616254886737454, + "acc_norm": 0.51010101010101, + "acc_norm_stderr": 0.035616254886737454 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3586206896551724, + "acc_stderr": 0.03996629574876719, + "acc_norm": 0.3586206896551724, + "acc_norm_stderr": 0.03996629574876719 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237655, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237655 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.42016806722689076, + "acc_stderr": 0.03206183783236152, + "acc_norm": 0.42016806722689076, + "acc_norm_stderr": 0.03206183783236152 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4282051282051282, + "acc_stderr": 0.025088301454694834, + "acc_norm": 0.4282051282051282, + "acc_norm_stderr": 0.025088301454694834 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 
0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.37962962962962965, + "acc_stderr": 0.04691521224077742, + "acc_norm": 0.37962962962962965, + "acc_norm_stderr": 0.04691521224077742 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.33004926108374383, + "acc_stderr": 0.03308530426228258, + "acc_norm": 0.33004926108374383, + "acc_norm_stderr": 0.03308530426228258 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.41935483870967744, + "acc_stderr": 0.02807158890109185, + "acc_norm": 0.41935483870967744, + "acc_norm_stderr": 0.02807158890109185 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5854700854700855, + "acc_stderr": 0.03227396567623779, + "acc_norm": 0.5854700854700855, + "acc_norm_stderr": 0.03227396567623779 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.42641509433962266, + "acc_stderr": 0.03043779434298305, + "acc_norm": 0.42641509433962266, + "acc_norm_stderr": 0.03043779434298305 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.41818181818181815, + "acc_stderr": 0.04724577405731571, + "acc_norm": 0.41818181818181815, + "acc_norm_stderr": 0.04724577405731571 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.027840811495871927, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.027840811495871927 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.03757949922943343, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.03757949922943343 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5323383084577115, + "acc_stderr": 0.03528131472933607, + "acc_norm": 0.5323383084577115, + "acc_norm_stderr": 0.03528131472933607 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4393063583815029, + "acc_stderr": 0.037842719328874674, + "acc_norm": 0.4393063583815029, + "acc_norm_stderr": 0.037842719328874674 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + 
"acc": 0.30423280423280424, + "acc_stderr": 0.023695415009463087, + "acc_norm": 0.30423280423280424, + "acc_norm_stderr": 0.023695415009463087 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3958333333333333, + "acc_stderr": 0.04089465449325582, + "acc_norm": 0.3958333333333333, + "acc_norm_stderr": 0.04089465449325582 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.56, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.56, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.407514450867052, + "acc_stderr": 0.026454578146931498, + "acc_norm": 0.407514450867052, + "acc_norm_stderr": 0.026454578146931498 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.39263803680981596, + "acc_stderr": 0.03836740907831029, + "acc_norm": 0.39263803680981596, + "acc_norm_stderr": 0.03836740907831029 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.02743162372241502, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.02743162372241502 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.46632124352331605, + "acc_stderr": 0.036002440698671784, + "acc_norm": 0.46632124352331605, + "acc_norm_stderr": 0.036002440698671784 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.0414243971948936, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.0414243971948936 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.4990825688073395, + "acc_stderr": 0.021437287056051215, + "acc_norm": 0.4990825688073395, + "acc_norm_stderr": 0.021437287056051215 + }, + 
"harness|ko_mmlu_formal_logic|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.04134913018303316, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.04134913018303316 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.43790849673202614, + "acc_stderr": 0.02840830202033269, + "acc_norm": 0.43790849673202614, + "acc_norm_stderr": 0.02840830202033269 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.043913262867240704, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.043913262867240704 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.46710526315789475, + "acc_stderr": 0.04060127035236395, + "acc_norm": 0.46710526315789475, + "acc_norm_stderr": 0.04060127035236395 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3562091503267974, + "acc_stderr": 0.0193733324207245, + "acc_norm": 0.3562091503267974, + "acc_norm_stderr": 0.0193733324207245 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32269503546099293, + "acc_stderr": 0.027889139300534785, + "acc_norm": 0.32269503546099293, + "acc_norm_stderr": 0.027889139300534785 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.32142857142857145, + "acc_stderr": 0.04432804055291519, + "acc_norm": 0.32142857142857145, + "acc_norm_stderr": 0.04432804055291519 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.033622774366080424, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.033622774366080424 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24022346368715083, + "acc_stderr": 0.01428834380392531, + "acc_norm": 0.24022346368715083, + "acc_norm_stderr": 0.01428834380392531 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + 
"acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.049999999999999996, + "acc_norm": 0.45, + "acc_norm_stderr": 0.049999999999999996 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3492647058823529, + "acc_stderr": 0.02895975519682485, + "acc_norm": 0.3492647058823529, + "acc_norm_stderr": 0.02895975519682485 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4122448979591837, + "acc_stderr": 0.03151236044674281, + "acc_norm": 0.4122448979591837, + "acc_norm_stderr": 0.03151236044674281 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5232067510548524, + "acc_stderr": 0.032512152011410174, + "acc_norm": 0.5232067510548524, + "acc_norm_stderr": 0.032512152011410174 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3239895697522816, + "acc_stderr": 0.011952840809646563, + "acc_norm": 0.3239895697522816, + "acc_norm_stderr": 0.011952840809646563 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.44607843137254904, + "acc_stderr": 0.03488845451304974, + "acc_norm": 0.44607843137254904, + "acc_norm_stderr": 0.03488845451304974 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.46060606060606063, + "acc_stderr": 0.03892207016552013, + "acc_norm": 0.46060606060606063, + "acc_norm_stderr": 0.03892207016552013 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2864137086903305, + "mc1_stderr": 0.01582614243950234, + "mc2": 0.44508082063982635, + "mc2_stderr": 0.014978253495446162 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.40968122786304606, + "acc_stderr": 0.01690756819221947, + "acc_norm": 0.5112160566706021, + "acc_norm_stderr": 0.01718602846948929 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + 
"harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + 
"harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "krevas/LDCC-Instruct-Llama-2-ko-13B-v4.1.14", + "model_sha": "06b824795d8f7b9efa5cbe1c3a7b21e7c939bf8b", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/krevas/LDCC-Instruct-Llama-2-ko-13B-v4.1.2/result_2023-10-23 03:57:31.json b/krevas/LDCC-Instruct-Llama-2-ko-13B-v4.1.2/result_2023-10-23 03:57:31.json new file mode 100644 index 0000000000000000000000000000000000000000..1dd92a3ca437520edcfbb09accf7f18bcfd90286 --- /dev/null +++ b/krevas/LDCC-Instruct-Llama-2-ko-13B-v4.1.2/result_2023-10-23 03:57:31.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.37627986348122866, + "acc_stderr": 0.014157022555407166, + "acc_norm": 0.4445392491467577, + "acc_norm_stderr": 0.014521226405627074 + }, + "harness|ko_hellaswag|10": { + "acc": 0.40679147580163316, + "acc_stderr": 0.004902314055725591, + "acc_norm": 0.5413264289982075, + "acc_norm_stderr": 0.004972708369656543 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.3157894736842105, + "acc_stderr": 0.03565079670708311, + "acc_norm": 0.3157894736842105, + "acc_norm_stderr": 0.03565079670708311 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2524271844660194, + "acc_stderr": 0.04301250399690879, + "acc_norm": 0.2524271844660194, + "acc_norm_stderr": 0.04301250399690879 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 
0.2681992337164751, + "acc_stderr": 0.01584243083526942, + "acc_norm": 0.2681992337164751, + "acc_norm_stderr": 0.01584243083526942 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.32592592592592595, + "acc_stderr": 0.040491220417025055, + "acc_norm": 0.32592592592592595, + "acc_norm_stderr": 0.040491220417025055 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.20425531914893616, + "acc_stderr": 0.026355158413349417, + "acc_norm": 0.20425531914893616, + "acc_norm_stderr": 0.026355158413349417 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.22289156626506024, + "acc_stderr": 0.032400048255946876, + "acc_norm": 0.22289156626506024, + "acc_norm_stderr": 0.032400048255946876 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.33440514469453375, + "acc_stderr": 0.02679542232789394, + "acc_norm": 0.33440514469453375, + "acc_norm_stderr": 0.02679542232789394 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.2825112107623318, + "acc_stderr": 0.030216831011508755, + "acc_norm": 0.2825112107623318, + "acc_norm_stderr": 0.030216831011508755 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2366412213740458, + "acc_stderr": 0.03727673575596917, + "acc_norm": 0.2366412213740458, + "acc_norm_stderr": 0.03727673575596917 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.24242424242424243, + "acc_stderr": 0.030532892233932026, + "acc_norm": 0.24242424242424243, + "acc_norm_stderr": 0.030532892233932026 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.22758620689655173, + "acc_stderr": 0.03493950380131184, + "acc_norm": 0.22758620689655173, + "acc_norm_stderr": 0.03493950380131184 + }, + 
"harness|ko_mmlu_college_physics|5": { + "acc": 0.1568627450980392, + "acc_stderr": 0.03618664819936246, + "acc_norm": 0.1568627450980392, + "acc_norm_stderr": 0.03618664819936246 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.24369747899159663, + "acc_stderr": 0.027886828078380558, + "acc_norm": 0.24369747899159663, + "acc_norm_stderr": 0.027886828078380558 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2205128205128205, + "acc_stderr": 0.02102067268082791, + "acc_norm": 0.2205128205128205, + "acc_norm_stderr": 0.02102067268082791 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.04643454608906275, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.04643454608906275 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.29064039408866993, + "acc_stderr": 0.0319474007226554, + "acc_norm": 0.29064039408866993, + "acc_norm_stderr": 0.0319474007226554 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3548387096774194, + "acc_stderr": 0.027218889773308753, + "acc_norm": 0.3548387096774194, + "acc_norm_stderr": 0.027218889773308753 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.030882736974138653, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.030882736974138653 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2, + "acc_stderr": 0.02461829819586651, + "acc_norm": 0.2, + "acc_norm_stderr": 0.02461829819586651 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2909090909090909, + "acc_stderr": 0.04350271442923243, + "acc_norm": 0.2909090909090909, + "acc_norm_stderr": 0.04350271442923243 
+ }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.026962424325073838, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.026962424325073838 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + "acc_stderr": 0.03684881521389023, + "acc_norm": 0.2847682119205298, + "acc_norm_stderr": 0.03684881521389023 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.34328358208955223, + "acc_stderr": 0.03357379665433431, + "acc_norm": 0.34328358208955223, + "acc_norm_stderr": 0.03357379665433431 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.24855491329479767, + "acc_stderr": 0.03295304696818317, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 0.03295304696818317 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.24867724867724866, + "acc_stderr": 0.022261817692400175, + "acc_norm": 0.24867724867724866, + "acc_norm_stderr": 0.022261817692400175 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3194444444444444, + "acc_stderr": 0.03899073687357335, + "acc_norm": 0.3194444444444444, + "acc_norm_stderr": 0.03899073687357335 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.13, + "acc_stderr": 0.0337997668989631, + "acc_norm": 0.13, + "acc_norm_stderr": 0.0337997668989631 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.36127167630057805, + "acc_stderr": 0.025862201852277895, + "acc_norm": 0.36127167630057805, + "acc_norm_stderr": 0.025862201852277895 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3312883435582822, + "acc_stderr": 0.03697983910025588, + "acc_norm": 0.3312883435582822, + "acc_norm_stderr": 0.03697983910025588 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2932098765432099, + "acc_stderr": 0.025329888171900933, + "acc_norm": 0.2932098765432099, + 
"acc_norm_stderr": 0.025329888171900933 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.3005181347150259, + "acc_stderr": 0.033088185944157515, + "acc_norm": 0.3005181347150259, + "acc_norm_stderr": 0.033088185944157515 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.04049339297748141, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.04049339297748141 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.25688073394495414, + "acc_stderr": 0.01873249292834245, + "acc_norm": 0.25688073394495414, + "acc_norm_stderr": 0.01873249292834245 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.1746031746031746, + "acc_stderr": 0.033954900208561116, + "acc_norm": 0.1746031746031746, + "acc_norm_stderr": 0.033954900208561116 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3006535947712418, + "acc_stderr": 0.02625605383571896, + "acc_norm": 0.3006535947712418, + "acc_norm_stderr": 0.02625605383571896 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.47107438016528924, + "acc_stderr": 0.04556710331269498, + "acc_norm": 0.47107438016528924, + "acc_norm_stderr": 0.04556710331269498 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.29605263157894735, + "acc_stderr": 0.03715062154998905, + "acc_norm": 0.29605263157894735, + "acc_norm_stderr": 0.03715062154998905 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3235294117647059, + "acc_stderr": 0.01892608291608339, + "acc_norm": 0.3235294117647059, + "acc_norm_stderr": 0.01892608291608339 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.28368794326241137, + "acc_stderr": 
0.026891709428343957, + "acc_norm": 0.28368794326241137, + "acc_norm_stderr": 0.026891709428343957 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.23214285714285715, + "acc_stderr": 0.04007341809755807, + "acc_norm": 0.23214285714285715, + "acc_norm_stderr": 0.04007341809755807 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.24537037037037038, + "acc_stderr": 0.029346665094372948, + "acc_norm": 0.24537037037037038, + "acc_norm_stderr": 0.029346665094372948 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24581005586592178, + "acc_stderr": 0.014400296429225606, + "acc_norm": 0.24581005586592178, + "acc_norm_stderr": 0.014400296429225606 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.19117647058823528, + "acc_stderr": 0.02388688192244036, + "acc_norm": 0.19117647058823528, + "acc_norm_stderr": 0.02388688192244036 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4816326530612245, + "acc_stderr": 0.031987615467631264, + "acc_norm": 0.4816326530612245, + "acc_norm_stderr": 0.031987615467631264 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.350210970464135, + "acc_stderr": 0.031052391937584353, + "acc_norm": 0.350210970464135, + "acc_norm_stderr": 0.031052391937584353 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2790091264667536, + "acc_stderr": 0.011455208832803545, + "acc_norm": 0.2790091264667536, + "acc_norm_stderr": 0.011455208832803545 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.032566854844603886, + "acc_norm": 0.3137254901960784, + "acc_norm_stderr": 0.032566854844603886 + }, + 
"harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3090909090909091, + "acc_stderr": 0.03608541011573967, + "acc_norm": 0.3090909090909091, + "acc_norm_stderr": 0.03608541011573967 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2876376988984088, + "mc1_stderr": 0.015846315101394816, + "mc2": 0.44907946334045823, + "mc2_stderr": 0.015040408260408762 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3317591499409681, + "acc_stderr": 0.016187984642157316, + "acc_norm": 0.4203069657615112, + "acc_norm_stderr": 0.016970598281177706 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + 
"harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "krevas/LDCC-Instruct-Llama-2-ko-13B-v4.1.2", + "model_sha": "116dea6c97133d0729b618bbe76cf650a92a90a8", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/krevas/LDCC-Instruct-Llama-2-ko-13B-v4.1.8/result_2023-10-20 03:33:51.json b/krevas/LDCC-Instruct-Llama-2-ko-13B-v4.1.8/result_2023-10-20 03:33:51.json new file mode 100644 index 0000000000000000000000000000000000000000..0297119a14837088f8ef2e8ef3122dd5fab9d8a0 --- /dev/null +++ b/krevas/LDCC-Instruct-Llama-2-ko-13B-v4.1.8/result_2023-10-20 03:33:51.json @@ -0,0 +1,444 @@ +{ + 
"results": { + "harness|ko_arc_challenge|25": { + "acc": 0.38822525597269625, + "acc_stderr": 0.014241614207414046, + "acc_norm": 0.4513651877133106, + "acc_norm_stderr": 0.014542104569955262 + }, + "harness|ko_hellaswag|10": { + "acc": 0.40689105755825533, + "acc_stderr": 0.004902502514738606, + "acc_norm": 0.5412268472415853, + "acc_norm_stderr": 0.004972790690640187 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.30409356725146197, + "acc_stderr": 0.0352821125824523, + "acc_norm": 0.30409356725146197, + "acc_norm_stderr": 0.0352821125824523 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.36893203883495146, + "acc_stderr": 0.04777615181156739, + "acc_norm": 0.36893203883495146, + "acc_norm_stderr": 0.04777615181156739 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2822477650063857, + "acc_stderr": 0.016095302969878548, + "acc_norm": 0.2822477650063857, + "acc_norm_stderr": 0.016095302969878548 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2740740740740741, + "acc_stderr": 0.03853254836552003, + "acc_norm": 0.2740740740740741, + "acc_norm_stderr": 0.03853254836552003 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036845, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036845 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.24680851063829787, + "acc_stderr": 0.0281854413012341, + "acc_norm": 0.24680851063829787, + "acc_norm_stderr": 0.0281854413012341 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3132530120481928, + "acc_stderr": 0.036108050180310235, + "acc_norm": 0.3132530120481928, + "acc_norm_stderr": 0.036108050180310235 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3247588424437299, + "acc_stderr": 0.026596782287697043, + "acc_norm": 0.3247588424437299, + "acc_norm_stderr": 0.026596782287697043 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.25112107623318386, + "acc_stderr": 0.029105220833224622, + "acc_norm": 0.25112107623318386, + "acc_norm_stderr": 
0.029105220833224622 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.32061068702290074, + "acc_stderr": 0.040933292298342784, + "acc_norm": 0.32061068702290074, + "acc_norm_stderr": 0.040933292298342784 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.30303030303030304, + "acc_stderr": 0.03274287914026866, + "acc_norm": 0.30303030303030304, + "acc_norm_stderr": 0.03274287914026866 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.25517241379310346, + "acc_stderr": 0.03632984052707842, + "acc_norm": 0.25517241379310346, + "acc_norm_stderr": 0.03632984052707842 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.04023382273617747, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.04023382273617747 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3697478991596639, + "acc_stderr": 0.03135709599613591, + "acc_norm": 0.3697478991596639, + "acc_norm_stderr": 0.03135709599613591 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.33076923076923076, + "acc_stderr": 0.023854795680971142, + "acc_norm": 0.33076923076923076, + "acc_norm_stderr": 0.023854795680971142 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.17, + "acc_stderr": 0.0377525168068637, + "acc_norm": 0.17, + "acc_norm_stderr": 0.0377525168068637 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.044531975073749834, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.044531975073749834 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2512315270935961, + "acc_stderr": 0.030516530732694433, + "acc_norm": 
0.2512315270935961, + "acc_norm_stderr": 0.030516530732694433 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.31290322580645163, + "acc_stderr": 0.02637756702864586, + "acc_norm": 0.31290322580645163, + "acc_norm_stderr": 0.02637756702864586 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.33760683760683763, + "acc_stderr": 0.030980296992618558, + "acc_norm": 0.33760683760683763, + "acc_norm_stderr": 0.030980296992618558 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.27169811320754716, + "acc_stderr": 0.027377706624670713, + "acc_norm": 0.27169811320754716, + "acc_norm_stderr": 0.027377706624670713 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2727272727272727, + "acc_stderr": 0.04265792110940588, + "acc_norm": 0.2727272727272727, + "acc_norm_stderr": 0.04265792110940588 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.025348097468097856, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.025348097468097856 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.03802039760107903, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.03802039760107903 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.39303482587064675, + "acc_stderr": 0.0345368246603156, + "acc_norm": 0.39303482587064675, + "acc_norm_stderr": 0.0345368246603156 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.24855491329479767, + "acc_stderr": 0.03295304696818317, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 0.03295304696818317 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.22486772486772486, + "acc_stderr": 0.02150209607822914, + "acc_norm": 0.22486772486772486, + "acc_norm_stderr": 0.02150209607822914 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3541666666666667, + "acc_stderr": 0.039994111357535424, + "acc_norm": 0.3541666666666667, + "acc_norm_stderr": 0.039994111357535424 + }, + 
"harness|ko_mmlu_college_chemistry|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.45, + "acc_stderr": 0.04999999999999999, + "acc_norm": 0.45, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.3236994219653179, + "acc_stderr": 0.025190181327608415, + "acc_norm": 0.3236994219653179, + "acc_norm_stderr": 0.025190181327608415 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.24539877300613497, + "acc_stderr": 0.03380939813943354, + "acc_norm": 0.24539877300613497, + "acc_norm_stderr": 0.03380939813943354 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.31790123456790126, + "acc_stderr": 0.025910063528240865, + "acc_norm": 0.31790123456790126, + "acc_norm_stderr": 0.025910063528240865 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.39896373056994816, + "acc_stderr": 0.03533999094065696, + "acc_norm": 0.39896373056994816, + "acc_norm_stderr": 0.03533999094065696 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.0414243971948936, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.0414243971948936 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.29357798165137616, + "acc_stderr": 0.01952515112263966, + "acc_norm": 0.29357798165137616, + "acc_norm_stderr": 0.01952515112263966 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.04360314860077459, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.04360314860077459 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.026992544339297226, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.026992544339297226 
+ }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.49586776859504134, + "acc_stderr": 0.045641987674327526, + "acc_norm": 0.49586776859504134, + "acc_norm_stderr": 0.045641987674327526 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.28289473684210525, + "acc_stderr": 0.03665349695640767, + "acc_norm": 0.28289473684210525, + "acc_norm_stderr": 0.03665349695640767 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2679738562091503, + "acc_stderr": 0.017917974069594726, + "acc_norm": 0.2679738562091503, + "acc_norm_stderr": 0.017917974069594726 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2765957446808511, + "acc_stderr": 0.026684564340460994, + "acc_norm": 0.2765957446808511, + "acc_norm_stderr": 0.026684564340460994 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.15178571428571427, + "acc_stderr": 0.03405702838185695, + "acc_norm": 0.15178571428571427, + "acc_norm_stderr": 0.03405702838185695 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4675925925925926, + "acc_stderr": 0.03402801581358966, + "acc_norm": 0.4675925925925926, + "acc_norm_stderr": 0.03402801581358966 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.26145251396648045, + "acc_stderr": 0.014696599650364545, + "acc_norm": 0.26145251396648045, + "acc_norm_stderr": 0.014696599650364545 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.23161764705882354, + "acc_stderr": 0.025626533803777562, + "acc_norm": 0.23161764705882354, 
+ "acc_norm_stderr": 0.025626533803777562 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.40816326530612246, + "acc_stderr": 0.03146465712827423, + "acc_norm": 0.40816326530612246, + "acc_norm_stderr": 0.03146465712827423 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2869198312236287, + "acc_stderr": 0.02944377302259469, + "acc_norm": 0.2869198312236287, + "acc_norm_stderr": 0.02944377302259469 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.24902216427640156, + "acc_stderr": 0.01104489226404077, + "acc_norm": 0.24902216427640156, + "acc_norm_stderr": 0.01104489226404077 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.030190282453501933, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.030190282453501933 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2909090909090909, + "acc_stderr": 0.03546563019624335, + "acc_norm": 0.2909090909090909, + "acc_norm_stderr": 0.03546563019624335 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.29253365973072215, + "mc1_stderr": 0.015925597445286165, + "mc2": 0.4600456246073735, + "mc2_stderr": 0.014958372484169768 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3600944510035419, + "acc_stderr": 0.01650368672044008, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.017119172208061504 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + 
"harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + 
"harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "krevas/LDCC-Instruct-Llama-2-ko-13B-v4.1.8", + "model_sha": "424602efb3cb7b2c4e901d325113335c002a1da2", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/krevas/LDCC-Instruct-Llama-2-ko-13B-v4.1/result_2023-10-17 12:52:13.json b/krevas/LDCC-Instruct-Llama-2-ko-13B-v4.1/result_2023-10-17 12:52:13.json new file mode 100644 index 0000000000000000000000000000000000000000..755a504740ae9170bb3b150c61f6552c3c48cb31 --- /dev/null +++ b/krevas/LDCC-Instruct-Llama-2-ko-13B-v4.1/result_2023-10-17 12:52:13.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3993174061433447, + "acc_stderr": 0.014312094557946704, + "acc_norm": 0.46928327645051193, + "acc_norm_stderr": 0.014583792546304038 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4056960764787891, + "acc_stderr": 0.004900227226433389, + "acc_norm": 0.5419239195379406, + "acc_norm_stderr": 0.00497221024402057 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.32748538011695905, + "acc_stderr": 0.035993357714560276, + "acc_norm": 0.32748538011695905, + "acc_norm_stderr": 0.035993357714560276 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.32038834951456313, + "acc_stderr": 0.046202840822800406, + "acc_norm": 0.32038834951456313, + "acc_norm_stderr": 0.046202840822800406 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2822477650063857, + "acc_stderr": 0.016095302969878534, + "acc_norm": 0.2822477650063857, + "acc_norm_stderr": 0.016095302969878534 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34074074074074073, + "acc_stderr": 0.04094376269996794, + "acc_norm": 0.34074074074074073, + "acc_norm_stderr": 0.04094376269996794 + }, + 
"harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.225531914893617, + "acc_stderr": 0.027321078417387533, + "acc_norm": 0.225531914893617, + "acc_norm_stderr": 0.027321078417387533 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.26506024096385544, + "acc_stderr": 0.03436024037944968, + "acc_norm": 0.26506024096385544, + "acc_norm_stderr": 0.03436024037944968 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3762057877813505, + "acc_stderr": 0.027513925683549427, + "acc_norm": 0.3762057877813505, + "acc_norm_stderr": 0.027513925683549427 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.2556053811659193, + "acc_stderr": 0.029275891003969923, + "acc_norm": 0.2556053811659193, + "acc_norm_stderr": 0.029275891003969923 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.32061068702290074, + "acc_stderr": 0.040933292298342784, + "acc_norm": 0.32061068702290074, + "acc_norm_stderr": 0.040933292298342784 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.29292929292929293, + "acc_stderr": 0.032424979581788166, + "acc_norm": 0.29292929292929293, + "acc_norm_stderr": 0.032424979581788166 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2689655172413793, + "acc_stderr": 0.036951833116502325, + "acc_norm": 0.2689655172413793, + "acc_norm_stderr": 0.036951833116502325 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.19607843137254902, + "acc_stderr": 0.039505818611799616, + "acc_norm": 0.19607843137254902, + "acc_norm_stderr": 0.039505818611799616 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.029597329730978082, + "acc_norm": 0.29411764705882354, + 
"acc_norm_stderr": 0.029597329730978082 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.29743589743589743, + "acc_stderr": 0.023177408131465942, + "acc_norm": 0.29743589743589743, + "acc_norm_stderr": 0.023177408131465942 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.37962962962962965, + "acc_stderr": 0.04691521224077742, + "acc_norm": 0.37962962962962965, + "acc_norm_stderr": 0.04691521224077742 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.03178529710642749, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.03178529710642749 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3548387096774194, + "acc_stderr": 0.027218889773308757, + "acc_norm": 0.3548387096774194, + "acc_norm_stderr": 0.027218889773308757 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.03088273697413865, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.03088273697413865 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.25660377358490566, + "acc_stderr": 0.026880647889051996, + "acc_norm": 0.25660377358490566, + "acc_norm_stderr": 0.026880647889051996 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2909090909090909, + "acc_stderr": 0.04350271442923243, + "acc_norm": 0.2909090909090909, + "acc_norm_stderr": 0.04350271442923243 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2740740740740741, + "acc_stderr": 0.027195934804085622, + "acc_norm": 0.2740740740740741, + "acc_norm_stderr": 0.027195934804085622 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.271523178807947, + "acc_stderr": 
0.03631329803969654, + "acc_norm": 0.271523178807947, + "acc_norm_stderr": 0.03631329803969654 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.417910447761194, + "acc_stderr": 0.034875586404620636, + "acc_norm": 0.417910447761194, + "acc_norm_stderr": 0.034875586404620636 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.24855491329479767, + "acc_stderr": 0.03295304696818317, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 0.03295304696818317 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.23015873015873015, + "acc_stderr": 0.021679219663693152, + "acc_norm": 0.23015873015873015, + "acc_norm_stderr": 0.021679219663693152 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.03745554791462457, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.03745554791462457 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.16, + "acc_stderr": 0.03684529491774708, + "acc_norm": 0.16, + "acc_norm_stderr": 0.03684529491774708 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.407514450867052, + "acc_stderr": 0.0264545781469315, + "acc_norm": 0.407514450867052, + "acc_norm_stderr": 0.0264545781469315 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.36809815950920244, + "acc_stderr": 0.03789213935838396, + "acc_norm": 0.36809815950920244, + "acc_norm_stderr": 0.03789213935838396 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3425925925925926, + "acc_stderr": 0.02640614597362566, + "acc_norm": 0.3425925925925926, + "acc_norm_stderr": 0.02640614597362566 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816505, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816505 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.38341968911917096, + 
"acc_stderr": 0.03508984236295341, + "acc_norm": 0.38341968911917096, + "acc_norm_stderr": 0.03508984236295341 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.22807017543859648, + "acc_stderr": 0.03947152782669415, + "acc_norm": 0.22807017543859648, + "acc_norm_stderr": 0.03947152782669415 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3192660550458716, + "acc_stderr": 0.01998782906975, + "acc_norm": 0.3192660550458716, + "acc_norm_stderr": 0.01998782906975 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.23015873015873015, + "acc_stderr": 0.03764950879790605, + "acc_norm": 0.23015873015873015, + "acc_norm_stderr": 0.03764950879790605 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.31699346405228757, + "acc_stderr": 0.026643278474508748, + "acc_norm": 0.31699346405228757, + "acc_norm_stderr": 0.026643278474508748 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5371900826446281, + "acc_stderr": 0.04551711196104218, + "acc_norm": 0.5371900826446281, + "acc_norm_stderr": 0.04551711196104218 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3092105263157895, + "acc_stderr": 0.03761070869867479, + "acc_norm": 0.3092105263157895, + "acc_norm_stderr": 0.03761070869867479 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3202614379084967, + "acc_stderr": 0.018875682938069443, + "acc_norm": 0.3202614379084967, + "acc_norm_stderr": 0.018875682938069443 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32269503546099293, + "acc_stderr": 0.02788913930053478, + "acc_norm": 0.32269503546099293, + "acc_norm_stderr": 0.02788913930053478 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.21428571428571427, + "acc_stderr": 0.03894641120044793, + "acc_norm": 0.21428571428571427, + "acc_norm_stderr": 0.03894641120044793 + }, + 
"harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2916666666666667, + "acc_stderr": 0.030998666304560534, + "acc_norm": 0.2916666666666667, + "acc_norm_stderr": 0.030998666304560534 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24581005586592178, + "acc_stderr": 0.014400296429225608, + "acc_norm": 0.24581005586592178, + "acc_norm_stderr": 0.014400296429225608 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.20220588235294118, + "acc_stderr": 0.02439819298665492, + "acc_norm": 0.20220588235294118, + "acc_norm_stderr": 0.02439819298665492 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4204081632653061, + "acc_stderr": 0.03160106993449604, + "acc_norm": 0.4204081632653061, + "acc_norm_stderr": 0.03160106993449604 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.38396624472573837, + "acc_stderr": 0.03165867806410668, + "acc_norm": 0.38396624472573837, + "acc_norm_stderr": 0.03165867806410668 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.30247718383311606, + "acc_stderr": 0.011731524234165706, + "acc_norm": 0.30247718383311606, + "acc_norm_stderr": 0.011731524234165706 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.3382352941176471, + "acc_stderr": 0.03320574612945432, + "acc_norm": 0.3382352941176471, + "acc_norm_stderr": 0.03320574612945432 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.40606060606060607, + "acc_stderr": 0.03834816355401181, + "acc_norm": 0.40606060606060607, + "acc_norm_stderr": 0.03834816355401181 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3292533659730722, + "mc1_stderr": 0.016451264440068246, + 
"mc2": 0.4905950778856991, + "mc2_stderr": 0.01526052031524314 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3305785123966942, + "acc_stderr": 0.0161734232988457, + "acc_norm": 0.4037780401416765, + "acc_norm_stderr": 0.016869031540298635 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + 
"harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "krevas/LDCC-Instruct-Llama-2-ko-13B-v4.1", + "model_sha": "f0e5e0f218635b4dd43f0ba2b3b4cd5007967625", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/krevas/LDCC-Instruct-Llama-2-ko-13B-v4.2.1/result_2023-10-25 14:30:21.json b/krevas/LDCC-Instruct-Llama-2-ko-13B-v4.2.1/result_2023-10-25 14:30:21.json new file mode 100644 index 0000000000000000000000000000000000000000..b0588c628d77836967d1614d9ad0f9660e7d6c8b --- /dev/null +++ b/krevas/LDCC-Instruct-Llama-2-ko-13B-v4.2.1/result_2023-10-25 14:30:21.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.35409556313993173, + "acc_stderr": 0.013975454122756557, + "acc_norm": 0.3993174061433447, + "acc_norm_stderr": 0.014312094557946705 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4021111332403904, + "acc_stderr": 0.004893220635011784, + "acc_norm": 
0.536247759410476, + "acc_norm_stderr": 0.00497665198975764 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.42105263157894735, + "acc_stderr": 0.037867207062342145, + "acc_norm": 0.42105263157894735, + "acc_norm_stderr": 0.037867207062342145 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5436893203883495, + "acc_stderr": 0.049318019942204146, + "acc_norm": 0.5436893203883495, + "acc_norm_stderr": 0.049318019942204146 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.45721583652618136, + "acc_stderr": 0.01781438523853443, + "acc_norm": 0.45721583652618136, + "acc_norm_stderr": 0.01781438523853443 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.04171654161354543, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.04171654161354543 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3276595744680851, + "acc_stderr": 0.030683020843231004, + "acc_norm": 0.3276595744680851, + "acc_norm_stderr": 0.030683020843231004 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3433734939759036, + "acc_stderr": 0.03696584317010601, + "acc_norm": 0.3433734939759036, + "acc_norm_stderr": 0.03696584317010601 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4565916398713826, + "acc_stderr": 0.0282908690541976, + "acc_norm": 0.4565916398713826, + "acc_norm_stderr": 0.0282908690541976 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.34080717488789236, + "acc_stderr": 0.03181149747055358, + "acc_norm": 0.34080717488789236, + "acc_norm_stderr": 0.03181149747055358 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3816793893129771, + "acc_stderr": 0.042607351576445594, + "acc_norm": 0.3816793893129771, + "acc_norm_stderr": 0.042607351576445594 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + 
"acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5151515151515151, + "acc_stderr": 0.03560716516531061, + "acc_norm": 0.5151515151515151, + "acc_norm_stderr": 0.03560716516531061 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.33793103448275863, + "acc_stderr": 0.039417076320648906, + "acc_norm": 0.33793103448275863, + "acc_norm_stderr": 0.039417076320648906 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.041583075330832865, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.041583075330832865 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.0322529423239964, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.0322529423239964 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.40512820512820513, + "acc_stderr": 0.024890471769938145, + "acc_norm": 0.40512820512820513, + "acc_norm_stderr": 0.024890471769938145 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3425925925925926, + "acc_stderr": 0.045879047413018105, + "acc_norm": 0.3425925925925926, + "acc_norm_stderr": 0.045879047413018105 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.35467980295566504, + "acc_stderr": 0.03366124489051448, + "acc_norm": 0.35467980295566504, + "acc_norm_stderr": 0.03366124489051448 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4258064516129032, + "acc_stderr": 0.0281291127091659, + "acc_norm": 0.4258064516129032, + "acc_norm_stderr": 0.0281291127091659 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5982905982905983, + 
"acc_stderr": 0.03211693751051621, + "acc_norm": 0.5982905982905983, + "acc_norm_stderr": 0.03211693751051621 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.41132075471698115, + "acc_stderr": 0.0302850092590098, + "acc_norm": 0.41132075471698115, + "acc_norm_stderr": 0.0302850092590098 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4818181818181818, + "acc_stderr": 0.04785964010794916, + "acc_norm": 0.4818181818181818, + "acc_norm_stderr": 0.04785964010794916 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3037037037037037, + "acc_stderr": 0.02803792996911499, + "acc_norm": 0.3037037037037037, + "acc_norm_stderr": 0.02803792996911499 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.03757949922943343, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.03757949922943343 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5373134328358209, + "acc_stderr": 0.03525675167467974, + "acc_norm": 0.5373134328358209, + "acc_norm_stderr": 0.03525675167467974 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3872832369942196, + "acc_stderr": 0.03714325906302065, + "acc_norm": 0.3872832369942196, + "acc_norm_stderr": 0.03714325906302065 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.335978835978836, + "acc_stderr": 0.024326310529149152, + "acc_norm": 0.335978835978836, + "acc_norm_stderr": 0.024326310529149152 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3263888888888889, + "acc_stderr": 0.03921067198982266, + "acc_norm": 0.3263888888888889, + "acc_norm_stderr": 0.03921067198982266 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.57, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.57, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 
0.3930635838150289, + "acc_stderr": 0.026296227915613663, + "acc_norm": 0.3930635838150289, + "acc_norm_stderr": 0.026296227915613663 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4049079754601227, + "acc_stderr": 0.038566721635489125, + "acc_norm": 0.4049079754601227, + "acc_norm_stderr": 0.038566721635489125 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.43209876543209874, + "acc_stderr": 0.02756301097160667, + "acc_norm": 0.43209876543209874, + "acc_norm_stderr": 0.02756301097160667 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.04605661864718381, + "acc_norm": 0.3, + "acc_norm_stderr": 0.04605661864718381 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.49222797927461137, + "acc_stderr": 0.03608003225569654, + "acc_norm": 0.49222797927461137, + "acc_norm_stderr": 0.03608003225569654 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2894736842105263, + "acc_stderr": 0.04266339443159394, + "acc_norm": 0.2894736842105263, + "acc_norm_stderr": 0.04266339443159394 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.47889908256880737, + "acc_stderr": 0.021418224754264643, + "acc_norm": 0.47889908256880737, + "acc_norm_stderr": 0.021418224754264643 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4126984126984127, + "acc_stderr": 0.04403438954768177, + "acc_norm": 0.4126984126984127, + "acc_norm_stderr": 0.04403438954768177 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3954248366013072, + "acc_stderr": 0.027996723180631466, + "acc_norm": 0.3954248366013072, + "acc_norm_stderr": 0.027996723180631466 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5289256198347108, + "acc_stderr": 0.04556710331269498, + "acc_norm": 0.5289256198347108, + "acc_norm_stderr": 0.04556710331269498 + }, + 
"harness|ko_mmlu_astronomy|5": { + "acc": 0.4473684210526316, + "acc_stderr": 0.04046336883978251, + "acc_norm": 0.4473684210526316, + "acc_norm_stderr": 0.04046336883978251 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3415032679738562, + "acc_stderr": 0.019184639328092484, + "acc_norm": 0.3415032679738562, + "acc_norm_stderr": 0.019184639328092484 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2872340425531915, + "acc_stderr": 0.026992199173064356, + "acc_norm": 0.2872340425531915, + "acc_norm_stderr": 0.026992199173064356 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25, + "acc_stderr": 0.04109974682633932, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04109974682633932 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4305555555555556, + "acc_stderr": 0.03376922151252336, + "acc_norm": 0.4305555555555556, + "acc_norm_stderr": 0.03376922151252336 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.28044692737430166, + "acc_stderr": 0.015024083883322869, + "acc_norm": 0.28044692737430166, + "acc_norm_stderr": 0.015024083883322869 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.049999999999999996, + "acc_norm": 0.45, + "acc_norm_stderr": 0.049999999999999996 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3860294117647059, + "acc_stderr": 0.02957326913441112, + "acc_norm": 0.3860294117647059, + "acc_norm_stderr": 0.02957326913441112 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4448979591836735, + "acc_stderr": 0.031814251181977865, + "acc_norm": 0.4448979591836735, + "acc_norm_stderr": 0.031814251181977865 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5189873417721519, + "acc_stderr": 0.03252375148090447, + "acc_norm": 0.5189873417721519, + 
"acc_norm_stderr": 0.03252375148090447 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3428943937418514, + "acc_stderr": 0.012123463271585895, + "acc_norm": 0.3428943937418514, + "acc_norm_stderr": 0.012123463271585895 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.03484941514429231, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.03484941514429231 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.46060606060606063, + "acc_stderr": 0.03892207016552013, + "acc_norm": 0.46060606060606063, + "acc_norm_stderr": 0.03892207016552013 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.26560587515299877, + "mc1_stderr": 0.015461027627253597, + "mc2": 0.4170988801266876, + "mc2_stderr": 0.015242823678966766 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3612750885478158, + "acc_stderr": 0.016515463022412014, + "acc_norm": 0.48760330578512395, + "acc_norm_stderr": 0.017185069732676524 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + 
"harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "krevas/LDCC-Instruct-Llama-2-ko-13B-v4.2.1", + "model_sha": "37db0cf6282e151ecc013b98fda871ce486e52c3", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null 
+ } +} \ No newline at end of file diff --git a/krevas/LDCC-Instruct-Llama-2-ko-13B-v4.2.2/result_2023-10-26 11:07:01.json b/krevas/LDCC-Instruct-Llama-2-ko-13B-v4.2.2/result_2023-10-26 11:07:01.json new file mode 100644 index 0000000000000000000000000000000000000000..7a2cdb86aea633c86c98fcce7f82ae2344fb34de --- /dev/null +++ b/krevas/LDCC-Instruct-Llama-2-ko-13B-v4.2.2/result_2023-10-26 11:07:01.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3822525597269625, + "acc_stderr": 0.014200454049979274, + "acc_norm": 0.45051194539249145, + "acc_norm_stderr": 0.014539646098471625 + }, + "harness|ko_hellaswag|10": { + "acc": 0.41575383389762993, + "acc_stderr": 0.004918442328872009, + "acc_norm": 0.5518820952001593, + "acc_norm_stderr": 0.0049628462061255 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5672514619883041, + "acc_stderr": 0.03799978644370606, + "acc_norm": 0.5672514619883041, + "acc_norm_stderr": 0.03799978644370606 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.49514563106796117, + "acc_stderr": 0.049505043821289195, + "acc_norm": 0.49514563106796117, + "acc_norm_stderr": 0.049505043821289195 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5363984674329502, + "acc_stderr": 0.017832524079593265, + "acc_norm": 0.5363984674329502, + "acc_norm_stderr": 0.017832524079593265 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.04171654161354544, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.04171654161354544 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.31063829787234043, + "acc_stderr": 0.03025123757921317, + "acc_norm": 0.31063829787234043, + "acc_norm_stderr": 0.03025123757921317 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.41566265060240964, + "acc_stderr": 0.03836722176598053, + 
"acc_norm": 0.41566265060240964, + "acc_norm_stderr": 0.03836722176598053 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5048231511254019, + "acc_stderr": 0.02839677044411129, + "acc_norm": 0.5048231511254019, + "acc_norm_stderr": 0.02839677044411129 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.45739910313901344, + "acc_stderr": 0.033435777055830646, + "acc_norm": 0.45739910313901344, + "acc_norm_stderr": 0.033435777055830646 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5419847328244275, + "acc_stderr": 0.04369802690578756, + "acc_norm": 0.5419847328244275, + "acc_norm_stderr": 0.04369802690578756 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.42, + "acc_stderr": 0.04960449637488583, + "acc_norm": 0.42, + "acc_norm_stderr": 0.04960449637488583 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.48484848484848486, + "acc_stderr": 0.03560716516531061, + "acc_norm": 0.48484848484848486, + "acc_norm_stderr": 0.03560716516531061 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4, + "acc_stderr": 0.04082482904638629, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04082482904638629 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.17647058823529413, + "acc_stderr": 0.0379328118530781, + "acc_norm": 0.17647058823529413, + "acc_norm_stderr": 0.0379328118530781 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4957983193277311, + "acc_stderr": 0.03247734334448111, + "acc_norm": 0.4957983193277311, + "acc_norm_stderr": 0.03247734334448111 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4205128205128205, + "acc_stderr": 0.025028610276710855, + "acc_norm": 0.4205128205128205, + "acc_norm_stderr": 0.025028610276710855 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + 
"acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.048262172941398944, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.048262172941398944 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3497536945812808, + "acc_stderr": 0.03355400904969565, + "acc_norm": 0.3497536945812808, + "acc_norm_stderr": 0.03355400904969565 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.43870967741935485, + "acc_stderr": 0.028229497320317227, + "acc_norm": 0.43870967741935485, + "acc_norm_stderr": 0.028229497320317227 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6538461538461539, + "acc_stderr": 0.0311669573672359, + "acc_norm": 0.6538461538461539, + "acc_norm_stderr": 0.0311669573672359 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.44150943396226416, + "acc_stderr": 0.030561590426731833, + "acc_norm": 0.44150943396226416, + "acc_norm_stderr": 0.030561590426731833 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.41818181818181815, + "acc_stderr": 0.04724577405731572, + "acc_norm": 0.41818181818181815, + "acc_norm_stderr": 0.04724577405731572 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24074074074074073, + "acc_stderr": 0.0260671592222758, + "acc_norm": 0.24074074074074073, + "acc_norm_stderr": 0.0260671592222758 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.1986754966887417, + "acc_stderr": 0.03257847384436774, + "acc_norm": 0.1986754966887417, + "acc_norm_stderr": 0.03257847384436774 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.572139303482587, + "acc_stderr": 0.03498541988407795, + "acc_norm": 0.572139303482587, + "acc_norm_stderr": 0.03498541988407795 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3699421965317919, + "acc_stderr": 0.03681229633394319, + "acc_norm": 0.3699421965317919, + "acc_norm_stderr": 0.03681229633394319 + }, + 
"harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.02326651221373057, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.02326651221373057 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.04122728707651282, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.04122728707651282 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.64, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.64, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5028901734104047, + "acc_stderr": 0.026918645383239015, + "acc_norm": 0.5028901734104047, + "acc_norm_stderr": 0.026918645383239015 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4539877300613497, + "acc_stderr": 0.0391170190467718, + "acc_norm": 0.4539877300613497, + "acc_norm_stderr": 0.0391170190467718 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4691358024691358, + "acc_stderr": 0.027767689606833925, + "acc_norm": 0.4691358024691358, + "acc_norm_stderr": 0.027767689606833925 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5595854922279793, + "acc_stderr": 0.035827245300360945, + "acc_norm": 0.5595854922279793, + "acc_norm_stderr": 0.035827245300360945 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.22807017543859648, + "acc_stderr": 0.03947152782669415, + "acc_norm": 0.22807017543859648, + "acc_norm_stderr": 0.03947152782669415 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5541284403669725, + "acc_stderr": 0.02131133500970858, + "acc_norm": 0.5541284403669725, + "acc_norm_stderr": 
0.02131133500970858 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.040061680838488774, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.040061680838488774 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.45751633986928103, + "acc_stderr": 0.02852638345214264, + "acc_norm": 0.45751633986928103, + "acc_norm_stderr": 0.02852638345214264 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6942148760330579, + "acc_stderr": 0.04205953933884124, + "acc_norm": 0.6942148760330579, + "acc_norm_stderr": 0.04205953933884124 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.45394736842105265, + "acc_stderr": 0.04051646342874141, + "acc_norm": 0.45394736842105265, + "acc_norm_stderr": 0.04051646342874141 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.38562091503267976, + "acc_stderr": 0.019691459052354143, + "acc_norm": 0.38562091503267976, + "acc_norm_stderr": 0.019691459052354143 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3475177304964539, + "acc_stderr": 0.028406627809590954, + "acc_norm": 0.3475177304964539, + "acc_norm_stderr": 0.028406627809590954 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.04287858751340455, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.04287858751340455 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.26851851851851855, + "acc_stderr": 0.03022522616001242, + "acc_norm": 0.26851851851851855, + "acc_norm_stderr": 0.03022522616001242 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.29720670391061454, + "acc_stderr": 0.015285313353641599, + "acc_norm": 0.29720670391061454, + "acc_norm_stderr": 0.015285313353641599 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, 
+ "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.2867647058823529, + "acc_stderr": 0.02747227447323382, + "acc_norm": 0.2867647058823529, + "acc_norm_stderr": 0.02747227447323382 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4775510204081633, + "acc_stderr": 0.031976941187136725, + "acc_norm": 0.4775510204081633, + "acc_norm_stderr": 0.031976941187136725 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6075949367088608, + "acc_stderr": 0.0317847187456473, + "acc_norm": 0.6075949367088608, + "acc_norm_stderr": 0.0317847187456473 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3318122555410691, + "acc_stderr": 0.012026088259897628, + "acc_norm": 0.3318122555410691, + "acc_norm_stderr": 0.012026088259897628 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5049019607843137, + "acc_stderr": 0.035091433756067866, + "acc_norm": 0.5049019607843137, + "acc_norm_stderr": 0.035091433756067866 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.038881769216741004, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.038881769216741004 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.27906976744186046, + "mc1_stderr": 0.01570210709062788, + "mc2": 0.4471086568861838, + "mc2_stderr": 0.015281241232491133 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5466351829988194, + "acc_stderr": 0.01711541822522687, + "acc_norm": 0.602125147579693, + "acc_norm_stderr": 0.016827959054733395 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + 
"harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + 
"harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "krevas/LDCC-Instruct-Llama-2-ko-13B-v4.2.2", + "model_sha": "b8290fa1d56a9ff58d2fecf3f8edd7058eb85502", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/krevas/LDCC-Instruct-Llama-2-ko-13B-v4.2.3/result_2023-10-28 06:04:41.json b/krevas/LDCC-Instruct-Llama-2-ko-13B-v4.2.3/result_2023-10-28 06:04:41.json new file mode 100644 index 0000000000000000000000000000000000000000..53d4e1cd8b7bec171e9033b4eb54ca74667bb45d --- /dev/null +++ b/krevas/LDCC-Instruct-Llama-2-ko-13B-v4.2.3/result_2023-10-28 06:04:41.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3848122866894198, + "acc_stderr": 0.014218371065251095, + "acc_norm": 0.44112627986348124, + "acc_norm_stderr": 0.014509747749064664 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4186417048396734, + "acc_stderr": 0.00492328184182851, + "acc_norm": 0.5557657837084247, + "acc_norm_stderr": 0.004958649623815337 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.03811079669833531, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.03811079669833531 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6019417475728155, + "acc_stderr": 0.04846748253977238, + "acc_norm": 0.6019417475728155, + "acc_norm_stderr": 0.04846748253977238 + }, + 
"harness|ko_mmlu_miscellaneous|5": { + "acc": 0.524904214559387, + "acc_stderr": 0.017857770704901035, + "acc_norm": 0.524904214559387, + "acc_norm_stderr": 0.017857770704901035 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4222222222222222, + "acc_stderr": 0.04266763404099582, + "acc_norm": 0.4222222222222222, + "acc_norm_stderr": 0.04266763404099582 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3404255319148936, + "acc_stderr": 0.030976692998534443, + "acc_norm": 0.3404255319148936, + "acc_norm_stderr": 0.030976692998534443 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39156626506024095, + "acc_stderr": 0.03799857454479636, + "acc_norm": 0.39156626506024095, + "acc_norm_stderr": 0.03799857454479636 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5305466237942122, + "acc_stderr": 0.028345045864840622, + "acc_norm": 0.5305466237942122, + "acc_norm_stderr": 0.028345045864840622 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.484304932735426, + "acc_stderr": 0.0335412657542081, + "acc_norm": 0.484304932735426, + "acc_norm_stderr": 0.0335412657542081 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5114503816793893, + "acc_stderr": 0.043841400240780176, + "acc_norm": 0.5114503816793893, + "acc_norm_stderr": 0.043841400240780176 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5353535353535354, + "acc_stderr": 0.03553436368828061, + "acc_norm": 0.5353535353535354, + "acc_norm_stderr": 0.03553436368828061 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4896551724137931, + "acc_stderr": 0.04165774775728763, + "acc_norm": 0.4896551724137931, + "acc_norm_stderr": 0.04165774775728763 + }, + 
"harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.04220773659171452, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.04220773659171452 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.49159663865546216, + "acc_stderr": 0.03247390276569669, + "acc_norm": 0.49159663865546216, + "acc_norm_stderr": 0.03247390276569669 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.45897435897435895, + "acc_stderr": 0.025265525491284295, + "acc_norm": 0.45897435897435895, + "acc_norm_stderr": 0.025265525491284295 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5277777777777778, + "acc_stderr": 0.04826217294139894, + "acc_norm": 0.5277777777777778, + "acc_norm_stderr": 0.04826217294139894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3399014778325123, + "acc_stderr": 0.033327690684107895, + "acc_norm": 0.3399014778325123, + "acc_norm_stderr": 0.033327690684107895 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4774193548387097, + "acc_stderr": 0.028414985019707868, + "acc_norm": 0.4774193548387097, + "acc_norm_stderr": 0.028414985019707868 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6752136752136753, + "acc_stderr": 0.03067902276549883, + "acc_norm": 0.6752136752136753, + "acc_norm_stderr": 0.03067902276549883 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.47924528301886793, + "acc_stderr": 0.030746349975723463, + "acc_norm": 0.47924528301886793, + "acc_norm_stderr": 0.030746349975723463 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5363636363636364, + "acc_stderr": 0.04776449162396197, + "acc_norm": 0.5363636363636364, + 
"acc_norm_stderr": 0.04776449162396197 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2740740740740741, + "acc_stderr": 0.027195934804085622, + "acc_norm": 0.2740740740740741, + "acc_norm_stderr": 0.027195934804085622 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.32450331125827814, + "acc_stderr": 0.03822746937658753, + "acc_norm": 0.32450331125827814, + "acc_norm_stderr": 0.03822746937658753 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5771144278606966, + "acc_stderr": 0.034932317774212816, + "acc_norm": 0.5771144278606966, + "acc_norm_stderr": 0.034932317774212816 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4393063583815029, + "acc_stderr": 0.037842719328874674, + "acc_norm": 0.4393063583815029, + "acc_norm_stderr": 0.037842719328874674 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.328042328042328, + "acc_stderr": 0.024180497164376882, + "acc_norm": 0.328042328042328, + "acc_norm_stderr": 0.024180497164376882 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4027777777777778, + "acc_stderr": 0.04101405519842425, + "acc_norm": 0.4027777777777778, + "acc_norm_stderr": 0.04101405519842425 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.34, + "acc_stderr": 0.047609522856952344, + "acc_norm": 0.34, + "acc_norm_stderr": 0.047609522856952344 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.63, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.63, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.49421965317919075, + "acc_stderr": 0.02691729617914911, + "acc_norm": 0.49421965317919075, + "acc_norm_stderr": 0.02691729617914911 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.49079754601226994, + "acc_stderr": 0.03927705600787443, + "acc_norm": 0.49079754601226994, + "acc_norm_stderr": 0.03927705600787443 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4876543209876543, + "acc_stderr": 0.027812262269327228, + 
"acc_norm": 0.4876543209876543, + "acc_norm_stderr": 0.027812262269327228 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.04605661864718381, + "acc_norm": 0.3, + "acc_norm_stderr": 0.04605661864718381 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5284974093264249, + "acc_stderr": 0.03602573571288442, + "acc_norm": 0.5284974093264249, + "acc_norm_stderr": 0.03602573571288442 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.040969851398436716, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.040969851398436716 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5614678899082569, + "acc_stderr": 0.02127471307395456, + "acc_norm": 0.5614678899082569, + "acc_norm_stderr": 0.02127471307395456 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.04134913018303316, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.04134913018303316 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.47058823529411764, + "acc_stderr": 0.0285803410651383, + "acc_norm": 0.47058823529411764, + "acc_norm_stderr": 0.0285803410651383 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7272727272727273, + "acc_stderr": 0.04065578140908705, + "acc_norm": 0.7272727272727273, + "acc_norm_stderr": 0.04065578140908705 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.47368421052631576, + "acc_stderr": 0.04063302731486671, + "acc_norm": 0.47368421052631576, + "acc_norm_stderr": 0.04063302731486671 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.369281045751634, + "acc_stderr": 0.01952431674486635, + "acc_norm": 0.369281045751634, + "acc_norm_stderr": 0.01952431674486635 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3191489361702128, + 
"acc_stderr": 0.027807990141320207, + "acc_norm": 0.3191489361702128, + "acc_norm_stderr": 0.027807990141320207 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.23214285714285715, + "acc_stderr": 0.04007341809755806, + "acc_norm": 0.23214285714285715, + "acc_norm_stderr": 0.04007341809755806 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4027777777777778, + "acc_stderr": 0.03344887382997866, + "acc_norm": 0.4027777777777778, + "acc_norm_stderr": 0.03344887382997866 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2636871508379888, + "acc_stderr": 0.014736926383761992, + "acc_norm": 0.2636871508379888, + "acc_norm_stderr": 0.014736926383761992 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.39338235294117646, + "acc_stderr": 0.02967428828131118, + "acc_norm": 0.39338235294117646, + "acc_norm_stderr": 0.02967428828131118 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.49795918367346936, + "acc_stderr": 0.0320089533497105, + "acc_norm": 0.49795918367346936, + "acc_norm_stderr": 0.0320089533497105 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5780590717299579, + "acc_stderr": 0.032148146302403695, + "acc_norm": 0.5780590717299579, + "acc_norm_stderr": 0.032148146302403695 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3546284224250326, + "acc_stderr": 0.012218576439090172, + "acc_norm": 0.3546284224250326, + "acc_norm_stderr": 0.012218576439090172 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4950980392156863, + "acc_stderr": 0.03509143375606786, + "acc_norm": 0.4950980392156863, + "acc_norm_stderr": 0.03509143375606786 + }, + 
"harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5696969696969697, + "acc_stderr": 0.03866225962879077, + "acc_norm": 0.5696969696969697, + "acc_norm_stderr": 0.03866225962879077 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2766217870257038, + "mc1_stderr": 0.015659605755326905, + "mc2": 0.441499711570202, + "mc2_stderr": 0.015207137327045393 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5064935064935064, + "acc_stderr": 0.01718890435907731, + "acc_norm": 0.5596221959858324, + "acc_norm_stderr": 0.017067699774312967 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + 
"harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "krevas/LDCC-Instruct-Llama-2-ko-13B-v4.2.3", + "model_sha": "3444f841fe050a22a95a166ac953f92047e2c411", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/krevas/LDCC-Instruct-Llama-2-ko-13B-v4.2.4/result_2023-10-28 05:50:19.json b/krevas/LDCC-Instruct-Llama-2-ko-13B-v4.2.4/result_2023-10-28 05:50:19.json new file mode 100644 index 0000000000000000000000000000000000000000..057edd92004604a095f4cf37e6ddef6d94c5a5b6 --- /dev/null +++ b/krevas/LDCC-Instruct-Llama-2-ko-13B-v4.2.4/result_2023-10-28 05:50:19.json @@ -0,0 +1,444 @@ +{ + 
"results": { + "harness|ko_arc_challenge|25": { + "acc": 0.39419795221843, + "acc_stderr": 0.01428052266746733, + "acc_norm": 0.4513651877133106, + "acc_norm_stderr": 0.014542104569955262 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4221270663214499, + "acc_stderr": 0.004928891895874289, + "acc_norm": 0.5546703843855806, + "acc_norm_stderr": 0.00495986429917813 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5380116959064327, + "acc_stderr": 0.03823727092882307, + "acc_norm": 0.5380116959064327, + "acc_norm_stderr": 0.03823727092882307 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5436893203883495, + "acc_stderr": 0.049318019942204146, + "acc_norm": 0.5436893203883495, + "acc_norm_stderr": 0.049318019942204146 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5044699872286079, + "acc_stderr": 0.017879248970584356, + "acc_norm": 0.5044699872286079, + "acc_norm_stderr": 0.017879248970584356 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.042446332383532286, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.042446332383532286 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621503, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621503 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3659574468085106, + "acc_stderr": 0.0314895582974553, + "acc_norm": 0.3659574468085106, + "acc_norm_stderr": 0.0314895582974553 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42168674698795183, + "acc_stderr": 0.03844453181770917, + "acc_norm": 0.42168674698795183, + "acc_norm_stderr": 0.03844453181770917 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4533762057877814, + "acc_stderr": 0.028274359854894255, + "acc_norm": 0.4533762057877814, + "acc_norm_stderr": 0.028274359854894255 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3632286995515695, + "acc_stderr": 0.03227790442850499, + "acc_norm": 0.3632286995515695, + "acc_norm_stderr": 
0.03227790442850499 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5038167938931297, + "acc_stderr": 0.043851623256015534, + "acc_norm": 0.5038167938931297, + "acc_norm_stderr": 0.043851623256015534 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5858585858585859, + "acc_stderr": 0.03509438348879629, + "acc_norm": 0.5858585858585859, + "acc_norm_stderr": 0.03509438348879629 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.041042692118062316, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.041042692118062316 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.042801058373643966, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.042801058373643966 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.42436974789915966, + "acc_stderr": 0.03210479051015776, + "acc_norm": 0.42436974789915966, + "acc_norm_stderr": 0.03210479051015776 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.39487179487179486, + "acc_stderr": 0.02478431694215636, + "acc_norm": 0.39487179487179486, + "acc_norm_stderr": 0.02478431694215636 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.04803752235190193, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.04803752235190193 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3448275862068966, + "acc_stderr": 0.03344283744280458, + "acc_norm": 
0.3448275862068966, + "acc_norm_stderr": 0.03344283744280458 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.44193548387096776, + "acc_stderr": 0.02825155790684973, + "acc_norm": 0.44193548387096776, + "acc_norm_stderr": 0.02825155790684973 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6581196581196581, + "acc_stderr": 0.031075028526507748, + "acc_norm": 0.6581196581196581, + "acc_norm_stderr": 0.031075028526507748 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.47924528301886793, + "acc_stderr": 0.030746349975723463, + "acc_norm": 0.47924528301886793, + "acc_norm_stderr": 0.030746349975723463 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4818181818181818, + "acc_stderr": 0.04785964010794916, + "acc_norm": 0.4818181818181818, + "acc_norm_stderr": 0.04785964010794916 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24444444444444444, + "acc_stderr": 0.026202766534652148, + "acc_norm": 0.24444444444444444, + "acc_norm_stderr": 0.026202766534652148 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2781456953642384, + "acc_stderr": 0.03658603262763743, + "acc_norm": 0.2781456953642384, + "acc_norm_stderr": 0.03658603262763743 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5970149253731343, + "acc_stderr": 0.034683432951111266, + "acc_norm": 0.5970149253731343, + "acc_norm_stderr": 0.034683432951111266 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.35260115606936415, + "acc_stderr": 0.036430371689585496, + "acc_norm": 0.35260115606936415, + "acc_norm_stderr": 0.036430371689585496 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30423280423280424, + "acc_stderr": 0.023695415009463084, + "acc_norm": 0.30423280423280424, + "acc_norm_stderr": 0.023695415009463084 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3194444444444444, + "acc_stderr": 0.038990736873573344, + "acc_norm": 0.3194444444444444, + "acc_norm_stderr": 0.038990736873573344 + }, + 
"harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.64, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.64, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.45375722543352603, + "acc_stderr": 0.02680372058320618, + "acc_norm": 0.45375722543352603, + "acc_norm_stderr": 0.02680372058320618 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4171779141104294, + "acc_stderr": 0.038741028598180814, + "acc_norm": 0.4171779141104294, + "acc_norm_stderr": 0.038741028598180814 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4567901234567901, + "acc_stderr": 0.027716661650194045, + "acc_norm": 0.4567901234567901, + "acc_norm_stderr": 0.027716661650194045 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.47150259067357514, + "acc_stderr": 0.03602573571288442, + "acc_norm": 0.47150259067357514, + "acc_norm_stderr": 0.03602573571288442 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.04185774424022057, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.04185774424022057 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.4917431192660551, + "acc_stderr": 0.021434399918214338, + "acc_norm": 0.4917431192660551, + "acc_norm_stderr": 0.021434399918214338 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.04134913018303316, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.04134913018303316 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.45098039215686275, + "acc_stderr": 0.02849199358617157, + "acc_norm": 0.45098039215686275, + "acc_norm_stderr": 
0.02849199358617157 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5785123966942148, + "acc_stderr": 0.04507732278775087, + "acc_norm": 0.5785123966942148, + "acc_norm_stderr": 0.04507732278775087 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4407894736842105, + "acc_stderr": 0.04040311062490436, + "acc_norm": 0.4407894736842105, + "acc_norm_stderr": 0.04040311062490436 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3300653594771242, + "acc_stderr": 0.019023726160724553, + "acc_norm": 0.3300653594771242, + "acc_norm_stderr": 0.019023726160724553 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3262411347517731, + "acc_stderr": 0.02796845304356317, + "acc_norm": 0.3262411347517731, + "acc_norm_stderr": 0.02796845304356317 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.23214285714285715, + "acc_stderr": 0.040073418097558065, + "acc_norm": 0.23214285714285715, + "acc_norm_stderr": 0.040073418097558065 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.0305467452649532, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.0305467452649532 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.27205882352941174, + "acc_stderr": 0.027033041151681456, + "acc_norm": 
0.27205882352941174, + "acc_norm_stderr": 0.027033041151681456 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.49387755102040815, + "acc_stderr": 0.03200682020163908, + "acc_norm": 0.49387755102040815, + "acc_norm_stderr": 0.03200682020163908 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5189873417721519, + "acc_stderr": 0.03252375148090447, + "acc_norm": 0.5189873417721519, + "acc_norm_stderr": 0.03252375148090447 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.29726205997392435, + "acc_stderr": 0.01167334617308604, + "acc_norm": 0.29726205997392435, + "acc_norm_stderr": 0.01167334617308604 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4264705882352941, + "acc_stderr": 0.034711579079534254, + "acc_norm": 0.4264705882352941, + "acc_norm_stderr": 0.034711579079534254 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.503030303030303, + "acc_stderr": 0.03904272341431855, + "acc_norm": 0.503030303030303, + "acc_norm_stderr": 0.03904272341431855 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.30599755201958384, + "mc1_stderr": 0.016132229728155062, + "mc2": 0.47405944536561195, + "mc2_stderr": 0.015299344788205708 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5206611570247934, + "acc_stderr": 0.01717567127983645, + "acc_norm": 0.5726092089728453, + "acc_norm_stderr": 0.017008129844823156 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + 
"harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + 
"harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "krevas/LDCC-Instruct-Llama-2-ko-13B-v4.2.4", + "model_sha": "80bc09f73579e2bc021f084424f761f81e4f808d", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/krevas/LDCC-Instruct-Llama-2-ko-13B-v4.2.6/result_2023-10-30 01:00:17.json b/krevas/LDCC-Instruct-Llama-2-ko-13B-v4.2.6/result_2023-10-30 01:00:17.json new file mode 100644 index 0000000000000000000000000000000000000000..bde0c51f9ed95c2a24d52cb46eb39e5682f0c0b6 --- /dev/null +++ b/krevas/LDCC-Instruct-Llama-2-ko-13B-v4.2.6/result_2023-10-30 01:00:17.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.39761092150170646, + "acc_stderr": 0.014301752223279545, + "acc_norm": 0.4641638225255973, + "acc_norm_stderr": 0.01457381366473572 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4228241386178052, + "acc_stderr": 0.00492998369279506, + "acc_norm": 0.569308902609042, + "acc_norm_stderr": 0.004941609820763588 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4678362573099415, + "acc_stderr": 0.03826882417660369, + "acc_norm": 0.4678362573099415, + "acc_norm_stderr": 0.03826882417660369 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5339805825242718, + "acc_stderr": 0.0493929144727348, + "acc_norm": 0.5339805825242718, + "acc_norm_stderr": 0.0493929144727348 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5593869731800766, + "acc_stderr": 0.017753396973908497, + "acc_norm": 0.5593869731800766, + "acc_norm_stderr": 0.017753396973908497 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45925925925925926, + "acc_stderr": 0.04304979692464244, + "acc_norm": 0.45925925925925926, + "acc_norm_stderr": 0.04304979692464244 + }, + 
"harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4340425531914894, + "acc_stderr": 0.03240038086792747, + "acc_norm": 0.4340425531914894, + "acc_norm_stderr": 0.03240038086792747 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42168674698795183, + "acc_stderr": 0.038444531817709175, + "acc_norm": 0.42168674698795183, + "acc_norm_stderr": 0.038444531817709175 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4855305466237942, + "acc_stderr": 0.02838619808417768, + "acc_norm": 0.4855305466237942, + "acc_norm_stderr": 0.02838619808417768 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5381165919282511, + "acc_stderr": 0.033460150119732274, + "acc_norm": 0.5381165919282511, + "acc_norm_stderr": 0.033460150119732274 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4961832061068702, + "acc_stderr": 0.043851623256015534, + "acc_norm": 0.4961832061068702, + "acc_norm_stderr": 0.043851623256015534 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.42, + "acc_stderr": 0.04960449637488583, + "acc_norm": 0.42, + "acc_norm_stderr": 0.04960449637488583 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5757575757575758, + "acc_stderr": 0.03521224908841586, + "acc_norm": 0.5757575757575758, + "acc_norm_stderr": 0.03521224908841586 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.42758620689655175, + "acc_stderr": 0.041227371113703316, + "acc_norm": 0.42758620689655175, + "acc_norm_stderr": 0.041227371113703316 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.04280105837364396, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.04280105837364396 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.47058823529411764, + "acc_stderr": 0.03242225027115007, + "acc_norm": 0.47058823529411764, + 
"acc_norm_stderr": 0.03242225027115007 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.45897435897435895, + "acc_stderr": 0.025265525491284295, + "acc_norm": 0.45897435897435895, + "acc_norm_stderr": 0.025265525491284295 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5277777777777778, + "acc_stderr": 0.04826217294139894, + "acc_norm": 0.5277777777777778, + "acc_norm_stderr": 0.04826217294139894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4433497536945813, + "acc_stderr": 0.03495334582162933, + "acc_norm": 0.4433497536945813, + "acc_norm_stderr": 0.03495334582162933 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.49032258064516127, + "acc_stderr": 0.02843867799890955, + "acc_norm": 0.49032258064516127, + "acc_norm_stderr": 0.02843867799890955 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6282051282051282, + "acc_stderr": 0.031660988918880785, + "acc_norm": 0.6282051282051282, + "acc_norm_stderr": 0.031660988918880785 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4490566037735849, + "acc_stderr": 0.030612730713641092, + "acc_norm": 0.4490566037735849, + "acc_norm_stderr": 0.030612730713641092 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5272727272727272, + "acc_stderr": 0.0478200179138006, + "acc_norm": 0.5272727272727272, + "acc_norm_stderr": 0.0478200179138006 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25555555555555554, + "acc_stderr": 0.026593939101844072, + "acc_norm": 0.25555555555555554, + "acc_norm_stderr": 0.026593939101844072 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + "acc_stderr": 
0.03684881521389024, + "acc_norm": 0.2847682119205298, + "acc_norm_stderr": 0.03684881521389024 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5870646766169154, + "acc_stderr": 0.03481520803367348, + "acc_norm": 0.5870646766169154, + "acc_norm_stderr": 0.03481520803367348 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4161849710982659, + "acc_stderr": 0.037585177754049466, + "acc_norm": 0.4161849710982659, + "acc_norm_stderr": 0.037585177754049466 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2751322751322751, + "acc_stderr": 0.02300008685906864, + "acc_norm": 0.2751322751322751, + "acc_norm_stderr": 0.02300008685906864 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3819444444444444, + "acc_stderr": 0.040629907841466674, + "acc_norm": 0.3819444444444444, + "acc_norm_stderr": 0.040629907841466674 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5, + "acc_stderr": 0.026919095102908273, + "acc_norm": 0.5, + "acc_norm_stderr": 0.026919095102908273 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5276073619631901, + "acc_stderr": 0.0392237829061099, + "acc_norm": 0.5276073619631901, + "acc_norm_stderr": 0.0392237829061099 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.027801656212323667, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.027801656212323667 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5544041450777202, + "acc_stderr": 0.03587014986075659, 
+ "acc_norm": 0.5544041450777202, + "acc_norm_stderr": 0.03587014986075659 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2894736842105263, + "acc_stderr": 0.04266339443159395, + "acc_norm": 0.2894736842105263, + "acc_norm_stderr": 0.04266339443159395 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5761467889908257, + "acc_stderr": 0.021187263209087516, + "acc_norm": 0.5761467889908257, + "acc_norm_stderr": 0.021187263209087516 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.04134913018303316, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.04134913018303316 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4084967320261438, + "acc_stderr": 0.028146405993096358, + "acc_norm": 0.4084967320261438, + "acc_norm_stderr": 0.028146405993096358 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.043913262867240704, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.043913262867240704 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40131578947368424, + "acc_stderr": 0.03988903703336284, + "acc_norm": 0.40131578947368424, + "acc_norm_stderr": 0.03988903703336284 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.019722058939618068, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.019722058939618068 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32269503546099293, + "acc_stderr": 0.027889139300534792, + "acc_norm": 0.32269503546099293, + "acc_norm_stderr": 0.027889139300534792 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.1875, + "acc_stderr": 0.0370468111477387, + "acc_norm": 0.1875, + "acc_norm_stderr": 0.0370468111477387 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3472222222222222, + 
"acc_stderr": 0.032468872436376486, + "acc_norm": 0.3472222222222222, + "acc_norm_stderr": 0.032468872436376486 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.030161911930767102, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.030161911930767102 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5224489795918368, + "acc_stderr": 0.03197694118713671, + "acc_norm": 0.5224489795918368, + "acc_norm_stderr": 0.03197694118713671 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6244725738396625, + "acc_stderr": 0.03152256243091157, + "acc_norm": 0.6244725738396625, + "acc_norm_stderr": 0.03152256243091157 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.34876140808344197, + "acc_stderr": 0.012172035157127115, + "acc_norm": 0.34876140808344197, + "acc_norm_stderr": 0.012172035157127115 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.49019607843137253, + "acc_stderr": 0.03508637358630573, + "acc_norm": 0.49019607843137253, + "acc_norm_stderr": 0.03508637358630573 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5333333333333333, + "acc_stderr": 0.03895658065271847, + "acc_norm": 0.5333333333333333, + "acc_norm_stderr": 0.03895658065271847 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.25703794369645044, + "mc1_stderr": 0.015298077509485083, + "mc2": 0.419680831083004, + "mc2_stderr": 0.014665587466952046 + }, + 
"harness|ko_commongen_v2|2": { + "acc": 0.4899645808736718, + "acc_stderr": 0.017186891286894053, + "acc_norm": 0.5832349468713105, + "acc_norm_stderr": 0.016950489146108815 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + 
"harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "krevas/LDCC-Instruct-Llama-2-ko-13B-v4.2.6", + "model_sha": "951581c0603be594b823e8df208a4e6e307d413f", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/krevas/LDCC-Instruct-Llama-2-ko-13B-v4.2.8/result_2023-11-01 00:45:56.json b/krevas/LDCC-Instruct-Llama-2-ko-13B-v4.2.8/result_2023-11-01 00:45:56.json new file mode 100644 index 0000000000000000000000000000000000000000..5f83e3a8aab0ec443586c3f255ec0e3623b47fac --- /dev/null +++ b/krevas/LDCC-Instruct-Llama-2-ko-13B-v4.2.8/result_2023-11-01 00:45:56.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4035836177474403, + "acc_stderr": 0.014337158914268434, + "acc_norm": 0.4513651877133106, + "acc_norm_stderr": 0.014542104569955264 + }, + "harness|ko_hellaswag|10": { + "acc": 0.42561242780322645, + "acc_stderr": 0.004934250390879783, + "acc_norm": 0.5671181039633539, + "acc_norm_stderr": 
0.004944620712318275 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.52046783625731, + "acc_stderr": 0.038316105328219316, + "acc_norm": 0.52046783625731, + "acc_norm_stderr": 0.038316105328219316 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5048543689320388, + "acc_stderr": 0.04950504382128921, + "acc_norm": 0.5048543689320388, + "acc_norm_stderr": 0.04950504382128921 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5568326947637292, + "acc_stderr": 0.017764085035348418, + "acc_norm": 0.5568326947637292, + "acc_norm_stderr": 0.017764085035348418 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.043163785995113245, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.043163785995113245 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4085106382978723, + "acc_stderr": 0.03213418026701576, + "acc_norm": 0.4085106382978723, + "acc_norm_stderr": 0.03213418026701576 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3855421686746988, + "acc_stderr": 0.037891344246115476, + "acc_norm": 0.3855421686746988, + "acc_norm_stderr": 0.037891344246115476 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5144694533762058, + "acc_stderr": 0.02838619808417768, + "acc_norm": 0.5144694533762058, + "acc_norm_stderr": 0.02838619808417768 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5201793721973094, + "acc_stderr": 0.033530461674123, + "acc_norm": 0.5201793721973094, + "acc_norm_stderr": 0.033530461674123 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.44274809160305345, + "acc_stderr": 0.0435644720266507, + "acc_norm": 0.44274809160305345, + "acc_norm_stderr": 0.0435644720266507 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + 
}, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5757575757575758, + "acc_stderr": 0.035212249088415866, + "acc_norm": 0.5757575757575758, + "acc_norm_stderr": 0.035212249088415866 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.43448275862068964, + "acc_stderr": 0.041307408795554966, + "acc_norm": 0.43448275862068964, + "acc_norm_stderr": 0.041307408795554966 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.1568627450980392, + "acc_stderr": 0.03618664819936246, + "acc_norm": 0.1568627450980392, + "acc_norm_stderr": 0.03618664819936246 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4831932773109244, + "acc_stderr": 0.03246013680375308, + "acc_norm": 0.4831932773109244, + "acc_norm_stderr": 0.03246013680375308 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4794871794871795, + "acc_stderr": 0.025329663163489943, + "acc_norm": 0.4794871794871795, + "acc_norm_stderr": 0.025329663163489943 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.53, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.53, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4537037037037037, + "acc_stderr": 0.048129173245368216, + "acc_norm": 0.4537037037037037, + "acc_norm_stderr": 0.048129173245368216 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3645320197044335, + "acc_stderr": 0.0338640574606209, + "acc_norm": 0.3645320197044335, + "acc_norm_stderr": 0.0338640574606209 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.43548387096774194, + "acc_stderr": 0.028206225591502744, + "acc_norm": 0.43548387096774194, + "acc_norm_stderr": 0.028206225591502744 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6111111111111112, + "acc_stderr": 0.03193705726200293, + "acc_norm": 
0.6111111111111112, + "acc_norm_stderr": 0.03193705726200293 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.43018867924528303, + "acc_stderr": 0.03047144586718324, + "acc_norm": 0.43018867924528303, + "acc_norm_stderr": 0.03047144586718324 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.04769300568972744, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.04769300568972744 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2740740740740741, + "acc_stderr": 0.027195934804085626, + "acc_norm": 0.2740740740740741, + "acc_norm_stderr": 0.027195934804085626 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3509933774834437, + "acc_stderr": 0.03896981964257375, + "acc_norm": 0.3509933774834437, + "acc_norm_stderr": 0.03896981964257375 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5671641791044776, + "acc_stderr": 0.03503490923673282, + "acc_norm": 0.5671641791044776, + "acc_norm_stderr": 0.03503490923673282 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.43352601156069365, + "acc_stderr": 0.03778621079092056, + "acc_norm": 0.43352601156069365, + "acc_norm_stderr": 0.03778621079092056 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.023809523809523857, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.023809523809523857 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4097222222222222, + "acc_stderr": 0.04112490974670787, + "acc_norm": 0.4097222222222222, + "acc_norm_stderr": 0.04112490974670787 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.67, + "acc_stderr": 0.04725815626252607, + "acc_norm": 0.67, + "acc_norm_stderr": 0.04725815626252607 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5173410404624278, + "acc_stderr": 
0.026902900458666647, + "acc_norm": 0.5173410404624278, + "acc_norm_stderr": 0.026902900458666647 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5337423312883436, + "acc_stderr": 0.039194155450484096, + "acc_norm": 0.5337423312883436, + "acc_norm_stderr": 0.039194155450484096 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.027744313443376536, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.027744313443376536 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.533678756476684, + "acc_stderr": 0.036002440698671784, + "acc_norm": 0.533678756476684, + "acc_norm_stderr": 0.036002440698671784 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.04049339297748139, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.04049339297748139 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5798165137614679, + "acc_stderr": 0.021162420048273508, + "acc_norm": 0.5798165137614679, + "acc_norm_stderr": 0.021162420048273508 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.04040610178208841, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.04040610178208841 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.43137254901960786, + "acc_stderr": 0.028358956313423556, + "acc_norm": 0.43137254901960786, + "acc_norm_stderr": 0.028358956313423556 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6115702479338843, + "acc_stderr": 0.04449270350068382, + "acc_norm": 0.6115702479338843, + "acc_norm_stderr": 0.04449270350068382 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 
0.45394736842105265, + "acc_stderr": 0.04051646342874142, + "acc_norm": 0.45394736842105265, + "acc_norm_stderr": 0.04051646342874142 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3954248366013072, + "acc_stderr": 0.01978046595477752, + "acc_norm": 0.3954248366013072, + "acc_norm_stderr": 0.01978046595477752 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.36879432624113473, + "acc_stderr": 0.028782227561347247, + "acc_norm": 0.36879432624113473, + "acc_norm_stderr": 0.028782227561347247 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.29464285714285715, + "acc_stderr": 0.043270409325787296, + "acc_norm": 0.29464285714285715, + "acc_norm_stderr": 0.043270409325787296 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.39351851851851855, + "acc_stderr": 0.03331747876370312, + "acc_norm": 0.39351851851851855, + "acc_norm_stderr": 0.03331747876370312 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.04960449637488584, + "acc_norm": 0.42, + "acc_norm_stderr": 0.04960449637488584 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4338235294117647, + "acc_stderr": 0.03010563657001664, + "acc_norm": 0.4338235294117647, + "acc_norm_stderr": 0.03010563657001664 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.44081632653061226, + "acc_stderr": 0.03178419114175363, + "acc_norm": 0.44081632653061226, + "acc_norm_stderr": 0.03178419114175363 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5907172995780591, + "acc_stderr": 0.032007041833595914, + "acc_norm": 0.5907172995780591, + 
"acc_norm_stderr": 0.032007041833595914 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.35723598435462844, + "acc_stderr": 0.012238615750316494, + "acc_norm": 0.35723598435462844, + "acc_norm_stderr": 0.012238615750316494 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.45588235294117646, + "acc_stderr": 0.03495624522015474, + "acc_norm": 0.45588235294117646, + "acc_norm_stderr": 0.03495624522015474 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5151515151515151, + "acc_stderr": 0.03902551007374448, + "acc_norm": 0.5151515151515151, + "acc_norm_stderr": 0.03902551007374448 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.29865361077111385, + "mc1_stderr": 0.016021570613768545, + "mc2": 0.45429869166378484, + "mc2_stderr": 0.01501407718716332 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.45808736717827625, + "acc_stderr": 0.017129852117911147, + "acc_norm": 0.5974025974025974, + "acc_norm_stderr": 0.016861020486407762 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + 
"harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "krevas/LDCC-Instruct-Llama-2-ko-13B-v4.2.8", + "model_sha": "6608bbae2d57a5056ba8d5c82a7ecf8be4a640d6", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null 
+ } +} \ No newline at end of file diff --git a/krevas/LDCC-Instruct-Llama-2-ko-13B-v4/result_2023-10-13 21:03:52.json b/krevas/LDCC-Instruct-Llama-2-ko-13B-v4/result_2023-10-13 21:03:52.json new file mode 100644 index 0000000000000000000000000000000000000000..bbf69898a3feaea19bb3ab5f1506bb518009a00c --- /dev/null +++ b/krevas/LDCC-Instruct-Llama-2-ko-13B-v4/result_2023-10-13 21:03:52.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3720136518771331, + "acc_stderr": 0.014124597881844466, + "acc_norm": 0.4402730375426621, + "acc_norm_stderr": 0.014506769524804243 + }, + "harness|ko_hellaswag|10": { + "acc": 0.40938060147381, + "acc_stderr": 0.004907146229347555, + "acc_norm": 0.5426209918342959, + "acc_norm_stderr": 0.004971619995879755 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.49707602339181284, + "acc_stderr": 0.03834759370936839, + "acc_norm": 0.49707602339181284, + "acc_norm_stderr": 0.03834759370936839 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.3592233009708738, + "acc_stderr": 0.047504583990416925, + "acc_norm": 0.3592233009708738, + "acc_norm_stderr": 0.047504583990416925 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4891443167305236, + "acc_stderr": 0.017875748840242418, + "acc_norm": 0.4891443167305236, + "acc_norm_stderr": 0.017875748840242418 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.362962962962963, + "acc_stderr": 0.041539484047424, + "acc_norm": 0.362962962962963, + "acc_norm_stderr": 0.041539484047424 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.34893617021276596, + "acc_stderr": 0.031158522131357783, + "acc_norm": 0.34893617021276596, + "acc_norm_stderr": 0.031158522131357783 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.35542168674698793, + "acc_stderr": 0.03726214354322415, + "acc_norm": 
0.35542168674698793, + "acc_norm_stderr": 0.03726214354322415 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.40836012861736337, + "acc_stderr": 0.027917050748484634, + "acc_norm": 0.40836012861736337, + "acc_norm_stderr": 0.027917050748484634 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4170403587443946, + "acc_stderr": 0.03309266936071721, + "acc_norm": 0.4170403587443946, + "acc_norm_stderr": 0.03309266936071721 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.44274809160305345, + "acc_stderr": 0.0435644720266507, + "acc_norm": 0.44274809160305345, + "acc_norm_stderr": 0.0435644720266507 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.43, + "acc_stderr": 0.0497569851956243, + "acc_norm": 0.43, + "acc_norm_stderr": 0.0497569851956243 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.4292929292929293, + "acc_stderr": 0.035265527246011986, + "acc_norm": 0.4292929292929293, + "acc_norm_stderr": 0.035265527246011986 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.22758620689655173, + "acc_stderr": 0.03493950380131184, + "acc_norm": 0.22758620689655173, + "acc_norm_stderr": 0.03493950380131184 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.04220773659171453, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.04220773659171453 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.31092436974789917, + "acc_stderr": 0.030066761582977924, + "acc_norm": 0.31092436974789917, + "acc_norm_stderr": 0.030066761582977924 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3564102564102564, + "acc_stderr": 0.024283140529467284, + "acc_norm": 0.3564102564102564, + "acc_norm_stderr": 0.024283140529467284 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.44, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.44, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.37, + "acc_stderr": 
0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.04803752235190192, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.04803752235190192 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.31527093596059114, + "acc_stderr": 0.03269080871970186, + "acc_norm": 0.31527093596059114, + "acc_norm_stderr": 0.03269080871970186 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3774193548387097, + "acc_stderr": 0.027575960723278243, + "acc_norm": 0.3774193548387097, + "acc_norm_stderr": 0.027575960723278243 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5982905982905983, + "acc_stderr": 0.03211693751051622, + "acc_norm": 0.5982905982905983, + "acc_norm_stderr": 0.03211693751051622 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3433962264150943, + "acc_stderr": 0.029224526469124792, + "acc_norm": 0.3433962264150943, + "acc_norm_stderr": 0.029224526469124792 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4, + "acc_stderr": 0.0469237132203465, + "acc_norm": 0.4, + "acc_norm_stderr": 0.0469237132203465 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2740740740740741, + "acc_stderr": 0.027195934804085626, + "acc_norm": 0.2740740740740741, + "acc_norm_stderr": 0.027195934804085626 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2052980132450331, + "acc_stderr": 0.03297986648473836, + "acc_norm": 0.2052980132450331, + "acc_norm_stderr": 0.03297986648473836 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.48258706467661694, + "acc_stderr": 0.03533389234739244, + "acc_norm": 0.48258706467661694, + "acc_norm_stderr": 0.03533389234739244 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.32947976878612717, + "acc_stderr": 0.03583901754736411, + "acc_norm": 0.32947976878612717, + "acc_norm_stderr": 0.03583901754736411 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + 
"acc": 0.2222222222222222, + "acc_stderr": 0.02141168439369419, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.02141168439369419 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.375, + "acc_stderr": 0.04048439222695598, + "acc_norm": 0.375, + "acc_norm_stderr": 0.04048439222695598 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.53, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.53, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4046242774566474, + "acc_stderr": 0.026424816594009845, + "acc_norm": 0.4046242774566474, + "acc_norm_stderr": 0.026424816594009845 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.44785276073619634, + "acc_stderr": 0.03906947479456601, + "acc_norm": 0.44785276073619634, + "acc_norm_stderr": 0.03906947479456601 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4104938271604938, + "acc_stderr": 0.027371350925124764, + "acc_norm": 0.4104938271604938, + "acc_norm_stderr": 0.027371350925124764 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.16, + "acc_stderr": 0.0368452949177471, + "acc_norm": 0.16, + "acc_norm_stderr": 0.0368452949177471 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.41968911917098445, + "acc_stderr": 0.03561587327685883, + "acc_norm": 0.41968911917098445, + "acc_norm_stderr": 0.03561587327685883 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.03999423879281336, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.03999423879281336 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3669724770642202, + "acc_stderr": 0.020664675659520532, + "acc_norm": 0.3669724770642202, + "acc_norm_stderr": 0.020664675659520532 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 
0.23015873015873015, + "acc_stderr": 0.037649508797906066, + "acc_norm": 0.23015873015873015, + "acc_norm_stderr": 0.037649508797906066 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3562091503267974, + "acc_stderr": 0.02742047766262923, + "acc_norm": 0.3562091503267974, + "acc_norm_stderr": 0.02742047766262923 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6115702479338843, + "acc_stderr": 0.04449270350068383, + "acc_norm": 0.6115702479338843, + "acc_norm_stderr": 0.04449270350068383 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40131578947368424, + "acc_stderr": 0.03988903703336285, + "acc_norm": 0.40131578947368424, + "acc_norm_stderr": 0.03988903703336285 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3480392156862745, + "acc_stderr": 0.019270998708223977, + "acc_norm": 0.3480392156862745, + "acc_norm_stderr": 0.019270998708223977 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2730496453900709, + "acc_stderr": 0.026577860943307854, + "acc_norm": 0.2730496453900709, + "acc_norm_stderr": 0.026577860943307854 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.30357142857142855, + "acc_stderr": 0.04364226155841044, + "acc_norm": 0.30357142857142855, + "acc_norm_stderr": 0.04364226155841044 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.20833333333333334, + "acc_stderr": 0.027696910713093936, + "acc_norm": 0.20833333333333334, + "acc_norm_stderr": 0.027696910713093936 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2435754189944134, + "acc_stderr": 0.01435591196476786, + "acc_norm": 0.2435754189944134, + "acc_norm_stderr": 0.01435591196476786 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + 
"harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.23161764705882354, + "acc_stderr": 0.025626533803777565, + "acc_norm": 0.23161764705882354, + "acc_norm_stderr": 0.025626533803777565 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.31020408163265306, + "acc_stderr": 0.029613459872484385, + "acc_norm": 0.31020408163265306, + "acc_norm_stderr": 0.029613459872484385 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.46835443037974683, + "acc_stderr": 0.03248197400511075, + "acc_norm": 0.46835443037974683, + "acc_norm_stderr": 0.03248197400511075 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.28226857887874834, + "acc_stderr": 0.011495852176241954, + "acc_norm": 0.28226857887874834, + "acc_norm_stderr": 0.011495852176241954 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.44607843137254904, + "acc_stderr": 0.03488845451304974, + "acc_norm": 0.44607843137254904, + "acc_norm_stderr": 0.03488845451304974 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4727272727272727, + "acc_stderr": 0.03898531605579419, + "acc_norm": 0.4727272727272727, + "acc_norm_stderr": 0.03898531605579419 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.29498164014687883, + "mc1_stderr": 0.015964400965589678, + "mc2": 0.4528465622549083, + "mc2_stderr": 0.015125783674090152 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3707201889020071, + "acc_stderr": 0.01660580128921261, + "acc_norm": 0.512396694214876, + "acc_norm_stderr": 0.017185069732676538 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + 
"harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + 
"harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "krevas/LDCC-Instruct-Llama-2-ko-13B-v4", + "model_sha": "26ef51b65661f5762efa36aadf56a7c3820e6762", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/krevas/LDCC-Instruct-Llama-2-ko-13B-v7.1/result_2023-11-28 10:51:14.json b/krevas/LDCC-Instruct-Llama-2-ko-13B-v7.1/result_2023-11-28 10:51:14.json new file mode 100644 index 0000000000000000000000000000000000000000..0444e5ab71e633884a47669ef84c17c5e160aae6 --- /dev/null +++ b/krevas/LDCC-Instruct-Llama-2-ko-13B-v7.1/result_2023-11-28 10:51:14.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.40017064846416384, + "acc_stderr": 0.014317197787809167, + "acc_norm": 0.46501706484641636, + "acc_norm_stderr": 0.014575583922019669 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4239195379406493, + "acc_stderr": 0.004931679059919374, + "acc_norm": 0.5702051384186417, + "acc_norm_stderr": 0.004940349676769318 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.49707602339181284, + "acc_stderr": 0.03834759370936839, + "acc_norm": 0.49707602339181284, + "acc_norm_stderr": 0.03834759370936839 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4854368932038835, + "acc_stderr": 0.04948637324026637, + "acc_norm": 0.4854368932038835, + "acc_norm_stderr": 0.04948637324026637 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.017769250583533243, + 
"acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.017769250583533243 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4740740740740741, + "acc_stderr": 0.04313531696750574, + "acc_norm": 0.4740740740740741, + "acc_norm_stderr": 0.04313531696750574 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909284, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909284 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3829787234042553, + "acc_stderr": 0.03177821250236922, + "acc_norm": 0.3829787234042553, + "acc_norm_stderr": 0.03177821250236922 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42168674698795183, + "acc_stderr": 0.038444531817709175, + "acc_norm": 0.42168674698795183, + "acc_norm_stderr": 0.038444531817709175 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.49517684887459806, + "acc_stderr": 0.02839677044411129, + "acc_norm": 0.49517684887459806, + "acc_norm_stderr": 0.02839677044411129 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5291479820627802, + "acc_stderr": 0.03350073248773403, + "acc_norm": 0.5291479820627802, + "acc_norm_stderr": 0.03350073248773403 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.45038167938931295, + "acc_stderr": 0.04363643698524779, + "acc_norm": 0.45038167938931295, + "acc_norm_stderr": 0.04363643698524779 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5757575757575758, + "acc_stderr": 0.035212249088415866, + "acc_norm": 0.5757575757575758, + "acc_norm_stderr": 0.035212249088415866 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4482758620689655, + "acc_stderr": 0.04144311810878151, + "acc_norm": 0.4482758620689655, + "acc_norm_stderr": 0.04144311810878151 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2549019607843137, + "acc_stderr": 
0.043364327079931785, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.043364327079931785 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.47478991596638653, + "acc_stderr": 0.03243718055137411, + "acc_norm": 0.47478991596638653, + "acc_norm_stderr": 0.03243718055137411 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4461538461538462, + "acc_stderr": 0.025203571773028337, + "acc_norm": 0.4461538461538462, + "acc_norm_stderr": 0.025203571773028337 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5185185185185185, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.5185185185185185, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.35960591133004927, + "acc_stderr": 0.03376458246509567, + "acc_norm": 0.35960591133004927, + "acc_norm_stderr": 0.03376458246509567 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4774193548387097, + "acc_stderr": 0.028414985019707868, + "acc_norm": 0.4774193548387097, + "acc_norm_stderr": 0.028414985019707868 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6239316239316239, + "acc_stderr": 0.03173393632969481, + "acc_norm": 0.6239316239316239, + "acc_norm_stderr": 0.03173393632969481 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4528301886792453, + "acc_stderr": 0.03063562795796182, + "acc_norm": 0.4528301886792453, + "acc_norm_stderr": 0.03063562795796182 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5, + "acc_stderr": 0.04789131426105757, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04789131426105757 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 
0.32222222222222224, + "acc_stderr": 0.028493465091028597, + "acc_norm": 0.32222222222222224, + "acc_norm_stderr": 0.028493465091028597 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.03780445850526733, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.03780445850526733 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5671641791044776, + "acc_stderr": 0.03503490923673282, + "acc_norm": 0.5671641791044776, + "acc_norm_stderr": 0.03503490923673282 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4393063583815029, + "acc_stderr": 0.037842719328874674, + "acc_norm": 0.4393063583815029, + "acc_norm_stderr": 0.037842719328874674 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.023517294335963286, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.023517294335963286 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4305555555555556, + "acc_stderr": 0.04140685639111502, + "acc_norm": 0.4305555555555556, + "acc_norm_stderr": 0.04140685639111502 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621503, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621503 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.56, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.56, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5404624277456648, + "acc_stderr": 0.02683080599895223, + "acc_norm": 0.5404624277456648, + "acc_norm_stderr": 0.02683080599895223 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.49693251533742333, + "acc_stderr": 0.03928297078179662, + "acc_norm": 0.49693251533742333, + "acc_norm_stderr": 0.03928297078179662 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5, + "acc_stderr": 0.02782074420373286, + "acc_norm": 0.5, + "acc_norm_stderr": 0.02782074420373286 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 
0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5492227979274611, + "acc_stderr": 0.035909109522355244, + "acc_norm": 0.5492227979274611, + "acc_norm_stderr": 0.035909109522355244 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.040969851398436695, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.040969851398436695 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5798165137614679, + "acc_stderr": 0.021162420048273504, + "acc_norm": 0.5798165137614679, + "acc_norm_stderr": 0.021162420048273504 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3253968253968254, + "acc_stderr": 0.041905964388711366, + "acc_norm": 0.3253968253968254, + "acc_norm_stderr": 0.041905964388711366 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.43137254901960786, + "acc_stderr": 0.028358956313423552, + "acc_norm": 0.43137254901960786, + "acc_norm_stderr": 0.028358956313423552 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.043913262867240704, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.043913262867240704 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.35526315789473684, + "acc_stderr": 0.03894734487013316, + "acc_norm": 0.35526315789473684, + "acc_norm_stderr": 0.03894734487013316 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.41013071895424835, + "acc_stderr": 0.01989841271763588, + "acc_norm": 0.41013071895424835, + "acc_norm_stderr": 0.01989841271763588 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.36879432624113473, + "acc_stderr": 0.02878222756134725, + "acc_norm": 0.36879432624113473, + "acc_norm_stderr": 0.02878222756134725 + }, + 
"harness|ko_mmlu_machine_learning|5": { + "acc": 0.20535714285714285, + "acc_stderr": 0.03834241021419073, + "acc_norm": 0.20535714285714285, + "acc_norm_stderr": 0.03834241021419073 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.375, + "acc_stderr": 0.033016908987210894, + "acc_norm": 0.375, + "acc_norm_stderr": 0.033016908987210894 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4338235294117647, + "acc_stderr": 0.03010563657001664, + "acc_norm": 0.4338235294117647, + "acc_norm_stderr": 0.03010563657001664 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.44081632653061226, + "acc_stderr": 0.03178419114175363, + "acc_norm": 0.44081632653061226, + "acc_norm_stderr": 0.03178419114175363 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6371308016877637, + "acc_stderr": 0.03129920825530213, + "acc_norm": 0.6371308016877637, + "acc_norm_stderr": 0.03129920825530213 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.36114732724902215, + "acc_stderr": 0.01226793547751903, + "acc_norm": 0.36114732724902215, + "acc_norm_stderr": 0.01226793547751903 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5343137254901961, + "acc_stderr": 0.03501038327635897, + "acc_norm": 0.5343137254901961, + "acc_norm_stderr": 0.03501038327635897 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.503030303030303, + "acc_stderr": 0.03904272341431855, + "acc_norm": 
0.503030303030303, + "acc_norm_stderr": 0.03904272341431855 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24724602203182375, + "mc1_stderr": 0.015102404797359649, + "mc2": 0.3982113417748825, + "mc2_stderr": 0.014654617642929734 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4498229043683589, + "acc_stderr": 0.01710357334382571, + "acc_norm": 0.5242030696576151, + "acc_norm_stderr": 0.017170202466520748 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + 
"harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "krevas/LDCC-Instruct-Llama-2-ko-13B-v7.1", + "model_sha": "b267bfc53b065e35c632ab140268615abc3e9fbf", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/krevas/LDCC-Instruct-Llama-2-ko-13B-v7.2/result_2023-11-28 11:08:56.json b/krevas/LDCC-Instruct-Llama-2-ko-13B-v7.2/result_2023-11-28 11:08:56.json new file mode 100644 index 0000000000000000000000000000000000000000..0a422b8c48ca8d6e5a67cade527f376cd23e683e --- /dev/null +++ b/krevas/LDCC-Instruct-Llama-2-ko-13B-v7.2/result_2023-11-28 11:08:56.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.40784982935153585, + "acc_stderr": 0.014361097288449714, + "acc_norm": 
0.47696245733788395, + "acc_norm_stderr": 0.014595873205358273 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4289982075283808, + "acc_stderr": 0.0049392156821917695, + "acc_norm": 0.5819557857000598, + "acc_norm_stderr": 0.004922294797766663 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.49122807017543857, + "acc_stderr": 0.038342347441649924, + "acc_norm": 0.49122807017543857, + "acc_norm_stderr": 0.038342347441649924 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.47572815533980584, + "acc_stderr": 0.049449010929737795, + "acc_norm": 0.47572815533980584, + "acc_norm_stderr": 0.049449010929737795 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5568326947637292, + "acc_stderr": 0.017764085035348418, + "acc_norm": 0.5568326947637292, + "acc_norm_stderr": 0.017764085035348418 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.04292596718256981, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.04292596718256981 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816508, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816508 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.37872340425531914, + "acc_stderr": 0.03170995606040655, + "acc_norm": 0.37872340425531914, + "acc_norm_stderr": 0.03170995606040655 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4036144578313253, + "acc_stderr": 0.03819486140758398, + "acc_norm": 0.4036144578313253, + "acc_norm_stderr": 0.03819486140758398 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5016077170418006, + "acc_stderr": 0.02839794490780661, + "acc_norm": 0.5016077170418006, + "acc_norm_stderr": 0.02839794490780661 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5201793721973094, + "acc_stderr": 0.033530461674123, + "acc_norm": 0.5201793721973094, + "acc_norm_stderr": 0.033530461674123 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48854961832061067, + "acc_stderr": 0.04384140024078016, + 
"acc_norm": 0.48854961832061067, + "acc_norm_stderr": 0.04384140024078016 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.601010101010101, + "acc_stderr": 0.034889016168527305, + "acc_norm": 0.601010101010101, + "acc_norm_stderr": 0.034889016168527305 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.041042692118062316, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.041042692118062316 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.04389869956808778, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.04389869956808778 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.46218487394957986, + "acc_stderr": 0.032385469487589795, + "acc_norm": 0.46218487394957986, + "acc_norm_stderr": 0.032385469487589795 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.43846153846153846, + "acc_stderr": 0.02515826601686855, + "acc_norm": 0.43846153846153846, + "acc_norm_stderr": 0.02515826601686855 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.35467980295566504, + "acc_stderr": 0.03366124489051449, + "acc_norm": 0.35467980295566504, + "acc_norm_stderr": 0.03366124489051449 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4645161290322581, + 
"acc_stderr": 0.028372287797962956, + "acc_norm": 0.4645161290322581, + "acc_norm_stderr": 0.028372287797962956 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6239316239316239, + "acc_stderr": 0.03173393632969482, + "acc_norm": 0.6239316239316239, + "acc_norm_stderr": 0.03173393632969482 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4037735849056604, + "acc_stderr": 0.030197611600197953, + "acc_norm": 0.4037735849056604, + "acc_norm_stderr": 0.030197611600197953 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3111111111111111, + "acc_stderr": 0.02822644674968352, + "acc_norm": 0.3111111111111111, + "acc_norm_stderr": 0.02822644674968352 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.03757949922943343, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.03757949922943343 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5223880597014925, + "acc_stderr": 0.03531987930208731, + "acc_norm": 0.5223880597014925, + "acc_norm_stderr": 0.03531987930208731 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4161849710982659, + "acc_stderr": 0.037585177754049466, + "acc_norm": 0.4161849710982659, + "acc_norm_stderr": 0.037585177754049466 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30158730158730157, + "acc_stderr": 0.0236369759961018, + "acc_norm": 0.30158730158730157, + "acc_norm_stderr": 0.0236369759961018 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4236111111111111, + "acc_stderr": 0.04132125019723369, + "acc_norm": 0.4236111111111111, + "acc_norm_stderr": 0.04132125019723369 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + 
"harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.62, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.62, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.49710982658959535, + "acc_stderr": 0.02691864538323901, + "acc_norm": 0.49710982658959535, + "acc_norm_stderr": 0.02691864538323901 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5337423312883436, + "acc_stderr": 0.039194155450484096, + "acc_norm": 0.5337423312883436, + "acc_norm_stderr": 0.039194155450484096 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.027815973433878014, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.027815973433878014 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542129, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542129 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5181347150259067, + "acc_stderr": 0.03606065001832919, + "acc_norm": 0.5181347150259067, + "acc_norm_stderr": 0.03606065001832919 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.040493392977481404, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.040493392977481404 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5669724770642202, + "acc_stderr": 0.021244146569074352, + "acc_norm": 0.5669724770642202, + "acc_norm_stderr": 0.021244146569074352 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.042163702135578345, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.042163702135578345 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.027914055510467998, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.027914055510467998 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 
0.049604496374885836 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5619834710743802, + "acc_stderr": 0.045291468044357915, + "acc_norm": 0.5619834710743802, + "acc_norm_stderr": 0.045291468044357915 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3684210526315789, + "acc_stderr": 0.03925523381052932, + "acc_norm": 0.3684210526315789, + "acc_norm_stderr": 0.03925523381052932 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.42810457516339867, + "acc_stderr": 0.020017629214213104, + "acc_norm": 0.42810457516339867, + "acc_norm_stderr": 0.020017629214213104 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3900709219858156, + "acc_stderr": 0.029097675599463926, + "acc_norm": 0.3900709219858156, + "acc_norm_stderr": 0.029097675599463926 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25892857142857145, + "acc_stderr": 0.041577515398656284, + "acc_norm": 0.25892857142857145, + "acc_norm_stderr": 0.041577515398656284 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.03293377139415191, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.03293377139415191 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4117647058823529, + "acc_stderr": 0.02989616303312549, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.02989616303312549 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.40408163265306124, + "acc_stderr": 
0.0314147080258659, + "acc_norm": 0.40408163265306124, + "acc_norm_stderr": 0.0314147080258659 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6329113924050633, + "acc_stderr": 0.031376240725616185, + "acc_norm": 0.6329113924050633, + "acc_norm_stderr": 0.031376240725616185 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.35984354628422427, + "acc_stderr": 0.012258260483689797, + "acc_norm": 0.35984354628422427, + "acc_norm_stderr": 0.012258260483689797 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.46568627450980393, + "acc_stderr": 0.03501038327635897, + "acc_norm": 0.46568627450980393, + "acc_norm_stderr": 0.03501038327635897 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4909090909090909, + "acc_stderr": 0.03903698647748441, + "acc_norm": 0.4909090909090909, + "acc_norm_stderr": 0.03903698647748441 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.23745410036719705, + "mc1_stderr": 0.014896277441041864, + "mc2": 0.3783182251954889, + "mc2_stderr": 0.01466893499051619 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.42621015348288077, + "acc_stderr": 0.017002122609489252, + "acc_norm": 0.4911452184179457, + "acc_norm_stderr": 0.01718765819933674 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + 
"harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + 
"model_name": "krevas/LDCC-Instruct-Llama-2-ko-13B-v7.2", + "model_sha": "5f5876fd03a477fadca9ac16760a5a1ec8d0acb3", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/krevas/LDCC-Instruct-Llama-2-ko-13B-v7.3/result_2023-11-28 11:18:44.json b/krevas/LDCC-Instruct-Llama-2-ko-13B-v7.3/result_2023-11-28 11:18:44.json new file mode 100644 index 0000000000000000000000000000000000000000..874b52d1e477e1f64ad2ec8434a84adf47eaf777 --- /dev/null +++ b/krevas/LDCC-Instruct-Llama-2-ko-13B-v7.3/result_2023-11-28 11:18:44.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2901023890784983, + "acc_stderr": 0.013261573677520774, + "acc_norm": 0.36177474402730375, + "acc_norm_stderr": 0.014041957945038075 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3867755427205736, + "acc_stderr": 0.004860162076330979, + "acc_norm": 0.5024895439155547, + "acc_norm_stderr": 0.004989719559439899 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.3157894736842105, + "acc_stderr": 0.0356507967070831, + "acc_norm": 0.3157894736842105, + "acc_norm_stderr": 0.0356507967070831 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.18446601941747573, + "acc_stderr": 0.03840423627288276, + "acc_norm": 0.18446601941747573, + "acc_norm_stderr": 0.03840423627288276 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.017570705239256544, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.017570705239256544 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3037037037037037, + "acc_stderr": 0.03972552884785139, + "acc_norm": 0.3037037037037037, + "acc_norm_stderr": 0.03972552884785139 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + 
"harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.33617021276595743, + "acc_stderr": 0.030881618520676942, + "acc_norm": 0.33617021276595743, + "acc_norm_stderr": 0.030881618520676942 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3313253012048193, + "acc_stderr": 0.03664314777288086, + "acc_norm": 0.3313253012048193, + "acc_norm_stderr": 0.03664314777288086 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3890675241157556, + "acc_stderr": 0.027690337536485372, + "acc_norm": 0.3890675241157556, + "acc_norm_stderr": 0.027690337536485372 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4618834080717489, + "acc_stderr": 0.033460150119732274, + "acc_norm": 0.4618834080717489, + "acc_norm_stderr": 0.033460150119732274 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.31297709923664124, + "acc_stderr": 0.04066962905677698, + "acc_norm": 0.31297709923664124, + "acc_norm_stderr": 0.04066962905677698 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3686868686868687, + "acc_stderr": 0.034373055019806184, + "acc_norm": 0.3686868686868687, + "acc_norm_stderr": 0.034373055019806184 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.30344827586206896, + "acc_stderr": 0.038312260488503336, + "acc_norm": 0.30344827586206896, + "acc_norm_stderr": 0.038312260488503336 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.17647058823529413, + "acc_stderr": 0.03793281185307809, + "acc_norm": 0.17647058823529413, + "acc_norm_stderr": 0.03793281185307809 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3403361344537815, + "acc_stderr": 0.03077805742293167, + "acc_norm": 0.3403361344537815, + "acc_norm_stderr": 0.03077805742293167 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.37948717948717947, + "acc_stderr": 0.024603626924097417, + 
"acc_norm": 0.37948717948717947, + "acc_norm_stderr": 0.024603626924097417 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.04489931073591312, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.04489931073591312 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.29064039408866993, + "acc_stderr": 0.0319474007226554, + "acc_norm": 0.29064039408866993, + "acc_norm_stderr": 0.0319474007226554 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.38064516129032255, + "acc_stderr": 0.02762171783290704, + "acc_norm": 0.38064516129032255, + "acc_norm_stderr": 0.02762171783290704 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.26495726495726496, + "acc_stderr": 0.028911208802749493, + "acc_norm": 0.26495726495726496, + "acc_norm_stderr": 0.028911208802749493 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3660377358490566, + "acc_stderr": 0.02964781353936524, + "acc_norm": 0.3660377358490566, + "acc_norm_stderr": 0.02964781353936524 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.20909090909090908, + "acc_stderr": 0.03895091015724136, + "acc_norm": 0.20909090909090908, + "acc_norm_stderr": 0.03895091015724136 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24814814814814815, + "acc_stderr": 0.0263357394040558, + "acc_norm": 0.24814814814814815, + "acc_norm_stderr": 0.0263357394040558 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + "acc_stderr": 0.03684881521389024, + "acc_norm": 0.2847682119205298, + "acc_norm_stderr": 0.03684881521389024 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.3482587064676617, + 
"acc_stderr": 0.03368787466115459, + "acc_norm": 0.3482587064676617, + "acc_norm_stderr": 0.03368787466115459 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.34104046242774566, + "acc_stderr": 0.036146654241808254, + "acc_norm": 0.34104046242774566, + "acc_norm_stderr": 0.036146654241808254 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.02256989707491841, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.02256989707491841 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.03852084696008534, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.03852084696008534 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2861271676300578, + "acc_stderr": 0.02433214677913413, + "acc_norm": 0.2861271676300578, + "acc_norm_stderr": 0.02433214677913413 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4049079754601227, + "acc_stderr": 0.03856672163548913, + "acc_norm": 0.4049079754601227, + "acc_norm_stderr": 0.03856672163548913 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2993827160493827, + "acc_stderr": 0.025483115601195462, + "acc_norm": 0.2993827160493827, + "acc_norm_stderr": 0.025483115601195462 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.44041450777202074, + "acc_stderr": 0.035827245300360945, + "acc_norm": 0.44041450777202074, + "acc_norm_stderr": 0.035827245300360945 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 
0.24561403508771928, + "acc_stderr": 0.04049339297748142, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.04049339297748142 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3853211009174312, + "acc_stderr": 0.02086585085279413, + "acc_norm": 0.3853211009174312, + "acc_norm_stderr": 0.02086585085279413 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.21428571428571427, + "acc_stderr": 0.03670066451047181, + "acc_norm": 0.21428571428571427, + "acc_norm_stderr": 0.03670066451047181 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.026568921015457155, + "acc_norm": 0.3137254901960784, + "acc_norm_stderr": 0.026568921015457155 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.30578512396694213, + "acc_stderr": 0.042059539338841254, + "acc_norm": 0.30578512396694213, + "acc_norm_stderr": 0.042059539338841254 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.2236842105263158, + "acc_stderr": 0.033911609343436025, + "acc_norm": 0.2236842105263158, + "acc_norm_stderr": 0.033911609343436025 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.01843342764940189, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.01843342764940189 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.30141843971631205, + "acc_stderr": 0.02737412888263115, + "acc_norm": 0.30141843971631205, + "acc_norm_stderr": 0.02737412888263115 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.24107142857142858, + "acc_stderr": 0.04059867246952686, + "acc_norm": 0.24107142857142858, + "acc_norm_stderr": 0.04059867246952686 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.36574074074074076, + "acc_stderr": 0.03284738857647207, + "acc_norm": 0.36574074074074076, + "acc_norm_stderr": 
0.03284738857647207 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2536312849162011, + "acc_stderr": 0.014551553659369922, + "acc_norm": 0.2536312849162011, + "acc_norm_stderr": 0.014551553659369922 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3161764705882353, + "acc_stderr": 0.028245687391462913, + "acc_norm": 0.3161764705882353, + "acc_norm_stderr": 0.028245687391462913 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3510204081632653, + "acc_stderr": 0.03055531675557364, + "acc_norm": 0.3510204081632653, + "acc_norm_stderr": 0.03055531675557364 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.34177215189873417, + "acc_stderr": 0.030874537537553617, + "acc_norm": 0.34177215189873417, + "acc_norm_stderr": 0.030874537537553617 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.26401564537157757, + "acc_stderr": 0.011258435537723837, + "acc_norm": 0.26401564537157757, + "acc_norm_stderr": 0.011258435537723837 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.27941176470588236, + "acc_stderr": 0.031493281045079556, + "acc_norm": 0.27941176470588236, + "acc_norm_stderr": 0.031493281045079556 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03225078108306289, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03225078108306289 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2386780905752754, + "mc1_stderr": 0.014922629695456411, + "mc2": 0.38387267624394456, + "mc2_stderr": 0.014963849444289972 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3565525383707202, + "acc_stderr": 
0.016467706981527455, + "acc_norm": 0.45690672963400236, + "acc_norm_stderr": 0.017126389093086784 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + 
"harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "krevas/LDCC-Instruct-Llama-2-ko-13B-v7.3", + "model_sha": "2202330599ea1a5a07ee9a472a27b7e22cba0cf1", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/krevas/LDCC-Instruct-Llama-2-ko-13B-v7/result_2023-10-17 07:32:46.json b/krevas/LDCC-Instruct-Llama-2-ko-13B-v7/result_2023-10-17 07:32:46.json new file mode 100644 index 0000000000000000000000000000000000000000..066c07fcf00ba4168ac5e0aaac05eee716cd4bed --- /dev/null +++ b/krevas/LDCC-Instruct-Llama-2-ko-13B-v7/result_2023-10-17 07:32:46.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.38139931740614336, + "acc_stderr": 0.014194389086685251, + "acc_norm": 0.45307167235494883, + "acc_norm_stderr": 0.014546892052005628 + }, + "harness|ko_hellaswag|10": { + "acc": 0.41057558255327625, + "acc_stderr": 0.004909328992915067, + "acc_norm": 0.5488946425014938, + "acc_norm_stderr": 0.004965866098318175 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.30994152046783624, + 
"acc_stderr": 0.035469769593931624, + "acc_norm": 0.30994152046783624, + "acc_norm_stderr": 0.035469769593931624 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.32038834951456313, + "acc_stderr": 0.046202840822800406, + "acc_norm": 0.32038834951456313, + "acc_norm_stderr": 0.046202840822800406 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.31417624521072796, + "acc_stderr": 0.016599291735884893, + "acc_norm": 0.31417624521072796, + "acc_norm_stderr": 0.016599291735884893 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34074074074074073, + "acc_stderr": 0.04094376269996793, + "acc_norm": 0.34074074074074073, + "acc_norm_stderr": 0.04094376269996793 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2553191489361702, + "acc_stderr": 0.02850485647051419, + "acc_norm": 0.2553191489361702, + "acc_norm_stderr": 0.02850485647051419 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3072289156626506, + "acc_stderr": 0.035915667978246635, + "acc_norm": 0.3072289156626506, + "acc_norm_stderr": 0.035915667978246635 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3954983922829582, + "acc_stderr": 0.027770918531427834, + "acc_norm": 0.3954983922829582, + "acc_norm_stderr": 0.027770918531427834 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.29596412556053814, + "acc_stderr": 0.03063659134869982, + "acc_norm": 0.29596412556053814, + "acc_norm_stderr": 0.03063659134869982 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3816793893129771, + "acc_stderr": 0.04260735157644561, + "acc_norm": 0.3816793893129771, + "acc_norm_stderr": 0.04260735157644561 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3181818181818182, + 
"acc_stderr": 0.03318477333845332, + "acc_norm": 0.3181818181818182, + "acc_norm_stderr": 0.03318477333845332 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2827586206896552, + "acc_stderr": 0.037528339580033376, + "acc_norm": 0.2827586206896552, + "acc_norm_stderr": 0.037528339580033376 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.09803921568627451, + "acc_stderr": 0.029589188531613252, + "acc_norm": 0.09803921568627451, + "acc_norm_stderr": 0.029589188531613252 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.29831932773109243, + "acc_stderr": 0.02971914287634286, + "acc_norm": 0.29831932773109243, + "acc_norm_stderr": 0.02971914287634286 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2743589743589744, + "acc_stderr": 0.022622765767493214, + "acc_norm": 0.2743589743589744, + "acc_norm_stderr": 0.022622765767493214 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3425925925925926, + "acc_stderr": 0.045879047413018105, + "acc_norm": 0.3425925925925926, + "acc_norm_stderr": 0.045879047413018105 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.03178529710642748, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.03178529710642748 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.34838709677419355, + "acc_stderr": 0.027104826328100944, + "acc_norm": 0.34838709677419355, + "acc_norm_stderr": 0.027104826328100944 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.37606837606837606, + "acc_stderr": 0.031733936329694824, + "acc_norm": 0.37606837606837606, + "acc_norm_stderr": 0.031733936329694824 + }, + 
"harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.27547169811320754, + "acc_stderr": 0.027495663683724046, + "acc_norm": 0.27547169811320754, + "acc_norm_stderr": 0.027495663683724046 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2909090909090909, + "acc_stderr": 0.04350271442923243, + "acc_norm": 0.2909090909090909, + "acc_norm_stderr": 0.04350271442923243 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25555555555555554, + "acc_stderr": 0.026593939101844065, + "acc_norm": 0.25555555555555554, + "acc_norm_stderr": 0.026593939101844065 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2582781456953642, + "acc_stderr": 0.035737053147634576, + "acc_norm": 0.2582781456953642, + "acc_norm_stderr": 0.035737053147634576 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.373134328358209, + "acc_stderr": 0.034198326081760065, + "acc_norm": 0.373134328358209, + "acc_norm_stderr": 0.034198326081760065 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3063583815028902, + "acc_stderr": 0.03514942551267438, + "acc_norm": 0.3063583815028902, + "acc_norm_stderr": 0.03514942551267438 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2830687830687831, + "acc_stderr": 0.023201392938194978, + "acc_norm": 0.2830687830687831, + "acc_norm_stderr": 0.023201392938194978 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.04016660030451233, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.04016660030451233 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.12, + "acc_stderr": 0.03265986323710906, + "acc_norm": 0.12, + "acc_norm_stderr": 0.03265986323710906 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709390974, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709390974 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.37572254335260113, + "acc_stderr": 0.02607431485165708, + "acc_norm": 0.37572254335260113, + 
"acc_norm_stderr": 0.02607431485165708 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3006134969325153, + "acc_stderr": 0.0360251131880677, + "acc_norm": 0.3006134969325153, + "acc_norm_stderr": 0.0360251131880677 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.35802469135802467, + "acc_stderr": 0.0266756119260371, + "acc_norm": 0.35802469135802467, + "acc_norm_stderr": 0.0266756119260371 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.34196891191709844, + "acc_stderr": 0.03423465100104281, + "acc_norm": 0.34196891191709844, + "acc_norm_stderr": 0.03423465100104281 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.30701754385964913, + "acc_stderr": 0.0433913832257986, + "acc_norm": 0.30701754385964913, + "acc_norm_stderr": 0.0433913832257986 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3174311926605505, + "acc_stderr": 0.019957152198460504, + "acc_norm": 0.3174311926605505, + "acc_norm_stderr": 0.019957152198460504 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.04006168083848876, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.04006168083848876 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.02656892101545716, + "acc_norm": 0.3137254901960784, + "acc_norm_stderr": 0.02656892101545716 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.4380165289256198, + "acc_stderr": 0.045291468044357915, + "acc_norm": 0.4380165289256198, + "acc_norm_stderr": 0.045291468044357915 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3223684210526316, + "acc_stderr": 0.03803510248351586, + "acc_norm": 
0.3223684210526316, + "acc_norm_stderr": 0.03803510248351586 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.01877168389352817, + "acc_norm": 0.3137254901960784, + "acc_norm_stderr": 0.01877168389352817 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.31560283687943264, + "acc_stderr": 0.027724989449509317, + "acc_norm": 0.31560283687943264, + "acc_norm_stderr": 0.027724989449509317 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.22321428571428573, + "acc_stderr": 0.03952301967702511, + "acc_norm": 0.22321428571428573, + "acc_norm_stderr": 0.03952301967702511 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.23148148148148148, + "acc_stderr": 0.028765111718046944, + "acc_norm": 0.23148148148148148, + "acc_norm_stderr": 0.028765111718046944 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2435754189944134, + "acc_stderr": 0.01435591196476786, + "acc_norm": 0.2435754189944134, + "acc_norm_stderr": 0.01435591196476786 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.22058823529411764, + "acc_stderr": 0.02518778666022727, + "acc_norm": 0.22058823529411764, + "acc_norm_stderr": 0.02518778666022727 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.40816326530612246, + "acc_stderr": 0.03146465712827423, + "acc_norm": 0.40816326530612246, + "acc_norm_stderr": 0.03146465712827423 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.43037974683544306, + "acc_stderr": 0.032230171959375976, + "acc_norm": 0.43037974683544306, + "acc_norm_stderr": 0.032230171959375976 + }, + 
"harness|ko_mmlu_professional_law|5": { + "acc": 0.29791395045632335, + "acc_stderr": 0.011680717340400057, + "acc_norm": 0.29791395045632335, + "acc_norm_stderr": 0.011680717340400057 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.3431372549019608, + "acc_stderr": 0.033321399446680854, + "acc_norm": 0.3431372549019608, + "acc_norm_stderr": 0.033321399446680854 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3151515151515151, + "acc_stderr": 0.0362773057502241, + "acc_norm": 0.3151515151515151, + "acc_norm_stderr": 0.0362773057502241 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.27906976744186046, + "mc1_stderr": 0.015702107090627884, + "mc2": 0.43717065836326097, + "mc2_stderr": 0.014982579691917674 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.32585596221959856, + "acc_stderr": 0.016114023894800322, + "acc_norm": 0.3754427390791027, + "acc_norm_stderr": 0.016648411589511088 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + 
"harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "krevas/LDCC-Instruct-Llama-2-ko-13B-v7", + "model_sha": "c0836cce043af8ee88da9cb52b2032d3fa8c5ddd", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff 
--git a/krevas/SOLAR-10.7B/result_2024-03-26 01:47:39.json b/krevas/SOLAR-10.7B/result_2024-03-26 01:47:39.json new file mode 100644 index 0000000000000000000000000000000000000000..3c95f7c21fade5a3640191cb7fb88d51fe8b2f28 --- /dev/null +++ b/krevas/SOLAR-10.7B/result_2024-03-26 01:47:39.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.6928327645051194, + "acc_stderr": 0.013481034054980943, + "acc_norm": 0.726962457337884, + "acc_norm_stderr": 0.013019332762635737 + }, + "harness|ko_hellaswag|10": { + "acc": 0.5427205735909182, + "acc_stderr": 0.0049715348743899586, + "acc_norm": 0.7078271260705039, + "acc_norm_stderr": 0.004538319464111951 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.7192982456140351, + "acc_stderr": 0.034462962170884265, + "acc_norm": 0.7192982456140351, + "acc_norm_stderr": 0.034462962170884265 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.7378640776699029, + "acc_stderr": 0.043546310772605956, + "acc_norm": 0.7378640776699029, + "acc_norm_stderr": 0.043546310772605956 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.7215836526181354, + "acc_stderr": 0.016028295188992448, + "acc_norm": 0.7215836526181354, + "acc_norm_stderr": 0.016028295188992448 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4740740740740741, + "acc_stderr": 0.04313531696750574, + "acc_norm": 0.4740740740740741, + "acc_norm_stderr": 0.04313531696750574 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.5148936170212766, + "acc_stderr": 0.03267151848924776, + "acc_norm": 0.5148936170212766, + "acc_norm_stderr": 0.03267151848924776 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.5, + "acc_stderr": 0.03892494720807614, + "acc_norm": 0.5, + "acc_norm_stderr": 0.03892494720807614 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6237942122186495, + 
"acc_stderr": 0.027513925683549427, + "acc_norm": 0.6237942122186495, + "acc_norm_stderr": 0.027513925683549427 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.6547085201793722, + "acc_stderr": 0.03191100192835795, + "acc_norm": 0.6547085201793722, + "acc_norm_stderr": 0.03191100192835795 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5725190839694656, + "acc_stderr": 0.04338920305792401, + "acc_norm": 0.5725190839694656, + "acc_norm_stderr": 0.04338920305792401 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7777777777777778, + "acc_stderr": 0.029620227874790465, + "acc_norm": 0.7777777777777778, + "acc_norm_stderr": 0.029620227874790465 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5586206896551724, + "acc_stderr": 0.04137931034482757, + "acc_norm": 0.5586206896551724, + "acc_norm_stderr": 0.04137931034482757 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.04533838195929776, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.04533838195929776 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6050420168067226, + "acc_stderr": 0.03175367846096625, + "acc_norm": 0.6050420168067226, + "acc_norm_stderr": 0.03175367846096625 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.617948717948718, + "acc_stderr": 0.024635549163908237, + "acc_norm": 0.617948717948718, + "acc_norm_stderr": 0.024635549163908237 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.72, + "acc_stderr": 0.045126085985421296, + "acc_norm": 0.72, + "acc_norm_stderr": 0.045126085985421296 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 
0.6666666666666666, + "acc_stderr": 0.04557239513497751, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.04557239513497751 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.47783251231527096, + "acc_stderr": 0.03514528562175008, + "acc_norm": 0.47783251231527096, + "acc_norm_stderr": 0.03514528562175008 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.635483870967742, + "acc_stderr": 0.02737987122994324, + "acc_norm": 0.635483870967742, + "acc_norm_stderr": 0.02737987122994324 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.8290598290598291, + "acc_stderr": 0.0246624968452098, + "acc_norm": 0.8290598290598291, + "acc_norm_stderr": 0.0246624968452098 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5886792452830188, + "acc_stderr": 0.03028500925900979, + "acc_norm": 0.5886792452830188, + "acc_norm_stderr": 0.03028500925900979 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6636363636363637, + "acc_stderr": 0.04525393596302505, + "acc_norm": 0.6636363636363637, + "acc_norm_stderr": 0.04525393596302505 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.35555555555555557, + "acc_stderr": 0.0291857149498574, + "acc_norm": 0.35555555555555557, + "acc_norm_stderr": 0.0291857149498574 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.36423841059602646, + "acc_stderr": 0.03929111781242741, + "acc_norm": 0.36423841059602646, + "acc_norm_stderr": 0.03929111781242741 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7412935323383084, + "acc_stderr": 0.030965903123573037, + "acc_norm": 0.7412935323383084, + "acc_norm_stderr": 0.030965903123573037 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5375722543352601, + "acc_stderr": 0.03801685104524458, + "acc_norm": 0.5375722543352601, + "acc_norm_stderr": 0.03801685104524458 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.4497354497354497, + "acc_stderr": 0.02562085704293665, + "acc_norm": 0.4497354497354497, + 
"acc_norm_stderr": 0.02562085704293665 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.6319444444444444, + "acc_stderr": 0.040329990539607195, + "acc_norm": 0.6319444444444444, + "acc_norm_stderr": 0.040329990539607195 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.77, + "acc_stderr": 0.04229525846816507, + "acc_norm": 0.77, + "acc_norm_stderr": 0.04229525846816507 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.6011560693641619, + "acc_stderr": 0.02636243757454654, + "acc_norm": 0.6011560693641619, + "acc_norm_stderr": 0.02636243757454654 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.6134969325153374, + "acc_stderr": 0.03825825548848607, + "acc_norm": 0.6134969325153374, + "acc_norm_stderr": 0.03825825548848607 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6944444444444444, + "acc_stderr": 0.02563082497562135, + "acc_norm": 0.6944444444444444, + "acc_norm_stderr": 0.02563082497562135 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7616580310880829, + "acc_stderr": 0.030748905363909895, + "acc_norm": 0.7616580310880829, + "acc_norm_stderr": 0.030748905363909895 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.4649122807017544, + "acc_stderr": 0.046920083813689104, + "acc_norm": 0.4649122807017544, + "acc_norm_stderr": 0.046920083813689104 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.7467889908256881, + "acc_stderr": 0.01864407304137505, + "acc_norm": 0.7467889908256881, + "acc_norm_stderr": 0.01864407304137505 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3968253968253968, + "acc_stderr": 0.04375888492727062, + "acc_norm": 
0.3968253968253968, + "acc_norm_stderr": 0.04375888492727062 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.6143790849673203, + "acc_stderr": 0.027870745278290286, + "acc_norm": 0.6143790849673203, + "acc_norm_stderr": 0.027870745278290286 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.68, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.68, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7107438016528925, + "acc_stderr": 0.041391127276354626, + "acc_norm": 0.7107438016528925, + "acc_norm_stderr": 0.041391127276354626 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.6776315789473685, + "acc_stderr": 0.03803510248351585, + "acc_norm": 0.6776315789473685, + "acc_norm_stderr": 0.03803510248351585 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5800653594771242, + "acc_stderr": 0.019966811178256477, + "acc_norm": 0.5800653594771242, + "acc_norm_stderr": 0.019966811178256477 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.4219858156028369, + "acc_stderr": 0.02946218923337059, + "acc_norm": 0.4219858156028369, + "acc_norm_stderr": 0.02946218923337059 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4375, + "acc_stderr": 0.04708567521880525, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.04708567521880525 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4861111111111111, + "acc_stderr": 0.034086558679777494, + "acc_norm": 0.4861111111111111, + "acc_norm_stderr": 0.034086558679777494 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.4145251396648045, + "acc_stderr": 0.016476342210254003, + "acc_norm": 0.4145251396648045, + "acc_norm_stderr": 0.016476342210254003 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.71, + "acc_stderr": 0.04560480215720684, + 
"acc_norm": 0.71, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5147058823529411, + "acc_stderr": 0.03035969707904611, + "acc_norm": 0.5147058823529411, + "acc_norm_stderr": 0.03035969707904611 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.710204081632653, + "acc_stderr": 0.029043088683304345, + "acc_norm": 0.710204081632653, + "acc_norm_stderr": 0.029043088683304345 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7552742616033755, + "acc_stderr": 0.02798569938703642, + "acc_norm": 0.7552742616033755, + "acc_norm_stderr": 0.02798569938703642 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.4491525423728814, + "acc_stderr": 0.012704030518851476, + "acc_norm": 0.4491525423728814, + "acc_norm_stderr": 0.012704030518851476 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6764705882352942, + "acc_stderr": 0.03283472056108561, + "acc_norm": 0.6764705882352942, + "acc_norm_stderr": 0.03283472056108561 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6424242424242425, + "acc_stderr": 0.03742597043806587, + "acc_norm": 0.6424242424242425, + "acc_norm_stderr": 0.03742597043806587 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.6474908200734394, + "mc1_stderr": 0.01672464638075655, + "mc2": 0.7638029447692003, + "mc2_stderr": 0.013876730332832427 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4935064935064935, + "acc_stderr": 0.01718890435907731, + "acc_norm": 0.5194805194805194, + "acc_norm_stderr": 0.01717730199234255 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + 
"harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + 
"harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "krevas/SOLAR-10.7B", + "model_sha": "020b4047a95b0c785085c5dd487bcb3fb06b670c", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/krevas/SOLAR-10.7B/result_2024-03-26 14:59:50.json b/krevas/SOLAR-10.7B/result_2024-03-26 14:59:50.json new file mode 100644 index 0000000000000000000000000000000000000000..7c42b085e90cb0221db9b833cb3ba2c5f6d71d3a --- /dev/null +++ b/krevas/SOLAR-10.7B/result_2024-03-26 14:59:50.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.7244027303754266, + "acc_stderr": 0.013057169655761843, + "acc_norm": 0.7704778156996587, + "acc_norm_stderr": 0.012288926760890797 + }, + "harness|ko_hellaswag|10": { + "acc": 0.6589324835690101, + "acc_stderr": 0.004730991357194308, + "acc_norm": 0.7934674367655845, + "acc_norm_stderr": 0.00403989742368948 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6783625730994152, + "acc_stderr": 0.03582529442573122, + "acc_norm": 0.6783625730994152, + "acc_norm_stderr": 0.03582529442573122 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.7281553398058253, + "acc_stderr": 0.044052680241409216, + "acc_norm": 0.7281553398058253, + "acc_norm_stderr": 0.044052680241409216 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.01685739124747254, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.01685739124747254 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4222222222222222, + "acc_stderr": 0.04266763404099582, + "acc_norm": 
0.4222222222222222, + "acc_norm_stderr": 0.04266763404099582 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4595744680851064, + "acc_stderr": 0.032579014820998356, + "acc_norm": 0.4595744680851064, + "acc_norm_stderr": 0.032579014820998356 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.463855421686747, + "acc_stderr": 0.03882310850890593, + "acc_norm": 0.463855421686747, + "acc_norm_stderr": 0.03882310850890593 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6270096463022508, + "acc_stderr": 0.027466610213140095, + "acc_norm": 0.6270096463022508, + "acc_norm_stderr": 0.027466610213140095 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.6502242152466368, + "acc_stderr": 0.03200736719484503, + "acc_norm": 0.6502242152466368, + "acc_norm_stderr": 0.03200736719484503 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5267175572519084, + "acc_stderr": 0.04379024936553894, + "acc_norm": 0.5267175572519084, + "acc_norm_stderr": 0.04379024936553894 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.51, + "acc_stderr": 0.050241839379569095, + "acc_norm": 0.51, + "acc_norm_stderr": 0.050241839379569095 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7474747474747475, + "acc_stderr": 0.030954055470365907, + "acc_norm": 0.7474747474747475, + "acc_norm_stderr": 0.030954055470365907 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.503448275862069, + "acc_stderr": 0.04166567577101579, + "acc_norm": 0.503448275862069, + "acc_norm_stderr": 0.04166567577101579 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.30392156862745096, + "acc_stderr": 0.04576665403207763, + "acc_norm": 0.30392156862745096, + "acc_norm_stderr": 0.04576665403207763 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6092436974789915, + "acc_stderr": 
0.031693802357129965, + "acc_norm": 0.6092436974789915, + "acc_norm_stderr": 0.031693802357129965 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.6256410256410256, + "acc_stderr": 0.024537591572830496, + "acc_norm": 0.6256410256410256, + "acc_norm_stderr": 0.024537591572830496 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.67, + "acc_stderr": 0.047258156262526094, + "acc_norm": 0.67, + "acc_norm_stderr": 0.047258156262526094 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.04557239513497751, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.04557239513497751 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4630541871921182, + "acc_stderr": 0.035083705204426656, + "acc_norm": 0.4630541871921182, + "acc_norm_stderr": 0.035083705204426656 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5903225806451613, + "acc_stderr": 0.027976054915347364, + "acc_norm": 0.5903225806451613, + "acc_norm_stderr": 0.027976054915347364 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.8333333333333334, + "acc_stderr": 0.024414947304543688, + "acc_norm": 0.8333333333333334, + "acc_norm_stderr": 0.024414947304543688 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5509433962264151, + "acc_stderr": 0.030612730713641092, + "acc_norm": 0.5509433962264151, + "acc_norm_stderr": 0.030612730713641092 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5909090909090909, + "acc_stderr": 0.04709306978661895, + "acc_norm": 0.5909090909090909, + "acc_norm_stderr": 0.04709306978661895 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3592592592592593, + "acc_stderr": 0.02925290592725198, + "acc_norm": 0.3592592592592593, + "acc_norm_stderr": 0.02925290592725198 + }, + "harness|ko_mmlu_high_school_physics|5": { + 
"acc": 0.3576158940397351, + "acc_stderr": 0.03913453431177258, + "acc_norm": 0.3576158940397351, + "acc_norm_stderr": 0.03913453431177258 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7064676616915423, + "acc_stderr": 0.032200241045342054, + "acc_norm": 0.7064676616915423, + "acc_norm_stderr": 0.032200241045342054 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5375722543352601, + "acc_stderr": 0.03801685104524458, + "acc_norm": 0.5375722543352601, + "acc_norm_stderr": 0.03801685104524458 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.455026455026455, + "acc_stderr": 0.025646928361049398, + "acc_norm": 0.455026455026455, + "acc_norm_stderr": 0.025646928361049398 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.6111111111111112, + "acc_stderr": 0.04076663253918567, + "acc_norm": 0.6111111111111112, + "acc_norm_stderr": 0.04076663253918567 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.73, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.73, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5722543352601156, + "acc_stderr": 0.026636539741116076, + "acc_norm": 0.5722543352601156, + "acc_norm_stderr": 0.026636539741116076 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.6134969325153374, + "acc_stderr": 0.03825825548848607, + "acc_norm": 0.6134969325153374, + "acc_norm_stderr": 0.03825825548848607 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6419753086419753, + "acc_stderr": 0.02667561192603711, + "acc_norm": 0.6419753086419753, + "acc_norm_stderr": 0.02667561192603711 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + 
"acc": 0.7616580310880829, + "acc_stderr": 0.030748905363909895, + "acc_norm": 0.7616580310880829, + "acc_norm_stderr": 0.030748905363909895 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.42105263157894735, + "acc_stderr": 0.046446020912223177, + "acc_norm": 0.42105263157894735, + "acc_norm_stderr": 0.046446020912223177 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.7247706422018348, + "acc_stderr": 0.019149093743155196, + "acc_norm": 0.7247706422018348, + "acc_norm_stderr": 0.019149093743155196 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.0442626668137991, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.0442626668137991 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5620915032679739, + "acc_stderr": 0.02840830202033269, + "acc_norm": 0.5620915032679739, + "acc_norm_stderr": 0.02840830202033269 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.69, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.69, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6942148760330579, + "acc_stderr": 0.04205953933884122, + "acc_norm": 0.6942148760330579, + "acc_norm_stderr": 0.04205953933884122 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.631578947368421, + "acc_stderr": 0.03925523381052932, + "acc_norm": 0.631578947368421, + "acc_norm_stderr": 0.03925523381052932 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5620915032679739, + "acc_stderr": 0.02007125788688652, + "acc_norm": 0.5620915032679739, + "acc_norm_stderr": 0.02007125788688652 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.4148936170212766, + "acc_stderr": 0.029392236584612503, + "acc_norm": 0.4148936170212766, + "acc_norm_stderr": 0.029392236584612503 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4017857142857143, + "acc_stderr": 0.04653333146973646, + "acc_norm": 0.4017857142857143, + "acc_norm_stderr": 0.04653333146973646 + 
}, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.0340470532865388, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.0340470532865388 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.311731843575419, + "acc_stderr": 0.015491756531894638, + "acc_norm": 0.311731843575419, + "acc_norm_stderr": 0.015491756531894638 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.67, + "acc_stderr": 0.047258156262526094, + "acc_norm": 0.67, + "acc_norm_stderr": 0.047258156262526094 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.49264705882352944, + "acc_stderr": 0.030369552523902173, + "acc_norm": 0.49264705882352944, + "acc_norm_stderr": 0.030369552523902173 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.673469387755102, + "acc_stderr": 0.030021056238440334, + "acc_norm": 0.673469387755102, + "acc_norm_stderr": 0.030021056238440334 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7172995780590717, + "acc_stderr": 0.02931281415395593, + "acc_norm": 0.7172995780590717, + "acc_norm_stderr": 0.02931281415395593 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.4517601043024772, + "acc_stderr": 0.012710662233660247, + "acc_norm": 0.4517601043024772, + "acc_norm_stderr": 0.012710662233660247 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.033086111132364364, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.033086111132364364 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6424242424242425, + "acc_stderr": 0.03742597043806587, + "acc_norm": 0.6424242424242425, + "acc_norm_stderr": 0.03742597043806587 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.7478580171358629, + "mc1_stderr": 0.015201522246299976, + 
"mc2": 0.8329759969455349, + "mc2_stderr": 0.012437365324018988 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5277449822904369, + "acc_stderr": 0.017163867979456016, + "acc_norm": 0.5442739079102715, + "acc_norm_stderr": 0.01712282914329265 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + 
"harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "krevas/SOLAR-10.7B", + "model_sha": "9c85e654ecc292f4491e332f7f25e6870f166f1e", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/kurugai/Kurugai-EEVE-v1.0/result_2024-03-03 13:24:15.json b/kurugai/Kurugai-EEVE-v1.0/result_2024-03-03 13:24:15.json new file mode 100644 index 0000000000000000000000000000000000000000..19bdad9c81d971bc716d8aab2edbb6704dedf963 --- /dev/null +++ b/kurugai/Kurugai-EEVE-v1.0/result_2024-03-03 13:24:15.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.431740614334471, + "acc_stderr": 0.014474591427196204, + "acc_norm": 0.49658703071672355, + "acc_norm_stderr": 0.014611050403244081 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4425413264289982, + "acc_stderr": 0.004956724392646536, + "acc_norm": 0.5891256721768572, + "acc_norm_stderr": 0.0049098700063888384 + }, + 
"harness|ko_mmlu_world_religions|5": { + "acc": 0.6140350877192983, + "acc_stderr": 0.03733756969066164, + "acc_norm": 0.6140350877192983, + "acc_norm_stderr": 0.03733756969066164 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6990291262135923, + "acc_stderr": 0.045416094465039476, + "acc_norm": 0.6990291262135923, + "acc_norm_stderr": 0.045416094465039476 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6819923371647509, + "acc_stderr": 0.016653486275615408, + "acc_norm": 0.6819923371647509, + "acc_norm_stderr": 0.016653486275615408 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.42962962962962964, + "acc_stderr": 0.04276349494376598, + "acc_norm": 0.42962962962962964, + "acc_norm_stderr": 0.04276349494376598 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4723404255319149, + "acc_stderr": 0.03263597118409769, + "acc_norm": 0.4723404255319149, + "acc_norm_stderr": 0.03263597118409769 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.463855421686747, + "acc_stderr": 0.03882310850890594, + "acc_norm": 0.463855421686747, + "acc_norm_stderr": 0.03882310850890594 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5819935691318328, + "acc_stderr": 0.028013651891995072, + "acc_norm": 0.5819935691318328, + "acc_norm_stderr": 0.028013651891995072 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.48878923766816146, + "acc_stderr": 0.033549366530984746, + "acc_norm": 0.48878923766816146, + "acc_norm_stderr": 0.033549366530984746 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6106870229007634, + "acc_stderr": 0.042764865428145914, + "acc_norm": 0.6106870229007634, + "acc_norm_stderr": 0.042764865428145914 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + 
"harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7171717171717171, + "acc_stderr": 0.032087795587867514, + "acc_norm": 0.7171717171717171, + "acc_norm_stderr": 0.032087795587867514 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4827586206896552, + "acc_stderr": 0.04164188720169377, + "acc_norm": 0.4827586206896552, + "acc_norm_stderr": 0.04164188720169377 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.04617034827006716, + "acc_norm": 0.3137254901960784, + "acc_norm_stderr": 0.04617034827006716 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6134453781512605, + "acc_stderr": 0.03163145807552378, + "acc_norm": 0.6134453781512605, + "acc_norm_stderr": 0.03163145807552378 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5512820512820513, + "acc_stderr": 0.02521731518484649, + "acc_norm": 0.5512820512820513, + "acc_norm_stderr": 0.02521731518484649 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.55, + "acc_stderr": 0.04999999999999999, + "acc_norm": 0.55, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6574074074074074, + "acc_stderr": 0.045879047413018105, + "acc_norm": 0.6574074074074074, + "acc_norm_stderr": 0.045879047413018105 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3891625615763547, + "acc_stderr": 0.03430462416103872, + "acc_norm": 0.3891625615763547, + "acc_norm_stderr": 0.03430462416103872 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5870967741935483, + "acc_stderr": 0.028009138125400387, + "acc_norm": 0.5870967741935483, + "acc_norm_stderr": 0.028009138125400387 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7863247863247863, + "acc_stderr": 0.02685345037700915, + "acc_norm": 0.7863247863247863, + 
"acc_norm_stderr": 0.02685345037700915 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5811320754716981, + "acc_stderr": 0.03036505082911521, + "acc_norm": 0.5811320754716981, + "acc_norm_stderr": 0.03036505082911521 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5818181818181818, + "acc_stderr": 0.04724577405731573, + "acc_norm": 0.5818181818181818, + "acc_norm_stderr": 0.04724577405731573 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.35555555555555557, + "acc_stderr": 0.029185714949857403, + "acc_norm": 0.35555555555555557, + "acc_norm_stderr": 0.029185714949857403 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.39072847682119205, + "acc_stderr": 0.03983798306659808, + "acc_norm": 0.39072847682119205, + "acc_norm_stderr": 0.03983798306659808 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6766169154228856, + "acc_stderr": 0.03307615947979035, + "acc_norm": 0.6766169154228856, + "acc_norm_stderr": 0.03307615947979035 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.48554913294797686, + "acc_stderr": 0.03810871630454764, + "acc_norm": 0.48554913294797686, + "acc_norm_stderr": 0.03810871630454764 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.36507936507936506, + "acc_stderr": 0.02479606060269995, + "acc_norm": 0.36507936507936506, + "acc_norm_stderr": 0.02479606060269995 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.04155319955593146, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.04155319955593146 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.7, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.7, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5578034682080925, + "acc_stderr": 0.026738603643807403, + 
"acc_norm": 0.5578034682080925, + "acc_norm_stderr": 0.026738603643807403 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.50920245398773, + "acc_stderr": 0.03927705600787443, + "acc_norm": 0.50920245398773, + "acc_norm_stderr": 0.03927705600787443 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.558641975308642, + "acc_stderr": 0.027628737155668777, + "acc_norm": 0.558641975308642, + "acc_norm_stderr": 0.027628737155668777 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7098445595854922, + "acc_stderr": 0.03275264467791516, + "acc_norm": 0.7098445595854922, + "acc_norm_stderr": 0.03275264467791516 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3684210526315789, + "acc_stderr": 0.04537815354939391, + "acc_norm": 0.3684210526315789, + "acc_norm_stderr": 0.04537815354939391 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6844036697247706, + "acc_stderr": 0.019926117513869666, + "acc_norm": 0.6844036697247706, + "acc_norm_stderr": 0.019926117513869666 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.373015873015873, + "acc_stderr": 0.04325506042017086, + "acc_norm": 0.373015873015873, + "acc_norm_stderr": 0.04325506042017086 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5816993464052288, + "acc_stderr": 0.028245134024387292, + "acc_norm": 0.5816993464052288, + "acc_norm_stderr": 0.028245134024387292 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6776859504132231, + "acc_stderr": 0.04266416363352168, + "acc_norm": 0.6776859504132231, + "acc_norm_stderr": 0.04266416363352168 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5723684210526315, + "acc_stderr": 
0.040260970832965634, + "acc_norm": 0.5723684210526315, + "acc_norm_stderr": 0.040260970832965634 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4624183006535948, + "acc_stderr": 0.02017061497496977, + "acc_norm": 0.4624183006535948, + "acc_norm_stderr": 0.02017061497496977 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3546099290780142, + "acc_stderr": 0.02853865002887864, + "acc_norm": 0.3546099290780142, + "acc_norm_stderr": 0.02853865002887864 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.36607142857142855, + "acc_stderr": 0.045723723587374296, + "acc_norm": 0.36607142857142855, + "acc_norm_stderr": 0.045723723587374296 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5092592592592593, + "acc_stderr": 0.034093869469927006, + "acc_norm": 0.5092592592592593, + "acc_norm_stderr": 0.034093869469927006 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2324022346368715, + "acc_stderr": 0.014125968754673384, + "acc_norm": 0.2324022346368715, + "acc_norm_stderr": 0.014125968754673384 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.62, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.62, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5441176470588235, + "acc_stderr": 0.030254372573976722, + "acc_norm": 0.5441176470588235, + "acc_norm_stderr": 0.030254372573976722 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.636734693877551, + "acc_stderr": 0.030789051139030806, + "acc_norm": 0.636734693877551, + "acc_norm_stderr": 0.030789051139030806 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7341772151898734, + "acc_stderr": 0.028756799629658335, + "acc_norm": 0.7341772151898734, + "acc_norm_stderr": 0.028756799629658335 + }, + 
"harness|ko_mmlu_professional_law|5": { + "acc": 0.3878748370273794, + "acc_stderr": 0.012444998309675624, + "acc_norm": 0.3878748370273794, + "acc_norm_stderr": 0.012444998309675624 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6421568627450981, + "acc_stderr": 0.033644872860882996, + "acc_norm": 0.6421568627450981, + "acc_norm_stderr": 0.033644872860882996 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6606060606060606, + "acc_stderr": 0.03697442205031596, + "acc_norm": 0.6606060606060606, + "acc_norm_stderr": 0.03697442205031596 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.34394124847001223, + "mc1_stderr": 0.016629087514276754, + "mc2": 0.5086814733236086, + "mc2_stderr": 0.01602142587508382 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5820543093270366, + "acc_stderr": 0.016957292005279703, + "acc_norm": 0.6198347107438017, + "acc_norm_stderr": 0.016689333596980094 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + 
"harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "kurugai/Kurugai-EEVE-v1.0", + "model_sha": "a18620d32f162086aa2b45d79c792d729bc3601a", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git 
a/kurugai/Kurugai-EEVE-v1.1/result_2024-03-10 14:28:31.json b/kurugai/Kurugai-EEVE-v1.1/result_2024-03-10 14:28:31.json new file mode 100644 index 0000000000000000000000000000000000000000..66191132f969dc87a5068ebe265ee15aa1e5a92c --- /dev/null +++ b/kurugai/Kurugai-EEVE-v1.1/result_2024-03-10 14:28:31.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.431740614334471, + "acc_stderr": 0.014474591427196204, + "acc_norm": 0.49658703071672355, + "acc_norm_stderr": 0.014611050403244081 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4425413264289982, + "acc_stderr": 0.004956724392646536, + "acc_norm": 0.5891256721768572, + "acc_norm_stderr": 0.0049098700063888384 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6140350877192983, + "acc_stderr": 0.03733756969066164, + "acc_norm": 0.6140350877192983, + "acc_norm_stderr": 0.03733756969066164 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6990291262135923, + "acc_stderr": 0.045416094465039476, + "acc_norm": 0.6990291262135923, + "acc_norm_stderr": 0.045416094465039476 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6819923371647509, + "acc_stderr": 0.016653486275615408, + "acc_norm": 0.6819923371647509, + "acc_norm_stderr": 0.016653486275615408 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.42962962962962964, + "acc_stderr": 0.04276349494376598, + "acc_norm": 0.42962962962962964, + "acc_norm_stderr": 0.04276349494376598 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4723404255319149, + "acc_stderr": 0.03263597118409769, + "acc_norm": 0.4723404255319149, + "acc_norm_stderr": 0.03263597118409769 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.463855421686747, + "acc_stderr": 0.03882310850890594, + "acc_norm": 0.463855421686747, + "acc_norm_stderr": 0.03882310850890594 + }, + 
"harness|ko_mmlu_philosophy|5": { + "acc": 0.5819935691318328, + "acc_stderr": 0.028013651891995072, + "acc_norm": 0.5819935691318328, + "acc_norm_stderr": 0.028013651891995072 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.48878923766816146, + "acc_stderr": 0.033549366530984746, + "acc_norm": 0.48878923766816146, + "acc_norm_stderr": 0.033549366530984746 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6106870229007634, + "acc_stderr": 0.042764865428145914, + "acc_norm": 0.6106870229007634, + "acc_norm_stderr": 0.042764865428145914 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7171717171717171, + "acc_stderr": 0.032087795587867514, + "acc_norm": 0.7171717171717171, + "acc_norm_stderr": 0.032087795587867514 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4827586206896552, + "acc_stderr": 0.04164188720169377, + "acc_norm": 0.4827586206896552, + "acc_norm_stderr": 0.04164188720169377 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.04617034827006716, + "acc_norm": 0.3137254901960784, + "acc_norm_stderr": 0.04617034827006716 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6134453781512605, + "acc_stderr": 0.03163145807552378, + "acc_norm": 0.6134453781512605, + "acc_norm_stderr": 0.03163145807552378 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5512820512820513, + "acc_stderr": 0.02521731518484649, + "acc_norm": 0.5512820512820513, + "acc_norm_stderr": 0.02521731518484649 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.55, + "acc_stderr": 0.04999999999999999, + "acc_norm": 0.55, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 
0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6574074074074074, + "acc_stderr": 0.045879047413018105, + "acc_norm": 0.6574074074074074, + "acc_norm_stderr": 0.045879047413018105 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3891625615763547, + "acc_stderr": 0.03430462416103872, + "acc_norm": 0.3891625615763547, + "acc_norm_stderr": 0.03430462416103872 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5870967741935483, + "acc_stderr": 0.028009138125400387, + "acc_norm": 0.5870967741935483, + "acc_norm_stderr": 0.028009138125400387 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7863247863247863, + "acc_stderr": 0.02685345037700915, + "acc_norm": 0.7863247863247863, + "acc_norm_stderr": 0.02685345037700915 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5811320754716981, + "acc_stderr": 0.03036505082911521, + "acc_norm": 0.5811320754716981, + "acc_norm_stderr": 0.03036505082911521 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5818181818181818, + "acc_stderr": 0.04724577405731573, + "acc_norm": 0.5818181818181818, + "acc_norm_stderr": 0.04724577405731573 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.35555555555555557, + "acc_stderr": 0.029185714949857403, + "acc_norm": 0.35555555555555557, + "acc_norm_stderr": 0.029185714949857403 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.39072847682119205, + "acc_stderr": 0.03983798306659808, + "acc_norm": 0.39072847682119205, + "acc_norm_stderr": 0.03983798306659808 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6766169154228856, + "acc_stderr": 0.03307615947979035, + "acc_norm": 0.6766169154228856, + "acc_norm_stderr": 0.03307615947979035 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.48554913294797686, + "acc_stderr": 0.03810871630454764, + "acc_norm": 0.48554913294797686, + "acc_norm_stderr": 0.03810871630454764 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.36507936507936506, + 
"acc_stderr": 0.02479606060269995, + "acc_norm": 0.36507936507936506, + "acc_norm_stderr": 0.02479606060269995 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.04155319955593146, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.04155319955593146 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.7, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.7, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5578034682080925, + "acc_stderr": 0.026738603643807403, + "acc_norm": 0.5578034682080925, + "acc_norm_stderr": 0.026738603643807403 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.50920245398773, + "acc_stderr": 0.03927705600787443, + "acc_norm": 0.50920245398773, + "acc_norm_stderr": 0.03927705600787443 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.558641975308642, + "acc_stderr": 0.027628737155668777, + "acc_norm": 0.558641975308642, + "acc_norm_stderr": 0.027628737155668777 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7098445595854922, + "acc_stderr": 0.03275264467791516, + "acc_norm": 0.7098445595854922, + "acc_norm_stderr": 0.03275264467791516 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3684210526315789, + "acc_stderr": 0.04537815354939391, + "acc_norm": 0.3684210526315789, + "acc_norm_stderr": 0.04537815354939391 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6844036697247706, + "acc_stderr": 0.019926117513869666, + "acc_norm": 0.6844036697247706, + "acc_norm_stderr": 0.019926117513869666 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.373015873015873, + 
"acc_stderr": 0.04325506042017086, + "acc_norm": 0.373015873015873, + "acc_norm_stderr": 0.04325506042017086 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5816993464052288, + "acc_stderr": 0.028245134024387292, + "acc_norm": 0.5816993464052288, + "acc_norm_stderr": 0.028245134024387292 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6776859504132231, + "acc_stderr": 0.04266416363352168, + "acc_norm": 0.6776859504132231, + "acc_norm_stderr": 0.04266416363352168 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5723684210526315, + "acc_stderr": 0.040260970832965634, + "acc_norm": 0.5723684210526315, + "acc_norm_stderr": 0.040260970832965634 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4624183006535948, + "acc_stderr": 0.02017061497496977, + "acc_norm": 0.4624183006535948, + "acc_norm_stderr": 0.02017061497496977 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3546099290780142, + "acc_stderr": 0.02853865002887864, + "acc_norm": 0.3546099290780142, + "acc_norm_stderr": 0.02853865002887864 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.36607142857142855, + "acc_stderr": 0.045723723587374296, + "acc_norm": 0.36607142857142855, + "acc_norm_stderr": 0.045723723587374296 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5092592592592593, + "acc_stderr": 0.034093869469927006, + "acc_norm": 0.5092592592592593, + "acc_norm_stderr": 0.034093869469927006 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2324022346368715, + "acc_stderr": 0.014125968754673384, + "acc_norm": 0.2324022346368715, + "acc_norm_stderr": 0.014125968754673384 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + 
"harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.62, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.62, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5441176470588235, + "acc_stderr": 0.030254372573976722, + "acc_norm": 0.5441176470588235, + "acc_norm_stderr": 0.030254372573976722 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.636734693877551, + "acc_stderr": 0.030789051139030806, + "acc_norm": 0.636734693877551, + "acc_norm_stderr": 0.030789051139030806 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7341772151898734, + "acc_stderr": 0.028756799629658335, + "acc_norm": 0.7341772151898734, + "acc_norm_stderr": 0.028756799629658335 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3878748370273794, + "acc_stderr": 0.012444998309675624, + "acc_norm": 0.3878748370273794, + "acc_norm_stderr": 0.012444998309675624 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6421568627450981, + "acc_stderr": 0.033644872860882996, + "acc_norm": 0.6421568627450981, + "acc_norm_stderr": 0.033644872860882996 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6606060606060606, + "acc_stderr": 0.03697442205031596, + "acc_norm": 0.6606060606060606, + "acc_norm_stderr": 0.03697442205031596 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.34394124847001223, + "mc1_stderr": 0.016629087514276754, + "mc2": 0.5086814733236086, + "mc2_stderr": 0.01602142587508382 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5820543093270366, + "acc_stderr": 0.016957292005279703, + "acc_norm": 0.6198347107438017, + "acc_norm_stderr": 0.016689333596980094 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + 
"harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + 
"harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "kurugai/Kurugai-EEVE-v1.1", + "model_sha": "50f8c6607aec63ef9b6d0aac7ffe9fd91d94b231", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/kyujinpy/CoT-llama-2k-7b/result_2023-09-27 16:26:44.json b/kyujinpy/CoT-llama-2k-7b/result_2023-09-27 16:26:44.json new file mode 100644 index 0000000000000000000000000000000000000000..b8b7e4919429e6627141ed1d7b4749b3a921de26 --- /dev/null +++ b/kyujinpy/CoT-llama-2k-7b/result_2023-09-27 16:26:44.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3174061433447099, + "acc_stderr": 0.01360223908803817, + "acc_norm": 0.3677474402730375, + "acc_norm_stderr": 0.014090995618168484 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3822943636725752, + "acc_stderr": 0.004849547819134473, + "acc_norm": 0.4938259310894244, + "acc_norm_stderr": 0.00498940098472222 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.03377310252209194, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.03377310252209194 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.27184466019417475, + "acc_stderr": 0.044052680241409216, + "acc_norm": 0.27184466019417475, + "acc_norm_stderr": 0.044052680241409216 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3486590038314176, + "acc_stderr": 0.017041243143490946, + "acc_norm": 0.3486590038314176, + "acc_norm_stderr": 
0.017041243143490946 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.32592592592592595, + "acc_stderr": 0.040491220417025055, + "acc_norm": 0.32592592592592595, + "acc_norm_stderr": 0.040491220417025055 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421255, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421255 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2723404255319149, + "acc_stderr": 0.029101290698386705, + "acc_norm": 0.2723404255319149, + "acc_norm_stderr": 0.029101290698386705 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3253012048192771, + "acc_stderr": 0.03647168523683227, + "acc_norm": 0.3253012048192771, + "acc_norm_stderr": 0.03647168523683227 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.34726688102893893, + "acc_stderr": 0.027040745502307336, + "acc_norm": 0.34726688102893893, + "acc_norm_stderr": 0.027040745502307336 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3183856502242152, + "acc_stderr": 0.03126580522513713, + "acc_norm": 0.3183856502242152, + "acc_norm_stderr": 0.03126580522513713 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.35877862595419846, + "acc_stderr": 0.04206739313864908, + "acc_norm": 0.35877862595419846, + "acc_norm_stderr": 0.04206739313864908 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3787878787878788, + "acc_stderr": 0.03456088731993747, + "acc_norm": 0.3787878787878788, + "acc_norm_stderr": 0.03456088731993747 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.31724137931034485, + "acc_stderr": 0.03878352372138622, + "acc_norm": 0.31724137931034485, + "acc_norm_stderr": 0.03878352372138622 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.18627450980392157, + "acc_stderr": 0.038739587141493524, + "acc_norm": 0.18627450980392157, + 
"acc_norm_stderr": 0.038739587141493524 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.27310924369747897, + "acc_stderr": 0.028942004040998167, + "acc_norm": 0.27310924369747897, + "acc_norm_stderr": 0.028942004040998167 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.25384615384615383, + "acc_stderr": 0.022066054378726257, + "acc_norm": 0.25384615384615383, + "acc_norm_stderr": 0.022066054378726257 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.044143436668549335, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.044143436668549335 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.30049261083743845, + "acc_stderr": 0.03225799476233484, + "acc_norm": 0.30049261083743845, + "acc_norm_stderr": 0.03225799476233484 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3032258064516129, + "acc_stderr": 0.026148685930671746, + "acc_norm": 0.3032258064516129, + "acc_norm_stderr": 0.026148685930671746 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.32051282051282054, + "acc_stderr": 0.030572811310299607, + "acc_norm": 0.32051282051282054, + "acc_norm_stderr": 0.030572811310299607 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3169811320754717, + "acc_stderr": 0.028637235639800935, + "acc_norm": 0.3169811320754717, + "acc_norm_stderr": 0.028637235639800935 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.3090909090909091, + "acc_stderr": 0.044262946482000985, + "acc_norm": 0.3090909090909091, + "acc_norm_stderr": 0.044262946482000985 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.29259259259259257, + 
"acc_stderr": 0.02773896963217609, + "acc_norm": 0.29259259259259257, + "acc_norm_stderr": 0.02773896963217609 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.26490066225165565, + "acc_stderr": 0.03603038545360384, + "acc_norm": 0.26490066225165565, + "acc_norm_stderr": 0.03603038545360384 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.32338308457711445, + "acc_stderr": 0.03307615947979033, + "acc_norm": 0.32338308457711445, + "acc_norm_stderr": 0.03307615947979033 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2543352601156069, + "acc_stderr": 0.0332055644308557, + "acc_norm": 0.2543352601156069, + "acc_norm_stderr": 0.0332055644308557 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.022569897074918417, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.022569897074918417 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.18055555555555555, + "acc_stderr": 0.032166008088022675, + "acc_norm": 0.18055555555555555, + "acc_norm_stderr": 0.032166008088022675 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.3265895953757225, + "acc_stderr": 0.02524826477424284, + "acc_norm": 0.3265895953757225, + "acc_norm_stderr": 0.02524826477424284 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.25766871165644173, + "acc_stderr": 0.03436150827846917, + "acc_norm": 0.25766871165644173, + "acc_norm_stderr": 0.03436150827846917 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.32098765432098764, + "acc_stderr": 0.025976566010862744, + "acc_norm": 0.32098765432098764, + "acc_norm_stderr": 0.025976566010862744 + }, + "harness|ko_mmlu_college_mathematics|5": { + 
"acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.26424870466321243, + "acc_stderr": 0.03182155050916647, + "acc_norm": 0.26424870466321243, + "acc_norm_stderr": 0.03182155050916647 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.042270544512322, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.042270544512322 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3724770642201835, + "acc_stderr": 0.020728368457638494, + "acc_norm": 0.3724770642201835, + "acc_norm_stderr": 0.020728368457638494 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.15873015873015872, + "acc_stderr": 0.03268454013011744, + "acc_norm": 0.15873015873015872, + "acc_norm_stderr": 0.03268454013011744 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3562091503267974, + "acc_stderr": 0.027420477662629245, + "acc_norm": 0.3562091503267974, + "acc_norm_stderr": 0.027420477662629245 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847415, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847415 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.4214876033057851, + "acc_stderr": 0.045077322787750944, + "acc_norm": 0.4214876033057851, + "acc_norm_stderr": 0.045077322787750944 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3684210526315789, + "acc_stderr": 0.03925523381052932, + "acc_norm": 0.3684210526315789, + "acc_norm_stderr": 0.03925523381052932 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.018120224251484577, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.018120224251484577 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.28368794326241137, + "acc_stderr": 0.02689170942834396, + "acc_norm": 0.28368794326241137, + "acc_norm_stderr": 0.02689170942834396 + }, + 
"harness|ko_mmlu_machine_learning|5": { + "acc": 0.29464285714285715, + "acc_stderr": 0.043270409325787296, + "acc_norm": 0.29464285714285715, + "acc_norm_stderr": 0.043270409325787296 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.32407407407407407, + "acc_stderr": 0.03191923445686185, + "acc_norm": 0.32407407407407407, + "acc_norm_stderr": 0.03191923445686185 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24134078212290502, + "acc_stderr": 0.014310999547961441, + "acc_norm": 0.24134078212290502, + "acc_norm_stderr": 0.014310999547961441 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.35661764705882354, + "acc_stderr": 0.02909720956841196, + "acc_norm": 0.35661764705882354, + "acc_norm_stderr": 0.02909720956841196 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2816326530612245, + "acc_stderr": 0.02879518557429129, + "acc_norm": 0.2816326530612245, + "acc_norm_stderr": 0.02879518557429129 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.35864978902953587, + "acc_stderr": 0.03121956944530184, + "acc_norm": 0.35864978902953587, + "acc_norm_stderr": 0.03121956944530184 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2842242503259452, + "acc_stderr": 0.011519880596516076, + "acc_norm": 0.2842242503259452, + "acc_norm_stderr": 0.011519880596516076 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2696078431372549, + "acc_stderr": 0.03114557065948678, + "acc_norm": 0.2696078431372549, + "acc_norm_stderr": 0.03114557065948678 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.30303030303030304, + "acc_stderr": 
0.03588624800091708, + "acc_norm": 0.30303030303030304, + "acc_norm_stderr": 0.03588624800091708 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24724602203182375, + "mc1_stderr": 0.01510240479735965, + "mc2": 0.3775578914340665, + "mc2_stderr": 0.014769349915486594 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.21605667060212513, + "acc_stderr": 0.014149496716043137, + "acc_norm": 0.30578512396694213, + "acc_norm_stderr": 0.015840538932534103 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + 
"harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "kyujinpy/CoT-llama-2k-7b", + "model_sha": "67fb09946bc99c9ba5f97b8675e27d217b353280", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/kyujinpy/KO-Platypus2-13B/result_2023-10-05 18:34:43.json b/kyujinpy/KO-Platypus2-13B/result_2023-10-05 18:34:43.json new file mode 100644 index 0000000000000000000000000000000000000000..792482e69d35acf3ea82024b5b762dc3ad807866 --- /dev/null +++ b/kyujinpy/KO-Platypus2-13B/result_2023-10-05 18:34:43.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.386518771331058, + "acc_stderr": 0.014230084761910471, + "acc_norm": 0.44197952218430037, + 
"acc_norm_stderr": 0.014512682523128345 + }, + "harness|ko_hellaswag|10": { + "acc": 0.40818562039434375, + "acc_stderr": 0.004904933500255867, + "acc_norm": 0.5431189006174069, + "acc_norm_stderr": 0.0049711923872024465 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5321637426900585, + "acc_stderr": 0.03826882417660368, + "acc_norm": 0.5321637426900585, + "acc_norm_stderr": 0.03826882417660368 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5048543689320388, + "acc_stderr": 0.04950504382128921, + "acc_norm": 0.5048543689320388, + "acc_norm_stderr": 0.04950504382128921 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5261813537675607, + "acc_stderr": 0.017855434554042, + "acc_norm": 0.5261813537675607, + "acc_norm_stderr": 0.017855434554042 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.43703703703703706, + "acc_stderr": 0.04284958639753399, + "acc_norm": 0.43703703703703706, + "acc_norm_stderr": 0.04284958639753399 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.35319148936170214, + "acc_stderr": 0.03124532520276193, + "acc_norm": 0.35319148936170214, + "acc_norm_stderr": 0.03124532520276193 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3674698795180723, + "acc_stderr": 0.03753267402120574, + "acc_norm": 0.3674698795180723, + "acc_norm_stderr": 0.03753267402120574 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4533762057877814, + "acc_stderr": 0.02827435985489424, + "acc_norm": 0.4533762057877814, + "acc_norm_stderr": 0.02827435985489424 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4170403587443946, + "acc_stderr": 0.03309266936071721, + "acc_norm": 0.4170403587443946, + "acc_norm_stderr": 0.03309266936071721 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48854961832061067, + "acc_stderr": 0.04384140024078016, + "acc_norm": 0.48854961832061067, 
+ "acc_norm_stderr": 0.04384140024078016 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.4797979797979798, + "acc_stderr": 0.03559443565563919, + "acc_norm": 0.4797979797979798, + "acc_norm_stderr": 0.03559443565563919 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4482758620689655, + "acc_stderr": 0.04144311810878151, + "acc_norm": 0.4482758620689655, + "acc_norm_stderr": 0.04144311810878151 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.04533838195929776, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.04533838195929776 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.42016806722689076, + "acc_stderr": 0.032061837832361516, + "acc_norm": 0.42016806722689076, + "acc_norm_stderr": 0.032061837832361516 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4205128205128205, + "acc_stderr": 0.025028610276710852, + "acc_norm": 0.4205128205128205, + "acc_norm_stderr": 0.025028610276710852 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.52, + "acc_stderr": 0.05021167315686779, + "acc_norm": 0.52, + "acc_norm_stderr": 0.05021167315686779 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5185185185185185, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.5185185185185185, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.37438423645320196, + "acc_stderr": 0.03405155380561952, + "acc_norm": 0.37438423645320196, + "acc_norm_stderr": 0.03405155380561952 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.44516129032258067, + "acc_stderr": 0.028272410186214906, + 
"acc_norm": 0.44516129032258067, + "acc_norm_stderr": 0.028272410186214906 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6068376068376068, + "acc_stderr": 0.03199957924651048, + "acc_norm": 0.6068376068376068, + "acc_norm_stderr": 0.03199957924651048 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4377358490566038, + "acc_stderr": 0.03053333843046751, + "acc_norm": 0.4377358490566038, + "acc_norm_stderr": 0.03053333843046751 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5636363636363636, + "acc_stderr": 0.04750185058907297, + "acc_norm": 0.5636363636363636, + "acc_norm_stderr": 0.04750185058907297 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2814814814814815, + "acc_stderr": 0.027420019350945273, + "acc_norm": 0.2814814814814815, + "acc_norm_stderr": 0.027420019350945273 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33774834437086093, + "acc_stderr": 0.038615575462551684, + "acc_norm": 0.33774834437086093, + "acc_norm_stderr": 0.038615575462551684 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.572139303482587, + "acc_stderr": 0.03498541988407795, + "acc_norm": 0.572139303482587, + "acc_norm_stderr": 0.03498541988407795 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3872832369942196, + "acc_stderr": 0.03714325906302065, + "acc_norm": 0.3872832369942196, + "acc_norm_stderr": 0.03714325906302065 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.023517294335963286, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.023517294335963286 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.040166600304512336, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.040166600304512336 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 
0.62, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.62, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.476878612716763, + "acc_stderr": 0.026890297881303128, + "acc_norm": 0.476878612716763, + "acc_norm_stderr": 0.026890297881303128 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4171779141104294, + "acc_stderr": 0.038741028598180814, + "acc_norm": 0.4171779141104294, + "acc_norm_stderr": 0.038741028598180814 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4567901234567901, + "acc_stderr": 0.027716661650194048, + "acc_norm": 0.4567901234567901, + "acc_norm_stderr": 0.027716661650194048 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.49740932642487046, + "acc_stderr": 0.03608390745384488, + "acc_norm": 0.49740932642487046, + "acc_norm_stderr": 0.03608390745384488 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.03999423879281336, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.03999423879281336 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5321100917431193, + "acc_stderr": 0.021393071222680804, + "acc_norm": 0.5321100917431193, + "acc_norm_stderr": 0.021393071222680804 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3253968253968254, + "acc_stderr": 0.04190596438871136, + "acc_norm": 0.3253968253968254, + "acc_norm_stderr": 0.04190596438871136 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.028431095444176643, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.028431095444176643 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_international_law|5": { + 
"acc": 0.628099173553719, + "acc_stderr": 0.044120158066245044, + "acc_norm": 0.628099173553719, + "acc_norm_stderr": 0.044120158066245044 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4407894736842105, + "acc_stderr": 0.04040311062490437, + "acc_norm": 0.4407894736842105, + "acc_norm_stderr": 0.04040311062490437 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.34967320261437906, + "acc_stderr": 0.019291961895066382, + "acc_norm": 0.34967320261437906, + "acc_norm_stderr": 0.019291961895066382 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.34397163120567376, + "acc_stderr": 0.028338017428611317, + "acc_norm": 0.34397163120567376, + "acc_norm_stderr": 0.028338017428611317 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.23214285714285715, + "acc_stderr": 0.04007341809755806, + "acc_norm": 0.23214285714285715, + "acc_norm_stderr": 0.04007341809755806 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.38425925925925924, + "acc_stderr": 0.03317354514310742, + "acc_norm": 0.38425925925925924, + "acc_norm_stderr": 0.03317354514310742 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2569832402234637, + "acc_stderr": 0.014614465821966346, + "acc_norm": 0.2569832402234637, + "acc_norm_stderr": 0.014614465821966346 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4227941176470588, + "acc_stderr": 0.030008562845003476, + "acc_norm": 0.4227941176470588, + "acc_norm_stderr": 0.030008562845003476 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.46530612244897956, + "acc_stderr": 0.03193207024425314, + "acc_norm": 0.46530612244897956, + "acc_norm_stderr": 
0.03193207024425314 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5738396624472574, + "acc_stderr": 0.03219035703131774, + "acc_norm": 0.5738396624472574, + "acc_norm_stderr": 0.03219035703131774 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.37353324641460234, + "acc_stderr": 0.012354994823515274, + "acc_norm": 0.37353324641460234, + "acc_norm_stderr": 0.012354994823515274 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.46078431372549017, + "acc_stderr": 0.03498501649369527, + "acc_norm": 0.46078431372549017, + "acc_norm_stderr": 0.03498501649369527 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.0390369864774844, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.0390369864774844 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2729498164014688, + "mc1_stderr": 0.015594753632006514, + "mc2": 0.44412739310048044, + "mc2_stderr": 0.015229602209106612 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.37662337662337664, + "acc_stderr": 0.01665879987405197, + "acc_norm": 0.42621015348288077, + "acc_norm_stderr": 0.01700212260948926 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + 
"harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "kyujinpy/KO-Platypus2-13B", + "model_sha": "7c01146d2de47036b18a99357ea4ea80c3cfebf5", + 
"model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/kyujinpy/KO-Platypus2-7B-ex/result_2023-09-27 11:27:48.json b/kyujinpy/KO-Platypus2-7B-ex/result_2023-09-27 11:27:48.json new file mode 100644 index 0000000000000000000000000000000000000000..32a44e034230999f3f8ae70f25f5360de4517aeb --- /dev/null +++ b/kyujinpy/KO-Platypus2-7B-ex/result_2023-09-27 11:27:48.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3302047781569966, + "acc_stderr": 0.013743085603760427, + "acc_norm": 0.39078498293515357, + "acc_norm_stderr": 0.014258563880513778 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3868751244771958, + "acc_stderr": 0.004860393011974673, + "acc_norm": 0.5085640310695081, + "acc_norm_stderr": 0.004989049430391292 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4619883040935672, + "acc_stderr": 0.03823727092882307, + "acc_norm": 0.4619883040935672, + "acc_norm_stderr": 0.03823727092882307 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.3106796116504854, + "acc_stderr": 0.045821241601615506, + "acc_norm": 0.3106796116504854, + "acc_norm_stderr": 0.045821241601615506 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.38697318007662834, + "acc_stderr": 0.017417138059440146, + "acc_norm": 0.38697318007662834, + "acc_norm_stderr": 0.017417138059440146 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.04171654161354544, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.04171654161354544 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2765957446808511, + "acc_stderr": 0.029241883869628817, + "acc_norm": 0.2765957446808511, + "acc_norm_stderr": 
0.029241883869628817 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3614457831325301, + "acc_stderr": 0.037400593820293204, + "acc_norm": 0.3614457831325301, + "acc_norm_stderr": 0.037400593820293204 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.37942122186495175, + "acc_stderr": 0.027559949802347813, + "acc_norm": 0.37942122186495175, + "acc_norm_stderr": 0.027559949802347813 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.39461883408071746, + "acc_stderr": 0.03280400504755291, + "acc_norm": 0.39461883408071746, + "acc_norm_stderr": 0.03280400504755291 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48854961832061067, + "acc_stderr": 0.04384140024078016, + "acc_norm": 0.48854961832061067, + "acc_norm_stderr": 0.04384140024078016 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.398989898989899, + "acc_stderr": 0.03488901616852731, + "acc_norm": 0.398989898989899, + "acc_norm_stderr": 0.03488901616852731 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.38620689655172413, + "acc_stderr": 0.04057324734419035, + "acc_norm": 0.38620689655172413, + "acc_norm_stderr": 0.04057324734419035 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.04220773659171453, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.04220773659171453 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3865546218487395, + "acc_stderr": 0.0316314580755238, + "acc_norm": 0.3865546218487395, + "acc_norm_stderr": 0.0316314580755238 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2846153846153846, + "acc_stderr": 0.022878322799706294, + "acc_norm": 0.2846153846153846, + "acc_norm_stderr": 0.022878322799706294 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.41, + "acc_stderr": 0.04943110704237102, + 
"acc_norm": 0.41, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932269, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932269 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.37962962962962965, + "acc_stderr": 0.04691521224077742, + "acc_norm": 0.37962962962962965, + "acc_norm_stderr": 0.04691521224077742 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2512315270935961, + "acc_stderr": 0.030516530732694436, + "acc_norm": 0.2512315270935961, + "acc_norm_stderr": 0.030516530732694436 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.33548387096774196, + "acc_stderr": 0.026860206444724342, + "acc_norm": 0.33548387096774196, + "acc_norm_stderr": 0.026860206444724342 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.47863247863247865, + "acc_stderr": 0.032726164476349545, + "acc_norm": 0.47863247863247865, + "acc_norm_stderr": 0.032726164476349545 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.33962264150943394, + "acc_stderr": 0.029146904747798356, + "acc_norm": 0.33962264150943394, + "acc_norm_stderr": 0.029146904747798356 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.36363636363636365, + "acc_stderr": 0.046075820907199756, + "acc_norm": 0.36363636363636365, + "acc_norm_stderr": 0.046075820907199756 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24444444444444444, + "acc_stderr": 0.026202766534652148, + "acc_norm": 0.24444444444444444, + "acc_norm_stderr": 0.026202766534652148 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.271523178807947, + "acc_stderr": 0.03631329803969653, + "acc_norm": 0.271523178807947, + "acc_norm_stderr": 0.03631329803969653 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.4626865671641791, + "acc_stderr": 0.03525675167467974, + "acc_norm": 0.4626865671641791, + "acc_norm_stderr": 0.03525675167467974 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 
0.31213872832369943, + "acc_stderr": 0.035331333893236574, + "acc_norm": 0.31213872832369943, + "acc_norm_stderr": 0.035331333893236574 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.26455026455026454, + "acc_stderr": 0.02271746789770861, + "acc_norm": 0.26455026455026454, + "acc_norm_stderr": 0.02271746789770861 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3125, + "acc_stderr": 0.038760854559127644, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.038760854559127644 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932269, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932269 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.40173410404624277, + "acc_stderr": 0.026394104177643634, + "acc_norm": 0.40173410404624277, + "acc_norm_stderr": 0.026394104177643634 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.31901840490797545, + "acc_stderr": 0.03661997551073836, + "acc_norm": 0.31901840490797545, + "acc_norm_stderr": 0.03661997551073836 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3765432098765432, + "acc_stderr": 0.026959344518747794, + "acc_norm": 0.3765432098765432, + "acc_norm_stderr": 0.026959344518747794 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.047609522856952344, + "acc_norm": 0.34, + "acc_norm_stderr": 0.047609522856952344 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.41450777202072536, + "acc_stderr": 0.03555300319557672, + "acc_norm": 0.41450777202072536, + "acc_norm_stderr": 0.03555300319557672 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.04185774424022057, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.04185774424022057 + }, + "harness|ko_mmlu_high_school_psychology|5": { + 
"acc": 0.4036697247706422, + "acc_stderr": 0.021035704856574963, + "acc_norm": 0.4036697247706422, + "acc_norm_stderr": 0.021035704856574963 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.24603174603174602, + "acc_stderr": 0.03852273364924315, + "acc_norm": 0.24603174603174602, + "acc_norm_stderr": 0.03852273364924315 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.39869281045751637, + "acc_stderr": 0.028036092273891776, + "acc_norm": 0.39869281045751637, + "acc_norm_stderr": 0.028036092273891776 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.512396694214876, + "acc_stderr": 0.04562951548180765, + "acc_norm": 0.512396694214876, + "acc_norm_stderr": 0.04562951548180765 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.28289473684210525, + "acc_stderr": 0.03665349695640767, + "acc_norm": 0.28289473684210525, + "acc_norm_stderr": 0.03665349695640767 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.32516339869281047, + "acc_stderr": 0.018950886770806308, + "acc_norm": 0.32516339869281047, + "acc_norm_stderr": 0.018950886770806308 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2872340425531915, + "acc_stderr": 0.026992199173064356, + "acc_norm": 0.2872340425531915, + "acc_norm_stderr": 0.026992199173064356 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.26785714285714285, + "acc_stderr": 0.04203277291467762, + "acc_norm": 0.26785714285714285, + "acc_norm_stderr": 0.04203277291467762 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3194444444444444, + "acc_stderr": 0.03179876342176851, + "acc_norm": 0.3194444444444444, + "acc_norm_stderr": 0.03179876342176851 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24581005586592178, + "acc_stderr": 0.014400296429225608, + "acc_norm": 0.24581005586592178, + "acc_norm_stderr": 
0.014400296429225608 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.41544117647058826, + "acc_stderr": 0.029935342707877746, + "acc_norm": 0.41544117647058826, + "acc_norm_stderr": 0.029935342707877746 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.39183673469387753, + "acc_stderr": 0.031251275910891656, + "acc_norm": 0.39183673469387753, + "acc_norm_stderr": 0.031251275910891656 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5527426160337553, + "acc_stderr": 0.03236564251614192, + "acc_norm": 0.5527426160337553, + "acc_norm_stderr": 0.03236564251614192 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.32659713168187743, + "acc_stderr": 0.01197767670471599, + "acc_norm": 0.32659713168187743, + "acc_norm_stderr": 0.01197767670471599 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.36764705882352944, + "acc_stderr": 0.03384132045674119, + "acc_norm": 0.36764705882352944, + "acc_norm_stderr": 0.03384132045674119 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.36363636363636365, + "acc_stderr": 0.03756335775187897, + "acc_norm": 0.36363636363636365, + "acc_norm_stderr": 0.03756335775187897 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24112607099143207, + "mc1_stderr": 0.014974827279752334, + "mc2": 0.3794460140456843, + "mc2_stderr": 0.014936611984494383 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2066115702479339, + "acc_stderr": 0.013919866463909341, + "acc_norm": 0.2987012987012987, + "acc_norm_stderr": 0.015735657391438278 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + 
"harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + 
"harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "kyujinpy/KO-Platypus2-7B-ex", + "model_sha": "63ad569198c7fabc62f292604211fed3435b3f48", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/kyujinpy/KOR-Orca-Platypus-13B-v2/result_2023-11-10 07:29:20.json b/kyujinpy/KOR-Orca-Platypus-13B-v2/result_2023-11-10 07:29:20.json new file mode 100644 index 0000000000000000000000000000000000000000..880218cff3036be318cf9ec67a68babd7e1b3832 --- /dev/null +++ b/kyujinpy/KOR-Orca-Platypus-13B-v2/result_2023-11-10 07:29:20.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.39505119453924914, + "acc_stderr": 0.014285898292938167, + "acc_norm": 0.4402730375426621, + "acc_norm_stderr": 0.014506769524804248 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4082852021509659, + "acc_stderr": 0.00490511903984946, + "acc_norm": 0.5443138816968731, + "acc_norm_stderr": 0.004970145708187994 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.49707602339181284, + "acc_stderr": 0.03834759370936839, + "acc_norm": 0.49707602339181284, + "acc_norm_stderr": 0.03834759370936839 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5631067961165048, + "acc_stderr": 0.049111471073657764, + "acc_norm": 0.5631067961165048, + 
"acc_norm_stderr": 0.049111471073657764 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5070242656449553, + "acc_stderr": 0.017878199003432214, + "acc_norm": 0.5070242656449553, + "acc_norm_stderr": 0.017878199003432214 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45925925925925926, + "acc_stderr": 0.04304979692464242, + "acc_norm": 0.45925925925925926, + "acc_norm_stderr": 0.04304979692464242 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.33191489361702126, + "acc_stderr": 0.03078373675774564, + "acc_norm": 0.33191489361702126, + "acc_norm_stderr": 0.03078373675774564 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42771084337349397, + "acc_stderr": 0.038515976837185335, + "acc_norm": 0.42771084337349397, + "acc_norm_stderr": 0.038515976837185335 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5048231511254019, + "acc_stderr": 0.02839677044411129, + "acc_norm": 0.5048231511254019, + "acc_norm_stderr": 0.02839677044411129 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3991031390134529, + "acc_stderr": 0.03286745312567961, + "acc_norm": 0.3991031390134529, + "acc_norm_stderr": 0.03286745312567961 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5114503816793893, + "acc_stderr": 0.043841400240780176, + "acc_norm": 0.5114503816793893, + "acc_norm_stderr": 0.043841400240780176 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5606060606060606, + "acc_stderr": 0.0353608594752948, + "acc_norm": 0.5606060606060606, + "acc_norm_stderr": 0.0353608594752948 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.36551724137931035, + "acc_stderr": 0.040131241954243856, + "acc_norm": 
0.36551724137931035, + "acc_norm_stderr": 0.040131241954243856 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.04533838195929775, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.04533838195929775 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4495798319327731, + "acc_stderr": 0.03231293497137707, + "acc_norm": 0.4495798319327731, + "acc_norm_stderr": 0.03231293497137707 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4358974358974359, + "acc_stderr": 0.025141801511177498, + "acc_norm": 0.4358974358974359, + "acc_norm_stderr": 0.025141801511177498 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.04826217294139894, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.04826217294139894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3645320197044335, + "acc_stderr": 0.033864057460620905, + "acc_norm": 0.3645320197044335, + "acc_norm_stderr": 0.033864057460620905 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.45483870967741935, + "acc_stderr": 0.028327743091561053, + "acc_norm": 0.45483870967741935, + "acc_norm_stderr": 0.028327743091561053 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6623931623931624, + "acc_stderr": 0.030980296992618554, + "acc_norm": 0.6623931623931624, + "acc_norm_stderr": 0.030980296992618554 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4377358490566038, + "acc_stderr": 0.030533338430467516, + "acc_norm": 0.4377358490566038, + "acc_norm_stderr": 0.030533338430467516 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4818181818181818, + 
"acc_stderr": 0.04785964010794915, + "acc_norm": 0.4818181818181818, + "acc_norm_stderr": 0.04785964010794915 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.29259259259259257, + "acc_stderr": 0.02773896963217609, + "acc_norm": 0.29259259259259257, + "acc_norm_stderr": 0.02773896963217609 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33112582781456956, + "acc_stderr": 0.038425817186598696, + "acc_norm": 0.33112582781456956, + "acc_norm_stderr": 0.038425817186598696 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5174129353233831, + "acc_stderr": 0.035333892347392454, + "acc_norm": 0.5174129353233831, + "acc_norm_stderr": 0.035333892347392454 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3988439306358382, + "acc_stderr": 0.03733626655383509, + "acc_norm": 0.3988439306358382, + "acc_norm_stderr": 0.03733626655383509 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.023266512213730557, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.023266512213730557 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.04076663253918567, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.04076663253918567 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.63, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.63, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.49710982658959535, + "acc_stderr": 0.026918645383239004, + "acc_norm": 0.49710982658959535, + "acc_norm_stderr": 0.026918645383239004 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4785276073619632, + "acc_stderr": 0.0392474687675113, + "acc_norm": 0.4785276073619632, + "acc_norm_stderr": 0.0392474687675113 + }, + "harness|ko_mmlu_prehistory|5": { + 
"acc": 0.4845679012345679, + "acc_stderr": 0.027807490044276198, + "acc_norm": 0.4845679012345679, + "acc_norm_stderr": 0.027807490044276198 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.47150259067357514, + "acc_stderr": 0.03602573571288442, + "acc_norm": 0.47150259067357514, + "acc_norm_stderr": 0.03602573571288442 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.04185774424022056, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.04185774424022056 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5119266055045871, + "acc_stderr": 0.021431223617362223, + "acc_norm": 0.5119266055045871, + "acc_norm_stderr": 0.021431223617362223 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30158730158730157, + "acc_stderr": 0.04104947269903394, + "acc_norm": 0.30158730158730157, + "acc_norm_stderr": 0.04104947269903394 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.45098039215686275, + "acc_stderr": 0.028491993586171563, + "acc_norm": 0.45098039215686275, + "acc_norm_stderr": 0.028491993586171563 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6528925619834711, + "acc_stderr": 0.043457245702925335, + "acc_norm": 0.6528925619834711, + "acc_norm_stderr": 0.043457245702925335 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.45394736842105265, + "acc_stderr": 0.04051646342874141, + "acc_norm": 0.45394736842105265, + "acc_norm_stderr": 0.04051646342874141 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3382352941176471, + "acc_stderr": 0.01913994374848703, + "acc_norm": 0.3382352941176471, + "acc_norm_stderr": 0.01913994374848703 + 
}, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32978723404255317, + "acc_stderr": 0.028045946942042398, + "acc_norm": 0.32978723404255317, + "acc_norm_stderr": 0.028045946942042398 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.20535714285714285, + "acc_stderr": 0.038342410214190735, + "acc_norm": 0.20535714285714285, + "acc_norm_stderr": 0.038342410214190735 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.03350991604696043, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.03350991604696043 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.35, + "acc_stderr": 0.04793724854411018, + "acc_norm": 0.35, + "acc_norm_stderr": 0.04793724854411018 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3602941176470588, + "acc_stderr": 0.029163128570670733, + "acc_norm": 0.3602941176470588, + "acc_norm_stderr": 0.029163128570670733 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5102040816326531, + "acc_stderr": 0.03200255347893782, + "acc_norm": 0.5102040816326531, + "acc_norm_stderr": 0.03200255347893782 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5232067510548524, + "acc_stderr": 0.032512152011410174, + "acc_norm": 0.5232067510548524, + "acc_norm_stderr": 0.032512152011410174 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.31290743155149936, + "acc_stderr": 0.011842529823062997, + "acc_norm": 0.31290743155149936, + "acc_norm_stderr": 0.011842529823062997 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.44607843137254904, + "acc_stderr": 
0.03488845451304974, + "acc_norm": 0.44607843137254904, + "acc_norm_stderr": 0.03488845451304974 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.49696969696969695, + "acc_stderr": 0.03904272341431856, + "acc_norm": 0.49696969696969695, + "acc_norm_stderr": 0.03904272341431856 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2631578947368421, + "mc1_stderr": 0.015415241740237028, + "mc2": 0.41635797039765154, + "mc2_stderr": 0.015043272865517212 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5596221959858324, + "acc_stderr": 0.017067699774312967, + "acc_norm": 0.6505312868949232, + "acc_norm_stderr": 0.01639279708576985 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 
1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "kyujinpy/KOR-Orca-Platypus-13B-v2", + "model_sha": "a7466c0b153313306597a1f3abee65a9ee73869e", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/kyujinpy/KOR-Orca-Platypus-13B-v3/result_2023-11-12 12:48:05.json b/kyujinpy/KOR-Orca-Platypus-13B-v3/result_2023-11-12 12:48:05.json new file mode 100644 index 0000000000000000000000000000000000000000..85e918295cf9b1e1c41a466e35f09a9bf5272dc7 --- /dev/null +++ 
b/kyujinpy/KOR-Orca-Platypus-13B-v3/result_2023-11-12 12:48:05.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3924914675767918, + "acc_stderr": 0.014269634635670722, + "acc_norm": 0.4377133105802048, + "acc_norm_stderr": 0.014497573881108282 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4113722366062537, + "acc_stderr": 0.004910767540867421, + "acc_norm": 0.5427205735909182, + "acc_norm_stderr": 0.004971534874389945 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5087719298245614, + "acc_stderr": 0.038342347441649924, + "acc_norm": 0.5087719298245614, + "acc_norm_stderr": 0.038342347441649924 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5728155339805825, + "acc_stderr": 0.048979577377811674, + "acc_norm": 0.5728155339805825, + "acc_norm_stderr": 0.048979577377811674 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5274584929757343, + "acc_stderr": 0.017852981266633938, + "acc_norm": 0.5274584929757343, + "acc_norm_stderr": 0.017852981266633938 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3851851851851852, + "acc_stderr": 0.042039210401562783, + "acc_norm": 0.3851851851851852, + "acc_norm_stderr": 0.042039210401562783 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.34893617021276596, + "acc_stderr": 0.031158522131357773, + "acc_norm": 0.34893617021276596, + "acc_norm_stderr": 0.031158522131357773 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39156626506024095, + "acc_stderr": 0.03799857454479636, + "acc_norm": 0.39156626506024095, + "acc_norm_stderr": 0.03799857454479636 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4790996784565916, + "acc_stderr": 0.028373270961069414, + "acc_norm": 0.4790996784565916, + "acc_norm_stderr": 0.028373270961069414 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4304932735426009, + 
"acc_stderr": 0.033231973029429394, + "acc_norm": 0.4304932735426009, + "acc_norm_stderr": 0.033231973029429394 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.549618320610687, + "acc_stderr": 0.04363643698524779, + "acc_norm": 0.549618320610687, + "acc_norm_stderr": 0.04363643698524779 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5808080808080808, + "acc_stderr": 0.035155207286704175, + "acc_norm": 0.5808080808080808, + "acc_norm_stderr": 0.035155207286704175 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4, + "acc_stderr": 0.04082482904638628, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04082482904638628 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.04488482852329017, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.04488482852329017 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.46638655462184875, + "acc_stderr": 0.03240501447690071, + "acc_norm": 0.46638655462184875, + "acc_norm_stderr": 0.03240501447690071 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4564102564102564, + "acc_stderr": 0.025254485424799605, + "acc_norm": 0.4564102564102564, + "acc_norm_stderr": 0.025254485424799605 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.04820403072760627, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.04820403072760627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3694581280788177, + 
"acc_stderr": 0.03395970381998576, + "acc_norm": 0.3694581280788177, + "acc_norm_stderr": 0.03395970381998576 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4612903225806452, + "acc_stderr": 0.02835863485983693, + "acc_norm": 0.4612903225806452, + "acc_norm_stderr": 0.02835863485983693 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6538461538461539, + "acc_stderr": 0.0311669573672359, + "acc_norm": 0.6538461538461539, + "acc_norm_stderr": 0.0311669573672359 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4339622641509434, + "acc_stderr": 0.030503292013342592, + "acc_norm": 0.4339622641509434, + "acc_norm_stderr": 0.030503292013342592 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4636363636363636, + "acc_stderr": 0.047764491623961985, + "acc_norm": 0.4636363636363636, + "acc_norm_stderr": 0.047764491623961985 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2814814814814815, + "acc_stderr": 0.027420019350945266, + "acc_norm": 0.2814814814814815, + "acc_norm_stderr": 0.027420019350945266 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.32450331125827814, + "acc_stderr": 0.03822746937658753, + "acc_norm": 0.32450331125827814, + "acc_norm_stderr": 0.03822746937658753 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.582089552238806, + "acc_stderr": 0.03487558640462063, + "acc_norm": 0.582089552238806, + "acc_norm_stderr": 0.03487558640462063 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.43352601156069365, + "acc_stderr": 0.03778621079092056, + "acc_norm": 0.43352601156069365, + "acc_norm_stderr": 0.03778621079092056 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3253968253968254, + "acc_stderr": 0.024130158299762613, + "acc_norm": 0.3253968253968254, + "acc_norm_stderr": 0.024130158299762613 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3819444444444444, + "acc_stderr": 0.040629907841466674, + "acc_norm": 0.3819444444444444, + "acc_norm_stderr": 
0.040629907841466674 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.64, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.64, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.48554913294797686, + "acc_stderr": 0.02690784985628254, + "acc_norm": 0.48554913294797686, + "acc_norm_stderr": 0.02690784985628254 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4539877300613497, + "acc_stderr": 0.0391170190467718, + "acc_norm": 0.4539877300613497, + "acc_norm_stderr": 0.0391170190467718 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.45987654320987653, + "acc_stderr": 0.027731022753539288, + "acc_norm": 0.45987654320987653, + "acc_norm_stderr": 0.027731022753539288 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.47150259067357514, + "acc_stderr": 0.036025735712884414, + "acc_norm": 0.47150259067357514, + "acc_norm_stderr": 0.036025735712884414 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.042270544512321984, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.042270544512321984 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5431192660550459, + "acc_stderr": 0.021357458785226217, + "acc_norm": 0.5431192660550459, + "acc_norm_stderr": 0.021357458785226217 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3412698412698413, + "acc_stderr": 0.042407993275749255, + "acc_norm": 0.3412698412698413, + "acc_norm_stderr": 0.042407993275749255 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4215686274509804, + "acc_stderr": 0.028275490156791434, + "acc_norm": 0.4215686274509804, + 
"acc_norm_stderr": 0.028275490156791434 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6115702479338843, + "acc_stderr": 0.04449270350068382, + "acc_norm": 0.6115702479338843, + "acc_norm_stderr": 0.04449270350068382 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.46710526315789475, + "acc_stderr": 0.04060127035236397, + "acc_norm": 0.46710526315789475, + "acc_norm_stderr": 0.04060127035236397 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.33986928104575165, + "acc_stderr": 0.019162418588623553, + "acc_norm": 0.33986928104575165, + "acc_norm_stderr": 0.019162418588623553 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.36879432624113473, + "acc_stderr": 0.028782227561347247, + "acc_norm": 0.36879432624113473, + "acc_norm_stderr": 0.028782227561347247 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.22321428571428573, + "acc_stderr": 0.03952301967702511, + "acc_norm": 0.22321428571428573, + "acc_norm_stderr": 0.03952301967702511 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.39351851851851855, + "acc_stderr": 0.03331747876370312, + "acc_norm": 0.39351851851851855, + "acc_norm_stderr": 0.03331747876370312 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24022346368715083, + "acc_stderr": 0.014288343803925295, + "acc_norm": 0.24022346368715083, + "acc_norm_stderr": 0.014288343803925295 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.40808823529411764, + "acc_stderr": 
0.029855261393483924, + "acc_norm": 0.40808823529411764, + "acc_norm_stderr": 0.029855261393483924 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5102040816326531, + "acc_stderr": 0.03200255347893782, + "acc_norm": 0.5102040816326531, + "acc_norm_stderr": 0.03200255347893782 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5358649789029536, + "acc_stderr": 0.03246338898055659, + "acc_norm": 0.5358649789029536, + "acc_norm_stderr": 0.03246338898055659 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.32790091264667537, + "acc_stderr": 0.01198993664066654, + "acc_norm": 0.32790091264667537, + "acc_norm_stderr": 0.01198993664066654 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4362745098039216, + "acc_stderr": 0.03480693138457039, + "acc_norm": 0.4362745098039216, + "acc_norm_stderr": 0.03480693138457039 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4727272727272727, + "acc_stderr": 0.03898531605579419, + "acc_norm": 0.4727272727272727, + "acc_norm_stderr": 0.03898531605579419 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2386780905752754, + "mc1_stderr": 0.014922629695456416, + "mc2": 0.3858413065485574, + "mc2_stderr": 0.014719494606316911 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.538370720188902, + "acc_stderr": 0.017139660221845564, + "acc_norm": 0.6257378984651711, + "acc_norm_stderr": 0.016637917789798746 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + 
"harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + 
"harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "kyujinpy/KOR-Orca-Platypus-13B-v3", + "model_sha": "249ae0349d4c536d33d68d9d36946b1abd76c80f", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/kyujinpy/Ko-PlatYi-6B-O/result_2023-12-04 08:36:16.json b/kyujinpy/Ko-PlatYi-6B-O/result_2023-12-04 08:36:16.json new file mode 100644 index 0000000000000000000000000000000000000000..1fb14c7adf47229962b38a8da4fa0acd47719968 --- /dev/null +++ b/kyujinpy/Ko-PlatYi-6B-O/result_2023-12-04 08:36:16.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3703071672354949, + "acc_stderr": 0.01411129875167495, + "acc_norm": 0.4351535836177474, + "acc_norm_stderr": 0.01448798619718605 + }, + "harness|ko_hellaswag|10": { + "acc": 0.40131447918741286, + "acc_stderr": 0.004891626718097273, + "acc_norm": 0.5359490141406095, + "acc_norm_stderr": 0.004976867796583556 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.49122807017543857, + "acc_stderr": 0.038342347441649924, + "acc_norm": 0.49122807017543857, + "acc_norm_stderr": 0.038342347441649924 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6407766990291263, + "acc_stderr": 0.04750458399041697, + "acc_norm": 0.6407766990291263, + "acc_norm_stderr": 0.04750458399041697 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5798212005108557, + "acc_stderr": 0.017650651363078026, + "acc_norm": 0.5798212005108557, + "acc_norm_stderr": 0.017650651363078026 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.042925967182569816, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.042925967182569816 + }, + "harness|ko_mmlu_abstract_algebra|5": { 
+ "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4340425531914894, + "acc_stderr": 0.032400380867927465, + "acc_norm": 0.4340425531914894, + "acc_norm_stderr": 0.032400380867927465 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3795180722891566, + "acc_stderr": 0.037777988227480165, + "acc_norm": 0.3795180722891566, + "acc_norm_stderr": 0.037777988227480165 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5048231511254019, + "acc_stderr": 0.028396770444111298, + "acc_norm": 0.5048231511254019, + "acc_norm_stderr": 0.028396770444111298 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.48878923766816146, + "acc_stderr": 0.033549366530984746, + "acc_norm": 0.48878923766816146, + "acc_norm_stderr": 0.033549366530984746 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5419847328244275, + "acc_stderr": 0.04369802690578757, + "acc_norm": 0.5419847328244275, + "acc_norm_stderr": 0.04369802690578757 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6565656565656566, + "acc_stderr": 0.033832012232444426, + "acc_norm": 0.6565656565656566, + "acc_norm_stderr": 0.033832012232444426 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5103448275862069, + "acc_stderr": 0.04165774775728763, + "acc_norm": 0.5103448275862069, + "acc_norm_stderr": 0.04165774775728763 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.04533838195929776, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.04533838195929776 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5210084033613446, + "acc_stderr": 0.032449808499900284, + "acc_norm": 0.5210084033613446, + "acc_norm_stderr": 0.032449808499900284 + }, + 
"harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.46153846153846156, + "acc_stderr": 0.025275892070240634, + "acc_norm": 0.46153846153846156, + "acc_norm_stderr": 0.025275892070240634 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.64, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.64, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5277777777777778, + "acc_stderr": 0.048262172941398944, + "acc_norm": 0.5277777777777778, + "acc_norm_stderr": 0.048262172941398944 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39408866995073893, + "acc_stderr": 0.03438157967036545, + "acc_norm": 0.39408866995073893, + "acc_norm_stderr": 0.03438157967036545 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5096774193548387, + "acc_stderr": 0.02843867799890955, + "acc_norm": 0.5096774193548387, + "acc_norm_stderr": 0.02843867799890955 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7393162393162394, + "acc_stderr": 0.02876034895652341, + "acc_norm": 0.7393162393162394, + "acc_norm_stderr": 0.02876034895652341 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5056603773584906, + "acc_stderr": 0.03077090076385131, + "acc_norm": 0.5056603773584906, + "acc_norm_stderr": 0.03077090076385131 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5636363636363636, + "acc_stderr": 0.04750185058907296, + "acc_norm": 0.5636363636363636, + "acc_norm_stderr": 0.04750185058907296 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3296296296296296, + "acc_stderr": 0.028661201116524582, + "acc_norm": 0.3296296296296296, + "acc_norm_stderr": 0.028661201116524582 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.423841059602649, + "acc_stderr": 0.04034846678603397, + "acc_norm": 0.423841059602649, + 
"acc_norm_stderr": 0.04034846678603397 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6318407960199005, + "acc_stderr": 0.03410410565495301, + "acc_norm": 0.6318407960199005, + "acc_norm_stderr": 0.03410410565495301 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4046242774566474, + "acc_stderr": 0.0374246119388725, + "acc_norm": 0.4046242774566474, + "acc_norm_stderr": 0.0374246119388725 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3439153439153439, + "acc_stderr": 0.02446442662559643, + "acc_norm": 0.3439153439153439, + "acc_norm_stderr": 0.02446442662559643 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4236111111111111, + "acc_stderr": 0.041321250197233685, + "acc_norm": 0.4236111111111111, + "acc_norm_stderr": 0.041321250197233685 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.62, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.62, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5346820809248555, + "acc_stderr": 0.02685425792825888, + "acc_norm": 0.5346820809248555, + "acc_norm_stderr": 0.02685425792825888 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.49693251533742333, + "acc_stderr": 0.03928297078179663, + "acc_norm": 0.49693251533742333, + "acc_norm_stderr": 0.03928297078179663 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5030864197530864, + "acc_stderr": 0.02782021415859437, + "acc_norm": 0.5030864197530864, + "acc_norm_stderr": 0.02782021415859437 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6321243523316062, + "acc_stderr": 0.034801756684660366, + "acc_norm": 
0.6321243523316062, + "acc_norm_stderr": 0.034801756684660366 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.37719298245614036, + "acc_stderr": 0.04559522141958215, + "acc_norm": 0.37719298245614036, + "acc_norm_stderr": 0.04559522141958215 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6458715596330276, + "acc_stderr": 0.0205047290138291, + "acc_norm": 0.6458715596330276, + "acc_norm_stderr": 0.0205047290138291 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04216370213557836, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04216370213557836 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.46405228758169936, + "acc_stderr": 0.02855582751652879, + "acc_norm": 0.46405228758169936, + "acc_norm_stderr": 0.02855582751652879 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.53, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.53, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.71900826446281, + "acc_stderr": 0.04103203830514511, + "acc_norm": 0.71900826446281, + "acc_norm_stderr": 0.04103203830514511 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5131578947368421, + "acc_stderr": 0.04067533136309173, + "acc_norm": 0.5131578947368421, + "acc_norm_stderr": 0.04067533136309173 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.434640522875817, + "acc_stderr": 0.020054269200726452, + "acc_norm": 0.434640522875817, + "acc_norm_stderr": 0.020054269200726452 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.35106382978723405, + "acc_stderr": 0.028473501272963764, + "acc_norm": 0.35106382978723405, + "acc_norm_stderr": 0.028473501272963764 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.30357142857142855, + "acc_stderr": 0.04364226155841044, + "acc_norm": 0.30357142857142855, + "acc_norm_stderr": 0.04364226155841044 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3425925925925926, 
+ "acc_stderr": 0.03236585252602157, + "acc_norm": 0.3425925925925926, + "acc_norm_stderr": 0.03236585252602157 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.28268156424581004, + "acc_stderr": 0.015060381730018104, + "acc_norm": 0.28268156424581004, + "acc_norm_stderr": 0.015060381730018104 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.47058823529411764, + "acc_stderr": 0.030320243265004137, + "acc_norm": 0.47058823529411764, + "acc_norm_stderr": 0.030320243265004137 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5142857142857142, + "acc_stderr": 0.03199615232806287, + "acc_norm": 0.5142857142857142, + "acc_norm_stderr": 0.03199615232806287 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5991561181434599, + "acc_stderr": 0.031900803894732356, + "acc_norm": 0.5991561181434599, + "acc_norm_stderr": 0.031900803894732356 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.34159061277705344, + "acc_stderr": 0.012112391320842842, + "acc_norm": 0.34159061277705344, + "acc_norm_stderr": 0.012112391320842842 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.553921568627451, + "acc_stderr": 0.03488845451304974, + "acc_norm": 0.553921568627451, + "acc_norm_stderr": 0.03488845451304974 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.03756335775187897, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.03756335775187897 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.26193390452876375, + "mc1_stderr": 0.015392118805015002, + "mc2": 0.4101339837114337, + "mc2_stderr": 0.014973082840461931 + }, + 
"harness|ko_commongen_v2|2": { + "acc": 0.5395513577331759, + "acc_stderr": 0.017136487626049846, + "acc_norm": 0.5938606847697757, + "acc_norm_stderr": 0.0168847495031914 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + 
"harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "kyujinpy/Ko-PlatYi-6B-O", + "model_sha": "3774765323b0aa133fbf0aac2b600662619143c2", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/kyujinpy/Ko-PlatYi-6B-gu/result_2023-12-03 20:06:11.json b/kyujinpy/Ko-PlatYi-6B-gu/result_2023-12-03 20:06:11.json new file mode 100644 index 0000000000000000000000000000000000000000..3af13ef2b021755e6e8e82f58702a547806abc19 --- /dev/null +++ b/kyujinpy/Ko-PlatYi-6B-gu/result_2023-12-03 20:06:11.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.371160409556314, + "acc_stderr": 0.014117971901142824, + "acc_norm": 0.4274744027303754, + "acc_norm_stderr": 0.014456862944650645 + }, + "harness|ko_hellaswag|10": { + "acc": 0.40310695080661224, + "acc_stderr": 0.004895194143892681, + "acc_norm": 0.5400318661621191, + "acc_norm_stderr": 0.004973762948302801 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 
0.49122807017543857, + "acc_stderr": 0.038342347441649924, + "acc_norm": 0.49122807017543857, + "acc_norm_stderr": 0.038342347441649924 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5436893203883495, + "acc_stderr": 0.049318019942204146, + "acc_norm": 0.5436893203883495, + "acc_norm_stderr": 0.049318019942204146 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.561941251596424, + "acc_stderr": 0.017742232238257244, + "acc_norm": 0.561941251596424, + "acc_norm_stderr": 0.017742232238257244 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45185185185185184, + "acc_stderr": 0.042992689054808624, + "acc_norm": 0.45185185185185184, + "acc_norm_stderr": 0.042992689054808624 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542125, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542125 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.37446808510638296, + "acc_stderr": 0.03163910665367291, + "acc_norm": 0.37446808510638296, + "acc_norm_stderr": 0.03163910665367291 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39156626506024095, + "acc_stderr": 0.037998574544796354, + "acc_norm": 0.39156626506024095, + "acc_norm_stderr": 0.037998574544796354 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.48231511254019294, + "acc_stderr": 0.02838032284907713, + "acc_norm": 0.48231511254019294, + "acc_norm_stderr": 0.02838032284907713 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.49327354260089684, + "acc_stderr": 0.033554765962343545, + "acc_norm": 0.49327354260089684, + "acc_norm_stderr": 0.033554765962343545 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5190839694656488, + "acc_stderr": 0.04382094705550988, + "acc_norm": 0.5190839694656488, + "acc_norm_stderr": 0.04382094705550988 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_geography|5": { + 
"acc": 0.5909090909090909, + "acc_stderr": 0.03502975799413008, + "acc_norm": 0.5909090909090909, + "acc_norm_stderr": 0.03502975799413008 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.46206896551724136, + "acc_stderr": 0.041546596717075474, + "acc_norm": 0.46206896551724136, + "acc_norm_stderr": 0.041546596717075474 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237655, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237655 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5126050420168067, + "acc_stderr": 0.032468167657521745, + "acc_norm": 0.5126050420168067, + "acc_norm_stderr": 0.032468167657521745 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.46923076923076923, + "acc_stderr": 0.025302958890850154, + "acc_norm": 0.46923076923076923, + "acc_norm_stderr": 0.025302958890850154 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5370370370370371, + "acc_stderr": 0.04820403072760627, + "acc_norm": 0.5370370370370371, + "acc_norm_stderr": 0.04820403072760627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4088669950738916, + "acc_stderr": 0.03459058815883232, + "acc_norm": 0.4088669950738916, + "acc_norm_stderr": 0.03459058815883232 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.46774193548387094, + "acc_stderr": 0.028384747788813326, + "acc_norm": 0.46774193548387094, + "acc_norm_stderr": 0.028384747788813326 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.717948717948718, + "acc_stderr": 0.029480360549541194, + "acc_norm": 0.717948717948718, + "acc_norm_stderr": 0.029480360549541194 
+ }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5207547169811321, + "acc_stderr": 0.030746349975723463, + "acc_norm": 0.5207547169811321, + "acc_norm_stderr": 0.030746349975723463 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.026962424325073828, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.026962424325073828 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.038020397601079024, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.038020397601079024 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.582089552238806, + "acc_stderr": 0.034875586404620636, + "acc_norm": 0.582089552238806, + "acc_norm_stderr": 0.034875586404620636 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4393063583815029, + "acc_stderr": 0.03784271932887467, + "acc_norm": 0.4393063583815029, + "acc_norm_stderr": 0.03784271932887467 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3253968253968254, + "acc_stderr": 0.024130158299762613, + "acc_norm": 0.3253968253968254, + "acc_norm_stderr": 0.024130158299762613 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4097222222222222, + "acc_stderr": 0.041124909746707884, + "acc_norm": 0.4097222222222222, + "acc_norm_stderr": 0.041124909746707884 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237101, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237101 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.476878612716763, + "acc_stderr": 0.026890297881303125, + "acc_norm": 0.476878612716763, + 
"acc_norm_stderr": 0.026890297881303125 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.44785276073619634, + "acc_stderr": 0.03906947479456601, + "acc_norm": 0.44785276073619634, + "acc_norm_stderr": 0.03906947479456601 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.49691358024691357, + "acc_stderr": 0.02782021415859437, + "acc_norm": 0.49691358024691357, + "acc_norm_stderr": 0.02782021415859437 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5595854922279793, + "acc_stderr": 0.03582724530036094, + "acc_norm": 0.5595854922279793, + "acc_norm_stderr": 0.03582724530036094 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.30701754385964913, + "acc_stderr": 0.0433913832257986, + "acc_norm": 0.30701754385964913, + "acc_norm_stderr": 0.0433913832257986 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6165137614678899, + "acc_stderr": 0.020847156641915984, + "acc_norm": 0.6165137614678899, + "acc_norm_stderr": 0.020847156641915984 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.040061680838488774, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.040061680838488774 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4084967320261438, + "acc_stderr": 0.028146405993096358, + "acc_norm": 0.4084967320261438, + "acc_norm_stderr": 0.028146405993096358 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6198347107438017, + "acc_stderr": 0.04431324501968431, + "acc_norm": 0.6198347107438017, + "acc_norm_stderr": 0.04431324501968431 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.42105263157894735, + "acc_stderr": 0.04017901275981748, + 
"acc_norm": 0.42105263157894735, + "acc_norm_stderr": 0.04017901275981748 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4019607843137255, + "acc_stderr": 0.01983517648437538, + "acc_norm": 0.4019607843137255, + "acc_norm_stderr": 0.01983517648437538 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.33687943262411346, + "acc_stderr": 0.02819553487396673, + "acc_norm": 0.33687943262411346, + "acc_norm_stderr": 0.02819553487396673 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3392857142857143, + "acc_stderr": 0.04493949068613539, + "acc_norm": 0.3392857142857143, + "acc_norm_stderr": 0.04493949068613539 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3472222222222222, + "acc_stderr": 0.032468872436376486, + "acc_norm": 0.3472222222222222, + "acc_norm_stderr": 0.032468872436376486 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27932960893854747, + "acc_stderr": 0.015005762446786164, + "acc_norm": 0.27932960893854747, + "acc_norm_stderr": 0.015005762446786164 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.39705882352941174, + "acc_stderr": 0.029722152099280065, + "acc_norm": 0.39705882352941174, + "acc_norm_stderr": 0.029722152099280065 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.43673469387755104, + "acc_stderr": 0.031751952375833226, + "acc_norm": 0.43673469387755104, + "acc_norm_stderr": 0.031751952375833226 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5611814345991561, + "acc_stderr": 0.032302649315470375, + "acc_norm": 0.5611814345991561, + "acc_norm_stderr": 0.032302649315470375 + }, + 
"harness|ko_mmlu_professional_law|5": { + "acc": 0.3344198174706649, + "acc_stderr": 0.012049668983214936, + "acc_norm": 0.3344198174706649, + "acc_norm_stderr": 0.012049668983214936 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5441176470588235, + "acc_stderr": 0.034956245220154766, + "acc_norm": 0.5441176470588235, + "acc_norm_stderr": 0.034956245220154766 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5757575757575758, + "acc_stderr": 0.038592681420702636, + "acc_norm": 0.5757575757575758, + "acc_norm_stderr": 0.038592681420702636 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2717258261933905, + "mc1_stderr": 0.015572840452875828, + "mc2": 0.41224631631998104, + "mc2_stderr": 0.014851766323164695 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5312868949232585, + "acc_stderr": 0.017156666859785466, + "acc_norm": 0.6115702479338843, + "acc_norm_stderr": 0.016756921571069422 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + 
"harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "kyujinpy/Ko-PlatYi-6B-gu", + "model_sha": "bc972ad5d404c634de847af30c1e4e665f18e939", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git 
a/kyujinpy/Ko-PlatYi-6B-kiwi/result_2023-12-03 21:11:04.json b/kyujinpy/Ko-PlatYi-6B-kiwi/result_2023-12-03 21:11:04.json new file mode 100644 index 0000000000000000000000000000000000000000..123cb800180701d8186ae95938846844f83332da --- /dev/null +++ b/kyujinpy/Ko-PlatYi-6B-kiwi/result_2023-12-03 21:11:04.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.35494880546075086, + "acc_stderr": 0.013983036904094095, + "acc_norm": 0.4197952218430034, + "acc_norm_stderr": 0.014422181226303024 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4047002589125672, + "acc_stderr": 0.004898308167211844, + "acc_norm": 0.5361481776538538, + "acc_norm_stderr": 0.00497672412485057 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5146198830409356, + "acc_stderr": 0.038331852752130254, + "acc_norm": 0.5146198830409356, + "acc_norm_stderr": 0.038331852752130254 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6213592233009708, + "acc_stderr": 0.04802694698258974, + "acc_norm": 0.6213592233009708, + "acc_norm_stderr": 0.04802694698258974 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5287356321839081, + "acc_stderr": 0.017850410794380173, + "acc_norm": 0.5287356321839081, + "acc_norm_stderr": 0.017850410794380173 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45925925925925926, + "acc_stderr": 0.04304979692464243, + "acc_norm": 0.45925925925925926, + "acc_norm_stderr": 0.04304979692464243 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.40425531914893614, + "acc_stderr": 0.03208115750788684, + "acc_norm": 0.40425531914893614, + "acc_norm_stderr": 0.03208115750788684 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39759036144578314, + "acc_stderr": 0.03809973084540219, + "acc_norm": 0.39759036144578314, + "acc_norm_stderr": 0.03809973084540219 + }, + 
"harness|ko_mmlu_philosophy|5": { + "acc": 0.5273311897106109, + "acc_stderr": 0.02835563356832818, + "acc_norm": 0.5273311897106109, + "acc_norm_stderr": 0.02835563356832818 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.47085201793721976, + "acc_stderr": 0.03350073248773403, + "acc_norm": 0.47085201793721976, + "acc_norm_stderr": 0.03350073248773403 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5038167938931297, + "acc_stderr": 0.04385162325601553, + "acc_norm": 0.5038167938931297, + "acc_norm_stderr": 0.04385162325601553 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6515151515151515, + "acc_stderr": 0.033948539651564025, + "acc_norm": 0.6515151515151515, + "acc_norm_stderr": 0.033948539651564025 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.496551724137931, + "acc_stderr": 0.041665675771015785, + "acc_norm": 0.496551724137931, + "acc_norm_stderr": 0.041665675771015785 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.044405219061793275, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.044405219061793275 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5504201680672269, + "acc_stderr": 0.03231293497137707, + "acc_norm": 0.5504201680672269, + "acc_norm_stderr": 0.03231293497137707 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.47435897435897434, + "acc_stderr": 0.02531764972644865, + "acc_norm": 0.47435897435897434, + "acc_norm_stderr": 0.02531764972644865 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 
0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5185185185185185, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.5185185185185185, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3891625615763547, + "acc_stderr": 0.034304624161038716, + "acc_norm": 0.3891625615763547, + "acc_norm_stderr": 0.034304624161038716 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5225806451612903, + "acc_stderr": 0.02841498501970786, + "acc_norm": 0.5225806451612903, + "acc_norm_stderr": 0.02841498501970786 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7307692307692307, + "acc_stderr": 0.02905858830374885, + "acc_norm": 0.7307692307692307, + "acc_norm_stderr": 0.02905858830374885 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.49056603773584906, + "acc_stderr": 0.030767394707808093, + "acc_norm": 0.49056603773584906, + "acc_norm_stderr": 0.030767394707808093 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5727272727272728, + "acc_stderr": 0.047381987035454834, + "acc_norm": 0.5727272727272728, + "acc_norm_stderr": 0.047381987035454834 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3111111111111111, + "acc_stderr": 0.028226446749683515, + "acc_norm": 0.3111111111111111, + "acc_norm_stderr": 0.028226446749683515 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.03757949922943343, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.03757949922943343 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5870646766169154, + "acc_stderr": 0.03481520803367348, + "acc_norm": 0.5870646766169154, + "acc_norm_stderr": 0.03481520803367348 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.44508670520231214, + "acc_stderr": 0.03789401760283647, + "acc_norm": 0.44508670520231214, + "acc_norm_stderr": 0.03789401760283647 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3333333333333333, + 
"acc_stderr": 0.024278568024307712, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.024278568024307712 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4791666666666667, + "acc_stderr": 0.041775789507399935, + "acc_norm": 0.4791666666666667, + "acc_norm_stderr": 0.041775789507399935 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.63, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.63, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5086705202312138, + "acc_stderr": 0.026915047355369804, + "acc_norm": 0.5086705202312138, + "acc_norm_stderr": 0.026915047355369804 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.44785276073619634, + "acc_stderr": 0.03906947479456601, + "acc_norm": 0.44785276073619634, + "acc_norm_stderr": 0.03906947479456601 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.027777777777777797, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.027777777777777797 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6010362694300518, + "acc_stderr": 0.03533999094065696, + "acc_norm": 0.6010362694300518, + "acc_norm_stderr": 0.03533999094065696 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3508771929824561, + "acc_stderr": 0.04489539350270697, + "acc_norm": 0.3508771929824561, + "acc_norm_stderr": 0.04489539350270697 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6440366972477064, + "acc_stderr": 0.020528559278244218, + "acc_norm": 0.6440366972477064, + "acc_norm_stderr": 0.020528559278244218 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 
0.30952380952380953, + "acc_stderr": 0.04134913018303316, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.04134913018303316 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4673202614379085, + "acc_stderr": 0.028568699752225868, + "acc_norm": 0.4673202614379085, + "acc_norm_stderr": 0.028568699752225868 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.53, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.53, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6611570247933884, + "acc_stderr": 0.04320767807536669, + "acc_norm": 0.6611570247933884, + "acc_norm_stderr": 0.04320767807536669 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4407894736842105, + "acc_stderr": 0.040403110624904356, + "acc_norm": 0.4407894736842105, + "acc_norm_stderr": 0.040403110624904356 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.01994491413687358, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.01994491413687358 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.30141843971631205, + "acc_stderr": 0.02737412888263115, + "acc_norm": 0.30141843971631205, + "acc_norm_stderr": 0.02737412888263115 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25892857142857145, + "acc_stderr": 0.041577515398656284, + "acc_norm": 0.25892857142857145, + "acc_norm_stderr": 0.041577515398656284 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.03293377139415191, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.03293377139415191 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2759776536312849, + "acc_stderr": 0.014950103002475363, + "acc_norm": 0.2759776536312849, + "acc_norm_stderr": 0.014950103002475363 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + 
"harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.47, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.47, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4375, + "acc_stderr": 0.030134614954403924, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.030134614954403924 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4857142857142857, + "acc_stderr": 0.03199615232806286, + "acc_norm": 0.4857142857142857, + "acc_norm_stderr": 0.03199615232806286 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5864978902953587, + "acc_stderr": 0.03205649904851858, + "acc_norm": 0.5864978902953587, + "acc_norm_stderr": 0.03205649904851858 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3428943937418514, + "acc_stderr": 0.012123463271585895, + "acc_norm": 0.3428943937418514, + "acc_norm_stderr": 0.012123463271585895 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5294117647058824, + "acc_stderr": 0.03503235296367992, + "acc_norm": 0.5294117647058824, + "acc_norm_stderr": 0.03503235296367992 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5575757575757576, + "acc_stderr": 0.03878372113711275, + "acc_norm": 0.5575757575757576, + "acc_norm_stderr": 0.03878372113711275 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24969400244798043, + "mc1_stderr": 0.015152286907148123, + "mc2": 0.3830022681696009, + "mc2_stderr": 0.014630531364087136 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.538370720188902, + "acc_stderr": 0.017139660221845553, + "acc_norm": 0.6375442739079102, + "acc_norm_stderr": 0.016527131240453692 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + 
"harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + 
"harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "kyujinpy/Ko-PlatYi-6B-kiwi", + "model_sha": "ba064c7052774a9d2935b9066785962323f9190f", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/kyujinpy/Ko-PlatYi-6B/result_2023-12-03 08:38:12.json b/kyujinpy/Ko-PlatYi-6B/result_2023-12-03 08:38:12.json new file mode 100644 index 0000000000000000000000000000000000000000..53f39ccd1e70dc432a32a2df5dda12364a6c9681 --- /dev/null +++ b/kyujinpy/Ko-PlatYi-6B/result_2023-12-03 08:38:12.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.35665529010238906, + "acc_stderr": 0.013998056902620194, + "acc_norm": 0.4300341296928328, + "acc_norm_stderr": 0.014467631559137994 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4018123879705238, + "acc_stderr": 0.004892624490937213, + "acc_norm": 0.5354511053574985, + "acc_norm_stderr": 0.004977223485342017 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.47953216374269003, + "acc_stderr": 0.038316105328219316, + "acc_norm": 0.47953216374269003, + "acc_norm_stderr": 0.038316105328219316 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5825242718446602, + "acc_stderr": 0.04882840548212238, + "acc_norm": 0.5825242718446602, + "acc_norm_stderr": 0.04882840548212238 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5606641123882503, + "acc_stderr": 0.017747874245683606, + "acc_norm": 0.5606641123882503, + "acc_norm_stderr": 
0.017747874245683606 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.043163785995113245, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.043163785995113245 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.40425531914893614, + "acc_stderr": 0.032081157507886836, + "acc_norm": 0.40425531914893614, + "acc_norm_stderr": 0.032081157507886836 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.45180722891566266, + "acc_stderr": 0.03874371556587953, + "acc_norm": 0.45180722891566266, + "acc_norm_stderr": 0.03874371556587953 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4983922829581994, + "acc_stderr": 0.028397944907806612, + "acc_norm": 0.4983922829581994, + "acc_norm_stderr": 0.028397944907806612 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.47085201793721976, + "acc_stderr": 0.03350073248773404, + "acc_norm": 0.47085201793721976, + "acc_norm_stderr": 0.03350073248773404 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5343511450381679, + "acc_stderr": 0.04374928560599738, + "acc_norm": 0.5343511450381679, + "acc_norm_stderr": 0.04374928560599738 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6464646464646465, + "acc_stderr": 0.03406086723547153, + "acc_norm": 0.6464646464646465, + "acc_norm_stderr": 0.03406086723547153 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5103448275862069, + "acc_stderr": 0.04165774775728763, + "acc_norm": 0.5103448275862069, + "acc_norm_stderr": 0.04165774775728763 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.04336432707993177, + "acc_norm": 0.2549019607843137, + 
"acc_norm_stderr": 0.04336432707993177 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4579831932773109, + "acc_stderr": 0.032363611119519416, + "acc_norm": 0.4579831932773109, + "acc_norm_stderr": 0.032363611119519416 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.48205128205128206, + "acc_stderr": 0.02533466708095495, + "acc_norm": 0.48205128205128206, + "acc_norm_stderr": 0.02533466708095495 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.59, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.59, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5462962962962963, + "acc_stderr": 0.04812917324536823, + "acc_norm": 0.5462962962962963, + "acc_norm_stderr": 0.04812917324536823 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39901477832512317, + "acc_stderr": 0.034454876862647144, + "acc_norm": 0.39901477832512317, + "acc_norm_stderr": 0.034454876862647144 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4838709677419355, + "acc_stderr": 0.028429203176724555, + "acc_norm": 0.4838709677419355, + "acc_norm_stderr": 0.028429203176724555 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.717948717948718, + "acc_stderr": 0.029480360549541194, + "acc_norm": 0.717948717948718, + "acc_norm_stderr": 0.029480360549541194 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5056603773584906, + "acc_stderr": 0.030770900763851302, + "acc_norm": 0.5056603773584906, + "acc_norm_stderr": 0.030770900763851302 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5272727272727272, + "acc_stderr": 0.04782001791380061, + "acc_norm": 0.5272727272727272, + "acc_norm_stderr": 0.04782001791380061 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2962962962962963, + "acc_stderr": 
0.027840811495871927, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.027840811495871927 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.36423841059602646, + "acc_stderr": 0.03929111781242742, + "acc_norm": 0.36423841059602646, + "acc_norm_stderr": 0.03929111781242742 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6069651741293532, + "acc_stderr": 0.0345368246603156, + "acc_norm": 0.6069651741293532, + "acc_norm_stderr": 0.0345368246603156 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4277456647398844, + "acc_stderr": 0.03772446857518027, + "acc_norm": 0.4277456647398844, + "acc_norm_stderr": 0.03772446857518027 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3439153439153439, + "acc_stderr": 0.024464426625596437, + "acc_norm": 0.3439153439153439, + "acc_norm_stderr": 0.024464426625596437 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4305555555555556, + "acc_stderr": 0.04140685639111503, + "acc_norm": 0.4305555555555556, + "acc_norm_stderr": 0.04140685639111503 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.56, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.56, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5404624277456648, + "acc_stderr": 0.02683080599895224, + "acc_norm": 0.5404624277456648, + "acc_norm_stderr": 0.02683080599895224 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.48466257668711654, + "acc_stderr": 0.039265223787088445, + "acc_norm": 0.48466257668711654, + "acc_norm_stderr": 0.039265223787088445 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.49382716049382713, + "acc_stderr": 0.027818623962583295, + "acc_norm": 0.49382716049382713, + "acc_norm_stderr": 0.027818623962583295 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + 
"acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.616580310880829, + "acc_stderr": 0.03508984236295342, + "acc_norm": 0.616580310880829, + "acc_norm_stderr": 0.03508984236295342 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.044346007015849245, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.044346007015849245 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6275229357798165, + "acc_stderr": 0.020728368457638494, + "acc_norm": 0.6275229357798165, + "acc_norm_stderr": 0.020728368457638494 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30158730158730157, + "acc_stderr": 0.041049472699033945, + "acc_norm": 0.30158730158730157, + "acc_norm_stderr": 0.041049472699033945 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.46405228758169936, + "acc_stderr": 0.028555827516528787, + "acc_norm": 0.46405228758169936, + "acc_norm_stderr": 0.028555827516528787 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.57, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.57, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6776859504132231, + "acc_stderr": 0.042664163633521685, + "acc_norm": 0.6776859504132231, + "acc_norm_stderr": 0.042664163633521685 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.48026315789473684, + "acc_stderr": 0.04065771002562605, + "acc_norm": 0.48026315789473684, + "acc_norm_stderr": 0.04065771002562605 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.43300653594771243, + "acc_stderr": 0.02004544247332422, + "acc_norm": 0.43300653594771243, + "acc_norm_stderr": 0.02004544247332422 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.02812163604063989, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.02812163604063989 + }, + 
"harness|ko_mmlu_machine_learning|5": { + "acc": 0.30357142857142855, + "acc_stderr": 0.04364226155841044, + "acc_norm": 0.30357142857142855, + "acc_norm_stderr": 0.04364226155841044 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.032568505702936464, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.032568505702936464 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.26033519553072626, + "acc_stderr": 0.014676252009319482, + "acc_norm": 0.26033519553072626, + "acc_norm_stderr": 0.014676252009319482 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.44485294117647056, + "acc_stderr": 0.030187532060329383, + "acc_norm": 0.44485294117647056, + "acc_norm_stderr": 0.030187532060329383 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5061224489795918, + "acc_stderr": 0.032006820201639086, + "acc_norm": 0.5061224489795918, + "acc_norm_stderr": 0.032006820201639086 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6160337552742616, + "acc_stderr": 0.031658678064106674, + "acc_norm": 0.6160337552742616, + "acc_norm_stderr": 0.031658678064106674 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3513689700130378, + "acc_stderr": 0.012192969457484042, + "acc_norm": 0.3513689700130378, + "acc_norm_stderr": 0.012192969457484042 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.553921568627451, + "acc_stderr": 0.034888454513049734, + "acc_norm": 0.553921568627451, + "acc_norm_stderr": 0.034888454513049734 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5636363636363636, + "acc_stderr": 
0.03872592983524754, + "acc_norm": 0.5636363636363636, + "acc_norm_stderr": 0.03872592983524754 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.26805385556915545, + "mc1_stderr": 0.01550620472283456, + "mc2": 0.4031449478693446, + "mc2_stderr": 0.014887476682682517 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5903187721369539, + "acc_stderr": 0.016907568192219478, + "acc_norm": 0.6646989374262101, + "acc_norm_stderr": 0.01623098123298981 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + 
"harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "kyujinpy/Ko-PlatYi-6B", + "model_sha": "8be4a0d75b1fd70867e5f27184e2eb4e9a72dc37", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/kyujinpy/KoR-Orca-Platypus-13B/result_2023-10-14 11:37:08.json b/kyujinpy/KoR-Orca-Platypus-13B/result_2023-10-14 11:37:08.json new file mode 100644 index 0000000000000000000000000000000000000000..ed51c5ea8336cd0f01cc81e3e049af5917745f21 --- /dev/null +++ b/kyujinpy/KoR-Orca-Platypus-13B/result_2023-10-14 11:37:08.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.36860068259385664, + "acc_stderr": 0.0140978106780422, + "acc_norm": 0.4206484641638225, 
+ "acc_norm_stderr": 0.014426211252508394 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4047002589125672, + "acc_stderr": 0.004898308167211838, + "acc_norm": 0.5395339573790081, + "acc_norm_stderr": 0.004974159561342694 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4853801169590643, + "acc_stderr": 0.038331852752130205, + "acc_norm": 0.4853801169590643, + "acc_norm_stderr": 0.038331852752130205 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5533980582524272, + "acc_stderr": 0.04922424153458934, + "acc_norm": 0.5533980582524272, + "acc_norm_stderr": 0.04922424153458934 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5095785440613027, + "acc_stderr": 0.01787668227534086, + "acc_norm": 0.5095785440613027, + "acc_norm_stderr": 0.01787668227534086 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.42962962962962964, + "acc_stderr": 0.04276349494376599, + "acc_norm": 0.42962962962962964, + "acc_norm_stderr": 0.04276349494376599 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.33191489361702126, + "acc_stderr": 0.030783736757745657, + "acc_norm": 0.33191489361702126, + "acc_norm_stderr": 0.030783736757745657 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3614457831325301, + "acc_stderr": 0.037400593820293204, + "acc_norm": 0.3614457831325301, + "acc_norm_stderr": 0.037400593820293204 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4983922829581994, + "acc_stderr": 0.02839794490780661, + "acc_norm": 0.4983922829581994, + "acc_norm_stderr": 0.02839794490780661 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4977578475336323, + "acc_stderr": 0.03355746535223263, + "acc_norm": 0.4977578475336323, + "acc_norm_stderr": 0.03355746535223263 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.44274809160305345, + "acc_stderr": 0.0435644720266507, + "acc_norm": 
0.44274809160305345, + "acc_norm_stderr": 0.0435644720266507 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.03547601494006938, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.03547601494006938 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4482758620689655, + "acc_stderr": 0.04144311810878151, + "acc_norm": 0.4482758620689655, + "acc_norm_stderr": 0.04144311810878151 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.04280105837364396, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.04280105837364396 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.453781512605042, + "acc_stderr": 0.032339434681820885, + "acc_norm": 0.453781512605042, + "acc_norm_stderr": 0.032339434681820885 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4230769230769231, + "acc_stderr": 0.025049197876042335, + "acc_norm": 0.4230769230769231, + "acc_norm_stderr": 0.025049197876042335 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.41, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.41, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.04820403072760626, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.04820403072760626 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3694581280788177, + "acc_stderr": 0.03395970381998576, + "acc_norm": 0.3694581280788177, + "acc_norm_stderr": 0.03395970381998576 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.43548387096774194, + "acc_stderr": 
0.028206225591502744, + "acc_norm": 0.43548387096774194, + "acc_norm_stderr": 0.028206225591502744 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6452991452991453, + "acc_stderr": 0.03134250486245402, + "acc_norm": 0.6452991452991453, + "acc_norm_stderr": 0.03134250486245402 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4226415094339623, + "acc_stderr": 0.03040233144576954, + "acc_norm": 0.4226415094339623, + "acc_norm_stderr": 0.03040233144576954 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5, + "acc_stderr": 0.04789131426105757, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04789131426105757 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.028317533496066475, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.028317533496066475 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.03757949922943343, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.03757949922943343 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5522388059701493, + "acc_stderr": 0.035161847729521675, + "acc_norm": 0.5522388059701493, + "acc_norm_stderr": 0.035161847729521675 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3930635838150289, + "acc_stderr": 0.0372424959581773, + "acc_norm": 0.3930635838150289, + "acc_norm_stderr": 0.0372424959581773 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.36772486772486773, + "acc_stderr": 0.024833839825562427, + "acc_norm": 0.36772486772486773, + "acc_norm_stderr": 0.024833839825562427 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3541666666666667, + "acc_stderr": 0.039994111357535424, + "acc_norm": 0.3541666666666667, + "acc_norm_stderr": 0.039994111357535424 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.59, + 
"acc_stderr": 0.04943110704237102, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4624277456647399, + "acc_stderr": 0.026842985519615375, + "acc_norm": 0.4624277456647399, + "acc_norm_stderr": 0.026842985519615375 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4601226993865031, + "acc_stderr": 0.0391585729143697, + "acc_norm": 0.4601226993865031, + "acc_norm_stderr": 0.0391585729143697 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4783950617283951, + "acc_stderr": 0.027794760105008746, + "acc_norm": 0.4783950617283951, + "acc_norm_stderr": 0.027794760105008746 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.49222797927461137, + "acc_stderr": 0.03608003225569654, + "acc_norm": 0.49222797927461137, + "acc_norm_stderr": 0.03608003225569654 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.21052631578947367, + "acc_stderr": 0.0383515395439942, + "acc_norm": 0.21052631578947367, + "acc_norm_stderr": 0.0383515395439942 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5376146788990825, + "acc_stderr": 0.021376575274397576, + "acc_norm": 0.5376146788990825, + "acc_norm_stderr": 0.021376575274397576 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.42063492063492064, + "acc_stderr": 0.04415438226743744, + "acc_norm": 0.42063492063492064, + "acc_norm_stderr": 0.04415438226743744 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.42483660130718953, + "acc_stderr": 0.028304576673141107, + "acc_norm": 0.42483660130718953, + "acc_norm_stderr": 0.028304576673141107 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 
0.5950413223140496, + "acc_stderr": 0.04481137755942469, + "acc_norm": 0.5950413223140496, + "acc_norm_stderr": 0.04481137755942469 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4407894736842105, + "acc_stderr": 0.04040311062490436, + "acc_norm": 0.4407894736842105, + "acc_norm_stderr": 0.04040311062490436 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.36437908496732024, + "acc_stderr": 0.019469518221573702, + "acc_norm": 0.36437908496732024, + "acc_norm_stderr": 0.019469518221573702 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3404255319148936, + "acc_stderr": 0.028267657482650147, + "acc_norm": 0.3404255319148936, + "acc_norm_stderr": 0.028267657482650147 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2767857142857143, + "acc_stderr": 0.042466243366976256, + "acc_norm": 0.2767857142857143, + "acc_norm_stderr": 0.042466243366976256 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.375, + "acc_stderr": 0.033016908987210894, + "acc_norm": 0.375, + "acc_norm_stderr": 0.033016908987210894 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24804469273743016, + "acc_stderr": 0.014444157808261453, + "acc_norm": 0.24804469273743016, + "acc_norm_stderr": 0.014444157808261453 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3382352941176471, + "acc_stderr": 0.028739328513983583, + "acc_norm": 0.3382352941176471, + "acc_norm_stderr": 0.028739328513983583 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.47346938775510206, + "acc_stderr": 0.03196412734523272, + "acc_norm": 0.47346938775510206, + "acc_norm_stderr": 0.03196412734523272 + }, + 
"harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.569620253164557, + "acc_stderr": 0.03223017195937599, + "acc_norm": 0.569620253164557, + "acc_norm_stderr": 0.03223017195937599 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.35658409387222945, + "acc_stderr": 0.012233642989273891, + "acc_norm": 0.35658409387222945, + "acc_norm_stderr": 0.012233642989273891 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.45098039215686275, + "acc_stderr": 0.03492406104163614, + "acc_norm": 0.45098039215686275, + "acc_norm_stderr": 0.03492406104163614 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.49696969696969695, + "acc_stderr": 0.03904272341431857, + "acc_norm": 0.49696969696969695, + "acc_norm_stderr": 0.03904272341431857 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2766217870257038, + "mc1_stderr": 0.015659605755326905, + "mc2": 0.43550201857978377, + "mc2_stderr": 0.015311053526638174 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4498229043683589, + "acc_stderr": 0.017103573343825708, + "acc_norm": 0.5112160566706021, + "acc_norm_stderr": 0.017186028469489294 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 
1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "kyujinpy/KoR-Orca-Platypus-13B", + "model_sha": "66063590ce01dc70a30bcf04f1f8addd7e72f73b", + "model_dtype": "torch.float16", + 
"lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/kyujinpy/KoT-platypus2-13B/result_2023-10-07 18:04:13.json b/kyujinpy/KoT-platypus2-13B/result_2023-10-07 18:04:13.json new file mode 100644 index 0000000000000000000000000000000000000000..be130bdbcdcda34f4c76c8830cf54dbeebf8b128 --- /dev/null +++ b/kyujinpy/KoT-platypus2-13B/result_2023-10-07 18:04:13.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.371160409556314, + "acc_stderr": 0.014117971901142818, + "acc_norm": 0.43686006825938567, + "acc_norm_stderr": 0.014494421584256515 + }, + "harness|ko_hellaswag|10": { + "acc": 0.40221071499701255, + "acc_stderr": 0.004893418929918276, + "acc_norm": 0.5304720175263892, + "acc_norm_stderr": 0.004980506329407588 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.52046783625731, + "acc_stderr": 0.038316105328219316, + "acc_norm": 0.52046783625731, + "acc_norm_stderr": 0.038316105328219316 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5145631067961165, + "acc_stderr": 0.04948637324026637, + "acc_norm": 0.5145631067961165, + "acc_norm_stderr": 0.04948637324026637 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5172413793103449, + "acc_stderr": 0.017869330154003698, + "acc_norm": 0.5172413793103449, + "acc_norm_stderr": 0.017869330154003698 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45925925925925926, + "acc_stderr": 0.04304979692464243, + "acc_norm": 0.45925925925925926, + "acc_norm_stderr": 0.04304979692464243 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.35319148936170214, + "acc_stderr": 0.03124532520276193, + "acc_norm": 0.35319148936170214, + "acc_norm_stderr": 0.03124532520276193 + }, + "harness|ko_mmlu_virology|5": 
{ + "acc": 0.3674698795180723, + "acc_stderr": 0.03753267402120574, + "acc_norm": 0.3674698795180723, + "acc_norm_stderr": 0.03753267402120574 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.45016077170418006, + "acc_stderr": 0.02825666072336018, + "acc_norm": 0.45016077170418006, + "acc_norm_stderr": 0.02825666072336018 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4080717488789238, + "acc_stderr": 0.03298574607842821, + "acc_norm": 0.4080717488789238, + "acc_norm_stderr": 0.03298574607842821 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.44274809160305345, + "acc_stderr": 0.043564472026650695, + "acc_norm": 0.44274809160305345, + "acc_norm_stderr": 0.043564472026650695 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5202020202020202, + "acc_stderr": 0.03559443565563918, + "acc_norm": 0.5202020202020202, + "acc_norm_stderr": 0.03559443565563918 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4206896551724138, + "acc_stderr": 0.0411391498118926, + "acc_norm": 0.4206896551724138, + "acc_norm_stderr": 0.0411391498118926 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.04389869956808777, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.04389869956808777 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4117647058823529, + "acc_stderr": 0.03196876989195779, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.03196876989195779 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4025641025641026, + "acc_stderr": 0.02486499515976777, + "acc_norm": 0.4025641025641026, + "acc_norm_stderr": 0.02486499515976777 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956911 + }, + 
"harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5462962962962963, + "acc_stderr": 0.04812917324536823, + "acc_norm": 0.5462962962962963, + "acc_norm_stderr": 0.04812917324536823 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.35467980295566504, + "acc_stderr": 0.03366124489051448, + "acc_norm": 0.35467980295566504, + "acc_norm_stderr": 0.03366124489051448 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4483870967741935, + "acc_stderr": 0.028292056830112735, + "acc_norm": 0.4483870967741935, + "acc_norm_stderr": 0.028292056830112735 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6068376068376068, + "acc_stderr": 0.03199957924651048, + "acc_norm": 0.6068376068376068, + "acc_norm_stderr": 0.03199957924651048 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4377358490566038, + "acc_stderr": 0.03053333843046751, + "acc_norm": 0.4377358490566038, + "acc_norm_stderr": 0.03053333843046751 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5545454545454546, + "acc_stderr": 0.047605488214603246, + "acc_norm": 0.5545454545454546, + "acc_norm_stderr": 0.047605488214603246 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.29259259259259257, + "acc_stderr": 0.02773896963217609, + "acc_norm": 0.29259259259259257, + "acc_norm_stderr": 0.02773896963217609 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33774834437086093, + "acc_stderr": 0.038615575462551684, + "acc_norm": 0.33774834437086093, + "acc_norm_stderr": 0.038615575462551684 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5920398009950248, + "acc_stderr": 0.03475116365194092, + "acc_norm": 0.5920398009950248, + "acc_norm_stderr": 0.03475116365194092 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4046242774566474, + "acc_stderr": 0.0374246119388725, + "acc_norm": 
0.4046242774566474, + "acc_norm_stderr": 0.0374246119388725 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2804232804232804, + "acc_stderr": 0.02313528797432564, + "acc_norm": 0.2804232804232804, + "acc_norm_stderr": 0.02313528797432564 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.375, + "acc_stderr": 0.04048439222695598, + "acc_norm": 0.375, + "acc_norm_stderr": 0.04048439222695598 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.65, + "acc_stderr": 0.04793724854411018, + "acc_norm": 0.65, + "acc_norm_stderr": 0.04793724854411018 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.47398843930635837, + "acc_stderr": 0.02688264343402289, + "acc_norm": 0.47398843930635837, + "acc_norm_stderr": 0.02688264343402289 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4171779141104294, + "acc_stderr": 0.038741028598180814, + "acc_norm": 0.4171779141104294, + "acc_norm_stderr": 0.038741028598180814 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4567901234567901, + "acc_stderr": 0.027716661650194048, + "acc_norm": 0.4567901234567901, + "acc_norm_stderr": 0.027716661650194048 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5233160621761658, + "acc_stderr": 0.03604513672442202, + "acc_norm": 0.5233160621761658, + "acc_norm_stderr": 0.03604513672442202 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.22807017543859648, + "acc_stderr": 0.03947152782669415, + "acc_norm": 0.22807017543859648, + "acc_norm_stderr": 0.03947152782669415 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5541284403669725, + "acc_stderr": 0.02131133500970857, + "acc_norm": 
0.5541284403669725, + "acc_norm_stderr": 0.02131133500970857 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.31746031746031744, + "acc_stderr": 0.0416345303130286, + "acc_norm": 0.31746031746031744, + "acc_norm_stderr": 0.0416345303130286 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.434640522875817, + "acc_stderr": 0.028384256704883037, + "acc_norm": 0.434640522875817, + "acc_norm_stderr": 0.028384256704883037 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.043913262867240704, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.043913262867240704 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4144736842105263, + "acc_stderr": 0.04008973785779206, + "acc_norm": 0.4144736842105263, + "acc_norm_stderr": 0.04008973785779206 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3562091503267974, + "acc_stderr": 0.0193733324207245, + "acc_norm": 0.3562091503267974, + "acc_norm_stderr": 0.0193733324207245 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.34397163120567376, + "acc_stderr": 0.028338017428611317, + "acc_norm": 0.34397163120567376, + "acc_norm_stderr": 0.028338017428611317 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.22321428571428573, + "acc_stderr": 0.039523019677025116, + "acc_norm": 0.22321428571428573, + "acc_norm_stderr": 0.039523019677025116 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.03275773486100999, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.03275773486100999 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2558659217877095, + "acc_stderr": 0.014593620923210756, + "acc_norm": 0.2558659217877095, + "acc_norm_stderr": 0.014593620923210756 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.42, + 
"acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.40441176470588236, + "acc_stderr": 0.029812630701569743, + "acc_norm": 0.40441176470588236, + "acc_norm_stderr": 0.029812630701569743 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4775510204081633, + "acc_stderr": 0.031976941187136725, + "acc_norm": 0.4775510204081633, + "acc_norm_stderr": 0.031976941187136725 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5864978902953587, + "acc_stderr": 0.03205649904851858, + "acc_norm": 0.5864978902953587, + "acc_norm_stderr": 0.03205649904851858 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3683181225554107, + "acc_stderr": 0.01231940336956464, + "acc_norm": 0.3683181225554107, + "acc_norm_stderr": 0.01231940336956464 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.034849415144292316, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.034849415144292316 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.47878787878787876, + "acc_stderr": 0.03900828913737302, + "acc_norm": 0.47878787878787876, + "acc_norm_stderr": 0.03900828913737302 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2668298653610771, + "mc1_stderr": 0.015483691939237272, + "mc2": 0.4334291763920242, + "mc2_stderr": 0.014968924711902113 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3872491145218418, + "acc_stderr": 0.016747577991642792, + "acc_norm": 0.44391971664698937, + "acc_norm_stderr": 0.017081884623542543 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + 
"harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + 
"harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "kyujinpy/KoT-platypus2-13B", + "model_sha": "1d45520e4c2a4b5dc52dcafb788efb2420ad20b5", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/kyujinpy/KoT-platypus2-7B/result_2023-09-30 05:32:02.json b/kyujinpy/KoT-platypus2-7B/result_2023-09-30 05:32:02.json new file mode 100644 index 0000000000000000000000000000000000000000..f4c86b8d5cd7365d2ec7b6cc9bab1f1425013b47 --- /dev/null +++ b/kyujinpy/KoT-platypus2-7B/result_2023-09-30 05:32:02.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3225255972696246, + "acc_stderr": 0.013659980894277375, + "acc_norm": 0.38054607508532423, + "acc_norm_stderr": 0.014188277712349819 + }, + "harness|ko_hellaswag|10": { + "acc": 0.38179645488946423, + "acc_stderr": 0.004848341560492151, + "acc_norm": 0.4963154750049791, + "acc_norm_stderr": 0.004989645929811438 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.42105263157894735, + "acc_stderr": 0.037867207062342145, + "acc_norm": 0.42105263157894735, + "acc_norm_stderr": 0.037867207062342145 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.34951456310679613, + "acc_stderr": 0.04721188506097172, + "acc_norm": 0.34951456310679613, + "acc_norm_stderr": 0.04721188506097172 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.39208173690932313, + 
"acc_stderr": 0.01745852405014764, + "acc_norm": 0.39208173690932313, + "acc_norm_stderr": 0.01745852405014764 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.04171654161354544, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.04171654161354544 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2723404255319149, + "acc_stderr": 0.029101290698386705, + "acc_norm": 0.2723404255319149, + "acc_norm_stderr": 0.029101290698386705 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3253012048192771, + "acc_stderr": 0.03647168523683227, + "acc_norm": 0.3253012048192771, + "acc_norm_stderr": 0.03647168523683227 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.39228295819935693, + "acc_stderr": 0.027731258647011998, + "acc_norm": 0.39228295819935693, + "acc_norm_stderr": 0.027731258647011998 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.40358744394618834, + "acc_stderr": 0.03292802819330313, + "acc_norm": 0.40358744394618834, + "acc_norm_stderr": 0.03292802819330313 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4961832061068702, + "acc_stderr": 0.043851623256015534, + "acc_norm": 0.4961832061068702, + "acc_norm_stderr": 0.043851623256015534 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.0347327959083696, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.0347327959083696 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.43448275862068964, + "acc_stderr": 0.04130740879555497, + "acc_norm": 0.43448275862068964, + "acc_norm_stderr": 0.04130740879555497 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 
0.21568627450980393, + "acc_stderr": 0.04092563958237654, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237654 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3739495798319328, + "acc_stderr": 0.031429466378837076, + "acc_norm": 0.3739495798319328, + "acc_norm_stderr": 0.031429466378837076 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.31794871794871793, + "acc_stderr": 0.02361088430892786, + "acc_norm": 0.31794871794871793, + "acc_norm_stderr": 0.02361088430892786 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.0471282125742677, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.0471282125742677 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.26108374384236455, + "acc_stderr": 0.030903796952114485, + "acc_norm": 0.26108374384236455, + "acc_norm_stderr": 0.030903796952114485 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.34516129032258064, + "acc_stderr": 0.027045746573534327, + "acc_norm": 0.34516129032258064, + "acc_norm_stderr": 0.027045746573534327 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5, + "acc_stderr": 0.03275608910402091, + "acc_norm": 0.5, + "acc_norm_stderr": 0.03275608910402091 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3433962264150943, + "acc_stderr": 0.029224526469124792, + "acc_norm": 0.3433962264150943, + "acc_norm_stderr": 0.029224526469124792 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.35454545454545455, + "acc_stderr": 0.04582004841505415, + "acc_norm": 0.35454545454545455, + "acc_norm_stderr": 0.04582004841505415 + }, + 
"harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24444444444444444, + "acc_stderr": 0.02620276653465215, + "acc_norm": 0.24444444444444444, + "acc_norm_stderr": 0.02620276653465215 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2781456953642384, + "acc_stderr": 0.03658603262763743, + "acc_norm": 0.2781456953642384, + "acc_norm_stderr": 0.03658603262763743 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.43781094527363185, + "acc_stderr": 0.035080801121998406, + "acc_norm": 0.43781094527363185, + "acc_norm_stderr": 0.035080801121998406 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3063583815028902, + "acc_stderr": 0.03514942551267437, + "acc_norm": 0.3063583815028902, + "acc_norm_stderr": 0.03514942551267437 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.24338624338624337, + "acc_stderr": 0.022101128787415426, + "acc_norm": 0.24338624338624337, + "acc_norm_stderr": 0.022101128787415426 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3263888888888889, + "acc_stderr": 0.03921067198982266, + "acc_norm": 0.3263888888888889, + "acc_norm_stderr": 0.03921067198982266 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932269, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932269 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4161849710982659, + "acc_stderr": 0.02653818910470548, + "acc_norm": 0.4161849710982659, + "acc_norm_stderr": 0.02653818910470548 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.32515337423312884, + "acc_stderr": 0.03680350371286461, + "acc_norm": 0.32515337423312884, + "acc_norm_stderr": 0.03680350371286461 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.39197530864197533, + "acc_stderr": 0.02716368603827123, + "acc_norm": 0.39197530864197533, + 
"acc_norm_stderr": 0.02716368603827123 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.42487046632124353, + "acc_stderr": 0.0356747133521254, + "acc_norm": 0.42487046632124353, + "acc_norm_stderr": 0.0356747133521254 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.04049339297748141, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.04049339297748141 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3926605504587156, + "acc_stderr": 0.020937505161201093, + "acc_norm": 0.3926605504587156, + "acc_norm_stderr": 0.020937505161201093 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.25396825396825395, + "acc_stderr": 0.038932596106046734, + "acc_norm": 0.25396825396825395, + "acc_norm_stderr": 0.038932596106046734 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4084967320261438, + "acc_stderr": 0.028146405993096358, + "acc_norm": 0.4084967320261438, + "acc_norm_stderr": 0.028146405993096358 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.49586776859504134, + "acc_stderr": 0.04564198767432754, + "acc_norm": 0.49586776859504134, + "acc_norm_stderr": 0.04564198767432754 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.27631578947368424, + "acc_stderr": 0.03639057569952925, + "acc_norm": 0.27631578947368424, + "acc_norm_stderr": 0.03639057569952925 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3284313725490196, + "acc_stderr": 0.01899970738316266, + "acc_norm": 0.3284313725490196, + "acc_norm_stderr": 0.01899970738316266 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2765957446808511, + "acc_stderr": 
0.026684564340460997, + "acc_norm": 0.2765957446808511, + "acc_norm_stderr": 0.026684564340460997 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.30357142857142855, + "acc_stderr": 0.04364226155841044, + "acc_norm": 0.30357142857142855, + "acc_norm_stderr": 0.04364226155841044 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3425925925925926, + "acc_stderr": 0.03236585252602157, + "acc_norm": 0.3425925925925926, + "acc_norm_stderr": 0.03236585252602157 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2569832402234637, + "acc_stderr": 0.014614465821966344, + "acc_norm": 0.2569832402234637, + "acc_norm_stderr": 0.014614465821966344 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.41911764705882354, + "acc_stderr": 0.02997280717046463, + "acc_norm": 0.41911764705882354, + "acc_norm_stderr": 0.02997280717046463 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.39183673469387753, + "acc_stderr": 0.031251275910891656, + "acc_norm": 0.39183673469387753, + "acc_norm_stderr": 0.031251275910891656 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.48945147679324896, + "acc_stderr": 0.032539983791662855, + "acc_norm": 0.48945147679324896, + "acc_norm_stderr": 0.032539983791662855 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.32529335071707954, + "acc_stderr": 0.01196531153657153, + "acc_norm": 0.32529335071707954, + "acc_norm_stderr": 0.01196531153657153 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.35784313725490197, + "acc_stderr": 0.033644872860882996, + "acc_norm": 0.35784313725490197, + "acc_norm_stderr": 0.033644872860882996 + }, + 
"harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3393939393939394, + "acc_stderr": 0.03697442205031596, + "acc_norm": 0.3393939393939394, + "acc_norm_stderr": 0.03697442205031596 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24112607099143207, + "mc1_stderr": 0.014974827279752332, + "mc2": 0.37686510476734664, + "mc2_stderr": 0.014752533377181794 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.21959858323494688, + "acc_stderr": 0.01423274308558027, + "acc_norm": 0.30932703659976385, + "acc_norm_stderr": 0.015891320505520893 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + 
"harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "kyujinpy/KoT-platypus2-7B", + "model_sha": "33eb53d72129db3b1936f07fd894a18b571d7ab6", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/kyujinpy/Korean-OpenOrca-13B-v2/result_2023-10-30 20:07:34.json b/kyujinpy/Korean-OpenOrca-13B-v2/result_2023-10-30 20:07:34.json new file mode 100644 index 0000000000000000000000000000000000000000..83c0c1697d491e2053d05bb361947acec0bfaa6e --- /dev/null +++ b/kyujinpy/Korean-OpenOrca-13B-v2/result_2023-10-30 20:07:34.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 
0.3848122866894198, + "acc_stderr": 0.014218371065251098, + "acc_norm": 0.431740614334471, + "acc_norm_stderr": 0.014474591427196202 + }, + "harness|ko_hellaswag|10": { + "acc": 0.410973909579765, + "acc_stderr": 0.004910049928688081, + "acc_norm": 0.5451105357498506, + "acc_norm_stderr": 0.004969431900874307 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5029239766081871, + "acc_stderr": 0.03834759370936839, + "acc_norm": 0.5029239766081871, + "acc_norm_stderr": 0.03834759370936839 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6213592233009708, + "acc_stderr": 0.04802694698258974, + "acc_norm": 0.6213592233009708, + "acc_norm_stderr": 0.04802694698258974 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5210727969348659, + "acc_stderr": 0.017864076786212914, + "acc_norm": 0.5210727969348659, + "acc_norm_stderr": 0.017864076786212914 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3851851851851852, + "acc_stderr": 0.042039210401562783, + "acc_norm": 0.3851851851851852, + "acc_norm_stderr": 0.042039210401562783 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3574468085106383, + "acc_stderr": 0.03132941789476425, + "acc_norm": 0.3574468085106383, + "acc_norm_stderr": 0.03132941789476425 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.41566265060240964, + "acc_stderr": 0.03836722176598053, + "acc_norm": 0.41566265060240964, + "acc_norm_stderr": 0.03836722176598053 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.48231511254019294, + "acc_stderr": 0.02838032284907713, + "acc_norm": 0.48231511254019294, + "acc_norm_stderr": 0.02838032284907713 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4304932735426009, + "acc_stderr": 0.033231973029429394, + "acc_norm": 0.4304932735426009, + "acc_norm_stderr": 0.033231973029429394 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 
0.48854961832061067, + "acc_stderr": 0.04384140024078016, + "acc_norm": 0.48854961832061067, + "acc_norm_stderr": 0.04384140024078016 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5606060606060606, + "acc_stderr": 0.035360859475294805, + "acc_norm": 0.5606060606060606, + "acc_norm_stderr": 0.035360859475294805 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.43448275862068964, + "acc_stderr": 0.04130740879555497, + "acc_norm": 0.43448275862068964, + "acc_norm_stderr": 0.04130740879555497 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.04023382273617747, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.04023382273617747 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4495798319327731, + "acc_stderr": 0.03231293497137707, + "acc_norm": 0.4495798319327731, + "acc_norm_stderr": 0.03231293497137707 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4461538461538462, + "acc_stderr": 0.025203571773028333, + "acc_norm": 0.4461538461538462, + "acc_norm_stderr": 0.025203571773028333 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.04793724854411018, + "acc_norm": 0.35, + "acc_norm_stderr": 0.04793724854411018 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.04832853553437056, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.04832853553437056 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3645320197044335, + "acc_stderr": 0.0338640574606209, + "acc_norm": 0.3645320197044335, + "acc_norm_stderr": 0.0338640574606209 + }, + 
"harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4838709677419355, + "acc_stderr": 0.028429203176724555, + "acc_norm": 0.4838709677419355, + "acc_norm_stderr": 0.028429203176724555 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6752136752136753, + "acc_stderr": 0.03067902276549883, + "acc_norm": 0.6752136752136753, + "acc_norm_stderr": 0.03067902276549883 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4679245283018868, + "acc_stderr": 0.030709486992556545, + "acc_norm": 0.4679245283018868, + "acc_norm_stderr": 0.030709486992556545 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5181818181818182, + "acc_stderr": 0.04785964010794916, + "acc_norm": 0.5181818181818182, + "acc_norm_stderr": 0.04785964010794916 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2814814814814815, + "acc_stderr": 0.02742001935094527, + "acc_norm": 0.2814814814814815, + "acc_norm_stderr": 0.02742001935094527 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.03780445850526733, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.03780445850526733 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5373134328358209, + "acc_stderr": 0.03525675167467975, + "acc_norm": 0.5373134328358209, + "acc_norm_stderr": 0.03525675167467975 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.42196531791907516, + "acc_stderr": 0.0376574669386515, + "acc_norm": 0.42196531791907516, + "acc_norm_stderr": 0.0376574669386515 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.023809523809523864, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.023809523809523864 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3958333333333333, + "acc_stderr": 0.04089465449325583, + "acc_norm": 0.3958333333333333, + "acc_norm_stderr": 0.04089465449325583 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + 
"acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.69, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.69, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.49421965317919075, + "acc_stderr": 0.026917296179149123, + "acc_norm": 0.49421965317919075, + "acc_norm_stderr": 0.026917296179149123 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.44171779141104295, + "acc_stderr": 0.03901591825836184, + "acc_norm": 0.44171779141104295, + "acc_norm_stderr": 0.03901591825836184 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.027815973433878014, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.027815973433878014 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421296, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421296 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5077720207253886, + "acc_stderr": 0.03608003225569654, + "acc_norm": 0.5077720207253886, + "acc_norm_stderr": 0.03608003225569654 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.041424397194893596, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.041424397194893596 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5357798165137615, + "acc_stderr": 0.021382364775701906, + "acc_norm": 0.5357798165137615, + "acc_norm_stderr": 0.021382364775701906 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3253968253968254, + "acc_stderr": 0.041905964388711366, + "acc_norm": 0.3253968253968254, + "acc_norm_stderr": 0.041905964388711366 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.46405228758169936, + "acc_stderr": 0.028555827516528787, + "acc_norm": 0.46405228758169936, + "acc_norm_stderr": 0.028555827516528787 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.46, + "acc_stderr": 
0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.628099173553719, + "acc_stderr": 0.044120158066245044, + "acc_norm": 0.628099173553719, + "acc_norm_stderr": 0.044120158066245044 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4144736842105263, + "acc_stderr": 0.04008973785779206, + "acc_norm": 0.4144736842105263, + "acc_norm_stderr": 0.04008973785779206 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3480392156862745, + "acc_stderr": 0.019270998708223974, + "acc_norm": 0.3480392156862745, + "acc_norm_stderr": 0.019270998708223974 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.33687943262411346, + "acc_stderr": 0.028195534873966734, + "acc_norm": 0.33687943262411346, + "acc_norm_stderr": 0.028195534873966734 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.22321428571428573, + "acc_stderr": 0.039523019677025116, + "acc_norm": 0.22321428571428573, + "acc_norm_stderr": 0.039523019677025116 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.032757734861009996, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.032757734861009996 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3897058823529412, + "acc_stderr": 0.02962466358115969, + "acc_norm": 0.3897058823529412, + "acc_norm_stderr": 0.02962466358115969 + }, + 
"harness|ko_mmlu_security_studies|5": { + "acc": 0.5469387755102041, + "acc_stderr": 0.031867859300041275, + "acc_norm": 0.5469387755102041, + "acc_norm_stderr": 0.031867859300041275 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5443037974683544, + "acc_stderr": 0.032419206846933335, + "acc_norm": 0.5443037974683544, + "acc_norm_stderr": 0.032419206846933335 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.30964797913950454, + "acc_stderr": 0.011808598262503316, + "acc_norm": 0.30964797913950454, + "acc_norm_stderr": 0.011808598262503316 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.45098039215686275, + "acc_stderr": 0.03492406104163614, + "acc_norm": 0.45098039215686275, + "acc_norm_stderr": 0.03492406104163614 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5272727272727272, + "acc_stderr": 0.03898531605579419, + "acc_norm": 0.5272727272727272, + "acc_norm_stderr": 0.03898531605579419 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2582619339045288, + "mc1_stderr": 0.01532182168847619, + "mc2": 0.4181669609619488, + "mc2_stderr": 0.015057490220303692 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5100354191263282, + "acc_stderr": 0.01718689128689406, + "acc_norm": 0.5844155844155844, + "acc_norm_stderr": 0.016943586313076565 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 
1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + 
"harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "kyujinpy/Korean-OpenOrca-13B-v2", + "model_sha": "2b0b46a6b5b6bbf41029a0918c49ac11456c3512", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/kyujinpy/Korean-OpenOrca-13B/result_2023-10-09 08:06:20.json b/kyujinpy/Korean-OpenOrca-13B/result_2023-10-09 08:06:20.json new file mode 100644 index 0000000000000000000000000000000000000000..32c71ba3b209566af1b4bc8c3d2b5b70ddb08820 --- /dev/null +++ b/kyujinpy/Korean-OpenOrca-13B/result_2023-10-09 08:06:20.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.37372013651877134, + "acc_stderr": 0.014137708601759091, + "acc_norm": 0.4308873720136519, + "acc_norm_stderr": 0.014471133392642475 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4080860386377216, + "acc_stderr": 0.004904747752286962, + "acc_norm": 0.5413264289982075, + "acc_norm_stderr": 0.0049727083696565425 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4619883040935672, + "acc_stderr": 0.03823727092882307, + "acc_norm": 0.4619883040935672, + "acc_norm_stderr": 0.03823727092882307 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5242718446601942, + "acc_stderr": 0.049449010929737795, + "acc_norm": 0.5242718446601942, + "acc_norm_stderr": 0.049449010929737795 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5223499361430396, + "acc_stderr": 0.01786209177850786, + "acc_norm": 0.5223499361430396, + "acc_norm_stderr": 0.01786209177850786 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.04244633238353229, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.04244633238353229 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + 
"harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3446808510638298, + "acc_stderr": 0.03106898596312215, + "acc_norm": 0.3446808510638298, + "acc_norm_stderr": 0.03106898596312215 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3855421686746988, + "acc_stderr": 0.03789134424611548, + "acc_norm": 0.3855421686746988, + "acc_norm_stderr": 0.03789134424611548 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4437299035369775, + "acc_stderr": 0.028217683556652315, + "acc_norm": 0.4437299035369775, + "acc_norm_stderr": 0.028217683556652315 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4439461883408072, + "acc_stderr": 0.03334625674242728, + "acc_norm": 0.4439461883408072, + "acc_norm_stderr": 0.03334625674242728 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48854961832061067, + "acc_stderr": 0.043841400240780176, + "acc_norm": 0.48854961832061067, + "acc_norm_stderr": 0.043841400240780176 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.34, + "acc_stderr": 0.047609522856952365, + "acc_norm": 0.34, + "acc_norm_stderr": 0.047609522856952365 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5252525252525253, + "acc_stderr": 0.035578062450873145, + "acc_norm": 0.5252525252525253, + "acc_norm_stderr": 0.035578062450873145 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.42758620689655175, + "acc_stderr": 0.041227371113703316, + "acc_norm": 0.42758620689655175, + "acc_norm_stderr": 0.041227371113703316 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237654, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237654 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.37815126050420167, + "acc_stderr": 0.031499305777849054, + "acc_norm": 0.37815126050420167, + "acc_norm_stderr": 0.031499305777849054 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4461538461538462, + "acc_stderr": 0.025203571773028333, + 
"acc_norm": 0.4461538461538462, + "acc_norm_stderr": 0.025203571773028333 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3497536945812808, + "acc_stderr": 0.03355400904969566, + "acc_norm": 0.3497536945812808, + "acc_norm_stderr": 0.03355400904969566 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.43870967741935485, + "acc_stderr": 0.02822949732031722, + "acc_norm": 0.43870967741935485, + "acc_norm_stderr": 0.02822949732031722 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6581196581196581, + "acc_stderr": 0.03107502852650775, + "acc_norm": 0.6581196581196581, + "acc_norm_stderr": 0.03107502852650775 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.44150943396226416, + "acc_stderr": 0.030561590426731833, + "acc_norm": 0.44150943396226416, + "acc_norm_stderr": 0.030561590426731833 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5, + "acc_stderr": 0.04789131426105757, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04789131426105757 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.27037037037037037, + "acc_stderr": 0.027080372815145654, + "acc_norm": 0.27037037037037037, + "acc_norm_stderr": 0.027080372815145654 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.26490066225165565, + "acc_stderr": 0.03603038545360384, + "acc_norm": 0.26490066225165565, + "acc_norm_stderr": 0.03603038545360384 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.48756218905472637, + "acc_stderr": 0.03534439848539579, + "acc_norm": 0.48756218905472637, + "acc_norm_stderr": 
0.03534439848539579 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3872832369942196, + "acc_stderr": 0.03714325906302064, + "acc_norm": 0.3872832369942196, + "acc_norm_stderr": 0.03714325906302064 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.291005291005291, + "acc_stderr": 0.02339382650048487, + "acc_norm": 0.291005291005291, + "acc_norm_stderr": 0.02339382650048487 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3680555555555556, + "acc_stderr": 0.04032999053960718, + "acc_norm": 0.3680555555555556, + "acc_norm_stderr": 0.04032999053960718 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.44508670520231214, + "acc_stderr": 0.02675625512966377, + "acc_norm": 0.44508670520231214, + "acc_norm_stderr": 0.02675625512966377 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.44171779141104295, + "acc_stderr": 0.039015918258361836, + "acc_norm": 0.44171779141104295, + "acc_norm_stderr": 0.039015918258361836 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4691358024691358, + "acc_stderr": 0.027767689606833935, + "acc_norm": 0.4691358024691358, + "acc_norm_stderr": 0.027767689606833935 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5025906735751295, + "acc_stderr": 0.03608390745384488, + "acc_norm": 0.5025906735751295, + "acc_norm_stderr": 0.03608390745384488 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.040969851398436716, + "acc_norm": 0.2543859649122807, + 
"acc_norm_stderr": 0.040969851398436716 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.4935779816513762, + "acc_stderr": 0.021435554820013077, + "acc_norm": 0.4935779816513762, + "acc_norm_stderr": 0.021435554820013077 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.039325376803928704, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.039325376803928704 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.40522875816993464, + "acc_stderr": 0.02811092849280907, + "acc_norm": 0.40522875816993464, + "acc_norm_stderr": 0.02811092849280907 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6198347107438017, + "acc_stderr": 0.04431324501968432, + "acc_norm": 0.6198347107438017, + "acc_norm_stderr": 0.04431324501968432 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.39473684210526316, + "acc_stderr": 0.039777499346220734, + "acc_norm": 0.39473684210526316, + "acc_norm_stderr": 0.039777499346220734 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.33986928104575165, + "acc_stderr": 0.01916241858862356, + "acc_norm": 0.33986928104575165, + "acc_norm_stderr": 0.01916241858862356 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3120567375886525, + "acc_stderr": 0.027640120545169934, + "acc_norm": 0.3120567375886525, + "acc_norm_stderr": 0.027640120545169934 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.24107142857142858, + "acc_stderr": 0.04059867246952687, + "acc_norm": 0.24107142857142858, + "acc_norm_stderr": 0.04059867246952687 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2638888888888889, + "acc_stderr": 0.030058202704309846, + "acc_norm": 0.2638888888888889, + "acc_norm_stderr": 0.030058202704309846 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24134078212290502, + 
"acc_stderr": 0.014310999547961443, + "acc_norm": 0.24134078212290502, + "acc_norm_stderr": 0.014310999547961443 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.28308823529411764, + "acc_stderr": 0.027365861131513805, + "acc_norm": 0.28308823529411764, + "acc_norm_stderr": 0.027365861131513805 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.40816326530612246, + "acc_stderr": 0.03146465712827424, + "acc_norm": 0.40816326530612246, + "acc_norm_stderr": 0.03146465712827424 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5527426160337553, + "acc_stderr": 0.03236564251614192, + "acc_norm": 0.5527426160337553, + "acc_norm_stderr": 0.03236564251614192 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.30247718383311606, + "acc_stderr": 0.0117315242341657, + "acc_norm": 0.30247718383311606, + "acc_norm_stderr": 0.0117315242341657 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.45098039215686275, + "acc_stderr": 0.03492406104163614, + "acc_norm": 0.45098039215686275, + "acc_norm_stderr": 0.03492406104163614 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4727272727272727, + "acc_stderr": 0.03898531605579419, + "acc_norm": 0.4727272727272727, + "acc_norm_stderr": 0.03898531605579419 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2864137086903305, + "mc1_stderr": 0.015826142439502332, + "mc2": 0.4522241098057631, + "mc2_stderr": 0.015289294572002421 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5301062573789846, + "acc_stderr": 0.017159163590170216, + "acc_norm": 0.6127508854781583, + "acc_norm_stderr": 0.01674757799164278 + } + }, + "versions": { + 
"all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + 
"harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "kyujinpy/Korean-OpenOrca-13B", + "model_sha": "1f0024f9356a1601ba642c01fd01b309c59b65b8", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/kyujinpy/Korean-OpenOrca-v3/result_2023-11-04 08:15:25.json b/kyujinpy/Korean-OpenOrca-v3/result_2023-11-04 08:15:25.json new file mode 100644 index 0000000000000000000000000000000000000000..fab9fcc32d7901ff3e3ada998a4a8dcb4316fe05 --- /dev/null +++ b/kyujinpy/Korean-OpenOrca-v3/result_2023-11-04 08:15:25.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.38822525597269625, + "acc_stderr": 0.014241614207414044, + "acc_norm": 0.4377133105802048, + "acc_norm_stderr": 0.014497573881108288 + }, + "harness|ko_hellaswag|10": { + "acc": 0.40938060147381, + "acc_stderr": 0.004907146229347557, + "acc_norm": 0.5430193188607847, + "acc_norm_stderr": 0.004971278309204196 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.52046783625731, + "acc_stderr": 0.038316105328219316, + "acc_norm": 0.52046783625731, + "acc_norm_stderr": 0.038316105328219316 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5339805825242718, 
+ "acc_stderr": 0.0493929144727348, + "acc_norm": 0.5339805825242718, + "acc_norm_stderr": 0.0493929144727348 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5274584929757343, + "acc_stderr": 0.01785298126663394, + "acc_norm": 0.5274584929757343, + "acc_norm_stderr": 0.01785298126663394 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37777777777777777, + "acc_stderr": 0.04188307537595853, + "acc_norm": 0.37777777777777777, + "acc_norm_stderr": 0.04188307537595853 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421255, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421255 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3829787234042553, + "acc_stderr": 0.03177821250236922, + "acc_norm": 0.3829787234042553, + "acc_norm_stderr": 0.03177821250236922 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4036144578313253, + "acc_stderr": 0.03819486140758398, + "acc_norm": 0.4036144578313253, + "acc_norm_stderr": 0.03819486140758398 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5080385852090032, + "acc_stderr": 0.028394421370984538, + "acc_norm": 0.5080385852090032, + "acc_norm_stderr": 0.028394421370984538 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4439461883408072, + "acc_stderr": 0.03334625674242728, + "acc_norm": 0.4439461883408072, + "acc_norm_stderr": 0.03334625674242728 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48091603053435117, + "acc_stderr": 0.04382094705550989, + "acc_norm": 0.48091603053435117, + "acc_norm_stderr": 0.04382094705550989 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.03540294377095368, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.03540294377095368 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4068965517241379, + 
"acc_stderr": 0.04093793981266237, + "acc_norm": 0.4068965517241379, + "acc_norm_stderr": 0.04093793981266237 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.04389869956808779, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.04389869956808779 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.44537815126050423, + "acc_stderr": 0.0322841062671639, + "acc_norm": 0.44537815126050423, + "acc_norm_stderr": 0.0322841062671639 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4564102564102564, + "acc_stderr": 0.02525448542479961, + "acc_norm": 0.4564102564102564, + "acc_norm_stderr": 0.02525448542479961 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3399014778325123, + "acc_stderr": 0.033327690684107895, + "acc_norm": 0.3399014778325123, + "acc_norm_stderr": 0.033327690684107895 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.45483870967741935, + "acc_stderr": 0.028327743091561056, + "acc_norm": 0.45483870967741935, + "acc_norm_stderr": 0.028327743091561056 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6581196581196581, + "acc_stderr": 0.03107502852650775, + "acc_norm": 0.6581196581196581, + "acc_norm_stderr": 0.03107502852650775 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.44150943396226416, + "acc_stderr": 0.03056159042673183, + "acc_norm": 0.44150943396226416, + "acc_norm_stderr": 0.03056159042673183 + }, + 
"harness|ko_mmlu_public_relations|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.0478833976870286, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.0478833976870286 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.29259259259259257, + "acc_stderr": 0.02773896963217609, + "acc_norm": 0.29259259259259257, + "acc_norm_stderr": 0.02773896963217609 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.037101857261199946, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.037101857261199946 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.527363184079602, + "acc_stderr": 0.035302355173346824, + "acc_norm": 0.527363184079602, + "acc_norm_stderr": 0.035302355173346824 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4393063583815029, + "acc_stderr": 0.037842719328874674, + "acc_norm": 0.4393063583815029, + "acc_norm_stderr": 0.037842719328874674 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.29894179894179895, + "acc_stderr": 0.023577604791655812, + "acc_norm": 0.29894179894179895, + "acc_norm_stderr": 0.023577604791655812 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.375, + "acc_stderr": 0.04048439222695598, + "acc_norm": 0.375, + "acc_norm_stderr": 0.04048439222695598 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4682080924855491, + "acc_stderr": 0.026864624366756653, + "acc_norm": 0.4682080924855491, + "acc_norm_stderr": 0.026864624366756653 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.44171779141104295, + "acc_stderr": 0.03901591825836183, + "acc_norm": 0.44171779141104295, + "acc_norm_stderr": 0.03901591825836183 + 
}, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.027777777777777797, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.027777777777777797 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542129, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542129 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.49740932642487046, + "acc_stderr": 0.03608390745384487, + "acc_norm": 0.49740932642487046, + "acc_norm_stderr": 0.03608390745384487 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.04142439719489361, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.04142439719489361 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5137614678899083, + "acc_stderr": 0.021429202089874075, + "acc_norm": 0.5137614678899083, + "acc_norm_stderr": 0.021429202089874075 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3253968253968254, + "acc_stderr": 0.04190596438871136, + "acc_norm": 0.3253968253968254, + "acc_norm_stderr": 0.04190596438871136 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.45098039215686275, + "acc_stderr": 0.02849199358617157, + "acc_norm": 0.45098039215686275, + "acc_norm_stderr": 0.02849199358617157 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.42, + "acc_stderr": 0.04960449637488584, + "acc_norm": 0.42, + "acc_norm_stderr": 0.04960449637488584 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6115702479338843, + "acc_stderr": 0.04449270350068382, + "acc_norm": 0.6115702479338843, + "acc_norm_stderr": 0.04449270350068382 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.42105263157894735, + "acc_stderr": 0.04017901275981747, + "acc_norm": 0.42105263157894735, + "acc_norm_stderr": 0.04017901275981747 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3382352941176471, + "acc_stderr": 0.019139943748487043, + "acc_norm": 0.3382352941176471, + 
"acc_norm_stderr": 0.019139943748487043 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.02812163604063988, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.02812163604063988 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.19642857142857142, + "acc_stderr": 0.03770970049347019, + "acc_norm": 0.19642857142857142, + "acc_norm_stderr": 0.03770970049347019 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.32407407407407407, + "acc_stderr": 0.03191923445686185, + "acc_norm": 0.32407407407407407, + "acc_norm_stderr": 0.03191923445686185 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.35661764705882354, + "acc_stderr": 0.02909720956841196, + "acc_norm": 0.35661764705882354, + "acc_norm_stderr": 0.02909720956841196 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.49387755102040815, + "acc_stderr": 0.032006820201639086, + "acc_norm": 0.49387755102040815, + "acc_norm_stderr": 0.032006820201639086 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5358649789029536, + "acc_stderr": 0.03246338898055659, + "acc_norm": 0.5358649789029536, + "acc_norm_stderr": 0.03246338898055659 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3161668839634941, + "acc_stderr": 0.011875780894386578, + "acc_norm": 0.3161668839634941, + "acc_norm_stderr": 0.011875780894386578 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4852941176470588, 
+ "acc_stderr": 0.03507793834791324, + "acc_norm": 0.4852941176470588, + "acc_norm_stderr": 0.03507793834791324 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.49696969696969695, + "acc_stderr": 0.03904272341431857, + "acc_norm": 0.49696969696969695, + "acc_norm_stderr": 0.03904272341431857 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2802937576499388, + "mc1_stderr": 0.015723139524608735, + "mc2": 0.43845744832254846, + "mc2_stderr": 0.015212073687467038 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5277449822904369, + "acc_stderr": 0.017163867979456016, + "acc_norm": 0.6056670602125147, + "acc_norm_stderr": 0.01680209067489322 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + 
"harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "kyujinpy/Korean-OpenOrca-v3", + "model_sha": "933525ee691e7002a63054208e012a6bf6b08623", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/kyujinpy/Kosy-Platypus2-13B/result_2023-10-24 13:04:50.json b/kyujinpy/Kosy-Platypus2-13B/result_2023-10-24 13:04:50.json new file mode 100644 index 0000000000000000000000000000000000000000..b80882a0f8a667ab68ec7939cc327642e240f2a8 --- /dev/null +++ 
b/kyujinpy/Kosy-Platypus2-13B/result_2023-10-24 13:04:50.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3839590443686007, + "acc_stderr": 0.01421244498065189, + "acc_norm": 0.439419795221843, + "acc_norm_stderr": 0.014503747823580122 + }, + "harness|ko_hellaswag|10": { + "acc": 0.40679147580163316, + "acc_stderr": 0.0049023140557255904, + "acc_norm": 0.5388368850826528, + "acc_norm_stderr": 0.00497470642843428 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5263157894736842, + "acc_stderr": 0.03829509868994727, + "acc_norm": 0.5263157894736842, + "acc_norm_stderr": 0.03829509868994727 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4563106796116505, + "acc_stderr": 0.049318019942204146, + "acc_norm": 0.4563106796116505, + "acc_norm_stderr": 0.049318019942204146 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5159642401021711, + "acc_stderr": 0.01787084750608174, + "acc_norm": 0.5159642401021711, + "acc_norm_stderr": 0.01787084750608174 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.42962962962962964, + "acc_stderr": 0.04276349494376599, + "acc_norm": 0.42962962962962964, + "acc_norm_stderr": 0.04276349494376599 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3574468085106383, + "acc_stderr": 0.03132941789476425, + "acc_norm": 0.3574468085106383, + "acc_norm_stderr": 0.03132941789476425 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39759036144578314, + "acc_stderr": 0.038099730845402184, + "acc_norm": 0.39759036144578314, + "acc_norm_stderr": 0.038099730845402184 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5176848874598071, + "acc_stderr": 0.02838032284907713, + "acc_norm": 0.5176848874598071, + "acc_norm_stderr": 0.02838032284907713 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.484304932735426, + "acc_stderr": 
0.0335412657542081, + "acc_norm": 0.484304932735426, + "acc_norm_stderr": 0.0335412657542081 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4732824427480916, + "acc_stderr": 0.04379024936553894, + "acc_norm": 0.4732824427480916, + "acc_norm_stderr": 0.04379024936553894 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5151515151515151, + "acc_stderr": 0.03560716516531061, + "acc_norm": 0.5151515151515151, + "acc_norm_stderr": 0.03560716516531061 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4206896551724138, + "acc_stderr": 0.0411391498118926, + "acc_norm": 0.4206896551724138, + "acc_norm_stderr": 0.0411391498118926 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.043898699568087785, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.043898699568087785 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.453781512605042, + "acc_stderr": 0.032339434681820885, + "acc_norm": 0.453781512605042, + "acc_norm_stderr": 0.032339434681820885 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.44358974358974357, + "acc_stderr": 0.02518914989476419, + "acc_norm": 0.44358974358974357, + "acc_norm_stderr": 0.02518914989476419 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.04820403072760627, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.04820403072760627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4039408866995074, 
+ "acc_stderr": 0.034524539038220385, + "acc_norm": 0.4039408866995074, + "acc_norm_stderr": 0.034524539038220385 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.44516129032258067, + "acc_stderr": 0.02827241018621491, + "acc_norm": 0.44516129032258067, + "acc_norm_stderr": 0.02827241018621491 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6709401709401709, + "acc_stderr": 0.03078232157768817, + "acc_norm": 0.6709401709401709, + "acc_norm_stderr": 0.03078232157768817 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4075471698113208, + "acc_stderr": 0.030242233800854498, + "acc_norm": 0.4075471698113208, + "acc_norm_stderr": 0.030242233800854498 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.04769300568972744, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.04769300568972744 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.028742040903948492, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.028742040903948492 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.271523178807947, + "acc_stderr": 0.036313298039696525, + "acc_norm": 0.271523178807947, + "acc_norm_stderr": 0.036313298039696525 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.572139303482587, + "acc_stderr": 0.03498541988407795, + "acc_norm": 0.572139303482587, + "acc_norm_stderr": 0.03498541988407795 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3583815028901734, + "acc_stderr": 0.036563436533531585, + "acc_norm": 0.3583815028901734, + "acc_norm_stderr": 0.036563436533531585 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2751322751322751, + "acc_stderr": 0.02300008685906864, + "acc_norm": 0.2751322751322751, + "acc_norm_stderr": 0.02300008685906864 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3402777777777778, + "acc_stderr": 0.039621355734862175, + "acc_norm": 0.3402777777777778, + "acc_norm_stderr": 
0.039621355734862175 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.67, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.67, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.49710982658959535, + "acc_stderr": 0.026918645383239004, + "acc_norm": 0.49710982658959535, + "acc_norm_stderr": 0.026918645383239004 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.43558282208588955, + "acc_stderr": 0.03895632464138937, + "acc_norm": 0.43558282208588955, + "acc_norm_stderr": 0.03895632464138937 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.44753086419753085, + "acc_stderr": 0.0276671385694227, + "acc_norm": 0.44753086419753085, + "acc_norm_stderr": 0.0276671385694227 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5233160621761658, + "acc_stderr": 0.036045136724422014, + "acc_norm": 0.5233160621761658, + "acc_norm_stderr": 0.036045136724422014 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.04049339297748142, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.04049339297748142 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5211009174311927, + "acc_stderr": 0.02141822475426465, + "acc_norm": 0.5211009174311927, + "acc_norm_stderr": 0.02141822475426465 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.31746031746031744, + "acc_stderr": 0.0416345303130286, + "acc_norm": 0.31746031746031744, + "acc_norm_stderr": 0.0416345303130286 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.434640522875817, + "acc_stderr": 0.028384256704883037, + "acc_norm": 0.434640522875817, + 
"acc_norm_stderr": 0.028384256704883037 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5867768595041323, + "acc_stderr": 0.04495087843548408, + "acc_norm": 0.5867768595041323, + "acc_norm_stderr": 0.04495087843548408 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4934210526315789, + "acc_stderr": 0.040685900502249704, + "acc_norm": 0.4934210526315789, + "acc_norm_stderr": 0.040685900502249704 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.019722058939618068, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.019722058939618068 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32978723404255317, + "acc_stderr": 0.028045946942042398, + "acc_norm": 0.32978723404255317, + "acc_norm_stderr": 0.028045946942042398 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25, + "acc_stderr": 0.04109974682633932, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04109974682633932 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.38425925925925924, + "acc_stderr": 0.03317354514310742, + "acc_norm": 0.38425925925925924, + "acc_norm_stderr": 0.03317354514310742 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24692737430167597, + "acc_stderr": 0.014422292204808852, + "acc_norm": 0.24692737430167597, + "acc_norm_stderr": 0.014422292204808852 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.47, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.47, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.029520095697687754, + "acc_norm": 
0.38235294117647056, + "acc_norm_stderr": 0.029520095697687754 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4326530612244898, + "acc_stderr": 0.03171752824062664, + "acc_norm": 0.4326530612244898, + "acc_norm_stderr": 0.03171752824062664 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5738396624472574, + "acc_stderr": 0.03219035703131774, + "acc_norm": 0.5738396624472574, + "acc_norm_stderr": 0.03219035703131774 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.35984354628422427, + "acc_stderr": 0.012258260483689803, + "acc_norm": 0.35984354628422427, + "acc_norm_stderr": 0.012258260483689803 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.43137254901960786, + "acc_stderr": 0.03476099060501636, + "acc_norm": 0.43137254901960786, + "acc_norm_stderr": 0.03476099060501636 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5212121212121212, + "acc_stderr": 0.03900828913737301, + "acc_norm": 0.5212121212121212, + "acc_norm_stderr": 0.03900828913737301 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.26438188494492043, + "mc1_stderr": 0.015438211119522509, + "mc2": 0.43461012650741965, + "mc2_stderr": 0.015133199211121806 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3246753246753247, + "acc_stderr": 0.016098883939346453, + "acc_norm": 0.34238488783943327, + "acc_norm_stderr": 0.016313907844146373 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + 
"harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + 
"harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "kyujinpy/Kosy-Platypus2-13B", + "model_sha": "7d5af714d5429ed3496f73e8a44525bec4a73d20", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/kyujinpy/Kosy-platypus2-13B-v2/result_2023-10-26 16:55:23.json b/kyujinpy/Kosy-platypus2-13B-v2/result_2023-10-26 16:55:23.json new file mode 100644 index 0000000000000000000000000000000000000000..aff2f85c8df60da5e65f4604b2289447e6bf76e0 --- /dev/null +++ b/kyujinpy/Kosy-platypus2-13B-v2/result_2023-10-26 16:55:23.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3848122866894198, + "acc_stderr": 0.014218371065251093, + "acc_norm": 0.44197952218430037, + "acc_norm_stderr": 0.014512682523128343 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4116709818761203, + "acc_stderr": 0.004911303569769794, + "acc_norm": 0.5456084445329615, + "acc_norm_stderr": 0.004968979259738331 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5380116959064327, + "acc_stderr": 0.03823727092882307, + "acc_norm": 0.5380116959064327, + "acc_norm_stderr": 0.03823727092882307 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5048543689320388, + "acc_stderr": 0.04950504382128921, + "acc_norm": 0.5048543689320388, + "acc_norm_stderr": 0.04950504382128921 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5172413793103449, + "acc_stderr": 0.017869330154003698, + "acc_norm": 0.5172413793103449, + "acc_norm_stderr": 0.017869330154003698 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.042925967182569816, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.042925967182569816 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.2, + "acc_stderr": 
0.040201512610368445, + "acc_norm": 0.2, + "acc_norm_stderr": 0.040201512610368445 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3276595744680851, + "acc_stderr": 0.030683020843231015, + "acc_norm": 0.3276595744680851, + "acc_norm_stderr": 0.030683020843231015 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4036144578313253, + "acc_stderr": 0.03819486140758397, + "acc_norm": 0.4036144578313253, + "acc_norm_stderr": 0.03819486140758397 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5273311897106109, + "acc_stderr": 0.028355633568328174, + "acc_norm": 0.5273311897106109, + "acc_norm_stderr": 0.028355633568328174 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3811659192825112, + "acc_stderr": 0.032596251184168284, + "acc_norm": 0.3811659192825112, + "acc_norm_stderr": 0.032596251184168284 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4732824427480916, + "acc_stderr": 0.04379024936553894, + "acc_norm": 0.4732824427480916, + "acc_norm_stderr": 0.04379024936553894 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5404040404040404, + "acc_stderr": 0.035507024651313425, + "acc_norm": 0.5404040404040404, + "acc_norm_stderr": 0.035507024651313425 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4689655172413793, + "acc_stderr": 0.04158632762097828, + "acc_norm": 0.4689655172413793, + "acc_norm_stderr": 0.04158632762097828 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.04488482852329017, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.04488482852329017 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.0322529423239964, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.0322529423239964 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + 
"acc": 0.4230769230769231, + "acc_stderr": 0.025049197876042328, + "acc_norm": 0.4230769230769231, + "acc_norm_stderr": 0.025049197876042328 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5277777777777778, + "acc_stderr": 0.04826217294139894, + "acc_norm": 0.5277777777777778, + "acc_norm_stderr": 0.04826217294139894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.03465304488406796, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.03465304488406796 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4645161290322581, + "acc_stderr": 0.02837228779796295, + "acc_norm": 0.4645161290322581, + "acc_norm_stderr": 0.02837228779796295 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6324786324786325, + "acc_stderr": 0.031585391577456365, + "acc_norm": 0.6324786324786325, + "acc_norm_stderr": 0.031585391577456365 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.44150943396226416, + "acc_stderr": 0.030561590426731833, + "acc_norm": 0.44150943396226416, + "acc_norm_stderr": 0.030561590426731833 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2851851851851852, + "acc_stderr": 0.027528599210340492, + "acc_norm": 0.2851851851851852, + "acc_norm_stderr": 0.027528599210340492 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.03757949922943343, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.03757949922943343 + }, + 
"harness|ko_mmlu_sociology|5": { + "acc": 0.5920398009950248, + "acc_stderr": 0.03475116365194092, + "acc_norm": 0.5920398009950248, + "acc_norm_stderr": 0.03475116365194092 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3988439306358382, + "acc_stderr": 0.03733626655383509, + "acc_norm": 0.3988439306358382, + "acc_norm_stderr": 0.03733626655383509 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.023809523809523867, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.023809523809523867 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3680555555555556, + "acc_stderr": 0.040329990539607195, + "acc_norm": 0.3680555555555556, + "acc_norm_stderr": 0.040329990539607195 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5115606936416185, + "acc_stderr": 0.026911898686377913, + "acc_norm": 0.5115606936416185, + "acc_norm_stderr": 0.026911898686377913 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4049079754601227, + "acc_stderr": 0.03856672163548913, + "acc_norm": 0.4049079754601227, + "acc_norm_stderr": 0.03856672163548913 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4845679012345679, + "acc_stderr": 0.027807490044276198, + "acc_norm": 0.4845679012345679, + "acc_norm_stderr": 0.027807490044276198 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.49740932642487046, + "acc_stderr": 0.03608390745384487, + "acc_norm": 0.49740932642487046, + "acc_norm_stderr": 
0.03608390745384487 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.03999423879281336, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.03999423879281336 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5431192660550459, + "acc_stderr": 0.02135745878522621, + "acc_norm": 0.5431192660550459, + "acc_norm_stderr": 0.02135745878522621 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.31746031746031744, + "acc_stderr": 0.04163453031302859, + "acc_norm": 0.31746031746031744, + "acc_norm_stderr": 0.04163453031302859 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.028431095444176643, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.028431095444176643 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6115702479338843, + "acc_stderr": 0.04449270350068382, + "acc_norm": 0.6115702479338843, + "acc_norm_stderr": 0.04449270350068382 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.48026315789473684, + "acc_stderr": 0.040657710025626057, + "acc_norm": 0.48026315789473684, + "acc_norm_stderr": 0.040657710025626057 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.35294117647058826, + "acc_stderr": 0.019333142020797073, + "acc_norm": 0.35294117647058826, + "acc_norm_stderr": 0.019333142020797073 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3404255319148936, + "acc_stderr": 0.028267657482650133, + "acc_norm": 0.3404255319148936, + "acc_norm_stderr": 0.028267657482650133 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.23214285714285715, + "acc_stderr": 0.04007341809755807, + "acc_norm": 0.23214285714285715, + "acc_norm_stderr": 0.04007341809755807 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3194444444444444, + "acc_stderr": 
0.03179876342176852, + "acc_norm": 0.3194444444444444, + "acc_norm_stderr": 0.03179876342176852 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.41, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3492647058823529, + "acc_stderr": 0.028959755196824852, + "acc_norm": 0.3492647058823529, + "acc_norm_stderr": 0.028959755196824852 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5265306122448979, + "acc_stderr": 0.03196412734523272, + "acc_norm": 0.5265306122448979, + "acc_norm_stderr": 0.03196412734523272 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.620253164556962, + "acc_stderr": 0.031591887529658504, + "acc_norm": 0.620253164556962, + "acc_norm_stderr": 0.031591887529658504 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3259452411994785, + "acc_stderr": 0.011971507294982775, + "acc_norm": 0.3259452411994785, + "acc_norm_stderr": 0.011971507294982775 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4803921568627451, + "acc_stderr": 0.03506612560524866, + "acc_norm": 0.4803921568627451, + "acc_norm_stderr": 0.03506612560524866 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5393939393939394, + "acc_stderr": 0.03892207016552012, + "acc_norm": 0.5393939393939394, + "acc_norm_stderr": 0.03892207016552012 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2631578947368421, + "mc1_stderr": 0.015415241740237033, + "mc2": 0.42682166058026266, + "mc2_stderr": 0.015080394807895544 + }, + "harness|ko_commongen_v2|2": { + 
"acc": 0.3837072018890201, + "acc_stderr": 0.016718924637231822, + "acc_norm": 0.43211334120425027, + "acc_norm_stderr": 0.01703117019885175 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + 
"harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "kyujinpy/Kosy-platypus2-13B-v2", + "model_sha": "6f4bbfe83457bd7c30e3229be576883534ae37b1", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/kyujinpy/Kosy-platypus2-13B-v3/result_2023-10-27 11:47:47.json b/kyujinpy/Kosy-platypus2-13B-v3/result_2023-10-27 11:47:47.json new file mode 100644 index 0000000000000000000000000000000000000000..3698ec9a8efa58cace3872e7c4f7103482a1d31d --- /dev/null +++ b/kyujinpy/Kosy-platypus2-13B-v3/result_2023-10-27 11:47:47.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.38139931740614336, + "acc_stderr": 0.014194389086685247, + "acc_norm": 0.4334470989761092, + "acc_norm_stderr": 0.0144813762245589 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4124676359290978, + "acc_stderr": 0.004912723848944785, + "acc_norm": 0.5454092810197172, + "acc_norm_stderr": 0.004969160917379657 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5555555555555556, + 
"acc_stderr": 0.03811079669833531, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.03811079669833531 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5436893203883495, + "acc_stderr": 0.049318019942204146, + "acc_norm": 0.5436893203883495, + "acc_norm_stderr": 0.049318019942204146 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5185185185185185, + "acc_stderr": 0.017867695938429774, + "acc_norm": 0.5185185185185185, + "acc_norm_stderr": 0.017867695938429774 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.43703703703703706, + "acc_stderr": 0.04284958639753399, + "acc_norm": 0.43703703703703706, + "acc_norm_stderr": 0.04284958639753399 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.34893617021276596, + "acc_stderr": 0.03115852213135778, + "acc_norm": 0.34893617021276596, + "acc_norm_stderr": 0.03115852213135778 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3855421686746988, + "acc_stderr": 0.03789134424611548, + "acc_norm": 0.3855421686746988, + "acc_norm_stderr": 0.03789134424611548 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5016077170418006, + "acc_stderr": 0.02839794490780661, + "acc_norm": 0.5016077170418006, + "acc_norm_stderr": 0.02839794490780661 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4170403587443946, + "acc_stderr": 0.03309266936071721, + "acc_norm": 0.4170403587443946, + "acc_norm_stderr": 0.03309266936071721 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5419847328244275, + "acc_stderr": 0.04369802690578756, + "acc_norm": 0.5419847328244275, + "acc_norm_stderr": 0.04369802690578756 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5151515151515151, + 
"acc_stderr": 0.03560716516531061, + "acc_norm": 0.5151515151515151, + "acc_norm_stderr": 0.03560716516531061 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.041042692118062316, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.041042692118062316 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.04389869956808778, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.04389869956808778 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.46638655462184875, + "acc_stderr": 0.03240501447690071, + "acc_norm": 0.46638655462184875, + "acc_norm_stderr": 0.03240501447690071 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4358974358974359, + "acc_stderr": 0.025141801511177498, + "acc_norm": 0.4358974358974359, + "acc_norm_stderr": 0.025141801511177498 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.48, + "acc_stderr": 0.05021167315686781, + "acc_norm": 0.48, + "acc_norm_stderr": 0.05021167315686781 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3793103448275862, + "acc_stderr": 0.034139638059062345, + "acc_norm": 0.3793103448275862, + "acc_norm_stderr": 0.034139638059062345 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.46774193548387094, + "acc_stderr": 0.02838474778881333, + "acc_norm": 0.46774193548387094, + "acc_norm_stderr": 0.02838474778881333 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6752136752136753, + "acc_stderr": 0.03067902276549883, + "acc_norm": 0.6752136752136753, + "acc_norm_stderr": 0.03067902276549883 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 
0.4226415094339623, + "acc_stderr": 0.03040233144576954, + "acc_norm": 0.4226415094339623, + "acc_norm_stderr": 0.03040233144576954 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5, + "acc_stderr": 0.04789131426105757, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04789131426105757 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3037037037037037, + "acc_stderr": 0.028037929969114986, + "acc_norm": 0.3037037037037037, + "acc_norm_stderr": 0.028037929969114986 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + "acc_stderr": 0.03734535676787198, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.03734535676787198 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5870646766169154, + "acc_stderr": 0.034815208033673474, + "acc_norm": 0.5870646766169154, + "acc_norm_stderr": 0.034815208033673474 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.43352601156069365, + "acc_stderr": 0.03778621079092055, + "acc_norm": 0.43352601156069365, + "acc_norm_stderr": 0.03778621079092055 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.02326651221373057, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.02326651221373057 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4027777777777778, + "acc_stderr": 0.04101405519842426, + "acc_norm": 0.4027777777777778, + "acc_norm_stderr": 0.04101405519842426 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.63, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.63, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4624277456647399, + "acc_stderr": 0.026842985519615375, + "acc_norm": 0.4624277456647399, + "acc_norm_stderr": 0.026842985519615375 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 
0.44171779141104295, + "acc_stderr": 0.03901591825836184, + "acc_norm": 0.44171779141104295, + "acc_norm_stderr": 0.03901591825836184 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.45987654320987653, + "acc_stderr": 0.02773102275353928, + "acc_norm": 0.45987654320987653, + "acc_norm_stderr": 0.02773102275353928 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5284974093264249, + "acc_stderr": 0.036025735712884414, + "acc_norm": 0.5284974093264249, + "acc_norm_stderr": 0.036025735712884414 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.22807017543859648, + "acc_stderr": 0.03947152782669415, + "acc_norm": 0.22807017543859648, + "acc_norm_stderr": 0.03947152782669415 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5522935779816514, + "acc_stderr": 0.021319754962425462, + "acc_norm": 0.5522935779816514, + "acc_norm_stderr": 0.021319754962425462 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.373015873015873, + "acc_stderr": 0.04325506042017086, + "acc_norm": 0.373015873015873, + "acc_norm_stderr": 0.04325506042017086 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.43790849673202614, + "acc_stderr": 0.02840830202033269, + "acc_norm": 0.43790849673202614, + "acc_norm_stderr": 0.02840830202033269 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6198347107438017, + "acc_stderr": 0.04431324501968432, + "acc_norm": 0.6198347107438017, + "acc_norm_stderr": 0.04431324501968432 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.48026315789473684, + "acc_stderr": 0.04065771002562605, + "acc_norm": 0.48026315789473684, + "acc_norm_stderr": 0.04065771002562605 + }, + 
"harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3839869281045752, + "acc_stderr": 0.01967580813528151, + "acc_norm": 0.3839869281045752, + "acc_norm_stderr": 0.01967580813528151 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.02812163604063988, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.02812163604063988 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.17857142857142858, + "acc_stderr": 0.036352091215778065, + "acc_norm": 0.17857142857142858, + "acc_norm_stderr": 0.036352091215778065 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4305555555555556, + "acc_stderr": 0.03376922151252336, + "acc_norm": 0.4305555555555556, + "acc_norm_stderr": 0.03376922151252336 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2435754189944134, + "acc_stderr": 0.01435591196476786, + "acc_norm": 0.2435754189944134, + "acc_norm_stderr": 0.01435591196476786 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.41, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4117647058823529, + "acc_stderr": 0.029896163033125478, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.029896163033125478 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.47346938775510206, + "acc_stderr": 0.03196412734523272, + "acc_norm": 0.47346938775510206, + "acc_norm_stderr": 0.03196412734523272 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6497890295358649, + "acc_stderr": 0.031052391937584346, + "acc_norm": 0.6497890295358649, + "acc_norm_stderr": 0.031052391937584346 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3513689700130378, + "acc_stderr": 0.01219296945748402, + "acc_norm": 0.3513689700130378, + 
"acc_norm_stderr": 0.01219296945748402 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5196078431372549, + "acc_stderr": 0.03506612560524866, + "acc_norm": 0.5196078431372549, + "acc_norm_stderr": 0.03506612560524866 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5151515151515151, + "acc_stderr": 0.03902551007374448, + "acc_norm": 0.5151515151515151, + "acc_norm_stderr": 0.03902551007374448 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.27539779681762544, + "mc1_stderr": 0.015638135667775523, + "mc2": 0.44114088993275297, + "mc2_stderr": 0.015165075535391745 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.38961038961038963, + "acc_stderr": 0.0167661616718935, + "acc_norm": 0.46162927981109797, + "acc_norm_stderr": 0.01713966022184556 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "kyujinpy/Kosy-platypus2-13B-v3", + "model_sha": "221e5e31480c06f46c707f92ea261bb2903729f2", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/kyujinpy/Kosy-platypus2-13B-v4/result_2023-10-28 18:01:16.json b/kyujinpy/Kosy-platypus2-13B-v4/result_2023-10-28 18:01:16.json new file 
mode 100644 index 0000000000000000000000000000000000000000..30f385d87060b79b8f1ae7b272b40f300861ccd4 --- /dev/null +++ b/kyujinpy/Kosy-platypus2-13B-v4/result_2023-10-28 18:01:16.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.36860068259385664, + "acc_stderr": 0.014097810678042194, + "acc_norm": 0.42918088737201365, + "acc_norm_stderr": 0.014464085894870657 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4082852021509659, + "acc_stderr": 0.00490511903984946, + "acc_norm": 0.5448117904799841, + "acc_norm_stderr": 0.00496970108106838 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4853801169590643, + "acc_stderr": 0.038331852752130205, + "acc_norm": 0.4853801169590643, + "acc_norm_stderr": 0.038331852752130205 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4854368932038835, + "acc_stderr": 0.04948637324026637, + "acc_norm": 0.4854368932038835, + "acc_norm_stderr": 0.04948637324026637 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5223499361430396, + "acc_stderr": 0.017862091778507852, + "acc_norm": 0.5223499361430396, + "acc_norm_stderr": 0.017862091778507852 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.042925967182569816, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.042925967182569816 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.35319148936170214, + "acc_stderr": 0.03124532520276193, + "acc_norm": 0.35319148936170214, + "acc_norm_stderr": 0.03124532520276193 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42168674698795183, + "acc_stderr": 0.03844453181770917, + "acc_norm": 0.42168674698795183, + "acc_norm_stderr": 0.03844453181770917 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5305466237942122, + "acc_stderr": 0.02834504586484063, + "acc_norm": 0.5305466237942122, + 
"acc_norm_stderr": 0.02834504586484063 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.45739910313901344, + "acc_stderr": 0.033435777055830646, + "acc_norm": 0.45739910313901344, + "acc_norm_stderr": 0.033435777055830646 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5190839694656488, + "acc_stderr": 0.04382094705550988, + "acc_norm": 0.5190839694656488, + "acc_norm_stderr": 0.04382094705550988 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5050505050505051, + "acc_stderr": 0.035621707606254015, + "acc_norm": 0.5050505050505051, + "acc_norm_stderr": 0.035621707606254015 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4206896551724138, + "acc_stderr": 0.0411391498118926, + "acc_norm": 0.4206896551724138, + "acc_norm_stderr": 0.0411391498118926 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.18627450980392157, + "acc_stderr": 0.038739587141493524, + "acc_norm": 0.18627450980392157, + "acc_norm_stderr": 0.038739587141493524 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.44537815126050423, + "acc_stderr": 0.0322841062671639, + "acc_norm": 0.44537815126050423, + "acc_norm_stderr": 0.0322841062671639 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.43846153846153846, + "acc_stderr": 0.02515826601686854, + "acc_norm": 0.43846153846153846, + "acc_norm_stderr": 0.02515826601686854 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6481481481481481, + "acc_stderr": 0.046166311118017146, + "acc_norm": 
0.6481481481481481, + "acc_norm_stderr": 0.046166311118017146 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.37438423645320196, + "acc_stderr": 0.03405155380561952, + "acc_norm": 0.37438423645320196, + "acc_norm_stderr": 0.03405155380561952 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.45806451612903226, + "acc_stderr": 0.028343787250540643, + "acc_norm": 0.45806451612903226, + "acc_norm_stderr": 0.028343787250540643 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6709401709401709, + "acc_stderr": 0.030782321577688173, + "acc_norm": 0.6709401709401709, + "acc_norm_stderr": 0.030782321577688173 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.41509433962264153, + "acc_stderr": 0.030325945789286105, + "acc_norm": 0.41509433962264153, + "acc_norm_stderr": 0.030325945789286105 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5363636363636364, + "acc_stderr": 0.047764491623961985, + "acc_norm": 0.5363636363636364, + "acc_norm_stderr": 0.047764491623961985 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2814814814814815, + "acc_stderr": 0.027420019350945273, + "acc_norm": 0.2814814814814815, + "acc_norm_stderr": 0.027420019350945273 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.23841059602649006, + "acc_stderr": 0.0347918557259966, + "acc_norm": 0.23841059602649006, + "acc_norm_stderr": 0.0347918557259966 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5522388059701493, + "acc_stderr": 0.035161847729521654, + "acc_norm": 0.5522388059701493, + "acc_norm_stderr": 0.035161847729521654 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3815028901734104, + "acc_stderr": 0.03703851193099521, + "acc_norm": 0.3815028901734104, + "acc_norm_stderr": 0.03703851193099521 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.328042328042328, + "acc_stderr": 0.024180497164376896, + "acc_norm": 0.328042328042328, + "acc_norm_stderr": 0.024180497164376896 + }, + 
"harness|ko_mmlu_college_biology|5": { + "acc": 0.3402777777777778, + "acc_stderr": 0.03962135573486219, + "acc_norm": 0.3402777777777778, + "acc_norm_stderr": 0.03962135573486219 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.65, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.65, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5144508670520231, + "acc_stderr": 0.026907849856282542, + "acc_norm": 0.5144508670520231, + "acc_norm_stderr": 0.026907849856282542 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.43558282208588955, + "acc_stderr": 0.03895632464138936, + "acc_norm": 0.43558282208588955, + "acc_norm_stderr": 0.03895632464138936 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4537037037037037, + "acc_stderr": 0.0277012284685426, + "acc_norm": 0.4537037037037037, + "acc_norm_stderr": 0.0277012284685426 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5492227979274611, + "acc_stderr": 0.03590910952235524, + "acc_norm": 0.5492227979274611, + "acc_norm_stderr": 0.03590910952235524 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2894736842105263, + "acc_stderr": 0.042663394431593955, + "acc_norm": 0.2894736842105263, + "acc_norm_stderr": 0.042663394431593955 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5412844036697247, + "acc_stderr": 0.02136412253388169, + "acc_norm": 0.5412844036697247, + "acc_norm_stderr": 0.02136412253388169 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.0404061017820884, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.0404061017820884 + }, 
+ "harness|ko_mmlu_nutrition|5": { + "acc": 0.42810457516339867, + "acc_stderr": 0.028332397483664278, + "acc_norm": 0.42810457516339867, + "acc_norm_stderr": 0.028332397483664278 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6446280991735537, + "acc_stderr": 0.0436923632657398, + "acc_norm": 0.6446280991735537, + "acc_norm_stderr": 0.0436923632657398 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4605263157894737, + "acc_stderr": 0.04056242252249033, + "acc_norm": 0.4605263157894737, + "acc_norm_stderr": 0.04056242252249033 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.019722058939618068, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.019722058939618068 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.35106382978723405, + "acc_stderr": 0.028473501272963764, + "acc_norm": 0.35106382978723405, + "acc_norm_stderr": 0.028473501272963764 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.20535714285714285, + "acc_stderr": 0.03834241021419073, + "acc_norm": 0.20535714285714285, + "acc_norm_stderr": 0.03834241021419073 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3194444444444444, + "acc_stderr": 0.03179876342176853, + "acc_norm": 0.3194444444444444, + "acc_norm_stderr": 0.03179876342176853 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27039106145251396, + "acc_stderr": 0.014854993938010071, + "acc_norm": 0.27039106145251396, + "acc_norm_stderr": 0.014854993938010071 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.04943110704237101, + "acc_norm": 0.41, + "acc_norm_stderr": 0.04943110704237101 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 
0.05016135580465919 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3382352941176471, + "acc_stderr": 0.028739328513983583, + "acc_norm": 0.3382352941176471, + "acc_norm_stderr": 0.028739328513983583 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4448979591836735, + "acc_stderr": 0.031814251181977865, + "acc_norm": 0.4448979591836735, + "acc_norm_stderr": 0.031814251181977865 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5949367088607594, + "acc_stderr": 0.03195514741370672, + "acc_norm": 0.5949367088607594, + "acc_norm_stderr": 0.03195514741370672 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.363754889178618, + "acc_stderr": 0.012286991879902887, + "acc_norm": 0.363754889178618, + "acc_norm_stderr": 0.012286991879902887 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5441176470588235, + "acc_stderr": 0.03495624522015477, + "acc_norm": 0.5441176470588235, + "acc_norm_stderr": 0.03495624522015477 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.503030303030303, + "acc_stderr": 0.03904272341431857, + "acc_norm": 0.503030303030303, + "acc_norm_stderr": 0.03904272341431857 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2668298653610771, + "mc1_stderr": 0.015483691939237272, + "mc2": 0.4299526725985081, + "mc2_stderr": 0.015097005552664109 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3825265643447462, + "acc_stderr": 0.01670916538722882, + "acc_norm": 0.41204250295159384, + "acc_norm_stderr": 0.01692227673852836 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + 
"harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + 
"harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "kyujinpy/Kosy-platypus2-13B-v4", + "model_sha": "f7f8972f4e1221436272e0d16b946c816373ce93", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/kyujinpy/Kosy-platypus2-13B-v5/result_2023-11-01 20:41:13.json b/kyujinpy/Kosy-platypus2-13B-v5/result_2023-11-01 20:41:13.json new file mode 100644 index 0000000000000000000000000000000000000000..ebeed6abaa595fd5db7850dd4b649c415900107d --- /dev/null +++ b/kyujinpy/Kosy-platypus2-13B-v5/result_2023-11-01 20:41:13.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3703071672354949, + "acc_stderr": 0.01411129875167495, + "acc_norm": 0.4308873720136519, + "acc_norm_stderr": 0.014471133392642463 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4063931487751444, + "acc_stderr": 0.004901558132335531, + "acc_norm": 0.5361481776538538, + "acc_norm_stderr": 0.004976724124850573 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.49122807017543857, + "acc_stderr": 0.03834234744164993, + "acc_norm": 0.49122807017543857, + "acc_norm_stderr": 0.03834234744164993 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.44660194174757284, + "acc_stderr": 0.04922424153458933, + "acc_norm": 0.44660194174757284, + "acc_norm_stderr": 0.04922424153458933 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5108556832694764, + "acc_stderr": 0.017875748840242407, + "acc_norm": 0.5108556832694764, + "acc_norm_stderr": 0.017875748840242407 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4222222222222222, + "acc_stderr": 0.04266763404099582, + 
"acc_norm": 0.4222222222222222, + "acc_norm_stderr": 0.04266763404099582 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847415, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847415 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3574468085106383, + "acc_stderr": 0.03132941789476425, + "acc_norm": 0.3574468085106383, + "acc_norm_stderr": 0.03132941789476425 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.43373493975903615, + "acc_stderr": 0.03858158940685516, + "acc_norm": 0.43373493975903615, + "acc_norm_stderr": 0.03858158940685516 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.48231511254019294, + "acc_stderr": 0.02838032284907713, + "acc_norm": 0.48231511254019294, + "acc_norm_stderr": 0.02838032284907713 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4618834080717489, + "acc_stderr": 0.033460150119732274, + "acc_norm": 0.4618834080717489, + "acc_norm_stderr": 0.033460150119732274 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4122137404580153, + "acc_stderr": 0.043171711948702535, + "acc_norm": 0.4122137404580153, + "acc_norm_stderr": 0.043171711948702535 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5050505050505051, + "acc_stderr": 0.035621707606254015, + "acc_norm": 0.5050505050505051, + "acc_norm_stderr": 0.035621707606254015 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3793103448275862, + "acc_stderr": 0.040434618619167466, + "acc_norm": 0.3793103448275862, + "acc_norm_stderr": 0.040434618619167466 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237655, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237655 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4117647058823529, + 
"acc_stderr": 0.03196876989195779, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.03196876989195779 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4307692307692308, + "acc_stderr": 0.02510682066053975, + "acc_norm": 0.4307692307692308, + "acc_norm_stderr": 0.02510682066053975 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2955665024630542, + "acc_stderr": 0.032104944337514575, + "acc_norm": 0.2955665024630542, + "acc_norm_stderr": 0.032104944337514575 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.45806451612903226, + "acc_stderr": 0.028343787250540646, + "acc_norm": 0.45806451612903226, + "acc_norm_stderr": 0.028343787250540646 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6196581196581197, + "acc_stderr": 0.03180425204384099, + "acc_norm": 0.6196581196581197, + "acc_norm_stderr": 0.03180425204384099 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.37735849056603776, + "acc_stderr": 0.02983280811479601, + "acc_norm": 0.37735849056603776, + "acc_norm_stderr": 0.02983280811479601 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4636363636363636, + "acc_stderr": 0.047764491623961985, + "acc_norm": 0.4636363636363636, + "acc_norm_stderr": 0.047764491623961985 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.28888888888888886, + "acc_stderr": 0.02763490726417854, + "acc_norm": 0.28888888888888886, + "acc_norm_stderr": 0.02763490726417854 + }, + 
"harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2582781456953642, + "acc_stderr": 0.035737053147634576, + "acc_norm": 0.2582781456953642, + "acc_norm_stderr": 0.035737053147634576 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5472636815920398, + "acc_stderr": 0.03519702717576915, + "acc_norm": 0.5472636815920398, + "acc_norm_stderr": 0.03519702717576915 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3699421965317919, + "acc_stderr": 0.0368122963339432, + "acc_norm": 0.3699421965317919, + "acc_norm_stderr": 0.0368122963339432 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.28835978835978837, + "acc_stderr": 0.02333065405453589, + "acc_norm": 0.28835978835978837, + "acc_norm_stderr": 0.02333065405453589 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.03852084696008534, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.03852084696008534 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.63, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.63, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4797687861271676, + "acc_stderr": 0.026897049996382875, + "acc_norm": 0.4797687861271676, + "acc_norm_stderr": 0.026897049996382875 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.43558282208588955, + "acc_stderr": 0.03895632464138938, + "acc_norm": 0.43558282208588955, + "acc_norm_stderr": 0.03895632464138938 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.027815973433878014, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.027815973433878014 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + 
"harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.49740932642487046, + "acc_stderr": 0.03608390745384487, + "acc_norm": 0.49740932642487046, + "acc_norm_stderr": 0.03608390745384487 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.040969851398436716, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.040969851398436716 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5412844036697247, + "acc_stderr": 0.021364122533881688, + "acc_norm": 0.5412844036697247, + "acc_norm_stderr": 0.021364122533881688 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.29365079365079366, + "acc_stderr": 0.040735243221471255, + "acc_norm": 0.29365079365079366, + "acc_norm_stderr": 0.040735243221471255 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.39215686274509803, + "acc_stderr": 0.027956046165424523, + "acc_norm": 0.39215686274509803, + "acc_norm_stderr": 0.027956046165424523 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6611570247933884, + "acc_stderr": 0.04320767807536671, + "acc_norm": 0.6611570247933884, + "acc_norm_stderr": 0.04320767807536671 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.46710526315789475, + "acc_stderr": 0.040601270352363966, + "acc_norm": 0.46710526315789475, + "acc_norm_stderr": 0.040601270352363966 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.35947712418300654, + "acc_stderr": 0.01941253924203216, + "acc_norm": 0.35947712418300654, + "acc_norm_stderr": 0.01941253924203216 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.34397163120567376, + "acc_stderr": 0.028338017428611313, + "acc_norm": 0.34397163120567376, + "acc_norm_stderr": 0.028338017428611313 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25892857142857145, + "acc_stderr": 
0.041577515398656284, + "acc_norm": 0.25892857142857145, + "acc_norm_stderr": 0.041577515398656284 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.031674687068289804, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.031674687068289804 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24692737430167597, + "acc_stderr": 0.014422292204808852, + "acc_norm": 0.24692737430167597, + "acc_norm_stderr": 0.014422292204808852 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3897058823529412, + "acc_stderr": 0.02962466358115969, + "acc_norm": 0.3897058823529412, + "acc_norm_stderr": 0.02962466358115969 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4204081632653061, + "acc_stderr": 0.03160106993449604, + "acc_norm": 0.4204081632653061, + "acc_norm_stderr": 0.03160106993449604 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.569620253164557, + "acc_stderr": 0.03223017195937598, + "acc_norm": 0.569620253164557, + "acc_norm_stderr": 0.03223017195937598 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.378748370273794, + "acc_stderr": 0.012389052105003734, + "acc_norm": 0.378748370273794, + "acc_norm_stderr": 0.012389052105003734 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.43137254901960786, + "acc_stderr": 0.03476099060501636, + "acc_norm": 0.43137254901960786, + "acc_norm_stderr": 0.03476099060501636 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4727272727272727, + "acc_stderr": 0.03898531605579419, + "acc_norm": 0.4727272727272727, + "acc_norm_stderr": 0.03898531605579419 + }, + 
"harness|ko_truthfulqa_mc|0": { + "mc1": 0.2582619339045288, + "mc1_stderr": 0.015321821688476196, + "mc2": 0.4346617472360019, + "mc2_stderr": 0.015047283841012499 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3754427390791027, + "acc_stderr": 0.016648411589511084, + "acc_norm": 0.44155844155844154, + "acc_norm_stderr": 0.017072525875563103 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "kyujinpy/Kosy-platypus2-13B-v5", + "model_sha": "1a82ca82d6bb7b00b7318dc21f431f8c15fca3bc", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/kyujinpy/Mistral-7B-Ko-v2/result_2023-11-17 02:55:02.json b/kyujinpy/Mistral-7B-Ko-v2/result_2023-11-17 02:55:02.json new file mode 100644 index 0000000000000000000000000000000000000000..b807c653bbc80a57a3210718d8d6fad1cc6bee4b --- /dev/null +++ b/kyujinpy/Mistral-7B-Ko-v2/result_2023-11-17 02:55:02.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.33276450511945393, + "acc_stderr": 0.013769863046192304, + "acc_norm": 0.36689419795221845, + "acc_norm_stderr": 0.014084133118104296 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3646683927504481, + "acc_stderr": 
0.004803533333364229, + "acc_norm": 0.4689304919338777, + "acc_norm_stderr": 0.004980138679161039 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.47953216374269003, + "acc_stderr": 0.0383161053282193, + "acc_norm": 0.47953216374269003, + "acc_norm_stderr": 0.0383161053282193 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5339805825242718, + "acc_stderr": 0.04939291447273481, + "acc_norm": 0.5339805825242718, + "acc_norm_stderr": 0.04939291447273481 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.44572158365261816, + "acc_stderr": 0.017774297282479506, + "acc_norm": 0.44572158365261816, + "acc_norm_stderr": 0.017774297282479506 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.04171654161354544, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.04171654161354544 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3872340425531915, + "acc_stderr": 0.03184389265339526, + "acc_norm": 0.3872340425531915, + "acc_norm_stderr": 0.03184389265339526 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.40963855421686746, + "acc_stderr": 0.03828401115079023, + "acc_norm": 0.40963855421686746, + "acc_norm_stderr": 0.03828401115079023 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4212218649517685, + "acc_stderr": 0.02804339985821063, + "acc_norm": 0.4212218649517685, + "acc_norm_stderr": 0.02804339985821063 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4170403587443946, + "acc_stderr": 0.03309266936071721, + "acc_norm": 0.4170403587443946, + "acc_norm_stderr": 0.03309266936071721 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48091603053435117, + "acc_stderr": 0.04382094705550988, + "acc_norm": 0.48091603053435117, + "acc_norm_stderr": 0.04382094705550988 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.41, + 
"acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.47474747474747475, + "acc_stderr": 0.03557806245087314, + "acc_norm": 0.47474747474747475, + "acc_norm_stderr": 0.03557806245087314 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.04104269211806232, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.04104269211806232 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.042207736591714534, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.042207736591714534 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.39915966386554624, + "acc_stderr": 0.03181110032413925, + "acc_norm": 0.39915966386554624, + "acc_norm_stderr": 0.03181110032413925 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4025641025641026, + "acc_stderr": 0.024864995159767762, + "acc_norm": 0.4025641025641026, + "acc_norm_stderr": 0.024864995159767762 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.57, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.57, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.04766075165356461, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.04766075165356461 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3842364532019704, + "acc_stderr": 0.03422398565657551, + "acc_norm": 0.3842364532019704, + "acc_norm_stderr": 0.03422398565657551 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.432258064516129, + "acc_stderr": 0.028181739720019413, + "acc_norm": 0.432258064516129, + "acc_norm_stderr": 0.028181739720019413 + }, + 
"harness|ko_mmlu_marketing|5": { + "acc": 0.6752136752136753, + "acc_stderr": 0.03067902276549883, + "acc_norm": 0.6752136752136753, + "acc_norm_stderr": 0.03067902276549883 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3433962264150943, + "acc_stderr": 0.029224526469124792, + "acc_norm": 0.3433962264150943, + "acc_norm_stderr": 0.029224526469124792 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.42727272727272725, + "acc_stderr": 0.04738198703545483, + "acc_norm": 0.42727272727272725, + "acc_norm_stderr": 0.04738198703545483 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3074074074074074, + "acc_stderr": 0.028133252578815642, + "acc_norm": 0.3074074074074074, + "acc_norm_stderr": 0.028133252578815642 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2781456953642384, + "acc_stderr": 0.03658603262763743, + "acc_norm": 0.2781456953642384, + "acc_norm_stderr": 0.03658603262763743 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5621890547263682, + "acc_stderr": 0.035080801121998406, + "acc_norm": 0.5621890547263682, + "acc_norm_stderr": 0.035080801121998406 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.32947976878612717, + "acc_stderr": 0.03583901754736411, + "acc_norm": 0.32947976878612717, + "acc_norm_stderr": 0.03583901754736411 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.02459497512892094, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.02459497512892094 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.03942082639927213, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.03942082639927213 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.57, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.57, + 
"acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5173410404624278, + "acc_stderr": 0.026902900458666647, + "acc_norm": 0.5173410404624278, + "acc_norm_stderr": 0.026902900458666647 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.44785276073619634, + "acc_stderr": 0.03906947479456602, + "acc_norm": 0.44785276073619634, + "acc_norm_stderr": 0.03906947479456602 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.42901234567901236, + "acc_stderr": 0.027538925613470863, + "acc_norm": 0.42901234567901236, + "acc_norm_stderr": 0.027538925613470863 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.43005181347150256, + "acc_stderr": 0.03572954333144807, + "acc_norm": 0.43005181347150256, + "acc_norm_stderr": 0.03572954333144807 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.38596491228070173, + "acc_stderr": 0.04579639422070435, + "acc_norm": 0.38596491228070173, + "acc_norm_stderr": 0.04579639422070435 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.44036697247706424, + "acc_stderr": 0.02128431062376155, + "acc_norm": 0.44036697247706424, + "acc_norm_stderr": 0.02128431062376155 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3253968253968254, + "acc_stderr": 0.04190596438871136, + "acc_norm": 0.3253968253968254, + "acc_norm_stderr": 0.04190596438871136 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4542483660130719, + "acc_stderr": 0.028509807802626564, + "acc_norm": 0.4542483660130719, + "acc_norm_stderr": 0.028509807802626564 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5867768595041323, + "acc_stderr": 0.04495087843548408, + 
"acc_norm": 0.5867768595041323, + "acc_norm_stderr": 0.04495087843548408 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.35526315789473684, + "acc_stderr": 0.03894734487013316, + "acc_norm": 0.35526315789473684, + "acc_norm_stderr": 0.03894734487013316 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3382352941176471, + "acc_stderr": 0.019139943748487022, + "acc_norm": 0.3382352941176471, + "acc_norm_stderr": 0.019139943748487022 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.31560283687943264, + "acc_stderr": 0.027724989449509314, + "acc_norm": 0.31560283687943264, + "acc_norm_stderr": 0.027724989449509314 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.39285714285714285, + "acc_stderr": 0.04635550135609976, + "acc_norm": 0.39285714285714285, + "acc_norm_stderr": 0.04635550135609976 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.03293377139415191, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.03293377139415191 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24692737430167597, + "acc_stderr": 0.014422292204808848, + "acc_norm": 0.24692737430167597, + "acc_norm_stderr": 0.014422292204808848 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.34191176470588236, + "acc_stderr": 0.028814722422254184, + "acc_norm": 0.34191176470588236, + "acc_norm_stderr": 0.028814722422254184 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.39183673469387753, + "acc_stderr": 0.03125127591089166, + "acc_norm": 0.39183673469387753, + "acc_norm_stderr": 0.03125127591089166 + }, + 
"harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5485232067510548, + "acc_stderr": 0.032393600173974704, + "acc_norm": 0.5485232067510548, + "acc_norm_stderr": 0.032393600173974704 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.31029986962190353, + "acc_stderr": 0.011815439293469813, + "acc_norm": 0.31029986962190353, + "acc_norm_stderr": 0.011815439293469813 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.030964517926923393, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.030964517926923393 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2545454545454545, + "acc_stderr": 0.03401506715249039, + "acc_norm": 0.2545454545454545, + "acc_norm_stderr": 0.03401506715249039 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2802937576499388, + "mc1_stderr": 0.015723139524608746, + "mc2": 0.429618345662767, + "mc2_stderr": 0.015308199749335972 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3577331759149941, + "acc_stderr": 0.016479808935749976, + "acc_norm": 0.4935064935064935, + "acc_norm_stderr": 0.017188904359077304 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 
1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "kyujinpy/Mistral-7B-Ko-v2", + "model_sha": "7a9974a87cb1ec441eb64b1bde9a4ab2ad76db4c", + "model_dtype": "torch.float16", + 
"lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/kyujinpy/Mistral-7B-Ko-v3/result_2023-11-24 08:51:45.json b/kyujinpy/Mistral-7B-Ko-v3/result_2023-11-24 08:51:45.json new file mode 100644 index 0000000000000000000000000000000000000000..efdeb727b0845c3b160e1b32288db7c6547b3604 --- /dev/null +++ b/kyujinpy/Mistral-7B-Ko-v3/result_2023-11-24 08:51:45.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3455631399317406, + "acc_stderr": 0.01389693846114568, + "acc_norm": 0.3873720136518771, + "acc_norm_stderr": 0.014235872487909874 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3736307508464449, + "acc_stderr": 0.00482778628907485, + "acc_norm": 0.48615813582951606, + "acc_norm_stderr": 0.004987868988629998 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4853801169590643, + "acc_stderr": 0.038331852752130205, + "acc_norm": 0.4853801169590643, + "acc_norm_stderr": 0.038331852752130205 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5825242718446602, + "acc_stderr": 0.048828405482122375, + "acc_norm": 0.5825242718446602, + "acc_norm_stderr": 0.048828405482122375 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.48020434227330777, + "acc_stderr": 0.017865944827291612, + "acc_norm": 0.48020434227330777, + "acc_norm_stderr": 0.017865944827291612 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4222222222222222, + "acc_stderr": 0.04266763404099582, + "acc_norm": 0.4222222222222222, + "acc_norm_stderr": 0.04266763404099582 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.425531914893617, + "acc_stderr": 0.03232146916224468, + "acc_norm": 0.425531914893617, + "acc_norm_stderr": 0.03232146916224468 + }, + "harness|ko_mmlu_virology|5": { 
+ "acc": 0.43373493975903615, + "acc_stderr": 0.03858158940685515, + "acc_norm": 0.43373493975903615, + "acc_norm_stderr": 0.03858158940685515 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5176848874598071, + "acc_stderr": 0.02838032284907713, + "acc_norm": 0.5176848874598071, + "acc_norm_stderr": 0.02838032284907713 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4663677130044843, + "acc_stderr": 0.033481800170603065, + "acc_norm": 0.4663677130044843, + "acc_norm_stderr": 0.033481800170603065 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4732824427480916, + "acc_stderr": 0.04379024936553893, + "acc_norm": 0.4732824427480916, + "acc_norm_stderr": 0.04379024936553893 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5606060606060606, + "acc_stderr": 0.035360859475294805, + "acc_norm": 0.5606060606060606, + "acc_norm_stderr": 0.035360859475294805 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.45517241379310347, + "acc_stderr": 0.04149886942192117, + "acc_norm": 0.45517241379310347, + "acc_norm_stderr": 0.04149886942192117 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237655, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237655 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5210084033613446, + "acc_stderr": 0.03244980849990029, + "acc_norm": 0.5210084033613446, + "acc_norm_stderr": 0.03244980849990029 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.45897435897435895, + "acc_stderr": 0.025265525491284295, + "acc_norm": 0.45897435897435895, + "acc_norm_stderr": 0.025265525491284295 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.57, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.57, + "acc_norm_stderr": 0.049756985195624284 
+ }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5277777777777778, + "acc_stderr": 0.04826217294139894, + "acc_norm": 0.5277777777777778, + "acc_norm_stderr": 0.04826217294139894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.03481904844438803, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.03481904844438803 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.45806451612903226, + "acc_stderr": 0.028343787250540636, + "acc_norm": 0.45806451612903226, + "acc_norm_stderr": 0.028343787250540636 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7478632478632479, + "acc_stderr": 0.028447965476231022, + "acc_norm": 0.7478632478632479, + "acc_norm_stderr": 0.028447965476231022 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.43018867924528303, + "acc_stderr": 0.030471445867183235, + "acc_norm": 0.43018867924528303, + "acc_norm_stderr": 0.030471445867183235 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5181818181818182, + "acc_stderr": 0.04785964010794915, + "acc_norm": 0.5181818181818182, + "acc_norm_stderr": 0.04785964010794915 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.029443169323031537, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.029443169323031537 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + "acc_stderr": 0.037345356767871984, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.037345356767871984 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6218905472636815, + "acc_stderr": 0.034288678487786564, + "acc_norm": 0.6218905472636815, + "acc_norm_stderr": 0.034288678487786564 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.43352601156069365, + "acc_stderr": 0.03778621079092055, + "acc_norm": 
0.43352601156069365, + "acc_norm_stderr": 0.03778621079092055 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.37566137566137564, + "acc_stderr": 0.024942368931159784, + "acc_norm": 0.37566137566137564, + "acc_norm_stderr": 0.024942368931159784 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3472222222222222, + "acc_stderr": 0.03981240543717861, + "acc_norm": 0.3472222222222222, + "acc_norm_stderr": 0.03981240543717861 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237101, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237101 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5520231213872833, + "acc_stderr": 0.02677299065336182, + "acc_norm": 0.5520231213872833, + "acc_norm_stderr": 0.02677299065336182 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4785276073619632, + "acc_stderr": 0.03924746876751129, + "acc_norm": 0.4785276073619632, + "acc_norm_stderr": 0.03924746876751129 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4537037037037037, + "acc_stderr": 0.0277012284685426, + "acc_norm": 0.4537037037037037, + "acc_norm_stderr": 0.0277012284685426 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5129533678756477, + "acc_stderr": 0.0360722806104775, + "acc_norm": 0.5129533678756477, + "acc_norm_stderr": 0.0360722806104775 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2982456140350877, + "acc_stderr": 0.04303684033537316, + "acc_norm": 0.2982456140350877, + "acc_norm_stderr": 0.04303684033537316 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.4972477064220184, + "acc_stderr": 0.021436998359765324, + 
"acc_norm": 0.4972477064220184, + "acc_norm_stderr": 0.021436998359765324 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4126984126984127, + "acc_stderr": 0.04403438954768177, + "acc_norm": 0.4126984126984127, + "acc_norm_stderr": 0.04403438954768177 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4934640522875817, + "acc_stderr": 0.02862747055055606, + "acc_norm": 0.4934640522875817, + "acc_norm_stderr": 0.02862747055055606 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6942148760330579, + "acc_stderr": 0.04205953933884123, + "acc_norm": 0.6942148760330579, + "acc_norm_stderr": 0.04205953933884123 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3881578947368421, + "acc_stderr": 0.03965842097512744, + "acc_norm": 0.3881578947368421, + "acc_norm_stderr": 0.03965842097512744 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.39052287581699346, + "acc_stderr": 0.019737008998094597, + "acc_norm": 0.39052287581699346, + "acc_norm_stderr": 0.019737008998094597 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.37943262411347517, + "acc_stderr": 0.028947338851614105, + "acc_norm": 0.37943262411347517, + "acc_norm_stderr": 0.028947338851614105 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.39285714285714285, + "acc_stderr": 0.04635550135609976, + "acc_norm": 0.39285714285714285, + "acc_norm_stderr": 0.04635550135609976 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.03256850570293647, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.03256850570293647 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27039106145251396, + "acc_stderr": 0.014854993938010088, + "acc_norm": 0.27039106145251396, + "acc_norm_stderr": 0.014854993938010088 + }, + "harness|ko_mmlu_college_computer_science|5": { + 
"acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.63, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.63, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3897058823529412, + "acc_stderr": 0.029624663581159706, + "acc_norm": 0.3897058823529412, + "acc_norm_stderr": 0.029624663581159706 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5469387755102041, + "acc_stderr": 0.031867859300041275, + "acc_norm": 0.5469387755102041, + "acc_norm_stderr": 0.031867859300041275 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5949367088607594, + "acc_stderr": 0.03195514741370672, + "acc_norm": 0.5949367088607594, + "acc_norm_stderr": 0.03195514741370672 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3396349413298566, + "acc_stderr": 0.01209559250693197, + "acc_norm": 0.3396349413298566, + "acc_norm_stderr": 0.01209559250693197 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.47549019607843135, + "acc_stderr": 0.035050931943487976, + "acc_norm": 0.47549019607843135, + "acc_norm_stderr": 0.035050931943487976 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.46060606060606063, + "acc_stderr": 0.03892207016552013, + "acc_norm": 0.46060606060606063, + "acc_norm_stderr": 0.03892207016552013 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.27906976744186046, + "mc1_stderr": 0.015702107090627877, + "mc2": 0.43927914817995606, + "mc2_stderr": 0.015458133669329948 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.48406139315230223, + "acc_stderr": 0.017181617837190192, + "acc_norm": 0.5678866587957497, + "acc_norm_stderr": 0.017031170198851742 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + 
"harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 
1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "kyujinpy/Mistral-7B-Ko-v3", + "model_sha": "d55ba861816137dd858e44d1db4e4dcefae09f55", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/kyujinpy/Mistral-7B-ko-v1/result_2023-11-17 02:55:15.json b/kyujinpy/Mistral-7B-ko-v1/result_2023-11-17 02:55:15.json new file mode 100644 index 0000000000000000000000000000000000000000..08d88083069ed8ca5be65b5487de48ba996e1d58 --- /dev/null +++ b/kyujinpy/Mistral-7B-ko-v1/result_2023-11-17 02:55:15.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3293515358361775, + "acc_stderr": 0.013734057652635473, + "acc_norm": 0.37542662116040953, + "acc_norm_stderr": 0.014150631435111728 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3654650468034256, + "acc_stderr": 0.004805761513803415, + "acc_norm": 0.4763991236805417, + "acc_norm_stderr": 0.004984219681732662 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5029239766081871, + "acc_stderr": 0.03834759370936839, + "acc_norm": 0.5029239766081871, + "acc_norm_stderr": 0.03834759370936839 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5533980582524272, + "acc_stderr": 0.04922424153458935, + "acc_norm": 0.5533980582524272, + "acc_norm_stderr": 0.04922424153458935 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.44189016602809705, + 
"acc_stderr": 0.01775880053421442, + "acc_norm": 0.44189016602809705, + "acc_norm_stderr": 0.01775880053421442 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.362962962962963, + "acc_stderr": 0.04153948404742398, + "acc_norm": 0.362962962962963, + "acc_norm_stderr": 0.04153948404742398 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39574468085106385, + "acc_stderr": 0.031967586978353627, + "acc_norm": 0.39574468085106385, + "acc_norm_stderr": 0.031967586978353627 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.40963855421686746, + "acc_stderr": 0.03828401115079023, + "acc_norm": 0.40963855421686746, + "acc_norm_stderr": 0.03828401115079023 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5016077170418006, + "acc_stderr": 0.02839794490780661, + "acc_norm": 0.5016077170418006, + "acc_norm_stderr": 0.02839794490780661 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4798206278026906, + "acc_stderr": 0.033530461674123, + "acc_norm": 0.4798206278026906, + "acc_norm_stderr": 0.033530461674123 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4351145038167939, + "acc_stderr": 0.04348208051644858, + "acc_norm": 0.4351145038167939, + "acc_norm_stderr": 0.04348208051644858 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5353535353535354, + "acc_stderr": 0.03553436368828063, + "acc_norm": 0.5353535353535354, + "acc_norm_stderr": 0.03553436368828063 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4206896551724138, + "acc_stderr": 0.0411391498118926, + "acc_norm": 0.4206896551724138, + "acc_norm_stderr": 0.0411391498118926 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + 
"acc_stderr": 0.04092563958237655, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237655 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.44537815126050423, + "acc_stderr": 0.032284106267163895, + "acc_norm": 0.44537815126050423, + "acc_norm_stderr": 0.032284106267163895 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4641025641025641, + "acc_stderr": 0.025285585990017827, + "acc_norm": 0.4641025641025641, + "acc_norm_stderr": 0.025285585990017827 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.53, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.53, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3694581280788177, + "acc_stderr": 0.03395970381998574, + "acc_norm": 0.3694581280788177, + "acc_norm_stderr": 0.03395970381998574 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4483870967741935, + "acc_stderr": 0.028292056830112735, + "acc_norm": 0.4483870967741935, + "acc_norm_stderr": 0.028292056830112735 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6965811965811965, + "acc_stderr": 0.030118210106942645, + "acc_norm": 0.6965811965811965, + "acc_norm_stderr": 0.030118210106942645 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4490566037735849, + "acc_stderr": 0.030612730713641095, + "acc_norm": 0.4490566037735849, + "acc_norm_stderr": 0.030612730713641095 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.0478833976870286, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.0478833976870286 + }, + 
"harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.32592592592592595, + "acc_stderr": 0.028578348365473072, + "acc_norm": 0.32592592592592595, + "acc_norm_stderr": 0.028578348365473072 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.271523178807947, + "acc_stderr": 0.036313298039696525, + "acc_norm": 0.271523178807947, + "acc_norm_stderr": 0.036313298039696525 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6218905472636815, + "acc_stderr": 0.034288678487786564, + "acc_norm": 0.6218905472636815, + "acc_norm_stderr": 0.034288678487786564 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.37572254335260113, + "acc_stderr": 0.03692820767264867, + "acc_norm": 0.37572254335260113, + "acc_norm_stderr": 0.03692820767264867 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.36507936507936506, + "acc_stderr": 0.024796060602699954, + "acc_norm": 0.36507936507936506, + "acc_norm_stderr": 0.024796060602699954 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3402777777777778, + "acc_stderr": 0.039621355734862175, + "acc_norm": 0.3402777777777778, + "acc_norm_stderr": 0.039621355734862175 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.55, + "acc_stderr": 0.05, + "acc_norm": 0.55, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4797687861271676, + "acc_stderr": 0.026897049996382875, + "acc_norm": 0.4797687861271676, + "acc_norm_stderr": 0.026897049996382875 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.48466257668711654, + "acc_stderr": 0.03926522378708843, + "acc_norm": 0.48466257668711654, + "acc_norm_stderr": 0.03926522378708843 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.37962962962962965, + "acc_stderr": 0.027002521034516478, + "acc_norm": 0.37962962962962965, + "acc_norm_stderr": 0.027002521034516478 
+ }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.44559585492227977, + "acc_stderr": 0.0358701498607566, + "acc_norm": 0.44559585492227977, + "acc_norm_stderr": 0.0358701498607566 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.04185774424022057, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.04185774424022057 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.46605504587155966, + "acc_stderr": 0.02138786335035399, + "acc_norm": 0.46605504587155966, + "acc_norm_stderr": 0.02138786335035399 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.042163702135578345, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.042163702135578345 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.43137254901960786, + "acc_stderr": 0.028358956313423556, + "acc_norm": 0.43137254901960786, + "acc_norm_stderr": 0.028358956313423556 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6776859504132231, + "acc_stderr": 0.04266416363352168, + "acc_norm": 0.6776859504132231, + "acc_norm_stderr": 0.04266416363352168 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3881578947368421, + "acc_stderr": 0.03965842097512744, + "acc_norm": 0.3881578947368421, + "acc_norm_stderr": 0.03965842097512744 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3627450980392157, + "acc_stderr": 0.01945076843250551, + "acc_norm": 0.3627450980392157, + "acc_norm_stderr": 0.01945076843250551 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3546099290780142, + "acc_stderr": 0.02853865002887864, + "acc_norm": 0.3546099290780142, 
+ "acc_norm_stderr": 0.02853865002887864 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.32142857142857145, + "acc_stderr": 0.04432804055291519, + "acc_norm": 0.32142857142857145, + "acc_norm_stderr": 0.04432804055291519 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.03275773486100998, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.03275773486100998 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2636871508379888, + "acc_stderr": 0.014736926383761968, + "acc_norm": 0.2636871508379888, + "acc_norm_stderr": 0.014736926383761968 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.41, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3602941176470588, + "acc_stderr": 0.029163128570670733, + "acc_norm": 0.3602941176470588, + "acc_norm_stderr": 0.029163128570670733 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.34285714285714286, + "acc_stderr": 0.030387262919547735, + "acc_norm": 0.34285714285714286, + "acc_norm_stderr": 0.030387262919547735 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5485232067510548, + "acc_stderr": 0.032393600173974704, + "acc_norm": 0.5485232067510548, + "acc_norm_stderr": 0.032393600173974704 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.33376792698826596, + "acc_stderr": 0.012043812655846146, + "acc_norm": 0.33376792698826596, + "acc_norm_stderr": 0.012043812655846146 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4362745098039216, + "acc_stderr": 0.03480693138457039, + "acc_norm": 0.4362745098039216, + "acc_norm_stderr": 0.03480693138457039 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 
0.3575757575757576, + "acc_stderr": 0.03742597043806585, + "acc_norm": 0.3575757575757576, + "acc_norm_stderr": 0.03742597043806585 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.26805385556915545, + "mc1_stderr": 0.015506204722834553, + "mc2": 0.43674349953921776, + "mc2_stderr": 0.015557097313851508 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.49940968122786306, + "acc_stderr": 0.01719034212344865, + "acc_norm": 0.5796930342384888, + "acc_norm_stderr": 0.016970598281177713 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + 
"harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "kyujinpy/Mistral-7B-ko-v1", + "model_sha": "7e4aaac38b8c44fca2cf9b90d82504dbf1b6b66c", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/kyujinpy/Mistral-koplatypus-v1/result_2023-11-08 15:04:52.json b/kyujinpy/Mistral-koplatypus-v1/result_2023-11-08 15:04:52.json new file mode 100644 index 0000000000000000000000000000000000000000..964e09b1cee690b2b795d0de440552b2dd5b61a3 --- /dev/null +++ b/kyujinpy/Mistral-koplatypus-v1/result_2023-11-08 15:04:52.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3378839590443686, + "acc_stderr": 
0.013822047922283507, + "acc_norm": 0.3771331058020478, + "acc_norm_stderr": 0.014163366896192587 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3685520812587134, + "acc_stderr": 0.004814261966376847, + "acc_norm": 0.4778928500298745, + "acc_norm_stderr": 0.004984901752846396 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5087719298245614, + "acc_stderr": 0.038342347441649924, + "acc_norm": 0.5087719298245614, + "acc_norm_stderr": 0.038342347441649924 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6019417475728155, + "acc_stderr": 0.04846748253977238, + "acc_norm": 0.6019417475728155, + "acc_norm_stderr": 0.04846748253977238 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4623243933588761, + "acc_stderr": 0.017829131764287198, + "acc_norm": 0.4623243933588761, + "acc_norm_stderr": 0.017829131764287198 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34074074074074073, + "acc_stderr": 0.040943762699967946, + "acc_norm": 0.34074074074074073, + "acc_norm_stderr": 0.040943762699967946 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4127659574468085, + "acc_stderr": 0.03218471141400351, + "acc_norm": 0.4127659574468085, + "acc_norm_stderr": 0.03218471141400351 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.46987951807228917, + "acc_stderr": 0.03885425420866766, + "acc_norm": 0.46987951807228917, + "acc_norm_stderr": 0.03885425420866766 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4887459807073955, + "acc_stderr": 0.028390897396863526, + "acc_norm": 0.4887459807073955, + "acc_norm_stderr": 0.028390897396863526 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5022421524663677, + "acc_stderr": 0.03355746535223264, + "acc_norm": 0.5022421524663677, + "acc_norm_stderr": 0.03355746535223264 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48091603053435117, + 
"acc_stderr": 0.04382094705550989, + "acc_norm": 0.48091603053435117, + "acc_norm_stderr": 0.04382094705550989 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5050505050505051, + "acc_stderr": 0.035621707606254015, + "acc_norm": 0.5050505050505051, + "acc_norm_stderr": 0.035621707606254015 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4827586206896552, + "acc_stderr": 0.04164188720169377, + "acc_norm": 0.4827586206896552, + "acc_norm_stderr": 0.04164188720169377 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.04280105837364395, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.04280105837364395 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5084033613445378, + "acc_stderr": 0.03247390276569669, + "acc_norm": 0.5084033613445378, + "acc_norm_stderr": 0.03247390276569669 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4564102564102564, + "acc_stderr": 0.02525448542479961, + "acc_norm": 0.4564102564102564, + "acc_norm_stderr": 0.02525448542479961 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5370370370370371, + "acc_stderr": 0.04820403072760627, + "acc_norm": 0.5370370370370371, + "acc_norm_stderr": 0.04820403072760627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.03481904844438803, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.03481904844438803 + }, + "harness|ko_mmlu_high_school_biology|5": { + 
"acc": 0.4774193548387097, + "acc_stderr": 0.028414985019707868, + "acc_norm": 0.4774193548387097, + "acc_norm_stderr": 0.028414985019707868 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7094017094017094, + "acc_stderr": 0.029745048572674064, + "acc_norm": 0.7094017094017094, + "acc_norm_stderr": 0.029745048572674064 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.39245283018867927, + "acc_stderr": 0.030052580579557835, + "acc_norm": 0.39245283018867927, + "acc_norm_stderr": 0.030052580579557835 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5363636363636364, + "acc_stderr": 0.04776449162396197, + "acc_norm": 0.5363636363636364, + "acc_norm_stderr": 0.04776449162396197 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3074074074074074, + "acc_stderr": 0.028133252578815635, + "acc_norm": 0.3074074074074074, + "acc_norm_stderr": 0.028133252578815635 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.26490066225165565, + "acc_stderr": 0.03603038545360385, + "acc_norm": 0.26490066225165565, + "acc_norm_stderr": 0.03603038545360385 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6417910447761194, + "acc_stderr": 0.03390393042268814, + "acc_norm": 0.6417910447761194, + "acc_norm_stderr": 0.03390393042268814 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3236994219653179, + "acc_stderr": 0.03567603799639171, + "acc_norm": 0.3236994219653179, + "acc_norm_stderr": 0.03567603799639171 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.37566137566137564, + "acc_stderr": 0.02494236893115978, + "acc_norm": 0.37566137566137564, + "acc_norm_stderr": 0.02494236893115978 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3680555555555556, + "acc_stderr": 0.04032999053960718, + "acc_norm": 0.3680555555555556, + "acc_norm_stderr": 0.04032999053960718 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 
0.045604802157206845 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.56, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.56, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5289017341040463, + "acc_stderr": 0.026874085883518348, + "acc_norm": 0.5289017341040463, + "acc_norm_stderr": 0.026874085883518348 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.48466257668711654, + "acc_stderr": 0.03926522378708843, + "acc_norm": 0.48466257668711654, + "acc_norm_stderr": 0.03926522378708843 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.43209876543209874, + "acc_stderr": 0.02756301097160668, + "acc_norm": 0.43209876543209874, + "acc_norm_stderr": 0.02756301097160668 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5440414507772021, + "acc_stderr": 0.03594413711272436, + "acc_norm": 0.5440414507772021, + "acc_norm_stderr": 0.03594413711272436 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.34210526315789475, + "acc_stderr": 0.04462917535336937, + "acc_norm": 0.34210526315789475, + "acc_norm_stderr": 0.04462917535336937 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.48990825688073397, + "acc_stderr": 0.02143295620345332, + "acc_norm": 0.48990825688073397, + "acc_norm_stderr": 0.02143295620345332 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.04360314860077459, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.04360314860077459 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5032679738562091, + "acc_stderr": 0.028629305194003543, + "acc_norm": 0.5032679738562091, + "acc_norm_stderr": 0.028629305194003543 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + 
"acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6198347107438017, + "acc_stderr": 0.04431324501968432, + "acc_norm": 0.6198347107438017, + "acc_norm_stderr": 0.04431324501968432 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4342105263157895, + "acc_stderr": 0.040335656678483184, + "acc_norm": 0.4342105263157895, + "acc_norm_stderr": 0.040335656678483184 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4264705882352941, + "acc_stderr": 0.020007912739359365, + "acc_norm": 0.4264705882352941, + "acc_norm_stderr": 0.020007912739359365 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3546099290780142, + "acc_stderr": 0.028538650028878634, + "acc_norm": 0.3546099290780142, + "acc_norm_stderr": 0.028538650028878634 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.48214285714285715, + "acc_stderr": 0.047427623612430116, + "acc_norm": 0.48214285714285715, + "acc_norm_stderr": 0.047427623612430116 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.03362277436608042, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.03362277436608042 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2748603351955307, + "acc_stderr": 0.014931316703220501, + "acc_norm": 0.2748603351955307, + "acc_norm_stderr": 0.014931316703220501 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.35294117647058826, + "acc_stderr": 0.02902942281568141, + "acc_norm": 0.35294117647058826, + "acc_norm_stderr": 0.02902942281568141 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5061224489795918, + 
"acc_stderr": 0.03200682020163908, + "acc_norm": 0.5061224489795918, + "acc_norm_stderr": 0.03200682020163908 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6329113924050633, + "acc_stderr": 0.03137624072561619, + "acc_norm": 0.6329113924050633, + "acc_norm_stderr": 0.03137624072561619 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.37614080834419816, + "acc_stderr": 0.012372214430599816, + "acc_norm": 0.37614080834419816, + "acc_norm_stderr": 0.012372214430599816 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.03410785338904719, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.03410785338904719 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4303030303030303, + "acc_stderr": 0.03866225962879077, + "acc_norm": 0.4303030303030303, + "acc_norm_stderr": 0.03866225962879077 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.27906976744186046, + "mc1_stderr": 0.015702107090627877, + "mc2": 0.45967145819252797, + "mc2_stderr": 0.015531270159359699 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.36481700118063753, + "acc_stderr": 0.016550144337046595, + "acc_norm": 0.4214876033057851, + "acc_norm_stderr": 0.016977101932601525 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + 
"harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + 
"model_name": "kyujinpy/Mistral-koplatypus-v1", + "model_sha": "520c485adecb47aefb23b0b3f5fb2240886651d8", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/kyujinpy/Sakura-SOLAR-Instruct-DPO-v2/result_2024-01-04 07:20:58.json b/kyujinpy/Sakura-SOLAR-Instruct-DPO-v2/result_2024-01-04 07:20:58.json new file mode 100644 index 0000000000000000000000000000000000000000..cb2e47a5cce493b607c7a819e07a9e8e3ab250b3 --- /dev/null +++ b/kyujinpy/Sakura-SOLAR-Instruct-DPO-v2/result_2024-01-04 07:20:58.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3873720136518771, + "acc_stderr": 0.014235872487909865, + "acc_norm": 0.4735494880546075, + "acc_norm_stderr": 0.014590931358120174 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4012148974307907, + "acc_stderr": 0.004891426533390627, + "acc_norm": 0.5374427404899422, + "acc_norm_stderr": 0.004975770805464644 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5730994152046783, + "acc_stderr": 0.03793620616529916, + "acc_norm": 0.5730994152046783, + "acc_norm_stderr": 0.03793620616529916 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6116504854368932, + "acc_stderr": 0.04825729337356389, + "acc_norm": 0.6116504854368932, + "acc_norm_stderr": 0.04825729337356389 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5874840357598978, + "acc_stderr": 0.017604149108671915, + "acc_norm": 0.5874840357598978, + "acc_norm_stderr": 0.017604149108671915 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.362962962962963, + "acc_stderr": 0.041539484047424004, + "acc_norm": 0.362962962962963, + "acc_norm_stderr": 0.041539484047424004 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_conceptual_physics|5": { + 
"acc": 0.4085106382978723, + "acc_stderr": 0.03213418026701576, + "acc_norm": 0.4085106382978723, + "acc_norm_stderr": 0.03213418026701576 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42771084337349397, + "acc_stderr": 0.038515976837185335, + "acc_norm": 0.42771084337349397, + "acc_norm_stderr": 0.038515976837185335 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5659163987138264, + "acc_stderr": 0.028150232244535604, + "acc_norm": 0.5659163987138264, + "acc_norm_stderr": 0.028150232244535604 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5246636771300448, + "acc_stderr": 0.03351695167652628, + "acc_norm": 0.5246636771300448, + "acc_norm_stderr": 0.03351695167652628 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5877862595419847, + "acc_stderr": 0.04317171194870255, + "acc_norm": 0.5877862595419847, + "acc_norm_stderr": 0.04317171194870255 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6818181818181818, + "acc_stderr": 0.03318477333845331, + "acc_norm": 0.6818181818181818, + "acc_norm_stderr": 0.03318477333845331 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4896551724137931, + "acc_stderr": 0.04165774775728763, + "acc_norm": 0.4896551724137931, + "acc_norm_stderr": 0.04165774775728763 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04690650298201942, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04690650298201942 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.542016806722689, + "acc_stderr": 0.03236361111951941, + "acc_norm": 0.542016806722689, + "acc_norm_stderr": 0.03236361111951941 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5282051282051282, + "acc_stderr": 0.02531063925493387, + "acc_norm": 0.5282051282051282, + "acc_norm_stderr": 
0.02531063925493387 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.55, + "acc_stderr": 0.04999999999999999, + "acc_norm": 0.55, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5925925925925926, + "acc_stderr": 0.047500773411999854, + "acc_norm": 0.5925925925925926, + "acc_norm_stderr": 0.047500773411999854 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3842364532019704, + "acc_stderr": 0.0342239856565755, + "acc_norm": 0.3842364532019704, + "acc_norm_stderr": 0.0342239856565755 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5258064516129032, + "acc_stderr": 0.028406095057653326, + "acc_norm": 0.5258064516129032, + "acc_norm_stderr": 0.028406095057653326 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7564102564102564, + "acc_stderr": 0.02812096650391439, + "acc_norm": 0.7564102564102564, + "acc_norm_stderr": 0.02812096650391439 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.47924528301886793, + "acc_stderr": 0.030746349975723463, + "acc_norm": 0.47924528301886793, + "acc_norm_stderr": 0.030746349975723463 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5363636363636364, + "acc_stderr": 0.04776449162396197, + "acc_norm": 0.5363636363636364, + "acc_norm_stderr": 0.04776449162396197 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3111111111111111, + "acc_stderr": 0.028226446749683515, + "acc_norm": 0.3111111111111111, + "acc_norm_stderr": 0.028226446749683515 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.038020397601079024, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.038020397601079024 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.681592039800995, + "acc_stderr": 0.03294118479054095, + "acc_norm": 
0.681592039800995, + "acc_norm_stderr": 0.03294118479054095 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4624277456647399, + "acc_stderr": 0.0380168510452446, + "acc_norm": 0.4624277456647399, + "acc_norm_stderr": 0.0380168510452446 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3862433862433862, + "acc_stderr": 0.025075981767601684, + "acc_norm": 0.3862433862433862, + "acc_norm_stderr": 0.025075981767601684 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4861111111111111, + "acc_stderr": 0.04179596617581, + "acc_norm": 0.4861111111111111, + "acc_norm_stderr": 0.04179596617581 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.68, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.68, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5664739884393064, + "acc_stderr": 0.02668013476167922, + "acc_norm": 0.5664739884393064, + "acc_norm_stderr": 0.02668013476167922 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5398773006134969, + "acc_stderr": 0.03915857291436972, + "acc_norm": 0.5398773006134969, + "acc_norm_stderr": 0.03915857291436972 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5771604938271605, + "acc_stderr": 0.027487472980871598, + "acc_norm": 0.5771604938271605, + "acc_norm_stderr": 0.027487472980871598 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.689119170984456, + "acc_stderr": 0.03340361906276586, + "acc_norm": 0.689119170984456, + "acc_norm_stderr": 0.03340361906276586 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.42105263157894735, + "acc_stderr": 0.046446020912223177, + "acc_norm": 
0.42105263157894735, + "acc_norm_stderr": 0.046446020912223177 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6201834862385321, + "acc_stderr": 0.020808825617866244, + "acc_norm": 0.6201834862385321, + "acc_norm_stderr": 0.020808825617866244 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.40476190476190477, + "acc_stderr": 0.04390259265377561, + "acc_norm": 0.40476190476190477, + "acc_norm_stderr": 0.04390259265377561 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5261437908496732, + "acc_stderr": 0.028590752958852394, + "acc_norm": 0.5261437908496732, + "acc_norm_stderr": 0.028590752958852394 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.71900826446281, + "acc_stderr": 0.04103203830514511, + "acc_norm": 0.71900826446281, + "acc_norm_stderr": 0.04103203830514511 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5526315789473685, + "acc_stderr": 0.0404633688397825, + "acc_norm": 0.5526315789473685, + "acc_norm_stderr": 0.0404633688397825 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.477124183006536, + "acc_stderr": 0.020206653187884786, + "acc_norm": 0.477124183006536, + "acc_norm_stderr": 0.020206653187884786 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.36879432624113473, + "acc_stderr": 0.028782227561347233, + "acc_norm": 0.36879432624113473, + "acc_norm_stderr": 0.028782227561347233 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.375, + "acc_stderr": 0.04595091388086298, + "acc_norm": 0.375, + "acc_norm_stderr": 0.04595091388086298 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4537037037037037, + "acc_stderr": 0.033953227263757976, + "acc_norm": 0.4537037037037037, + "acc_norm_stderr": 0.033953227263757976 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.33519553072625696, + "acc_stderr": 
0.015788007190185884, + "acc_norm": 0.33519553072625696, + "acc_norm_stderr": 0.015788007190185884 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.7, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.7, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5036764705882353, + "acc_stderr": 0.030372015885428195, + "acc_norm": 0.5036764705882353, + "acc_norm_stderr": 0.030372015885428195 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6326530612244898, + "acc_stderr": 0.030862144921087555, + "acc_norm": 0.6326530612244898, + "acc_norm_stderr": 0.030862144921087555 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6708860759493671, + "acc_stderr": 0.03058732629470237, + "acc_norm": 0.6708860759493671, + "acc_norm_stderr": 0.03058732629470237 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.39374185136897, + "acc_stderr": 0.012478532272564433, + "acc_norm": 0.39374185136897, + "acc_norm_stderr": 0.012478532272564433 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5882352941176471, + "acc_stderr": 0.03454236585380609, + "acc_norm": 0.5882352941176471, + "acc_norm_stderr": 0.03454236585380609 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6121212121212121, + "acc_stderr": 0.03804913653971009, + "acc_norm": 0.6121212121212121, + "acc_norm_stderr": 0.03804913653971009 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3671970624235006, + "mc1_stderr": 0.016874805001453178, + "mc2": 0.5293040633801356, + "mc2_stderr": 0.016443090504045955 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4805194805194805, + "acc_stderr": 0.01717730199234254, + "acc_norm": 0.48288075560802834, + "acc_norm_stderr": 0.01718027524608563 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + 
"harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + 
"harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "kyujinpy/Sakura-SOLAR-Instruct-DPO-v2", + "model_sha": "2bff2080a64687196315ebe04eebe2d1e2f04b0a", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/kyujinpy/Sakura-SOLAR-Instruct/result_2024-01-04 06:45:58.json b/kyujinpy/Sakura-SOLAR-Instruct/result_2024-01-04 06:45:58.json new file mode 100644 index 0000000000000000000000000000000000000000..56b892185ccf19282b2f6d2d431130da5af55b76 --- /dev/null +++ b/kyujinpy/Sakura-SOLAR-Instruct/result_2024-01-04 06:45:58.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.386518771331058, + "acc_stderr": 0.014230084761910467, + "acc_norm": 0.4761092150170648, + "acc_norm_stderr": 0.014594701798071654 + }, + "harness|ko_hellaswag|10": { + "acc": 0.40111531567416847, + "acc_stderr": 0.0048912261385780625, + "acc_norm": 0.5375423222465644, + "acc_norm_stderr": 0.0049756960762408434 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5614035087719298, + "acc_stderr": 0.038057975055904594, + "acc_norm": 0.5614035087719298, + "acc_norm_stderr": 0.038057975055904594 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6213592233009708, + "acc_stderr": 
0.04802694698258974, + "acc_norm": 0.6213592233009708, + "acc_norm_stderr": 0.04802694698258974 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5874840357598978, + "acc_stderr": 0.017604149108671918, + "acc_norm": 0.5874840357598978, + "acc_norm_stderr": 0.017604149108671918 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.362962962962963, + "acc_stderr": 0.041539484047424004, + "acc_norm": 0.362962962962963, + "acc_norm_stderr": 0.041539484047424004 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4127659574468085, + "acc_stderr": 0.03218471141400351, + "acc_norm": 0.4127659574468085, + "acc_norm_stderr": 0.03218471141400351 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.43373493975903615, + "acc_stderr": 0.03858158940685515, + "acc_norm": 0.43373493975903615, + "acc_norm_stderr": 0.03858158940685515 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.572347266881029, + "acc_stderr": 0.02809924077580956, + "acc_norm": 0.572347266881029, + "acc_norm_stderr": 0.02809924077580956 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5291479820627802, + "acc_stderr": 0.03350073248773404, + "acc_norm": 0.5291479820627802, + "acc_norm_stderr": 0.03350073248773404 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5877862595419847, + "acc_stderr": 0.04317171194870255, + "acc_norm": 0.5877862595419847, + "acc_norm_stderr": 0.04317171194870255 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6767676767676768, + "acc_stderr": 0.033322999210706444, + "acc_norm": 0.6767676767676768, + "acc_norm_stderr": 0.033322999210706444 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4827586206896552, + "acc_stderr": 
0.041641887201693775, + "acc_norm": 0.4827586206896552, + "acc_norm_stderr": 0.041641887201693775 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04690650298201942, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04690650298201942 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5546218487394958, + "acc_stderr": 0.03228410626716391, + "acc_norm": 0.5546218487394958, + "acc_norm_stderr": 0.03228410626716391 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5230769230769231, + "acc_stderr": 0.025323990861736246, + "acc_norm": 0.5230769230769231, + "acc_norm_stderr": 0.025323990861736246 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.55, + "acc_stderr": 0.04999999999999999, + "acc_norm": 0.55, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5925925925925926, + "acc_stderr": 0.04750077341199985, + "acc_norm": 0.5925925925925926, + "acc_norm_stderr": 0.04750077341199985 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3842364532019704, + "acc_stderr": 0.0342239856565755, + "acc_norm": 0.3842364532019704, + "acc_norm_stderr": 0.0342239856565755 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5290322580645161, + "acc_stderr": 0.028396016402761, + "acc_norm": 0.5290322580645161, + "acc_norm_stderr": 0.028396016402761 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7692307692307693, + "acc_stderr": 0.027601921381417618, + "acc_norm": 0.7692307692307693, + "acc_norm_stderr": 0.027601921381417618 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4679245283018868, + "acc_stderr": 0.030709486992556552, + "acc_norm": 0.4679245283018868, + "acc_norm_stderr": 0.030709486992556552 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 
0.5454545454545454, + "acc_stderr": 0.04769300568972744, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.04769300568972744 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3037037037037037, + "acc_stderr": 0.02803792996911499, + "acc_norm": 0.3037037037037037, + "acc_norm_stderr": 0.02803792996911499 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.03780445850526732, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.03780445850526732 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.681592039800995, + "acc_stderr": 0.03294118479054095, + "acc_norm": 0.681592039800995, + "acc_norm_stderr": 0.03294118479054095 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.45664739884393063, + "acc_stderr": 0.03798106566014498, + "acc_norm": 0.45664739884393063, + "acc_norm_stderr": 0.03798106566014498 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.025107425481137285, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.025107425481137285 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4791666666666667, + "acc_stderr": 0.041775789507399935, + "acc_norm": 0.4791666666666667, + "acc_norm_stderr": 0.041775789507399935 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.69, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.69, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5635838150289018, + "acc_stderr": 0.02670054542494369, + "acc_norm": 0.5635838150289018, + "acc_norm_stderr": 0.02670054542494369 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5398773006134969, + "acc_stderr": 0.03915857291436972, + "acc_norm": 0.5398773006134969, + "acc_norm_stderr": 0.03915857291436972 + }, + 
"harness|ko_mmlu_prehistory|5": { + "acc": 0.5802469135802469, + "acc_stderr": 0.02746009955700513, + "acc_norm": 0.5802469135802469, + "acc_norm_stderr": 0.02746009955700513 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6839378238341969, + "acc_stderr": 0.033553973696861736, + "acc_norm": 0.6839378238341969, + "acc_norm_stderr": 0.033553973696861736 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.42105263157894735, + "acc_stderr": 0.046446020912223177, + "acc_norm": 0.42105263157894735, + "acc_norm_stderr": 0.046446020912223177 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6220183486238532, + "acc_stderr": 0.02078918706672811, + "acc_norm": 0.6220183486238532, + "acc_norm_stderr": 0.02078918706672811 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4126984126984127, + "acc_stderr": 0.04403438954768177, + "acc_norm": 0.4126984126984127, + "acc_norm_stderr": 0.04403438954768177 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5130718954248366, + "acc_stderr": 0.028620130800700246, + "acc_norm": 0.5130718954248366, + "acc_norm_stderr": 0.028620130800700246 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7107438016528925, + "acc_stderr": 0.04139112727635463, + "acc_norm": 0.7107438016528925, + "acc_norm_stderr": 0.04139112727635463 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5526315789473685, + "acc_stderr": 0.0404633688397825, + "acc_norm": 0.5526315789473685, + "acc_norm_stderr": 0.0404633688397825 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.477124183006536, + "acc_stderr": 0.020206653187884786, + "acc_norm": 0.477124183006536, + "acc_norm_stderr": 
0.020206653187884786 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.36879432624113473, + "acc_stderr": 0.028782227561347233, + "acc_norm": 0.36879432624113473, + "acc_norm_stderr": 0.028782227561347233 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.38392857142857145, + "acc_stderr": 0.04616143075028547, + "acc_norm": 0.38392857142857145, + "acc_norm_stderr": 0.04616143075028547 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4537037037037037, + "acc_stderr": 0.033953227263757976, + "acc_norm": 0.4537037037037037, + "acc_norm_stderr": 0.033953227263757976 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.3340782122905028, + "acc_stderr": 0.015774911422381622, + "acc_norm": 0.3340782122905028, + "acc_norm_stderr": 0.015774911422381622 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.69, + "acc_stderr": 0.046482319871173156, + "acc_norm": 0.69, + "acc_norm_stderr": 0.046482319871173156 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4963235294117647, + "acc_stderr": 0.030372015885428195, + "acc_norm": 0.4963235294117647, + "acc_norm_stderr": 0.030372015885428195 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6285714285714286, + "acc_stderr": 0.030932858792789848, + "acc_norm": 0.6285714285714286, + "acc_norm_stderr": 0.030932858792789848 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6708860759493671, + "acc_stderr": 0.030587326294702368, + "acc_norm": 0.6708860759493671, + "acc_norm_stderr": 0.030587326294702368 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.39308996088657105, + "acc_stderr": 0.012474899613873956, + "acc_norm": 0.39308996088657105, + "acc_norm_stderr": 0.012474899613873956 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5882352941176471, + 
"acc_stderr": 0.03454236585380609, + "acc_norm": 0.5882352941176471, + "acc_norm_stderr": 0.03454236585380609 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6121212121212121, + "acc_stderr": 0.03804913653971009, + "acc_norm": 0.6121212121212121, + "acc_norm_stderr": 0.03804913653971009 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3708690330477356, + "mc1_stderr": 0.016909693580248804, + "mc2": 0.5294901711955363, + "mc2_stderr": 0.01642972914648021 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.48288075560802834, + "acc_stderr": 0.01718027524608563, + "acc_norm": 0.48642266824085006, + "acc_norm_stderr": 0.01718401506040145 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + 
"harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "kyujinpy/Sakura-SOLAR-Instruct", + "model_sha": "7c5913761b67a5ab694d400e38dfd297c90ea878", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/kyujinpy/Sakura-SOLRCA-Instruct-DPO/result_2024-01-04 07:22:25.json b/kyujinpy/Sakura-SOLRCA-Instruct-DPO/result_2024-01-04 07:22:25.json new file mode 100644 index 0000000000000000000000000000000000000000..1115ab5c4e77ff5164a7f1d823965de2f5e8fad3 --- /dev/null 
+++ b/kyujinpy/Sakura-SOLRCA-Instruct-DPO/result_2024-01-04 07:22:25.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.38993174061433444, + "acc_stderr": 0.0142529598488929, + "acc_norm": 0.4812286689419795, + "acc_norm_stderr": 0.014601090150633964 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4018123879705238, + "acc_stderr": 0.004892624490937211, + "acc_norm": 0.5404301931886079, + "acc_norm_stderr": 0.004973442060741627 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5497076023391813, + "acc_stderr": 0.03815827365913238, + "acc_norm": 0.5497076023391813, + "acc_norm_stderr": 0.03815827365913238 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6407766990291263, + "acc_stderr": 0.047504583990416974, + "acc_norm": 0.6407766990291263, + "acc_norm_stderr": 0.047504583990416974 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5938697318007663, + "acc_stderr": 0.01756203740647893, + "acc_norm": 0.5938697318007663, + "acc_norm_stderr": 0.01756203740647893 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.04171654161354544, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.04171654161354544 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4127659574468085, + "acc_stderr": 0.03218471141400351, + "acc_norm": 0.4127659574468085, + "acc_norm_stderr": 0.03218471141400351 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.41566265060240964, + "acc_stderr": 0.03836722176598053, + "acc_norm": 0.41566265060240964, + "acc_norm_stderr": 0.03836722176598053 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.572347266881029, + "acc_stderr": 0.028099240775809567, + "acc_norm": 0.572347266881029, + "acc_norm_stderr": 0.028099240775809567 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5246636771300448, + 
"acc_stderr": 0.03351695167652628, + "acc_norm": 0.5246636771300448, + "acc_norm_stderr": 0.03351695167652628 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5801526717557252, + "acc_stderr": 0.04328577215262972, + "acc_norm": 0.5801526717557252, + "acc_norm_stderr": 0.04328577215262972 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6717171717171717, + "acc_stderr": 0.03345678422756776, + "acc_norm": 0.6717171717171717, + "acc_norm_stderr": 0.03345678422756776 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4827586206896552, + "acc_stderr": 0.041641887201693775, + "acc_norm": 0.4827586206896552, + "acc_norm_stderr": 0.041641887201693775 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3235294117647059, + "acc_stderr": 0.04655010411319616, + "acc_norm": 0.3235294117647059, + "acc_norm_stderr": 0.04655010411319616 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5546218487394958, + "acc_stderr": 0.03228410626716391, + "acc_norm": 0.5546218487394958, + "acc_norm_stderr": 0.03228410626716391 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.517948717948718, + "acc_stderr": 0.025334667080954904, + "acc_norm": 0.517948717948718, + "acc_norm_stderr": 0.025334667080954904 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.55, + "acc_stderr": 0.04999999999999999, + "acc_norm": 0.55, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5833333333333334, + "acc_stderr": 0.04766075165356462, + "acc_norm": 0.5833333333333334, + "acc_norm_stderr": 0.04766075165356462 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 
0.3842364532019704, + "acc_stderr": 0.0342239856565755, + "acc_norm": 0.3842364532019704, + "acc_norm_stderr": 0.0342239856565755 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.535483870967742, + "acc_stderr": 0.02837228779796294, + "acc_norm": 0.535483870967742, + "acc_norm_stderr": 0.02837228779796294 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7735042735042735, + "acc_stderr": 0.027421007295392926, + "acc_norm": 0.7735042735042735, + "acc_norm_stderr": 0.027421007295392926 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4716981132075472, + "acc_stderr": 0.030723535249006107, + "acc_norm": 0.4716981132075472, + "acc_norm_stderr": 0.030723535249006107 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5272727272727272, + "acc_stderr": 0.04782001791380061, + "acc_norm": 0.5272727272727272, + "acc_norm_stderr": 0.04782001791380061 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3074074074074074, + "acc_stderr": 0.028133252578815635, + "acc_norm": 0.3074074074074074, + "acc_norm_stderr": 0.028133252578815635 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.03780445850526732, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.03780445850526732 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.681592039800995, + "acc_stderr": 0.03294118479054095, + "acc_norm": 0.681592039800995, + "acc_norm_stderr": 0.03294118479054095 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.45664739884393063, + "acc_stderr": 0.03798106566014498, + "acc_norm": 0.45664739884393063, + "acc_norm_stderr": 0.03798106566014498 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3915343915343915, + "acc_stderr": 0.025138091388851105, + "acc_norm": 0.3915343915343915, + "acc_norm_stderr": 0.025138091388851105 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4791666666666667, + "acc_stderr": 0.041775789507399935, + "acc_norm": 0.4791666666666667, + 
"acc_norm_stderr": 0.041775789507399935 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.66, + "acc_stderr": 0.04760952285695238, + "acc_norm": 0.66, + "acc_norm_stderr": 0.04760952285695238 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5664739884393064, + "acc_stderr": 0.02668013476167922, + "acc_norm": 0.5664739884393064, + "acc_norm_stderr": 0.02668013476167922 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5337423312883436, + "acc_stderr": 0.039194155450484096, + "acc_norm": 0.5337423312883436, + "acc_norm_stderr": 0.039194155450484096 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5833333333333334, + "acc_stderr": 0.027431623722415012, + "acc_norm": 0.5833333333333334, + "acc_norm_stderr": 0.027431623722415012 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6839378238341969, + "acc_stderr": 0.033553973696861736, + "acc_norm": 0.6839378238341969, + "acc_norm_stderr": 0.033553973696861736 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.42105263157894735, + "acc_stderr": 0.046446020912223177, + "acc_norm": 0.42105263157894735, + "acc_norm_stderr": 0.046446020912223177 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6330275229357798, + "acc_stderr": 0.020664675659520525, + "acc_norm": 0.6330275229357798, + "acc_norm_stderr": 0.020664675659520525 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.0442626668137991, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.0442626668137991 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5228758169934641, + "acc_stderr": 0.028599936776089782, + "acc_norm": 
0.5228758169934641, + "acc_norm_stderr": 0.028599936776089782 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.57, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.57, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.71900826446281, + "acc_stderr": 0.04103203830514511, + "acc_norm": 0.71900826446281, + "acc_norm_stderr": 0.04103203830514511 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5526315789473685, + "acc_stderr": 0.0404633688397825, + "acc_norm": 0.5526315789473685, + "acc_norm_stderr": 0.0404633688397825 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.47875816993464054, + "acc_stderr": 0.02020957238860024, + "acc_norm": 0.47875816993464054, + "acc_norm_stderr": 0.02020957238860024 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.375886524822695, + "acc_stderr": 0.028893955412115886, + "acc_norm": 0.375886524822695, + "acc_norm_stderr": 0.028893955412115886 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.39285714285714285, + "acc_stderr": 0.04635550135609976, + "acc_norm": 0.39285714285714285, + "acc_norm_stderr": 0.04635550135609976 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.03400603625538271, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.03400603625538271 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.329608938547486, + "acc_stderr": 0.015721531075183884, + "acc_norm": 0.329608938547486, + "acc_norm_stderr": 0.015721531075183884 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.69, + "acc_stderr": 0.046482319871173156, + "acc_norm": 0.69, + "acc_norm_stderr": 0.046482319871173156 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5036764705882353, + "acc_stderr": 0.0303720158854282, + "acc_norm": 
0.5036764705882353, + "acc_norm_stderr": 0.0303720158854282 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6285714285714286, + "acc_stderr": 0.030932858792789848, + "acc_norm": 0.6285714285714286, + "acc_norm_stderr": 0.030932858792789848 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6708860759493671, + "acc_stderr": 0.030587326294702368, + "acc_norm": 0.6708860759493671, + "acc_norm_stderr": 0.030587326294702368 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3924380704041721, + "acc_stderr": 0.012471243669229103, + "acc_norm": 0.3924380704041721, + "acc_norm_stderr": 0.012471243669229103 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6029411764705882, + "acc_stderr": 0.03434131164719129, + "acc_norm": 0.6029411764705882, + "acc_norm_stderr": 0.03434131164719129 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6060606060606061, + "acc_stderr": 0.03815494308688929, + "acc_norm": 0.6060606060606061, + "acc_norm_stderr": 0.03815494308688929 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3769889840881273, + "mc1_stderr": 0.016965517578930354, + "mc2": 0.5375187736796017, + "mc2_stderr": 0.016465285838861653 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.48406139315230223, + "acc_stderr": 0.017181617837190195, + "acc_norm": 0.4852420306965762, + "acc_norm_stderr": 0.01718286443499856 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + 
"harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + 
"harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "kyujinpy/Sakura-SOLRCA-Instruct-DPO", + "model_sha": "7ed66c37ba8906a1ac866cd68a12fa2241b191ad", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/kyujinpy/Sakura-SOLRCA-Math-Instruct-DPO-v1/result_2024-01-04 07:19:13.json b/kyujinpy/Sakura-SOLRCA-Math-Instruct-DPO-v1/result_2024-01-04 07:19:13.json new file mode 100644 index 0000000000000000000000000000000000000000..fc34bff13819d62c93abeae863d4355814b060e3 --- /dev/null +++ b/kyujinpy/Sakura-SOLRCA-Math-Instruct-DPO-v1/result_2024-01-04 07:19:13.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.38993174061433444, + "acc_stderr": 0.0142529598488929, + "acc_norm": 0.4812286689419795, + "acc_norm_stderr": 0.014601090150633964 + }, + "harness|ko_hellaswag|10": { + "acc": 0.401911969727146, + "acc_stderr": 0.0048928234155465496, + "acc_norm": 0.5407289384584744, + "acc_norm_stderr": 0.004973199296339969 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5497076023391813, + "acc_stderr": 0.03815827365913238, + "acc_norm": 0.5497076023391813, + "acc_norm_stderr": 0.03815827365913238 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6310679611650486, + "acc_stderr": 0.0477761518115674, + "acc_norm": 0.6310679611650486, + "acc_norm_stderr": 0.0477761518115674 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5887611749680716, + "acc_stderr": 0.017595971908056573, + "acc_norm": 0.5887611749680716, + "acc_norm_stderr": 0.017595971908056573 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.04171654161354544, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.04171654161354544 + }, + 
"harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4127659574468085, + "acc_stderr": 0.03218471141400351, + "acc_norm": 0.4127659574468085, + "acc_norm_stderr": 0.03218471141400351 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.41566265060240964, + "acc_stderr": 0.03836722176598053, + "acc_norm": 0.41566265060240964, + "acc_norm_stderr": 0.03836722176598053 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5755627009646302, + "acc_stderr": 0.028071928247946205, + "acc_norm": 0.5755627009646302, + "acc_norm_stderr": 0.028071928247946205 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5291479820627802, + "acc_stderr": 0.03350073248773404, + "acc_norm": 0.5291479820627802, + "acc_norm_stderr": 0.03350073248773404 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5877862595419847, + "acc_stderr": 0.04317171194870255, + "acc_norm": 0.5877862595419847, + "acc_norm_stderr": 0.04317171194870255 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6717171717171717, + "acc_stderr": 0.03345678422756776, + "acc_norm": 0.6717171717171717, + "acc_norm_stderr": 0.03345678422756776 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4896551724137931, + "acc_stderr": 0.04165774775728763, + "acc_norm": 0.4896551724137931, + "acc_norm_stderr": 0.04165774775728763 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04690650298201942, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04690650298201942 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5546218487394958, + "acc_stderr": 0.03228410626716391, + "acc_norm": 0.5546218487394958, + "acc_norm_stderr": 
0.03228410626716391 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5153846153846153, + "acc_stderr": 0.025339003010106505, + "acc_norm": 0.5153846153846153, + "acc_norm_stderr": 0.025339003010106505 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.55, + "acc_stderr": 0.04999999999999999, + "acc_norm": 0.55, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5833333333333334, + "acc_stderr": 0.04766075165356462, + "acc_norm": 0.5833333333333334, + "acc_norm_stderr": 0.04766075165356462 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3842364532019704, + "acc_stderr": 0.0342239856565755, + "acc_norm": 0.3842364532019704, + "acc_norm_stderr": 0.0342239856565755 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5419354838709678, + "acc_stderr": 0.028343787250540618, + "acc_norm": 0.5419354838709678, + "acc_norm_stderr": 0.028343787250540618 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7692307692307693, + "acc_stderr": 0.027601921381417607, + "acc_norm": 0.7692307692307693, + "acc_norm_stderr": 0.027601921381417607 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.47547169811320755, + "acc_stderr": 0.030735822206205608, + "acc_norm": 0.47547169811320755, + "acc_norm_stderr": 0.030735822206205608 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5363636363636364, + "acc_stderr": 0.04776449162396197, + "acc_norm": 0.5363636363636364, + "acc_norm_stderr": 0.04776449162396197 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3037037037037037, + "acc_stderr": 0.02803792996911499, + "acc_norm": 0.3037037037037037, + "acc_norm_stderr": 0.02803792996911499 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.038020397601079024, + "acc_norm": 
0.31788079470198677, + "acc_norm_stderr": 0.038020397601079024 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.681592039800995, + "acc_stderr": 0.03294118479054095, + "acc_norm": 0.681592039800995, + "acc_norm_stderr": 0.03294118479054095 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4624277456647399, + "acc_stderr": 0.0380168510452446, + "acc_norm": 0.4624277456647399, + "acc_norm_stderr": 0.0380168510452446 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.025107425481137285, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.025107425481137285 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4861111111111111, + "acc_stderr": 0.04179596617581, + "acc_norm": 0.4861111111111111, + "acc_norm_stderr": 0.04179596617581 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.67, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.67, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5635838150289018, + "acc_stderr": 0.026700545424943687, + "acc_norm": 0.5635838150289018, + "acc_norm_stderr": 0.026700545424943687 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5398773006134969, + "acc_stderr": 0.03915857291436972, + "acc_norm": 0.5398773006134969, + "acc_norm_stderr": 0.03915857291436972 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5802469135802469, + "acc_stderr": 0.027460099557005135, + "acc_norm": 0.5802469135802469, + "acc_norm_stderr": 0.027460099557005135 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6839378238341969, + "acc_stderr": 0.033553973696861736, + "acc_norm": 
0.6839378238341969, + "acc_norm_stderr": 0.033553973696861736 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.42105263157894735, + "acc_stderr": 0.046446020912223177, + "acc_norm": 0.42105263157894735, + "acc_norm_stderr": 0.046446020912223177 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.634862385321101, + "acc_stderr": 0.02064280145438401, + "acc_norm": 0.634862385321101, + "acc_norm_stderr": 0.02064280145438401 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.42063492063492064, + "acc_stderr": 0.04415438226743744, + "acc_norm": 0.42063492063492064, + "acc_norm_stderr": 0.04415438226743744 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5228758169934641, + "acc_stderr": 0.028599936776089782, + "acc_norm": 0.5228758169934641, + "acc_norm_stderr": 0.028599936776089782 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7107438016528925, + "acc_stderr": 0.04139112727635463, + "acc_norm": 0.7107438016528925, + "acc_norm_stderr": 0.04139112727635463 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5592105263157895, + "acc_stderr": 0.04040311062490436, + "acc_norm": 0.5592105263157895, + "acc_norm_stderr": 0.04040311062490436 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.477124183006536, + "acc_stderr": 0.020206653187884786, + "acc_norm": 0.477124183006536, + "acc_norm_stderr": 0.020206653187884786 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.36879432624113473, + "acc_stderr": 0.028782227561347237, + "acc_norm": 0.36879432624113473, + "acc_norm_stderr": 0.028782227561347237 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.39285714285714285, + "acc_stderr": 0.04635550135609976, + "acc_norm": 0.39285714285714285, + "acc_norm_stderr": 0.04635550135609976 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 
0.46296296296296297, + "acc_stderr": 0.03400603625538271, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.03400603625538271 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.3307262569832402, + "acc_stderr": 0.01573502625896612, + "acc_norm": 0.3307262569832402, + "acc_norm_stderr": 0.01573502625896612 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.68, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.68, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5036764705882353, + "acc_stderr": 0.0303720158854282, + "acc_norm": 0.5036764705882353, + "acc_norm_stderr": 0.0303720158854282 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6244897959183674, + "acc_stderr": 0.03100120903989484, + "acc_norm": 0.6244897959183674, + "acc_norm_stderr": 0.03100120903989484 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6751054852320675, + "acc_stderr": 0.030486039389105296, + "acc_norm": 0.6751054852320675, + "acc_norm_stderr": 0.030486039389105296 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.39374185136897, + "acc_stderr": 0.012478532272564435, + "acc_norm": 0.39374185136897, + "acc_norm_stderr": 0.012478532272564435 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6029411764705882, + "acc_stderr": 0.03434131164719129, + "acc_norm": 0.6029411764705882, + "acc_norm_stderr": 0.03434131164719129 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6, + "acc_stderr": 0.03825460278380027, + "acc_norm": 0.6, + "acc_norm_stderr": 0.03825460278380027 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3769889840881273, + "mc1_stderr": 0.016965517578930354, + "mc2": 0.5376334492061923, + "mc2_stderr": 0.016464377457548 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4852420306965762, + 
"acc_stderr": 0.01718286443499856, + "acc_norm": 0.48406139315230223, + "acc_norm_stderr": 0.017181617837190195 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + 
"harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "kyujinpy/Sakura-SOLRCA-Math-Instruct-DPO-v1", + "model_sha": "0a63f7a2bf3565120f89fca7e9e8a0d92737e772", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/kyujinpy/ko-platypus-kiwi-13B/result_2023-11-14 12:30:19.json b/kyujinpy/ko-platypus-kiwi-13B/result_2023-11-14 12:30:19.json new file mode 100644 index 0000000000000000000000000000000000000000..13825c7be3bbbc80686ea985e759c7b156a2bddd --- /dev/null +++ b/kyujinpy/ko-platypus-kiwi-13B/result_2023-11-14 12:30:19.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.378839590443686, + "acc_stderr": 0.014175915490000324, + "acc_norm": 0.42406143344709896, + "acc_norm_stderr": 0.014441889627464396 + }, + "harness|ko_hellaswag|10": { + "acc": 0.40888269269069905, + "acc_stderr": 0.004906227902850752, + "acc_norm": 0.5429197371041625, + "acc_norm_stderr": 0.0049713640310625916 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5087719298245614, + "acc_stderr": 
0.038342347441649924, + "acc_norm": 0.5087719298245614, + "acc_norm_stderr": 0.038342347441649924 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5436893203883495, + "acc_stderr": 0.049318019942204146, + "acc_norm": 0.5436893203883495, + "acc_norm_stderr": 0.049318019942204146 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5274584929757343, + "acc_stderr": 0.017852981266633938, + "acc_norm": 0.5274584929757343, + "acc_norm_stderr": 0.017852981266633938 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.042925967182569816, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.042925967182569816 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.30638297872340425, + "acc_stderr": 0.030135906478517563, + "acc_norm": 0.30638297872340425, + "acc_norm_stderr": 0.030135906478517563 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.41566265060240964, + "acc_stderr": 0.03836722176598053, + "acc_norm": 0.41566265060240964, + "acc_norm_stderr": 0.03836722176598053 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4855305466237942, + "acc_stderr": 0.028386198084177673, + "acc_norm": 0.4855305466237942, + "acc_norm_stderr": 0.028386198084177673 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.43946188340807174, + "acc_stderr": 0.03331092511038179, + "acc_norm": 0.43946188340807174, + "acc_norm_stderr": 0.03331092511038179 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5114503816793893, + "acc_stderr": 0.043841400240780176, + "acc_norm": 0.5114503816793893, + "acc_norm_stderr": 0.043841400240780176 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5656565656565656, + 
"acc_stderr": 0.03531505879359183, + "acc_norm": 0.5656565656565656, + "acc_norm_stderr": 0.03531505879359183 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4068965517241379, + "acc_stderr": 0.04093793981266237, + "acc_norm": 0.4068965517241379, + "acc_norm_stderr": 0.04093793981266237 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.30392156862745096, + "acc_stderr": 0.045766654032077636, + "acc_norm": 0.30392156862745096, + "acc_norm_stderr": 0.045766654032077636 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3907563025210084, + "acc_stderr": 0.031693802357129965, + "acc_norm": 0.3907563025210084, + "acc_norm_stderr": 0.031693802357129965 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.441025641025641, + "acc_stderr": 0.025174048384000766, + "acc_norm": 0.441025641025641, + "acc_norm_stderr": 0.025174048384000766 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5092592592592593, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.5092592592592593, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3842364532019704, + "acc_stderr": 0.03422398565657551, + "acc_norm": 0.3842364532019704, + "acc_norm_stderr": 0.03422398565657551 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.46774193548387094, + "acc_stderr": 0.02838474778881333, + "acc_norm": 0.46774193548387094, + "acc_norm_stderr": 0.02838474778881333 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.030882736974138656, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.030882736974138656 + }, + 
"harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3886792452830189, + "acc_stderr": 0.030000485448675986, + "acc_norm": 0.3886792452830189, + "acc_norm_stderr": 0.030000485448675986 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5636363636363636, + "acc_stderr": 0.04750185058907296, + "acc_norm": 0.5636363636363636, + "acc_norm_stderr": 0.04750185058907296 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.27037037037037037, + "acc_stderr": 0.027080372815145668, + "acc_norm": 0.27037037037037037, + "acc_norm_stderr": 0.027080372815145668 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + "acc_stderr": 0.03734535676787198, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.03734535676787198 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5572139303482587, + "acc_stderr": 0.03512310964123935, + "acc_norm": 0.5572139303482587, + "acc_norm_stderr": 0.03512310964123935 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3988439306358382, + "acc_stderr": 0.03733626655383509, + "acc_norm": 0.3988439306358382, + "acc_norm_stderr": 0.03733626655383509 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.023809523809523867, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.023809523809523867 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.375, + "acc_stderr": 0.04048439222695598, + "acc_norm": 0.375, + "acc_norm_stderr": 0.04048439222695598 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.67, + "acc_stderr": 0.04725815626252607, + "acc_norm": 0.67, + "acc_norm_stderr": 0.04725815626252607 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5115606936416185, + "acc_stderr": 0.026911898686377906, + "acc_norm": 0.5115606936416185, + "acc_norm_stderr": 
0.026911898686377906 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.50920245398773, + "acc_stderr": 0.03927705600787443, + "acc_norm": 0.50920245398773, + "acc_norm_stderr": 0.03927705600787443 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.49382716049382713, + "acc_stderr": 0.027818623962583295, + "acc_norm": 0.49382716049382713, + "acc_norm_stderr": 0.027818623962583295 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5077720207253886, + "acc_stderr": 0.03608003225569654, + "acc_norm": 0.5077720207253886, + "acc_norm_stderr": 0.03608003225569654 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.04142439719489362, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.04142439719489362 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5027522935779817, + "acc_stderr": 0.02143699835976532, + "acc_norm": 0.5027522935779817, + "acc_norm_stderr": 0.02143699835976532 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.24603174603174602, + "acc_stderr": 0.038522733649243135, + "acc_norm": 0.24603174603174602, + "acc_norm_stderr": 0.038522733649243135 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4150326797385621, + "acc_stderr": 0.028213504177824093, + "acc_norm": 0.4150326797385621, + "acc_norm_stderr": 0.028213504177824093 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6446280991735537, + "acc_stderr": 0.04369236326573981, + "acc_norm": 0.6446280991735537, + "acc_norm_stderr": 0.04369236326573981 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4473684210526316, + "acc_stderr": 0.04046336883978251, + "acc_norm": 
0.4473684210526316, + "acc_norm_stderr": 0.04046336883978251 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.35784313725490197, + "acc_stderr": 0.019393058402355435, + "acc_norm": 0.35784313725490197, + "acc_norm_stderr": 0.019393058402355435 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.34397163120567376, + "acc_stderr": 0.02833801742861131, + "acc_norm": 0.34397163120567376, + "acc_norm_stderr": 0.02833801742861131 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.24107142857142858, + "acc_stderr": 0.04059867246952687, + "acc_norm": 0.24107142857142858, + "acc_norm_stderr": 0.04059867246952687 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.26851851851851855, + "acc_stderr": 0.030225226160012404, + "acc_norm": 0.26851851851851855, + "acc_norm_stderr": 0.030225226160012404 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3088235294117647, + "acc_stderr": 0.02806499816704009, + "acc_norm": 0.3088235294117647, + "acc_norm_stderr": 0.02806499816704009 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.40816326530612246, + "acc_stderr": 0.03146465712827424, + "acc_norm": 0.40816326530612246, + "acc_norm_stderr": 0.03146465712827424 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5654008438818565, + "acc_stderr": 0.03226759995510145, + "acc_norm": 0.5654008438818565, + "acc_norm_stderr": 0.03226759995510145 + }, + "harness|ko_mmlu_professional_law|5": { 
+ "acc": 0.303129074315515, + "acc_stderr": 0.011738669951254296, + "acc_norm": 0.303129074315515, + "acc_norm_stderr": 0.011738669951254296 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.46078431372549017, + "acc_stderr": 0.03498501649369527, + "acc_norm": 0.46078431372549017, + "acc_norm_stderr": 0.03498501649369527 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5151515151515151, + "acc_stderr": 0.039025510073744475, + "acc_norm": 0.5151515151515151, + "acc_norm_stderr": 0.039025510073744475 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.25091799265605874, + "mc1_stderr": 0.015176985027707689, + "mc2": 0.4004845005349835, + "mc2_stderr": 0.014923471142092035 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5655253837072018, + "acc_stderr": 0.017042098620824935, + "acc_norm": 0.6611570247933884, + "acc_norm_stderr": 0.01627295299701915 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + 
"harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "kyujinpy/ko-platypus-kiwi-13B", + "model_sha": "069a1dd610e02969baaecbe54305a431e6e18d23", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/kyujinpy/kosy-openorca/result_2023-11-03 
06:06:41.json b/kyujinpy/kosy-openorca/result_2023-11-03 06:06:41.json new file mode 100644 index 0000000000000000000000000000000000000000..ec3ed9a33eec28523302a0f4fdc6bc337db1f9ba --- /dev/null +++ b/kyujinpy/kosy-openorca/result_2023-11-03 06:06:41.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3839590443686007, + "acc_stderr": 0.01421244498065189, + "acc_norm": 0.43686006825938567, + "acc_norm_stderr": 0.014494421584256512 + }, + "harness|ko_hellaswag|10": { + "acc": 0.40838478390758814, + "acc_stderr": 0.004905304371090869, + "acc_norm": 0.5449113722366062, + "acc_norm_stderr": 0.0049696115546853945 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4853801169590643, + "acc_stderr": 0.038331852752130205, + "acc_norm": 0.4853801169590643, + "acc_norm_stderr": 0.038331852752130205 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5339805825242718, + "acc_stderr": 0.0493929144727348, + "acc_norm": 0.5339805825242718, + "acc_norm_stderr": 0.0493929144727348 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5197956577266922, + "acc_stderr": 0.01786594482729163, + "acc_norm": 0.5197956577266922, + "acc_norm_stderr": 0.01786594482729163 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.04171654161354544, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.04171654161354544 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39574468085106385, + "acc_stderr": 0.031967586978353627, + "acc_norm": 0.39574468085106385, + "acc_norm_stderr": 0.031967586978353627 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3855421686746988, + "acc_stderr": 0.037891344246115476, + "acc_norm": 0.3855421686746988, + "acc_norm_stderr": 0.037891344246115476 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.45980707395498394, + 
"acc_stderr": 0.028306190403305696, + "acc_norm": 0.45980707395498394, + "acc_norm_stderr": 0.028306190403305696 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.47085201793721976, + "acc_stderr": 0.03350073248773403, + "acc_norm": 0.47085201793721976, + "acc_norm_stderr": 0.03350073248773403 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.46564885496183206, + "acc_stderr": 0.04374928560599738, + "acc_norm": 0.46564885496183206, + "acc_norm_stderr": 0.04374928560599738 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5252525252525253, + "acc_stderr": 0.03557806245087314, + "acc_norm": 0.5252525252525253, + "acc_norm_stderr": 0.03557806245087314 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3586206896551724, + "acc_stderr": 0.039966295748767186, + "acc_norm": 0.3586206896551724, + "acc_norm_stderr": 0.039966295748767186 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237654, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237654 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3865546218487395, + "acc_stderr": 0.03163145807552378, + "acc_norm": 0.3865546218487395, + "acc_norm_stderr": 0.03163145807552378 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4205128205128205, + "acc_stderr": 0.025028610276710855, + "acc_norm": 0.4205128205128205, + "acc_norm_stderr": 0.025028610276710855 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.44, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.44, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 
0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.33497536945812806, + "acc_stderr": 0.0332085274234831, + "acc_norm": 0.33497536945812806, + "acc_norm_stderr": 0.0332085274234831 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.41935483870967744, + "acc_stderr": 0.028071588901091852, + "acc_norm": 0.41935483870967744, + "acc_norm_stderr": 0.028071588901091852 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6452991452991453, + "acc_stderr": 0.03134250486245402, + "acc_norm": 0.6452991452991453, + "acc_norm_stderr": 0.03134250486245402 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.43018867924528303, + "acc_stderr": 0.030471445867183235, + "acc_norm": 0.43018867924528303, + "acc_norm_stderr": 0.030471445867183235 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5272727272727272, + "acc_stderr": 0.04782001791380061, + "acc_norm": 0.5272727272727272, + "acc_norm_stderr": 0.04782001791380061 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.02684205787383371, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.02684205787383371 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.271523178807947, + "acc_stderr": 0.036313298039696545, + "acc_norm": 0.271523178807947, + "acc_norm_stderr": 0.036313298039696545 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5124378109452736, + "acc_stderr": 0.03534439848539579, + "acc_norm": 0.5124378109452736, + "acc_norm_stderr": 0.03534439848539579 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.37572254335260113, + "acc_stderr": 0.03692820767264867, + "acc_norm": 0.37572254335260113, + "acc_norm_stderr": 0.03692820767264867 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2751322751322751, + "acc_stderr": 0.02300008685906864, + "acc_norm": 0.2751322751322751, + "acc_norm_stderr": 
0.02300008685906864 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3541666666666667, + "acc_stderr": 0.039994111357535424, + "acc_norm": 0.3541666666666667, + "acc_norm_stderr": 0.039994111357535424 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.55, + "acc_stderr": 0.05, + "acc_norm": 0.55, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4595375722543353, + "acc_stderr": 0.026830805998952243, + "acc_norm": 0.4595375722543353, + "acc_norm_stderr": 0.026830805998952243 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.49079754601226994, + "acc_stderr": 0.03927705600787443, + "acc_norm": 0.49079754601226994, + "acc_norm_stderr": 0.03927705600787443 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.44753086419753085, + "acc_stderr": 0.02766713856942271, + "acc_norm": 0.44753086419753085, + "acc_norm_stderr": 0.02766713856942271 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.46113989637305697, + "acc_stderr": 0.035975244117345775, + "acc_norm": 0.46113989637305697, + "acc_norm_stderr": 0.035975244117345775 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2894736842105263, + "acc_stderr": 0.04266339443159395, + "acc_norm": 0.2894736842105263, + "acc_norm_stderr": 0.04266339443159395 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.4990825688073395, + "acc_stderr": 0.021437287056051215, + "acc_norm": 0.4990825688073395, + "acc_norm_stderr": 0.021437287056051215 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2698412698412698, + "acc_stderr": 0.03970158273235172, + "acc_norm": 0.2698412698412698, + "acc_norm_stderr": 
0.03970158273235172 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.39215686274509803, + "acc_stderr": 0.027956046165424516, + "acc_norm": 0.39215686274509803, + "acc_norm_stderr": 0.027956046165424516 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5867768595041323, + "acc_stderr": 0.04495087843548408, + "acc_norm": 0.5867768595041323, + "acc_norm_stderr": 0.04495087843548408 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40131578947368424, + "acc_stderr": 0.039889037033362836, + "acc_norm": 0.40131578947368424, + "acc_norm_stderr": 0.039889037033362836 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3284313725490196, + "acc_stderr": 0.018999707383162662, + "acc_norm": 0.3284313725490196, + "acc_norm_stderr": 0.018999707383162662 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2978723404255319, + "acc_stderr": 0.027281608344469414, + "acc_norm": 0.2978723404255319, + "acc_norm_stderr": 0.027281608344469414 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.22321428571428573, + "acc_stderr": 0.039523019677025116, + "acc_norm": 0.22321428571428573, + "acc_norm_stderr": 0.039523019677025116 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.24074074074074073, + "acc_stderr": 0.029157522184605607, + "acc_norm": 0.24074074074074073, + "acc_norm_stderr": 0.029157522184605607 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.049888765156985884, + 
"acc_norm": 0.44, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.2536764705882353, + "acc_stderr": 0.02643132987078955, + "acc_norm": 0.2536764705882353, + "acc_norm_stderr": 0.02643132987078955 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4, + "acc_stderr": 0.03136250240935893, + "acc_norm": 0.4, + "acc_norm_stderr": 0.03136250240935893 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5527426160337553, + "acc_stderr": 0.03236564251614192, + "acc_norm": 0.5527426160337553, + "acc_norm_stderr": 0.03236564251614192 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2920469361147327, + "acc_stderr": 0.011613349136271808, + "acc_norm": 0.2920469361147327, + "acc_norm_stderr": 0.011613349136271808 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.49019607843137253, + "acc_stderr": 0.03508637358630572, + "acc_norm": 0.49019607843137253, + "acc_norm_stderr": 0.03508637358630572 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5272727272727272, + "acc_stderr": 0.03898531605579419, + "acc_norm": 0.5272727272727272, + "acc_norm_stderr": 0.03898531605579419 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3011015911872705, + "mc1_stderr": 0.016058999026100626, + "mc2": 0.45297396150774194, + "mc2_stderr": 0.015202543307381022 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.49586776859504134, + "acc_stderr": 0.017189767032130824, + "acc_norm": 0.5879574970484062, + "acc_norm_stderr": 0.01692227673852836 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + 
"harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + 
"harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "kyujinpy/kosy-openorca", + "model_sha": "52c7495d1e211ac32c5e383418f3c1019c8883e1", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/kyujinpy/mistral-Ko-Orca-7B/result_2023-11-01 14:00:13.json b/kyujinpy/mistral-Ko-Orca-7B/result_2023-11-01 14:00:13.json new file mode 100644 index 0000000000000000000000000000000000000000..25f088a53963c82fa6e43c6e3303ed20ecc44411 --- /dev/null +++ b/kyujinpy/mistral-Ko-Orca-7B/result_2023-11-01 14:00:13.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3319112627986348, + "acc_stderr": 0.013760988200880534, + "acc_norm": 0.3720136518771331, + "acc_norm_stderr": 0.014124597881844456 + }, + "harness|ko_hellaswag|10": { + "acc": 0.36885082652858, + "acc_stderr": 0.0048150733340005985, + "acc_norm": 0.47689703246365267, + "acc_norm_stderr": 0.004984452002563923 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.038110796698335316, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.038110796698335316 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5728155339805825, + "acc_stderr": 0.04897957737781168, + "acc_norm": 0.5728155339805825, + "acc_norm_stderr": 0.04897957737781168 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4355044699872286, + "acc_stderr": 0.01773058992792661, + "acc_norm": 0.4355044699872286, + "acc_norm_stderr": 0.01773058992792661 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3111111111111111, + "acc_stderr": 
0.03999262876617723, + "acc_norm": 0.3111111111111111, + "acc_norm_stderr": 0.03999262876617723 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3829787234042553, + "acc_stderr": 0.031778212502369216, + "acc_norm": 0.3829787234042553, + "acc_norm_stderr": 0.031778212502369216 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4036144578313253, + "acc_stderr": 0.03819486140758397, + "acc_norm": 0.4036144578313253, + "acc_norm_stderr": 0.03819486140758397 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.47266881028938906, + "acc_stderr": 0.02835563356832818, + "acc_norm": 0.47266881028938906, + "acc_norm_stderr": 0.02835563356832818 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.40358744394618834, + "acc_stderr": 0.03292802819330313, + "acc_norm": 0.40358744394618834, + "acc_norm_stderr": 0.03292802819330313 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3816793893129771, + "acc_stderr": 0.042607351576445594, + "acc_norm": 0.3816793893129771, + "acc_norm_stderr": 0.042607351576445594 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.48484848484848486, + "acc_stderr": 0.03560716516531061, + "acc_norm": 0.48484848484848486, + "acc_norm_stderr": 0.03560716516531061 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3448275862068966, + "acc_stderr": 0.039609335494512087, + "acc_norm": 0.3448275862068966, + "acc_norm_stderr": 0.039609335494512087 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.041583075330832865, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.041583075330832865 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 
0.4117647058823529, + "acc_stderr": 0.03196876989195779, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.03196876989195779 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4076923076923077, + "acc_stderr": 0.024915243985987833, + "acc_norm": 0.4076923076923077, + "acc_norm_stderr": 0.024915243985987833 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.59, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.59, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.04820403072760627, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.04820403072760627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3891625615763547, + "acc_stderr": 0.034304624161038716, + "acc_norm": 0.3891625615763547, + "acc_norm_stderr": 0.034304624161038716 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4032258064516129, + "acc_stderr": 0.027906150826041136, + "acc_norm": 0.4032258064516129, + "acc_norm_stderr": 0.027906150826041136 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6495726495726496, + "acc_stderr": 0.0312561082442188, + "acc_norm": 0.6495726495726496, + "acc_norm_stderr": 0.0312561082442188 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.41509433962264153, + "acc_stderr": 0.030325945789286105, + "acc_norm": 0.41509433962264153, + "acc_norm_stderr": 0.030325945789286105 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4727272727272727, + "acc_stderr": 0.04782001791380063, + "acc_norm": 0.4727272727272727, + "acc_norm_stderr": 0.04782001791380063 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2851851851851852, + "acc_stderr": 0.027528599210340492, + "acc_norm": 0.2851851851851852, + "acc_norm_stderr": 0.027528599210340492 + }, + 
"harness|ko_mmlu_high_school_physics|5": { + "acc": 0.25165562913907286, + "acc_stderr": 0.035433042343899844, + "acc_norm": 0.25165562913907286, + "acc_norm_stderr": 0.035433042343899844 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5621890547263682, + "acc_stderr": 0.0350808011219984, + "acc_norm": 0.5621890547263682, + "acc_norm_stderr": 0.0350808011219984 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3468208092485549, + "acc_stderr": 0.03629146670159665, + "acc_norm": 0.3468208092485549, + "acc_norm_stderr": 0.03629146670159665 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.023809523809523864, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.023809523809523864 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3194444444444444, + "acc_stderr": 0.03899073687357334, + "acc_norm": 0.3194444444444444, + "acc_norm_stderr": 0.03899073687357334 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.57, + "acc_stderr": 0.04975698519562427, + "acc_norm": 0.57, + "acc_norm_stderr": 0.04975698519562427 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.42196531791907516, + "acc_stderr": 0.02658923114217426, + "acc_norm": 0.42196531791907516, + "acc_norm_stderr": 0.02658923114217426 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4601226993865031, + "acc_stderr": 0.03915857291436971, + "acc_norm": 0.4601226993865031, + "acc_norm_stderr": 0.03915857291436971 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4537037037037037, + "acc_stderr": 0.027701228468542602, + "acc_norm": 0.4537037037037037, + "acc_norm_stderr": 0.027701228468542602 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + 
"harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.43523316062176165, + "acc_stderr": 0.035780381650085846, + "acc_norm": 0.43523316062176165, + "acc_norm_stderr": 0.035780381650085846 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3157894736842105, + "acc_stderr": 0.043727482902780085, + "acc_norm": 0.3157894736842105, + "acc_norm_stderr": 0.043727482902780085 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.47706422018348627, + "acc_stderr": 0.0214147570581755, + "acc_norm": 0.47706422018348627, + "acc_norm_stderr": 0.0214147570581755 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.31746031746031744, + "acc_stderr": 0.04163453031302859, + "acc_norm": 0.31746031746031744, + "acc_norm_stderr": 0.04163453031302859 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4150326797385621, + "acc_stderr": 0.028213504177824096, + "acc_norm": 0.4150326797385621, + "acc_norm_stderr": 0.028213504177824096 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.628099173553719, + "acc_stderr": 0.04412015806624504, + "acc_norm": 0.628099173553719, + "acc_norm_stderr": 0.04412015806624504 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3355263157894737, + "acc_stderr": 0.038424985593952694, + "acc_norm": 0.3355263157894737, + "acc_norm_stderr": 0.038424985593952694 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.33169934640522875, + "acc_stderr": 0.01904748523936038, + "acc_norm": 0.33169934640522875, + "acc_norm_stderr": 0.01904748523936038 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32978723404255317, + "acc_stderr": 0.028045946942042398, + "acc_norm": 0.32978723404255317, + "acc_norm_stderr": 0.028045946942042398 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3482142857142857, + "acc_stderr": 0.04521829902833585, + 
"acc_norm": 0.3482142857142857, + "acc_norm_stderr": 0.04521829902833585 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3425925925925926, + "acc_stderr": 0.032365852526021574, + "acc_norm": 0.3425925925925926, + "acc_norm_stderr": 0.032365852526021574 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2435754189944134, + "acc_stderr": 0.014355911964767857, + "acc_norm": 0.2435754189944134, + "acc_norm_stderr": 0.014355911964767857 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3602941176470588, + "acc_stderr": 0.029163128570670733, + "acc_norm": 0.3602941176470588, + "acc_norm_stderr": 0.029163128570670733 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.46122448979591835, + "acc_stderr": 0.03191282052669276, + "acc_norm": 0.46122448979591835, + "acc_norm_stderr": 0.03191282052669276 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5147679324894515, + "acc_stderr": 0.03253302807877738, + "acc_norm": 0.5147679324894515, + "acc_norm_stderr": 0.03253302807877738 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2920469361147327, + "acc_stderr": 0.011613349136271805, + "acc_norm": 0.2920469361147327, + "acc_norm_stderr": 0.011613349136271805 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.45588235294117646, + "acc_stderr": 0.03495624522015474, + "acc_norm": 0.45588235294117646, + "acc_norm_stderr": 0.03495624522015474 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.45454545454545453, + "acc_stderr": 0.03888176921674098, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.03888176921674098 + }, + 
"harness|ko_truthfulqa_mc|0": { + "mc1": 0.2729498164014688, + "mc1_stderr": 0.01559475363200651, + "mc2": 0.434406639546979, + "mc2_stderr": 0.01542967405561873 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.47461629279811096, + "acc_stderr": 0.017168187201429253, + "acc_norm": 0.5655253837072018, + "acc_norm_stderr": 0.01704209862082493 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "kyujinpy/mistral-Ko-Orca-7B", + "model_sha": "9658ad38439195153512b6b7117f94000d8f5c3a", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/lIlBrother/llama2-merge-v0.1/result_2023-11-10 13:59:02.json b/lIlBrother/llama2-merge-v0.1/result_2023-11-10 13:59:02.json new file mode 100644 index 0000000000000000000000000000000000000000..fdb0c03f34288cef2ceed242814e8318768b9738 --- /dev/null +++ b/lIlBrother/llama2-merge-v0.1/result_2023-11-10 13:59:02.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4104095563139932, + "acc_stderr": 0.014374922192642662, + "acc_norm": 0.46501706484641636, + "acc_norm_stderr": 0.014575583922019672 + }, + "harness|ko_hellaswag|10": { + "acc": 0.44054969129655447, + "acc_stderr": 
0.004954384702021653, + "acc_norm": 0.5907189802828122, + "acc_norm_stderr": 0.004906962980328293 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5906432748538012, + "acc_stderr": 0.03771283107626545, + "acc_norm": 0.5906432748538012, + "acc_norm_stderr": 0.03771283107626545 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5145631067961165, + "acc_stderr": 0.04948637324026637, + "acc_norm": 0.5145631067961165, + "acc_norm_stderr": 0.04948637324026637 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5530012771392082, + "acc_stderr": 0.017779225233394216, + "acc_norm": 0.5530012771392082, + "acc_norm_stderr": 0.017779225233394216 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.5111111111111111, + "acc_stderr": 0.04318275491977976, + "acc_norm": 0.5111111111111111, + "acc_norm_stderr": 0.04318275491977976 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39574468085106385, + "acc_stderr": 0.03196758697835363, + "acc_norm": 0.39574468085106385, + "acc_norm_stderr": 0.03196758697835363 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39759036144578314, + "acc_stderr": 0.03809973084540218, + "acc_norm": 0.39759036144578314, + "acc_norm_stderr": 0.03809973084540218 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5562700964630225, + "acc_stderr": 0.02821768355665231, + "acc_norm": 0.5562700964630225, + "acc_norm_stderr": 0.02821768355665231 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4977578475336323, + "acc_stderr": 0.033557465352232634, + "acc_norm": 0.4977578475336323, + "acc_norm_stderr": 0.033557465352232634 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4351145038167939, + "acc_stderr": 0.04348208051644858, + "acc_norm": 0.4351145038167939, + "acc_norm_stderr": 0.04348208051644858 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.46, + "acc_stderr": 
0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5656565656565656, + "acc_stderr": 0.03531505879359182, + "acc_norm": 0.5656565656565656, + "acc_norm_stderr": 0.03531505879359182 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4827586206896552, + "acc_stderr": 0.04164188720169377, + "acc_norm": 0.4827586206896552, + "acc_norm_stderr": 0.04164188720169377 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.04220773659171452, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.04220773659171452 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.46638655462184875, + "acc_stderr": 0.03240501447690071, + "acc_norm": 0.46638655462184875, + "acc_norm_stderr": 0.03240501447690071 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.441025641025641, + "acc_stderr": 0.02517404838400076, + "acc_norm": 0.441025641025641, + "acc_norm_stderr": 0.02517404838400076 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3842364532019704, + "acc_stderr": 0.034223985656575494, + "acc_norm": 0.3842364532019704, + "acc_norm_stderr": 0.034223985656575494 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.45806451612903226, + "acc_stderr": 0.02834378725054063, + "acc_norm": 0.45806451612903226, + "acc_norm_stderr": 0.02834378725054063 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6623931623931624, + 
"acc_stderr": 0.030980296992618558, + "acc_norm": 0.6623931623931624, + "acc_norm_stderr": 0.030980296992618558 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.41132075471698115, + "acc_stderr": 0.030285009259009794, + "acc_norm": 0.41132075471698115, + "acc_norm_stderr": 0.030285009259009794 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5545454545454546, + "acc_stderr": 0.047605488214603246, + "acc_norm": 0.5545454545454546, + "acc_norm_stderr": 0.047605488214603246 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.28888888888888886, + "acc_stderr": 0.027634907264178544, + "acc_norm": 0.28888888888888886, + "acc_norm_stderr": 0.027634907264178544 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.03780445850526733, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.03780445850526733 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5970149253731343, + "acc_stderr": 0.034683432951111266, + "acc_norm": 0.5970149253731343, + "acc_norm_stderr": 0.034683432951111266 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.43352601156069365, + "acc_stderr": 0.037786210790920545, + "acc_norm": 0.43352601156069365, + "acc_norm_stderr": 0.037786210790920545 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.29365079365079366, + "acc_stderr": 0.023456037383982026, + "acc_norm": 0.29365079365079366, + "acc_norm_stderr": 0.023456037383982026 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3680555555555556, + "acc_stderr": 0.04032999053960718, + "acc_norm": 0.3680555555555556, + "acc_norm_stderr": 0.04032999053960718 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.68, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.68, + "acc_norm_stderr": 0.04688261722621504 + }, + 
"harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5664739884393064, + "acc_stderr": 0.026680134761679217, + "acc_norm": 0.5664739884393064, + "acc_norm_stderr": 0.026680134761679217 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.49079754601226994, + "acc_stderr": 0.039277056007874414, + "acc_norm": 0.49079754601226994, + "acc_norm_stderr": 0.039277056007874414 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5154320987654321, + "acc_stderr": 0.0278074900442762, + "acc_norm": 0.5154320987654321, + "acc_norm_stderr": 0.0278074900442762 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5699481865284974, + "acc_stderr": 0.03572954333144808, + "acc_norm": 0.5699481865284974, + "acc_norm_stderr": 0.03572954333144808 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.22807017543859648, + "acc_stderr": 0.03947152782669415, + "acc_norm": 0.22807017543859648, + "acc_norm_stderr": 0.03947152782669415 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5522935779816514, + "acc_stderr": 0.02131975496242546, + "acc_norm": 0.5522935779816514, + "acc_norm_stderr": 0.02131975496242546 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3492063492063492, + "acc_stderr": 0.04263906892795133, + "acc_norm": 0.3492063492063492, + "acc_norm_stderr": 0.04263906892795133 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.43137254901960786, + "acc_stderr": 0.028358956313423552, + "acc_norm": 0.43137254901960786, + "acc_norm_stderr": 0.028358956313423552 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6446280991735537, + "acc_stderr": 0.0436923632657398, + "acc_norm": 0.6446280991735537, + "acc_norm_stderr": 
0.0436923632657398 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.39473684210526316, + "acc_stderr": 0.039777499346220734, + "acc_norm": 0.39473684210526316, + "acc_norm_stderr": 0.039777499346220734 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.42810457516339867, + "acc_stderr": 0.0200176292142131, + "acc_norm": 0.42810457516339867, + "acc_norm_stderr": 0.0200176292142131 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32978723404255317, + "acc_stderr": 0.028045946942042394, + "acc_norm": 0.32978723404255317, + "acc_norm_stderr": 0.028045946942042394 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.26785714285714285, + "acc_stderr": 0.04203277291467762, + "acc_norm": 0.26785714285714285, + "acc_norm_stderr": 0.04203277291467762 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3425925925925926, + "acc_stderr": 0.03236585252602157, + "acc_norm": 0.3425925925925926, + "acc_norm_stderr": 0.03236585252602157 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24581005586592178, + "acc_stderr": 0.014400296429225608, + "acc_norm": 0.24581005586592178, + "acc_norm_stderr": 0.014400296429225608 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.33088235294117646, + "acc_stderr": 0.028582709753898428, + "acc_norm": 0.33088235294117646, + "acc_norm_stderr": 0.028582709753898428 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4448979591836735, + "acc_stderr": 0.03181425118197787, + "acc_norm": 0.4448979591836735, + "acc_norm_stderr": 0.03181425118197787 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6666666666666666, + 
"acc_stderr": 0.030685820596610798, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.030685820596610798 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3604954367666232, + "acc_stderr": 0.01226311023729924, + "acc_norm": 0.3604954367666232, + "acc_norm_stderr": 0.01226311023729924 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5343137254901961, + "acc_stderr": 0.03501038327635897, + "acc_norm": 0.5343137254901961, + "acc_norm_stderr": 0.03501038327635897 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6303030303030303, + "acc_stderr": 0.03769430314512568, + "acc_norm": 0.6303030303030303, + "acc_norm_stderr": 0.03769430314512568 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3353733170134639, + "mc1_stderr": 0.016527534039668987, + "mc2": 0.5055997170755017, + "mc2_stderr": 0.015472709251284784 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4734356552538371, + "acc_stderr": 0.017166075717577747, + "acc_norm": 0.5312868949232585, + "acc_norm_stderr": 0.017156666859785473 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + 
"harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "lIlBrother/llama2-merge-v0.1", + "model_sha": "7c5ff11a49acb01a0b030ae244509c224dd2377e", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 
0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/lIlBrother/llama2-merge-v0.2/result_2023-11-10 14:27:07.json b/lIlBrother/llama2-merge-v0.2/result_2023-11-10 14:27:07.json new file mode 100644 index 0000000000000000000000000000000000000000..d696128709bdaf6358c3d87cf135042d92ad3f92 --- /dev/null +++ b/lIlBrother/llama2-merge-v0.2/result_2023-11-10 14:27:07.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4121160409556314, + "acc_stderr": 0.014383915302225398, + "acc_norm": 0.46501706484641636, + "acc_norm_stderr": 0.014575583922019672 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4404501095399323, + "acc_stderr": 0.004954265595373461, + "acc_norm": 0.5911173073093009, + "acc_norm_stderr": 0.004906227902850757 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5847953216374269, + "acc_stderr": 0.037792759455032014, + "acc_norm": 0.5847953216374269, + "acc_norm_stderr": 0.037792759455032014 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5048543689320388, + "acc_stderr": 0.049505043821289195, + "acc_norm": 0.5048543689320388, + "acc_norm_stderr": 0.049505043821289195 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5530012771392082, + "acc_stderr": 0.017779225233394216, + "acc_norm": 0.5530012771392082, + "acc_norm_stderr": 0.017779225233394216 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.5111111111111111, + "acc_stderr": 0.04318275491977976, + "acc_norm": 0.5111111111111111, + "acc_norm_stderr": 0.04318275491977976 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4, + "acc_stderr": 0.032025630761017346, + "acc_norm": 0.4, + "acc_norm_stderr": 0.032025630761017346 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39759036144578314, + "acc_stderr": 
0.03809973084540218, + "acc_norm": 0.39759036144578314, + "acc_norm_stderr": 0.03809973084540218 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5562700964630225, + "acc_stderr": 0.02821768355665231, + "acc_norm": 0.5562700964630225, + "acc_norm_stderr": 0.02821768355665231 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4977578475336323, + "acc_stderr": 0.033557465352232634, + "acc_norm": 0.4977578475336323, + "acc_norm_stderr": 0.033557465352232634 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4351145038167939, + "acc_stderr": 0.04348208051644858, + "acc_norm": 0.4351145038167939, + "acc_norm_stderr": 0.04348208051644858 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5656565656565656, + "acc_stderr": 0.03531505879359182, + "acc_norm": 0.5656565656565656, + "acc_norm_stderr": 0.03531505879359182 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4827586206896552, + "acc_stderr": 0.04164188720169377, + "acc_norm": 0.4827586206896552, + "acc_norm_stderr": 0.04164188720169377 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.04220773659171452, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.04220773659171452 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.46638655462184875, + "acc_stderr": 0.03240501447690071, + "acc_norm": 0.46638655462184875, + "acc_norm_stderr": 0.03240501447690071 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.44358974358974357, + "acc_stderr": 0.025189149894764194, + "acc_norm": 0.44358974358974357, + "acc_norm_stderr": 0.025189149894764194 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_global_facts|5": { + 
"acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3891625615763547, + "acc_stderr": 0.034304624161038716, + "acc_norm": 0.3891625615763547, + "acc_norm_stderr": 0.034304624161038716 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.45806451612903226, + "acc_stderr": 0.02834378725054063, + "acc_norm": 0.45806451612903226, + "acc_norm_stderr": 0.02834378725054063 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6623931623931624, + "acc_stderr": 0.030980296992618558, + "acc_norm": 0.6623931623931624, + "acc_norm_stderr": 0.030980296992618558 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.41132075471698115, + "acc_stderr": 0.030285009259009794, + "acc_norm": 0.41132075471698115, + "acc_norm_stderr": 0.030285009259009794 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5545454545454546, + "acc_stderr": 0.047605488214603246, + "acc_norm": 0.5545454545454546, + "acc_norm_stderr": 0.047605488214603246 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.28888888888888886, + "acc_stderr": 0.027634907264178544, + "acc_norm": 0.28888888888888886, + "acc_norm_stderr": 0.027634907264178544 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.03802039760107903, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.03802039760107903 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5920398009950248, + "acc_stderr": 0.03475116365194092, + "acc_norm": 0.5920398009950248, + "acc_norm_stderr": 0.03475116365194092 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.43352601156069365, + "acc_stderr": 0.037786210790920545, + "acc_norm": 0.43352601156069365, + "acc_norm_stderr": 0.037786210790920545 + }, + 
"harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.29365079365079366, + "acc_stderr": 0.023456037383982026, + "acc_norm": 0.29365079365079366, + "acc_norm_stderr": 0.023456037383982026 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3680555555555556, + "acc_stderr": 0.04032999053960718, + "acc_norm": 0.3680555555555556, + "acc_norm_stderr": 0.04032999053960718 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.68, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.68, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5664739884393064, + "acc_stderr": 0.026680134761679217, + "acc_norm": 0.5664739884393064, + "acc_norm_stderr": 0.026680134761679217 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.49079754601226994, + "acc_stderr": 0.039277056007874414, + "acc_norm": 0.49079754601226994, + "acc_norm_stderr": 0.039277056007874414 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5123456790123457, + "acc_stderr": 0.027812262269327242, + "acc_norm": 0.5123456790123457, + "acc_norm_stderr": 0.027812262269327242 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5699481865284974, + "acc_stderr": 0.03572954333144808, + "acc_norm": 0.5699481865284974, + "acc_norm_stderr": 0.03572954333144808 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.22807017543859648, + "acc_stderr": 0.03947152782669415, + "acc_norm": 0.22807017543859648, + "acc_norm_stderr": 0.03947152782669415 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5522935779816514, + "acc_stderr": 0.02131975496242546, + "acc_norm": 0.5522935779816514, + "acc_norm_stderr": 
0.02131975496242546 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3492063492063492, + "acc_stderr": 0.04263906892795133, + "acc_norm": 0.3492063492063492, + "acc_norm_stderr": 0.04263906892795133 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.43137254901960786, + "acc_stderr": 0.028358956313423552, + "acc_norm": 0.43137254901960786, + "acc_norm_stderr": 0.028358956313423552 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6446280991735537, + "acc_stderr": 0.0436923632657398, + "acc_norm": 0.6446280991735537, + "acc_norm_stderr": 0.0436923632657398 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.39473684210526316, + "acc_stderr": 0.039777499346220734, + "acc_norm": 0.39473684210526316, + "acc_norm_stderr": 0.039777499346220734 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.42810457516339867, + "acc_stderr": 0.0200176292142131, + "acc_norm": 0.42810457516339867, + "acc_norm_stderr": 0.0200176292142131 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32978723404255317, + "acc_stderr": 0.028045946942042394, + "acc_norm": 0.32978723404255317, + "acc_norm_stderr": 0.028045946942042394 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.26785714285714285, + "acc_stderr": 0.04203277291467762, + "acc_norm": 0.26785714285714285, + "acc_norm_stderr": 0.04203277291467762 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3425925925925926, + "acc_stderr": 0.03236585252602157, + "acc_norm": 0.3425925925925926, + "acc_norm_stderr": 0.03236585252602157 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24581005586592178, + "acc_stderr": 0.014400296429225608, + "acc_norm": 0.24581005586592178, + "acc_norm_stderr": 0.014400296429225608 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + 
"acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.33088235294117646, + "acc_stderr": 0.028582709753898428, + "acc_norm": 0.33088235294117646, + "acc_norm_stderr": 0.028582709753898428 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4448979591836735, + "acc_stderr": 0.03181425118197787, + "acc_norm": 0.4448979591836735, + "acc_norm_stderr": 0.03181425118197787 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6624472573839663, + "acc_stderr": 0.03078154910202622, + "acc_norm": 0.6624472573839663, + "acc_norm_stderr": 0.03078154910202622 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.36310299869621904, + "acc_stderr": 0.012282264406018765, + "acc_norm": 0.36310299869621904, + "acc_norm_stderr": 0.012282264406018765 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5343137254901961, + "acc_stderr": 0.03501038327635897, + "acc_norm": 0.5343137254901961, + "acc_norm_stderr": 0.03501038327635897 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6303030303030303, + "acc_stderr": 0.03769430314512568, + "acc_norm": 0.6303030303030303, + "acc_norm_stderr": 0.03769430314512568 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3353733170134639, + "mc1_stderr": 0.016527534039668987, + "mc2": 0.5056596305681444, + "mc2_stderr": 0.015473471074051047 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4734356552538371, + "acc_stderr": 0.017166075717577747, + "acc_norm": 0.5301062573789846, + "acc_norm_stderr": 0.01715916359017022 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + 
"harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + 
"harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "lIlBrother/llama2-merge-v0.2", + "model_sha": "570a429fcf20bc0af28daf1286bc45d1829f5122", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/lIlBrother/llama2-merge-v0.3/result_2023-11-11 02:04:09.json b/lIlBrother/llama2-merge-v0.3/result_2023-11-11 02:04:09.json new file mode 100644 index 0000000000000000000000000000000000000000..27792e8d137d80573ced55959bd547d23daeb547 --- /dev/null +++ b/lIlBrother/llama2-merge-v0.3/result_2023-11-11 02:04:09.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3873720136518771, + "acc_stderr": 0.014235872487909869, + "acc_norm": 0.4257679180887372, + "acc_norm_stderr": 0.014449464278868803 + }, + "harness|ko_hellaswag|10": { + "acc": 0.43616809400517825, + "acc_stderr": 0.004948952519517522, + "acc_norm": 0.5819557857000598, + "acc_norm_stderr": 0.004922294797766665 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.543859649122807, + "acc_stderr": 0.03820042586602966, + "acc_norm": 0.543859649122807, + "acc_norm_stderr": 0.03820042586602966 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5242718446601942, + "acc_stderr": 0.049449010929737795, + "acc_norm": 0.5242718446601942, + "acc_norm_stderr": 0.049449010929737795 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5606641123882503, 
+ "acc_stderr": 0.017747874245683602, + "acc_norm": 0.5606641123882503, + "acc_norm_stderr": 0.017747874245683602 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.5111111111111111, + "acc_stderr": 0.04318275491977976, + "acc_norm": 0.5111111111111111, + "acc_norm_stderr": 0.04318275491977976 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206824, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206824 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.425531914893617, + "acc_stderr": 0.03232146916224468, + "acc_norm": 0.425531914893617, + "acc_norm_stderr": 0.03232146916224468 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.41566265060240964, + "acc_stderr": 0.03836722176598053, + "acc_norm": 0.41566265060240964, + "acc_norm_stderr": 0.03836722176598053 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5401929260450161, + "acc_stderr": 0.028306190403305696, + "acc_norm": 0.5401929260450161, + "acc_norm_stderr": 0.028306190403305696 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.47533632286995514, + "acc_stderr": 0.03351695167652628, + "acc_norm": 0.47533632286995514, + "acc_norm_stderr": 0.03351695167652628 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48091603053435117, + "acc_stderr": 0.04382094705550989, + "acc_norm": 0.48091603053435117, + "acc_norm_stderr": 0.04382094705550989 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.45, + "acc_stderr": 0.049999999999999996, + "acc_norm": 0.45, + "acc_norm_stderr": 0.049999999999999996 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5707070707070707, + "acc_stderr": 0.035265527246011986, + "acc_norm": 0.5707070707070707, + "acc_norm_stderr": 0.035265527246011986 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.46206896551724136, + "acc_stderr": 0.041546596717075474, + "acc_norm": 0.46206896551724136, + "acc_norm_stderr": 0.041546596717075474 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 
0.21568627450980393, + "acc_stderr": 0.04092563958237657, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237657 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4579831932773109, + "acc_stderr": 0.03236361111951941, + "acc_norm": 0.4579831932773109, + "acc_norm_stderr": 0.03236361111951941 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.45384615384615384, + "acc_stderr": 0.02524277098712617, + "acc_norm": 0.45384615384615384, + "acc_norm_stderr": 0.02524277098712617 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.04803752235190192, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.04803752235190192 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39901477832512317, + "acc_stderr": 0.03445487686264715, + "acc_norm": 0.39901477832512317, + "acc_norm_stderr": 0.03445487686264715 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.45806451612903226, + "acc_stderr": 0.028343787250540632, + "acc_norm": 0.45806451612903226, + "acc_norm_stderr": 0.028343787250540632 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6581196581196581, + "acc_stderr": 0.03107502852650775, + "acc_norm": 0.6581196581196581, + "acc_norm_stderr": 0.03107502852650775 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.44528301886792454, + "acc_stderr": 0.03058805297427065, + "acc_norm": 0.44528301886792454, + "acc_norm_stderr": 0.03058805297427065 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.04769300568972744, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.04769300568972744 + }, + 
"harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2814814814814815, + "acc_stderr": 0.027420019350945277, + "acc_norm": 0.2814814814814815, + "acc_norm_stderr": 0.027420019350945277 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2781456953642384, + "acc_stderr": 0.03658603262763743, + "acc_norm": 0.2781456953642384, + "acc_norm_stderr": 0.03658603262763743 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.572139303482587, + "acc_stderr": 0.03498541988407795, + "acc_norm": 0.572139303482587, + "acc_norm_stderr": 0.03498541988407795 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4046242774566474, + "acc_stderr": 0.0374246119388725, + "acc_norm": 0.4046242774566474, + "acc_norm_stderr": 0.0374246119388725 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.023266512213730575, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.023266512213730575 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3541666666666667, + "acc_stderr": 0.039994111357535424, + "acc_norm": 0.3541666666666667, + "acc_norm_stderr": 0.039994111357535424 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5520231213872833, + "acc_stderr": 0.026772990653361816, + "acc_norm": 0.5520231213872833, + "acc_norm_stderr": 0.026772990653361816 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5460122699386503, + "acc_stderr": 0.0391170190467718, + "acc_norm": 0.5460122699386503, + "acc_norm_stderr": 0.0391170190467718 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.49691358024691357, + "acc_stderr": 0.027820214158594377, + "acc_norm": 0.49691358024691357, + "acc_norm_stderr": 
0.027820214158594377 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5699481865284974, + "acc_stderr": 0.03572954333144808, + "acc_norm": 0.5699481865284974, + "acc_norm_stderr": 0.03572954333144808 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.0414243971948936, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.0414243971948936 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5798165137614679, + "acc_stderr": 0.021162420048273515, + "acc_norm": 0.5798165137614679, + "acc_norm_stderr": 0.021162420048273515 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.04134913018303316, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.04134913018303316 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.43137254901960786, + "acc_stderr": 0.028358956313423552, + "acc_norm": 0.43137254901960786, + "acc_norm_stderr": 0.028358956313423552 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.44, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.44, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6611570247933884, + "acc_stderr": 0.04320767807536671, + "acc_norm": 0.6611570247933884, + "acc_norm_stderr": 0.04320767807536671 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40131578947368424, + "acc_stderr": 0.03988903703336285, + "acc_norm": 0.40131578947368424, + "acc_norm_stderr": 0.03988903703336285 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4035947712418301, + "acc_stderr": 0.019848280168401164, + "acc_norm": 0.4035947712418301, + "acc_norm_stderr": 0.019848280168401164 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3475177304964539, + "acc_stderr": 0.028406627809590947, + 
"acc_norm": 0.3475177304964539, + "acc_norm_stderr": 0.028406627809590947 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.04287858751340455, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.04287858751340455 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.03256850570293648, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.03256850570293648 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3786764705882353, + "acc_stderr": 0.029465133639776125, + "acc_norm": 0.3786764705882353, + "acc_norm_stderr": 0.029465133639776125 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4122448979591837, + "acc_stderr": 0.03151236044674281, + "acc_norm": 0.4122448979591837, + "acc_norm_stderr": 0.03151236044674281 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6708860759493671, + "acc_stderr": 0.030587326294702368, + "acc_norm": 0.6708860759493671, + "acc_norm_stderr": 0.030587326294702368 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3409387222946545, + "acc_stderr": 0.01210681720306721, + "acc_norm": 0.3409387222946545, + "acc_norm_stderr": 0.01210681720306721 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5294117647058824, + "acc_stderr": 0.03503235296367992, + "acc_norm": 0.5294117647058824, + "acc_norm_stderr": 0.03503235296367992 + }, + 
"harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6242424242424243, + "acc_stderr": 0.03781887353205982, + "acc_norm": 0.6242424242424243, + "acc_norm_stderr": 0.03781887353205982 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3378212974296206, + "mc1_stderr": 0.016557167322516893, + "mc2": 0.506693694303165, + "mc2_stderr": 0.015539179895065392 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4722550177095632, + "acc_stderr": 0.01716386797945601, + "acc_norm": 0.5289256198347108, + "acc_norm_stderr": 0.017161563949916345 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + 
"harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "lIlBrother/llama2-merge-v0.3", + "model_sha": "9051a05341dbfb26b4a83210c8fb6d72b6b64bca", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/lIlBrother/llama2-merge-v0.4/result_2023-12-04 12:04:17.json b/lIlBrother/llama2-merge-v0.4/result_2023-12-04 12:04:17.json new file mode 100644 index 0000000000000000000000000000000000000000..bb631d0ac5b8a364df993e23487770c5777930a1 --- /dev/null +++ b/lIlBrother/llama2-merge-v0.4/result_2023-12-04 12:04:17.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 
0.41552901023890787, + "acc_stderr": 0.014401366641216376, + "acc_norm": 0.4786689419795222, + "acc_norm_stderr": 0.014598087973127106 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4296952798247361, + "acc_stderr": 0.0049402086413720785, + "acc_norm": 0.5809599681338379, + "acc_norm_stderr": 0.004923935749842495 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5146198830409356, + "acc_stderr": 0.038331852752130254, + "acc_norm": 0.5146198830409356, + "acc_norm_stderr": 0.038331852752130254 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5631067961165048, + "acc_stderr": 0.04911147107365777, + "acc_norm": 0.5631067961165048, + "acc_norm_stderr": 0.04911147107365777 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.558109833971903, + "acc_stderr": 0.01775880053421441, + "acc_norm": 0.558109833971903, + "acc_norm_stderr": 0.01775880053421441 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.04309732901036354, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.04309732901036354 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4, + "acc_stderr": 0.03202563076101735, + "acc_norm": 0.4, + "acc_norm_stderr": 0.03202563076101735 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.41566265060240964, + "acc_stderr": 0.03836722176598053, + "acc_norm": 0.41566265060240964, + "acc_norm_stderr": 0.03836722176598053 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5241157556270096, + "acc_stderr": 0.028365041542564577, + "acc_norm": 0.5241157556270096, + "acc_norm_stderr": 0.028365041542564577 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.515695067264574, + "acc_stderr": 0.0335412657542081, + "acc_norm": 0.515695067264574, + "acc_norm_stderr": 0.0335412657542081 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48854961832061067, + 
"acc_stderr": 0.043841400240780176, + "acc_norm": 0.48854961832061067, + "acc_norm_stderr": 0.043841400240780176 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.45, + "acc_stderr": 0.04999999999999999, + "acc_norm": 0.45, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5858585858585859, + "acc_stderr": 0.035094383488796295, + "acc_norm": 0.5858585858585859, + "acc_norm_stderr": 0.035094383488796295 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.43448275862068964, + "acc_stderr": 0.041307408795554966, + "acc_norm": 0.43448275862068964, + "acc_norm_stderr": 0.041307408795554966 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.04389869956808778, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.04389869956808778 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4831932773109244, + "acc_stderr": 0.03246013680375308, + "acc_norm": 0.4831932773109244, + "acc_norm_stderr": 0.03246013680375308 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.47435897435897434, + "acc_stderr": 0.02531764972644865, + "acc_norm": 0.47435897435897434, + "acc_norm_stderr": 0.02531764972644865 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.53, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.53, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.03465304488406796, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.03465304488406796 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.46774193548387094, + 
"acc_stderr": 0.028384747788813326, + "acc_norm": 0.46774193548387094, + "acc_norm_stderr": 0.028384747788813326 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6452991452991453, + "acc_stderr": 0.031342504862454025, + "acc_norm": 0.6452991452991453, + "acc_norm_stderr": 0.031342504862454025 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4490566037735849, + "acc_stderr": 0.030612730713641092, + "acc_norm": 0.4490566037735849, + "acc_norm_stderr": 0.030612730713641092 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5818181818181818, + "acc_stderr": 0.04724577405731572, + "acc_norm": 0.5818181818181818, + "acc_norm_stderr": 0.04724577405731572 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.027840811495871927, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.027840811495871927 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33774834437086093, + "acc_stderr": 0.03861557546255169, + "acc_norm": 0.33774834437086093, + "acc_norm_stderr": 0.03861557546255169 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6218905472636815, + "acc_stderr": 0.03428867848778657, + "acc_norm": 0.6218905472636815, + "acc_norm_stderr": 0.03428867848778657 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.47398843930635837, + "acc_stderr": 0.038073017265045125, + "acc_norm": 0.47398843930635837, + "acc_norm_stderr": 0.038073017265045125 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.31216931216931215, + "acc_stderr": 0.023865206836972592, + "acc_norm": 0.31216931216931215, + "acc_norm_stderr": 0.023865206836972592 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3819444444444444, + "acc_stderr": 0.040629907841466674, + "acc_norm": 0.3819444444444444, + "acc_norm_stderr": 0.040629907841466674 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + 
"harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.68, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.68, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.49710982658959535, + "acc_stderr": 0.02691864538323901, + "acc_norm": 0.49710982658959535, + "acc_norm_stderr": 0.02691864538323901 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5644171779141104, + "acc_stderr": 0.03895632464138937, + "acc_norm": 0.5644171779141104, + "acc_norm_stderr": 0.03895632464138937 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.027801656212323674, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.027801656212323674 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6010362694300518, + "acc_stderr": 0.03533999094065696, + "acc_norm": 0.6010362694300518, + "acc_norm_stderr": 0.03533999094065696 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.041424397194893596, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.041424397194893596 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6293577981651376, + "acc_stderr": 0.020707458164352984, + "acc_norm": 0.6293577981651376, + "acc_norm_stderr": 0.020707458164352984 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3253968253968254, + "acc_stderr": 0.041905964388711366, + "acc_norm": 0.3253968253968254, + "acc_norm_stderr": 0.041905964388711366 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.41830065359477125, + "acc_stderr": 0.028245134024387292, + "acc_norm": 0.41830065359477125, + "acc_norm_stderr": 0.028245134024387292 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + 
"harness|ko_mmlu_international_law|5": { + "acc": 0.6528925619834711, + "acc_stderr": 0.04345724570292534, + "acc_norm": 0.6528925619834711, + "acc_norm_stderr": 0.04345724570292534 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4342105263157895, + "acc_stderr": 0.04033565667848319, + "acc_norm": 0.4342105263157895, + "acc_norm_stderr": 0.04033565667848319 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4035947712418301, + "acc_stderr": 0.019848280168401164, + "acc_norm": 0.4035947712418301, + "acc_norm_stderr": 0.019848280168401164 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.34397163120567376, + "acc_stderr": 0.028338017428611317, + "acc_norm": 0.34397163120567376, + "acc_norm_stderr": 0.028338017428611317 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.26785714285714285, + "acc_stderr": 0.04203277291467762, + "acc_norm": 0.26785714285714285, + "acc_norm_stderr": 0.04203277291467762 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4027777777777778, + "acc_stderr": 0.03344887382997866, + "acc_norm": 0.4027777777777778, + "acc_norm_stderr": 0.03344887382997866 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24581005586592178, + "acc_stderr": 0.014400296429225608, + "acc_norm": 0.24581005586592178, + "acc_norm_stderr": 0.014400296429225608 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.40808823529411764, + "acc_stderr": 0.02985526139348393, + "acc_norm": 0.40808823529411764, + "acc_norm_stderr": 0.02985526139348393 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5142857142857142, + "acc_stderr": 0.03199615232806286, + "acc_norm": 
0.5142857142857142, + "acc_norm_stderr": 0.03199615232806286 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6582278481012658, + "acc_stderr": 0.030874537537553617, + "acc_norm": 0.6582278481012658, + "acc_norm_stderr": 0.030874537537553617 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3741851368970013, + "acc_stderr": 0.012359335618172063, + "acc_norm": 0.3741851368970013, + "acc_norm_stderr": 0.012359335618172063 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5196078431372549, + "acc_stderr": 0.03506612560524866, + "acc_norm": 0.5196078431372549, + "acc_norm_stderr": 0.03506612560524866 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5333333333333333, + "acc_stderr": 0.03895658065271847, + "acc_norm": 0.5333333333333333, + "acc_norm_stderr": 0.03895658065271847 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2827417380660955, + "mc1_stderr": 0.015764770836777308, + "mc2": 0.44672046780889485, + "mc2_stderr": 0.01512449543014224 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.46162927981109797, + "acc_stderr": 0.01713966022184556, + "acc_norm": 0.5466351829988194, + "acc_norm_stderr": 0.01711541822522687 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + 
"harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "lIlBrother/llama2-merge-v0.4", + "model_sha": 
"71c4402544b11eef6ae4a156fe79a452f3c9db53", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/labq/Ko-Qwen2-7B-Instruct-Test/result_2024-08-04 17:17:57.json b/labq/Ko-Qwen2-7B-Instruct-Test/result_2024-08-04 17:17:57.json new file mode 100644 index 0000000000000000000000000000000000000000..e9ab319280f6d400791cf45f849a6f9a7b09d5d1 --- /dev/null +++ b/labq/Ko-Qwen2-7B-Instruct-Test/result_2024-08-04 17:17:57.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.23122866894197952, + "acc_stderr": 0.012320858834772274, + "acc_norm": 0.2568259385665529, + "acc_norm_stderr": 0.012766923794116801 + }, + "harness|ko_hellaswag|10": { + "acc": 0.2504481179047998, + "acc_stderr": 0.004323856300539177, + "acc_norm": 0.2504481179047998, + "acc_norm_stderr": 0.004323856300539177 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.23391812865497075, + "acc_stderr": 0.032467217651178264, + "acc_norm": 0.23391812865497075, + "acc_norm_stderr": 0.032467217651178264 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.17475728155339806, + "acc_stderr": 0.037601780060266196, + "acc_norm": 0.17475728155339806, + "acc_norm_stderr": 0.037601780060266196 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2848020434227331, + "acc_stderr": 0.016139174096522577, + "acc_norm": 0.2848020434227331, + "acc_norm_stderr": 0.016139174096522577 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2740740740740741, + "acc_stderr": 0.03853254836552004, + "acc_norm": 0.2740740740740741, + "acc_norm_stderr": 0.03853254836552004 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3191489361702128, + "acc_stderr": 0.030472973363380045, + 
"acc_norm": 0.3191489361702128, + "acc_norm_stderr": 0.030472973363380045 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.28313253012048195, + "acc_stderr": 0.03507295431370518, + "acc_norm": 0.28313253012048195, + "acc_norm_stderr": 0.03507295431370518 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.1864951768488746, + "acc_stderr": 0.022122439772480757, + "acc_norm": 0.1864951768488746, + "acc_norm_stderr": 0.022122439772480757 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.36771300448430494, + "acc_stderr": 0.032361983509282745, + "acc_norm": 0.36771300448430494, + "acc_norm_stderr": 0.032361983509282745 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2595419847328244, + "acc_stderr": 0.03844876139785271, + "acc_norm": 0.2595419847328244, + "acc_norm_stderr": 0.03844876139785271 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.17676767676767677, + "acc_stderr": 0.027178752639044915, + "acc_norm": 0.17676767676767677, + "acc_norm_stderr": 0.027178752639044915 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.23448275862068965, + "acc_stderr": 0.035306258743465914, + "acc_norm": 0.23448275862068965, + "acc_norm_stderr": 0.035306258743465914 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.041583075330832865, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.041583075330832865 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.21008403361344538, + "acc_stderr": 0.026461398717471874, + "acc_norm": 0.21008403361344538, + "acc_norm_stderr": 0.026461398717471874 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.20256410256410257, + "acc_stderr": 0.020377660970371393, + "acc_norm": 0.20256410256410257, + "acc_norm_stderr": 0.020377660970371393 + }, + 
"harness|ko_mmlu_computer_security|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.18, + "acc_stderr": 0.038612291966536955, + "acc_norm": 0.18, + "acc_norm_stderr": 0.038612291966536955 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.23148148148148148, + "acc_stderr": 0.04077494709252628, + "acc_norm": 0.23148148148148148, + "acc_norm_stderr": 0.04077494709252628 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.32019704433497537, + "acc_stderr": 0.032826493853041504, + "acc_norm": 0.32019704433497537, + "acc_norm_stderr": 0.032826493853041504 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.25483870967741934, + "acc_stderr": 0.024790118459332208, + "acc_norm": 0.25483870967741934, + "acc_norm_stderr": 0.024790118459332208 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.23931623931623933, + "acc_stderr": 0.027951826808924333, + "acc_norm": 0.23931623931623933, + "acc_norm_stderr": 0.027951826808924333 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.21509433962264152, + "acc_stderr": 0.025288394502891377, + "acc_norm": 0.21509433962264152, + "acc_norm_stderr": 0.025288394502891377 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2727272727272727, + "acc_stderr": 0.042657921109405895, + "acc_norm": 0.2727272727272727, + "acc_norm_stderr": 0.042657921109405895 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2111111111111111, + "acc_stderr": 0.024882116857655075, + "acc_norm": 0.2111111111111111, + "acc_norm_stderr": 0.024882116857655075 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.19205298013245034, + "acc_stderr": 0.03216298420593614, + "acc_norm": 0.19205298013245034, + "acc_norm_stderr": 0.03216298420593614 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.2736318407960199, + "acc_stderr": 0.03152439186555404, + "acc_norm": 0.2736318407960199, + 
"acc_norm_stderr": 0.03152439186555404 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.20809248554913296, + "acc_stderr": 0.030952890217749884, + "acc_norm": 0.20809248554913296, + "acc_norm_stderr": 0.030952890217749884 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.23544973544973544, + "acc_stderr": 0.021851509822031708, + "acc_norm": 0.23544973544973544, + "acc_norm_stderr": 0.021851509822031708 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2569444444444444, + "acc_stderr": 0.03653946969442099, + "acc_norm": 0.2569444444444444, + "acc_norm_stderr": 0.03653946969442099 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542129, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542129 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2254335260115607, + "acc_stderr": 0.022497230190967547, + "acc_norm": 0.2254335260115607, + "acc_norm_stderr": 0.022497230190967547 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.1901840490797546, + "acc_stderr": 0.030833491146281224, + "acc_norm": 0.1901840490797546, + "acc_norm_stderr": 0.030833491146281224 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.21604938271604937, + "acc_stderr": 0.022899162918445796, + "acc_norm": 0.21604938271604937, + "acc_norm_stderr": 0.022899162918445796 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.19689119170984457, + "acc_stderr": 0.028697873971860677, + "acc_norm": 0.19689119170984457, + "acc_norm_stderr": 0.028697873971860677 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.19298245614035087, + "acc_stderr": 0.03712454853721368, + 
"acc_norm": 0.19298245614035087, + "acc_norm_stderr": 0.03712454853721368 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.20733944954128442, + "acc_stderr": 0.01738141556360867, + "acc_norm": 0.20733944954128442, + "acc_norm_stderr": 0.01738141556360867 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.1746031746031746, + "acc_stderr": 0.03395490020856113, + "acc_norm": 0.1746031746031746, + "acc_norm_stderr": 0.03395490020856113 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.02392915551735129, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.02392915551735129 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2396694214876033, + "acc_stderr": 0.038968789850704164, + "acc_norm": 0.2396694214876033, + "acc_norm_stderr": 0.038968789850704164 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.17763157894736842, + "acc_stderr": 0.03110318238312338, + "acc_norm": 0.17763157894736842, + "acc_norm_stderr": 0.03110318238312338 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.25, + "acc_stderr": 0.01751781884501444, + "acc_norm": 0.25, + "acc_norm_stderr": 0.01751781884501444 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2872340425531915, + "acc_stderr": 0.026992199173064356, + "acc_norm": 0.2872340425531915, + "acc_norm_stderr": 0.026992199173064356 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3125, + "acc_stderr": 0.043994650575715215, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.043994650575715215 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.1527777777777778, + "acc_stderr": 0.024536326026134238, + "acc_norm": 0.1527777777777778, + "acc_norm_stderr": 0.024536326026134238 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23687150837988827, + "acc_stderr": 
0.014219570788103984, + "acc_norm": 0.23687150837988827, + "acc_norm_stderr": 0.014219570788103984 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.18382352941176472, + "acc_stderr": 0.02352924218519311, + "acc_norm": 0.18382352941176472, + "acc_norm_stderr": 0.02352924218519311 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.18775510204081633, + "acc_stderr": 0.025000256039546198, + "acc_norm": 0.18775510204081633, + "acc_norm_stderr": 0.025000256039546198 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.270042194092827, + "acc_stderr": 0.028900721906293426, + "acc_norm": 0.270042194092827, + "acc_norm_stderr": 0.028900721906293426 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.25945241199478486, + "acc_stderr": 0.011195262076350318, + "acc_norm": 0.25945241199478486, + "acc_norm_stderr": 0.011195262076350318 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.25, + "acc_stderr": 0.03039153369274154, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03039153369274154 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03225078108306289, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03225078108306289 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.25703794369645044, + "mc1_stderr": 0.015298077509485083, + "mc2": NaN, + "mc2_stderr": NaN + }, + "harness|ko_commongen_v2|2": { + "acc": 0.24793388429752067, + "acc_stderr": 0.014846044968252247, + "acc_norm": 0.24793388429752067, + "acc_norm_stderr": 0.014846044968252247 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + 
"harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + 
"harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "labq/Ko-Qwen2-7B-Instruct-Test", + "model_sha": "3c9d140e8a26960988bb2f60b77581516223d8e9", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/labq/Solar-Ko-Recovery-11B-sft-Test/result_2024-08-07 00:12:32.json b/labq/Solar-Ko-Recovery-11B-sft-Test/result_2024-08-07 00:12:32.json new file mode 100644 index 0000000000000000000000000000000000000000..05fd56042e70427305185d4b25f6364b8039a1de --- /dev/null +++ b/labq/Solar-Ko-Recovery-11B-sft-Test/result_2024-08-07 00:12:32.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.47696245733788395, + "acc_stderr": 0.014595873205358264, + "acc_norm": 0.5341296928327645, + "acc_norm_stderr": 0.014577311315231099 + }, + "harness|ko_hellaswag|10": { + "acc": 0.46683927504481176, + "acc_stderr": 0.0049787954542167175, + "acc_norm": 0.6426010754829715, + "acc_norm_stderr": 0.00478254275410208 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.7017543859649122, + "acc_stderr": 0.03508771929824562, + "acc_norm": 0.7017543859649122, + "acc_norm_stderr": 0.03508771929824562 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.7087378640776699, + "acc_stderr": 
0.044986763205729224, + "acc_norm": 0.7087378640776699, + "acc_norm_stderr": 0.044986763205729224 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.7318007662835249, + "acc_stderr": 0.015842430835269466, + "acc_norm": 0.7318007662835249, + "acc_norm_stderr": 0.015842430835269466 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4740740740740741, + "acc_stderr": 0.04313531696750574, + "acc_norm": 0.4740740740740741, + "acc_norm_stderr": 0.04313531696750574 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.22, + "acc_stderr": 0.0416333199893227, + "acc_norm": 0.22, + "acc_norm_stderr": 0.0416333199893227 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.46808510638297873, + "acc_stderr": 0.03261936918467382, + "acc_norm": 0.46808510638297873, + "acc_norm_stderr": 0.03261936918467382 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4759036144578313, + "acc_stderr": 0.03887971849597264, + "acc_norm": 0.4759036144578313, + "acc_norm_stderr": 0.03887971849597264 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6463022508038585, + "acc_stderr": 0.027155208103200875, + "acc_norm": 0.6463022508038585, + "acc_norm_stderr": 0.027155208103200875 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.6412556053811659, + "acc_stderr": 0.03219079200419996, + "acc_norm": 0.6412556053811659, + "acc_norm_stderr": 0.03219079200419996 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6412213740458015, + "acc_stderr": 0.04206739313864908, + "acc_norm": 0.6412213740458015, + "acc_norm_stderr": 0.04206739313864908 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7424242424242424, + "acc_stderr": 0.03115626951964684, + "acc_norm": 0.7424242424242424, + "acc_norm_stderr": 0.03115626951964684 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4827586206896552, + "acc_stderr": 
0.04164188720169377, + "acc_norm": 0.4827586206896552, + "acc_norm_stderr": 0.04164188720169377 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.30392156862745096, + "acc_stderr": 0.04576665403207761, + "acc_norm": 0.30392156862745096, + "acc_norm_stderr": 0.04576665403207761 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5966386554621849, + "acc_stderr": 0.031866081214088314, + "acc_norm": 0.5966386554621849, + "acc_norm_stderr": 0.031866081214088314 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5564102564102564, + "acc_stderr": 0.025189149894764208, + "acc_norm": 0.5564102564102564, + "acc_norm_stderr": 0.025189149894764208 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.62, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.62, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6111111111111112, + "acc_stderr": 0.04712821257426769, + "acc_norm": 0.6111111111111112, + "acc_norm_stderr": 0.04712821257426769 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.45320197044334976, + "acc_stderr": 0.035025446508458714, + "acc_norm": 0.45320197044334976, + "acc_norm_stderr": 0.035025446508458714 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6064516129032258, + "acc_stderr": 0.02779187875313227, + "acc_norm": 0.6064516129032258, + "acc_norm_stderr": 0.02779187875313227 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7991452991452992, + "acc_stderr": 0.02624677294689048, + "acc_norm": 0.7991452991452992, + "acc_norm_stderr": 0.02624677294689048 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.569811320754717, + "acc_stderr": 0.030471445867183238, + "acc_norm": 0.569811320754717, + "acc_norm_stderr": 0.030471445867183238 + }, + "harness|ko_mmlu_public_relations|5": { + 
"acc": 0.6272727272727273, + "acc_stderr": 0.04631381319425465, + "acc_norm": 0.6272727272727273, + "acc_norm_stderr": 0.04631381319425465 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3296296296296296, + "acc_stderr": 0.02866120111652458, + "acc_norm": 0.3296296296296296, + "acc_norm_stderr": 0.02866120111652458 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3576158940397351, + "acc_stderr": 0.03913453431177258, + "acc_norm": 0.3576158940397351, + "acc_norm_stderr": 0.03913453431177258 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6915422885572139, + "acc_stderr": 0.03265819588512697, + "acc_norm": 0.6915422885572139, + "acc_norm_stderr": 0.03265819588512697 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4913294797687861, + "acc_stderr": 0.03811890988940412, + "acc_norm": 0.4913294797687861, + "acc_norm_stderr": 0.03811890988940412 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.024870815251057093, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.024870815251057093 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5416666666666666, + "acc_stderr": 0.04166666666666665, + "acc_norm": 0.5416666666666666, + "acc_norm_stderr": 0.04166666666666665 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.76, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.76, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.6358381502890174, + "acc_stderr": 0.025906632631016127, + "acc_norm": 0.6358381502890174, + "acc_norm_stderr": 0.025906632631016127 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5828220858895705, + "acc_stderr": 0.03874102859818082, + "acc_norm": 0.5828220858895705, + "acc_norm_stderr": 0.03874102859818082 + }, + 
"harness|ko_mmlu_prehistory|5": { + "acc": 0.5864197530864198, + "acc_stderr": 0.027402042040269973, + "acc_norm": 0.5864197530864198, + "acc_norm_stderr": 0.027402042040269973 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7150259067357513, + "acc_stderr": 0.03257714077709661, + "acc_norm": 0.7150259067357513, + "acc_norm_stderr": 0.03257714077709661 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.34210526315789475, + "acc_stderr": 0.04462917535336937, + "acc_norm": 0.34210526315789475, + "acc_norm_stderr": 0.04462917535336937 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6752293577981652, + "acc_stderr": 0.020077729109310327, + "acc_norm": 0.6752293577981652, + "acc_norm_stderr": 0.020077729109310327 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.40476190476190477, + "acc_stderr": 0.043902592653775614, + "acc_norm": 0.40476190476190477, + "acc_norm_stderr": 0.043902592653775614 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.6045751633986928, + "acc_stderr": 0.02799672318063146, + "acc_norm": 0.6045751633986928, + "acc_norm_stderr": 0.02799672318063146 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.64, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.64, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7107438016528925, + "acc_stderr": 0.041391127276354626, + "acc_norm": 0.7107438016528925, + "acc_norm_stderr": 0.041391127276354626 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5723684210526315, + "acc_stderr": 0.04026097083296563, + "acc_norm": 0.5723684210526315, + "acc_norm_stderr": 0.04026097083296563 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5130718954248366, + "acc_stderr": 0.020220920829626916, + "acc_norm": 0.5130718954248366, + 
"acc_norm_stderr": 0.020220920829626916 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3900709219858156, + "acc_stderr": 0.029097675599463926, + "acc_norm": 0.3900709219858156, + "acc_norm_stderr": 0.029097675599463926 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4017857142857143, + "acc_stderr": 0.04653333146973646, + "acc_norm": 0.4017857142857143, + "acc_norm_stderr": 0.04653333146973646 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.03362277436608043, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.03362277436608043 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.22905027932960895, + "acc_stderr": 0.014054314935614563, + "acc_norm": 0.22905027932960895, + "acc_norm_stderr": 0.014054314935614563 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.66, + "acc_stderr": 0.04760952285695238, + "acc_norm": 0.66, + "acc_norm_stderr": 0.04760952285695238 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5147058823529411, + "acc_stderr": 0.03035969707904611, + "acc_norm": 0.5147058823529411, + "acc_norm_stderr": 0.03035969707904611 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6081632653061224, + "acc_stderr": 0.031251275910891656, + "acc_norm": 0.6081632653061224, + "acc_norm_stderr": 0.031251275910891656 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.729957805907173, + "acc_stderr": 0.028900721906293426, + "acc_norm": 0.729957805907173, + "acc_norm_stderr": 0.028900721906293426 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3859191655801825, + "acc_stderr": 0.012433398911476148, + "acc_norm": 0.3859191655801825, + "acc_norm_stderr": 0.012433398911476148 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 
0.7009803921568627, + "acc_stderr": 0.03213325717373617, + "acc_norm": 0.7009803921568627, + "acc_norm_stderr": 0.03213325717373617 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6545454545454545, + "acc_stderr": 0.037131580674819135, + "acc_norm": 0.6545454545454545, + "acc_norm_stderr": 0.037131580674819135 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.30966952264381886, + "mc1_stderr": 0.01618574435514491, + "mc2": 0.4464315200395308, + "mc2_stderr": 0.015096731845835917 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5974025974025974, + "acc_stderr": 0.016861020486407773, + "acc_norm": 0.615112160566706, + "acc_norm_stderr": 0.016728579701498644 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + 
"harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "labq/Solar-Ko-Recovery-11B-sft-Test", + "model_sha": "f43be05fd819ea14f040778b94a444d974803edd", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/lamhieu/ghost-7b-v0.9.0/result_2024-03-13 09:04:14.json b/lamhieu/ghost-7b-v0.9.0/result_2024-03-13 09:04:14.json new file mode 100644 index 0000000000000000000000000000000000000000..26d735d34b7781c49dd95b67b54637ec7834a687 --- /dev/null +++ 
b/lamhieu/ghost-7b-v0.9.0/result_2024-03-13 09:04:14.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.26706484641638223, + "acc_stderr": 0.012928933196496354, + "acc_norm": 0.3242320819112628, + "acc_norm_stderr": 0.013678810399518824 + }, + "harness|ko_hellaswag|10": { + "acc": 0.326229834694284, + "acc_stderr": 0.004678743563766657, + "acc_norm": 0.394443337980482, + "acc_norm_stderr": 0.004877319683639068 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.3508771929824561, + "acc_stderr": 0.03660298834049163, + "acc_norm": 0.3508771929824561, + "acc_norm_stderr": 0.03660298834049163 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4563106796116505, + "acc_stderr": 0.049318019942204146, + "acc_norm": 0.4563106796116505, + "acc_norm_stderr": 0.049318019942204146 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.39208173690932313, + "acc_stderr": 0.017458524050147636, + "acc_norm": 0.39208173690932313, + "acc_norm_stderr": 0.017458524050147636 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.32592592592592595, + "acc_stderr": 0.040491220417025055, + "acc_norm": 0.32592592592592595, + "acc_norm_stderr": 0.040491220417025055 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3574468085106383, + "acc_stderr": 0.03132941789476425, + "acc_norm": 0.3574468085106383, + "acc_norm_stderr": 0.03132941789476425 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3132530120481928, + "acc_stderr": 0.036108050180310235, + "acc_norm": 0.3132530120481928, + "acc_norm_stderr": 0.036108050180310235 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.36977491961414793, + "acc_stderr": 0.027417996705630998, + "acc_norm": 0.36977491961414793, + "acc_norm_stderr": 0.027417996705630998 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3721973094170404, + "acc_stderr": 
0.03244305283008731, + "acc_norm": 0.3721973094170404, + "acc_norm_stderr": 0.03244305283008731 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.35877862595419846, + "acc_stderr": 0.04206739313864908, + "acc_norm": 0.35877862595419846, + "acc_norm_stderr": 0.04206739313864908 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.37373737373737376, + "acc_stderr": 0.03446897738659333, + "acc_norm": 0.37373737373737376, + "acc_norm_stderr": 0.03446897738659333 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4413793103448276, + "acc_stderr": 0.041379310344827586, + "acc_norm": 0.4413793103448276, + "acc_norm_stderr": 0.041379310344827586 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.042801058373643966, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.042801058373643966 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.42016806722689076, + "acc_stderr": 0.03206183783236152, + "acc_norm": 0.42016806722689076, + "acc_norm_stderr": 0.03206183783236152 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3384615384615385, + "acc_stderr": 0.023991500500313023, + "acc_norm": 0.3384615384615385, + "acc_norm_stderr": 0.023991500500313023 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.57, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.57, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.0471282125742677, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.0471282125742677 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 
0.3694581280788177, + "acc_stderr": 0.033959703819985726, + "acc_norm": 0.3694581280788177, + "acc_norm_stderr": 0.033959703819985726 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4064516129032258, + "acc_stderr": 0.027941727346256315, + "acc_norm": 0.4064516129032258, + "acc_norm_stderr": 0.027941727346256315 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5897435897435898, + "acc_stderr": 0.03222414045241107, + "acc_norm": 0.5897435897435898, + "acc_norm_stderr": 0.03222414045241107 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3849056603773585, + "acc_stderr": 0.029946498567699945, + "acc_norm": 0.3849056603773585, + "acc_norm_stderr": 0.029946498567699945 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4818181818181818, + "acc_stderr": 0.04785964010794916, + "acc_norm": 0.4818181818181818, + "acc_norm_stderr": 0.04785964010794916 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3074074074074074, + "acc_stderr": 0.028133252578815635, + "acc_norm": 0.3074074074074074, + "acc_norm_stderr": 0.028133252578815635 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.271523178807947, + "acc_stderr": 0.03631329803969653, + "acc_norm": 0.271523178807947, + "acc_norm_stderr": 0.03631329803969653 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5223880597014925, + "acc_stderr": 0.035319879302087305, + "acc_norm": 0.5223880597014925, + "acc_norm_stderr": 0.035319879302087305 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3179190751445087, + "acc_stderr": 0.03550683989165582, + "acc_norm": 0.3179190751445087, + "acc_norm_stderr": 0.03550683989165582 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.023809523809523864, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.023809523809523864 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3402777777777778, + "acc_stderr": 0.039621355734862175, + "acc_norm": 0.3402777777777778, + 
"acc_norm_stderr": 0.039621355734862175 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956913, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956913 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.40173410404624277, + "acc_stderr": 0.02639410417764363, + "acc_norm": 0.40173410404624277, + "acc_norm_stderr": 0.02639410417764363 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3496932515337423, + "acc_stderr": 0.03746668325470021, + "acc_norm": 0.3496932515337423, + "acc_norm_stderr": 0.03746668325470021 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.02733954664066274, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.02733954664066274 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.37823834196891193, + "acc_stderr": 0.034998072761933376, + "acc_norm": 0.37823834196891193, + "acc_norm_stderr": 0.034998072761933376 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.21052631578947367, + "acc_stderr": 0.0383515395439942, + "acc_norm": 0.21052631578947367, + "acc_norm_stderr": 0.0383515395439942 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3706422018348624, + "acc_stderr": 0.020707458164352984, + "acc_norm": 0.3706422018348624, + "acc_norm_stderr": 0.020707458164352984 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.04006168083848878, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.04006168083848878 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.45098039215686275, + "acc_stderr": 0.028491993586171566, + "acc_norm": 
0.45098039215686275, + "acc_norm_stderr": 0.028491993586171566 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5041322314049587, + "acc_stderr": 0.045641987674327526, + "acc_norm": 0.5041322314049587, + "acc_norm_stderr": 0.045641987674327526 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.35526315789473684, + "acc_stderr": 0.03894734487013317, + "acc_norm": 0.35526315789473684, + "acc_norm_stderr": 0.03894734487013317 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.018635594034423972, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.018635594034423972 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32978723404255317, + "acc_stderr": 0.028045946942042398, + "acc_norm": 0.32978723404255317, + "acc_norm_stderr": 0.028045946942042398 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.04547960999764376, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.04547960999764376 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.39351851851851855, + "acc_stderr": 0.03331747876370312, + "acc_norm": 0.39351851851851855, + "acc_norm_stderr": 0.03331747876370312 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.25139664804469275, + "acc_stderr": 0.014508979453553974, + "acc_norm": 0.25139664804469275, + "acc_norm_stderr": 0.014508979453553974 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.28308823529411764, + 
"acc_stderr": 0.0273658611315138, + "acc_norm": 0.28308823529411764, + "acc_norm_stderr": 0.0273658611315138 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.39591836734693875, + "acc_stderr": 0.03130802899065685, + "acc_norm": 0.39591836734693875, + "acc_norm_stderr": 0.03130802899065685 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.48523206751054854, + "acc_stderr": 0.032533028078777386, + "acc_norm": 0.48523206751054854, + "acc_norm_stderr": 0.032533028078777386 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2796610169491525, + "acc_stderr": 0.011463397393861964, + "acc_norm": 0.2796610169491525, + "acc_norm_stderr": 0.011463397393861964 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.031660096793998116, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.031660096793998116 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.30303030303030304, + "acc_stderr": 0.03588624800091709, + "acc_norm": 0.30303030303030304, + "acc_norm_stderr": 0.03588624800091709 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2998776009791922, + "mc1_stderr": 0.016040352966713616, + "mc2": 0.48375410919222295, + "mc2_stderr": 0.01600186191990142 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.27508854781582054, + "acc_stderr": 0.015353010757952652, + "acc_norm": 0.3659976387249115, + "acc_norm_stderr": 0.016561489664895696 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + 
"harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + 
"harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "lamhieu/ghost-7b-v0.9.0", + "model_sha": "590d8e3d3db2a26ee4831e1814a6ec0607cb27dc", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/lamhieu/ghost-7b-v0.9.1/result_2024-03-13 09:04:06.json b/lamhieu/ghost-7b-v0.9.1/result_2024-03-13 09:04:06.json new file mode 100644 index 0000000000000000000000000000000000000000..14e5dc65079ddea87717d4e2b7d0a2edff24f72a --- /dev/null +++ b/lamhieu/ghost-7b-v0.9.1/result_2024-03-13 09:04:06.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.26791808873720135, + "acc_stderr": 0.012942030195136425, + "acc_norm": 0.3267918088737201, + "acc_norm_stderr": 0.013706665975587333 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3236407090221072, + "acc_stderr": 0.004669085411342188, + "acc_norm": 0.3932483569010157, + "acc_norm_stderr": 0.004874728756528195 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.3742690058479532, + "acc_stderr": 0.03711601185389481, + "acc_norm": 0.3742690058479532, + "acc_norm_stderr": 0.03711601185389481 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.39805825242718446, + "acc_stderr": 0.04846748253977239, + "acc_norm": 0.39805825242718446, + "acc_norm_stderr": 0.04846748253977239 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.017268607560005766, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.017268607560005766 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34814814814814815, + "acc_stderr": 0.041153246103369526, + "acc_norm": 0.34814814814814815, + "acc_norm_stderr": 0.041153246103369526 + }, + "harness|ko_mmlu_abstract_algebra|5": { + 
"acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.35319148936170214, + "acc_stderr": 0.031245325202761926, + "acc_norm": 0.35319148936170214, + "acc_norm_stderr": 0.031245325202761926 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3493975903614458, + "acc_stderr": 0.037117251907407514, + "acc_norm": 0.3493975903614458, + "acc_norm_stderr": 0.037117251907407514 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3954983922829582, + "acc_stderr": 0.027770918531427838, + "acc_norm": 0.3954983922829582, + "acc_norm_stderr": 0.027770918531427838 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3991031390134529, + "acc_stderr": 0.03286745312567961, + "acc_norm": 0.3991031390134529, + "acc_norm_stderr": 0.03286745312567961 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.33587786259541985, + "acc_stderr": 0.041423137719966634, + "acc_norm": 0.33587786259541985, + "acc_norm_stderr": 0.041423137719966634 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.4292929292929293, + "acc_stderr": 0.035265527246011986, + "acc_norm": 0.4292929292929293, + "acc_norm_stderr": 0.035265527246011986 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.04104269211806232, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.04104269211806232 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.30392156862745096, + "acc_stderr": 0.045766654032077636, + "acc_norm": 0.30392156862745096, + "acc_norm_stderr": 0.045766654032077636 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3865546218487395, + "acc_stderr": 0.03163145807552378, + "acc_norm": 0.3865546218487395, + "acc_norm_stderr": 0.03163145807552378 + }, + 
"harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3, + "acc_stderr": 0.0232345810884285, + "acc_norm": 0.3, + "acc_norm_stderr": 0.0232345810884285 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.55, + "acc_stderr": 0.049999999999999996, + "acc_norm": 0.55, + "acc_norm_stderr": 0.049999999999999996 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.04643454608906275, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.04643454608906275 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3645320197044335, + "acc_stderr": 0.0338640574606209, + "acc_norm": 0.3645320197044335, + "acc_norm_stderr": 0.0338640574606209 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3870967741935484, + "acc_stderr": 0.027709359675032488, + "acc_norm": 0.3870967741935484, + "acc_norm_stderr": 0.027709359675032488 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5726495726495726, + "acc_stderr": 0.03240847393516327, + "acc_norm": 0.5726495726495726, + "acc_norm_stderr": 0.03240847393516327 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.39622641509433965, + "acc_stderr": 0.030102793781791187, + "acc_norm": 0.39622641509433965, + "acc_norm_stderr": 0.030102793781791187 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.36363636363636365, + "acc_stderr": 0.046075820907199756, + "acc_norm": 0.36363636363636365, + "acc_norm_stderr": 0.046075820907199756 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.028317533496066482, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.028317533496066482 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.037101857261199946, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 
0.037101857261199946 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5373134328358209, + "acc_stderr": 0.03525675167467974, + "acc_norm": 0.5373134328358209, + "acc_norm_stderr": 0.03525675167467974 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.36416184971098264, + "acc_stderr": 0.03669072477416907, + "acc_norm": 0.36416184971098264, + "acc_norm_stderr": 0.03669072477416907 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.32275132275132273, + "acc_stderr": 0.024078943243597016, + "acc_norm": 0.32275132275132273, + "acc_norm_stderr": 0.024078943243597016 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.037455547914624576, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.037455547914624576 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4046242774566474, + "acc_stderr": 0.026424816594009845, + "acc_norm": 0.4046242774566474, + "acc_norm_stderr": 0.026424816594009845 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3496932515337423, + "acc_stderr": 0.03746668325470021, + "acc_norm": 0.3496932515337423, + "acc_norm_stderr": 0.03746668325470021 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.027339546640662734, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.027339546640662734 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.38860103626943004, + "acc_stderr": 0.035177397963731316, + "acc_norm": 0.38860103626943004, + 
"acc_norm_stderr": 0.035177397963731316 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.21929824561403508, + "acc_stderr": 0.03892431106518753, + "acc_norm": 0.21929824561403508, + "acc_norm_stderr": 0.03892431106518753 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.4018348623853211, + "acc_stderr": 0.021020106172997013, + "acc_norm": 0.4018348623853211, + "acc_norm_stderr": 0.021020106172997013 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.04134913018303316, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.04134913018303316 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.42483660130718953, + "acc_stderr": 0.028304576673141114, + "acc_norm": 0.42483660130718953, + "acc_norm_stderr": 0.028304576673141114 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.512396694214876, + "acc_stderr": 0.04562951548180765, + "acc_norm": 0.512396694214876, + "acc_norm_stderr": 0.04562951548180765 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3618421052631579, + "acc_stderr": 0.03910525752849726, + "acc_norm": 0.3618421052631579, + "acc_norm_stderr": 0.03910525752849726 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.30718954248366015, + "acc_stderr": 0.018663359671463667, + "acc_norm": 0.30718954248366015, + "acc_norm_stderr": 0.018663359671463667 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32269503546099293, + "acc_stderr": 0.02788913930053479, + "acc_norm": 0.32269503546099293, + "acc_norm_stderr": 0.02788913930053479 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.32142857142857145, + "acc_stderr": 0.0443280405529152, + "acc_norm": 0.32142857142857145, + "acc_norm_stderr": 0.0443280405529152 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.375, + "acc_stderr": 
0.033016908987210894, + "acc_norm": 0.375, + "acc_norm_stderr": 0.033016908987210894 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24692737430167597, + "acc_stderr": 0.014422292204808852, + "acc_norm": 0.24692737430167597, + "acc_norm_stderr": 0.014422292204808852 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.33088235294117646, + "acc_stderr": 0.028582709753898428, + "acc_norm": 0.33088235294117646, + "acc_norm_stderr": 0.028582709753898428 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4448979591836735, + "acc_stderr": 0.031814251181977865, + "acc_norm": 0.4448979591836735, + "acc_norm_stderr": 0.031814251181977865 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.48945147679324896, + "acc_stderr": 0.032539983791662855, + "acc_norm": 0.48945147679324896, + "acc_norm_stderr": 0.032539983791662855 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.29595827900912647, + "acc_stderr": 0.011658518525277039, + "acc_norm": 0.29595827900912647, + "acc_norm_stderr": 0.011658518525277039 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.3284313725490196, + "acc_stderr": 0.03296245110172228, + "acc_norm": 0.3284313725490196, + "acc_norm_stderr": 0.03296245110172228 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.32727272727272727, + "acc_stderr": 0.03663974994391243, + "acc_norm": 0.32727272727272727, + "acc_norm_stderr": 0.03663974994391243 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.25458996328029376, + "mc1_stderr": 0.015250117079156474, + "mc2": 0.43905040290162545, + "mc2_stderr": 0.01596047675950481 + }, + 
"harness|ko_commongen_v2|2": { + "acc": 0.2727272727272727, + "acc_stderr": 0.01531185311030035, + "acc_norm": 0.31995277449822906, + "acc_norm_stderr": 0.016037153840280517 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + 
"harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "lamhieu/ghost-7b-v0.9.1", + "model_sha": "71dcaaaa515d6e49cc03ded1935719f92931fcd8", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/lashid11/CheckGPT-SOLAR-10.7B/result_2024-07-26 01:44:10.json b/lashid11/CheckGPT-SOLAR-10.7B/result_2024-07-26 01:44:10.json new file mode 100644 index 0000000000000000000000000000000000000000..4f706828710a587584d9d197bfed84ca1099ab3e --- /dev/null +++ b/lashid11/CheckGPT-SOLAR-10.7B/result_2024-07-26 01:44:10.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.7192832764505119, + "acc_stderr": 0.013131238126975584, + "acc_norm": 0.764505119453925, + "acc_norm_stderr": 0.01239945185500475 + }, + "harness|ko_hellaswag|10": { + "acc": 0.6000796654052978, + "acc_stderr": 0.00488880500310304, + "acc_norm": 0.7464648476399124, + "acc_norm_stderr": 0.004341454841892319 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 
0.6900584795321637, + "acc_stderr": 0.035469769593931624, + "acc_norm": 0.6900584795321637, + "acc_norm_stderr": 0.035469769593931624 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.7475728155339806, + "acc_stderr": 0.04301250399690878, + "acc_norm": 0.7475728155339806, + "acc_norm_stderr": 0.04301250399690878 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.7011494252873564, + "acc_stderr": 0.01636925681509311, + "acc_norm": 0.7011494252873564, + "acc_norm_stderr": 0.01636925681509311 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4962962962962963, + "acc_stderr": 0.04319223625811331, + "acc_norm": 0.4962962962962963, + "acc_norm_stderr": 0.04319223625811331 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.5319148936170213, + "acc_stderr": 0.03261936918467381, + "acc_norm": 0.5319148936170213, + "acc_norm_stderr": 0.03261936918467381 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.5060240963855421, + "acc_stderr": 0.03892212195333045, + "acc_norm": 0.5060240963855421, + "acc_norm_stderr": 0.03892212195333045 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6463022508038585, + "acc_stderr": 0.027155208103200875, + "acc_norm": 0.6463022508038585, + "acc_norm_stderr": 0.027155208103200875 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.6681614349775785, + "acc_stderr": 0.031602951437766785, + "acc_norm": 0.6681614349775785, + "acc_norm_stderr": 0.031602951437766785 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5954198473282443, + "acc_stderr": 0.043046937953806645, + "acc_norm": 0.5954198473282443, + "acc_norm_stderr": 0.043046937953806645 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 
0.7323232323232324, + "acc_stderr": 0.03154449888270286, + "acc_norm": 0.7323232323232324, + "acc_norm_stderr": 0.03154449888270286 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5379310344827586, + "acc_stderr": 0.04154659671707548, + "acc_norm": 0.5379310344827586, + "acc_norm_stderr": 0.04154659671707548 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.04617034827006717, + "acc_norm": 0.3137254901960784, + "acc_norm_stderr": 0.04617034827006717 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6092436974789915, + "acc_stderr": 0.031693802357129965, + "acc_norm": 0.6092436974789915, + "acc_norm_stderr": 0.031693802357129965 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.6230769230769231, + "acc_stderr": 0.024570975364225995, + "acc_norm": 0.6230769230769231, + "acc_norm_stderr": 0.024570975364225995 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.69, + "acc_stderr": 0.046482319871173156, + "acc_norm": 0.69, + "acc_norm_stderr": 0.046482319871173156 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.7037037037037037, + "acc_stderr": 0.04414343666854933, + "acc_norm": 0.7037037037037037, + "acc_norm_stderr": 0.04414343666854933 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4433497536945813, + "acc_stderr": 0.03495334582162934, + "acc_norm": 0.4433497536945813, + "acc_norm_stderr": 0.03495334582162934 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.632258064516129, + "acc_stderr": 0.027430866579973463, + "acc_norm": 0.632258064516129, + "acc_norm_stderr": 0.027430866579973463 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.8247863247863247, + "acc_stderr": 0.024904439098918218, + "acc_norm": 0.8247863247863247, + "acc_norm_stderr": 0.024904439098918218 + }, + 
"harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5735849056603773, + "acc_stderr": 0.030437794342983052, + "acc_norm": 0.5735849056603773, + "acc_norm_stderr": 0.030437794342983052 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6272727272727273, + "acc_stderr": 0.04631381319425465, + "acc_norm": 0.6272727272727273, + "acc_norm_stderr": 0.04631381319425465 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.029723278961476664, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.029723278961476664 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3443708609271523, + "acc_stderr": 0.038796870240733264, + "acc_norm": 0.3443708609271523, + "acc_norm_stderr": 0.038796870240733264 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7114427860696517, + "acc_stderr": 0.0320384104021332, + "acc_norm": 0.7114427860696517, + "acc_norm_stderr": 0.0320384104021332 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5260115606936416, + "acc_stderr": 0.038073017265045125, + "acc_norm": 0.5260115606936416, + "acc_norm_stderr": 0.038073017265045125 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.4417989417989418, + "acc_stderr": 0.025576257061253833, + "acc_norm": 0.4417989417989418, + "acc_norm_stderr": 0.025576257061253833 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5486111111111112, + "acc_stderr": 0.04161402398403279, + "acc_norm": 0.5486111111111112, + "acc_norm_stderr": 0.04161402398403279 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.78, + "acc_stderr": 0.04163331998932263, + "acc_norm": 0.78, + "acc_norm_stderr": 0.04163331998932263 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5895953757225434, + "acc_stderr": 0.026483392042098177, + "acc_norm": 0.5895953757225434, + 
"acc_norm_stderr": 0.026483392042098177 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.6380368098159509, + "acc_stderr": 0.037757007291414416, + "acc_norm": 0.6380368098159509, + "acc_norm_stderr": 0.037757007291414416 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6728395061728395, + "acc_stderr": 0.02610567386140983, + "acc_norm": 0.6728395061728395, + "acc_norm_stderr": 0.02610567386140983 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7512953367875648, + "acc_stderr": 0.031195840877700304, + "acc_norm": 0.7512953367875648, + "acc_norm_stderr": 0.031195840877700304 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.4473684210526316, + "acc_stderr": 0.04677473004491199, + "acc_norm": 0.4473684210526316, + "acc_norm_stderr": 0.04677473004491199 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.7229357798165138, + "acc_stderr": 0.019188482590169545, + "acc_norm": 0.7229357798165138, + "acc_norm_stderr": 0.019188482590169545 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4126984126984127, + "acc_stderr": 0.04403438954768177, + "acc_norm": 0.4126984126984127, + "acc_norm_stderr": 0.04403438954768177 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.6143790849673203, + "acc_stderr": 0.02787074527829028, + "acc_norm": 0.6143790849673203, + "acc_norm_stderr": 0.02787074527829028 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.68, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.68, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7768595041322314, + "acc_stderr": 0.03800754475228732, + "acc_norm": 0.7768595041322314, + "acc_norm_stderr": 0.03800754475228732 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.625, + "acc_stderr": 0.039397364351956274, + "acc_norm": 0.625, + 
"acc_norm_stderr": 0.039397364351956274 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5735294117647058, + "acc_stderr": 0.020007912739359365, + "acc_norm": 0.5735294117647058, + "acc_norm_stderr": 0.020007912739359365 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.4326241134751773, + "acc_stderr": 0.02955545423677885, + "acc_norm": 0.4326241134751773, + "acc_norm_stderr": 0.02955545423677885 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.45535714285714285, + "acc_stderr": 0.04726835553719099, + "acc_norm": 0.45535714285714285, + "acc_norm_stderr": 0.04726835553719099 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5185185185185185, + "acc_stderr": 0.03407632093854052, + "acc_norm": 0.5185185185185185, + "acc_norm_stderr": 0.03407632093854052 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.35195530726256985, + "acc_stderr": 0.015972668523689067, + "acc_norm": 0.35195530726256985, + "acc_norm_stderr": 0.015972668523689067 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.71, + "acc_stderr": 0.04560480215720683, + "acc_norm": 0.71, + "acc_norm_stderr": 0.04560480215720683 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5257352941176471, + "acc_stderr": 0.030332578094555033, + "acc_norm": 0.5257352941176471, + "acc_norm_stderr": 0.030332578094555033 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.673469387755102, + "acc_stderr": 0.030021056238440327, + "acc_norm": 0.673469387755102, + "acc_norm_stderr": 0.030021056238440327 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7215189873417721, + "acc_stderr": 0.029178682304842538, + "acc_norm": 0.7215189873417721, + "acc_norm_stderr": 0.029178682304842538 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 
0.43741851368970014, + "acc_stderr": 0.012669813464935715, + "acc_norm": 0.43741851368970014, + "acc_norm_stderr": 0.012669813464935715 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6568627450980392, + "acc_stderr": 0.03332139944668086, + "acc_norm": 0.6568627450980392, + "acc_norm_stderr": 0.03332139944668086 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6545454545454545, + "acc_stderr": 0.037131580674819135, + "acc_norm": 0.6545454545454545, + "acc_norm_stderr": 0.037131580674819135 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.7527539779681762, + "mc1_stderr": 0.015102404797359654, + "mc2": 0.8150192985052159, + "mc2_stderr": 0.013109910304119234 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5147579693034239, + "acc_stderr": 0.017182864434998564, + "acc_norm": 0.5395513577331759, + "acc_norm_stderr": 0.017136487626049846 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + 
"harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "lashid11/CheckGPT-SOLAR-10.7B", + "model_sha": "c40cceeb3e67cdea30a3cda38bb1d21d77a9a374", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git 
a/lcw99/google-gemma-14B-ko-chang/result_2024-04-13 12:13:43.json b/lcw99/google-gemma-14B-ko-chang/result_2024-04-13 12:13:43.json new file mode 100644 index 0000000000000000000000000000000000000000..2570d5b894e07e9280bfdbd7d086c36e9e847b24 --- /dev/null +++ b/lcw99/google-gemma-14B-ko-chang/result_2024-04-13 12:13:43.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4061433447098976, + "acc_stderr": 0.014351656690097858, + "acc_norm": 0.4684300341296928, + "acc_norm_stderr": 0.014582236460866984 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4411471818362876, + "acc_stderr": 0.004955095096264716, + "acc_norm": 0.5955984863572994, + "acc_norm_stderr": 0.0048977283707372435 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.7192982456140351, + "acc_stderr": 0.034462962170884265, + "acc_norm": 0.7192982456140351, + "acc_norm_stderr": 0.034462962170884265 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5922330097087378, + "acc_stderr": 0.04865777570410769, + "acc_norm": 0.5922330097087378, + "acc_norm_stderr": 0.04865777570410769 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6104725415070242, + "acc_stderr": 0.01743808255626461, + "acc_norm": 0.6104725415070242, + "acc_norm_stderr": 0.01743808255626461 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45925925925925926, + "acc_stderr": 0.04304979692464244, + "acc_norm": 0.45925925925925926, + "acc_norm_stderr": 0.04304979692464244 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768077, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768077 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4425531914893617, + "acc_stderr": 0.03246956919789958, + "acc_norm": 0.4425531914893617, + "acc_norm_stderr": 0.03246956919789958 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.45180722891566266, + "acc_stderr": 0.038743715565879536, + "acc_norm": 0.45180722891566266, + "acc_norm_stderr": 0.038743715565879536 + }, + 
"harness|ko_mmlu_philosophy|5": { + "acc": 0.49517684887459806, + "acc_stderr": 0.028396770444111298, + "acc_norm": 0.49517684887459806, + "acc_norm_stderr": 0.028396770444111298 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5112107623318386, + "acc_stderr": 0.033549366530984746, + "acc_norm": 0.5112107623318386, + "acc_norm_stderr": 0.033549366530984746 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4961832061068702, + "acc_stderr": 0.043851623256015534, + "acc_norm": 0.4961832061068702, + "acc_norm_stderr": 0.043851623256015534 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6262626262626263, + "acc_stderr": 0.034468977386593325, + "acc_norm": 0.6262626262626263, + "acc_norm_stderr": 0.034468977386593325 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4689655172413793, + "acc_stderr": 0.04158632762097828, + "acc_norm": 0.4689655172413793, + "acc_norm_stderr": 0.04158632762097828 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.04280105837364395, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.04280105837364395 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.032252942323996406, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.032252942323996406 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4564102564102564, + "acc_stderr": 0.02525448542479961, + "acc_norm": 0.4564102564102564, + "acc_norm_stderr": 0.02525448542479961 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.55, + "acc_stderr": 0.04999999999999999, + "acc_norm": 0.55, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 
0.04824181513244218 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6296296296296297, + "acc_stderr": 0.04668408033024932, + "acc_norm": 0.6296296296296297, + "acc_norm_stderr": 0.04668408033024932 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.33497536945812806, + "acc_stderr": 0.0332085274234831, + "acc_norm": 0.33497536945812806, + "acc_norm_stderr": 0.0332085274234831 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5387096774193548, + "acc_stderr": 0.028358634859836935, + "acc_norm": 0.5387096774193548, + "acc_norm_stderr": 0.028358634859836935 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7008547008547008, + "acc_stderr": 0.02999695185834948, + "acc_norm": 0.7008547008547008, + "acc_norm_stderr": 0.02999695185834948 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.45660377358490567, + "acc_stderr": 0.03065674869673943, + "acc_norm": 0.45660377358490567, + "acc_norm_stderr": 0.03065674869673943 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5363636363636364, + "acc_stderr": 0.04776449162396197, + "acc_norm": 0.5363636363636364, + "acc_norm_stderr": 0.04776449162396197 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.28888888888888886, + "acc_stderr": 0.027634907264178544, + "acc_norm": 0.28888888888888886, + "acc_norm_stderr": 0.027634907264178544 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + "acc_stderr": 0.03684881521389023, + "acc_norm": 0.2847682119205298, + "acc_norm_stderr": 0.03684881521389023 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5920398009950248, + "acc_stderr": 0.03475116365194092, + "acc_norm": 0.5920398009950248, + "acc_norm_stderr": 0.03475116365194092 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2658959537572254, + "acc_stderr": 0.033687629322594295, + "acc_norm": 0.2658959537572254, + "acc_norm_stderr": 0.033687629322594295 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.29365079365079366, + 
"acc_stderr": 0.02345603738398203, + "acc_norm": 0.29365079365079366, + "acc_norm_stderr": 0.02345603738398203 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3958333333333333, + "acc_stderr": 0.04089465449325583, + "acc_norm": 0.3958333333333333, + "acc_norm_stderr": 0.04089465449325583 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.63, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.63, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5144508670520231, + "acc_stderr": 0.026907849856282542, + "acc_norm": 0.5144508670520231, + "acc_norm_stderr": 0.026907849856282542 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.50920245398773, + "acc_stderr": 0.03927705600787443, + "acc_norm": 0.50920245398773, + "acc_norm_stderr": 0.03927705600787443 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.027815973433878014, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.027815973433878014 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5751295336787565, + "acc_stderr": 0.035674713352125395, + "acc_norm": 0.5751295336787565, + "acc_norm_stderr": 0.035674713352125395 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.041424397194893624, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.041424397194893624 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6220183486238532, + "acc_stderr": 0.020789187066728117, + "acc_norm": 0.6220183486238532, + "acc_norm_stderr": 0.020789187066728117 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 
0.25396825396825395, + "acc_stderr": 0.03893259610604672, + "acc_norm": 0.25396825396825395, + "acc_norm_stderr": 0.03893259610604672 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.477124183006536, + "acc_stderr": 0.028599936776089782, + "acc_norm": 0.477124183006536, + "acc_norm_stderr": 0.028599936776089782 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5950413223140496, + "acc_stderr": 0.04481137755942469, + "acc_norm": 0.5950413223140496, + "acc_norm_stderr": 0.04481137755942469 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.45394736842105265, + "acc_stderr": 0.040516463428741406, + "acc_norm": 0.45394736842105265, + "acc_norm_stderr": 0.040516463428741406 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4199346405228758, + "acc_stderr": 0.019966811178256477, + "acc_norm": 0.4199346405228758, + "acc_norm_stderr": 0.019966811178256477 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32978723404255317, + "acc_stderr": 0.028045946942042398, + "acc_norm": 0.32978723404255317, + "acc_norm_stderr": 0.028045946942042398 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.36607142857142855, + "acc_stderr": 0.04572372358737431, + "acc_norm": 0.36607142857142855, + "acc_norm_stderr": 0.04572372358737431 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.33796296296296297, + "acc_stderr": 0.03225941352631296, + "acc_norm": 0.33796296296296297, + "acc_norm_stderr": 0.03225941352631296 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24581005586592178, + "acc_stderr": 0.014400296429225608, + "acc_norm": 0.24581005586592178, + "acc_norm_stderr": 0.014400296429225608 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + 
"harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3786764705882353, + "acc_stderr": 0.02946513363977613, + "acc_norm": 0.3786764705882353, + "acc_norm_stderr": 0.02946513363977613 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.40408163265306124, + "acc_stderr": 0.0314147080258659, + "acc_norm": 0.40408163265306124, + "acc_norm_stderr": 0.0314147080258659 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5907172995780591, + "acc_stderr": 0.03200704183359591, + "acc_norm": 0.5907172995780591, + "acc_norm_stderr": 0.03200704183359591 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3246414602346806, + "acc_stderr": 0.011959089388530027, + "acc_norm": 0.3246414602346806, + "acc_norm_stderr": 0.011959089388530027 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6078431372549019, + "acc_stderr": 0.034267123492472726, + "acc_norm": 0.6078431372549019, + "acc_norm_stderr": 0.034267123492472726 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5696969696969697, + "acc_stderr": 0.038662259628790774, + "acc_norm": 0.5696969696969697, + "acc_norm_stderr": 0.038662259628790774 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.30599755201958384, + "mc1_stderr": 0.01613222972815504, + "mc2": 0.4536480408591891, + "mc2_stderr": 0.015196893311329153 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5041322314049587, + "acc_stderr": 0.017189767032130814, + "acc_norm": 0.5430932703659976, + "acc_norm_stderr": 0.017126389093086784 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + 
"harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + 
"harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "lcw99/google-gemma-14B-ko-chang", + "model_sha": "3e329404ca273be3af7dd1006ebe0bf0b7e7194a", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/lcw99/llama-3-10b-it-ko-2024-0527/result_2024-05-27 06:32:23.json b/lcw99/llama-3-10b-it-ko-2024-0527/result_2024-05-27 06:32:23.json new file mode 100644 index 0000000000000000000000000000000000000000..9fe57f7c66260ab4eb2a689598baefdf1830fcb0 --- /dev/null +++ b/lcw99/llama-3-10b-it-ko-2024-0527/result_2024-05-27 06:32:23.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.38310580204778155, + "acc_stderr": 0.01420647266167288, + "acc_norm": 0.45307167235494883, + "acc_norm_stderr": 0.014546892052005626 + }, + "harness|ko_hellaswag|10": { + "acc": 0.40091615216092413, + "acc_stderr": 0.004890824718530299, + "acc_norm": 0.5351523600876319, + "acc_norm_stderr": 0.004977434505403351 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.543859649122807, + "acc_stderr": 0.038200425866029654, + "acc_norm": 0.543859649122807, + "acc_norm_stderr": 0.038200425866029654 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6504854368932039, + "acc_stderr": 0.047211885060971716, + "acc_norm": 0.6504854368932039, + "acc_norm_stderr": 0.047211885060971716 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4789272030651341, + "acc_stderr": 0.017864076786212896, + "acc_norm": 0.4789272030651341, 
+ "acc_norm_stderr": 0.017864076786212896 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37777777777777777, + "acc_stderr": 0.04188307537595853, + "acc_norm": 0.37777777777777777, + "acc_norm_stderr": 0.04188307537595853 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4765957446808511, + "acc_stderr": 0.03265019475033582, + "acc_norm": 0.4765957446808511, + "acc_norm_stderr": 0.03265019475033582 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4036144578313253, + "acc_stderr": 0.03819486140758397, + "acc_norm": 0.4036144578313253, + "acc_norm_stderr": 0.03819486140758397 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5080385852090032, + "acc_stderr": 0.028394421370984538, + "acc_norm": 0.5080385852090032, + "acc_norm_stderr": 0.028394421370984538 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.484304932735426, + "acc_stderr": 0.0335412657542081, + "acc_norm": 0.484304932735426, + "acc_norm_stderr": 0.0335412657542081 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5572519083969466, + "acc_stderr": 0.043564472026650695, + "acc_norm": 0.5572519083969466, + "acc_norm_stderr": 0.043564472026650695 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.44, + "acc_stderr": 0.0498887651569859, + "acc_norm": 0.44, + "acc_norm_stderr": 0.0498887651569859 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5656565656565656, + "acc_stderr": 0.03531505879359182, + "acc_norm": 0.5656565656565656, + "acc_norm_stderr": 0.03531505879359182 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5310344827586206, + "acc_stderr": 0.04158632762097828, + "acc_norm": 0.5310344827586206, + "acc_norm_stderr": 0.04158632762097828 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.04440521906179328, + "acc_norm": 0.27450980392156865, + 
"acc_norm_stderr": 0.04440521906179328 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.592436974789916, + "acc_stderr": 0.031918633744784666, + "acc_norm": 0.592436974789916, + "acc_norm_stderr": 0.031918633744784666 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5025641025641026, + "acc_stderr": 0.025350672979412184, + "acc_norm": 0.5025641025641026, + "acc_norm_stderr": 0.025350672979412184 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.41, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.41, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6203703703703703, + "acc_stderr": 0.04691521224077742, + "acc_norm": 0.6203703703703703, + "acc_norm_stderr": 0.04691521224077742 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4433497536945813, + "acc_stderr": 0.03495334582162934, + "acc_norm": 0.4433497536945813, + "acc_norm_stderr": 0.03495334582162934 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5387096774193548, + "acc_stderr": 0.028358634859836945, + "acc_norm": 0.5387096774193548, + "acc_norm_stderr": 0.028358634859836945 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7094017094017094, + "acc_stderr": 0.029745048572674064, + "acc_norm": 0.7094017094017094, + "acc_norm_stderr": 0.029745048572674064 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5245283018867924, + "acc_stderr": 0.030735822206205615, + "acc_norm": 0.5245283018867924, + "acc_norm_stderr": 0.030735822206205615 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.04769300568972744, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.04769300568972744 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3296296296296296, + "acc_stderr": 
0.02866120111652458, + "acc_norm": 0.3296296296296296, + "acc_norm_stderr": 0.02866120111652458 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33774834437086093, + "acc_stderr": 0.038615575462551684, + "acc_norm": 0.33774834437086093, + "acc_norm_stderr": 0.038615575462551684 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6616915422885572, + "acc_stderr": 0.033455630703391914, + "acc_norm": 0.6616915422885572, + "acc_norm_stderr": 0.033455630703391914 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4161849710982659, + "acc_stderr": 0.037585177754049466, + "acc_norm": 0.4161849710982659, + "acc_norm_stderr": 0.037585177754049466 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.02459497512892094, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.02459497512892094 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4861111111111111, + "acc_stderr": 0.04179596617581, + "acc_norm": 0.4861111111111111, + "acc_norm_stderr": 0.04179596617581 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.67, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.67, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.546242774566474, + "acc_stderr": 0.02680372058320618, + "acc_norm": 0.546242774566474, + "acc_norm_stderr": 0.02680372058320618 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4723926380368098, + "acc_stderr": 0.039223782906109894, + "acc_norm": 0.4723926380368098, + "acc_norm_stderr": 0.039223782906109894 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5339506172839507, + "acc_stderr": 0.027756535257347663, + "acc_norm": 0.5339506172839507, + "acc_norm_stderr": 0.027756535257347663 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.36, + 
"acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6217616580310881, + "acc_stderr": 0.03499807276193338, + "acc_norm": 0.6217616580310881, + "acc_norm_stderr": 0.03499807276193338 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.34210526315789475, + "acc_stderr": 0.04462917535336938, + "acc_norm": 0.34210526315789475, + "acc_norm_stderr": 0.04462917535336938 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5706422018348624, + "acc_stderr": 0.021222286397236508, + "acc_norm": 0.5706422018348624, + "acc_norm_stderr": 0.021222286397236508 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.40476190476190477, + "acc_stderr": 0.04390259265377561, + "acc_norm": 0.40476190476190477, + "acc_norm_stderr": 0.04390259265377561 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5326797385620915, + "acc_stderr": 0.02856869975222588, + "acc_norm": 0.5326797385620915, + "acc_norm_stderr": 0.02856869975222588 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956914, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956914 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6942148760330579, + "acc_stderr": 0.04205953933884122, + "acc_norm": 0.6942148760330579, + "acc_norm_stderr": 0.04205953933884122 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.45394736842105265, + "acc_stderr": 0.040516463428741406, + "acc_norm": 0.45394736842105265, + "acc_norm_stderr": 0.040516463428741406 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4264705882352941, + "acc_stderr": 0.020007912739359365, + "acc_norm": 0.4264705882352941, + "acc_norm_stderr": 0.020007912739359365 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.02812163604063988, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.02812163604063988 + }, + 
"harness|ko_mmlu_machine_learning|5": { + "acc": 0.4732142857142857, + "acc_stderr": 0.047389751192741546, + "acc_norm": 0.4732142857142857, + "acc_norm_stderr": 0.047389751192741546 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.0340470532865388, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.0340470532865388 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.26927374301675977, + "acc_stderr": 0.014835616582882613, + "acc_norm": 0.26927374301675977, + "acc_norm_stderr": 0.014835616582882613 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.66, + "acc_stderr": 0.04760952285695237, + "acc_norm": 0.66, + "acc_norm_stderr": 0.04760952285695237 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.41544117647058826, + "acc_stderr": 0.029935342707877753, + "acc_norm": 0.41544117647058826, + "acc_norm_stderr": 0.029935342707877753 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5673469387755102, + "acc_stderr": 0.031717528240626645, + "acc_norm": 0.5673469387755102, + "acc_norm_stderr": 0.031717528240626645 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6751054852320675, + "acc_stderr": 0.030486039389105317, + "acc_norm": 0.6751054852320675, + "acc_norm_stderr": 0.030486039389105317 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.35528031290743156, + "acc_stderr": 0.012223623364044034, + "acc_norm": 0.35528031290743156, + "acc_norm_stderr": 0.012223623364044034 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6225490196078431, + "acc_stderr": 0.03402272044340703, + "acc_norm": 0.6225490196078431, + "acc_norm_stderr": 0.03402272044340703 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6, + "acc_stderr": 0.03825460278380027, + 
"acc_norm": 0.6, + "acc_norm_stderr": 0.03825460278380027 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.27539779681762544, + "mc1_stderr": 0.015638135667775523, + "mc2": 0.44889562783604925, + "mc2_stderr": 0.015223103027290418 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4911452184179457, + "acc_stderr": 0.01718765819933674, + "acc_norm": 0.5808736717827627, + "acc_norm_stderr": 0.016963995010862792 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + 
"harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "lcw99/llama-3-10b-it-ko-2024-0527", + "model_sha": "859c6c2e0420e1f921f7780dd2f3b3a76d50c476", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/lcw99/llama-3-10b-it-kor-extented-chang-pro8/result_2024-05-21 21:33:21.json b/lcw99/llama-3-10b-it-kor-extented-chang-pro8/result_2024-05-21 21:33:21.json new file mode 100644 index 0000000000000000000000000000000000000000..9f78f966a8fce4543a445591550ef4607ee832fc --- /dev/null +++ b/lcw99/llama-3-10b-it-kor-extented-chang-pro8/result_2024-05-21 21:33:21.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.38822525597269625, + "acc_stderr": 0.01424161420741404, + "acc_norm": 
0.4445392491467577, + "acc_norm_stderr": 0.014521226405627074 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3963353913563035, + "acc_stderr": 0.004881359589148995, + "acc_norm": 0.526090420235013, + "acc_norm_stderr": 0.004982983592459192 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5497076023391813, + "acc_stderr": 0.03815827365913237, + "acc_norm": 0.5497076023391813, + "acc_norm_stderr": 0.03815827365913237 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6699029126213593, + "acc_stderr": 0.04656147110012351, + "acc_norm": 0.6699029126213593, + "acc_norm_stderr": 0.04656147110012351 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.017867695938429774, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.017867695938429774 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.04171654161354544, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.04171654161354544 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.46382978723404256, + "acc_stderr": 0.032600385118357715, + "acc_norm": 0.46382978723404256, + "acc_norm_stderr": 0.032600385118357715 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3795180722891566, + "acc_stderr": 0.037777988227480165, + "acc_norm": 0.3795180722891566, + "acc_norm_stderr": 0.037777988227480165 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5241157556270096, + "acc_stderr": 0.028365041542564577, + "acc_norm": 0.5241157556270096, + "acc_norm_stderr": 0.028365041542564577 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4663677130044843, + "acc_stderr": 0.03348180017060306, + "acc_norm": 0.4663677130044843, + "acc_norm_stderr": 0.03348180017060306 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5343511450381679, + "acc_stderr": 0.04374928560599738, + 
"acc_norm": 0.5343511450381679, + "acc_norm_stderr": 0.04374928560599738 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5808080808080808, + "acc_stderr": 0.035155207286704175, + "acc_norm": 0.5808080808080808, + "acc_norm_stderr": 0.035155207286704175 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5241379310344828, + "acc_stderr": 0.0416180850350153, + "acc_norm": 0.5241379310344828, + "acc_norm_stderr": 0.0416180850350153 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.04533838195929777, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.04533838195929777 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5378151260504201, + "acc_stderr": 0.0323854694875898, + "acc_norm": 0.5378151260504201, + "acc_norm_stderr": 0.0323854694875898 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.49230769230769234, + "acc_stderr": 0.025348006031534805, + "acc_norm": 0.49230769230769234, + "acc_norm_stderr": 0.025348006031534805 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.45, + "acc_stderr": 0.049999999999999996, + "acc_norm": 0.45, + "acc_norm_stderr": 0.049999999999999996 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5925925925925926, + "acc_stderr": 0.04750077341199985, + "acc_norm": 0.5925925925925926, + "acc_norm_stderr": 0.04750077341199985 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.46798029556650245, + "acc_stderr": 0.035107665979592154, + "acc_norm": 0.46798029556650245, + "acc_norm_stderr": 0.035107665979592154 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5548387096774193, + "acc_stderr": 0.028272410186214906, + 
"acc_norm": 0.5548387096774193, + "acc_norm_stderr": 0.028272410186214906 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7094017094017094, + "acc_stderr": 0.02974504857267406, + "acc_norm": 0.7094017094017094, + "acc_norm_stderr": 0.02974504857267406 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5320754716981132, + "acc_stderr": 0.030709486992556552, + "acc_norm": 0.5320754716981132, + "acc_norm_stderr": 0.030709486992556552 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.04769300568972744, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.04769300568972744 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.028317533496066475, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.028317533496066475 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.36423841059602646, + "acc_stderr": 0.03929111781242741, + "acc_norm": 0.36423841059602646, + "acc_norm_stderr": 0.03929111781242741 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6766169154228856, + "acc_stderr": 0.03307615947979035, + "acc_norm": 0.6766169154228856, + "acc_norm_stderr": 0.03307615947979035 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.44508670520231214, + "acc_stderr": 0.03789401760283647, + "acc_norm": 0.44508670520231214, + "acc_norm_stderr": 0.03789401760283647 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.024677862841332786, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.024677862841332786 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4583333333333333, + "acc_stderr": 0.04166666666666665, + "acc_norm": 0.4583333333333333, + "acc_norm_stderr": 0.04166666666666665 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 
0.65, + "acc_stderr": 0.04793724854411018, + "acc_norm": 0.65, + "acc_norm_stderr": 0.04793724854411018 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5491329479768786, + "acc_stderr": 0.026788811931562764, + "acc_norm": 0.5491329479768786, + "acc_norm_stderr": 0.026788811931562764 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4785276073619632, + "acc_stderr": 0.0392474687675113, + "acc_norm": 0.4785276073619632, + "acc_norm_stderr": 0.0392474687675113 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5432098765432098, + "acc_stderr": 0.027716661650194038, + "acc_norm": 0.5432098765432098, + "acc_norm_stderr": 0.027716661650194038 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5803108808290155, + "acc_stderr": 0.035615873276858834, + "acc_norm": 0.5803108808290155, + "acc_norm_stderr": 0.035615873276858834 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3684210526315789, + "acc_stderr": 0.04537815354939391, + "acc_norm": 0.3684210526315789, + "acc_norm_stderr": 0.04537815354939391 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5779816513761468, + "acc_stderr": 0.021174991407763175, + "acc_norm": 0.5779816513761468, + "acc_norm_stderr": 0.021174991407763175 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.40476190476190477, + "acc_stderr": 0.0439025926537756, + "acc_norm": 0.40476190476190477, + "acc_norm_stderr": 0.0439025926537756 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5326797385620915, + "acc_stderr": 0.02856869975222588, + "acc_norm": 0.5326797385620915, + "acc_norm_stderr": 0.02856869975222588 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 
0.6776859504132231, + "acc_stderr": 0.04266416363352168, + "acc_norm": 0.6776859504132231, + "acc_norm_stderr": 0.04266416363352168 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4605263157894737, + "acc_stderr": 0.040562422522490316, + "acc_norm": 0.4605263157894737, + "acc_norm_stderr": 0.040562422522490316 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4084967320261438, + "acc_stderr": 0.01988622103750187, + "acc_norm": 0.4084967320261438, + "acc_norm_stderr": 0.01988622103750187 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32269503546099293, + "acc_stderr": 0.02788913930053478, + "acc_norm": 0.32269503546099293, + "acc_norm_stderr": 0.02788913930053478 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4107142857142857, + "acc_stderr": 0.04669510663875191, + "acc_norm": 0.4107142857142857, + "acc_norm_stderr": 0.04669510663875191 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.0340470532865388, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.0340470532865388 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27262569832402234, + "acc_stderr": 0.01489339173524962, + "acc_norm": 0.27262569832402234, + "acc_norm_stderr": 0.01489339173524962 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.63, + "acc_stderr": 0.04852365870939098, + "acc_norm": 0.63, + "acc_norm_stderr": 0.04852365870939098 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4264705882352941, + "acc_stderr": 0.030042615832714864, + "acc_norm": 0.4264705882352941, + "acc_norm_stderr": 0.030042615832714864 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5673469387755102, + "acc_stderr": 0.031717528240626645, + "acc_norm": 0.5673469387755102, + "acc_norm_stderr": 0.031717528240626645 
+ }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6329113924050633, + "acc_stderr": 0.03137624072561619, + "acc_norm": 0.6329113924050633, + "acc_norm_stderr": 0.03137624072561619 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3520208604954368, + "acc_stderr": 0.012198140605353602, + "acc_norm": 0.3520208604954368, + "acc_norm_stderr": 0.012198140605353602 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5833333333333334, + "acc_stderr": 0.03460228327239172, + "acc_norm": 0.5833333333333334, + "acc_norm_stderr": 0.03460228327239172 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6060606060606061, + "acc_stderr": 0.03815494308688929, + "acc_norm": 0.6060606060606061, + "acc_norm_stderr": 0.03815494308688929 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.27539779681762544, + "mc1_stderr": 0.015638135667775523, + "mc2": 0.4493359307836501, + "mc2_stderr": 0.0151771762631482 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.48288075560802834, + "acc_stderr": 0.017180275246085633, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.017119172208061504 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 
1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "lcw99/llama-3-10b-it-kor-extented-chang-pro8", + "model_sha": "803776b5bba048f3e91fb09b97f120e13480332b", + "model_dtype": 
"torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/lcw99/llama-3-10b-it-kor-extented-chang/result_2024-05-15 06:52:56.json b/lcw99/llama-3-10b-it-kor-extented-chang/result_2024-05-15 06:52:56.json new file mode 100644 index 0000000000000000000000000000000000000000..5f225482637b46d30cbdd05b308e234fd1f76c8a --- /dev/null +++ b/lcw99/llama-3-10b-it-kor-extented-chang/result_2024-05-15 06:52:56.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.37627986348122866, + "acc_stderr": 0.014157022555407161, + "acc_norm": 0.44112627986348124, + "acc_norm_stderr": 0.014509747749064666 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3851822346146186, + "acc_stderr": 0.004856437955719849, + "acc_norm": 0.5128460466042621, + "acc_norm_stderr": 0.004988134303021786 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6374269005847953, + "acc_stderr": 0.0368713061556206, + "acc_norm": 0.6374269005847953, + "acc_norm_stderr": 0.0368713061556206 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6893203883495146, + "acc_stderr": 0.0458212416016155, + "acc_norm": 0.6893203883495146, + "acc_norm_stderr": 0.0458212416016155 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.524904214559387, + "acc_stderr": 0.017857770704901032, + "acc_norm": 0.524904214559387, + "acc_norm_stderr": 0.017857770704901032 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.42962962962962964, + "acc_stderr": 0.04276349494376599, + "acc_norm": 0.42962962962962964, + "acc_norm_stderr": 0.04276349494376599 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.5063829787234042, + "acc_stderr": 0.032683358999363366, + "acc_norm": 0.5063829787234042, + "acc_norm_stderr": 
0.032683358999363366 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42771084337349397, + "acc_stderr": 0.03851597683718533, + "acc_norm": 0.42771084337349397, + "acc_norm_stderr": 0.03851597683718533 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5401929260450161, + "acc_stderr": 0.028306190403305693, + "acc_norm": 0.5401929260450161, + "acc_norm_stderr": 0.028306190403305693 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.48878923766816146, + "acc_stderr": 0.033549366530984746, + "acc_norm": 0.48878923766816146, + "acc_norm_stderr": 0.033549366530984746 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5572519083969466, + "acc_stderr": 0.04356447202665069, + "acc_norm": 0.5572519083969466, + "acc_norm_stderr": 0.04356447202665069 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.45, + "acc_stderr": 0.049999999999999996, + "acc_norm": 0.45, + "acc_norm_stderr": 0.049999999999999996 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5909090909090909, + "acc_stderr": 0.03502975799413008, + "acc_norm": 0.5909090909090909, + "acc_norm_stderr": 0.03502975799413008 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5103448275862069, + "acc_stderr": 0.04165774775728762, + "acc_norm": 0.5103448275862069, + "acc_norm_stderr": 0.04165774775728762 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3431372549019608, + "acc_stderr": 0.04724007352383887, + "acc_norm": 0.3431372549019608, + "acc_norm_stderr": 0.04724007352383887 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6218487394957983, + "acc_stderr": 0.031499305777849054, + "acc_norm": 0.6218487394957983, + "acc_norm_stderr": 0.031499305777849054 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.541025641025641, + "acc_stderr": 0.025265525491284295, + "acc_norm": 0.541025641025641, + "acc_norm_stderr": 0.025265525491284295 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + 
"acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6481481481481481, + "acc_stderr": 0.04616631111801713, + "acc_norm": 0.6481481481481481, + "acc_norm_stderr": 0.04616631111801713 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.47783251231527096, + "acc_stderr": 0.03514528562175008, + "acc_norm": 0.47783251231527096, + "acc_norm_stderr": 0.03514528562175008 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5580645161290323, + "acc_stderr": 0.028251557906849748, + "acc_norm": 0.5580645161290323, + "acc_norm_stderr": 0.028251557906849748 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7564102564102564, + "acc_stderr": 0.02812096650391439, + "acc_norm": 0.7564102564102564, + "acc_norm_stderr": 0.02812096650391439 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5094339622641509, + "acc_stderr": 0.0307673947078081, + "acc_norm": 0.5094339622641509, + "acc_norm_stderr": 0.0307673947078081 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5181818181818182, + "acc_stderr": 0.04785964010794916, + "acc_norm": 0.5181818181818182, + "acc_norm_stderr": 0.04785964010794916 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.34814814814814815, + "acc_stderr": 0.02904560029061626, + "acc_norm": 0.34814814814814815, + "acc_norm_stderr": 0.02904560029061626 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3509933774834437, + "acc_stderr": 0.03896981964257375, + "acc_norm": 0.3509933774834437, + "acc_norm_stderr": 0.03896981964257375 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.681592039800995, + "acc_stderr": 0.03294118479054095, + "acc_norm": 0.681592039800995, + "acc_norm_stderr": 0.03294118479054095 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4682080924855491, + 
"acc_stderr": 0.03804749744364764, + "acc_norm": 0.4682080924855491, + "acc_norm_stderr": 0.03804749744364764 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3783068783068783, + "acc_stderr": 0.024976954053155254, + "acc_norm": 0.3783068783068783, + "acc_norm_stderr": 0.024976954053155254 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4513888888888889, + "acc_stderr": 0.041614023984032786, + "acc_norm": 0.4513888888888889, + "acc_norm_stderr": 0.041614023984032786 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.45, + "acc_stderr": 0.049999999999999996, + "acc_norm": 0.45, + "acc_norm_stderr": 0.049999999999999996 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.71, + "acc_stderr": 0.04560480215720683, + "acc_norm": 0.71, + "acc_norm_stderr": 0.04560480215720683 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.569364161849711, + "acc_stderr": 0.026658800273672376, + "acc_norm": 0.569364161849711, + "acc_norm_stderr": 0.026658800273672376 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.49079754601226994, + "acc_stderr": 0.039277056007874414, + "acc_norm": 0.49079754601226994, + "acc_norm_stderr": 0.039277056007874414 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5617283950617284, + "acc_stderr": 0.027607914087400487, + "acc_norm": 0.5617283950617284, + "acc_norm_stderr": 0.027607914087400487 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5854922279792746, + "acc_stderr": 0.035553003195576686, + "acc_norm": 0.5854922279792746, + "acc_norm_stderr": 0.035553003195576686 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.38596491228070173, + "acc_stderr": 0.045796394220704355, + "acc_norm": 0.38596491228070173, + "acc_norm_stderr": 0.045796394220704355 + }, + "harness|ko_mmlu_high_school_psychology|5": { + 
"acc": 0.6055045871559633, + "acc_stderr": 0.02095464210858747, + "acc_norm": 0.6055045871559633, + "acc_norm_stderr": 0.02095464210858747 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4126984126984127, + "acc_stderr": 0.04403438954768176, + "acc_norm": 0.4126984126984127, + "acc_norm_stderr": 0.04403438954768176 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5686274509803921, + "acc_stderr": 0.02835895631342355, + "acc_norm": 0.5686274509803921, + "acc_norm_stderr": 0.02835895631342355 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6776859504132231, + "acc_stderr": 0.04266416363352167, + "acc_norm": 0.6776859504132231, + "acc_norm_stderr": 0.04266416363352167 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.506578947368421, + "acc_stderr": 0.040685900502249704, + "acc_norm": 0.506578947368421, + "acc_norm_stderr": 0.040685900502249704 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.42320261437908496, + "acc_stderr": 0.01998780976948207, + "acc_norm": 0.42320261437908496, + "acc_norm_stderr": 0.01998780976948207 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.35815602836879434, + "acc_stderr": 0.028602085862759415, + "acc_norm": 0.35815602836879434, + "acc_norm_stderr": 0.028602085862759415 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4375, + "acc_stderr": 0.04708567521880525, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.04708567521880525 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4861111111111111, + "acc_stderr": 0.03408655867977749, + "acc_norm": 0.4861111111111111, + "acc_norm_stderr": 0.03408655867977749 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2569832402234637, + "acc_stderr": 0.01461446582196634, + "acc_norm": 0.2569832402234637, + "acc_norm_stderr": 0.01461446582196634 + }, + 
"harness|ko_mmlu_college_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.64, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.64, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.45588235294117646, + "acc_stderr": 0.030254372573976694, + "acc_norm": 0.45588235294117646, + "acc_norm_stderr": 0.030254372573976694 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5836734693877551, + "acc_stderr": 0.03155782816556166, + "acc_norm": 0.5836734693877551, + "acc_norm_stderr": 0.03155782816556166 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6877637130801688, + "acc_stderr": 0.030165137867847, + "acc_norm": 0.6877637130801688, + "acc_norm_stderr": 0.030165137867847 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3435462842242503, + "acc_stderr": 0.012128961174190161, + "acc_norm": 0.3435462842242503, + "acc_norm_stderr": 0.012128961174190161 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5686274509803921, + "acc_stderr": 0.034760990605016355, + "acc_norm": 0.5686274509803921, + "acc_norm_stderr": 0.034760990605016355 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.037563357751878954, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.037563357751878954 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24969400244798043, + "mc1_stderr": 0.015152286907148125, + "mc2": 0.4273947056584965, + "mc2_stderr": 0.015042601972189502 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.45808736717827625, + "acc_stderr": 0.017129852117911144, + "acc_norm": 0.5702479338842975, + "acc_norm_stderr": 0.017019847535972205 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + 
"harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + 
"harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "lcw99/llama-3-10b-it-kor-extented-chang", + "model_sha": "3989fb93cdd54239a35a5be84168c9645067be3b", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/lcw99/llama-3-10b-ko-240604-e2f/result_2024-06-06 03:05:14.json b/lcw99/llama-3-10b-ko-240604-e2f/result_2024-06-06 03:05:14.json new file mode 100644 index 0000000000000000000000000000000000000000..46e85bbd75261c6b9b09187556ad916297987f7d --- /dev/null +++ b/lcw99/llama-3-10b-ko-240604-e2f/result_2024-06-06 03:05:14.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.37627986348122866, + "acc_stderr": 0.014157022555407163, + "acc_norm": 0.4402730375426621, + "acc_norm_stderr": 0.014506769524804234 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3999203345947023, + "acc_stderr": 0.004888805003103072, + "acc_norm": 0.5286795459071898, + "acc_norm_stderr": 0.004981566295189444 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5614035087719298, + "acc_stderr": 0.038057975055904594, + "acc_norm": 0.5614035087719298, + "acc_norm_stderr": 0.038057975055904594 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6601941747572816, + "acc_stderr": 0.046897659372781335, + "acc_norm": 0.6601941747572816, + "acc_norm_stderr": 0.046897659372781335 
+ }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4955300127713921, + "acc_stderr": 0.017879248970584384, + "acc_norm": 0.4955300127713921, + "acc_norm_stderr": 0.017879248970584384 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4148148148148148, + "acc_stderr": 0.04256193767901407, + "acc_norm": 0.4148148148148148, + "acc_norm_stderr": 0.04256193767901407 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206824, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206824 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.451063829787234, + "acc_stderr": 0.03252909619613197, + "acc_norm": 0.451063829787234, + "acc_norm_stderr": 0.03252909619613197 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3493975903614458, + "acc_stderr": 0.03711725190740751, + "acc_norm": 0.3493975903614458, + "acc_norm_stderr": 0.03711725190740751 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5241157556270096, + "acc_stderr": 0.028365041542564584, + "acc_norm": 0.5241157556270096, + "acc_norm_stderr": 0.028365041542564584 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.47533632286995514, + "acc_stderr": 0.03351695167652628, + "acc_norm": 0.47533632286995514, + "acc_norm_stderr": 0.03351695167652628 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5190839694656488, + "acc_stderr": 0.04382094705550988, + "acc_norm": 0.5190839694656488, + "acc_norm_stderr": 0.04382094705550988 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5606060606060606, + "acc_stderr": 0.03536085947529482, + "acc_norm": 0.5606060606060606, + "acc_norm_stderr": 0.03536085947529482 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5310344827586206, + "acc_stderr": 0.04158632762097828, + "acc_norm": 0.5310344827586206, + "acc_norm_stderr": 0.04158632762097828 
+ }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.30392156862745096, + "acc_stderr": 0.04576665403207762, + "acc_norm": 0.30392156862745096, + "acc_norm_stderr": 0.04576665403207762 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5504201680672269, + "acc_stderr": 0.03231293497137707, + "acc_norm": 0.5504201680672269, + "acc_norm_stderr": 0.03231293497137707 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.49230769230769234, + "acc_stderr": 0.025348006031534805, + "acc_norm": 0.49230769230769234, + "acc_norm_stderr": 0.025348006031534805 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.56, + "acc_stderr": 0.0498887651569859, + "acc_norm": 0.56, + "acc_norm_stderr": 0.0498887651569859 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6018518518518519, + "acc_stderr": 0.04732332615978814, + "acc_norm": 0.6018518518518519, + "acc_norm_stderr": 0.04732332615978814 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39901477832512317, + "acc_stderr": 0.03445487686264716, + "acc_norm": 0.39901477832512317, + "acc_norm_stderr": 0.03445487686264716 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.535483870967742, + "acc_stderr": 0.028372287797962942, + "acc_norm": 0.535483870967742, + "acc_norm_stderr": 0.028372287797962942 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7094017094017094, + "acc_stderr": 0.029745048572674064, + "acc_norm": 0.7094017094017094, + "acc_norm_stderr": 0.029745048572674064 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5433962264150943, + "acc_stderr": 0.030656748696739438, + "acc_norm": 0.5433962264150943, + "acc_norm_stderr": 0.030656748696739438 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5545454545454546, + "acc_stderr": 0.047605488214603246, + "acc_norm": 0.5545454545454546, + 
"acc_norm_stderr": 0.047605488214603246 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3111111111111111, + "acc_stderr": 0.028226446749683515, + "acc_norm": 0.3111111111111111, + "acc_norm_stderr": 0.028226446749683515 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.39072847682119205, + "acc_stderr": 0.039837983066598075, + "acc_norm": 0.39072847682119205, + "acc_norm_stderr": 0.039837983066598075 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6368159203980099, + "acc_stderr": 0.03400598505599015, + "acc_norm": 0.6368159203980099, + "acc_norm_stderr": 0.03400598505599015 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4508670520231214, + "acc_stderr": 0.03794012674697031, + "acc_norm": 0.4508670520231214, + "acc_norm_stderr": 0.03794012674697031 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3492063492063492, + "acc_stderr": 0.024552292209342658, + "acc_norm": 0.3492063492063492, + "acc_norm_stderr": 0.024552292209342658 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4305555555555556, + "acc_stderr": 0.041406856391115014, + "acc_norm": 0.4305555555555556, + "acc_norm_stderr": 0.041406856391115014 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.67, + "acc_stderr": 0.047258156262526066, + "acc_norm": 0.67, + "acc_norm_stderr": 0.047258156262526066 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5520231213872833, + "acc_stderr": 0.02677299065336182, + "acc_norm": 0.5520231213872833, + "acc_norm_stderr": 0.02677299065336182 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5030674846625767, + "acc_stderr": 0.03928297078179663, + "acc_norm": 0.5030674846625767, + "acc_norm_stderr": 0.03928297078179663 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5401234567901234, + "acc_stderr": 0.02773102275353927, + 
"acc_norm": 0.5401234567901234, + "acc_norm_stderr": 0.02773102275353927 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6010362694300518, + "acc_stderr": 0.03533999094065696, + "acc_norm": 0.6010362694300518, + "acc_norm_stderr": 0.03533999094065696 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3684210526315789, + "acc_stderr": 0.04537815354939391, + "acc_norm": 0.3684210526315789, + "acc_norm_stderr": 0.04537815354939391 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5963302752293578, + "acc_stderr": 0.021035704856574963, + "acc_norm": 0.5963302752293578, + "acc_norm_stderr": 0.021035704856574963 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4603174603174603, + "acc_stderr": 0.04458029125470973, + "acc_norm": 0.4603174603174603, + "acc_norm_stderr": 0.04458029125470973 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5359477124183006, + "acc_stderr": 0.02855582751652878, + "acc_norm": 0.5359477124183006, + "acc_norm_stderr": 0.02855582751652878 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.55, + "acc_stderr": 0.04999999999999999, + "acc_norm": 0.55, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7272727272727273, + "acc_stderr": 0.040655781409087044, + "acc_norm": 0.7272727272727273, + "acc_norm_stderr": 0.040655781409087044 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5131578947368421, + "acc_stderr": 0.04067533136309172, + "acc_norm": 0.5131578947368421, + "acc_norm_stderr": 0.04067533136309172 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4084967320261438, + "acc_stderr": 0.01988622103750187, + "acc_norm": 0.4084967320261438, + "acc_norm_stderr": 0.01988622103750187 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.31560283687943264, 
+ "acc_stderr": 0.027724989449509317, + "acc_norm": 0.31560283687943264, + "acc_norm_stderr": 0.027724989449509317 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.45535714285714285, + "acc_stderr": 0.04726835553719099, + "acc_norm": 0.45535714285714285, + "acc_norm_stderr": 0.04726835553719099 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.47685185185185186, + "acc_stderr": 0.03406315360711507, + "acc_norm": 0.47685185185185186, + "acc_norm_stderr": 0.03406315360711507 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.26145251396648045, + "acc_stderr": 0.014696599650364541, + "acc_norm": 0.26145251396648045, + "acc_norm_stderr": 0.014696599650364541 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.63, + "acc_stderr": 0.048523658709390974, + "acc_norm": 0.63, + "acc_norm_stderr": 0.048523658709390974 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.40441176470588236, + "acc_stderr": 0.029812630701569743, + "acc_norm": 0.40441176470588236, + "acc_norm_stderr": 0.029812630701569743 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5918367346938775, + "acc_stderr": 0.03146465712827424, + "acc_norm": 0.5918367346938775, + "acc_norm_stderr": 0.03146465712827424 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6455696202531646, + "acc_stderr": 0.0311373042971858, + "acc_norm": 0.6455696202531646, + "acc_norm_stderr": 0.0311373042971858 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3539765319426336, + "acc_stderr": 0.012213504731731646, + "acc_norm": 0.3539765319426336, + "acc_norm_stderr": 0.012213504731731646 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6176470588235294, + "acc_stderr": 0.0341078533890472, + "acc_norm": 0.6176470588235294, + "acc_norm_stderr": 0.0341078533890472 + 
}, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5757575757575758, + "acc_stderr": 0.03859268142070264, + "acc_norm": 0.5757575757575758, + "acc_norm_stderr": 0.03859268142070264 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.25458996328029376, + "mc1_stderr": 0.015250117079156456, + "mc2": 0.4178007728405335, + "mc2_stderr": 0.015184449400420122 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5442739079102715, + "acc_stderr": 0.017122829143292648, + "acc_norm": 0.6221959858323495, + "acc_norm_stderr": 0.016669082840694974 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + 
"harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "lcw99/llama-3-10b-ko-240604-e2f", + "model_sha": "ab260a3cceec3f1170c4b6c8e671d79d12fe40a1", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/lcw99/llama-3-10b-wiki-240709-f/result_2024-07-09 19:21:15.json b/lcw99/llama-3-10b-wiki-240709-f/result_2024-07-09 19:21:15.json new file mode 100644 index 0000000000000000000000000000000000000000..dcac7aa0f5784218ce459379397cd0840e248ed8 --- /dev/null +++ b/lcw99/llama-3-10b-wiki-240709-f/result_2024-07-09 19:21:15.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + 
"acc": 0.37542662116040953, + "acc_stderr": 0.014150631435111728, + "acc_norm": 0.447098976109215, + "acc_norm_stderr": 0.014529380160526843 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3968333001394145, + "acc_stderr": 0.00488241002993544, + "acc_norm": 0.5264887472615017, + "acc_norm_stderr": 0.004982774293927772 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5380116959064327, + "acc_stderr": 0.038237270928823064, + "acc_norm": 0.5380116959064327, + "acc_norm_stderr": 0.038237270928823064 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6990291262135923, + "acc_stderr": 0.04541609446503948, + "acc_norm": 0.6990291262135923, + "acc_norm_stderr": 0.04541609446503948 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.49680715197956576, + "acc_stderr": 0.017879598945933068, + "acc_norm": 0.49680715197956576, + "acc_norm_stderr": 0.017879598945933068 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.04244633238353229, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.04244633238353229 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4808510638297872, + "acc_stderr": 0.032662042990646775, + "acc_norm": 0.4808510638297872, + "acc_norm_stderr": 0.032662042990646775 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39156626506024095, + "acc_stderr": 0.037998574544796354, + "acc_norm": 0.39156626506024095, + "acc_norm_stderr": 0.037998574544796354 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5305466237942122, + "acc_stderr": 0.028345045864840625, + "acc_norm": 0.5305466237942122, + "acc_norm_stderr": 0.028345045864840625 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.484304932735426, + "acc_stderr": 0.0335412657542081, + "acc_norm": 0.484304932735426, + "acc_norm_stderr": 0.0335412657542081 + }, + 
"harness|ko_mmlu_human_sexuality|5": { + "acc": 0.549618320610687, + "acc_stderr": 0.04363643698524779, + "acc_norm": 0.549618320610687, + "acc_norm_stderr": 0.04363643698524779 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5808080808080808, + "acc_stderr": 0.035155207286704175, + "acc_norm": 0.5808080808080808, + "acc_norm_stderr": 0.035155207286704175 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.503448275862069, + "acc_stderr": 0.04166567577101579, + "acc_norm": 0.503448275862069, + "acc_norm_stderr": 0.04166567577101579 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.04389869956808777, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.04389869956808777 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6176470588235294, + "acc_stderr": 0.031566630992154156, + "acc_norm": 0.6176470588235294, + "acc_norm_stderr": 0.031566630992154156 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.517948717948718, + "acc_stderr": 0.025334667080954887, + "acc_norm": 0.517948717948718, + "acc_norm_stderr": 0.025334667080954887 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.59, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.59, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6203703703703703, + "acc_stderr": 0.04691521224077742, + "acc_norm": 0.6203703703703703, + "acc_norm_stderr": 0.04691521224077742 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.458128078817734, + "acc_stderr": 0.03505630140785741, + "acc_norm": 0.458128078817734, + "acc_norm_stderr": 
0.03505630140785741 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5580645161290323, + "acc_stderr": 0.028251557906849748, + "acc_norm": 0.5580645161290323, + "acc_norm_stderr": 0.028251557906849748 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7478632478632479, + "acc_stderr": 0.02844796547623102, + "acc_norm": 0.7478632478632479, + "acc_norm_stderr": 0.02844796547623102 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5509433962264151, + "acc_stderr": 0.030612730713641092, + "acc_norm": 0.5509433962264151, + "acc_norm_stderr": 0.030612730713641092 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5545454545454546, + "acc_stderr": 0.047605488214603246, + "acc_norm": 0.5545454545454546, + "acc_norm_stderr": 0.047605488214603246 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.31851851851851853, + "acc_stderr": 0.02840653309060846, + "acc_norm": 0.31851851851851853, + "acc_norm_stderr": 0.02840653309060846 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3708609271523179, + "acc_stderr": 0.03943966699183629, + "acc_norm": 0.3708609271523179, + "acc_norm_stderr": 0.03943966699183629 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6318407960199005, + "acc_stderr": 0.03410410565495302, + "acc_norm": 0.6318407960199005, + "acc_norm_stderr": 0.03410410565495302 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4508670520231214, + "acc_stderr": 0.0379401267469703, + "acc_norm": 0.4508670520231214, + "acc_norm_stderr": 0.0379401267469703 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.36243386243386244, + "acc_stderr": 0.024757473902752056, + "acc_norm": 0.36243386243386244, + "acc_norm_stderr": 0.024757473902752056 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4791666666666667, + "acc_stderr": 0.041775789507399935, + "acc_norm": 0.4791666666666667, + "acc_norm_stderr": 0.041775789507399935 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.43, + "acc_stderr": 
0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.68, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.68, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5549132947976878, + "acc_stderr": 0.02675625512966377, + "acc_norm": 0.5549132947976878, + "acc_norm_stderr": 0.02675625512966377 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.48466257668711654, + "acc_stderr": 0.039265223787088424, + "acc_norm": 0.48466257668711654, + "acc_norm_stderr": 0.039265223787088424 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.558641975308642, + "acc_stderr": 0.02762873715566877, + "acc_norm": 0.558641975308642, + "acc_norm_stderr": 0.02762873715566877 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5854922279792746, + "acc_stderr": 0.035553003195576686, + "acc_norm": 0.5854922279792746, + "acc_norm_stderr": 0.035553003195576686 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3684210526315789, + "acc_stderr": 0.04537815354939391, + "acc_norm": 0.3684210526315789, + "acc_norm_stderr": 0.04537815354939391 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6073394495412844, + "acc_stderr": 0.020937505161201093, + "acc_norm": 0.6073394495412844, + "acc_norm_stderr": 0.020937505161201093 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.42063492063492064, + "acc_stderr": 0.04415438226743743, + "acc_norm": 0.42063492063492064, + "acc_norm_stderr": 0.04415438226743743 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5163398692810458, + "acc_stderr": 0.028614624752805427, + "acc_norm": 0.5163398692810458, + "acc_norm_stderr": 0.028614624752805427 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.57, + 
"acc_stderr": 0.049756985195624284, + "acc_norm": 0.57, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7355371900826446, + "acc_stderr": 0.040261875275912046, + "acc_norm": 0.7355371900826446, + "acc_norm_stderr": 0.040261875275912046 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5131578947368421, + "acc_stderr": 0.04067533136309172, + "acc_norm": 0.5131578947368421, + "acc_norm_stderr": 0.04067533136309172 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.43137254901960786, + "acc_stderr": 0.02003639376835263, + "acc_norm": 0.43137254901960786, + "acc_norm_stderr": 0.02003639376835263 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3120567375886525, + "acc_stderr": 0.027640120545169938, + "acc_norm": 0.3120567375886525, + "acc_norm_stderr": 0.027640120545169938 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4107142857142857, + "acc_stderr": 0.046695106638751926, + "acc_norm": 0.4107142857142857, + "acc_norm_stderr": 0.046695106638751926 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.03388857118502325, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.03388857118502325 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4007352941176471, + "acc_stderr": 0.029768263528933105, + "acc_norm": 0.4007352941176471, + "acc_norm_stderr": 0.029768263528933105 + }, + 
"harness|ko_mmlu_security_studies|5": { + "acc": 0.5795918367346938, + "acc_stderr": 0.03160106993449601, + "acc_norm": 0.5795918367346938, + "acc_norm_stderr": 0.03160106993449601 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6624472573839663, + "acc_stderr": 0.030781549102026205, + "acc_norm": 0.6624472573839663, + "acc_norm_stderr": 0.030781549102026205 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.35071707953063885, + "acc_stderr": 0.01218777337074152, + "acc_norm": 0.35071707953063885, + "acc_norm_stderr": 0.01218777337074152 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5980392156862745, + "acc_stderr": 0.03441190023482465, + "acc_norm": 0.5980392156862745, + "acc_norm_stderr": 0.03441190023482465 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6181818181818182, + "acc_stderr": 0.03793713171165634, + "acc_norm": 0.6181818181818182, + "acc_norm_stderr": 0.03793713171165634 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.26560587515299877, + "mc1_stderr": 0.0154610276272536, + "mc2": 0.42803315462125224, + "mc2_stderr": 0.015070050124664487 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.46989374262101535, + "acc_stderr": 0.017159163590170216, + "acc_norm": 0.5879574970484062, + "acc_norm_stderr": 0.01692227673852836 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + 
"harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + 
"harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "lcw99/llama-3-10b-wiki-240709-f", + "model_sha": "c4fde3e9d2ff37cd2c4ea662975d76b6434cb785", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/lcw99/llama-3-8b-it-kor-extented-chang/result_2024-05-02 23:45:03.json b/lcw99/llama-3-8b-it-kor-extented-chang/result_2024-05-02 23:45:03.json new file mode 100644 index 0000000000000000000000000000000000000000..0669f1eda80f573b97335aaa70b4a61c516c6407 --- /dev/null +++ b/lcw99/llama-3-8b-it-kor-extented-chang/result_2024-05-02 23:45:03.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.37542662116040953, + "acc_stderr": 0.014150631435111728, + "acc_norm": 0.4300341296928328, + "acc_norm_stderr": 0.014467631559138003 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3752240589523999, + "acc_stderr": 0.004831911860478689, + "acc_norm": 0.5010953993228441, + "acc_norm_stderr": 0.0049897694369569244 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5614035087719298, + "acc_stderr": 0.038057975055904594, + "acc_norm": 0.5614035087719298, + "acc_norm_stderr": 0.038057975055904594 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6213592233009708, + "acc_stderr": 0.04802694698258974, + "acc_norm": 0.6213592233009708, + "acc_norm_stderr": 0.04802694698258974 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4878671775223499, + "acc_stderr": 0.017874698667491345, + "acc_norm": 0.4878671775223499, + "acc_norm_stderr": 0.017874698667491345 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37777777777777777, + "acc_stderr": 0.04188307537595853, + "acc_norm": 0.37777777777777777, + "acc_norm_stderr": 0.04188307537595853 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 
0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.46808510638297873, + "acc_stderr": 0.03261936918467382, + "acc_norm": 0.46808510638297873, + "acc_norm_stderr": 0.03261936918467382 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.41566265060240964, + "acc_stderr": 0.038367221765980515, + "acc_norm": 0.41566265060240964, + "acc_norm_stderr": 0.038367221765980515 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5112540192926045, + "acc_stderr": 0.028390897396863533, + "acc_norm": 0.5112540192926045, + "acc_norm_stderr": 0.028390897396863533 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5022421524663677, + "acc_stderr": 0.033557465352232634, + "acc_norm": 0.5022421524663677, + "acc_norm_stderr": 0.033557465352232634 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4732824427480916, + "acc_stderr": 0.04379024936553893, + "acc_norm": 0.4732824427480916, + "acc_norm_stderr": 0.04379024936553893 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5202020202020202, + "acc_stderr": 0.035594435655639196, + "acc_norm": 0.5202020202020202, + "acc_norm_stderr": 0.035594435655639196 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5517241379310345, + "acc_stderr": 0.04144311810878152, + "acc_norm": 0.5517241379310345, + "acc_norm_stderr": 0.04144311810878152 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.04617034827006716, + "acc_norm": 0.3137254901960784, + "acc_norm_stderr": 0.04617034827006716 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5504201680672269, + "acc_stderr": 0.03231293497137707, + "acc_norm": 0.5504201680672269, + "acc_norm_stderr": 0.03231293497137707 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.48205128205128206, + "acc_stderr": 
0.025334667080954953, + "acc_norm": 0.48205128205128206, + "acc_norm_stderr": 0.025334667080954953 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6018518518518519, + "acc_stderr": 0.04732332615978814, + "acc_norm": 0.6018518518518519, + "acc_norm_stderr": 0.04732332615978814 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.458128078817734, + "acc_stderr": 0.03505630140785741, + "acc_norm": 0.458128078817734, + "acc_norm_stderr": 0.03505630140785741 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.535483870967742, + "acc_stderr": 0.02837228779796295, + "acc_norm": 0.535483870967742, + "acc_norm_stderr": 0.02837228779796295 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7435897435897436, + "acc_stderr": 0.02860595370200425, + "acc_norm": 0.7435897435897436, + "acc_norm_stderr": 0.02860595370200425 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4981132075471698, + "acc_stderr": 0.030772653642075664, + "acc_norm": 0.4981132075471698, + "acc_norm_stderr": 0.030772653642075664 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5727272727272728, + "acc_stderr": 0.04738198703545483, + "acc_norm": 0.5727272727272728, + "acc_norm_stderr": 0.04738198703545483 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.337037037037037, + "acc_stderr": 0.028820884666253255, + "acc_norm": 0.337037037037037, + "acc_norm_stderr": 0.028820884666253255 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3509933774834437, + "acc_stderr": 0.03896981964257375, + "acc_norm": 0.3509933774834437, + "acc_norm_stderr": 0.03896981964257375 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6517412935323383, + 
"acc_stderr": 0.03368787466115459, + "acc_norm": 0.6517412935323383, + "acc_norm_stderr": 0.03368787466115459 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4508670520231214, + "acc_stderr": 0.0379401267469703, + "acc_norm": 0.4508670520231214, + "acc_norm_stderr": 0.0379401267469703 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.36772486772486773, + "acc_stderr": 0.02483383982556242, + "acc_norm": 0.36772486772486773, + "acc_norm_stderr": 0.02483383982556242 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4583333333333333, + "acc_stderr": 0.04166666666666665, + "acc_norm": 0.4583333333333333, + "acc_norm_stderr": 0.04166666666666665 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.71, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.71, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5549132947976878, + "acc_stderr": 0.02675625512966377, + "acc_norm": 0.5549132947976878, + "acc_norm_stderr": 0.02675625512966377 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5153374233128835, + "acc_stderr": 0.03926522378708843, + "acc_norm": 0.5153374233128835, + "acc_norm_stderr": 0.03926522378708843 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5370370370370371, + "acc_stderr": 0.02774431344337654, + "acc_norm": 0.5370370370370371, + "acc_norm_stderr": 0.02774431344337654 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5595854922279793, + "acc_stderr": 0.035827245300360945, + "acc_norm": 0.5595854922279793, + "acc_norm_stderr": 0.035827245300360945 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 
0.39473684210526316, + "acc_stderr": 0.04598188057816542, + "acc_norm": 0.39473684210526316, + "acc_norm_stderr": 0.04598188057816542 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5963302752293578, + "acc_stderr": 0.021035704856574963, + "acc_norm": 0.5963302752293578, + "acc_norm_stderr": 0.021035704856574963 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4603174603174603, + "acc_stderr": 0.04458029125470973, + "acc_norm": 0.4603174603174603, + "acc_norm_stderr": 0.04458029125470973 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5261437908496732, + "acc_stderr": 0.028590752958852394, + "acc_norm": 0.5261437908496732, + "acc_norm_stderr": 0.028590752958852394 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6694214876033058, + "acc_stderr": 0.04294340845212094, + "acc_norm": 0.6694214876033058, + "acc_norm_stderr": 0.04294340845212094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4934210526315789, + "acc_stderr": 0.040685900502249704, + "acc_norm": 0.4934210526315789, + "acc_norm_stderr": 0.040685900502249704 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4297385620915033, + "acc_stderr": 0.020027122784928558, + "acc_norm": 0.4297385620915033, + "acc_norm_stderr": 0.020027122784928558 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32978723404255317, + "acc_stderr": 0.028045946942042398, + "acc_norm": 0.32978723404255317, + "acc_norm_stderr": 0.028045946942042398 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.48214285714285715, + "acc_stderr": 0.047427623612430116, + "acc_norm": 0.48214285714285715, + "acc_norm_stderr": 0.047427623612430116 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4212962962962963, + "acc_stderr": 0.033674621388960775, + "acc_norm": 0.4212962962962963, + "acc_norm_stderr": 
0.033674621388960775 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.3664804469273743, + "acc_stderr": 0.01611523550486549, + "acc_norm": 0.3664804469273743, + "acc_norm_stderr": 0.01611523550486549 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.66, + "acc_stderr": 0.04760952285695237, + "acc_norm": 0.66, + "acc_norm_stderr": 0.04760952285695237 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.029520095697687758, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.029520095697687758 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6122448979591837, + "acc_stderr": 0.031192230726795656, + "acc_norm": 0.6122448979591837, + "acc_norm_stderr": 0.031192230726795656 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.679324894514768, + "acc_stderr": 0.03038193194999041, + "acc_norm": 0.679324894514768, + "acc_norm_stderr": 0.03038193194999041 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.37027379400260757, + "acc_stderr": 0.012332930781256725, + "acc_norm": 0.37027379400260757, + "acc_norm_stderr": 0.012332930781256725 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5735294117647058, + "acc_stderr": 0.03471157907953427, + "acc_norm": 0.5735294117647058, + "acc_norm_stderr": 0.03471157907953427 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6121212121212121, + "acc_stderr": 0.0380491365397101, + "acc_norm": 0.6121212121212121, + "acc_norm_stderr": 0.0380491365397101 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.29253365973072215, + "mc1_stderr": 0.015925597445286165, + "mc2": 0.45173201426001913, + "mc2_stderr": 0.015444530551155628 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4214876033057851, + "acc_stderr": 0.016977101932601518, + 
"acc_norm": 0.5312868949232585, + "acc_norm_stderr": 0.017156666859785476 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + 
"harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "lcw99/llama-3-8b-it-kor-extented-chang", + "model_sha": "5358311164404c3e22ecbf5faf11b0c13a752c81", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/lcw99/llama2-ko-chang-13b-instruct-chat/result_2023-11-08 22:04:06.json b/lcw99/llama2-ko-chang-13b-instruct-chat/result_2023-11-08 22:04:06.json new file mode 100644 index 0000000000000000000000000000000000000000..063bb5d99e47ec05b342aa9dbd006af36d83d04e --- /dev/null +++ b/lcw99/llama2-ko-chang-13b-instruct-chat/result_2023-11-08 22:04:06.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3916382252559727, + "acc_stderr": 0.014264122124938215, + "acc_norm": 0.46501706484641636, + "acc_norm_stderr": 0.014575583922019677 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4447321250746863, + "acc_stderr": 0.004959204773046197, + "acc_norm": 0.5995817566221868, + "acc_norm_stderr": 0.004889817489739683 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4619883040935672, + "acc_stderr": 0.03823727092882307, + "acc_norm": 
0.4619883040935672, + "acc_norm_stderr": 0.03823727092882307 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5242718446601942, + "acc_stderr": 0.049449010929737795, + "acc_norm": 0.5242718446601942, + "acc_norm_stderr": 0.049449010929737795 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5325670498084292, + "acc_stderr": 0.01784199575052087, + "acc_norm": 0.5325670498084292, + "acc_norm_stderr": 0.01784199575052087 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45185185185185184, + "acc_stderr": 0.04299268905480863, + "acc_norm": 0.45185185185185184, + "acc_norm_stderr": 0.04299268905480863 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3829787234042553, + "acc_stderr": 0.03177821250236922, + "acc_norm": 0.3829787234042553, + "acc_norm_stderr": 0.03177821250236922 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3855421686746988, + "acc_stderr": 0.03789134424611548, + "acc_norm": 0.3855421686746988, + "acc_norm_stderr": 0.03789134424611548 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5016077170418006, + "acc_stderr": 0.02839794490780661, + "acc_norm": 0.5016077170418006, + "acc_norm_stderr": 0.02839794490780661 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4798206278026906, + "acc_stderr": 0.033530461674123, + "acc_norm": 0.4798206278026906, + "acc_norm_stderr": 0.033530461674123 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4351145038167939, + "acc_stderr": 0.04348208051644858, + "acc_norm": 0.4351145038167939, + "acc_norm_stderr": 0.04348208051644858 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.494949494949495, + "acc_stderr": 0.035621707606254015, + "acc_norm": 
0.494949494949495, + "acc_norm_stderr": 0.035621707606254015 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.43448275862068964, + "acc_stderr": 0.04130740879555497, + "acc_norm": 0.43448275862068964, + "acc_norm_stderr": 0.04130740879555497 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.1568627450980392, + "acc_stderr": 0.036186648199362466, + "acc_norm": 0.1568627450980392, + "acc_norm_stderr": 0.036186648199362466 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.37815126050420167, + "acc_stderr": 0.031499305777849054, + "acc_norm": 0.37815126050420167, + "acc_norm_stderr": 0.031499305777849054 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4205128205128205, + "acc_stderr": 0.025028610276710855, + "acc_norm": 0.4205128205128205, + "acc_norm_stderr": 0.025028610276710855 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.53, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.53, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3793103448275862, + "acc_stderr": 0.03413963805906235, + "acc_norm": 0.3793103448275862, + "acc_norm_stderr": 0.03413963805906235 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.44193548387096776, + "acc_stderr": 0.02825155790684974, + "acc_norm": 0.44193548387096776, + "acc_norm_stderr": 0.02825155790684974 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.03255326307272486, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.03255326307272486 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3849056603773585, 
+ "acc_stderr": 0.02994649856769995, + "acc_norm": 0.3849056603773585, + "acc_norm_stderr": 0.02994649856769995 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.43636363636363634, + "acc_stderr": 0.04750185058907297, + "acc_norm": 0.43636363636363634, + "acc_norm_stderr": 0.04750185058907297 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2814814814814815, + "acc_stderr": 0.027420019350945273, + "acc_norm": 0.2814814814814815, + "acc_norm_stderr": 0.027420019350945273 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.25165562913907286, + "acc_stderr": 0.03543304234389985, + "acc_norm": 0.25165562913907286, + "acc_norm_stderr": 0.03543304234389985 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.47761194029850745, + "acc_stderr": 0.03531987930208731, + "acc_norm": 0.47761194029850745, + "acc_norm_stderr": 0.03531987930208731 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3930635838150289, + "acc_stderr": 0.037242495958177295, + "acc_norm": 0.3930635838150289, + "acc_norm_stderr": 0.037242495958177295 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2830687830687831, + "acc_stderr": 0.023201392938194978, + "acc_norm": 0.2830687830687831, + "acc_norm_stderr": 0.023201392938194978 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3541666666666667, + "acc_stderr": 0.039994111357535424, + "acc_norm": 0.3541666666666667, + "acc_norm_stderr": 0.039994111357535424 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.49421965317919075, + "acc_stderr": 0.026917296179149123, + "acc_norm": 0.49421965317919075, + "acc_norm_stderr": 0.026917296179149123 + }, + 
"harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4723926380368098, + "acc_stderr": 0.039223782906109894, + "acc_norm": 0.4723926380368098, + "acc_norm_stderr": 0.039223782906109894 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5061728395061729, + "acc_stderr": 0.02781862396258329, + "acc_norm": 0.5061728395061729, + "acc_norm_stderr": 0.02781862396258329 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.48704663212435234, + "acc_stderr": 0.03607228061047749, + "acc_norm": 0.48704663212435234, + "acc_norm_stderr": 0.03607228061047749 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.04185774424022056, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.04185774424022056 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5100917431192661, + "acc_stderr": 0.021432956203453327, + "acc_norm": 0.5100917431192661, + "acc_norm_stderr": 0.021432956203453327 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.23015873015873015, + "acc_stderr": 0.03764950879790606, + "acc_norm": 0.23015873015873015, + "acc_norm_stderr": 0.03764950879790606 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.027914055510468, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.027914055510468 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6033057851239669, + "acc_stderr": 0.044658697805310094, + "acc_norm": 0.6033057851239669, + "acc_norm_stderr": 0.044658697805310094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40131578947368424, + "acc_stderr": 0.03988903703336284, + "acc_norm": 0.40131578947368424, + "acc_norm_stderr": 
0.03988903703336284 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3741830065359477, + "acc_stderr": 0.019576953122088844, + "acc_norm": 0.3741830065359477, + "acc_norm_stderr": 0.019576953122088844 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.30141843971631205, + "acc_stderr": 0.02737412888263115, + "acc_norm": 0.30141843971631205, + "acc_norm_stderr": 0.02737412888263115 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.29464285714285715, + "acc_stderr": 0.0432704093257873, + "acc_norm": 0.29464285714285715, + "acc_norm_stderr": 0.0432704093257873 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.03054674526495319, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.03054674526495319 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24804469273743016, + "acc_stderr": 0.014444157808261453, + "acc_norm": 0.24804469273743016, + "acc_norm_stderr": 0.014444157808261453 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.56, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.56, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.027678468642144696, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.027678468642144696 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.46122448979591835, + "acc_stderr": 0.03191282052669277, + "acc_norm": 0.46122448979591835, + "acc_norm_stderr": 0.03191282052669277 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5780590717299579, + "acc_stderr": 0.032148146302403695, + "acc_norm": 0.5780590717299579, + "acc_norm_stderr": 0.032148146302403695 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.31421121251629724, + 
"acc_stderr": 0.011855911587048228, + "acc_norm": 0.31421121251629724, + "acc_norm_stderr": 0.011855911587048228 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.03484941514429231, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.03484941514429231 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.03888176921674101, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.03888176921674101 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3108935128518972, + "mc1_stderr": 0.016203316673559696, + "mc2": 0.47797395322509245, + "mc2_stderr": 0.015295300677969451 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3919716646989374, + "acc_stderr": 0.016784332119424088, + "acc_norm": 0.4852420306965762, + "acc_norm_stderr": 0.017182864434998564 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + 
"harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "lcw99/llama2-ko-chang-13b-instruct-chat", + "model_sha": "50d21acccdfed4780c8f38892ae3a7dc30bf02b6", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/lcw99/llama2-ko-chang-13b-instruct-chat/result_2023-11-13 02:04:55.json 
b/lcw99/llama2-ko-chang-13b-instruct-chat/result_2023-11-13 02:04:55.json new file mode 100644 index 0000000000000000000000000000000000000000..15186f8a920ebe8d6d86066d074f9a4dd4e8ef5c --- /dev/null +++ b/lcw99/llama2-ko-chang-13b-instruct-chat/result_2023-11-13 02:04:55.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3916382252559727, + "acc_stderr": 0.014264122124938218, + "acc_norm": 0.4667235494880546, + "acc_norm_stderr": 0.014578995859605818 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4404501095399323, + "acc_stderr": 0.004954265595373462, + "acc_norm": 0.5983867755427206, + "acc_norm_stderr": 0.004892226011836585 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4327485380116959, + "acc_stderr": 0.03799978644370607, + "acc_norm": 0.4327485380116959, + "acc_norm_stderr": 0.03799978644370607 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5048543689320388, + "acc_stderr": 0.049505043821289195, + "acc_norm": 0.5048543689320388, + "acc_norm_stderr": 0.049505043821289195 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.545338441890166, + "acc_stderr": 0.0178063045850526, + "acc_norm": 0.545338441890166, + "acc_norm_stderr": 0.0178063045850526 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4888888888888889, + "acc_stderr": 0.04318275491977976, + "acc_norm": 0.4888888888888889, + "acc_norm_stderr": 0.04318275491977976 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.42127659574468085, + "acc_stderr": 0.03227834510146268, + "acc_norm": 0.42127659574468085, + "acc_norm_stderr": 0.03227834510146268 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4036144578313253, + "acc_stderr": 0.03819486140758398, + "acc_norm": 0.4036144578313253, + "acc_norm_stderr": 0.03819486140758398 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 
0.4758842443729904, + "acc_stderr": 0.028365041542564577, + "acc_norm": 0.4758842443729904, + "acc_norm_stderr": 0.028365041542564577 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5067264573991032, + "acc_stderr": 0.03355476596234354, + "acc_norm": 0.5067264573991032, + "acc_norm_stderr": 0.03355476596234354 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48091603053435117, + "acc_stderr": 0.04382094705550988, + "acc_norm": 0.48091603053435117, + "acc_norm_stderr": 0.04382094705550988 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.4898989898989899, + "acc_stderr": 0.035616254886737454, + "acc_norm": 0.4898989898989899, + "acc_norm_stderr": 0.035616254886737454 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.43448275862068964, + "acc_stderr": 0.04130740879555497, + "acc_norm": 0.43448275862068964, + "acc_norm_stderr": 0.04130740879555497 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.1568627450980392, + "acc_stderr": 0.036186648199362466, + "acc_norm": 0.1568627450980392, + "acc_norm_stderr": 0.036186648199362466 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4369747899159664, + "acc_stderr": 0.032219436365661956, + "acc_norm": 0.4369747899159664, + "acc_norm_stderr": 0.032219436365661956 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4256410256410256, + "acc_stderr": 0.02506909438729654, + "acc_norm": 0.4256410256410256, + "acc_norm_stderr": 0.02506909438729654 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + 
"harness|ko_mmlu_jurisprudence|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3891625615763547, + "acc_stderr": 0.034304624161038716, + "acc_norm": 0.3891625615763547, + "acc_norm_stderr": 0.034304624161038716 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.432258064516129, + "acc_stderr": 0.028181739720019413, + "acc_norm": 0.432258064516129, + "acc_norm_stderr": 0.028181739720019413 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5726495726495726, + "acc_stderr": 0.03240847393516327, + "acc_norm": 0.5726495726495726, + "acc_norm_stderr": 0.03240847393516327 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4226415094339623, + "acc_stderr": 0.030402331445769537, + "acc_norm": 0.4226415094339623, + "acc_norm_stderr": 0.030402331445769537 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4818181818181818, + "acc_stderr": 0.04785964010794917, + "acc_norm": 0.4818181818181818, + "acc_norm_stderr": 0.04785964010794917 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.027309140588230165, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.027309140588230165 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + "acc_stderr": 0.03684881521389023, + "acc_norm": 0.2847682119205298, + "acc_norm_stderr": 0.03684881521389023 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5174129353233831, + "acc_stderr": 0.03533389234739245, + "acc_norm": 0.5174129353233831, + "acc_norm_stderr": 0.03533389234739245 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.41040462427745666, + "acc_stderr": 0.03750757044895537, + "acc_norm": 0.41040462427745666, + "acc_norm_stderr": 0.03750757044895537 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30158730158730157, + "acc_stderr": 
0.023636975996101813, + "acc_norm": 0.30158730158730157, + "acc_norm_stderr": 0.023636975996101813 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.039420826399272135, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.039420826399272135 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5, + "acc_stderr": 0.026919095102908273, + "acc_norm": 0.5, + "acc_norm_stderr": 0.026919095102908273 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.49079754601226994, + "acc_stderr": 0.03927705600787443, + "acc_norm": 0.49079754601226994, + "acc_norm_stderr": 0.03927705600787443 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.49691358024691357, + "acc_stderr": 0.027820214158594377, + "acc_norm": 0.49691358024691357, + "acc_norm_stderr": 0.027820214158594377 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.49740932642487046, + "acc_stderr": 0.03608390745384487, + "acc_norm": 0.49740932642487046, + "acc_norm_stderr": 0.03608390745384487 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.21929824561403508, + "acc_stderr": 0.03892431106518753, + "acc_norm": 0.21929824561403508, + "acc_norm_stderr": 0.03892431106518753 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5339449541284403, + "acc_stderr": 0.021387863350353985, + "acc_norm": 0.5339449541284403, + "acc_norm_stderr": 0.021387863350353985 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2619047619047619, + "acc_stderr": 
0.0393253768039287, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.0393253768039287 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4150326797385621, + "acc_stderr": 0.028213504177824093, + "acc_norm": 0.4150326797385621, + "acc_norm_stderr": 0.028213504177824093 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5785123966942148, + "acc_stderr": 0.04507732278775087, + "acc_norm": 0.5785123966942148, + "acc_norm_stderr": 0.04507732278775087 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.42105263157894735, + "acc_stderr": 0.04017901275981748, + "acc_norm": 0.42105263157894735, + "acc_norm_stderr": 0.04017901275981748 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.37254901960784315, + "acc_stderr": 0.01955964680921593, + "acc_norm": 0.37254901960784315, + "acc_norm_stderr": 0.01955964680921593 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3120567375886525, + "acc_stderr": 0.02764012054516993, + "acc_norm": 0.3120567375886525, + "acc_norm_stderr": 0.02764012054516993 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.042878587513404565, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.042878587513404565 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.30092592592592593, + "acc_stderr": 0.031280390843298804, + "acc_norm": 0.30092592592592593, + "acc_norm_stderr": 0.031280390843298804 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.25139664804469275, + "acc_stderr": 0.014508979453553976, + "acc_norm": 0.25139664804469275, + "acc_norm_stderr": 0.014508979453553976 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + 
"harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3786764705882353, + "acc_stderr": 0.02946513363977613, + "acc_norm": 0.3786764705882353, + "acc_norm_stderr": 0.02946513363977613 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4816326530612245, + "acc_stderr": 0.03198761546763126, + "acc_norm": 0.4816326530612245, + "acc_norm_stderr": 0.03198761546763126 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6497890295358649, + "acc_stderr": 0.031052391937584346, + "acc_norm": 0.6497890295358649, + "acc_norm_stderr": 0.031052391937584346 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.30638852672750977, + "acc_stderr": 0.011773980329380726, + "acc_norm": 0.30638852672750977, + "acc_norm_stderr": 0.011773980329380726 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.46078431372549017, + "acc_stderr": 0.03498501649369527, + "acc_norm": 0.46078431372549017, + "acc_norm_stderr": 0.03498501649369527 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5393939393939394, + "acc_stderr": 0.03892207016552012, + "acc_norm": 0.5393939393939394, + "acc_norm_stderr": 0.03892207016552012 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3292533659730722, + "mc1_stderr": 0.016451264440068246, + "mc2": 0.4970299025244721, + "mc2_stderr": 0.01555960496501192 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.43683589138134593, + "acc_stderr": 0.01705263355985608, + "acc_norm": 0.5076741440377804, + "acc_norm_stderr": 0.017188329219654273 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + 
"harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + 
"harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "lcw99/llama2-ko-chang-13b-instruct-chat", + "model_sha": "7eea2a6e0ff1251e701daf9171d72790e7b54c68", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/lcw99/llama2-ko-chang-instruct-chat/result_2023-10-15 20:02:06.json b/lcw99/llama2-ko-chang-instruct-chat/result_2023-10-15 20:02:06.json new file mode 100644 index 0000000000000000000000000000000000000000..cecac7d25ac732a742eb840c0d73dc96186d790d --- /dev/null +++ b/lcw99/llama2-ko-chang-instruct-chat/result_2023-10-15 20:02:06.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3447098976109215, + "acc_stderr": 0.013888816286782114, + "acc_norm": 0.39419795221843, + "acc_norm_stderr": 0.014280522667467327 + }, + "harness|ko_hellaswag|10": { + "acc": 0.39852619000199163, + "acc_stderr": 0.004885942040894556, + "acc_norm": 0.5248954391555467, + "acc_norm_stderr": 0.0049835924109341715 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.38596491228070173, + "acc_stderr": 0.03733756969066164, + "acc_norm": 0.38596491228070173, + "acc_norm_stderr": 0.03733756969066164 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.23300970873786409, + "acc_stderr": 0.04185832598928315, + "acc_norm": 0.23300970873786409, + "acc_norm_stderr": 0.04185832598928315 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.37292464878671777, + "acc_stderr": 0.017292868269453924, + "acc_norm": 
0.37292464878671777, + "acc_norm_stderr": 0.017292868269453924 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.362962962962963, + "acc_stderr": 0.04153948404742399, + "acc_norm": 0.362962962962963, + "acc_norm_stderr": 0.04153948404742399 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.225531914893617, + "acc_stderr": 0.02732107841738753, + "acc_norm": 0.225531914893617, + "acc_norm_stderr": 0.02732107841738753 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.21686746987951808, + "acc_stderr": 0.03208284450356365, + "acc_norm": 0.21686746987951808, + "acc_norm_stderr": 0.03208284450356365 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.33762057877813506, + "acc_stderr": 0.026858825879488558, + "acc_norm": 0.33762057877813506, + "acc_norm_stderr": 0.026858825879488558 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.33183856502242154, + "acc_stderr": 0.03160295143776679, + "acc_norm": 0.33183856502242154, + "acc_norm_stderr": 0.03160295143776679 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.35877862595419846, + "acc_stderr": 0.04206739313864908, + "acc_norm": 0.35877862595419846, + "acc_norm_stderr": 0.04206739313864908 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.2828282828282828, + "acc_stderr": 0.03208779558786751, + "acc_norm": 0.2828282828282828, + "acc_norm_stderr": 0.03208779558786751 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3586206896551724, + "acc_stderr": 0.039966295748767186, + "acc_norm": 0.3586206896551724, + "acc_norm_stderr": 0.039966295748767186 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.04023382273617747, + 
"acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.04023382273617747 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.2605042016806723, + "acc_stderr": 0.028510251512341926, + "acc_norm": 0.2605042016806723, + "acc_norm_stderr": 0.028510251512341926 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.20256410256410257, + "acc_stderr": 0.020377660970371372, + "acc_norm": 0.20256410256410257, + "acc_norm_stderr": 0.020377660970371372 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036846, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036846 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.044531975073749834, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.044531975073749834 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.22660098522167488, + "acc_stderr": 0.029454863835292982, + "acc_norm": 0.22660098522167488, + "acc_norm_stderr": 0.029454863835292982 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.267741935483871, + "acc_stderr": 0.025189006660212385, + "acc_norm": 0.267741935483871, + "acc_norm_stderr": 0.025189006660212385 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.38461538461538464, + "acc_stderr": 0.03187195347942466, + "acc_norm": 0.38461538461538464, + "acc_norm_stderr": 0.03187195347942466 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.26037735849056604, + "acc_stderr": 0.02700876609070809, + "acc_norm": 0.26037735849056604, + "acc_norm_stderr": 0.02700876609070809 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2909090909090909, + "acc_stderr": 0.04350271442923243, + "acc_norm": 0.2909090909090909, + "acc_norm_stderr": 0.04350271442923243 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 
0.24074074074074073, + "acc_stderr": 0.026067159222275794, + "acc_norm": 0.24074074074074073, + "acc_norm_stderr": 0.026067159222275794 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2582781456953642, + "acc_stderr": 0.035737053147634576, + "acc_norm": 0.2582781456953642, + "acc_norm_stderr": 0.035737053147634576 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.35323383084577115, + "acc_stderr": 0.03379790611796776, + "acc_norm": 0.35323383084577115, + "acc_norm_stderr": 0.03379790611796776 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.26011560693641617, + "acc_stderr": 0.033450369167889904, + "acc_norm": 0.26011560693641617, + "acc_norm_stderr": 0.033450369167889904 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.20634920634920634, + "acc_stderr": 0.020842290930114662, + "acc_norm": 0.20634920634920634, + "acc_norm_stderr": 0.020842290930114662 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2708333333333333, + "acc_stderr": 0.03716177437566017, + "acc_norm": 0.2708333333333333, + "acc_norm_stderr": 0.03716177437566017 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.2, + "acc_stderr": 0.040201512610368445, + "acc_norm": 0.2, + "acc_norm_stderr": 0.040201512610368445 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.3179190751445087, + "acc_stderr": 0.025070713719153186, + "acc_norm": 0.3179190751445087, + "acc_norm_stderr": 0.025070713719153186 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.26993865030674846, + "acc_stderr": 0.034878251684978906, + "acc_norm": 0.26993865030674846, + "acc_norm_stderr": 0.034878251684978906 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3425925925925926, + "acc_stderr": 0.02640614597362566, + "acc_norm": 0.3425925925925926, + "acc_norm_stderr": 0.02640614597362566 + }, + 
"harness|ko_mmlu_college_mathematics|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.3005181347150259, + "acc_stderr": 0.033088185944157494, + "acc_norm": 0.3005181347150259, + "acc_norm_stderr": 0.033088185944157494 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.040969851398436716, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.040969851398436716 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.29357798165137616, + "acc_stderr": 0.019525151122639667, + "acc_norm": 0.29357798165137616, + "acc_norm_stderr": 0.019525151122639667 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.21428571428571427, + "acc_stderr": 0.03670066451047181, + "acc_norm": 0.21428571428571427, + "acc_norm_stderr": 0.03670066451047181 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3300653594771242, + "acc_stderr": 0.026925654653615693, + "acc_norm": 0.3300653594771242, + "acc_norm_stderr": 0.026925654653615693 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.4214876033057851, + "acc_stderr": 0.045077322787750944, + "acc_norm": 0.4214876033057851, + "acc_norm_stderr": 0.045077322787750944 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.19078947368421054, + "acc_stderr": 0.031975658210325, + "acc_norm": 0.19078947368421054, + "acc_norm_stderr": 0.031975658210325 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.28921568627450983, + "acc_stderr": 0.018342529845275908, + "acc_norm": 0.28921568627450983, + "acc_norm_stderr": 0.018342529845275908 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.25886524822695034, + "acc_stderr": 0.026129572527180848, + "acc_norm": 
0.25886524822695034, + "acc_norm_stderr": 0.026129572527180848 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.24107142857142858, + "acc_stderr": 0.04059867246952687, + "acc_norm": 0.24107142857142858, + "acc_norm_stderr": 0.04059867246952687 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.22685185185185186, + "acc_stderr": 0.028561650102422273, + "acc_norm": 0.22685185185185186, + "acc_norm_stderr": 0.028561650102422273 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23798882681564246, + "acc_stderr": 0.014242630070574892, + "acc_norm": 0.23798882681564246, + "acc_norm_stderr": 0.014242630070574892 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939098, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939098 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.30514705882352944, + "acc_stderr": 0.027971541370170605, + "acc_norm": 0.30514705882352944, + "acc_norm_stderr": 0.027971541370170605 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.39183673469387753, + "acc_stderr": 0.03125127591089165, + "acc_norm": 0.39183673469387753, + "acc_norm_stderr": 0.03125127591089165 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.3459915611814346, + "acc_stderr": 0.03096481058878671, + "acc_norm": 0.3459915611814346, + "acc_norm_stderr": 0.03096481058878671 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.273142112125163, + "acc_stderr": 0.01138015056783041, + "acc_norm": 0.273142112125163, + "acc_norm_stderr": 0.01138015056783041 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.3235294117647059, + "acc_stderr": 0.03283472056108567, + "acc_norm": 0.3235294117647059, + "acc_norm_stderr": 0.03283472056108567 + }, + 
"harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.296969696969697, + "acc_stderr": 0.03567969772268048, + "acc_norm": 0.296969696969697, + "acc_norm_stderr": 0.03567969772268048 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.27050183598531213, + "mc1_stderr": 0.015550778332842885, + "mc2": 0.4212326635036667, + "mc2_stderr": 0.015192123492522393 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3069657615112161, + "acc_stderr": 0.01585758809536281, + "acc_norm": 0.4025974025974026, + "acc_norm_stderr": 0.01686102048640779 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + 
"harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "lcw99/llama2-ko-chang-instruct-chat", + "model_sha": "eaba470f33eb377cb27696dbc1f9a76fc03d4fe3", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/lcw99/llama2-ko-chang-instruct-chat/result_2023-11-06 22:33:48.json b/lcw99/llama2-ko-chang-instruct-chat/result_2023-11-06 22:33:48.json new file mode 100644 index 0000000000000000000000000000000000000000..7e2beca96f83f85257f61e54784f5a42d265c647 --- /dev/null +++ b/lcw99/llama2-ko-chang-instruct-chat/result_2023-11-06 22:33:48.json @@ -0,0 +1,444 @@ +{ + "results": { + 
"harness|ko_arc_challenge|25": { + "acc": 0.3447098976109215, + "acc_stderr": 0.013888816286782114, + "acc_norm": 0.39419795221843, + "acc_norm_stderr": 0.014280522667467327 + }, + "harness|ko_hellaswag|10": { + "acc": 0.39852619000199163, + "acc_stderr": 0.004885942040894556, + "acc_norm": 0.5248954391555467, + "acc_norm_stderr": 0.0049835924109341715 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.38596491228070173, + "acc_stderr": 0.03733756969066164, + "acc_norm": 0.38596491228070173, + "acc_norm_stderr": 0.03733756969066164 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.23300970873786409, + "acc_stderr": 0.04185832598928315, + "acc_norm": 0.23300970873786409, + "acc_norm_stderr": 0.04185832598928315 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.37292464878671777, + "acc_stderr": 0.017292868269453924, + "acc_norm": 0.37292464878671777, + "acc_norm_stderr": 0.017292868269453924 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.362962962962963, + "acc_stderr": 0.04153948404742399, + "acc_norm": 0.362962962962963, + "acc_norm_stderr": 0.04153948404742399 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.225531914893617, + "acc_stderr": 0.02732107841738753, + "acc_norm": 0.225531914893617, + "acc_norm_stderr": 0.02732107841738753 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.21686746987951808, + "acc_stderr": 0.03208284450356365, + "acc_norm": 0.21686746987951808, + "acc_norm_stderr": 0.03208284450356365 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.33762057877813506, + "acc_stderr": 0.026858825879488558, + "acc_norm": 0.33762057877813506, + "acc_norm_stderr": 0.026858825879488558 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.33183856502242154, + "acc_stderr": 0.03160295143776679, + "acc_norm": 0.33183856502242154, + "acc_norm_stderr": 0.03160295143776679 + 
}, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.35877862595419846, + "acc_stderr": 0.04206739313864908, + "acc_norm": 0.35877862595419846, + "acc_norm_stderr": 0.04206739313864908 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.2828282828282828, + "acc_stderr": 0.03208779558786751, + "acc_norm": 0.2828282828282828, + "acc_norm_stderr": 0.03208779558786751 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3586206896551724, + "acc_stderr": 0.039966295748767186, + "acc_norm": 0.3586206896551724, + "acc_norm_stderr": 0.039966295748767186 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.04023382273617747, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.04023382273617747 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.2605042016806723, + "acc_stderr": 0.028510251512341926, + "acc_norm": 0.2605042016806723, + "acc_norm_stderr": 0.028510251512341926 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.20256410256410257, + "acc_stderr": 0.020377660970371372, + "acc_norm": 0.20256410256410257, + "acc_norm_stderr": 0.020377660970371372 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036846, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036846 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.044531975073749834, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.044531975073749834 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.22660098522167488, + "acc_stderr": 0.029454863835292982, + "acc_norm": 0.22660098522167488, + 
"acc_norm_stderr": 0.029454863835292982 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.267741935483871, + "acc_stderr": 0.025189006660212385, + "acc_norm": 0.267741935483871, + "acc_norm_stderr": 0.025189006660212385 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.38461538461538464, + "acc_stderr": 0.03187195347942466, + "acc_norm": 0.38461538461538464, + "acc_norm_stderr": 0.03187195347942466 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.26037735849056604, + "acc_stderr": 0.02700876609070809, + "acc_norm": 0.26037735849056604, + "acc_norm_stderr": 0.02700876609070809 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2909090909090909, + "acc_stderr": 0.04350271442923243, + "acc_norm": 0.2909090909090909, + "acc_norm_stderr": 0.04350271442923243 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24074074074074073, + "acc_stderr": 0.026067159222275794, + "acc_norm": 0.24074074074074073, + "acc_norm_stderr": 0.026067159222275794 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2582781456953642, + "acc_stderr": 0.035737053147634576, + "acc_norm": 0.2582781456953642, + "acc_norm_stderr": 0.035737053147634576 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.35323383084577115, + "acc_stderr": 0.03379790611796776, + "acc_norm": 0.35323383084577115, + "acc_norm_stderr": 0.03379790611796776 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.26011560693641617, + "acc_stderr": 0.033450369167889904, + "acc_norm": 0.26011560693641617, + "acc_norm_stderr": 0.033450369167889904 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.20634920634920634, + "acc_stderr": 0.020842290930114662, + "acc_norm": 0.20634920634920634, + "acc_norm_stderr": 0.020842290930114662 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2708333333333333, + "acc_stderr": 0.03716177437566017, + "acc_norm": 0.2708333333333333, + "acc_norm_stderr": 0.03716177437566017 + }, + "harness|ko_mmlu_college_chemistry|5": { + 
"acc": 0.2, + "acc_stderr": 0.040201512610368445, + "acc_norm": 0.2, + "acc_norm_stderr": 0.040201512610368445 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.3179190751445087, + "acc_stderr": 0.025070713719153186, + "acc_norm": 0.3179190751445087, + "acc_norm_stderr": 0.025070713719153186 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.26993865030674846, + "acc_stderr": 0.034878251684978906, + "acc_norm": 0.26993865030674846, + "acc_norm_stderr": 0.034878251684978906 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3425925925925926, + "acc_stderr": 0.02640614597362566, + "acc_norm": 0.3425925925925926, + "acc_norm_stderr": 0.02640614597362566 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.3005181347150259, + "acc_stderr": 0.033088185944157494, + "acc_norm": 0.3005181347150259, + "acc_norm_stderr": 0.033088185944157494 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.040969851398436716, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.040969851398436716 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.29357798165137616, + "acc_stderr": 0.019525151122639667, + "acc_norm": 0.29357798165137616, + "acc_norm_stderr": 0.019525151122639667 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.21428571428571427, + "acc_stderr": 0.03670066451047181, + "acc_norm": 0.21428571428571427, + "acc_norm_stderr": 0.03670066451047181 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3300653594771242, + "acc_stderr": 0.026925654653615693, + "acc_norm": 0.3300653594771242, + "acc_norm_stderr": 0.026925654653615693 + }, + 
"harness|ko_mmlu_business_ethics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.4214876033057851, + "acc_stderr": 0.045077322787750944, + "acc_norm": 0.4214876033057851, + "acc_norm_stderr": 0.045077322787750944 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.19078947368421054, + "acc_stderr": 0.031975658210325, + "acc_norm": 0.19078947368421054, + "acc_norm_stderr": 0.031975658210325 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.28921568627450983, + "acc_stderr": 0.018342529845275908, + "acc_norm": 0.28921568627450983, + "acc_norm_stderr": 0.018342529845275908 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.25886524822695034, + "acc_stderr": 0.026129572527180848, + "acc_norm": 0.25886524822695034, + "acc_norm_stderr": 0.026129572527180848 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.24107142857142858, + "acc_stderr": 0.04059867246952687, + "acc_norm": 0.24107142857142858, + "acc_norm_stderr": 0.04059867246952687 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.22685185185185186, + "acc_stderr": 0.028561650102422273, + "acc_norm": 0.22685185185185186, + "acc_norm_stderr": 0.028561650102422273 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23798882681564246, + "acc_stderr": 0.014242630070574892, + "acc_norm": 0.23798882681564246, + "acc_norm_stderr": 0.014242630070574892 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939098, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939098 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.30514705882352944, + "acc_stderr": 0.027971541370170605, + "acc_norm": 0.30514705882352944, + 
"acc_norm_stderr": 0.027971541370170605 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.39183673469387753, + "acc_stderr": 0.03125127591089165, + "acc_norm": 0.39183673469387753, + "acc_norm_stderr": 0.03125127591089165 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.3459915611814346, + "acc_stderr": 0.03096481058878671, + "acc_norm": 0.3459915611814346, + "acc_norm_stderr": 0.03096481058878671 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.273142112125163, + "acc_stderr": 0.01138015056783041, + "acc_norm": 0.273142112125163, + "acc_norm_stderr": 0.01138015056783041 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.3235294117647059, + "acc_stderr": 0.03283472056108567, + "acc_norm": 0.3235294117647059, + "acc_norm_stderr": 0.03283472056108567 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.296969696969697, + "acc_stderr": 0.03567969772268048, + "acc_norm": 0.296969696969697, + "acc_norm_stderr": 0.03567969772268048 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.27050183598531213, + "mc1_stderr": 0.015550778332842885, + "mc2": 0.4212326635036667, + "mc2_stderr": 0.015192123492522393 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3069657615112161, + "acc_stderr": 0.01585758809536281, + "acc_norm": 0.4025974025974026, + "acc_norm_stderr": 0.01686102048640779 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + 
"harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + 
"harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "lcw99/llama2-ko-chang-instruct-chat", + "model_sha": "034f986b57b4746cbc6332ac14ff7f0041b66ba3", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/lcw99/polyglot-ko-12.8b-chang-instruct-chat/result_2023-09-27 08:17:24.json b/lcw99/polyglot-ko-12.8b-chang-instruct-chat/result_2023-09-27 08:17:24.json new file mode 100644 index 0000000000000000000000000000000000000000..be5563e1e0d50ee09531b351e3d559de2ffa8ecd --- /dev/null +++ b/lcw99/polyglot-ko-12.8b-chang-instruct-chat/result_2023-09-27 08:17:24.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2986348122866894, + "acc_stderr": 0.013374078615068756, + "acc_norm": 0.34897610921501704, + "acc_norm_stderr": 0.013928933461382497 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4010157339175463, + "acc_stderr": 0.004891025533633027, + "acc_norm": 0.527185819557857, + "acc_norm_stderr": 0.004982400368939667 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.30994152046783624, + "acc_stderr": 0.03546976959393161, + "acc_norm": 0.30994152046783624, + "acc_norm_stderr": 0.03546976959393161 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.18446601941747573, + "acc_stderr": 0.03840423627288276, + "acc_norm": 0.18446601941747573, + "acc_norm_stderr": 0.03840423627288276 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.26947637292464877, + "acc_stderr": 0.01586624307321506, + "acc_norm": 0.26947637292464877, + "acc_norm_stderr": 0.01586624307321506 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34074074074074073, + "acc_stderr": 0.040943762699967946, + "acc_norm": 0.34074074074074073, + "acc_norm_stderr": 0.040943762699967946 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 
0.04688261722621503, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621503 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.24680851063829787, + "acc_stderr": 0.0281854413012341, + "acc_norm": 0.24680851063829787, + "acc_norm_stderr": 0.0281854413012341 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.24096385542168675, + "acc_stderr": 0.033293941190735296, + "acc_norm": 0.24096385542168675, + "acc_norm_stderr": 0.033293941190735296 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3183279742765273, + "acc_stderr": 0.026457225067811025, + "acc_norm": 0.3183279742765273, + "acc_norm_stderr": 0.026457225067811025 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.19730941704035873, + "acc_stderr": 0.02670985334496796, + "acc_norm": 0.19730941704035873, + "acc_norm_stderr": 0.02670985334496796 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3816793893129771, + "acc_stderr": 0.0426073515764456, + "acc_norm": 0.3816793893129771, + "acc_norm_stderr": 0.0426073515764456 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.25252525252525254, + "acc_stderr": 0.030954055470365907, + "acc_norm": 0.25252525252525254, + "acc_norm_stderr": 0.030954055470365907 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2896551724137931, + "acc_stderr": 0.03780019230438015, + "acc_norm": 0.2896551724137931, + "acc_norm_stderr": 0.03780019230438015 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.04220773659171451, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.04220773659171451 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.2184873949579832, + "acc_stderr": 0.02684151432295893, + "acc_norm": 0.2184873949579832, + "acc_norm_stderr": 0.02684151432295893 + }, + 
"harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2128205128205128, + "acc_stderr": 0.020752423722128002, + "acc_norm": 0.2128205128205128, + "acc_norm_stderr": 0.020752423722128002 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.04236511258094631, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.04236511258094631 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2955665024630542, + "acc_stderr": 0.032104944337514575, + "acc_norm": 0.2955665024630542, + "acc_norm_stderr": 0.032104944337514575 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.267741935483871, + "acc_stderr": 0.025189006660212385, + "acc_norm": 0.267741935483871, + "acc_norm_stderr": 0.025189006660212385 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2606837606837607, + "acc_stderr": 0.02876034895652341, + "acc_norm": 0.2606837606837607, + "acc_norm_stderr": 0.02876034895652341 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.22264150943396227, + "acc_stderr": 0.025604233470899105, + "acc_norm": 0.22264150943396227, + "acc_norm_stderr": 0.025604233470899105 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.24545454545454545, + "acc_stderr": 0.04122066502878285, + "acc_norm": 0.24545454545454545, + "acc_norm_stderr": 0.04122066502878285 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25555555555555554, + "acc_stderr": 0.02659393910184408, + "acc_norm": 0.25555555555555554, + "acc_norm_stderr": 0.02659393910184408 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.03710185726119995, + "acc_norm": 0.2913907284768212, + 
"acc_norm_stderr": 0.03710185726119995 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.20398009950248755, + "acc_stderr": 0.02849317624532609, + "acc_norm": 0.20398009950248755, + "acc_norm_stderr": 0.02849317624532609 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.23121387283236994, + "acc_stderr": 0.03214737302029469, + "acc_norm": 0.23121387283236994, + "acc_norm_stderr": 0.03214737302029469 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.023266512213730575, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.023266512213730575 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.034765901043041336, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.034765901043041336 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.16, + "acc_stderr": 0.036845294917747094, + "acc_norm": 0.16, + "acc_norm_stderr": 0.036845294917747094 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.26011560693641617, + "acc_stderr": 0.02361867831006937, + "acc_norm": 0.26011560693641617, + "acc_norm_stderr": 0.02361867831006937 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3067484662576687, + "acc_stderr": 0.036230899157241474, + "acc_norm": 0.3067484662576687, + "acc_norm_stderr": 0.036230899157241474 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2839506172839506, + "acc_stderr": 0.02508947852376513, + "acc_norm": 0.2839506172839506, + "acc_norm_stderr": 0.02508947852376513 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.23834196891191708, + "acc_stderr": 0.030748905363909895, + "acc_norm": 
0.23834196891191708, + "acc_norm_stderr": 0.030748905363909895 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.03999423879281336, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.03999423879281336 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.24587155963302754, + "acc_stderr": 0.018461940968708457, + "acc_norm": 0.24587155963302754, + "acc_norm_stderr": 0.018461940968708457 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.18253968253968253, + "acc_stderr": 0.03455071019102148, + "acc_norm": 0.18253968253968253, + "acc_norm_stderr": 0.03455071019102148 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.25163398692810457, + "acc_stderr": 0.024848018263875195, + "acc_norm": 0.25163398692810457, + "acc_norm_stderr": 0.024848018263875195 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.33884297520661155, + "acc_stderr": 0.04320767807536669, + "acc_norm": 0.33884297520661155, + "acc_norm_stderr": 0.04320767807536669 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.2565789473684211, + "acc_stderr": 0.0355418036802569, + "acc_norm": 0.2565789473684211, + "acc_norm_stderr": 0.0355418036802569 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.25980392156862747, + "acc_stderr": 0.017740899509177788, + "acc_norm": 0.25980392156862747, + "acc_norm_stderr": 0.017740899509177788 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.19148936170212766, + "acc_stderr": 0.023472645247949425, + "acc_norm": 0.19148936170212766, + "acc_norm_stderr": 0.023472645247949425 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3392857142857143, + "acc_stderr": 0.0449394906861354, + "acc_norm": 0.3392857142857143, + "acc_norm_stderr": 0.0449394906861354 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 
0.20833333333333334, + "acc_stderr": 0.027696910713093936, + "acc_norm": 0.20833333333333334, + "acc_norm_stderr": 0.027696910713093936 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24581005586592178, + "acc_stderr": 0.014400296429225606, + "acc_norm": 0.24581005586592178, + "acc_norm_stderr": 0.014400296429225606 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.2757352941176471, + "acc_stderr": 0.027146271936625162, + "acc_norm": 0.2757352941176471, + "acc_norm_stderr": 0.027146271936625162 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.24489795918367346, + "acc_stderr": 0.027529637440174934, + "acc_norm": 0.24489795918367346, + "acc_norm_stderr": 0.027529637440174934 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.28270042194092826, + "acc_stderr": 0.029312814153955914, + "acc_norm": 0.28270042194092826, + "acc_norm_stderr": 0.029312814153955914 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2542372881355932, + "acc_stderr": 0.011121129007840664, + "acc_norm": 0.2542372881355932, + "acc_norm_stderr": 0.011121129007840664 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.03019028245350195, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.03019028245350195 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03225078108306289, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03225078108306289 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2839657282741738, + "mc1_stderr": 0.01578537085839671, + "mc2": 0.4444330897605926, + "mc2_stderr": 
0.015483222855074748 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2798110979929162, + "acc_stderr": 0.015433715795427764, + "acc_norm": 0.35182998819362454, + "acc_norm_stderr": 0.016418206451218057 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 
1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "lcw99/polyglot-ko-12.8b-chang-instruct-chat", + "model_sha": "a16de096eb135e66b90314e5ab84116c9f0f9d1b", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/lcw99/zephykor-ko-7b-chang/result_2023-11-01 10:28:02.json b/lcw99/zephykor-ko-7b-chang/result_2023-11-01 10:28:02.json new file mode 100644 index 0000000000000000000000000000000000000000..c4c55b2fe7b77056347782457118eb319a418885 --- /dev/null +++ b/lcw99/zephykor-ko-7b-chang/result_2023-11-01 10:28:02.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.32764505119453924, + "acc_stderr": 0.013715847940719344, + "acc_norm": 0.3728668941979522, + "acc_norm_stderr": 0.014131176760131158 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3679545907189803, + "acc_stderr": 0.004812633280078263, + "acc_norm": 0.48376817367058356, + "acc_norm_stderr": 0.004987151381091178 + }, + 
"harness|ko_mmlu_world_religions|5": { + "acc": 0.3508771929824561, + "acc_stderr": 0.036602988340491624, + "acc_norm": 0.3508771929824561, + "acc_norm_stderr": 0.036602988340491624 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4563106796116505, + "acc_stderr": 0.049318019942204146, + "acc_norm": 0.4563106796116505, + "acc_norm_stderr": 0.049318019942204146 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4648786717752235, + "acc_stderr": 0.017835798806290642, + "acc_norm": 0.4648786717752235, + "acc_norm_stderr": 0.017835798806290642 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4222222222222222, + "acc_stderr": 0.042667634040995814, + "acc_norm": 0.4222222222222222, + "acc_norm_stderr": 0.042667634040995814 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621503, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621503 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3829787234042553, + "acc_stderr": 0.03177821250236923, + "acc_norm": 0.3829787234042553, + "acc_norm_stderr": 0.03177821250236923 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3072289156626506, + "acc_stderr": 0.035915667978246635, + "acc_norm": 0.3072289156626506, + "acc_norm_stderr": 0.035915667978246635 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4180064308681672, + "acc_stderr": 0.028013651891995072, + "acc_norm": 0.4180064308681672, + "acc_norm_stderr": 0.028013651891995072 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4439461883408072, + "acc_stderr": 0.03334625674242728, + "acc_norm": 0.4439461883408072, + "acc_norm_stderr": 0.03334625674242728 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3511450381679389, + "acc_stderr": 0.04186445163013751, + "acc_norm": 0.3511450381679389, + "acc_norm_stderr": 0.04186445163013751 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + 
"harness|ko_mmlu_high_school_geography|5": { + "acc": 0.4292929292929293, + "acc_stderr": 0.035265527246011986, + "acc_norm": 0.4292929292929293, + "acc_norm_stderr": 0.035265527246011986 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2827586206896552, + "acc_stderr": 0.03752833958003336, + "acc_norm": 0.2827586206896552, + "acc_norm_stderr": 0.03752833958003336 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.04440521906179328, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.04440521906179328 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.37815126050420167, + "acc_stderr": 0.031499305777849054, + "acc_norm": 0.37815126050420167, + "acc_norm_stderr": 0.031499305777849054 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3487179487179487, + "acc_stderr": 0.024162780284017717, + "acc_norm": 0.3487179487179487, + "acc_norm_stderr": 0.024162780284017717 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4351851851851852, + "acc_stderr": 0.04792898170907062, + "acc_norm": 0.4351851851851852, + "acc_norm_stderr": 0.04792898170907062 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3103448275862069, + "acc_stderr": 0.03255086769970103, + "acc_norm": 0.3103448275862069, + "acc_norm_stderr": 0.03255086769970103 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.38387096774193546, + "acc_stderr": 0.027666182075539635, + "acc_norm": 0.38387096774193546, + "acc_norm_stderr": 0.027666182075539635 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5641025641025641, + "acc_stderr": 0.03248577511578401, + "acc_norm": 
0.5641025641025641, + "acc_norm_stderr": 0.03248577511578401 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.37735849056603776, + "acc_stderr": 0.029832808114796005, + "acc_norm": 0.37735849056603776, + "acc_norm_stderr": 0.029832808114796005 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4636363636363636, + "acc_stderr": 0.04776449162396197, + "acc_norm": 0.4636363636363636, + "acc_norm_stderr": 0.04776449162396197 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3074074074074074, + "acc_stderr": 0.028133252578815632, + "acc_norm": 0.3074074074074074, + "acc_norm_stderr": 0.028133252578815632 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3443708609271523, + "acc_stderr": 0.038796870240733264, + "acc_norm": 0.3443708609271523, + "acc_norm_stderr": 0.038796870240733264 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.472636815920398, + "acc_stderr": 0.03530235517334682, + "acc_norm": 0.472636815920398, + "acc_norm_stderr": 0.03530235517334682 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.37572254335260113, + "acc_stderr": 0.03692820767264867, + "acc_norm": 0.37572254335260113, + "acc_norm_stderr": 0.03692820767264867 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30687830687830686, + "acc_stderr": 0.023752928712112136, + "acc_norm": 0.30687830687830686, + "acc_norm_stderr": 0.023752928712112136 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3472222222222222, + "acc_stderr": 0.039812405437178615, + "acc_norm": 0.3472222222222222, + "acc_norm_stderr": 0.039812405437178615 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.19, + "acc_stderr": 0.03942772444036624, + "acc_norm": 0.19, + "acc_norm_stderr": 0.03942772444036624 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.3786127167630058, + 
"acc_stderr": 0.026113749361310338, + "acc_norm": 0.3786127167630058, + "acc_norm_stderr": 0.026113749361310338 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.34355828220858897, + "acc_stderr": 0.037311335196738925, + "acc_norm": 0.34355828220858897, + "acc_norm_stderr": 0.037311335196738925 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.39197530864197533, + "acc_stderr": 0.027163686038271226, + "acc_norm": 0.39197530864197533, + "acc_norm_stderr": 0.027163686038271226 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.41450777202072536, + "acc_stderr": 0.03555300319557673, + "acc_norm": 0.41450777202072536, + "acc_norm_stderr": 0.03555300319557673 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2894736842105263, + "acc_stderr": 0.04266339443159394, + "acc_norm": 0.2894736842105263, + "acc_norm_stderr": 0.04266339443159394 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.4, + "acc_stderr": 0.02100420126042007, + "acc_norm": 0.4, + "acc_norm_stderr": 0.02100420126042007 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.29365079365079366, + "acc_stderr": 0.04073524322147126, + "acc_norm": 0.29365079365079366, + "acc_norm_stderr": 0.04073524322147126 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.02843109544417664, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.02843109544417664 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.4628099173553719, + "acc_stderr": 0.04551711196104218, + "acc_norm": 0.4628099173553719, + "acc_norm_stderr": 0.04551711196104218 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.32894736842105265, + 
"acc_stderr": 0.03823428969926606, + "acc_norm": 0.32894736842105265, + "acc_norm_stderr": 0.03823428969926606 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.34477124183006536, + "acc_stderr": 0.019228322018696644, + "acc_norm": 0.34477124183006536, + "acc_norm_stderr": 0.019228322018696644 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.30141843971631205, + "acc_stderr": 0.02737412888263115, + "acc_norm": 0.30141843971631205, + "acc_norm_stderr": 0.02737412888263115 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25, + "acc_stderr": 0.04109974682633932, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04109974682633932 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.25, + "acc_stderr": 0.029531221160930918, + "acc_norm": 0.25, + "acc_norm_stderr": 0.029531221160930918 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2245810055865922, + "acc_stderr": 0.013956803666544636, + "acc_norm": 0.2245810055865922, + "acc_norm_stderr": 0.013956803666544636 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.35661764705882354, + "acc_stderr": 0.029097209568411966, + "acc_norm": 0.35661764705882354, + "acc_norm_stderr": 0.029097209568411966 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3469387755102041, + "acc_stderr": 0.030472526026726503, + "acc_norm": 0.3469387755102041, + "acc_norm_stderr": 0.030472526026726503 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5611814345991561, + "acc_stderr": 0.032302649315470375, + "acc_norm": 0.5611814345991561, + "acc_norm_stderr": 0.032302649315470375 + }, + "harness|ko_mmlu_professional_law|5": { + 
"acc": 0.30834419817470665, + "acc_stderr": 0.011794833789715322, + "acc_norm": 0.30834419817470665, + "acc_norm_stderr": 0.011794833789715322 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.45098039215686275, + "acc_stderr": 0.03492406104163614, + "acc_norm": 0.45098039215686275, + "acc_norm_stderr": 0.03492406104163614 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4727272727272727, + "acc_stderr": 0.03898531605579419, + "acc_norm": 0.4727272727272727, + "acc_norm_stderr": 0.03898531605579419 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.30599755201958384, + "mc1_stderr": 0.016132229728155062, + "mc2": 0.49077303683687423, + "mc2_stderr": 0.015584509571305388 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.282172373081464, + "acc_stderr": 0.01547327158398843, + "acc_norm": 0.37662337662337664, + "acc_norm_stderr": 0.016658799874051968 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + 
"harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "lcw99/zephykor-ko-7b-chang", + "model_sha": "417731f0f84b698065589bb915528f30040cd23b", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/lcw99/zephykor-ko-beta-7b-chang/result_2023-11-27 
00:57:43.json b/lcw99/zephykor-ko-beta-7b-chang/result_2023-11-27 00:57:43.json new file mode 100644 index 0000000000000000000000000000000000000000..33860b54eb668b74d04cfd01c372cac31275559e --- /dev/null +++ b/lcw99/zephykor-ko-beta-7b-chang/result_2023-11-27 00:57:43.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.31399317406143346, + "acc_stderr": 0.013562691224726288, + "acc_norm": 0.37372013651877134, + "acc_norm_stderr": 0.014137708601759091 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3684524995020912, + "acc_stderr": 0.004813991069808272, + "acc_norm": 0.4765982871937861, + "acc_norm_stderr": 0.00498431320579144 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.391812865497076, + "acc_stderr": 0.037439798259264, + "acc_norm": 0.391812865497076, + "acc_norm_stderr": 0.037439798259264 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.3883495145631068, + "acc_stderr": 0.0482572933735639, + "acc_norm": 0.3883495145631068, + "acc_norm_stderr": 0.0482572933735639 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.48020434227330777, + "acc_stderr": 0.01786594482729162, + "acc_norm": 0.48020434227330777, + "acc_norm_stderr": 0.01786594482729162 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3925925925925926, + "acc_stderr": 0.04218506215368878, + "acc_norm": 0.3925925925925926, + "acc_norm_stderr": 0.04218506215368878 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3617021276595745, + "acc_stderr": 0.03141082197596239, + "acc_norm": 0.3617021276595745, + "acc_norm_stderr": 0.03141082197596239 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3253012048192771, + "acc_stderr": 0.03647168523683227, + "acc_norm": 0.3253012048192771, + "acc_norm_stderr": 0.03647168523683227 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3954983922829582, + 
"acc_stderr": 0.027770918531427834, + "acc_norm": 0.3954983922829582, + "acc_norm_stderr": 0.027770918531427834 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4618834080717489, + "acc_stderr": 0.03346015011973228, + "acc_norm": 0.4618834080717489, + "acc_norm_stderr": 0.03346015011973228 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3969465648854962, + "acc_stderr": 0.04291135671009224, + "acc_norm": 0.3969465648854962, + "acc_norm_stderr": 0.04291135671009224 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3838383838383838, + "acc_stderr": 0.034648816750163375, + "acc_norm": 0.3838383838383838, + "acc_norm_stderr": 0.034648816750163375 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.36551724137931035, + "acc_stderr": 0.04013124195424386, + "acc_norm": 0.36551724137931035, + "acc_norm_stderr": 0.04013124195424386 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.041583075330832865, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.041583075330832865 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.27310924369747897, + "acc_stderr": 0.028942004040998164, + "acc_norm": 0.27310924369747897, + "acc_norm_stderr": 0.028942004040998164 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3, + "acc_stderr": 0.0232345810884285, + "acc_norm": 0.3, + "acc_norm_stderr": 0.0232345810884285 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.52, + "acc_stderr": 0.05021167315686779, + "acc_norm": 0.52, + "acc_norm_stderr": 0.05021167315686779 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4351851851851852, + "acc_stderr": 
0.04792898170907062, + "acc_norm": 0.4351851851851852, + "acc_norm_stderr": 0.04792898170907062 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3251231527093596, + "acc_stderr": 0.032957975663112704, + "acc_norm": 0.3251231527093596, + "acc_norm_stderr": 0.032957975663112704 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.36129032258064514, + "acc_stderr": 0.027327548447957532, + "acc_norm": 0.36129032258064514, + "acc_norm_stderr": 0.027327548447957532 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5128205128205128, + "acc_stderr": 0.032745319388423504, + "acc_norm": 0.5128205128205128, + "acc_norm_stderr": 0.032745319388423504 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3283018867924528, + "acc_stderr": 0.02890159361241178, + "acc_norm": 0.3283018867924528, + "acc_norm_stderr": 0.02890159361241178 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.35454545454545455, + "acc_stderr": 0.04582004841505415, + "acc_norm": 0.35454545454545455, + "acc_norm_stderr": 0.04582004841505415 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.23703703703703705, + "acc_stderr": 0.02592887613276612, + "acc_norm": 0.23703703703703705, + "acc_norm_stderr": 0.02592887613276612 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.25165562913907286, + "acc_stderr": 0.035433042343899844, + "acc_norm": 0.25165562913907286, + "acc_norm_stderr": 0.035433042343899844 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.40298507462686567, + "acc_stderr": 0.034683432951111266, + "acc_norm": 0.40298507462686567, + "acc_norm_stderr": 0.034683432951111266 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3236994219653179, + "acc_stderr": 0.0356760379963917, + "acc_norm": 0.3236994219653179, + "acc_norm_stderr": 0.0356760379963917 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30423280423280424, + "acc_stderr": 0.023695415009463084, + "acc_norm": 0.30423280423280424, + "acc_norm_stderr": 
0.023695415009463084 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2847222222222222, + "acc_stderr": 0.037738099906869334, + "acc_norm": 0.2847222222222222, + "acc_norm_stderr": 0.037738099906869334 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.22, + "acc_stderr": 0.041633319989322695, + "acc_norm": 0.22, + "acc_norm_stderr": 0.041633319989322695 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.3786127167630058, + "acc_stderr": 0.02611374936131034, + "acc_norm": 0.3786127167630058, + "acc_norm_stderr": 0.02611374936131034 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4110429447852761, + "acc_stderr": 0.038656978537853624, + "acc_norm": 0.4110429447852761, + "acc_norm_stderr": 0.038656978537853624 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.345679012345679, + "acc_stderr": 0.02646248777700187, + "acc_norm": 0.345679012345679, + "acc_norm_stderr": 0.02646248777700187 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.36787564766839376, + "acc_stderr": 0.034801756684660366, + "acc_norm": 0.36787564766839376, + "acc_norm_stderr": 0.034801756684660366 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2894736842105263, + "acc_stderr": 0.04266339443159394, + "acc_norm": 0.2894736842105263, + "acc_norm_stderr": 0.04266339443159394 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3871559633027523, + "acc_stderr": 0.02088423199264345, + "acc_norm": 0.3871559633027523, + "acc_norm_stderr": 0.02088423199264345 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.037184890068181146, + "acc_norm": 0.2222222222222222, + 
"acc_norm_stderr": 0.037184890068181146 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.33986928104575165, + "acc_stderr": 0.027121956071388852, + "acc_norm": 0.33986928104575165, + "acc_norm_stderr": 0.027121956071388852 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5702479338842975, + "acc_stderr": 0.04519082021319773, + "acc_norm": 0.5702479338842975, + "acc_norm_stderr": 0.04519082021319773 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.34868421052631576, + "acc_stderr": 0.038781398887976104, + "acc_norm": 0.34868421052631576, + "acc_norm_stderr": 0.038781398887976104 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.33169934640522875, + "acc_stderr": 0.01904748523936038, + "acc_norm": 0.33169934640522875, + "acc_norm_stderr": 0.01904748523936038 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2695035460992908, + "acc_stderr": 0.026469036818590624, + "acc_norm": 0.2695035460992908, + "acc_norm_stderr": 0.026469036818590624 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2767857142857143, + "acc_stderr": 0.04246624336697624, + "acc_norm": 0.2767857142857143, + "acc_norm_stderr": 0.04246624336697624 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.19907407407407407, + "acc_stderr": 0.027232298462690242, + "acc_norm": 0.19907407407407407, + "acc_norm_stderr": 0.027232298462690242 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.29832402234636873, + "acc_stderr": 0.015301840045129267, + "acc_norm": 0.29832402234636873, + "acc_norm_stderr": 0.015301840045129267 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.4, + "acc_stderr": 
0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.2426470588235294, + "acc_stderr": 0.02604066247420126, + "acc_norm": 0.2426470588235294, + "acc_norm_stderr": 0.02604066247420126 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2938775510204082, + "acc_stderr": 0.029162738410249772, + "acc_norm": 0.2938775510204082, + "acc_norm_stderr": 0.029162738410249772 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.48945147679324896, + "acc_stderr": 0.032539983791662855, + "acc_norm": 0.48945147679324896, + "acc_norm_stderr": 0.032539983791662855 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.29465449804432853, + "acc_stderr": 0.01164357676406955, + "acc_norm": 0.29465449804432853, + "acc_norm_stderr": 0.01164357676406955 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.35294117647058826, + "acc_stderr": 0.03354092437591519, + "acc_norm": 0.35294117647058826, + "acc_norm_stderr": 0.03354092437591519 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.47878787878787876, + "acc_stderr": 0.03900828913737302, + "acc_norm": 0.47878787878787876, + "acc_norm_stderr": 0.03900828913737302 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2839657282741738, + "mc1_stderr": 0.0157853708583967, + "mc2": 0.48693248002205786, + "mc2_stderr": 0.01610559804856284 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2514757969303424, + "acc_stderr": 0.014916462437232242, + "acc_norm": 0.3730814639905549, + "acc_norm_stderr": 0.016627318275137432 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + 
"harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 
1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "lcw99/zephykor-ko-beta-7b-chang", + "model_sha": "16733d9f8333702df52876b684c4927c73882b07", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/lcw99/zephykor-ko-beta-7b-chang/result_2023-12-02 09:12:17.json b/lcw99/zephykor-ko-beta-7b-chang/result_2023-12-02 09:12:17.json new file mode 100644 index 0000000000000000000000000000000000000000..178e487189aed7ac5bead2421e9cfb677c4354b9 --- /dev/null +++ b/lcw99/zephykor-ko-beta-7b-chang/result_2023-12-02 09:12:17.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3267918088737201, + "acc_stderr": 0.013706665975587333, + "acc_norm": 0.39419795221843, + "acc_norm_stderr": 0.01428052266746732 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3685520812587134, + "acc_stderr": 0.004814261966376846, + "acc_norm": 0.48665604461262696, + "acc_norm_stderr": 0.004988004122536506 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4502923976608187, + "acc_stderr": 0.038158273659132366, + "acc_norm": 0.4502923976608187, + "acc_norm_stderr": 0.038158273659132366 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4174757281553398, + "acc_stderr": 0.048828405482122375, + "acc_norm": 0.4174757281553398, + "acc_norm_stderr": 0.048828405482122375 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.49936143039591313, + "acc_stderr": 0.01787994891443167, + "acc_norm": 0.49936143039591313, + "acc_norm_stderr": 0.01787994891443167 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 
0.3851851851851852, + "acc_stderr": 0.042039210401562783, + "acc_norm": 0.3851851851851852, + "acc_norm_stderr": 0.042039210401562783 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.34893617021276596, + "acc_stderr": 0.03115852213135776, + "acc_norm": 0.34893617021276596, + "acc_norm_stderr": 0.03115852213135776 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4397590361445783, + "acc_stderr": 0.03864139923699122, + "acc_norm": 0.4397590361445783, + "acc_norm_stderr": 0.03864139923699122 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3890675241157556, + "acc_stderr": 0.027690337536485372, + "acc_norm": 0.3890675241157556, + "acc_norm_stderr": 0.027690337536485372 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.484304932735426, + "acc_stderr": 0.0335412657542081, + "acc_norm": 0.484304932735426, + "acc_norm_stderr": 0.0335412657542081 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3893129770992366, + "acc_stderr": 0.04276486542814591, + "acc_norm": 0.3893129770992366, + "acc_norm_stderr": 0.04276486542814591 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.42, + "acc_stderr": 0.04960449637488583, + "acc_norm": 0.42, + "acc_norm_stderr": 0.04960449637488583 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.41919191919191917, + "acc_stderr": 0.035155207286704175, + "acc_norm": 0.41919191919191917, + "acc_norm_stderr": 0.035155207286704175 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.33793103448275863, + "acc_stderr": 0.039417076320648906, + "acc_norm": 0.33793103448275863, + "acc_norm_stderr": 0.039417076320648906 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.04280105837364396, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.04280105837364396 + }, + 
"harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.31932773109243695, + "acc_stderr": 0.030283995525884396, + "acc_norm": 0.31932773109243695, + "acc_norm_stderr": 0.030283995525884396 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.33076923076923076, + "acc_stderr": 0.02385479568097114, + "acc_norm": 0.33076923076923076, + "acc_norm_stderr": 0.02385479568097114 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4537037037037037, + "acc_stderr": 0.048129173245368216, + "acc_norm": 0.4537037037037037, + "acc_norm_stderr": 0.048129173245368216 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.30049261083743845, + "acc_stderr": 0.03225799476233484, + "acc_norm": 0.30049261083743845, + "acc_norm_stderr": 0.03225799476233484 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3935483870967742, + "acc_stderr": 0.027791878753132267, + "acc_norm": 0.3935483870967742, + "acc_norm_stderr": 0.027791878753132267 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6025641025641025, + "acc_stderr": 0.03205953453789293, + "acc_norm": 0.6025641025641025, + "acc_norm_stderr": 0.03205953453789293 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3433962264150943, + "acc_stderr": 0.029224526469124792, + "acc_norm": 0.3433962264150943, + "acc_norm_stderr": 0.029224526469124792 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.38181818181818183, + "acc_stderr": 0.04653429807913509, + "acc_norm": 0.38181818181818183, + "acc_norm_stderr": 0.04653429807913509 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3037037037037037, + "acc_stderr": 0.028037929969114986, + "acc_norm": 
0.3037037037037037, + "acc_norm_stderr": 0.028037929969114986 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.25165562913907286, + "acc_stderr": 0.035433042343899844, + "acc_norm": 0.25165562913907286, + "acc_norm_stderr": 0.035433042343899844 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.4527363184079602, + "acc_stderr": 0.035197027175769155, + "acc_norm": 0.4527363184079602, + "acc_norm_stderr": 0.035197027175769155 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.31213872832369943, + "acc_stderr": 0.03533133389323657, + "acc_norm": 0.31213872832369943, + "acc_norm_stderr": 0.03533133389323657 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.022644212615525218, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.022644212615525218 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3472222222222222, + "acc_stderr": 0.039812405437178615, + "acc_norm": 0.3472222222222222, + "acc_norm_stderr": 0.039812405437178615 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.62, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.62, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.3179190751445087, + "acc_stderr": 0.025070713719153172, + "acc_norm": 0.3179190751445087, + "acc_norm_stderr": 0.025070713719153172 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3128834355828221, + "acc_stderr": 0.03642914578292406, + "acc_norm": 0.3128834355828221, + "acc_norm_stderr": 0.03642914578292406 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3765432098765432, + "acc_stderr": 0.02695934451874778, + "acc_norm": 0.3765432098765432, + "acc_norm_stderr": 0.02695934451874778 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, 
+ "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.41450777202072536, + "acc_stderr": 0.03555300319557673, + "acc_norm": 0.41450777202072536, + "acc_norm_stderr": 0.03555300319557673 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.04227054451232199, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.04227054451232199 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.4073394495412844, + "acc_stderr": 0.021065986244412898, + "acc_norm": 0.4073394495412844, + "acc_norm_stderr": 0.021065986244412898 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.24603174603174602, + "acc_stderr": 0.03852273364924315, + "acc_norm": 0.24603174603174602, + "acc_norm_stderr": 0.03852273364924315 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.39215686274509803, + "acc_stderr": 0.027956046165424516, + "acc_norm": 0.39215686274509803, + "acc_norm_stderr": 0.027956046165424516 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5371900826446281, + "acc_stderr": 0.04551711196104218, + "acc_norm": 0.5371900826446281, + "acc_norm_stderr": 0.04551711196104218 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.34210526315789475, + "acc_stderr": 0.03860731599316092, + "acc_norm": 0.34210526315789475, + "acc_norm_stderr": 0.03860731599316092 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3349673202614379, + "acc_stderr": 0.01909422816700031, + "acc_norm": 0.3349673202614379, + "acc_norm_stderr": 0.01909422816700031 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.29432624113475175, + "acc_stderr": 0.027187127011503782, + "acc_norm": 0.29432624113475175, + "acc_norm_stderr": 0.027187127011503782 + }, + "harness|ko_mmlu_machine_learning|5": { + 
"acc": 0.3392857142857143, + "acc_stderr": 0.0449394906861354, + "acc_norm": 0.3392857142857143, + "acc_norm_stderr": 0.0449394906861354 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.0305467452649532, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.0305467452649532 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24804469273743016, + "acc_stderr": 0.014444157808261462, + "acc_norm": 0.24804469273743016, + "acc_norm_stderr": 0.014444157808261462 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3088235294117647, + "acc_stderr": 0.02806499816704009, + "acc_norm": 0.3088235294117647, + "acc_norm_stderr": 0.02806499816704009 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4816326530612245, + "acc_stderr": 0.031987615467631264, + "acc_norm": 0.4816326530612245, + "acc_norm_stderr": 0.031987615467631264 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.540084388185654, + "acc_stderr": 0.03244246810187913, + "acc_norm": 0.540084388185654, + "acc_norm_stderr": 0.03244246810187913 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2966101694915254, + "acc_stderr": 0.011665946586082868, + "acc_norm": 0.2966101694915254, + "acc_norm_stderr": 0.011665946586082868 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.03484941514429231, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.03484941514429231 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.45454545454545453, + "acc_stderr": 0.03888176921674098, + "acc_norm": 0.45454545454545453, + 
"acc_norm_stderr": 0.03888176921674098 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2839657282741738, + "mc1_stderr": 0.015785370858396704, + "mc2": 0.4892217653375252, + "mc2_stderr": 0.016000110388200085 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.27390791027154665, + "acc_stderr": 0.015332499474791027, + "acc_norm": 0.42266824085005905, + "acc_norm_stderr": 0.0169835060795776 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + 
"harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "lcw99/zephykor-ko-beta-7b-chang", + "model_sha": "c5c706f4042ccbcd767c157d7046beef1b9f8493", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/lcw99/zephykor-ko-beta-7b-chang/result_2023-12-04 03:57:05.json b/lcw99/zephykor-ko-beta-7b-chang/result_2023-12-04 03:57:05.json new file mode 100644 index 0000000000000000000000000000000000000000..b1388374be64cbec7aa339edab9338c696dd8454 --- /dev/null +++ b/lcw99/zephykor-ko-beta-7b-chang/result_2023-12-04 03:57:05.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.32081911262798635, + "acc_stderr": 0.013640943091946528, + "acc_norm": 0.3984641638225256, + "acc_norm_stderr": 0.014306946052735569 + }, + 
"harness|ko_hellaswag|10": { + "acc": 0.3708424616610237, + "acc_stderr": 0.004820431839600025, + "acc_norm": 0.4785899223262298, + "acc_norm_stderr": 0.004985204766555068 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.47368421052631576, + "acc_stderr": 0.038295098689947286, + "acc_norm": 0.47368421052631576, + "acc_norm_stderr": 0.038295098689947286 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.49514563106796117, + "acc_stderr": 0.049505043821289195, + "acc_norm": 0.49514563106796117, + "acc_norm_stderr": 0.049505043821289195 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5019157088122606, + "acc_stderr": 0.01787983225902668, + "acc_norm": 0.5019157088122606, + "acc_norm_stderr": 0.01787983225902668 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4, + "acc_stderr": 0.04232073695151589, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04232073695151589 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.37446808510638296, + "acc_stderr": 0.031639106653672915, + "acc_norm": 0.37446808510638296, + "acc_norm_stderr": 0.031639106653672915 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4397590361445783, + "acc_stderr": 0.03864139923699121, + "acc_norm": 0.4397590361445783, + "acc_norm_stderr": 0.03864139923699121 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4212218649517685, + "acc_stderr": 0.028043399858210628, + "acc_norm": 0.4212218649517685, + "acc_norm_stderr": 0.028043399858210628 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.45739910313901344, + "acc_stderr": 0.03343577705583065, + "acc_norm": 0.45739910313901344, + "acc_norm_stderr": 0.03343577705583065 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4122137404580153, + "acc_stderr": 0.04317171194870255, + "acc_norm": 0.4122137404580153, + "acc_norm_stderr": 0.04317171194870255 + }, + 
"harness|ko_mmlu_medical_genetics|5": { + "acc": 0.42, + "acc_stderr": 0.04960449637488583, + "acc_norm": 0.42, + "acc_norm_stderr": 0.04960449637488583 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.46464646464646464, + "acc_stderr": 0.03553436368828061, + "acc_norm": 0.46464646464646464, + "acc_norm_stderr": 0.03553436368828061 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4206896551724138, + "acc_stderr": 0.0411391498118926, + "acc_norm": 0.4206896551724138, + "acc_norm_stderr": 0.0411391498118926 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.04280105837364395, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.04280105837364395 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3235294117647059, + "acc_stderr": 0.03038835355188685, + "acc_norm": 0.3235294117647059, + "acc_norm_stderr": 0.03038835355188685 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3769230769230769, + "acc_stderr": 0.024570975364225995, + "acc_norm": 0.3769230769230769, + "acc_norm_stderr": 0.024570975364225995 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.04793724854411019, + "acc_norm": 0.35, + "acc_norm_stderr": 0.04793724854411019 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4351851851851852, + "acc_stderr": 0.04792898170907061, + "acc_norm": 0.4351851851851852, + "acc_norm_stderr": 0.04792898170907061 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3399014778325123, + "acc_stderr": 0.03332769068410789, + "acc_norm": 0.3399014778325123, + "acc_norm_stderr": 0.03332769068410789 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4, + "acc_stderr": 0.02786932057166463, + "acc_norm": 0.4, + "acc_norm_stderr": 0.02786932057166463 + }, + 
"harness|ko_mmlu_marketing|5": { + "acc": 0.5470085470085471, + "acc_stderr": 0.03261099873098619, + "acc_norm": 0.5470085470085471, + "acc_norm_stderr": 0.03261099873098619 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3622641509433962, + "acc_stderr": 0.029582245128384296, + "acc_norm": 0.3622641509433962, + "acc_norm_stderr": 0.029582245128384296 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.42727272727272725, + "acc_stderr": 0.04738198703545483, + "acc_norm": 0.42727272727272725, + "acc_norm_stderr": 0.04738198703545483 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.28888888888888886, + "acc_stderr": 0.027634907264178544, + "acc_norm": 0.28888888888888886, + "acc_norm_stderr": 0.027634907264178544 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33112582781456956, + "acc_stderr": 0.038425817186598696, + "acc_norm": 0.33112582781456956, + "acc_norm_stderr": 0.038425817186598696 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5621890547263682, + "acc_stderr": 0.0350808011219984, + "acc_norm": 0.5621890547263682, + "acc_norm_stderr": 0.0350808011219984 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3930635838150289, + "acc_stderr": 0.03724249595817731, + "acc_norm": 0.3930635838150289, + "acc_norm_stderr": 0.03724249595817731 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2566137566137566, + "acc_stderr": 0.022494510767503154, + "acc_norm": 0.2566137566137566, + "acc_norm_stderr": 0.022494510767503154 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.03852084696008534, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.03852084696008534 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.62, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.62, + 
"acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.41040462427745666, + "acc_stderr": 0.026483392042098177, + "acc_norm": 0.41040462427745666, + "acc_norm_stderr": 0.026483392042098177 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3619631901840491, + "acc_stderr": 0.037757007291414416, + "acc_norm": 0.3619631901840491, + "acc_norm_stderr": 0.037757007291414416 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.39814814814814814, + "acc_stderr": 0.02723741509459248, + "acc_norm": 0.39814814814814814, + "acc_norm_stderr": 0.02723741509459248 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.047609522856952365, + "acc_norm": 0.34, + "acc_norm_stderr": 0.047609522856952365 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.46632124352331605, + "acc_stderr": 0.036002440698671784, + "acc_norm": 0.46632124352331605, + "acc_norm_stderr": 0.036002440698671784 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2982456140350877, + "acc_stderr": 0.043036840335373173, + "acc_norm": 0.2982456140350877, + "acc_norm_stderr": 0.043036840335373173 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.42385321100917434, + "acc_stderr": 0.02118726320908754, + "acc_norm": 0.42385321100917434, + "acc_norm_stderr": 0.02118726320908754 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.23809523809523808, + "acc_stderr": 0.038095238095238106, + "acc_norm": 0.23809523809523808, + "acc_norm_stderr": 0.038095238095238106 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4477124183006536, + "acc_stderr": 0.028472938478033526, + "acc_norm": 0.4477124183006536, + "acc_norm_stderr": 0.028472938478033526 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.48, + "acc_stderr": 0.05021167315686779, + "acc_norm": 0.48, + "acc_norm_stderr": 0.05021167315686779 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6115702479338843, + "acc_stderr": 
0.044492703500683836, + "acc_norm": 0.6115702479338843, + "acc_norm_stderr": 0.044492703500683836 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40131578947368424, + "acc_stderr": 0.03988903703336285, + "acc_norm": 0.40131578947368424, + "acc_norm_stderr": 0.03988903703336285 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.01877168389352817, + "acc_norm": 0.3137254901960784, + "acc_norm_stderr": 0.01877168389352817 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.28368794326241137, + "acc_stderr": 0.026891709428343957, + "acc_norm": 0.28368794326241137, + "acc_norm_stderr": 0.026891709428343957 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3482142857142857, + "acc_stderr": 0.04521829902833585, + "acc_norm": 0.3482142857142857, + "acc_norm_stderr": 0.04521829902833585 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2824074074074074, + "acc_stderr": 0.030701372111510923, + "acc_norm": 0.2824074074074074, + "acc_norm_stderr": 0.030701372111510923 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.26256983240223464, + "acc_stderr": 0.01471682427301776, + "acc_norm": 0.26256983240223464, + "acc_norm_stderr": 0.01471682427301776 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.35294117647058826, + "acc_stderr": 0.029029422815681397, + "acc_norm": 0.35294117647058826, + "acc_norm_stderr": 0.029029422815681397 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.45714285714285713, + "acc_stderr": 0.03189141832421397, + "acc_norm": 0.45714285714285713, + "acc_norm_stderr": 0.03189141832421397 + }, + 
"harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5274261603375527, + "acc_stderr": 0.03249822718301304, + "acc_norm": 0.5274261603375527, + "acc_norm_stderr": 0.03249822718301304 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.29791395045632335, + "acc_stderr": 0.011680717340400049, + "acc_norm": 0.29791395045632335, + "acc_norm_stderr": 0.011680717340400049 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.45098039215686275, + "acc_stderr": 0.03492406104163613, + "acc_norm": 0.45098039215686275, + "acc_norm_stderr": 0.03492406104163613 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.41818181818181815, + "acc_stderr": 0.03851716319398395, + "acc_norm": 0.41818181818181815, + "acc_norm_stderr": 0.03851716319398395 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.29253365973072215, + "mc1_stderr": 0.015925597445286165, + "mc2": 0.49768897705057563, + "mc2_stderr": 0.01589857721446022 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2668240850059032, + "acc_stderr": 0.015206575684565904, + "acc_norm": 0.4380165289256198, + "acc_norm_stderr": 0.017057753702160287 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 
1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "lcw99/zephykor-ko-beta-7b-chang", + "model_sha": "6958b487ce529ff5114d25b1ba2accc84bf5f8a8", + "model_dtype": "torch.float16", 
+ "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/lcw99/zephykor-ko-beta-7b-chang/result_2023-12-25 01:20:18.json b/lcw99/zephykor-ko-beta-7b-chang/result_2023-12-25 01:20:18.json new file mode 100644 index 0000000000000000000000000000000000000000..1e70ba5e1ea37184c2fd21e8ca39996e167b9822 --- /dev/null +++ b/lcw99/zephykor-ko-beta-7b-chang/result_2023-12-25 01:20:18.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.318259385665529, + "acc_stderr": 0.013611993916971453, + "acc_norm": 0.37627986348122866, + "acc_norm_stderr": 0.014157022555407163 + }, + "harness|ko_hellaswag|10": { + "acc": 0.37134037044413465, + "acc_stderr": 0.00482175773415672, + "acc_norm": 0.4766978689504083, + "acc_norm_stderr": 0.004984359669951927 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4269005847953216, + "acc_stderr": 0.03793620616529917, + "acc_norm": 0.4269005847953216, + "acc_norm_stderr": 0.03793620616529917 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.3786407766990291, + "acc_stderr": 0.048026946982589726, + "acc_norm": 0.3786407766990291, + "acc_norm_stderr": 0.048026946982589726 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4840357598978289, + "acc_stderr": 0.017870847506081734, + "acc_norm": 0.4840357598978289, + "acc_norm_stderr": 0.017870847506081734 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3925925925925926, + "acc_stderr": 0.04218506215368878, + "acc_norm": 0.3925925925925926, + "acc_norm_stderr": 0.04218506215368878 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.37872340425531914, + "acc_stderr": 0.031709956060406545, + "acc_norm": 0.37872340425531914, + "acc_norm_stderr": 0.031709956060406545 + }, + 
"harness|ko_mmlu_virology|5": { + "acc": 0.4397590361445783, + "acc_stderr": 0.03864139923699122, + "acc_norm": 0.4397590361445783, + "acc_norm_stderr": 0.03864139923699122 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.38263665594855306, + "acc_stderr": 0.027604689028581982, + "acc_norm": 0.38263665594855306, + "acc_norm_stderr": 0.027604689028581982 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4260089686098655, + "acc_stderr": 0.03318833286217281, + "acc_norm": 0.4260089686098655, + "acc_norm_stderr": 0.03318833286217281 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.42748091603053434, + "acc_stderr": 0.04338920305792399, + "acc_norm": 0.42748091603053434, + "acc_norm_stderr": 0.04338920305792399 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.494949494949495, + "acc_stderr": 0.035621707606254015, + "acc_norm": 0.494949494949495, + "acc_norm_stderr": 0.035621707606254015 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4, + "acc_stderr": 0.04082482904638628, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04082482904638628 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.04488482852329017, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.04488482852329017 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3319327731092437, + "acc_stderr": 0.030588697013783663, + "acc_norm": 0.3319327731092437, + "acc_norm_stderr": 0.030588697013783663 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3487179487179487, + "acc_stderr": 0.02416278028401772, + "acc_norm": 0.3487179487179487, + "acc_norm_stderr": 0.02416278028401772 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + 
}, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.047500773411999854, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.047500773411999854 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2561576354679803, + "acc_stderr": 0.030712730070982592, + "acc_norm": 0.2561576354679803, + "acc_norm_stderr": 0.030712730070982592 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.42258064516129035, + "acc_stderr": 0.02810096472427264, + "acc_norm": 0.42258064516129035, + "acc_norm_stderr": 0.02810096472427264 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5470085470085471, + "acc_stderr": 0.03261099873098619, + "acc_norm": 0.5470085470085471, + "acc_norm_stderr": 0.03261099873098619 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4, + "acc_stderr": 0.030151134457776292, + "acc_norm": 0.4, + "acc_norm_stderr": 0.030151134457776292 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.35454545454545455, + "acc_stderr": 0.04582004841505415, + "acc_norm": 0.35454545454545455, + "acc_norm_stderr": 0.04582004841505415 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3074074074074074, + "acc_stderr": 0.02813325257881564, + "acc_norm": 0.3074074074074074, + "acc_norm_stderr": 0.02813325257881564 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.32450331125827814, + "acc_stderr": 0.03822746937658753, + "acc_norm": 0.32450331125827814, + "acc_norm_stderr": 0.03822746937658753 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5074626865671642, + "acc_stderr": 0.03535140084276719, + "acc_norm": 0.5074626865671642, + "acc_norm_stderr": 0.03535140084276719 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3468208092485549, + "acc_stderr": 0.03629146670159664, + "acc_norm": 0.3468208092485549, + "acc_norm_stderr": 
0.03629146670159664 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.24603174603174602, + "acc_stderr": 0.022182037202948375, + "acc_norm": 0.24603174603174602, + "acc_norm_stderr": 0.022182037202948375 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3263888888888889, + "acc_stderr": 0.03921067198982266, + "acc_norm": 0.3263888888888889, + "acc_norm_stderr": 0.03921067198982266 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.56, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.56, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.42196531791907516, + "acc_stderr": 0.02658923114217426, + "acc_norm": 0.42196531791907516, + "acc_norm_stderr": 0.02658923114217426 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.27607361963190186, + "acc_stderr": 0.0351238528370505, + "acc_norm": 0.27607361963190186, + "acc_norm_stderr": 0.0351238528370505 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.39814814814814814, + "acc_stderr": 0.02723741509459247, + "acc_norm": 0.39814814814814814, + "acc_norm_stderr": 0.02723741509459247 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.39896373056994816, + "acc_stderr": 0.03533999094065696, + "acc_norm": 0.39896373056994816, + "acc_norm_stderr": 0.03533999094065696 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.21929824561403508, + "acc_stderr": 0.03892431106518753, + "acc_norm": 0.21929824561403508, + "acc_norm_stderr": 0.03892431106518753 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.42201834862385323, + "acc_stderr": 0.02117499140776317, + "acc_norm": 
0.42201834862385323, + "acc_norm_stderr": 0.02117499140776317 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.039325376803928704, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.039325376803928704 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4215686274509804, + "acc_stderr": 0.028275490156791438, + "acc_norm": 0.4215686274509804, + "acc_norm_stderr": 0.028275490156791438 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5867768595041323, + "acc_stderr": 0.04495087843548408, + "acc_norm": 0.5867768595041323, + "acc_norm_stderr": 0.04495087843548408 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3618421052631579, + "acc_stderr": 0.03910525752849725, + "acc_norm": 0.3618421052631579, + "acc_norm_stderr": 0.03910525752849725 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.31699346405228757, + "acc_stderr": 0.018824219512706207, + "acc_norm": 0.31699346405228757, + "acc_norm_stderr": 0.018824219512706207 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3120567375886525, + "acc_stderr": 0.02764012054516992, + "acc_norm": 0.3120567375886525, + "acc_norm_stderr": 0.02764012054516992 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3482142857142857, + "acc_stderr": 0.04521829902833586, + "acc_norm": 0.3482142857142857, + "acc_norm_stderr": 0.04521829902833586 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.03167468706828979, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.03167468706828979 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2435754189944134, + "acc_stderr": 0.014355911964767857, + "acc_norm": 0.2435754189944134, + "acc_norm_stderr": 0.014355911964767857 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.46, + 
"acc_stderr": 0.05009082659620333, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.47, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.47, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.41544117647058826, + "acc_stderr": 0.02993534270787775, + "acc_norm": 0.41544117647058826, + "acc_norm_stderr": 0.02993534270787775 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.44081632653061226, + "acc_stderr": 0.03178419114175363, + "acc_norm": 0.44081632653061226, + "acc_norm_stderr": 0.03178419114175363 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.540084388185654, + "acc_stderr": 0.03244246810187913, + "acc_norm": 0.540084388185654, + "acc_norm_stderr": 0.03244246810187913 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.31421121251629724, + "acc_stderr": 0.011855911587048224, + "acc_norm": 0.31421121251629724, + "acc_norm_stderr": 0.011855911587048224 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.43137254901960786, + "acc_stderr": 0.03476099060501636, + "acc_norm": 0.43137254901960786, + "acc_norm_stderr": 0.03476099060501636 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.03895658065271846, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.03895658065271846 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.28151774785801714, + "mc1_stderr": 0.015744027248256055, + "mc2": 0.48857937286231085, + "mc2_stderr": 0.015975836453033188 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3140495867768595, + "acc_stderr": 0.01595733243429507, + "acc_norm": 0.45336481700118064, + "acc_norm_stderr": 0.017115418225226865 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + 
"harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 
1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "lcw99/zephykor-ko-beta-7b-chang", + "model_sha": "67d0bf6e69c6e705ca28b54349429ffc7f473b7a", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/leeebs/kollama2_ndap/result_2023-12-07 05:20:29.json b/leeebs/kollama2_ndap/result_2023-12-07 05:20:29.json new file mode 100644 index 0000000000000000000000000000000000000000..bec6bd96f977597f4755abadf64ce6d36f1b30fa --- /dev/null +++ b/leeebs/kollama2_ndap/result_2023-12-07 05:20:29.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.30802047781569963, + "acc_stderr": 0.01349142951729204, + "acc_norm": 0.3677474402730375, + "acc_norm_stderr": 0.014090995618168473 + }, + "harness|ko_hellaswag|10": { + "acc": 0.38189603664608646, + "acc_stderr": 0.004848583243606688, + "acc_norm": 0.4862577175861382, + "acc_norm_stderr": 0.004987896411703674 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.23976608187134502, + "acc_stderr": 0.03274485211946956, + "acc_norm": 0.23976608187134502, + "acc_norm_stderr": 0.03274485211946956 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.27184466019417475, + "acc_stderr": 0.044052680241409216, + "acc_norm": 0.27184466019417475, + "acc_norm_stderr": 0.044052680241409216 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3090676883780332, + 
"acc_stderr": 0.01652498891970219, + "acc_norm": 0.3090676883780332, + "acc_norm_stderr": 0.01652498891970219 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.362962962962963, + "acc_stderr": 0.041539484047424004, + "acc_norm": 0.362962962962963, + "acc_norm_stderr": 0.041539484047424004 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.31063829787234043, + "acc_stderr": 0.03025123757921317, + "acc_norm": 0.31063829787234043, + "acc_norm_stderr": 0.03025123757921317 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3253012048192771, + "acc_stderr": 0.03647168523683227, + "acc_norm": 0.3253012048192771, + "acc_norm_stderr": 0.03647168523683227 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.33440514469453375, + "acc_stderr": 0.026795422327893944, + "acc_norm": 0.33440514469453375, + "acc_norm_stderr": 0.026795422327893944 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.37668161434977576, + "acc_stderr": 0.03252113489929189, + "acc_norm": 0.37668161434977576, + "acc_norm_stderr": 0.03252113489929189 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2748091603053435, + "acc_stderr": 0.03915345408847835, + "acc_norm": 0.2748091603053435, + "acc_norm_stderr": 0.03915345408847835 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.26262626262626265, + "acc_stderr": 0.03135305009533086, + "acc_norm": 0.26262626262626265, + "acc_norm_stderr": 0.03135305009533086 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.23448275862068965, + "acc_stderr": 0.035306258743465914, + "acc_norm": 0.23448275862068965, + "acc_norm_stderr": 0.035306258743465914 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 
0.22549019607843138, + "acc_stderr": 0.041583075330832865, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.041583075330832865 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.29831932773109243, + "acc_stderr": 0.029719142876342867, + "acc_norm": 0.29831932773109243, + "acc_norm_stderr": 0.029719142876342867 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.24871794871794872, + "acc_stderr": 0.0219169577092138, + "acc_norm": 0.24871794871794872, + "acc_norm_stderr": 0.0219169577092138 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768077, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768077 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.04489931073591312, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.04489931073591312 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.27586206896551724, + "acc_stderr": 0.03144712581678244, + "acc_norm": 0.27586206896551724, + "acc_norm_stderr": 0.03144712581678244 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.26129032258064516, + "acc_stderr": 0.024993053397764805, + "acc_norm": 0.26129032258064516, + "acc_norm_stderr": 0.024993053397764805 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.33760683760683763, + "acc_stderr": 0.030980296992618558, + "acc_norm": 0.33760683760683763, + "acc_norm_stderr": 0.030980296992618558 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3018867924528302, + "acc_stderr": 0.02825420034443865, + "acc_norm": 0.3018867924528302, + "acc_norm_stderr": 0.02825420034443865 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2818181818181818, + "acc_stderr": 0.0430911870994646, + "acc_norm": 0.2818181818181818, + "acc_norm_stderr": 0.0430911870994646 + }, + 
"harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.02730914058823017, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.02730914058823017 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.23178807947019867, + "acc_stderr": 0.034454062719870546, + "acc_norm": 0.23178807947019867, + "acc_norm_stderr": 0.034454062719870546 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.24378109452736318, + "acc_stderr": 0.030360490154014645, + "acc_norm": 0.24378109452736318, + "acc_norm_stderr": 0.030360490154014645 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.23121387283236994, + "acc_stderr": 0.0321473730202947, + "acc_norm": 0.23121387283236994, + "acc_norm_stderr": 0.0321473730202947 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2566137566137566, + "acc_stderr": 0.022494510767503154, + "acc_norm": 0.2566137566137566, + "acc_norm_stderr": 0.022494510767503154 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.20833333333333334, + "acc_stderr": 0.03396116205845335, + "acc_norm": 0.20833333333333334, + "acc_norm_stderr": 0.03396116205845335 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036846, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036846 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.20809248554913296, + "acc_stderr": 0.021855255263421806, + "acc_norm": 0.20809248554913296, + "acc_norm_stderr": 0.021855255263421806 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.27607361963190186, + "acc_stderr": 0.0351238528370505, + "acc_norm": 0.27607361963190186, + "acc_norm_stderr": 0.0351238528370505 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2716049382716049, + "acc_stderr": 0.024748624490537368, + "acc_norm": 0.2716049382716049, + 
"acc_norm_stderr": 0.024748624490537368 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816505, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816505 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.24352331606217617, + "acc_stderr": 0.030975436386845426, + "acc_norm": 0.24352331606217617, + "acc_norm_stderr": 0.030975436386845426 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.22807017543859648, + "acc_stderr": 0.03947152782669415, + "acc_norm": 0.22807017543859648, + "acc_norm_stderr": 0.03947152782669415 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.26972477064220185, + "acc_stderr": 0.01902848671111544, + "acc_norm": 0.26972477064220185, + "acc_norm_stderr": 0.01902848671111544 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.1746031746031746, + "acc_stderr": 0.03395490020856111, + "acc_norm": 0.1746031746031746, + "acc_norm_stderr": 0.03395490020856111 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2679738562091503, + "acc_stderr": 0.025360603796242557, + "acc_norm": 0.2679738562091503, + "acc_norm_stderr": 0.025360603796242557 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.32231404958677684, + "acc_stderr": 0.042664163633521685, + "acc_norm": 0.32231404958677684, + "acc_norm_stderr": 0.042664163633521685 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.19736842105263158, + "acc_stderr": 0.03238981601699397, + "acc_norm": 0.19736842105263158, + "acc_norm_stderr": 0.03238981601699397 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2581699346405229, + "acc_stderr": 0.017704531653250075, + "acc_norm": 0.2581699346405229, + "acc_norm_stderr": 0.017704531653250075 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.25886524822695034, + "acc_stderr": 
0.026129572527180848, + "acc_norm": 0.25886524822695034, + "acc_norm_stderr": 0.026129572527180848 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.042878587513404565, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.042878587513404565 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.25, + "acc_stderr": 0.029531221160930918, + "acc_norm": 0.25, + "acc_norm_stderr": 0.029531221160930918 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932269, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932269 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816508, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816508 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3235294117647059, + "acc_stderr": 0.02841820861940679, + "acc_norm": 0.3235294117647059, + "acc_norm_stderr": 0.02841820861940679 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.19591836734693877, + "acc_stderr": 0.025409301953225678, + "acc_norm": 0.19591836734693877, + "acc_norm_stderr": 0.025409301953225678 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.29957805907172996, + "acc_stderr": 0.029818024749753095, + "acc_norm": 0.29957805907172996, + "acc_norm_stderr": 0.029818024749753095 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2620599739243807, + "acc_stderr": 0.011231552795890392, + "acc_norm": 0.2620599739243807, + "acc_norm_stderr": 0.011231552795890392 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.22058823529411764, + "acc_stderr": 0.02910225438967409, + "acc_norm": 0.22058823529411764, + "acc_norm_stderr": 0.02910225438967409 + }, + 
"harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.24242424242424243, + "acc_stderr": 0.03346409881055953, + "acc_norm": 0.24242424242424243, + "acc_norm_stderr": 0.03346409881055953 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2423500611995104, + "mc1_stderr": 0.01500067437357034, + "mc2": 0.39136302132648476, + "mc2_stderr": 0.01593302794463535 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2833530106257379, + "acc_stderr": 0.015492852084597239, + "acc_norm": 0.32231404958677684, + "acc_norm_stderr": 0.016068253615813967 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + 
"harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "leeebs/kollama2_ndap", + "model_sha": "a98a2530390c0402e33e35503b05249acf4ef790", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/leeebs/kollama2_ndap/result_2023-12-07 05:25:10.json b/leeebs/kollama2_ndap/result_2023-12-07 05:25:10.json new file mode 100644 index 0000000000000000000000000000000000000000..bec6bd96f977597f4755abadf64ce6d36f1b30fa --- /dev/null +++ b/leeebs/kollama2_ndap/result_2023-12-07 05:25:10.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.30802047781569963, + "acc_stderr": 
0.01349142951729204, + "acc_norm": 0.3677474402730375, + "acc_norm_stderr": 0.014090995618168473 + }, + "harness|ko_hellaswag|10": { + "acc": 0.38189603664608646, + "acc_stderr": 0.004848583243606688, + "acc_norm": 0.4862577175861382, + "acc_norm_stderr": 0.004987896411703674 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.23976608187134502, + "acc_stderr": 0.03274485211946956, + "acc_norm": 0.23976608187134502, + "acc_norm_stderr": 0.03274485211946956 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.27184466019417475, + "acc_stderr": 0.044052680241409216, + "acc_norm": 0.27184466019417475, + "acc_norm_stderr": 0.044052680241409216 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3090676883780332, + "acc_stderr": 0.01652498891970219, + "acc_norm": 0.3090676883780332, + "acc_norm_stderr": 0.01652498891970219 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.362962962962963, + "acc_stderr": 0.041539484047424004, + "acc_norm": 0.362962962962963, + "acc_norm_stderr": 0.041539484047424004 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.31063829787234043, + "acc_stderr": 0.03025123757921317, + "acc_norm": 0.31063829787234043, + "acc_norm_stderr": 0.03025123757921317 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3253012048192771, + "acc_stderr": 0.03647168523683227, + "acc_norm": 0.3253012048192771, + "acc_norm_stderr": 0.03647168523683227 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.33440514469453375, + "acc_stderr": 0.026795422327893944, + "acc_norm": 0.33440514469453375, + "acc_norm_stderr": 0.026795422327893944 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.37668161434977576, + "acc_stderr": 0.03252113489929189, + "acc_norm": 0.37668161434977576, + "acc_norm_stderr": 0.03252113489929189 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2748091603053435, + 
"acc_stderr": 0.03915345408847835, + "acc_norm": 0.2748091603053435, + "acc_norm_stderr": 0.03915345408847835 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.26262626262626265, + "acc_stderr": 0.03135305009533086, + "acc_norm": 0.26262626262626265, + "acc_norm_stderr": 0.03135305009533086 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.23448275862068965, + "acc_stderr": 0.035306258743465914, + "acc_norm": 0.23448275862068965, + "acc_norm_stderr": 0.035306258743465914 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.041583075330832865, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.041583075330832865 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.29831932773109243, + "acc_stderr": 0.029719142876342867, + "acc_norm": 0.29831932773109243, + "acc_norm_stderr": 0.029719142876342867 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.24871794871794872, + "acc_stderr": 0.0219169577092138, + "acc_norm": 0.24871794871794872, + "acc_norm_stderr": 0.0219169577092138 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768077, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768077 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.04489931073591312, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.04489931073591312 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.27586206896551724, + "acc_stderr": 0.03144712581678244, + "acc_norm": 0.27586206896551724, + "acc_norm_stderr": 0.03144712581678244 + }, + 
"harness|ko_mmlu_high_school_biology|5": { + "acc": 0.26129032258064516, + "acc_stderr": 0.024993053397764805, + "acc_norm": 0.26129032258064516, + "acc_norm_stderr": 0.024993053397764805 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.33760683760683763, + "acc_stderr": 0.030980296992618558, + "acc_norm": 0.33760683760683763, + "acc_norm_stderr": 0.030980296992618558 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3018867924528302, + "acc_stderr": 0.02825420034443865, + "acc_norm": 0.3018867924528302, + "acc_norm_stderr": 0.02825420034443865 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2818181818181818, + "acc_stderr": 0.0430911870994646, + "acc_norm": 0.2818181818181818, + "acc_norm_stderr": 0.0430911870994646 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.02730914058823017, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.02730914058823017 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.23178807947019867, + "acc_stderr": 0.034454062719870546, + "acc_norm": 0.23178807947019867, + "acc_norm_stderr": 0.034454062719870546 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.24378109452736318, + "acc_stderr": 0.030360490154014645, + "acc_norm": 0.24378109452736318, + "acc_norm_stderr": 0.030360490154014645 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.23121387283236994, + "acc_stderr": 0.0321473730202947, + "acc_norm": 0.23121387283236994, + "acc_norm_stderr": 0.0321473730202947 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2566137566137566, + "acc_stderr": 0.022494510767503154, + "acc_norm": 0.2566137566137566, + "acc_norm_stderr": 0.022494510767503154 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.20833333333333334, + "acc_stderr": 0.03396116205845335, + "acc_norm": 0.20833333333333334, + "acc_norm_stderr": 0.03396116205845335 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036846, + 
"acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036846 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.20809248554913296, + "acc_stderr": 0.021855255263421806, + "acc_norm": 0.20809248554913296, + "acc_norm_stderr": 0.021855255263421806 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.27607361963190186, + "acc_stderr": 0.0351238528370505, + "acc_norm": 0.27607361963190186, + "acc_norm_stderr": 0.0351238528370505 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2716049382716049, + "acc_stderr": 0.024748624490537368, + "acc_norm": 0.2716049382716049, + "acc_norm_stderr": 0.024748624490537368 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816505, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816505 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.24352331606217617, + "acc_stderr": 0.030975436386845426, + "acc_norm": 0.24352331606217617, + "acc_norm_stderr": 0.030975436386845426 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.22807017543859648, + "acc_stderr": 0.03947152782669415, + "acc_norm": 0.22807017543859648, + "acc_norm_stderr": 0.03947152782669415 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.26972477064220185, + "acc_stderr": 0.01902848671111544, + "acc_norm": 0.26972477064220185, + "acc_norm_stderr": 0.01902848671111544 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.1746031746031746, + "acc_stderr": 0.03395490020856111, + "acc_norm": 0.1746031746031746, + "acc_norm_stderr": 0.03395490020856111 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2679738562091503, + "acc_stderr": 0.025360603796242557, + "acc_norm": 0.2679738562091503, + "acc_norm_stderr": 0.025360603796242557 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.3, + "acc_stderr": 
0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.32231404958677684, + "acc_stderr": 0.042664163633521685, + "acc_norm": 0.32231404958677684, + "acc_norm_stderr": 0.042664163633521685 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.19736842105263158, + "acc_stderr": 0.03238981601699397, + "acc_norm": 0.19736842105263158, + "acc_norm_stderr": 0.03238981601699397 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2581699346405229, + "acc_stderr": 0.017704531653250075, + "acc_norm": 0.2581699346405229, + "acc_norm_stderr": 0.017704531653250075 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.25886524822695034, + "acc_stderr": 0.026129572527180848, + "acc_norm": 0.25886524822695034, + "acc_norm_stderr": 0.026129572527180848 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.042878587513404565, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.042878587513404565 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.25, + "acc_stderr": 0.029531221160930918, + "acc_norm": 0.25, + "acc_norm_stderr": 0.029531221160930918 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932269, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932269 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816508, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816508 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3235294117647059, + "acc_stderr": 0.02841820861940679, + "acc_norm": 0.3235294117647059, + "acc_norm_stderr": 0.02841820861940679 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 
0.19591836734693877, + "acc_stderr": 0.025409301953225678, + "acc_norm": 0.19591836734693877, + "acc_norm_stderr": 0.025409301953225678 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.29957805907172996, + "acc_stderr": 0.029818024749753095, + "acc_norm": 0.29957805907172996, + "acc_norm_stderr": 0.029818024749753095 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2620599739243807, + "acc_stderr": 0.011231552795890392, + "acc_norm": 0.2620599739243807, + "acc_norm_stderr": 0.011231552795890392 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.22058823529411764, + "acc_stderr": 0.02910225438967409, + "acc_norm": 0.22058823529411764, + "acc_norm_stderr": 0.02910225438967409 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.24242424242424243, + "acc_stderr": 0.03346409881055953, + "acc_norm": 0.24242424242424243, + "acc_norm_stderr": 0.03346409881055953 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2423500611995104, + "mc1_stderr": 0.01500067437357034, + "mc2": 0.39136302132648476, + "mc2_stderr": 0.01593302794463535 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2833530106257379, + "acc_stderr": 0.015492852084597239, + "acc_norm": 0.32231404958677684, + "acc_norm_stderr": 0.016068253615813967 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + 
"harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + 
"harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "leeebs/kollama2_ndap", + "model_sha": "a98a2530390c0402e33e35503b05249acf4ef790", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/leeebs/llama2-ndap-10-7b/result_2023-12-08 08:04:53.json b/leeebs/llama2-ndap-10-7b/result_2023-12-08 08:04:53.json new file mode 100644 index 0000000000000000000000000000000000000000..a98f61c2ca75fb06dc98ca7a31f04e1024fd64f6 --- /dev/null +++ b/leeebs/llama2-ndap-10-7b/result_2023-12-08 08:04:53.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.32337883959044367, + "acc_stderr": 0.01366942163001212, + "acc_norm": 0.3575085324232082, + "acc_norm_stderr": 0.014005494275916571 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3776140211113324, + "acc_stderr": 0.004837995637638535, + "acc_norm": 0.47998406691894047, + "acc_norm_stderr": 0.0049857816204670205 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.22807017543859648, + "acc_stderr": 0.03218093795602357, + "acc_norm": 0.22807017543859648, + "acc_norm_stderr": 0.03218093795602357 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2912621359223301, + "acc_stderr": 0.04498676320572924, + "acc_norm": 0.2912621359223301, + "acc_norm_stderr": 0.04498676320572924 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.31800766283524906, + "acc_stderr": 0.016653486275615394, + "acc_norm": 0.31800766283524906, + "acc_norm_stderr": 0.016653486275615394 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04072314811876837, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04072314811876837 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768077, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768077 + }, + 
"harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.31063829787234043, + "acc_stderr": 0.03025123757921317, + "acc_norm": 0.31063829787234043, + "acc_norm_stderr": 0.03025123757921317 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3132530120481928, + "acc_stderr": 0.03610805018031023, + "acc_norm": 0.3132530120481928, + "acc_norm_stderr": 0.03610805018031023 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3279742765273312, + "acc_stderr": 0.026664410886937606, + "acc_norm": 0.3279742765273312, + "acc_norm_stderr": 0.026664410886937606 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.36771300448430494, + "acc_stderr": 0.03236198350928275, + "acc_norm": 0.36771300448430494, + "acc_norm_stderr": 0.03236198350928275 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.22900763358778625, + "acc_stderr": 0.036853466317118506, + "acc_norm": 0.22900763358778625, + "acc_norm_stderr": 0.036853466317118506 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.23232323232323232, + "acc_stderr": 0.030088629490217483, + "acc_norm": 0.23232323232323232, + "acc_norm_stderr": 0.030088629490217483 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2620689655172414, + "acc_stderr": 0.03664666337225256, + "acc_norm": 0.2620689655172414, + "acc_norm_stderr": 0.03664666337225256 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.04023382273617747, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.04023382273617747 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.21428571428571427, + "acc_stderr": 0.026653531596715473, + "acc_norm": 0.21428571428571427, + "acc_norm_stderr": 0.026653531596715473 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.24615384615384617, + "acc_stderr": 0.021840866990423077, + 
"acc_norm": 0.24615384615384617, + "acc_norm_stderr": 0.021840866990423077 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.24074074074074073, + "acc_stderr": 0.04133119440243839, + "acc_norm": 0.24074074074074073, + "acc_norm_stderr": 0.04133119440243839 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.03178529710642752, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.03178529710642752 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.25161290322580643, + "acc_stderr": 0.02468597928623997, + "acc_norm": 0.25161290322580643, + "acc_norm_stderr": 0.02468597928623997 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.31196581196581197, + "acc_stderr": 0.03035152732334495, + "acc_norm": 0.31196581196581197, + "acc_norm_stderr": 0.03035152732334495 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.26037735849056604, + "acc_stderr": 0.027008766090708097, + "acc_norm": 0.26037735849056604, + "acc_norm_stderr": 0.027008766090708097 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.22727272727272727, + "acc_stderr": 0.04013964554072775, + "acc_norm": 0.22727272727272727, + "acc_norm_stderr": 0.04013964554072775 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.23333333333333334, + "acc_stderr": 0.02578787422095932, + "acc_norm": 0.23333333333333334, + "acc_norm_stderr": 0.02578787422095932 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.1986754966887417, + "acc_stderr": 0.03257847384436776, + "acc_norm": 0.1986754966887417, + "acc_norm_stderr": 0.03257847384436776 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.2835820895522388, + 
"acc_stderr": 0.031871875379197986, + "acc_norm": 0.2835820895522388, + "acc_norm_stderr": 0.031871875379197986 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.26011560693641617, + "acc_stderr": 0.03345036916788991, + "acc_norm": 0.26011560693641617, + "acc_norm_stderr": 0.03345036916788991 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2566137566137566, + "acc_stderr": 0.022494510767503154, + "acc_norm": 0.2566137566137566, + "acc_norm_stderr": 0.022494510767503154 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.03476590104304134, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.03476590104304134 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.19, + "acc_stderr": 0.03942772444036623, + "acc_norm": 0.19, + "acc_norm_stderr": 0.03942772444036623 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.24855491329479767, + "acc_stderr": 0.023267528432100174, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 0.023267528432100174 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.26380368098159507, + "acc_stderr": 0.034624199316156234, + "acc_norm": 0.26380368098159507, + "acc_norm_stderr": 0.034624199316156234 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.024922001168886324, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.024922001168886324 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.22279792746113988, + "acc_stderr": 0.03003114797764154, + "acc_norm": 0.22279792746113988, + "acc_norm_stderr": 0.03003114797764154 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 
0.18421052631578946, + "acc_stderr": 0.03646758875075566, + "acc_norm": 0.18421052631578946, + "acc_norm_stderr": 0.03646758875075566 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.24220183486238533, + "acc_stderr": 0.01836817630659862, + "acc_norm": 0.24220183486238533, + "acc_norm_stderr": 0.01836817630659862 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.15873015873015872, + "acc_stderr": 0.032684540130117457, + "acc_norm": 0.15873015873015872, + "acc_norm_stderr": 0.032684540130117457 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2581699346405229, + "acc_stderr": 0.025058503316958174, + "acc_norm": 0.2581699346405229, + "acc_norm_stderr": 0.025058503316958174 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.256198347107438, + "acc_stderr": 0.039849796533028704, + "acc_norm": 0.256198347107438, + "acc_norm_stderr": 0.039849796533028704 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.21710526315789475, + "acc_stderr": 0.03355045304882924, + "acc_norm": 0.21710526315789475, + "acc_norm_stderr": 0.03355045304882924 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.018120224251484594, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.018120224251484594 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.24468085106382978, + "acc_stderr": 0.025645553622266736, + "acc_norm": 0.24468085106382978, + "acc_norm_stderr": 0.025645553622266736 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.04287858751340456, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.04287858751340456 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.16666666666666666, + "acc_stderr": 0.02541642838876748, + "acc_norm": 0.16666666666666666, + "acc_norm_stderr": 
0.02541642838876748 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24022346368715083, + "acc_stderr": 0.014288343803925295, + "acc_norm": 0.24022346368715083, + "acc_norm_stderr": 0.014288343803925295 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.16, + "acc_stderr": 0.03684529491774708, + "acc_norm": 0.16, + "acc_norm_stderr": 0.03684529491774708 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.20955882352941177, + "acc_stderr": 0.024723110407677062, + "acc_norm": 0.20955882352941177, + "acc_norm_stderr": 0.024723110407677062 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2530612244897959, + "acc_stderr": 0.027833023871399683, + "acc_norm": 0.2530612244897959, + "acc_norm_stderr": 0.027833023871399683 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.270042194092827, + "acc_stderr": 0.028900721906293426, + "acc_norm": 0.270042194092827, + "acc_norm_stderr": 0.028900721906293426 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.242503259452412, + "acc_stderr": 0.010946570966348783, + "acc_norm": 0.242503259452412, + "acc_norm_stderr": 0.010946570966348783 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.25, + "acc_stderr": 0.03039153369274154, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03039153369274154 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.034531318018854146, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.034531318018854146 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.23378212974296206, + "mc1_stderr": 0.014816195991931583, + "mc2": 0.3853810991215767, + "mc2_stderr": 0.015883616014330193 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.30342384887839435, + "acc_stderr": 0.01580607271790957, + "acc_norm": 
0.31641086186540734, + "acc_norm_stderr": 0.015989617951065474 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + 
"harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "leeebs/llama2-ndap-10-7b", + "model_sha": "89e23643d1cdc9ece9f6c0a2b379dbadb67984f5", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/lemon-mint/gemma-7b-openhermes-v0.80/result_2024-04-09 08:55:51.json b/lemon-mint/gemma-7b-openhermes-v0.80/result_2024-04-09 08:55:51.json new file mode 100644 index 0000000000000000000000000000000000000000..597d5fc5f5de2d48c7f7908628945ef325388fb4 --- /dev/null +++ b/lemon-mint/gemma-7b-openhermes-v0.80/result_2024-04-09 08:55:51.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.19880546075085323, + "acc_stderr": 0.011662850198175529, + "acc_norm": 0.25426621160409557, + "acc_norm_stderr": 0.012724999945157741 + }, + "harness|ko_hellaswag|10": { + "acc": 0.2605058753236407, + "acc_stderr": 0.004380136468543943, + "acc_norm": 0.26419040031866164, + "acc_norm_stderr": 0.004400000822742063 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.03446296217088426, + "acc_norm": 0.2807017543859649, + 
"acc_norm_stderr": 0.03446296217088426 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.1553398058252427, + "acc_stderr": 0.03586594738573975, + "acc_norm": 0.1553398058252427, + "acc_norm_stderr": 0.03586594738573975 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2707535121328225, + "acc_stderr": 0.015889888362560486, + "acc_norm": 0.2707535121328225, + "acc_norm_stderr": 0.015889888362560486 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34074074074074073, + "acc_stderr": 0.040943762699967946, + "acc_norm": 0.34074074074074073, + "acc_norm_stderr": 0.040943762699967946 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.20425531914893616, + "acc_stderr": 0.02635515841334941, + "acc_norm": 0.20425531914893616, + "acc_norm_stderr": 0.02635515841334941 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.21686746987951808, + "acc_stderr": 0.03208284450356365, + "acc_norm": 0.21686746987951808, + "acc_norm_stderr": 0.03208284450356365 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3022508038585209, + "acc_stderr": 0.02608270069539966, + "acc_norm": 0.3022508038585209, + "acc_norm_stderr": 0.02608270069539966 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.24663677130044842, + "acc_stderr": 0.02893041312091087, + "acc_norm": 0.24663677130044842, + "acc_norm_stderr": 0.02893041312091087 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.26717557251908397, + "acc_stderr": 0.03880848301082395, + "acc_norm": 0.26717557251908397, + "acc_norm_stderr": 0.03880848301082395 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.25757575757575757, + "acc_stderr": 0.03115626951964684, + "acc_norm": 0.25757575757575757, + 
"acc_norm_stderr": 0.03115626951964684 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.30344827586206896, + "acc_stderr": 0.03831226048850333, + "acc_norm": 0.30344827586206896, + "acc_norm_stderr": 0.03831226048850333 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237654, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237654 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.25210084033613445, + "acc_stderr": 0.028205545033277723, + "acc_norm": 0.25210084033613445, + "acc_norm_stderr": 0.028205545033277723 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.24358974358974358, + "acc_stderr": 0.02176373368417392, + "acc_norm": 0.24358974358974358, + "acc_norm_stderr": 0.02176373368417392 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542129, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542129 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.04236511258094633, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.04236511258094633 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.1921182266009852, + "acc_stderr": 0.02771931570961478, + "acc_norm": 0.1921182266009852, + "acc_norm_stderr": 0.02771931570961478 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.24516129032258063, + "acc_stderr": 0.024472243840895528, + "acc_norm": 0.24516129032258063, + "acc_norm_stderr": 0.024472243840895528 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.02723601394619669, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.02723601394619669 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.19622641509433963, + "acc_stderr": 
0.0244423881311008, + "acc_norm": 0.19622641509433963, + "acc_norm_stderr": 0.0244423881311008 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.20909090909090908, + "acc_stderr": 0.03895091015724135, + "acc_norm": 0.20909090909090908, + "acc_norm_stderr": 0.03895091015724135 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25555555555555554, + "acc_stderr": 0.026593939101844075, + "acc_norm": 0.25555555555555554, + "acc_norm_stderr": 0.026593939101844075 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2119205298013245, + "acc_stderr": 0.03336767086567977, + "acc_norm": 0.2119205298013245, + "acc_norm_stderr": 0.03336767086567977 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.24875621890547264, + "acc_stderr": 0.030567675938916714, + "acc_norm": 0.24875621890547264, + "acc_norm_stderr": 0.030567675938916714 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.26011560693641617, + "acc_stderr": 0.03345036916788989, + "acc_norm": 0.26011560693641617, + "acc_norm_stderr": 0.03345036916788989 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.022569897074918417, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.022569897074918417 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2361111111111111, + "acc_stderr": 0.03551446610810826, + "acc_norm": 0.2361111111111111, + "acc_norm_stderr": 0.03551446610810826 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.14, + "acc_stderr": 0.03487350880197771, + "acc_norm": 0.14, + "acc_norm_stderr": 0.03487350880197771 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.25722543352601157, + "acc_stderr": 0.02353292543104428, + "acc_norm": 0.25722543352601157, + "acc_norm_stderr": 0.02353292543104428 + }, + "harness|ko_mmlu_logical_fallacies|5": { + 
"acc": 0.31901840490797545, + "acc_stderr": 0.03661997551073836, + "acc_norm": 0.31901840490797545, + "acc_norm_stderr": 0.03661997551073836 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.30246913580246915, + "acc_stderr": 0.025557653981868038, + "acc_norm": 0.30246913580246915, + "acc_norm_stderr": 0.025557653981868038 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.21243523316062177, + "acc_stderr": 0.02951928261681726, + "acc_norm": 0.21243523316062177, + "acc_norm_stderr": 0.02951928261681726 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.04049339297748141, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.04049339297748141 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.21834862385321102, + "acc_stderr": 0.01771260052872273, + "acc_norm": 0.21834862385321102, + "acc_norm_stderr": 0.01771260052872273 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.15079365079365079, + "acc_stderr": 0.03200686497287392, + "acc_norm": 0.15079365079365079, + "acc_norm_stderr": 0.03200686497287392 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.24183006535947713, + "acc_stderr": 0.024518195641879334, + "acc_norm": 0.24183006535947713, + "acc_norm_stderr": 0.024518195641879334 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816508, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816508 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2892561983471074, + "acc_stderr": 0.04139112727635464, + "acc_norm": 0.2892561983471074, + "acc_norm_stderr": 0.04139112727635464 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.19078947368421054, + "acc_stderr": 0.03197565821032501, + "acc_norm": 0.19078947368421054, + "acc_norm_stderr": 0.03197565821032501 + }, + 
"harness|ko_mmlu_professional_psychology|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.016906615927288145, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.016906615927288145 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2198581560283688, + "acc_stderr": 0.024706141070705474, + "acc_norm": 0.2198581560283688, + "acc_norm_stderr": 0.024706141070705474 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.24107142857142858, + "acc_stderr": 0.04059867246952689, + "acc_norm": 0.24107142857142858, + "acc_norm_stderr": 0.04059867246952689 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.25462962962962965, + "acc_stderr": 0.029711275860005333, + "acc_norm": 0.25462962962962965, + "acc_norm_stderr": 0.029711275860005333 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23687150837988827, + "acc_stderr": 0.014219570788103987, + "acc_norm": 0.23687150837988827, + "acc_norm_stderr": 0.014219570788103987 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.23, + "acc_stderr": 0.042295258468165044, + "acc_norm": 0.23, + "acc_norm_stderr": 0.042295258468165044 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.16911764705882354, + "acc_stderr": 0.022770868010113014, + "acc_norm": 0.16911764705882354, + "acc_norm_stderr": 0.022770868010113014 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3346938775510204, + "acc_stderr": 0.030209235226242307, + "acc_norm": 0.3346938775510204, + "acc_norm_stderr": 0.030209235226242307 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.32489451476793246, + "acc_stderr": 0.030486039389105313, + "acc_norm": 0.32489451476793246, + "acc_norm_stderr": 0.030486039389105313 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.24641460234680573, + "acc_stderr": 
0.011005971399927232, + "acc_norm": 0.24641460234680573, + "acc_norm_stderr": 0.011005971399927232 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.03096451792692341, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.03096451792692341 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.20606060606060606, + "acc_stderr": 0.0315841532404771, + "acc_norm": 0.20606060606060606, + "acc_norm_stderr": 0.0315841532404771 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.32068543451652387, + "mc1_stderr": 0.016339170373280906, + "mc2": 0.5066851771786931, + "mc2_stderr": 0.016699731168220523 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.256198347107438, + "acc_stderr": 0.01500830164471298, + "acc_norm": 0.31641086186540734, + "acc_norm_stderr": 0.015989617951065474 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + 
"harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "lemon-mint/gemma-7b-openhermes-v0.80", + "model_sha": "293d0ba1ff93cfdb8975b844d373656e14731208", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/lemon-mint/gemma-ko-1.1-2b-it/result_2024-04-26 01:58:07.json 
b/lemon-mint/gemma-ko-1.1-2b-it/result_2024-04-26 01:58:07.json new file mode 100644 index 0000000000000000000000000000000000000000..189e886fa6576c00f3bae6feb00c6c45d014515e --- /dev/null +++ b/lemon-mint/gemma-ko-1.1-2b-it/result_2024-04-26 01:58:07.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2030716723549488, + "acc_stderr": 0.011755899303705582, + "acc_norm": 0.26706484641638223, + "acc_norm_stderr": 0.01292893319649635 + }, + "harness|ko_hellaswag|10": { + "acc": 0.2539334793865764, + "acc_stderr": 0.004343704512380098, + "acc_norm": 0.24736108344951205, + "acc_norm_stderr": 0.004305965431515138 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.26900584795321636, + "acc_stderr": 0.034010526201040885, + "acc_norm": 0.26900584795321636, + "acc_norm_stderr": 0.034010526201040885 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2524271844660194, + "acc_stderr": 0.04301250399690878, + "acc_norm": 0.2524271844660194, + "acc_norm_stderr": 0.04301250399690878 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.29118773946360155, + "acc_stderr": 0.016246087069701393, + "acc_norm": 0.29118773946360155, + "acc_norm_stderr": 0.016246087069701393 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04072314811876837, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04072314811876837 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.225531914893617, + "acc_stderr": 0.02732107841738753, + "acc_norm": 0.225531914893617, + "acc_norm_stderr": 0.02732107841738753 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.2289156626506024, + "acc_stderr": 0.03270745277352477, + "acc_norm": 0.2289156626506024, + "acc_norm_stderr": 0.03270745277352477 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.28938906752411575, + 
"acc_stderr": 0.02575586592263294, + "acc_norm": 0.28938906752411575, + "acc_norm_stderr": 0.02575586592263294 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.19730941704035873, + "acc_stderr": 0.02670985334496796, + "acc_norm": 0.19730941704035873, + "acc_norm_stderr": 0.02670985334496796 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.24427480916030533, + "acc_stderr": 0.03768335959728744, + "acc_norm": 0.24427480916030533, + "acc_norm_stderr": 0.03768335959728744 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036845, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036845 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.25252525252525254, + "acc_stderr": 0.030954055470365907, + "acc_norm": 0.25252525252525254, + "acc_norm_stderr": 0.030954055470365907 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2206896551724138, + "acc_stderr": 0.03455930201924811, + "acc_norm": 0.2206896551724138, + "acc_norm_stderr": 0.03455930201924811 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.1568627450980392, + "acc_stderr": 0.03618664819936244, + "acc_norm": 0.1568627450980392, + "acc_norm_stderr": 0.03618664819936244 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.19747899159663865, + "acc_stderr": 0.025859164122051453, + "acc_norm": 0.19747899159663865, + "acc_norm_stderr": 0.025859164122051453 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.21794871794871795, + "acc_stderr": 0.020932445774463185, + "acc_norm": 0.21794871794871795, + "acc_norm_stderr": 0.020932445774463185 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542126, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542126 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 
0.23148148148148148, + "acc_stderr": 0.04077494709252627, + "acc_norm": 0.23148148148148148, + "acc_norm_stderr": 0.04077494709252627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.03178529710642749, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.03178529710642749 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.20967741935483872, + "acc_stderr": 0.023157879349083522, + "acc_norm": 0.20967741935483872, + "acc_norm_stderr": 0.023157879349083522 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2692307692307692, + "acc_stderr": 0.029058588303748842, + "acc_norm": 0.2692307692307692, + "acc_norm_stderr": 0.029058588303748842 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.22264150943396227, + "acc_stderr": 0.0256042334708991, + "acc_norm": 0.22264150943396227, + "acc_norm_stderr": 0.0256042334708991 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.20909090909090908, + "acc_stderr": 0.03895091015724135, + "acc_norm": 0.20909090909090908, + "acc_norm_stderr": 0.03895091015724135 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.02671924078371217, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.02671924078371217 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2185430463576159, + "acc_stderr": 0.03374235550425694, + "acc_norm": 0.2185430463576159, + "acc_norm_stderr": 0.03374235550425694 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.24378109452736318, + "acc_stderr": 0.030360490154014676, + "acc_norm": 0.24378109452736318, + "acc_norm_stderr": 0.030360490154014676 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2543352601156069, + "acc_stderr": 0.0332055644308557, + "acc_norm": 0.2543352601156069, + "acc_norm_stderr": 0.0332055644308557 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.02264421261552521, + "acc_norm": 0.2619047619047619, + 
"acc_norm_stderr": 0.02264421261552521 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.25, + "acc_stderr": 0.03621034121889507, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03621034121889507 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.17, + "acc_stderr": 0.0377525168068637, + "acc_norm": 0.17, + "acc_norm_stderr": 0.0377525168068637 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542129, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542129 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2774566473988439, + "acc_stderr": 0.024105712607754307, + "acc_norm": 0.2774566473988439, + "acc_norm_stderr": 0.024105712607754307 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.24539877300613497, + "acc_stderr": 0.03380939813943354, + "acc_norm": 0.24539877300613497, + "acc_norm_stderr": 0.03380939813943354 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.024922001168886345, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.024922001168886345 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.19170984455958548, + "acc_stderr": 0.028408953626245282, + "acc_norm": 0.19170984455958548, + "acc_norm_stderr": 0.028408953626245282 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.04049339297748141, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.04049339297748141 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.2036697247706422, + "acc_stderr": 0.01726674208763079, + "acc_norm": 0.2036697247706422, + "acc_norm_stderr": 0.01726674208763079 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.16666666666666666, + "acc_stderr": 0.03333333333333337, + "acc_norm": 0.16666666666666666, + 
"acc_norm_stderr": 0.03333333333333337 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.23202614379084968, + "acc_stderr": 0.02417084087934101, + "acc_norm": 0.23202614379084968, + "acc_norm_stderr": 0.02417084087934101 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2396694214876033, + "acc_stderr": 0.038968789850704164, + "acc_norm": 0.2396694214876033, + "acc_norm_stderr": 0.038968789850704164 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.21052631578947367, + "acc_stderr": 0.03317672787533157, + "acc_norm": 0.21052631578947367, + "acc_norm_stderr": 0.03317672787533157 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.25326797385620914, + "acc_stderr": 0.01759348689536683, + "acc_norm": 0.25326797385620914, + "acc_norm_stderr": 0.01759348689536683 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.23404255319148937, + "acc_stderr": 0.025257861359432414, + "acc_norm": 0.23404255319148937, + "acc_norm_stderr": 0.025257861359432414 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.38392857142857145, + "acc_stderr": 0.04616143075028546, + "acc_norm": 0.38392857142857145, + "acc_norm_stderr": 0.04616143075028546 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2361111111111111, + "acc_stderr": 0.028963702570791044, + "acc_norm": 0.2361111111111111, + "acc_norm_stderr": 0.028963702570791044 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23798882681564246, + "acc_stderr": 0.014242630070574892, + "acc_norm": 0.23798882681564246, + "acc_norm_stderr": 0.014242630070574892 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.32, + "acc_stderr": 
0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.18382352941176472, + "acc_stderr": 0.02352924218519311, + "acc_norm": 0.18382352941176472, + "acc_norm_stderr": 0.02352924218519311 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.18775510204081633, + "acc_stderr": 0.025000256039546198, + "acc_norm": 0.18775510204081633, + "acc_norm_stderr": 0.025000256039546198 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.270042194092827, + "acc_stderr": 0.028900721906293426, + "acc_norm": 0.270042194092827, + "acc_norm_stderr": 0.028900721906293426 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2457627118644068, + "acc_stderr": 0.010996156635142692, + "acc_norm": 0.2457627118644068, + "acc_norm_stderr": 0.010996156635142692 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.25, + "acc_stderr": 0.03039153369274154, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03039153369274154 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03225078108306289, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03225078108306289 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.25091799265605874, + "mc1_stderr": 0.01517698502770768, + "mc2": 0.5193766760114558, + "mc2_stderr": 0.01662534303925215 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.1381345926800472, + "acc_stderr": 0.011862760682025674, + "acc_norm": 0.3872491145218418, + "acc_norm_stderr": 0.01674757799164279 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + 
"harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + 
"harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "lemon-mint/gemma-ko-1.1-2b-it", + "model_sha": "556cdb395a5dd47505c21cde59474a61d2f17340", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/lemon-mint/gemma-ko-7b-instruct-v0.71/result_2024-04-09 02:46:45.json b/lemon-mint/gemma-ko-7b-instruct-v0.71/result_2024-04-09 02:46:45.json new file mode 100644 index 0000000000000000000000000000000000000000..26b5e7c8a6d72d5fa6fc53683dad74a424226ab2 --- /dev/null +++ b/lemon-mint/gemma-ko-7b-instruct-v0.71/result_2024-04-09 02:46:45.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.19197952218430034, + "acc_stderr": 0.011509598906598075, + "acc_norm": 0.24744027303754265, + "acc_norm_stderr": 0.01261035266329267 + }, + "harness|ko_hellaswag|10": { + "acc": 0.25632344154550885, + "acc_stderr": 0.004357101984278614, + "acc_norm": 0.25951005775741887, + "acc_norm_stderr": 0.004374699189284861 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.32748538011695905, + "acc_stderr": 0.035993357714560276, + "acc_norm": 0.32748538011695905, + "acc_norm_stderr": 0.035993357714560276 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.17475728155339806, + "acc_stderr": 0.037601780060266196, + "acc_norm": 0.17475728155339806, + "acc_norm_stderr": 0.037601780060266196 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.26436781609195403, + "acc_stderr": 0.01576998484069053, + "acc_norm": 0.26436781609195403, + "acc_norm_stderr": 0.01576998484069053 + }, + "harness|ko_mmlu_anatomy|5": { + 
"acc": 0.26666666666666666, + "acc_stderr": 0.038201699145179055, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.038201699145179055 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.225531914893617, + "acc_stderr": 0.027321078417387533, + "acc_norm": 0.225531914893617, + "acc_norm_stderr": 0.027321078417387533 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.28313253012048195, + "acc_stderr": 0.03507295431370518, + "acc_norm": 0.28313253012048195, + "acc_norm_stderr": 0.03507295431370518 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3054662379421222, + "acc_stderr": 0.026160584450140474, + "acc_norm": 0.3054662379421222, + "acc_norm_stderr": 0.026160584450140474 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3721973094170404, + "acc_stderr": 0.0324430528300873, + "acc_norm": 0.3721973094170404, + "acc_norm_stderr": 0.0324430528300873 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2595419847328244, + "acc_stderr": 0.03844876139785271, + "acc_norm": 0.2595419847328244, + "acc_norm_stderr": 0.03844876139785271 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.18181818181818182, + "acc_stderr": 0.0274796030105388, + "acc_norm": 0.18181818181818182, + "acc_norm_stderr": 0.0274796030105388 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2896551724137931, + "acc_stderr": 0.03780019230438015, + "acc_norm": 0.2896551724137931, + "acc_norm_stderr": 0.03780019230438015 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237654, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237654 + }, + 
"harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.21008403361344538, + "acc_stderr": 0.026461398717471874, + "acc_norm": 0.21008403361344538, + "acc_norm_stderr": 0.026461398717471874 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2230769230769231, + "acc_stderr": 0.02110773012724399, + "acc_norm": 0.2230769230769231, + "acc_norm_stderr": 0.02110773012724399 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.16, + "acc_stderr": 0.0368452949177471, + "acc_norm": 0.16, + "acc_norm_stderr": 0.0368452949177471 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.21296296296296297, + "acc_stderr": 0.03957835471980981, + "acc_norm": 0.21296296296296297, + "acc_norm_stderr": 0.03957835471980981 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.19704433497536947, + "acc_stderr": 0.02798672466673622, + "acc_norm": 0.19704433497536947, + "acc_norm_stderr": 0.02798672466673622 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.22258064516129034, + "acc_stderr": 0.023664216671642525, + "acc_norm": 0.22258064516129034, + "acc_norm_stderr": 0.023664216671642525 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.19230769230769232, + "acc_stderr": 0.025819233256483727, + "acc_norm": 0.19230769230769232, + "acc_norm_stderr": 0.025819233256483727 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2, + "acc_stderr": 0.02461829819586651, + "acc_norm": 0.2, + "acc_norm_stderr": 0.02461829819586651 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2, + "acc_stderr": 0.038313051408846006, + "acc_norm": 0.2, + "acc_norm_stderr": 0.038313051408846006 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25555555555555554, + "acc_stderr": 0.02659393910184407, + "acc_norm": 0.25555555555555554, + "acc_norm_stderr": 0.02659393910184407 + }, + 
"harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2119205298013245, + "acc_stderr": 0.03336767086567977, + "acc_norm": 0.2119205298013245, + "acc_norm_stderr": 0.03336767086567977 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.24875621890547264, + "acc_stderr": 0.030567675938916707, + "acc_norm": 0.24875621890547264, + "acc_norm_stderr": 0.030567675938916707 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.23121387283236994, + "acc_stderr": 0.03214737302029469, + "acc_norm": 0.23121387283236994, + "acc_norm_stderr": 0.03214737302029469 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.21693121693121692, + "acc_stderr": 0.02122708244944505, + "acc_norm": 0.21693121693121692, + "acc_norm_stderr": 0.02122708244944505 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.037455547914624555, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.037455547914624555 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.2, + "acc_stderr": 0.040201512610368445, + "acc_norm": 0.2, + "acc_norm_stderr": 0.040201512610368445 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909282, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909282 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.24855491329479767, + "acc_stderr": 0.023267528432100174, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 0.023267528432100174 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2085889570552147, + "acc_stderr": 0.03192193448934726, + "acc_norm": 0.2085889570552147, + "acc_norm_stderr": 0.03192193448934726 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.21604938271604937, + "acc_stderr": 0.022899162918445796, + "acc_norm": 0.21604938271604937, + "acc_norm_stderr": 0.022899162918445796 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.19, + "acc_stderr": 0.03942772444036622, + "acc_norm": 0.19, + "acc_norm_stderr": 0.03942772444036622 + }, 
+ "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.32124352331606215, + "acc_stderr": 0.033699508685490674, + "acc_norm": 0.32124352331606215, + "acc_norm_stderr": 0.033699508685490674 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.22807017543859648, + "acc_stderr": 0.03947152782669415, + "acc_norm": 0.22807017543859648, + "acc_norm_stderr": 0.03947152782669415 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.2018348623853211, + "acc_stderr": 0.017208579357787565, + "acc_norm": 0.2018348623853211, + "acc_norm_stderr": 0.017208579357787565 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.15079365079365079, + "acc_stderr": 0.03200686497287392, + "acc_norm": 0.15079365079365079, + "acc_norm_stderr": 0.03200686497287392 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.02392915551735129, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.02392915551735129 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.24793388429752067, + "acc_stderr": 0.03941897526516303, + "acc_norm": 0.24793388429752067, + "acc_norm_stderr": 0.03941897526516303 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.17763157894736842, + "acc_stderr": 0.03110318238312338, + "acc_norm": 0.17763157894736842, + "acc_norm_stderr": 0.03110318238312338 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.22712418300653595, + "acc_stderr": 0.01694985327921237, + "acc_norm": 0.22712418300653595, + "acc_norm_stderr": 0.01694985327921237 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.23404255319148937, + "acc_stderr": 0.025257861359432414, + "acc_norm": 0.23404255319148937, + "acc_norm_stderr": 0.025257861359432414 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2767857142857143, + "acc_stderr": 0.04246624336697624, 
+ "acc_norm": 0.2767857142857143, + "acc_norm_stderr": 0.04246624336697624 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.03388857118502325, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.03388857118502325 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2581005586592179, + "acc_stderr": 0.014635185616527817, + "acc_norm": 0.2581005586592179, + "acc_norm_stderr": 0.014635185616527817 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.375, + "acc_stderr": 0.029408372932278746, + "acc_norm": 0.375, + "acc_norm_stderr": 0.029408372932278746 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2693877551020408, + "acc_stderr": 0.02840125202902294, + "acc_norm": 0.2693877551020408, + "acc_norm_stderr": 0.02840125202902294 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.270042194092827, + "acc_stderr": 0.028900721906293426, + "acc_norm": 0.270042194092827, + "acc_norm_stderr": 0.028900721906293426 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.24771838331160365, + "acc_stderr": 0.011025499291443738, + "acc_norm": 0.24771838331160365, + "acc_norm_stderr": 0.011025499291443738 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.24019607843137256, + "acc_stderr": 0.02998373305591361, + "acc_norm": 0.24019607843137256, + "acc_norm_stderr": 0.02998373305591361 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2727272727272727, + "acc_stderr": 0.0347769116216366, + "acc_norm": 0.2727272727272727, + "acc_norm_stderr": 0.0347769116216366 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 
0.25703794369645044, + "mc1_stderr": 0.01529807750948508, + "mc2": NaN, + "mc2_stderr": NaN + }, + "harness|ko_commongen_v2|2": { + "acc": 0.12514757969303425, + "acc_stderr": 0.011376101146401418, + "acc_norm": 0.4025974025974026, + "acc_norm_stderr": 0.01686102048640778 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + 
"harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "lemon-mint/gemma-ko-7b-instruct-v0.71", + "model_sha": "974661727f0fd5497bbcd036e7f2783c62529bda", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/leo911kim/Exodia-7B/result_2023-10-19 12:01:38.json b/leo911kim/Exodia-7B/result_2023-10-19 12:01:38.json new file mode 100644 index 0000000000000000000000000000000000000000..dba9d0dda4d029d863e7a26c19049a34e47a60df --- /dev/null +++ b/leo911kim/Exodia-7B/result_2023-10-19 12:01:38.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.19027303754266212, + "acc_stderr": 0.011470424179225702, + "acc_norm": 0.2363481228668942, + "acc_norm_stderr": 0.012414960524301832 + }, + "harness|ko_hellaswag|10": { + "acc": 0.2703644692292372, + "acc_stderr": 0.004432403734882273, + "acc_norm": 0.2969527982473611, + "acc_norm_stderr": 
0.004559817589182076 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.22807017543859648, + "acc_stderr": 0.03218093795602357, + "acc_norm": 0.22807017543859648, + "acc_norm_stderr": 0.03218093795602357 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.1650485436893204, + "acc_stderr": 0.03675668832233188, + "acc_norm": 0.1650485436893204, + "acc_norm_stderr": 0.03675668832233188 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.27586206896551724, + "acc_stderr": 0.01598281477469563, + "acc_norm": 0.27586206896551724, + "acc_norm_stderr": 0.01598281477469563 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3037037037037037, + "acc_stderr": 0.03972552884785137, + "acc_norm": 0.3037037037037037, + "acc_norm_stderr": 0.03972552884785137 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421255, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421255 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.32340425531914896, + "acc_stderr": 0.030579442773610337, + "acc_norm": 0.32340425531914896, + "acc_norm_stderr": 0.030579442773610337 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.28313253012048195, + "acc_stderr": 0.03507295431370519, + "acc_norm": 0.28313253012048195, + "acc_norm_stderr": 0.03507295431370519 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2990353697749196, + "acc_stderr": 0.026003301117885142, + "acc_norm": 0.2990353697749196, + "acc_norm_stderr": 0.026003301117885142 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.29596412556053814, + "acc_stderr": 0.030636591348699817, + "acc_norm": 0.29596412556053814, + "acc_norm_stderr": 0.030636591348699817 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.24427480916030533, + "acc_stderr": 0.03768335959728744, + "acc_norm": 0.24427480916030533, + "acc_norm_stderr": 0.03768335959728744 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909282, + "acc_norm": 0.24, + "acc_norm_stderr": 
0.04292346959909282 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.20202020202020202, + "acc_stderr": 0.028606204289229872, + "acc_norm": 0.20202020202020202, + "acc_norm_stderr": 0.028606204289229872 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2413793103448276, + "acc_stderr": 0.03565998174135303, + "acc_norm": 0.2413793103448276, + "acc_norm_stderr": 0.03565998174135303 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.16666666666666666, + "acc_stderr": 0.03708284662416542, + "acc_norm": 0.16666666666666666, + "acc_norm_stderr": 0.03708284662416542 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.2773109243697479, + "acc_stderr": 0.02907937453948001, + "acc_norm": 0.2773109243697479, + "acc_norm_stderr": 0.02907937453948001 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2743589743589744, + "acc_stderr": 0.02262276576749322, + "acc_norm": 0.2743589743589744, + "acc_norm_stderr": 0.02262276576749322 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421296, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421296 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.2, + "acc_stderr": 0.040201512610368445, + "acc_norm": 0.2, + "acc_norm_stderr": 0.040201512610368445 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.2037037037037037, + "acc_stderr": 0.038935425188248475, + "acc_norm": 0.2037037037037037, + "acc_norm_stderr": 0.038935425188248475 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.29064039408866993, + "acc_stderr": 0.031947400722655395, + "acc_norm": 0.29064039408866993, + "acc_norm_stderr": 0.031947400722655395 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.26129032258064516, + "acc_stderr": 0.02499305339776482, + "acc_norm": 0.26129032258064516, + "acc_norm_stderr": 0.02499305339776482 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.32905982905982906, + "acc_stderr": 0.030782321577688163, 
+ "acc_norm": 0.32905982905982906, + "acc_norm_stderr": 0.030782321577688163 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.22264150943396227, + "acc_stderr": 0.025604233470899105, + "acc_norm": 0.22264150943396227, + "acc_norm_stderr": 0.025604233470899105 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03955932861795833, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03955932861795833 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.29259259259259257, + "acc_stderr": 0.027738969632176088, + "acc_norm": 0.29259259259259257, + "acc_norm_stderr": 0.027738969632176088 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2582781456953642, + "acc_stderr": 0.035737053147634576, + "acc_norm": 0.2582781456953642, + "acc_norm_stderr": 0.035737053147634576 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.24875621890547264, + "acc_stderr": 0.030567675938916718, + "acc_norm": 0.24875621890547264, + "acc_norm_stderr": 0.030567675938916718 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.17341040462427745, + "acc_stderr": 0.02886810787497064, + "acc_norm": 0.17341040462427745, + "acc_norm_stderr": 0.02886810787497064 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2275132275132275, + "acc_stderr": 0.021591269407823774, + "acc_norm": 0.2275132275132275, + "acc_norm_stderr": 0.021591269407823774 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2361111111111111, + "acc_stderr": 0.03551446610810826, + "acc_norm": 0.2361111111111111, + "acc_norm_stderr": 0.03551446610810826 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932269, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932269 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 
0.2138728323699422, + "acc_stderr": 0.022075709251757177, + "acc_norm": 0.2138728323699422, + "acc_norm_stderr": 0.022075709251757177 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.27607361963190186, + "acc_stderr": 0.0351238528370505, + "acc_norm": 0.27607361963190186, + "acc_norm_stderr": 0.0351238528370505 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.024383665531035457, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.024383665531035457 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.19689119170984457, + "acc_stderr": 0.028697873971860695, + "acc_norm": 0.19689119170984457, + "acc_norm_stderr": 0.028697873971860695 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.040969851398436716, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.040969851398436716 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.24954128440366974, + "acc_stderr": 0.018553897629501614, + "acc_norm": 0.24954128440366974, + "acc_norm_stderr": 0.018553897629501614 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.12698412698412698, + "acc_stderr": 0.029780417522688434, + "acc_norm": 0.12698412698412698, + "acc_norm_stderr": 0.029780417522688434 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.023929155517351294, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.023929155517351294 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.18, + "acc_stderr": 0.038612291966536955, + "acc_norm": 0.18, + "acc_norm_stderr": 0.038612291966536955 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2809917355371901, + "acc_stderr": 0.04103203830514512, + "acc_norm": 0.2809917355371901, + "acc_norm_stderr": 0.04103203830514512 + }, + 
"harness|ko_mmlu_astronomy|5": { + "acc": 0.19078947368421054, + "acc_stderr": 0.03197565821032501, + "acc_norm": 0.19078947368421054, + "acc_norm_stderr": 0.03197565821032501 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2369281045751634, + "acc_stderr": 0.017201662169789796, + "acc_norm": 0.2369281045751634, + "acc_norm_stderr": 0.017201662169789796 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2624113475177305, + "acc_stderr": 0.02624492034984301, + "acc_norm": 0.2624113475177305, + "acc_norm_stderr": 0.02624492034984301 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.21428571428571427, + "acc_stderr": 0.03894641120044792, + "acc_norm": 0.21428571428571427, + "acc_norm_stderr": 0.03894641120044792 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.44907407407407407, + "acc_stderr": 0.03392238405321617, + "acc_norm": 0.44907407407407407, + "acc_norm_stderr": 0.03392238405321617 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.44485294117647056, + "acc_stderr": 0.03018753206032938, + "acc_norm": 0.44485294117647056, + "acc_norm_stderr": 0.03018753206032938 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2612244897959184, + "acc_stderr": 0.028123429335142773, + "acc_norm": 0.2612244897959184, + "acc_norm_stderr": 0.028123429335142773 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.24050632911392406, + "acc_stderr": 0.027820781981149685, + 
"acc_norm": 0.24050632911392406, + "acc_norm_stderr": 0.027820781981149685 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.227509778357236, + "acc_stderr": 0.010707188576864226, + "acc_norm": 0.227509778357236, + "acc_norm_stderr": 0.010707188576864226 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.030190282453501936, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.030190282453501936 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03225078108306289, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03225078108306289 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2668298653610771, + "mc1_stderr": 0.015483691939237269, + "mc2": 0.42019223039185516, + "mc2_stderr": 0.01650268606738961 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.21959858323494688, + "acc_stderr": 0.01423274308558026, + "acc_norm": 0.31641086186540734, + "acc_norm_stderr": 0.015989617951065474 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + 
"harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "leo911kim/Exodia-7B", + "model_sha": "b3e1f98b934da7498bb18ce0cb9e0fc857593656", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, 
+ "max_samples": null + } +} \ No newline at end of file diff --git a/leo911kim/Exodia-kor-7B-v2/result_2023-10-19 23:16:19.json b/leo911kim/Exodia-kor-7B-v2/result_2023-10-19 23:16:19.json new file mode 100644 index 0000000000000000000000000000000000000000..a5aeac4d4a8a5709c90c852e418008a82db4930c --- /dev/null +++ b/leo911kim/Exodia-kor-7B-v2/result_2023-10-19 23:16:19.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2568259385665529, + "acc_stderr": 0.0127669237941168, + "acc_norm": 0.3122866894197952, + "acc_norm_stderr": 0.013542598541688065 + }, + "harness|ko_hellaswag|10": { + "acc": 0.344353714399522, + "acc_stderr": 0.004741859753178411, + "acc_norm": 0.4522007568213503, + "acc_norm_stderr": 0.004966928094797574 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.3391812865497076, + "acc_stderr": 0.03631053496488905, + "acc_norm": 0.3391812865497076, + "acc_norm_stderr": 0.03631053496488905 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.21359223300970873, + "acc_stderr": 0.04058042015646034, + "acc_norm": 0.21359223300970873, + "acc_norm_stderr": 0.04058042015646034 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3524904214559387, + "acc_stderr": 0.017084150244081376, + "acc_norm": 0.3524904214559387, + "acc_norm_stderr": 0.017084150244081376 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3111111111111111, + "acc_stderr": 0.03999262876617723, + "acc_norm": 0.3111111111111111, + "acc_norm_stderr": 0.03999262876617723 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.28085106382978725, + "acc_stderr": 0.029379170464124818, + "acc_norm": 0.28085106382978725, + "acc_norm_stderr": 0.029379170464124818 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.29518072289156627, + "acc_stderr": 0.0355092018568963, + "acc_norm": 0.29518072289156627, 
+ "acc_norm_stderr": 0.0355092018568963 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3633440514469453, + "acc_stderr": 0.02731684767419272, + "acc_norm": 0.3633440514469453, + "acc_norm_stderr": 0.02731684767419272 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3542600896860987, + "acc_stderr": 0.03210062154134987, + "acc_norm": 0.3542600896860987, + "acc_norm_stderr": 0.03210062154134987 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.40458015267175573, + "acc_stderr": 0.043046937953806645, + "acc_norm": 0.40458015267175573, + "acc_norm_stderr": 0.043046937953806645 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3484848484848485, + "acc_stderr": 0.033948539651564025, + "acc_norm": 0.3484848484848485, + "acc_norm_stderr": 0.033948539651564025 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4068965517241379, + "acc_stderr": 0.04093793981266237, + "acc_norm": 0.4068965517241379, + "acc_norm_stderr": 0.04093793981266237 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237654, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237654 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3697478991596639, + "acc_stderr": 0.031357095996135904, + "acc_norm": 0.3697478991596639, + "acc_norm_stderr": 0.031357095996135904 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.023901157979402544, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.023901157979402544 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036846, + 
"acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036846 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4537037037037037, + "acc_stderr": 0.04812917324536821, + "acc_norm": 0.4537037037037037, + "acc_norm_stderr": 0.04812917324536821 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2413793103448276, + "acc_stderr": 0.03010833071801162, + "acc_norm": 0.2413793103448276, + "acc_norm_stderr": 0.03010833071801162 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.34516129032258064, + "acc_stderr": 0.027045746573534323, + "acc_norm": 0.34516129032258064, + "acc_norm_stderr": 0.027045746573534323 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.4700854700854701, + "acc_stderr": 0.03269741106812444, + "acc_norm": 0.4700854700854701, + "acc_norm_stderr": 0.03269741106812444 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.30943396226415093, + "acc_stderr": 0.028450154794118627, + "acc_norm": 0.30943396226415093, + "acc_norm_stderr": 0.028450154794118627 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2818181818181818, + "acc_stderr": 0.04309118709946459, + "acc_norm": 0.2818181818181818, + "acc_norm_stderr": 0.04309118709946459 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.23333333333333334, + "acc_stderr": 0.02578787422095932, + "acc_norm": 0.23333333333333334, + "acc_norm_stderr": 0.02578787422095932 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.038020397601079024, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.038020397601079024 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.4228855721393035, + "acc_stderr": 0.03493231777421282, + "acc_norm": 0.4228855721393035, + "acc_norm_stderr": 0.03493231777421282 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2774566473988439, + "acc_stderr": 0.034140140070440354, + "acc_norm": 0.2774566473988439, + "acc_norm_stderr": 0.034140140070440354 + }, + 
"harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.022569897074918424, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.022569897074918424 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.03745554791462457, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.03745554791462457 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.37572254335260113, + "acc_stderr": 0.02607431485165708, + "acc_norm": 0.37572254335260113, + "acc_norm_stderr": 0.02607431485165708 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.26380368098159507, + "acc_stderr": 0.03462419931615624, + "acc_norm": 0.26380368098159507, + "acc_norm_stderr": 0.03462419931615624 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3487654320987654, + "acc_stderr": 0.02651759772446501, + "acc_norm": 0.3487654320987654, + "acc_norm_stderr": 0.02651759772446501 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.38341968911917096, + "acc_stderr": 0.03508984236295341, + "acc_norm": 0.38341968911917096, + "acc_norm_stderr": 0.03508984236295341 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.04227054451232199, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.04227054451232199 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3651376146788991, + "acc_stderr": 0.020642801454384005, + "acc_norm": 0.3651376146788991, + "acc_norm_stderr": 
0.020642801454384005 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.21428571428571427, + "acc_stderr": 0.03670066451047181, + "acc_norm": 0.21428571428571427, + "acc_norm_stderr": 0.03670066451047181 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.40522875816993464, + "acc_stderr": 0.028110928492809075, + "acc_norm": 0.40522875816993464, + "acc_norm_stderr": 0.028110928492809075 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.4380165289256198, + "acc_stderr": 0.04529146804435792, + "acc_norm": 0.4380165289256198, + "acc_norm_stderr": 0.04529146804435792 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.24342105263157895, + "acc_stderr": 0.034923496688842384, + "acc_norm": 0.24342105263157895, + "acc_norm_stderr": 0.034923496688842384 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.28921568627450983, + "acc_stderr": 0.018342529845275908, + "acc_norm": 0.28921568627450983, + "acc_norm_stderr": 0.018342529845275908 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.30851063829787234, + "acc_stderr": 0.027553366165101366, + "acc_norm": 0.30851063829787234, + "acc_norm_stderr": 0.027553366165101366 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25892857142857145, + "acc_stderr": 0.04157751539865629, + "acc_norm": 0.25892857142857145, + "acc_norm_stderr": 0.04157751539865629 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.03350991604696042, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.03350991604696042 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2245810055865922, + "acc_stderr": 0.01395680366654464, + "acc_norm": 0.2245810055865922, + "acc_norm_stderr": 0.01395680366654464 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.29, + "acc_stderr": 
0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4375, + "acc_stderr": 0.030134614954403924, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.030134614954403924 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3306122448979592, + "acc_stderr": 0.030116426296540613, + "acc_norm": 0.3306122448979592, + "acc_norm_stderr": 0.030116426296540613 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.350210970464135, + "acc_stderr": 0.03105239193758435, + "acc_norm": 0.350210970464135, + "acc_norm_stderr": 0.03105239193758435 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.288135593220339, + "acc_stderr": 0.011567140661324561, + "acc_norm": 0.288135593220339, + "acc_norm_stderr": 0.011567140661324561 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.29901960784313725, + "acc_stderr": 0.03213325717373617, + "acc_norm": 0.29901960784313725, + "acc_norm_stderr": 0.03213325717373617 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.24848484848484848, + "acc_stderr": 0.03374402644139404, + "acc_norm": 0.24848484848484848, + "acc_norm_stderr": 0.03374402644139404 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.23990208078335373, + "mc1_stderr": 0.01494881267906214, + "mc2": 0.3833926324458877, + "mc2_stderr": 0.015094351709331206 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.19952774498229045, + "acc_stderr": 0.01374009094762133, + "acc_norm": 0.3022432113341204, + "acc_norm_stderr": 0.01578865486302237 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + 
"harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + 
"harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "leo911kim/Exodia-kor-7B-v2", + "model_sha": "f759698eb4ddc2b9afa9d234ee130e10ce92a61a", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/leo911kim/Exodia-kor-7b-v2/result_2023-10-19 16:35:19.json b/leo911kim/Exodia-kor-7b-v2/result_2023-10-19 16:35:19.json new file mode 100644 index 0000000000000000000000000000000000000000..6cc628a9b59c05cfe6139937019b895ee3fc125f --- /dev/null +++ b/leo911kim/Exodia-kor-7b-v2/result_2023-10-19 16:35:19.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2568259385665529, + "acc_stderr": 0.0127669237941168, + "acc_norm": 0.3122866894197952, + "acc_norm_stderr": 0.013542598541688065 + }, + "harness|ko_hellaswag|10": { + "acc": 0.344353714399522, + "acc_stderr": 0.004741859753178411, + "acc_norm": 0.4522007568213503, + "acc_norm_stderr": 0.004966928094797574 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.3391812865497076, + "acc_stderr": 0.03631053496488905, + "acc_norm": 0.3391812865497076, + "acc_norm_stderr": 0.03631053496488905 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.21359223300970873, + "acc_stderr": 0.04058042015646034, + "acc_norm": 0.21359223300970873, + "acc_norm_stderr": 0.04058042015646034 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3524904214559387, + 
"acc_stderr": 0.017084150244081376, + "acc_norm": 0.3524904214559387, + "acc_norm_stderr": 0.017084150244081376 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3111111111111111, + "acc_stderr": 0.03999262876617723, + "acc_norm": 0.3111111111111111, + "acc_norm_stderr": 0.03999262876617723 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.28085106382978725, + "acc_stderr": 0.029379170464124818, + "acc_norm": 0.28085106382978725, + "acc_norm_stderr": 0.029379170464124818 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.29518072289156627, + "acc_stderr": 0.0355092018568963, + "acc_norm": 0.29518072289156627, + "acc_norm_stderr": 0.0355092018568963 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3633440514469453, + "acc_stderr": 0.02731684767419272, + "acc_norm": 0.3633440514469453, + "acc_norm_stderr": 0.02731684767419272 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3542600896860987, + "acc_stderr": 0.03210062154134987, + "acc_norm": 0.3542600896860987, + "acc_norm_stderr": 0.03210062154134987 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.40458015267175573, + "acc_stderr": 0.043046937953806645, + "acc_norm": 0.40458015267175573, + "acc_norm_stderr": 0.043046937953806645 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3484848484848485, + "acc_stderr": 0.033948539651564025, + "acc_norm": 0.3484848484848485, + "acc_norm_stderr": 0.033948539651564025 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4068965517241379, + "acc_stderr": 0.04093793981266237, + "acc_norm": 0.4068965517241379, + "acc_norm_stderr": 0.04093793981266237 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 
0.21568627450980393, + "acc_stderr": 0.04092563958237654, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237654 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3697478991596639, + "acc_stderr": 0.031357095996135904, + "acc_norm": 0.3697478991596639, + "acc_norm_stderr": 0.031357095996135904 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.023901157979402544, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.023901157979402544 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036846, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036846 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4537037037037037, + "acc_stderr": 0.04812917324536821, + "acc_norm": 0.4537037037037037, + "acc_norm_stderr": 0.04812917324536821 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2413793103448276, + "acc_stderr": 0.03010833071801162, + "acc_norm": 0.2413793103448276, + "acc_norm_stderr": 0.03010833071801162 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.34516129032258064, + "acc_stderr": 0.027045746573534323, + "acc_norm": 0.34516129032258064, + "acc_norm_stderr": 0.027045746573534323 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.4700854700854701, + "acc_stderr": 0.03269741106812444, + "acc_norm": 0.4700854700854701, + "acc_norm_stderr": 0.03269741106812444 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.30943396226415093, + "acc_stderr": 0.028450154794118627, + "acc_norm": 0.30943396226415093, + "acc_norm_stderr": 0.028450154794118627 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2818181818181818, + "acc_stderr": 0.04309118709946459, + "acc_norm": 0.2818181818181818, + "acc_norm_stderr": 0.04309118709946459 + }, + 
"harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.23333333333333334, + "acc_stderr": 0.02578787422095932, + "acc_norm": 0.23333333333333334, + "acc_norm_stderr": 0.02578787422095932 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.038020397601079024, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.038020397601079024 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.4228855721393035, + "acc_stderr": 0.03493231777421282, + "acc_norm": 0.4228855721393035, + "acc_norm_stderr": 0.03493231777421282 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2774566473988439, + "acc_stderr": 0.034140140070440354, + "acc_norm": 0.2774566473988439, + "acc_norm_stderr": 0.034140140070440354 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.022569897074918424, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.022569897074918424 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.03745554791462457, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.03745554791462457 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.37572254335260113, + "acc_stderr": 0.02607431485165708, + "acc_norm": 0.37572254335260113, + "acc_norm_stderr": 0.02607431485165708 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.26380368098159507, + "acc_stderr": 0.03462419931615624, + "acc_norm": 0.26380368098159507, + "acc_norm_stderr": 0.03462419931615624 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3487654320987654, + "acc_stderr": 0.02651759772446501, + "acc_norm": 0.3487654320987654, + 
"acc_norm_stderr": 0.02651759772446501 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.38341968911917096, + "acc_stderr": 0.03508984236295341, + "acc_norm": 0.38341968911917096, + "acc_norm_stderr": 0.03508984236295341 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.04227054451232199, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.04227054451232199 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3651376146788991, + "acc_stderr": 0.020642801454384005, + "acc_norm": 0.3651376146788991, + "acc_norm_stderr": 0.020642801454384005 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.21428571428571427, + "acc_stderr": 0.03670066451047181, + "acc_norm": 0.21428571428571427, + "acc_norm_stderr": 0.03670066451047181 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.40522875816993464, + "acc_stderr": 0.028110928492809075, + "acc_norm": 0.40522875816993464, + "acc_norm_stderr": 0.028110928492809075 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.4380165289256198, + "acc_stderr": 0.04529146804435792, + "acc_norm": 0.4380165289256198, + "acc_norm_stderr": 0.04529146804435792 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.24342105263157895, + "acc_stderr": 0.034923496688842384, + "acc_norm": 0.24342105263157895, + "acc_norm_stderr": 0.034923496688842384 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.28921568627450983, + "acc_stderr": 0.018342529845275908, + "acc_norm": 0.28921568627450983, + "acc_norm_stderr": 0.018342529845275908 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.30851063829787234, + "acc_stderr": 
0.027553366165101366, + "acc_norm": 0.30851063829787234, + "acc_norm_stderr": 0.027553366165101366 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25892857142857145, + "acc_stderr": 0.04157751539865629, + "acc_norm": 0.25892857142857145, + "acc_norm_stderr": 0.04157751539865629 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.03350991604696042, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.03350991604696042 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2245810055865922, + "acc_stderr": 0.01395680366654464, + "acc_norm": 0.2245810055865922, + "acc_norm_stderr": 0.01395680366654464 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4375, + "acc_stderr": 0.030134614954403924, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.030134614954403924 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3306122448979592, + "acc_stderr": 0.030116426296540613, + "acc_norm": 0.3306122448979592, + "acc_norm_stderr": 0.030116426296540613 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.350210970464135, + "acc_stderr": 0.03105239193758435, + "acc_norm": 0.350210970464135, + "acc_norm_stderr": 0.03105239193758435 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.288135593220339, + "acc_stderr": 0.011567140661324561, + "acc_norm": 0.288135593220339, + "acc_norm_stderr": 0.011567140661324561 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.29901960784313725, + "acc_stderr": 0.03213325717373617, + "acc_norm": 0.29901960784313725, + "acc_norm_stderr": 0.03213325717373617 + }, + 
"harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.24848484848484848, + "acc_stderr": 0.03374402644139404, + "acc_norm": 0.24848484848484848, + "acc_norm_stderr": 0.03374402644139404 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.23990208078335373, + "mc1_stderr": 0.01494881267906214, + "mc2": 0.3833926324458877, + "mc2_stderr": 0.015094351709331206 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.19952774498229045, + "acc_stderr": 0.01374009094762133, + "acc_norm": 0.3022432113341204, + "acc_norm_stderr": 0.01578865486302237 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + 
"harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "leo911kim/Exodia-kor-7b-v2", + "model_sha": "f759698eb4ddc2b9afa9d234ee130e10ce92a61a", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/leo911kim/Exodia-kor-7b/result_2023-10-19 10:10:30.json b/leo911kim/Exodia-kor-7b/result_2023-10-19 10:10:30.json new file mode 100644 index 0000000000000000000000000000000000000000..a40d86080c7cb1c68f7aa2219165356298c1b73c --- /dev/null +++ b/leo911kim/Exodia-kor-7b/result_2023-10-19 10:10:30.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2636518771331058, + 
"acc_stderr": 0.012875929151297066, + "acc_norm": 0.3370307167235495, + "acc_norm_stderr": 0.01381347665290227 + }, + "harness|ko_hellaswag|10": { + "acc": 0.35331607249551883, + "acc_stderr": 0.004770229206838891, + "acc_norm": 0.4847639912368054, + "acc_norm_stderr": 0.004987464257999312 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.34502923976608185, + "acc_stderr": 0.036459813773888065, + "acc_norm": 0.34502923976608185, + "acc_norm_stderr": 0.036459813773888065 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.20388349514563106, + "acc_stderr": 0.0398913985953177, + "acc_norm": 0.20388349514563106, + "acc_norm_stderr": 0.0398913985953177 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.26053639846743293, + "acc_stderr": 0.015696008563807096, + "acc_norm": 0.26053639846743293, + "acc_norm_stderr": 0.015696008563807096 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.21481481481481482, + "acc_stderr": 0.035478541985608236, + "acc_norm": 0.21481481481481482, + "acc_norm_stderr": 0.035478541985608236 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.26382978723404255, + "acc_stderr": 0.028809989854102987, + "acc_norm": 0.26382978723404255, + "acc_norm_stderr": 0.028809989854102987 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.2710843373493976, + "acc_stderr": 0.03460579907553027, + "acc_norm": 0.2710843373493976, + "acc_norm_stderr": 0.03460579907553027 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.21221864951768488, + "acc_stderr": 0.023222756797435122, + "acc_norm": 0.21221864951768488, + "acc_norm_stderr": 0.023222756797435122 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3542600896860987, + "acc_stderr": 0.03210062154134987, + "acc_norm": 0.3542600896860987, + "acc_norm_stderr": 0.03210062154134987 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 
0.2595419847328244, + "acc_stderr": 0.03844876139785271, + "acc_norm": 0.2595419847328244, + "acc_norm_stderr": 0.03844876139785271 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621503, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621503 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.20707070707070707, + "acc_stderr": 0.028869778460267042, + "acc_norm": 0.20707070707070707, + "acc_norm_stderr": 0.028869778460267042 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2413793103448276, + "acc_stderr": 0.03565998174135302, + "acc_norm": 0.2413793103448276, + "acc_norm_stderr": 0.03565998174135302 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237654, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237654 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.2689075630252101, + "acc_stderr": 0.028801392193631273, + "acc_norm": 0.2689075630252101, + "acc_norm_stderr": 0.028801392193631273 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2205128205128205, + "acc_stderr": 0.02102067268082791, + "acc_norm": 0.2205128205128205, + "acc_norm_stderr": 0.02102067268082791 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.18, + "acc_stderr": 0.038612291966536955, + "acc_norm": 0.18, + "acc_norm_stderr": 0.038612291966536955 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.32407407407407407, + "acc_stderr": 0.04524596007030049, + "acc_norm": 0.32407407407407407, + "acc_norm_stderr": 0.04524596007030049 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.15270935960591134, + "acc_stderr": 0.025308904539380627, + "acc_norm": 0.15270935960591134, + "acc_norm_stderr": 0.025308904539380627 + }, + 
"harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2161290322580645, + "acc_stderr": 0.023415293433568525, + "acc_norm": 0.2161290322580645, + "acc_norm_stderr": 0.023415293433568525 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.37606837606837606, + "acc_stderr": 0.031733936329694824, + "acc_norm": 0.37606837606837606, + "acc_norm_stderr": 0.031733936329694824 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.21509433962264152, + "acc_stderr": 0.025288394502891377, + "acc_norm": 0.21509433962264152, + "acc_norm_stderr": 0.025288394502891377 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.24545454545454545, + "acc_stderr": 0.041220665028782834, + "acc_norm": 0.24545454545454545, + "acc_norm_stderr": 0.041220665028782834 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2074074074074074, + "acc_stderr": 0.024720713193952172, + "acc_norm": 0.2074074074074074, + "acc_norm_stderr": 0.024720713193952172 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2185430463576159, + "acc_stderr": 0.03374235550425694, + "acc_norm": 0.2185430463576159, + "acc_norm_stderr": 0.03374235550425694 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.2885572139303483, + "acc_stderr": 0.0320384104021332, + "acc_norm": 0.2885572139303483, + "acc_norm_stderr": 0.0320384104021332 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.20809248554913296, + "acc_stderr": 0.0309528902177499, + "acc_norm": 0.20809248554913296, + "acc_norm_stderr": 0.0309528902177499 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.20899470899470898, + "acc_stderr": 0.020940481565334835, + "acc_norm": 0.20899470899470898, + "acc_norm_stderr": 0.020940481565334835 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2708333333333333, + "acc_stderr": 0.03716177437566017, + "acc_norm": 0.2708333333333333, + "acc_norm_stderr": 0.03716177437566017 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.2, + "acc_stderr": 0.040201512610368445, + 
"acc_norm": 0.2, + "acc_norm_stderr": 0.040201512610368445 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.27167630057803466, + "acc_stderr": 0.023948512905468348, + "acc_norm": 0.27167630057803466, + "acc_norm_stderr": 0.023948512905468348 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.24539877300613497, + "acc_stderr": 0.03380939813943354, + "acc_norm": 0.24539877300613497, + "acc_norm_stderr": 0.03380939813943354 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.25308641975308643, + "acc_stderr": 0.024191808600713, + "acc_norm": 0.25308641975308643, + "acc_norm_stderr": 0.024191808600713 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.21761658031088082, + "acc_stderr": 0.029778663037752943, + "acc_norm": 0.21761658031088082, + "acc_norm_stderr": 0.029778663037752943 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.04142439719489362, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.04142439719489362 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.23486238532110093, + "acc_stderr": 0.018175110510343585, + "acc_norm": 0.23486238532110093, + "acc_norm_stderr": 0.018175110510343585 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.31746031746031744, + "acc_stderr": 0.04163453031302859, + "acc_norm": 0.31746031746031744, + "acc_norm_stderr": 0.04163453031302859 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2875816993464052, + "acc_stderr": 0.02591780611714716, + "acc_norm": 0.2875816993464052, + "acc_norm_stderr": 0.02591780611714716 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.33, + "acc_stderr": 
0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.30578512396694213, + "acc_stderr": 0.04205953933884122, + "acc_norm": 0.30578512396694213, + "acc_norm_stderr": 0.04205953933884122 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.17763157894736842, + "acc_stderr": 0.03110318238312338, + "acc_norm": 0.17763157894736842, + "acc_norm_stderr": 0.03110318238312338 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2696078431372549, + "acc_stderr": 0.017952449196987866, + "acc_norm": 0.2696078431372549, + "acc_norm_stderr": 0.017952449196987866 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.23049645390070922, + "acc_stderr": 0.025123739226872405, + "acc_norm": 0.23049645390070922, + "acc_norm_stderr": 0.025123739226872405 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3125, + "acc_stderr": 0.043994650575715215, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.043994650575715215 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.17592592592592593, + "acc_stderr": 0.025967420958258526, + "acc_norm": 0.17592592592592593, + "acc_norm_stderr": 0.025967420958258526 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23798882681564246, + "acc_stderr": 0.014242630070574892, + "acc_norm": 0.23798882681564246, + "acc_norm_stderr": 0.014242630070574892 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.024562204314142314, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.024562204314142314 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 
0.24489795918367346, + "acc_stderr": 0.027529637440174913, + "acc_norm": 0.24489795918367346, + "acc_norm_stderr": 0.027529637440174913 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.31645569620253167, + "acc_stderr": 0.030274974880218974, + "acc_norm": 0.31645569620253167, + "acc_norm_stderr": 0.030274974880218974 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2588005215123859, + "acc_stderr": 0.01118610904656461, + "acc_norm": 0.2588005215123859, + "acc_norm_stderr": 0.01118610904656461 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.27941176470588236, + "acc_stderr": 0.031493281045079556, + "acc_norm": 0.27941176470588236, + "acc_norm_stderr": 0.031493281045079556 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.23030303030303031, + "acc_stderr": 0.032876667586034886, + "acc_norm": 0.23030303030303031, + "acc_norm_stderr": 0.032876667586034886 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24724602203182375, + "mc1_stderr": 0.01510240479735965, + "mc2": 0.39305582191498534, + "mc2_stderr": 0.015037592654153921 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.21133412042502953, + "acc_stderr": 0.014036090342930314, + "acc_norm": 0.3010625737898465, + "acc_norm_stderr": 0.015771113299945457 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + 
"harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + 
"harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "leo911kim/Exodia-kor-7b", + "model_sha": "dfb83ef9894aadda3301f98602d4d45cfd19c192", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/lifelongeek/ko-7b-ins/result_2023-10-13 13:58:31.json b/lifelongeek/ko-7b-ins/result_2023-10-13 13:58:31.json new file mode 100644 index 0000000000000000000000000000000000000000..e8dd984ed86a1800b1746ee48283426459a63d3c --- /dev/null +++ b/lifelongeek/ko-7b-ins/result_2023-10-13 13:58:31.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.20051194539249148, + "acc_stderr": 0.01170031805049937, + "acc_norm": 0.2363481228668942, + "acc_norm_stderr": 0.012414960524301811 + }, + "harness|ko_hellaswag|10": { + "acc": 0.2500497908783111, + "acc_stderr": 0.004321564303822431, + "acc_norm": 0.246265684126668, + "acc_norm_stderr": 0.004299546103761434 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.3216374269005848, + "acc_stderr": 0.03582529442573122, + "acc_norm": 0.3216374269005848, + "acc_norm_stderr": 0.03582529442573122 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.17475728155339806, + "acc_stderr": 0.037601780060266196, + "acc_norm": 0.17475728155339806, + "acc_norm_stderr": 0.037601780060266196 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.23754789272030652, + "acc_stderr": 0.015218733046150191, + "acc_norm": 0.23754789272030652, + "acc_norm_stderr": 0.015218733046150191 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.18518518518518517, + "acc_stderr": 0.03355677216313142, + "acc_norm": 0.18518518518518517, + "acc_norm_stderr": 0.03355677216313142 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932268, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932268 + }, + 
"harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.26382978723404255, + "acc_stderr": 0.028809989854102987, + "acc_norm": 0.26382978723404255, + "acc_norm_stderr": 0.028809989854102987 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.28313253012048195, + "acc_stderr": 0.03507295431370518, + "acc_norm": 0.28313253012048195, + "acc_norm_stderr": 0.03507295431370518 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.1864951768488746, + "acc_stderr": 0.022122439772480757, + "acc_norm": 0.1864951768488746, + "acc_norm_stderr": 0.022122439772480757 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.31390134529147984, + "acc_stderr": 0.031146796482972465, + "acc_norm": 0.31390134529147984, + "acc_norm_stderr": 0.031146796482972465 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2595419847328244, + "acc_stderr": 0.03844876139785271, + "acc_norm": 0.2595419847328244, + "acc_norm_stderr": 0.03844876139785271 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.17676767676767677, + "acc_stderr": 0.027178752639044915, + "acc_norm": 0.17676767676767677, + "acc_norm_stderr": 0.027178752639044915 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2413793103448276, + "acc_stderr": 0.03565998174135302, + "acc_norm": 0.2413793103448276, + "acc_norm_stderr": 0.03565998174135302 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237654, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237654 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.21008403361344538, + "acc_stderr": 0.026461398717471874, + "acc_norm": 0.21008403361344538, + "acc_norm_stderr": 0.026461398717471874 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.20256410256410257, + "acc_stderr": 0.020377660970371393, + 
"acc_norm": 0.20256410256410257, + "acc_norm_stderr": 0.020377660970371393 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.18, + "acc_stderr": 0.038612291966536955, + "acc_norm": 0.18, + "acc_norm_stderr": 0.038612291966536955 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.04236511258094633, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.04236511258094633 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.15270935960591134, + "acc_stderr": 0.025308904539380627, + "acc_norm": 0.15270935960591134, + "acc_norm_stderr": 0.025308904539380627 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.1774193548387097, + "acc_stderr": 0.021732540689329276, + "acc_norm": 0.1774193548387097, + "acc_norm_stderr": 0.021732540689329276 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2905982905982906, + "acc_stderr": 0.029745048572674057, + "acc_norm": 0.2905982905982906, + "acc_norm_stderr": 0.029745048572674057 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.21509433962264152, + "acc_stderr": 0.025288394502891377, + "acc_norm": 0.21509433962264152, + "acc_norm_stderr": 0.025288394502891377 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03955932861795833, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03955932861795833 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2111111111111111, + "acc_stderr": 0.024882116857655075, + "acc_norm": 0.2111111111111111, + "acc_norm_stderr": 0.024882116857655075 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.1986754966887417, + "acc_stderr": 0.03257847384436775, + "acc_norm": 0.1986754966887417, + "acc_norm_stderr": 0.03257847384436775 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.24378109452736318, + 
"acc_stderr": 0.030360490154014645, + "acc_norm": 0.24378109452736318, + "acc_norm_stderr": 0.030360490154014645 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.20809248554913296, + "acc_stderr": 0.030952890217749884, + "acc_norm": 0.20809248554913296, + "acc_norm_stderr": 0.030952890217749884 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.20899470899470898, + "acc_stderr": 0.020940481565334835, + "acc_norm": 0.20899470899470898, + "acc_norm_stderr": 0.020940481565334835 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2569444444444444, + "acc_stderr": 0.03653946969442099, + "acc_norm": 0.2569444444444444, + "acc_norm_stderr": 0.03653946969442099 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.2, + "acc_stderr": 0.040201512610368445, + "acc_norm": 0.2, + "acc_norm_stderr": 0.040201512610368445 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542129, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542129 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.24855491329479767, + "acc_stderr": 0.023267528432100174, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 0.023267528432100174 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.22085889570552147, + "acc_stderr": 0.03259177392742178, + "acc_norm": 0.22085889570552147, + "acc_norm_stderr": 0.03259177392742178 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.21604938271604937, + "acc_stderr": 0.022899162918445796, + "acc_norm": 0.21604938271604937, + "acc_norm_stderr": 0.022899162918445796 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.19689119170984457, + "acc_stderr": 0.028697873971860677, + "acc_norm": 0.19689119170984457, + "acc_norm_stderr": 0.028697873971860677 + }, + "harness|ko_mmlu_econometrics|5": { + 
"acc": 0.23684210526315788, + "acc_stderr": 0.039994238792813365, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.039994238792813365 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.1926605504587156, + "acc_stderr": 0.016909276884936097, + "acc_norm": 0.1926605504587156, + "acc_norm_stderr": 0.016909276884936097 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.04040610178208841, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.04040610178208841 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.02392915551735129, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.02392915551735129 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2396694214876033, + "acc_stderr": 0.038968789850704164, + "acc_norm": 0.2396694214876033, + "acc_norm_stderr": 0.038968789850704164 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.17763157894736842, + "acc_stderr": 0.03110318238312338, + "acc_norm": 0.17763157894736842, + "acc_norm_stderr": 0.03110318238312338 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.25, + "acc_stderr": 0.01751781884501444, + "acc_norm": 0.25, + "acc_norm_stderr": 0.01751781884501444 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.23404255319148937, + "acc_stderr": 0.025257861359432414, + "acc_norm": 0.23404255319148937, + "acc_norm_stderr": 0.025257861359432414 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3125, + "acc_stderr": 0.043994650575715215, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.043994650575715215 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.1527777777777778, + "acc_stderr": 0.024536326026134238, + "acc_norm": 0.1527777777777778, + "acc_norm_stderr": 0.024536326026134238 + }, + 
"harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23798882681564246, + "acc_stderr": 0.014242630070574892, + "acc_norm": 0.23798882681564246, + "acc_norm_stderr": 0.014242630070574892 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.18382352941176472, + "acc_stderr": 0.02352924218519311, + "acc_norm": 0.18382352941176472, + "acc_norm_stderr": 0.02352924218519311 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.18775510204081633, + "acc_stderr": 0.025000256039546198, + "acc_norm": 0.18775510204081633, + "acc_norm_stderr": 0.025000256039546198 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.270042194092827, + "acc_stderr": 0.028900721906293426, + "acc_norm": 0.270042194092827, + "acc_norm_stderr": 0.028900721906293426 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2457627118644068, + "acc_stderr": 0.010996156635142692, + "acc_norm": 0.2457627118644068, + "acc_norm_stderr": 0.010996156635142692 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.25, + "acc_stderr": 0.03039153369274154, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03039153369274154 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03225078108306289, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03225078108306289 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2839657282741738, + "mc1_stderr": 0.015785370858396736, + "mc2": 0.4901664286815018, + "mc2_stderr": 0.016461517029586932 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.0743801652892562, + "acc_stderr": 0.009021104510906089, + "acc_norm": 0.3152302243211334, + "acc_norm_stderr": 
0.015973534923794486 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + 
"harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "lifelongeek/ko-7b-ins", + "model_sha": "4970a8c78104fed617103be2763fb54e8e90ca72", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/lightblue/suzume-llama-3-8B-japanese/result_2024-06-19 08:00:32.json b/lightblue/suzume-llama-3-8B-japanese/result_2024-06-19 08:00:32.json new file mode 100644 index 0000000000000000000000000000000000000000..d751b25d553837a90b9ff0dfa78d21410d0309b4 --- /dev/null +++ b/lightblue/suzume-llama-3-8B-japanese/result_2024-06-19 08:00:32.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3660409556313993, + "acc_stderr": 0.014077223108470139, + "acc_norm": 0.431740614334471, + "acc_norm_stderr": 0.014474591427196199 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3761202947619996, + "acc_stderr": 0.004834207964061322, + "acc_norm": 0.49302927703644694, + "acc_norm_stderr": 0.004989296471157074 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.543859649122807, + "acc_stderr": 0.03820042586602966, + "acc_norm": 0.543859649122807, + "acc_norm_stderr": 0.03820042586602966 + }, + 
"harness|ko_mmlu_management|5": { + "acc": 0.5825242718446602, + "acc_stderr": 0.04882840548212238, + "acc_norm": 0.5825242718446602, + "acc_norm_stderr": 0.04882840548212238 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4559386973180077, + "acc_stderr": 0.017810403925435363, + "acc_norm": 0.4559386973180077, + "acc_norm_stderr": 0.017810403925435363 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37777777777777777, + "acc_stderr": 0.04188307537595853, + "acc_norm": 0.37777777777777777, + "acc_norm_stderr": 0.04188307537595853 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847415, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847415 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4723404255319149, + "acc_stderr": 0.03263597118409769, + "acc_norm": 0.4723404255319149, + "acc_norm_stderr": 0.03263597118409769 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3493975903614458, + "acc_stderr": 0.03711725190740751, + "acc_norm": 0.3493975903614458, + "acc_norm_stderr": 0.03711725190740751 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5530546623794212, + "acc_stderr": 0.028237769422085342, + "acc_norm": 0.5530546623794212, + "acc_norm_stderr": 0.028237769422085342 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5067264573991032, + "acc_stderr": 0.033554765962343545, + "acc_norm": 0.5067264573991032, + "acc_norm_stderr": 0.033554765962343545 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5343511450381679, + "acc_stderr": 0.04374928560599738, + "acc_norm": 0.5343511450381679, + "acc_norm_stderr": 0.04374928560599738 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.494949494949495, + "acc_stderr": 0.035621707606254015, + "acc_norm": 0.494949494949495, + "acc_norm_stderr": 0.035621707606254015 + }, + 
"harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5655172413793104, + "acc_stderr": 0.04130740879555498, + "acc_norm": 0.5655172413793104, + "acc_norm_stderr": 0.04130740879555498 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3431372549019608, + "acc_stderr": 0.047240073523838876, + "acc_norm": 0.3431372549019608, + "acc_norm_stderr": 0.047240073523838876 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5546218487394958, + "acc_stderr": 0.03228410626716391, + "acc_norm": 0.5546218487394958, + "acc_norm_stderr": 0.03228410626716391 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5102564102564102, + "acc_stderr": 0.025345672221942374, + "acc_norm": 0.5102564102564102, + "acc_norm_stderr": 0.025345672221942374 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.62, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.62, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6018518518518519, + "acc_stderr": 0.04732332615978815, + "acc_norm": 0.6018518518518519, + "acc_norm_stderr": 0.04732332615978815 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.43842364532019706, + "acc_stderr": 0.03491207857486518, + "acc_norm": 0.43842364532019706, + "acc_norm_stderr": 0.03491207857486518 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5161290322580645, + "acc_stderr": 0.028429203176724562, + "acc_norm": 0.5161290322580645, + "acc_norm_stderr": 0.028429203176724562 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7521367521367521, + "acc_stderr": 0.028286324075564407, + "acc_norm": 0.7521367521367521, + "acc_norm_stderr": 0.028286324075564407 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5283018867924528, + "acc_stderr": 0.030723535249006107, + "acc_norm": 0.5283018867924528, 
+ "acc_norm_stderr": 0.030723535249006107 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5363636363636364, + "acc_stderr": 0.04776449162396197, + "acc_norm": 0.5363636363636364, + "acc_norm_stderr": 0.04776449162396197 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.36666666666666664, + "acc_stderr": 0.029381620726465076, + "acc_norm": 0.36666666666666664, + "acc_norm_stderr": 0.029381620726465076 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.36423841059602646, + "acc_stderr": 0.03929111781242742, + "acc_norm": 0.36423841059602646, + "acc_norm_stderr": 0.03929111781242742 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6567164179104478, + "acc_stderr": 0.03357379665433431, + "acc_norm": 0.6567164179104478, + "acc_norm_stderr": 0.03357379665433431 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4508670520231214, + "acc_stderr": 0.037940126746970296, + "acc_norm": 0.4508670520231214, + "acc_norm_stderr": 0.037940126746970296 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.36507936507936506, + "acc_stderr": 0.024796060602699958, + "acc_norm": 0.36507936507936506, + "acc_norm_stderr": 0.024796060602699958 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4791666666666667, + "acc_stderr": 0.041775789507399935, + "acc_norm": 0.4791666666666667, + "acc_norm_stderr": 0.041775789507399935 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.71, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.71, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5404624277456648, + "acc_stderr": 0.026830805998952233, + "acc_norm": 0.5404624277456648, + "acc_norm_stderr": 0.026830805998952233 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.48466257668711654, + "acc_stderr": 
0.039265223787088424, + "acc_norm": 0.48466257668711654, + "acc_norm_stderr": 0.039265223787088424 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5308641975308642, + "acc_stderr": 0.02776768960683392, + "acc_norm": 0.5308641975308642, + "acc_norm_stderr": 0.02776768960683392 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5854922279792746, + "acc_stderr": 0.035553003195576686, + "acc_norm": 0.5854922279792746, + "acc_norm_stderr": 0.035553003195576686 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04434600701584925, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04434600701584925 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5834862385321101, + "acc_stderr": 0.021136376504030868, + "acc_norm": 0.5834862385321101, + "acc_norm_stderr": 0.021136376504030868 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4126984126984127, + "acc_stderr": 0.04403438954768177, + "acc_norm": 0.4126984126984127, + "acc_norm_stderr": 0.04403438954768177 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5294117647058824, + "acc_stderr": 0.028580341065138293, + "acc_norm": 0.5294117647058824, + "acc_norm_stderr": 0.028580341065138293 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6776859504132231, + "acc_stderr": 0.04266416363352167, + "acc_norm": 0.6776859504132231, + "acc_norm_stderr": 0.04266416363352167 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.45394736842105265, + "acc_stderr": 0.04051646342874141, + "acc_norm": 0.45394736842105265, + "acc_norm_stderr": 0.04051646342874141 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 
0.4444444444444444, + "acc_stderr": 0.020102583895887184, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.020102583895887184 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3617021276595745, + "acc_stderr": 0.028663820147199492, + "acc_norm": 0.3617021276595745, + "acc_norm_stderr": 0.028663820147199492 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.48214285714285715, + "acc_stderr": 0.047427623612430116, + "acc_norm": 0.48214285714285715, + "acc_norm_stderr": 0.047427623612430116 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4212962962962963, + "acc_stderr": 0.033674621388960775, + "acc_norm": 0.4212962962962963, + "acc_norm_stderr": 0.033674621388960775 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27932960893854747, + "acc_stderr": 0.015005762446786156, + "acc_norm": 0.27932960893854747, + "acc_norm_stderr": 0.015005762446786156 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.04999999999999999, + "acc_norm": 0.45, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.68, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.68, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3786764705882353, + "acc_stderr": 0.02946513363977613, + "acc_norm": 0.3786764705882353, + "acc_norm_stderr": 0.02946513363977613 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.636734693877551, + "acc_stderr": 0.030789051139030806, + "acc_norm": 0.636734693877551, + "acc_norm_stderr": 0.030789051139030806 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6540084388185654, + "acc_stderr": 0.03096481058878671, + "acc_norm": 0.6540084388185654, + "acc_norm_stderr": 0.03096481058878671 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.36962190352020863, + "acc_stderr": 0.01232844577857526, + "acc_norm": 0.36962190352020863, + "acc_norm_stderr": 
0.01232844577857526 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.553921568627451, + "acc_stderr": 0.034888454513049734, + "acc_norm": 0.553921568627451, + "acc_norm_stderr": 0.034888454513049734 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6545454545454545, + "acc_stderr": 0.037131580674819135, + "acc_norm": 0.6545454545454545, + "acc_norm_stderr": 0.037131580674819135 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3182374541003672, + "mc1_stderr": 0.016305988648920598, + "mc2": 0.4962746002569961, + "mc2_stderr": 0.015815358258222622 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.46635182998819363, + "acc_stderr": 0.01715138411713187, + "acc_norm": 0.512396694214876, + "acc_norm_stderr": 0.01718506973267653 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + 
"harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "lightblue/suzume-llama-3-8B-japanese", + "model_sha": "3306e6bac49e1bd80bfe54001994a828ceeb67fa", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/lightblue/suzume-llama-3-8B-multilingual-orpo-borda-full/result_2024-06-06 18:37:07.json b/lightblue/suzume-llama-3-8B-multilingual-orpo-borda-full/result_2024-06-06 
18:37:07.json new file mode 100644 index 0000000000000000000000000000000000000000..b84b95dfefbc7dca618733eb8db30fe566cd4f6b --- /dev/null +++ b/lightblue/suzume-llama-3-8B-multilingual-orpo-borda-full/result_2024-06-06 18:37:07.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4052901023890785, + "acc_stderr": 0.01434686906022933, + "acc_norm": 0.45307167235494883, + "acc_norm_stderr": 0.014546892052005626 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4001194981079466, + "acc_stderr": 0.004889210628907961, + "acc_norm": 0.5419239195379406, + "acc_norm_stderr": 0.004972210244020569 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5146198830409356, + "acc_stderr": 0.038331852752130254, + "acc_norm": 0.5146198830409356, + "acc_norm_stderr": 0.038331852752130254 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6504854368932039, + "acc_stderr": 0.047211885060971716, + "acc_norm": 0.6504854368932039, + "acc_norm_stderr": 0.047211885060971716 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4827586206896552, + "acc_stderr": 0.017869330154003705, + "acc_norm": 0.4827586206896552, + "acc_norm_stderr": 0.017869330154003705 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.362962962962963, + "acc_stderr": 0.041539484047424004, + "acc_norm": 0.362962962962963, + "acc_norm_stderr": 0.041539484047424004 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720683, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720683 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39574468085106385, + "acc_stderr": 0.03196758697835362, + "acc_norm": 0.39574468085106385, + "acc_norm_stderr": 0.03196758697835362 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.37349397590361444, + "acc_stderr": 0.03765845117168863, + "acc_norm": 0.37349397590361444, + "acc_norm_stderr": 0.03765845117168863 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5112540192926045, + "acc_stderr": 
0.028390897396863526, + "acc_norm": 0.5112540192926045, + "acc_norm_stderr": 0.028390897396863526 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.42152466367713004, + "acc_stderr": 0.03314190222110658, + "acc_norm": 0.42152466367713004, + "acc_norm_stderr": 0.03314190222110658 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48091603053435117, + "acc_stderr": 0.04382094705550989, + "acc_norm": 0.48091603053435117, + "acc_norm_stderr": 0.04382094705550989 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.601010101010101, + "acc_stderr": 0.034889016168527305, + "acc_norm": 0.601010101010101, + "acc_norm_stderr": 0.034889016168527305 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4206896551724138, + "acc_stderr": 0.0411391498118926, + "acc_norm": 0.4206896551724138, + "acc_norm_stderr": 0.0411391498118926 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3431372549019608, + "acc_stderr": 0.04724007352383887, + "acc_norm": 0.3431372549019608, + "acc_norm_stderr": 0.04724007352383887 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6092436974789915, + "acc_stderr": 0.031693802357129965, + "acc_norm": 0.6092436974789915, + "acc_norm_stderr": 0.031693802357129965 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5205128205128206, + "acc_stderr": 0.02532966316348994, + "acc_norm": 0.5205128205128206, + "acc_norm_stderr": 0.02532966316348994 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.63, + "acc_stderr": 0.04852365870939098, + "acc_norm": 0.63, + "acc_norm_stderr": 0.04852365870939098 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6851851851851852, + 
"acc_stderr": 0.04489931073591312, + "acc_norm": 0.6851851851851852, + "acc_norm_stderr": 0.04489931073591312 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4729064039408867, + "acc_stderr": 0.03512819077876106, + "acc_norm": 0.4729064039408867, + "acc_norm_stderr": 0.03512819077876106 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5483870967741935, + "acc_stderr": 0.028310500348568392, + "acc_norm": 0.5483870967741935, + "acc_norm_stderr": 0.028310500348568392 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6837606837606838, + "acc_stderr": 0.030463656747340265, + "acc_norm": 0.6837606837606838, + "acc_norm_stderr": 0.030463656747340265 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.49056603773584906, + "acc_stderr": 0.030767394707808093, + "acc_norm": 0.49056603773584906, + "acc_norm_stderr": 0.030767394707808093 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.45454545454545453, + "acc_stderr": 0.04769300568972744, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.04769300568972744 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3962962962962963, + "acc_stderr": 0.029822619458534, + "acc_norm": 0.3962962962962963, + "acc_norm_stderr": 0.029822619458534 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.03780445850526733, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.03780445850526733 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5970149253731343, + "acc_stderr": 0.034683432951111266, + "acc_norm": 0.5970149253731343, + "acc_norm_stderr": 0.034683432951111266 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4797687861271676, + "acc_stderr": 0.03809342081273957, + "acc_norm": 0.4797687861271676, + "acc_norm_stderr": 0.03809342081273957 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3915343915343915, + "acc_stderr": 0.025138091388851102, + "acc_norm": 0.3915343915343915, + "acc_norm_stderr": 
0.025138091388851102 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.04174752578923185, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.04174752578923185 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.48554913294797686, + "acc_stderr": 0.026907849856282542, + "acc_norm": 0.48554913294797686, + "acc_norm_stderr": 0.026907849856282542 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4601226993865031, + "acc_stderr": 0.0391585729143697, + "acc_norm": 0.4601226993865031, + "acc_norm_stderr": 0.0391585729143697 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5308641975308642, + "acc_stderr": 0.027767689606833925, + "acc_norm": 0.5308641975308642, + "acc_norm_stderr": 0.027767689606833925 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5958549222797928, + "acc_stderr": 0.03541508578884021, + "acc_norm": 0.5958549222797928, + "acc_norm_stderr": 0.03541508578884021 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.041857744240220575, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.041857744240220575 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6, + "acc_stderr": 0.02100420126042008, + "acc_norm": 0.6, + "acc_norm_stderr": 0.02100420126042008 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.5, + "acc_stderr": 0.04472135954999579, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04472135954999579 + }, + "harness|ko_mmlu_nutrition|5": 
{ + "acc": 0.5555555555555556, + "acc_stderr": 0.02845263998508801, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.02845263998508801 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6033057851239669, + "acc_stderr": 0.044658697805310094, + "acc_norm": 0.6033057851239669, + "acc_norm_stderr": 0.044658697805310094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5592105263157895, + "acc_stderr": 0.04040311062490436, + "acc_norm": 0.5592105263157895, + "acc_norm_stderr": 0.04040311062490436 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.41013071895424835, + "acc_stderr": 0.0198984127176359, + "acc_norm": 0.41013071895424835, + "acc_norm_stderr": 0.0198984127176359 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32269503546099293, + "acc_stderr": 0.02788913930053478, + "acc_norm": 0.32269503546099293, + "acc_norm_stderr": 0.02788913930053478 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.33035714285714285, + "acc_stderr": 0.044642857142857144, + "acc_norm": 0.33035714285714285, + "acc_norm_stderr": 0.044642857142857144 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5138888888888888, + "acc_stderr": 0.03408655867977748, + "acc_norm": 0.5138888888888888, + "acc_norm_stderr": 0.03408655867977748 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.30837988826815643, + "acc_stderr": 0.01544571691099887, + "acc_norm": 0.30837988826815643, + "acc_norm_stderr": 0.01544571691099887 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237102 + }, + 
"harness|ko_mmlu_professional_medicine|5": { + "acc": 0.43014705882352944, + "acc_stderr": 0.030074971917302875, + "acc_norm": 0.43014705882352944, + "acc_norm_stderr": 0.030074971917302875 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5551020408163265, + "acc_stderr": 0.031814251181977865, + "acc_norm": 0.5551020408163265, + "acc_norm_stderr": 0.031814251181977865 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.620253164556962, + "acc_stderr": 0.031591887529658504, + "acc_norm": 0.620253164556962, + "acc_norm_stderr": 0.031591887529658504 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.333116036505867, + "acc_stderr": 0.01203793045151205, + "acc_norm": 0.333116036505867, + "acc_norm_stderr": 0.01203793045151205 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5833333333333334, + "acc_stderr": 0.03460228327239171, + "acc_norm": 0.5833333333333334, + "acc_norm_stderr": 0.03460228327239171 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6181818181818182, + "acc_stderr": 0.03793713171165633, + "acc_norm": 0.6181818181818182, + "acc_norm_stderr": 0.03793713171165633 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.33047735618115054, + "mc1_stderr": 0.016466769613698286, + "mc2": 0.5235759563247893, + "mc2_stderr": 0.016018732746912373 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.43919716646989376, + "acc_stderr": 0.017062775744780705, + "acc_norm": 0.5041322314049587, + "acc_norm_stderr": 0.017189767032130817 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + 
"harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + 
"harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "lightblue/suzume-llama-3-8B-multilingual-orpo-borda-full", + "model_sha": "ac04e23fb8861c188f8ecddfecc4250b40aee04d", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/lightblue/suzume-llama-3-8B-multilingual-orpo-borda-half/result_2024-06-06 18:40:09.json b/lightblue/suzume-llama-3-8B-multilingual-orpo-borda-half/result_2024-06-06 18:40:09.json new file mode 100644 index 0000000000000000000000000000000000000000..96727633b723946bb6219ca955d13e3d437c3b23 --- /dev/null +++ b/lightblue/suzume-llama-3-8B-multilingual-orpo-borda-half/result_2024-06-06 18:40:09.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3916382252559727, + "acc_stderr": 0.014264122124938215, + "acc_norm": 0.4564846416382253, + "acc_norm_stderr": 0.014555949760496435 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3973312089225254, + "acc_stderr": 0.00488345518890897, + "acc_norm": 0.5315674168492333, + "acc_norm_stderr": 0.004979826829400771 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5847953216374269, + "acc_stderr": 0.037792759455032014, + "acc_norm": 0.5847953216374269, + "acc_norm_stderr": 0.037792759455032014 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.7087378640776699, + "acc_stderr": 0.04498676320572921, + "acc_norm": 0.7087378640776699, + "acc_norm_stderr": 0.04498676320572921 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5108556832694764, + "acc_stderr": 0.017875748840242407, + "acc_norm": 0.5108556832694764, + "acc_norm_stderr": 0.017875748840242407 + }, + 
"harness|ko_mmlu_anatomy|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.042446332383532286, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.042446332383532286 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.44680851063829785, + "acc_stderr": 0.0325005368436584, + "acc_norm": 0.44680851063829785, + "acc_norm_stderr": 0.0325005368436584 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.41566265060240964, + "acc_stderr": 0.038367221765980515, + "acc_norm": 0.41566265060240964, + "acc_norm_stderr": 0.038367221765980515 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5627009646302251, + "acc_stderr": 0.028173917761762906, + "acc_norm": 0.5627009646302251, + "acc_norm_stderr": 0.028173917761762906 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5515695067264574, + "acc_stderr": 0.03337883736255098, + "acc_norm": 0.5515695067264574, + "acc_norm_stderr": 0.03337883736255098 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5725190839694656, + "acc_stderr": 0.04338920305792401, + "acc_norm": 0.5725190839694656, + "acc_norm_stderr": 0.04338920305792401 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.601010101010101, + "acc_stderr": 0.034889016168527305, + "acc_norm": 0.601010101010101, + "acc_norm_stderr": 0.034889016168527305 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.503448275862069, + "acc_stderr": 0.04166567577101579, + "acc_norm": 0.503448275862069, + "acc_norm_stderr": 0.04166567577101579 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.39215686274509803, + "acc_stderr": 0.048580835742663454, + "acc_norm": 0.39215686274509803, + "acc_norm_stderr": 0.048580835742663454 + 
}, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5714285714285714, + "acc_stderr": 0.03214536859788639, + "acc_norm": 0.5714285714285714, + "acc_norm_stderr": 0.03214536859788639 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5102564102564102, + "acc_stderr": 0.025345672221942374, + "acc_norm": 0.5102564102564102, + "acc_norm_stderr": 0.025345672221942374 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6388888888888888, + "acc_stderr": 0.04643454608906275, + "acc_norm": 0.6388888888888888, + "acc_norm_stderr": 0.04643454608906275 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4630541871921182, + "acc_stderr": 0.035083705204426656, + "acc_norm": 0.4630541871921182, + "acc_norm_stderr": 0.035083705204426656 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5806451612903226, + "acc_stderr": 0.028071588901091817, + "acc_norm": 0.5806451612903226, + "acc_norm_stderr": 0.028071588901091817 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7307692307692307, + "acc_stderr": 0.029058588303748842, + "acc_norm": 0.7307692307692307, + "acc_norm_stderr": 0.029058588303748842 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4867924528301887, + "acc_stderr": 0.030762134874500482, + "acc_norm": 0.4867924528301887, + "acc_norm_stderr": 0.030762134874500482 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4636363636363636, + "acc_stderr": 0.047764491623961985, + "acc_norm": 0.4636363636363636, + "acc_norm_stderr": 0.047764491623961985 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.028742040903948492, + "acc_norm": 
0.3333333333333333, + "acc_norm_stderr": 0.028742040903948492 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3841059602649007, + "acc_stderr": 0.03971301814719198, + "acc_norm": 0.3841059602649007, + "acc_norm_stderr": 0.03971301814719198 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.03333333333333333, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.03333333333333333 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4277456647398844, + "acc_stderr": 0.037724468575180276, + "acc_norm": 0.4277456647398844, + "acc_norm_stderr": 0.037724468575180276 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3968253968253968, + "acc_stderr": 0.02519710107424648, + "acc_norm": 0.3968253968253968, + "acc_norm_stderr": 0.02519710107424648 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4027777777777778, + "acc_stderr": 0.04101405519842425, + "acc_norm": 0.4027777777777778, + "acc_norm_stderr": 0.04101405519842425 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.45, + "acc_stderr": 0.049999999999999996, + "acc_norm": 0.45, + "acc_norm_stderr": 0.049999999999999996 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.71, + "acc_stderr": 0.04560480215720683, + "acc_norm": 0.71, + "acc_norm_stderr": 0.04560480215720683 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5606936416184971, + "acc_stderr": 0.026720034380514998, + "acc_norm": 0.5606936416184971, + "acc_norm_stderr": 0.026720034380514998 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.49693251533742333, + "acc_stderr": 0.03928297078179663, + "acc_norm": 0.49693251533742333, + "acc_norm_stderr": 0.03928297078179663 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5833333333333334, + "acc_stderr": 0.02743162372241501, + "acc_norm": 0.5833333333333334, + "acc_norm_stderr": 0.02743162372241501 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + 
"acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6269430051813472, + "acc_stderr": 0.03490205592048574, + "acc_norm": 0.6269430051813472, + "acc_norm_stderr": 0.03490205592048574 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.34210526315789475, + "acc_stderr": 0.04462917535336937, + "acc_norm": 0.34210526315789475, + "acc_norm_stderr": 0.04462917535336937 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5798165137614679, + "acc_stderr": 0.021162420048273508, + "acc_norm": 0.5798165137614679, + "acc_norm_stderr": 0.021162420048273508 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.373015873015873, + "acc_stderr": 0.04325506042017087, + "acc_norm": 0.373015873015873, + "acc_norm_stderr": 0.04325506042017087 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5424836601307189, + "acc_stderr": 0.02852638345214264, + "acc_norm": 0.5424836601307189, + "acc_norm_stderr": 0.02852638345214264 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7024793388429752, + "acc_stderr": 0.04173349148083498, + "acc_norm": 0.7024793388429752, + "acc_norm_stderr": 0.04173349148083498 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5328947368421053, + "acc_stderr": 0.040601270352363966, + "acc_norm": 0.5328947368421053, + "acc_norm_stderr": 0.040601270352363966 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.44281045751633985, + "acc_stderr": 0.020095083154577347, + "acc_norm": 0.44281045751633985, + "acc_norm_stderr": 0.020095083154577347 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.30851063829787234, + "acc_stderr": 0.027553366165101376, + "acc_norm": 0.30851063829787234, + "acc_norm_stderr": 0.027553366165101376 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 
0.33035714285714285, + "acc_stderr": 0.044642857142857144, + "acc_norm": 0.33035714285714285, + "acc_norm_stderr": 0.044642857142857144 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.44907407407407407, + "acc_stderr": 0.03392238405321617, + "acc_norm": 0.44907407407407407, + "acc_norm_stderr": 0.03392238405321617 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.26145251396648045, + "acc_stderr": 0.014696599650364546, + "acc_norm": 0.26145251396648045, + "acc_norm_stderr": 0.014696599650364546 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.65, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.65, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4375, + "acc_stderr": 0.030134614954403924, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.030134614954403924 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6122448979591837, + "acc_stderr": 0.031192230726795656, + "acc_norm": 0.6122448979591837, + "acc_norm_stderr": 0.031192230726795656 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.620253164556962, + "acc_stderr": 0.0315918875296585, + "acc_norm": 0.620253164556962, + "acc_norm_stderr": 0.0315918875296585 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3604954367666232, + "acc_stderr": 0.012263110237299245, + "acc_norm": 0.3604954367666232, + "acc_norm_stderr": 0.012263110237299245 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5882352941176471, + "acc_stderr": 0.03454236585380609, + "acc_norm": 0.5882352941176471, + "acc_norm_stderr": 0.03454236585380609 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.037563357751878954, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 
0.037563357751878954 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2864137086903305, + "mc1_stderr": 0.01582614243950234, + "mc2": 0.4634654257018926, + "mc2_stderr": 0.01569165667853618 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.45218417945690675, + "acc_stderr": 0.017111567130916785, + "acc_norm": 0.5230224321133412, + "acc_norm_stderr": 0.017172121546727634 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + 
"harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "lightblue/suzume-llama-3-8B-multilingual-orpo-borda-half", + "model_sha": "b82150a9840ba5ba93918c745adc70afc6ad2ce1", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/lightblue/suzume-llama-3-8B-multilingual-orpo-borda-top25/result_2024-06-06 18:41:02.json b/lightblue/suzume-llama-3-8B-multilingual-orpo-borda-top25/result_2024-06-06 18:41:02.json new file mode 100644 index 0000000000000000000000000000000000000000..8779ed02fc80ba5482d2dc1506c844806879c6a8 --- /dev/null +++ b/lightblue/suzume-llama-3-8B-multilingual-orpo-borda-top25/result_2024-06-06 18:41:02.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.39761092150170646, + "acc_stderr": 
0.01430175222327954, + "acc_norm": 0.447098976109215, + "acc_norm_stderr": 0.014529380160526843 + }, + "harness|ko_hellaswag|10": { + "acc": 0.40151364270065726, + "acc_stderr": 0.004892026457294708, + "acc_norm": 0.530372435769767, + "acc_norm_stderr": 0.0049805669077904536 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5847953216374269, + "acc_stderr": 0.03779275945503202, + "acc_norm": 0.5847953216374269, + "acc_norm_stderr": 0.03779275945503202 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6699029126213593, + "acc_stderr": 0.046561471100123514, + "acc_norm": 0.6699029126213593, + "acc_norm_stderr": 0.046561471100123514 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.49936143039591313, + "acc_stderr": 0.01787994891443167, + "acc_norm": 0.49936143039591313, + "acc_norm_stderr": 0.01787994891443167 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3925925925925926, + "acc_stderr": 0.04218506215368879, + "acc_norm": 0.3925925925925926, + "acc_norm_stderr": 0.04218506215368879 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909284, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909284 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4765957446808511, + "acc_stderr": 0.03265019475033582, + "acc_norm": 0.4765957446808511, + "acc_norm_stderr": 0.03265019475033582 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42771084337349397, + "acc_stderr": 0.03851597683718533, + "acc_norm": 0.42771084337349397, + "acc_norm_stderr": 0.03851597683718533 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5659163987138264, + "acc_stderr": 0.028150232244535604, + "acc_norm": 0.5659163987138264, + "acc_norm_stderr": 0.028150232244535604 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5112107623318386, + "acc_stderr": 0.033549366530984746, + "acc_norm": 0.5112107623318386, + "acc_norm_stderr": 0.033549366530984746 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.549618320610687, + 
"acc_stderr": 0.04363643698524779, + "acc_norm": 0.549618320610687, + "acc_norm_stderr": 0.04363643698524779 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.601010101010101, + "acc_stderr": 0.034889016168527305, + "acc_norm": 0.601010101010101, + "acc_norm_stderr": 0.034889016168527305 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5103448275862069, + "acc_stderr": 0.04165774775728762, + "acc_norm": 0.5103448275862069, + "acc_norm_stderr": 0.04165774775728762 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.04835503696107223, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.04835503696107223 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5840336134453782, + "acc_stderr": 0.03201650100739611, + "acc_norm": 0.5840336134453782, + "acc_norm_stderr": 0.03201650100739611 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.517948717948718, + "acc_stderr": 0.02533466708095489, + "acc_norm": 0.517948717948718, + "acc_norm_stderr": 0.02533466708095489 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.64, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.64, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6574074074074074, + "acc_stderr": 0.045879047413018105, + "acc_norm": 0.6574074074074074, + "acc_norm_stderr": 0.045879047413018105 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4630541871921182, + "acc_stderr": 0.035083705204426656, + "acc_norm": 0.4630541871921182, + "acc_norm_stderr": 0.035083705204426656 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 
0.5967741935483871, + "acc_stderr": 0.027906150826041153, + "acc_norm": 0.5967741935483871, + "acc_norm_stderr": 0.027906150826041153 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7521367521367521, + "acc_stderr": 0.028286324075564404, + "acc_norm": 0.7521367521367521, + "acc_norm_stderr": 0.028286324075564404 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5056603773584906, + "acc_stderr": 0.030770900763851302, + "acc_norm": 0.5056603773584906, + "acc_norm_stderr": 0.030770900763851302 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5545454545454546, + "acc_stderr": 0.047605488214603246, + "acc_norm": 0.5545454545454546, + "acc_norm_stderr": 0.047605488214603246 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.35555555555555557, + "acc_stderr": 0.029185714949857403, + "acc_norm": 0.35555555555555557, + "acc_norm_stderr": 0.029185714949857403 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3443708609271523, + "acc_stderr": 0.03879687024073327, + "acc_norm": 0.3443708609271523, + "acc_norm_stderr": 0.03879687024073327 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7014925373134329, + "acc_stderr": 0.03235743789355043, + "acc_norm": 0.7014925373134329, + "acc_norm_stderr": 0.03235743789355043 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4393063583815029, + "acc_stderr": 0.03784271932887467, + "acc_norm": 0.4393063583815029, + "acc_norm_stderr": 0.03784271932887467 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3994708994708995, + "acc_stderr": 0.025225450284067877, + "acc_norm": 0.3994708994708995, + "acc_norm_stderr": 0.025225450284067877 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4305555555555556, + "acc_stderr": 0.041406856391115014, + "acc_norm": 0.4305555555555556, + "acc_norm_stderr": 0.041406856391115014 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 
0.049756985195624284 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.7, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.7, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5751445086705202, + "acc_stderr": 0.026613350840261736, + "acc_norm": 0.5751445086705202, + "acc_norm_stderr": 0.026613350840261736 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4662576687116564, + "acc_stderr": 0.039194155450484096, + "acc_norm": 0.4662576687116564, + "acc_norm_stderr": 0.039194155450484096 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5771604938271605, + "acc_stderr": 0.027487472980871595, + "acc_norm": 0.5771604938271605, + "acc_norm_stderr": 0.027487472980871595 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6476683937823834, + "acc_stderr": 0.03447478286414357, + "acc_norm": 0.6476683937823834, + "acc_norm_stderr": 0.03447478286414357 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04434600701584925, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04434600701584925 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6036697247706422, + "acc_stderr": 0.020971469947900525, + "acc_norm": 0.6036697247706422, + "acc_norm_stderr": 0.020971469947900525 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4126984126984127, + "acc_stderr": 0.04403438954768177, + "acc_norm": 0.4126984126984127, + "acc_norm_stderr": 0.04403438954768177 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5588235294117647, + "acc_stderr": 0.028431095444176643, + "acc_norm": 0.5588235294117647, + "acc_norm_stderr": 0.028431095444176643 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.65, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.65, + 
"acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6859504132231405, + "acc_stderr": 0.042369647530410184, + "acc_norm": 0.6859504132231405, + "acc_norm_stderr": 0.042369647530410184 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5394736842105263, + "acc_stderr": 0.04056242252249034, + "acc_norm": 0.5394736842105263, + "acc_norm_stderr": 0.04056242252249034 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.020087362076702853, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.020087362076702853 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3723404255319149, + "acc_stderr": 0.028838921471251458, + "acc_norm": 0.3723404255319149, + "acc_norm_stderr": 0.028838921471251458 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.38392857142857145, + "acc_stderr": 0.04616143075028546, + "acc_norm": 0.38392857142857145, + "acc_norm_stderr": 0.04616143075028546 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4398148148148148, + "acc_stderr": 0.0338517797604481, + "acc_norm": 0.4398148148148148, + "acc_norm_stderr": 0.0338517797604481 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.25251396648044694, + "acc_stderr": 0.014530330201468633, + "acc_norm": 0.25251396648044694, + "acc_norm_stderr": 0.014530330201468633 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.66, + "acc_stderr": 0.04760952285695237, + "acc_norm": 0.66, + "acc_norm_stderr": 0.04760952285695237 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4375, + "acc_stderr": 0.030134614954403924, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.030134614954403924 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6285714285714286, + "acc_stderr": 
0.03093285879278986, + "acc_norm": 0.6285714285714286, + "acc_norm_stderr": 0.03093285879278986 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6751054852320675, + "acc_stderr": 0.030486039389105313, + "acc_norm": 0.6751054852320675, + "acc_norm_stderr": 0.030486039389105313 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.38070404172099087, + "acc_stderr": 0.012401430654645884, + "acc_norm": 0.38070404172099087, + "acc_norm_stderr": 0.012401430654645884 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6225490196078431, + "acc_stderr": 0.034022720443407026, + "acc_norm": 0.6225490196078431, + "acc_norm_stderr": 0.034022720443407026 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.03756335775187896, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.03756335775187896 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.29865361077111385, + "mc1_stderr": 0.016021570613768545, + "mc2": 0.4845572561550924, + "mc2_stderr": 0.01571961036263795 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.47107438016528924, + "acc_stderr": 0.01716156394991635, + "acc_norm": 0.51357733175915, + "acc_norm_stderr": 0.017184015060401455 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + 
"harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + 
"model_name": "lightblue/suzume-llama-3-8B-multilingual-orpo-borda-top25", + "model_sha": "5a2f17238cc83932e00613d285f8bf6b8f4a0c3a", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/lightblue/suzume-llama-3-8B-multilingual-orpo-borda-top75/result_2024-06-06 18:38:59.json b/lightblue/suzume-llama-3-8B-multilingual-orpo-borda-top75/result_2024-06-06 18:38:59.json new file mode 100644 index 0000000000000000000000000000000000000000..176e89818d6f3a19bf504843dcd141a1a1361c15 --- /dev/null +++ b/lightblue/suzume-llama-3-8B-multilingual-orpo-borda-top75/result_2024-06-06 18:38:59.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.40102389078498296, + "acc_stderr": 0.014322255790719867, + "acc_norm": 0.4539249146757679, + "acc_norm_stderr": 0.014549221105171874 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3966341366261701, + "acc_stderr": 0.00488199048762892, + "acc_norm": 0.5407289384584744, + "acc_norm_stderr": 0.004973199296339965 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.543859649122807, + "acc_stderr": 0.03820042586602966, + "acc_norm": 0.543859649122807, + "acc_norm_stderr": 0.03820042586602966 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6699029126213593, + "acc_stderr": 0.046561471100123514, + "acc_norm": 0.6699029126213593, + "acc_norm_stderr": 0.046561471100123514 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.51213282247765, + "acc_stderr": 0.01787469866749134, + "acc_norm": 0.51213282247765, + "acc_norm_stderr": 0.01787469866749134 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4148148148148148, + "acc_stderr": 0.042561937679014075, + "acc_norm": 0.4148148148148148, + "acc_norm_stderr": 0.042561937679014075 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720683, + "acc_norm": 0.29, + 
"acc_norm_stderr": 0.04560480215720683 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.46808510638297873, + "acc_stderr": 0.03261936918467381, + "acc_norm": 0.46808510638297873, + "acc_norm_stderr": 0.03261936918467381 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3855421686746988, + "acc_stderr": 0.037891344246115496, + "acc_norm": 0.3855421686746988, + "acc_norm_stderr": 0.037891344246115496 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5755627009646302, + "acc_stderr": 0.028071928247946205, + "acc_norm": 0.5755627009646302, + "acc_norm_stderr": 0.028071928247946205 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5291479820627802, + "acc_stderr": 0.03350073248773403, + "acc_norm": 0.5291479820627802, + "acc_norm_stderr": 0.03350073248773403 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5343511450381679, + "acc_stderr": 0.043749285605997376, + "acc_norm": 0.5343511450381679, + "acc_norm_stderr": 0.043749285605997376 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6111111111111112, + "acc_stderr": 0.0347327959083696, + "acc_norm": 0.6111111111111112, + "acc_norm_stderr": 0.0347327959083696 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5310344827586206, + "acc_stderr": 0.04158632762097828, + "acc_norm": 0.5310344827586206, + "acc_norm_stderr": 0.04158632762097828 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.04835503696107223, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.04835503696107223 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5882352941176471, + "acc_stderr": 0.031968769891957786, + "acc_norm": 0.5882352941176471, + "acc_norm_stderr": 0.031968769891957786 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5153846153846153, + 
"acc_stderr": 0.025339003010106498, + "acc_norm": 0.5153846153846153, + "acc_norm_stderr": 0.025339003010106498 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.63, + "acc_stderr": 0.04852365870939098, + "acc_norm": 0.63, + "acc_norm_stderr": 0.04852365870939098 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.45, + "acc_stderr": 0.049999999999999996, + "acc_norm": 0.45, + "acc_norm_stderr": 0.049999999999999996 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6481481481481481, + "acc_stderr": 0.046166311118017125, + "acc_norm": 0.6481481481481481, + "acc_norm_stderr": 0.046166311118017125 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.47783251231527096, + "acc_stderr": 0.035145285621750094, + "acc_norm": 0.47783251231527096, + "acc_norm_stderr": 0.035145285621750094 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.603225806451613, + "acc_stderr": 0.027831231605767958, + "acc_norm": 0.603225806451613, + "acc_norm_stderr": 0.027831231605767958 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7222222222222222, + "acc_stderr": 0.02934311479809446, + "acc_norm": 0.7222222222222222, + "acc_norm_stderr": 0.02934311479809446 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.49433962264150944, + "acc_stderr": 0.030770900763851316, + "acc_norm": 0.49433962264150944, + "acc_norm_stderr": 0.030770900763851316 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5, + "acc_stderr": 0.04789131426105757, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04789131426105757 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.34444444444444444, + "acc_stderr": 0.028972648884844264, + "acc_norm": 0.34444444444444444, + "acc_norm_stderr": 0.028972648884844264 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3576158940397351, + "acc_stderr": 0.03913453431177258, + "acc_norm": 0.3576158940397351, + "acc_norm_stderr": 0.03913453431177258 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7064676616915423, + 
"acc_stderr": 0.03220024104534205, + "acc_norm": 0.7064676616915423, + "acc_norm_stderr": 0.03220024104534205 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.41040462427745666, + "acc_stderr": 0.03750757044895538, + "acc_norm": 0.41040462427745666, + "acc_norm_stderr": 0.03750757044895538 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.41005291005291006, + "acc_stderr": 0.025331202438944437, + "acc_norm": 0.41005291005291006, + "acc_norm_stderr": 0.025331202438944437 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.041553199555931467, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.041553199555931467 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.69, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.69, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5491329479768786, + "acc_stderr": 0.02678881193156276, + "acc_norm": 0.5491329479768786, + "acc_norm_stderr": 0.02678881193156276 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4723926380368098, + "acc_stderr": 0.039223782906109894, + "acc_norm": 0.4723926380368098, + "acc_norm_stderr": 0.039223782906109894 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5617283950617284, + "acc_stderr": 0.027607914087400473, + "acc_norm": 0.5617283950617284, + "acc_norm_stderr": 0.027607914087400473 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6373056994818653, + "acc_stderr": 0.03469713791704372, + "acc_norm": 0.6373056994818653, + "acc_norm_stderr": 0.03469713791704372 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 
0.32456140350877194, + "acc_stderr": 0.04404556157374768, + "acc_norm": 0.32456140350877194, + "acc_norm_stderr": 0.04404556157374768 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5871559633027523, + "acc_stderr": 0.021109128133413917, + "acc_norm": 0.5871559633027523, + "acc_norm_stderr": 0.021109128133413917 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.04360314860077459, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.04360314860077459 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5882352941176471, + "acc_stderr": 0.028180596328259287, + "acc_norm": 0.5882352941176471, + "acc_norm_stderr": 0.028180596328259287 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7024793388429752, + "acc_stderr": 0.04173349148083499, + "acc_norm": 0.7024793388429752, + "acc_norm_stderr": 0.04173349148083499 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5460526315789473, + "acc_stderr": 0.040516463428741434, + "acc_norm": 0.5460526315789473, + "acc_norm_stderr": 0.040516463428741434 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4297385620915033, + "acc_stderr": 0.020027122784928547, + "acc_norm": 0.4297385620915033, + "acc_norm_stderr": 0.020027122784928547 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.33687943262411346, + "acc_stderr": 0.02819553487396673, + "acc_norm": 0.33687943262411346, + "acc_norm_stderr": 0.02819553487396673 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.04547960999764376, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.04547960999764376 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.44907407407407407, + "acc_stderr": 0.03392238405321617, + "acc_norm": 0.44907407407407407, + "acc_norm_stderr": 
0.03392238405321617 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.29497206703910617, + "acc_stderr": 0.015251931579208185, + "acc_norm": 0.29497206703910617, + "acc_norm_stderr": 0.015251931579208185 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.64, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.64, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.41544117647058826, + "acc_stderr": 0.029935342707877743, + "acc_norm": 0.41544117647058826, + "acc_norm_stderr": 0.029935342707877743 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6081632653061224, + "acc_stderr": 0.03125127591089166, + "acc_norm": 0.6081632653061224, + "acc_norm_stderr": 0.03125127591089166 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6497890295358649, + "acc_stderr": 0.031052391937584346, + "acc_norm": 0.6497890295358649, + "acc_norm_stderr": 0.031052391937584346 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.36571056062581486, + "acc_stderr": 0.012301028188840563, + "acc_norm": 0.36571056062581486, + "acc_norm_stderr": 0.012301028188840563 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6127450980392157, + "acc_stderr": 0.03418931233833344, + "acc_norm": 0.6127450980392157, + "acc_norm_stderr": 0.03418931233833344 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6484848484848484, + "acc_stderr": 0.037282069986826503, + "acc_norm": 0.6484848484848484, + "acc_norm_stderr": 0.037282069986826503 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.30966952264381886, + "mc1_stderr": 0.016185744355144905, + "mc2": 0.5014088920494368, + "mc2_stderr": 0.015712979415316686 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4510035419126328, + "acc_stderr": 0.01710761885954935, 
+ "acc_norm": 0.5242030696576151, + "acc_norm_stderr": 0.017170202466520748 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + 
"harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "lightblue/suzume-llama-3-8B-multilingual-orpo-borda-top75", + "model_sha": "555f4a0092f239557e1aa34f9d489e8156b907bb", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/lightblue/suzume-llama-3-8B-multilingual/result_2024-05-13 16:36:15.json b/lightblue/suzume-llama-3-8B-multilingual/result_2024-05-13 16:36:15.json new file mode 100644 index 0000000000000000000000000000000000000000..49c023cbc96f10a25e4c0aa288faf85bafe9c842 --- /dev/null +++ b/lightblue/suzume-llama-3-8B-multilingual/result_2024-05-13 16:36:15.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.378839590443686, + "acc_stderr": 0.014175915490000326, + "acc_norm": 0.4274744027303754, + "acc_norm_stderr": 0.014456862944650645 + }, + "harness|ko_hellaswag|10": { + "acc": 0.38548097988448515, + "acc_stderr": 0.004857140410776745, + "acc_norm": 0.5055765783708425, + "acc_norm_stderr": 0.004989471055090958 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5497076023391813, + "acc_stderr": 
0.03815827365913238, + "acc_norm": 0.5497076023391813, + "acc_norm_stderr": 0.03815827365913238 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6310679611650486, + "acc_stderr": 0.0477761518115674, + "acc_norm": 0.6310679611650486, + "acc_norm_stderr": 0.0477761518115674 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.46998722860791825, + "acc_stderr": 0.0178477230866491, + "acc_norm": 0.46998722860791825, + "acc_norm_stderr": 0.0178477230866491 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.362962962962963, + "acc_stderr": 0.04153948404742399, + "acc_norm": 0.362962962962963, + "acc_norm_stderr": 0.04153948404742399 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909284, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909284 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4723404255319149, + "acc_stderr": 0.03263597118409769, + "acc_norm": 0.4723404255319149, + "acc_norm_stderr": 0.03263597118409769 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4397590361445783, + "acc_stderr": 0.03864139923699121, + "acc_norm": 0.4397590361445783, + "acc_norm_stderr": 0.03864139923699121 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5691318327974276, + "acc_stderr": 0.028125340983972714, + "acc_norm": 0.5691318327974276, + "acc_norm_stderr": 0.028125340983972714 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5112107623318386, + "acc_stderr": 0.033549366530984746, + "acc_norm": 0.5112107623318386, + "acc_norm_stderr": 0.033549366530984746 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5190839694656488, + "acc_stderr": 0.04382094705550988, + "acc_norm": 0.5190839694656488, + "acc_norm_stderr": 0.04382094705550988 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5, + "acc_stderr": 0.035623524993954825, + 
"acc_norm": 0.5, + "acc_norm_stderr": 0.035623524993954825 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4896551724137931, + "acc_stderr": 0.041657747757287644, + "acc_norm": 0.4896551724137931, + "acc_norm_stderr": 0.041657747757287644 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3235294117647059, + "acc_stderr": 0.04655010411319616, + "acc_norm": 0.3235294117647059, + "acc_norm_stderr": 0.04655010411319616 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5756302521008403, + "acc_stderr": 0.03210479051015776, + "acc_norm": 0.5756302521008403, + "acc_norm_stderr": 0.03210479051015776 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5076923076923077, + "acc_stderr": 0.02534800603153475, + "acc_norm": 0.5076923076923077, + "acc_norm_stderr": 0.02534800603153475 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6481481481481481, + "acc_stderr": 0.04616631111801713, + "acc_norm": 0.6481481481481481, + "acc_norm_stderr": 0.04616631111801713 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.43349753694581283, + "acc_stderr": 0.03486731727419872, + "acc_norm": 0.43349753694581283, + "acc_norm_stderr": 0.03486731727419872 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5645161290322581, + "acc_stderr": 0.02820622559150274, + "acc_norm": 0.5645161290322581, + "acc_norm_stderr": 0.02820622559150274 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7735042735042735, + "acc_stderr": 0.027421007295392926, + "acc_norm": 0.7735042735042735, + "acc_norm_stderr": 0.027421007295392926 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5094339622641509, + 
"acc_stderr": 0.0307673947078081, + "acc_norm": 0.5094339622641509, + "acc_norm_stderr": 0.0307673947078081 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5181818181818182, + "acc_stderr": 0.04785964010794915, + "acc_norm": 0.5181818181818182, + "acc_norm_stderr": 0.04785964010794915 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.34444444444444444, + "acc_stderr": 0.028972648884844267, + "acc_norm": 0.34444444444444444, + "acc_norm_stderr": 0.028972648884844267 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.03802039760107903, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.03802039760107903 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7114427860696517, + "acc_stderr": 0.03203841040213321, + "acc_norm": 0.7114427860696517, + "acc_norm_stderr": 0.03203841040213321 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.44508670520231214, + "acc_stderr": 0.03789401760283647, + "acc_norm": 0.44508670520231214, + "acc_norm_stderr": 0.03789401760283647 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.36772486772486773, + "acc_stderr": 0.024833839825562417, + "acc_norm": 0.36772486772486773, + "acc_norm_stderr": 0.024833839825562417 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4791666666666667, + "acc_stderr": 0.04177578950739994, + "acc_norm": 0.4791666666666667, + "acc_norm_stderr": 0.04177578950739994 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.7, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.7, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5895953757225434, + "acc_stderr": 0.02648339204209818, + "acc_norm": 0.5895953757225434, + "acc_norm_stderr": 0.02648339204209818 + }, + "harness|ko_mmlu_logical_fallacies|5": 
{ + "acc": 0.5030674846625767, + "acc_stderr": 0.03928297078179662, + "acc_norm": 0.5030674846625767, + "acc_norm_stderr": 0.03928297078179662 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5740740740740741, + "acc_stderr": 0.02751374728437942, + "acc_norm": 0.5740740740740741, + "acc_norm_stderr": 0.02751374728437942 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5803108808290155, + "acc_stderr": 0.035615873276858834, + "acc_norm": 0.5803108808290155, + "acc_norm_stderr": 0.035615873276858834 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.35964912280701755, + "acc_stderr": 0.04514496132873633, + "acc_norm": 0.35964912280701755, + "acc_norm_stderr": 0.04514496132873633 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5853211009174312, + "acc_stderr": 0.021122903208602592, + "acc_norm": 0.5853211009174312, + "acc_norm_stderr": 0.021122903208602592 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.0442626668137991, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.0442626668137991 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5522875816993464, + "acc_stderr": 0.028472938478033526, + "acc_norm": 0.5522875816993464, + "acc_norm_stderr": 0.028472938478033526 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7107438016528925, + "acc_stderr": 0.04139112727635463, + "acc_norm": 0.7107438016528925, + "acc_norm_stderr": 0.04139112727635463 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.48026315789473684, + "acc_stderr": 0.040657710025626057, + "acc_norm": 0.48026315789473684, + "acc_norm_stderr": 0.040657710025626057 + }, + 
"harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4362745098039216, + "acc_stderr": 0.02006287424353913, + "acc_norm": 0.4362745098039216, + "acc_norm_stderr": 0.02006287424353913 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.34397163120567376, + "acc_stderr": 0.028338017428611324, + "acc_norm": 0.34397163120567376, + "acc_norm_stderr": 0.028338017428611324 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.48214285714285715, + "acc_stderr": 0.047427623612430116, + "acc_norm": 0.48214285714285715, + "acc_norm_stderr": 0.047427623612430116 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.44907407407407407, + "acc_stderr": 0.033922384053216174, + "acc_norm": 0.44907407407407407, + "acc_norm_stderr": 0.033922384053216174 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2558659217877095, + "acc_stderr": 0.014593620923210742, + "acc_norm": 0.2558659217877095, + "acc_norm_stderr": 0.014593620923210742 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.67, + "acc_stderr": 0.047258156262526094, + "acc_norm": 0.67, + "acc_norm_stderr": 0.047258156262526094 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4485294117647059, + "acc_stderr": 0.0302114796091216, + "acc_norm": 0.4485294117647059, + "acc_norm_stderr": 0.0302114796091216 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6163265306122448, + "acc_stderr": 0.031130880396235943, + "acc_norm": 0.6163265306122448, + "acc_norm_stderr": 0.031130880396235943 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.030685820596610812, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.030685820596610812 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3754889178617992, + "acc_stderr": 
0.012367945396728206, + "acc_norm": 0.3754889178617992, + "acc_norm_stderr": 0.012367945396728206 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5637254901960784, + "acc_stderr": 0.03480693138457039, + "acc_norm": 0.5637254901960784, + "acc_norm_stderr": 0.03480693138457039 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6909090909090909, + "acc_stderr": 0.036085410115739666, + "acc_norm": 0.6909090909090909, + "acc_norm_stderr": 0.036085410115739666 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2839657282741738, + "mc1_stderr": 0.015785370858396704, + "mc2": 0.46734826089103265, + "mc2_stderr": 0.0153198021360474 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4651711924439197, + "acc_stderr": 0.017148598015747425, + "acc_norm": 0.5360094451003542, + "acc_norm_stderr": 0.017145715365486664 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + 
"harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "lightblue/suzume-llama-3-8B-multilingual", + "model_sha": "c7b55e87c44c7e8d52ead657715c14abd3f9cda9", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/liminerity/M7-7b/result_2024-07-13 12:28:31.json 
b/liminerity/M7-7b/result_2024-07-13 12:28:31.json new file mode 100644 index 0000000000000000000000000000000000000000..1fe4e4a5f31f6852b0248ca8fb099d7c09156a4c --- /dev/null +++ b/liminerity/M7-7b/result_2024-07-13 12:28:31.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.39419795221843, + "acc_stderr": 0.014280522667467325, + "acc_norm": 0.4598976109215017, + "acc_norm_stderr": 0.01456431885692485 + }, + "harness|ko_hellaswag|10": { + "acc": 0.39543915554670384, + "acc_stderr": 0.00487945547466381, + "acc_norm": 0.5256920932085242, + "acc_norm_stderr": 0.004983189711208504 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4678362573099415, + "acc_stderr": 0.03826882417660369, + "acc_norm": 0.4678362573099415, + "acc_norm_stderr": 0.03826882417660369 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6213592233009708, + "acc_stderr": 0.04802694698258975, + "acc_norm": 0.6213592233009708, + "acc_norm_stderr": 0.04802694698258975 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.45721583652618136, + "acc_stderr": 0.017814385238534427, + "acc_norm": 0.45721583652618136, + "acc_norm_stderr": 0.017814385238534427 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37777777777777777, + "acc_stderr": 0.04188307537595853, + "acc_norm": 0.37777777777777777, + "acc_norm_stderr": 0.04188307537595853 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4, + "acc_stderr": 0.03202563076101735, + "acc_norm": 0.4, + "acc_norm_stderr": 0.03202563076101735 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.35542168674698793, + "acc_stderr": 0.03726214354322415, + "acc_norm": 0.35542168674698793, + "acc_norm_stderr": 0.03726214354322415 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.41479099678456594, + "acc_stderr": 0.027982680459759563, + "acc_norm": 
0.41479099678456594, + "acc_norm_stderr": 0.027982680459759563 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4349775784753363, + "acc_stderr": 0.03327283370271344, + "acc_norm": 0.4349775784753363, + "acc_norm_stderr": 0.03327283370271344 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4351145038167939, + "acc_stderr": 0.04348208051644858, + "acc_norm": 0.4351145038167939, + "acc_norm_stderr": 0.04348208051644858 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5505050505050505, + "acc_stderr": 0.035441324919479704, + "acc_norm": 0.5505050505050505, + "acc_norm_stderr": 0.035441324919479704 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4482758620689655, + "acc_stderr": 0.041443118108781506, + "acc_norm": 0.4482758620689655, + "acc_norm_stderr": 0.041443118108781506 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.040233822736177476, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.040233822736177476 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5, + "acc_stderr": 0.032478490123081544, + "acc_norm": 0.5, + "acc_norm_stderr": 0.032478490123081544 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4794871794871795, + "acc_stderr": 0.025329663163489943, + "acc_norm": 0.4794871794871795, + "acc_norm_stderr": 0.025329663163489943 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.64, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.64, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5092592592592593, + "acc_stderr": 0.04832853553437055, + "acc_norm": 
0.5092592592592593, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.37438423645320196, + "acc_stderr": 0.03405155380561952, + "acc_norm": 0.37438423645320196, + "acc_norm_stderr": 0.03405155380561952 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.43548387096774194, + "acc_stderr": 0.028206225591502737, + "acc_norm": 0.43548387096774194, + "acc_norm_stderr": 0.028206225591502737 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7393162393162394, + "acc_stderr": 0.02876034895652341, + "acc_norm": 0.7393162393162394, + "acc_norm_stderr": 0.02876034895652341 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.46037735849056605, + "acc_stderr": 0.03067609659938917, + "acc_norm": 0.46037735849056605, + "acc_norm_stderr": 0.03067609659938917 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4818181818181818, + "acc_stderr": 0.04785964010794917, + "acc_norm": 0.4818181818181818, + "acc_norm_stderr": 0.04785964010794917 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.34814814814814815, + "acc_stderr": 0.029045600290616255, + "acc_norm": 0.34814814814814815, + "acc_norm_stderr": 0.029045600290616255 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2582781456953642, + "acc_stderr": 0.035737053147634576, + "acc_norm": 0.2582781456953642, + "acc_norm_stderr": 0.035737053147634576 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.572139303482587, + "acc_stderr": 0.03498541988407795, + "acc_norm": 0.572139303482587, + "acc_norm_stderr": 0.03498541988407795 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.41040462427745666, + "acc_stderr": 0.03750757044895537, + "acc_norm": 0.41040462427745666, + "acc_norm_stderr": 0.03750757044895537 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3862433862433862, + "acc_stderr": 0.02507598176760168, + "acc_norm": 0.3862433862433862, + "acc_norm_stderr": 0.02507598176760168 + }, + 
"harness|ko_mmlu_college_biology|5": { + "acc": 0.3472222222222222, + "acc_stderr": 0.039812405437178615, + "acc_norm": 0.3472222222222222, + "acc_norm_stderr": 0.039812405437178615 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.55, + "acc_stderr": 0.05000000000000001, + "acc_norm": 0.55, + "acc_norm_stderr": 0.05000000000000001 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5057803468208093, + "acc_stderr": 0.026917296179149116, + "acc_norm": 0.5057803468208093, + "acc_norm_stderr": 0.026917296179149116 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4723926380368098, + "acc_stderr": 0.039223782906109894, + "acc_norm": 0.4723926380368098, + "acc_norm_stderr": 0.039223782906109894 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4537037037037037, + "acc_stderr": 0.027701228468542602, + "acc_norm": 0.4537037037037037, + "acc_norm_stderr": 0.027701228468542602 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.48704663212435234, + "acc_stderr": 0.03607228061047749, + "acc_norm": 0.48704663212435234, + "acc_norm_stderr": 0.03607228061047749 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3157894736842105, + "acc_stderr": 0.04372748290278007, + "acc_norm": 0.3157894736842105, + "acc_norm_stderr": 0.04372748290278007 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5119266055045871, + "acc_stderr": 0.021431223617362227, + "acc_norm": 0.5119266055045871, + "acc_norm_stderr": 0.021431223617362227 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4365079365079365, + "acc_stderr": 0.044359328928514664, + "acc_norm": 0.4365079365079365, + "acc_norm_stderr": 
0.044359328928514664 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.46405228758169936, + "acc_stderr": 0.028555827516528784, + "acc_norm": 0.46405228758169936, + "acc_norm_stderr": 0.028555827516528784 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6033057851239669, + "acc_stderr": 0.044658697805310094, + "acc_norm": 0.6033057851239669, + "acc_norm_stderr": 0.044658697805310094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40789473684210525, + "acc_stderr": 0.03999309712777472, + "acc_norm": 0.40789473684210525, + "acc_norm_stderr": 0.03999309712777472 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.019722058939618068, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.019722058939618068 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3617021276595745, + "acc_stderr": 0.028663820147199492, + "acc_norm": 0.3617021276595745, + "acc_norm_stderr": 0.028663820147199492 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.375, + "acc_stderr": 0.04595091388086298, + "acc_norm": 0.375, + "acc_norm_stderr": 0.04595091388086298 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.033247089118091176, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.033247089118091176 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.28156424581005585, + "acc_stderr": 0.015042290171866108, + "acc_norm": 0.28156424581005585, + "acc_norm_stderr": 0.015042290171866108 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.68, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.68, + 
"acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3235294117647059, + "acc_stderr": 0.028418208619406794, + "acc_norm": 0.3235294117647059, + "acc_norm_stderr": 0.028418208619406794 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5224489795918368, + "acc_stderr": 0.031976941187136725, + "acc_norm": 0.5224489795918368, + "acc_norm_stderr": 0.031976941187136725 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5907172995780591, + "acc_stderr": 0.03200704183359591, + "acc_norm": 0.5907172995780591, + "acc_norm_stderr": 0.03200704183359591 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.34224250325945244, + "acc_stderr": 0.01211793999870587, + "acc_norm": 0.34224250325945244, + "acc_norm_stderr": 0.01211793999870587 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.0346022832723917, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.0346022832723917 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.45454545454545453, + "acc_stderr": 0.03888176921674099, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.03888176921674099 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.4222766217870257, + "mc1_stderr": 0.017290733254248167, + "mc2": 0.5972071871534377, + "mc2_stderr": 0.0162823435793482 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4214876033057851, + "acc_stderr": 0.01697710193260152, + "acc_norm": 0.42739079102715466, + "acc_norm_stderr": 0.017008129844823156 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + 
"harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + 
"harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "liminerity/M7-7b", + "model_sha": "9b8a6b02683dc88777ead09c81baae2a06b14294", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/little-fox/Leonardo/result_2024-02-26 00:16:27.json b/little-fox/Leonardo/result_2024-02-26 00:16:27.json new file mode 100644 index 0000000000000000000000000000000000000000..f55adab6247f8d12495fe9baa199e5a40fac55a1 --- /dev/null +++ b/little-fox/Leonardo/result_2024-02-26 00:16:27.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.38054607508532423, + "acc_stderr": 0.014188277712349814, + "acc_norm": 0.46501706484641636, + "acc_norm_stderr": 0.01457558392201967 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4087831109340769, + "acc_stderr": 0.0049060436130133975, + "acc_norm": 0.5534754033061143, + "acc_norm_stderr": 0.004961161589228403 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.543859649122807, + "acc_stderr": 0.03820042586602966, + "acc_norm": 0.543859649122807, + "acc_norm_stderr": 0.03820042586602966 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6213592233009708, + "acc_stderr": 0.04802694698258974, + "acc_norm": 0.6213592233009708, + "acc_norm_stderr": 0.04802694698258974 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6053639846743295, + "acc_stderr": 0.01747846430591155, + "acc_norm": 0.6053639846743295, + "acc_norm_stderr": 0.01747846430591155 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.043097329010363554, + "acc_norm": 
0.4666666666666667, + "acc_norm_stderr": 0.043097329010363554 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.2, + "acc_stderr": 0.040201512610368445, + "acc_norm": 0.2, + "acc_norm_stderr": 0.040201512610368445 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4, + "acc_stderr": 0.032025630761017346, + "acc_norm": 0.4, + "acc_norm_stderr": 0.032025630761017346 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4036144578313253, + "acc_stderr": 0.03819486140758397, + "acc_norm": 0.4036144578313253, + "acc_norm_stderr": 0.03819486140758397 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5530546623794212, + "acc_stderr": 0.028237769422085335, + "acc_norm": 0.5530546623794212, + "acc_norm_stderr": 0.028237769422085335 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.48878923766816146, + "acc_stderr": 0.033549366530984746, + "acc_norm": 0.48878923766816146, + "acc_norm_stderr": 0.033549366530984746 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5572519083969466, + "acc_stderr": 0.04356447202665069, + "acc_norm": 0.5572519083969466, + "acc_norm_stderr": 0.04356447202665069 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6313131313131313, + "acc_stderr": 0.03437305501980619, + "acc_norm": 0.6313131313131313, + "acc_norm_stderr": 0.03437305501980619 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4206896551724138, + "acc_stderr": 0.0411391498118926, + "acc_norm": 0.4206896551724138, + "acc_norm_stderr": 0.0411391498118926 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.04336432707993177, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.04336432707993177 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4957983193277311, + "acc_stderr": 0.03247734334448111, + "acc_norm": 
0.4957983193277311, + "acc_norm_stderr": 0.03247734334448111 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.48205128205128206, + "acc_stderr": 0.02533466708095495, + "acc_norm": 0.48205128205128206, + "acc_norm_stderr": 0.02533466708095495 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.56, + "acc_stderr": 0.0498887651569859, + "acc_norm": 0.56, + "acc_norm_stderr": 0.0498887651569859 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5277777777777778, + "acc_stderr": 0.048262172941398944, + "acc_norm": 0.5277777777777778, + "acc_norm_stderr": 0.048262172941398944 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3694581280788177, + "acc_stderr": 0.03395970381998574, + "acc_norm": 0.3694581280788177, + "acc_norm_stderr": 0.03395970381998574 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4935483870967742, + "acc_stderr": 0.02844163823354051, + "acc_norm": 0.4935483870967742, + "acc_norm_stderr": 0.02844163823354051 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7222222222222222, + "acc_stderr": 0.02934311479809445, + "acc_norm": 0.7222222222222222, + "acc_norm_stderr": 0.02934311479809445 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4830188679245283, + "acc_stderr": 0.030755120364119905, + "acc_norm": 0.4830188679245283, + "acc_norm_stderr": 0.030755120364119905 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4727272727272727, + "acc_stderr": 0.04782001791380063, + "acc_norm": 0.4727272727272727, + "acc_norm_stderr": 0.04782001791380063 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3074074074074074, + "acc_stderr": 0.028133252578815642, + "acc_norm": 0.3074074074074074, + "acc_norm_stderr": 0.028133252578815642 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2781456953642384, + "acc_stderr": 
0.03658603262763743, + "acc_norm": 0.2781456953642384, + "acc_norm_stderr": 0.03658603262763743 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5870646766169154, + "acc_stderr": 0.03481520803367348, + "acc_norm": 0.5870646766169154, + "acc_norm_stderr": 0.03481520803367348 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3988439306358382, + "acc_stderr": 0.03733626655383509, + "acc_norm": 0.3988439306358382, + "acc_norm_stderr": 0.03733626655383509 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.4021164021164021, + "acc_stderr": 0.025253032554997695, + "acc_norm": 0.4021164021164021, + "acc_norm_stderr": 0.025253032554997695 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3680555555555556, + "acc_stderr": 0.040329990539607195, + "acc_norm": 0.3680555555555556, + "acc_norm_stderr": 0.040329990539607195 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.48265895953757226, + "acc_stderr": 0.026902900458666647, + "acc_norm": 0.48265895953757226, + "acc_norm_stderr": 0.026902900458666647 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.43558282208588955, + "acc_stderr": 0.03895632464138937, + "acc_norm": 0.43558282208588955, + "acc_norm_stderr": 0.03895632464138937 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.02774431344337654, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.02774431344337654 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5854922279792746, + 
"acc_stderr": 0.03555300319557669, + "acc_norm": 0.5854922279792746, + "acc_norm_stderr": 0.03555300319557669 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.34210526315789475, + "acc_stderr": 0.04462917535336938, + "acc_norm": 0.34210526315789475, + "acc_norm_stderr": 0.04462917535336938 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5908256880733945, + "acc_stderr": 0.021080670264433735, + "acc_norm": 0.5908256880733945, + "acc_norm_stderr": 0.021080670264433735 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3253968253968254, + "acc_stderr": 0.041905964388711366, + "acc_norm": 0.3253968253968254, + "acc_norm_stderr": 0.041905964388711366 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4673202614379085, + "acc_stderr": 0.02856869975222587, + "acc_norm": 0.4673202614379085, + "acc_norm_stderr": 0.02856869975222587 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956913, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956913 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6033057851239669, + "acc_stderr": 0.044658697805310094, + "acc_norm": 0.6033057851239669, + "acc_norm_stderr": 0.044658697805310094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4473684210526316, + "acc_stderr": 0.04046336883978251, + "acc_norm": 0.4473684210526316, + "acc_norm_stderr": 0.04046336883978251 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3872549019607843, + "acc_stderr": 0.019706875804085627, + "acc_norm": 0.3872549019607843, + "acc_norm_stderr": 0.019706875804085627 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32269503546099293, + "acc_stderr": 0.027889139300534774, + "acc_norm": 0.32269503546099293, + "acc_norm_stderr": 0.027889139300534774 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.29464285714285715, + "acc_stderr": 0.043270409325787296, + "acc_norm": 0.29464285714285715, + "acc_norm_stderr": 0.043270409325787296 + }, + 
"harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4583333333333333, + "acc_stderr": 0.03398110890294636, + "acc_norm": 0.4583333333333333, + "acc_norm_stderr": 0.03398110890294636 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.21452513966480447, + "acc_stderr": 0.013728923407828858, + "acc_norm": 0.21452513966480447, + "acc_norm_stderr": 0.013728923407828858 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3713235294117647, + "acc_stderr": 0.02934980313976587, + "acc_norm": 0.3713235294117647, + "acc_norm_stderr": 0.02934980313976587 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4530612244897959, + "acc_stderr": 0.03186785930004129, + "acc_norm": 0.4530612244897959, + "acc_norm_stderr": 0.03186785930004129 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6075949367088608, + "acc_stderr": 0.03178471874564729, + "acc_norm": 0.6075949367088608, + "acc_norm_stderr": 0.03178471874564729 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3200782268578879, + "acc_stderr": 0.011914791947638509, + "acc_norm": 0.3200782268578879, + "acc_norm_stderr": 0.011914791947638509 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5098039215686274, + "acc_stderr": 0.03508637358630572, + "acc_norm": 0.5098039215686274, + "acc_norm_stderr": 0.03508637358630572 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5333333333333333, + "acc_stderr": 0.03895658065271847, + "acc_norm": 0.5333333333333333, + "acc_norm_stderr": 0.03895658065271847 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.30966952264381886, + "mc1_stderr": 0.016185744355144905, + "mc2": 
0.46094176669293047, + "mc2_stderr": 0.015745919821827725 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3730814639905549, + "acc_stderr": 0.016627318275137436, + "acc_norm": 0.4049586776859504, + "acc_norm_stderr": 0.016876941165045612 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + 
"harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "little-fox/Leonardo", + "model_sha": "65ffd630f5e374d99547f6b8f3fe7baf1a222669", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/lmsys/vicuna-13b-v1.5/result_2023-10-29 08:20:20.json b/lmsys/vicuna-13b-v1.5/result_2023-10-29 08:20:20.json new file mode 100644 index 0000000000000000000000000000000000000000..29017402e327122642a67d67dcb83284fdc08833 --- /dev/null +++ b/lmsys/vicuna-13b-v1.5/result_2023-10-29 08:20:20.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3199658703071672, + "acc_stderr": 0.013631345807016195, + "acc_norm": 0.36177474402730375, + "acc_norm_stderr": 0.014041957945038078 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3619796853216491, + "acc_stderr": 0.004795908282584544, + "acc_norm": 0.45180242979486157, + "acc_norm_stderr": 0.004966544724452228 + }, + 
"harness|ko_mmlu_world_religions|5": { + "acc": 0.45614035087719296, + "acc_stderr": 0.03820042586602967, + "acc_norm": 0.45614035087719296, + "acc_norm_stderr": 0.03820042586602967 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.47572815533980584, + "acc_stderr": 0.049449010929737795, + "acc_norm": 0.47572815533980584, + "acc_norm_stderr": 0.049449010929737795 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4674329501915709, + "acc_stderr": 0.017841995750520857, + "acc_norm": 0.4674329501915709, + "acc_norm_stderr": 0.017841995750520857 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37777777777777777, + "acc_stderr": 0.04188307537595853, + "acc_norm": 0.37777777777777777, + "acc_norm_stderr": 0.04188307537595853 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.41702127659574467, + "acc_stderr": 0.03223276266711712, + "acc_norm": 0.41702127659574467, + "acc_norm_stderr": 0.03223276266711712 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3674698795180723, + "acc_stderr": 0.03753267402120574, + "acc_norm": 0.3674698795180723, + "acc_norm_stderr": 0.03753267402120574 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4115755627009646, + "acc_stderr": 0.02795048149440127, + "acc_norm": 0.4115755627009646, + "acc_norm_stderr": 0.02795048149440127 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.43946188340807174, + "acc_stderr": 0.03331092511038179, + "acc_norm": 0.43946188340807174, + "acc_norm_stderr": 0.03331092511038179 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.46564885496183206, + "acc_stderr": 0.04374928560599738, + "acc_norm": 0.46564885496183206, + "acc_norm_stderr": 0.04374928560599738 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + 
"harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5151515151515151, + "acc_stderr": 0.0356071651653106, + "acc_norm": 0.5151515151515151, + "acc_norm_stderr": 0.0356071651653106 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4482758620689655, + "acc_stderr": 0.041443118108781506, + "acc_norm": 0.4482758620689655, + "acc_norm_stderr": 0.041443118108781506 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.04336432707993177, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.04336432707993177 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.41596638655462187, + "acc_stderr": 0.03201650100739615, + "acc_norm": 0.41596638655462187, + "acc_norm_stderr": 0.03201650100739615 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3717948717948718, + "acc_stderr": 0.024503472557110946, + "acc_norm": 0.3717948717948718, + "acc_norm_stderr": 0.024503472557110946 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.04820403072760628, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.04820403072760628 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.37438423645320196, + "acc_stderr": 0.03405155380561952, + "acc_norm": 0.37438423645320196, + "acc_norm_stderr": 0.03405155380561952 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4032258064516129, + "acc_stderr": 0.02790615082604114, + "acc_norm": 0.4032258064516129, + "acc_norm_stderr": 0.02790615082604114 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6410256410256411, + "acc_stderr": 0.03142616993791923, + "acc_norm": 0.6410256410256411, + "acc_norm_stderr": 
0.03142616993791923 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4, + "acc_stderr": 0.030151134457776296, + "acc_norm": 0.4, + "acc_norm_stderr": 0.030151134457776296 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.42727272727272725, + "acc_stderr": 0.04738198703545483, + "acc_norm": 0.42727272727272725, + "acc_norm_stderr": 0.04738198703545483 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.02784081149587192, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.02784081149587192 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.03780445850526732, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.03780445850526732 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5572139303482587, + "acc_stderr": 0.03512310964123937, + "acc_norm": 0.5572139303482587, + "acc_norm_stderr": 0.03512310964123937 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3468208092485549, + "acc_stderr": 0.036291466701596636, + "acc_norm": 0.3468208092485549, + "acc_norm_stderr": 0.036291466701596636 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.023809523809523864, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.023809523809523864 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.039420826399272135, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.039420826399272135 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.65, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.65, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4797687861271676, + "acc_stderr": 0.026897049996382875, + "acc_norm": 0.4797687861271676, + 
"acc_norm_stderr": 0.026897049996382875 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3619631901840491, + "acc_stderr": 0.037757007291414416, + "acc_norm": 0.3619631901840491, + "acc_norm_stderr": 0.037757007291414416 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.404320987654321, + "acc_stderr": 0.027306625297327698, + "acc_norm": 0.404320987654321, + "acc_norm_stderr": 0.027306625297327698 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.48186528497409326, + "acc_stderr": 0.036060650018329185, + "acc_norm": 0.48186528497409326, + "acc_norm_stderr": 0.036060650018329185 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.19298245614035087, + "acc_stderr": 0.037124548537213684, + "acc_norm": 0.19298245614035087, + "acc_norm_stderr": 0.037124548537213684 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.41284403669724773, + "acc_stderr": 0.021109128133413906, + "acc_norm": 0.41284403669724773, + "acc_norm_stderr": 0.021109128133413906 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30158730158730157, + "acc_stderr": 0.041049472699033945, + "acc_norm": 0.30158730158730157, + "acc_norm_stderr": 0.041049472699033945 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.42810457516339867, + "acc_stderr": 0.028332397483664274, + "acc_norm": 0.42810457516339867, + "acc_norm_stderr": 0.028332397483664274 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5537190082644629, + "acc_stderr": 0.0453793517794788, + "acc_norm": 0.5537190082644629, + "acc_norm_stderr": 0.0453793517794788 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3881578947368421, + "acc_stderr": 0.03965842097512744, 
+ "acc_norm": 0.3881578947368421, + "acc_norm_stderr": 0.03965842097512744 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3284313725490196, + "acc_stderr": 0.018999707383162662, + "acc_norm": 0.3284313725490196, + "acc_norm_stderr": 0.018999707383162662 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.36524822695035464, + "acc_stderr": 0.02872386385328128, + "acc_norm": 0.36524822695035464, + "acc_norm_stderr": 0.02872386385328128 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.24107142857142858, + "acc_stderr": 0.040598672469526864, + "acc_norm": 0.24107142857142858, + "acc_norm_stderr": 0.040598672469526864 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.25462962962962965, + "acc_stderr": 0.029711275860005337, + "acc_norm": 0.25462962962962965, + "acc_norm_stderr": 0.029711275860005337 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2558659217877095, + "acc_stderr": 0.014593620923210749, + "acc_norm": 0.2558659217877095, + "acc_norm_stderr": 0.014593620923210749 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.28308823529411764, + "acc_stderr": 0.027365861131513805, + "acc_norm": 0.28308823529411764, + "acc_norm_stderr": 0.027365861131513805 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.46530612244897956, + "acc_stderr": 0.03193207024425314, + "acc_norm": 0.46530612244897956, + "acc_norm_stderr": 0.03193207024425314 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5316455696202531, + "acc_stderr": 0.032481974005110756, + "acc_norm": 0.5316455696202531, + "acc_norm_stderr": 0.032481974005110756 + }, + 
"harness|ko_mmlu_professional_law|5": { + "acc": 0.32790091264667537, + "acc_stderr": 0.01198993664066653, + "acc_norm": 0.32790091264667537, + "acc_norm_stderr": 0.01198993664066653 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.0346022832723917, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.0346022832723917 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.47878787878787876, + "acc_stderr": 0.03900828913737302, + "acc_norm": 0.47878787878787876, + "acc_norm_stderr": 0.03900828913737302 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.30599755201958384, + "mc1_stderr": 0.016132229728155072, + "mc2": 0.4781712790136037, + "mc2_stderr": 0.015927322204823676 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3152302243211334, + "acc_stderr": 0.01597353492379446, + "acc_norm": 0.3565525383707202, + "acc_norm_stderr": 0.01646770698152745 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + 
"harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "lmsys/vicuna-13b-v1.5", + "model_sha": "3deb0106f72a3a433f0c6ea0cb978bdf14bcd3a6", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git 
a/logicker/SkkuDataScience-10.7B-v5/result_2024-01-02 12:43:12.json b/logicker/SkkuDataScience-10.7B-v5/result_2024-01-02 12:43:12.json new file mode 100644 index 0000000000000000000000000000000000000000..88566af52b5bb802fb75a2d795a3fdfba16f1c36 --- /dev/null +++ b/logicker/SkkuDataScience-10.7B-v5/result_2024-01-02 12:43:12.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4283276450511945, + "acc_stderr": 0.014460496367599017, + "acc_norm": 0.4931740614334471, + "acc_norm_stderr": 0.014610029151379813 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4342760406293567, + "acc_stderr": 0.004946485466544623, + "acc_norm": 0.6011750647281418, + "acc_norm_stderr": 0.004886559008754982 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5730994152046783, + "acc_stderr": 0.03793620616529916, + "acc_norm": 0.5730994152046783, + "acc_norm_stderr": 0.03793620616529916 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6407766990291263, + "acc_stderr": 0.04750458399041697, + "acc_norm": 0.6407766990291263, + "acc_norm_stderr": 0.04750458399041697 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6398467432950191, + "acc_stderr": 0.017166362471369306, + "acc_norm": 0.6398467432950191, + "acc_norm_stderr": 0.017166362471369306 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4888888888888889, + "acc_stderr": 0.04318275491977976, + "acc_norm": 0.4888888888888889, + "acc_norm_stderr": 0.04318275491977976 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.19, + "acc_stderr": 0.03942772444036623, + "acc_norm": 0.19, + "acc_norm_stderr": 0.03942772444036623 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.46808510638297873, + "acc_stderr": 0.03261936918467382, + "acc_norm": 0.46808510638297873, + "acc_norm_stderr": 0.03261936918467382 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.463855421686747, + "acc_stderr": 0.03882310850890594, + "acc_norm": 0.463855421686747, + "acc_norm_stderr": 0.03882310850890594 + }, + 
"harness|ko_mmlu_philosophy|5": { + "acc": 0.6109324758842444, + "acc_stderr": 0.027690337536485376, + "acc_norm": 0.6109324758842444, + "acc_norm_stderr": 0.027690337536485376 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5874439461883408, + "acc_stderr": 0.03304062175449297, + "acc_norm": 0.5874439461883408, + "acc_norm_stderr": 0.03304062175449297 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6030534351145038, + "acc_stderr": 0.04291135671009224, + "acc_norm": 0.6030534351145038, + "acc_norm_stderr": 0.04291135671009224 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956914, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956914 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7171717171717171, + "acc_stderr": 0.03208779558786753, + "acc_norm": 0.7171717171717171, + "acc_norm_stderr": 0.03208779558786753 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4482758620689655, + "acc_stderr": 0.04144311810878151, + "acc_norm": 0.4482758620689655, + "acc_norm_stderr": 0.04144311810878151 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.04533838195929776, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.04533838195929776 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6092436974789915, + "acc_stderr": 0.03169380235712997, + "acc_norm": 0.6092436974789915, + "acc_norm_stderr": 0.03169380235712997 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.541025641025641, + "acc_stderr": 0.025265525491284295, + "acc_norm": 0.541025641025641, + "acc_norm_stderr": 0.025265525491284295 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + 
}, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6296296296296297, + "acc_stderr": 0.04668408033024931, + "acc_norm": 0.6296296296296297, + "acc_norm_stderr": 0.04668408033024931 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4039408866995074, + "acc_stderr": 0.0345245390382204, + "acc_norm": 0.4039408866995074, + "acc_norm_stderr": 0.0345245390382204 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5838709677419355, + "acc_stderr": 0.02804098138076153, + "acc_norm": 0.5838709677419355, + "acc_norm_stderr": 0.02804098138076153 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.782051282051282, + "acc_stderr": 0.027046857630716663, + "acc_norm": 0.782051282051282, + "acc_norm_stderr": 0.027046857630716663 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.539622641509434, + "acc_stderr": 0.030676096599389184, + "acc_norm": 0.539622641509434, + "acc_norm_stderr": 0.030676096599389184 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5909090909090909, + "acc_stderr": 0.04709306978661895, + "acc_norm": 0.5909090909090909, + "acc_norm_stderr": 0.04709306978661895 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3037037037037037, + "acc_stderr": 0.028037929969114993, + "acc_norm": 0.3037037037037037, + "acc_norm_stderr": 0.028037929969114993 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3509933774834437, + "acc_stderr": 0.03896981964257375, + "acc_norm": 0.3509933774834437, + "acc_norm_stderr": 0.03896981964257375 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.736318407960199, + "acc_stderr": 0.03115715086935558, + "acc_norm": 0.736318407960199, + "acc_norm_stderr": 0.03115715086935558 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4913294797687861, + "acc_stderr": 0.03811890988940412, + "acc_norm": 0.4913294797687861, + "acc_norm_stderr": 0.03811890988940412 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.43386243386243384, + "acc_stderr": 0.02552503438247488, 
+ "acc_norm": 0.43386243386243384, + "acc_norm_stderr": 0.02552503438247488 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4652777777777778, + "acc_stderr": 0.04171115858181618, + "acc_norm": 0.4652777777777778, + "acc_norm_stderr": 0.04171115858181618 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.66, + "acc_stderr": 0.04760952285695238, + "acc_norm": 0.66, + "acc_norm_stderr": 0.04760952285695238 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5751445086705202, + "acc_stderr": 0.026613350840261743, + "acc_norm": 0.5751445086705202, + "acc_norm_stderr": 0.026613350840261743 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.50920245398773, + "acc_stderr": 0.03927705600787443, + "acc_norm": 0.50920245398773, + "acc_norm_stderr": 0.03927705600787443 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5833333333333334, + "acc_stderr": 0.027431623722415012, + "acc_norm": 0.5833333333333334, + "acc_norm_stderr": 0.027431623722415012 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6994818652849741, + "acc_stderr": 0.033088185944157494, + "acc_norm": 0.6994818652849741, + "acc_norm_stderr": 0.033088185944157494 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.45614035087719296, + "acc_stderr": 0.04685473041907789, + "acc_norm": 0.45614035087719296, + "acc_norm_stderr": 0.04685473041907789 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6587155963302752, + "acc_stderr": 0.020328612816592442, + "acc_norm": 0.6587155963302752, + "acc_norm_stderr": 0.020328612816592442 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4126984126984127, + "acc_stderr": 
0.04403438954768177, + "acc_norm": 0.4126984126984127, + "acc_norm_stderr": 0.04403438954768177 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5882352941176471, + "acc_stderr": 0.028180596328259287, + "acc_norm": 0.5882352941176471, + "acc_norm_stderr": 0.028180596328259287 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.61, + "acc_stderr": 0.049020713000019756, + "acc_norm": 0.61, + "acc_norm_stderr": 0.049020713000019756 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.743801652892562, + "acc_stderr": 0.039849796533028704, + "acc_norm": 0.743801652892562, + "acc_norm_stderr": 0.039849796533028704 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5263157894736842, + "acc_stderr": 0.04063302731486671, + "acc_norm": 0.5263157894736842, + "acc_norm_stderr": 0.04063302731486671 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4918300653594771, + "acc_stderr": 0.020225134343057265, + "acc_norm": 0.4918300653594771, + "acc_norm_stderr": 0.020225134343057265 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3971631205673759, + "acc_stderr": 0.029189805673587105, + "acc_norm": 0.3971631205673759, + "acc_norm_stderr": 0.029189805673587105 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4375, + "acc_stderr": 0.04708567521880525, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.04708567521880525 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5370370370370371, + "acc_stderr": 0.03400603625538271, + "acc_norm": 0.5370370370370371, + "acc_norm_stderr": 0.03400603625538271 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.28156424581005585, + "acc_stderr": 0.015042290171866117, + "acc_norm": 0.28156424581005585, + "acc_norm_stderr": 0.015042290171866117 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.6, + 
"acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4852941176470588, + "acc_stderr": 0.03035969707904612, + "acc_norm": 0.4852941176470588, + "acc_norm_stderr": 0.03035969707904612 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5673469387755102, + "acc_stderr": 0.031717528240626645, + "acc_norm": 0.5673469387755102, + "acc_norm_stderr": 0.031717528240626645 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7426160337552743, + "acc_stderr": 0.028458820991460305, + "acc_norm": 0.7426160337552743, + "acc_norm_stderr": 0.028458820991460305 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.4152542372881356, + "acc_stderr": 0.012585471793400662, + "acc_norm": 0.4152542372881356, + "acc_norm_stderr": 0.012585471793400662 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6617647058823529, + "acc_stderr": 0.03320574612945431, + "acc_norm": 0.6617647058823529, + "acc_norm_stderr": 0.03320574612945431 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.696969696969697, + "acc_stderr": 0.035886248000917075, + "acc_norm": 0.696969696969697, + "acc_norm_stderr": 0.035886248000917075 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.41615667074663404, + "mc1_stderr": 0.017255657502903043, + "mc2": 0.5886342114868912, + "mc2_stderr": 0.015648324717553452 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.615112160566706, + "acc_stderr": 0.01672857970149866, + "acc_norm": 0.6469893742621016, + "acc_norm_stderr": 0.016430745982427147 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + 
"harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 
1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "logicker/SkkuDataScience-10.7B-v5", + "model_sha": "762a5fcd2915c9a56f92d0e24a72efea2dd54a8d", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/logicker/SkkuDataScience-10.7B-v6/result_2024-01-02 15:20:38.json b/logicker/SkkuDataScience-10.7B-v6/result_2024-01-02 15:20:38.json new file mode 100644 index 0000000000000000000000000000000000000000..afd0f7686fd7c5ef726fcfa7eacff379b33a75ed --- /dev/null +++ b/logicker/SkkuDataScience-10.7B-v6/result_2024-01-02 15:20:38.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4300341296928328, + "acc_stderr": 0.014467631559137991, + "acc_norm": 0.48464163822525597, + "acc_norm_stderr": 0.014604496129394908 + }, + "harness|ko_hellaswag|10": { + "acc": 0.42959569806811393, + "acc_stderr": 0.004940067402031033, + "acc_norm": 0.5968930491933878, + "acc_norm_stderr": 0.0048951941438926845 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5906432748538012, + "acc_stderr": 0.03771283107626544, + "acc_norm": 0.5906432748538012, + "acc_norm_stderr": 0.03771283107626544 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6796116504854369, + "acc_stderr": 0.04620284082280042, + "acc_norm": 0.6796116504854369, + "acc_norm_stderr": 0.04620284082280042 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6551724137931034, + "acc_stderr": 0.016997123346113446, + "acc_norm": 0.6551724137931034, + "acc_norm_stderr": 0.016997123346113446 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 
0.4888888888888889, + "acc_stderr": 0.04318275491977976, + "acc_norm": 0.4888888888888889, + "acc_norm_stderr": 0.04318275491977976 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.22, + "acc_stderr": 0.0416333199893227, + "acc_norm": 0.22, + "acc_norm_stderr": 0.0416333199893227 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4808510638297872, + "acc_stderr": 0.032662042990646775, + "acc_norm": 0.4808510638297872, + "acc_norm_stderr": 0.032662042990646775 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4819277108433735, + "acc_stderr": 0.038899512528272166, + "acc_norm": 0.4819277108433735, + "acc_norm_stderr": 0.038899512528272166 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6334405144694534, + "acc_stderr": 0.02736807824397163, + "acc_norm": 0.6334405144694534, + "acc_norm_stderr": 0.02736807824397163 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5650224215246636, + "acc_stderr": 0.033272833702713445, + "acc_norm": 0.5650224215246636, + "acc_norm_stderr": 0.033272833702713445 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6259541984732825, + "acc_stderr": 0.04243869242230524, + "acc_norm": 0.6259541984732825, + "acc_norm_stderr": 0.04243869242230524 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.48, + "acc_stderr": 0.05021167315686779, + "acc_norm": 0.48, + "acc_norm_stderr": 0.05021167315686779 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7171717171717171, + "acc_stderr": 0.03208779558786753, + "acc_norm": 0.7171717171717171, + "acc_norm_stderr": 0.03208779558786753 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.47586206896551725, + "acc_stderr": 0.0416180850350153, + "acc_norm": 0.47586206896551725, + "acc_norm_stderr": 0.0416180850350153 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3235294117647059, + "acc_stderr": 0.046550104113196177, + "acc_norm": 0.3235294117647059, + "acc_norm_stderr": 0.046550104113196177 + }, + 
"harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6218487394957983, + "acc_stderr": 0.03149930577784906, + "acc_norm": 0.6218487394957983, + "acc_norm_stderr": 0.03149930577784906 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5794871794871795, + "acc_stderr": 0.025028610276710866, + "acc_norm": 0.5794871794871795, + "acc_norm_stderr": 0.025028610276710866 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.57, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.57, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6111111111111112, + "acc_stderr": 0.0471282125742677, + "acc_norm": 0.6111111111111112, + "acc_norm_stderr": 0.0471282125742677 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39901477832512317, + "acc_stderr": 0.03445487686264715, + "acc_norm": 0.39901477832512317, + "acc_norm_stderr": 0.03445487686264715 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6064516129032258, + "acc_stderr": 0.027791878753132274, + "acc_norm": 0.6064516129032258, + "acc_norm_stderr": 0.027791878753132274 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7863247863247863, + "acc_stderr": 0.02685345037700914, + "acc_norm": 0.7863247863247863, + "acc_norm_stderr": 0.02685345037700914 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5471698113207547, + "acc_stderr": 0.03063562795796182, + "acc_norm": 0.5471698113207547, + "acc_norm_stderr": 0.03063562795796182 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5909090909090909, + "acc_stderr": 0.04709306978661895, + "acc_norm": 0.5909090909090909, + "acc_norm_stderr": 0.04709306978661895 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.028742040903948485, + "acc_norm": 0.3333333333333333, + 
"acc_norm_stderr": 0.028742040903948485 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33112582781456956, + "acc_stderr": 0.038425817186598696, + "acc_norm": 0.33112582781456956, + "acc_norm_stderr": 0.038425817186598696 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7263681592039801, + "acc_stderr": 0.03152439186555404, + "acc_norm": 0.7263681592039801, + "acc_norm_stderr": 0.03152439186555404 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5202312138728323, + "acc_stderr": 0.03809342081273957, + "acc_norm": 0.5202312138728323, + "acc_norm_stderr": 0.03809342081273957 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.42328042328042326, + "acc_stderr": 0.02544636563440679, + "acc_norm": 0.42328042328042326, + "acc_norm_stderr": 0.02544636563440679 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4861111111111111, + "acc_stderr": 0.04179596617581, + "acc_norm": 0.4861111111111111, + "acc_norm_stderr": 0.04179596617581 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.69, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.69, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5780346820809249, + "acc_stderr": 0.02658923114217426, + "acc_norm": 0.5780346820809249, + "acc_norm_stderr": 0.02658923114217426 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.50920245398773, + "acc_stderr": 0.03927705600787443, + "acc_norm": 0.50920245398773, + "acc_norm_stderr": 0.03927705600787443 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6141975308641975, + "acc_stderr": 0.027085401226132143, + "acc_norm": 0.6141975308641975, + "acc_norm_stderr": 0.027085401226132143 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + 
"acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7357512953367875, + "acc_stderr": 0.03182155050916647, + "acc_norm": 0.7357512953367875, + "acc_norm_stderr": 0.03182155050916647 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.4649122807017544, + "acc_stderr": 0.046920083813689104, + "acc_norm": 0.4649122807017544, + "acc_norm_stderr": 0.046920083813689104 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6660550458715596, + "acc_stderr": 0.020220554196736407, + "acc_norm": 0.6660550458715596, + "acc_norm_stderr": 0.020220554196736407 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.044444444444444495, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.044444444444444495 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5620915032679739, + "acc_stderr": 0.02840830202033269, + "acc_norm": 0.5620915032679739, + "acc_norm_stderr": 0.02840830202033269 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.61, + "acc_stderr": 0.049020713000019756, + "acc_norm": 0.61, + "acc_norm_stderr": 0.049020713000019756 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7272727272727273, + "acc_stderr": 0.040655781409087044, + "acc_norm": 0.7272727272727273, + "acc_norm_stderr": 0.040655781409087044 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5263157894736842, + "acc_stderr": 0.040633027314866725, + "acc_norm": 0.5263157894736842, + "acc_norm_stderr": 0.040633027314866725 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5212418300653595, + "acc_stderr": 0.02020957238860025, + "acc_norm": 0.5212418300653595, + "acc_norm_stderr": 0.02020957238860025 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.425531914893617, + "acc_stderr": 0.02949482760014437, + "acc_norm": 0.425531914893617, + "acc_norm_stderr": 0.02949482760014437 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.39285714285714285, + 
"acc_stderr": 0.04635550135609976, + "acc_norm": 0.39285714285714285, + "acc_norm_stderr": 0.04635550135609976 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5601851851851852, + "acc_stderr": 0.03385177976044812, + "acc_norm": 0.5601851851851852, + "acc_norm_stderr": 0.03385177976044812 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2748603351955307, + "acc_stderr": 0.014931316703220508, + "acc_norm": 0.2748603351955307, + "acc_norm_stderr": 0.014931316703220508 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.64, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.64, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5257352941176471, + "acc_stderr": 0.030332578094555033, + "acc_norm": 0.5257352941176471, + "acc_norm_stderr": 0.030332578094555033 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5959183673469388, + "acc_stderr": 0.03141470802586589, + "acc_norm": 0.5959183673469388, + "acc_norm_stderr": 0.03141470802586589 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7552742616033755, + "acc_stderr": 0.027985699387036406, + "acc_norm": 0.7552742616033755, + "acc_norm_stderr": 0.027985699387036406 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.41264667535853977, + "acc_stderr": 0.012573836633799022, + "acc_norm": 0.41264667535853977, + "acc_norm_stderr": 0.012573836633799022 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6519607843137255, + "acc_stderr": 0.03343311240488419, + "acc_norm": 0.6519607843137255, + "acc_norm_stderr": 0.03343311240488419 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.703030303030303, + "acc_stderr": 0.035679697722680474, + "acc_norm": 0.703030303030303, + "acc_norm_stderr": 
0.035679697722680474 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.36964504283965727, + "mc1_stderr": 0.016898180706973902, + "mc2": 0.5576391989992574, + "mc2_stderr": 0.015690326849132796 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.6139315230224321, + "acc_stderr": 0.016738130760321757, + "acc_norm": 0.6375442739079102, + "acc_norm_stderr": 0.016527131240453696 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + 
"harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "logicker/SkkuDataScience-10.7B-v6", + "model_sha": "20a7b276ffaa25e82374e2ccf0877b19a0ffafd4", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/logicker/SkkuDataScience-DPO-v2-440-ckpt/result_2024-02-27 17:03:41.json b/logicker/SkkuDataScience-DPO-v2-440-ckpt/result_2024-02-27 17:03:41.json new file mode 100644 index 0000000000000000000000000000000000000000..3f6f3611f5dbb56523fdfd75ad2f8fc5b9875095 --- /dev/null +++ b/logicker/SkkuDataScience-DPO-v2-440-ckpt/result_2024-02-27 17:03:41.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4684300341296928, + "acc_stderr": 0.01458223646086698, + "acc_norm": 0.5511945392491467, + "acc_norm_stderr": 
0.014534599585097664 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4631547500497909, + "acc_stderr": 0.004976214989483499, + "acc_norm": 0.6413065126468831, + "acc_norm_stderr": 0.004786368011500454 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6023391812865497, + "acc_stderr": 0.03753638955761691, + "acc_norm": 0.6023391812865497, + "acc_norm_stderr": 0.03753638955761691 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6310679611650486, + "acc_stderr": 0.0477761518115674, + "acc_norm": 0.6310679611650486, + "acc_norm_stderr": 0.0477761518115674 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6679438058748404, + "acc_stderr": 0.016841174655295714, + "acc_norm": 0.6679438058748404, + "acc_norm_stderr": 0.016841174655295714 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.043097329010363554, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.043097329010363554 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206824, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206824 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4595744680851064, + "acc_stderr": 0.03257901482099836, + "acc_norm": 0.4595744680851064, + "acc_norm_stderr": 0.03257901482099836 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.46987951807228917, + "acc_stderr": 0.03885425420866766, + "acc_norm": 0.46987951807228917, + "acc_norm_stderr": 0.03885425420866766 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6302250803858521, + "acc_stderr": 0.027417996705630998, + "acc_norm": 0.6302250803858521, + "acc_norm_stderr": 0.027417996705630998 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.6143497757847534, + "acc_stderr": 0.03266842214289201, + "acc_norm": 0.6143497757847534, + "acc_norm_stderr": 0.03266842214289201 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6641221374045801, + "acc_stderr": 0.04142313771996664, + "acc_norm": 0.6641221374045801, + 
"acc_norm_stderr": 0.04142313771996664 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7171717171717171, + "acc_stderr": 0.03208779558786752, + "acc_norm": 0.7171717171717171, + "acc_norm_stderr": 0.03208779558786752 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.503448275862069, + "acc_stderr": 0.04166567577101579, + "acc_norm": 0.503448275862069, + "acc_norm_stderr": 0.04166567577101579 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.041583075330832865, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.041583075330832865 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5714285714285714, + "acc_stderr": 0.03214536859788639, + "acc_norm": 0.5714285714285714, + "acc_norm_stderr": 0.03214536859788639 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5153846153846153, + "acc_stderr": 0.025339003010106498, + "acc_norm": 0.5153846153846153, + "acc_norm_stderr": 0.025339003010106498 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.04557239513497751, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.04557239513497751 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.034819048444388045, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.034819048444388045 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5806451612903226, + "acc_stderr": 0.02807158890109184, + "acc_norm": 
0.5806451612903226, + "acc_norm_stderr": 0.02807158890109184 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7905982905982906, + "acc_stderr": 0.026655699653922754, + "acc_norm": 0.7905982905982906, + "acc_norm_stderr": 0.026655699653922754 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5471698113207547, + "acc_stderr": 0.03063562795796182, + "acc_norm": 0.5471698113207547, + "acc_norm_stderr": 0.03063562795796182 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6090909090909091, + "acc_stderr": 0.04673752333670239, + "acc_norm": 0.6090909090909091, + "acc_norm_stderr": 0.04673752333670239 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3296296296296296, + "acc_stderr": 0.02866120111652458, + "acc_norm": 0.3296296296296296, + "acc_norm_stderr": 0.02866120111652458 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.32450331125827814, + "acc_stderr": 0.038227469376587525, + "acc_norm": 0.32450331125827814, + "acc_norm_stderr": 0.038227469376587525 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7014925373134329, + "acc_stderr": 0.03235743789355044, + "acc_norm": 0.7014925373134329, + "acc_norm_stderr": 0.03235743789355044 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5028901734104047, + "acc_stderr": 0.038124005659748335, + "acc_norm": 0.5028901734104047, + "acc_norm_stderr": 0.038124005659748335 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.4126984126984127, + "acc_stderr": 0.025355741263055256, + "acc_norm": 0.4126984126984127, + "acc_norm_stderr": 0.025355741263055256 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5902777777777778, + "acc_stderr": 0.04112490974670787, + "acc_norm": 0.5902777777777778, + "acc_norm_stderr": 0.04112490974670787 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.74, + 
"acc_stderr": 0.04408440022768077, + "acc_norm": 0.74, + "acc_norm_stderr": 0.04408440022768077 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.6184971098265896, + "acc_stderr": 0.026152198619726796, + "acc_norm": 0.6184971098265896, + "acc_norm_stderr": 0.026152198619726796 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5644171779141104, + "acc_stderr": 0.03895632464138937, + "acc_norm": 0.5644171779141104, + "acc_norm_stderr": 0.03895632464138937 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6450617283950617, + "acc_stderr": 0.02662415247884585, + "acc_norm": 0.6450617283950617, + "acc_norm_stderr": 0.02662415247884585 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7202072538860104, + "acc_stderr": 0.03239637046735703, + "acc_norm": 0.7202072538860104, + "acc_norm_stderr": 0.03239637046735703 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.49122807017543857, + "acc_stderr": 0.04702880432049615, + "acc_norm": 0.49122807017543857, + "acc_norm_stderr": 0.04702880432049615 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6990825688073394, + "acc_stderr": 0.019664751366802114, + "acc_norm": 0.6990825688073394, + "acc_norm_stderr": 0.019664751366802114 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4365079365079365, + "acc_stderr": 0.04435932892851466, + "acc_norm": 0.4365079365079365, + "acc_norm_stderr": 0.04435932892851466 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5947712418300654, + "acc_stderr": 0.028110928492809075, + "acc_norm": 0.5947712418300654, + "acc_norm_stderr": 0.028110928492809075 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.64, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.64, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 
0.6942148760330579, + "acc_stderr": 0.042059539338841226, + "acc_norm": 0.6942148760330579, + "acc_norm_stderr": 0.042059539338841226 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5723684210526315, + "acc_stderr": 0.04026097083296563, + "acc_norm": 0.5723684210526315, + "acc_norm_stderr": 0.04026097083296563 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5310457516339869, + "acc_stderr": 0.02018880445636189, + "acc_norm": 0.5310457516339869, + "acc_norm_stderr": 0.02018880445636189 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.39361702127659576, + "acc_stderr": 0.029144544781596147, + "acc_norm": 0.39361702127659576, + "acc_norm_stderr": 0.029144544781596147 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.375, + "acc_stderr": 0.04595091388086298, + "acc_norm": 0.375, + "acc_norm_stderr": 0.04595091388086298 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5231481481481481, + "acc_stderr": 0.03406315360711507, + "acc_norm": 0.5231481481481481, + "acc_norm_stderr": 0.03406315360711507 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.22905027932960895, + "acc_stderr": 0.014054314935614546, + "acc_norm": 0.22905027932960895, + "acc_norm_stderr": 0.014054314935614546 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.72, + "acc_stderr": 0.045126085985421296, + "acc_norm": 0.72, + "acc_norm_stderr": 0.045126085985421296 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.45955882352941174, + "acc_stderr": 0.03027332507734575, + "acc_norm": 0.45955882352941174, + "acc_norm_stderr": 0.03027332507734575 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6775510204081633, + "acc_stderr": 0.029923100563683906, + "acc_norm": 0.6775510204081633, + "acc_norm_stderr": 0.029923100563683906 + }, + 
"harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7468354430379747, + "acc_stderr": 0.0283046579430353, + "acc_norm": 0.7468354430379747, + "acc_norm_stderr": 0.0283046579430353 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.4028683181225554, + "acc_stderr": 0.012526955577118007, + "acc_norm": 0.4028683181225554, + "acc_norm_stderr": 0.012526955577118007 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.7303921568627451, + "acc_stderr": 0.03114557065948678, + "acc_norm": 0.7303921568627451, + "acc_norm_stderr": 0.03114557065948678 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.7212121212121212, + "acc_stderr": 0.03501438706296781, + "acc_norm": 0.7212121212121212, + "acc_norm_stderr": 0.03501438706296781 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.379436964504284, + "mc1_stderr": 0.016987039266143, + "mc2": 0.5528898208302689, + "mc2_stderr": 0.015994575628051177 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5914994096812278, + "acc_stderr": 0.016900062879427122, + "acc_norm": 0.6044864226682408, + "acc_norm_stderr": 0.01681081590220604 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + 
"harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "logicker/SkkuDataScience-DPO-v2-440-ckpt", + "model_sha": "95ffca59a50a25760b187943c1473791ecbd5f86", + "model_dtype": 
"torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/logicker/SkkuDataScience-DPO-v2-90-ckpt/result_2024-02-08 06:19:51.json b/logicker/SkkuDataScience-DPO-v2-90-ckpt/result_2024-02-08 06:19:51.json new file mode 100644 index 0000000000000000000000000000000000000000..cb8121f61201cafe84cbd4c35c968a0447c75807 --- /dev/null +++ b/logicker/SkkuDataScience-DPO-v2-90-ckpt/result_2024-02-08 06:19:51.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4761092150170648, + "acc_stderr": 0.014594701798071654, + "acc_norm": 0.5332764505119454, + "acc_norm_stderr": 0.01457899585960581 + }, + "harness|ko_hellaswag|10": { + "acc": 0.45648277235610435, + "acc_stderr": 0.004970846697552306, + "acc_norm": 0.6337382991435969, + "acc_norm_stderr": 0.004807975515446484 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6198830409356725, + "acc_stderr": 0.037229657413855394, + "acc_norm": 0.6198830409356725, + "acc_norm_stderr": 0.037229657413855394 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6310679611650486, + "acc_stderr": 0.0477761518115674, + "acc_norm": 0.6310679611650486, + "acc_norm_stderr": 0.0477761518115674 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6781609195402298, + "acc_stderr": 0.016706381415057904, + "acc_norm": 0.6781609195402298, + "acc_norm_stderr": 0.016706381415057904 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4740740740740741, + "acc_stderr": 0.04313531696750573, + "acc_norm": 0.4740740740740741, + "acc_norm_stderr": 0.04313531696750573 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720683, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720683 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4765957446808511, + "acc_stderr": 0.03265019475033583, + "acc_norm": 0.4765957446808511, + "acc_norm_stderr": 
0.03265019475033583 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.5060240963855421, + "acc_stderr": 0.038922121953330446, + "acc_norm": 0.5060240963855421, + "acc_norm_stderr": 0.038922121953330446 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6430868167202572, + "acc_stderr": 0.02721042037593402, + "acc_norm": 0.6430868167202572, + "acc_norm_stderr": 0.02721042037593402 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.6098654708520179, + "acc_stderr": 0.03273766725459156, + "acc_norm": 0.6098654708520179, + "acc_norm_stderr": 0.03273766725459156 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6564885496183206, + "acc_stderr": 0.041649760719448786, + "acc_norm": 0.6564885496183206, + "acc_norm_stderr": 0.041649760719448786 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.45, + "acc_stderr": 0.04999999999999999, + "acc_norm": 0.45, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7272727272727273, + "acc_stderr": 0.031730712390717244, + "acc_norm": 0.7272727272727273, + "acc_norm_stderr": 0.031730712390717244 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5310344827586206, + "acc_stderr": 0.04158632762097828, + "acc_norm": 0.5310344827586206, + "acc_norm_stderr": 0.04158632762097828 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.04220773659171452, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.04220773659171452 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5756302521008403, + "acc_stderr": 0.03210479051015776, + "acc_norm": 0.5756302521008403, + "acc_norm_stderr": 0.03210479051015776 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.517948717948718, + "acc_stderr": 0.025334667080954887, + "acc_norm": 0.517948717948718, + "acc_norm_stderr": 0.025334667080954887 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.64, + "acc_stderr": 0.04824181513244218, + 
"acc_norm": 0.64, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6481481481481481, + "acc_stderr": 0.04616631111801713, + "acc_norm": 0.6481481481481481, + "acc_norm_stderr": 0.04616631111801713 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4187192118226601, + "acc_stderr": 0.03471192860518468, + "acc_norm": 0.4187192118226601, + "acc_norm_stderr": 0.03471192860518468 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5741935483870968, + "acc_stderr": 0.028129112709165904, + "acc_norm": 0.5741935483870968, + "acc_norm_stderr": 0.028129112709165904 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.8162393162393162, + "acc_stderr": 0.02537213967172293, + "acc_norm": 0.8162393162393162, + "acc_norm_stderr": 0.02537213967172293 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5509433962264151, + "acc_stderr": 0.030612730713641092, + "acc_norm": 0.5509433962264151, + "acc_norm_stderr": 0.030612730713641092 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6090909090909091, + "acc_stderr": 0.0467375233367024, + "acc_norm": 0.6090909090909091, + "acc_norm_stderr": 0.0467375233367024 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.337037037037037, + "acc_stderr": 0.028820884666253255, + "acc_norm": 0.337037037037037, + "acc_norm_stderr": 0.028820884666253255 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3509933774834437, + "acc_stderr": 0.03896981964257375, + "acc_norm": 0.3509933774834437, + "acc_norm_stderr": 0.03896981964257375 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7263681592039801, + "acc_stderr": 0.03152439186555404, + "acc_norm": 0.7263681592039801, + "acc_norm_stderr": 0.03152439186555404 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5144508670520231, + 
"acc_stderr": 0.03810871630454764, + "acc_norm": 0.5144508670520231, + "acc_norm_stderr": 0.03810871630454764 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.025305906241590636, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.025305906241590636 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.04155319955593146, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.04155319955593146 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.74, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.74, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.6213872832369942, + "acc_stderr": 0.02611374936131034, + "acc_norm": 0.6213872832369942, + "acc_norm_stderr": 0.02611374936131034 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5644171779141104, + "acc_stderr": 0.03895632464138937, + "acc_norm": 0.5644171779141104, + "acc_norm_stderr": 0.03895632464138937 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6234567901234568, + "acc_stderr": 0.026959344518747787, + "acc_norm": 0.6234567901234568, + "acc_norm_stderr": 0.026959344518747787 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7253886010362695, + "acc_stderr": 0.03221024508041154, + "acc_norm": 0.7253886010362695, + "acc_norm_stderr": 0.03221024508041154 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.4649122807017544, + "acc_stderr": 0.046920083813689104, + "acc_norm": 0.4649122807017544, + "acc_norm_stderr": 0.046920083813689104 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 
0.6752293577981652, + "acc_stderr": 0.020077729109310327, + "acc_norm": 0.6752293577981652, + "acc_norm_stderr": 0.020077729109310327 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4126984126984127, + "acc_stderr": 0.04403438954768177, + "acc_norm": 0.4126984126984127, + "acc_norm_stderr": 0.04403438954768177 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.6111111111111112, + "acc_stderr": 0.02791405551046801, + "acc_norm": 0.6111111111111112, + "acc_norm_stderr": 0.02791405551046801 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7107438016528925, + "acc_stderr": 0.041391127276354626, + "acc_norm": 0.7107438016528925, + "acc_norm_stderr": 0.041391127276354626 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.6052631578947368, + "acc_stderr": 0.039777499346220734, + "acc_norm": 0.6052631578947368, + "acc_norm_stderr": 0.039777499346220734 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5392156862745098, + "acc_stderr": 0.02016552331390791, + "acc_norm": 0.5392156862745098, + "acc_norm_stderr": 0.02016552331390791 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.40070921985815605, + "acc_stderr": 0.029233465745573086, + "acc_norm": 0.40070921985815605, + "acc_norm_stderr": 0.029233465745573086 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.375, + "acc_stderr": 0.04595091388086298, + "acc_norm": 0.375, + "acc_norm_stderr": 0.04595091388086298 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.03409386946992699, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.03409386946992699 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24581005586592178, + "acc_stderr": 0.014400296429225612, + "acc_norm": 0.24581005586592178, + "acc_norm_stderr": 0.014400296429225612 + }, + 
"harness|ko_mmlu_college_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.74, + "acc_stderr": 0.04408440022768081, + "acc_norm": 0.74, + "acc_norm_stderr": 0.04408440022768081 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.45955882352941174, + "acc_stderr": 0.03027332507734575, + "acc_norm": 0.45955882352941174, + "acc_norm_stderr": 0.03027332507734575 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6857142857142857, + "acc_stderr": 0.029719329422417454, + "acc_norm": 0.6857142857142857, + "acc_norm_stderr": 0.029719329422417454 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7805907172995781, + "acc_stderr": 0.026939106581553945, + "acc_norm": 0.7805907172995781, + "acc_norm_stderr": 0.026939106581553945 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3983050847457627, + "acc_stderr": 0.012503310565166235, + "acc_norm": 0.3983050847457627, + "acc_norm_stderr": 0.012503310565166235 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.7156862745098039, + "acc_stderr": 0.03166009679399813, + "acc_norm": 0.7156862745098039, + "acc_norm_stderr": 0.03166009679399813 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.7333333333333333, + "acc_stderr": 0.03453131801885416, + "acc_norm": 0.7333333333333333, + "acc_norm_stderr": 0.03453131801885416 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3659730722154223, + "mc1_stderr": 0.016862941684088358, + "mc2": 0.5151211173876071, + "mc2_stderr": 0.015498763297677864 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5525383707201889, + "acc_stderr": 0.017095190301500574, + "acc_norm": 0.5785123966942148, + "acc_norm_stderr": 0.016977101932601515 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 
1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + 
"harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "logicker/SkkuDataScience-DPO-v2-90-ckpt", + "model_sha": "5816b46fc1c1696d90a79ebcc189176638f31efc", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/logicker/SkkuDataScience-DPO/result_2024-01-13 09:54:27.json b/logicker/SkkuDataScience-DPO/result_2024-01-13 09:54:27.json new file mode 100644 index 0000000000000000000000000000000000000000..8b90a1045f2b6e608e6ebafc111e92b7badc491f --- /dev/null +++ b/logicker/SkkuDataScience-DPO/result_2024-01-13 09:54:27.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4206484641638225, + "acc_stderr": 0.0144262112525084, + "acc_norm": 0.48890784982935154, + "acc_norm_stderr": 0.014607794914013044 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4336785500896236, + "acc_stderr": 0.004945691164810071, + "acc_norm": 0.5961959768970324, + "acc_norm_stderr": 0.0048965631261168084 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5789473684210527, + "acc_stderr": 0.03786720706234213, + "acc_norm": 0.5789473684210527, + "acc_norm_stderr": 0.03786720706234213 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6310679611650486, + "acc_stderr": 0.0477761518115674, + "acc_norm": 0.6310679611650486, + "acc_norm_stderr": 0.0477761518115674 + }, + 
"harness|ko_mmlu_miscellaneous|5": { + "acc": 0.644955300127714, + "acc_stderr": 0.01711208577277299, + "acc_norm": 0.644955300127714, + "acc_norm_stderr": 0.01711208577277299 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.42962962962962964, + "acc_stderr": 0.04276349494376599, + "acc_norm": 0.42962962962962964, + "acc_norm_stderr": 0.04276349494376599 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.451063829787234, + "acc_stderr": 0.032529096196131965, + "acc_norm": 0.451063829787234, + "acc_norm_stderr": 0.032529096196131965 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4939759036144578, + "acc_stderr": 0.03892212195333047, + "acc_norm": 0.4939759036144578, + "acc_norm_stderr": 0.03892212195333047 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6141479099678456, + "acc_stderr": 0.027648149599751464, + "acc_norm": 0.6141479099678456, + "acc_norm_stderr": 0.027648149599751464 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5874439461883408, + "acc_stderr": 0.03304062175449297, + "acc_norm": 0.5874439461883408, + "acc_norm_stderr": 0.03304062175449297 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6870229007633588, + "acc_stderr": 0.04066962905677697, + "acc_norm": 0.6870229007633588, + "acc_norm_stderr": 0.04066962905677697 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6868686868686869, + "acc_stderr": 0.033042050878136525, + "acc_norm": 0.6868686868686869, + "acc_norm_stderr": 0.033042050878136525 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5103448275862069, + "acc_stderr": 0.04165774775728763, + "acc_norm": 0.5103448275862069, + "acc_norm_stderr": 0.04165774775728763 + }, + 
"harness|ko_mmlu_college_physics|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.04488482852329017, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.04488482852329017 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6050420168067226, + "acc_stderr": 0.031753678460966245, + "acc_norm": 0.6050420168067226, + "acc_norm_stderr": 0.031753678460966245 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5307692307692308, + "acc_stderr": 0.025302958890850154, + "acc_norm": 0.5307692307692308, + "acc_norm_stderr": 0.025302958890850154 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5833333333333334, + "acc_stderr": 0.04766075165356461, + "acc_norm": 0.5833333333333334, + "acc_norm_stderr": 0.04766075165356461 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4433497536945813, + "acc_stderr": 0.03495334582162934, + "acc_norm": 0.4433497536945813, + "acc_norm_stderr": 0.03495334582162934 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6, + "acc_stderr": 0.027869320571664618, + "acc_norm": 0.6, + "acc_norm_stderr": 0.027869320571664618 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7991452991452992, + "acc_stderr": 0.02624677294689047, + "acc_norm": 0.7991452991452992, + "acc_norm_stderr": 0.02624677294689047 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5056603773584906, + "acc_stderr": 0.030770900763851302, + "acc_norm": 0.5056603773584906, + "acc_norm_stderr": 0.030770900763851302 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.04769300568972744, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 
0.04769300568972744 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.35555555555555557, + "acc_stderr": 0.029185714949857406, + "acc_norm": 0.35555555555555557, + "acc_norm_stderr": 0.029185714949857406 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + "acc_stderr": 0.037345356767871984, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.037345356767871984 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6915422885572139, + "acc_stderr": 0.032658195885126966, + "acc_norm": 0.6915422885572139, + "acc_norm_stderr": 0.032658195885126966 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4797687861271676, + "acc_stderr": 0.03809342081273958, + "acc_norm": 0.4797687861271676, + "acc_norm_stderr": 0.03809342081273958 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3835978835978836, + "acc_stderr": 0.025043757318520193, + "acc_norm": 0.3835978835978836, + "acc_norm_stderr": 0.025043757318520193 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5, + "acc_stderr": 0.04181210050035455, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04181210050035455 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.77, + "acc_stderr": 0.042295258468165065, + "acc_norm": 0.77, + "acc_norm_stderr": 0.042295258468165065 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5751445086705202, + "acc_stderr": 0.026613350840261736, + "acc_norm": 0.5751445086705202, + "acc_norm_stderr": 0.026613350840261736 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5153374233128835, + "acc_stderr": 0.039265223787088445, + "acc_norm": 0.5153374233128835, + "acc_norm_stderr": 0.039265223787088445 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6018518518518519, + "acc_stderr": 0.02723741509459248, + "acc_norm": 0.6018518518518519, + 
"acc_norm_stderr": 0.02723741509459248 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6994818652849741, + "acc_stderr": 0.033088185944157494, + "acc_norm": 0.6994818652849741, + "acc_norm_stderr": 0.033088185944157494 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.41228070175438597, + "acc_stderr": 0.04630653203366596, + "acc_norm": 0.41228070175438597, + "acc_norm_stderr": 0.04630653203366596 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.636697247706422, + "acc_stderr": 0.020620603919625804, + "acc_norm": 0.636697247706422, + "acc_norm_stderr": 0.020620603919625804 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04216370213557835, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04216370213557835 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5718954248366013, + "acc_stderr": 0.028332397483664278, + "acc_norm": 0.5718954248366013, + "acc_norm_stderr": 0.028332397483664278 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.56, + "acc_stderr": 0.0498887651569859, + "acc_norm": 0.56, + "acc_norm_stderr": 0.0498887651569859 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6859504132231405, + "acc_stderr": 0.04236964753041019, + "acc_norm": 0.6859504132231405, + "acc_norm_stderr": 0.04236964753041019 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5723684210526315, + "acc_stderr": 0.04026097083296563, + "acc_norm": 0.5723684210526315, + "acc_norm_stderr": 0.04026097083296563 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4852941176470588, + "acc_stderr": 0.020219083895133924, + "acc_norm": 0.4852941176470588, + "acc_norm_stderr": 0.020219083895133924 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3617021276595745, + "acc_stderr": 0.0286638201471995, 
+ "acc_norm": 0.3617021276595745, + "acc_norm_stderr": 0.0286638201471995 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.36607142857142855, + "acc_stderr": 0.0457237235873743, + "acc_norm": 0.36607142857142855, + "acc_norm_stderr": 0.0457237235873743 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5509259259259259, + "acc_stderr": 0.03392238405321616, + "acc_norm": 0.5509259259259259, + "acc_norm_stderr": 0.03392238405321616 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.014333522059217892, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.014333522059217892 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5183823529411765, + "acc_stderr": 0.030352303395351964, + "acc_norm": 0.5183823529411765, + "acc_norm_stderr": 0.030352303395351964 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5795918367346938, + "acc_stderr": 0.031601069934496004, + "acc_norm": 0.5795918367346938, + "acc_norm_stderr": 0.031601069934496004 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7046413502109705, + "acc_stderr": 0.02969633871342288, + "acc_norm": 0.7046413502109705, + "acc_norm_stderr": 0.02969633871342288 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3898305084745763, + "acc_stderr": 0.012456386619082596, + "acc_norm": 0.3898305084745763, + "acc_norm_stderr": 0.012456386619082596 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6078431372549019, + "acc_stderr": 0.03426712349247272, + "acc_norm": 0.6078431372549019, + "acc_norm_stderr": 0.03426712349247272 + }, + 
"harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6242424242424243, + "acc_stderr": 0.03781887353205982, + "acc_norm": 0.6242424242424243, + "acc_norm_stderr": 0.03781887353205982 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2839657282741738, + "mc1_stderr": 0.01578537085839671, + "mc2": 0.4277160952430054, + "mc2_stderr": 0.015428787350322755 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.564344746162928, + "acc_stderr": 0.017047415229476316, + "acc_norm": 0.6127508854781583, + "acc_norm_stderr": 0.016747577991642792 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + 
"harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "logicker/SkkuDataScience-DPO", + "model_sha": "40753070c64c4085602bb2d11ee2f7cb7bb606cc", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/lookuss/test-llilu/result_2024-06-16 15:58:24.json b/lookuss/test-llilu/result_2024-06-16 15:58:24.json new file mode 100644 index 0000000000000000000000000000000000000000..532d7d3857d1bc7fc764249d43ff2dc4ecc98beb --- /dev/null +++ b/lookuss/test-llilu/result_2024-06-16 15:58:24.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.35921501706484643, + 
"acc_stderr": 0.014020224155839157, + "acc_norm": 0.4180887372013652, + "acc_norm_stderr": 0.014413988396996088 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3822943636725752, + "acc_stderr": 0.004849547819134474, + "acc_norm": 0.4985062736506672, + "acc_norm_stderr": 0.004989759144812287 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5789473684210527, + "acc_stderr": 0.03786720706234214, + "acc_norm": 0.5789473684210527, + "acc_norm_stderr": 0.03786720706234214 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6796116504854369, + "acc_stderr": 0.04620284082280041, + "acc_norm": 0.6796116504854369, + "acc_norm_stderr": 0.04620284082280041 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5363984674329502, + "acc_stderr": 0.017832524079593258, + "acc_norm": 0.5363984674329502, + "acc_norm_stderr": 0.017832524079593258 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4222222222222222, + "acc_stderr": 0.04266763404099582, + "acc_norm": 0.4222222222222222, + "acc_norm_stderr": 0.04266763404099582 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.451063829787234, + "acc_stderr": 0.032529096196131965, + "acc_norm": 0.451063829787234, + "acc_norm_stderr": 0.032529096196131965 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42771084337349397, + "acc_stderr": 0.038515976837185335, + "acc_norm": 0.42771084337349397, + "acc_norm_stderr": 0.038515976837185335 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4694533762057878, + "acc_stderr": 0.02834504586484068, + "acc_norm": 0.4694533762057878, + "acc_norm_stderr": 0.02834504586484068 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5022421524663677, + "acc_stderr": 0.033557465352232634, + "acc_norm": 0.5022421524663677, + "acc_norm_stderr": 0.033557465352232634 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 
0.5877862595419847, + "acc_stderr": 0.04317171194870255, + "acc_norm": 0.5877862595419847, + "acc_norm_stderr": 0.04317171194870255 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5252525252525253, + "acc_stderr": 0.03557806245087313, + "acc_norm": 0.5252525252525253, + "acc_norm_stderr": 0.03557806245087313 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5241379310344828, + "acc_stderr": 0.0416180850350153, + "acc_norm": 0.5241379310344828, + "acc_norm_stderr": 0.0416180850350153 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.044405219061793275, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.044405219061793275 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5042016806722689, + "acc_stderr": 0.032477343344481116, + "acc_norm": 0.5042016806722689, + "acc_norm_stderr": 0.032477343344481116 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.47435897435897434, + "acc_stderr": 0.025317649726448642, + "acc_norm": 0.47435897435897434, + "acc_norm_stderr": 0.025317649726448642 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.59, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.59, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.04832853553437056, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.04832853553437056 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4236453201970443, + "acc_stderr": 0.034767257476490364, + "acc_norm": 0.4236453201970443, + "acc_norm_stderr": 0.034767257476490364 + }, + 
"harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4838709677419355, + "acc_stderr": 0.028429203176724555, + "acc_norm": 0.4838709677419355, + "acc_norm_stderr": 0.028429203176724555 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7350427350427351, + "acc_stderr": 0.028911208802749475, + "acc_norm": 0.7350427350427351, + "acc_norm_stderr": 0.028911208802749475 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4679245283018868, + "acc_stderr": 0.030709486992556545, + "acc_norm": 0.4679245283018868, + "acc_norm_stderr": 0.030709486992556545 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5, + "acc_stderr": 0.04789131426105757, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04789131426105757 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.028317533496066475, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.028317533496066475 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3443708609271523, + "acc_stderr": 0.038796870240733264, + "acc_norm": 0.3443708609271523, + "acc_norm_stderr": 0.038796870240733264 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6467661691542289, + "acc_stderr": 0.03379790611796777, + "acc_norm": 0.6467661691542289, + "acc_norm_stderr": 0.03379790611796777 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.42196531791907516, + "acc_stderr": 0.0376574669386515, + "acc_norm": 0.42196531791907516, + "acc_norm_stderr": 0.0376574669386515 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.35978835978835977, + "acc_stderr": 0.024718075944129277, + "acc_norm": 0.35978835978835977, + "acc_norm_stderr": 0.024718075944129277 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4513888888888889, + "acc_stderr": 0.041614023984032786, + "acc_norm": 0.4513888888888889, + "acc_norm_stderr": 0.041614023984032786 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + 
"acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.63, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.63, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5317919075144508, + "acc_stderr": 0.026864624366756646, + "acc_norm": 0.5317919075144508, + "acc_norm_stderr": 0.026864624366756646 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.48466257668711654, + "acc_stderr": 0.039265223787088424, + "acc_norm": 0.48466257668711654, + "acc_norm_stderr": 0.039265223787088424 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5370370370370371, + "acc_stderr": 0.027744313443376536, + "acc_norm": 0.5370370370370371, + "acc_norm_stderr": 0.027744313443376536 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5440414507772021, + "acc_stderr": 0.035944137112724366, + "acc_norm": 0.5440414507772021, + "acc_norm_stderr": 0.035944137112724366 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.40350877192982454, + "acc_stderr": 0.046151869625837026, + "acc_norm": 0.40350877192982454, + "acc_norm_stderr": 0.046151869625837026 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5394495412844037, + "acc_stderr": 0.021370494609995103, + "acc_norm": 0.5394495412844037, + "acc_norm_stderr": 0.021370494609995103 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3412698412698413, + "acc_stderr": 0.042407993275749255, + "acc_norm": 0.3412698412698413, + "acc_norm_stderr": 0.042407993275749255 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5130718954248366, + "acc_stderr": 0.028620130800700246, + "acc_norm": 0.5130718954248366, + "acc_norm_stderr": 0.028620130800700246 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + 
"acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6694214876033058, + "acc_stderr": 0.04294340845212094, + "acc_norm": 0.6694214876033058, + "acc_norm_stderr": 0.04294340845212094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.47368421052631576, + "acc_stderr": 0.040633027314866704, + "acc_norm": 0.47368421052631576, + "acc_norm_stderr": 0.040633027314866704 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4297385620915033, + "acc_stderr": 0.020027122784928547, + "acc_norm": 0.4297385620915033, + "acc_norm_stderr": 0.020027122784928547 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3120567375886525, + "acc_stderr": 0.027640120545169938, + "acc_norm": 0.3120567375886525, + "acc_norm_stderr": 0.027640120545169938 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.39285714285714285, + "acc_stderr": 0.04635550135609976, + "acc_norm": 0.39285714285714285, + "acc_norm_stderr": 0.04635550135609976 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3287037037037037, + "acc_stderr": 0.032036140846700596, + "acc_norm": 0.3287037037037037, + "acc_norm_stderr": 0.032036140846700596 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.3005586592178771, + "acc_stderr": 0.015334566806251157, + "acc_norm": 0.3005586592178771, + "acc_norm_stderr": 0.015334566806251157 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.62, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.62, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.2867647058823529, + "acc_stderr": 0.02747227447323382, + "acc_norm": 0.2867647058823529, + "acc_norm_stderr": 0.02747227447323382 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 
0.5020408163265306, + "acc_stderr": 0.0320089533497105, + "acc_norm": 0.5020408163265306, + "acc_norm_stderr": 0.0320089533497105 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.620253164556962, + "acc_stderr": 0.0315918875296585, + "acc_norm": 0.620253164556962, + "acc_norm_stderr": 0.0315918875296585 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.33702737940026073, + "acc_stderr": 0.012072836273691327, + "acc_norm": 0.33702737940026073, + "acc_norm_stderr": 0.012072836273691327 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5441176470588235, + "acc_stderr": 0.03495624522015476, + "acc_norm": 0.5441176470588235, + "acc_norm_stderr": 0.03495624522015476 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5757575757575758, + "acc_stderr": 0.03859268142070264, + "acc_norm": 0.5757575757575758, + "acc_norm_stderr": 0.03859268142070264 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.26193390452876375, + "mc1_stderr": 0.015392118805015016, + "mc2": 0.4245355769586951, + "mc2_stderr": 0.015422397947662557 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.48288075560802834, + "acc_stderr": 0.017180275246085626, + "acc_norm": 0.5230224321133412, + "acc_norm_stderr": 0.01717212154672763 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + 
"harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + 
"model_name": "lookuss/test-llilu", + "model_sha": "41d8e6fa230806c20a6fe0e1f3ac7c460ea08e54", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/lwef/llm-bench-upload-1/result_2024-07-13 06:08:24.json b/lwef/llm-bench-upload-1/result_2024-07-13 06:08:24.json new file mode 100644 index 0000000000000000000000000000000000000000..3823a0397742239aa8388c382e43dcac6cb74360 --- /dev/null +++ b/lwef/llm-bench-upload-1/result_2024-07-13 06:08:24.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.36945392491467577, + "acc_stderr": 0.014104578366491894, + "acc_norm": 0.4496587030716723, + "acc_norm_stderr": 0.014537144444284738 + }, + "harness|ko_hellaswag|10": { + "acc": 0.38936466839275047, + "acc_stderr": 0.00486609688094144, + "acc_norm": 0.5210117506472814, + "acc_norm_stderr": 0.00498537355077511 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5263157894736842, + "acc_stderr": 0.03829509868994727, + "acc_norm": 0.5263157894736842, + "acc_norm_stderr": 0.03829509868994727 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5922330097087378, + "acc_stderr": 0.048657775704107696, + "acc_norm": 0.5922330097087378, + "acc_norm_stderr": 0.048657775704107696 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5146871008939975, + "acc_stderr": 0.017872248024429122, + "acc_norm": 0.5146871008939975, + "acc_norm_stderr": 0.017872248024429122 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.42962962962962964, + "acc_stderr": 0.04276349494376598, + "acc_norm": 0.42962962962962964, + "acc_norm_stderr": 0.04276349494376598 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4553191489361702, + "acc_stderr": 
0.03255525359340355, + "acc_norm": 0.4553191489361702, + "acc_norm_stderr": 0.03255525359340355 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42168674698795183, + "acc_stderr": 0.03844453181770917, + "acc_norm": 0.42168674698795183, + "acc_norm_stderr": 0.03844453181770917 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4694533762057878, + "acc_stderr": 0.02834504586484069, + "acc_norm": 0.4694533762057878, + "acc_norm_stderr": 0.02834504586484069 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.47085201793721976, + "acc_stderr": 0.03350073248773403, + "acc_norm": 0.47085201793721976, + "acc_norm_stderr": 0.03350073248773403 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5725190839694656, + "acc_stderr": 0.04338920305792401, + "acc_norm": 0.5725190839694656, + "acc_norm_stderr": 0.04338920305792401 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5808080808080808, + "acc_stderr": 0.03515520728670417, + "acc_norm": 0.5808080808080808, + "acc_norm_stderr": 0.03515520728670417 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5517241379310345, + "acc_stderr": 0.04144311810878151, + "acc_norm": 0.5517241379310345, + "acc_norm_stderr": 0.04144311810878151 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.04533838195929777, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.04533838195929777 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4789915966386555, + "acc_stderr": 0.03244980849990029, + "acc_norm": 0.4789915966386555, + "acc_norm_stderr": 0.03244980849990029 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.45384615384615384, + "acc_stderr": 0.02524277098712617, + "acc_norm": 0.45384615384615384, + "acc_norm_stderr": 0.02524277098712617 + }, + 
"harness|ko_mmlu_computer_security|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5740740740740741, + "acc_stderr": 0.0478034362693679, + "acc_norm": 0.5740740740740741, + "acc_norm_stderr": 0.0478034362693679 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.43349753694581283, + "acc_stderr": 0.03486731727419873, + "acc_norm": 0.43349753694581283, + "acc_norm_stderr": 0.03486731727419873 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.49032258064516127, + "acc_stderr": 0.028438677998909558, + "acc_norm": 0.49032258064516127, + "acc_norm_stderr": 0.028438677998909558 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6709401709401709, + "acc_stderr": 0.03078232157768817, + "acc_norm": 0.6709401709401709, + "acc_norm_stderr": 0.03078232157768817 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.49433962264150944, + "acc_stderr": 0.03077090076385131, + "acc_norm": 0.49433962264150944, + "acc_norm_stderr": 0.03077090076385131 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.337037037037037, + "acc_stderr": 0.028820884666253252, + "acc_norm": 0.337037037037037, + "acc_norm_stderr": 0.028820884666253252 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.40397350993377484, + "acc_stderr": 0.040064856853653415, + "acc_norm": 0.40397350993377484, + "acc_norm_stderr": 0.040064856853653415 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6169154228855721, + "acc_stderr": 0.0343751933733825, + "acc_norm": 0.6169154228855721, + "acc_norm_stderr": 
0.0343751933733825 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4624277456647399, + "acc_stderr": 0.0380168510452446, + "acc_norm": 0.4624277456647399, + "acc_norm_stderr": 0.0380168510452446 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3201058201058201, + "acc_stderr": 0.0240268463928735, + "acc_norm": 0.3201058201058201, + "acc_norm_stderr": 0.0240268463928735 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.04076663253918567, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.04076663253918567 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.68, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.68, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5317919075144508, + "acc_stderr": 0.02686462436675666, + "acc_norm": 0.5317919075144508, + "acc_norm_stderr": 0.02686462436675666 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4171779141104294, + "acc_stderr": 0.038741028598180814, + "acc_norm": 0.4171779141104294, + "acc_norm_stderr": 0.038741028598180814 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4537037037037037, + "acc_stderr": 0.02770122846854259, + "acc_norm": 0.4537037037037037, + "acc_norm_stderr": 0.02770122846854259 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5129533678756477, + "acc_stderr": 0.03607228061047749, + "acc_norm": 0.5129533678756477, + "acc_norm_stderr": 0.03607228061047749 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04434600701584925, + "acc_norm": 0.3333333333333333, + 
"acc_norm_stderr": 0.04434600701584925 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5174311926605505, + "acc_stderr": 0.021424291871853157, + "acc_norm": 0.5174311926605505, + "acc_norm_stderr": 0.021424291871853157 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.04040610178208841, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.04040610178208841 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.42810457516339867, + "acc_stderr": 0.028332397483664274, + "acc_norm": 0.42810457516339867, + "acc_norm_stderr": 0.028332397483664274 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.043913262867240704, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.043913262867240704 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4276315789473684, + "acc_stderr": 0.04026097083296558, + "acc_norm": 0.4276315789473684, + "acc_norm_stderr": 0.04026097083296558 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3709150326797386, + "acc_stderr": 0.019542101564854128, + "acc_norm": 0.3709150326797386, + "acc_norm_stderr": 0.019542101564854128 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.34397163120567376, + "acc_stderr": 0.028338017428611324, + "acc_norm": 0.34397163120567376, + "acc_norm_stderr": 0.028338017428611324 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3482142857142857, + "acc_stderr": 0.04521829902833587, + "acc_norm": 0.3482142857142857, + "acc_norm_stderr": 0.04521829902833587 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.33796296296296297, + "acc_stderr": 0.03225941352631295, + "acc_norm": 0.33796296296296297, + "acc_norm_stderr": 0.03225941352631295 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2547486033519553, + 
"acc_stderr": 0.014572650383409163, + "acc_norm": 0.2547486033519553, + "acc_norm_stderr": 0.014572650383409163 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.34558823529411764, + "acc_stderr": 0.028888193103988637, + "acc_norm": 0.34558823529411764, + "acc_norm_stderr": 0.028888193103988637 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.40816326530612246, + "acc_stderr": 0.03146465712827424, + "acc_norm": 0.40816326530612246, + "acc_norm_stderr": 0.03146465712827424 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5358649789029536, + "acc_stderr": 0.03246338898055659, + "acc_norm": 0.5358649789029536, + "acc_norm_stderr": 0.03246338898055659 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3455019556714472, + "acc_stderr": 0.012145303004087204, + "acc_norm": 0.3455019556714472, + "acc_norm_stderr": 0.012145303004087204 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4362745098039216, + "acc_stderr": 0.03480693138457038, + "acc_norm": 0.4362745098039216, + "acc_norm_stderr": 0.03480693138457038 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.038881769216741004, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.038881769216741004 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2460220318237454, + "mc1_stderr": 0.015077219200662574, + "mc2": 0.4118433559608098, + "mc2_stderr": 0.01511281299740463 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.42621015348288077, + "acc_stderr": 0.017002122609489256, + "acc_norm": 0.564344746162928, + "acc_norm_stderr": 0.01704741522947632 + } + }, + "versions": { 
+ "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + 
"harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "lwef/llm-bench-upload-1", + "model_sha": "5330609dd6135562b12899c670339d642cb391a3", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/lwef/llm-bench-upload-1/result_2024-07-13 06:32:05.json b/lwef/llm-bench-upload-1/result_2024-07-13 06:32:05.json new file mode 100644 index 0000000000000000000000000000000000000000..ed104aac2cba35c9231b6973a5c838b00ae971ea --- /dev/null +++ b/lwef/llm-bench-upload-1/result_2024-07-13 06:32:05.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.36689419795221845, + "acc_stderr": 0.014084133118104298, + "acc_norm": 0.447098976109215, + "acc_norm_stderr": 0.014529380160526847 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3891655048795061, + "acc_stderr": 0.004865645485910437, + "acc_norm": 0.5216092411870146, + "acc_norm_stderr": 0.004985119183640756 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5321637426900585, + "acc_stderr": 0.03826882417660368, + "acc_norm": 0.5321637426900585, + "acc_norm_stderr": 0.03826882417660368 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5922330097087378, + 
"acc_stderr": 0.048657775704107696, + "acc_norm": 0.5922330097087378, + "acc_norm_stderr": 0.048657775704107696 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5172413793103449, + "acc_stderr": 0.017869330154003705, + "acc_norm": 0.5172413793103449, + "acc_norm_stderr": 0.017869330154003705 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.42962962962962964, + "acc_stderr": 0.04276349494376598, + "acc_norm": 0.42962962962962964, + "acc_norm_stderr": 0.04276349494376598 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4553191489361702, + "acc_stderr": 0.03255525359340355, + "acc_norm": 0.4553191489361702, + "acc_norm_stderr": 0.03255525359340355 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.41566265060240964, + "acc_stderr": 0.03836722176598052, + "acc_norm": 0.41566265060240964, + "acc_norm_stderr": 0.03836722176598052 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4694533762057878, + "acc_stderr": 0.02834504586484069, + "acc_norm": 0.4694533762057878, + "acc_norm_stderr": 0.02834504586484069 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.47085201793721976, + "acc_stderr": 0.03350073248773403, + "acc_norm": 0.47085201793721976, + "acc_norm_stderr": 0.03350073248773403 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5725190839694656, + "acc_stderr": 0.04338920305792401, + "acc_norm": 0.5725190839694656, + "acc_norm_stderr": 0.04338920305792401 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5808080808080808, + "acc_stderr": 0.03515520728670417, + "acc_norm": 0.5808080808080808, + "acc_norm_stderr": 0.03515520728670417 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 
0.5517241379310345, + "acc_stderr": 0.04144311810878151, + "acc_norm": 0.5517241379310345, + "acc_norm_stderr": 0.04144311810878151 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.04533838195929777, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.04533838195929777 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4789915966386555, + "acc_stderr": 0.03244980849990029, + "acc_norm": 0.4789915966386555, + "acc_norm_stderr": 0.03244980849990029 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4564102564102564, + "acc_stderr": 0.02525448542479961, + "acc_norm": 0.4564102564102564, + "acc_norm_stderr": 0.02525448542479961 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5740740740740741, + "acc_stderr": 0.0478034362693679, + "acc_norm": 0.5740740740740741, + "acc_norm_stderr": 0.0478034362693679 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.43842364532019706, + "acc_stderr": 0.034912078574865175, + "acc_norm": 0.43842364532019706, + "acc_norm_stderr": 0.034912078574865175 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.49032258064516127, + "acc_stderr": 0.028438677998909558, + "acc_norm": 0.49032258064516127, + "acc_norm_stderr": 0.028438677998909558 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6752136752136753, + "acc_stderr": 0.03067902276549883, + "acc_norm": 0.6752136752136753, + "acc_norm_stderr": 0.03067902276549883 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.49433962264150944, + "acc_stderr": 0.03077090076385131, + "acc_norm": 0.49433962264150944, + "acc_norm_stderr": 0.03077090076385131 + }, + 
"harness|ko_mmlu_public_relations|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.337037037037037, + "acc_stderr": 0.028820884666253252, + "acc_norm": 0.337037037037037, + "acc_norm_stderr": 0.028820884666253252 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3973509933774834, + "acc_stderr": 0.039955240076816806, + "acc_norm": 0.3973509933774834, + "acc_norm_stderr": 0.039955240076816806 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6169154228855721, + "acc_stderr": 0.0343751933733825, + "acc_norm": 0.6169154228855721, + "acc_norm_stderr": 0.0343751933733825 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4624277456647399, + "acc_stderr": 0.0380168510452446, + "acc_norm": 0.4624277456647399, + "acc_norm_stderr": 0.0380168510452446 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.32275132275132273, + "acc_stderr": 0.024078943243597016, + "acc_norm": 0.32275132275132273, + "acc_norm_stderr": 0.024078943243597016 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3819444444444444, + "acc_stderr": 0.040629907841466674, + "acc_norm": 0.3819444444444444, + "acc_norm_stderr": 0.040629907841466674 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.68, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.68, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5317919075144508, + "acc_stderr": 0.02686462436675666, + "acc_norm": 0.5317919075144508, + "acc_norm_stderr": 0.02686462436675666 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4171779141104294, + "acc_stderr": 0.038741028598180814, + "acc_norm": 0.4171779141104294, + 
"acc_norm_stderr": 0.038741028598180814 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4506172839506173, + "acc_stderr": 0.027684721415656206, + "acc_norm": 0.4506172839506173, + "acc_norm_stderr": 0.027684721415656206 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5129533678756477, + "acc_stderr": 0.03607228061047749, + "acc_norm": 0.5129533678756477, + "acc_norm_stderr": 0.03607228061047749 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04434600701584925, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04434600701584925 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5174311926605505, + "acc_stderr": 0.021424291871853157, + "acc_norm": 0.5174311926605505, + "acc_norm_stderr": 0.021424291871853157 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.04040610178208841, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.04040610178208841 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.434640522875817, + "acc_stderr": 0.028384256704883037, + "acc_norm": 0.434640522875817, + "acc_norm_stderr": 0.028384256704883037 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.043913262867240704, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.043913262867240704 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4276315789473684, + "acc_stderr": 0.04026097083296558, + "acc_norm": 0.4276315789473684, + "acc_norm_stderr": 0.04026097083296558 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.37254901960784315, + "acc_stderr": 0.019559646809215927, + 
"acc_norm": 0.37254901960784315, + "acc_norm_stderr": 0.019559646809215927 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.33687943262411346, + "acc_stderr": 0.02819553487396673, + "acc_norm": 0.33687943262411346, + "acc_norm_stderr": 0.02819553487396673 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.04547960999764376, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.04547960999764376 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.032149521478027486, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.032149521478027486 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2569832402234637, + "acc_stderr": 0.014614465821966339, + "acc_norm": 0.2569832402234637, + "acc_norm_stderr": 0.014614465821966339 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3492647058823529, + "acc_stderr": 0.028959755196824862, + "acc_norm": 0.3492647058823529, + "acc_norm_stderr": 0.028959755196824862 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.40816326530612246, + "acc_stderr": 0.03146465712827424, + "acc_norm": 0.40816326530612246, + "acc_norm_stderr": 0.03146465712827424 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5358649789029536, + "acc_stderr": 0.03246338898055659, + "acc_norm": 0.5358649789029536, + "acc_norm_stderr": 0.03246338898055659 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3468057366362451, + "acc_stderr": 0.012156071332318708, + "acc_norm": 0.3468057366362451, + "acc_norm_stderr": 0.012156071332318708 + }, + 
"harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4362745098039216, + "acc_stderr": 0.03480693138457038, + "acc_norm": 0.4362745098039216, + "acc_norm_stderr": 0.03480693138457038 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.038881769216741004, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.038881769216741004 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24479804161566707, + "mc1_stderr": 0.015051869486715004, + "mc2": 0.41197181993299653, + "mc2_stderr": 0.015110830604964627 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.42857142857142855, + "acc_stderr": 0.01701403811929748, + "acc_norm": 0.5619834710743802, + "acc_norm_stderr": 0.01705775370216029 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 
1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "lwef/llm-bench-upload-1", + "model_sha": "5330609dd6135562b12899c670339d642cb391a3", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/macadeliccc/Samantha-Qwen-2-7B/result_2024-07-16 18:16:35.json b/macadeliccc/Samantha-Qwen-2-7B/result_2024-07-16 18:16:35.json new file mode 100644 index 
0000000000000000000000000000000000000000..4c1641f51a46c04e7395ace6ce029bdf690bc6fb --- /dev/null +++ b/macadeliccc/Samantha-Qwen-2-7B/result_2024-07-16 18:16:35.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.37627986348122866, + "acc_stderr": 0.014157022555407173, + "acc_norm": 0.4283276450511945, + "acc_norm_stderr": 0.014460496367599027 + }, + "harness|ko_hellaswag|10": { + "acc": 0.2504481179047998, + "acc_stderr": 0.004323856300539177, + "acc_norm": 0.2504481179047998, + "acc_norm_stderr": 0.004323856300539177 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.036155076303109344, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.036155076303109344 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.7572815533980582, + "acc_stderr": 0.04245022486384493, + "acc_norm": 0.7572815533980582, + "acc_norm_stderr": 0.04245022486384493 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6424010217113666, + "acc_stderr": 0.017139488998803274, + "acc_norm": 0.6424010217113666, + "acc_norm_stderr": 0.017139488998803274 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3851851851851852, + "acc_stderr": 0.042039210401562783, + "acc_norm": 0.3851851851851852, + "acc_norm_stderr": 0.042039210401562783 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.5531914893617021, + "acc_stderr": 0.0325005368436584, + "acc_norm": 0.5531914893617021, + "acc_norm_stderr": 0.0325005368436584 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4578313253012048, + "acc_stderr": 0.03878626771002361, + "acc_norm": 0.4578313253012048, + "acc_norm_stderr": 0.03878626771002361 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6237942122186495, + "acc_stderr": 0.02751392568354943, + "acc_norm": 0.6237942122186495, + "acc_norm_stderr": 
0.02751392568354943 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5919282511210763, + "acc_stderr": 0.03298574607842822, + "acc_norm": 0.5919282511210763, + "acc_norm_stderr": 0.03298574607842822 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.648854961832061, + "acc_stderr": 0.0418644516301375, + "acc_norm": 0.648854961832061, + "acc_norm_stderr": 0.0418644516301375 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7272727272727273, + "acc_stderr": 0.031730712390717244, + "acc_norm": 0.7272727272727273, + "acc_norm_stderr": 0.031730712390717244 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.6068965517241379, + "acc_stderr": 0.040703290137070705, + "acc_norm": 0.6068965517241379, + "acc_norm_stderr": 0.040703290137070705 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.30392156862745096, + "acc_stderr": 0.045766654032077615, + "acc_norm": 0.30392156862745096, + "acc_norm_stderr": 0.045766654032077615 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6302521008403361, + "acc_stderr": 0.03135709599613591, + "acc_norm": 0.6302521008403361, + "acc_norm_stderr": 0.03135709599613591 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.6205128205128205, + "acc_stderr": 0.02460362692409741, + "acc_norm": 0.6205128205128205, + "acc_norm_stderr": 0.02460362692409741 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.69, + "acc_stderr": 0.046482319871173156, + "acc_norm": 0.69, + "acc_norm_stderr": 0.046482319871173156 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6759259259259259, + "acc_stderr": 0.04524596007030048, + "acc_norm": 0.6759259259259259, + 
"acc_norm_stderr": 0.04524596007030048 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.5123152709359606, + "acc_stderr": 0.035169204442208966, + "acc_norm": 0.5123152709359606, + "acc_norm_stderr": 0.035169204442208966 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6161290322580645, + "acc_stderr": 0.02766618207553966, + "acc_norm": 0.6161290322580645, + "acc_norm_stderr": 0.02766618207553966 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.811965811965812, + "acc_stderr": 0.025598193686652247, + "acc_norm": 0.811965811965812, + "acc_norm_stderr": 0.025598193686652247 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5924528301886792, + "acc_stderr": 0.030242233800854498, + "acc_norm": 0.5924528301886792, + "acc_norm_stderr": 0.030242233800854498 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5545454545454546, + "acc_stderr": 0.047605488214603246, + "acc_norm": 0.5545454545454546, + "acc_norm_stderr": 0.047605488214603246 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.4962962962962963, + "acc_stderr": 0.030484701665084362, + "acc_norm": 0.4962962962962963, + "acc_norm_stderr": 0.030484701665084362 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33112582781456956, + "acc_stderr": 0.038425817186598696, + "acc_norm": 0.33112582781456956, + "acc_norm_stderr": 0.038425817186598696 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7114427860696517, + "acc_stderr": 0.03203841040213322, + "acc_norm": 0.7114427860696517, + "acc_norm_stderr": 0.03203841040213322 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5317919075144508, + "acc_stderr": 0.03804749744364763, + "acc_norm": 0.5317919075144508, + "acc_norm_stderr": 0.03804749744364763 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.5211640211640212, + "acc_stderr": 0.025728230952130726, + "acc_norm": 0.5211640211640212, + "acc_norm_stderr": 0.025728230952130726 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 
0.4930555555555556, + "acc_stderr": 0.04180806750294938, + "acc_norm": 0.4930555555555556, + "acc_norm_stderr": 0.04180806750294938 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.69, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.69, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.6329479768786127, + "acc_stderr": 0.025950054337654075, + "acc_norm": 0.6329479768786127, + "acc_norm_stderr": 0.025950054337654075 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.6073619631901841, + "acc_stderr": 0.038367409078310294, + "acc_norm": 0.6073619631901841, + "acc_norm_stderr": 0.038367409078310294 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5925925925925926, + "acc_stderr": 0.027339546640662734, + "acc_norm": 0.5925925925925926, + "acc_norm_stderr": 0.027339546640662734 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6113989637305699, + "acc_stderr": 0.03517739796373132, + "acc_norm": 0.6113989637305699, + "acc_norm_stderr": 0.03517739796373132 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.4824561403508772, + "acc_stderr": 0.04700708033551038, + "acc_norm": 0.4824561403508772, + "acc_norm_stderr": 0.04700708033551038 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6403669724770642, + "acc_stderr": 0.020575234660123787, + "acc_norm": 0.6403669724770642, + "acc_norm_stderr": 0.020575234660123787 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.46825396825396826, + "acc_stderr": 0.04463112720677171, + "acc_norm": 0.46825396825396826, + "acc_norm_stderr": 0.04463112720677171 + }, + "harness|ko_mmlu_nutrition|5": { + 
"acc": 0.6045751633986928, + "acc_stderr": 0.027996723180631455, + "acc_norm": 0.6045751633986928, + "acc_norm_stderr": 0.027996723180631455 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.7, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.7, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7603305785123967, + "acc_stderr": 0.038968789850704164, + "acc_norm": 0.7603305785123967, + "acc_norm_stderr": 0.038968789850704164 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.6710526315789473, + "acc_stderr": 0.03823428969926603, + "acc_norm": 0.6710526315789473, + "acc_norm_stderr": 0.03823428969926603 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5196078431372549, + "acc_stderr": 0.020212274976302957, + "acc_norm": 0.5196078431372549, + "acc_norm_stderr": 0.020212274976302957 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.4574468085106383, + "acc_stderr": 0.02971928127223684, + "acc_norm": 0.4574468085106383, + "acc_norm_stderr": 0.02971928127223684 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.04697113923010213, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.04697113923010213 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5138888888888888, + "acc_stderr": 0.03408655867977748, + "acc_norm": 0.5138888888888888, + "acc_norm_stderr": 0.03408655867977748 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2737430167597765, + "acc_stderr": 0.014912413096372432, + "acc_norm": 0.2737430167597765, + "acc_norm_stderr": 0.014912413096372432 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.73, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.73, + "acc_norm_stderr": 0.0446196043338474 + }, + 
"harness|ko_mmlu_professional_medicine|5": { + "acc": 0.43014705882352944, + "acc_stderr": 0.030074971917302875, + "acc_norm": 0.43014705882352944, + "acc_norm_stderr": 0.030074971917302875 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6448979591836734, + "acc_stderr": 0.03063565515038763, + "acc_norm": 0.6448979591836734, + "acc_norm_stderr": 0.03063565515038763 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.70042194092827, + "acc_stderr": 0.029818024749753102, + "acc_norm": 0.70042194092827, + "acc_norm_stderr": 0.029818024749753102 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.39895697522816165, + "acc_stderr": 0.012506757655293669, + "acc_norm": 0.39895697522816165, + "acc_norm_stderr": 0.012506757655293669 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.696078431372549, + "acc_stderr": 0.032282103870378914, + "acc_norm": 0.696078431372549, + "acc_norm_stderr": 0.032282103870378914 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6606060606060606, + "acc_stderr": 0.03697442205031595, + "acc_norm": 0.6606060606060606, + "acc_norm_stderr": 0.03697442205031595 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3292533659730722, + "mc1_stderr": 0.016451264440068246, + "mc2": 0.508487632831226, + "mc2_stderr": 0.01578392890528046 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5832349468713105, + "acc_stderr": 0.01695048914610883, + "acc_norm": 0.6056670602125147, + "acc_norm_stderr": 0.016802090674893206 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + 
"harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + 
"harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "macadeliccc/Samantha-Qwen-2-7B", + "model_sha": "59058972fa9b56d132d04589eb17cbba277c2826", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/madatnlp/marcoroni-7b-v3-safetensor/result_2023-12-12 05:14:51.json b/madatnlp/marcoroni-7b-v3-safetensor/result_2023-12-12 05:14:51.json new file mode 100644 index 0000000000000000000000000000000000000000..2a53e4601c33dc4f2c9c495d94ed1d9762823816 --- /dev/null +++ b/madatnlp/marcoroni-7b-v3-safetensor/result_2023-12-12 05:14:51.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3643344709897611, + "acc_stderr": 0.014063260279882417, + "acc_norm": 0.4249146757679181, + "acc_norm_stderr": 0.014445698968520769 + }, + "harness|ko_hellaswag|10": { + "acc": 0.39245170284803826, + "acc_stderr": 0.004872984492967996, + "acc_norm": 0.5104560844453296, + "acc_norm_stderr": 0.004988690229505665 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.49122807017543857, + "acc_stderr": 0.038342347441649924, + "acc_norm": 0.49122807017543857, + "acc_norm_stderr": 0.038342347441649924 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5922330097087378, + "acc_stderr": 0.048657775704107675, + "acc_norm": 0.5922330097087378, + "acc_norm_stderr": 0.048657775704107675 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4763729246487867, + "acc_stderr": 0.017859989765176453, + "acc_norm": 0.4763729246487867, + "acc_norm_stderr": 0.017859989765176453 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4, + "acc_stderr": 0.04232073695151589, + 
"acc_norm": 0.4, + "acc_norm_stderr": 0.04232073695151589 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4297872340425532, + "acc_stderr": 0.03236214467715564, + "acc_norm": 0.4297872340425532, + "acc_norm_stderr": 0.03236214467715564 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39156626506024095, + "acc_stderr": 0.037998574544796354, + "acc_norm": 0.39156626506024095, + "acc_norm_stderr": 0.037998574544796354 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.48231511254019294, + "acc_stderr": 0.02838032284907713, + "acc_norm": 0.48231511254019294, + "acc_norm_stderr": 0.02838032284907713 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.43946188340807174, + "acc_stderr": 0.03331092511038179, + "acc_norm": 0.43946188340807174, + "acc_norm_stderr": 0.03331092511038179 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.42748091603053434, + "acc_stderr": 0.043389203057924, + "acc_norm": 0.42748091603053434, + "acc_norm_stderr": 0.043389203057924 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5808080808080808, + "acc_stderr": 0.03515520728670417, + "acc_norm": 0.5808080808080808, + "acc_norm_stderr": 0.03515520728670417 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4896551724137931, + "acc_stderr": 0.04165774775728763, + "acc_norm": 0.4896551724137931, + "acc_norm_stderr": 0.04165774775728763 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.042801058373643945, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.042801058373643945 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5294117647058824, + "acc_stderr": 
0.03242225027115006, + "acc_norm": 0.5294117647058824, + "acc_norm_stderr": 0.03242225027115006 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4717948717948718, + "acc_stderr": 0.025310639254933914, + "acc_norm": 0.4717948717948718, + "acc_norm_stderr": 0.025310639254933914 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.62, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.62, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5185185185185185, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.5185185185185185, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3842364532019704, + "acc_stderr": 0.034223985656575515, + "acc_norm": 0.3842364532019704, + "acc_norm_stderr": 0.034223985656575515 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4290322580645161, + "acc_stderr": 0.02815603653823321, + "acc_norm": 0.4290322580645161, + "acc_norm_stderr": 0.02815603653823321 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7692307692307693, + "acc_stderr": 0.027601921381417604, + "acc_norm": 0.7692307692307693, + "acc_norm_stderr": 0.027601921381417604 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4641509433962264, + "acc_stderr": 0.030693675018458003, + "acc_norm": 0.4641509433962264, + "acc_norm_stderr": 0.030693675018458003 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5272727272727272, + "acc_stderr": 0.04782001791380061, + "acc_norm": 0.5272727272727272, + "acc_norm_stderr": 0.04782001791380061 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.35555555555555557, + "acc_stderr": 0.029185714949857416, + "acc_norm": 0.35555555555555557, + "acc_norm_stderr": 0.029185714949857416 + }, + "harness|ko_mmlu_high_school_physics|5": { 
+ "acc": 0.2847682119205298, + "acc_stderr": 0.03684881521389023, + "acc_norm": 0.2847682119205298, + "acc_norm_stderr": 0.03684881521389023 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5970149253731343, + "acc_stderr": 0.034683432951111266, + "acc_norm": 0.5970149253731343, + "acc_norm_stderr": 0.034683432951111266 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.42196531791907516, + "acc_stderr": 0.0376574669386515, + "acc_norm": 0.42196531791907516, + "acc_norm_stderr": 0.0376574669386515 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.37566137566137564, + "acc_stderr": 0.024942368931159788, + "acc_norm": 0.37566137566137564, + "acc_norm_stderr": 0.024942368931159788 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3541666666666667, + "acc_stderr": 0.039994111357535424, + "acc_norm": 0.3541666666666667, + "acc_norm_stderr": 0.039994111357535424 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.55, + "acc_stderr": 0.05000000000000001, + "acc_norm": 0.55, + "acc_norm_stderr": 0.05000000000000001 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5260115606936416, + "acc_stderr": 0.026882643434022895, + "acc_norm": 0.5260115606936416, + "acc_norm_stderr": 0.026882643434022895 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.49693251533742333, + "acc_stderr": 0.03928297078179662, + "acc_norm": 0.49693251533742333, + "acc_norm_stderr": 0.03928297078179662 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4691358024691358, + "acc_stderr": 0.027767689606833946, + "acc_norm": 0.4691358024691358, + "acc_norm_stderr": 0.027767689606833946 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + 
"harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.48186528497409326, + "acc_stderr": 0.03606065001832917, + "acc_norm": 0.48186528497409326, + "acc_norm_stderr": 0.03606065001832917 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2894736842105263, + "acc_stderr": 0.04266339443159394, + "acc_norm": 0.2894736842105263, + "acc_norm_stderr": 0.04266339443159394 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5119266055045871, + "acc_stderr": 0.021431223617362233, + "acc_norm": 0.5119266055045871, + "acc_norm_stderr": 0.021431223617362233 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.044444444444444495, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.044444444444444495 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4738562091503268, + "acc_stderr": 0.028590752958852394, + "acc_norm": 0.4738562091503268, + "acc_norm_stderr": 0.028590752958852394 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.043913262867240704, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.043913262867240704 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40131578947368424, + "acc_stderr": 0.03988903703336285, + "acc_norm": 0.40131578947368424, + "acc_norm_stderr": 0.03988903703336285 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4019607843137255, + "acc_stderr": 0.01983517648437538, + "acc_norm": 0.4019607843137255, + "acc_norm_stderr": 0.01983517648437538 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.34397163120567376, + "acc_stderr": 0.02833801742861132, + "acc_norm": 0.34397163120567376, + "acc_norm_stderr": 0.02833801742861132 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4732142857142857, + "acc_stderr": 0.047389751192741546, + 
"acc_norm": 0.4732142857142857, + "acc_norm_stderr": 0.047389751192741546 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4027777777777778, + "acc_stderr": 0.03344887382997866, + "acc_norm": 0.4027777777777778, + "acc_norm_stderr": 0.03344887382997866 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2759776536312849, + "acc_stderr": 0.014950103002475358, + "acc_norm": 0.2759776536312849, + "acc_norm_stderr": 0.014950103002475358 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.41, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.66, + "acc_stderr": 0.04760952285695237, + "acc_norm": 0.66, + "acc_norm_stderr": 0.04760952285695237 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3382352941176471, + "acc_stderr": 0.028739328513983576, + "acc_norm": 0.3382352941176471, + "acc_norm_stderr": 0.028739328513983576 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.49795918367346936, + "acc_stderr": 0.0320089533497105, + "acc_norm": 0.49795918367346936, + "acc_norm_stderr": 0.0320089533497105 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5949367088607594, + "acc_stderr": 0.031955147413706725, + "acc_norm": 0.5949367088607594, + "acc_norm_stderr": 0.031955147413706725 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3409387222946545, + "acc_stderr": 0.012106817203067208, + "acc_norm": 0.3409387222946545, + "acc_norm_stderr": 0.012106817203067208 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.46078431372549017, + "acc_stderr": 0.03498501649369527, + "acc_norm": 0.46078431372549017, + "acc_norm_stderr": 0.03498501649369527 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4727272727272727, + "acc_stderr": 0.03898531605579419, + "acc_norm": 0.4727272727272727, + "acc_norm_stderr": 0.03898531605579419 + }, + 
"harness|ko_truthfulqa_mc|0": { + "mc1": 0.3561811505507956, + "mc1_stderr": 0.016763790728446342, + "mc2": 0.5364735673869772, + "mc2_stderr": 0.015999759828332336 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4014167650531287, + "acc_stderr": 0.01685290785872906, + "acc_norm": 0.41440377804014167, + "acc_norm_stderr": 0.016936583383943615 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "madatnlp/marcoroni-7b-v3-safetensor", + "model_sha": "20702b50c9eee355bfae17aab64276e2c8da420f", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/madatnlp/mist-enko-lora-2950/result_2023-12-17 00:06:10.json b/madatnlp/mist-enko-lora-2950/result_2023-12-17 00:06:10.json new file mode 100644 index 0000000000000000000000000000000000000000..917e5df86b93b429fe91efa2290061f5e79f94f5 --- /dev/null +++ b/madatnlp/mist-enko-lora-2950/result_2023-12-17 00:06:10.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.34812286689419797, + "acc_stderr": 0.013921008595179352, + "acc_norm": 0.39505119453924914, + "acc_norm_stderr": 0.014285898292938175 + }, + "harness|ko_hellaswag|10": { + "acc": 0.37651862178848833, + 
"acc_stderr": 0.0048352227940065195, + "acc_norm": 0.4856602270464051, + "acc_norm_stderr": 0.004987728900897595 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5029239766081871, + "acc_stderr": 0.03834759370936839, + "acc_norm": 0.5029239766081871, + "acc_norm_stderr": 0.03834759370936839 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5533980582524272, + "acc_stderr": 0.04922424153458934, + "acc_norm": 0.5533980582524272, + "acc_norm_stderr": 0.04922424153458934 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4904214559386973, + "acc_stderr": 0.01787668227534088, + "acc_norm": 0.4904214559386973, + "acc_norm_stderr": 0.01787668227534088 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4148148148148148, + "acc_stderr": 0.042561937679014075, + "acc_norm": 0.4148148148148148, + "acc_norm_stderr": 0.042561937679014075 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.42127659574468085, + "acc_stderr": 0.03227834510146267, + "acc_norm": 0.42127659574468085, + "acc_norm_stderr": 0.03227834510146267 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.43373493975903615, + "acc_stderr": 0.03858158940685515, + "acc_norm": 0.43373493975903615, + "acc_norm_stderr": 0.03858158940685515 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.44694533762057875, + "acc_stderr": 0.028237769422085335, + "acc_norm": 0.44694533762057875, + "acc_norm_stderr": 0.028237769422085335 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4663677130044843, + "acc_stderr": 0.033481800170603065, + "acc_norm": 0.4663677130044843, + "acc_norm_stderr": 0.033481800170603065 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.45038167938931295, + "acc_stderr": 0.04363643698524779, + "acc_norm": 0.45038167938931295, + "acc_norm_stderr": 0.04363643698524779 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 
0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.035402943770953675, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.035402943770953675 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.503448275862069, + "acc_stderr": 0.0416656757710158, + "acc_norm": 0.503448275862069, + "acc_norm_stderr": 0.0416656757710158 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.044405219061793275, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.044405219061793275 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5378151260504201, + "acc_stderr": 0.032385469487589795, + "acc_norm": 0.5378151260504201, + "acc_norm_stderr": 0.032385469487589795 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4846153846153846, + "acc_stderr": 0.025339003010106522, + "acc_norm": 0.4846153846153846, + "acc_norm_stderr": 0.025339003010106522 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5277777777777778, + "acc_stderr": 0.04826217294139894, + "acc_norm": 0.5277777777777778, + "acc_norm_stderr": 0.04826217294139894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4630541871921182, + "acc_stderr": 0.035083705204426656, + "acc_norm": 0.4630541871921182, + "acc_norm_stderr": 0.035083705204426656 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.47419354838709676, + "acc_stderr": 0.02840609505765332, + "acc_norm": 0.47419354838709676, + "acc_norm_stderr": 0.02840609505765332 + }, + 
"harness|ko_mmlu_marketing|5": { + "acc": 0.7264957264957265, + "acc_stderr": 0.029202540153431194, + "acc_norm": 0.7264957264957265, + "acc_norm_stderr": 0.029202540153431194 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4830188679245283, + "acc_stderr": 0.030755120364119898, + "acc_norm": 0.4830188679245283, + "acc_norm_stderr": 0.030755120364119898 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5272727272727272, + "acc_stderr": 0.04782001791380061, + "acc_norm": 0.5272727272727272, + "acc_norm_stderr": 0.04782001791380061 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.31851851851851853, + "acc_stderr": 0.02840653309060846, + "acc_norm": 0.31851851851851853, + "acc_norm_stderr": 0.02840653309060846 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.038020397601079024, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.038020397601079024 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6318407960199005, + "acc_stderr": 0.03410410565495302, + "acc_norm": 0.6318407960199005, + "acc_norm_stderr": 0.03410410565495302 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3930635838150289, + "acc_stderr": 0.03724249595817729, + "acc_norm": 0.3930635838150289, + "acc_norm_stderr": 0.03724249595817729 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.37566137566137564, + "acc_stderr": 0.024942368931159788, + "acc_norm": 0.37566137566137564, + "acc_norm_stderr": 0.024942368931159788 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3680555555555556, + "acc_stderr": 0.040329990539607195, + "acc_norm": 0.3680555555555556, + "acc_norm_stderr": 0.040329990539607195 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + 
"acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5520231213872833, + "acc_stderr": 0.02677299065336183, + "acc_norm": 0.5520231213872833, + "acc_norm_stderr": 0.02677299065336183 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.49079754601226994, + "acc_stderr": 0.03927705600787443, + "acc_norm": 0.49079754601226994, + "acc_norm_stderr": 0.03927705600787443 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.44753086419753085, + "acc_stderr": 0.02766713856942271, + "acc_norm": 0.44753086419753085, + "acc_norm_stderr": 0.02766713856942271 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421296, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421296 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5129533678756477, + "acc_stderr": 0.0360722806104775, + "acc_norm": 0.5129533678756477, + "acc_norm_stderr": 0.0360722806104775 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.34210526315789475, + "acc_stderr": 0.04462917535336937, + "acc_norm": 0.34210526315789475, + "acc_norm_stderr": 0.04462917535336937 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.48807339449541287, + "acc_stderr": 0.021431223617362227, + "acc_norm": 0.48807339449541287, + "acc_norm_stderr": 0.021431223617362227 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3492063492063492, + "acc_stderr": 0.04263906892795133, + "acc_norm": 0.3492063492063492, + "acc_norm_stderr": 0.04263906892795133 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.49019607843137253, + "acc_stderr": 0.028624412550167958, + "acc_norm": 0.49019607843137253, + "acc_norm_stderr": 0.028624412550167958 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.71900826446281, + "acc_stderr": 0.04103203830514511, + 
"acc_norm": 0.71900826446281, + "acc_norm_stderr": 0.04103203830514511 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4473684210526316, + "acc_stderr": 0.040463368839782514, + "acc_norm": 0.4473684210526316, + "acc_norm_stderr": 0.040463368839782514 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.40032679738562094, + "acc_stderr": 0.019821843688271758, + "acc_norm": 0.40032679738562094, + "acc_norm_stderr": 0.019821843688271758 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.39361702127659576, + "acc_stderr": 0.029144544781596143, + "acc_norm": 0.39361702127659576, + "acc_norm_stderr": 0.029144544781596143 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.38392857142857145, + "acc_stderr": 0.04616143075028547, + "acc_norm": 0.38392857142857145, + "acc_norm_stderr": 0.04616143075028547 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.39351851851851855, + "acc_stderr": 0.03331747876370312, + "acc_norm": 0.39351851851851855, + "acc_norm_stderr": 0.03331747876370312 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.25139664804469275, + "acc_stderr": 0.014508979453553988, + "acc_norm": 0.25139664804469275, + "acc_norm_stderr": 0.014508979453553988 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.63, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.63, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.02952009569768776, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.02952009569768776 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5795918367346938, + "acc_stderr": 0.031601069934496004, + "acc_norm": 0.5795918367346938, + "acc_norm_stderr": 0.031601069934496004 + }, + 
"harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5949367088607594, + "acc_stderr": 0.031955147413706725, + "acc_norm": 0.5949367088607594, + "acc_norm_stderr": 0.031955147413706725 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3409387222946545, + "acc_stderr": 0.01210681720306721, + "acc_norm": 0.3409387222946545, + "acc_norm_stderr": 0.01210681720306721 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.45098039215686275, + "acc_stderr": 0.03492406104163613, + "acc_norm": 0.45098039215686275, + "acc_norm_stderr": 0.03492406104163613 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.45454545454545453, + "acc_stderr": 0.03888176921674099, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.03888176921674099 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.28518971848225216, + "mc1_stderr": 0.015805827874454895, + "mc2": 0.47030792501762314, + "mc2_stderr": 0.01548100060962531 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.46162927981109797, + "acc_stderr": 0.01713966022184556, + "acc_norm": 0.5159386068476978, + "acc_norm_stderr": 0.017181617837190195 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 
1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "madatnlp/mist-enko-lora-2950", + "model_sha": "5f993597257141d297766ddc3578576a236cdd43", + "model_dtype": "torch.float16", + 
"lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/maum-ai/llamaum-13b-chat-qlora-s/result_2023-10-01 03:59:55.json b/maum-ai/llamaum-13b-chat-qlora-s/result_2023-10-01 03:59:55.json new file mode 100644 index 0000000000000000000000000000000000000000..fc3c6df5eccc35bae19c959041ded34411a46491 --- /dev/null +++ b/maum-ai/llamaum-13b-chat-qlora-s/result_2023-10-01 03:59:55.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3370307167235495, + "acc_stderr": 0.013813476652902279, + "acc_norm": 0.39419795221843, + "acc_norm_stderr": 0.014280522667467325 + }, + "harness|ko_hellaswag|10": { + "acc": 0.364070902210715, + "acc_stderr": 0.004801852881329742, + "acc_norm": 0.462158932483569, + "acc_norm_stderr": 0.004975470690867166 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.45614035087719296, + "acc_stderr": 0.03820042586602967, + "acc_norm": 0.45614035087719296, + "acc_norm_stderr": 0.03820042586602967 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.49514563106796117, + "acc_stderr": 0.049505043821289195, + "acc_norm": 0.49514563106796117, + "acc_norm_stderr": 0.049505043821289195 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4725415070242657, + "acc_stderr": 0.017852981266633955, + "acc_norm": 0.4725415070242657, + "acc_norm_stderr": 0.017852981266633955 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3851851851851852, + "acc_stderr": 0.042039210401562783, + "acc_norm": 0.3851851851851852, + "acc_norm_stderr": 0.042039210401562783 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3446808510638298, + "acc_stderr": 0.03106898596312215, + "acc_norm": 0.3446808510638298, + "acc_norm_stderr": 0.03106898596312215 + }, + 
"harness|ko_mmlu_virology|5": { + "acc": 0.3855421686746988, + "acc_stderr": 0.03789134424611548, + "acc_norm": 0.3855421686746988, + "acc_norm_stderr": 0.03789134424611548 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4340836012861736, + "acc_stderr": 0.0281502322445356, + "acc_norm": 0.4340836012861736, + "acc_norm_stderr": 0.0281502322445356 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4260089686098655, + "acc_stderr": 0.0331883328621728, + "acc_norm": 0.4260089686098655, + "acc_norm_stderr": 0.0331883328621728 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4732824427480916, + "acc_stderr": 0.04379024936553893, + "acc_norm": 0.4732824427480916, + "acc_norm_stderr": 0.04379024936553893 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5606060606060606, + "acc_stderr": 0.035360859475294805, + "acc_norm": 0.5606060606060606, + "acc_norm_stderr": 0.035360859475294805 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.041042692118062316, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.041042692118062316 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.04533838195929775, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.04533838195929775 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.39915966386554624, + "acc_stderr": 0.03181110032413925, + "acc_norm": 0.39915966386554624, + "acc_norm_stderr": 0.03181110032413925 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3974358974358974, + "acc_stderr": 0.024811920017903836, + "acc_norm": 0.3974358974358974, + "acc_norm_stderr": 0.024811920017903836 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.55, + "acc_stderr": 0.049999999999999996, + "acc_norm": 0.55, + 
"acc_norm_stderr": 0.049999999999999996 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.03465304488406796, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.03465304488406796 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.44193548387096776, + "acc_stderr": 0.028251557906849727, + "acc_norm": 0.44193548387096776, + "acc_norm_stderr": 0.028251557906849727 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6324786324786325, + "acc_stderr": 0.031585391577456365, + "acc_norm": 0.6324786324786325, + "acc_norm_stderr": 0.031585391577456365 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4075471698113208, + "acc_stderr": 0.030242233800854498, + "acc_norm": 0.4075471698113208, + "acc_norm_stderr": 0.030242233800854498 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.44545454545454544, + "acc_stderr": 0.047605488214603246, + "acc_norm": 0.44545454545454544, + "acc_norm_stderr": 0.047605488214603246 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3074074074074074, + "acc_stderr": 0.028133252578815642, + "acc_norm": 0.3074074074074074, + "acc_norm_stderr": 0.028133252578815642 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.037101857261199946, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.037101857261199946 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6268656716417911, + "acc_stderr": 0.034198326081760065, + "acc_norm": 0.6268656716417911, + "acc_norm_stderr": 0.034198326081760065 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.37572254335260113, + 
"acc_stderr": 0.036928207672648664, + "acc_norm": 0.37572254335260113, + "acc_norm_stderr": 0.036928207672648664 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.024278568024307702, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.024278568024307702 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.03852084696008534, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.03852084696008534 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.34, + "acc_stderr": 0.047609522856952344, + "acc_norm": 0.34, + "acc_norm_stderr": 0.047609522856952344 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.55, + "acc_stderr": 0.05, + "acc_norm": 0.55, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4913294797687861, + "acc_stderr": 0.02691504735536981, + "acc_norm": 0.4913294797687861, + "acc_norm_stderr": 0.02691504735536981 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.43558282208588955, + "acc_stderr": 0.03895632464138937, + "acc_norm": 0.43558282208588955, + "acc_norm_stderr": 0.03895632464138937 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4228395061728395, + "acc_stderr": 0.027487472980871598, + "acc_norm": 0.4228395061728395, + "acc_norm_stderr": 0.027487472980871598 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5077720207253886, + "acc_stderr": 0.03608003225569654, + "acc_norm": 0.5077720207253886, + "acc_norm_stderr": 0.03608003225569654 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.04227054451232199, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.04227054451232199 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.43853211009174314, + 
"acc_stderr": 0.021274713073954562, + "acc_norm": 0.43853211009174314, + "acc_norm_stderr": 0.021274713073954562 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.29365079365079366, + "acc_stderr": 0.04073524322147126, + "acc_norm": 0.29365079365079366, + "acc_norm_stderr": 0.04073524322147126 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.02843109544417664, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.02843109544417664 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6446280991735537, + "acc_stderr": 0.0436923632657398, + "acc_norm": 0.6446280991735537, + "acc_norm_stderr": 0.0436923632657398 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4868421052631579, + "acc_stderr": 0.04067533136309174, + "acc_norm": 0.4868421052631579, + "acc_norm_stderr": 0.04067533136309174 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3545751633986928, + "acc_stderr": 0.019353360547553697, + "acc_norm": 0.3545751633986928, + "acc_norm_stderr": 0.019353360547553697 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3475177304964539, + "acc_stderr": 0.02840662780959095, + "acc_norm": 0.3475177304964539, + "acc_norm_stderr": 0.02840662780959095 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25892857142857145, + "acc_stderr": 0.041577515398656284, + "acc_norm": 0.25892857142857145, + "acc_norm_stderr": 0.041577515398656284 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.03167468706828979, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.03167468706828979 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23128491620111732, + "acc_stderr": 0.014102223623152567, + "acc_norm": 0.23128491620111732, + "acc_norm_stderr": 0.014102223623152567 + }, + 
"harness|ko_mmlu_college_computer_science|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3014705882352941, + "acc_stderr": 0.027875982114273168, + "acc_norm": 0.3014705882352941, + "acc_norm_stderr": 0.027875982114273168 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4857142857142857, + "acc_stderr": 0.03199615232806287, + "acc_norm": 0.4857142857142857, + "acc_norm_stderr": 0.03199615232806287 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5611814345991561, + "acc_stderr": 0.032302649315470375, + "acc_norm": 0.5611814345991561, + "acc_norm_stderr": 0.032302649315470375 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3428943937418514, + "acc_stderr": 0.012123463271585899, + "acc_norm": 0.3428943937418514, + "acc_norm_stderr": 0.012123463271585899 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4264705882352941, + "acc_stderr": 0.03471157907953426, + "acc_norm": 0.4264705882352941, + "acc_norm_stderr": 0.03471157907953426 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.038956580652718446, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.038956580652718446 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2607099143206854, + "mc1_stderr": 0.015368841620766372, + "mc2": 0.4146930075606435, + "mc2_stderr": 0.015301613292343582 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4037780401416765, + "acc_stderr": 0.01686903154029863, + "acc_norm": 0.4557260920897285, + "acc_norm_stderr": 0.017122829143292644 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 
1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + 
"harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "maum-ai/llamaum-13b-chat-qlora-s", + "model_sha": "209891592ed47343e7654b1b7fdc1a514089df3b", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/maum-ai/llamaum-13b-instruct-s/result_2023-10-11 14:37:45.json b/maum-ai/llamaum-13b-instruct-s/result_2023-10-11 14:37:45.json new file mode 100644 index 0000000000000000000000000000000000000000..9d744386ad10c2beaefa77c6bb7d36544c0f542d --- /dev/null +++ b/maum-ai/llamaum-13b-instruct-s/result_2023-10-11 14:37:45.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.28668941979522183, + "acc_stderr": 0.013214986329274762, + "acc_norm": 0.35665529010238906, + "acc_norm_stderr": 0.013998056902620196 + }, + "harness|ko_hellaswag|10": { + "acc": 0.35570603465445133, + "acc_stderr": 0.00477748315963403, + "acc_norm": 0.4393547102170882, + "acc_norm_stderr": 0.004952942072999276 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.38596491228070173, + "acc_stderr": 0.03733756969066165, + "acc_norm": 0.38596491228070173, + "acc_norm_stderr": 0.03733756969066165 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.24271844660194175, + "acc_stderr": 0.042450224863844956, + "acc_norm": 0.24271844660194175, + "acc_norm_stderr": 0.042450224863844956 + }, + 
"harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3652618135376756, + "acc_stderr": 0.01721853002883864, + "acc_norm": 0.3652618135376756, + "acc_norm_stderr": 0.01721853002883864 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34814814814814815, + "acc_stderr": 0.041153246103369526, + "acc_norm": 0.34814814814814815, + "acc_norm_stderr": 0.041153246103369526 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2851063829787234, + "acc_stderr": 0.029513196625539345, + "acc_norm": 0.2851063829787234, + "acc_norm_stderr": 0.029513196625539345 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.23493975903614459, + "acc_stderr": 0.03300533186128922, + "acc_norm": 0.23493975903614459, + "acc_norm_stderr": 0.03300533186128922 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.33440514469453375, + "acc_stderr": 0.026795422327893947, + "acc_norm": 0.33440514469453375, + "acc_norm_stderr": 0.026795422327893947 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.32286995515695066, + "acc_stderr": 0.031381476375754995, + "acc_norm": 0.32286995515695066, + "acc_norm_stderr": 0.031381476375754995 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.24427480916030533, + "acc_stderr": 0.03768335959728743, + "acc_norm": 0.24427480916030533, + "acc_norm_stderr": 0.03768335959728743 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.2828282828282828, + "acc_stderr": 0.032087795587867514, + "acc_norm": 0.2828282828282828, + "acc_norm_stderr": 0.032087795587867514 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.30344827586206896, + "acc_stderr": 0.03831226048850333, + "acc_norm": 0.30344827586206896, + "acc_norm_stderr": 
0.03831226048850333 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.16666666666666666, + "acc_stderr": 0.03708284662416544, + "acc_norm": 0.16666666666666666, + "acc_norm_stderr": 0.03708284662416544 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.24369747899159663, + "acc_stderr": 0.027886828078380565, + "acc_norm": 0.24369747899159663, + "acc_norm_stderr": 0.027886828078380565 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.25384615384615383, + "acc_stderr": 0.022066054378726257, + "acc_norm": 0.25384615384615383, + "acc_norm_stderr": 0.022066054378726257 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.04668408033024931, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.04668408033024931 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3103448275862069, + "acc_stderr": 0.03255086769970103, + "acc_norm": 0.3103448275862069, + "acc_norm_stderr": 0.03255086769970103 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2709677419354839, + "acc_stderr": 0.02528441611490016, + "acc_norm": 0.2709677419354839, + "acc_norm_stderr": 0.02528441611490016 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.4358974358974359, + "acc_stderr": 0.032485775115783995, + "acc_norm": 0.4358974358974359, + "acc_norm_stderr": 0.032485775115783995 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.24150943396226415, + "acc_stderr": 0.026341480371118345, + "acc_norm": 0.24150943396226415, + "acc_norm_stderr": 0.026341480371118345 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.33636363636363636, + "acc_stderr": 0.04525393596302506, + 
"acc_norm": 0.33636363636363636, + "acc_norm_stderr": 0.04525393596302506 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.026842057873833706, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.026842057873833706 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.24503311258278146, + "acc_stderr": 0.03511807571804725, + "acc_norm": 0.24503311258278146, + "acc_norm_stderr": 0.03511807571804725 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.373134328358209, + "acc_stderr": 0.034198326081760065, + "acc_norm": 0.373134328358209, + "acc_norm_stderr": 0.034198326081760065 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2832369942196532, + "acc_stderr": 0.03435568056047875, + "acc_norm": 0.2832369942196532, + "acc_norm_stderr": 0.03435568056047875 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.291005291005291, + "acc_stderr": 0.023393826500484875, + "acc_norm": 0.291005291005291, + "acc_norm_stderr": 0.023393826500484875 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2569444444444444, + "acc_stderr": 0.03653946969442099, + "acc_norm": 0.2569444444444444, + "acc_norm_stderr": 0.03653946969442099 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.18, + "acc_stderr": 0.03861229196653695, + "acc_norm": 0.18, + "acc_norm_stderr": 0.03861229196653695 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.32947976878612717, + "acc_stderr": 0.0253052581318797, + "acc_norm": 0.32947976878612717, + "acc_norm_stderr": 0.0253052581318797 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.32515337423312884, + "acc_stderr": 0.03680350371286462, + "acc_norm": 0.32515337423312884, + "acc_norm_stderr": 0.03680350371286462 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3271604938271605, + 
"acc_stderr": 0.02610567386140981, + "acc_norm": 0.3271604938271605, + "acc_norm_stderr": 0.02610567386140981 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.30569948186528495, + "acc_stderr": 0.03324837939758159, + "acc_norm": 0.30569948186528495, + "acc_norm_stderr": 0.03324837939758159 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2894736842105263, + "acc_stderr": 0.04266339443159394, + "acc_norm": 0.2894736842105263, + "acc_norm_stderr": 0.04266339443159394 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.26788990825688075, + "acc_stderr": 0.018987462257978652, + "acc_norm": 0.26788990825688075, + "acc_norm_stderr": 0.018987462257978652 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.18253968253968253, + "acc_stderr": 0.034550710191021496, + "acc_norm": 0.18253968253968253, + "acc_norm_stderr": 0.034550710191021496 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3006535947712418, + "acc_stderr": 0.026256053835718968, + "acc_norm": 0.3006535947712418, + "acc_norm_stderr": 0.026256053835718968 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.4462809917355372, + "acc_stderr": 0.0453793517794788, + "acc_norm": 0.4462809917355372, + "acc_norm_stderr": 0.0453793517794788 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.28289473684210525, + "acc_stderr": 0.03665349695640767, + "acc_norm": 0.28289473684210525, + "acc_norm_stderr": 0.03665349695640767 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2875816993464052, + "acc_stderr": 0.018311653053648222, + "acc_norm": 0.2875816993464052, + "acc_norm_stderr": 0.018311653053648222 + }, + 
"harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2978723404255319, + "acc_stderr": 0.027281608344469414, + "acc_norm": 0.2978723404255319, + "acc_norm_stderr": 0.027281608344469414 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25, + "acc_stderr": 0.04109974682633932, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04109974682633932 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.21296296296296297, + "acc_stderr": 0.027920963147993666, + "acc_norm": 0.21296296296296297, + "acc_norm_stderr": 0.027920963147993666 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24692737430167597, + "acc_stderr": 0.014422292204808852, + "acc_norm": 0.24692737430167597, + "acc_norm_stderr": 0.014422292204808852 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.19852941176470587, + "acc_stderr": 0.024231013370541097, + "acc_norm": 0.19852941176470587, + "acc_norm_stderr": 0.024231013370541097 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.24081632653061225, + "acc_stderr": 0.02737294220178816, + "acc_norm": 0.24081632653061225, + "acc_norm_stderr": 0.02737294220178816 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.3628691983122363, + "acc_stderr": 0.03129920825530213, + "acc_norm": 0.3628691983122363, + "acc_norm_stderr": 0.03129920825530213 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.27249022164276404, + "acc_stderr": 0.01137165829431153, + "acc_norm": 0.27249022164276404, + "acc_norm_stderr": 0.01137165829431153 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.03308611113236436, + "acc_norm": 
0.3333333333333333, + "acc_norm_stderr": 0.03308611113236436 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3393939393939394, + "acc_stderr": 0.03697442205031596, + "acc_norm": 0.3393939393939394, + "acc_norm_stderr": 0.03697442205031596 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2839657282741738, + "mc1_stderr": 0.015785370858396708, + "mc2": 0.4469469691662156, + "mc2_stderr": 0.015668694918169947 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2939787485242031, + "acc_stderr": 0.015663242569091115, + "acc_norm": 0.40613931523022434, + "acc_norm_stderr": 0.016884749503191392 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + 
"harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "maum-ai/llamaum-13b-instruct-s", + "model_sha": "d9a9f9c019908c2d302da856473891095ad81940", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/maum-ai/llamaum-13b-instruct-v1/result_2023-10-11 06:55:52.json b/maum-ai/llamaum-13b-instruct-v1/result_2023-10-11 06:55:52.json new file mode 100644 index 0000000000000000000000000000000000000000..ea8863aa26057faf61ae8e47d57a3c9e067aa047 --- /dev/null +++ b/maum-ai/llamaum-13b-instruct-v1/result_2023-10-11 
06:55:52.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3967576791808874, + "acc_stderr": 0.014296513020180639, + "acc_norm": 0.45819112627986347, + "acc_norm_stderr": 0.014560220308714702 + }, + "harness|ko_hellaswag|10": { + "acc": 0.41047600079665403, + "acc_stderr": 0.004909148239488287, + "acc_norm": 0.5376419040031866, + "acc_norm_stderr": 0.004975621147406092 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5380116959064327, + "acc_stderr": 0.03823727092882307, + "acc_norm": 0.5380116959064327, + "acc_norm_stderr": 0.03823727092882307 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5533980582524272, + "acc_stderr": 0.04922424153458933, + "acc_norm": 0.5533980582524272, + "acc_norm_stderr": 0.04922424153458933 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5006385696040868, + "acc_stderr": 0.01787994891443169, + "acc_norm": 0.5006385696040868, + "acc_norm_stderr": 0.01787994891443169 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45185185185185184, + "acc_stderr": 0.04299268905480864, + "acc_norm": 0.45185185185185184, + "acc_norm_stderr": 0.04299268905480864 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.35319148936170214, + "acc_stderr": 0.03124532520276193, + "acc_norm": 0.35319148936170214, + "acc_norm_stderr": 0.03124532520276193 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.37349397590361444, + "acc_stderr": 0.037658451171688624, + "acc_norm": 0.37349397590361444, + "acc_norm_stderr": 0.037658451171688624 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4662379421221865, + "acc_stderr": 0.028333277109562786, + "acc_norm": 0.4662379421221865, + "acc_norm_stderr": 0.028333277109562786 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.43946188340807174, + "acc_stderr": 0.03331092511038179, + "acc_norm": 
0.43946188340807174, + "acc_norm_stderr": 0.03331092511038179 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.44274809160305345, + "acc_stderr": 0.0435644720266507, + "acc_norm": 0.44274809160305345, + "acc_norm_stderr": 0.0435644720266507 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.48484848484848486, + "acc_stderr": 0.03560716516531061, + "acc_norm": 0.48484848484848486, + "acc_norm_stderr": 0.03560716516531061 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.35172413793103446, + "acc_stderr": 0.0397923663749741, + "acc_norm": 0.35172413793103446, + "acc_norm_stderr": 0.0397923663749741 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.043364327079931785, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.043364327079931785 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.41596638655462187, + "acc_stderr": 0.03201650100739615, + "acc_norm": 0.41596638655462187, + "acc_norm_stderr": 0.03201650100739615 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4153846153846154, + "acc_stderr": 0.024985354923102315, + "acc_norm": 0.4153846153846154, + "acc_norm_stderr": 0.024985354923102315 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4537037037037037, + "acc_stderr": 0.04812917324536823, + "acc_norm": 0.4537037037037037, + "acc_norm_stderr": 0.04812917324536823 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3694581280788177, + "acc_stderr": 
0.03395970381998575, + "acc_norm": 0.3694581280788177, + "acc_norm_stderr": 0.03395970381998575 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4032258064516129, + "acc_stderr": 0.02790615082604114, + "acc_norm": 0.4032258064516129, + "acc_norm_stderr": 0.02790615082604114 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6623931623931624, + "acc_stderr": 0.030980296992618558, + "acc_norm": 0.6623931623931624, + "acc_norm_stderr": 0.030980296992618558 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.39245283018867927, + "acc_stderr": 0.030052580579557845, + "acc_norm": 0.39245283018867927, + "acc_norm_stderr": 0.030052580579557845 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4727272727272727, + "acc_stderr": 0.04782001791380063, + "acc_norm": 0.4727272727272727, + "acc_norm_stderr": 0.04782001791380063 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2851851851851852, + "acc_stderr": 0.027528599210340492, + "acc_norm": 0.2851851851851852, + "acc_norm_stderr": 0.027528599210340492 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.271523178807947, + "acc_stderr": 0.036313298039696545, + "acc_norm": 0.271523178807947, + "acc_norm_stderr": 0.036313298039696545 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5572139303482587, + "acc_stderr": 0.03512310964123935, + "acc_norm": 0.5572139303482587, + "acc_norm_stderr": 0.03512310964123935 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3872832369942196, + "acc_stderr": 0.03714325906302065, + "acc_norm": 0.3872832369942196, + "acc_norm_stderr": 0.03714325906302065 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.023809523809523867, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.023809523809523867 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.03942082639927214, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.03942082639927214 + }, 
+ "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4624277456647399, + "acc_stderr": 0.026842985519615375, + "acc_norm": 0.4624277456647399, + "acc_norm_stderr": 0.026842985519615375 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.39263803680981596, + "acc_stderr": 0.03836740907831029, + "acc_norm": 0.39263803680981596, + "acc_norm_stderr": 0.03836740907831029 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.027744313443376536, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.027744313443376536 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.47668393782383417, + "acc_stderr": 0.03604513672442206, + "acc_norm": 0.47668393782383417, + "acc_norm_stderr": 0.03604513672442206 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.040969851398436716, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.040969851398436716 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.4990825688073395, + "acc_stderr": 0.021437287056051215, + "acc_norm": 0.4990825688073395, + "acc_norm_stderr": 0.021437287056051215 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.23809523809523808, + "acc_stderr": 0.038095238095238106, + "acc_norm": 0.23809523809523808, + "acc_norm_stderr": 0.038095238095238106 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3790849673202614, + "acc_stderr": 0.027780141207023344, + "acc_norm": 0.3790849673202614, + "acc_norm_stderr": 
0.027780141207023344 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6033057851239669, + "acc_stderr": 0.044658697805310094, + "acc_norm": 0.6033057851239669, + "acc_norm_stderr": 0.044658697805310094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.46710526315789475, + "acc_stderr": 0.04060127035236397, + "acc_norm": 0.46710526315789475, + "acc_norm_stderr": 0.04060127035236397 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.36764705882352944, + "acc_stderr": 0.019506291693954854, + "acc_norm": 0.36764705882352944, + "acc_norm_stderr": 0.019506291693954854 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3120567375886525, + "acc_stderr": 0.02764012054516993, + "acc_norm": 0.3120567375886525, + "acc_norm_stderr": 0.02764012054516993 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25, + "acc_stderr": 0.04109974682633932, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04109974682633932 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.031415546294025425, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.031415546294025425 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24692737430167597, + "acc_stderr": 0.014422292204808852, + "acc_norm": 0.24692737430167597, + "acc_norm_stderr": 0.014422292204808852 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.35294117647058826, + "acc_stderr": 0.029029422815681407, + "acc_norm": 0.35294117647058826, + 
"acc_norm_stderr": 0.029029422815681407 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.363265306122449, + "acc_stderr": 0.030789051139030806, + "acc_norm": 0.363265306122449, + "acc_norm_stderr": 0.030789051139030806 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.569620253164557, + "acc_stderr": 0.03223017195937598, + "acc_norm": 0.569620253164557, + "acc_norm_stderr": 0.03223017195937598 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3350717079530639, + "acc_stderr": 0.012055499471330373, + "acc_norm": 0.3350717079530639, + "acc_norm_stderr": 0.012055499471330373 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.43137254901960786, + "acc_stderr": 0.03476099060501637, + "acc_norm": 0.43137254901960786, + "acc_norm_stderr": 0.03476099060501637 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5272727272727272, + "acc_stderr": 0.03898531605579418, + "acc_norm": 0.5272727272727272, + "acc_norm_stderr": 0.03898531605579418 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2827417380660955, + "mc1_stderr": 0.015764770836777308, + "mc2": 0.4426389060165117, + "mc2_stderr": 0.015221328776941925 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.33412042502951594, + "acc_stderr": 0.01621676330423968, + "acc_norm": 0.4297520661157025, + "acc_norm_stderr": 0.01701984753597221 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, 
+ "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + 
"harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "maum-ai/llamaum-13b-instruct-v1", + "model_sha": "10d1ae8e0155ba956a1e4cb16dd3b35415dea098", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/maywell/Jolteon-Instruct-13B-alpha/result_2024-03-22 03:13:49.json b/maywell/Jolteon-Instruct-13B-alpha/result_2024-03-22 03:13:49.json new file mode 100644 index 0000000000000000000000000000000000000000..585962f2aa345970a44ed4202b61765a0dc02483 --- /dev/null +++ b/maywell/Jolteon-Instruct-13B-alpha/result_2024-03-22 03:13:49.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4189419795221843, + "acc_stderr": 0.014418106953639013, + "acc_norm": 0.45307167235494883, + "acc_norm_stderr": 0.014546892052005628 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3859788886675961, + "acc_stderr": 0.004858306877874618, + "acc_norm": 0.4933280223063135, + "acc_norm_stderr": 0.004989337148572078 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5146198830409356, + "acc_stderr": 0.038331852752130254, + "acc_norm": 0.5146198830409356, + "acc_norm_stderr": 0.038331852752130254 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6116504854368932, + "acc_stderr": 0.04825729337356388, + "acc_norm": 0.6116504854368932, + "acc_norm_stderr": 0.04825729337356388 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.561941251596424, + "acc_stderr": 0.017742232238257254, + "acc_norm": 0.561941251596424, + "acc_norm_stderr": 0.017742232238257254 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3925925925925926, + "acc_stderr": 0.042185062153688786, + "acc_norm": 0.3925925925925926, + "acc_norm_stderr": 0.042185062153688786 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + 
"acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.41702127659574467, + "acc_stderr": 0.03223276266711712, + "acc_norm": 0.41702127659574467, + "acc_norm_stderr": 0.03223276266711712 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.5, + "acc_stderr": 0.03892494720807614, + "acc_norm": 0.5, + "acc_norm_stderr": 0.03892494720807614 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5627009646302251, + "acc_stderr": 0.0281739177617629, + "acc_norm": 0.5627009646302251, + "acc_norm_stderr": 0.0281739177617629 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.515695067264574, + "acc_stderr": 0.0335412657542081, + "acc_norm": 0.515695067264574, + "acc_norm_stderr": 0.0335412657542081 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5343511450381679, + "acc_stderr": 0.04374928560599738, + "acc_norm": 0.5343511450381679, + "acc_norm_stderr": 0.04374928560599738 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562429, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562429 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6161616161616161, + "acc_stderr": 0.034648816750163375, + "acc_norm": 0.6161616161616161, + "acc_norm_stderr": 0.034648816750163375 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4206896551724138, + "acc_stderr": 0.0411391498118926, + "acc_norm": 0.4206896551724138, + "acc_norm_stderr": 0.0411391498118926 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.04389869956808778, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.04389869956808778 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5672268907563025, + "acc_stderr": 0.03218358107742613, + "acc_norm": 0.5672268907563025, + "acc_norm_stderr": 0.03218358107742613 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.517948717948718, + "acc_stderr": 0.025334667080954897, + "acc_norm": 
0.517948717948718, + "acc_norm_stderr": 0.025334667080954897 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.57, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.57, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5833333333333334, + "acc_stderr": 0.04766075165356462, + "acc_norm": 0.5833333333333334, + "acc_norm_stderr": 0.04766075165356462 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.37438423645320196, + "acc_stderr": 0.03405155380561952, + "acc_norm": 0.37438423645320196, + "acc_norm_stderr": 0.03405155380561952 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5451612903225806, + "acc_stderr": 0.028327743091561074, + "acc_norm": 0.5451612903225806, + "acc_norm_stderr": 0.028327743091561074 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7521367521367521, + "acc_stderr": 0.0282863240755644, + "acc_norm": 0.7521367521367521, + "acc_norm_stderr": 0.0282863240755644 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4716981132075472, + "acc_stderr": 0.030723535249006114, + "acc_norm": 0.4716981132075472, + "acc_norm_stderr": 0.030723535249006114 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5727272727272728, + "acc_stderr": 0.04738198703545483, + "acc_norm": 0.5727272727272728, + "acc_norm_stderr": 0.04738198703545483 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.31851851851851853, + "acc_stderr": 0.028406533090608463, + "acc_norm": 0.31851851851851853, + "acc_norm_stderr": 0.028406533090608463 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + "acc_stderr": 0.037345356767871984, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.037345356767871984 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6467661691542289, + "acc_stderr": 
0.03379790611796777, + "acc_norm": 0.6467661691542289, + "acc_norm_stderr": 0.03379790611796777 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.35260115606936415, + "acc_stderr": 0.036430371689585496, + "acc_norm": 0.35260115606936415, + "acc_norm_stderr": 0.036430371689585496 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.36507936507936506, + "acc_stderr": 0.02479606060269994, + "acc_norm": 0.36507936507936506, + "acc_norm_stderr": 0.02479606060269994 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3819444444444444, + "acc_stderr": 0.040629907841466674, + "acc_norm": 0.3819444444444444, + "acc_norm_stderr": 0.040629907841466674 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.64, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.64, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5491329479768786, + "acc_stderr": 0.026788811931562757, + "acc_norm": 0.5491329479768786, + "acc_norm_stderr": 0.026788811931562757 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.43558282208588955, + "acc_stderr": 0.03895632464138936, + "acc_norm": 0.43558282208588955, + "acc_norm_stderr": 0.03895632464138936 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4845679012345679, + "acc_stderr": 0.02780749004427619, + "acc_norm": 0.4845679012345679, + "acc_norm_stderr": 0.02780749004427619 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6528497409326425, + "acc_stderr": 0.03435696168361355, + "acc_norm": 0.6528497409326425, + "acc_norm_stderr": 0.03435696168361355 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2894736842105263, + 
"acc_stderr": 0.04266339443159394, + "acc_norm": 0.2894736842105263, + "acc_norm_stderr": 0.04266339443159394 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5798165137614679, + "acc_stderr": 0.021162420048273504, + "acc_norm": 0.5798165137614679, + "acc_norm_stderr": 0.021162420048273504 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30158730158730157, + "acc_stderr": 0.04104947269903394, + "acc_norm": 0.30158730158730157, + "acc_norm_stderr": 0.04104947269903394 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5196078431372549, + "acc_stderr": 0.028607893699576066, + "acc_norm": 0.5196078431372549, + "acc_norm_stderr": 0.028607893699576066 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.55, + "acc_stderr": 0.049999999999999996, + "acc_norm": 0.55, + "acc_norm_stderr": 0.049999999999999996 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6694214876033058, + "acc_stderr": 0.04294340845212095, + "acc_norm": 0.6694214876033058, + "acc_norm_stderr": 0.04294340845212095 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.47368421052631576, + "acc_stderr": 0.04063302731486671, + "acc_norm": 0.47368421052631576, + "acc_norm_stderr": 0.04063302731486671 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.42483660130718953, + "acc_stderr": 0.019997973035458333, + "acc_norm": 0.42483660130718953, + "acc_norm_stderr": 0.019997973035458333 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.33687943262411346, + "acc_stderr": 0.028195534873966737, + "acc_norm": 0.33687943262411346, + "acc_norm_stderr": 0.028195534873966737 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.49107142857142855, + "acc_stderr": 0.04745033255489123, + "acc_norm": 0.49107142857142855, + "acc_norm_stderr": 0.04745033255489123 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.03388857118502325, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.03388857118502325 + }, + 
"harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.19217877094972066, + "acc_stderr": 0.013177759505210091, + "acc_norm": 0.19217877094972066, + "acc_norm_stderr": 0.013177759505210091 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.65, + "acc_stderr": 0.04793724854411021, + "acc_norm": 0.65, + "acc_norm_stderr": 0.04793724854411021 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4522058823529412, + "acc_stderr": 0.030233758551596455, + "acc_norm": 0.4522058823529412, + "acc_norm_stderr": 0.030233758551596455 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5877551020408164, + "acc_stderr": 0.031512360446742695, + "acc_norm": 0.5877551020408164, + "acc_norm_stderr": 0.031512360446742695 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6708860759493671, + "acc_stderr": 0.03058732629470236, + "acc_norm": 0.6708860759493671, + "acc_norm_stderr": 0.03058732629470236 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.38070404172099087, + "acc_stderr": 0.012401430654645879, + "acc_norm": 0.38070404172099087, + "acc_norm_stderr": 0.012401430654645879 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5490196078431373, + "acc_stderr": 0.034924061041636124, + "acc_norm": 0.5490196078431373, + "acc_norm_stderr": 0.034924061041636124 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5878787878787879, + "acc_stderr": 0.03843566993588718, + "acc_norm": 0.5878787878787879, + "acc_norm_stderr": 0.03843566993588718 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3317013463892289, + "mc1_stderr": 0.016482148810241456, + "mc2": 0.49151591487192825, + "mc2_stderr": 0.016134487387888623 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3364817001180638, + "acc_stderr": 0.016245085294386556, + "acc_norm": 
0.3754427390791027, + "acc_norm_stderr": 0.016648411589511098 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + 
"harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "maywell/Jolteon-Instruct-13B-alpha", + "model_sha": "047bc03d927bd461660fe76e117a60f995db1ae4", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/maywell/Llama-3-Ko-8B-Instruct/result_2024-04-28 04:49:05.json b/maywell/Llama-3-Ko-8B-Instruct/result_2024-04-28 04:49:05.json new file mode 100644 index 0000000000000000000000000000000000000000..ffe99347e33d4715ad1b86bbaca71c265b4e8277 --- /dev/null +++ b/maywell/Llama-3-Ko-8B-Instruct/result_2024-04-28 04:49:05.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3225255972696246, + "acc_stderr": 0.013659980894277371, + "acc_norm": 0.34982935153583616, + "acc_norm_stderr": 0.01393680921215828 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3381796454889464, + "acc_stderr": 0.004721231637092728, + "acc_norm": 0.4182433778131846, + "acc_norm_stderr": 0.004922624636945241 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.391812865497076, + "acc_stderr": 0.037439798259264016, + "acc_norm": 0.391812865497076, + 
"acc_norm_stderr": 0.037439798259264016 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2912621359223301, + "acc_stderr": 0.04498676320572924, + "acc_norm": 0.2912621359223301, + "acc_norm_stderr": 0.04498676320572924 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3652618135376756, + "acc_stderr": 0.017218530028838636, + "acc_norm": 0.3652618135376756, + "acc_norm_stderr": 0.017218530028838636 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.35555555555555557, + "acc_stderr": 0.04135176749720386, + "acc_norm": 0.35555555555555557, + "acc_norm_stderr": 0.04135176749720386 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.28936170212765955, + "acc_stderr": 0.02964400657700962, + "acc_norm": 0.28936170212765955, + "acc_norm_stderr": 0.02964400657700962 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3132530120481928, + "acc_stderr": 0.03610805018031023, + "acc_norm": 0.3132530120481928, + "acc_norm_stderr": 0.03610805018031023 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3633440514469453, + "acc_stderr": 0.027316847674192717, + "acc_norm": 0.3633440514469453, + "acc_norm_stderr": 0.027316847674192717 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3901345291479821, + "acc_stderr": 0.03273766725459156, + "acc_norm": 0.3901345291479821, + "acc_norm_stderr": 0.03273766725459156 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3511450381679389, + "acc_stderr": 0.04186445163013751, + "acc_norm": 0.3511450381679389, + "acc_norm_stderr": 0.04186445163013751 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.2676767676767677, + "acc_stderr": 0.031544498882702866, + "acc_norm": 0.2676767676767677, + 
"acc_norm_stderr": 0.031544498882702866 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3586206896551724, + "acc_stderr": 0.03996629574876718, + "acc_norm": 0.3586206896551724, + "acc_norm_stderr": 0.03996629574876718 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237657, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237657 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.31512605042016806, + "acc_stderr": 0.03017680828897434, + "acc_norm": 0.31512605042016806, + "acc_norm_stderr": 0.03017680828897434 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3, + "acc_stderr": 0.023234581088428498, + "acc_norm": 0.3, + "acc_norm_stderr": 0.023234581088428498 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.37962962962962965, + "acc_stderr": 0.04691521224077742, + "acc_norm": 0.37962962962962965, + "acc_norm_stderr": 0.04691521224077742 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.27586206896551724, + "acc_stderr": 0.031447125816782405, + "acc_norm": 0.27586206896551724, + "acc_norm_stderr": 0.031447125816782405 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.32903225806451614, + "acc_stderr": 0.026729499068349965, + "acc_norm": 0.32903225806451614, + "acc_norm_stderr": 0.026729499068349965 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.49145299145299143, + "acc_stderr": 0.032751303000970296, + "acc_norm": 0.49145299145299143, + "acc_norm_stderr": 0.032751303000970296 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3169811320754717, + "acc_stderr": 0.02863723563980092, + 
"acc_norm": 0.3169811320754717, + "acc_norm_stderr": 0.02863723563980092 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.3181818181818182, + "acc_stderr": 0.04461272175910507, + "acc_norm": 0.3181818181818182, + "acc_norm_stderr": 0.04461272175910507 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.23703703703703705, + "acc_stderr": 0.025928876132766114, + "acc_norm": 0.23703703703703705, + "acc_norm_stderr": 0.025928876132766114 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.03780445850526732, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.03780445850526732 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.36318407960199006, + "acc_stderr": 0.034005985055990146, + "acc_norm": 0.36318407960199006, + "acc_norm_stderr": 0.034005985055990146 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.31213872832369943, + "acc_stderr": 0.035331333893236574, + "acc_norm": 0.31213872832369943, + "acc_norm_stderr": 0.035331333893236574 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2671957671957672, + "acc_stderr": 0.022789673145776564, + "acc_norm": 0.2671957671957672, + "acc_norm_stderr": 0.022789673145776564 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3125, + "acc_stderr": 0.038760854559127644, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.038760854559127644 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.23, + "acc_stderr": 0.042295258468165065, + "acc_norm": 0.23, + "acc_norm_stderr": 0.042295258468165065 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.315028901734104, + "acc_stderr": 0.02500931379006971, + "acc_norm": 0.315028901734104, + "acc_norm_stderr": 0.02500931379006971 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3067484662576687, + "acc_stderr": 
0.03623089915724147, + "acc_norm": 0.3067484662576687, + "acc_norm_stderr": 0.03623089915724147 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.345679012345679, + "acc_stderr": 0.026462487777001893, + "acc_norm": 0.345679012345679, + "acc_norm_stderr": 0.026462487777001893 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.32124352331606215, + "acc_stderr": 0.033699508685490674, + "acc_norm": 0.32124352331606215, + "acc_norm_stderr": 0.033699508685490674 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.04227054451232199, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.04227054451232199 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.27889908256880735, + "acc_stderr": 0.01922746887646352, + "acc_norm": 0.27889908256880735, + "acc_norm_stderr": 0.01922746887646352 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.15873015873015872, + "acc_stderr": 0.03268454013011744, + "acc_norm": 0.15873015873015872, + "acc_norm_stderr": 0.03268454013011744 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2908496732026144, + "acc_stderr": 0.026004800363952113, + "acc_norm": 0.2908496732026144, + "acc_norm_stderr": 0.026004800363952113 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.4214876033057851, + "acc_stderr": 0.04507732278775094, + "acc_norm": 0.4214876033057851, + "acc_norm_stderr": 0.04507732278775094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.2236842105263158, + "acc_stderr": 0.03391160934343602, + "acc_norm": 0.2236842105263158, + "acc_norm_stderr": 0.03391160934343602 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 
0.2973856209150327, + "acc_stderr": 0.018492596536396955, + "acc_norm": 0.2973856209150327, + "acc_norm_stderr": 0.018492596536396955 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3049645390070922, + "acc_stderr": 0.027464708442022128, + "acc_norm": 0.3049645390070922, + "acc_norm_stderr": 0.027464708442022128 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3482142857142857, + "acc_stderr": 0.045218299028335865, + "acc_norm": 0.3482142857142857, + "acc_norm_stderr": 0.045218299028335865 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.17592592592592593, + "acc_stderr": 0.025967420958258533, + "acc_norm": 0.17592592592592593, + "acc_norm_stderr": 0.025967420958258533 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23016759776536314, + "acc_stderr": 0.014078339253425809, + "acc_norm": 0.23016759776536314, + "acc_norm_stderr": 0.014078339253425809 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909282, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909282 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.024562204314142314, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.024562204314142314 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.20816326530612245, + "acc_stderr": 0.025991117672813292, + "acc_norm": 0.20816326530612245, + "acc_norm_stderr": 0.025991117672813292 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.4177215189873418, + "acc_stderr": 0.032103530322412685, + "acc_norm": 0.4177215189873418, + "acc_norm_stderr": 0.032103530322412685 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.27183833116036504, + "acc_stderr": 0.011363135278651418, + "acc_norm": 0.27183833116036504, + 
"acc_norm_stderr": 0.011363135278651418 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.030964517926923403, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.030964517926923403 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.28484848484848485, + "acc_stderr": 0.035243908445117836, + "acc_norm": 0.28484848484848485, + "acc_norm_stderr": 0.035243908445117836 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.28518971848225216, + "mc1_stderr": 0.015805827874454895, + "mc2": 0.4632805233224405, + "mc2_stderr": 0.01565468325038931 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.282172373081464, + "acc_stderr": 0.01547327158398843, + "acc_norm": 0.3482880755608028, + "acc_norm_stderr": 0.016379926739148037 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "maywell/Llama-3-Ko-8B-Instruct", + "model_sha": "1d4c12d072bb781b9cc7c20db77b3b948955f3b6", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/maywell/Mini_Synatra_SFT/result_2024-01-09 00:26:08.json b/maywell/Mini_Synatra_SFT/result_2024-01-09 00:26:08.json new file mode 100644 
index 0000000000000000000000000000000000000000..8047baebb04e94ba428c6b71c3ba10efffbea507 --- /dev/null +++ b/maywell/Mini_Synatra_SFT/result_2024-01-09 00:26:08.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.447098976109215, + "acc_stderr": 0.014529380160526848, + "acc_norm": 0.49829351535836175, + "acc_norm_stderr": 0.014611305705056987 + }, + "harness|ko_hellaswag|10": { + "acc": 0.41963752240589525, + "acc_stderr": 0.004924910433106359, + "acc_norm": 0.5562636924915355, + "acc_norm_stderr": 0.004958089432669991 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.543859649122807, + "acc_stderr": 0.03820042586602966, + "acc_norm": 0.543859649122807, + "acc_norm_stderr": 0.03820042586602966 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6116504854368932, + "acc_stderr": 0.04825729337356389, + "acc_norm": 0.6116504854368932, + "acc_norm_stderr": 0.04825729337356389 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.561941251596424, + "acc_stderr": 0.01774223223825725, + "acc_norm": 0.561941251596424, + "acc_norm_stderr": 0.01774223223825725 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.04171654161354544, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.04171654161354544 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4425531914893617, + "acc_stderr": 0.03246956919789959, + "acc_norm": 0.4425531914893617, + "acc_norm_stderr": 0.03246956919789959 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4397590361445783, + "acc_stderr": 0.03864139923699121, + "acc_norm": 0.4397590361445783, + "acc_norm_stderr": 0.03864139923699121 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5369774919614148, + "acc_stderr": 0.028320325830105915, + "acc_norm": 0.5369774919614148, + "acc_norm_stderr": 
0.028320325830105915 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5246636771300448, + "acc_stderr": 0.03351695167652628, + "acc_norm": 0.5246636771300448, + "acc_norm_stderr": 0.03351695167652628 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4961832061068702, + "acc_stderr": 0.04385162325601553, + "acc_norm": 0.4961832061068702, + "acc_norm_stderr": 0.04385162325601553 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6212121212121212, + "acc_stderr": 0.03456088731993747, + "acc_norm": 0.6212121212121212, + "acc_norm_stderr": 0.03456088731993747 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4068965517241379, + "acc_stderr": 0.04093793981266237, + "acc_norm": 0.4068965517241379, + "acc_norm_stderr": 0.04093793981266237 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.30392156862745096, + "acc_stderr": 0.045766654032077615, + "acc_norm": 0.30392156862745096, + "acc_norm_stderr": 0.045766654032077615 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5294117647058824, + "acc_stderr": 0.03242225027115006, + "acc_norm": 0.5294117647058824, + "acc_norm_stderr": 0.03242225027115006 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5025641025641026, + "acc_stderr": 0.025350672979412184, + "acc_norm": 0.5025641025641026, + "acc_norm_stderr": 0.025350672979412184 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.57, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.57, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.23, + "acc_stderr": 0.042295258468165044, + "acc_norm": 0.23, + "acc_norm_stderr": 0.042295258468165044 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5370370370370371, + "acc_stderr": 0.04820403072760627, + "acc_norm": 0.5370370370370371, + 
"acc_norm_stderr": 0.04820403072760627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39901477832512317, + "acc_stderr": 0.03445487686264716, + "acc_norm": 0.39901477832512317, + "acc_norm_stderr": 0.03445487686264716 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4645161290322581, + "acc_stderr": 0.028372287797962956, + "acc_norm": 0.4645161290322581, + "acc_norm_stderr": 0.028372287797962956 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7393162393162394, + "acc_stderr": 0.02876034895652341, + "acc_norm": 0.7393162393162394, + "acc_norm_stderr": 0.02876034895652341 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4641509433962264, + "acc_stderr": 0.030693675018458003, + "acc_norm": 0.4641509433962264, + "acc_norm_stderr": 0.030693675018458003 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5272727272727272, + "acc_stderr": 0.04782001791380061, + "acc_norm": 0.5272727272727272, + "acc_norm_stderr": 0.04782001791380061 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.32592592592592595, + "acc_stderr": 0.02857834836547308, + "acc_norm": 0.32592592592592595, + "acc_norm_stderr": 0.02857834836547308 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + "acc_stderr": 0.037345356767871984, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.037345356767871984 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6268656716417911, + "acc_stderr": 0.034198326081760065, + "acc_norm": 0.6268656716417911, + "acc_norm_stderr": 0.034198326081760065 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.44508670520231214, + "acc_stderr": 0.03789401760283648, + "acc_norm": 0.44508670520231214, + "acc_norm_stderr": 0.03789401760283648 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.4126984126984127, + "acc_stderr": 0.025355741263055273, + "acc_norm": 0.4126984126984127, + "acc_norm_stderr": 0.025355741263055273 + }, + "harness|ko_mmlu_college_biology|5": { + 
"acc": 0.4027777777777778, + "acc_stderr": 0.04101405519842426, + "acc_norm": 0.4027777777777778, + "acc_norm_stderr": 0.04101405519842426 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.45, + "acc_stderr": 0.049999999999999996, + "acc_norm": 0.45, + "acc_norm_stderr": 0.049999999999999996 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.546242774566474, + "acc_stderr": 0.026803720583206188, + "acc_norm": 0.546242774566474, + "acc_norm_stderr": 0.026803720583206188 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5153374233128835, + "acc_stderr": 0.039265223787088424, + "acc_norm": 0.5153374233128835, + "acc_norm_stderr": 0.039265223787088424 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4876543209876543, + "acc_stderr": 0.027812262269327235, + "acc_norm": 0.4876543209876543, + "acc_norm_stderr": 0.027812262269327235 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5595854922279793, + "acc_stderr": 0.035827245300360945, + "acc_norm": 0.5595854922279793, + "acc_norm_stderr": 0.035827245300360945 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3508771929824561, + "acc_stderr": 0.044895393502707, + "acc_norm": 0.3508771929824561, + "acc_norm_stderr": 0.044895393502707 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5651376146788991, + "acc_stderr": 0.02125463146560928, + "acc_norm": 0.5651376146788991, + "acc_norm_stderr": 0.02125463146560928 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4523809523809524, + "acc_stderr": 0.044518079590553275, + "acc_norm": 0.4523809523809524, + "acc_norm_stderr": 0.044518079590553275 + }, + "harness|ko_mmlu_nutrition|5": { + 
"acc": 0.5326797385620915, + "acc_stderr": 0.028568699752225868, + "acc_norm": 0.5326797385620915, + "acc_norm_stderr": 0.028568699752225868 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.52, + "acc_stderr": 0.05021167315686779, + "acc_norm": 0.52, + "acc_norm_stderr": 0.05021167315686779 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6694214876033058, + "acc_stderr": 0.04294340845212095, + "acc_norm": 0.6694214876033058, + "acc_norm_stderr": 0.04294340845212095 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5263157894736842, + "acc_stderr": 0.040633027314866725, + "acc_norm": 0.5263157894736842, + "acc_norm_stderr": 0.040633027314866725 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4019607843137255, + "acc_stderr": 0.019835176484375387, + "acc_norm": 0.4019607843137255, + "acc_norm_stderr": 0.019835176484375387 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.31560283687943264, + "acc_stderr": 0.027724989449509314, + "acc_norm": 0.31560283687943264, + "acc_norm_stderr": 0.027724989449509314 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.45535714285714285, + "acc_stderr": 0.04726835553719099, + "acc_norm": 0.45535714285714285, + "acc_norm_stderr": 0.04726835553719099 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4398148148148148, + "acc_stderr": 0.0338517797604481, + "acc_norm": 0.4398148148148148, + "acc_norm_stderr": 0.0338517797604481 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.20893854748603352, + "acc_stderr": 0.013597079518495252, + "acc_norm": 0.20893854748603352, + "acc_norm_stderr": 0.013597079518495252 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.63, + "acc_stderr": 0.04852365870939098, + "acc_norm": 0.63, + "acc_norm_stderr": 0.04852365870939098 + }, + 
"harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4007352941176471, + "acc_stderr": 0.02976826352893311, + "acc_norm": 0.4007352941176471, + "acc_norm_stderr": 0.02976826352893311 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6163265306122448, + "acc_stderr": 0.031130880396235946, + "acc_norm": 0.6163265306122448, + "acc_norm_stderr": 0.031130880396235946 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6708860759493671, + "acc_stderr": 0.030587326294702365, + "acc_norm": 0.6708860759493671, + "acc_norm_stderr": 0.030587326294702365 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3344198174706649, + "acc_stderr": 0.012049668983214936, + "acc_norm": 0.3344198174706649, + "acc_norm_stderr": 0.012049668983214936 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4803921568627451, + "acc_stderr": 0.03506612560524866, + "acc_norm": 0.4803921568627451, + "acc_norm_stderr": 0.03506612560524866 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5272727272727272, + "acc_stderr": 0.03898531605579419, + "acc_norm": 0.5272727272727272, + "acc_norm_stderr": 0.03898531605579419 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.34149326805385555, + "mc1_stderr": 0.016600688619950822, + "mc2": 0.5081663446840529, + "mc2_stderr": 0.015858266635572223 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.46162927981109797, + "acc_stderr": 0.017139660221845557, + "acc_norm": 0.4769775678866588, + "acc_norm_stderr": 0.017172121546727634 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + 
"harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + 
"harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "maywell/Mini_Synatra_SFT", + "model_sha": "fc042f671dc0c94b21a6107eda75a6f9c8d44f2d", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/maywell/Mistral-ko-7B-v0.1/result_2023-11-26 09:26:16.json b/maywell/Mistral-ko-7B-v0.1/result_2023-11-26 09:26:16.json new file mode 100644 index 0000000000000000000000000000000000000000..2f57ed5d77b252b4e2c9fbfba738d40dead03f88 --- /dev/null +++ b/maywell/Mistral-ko-7B-v0.1/result_2023-11-26 09:26:16.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.27047781569965873, + "acc_stderr": 0.012980954547659556, + "acc_norm": 0.31143344709897613, + "acc_norm_stderr": 0.013532472099850947 + }, + "harness|ko_hellaswag|10": { + "acc": 0.28759211312487554, + "acc_stderr": 0.004517148434180507, + "acc_norm": 0.31428002389962156, + "acc_norm_stderr": 0.00463279737528977 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.38011695906432746, + "acc_stderr": 0.037229657413855394, + "acc_norm": 0.38011695906432746, + "acc_norm_stderr": 0.037229657413855394 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.3592233009708738, + "acc_stderr": 0.04750458399041694, + "acc_norm": 0.3592233009708738, + "acc_norm_stderr": 0.04750458399041694 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.33077905491698595, + "acc_stderr": 0.016824818462563746, + "acc_norm": 0.33077905491698595, + "acc_norm_stderr": 0.016824818462563746 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3037037037037037, + "acc_stderr": 0.039725528847851375, + "acc_norm": 
0.3037037037037037, + "acc_norm_stderr": 0.039725528847851375 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3574468085106383, + "acc_stderr": 0.03132941789476425, + "acc_norm": 0.3574468085106383, + "acc_norm_stderr": 0.03132941789476425 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3313253012048193, + "acc_stderr": 0.036643147772880864, + "acc_norm": 0.3313253012048193, + "acc_norm_stderr": 0.036643147772880864 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3247588424437299, + "acc_stderr": 0.026596782287697043, + "acc_norm": 0.3247588424437299, + "acc_norm_stderr": 0.026596782287697043 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.273542600896861, + "acc_stderr": 0.02991858670779883, + "acc_norm": 0.273542600896861, + "acc_norm_stderr": 0.02991858670779883 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3816793893129771, + "acc_stderr": 0.042607351576445594, + "acc_norm": 0.3816793893129771, + "acc_norm_stderr": 0.042607351576445594 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.41, + "acc_stderr": 0.04943110704237103, + "acc_norm": 0.41, + "acc_norm_stderr": 0.04943110704237103 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.2878787878787879, + "acc_stderr": 0.03225883512300992, + "acc_norm": 0.2878787878787879, + "acc_norm_stderr": 0.03225883512300992 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4, + "acc_stderr": 0.040824829046386284, + "acc_norm": 0.4, + "acc_norm_stderr": 0.040824829046386284 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.17647058823529413, + "acc_stderr": 0.03793281185307809, + "acc_norm": 0.17647058823529413, + "acc_norm_stderr": 0.03793281185307809 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3445378151260504, + "acc_stderr": 0.03086868260412163, + "acc_norm": 
0.3445378151260504, + "acc_norm_stderr": 0.03086868260412163 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.30256410256410254, + "acc_stderr": 0.023290888053772725, + "acc_norm": 0.30256410256410254, + "acc_norm_stderr": 0.023290888053772725 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.45, + "acc_stderr": 0.04999999999999999, + "acc_norm": 0.45, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.04820403072760627, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.04820403072760627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.32019704433497537, + "acc_stderr": 0.03282649385304151, + "acc_norm": 0.32019704433497537, + "acc_norm_stderr": 0.03282649385304151 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.35161290322580646, + "acc_stderr": 0.02716253782694846, + "acc_norm": 0.35161290322580646, + "acc_norm_stderr": 0.02716253782694846 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.4658119658119658, + "acc_stderr": 0.03267942734081227, + "acc_norm": 0.4658119658119658, + "acc_norm_stderr": 0.03267942734081227 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.30566037735849055, + "acc_stderr": 0.028353298073322666, + "acc_norm": 0.30566037735849055, + "acc_norm_stderr": 0.028353298073322666 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.37272727272727274, + "acc_stderr": 0.04631381319425464, + "acc_norm": 0.37272727272727274, + "acc_norm_stderr": 0.04631381319425464 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.026962424325073838, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.026962424325073838 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + 
"acc_stderr": 0.037345356767871984, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.037345356767871984 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.3283582089552239, + "acc_stderr": 0.033206858897443244, + "acc_norm": 0.3283582089552239, + "acc_norm_stderr": 0.033206858897443244 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3063583815028902, + "acc_stderr": 0.035149425512674394, + "acc_norm": 0.3063583815028902, + "acc_norm_stderr": 0.035149425512674394 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3201058201058201, + "acc_stderr": 0.0240268463928735, + "acc_norm": 0.3201058201058201, + "acc_norm_stderr": 0.0240268463928735 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2638888888888889, + "acc_stderr": 0.03685651095897532, + "acc_norm": 0.2638888888888889, + "acc_norm_stderr": 0.03685651095897532 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.3439306358381503, + "acc_stderr": 0.025574123786546648, + "acc_norm": 0.3439306358381503, + "acc_norm_stderr": 0.025574123786546648 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3067484662576687, + "acc_stderr": 0.036230899157241474, + "acc_norm": 0.3067484662576687, + "acc_norm_stderr": 0.036230899157241474 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3425925925925926, + "acc_stderr": 0.026406145973625658, + "acc_norm": 0.3425925925925926, + "acc_norm_stderr": 0.026406145973625658 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 
0.29533678756476683, + "acc_stderr": 0.032922966391551414, + "acc_norm": 0.29533678756476683, + "acc_norm_stderr": 0.032922966391551414 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.0404933929774814, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.0404933929774814 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.28256880733944956, + "acc_stderr": 0.019304243497707152, + "acc_norm": 0.28256880733944956, + "acc_norm_stderr": 0.019304243497707152 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.0393253768039287, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.0393253768039287 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.37254901960784315, + "acc_stderr": 0.02768418188330288, + "acc_norm": 0.37254901960784315, + "acc_norm_stderr": 0.02768418188330288 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.4793388429752066, + "acc_stderr": 0.04560456086387235, + "acc_norm": 0.4793388429752066, + "acc_norm_stderr": 0.04560456086387235 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.32894736842105265, + "acc_stderr": 0.038234289699266046, + "acc_norm": 0.32894736842105265, + "acc_norm_stderr": 0.038234289699266046 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3006535947712418, + "acc_stderr": 0.01855063450295296, + "acc_norm": 0.3006535947712418, + "acc_norm_stderr": 0.01855063450295296 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32269503546099293, + "acc_stderr": 0.027889139300534792, + "acc_norm": 0.32269503546099293, + "acc_norm_stderr": 0.027889139300534792 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.32142857142857145, + "acc_stderr": 0.044328040552915185, + "acc_norm": 0.32142857142857145, + "acc_norm_stderr": 
0.044328040552915185 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3101851851851852, + "acc_stderr": 0.03154696285656629, + "acc_norm": 0.3101851851851852, + "acc_norm_stderr": 0.03154696285656629 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2435754189944134, + "acc_stderr": 0.01435591196476786, + "acc_norm": 0.2435754189944134, + "acc_norm_stderr": 0.01435591196476786 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3125, + "acc_stderr": 0.02815637344037142, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.02815637344037142 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.39183673469387753, + "acc_stderr": 0.03125127591089165, + "acc_norm": 0.39183673469387753, + "acc_norm_stderr": 0.03125127591089165 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.33755274261603374, + "acc_stderr": 0.030781549102026223, + "acc_norm": 0.33755274261603374, + "acc_norm_stderr": 0.030781549102026223 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2926988265971317, + "acc_stderr": 0.011620949195849536, + "acc_norm": 0.2926988265971317, + "acc_norm_stderr": 0.011620949195849536 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.31862745098039214, + "acc_stderr": 0.03270287181482079, + "acc_norm": 0.31862745098039214, + "acc_norm_stderr": 0.03270287181482079 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.32727272727272727, + "acc_stderr": 0.03663974994391242, + "acc_norm": 0.32727272727272727, + "acc_norm_stderr": 0.03663974994391242 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2484700122399021, + "mc1_stderr": 0.015127427096520702, + 
"mc2": 0.4233292952140553, + "mc2_stderr": 0.015664004103265215 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.19008264462809918, + "acc_stderr": 0.013489827742736773, + "acc_norm": 0.30814639905548996, + "acc_norm_stderr": 0.01587451515629839 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + 
"harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "maywell/Mistral-ko-7B-v0.1", + "model_sha": "01bdf68f5185b57eac642128c0940bf926c4d473", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/maywell/PiVoT-0.1-early/result_2023-11-24 07:54:04.json b/maywell/PiVoT-0.1-early/result_2023-11-24 07:54:04.json new file mode 100644 index 0000000000000000000000000000000000000000..b6099da4b2811a1dad5f47349eadcd8bb1c66c3a --- /dev/null +++ b/maywell/PiVoT-0.1-early/result_2023-11-24 07:54:04.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4206484641638225, + "acc_stderr": 0.01442621125250841, + "acc_norm": 0.47525597269624575, + "acc_norm_stderr": 0.014593487694937738 + }, + "harness|ko_hellaswag|10": { + "acc": 0.41286596295558653, + "acc_stderr": 0.004913429010559069, + "acc_norm": 0.538338976299542, + "acc_norm_stderr": 0.004975091055697189 + }, + 
"harness|ko_mmlu_world_religions|5": { + "acc": 0.5497076023391813, + "acc_stderr": 0.038158273659132366, + "acc_norm": 0.5497076023391813, + "acc_norm_stderr": 0.038158273659132366 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6310679611650486, + "acc_stderr": 0.0477761518115674, + "acc_norm": 0.6310679611650486, + "acc_norm_stderr": 0.0477761518115674 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5747126436781609, + "acc_stderr": 0.01767922548943146, + "acc_norm": 0.5747126436781609, + "acc_norm_stderr": 0.01767922548943146 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.42962962962962964, + "acc_stderr": 0.04276349494376599, + "acc_norm": 0.42962962962962964, + "acc_norm_stderr": 0.04276349494376599 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.37872340425531914, + "acc_stderr": 0.031709956060406545, + "acc_norm": 0.37872340425531914, + "acc_norm_stderr": 0.031709956060406545 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4036144578313253, + "acc_stderr": 0.038194861407583984, + "acc_norm": 0.4036144578313253, + "acc_norm_stderr": 0.038194861407583984 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5176848874598071, + "acc_stderr": 0.02838032284907713, + "acc_norm": 0.5176848874598071, + "acc_norm_stderr": 0.02838032284907713 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5067264573991032, + "acc_stderr": 0.03355476596234355, + "acc_norm": 0.5067264573991032, + "acc_norm_stderr": 0.03355476596234355 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.45038167938931295, + "acc_stderr": 0.04363643698524779, + "acc_norm": 0.45038167938931295, + "acc_norm_stderr": 0.04363643698524779 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + 
"harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6060606060606061, + "acc_stderr": 0.03481285338232962, + "acc_norm": 0.6060606060606061, + "acc_norm_stderr": 0.03481285338232962 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.496551724137931, + "acc_stderr": 0.04166567577101579, + "acc_norm": 0.496551724137931, + "acc_norm_stderr": 0.04166567577101579 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237657, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237657 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.453781512605042, + "acc_stderr": 0.03233943468182088, + "acc_norm": 0.453781512605042, + "acc_norm_stderr": 0.03233943468182088 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.46923076923076923, + "acc_stderr": 0.02530295889085015, + "acc_norm": 0.46923076923076923, + "acc_norm_stderr": 0.02530295889085015 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.56, + "acc_stderr": 0.0498887651569859, + "acc_norm": 0.56, + "acc_norm_stderr": 0.0498887651569859 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5462962962962963, + "acc_stderr": 0.048129173245368216, + "acc_norm": 0.5462962962962963, + "acc_norm_stderr": 0.048129173245368216 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4482758620689655, + "acc_stderr": 0.03499113137676744, + "acc_norm": 0.4482758620689655, + "acc_norm_stderr": 0.03499113137676744 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4967741935483871, + "acc_stderr": 0.02844341422643833, + "acc_norm": 0.4967741935483871, + "acc_norm_stderr": 0.02844341422643833 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.717948717948718, + "acc_stderr": 0.029480360549541198, + "acc_norm": 0.717948717948718, + 
"acc_norm_stderr": 0.029480360549541198 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5056603773584906, + "acc_stderr": 0.030770900763851316, + "acc_norm": 0.5056603773584906, + "acc_norm_stderr": 0.030770900763851316 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4909090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.4909090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.028317533496066492, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.028317533496066492 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.03757949922943343, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.03757949922943343 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6517412935323383, + "acc_stderr": 0.033687874661154596, + "acc_norm": 0.6517412935323383, + "acc_norm_stderr": 0.033687874661154596 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4161849710982659, + "acc_stderr": 0.03758517775404947, + "acc_norm": 0.4161849710982659, + "acc_norm_stderr": 0.03758517775404947 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.41005291005291006, + "acc_stderr": 0.025331202438944433, + "acc_norm": 0.41005291005291006, + "acc_norm_stderr": 0.025331202438944433 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3541666666666667, + "acc_stderr": 0.039994111357535424, + "acc_norm": 0.3541666666666667, + "acc_norm_stderr": 0.039994111357535424 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.66, + "acc_stderr": 0.04760952285695237, + "acc_norm": 0.66, + "acc_norm_stderr": 0.04760952285695237 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5173410404624278, + "acc_stderr": 0.026902900458666647, + 
"acc_norm": 0.5173410404624278, + "acc_norm_stderr": 0.026902900458666647 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5460122699386503, + "acc_stderr": 0.0391170190467718, + "acc_norm": 0.5460122699386503, + "acc_norm_stderr": 0.0391170190467718 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4876543209876543, + "acc_stderr": 0.027812262269327242, + "acc_norm": 0.4876543209876543, + "acc_norm_stderr": 0.027812262269327242 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5803108808290155, + "acc_stderr": 0.03561587327685884, + "acc_norm": 0.5803108808290155, + "acc_norm_stderr": 0.03561587327685884 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3508771929824561, + "acc_stderr": 0.044895393502706986, + "acc_norm": 0.3508771929824561, + "acc_norm_stderr": 0.044895393502706986 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5486238532110091, + "acc_stderr": 0.021335714711268786, + "acc_norm": 0.5486238532110091, + "acc_norm_stderr": 0.021335714711268786 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3968253968253968, + "acc_stderr": 0.04375888492727061, + "acc_norm": 0.3968253968253968, + "acc_norm_stderr": 0.04375888492727061 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.45098039215686275, + "acc_stderr": 0.02849199358617157, + "acc_norm": 0.45098039215686275, + "acc_norm_stderr": 0.02849199358617157 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6033057851239669, + "acc_stderr": 0.044658697805310094, + "acc_norm": 0.6033057851239669, + "acc_norm_stderr": 0.044658697805310094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5263157894736842, + "acc_stderr": 
0.040633027314866725, + "acc_norm": 0.5263157894736842, + "acc_norm_stderr": 0.040633027314866725 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.39705882352941174, + "acc_stderr": 0.01979448890002412, + "acc_norm": 0.39705882352941174, + "acc_norm_stderr": 0.01979448890002412 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3475177304964539, + "acc_stderr": 0.028406627809590954, + "acc_norm": 0.3475177304964539, + "acc_norm_stderr": 0.028406627809590954 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.44642857142857145, + "acc_stderr": 0.04718471485219588, + "acc_norm": 0.44642857142857145, + "acc_norm_stderr": 0.04718471485219588 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3194444444444444, + "acc_stderr": 0.03179876342176853, + "acc_norm": 0.3194444444444444, + "acc_norm_stderr": 0.03179876342176853 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.20446927374301677, + "acc_stderr": 0.013488813404711914, + "acc_norm": 0.20446927374301677, + "acc_norm_stderr": 0.013488813404711914 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.62, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.62, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3713235294117647, + "acc_stderr": 0.02934980313976587, + "acc_norm": 0.3713235294117647, + "acc_norm_stderr": 0.02934980313976587 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5469387755102041, + "acc_stderr": 0.03186785930004128, + "acc_norm": 0.5469387755102041, + "acc_norm_stderr": 0.03186785930004128 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6582278481012658, + "acc_stderr": 0.030874537537553617, + "acc_norm": 0.6582278481012658, + "acc_norm_stderr": 0.030874537537553617 + }, + 
"harness|ko_mmlu_professional_law|5": { + "acc": 0.3396349413298566, + "acc_stderr": 0.012095592506931973, + "acc_norm": 0.3396349413298566, + "acc_norm_stderr": 0.012095592506931973 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5098039215686274, + "acc_stderr": 0.03508637358630572, + "acc_norm": 0.5098039215686274, + "acc_norm_stderr": 0.03508637358630572 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5212121212121212, + "acc_stderr": 0.03900828913737301, + "acc_norm": 0.5212121212121212, + "acc_norm_stderr": 0.03900828913737301 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.33659730722154224, + "mc1_stderr": 0.01654241280949487, + "mc2": 0.5139701018144873, + "mc2_stderr": 0.01637268513678025 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.40968122786304606, + "acc_stderr": 0.016907568192219478, + "acc_norm": 0.4297520661157025, + "acc_norm_stderr": 0.01701984753597221 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + 
"harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "maywell/PiVoT-0.1-early", + "model_sha": "6eeae58a1a292a1d7f989952a07aead6d5da3c69", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git 
a/maywell/PiVoT-10.7B-Mistral-v0.2/result_2023-12-16 03:28:52.json b/maywell/PiVoT-10.7B-Mistral-v0.2/result_2023-12-16 03:28:52.json new file mode 100644 index 0000000000000000000000000000000000000000..79e89916793488609af0b7b2600fc178934d5c4f --- /dev/null +++ b/maywell/PiVoT-10.7B-Mistral-v0.2/result_2023-12-16 03:28:52.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3651877133105802, + "acc_stderr": 0.0140702655192688, + "acc_norm": 0.4232081911262799, + "acc_norm_stderr": 0.014438036220848022 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3717386974706234, + "acc_stderr": 0.004822814501358897, + "acc_norm": 0.474407488548098, + "acc_norm_stderr": 0.0049832407441013785 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4152046783625731, + "acc_stderr": 0.037792759455032014, + "acc_norm": 0.4152046783625731, + "acc_norm_stderr": 0.037792759455032014 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4368932038834951, + "acc_stderr": 0.04911147107365777, + "acc_norm": 0.4368932038834951, + "acc_norm_stderr": 0.04911147107365777 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4776500638569604, + "acc_stderr": 0.01786209177850787, + "acc_norm": 0.4776500638569604, + "acc_norm_stderr": 0.01786209177850787 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.038201699145179055, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.038201699145179055 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.34893617021276596, + "acc_stderr": 0.031158522131357766, + "acc_norm": 0.34893617021276596, + "acc_norm_stderr": 0.031158522131357766 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3373493975903614, + "acc_stderr": 0.036807836907275814, + "acc_norm": 0.3373493975903614, + "acc_norm_stderr": 0.036807836907275814 + }, + 
"harness|ko_mmlu_philosophy|5": { + "acc": 0.45980707395498394, + "acc_stderr": 0.028306190403305696, + "acc_norm": 0.45980707395498394, + "acc_norm_stderr": 0.028306190403305696 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4618834080717489, + "acc_stderr": 0.033460150119732274, + "acc_norm": 0.4618834080717489, + "acc_norm_stderr": 0.033460150119732274 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2595419847328244, + "acc_stderr": 0.03844876139785271, + "acc_norm": 0.2595419847328244, + "acc_norm_stderr": 0.03844876139785271 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.30808080808080807, + "acc_stderr": 0.03289477330098615, + "acc_norm": 0.30808080808080807, + "acc_norm_stderr": 0.03289477330098615 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.45517241379310347, + "acc_stderr": 0.04149886942192117, + "acc_norm": 0.45517241379310347, + "acc_norm_stderr": 0.04149886942192117 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237654, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237654 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.453781512605042, + "acc_stderr": 0.03233943468182087, + "acc_norm": 0.453781512605042, + "acc_norm_stderr": 0.03233943468182087 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.40512820512820513, + "acc_stderr": 0.024890471769938145, + "acc_norm": 0.40512820512820513, + "acc_norm_stderr": 0.024890471769938145 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.63, + "acc_stderr": 0.04852365870939098, + "acc_norm": 0.63, + "acc_norm_stderr": 0.04852365870939098 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 
0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.048262172941398944, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.048262172941398944 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39408866995073893, + "acc_stderr": 0.034381579670365446, + "acc_norm": 0.39408866995073893, + "acc_norm_stderr": 0.034381579670365446 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4032258064516129, + "acc_stderr": 0.02790615082604114, + "acc_norm": 0.4032258064516129, + "acc_norm_stderr": 0.02790615082604114 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.4230769230769231, + "acc_stderr": 0.032366121762202014, + "acc_norm": 0.4230769230769231, + "acc_norm_stderr": 0.032366121762202014 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4679245283018868, + "acc_stderr": 0.030709486992556545, + "acc_norm": 0.4679245283018868, + "acc_norm_stderr": 0.030709486992556545 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.45454545454545453, + "acc_stderr": 0.04769300568972743, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.04769300568972743 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.32222222222222224, + "acc_stderr": 0.028493465091028597, + "acc_norm": 0.32222222222222224, + "acc_norm_stderr": 0.028493465091028597 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.271523178807947, + "acc_stderr": 0.03631329803969654, + "acc_norm": 0.271523178807947, + "acc_norm_stderr": 0.03631329803969654 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5174129353233831, + "acc_stderr": 0.035333892347392454, + "acc_norm": 0.5174129353233831, + "acc_norm_stderr": 0.035333892347392454 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3352601156069364, + "acc_stderr": 0.035995863012470784, + "acc_norm": 0.3352601156069364, + "acc_norm_stderr": 0.035995863012470784 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 
0.36507936507936506, + "acc_stderr": 0.024796060602699954, + "acc_norm": 0.36507936507936506, + "acc_norm_stderr": 0.024796060602699954 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2638888888888889, + "acc_stderr": 0.03685651095897532, + "acc_norm": 0.2638888888888889, + "acc_norm_stderr": 0.03685651095897532 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.57, + "acc_stderr": 0.04975698519562426, + "acc_norm": 0.57, + "acc_norm_stderr": 0.04975698519562426 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4190751445086705, + "acc_stderr": 0.02656417811142261, + "acc_norm": 0.4190751445086705, + "acc_norm_stderr": 0.02656417811142261 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3619631901840491, + "acc_stderr": 0.037757007291414416, + "acc_norm": 0.3619631901840491, + "acc_norm_stderr": 0.037757007291414416 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.33641975308641975, + "acc_stderr": 0.026289734945952926, + "acc_norm": 0.33641975308641975, + "acc_norm_stderr": 0.026289734945952926 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.46632124352331605, + "acc_stderr": 0.03600244069867178, + "acc_norm": 0.46632124352331605, + "acc_norm_stderr": 0.03600244069867178 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.04185774424022058, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.04185774424022058 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.47522935779816516, + "acc_stderr": 0.021410999753635914, + "acc_norm": 0.47522935779816516, + "acc_norm_stderr": 0.021410999753635914 + }, + 
"harness|ko_mmlu_formal_logic|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.04134913018303316, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.04134913018303316 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4117647058823529, + "acc_stderr": 0.02818059632825929, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.02818059632825929 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.48, + "acc_stderr": 0.05021167315686781, + "acc_norm": 0.48, + "acc_norm_stderr": 0.05021167315686781 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6694214876033058, + "acc_stderr": 0.04294340845212094, + "acc_norm": 0.6694214876033058, + "acc_norm_stderr": 0.04294340845212094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.23026315789473684, + "acc_stderr": 0.03426059424403165, + "acc_norm": 0.23026315789473684, + "acc_norm_stderr": 0.03426059424403165 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.35784313725490197, + "acc_stderr": 0.01939305840235544, + "acc_norm": 0.35784313725490197, + "acc_norm_stderr": 0.01939305840235544 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.028121636040639896, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.028121636040639896 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.042878587513404565, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.042878587513404565 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.03293377139415191, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.03293377139415191 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.19776536312849163, + "acc_stderr": 0.013321620594050947, + "acc_norm": 0.19776536312849163, + "acc_norm_stderr": 0.013321620594050947 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + 
"acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.63, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.63, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3272058823529412, + "acc_stderr": 0.02850145286039656, + "acc_norm": 0.3272058823529412, + "acc_norm_stderr": 0.02850145286039656 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.37551020408163266, + "acc_stderr": 0.03100120903989484, + "acc_norm": 0.37551020408163266, + "acc_norm_stderr": 0.03100120903989484 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.42616033755274263, + "acc_stderr": 0.032190357031317736, + "acc_norm": 0.42616033755274263, + "acc_norm_stderr": 0.032190357031317736 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.32920469361147325, + "acc_stderr": 0.012002091666902312, + "acc_norm": 0.32920469361147325, + "acc_norm_stderr": 0.012002091666902312 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4264705882352941, + "acc_stderr": 0.034711579079534254, + "acc_norm": 0.4264705882352941, + "acc_norm_stderr": 0.034711579079534254 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.43636363636363634, + "acc_stderr": 0.03872592983524753, + "acc_norm": 0.43636363636363634, + "acc_norm_stderr": 0.03872592983524753 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.30599755201958384, + "mc1_stderr": 0.016132229728155062, + "mc2": 0.48328951508321544, + "mc2_stderr": 0.015862522599324993 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4025974025974026, + "acc_stderr": 0.016861020486407786, + "acc_norm": 0.4167650531286895, + "acc_norm_stderr": 0.016950489146108833 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + 
"harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + 
"harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "maywell/PiVoT-10.7B-Mistral-v0.2", + "model_sha": "a496457d0743b6030ffbb96dad2dc6a62d143943", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/maywell/Qwen2-7B-Multilingual-RP/result_2024-06-24 23:33:09.json b/maywell/Qwen2-7B-Multilingual-RP/result_2024-06-24 23:33:09.json new file mode 100644 index 0000000000000000000000000000000000000000..8cf77b5355951433416a724da956af4f181d1ac8 --- /dev/null +++ b/maywell/Qwen2-7B-Multilingual-RP/result_2024-06-24 23:33:09.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3626279863481229, + "acc_stderr": 0.014049106564955007, + "acc_norm": 0.4121160409556314, + "acc_norm_stderr": 0.014383915302225408 + }, + "harness|ko_hellaswag|10": { + "acc": 0.2504481179047998, + "acc_stderr": 0.004323856300539177, + "acc_norm": 0.2504481179047998, + "acc_norm_stderr": 0.004323856300539177 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6198830409356725, + "acc_stderr": 0.03722965741385539, + "acc_norm": 0.6198830409356725, + "acc_norm_stderr": 0.03722965741385539 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.7864077669902912, + "acc_stderr": 0.04058042015646034, + "acc_norm": 0.7864077669902912, + "acc_norm_stderr": 0.04058042015646034 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.611749680715198, + "acc_stderr": 
0.017427673295544323, + "acc_norm": 0.611749680715198, + "acc_norm_stderr": 0.017427673295544323 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.362962962962963, + "acc_stderr": 0.041539484047424, + "acc_norm": 0.362962962962963, + "acc_norm_stderr": 0.041539484047424 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.5148936170212766, + "acc_stderr": 0.03267151848924776, + "acc_norm": 0.5148936170212766, + "acc_norm_stderr": 0.03267151848924776 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4939759036144578, + "acc_stderr": 0.03892212195333047, + "acc_norm": 0.4939759036144578, + "acc_norm_stderr": 0.03892212195333047 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5594855305466238, + "acc_stderr": 0.028196400574197422, + "acc_norm": 0.5594855305466238, + "acc_norm_stderr": 0.028196400574197422 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5919282511210763, + "acc_stderr": 0.03298574607842821, + "acc_norm": 0.5919282511210763, + "acc_norm_stderr": 0.03298574607842821 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5801526717557252, + "acc_stderr": 0.04328577215262973, + "acc_norm": 0.5801526717557252, + "acc_norm_stderr": 0.04328577215262973 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7171717171717171, + "acc_stderr": 0.03208779558786752, + "acc_norm": 0.7171717171717171, + "acc_norm_stderr": 0.03208779558786752 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.6206896551724138, + "acc_stderr": 0.040434618619167466, + "acc_norm": 0.6206896551724138, + "acc_norm_stderr": 0.040434618619167466 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 
0.04690650298201942, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04690650298201942 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6764705882352942, + "acc_stderr": 0.030388353551886786, + "acc_norm": 0.6764705882352942, + "acc_norm_stderr": 0.030388353551886786 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5974358974358974, + "acc_stderr": 0.024864995159767745, + "acc_norm": 0.5974358974358974, + "acc_norm_stderr": 0.024864995159767745 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.71, + "acc_stderr": 0.04560480215720685, + "acc_norm": 0.71, + "acc_norm_stderr": 0.04560480215720685 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6851851851851852, + "acc_stderr": 0.04489931073591312, + "acc_norm": 0.6851851851851852, + "acc_norm_stderr": 0.04489931073591312 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.45320197044334976, + "acc_stderr": 0.035025446508458714, + "acc_norm": 0.45320197044334976, + "acc_norm_stderr": 0.035025446508458714 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6064516129032258, + "acc_stderr": 0.02779187875313226, + "acc_norm": 0.6064516129032258, + "acc_norm_stderr": 0.02779187875313226 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7863247863247863, + "acc_stderr": 0.02685345037700915, + "acc_norm": 0.7863247863247863, + "acc_norm_stderr": 0.02685345037700915 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.6150943396226415, + "acc_stderr": 0.02994649856769995, + "acc_norm": 0.6150943396226415, + "acc_norm_stderr": 0.02994649856769995 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5818181818181818, + "acc_stderr": 0.04724577405731573, + "acc_norm": 0.5818181818181818, + "acc_norm_stderr": 0.04724577405731573 + }, + "harness|ko_mmlu_high_school_mathematics|5": { 
+ "acc": 0.48518518518518516, + "acc_stderr": 0.030472153249328584, + "acc_norm": 0.48518518518518516, + "acc_norm_stderr": 0.030472153249328584 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3973509933774834, + "acc_stderr": 0.039955240076816806, + "acc_norm": 0.3973509933774834, + "acc_norm_stderr": 0.039955240076816806 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6965174129353234, + "acc_stderr": 0.03251006816458618, + "acc_norm": 0.6965174129353234, + "acc_norm_stderr": 0.03251006816458618 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5317919075144508, + "acc_stderr": 0.03804749744364763, + "acc_norm": 0.5317919075144508, + "acc_norm_stderr": 0.03804749744364763 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.5661375661375662, + "acc_stderr": 0.025525034382474894, + "acc_norm": 0.5661375661375662, + "acc_norm_stderr": 0.025525034382474894 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4791666666666667, + "acc_stderr": 0.041775789507399935, + "acc_norm": 0.4791666666666667, + "acc_norm_stderr": 0.041775789507399935 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.69, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.69, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5722543352601156, + "acc_stderr": 0.026636539741116076, + "acc_norm": 0.5722543352601156, + "acc_norm_stderr": 0.026636539741116076 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5644171779141104, + "acc_stderr": 0.03895632464138937, + "acc_norm": 0.5644171779141104, + "acc_norm_stderr": 0.03895632464138937 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5740740740740741, + "acc_stderr": 0.027513747284379424, + "acc_norm": 0.5740740740740741, + "acc_norm_stderr": 0.027513747284379424 + }, + 
"harness|ko_mmlu_college_mathematics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5803108808290155, + "acc_stderr": 0.035615873276858834, + "acc_norm": 0.5803108808290155, + "acc_norm_stderr": 0.035615873276858834 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.4473684210526316, + "acc_stderr": 0.04677473004491199, + "acc_norm": 0.4473684210526316, + "acc_norm_stderr": 0.04677473004491199 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6495412844036698, + "acc_stderr": 0.020456077599824454, + "acc_norm": 0.6495412844036698, + "acc_norm_stderr": 0.020456077599824454 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.49206349206349204, + "acc_stderr": 0.044715725362943486, + "acc_norm": 0.49206349206349204, + "acc_norm_stderr": 0.044715725362943486 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.6013071895424836, + "acc_stderr": 0.028036092273891772, + "acc_norm": 0.6013071895424836, + "acc_norm_stderr": 0.028036092273891772 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7355371900826446, + "acc_stderr": 0.040261875275912046, + "acc_norm": 0.7355371900826446, + "acc_norm_stderr": 0.040261875275912046 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5986842105263158, + "acc_stderr": 0.039889037033362836, + "acc_norm": 0.5986842105263158, + "acc_norm_stderr": 0.039889037033362836 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5, + "acc_stderr": 0.020227834851568375, + "acc_norm": 0.5, + "acc_norm_stderr": 0.020227834851568375 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.42907801418439717, + "acc_stderr": 0.02952591430255856, + "acc_norm": 0.42907801418439717, + "acc_norm_stderr": 
0.02952591430255856 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.45535714285714285, + "acc_stderr": 0.04726835553719099, + "acc_norm": 0.45535714285714285, + "acc_norm_stderr": 0.04726835553719099 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5277777777777778, + "acc_stderr": 0.0340470532865388, + "acc_norm": 0.5277777777777778, + "acc_norm_stderr": 0.0340470532865388 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.329608938547486, + "acc_stderr": 0.01572153107518388, + "acc_norm": 0.329608938547486, + "acc_norm_stderr": 0.01572153107518388 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.049999999999999996, + "acc_norm": 0.45, + "acc_norm_stderr": 0.049999999999999996 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.71, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.71, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4742647058823529, + "acc_stderr": 0.03033257809455504, + "acc_norm": 0.4742647058823529, + "acc_norm_stderr": 0.03033257809455504 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6, + "acc_stderr": 0.031362502409358936, + "acc_norm": 0.6, + "acc_norm_stderr": 0.031362502409358936 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7257383966244726, + "acc_stderr": 0.029041333510598035, + "acc_norm": 0.7257383966244726, + "acc_norm_stderr": 0.029041333510598035 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.37614080834419816, + "acc_stderr": 0.012372214430599814, + "acc_norm": 0.37614080834419816, + "acc_norm_stderr": 0.012372214430599814 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6715686274509803, + "acc_stderr": 0.032962451101722294, + "acc_norm": 0.6715686274509803, + "acc_norm_stderr": 0.032962451101722294 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.036810508691615486, + 
"acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.036810508691615486 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.31946144430844553, + "mc1_stderr": 0.016322644182960498, + "mc2": 0.4923365120333761, + "mc2_stderr": 0.015813286652984274 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5466351829988194, + "acc_stderr": 0.01711541822522686, + "acc_norm": 0.58913813459268, + "acc_norm_stderr": 0.016914972767841055 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + 
"harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "maywell/Qwen2-7B-Multilingual-RP", + "model_sha": "487e8f0498419e4d1188f661dbb63bd629be4638", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/maywell/Synatra-10.7B-v0.4/result_2023-12-27 12:59:51.json b/maywell/Synatra-10.7B-v0.4/result_2023-12-27 12:59:51.json new file mode 100644 index 0000000000000000000000000000000000000000..f940c28c4c558331bf47962678366e92b9132caa --- /dev/null +++ b/maywell/Synatra-10.7B-v0.4/result_2023-12-27 12:59:51.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.46501706484641636, + "acc_stderr": 0.014575583922019667, + "acc_norm": 0.507679180887372, + "acc_norm_stderr": 0.014609667440892574 + 
}, + "harness|ko_hellaswag|10": { + "acc": 0.44592710615415254, + "acc_stderr": 0.004960516570284905, + "acc_norm": 0.6014738099980084, + "acc_norm_stderr": 0.004885942040894561 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6198830409356725, + "acc_stderr": 0.037229657413855394, + "acc_norm": 0.6198830409356725, + "acc_norm_stderr": 0.037229657413855394 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6019417475728155, + "acc_stderr": 0.048467482539772386, + "acc_norm": 0.6019417475728155, + "acc_norm_stderr": 0.048467482539772386 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6513409961685823, + "acc_stderr": 0.017041243143490977, + "acc_norm": 0.6513409961685823, + "acc_norm_stderr": 0.017041243143490977 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.04244633238353229, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.04244633238353229 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.48936170212765956, + "acc_stderr": 0.03267862331014063, + "acc_norm": 0.48936170212765956, + "acc_norm_stderr": 0.03267862331014063 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4457831325301205, + "acc_stderr": 0.038695433234721015, + "acc_norm": 0.4457831325301205, + "acc_norm_stderr": 0.038695433234721015 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5819935691318328, + "acc_stderr": 0.028013651891995072, + "acc_norm": 0.5819935691318328, + "acc_norm_stderr": 0.028013651891995072 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5515695067264574, + "acc_stderr": 0.033378837362550984, + "acc_norm": 0.5515695067264574, + "acc_norm_stderr": 0.033378837362550984 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5801526717557252, + "acc_stderr": 0.043285772152629735, + "acc_norm": 0.5801526717557252, + "acc_norm_stderr": 
0.043285772152629735 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6616161616161617, + "acc_stderr": 0.033711241426263035, + "acc_norm": 0.6616161616161617, + "acc_norm_stderr": 0.033711241426263035 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3931034482758621, + "acc_stderr": 0.0407032901370707, + "acc_norm": 0.3931034482758621, + "acc_norm_stderr": 0.0407032901370707 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.04389869956808778, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.04389869956808778 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5084033613445378, + "acc_stderr": 0.03247390276569669, + "acc_norm": 0.5084033613445378, + "acc_norm_stderr": 0.03247390276569669 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4641025641025641, + "acc_stderr": 0.02528558599001783, + "acc_norm": 0.4641025641025641, + "acc_norm_stderr": 0.02528558599001783 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5462962962962963, + "acc_stderr": 0.048129173245368216, + "acc_norm": 0.5462962962962963, + "acc_norm_stderr": 0.048129173245368216 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3891625615763547, + "acc_stderr": 0.034304624161038716, + "acc_norm": 0.3891625615763547, + "acc_norm_stderr": 0.034304624161038716 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5774193548387097, + "acc_stderr": 0.02810096472427264, + "acc_norm": 
0.5774193548387097, + "acc_norm_stderr": 0.02810096472427264 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.8034188034188035, + "acc_stderr": 0.02603538609895129, + "acc_norm": 0.8034188034188035, + "acc_norm_stderr": 0.02603538609895129 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5283018867924528, + "acc_stderr": 0.0307235352490061, + "acc_norm": 0.5283018867924528, + "acc_norm_stderr": 0.0307235352490061 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6, + "acc_stderr": 0.0469237132203465, + "acc_norm": 0.6, + "acc_norm_stderr": 0.0469237132203465 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3074074074074074, + "acc_stderr": 0.02813325257881564, + "acc_norm": 0.3074074074074074, + "acc_norm_stderr": 0.02813325257881564 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.37748344370860926, + "acc_stderr": 0.0395802723112157, + "acc_norm": 0.37748344370860926, + "acc_norm_stderr": 0.0395802723112157 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.03333333333333333, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.03333333333333333 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.43352601156069365, + "acc_stderr": 0.037786210790920545, + "acc_norm": 0.43352601156069365, + "acc_norm_stderr": 0.037786210790920545 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.36243386243386244, + "acc_stderr": 0.024757473902752045, + "acc_norm": 0.36243386243386244, + "acc_norm_stderr": 0.024757473902752045 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4305555555555556, + "acc_stderr": 0.041406856391115014, + "acc_norm": 0.4305555555555556, + "acc_norm_stderr": 0.041406856391115014 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.66, + "acc_stderr": 0.04760952285695237, + 
"acc_norm": 0.66, + "acc_norm_stderr": 0.04760952285695237 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5317919075144508, + "acc_stderr": 0.026864624366756643, + "acc_norm": 0.5317919075144508, + "acc_norm_stderr": 0.026864624366756643 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5030674846625767, + "acc_stderr": 0.03928297078179663, + "acc_norm": 0.5030674846625767, + "acc_norm_stderr": 0.03928297078179663 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5524691358024691, + "acc_stderr": 0.027667138569422697, + "acc_norm": 0.5524691358024691, + "acc_norm_stderr": 0.027667138569422697 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7046632124352331, + "acc_stderr": 0.032922966391551414, + "acc_norm": 0.7046632124352331, + "acc_norm_stderr": 0.032922966391551414 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.38596491228070173, + "acc_stderr": 0.04579639422070435, + "acc_norm": 0.38596491228070173, + "acc_norm_stderr": 0.04579639422070435 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6128440366972477, + "acc_stderr": 0.02088423199264345, + "acc_norm": 0.6128440366972477, + "acc_norm_stderr": 0.02088423199264345 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.373015873015873, + "acc_stderr": 0.04325506042017086, + "acc_norm": 0.373015873015873, + "acc_norm_stderr": 0.04325506042017086 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5261437908496732, + "acc_stderr": 0.028590752958852387, + "acc_norm": 0.5261437908496732, + "acc_norm_stderr": 0.028590752958852387 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.52, + "acc_stderr": 0.05021167315686779, + "acc_norm": 0.52, + "acc_norm_stderr": 0.05021167315686779 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.71900826446281, + "acc_stderr": 
0.04103203830514512, + "acc_norm": 0.71900826446281, + "acc_norm_stderr": 0.04103203830514512 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5592105263157895, + "acc_stderr": 0.040403110624904356, + "acc_norm": 0.5592105263157895, + "acc_norm_stderr": 0.040403110624904356 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.477124183006536, + "acc_stderr": 0.020206653187884786, + "acc_norm": 0.477124183006536, + "acc_norm_stderr": 0.020206653187884786 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.36524822695035464, + "acc_stderr": 0.028723863853281278, + "acc_norm": 0.36524822695035464, + "acc_norm_stderr": 0.028723863853281278 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.44642857142857145, + "acc_stderr": 0.047184714852195886, + "acc_norm": 0.44642857142857145, + "acc_norm_stderr": 0.047184714852195886 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4027777777777778, + "acc_stderr": 0.03344887382997865, + "acc_norm": 0.4027777777777778, + "acc_norm_stderr": 0.03344887382997865 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.16871508379888267, + "acc_stderr": 0.012525156087191954, + "acc_norm": 0.16871508379888267, + "acc_norm_stderr": 0.012525156087191954 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.049999999999999996, + "acc_norm": 0.45, + "acc_norm_stderr": 0.049999999999999996 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.65, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.65, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.45955882352941174, + "acc_stderr": 0.030273325077345755, + "acc_norm": 0.45955882352941174, + "acc_norm_stderr": 0.030273325077345755 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5795918367346938, + "acc_stderr": 0.03160106993449601, + "acc_norm": 0.5795918367346938, + "acc_norm_stderr": 0.03160106993449601 + }, + 
"harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7172995780590717, + "acc_stderr": 0.029312814153955934, + "acc_norm": 0.7172995780590717, + "acc_norm_stderr": 0.029312814153955934 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3683181225554107, + "acc_stderr": 0.012319403369564642, + "acc_norm": 0.3683181225554107, + "acc_norm_stderr": 0.012319403369564642 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6176470588235294, + "acc_stderr": 0.0341078533890472, + "acc_norm": 0.6176470588235294, + "acc_norm_stderr": 0.0341078533890472 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5696969696969697, + "acc_stderr": 0.038662259628790774, + "acc_norm": 0.5696969696969697, + "acc_norm_stderr": 0.038662259628790774 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2864137086903305, + "mc1_stderr": 0.01582614243950234, + "mc2": 0.4537433695691716, + "mc2_stderr": 0.01529287884999072 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.48760330578512395, + "acc_stderr": 0.017185069732676524, + "acc_norm": 0.5407319952774499, + "acc_norm_stderr": 0.01713321827653767 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + 
"harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "maywell/Synatra-10.7B-v0.4", + "model_sha": "a311ddd48d56f9451c96f88e8a79fad6faba476f", + "model_dtype": "torch.float16", + 
"lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/maywell/Synatra-11B-Tb2M_SM/result_2023-10-16 01:20:57.json b/maywell/Synatra-11B-Tb2M_SM/result_2023-10-16 01:20:57.json new file mode 100644 index 0000000000000000000000000000000000000000..f1ec3e80bdee5ac807137a77888bda3a5b656998 --- /dev/null +++ b/maywell/Synatra-11B-Tb2M_SM/result_2023-10-16 01:20:57.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.1766211604095563, + "acc_stderr": 0.011144042769316503, + "acc_norm": 0.24146757679180889, + "acc_norm_stderr": 0.012506564839739432 + }, + "harness|ko_hellaswag|10": { + "acc": 0.2528380800637323, + "acc_stderr": 0.004337506344899919, + "acc_norm": 0.24965146385182235, + "acc_norm_stderr": 0.004319267432460665 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.21052631578947367, + "acc_stderr": 0.0312678171466318, + "acc_norm": 0.21052631578947367, + "acc_norm_stderr": 0.0312678171466318 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2524271844660194, + "acc_stderr": 0.04301250399690877, + "acc_norm": 0.2524271844660194, + "acc_norm_stderr": 0.04301250399690877 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.28735632183908044, + "acc_stderr": 0.0161824107306827, + "acc_norm": 0.28735632183908044, + "acc_norm_stderr": 0.0161824107306827 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2518518518518518, + "acc_stderr": 0.03749850709174021, + "acc_norm": 0.2518518518518518, + "acc_norm_stderr": 0.03749850709174021 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768077, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768077 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.32340425531914896, + "acc_stderr": 0.03057944277361034, + "acc_norm": 0.32340425531914896, + "acc_norm_stderr": 0.03057944277361034 + }, + 
"harness|ko_mmlu_virology|5": { + "acc": 0.3192771084337349, + "acc_stderr": 0.03629335329947859, + "acc_norm": 0.3192771084337349, + "acc_norm_stderr": 0.03629335329947859 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2733118971061093, + "acc_stderr": 0.025311765975426115, + "acc_norm": 0.2733118971061093, + "acc_norm_stderr": 0.025311765975426115 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.37668161434977576, + "acc_stderr": 0.03252113489929189, + "acc_norm": 0.37668161434977576, + "acc_norm_stderr": 0.03252113489929189 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.22900763358778625, + "acc_stderr": 0.036853466317118506, + "acc_norm": 0.22900763358778625, + "acc_norm_stderr": 0.036853466317118506 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768077, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768077 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.21717171717171718, + "acc_stderr": 0.02937661648494563, + "acc_norm": 0.21717171717171718, + "acc_norm_stderr": 0.02937661648494563 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2206896551724138, + "acc_stderr": 0.03455930201924811, + "acc_norm": 0.2206896551724138, + "acc_norm_stderr": 0.03455930201924811 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.19607843137254902, + "acc_stderr": 0.03950581861179962, + "acc_norm": 0.19607843137254902, + "acc_norm_stderr": 0.03950581861179962 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.23109243697478993, + "acc_stderr": 0.027381406927868973, + "acc_norm": 0.23109243697478993, + "acc_norm_stderr": 0.027381406927868973 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2205128205128205, + "acc_stderr": 0.021020672680827912, + "acc_norm": 0.2205128205128205, + "acc_norm_stderr": 0.021020672680827912 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.24, + 
"acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.044143436668549335, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.044143436668549335 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.270935960591133, + "acc_stderr": 0.031270907132976984, + "acc_norm": 0.270935960591133, + "acc_norm_stderr": 0.031270907132976984 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.25483870967741934, + "acc_stderr": 0.024790118459332208, + "acc_norm": 0.25483870967741934, + "acc_norm_stderr": 0.024790118459332208 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2564102564102564, + "acc_stderr": 0.02860595370200424, + "acc_norm": 0.2564102564102564, + "acc_norm_stderr": 0.02860595370200424 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2679245283018868, + "acc_stderr": 0.027257260322494845, + "acc_norm": 0.2679245283018868, + "acc_norm_stderr": 0.027257260322494845 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.34545454545454546, + "acc_stderr": 0.04554619617541054, + "acc_norm": 0.34545454545454546, + "acc_norm_stderr": 0.04554619617541054 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.02684205787383371, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.02684205787383371 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.1986754966887417, + "acc_stderr": 0.03257847384436776, + "acc_norm": 0.1986754966887417, + "acc_norm_stderr": 0.03257847384436776 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.24378109452736318, + "acc_stderr": 0.030360490154014645, + "acc_norm": 0.24378109452736318, + "acc_norm_stderr": 0.030360490154014645 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.20809248554913296, + "acc_stderr": 
0.030952890217749884, + "acc_norm": 0.20809248554913296, + "acc_norm_stderr": 0.030952890217749884 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2566137566137566, + "acc_stderr": 0.022494510767503154, + "acc_norm": 0.2566137566137566, + "acc_norm_stderr": 0.022494510767503154 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.03476590104304134, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.03476590104304134 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.24566473988439305, + "acc_stderr": 0.02317629820399201, + "acc_norm": 0.24566473988439305, + "acc_norm_stderr": 0.02317629820399201 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.24539877300613497, + "acc_stderr": 0.03380939813943353, + "acc_norm": 0.24539877300613497, + "acc_norm_stderr": 0.03380939813943353 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2654320987654321, + "acc_stderr": 0.02456922360046085, + "acc_norm": 0.2654320987654321, + "acc_norm_stderr": 0.02456922360046085 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.20725388601036268, + "acc_stderr": 0.029252823291803627, + "acc_norm": 0.20725388601036268, + "acc_norm_stderr": 0.029252823291803627 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.042270544512321984, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.042270544512321984 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 
0.23669724770642203, + "acc_stderr": 0.018224078117299095, + "acc_norm": 0.23669724770642203, + "acc_norm_stderr": 0.018224078117299095 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.1984126984126984, + "acc_stderr": 0.03567016675276862, + "acc_norm": 0.1984126984126984, + "acc_norm_stderr": 0.03567016675276862 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.02392915551735129, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.02392915551735129 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.24793388429752067, + "acc_stderr": 0.039418975265163025, + "acc_norm": 0.24793388429752067, + "acc_norm_stderr": 0.039418975265163025 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.18421052631578946, + "acc_stderr": 0.031546980450822305, + "acc_norm": 0.18421052631578946, + "acc_norm_stderr": 0.031546980450822305 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2565359477124183, + "acc_stderr": 0.017667841612378984, + "acc_norm": 0.2565359477124183, + "acc_norm_stderr": 0.017667841612378984 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.23404255319148937, + "acc_stderr": 0.025257861359432414, + "acc_norm": 0.23404255319148937, + "acc_norm_stderr": 0.025257861359432414 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3125, + "acc_stderr": 0.043994650575715215, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.043994650575715215 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.1527777777777778, + "acc_stderr": 0.024536326026134238, + "acc_norm": 0.1527777777777778, + "acc_norm_stderr": 0.024536326026134238 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23798882681564246, + "acc_stderr": 0.014242630070574892, + "acc_norm": 0.23798882681564246, + "acc_norm_stderr": 0.014242630070574892 + }, + 
"harness|ko_mmlu_college_computer_science|5": { + "acc": 0.15, + "acc_stderr": 0.0358870281282637, + "acc_norm": 0.15, + "acc_norm_stderr": 0.0358870281282637 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816507, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816507 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.20220588235294118, + "acc_stderr": 0.02439819298665492, + "acc_norm": 0.20220588235294118, + "acc_norm_stderr": 0.02439819298665492 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.18775510204081633, + "acc_stderr": 0.025000256039546198, + "acc_norm": 0.18775510204081633, + "acc_norm_stderr": 0.025000256039546198 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2616033755274262, + "acc_stderr": 0.028609516716994934, + "acc_norm": 0.2616033755274262, + "acc_norm_stderr": 0.028609516716994934 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2457627118644068, + "acc_stderr": 0.010996156635142692, + "acc_norm": 0.2457627118644068, + "acc_norm_stderr": 0.010996156635142692 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.029771775228145638, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.029771775228145638 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.24242424242424243, + "acc_stderr": 0.033464098810559534, + "acc_norm": 0.24242424242424243, + "acc_norm_stderr": 0.033464098810559534 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2423500611995104, + "mc1_stderr": 0.015000674373570345, + "mc2": 0.4752303618111022, + "mc2_stderr": 0.01719345285029173 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.09327036599763873, + "acc_stderr": 0.009998286190276725, + "acc_norm": 0.3742621015348288, + "acc_norm_stderr": 0.01663791778979874 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + 
"harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + 
"harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "maywell/Synatra-11B-Tb2M_SM", + "model_sha": "7f2867881e6ebd2f1383a3d0be8b5573dd4897ad", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/maywell/Synatra-11B-Testbench-2/result_2023-10-16 00:23:46.json b/maywell/Synatra-11B-Testbench-2/result_2023-10-16 00:23:46.json new file mode 100644 index 0000000000000000000000000000000000000000..cd083d19189a415db51a90215fdf9142963dcb0b --- /dev/null +++ b/maywell/Synatra-11B-Testbench-2/result_2023-10-16 00:23:46.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3455631399317406, + "acc_stderr": 0.013896938461145685, + "acc_norm": 0.4197952218430034, + "acc_norm_stderr": 0.01442218122630302 + }, + "harness|ko_hellaswag|10": { + "acc": 0.37731527584146585, + "acc_stderr": 0.00483724201519111, + "acc_norm": 0.48775144393547104, + "acc_norm_stderr": 0.0049882839816310495 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5263157894736842, + "acc_stderr": 0.03829509868994727, + "acc_norm": 0.5263157894736842, + "acc_norm_stderr": 0.03829509868994727 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4854368932038835, + "acc_stderr": 0.04948637324026637, + "acc_norm": 0.4854368932038835, + 
"acc_norm_stderr": 0.04948637324026637 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.49680715197956576, + "acc_stderr": 0.01787959894593307, + "acc_norm": 0.49680715197956576, + "acc_norm_stderr": 0.01787959894593307 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.32592592592592595, + "acc_stderr": 0.040491220417025055, + "acc_norm": 0.32592592592592595, + "acc_norm_stderr": 0.040491220417025055 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3404255319148936, + "acc_stderr": 0.030976692998534443, + "acc_norm": 0.3404255319148936, + "acc_norm_stderr": 0.030976692998534443 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39156626506024095, + "acc_stderr": 0.037998574544796354, + "acc_norm": 0.39156626506024095, + "acc_norm_stderr": 0.037998574544796354 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4630225080385852, + "acc_stderr": 0.02832032583010591, + "acc_norm": 0.4630225080385852, + "acc_norm_stderr": 0.02832032583010591 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.40358744394618834, + "acc_stderr": 0.03292802819330314, + "acc_norm": 0.40358744394618834, + "acc_norm_stderr": 0.03292802819330314 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48854961832061067, + "acc_stderr": 0.043841400240780176, + "acc_norm": 0.48854961832061067, + "acc_norm_stderr": 0.043841400240780176 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5656565656565656, + "acc_stderr": 0.03531505879359183, + "acc_norm": 0.5656565656565656, + "acc_norm_stderr": 0.03531505879359183 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5103448275862069, + "acc_stderr": 0.04165774775728762, + "acc_norm": 
0.5103448275862069, + "acc_norm_stderr": 0.04165774775728762 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.04440521906179327, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.04440521906179327 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.032252942323996406, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.032252942323996406 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4358974358974359, + "acc_stderr": 0.025141801511177498, + "acc_norm": 0.4358974358974359, + "acc_norm_stderr": 0.025141801511177498 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.04820403072760627, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.04820403072760627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.37438423645320196, + "acc_stderr": 0.03405155380561952, + "acc_norm": 0.37438423645320196, + "acc_norm_stderr": 0.03405155380561952 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.44516129032258067, + "acc_stderr": 0.02827241018621491, + "acc_norm": 0.44516129032258067, + "acc_norm_stderr": 0.02827241018621491 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6367521367521367, + "acc_stderr": 0.03150712523091264, + "acc_norm": 0.6367521367521367, + "acc_norm_stderr": 0.03150712523091264 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5056603773584906, + "acc_stderr": 0.03077090076385131, + "acc_norm": 0.5056603773584906, + "acc_norm_stderr": 0.03077090076385131 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4727272727272727, + 
"acc_stderr": 0.04782001791380063, + "acc_norm": 0.4727272727272727, + "acc_norm_stderr": 0.04782001791380063 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.28888888888888886, + "acc_stderr": 0.027634907264178544, + "acc_norm": 0.28888888888888886, + "acc_norm_stderr": 0.027634907264178544 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + "acc_stderr": 0.037345356767871984, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.037345356767871984 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5870646766169154, + "acc_stderr": 0.03481520803367348, + "acc_norm": 0.5870646766169154, + "acc_norm_stderr": 0.03481520803367348 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4277456647398844, + "acc_stderr": 0.03772446857518028, + "acc_norm": 0.4277456647398844, + "acc_norm_stderr": 0.03772446857518028 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.024278568024307688, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.024278568024307688 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.375, + "acc_stderr": 0.04048439222695598, + "acc_norm": 0.375, + "acc_norm_stderr": 0.04048439222695598 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.56, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.56, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.523121387283237, + "acc_stderr": 0.026890297881303118, + "acc_norm": 0.523121387283237, + "acc_norm_stderr": 0.026890297881303118 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.39263803680981596, + "acc_stderr": 0.038367409078310294, + "acc_norm": 0.39263803680981596, + "acc_norm_stderr": 0.038367409078310294 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.404320987654321, 
+ "acc_stderr": 0.027306625297327684, + "acc_norm": 0.404320987654321, + "acc_norm_stderr": 0.027306625297327684 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5181347150259067, + "acc_stderr": 0.036060650018329185, + "acc_norm": 0.5181347150259067, + "acc_norm_stderr": 0.036060650018329185 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.30701754385964913, + "acc_stderr": 0.043391383225798594, + "acc_norm": 0.30701754385964913, + "acc_norm_stderr": 0.043391383225798594 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.48990825688073397, + "acc_stderr": 0.021432956203453316, + "acc_norm": 0.48990825688073397, + "acc_norm_stderr": 0.021432956203453316 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.38095238095238093, + "acc_stderr": 0.04343525428949097, + "acc_norm": 0.38095238095238093, + "acc_norm_stderr": 0.04343525428949097 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.46078431372549017, + "acc_stderr": 0.028541722692618874, + "acc_norm": 0.46078431372549017, + "acc_norm_stderr": 0.028541722692618874 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.41, + "acc_stderr": 0.04943110704237103, + "acc_norm": 0.41, + "acc_norm_stderr": 0.04943110704237103 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6446280991735537, + "acc_stderr": 0.0436923632657398, + "acc_norm": 0.6446280991735537, + "acc_norm_stderr": 0.0436923632657398 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4276315789473684, + "acc_stderr": 0.04026097083296558, + "acc_norm": 0.4276315789473684, + "acc_norm_stderr": 0.04026097083296558 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.35784313725490197, + "acc_stderr": 0.019393058402355442, + "acc_norm": 0.35784313725490197, + "acc_norm_stderr": 0.019393058402355442 + }, + 
"harness|ko_mmlu_professional_accounting|5": { + "acc": 0.30141843971631205, + "acc_stderr": 0.02737412888263115, + "acc_norm": 0.30141843971631205, + "acc_norm_stderr": 0.02737412888263115 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.41964285714285715, + "acc_stderr": 0.046840993210771065, + "acc_norm": 0.41964285714285715, + "acc_norm_stderr": 0.046840993210771065 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.39351851851851855, + "acc_stderr": 0.03331747876370312, + "acc_norm": 0.39351851851851855, + "acc_norm_stderr": 0.03331747876370312 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27932960893854747, + "acc_stderr": 0.015005762446786154, + "acc_norm": 0.27932960893854747, + "acc_norm_stderr": 0.015005762446786154 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.41544117647058826, + "acc_stderr": 0.029935342707877746, + "acc_norm": 0.41544117647058826, + "acc_norm_stderr": 0.029935342707877746 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4857142857142857, + "acc_stderr": 0.03199615232806287, + "acc_norm": 0.4857142857142857, + "acc_norm_stderr": 0.03199615232806287 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5527426160337553, + "acc_stderr": 0.03236564251614192, + "acc_norm": 0.5527426160337553, + "acc_norm_stderr": 0.03236564251614192 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2985658409387223, + "acc_stderr": 0.011688060141794224, + "acc_norm": 0.2985658409387223, + "acc_norm_stderr": 0.011688060141794224 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.38235294117647056, + "acc_stderr": 
0.03410785338904719, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.03410785338904719 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.38181818181818183, + "acc_stderr": 0.037937131711656344, + "acc_norm": 0.38181818181818183, + "acc_norm_stderr": 0.037937131711656344 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.27050183598531213, + "mc1_stderr": 0.015550778332842881, + "mc2": 0.4342691202696536, + "mc2_stderr": 0.015037727340783071 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3860684769775679, + "acc_stderr": 0.016738130760321743, + "acc_norm": 0.4510035419126328, + "acc_norm_stderr": 0.017107618859549357 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + 
"harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "maywell/Synatra-11B-Testbench-2", + "model_sha": "50c90dfe257d5c5ad4c3c6a1fb29f6a5066c085a", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/maywell/Synatra-11B-Testbench/result_2023-10-15 12:35:17.json b/maywell/Synatra-11B-Testbench/result_2023-10-15 12:35:17.json new file mode 100644 index 0000000000000000000000000000000000000000..018fafb18aca26e5fe66cebaf5f1462ce779a38e --- /dev/null +++ 
b/maywell/Synatra-11B-Testbench/result_2023-10-15 12:35:17.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3378839590443686, + "acc_stderr": 0.013822047922283509, + "acc_norm": 0.3856655290102389, + "acc_norm_stderr": 0.014224250973257177 + }, + "harness|ko_hellaswag|10": { + "acc": 0.37024497112129057, + "acc_stderr": 0.004818833521340358, + "acc_norm": 0.4742083250348536, + "acc_norm_stderr": 0.00498313847960438 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5263157894736842, + "acc_stderr": 0.03829509868994727, + "acc_norm": 0.5263157894736842, + "acc_norm_stderr": 0.03829509868994727 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5145631067961165, + "acc_stderr": 0.04948637324026637, + "acc_norm": 0.5145631067961165, + "acc_norm_stderr": 0.04948637324026637 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5019157088122606, + "acc_stderr": 0.017879832259026677, + "acc_norm": 0.5019157088122606, + "acc_norm_stderr": 0.017879832259026677 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4, + "acc_stderr": 0.04232073695151589, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04232073695151589 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.33617021276595743, + "acc_stderr": 0.03088161852067694, + "acc_norm": 0.33617021276595743, + "acc_norm_stderr": 0.03088161852067694 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3674698795180723, + "acc_stderr": 0.03753267402120574, + "acc_norm": 0.3674698795180723, + "acc_norm_stderr": 0.03753267402120574 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4437299035369775, + "acc_stderr": 0.02821768355665232, + "acc_norm": 0.4437299035369775, + "acc_norm_stderr": 0.02821768355665232 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4080717488789238, + "acc_stderr": 0.03298574607842822, + "acc_norm": 
0.4080717488789238, + "acc_norm_stderr": 0.03298574607842822 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4122137404580153, + "acc_stderr": 0.04317171194870255, + "acc_norm": 0.4122137404580153, + "acc_norm_stderr": 0.04317171194870255 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5505050505050505, + "acc_stderr": 0.035441324919479704, + "acc_norm": 0.5505050505050505, + "acc_norm_stderr": 0.035441324919479704 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5103448275862069, + "acc_stderr": 0.04165774775728763, + "acc_norm": 0.5103448275862069, + "acc_norm_stderr": 0.04165774775728763 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.18627450980392157, + "acc_stderr": 0.038739587141493524, + "acc_norm": 0.18627450980392157, + "acc_norm_stderr": 0.038739587141493524 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.42436974789915966, + "acc_stderr": 0.032104790510157764, + "acc_norm": 0.42436974789915966, + "acc_norm_stderr": 0.032104790510157764 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4307692307692308, + "acc_stderr": 0.02510682066053975, + "acc_norm": 0.4307692307692308, + "acc_norm_stderr": 0.02510682066053975 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.52, + "acc_stderr": 0.05021167315686779, + "acc_norm": 0.52, + "acc_norm_stderr": 0.05021167315686779 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.04826217294139894, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.04826217294139894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3842364532019704, + "acc_stderr": 
0.0342239856565755, + "acc_norm": 0.3842364532019704, + "acc_norm_stderr": 0.0342239856565755 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.42258064516129035, + "acc_stderr": 0.02810096472427264, + "acc_norm": 0.42258064516129035, + "acc_norm_stderr": 0.02810096472427264 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6623931623931624, + "acc_stderr": 0.030980296992618558, + "acc_norm": 0.6623931623931624, + "acc_norm_stderr": 0.030980296992618558 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4679245283018868, + "acc_stderr": 0.03070948699255655, + "acc_norm": 0.4679245283018868, + "acc_norm_stderr": 0.03070948699255655 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4727272727272727, + "acc_stderr": 0.04782001791380063, + "acc_norm": 0.4727272727272727, + "acc_norm_stderr": 0.04782001791380063 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.31851851851851853, + "acc_stderr": 0.02840653309060846, + "acc_norm": 0.31851851851851853, + "acc_norm_stderr": 0.02840653309060846 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.03780445850526733, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.03780445850526733 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5870646766169154, + "acc_stderr": 0.03481520803367348, + "acc_norm": 0.5870646766169154, + "acc_norm_stderr": 0.03481520803367348 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.41040462427745666, + "acc_stderr": 0.03750757044895538, + "acc_norm": 0.41040462427745666, + "acc_norm_stderr": 0.03750757044895538 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.32275132275132273, + "acc_stderr": 0.024078943243597016, + "acc_norm": 0.32275132275132273, + "acc_norm_stderr": 0.024078943243597016 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.040166600304512336, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.040166600304512336 + 
}, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.56, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.56, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4884393063583815, + "acc_stderr": 0.026911898686377913, + "acc_norm": 0.4884393063583815, + "acc_norm_stderr": 0.026911898686377913 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4110429447852761, + "acc_stderr": 0.038656978537853624, + "acc_norm": 0.4110429447852761, + "acc_norm_stderr": 0.038656978537853624 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.027339546640662727, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.027339546640662727 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.46632124352331605, + "acc_stderr": 0.036002440698671784, + "acc_norm": 0.46632124352331605, + "acc_norm_stderr": 0.036002440698671784 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2894736842105263, + "acc_stderr": 0.04266339443159394, + "acc_norm": 0.2894736842105263, + "acc_norm_stderr": 0.04266339443159394 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.46055045871559636, + "acc_stderr": 0.021370494609995093, + "acc_norm": 0.46055045871559636, + "acc_norm_stderr": 0.021370494609995093 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04216370213557835, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04216370213557835 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.43790849673202614, + "acc_stderr": 0.02840830202033269, + "acc_norm": 0.43790849673202614, + "acc_norm_stderr": 
0.02840830202033269 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6446280991735537, + "acc_stderr": 0.0436923632657398, + "acc_norm": 0.6446280991735537, + "acc_norm_stderr": 0.0436923632657398 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4144736842105263, + "acc_stderr": 0.04008973785779207, + "acc_norm": 0.4144736842105263, + "acc_norm_stderr": 0.04008973785779207 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3464052287581699, + "acc_stderr": 0.01924978569171721, + "acc_norm": 0.3464052287581699, + "acc_norm_stderr": 0.01924978569171721 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3404255319148936, + "acc_stderr": 0.028267657482650154, + "acc_norm": 0.3404255319148936, + "acc_norm_stderr": 0.028267657482650154 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.44642857142857145, + "acc_stderr": 0.047184714852195886, + "acc_norm": 0.44642857142857145, + "acc_norm_stderr": 0.047184714852195886 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3287037037037037, + "acc_stderr": 0.03203614084670058, + "acc_norm": 0.3287037037037037, + "acc_norm_stderr": 0.03203614084670058 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2748603351955307, + "acc_stderr": 0.01493131670322051, + "acc_norm": 0.2748603351955307, + "acc_norm_stderr": 0.01493131670322051 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.029520095697687758, + "acc_norm": 
0.38235294117647056, + "acc_norm_stderr": 0.029520095697687758 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.46938775510204084, + "acc_stderr": 0.031949171367580624, + "acc_norm": 0.46938775510204084, + "acc_norm_stderr": 0.031949171367580624 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5189873417721519, + "acc_stderr": 0.03252375148090447, + "acc_norm": 0.5189873417721519, + "acc_norm_stderr": 0.03252375148090447 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.29986962190352023, + "acc_stderr": 0.011702660860193986, + "acc_norm": 0.29986962190352023, + "acc_norm_stderr": 0.011702660860193986 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4117647058823529, + "acc_stderr": 0.034542365853806094, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.034542365853806094 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3878787878787879, + "acc_stderr": 0.038049136539710114, + "acc_norm": 0.3878787878787879, + "acc_norm_stderr": 0.038049136539710114 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2766217870257038, + "mc1_stderr": 0.015659605755326902, + "mc2": 0.4475458217061865, + "mc2_stderr": 0.015253457911461817 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.38961038961038963, + "acc_stderr": 0.0167661616718935, + "acc_norm": 0.4628099173553719, + "acc_norm_stderr": 0.017142736117643297 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + 
"harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + 
"harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "maywell/Synatra-11B-Testbench", + "model_sha": "9399ea6c2a1d955e31d6b4d68b2b86115aea0e59", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/maywell/Synatra-42dot-1.3B/result_2023-12-01 02:14:27.json b/maywell/Synatra-42dot-1.3B/result_2023-12-01 02:14:27.json new file mode 100644 index 0000000000000000000000000000000000000000..37144160eff5f55822e98d6be366492f712cf6a3 --- /dev/null +++ b/maywell/Synatra-42dot-1.3B/result_2023-12-01 02:14:27.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.26706484641638223, + "acc_stderr": 0.012928933196496342, + "acc_norm": 0.34897610921501704, + "acc_norm_stderr": 0.0139289334613825 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3573989245170285, + "acc_stderr": 0.004782542754102088, + "acc_norm": 0.45439155546703847, + "acc_norm_stderr": 0.004968979259738335 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.23976608187134502, + "acc_stderr": 0.03274485211946956, + "acc_norm": 0.23976608187134502, + "acc_norm_stderr": 0.03274485211946956 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.22330097087378642, + "acc_stderr": 0.04123553189891431, + "acc_norm": 0.22330097087378642, + "acc_norm_stderr": 0.04123553189891431 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.21455938697318008, + "acc_stderr": 0.014680033956893346, + "acc_norm": 0.21455938697318008, + "acc_norm_stderr": 0.014680033956893346 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3111111111111111, + "acc_stderr": 0.03999262876617721, + "acc_norm": 0.3111111111111111, + "acc_norm_stderr": 0.03999262876617721 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 
0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.28936170212765955, + "acc_stderr": 0.02964400657700962, + "acc_norm": 0.28936170212765955, + "acc_norm_stderr": 0.02964400657700962 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3433734939759036, + "acc_stderr": 0.03696584317010601, + "acc_norm": 0.3433734939759036, + "acc_norm_stderr": 0.03696584317010601 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2508038585209003, + "acc_stderr": 0.024619771956697165, + "acc_norm": 0.2508038585209003, + "acc_norm_stderr": 0.024619771956697165 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.2645739910313901, + "acc_stderr": 0.02960510321703835, + "acc_norm": 0.2645739910313901, + "acc_norm_stderr": 0.02960510321703835 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.25190839694656486, + "acc_stderr": 0.03807387116306085, + "acc_norm": 0.25190839694656486, + "acc_norm_stderr": 0.03807387116306085 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.21717171717171718, + "acc_stderr": 0.02937661648494563, + "acc_norm": 0.21717171717171718, + "acc_norm_stderr": 0.02937661648494563 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.25517241379310346, + "acc_stderr": 0.03632984052707842, + "acc_norm": 0.25517241379310346, + "acc_norm_stderr": 0.03632984052707842 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.04280105837364396, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.04280105837364396 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.25630252100840334, + "acc_stderr": 0.02835962087053395, + "acc_norm": 0.25630252100840334, + "acc_norm_stderr": 0.02835962087053395 + }, + 
"harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2717948717948718, + "acc_stderr": 0.022556551010132368, + "acc_norm": 0.2717948717948718, + "acc_norm_stderr": 0.022556551010132368 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.19, + "acc_stderr": 0.03942772444036623, + "acc_norm": 0.19, + "acc_norm_stderr": 0.03942772444036623 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.28703703703703703, + "acc_stderr": 0.043733130409147614, + "acc_norm": 0.28703703703703703, + "acc_norm_stderr": 0.043733130409147614 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.23645320197044334, + "acc_stderr": 0.029896114291733552, + "acc_norm": 0.23645320197044334, + "acc_norm_stderr": 0.029896114291733552 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.27419354838709675, + "acc_stderr": 0.025378139970885193, + "acc_norm": 0.27419354838709675, + "acc_norm_stderr": 0.025378139970885193 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2948717948717949, + "acc_stderr": 0.029872577708891155, + "acc_norm": 0.2948717948717949, + "acc_norm_stderr": 0.029872577708891155 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2037735849056604, + "acc_stderr": 0.02479078450177541, + "acc_norm": 0.2037735849056604, + "acc_norm_stderr": 0.02479078450177541 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.23636363636363636, + "acc_stderr": 0.04069306319721375, + "acc_norm": 0.23636363636363636, + "acc_norm_stderr": 0.04069306319721375 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24074074074074073, + "acc_stderr": 0.026067159222275805, + "acc_norm": 0.24074074074074073, + "acc_norm_stderr": 0.026067159222275805 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.03780445850526732, + "acc_norm": 
0.31125827814569534, + "acc_norm_stderr": 0.03780445850526732 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.25870646766169153, + "acc_stderr": 0.030965903123573033, + "acc_norm": 0.25870646766169153, + "acc_norm_stderr": 0.030965903123573033 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.19653179190751446, + "acc_stderr": 0.030299574664788137, + "acc_norm": 0.19653179190751446, + "acc_norm_stderr": 0.030299574664788137 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.23015873015873015, + "acc_stderr": 0.021679219663693152, + "acc_norm": 0.23015873015873015, + "acc_norm_stderr": 0.021679219663693152 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.24305555555555555, + "acc_stderr": 0.03586879280080341, + "acc_norm": 0.24305555555555555, + "acc_norm_stderr": 0.03586879280080341 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.23410404624277456, + "acc_stderr": 0.022797110278071134, + "acc_norm": 0.23410404624277456, + "acc_norm_stderr": 0.022797110278071134 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2822085889570552, + "acc_stderr": 0.03536117886664743, + "acc_norm": 0.2822085889570552, + "acc_norm_stderr": 0.03536117886664743 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.02492200116888633, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.02492200116888633 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.22797927461139897, + "acc_stderr": 
0.030276909945178267, + "acc_norm": 0.22797927461139897, + "acc_norm_stderr": 0.030276909945178267 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.04049339297748141, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.04049339297748141 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.2036697247706422, + "acc_stderr": 0.017266742087630797, + "acc_norm": 0.2036697247706422, + "acc_norm_stderr": 0.017266742087630797 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.23015873015873015, + "acc_stderr": 0.037649508797906066, + "acc_norm": 0.23015873015873015, + "acc_norm_stderr": 0.037649508797906066 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.21241830065359477, + "acc_stderr": 0.02342037547829613, + "acc_norm": 0.21241830065359477, + "acc_norm_stderr": 0.02342037547829613 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2892561983471074, + "acc_stderr": 0.041391127276354626, + "acc_norm": 0.2892561983471074, + "acc_norm_stderr": 0.041391127276354626 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.19078947368421054, + "acc_stderr": 0.03197565821032501, + "acc_norm": 0.19078947368421054, + "acc_norm_stderr": 0.03197565821032501 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2434640522875817, + "acc_stderr": 0.017362473762146623, + "acc_norm": 0.2434640522875817, + "acc_norm_stderr": 0.017362473762146623 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.22695035460992907, + "acc_stderr": 0.024987106365642962, + "acc_norm": 0.22695035460992907, + "acc_norm_stderr": 0.024987106365642962 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2767857142857143, + "acc_stderr": 0.04246624336697626, + "acc_norm": 0.2767857142857143, + "acc_norm_stderr": 0.04246624336697626 + }, + 
"harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.44907407407407407, + "acc_stderr": 0.03392238405321617, + "acc_norm": 0.44907407407407407, + "acc_norm_stderr": 0.03392238405321617 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27262569832402234, + "acc_stderr": 0.014893391735249608, + "acc_norm": 0.27262569832402234, + "acc_norm_stderr": 0.014893391735249608 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909284, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909284 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.33455882352941174, + "acc_stderr": 0.028661996202335307, + "acc_norm": 0.33455882352941174, + "acc_norm_stderr": 0.028661996202335307 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.22040816326530613, + "acc_stderr": 0.026537045312145287, + "acc_norm": 0.22040816326530613, + "acc_norm_stderr": 0.026537045312145287 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.29535864978902954, + "acc_stderr": 0.029696338713422882, + "acc_norm": 0.29535864978902954, + "acc_norm_stderr": 0.029696338713422882 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2438070404172099, + "acc_stderr": 0.010966507972178475, + "acc_norm": 0.2438070404172099, + "acc_norm_stderr": 0.010966507972178475 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.25, + "acc_stderr": 0.03039153369274154, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03039153369274154 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2727272727272727, + "acc_stderr": 0.0347769116216366, + "acc_norm": 0.2727272727272727, + "acc_norm_stderr": 0.0347769116216366 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2778457772337821, + "mc1_stderr": 0.015680929364024633, + "mc2": 0.4294408765617315, 
+ "mc2_stderr": 0.015039627065597595 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.26564344746162927, + "acc_stderr": 0.015185107107791255, + "acc_norm": 0.4002361275088548, + "acc_norm_stderr": 0.016844693510505056 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + 
"harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "maywell/Synatra-42dot-1.3B", + "model_sha": "8342dd3132ec87f12a229f83828f55bfcc0e5814", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/maywell/Synatra-7B-Instruct-v0.2/result_2023-10-12 03:42:18.json b/maywell/Synatra-7B-Instruct-v0.2/result_2023-10-12 03:42:18.json new file mode 100644 index 0000000000000000000000000000000000000000..467327f67fbcac98194815a0ac75d8ade72fbdc8 --- /dev/null +++ b/maywell/Synatra-7B-Instruct-v0.2/result_2023-10-12 03:42:18.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3626279863481229, + "acc_stderr": 0.014049106564955003, + "acc_norm": 0.4180887372013652, + "acc_norm_stderr": 0.014413988396996084 + }, + "harness|ko_hellaswag|10": { + "acc": 0.38149770961959767, + "acc_stderr": 0.00484761521647345, + "acc_norm": 0.49352718581955785, + "acc_norm_stderr": 
0.004989363276955168 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.47953216374269003, + "acc_stderr": 0.038316105328219316, + "acc_norm": 0.47953216374269003, + "acc_norm_stderr": 0.038316105328219316 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4854368932038835, + "acc_stderr": 0.04948637324026637, + "acc_norm": 0.4854368932038835, + "acc_norm_stderr": 0.04948637324026637 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5070242656449553, + "acc_stderr": 0.017878199003432214, + "acc_norm": 0.5070242656449553, + "acc_norm_stderr": 0.017878199003432214 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.43703703703703706, + "acc_stderr": 0.04284958639753399, + "acc_norm": 0.43703703703703706, + "acc_norm_stderr": 0.04284958639753399 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39574468085106385, + "acc_stderr": 0.03196758697835363, + "acc_norm": 0.39574468085106385, + "acc_norm_stderr": 0.03196758697835363 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3795180722891566, + "acc_stderr": 0.037777988227480165, + "acc_norm": 0.3795180722891566, + "acc_norm_stderr": 0.037777988227480165 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4790996784565916, + "acc_stderr": 0.028373270961069414, + "acc_norm": 0.4790996784565916, + "acc_norm_stderr": 0.028373270961069414 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4260089686098655, + "acc_stderr": 0.03318833286217281, + "acc_norm": 0.4260089686098655, + "acc_norm_stderr": 0.03318833286217281 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48854961832061067, + "acc_stderr": 0.043841400240780176, + "acc_norm": 0.48854961832061067, + "acc_norm_stderr": 0.043841400240780176 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 
0.04988876515698589 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5606060606060606, + "acc_stderr": 0.035360859475294805, + "acc_norm": 0.5606060606060606, + "acc_norm_stderr": 0.035360859475294805 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4896551724137931, + "acc_stderr": 0.04165774775728763, + "acc_norm": 0.4896551724137931, + "acc_norm_stderr": 0.04165774775728763 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.042801058373643945, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.042801058373643945 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.032252942323996406, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.032252942323996406 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.45384615384615384, + "acc_stderr": 0.02524277098712617, + "acc_norm": 0.45384615384615384, + "acc_norm_stderr": 0.02524277098712617 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.58, + "acc_stderr": 0.04960449637488583, + "acc_norm": 0.58, + "acc_norm_stderr": 0.04960449637488583 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3793103448275862, + "acc_stderr": 0.03413963805906235, + "acc_norm": 0.3793103448275862, + "acc_norm_stderr": 0.03413963805906235 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.44516129032258067, + "acc_stderr": 0.02827241018621491, + "acc_norm": 0.44516129032258067, + "acc_norm_stderr": 0.02827241018621491 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.030882736974138656, 
+ "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.030882736974138656 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5056603773584906, + "acc_stderr": 0.03077090076385131, + "acc_norm": 0.5056603773584906, + "acc_norm_stderr": 0.03077090076385131 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.45454545454545453, + "acc_stderr": 0.04769300568972743, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.04769300568972743 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3074074074074074, + "acc_stderr": 0.028133252578815635, + "acc_norm": 0.3074074074074074, + "acc_norm_stderr": 0.028133252578815635 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + "acc_stderr": 0.03684881521389023, + "acc_norm": 0.2847682119205298, + "acc_norm_stderr": 0.03684881521389023 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5870646766169154, + "acc_stderr": 0.03481520803367348, + "acc_norm": 0.5870646766169154, + "acc_norm_stderr": 0.03481520803367348 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4393063583815029, + "acc_stderr": 0.03784271932887468, + "acc_norm": 0.4393063583815029, + "acc_norm_stderr": 0.03784271932887468 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.34656084656084657, + "acc_stderr": 0.024508777521028414, + "acc_norm": 0.34656084656084657, + "acc_norm_stderr": 0.024508777521028414 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3541666666666667, + "acc_stderr": 0.039994111357535424, + "acc_norm": 0.3541666666666667, + "acc_norm_stderr": 0.039994111357535424 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5086705202312138, + 
"acc_stderr": 0.026915047355369804, + "acc_norm": 0.5086705202312138, + "acc_norm_stderr": 0.026915047355369804 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.36809815950920244, + "acc_stderr": 0.03789213935838396, + "acc_norm": 0.36809815950920244, + "acc_norm_stderr": 0.03789213935838396 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.02774431344337654, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.02774431344337654 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5077720207253886, + "acc_stderr": 0.036080032255696545, + "acc_norm": 0.5077720207253886, + "acc_norm_stderr": 0.036080032255696545 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2894736842105263, + "acc_stderr": 0.042663394431593955, + "acc_norm": 0.2894736842105263, + "acc_norm_stderr": 0.042663394431593955 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5100917431192661, + "acc_stderr": 0.021432956203453316, + "acc_norm": 0.5100917431192661, + "acc_norm_stderr": 0.021432956203453316 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3968253968253968, + "acc_stderr": 0.04375888492727062, + "acc_norm": 0.3968253968253968, + "acc_norm_stderr": 0.04375888492727062 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4738562091503268, + "acc_stderr": 0.028590752958852394, + "acc_norm": 0.4738562091503268, + "acc_norm_stderr": 0.028590752958852394 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6528925619834711, + "acc_stderr": 0.04345724570292534, + "acc_norm": 0.6528925619834711, + "acc_norm_stderr": 0.04345724570292534 + }, + "harness|ko_mmlu_astronomy|5": { + 
"acc": 0.42105263157894735, + "acc_stderr": 0.04017901275981748, + "acc_norm": 0.42105263157894735, + "acc_norm_stderr": 0.04017901275981748 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.39052287581699346, + "acc_stderr": 0.019737008998094593, + "acc_norm": 0.39052287581699346, + "acc_norm_stderr": 0.019737008998094593 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2978723404255319, + "acc_stderr": 0.027281608344469414, + "acc_norm": 0.2978723404255319, + "acc_norm_stderr": 0.027281608344469414 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.36607142857142855, + "acc_stderr": 0.0457237235873743, + "acc_norm": 0.36607142857142855, + "acc_norm_stderr": 0.0457237235873743 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.39814814814814814, + "acc_stderr": 0.033384734032074016, + "acc_norm": 0.39814814814814814, + "acc_norm_stderr": 0.033384734032074016 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.3139664804469274, + "acc_stderr": 0.015521923933523635, + "acc_norm": 0.3139664804469274, + "acc_norm_stderr": 0.015521923933523635 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4227941176470588, + "acc_stderr": 0.03000856284500348, + "acc_norm": 0.4227941176470588, + "acc_norm_stderr": 0.03000856284500348 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5346938775510204, + "acc_stderr": 0.03193207024425314, + "acc_norm": 0.5346938775510204, + "acc_norm_stderr": 0.03193207024425314 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5991561181434599, + "acc_stderr": 0.031900803894732356, + "acc_norm": 0.5991561181434599, + 
"acc_norm_stderr": 0.031900803894732356 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3050847457627119, + "acc_stderr": 0.01175993961808546, + "acc_norm": 0.3050847457627119, + "acc_norm_stderr": 0.01175993961808546 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4803921568627451, + "acc_stderr": 0.03506612560524866, + "acc_norm": 0.4803921568627451, + "acc_norm_stderr": 0.03506612560524866 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5212121212121212, + "acc_stderr": 0.03900828913737302, + "acc_norm": 0.5212121212121212, + "acc_norm_stderr": 0.03900828913737302 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.28886168910648713, + "mc1_stderr": 0.015866346401384304, + "mc2": 0.4577444189927008, + "mc2_stderr": 0.015214396697030213 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3754427390791027, + "acc_stderr": 0.016648411589511095, + "acc_norm": 0.43919716646989376, + "acc_norm_stderr": 0.0170627757447807 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + 
"harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "maywell/Synatra-7B-Instruct-v0.2", + "model_sha": "5ca980b650d75e7611bcb9299948bd86dd7bc381", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ 
No newline at end of file diff --git a/maywell/Synatra-7B-Instruct-v0.3-pre/result_2023-10-28 02:31:59.json b/maywell/Synatra-7B-Instruct-v0.3-pre/result_2023-10-28 02:31:59.json new file mode 100644 index 0000000000000000000000000000000000000000..f04d67fe768ba451086fe5aef8887fc8df3d97b6 --- /dev/null +++ b/maywell/Synatra-7B-Instruct-v0.3-pre/result_2023-10-28 02:31:59.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.40955631399317405, + "acc_stderr": 0.014370358632472447, + "acc_norm": 0.4726962457337884, + "acc_norm_stderr": 0.014589589101986 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4041027683728341, + "acc_stderr": 0.004897146690596247, + "acc_norm": 0.525592511451902, + "acc_norm_stderr": 0.004983240744101376 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.52046783625731, + "acc_stderr": 0.0383161053282193, + "acc_norm": 0.52046783625731, + "acc_norm_stderr": 0.0383161053282193 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6407766990291263, + "acc_stderr": 0.04750458399041696, + "acc_norm": 0.6407766990291263, + "acc_norm_stderr": 0.04750458399041696 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5721583652618135, + "acc_stderr": 0.017692787927803724, + "acc_norm": 0.5721583652618135, + "acc_norm_stderr": 0.017692787927803724 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4148148148148148, + "acc_stderr": 0.04256193767901407, + "acc_norm": 0.4148148148148148, + "acc_norm_stderr": 0.04256193767901407 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3872340425531915, + "acc_stderr": 0.03184389265339526, + "acc_norm": 0.3872340425531915, + "acc_norm_stderr": 0.03184389265339526 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42771084337349397, + "acc_stderr": 0.038515976837185335, + "acc_norm": 0.42771084337349397, + 
"acc_norm_stderr": 0.038515976837185335 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.49517684887459806, + "acc_stderr": 0.02839677044411129, + "acc_norm": 0.49517684887459806, + "acc_norm_stderr": 0.02839677044411129 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5336322869955157, + "acc_stderr": 0.033481800170603065, + "acc_norm": 0.5336322869955157, + "acc_norm_stderr": 0.033481800170603065 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.44274809160305345, + "acc_stderr": 0.04356447202665069, + "acc_norm": 0.44274809160305345, + "acc_norm_stderr": 0.04356447202665069 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5656565656565656, + "acc_stderr": 0.03531505879359183, + "acc_norm": 0.5656565656565656, + "acc_norm_stderr": 0.03531505879359183 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.45517241379310347, + "acc_stderr": 0.04149886942192117, + "acc_norm": 0.45517241379310347, + "acc_norm_stderr": 0.04149886942192117 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.04220773659171452, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.04220773659171452 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.46638655462184875, + "acc_stderr": 0.03240501447690071, + "acc_norm": 0.46638655462184875, + "acc_norm_stderr": 0.03240501447690071 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4794871794871795, + "acc_stderr": 0.025329663163489943, + "acc_norm": 0.4794871794871795, + "acc_norm_stderr": 0.025329663163489943 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.58, + "acc_stderr": 0.04960449637488583, + "acc_norm": 0.58, + "acc_norm_stderr": 0.04960449637488583 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + 
"acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.034819048444388045, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.034819048444388045 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.47419354838709676, + "acc_stderr": 0.028406095057653315, + "acc_norm": 0.47419354838709676, + "acc_norm_stderr": 0.028406095057653315 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7435897435897436, + "acc_stderr": 0.028605953702004257, + "acc_norm": 0.7435897435897436, + "acc_norm_stderr": 0.028605953702004257 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5018867924528302, + "acc_stderr": 0.030772653642075664, + "acc_norm": 0.5018867924528302, + "acc_norm_stderr": 0.030772653642075664 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5363636363636364, + "acc_stderr": 0.04776449162396197, + "acc_norm": 0.5363636363636364, + "acc_norm_stderr": 0.04776449162396197 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.32222222222222224, + "acc_stderr": 0.028493465091028593, + "acc_norm": 0.32222222222222224, + "acc_norm_stderr": 0.028493465091028593 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + "acc_stderr": 0.037345356767871984, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.037345356767871984 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6567164179104478, + "acc_stderr": 0.03357379665433431, + "acc_norm": 0.6567164179104478, + "acc_norm_stderr": 0.03357379665433431 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3872832369942196, + "acc_stderr": 0.03714325906302065, + "acc_norm": 0.3872832369942196, + "acc_norm_stderr": 0.03714325906302065 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 
0.4021164021164021, + "acc_stderr": 0.02525303255499768, + "acc_norm": 0.4021164021164021, + "acc_norm_stderr": 0.02525303255499768 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4236111111111111, + "acc_stderr": 0.041321250197233685, + "acc_norm": 0.4236111111111111, + "acc_norm_stderr": 0.041321250197233685 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.65, + "acc_stderr": 0.04793724854411019, + "acc_norm": 0.65, + "acc_norm_stderr": 0.04793724854411019 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5520231213872833, + "acc_stderr": 0.026772990653361816, + "acc_norm": 0.5520231213872833, + "acc_norm_stderr": 0.026772990653361816 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5644171779141104, + "acc_stderr": 0.03895632464138937, + "acc_norm": 0.5644171779141104, + "acc_norm_stderr": 0.03895632464138937 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5092592592592593, + "acc_stderr": 0.027815973433878014, + "acc_norm": 0.5092592592592593, + "acc_norm_stderr": 0.027815973433878014 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5440414507772021, + "acc_stderr": 0.03594413711272437, + "acc_norm": 0.5440414507772021, + "acc_norm_stderr": 0.03594413711272437 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3508771929824561, + "acc_stderr": 0.044895393502706986, + "acc_norm": 0.3508771929824561, + "acc_norm_stderr": 0.044895393502706986 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5669724770642202, + "acc_stderr": 0.021244146569074345, + "acc_norm": 0.5669724770642202, + "acc_norm_stderr": 0.021244146569074345 + }, + 
"harness|ko_mmlu_formal_logic|5": { + "acc": 0.38095238095238093, + "acc_stderr": 0.043435254289490965, + "acc_norm": 0.38095238095238093, + "acc_norm_stderr": 0.043435254289490965 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4869281045751634, + "acc_stderr": 0.028620130800700246, + "acc_norm": 0.4869281045751634, + "acc_norm_stderr": 0.028620130800700246 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.53, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.53, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6942148760330579, + "acc_stderr": 0.04205953933884123, + "acc_norm": 0.6942148760330579, + "acc_norm_stderr": 0.04205953933884123 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.506578947368421, + "acc_stderr": 0.040685900502249704, + "acc_norm": 0.506578947368421, + "acc_norm_stderr": 0.040685900502249704 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.42320261437908496, + "acc_stderr": 0.01998780976948206, + "acc_norm": 0.42320261437908496, + "acc_norm_stderr": 0.01998780976948206 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.35106382978723405, + "acc_stderr": 0.028473501272963768, + "acc_norm": 0.35106382978723405, + "acc_norm_stderr": 0.028473501272963768 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.5, + "acc_stderr": 0.04745789978762494, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04745789978762494 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3472222222222222, + "acc_stderr": 0.032468872436376486, + "acc_norm": 0.3472222222222222, + "acc_norm_stderr": 0.032468872436376486 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.20558659217877095, + "acc_stderr": 0.013516116210724202, + "acc_norm": 0.20558659217877095, + "acc_norm_stderr": 0.013516116210724202 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 
0.048783173121456316 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.64, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.64, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3860294117647059, + "acc_stderr": 0.029573269134411124, + "acc_norm": 0.3860294117647059, + "acc_norm_stderr": 0.029573269134411124 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5510204081632653, + "acc_stderr": 0.03184213866687579, + "acc_norm": 0.5510204081632653, + "acc_norm_stderr": 0.03184213866687579 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6962025316455697, + "acc_stderr": 0.029936696387138625, + "acc_norm": 0.6962025316455697, + "acc_norm_stderr": 0.029936696387138625 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.35071707953063885, + "acc_stderr": 0.012187773370741523, + "acc_norm": 0.35071707953063885, + "acc_norm_stderr": 0.012187773370741523 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.49019607843137253, + "acc_stderr": 0.03508637358630573, + "acc_norm": 0.49019607843137253, + "acc_norm_stderr": 0.03508637358630573 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5515151515151515, + "acc_stderr": 0.038835659779569286, + "acc_norm": 0.5515151515151515, + "acc_norm_stderr": 0.038835659779569286 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3157894736842105, + "mc1_stderr": 0.01627228795791694, + "mc2": 0.47669627022567646, + "mc2_stderr": 0.015363718738683547 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4557260920897285, + "acc_stderr": 0.017122829143292648, + "acc_norm": 0.4852420306965762, + "acc_norm_stderr": 0.017182864434998564 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + 
"harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + 
"harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "maywell/Synatra-7B-Instruct-v0.3-pre", + "model_sha": "273566d120a8db90bc734aba20ef6e553ed9a9ab", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/maywell/Synatra-7B-Instruct-v0.3-pre2/result_2023-10-29 03:09:00.json b/maywell/Synatra-7B-Instruct-v0.3-pre2/result_2023-10-29 03:09:00.json new file mode 100644 index 0000000000000000000000000000000000000000..9e6a81783a63417afac190ba48bbf960a9fe510b --- /dev/null +++ b/maywell/Synatra-7B-Instruct-v0.3-pre2/result_2023-10-29 03:09:00.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.41467576791808874, + "acc_stderr": 0.01439707056440917, + "acc_norm": 0.46075085324232085, + "acc_norm_stderr": 0.014566303676636588 + }, + "harness|ko_hellaswag|10": { + "acc": 0.40440151364270066, + "acc_stderr": 0.0048977283707372365, + "acc_norm": 0.5306711810396335, + "acc_norm_stderr": 0.004980384575535378 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5263157894736842, + "acc_stderr": 0.03829509868994727, + "acc_norm": 0.5263157894736842, + "acc_norm_stderr": 0.03829509868994727 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5922330097087378, + "acc_stderr": 0.04865777570410769, + "acc_norm": 0.5922330097087378, + "acc_norm_stderr": 0.04865777570410769 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5683269476372924, + 
"acc_stderr": 0.017712228939299794, + "acc_norm": 0.5683269476372924, + "acc_norm_stderr": 0.017712228939299794 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.04244633238353229, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.04244633238353229 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542126, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542126 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3617021276595745, + "acc_stderr": 0.031410821975962386, + "acc_norm": 0.3617021276595745, + "acc_norm_stderr": 0.031410821975962386 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.41566265060240964, + "acc_stderr": 0.03836722176598052, + "acc_norm": 0.41566265060240964, + "acc_norm_stderr": 0.03836722176598052 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5080385852090032, + "acc_stderr": 0.028394421370984538, + "acc_norm": 0.5080385852090032, + "acc_norm_stderr": 0.028394421370984538 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5426008968609866, + "acc_stderr": 0.033435777055830646, + "acc_norm": 0.5426008968609866, + "acc_norm_stderr": 0.033435777055830646 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48091603053435117, + "acc_stderr": 0.04382094705550988, + "acc_norm": 0.48091603053435117, + "acc_norm_stderr": 0.04382094705550988 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5959595959595959, + "acc_stderr": 0.03496130972056128, + "acc_norm": 0.5959595959595959, + "acc_norm_stderr": 0.03496130972056128 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4896551724137931, + "acc_stderr": 0.041657747757287644, + "acc_norm": 0.4896551724137931, + "acc_norm_stderr": 0.041657747757287644 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 
0.22549019607843138, + "acc_stderr": 0.041583075330832865, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.041583075330832865 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.47058823529411764, + "acc_stderr": 0.03242225027115006, + "acc_norm": 0.47058823529411764, + "acc_norm_stderr": 0.03242225027115006 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.44871794871794873, + "acc_stderr": 0.025217315184846475, + "acc_norm": 0.44871794871794873, + "acc_norm_stderr": 0.025217315184846475 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5277777777777778, + "acc_stderr": 0.04826217294139894, + "acc_norm": 0.5277777777777778, + "acc_norm_stderr": 0.04826217294139894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.43842364532019706, + "acc_stderr": 0.03491207857486519, + "acc_norm": 0.43842364532019706, + "acc_norm_stderr": 0.03491207857486519 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4806451612903226, + "acc_stderr": 0.0284226874043121, + "acc_norm": 0.4806451612903226, + "acc_norm_stderr": 0.0284226874043121 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7264957264957265, + "acc_stderr": 0.029202540153431187, + "acc_norm": 0.7264957264957265, + "acc_norm_stderr": 0.029202540153431187 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5094339622641509, + "acc_stderr": 0.0307673947078081, + "acc_norm": 0.5094339622641509, + "acc_norm_stderr": 0.0307673947078081 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5636363636363636, + "acc_stderr": 0.04750185058907296, + "acc_norm": 0.5636363636363636, + "acc_norm_stderr": 0.04750185058907296 + }, + 
"harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.02831753349606649, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.02831753349606649 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + "acc_stderr": 0.03684881521389023, + "acc_norm": 0.2847682119205298, + "acc_norm_stderr": 0.03684881521389023 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6417910447761194, + "acc_stderr": 0.03390393042268814, + "acc_norm": 0.6417910447761194, + "acc_norm_stderr": 0.03390393042268814 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3815028901734104, + "acc_stderr": 0.03703851193099521, + "acc_norm": 0.3815028901734104, + "acc_norm_stderr": 0.03703851193099521 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.42328042328042326, + "acc_stderr": 0.025446365634406776, + "acc_norm": 0.42328042328042326, + "acc_norm_stderr": 0.025446365634406776 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4097222222222222, + "acc_stderr": 0.04112490974670787, + "acc_norm": 0.4097222222222222, + "acc_norm_stderr": 0.04112490974670787 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.67, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.67, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5375722543352601, + "acc_stderr": 0.026842985519615375, + "acc_norm": 0.5375722543352601, + "acc_norm_stderr": 0.026842985519615375 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5766871165644172, + "acc_stderr": 0.03881891213334384, + "acc_norm": 0.5766871165644172, + "acc_norm_stderr": 0.03881891213334384 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5308641975308642, + "acc_stderr": 0.027767689606833918, + "acc_norm": 0.5308641975308642, + "acc_norm_stderr": 
0.027767689606833918 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.533678756476684, + "acc_stderr": 0.03600244069867178, + "acc_norm": 0.533678756476684, + "acc_norm_stderr": 0.03600244069867178 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.35964912280701755, + "acc_stderr": 0.04514496132873633, + "acc_norm": 0.35964912280701755, + "acc_norm_stderr": 0.04514496132873633 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5339449541284403, + "acc_stderr": 0.021387863350353996, + "acc_norm": 0.5339449541284403, + "acc_norm_stderr": 0.021387863350353996 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3968253968253968, + "acc_stderr": 0.04375888492727061, + "acc_norm": 0.3968253968253968, + "acc_norm_stderr": 0.04375888492727061 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5196078431372549, + "acc_stderr": 0.028607893699576073, + "acc_norm": 0.5196078431372549, + "acc_norm_stderr": 0.028607893699576073 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6446280991735537, + "acc_stderr": 0.0436923632657398, + "acc_norm": 0.6446280991735537, + "acc_norm_stderr": 0.0436923632657398 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.48026315789473684, + "acc_stderr": 0.040657710025626057, + "acc_norm": 0.48026315789473684, + "acc_norm_stderr": 0.040657710025626057 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3954248366013072, + "acc_stderr": 0.01978046595477752, + "acc_norm": 0.3954248366013072, + "acc_norm_stderr": 0.01978046595477752 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3475177304964539, + "acc_stderr": 0.02840662780959095, + "acc_norm": 
0.3475177304964539, + "acc_norm_stderr": 0.02840662780959095 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.45535714285714285, + "acc_stderr": 0.04726835553719099, + "acc_norm": 0.45535714285714285, + "acc_norm_stderr": 0.04726835553719099 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3194444444444444, + "acc_stderr": 0.03179876342176853, + "acc_norm": 0.3194444444444444, + "acc_norm_stderr": 0.03179876342176853 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.18659217877094972, + "acc_stderr": 0.013029631416358352, + "acc_norm": 0.18659217877094972, + "acc_norm_stderr": 0.013029631416358352 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.36764705882352944, + "acc_stderr": 0.029289413409403192, + "acc_norm": 0.36764705882352944, + "acc_norm_stderr": 0.029289413409403192 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5306122448979592, + "acc_stderr": 0.031949171367580624, + "acc_norm": 0.5306122448979592, + "acc_norm_stderr": 0.031949171367580624 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6455696202531646, + "acc_stderr": 0.031137304297185798, + "acc_norm": 0.6455696202531646, + "acc_norm_stderr": 0.031137304297185798 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.35723598435462844, + "acc_stderr": 0.012238615750316506, + "acc_norm": 0.35723598435462844, + "acc_norm_stderr": 0.012238615750316506 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4950980392156863, + "acc_stderr": 0.03509143375606786, + "acc_norm": 0.4950980392156863, + "acc_norm_stderr": 0.03509143375606786 + }, + 
"harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5212121212121212, + "acc_stderr": 0.03900828913737301, + "acc_norm": 0.5212121212121212, + "acc_norm_stderr": 0.03900828913737301 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.30354957160342716, + "mc1_stderr": 0.016095884155386844, + "mc2": 0.4635110137476736, + "mc2_stderr": 0.015409043308668458 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.42857142857142855, + "acc_stderr": 0.017014038119297484, + "acc_norm": 0.4651711924439197, + "acc_norm_stderr": 0.017148598015747422 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + 
"harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "maywell/Synatra-7B-Instruct-v0.3-pre2", + "model_sha": "a1d319cbfba59887acde520207c79d8057711a13", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/maywell/Synatra-7B-Instruct-v0.3/result_2023-10-29 07:25:02.json b/maywell/Synatra-7B-Instruct-v0.3/result_2023-10-29 07:25:02.json new file mode 100644 index 0000000000000000000000000000000000000000..f9a8423dfa5b3c3cda68300a2e60411ea203d039 --- /dev/null +++ b/maywell/Synatra-7B-Instruct-v0.3/result_2023-10-29 07:25:02.json @@ -0,0 +1,444 @@ +{ + "results": { + 
"harness|ko_arc_challenge|25": { + "acc": 0.3967576791808874, + "acc_stderr": 0.014296513020180635, + "acc_norm": 0.454778156996587, + "acc_norm_stderr": 0.014551507060836357 + }, + "harness|ko_hellaswag|10": { + "acc": 0.391256721768572, + "acc_stderr": 0.004870342592915051, + "acc_norm": 0.5191196972714599, + "acc_norm_stderr": 0.004986131919673969 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.543859649122807, + "acc_stderr": 0.03820042586602966, + "acc_norm": 0.543859649122807, + "acc_norm_stderr": 0.03820042586602966 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6213592233009708, + "acc_stderr": 0.04802694698258975, + "acc_norm": 0.6213592233009708, + "acc_norm_stderr": 0.04802694698258975 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5530012771392082, + "acc_stderr": 0.017779225233394216, + "acc_norm": 0.5530012771392082, + "acc_norm_stderr": 0.017779225233394216 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.43703703703703706, + "acc_stderr": 0.04284958639753399, + "acc_norm": 0.43703703703703706, + "acc_norm_stderr": 0.04284958639753399 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.40425531914893614, + "acc_stderr": 0.032081157507886836, + "acc_norm": 0.40425531914893614, + "acc_norm_stderr": 0.032081157507886836 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4397590361445783, + "acc_stderr": 0.03864139923699121, + "acc_norm": 0.4397590361445783, + "acc_norm_stderr": 0.03864139923699121 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5016077170418006, + "acc_stderr": 0.02839794490780661, + "acc_norm": 0.5016077170418006, + "acc_norm_stderr": 0.02839794490780661 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.47085201793721976, + "acc_stderr": 0.03350073248773404, + "acc_norm": 0.47085201793721976, + "acc_norm_stderr": 0.03350073248773404 + }, + 
"harness|ko_mmlu_human_sexuality|5": { + "acc": 0.40458015267175573, + "acc_stderr": 0.043046937953806645, + "acc_norm": 0.40458015267175573, + "acc_norm_stderr": 0.043046937953806645 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5959595959595959, + "acc_stderr": 0.03496130972056128, + "acc_norm": 0.5959595959595959, + "acc_norm_stderr": 0.03496130972056128 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4827586206896552, + "acc_stderr": 0.04164188720169377, + "acc_norm": 0.4827586206896552, + "acc_norm_stderr": 0.04164188720169377 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.041583075330832865, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.041583075330832865 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.46218487394957986, + "acc_stderr": 0.032385469487589795, + "acc_norm": 0.46218487394957986, + "acc_norm_stderr": 0.032385469487589795 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4307692307692308, + "acc_stderr": 0.025106820660539743, + "acc_norm": 0.4307692307692308, + "acc_norm_stderr": 0.025106820660539743 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.63, + "acc_stderr": 0.048523658709390974, + "acc_norm": 0.63, + "acc_norm_stderr": 0.048523658709390974 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3694581280788177, + "acc_stderr": 0.03395970381998575, + "acc_norm": 0.3694581280788177, + 
"acc_norm_stderr": 0.03395970381998575 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4967741935483871, + "acc_stderr": 0.02844341422643833, + "acc_norm": 0.4967741935483871, + "acc_norm_stderr": 0.02844341422643833 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7136752136752137, + "acc_stderr": 0.029614323690456648, + "acc_norm": 0.7136752136752137, + "acc_norm_stderr": 0.029614323690456648 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.47924528301886793, + "acc_stderr": 0.030746349975723463, + "acc_norm": 0.47924528301886793, + "acc_norm_stderr": 0.030746349975723463 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.45454545454545453, + "acc_stderr": 0.04769300568972743, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.04769300568972743 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.31851851851851853, + "acc_stderr": 0.02840653309060846, + "acc_norm": 0.31851851851851853, + "acc_norm_stderr": 0.02840653309060846 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33112582781456956, + "acc_stderr": 0.038425817186598696, + "acc_norm": 0.33112582781456956, + "acc_norm_stderr": 0.038425817186598696 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5970149253731343, + "acc_stderr": 0.034683432951111266, + "acc_norm": 0.5970149253731343, + "acc_norm_stderr": 0.034683432951111266 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.37572254335260113, + "acc_stderr": 0.03692820767264867, + "acc_norm": 0.37572254335260113, + "acc_norm_stderr": 0.03692820767264867 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.4126984126984127, + "acc_stderr": 0.02535574126305527, + "acc_norm": 0.4126984126984127, + "acc_norm_stderr": 0.02535574126305527 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3958333333333333, + "acc_stderr": 0.04089465449325583, + "acc_norm": 0.3958333333333333, + "acc_norm_stderr": 0.04089465449325583 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 
0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5317919075144508, + "acc_stderr": 0.026864624366756653, + "acc_norm": 0.5317919075144508, + "acc_norm_stderr": 0.026864624366756653 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5153374233128835, + "acc_stderr": 0.03926522378708843, + "acc_norm": 0.5153374233128835, + "acc_norm_stderr": 0.03926522378708843 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.027815973433878014, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.027815973433878014 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.538860103626943, + "acc_stderr": 0.03597524411734578, + "acc_norm": 0.538860103626943, + "acc_norm_stderr": 0.03597524411734578 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2894736842105263, + "acc_stderr": 0.04266339443159394, + "acc_norm": 0.2894736842105263, + "acc_norm_stderr": 0.04266339443159394 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5100917431192661, + "acc_stderr": 0.021432956203453313, + "acc_norm": 0.5100917431192661, + "acc_norm_stderr": 0.021432956203453313 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3412698412698413, + "acc_stderr": 0.04240799327574925, + "acc_norm": 0.3412698412698413, + "acc_norm_stderr": 0.04240799327574925 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4869281045751634, + "acc_stderr": 0.028620130800700246, + "acc_norm": 0.4869281045751634, + "acc_norm_stderr": 0.028620130800700246 + }, + "harness|ko_mmlu_business_ethics|5": { + 
"acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.628099173553719, + "acc_stderr": 0.04412015806624504, + "acc_norm": 0.628099173553719, + "acc_norm_stderr": 0.04412015806624504 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4144736842105263, + "acc_stderr": 0.04008973785779206, + "acc_norm": 0.4144736842105263, + "acc_norm_stderr": 0.04008973785779206 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3741830065359477, + "acc_stderr": 0.019576953122088837, + "acc_norm": 0.3741830065359477, + "acc_norm_stderr": 0.019576953122088837 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.35815602836879434, + "acc_stderr": 0.02860208586275941, + "acc_norm": 0.35815602836879434, + "acc_norm_stderr": 0.02860208586275941 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4732142857142857, + "acc_stderr": 0.047389751192741546, + "acc_norm": 0.4732142857142857, + "acc_norm_stderr": 0.047389751192741546 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3287037037037037, + "acc_stderr": 0.032036140846700596, + "acc_norm": 0.3287037037037037, + "acc_norm_stderr": 0.032036140846700596 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.19776536312849163, + "acc_stderr": 0.013321620594050947, + "acc_norm": 0.19776536312849163, + "acc_norm_stderr": 0.013321620594050947 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.57, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.57, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.31985294117647056, + "acc_stderr": 0.028332959514031218, + "acc_norm": 0.31985294117647056, + "acc_norm_stderr": 0.028332959514031218 + }, + 
"harness|ko_mmlu_security_studies|5": { + "acc": 0.5306122448979592, + "acc_stderr": 0.031949171367580624, + "acc_norm": 0.5306122448979592, + "acc_norm_stderr": 0.031949171367580624 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6244725738396625, + "acc_stderr": 0.03152256243091157, + "acc_norm": 0.6244725738396625, + "acc_norm_stderr": 0.03152256243091157 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3285528031290743, + "acc_stderr": 0.01199602724750293, + "acc_norm": 0.3285528031290743, + "acc_norm_stderr": 0.01199602724750293 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5049019607843137, + "acc_stderr": 0.03509143375606786, + "acc_norm": 0.5049019607843137, + "acc_norm_stderr": 0.03509143375606786 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.47878787878787876, + "acc_stderr": 0.03900828913737302, + "acc_norm": 0.47878787878787876, + "acc_norm_stderr": 0.03900828913737302 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.27906976744186046, + "mc1_stderr": 0.015702107090627877, + "mc2": 0.4513846769181087, + "mc2_stderr": 0.015331162068993385 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.34238488783943327, + "acc_stderr": 0.016313907844146373, + "acc_norm": 0.39315230224321135, + "acc_norm_stderr": 0.016793262801287078 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, 
+ "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + 
"harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "maywell/Synatra-7B-Instruct-v0.3", + "model_sha": "2d31bde8f1bfedb47c3761918b6e3189e3f61acd", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/maywell/Synatra-7B-v0.3-QA/result_2023-12-26 07:29:10.json b/maywell/Synatra-7B-v0.3-QA/result_2023-12-26 07:29:10.json new file mode 100644 index 0000000000000000000000000000000000000000..45ac6c95cbca7cd4692d58f5196b1cf790e008a2 --- /dev/null +++ b/maywell/Synatra-7B-v0.3-QA/result_2023-12-26 07:29:10.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4180887372013652, + "acc_stderr": 0.014413988396996076, + "acc_norm": 0.46757679180887374, + "acc_norm_stderr": 0.01458063756999542 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4078868751244772, + "acc_stderr": 0.004904375631128869, + "acc_norm": 0.5302728540131448, + "acc_norm_stderr": 0.004980627287147575 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5146198830409356, + "acc_stderr": 0.03833185275213025, + "acc_norm": 0.5146198830409356, + "acc_norm_stderr": 0.03833185275213025 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5922330097087378, + "acc_stderr": 0.04865777570410769, + "acc_norm": 0.5922330097087378, + "acc_norm_stderr": 0.04865777570410769 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5683269476372924, + "acc_stderr": 0.0177122289392998, + "acc_norm": 0.5683269476372924, + "acc_norm_stderr": 0.0177122289392998 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4148148148148148, + "acc_stderr": 0.04256193767901407, + "acc_norm": 0.4148148148148148, + "acc_norm_stderr": 0.04256193767901407 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + 
"harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.37446808510638296, + "acc_stderr": 0.031639106653672915, + "acc_norm": 0.37446808510638296, + "acc_norm_stderr": 0.031639106653672915 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.37349397590361444, + "acc_stderr": 0.037658451171688624, + "acc_norm": 0.37349397590361444, + "acc_norm_stderr": 0.037658451171688624 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5176848874598071, + "acc_stderr": 0.028380322849077138, + "acc_norm": 0.5176848874598071, + "acc_norm_stderr": 0.028380322849077138 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5426008968609866, + "acc_stderr": 0.03343577705583065, + "acc_norm": 0.5426008968609866, + "acc_norm_stderr": 0.03343577705583065 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4198473282442748, + "acc_stderr": 0.04328577215262972, + "acc_norm": 0.4198473282442748, + "acc_norm_stderr": 0.04328577215262972 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.601010101010101, + "acc_stderr": 0.03488901616852731, + "acc_norm": 0.601010101010101, + "acc_norm_stderr": 0.03488901616852731 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4896551724137931, + "acc_stderr": 0.04165774775728763, + "acc_norm": 0.4896551724137931, + "acc_norm_stderr": 0.04165774775728763 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.040925639582376556, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.040925639582376556 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.49159663865546216, + "acc_stderr": 0.032473902765696686, + "acc_norm": 0.49159663865546216, + "acc_norm_stderr": 0.032473902765696686 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4717948717948718, + "acc_stderr": 0.025310639254933917, + 
"acc_norm": 0.4717948717948718, + "acc_norm_stderr": 0.025310639254933917 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.59, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.59, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5370370370370371, + "acc_stderr": 0.04820403072760627, + "acc_norm": 0.5370370370370371, + "acc_norm_stderr": 0.04820403072760627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4187192118226601, + "acc_stderr": 0.03471192860518468, + "acc_norm": 0.4187192118226601, + "acc_norm_stderr": 0.03471192860518468 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4645161290322581, + "acc_stderr": 0.02837228779796295, + "acc_norm": 0.4645161290322581, + "acc_norm_stderr": 0.02837228779796295 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7307692307692307, + "acc_stderr": 0.029058588303748845, + "acc_norm": 0.7307692307692307, + "acc_norm_stderr": 0.029058588303748845 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4830188679245283, + "acc_stderr": 0.030755120364119905, + "acc_norm": 0.4830188679245283, + "acc_norm_stderr": 0.030755120364119905 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5181818181818182, + "acc_stderr": 0.04785964010794916, + "acc_norm": 0.5181818181818182, + "acc_norm_stderr": 0.04785964010794916 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.32222222222222224, + "acc_stderr": 0.028493465091028593, + "acc_norm": 0.32222222222222224, + "acc_norm_stderr": 0.028493465091028593 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2781456953642384, + "acc_stderr": 0.03658603262763743, + "acc_norm": 0.2781456953642384, + "acc_norm_stderr": 0.03658603262763743 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6318407960199005, + "acc_stderr": 
0.03410410565495302, + "acc_norm": 0.6318407960199005, + "acc_norm_stderr": 0.03410410565495302 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4046242774566474, + "acc_stderr": 0.03742461193887248, + "acc_norm": 0.4046242774566474, + "acc_norm_stderr": 0.03742461193887248 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.41534391534391535, + "acc_stderr": 0.02537952491077838, + "acc_norm": 0.41534391534391535, + "acc_norm_stderr": 0.02537952491077838 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3958333333333333, + "acc_stderr": 0.04089465449325582, + "acc_norm": 0.3958333333333333, + "acc_norm_stderr": 0.04089465449325582 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.67, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.67, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4913294797687861, + "acc_stderr": 0.02691504735536981, + "acc_norm": 0.4913294797687861, + "acc_norm_stderr": 0.02691504735536981 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.558282208588957, + "acc_stderr": 0.03901591825836184, + "acc_norm": 0.558282208588957, + "acc_norm_stderr": 0.03901591825836184 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.027815973433878014, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.027815973433878014 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5958549222797928, + "acc_stderr": 0.0354150857888402, + "acc_norm": 0.5958549222797928, + "acc_norm_stderr": 0.0354150857888402 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + 
"acc_stderr": 0.04142439719489361, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.04142439719489361 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5339449541284403, + "acc_stderr": 0.021387863350353996, + "acc_norm": 0.5339449541284403, + "acc_norm_stderr": 0.021387863350353996 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3492063492063492, + "acc_stderr": 0.04263906892795133, + "acc_norm": 0.3492063492063492, + "acc_norm_stderr": 0.04263906892795133 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4673202614379085, + "acc_stderr": 0.028568699752225868, + "acc_norm": 0.4673202614379085, + "acc_norm_stderr": 0.028568699752225868 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.043913262867240704, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.043913262867240704 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5263157894736842, + "acc_stderr": 0.040633027314866725, + "acc_norm": 0.5263157894736842, + "acc_norm_stderr": 0.040633027314866725 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4035947712418301, + "acc_stderr": 0.019848280168401154, + "acc_norm": 0.4035947712418301, + "acc_norm_stderr": 0.019848280168401154 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.028121636040639882, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.028121636040639882 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.49107142857142855, + "acc_stderr": 0.04745033255489122, + "acc_norm": 0.49107142857142855, + "acc_norm_stderr": 0.04745033255489122 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.031674687068289804, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.031674687068289804 + }, + 
"harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.18324022346368715, + "acc_stderr": 0.01293864561306638, + "acc_norm": 0.18324022346368715, + "acc_norm_stderr": 0.01293864561306638 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.35661764705882354, + "acc_stderr": 0.029097209568411945, + "acc_norm": 0.35661764705882354, + "acc_norm_stderr": 0.029097209568411945 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5673469387755102, + "acc_stderr": 0.031717528240626645, + "acc_norm": 0.5673469387755102, + "acc_norm_stderr": 0.031717528240626645 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6708860759493671, + "acc_stderr": 0.03058732629470236, + "acc_norm": 0.6708860759493671, + "acc_norm_stderr": 0.03058732629470236 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.34028683181225555, + "acc_stderr": 0.012101217610223793, + "acc_norm": 0.34028683181225555, + "acc_norm_stderr": 0.012101217610223793 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4950980392156863, + "acc_stderr": 0.035091433756067866, + "acc_norm": 0.4950980392156863, + "acc_norm_stderr": 0.035091433756067866 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.03903698647748441, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.03903698647748441 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3023255813953488, + "mc1_stderr": 0.016077509266133033, + "mc2": 0.47247339232752167, + "mc2_stderr": 0.015527772167329246 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.42266824085005905, + "acc_stderr": 0.0169835060795776, + "acc_norm": 
0.4427390791027155, + "acc_norm_stderr": 0.01707725413155622 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + 
"harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "maywell/Synatra-7B-v0.3-QA", + "model_sha": "28bb95667c88f4c80b3903cfb0c3a7433f821311", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/maywell/Synatra-7B-v0.3-RP/result_2024-05-13 21:03:02.json b/maywell/Synatra-7B-v0.3-RP/result_2024-05-13 21:03:02.json new file mode 100644 index 0000000000000000000000000000000000000000..d8893c0c3268e0aaaa0f5dd1f212a62974ce4aff --- /dev/null +++ b/maywell/Synatra-7B-v0.3-RP/result_2024-05-13 21:03:02.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3967576791808874, + "acc_stderr": 0.014296513020180635, + "acc_norm": 0.4564846416382253, + "acc_norm_stderr": 0.014555949760496435 + }, + "harness|ko_hellaswag|10": { + "acc": 0.39145588528181635, + "acc_stderr": 0.004870785036708286, + "acc_norm": 0.5191196972714599, + "acc_norm_stderr": 0.004986131919673969 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.543859649122807, + "acc_stderr": 0.03820042586602966, + "acc_norm": 0.543859649122807, + "acc_norm_stderr": 
0.03820042586602966 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6213592233009708, + "acc_stderr": 0.04802694698258975, + "acc_norm": 0.6213592233009708, + "acc_norm_stderr": 0.04802694698258975 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5517241379310345, + "acc_stderr": 0.01778403453499243, + "acc_norm": 0.5517241379310345, + "acc_norm_stderr": 0.01778403453499243 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.04292596718256981, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.04292596718256981 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.40425531914893614, + "acc_stderr": 0.032081157507886836, + "acc_norm": 0.40425531914893614, + "acc_norm_stderr": 0.032081157507886836 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4457831325301205, + "acc_stderr": 0.03869543323472101, + "acc_norm": 0.4457831325301205, + "acc_norm_stderr": 0.03869543323472101 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5048231511254019, + "acc_stderr": 0.028396770444111298, + "acc_norm": 0.5048231511254019, + "acc_norm_stderr": 0.028396770444111298 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.47085201793721976, + "acc_stderr": 0.03350073248773404, + "acc_norm": 0.47085201793721976, + "acc_norm_stderr": 0.03350073248773404 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.40458015267175573, + "acc_stderr": 0.043046937953806645, + "acc_norm": 0.40458015267175573, + "acc_norm_stderr": 0.043046937953806645 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5959595959595959, + "acc_stderr": 0.03496130972056128, + "acc_norm": 0.5959595959595959, + "acc_norm_stderr": 
0.03496130972056128 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4827586206896552, + "acc_stderr": 0.04164188720169377, + "acc_norm": 0.4827586206896552, + "acc_norm_stderr": 0.04164188720169377 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.041583075330832865, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.041583075330832865 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.46218487394957986, + "acc_stderr": 0.032385469487589795, + "acc_norm": 0.46218487394957986, + "acc_norm_stderr": 0.032385469487589795 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4307692307692308, + "acc_stderr": 0.025106820660539743, + "acc_norm": 0.4307692307692308, + "acc_norm_stderr": 0.025106820660539743 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.63, + "acc_stderr": 0.048523658709390974, + "acc_norm": 0.63, + "acc_norm_stderr": 0.048523658709390974 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3694581280788177, + "acc_stderr": 0.03395970381998575, + "acc_norm": 0.3694581280788177, + "acc_norm_stderr": 0.03395970381998575 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4967741935483871, + "acc_stderr": 0.02844341422643833, + "acc_norm": 0.4967741935483871, + "acc_norm_stderr": 0.02844341422643833 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7094017094017094, + "acc_stderr": 0.02974504857267406, + "acc_norm": 0.7094017094017094, + "acc_norm_stderr": 0.02974504857267406 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.47924528301886793, + "acc_stderr": 0.030746349975723463, + 
"acc_norm": 0.47924528301886793, + "acc_norm_stderr": 0.030746349975723463 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.45454545454545453, + "acc_stderr": 0.04769300568972743, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.04769300568972743 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.02831753349606649, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.02831753349606649 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33112582781456956, + "acc_stderr": 0.038425817186598696, + "acc_norm": 0.33112582781456956, + "acc_norm_stderr": 0.038425817186598696 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5970149253731343, + "acc_stderr": 0.034683432951111266, + "acc_norm": 0.5970149253731343, + "acc_norm_stderr": 0.034683432951111266 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.37572254335260113, + "acc_stderr": 0.03692820767264867, + "acc_norm": 0.37572254335260113, + "acc_norm_stderr": 0.03692820767264867 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.41534391534391535, + "acc_stderr": 0.02537952491077839, + "acc_norm": 0.41534391534391535, + "acc_norm_stderr": 0.02537952491077839 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3958333333333333, + "acc_stderr": 0.04089465449325583, + "acc_norm": 0.3958333333333333, + "acc_norm_stderr": 0.04089465449325583 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5317919075144508, + "acc_stderr": 0.026864624366756653, + "acc_norm": 0.5317919075144508, + "acc_norm_stderr": 0.026864624366756653 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5214723926380368, 
+ "acc_stderr": 0.03924746876751129, + "acc_norm": 0.5214723926380368, + "acc_norm_stderr": 0.03924746876751129 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.027815973433878014, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.027815973433878014 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5440414507772021, + "acc_stderr": 0.035944137112724366, + "acc_norm": 0.5440414507772021, + "acc_norm_stderr": 0.035944137112724366 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2894736842105263, + "acc_stderr": 0.04266339443159394, + "acc_norm": 0.2894736842105263, + "acc_norm_stderr": 0.04266339443159394 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5100917431192661, + "acc_stderr": 0.021432956203453313, + "acc_norm": 0.5100917431192661, + "acc_norm_stderr": 0.021432956203453313 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3412698412698413, + "acc_stderr": 0.04240799327574925, + "acc_norm": 0.3412698412698413, + "acc_norm_stderr": 0.04240799327574925 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4869281045751634, + "acc_stderr": 0.028620130800700246, + "acc_norm": 0.4869281045751634, + "acc_norm_stderr": 0.028620130800700246 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.628099173553719, + "acc_stderr": 0.04412015806624504, + "acc_norm": 0.628099173553719, + "acc_norm_stderr": 0.04412015806624504 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4144736842105263, + "acc_stderr": 0.04008973785779206, + "acc_norm": 0.4144736842105263, + "acc_norm_stderr": 0.04008973785779206 + }, + "harness|ko_mmlu_professional_psychology|5": { 
+ "acc": 0.3758169934640523, + "acc_stderr": 0.019594021136577443, + "acc_norm": 0.3758169934640523, + "acc_norm_stderr": 0.019594021136577443 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.35815602836879434, + "acc_stderr": 0.02860208586275941, + "acc_norm": 0.35815602836879434, + "acc_norm_stderr": 0.02860208586275941 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4732142857142857, + "acc_stderr": 0.047389751192741546, + "acc_norm": 0.4732142857142857, + "acc_norm_stderr": 0.047389751192741546 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3287037037037037, + "acc_stderr": 0.032036140846700596, + "acc_norm": 0.3287037037037037, + "acc_norm_stderr": 0.032036140846700596 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.19888268156424582, + "acc_stderr": 0.013349892983092517, + "acc_norm": 0.19888268156424582, + "acc_norm_stderr": 0.013349892983092517 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.57, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.57, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3125, + "acc_stderr": 0.02815637344037142, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.02815637344037142 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5346938775510204, + "acc_stderr": 0.03193207024425314, + "acc_norm": 0.5346938775510204, + "acc_norm_stderr": 0.03193207024425314 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6244725738396625, + "acc_stderr": 0.03152256243091157, + "acc_norm": 0.6244725738396625, + "acc_norm_stderr": 0.03152256243091157 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.32790091264667537, + "acc_stderr": 0.011989936640666544, + "acc_norm": 0.32790091264667537, + "acc_norm_stderr": 
0.011989936640666544 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5098039215686274, + "acc_stderr": 0.03508637358630572, + "acc_norm": 0.5098039215686274, + "acc_norm_stderr": 0.03508637358630572 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.47878787878787876, + "acc_stderr": 0.03900828913737302, + "acc_norm": 0.47878787878787876, + "acc_norm_stderr": 0.03900828913737302 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.27906976744186046, + "mc1_stderr": 0.015702107090627877, + "mc2": 0.45138126517969945, + "mc2_stderr": 0.015332079215584724 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3447461629279811, + "acc_stderr": 0.016340649905418683, + "acc_norm": 0.39433293978748524, + "acc_norm_stderr": 0.016802090674893213 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + 
"harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "maywell/Synatra-7B-v0.3-RP", + "model_sha": "a994747e68972f9018cd454730174211f9e46736", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/maywell/Synatra-7B-v0.3-Translation/result_2023-11-18 00:55:26.json b/maywell/Synatra-7B-v0.3-Translation/result_2023-11-18 00:55:26.json new file mode 100644 index 
0000000000000000000000000000000000000000..8ddab9f79c320d2c9118054e1ef177b3f086a564 --- /dev/null +++ b/maywell/Synatra-7B-v0.3-Translation/result_2023-11-18 00:55:26.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3361774744027304, + "acc_stderr": 0.01380485502620576, + "acc_norm": 0.39334470989761094, + "acc_norm_stderr": 0.014275101465693028 + }, + "harness|ko_hellaswag|10": { + "acc": 0.34993029277036447, + "acc_stderr": 0.004759729267943188, + "acc_norm": 0.45498904600677154, + "acc_norm_stderr": 0.004969521827957945 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.52046783625731, + "acc_stderr": 0.0383161053282193, + "acc_norm": 0.52046783625731, + "acc_norm_stderr": 0.0383161053282193 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5145631067961165, + "acc_stderr": 0.04948637324026637, + "acc_norm": 0.5145631067961165, + "acc_norm_stderr": 0.04948637324026637 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5236270753512133, + "acc_stderr": 0.017859989765176457, + "acc_norm": 0.5236270753512133, + "acc_norm_stderr": 0.017859989765176457 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.0424463323835323, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.0424463323835323 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720683, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720683 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3574468085106383, + "acc_stderr": 0.03132941789476425, + "acc_norm": 0.3574468085106383, + "acc_norm_stderr": 0.03132941789476425 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4397590361445783, + "acc_stderr": 0.03864139923699121, + "acc_norm": 0.4397590361445783, + "acc_norm_stderr": 0.03864139923699121 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.43729903536977494, + "acc_stderr": 0.028173917761762875, + "acc_norm": 0.43729903536977494, + "acc_norm_stderr": 
0.028173917761762875 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4663677130044843, + "acc_stderr": 0.033481800170603065, + "acc_norm": 0.4663677130044843, + "acc_norm_stderr": 0.033481800170603065 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3435114503816794, + "acc_stderr": 0.041649760719448786, + "acc_norm": 0.3435114503816794, + "acc_norm_stderr": 0.041649760719448786 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5404040404040404, + "acc_stderr": 0.035507024651313425, + "acc_norm": 0.5404040404040404, + "acc_norm_stderr": 0.035507024651313425 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4206896551724138, + "acc_stderr": 0.0411391498118926, + "acc_norm": 0.4206896551724138, + "acc_norm_stderr": 0.0411391498118926 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.19607843137254902, + "acc_stderr": 0.03950581861179962, + "acc_norm": 0.19607843137254902, + "acc_norm_stderr": 0.03950581861179962 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.37815126050420167, + "acc_stderr": 0.031499305777849054, + "acc_norm": 0.37815126050420167, + "acc_norm_stderr": 0.031499305777849054 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.40512820512820513, + "acc_stderr": 0.024890471769938145, + "acc_norm": 0.40512820512820513, + "acc_norm_stderr": 0.024890471769938145 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110175, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110175 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.04820403072760628, + "acc_norm": 0.46296296296296297, 
+ "acc_norm_stderr": 0.04820403072760628 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3694581280788177, + "acc_stderr": 0.03395970381998574, + "acc_norm": 0.3694581280788177, + "acc_norm_stderr": 0.03395970381998574 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3935483870967742, + "acc_stderr": 0.027791878753132274, + "acc_norm": 0.3935483870967742, + "acc_norm_stderr": 0.027791878753132274 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.688034188034188, + "acc_stderr": 0.03035152732334495, + "acc_norm": 0.688034188034188, + "acc_norm_stderr": 0.03035152732334495 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4075471698113208, + "acc_stderr": 0.0302422338008545, + "acc_norm": 0.4075471698113208, + "acc_norm_stderr": 0.0302422338008545 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4818181818181818, + "acc_stderr": 0.04785964010794916, + "acc_norm": 0.4818181818181818, + "acc_norm_stderr": 0.04785964010794916 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.027940457136228412, + "acc_norm": 0.3, + "acc_norm_stderr": 0.027940457136228412 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.03780445850526732, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.03780445850526732 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5124378109452736, + "acc_stderr": 0.0353443984853958, + "acc_norm": 0.5124378109452736, + "acc_norm_stderr": 0.0353443984853958 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.35260115606936415, + "acc_stderr": 0.036430371689585496, + "acc_norm": 0.35260115606936415, + "acc_norm_stderr": 0.036430371689585496 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3439153439153439, + "acc_stderr": 0.024464426625596423, + "acc_norm": 0.3439153439153439, + "acc_norm_stderr": 0.024464426625596423 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3333333333333333, + "acc_stderr": 
0.039420826399272135, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.039420826399272135 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.63, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.63, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.407514450867052, + "acc_stderr": 0.026454578146931494, + "acc_norm": 0.407514450867052, + "acc_norm_stderr": 0.026454578146931494 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.38650306748466257, + "acc_stderr": 0.038258255488486076, + "acc_norm": 0.38650306748466257, + "acc_norm_stderr": 0.038258255488486076 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.027648477877413317, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.027648477877413317 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.45077720207253885, + "acc_stderr": 0.035909109522355244, + "acc_norm": 0.45077720207253885, + "acc_norm_stderr": 0.035909109522355244 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.040969851398436716, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.040969851398436716 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.4, + "acc_stderr": 0.021004201260420075, + "acc_norm": 0.4, + "acc_norm_stderr": 0.021004201260420075 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.04134913018303316, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.04134913018303316 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.39215686274509803, + "acc_stderr": 
0.027956046165424523, + "acc_norm": 0.39215686274509803, + "acc_norm_stderr": 0.027956046165424523 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6198347107438017, + "acc_stderr": 0.04431324501968431, + "acc_norm": 0.6198347107438017, + "acc_norm_stderr": 0.04431324501968431 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40789473684210525, + "acc_stderr": 0.03999309712777472, + "acc_norm": 0.40789473684210525, + "acc_norm_stderr": 0.03999309712777472 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.36437908496732024, + "acc_stderr": 0.019469518221573695, + "acc_norm": 0.36437908496732024, + "acc_norm_stderr": 0.019469518221573695 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.33687943262411346, + "acc_stderr": 0.02819553487396673, + "acc_norm": 0.33687943262411346, + "acc_norm_stderr": 0.02819553487396673 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.04547960999764376, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.04547960999764376 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.029886910547626964, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.029886910547626964 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2770949720670391, + "acc_stderr": 0.014968772435812145, + "acc_norm": 0.2770949720670391, + "acc_norm_stderr": 0.014968772435812145 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_professional_medicine|5": { + 
"acc": 0.2610294117647059, + "acc_stderr": 0.026679252270103135, + "acc_norm": 0.2610294117647059, + "acc_norm_stderr": 0.026679252270103135 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.49387755102040815, + "acc_stderr": 0.03200682020163909, + "acc_norm": 0.49387755102040815, + "acc_norm_stderr": 0.03200682020163909 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5822784810126582, + "acc_stderr": 0.032103530322412685, + "acc_norm": 0.5822784810126582, + "acc_norm_stderr": 0.032103530322412685 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.31290743155149936, + "acc_stderr": 0.011842529823063004, + "acc_norm": 0.31290743155149936, + "acc_norm_stderr": 0.011842529823063004 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4019607843137255, + "acc_stderr": 0.03441190023482465, + "acc_norm": 0.4019607843137255, + "acc_norm_stderr": 0.03441190023482465 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.45454545454545453, + "acc_stderr": 0.03888176921674099, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.03888176921674099 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.22276621787025705, + "mc1_stderr": 0.014566506961396756, + "mc2": 0.3845751039570116, + "mc2_stderr": 0.015265157059591356 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.30814639905548996, + "acc_stderr": 0.01587451515629839, + "acc_norm": 0.39669421487603307, + "acc_norm_stderr": 0.016819438642971408 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + 
"harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + 
"harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "maywell/Synatra-7B-v0.3-Translation", + "model_sha": "fab3f68b4fb414d481167677d660e0fc29a47ec4", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/maywell/Synatra-7B-v0.3-dpo/result_2023-11-08 10:27:00.json b/maywell/Synatra-7B-v0.3-dpo/result_2023-11-08 10:27:00.json new file mode 100644 index 0000000000000000000000000000000000000000..f3ab91d5bb76dcaab4acf093f4d30acb3e8e12ec --- /dev/null +++ b/maywell/Synatra-7B-v0.3-dpo/result_2023-11-08 10:27:00.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4180887372013652, + "acc_stderr": 0.014413988396996074, + "acc_norm": 0.47013651877133106, + "acc_norm_stderr": 0.014585305840007107 + }, + "harness|ko_hellaswag|10": { + "acc": 0.40659231228838877, + "acc_stderr": 0.00490193651154613, + "acc_norm": 0.5323640709022107, + "acc_norm_stderr": 0.004979317515432522 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.038110796698335316, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.038110796698335316 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5533980582524272, + "acc_stderr": 0.04922424153458934, + "acc_norm": 0.5533980582524272, + "acc_norm_stderr": 0.04922424153458934 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5798212005108557, + "acc_stderr": 0.017650651363078033, + "acc_norm": 0.5798212005108557, + "acc_norm_stderr": 0.017650651363078033 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.31851851851851853, + "acc_stderr": 0.04024778401977112, + "acc_norm": 0.31851851851851853, + "acc_norm_stderr": 
0.04024778401977112 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39148936170212767, + "acc_stderr": 0.03190701242326812, + "acc_norm": 0.39148936170212767, + "acc_norm_stderr": 0.03190701242326812 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42168674698795183, + "acc_stderr": 0.03844453181770917, + "acc_norm": 0.42168674698795183, + "acc_norm_stderr": 0.03844453181770917 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4983922829581994, + "acc_stderr": 0.028397944907806612, + "acc_norm": 0.4983922829581994, + "acc_norm_stderr": 0.028397944907806612 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5336322869955157, + "acc_stderr": 0.033481800170603065, + "acc_norm": 0.5336322869955157, + "acc_norm_stderr": 0.033481800170603065 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4732824427480916, + "acc_stderr": 0.04379024936553894, + "acc_norm": 0.4732824427480916, + "acc_norm_stderr": 0.04379024936553894 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5909090909090909, + "acc_stderr": 0.03502975799413007, + "acc_norm": 0.5909090909090909, + "acc_norm_stderr": 0.03502975799413007 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.43448275862068964, + "acc_stderr": 0.041307408795554966, + "acc_norm": 0.43448275862068964, + "acc_norm_stderr": 0.041307408795554966 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.041583075330832865, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.041583075330832865 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.49159663865546216, + "acc_stderr": 0.032473902765696686, + "acc_norm": 
0.49159663865546216, + "acc_norm_stderr": 0.032473902765696686 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4897435897435897, + "acc_stderr": 0.025345672221942374, + "acc_norm": 0.4897435897435897, + "acc_norm_stderr": 0.025345672221942374 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.65, + "acc_stderr": 0.04793724854411021, + "acc_norm": 0.65, + "acc_norm_stderr": 0.04793724854411021 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5370370370370371, + "acc_stderr": 0.04820403072760626, + "acc_norm": 0.5370370370370371, + "acc_norm_stderr": 0.04820403072760626 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3891625615763547, + "acc_stderr": 0.034304624161038716, + "acc_norm": 0.3891625615763547, + "acc_norm_stderr": 0.034304624161038716 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5, + "acc_stderr": 0.028444006199428714, + "acc_norm": 0.5, + "acc_norm_stderr": 0.028444006199428714 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7307692307692307, + "acc_stderr": 0.029058588303748845, + "acc_norm": 0.7307692307692307, + "acc_norm_stderr": 0.029058588303748845 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5283018867924528, + "acc_stderr": 0.0307235352490061, + "acc_norm": 0.5283018867924528, + "acc_norm_stderr": 0.0307235352490061 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5181818181818182, + "acc_stderr": 0.04785964010794916, + "acc_norm": 0.5181818181818182, + "acc_norm_stderr": 0.04785964010794916 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.29259259259259257, + "acc_stderr": 0.027738969632176088, + "acc_norm": 0.29259259259259257, + "acc_norm_stderr": 0.027738969632176088 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3443708609271523, + "acc_stderr": 0.038796870240733264, + 
"acc_norm": 0.3443708609271523, + "acc_norm_stderr": 0.038796870240733264 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6019900497512438, + "acc_stderr": 0.034611994290400135, + "acc_norm": 0.6019900497512438, + "acc_norm_stderr": 0.034611994290400135 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4161849710982659, + "acc_stderr": 0.03758517775404947, + "acc_norm": 0.4161849710982659, + "acc_norm_stderr": 0.03758517775404947 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.40476190476190477, + "acc_stderr": 0.025279850397404904, + "acc_norm": 0.40476190476190477, + "acc_norm_stderr": 0.025279850397404904 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3402777777777778, + "acc_stderr": 0.03962135573486219, + "acc_norm": 0.3402777777777778, + "acc_norm_stderr": 0.03962135573486219 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.66, + "acc_stderr": 0.04760952285695237, + "acc_norm": 0.66, + "acc_norm_stderr": 0.04760952285695237 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5404624277456648, + "acc_stderr": 0.02683080599895223, + "acc_norm": 0.5404624277456648, + "acc_norm_stderr": 0.02683080599895223 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5153374233128835, + "acc_stderr": 0.039265223787088424, + "acc_norm": 0.5153374233128835, + "acc_norm_stderr": 0.039265223787088424 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.49382716049382713, + "acc_stderr": 0.027818623962583295, + "acc_norm": 0.49382716049382713, + "acc_norm_stderr": 0.027818623962583295 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5492227979274611, + "acc_stderr": 
0.035909109522355244, + "acc_norm": 0.5492227979274611, + "acc_norm_stderr": 0.035909109522355244 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3157894736842105, + "acc_stderr": 0.04372748290278007, + "acc_norm": 0.3157894736842105, + "acc_norm_stderr": 0.04372748290278007 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5504587155963303, + "acc_stderr": 0.021327881417823366, + "acc_norm": 0.5504587155963303, + "acc_norm_stderr": 0.021327881417823366 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.42063492063492064, + "acc_stderr": 0.04415438226743744, + "acc_norm": 0.42063492063492064, + "acc_norm_stderr": 0.04415438226743744 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4738562091503268, + "acc_stderr": 0.028590752958852394, + "acc_norm": 0.4738562091503268, + "acc_norm_stderr": 0.028590752958852394 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6694214876033058, + "acc_stderr": 0.04294340845212095, + "acc_norm": 0.6694214876033058, + "acc_norm_stderr": 0.04294340845212095 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.46710526315789475, + "acc_stderr": 0.040601270352363966, + "acc_norm": 0.46710526315789475, + "acc_norm_stderr": 0.040601270352363966 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4068627450980392, + "acc_stderr": 0.019873802005061177, + "acc_norm": 0.4068627450980392, + "acc_norm_stderr": 0.019873802005061177 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32978723404255317, + "acc_stderr": 0.028045946942042398, + "acc_norm": 0.32978723404255317, + "acc_norm_stderr": 0.028045946942042398 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.49107142857142855, + "acc_stderr": 0.04745033255489123, + "acc_norm": 0.49107142857142855, + "acc_norm_stderr": 0.04745033255489123 + }, + 
"harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.03293377139415191, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.03293377139415191 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.21340782122905028, + "acc_stderr": 0.013702859932196098, + "acc_norm": 0.21340782122905028, + "acc_norm_stderr": 0.013702859932196098 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.39705882352941174, + "acc_stderr": 0.02972215209928006, + "acc_norm": 0.39705882352941174, + "acc_norm_stderr": 0.02972215209928006 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4897959183673469, + "acc_stderr": 0.03200255347893782, + "acc_norm": 0.4897959183673469, + "acc_norm_stderr": 0.03200255347893782 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6540084388185654, + "acc_stderr": 0.03096481058878671, + "acc_norm": 0.6540084388185654, + "acc_norm_stderr": 0.03096481058878671 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.34485006518904826, + "acc_stderr": 0.012139881006287063, + "acc_norm": 0.34485006518904826, + "acc_norm_stderr": 0.012139881006287063 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5098039215686274, + "acc_stderr": 0.03508637358630572, + "acc_norm": 0.5098039215686274, + "acc_norm_stderr": 0.03508637358630572 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5272727272727272, + "acc_stderr": 0.03898531605579418, + "acc_norm": 0.5272727272727272, + "acc_norm_stderr": 0.03898531605579418 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.34394124847001223, + "mc1_stderr": 0.016629087514276754, + 
"mc2": 0.5118549299169702, + "mc2_stderr": 0.015922621928954366 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4510035419126328, + "acc_stderr": 0.017107618859549353, + "acc_norm": 0.4639905548996458, + "acc_norm_stderr": 0.017145715365486657 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + 
"harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "maywell/Synatra-7B-v0.3-dpo", + "model_sha": "405a4f1e6513cd1b8de5eb4e003bb49cc86d1f8a", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/maywell/Synatra-Mixtral-8x7B/result_2024-01-22 05:20:04.json b/maywell/Synatra-Mixtral-8x7B/result_2024-01-22 05:20:04.json new file mode 100644 index 0000000000000000000000000000000000000000..3cee3dba2e17fa7f377ceedbb5edc518256bbe76 --- /dev/null +++ b/maywell/Synatra-Mixtral-8x7B/result_2024-01-22 05:20:04.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4351535836177474, + "acc_stderr": 0.014487986197186045, + "acc_norm": 0.49829351535836175, + "acc_norm_stderr": 0.01461130570505698 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4229237203744274, + "acc_stderr": 0.004930138842768214, + "acc_norm": 0.5637323242381995, + "acc_norm_stderr": 0.004949080334816018 + }, + 
"harness|ko_mmlu_world_religions|5": { + "acc": 0.5906432748538012, + "acc_stderr": 0.03771283107626546, + "acc_norm": 0.5906432748538012, + "acc_norm_stderr": 0.03771283107626546 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6601941747572816, + "acc_stderr": 0.046897659372781335, + "acc_norm": 0.6601941747572816, + "acc_norm_stderr": 0.046897659372781335 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6245210727969349, + "acc_stderr": 0.01731661319718279, + "acc_norm": 0.6245210727969349, + "acc_norm_stderr": 0.01731661319718279 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.362962962962963, + "acc_stderr": 0.04153948404742401, + "acc_norm": 0.362962962962963, + "acc_norm_stderr": 0.04153948404742401 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.5617021276595745, + "acc_stderr": 0.03243618636108099, + "acc_norm": 0.5617021276595745, + "acc_norm_stderr": 0.03243618636108099 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4578313253012048, + "acc_stderr": 0.03878626771002361, + "acc_norm": 0.4578313253012048, + "acc_norm_stderr": 0.03878626771002361 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6270096463022508, + "acc_stderr": 0.0274666102131401, + "acc_norm": 0.6270096463022508, + "acc_norm_stderr": 0.0274666102131401 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5605381165919282, + "acc_stderr": 0.03331092511038179, + "acc_norm": 0.5605381165919282, + "acc_norm_stderr": 0.03331092511038179 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5419847328244275, + "acc_stderr": 0.04369802690578757, + "acc_norm": 0.5419847328244275, + "acc_norm_stderr": 0.04369802690578757 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.52, + "acc_stderr": 0.05021167315686781, + "acc_norm": 0.52, + "acc_norm_stderr": 0.05021167315686781 + }, + 
"harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7272727272727273, + "acc_stderr": 0.031730712390717244, + "acc_norm": 0.7272727272727273, + "acc_norm_stderr": 0.031730712390717244 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5724137931034483, + "acc_stderr": 0.04122737111370332, + "acc_norm": 0.5724137931034483, + "acc_norm_stderr": 0.04122737111370332 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.04617034827006717, + "acc_norm": 0.3137254901960784, + "acc_norm_stderr": 0.04617034827006717 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5588235294117647, + "acc_stderr": 0.0322529423239964, + "acc_norm": 0.5588235294117647, + "acc_norm_stderr": 0.0322529423239964 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5282051282051282, + "acc_stderr": 0.025310639254933855, + "acc_norm": 0.5282051282051282, + "acc_norm_stderr": 0.025310639254933855 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.64, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.64, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5740740740740741, + "acc_stderr": 0.0478034362693679, + "acc_norm": 0.5740740740740741, + "acc_norm_stderr": 0.0478034362693679 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4433497536945813, + "acc_stderr": 0.03495334582162934, + "acc_norm": 0.4433497536945813, + "acc_norm_stderr": 0.03495334582162934 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5483870967741935, + "acc_stderr": 0.028310500348568378, + "acc_norm": 0.5483870967741935, + "acc_norm_stderr": 0.028310500348568378 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.8376068376068376, + "acc_stderr": 0.024161618127987745, + "acc_norm": 0.8376068376068376, + 
"acc_norm_stderr": 0.024161618127987745 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.569811320754717, + "acc_stderr": 0.030471445867183238, + "acc_norm": 0.569811320754717, + "acc_norm_stderr": 0.030471445867183238 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5818181818181818, + "acc_stderr": 0.047245774057315726, + "acc_norm": 0.5818181818181818, + "acc_norm_stderr": 0.047245774057315726 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.34074074074074073, + "acc_stderr": 0.028897748741131137, + "acc_norm": 0.34074074074074073, + "acc_norm_stderr": 0.028897748741131137 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.44370860927152317, + "acc_stderr": 0.04056527902281732, + "acc_norm": 0.44370860927152317, + "acc_norm_stderr": 0.04056527902281732 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7014925373134329, + "acc_stderr": 0.032357437893550445, + "acc_norm": 0.7014925373134329, + "acc_norm_stderr": 0.032357437893550445 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.44508670520231214, + "acc_stderr": 0.03789401760283648, + "acc_norm": 0.44508670520231214, + "acc_norm_stderr": 0.03789401760283648 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.455026455026455, + "acc_stderr": 0.02564692836104939, + "acc_norm": 0.455026455026455, + "acc_norm_stderr": 0.02564692836104939 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4513888888888889, + "acc_stderr": 0.041614023984032786, + "acc_norm": 0.4513888888888889, + "acc_norm_stderr": 0.041614023984032786 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.67, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.67, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5838150289017341, + "acc_stderr": 0.026538189104705477, + 
"acc_norm": 0.5838150289017341, + "acc_norm_stderr": 0.026538189104705477 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.558282208588957, + "acc_stderr": 0.03901591825836185, + "acc_norm": 0.558282208588957, + "acc_norm_stderr": 0.03901591825836185 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5524691358024691, + "acc_stderr": 0.027667138569422704, + "acc_norm": 0.5524691358024691, + "acc_norm_stderr": 0.027667138569422704 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6839378238341969, + "acc_stderr": 0.033553973696861736, + "acc_norm": 0.6839378238341969, + "acc_norm_stderr": 0.033553973696861736 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.39473684210526316, + "acc_stderr": 0.045981880578165414, + "acc_norm": 0.39473684210526316, + "acc_norm_stderr": 0.045981880578165414 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6385321100917432, + "acc_stderr": 0.02059808200993737, + "acc_norm": 0.6385321100917432, + "acc_norm_stderr": 0.02059808200993737 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4523809523809524, + "acc_stderr": 0.044518079590553275, + "acc_norm": 0.4523809523809524, + "acc_norm_stderr": 0.044518079590553275 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5915032679738562, + "acc_stderr": 0.028146405993096358, + "acc_norm": 0.5915032679738562, + "acc_norm_stderr": 0.028146405993096358 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.57, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.57, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7355371900826446, + "acc_stderr": 0.04026187527591205, + "acc_norm": 0.7355371900826446, + "acc_norm_stderr": 0.04026187527591205 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.625, + "acc_stderr": 
0.039397364351956274, + "acc_norm": 0.625, + "acc_norm_stderr": 0.039397364351956274 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4738562091503268, + "acc_stderr": 0.020200164564804588, + "acc_norm": 0.4738562091503268, + "acc_norm_stderr": 0.020200164564804588 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.4148936170212766, + "acc_stderr": 0.0293922365846125, + "acc_norm": 0.4148936170212766, + "acc_norm_stderr": 0.0293922365846125 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.49107142857142855, + "acc_stderr": 0.04745033255489123, + "acc_norm": 0.49107142857142855, + "acc_norm_stderr": 0.04745033255489123 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.03400603625538272, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.03400603625538272 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.31620111731843575, + "acc_stderr": 0.015551673652172544, + "acc_norm": 0.31620111731843575, + "acc_norm_stderr": 0.015551673652172544 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.53, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.53, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.68, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.68, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.39338235294117646, + "acc_stderr": 0.029674288281311172, + "acc_norm": 0.39338235294117646, + "acc_norm_stderr": 0.029674288281311172 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6693877551020408, + "acc_stderr": 0.030116426296540606, + "acc_norm": 0.6693877551020408, + "acc_norm_stderr": 0.030116426296540606 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7552742616033755, + "acc_stderr": 0.027985699387036406, + "acc_norm": 0.7552742616033755, + "acc_norm_stderr": 0.027985699387036406 + }, + 
"harness|ko_mmlu_professional_law|5": { + "acc": 0.3891786179921773, + "acc_stderr": 0.012452613934287017, + "acc_norm": 0.3891786179921773, + "acc_norm_stderr": 0.012452613934287017 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6323529411764706, + "acc_stderr": 0.03384132045674118, + "acc_norm": 0.6323529411764706, + "acc_norm_stderr": 0.03384132045674118 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6484848484848484, + "acc_stderr": 0.037282069986826503, + "acc_norm": 0.6484848484848484, + "acc_norm_stderr": 0.037282069986826503 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.30354957160342716, + "mc1_stderr": 0.016095884155386847, + "mc2": 0.46840649193349704, + "mc2_stderr": 0.015319566577346917 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5171192443919717, + "acc_stderr": 0.017180275246085622, + "acc_norm": 0.5430932703659976, + "acc_norm_stderr": 0.017126389093086777 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + 
"harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "maywell/Synatra-Mixtral-8x7B", + "model_sha": "d00e65f6bc7f11573018b449cba8b5be59e72cf0", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git 
a/maywell/Synatra-RP-Orca-2-7b-v0.1/result_2023-11-25 01:18:50.json b/maywell/Synatra-RP-Orca-2-7b-v0.1/result_2023-11-25 01:18:50.json new file mode 100644 index 0000000000000000000000000000000000000000..c5724c031e4c4f706d103d5236b921f2ddc1a6f2 --- /dev/null +++ b/maywell/Synatra-RP-Orca-2-7b-v0.1/result_2023-11-25 01:18:50.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3003412969283277, + "acc_stderr": 0.013395909309957, + "acc_norm": 0.34726962457337884, + "acc_norm_stderr": 0.013913034529620442 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3313085042820155, + "acc_stderr": 0.004697217912462989, + "acc_norm": 0.39762995419239194, + "acc_norm_stderr": 0.004884079750433874 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.49122807017543857, + "acc_stderr": 0.03834234744164993, + "acc_norm": 0.49122807017543857, + "acc_norm_stderr": 0.03834234744164993 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5339805825242718, + "acc_stderr": 0.04939291447273482, + "acc_norm": 0.5339805825242718, + "acc_norm_stderr": 0.04939291447273482 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4112388250319285, + "acc_stderr": 0.017595971908056573, + "acc_norm": 0.4112388250319285, + "acc_norm_stderr": 0.017595971908056573 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.32592592592592595, + "acc_stderr": 0.040491220417025055, + "acc_norm": 0.32592592592592595, + "acc_norm_stderr": 0.040491220417025055 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3829787234042553, + "acc_stderr": 0.03177821250236922, + "acc_norm": 0.3829787234042553, + "acc_norm_stderr": 0.03177821250236922 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.37349397590361444, + "acc_stderr": 0.03765845117168863, + "acc_norm": 0.37349397590361444, + "acc_norm_stderr": 0.03765845117168863 + 
}, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.36977491961414793, + "acc_stderr": 0.027417996705630998, + "acc_norm": 0.36977491961414793, + "acc_norm_stderr": 0.027417996705630998 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.36771300448430494, + "acc_stderr": 0.03236198350928275, + "acc_norm": 0.36771300448430494, + "acc_norm_stderr": 0.03236198350928275 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4198473282442748, + "acc_stderr": 0.04328577215262971, + "acc_norm": 0.4198473282442748, + "acc_norm_stderr": 0.04328577215262971 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.035402943770953675, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.035402943770953675 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.42758620689655175, + "acc_stderr": 0.04122737111370331, + "acc_norm": 0.42758620689655175, + "acc_norm_stderr": 0.04122737111370331 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237655, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237655 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3487394957983193, + "acc_stderr": 0.030956636328566545, + "acc_norm": 0.3487394957983193, + "acc_norm_stderr": 0.030956636328566545 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.36923076923076925, + "acc_stderr": 0.024468615241478902, + "acc_norm": 0.36923076923076925, + "acc_norm_stderr": 0.024468615241478902 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 
0.04852365870939099 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4537037037037037, + "acc_stderr": 0.048129173245368216, + "acc_norm": 0.4537037037037037, + "acc_norm_stderr": 0.048129173245368216 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3694581280788177, + "acc_stderr": 0.03395970381998573, + "acc_norm": 0.3694581280788177, + "acc_norm_stderr": 0.03395970381998573 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3419354838709677, + "acc_stderr": 0.02698528957655274, + "acc_norm": 0.3419354838709677, + "acc_norm_stderr": 0.02698528957655274 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6153846153846154, + "acc_stderr": 0.03187195347942466, + "acc_norm": 0.6153846153846154, + "acc_norm_stderr": 0.03187195347942466 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.38113207547169814, + "acc_stderr": 0.029890609686286616, + "acc_norm": 0.38113207547169814, + "acc_norm_stderr": 0.029890609686286616 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.37272727272727274, + "acc_stderr": 0.046313813194254635, + "acc_norm": 0.37272727272727274, + "acc_norm_stderr": 0.046313813194254635 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3111111111111111, + "acc_stderr": 0.028226446749683522, + "acc_norm": 0.3111111111111111, + "acc_norm_stderr": 0.028226446749683522 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.03710185726119995, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.03710185726119995 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5223880597014925, + "acc_stderr": 0.03531987930208731, + "acc_norm": 0.5223880597014925, + "acc_norm_stderr": 0.03531987930208731 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3352601156069364, + "acc_stderr": 0.03599586301247078, + "acc_norm": 0.3352601156069364, + "acc_norm_stderr": 0.03599586301247078 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3148148148148148, + 
"acc_stderr": 0.023919984164047736, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.023919984164047736 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.03745554791462457, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.03745554791462457 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.56, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.56, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4190751445086705, + "acc_stderr": 0.026564178111422622, + "acc_norm": 0.4190751445086705, + "acc_norm_stderr": 0.026564178111422622 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.39263803680981596, + "acc_stderr": 0.03836740907831029, + "acc_norm": 0.39263803680981596, + "acc_norm_stderr": 0.03836740907831029 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.39814814814814814, + "acc_stderr": 0.02723741509459247, + "acc_norm": 0.39814814814814814, + "acc_norm_stderr": 0.02723741509459247 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.43523316062176165, + "acc_stderr": 0.03578038165008585, + "acc_norm": 0.43523316062176165, + "acc_norm_stderr": 0.03578038165008585 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.04142439719489359, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.04142439719489359 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3743119266055046, + "acc_stderr": 0.020748959408988313, + "acc_norm": 0.3743119266055046, + "acc_norm_stderr": 0.020748959408988313 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 
0.30158730158730157, + "acc_stderr": 0.04104947269903394, + "acc_norm": 0.30158730158730157, + "acc_norm_stderr": 0.04104947269903394 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3627450980392157, + "acc_stderr": 0.027530078447110317, + "acc_norm": 0.3627450980392157, + "acc_norm_stderr": 0.027530078447110317 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5619834710743802, + "acc_stderr": 0.045291468044357915, + "acc_norm": 0.5619834710743802, + "acc_norm_stderr": 0.045291468044357915 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3815789473684211, + "acc_stderr": 0.03953173377749194, + "acc_norm": 0.3815789473684211, + "acc_norm_stderr": 0.03953173377749194 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3006535947712418, + "acc_stderr": 0.018550634502952964, + "acc_norm": 0.3006535947712418, + "acc_norm_stderr": 0.018550634502952964 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3049645390070922, + "acc_stderr": 0.027464708442022128, + "acc_norm": 0.3049645390070922, + "acc_norm_stderr": 0.027464708442022128 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25, + "acc_stderr": 0.04109974682633932, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04109974682633932 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.25462962962962965, + "acc_stderr": 0.02971127586000534, + "acc_norm": 0.25462962962962965, + "acc_norm_stderr": 0.02971127586000534 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2569832402234637, + "acc_stderr": 0.014614465821966346, + "acc_norm": 0.2569832402234637, + "acc_norm_stderr": 0.014614465821966346 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + 
"harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.27205882352941174, + "acc_stderr": 0.027033041151681456, + "acc_norm": 0.27205882352941174, + "acc_norm_stderr": 0.027033041151681456 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4204081632653061, + "acc_stderr": 0.03160106993449604, + "acc_norm": 0.4204081632653061, + "acc_norm_stderr": 0.03160106993449604 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.4767932489451477, + "acc_stderr": 0.03251215201141018, + "acc_norm": 0.4767932489451477, + "acc_norm_stderr": 0.03251215201141018 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2803129074315515, + "acc_stderr": 0.011471555944958623, + "acc_norm": 0.2803129074315515, + "acc_norm_stderr": 0.011471555944958623 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.034602283272391704, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.034602283272391704 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4121212121212121, + "acc_stderr": 0.03843566993588718, + "acc_norm": 0.4121212121212121, + "acc_norm_stderr": 0.03843566993588718 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2766217870257038, + "mc1_stderr": 0.015659605755326902, + "mc2": 0.45578640975104057, + "mc2_stderr": 0.016180400915364303 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3270365997638725, + "acc_stderr": 0.016129047485457022, + "acc_norm": 0.3565525383707202, + "acc_norm_stderr": 0.01646770698152745 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + 
"harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + 
"harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "maywell/Synatra-RP-Orca-2-7b-v0.1", + "model_sha": "da80bc823c407c28c464cc0547a8ed9e0ca82f79", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/maywell/Synatra-V0.1-7B-Instruct/result_2023-10-10 09:40:24.json b/maywell/Synatra-V0.1-7B-Instruct/result_2023-10-10 09:40:24.json new file mode 100644 index 0000000000000000000000000000000000000000..b5f21bbbe65fa550bef7d9fd527b8d292fd37f41 --- /dev/null +++ b/maywell/Synatra-V0.1-7B-Instruct/result_2023-10-10 09:40:24.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3609215017064846, + "acc_stderr": 0.014034761386175452, + "acc_norm": 0.41723549488054607, + "acc_norm_stderr": 0.01440982551840308 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3813981278629755, + "acc_stderr": 0.004847372670134637, + "acc_norm": 0.49283011352320255, + "acc_norm_stderr": 0.004989268362968721 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4619883040935672, + "acc_stderr": 0.03823727092882307, + "acc_norm": 0.4619883040935672, + "acc_norm_stderr": 0.03823727092882307 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5436893203883495, + "acc_stderr": 0.04931801994220416, + "acc_norm": 0.5436893203883495, + "acc_norm_stderr": 0.04931801994220416 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.46871008939974457, + "acc_stderr": 0.017844918090468544, + "acc_norm": 0.46871008939974457, + 
"acc_norm_stderr": 0.017844918090468544 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04072314811876837, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04072314811876837 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4297872340425532, + "acc_stderr": 0.03236214467715564, + "acc_norm": 0.4297872340425532, + "acc_norm_stderr": 0.03236214467715564 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3855421686746988, + "acc_stderr": 0.03789134424611548, + "acc_norm": 0.3855421686746988, + "acc_norm_stderr": 0.03789134424611548 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4855305466237942, + "acc_stderr": 0.02838619808417768, + "acc_norm": 0.4855305466237942, + "acc_norm_stderr": 0.02838619808417768 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4484304932735426, + "acc_stderr": 0.03337883736255098, + "acc_norm": 0.4484304932735426, + "acc_norm_stderr": 0.03337883736255098 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.44274809160305345, + "acc_stderr": 0.0435644720266507, + "acc_norm": 0.44274809160305345, + "acc_norm_stderr": 0.0435644720266507 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5505050505050505, + "acc_stderr": 0.0354413249194797, + "acc_norm": 0.5505050505050505, + "acc_norm_stderr": 0.0354413249194797 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.496551724137931, + "acc_stderr": 0.04166567577101579, + "acc_norm": 0.496551724137931, + "acc_norm_stderr": 0.04166567577101579 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3235294117647059, + "acc_stderr": 0.046550104113196177, + "acc_norm": 0.3235294117647059, + 
"acc_norm_stderr": 0.046550104113196177 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.032145368597886394, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.032145368597886394 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.41794871794871796, + "acc_stderr": 0.025007329882461224, + "acc_norm": 0.41794871794871796, + "acc_norm_stderr": 0.025007329882461224 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.57, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.57, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542129, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542129 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.04803752235190192, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.04803752235190192 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.03481904844438804, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.03481904844438804 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.43870967741935485, + "acc_stderr": 0.02822949732031722, + "acc_norm": 0.43870967741935485, + "acc_norm_stderr": 0.02822949732031722 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6495726495726496, + "acc_stderr": 0.0312561082442188, + "acc_norm": 0.6495726495726496, + "acc_norm_stderr": 0.0312561082442188 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.49056603773584906, + "acc_stderr": 0.030767394707808086, + "acc_norm": 0.49056603773584906, + "acc_norm_stderr": 0.030767394707808086 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.43636363636363634, + "acc_stderr": 0.04750185058907297, + "acc_norm": 0.43636363636363634, + "acc_norm_stderr": 0.04750185058907297 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 
0.026842057873833713, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.026842057873833713 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3509933774834437, + "acc_stderr": 0.03896981964257375, + "acc_norm": 0.3509933774834437, + "acc_norm_stderr": 0.03896981964257375 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5572139303482587, + "acc_stderr": 0.03512310964123935, + "acc_norm": 0.5572139303482587, + "acc_norm_stderr": 0.03512310964123935 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3872832369942196, + "acc_stderr": 0.03714325906302065, + "acc_norm": 0.3872832369942196, + "acc_norm_stderr": 0.03714325906302065 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3386243386243386, + "acc_stderr": 0.024373197867983067, + "acc_norm": 0.3386243386243386, + "acc_norm_stderr": 0.024373197867983067 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.03942082639927213, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.03942082639927213 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.48265895953757226, + "acc_stderr": 0.02690290045866664, + "acc_norm": 0.48265895953757226, + "acc_norm_stderr": 0.02690290045866664 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4049079754601227, + "acc_stderr": 0.038566721635489125, + "acc_norm": 0.4049079754601227, + "acc_norm_stderr": 0.038566721635489125 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.44753086419753085, + "acc_stderr": 0.027667138569422697, + "acc_norm": 0.44753086419753085, + "acc_norm_stderr": 0.027667138569422697 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.42, + 
"acc_stderr": 0.04960449637488583, + "acc_norm": 0.42, + "acc_norm_stderr": 0.04960449637488583 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.49740932642487046, + "acc_stderr": 0.03608390745384486, + "acc_norm": 0.49740932642487046, + "acc_norm_stderr": 0.03608390745384486 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2982456140350877, + "acc_stderr": 0.043036840335373173, + "acc_norm": 0.2982456140350877, + "acc_norm_stderr": 0.043036840335373173 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.47522935779816516, + "acc_stderr": 0.02141099975363592, + "acc_norm": 0.47522935779816516, + "acc_norm_stderr": 0.02141099975363592 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.04285714285714281, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.04285714285714281 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.47058823529411764, + "acc_stderr": 0.028580341065138293, + "acc_norm": 0.47058823529411764, + "acc_norm_stderr": 0.028580341065138293 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.043913262867240704, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.043913262867240704 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.42105263157894735, + "acc_stderr": 0.04017901275981749, + "acc_norm": 0.42105263157894735, + "acc_norm_stderr": 0.04017901275981749 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.35130718954248363, + "acc_stderr": 0.01931267606578656, + "acc_norm": 0.35130718954248363, + "acc_norm_stderr": 0.01931267606578656 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2978723404255319, + "acc_stderr": 0.027281608344469414, + "acc_norm": 0.2978723404255319, + "acc_norm_stderr": 0.027281608344469414 + }, + 
"harness|ko_mmlu_machine_learning|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.04547960999764376, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.04547960999764376 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.39351851851851855, + "acc_stderr": 0.03331747876370312, + "acc_norm": 0.39351851851851855, + "acc_norm_stderr": 0.03331747876370312 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2759776536312849, + "acc_stderr": 0.014950103002475363, + "acc_norm": 0.2759776536312849, + "acc_norm_stderr": 0.014950103002475363 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3602941176470588, + "acc_stderr": 0.02916312857067073, + "acc_norm": 0.3602941176470588, + "acc_norm_stderr": 0.02916312857067073 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5142857142857142, + "acc_stderr": 0.03199615232806286, + "acc_norm": 0.5142857142857142, + "acc_norm_stderr": 0.03199615232806286 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5780590717299579, + "acc_stderr": 0.032148146302403695, + "acc_norm": 0.5780590717299579, + "acc_norm_stderr": 0.032148146302403695 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3135593220338983, + "acc_stderr": 0.011849234291459329, + "acc_norm": 0.3135593220338983, + "acc_norm_stderr": 0.011849234291459329 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.03484941514429231, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.03484941514429231 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.48484848484848486, + "acc_stderr": 0.03902551007374448, + 
"acc_norm": 0.48484848484848486, + "acc_norm_stderr": 0.03902551007374448 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2607099143206854, + "mc1_stderr": 0.015368841620766373, + "mc2": 0.43748297535795655, + "mc2_stderr": 0.015378495166878805 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3530106257378985, + "acc_stderr": 0.01643074598242713, + "acc_norm": 0.44155844155844154, + "acc_norm_stderr": 0.0170725258755631 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + 
"harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "maywell/Synatra-V0.1-7B-Instruct", + "model_sha": "ad4a0c24363b0b0b12f883c7e9e2b7d3c0667fb7", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/maywell/Synatra-Yi-Ko-6B/result_2023-12-04 20:13:32.json b/maywell/Synatra-Yi-Ko-6B/result_2023-12-04 20:13:32.json new file mode 100644 index 0000000000000000000000000000000000000000..58cb1414b0032f7a2fe048fdc0fbce12501b91ed --- /dev/null +++ b/maywell/Synatra-Yi-Ko-6B/result_2023-12-04 20:13:32.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3575085324232082, + "acc_stderr": 0.014005494275916576, + "acc_norm": 0.4129692832764505, + "acc_norm_stderr": 0.014388344935398324 + }, + 
"harness|ko_hellaswag|10": { + "acc": 0.37731527584146585, + "acc_stderr": 0.004837242015191119, + "acc_norm": 0.4814777932682733, + "acc_norm_stderr": 0.004986356526063965 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5321637426900585, + "acc_stderr": 0.03826882417660368, + "acc_norm": 0.5321637426900585, + "acc_norm_stderr": 0.03826882417660368 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6213592233009708, + "acc_stderr": 0.04802694698258974, + "acc_norm": 0.6213592233009708, + "acc_norm_stderr": 0.04802694698258974 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5874840357598978, + "acc_stderr": 0.01760414910867193, + "acc_norm": 0.5874840357598978, + "acc_norm_stderr": 0.01760414910867193 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.42962962962962964, + "acc_stderr": 0.04276349494376598, + "acc_norm": 0.42962962962962964, + "acc_norm_stderr": 0.04276349494376598 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.425531914893617, + "acc_stderr": 0.03232146916224468, + "acc_norm": 0.425531914893617, + "acc_norm_stderr": 0.03232146916224468 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.45180722891566266, + "acc_stderr": 0.038743715565879536, + "acc_norm": 0.45180722891566266, + "acc_norm_stderr": 0.038743715565879536 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5176848874598071, + "acc_stderr": 0.028380322849077138, + "acc_norm": 0.5176848874598071, + "acc_norm_stderr": 0.028380322849077138 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4798206278026906, + "acc_stderr": 0.033530461674123, + "acc_norm": 0.4798206278026906, + "acc_norm_stderr": 0.033530461674123 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4961832061068702, + "acc_stderr": 0.043851623256015534, + "acc_norm": 0.4961832061068702, + "acc_norm_stderr": 0.043851623256015534 + }, 
+ "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6515151515151515, + "acc_stderr": 0.033948539651564025, + "acc_norm": 0.6515151515151515, + "acc_norm_stderr": 0.033948539651564025 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5172413793103449, + "acc_stderr": 0.04164188720169375, + "acc_norm": 0.5172413793103449, + "acc_norm_stderr": 0.04164188720169375 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.04533838195929776, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.04533838195929776 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5378151260504201, + "acc_stderr": 0.0323854694875898, + "acc_norm": 0.5378151260504201, + "acc_norm_stderr": 0.0323854694875898 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4897435897435897, + "acc_stderr": 0.025345672221942374, + "acc_norm": 0.4897435897435897, + "acc_norm_stderr": 0.025345672221942374 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.04605661864718381, + "acc_norm": 0.3, + "acc_norm_stderr": 0.04605661864718381 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5092592592592593, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.5092592592592593, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4187192118226601, + "acc_stderr": 0.03471192860518468, + "acc_norm": 0.4187192118226601, + "acc_norm_stderr": 0.03471192860518468 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5032258064516129, + "acc_stderr": 0.028443414226438306, + "acc_norm": 0.5032258064516129, + "acc_norm_stderr": 
0.028443414226438306 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7008547008547008, + "acc_stderr": 0.02999695185834948, + "acc_norm": 0.7008547008547008, + "acc_norm_stderr": 0.02999695185834948 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5018867924528302, + "acc_stderr": 0.030772653642075657, + "acc_norm": 0.5018867924528302, + "acc_norm_stderr": 0.030772653642075657 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5, + "acc_stderr": 0.04789131426105757, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04789131426105757 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.027309140588230175, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.027309140588230175 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3708609271523179, + "acc_stderr": 0.03943966699183629, + "acc_norm": 0.3708609271523179, + "acc_norm_stderr": 0.03943966699183629 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6268656716417911, + "acc_stderr": 0.034198326081760065, + "acc_norm": 0.6268656716417911, + "acc_norm_stderr": 0.034198326081760065 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.41040462427745666, + "acc_stderr": 0.03750757044895538, + "acc_norm": 0.41040462427745666, + "acc_norm_stderr": 0.03750757044895538 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.34656084656084657, + "acc_stderr": 0.024508777521028424, + "acc_norm": 0.34656084656084657, + "acc_norm_stderr": 0.024508777521028424 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3958333333333333, + "acc_stderr": 0.04089465449325582, + "acc_norm": 0.3958333333333333, + "acc_norm_stderr": 0.04089465449325582 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.66, + "acc_stderr": 0.04760952285695237, + "acc_norm": 0.66, + "acc_norm_stderr": 
0.04760952285695237 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5115606936416185, + "acc_stderr": 0.026911898686377927, + "acc_norm": 0.5115606936416185, + "acc_norm_stderr": 0.026911898686377927 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.49079754601226994, + "acc_stderr": 0.039277056007874414, + "acc_norm": 0.49079754601226994, + "acc_norm_stderr": 0.039277056007874414 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.02764847787741332, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.02764847787741332 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939098, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939098 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6113989637305699, + "acc_stderr": 0.03517739796373132, + "acc_norm": 0.6113989637305699, + "acc_norm_stderr": 0.03517739796373132 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.30701754385964913, + "acc_stderr": 0.04339138322579858, + "acc_norm": 0.30701754385964913, + "acc_norm_stderr": 0.04339138322579858 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6256880733944954, + "acc_stderr": 0.020748959408988316, + "acc_norm": 0.6256880733944954, + "acc_norm_stderr": 0.020748959408988316 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.04134913018303316, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.04134913018303316 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5032679738562091, + "acc_stderr": 0.028629305194003543, + "acc_norm": 0.5032679738562091, + "acc_norm_stderr": 0.028629305194003543 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6446280991735537, + "acc_stderr": 0.04369236326573981, + "acc_norm": 
0.6446280991735537, + "acc_norm_stderr": 0.04369236326573981 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.48026315789473684, + "acc_stderr": 0.040657710025626036, + "acc_norm": 0.48026315789473684, + "acc_norm_stderr": 0.040657710025626036 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.42320261437908496, + "acc_stderr": 0.01998780976948206, + "acc_norm": 0.42320261437908496, + "acc_norm_stderr": 0.01998780976948206 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3617021276595745, + "acc_stderr": 0.028663820147199492, + "acc_norm": 0.3617021276595745, + "acc_norm_stderr": 0.028663820147199492 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3125, + "acc_stderr": 0.043994650575715215, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.043994650575715215 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.033247089118091176, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.033247089118091176 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.264804469273743, + "acc_stderr": 0.014756906483260657, + "acc_norm": 0.264804469273743, + "acc_norm_stderr": 0.014756906483260657 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.53, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.53, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4338235294117647, + "acc_stderr": 0.030105636570016633, + "acc_norm": 0.4338235294117647, + "acc_norm_stderr": 0.030105636570016633 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5224489795918368, + "acc_stderr": 0.03197694118713672, + "acc_norm": 0.5224489795918368, + "acc_norm_stderr": 0.03197694118713672 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6033755274261603, + 
"acc_stderr": 0.03184399873811224, + "acc_norm": 0.6033755274261603, + "acc_norm_stderr": 0.03184399873811224 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3259452411994785, + "acc_stderr": 0.011971507294982784, + "acc_norm": 0.3259452411994785, + "acc_norm_stderr": 0.011971507294982784 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.553921568627451, + "acc_stderr": 0.034888454513049734, + "acc_norm": 0.553921568627451, + "acc_norm_stderr": 0.034888454513049734 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5636363636363636, + "acc_stderr": 0.03872592983524754, + "acc_norm": 0.5636363636363636, + "acc_norm_stderr": 0.03872592983524754 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.31946144430844553, + "mc1_stderr": 0.0163226441829605, + "mc2": 0.47037314932231167, + "mc2_stderr": 0.015667471817844107 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3907910271546635, + "acc_stderr": 0.01677529846510825, + "acc_norm": 0.47461629279811096, + "acc_norm_stderr": 0.017168187201429246 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + 
"harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "maywell/Synatra-Yi-Ko-6B", + "model_sha": "3a572e4ac6159199eec6844716963088b74330de", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, 
+ "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/maywell/Synatra-Zephyr-7B-v0.01/result_2023-11-01 00:27:10.json b/maywell/Synatra-Zephyr-7B-v0.01/result_2023-11-01 00:27:10.json new file mode 100644 index 0000000000000000000000000000000000000000..cd78023d75d8512be674fb2a90696f43872654f8 --- /dev/null +++ b/maywell/Synatra-Zephyr-7B-v0.01/result_2023-11-01 00:27:10.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.386518771331058, + "acc_stderr": 0.014230084761910473, + "acc_norm": 0.43686006825938567, + "acc_norm_stderr": 0.01449442158425651 + }, + "harness|ko_hellaswag|10": { + "acc": 0.393945429197371, + "acc_stderr": 0.004876243842318603, + "acc_norm": 0.5140410276837284, + "acc_norm_stderr": 0.004987813548019091 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4853801169590643, + "acc_stderr": 0.038331852752130205, + "acc_norm": 0.4853801169590643, + "acc_norm_stderr": 0.038331852752130205 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6019417475728155, + "acc_stderr": 0.04846748253977238, + "acc_norm": 0.6019417475728155, + "acc_norm_stderr": 0.04846748253977238 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4955300127713921, + "acc_stderr": 0.01787924897058439, + "acc_norm": 0.4955300127713921, + "acc_norm_stderr": 0.01787924897058439 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34814814814814815, + "acc_stderr": 0.041153246103369526, + "acc_norm": 0.34814814814814815, + "acc_norm_stderr": 0.041153246103369526 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4340425531914894, + "acc_stderr": 0.032400380867927465, + "acc_norm": 0.4340425531914894, + "acc_norm_stderr": 0.032400380867927465 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.41566265060240964, 
+ "acc_stderr": 0.038367221765980515, + "acc_norm": 0.41566265060240964, + "acc_norm_stderr": 0.038367221765980515 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4565916398713826, + "acc_stderr": 0.028290869054197604, + "acc_norm": 0.4565916398713826, + "acc_norm_stderr": 0.028290869054197604 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4977578475336323, + "acc_stderr": 0.03355746535223263, + "acc_norm": 0.4977578475336323, + "acc_norm_stderr": 0.03355746535223263 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5190839694656488, + "acc_stderr": 0.04382094705550988, + "acc_norm": 0.5190839694656488, + "acc_norm_stderr": 0.04382094705550988 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.03547601494006937, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.03547601494006937 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.496551724137931, + "acc_stderr": 0.04166567577101579, + "acc_norm": 0.496551724137931, + "acc_norm_stderr": 0.04166567577101579 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237655, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237655 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5, + "acc_stderr": 0.032478490123081544, + "acc_norm": 0.5, + "acc_norm_stderr": 0.032478490123081544 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.49743589743589745, + "acc_stderr": 0.025350672979412205, + "acc_norm": 0.49743589743589745, + "acc_norm_stderr": 0.025350672979412205 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.55, + "acc_stderr": 0.04999999999999999, + "acc_norm": 0.55, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + 
"acc_stderr": 0.04793724854411018, + "acc_norm": 0.35, + "acc_norm_stderr": 0.04793724854411018 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4088669950738916, + "acc_stderr": 0.034590588158832314, + "acc_norm": 0.4088669950738916, + "acc_norm_stderr": 0.034590588158832314 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.46774193548387094, + "acc_stderr": 0.02838474778881333, + "acc_norm": 0.46774193548387094, + "acc_norm_stderr": 0.02838474778881333 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7264957264957265, + "acc_stderr": 0.029202540153431187, + "acc_norm": 0.7264957264957265, + "acc_norm_stderr": 0.029202540153431187 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.46037735849056605, + "acc_stderr": 0.030676096599389174, + "acc_norm": 0.46037735849056605, + "acc_norm_stderr": 0.030676096599389174 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4636363636363636, + "acc_stderr": 0.047764491623961985, + "acc_norm": 0.4636363636363636, + "acc_norm_stderr": 0.047764491623961985 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3074074074074074, + "acc_stderr": 0.028133252578815632, + "acc_norm": 0.3074074074074074, + "acc_norm_stderr": 0.028133252578815632 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.32450331125827814, + "acc_stderr": 0.03822746937658752, + "acc_norm": 0.32450331125827814, + "acc_norm_stderr": 0.03822746937658752 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5920398009950248, + "acc_stderr": 0.03475116365194092, + "acc_norm": 0.5920398009950248, + "acc_norm_stderr": 0.03475116365194092 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3988439306358382, + "acc_stderr": 0.03733626655383509, + "acc_norm": 0.3988439306358382, + "acc_norm_stderr": 0.03733626655383509 + }, + 
"harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.38095238095238093, + "acc_stderr": 0.02501074911613759, + "acc_norm": 0.38095238095238093, + "acc_norm_stderr": 0.02501074911613759 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4027777777777778, + "acc_stderr": 0.04101405519842425, + "acc_norm": 0.4027777777777778, + "acc_norm_stderr": 0.04101405519842425 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5115606936416185, + "acc_stderr": 0.026911898686377913, + "acc_norm": 0.5115606936416185, + "acc_norm_stderr": 0.026911898686377913 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.49693251533742333, + "acc_stderr": 0.03928297078179663, + "acc_norm": 0.49693251533742333, + "acc_norm_stderr": 0.03928297078179663 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.41975308641975306, + "acc_stderr": 0.027460099557005135, + "acc_norm": 0.41975308641975306, + "acc_norm_stderr": 0.027460099557005135 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.04605661864718381, + "acc_norm": 0.3, + "acc_norm_stderr": 0.04605661864718381 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5181347150259067, + "acc_stderr": 0.036060650018329185, + "acc_norm": 0.5181347150259067, + "acc_norm_stderr": 0.036060650018329185 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.32456140350877194, + "acc_stderr": 0.044045561573747685, + "acc_norm": 0.32456140350877194, + "acc_norm_stderr": 0.044045561573747685 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5009174311926605, + "acc_stderr": 0.021437287056051215, + "acc_norm": 0.5009174311926605, + "acc_norm_stderr": 
0.021437287056051215 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.373015873015873, + "acc_stderr": 0.04325506042017086, + "acc_norm": 0.373015873015873, + "acc_norm_stderr": 0.04325506042017086 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.45098039215686275, + "acc_stderr": 0.028491993586171566, + "acc_norm": 0.45098039215686275, + "acc_norm_stderr": 0.028491993586171566 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6776859504132231, + "acc_stderr": 0.04266416363352168, + "acc_norm": 0.6776859504132231, + "acc_norm_stderr": 0.04266416363352168 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40789473684210525, + "acc_stderr": 0.03999309712777472, + "acc_norm": 0.40789473684210525, + "acc_norm_stderr": 0.03999309712777472 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.35947712418300654, + "acc_stderr": 0.01941253924203216, + "acc_norm": 0.35947712418300654, + "acc_norm_stderr": 0.01941253924203216 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.33687943262411346, + "acc_stderr": 0.028195534873966734, + "acc_norm": 0.33687943262411346, + "acc_norm_stderr": 0.028195534873966734 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.30357142857142855, + "acc_stderr": 0.04364226155841044, + "acc_norm": 0.30357142857142855, + "acc_norm_stderr": 0.04364226155841044 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.38425925925925924, + "acc_stderr": 0.03317354514310742, + "acc_norm": 0.38425925925925924, + "acc_norm_stderr": 0.03317354514310742 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2782122905027933, + "acc_stderr": 0.014987325439963554, + "acc_norm": 0.2782122905027933, + "acc_norm_stderr": 0.014987325439963554 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + 
"acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.63, + "acc_stderr": 0.04852365870939098, + "acc_norm": 0.63, + "acc_norm_stderr": 0.04852365870939098 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3602941176470588, + "acc_stderr": 0.029163128570670733, + "acc_norm": 0.3602941176470588, + "acc_norm_stderr": 0.029163128570670733 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5306122448979592, + "acc_stderr": 0.031949171367580624, + "acc_norm": 0.5306122448979592, + "acc_norm_stderr": 0.031949171367580624 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5611814345991561, + "acc_stderr": 0.032302649315470375, + "acc_norm": 0.5611814345991561, + "acc_norm_stderr": 0.032302649315470375 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3272490221642764, + "acc_stderr": 0.01198381980646477, + "acc_norm": 0.3272490221642764, + "acc_norm_stderr": 0.01198381980646477 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.39215686274509803, + "acc_stderr": 0.03426712349247272, + "acc_norm": 0.39215686274509803, + "acc_norm_stderr": 0.03426712349247272 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4121212121212121, + "acc_stderr": 0.03843566993588718, + "acc_norm": 0.4121212121212121, + "acc_norm_stderr": 0.03843566993588718 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.30599755201958384, + "mc1_stderr": 0.01613222972815506, + "mc2": 0.4754957310754645, + "mc2_stderr": 0.01547689288911229 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3907910271546635, + "acc_stderr": 0.01677529846510825, + "acc_norm": 0.4309327036599764, + "acc_norm_stderr": 0.017025558196043136 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + 
"harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + 
"harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "maywell/Synatra-Zephyr-7B-v0.01", + "model_sha": "d6fba97659714b6fcb81b15acb9b5729ffada374", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/maywell/Synatra-Zephyr-7B-v0.02/result_2023-11-02 02:28:13.json b/maywell/Synatra-Zephyr-7B-v0.02/result_2023-11-02 02:28:13.json new file mode 100644 index 0000000000000000000000000000000000000000..0ec9c41d7291f0da31a487c8fd7bb16d515941dd --- /dev/null +++ b/maywell/Synatra-Zephyr-7B-v0.02/result_2023-11-02 02:28:13.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3993174061433447, + "acc_stderr": 0.0143120945579467, + "acc_norm": 0.4616040955631399, + "acc_norm_stderr": 0.014568245550296356 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3943437562238598, + "acc_stderr": 0.004877104939356235, + "acc_norm": 0.5110535749850628, + "acc_norm_stderr": 0.004988561944277397 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4678362573099415, + "acc_stderr": 0.03826882417660369, + "acc_norm": 0.4678362573099415, + "acc_norm_stderr": 0.03826882417660369 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6213592233009708, + "acc_stderr": 0.04802694698258975, + "acc_norm": 0.6213592233009708, + "acc_norm_stderr": 0.04802694698258975 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 
0.5070242656449553, + "acc_stderr": 0.017878199003432217, + "acc_norm": 0.5070242656449553, + "acc_norm_stderr": 0.017878199003432217 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.43703703703703706, + "acc_stderr": 0.04284958639753399, + "acc_norm": 0.43703703703703706, + "acc_norm_stderr": 0.04284958639753399 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39574468085106385, + "acc_stderr": 0.03196758697835363, + "acc_norm": 0.39574468085106385, + "acc_norm_stderr": 0.03196758697835363 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42771084337349397, + "acc_stderr": 0.03851597683718533, + "acc_norm": 0.42771084337349397, + "acc_norm_stderr": 0.03851597683718533 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5080385852090032, + "acc_stderr": 0.028394421370984538, + "acc_norm": 0.5080385852090032, + "acc_norm_stderr": 0.028394421370984538 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5201793721973094, + "acc_stderr": 0.033530461674123, + "acc_norm": 0.5201793721973094, + "acc_norm_stderr": 0.033530461674123 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.40458015267175573, + "acc_stderr": 0.043046937953806645, + "acc_norm": 0.40458015267175573, + "acc_norm_stderr": 0.043046937953806645 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709390974, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709390974 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5858585858585859, + "acc_stderr": 0.03509438348879629, + "acc_norm": 0.5858585858585859, + "acc_norm_stderr": 0.03509438348879629 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.496551724137931, + "acc_stderr": 0.04166567577101579, + "acc_norm": 0.496551724137931, + "acc_norm_stderr": 0.04166567577101579 + }, + "harness|ko_mmlu_college_physics|5": { + 
"acc": 0.23529411764705882, + "acc_stderr": 0.04220773659171452, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.04220773659171452 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4957983193277311, + "acc_stderr": 0.0324773433444811, + "acc_norm": 0.4957983193277311, + "acc_norm_stderr": 0.0324773433444811 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4794871794871795, + "acc_stderr": 0.025329663163489943, + "acc_norm": 0.4794871794871795, + "acc_norm_stderr": 0.025329663163489943 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.04793724854411019, + "acc_norm": 0.35, + "acc_norm_stderr": 0.04793724854411019 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.048262172941398944, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.048262172941398944 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4187192118226601, + "acc_stderr": 0.03471192860518468, + "acc_norm": 0.4187192118226601, + "acc_norm_stderr": 0.03471192860518468 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4612903225806452, + "acc_stderr": 0.02835863485983692, + "acc_norm": 0.4612903225806452, + "acc_norm_stderr": 0.02835863485983692 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7008547008547008, + "acc_stderr": 0.029996951858349472, + "acc_norm": 0.7008547008547008, + "acc_norm_stderr": 0.029996951858349472 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4377358490566038, + "acc_stderr": 0.030533338430467512, + "acc_norm": 0.4377358490566038, + "acc_norm_stderr": 0.030533338430467512 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4727272727272727, + "acc_stderr": 0.04782001791380063, + "acc_norm": 0.4727272727272727, + "acc_norm_stderr": 0.04782001791380063 + }, + 
"harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2851851851851852, + "acc_stderr": 0.027528599210340492, + "acc_norm": 0.2851851851851852, + "acc_norm_stderr": 0.027528599210340492 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.037101857261199946, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.037101857261199946 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.572139303482587, + "acc_stderr": 0.03498541988407795, + "acc_norm": 0.572139303482587, + "acc_norm_stderr": 0.03498541988407795 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3872832369942196, + "acc_stderr": 0.03714325906302065, + "acc_norm": 0.3872832369942196, + "acc_norm_stderr": 0.03714325906302065 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.38095238095238093, + "acc_stderr": 0.02501074911613759, + "acc_norm": 0.38095238095238093, + "acc_norm_stderr": 0.02501074911613759 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3402777777777778, + "acc_stderr": 0.03962135573486219, + "acc_norm": 0.3402777777777778, + "acc_norm_stderr": 0.03962135573486219 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5520231213872833, + "acc_stderr": 0.02677299065336182, + "acc_norm": 0.5520231213872833, + "acc_norm_stderr": 0.02677299065336182 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5521472392638037, + "acc_stderr": 0.03906947479456607, + "acc_norm": 0.5521472392638037, + "acc_norm_stderr": 0.03906947479456607 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.43209876543209874, + "acc_stderr": 0.027563010971606676, + "acc_norm": 0.43209876543209874, + "acc_norm_stderr": 
0.027563010971606676 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5440414507772021, + "acc_stderr": 0.035944137112724366, + "acc_norm": 0.5440414507772021, + "acc_norm_stderr": 0.035944137112724366 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3157894736842105, + "acc_stderr": 0.04372748290278007, + "acc_norm": 0.3157894736842105, + "acc_norm_stderr": 0.04372748290278007 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5064220183486239, + "acc_stderr": 0.021435554820013077, + "acc_norm": 0.5064220183486239, + "acc_norm_stderr": 0.021435554820013077 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.36507936507936506, + "acc_stderr": 0.043062412591271526, + "acc_norm": 0.36507936507936506, + "acc_norm_stderr": 0.043062412591271526 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.45098039215686275, + "acc_stderr": 0.028491993586171566, + "acc_norm": 0.45098039215686275, + "acc_norm_stderr": 0.028491993586171566 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6776859504132231, + "acc_stderr": 0.042664163633521685, + "acc_norm": 0.6776859504132231, + "acc_norm_stderr": 0.042664163633521685 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40131578947368424, + "acc_stderr": 0.03988903703336284, + "acc_norm": 0.40131578947368424, + "acc_norm_stderr": 0.03988903703336284 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.39869281045751637, + "acc_stderr": 0.019808281317449845, + "acc_norm": 0.39869281045751637, + "acc_norm_stderr": 0.019808281317449845 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3191489361702128, + "acc_stderr": 
0.027807990141320207, + "acc_norm": 0.3191489361702128, + "acc_norm_stderr": 0.027807990141320207 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.04547960999764376, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.04547960999764376 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3194444444444444, + "acc_stderr": 0.03179876342176851, + "acc_norm": 0.3194444444444444, + "acc_norm_stderr": 0.03179876342176851 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.22905027932960895, + "acc_stderr": 0.014054314935614553, + "acc_norm": 0.22905027932960895, + "acc_norm_stderr": 0.014054314935614553 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.2977941176470588, + "acc_stderr": 0.027778298701545443, + "acc_norm": 0.2977941176470588, + "acc_norm_stderr": 0.027778298701545443 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5142857142857142, + "acc_stderr": 0.031996152328062855, + "acc_norm": 0.5142857142857142, + "acc_norm_stderr": 0.031996152328062855 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5907172995780591, + "acc_stderr": 0.03200704183359591, + "acc_norm": 0.5907172995780591, + "acc_norm_stderr": 0.03200704183359591 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3344198174706649, + "acc_stderr": 0.012049668983214933, + "acc_norm": 0.3344198174706649, + "acc_norm_stderr": 0.012049668983214933 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4264705882352941, + "acc_stderr": 0.034711579079534254, + "acc_norm": 0.4264705882352941, + "acc_norm_stderr": 0.034711579079534254 + }, + 
"harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4727272727272727, + "acc_stderr": 0.03898531605579419, + "acc_norm": 0.4727272727272727, + "acc_norm_stderr": 0.03898531605579419 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2876376988984088, + "mc1_stderr": 0.015846315101394816, + "mc2": 0.4569644659417388, + "mc2_stderr": 0.01530492284436498 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.42384887839433294, + "acc_stderr": 0.016989810834628256, + "acc_norm": 0.4592680047225502, + "acc_norm_stderr": 0.017133218276537673 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + 
"harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "maywell/Synatra-Zephyr-7B-v0.02", + "model_sha": "5b6d8ea1b45ed20c3ecf0ddb6cc41141250352ca", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/maywell/Synatra_TbST02M_IN01/result_2023-10-16 09:50:44.json b/maywell/Synatra_TbST02M_IN01/result_2023-10-16 09:50:44.json new file mode 100644 index 0000000000000000000000000000000000000000..9d580520178c1f1e95d1221a9d99b9033dd0679b --- /dev/null +++ b/maywell/Synatra_TbST02M_IN01/result_2023-10-16 09:50:44.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 
0.3097269624573379, + "acc_stderr": 0.01351205841523836, + "acc_norm": 0.38310580204778155, + "acc_norm_stderr": 0.014206472661672877 + }, + "harness|ko_hellaswag|10": { + "acc": 0.35331607249551883, + "acc_stderr": 0.004770229206838901, + "acc_norm": 0.4451304521011751, + "acc_norm_stderr": 0.004959645263390238 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4678362573099415, + "acc_stderr": 0.03826882417660369, + "acc_norm": 0.4678362573099415, + "acc_norm_stderr": 0.03826882417660369 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.46601941747572817, + "acc_stderr": 0.0493929144727348, + "acc_norm": 0.46601941747572817, + "acc_norm_stderr": 0.0493929144727348 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.49169859514687103, + "acc_stderr": 0.017877498991072008, + "acc_norm": 0.49169859514687103, + "acc_norm_stderr": 0.017877498991072008 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37777777777777777, + "acc_stderr": 0.04188307537595853, + "acc_norm": 0.37777777777777777, + "acc_norm_stderr": 0.04188307537595853 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4, + "acc_stderr": 0.03202563076101735, + "acc_norm": 0.4, + "acc_norm_stderr": 0.03202563076101735 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3674698795180723, + "acc_stderr": 0.03753267402120574, + "acc_norm": 0.3674698795180723, + "acc_norm_stderr": 0.03753267402120574 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5080385852090032, + "acc_stderr": 0.028394421370984538, + "acc_norm": 0.5080385852090032, + "acc_norm_stderr": 0.028394421370984538 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.42152466367713004, + "acc_stderr": 0.03314190222110658, + "acc_norm": 0.42152466367713004, + "acc_norm_stderr": 0.03314190222110658 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5038167938931297, 
+ "acc_stderr": 0.04385162325601553, + "acc_norm": 0.5038167938931297, + "acc_norm_stderr": 0.04385162325601553 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5757575757575758, + "acc_stderr": 0.03521224908841586, + "acc_norm": 0.5757575757575758, + "acc_norm_stderr": 0.03521224908841586 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4827586206896552, + "acc_stderr": 0.04164188720169377, + "acc_norm": 0.4827586206896552, + "acc_norm_stderr": 0.04164188720169377 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04690650298201942, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04690650298201942 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.03214536859788639, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.03214536859788639 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.45897435897435895, + "acc_stderr": 0.025265525491284295, + "acc_norm": 0.45897435897435895, + "acc_norm_stderr": 0.025265525491284295 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.55, + "acc_stderr": 0.04999999999999999, + "acc_norm": 0.55, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.04826217294139894, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.04826217294139894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.37438423645320196, + "acc_stderr": 0.03405155380561952, + "acc_norm": 0.37438423645320196, + "acc_norm_stderr": 0.03405155380561952 + }, + "harness|ko_mmlu_high_school_biology|5": { + 
"acc": 0.44193548387096776, + "acc_stderr": 0.02825155790684974, + "acc_norm": 0.44193548387096776, + "acc_norm_stderr": 0.02825155790684974 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6965811965811965, + "acc_stderr": 0.03011821010694265, + "acc_norm": 0.6965811965811965, + "acc_norm_stderr": 0.03011821010694265 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5132075471698113, + "acc_stderr": 0.030762134874500476, + "acc_norm": 0.5132075471698113, + "acc_norm_stderr": 0.030762134874500476 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.45454545454545453, + "acc_stderr": 0.04769300568972743, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.04769300568972743 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3037037037037037, + "acc_stderr": 0.02803792996911499, + "acc_norm": 0.3037037037037037, + "acc_norm_stderr": 0.02803792996911499 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2781456953642384, + "acc_stderr": 0.03658603262763743, + "acc_norm": 0.2781456953642384, + "acc_norm_stderr": 0.03658603262763743 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5323383084577115, + "acc_stderr": 0.03528131472933607, + "acc_norm": 0.5323383084577115, + "acc_norm_stderr": 0.03528131472933607 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4161849710982659, + "acc_stderr": 0.037585177754049466, + "acc_norm": 0.4161849710982659, + "acc_norm_stderr": 0.037585177754049466 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.31746031746031744, + "acc_stderr": 0.023973861998992062, + "acc_norm": 0.31746031746031744, + "acc_norm_stderr": 0.023973861998992062 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3263888888888889, + "acc_stderr": 0.03921067198982266, + "acc_norm": 0.3263888888888889, + "acc_norm_stderr": 0.03921067198982266 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 
0.04923659639173309 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237101, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237101 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.49421965317919075, + "acc_stderr": 0.026917296179149116, + "acc_norm": 0.49421965317919075, + "acc_norm_stderr": 0.026917296179149116 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.39263803680981596, + "acc_stderr": 0.03836740907831029, + "acc_norm": 0.39263803680981596, + "acc_norm_stderr": 0.03836740907831029 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.45987654320987653, + "acc_stderr": 0.027731022753539274, + "acc_norm": 0.45987654320987653, + "acc_norm_stderr": 0.027731022753539274 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.538860103626943, + "acc_stderr": 0.035975244117345775, + "acc_norm": 0.538860103626943, + "acc_norm_stderr": 0.035975244117345775 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.041857744240220575, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.041857744240220575 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5064220183486239, + "acc_stderr": 0.021435554820013077, + "acc_norm": 0.5064220183486239, + "acc_norm_stderr": 0.021435554820013077 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.38095238095238093, + "acc_stderr": 0.043435254289490965, + "acc_norm": 0.38095238095238093, + "acc_norm_stderr": 0.043435254289490965 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5, + "acc_stderr": 0.028629916715693413, + "acc_norm": 0.5, + "acc_norm_stderr": 0.028629916715693413 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956911 
+ }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.043913262867240704, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.043913262867240704 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4276315789473684, + "acc_stderr": 0.040260970832965585, + "acc_norm": 0.4276315789473684, + "acc_norm_stderr": 0.040260970832965585 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.36764705882352944, + "acc_stderr": 0.01950629169395486, + "acc_norm": 0.36764705882352944, + "acc_norm_stderr": 0.01950629169395486 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2978723404255319, + "acc_stderr": 0.027281608344469414, + "acc_norm": 0.2978723404255319, + "acc_norm_stderr": 0.027281608344469414 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4017857142857143, + "acc_stderr": 0.04653333146973646, + "acc_norm": 0.4017857142857143, + "acc_norm_stderr": 0.04653333146973646 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4398148148148148, + "acc_stderr": 0.033851779760448106, + "acc_norm": 0.4398148148148148, + "acc_norm_stderr": 0.033851779760448106 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.3039106145251397, + "acc_stderr": 0.01538284558758452, + "acc_norm": 0.3039106145251397, + "acc_norm_stderr": 0.01538284558758452 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4338235294117647, + "acc_stderr": 0.030105636570016636, + "acc_norm": 0.4338235294117647, + "acc_norm_stderr": 0.030105636570016636 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5346938775510204, + "acc_stderr": 0.03193207024425314, + 
"acc_norm": 0.5346938775510204, + "acc_norm_stderr": 0.03193207024425314 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5864978902953587, + "acc_stderr": 0.03205649904851858, + "acc_norm": 0.5864978902953587, + "acc_norm_stderr": 0.03205649904851858 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3011734028683181, + "acc_stderr": 0.01171714875164844, + "acc_norm": 0.3011734028683181, + "acc_norm_stderr": 0.01171714875164844 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4215686274509804, + "acc_stderr": 0.03465868196380757, + "acc_norm": 0.4215686274509804, + "acc_norm_stderr": 0.03465868196380757 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.0390369864774844, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.0390369864774844 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.33047735618115054, + "mc1_stderr": 0.016466769613698293, + "mc2": 0.5058685155948915, + "mc2_stderr": 0.01583111147395693 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.29043683589138136, + "acc_stderr": 0.01560760256981463, + "acc_norm": 0.38134592680047225, + "acc_norm_stderr": 0.01669930176882808 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + 
"harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "maywell/Synatra_TbST02M_IN01", + "model_sha": 
"ba8eef9720471e65dc86e856d2a3812da8b53527", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/maywell/Synatra_TbST11B_EP01/result_2023-10-18 07:35:40.json b/maywell/Synatra_TbST11B_EP01/result_2023-10-18 07:35:40.json new file mode 100644 index 0000000000000000000000000000000000000000..3768b9cd40b203d165bdf3045bf51101b0ef7856 --- /dev/null +++ b/maywell/Synatra_TbST11B_EP01/result_2023-10-18 07:35:40.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.34812286689419797, + "acc_stderr": 0.013921008595179344, + "acc_norm": 0.40784982935153585, + "acc_norm_stderr": 0.014361097288449691 + }, + "harness|ko_hellaswag|10": { + "acc": 0.36875124477195775, + "acc_stderr": 0.0048148030984368085, + "acc_norm": 0.4722166899024099, + "acc_norm_stderr": 0.004982072108448084 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.45614035087719296, + "acc_stderr": 0.03820042586602966, + "acc_norm": 0.45614035087719296, + "acc_norm_stderr": 0.03820042586602966 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5145631067961165, + "acc_stderr": 0.04948637324026637, + "acc_norm": 0.5145631067961165, + "acc_norm_stderr": 0.04948637324026637 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4840357598978289, + "acc_stderr": 0.01787084750608173, + "acc_norm": 0.4840357598978289, + "acc_norm_stderr": 0.01787084750608173 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.35555555555555557, + "acc_stderr": 0.04135176749720386, + "acc_norm": 0.35555555555555557, + "acc_norm_stderr": 0.04135176749720386 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3574468085106383, + "acc_stderr": 0.03132941789476425, + "acc_norm": 
0.3574468085106383, + "acc_norm_stderr": 0.03132941789476425 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3795180722891566, + "acc_stderr": 0.03777798822748016, + "acc_norm": 0.3795180722891566, + "acc_norm_stderr": 0.03777798822748016 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5016077170418006, + "acc_stderr": 0.02839794490780661, + "acc_norm": 0.5016077170418006, + "acc_norm_stderr": 0.02839794490780661 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.38565022421524664, + "acc_stderr": 0.03266842214289201, + "acc_norm": 0.38565022421524664, + "acc_norm_stderr": 0.03266842214289201 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4351145038167939, + "acc_stderr": 0.04348208051644858, + "acc_norm": 0.4351145038167939, + "acc_norm_stderr": 0.04348208051644858 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5808080808080808, + "acc_stderr": 0.035155207286704175, + "acc_norm": 0.5808080808080808, + "acc_norm_stderr": 0.035155207286704175 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4827586206896552, + "acc_stderr": 0.04164188720169377, + "acc_norm": 0.4827586206896552, + "acc_norm_stderr": 0.04164188720169377 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.045338381959297736, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.045338381959297736 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.47478991596638653, + "acc_stderr": 0.0324371805513741, + "acc_norm": 0.47478991596638653, + "acc_norm_stderr": 0.0324371805513741 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4461538461538462, + "acc_stderr": 0.025203571773028333, + "acc_norm": 0.4461538461538462, + "acc_norm_stderr": 0.025203571773028333 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.5, + 
"acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.37438423645320196, + "acc_stderr": 0.03405155380561952, + "acc_norm": 0.37438423645320196, + "acc_norm_stderr": 0.03405155380561952 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.432258064516129, + "acc_stderr": 0.02818173972001941, + "acc_norm": 0.432258064516129, + "acc_norm_stderr": 0.02818173972001941 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.030882736974138653, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.030882736974138653 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5094339622641509, + "acc_stderr": 0.030767394707808107, + "acc_norm": 0.5094339622641509, + "acc_norm_stderr": 0.030767394707808107 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4090909090909091, + "acc_stderr": 0.04709306978661896, + "acc_norm": 0.4090909090909091, + "acc_norm_stderr": 0.04709306978661896 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3074074074074074, + "acc_stderr": 0.028133252578815642, + "acc_norm": 0.3074074074074074, + "acc_norm_stderr": 0.028133252578815642 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3443708609271523, + "acc_stderr": 0.03879687024073327, + "acc_norm": 0.3443708609271523, + "acc_norm_stderr": 0.03879687024073327 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6069651741293532, + "acc_stderr": 0.0345368246603156, + "acc_norm": 0.6069651741293532, + "acc_norm_stderr": 0.0345368246603156 + }, + "harness|ko_mmlu_college_medicine|5": 
{ + "acc": 0.4393063583815029, + "acc_stderr": 0.037842719328874674, + "acc_norm": 0.4393063583815029, + "acc_norm_stderr": 0.037842719328874674 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3544973544973545, + "acc_stderr": 0.024636830602841997, + "acc_norm": 0.3544973544973545, + "acc_norm_stderr": 0.024636830602841997 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3472222222222222, + "acc_stderr": 0.039812405437178615, + "acc_norm": 0.3472222222222222, + "acc_norm_stderr": 0.039812405437178615 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.55, + "acc_stderr": 0.05, + "acc_norm": 0.55, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4884393063583815, + "acc_stderr": 0.026911898686377913, + "acc_norm": 0.4884393063583815, + "acc_norm_stderr": 0.026911898686377913 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.44171779141104295, + "acc_stderr": 0.039015918258361836, + "acc_norm": 0.44171779141104295, + "acc_norm_stderr": 0.039015918258361836 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.43209876543209874, + "acc_stderr": 0.02756301097160668, + "acc_norm": 0.43209876543209874, + "acc_norm_stderr": 0.02756301097160668 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.47668393782383417, + "acc_stderr": 0.03604513672442205, + "acc_norm": 0.47668393782383417, + "acc_norm_stderr": 0.03604513672442205 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2982456140350877, + "acc_stderr": 0.04303684033537317, + "acc_norm": 0.2982456140350877, + "acc_norm_stderr": 0.04303684033537317 + }, + "harness|ko_mmlu_high_school_psychology|5": { + 
"acc": 0.48807339449541287, + "acc_stderr": 0.021431223617362223, + "acc_norm": 0.48807339449541287, + "acc_norm_stderr": 0.021431223617362223 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.29365079365079366, + "acc_stderr": 0.04073524322147124, + "acc_norm": 0.29365079365079366, + "acc_norm_stderr": 0.04073524322147124 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4673202614379085, + "acc_stderr": 0.02856869975222588, + "acc_norm": 0.4673202614379085, + "acc_norm_stderr": 0.02856869975222588 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.628099173553719, + "acc_stderr": 0.04412015806624504, + "acc_norm": 0.628099173553719, + "acc_norm_stderr": 0.04412015806624504 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4342105263157895, + "acc_stderr": 0.040335656678483184, + "acc_norm": 0.4342105263157895, + "acc_norm_stderr": 0.040335656678483184 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3660130718954248, + "acc_stderr": 0.019488025745529658, + "acc_norm": 0.3660130718954248, + "acc_norm_stderr": 0.019488025745529658 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32269503546099293, + "acc_stderr": 0.027889139300534792, + "acc_norm": 0.32269503546099293, + "acc_norm_stderr": 0.027889139300534792 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3125, + "acc_stderr": 0.043994650575715215, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.043994650575715215 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.032757734861009996, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.032757734861009996 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23016759776536314, + "acc_stderr": 0.014078339253425807, + "acc_norm": 0.23016759776536314, + "acc_norm_stderr": 0.014078339253425807 + }, + 
"harness|ko_mmlu_college_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3860294117647059, + "acc_stderr": 0.029573269134411124, + "acc_norm": 0.3860294117647059, + "acc_norm_stderr": 0.029573269134411124 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4775510204081633, + "acc_stderr": 0.03197694118713673, + "acc_norm": 0.4775510204081633, + "acc_norm_stderr": 0.03197694118713673 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5864978902953587, + "acc_stderr": 0.03205649904851858, + "acc_norm": 0.5864978902953587, + "acc_norm_stderr": 0.03205649904851858 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3155149934810952, + "acc_stderr": 0.011869184843058643, + "acc_norm": 0.3155149934810952, + "acc_norm_stderr": 0.011869184843058643 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.49019607843137253, + "acc_stderr": 0.03508637358630572, + "acc_norm": 0.49019607843137253, + "acc_norm_stderr": 0.03508637358630572 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5151515151515151, + "acc_stderr": 0.03902551007374448, + "acc_norm": 0.5151515151515151, + "acc_norm_stderr": 0.03902551007374448 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2766217870257038, + "mc1_stderr": 0.015659605755326912, + "mc2": 0.4484601943910918, + "mc2_stderr": 0.015458891626438749 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3482880755608028, + "acc_stderr": 0.01637992673914804, + "acc_norm": 0.39787485242030696, + "acc_norm_stderr": 0.016827959054733388 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + 
"harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + 
"harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "maywell/Synatra_TbST11B_EP01", + "model_sha": "76b40393481aa567733eff5107dd4b6944862fdd", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/maywell/TinyWand-DPO/result_2024-01-04 06:52:31.json b/maywell/TinyWand-DPO/result_2024-01-04 06:52:31.json new file mode 100644 index 0000000000000000000000000000000000000000..1dd0045035b7f2766f91f9abc01c1ac8896a94b2 --- /dev/null +++ b/maywell/TinyWand-DPO/result_2024-01-04 06:52:31.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2721843003412969, + "acc_stderr": 0.01300660040642371, + "acc_norm": 0.3097269624573379, + "acc_norm_stderr": 0.013512058415238363 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3408683529177455, + "acc_stderr": 0.004730324556624142, + "acc_norm": 0.40450109539932283, + "acc_norm_stderr": 0.004897921845492104 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.3391812865497076, + "acc_stderr": 0.03631053496488905, + "acc_norm": 0.3391812865497076, + "acc_norm_stderr": 0.03631053496488905 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.22330097087378642, + "acc_stderr": 0.04123553189891431, + "acc_norm": 0.22330097087378642, + "acc_norm_stderr": 0.04123553189891431 + }, + "harness|ko_mmlu_miscellaneous|5": { + 
"acc": 0.24776500638569604, + "acc_stderr": 0.015438083080568958, + "acc_norm": 0.24776500638569604, + "acc_norm_stderr": 0.015438083080568958 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.03785714465066656, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.03785714465066656 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.22127659574468084, + "acc_stderr": 0.027136349602424063, + "acc_norm": 0.22127659574468084, + "acc_norm_stderr": 0.027136349602424063 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.27710843373493976, + "acc_stderr": 0.03484331592680588, + "acc_norm": 0.27710843373493976, + "acc_norm_stderr": 0.03484331592680588 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.27009646302250806, + "acc_stderr": 0.02521804037341063, + "acc_norm": 0.27009646302250806, + "acc_norm_stderr": 0.02521804037341063 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.24663677130044842, + "acc_stderr": 0.028930413120910877, + "acc_norm": 0.24663677130044842, + "acc_norm_stderr": 0.028930413120910877 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.31297709923664124, + "acc_stderr": 0.04066962905677698, + "acc_norm": 0.31297709923664124, + "acc_norm_stderr": 0.04066962905677698 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.31, + "acc_stderr": 0.046482319871173156, + "acc_norm": 0.31, + "acc_norm_stderr": 0.046482319871173156 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.2828282828282828, + "acc_stderr": 0.03208779558786752, + "acc_norm": 0.2828282828282828, + "acc_norm_stderr": 0.03208779558786752 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.296551724137931, + "acc_stderr": 0.03806142687309994, + "acc_norm": 0.296551724137931, + "acc_norm_stderr": 0.03806142687309994 + }, + 
"harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.04220773659171453, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.04220773659171453 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3025210084033613, + "acc_stderr": 0.02983796238829193, + "acc_norm": 0.3025210084033613, + "acc_norm_stderr": 0.02983796238829193 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.31025641025641026, + "acc_stderr": 0.02345467488940429, + "acc_norm": 0.31025641025641026, + "acc_norm_stderr": 0.02345467488940429 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.24074074074074073, + "acc_stderr": 0.041331194402438376, + "acc_norm": 0.24074074074074073, + "acc_norm_stderr": 0.041331194402438376 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.21674876847290642, + "acc_stderr": 0.028990331252516235, + "acc_norm": 0.21674876847290642, + "acc_norm_stderr": 0.028990331252516235 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.27419354838709675, + "acc_stderr": 0.025378139970885196, + "acc_norm": 0.27419354838709675, + "acc_norm_stderr": 0.025378139970885196 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.23504273504273504, + "acc_stderr": 0.02777883590493543, + "acc_norm": 0.23504273504273504, + "acc_norm_stderr": 0.02777883590493543 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.22264150943396227, + "acc_stderr": 0.0256042334708991, + "acc_norm": 0.22264150943396227, + "acc_norm_stderr": 0.0256042334708991 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2545454545454545, + "acc_stderr": 0.041723430387053825, + "acc_norm": 0.2545454545454545, + 
"acc_norm_stderr": 0.041723430387053825 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.02696242432507384, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.02696242432507384 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2251655629139073, + "acc_stderr": 0.03410435282008937, + "acc_norm": 0.2251655629139073, + "acc_norm_stderr": 0.03410435282008937 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.24875621890547264, + "acc_stderr": 0.030567675938916707, + "acc_norm": 0.24875621890547264, + "acc_norm_stderr": 0.030567675938916707 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2254335260115607, + "acc_stderr": 0.03186209851641144, + "acc_norm": 0.2254335260115607, + "acc_norm_stderr": 0.03186209851641144 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.24867724867724866, + "acc_stderr": 0.022261817692400175, + "acc_norm": 0.24867724867724866, + "acc_norm_stderr": 0.022261817692400175 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2708333333333333, + "acc_stderr": 0.03716177437566016, + "acc_norm": 0.2708333333333333, + "acc_norm_stderr": 0.03716177437566016 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.18, + "acc_stderr": 0.038612291966536955, + "acc_norm": 0.18, + "acc_norm_stderr": 0.038612291966536955 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816503, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816503 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2398843930635838, + "acc_stderr": 0.022989592543123563, + "acc_norm": 0.2398843930635838, + "acc_norm_stderr": 0.022989592543123563 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.25766871165644173, + "acc_stderr": 0.03436150827846917, + "acc_norm": 0.25766871165644173, + "acc_norm_stderr": 0.03436150827846917 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.23765432098765432, + "acc_stderr": 0.023683591837008557, + 
"acc_norm": 0.23765432098765432, + "acc_norm_stderr": 0.023683591837008557 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.29015544041450775, + "acc_stderr": 0.03275264467791515, + "acc_norm": 0.29015544041450775, + "acc_norm_stderr": 0.03275264467791515 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.20175438596491227, + "acc_stderr": 0.03775205013583638, + "acc_norm": 0.20175438596491227, + "acc_norm_stderr": 0.03775205013583638 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.20733944954128442, + "acc_stderr": 0.017381415563608664, + "acc_norm": 0.20733944954128442, + "acc_norm_stderr": 0.017381415563608664 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.040061680838488774, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.040061680838488774 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.02392915551735129, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.02392915551735129 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2809917355371901, + "acc_stderr": 0.04103203830514512, + "acc_norm": 0.2809917355371901, + "acc_norm_stderr": 0.04103203830514512 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.16447368421052633, + "acc_stderr": 0.03016753346863268, + "acc_norm": 0.16447368421052633, + "acc_norm_stderr": 0.03016753346863268 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.22875816993464052, + "acc_stderr": 0.016992723465466222, + "acc_norm": 0.22875816993464052, + "acc_norm_stderr": 0.016992723465466222 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 
0.2695035460992908, + "acc_stderr": 0.026469036818590627, + "acc_norm": 0.2695035460992908, + "acc_norm_stderr": 0.026469036818590627 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.23214285714285715, + "acc_stderr": 0.04007341809755807, + "acc_norm": 0.23214285714285715, + "acc_norm_stderr": 0.04007341809755807 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.30092592592592593, + "acc_stderr": 0.031280390843298804, + "acc_norm": 0.30092592592592593, + "acc_norm_stderr": 0.031280390843298804 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23910614525139665, + "acc_stderr": 0.014265554192331149, + "acc_norm": 0.23910614525139665, + "acc_norm_stderr": 0.014265554192331149 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3713235294117647, + "acc_stderr": 0.02934980313976587, + "acc_norm": 0.3713235294117647, + "acc_norm_stderr": 0.02934980313976587 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2938775510204082, + "acc_stderr": 0.029162738410249765, + "acc_norm": 0.2938775510204082, + "acc_norm_stderr": 0.029162738410249765 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.26582278481012656, + "acc_stderr": 0.028756799629658335, + "acc_norm": 0.26582278481012656, + "acc_norm_stderr": 0.028756799629658335 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.23272490221642764, + "acc_stderr": 0.010792595553888475, + "acc_norm": 0.23272490221642764, + "acc_norm_stderr": 0.010792595553888475 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.031321798030832904, + "acc_norm": 0.27450980392156865, + 
"acc_norm_stderr": 0.031321798030832904 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.18181818181818182, + "acc_stderr": 0.03011768892950359, + "acc_norm": 0.18181818181818182, + "acc_norm_stderr": 0.03011768892950359 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2937576499388005, + "mc1_stderr": 0.01594506858123662, + "mc2": 0.4757373738339913, + "mc2_stderr": 0.01599620170938502 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.24911452184179456, + "acc_stderr": 0.014869665316691259, + "acc_norm": 0.29161747343565525, + "acc_norm_stderr": 0.01562627669007024 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + 
"harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "maywell/TinyWand-DPO", + "model_sha": "f99e694e6a252f7a4b5ab4828121c434fd873391", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/maywell/TinyWand-SFT/result_2024-01-04 01:03:10.json b/maywell/TinyWand-SFT/result_2024-01-04 01:03:10.json new file mode 100644 index 0000000000000000000000000000000000000000..a9ff7d14344949aed3ec81464edc1c885836157c --- /dev/null +++ b/maywell/TinyWand-SFT/result_2024-01-04 01:03:10.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + 
"acc": 0.26621160409556316, + "acc_stderr": 0.012915774781523207, + "acc_norm": 0.30802047781569963, + "acc_norm_stderr": 0.013491429517292038 + }, + "harness|ko_hellaswag|10": { + "acc": 0.33300139414459273, + "acc_stderr": 0.004703238534045805, + "acc_norm": 0.40529774945230035, + "acc_norm_stderr": 0.004899462111832337 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.03377310252209195, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.03377310252209195 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2524271844660194, + "acc_stderr": 0.04301250399690877, + "acc_norm": 0.2524271844660194, + "acc_norm_stderr": 0.04301250399690877 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2567049808429119, + "acc_stderr": 0.01562048026306454, + "acc_norm": 0.2567049808429119, + "acc_norm_stderr": 0.01562048026306454 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2814814814814815, + "acc_stderr": 0.03885004245800254, + "acc_norm": 0.2814814814814815, + "acc_norm_stderr": 0.03885004245800254 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3191489361702128, + "acc_stderr": 0.030472973363380052, + "acc_norm": 0.3191489361702128, + "acc_norm_stderr": 0.030472973363380052 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.2710843373493976, + "acc_stderr": 0.03460579907553027, + "acc_norm": 0.2710843373493976, + "acc_norm_stderr": 0.03460579907553027 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.29260450160771706, + "acc_stderr": 0.025839898334877983, + "acc_norm": 0.29260450160771706, + "acc_norm_stderr": 0.025839898334877983 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.35874439461883406, + "acc_stderr": 0.03219079200419996, + "acc_norm": 0.35874439461883406, + "acc_norm_stderr": 0.03219079200419996 + }, + 
"harness|ko_mmlu_human_sexuality|5": { + "acc": 0.24427480916030533, + "acc_stderr": 0.037683359597287434, + "acc_norm": 0.24427480916030533, + "acc_norm_stderr": 0.037683359597287434 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.19696969696969696, + "acc_stderr": 0.028335609732463345, + "acc_norm": 0.19696969696969696, + "acc_norm_stderr": 0.028335609732463345 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2206896551724138, + "acc_stderr": 0.03455930201924811, + "acc_norm": 0.2206896551724138, + "acc_norm_stderr": 0.03455930201924811 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.04023382273617746, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.04023382273617746 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.23109243697478993, + "acc_stderr": 0.027381406927868966, + "acc_norm": 0.23109243697478993, + "acc_norm_stderr": 0.027381406927868966 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3, + "acc_stderr": 0.0232345810884285, + "acc_norm": 0.3, + "acc_norm_stderr": 0.0232345810884285 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.18, + "acc_stderr": 0.038612291966536955, + "acc_norm": 0.18, + "acc_norm_stderr": 0.038612291966536955 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.28703703703703703, + "acc_stderr": 0.043733130409147614, + "acc_norm": 0.28703703703703703, + "acc_norm_stderr": 0.043733130409147614 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2315270935960591, + "acc_stderr": 0.02967833314144446, + "acc_norm": 0.2315270935960591, + "acc_norm_stderr": 0.02967833314144446 + }, 
+ "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.25806451612903225, + "acc_stderr": 0.024892469172462843, + "acc_norm": 0.25806451612903225, + "acc_norm_stderr": 0.024892469172462843 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.25213675213675213, + "acc_stderr": 0.02844796547623102, + "acc_norm": 0.25213675213675213, + "acc_norm_stderr": 0.02844796547623102 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.27169811320754716, + "acc_stderr": 0.027377706624670713, + "acc_norm": 0.27169811320754716, + "acc_norm_stderr": 0.027377706624670713 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2909090909090909, + "acc_stderr": 0.04350271442923243, + "acc_norm": 0.2909090909090909, + "acc_norm_stderr": 0.04350271442923243 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3074074074074074, + "acc_stderr": 0.028133252578815642, + "acc_norm": 0.3074074074074074, + "acc_norm_stderr": 0.028133252578815642 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.1986754966887417, + "acc_stderr": 0.032578473844367746, + "acc_norm": 0.1986754966887417, + "acc_norm_stderr": 0.032578473844367746 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.23880597014925373, + "acc_stderr": 0.030147775935409217, + "acc_norm": 0.23880597014925373, + "acc_norm_stderr": 0.030147775935409217 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2254335260115607, + "acc_stderr": 0.03186209851641144, + "acc_norm": 0.2254335260115607, + "acc_norm_stderr": 0.03186209851641144 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.24867724867724866, + "acc_stderr": 0.022261817692400175, + "acc_norm": 0.24867724867724866, + "acc_norm_stderr": 0.022261817692400175 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.22916666666666666, + "acc_stderr": 0.03514697467862388, + "acc_norm": 0.22916666666666666, + "acc_norm_stderr": 0.03514697467862388 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.16, + "acc_stderr": 
0.03684529491774709, + "acc_norm": 0.16, + "acc_norm_stderr": 0.03684529491774709 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816503, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816503 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.22254335260115607, + "acc_stderr": 0.02239421566194282, + "acc_norm": 0.22254335260115607, + "acc_norm_stderr": 0.02239421566194282 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.26993865030674846, + "acc_stderr": 0.034878251684978906, + "acc_norm": 0.26993865030674846, + "acc_norm_stderr": 0.034878251684978906 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.024383665531035457, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.024383665531035457 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.2694300518134715, + "acc_stderr": 0.032018671228777947, + "acc_norm": 0.2694300518134715, + "acc_norm_stderr": 0.032018671228777947 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.21052631578947367, + "acc_stderr": 0.0383515395439942, + "acc_norm": 0.21052631578947367, + "acc_norm_stderr": 0.0383515395439942 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.22935779816513763, + "acc_stderr": 0.018025349724618688, + "acc_norm": 0.22935779816513763, + "acc_norm_stderr": 0.018025349724618688 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.037184890068181146, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.037184890068181146 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2679738562091503, + "acc_stderr": 0.025360603796242564, + "acc_norm": 0.2679738562091503, + "acc_norm_stderr": 0.025360603796242564 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.3, + 
"acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.24793388429752067, + "acc_stderr": 0.039418975265163025, + "acc_norm": 0.24793388429752067, + "acc_norm_stderr": 0.039418975265163025 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.18421052631578946, + "acc_stderr": 0.0315469804508223, + "acc_norm": 0.18421052631578946, + "acc_norm_stderr": 0.0315469804508223 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.21405228758169934, + "acc_stderr": 0.016593429662329028, + "acc_norm": 0.21405228758169934, + "acc_norm_stderr": 0.016593429662329028 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2695035460992908, + "acc_stderr": 0.026469036818590624, + "acc_norm": 0.2695035460992908, + "acc_norm_stderr": 0.026469036818590624 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25, + "acc_stderr": 0.04109974682633932, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04109974682633932 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.30092592592592593, + "acc_stderr": 0.03128039084329879, + "acc_norm": 0.30092592592592593, + "acc_norm_stderr": 0.03128039084329879 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23910614525139665, + "acc_stderr": 0.014265554192331149, + "acc_norm": 0.23910614525139665, + "acc_norm_stderr": 0.014265554192331149 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.33088235294117646, + "acc_stderr": 0.028582709753898452, + "acc_norm": 0.33088235294117646, + "acc_norm_stderr": 0.028582709753898452 + }, + "harness|ko_mmlu_security_studies|5": { 
+ "acc": 0.3306122448979592, + "acc_stderr": 0.0301164262965406, + "acc_norm": 0.3306122448979592, + "acc_norm_stderr": 0.0301164262965406 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.27848101265822783, + "acc_stderr": 0.029178682304842555, + "acc_norm": 0.27848101265822783, + "acc_norm_stderr": 0.029178682304842555 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.24119947848761408, + "acc_stderr": 0.010926496102034952, + "acc_norm": 0.24119947848761408, + "acc_norm_stderr": 0.010926496102034952 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.24019607843137256, + "acc_stderr": 0.02998373305591362, + "acc_norm": 0.24019607843137256, + "acc_norm_stderr": 0.02998373305591362 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.034531318018854146, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.034531318018854146 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.22643818849449204, + "mc1_stderr": 0.014651337324602578, + "mc2": 0.4564824257149778, + "mc2_stderr": 0.016998445695245345 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.24911452184179456, + "acc_stderr": 0.014869665316691271, + "acc_norm": 0.3435655253837072, + "acc_norm_stderr": 0.016327334806429138 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + 
"harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + 
"harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "maywell/TinyWand-SFT", + "model_sha": "ac1dffae8e8a8324fdac7a266a8ce82e6d033577", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/maywell/koOpenChat-sft/result_2023-11-14 13:43:42.json b/maywell/koOpenChat-sft/result_2023-11-14 13:43:42.json new file mode 100644 index 0000000000000000000000000000000000000000..28bd2ede500410d61b52f46d41cb1714f99f3a48 --- /dev/null +++ b/maywell/koOpenChat-sft/result_2023-11-14 13:43:42.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.39419795221843, + "acc_stderr": 0.014280522667467328, + "acc_norm": 0.44880546075085326, + "acc_norm_stderr": 0.014534599585097669 + }, + "harness|ko_hellaswag|10": { + "acc": 0.37860983867755427, + "acc_stderr": 0.004840493603166217, + "acc_norm": 0.4903405696076479, + "acc_norm_stderr": 0.004988850185477489 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4269005847953216, + "acc_stderr": 0.03793620616529917, + "acc_norm": 0.4269005847953216, + "acc_norm_stderr": 0.03793620616529917 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5825242718446602, + "acc_stderr": 0.048828405482122375, + "acc_norm": 0.5825242718446602, + "acc_norm_stderr": 0.048828405482122375 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5044699872286079, + "acc_stderr": 0.017879248970584356, + "acc_norm": 0.5044699872286079, + "acc_norm_stderr": 0.017879248970584356 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3925925925925926, + "acc_stderr": 0.042185062153688786, + "acc_norm": 0.3925925925925926, + "acc_norm_stderr": 0.042185062153688786 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + 
"harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.451063829787234, + "acc_stderr": 0.032529096196131965, + "acc_norm": 0.451063829787234, + "acc_norm_stderr": 0.032529096196131965 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.40963855421686746, + "acc_stderr": 0.03828401115079023, + "acc_norm": 0.40963855421686746, + "acc_norm_stderr": 0.03828401115079023 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5434083601286174, + "acc_stderr": 0.028290869054197598, + "acc_norm": 0.5434083601286174, + "acc_norm_stderr": 0.028290869054197598 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.49327354260089684, + "acc_stderr": 0.03355476596234353, + "acc_norm": 0.49327354260089684, + "acc_norm_stderr": 0.03355476596234353 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48854961832061067, + "acc_stderr": 0.043841400240780176, + "acc_norm": 0.48854961832061067, + "acc_norm_stderr": 0.043841400240780176 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6111111111111112, + "acc_stderr": 0.0347327959083696, + "acc_norm": 0.6111111111111112, + "acc_norm_stderr": 0.0347327959083696 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5103448275862069, + "acc_stderr": 0.04165774775728763, + "acc_norm": 0.5103448275862069, + "acc_norm_stderr": 0.04165774775728763 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.04533838195929776, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.04533838195929776 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5168067226890757, + "acc_stderr": 0.03246013680375308, + "acc_norm": 0.5168067226890757, + "acc_norm_stderr": 0.03246013680375308 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.47435897435897434, + "acc_stderr": 0.02531764972644865, + "acc_norm": 
0.47435897435897434, + "acc_norm_stderr": 0.02531764972644865 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.57, + "acc_stderr": 0.04975698519562429, + "acc_norm": 0.57, + "acc_norm_stderr": 0.04975698519562429 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.04832853553437056, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.04832853553437056 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3645320197044335, + "acc_stderr": 0.0338640574606209, + "acc_norm": 0.3645320197044335, + "acc_norm_stderr": 0.0338640574606209 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4838709677419355, + "acc_stderr": 0.028429203176724555, + "acc_norm": 0.4838709677419355, + "acc_norm_stderr": 0.028429203176724555 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7350427350427351, + "acc_stderr": 0.02891120880274946, + "acc_norm": 0.7350427350427351, + "acc_norm_stderr": 0.02891120880274946 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.49056603773584906, + "acc_stderr": 0.030767394707808086, + "acc_norm": 0.49056603773584906, + "acc_norm_stderr": 0.030767394707808086 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5636363636363636, + "acc_stderr": 0.04750185058907296, + "acc_norm": 0.5636363636363636, + "acc_norm_stderr": 0.04750185058907296 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.37777777777777777, + "acc_stderr": 0.029560707392465718, + "acc_norm": 0.37777777777777777, + "acc_norm_stderr": 0.029560707392465718 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.038020397601079024, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.038020397601079024 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6467661691542289, + "acc_stderr": 
0.03379790611796777, + "acc_norm": 0.6467661691542289, + "acc_norm_stderr": 0.03379790611796777 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4277456647398844, + "acc_stderr": 0.03772446857518027, + "acc_norm": 0.4277456647398844, + "acc_norm_stderr": 0.03772446857518027 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3862433862433862, + "acc_stderr": 0.02507598176760168, + "acc_norm": 0.3862433862433862, + "acc_norm_stderr": 0.02507598176760168 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3680555555555556, + "acc_stderr": 0.040329990539607195, + "acc_norm": 0.3680555555555556, + "acc_norm_stderr": 0.040329990539607195 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.523121387283237, + "acc_stderr": 0.026890297881303125, + "acc_norm": 0.523121387283237, + "acc_norm_stderr": 0.026890297881303125 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4662576687116564, + "acc_stderr": 0.039194155450484096, + "acc_norm": 0.4662576687116564, + "acc_norm_stderr": 0.039194155450484096 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4567901234567901, + "acc_stderr": 0.02771666165019404, + "acc_norm": 0.4567901234567901, + "acc_norm_stderr": 0.02771666165019404 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6010362694300518, + "acc_stderr": 0.035339990940656964, + "acc_norm": 0.6010362694300518, + "acc_norm_stderr": 0.035339990940656964 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2894736842105263, + 
"acc_stderr": 0.04266339443159394, + "acc_norm": 0.2894736842105263, + "acc_norm_stderr": 0.04266339443159394 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5559633027522936, + "acc_stderr": 0.021302621211654518, + "acc_norm": 0.5559633027522936, + "acc_norm_stderr": 0.021302621211654518 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.373015873015873, + "acc_stderr": 0.04325506042017086, + "acc_norm": 0.373015873015873, + "acc_norm_stderr": 0.04325506042017086 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4738562091503268, + "acc_stderr": 0.028590752958852394, + "acc_norm": 0.4738562091503268, + "acc_norm_stderr": 0.028590752958852394 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6446280991735537, + "acc_stderr": 0.0436923632657398, + "acc_norm": 0.6446280991735537, + "acc_norm_stderr": 0.0436923632657398 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.47368421052631576, + "acc_stderr": 0.04063302731486671, + "acc_norm": 0.47368421052631576, + "acc_norm_stderr": 0.04063302731486671 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3954248366013072, + "acc_stderr": 0.01978046595477752, + "acc_norm": 0.3954248366013072, + "acc_norm_stderr": 0.01978046595477752 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.35815602836879434, + "acc_stderr": 0.028602085862759415, + "acc_norm": 0.35815602836879434, + "acc_norm_stderr": 0.028602085862759415 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.5, + "acc_stderr": 0.04745789978762494, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04745789978762494 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4351851851851852, + "acc_stderr": 0.033812000056435254, + "acc_norm": 0.4351851851851852, + "acc_norm_stderr": 0.033812000056435254 + }, + "harness|ko_mmlu_moral_scenarios|5": { + 
"acc": 0.2335195530726257, + "acc_stderr": 0.01414957534897627, + "acc_norm": 0.2335195530726257, + "acc_norm_stderr": 0.01414957534897627 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3860294117647059, + "acc_stderr": 0.029573269134411127, + "acc_norm": 0.3860294117647059, + "acc_norm_stderr": 0.029573269134411127 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5142857142857142, + "acc_stderr": 0.03199615232806286, + "acc_norm": 0.5142857142857142, + "acc_norm_stderr": 0.03199615232806286 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5907172995780591, + "acc_stderr": 0.03200704183359591, + "acc_norm": 0.5907172995780591, + "acc_norm_stderr": 0.03200704183359591 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.35071707953063885, + "acc_stderr": 0.012187773370741523, + "acc_norm": 0.35071707953063885, + "acc_norm_stderr": 0.012187773370741523 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.47058823529411764, + "acc_stderr": 0.03503235296367992, + "acc_norm": 0.47058823529411764, + "acc_norm_stderr": 0.03503235296367992 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4727272727272727, + "acc_stderr": 0.03898531605579419, + "acc_norm": 0.4727272727272727, + "acc_norm_stderr": 0.03898531605579419 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2937576499388005, + "mc1_stderr": 0.015945068581236614, + "mc2": 0.4711085048103087, + "mc2_stderr": 0.015340563202217064 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.32113341204250295, + "acc_stderr": 0.016052762579111573, + "acc_norm": 0.36835891381345925, + "acc_norm_stderr": 0.016583858982639074 
+ } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + 
"harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "maywell/koOpenChat-sft", + "model_sha": "47472b36e181694422564b130ee075ffa596537d", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/maywell/ko_ocgn_ep0-4/result_2023-11-11 23:51:11.json b/maywell/ko_ocgn_ep0-4/result_2023-11-11 23:51:11.json new file mode 100644 index 0000000000000000000000000000000000000000..413674674ea4c8697176915c1d5b6cf3c832ab79 --- /dev/null +++ b/maywell/ko_ocgn_ep0-4/result_2023-11-11 23:51:11.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.371160409556314, + "acc_stderr": 0.014117971901142818, + "acc_norm": 0.43600682593856654, + "acc_norm_stderr": 0.014491225699230916 + }, + "harness|ko_hellaswag|10": { + "acc": 0.36058554072893845, + "acc_stderr": 0.0047918906258342, + "acc_norm": 0.45558653654650466, + "acc_norm_stderr": 0.004970057183367316 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.47953216374269003, + "acc_stderr": 0.0383161053282193, + "acc_norm": 0.47953216374269003, + "acc_norm_stderr": 0.0383161053282193 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6407766990291263, + "acc_stderr": 
0.04750458399041697, + "acc_norm": 0.6407766990291263, + "acc_norm_stderr": 0.04750458399041697 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4878671775223499, + "acc_stderr": 0.01787469866749135, + "acc_norm": 0.4878671775223499, + "acc_norm_stderr": 0.01787469866749135 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.32592592592592595, + "acc_stderr": 0.04049122041702506, + "acc_norm": 0.32592592592592595, + "acc_norm_stderr": 0.04049122041702506 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4297872340425532, + "acc_stderr": 0.03236214467715564, + "acc_norm": 0.4297872340425532, + "acc_norm_stderr": 0.03236214467715564 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4036144578313253, + "acc_stderr": 0.03819486140758398, + "acc_norm": 0.4036144578313253, + "acc_norm_stderr": 0.03819486140758398 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4212218649517685, + "acc_stderr": 0.02804339985821063, + "acc_norm": 0.4212218649517685, + "acc_norm_stderr": 0.02804339985821063 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.47533632286995514, + "acc_stderr": 0.033516951676526276, + "acc_norm": 0.47533632286995514, + "acc_norm_stderr": 0.033516951676526276 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4351145038167939, + "acc_stderr": 0.04348208051644858, + "acc_norm": 0.4351145038167939, + "acc_norm_stderr": 0.04348208051644858 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5959595959595959, + "acc_stderr": 0.03496130972056128, + "acc_norm": 0.5959595959595959, + "acc_norm_stderr": 0.03496130972056128 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.41379310344827586, + "acc_stderr": 
0.04104269211806232, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.04104269211806232 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.04158307533083286, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.04158307533083286 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5168067226890757, + "acc_stderr": 0.03246013680375308, + "acc_norm": 0.5168067226890757, + "acc_norm_stderr": 0.03246013680375308 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4282051282051282, + "acc_stderr": 0.02508830145469484, + "acc_norm": 0.4282051282051282, + "acc_norm_stderr": 0.02508830145469484 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5370370370370371, + "acc_stderr": 0.04820403072760627, + "acc_norm": 0.5370370370370371, + "acc_norm_stderr": 0.04820403072760627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39408866995073893, + "acc_stderr": 0.03438157967036545, + "acc_norm": 0.39408866995073893, + "acc_norm_stderr": 0.03438157967036545 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4483870967741935, + "acc_stderr": 0.028292056830112728, + "acc_norm": 0.4483870967741935, + "acc_norm_stderr": 0.028292056830112728 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6965811965811965, + "acc_stderr": 0.030118210106942638, + "acc_norm": 0.6965811965811965, + "acc_norm_stderr": 0.030118210106942638 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.47547169811320755, + "acc_stderr": 0.030735822206205615, + "acc_norm": 0.47547169811320755, + "acc_norm_stderr": 0.030735822206205615 + }, + "harness|ko_mmlu_public_relations|5": { + 
"acc": 0.5454545454545454, + "acc_stderr": 0.04769300568972744, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.04769300568972744 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.337037037037037, + "acc_stderr": 0.02882088466625326, + "acc_norm": 0.337037037037037, + "acc_norm_stderr": 0.02882088466625326 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3443708609271523, + "acc_stderr": 0.038796870240733264, + "acc_norm": 0.3443708609271523, + "acc_norm_stderr": 0.038796870240733264 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5522388059701493, + "acc_stderr": 0.03516184772952167, + "acc_norm": 0.5522388059701493, + "acc_norm_stderr": 0.03516184772952167 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4046242774566474, + "acc_stderr": 0.0374246119388725, + "acc_norm": 0.4046242774566474, + "acc_norm_stderr": 0.0374246119388725 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3994708994708995, + "acc_stderr": 0.025225450284067877, + "acc_norm": 0.3994708994708995, + "acc_norm_stderr": 0.025225450284067877 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3541666666666667, + "acc_stderr": 0.039994111357535424, + "acc_norm": 0.3541666666666667, + "acc_norm_stderr": 0.039994111357535424 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.57, + "acc_stderr": 0.04975698519562427, + "acc_norm": 0.57, + "acc_norm_stderr": 0.04975698519562427 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4797687861271676, + "acc_stderr": 0.026897049996382875, + "acc_norm": 0.4797687861271676, + "acc_norm_stderr": 0.026897049996382875 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4723926380368098, + "acc_stderr": 0.039223782906109894, + "acc_norm": 0.4723926380368098, + "acc_norm_stderr": 0.039223782906109894 + }, + 
"harness|ko_mmlu_prehistory|5": { + "acc": 0.42592592592592593, + "acc_stderr": 0.027513747284379414, + "acc_norm": 0.42592592592592593, + "acc_norm_stderr": 0.027513747284379414 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5284974093264249, + "acc_stderr": 0.03602573571288442, + "acc_norm": 0.5284974093264249, + "acc_norm_stderr": 0.03602573571288442 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.04227054451232199, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.04227054451232199 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5229357798165137, + "acc_stderr": 0.0214147570581755, + "acc_norm": 0.5229357798165137, + "acc_norm_stderr": 0.0214147570581755 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.04285714285714281, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.04285714285714281 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4803921568627451, + "acc_stderr": 0.028607893699576066, + "acc_norm": 0.4803921568627451, + "acc_norm_stderr": 0.028607893699576066 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6446280991735537, + "acc_stderr": 0.0436923632657398, + "acc_norm": 0.6446280991735537, + "acc_norm_stderr": 0.0436923632657398 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.47368421052631576, + "acc_stderr": 0.04063302731486671, + "acc_norm": 0.47368421052631576, + "acc_norm_stderr": 0.04063302731486671 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3741830065359477, + "acc_stderr": 0.01957695312208884, + "acc_norm": 0.3741830065359477, + "acc_norm_stderr": 
0.01957695312208884 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.37943262411347517, + "acc_stderr": 0.028947338851614095, + "acc_norm": 0.37943262411347517, + "acc_norm_stderr": 0.028947338851614095 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.04697113923010213, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.04697113923010213 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4212962962962963, + "acc_stderr": 0.03367462138896078, + "acc_norm": 0.4212962962962963, + "acc_norm_stderr": 0.03367462138896078 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.21452513966480447, + "acc_stderr": 0.013728923407828846, + "acc_norm": 0.21452513966480447, + "acc_norm_stderr": 0.013728923407828846 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.36764705882352944, + "acc_stderr": 0.029289413409403192, + "acc_norm": 0.36764705882352944, + "acc_norm_stderr": 0.029289413409403192 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5102040816326531, + "acc_stderr": 0.03200255347893783, + "acc_norm": 0.5102040816326531, + "acc_norm_stderr": 0.03200255347893783 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5611814345991561, + "acc_stderr": 0.032302649315470375, + "acc_norm": 0.5611814345991561, + "acc_norm_stderr": 0.032302649315470375 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3226857887874837, + "acc_stderr": 0.011940264193195983, + "acc_norm": 0.3226857887874837, + "acc_norm_stderr": 0.011940264193195983 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.45588235294117646, + 
"acc_stderr": 0.03495624522015473, + "acc_norm": 0.45588235294117646, + "acc_norm_stderr": 0.03495624522015473 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3939393939393939, + "acc_stderr": 0.03815494308688929, + "acc_norm": 0.3939393939393939, + "acc_norm_stderr": 0.03815494308688929 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2998776009791922, + "mc1_stderr": 0.016040352966713613, + "mc2": 0.47666728212495557, + "mc2_stderr": 0.015425276853126389 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3789846517119244, + "acc_stderr": 0.01667926068422929, + "acc_norm": 0.40968122786304606, + "acc_norm_stderr": 0.016907568192219474 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + 
"harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "maywell/ko_ocgn_ep0-4", + "model_sha": "77ac30be06896d5a51a90fe5c36bcab90b26f202", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/maywell/ko_ocgn_ep1/result_2023-11-12 23:44:36.json b/maywell/ko_ocgn_ep1/result_2023-11-12 23:44:36.json new file mode 100644 index 0000000000000000000000000000000000000000..19d5e92304ed5aebdf0db5a79f1c703ef5be5f49 --- /dev/null +++ 
b/maywell/ko_ocgn_ep1/result_2023-11-12 23:44:36.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3967576791808874, + "acc_stderr": 0.014296513020180639, + "acc_norm": 0.45563139931740615, + "acc_norm_stderr": 0.014553749939306863 + }, + "harness|ko_hellaswag|10": { + "acc": 0.37661820354511055, + "acc_stderr": 0.004835475957610931, + "acc_norm": 0.4816769567815176, + "acc_norm_stderr": 0.004986429808146771 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4853801169590643, + "acc_stderr": 0.038331852752130205, + "acc_norm": 0.4853801169590643, + "acc_norm_stderr": 0.038331852752130205 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6116504854368932, + "acc_stderr": 0.04825729337356389, + "acc_norm": 0.6116504854368932, + "acc_norm_stderr": 0.04825729337356389 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4955300127713921, + "acc_stderr": 0.017879248970584384, + "acc_norm": 0.4955300127713921, + "acc_norm_stderr": 0.017879248970584384 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3851851851851852, + "acc_stderr": 0.042039210401562783, + "acc_norm": 0.3851851851851852, + "acc_norm_stderr": 0.042039210401562783 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4340425531914894, + "acc_stderr": 0.03240038086792747, + "acc_norm": 0.4340425531914894, + "acc_norm_stderr": 0.03240038086792747 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4036144578313253, + "acc_stderr": 0.03819486140758397, + "acc_norm": 0.4036144578313253, + "acc_norm_stderr": 0.03819486140758397 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4919614147909968, + "acc_stderr": 0.028394421370984538, + "acc_norm": 0.4919614147909968, + "acc_norm_stderr": 0.028394421370984538 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4484304932735426, + "acc_stderr": 
0.03337883736255098, + "acc_norm": 0.4484304932735426, + "acc_norm_stderr": 0.03337883736255098 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4580152671755725, + "acc_stderr": 0.04369802690578757, + "acc_norm": 0.4580152671755725, + "acc_norm_stderr": 0.04369802690578757 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6161616161616161, + "acc_stderr": 0.03464881675016339, + "acc_norm": 0.6161616161616161, + "acc_norm_stderr": 0.03464881675016339 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4896551724137931, + "acc_stderr": 0.04165774775728763, + "acc_norm": 0.4896551724137931, + "acc_norm_stderr": 0.04165774775728763 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.04617034827006717, + "acc_norm": 0.3137254901960784, + "acc_norm_stderr": 0.04617034827006717 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5252100840336135, + "acc_stderr": 0.03243718055137411, + "acc_norm": 0.5252100840336135, + "acc_norm_stderr": 0.03243718055137411 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4794871794871795, + "acc_stderr": 0.025329663163489943, + "acc_norm": 0.4794871794871795, + "acc_norm_stderr": 0.025329663163489943 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.55, + "acc_stderr": 0.04999999999999999, + "acc_norm": 0.55, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5277777777777778, + "acc_stderr": 0.04826217294139894, + "acc_norm": 0.5277777777777778, + "acc_norm_stderr": 0.04826217294139894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.41379310344827586, + 
"acc_stderr": 0.03465304488406796, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.03465304488406796 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.47096774193548385, + "acc_stderr": 0.028396016402761005, + "acc_norm": 0.47096774193548385, + "acc_norm_stderr": 0.028396016402761005 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7094017094017094, + "acc_stderr": 0.02974504857267408, + "acc_norm": 0.7094017094017094, + "acc_norm_stderr": 0.02974504857267408 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4981132075471698, + "acc_stderr": 0.030772653642075664, + "acc_norm": 0.4981132075471698, + "acc_norm_stderr": 0.030772653642075664 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5636363636363636, + "acc_stderr": 0.04750185058907296, + "acc_norm": 0.5636363636363636, + "acc_norm_stderr": 0.04750185058907296 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3037037037037037, + "acc_stderr": 0.028037929969114986, + "acc_norm": 0.3037037037037037, + "acc_norm_stderr": 0.028037929969114986 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.038020397601079024, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.038020397601079024 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.572139303482587, + "acc_stderr": 0.03498541988407795, + "acc_norm": 0.572139303482587, + "acc_norm_stderr": 0.03498541988407795 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3872832369942196, + "acc_stderr": 0.03714325906302065, + "acc_norm": 0.3872832369942196, + "acc_norm_stderr": 0.03714325906302065 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3783068783068783, + "acc_stderr": 0.02497695405315525, + "acc_norm": 0.3783068783068783, + "acc_norm_stderr": 0.02497695405315525 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.375, + "acc_stderr": 0.04048439222695598, + "acc_norm": 0.375, + "acc_norm_stderr": 0.04048439222695598 + }, + 
"harness|ko_mmlu_college_chemistry|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.64, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.64, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5028901734104047, + "acc_stderr": 0.02691864538323901, + "acc_norm": 0.5028901734104047, + "acc_norm_stderr": 0.02691864538323901 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4785276073619632, + "acc_stderr": 0.03924746876751129, + "acc_norm": 0.4785276073619632, + "acc_norm_stderr": 0.03924746876751129 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.02764847787741332, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.02764847787741332 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5544041450777202, + "acc_stderr": 0.0358701498607566, + "acc_norm": 0.5544041450777202, + "acc_norm_stderr": 0.0358701498607566 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2982456140350877, + "acc_stderr": 0.04303684033537315, + "acc_norm": 0.2982456140350877, + "acc_norm_stderr": 0.04303684033537315 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5522935779816514, + "acc_stderr": 0.021319754962425455, + "acc_norm": 0.5522935779816514, + "acc_norm_stderr": 0.021319754962425455 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3412698412698413, + "acc_stderr": 0.04240799327574924, + "acc_norm": 0.3412698412698413, + "acc_norm_stderr": 0.04240799327574924 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4934640522875817, + "acc_stderr": 0.028627470550556054, + "acc_norm": 0.4934640522875817, + "acc_norm_stderr": 0.028627470550556054 + }, + 
"harness|ko_mmlu_business_ethics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.628099173553719, + "acc_stderr": 0.04412015806624504, + "acc_norm": 0.628099173553719, + "acc_norm_stderr": 0.04412015806624504 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4473684210526316, + "acc_stderr": 0.04046336883978251, + "acc_norm": 0.4473684210526316, + "acc_norm_stderr": 0.04046336883978251 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.39052287581699346, + "acc_stderr": 0.019737008998094597, + "acc_norm": 0.39052287581699346, + "acc_norm_stderr": 0.019737008998094597 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3404255319148936, + "acc_stderr": 0.028267657482650158, + "acc_norm": 0.3404255319148936, + "acc_norm_stderr": 0.028267657482650158 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.36607142857142855, + "acc_stderr": 0.04572372358737431, + "acc_norm": 0.36607142857142855, + "acc_norm_stderr": 0.04572372358737431 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.033247089118091176, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.033247089118091176 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.25139664804469275, + "acc_stderr": 0.01450897945355398, + "acc_norm": 0.25139664804469275, + "acc_norm_stderr": 0.01450897945355398 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3786764705882353, + "acc_stderr": 0.029465133639776132, + "acc_norm": 0.3786764705882353, + "acc_norm_stderr": 0.029465133639776132 + 
}, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5510204081632653, + "acc_stderr": 0.03184213866687579, + "acc_norm": 0.5510204081632653, + "acc_norm_stderr": 0.03184213866687579 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5822784810126582, + "acc_stderr": 0.032103530322412685, + "acc_norm": 0.5822784810126582, + "acc_norm_stderr": 0.032103530322412685 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.33833116036505867, + "acc_stderr": 0.012084265626344211, + "acc_norm": 0.33833116036505867, + "acc_norm_stderr": 0.012084265626344211 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.45098039215686275, + "acc_stderr": 0.03492406104163614, + "acc_norm": 0.45098039215686275, + "acc_norm_stderr": 0.03492406104163614 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.44242424242424244, + "acc_stderr": 0.03878372113711274, + "acc_norm": 0.44242424242424244, + "acc_norm_stderr": 0.03878372113711274 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2741738066095471, + "mc1_stderr": 0.015616518497219374, + "mc2": 0.4458984547809422, + "mc2_stderr": 0.015245099119242699 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3872491145218418, + "acc_stderr": 0.01674757799164279, + "acc_norm": 0.4167650531286895, + "acc_norm_stderr": 0.016950489146108826 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + 
"harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + 
"harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "maywell/ko_ocgn_ep1", + "model_sha": "ebbb63249672295f49e791b11c7204582bfb5383", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/maywell/synatra_V0.01/result_2023-10-07 13:31:59.json b/maywell/synatra_V0.01/result_2023-10-07 13:31:59.json new file mode 100644 index 0000000000000000000000000000000000000000..be02eab2a5a952083cd5d7c790b14a8dae4d02bc --- /dev/null +++ b/maywell/synatra_V0.01/result_2023-10-07 13:31:59.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2901023890784983, + "acc_stderr": 0.013261573677520769, + "acc_norm": 0.3412969283276451, + "acc_norm_stderr": 0.01385583128749772 + }, + "harness|ko_hellaswag|10": { + "acc": 0.33210515833499304, + "acc_stderr": 0.00470005967137463, + "acc_norm": 0.41585341565425216, + "acc_norm_stderr": 0.004918612098944034 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.38011695906432746, + "acc_stderr": 0.03722965741385539, + "acc_norm": 0.38011695906432746, + "acc_norm_stderr": 0.03722965741385539 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5048543689320388, + "acc_stderr": 0.04950504382128921, + "acc_norm": 0.5048543689320388, + "acc_norm_stderr": 0.04950504382128921 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3997445721583653, + "acc_stderr": 0.01751684790705327, + "acc_norm": 0.3997445721583653, + "acc_norm_stderr": 0.01751684790705327 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.31851851851851853, + "acc_stderr": 0.04024778401977111, + "acc_norm": 0.31851851851851853, + "acc_norm_stderr": 0.04024778401977111 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + 
"harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.32340425531914896, + "acc_stderr": 0.030579442773610334, + "acc_norm": 0.32340425531914896, + "acc_norm_stderr": 0.030579442773610334 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3614457831325301, + "acc_stderr": 0.03740059382029319, + "acc_norm": 0.3614457831325301, + "acc_norm_stderr": 0.03740059382029319 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4180064308681672, + "acc_stderr": 0.02801365189199507, + "acc_norm": 0.4180064308681672, + "acc_norm_stderr": 0.02801365189199507 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.34080717488789236, + "acc_stderr": 0.03181149747055359, + "acc_norm": 0.34080717488789236, + "acc_norm_stderr": 0.03181149747055359 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.44274809160305345, + "acc_stderr": 0.043564472026650695, + "acc_norm": 0.44274809160305345, + "acc_norm_stderr": 0.043564472026650695 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5252525252525253, + "acc_stderr": 0.03557806245087314, + "acc_norm": 0.5252525252525253, + "acc_norm_stderr": 0.03557806245087314 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.041042692118062316, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.041042692118062316 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04690650298201943, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04690650298201943 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.453781512605042, + "acc_stderr": 0.03233943468182087, + "acc_norm": 0.453781512605042, + "acc_norm_stderr": 0.03233943468182087 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.38974358974358975, + "acc_stderr": 0.024726967886647074, + "acc_norm": 
0.38974358974358975, + "acc_norm_stderr": 0.024726967886647074 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.04803752235190192, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.04803752235190192 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3645320197044335, + "acc_stderr": 0.0338640574606209, + "acc_norm": 0.3645320197044335, + "acc_norm_stderr": 0.0338640574606209 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.38387096774193546, + "acc_stderr": 0.027666182075539635, + "acc_norm": 0.38387096774193546, + "acc_norm_stderr": 0.027666182075539635 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6196581196581197, + "acc_stderr": 0.03180425204384099, + "acc_norm": 0.6196581196581197, + "acc_norm_stderr": 0.03180425204384099 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.37735849056603776, + "acc_stderr": 0.029832808114796005, + "acc_norm": 0.37735849056603776, + "acc_norm_stderr": 0.029832808114796005 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.41818181818181815, + "acc_stderr": 0.047245774057315705, + "acc_norm": 0.41818181818181815, + "acc_norm_stderr": 0.047245774057315705 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3074074074074074, + "acc_stderr": 0.028133252578815642, + "acc_norm": 0.3074074074074074, + "acc_norm_stderr": 0.028133252578815642 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33774834437086093, + "acc_stderr": 0.038615575462551684, + "acc_norm": 0.33774834437086093, + "acc_norm_stderr": 0.038615575462551684 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.527363184079602, + "acc_stderr": 
0.035302355173346824, + "acc_norm": 0.527363184079602, + "acc_norm_stderr": 0.035302355173346824 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.34104046242774566, + "acc_stderr": 0.036146654241808254, + "acc_norm": 0.34104046242774566, + "acc_norm_stderr": 0.036146654241808254 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3492063492063492, + "acc_stderr": 0.02455229220934266, + "acc_norm": 0.3492063492063492, + "acc_norm_stderr": 0.02455229220934266 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3125, + "acc_stderr": 0.038760854559127644, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.038760854559127644 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.53, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.53, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.3872832369942196, + "acc_stderr": 0.026226158605124655, + "acc_norm": 0.3872832369942196, + "acc_norm_stderr": 0.026226158605124655 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3619631901840491, + "acc_stderr": 0.037757007291414416, + "acc_norm": 0.3619631901840491, + "acc_norm_stderr": 0.037757007291414416 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.36419753086419754, + "acc_stderr": 0.026774929899722327, + "acc_norm": 0.36419753086419754, + "acc_norm_stderr": 0.026774929899722327 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.40414507772020725, + "acc_stderr": 0.0354150857888402, + "acc_norm": 0.40414507772020725, + "acc_norm_stderr": 0.0354150857888402 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2894736842105263, + "acc_stderr": 
0.04266339443159395, + "acc_norm": 0.2894736842105263, + "acc_norm_stderr": 0.04266339443159395 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.4055045871559633, + "acc_stderr": 0.02105099799189684, + "acc_norm": 0.4055045871559633, + "acc_norm_stderr": 0.02105099799189684 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.0442626668137991, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.0442626668137991 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3790849673202614, + "acc_stderr": 0.02778014120702333, + "acc_norm": 0.3790849673202614, + "acc_norm_stderr": 0.02778014120702333 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5619834710743802, + "acc_stderr": 0.045291468044357915, + "acc_norm": 0.5619834710743802, + "acc_norm_stderr": 0.045291468044357915 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40131578947368424, + "acc_stderr": 0.03988903703336285, + "acc_norm": 0.40131578947368424, + "acc_norm_stderr": 0.03988903703336285 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3202614379084967, + "acc_stderr": 0.018875682938069443, + "acc_norm": 0.3202614379084967, + "acc_norm_stderr": 0.018875682938069443 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3262411347517731, + "acc_stderr": 0.02796845304356317, + "acc_norm": 0.3262411347517731, + "acc_norm_stderr": 0.02796845304356317 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3125, + "acc_stderr": 0.043994650575715215, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.043994650575715215 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.44907407407407407, + "acc_stderr": 0.03392238405321617, + "acc_norm": 0.44907407407407407, + "acc_norm_stderr": 0.03392238405321617 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 
0.3106145251396648, + "acc_stderr": 0.015476515438005566, + "acc_norm": 0.3106145251396648, + "acc_norm_stderr": 0.015476515438005566 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.049999999999999996, + "acc_norm": 0.45, + "acc_norm_stderr": 0.049999999999999996 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4338235294117647, + "acc_stderr": 0.030105636570016636, + "acc_norm": 0.4338235294117647, + "acc_norm_stderr": 0.030105636570016636 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.43673469387755104, + "acc_stderr": 0.03175195237583322, + "acc_norm": 0.43673469387755104, + "acc_norm_stderr": 0.03175195237583322 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.4767932489451477, + "acc_stderr": 0.032512152011410174, + "acc_norm": 0.4767932489451477, + "acc_norm_stderr": 0.032512152011410174 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.28683181225554105, + "acc_stderr": 0.011551504781176933, + "acc_norm": 0.28683181225554105, + "acc_norm_stderr": 0.011551504781176933 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.39705882352941174, + "acc_stderr": 0.034341311647191286, + "acc_norm": 0.39705882352941174, + "acc_norm_stderr": 0.034341311647191286 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.34545454545454546, + "acc_stderr": 0.03713158067481913, + "acc_norm": 0.34545454545454546, + "acc_norm_stderr": 0.03713158067481913 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.32313341493268055, + "mc1_stderr": 0.016371836286454614, + "mc2": 0.4992370707389853, + "mc2_stderr": 0.01568220201461622 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3293978748524203, + "acc_stderr": 0.016158746868147143, + "acc_norm": 0.43211334120425027, + "acc_norm_stderr": 
0.017031170198851753 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + 
"harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "maywell/synatra_V0.01", + "model_sha": "c27df4dbc7624ea0bcbf0b0ff149d49b58713a4e", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/megastudy/M-SOLAR-10.7B-v1.3/result_2024-01-04 08:31:10.json b/megastudy/M-SOLAR-10.7B-v1.3/result_2024-01-04 08:31:10.json new file mode 100644 index 0000000000000000000000000000000000000000..43075ea67e1bb65cf89618aa74df039761871d2e --- /dev/null +++ b/megastudy/M-SOLAR-10.7B-v1.3/result_2024-01-04 08:31:10.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4684300341296928, + "acc_stderr": 0.014582236460866977, + "acc_norm": 0.5136518771331058, + "acc_norm_stderr": 0.014605943429860942 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4441346345349532, + "acc_stderr": 0.004958537988993583, + "acc_norm": 0.6093407687711612, + "acc_norm_stderr": 0.004869010152280749 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6549707602339181, + "acc_stderr": 0.036459813773888065, + "acc_norm": 0.6549707602339181, + "acc_norm_stderr": 0.036459813773888065 + }, + 
"harness|ko_mmlu_management|5": { + "acc": 0.6601941747572816, + "acc_stderr": 0.04689765937278132, + "acc_norm": 0.6601941747572816, + "acc_norm_stderr": 0.04689765937278132 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6590038314176245, + "acc_stderr": 0.016951781383223317, + "acc_norm": 0.6590038314176245, + "acc_norm_stderr": 0.016951781383223317 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45185185185185184, + "acc_stderr": 0.04299268905480863, + "acc_norm": 0.45185185185185184, + "acc_norm_stderr": 0.04299268905480863 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.5106382978723404, + "acc_stderr": 0.03267862331014063, + "acc_norm": 0.5106382978723404, + "acc_norm_stderr": 0.03267862331014063 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4819277108433735, + "acc_stderr": 0.03889951252827216, + "acc_norm": 0.4819277108433735, + "acc_norm_stderr": 0.03889951252827216 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.617363344051447, + "acc_stderr": 0.02760468902858199, + "acc_norm": 0.617363344051447, + "acc_norm_stderr": 0.02760468902858199 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.57847533632287, + "acc_stderr": 0.03314190222110658, + "acc_norm": 0.57847533632287, + "acc_norm_stderr": 0.03314190222110658 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5801526717557252, + "acc_stderr": 0.04328577215262972, + "acc_norm": 0.5801526717557252, + "acc_norm_stderr": 0.04328577215262972 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.44, + "acc_stderr": 0.049888765156985905, + "acc_norm": 0.44, + "acc_norm_stderr": 0.049888765156985905 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7525252525252525, + "acc_stderr": 0.030746300742124522, + "acc_norm": 0.7525252525252525, + "acc_norm_stderr": 0.030746300742124522 + }, + 
"harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5172413793103449, + "acc_stderr": 0.04164188720169375, + "acc_norm": 0.5172413793103449, + "acc_norm_stderr": 0.04164188720169375 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.04835503696107223, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.04835503696107223 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6428571428571429, + "acc_stderr": 0.031124619309328177, + "acc_norm": 0.6428571428571429, + "acc_norm_stderr": 0.031124619309328177 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5743589743589743, + "acc_stderr": 0.02506909438729652, + "acc_norm": 0.5743589743589743, + "acc_norm_stderr": 0.02506909438729652 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6203703703703703, + "acc_stderr": 0.04691521224077742, + "acc_norm": 0.6203703703703703, + "acc_norm_stderr": 0.04691521224077742 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4088669950738916, + "acc_stderr": 0.034590588158832314, + "acc_norm": 0.4088669950738916, + "acc_norm_stderr": 0.034590588158832314 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6, + "acc_stderr": 0.02786932057166462, + "acc_norm": 0.6, + "acc_norm_stderr": 0.02786932057166462 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7948717948717948, + "acc_stderr": 0.026453508054040353, + "acc_norm": 0.7948717948717948, + "acc_norm_stderr": 0.026453508054040353 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5547169811320755, + "acc_stderr": 0.030588052974270658, + "acc_norm": 0.5547169811320755, + "acc_norm_stderr": 
0.030588052974270658 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6090909090909091, + "acc_stderr": 0.0467375233367024, + "acc_norm": 0.6090909090909091, + "acc_norm_stderr": 0.0467375233367024 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.029723278961476664, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.029723278961476664 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3443708609271523, + "acc_stderr": 0.038796870240733264, + "acc_norm": 0.3443708609271523, + "acc_norm_stderr": 0.038796870240733264 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6517412935323383, + "acc_stderr": 0.033687874661154596, + "acc_norm": 0.6517412935323383, + "acc_norm_stderr": 0.033687874661154596 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5028901734104047, + "acc_stderr": 0.038124005659748335, + "acc_norm": 0.5028901734104047, + "acc_norm_stderr": 0.038124005659748335 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.43915343915343913, + "acc_stderr": 0.02555992055053101, + "acc_norm": 0.43915343915343913, + "acc_norm_stderr": 0.02555992055053101 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5277777777777778, + "acc_stderr": 0.04174752578923185, + "acc_norm": 0.5277777777777778, + "acc_norm_stderr": 0.04174752578923185 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.72, + "acc_stderr": 0.04512608598542126, + "acc_norm": 0.72, + "acc_norm_stderr": 0.04512608598542126 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.6127167630057804, + "acc_stderr": 0.02622615860512465, + "acc_norm": 0.6127167630057804, + "acc_norm_stderr": 0.02622615860512465 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5030674846625767, + "acc_stderr": 0.03928297078179663, + "acc_norm": 
0.5030674846625767, + "acc_norm_stderr": 0.03928297078179663 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6141975308641975, + "acc_stderr": 0.027085401226132146, + "acc_norm": 0.6141975308641975, + "acc_norm_stderr": 0.027085401226132146 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7409326424870466, + "acc_stderr": 0.03161877917935411, + "acc_norm": 0.7409326424870466, + "acc_norm_stderr": 0.03161877917935411 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.49122807017543857, + "acc_stderr": 0.047028804320496165, + "acc_norm": 0.49122807017543857, + "acc_norm_stderr": 0.047028804320496165 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6880733944954128, + "acc_stderr": 0.019862967976707245, + "acc_norm": 0.6880733944954128, + "acc_norm_stderr": 0.019862967976707245 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4523809523809524, + "acc_stderr": 0.044518079590553275, + "acc_norm": 0.4523809523809524, + "acc_norm_stderr": 0.044518079590553275 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5686274509803921, + "acc_stderr": 0.02835895631342355, + "acc_norm": 0.5686274509803921, + "acc_norm_stderr": 0.02835895631342355 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7520661157024794, + "acc_stderr": 0.039418975265163046, + "acc_norm": 0.7520661157024794, + "acc_norm_stderr": 0.039418975265163046 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.6381578947368421, + "acc_stderr": 0.03910525752849724, + "acc_norm": 0.6381578947368421, + "acc_norm_stderr": 0.03910525752849724 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5163398692810458, + "acc_stderr": 
0.02021703065318646, + "acc_norm": 0.5163398692810458, + "acc_norm_stderr": 0.02021703065318646 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.41843971631205673, + "acc_stderr": 0.02942799403941999, + "acc_norm": 0.41843971631205673, + "acc_norm_stderr": 0.02942799403941999 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4107142857142857, + "acc_stderr": 0.046695106638751926, + "acc_norm": 0.4107142857142857, + "acc_norm_stderr": 0.046695106638751926 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5324074074074074, + "acc_stderr": 0.03402801581358966, + "acc_norm": 0.5324074074074074, + "acc_norm_stderr": 0.03402801581358966 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.20335195530726258, + "acc_stderr": 0.013461351487507515, + "acc_norm": 0.20335195530726258, + "acc_norm_stderr": 0.013461351487507515 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.75, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.75, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5294117647058824, + "acc_stderr": 0.030320243265004137, + "acc_norm": 0.5294117647058824, + "acc_norm_stderr": 0.030320243265004137 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5959183673469388, + "acc_stderr": 0.03141470802586589, + "acc_norm": 0.5959183673469388, + "acc_norm_stderr": 0.03141470802586589 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7848101265822784, + "acc_stderr": 0.026750826994676152, + "acc_norm": 0.7848101265822784, + "acc_norm_stderr": 0.026750826994676152 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.40808344198174706, + "acc_stderr": 0.01255259895856367, + "acc_norm": 0.40808344198174706, + "acc_norm_stderr": 0.01255259895856367 + }, + 
"harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6911764705882353, + "acc_stderr": 0.03242661719827218, + "acc_norm": 0.6911764705882353, + "acc_norm_stderr": 0.03242661719827218 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6727272727272727, + "acc_stderr": 0.03663974994391242, + "acc_norm": 0.6727272727272727, + "acc_norm_stderr": 0.03663974994391242 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3268053855569155, + "mc1_stderr": 0.016419874731135035, + "mc2": 0.4844570721536012, + "mc2_stderr": 0.01544667219327155 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.6221959858323495, + "acc_stderr": 0.016669082840694977, + "acc_norm": 0.6753246753246753, + "acc_norm_stderr": 0.016098883939346453 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + 
"harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "megastudy/M-SOLAR-10.7B-v1.3", + "model_sha": "167e074c8939df1fc4cb338a8fae83e7261c8b1c", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/megastudyedu/M-SOLAR-10.7B-v1.3-dpo/result_2024-01-31 04:59:35.json b/megastudyedu/M-SOLAR-10.7B-v1.3-dpo/result_2024-01-31 04:59:35.json new file mode 100644 index 
0000000000000000000000000000000000000000..7375a87868a5099c66ea468bcbb59d9dbdc74093 --- /dev/null +++ b/megastudyedu/M-SOLAR-10.7B-v1.3-dpo/result_2024-01-31 04:59:35.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4718430034129693, + "acc_stderr": 0.014588204105102203, + "acc_norm": 0.5196245733788396, + "acc_norm_stderr": 0.014600132075947094 + }, + "harness|ko_hellaswag|10": { + "acc": 0.454690300736905, + "acc_stderr": 0.004969251445596324, + "acc_norm": 0.6229834694284008, + "acc_norm_stderr": 0.004836486437527261 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6491228070175439, + "acc_stderr": 0.036602988340491624, + "acc_norm": 0.6491228070175439, + "acc_norm_stderr": 0.036602988340491624 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6504854368932039, + "acc_stderr": 0.047211885060971716, + "acc_norm": 0.6504854368932039, + "acc_norm_stderr": 0.047211885060971716 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6628352490421456, + "acc_stderr": 0.016905207420803547, + "acc_norm": 0.6628352490421456, + "acc_norm_stderr": 0.016905207420803547 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3925925925925926, + "acc_stderr": 0.04218506215368878, + "acc_norm": 0.3925925925925926, + "acc_norm_stderr": 0.04218506215368878 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.48936170212765956, + "acc_stderr": 0.03267862331014063, + "acc_norm": 0.48936170212765956, + "acc_norm_stderr": 0.03267862331014063 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.46987951807228917, + "acc_stderr": 0.03885425420866766, + "acc_norm": 0.46987951807228917, + "acc_norm_stderr": 0.03885425420866766 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6077170418006431, + "acc_stderr": 0.02773125864701199, + "acc_norm": 0.6077170418006431, + "acc_norm_stderr": 
0.02773125864701199 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.6053811659192825, + "acc_stderr": 0.03280400504755291, + "acc_norm": 0.6053811659192825, + "acc_norm_stderr": 0.03280400504755291 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5801526717557252, + "acc_stderr": 0.04328577215262973, + "acc_norm": 0.5801526717557252, + "acc_norm_stderr": 0.04328577215262973 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.45, + "acc_stderr": 0.04999999999999998, + "acc_norm": 0.45, + "acc_norm_stderr": 0.04999999999999998 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7424242424242424, + "acc_stderr": 0.03115626951964685, + "acc_norm": 0.7424242424242424, + "acc_norm_stderr": 0.03115626951964685 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4896551724137931, + "acc_stderr": 0.04165774775728763, + "acc_norm": 0.4896551724137931, + "acc_norm_stderr": 0.04165774775728763 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3627450980392157, + "acc_stderr": 0.04784060704105653, + "acc_norm": 0.3627450980392157, + "acc_norm_stderr": 0.04784060704105653 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.634453781512605, + "acc_stderr": 0.03128217706368462, + "acc_norm": 0.634453781512605, + "acc_norm_stderr": 0.03128217706368462 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5948717948717949, + "acc_stderr": 0.024890471769938142, + "acc_norm": 0.5948717948717949, + "acc_norm_stderr": 0.024890471769938142 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.62, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.62, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6296296296296297, + "acc_stderr": 0.04668408033024931, + "acc_norm": 0.6296296296296297, + 
"acc_norm_stderr": 0.04668408033024931 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4236453201970443, + "acc_stderr": 0.03476725747649037, + "acc_norm": 0.4236453201970443, + "acc_norm_stderr": 0.03476725747649037 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6, + "acc_stderr": 0.02786932057166462, + "acc_norm": 0.6, + "acc_norm_stderr": 0.02786932057166462 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7863247863247863, + "acc_stderr": 0.026853450377009147, + "acc_norm": 0.7863247863247863, + "acc_norm_stderr": 0.026853450377009147 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.569811320754717, + "acc_stderr": 0.030471445867183238, + "acc_norm": 0.569811320754717, + "acc_norm_stderr": 0.030471445867183238 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5909090909090909, + "acc_stderr": 0.04709306978661895, + "acc_norm": 0.5909090909090909, + "acc_norm_stderr": 0.04709306978661895 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3851851851851852, + "acc_stderr": 0.029670906124630882, + "acc_norm": 0.3851851851851852, + "acc_norm_stderr": 0.029670906124630882 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3576158940397351, + "acc_stderr": 0.03913453431177258, + "acc_norm": 0.3576158940397351, + "acc_norm_stderr": 0.03913453431177258 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.03333333333333333, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.03333333333333333 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5144508670520231, + "acc_stderr": 0.03810871630454764, + "acc_norm": 0.5144508670520231, + "acc_norm_stderr": 0.03810871630454764 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.02568056464005688, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.02568056464005688 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5486111111111112, + "acc_stderr": 
0.04161402398403279, + "acc_norm": 0.5486111111111112, + "acc_norm_stderr": 0.04161402398403279 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.7, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.7, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.6213872832369942, + "acc_stderr": 0.02611374936131034, + "acc_norm": 0.6213872832369942, + "acc_norm_stderr": 0.02611374936131034 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5214723926380368, + "acc_stderr": 0.03924746876751129, + "acc_norm": 0.5214723926380368, + "acc_norm_stderr": 0.03924746876751129 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6141975308641975, + "acc_stderr": 0.027085401226132146, + "acc_norm": 0.6141975308641975, + "acc_norm_stderr": 0.027085401226132146 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7305699481865285, + "acc_stderr": 0.032018671228777947, + "acc_norm": 0.7305699481865285, + "acc_norm_stderr": 0.032018671228777947 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.5, + "acc_stderr": 0.047036043419179864, + "acc_norm": 0.5, + "acc_norm_stderr": 0.047036043419179864 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.7009174311926606, + "acc_stderr": 0.019630417285415175, + "acc_norm": 0.7009174311926606, + "acc_norm_stderr": 0.019630417285415175 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4603174603174603, + "acc_stderr": 0.04458029125470973, + "acc_norm": 0.4603174603174603, + "acc_norm_stderr": 0.04458029125470973 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.565359477124183, + "acc_stderr": 0.028384256704883037, + 
"acc_norm": 0.565359477124183, + "acc_norm_stderr": 0.028384256704883037 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.64, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.64, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7520661157024794, + "acc_stderr": 0.039418975265163046, + "acc_norm": 0.7520661157024794, + "acc_norm_stderr": 0.039418975265163046 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.631578947368421, + "acc_stderr": 0.03925523381052932, + "acc_norm": 0.631578947368421, + "acc_norm_stderr": 0.03925523381052932 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5065359477124183, + "acc_stderr": 0.020226106567657807, + "acc_norm": 0.5065359477124183, + "acc_norm_stderr": 0.020226106567657807 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.42907801418439717, + "acc_stderr": 0.02952591430255855, + "acc_norm": 0.42907801418439717, + "acc_norm_stderr": 0.02952591430255855 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.41964285714285715, + "acc_stderr": 0.04684099321077106, + "acc_norm": 0.41964285714285715, + "acc_norm_stderr": 0.04684099321077106 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5324074074074074, + "acc_stderr": 0.03402801581358966, + "acc_norm": 0.5324074074074074, + "acc_norm_stderr": 0.03402801581358966 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.21564245810055865, + "acc_stderr": 0.013754835975482351, + "acc_norm": 0.21564245810055865, + "acc_norm_stderr": 0.013754835975482351 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.72, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.72, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5257352941176471, + 
"acc_stderr": 0.030332578094555033, + "acc_norm": 0.5257352941176471, + "acc_norm_stderr": 0.030332578094555033 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5591836734693878, + "acc_stderr": 0.03178419114175363, + "acc_norm": 0.5591836734693878, + "acc_norm_stderr": 0.03178419114175363 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7848101265822784, + "acc_stderr": 0.026750826994676142, + "acc_norm": 0.7848101265822784, + "acc_norm_stderr": 0.026750826994676142 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.4198174706649283, + "acc_stderr": 0.01260496081608737, + "acc_norm": 0.4198174706649283, + "acc_norm_stderr": 0.01260496081608737 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.7009803921568627, + "acc_stderr": 0.03213325717373618, + "acc_norm": 0.7009803921568627, + "acc_norm_stderr": 0.03213325717373618 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6848484848484848, + "acc_stderr": 0.0362773057502241, + "acc_norm": 0.6848484848484848, + "acc_norm_stderr": 0.0362773057502241 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3561811505507956, + "mc1_stderr": 0.016763790728446342, + "mc2": 0.517108292202163, + "mc2_stderr": 0.015615653927534476 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.6198347107438017, + "acc_stderr": 0.016689333596980094, + "acc_norm": 0.6458087367178277, + "acc_norm_stderr": 0.01644317574921476 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + 
"harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + 
"harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "megastudyedu/M-SOLAR-10.7B-v1.3-dpo", + "model_sha": "c17bf59878b47379cc9f69081238dd2c01880b07", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/megastudyedu/M-SOLAR-10.7B-v1.4-dpo/result_2024-02-05 11:22:52.json b/megastudyedu/M-SOLAR-10.7B-v1.4-dpo/result_2024-02-05 11:22:52.json new file mode 100644 index 0000000000000000000000000000000000000000..5ace00c266bae0d361d1881cdac18d3817d7bd84 --- /dev/null +++ b/megastudyedu/M-SOLAR-10.7B-v1.4-dpo/result_2024-02-05 11:22:52.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4974402730375427, + "acc_stderr": 0.014611199329843788, + "acc_norm": 0.5460750853242321, + "acc_norm_stderr": 0.014549221105171858 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4707229635530771, + "acc_stderr": 0.004981220135882329, + "acc_norm": 0.6444931288587931, + "acc_norm_stderr": 0.004776883632722624 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6432748538011696, + "acc_stderr": 0.03674013002860954, + "acc_norm": 0.6432748538011696, + "acc_norm_stderr": 0.03674013002860954 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6893203883495146, + "acc_stderr": 0.045821241601615506, + "acc_norm": 0.6893203883495146, + "acc_norm_stderr": 0.045821241601615506 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6781609195402298, + "acc_stderr": 0.016706381415057904, + "acc_norm": 0.6781609195402298, + "acc_norm_stderr": 0.016706381415057904 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.43703703703703706, + "acc_stderr": 0.04284958639753398, + "acc_norm": 0.43703703703703706, + "acc_norm_stderr": 0.04284958639753398 + }, + 
"harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.48936170212765956, + "acc_stderr": 0.03267862331014063, + "acc_norm": 0.48936170212765956, + "acc_norm_stderr": 0.03267862331014063 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.5120481927710844, + "acc_stderr": 0.038913644958358175, + "acc_norm": 0.5120481927710844, + "acc_norm_stderr": 0.038913644958358175 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6205787781350482, + "acc_stderr": 0.027559949802347817, + "acc_norm": 0.6205787781350482, + "acc_norm_stderr": 0.027559949802347817 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.6367713004484304, + "acc_stderr": 0.032277904428505, + "acc_norm": 0.6367713004484304, + "acc_norm_stderr": 0.032277904428505 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5801526717557252, + "acc_stderr": 0.04328577215262972, + "acc_norm": 0.5801526717557252, + "acc_norm_stderr": 0.04328577215262972 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.45, + "acc_stderr": 0.04999999999999999, + "acc_norm": 0.45, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7777777777777778, + "acc_stderr": 0.029620227874790458, + "acc_norm": 0.7777777777777778, + "acc_norm_stderr": 0.029620227874790458 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.503448275862069, + "acc_stderr": 0.04166567577101579, + "acc_norm": 0.503448275862069, + "acc_norm_stderr": 0.04166567577101579 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.04617034827006717, + "acc_norm": 0.3137254901960784, + "acc_norm_stderr": 0.04617034827006717 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.592436974789916, + "acc_stderr": 0.031918633744784666, + "acc_norm": 0.592436974789916, + "acc_norm_stderr": 
0.031918633744784666 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.6102564102564103, + "acc_stderr": 0.024726967886647085, + "acc_norm": 0.6102564102564103, + "acc_norm_stderr": 0.024726967886647085 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.7129629629629629, + "acc_stderr": 0.043733130409147614, + "acc_norm": 0.7129629629629629, + "acc_norm_stderr": 0.043733130409147614 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4236453201970443, + "acc_stderr": 0.03476725747649037, + "acc_norm": 0.4236453201970443, + "acc_norm_stderr": 0.03476725747649037 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6451612903225806, + "acc_stderr": 0.02721888977330877, + "acc_norm": 0.6451612903225806, + "acc_norm_stderr": 0.02721888977330877 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.8162393162393162, + "acc_stderr": 0.025372139671722933, + "acc_norm": 0.8162393162393162, + "acc_norm_stderr": 0.025372139671722933 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5660377358490566, + "acc_stderr": 0.030503292013342592, + "acc_norm": 0.5660377358490566, + "acc_norm_stderr": 0.030503292013342592 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6090909090909091, + "acc_stderr": 0.04673752333670239, + "acc_norm": 0.6090909090909091, + "acc_norm_stderr": 0.04673752333670239 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.32592592592592595, + "acc_stderr": 0.028578348365473065, + "acc_norm": 0.32592592592592595, + "acc_norm_stderr": 0.028578348365473065 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3576158940397351, + "acc_stderr": 0.03913453431177258, + 
"acc_norm": 0.3576158940397351, + "acc_norm_stderr": 0.03913453431177258 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6965174129353234, + "acc_stderr": 0.03251006816458619, + "acc_norm": 0.6965174129353234, + "acc_norm_stderr": 0.03251006816458619 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4682080924855491, + "acc_stderr": 0.03804749744364763, + "acc_norm": 0.4682080924855491, + "acc_norm_stderr": 0.03804749744364763 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.43386243386243384, + "acc_stderr": 0.025525034382474884, + "acc_norm": 0.43386243386243384, + "acc_norm_stderr": 0.025525034382474884 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5694444444444444, + "acc_stderr": 0.04140685639111503, + "acc_norm": 0.5694444444444444, + "acc_norm_stderr": 0.04140685639111503 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.75, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.75, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5953757225433526, + "acc_stderr": 0.02642481659400985, + "acc_norm": 0.5953757225433526, + "acc_norm_stderr": 0.02642481659400985 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5337423312883436, + "acc_stderr": 0.03919415545048411, + "acc_norm": 0.5337423312883436, + "acc_norm_stderr": 0.03919415545048411 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6327160493827161, + "acc_stderr": 0.026822801759507894, + "acc_norm": 0.6327160493827161, + "acc_norm_stderr": 0.026822801759507894 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.8031088082901554, + "acc_stderr": 
0.028697873971860688, + "acc_norm": 0.8031088082901554, + "acc_norm_stderr": 0.028697873971860688 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.4649122807017544, + "acc_stderr": 0.046920083813689104, + "acc_norm": 0.4649122807017544, + "acc_norm_stderr": 0.046920083813689104 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.728440366972477, + "acc_stderr": 0.019069098363191452, + "acc_norm": 0.728440366972477, + "acc_norm_stderr": 0.019069098363191452 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4523809523809524, + "acc_stderr": 0.044518079590553275, + "acc_norm": 0.4523809523809524, + "acc_norm_stderr": 0.044518079590553275 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.6209150326797386, + "acc_stderr": 0.027780141207023337, + "acc_norm": 0.6209150326797386, + "acc_norm_stderr": 0.027780141207023337 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.66, + "acc_stderr": 0.04760952285695237, + "acc_norm": 0.66, + "acc_norm_stderr": 0.04760952285695237 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.743801652892562, + "acc_stderr": 0.03984979653302872, + "acc_norm": 0.743801652892562, + "acc_norm_stderr": 0.03984979653302872 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.6381578947368421, + "acc_stderr": 0.03910525752849726, + "acc_norm": 0.6381578947368421, + "acc_norm_stderr": 0.03910525752849726 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5228758169934641, + "acc_stderr": 0.02020665318788479, + "acc_norm": 0.5228758169934641, + "acc_norm_stderr": 0.02020665318788479 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.43617021276595747, + "acc_stderr": 0.029583452036284062, + "acc_norm": 0.43617021276595747, + "acc_norm_stderr": 0.029583452036284062 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4732142857142857, + "acc_stderr": 0.047389751192741546, + "acc_norm": 0.4732142857142857, + "acc_norm_stderr": 0.047389751192741546 + }, + 
"harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.49537037037037035, + "acc_stderr": 0.03409825519163572, + "acc_norm": 0.49537037037037035, + "acc_norm_stderr": 0.03409825519163572 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.19217877094972066, + "acc_stderr": 0.013177759505210081, + "acc_norm": 0.19217877094972066, + "acc_norm_stderr": 0.013177759505210081 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.47, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.47, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.75, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.75, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5367647058823529, + "acc_stderr": 0.03029061918048569, + "acc_norm": 0.5367647058823529, + "acc_norm_stderr": 0.03029061918048569 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6244897959183674, + "acc_stderr": 0.03100120903989484, + "acc_norm": 0.6244897959183674, + "acc_norm_stderr": 0.03100120903989484 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7890295358649789, + "acc_stderr": 0.02655837250266192, + "acc_norm": 0.7890295358649789, + "acc_norm_stderr": 0.02655837250266192 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.4322033898305085, + "acc_stderr": 0.01265229777711497, + "acc_norm": 0.4322033898305085, + "acc_norm_stderr": 0.01265229777711497 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.7009803921568627, + "acc_stderr": 0.03213325717373617, + "acc_norm": 0.7009803921568627, + "acc_norm_stderr": 0.03213325717373617 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.7212121212121212, + "acc_stderr": 0.03501438706296781, + "acc_norm": 0.7212121212121212, + "acc_norm_stderr": 0.03501438706296781 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3806609547123623, + "mc1_stderr": 0.01699762787190793, + "mc2": 
0.5414640273387645, + "mc2_stderr": 0.01588005279674397 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4946871310507674, + "acc_stderr": 0.017189383627229687, + "acc_norm": 0.5147579693034239, + "acc_norm_stderr": 0.017182864434998557 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + 
"harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "megastudyedu/M-SOLAR-10.7B-v1.4-dpo", + "model_sha": "43b261f146f590a7f76bd99f4275b3ced5eef952", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/megastudyedu/M-SOLAR-10.7B-v1.4/result_2024-02-03 11:42:24.json b/megastudyedu/M-SOLAR-10.7B-v1.4/result_2024-02-03 11:42:24.json new file mode 100644 index 0000000000000000000000000000000000000000..40970d7a3d007feb8588d42c0d12a181fe6f2247 --- /dev/null +++ b/megastudyedu/M-SOLAR-10.7B-v1.4/result_2024-02-03 11:42:24.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4735494880546075, + "acc_stderr": 0.014590931358120174, + "acc_norm": 0.5162116040955631, + "acc_norm_stderr": 0.014603708567414936 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4598685520812587, + "acc_stderr": 0.004973683026202182, + "acc_norm": 0.6249751045608445, + "acc_norm_stderr": 
0.004831399218500231 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6432748538011696, + "acc_stderr": 0.03674013002860954, + "acc_norm": 0.6432748538011696, + "acc_norm_stderr": 0.03674013002860954 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6796116504854369, + "acc_stderr": 0.04620284082280041, + "acc_norm": 0.6796116504854369, + "acc_norm_stderr": 0.04620284082280041 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6756066411238825, + "acc_stderr": 0.016740929047162716, + "acc_norm": 0.6756066411238825, + "acc_norm_stderr": 0.016740929047162716 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.43703703703703706, + "acc_stderr": 0.04284958639753398, + "acc_norm": 0.43703703703703706, + "acc_norm_stderr": 0.04284958639753398 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4765957446808511, + "acc_stderr": 0.03265019475033583, + "acc_norm": 0.4765957446808511, + "acc_norm_stderr": 0.03265019475033583 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.5180722891566265, + "acc_stderr": 0.03889951252827216, + "acc_norm": 0.5180722891566265, + "acc_norm_stderr": 0.03889951252827216 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6237942122186495, + "acc_stderr": 0.02751392568354943, + "acc_norm": 0.6237942122186495, + "acc_norm_stderr": 0.02751392568354943 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.6143497757847534, + "acc_stderr": 0.03266842214289201, + "acc_norm": 0.6143497757847534, + "acc_norm_stderr": 0.03266842214289201 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6335877862595419, + "acc_stderr": 0.04225875451969638, + "acc_norm": 0.6335877862595419, + "acc_norm_stderr": 0.04225875451969638 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956914, + "acc_norm": 0.49, + "acc_norm_stderr": 
0.05024183937956914 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7676767676767676, + "acc_stderr": 0.030088629490217483, + "acc_norm": 0.7676767676767676, + "acc_norm_stderr": 0.030088629490217483 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5172413793103449, + "acc_stderr": 0.04164188720169375, + "acc_norm": 0.5172413793103449, + "acc_norm_stderr": 0.04164188720169375 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.04440521906179328, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.04440521906179328 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5840336134453782, + "acc_stderr": 0.0320165010073961, + "acc_norm": 0.5840336134453782, + "acc_norm_stderr": 0.0320165010073961 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.6025641025641025, + "acc_stderr": 0.024811920017903832, + "acc_norm": 0.6025641025641025, + "acc_norm_stderr": 0.024811920017903832 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.7314814814814815, + "acc_stderr": 0.042844679680521934, + "acc_norm": 0.7314814814814815, + "acc_norm_stderr": 0.042844679680521934 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4433497536945813, + "acc_stderr": 0.03495334582162934, + "acc_norm": 0.4433497536945813, + "acc_norm_stderr": 0.03495334582162934 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6290322580645161, + "acc_stderr": 0.02748054188795359, + "acc_norm": 0.6290322580645161, + "acc_norm_stderr": 0.02748054188795359 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.8162393162393162, + "acc_stderr": 0.025372139671722933, + 
"acc_norm": 0.8162393162393162, + "acc_norm_stderr": 0.025372139671722933 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5735849056603773, + "acc_stderr": 0.030437794342983045, + "acc_norm": 0.5735849056603773, + "acc_norm_stderr": 0.030437794342983045 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6, + "acc_stderr": 0.0469237132203465, + "acc_norm": 0.6, + "acc_norm_stderr": 0.0469237132203465 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3296296296296296, + "acc_stderr": 0.02866120111652459, + "acc_norm": 0.3296296296296296, + "acc_norm_stderr": 0.02866120111652459 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3509933774834437, + "acc_stderr": 0.03896981964257375, + "acc_norm": 0.3509933774834437, + "acc_norm_stderr": 0.03896981964257375 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7313432835820896, + "acc_stderr": 0.03134328358208954, + "acc_norm": 0.7313432835820896, + "acc_norm_stderr": 0.03134328358208954 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.45664739884393063, + "acc_stderr": 0.03798106566014499, + "acc_norm": 0.45664739884393063, + "acc_norm_stderr": 0.03798106566014499 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.43386243386243384, + "acc_stderr": 0.025525034382474887, + "acc_norm": 0.43386243386243384, + "acc_norm_stderr": 0.025525034382474887 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5833333333333334, + "acc_stderr": 0.04122728707651282, + "acc_norm": 0.5833333333333334, + "acc_norm_stderr": 0.04122728707651282 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.75, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.75, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.6098265895953757, + "acc_stderr": 0.026261677607806642, + 
"acc_norm": 0.6098265895953757, + "acc_norm_stderr": 0.026261677607806642 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5521472392638037, + "acc_stderr": 0.03906947479456608, + "acc_norm": 0.5521472392638037, + "acc_norm_stderr": 0.03906947479456608 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6141975308641975, + "acc_stderr": 0.027085401226132143, + "acc_norm": 0.6141975308641975, + "acc_norm_stderr": 0.027085401226132143 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7823834196891192, + "acc_stderr": 0.02977866303775296, + "acc_norm": 0.7823834196891192, + "acc_norm_stderr": 0.02977866303775296 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.45614035087719296, + "acc_stderr": 0.046854730419077895, + "acc_norm": 0.45614035087719296, + "acc_norm_stderr": 0.046854730419077895 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.708256880733945, + "acc_stderr": 0.01948930096887654, + "acc_norm": 0.708256880733945, + "acc_norm_stderr": 0.01948930096887654 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4365079365079365, + "acc_stderr": 0.04435932892851466, + "acc_norm": 0.4365079365079365, + "acc_norm_stderr": 0.04435932892851466 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5947712418300654, + "acc_stderr": 0.02811092849280907, + "acc_norm": 0.5947712418300654, + "acc_norm_stderr": 0.02811092849280907 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.62, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.62, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7520661157024794, + "acc_stderr": 0.03941897526516303, + "acc_norm": 0.7520661157024794, + "acc_norm_stderr": 0.03941897526516303 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.618421052631579, + "acc_stderr": 
0.03953173377749194, + "acc_norm": 0.618421052631579, + "acc_norm_stderr": 0.03953173377749194 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5310457516339869, + "acc_stderr": 0.020188804456361894, + "acc_norm": 0.5310457516339869, + "acc_norm_stderr": 0.020188804456361894 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.42907801418439717, + "acc_stderr": 0.029525914302558562, + "acc_norm": 0.42907801418439717, + "acc_norm_stderr": 0.029525914302558562 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.44642857142857145, + "acc_stderr": 0.047184714852195886, + "acc_norm": 0.44642857142857145, + "acc_norm_stderr": 0.047184714852195886 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.47685185185185186, + "acc_stderr": 0.03406315360711507, + "acc_norm": 0.47685185185185186, + "acc_norm_stderr": 0.03406315360711507 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.1553072625698324, + "acc_stderr": 0.012113691329725892, + "acc_norm": 0.1553072625698324, + "acc_norm_stderr": 0.012113691329725892 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.76, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.76, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5073529411764706, + "acc_stderr": 0.030369552523902173, + "acc_norm": 0.5073529411764706, + "acc_norm_stderr": 0.030369552523902173 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6285714285714286, + "acc_stderr": 0.03093285879278986, + "acc_norm": 0.6285714285714286, + "acc_norm_stderr": 0.03093285879278986 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7848101265822784, + "acc_stderr": 0.026750826994676166, + "acc_norm": 0.7848101265822784, + "acc_norm_stderr": 0.026750826994676166 + }, + 
"harness|ko_mmlu_professional_law|5": { + "acc": 0.4211212516297262, + "acc_stderr": 0.012610325733489905, + "acc_norm": 0.4211212516297262, + "acc_norm_stderr": 0.012610325733489905 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.7107843137254902, + "acc_stderr": 0.03182231867647554, + "acc_norm": 0.7107843137254902, + "acc_norm_stderr": 0.03182231867647554 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.696969696969697, + "acc_stderr": 0.03588624800091708, + "acc_norm": 0.696969696969697, + "acc_norm_stderr": 0.03588624800091708 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.33414932680538556, + "mc1_stderr": 0.016512530677150517, + "mc2": 0.4949337437466185, + "mc2_stderr": 0.015532881253716376 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.47461629279811096, + "acc_stderr": 0.017168187201429246, + "acc_norm": 0.5029515938606848, + "acc_norm_stderr": 0.017190054580194694 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + 
"harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "megastudyedu/M-SOLAR-10.7B-v1.4", + "model_sha": "bc851443b7e452cc8a8712c94672091343fcda43", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git 
a/megastudyedu/ME-7B-v1.0/result_2024-01-29 03:21:06.json b/megastudyedu/ME-7B-v1.0/result_2024-01-29 03:21:06.json new file mode 100644 index 0000000000000000000000000000000000000000..0deae7c7ae5de8ae0df1667d46aff8d3f5f9b0bf --- /dev/null +++ b/megastudyedu/ME-7B-v1.0/result_2024-01-29 03:21:06.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3890784982935154, + "acc_stderr": 0.014247309976045607, + "acc_norm": 0.4513651877133106, + "acc_norm_stderr": 0.014542104569955265 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3980282812188807, + "acc_stderr": 0.004884909544477103, + "acc_norm": 0.530870344552878, + "acc_norm_stderr": 0.004980262025472479 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.543859649122807, + "acc_stderr": 0.03820042586602966, + "acc_norm": 0.543859649122807, + "acc_norm_stderr": 0.03820042586602966 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5533980582524272, + "acc_stderr": 0.04922424153458933, + "acc_norm": 0.5533980582524272, + "acc_norm_stderr": 0.04922424153458933 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5376756066411239, + "acc_stderr": 0.017829131764287184, + "acc_norm": 0.5376756066411239, + "acc_norm_stderr": 0.017829131764287184 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4148148148148148, + "acc_stderr": 0.042561937679014075, + "acc_norm": 0.4148148148148148, + "acc_norm_stderr": 0.042561937679014075 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.48936170212765956, + "acc_stderr": 0.03267862331014063, + "acc_norm": 0.48936170212765956, + "acc_norm_stderr": 0.03267862331014063 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42771084337349397, + "acc_stderr": 0.038515976837185335, + "acc_norm": 0.42771084337349397, + "acc_norm_stderr": 0.038515976837185335 + }, + 
"harness|ko_mmlu_philosophy|5": { + "acc": 0.4887459807073955, + "acc_stderr": 0.028390897396863526, + "acc_norm": 0.4887459807073955, + "acc_norm_stderr": 0.028390897396863526 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.515695067264574, + "acc_stderr": 0.0335412657542081, + "acc_norm": 0.515695067264574, + "acc_norm_stderr": 0.0335412657542081 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4961832061068702, + "acc_stderr": 0.04385162325601553, + "acc_norm": 0.4961832061068702, + "acc_norm_stderr": 0.04385162325601553 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.601010101010101, + "acc_stderr": 0.0348890161685273, + "acc_norm": 0.601010101010101, + "acc_norm_stderr": 0.0348890161685273 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.46206896551724136, + "acc_stderr": 0.04154659671707548, + "acc_norm": 0.46206896551724136, + "acc_norm_stderr": 0.04154659671707548 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.30392156862745096, + "acc_stderr": 0.045766654032077636, + "acc_norm": 0.30392156862745096, + "acc_norm_stderr": 0.045766654032077636 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5252100840336135, + "acc_stderr": 0.0324371805513741, + "acc_norm": 0.5252100840336135, + "acc_norm_stderr": 0.0324371805513741 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.45897435897435895, + "acc_stderr": 0.025265525491284295, + "acc_norm": 0.45897435897435895, + "acc_norm_stderr": 0.025265525491284295 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.64, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.64, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, 
+ "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5462962962962963, + "acc_stderr": 0.048129173245368216, + "acc_norm": 0.5462962962962963, + "acc_norm_stderr": 0.048129173245368216 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4187192118226601, + "acc_stderr": 0.03471192860518468, + "acc_norm": 0.4187192118226601, + "acc_norm_stderr": 0.03471192860518468 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5064516129032258, + "acc_stderr": 0.02844163823354051, + "acc_norm": 0.5064516129032258, + "acc_norm_stderr": 0.02844163823354051 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7606837606837606, + "acc_stderr": 0.027951826808924336, + "acc_norm": 0.7606837606837606, + "acc_norm_stderr": 0.027951826808924336 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4679245283018868, + "acc_stderr": 0.030709486992556545, + "acc_norm": 0.4679245283018868, + "acc_norm_stderr": 0.030709486992556545 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5181818181818182, + "acc_stderr": 0.04785964010794914, + "acc_norm": 0.5181818181818182, + "acc_norm_stderr": 0.04785964010794914 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.34444444444444444, + "acc_stderr": 0.02897264888484427, + "acc_norm": 0.34444444444444444, + "acc_norm_stderr": 0.02897264888484427 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.39072847682119205, + "acc_stderr": 0.03983798306659808, + "acc_norm": 0.39072847682119205, + "acc_norm_stderr": 0.03983798306659808 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6368159203980099, + "acc_stderr": 0.034005985055990146, + "acc_norm": 0.6368159203980099, + "acc_norm_stderr": 0.034005985055990146 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4508670520231214, + "acc_stderr": 0.037940126746970296, + "acc_norm": 0.4508670520231214, + "acc_norm_stderr": 0.037940126746970296 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3783068783068783, + "acc_stderr": 
0.024976954053155254, + "acc_norm": 0.3783068783068783, + "acc_norm_stderr": 0.024976954053155254 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.04076663253918567, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.04076663253918567 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5578034682080925, + "acc_stderr": 0.026738603643807403, + "acc_norm": 0.5578034682080925, + "acc_norm_stderr": 0.026738603643807403 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5337423312883436, + "acc_stderr": 0.039194155450484096, + "acc_norm": 0.5337423312883436, + "acc_norm_stderr": 0.039194155450484096 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4876543209876543, + "acc_stderr": 0.027812262269327242, + "acc_norm": 0.4876543209876543, + "acc_norm_stderr": 0.027812262269327242 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5699481865284974, + "acc_stderr": 0.03572954333144808, + "acc_norm": 0.5699481865284974, + "acc_norm_stderr": 0.03572954333144808 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.37719298245614036, + "acc_stderr": 0.04559522141958215, + "acc_norm": 0.37719298245614036, + "acc_norm_stderr": 0.04559522141958215 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5394495412844037, + "acc_stderr": 0.021370494609995096, + "acc_norm": 0.5394495412844037, + "acc_norm_stderr": 0.021370494609995096 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.38095238095238093, + 
"acc_stderr": 0.043435254289490965, + "acc_norm": 0.38095238095238093, + "acc_norm_stderr": 0.043435254289490965 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5196078431372549, + "acc_stderr": 0.028607893699576066, + "acc_norm": 0.5196078431372549, + "acc_norm_stderr": 0.028607893699576066 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6942148760330579, + "acc_stderr": 0.04205953933884124, + "acc_norm": 0.6942148760330579, + "acc_norm_stderr": 0.04205953933884124 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4868421052631579, + "acc_stderr": 0.04067533136309174, + "acc_norm": 0.4868421052631579, + "acc_norm_stderr": 0.04067533136309174 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4019607843137255, + "acc_stderr": 0.01983517648437539, + "acc_norm": 0.4019607843137255, + "acc_norm_stderr": 0.01983517648437539 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.375886524822695, + "acc_stderr": 0.02889395541211589, + "acc_norm": 0.375886524822695, + "acc_norm_stderr": 0.02889395541211589 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.41964285714285715, + "acc_stderr": 0.046840993210771065, + "acc_norm": 0.41964285714285715, + "acc_norm_stderr": 0.046840993210771065 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.03388857118502325, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.03388857118502325 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.25251396648044694, + "acc_stderr": 0.014530330201468648, + "acc_norm": 0.25251396648044694, + "acc_norm_stderr": 0.014530330201468648 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + 
"harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.66, + "acc_stderr": 0.04760952285695237, + "acc_norm": 0.66, + "acc_norm_stderr": 0.04760952285695237 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.36764705882352944, + "acc_stderr": 0.029289413409403192, + "acc_norm": 0.36764705882352944, + "acc_norm_stderr": 0.029289413409403192 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5183673469387755, + "acc_stderr": 0.03198761546763127, + "acc_norm": 0.5183673469387755, + "acc_norm_stderr": 0.03198761546763127 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6624472573839663, + "acc_stderr": 0.030781549102026205, + "acc_norm": 0.6624472573839663, + "acc_norm_stderr": 0.030781549102026205 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3533246414602347, + "acc_stderr": 0.012208408211082433, + "acc_norm": 0.3533246414602347, + "acc_norm_stderr": 0.012208408211082433 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.46568627450980393, + "acc_stderr": 0.03501038327635897, + "acc_norm": 0.46568627450980393, + "acc_norm_stderr": 0.03501038327635897 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5151515151515151, + "acc_stderr": 0.03902551007374448, + "acc_norm": 0.5151515151515151, + "acc_norm_stderr": 0.03902551007374448 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2913096695226438, + "mc1_stderr": 0.015905987048184828, + "mc2": 0.45416518540228107, + "mc2_stderr": 0.015431600629687578 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3577331759149941, + "acc_stderr": 0.016479808935749983, + "acc_norm": 0.4014167650531287, + "acc_norm_stderr": 0.01685290785872906 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + 
"harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + 
"harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "megastudyedu/ME-7B-v1.0", + "model_sha": "7e55e77de6ac6c28dd8c0f1a6c678c5a57a46b14", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/megastudyedu/ME-7B-v1.1/result_2024-01-29 03:20:52.json b/megastudyedu/ME-7B-v1.1/result_2024-01-29 03:20:52.json new file mode 100644 index 0000000000000000000000000000000000000000..914b48490b02fb18e4c63a6b07e932e44d7db399 --- /dev/null +++ b/megastudyedu/ME-7B-v1.1/result_2024-01-29 03:20:52.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.42235494880546076, + "acc_stderr": 0.01443413871337998, + "acc_norm": 0.4718430034129693, + "acc_norm_stderr": 0.0145882041051022 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3996215893248357, + "acc_stderr": 0.004888194985997393, + "acc_norm": 0.5326628161720772, + "acc_norm_stderr": 0.004979123236507975 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.03811079669833531, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.03811079669833531 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6310679611650486, + "acc_stderr": 0.0477761518115674, + "acc_norm": 0.6310679611650486, + "acc_norm_stderr": 0.0477761518115674 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5363984674329502, + "acc_stderr": 0.01783252407959326, + "acc_norm": 0.5363984674329502, + "acc_norm_stderr": 0.01783252407959326 + }, + 
"harness|ko_mmlu_anatomy|5": { + "acc": 0.42962962962962964, + "acc_stderr": 0.04276349494376599, + "acc_norm": 0.42962962962962964, + "acc_norm_stderr": 0.04276349494376599 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.451063829787234, + "acc_stderr": 0.032529096196131965, + "acc_norm": 0.451063829787234, + "acc_norm_stderr": 0.032529096196131965 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.43373493975903615, + "acc_stderr": 0.03858158940685515, + "acc_norm": 0.43373493975903615, + "acc_norm_stderr": 0.03858158940685515 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.48231511254019294, + "acc_stderr": 0.02838032284907713, + "acc_norm": 0.48231511254019294, + "acc_norm_stderr": 0.02838032284907713 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5067264573991032, + "acc_stderr": 0.03355476596234355, + "acc_norm": 0.5067264573991032, + "acc_norm_stderr": 0.03355476596234355 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48091603053435117, + "acc_stderr": 0.04382094705550988, + "acc_norm": 0.48091603053435117, + "acc_norm_stderr": 0.04382094705550988 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6313131313131313, + "acc_stderr": 0.034373055019806184, + "acc_norm": 0.6313131313131313, + "acc_norm_stderr": 0.034373055019806184 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.46206896551724136, + "acc_stderr": 0.041546596717075474, + "acc_norm": 0.46206896551724136, + "acc_norm_stderr": 0.041546596717075474 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.04533838195929776, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 
0.04533838195929776 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5672268907563025, + "acc_stderr": 0.03218358107742613, + "acc_norm": 0.5672268907563025, + "acc_norm_stderr": 0.03218358107742613 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5051282051282051, + "acc_stderr": 0.025349672906838643, + "acc_norm": 0.5051282051282051, + "acc_norm_stderr": 0.025349672906838643 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5462962962962963, + "acc_stderr": 0.04812917324536823, + "acc_norm": 0.5462962962962963, + "acc_norm_stderr": 0.04812917324536823 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4876847290640394, + "acc_stderr": 0.035169204442208966, + "acc_norm": 0.4876847290640394, + "acc_norm_stderr": 0.035169204442208966 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5193548387096775, + "acc_stderr": 0.02842268740431211, + "acc_norm": 0.5193548387096775, + "acc_norm_stderr": 0.02842268740431211 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7222222222222222, + "acc_stderr": 0.029343114798094455, + "acc_norm": 0.7222222222222222, + "acc_norm_stderr": 0.029343114798094455 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5283018867924528, + "acc_stderr": 0.030723535249006107, + "acc_norm": 0.5283018867924528, + "acc_norm_stderr": 0.030723535249006107 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5272727272727272, + "acc_stderr": 0.04782001791380061, + "acc_norm": 0.5272727272727272, + "acc_norm_stderr": 0.04782001791380061 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3925925925925926, + "acc_stderr": 0.02977384701253297, + 
"acc_norm": 0.3925925925925926, + "acc_norm_stderr": 0.02977384701253297 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3509933774834437, + "acc_stderr": 0.03896981964257375, + "acc_norm": 0.3509933774834437, + "acc_norm_stderr": 0.03896981964257375 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6517412935323383, + "acc_stderr": 0.033687874661154596, + "acc_norm": 0.6517412935323383, + "acc_norm_stderr": 0.033687874661154596 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.41040462427745666, + "acc_stderr": 0.03750757044895538, + "acc_norm": 0.41040462427745666, + "acc_norm_stderr": 0.03750757044895538 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.36772486772486773, + "acc_stderr": 0.02483383982556242, + "acc_norm": 0.36772486772486773, + "acc_norm_stderr": 0.02483383982556242 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4375, + "acc_stderr": 0.04148415739394154, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.04148415739394154 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237101, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237101 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5606936416184971, + "acc_stderr": 0.026720034380514995, + "acc_norm": 0.5606936416184971, + "acc_norm_stderr": 0.026720034380514995 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5398773006134969, + "acc_stderr": 0.039158572914369714, + "acc_norm": 0.5398773006134969, + "acc_norm_stderr": 0.039158572914369714 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4845679012345679, + "acc_stderr": 0.02780749004427619, + "acc_norm": 0.4845679012345679, + "acc_norm_stderr": 0.02780749004427619 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 
0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5647668393782384, + "acc_stderr": 0.03578038165008585, + "acc_norm": 0.5647668393782384, + "acc_norm_stderr": 0.03578038165008585 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.37719298245614036, + "acc_stderr": 0.04559522141958216, + "acc_norm": 0.37719298245614036, + "acc_norm_stderr": 0.04559522141958216 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.544954128440367, + "acc_stderr": 0.02135050309092517, + "acc_norm": 0.544954128440367, + "acc_norm_stderr": 0.02135050309092517 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.42063492063492064, + "acc_stderr": 0.04415438226743744, + "acc_norm": 0.42063492063492064, + "acc_norm_stderr": 0.04415438226743744 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4934640522875817, + "acc_stderr": 0.02862747055055606, + "acc_norm": 0.4934640522875817, + "acc_norm_stderr": 0.02862747055055606 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.57, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.57, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.043913262867240704, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.043913262867240704 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.47368421052631576, + "acc_stderr": 0.040633027314866704, + "acc_norm": 0.47368421052631576, + "acc_norm_stderr": 0.040633027314866704 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4264705882352941, + "acc_stderr": 0.02000791273935936, + "acc_norm": 0.4264705882352941, + "acc_norm_stderr": 0.02000791273935936 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3475177304964539, + "acc_stderr": 0.02840662780959095, + "acc_norm": 0.3475177304964539, + "acc_norm_stderr": 0.02840662780959095 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.45535714285714285, 
+ "acc_stderr": 0.04726835553719099, + "acc_norm": 0.45535714285714285, + "acc_norm_stderr": 0.04726835553719099 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.03350991604696043, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.03350991604696043 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.21564245810055865, + "acc_stderr": 0.013754835975482353, + "acc_norm": 0.21564245810055865, + "acc_norm_stderr": 0.013754835975482353 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.68, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.68, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3492647058823529, + "acc_stderr": 0.028959755196824862, + "acc_norm": 0.3492647058823529, + "acc_norm_stderr": 0.028959755196824862 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5551020408163265, + "acc_stderr": 0.03181425118197786, + "acc_norm": 0.5551020408163265, + "acc_norm_stderr": 0.03181425118197786 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6582278481012658, + "acc_stderr": 0.030874537537553617, + "acc_norm": 0.6582278481012658, + "acc_norm_stderr": 0.030874537537553617 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.35267275097783574, + "acc_stderr": 0.012203286846053886, + "acc_norm": 0.35267275097783574, + "acc_norm_stderr": 0.012203286846053886 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.45588235294117646, + "acc_stderr": 0.03495624522015474, + "acc_norm": 0.45588235294117646, + "acc_norm_stderr": 0.03495624522015474 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.03895658065271847, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 
0.03895658065271847 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2962056303549572, + "mc1_stderr": 0.015983595101811392, + "mc2": 0.4619384573080737, + "mc2_stderr": 0.01520929754090529 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.38961038961038963, + "acc_stderr": 0.01676616167189351, + "acc_norm": 0.44155844155844154, + "acc_norm_stderr": 0.017072525875563103 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + 
"harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "megastudyedu/ME-7B-v1.1", + "model_sha": "a8d3d109ccce60fa04c1f0508004b1d6a12a4a2e", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/megastudyedu/ME-dpo-7B-v1.0/result_2024-01-29 03:15:42.json b/megastudyedu/ME-dpo-7B-v1.0/result_2024-01-29 03:15:42.json new file mode 100644 index 0000000000000000000000000000000000000000..e84bc953a33a8351b3fac01fe204da43dda481ae --- /dev/null +++ b/megastudyedu/ME-dpo-7B-v1.0/result_2024-01-29 03:15:42.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4197952218430034, + "acc_stderr": 0.014422181226303028, + "acc_norm": 0.46757679180887374, + "acc_norm_stderr": 0.014580637569995416 + }, + "harness|ko_hellaswag|10": { + "acc": 
0.4048994224258116, + "acc_stderr": 0.004898693652043318, + "acc_norm": 0.5425214100776737, + "acc_norm_stderr": 0.004971704917267748 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.038110796698335316, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.038110796698335316 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5728155339805825, + "acc_stderr": 0.04897957737781168, + "acc_norm": 0.5728155339805825, + "acc_norm_stderr": 0.04897957737781168 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5427841634738186, + "acc_stderr": 0.017814385238534455, + "acc_norm": 0.5427841634738186, + "acc_norm_stderr": 0.017814385238534455 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4148148148148148, + "acc_stderr": 0.042561937679014075, + "acc_norm": 0.4148148148148148, + "acc_norm_stderr": 0.042561937679014075 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.49361702127659574, + "acc_stderr": 0.03268335899936337, + "acc_norm": 0.49361702127659574, + "acc_norm_stderr": 0.03268335899936337 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4578313253012048, + "acc_stderr": 0.0387862677100236, + "acc_norm": 0.4578313253012048, + "acc_norm_stderr": 0.0387862677100236 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5016077170418006, + "acc_stderr": 0.02839794490780661, + "acc_norm": 0.5016077170418006, + "acc_norm_stderr": 0.02839794490780661 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5246636771300448, + "acc_stderr": 0.03351695167652628, + "acc_norm": 0.5246636771300448, + "acc_norm_stderr": 0.03351695167652628 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.45038167938931295, + "acc_stderr": 0.04363643698524779, + "acc_norm": 0.45038167938931295, + "acc_norm_stderr": 0.04363643698524779 + }, + 
"harness|ko_mmlu_medical_genetics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6060606060606061, + "acc_stderr": 0.034812853382329645, + "acc_norm": 0.6060606060606061, + "acc_norm_stderr": 0.034812853382329645 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.45517241379310347, + "acc_stderr": 0.04149886942192117, + "acc_norm": 0.45517241379310347, + "acc_norm_stderr": 0.04149886942192117 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3627450980392157, + "acc_stderr": 0.04784060704105654, + "acc_norm": 0.3627450980392157, + "acc_norm_stderr": 0.04784060704105654 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5042016806722689, + "acc_stderr": 0.03247734334448111, + "acc_norm": 0.5042016806722689, + "acc_norm_stderr": 0.03247734334448111 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.46153846153846156, + "acc_stderr": 0.025275892070240627, + "acc_norm": 0.46153846153846156, + "acc_norm_stderr": 0.025275892070240627 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.64, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.64, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.04803752235190192, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.04803752235190192 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.034819048444388045, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.034819048444388045 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5, + "acc_stderr": 0.028444006199428714, + "acc_norm": 0.5, + "acc_norm_stderr": 0.028444006199428714 + 
}, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7350427350427351, + "acc_stderr": 0.028911208802749472, + "acc_norm": 0.7350427350427351, + "acc_norm_stderr": 0.028911208802749472 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4867924528301887, + "acc_stderr": 0.030762134874500482, + "acc_norm": 0.4867924528301887, + "acc_norm_stderr": 0.030762134874500482 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5, + "acc_stderr": 0.04789131426105757, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04789131426105757 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.32222222222222224, + "acc_stderr": 0.02849346509102859, + "acc_norm": 0.32222222222222224, + "acc_norm_stderr": 0.02849346509102859 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3708609271523179, + "acc_stderr": 0.03943966699183629, + "acc_norm": 0.3708609271523179, + "acc_norm_stderr": 0.03943966699183629 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6567164179104478, + "acc_stderr": 0.03357379665433431, + "acc_norm": 0.6567164179104478, + "acc_norm_stderr": 0.03357379665433431 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.42196531791907516, + "acc_stderr": 0.0376574669386515, + "acc_norm": 0.42196531791907516, + "acc_norm_stderr": 0.0376574669386515 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.36507936507936506, + "acc_stderr": 0.02479606060269995, + "acc_norm": 0.36507936507936506, + "acc_norm_stderr": 0.02479606060269995 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4305555555555556, + "acc_stderr": 0.04140685639111502, + "acc_norm": 0.4305555555555556, + "acc_norm_stderr": 0.04140685639111502 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.58, + "acc_stderr": 0.04960449637488584, + "acc_norm": 0.58, + "acc_norm_stderr": 0.04960449637488584 + }, + 
"harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5751445086705202, + "acc_stderr": 0.02661335084026174, + "acc_norm": 0.5751445086705202, + "acc_norm_stderr": 0.02661335084026174 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5398773006134969, + "acc_stderr": 0.03915857291436971, + "acc_norm": 0.5398773006134969, + "acc_norm_stderr": 0.03915857291436971 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.49382716049382713, + "acc_stderr": 0.027818623962583302, + "acc_norm": 0.49382716049382713, + "acc_norm_stderr": 0.027818623962583302 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384739, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384739 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6113989637305699, + "acc_stderr": 0.03517739796373132, + "acc_norm": 0.6113989637305699, + "acc_norm_stderr": 0.03517739796373132 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.37719298245614036, + "acc_stderr": 0.04559522141958215, + "acc_norm": 0.37719298245614036, + "acc_norm_stderr": 0.04559522141958215 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5522935779816514, + "acc_stderr": 0.02131975496242546, + "acc_norm": 0.5522935779816514, + "acc_norm_stderr": 0.02131975496242546 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.40476190476190477, + "acc_stderr": 0.0439025926537756, + "acc_norm": 0.40476190476190477, + "acc_norm_stderr": 0.0439025926537756 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5065359477124183, + "acc_stderr": 0.028627470550556047, + "acc_norm": 0.5065359477124183, + "acc_norm_stderr": 0.028627470550556047 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6694214876033058, + "acc_stderr": 0.04294340845212095, + "acc_norm": 0.6694214876033058, + "acc_norm_stderr": 
0.04294340845212095 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4868421052631579, + "acc_stderr": 0.04067533136309174, + "acc_norm": 0.4868421052631579, + "acc_norm_stderr": 0.04067533136309174 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.41013071895424835, + "acc_stderr": 0.019898412717635892, + "acc_norm": 0.41013071895424835, + "acc_norm_stderr": 0.019898412717635892 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3617021276595745, + "acc_stderr": 0.028663820147199502, + "acc_norm": 0.3617021276595745, + "acc_norm_stderr": 0.028663820147199502 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4107142857142857, + "acc_stderr": 0.04669510663875191, + "acc_norm": 0.4107142857142857, + "acc_norm_stderr": 0.04669510663875191 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.03388857118502325, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.03388857118502325 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.26256983240223464, + "acc_stderr": 0.014716824273017754, + "acc_norm": 0.26256983240223464, + "acc_norm_stderr": 0.014716824273017754 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.62, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.62, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.35294117647058826, + "acc_stderr": 0.029029422815681404, + "acc_norm": 0.35294117647058826, + "acc_norm_stderr": 0.029029422815681404 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5306122448979592, + "acc_stderr": 0.031949171367580624, + "acc_norm": 0.5306122448979592, + "acc_norm_stderr": 0.031949171367580624 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6708860759493671, + 
"acc_stderr": 0.030587326294702358, + "acc_norm": 0.6708860759493671, + "acc_norm_stderr": 0.030587326294702358 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3363754889178618, + "acc_stderr": 0.012067083079452225, + "acc_norm": 0.3363754889178618, + "acc_norm_stderr": 0.012067083079452225 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.47549019607843135, + "acc_stderr": 0.035050931943487976, + "acc_norm": 0.47549019607843135, + "acc_norm_stderr": 0.035050931943487976 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.503030303030303, + "acc_stderr": 0.03904272341431857, + "acc_norm": 0.503030303030303, + "acc_norm_stderr": 0.03904272341431857 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3353733170134639, + "mc1_stderr": 0.016527534039668987, + "mc2": 0.5050793518460981, + "mc2_stderr": 0.015853347327024214 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.35537190082644626, + "acc_stderr": 0.01645549600031451, + "acc_norm": 0.3837072018890201, + "acc_norm_stderr": 0.01671892463723182 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + 
"harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "megastudyedu/ME-dpo-7B-v1.0", + "model_sha": "9af72e83225f04498b55091d3adcbf4b2e85855d", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 
0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/megastudyedu/ME-dpo-7B-v1.1/result_2024-01-29 03:20:32.json b/megastudyedu/ME-dpo-7B-v1.1/result_2024-01-29 03:20:32.json new file mode 100644 index 0000000000000000000000000000000000000000..74099da452ee825ea5ddf1123a11e4d1dcd9922f --- /dev/null +++ b/megastudyedu/ME-dpo-7B-v1.1/result_2024-01-29 03:20:32.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4249146757679181, + "acc_stderr": 0.014445698968520769, + "acc_norm": 0.48293515358361777, + "acc_norm_stderr": 0.014602878388536604 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4062935670185222, + "acc_stderr": 0.004901368629533413, + "acc_norm": 0.5427205735909182, + "acc_norm_stderr": 0.004971534874389942 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5614035087719298, + "acc_stderr": 0.038057975055904594, + "acc_norm": 0.5614035087719298, + "acc_norm_stderr": 0.038057975055904594 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6310679611650486, + "acc_stderr": 0.0477761518115674, + "acc_norm": 0.6310679611650486, + "acc_norm_stderr": 0.0477761518115674 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5376756066411239, + "acc_stderr": 0.017829131764287177, + "acc_norm": 0.5376756066411239, + "acc_norm_stderr": 0.017829131764287177 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.43703703703703706, + "acc_stderr": 0.04284958639753399, + "acc_norm": 0.43703703703703706, + "acc_norm_stderr": 0.04284958639753399 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.451063829787234, + "acc_stderr": 0.032529096196131965, + "acc_norm": 0.451063829787234, + "acc_norm_stderr": 0.032529096196131965 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42771084337349397, + 
"acc_stderr": 0.038515976837185335, + "acc_norm": 0.42771084337349397, + "acc_norm_stderr": 0.038515976837185335 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5112540192926045, + "acc_stderr": 0.028390897396863537, + "acc_norm": 0.5112540192926045, + "acc_norm_stderr": 0.028390897396863537 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4798206278026906, + "acc_stderr": 0.033530461674123, + "acc_norm": 0.4798206278026906, + "acc_norm_stderr": 0.033530461674123 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4732824427480916, + "acc_stderr": 0.04379024936553893, + "acc_norm": 0.4732824427480916, + "acc_norm_stderr": 0.04379024936553893 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6161616161616161, + "acc_stderr": 0.034648816750163375, + "acc_norm": 0.6161616161616161, + "acc_norm_stderr": 0.034648816750163375 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.46206896551724136, + "acc_stderr": 0.041546596717075474, + "acc_norm": 0.46206896551724136, + "acc_norm_stderr": 0.041546596717075474 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3627450980392157, + "acc_stderr": 0.04784060704105654, + "acc_norm": 0.3627450980392157, + "acc_norm_stderr": 0.04784060704105654 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5588235294117647, + "acc_stderr": 0.032252942323996406, + "acc_norm": 0.5588235294117647, + "acc_norm_stderr": 0.032252942323996406 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4948717948717949, + "acc_stderr": 0.025349672906838667, + "acc_norm": 0.4948717948717949, + "acc_norm_stderr": 0.025349672906838667 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.63, + "acc_stderr": 0.04852365870939098, + "acc_norm": 0.63, + "acc_norm_stderr": 0.04852365870939098 + }, + 
"harness|ko_mmlu_global_facts|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.04803752235190193, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.04803752235190193 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4876847290640394, + "acc_stderr": 0.035169204442208966, + "acc_norm": 0.4876847290640394, + "acc_norm_stderr": 0.035169204442208966 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5258064516129032, + "acc_stderr": 0.028406095057653333, + "acc_norm": 0.5258064516129032, + "acc_norm_stderr": 0.028406095057653333 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7350427350427351, + "acc_stderr": 0.028911208802749472, + "acc_norm": 0.7350427350427351, + "acc_norm_stderr": 0.028911208802749472 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5471698113207547, + "acc_stderr": 0.03063562795796182, + "acc_norm": 0.5471698113207547, + "acc_norm_stderr": 0.03063562795796182 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5909090909090909, + "acc_stderr": 0.04709306978661896, + "acc_norm": 0.5909090909090909, + "acc_norm_stderr": 0.04709306978661896 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3592592592592593, + "acc_stderr": 0.02925290592725197, + "acc_norm": 0.3592592592592593, + "acc_norm_stderr": 0.02925290592725197 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3509933774834437, + "acc_stderr": 0.03896981964257374, + "acc_norm": 0.3509933774834437, + "acc_norm_stderr": 0.03896981964257374 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.681592039800995, + "acc_stderr": 0.03294118479054095, + "acc_norm": 0.681592039800995, + "acc_norm_stderr": 0.03294118479054095 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4508670520231214, + "acc_stderr": 0.037940126746970296, + "acc_norm": 0.4508670520231214, + 
"acc_norm_stderr": 0.037940126746970296 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3862433862433862, + "acc_stderr": 0.02507598176760168, + "acc_norm": 0.3862433862433862, + "acc_norm_stderr": 0.02507598176760168 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.04174752578923185, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.04174752578923185 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.57, + "acc_stderr": 0.04975698519562426, + "acc_norm": 0.57, + "acc_norm_stderr": 0.04975698519562426 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5664739884393064, + "acc_stderr": 0.026680134761679217, + "acc_norm": 0.5664739884393064, + "acc_norm_stderr": 0.026680134761679217 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5214723926380368, + "acc_stderr": 0.0392474687675113, + "acc_norm": 0.5214723926380368, + "acc_norm_stderr": 0.0392474687675113 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4876543209876543, + "acc_stderr": 0.027812262269327242, + "acc_norm": 0.4876543209876543, + "acc_norm_stderr": 0.027812262269327242 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5854922279792746, + "acc_stderr": 0.035553003195576686, + "acc_norm": 0.5854922279792746, + "acc_norm_stderr": 0.035553003195576686 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3684210526315789, + "acc_stderr": 0.04537815354939391, + "acc_norm": 0.3684210526315789, + "acc_norm_stderr": 0.04537815354939391 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5596330275229358, + "acc_stderr": 0.02128431062376155, + "acc_norm": 
0.5596330275229358, + "acc_norm_stderr": 0.02128431062376155 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.40476190476190477, + "acc_stderr": 0.043902592653775614, + "acc_norm": 0.40476190476190477, + "acc_norm_stderr": 0.043902592653775614 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.49673202614379086, + "acc_stderr": 0.02862930519400354, + "acc_norm": 0.49673202614379086, + "acc_norm_stderr": 0.02862930519400354 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6528925619834711, + "acc_stderr": 0.04345724570292535, + "acc_norm": 0.6528925619834711, + "acc_norm_stderr": 0.04345724570292535 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.506578947368421, + "acc_stderr": 0.04068590050224971, + "acc_norm": 0.506578947368421, + "acc_norm_stderr": 0.04068590050224971 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4215686274509804, + "acc_stderr": 0.01997742260022747, + "acc_norm": 0.4215686274509804, + "acc_norm_stderr": 0.01997742260022747 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.028121636040639886, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.028121636040639886 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.41964285714285715, + "acc_stderr": 0.04684099321077106, + "acc_norm": 0.41964285714285715, + "acc_norm_stderr": 0.04684099321077106 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4212962962962963, + "acc_stderr": 0.033674621388960775, + "acc_norm": 0.4212962962962963, + "acc_norm_stderr": 0.033674621388960775 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.21340782122905028, + "acc_stderr": 0.01370285993219609, + "acc_norm": 0.21340782122905028, + "acc_norm_stderr": 0.01370285993219609 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.47, + 
"acc_stderr": 0.050161355804659205, + "acc_norm": 0.47, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.67, + "acc_stderr": 0.047258156262526094, + "acc_norm": 0.67, + "acc_norm_stderr": 0.047258156262526094 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.33455882352941174, + "acc_stderr": 0.028661996202335307, + "acc_norm": 0.33455882352941174, + "acc_norm_stderr": 0.028661996202335307 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5346938775510204, + "acc_stderr": 0.03193207024425314, + "acc_norm": 0.5346938775510204, + "acc_norm_stderr": 0.03193207024425314 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6919831223628692, + "acc_stderr": 0.03005238933560569, + "acc_norm": 0.6919831223628692, + "acc_norm_stderr": 0.03005238933560569 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.35853976531942633, + "acc_stderr": 0.012248487319682741, + "acc_norm": 0.35853976531942633, + "acc_norm_stderr": 0.012248487319682741 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.45098039215686275, + "acc_stderr": 0.03492406104163614, + "acc_norm": 0.45098039215686275, + "acc_norm_stderr": 0.03492406104163614 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.47878787878787876, + "acc_stderr": 0.03900828913737301, + "acc_norm": 0.47878787878787876, + "acc_norm_stderr": 0.03900828913737301 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3378212974296206, + "mc1_stderr": 0.016557167322516896, + "mc2": 0.5037266909788182, + "mc2_stderr": 0.015672155620944272 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4179456906729634, + "acc_stderr": 0.016957292005279713, + "acc_norm": 0.44391971664698937, + "acc_norm_stderr": 0.017081884623542546 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + 
"harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 
1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "megastudyedu/ME-dpo-7B-v1.1", + "model_sha": "58da387207a1c1e14dc34438370a112806fd6959", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/melonpower39/unv_v0.1.5/result_2024-01-02 00:19:55.json b/melonpower39/unv_v0.1.5/result_2024-01-02 00:19:55.json new file mode 100644 index 0000000000000000000000000000000000000000..16ed22fa2c141def8935b2d5aeb37d2abcdbfa93 --- /dev/null +++ b/melonpower39/unv_v0.1.5/result_2024-01-02 00:19:55.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.41723549488054607, + "acc_stderr": 0.014409825518403082, + "acc_norm": 0.4598976109215017, + "acc_norm_stderr": 0.014564318856924848 + }, + "harness|ko_hellaswag|10": { + "acc": 0.38478390758812986, + "acc_stderr": 0.004855498343308388, + "acc_norm": 0.48904600677155946, + "acc_norm_stderr": 0.0049885838203099185 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.3391812865497076, + "acc_stderr": 0.03631053496488905, + "acc_norm": 0.3391812865497076, + "acc_norm_stderr": 0.03631053496488905 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.3300970873786408, + "acc_stderr": 0.0465614711001235, + "acc_norm": 0.3300970873786408, + "acc_norm_stderr": 0.0465614711001235 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.33077905491698595, + 
"acc_stderr": 0.016824818462563722, + "acc_norm": 0.33077905491698595, + "acc_norm_stderr": 0.016824818462563722 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.32592592592592595, + "acc_stderr": 0.040491220417025055, + "acc_norm": 0.32592592592592595, + "acc_norm_stderr": 0.040491220417025055 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3191489361702128, + "acc_stderr": 0.03047297336338005, + "acc_norm": 0.3191489361702128, + "acc_norm_stderr": 0.03047297336338005 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.2891566265060241, + "acc_stderr": 0.03529486801511115, + "acc_norm": 0.2891566265060241, + "acc_norm_stderr": 0.03529486801511115 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.35691318327974275, + "acc_stderr": 0.027210420375934005, + "acc_norm": 0.35691318327974275, + "acc_norm_stderr": 0.027210420375934005 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3183856502242152, + "acc_stderr": 0.03126580522513713, + "acc_norm": 0.3183856502242152, + "acc_norm_stderr": 0.03126580522513713 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.22137404580152673, + "acc_stderr": 0.0364129708131373, + "acc_norm": 0.22137404580152673, + "acc_norm_stderr": 0.0364129708131373 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.29292929292929293, + "acc_stderr": 0.03242497958178815, + "acc_norm": 0.29292929292929293, + "acc_norm_stderr": 0.03242497958178815 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3931034482758621, + "acc_stderr": 0.040703290137070705, + "acc_norm": 0.3931034482758621, + "acc_norm_stderr": 0.040703290137070705 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 
0.20588235294117646, + "acc_stderr": 0.04023382273617748, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.04023382273617748 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.39915966386554624, + "acc_stderr": 0.03181110032413926, + "acc_norm": 0.39915966386554624, + "acc_norm_stderr": 0.03181110032413926 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.38461538461538464, + "acc_stderr": 0.024666744915187222, + "acc_norm": 0.38461538461538464, + "acc_norm_stderr": 0.024666744915187222 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.44, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.44, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04557239513497752, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04557239513497752 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3645320197044335, + "acc_stderr": 0.0338640574606209, + "acc_norm": 0.3645320197044335, + "acc_norm_stderr": 0.0338640574606209 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.33548387096774196, + "acc_stderr": 0.026860206444724356, + "acc_norm": 0.33548387096774196, + "acc_norm_stderr": 0.026860206444724356 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.03193705726200293, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.03193705726200293 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3283018867924528, + "acc_stderr": 0.02890159361241178, + "acc_norm": 0.3283018867924528, + "acc_norm_stderr": 0.02890159361241178 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.35454545454545455, + "acc_stderr": 0.04582004841505416, + "acc_norm": 0.35454545454545455, + "acc_norm_stderr": 0.04582004841505416 + }, + 
"harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3111111111111111, + "acc_stderr": 0.02822644674968352, + "acc_norm": 0.3111111111111111, + "acc_norm_stderr": 0.02822644674968352 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.038020397601079024, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.038020397601079024 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.39800995024875624, + "acc_stderr": 0.034611994290400135, + "acc_norm": 0.39800995024875624, + "acc_norm_stderr": 0.034611994290400135 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2774566473988439, + "acc_stderr": 0.034140140070440354, + "acc_norm": 0.2774566473988439, + "acc_norm_stderr": 0.034140140070440354 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.023919984164047732, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.023919984164047732 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3263888888888889, + "acc_stderr": 0.03921067198982266, + "acc_norm": 0.3263888888888889, + "acc_norm_stderr": 0.03921067198982266 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.23, + "acc_stderr": 0.042295258468165065, + "acc_norm": 0.23, + "acc_norm_stderr": 0.042295258468165065 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.45, + "acc_stderr": 0.04999999999999999, + "acc_norm": 0.45, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.3815028901734104, + "acc_stderr": 0.02615219861972679, + "acc_norm": 0.3815028901734104, + "acc_norm_stderr": 0.02615219861972679 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.37423312883435583, + "acc_stderr": 0.03802068102899616, + "acc_norm": 0.37423312883435583, + "acc_norm_stderr": 0.03802068102899616 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3271604938271605, + "acc_stderr": 0.026105673861409818, + "acc_norm": 0.3271604938271605, + 
"acc_norm_stderr": 0.026105673861409818 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.34196891191709844, + "acc_stderr": 0.03423465100104283, + "acc_norm": 0.34196891191709844, + "acc_norm_stderr": 0.03423465100104283 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.03999423879281338, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.03999423879281338 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3339449541284404, + "acc_stderr": 0.020220554196736403, + "acc_norm": 0.3339449541284404, + "acc_norm_stderr": 0.020220554196736403 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3253968253968254, + "acc_stderr": 0.041905964388711366, + "acc_norm": 0.3253968253968254, + "acc_norm_stderr": 0.041905964388711366 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.026568921015457155, + "acc_norm": 0.3137254901960784, + "acc_norm_stderr": 0.026568921015457155 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5619834710743802, + "acc_stderr": 0.045291468044357915, + "acc_norm": 0.5619834710743802, + "acc_norm_stderr": 0.045291468044357915 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.32894736842105265, + "acc_stderr": 0.03823428969926604, + "acc_norm": 0.32894736842105265, + "acc_norm_stderr": 0.03823428969926604 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.32516339869281047, + "acc_stderr": 0.018950886770806297, + "acc_norm": 0.32516339869281047, + "acc_norm_stderr": 0.018950886770806297 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32978723404255317, + "acc_stderr": 
0.028045946942042398, + "acc_norm": 0.32978723404255317, + "acc_norm_stderr": 0.028045946942042398 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.04287858751340455, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.04287858751340455 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.03293377139415191, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.03293377139415191 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.21452513966480447, + "acc_stderr": 0.01372892340782885, + "acc_norm": 0.21452513966480447, + "acc_norm_stderr": 0.01372892340782885 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3272058823529412, + "acc_stderr": 0.02850145286039658, + "acc_norm": 0.3272058823529412, + "acc_norm_stderr": 0.02850145286039658 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.35918367346938773, + "acc_stderr": 0.030713560455108493, + "acc_norm": 0.35918367346938773, + "acc_norm_stderr": 0.030713560455108493 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.46835443037974683, + "acc_stderr": 0.03248197400511075, + "acc_norm": 0.46835443037974683, + "acc_norm_stderr": 0.03248197400511075 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3344198174706649, + "acc_stderr": 0.012049668983214936, + "acc_norm": 0.3344198174706649, + "acc_norm_stderr": 0.012049668983214936 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.3431372549019608, + "acc_stderr": 0.033321399446680854, + "acc_norm": 0.3431372549019608, + "acc_norm_stderr": 0.033321399446680854 + }, + 
"harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.03895658065271846, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.03895658065271846 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24724602203182375, + "mc1_stderr": 0.015102404797359649, + "mc2": 0.3972994639279278, + "mc2_stderr": 0.01650550802240544 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3482880755608028, + "acc_stderr": 0.016379926739148048, + "acc_norm": 0.41204250295159384, + "acc_norm_stderr": 0.016922276738528357 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + 
"harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "melonpower39/unv_v0.1.5", + "model_sha": "9eec66ee38d324e3617bfb0e14f03717481482e4", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/meraGPT/mera-mix-4x7B/result_2024-06-10 09:35:05.json b/meraGPT/mera-mix-4x7B/result_2024-06-10 09:35:05.json new file mode 100644 index 0000000000000000000000000000000000000000..afc4d42722bdd3e218cacbd2e2f6a4a0100eae14 --- /dev/null +++ b/meraGPT/mera-mix-4x7B/result_2024-06-10 09:35:05.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.39078498293515357, + 
"acc_stderr": 0.014258563880513778, + "acc_norm": 0.447098976109215, + "acc_norm_stderr": 0.014529380160526845 + }, + "harness|ko_hellaswag|10": { + "acc": 0.39623580959968135, + "acc_stderr": 0.004881148866874186, + "acc_norm": 0.5267874925313683, + "acc_norm_stderr": 0.0049826152330571085 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4678362573099415, + "acc_stderr": 0.03826882417660369, + "acc_norm": 0.4678362573099415, + "acc_norm_stderr": 0.03826882417660369 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6116504854368932, + "acc_stderr": 0.04825729337356389, + "acc_norm": 0.6116504854368932, + "acc_norm_stderr": 0.04825729337356389 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.45721583652618136, + "acc_stderr": 0.017814385238534427, + "acc_norm": 0.45721583652618136, + "acc_norm_stderr": 0.017814385238534427 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37777777777777777, + "acc_stderr": 0.04188307537595853, + "acc_norm": 0.37777777777777777, + "acc_norm_stderr": 0.04188307537595853 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.41702127659574467, + "acc_stderr": 0.03223276266711712, + "acc_norm": 0.41702127659574467, + "acc_norm_stderr": 0.03223276266711712 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3614457831325301, + "acc_stderr": 0.03740059382029319, + "acc_norm": 0.3614457831325301, + "acc_norm_stderr": 0.03740059382029319 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.43086816720257237, + "acc_stderr": 0.02812534098397271, + "acc_norm": 0.43086816720257237, + "acc_norm_stderr": 0.02812534098397271 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4349775784753363, + "acc_stderr": 0.03327283370271344, + "acc_norm": 0.4349775784753363, + "acc_norm_stderr": 0.03327283370271344 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 
0.42748091603053434, + "acc_stderr": 0.043389203057924, + "acc_norm": 0.42748091603053434, + "acc_norm_stderr": 0.043389203057924 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5404040404040404, + "acc_stderr": 0.035507024651313425, + "acc_norm": 0.5404040404040404, + "acc_norm_stderr": 0.035507024651313425 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4413793103448276, + "acc_stderr": 0.04137931034482758, + "acc_norm": 0.4413793103448276, + "acc_norm_stderr": 0.04137931034482758 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.04158307533083286, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.04158307533083286 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.49159663865546216, + "acc_stderr": 0.032473902765696686, + "acc_norm": 0.49159663865546216, + "acc_norm_stderr": 0.032473902765696686 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4641025641025641, + "acc_stderr": 0.025285585990017827, + "acc_norm": 0.4641025641025641, + "acc_norm_stderr": 0.025285585990017827 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.62, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.62, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5185185185185185, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.5185185185185185, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3891625615763547, + "acc_stderr": 0.03430462416103872, + "acc_norm": 0.3891625615763547, + "acc_norm_stderr": 0.03430462416103872 + }, + 
"harness|ko_mmlu_high_school_biology|5": { + "acc": 0.44193548387096776, + "acc_stderr": 0.02825155790684974, + "acc_norm": 0.44193548387096776, + "acc_norm_stderr": 0.02825155790684974 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7393162393162394, + "acc_stderr": 0.02876034895652341, + "acc_norm": 0.7393162393162394, + "acc_norm_stderr": 0.02876034895652341 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4641509433962264, + "acc_stderr": 0.030693675018458003, + "acc_norm": 0.4641509433962264, + "acc_norm_stderr": 0.030693675018458003 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4636363636363636, + "acc_stderr": 0.047764491623961985, + "acc_norm": 0.4636363636363636, + "acc_norm_stderr": 0.047764491623961985 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3296296296296296, + "acc_stderr": 0.028661201116524572, + "acc_norm": 0.3296296296296296, + "acc_norm_stderr": 0.028661201116524572 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2781456953642384, + "acc_stderr": 0.03658603262763743, + "acc_norm": 0.2781456953642384, + "acc_norm_stderr": 0.03658603262763743 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5422885572139303, + "acc_stderr": 0.035228658640995975, + "acc_norm": 0.5422885572139303, + "acc_norm_stderr": 0.035228658640995975 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4161849710982659, + "acc_stderr": 0.03758517775404948, + "acc_norm": 0.4161849710982659, + "acc_norm_stderr": 0.03758517775404948 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.38095238095238093, + "acc_stderr": 0.02501074911613759, + "acc_norm": 0.38095238095238093, + "acc_norm_stderr": 0.02501074911613759 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3472222222222222, + "acc_stderr": 0.039812405437178615, + "acc_norm": 0.3472222222222222, + "acc_norm_stderr": 0.039812405437178615 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + 
"acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.55, + "acc_stderr": 0.05, + "acc_norm": 0.55, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5115606936416185, + "acc_stderr": 0.02691189868637792, + "acc_norm": 0.5115606936416185, + "acc_norm_stderr": 0.02691189868637792 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.49079754601226994, + "acc_stderr": 0.03927705600787443, + "acc_norm": 0.49079754601226994, + "acc_norm_stderr": 0.03927705600787443 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4567901234567901, + "acc_stderr": 0.027716661650194038, + "acc_norm": 0.4567901234567901, + "acc_norm_stderr": 0.027716661650194038 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.49222797927461137, + "acc_stderr": 0.03608003225569654, + "acc_norm": 0.49222797927461137, + "acc_norm_stderr": 0.03608003225569654 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3157894736842105, + "acc_stderr": 0.04372748290278007, + "acc_norm": 0.3157894736842105, + "acc_norm_stderr": 0.04372748290278007 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5082568807339449, + "acc_stderr": 0.021434399918214334, + "acc_norm": 0.5082568807339449, + "acc_norm_stderr": 0.021434399918214334 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4365079365079365, + "acc_stderr": 0.044359328928514664, + "acc_norm": 0.4365079365079365, + "acc_norm_stderr": 0.044359328928514664 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.028452639985088006, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.028452639985088006 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.49, + 
"acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6033057851239669, + "acc_stderr": 0.044658697805310094, + "acc_norm": 0.6033057851239669, + "acc_norm_stderr": 0.044658697805310094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40131578947368424, + "acc_stderr": 0.03988903703336284, + "acc_norm": 0.40131578947368424, + "acc_norm_stderr": 0.03988903703336284 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3839869281045752, + "acc_stderr": 0.019675808135281515, + "acc_norm": 0.3839869281045752, + "acc_norm_stderr": 0.019675808135281515 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3723404255319149, + "acc_stderr": 0.028838921471251458, + "acc_norm": 0.3723404255319149, + "acc_norm_stderr": 0.028838921471251458 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4017857142857143, + "acc_stderr": 0.04653333146973646, + "acc_norm": 0.4017857142857143, + "acc_norm_stderr": 0.04653333146973646 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.033247089118091176, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.033247089118091176 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.28156424581005585, + "acc_stderr": 0.015042290171866108, + "acc_norm": 0.28156424581005585, + "acc_norm_stderr": 0.015042290171866108 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.68, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.68, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.33455882352941174, + "acc_stderr": 0.028661996202335307, + "acc_norm": 0.33455882352941174, + "acc_norm_stderr": 0.028661996202335307 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5346938775510204, + 
"acc_stderr": 0.03193207024425314, + "acc_norm": 0.5346938775510204, + "acc_norm_stderr": 0.03193207024425314 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5738396624472574, + "acc_stderr": 0.03219035703131775, + "acc_norm": 0.5738396624472574, + "acc_norm_stderr": 0.03219035703131775 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.35071707953063885, + "acc_stderr": 0.01218777337074152, + "acc_norm": 0.35071707953063885, + "acc_norm_stderr": 0.01218777337074152 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.47549019607843135, + "acc_stderr": 0.03505093194348798, + "acc_norm": 0.47549019607843135, + "acc_norm_stderr": 0.03505093194348798 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.503030303030303, + "acc_stderr": 0.03904272341431857, + "acc_norm": 0.503030303030303, + "acc_norm_stderr": 0.03904272341431857 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.41982864137086906, + "mc1_stderr": 0.017277030301775766, + "mc2": 0.5928924697140163, + "mc2_stderr": 0.016240860757211272 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.41912632821723733, + "acc_stderr": 0.016963995010862792, + "acc_norm": 0.42266824085005905, + "acc_norm_stderr": 0.016983506079577604 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + 
"harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + 
"model_name": "meraGPT/mera-mix-4x7B", + "model_sha": "09d965c5ef9b66ce419986027e03a915cb869e43", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/metterian/gemma-pro-ko-10b/result_2024-04-19 07:13:48.json b/metterian/gemma-pro-ko-10b/result_2024-04-19 07:13:48.json new file mode 100644 index 0000000000000000000000000000000000000000..4180e983af0863f7ca8ccb92ca18417faffadb63 --- /dev/null +++ b/metterian/gemma-pro-ko-10b/result_2024-04-19 07:13:48.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.40102389078498296, + "acc_stderr": 0.014322255790719867, + "acc_norm": 0.4872013651877133, + "acc_norm_stderr": 0.014606603181012541 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4213304122684724, + "acc_stderr": 0.0049276318064775644, + "acc_norm": 0.5700059749053973, + "acc_norm_stderr": 0.004940631135803534 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.036155076303109344, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.036155076303109344 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6601941747572816, + "acc_stderr": 0.04689765937278132, + "acc_norm": 0.6601941747572816, + "acc_norm_stderr": 0.04689765937278132 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6194125159642401, + "acc_stderr": 0.017362564126075425, + "acc_norm": 0.6194125159642401, + "acc_norm_stderr": 0.017362564126075425 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4962962962962963, + "acc_stderr": 0.043192236258113303, + "acc_norm": 0.4962962962962963, + "acc_norm_stderr": 0.043192236258113303 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816508, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816508 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.5106382978723404, + 
"acc_stderr": 0.03267862331014063, + "acc_norm": 0.5106382978723404, + "acc_norm_stderr": 0.03267862331014063 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.463855421686747, + "acc_stderr": 0.03882310850890594, + "acc_norm": 0.463855421686747, + "acc_norm_stderr": 0.03882310850890594 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.594855305466238, + "acc_stderr": 0.02788238379132596, + "acc_norm": 0.594855305466238, + "acc_norm_stderr": 0.02788238379132596 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5560538116591929, + "acc_stderr": 0.03334625674242728, + "acc_norm": 0.5560538116591929, + "acc_norm_stderr": 0.03334625674242728 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5725190839694656, + "acc_stderr": 0.04338920305792401, + "acc_norm": 0.5725190839694656, + "acc_norm_stderr": 0.04338920305792401 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6868686868686869, + "acc_stderr": 0.03304205087813652, + "acc_norm": 0.6868686868686869, + "acc_norm_stderr": 0.03304205087813652 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4896551724137931, + "acc_stderr": 0.041657747757287644, + "acc_norm": 0.4896551724137931, + "acc_norm_stderr": 0.041657747757287644 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.04220773659171453, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.04220773659171453 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5126050420168067, + "acc_stderr": 0.03246816765752175, + "acc_norm": 0.5126050420168067, + "acc_norm_stderr": 0.03246816765752175 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5025641025641026, + "acc_stderr": 0.025350672979412184, + "acc_norm": 0.5025641025641026, + "acc_norm_stderr": 0.025350672979412184 + }, + 
"harness|ko_mmlu_computer_security|5": { + "acc": 0.64, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.64, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5740740740740741, + "acc_stderr": 0.0478034362693679, + "acc_norm": 0.5740740740740741, + "acc_norm_stderr": 0.0478034362693679 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.458128078817734, + "acc_stderr": 0.03505630140785741, + "acc_norm": 0.458128078817734, + "acc_norm_stderr": 0.03505630140785741 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5774193548387097, + "acc_stderr": 0.02810096472427264, + "acc_norm": 0.5774193548387097, + "acc_norm_stderr": 0.02810096472427264 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7222222222222222, + "acc_stderr": 0.02934311479809446, + "acc_norm": 0.7222222222222222, + "acc_norm_stderr": 0.02934311479809446 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5433962264150943, + "acc_stderr": 0.030656748696739435, + "acc_norm": 0.5433962264150943, + "acc_norm_stderr": 0.030656748696739435 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5545454545454546, + "acc_stderr": 0.047605488214603246, + "acc_norm": 0.5545454545454546, + "acc_norm_stderr": 0.047605488214603246 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.027940457136228416, + "acc_norm": 0.3, + "acc_norm_stderr": 0.027940457136228416 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.03780445850526733, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.03780445850526733 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6716417910447762, + "acc_stderr": 0.033206858897443244, + "acc_norm": 0.6716417910447762, + "acc_norm_stderr": 0.033206858897443244 + }, + 
"harness|ko_mmlu_college_medicine|5": { + "acc": 0.49710982658959535, + "acc_stderr": 0.03812400565974833, + "acc_norm": 0.49710982658959535, + "acc_norm_stderr": 0.03812400565974833 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.373015873015873, + "acc_stderr": 0.02490699045899257, + "acc_norm": 0.373015873015873, + "acc_norm_stderr": 0.02490699045899257 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5833333333333334, + "acc_stderr": 0.041227287076512825, + "acc_norm": 0.5833333333333334, + "acc_norm_stderr": 0.041227287076512825 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.69, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.69, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5173410404624278, + "acc_stderr": 0.026902900458666647, + "acc_norm": 0.5173410404624278, + "acc_norm_stderr": 0.026902900458666647 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5276073619631901, + "acc_stderr": 0.03922378290610991, + "acc_norm": 0.5276073619631901, + "acc_norm_stderr": 0.03922378290610991 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5493827160493827, + "acc_stderr": 0.027684721415656203, + "acc_norm": 0.5493827160493827, + "acc_norm_stderr": 0.027684721415656203 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5699481865284974, + "acc_stderr": 0.03572954333144808, + "acc_norm": 0.5699481865284974, + "acc_norm_stderr": 0.03572954333144808 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.35964912280701755, + "acc_stderr": 0.04514496132873633, + "acc_norm": 0.35964912280701755, + "acc_norm_stderr": 
0.04514496132873633 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6678899082568808, + "acc_stderr": 0.020192682985423344, + "acc_norm": 0.6678899082568808, + "acc_norm_stderr": 0.020192682985423344 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3492063492063492, + "acc_stderr": 0.04263906892795133, + "acc_norm": 0.3492063492063492, + "acc_norm_stderr": 0.04263906892795133 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.02845263998508801, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.02845263998508801 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6942148760330579, + "acc_stderr": 0.04205953933884122, + "acc_norm": 0.6942148760330579, + "acc_norm_stderr": 0.04205953933884122 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5986842105263158, + "acc_stderr": 0.03988903703336284, + "acc_norm": 0.5986842105263158, + "acc_norm_stderr": 0.03988903703336284 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.477124183006536, + "acc_stderr": 0.020206653187884786, + "acc_norm": 0.477124183006536, + "acc_norm_stderr": 0.020206653187884786 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.36879432624113473, + "acc_stderr": 0.028782227561347237, + "acc_norm": 0.36879432624113473, + "acc_norm_stderr": 0.028782227561347237 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.41964285714285715, + "acc_stderr": 0.04684099321077106, + "acc_norm": 0.41964285714285715, + "acc_norm_stderr": 0.04684099321077106 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.41203703703703703, + "acc_stderr": 0.03356787758160834, + "acc_norm": 0.41203703703703703, + "acc_norm_stderr": 0.03356787758160834 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24581005586592178, + "acc_stderr": 
0.014400296429225608, + "acc_norm": 0.24581005586592178, + "acc_norm_stderr": 0.014400296429225608 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3382352941176471, + "acc_stderr": 0.02873932851398358, + "acc_norm": 0.3382352941176471, + "acc_norm_stderr": 0.02873932851398358 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5306122448979592, + "acc_stderr": 0.031949171367580624, + "acc_norm": 0.5306122448979592, + "acc_norm_stderr": 0.031949171367580624 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6624472573839663, + "acc_stderr": 0.030781549102026212, + "acc_norm": 0.6624472573839663, + "acc_norm_stderr": 0.030781549102026212 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3428943937418514, + "acc_stderr": 0.012123463271585899, + "acc_norm": 0.3428943937418514, + "acc_norm_stderr": 0.012123463271585899 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6029411764705882, + "acc_stderr": 0.03434131164719129, + "acc_norm": 0.6029411764705882, + "acc_norm_stderr": 0.03434131164719129 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6, + "acc_stderr": 0.03825460278380027, + "acc_norm": 0.6, + "acc_norm_stderr": 0.03825460278380027 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24357405140758873, + "mc1_stderr": 0.015026354824910782, + "mc2": 0.39313261890547413, + "mc2_stderr": 0.014791728967704983 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5619834710743802, + "acc_stderr": 0.01705775370216029, + "acc_norm": 0.5867768595041323, + "acc_norm_stderr": 0.01692948023449523 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 
0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + 
"harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "metterian/gemma-pro-ko-10b", + "model_sha": "ec650ec3ce388d27ea585565ceb015bd0d674bb0", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/metterian/llama-2-7b-pt/result_2023-11-24 06:52:27.json b/metterian/llama-2-7b-pt/result_2023-11-24 06:52:27.json new file mode 100644 index 0000000000000000000000000000000000000000..218a6624b47f5f075e4deee0b5594e455ffa5799 --- /dev/null +++ b/metterian/llama-2-7b-pt/result_2023-11-24 06:52:27.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2022184300341297, + "acc_stderr": 0.011737454431872105, + "acc_norm": 0.2354948805460751, + "acc_norm_stderr": 0.012399451855004753 + }, + "harness|ko_hellaswag|10": { + "acc": 0.25144393547102173, + "acc_stderr": 0.0043295650165273165, + "acc_norm": 0.2604062935670185, + "acc_norm_stderr": 0.004379594059141041 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.30409356725146197, + "acc_stderr": 0.03528211258245232, + "acc_norm": 0.30409356725146197, + "acc_norm_stderr": 0.03528211258245232 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.23300970873786409, + "acc_stderr": 0.04185832598928315, + "acc_norm": 
0.23300970873786409, + "acc_norm_stderr": 0.04185832598928315 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2669220945083014, + "acc_stderr": 0.015818450894777562, + "acc_norm": 0.2669220945083014, + "acc_norm_stderr": 0.015818450894777562 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2, + "acc_stderr": 0.03455473702325438, + "acc_norm": 0.2, + "acc_norm_stderr": 0.03455473702325438 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2127659574468085, + "acc_stderr": 0.026754391348039776, + "acc_norm": 0.2127659574468085, + "acc_norm_stderr": 0.026754391348039776 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.2469879518072289, + "acc_stderr": 0.03357351982064536, + "acc_norm": 0.2469879518072289, + "acc_norm_stderr": 0.03357351982064536 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2508038585209003, + "acc_stderr": 0.024619771956697168, + "acc_norm": 0.2508038585209003, + "acc_norm_stderr": 0.024619771956697168 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.2600896860986547, + "acc_stderr": 0.02944249558585747, + "acc_norm": 0.2600896860986547, + "acc_norm_stderr": 0.02944249558585747 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2366412213740458, + "acc_stderr": 0.037276735755969195, + "acc_norm": 0.2366412213740458, + "acc_norm_stderr": 0.037276735755969195 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816505, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816505 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.18686868686868688, + "acc_stderr": 0.027772533334218967, + "acc_norm": 0.18686868686868688, + "acc_norm_stderr": 0.027772533334218967 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.27586206896551724, + "acc_stderr": 0.03724563619774632, + "acc_norm": 0.27586206896551724, + 
"acc_norm_stderr": 0.03724563619774632 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237655, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237655 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.18907563025210083, + "acc_stderr": 0.02543511943810535, + "acc_norm": 0.18907563025210083, + "acc_norm_stderr": 0.02543511943810535 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2153846153846154, + "acc_stderr": 0.020843034557462874, + "acc_norm": 0.2153846153846154, + "acc_norm_stderr": 0.020843034557462874 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.19, + "acc_stderr": 0.039427724440366234, + "acc_norm": 0.19, + "acc_norm_stderr": 0.039427724440366234 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.04453197507374983, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.04453197507374983 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.20689655172413793, + "acc_stderr": 0.02850137816789395, + "acc_norm": 0.20689655172413793, + "acc_norm_stderr": 0.02850137816789395 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.1967741935483871, + "acc_stderr": 0.022616409420742025, + "acc_norm": 0.1967741935483871, + "acc_norm_stderr": 0.022616409420742025 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2905982905982906, + "acc_stderr": 0.029745048572674057, + "acc_norm": 0.2905982905982906, + "acc_norm_stderr": 0.029745048572674057 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2339622641509434, + "acc_stderr": 0.02605529690115292, + "acc_norm": 0.2339622641509434, + "acc_norm_stderr": 0.02605529690115292 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.23636363636363636, + "acc_stderr": 
0.04069306319721375, + "acc_norm": 0.23636363636363636, + "acc_norm_stderr": 0.04069306319721375 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.23703703703703705, + "acc_stderr": 0.02592887613276613, + "acc_norm": 0.23703703703703705, + "acc_norm_stderr": 0.02592887613276613 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2119205298013245, + "acc_stderr": 0.03336767086567977, + "acc_norm": 0.2119205298013245, + "acc_norm_stderr": 0.03336767086567977 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.23880597014925373, + "acc_stderr": 0.030147775935409217, + "acc_norm": 0.23880597014925373, + "acc_norm_stderr": 0.030147775935409217 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.1791907514450867, + "acc_stderr": 0.029242513059063287, + "acc_norm": 0.1791907514450867, + "acc_norm_stderr": 0.029242513059063287 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.26455026455026454, + "acc_stderr": 0.022717467897708617, + "acc_norm": 0.26455026455026454, + "acc_norm_stderr": 0.022717467897708617 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2361111111111111, + "acc_stderr": 0.03551446610810826, + "acc_norm": 0.2361111111111111, + "acc_norm_stderr": 0.03551446610810826 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.19, + "acc_stderr": 0.039427724440366234, + "acc_norm": 0.19, + "acc_norm_stderr": 0.039427724440366234 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2543352601156069, + "acc_stderr": 0.02344582627654554, + "acc_norm": 0.2543352601156069, + "acc_norm_stderr": 0.02344582627654554 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.20245398773006135, + "acc_stderr": 0.03157065078911901, + "acc_norm": 0.20245398773006135, + "acc_norm_stderr": 0.03157065078911901 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 
0.24691358024691357, + "acc_stderr": 0.02399350170904211, + "acc_norm": 0.24691358024691357, + "acc_norm_stderr": 0.02399350170904211 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768077, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768077 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.18652849740932642, + "acc_stderr": 0.02811209121011746, + "acc_norm": 0.18652849740932642, + "acc_norm_stderr": 0.02811209121011746 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.30701754385964913, + "acc_stderr": 0.043391383225798594, + "acc_norm": 0.30701754385964913, + "acc_norm_stderr": 0.043391383225798594 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.1944954128440367, + "acc_stderr": 0.016970289090458043, + "acc_norm": 0.1944954128440367, + "acc_norm_stderr": 0.016970289090458043 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.23015873015873015, + "acc_stderr": 0.037649508797906066, + "acc_norm": 0.23015873015873015, + "acc_norm_stderr": 0.037649508797906066 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.02355083135199509, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.02355083135199509 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2644628099173554, + "acc_stderr": 0.040261875275912046, + "acc_norm": 0.2644628099173554, + "acc_norm_stderr": 0.040261875275912046 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.17763157894736842, + "acc_stderr": 0.031103182383123377, + "acc_norm": 0.17763157894736842, + "acc_norm_stderr": 0.031103182383123377 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.24673202614379086, + "acc_stderr": 0.0174408203674025, + "acc_norm": 0.24673202614379086, + "acc_norm_stderr": 0.0174408203674025 + }, + 
"harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2695035460992908, + "acc_stderr": 0.02646903681859062, + "acc_norm": 0.2695035460992908, + "acc_norm_stderr": 0.02646903681859062 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25892857142857145, + "acc_stderr": 0.041577515398656284, + "acc_norm": 0.25892857142857145, + "acc_norm_stderr": 0.041577515398656284 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.1527777777777778, + "acc_stderr": 0.02453632602613424, + "acc_norm": 0.1527777777777778, + "acc_norm_stderr": 0.02453632602613424 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.21899441340782122, + "acc_stderr": 0.013831676687303198, + "acc_norm": 0.21899441340782122, + "acc_norm_stderr": 0.013831676687303198 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768081, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768081 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.19852941176470587, + "acc_stderr": 0.0242310133705411, + "acc_norm": 0.19852941176470587, + "acc_norm_stderr": 0.0242310133705411 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.17142857142857143, + "acc_stderr": 0.02412746346265015, + "acc_norm": 0.17142857142857143, + "acc_norm_stderr": 0.02412746346265015 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2489451476793249, + "acc_stderr": 0.028146970599422644, + "acc_norm": 0.2489451476793249, + "acc_norm_stderr": 0.028146970599422644 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.23728813559322035, + "acc_stderr": 0.010865436690780262, + "acc_norm": 0.23728813559322035, + "acc_norm_stderr": 0.010865436690780262 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.029331162294251728, 
+ "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.029331162294251728 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.20606060606060606, + "acc_stderr": 0.03158415324047711, + "acc_norm": 0.20606060606060606, + "acc_norm_stderr": 0.03158415324047711 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.22399020807833536, + "mc1_stderr": 0.014594964329474205, + "mc2": 0.4652172706232095, + "mc2_stderr": 0.016830514037368852 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.07792207792207792, + "acc_stderr": 0.009215711972304702, + "acc_norm": 0.2538370720188902, + "acc_norm_stderr": 0.014962672739769986 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + 
"harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "metterian/llama-2-7b-pt", + "model_sha": "92ac9b01be1d6c949d56eb45b4a25e0103d4d31e", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/metterian/llama-2-ko-7b-pt/result_2023-11-25 16:20:36.json b/metterian/llama-2-ko-7b-pt/result_2023-11-25 16:20:36.json new file mode 100644 index 0000000000000000000000000000000000000000..9745efa26acd77620375a921d8067d393ec4396e --- /dev/null +++ b/metterian/llama-2-ko-7b-pt/result_2023-11-25 16:20:36.json @@ -0,0 
+1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.25426621160409557, + "acc_stderr": 0.012724999945157736, + "acc_norm": 0.3122866894197952, + "acc_norm_stderr": 0.013542598541688065 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3370842461661024, + "acc_stderr": 0.004717478335689621, + "acc_norm": 0.41565425214100776, + "acc_norm_stderr": 0.004918272352137549 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.03188578017686398, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.03188578017686398 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.17475728155339806, + "acc_stderr": 0.0376017800602662, + "acc_norm": 0.17475728155339806, + "acc_norm_stderr": 0.0376017800602662 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2848020434227331, + "acc_stderr": 0.016139174096522574, + "acc_norm": 0.2848020434227331, + "acc_norm_stderr": 0.016139174096522574 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.22962962962962963, + "acc_stderr": 0.036333844140734636, + "acc_norm": 0.22962962962962963, + "acc_norm_stderr": 0.036333844140734636 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3191489361702128, + "acc_stderr": 0.030472973363380056, + "acc_norm": 0.3191489361702128, + "acc_norm_stderr": 0.030472973363380056 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.2289156626506024, + "acc_stderr": 0.03270745277352477, + "acc_norm": 0.2289156626506024, + "acc_norm_stderr": 0.03270745277352477 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2829581993569132, + "acc_stderr": 0.02558306248998483, + "acc_norm": 0.2829581993569132, + "acc_norm_stderr": 0.02558306248998483 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.38565022421524664, + "acc_stderr": 0.03266842214289202, + "acc_norm": 0.38565022421524664, + 
"acc_norm_stderr": 0.03266842214289202 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.19083969465648856, + "acc_stderr": 0.03446513350752599, + "acc_norm": 0.19083969465648856, + "acc_norm_stderr": 0.03446513350752599 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.2727272727272727, + "acc_stderr": 0.03173071239071724, + "acc_norm": 0.2727272727272727, + "acc_norm_stderr": 0.03173071239071724 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2, + "acc_stderr": 0.0333333333333333, + "acc_norm": 0.2, + "acc_norm_stderr": 0.0333333333333333 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.04280105837364395, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.04280105837364395 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3235294117647059, + "acc_stderr": 0.030388353551886838, + "acc_norm": 0.3235294117647059, + "acc_norm_stderr": 0.030388353551886838 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3435897435897436, + "acc_stderr": 0.02407869658063548, + "acc_norm": 0.3435897435897436, + "acc_norm_stderr": 0.02407869658063548 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.22, + "acc_stderr": 0.041633319989322674, + "acc_norm": 0.22, + "acc_norm_stderr": 0.041633319989322674 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.21296296296296297, + "acc_stderr": 0.03957835471980981, + "acc_norm": 0.21296296296296297, + "acc_norm_stderr": 0.03957835471980981 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2512315270935961, + "acc_stderr": 0.030516530732694436, + "acc_norm": 0.2512315270935961, + 
"acc_norm_stderr": 0.030516530732694436 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2967741935483871, + "acc_stderr": 0.02598850079241188, + "acc_norm": 0.2967741935483871, + "acc_norm_stderr": 0.02598850079241188 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.19658119658119658, + "acc_stderr": 0.02603538609895129, + "acc_norm": 0.19658119658119658, + "acc_norm_stderr": 0.02603538609895129 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.33584905660377357, + "acc_stderr": 0.029067220146644826, + "acc_norm": 0.33584905660377357, + "acc_norm_stderr": 0.029067220146644826 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.24545454545454545, + "acc_stderr": 0.04122066502878284, + "acc_norm": 0.24545454545454545, + "acc_norm_stderr": 0.04122066502878284 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24814814814814815, + "acc_stderr": 0.0263357394040558, + "acc_norm": 0.24814814814814815, + "acc_norm_stderr": 0.0263357394040558 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33112582781456956, + "acc_stderr": 0.038425817186598696, + "acc_norm": 0.33112582781456956, + "acc_norm_stderr": 0.038425817186598696 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.25870646766169153, + "acc_stderr": 0.03096590312357303, + "acc_norm": 0.25870646766169153, + "acc_norm_stderr": 0.03096590312357303 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.23699421965317918, + "acc_stderr": 0.03242414757483099, + "acc_norm": 0.23699421965317918, + "acc_norm_stderr": 0.03242414757483099 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.022569897074918417, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.022569897074918417 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.03745554791462457, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.03745554791462457 + }, + "harness|ko_mmlu_college_chemistry|5": { + 
"acc": 0.22, + "acc_stderr": 0.041633319989322695, + "acc_norm": 0.22, + "acc_norm_stderr": 0.041633319989322695 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2630057803468208, + "acc_stderr": 0.023703099525258176, + "acc_norm": 0.2630057803468208, + "acc_norm_stderr": 0.023703099525258176 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2331288343558282, + "acc_stderr": 0.033220157957767414, + "acc_norm": 0.2331288343558282, + "acc_norm_stderr": 0.033220157957767414 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.02438366553103545, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.02438366553103545 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.32642487046632124, + "acc_stderr": 0.033840286211432945, + "acc_norm": 0.32642487046632124, + "acc_norm_stderr": 0.033840286211432945 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.041857744240220575, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.041857744240220575 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.26055045871559634, + "acc_stderr": 0.01881918203485007, + "acc_norm": 0.26055045871559634, + "acc_norm_stderr": 0.01881918203485007 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.04134913018303316, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.04134913018303316 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.21895424836601307, + "acc_stderr": 0.02367908986180772, + "acc_norm": 0.21895424836601307, + "acc_norm_stderr": 0.02367908986180772 + }, + 
"harness|ko_mmlu_business_ethics|5": { + "acc": 0.21, + "acc_stderr": 0.04093601807403326, + "acc_norm": 0.21, + "acc_norm_stderr": 0.04093601807403326 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.3305785123966942, + "acc_stderr": 0.042943408452120926, + "acc_norm": 0.3305785123966942, + "acc_norm_stderr": 0.042943408452120926 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.18421052631578946, + "acc_stderr": 0.0315469804508223, + "acc_norm": 0.18421052631578946, + "acc_norm_stderr": 0.0315469804508223 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.23039215686274508, + "acc_stderr": 0.01703522925803403, + "acc_norm": 0.23039215686274508, + "acc_norm_stderr": 0.01703522925803403 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.25177304964539005, + "acc_stderr": 0.0258921511567094, + "acc_norm": 0.25177304964539005, + "acc_norm_stderr": 0.0258921511567094 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2767857142857143, + "acc_stderr": 0.042466243366976256, + "acc_norm": 0.2767857142857143, + "acc_norm_stderr": 0.042466243366976256 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.0340470532865388, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.0340470532865388 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036845, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036845 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4375, + "acc_stderr": 0.030134614954403924, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.030134614954403924 + }, 
+ "harness|ko_mmlu_security_studies|5": { + "acc": 0.3877551020408163, + "acc_stderr": 0.03119223072679566, + "acc_norm": 0.3877551020408163, + "acc_norm_stderr": 0.03119223072679566 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.26582278481012656, + "acc_stderr": 0.02875679962965834, + "acc_norm": 0.26582278481012656, + "acc_norm_stderr": 0.02875679962965834 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.23728813559322035, + "acc_stderr": 0.01086543669078026, + "acc_norm": 0.23728813559322035, + "acc_norm_stderr": 0.01086543669078026 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2107843137254902, + "acc_stderr": 0.02862654791243739, + "acc_norm": 0.2107843137254902, + "acc_norm_stderr": 0.02862654791243739 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.22424242424242424, + "acc_stderr": 0.03256866661681102, + "acc_norm": 0.22424242424242424, + "acc_norm_stderr": 0.03256866661681102 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24479804161566707, + "mc1_stderr": 0.015051869486715008, + "mc2": 0.40455212863354156, + "mc2_stderr": 0.015092027959978592 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.30342384887839435, + "acc_stderr": 0.015806072717909576, + "acc_norm": 0.3730814639905549, + "acc_norm_stderr": 0.016627318275137425 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + 
"harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + 
"harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "metterian/llama-2-ko-7b-pt", + "model_sha": "4fc8a514d966d245f4f4f0e8ffca374aadb2b069", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/metterian/llama-pro-ko-8b/result_2024-02-15 12:16:11.json b/metterian/llama-pro-ko-8b/result_2024-02-15 12:16:11.json new file mode 100644 index 0000000000000000000000000000000000000000..e7bf2cfba675a18c112b9f9d0efc10a6e3580ba5 --- /dev/null +++ b/metterian/llama-pro-ko-8b/result_2024-02-15 12:16:11.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3430034129692833, + "acc_stderr": 0.013872423223718169, + "acc_norm": 0.40187713310580203, + "acc_norm_stderr": 0.014327268614578276 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3836885082652858, + "acc_stderr": 0.004852896681736761, + "acc_norm": 0.5125473013343955, + "acc_norm_stderr": 0.00498821003383201 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.49707602339181284, + "acc_stderr": 0.03834759370936839, + "acc_norm": 0.49707602339181284, + "acc_norm_stderr": 0.03834759370936839 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.33980582524271846, + "acc_stderr": 0.04689765937278135, + "acc_norm": 0.33980582524271846, + "acc_norm_stderr": 0.04689765937278135 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4329501915708812, + "acc_stderr": 0.017718469101513982, + "acc_norm": 0.4329501915708812, + "acc_norm_stderr": 0.017718469101513982 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3925925925925926, + "acc_stderr": 0.042185062153688786, + "acc_norm": 0.3925925925925926, + "acc_norm_stderr": 0.042185062153688786 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 
0.04725815626252604 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3617021276595745, + "acc_stderr": 0.03141082197596241, + "acc_norm": 0.3617021276595745, + "acc_norm_stderr": 0.03141082197596241 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3493975903614458, + "acc_stderr": 0.0371172519074075, + "acc_norm": 0.3493975903614458, + "acc_norm_stderr": 0.0371172519074075 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.42443729903536975, + "acc_stderr": 0.028071928247946205, + "acc_norm": 0.42443729903536975, + "acc_norm_stderr": 0.028071928247946205 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3632286995515695, + "acc_stderr": 0.032277904428505, + "acc_norm": 0.3632286995515695, + "acc_norm_stderr": 0.032277904428505 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3053435114503817, + "acc_stderr": 0.04039314978724561, + "acc_norm": 0.3053435114503817, + "acc_norm_stderr": 0.04039314978724561 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.0347327959083696, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.0347327959083696 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3724137931034483, + "acc_stderr": 0.040287315329475604, + "acc_norm": 0.3724137931034483, + "acc_norm_stderr": 0.040287315329475604 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.17647058823529413, + "acc_stderr": 0.0379328118530781, + "acc_norm": 0.17647058823529413, + "acc_norm_stderr": 0.0379328118530781 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3403361344537815, + "acc_stderr": 0.030778057422931673, + "acc_norm": 0.3403361344537815, + "acc_norm_stderr": 0.030778057422931673 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.358974358974359, + "acc_stderr": 0.02432173848460237, + 
"acc_norm": 0.358974358974359, + "acc_norm_stderr": 0.02432173848460237 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.047500773411999854, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.047500773411999854 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.37438423645320196, + "acc_stderr": 0.03405155380561952, + "acc_norm": 0.37438423645320196, + "acc_norm_stderr": 0.03405155380561952 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.36774193548387096, + "acc_stderr": 0.027430866579973474, + "acc_norm": 0.36774193548387096, + "acc_norm_stderr": 0.027430866579973474 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5128205128205128, + "acc_stderr": 0.032745319388423504, + "acc_norm": 0.5128205128205128, + "acc_norm_stderr": 0.032745319388423504 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3886792452830189, + "acc_stderr": 0.030000485448675986, + "acc_norm": 0.3886792452830189, + "acc_norm_stderr": 0.030000485448675986 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.38181818181818183, + "acc_stderr": 0.04653429807913509, + "acc_norm": 0.38181818181818183, + "acc_norm_stderr": 0.04653429807913509 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.026842057873833706, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.026842057873833706 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + "acc_stderr": 0.037345356767871984, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.037345356767871984 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.46766169154228854, + 
"acc_stderr": 0.035281314729336065, + "acc_norm": 0.46766169154228854, + "acc_norm_stderr": 0.035281314729336065 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3063583815028902, + "acc_stderr": 0.035149425512674394, + "acc_norm": 0.3063583815028902, + "acc_norm_stderr": 0.035149425512674394 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.26455026455026454, + "acc_stderr": 0.022717467897708607, + "acc_norm": 0.26455026455026454, + "acc_norm_stderr": 0.022717467897708607 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3402777777777778, + "acc_stderr": 0.03962135573486219, + "acc_norm": 0.3402777777777778, + "acc_norm_stderr": 0.03962135573486219 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.3786127167630058, + "acc_stderr": 0.026113749361310345, + "acc_norm": 0.3786127167630058, + "acc_norm_stderr": 0.026113749361310345 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3619631901840491, + "acc_stderr": 0.037757007291414416, + "acc_norm": 0.3619631901840491, + "acc_norm_stderr": 0.037757007291414416 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.41975308641975306, + "acc_stderr": 0.027460099557005138, + "acc_norm": 0.41975308641975306, + "acc_norm_stderr": 0.027460099557005138 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.36787564766839376, + "acc_stderr": 0.034801756684660366, + "acc_norm": 0.36787564766839376, + "acc_norm_stderr": 0.034801756684660366 + }, + "harness|ko_mmlu_econometrics|5": { + 
"acc": 0.30701754385964913, + "acc_stderr": 0.04339138322579858, + "acc_norm": 0.30701754385964913, + "acc_norm_stderr": 0.04339138322579858 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.4091743119266055, + "acc_stderr": 0.02108067026443373, + "acc_norm": 0.4091743119266055, + "acc_norm_stderr": 0.02108067026443373 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.23809523809523808, + "acc_stderr": 0.038095238095238106, + "acc_norm": 0.23809523809523808, + "acc_norm_stderr": 0.038095238095238106 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.40522875816993464, + "acc_stderr": 0.028110928492809068, + "acc_norm": 0.40522875816993464, + "acc_norm_stderr": 0.028110928492809068 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.42, + "acc_stderr": 0.04960449637488584, + "acc_norm": 0.42, + "acc_norm_stderr": 0.04960449637488584 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5785123966942148, + "acc_stderr": 0.04507732278775087, + "acc_norm": 0.5785123966942148, + "acc_norm_stderr": 0.04507732278775087 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3618421052631579, + "acc_stderr": 0.03910525752849725, + "acc_norm": 0.3618421052631579, + "acc_norm_stderr": 0.03910525752849725 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.018635594034423972, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.018635594034423972 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3049645390070922, + "acc_stderr": 0.027464708442022125, + "acc_norm": 0.3049645390070922, + "acc_norm_stderr": 0.027464708442022125 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.21428571428571427, + "acc_stderr": 0.038946411200447915, + "acc_norm": 0.21428571428571427, + "acc_norm_stderr": 0.038946411200447915 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4398148148148148, + "acc_stderr": 0.033851779760448106, + "acc_norm": 0.4398148148148148, + "acc_norm_stderr": 
0.033851779760448106 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24916201117318434, + "acc_stderr": 0.01446589382985994, + "acc_norm": 0.24916201117318434, + "acc_norm_stderr": 0.01446589382985994 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4522058823529412, + "acc_stderr": 0.030233758551596452, + "acc_norm": 0.4522058823529412, + "acc_norm_stderr": 0.030233758551596452 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4530612244897959, + "acc_stderr": 0.03186785930004129, + "acc_norm": 0.4530612244897959, + "acc_norm_stderr": 0.03186785930004129 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.4219409282700422, + "acc_stderr": 0.032148146302403695, + "acc_norm": 0.4219409282700422, + "acc_norm_stderr": 0.032148146302403695 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.28292046936114734, + "acc_stderr": 0.011503891323188976, + "acc_norm": 0.28292046936114734, + "acc_norm_stderr": 0.011503891323188976 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.031660096793998116, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.031660096793998116 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4121212121212121, + "acc_stderr": 0.03843566993588718, + "acc_norm": 0.4121212121212121, + "acc_norm_stderr": 0.03843566993588718 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.25703794369645044, + "mc1_stderr": 0.015298077509485081, + "mc2": 0.4023975248087275, + "mc2_stderr": 0.014765248034231975 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.32585596221959856, + "acc_stderr": 
0.016114023894800326, + "acc_norm": 0.4380165289256198, + "acc_norm_stderr": 0.017057753702160287 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + 
"harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "metterian/llama-pro-ko-8b", + "model_sha": "6aad915c98898c9eb34d965085d07f3186e62631", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/metterian/polyglot-ko-kullm-v2-fix/result_2023-11-03 06:21:31.json b/metterian/polyglot-ko-kullm-v2-fix/result_2023-11-03 06:21:31.json new file mode 100644 index 0000000000000000000000000000000000000000..89fc54d75bdf01dfceda185f417f116baa2eb271 --- /dev/null +++ b/metterian/polyglot-ko-kullm-v2-fix/result_2023-11-03 06:21:31.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.31313993174061433, + "acc_stderr": 0.013552671543623504, + "acc_norm": 0.34982935153583616, + "acc_norm_stderr": 0.013936809212158284 + }, + "harness|ko_hellaswag|10": { + "acc": 0.38697470623381797, + "acc_stderr": 0.004860623733461129, + "acc_norm": 0.49571798446524595, + "acc_norm_stderr": 0.004989598426249536 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.36257309941520466, + "acc_stderr": 
0.0368713061556206, + "acc_norm": 0.36257309941520466, + "acc_norm_stderr": 0.0368713061556206 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2815533980582524, + "acc_stderr": 0.04453254836326467, + "acc_norm": 0.2815533980582524, + "acc_norm_stderr": 0.04453254836326467 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.21711366538952745, + "acc_stderr": 0.014743125394823291, + "acc_norm": 0.21711366538952745, + "acc_norm_stderr": 0.014743125394823291 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34074074074074073, + "acc_stderr": 0.04094376269996794, + "acc_norm": 0.34074074074074073, + "acc_norm_stderr": 0.04094376269996794 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3191489361702128, + "acc_stderr": 0.03047297336338005, + "acc_norm": 0.3191489361702128, + "acc_norm_stderr": 0.03047297336338005 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.30120481927710846, + "acc_stderr": 0.0357160923005348, + "acc_norm": 0.30120481927710846, + "acc_norm_stderr": 0.0357160923005348 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2508038585209003, + "acc_stderr": 0.02461977195669716, + "acc_norm": 0.2508038585209003, + "acc_norm_stderr": 0.02461977195669716 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.2600896860986547, + "acc_stderr": 0.029442495585857473, + "acc_norm": 0.2600896860986547, + "acc_norm_stderr": 0.029442495585857473 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.25190839694656486, + "acc_stderr": 0.03807387116306086, + "acc_norm": 0.25190839694656486, + "acc_norm_stderr": 0.03807387116306086 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.22, + "acc_stderr": 0.041633319989322695, + "acc_norm": 0.22, + "acc_norm_stderr": 0.041633319989322695 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.2222222222222222, + "acc_stderr": 
0.029620227874790482, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.029620227874790482 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.22758620689655173, + "acc_stderr": 0.03493950380131184, + "acc_norm": 0.22758620689655173, + "acc_norm_stderr": 0.03493950380131184 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.04488482852329017, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.04488482852329017 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.23949579831932774, + "acc_stderr": 0.027722065493361273, + "acc_norm": 0.23949579831932774, + "acc_norm_stderr": 0.027722065493361273 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.23076923076923078, + "acc_stderr": 0.02136202772522272, + "acc_norm": 0.23076923076923078, + "acc_norm_stderr": 0.02136202772522272 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909284, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909284 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.040191074725573483, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.040191074725573483 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.18719211822660098, + "acc_stderr": 0.027444924966882618, + "acc_norm": 0.18719211822660098, + "acc_norm_stderr": 0.027444924966882618 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.25806451612903225, + "acc_stderr": 0.02489246917246283, + "acc_norm": 0.25806451612903225, + "acc_norm_stderr": 0.02489246917246283 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.28205128205128205, + "acc_stderr": 0.02948036054954119, + "acc_norm": 0.28205128205128205, + "acc_norm_stderr": 0.02948036054954119 + }, + 
"harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.26037735849056604, + "acc_stderr": 0.02700876609070809, + "acc_norm": 0.26037735849056604, + "acc_norm_stderr": 0.02700876609070809 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.20909090909090908, + "acc_stderr": 0.038950910157241364, + "acc_norm": 0.20909090909090908, + "acc_norm_stderr": 0.038950910157241364 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.21481481481481482, + "acc_stderr": 0.02504044387700069, + "acc_norm": 0.21481481481481482, + "acc_norm_stderr": 0.02504044387700069 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.25165562913907286, + "acc_stderr": 0.03543304234389986, + "acc_norm": 0.25165562913907286, + "acc_norm_stderr": 0.03543304234389986 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.22885572139303484, + "acc_stderr": 0.029705284056772436, + "acc_norm": 0.22885572139303484, + "acc_norm_stderr": 0.029705284056772436 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2254335260115607, + "acc_stderr": 0.03186209851641143, + "acc_norm": 0.2254335260115607, + "acc_norm_stderr": 0.03186209851641143 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.20634920634920634, + "acc_stderr": 0.02084229093011466, + "acc_norm": 0.20634920634920634, + "acc_norm_stderr": 0.02084229093011466 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.22916666666666666, + "acc_stderr": 0.03514697467862388, + "acc_norm": 0.22916666666666666, + "acc_norm_stderr": 0.03514697467862388 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.22, + "acc_stderr": 0.041633319989322716, + "acc_norm": 0.22, + "acc_norm_stderr": 0.041633319989322716 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.25722543352601157, + "acc_stderr": 0.023532925431044287, + "acc_norm": 0.25722543352601157, 
+ "acc_norm_stderr": 0.023532925431044287 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.27607361963190186, + "acc_stderr": 0.0351238528370505, + "acc_norm": 0.27607361963190186, + "acc_norm_stderr": 0.0351238528370505 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2839506172839506, + "acc_stderr": 0.025089478523765127, + "acc_norm": 0.2839506172839506, + "acc_norm_stderr": 0.025089478523765127 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.19170984455958548, + "acc_stderr": 0.028408953626245282, + "acc_norm": 0.19170984455958548, + "acc_norm_stderr": 0.028408953626245282 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.04049339297748141, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.04049339297748141 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.25137614678899084, + "acc_stderr": 0.018599206360287415, + "acc_norm": 0.25137614678899084, + "acc_norm_stderr": 0.018599206360287415 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.04040610178208841, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.04040610178208841 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2679738562091503, + "acc_stderr": 0.025360603796242557, + "acc_norm": 0.2679738562091503, + "acc_norm_stderr": 0.025360603796242557 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.256198347107438, + "acc_stderr": 0.03984979653302872, + "acc_norm": 0.256198347107438, + "acc_norm_stderr": 0.03984979653302872 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.21710526315789475, + "acc_stderr": 0.03355045304882924, + 
"acc_norm": 0.21710526315789475, + "acc_norm_stderr": 0.03355045304882924 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.0180540274588152, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.0180540274588152 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.23404255319148937, + "acc_stderr": 0.025257861359432403, + "acc_norm": 0.23404255319148937, + "acc_norm_stderr": 0.025257861359432403 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.39285714285714285, + "acc_stderr": 0.04635550135609976, + "acc_norm": 0.39285714285714285, + "acc_norm_stderr": 0.04635550135609976 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3425925925925926, + "acc_stderr": 0.032365852526021574, + "acc_norm": 0.3425925925925926, + "acc_norm_stderr": 0.032365852526021574 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23687150837988827, + "acc_stderr": 0.014219570788103987, + "acc_norm": 0.23687150837988827, + "acc_norm_stderr": 0.014219570788103987 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932269, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932269 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.40808823529411764, + "acc_stderr": 0.029855261393483924, + "acc_norm": 0.40808823529411764, + "acc_norm_stderr": 0.029855261393483924 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.21224489795918366, + "acc_stderr": 0.026176967197866764, + "acc_norm": 0.21224489795918366, + "acc_norm_stderr": 0.026176967197866764 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.23628691983122363, + "acc_stderr": 0.02765215314415926, + "acc_norm": 0.23628691983122363, + "acc_norm_stderr": 0.02765215314415926 + }, + 
"harness|ko_mmlu_professional_law|5": { + "acc": 0.25358539765319427, + "acc_stderr": 0.011111715336101136, + "acc_norm": 0.25358539765319427, + "acc_norm_stderr": 0.011111715336101136 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.27941176470588236, + "acc_stderr": 0.031493281045079556, + "acc_norm": 0.27941176470588236, + "acc_norm_stderr": 0.031493281045079556 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2787878787878788, + "acc_stderr": 0.035014387062967806, + "acc_norm": 0.2787878787878788, + "acc_norm_stderr": 0.035014387062967806 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2350061199510404, + "mc1_stderr": 0.014843061507731606, + "mc2": 0.39703809921502775, + "mc2_stderr": 0.01577852528205582 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.32231404958677684, + "acc_stderr": 0.016068253615813967, + "acc_norm": 0.3612750885478158, + "acc_norm_stderr": 0.016515463022412 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + 
"harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "metterian/polyglot-ko-kullm-v2-fix", + "model_sha": "cd7387406ec0e3262d718583f5c204ec6493f5e5", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff 
--git a/microsoft/Orca-2-7b/result_2023-11-21 08:49:11.json b/microsoft/Orca-2-7b/result_2023-11-21 08:49:11.json new file mode 100644 index 0000000000000000000000000000000000000000..a023ce26d64a78b7c4d222f006887d0dbf509bad --- /dev/null +++ b/microsoft/Orca-2-7b/result_2023-11-21 08:49:11.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.29692832764505117, + "acc_stderr": 0.013352025976725225, + "acc_norm": 0.33361774744027306, + "acc_norm_stderr": 0.013778687054176536 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3337980481975702, + "acc_stderr": 0.004706048116764943, + "acc_norm": 0.398725353515236, + "acc_norm_stderr": 0.004886353563571845 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5029239766081871, + "acc_stderr": 0.03834759370936839, + "acc_norm": 0.5029239766081871, + "acc_norm_stderr": 0.03834759370936839 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5339805825242718, + "acc_stderr": 0.04939291447273482, + "acc_norm": 0.5339805825242718, + "acc_norm_stderr": 0.04939291447273482 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.41890166028097064, + "acc_stderr": 0.01764320505237717, + "acc_norm": 0.41890166028097064, + "acc_norm_stderr": 0.01764320505237717 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.28888888888888886, + "acc_stderr": 0.0391545063041425, + "acc_norm": 0.28888888888888886, + "acc_norm_stderr": 0.0391545063041425 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.37446808510638296, + "acc_stderr": 0.031639106653672915, + "acc_norm": 0.37446808510638296, + "acc_norm_stderr": 0.031639106653672915 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3433734939759036, + "acc_stderr": 0.03696584317010602, + "acc_norm": 0.3433734939759036, + "acc_norm_stderr": 0.03696584317010602 + }, + "harness|ko_mmlu_philosophy|5": { 
+ "acc": 0.38263665594855306, + "acc_stderr": 0.027604689028581993, + "acc_norm": 0.38263665594855306, + "acc_norm_stderr": 0.027604689028581993 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3721973094170404, + "acc_stderr": 0.032443052830087304, + "acc_norm": 0.3721973094170404, + "acc_norm_stderr": 0.032443052830087304 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.45038167938931295, + "acc_stderr": 0.04363643698524779, + "acc_norm": 0.45038167938931295, + "acc_norm_stderr": 0.04363643698524779 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.4696969696969697, + "acc_stderr": 0.03555804051763929, + "acc_norm": 0.4696969696969697, + "acc_norm_stderr": 0.03555804051763929 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3724137931034483, + "acc_stderr": 0.040287315329475604, + "acc_norm": 0.3724137931034483, + "acc_norm_stderr": 0.040287315329475604 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.04280105837364395, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.04280105837364395 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.41596638655462187, + "acc_stderr": 0.03201650100739615, + "acc_norm": 0.41596638655462187, + "acc_norm_stderr": 0.03201650100739615 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3923076923076923, + "acc_stderr": 0.024756000382130945, + "acc_norm": 0.3923076923076923, + "acc_norm_stderr": 0.024756000382130945 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.047609522856952344, + "acc_norm": 0.34, + "acc_norm_stderr": 0.047609522856952344 + }, + 
"harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4537037037037037, + "acc_stderr": 0.04812917324536823, + "acc_norm": 0.4537037037037037, + "acc_norm_stderr": 0.04812917324536823 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.31527093596059114, + "acc_stderr": 0.03269080871970186, + "acc_norm": 0.31527093596059114, + "acc_norm_stderr": 0.03269080871970186 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3709677419354839, + "acc_stderr": 0.027480541887953593, + "acc_norm": 0.3709677419354839, + "acc_norm_stderr": 0.027480541887953593 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5982905982905983, + "acc_stderr": 0.03211693751051621, + "acc_norm": 0.5982905982905983, + "acc_norm_stderr": 0.03211693751051621 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4075471698113208, + "acc_stderr": 0.0302422338008545, + "acc_norm": 0.4075471698113208, + "acc_norm_stderr": 0.0302422338008545 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.39090909090909093, + "acc_stderr": 0.04673752333670237, + "acc_norm": 0.39090909090909093, + "acc_norm_stderr": 0.04673752333670237 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.32592592592592595, + "acc_stderr": 0.02857834836547307, + "acc_norm": 0.32592592592592595, + "acc_norm_stderr": 0.02857834836547307 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.037579499229433426, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.037579499229433426 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5174129353233831, + "acc_stderr": 0.03533389234739245, + "acc_norm": 0.5174129353233831, + "acc_norm_stderr": 0.03533389234739245 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3352601156069364, + "acc_stderr": 0.03599586301247078, + "acc_norm": 0.3352601156069364, + "acc_norm_stderr": 0.03599586301247078 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30423280423280424, + "acc_stderr": 
0.023695415009463084, + "acc_norm": 0.30423280423280424, + "acc_norm_stderr": 0.023695415009463084 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.039420826399272135, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.039420826399272135 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.56, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.56, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4277456647398844, + "acc_stderr": 0.026636539741116076, + "acc_norm": 0.4277456647398844, + "acc_norm_stderr": 0.026636539741116076 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.38650306748466257, + "acc_stderr": 0.038258255488486076, + "acc_norm": 0.38650306748466257, + "acc_norm_stderr": 0.038258255488486076 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4012345679012346, + "acc_stderr": 0.027272582849839792, + "acc_norm": 0.4012345679012346, + "acc_norm_stderr": 0.027272582849839792 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.44041450777202074, + "acc_stderr": 0.03582724530036094, + "acc_norm": 0.44041450777202074, + "acc_norm_stderr": 0.03582724530036094 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.039994238792813365, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.039994238792813365 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3779816513761468, + "acc_stderr": 0.020789187066728113, + "acc_norm": 0.3779816513761468, + "acc_norm_stderr": 0.020789187066728113 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 
0.2777777777777778, + "acc_stderr": 0.040061680838488774, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.040061680838488774 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3790849673202614, + "acc_stderr": 0.027780141207023327, + "acc_norm": 0.3790849673202614, + "acc_norm_stderr": 0.027780141207023327 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5371900826446281, + "acc_stderr": 0.04551711196104218, + "acc_norm": 0.5371900826446281, + "acc_norm_stderr": 0.04551711196104218 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40131578947368424, + "acc_stderr": 0.03988903703336284, + "acc_norm": 0.40131578947368424, + "acc_norm_stderr": 0.03988903703336284 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3104575163398693, + "acc_stderr": 0.018718067052623223, + "acc_norm": 0.3104575163398693, + "acc_norm_stderr": 0.018718067052623223 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.28368794326241137, + "acc_stderr": 0.026891709428343957, + "acc_norm": 0.28368794326241137, + "acc_norm_stderr": 0.026891709428343957 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.24107142857142858, + "acc_stderr": 0.040598672469526864, + "acc_norm": 0.24107142857142858, + "acc_norm_stderr": 0.040598672469526864 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.26851851851851855, + "acc_stderr": 0.030225226160012404, + "acc_norm": 0.26851851851851855, + "acc_norm_stderr": 0.030225226160012404 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2536312849162011, + "acc_stderr": 0.014551553659369922, + "acc_norm": 0.2536312849162011, + "acc_norm_stderr": 0.014551553659369922 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + 
"harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.29044117647058826, + "acc_stderr": 0.02757646862274051, + "acc_norm": 0.29044117647058826, + "acc_norm_stderr": 0.02757646862274051 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4326530612244898, + "acc_stderr": 0.03171752824062664, + "acc_norm": 0.4326530612244898, + "acc_norm_stderr": 0.03171752824062664 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5316455696202531, + "acc_stderr": 0.032481974005110756, + "acc_norm": 0.5316455696202531, + "acc_norm_stderr": 0.032481974005110756 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.28748370273794005, + "acc_stderr": 0.011559337355708507, + "acc_norm": 0.28748370273794005, + "acc_norm_stderr": 0.011559337355708507 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4264705882352941, + "acc_stderr": 0.034711579079534254, + "acc_norm": 0.4264705882352941, + "acc_norm_stderr": 0.034711579079534254 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.03895658065271846, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.03895658065271846 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.28518971848225216, + "mc1_stderr": 0.015805827874454895, + "mc2": 0.45223449862482046, + "mc2_stderr": 0.016160782909726883 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3187721369539551, + "acc_stderr": 0.016021427055309578, + "acc_norm": 0.34710743801652894, + "acc_norm_stderr": 0.016366945603281276 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + 
"harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + 
"harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "microsoft/Orca-2-7b", + "model_sha": "07bbfb8d4e051dbef7a30e5b2cb3816260d917d3", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/mightbe/EEVE-10.8B-Multiturn/result_2024-03-20 06:20:02.json b/mightbe/EEVE-10.8B-Multiturn/result_2024-03-20 06:20:02.json new file mode 100644 index 0000000000000000000000000000000000000000..a505f2d5c48dee8f46ea41ce31acfebe1a9c1d85 --- /dev/null +++ b/mightbe/EEVE-10.8B-Multiturn/result_2024-03-20 06:20:02.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.47440273037542663, + "acc_stderr": 0.014592230885298964, + "acc_norm": 0.5392491467576792, + "acc_norm_stderr": 0.014566303676636584 + }, + "harness|ko_hellaswag|10": { + "acc": 0.47102170882294364, + "acc_stderr": 0.004981394110706142, + "acc_norm": 0.6325433180641307, + "acc_norm_stderr": 0.00481126997545061 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.7017543859649122, + "acc_stderr": 0.03508771929824565, + "acc_norm": 0.7017543859649122, + "acc_norm_stderr": 0.03508771929824565 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.7184466019417476, + "acc_stderr": 0.04453254836326469, + "acc_norm": 0.7184466019417476, + "acc_norm_stderr": 0.04453254836326469 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.722860791826309, + "acc_stderr": 0.01600563629412243, + "acc_norm": 0.722860791826309, + "acc_norm_stderr": 
0.01600563629412243 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4740740740740741, + "acc_stderr": 0.04313531696750574, + "acc_norm": 0.4740740740740741, + "acc_norm_stderr": 0.04313531696750574 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.5276595744680851, + "acc_stderr": 0.03263597118409769, + "acc_norm": 0.5276595744680851, + "acc_norm_stderr": 0.03263597118409769 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.43373493975903615, + "acc_stderr": 0.03858158940685515, + "acc_norm": 0.43373493975903615, + "acc_norm_stderr": 0.03858158940685515 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6366559485530546, + "acc_stderr": 0.02731684767419271, + "acc_norm": 0.6366559485530546, + "acc_norm_stderr": 0.02731684767419271 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5739910313901345, + "acc_stderr": 0.033188332862172806, + "acc_norm": 0.5739910313901345, + "acc_norm_stderr": 0.033188332862172806 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6030534351145038, + "acc_stderr": 0.04291135671009224, + "acc_norm": 0.6030534351145038, + "acc_norm_stderr": 0.04291135671009224 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7222222222222222, + "acc_stderr": 0.03191178226713546, + "acc_norm": 0.7222222222222222, + "acc_norm_stderr": 0.03191178226713546 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5103448275862069, + "acc_stderr": 0.041657747757287644, + "acc_norm": 0.5103448275862069, + "acc_norm_stderr": 0.041657747757287644 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.04488482852329017, + "acc_norm": 0.28431372549019607, + 
"acc_norm_stderr": 0.04488482852329017 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6218487394957983, + "acc_stderr": 0.03149930577784906, + "acc_norm": 0.6218487394957983, + "acc_norm_stderr": 0.03149930577784906 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5358974358974359, + "acc_stderr": 0.025285585990017862, + "acc_norm": 0.5358974358974359, + "acc_norm_stderr": 0.025285585990017862 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.66, + "acc_stderr": 0.04760952285695237, + "acc_norm": 0.66, + "acc_norm_stderr": 0.04760952285695237 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6388888888888888, + "acc_stderr": 0.04643454608906275, + "acc_norm": 0.6388888888888888, + "acc_norm_stderr": 0.04643454608906275 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.03465304488406796, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.03465304488406796 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6709677419354839, + "acc_stderr": 0.026729499068349954, + "acc_norm": 0.6709677419354839, + "acc_norm_stderr": 0.026729499068349954 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7991452991452992, + "acc_stderr": 0.02624677294689047, + "acc_norm": 0.7991452991452992, + "acc_norm_stderr": 0.02624677294689047 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.6, + "acc_stderr": 0.03015113445777629, + "acc_norm": 0.6, + "acc_norm_stderr": 0.03015113445777629 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6272727272727273, + "acc_stderr": 0.04631381319425465, + "acc_norm": 0.6272727272727273, + "acc_norm_stderr": 0.04631381319425465 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3037037037037037, + "acc_stderr": 0.028037929969114986, + "acc_norm": 
0.3037037037037037, + "acc_norm_stderr": 0.028037929969114986 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + "acc_stderr": 0.037345356767871984, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.037345356767871984 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.746268656716418, + "acc_stderr": 0.03076944496729601, + "acc_norm": 0.746268656716418, + "acc_norm_stderr": 0.03076944496729601 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5028901734104047, + "acc_stderr": 0.038124005659748335, + "acc_norm": 0.5028901734104047, + "acc_norm_stderr": 0.038124005659748335 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3915343915343915, + "acc_stderr": 0.025138091388851116, + "acc_norm": 0.3915343915343915, + "acc_norm_stderr": 0.025138091388851116 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5347222222222222, + "acc_stderr": 0.04171115858181618, + "acc_norm": 0.5347222222222222, + "acc_norm_stderr": 0.04171115858181618 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.76, + "acc_stderr": 0.04292346959909284, + "acc_norm": 0.76, + "acc_norm_stderr": 0.04292346959909284 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.6011560693641619, + "acc_stderr": 0.026362437574546545, + "acc_norm": 0.6011560693641619, + "acc_norm_stderr": 0.026362437574546545 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5460122699386503, + "acc_stderr": 0.0391170190467718, + "acc_norm": 0.5460122699386503, + "acc_norm_stderr": 0.0391170190467718 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6111111111111112, + "acc_stderr": 0.027125115513166854, + "acc_norm": 0.6111111111111112, + "acc_norm_stderr": 0.027125115513166854 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + 
"acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7409326424870466, + "acc_stderr": 0.03161877917935411, + "acc_norm": 0.7409326424870466, + "acc_norm_stderr": 0.03161877917935411 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.42105263157894735, + "acc_stderr": 0.04644602091222316, + "acc_norm": 0.42105263157894735, + "acc_norm_stderr": 0.04644602091222316 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.728440366972477, + "acc_stderr": 0.019069098363191452, + "acc_norm": 0.728440366972477, + "acc_norm_stderr": 0.019069098363191452 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.42063492063492064, + "acc_stderr": 0.04415438226743744, + "acc_norm": 0.42063492063492064, + "acc_norm_stderr": 0.04415438226743744 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5882352941176471, + "acc_stderr": 0.02818059632825929, + "acc_norm": 0.5882352941176471, + "acc_norm_stderr": 0.02818059632825929 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6942148760330579, + "acc_stderr": 0.04205953933884123, + "acc_norm": 0.6942148760330579, + "acc_norm_stderr": 0.04205953933884123 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5855263157894737, + "acc_stderr": 0.04008973785779206, + "acc_norm": 0.5855263157894737, + "acc_norm_stderr": 0.04008973785779206 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.545751633986928, + "acc_stderr": 0.020142974553795205, + "acc_norm": 0.545751633986928, + "acc_norm_stderr": 0.020142974553795205 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3723404255319149, + "acc_stderr": 0.02883892147125146, + "acc_norm": 0.3723404255319149, + "acc_norm_stderr": 0.02883892147125146 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 
0.45535714285714285, + "acc_stderr": 0.04726835553719099, + "acc_norm": 0.45535714285714285, + "acc_norm_stderr": 0.04726835553719099 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4398148148148148, + "acc_stderr": 0.03385177976044809, + "acc_norm": 0.4398148148148148, + "acc_norm_stderr": 0.03385177976044809 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.15977653631284916, + "acc_stderr": 0.01225420508370349, + "acc_norm": 0.15977653631284916, + "acc_norm_stderr": 0.01225420508370349 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.7, + "acc_stderr": 0.04605661864718381, + "acc_norm": 0.7, + "acc_norm_stderr": 0.04605661864718381 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.49264705882352944, + "acc_stderr": 0.030369552523902173, + "acc_norm": 0.49264705882352944, + "acc_norm_stderr": 0.030369552523902173 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6408163265306123, + "acc_stderr": 0.030713560455108493, + "acc_norm": 0.6408163265306123, + "acc_norm_stderr": 0.030713560455108493 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7721518987341772, + "acc_stderr": 0.02730348459906942, + "acc_norm": 0.7721518987341772, + "acc_norm_stderr": 0.02730348459906942 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.42633637548891784, + "acc_stderr": 0.012630884771599689, + "acc_norm": 0.42633637548891784, + "acc_norm_stderr": 0.012630884771599689 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.7352941176470589, + "acc_stderr": 0.03096451792692339, + "acc_norm": 0.7352941176470589, + "acc_norm_stderr": 0.03096451792692339 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.7212121212121212, + "acc_stderr": 0.03501438706296781, + "acc_norm": 0.7212121212121212, + 
"acc_norm_stderr": 0.03501438706296781 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2766217870257038, + "mc1_stderr": 0.015659605755326912, + "mc2": 0.42249922636736603, + "mc2_stderr": 0.015294443485863257 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5844155844155844, + "acc_stderr": 0.016943586313076568, + "acc_norm": 0.6033057851239669, + "acc_norm_stderr": 0.0168194386429714 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + 
"harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "mightbe/EEVE-10.8B-Multiturn", + "model_sha": "2084ff316d4a93095e59a3aeb1362a86fc08ca3d", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/migtissera/Llama-3-8B-Synthia-v3.5/result_2024-06-29 15:16:49.json b/migtissera/Llama-3-8B-Synthia-v3.5/result_2024-06-29 15:16:49.json new file mode 100644 index 0000000000000000000000000000000000000000..3de24c9bdc942c8a5d2453949fb053d08f3ad5b4 --- /dev/null +++ b/migtissera/Llama-3-8B-Synthia-v3.5/result_2024-06-29 15:16:49.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.39419795221843, + "acc_stderr": 0.014280522667467325, + "acc_norm": 0.44880546075085326, + "acc_norm_stderr": 0.01453459958509767 + }, + 
"harness|ko_hellaswag|10": { + "acc": 0.38388767177853017, + "acc_stderr": 0.0048533716462392466, + "acc_norm": 0.5210117506472814, + "acc_norm_stderr": 0.004985373550775109 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6491228070175439, + "acc_stderr": 0.03660298834049162, + "acc_norm": 0.6491228070175439, + "acc_norm_stderr": 0.03660298834049162 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6504854368932039, + "acc_stderr": 0.047211885060971716, + "acc_norm": 0.6504854368932039, + "acc_norm_stderr": 0.047211885060971716 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5568326947637292, + "acc_stderr": 0.017764085035348414, + "acc_norm": 0.5568326947637292, + "acc_norm_stderr": 0.017764085035348414 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3851851851851852, + "acc_stderr": 0.042039210401562783, + "acc_norm": 0.3851851851851852, + "acc_norm_stderr": 0.042039210401562783 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.46808510638297873, + "acc_stderr": 0.03261936918467383, + "acc_norm": 0.46808510638297873, + "acc_norm_stderr": 0.03261936918467383 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.45180722891566266, + "acc_stderr": 0.03874371556587953, + "acc_norm": 0.45180722891566266, + "acc_norm_stderr": 0.03874371556587953 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5080385852090032, + "acc_stderr": 0.02839442137098453, + "acc_norm": 0.5080385852090032, + "acc_norm_stderr": 0.02839442137098453 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5112107623318386, + "acc_stderr": 0.033549366530984746, + "acc_norm": 0.5112107623318386, + "acc_norm_stderr": 0.033549366530984746 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5419847328244275, + "acc_stderr": 0.04369802690578757, + "acc_norm": 0.5419847328244275, + "acc_norm_stderr": 
0.04369802690578757 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956909, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956909 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5707070707070707, + "acc_stderr": 0.03526552724601198, + "acc_norm": 0.5707070707070707, + "acc_norm_stderr": 0.03526552724601198 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5310344827586206, + "acc_stderr": 0.04158632762097828, + "acc_norm": 0.5310344827586206, + "acc_norm_stderr": 0.04158632762097828 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.04440521906179328, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.04440521906179328 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5126050420168067, + "acc_stderr": 0.032468167657521745, + "acc_norm": 0.5126050420168067, + "acc_norm_stderr": 0.032468167657521745 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.517948717948718, + "acc_stderr": 0.025334667080954897, + "acc_norm": 0.517948717948718, + "acc_norm_stderr": 0.025334667080954897 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5740740740740741, + "acc_stderr": 0.0478034362693679, + "acc_norm": 0.5740740740740741, + "acc_norm_stderr": 0.0478034362693679 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3891625615763547, + "acc_stderr": 0.03430462416103871, + "acc_norm": 0.3891625615763547, + "acc_norm_stderr": 0.03430462416103871 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5064516129032258, + "acc_stderr": 0.02844163823354051, + "acc_norm": 0.5064516129032258, + 
"acc_norm_stderr": 0.02844163823354051 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7606837606837606, + "acc_stderr": 0.027951826808924333, + "acc_norm": 0.7606837606837606, + "acc_norm_stderr": 0.027951826808924333 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5056603773584906, + "acc_stderr": 0.030770900763851302, + "acc_norm": 0.5056603773584906, + "acc_norm_stderr": 0.030770900763851302 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.04769300568972744, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.04769300568972744 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.34074074074074073, + "acc_stderr": 0.02889774874113114, + "acc_norm": 0.34074074074074073, + "acc_norm_stderr": 0.02889774874113114 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3443708609271523, + "acc_stderr": 0.038796870240733264, + "acc_norm": 0.3443708609271523, + "acc_norm_stderr": 0.038796870240733264 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6567164179104478, + "acc_stderr": 0.03357379665433431, + "acc_norm": 0.6567164179104478, + "acc_norm_stderr": 0.03357379665433431 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4682080924855491, + "acc_stderr": 0.038047497443647646, + "acc_norm": 0.4682080924855491, + "acc_norm_stderr": 0.038047497443647646 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.37566137566137564, + "acc_stderr": 0.024942368931159788, + "acc_norm": 0.37566137566137564, + "acc_norm_stderr": 0.024942368931159788 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4930555555555556, + "acc_stderr": 0.04180806750294938, + "acc_norm": 0.4930555555555556, + "acc_norm_stderr": 0.04180806750294938 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.71, + "acc_stderr": 
0.04560480215720684, + "acc_norm": 0.71, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5520231213872833, + "acc_stderr": 0.02677299065336183, + "acc_norm": 0.5520231213872833, + "acc_norm_stderr": 0.02677299065336183 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5030674846625767, + "acc_stderr": 0.03928297078179663, + "acc_norm": 0.5030674846625767, + "acc_norm_stderr": 0.03928297078179663 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5339506172839507, + "acc_stderr": 0.027756535257347663, + "acc_norm": 0.5339506172839507, + "acc_norm_stderr": 0.027756535257347663 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5906735751295337, + "acc_stderr": 0.03548608168860806, + "acc_norm": 0.5906735751295337, + "acc_norm_stderr": 0.03548608168860806 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3157894736842105, + "acc_stderr": 0.04372748290278007, + "acc_norm": 0.3157894736842105, + "acc_norm_stderr": 0.04372748290278007 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5853211009174312, + "acc_stderr": 0.02112290320860259, + "acc_norm": 0.5853211009174312, + "acc_norm_stderr": 0.02112290320860259 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.04360314860077459, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.04360314860077459 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.028452639985088006, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.028452639985088006 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.57, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.57, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6776859504132231, + 
"acc_stderr": 0.04266416363352168, + "acc_norm": 0.6776859504132231, + "acc_norm_stderr": 0.04266416363352168 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.506578947368421, + "acc_stderr": 0.040685900502249704, + "acc_norm": 0.506578947368421, + "acc_norm_stderr": 0.040685900502249704 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4264705882352941, + "acc_stderr": 0.02000791273935936, + "acc_norm": 0.4264705882352941, + "acc_norm_stderr": 0.02000791273935936 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3404255319148936, + "acc_stderr": 0.028267657482650147, + "acc_norm": 0.3404255319148936, + "acc_norm_stderr": 0.028267657482650147 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4642857142857143, + "acc_stderr": 0.04733667890053757, + "acc_norm": 0.4642857142857143, + "acc_norm_stderr": 0.04733667890053757 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.38425925925925924, + "acc_stderr": 0.03317354514310742, + "acc_norm": 0.38425925925925924, + "acc_norm_stderr": 0.03317354514310742 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.31508379888268156, + "acc_stderr": 0.01553685085247363, + "acc_norm": 0.31508379888268156, + "acc_norm_stderr": 0.01553685085247363 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562429, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562429 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.66, + "acc_stderr": 0.04760952285695237, + "acc_norm": 0.66, + "acc_norm_stderr": 0.04760952285695237 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.34558823529411764, + "acc_stderr": 0.028888193103988637, + "acc_norm": 0.34558823529411764, + "acc_norm_stderr": 0.028888193103988637 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6, + "acc_stderr": 0.03136250240935894, + "acc_norm": 0.6, + "acc_norm_stderr": 0.03136250240935894 + }, + 
"harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6286919831223629, + "acc_stderr": 0.03145068600744858, + "acc_norm": 0.6286919831223629, + "acc_norm_stderr": 0.03145068600744858 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.36766623207301175, + "acc_stderr": 0.012314845910071705, + "acc_norm": 0.36766623207301175, + "acc_norm_stderr": 0.012314845910071705 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5735294117647058, + "acc_stderr": 0.034711579079534274, + "acc_norm": 0.5735294117647058, + "acc_norm_stderr": 0.034711579079534274 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6484848484848484, + "acc_stderr": 0.037282069986826503, + "acc_norm": 0.6484848484848484, + "acc_norm_stderr": 0.037282069986826503 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3378212974296206, + "mc1_stderr": 0.01655716732251689, + "mc2": 0.5104261223849632, + "mc2_stderr": 0.015408672449611426 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5336481700118064, + "acc_stderr": 0.017151384117131865, + "acc_norm": 0.5761511216056671, + "acc_norm_stderr": 0.01698981083462826 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, 
+ "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "migtissera/Llama-3-8B-Synthia-v3.5", + "model_sha": "af4990801a24fee7acf16370cb5aa5643b5e9d6c", + "model_dtype": "torch.float16", 
+ "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/migtissera/Tess-2.0-Llama-3-8B/result_2024-06-29 15:17:09.json b/migtissera/Tess-2.0-Llama-3-8B/result_2024-06-29 15:17:09.json new file mode 100644 index 0000000000000000000000000000000000000000..9ef12d3377b2c8e26a1aa2dbf9237d391ee2647d --- /dev/null +++ b/migtissera/Tess-2.0-Llama-3-8B/result_2024-06-29 15:17:09.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.38993174061433444, + "acc_stderr": 0.014252959848892893, + "acc_norm": 0.44795221843003413, + "acc_norm_stderr": 0.01453201149821167 + }, + "harness|ko_hellaswag|10": { + "acc": 0.39494124676359293, + "acc_stderr": 0.004878390226591717, + "acc_norm": 0.534654451304521, + "acc_norm_stderr": 0.004977782217582459 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6374269005847953, + "acc_stderr": 0.0368713061556206, + "acc_norm": 0.6374269005847953, + "acc_norm_stderr": 0.0368713061556206 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6893203883495146, + "acc_stderr": 0.045821241601615506, + "acc_norm": 0.6893203883495146, + "acc_norm_stderr": 0.045821241601615506 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5530012771392082, + "acc_stderr": 0.01777922523339422, + "acc_norm": 0.5530012771392082, + "acc_norm_stderr": 0.01777922523339422 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3925925925925926, + "acc_stderr": 0.042185062153688786, + "acc_norm": 0.3925925925925926, + "acc_norm_stderr": 0.042185062153688786 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847415, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847415 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.48936170212765956, + "acc_stderr": 0.03267862331014063, + "acc_norm": 0.48936170212765956, + "acc_norm_stderr": 0.03267862331014063 + }, + 
"harness|ko_mmlu_virology|5": { + "acc": 0.42771084337349397, + "acc_stderr": 0.038515976837185335, + "acc_norm": 0.42771084337349397, + "acc_norm_stderr": 0.038515976837185335 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5530546623794212, + "acc_stderr": 0.02823776942208533, + "acc_norm": 0.5530546623794212, + "acc_norm_stderr": 0.02823776942208533 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5381165919282511, + "acc_stderr": 0.03346015011973228, + "acc_norm": 0.5381165919282511, + "acc_norm_stderr": 0.03346015011973228 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5419847328244275, + "acc_stderr": 0.04369802690578757, + "acc_norm": 0.5419847328244275, + "acc_norm_stderr": 0.04369802690578757 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6313131313131313, + "acc_stderr": 0.034373055019806184, + "acc_norm": 0.6313131313131313, + "acc_norm_stderr": 0.034373055019806184 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5241379310344828, + "acc_stderr": 0.0416180850350153, + "acc_norm": 0.5241379310344828, + "acc_norm_stderr": 0.0416180850350153 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.04389869956808778, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.04389869956808778 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5588235294117647, + "acc_stderr": 0.03225294232399639, + "acc_norm": 0.5588235294117647, + "acc_norm_stderr": 0.03225294232399639 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5358974358974359, + "acc_stderr": 0.025285585990017855, + "acc_norm": 0.5358974358974359, + "acc_norm_stderr": 0.025285585990017855 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + 
"acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5925925925925926, + "acc_stderr": 0.047500773411999854, + "acc_norm": 0.5925925925925926, + "acc_norm_stderr": 0.047500773411999854 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4187192118226601, + "acc_stderr": 0.034711928605184676, + "acc_norm": 0.4187192118226601, + "acc_norm_stderr": 0.034711928605184676 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5419354838709678, + "acc_stderr": 0.028343787250540618, + "acc_norm": 0.5419354838709678, + "acc_norm_stderr": 0.028343787250540618 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7435897435897436, + "acc_stderr": 0.02860595370200424, + "acc_norm": 0.7435897435897436, + "acc_norm_stderr": 0.02860595370200424 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5018867924528302, + "acc_stderr": 0.030772653642075657, + "acc_norm": 0.5018867924528302, + "acc_norm_stderr": 0.030772653642075657 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5545454545454546, + "acc_stderr": 0.047605488214603246, + "acc_norm": 0.5545454545454546, + "acc_norm_stderr": 0.047605488214603246 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.337037037037037, + "acc_stderr": 0.028820884666253255, + "acc_norm": 0.337037037037037, + "acc_norm_stderr": 0.028820884666253255 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.37748344370860926, + "acc_stderr": 0.0395802723112157, + "acc_norm": 0.37748344370860926, + "acc_norm_stderr": 0.0395802723112157 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6716417910447762, + "acc_stderr": 0.033206858897443244, + "acc_norm": 0.6716417910447762, + "acc_norm_stderr": 0.033206858897443244 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4682080924855491, + "acc_stderr": 
0.038047497443647646, + "acc_norm": 0.4682080924855491, + "acc_norm_stderr": 0.038047497443647646 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3941798941798942, + "acc_stderr": 0.02516798233389414, + "acc_norm": 0.3941798941798942, + "acc_norm_stderr": 0.02516798233389414 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.04174752578923185, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.04174752578923185 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.41, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.41, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.69, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.69, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5491329479768786, + "acc_stderr": 0.02678881193156276, + "acc_norm": 0.5491329479768786, + "acc_norm_stderr": 0.02678881193156276 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4785276073619632, + "acc_stderr": 0.0392474687675113, + "acc_norm": 0.4785276073619632, + "acc_norm_stderr": 0.0392474687675113 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5401234567901234, + "acc_stderr": 0.027731022753539274, + "acc_norm": 0.5401234567901234, + "acc_norm_stderr": 0.027731022753539274 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6217616580310881, + "acc_stderr": 0.03499807276193338, + "acc_norm": 0.6217616580310881, + "acc_norm_stderr": 0.03499807276193338 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3508771929824561, + "acc_stderr": 0.04489539350270698, + "acc_norm": 0.3508771929824561, + "acc_norm_stderr": 0.04489539350270698 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5944954128440367, + 
"acc_stderr": 0.021050997991896834, + "acc_norm": 0.5944954128440367, + "acc_norm_stderr": 0.021050997991896834 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4523809523809524, + "acc_stderr": 0.044518079590553275, + "acc_norm": 0.4523809523809524, + "acc_norm_stderr": 0.044518079590553275 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5620915032679739, + "acc_stderr": 0.02840830202033269, + "acc_norm": 0.5620915032679739, + "acc_norm_stderr": 0.02840830202033269 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.62, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.62, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6859504132231405, + "acc_stderr": 0.042369647530410184, + "acc_norm": 0.6859504132231405, + "acc_norm_stderr": 0.042369647530410184 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4934210526315789, + "acc_stderr": 0.040685900502249704, + "acc_norm": 0.4934210526315789, + "acc_norm_stderr": 0.040685900502249704 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.44281045751633985, + "acc_stderr": 0.020095083154577354, + "acc_norm": 0.44281045751633985, + "acc_norm_stderr": 0.020095083154577354 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.028121636040639882, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.028121636040639882 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.48214285714285715, + "acc_stderr": 0.047427623612430116, + "acc_norm": 0.48214285714285715, + "acc_norm_stderr": 0.047427623612430116 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.03388857118502325, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.03388857118502325 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.38324022346368714, + "acc_stderr": 0.016260159604429125, + "acc_norm": 0.38324022346368714, + "acc_norm_stderr": 0.016260159604429125 + }, + 
"harness|ko_mmlu_college_computer_science|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.64, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.64, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.41544117647058826, + "acc_stderr": 0.029935342707877746, + "acc_norm": 0.41544117647058826, + "acc_norm_stderr": 0.029935342707877746 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6530612244897959, + "acc_stderr": 0.030472526026726496, + "acc_norm": 0.6530612244897959, + "acc_norm_stderr": 0.030472526026726496 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.030685820596610805, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.030685820596610805 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.37027379400260757, + "acc_stderr": 0.012332930781256728, + "acc_norm": 0.37027379400260757, + "acc_norm_stderr": 0.012332930781256728 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5784313725490197, + "acc_stderr": 0.03465868196380762, + "acc_norm": 0.5784313725490197, + "acc_norm_stderr": 0.03465868196380762 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6545454545454545, + "acc_stderr": 0.03713158067481913, + "acc_norm": 0.6545454545454545, + "acc_norm_stderr": 0.03713158067481913 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.29865361077111385, + "mc1_stderr": 0.016021570613768545, + "mc2": 0.46594904671568743, + "mc2_stderr": 0.015260599177281804 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5194805194805194, + "acc_stderr": 0.017177301992342547, + "acc_norm": 0.5950413223140496, + "acc_norm_stderr": 0.01687694116504561 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + 
"harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + 
"harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "migtissera/Tess-2.0-Llama-3-8B", + "model_sha": "9a577d7666eb90c13752ada950a0b5f91d3749f1", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/milkyyy/llama-2-ko-kullm/result_2024-02-21 05:59:41.json b/milkyyy/llama-2-ko-kullm/result_2024-02-21 05:59:41.json new file mode 100644 index 0000000000000000000000000000000000000000..3fb5a3055e001e56c1da4b7e6b98574806f18cba --- /dev/null +++ b/milkyyy/llama-2-ko-kullm/result_2024-02-21 05:59:41.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.32764505119453924, + "acc_stderr": 0.013715847940719344, + "acc_norm": 0.3822525597269625, + "acc_norm_stderr": 0.014200454049979277 + }, + "harness|ko_hellaswag|10": { + "acc": 0.38856801433977295, + "acc_stderr": 0.004864286176731827, + "acc_norm": 0.5068711412069309, + "acc_norm_stderr": 0.004989310228276113 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.29239766081871343, + "acc_stderr": 0.03488647713457922, + "acc_norm": 0.29239766081871343, + "acc_norm_stderr": 0.03488647713457922 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2815533980582524, + "acc_stderr": 0.044532548363264673, + "acc_norm": 0.2815533980582524, + "acc_norm_stderr": 
0.044532548363264673 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3652618135376756, + "acc_stderr": 0.01721853002883864, + "acc_norm": 0.3652618135376756, + "acc_norm_stderr": 0.01721853002883864 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.28888888888888886, + "acc_stderr": 0.0391545063041425, + "acc_norm": 0.28888888888888886, + "acc_norm_stderr": 0.0391545063041425 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.33617021276595743, + "acc_stderr": 0.030881618520676942, + "acc_norm": 0.33617021276595743, + "acc_norm_stderr": 0.030881618520676942 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3132530120481928, + "acc_stderr": 0.036108050180310235, + "acc_norm": 0.3132530120481928, + "acc_norm_stderr": 0.036108050180310235 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3086816720257235, + "acc_stderr": 0.02623696588115326, + "acc_norm": 0.3086816720257235, + "acc_norm_stderr": 0.02623696588115326 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3721973094170404, + "acc_stderr": 0.032443052830087304, + "acc_norm": 0.3721973094170404, + "acc_norm_stderr": 0.032443052830087304 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.35877862595419846, + "acc_stderr": 0.04206739313864908, + "acc_norm": 0.35877862595419846, + "acc_norm_stderr": 0.04206739313864908 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768077, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768077 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3484848484848485, + "acc_stderr": 0.033948539651564025, + "acc_norm": 0.3484848484848485, + "acc_norm_stderr": 0.033948539651564025 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2206896551724138, + "acc_stderr": 0.03455930201924812, + "acc_norm": 0.2206896551724138, + 
"acc_norm_stderr": 0.03455930201924812 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.19607843137254902, + "acc_stderr": 0.03950581861179962, + "acc_norm": 0.19607843137254902, + "acc_norm_stderr": 0.03950581861179962 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.25630252100840334, + "acc_stderr": 0.028359620870533953, + "acc_norm": 0.25630252100840334, + "acc_norm_stderr": 0.028359620870533953 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.24102564102564103, + "acc_stderr": 0.02168554666533319, + "acc_norm": 0.24102564102564103, + "acc_norm_stderr": 0.02168554666533319 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.044143436668549335, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.044143436668549335 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3054187192118227, + "acc_stderr": 0.032406615658684086, + "acc_norm": 0.3054187192118227, + "acc_norm_stderr": 0.032406615658684086 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2903225806451613, + "acc_stderr": 0.025822106119415898, + "acc_norm": 0.2903225806451613, + "acc_norm_stderr": 0.025822106119415898 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.3162393162393162, + "acc_stderr": 0.03046365674734026, + "acc_norm": 0.3162393162393162, + "acc_norm_stderr": 0.03046365674734026 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3132075471698113, + "acc_stderr": 0.028544793319055326, + "acc_norm": 0.3132075471698113, + "acc_norm_stderr": 0.028544793319055326 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.3181818181818182, + "acc_stderr": 
0.04461272175910507, + "acc_norm": 0.3181818181818182, + "acc_norm_stderr": 0.04461272175910507 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24814814814814815, + "acc_stderr": 0.0263357394040558, + "acc_norm": 0.24814814814814815, + "acc_norm_stderr": 0.0263357394040558 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.03710185726119996, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.03710185726119996 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.2835820895522388, + "acc_stderr": 0.03187187537919797, + "acc_norm": 0.2835820895522388, + "acc_norm_stderr": 0.03187187537919797 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.24277456647398843, + "acc_stderr": 0.0326926380614177, + "acc_norm": 0.24277456647398843, + "acc_norm_stderr": 0.0326926380614177 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.25132275132275134, + "acc_stderr": 0.022340482339643898, + "acc_norm": 0.25132275132275134, + "acc_norm_stderr": 0.022340482339643898 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.03476590104304134, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.03476590104304134 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.2, + "acc_stderr": 0.040201512610368445, + "acc_norm": 0.2, + "acc_norm_stderr": 0.040201512610368445 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2630057803468208, + "acc_stderr": 0.02370309952525817, + "acc_norm": 0.2630057803468208, + "acc_norm_stderr": 0.02370309952525817 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.22085889570552147, + "acc_stderr": 0.032591773927421776, + "acc_norm": 0.22085889570552147, + "acc_norm_stderr": 0.032591773927421776 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 
0.28703703703703703, + "acc_stderr": 0.025171041915309684, + "acc_norm": 0.28703703703703703, + "acc_norm_stderr": 0.025171041915309684 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.22797927461139897, + "acc_stderr": 0.030276909945178256, + "acc_norm": 0.22797927461139897, + "acc_norm_stderr": 0.030276909945178256 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.22807017543859648, + "acc_stderr": 0.03947152782669415, + "acc_norm": 0.22807017543859648, + "acc_norm_stderr": 0.03947152782669415 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3412844036697248, + "acc_stderr": 0.020328612816592435, + "acc_norm": 0.3412844036697248, + "acc_norm_stderr": 0.020328612816592435 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.20634920634920634, + "acc_stderr": 0.0361960452412425, + "acc_norm": 0.20634920634920634, + "acc_norm_stderr": 0.0361960452412425 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.02656892101545716, + "acc_norm": 0.3137254901960784, + "acc_norm_stderr": 0.02656892101545716 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.4049586776859504, + "acc_stderr": 0.044811377559424694, + "acc_norm": 0.4049586776859504, + "acc_norm_stderr": 0.044811377559424694 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.27631578947368424, + "acc_stderr": 0.03639057569952925, + "acc_norm": 0.27631578947368424, + "acc_norm_stderr": 0.03639057569952925 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2565359477124183, + "acc_stderr": 0.017667841612378988, + "acc_norm": 0.2565359477124183, + "acc_norm_stderr": 0.017667841612378988 + }, + 
"harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2553191489361702, + "acc_stderr": 0.026011992930902006, + "acc_norm": 0.2553191489361702, + "acc_norm_stderr": 0.026011992930902006 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3125, + "acc_stderr": 0.043994650575715215, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.043994650575715215 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.24537037037037038, + "acc_stderr": 0.02934666509437294, + "acc_norm": 0.24537037037037038, + "acc_norm_stderr": 0.02934666509437294 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.18, + "acc_stderr": 0.038612291966536934, + "acc_norm": 0.18, + "acc_norm_stderr": 0.038612291966536934 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3492647058823529, + "acc_stderr": 0.02895975519682485, + "acc_norm": 0.3492647058823529, + "acc_norm_stderr": 0.02895975519682485 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.22040816326530613, + "acc_stderr": 0.02653704531214529, + "acc_norm": 0.22040816326530613, + "acc_norm_stderr": 0.02653704531214529 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.3459915611814346, + "acc_stderr": 0.030964810588786713, + "acc_norm": 0.3459915611814346, + "acc_norm_stderr": 0.030964810588786713 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2790091264667536, + "acc_stderr": 0.011455208832803543, + "acc_norm": 0.2790091264667536, + "acc_norm_stderr": 0.011455208832803543 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.029331162294251735, + "acc_norm": 
0.22549019607843138, + "acc_norm_stderr": 0.029331162294251735 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3151515151515151, + "acc_stderr": 0.0362773057502241, + "acc_norm": 0.3151515151515151, + "acc_norm_stderr": 0.0362773057502241 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24724602203182375, + "mc1_stderr": 0.015102404797359649, + "mc2": 0.39393715837044074, + "mc2_stderr": 0.014850055668664479 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.33293978748524206, + "acc_stderr": 0.016202431208373808, + "acc_norm": 0.4592680047225502, + "acc_norm_stderr": 0.017133218276537673 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + 
"harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "milkyyy/llama-2-ko-kullm", + "model_sha": "cc1eacd2ae33261e52a90329b56c6edebe8ac416", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/mintaeng/zephyr_mj_7b_v1/result_2024-06-17 01:36:22.json b/mintaeng/zephyr_mj_7b_v1/result_2024-06-17 01:36:22.json new file mode 100644 index 0000000000000000000000000000000000000000..7edec898502bcfe68b649ab8a69c59f5c5d33a36 --- /dev/null +++ b/mintaeng/zephyr_mj_7b_v1/result_2024-06-17 01:36:22.json @@ -0,0 +1,444 
@@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.31313993174061433, + "acc_stderr": 0.013552671543623501, + "acc_norm": 0.3728668941979522, + "acc_norm_stderr": 0.01413117676013116 + }, + "harness|ko_hellaswag|10": { + "acc": 0.35232025492929697, + "acc_stderr": 0.004767168250414606, + "acc_norm": 0.44742083250348536, + "acc_norm_stderr": 0.004962115526014298 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4327485380116959, + "acc_stderr": 0.03799978644370607, + "acc_norm": 0.4327485380116959, + "acc_norm_stderr": 0.03799978644370607 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5825242718446602, + "acc_stderr": 0.048828405482122375, + "acc_norm": 0.5825242718446602, + "acc_norm_stderr": 0.048828405482122375 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4240102171136654, + "acc_stderr": 0.01767226332908423, + "acc_norm": 0.4240102171136654, + "acc_norm_stderr": 0.01767226332908423 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34814814814814815, + "acc_stderr": 0.041153246103369526, + "acc_norm": 0.34814814814814815, + "acc_norm_stderr": 0.041153246103369526 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.18, + "acc_stderr": 0.038612291966536955, + "acc_norm": 0.18, + "acc_norm_stderr": 0.038612291966536955 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3446808510638298, + "acc_stderr": 0.031068985963122145, + "acc_norm": 0.3446808510638298, + "acc_norm_stderr": 0.031068985963122145 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3674698795180723, + "acc_stderr": 0.03753267402120574, + "acc_norm": 0.3674698795180723, + "acc_norm_stderr": 0.03753267402120574 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4533762057877814, + "acc_stderr": 0.02827435985489425, + "acc_norm": 0.4533762057877814, + "acc_norm_stderr": 0.02827435985489425 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.42152466367713004, + "acc_stderr": 0.03314190222110656, + "acc_norm": 0.42152466367713004, + "acc_norm_stderr": 
0.03314190222110656 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3511450381679389, + "acc_stderr": 0.04186445163013751, + "acc_norm": 0.3511450381679389, + "acc_norm_stderr": 0.04186445163013751 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5353535353535354, + "acc_stderr": 0.035534363688280626, + "acc_norm": 0.5353535353535354, + "acc_norm_stderr": 0.035534363688280626 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4, + "acc_stderr": 0.04082482904638628, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04082482904638628 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3235294117647059, + "acc_stderr": 0.046550104113196177, + "acc_norm": 0.3235294117647059, + "acc_norm_stderr": 0.046550104113196177 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.49159663865546216, + "acc_stderr": 0.03247390276569669, + "acc_norm": 0.49159663865546216, + "acc_norm_stderr": 0.03247390276569669 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.43846153846153846, + "acc_stderr": 0.02515826601686855, + "acc_norm": 0.43846153846153846, + "acc_norm_stderr": 0.02515826601686855 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4187192118226601, + "acc_stderr": 0.03471192860518468, + "acc_norm": 0.4187192118226601, + "acc_norm_stderr": 
0.03471192860518468 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4161290322580645, + "acc_stderr": 0.02804098138076155, + "acc_norm": 0.4161290322580645, + "acc_norm_stderr": 0.02804098138076155 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6581196581196581, + "acc_stderr": 0.03107502852650775, + "acc_norm": 0.6581196581196581, + "acc_norm_stderr": 0.03107502852650775 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4188679245283019, + "acc_stderr": 0.03036505082911522, + "acc_norm": 0.4188679245283019, + "acc_norm_stderr": 0.03036505082911522 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4727272727272727, + "acc_stderr": 0.04782001791380063, + "acc_norm": 0.4727272727272727, + "acc_norm_stderr": 0.04782001791380063 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.34074074074074073, + "acc_stderr": 0.02889774874113114, + "acc_norm": 0.34074074074074073, + "acc_norm_stderr": 0.02889774874113114 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.271523178807947, + "acc_stderr": 0.036313298039696525, + "acc_norm": 0.271523178807947, + "acc_norm_stderr": 0.036313298039696525 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5422885572139303, + "acc_stderr": 0.03522865864099597, + "acc_norm": 0.5422885572139303, + "acc_norm_stderr": 0.03522865864099597 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3988439306358382, + "acc_stderr": 0.03733626655383509, + "acc_norm": 0.3988439306358382, + "acc_norm_stderr": 0.03733626655383509 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3386243386243386, + "acc_stderr": 0.024373197867983056, + "acc_norm": 0.3386243386243386, + "acc_norm_stderr": 0.024373197867983056 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.03745554791462457, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.03745554791462457 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.39, + "acc_stderr": 
0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.53, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.53, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4682080924855491, + "acc_stderr": 0.026864624366756646, + "acc_norm": 0.4682080924855491, + "acc_norm_stderr": 0.026864624366756646 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.44171779141104295, + "acc_stderr": 0.03901591825836184, + "acc_norm": 0.44171779141104295, + "acc_norm_stderr": 0.03901591825836184 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4228395061728395, + "acc_stderr": 0.0274874729808716, + "acc_norm": 0.4228395061728395, + "acc_norm_stderr": 0.0274874729808716 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.49740932642487046, + "acc_stderr": 0.03608390745384488, + "acc_norm": 0.49740932642487046, + "acc_norm_stderr": 0.03608390745384488 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.040969851398436695, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.040969851398436695 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.4972477064220184, + "acc_stderr": 0.021436998359765324, + "acc_norm": 0.4972477064220184, + "acc_norm_stderr": 0.021436998359765324 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04216370213557835, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04216370213557835 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4215686274509804, + "acc_stderr": 0.02827549015679143, + "acc_norm": 0.4215686274509804, + "acc_norm_stderr": 0.02827549015679143 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.45, + "acc_stderr": 
0.049999999999999996, + "acc_norm": 0.45, + "acc_norm_stderr": 0.049999999999999996 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5619834710743802, + "acc_stderr": 0.04529146804435792, + "acc_norm": 0.5619834710743802, + "acc_norm_stderr": 0.04529146804435792 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4276315789473684, + "acc_stderr": 0.04026097083296558, + "acc_norm": 0.4276315789473684, + "acc_norm_stderr": 0.04026097083296558 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.33169934640522875, + "acc_stderr": 0.019047485239360385, + "acc_norm": 0.33169934640522875, + "acc_norm_stderr": 0.019047485239360385 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3262411347517731, + "acc_stderr": 0.02796845304356317, + "acc_norm": 0.3262411347517731, + "acc_norm_stderr": 0.02796845304356317 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.04547960999764376, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.04547960999764376 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.41203703703703703, + "acc_stderr": 0.03356787758160834, + "acc_norm": 0.41203703703703703, + "acc_norm_stderr": 0.03356787758160834 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.31620111731843575, + "acc_stderr": 0.015551673652172545, + "acc_norm": 0.31620111731843575, + "acc_norm_stderr": 0.015551673652172545 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.39705882352941174, + "acc_stderr": 0.029722152099280058, + "acc_norm": 0.39705882352941174, + "acc_norm_stderr": 0.029722152099280058 + }, + 
"harness|ko_mmlu_security_studies|5": { + "acc": 0.4489795918367347, + "acc_stderr": 0.0318421386668758, + "acc_norm": 0.4489795918367347, + "acc_norm_stderr": 0.0318421386668758 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5189873417721519, + "acc_stderr": 0.03252375148090447, + "acc_norm": 0.5189873417721519, + "acc_norm_stderr": 0.03252375148090447 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.30638852672750977, + "acc_stderr": 0.011773980329380701, + "acc_norm": 0.30638852672750977, + "acc_norm_stderr": 0.011773980329380701 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.03308611113236435, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.03308611113236435 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.036810508691615514, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.036810508691615514 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3268053855569155, + "mc1_stderr": 0.016419874731135035, + "mc2": 0.5045750696460111, + "mc2_stderr": 0.01574364380023738 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.37662337662337664, + "acc_stderr": 0.01665879987405197, + "acc_norm": 0.3919716646989374, + "acc_norm_stderr": 0.016784332119424077 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + 
"harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + 
"harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "mintaeng/zephyr_mj_7b_v1", + "model_sha": "0d4173a84a178b171bb033cccdc36fc8af7d5a8c", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/mistralai/Mistral-7B-Instruct-v0.1/result_2023-10-16 06:07:31.json b/mistralai/Mistral-7B-Instruct-v0.1/result_2023-10-16 06:07:31.json new file mode 100644 index 0000000000000000000000000000000000000000..2ae55f2b9e80919e6d6eb1eacd3a3d4f24d0a219 --- /dev/null +++ b/mistralai/Mistral-7B-Instruct-v0.1/result_2023-10-16 06:07:31.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2645051194539249, + "acc_stderr": 0.012889272949313368, + "acc_norm": 0.32849829351535836, + "acc_norm_stderr": 0.013724978465537378 + }, + "harness|ko_hellaswag|10": { + "acc": 0.32682732523401714, + "acc_stderr": 0.004680949283855315, + "acc_norm": 0.3868751244771958, + "acc_norm_stderr": 0.004860393011974685 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.38596491228070173, + "acc_stderr": 0.03733756969066163, + "acc_norm": 0.38596491228070173, + "acc_norm_stderr": 0.03733756969066163 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4563106796116505, + "acc_stderr": 0.04931801994220414, + "acc_norm": 0.4563106796116505, + "acc_norm_stderr": 0.04931801994220414 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.38569604086845466, + "acc_stderr": 0.01740647661921291, + "acc_norm": 0.38569604086845466, + "acc_norm_stderr": 0.01740647661921291 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3037037037037037, + "acc_stderr": 0.039725528847851375, + "acc_norm": 0.3037037037037037, + "acc_norm_stderr": 0.039725528847851375 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720683, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720683 + 
}, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.32340425531914896, + "acc_stderr": 0.030579442773610337, + "acc_norm": 0.32340425531914896, + "acc_norm_stderr": 0.030579442773610337 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3192771084337349, + "acc_stderr": 0.036293353299478595, + "acc_norm": 0.3192771084337349, + "acc_norm_stderr": 0.036293353299478595 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.41479099678456594, + "acc_stderr": 0.027982680459759563, + "acc_norm": 0.41479099678456594, + "acc_norm_stderr": 0.027982680459759563 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.36771300448430494, + "acc_stderr": 0.03236198350928276, + "acc_norm": 0.36771300448430494, + "acc_norm_stderr": 0.03236198350928276 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3893129770992366, + "acc_stderr": 0.04276486542814591, + "acc_norm": 0.3893129770992366, + "acc_norm_stderr": 0.04276486542814591 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.41, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.41, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.03540294377095367, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.03540294377095367 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.45517241379310347, + "acc_stderr": 0.04149886942192117, + "acc_norm": 0.45517241379310347, + "acc_norm_stderr": 0.04149886942192117 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.044405219061793254, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.044405219061793254 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.39915966386554624, + "acc_stderr": 0.03181110032413926, + "acc_norm": 0.39915966386554624, + "acc_norm_stderr": 0.03181110032413926 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.37435897435897436, + "acc_stderr": 0.024537591572830524, + 
"acc_norm": 0.37435897435897436, + "acc_norm_stderr": 0.024537591572830524 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.04750077341199985, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.04750077341199985 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3448275862068966, + "acc_stderr": 0.033442837442804574, + "acc_norm": 0.3448275862068966, + "acc_norm_stderr": 0.033442837442804574 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3774193548387097, + "acc_stderr": 0.027575960723278253, + "acc_norm": 0.3774193548387097, + "acc_norm_stderr": 0.027575960723278253 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6111111111111112, + "acc_stderr": 0.031937057262002924, + "acc_norm": 0.6111111111111112, + "acc_norm_stderr": 0.031937057262002924 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.43018867924528303, + "acc_stderr": 0.03047144586718323, + "acc_norm": 0.43018867924528303, + "acc_norm_stderr": 0.03047144586718323 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.37272727272727274, + "acc_stderr": 0.04631381319425463, + "acc_norm": 0.37272727272727274, + "acc_norm_stderr": 0.04631381319425463 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3037037037037037, + "acc_stderr": 0.02803792996911499, + "acc_norm": 0.3037037037037037, + "acc_norm_stderr": 0.02803792996911499 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + "acc_stderr": 0.037345356767871984, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.037345356767871984 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.47761194029850745, + 
"acc_stderr": 0.035319879302087305, + "acc_norm": 0.47761194029850745, + "acc_norm_stderr": 0.035319879302087305 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3699421965317919, + "acc_stderr": 0.03681229633394319, + "acc_norm": 0.3699421965317919, + "acc_norm_stderr": 0.03681229633394319 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.31216931216931215, + "acc_stderr": 0.023865206836972602, + "acc_norm": 0.31216931216931215, + "acc_norm_stderr": 0.023865206836972602 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3125, + "acc_stderr": 0.038760854559127644, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.038760854559127644 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4046242774566474, + "acc_stderr": 0.02642481659400985, + "acc_norm": 0.4046242774566474, + "acc_norm_stderr": 0.02642481659400985 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.37423312883435583, + "acc_stderr": 0.038020681028996146, + "acc_norm": 0.37423312883435583, + "acc_norm_stderr": 0.038020681028996146 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3271604938271605, + "acc_stderr": 0.026105673861409814, + "acc_norm": 0.3271604938271605, + "acc_norm_stderr": 0.026105673861409814 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.41968911917098445, + "acc_stderr": 0.03561587327685884, + "acc_norm": 0.41968911917098445, + "acc_norm_stderr": 0.03561587327685884 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.22807017543859648, + 
"acc_stderr": 0.03947152782669415, + "acc_norm": 0.22807017543859648, + "acc_norm_stderr": 0.03947152782669415 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3834862385321101, + "acc_stderr": 0.020847156641915984, + "acc_norm": 0.3834862385321101, + "acc_norm_stderr": 0.020847156641915984 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.0442626668137991, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.0442626668137991 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4477124183006536, + "acc_stderr": 0.028472938478033526, + "acc_norm": 0.4477124183006536, + "acc_norm_stderr": 0.028472938478033526 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5371900826446281, + "acc_stderr": 0.04551711196104218, + "acc_norm": 0.5371900826446281, + "acc_norm_stderr": 0.04551711196104218 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3223684210526316, + "acc_stderr": 0.03803510248351585, + "acc_norm": 0.3223684210526316, + "acc_norm_stderr": 0.03803510248351585 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.01863559403442397, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.01863559403442397 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3262411347517731, + "acc_stderr": 0.027968453043563168, + "acc_norm": 0.3262411347517731, + "acc_norm_stderr": 0.027968453043563168 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.30357142857142855, + "acc_stderr": 0.043642261558410445, + "acc_norm": 0.30357142857142855, + "acc_norm_stderr": 0.043642261558410445 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.41203703703703703, + "acc_stderr": 0.03356787758160835, + "acc_norm": 0.41203703703703703, + "acc_norm_stderr": 0.03356787758160835 + }, + 
"harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.31843575418994413, + "acc_stderr": 0.015581008080360274, + "acc_norm": 0.31843575418994413, + "acc_norm_stderr": 0.015581008080360274 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.029520095697687758, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.029520095697687758 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.44081632653061226, + "acc_stderr": 0.03178419114175363, + "acc_norm": 0.44081632653061226, + "acc_norm_stderr": 0.03178419114175363 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.47257383966244726, + "acc_stderr": 0.032498227183013026, + "acc_norm": 0.47257383966244726, + "acc_norm_stderr": 0.032498227183013026 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2966101694915254, + "acc_stderr": 0.011665946586082868, + "acc_norm": 0.2966101694915254, + "acc_norm_stderr": 0.011665946586082868 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.3235294117647059, + "acc_stderr": 0.03283472056108567, + "acc_norm": 0.3235294117647059, + "acc_norm_stderr": 0.03283472056108567 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3515151515151515, + "acc_stderr": 0.0372820699868265, + "acc_norm": 0.3515151515151515, + "acc_norm_stderr": 0.0372820699868265 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3243574051407589, + "mc1_stderr": 0.01638797677964793, + "mc2": 0.49917419306073907, + "mc2_stderr": 0.016202138687957245 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2857142857142857, + "acc_stderr": 0.015531620786986743, + "acc_norm": 
0.3565525383707202, + "acc_norm_stderr": 0.016467706981527445 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + 
"harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "mistralai/Mistral-7B-Instruct-v0.1", + "model_sha": "7ad5799710574ba1c1d953eba3077af582f3a773", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/mistralai/Mistral-7B-v0.1/result_2023-10-06 00:26:01.json b/mistralai/Mistral-7B-v0.1/result_2023-10-06 00:26:01.json new file mode 100644 index 0000000000000000000000000000000000000000..32a007d17a99baf43a51f8e3106166ae88d2cd09 --- /dev/null +++ b/mistralai/Mistral-7B-v0.1/result_2023-10-06 00:26:01.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.33532423208191126, + "acc_stderr": 0.01379618294778556, + "acc_norm": 0.38139931740614336, + "acc_norm_stderr": 0.01419438908668526 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3703445528779128, + "acc_stderr": 0.004819100456867818, + "acc_norm": 0.481876120294762, + "acc_norm_stderr": 0.004986502296931182 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4619883040935672, + "acc_stderr": 0.03823727092882307, + "acc_norm": 0.4619883040935672, + "acc_norm_stderr": 
0.03823727092882307 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5631067961165048, + "acc_stderr": 0.049111471073657764, + "acc_norm": 0.5631067961165048, + "acc_norm_stderr": 0.049111471073657764 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.46871008939974457, + "acc_stderr": 0.017844918090468544, + "acc_norm": 0.46871008939974457, + "acc_norm_stderr": 0.017844918090468544 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4148148148148148, + "acc_stderr": 0.042561937679014075, + "acc_norm": 0.4148148148148148, + "acc_norm_stderr": 0.042561937679014075 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.40425531914893614, + "acc_stderr": 0.03208115750788684, + "acc_norm": 0.40425531914893614, + "acc_norm_stderr": 0.03208115750788684 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.43373493975903615, + "acc_stderr": 0.03858158940685515, + "acc_norm": 0.43373493975903615, + "acc_norm_stderr": 0.03858158940685515 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5112540192926045, + "acc_stderr": 0.028390897396863533, + "acc_norm": 0.5112540192926045, + "acc_norm_stderr": 0.028390897396863533 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4439461883408072, + "acc_stderr": 0.03334625674242728, + "acc_norm": 0.4439461883408072, + "acc_norm_stderr": 0.03334625674242728 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5038167938931297, + "acc_stderr": 0.043851623256015534, + "acc_norm": 0.5038167938931297, + "acc_norm_stderr": 0.043851623256015534 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5505050505050505, + "acc_stderr": 0.035441324919479704, + "acc_norm": 0.5505050505050505, + "acc_norm_stderr": 
0.035441324919479704 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4482758620689655, + "acc_stderr": 0.041443118108781506, + "acc_norm": 0.4482758620689655, + "acc_norm_stderr": 0.041443118108781506 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.04280105837364395, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.04280105837364395 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.542016806722689, + "acc_stderr": 0.03236361111951941, + "acc_norm": 0.542016806722689, + "acc_norm_stderr": 0.03236361111951941 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4794871794871795, + "acc_stderr": 0.025329663163489943, + "acc_norm": 0.4794871794871795, + "acc_norm_stderr": 0.025329663163489943 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.57, + "acc_stderr": 0.04975698519562429, + "acc_norm": 0.57, + "acc_norm_stderr": 0.04975698519562429 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.047609522856952344, + "acc_norm": 0.34, + "acc_norm_stderr": 0.047609522856952344 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5277777777777778, + "acc_stderr": 0.04826217294139894, + "acc_norm": 0.5277777777777778, + "acc_norm_stderr": 0.04826217294139894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.43842364532019706, + "acc_stderr": 0.03491207857486519, + "acc_norm": 0.43842364532019706, + "acc_norm_stderr": 0.03491207857486519 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4935483870967742, + "acc_stderr": 0.02844163823354051, + "acc_norm": 0.4935483870967742, + "acc_norm_stderr": 0.02844163823354051 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7435897435897436, + "acc_stderr": 0.028605953702004243, + "acc_norm": 0.7435897435897436, + "acc_norm_stderr": 0.028605953702004243 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4339622641509434, + "acc_stderr": 0.030503292013342592, + 
"acc_norm": 0.4339622641509434, + "acc_norm_stderr": 0.030503292013342592 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.0478833976870286, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.0478833976870286 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.337037037037037, + "acc_stderr": 0.028820884666253252, + "acc_norm": 0.337037037037037, + "acc_norm_stderr": 0.028820884666253252 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2582781456953642, + "acc_stderr": 0.035737053147634576, + "acc_norm": 0.2582781456953642, + "acc_norm_stderr": 0.035737053147634576 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5970149253731343, + "acc_stderr": 0.034683432951111266, + "acc_norm": 0.5970149253731343, + "acc_norm_stderr": 0.034683432951111266 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3815028901734104, + "acc_stderr": 0.03703851193099521, + "acc_norm": 0.3815028901734104, + "acc_norm_stderr": 0.03703851193099521 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.373015873015873, + "acc_stderr": 0.02490699045899257, + "acc_norm": 0.373015873015873, + "acc_norm_stderr": 0.02490699045899257 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3472222222222222, + "acc_stderr": 0.039812405437178615, + "acc_norm": 0.3472222222222222, + "acc_norm_stderr": 0.039812405437178615 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.58, + "acc_stderr": 0.04960449637488584, + "acc_norm": 0.58, + "acc_norm_stderr": 0.04960449637488584 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.523121387283237, + "acc_stderr": 0.026890297881303118, + "acc_norm": 0.523121387283237, + "acc_norm_stderr": 0.026890297881303118 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5153374233128835, + 
"acc_stderr": 0.039265223787088424, + "acc_norm": 0.5153374233128835, + "acc_norm_stderr": 0.039265223787088424 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4382716049382716, + "acc_stderr": 0.027607914087400473, + "acc_norm": 0.4382716049382716, + "acc_norm_stderr": 0.027607914087400473 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5129533678756477, + "acc_stderr": 0.0360722806104775, + "acc_norm": 0.5129533678756477, + "acc_norm_stderr": 0.0360722806104775 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04434600701584925, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04434600701584925 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.4917431192660551, + "acc_stderr": 0.021434399918214338, + "acc_norm": 0.4917431192660551, + "acc_norm_stderr": 0.021434399918214338 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.04285714285714281, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.04285714285714281 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.45751633986928103, + "acc_stderr": 0.028526383452142628, + "acc_norm": 0.45751633986928103, + "acc_norm_stderr": 0.028526383452142628 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6942148760330579, + "acc_stderr": 0.04205953933884124, + "acc_norm": 0.6942148760330579, + "acc_norm_stderr": 0.04205953933884124 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4407894736842105, + "acc_stderr": 0.04040311062490435, + "acc_norm": 0.4407894736842105, + "acc_norm_stderr": 0.04040311062490435 + }, + "harness|ko_mmlu_professional_psychology|5": { + 
"acc": 0.39869281045751637, + "acc_stderr": 0.01980828131744984, + "acc_norm": 0.39869281045751637, + "acc_norm_stderr": 0.01980828131744984 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.35815602836879434, + "acc_stderr": 0.028602085862759412, + "acc_norm": 0.35815602836879434, + "acc_norm_stderr": 0.028602085862759412 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4017857142857143, + "acc_stderr": 0.04653333146973646, + "acc_norm": 0.4017857142857143, + "acc_norm_stderr": 0.04653333146973646 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4351851851851852, + "acc_stderr": 0.03381200005643525, + "acc_norm": 0.4351851851851852, + "acc_norm_stderr": 0.03381200005643525 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.34413407821229053, + "acc_stderr": 0.015889221313307094, + "acc_norm": 0.34413407821229053, + "acc_norm_stderr": 0.015889221313307094 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.41544117647058826, + "acc_stderr": 0.029935342707877743, + "acc_norm": 0.41544117647058826, + "acc_norm_stderr": 0.029935342707877743 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.563265306122449, + "acc_stderr": 0.03175195237583323, + "acc_norm": 0.563265306122449, + "acc_norm_stderr": 0.03175195237583323 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5738396624472574, + "acc_stderr": 0.03219035703131774, + "acc_norm": 0.5738396624472574, + "acc_norm_stderr": 0.03219035703131774 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.32790091264667537, + "acc_stderr": 0.011989936640666535, + "acc_norm": 0.32790091264667537, + 
"acc_norm_stderr": 0.011989936640666535 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.44607843137254904, + "acc_stderr": 0.03488845451304974, + "acc_norm": 0.44607843137254904, + "acc_norm_stderr": 0.03488845451304974 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.45454545454545453, + "acc_stderr": 0.03888176921674099, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.03888176921674099 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2864137086903305, + "mc1_stderr": 0.015826142439502342, + "mc2": 0.4613168911756529, + "mc2_stderr": 0.015417066073991514 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5076741440377804, + "acc_stderr": 0.017188329219654273, + "acc_norm": 0.5678866587957497, + "acc_norm_stderr": 0.017031170198851742 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "mistralai/Mistral-7B-v0.1", + "model_sha": "5e9c98b96d071dce59368012254c55b0ec6f8658", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/mlabonne/AlphaMonarch-7B/result_2024-05-15 22:17:49.json b/mlabonne/AlphaMonarch-7B/result_2024-05-15 22:17:49.json new file mode 100644 index 
0000000000000000000000000000000000000000..7202d31ae6084aab283780f2f879f5f0304b52ba --- /dev/null +++ b/mlabonne/AlphaMonarch-7B/result_2024-05-15 22:17:49.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3916382252559727, + "acc_stderr": 0.014264122124938215, + "acc_norm": 0.4402730375426621, + "acc_norm_stderr": 0.014506769524804241 + }, + "harness|ko_hellaswag|10": { + "acc": 0.393945429197371, + "acc_stderr": 0.004876243842318603, + "acc_norm": 0.5235012945628361, + "acc_norm_stderr": 0.004984266543053129 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.47953216374269003, + "acc_stderr": 0.038316105328219316, + "acc_norm": 0.47953216374269003, + "acc_norm_stderr": 0.038316105328219316 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5922330097087378, + "acc_stderr": 0.04865777570410769, + "acc_norm": 0.5922330097087378, + "acc_norm_stderr": 0.04865777570410769 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.47509578544061304, + "acc_stderr": 0.017857770704901018, + "acc_norm": 0.47509578544061304, + "acc_norm_stderr": 0.017857770704901018 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37777777777777777, + "acc_stderr": 0.04188307537595853, + "acc_norm": 0.37777777777777777, + "acc_norm_stderr": 0.04188307537595853 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39574468085106385, + "acc_stderr": 0.03196758697835362, + "acc_norm": 0.39574468085106385, + "acc_norm_stderr": 0.03196758697835362 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3614457831325301, + "acc_stderr": 0.03740059382029319, + "acc_norm": 0.3614457831325301, + "acc_norm_stderr": 0.03740059382029319 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4340836012861736, + "acc_stderr": 0.0281502322445356, + "acc_norm": 0.4340836012861736, + "acc_norm_stderr": 
0.0281502322445356 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4125560538116592, + "acc_stderr": 0.03304062175449296, + "acc_norm": 0.4125560538116592, + "acc_norm_stderr": 0.03304062175449296 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.44274809160305345, + "acc_stderr": 0.04356447202665069, + "acc_norm": 0.44274809160305345, + "acc_norm_stderr": 0.04356447202665069 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.45, + "acc_stderr": 0.04999999999999999, + "acc_norm": 0.45, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.035402943770953675, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.035402943770953675 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.47586206896551725, + "acc_stderr": 0.041618085035015295, + "acc_norm": 0.47586206896551725, + "acc_norm_stderr": 0.041618085035015295 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.04158307533083286, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.04158307533083286 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5126050420168067, + "acc_stderr": 0.03246816765752174, + "acc_norm": 0.5126050420168067, + "acc_norm_stderr": 0.03246816765752174 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.48205128205128206, + "acc_stderr": 0.025334667080954953, + "acc_norm": 0.48205128205128206, + "acc_norm_stderr": 0.025334667080954953 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 
0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.37438423645320196, + "acc_stderr": 0.03405155380561952, + "acc_norm": 0.37438423645320196, + "acc_norm_stderr": 0.03405155380561952 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.45161290322580644, + "acc_stderr": 0.028310500348568392, + "acc_norm": 0.45161290322580644, + "acc_norm_stderr": 0.028310500348568392 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7478632478632479, + "acc_stderr": 0.02844796547623102, + "acc_norm": 0.7478632478632479, + "acc_norm_stderr": 0.02844796547623102 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4226415094339623, + "acc_stderr": 0.030402331445769537, + "acc_norm": 0.4226415094339623, + "acc_norm_stderr": 0.030402331445769537 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4727272727272727, + "acc_stderr": 0.04782001791380063, + "acc_norm": 0.4727272727272727, + "acc_norm_stderr": 0.04782001791380063 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.02874204090394848, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.02874204090394848 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.26490066225165565, + "acc_stderr": 0.03603038545360384, + "acc_norm": 0.26490066225165565, + "acc_norm_stderr": 0.03603038545360384 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5621890547263682, + "acc_stderr": 0.035080801121998406, + "acc_norm": 0.5621890547263682, + "acc_norm_stderr": 0.035080801121998406 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4046242774566474, + "acc_stderr": 0.03742461193887248, + "acc_norm": 0.4046242774566474, + "acc_norm_stderr": 0.03742461193887248 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3439153439153439, + "acc_stderr": 0.024464426625596433, + "acc_norm": 0.3439153439153439, + "acc_norm_stderr": 0.024464426625596433 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 
0.3819444444444444, + "acc_stderr": 0.040629907841466674, + "acc_norm": 0.3819444444444444, + "acc_norm_stderr": 0.040629907841466674 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.57, + "acc_stderr": 0.04975698519562427, + "acc_norm": 0.57, + "acc_norm_stderr": 0.04975698519562427 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.49710982658959535, + "acc_stderr": 0.02691864538323901, + "acc_norm": 0.49710982658959535, + "acc_norm_stderr": 0.02691864538323901 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5030674846625767, + "acc_stderr": 0.03928297078179663, + "acc_norm": 0.5030674846625767, + "acc_norm_stderr": 0.03928297078179663 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.02764847787741332, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.02764847787741332 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.49222797927461137, + "acc_stderr": 0.03608003225569654, + "acc_norm": 0.49222797927461137, + "acc_norm_stderr": 0.03608003225569654 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2982456140350877, + "acc_stderr": 0.04303684033537317, + "acc_norm": 0.2982456140350877, + "acc_norm_stderr": 0.04303684033537317 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5009174311926605, + "acc_stderr": 0.021437287056051208, + "acc_norm": 0.5009174311926605, + "acc_norm_stderr": 0.021437287056051208 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.38095238095238093, + "acc_stderr": 0.043435254289490965, + "acc_norm": 0.38095238095238093, + "acc_norm_stderr": 0.043435254289490965 + }, + "harness|ko_mmlu_nutrition|5": { + 
"acc": 0.477124183006536, + "acc_stderr": 0.028599936776089786, + "acc_norm": 0.477124183006536, + "acc_norm_stderr": 0.028599936776089786 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5371900826446281, + "acc_stderr": 0.04551711196104218, + "acc_norm": 0.5371900826446281, + "acc_norm_stderr": 0.04551711196104218 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.42105263157894735, + "acc_stderr": 0.04017901275981748, + "acc_norm": 0.42105263157894735, + "acc_norm_stderr": 0.04017901275981748 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.40032679738562094, + "acc_stderr": 0.019821843688271758, + "acc_norm": 0.40032679738562094, + "acc_norm_stderr": 0.019821843688271758 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3900709219858156, + "acc_stderr": 0.02909767559946393, + "acc_norm": 0.3900709219858156, + "acc_norm_stderr": 0.02909767559946393 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4107142857142857, + "acc_stderr": 0.04669510663875191, + "acc_norm": 0.4107142857142857, + "acc_norm_stderr": 0.04669510663875191 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.39814814814814814, + "acc_stderr": 0.033384734032074016, + "acc_norm": 0.39814814814814814, + "acc_norm_stderr": 0.033384734032074016 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.30726256983240224, + "acc_stderr": 0.015430158846469606, + "acc_norm": 0.30726256983240224, + "acc_norm_stderr": 0.015430158846469606 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.67, + "acc_stderr": 0.047258156262526094, + "acc_norm": 0.67, + "acc_norm_stderr": 0.047258156262526094 + }, + 
"harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3602941176470588, + "acc_stderr": 0.029163128570670733, + "acc_norm": 0.3602941176470588, + "acc_norm_stderr": 0.029163128570670733 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5142857142857142, + "acc_stderr": 0.03199615232806286, + "acc_norm": 0.5142857142857142, + "acc_norm_stderr": 0.03199615232806286 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5907172995780591, + "acc_stderr": 0.03200704183359591, + "acc_norm": 0.5907172995780591, + "acc_norm_stderr": 0.03200704183359591 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3474576271186441, + "acc_stderr": 0.012161417729749806, + "acc_norm": 0.3474576271186441, + "acc_norm_stderr": 0.012161417729749806 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4068627450980392, + "acc_stderr": 0.03447891136353382, + "acc_norm": 0.4068627450980392, + "acc_norm_stderr": 0.03447891136353382 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.41818181818181815, + "acc_stderr": 0.03851716319398395, + "acc_norm": 0.41818181818181815, + "acc_norm_stderr": 0.03851716319398395 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.4320685434516524, + "mc1_stderr": 0.01734120239498825, + "mc2": 0.5960038480502604, + "mc2_stderr": 0.016362519876481164 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.41912632821723733, + "acc_stderr": 0.016963995010862792, + "acc_norm": 0.42384887839433294, + "acc_norm_stderr": 0.01698981083462825 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + 
"harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + 
"harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "mlabonne/AlphaMonarch-7B", + "model_sha": "3de065d84411d74e5b3590f67f52b0b71faf6161", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/mlabonne/Beyonder-4x7B-v3/result_2024-05-15 18:01:04.json b/mlabonne/Beyonder-4x7B-v3/result_2024-05-15 18:01:04.json new file mode 100644 index 0000000000000000000000000000000000000000..0f438f2121f295f644bb4bffa6a41365c69ef161 --- /dev/null +++ b/mlabonne/Beyonder-4x7B-v3/result_2024-05-15 18:01:04.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3779863481228669, + "acc_stderr": 0.014169664520303101, + "acc_norm": 0.4334470989761092, + "acc_norm_stderr": 0.014481376224558903 + }, + "harness|ko_hellaswag|10": { + "acc": 0.39822744473212507, + "acc_stderr": 0.004885323175701677, + "acc_norm": 0.5216092411870146, + "acc_norm_stderr": 0.004985119183640759 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4619883040935672, + "acc_stderr": 0.03823727092882307, + "acc_norm": 0.4619883040935672, + "acc_norm_stderr": 0.03823727092882307 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5728155339805825, + "acc_stderr": 0.04897957737781168, + "acc_norm": 0.5728155339805825, + "acc_norm_stderr": 0.04897957737781168 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4725415070242657, + "acc_stderr": 0.01785298126663396, + "acc_norm": 0.4725415070242657, + "acc_norm_stderr": 0.01785298126663396 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3851851851851852, + "acc_stderr": 0.042039210401562783, + "acc_norm": 0.3851851851851852, + 
"acc_norm_stderr": 0.042039210401562783 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.41702127659574467, + "acc_stderr": 0.03223276266711712, + "acc_norm": 0.41702127659574467, + "acc_norm_stderr": 0.03223276266711712 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3614457831325301, + "acc_stderr": 0.0374005938202932, + "acc_norm": 0.3614457831325301, + "acc_norm_stderr": 0.0374005938202932 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.45980707395498394, + "acc_stderr": 0.028306190403305696, + "acc_norm": 0.45980707395498394, + "acc_norm_stderr": 0.028306190403305696 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.42152466367713004, + "acc_stderr": 0.033141902221106564, + "acc_norm": 0.42152466367713004, + "acc_norm_stderr": 0.033141902221106564 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48091603053435117, + "acc_stderr": 0.04382094705550988, + "acc_norm": 0.48091603053435117, + "acc_norm_stderr": 0.04382094705550988 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5606060606060606, + "acc_stderr": 0.035360859475294805, + "acc_norm": 0.5606060606060606, + "acc_norm_stderr": 0.035360859475294805 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.46206896551724136, + "acc_stderr": 0.041546596717075474, + "acc_norm": 0.46206896551724136, + "acc_norm_stderr": 0.041546596717075474 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.040233822736177455, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.040233822736177455 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5168067226890757, + "acc_stderr": 
0.03246013680375308, + "acc_norm": 0.5168067226890757, + "acc_norm_stderr": 0.03246013680375308 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.47692307692307695, + "acc_stderr": 0.025323990861736125, + "acc_norm": 0.47692307692307695, + "acc_norm_stderr": 0.025323990861736125 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.63, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.63, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5370370370370371, + "acc_stderr": 0.04820403072760627, + "acc_norm": 0.5370370370370371, + "acc_norm_stderr": 0.04820403072760627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39408866995073893, + "acc_stderr": 0.034381579670365446, + "acc_norm": 0.39408866995073893, + "acc_norm_stderr": 0.034381579670365446 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.43548387096774194, + "acc_stderr": 0.02820622559150274, + "acc_norm": 0.43548387096774194, + "acc_norm_stderr": 0.02820622559150274 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7564102564102564, + "acc_stderr": 0.028120966503914394, + "acc_norm": 0.7564102564102564, + "acc_norm_stderr": 0.028120966503914394 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4679245283018868, + "acc_stderr": 0.030709486992556538, + "acc_norm": 0.4679245283018868, + "acc_norm_stderr": 0.030709486992556538 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4909090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.4909090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.32592592592592595, + "acc_stderr": 0.02857834836547308, + "acc_norm": 0.32592592592592595, + "acc_norm_stderr": 0.02857834836547308 + }, + "harness|ko_mmlu_high_school_physics|5": 
{ + "acc": 0.2781456953642384, + "acc_stderr": 0.03658603262763744, + "acc_norm": 0.2781456953642384, + "acc_norm_stderr": 0.03658603262763744 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5771144278606966, + "acc_stderr": 0.034932317774212816, + "acc_norm": 0.5771144278606966, + "acc_norm_stderr": 0.034932317774212816 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3988439306358382, + "acc_stderr": 0.03733626655383509, + "acc_norm": 0.3988439306358382, + "acc_norm_stderr": 0.03733626655383509 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3544973544973545, + "acc_stderr": 0.024636830602842, + "acc_norm": 0.3544973544973545, + "acc_norm_stderr": 0.024636830602842 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3402777777777778, + "acc_stderr": 0.03962135573486219, + "acc_norm": 0.3402777777777778, + "acc_norm_stderr": 0.03962135573486219 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.55, + "acc_stderr": 0.05, + "acc_norm": 0.55, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.48265895953757226, + "acc_stderr": 0.02690290045866664, + "acc_norm": 0.48265895953757226, + "acc_norm_stderr": 0.02690290045866664 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.49079754601226994, + "acc_stderr": 0.03927705600787443, + "acc_norm": 0.49079754601226994, + "acc_norm_stderr": 0.03927705600787443 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.44753086419753085, + "acc_stderr": 0.02766713856942271, + "acc_norm": 0.44753086419753085, + "acc_norm_stderr": 0.02766713856942271 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 
0.5129533678756477, + "acc_stderr": 0.0360722806104775, + "acc_norm": 0.5129533678756477, + "acc_norm_stderr": 0.0360722806104775 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2982456140350877, + "acc_stderr": 0.04303684033537317, + "acc_norm": 0.2982456140350877, + "acc_norm_stderr": 0.04303684033537317 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5211009174311927, + "acc_stderr": 0.021418224754264643, + "acc_norm": 0.5211009174311927, + "acc_norm_stderr": 0.021418224754264643 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4126984126984127, + "acc_stderr": 0.04403438954768177, + "acc_norm": 0.4126984126984127, + "acc_norm_stderr": 0.04403438954768177 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5, + "acc_stderr": 0.028629916715693413, + "acc_norm": 0.5, + "acc_norm_stderr": 0.028629916715693413 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6033057851239669, + "acc_stderr": 0.044658697805310094, + "acc_norm": 0.6033057851239669, + "acc_norm_stderr": 0.044658697805310094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4342105263157895, + "acc_stderr": 0.04033565667848319, + "acc_norm": 0.4342105263157895, + "acc_norm_stderr": 0.04033565667848319 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.41013071895424835, + "acc_stderr": 0.019898412717635903, + "acc_norm": 0.41013071895424835, + "acc_norm_stderr": 0.019898412717635903 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.38652482269503546, + "acc_stderr": 0.029049190342543458, + "acc_norm": 0.38652482269503546, + "acc_norm_stderr": 0.029049190342543458 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4107142857142857, + "acc_stderr": 0.04669510663875191, + "acc_norm": 0.4107142857142857, + "acc_norm_stderr": 0.04669510663875191 + }, + 
"harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.42592592592592593, + "acc_stderr": 0.03372343271653062, + "acc_norm": 0.42592592592592593, + "acc_norm_stderr": 0.03372343271653062 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2871508379888268, + "acc_stderr": 0.015131608849963753, + "acc_norm": 0.2871508379888268, + "acc_norm_stderr": 0.015131608849963753 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.66, + "acc_stderr": 0.04760952285695238, + "acc_norm": 0.66, + "acc_norm_stderr": 0.04760952285695238 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.36764705882352944, + "acc_stderr": 0.029289413409403192, + "acc_norm": 0.36764705882352944, + "acc_norm_stderr": 0.029289413409403192 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5183673469387755, + "acc_stderr": 0.03198761546763127, + "acc_norm": 0.5183673469387755, + "acc_norm_stderr": 0.03198761546763127 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5991561181434599, + "acc_stderr": 0.03190080389473236, + "acc_norm": 0.5991561181434599, + "acc_norm_stderr": 0.03190080389473236 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.34419817470664926, + "acc_stderr": 0.012134433741002574, + "acc_norm": 0.34419817470664926, + "acc_norm_stderr": 0.012134433741002574 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.46568627450980393, + "acc_stderr": 0.03501038327635897, + "acc_norm": 0.46568627450980393, + "acc_norm_stderr": 0.03501038327635897 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.49696969696969695, + "acc_stderr": 0.03904272341431856, + "acc_norm": 0.49696969696969695, + "acc_norm_stderr": 0.03904272341431856 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.4112607099143207, + "mc1_stderr": 0.017225627083660853, + 
"mc2": 0.5915133020893522, + "mc2_stderr": 0.016293620242154062 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4132231404958678, + "acc_stderr": 0.016929480234495232, + "acc_norm": 0.4132231404958678, + "acc_norm_stderr": 0.016929480234495232 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + 
"harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "mlabonne/Beyonder-4x7B-v3", + "model_sha": "8e923fa480f511ab54d79b44b0487768bdd3de4e", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/mlabonne/Daredevil-8B-abliterated-dpomix/result_2024-05-28 07:48:19.json b/mlabonne/Daredevil-8B-abliterated-dpomix/result_2024-05-28 07:48:19.json new file mode 100644 index 0000000000000000000000000000000000000000..b18e73c37aacc76f462be631abda792aa5df6c1b --- /dev/null +++ b/mlabonne/Daredevil-8B-abliterated-dpomix/result_2024-05-28 07:48:19.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4232081911262799, + "acc_stderr": 0.014438036220848029, + "acc_norm": 0.4812286689419795, + "acc_norm_stderr": 0.014601090150633964 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3868751244771958, + "acc_stderr": 0.0048603930119746706, + "acc_norm": 0.5159330810595499, + 
"acc_norm_stderr": 0.004987247325495625 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5964912280701754, + "acc_stderr": 0.037627386999170565, + "acc_norm": 0.5964912280701754, + "acc_norm_stderr": 0.037627386999170565 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6407766990291263, + "acc_stderr": 0.04750458399041695, + "acc_norm": 0.6407766990291263, + "acc_norm_stderr": 0.04750458399041695 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4776500638569604, + "acc_stderr": 0.01786209177850787, + "acc_norm": 0.4776500638569604, + "acc_norm_stderr": 0.01786209177850787 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.42962962962962964, + "acc_stderr": 0.04276349494376599, + "acc_norm": 0.42962962962962964, + "acc_norm_stderr": 0.04276349494376599 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542126, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542126 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4851063829787234, + "acc_stderr": 0.03267151848924777, + "acc_norm": 0.4851063829787234, + "acc_norm_stderr": 0.03267151848924777 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3855421686746988, + "acc_stderr": 0.03789134424611548, + "acc_norm": 0.3855421686746988, + "acc_norm_stderr": 0.03789134424611548 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5819935691318328, + "acc_stderr": 0.028013651891995076, + "acc_norm": 0.5819935691318328, + "acc_norm_stderr": 0.028013651891995076 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5381165919282511, + "acc_stderr": 0.03346015011973228, + "acc_norm": 0.5381165919282511, + "acc_norm_stderr": 0.03346015011973228 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5190839694656488, + "acc_stderr": 0.04382094705550988, + "acc_norm": 0.5190839694656488, + "acc_norm_stderr": 0.04382094705550988 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + 
"acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.035476014940069384, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.035476014940069384 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5862068965517241, + "acc_stderr": 0.04104269211806231, + "acc_norm": 0.5862068965517241, + "acc_norm_stderr": 0.04104269211806231 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.4215686274509804, + "acc_stderr": 0.04913595201274498, + "acc_norm": 0.4215686274509804, + "acc_norm_stderr": 0.04913595201274498 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5588235294117647, + "acc_stderr": 0.03225294232399639, + "acc_norm": 0.5588235294117647, + "acc_norm_stderr": 0.03225294232399639 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5102564102564102, + "acc_stderr": 0.025345672221942374, + "acc_norm": 0.5102564102564102, + "acc_norm_stderr": 0.025345672221942374 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.63, + "acc_stderr": 0.04852365870939098, + "acc_norm": 0.63, + "acc_norm_stderr": 0.04852365870939098 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6388888888888888, + "acc_stderr": 0.04643454608906275, + "acc_norm": 0.6388888888888888, + "acc_norm_stderr": 0.04643454608906275 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.46798029556650245, + "acc_stderr": 0.035107665979592174, + "acc_norm": 0.46798029556650245, + "acc_norm_stderr": 0.035107665979592174 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5419354838709678, + "acc_stderr": 0.028343787250540608, + "acc_norm": 0.5419354838709678, + "acc_norm_stderr": 0.028343787250540608 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7649572649572649, + "acc_stderr": 
0.027778835904935427, + "acc_norm": 0.7649572649572649, + "acc_norm_stderr": 0.027778835904935427 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5358490566037736, + "acc_stderr": 0.030693675018458003, + "acc_norm": 0.5358490566037736, + "acc_norm_stderr": 0.030693675018458003 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5545454545454546, + "acc_stderr": 0.047605488214603246, + "acc_norm": 0.5545454545454546, + "acc_norm_stderr": 0.047605488214603246 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3925925925925926, + "acc_stderr": 0.029773847012532967, + "acc_norm": 0.3925925925925926, + "acc_norm_stderr": 0.029773847012532967 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3708609271523179, + "acc_stderr": 0.03943966699183629, + "acc_norm": 0.3708609271523179, + "acc_norm_stderr": 0.03943966699183629 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6915422885572139, + "acc_stderr": 0.03265819588512697, + "acc_norm": 0.6915422885572139, + "acc_norm_stderr": 0.03265819588512697 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.45664739884393063, + "acc_stderr": 0.03798106566014498, + "acc_norm": 0.45664739884393063, + "acc_norm_stderr": 0.03798106566014498 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3941798941798942, + "acc_stderr": 0.025167982333894143, + "acc_norm": 0.3941798941798942, + "acc_norm_stderr": 0.025167982333894143 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4861111111111111, + "acc_stderr": 0.04179596617581, + "acc_norm": 0.4861111111111111, + "acc_norm_stderr": 0.04179596617581 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.73, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.73, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 
0.5722543352601156, + "acc_stderr": 0.02663653974111608, + "acc_norm": 0.5722543352601156, + "acc_norm_stderr": 0.02663653974111608 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4601226993865031, + "acc_stderr": 0.0391585729143697, + "acc_norm": 0.4601226993865031, + "acc_norm_stderr": 0.0391585729143697 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5462962962962963, + "acc_stderr": 0.0277012284685426, + "acc_norm": 0.5462962962962963, + "acc_norm_stderr": 0.0277012284685426 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939098, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939098 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5699481865284974, + "acc_stderr": 0.03572954333144809, + "acc_norm": 0.5699481865284974, + "acc_norm_stderr": 0.03572954333144809 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.35964912280701755, + "acc_stderr": 0.04514496132873633, + "acc_norm": 0.35964912280701755, + "acc_norm_stderr": 0.04514496132873633 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6055045871559633, + "acc_stderr": 0.02095464210858747, + "acc_norm": 0.6055045871559633, + "acc_norm_stderr": 0.02095464210858747 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4603174603174603, + "acc_stderr": 0.04458029125470973, + "acc_norm": 0.4603174603174603, + "acc_norm_stderr": 0.04458029125470973 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5686274509803921, + "acc_stderr": 0.028358956313423552, + "acc_norm": 0.5686274509803921, + "acc_norm_stderr": 0.028358956313423552 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.64, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.64, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7355371900826446, + "acc_stderr": 0.04026187527591204, + "acc_norm": 0.7355371900826446, + "acc_norm_stderr": 0.04026187527591204 + }, + 
"harness|ko_mmlu_astronomy|5": { + "acc": 0.5328947368421053, + "acc_stderr": 0.04060127035236395, + "acc_norm": 0.5328947368421053, + "acc_norm_stderr": 0.04060127035236395 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4591503267973856, + "acc_stderr": 0.020160213617222516, + "acc_norm": 0.4591503267973856, + "acc_norm_stderr": 0.020160213617222516 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.36524822695035464, + "acc_stderr": 0.028723863853281278, + "acc_norm": 0.36524822695035464, + "acc_norm_stderr": 0.028723863853281278 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4732142857142857, + "acc_stderr": 0.047389751192741546, + "acc_norm": 0.4732142857142857, + "acc_norm_stderr": 0.047389751192741546 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4398148148148148, + "acc_stderr": 0.0338517797604481, + "acc_norm": 0.4398148148148148, + "acc_norm_stderr": 0.0338517797604481 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.3094972067039106, + "acc_stderr": 0.01546116900237153, + "acc_norm": 0.3094972067039106, + "acc_norm_stderr": 0.01546116900237153 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.47, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.47, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.68, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.68, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3786764705882353, + "acc_stderr": 0.02946513363977613, + "acc_norm": 0.3786764705882353, + "acc_norm_stderr": 0.02946513363977613 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.636734693877551, + "acc_stderr": 0.030789051139030806, + "acc_norm": 0.636734693877551, + "acc_norm_stderr": 0.030789051139030806 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6919831223628692, + "acc_stderr": 0.030052389335605688, + "acc_norm": 
0.6919831223628692, + "acc_norm_stderr": 0.030052389335605688 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.39048239895697523, + "acc_stderr": 0.012460135913945066, + "acc_norm": 0.39048239895697523, + "acc_norm_stderr": 0.012460135913945066 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5833333333333334, + "acc_stderr": 0.03460228327239172, + "acc_norm": 0.5833333333333334, + "acc_norm_stderr": 0.03460228327239172 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6909090909090909, + "acc_stderr": 0.036085410115739666, + "acc_norm": 0.6909090909090909, + "acc_norm_stderr": 0.036085410115739666 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3671970624235006, + "mc1_stderr": 0.01687480500145318, + "mc2": 0.5383229260314515, + "mc2_stderr": 0.015892505071415954 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.49940968122786306, + "acc_stderr": 0.017190342123448662, + "acc_norm": 0.5312868949232585, + "acc_norm_stderr": 0.017156666859785463 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + 
"harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "mlabonne/Daredevil-8B-abliterated-dpomix", + "model_sha": "2f4a5e8a8522f19dff345c7189b7891468763061", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + 
"override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/mlabonne/Daredevil-8B-abliterated/result_2024-05-28 08:16:50.json b/mlabonne/Daredevil-8B-abliterated/result_2024-05-28 08:16:50.json new file mode 100644 index 0000000000000000000000000000000000000000..c2991d599a827773ffcb769cc60e55503a16af54 --- /dev/null +++ b/mlabonne/Daredevil-8B-abliterated/result_2024-05-28 08:16:50.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4129692832764505, + "acc_stderr": 0.014388344935398326, + "acc_norm": 0.4812286689419795, + "acc_norm_stderr": 0.014601090150633964 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3822943636725752, + "acc_stderr": 0.004849547819134476, + "acc_norm": 0.5109539932284406, + "acc_norm_stderr": 0.0049885838203099185 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5964912280701754, + "acc_stderr": 0.037627386999170565, + "acc_norm": 0.5964912280701754, + "acc_norm_stderr": 0.037627386999170565 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6504854368932039, + "acc_stderr": 0.04721188506097172, + "acc_norm": 0.6504854368932039, + "acc_norm_stderr": 0.04721188506097172 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4840357598978289, + "acc_stderr": 0.01787084750608172, + "acc_norm": 0.4840357598978289, + "acc_norm_stderr": 0.01787084750608172 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4222222222222222, + "acc_stderr": 0.042667634040995814, + "acc_norm": 0.4222222222222222, + "acc_norm_stderr": 0.042667634040995814 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206824, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206824 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4978723404255319, + "acc_stderr": 0.032685726586674915, + "acc_norm": 0.4978723404255319, + "acc_norm_stderr": 0.032685726586674915 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3674698795180723, + "acc_stderr": 
0.03753267402120574, + "acc_norm": 0.3674698795180723, + "acc_norm_stderr": 0.03753267402120574 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.572347266881029, + "acc_stderr": 0.02809924077580956, + "acc_norm": 0.572347266881029, + "acc_norm_stderr": 0.02809924077580956 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5381165919282511, + "acc_stderr": 0.03346015011973228, + "acc_norm": 0.5381165919282511, + "acc_norm_stderr": 0.03346015011973228 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5190839694656488, + "acc_stderr": 0.04382094705550988, + "acc_norm": 0.5190839694656488, + "acc_norm_stderr": 0.04382094705550988 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5404040404040404, + "acc_stderr": 0.035507024651313425, + "acc_norm": 0.5404040404040404, + "acc_norm_stderr": 0.035507024651313425 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5862068965517241, + "acc_stderr": 0.041042692118062316, + "acc_norm": 0.5862068965517241, + "acc_norm_stderr": 0.041042692118062316 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.37254901960784315, + "acc_stderr": 0.04810840148082636, + "acc_norm": 0.37254901960784315, + "acc_norm_stderr": 0.04810840148082636 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5630252100840336, + "acc_stderr": 0.03221943636566197, + "acc_norm": 0.5630252100840336, + "acc_norm_stderr": 0.03221943636566197 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5128205128205128, + "acc_stderr": 0.02534267129380724, + "acc_norm": 0.5128205128205128, + "acc_norm_stderr": 0.02534267129380724 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.64, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.64, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 
0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6481481481481481, + "acc_stderr": 0.04616631111801713, + "acc_norm": 0.6481481481481481, + "acc_norm_stderr": 0.04616631111801713 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.45320197044334976, + "acc_stderr": 0.035025446508458714, + "acc_norm": 0.45320197044334976, + "acc_norm_stderr": 0.035025446508458714 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5451612903225806, + "acc_stderr": 0.028327743091561084, + "acc_norm": 0.5451612903225806, + "acc_norm_stderr": 0.028327743091561084 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7649572649572649, + "acc_stderr": 0.027778835904935427, + "acc_norm": 0.7649572649572649, + "acc_norm_stderr": 0.027778835904935427 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.539622641509434, + "acc_stderr": 0.03067609659938918, + "acc_norm": 0.539622641509434, + "acc_norm_stderr": 0.03067609659938918 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5545454545454546, + "acc_stderr": 0.047605488214603246, + "acc_norm": 0.5545454545454546, + "acc_norm_stderr": 0.047605488214603246 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3592592592592593, + "acc_stderr": 0.02925290592725198, + "acc_norm": 0.3592592592592593, + "acc_norm_stderr": 0.02925290592725198 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.37748344370860926, + "acc_stderr": 0.0395802723112157, + "acc_norm": 0.37748344370860926, + "acc_norm_stderr": 0.0395802723112157 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.681592039800995, + "acc_stderr": 0.03294118479054095, + "acc_norm": 0.681592039800995, + "acc_norm_stderr": 0.03294118479054095 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4624277456647399, + "acc_stderr": 0.038016851045244604, + "acc_norm": 0.4624277456647399, + "acc_norm_stderr": 0.038016851045244604 + 
}, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3862433862433862, + "acc_stderr": 0.02507598176760168, + "acc_norm": 0.3862433862433862, + "acc_norm_stderr": 0.02507598176760168 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4791666666666667, + "acc_stderr": 0.041775789507399935, + "acc_norm": 0.4791666666666667, + "acc_norm_stderr": 0.041775789507399935 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.74, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.74, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5780346820809249, + "acc_stderr": 0.02658923114217426, + "acc_norm": 0.5780346820809249, + "acc_norm_stderr": 0.02658923114217426 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.44785276073619634, + "acc_stderr": 0.03906947479456601, + "acc_norm": 0.44785276073619634, + "acc_norm_stderr": 0.03906947479456601 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5432098765432098, + "acc_stderr": 0.027716661650194038, + "acc_norm": 0.5432098765432098, + "acc_norm_stderr": 0.027716661650194038 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939098, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939098 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5647668393782384, + "acc_stderr": 0.03578038165008585, + "acc_norm": 0.5647668393782384, + "acc_norm_stderr": 0.03578038165008585 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3508771929824561, + "acc_stderr": 0.044895393502706986, + "acc_norm": 0.3508771929824561, + "acc_norm_stderr": 0.044895393502706986 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5981651376146789, + "acc_stderr": 0.02102010617299701, + "acc_norm": 0.5981651376146789, + "acc_norm_stderr": 
0.02102010617299701 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4523809523809524, + "acc_stderr": 0.044518079590553275, + "acc_norm": 0.4523809523809524, + "acc_norm_stderr": 0.044518079590553275 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.565359477124183, + "acc_stderr": 0.028384256704883037, + "acc_norm": 0.565359477124183, + "acc_norm_stderr": 0.028384256704883037 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.64, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.64, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7107438016528925, + "acc_stderr": 0.04139112727635464, + "acc_norm": 0.7107438016528925, + "acc_norm_stderr": 0.04139112727635464 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5197368421052632, + "acc_stderr": 0.04065771002562603, + "acc_norm": 0.5197368421052632, + "acc_norm_stderr": 0.04065771002562603 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.44607843137254904, + "acc_stderr": 0.020109864547181364, + "acc_norm": 0.44607843137254904, + "acc_norm_stderr": 0.020109864547181364 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.36524822695035464, + "acc_stderr": 0.028723863853281274, + "acc_norm": 0.36524822695035464, + "acc_norm_stderr": 0.028723863853281274 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.49107142857142855, + "acc_stderr": 0.04745033255489123, + "acc_norm": 0.49107142857142855, + "acc_norm_stderr": 0.04745033255489123 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4305555555555556, + "acc_stderr": 0.03376922151252335, + "acc_norm": 0.4305555555555556, + "acc_norm_stderr": 0.03376922151252335 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.3027932960893855, + "acc_stderr": 0.015366860386397112, + "acc_norm": 0.3027932960893855, + "acc_norm_stderr": 0.015366860386397112 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + 
"acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.67, + "acc_stderr": 0.047258156262526094, + "acc_norm": 0.67, + "acc_norm_stderr": 0.047258156262526094 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3897058823529412, + "acc_stderr": 0.0296246635811597, + "acc_norm": 0.3897058823529412, + "acc_norm_stderr": 0.0296246635811597 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6448979591836734, + "acc_stderr": 0.030635655150387634, + "acc_norm": 0.6448979591836734, + "acc_norm_stderr": 0.030635655150387634 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6877637130801688, + "acc_stderr": 0.030165137867847015, + "acc_norm": 0.6877637130801688, + "acc_norm_stderr": 0.030165137867847015 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.39308996088657105, + "acc_stderr": 0.012474899613873956, + "acc_norm": 0.39308996088657105, + "acc_norm_stderr": 0.012474899613873956 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5931372549019608, + "acc_stderr": 0.03447891136353382, + "acc_norm": 0.5931372549019608, + "acc_norm_stderr": 0.03447891136353382 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6909090909090909, + "acc_stderr": 0.036085410115739666, + "acc_norm": 0.6909090909090909, + "acc_norm_stderr": 0.036085410115739666 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3525091799265606, + "mc1_stderr": 0.016724646380756544, + "mc2": 0.5268936518113212, + "mc2_stderr": 0.0158017010563356 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.49586776859504134, + "acc_stderr": 0.017189767032130817, + "acc_norm": 0.5324675324675324, + "acc_norm_stderr": 0.017154073716682865 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + 
"harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + 
"harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "mlabonne/Daredevil-8B-abliterated", + "model_sha": "034c0ce8ceeba075d1dff2bac1b113a017c79390", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/mlabonne/Llama-3-8B-Instruct-abliterated-dpomix/result_2024-05-28 05:34:19.json b/mlabonne/Llama-3-8B-Instruct-abliterated-dpomix/result_2024-05-28 05:34:19.json new file mode 100644 index 0000000000000000000000000000000000000000..bf2c9e38533989bb6cb17e9c3061b41644c4bdc5 --- /dev/null +++ b/mlabonne/Llama-3-8B-Instruct-abliterated-dpomix/result_2024-05-28 05:34:19.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.371160409556314, + "acc_stderr": 0.01411797190114282, + "acc_norm": 0.44112627986348124, + "acc_norm_stderr": 0.014509747749064666 + }, + "harness|ko_hellaswag|10": { + "acc": 0.36387173869747064, + "acc_stderr": 0.004801290954387073, + "acc_norm": 0.4718183628759211, + "acc_norm_stderr": 0.0049818492912996545 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.038110796698335316, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.038110796698335316 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5922330097087378, + "acc_stderr": 0.04865777570410769, + "acc_norm": 0.5922330097087378, + "acc_norm_stderr": 0.04865777570410769 + }, + 
"harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4482758620689655, + "acc_stderr": 0.01778403453499246, + "acc_norm": 0.4482758620689655, + "acc_norm_stderr": 0.01778403453499246 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.362962962962963, + "acc_stderr": 0.04153948404742398, + "acc_norm": 0.362962962962963, + "acc_norm_stderr": 0.04153948404742398 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.46808510638297873, + "acc_stderr": 0.03261936918467382, + "acc_norm": 0.46808510638297873, + "acc_norm_stderr": 0.03261936918467382 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3674698795180723, + "acc_stderr": 0.03753267402120574, + "acc_norm": 0.3674698795180723, + "acc_norm_stderr": 0.03753267402120574 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5594855305466238, + "acc_stderr": 0.028196400574197422, + "acc_norm": 0.5594855305466238, + "acc_norm_stderr": 0.028196400574197422 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.49327354260089684, + "acc_stderr": 0.03355476596234355, + "acc_norm": 0.49327354260089684, + "acc_norm_stderr": 0.03355476596234355 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5114503816793893, + "acc_stderr": 0.043841400240780176, + "acc_norm": 0.5114503816793893, + "acc_norm_stderr": 0.043841400240780176 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5202020202020202, + "acc_stderr": 0.03559443565563919, + "acc_norm": 0.5202020202020202, + "acc_norm_stderr": 0.03559443565563919 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5448275862068965, + "acc_stderr": 0.04149886942192118, + "acc_norm": 0.5448275862068965, + "acc_norm_stderr": 0.04149886942192118 + }, 
+ "harness|ko_mmlu_college_physics|5": { + "acc": 0.3627450980392157, + "acc_stderr": 0.04784060704105652, + "acc_norm": 0.3627450980392157, + "acc_norm_stderr": 0.04784060704105652 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5462184873949579, + "acc_stderr": 0.032339434681820885, + "acc_norm": 0.5462184873949579, + "acc_norm_stderr": 0.032339434681820885 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.517948717948718, + "acc_stderr": 0.02533466708095489, + "acc_norm": 0.517948717948718, + "acc_norm_stderr": 0.02533466708095489 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.57, + "acc_stderr": 0.04975698519562429, + "acc_norm": 0.57, + "acc_norm_stderr": 0.04975698519562429 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6388888888888888, + "acc_stderr": 0.04643454608906275, + "acc_norm": 0.6388888888888888, + "acc_norm_stderr": 0.04643454608906275 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.45320197044334976, + "acc_stderr": 0.035025446508458714, + "acc_norm": 0.45320197044334976, + "acc_norm_stderr": 0.035025446508458714 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.535483870967742, + "acc_stderr": 0.028372287797962952, + "acc_norm": 0.535483870967742, + "acc_norm_stderr": 0.028372287797962952 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7478632478632479, + "acc_stderr": 0.028447965476231022, + "acc_norm": 0.7478632478632479, + "acc_norm_stderr": 0.028447965476231022 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5245283018867924, + "acc_stderr": 0.030735822206205615, + "acc_norm": 0.5245283018867924, + "acc_norm_stderr": 0.030735822206205615 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4818181818181818, + "acc_stderr": 0.04785964010794916, + "acc_norm": 0.4818181818181818, + 
"acc_norm_stderr": 0.04785964010794916 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.36666666666666664, + "acc_stderr": 0.029381620726465073, + "acc_norm": 0.36666666666666664, + "acc_norm_stderr": 0.029381620726465073 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3443708609271523, + "acc_stderr": 0.038796870240733264, + "acc_norm": 0.3443708609271523, + "acc_norm_stderr": 0.038796870240733264 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6616915422885572, + "acc_stderr": 0.033455630703391914, + "acc_norm": 0.6616915422885572, + "acc_norm_stderr": 0.033455630703391914 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4508670520231214, + "acc_stderr": 0.037940126746970296, + "acc_norm": 0.4508670520231214, + "acc_norm_stderr": 0.037940126746970296 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3783068783068783, + "acc_stderr": 0.024976954053155243, + "acc_norm": 0.3783068783068783, + "acc_norm_stderr": 0.024976954053155243 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.04174752578923185, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.04174752578923185 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.69, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.69, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5404624277456648, + "acc_stderr": 0.02683080599895224, + "acc_norm": 0.5404624277456648, + "acc_norm_stderr": 0.02683080599895224 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.44785276073619634, + "acc_stderr": 0.03906947479456601, + "acc_norm": 0.44785276073619634, + "acc_norm_stderr": 0.03906947479456601 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5092592592592593, + "acc_stderr": 0.027815973433878014, + 
"acc_norm": 0.5092592592592593, + "acc_norm_stderr": 0.027815973433878014 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5595854922279793, + "acc_stderr": 0.03582724530036093, + "acc_norm": 0.5595854922279793, + "acc_norm_stderr": 0.03582724530036093 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3508771929824561, + "acc_stderr": 0.044895393502706986, + "acc_norm": 0.3508771929824561, + "acc_norm_stderr": 0.044895393502706986 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6, + "acc_stderr": 0.021004201260420075, + "acc_norm": 0.6, + "acc_norm_stderr": 0.021004201260420075 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.044444444444444495, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.044444444444444495 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5261437908496732, + "acc_stderr": 0.028590752958852394, + "acc_norm": 0.5261437908496732, + "acc_norm_stderr": 0.028590752958852394 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.63, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.63, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6776859504132231, + "acc_stderr": 0.04266416363352167, + "acc_norm": 0.6776859504132231, + "acc_norm_stderr": 0.04266416363352167 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4934210526315789, + "acc_stderr": 0.040685900502249704, + "acc_norm": 0.4934210526315789, + "acc_norm_stderr": 0.040685900502249704 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.02010258389588718, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.02010258389588718 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3546099290780142, + "acc_stderr": 
0.028538650028878638, + "acc_norm": 0.3546099290780142, + "acc_norm_stderr": 0.028538650028878638 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4375, + "acc_stderr": 0.04708567521880525, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.04708567521880525 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4351851851851852, + "acc_stderr": 0.03381200005643526, + "acc_norm": 0.4351851851851852, + "acc_norm_stderr": 0.03381200005643526 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.3229050279329609, + "acc_stderr": 0.015638440380241488, + "acc_norm": 0.3229050279329609, + "acc_norm_stderr": 0.015638440380241488 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.7, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.7, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3897058823529412, + "acc_stderr": 0.029624663581159696, + "acc_norm": 0.3897058823529412, + "acc_norm_stderr": 0.029624663581159696 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6489795918367347, + "acc_stderr": 0.030555316755573644, + "acc_norm": 0.6489795918367347, + "acc_norm_stderr": 0.030555316755573644 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6497890295358649, + "acc_stderr": 0.031052391937584346, + "acc_norm": 0.6497890295358649, + "acc_norm_stderr": 0.031052391937584346 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3663624511082138, + "acc_stderr": 0.012305658346838437, + "acc_norm": 0.3663624511082138, + "acc_norm_stderr": 0.012305658346838437 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5588235294117647, + "acc_stderr": 0.034849415144292316, + "acc_norm": 0.5588235294117647, + "acc_norm_stderr": 0.034849415144292316 + }, + 
"harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6424242424242425, + "acc_stderr": 0.03742597043806587, + "acc_norm": 0.6424242424242425, + "acc_norm_stderr": 0.03742597043806587 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.31701346389228885, + "mc1_stderr": 0.016289203374403392, + "mc2": 0.4961913548567858, + "mc2_stderr": 0.01584244890607769 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.46635182998819363, + "acc_stderr": 0.01715138411713187, + "acc_norm": 0.5064935064935064, + "acc_norm_stderr": 0.01718890435907731 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + 
"harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "mlabonne/Llama-3-8B-Instruct-abliterated-dpomix", + "model_sha": "09c9096973af2b03a63786ec627bfcbaf1254792", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/mlabonne/NeuralMonarch-7B/result_2024-05-15 22:18:10.json b/mlabonne/NeuralMonarch-7B/result_2024-05-15 22:18:10.json new file mode 100644 index 0000000000000000000000000000000000000000..1881c9ab9e6f6d331d9a5190118e6c37fca40cf2 --- /dev/null +++ b/mlabonne/NeuralMonarch-7B/result_2024-05-15 22:18:10.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + 
"acc": 0.3890784982935154, + "acc_stderr": 0.014247309976045607, + "acc_norm": 0.44112627986348124, + "acc_norm_stderr": 0.014509747749064664 + }, + "harness|ko_hellaswag|10": { + "acc": 0.39364668392750446, + "acc_stderr": 0.004875595792850675, + "acc_norm": 0.522405895239992, + "acc_norm_stderr": 0.00498476891232694 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.47368421052631576, + "acc_stderr": 0.03829509868994727, + "acc_norm": 0.47368421052631576, + "acc_norm_stderr": 0.03829509868994727 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5922330097087378, + "acc_stderr": 0.04865777570410769, + "acc_norm": 0.5922330097087378, + "acc_norm_stderr": 0.04865777570410769 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4776500638569604, + "acc_stderr": 0.017862091778507876, + "acc_norm": 0.4776500638569604, + "acc_norm_stderr": 0.017862091778507876 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37777777777777777, + "acc_stderr": 0.04188307537595853, + "acc_norm": 0.37777777777777777, + "acc_norm_stderr": 0.04188307537595853 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39574468085106385, + "acc_stderr": 0.03196758697835362, + "acc_norm": 0.39574468085106385, + "acc_norm_stderr": 0.03196758697835362 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3614457831325301, + "acc_stderr": 0.03740059382029319, + "acc_norm": 0.3614457831325301, + "acc_norm_stderr": 0.03740059382029319 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4340836012861736, + "acc_stderr": 0.0281502322445356, + "acc_norm": 0.4340836012861736, + "acc_norm_stderr": 0.0281502322445356 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4125560538116592, + "acc_stderr": 0.03304062175449296, + "acc_norm": 0.4125560538116592, + "acc_norm_stderr": 0.03304062175449296 + }, + "harness|ko_mmlu_human_sexuality|5": 
{ + "acc": 0.45038167938931295, + "acc_stderr": 0.04363643698524779, + "acc_norm": 0.45038167938931295, + "acc_norm_stderr": 0.04363643698524779 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5606060606060606, + "acc_stderr": 0.035360859475294805, + "acc_norm": 0.5606060606060606, + "acc_norm_stderr": 0.035360859475294805 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.47586206896551725, + "acc_stderr": 0.041618085035015295, + "acc_norm": 0.47586206896551725, + "acc_norm_stderr": 0.041618085035015295 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.042207736591714534, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.042207736591714534 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5126050420168067, + "acc_stderr": 0.03246816765752174, + "acc_norm": 0.5126050420168067, + "acc_norm_stderr": 0.03246816765752174 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.48205128205128206, + "acc_stderr": 0.025334667080954953, + "acc_norm": 0.48205128205128206, + "acc_norm_stderr": 0.025334667080954953 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3793103448275862, + "acc_stderr": 0.034139638059062345, + "acc_norm": 0.3793103448275862, + "acc_norm_stderr": 0.034139638059062345 + }, + 
"harness|ko_mmlu_high_school_biology|5": { + "acc": 0.45161290322580644, + "acc_stderr": 0.028310500348568392, + "acc_norm": 0.45161290322580644, + "acc_norm_stderr": 0.028310500348568392 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7478632478632479, + "acc_stderr": 0.02844796547623102, + "acc_norm": 0.7478632478632479, + "acc_norm_stderr": 0.02844796547623102 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.43018867924528303, + "acc_stderr": 0.030471445867183235, + "acc_norm": 0.43018867924528303, + "acc_norm_stderr": 0.030471445867183235 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4727272727272727, + "acc_stderr": 0.04782001791380063, + "acc_norm": 0.4727272727272727, + "acc_norm_stderr": 0.04782001791380063 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.32592592592592595, + "acc_stderr": 0.028578348365473082, + "acc_norm": 0.32592592592592595, + "acc_norm_stderr": 0.028578348365473082 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.25165562913907286, + "acc_stderr": 0.03543304234389985, + "acc_norm": 0.25165562913907286, + "acc_norm_stderr": 0.03543304234389985 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5621890547263682, + "acc_stderr": 0.035080801121998406, + "acc_norm": 0.5621890547263682, + "acc_norm_stderr": 0.035080801121998406 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3930635838150289, + "acc_stderr": 0.03724249595817729, + "acc_norm": 0.3930635838150289, + "acc_norm_stderr": 0.03724249595817729 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3439153439153439, + "acc_stderr": 0.024464426625596433, + "acc_norm": 0.3439153439153439, + "acc_norm_stderr": 0.024464426625596433 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3819444444444444, + "acc_stderr": 0.040629907841466674, + "acc_norm": 0.3819444444444444, + "acc_norm_stderr": 0.040629907841466674 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, 
+ "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.49710982658959535, + "acc_stderr": 0.02691864538323901, + "acc_norm": 0.49710982658959535, + "acc_norm_stderr": 0.02691864538323901 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5030674846625767, + "acc_stderr": 0.03928297078179663, + "acc_norm": 0.5030674846625767, + "acc_norm_stderr": 0.03928297078179663 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.02764847787741332, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.02764847787741332 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.49222797927461137, + "acc_stderr": 0.03608003225569654, + "acc_norm": 0.49222797927461137, + "acc_norm_stderr": 0.03608003225569654 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2982456140350877, + "acc_stderr": 0.04303684033537317, + "acc_norm": 0.2982456140350877, + "acc_norm_stderr": 0.04303684033537317 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5027522935779817, + "acc_stderr": 0.021436998359765317, + "acc_norm": 0.5027522935779817, + "acc_norm_stderr": 0.021436998359765317 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.04360314860077459, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.04360314860077459 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.48366013071895425, + "acc_stderr": 0.028614624752805407, + "acc_norm": 0.48366013071895425, + "acc_norm_stderr": 0.028614624752805407 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.5, + "acc_stderr": 
0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5289256198347108, + "acc_stderr": 0.04556710331269498, + "acc_norm": 0.5289256198347108, + "acc_norm_stderr": 0.04556710331269498 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.42105263157894735, + "acc_stderr": 0.04017901275981748, + "acc_norm": 0.42105263157894735, + "acc_norm_stderr": 0.04017901275981748 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.40032679738562094, + "acc_stderr": 0.019821843688271758, + "acc_norm": 0.40032679738562094, + "acc_norm_stderr": 0.019821843688271758 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3900709219858156, + "acc_stderr": 0.02909767559946393, + "acc_norm": 0.3900709219858156, + "acc_norm_stderr": 0.02909767559946393 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4107142857142857, + "acc_stderr": 0.04669510663875191, + "acc_norm": 0.4107142857142857, + "acc_norm_stderr": 0.04669510663875191 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.39814814814814814, + "acc_stderr": 0.033384734032074016, + "acc_norm": 0.39814814814814814, + "acc_norm_stderr": 0.033384734032074016 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.30614525139664805, + "acc_stderr": 0.01541449448790321, + "acc_norm": 0.30614525139664805, + "acc_norm_stderr": 0.01541449448790321 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.67, + "acc_stderr": 0.047258156262526094, + "acc_norm": 0.67, + "acc_norm_stderr": 0.047258156262526094 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.35661764705882354, + "acc_stderr": 0.029097209568411945, + "acc_norm": 0.35661764705882354, + "acc_norm_stderr": 0.029097209568411945 + }, + 
"harness|ko_mmlu_security_studies|5": { + "acc": 0.5142857142857142, + "acc_stderr": 0.03199615232806286, + "acc_norm": 0.5142857142857142, + "acc_norm_stderr": 0.03199615232806286 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5907172995780591, + "acc_stderr": 0.03200704183359591, + "acc_norm": 0.5907172995780591, + "acc_norm_stderr": 0.03200704183359591 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3468057366362451, + "acc_stderr": 0.012156071332318708, + "acc_norm": 0.3468057366362451, + "acc_norm_stderr": 0.012156071332318708 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4117647058823529, + "acc_stderr": 0.03454236585380609, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.03454236585380609 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.41818181818181815, + "acc_stderr": 0.03851716319398395, + "acc_norm": 0.41818181818181815, + "acc_norm_stderr": 0.03851716319398395 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.42717258261933905, + "mc1_stderr": 0.017316834410963936, + "mc2": 0.5961600702020854, + "mc2_stderr": 0.016357164714892618 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4214876033057851, + "acc_stderr": 0.01697710193260152, + "acc_norm": 0.42266824085005905, + "acc_norm_stderr": 0.016983506079577604 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + 
"harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + 
"harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "mlabonne/NeuralMonarch-7B", + "model_sha": "bebae99e187a1ab3b009b2736a99a32cdc178c8f", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/mncai/Mistral-7B-v0.1-alpaca-1k/result_2023-10-22 05:59:52.json b/mncai/Mistral-7B-v0.1-alpaca-1k/result_2023-10-22 05:59:52.json new file mode 100644 index 0000000000000000000000000000000000000000..8d71e8c389d9c24c90f1b3fdb960d6e734884065 --- /dev/null +++ b/mncai/Mistral-7B-v0.1-alpaca-1k/result_2023-10-22 05:59:52.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3046075085324232, + "acc_stderr": 0.013449522109932487, + "acc_norm": 0.3438566552901024, + "acc_norm_stderr": 0.013880644570156222 + }, + "harness|ko_hellaswag|10": { + "acc": 0.36875124477195775, + "acc_stderr": 0.0048148030984368154, + "acc_norm": 0.4697271459868552, + "acc_norm_stderr": 0.00498062728714758 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.52046783625731, + "acc_stderr": 0.0383161053282193, + "acc_norm": 0.52046783625731, + "acc_norm_stderr": 0.0383161053282193 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5436893203883495, + "acc_stderr": 0.049318019942204146, + "acc_norm": 0.5436893203883495, + "acc_norm_stderr": 0.049318019942204146 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4648786717752235, + "acc_stderr": 0.01783579880629064, + "acc_norm": 0.4648786717752235, + "acc_norm_stderr": 0.01783579880629064 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.362962962962963, + "acc_stderr": 0.041539484047424, + "acc_norm": 0.362962962962963, + "acc_norm_stderr": 0.041539484047424 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + 
"harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.42127659574468085, + "acc_stderr": 0.03227834510146267, + "acc_norm": 0.42127659574468085, + "acc_norm_stderr": 0.03227834510146267 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.40963855421686746, + "acc_stderr": 0.03828401115079021, + "acc_norm": 0.40963855421686746, + "acc_norm_stderr": 0.03828401115079021 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4340836012861736, + "acc_stderr": 0.0281502322445356, + "acc_norm": 0.4340836012861736, + "acc_norm_stderr": 0.0281502322445356 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.43946188340807174, + "acc_stderr": 0.03331092511038179, + "acc_norm": 0.43946188340807174, + "acc_norm_stderr": 0.03331092511038179 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4580152671755725, + "acc_stderr": 0.04369802690578757, + "acc_norm": 0.4580152671755725, + "acc_norm_stderr": 0.04369802690578757 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5151515151515151, + "acc_stderr": 0.0356071651653106, + "acc_norm": 0.5151515151515151, + "acc_norm_stderr": 0.0356071651653106 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5103448275862069, + "acc_stderr": 0.04165774775728763, + "acc_norm": 0.5103448275862069, + "acc_norm_stderr": 0.04165774775728763 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.17647058823529413, + "acc_stderr": 0.03793281185307809, + "acc_norm": 0.17647058823529413, + "acc_norm_stderr": 0.03793281185307809 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4831932773109244, + "acc_stderr": 0.03246013680375308, + "acc_norm": 0.4831932773109244, + "acc_norm_stderr": 0.03246013680375308 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4307692307692308, + "acc_stderr": 0.025106820660539743, + "acc_norm": 
0.4307692307692308, + "acc_norm_stderr": 0.025106820660539743 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.43842364532019706, + "acc_stderr": 0.03491207857486518, + "acc_norm": 0.43842364532019706, + "acc_norm_stderr": 0.03491207857486518 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.45483870967741935, + "acc_stderr": 0.028327743091561053, + "acc_norm": 0.45483870967741935, + "acc_norm_stderr": 0.028327743091561053 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7094017094017094, + "acc_stderr": 0.029745048572674078, + "acc_norm": 0.7094017094017094, + "acc_norm_stderr": 0.029745048572674078 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4075471698113208, + "acc_stderr": 0.0302422338008545, + "acc_norm": 0.4075471698113208, + "acc_norm_stderr": 0.0302422338008545 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5272727272727272, + "acc_stderr": 0.04782001791380061, + "acc_norm": 0.5272727272727272, + "acc_norm_stderr": 0.04782001791380061 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.34814814814814815, + "acc_stderr": 0.029045600290616258, + "acc_norm": 0.34814814814814815, + "acc_norm_stderr": 0.029045600290616258 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.03710185726119995, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.03710185726119995 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6019900497512438, + "acc_stderr": 
0.034611994290400135, + "acc_norm": 0.6019900497512438, + "acc_norm_stderr": 0.034611994290400135 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3815028901734104, + "acc_stderr": 0.03703851193099521, + "acc_norm": 0.3815028901734104, + "acc_norm_stderr": 0.03703851193099521 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.335978835978836, + "acc_stderr": 0.024326310529149138, + "acc_norm": 0.335978835978836, + "acc_norm_stderr": 0.024326310529149138 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2847222222222222, + "acc_stderr": 0.037738099906869334, + "acc_norm": 0.2847222222222222, + "acc_norm_stderr": 0.037738099906869334 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.56, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.56, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.49421965317919075, + "acc_stderr": 0.026917296179149116, + "acc_norm": 0.49421965317919075, + "acc_norm_stderr": 0.026917296179149116 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4601226993865031, + "acc_stderr": 0.03915857291436971, + "acc_norm": 0.4601226993865031, + "acc_norm_stderr": 0.03915857291436971 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.43209876543209874, + "acc_stderr": 0.027563010971606676, + "acc_norm": 0.43209876543209874, + "acc_norm_stderr": 0.027563010971606676 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.47150259067357514, + "acc_stderr": 0.036025735712884414, + "acc_norm": 0.47150259067357514, + "acc_norm_stderr": 0.036025735712884414 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 
0.39473684210526316, + "acc_stderr": 0.04598188057816542, + "acc_norm": 0.39473684210526316, + "acc_norm_stderr": 0.04598188057816542 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.45504587155963305, + "acc_stderr": 0.021350503090925167, + "acc_norm": 0.45504587155963305, + "acc_norm_stderr": 0.021350503090925167 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.041349130183033156, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.041349130183033156 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5, + "acc_stderr": 0.028629916715693413, + "acc_norm": 0.5, + "acc_norm_stderr": 0.028629916715693413 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6446280991735537, + "acc_stderr": 0.0436923632657398, + "acc_norm": 0.6446280991735537, + "acc_norm_stderr": 0.0436923632657398 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.34868421052631576, + "acc_stderr": 0.038781398887976104, + "acc_norm": 0.34868421052631576, + "acc_norm_stderr": 0.038781398887976104 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3790849673202614, + "acc_stderr": 0.01962744474841223, + "acc_norm": 0.3790849673202614, + "acc_norm_stderr": 0.01962744474841223 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3617021276595745, + "acc_stderr": 0.028663820147199492, + "acc_norm": 0.3617021276595745, + "acc_norm_stderr": 0.028663820147199492 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.38392857142857145, + "acc_stderr": 0.04616143075028547, + "acc_norm": 0.38392857142857145, + "acc_norm_stderr": 0.04616143075028547 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.37962962962962965, + "acc_stderr": 0.03309682581119035, + "acc_norm": 0.37962962962962965, + "acc_norm_stderr": 0.03309682581119035 + }, + 
"harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.014333522059217892, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.014333522059217892 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.62, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.62, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3382352941176471, + "acc_stderr": 0.028739328513983583, + "acc_norm": 0.3382352941176471, + "acc_norm_stderr": 0.028739328513983583 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.46938775510204084, + "acc_stderr": 0.031949171367580624, + "acc_norm": 0.46938775510204084, + "acc_norm_stderr": 0.031949171367580624 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5063291139240507, + "acc_stderr": 0.0325446201076786, + "acc_norm": 0.5063291139240507, + "acc_norm_stderr": 0.0325446201076786 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2985658409387223, + "acc_stderr": 0.011688060141794231, + "acc_norm": 0.2985658409387223, + "acc_norm_stderr": 0.011688060141794231 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.03460228327239171, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.03460228327239171 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.45454545454545453, + "acc_stderr": 0.038881769216741, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.038881769216741 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3108935128518972, + "mc1_stderr": 0.016203316673559693, + "mc2": 0.48747691141114763, + "mc2_stderr": 0.015615664106933899 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4935064935064935, + "acc_stderr": 0.017188904359077318, + "acc_norm": 0.5301062573789846, 
+ "acc_norm_stderr": 0.017159163590170216 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 
1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "mncai/Mistral-7B-v0.1-alpaca-1k", + "model_sha": "97a2cb89d4f19712842c4e29c44e1b7821905fac", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/mncai/Mistral-7B-v0.1-combine-1k/result_2023-10-22 06:02:36.json b/mncai/Mistral-7B-v0.1-combine-1k/result_2023-10-22 06:02:36.json new file mode 100644 index 0000000000000000000000000000000000000000..deb0531f4f37afdfe292df2ced5b71341667e647 --- /dev/null +++ b/mncai/Mistral-7B-v0.1-combine-1k/result_2023-10-22 06:02:36.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.30802047781569963, + "acc_stderr": 0.01349142951729204, + "acc_norm": 0.3515358361774744, + "acc_norm_stderr": 0.013952413699600938 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3610834495120494, + "acc_stderr": 0.004793330525656211, + "acc_norm": 0.45120493925512845, + "acc_norm_stderr": 0.004965963647210315 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.2573099415204678, + "acc_stderr": 0.03352799844161865, + "acc_norm": 0.2573099415204678, + "acc_norm_stderr": 0.03352799844161865 + }, + 
"harness|ko_mmlu_management|5": { + "acc": 0.4077669902912621, + "acc_stderr": 0.048657775704107696, + "acc_norm": 0.4077669902912621, + "acc_norm_stderr": 0.048657775704107696 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2503192848020434, + "acc_stderr": 0.01549108895149458, + "acc_norm": 0.2503192848020434, + "acc_norm_stderr": 0.01549108895149458 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.23703703703703705, + "acc_stderr": 0.03673731683969506, + "acc_norm": 0.23703703703703705, + "acc_norm_stderr": 0.03673731683969506 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.20425531914893616, + "acc_stderr": 0.026355158413349424, + "acc_norm": 0.20425531914893616, + "acc_norm_stderr": 0.026355158413349424 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.19879518072289157, + "acc_stderr": 0.031069390260789396, + "acc_norm": 0.19879518072289157, + "acc_norm_stderr": 0.031069390260789396 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.24115755627009647, + "acc_stderr": 0.024296594034763426, + "acc_norm": 0.24115755627009647, + "acc_norm_stderr": 0.024296594034763426 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.1210762331838565, + "acc_stderr": 0.021894174113185737, + "acc_norm": 0.1210762331838565, + "acc_norm_stderr": 0.021894174113185737 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3053435114503817, + "acc_stderr": 0.04039314978724561, + "acc_norm": 0.3053435114503817, + "acc_norm_stderr": 0.04039314978724561 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909284, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909284 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3686868686868687, + "acc_stderr": 0.034373055019806184, + "acc_norm": 0.3686868686868687, + "acc_norm_stderr": 0.034373055019806184 + 
}, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2482758620689655, + "acc_stderr": 0.036001056927277716, + "acc_norm": 0.2482758620689655, + "acc_norm_stderr": 0.036001056927277716 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.4019607843137255, + "acc_stderr": 0.04878608714466996, + "acc_norm": 0.4019607843137255, + "acc_norm_stderr": 0.04878608714466996 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3487394957983193, + "acc_stderr": 0.030956636328566548, + "acc_norm": 0.3487394957983193, + "acc_norm_stderr": 0.030956636328566548 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.36666666666666664, + "acc_stderr": 0.024433016466052452, + "acc_norm": 0.36666666666666664, + "acc_norm_stderr": 0.024433016466052452 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.22, + "acc_stderr": 0.0416333199893227, + "acc_norm": 0.22, + "acc_norm_stderr": 0.0416333199893227 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.19, + "acc_stderr": 0.03942772444036623, + "acc_norm": 0.19, + "acc_norm_stderr": 0.03942772444036623 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.23148148148148148, + "acc_stderr": 0.04077494709252626, + "acc_norm": 0.23148148148148148, + "acc_norm_stderr": 0.04077494709252626 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.28078817733990147, + "acc_stderr": 0.0316185633535861, + "acc_norm": 0.28078817733990147, + "acc_norm_stderr": 0.0316185633535861 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3161290322580645, + "acc_stderr": 0.026450874489042764, + "acc_norm": 0.3161290322580645, + "acc_norm_stderr": 0.026450874489042764 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2094017094017094, + "acc_stderr": 0.026655699653922737, + "acc_norm": 0.2094017094017094, + "acc_norm_stderr": 0.026655699653922737 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3018867924528302, + "acc_stderr": 0.02825420034443866, + "acc_norm": 
0.3018867924528302, + "acc_norm_stderr": 0.02825420034443866 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.24545454545454545, + "acc_stderr": 0.041220665028782834, + "acc_norm": 0.24545454545454545, + "acc_norm_stderr": 0.041220665028782834 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.02684205787383371, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.02684205787383371 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33112582781456956, + "acc_stderr": 0.038425817186598696, + "acc_norm": 0.33112582781456956, + "acc_norm_stderr": 0.038425817186598696 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.2736318407960199, + "acc_stderr": 0.03152439186555401, + "acc_norm": 0.2736318407960199, + "acc_norm_stderr": 0.03152439186555401 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.34104046242774566, + "acc_stderr": 0.036146654241808254, + "acc_norm": 0.34104046242774566, + "acc_norm_stderr": 0.036146654241808254 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2671957671957672, + "acc_stderr": 0.02278967314577656, + "acc_norm": 0.2671957671957672, + "acc_norm_stderr": 0.02278967314577656 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2638888888888889, + "acc_stderr": 0.03685651095897532, + "acc_norm": 0.2638888888888889, + "acc_norm_stderr": 0.03685651095897532 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.22832369942196531, + "acc_stderr": 0.022598703804321624, + "acc_norm": 0.22832369942196531, + "acc_norm_stderr": 0.022598703804321624 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2331288343558282, + 
"acc_stderr": 0.0332201579577674, + "acc_norm": 0.2331288343558282, + "acc_norm_stderr": 0.0332201579577674 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2345679012345679, + "acc_stderr": 0.02357688174400572, + "acc_norm": 0.2345679012345679, + "acc_norm_stderr": 0.02357688174400572 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.37305699481865284, + "acc_stderr": 0.03490205592048574, + "acc_norm": 0.37305699481865284, + "acc_norm_stderr": 0.03490205592048574 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.039994238792813365, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.039994238792813365 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3559633027522936, + "acc_stderr": 0.020528559278244214, + "acc_norm": 0.3559633027522936, + "acc_norm_stderr": 0.020528559278244214 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.36507936507936506, + "acc_stderr": 0.04306241259127153, + "acc_norm": 0.36507936507936506, + "acc_norm_stderr": 0.04306241259127153 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.026090162504279053, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.026090162504279053 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.14049586776859505, + "acc_stderr": 0.031722334260021585, + "acc_norm": 0.14049586776859505, + "acc_norm_stderr": 0.031722334260021585 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.34210526315789475, + "acc_stderr": 0.03860731599316091, + "acc_norm": 0.34210526315789475, + "acc_norm_stderr": 0.03860731599316091 + }, + 
"harness|ko_mmlu_professional_psychology|5": { + "acc": 0.22058823529411764, + "acc_stderr": 0.016774672365468517, + "acc_norm": 0.22058823529411764, + "acc_norm_stderr": 0.016774672365468517 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.24468085106382978, + "acc_stderr": 0.02564555362226673, + "acc_norm": 0.24468085106382978, + "acc_norm_stderr": 0.02564555362226673 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.16964285714285715, + "acc_stderr": 0.0356236785009539, + "acc_norm": 0.16964285714285715, + "acc_norm_stderr": 0.0356236785009539 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.0340470532865388, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.0340470532865388 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27262569832402234, + "acc_stderr": 0.014893391735249608, + "acc_norm": 0.27262569832402234, + "acc_norm_stderr": 0.014893391735249608 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4485294117647059, + "acc_stderr": 0.030211479609121593, + "acc_norm": 0.4485294117647059, + "acc_norm_stderr": 0.030211479609121593 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4, + "acc_stderr": 0.03136250240935892, + "acc_norm": 0.4, + "acc_norm_stderr": 0.03136250240935892 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.20253164556962025, + "acc_stderr": 0.026160568246601464, + "acc_norm": 0.20253164556962025, + "acc_norm_stderr": 0.026160568246601464 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.24511082138200782, + "acc_stderr": 0.010986307870045509, + "acc_norm": 
0.24511082138200782, + "acc_norm_stderr": 0.010986307870045509 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.030587591351604246, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.030587591351604246 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2545454545454545, + "acc_stderr": 0.03401506715249039, + "acc_norm": 0.2545454545454545, + "acc_norm_stderr": 0.03401506715249039 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2962056303549572, + "mc1_stderr": 0.015983595101811392, + "mc2": 0.4616568963266555, + "mc2_stderr": 0.01577378737316958 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3659976387249115, + "acc_stderr": 0.01656148966489569, + "acc_norm": 0.44037780401416765, + "acc_norm_stderr": 0.017067699774312987 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "mncai/Mistral-7B-v0.1-combine-1k", + "model_sha": "0f7abf5c07a7f3add4c89c9e3525f29ab89be562", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/mncai/Mistral-7B-v0.1-orca-1k/result_2023-10-22 04:26:18.json b/mncai/Mistral-7B-v0.1-orca-1k/result_2023-10-22 04:26:18.json new file 
mode 100644 index 0000000000000000000000000000000000000000..45cf736fa59311f9ba26684df5c57087bb34364d --- /dev/null +++ b/mncai/Mistral-7B-v0.1-orca-1k/result_2023-10-22 04:26:18.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.31313993174061433, + "acc_stderr": 0.013552671543623494, + "acc_norm": 0.3575085324232082, + "acc_norm_stderr": 0.014005494275916573 + }, + "harness|ko_hellaswag|10": { + "acc": 0.37064329814777935, + "acc_stderr": 0.004819899945342492, + "acc_norm": 0.4643497311292571, + "acc_norm_stderr": 0.004977081808179427 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.39766081871345027, + "acc_stderr": 0.0375363895576169, + "acc_norm": 0.39766081871345027, + "acc_norm_stderr": 0.0375363895576169 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.34951456310679613, + "acc_stderr": 0.04721188506097173, + "acc_norm": 0.34951456310679613, + "acc_norm_stderr": 0.04721188506097173 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.41507024265644954, + "acc_stderr": 0.01762013700365527, + "acc_norm": 0.41507024265644954, + "acc_norm_stderr": 0.01762013700365527 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.31851851851851853, + "acc_stderr": 0.040247784019771096, + "acc_norm": 0.31851851851851853, + "acc_norm_stderr": 0.040247784019771096 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.33191489361702126, + "acc_stderr": 0.030783736757745643, + "acc_norm": 0.33191489361702126, + "acc_norm_stderr": 0.030783736757745643 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3072289156626506, + "acc_stderr": 0.03591566797824664, + "acc_norm": 0.3072289156626506, + "acc_norm_stderr": 0.03591566797824664 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.36012861736334406, + "acc_stderr": 0.027264297599804012, + "acc_norm": 0.36012861736334406, + 
"acc_norm_stderr": 0.027264297599804012 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4125560538116592, + "acc_stderr": 0.03304062175449296, + "acc_norm": 0.4125560538116592, + "acc_norm_stderr": 0.03304062175449296 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3511450381679389, + "acc_stderr": 0.04186445163013751, + "acc_norm": 0.3511450381679389, + "acc_norm_stderr": 0.04186445163013751 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3434343434343434, + "acc_stderr": 0.033832012232444426, + "acc_norm": 0.3434343434343434, + "acc_norm_stderr": 0.033832012232444426 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.31724137931034485, + "acc_stderr": 0.03878352372138622, + "acc_norm": 0.31724137931034485, + "acc_norm_stderr": 0.03878352372138622 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.04280105837364396, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.04280105837364396 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3445378151260504, + "acc_stderr": 0.030868682604121633, + "acc_norm": 0.3445378151260504, + "acc_norm_stderr": 0.030868682604121633 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3128205128205128, + "acc_stderr": 0.02350757902064535, + "acc_norm": 0.3128205128205128, + "acc_norm_stderr": 0.02350757902064535 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.47, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.47, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.047500773411999854, + "acc_norm": 
0.4074074074074074, + "acc_norm_stderr": 0.047500773411999854 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.35467980295566504, + "acc_stderr": 0.033661244890514495, + "acc_norm": 0.35467980295566504, + "acc_norm_stderr": 0.033661244890514495 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3, + "acc_stderr": 0.02606936229533513, + "acc_norm": 0.3, + "acc_norm_stderr": 0.02606936229533513 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6196581196581197, + "acc_stderr": 0.031804252043840985, + "acc_norm": 0.6196581196581197, + "acc_norm_stderr": 0.031804252043840985 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.30566037735849055, + "acc_stderr": 0.028353298073322663, + "acc_norm": 0.30566037735849055, + "acc_norm_stderr": 0.028353298073322663 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.37272727272727274, + "acc_stderr": 0.04631381319425464, + "acc_norm": 0.37272727272727274, + "acc_norm_stderr": 0.04631381319425464 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2851851851851852, + "acc_stderr": 0.027528599210340496, + "acc_norm": 0.2851851851851852, + "acc_norm_stderr": 0.027528599210340496 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.32450331125827814, + "acc_stderr": 0.038227469376587525, + "acc_norm": 0.32450331125827814, + "acc_norm_stderr": 0.038227469376587525 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.373134328358209, + "acc_stderr": 0.034198326081760065, + "acc_norm": 0.373134328358209, + "acc_norm_stderr": 0.034198326081760065 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2774566473988439, + "acc_stderr": 0.03414014007044036, + "acc_norm": 0.2774566473988439, + "acc_norm_stderr": 0.03414014007044036 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.02391998416404774, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.02391998416404774 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 
0.2986111111111111, + "acc_stderr": 0.03827052357950756, + "acc_norm": 0.2986111111111111, + "acc_norm_stderr": 0.03827052357950756 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.23, + "acc_stderr": 0.042295258468165065, + "acc_norm": 0.23, + "acc_norm_stderr": 0.042295258468165065 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.36416184971098264, + "acc_stderr": 0.025906632631016127, + "acc_norm": 0.36416184971098264, + "acc_norm_stderr": 0.025906632631016127 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3312883435582822, + "acc_stderr": 0.03697983910025588, + "acc_norm": 0.3312883435582822, + "acc_norm_stderr": 0.03697983910025588 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3765432098765432, + "acc_stderr": 0.02695934451874778, + "acc_norm": 0.3765432098765432, + "acc_norm_stderr": 0.02695934451874778 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.3316062176165803, + "acc_stderr": 0.03397636541089116, + "acc_norm": 0.3316062176165803, + "acc_norm_stderr": 0.03397636541089116 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2982456140350877, + "acc_stderr": 0.04303684033537315, + "acc_norm": 0.2982456140350877, + "acc_norm_stderr": 0.04303684033537315 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3321100917431193, + "acc_stderr": 0.020192682985423337, + "acc_norm": 0.3321100917431193, + "acc_norm_stderr": 0.020192682985423337 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.18253968253968253, + "acc_stderr": 0.034550710191021475, + "acc_norm": 0.18253968253968253, + "acc_norm_stderr": 0.034550710191021475 + }, + "harness|ko_mmlu_nutrition|5": { + 
"acc": 0.33986928104575165, + "acc_stderr": 0.027121956071388852, + "acc_norm": 0.33986928104575165, + "acc_norm_stderr": 0.027121956071388852 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5702479338842975, + "acc_stderr": 0.04519082021319771, + "acc_norm": 0.5702479338842975, + "acc_norm_stderr": 0.04519082021319771 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.28289473684210525, + "acc_stderr": 0.03665349695640767, + "acc_norm": 0.28289473684210525, + "acc_norm_stderr": 0.03665349695640767 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3088235294117647, + "acc_stderr": 0.01869085027359529, + "acc_norm": 0.3088235294117647, + "acc_norm_stderr": 0.01869085027359529 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.29432624113475175, + "acc_stderr": 0.02718712701150381, + "acc_norm": 0.29432624113475175, + "acc_norm_stderr": 0.02718712701150381 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.04547960999764376, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.04547960999764376 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.25, + "acc_stderr": 0.029531221160930918, + "acc_norm": 0.25, + "acc_norm_stderr": 0.029531221160930918 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.26145251396648045, + "acc_stderr": 0.014696599650364548, + "acc_norm": 0.26145251396648045, + "acc_norm_stderr": 0.014696599650364548 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 
0.2647058823529412, + "acc_stderr": 0.026799562024887674, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.026799562024887674 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.27346938775510204, + "acc_stderr": 0.028535560337128438, + "acc_norm": 0.27346938775510204, + "acc_norm_stderr": 0.028535560337128438 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.34177215189873417, + "acc_stderr": 0.030874537537553617, + "acc_norm": 0.34177215189873417, + "acc_norm_stderr": 0.030874537537553617 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.26792698826597133, + "acc_stderr": 0.011311347690633885, + "acc_norm": 0.26792698826597133, + "acc_norm_stderr": 0.011311347690633885 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.28921568627450983, + "acc_stderr": 0.03182231867647553, + "acc_norm": 0.28921568627450983, + "acc_norm_stderr": 0.03182231867647553 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.32727272727272727, + "acc_stderr": 0.03663974994391244, + "acc_norm": 0.32727272727272727, + "acc_norm_stderr": 0.03663974994391244 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2717258261933905, + "mc1_stderr": 0.015572840452875835, + "mc2": 0.4450037389871468, + "mc2_stderr": 0.01574377596952645 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2632821723730815, + "acc_stderr": 0.015141752199573208, + "acc_norm": 0.3624557260920897, + "acc_norm_stderr": 0.016527131240453716 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + 
"harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + 
"harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "mncai/Mistral-7B-v0.1-orca-1k", + "model_sha": "3bfedee0d952da852fefa84e70f6373174a1deaf", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/mncai/Mistral-7B-v0.1-orca-2k/result_2023-10-22 06:00:38.json b/mncai/Mistral-7B-v0.1-orca-2k/result_2023-10-22 06:00:38.json new file mode 100644 index 0000000000000000000000000000000000000000..4400fe31d129677847d2962c4c36755bd592bd14 --- /dev/null +++ b/mncai/Mistral-7B-v0.1-orca-2k/result_2023-10-22 06:00:38.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3174061433447099, + "acc_stderr": 0.01360223908803817, + "acc_norm": 0.35580204778157, + "acc_norm_stderr": 0.013990571137918763 + }, + "harness|ko_hellaswag|10": { + "acc": 0.37293367855008963, + "acc_stderr": 0.004825963768772218, + "acc_norm": 0.4615614419438359, + "acc_norm_stderr": 0.004975014529648631 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4327485380116959, + "acc_stderr": 0.03799978644370607, + "acc_norm": 0.4327485380116959, + "acc_norm_stderr": 0.03799978644370607 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4563106796116505, + "acc_stderr": 0.04931801994220414, + "acc_norm": 0.4563106796116505, + "acc_norm_stderr": 0.04931801994220414 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4227330779054917, + "acc_stderr": 0.017665180351954062, + "acc_norm": 0.4227330779054917, + "acc_norm_stderr": 0.017665180351954062 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.35555555555555557, + "acc_stderr": 0.04135176749720386, + "acc_norm": 0.35555555555555557, + "acc_norm_stderr": 
0.04135176749720386 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3404255319148936, + "acc_stderr": 0.03097669299853443, + "acc_norm": 0.3404255319148936, + "acc_norm_stderr": 0.03097669299853443 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.40963855421686746, + "acc_stderr": 0.038284011150790206, + "acc_norm": 0.40963855421686746, + "acc_norm_stderr": 0.038284011150790206 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.42443729903536975, + "acc_stderr": 0.028071928247946205, + "acc_norm": 0.42443729903536975, + "acc_norm_stderr": 0.028071928247946205 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.40358744394618834, + "acc_stderr": 0.03292802819330315, + "acc_norm": 0.40358744394618834, + "acc_norm_stderr": 0.03292802819330315 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3969465648854962, + "acc_stderr": 0.04291135671009225, + "acc_norm": 0.3969465648854962, + "acc_norm_stderr": 0.04291135671009225 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939098, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939098 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5050505050505051, + "acc_stderr": 0.035621707606254015, + "acc_norm": 0.5050505050505051, + "acc_norm_stderr": 0.035621707606254015 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.36551724137931035, + "acc_stderr": 0.04013124195424386, + "acc_norm": 0.36551724137931035, + "acc_norm_stderr": 0.04013124195424386 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.04280105837364395, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.04280105837364395 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.39915966386554624, + "acc_stderr": 0.03181110032413926, + "acc_norm": 
0.39915966386554624, + "acc_norm_stderr": 0.03181110032413926 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3974358974358974, + "acc_stderr": 0.024811920017903836, + "acc_norm": 0.3974358974358974, + "acc_norm_stderr": 0.024811920017903836 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.56, + "acc_stderr": 0.049888765156985905, + "acc_norm": 0.56, + "acc_norm_stderr": 0.049888765156985905 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3399014778325123, + "acc_stderr": 0.033327690684107895, + "acc_norm": 0.3399014778325123, + "acc_norm_stderr": 0.033327690684107895 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4032258064516129, + "acc_stderr": 0.02790615082604114, + "acc_norm": 0.4032258064516129, + "acc_norm_stderr": 0.02790615082604114 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6452991452991453, + "acc_stderr": 0.03134250486245402, + "acc_norm": 0.6452991452991453, + "acc_norm_stderr": 0.03134250486245402 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4188679245283019, + "acc_stderr": 0.030365050829115208, + "acc_norm": 0.4188679245283019, + "acc_norm_stderr": 0.030365050829115208 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4, + "acc_stderr": 0.0469237132203465, + "acc_norm": 0.4, + "acc_norm_stderr": 0.0469237132203465 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.027940457136228412, + "acc_norm": 0.3, + "acc_norm_stderr": 0.027940457136228412 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + "acc_stderr": 0.037345356767871984, + "acc_norm": 
0.2980132450331126, + "acc_norm_stderr": 0.037345356767871984 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5074626865671642, + "acc_stderr": 0.03535140084276719, + "acc_norm": 0.5074626865671642, + "acc_norm_stderr": 0.03535140084276719 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3179190751445087, + "acc_stderr": 0.03550683989165581, + "acc_norm": 0.3179190751445087, + "acc_norm_stderr": 0.03550683989165581 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.024677862841332783, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.024677862841332783 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3819444444444444, + "acc_stderr": 0.040629907841466674, + "acc_norm": 0.3819444444444444, + "acc_norm_stderr": 0.040629907841466674 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.56, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.56, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.3959537572254335, + "acc_stderr": 0.02632981334194626, + "acc_norm": 0.3959537572254335, + "acc_norm_stderr": 0.02632981334194626 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4294478527607362, + "acc_stderr": 0.038890666191127216, + "acc_norm": 0.4294478527607362, + "acc_norm_stderr": 0.038890666191127216 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.02657148348071997, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.02657148348071997 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.43523316062176165, + "acc_stderr": 
0.03578038165008586, + "acc_norm": 0.43523316062176165, + "acc_norm_stderr": 0.03578038165008586 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.30701754385964913, + "acc_stderr": 0.04339138322579859, + "acc_norm": 0.30701754385964913, + "acc_norm_stderr": 0.04339138322579859 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.45871559633027525, + "acc_stderr": 0.02136412253388169, + "acc_norm": 0.45871559633027525, + "acc_norm_stderr": 0.02136412253388169 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.042163702135578345, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.042163702135578345 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.39215686274509803, + "acc_stderr": 0.027956046165424516, + "acc_norm": 0.39215686274509803, + "acc_norm_stderr": 0.027956046165424516 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.44, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.44, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5702479338842975, + "acc_stderr": 0.04519082021319772, + "acc_norm": 0.5702479338842975, + "acc_norm_stderr": 0.04519082021319772 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.34210526315789475, + "acc_stderr": 0.038607315993160904, + "acc_norm": 0.34210526315789475, + "acc_norm_stderr": 0.038607315993160904 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3300653594771242, + "acc_stderr": 0.019023726160724553, + "acc_norm": 0.3300653594771242, + "acc_norm_stderr": 0.019023726160724553 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.36524822695035464, + "acc_stderr": 0.028723863853281274, + "acc_norm": 0.36524822695035464, + "acc_norm_stderr": 0.028723863853281274 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25892857142857145, + "acc_stderr": 0.04157751539865629, + "acc_norm": 0.25892857142857145, + "acc_norm_stderr": 0.04157751539865629 + }, + 
"harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.03362277436608043, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.03362277436608043 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2748603351955307, + "acc_stderr": 0.014931316703220513, + "acc_norm": 0.2748603351955307, + "acc_norm_stderr": 0.014931316703220513 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.43014705882352944, + "acc_stderr": 0.030074971917302875, + "acc_norm": 0.43014705882352944, + "acc_norm_stderr": 0.030074971917302875 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3877551020408163, + "acc_stderr": 0.03119223072679566, + "acc_norm": 0.3877551020408163, + "acc_norm_stderr": 0.03119223072679566 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5021097046413502, + "acc_stderr": 0.032546938018020076, + "acc_norm": 0.5021097046413502, + "acc_norm_stderr": 0.032546938018020076 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.29139504563233376, + "acc_stderr": 0.011605720214257615, + "acc_norm": 0.29139504563233376, + "acc_norm_stderr": 0.011605720214257615 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.36764705882352944, + "acc_stderr": 0.03384132045674118, + "acc_norm": 0.36764705882352944, + "acc_norm_stderr": 0.03384132045674118 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4484848484848485, + "acc_stderr": 0.03883565977956929, + "acc_norm": 0.4484848484848485, + "acc_norm_stderr": 0.03883565977956929 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3108935128518972, + "mc1_stderr": 0.016203316673559693, + 
"mc2": 0.49057702125408326, + "mc2_stderr": 0.015830176414087203 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3246753246753247, + "acc_stderr": 0.016098883939346456, + "acc_norm": 0.3742621015348288, + "acc_norm_stderr": 0.01663791778979874 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + 
"harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "mncai/Mistral-7B-v0.1-orca-2k", + "model_sha": "60e3c4f7563c49c1c03e6e7ea873148ebc240b8d", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/mncai/Mistral-7B-v0.1-orca_platy-1k/result_2023-10-22 05:06:43.json b/mncai/Mistral-7B-v0.1-orca_platy-1k/result_2023-10-22 05:06:43.json new file mode 100644 index 0000000000000000000000000000000000000000..dd05815ebbbc868c2385d5fc4910de56a3e68214 --- /dev/null +++ b/mncai/Mistral-7B-v0.1-orca_platy-1k/result_2023-10-22 05:06:43.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2977815699658703, + "acc_stderr": 0.013363080107244487, + "acc_norm": 0.3293515358361775, + "acc_norm_stderr": 0.013734057652635474 + }, + "harness|ko_hellaswag|10": { + "acc": 0.36168094005178253, + "acc_stderr": 0.004795051037917727, + "acc_norm": 0.4523999203345947, + "acc_norm_stderr": 
0.004967118575905286 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.43859649122807015, + "acc_stderr": 0.03805797505590459, + "acc_norm": 0.43859649122807015, + "acc_norm_stderr": 0.03805797505590459 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.33980582524271846, + "acc_stderr": 0.04689765937278135, + "acc_norm": 0.33980582524271846, + "acc_norm_stderr": 0.04689765937278135 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3537675606641124, + "acc_stderr": 0.017098184708161906, + "acc_norm": 0.3537675606641124, + "acc_norm_stderr": 0.017098184708161906 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2, + "acc_stderr": 0.03455473702325438, + "acc_norm": 0.2, + "acc_norm_stderr": 0.03455473702325438 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932268, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932268 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2765957446808511, + "acc_stderr": 0.029241883869628834, + "acc_norm": 0.2765957446808511, + "acc_norm_stderr": 0.029241883869628834 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3614457831325301, + "acc_stderr": 0.037400593820293204, + "acc_norm": 0.3614457831325301, + "acc_norm_stderr": 0.037400593820293204 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2090032154340836, + "acc_stderr": 0.02309314039837422, + "acc_norm": 0.2090032154340836, + "acc_norm_stderr": 0.02309314039837422 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.33183856502242154, + "acc_stderr": 0.031602951437766785, + "acc_norm": 0.33183856502242154, + "acc_norm_stderr": 0.031602951437766785 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2900763358778626, + "acc_stderr": 0.03980066246467765, + "acc_norm": 0.2900763358778626, + "acc_norm_stderr": 0.03980066246467765 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + 
"harness|ko_mmlu_high_school_geography|5": { + "acc": 0.2727272727272727, + "acc_stderr": 0.03173071239071724, + "acc_norm": 0.2727272727272727, + "acc_norm_stderr": 0.03173071239071724 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.296551724137931, + "acc_stderr": 0.03806142687309993, + "acc_norm": 0.296551724137931, + "acc_norm_stderr": 0.03806142687309993 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237654, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237654 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3025210084033613, + "acc_stderr": 0.029837962388291926, + "acc_norm": 0.3025210084033613, + "acc_norm_stderr": 0.029837962388291926 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.31025641025641026, + "acc_stderr": 0.02345467488940429, + "acc_norm": 0.31025641025641026, + "acc_norm_stderr": 0.02345467488940429 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.19, + "acc_stderr": 0.03942772444036623, + "acc_norm": 0.19, + "acc_norm_stderr": 0.03942772444036623 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.04330043749650742, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.04330043749650742 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.24630541871921183, + "acc_stderr": 0.030315099285617722, + "acc_norm": 0.24630541871921183, + "acc_norm_stderr": 0.030315099285617722 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.27419354838709675, + "acc_stderr": 0.025378139970885193, + "acc_norm": 0.27419354838709675, + "acc_norm_stderr": 0.025378139970885193 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5384615384615384, + "acc_stderr": 0.03265903381186194, + "acc_norm": 
0.5384615384615384, + "acc_norm_stderr": 0.03265903381186194 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.24150943396226415, + "acc_stderr": 0.026341480371118355, + "acc_norm": 0.24150943396226415, + "acc_norm_stderr": 0.026341480371118355 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.3181818181818182, + "acc_stderr": 0.04461272175910508, + "acc_norm": 0.3181818181818182, + "acc_norm_stderr": 0.04461272175910508 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.025348097468097845, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.025348097468097845 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.23841059602649006, + "acc_stderr": 0.0347918557259966, + "acc_norm": 0.23841059602649006, + "acc_norm_stderr": 0.0347918557259966 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.4129353233830846, + "acc_stderr": 0.03481520803367348, + "acc_norm": 0.4129353233830846, + "acc_norm_stderr": 0.03481520803367348 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2254335260115607, + "acc_stderr": 0.03186209851641144, + "acc_norm": 0.2254335260115607, + "acc_norm_stderr": 0.03186209851641144 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.24338624338624337, + "acc_stderr": 0.022101128787415436, + "acc_norm": 0.24338624338624337, + "acc_norm_stderr": 0.022101128787415436 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2847222222222222, + "acc_stderr": 0.03773809990686934, + "acc_norm": 0.2847222222222222, + "acc_norm_stderr": 0.03773809990686934 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.24855491329479767, + "acc_stderr": 
0.023267528432100174, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 0.023267528432100174 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.26380368098159507, + "acc_stderr": 0.03462419931615625, + "acc_norm": 0.26380368098159507, + "acc_norm_stderr": 0.03462419931615625 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2839506172839506, + "acc_stderr": 0.025089478523765134, + "acc_norm": 0.2839506172839506, + "acc_norm_stderr": 0.025089478523765134 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.29533678756476683, + "acc_stderr": 0.03292296639155142, + "acc_norm": 0.29533678756476683, + "acc_norm_stderr": 0.03292296639155142 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.03999423879281337, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.03999423879281337 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.27155963302752295, + "acc_stderr": 0.019069098363191445, + "acc_norm": 0.27155963302752295, + "acc_norm_stderr": 0.019069098363191445 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3492063492063492, + "acc_stderr": 0.04263906892795132, + "acc_norm": 0.3492063492063492, + "acc_norm_stderr": 0.04263906892795132 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.35294117647058826, + "acc_stderr": 0.027363593284684944, + "acc_norm": 0.35294117647058826, + "acc_norm_stderr": 0.027363593284684944 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.4462809917355372, + "acc_stderr": 0.04537935177947879, + "acc_norm": 0.4462809917355372, + "acc_norm_stderr": 0.04537935177947879 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 
0.25, + "acc_stderr": 0.03523807393012047, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03523807393012047 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.28104575163398693, + "acc_stderr": 0.01818521895431808, + "acc_norm": 0.28104575163398693, + "acc_norm_stderr": 0.01818521895431808 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.25177304964539005, + "acc_stderr": 0.0258921511567094, + "acc_norm": 0.25177304964539005, + "acc_norm_stderr": 0.0258921511567094 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3392857142857143, + "acc_stderr": 0.04493949068613539, + "acc_norm": 0.3392857142857143, + "acc_norm_stderr": 0.04493949068613539 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.19907407407407407, + "acc_stderr": 0.027232298462690253, + "acc_norm": 0.19907407407407407, + "acc_norm_stderr": 0.027232298462690253 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23798882681564246, + "acc_stderr": 0.014242630070574892, + "acc_norm": 0.23798882681564246, + "acc_norm_stderr": 0.014242630070574892 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.44, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.2536764705882353, + "acc_stderr": 0.026431329870789562, + "acc_norm": 0.2536764705882353, + "acc_norm_stderr": 0.026431329870789562 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.39591836734693875, + "acc_stderr": 0.03130802899065685, + "acc_norm": 0.39591836734693875, + "acc_norm_stderr": 0.03130802899065685 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.350210970464135, + "acc_stderr": 0.031052391937584353, + "acc_norm": 0.350210970464135, + "acc_norm_stderr": 0.031052391937584353 + 
}, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.26140808344198174, + "acc_stderr": 0.011222528169771314, + "acc_norm": 0.26140808344198174, + "acc_norm_stderr": 0.011222528169771314 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.3088235294117647, + "acc_stderr": 0.03242661719827218, + "acc_norm": 0.3088235294117647, + "acc_norm_stderr": 0.03242661719827218 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.28484848484848485, + "acc_stderr": 0.03524390844511782, + "acc_norm": 0.28484848484848485, + "acc_norm_stderr": 0.03524390844511782 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2998776009791922, + "mc1_stderr": 0.016040352966713616, + "mc2": 0.4583811632060745, + "mc2_stderr": 0.015833164608294075 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3234946871310508, + "acc_stderr": 0.016083627290483675, + "acc_norm": 0.3825265643447462, + "acc_norm_stderr": 0.016709165387228813 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + 
"harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "mncai/Mistral-7B-v0.1-orca_platy-1k", + "model_sha": "37f66cf60e2e2b4e299419202fe22a45b8d96874", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff 
--git a/mncai/Mistral-7B-v0.1-platy-1k/result_2023-10-22 04:56:50.json b/mncai/Mistral-7B-v0.1-platy-1k/result_2023-10-22 04:56:50.json new file mode 100644 index 0000000000000000000000000000000000000000..1da369d78b2f598b977b9c7da145ca043ba7f6eb --- /dev/null +++ b/mncai/Mistral-7B-v0.1-platy-1k/result_2023-10-22 04:56:50.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.30119453924914674, + "acc_stderr": 0.01340674176784762, + "acc_norm": 0.34044368600682595, + "acc_norm_stderr": 0.013847460518892976 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3564031069508066, + "acc_stderr": 0.004779574402771374, + "acc_norm": 0.45269866560446126, + "acc_norm_stderr": 0.004967402792744855 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.2982456140350877, + "acc_stderr": 0.03508771929824565, + "acc_norm": 0.2982456140350877, + "acc_norm_stderr": 0.03508771929824565 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.1941747572815534, + "acc_stderr": 0.03916667762822584, + "acc_norm": 0.1941747572815534, + "acc_norm_stderr": 0.03916667762822584 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2707535121328225, + "acc_stderr": 0.015889888362560486, + "acc_norm": 0.2707535121328225, + "acc_norm_stderr": 0.015889888362560486 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04072314811876837, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04072314811876837 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.20425531914893616, + "acc_stderr": 0.02635515841334941, + "acc_norm": 0.20425531914893616, + "acc_norm_stderr": 0.02635515841334941 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.20481927710843373, + "acc_stderr": 0.03141784291663926, + "acc_norm": 0.20481927710843373, + "acc_norm_stderr": 0.03141784291663926 + }, + 
"harness|ko_mmlu_philosophy|5": { + "acc": 0.2990353697749196, + "acc_stderr": 0.02600330111788514, + "acc_norm": 0.2990353697749196, + "acc_norm_stderr": 0.02600330111788514 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.20179372197309417, + "acc_stderr": 0.02693611191280227, + "acc_norm": 0.20179372197309417, + "acc_norm_stderr": 0.02693611191280227 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.22900763358778625, + "acc_stderr": 0.036853466317118506, + "acc_norm": 0.22900763358778625, + "acc_norm_stderr": 0.036853466317118506 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036845, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036845 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.25252525252525254, + "acc_stderr": 0.030954055470365907, + "acc_norm": 0.25252525252525254, + "acc_norm_stderr": 0.030954055470365907 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.296551724137931, + "acc_stderr": 0.03806142687309994, + "acc_norm": 0.296551724137931, + "acc_norm_stderr": 0.03806142687309994 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237657, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237657 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.21008403361344538, + "acc_stderr": 0.026461398717471874, + "acc_norm": 0.21008403361344538, + "acc_norm_stderr": 0.026461398717471874 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2128205128205128, + "acc_stderr": 0.020752423722128002, + "acc_norm": 0.2128205128205128, + "acc_norm_stderr": 0.020752423722128002 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 
0.04725815626252604 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.23148148148148148, + "acc_stderr": 0.04077494709252627, + "acc_norm": 0.23148148148148148, + "acc_norm_stderr": 0.04077494709252627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2955665024630542, + "acc_stderr": 0.032104944337514575, + "acc_norm": 0.2955665024630542, + "acc_norm_stderr": 0.032104944337514575 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.25161290322580643, + "acc_stderr": 0.024685979286239956, + "acc_norm": 0.25161290322580643, + "acc_norm_stderr": 0.024685979286239956 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2606837606837607, + "acc_stderr": 0.028760348956523414, + "acc_norm": 0.2606837606837607, + "acc_norm_stderr": 0.028760348956523414 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2188679245283019, + "acc_stderr": 0.025447863825108597, + "acc_norm": 0.2188679245283019, + "acc_norm_stderr": 0.025447863825108597 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.20909090909090908, + "acc_stderr": 0.03895091015724135, + "acc_norm": 0.20909090909090908, + "acc_norm_stderr": 0.03895091015724135 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.026842057873833706, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.026842057873833706 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.271523178807947, + "acc_stderr": 0.03631329803969653, + "acc_norm": 0.271523178807947, + "acc_norm_stderr": 0.03631329803969653 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.24875621890547264, + "acc_stderr": 0.030567675938916714, + "acc_norm": 0.24875621890547264, + "acc_norm_stderr": 0.030567675938916714 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.24855491329479767, + "acc_stderr": 0.03295304696818317, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 0.03295304696818317 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 
0.2671957671957672, + "acc_stderr": 0.022789673145776575, + "acc_norm": 0.2671957671957672, + "acc_norm_stderr": 0.022789673145776575 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2569444444444444, + "acc_stderr": 0.03653946969442099, + "acc_norm": 0.2569444444444444, + "acc_norm_stderr": 0.03653946969442099 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.18, + "acc_stderr": 0.03861229196653695, + "acc_norm": 0.18, + "acc_norm_stderr": 0.03861229196653695 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.29190751445086704, + "acc_stderr": 0.02447699407624734, + "acc_norm": 0.29190751445086704, + "acc_norm_stderr": 0.02447699407624734 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3006134969325153, + "acc_stderr": 0.03602511318806771, + "acc_norm": 0.3006134969325153, + "acc_norm_stderr": 0.03602511318806771 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2932098765432099, + "acc_stderr": 0.025329888171900926, + "acc_norm": 0.2932098765432099, + "acc_norm_stderr": 0.025329888171900926 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.22797927461139897, + "acc_stderr": 0.030276909945178256, + "acc_norm": 0.22797927461139897, + "acc_norm_stderr": 0.030276909945178256 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.04049339297748141, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.04049339297748141 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.22201834862385322, + "acc_stderr": 0.017818849564796617, + "acc_norm": 0.22201834862385322, + "acc_norm_stderr": 0.017818849564796617 + }, + 
"harness|ko_mmlu_formal_logic|5": { + "acc": 0.15079365079365079, + "acc_stderr": 0.03200686497287392, + "acc_norm": 0.15079365079365079, + "acc_norm_stderr": 0.03200686497287392 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.25163398692810457, + "acc_stderr": 0.0248480182638752, + "acc_norm": 0.25163398692810457, + "acc_norm_stderr": 0.0248480182638752 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.371900826446281, + "acc_stderr": 0.04412015806624503, + "acc_norm": 0.371900826446281, + "acc_norm_stderr": 0.04412015806624503 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3026315789473684, + "acc_stderr": 0.03738520676119668, + "acc_norm": 0.3026315789473684, + "acc_norm_stderr": 0.03738520676119668 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2761437908496732, + "acc_stderr": 0.018087276935663133, + "acc_norm": 0.2761437908496732, + "acc_norm_stderr": 0.018087276935663133 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2695035460992908, + "acc_stderr": 0.026469036818590634, + "acc_norm": 0.2695035460992908, + "acc_norm_stderr": 0.026469036818590634 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.24107142857142858, + "acc_stderr": 0.04059867246952689, + "acc_norm": 0.24107142857142858, + "acc_norm_stderr": 0.04059867246952689 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.21296296296296297, + "acc_stderr": 0.027920963147993662, + "acc_norm": 0.21296296296296297, + "acc_norm_stderr": 0.027920963147993662 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24692737430167597, + "acc_stderr": 0.014422292204808852, + "acc_norm": 0.24692737430167597, + "acc_norm_stderr": 0.014422292204808852 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + 
"acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.16544117647058823, + "acc_stderr": 0.02257177102549475, + "acc_norm": 0.16544117647058823, + "acc_norm_stderr": 0.02257177102549475 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.24081632653061225, + "acc_stderr": 0.027372942201788167, + "acc_norm": 0.24081632653061225, + "acc_norm_stderr": 0.027372942201788167 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.26582278481012656, + "acc_stderr": 0.028756799629658332, + "acc_norm": 0.26582278481012656, + "acc_norm_stderr": 0.028756799629658332 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2711864406779661, + "acc_stderr": 0.011354581451622985, + "acc_norm": 0.2711864406779661, + "acc_norm_stderr": 0.011354581451622985 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.25980392156862747, + "acc_stderr": 0.030778554678693264, + "acc_norm": 0.25980392156862747, + "acc_norm_stderr": 0.030778554678693264 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.28484848484848485, + "acc_stderr": 0.035243908445117836, + "acc_norm": 0.28484848484848485, + "acc_norm_stderr": 0.035243908445117836 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2607099143206854, + "mc1_stderr": 0.015368841620766379, + "mc2": 0.44338943697081723, + "mc2_stderr": 0.015766267984553387 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.30932703659976385, + "acc_stderr": 0.01589132050552089, + "acc_norm": 0.44155844155844154, + "acc_norm_stderr": 0.017072525875563103 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + 
"harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + 
"harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "mncai/Mistral-7B-v0.1-platy-1k", + "model_sha": "ae6790c706091e0a0ffada183edf6f08e06ba235", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/mncai/Pr_Llama2_7B-Sh5K_Wi5K_Ne5K_Ct5K-Lr05_Ep4/result_2023-11-23 18:42:19.json b/mncai/Pr_Llama2_7B-Sh5K_Wi5K_Ne5K_Ct5K-Lr05_Ep4/result_2023-11-23 18:42:19.json new file mode 100644 index 0000000000000000000000000000000000000000..57388a3d7cb3168ae1b2bce1ee7dc695cbc61cad --- /dev/null +++ b/mncai/Pr_Llama2_7B-Sh5K_Wi5K_Ne5K_Ct5K-Lr05_Ep4/result_2023-11-23 18:42:19.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.28924914675767915, + "acc_stderr": 0.013250012579393443, + "acc_norm": 0.32337883959044367, + "acc_norm_stderr": 0.013669421630012132 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3481378211511651, + "acc_stderr": 0.004754063867700179, + "acc_norm": 0.4107747460665206, + "acc_norm_stderr": 0.0049096898763420415 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.42105263157894735, + "acc_stderr": 0.03786720706234215, + "acc_norm": 0.42105263157894735, + "acc_norm_stderr": 0.03786720706234215 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.3883495145631068, + "acc_stderr": 0.0482572933735639, + "acc_norm": 0.3883495145631068, + "acc_norm_stderr": 0.0482572933735639 + }, + 
"harness|ko_mmlu_miscellaneous|5": { + "acc": 0.38058748403575987, + "acc_stderr": 0.017362564126075418, + "acc_norm": 0.38058748403575987, + "acc_norm_stderr": 0.017362564126075418 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3925925925925926, + "acc_stderr": 0.04218506215368879, + "acc_norm": 0.3925925925925926, + "acc_norm_stderr": 0.04218506215368879 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3191489361702128, + "acc_stderr": 0.03047297336338005, + "acc_norm": 0.3191489361702128, + "acc_norm_stderr": 0.03047297336338005 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3132530120481928, + "acc_stderr": 0.036108050180310235, + "acc_norm": 0.3132530120481928, + "acc_norm_stderr": 0.036108050180310235 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3858520900321543, + "acc_stderr": 0.027648149599751464, + "acc_norm": 0.3858520900321543, + "acc_norm_stderr": 0.027648149599751464 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.36771300448430494, + "acc_stderr": 0.03236198350928276, + "acc_norm": 0.36771300448430494, + "acc_norm_stderr": 0.03236198350928276 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3435114503816794, + "acc_stderr": 0.041649760719448786, + "acc_norm": 0.3435114503816794, + "acc_norm_stderr": 0.041649760719448786 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3939393939393939, + "acc_stderr": 0.03481285338232964, + "acc_norm": 0.3939393939393939, + "acc_norm_stderr": 0.03481285338232964 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3793103448275862, + "acc_stderr": 0.04043461861916747, + "acc_norm": 0.3793103448275862, + "acc_norm_stderr": 0.04043461861916747 + 
}, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.14705882352941177, + "acc_stderr": 0.03524068951567447, + "acc_norm": 0.14705882352941177, + "acc_norm_stderr": 0.03524068951567447 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.31512605042016806, + "acc_stderr": 0.03017680828897434, + "acc_norm": 0.31512605042016806, + "acc_norm_stderr": 0.03017680828897434 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.28205128205128205, + "acc_stderr": 0.022815813098896614, + "acc_norm": 0.28205128205128205, + "acc_norm_stderr": 0.022815813098896614 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.04820403072760627, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.04820403072760627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.0317852971064275, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.0317852971064275 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3903225806451613, + "acc_stderr": 0.027751256636969573, + "acc_norm": 0.3903225806451613, + "acc_norm_stderr": 0.027751256636969573 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5384615384615384, + "acc_stderr": 0.03265903381186194, + "acc_norm": 0.5384615384615384, + "acc_norm_stderr": 0.03265903381186194 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.35471698113207545, + "acc_stderr": 0.0294451753281996, + "acc_norm": 0.35471698113207545, + "acc_norm_stderr": 0.0294451753281996 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.38181818181818183, + "acc_stderr": 0.046534298079135075, + "acc_norm": 0.38181818181818183, 
+ "acc_norm_stderr": 0.046534298079135075 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25555555555555554, + "acc_stderr": 0.026593939101844082, + "acc_norm": 0.25555555555555554, + "acc_norm_stderr": 0.026593939101844082 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.03710185726119995, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.03710185726119995 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.43283582089552236, + "acc_stderr": 0.03503490923673282, + "acc_norm": 0.43283582089552236, + "acc_norm_stderr": 0.03503490923673282 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2774566473988439, + "acc_stderr": 0.034140140070440354, + "acc_norm": 0.2774566473988439, + "acc_norm_stderr": 0.034140140070440354 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.02351729433596329, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.02351729433596329 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.22916666666666666, + "acc_stderr": 0.03514697467862388, + "acc_norm": 0.22916666666666666, + "acc_norm_stderr": 0.03514697467862388 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421296, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421296 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.36127167630057805, + "acc_stderr": 0.025862201852277895, + "acc_norm": 0.36127167630057805, + "acc_norm_stderr": 0.025862201852277895 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.37423312883435583, + "acc_stderr": 0.03802068102899616, + "acc_norm": 0.37423312883435583, + "acc_norm_stderr": 0.03802068102899616 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3611111111111111, + "acc_stderr": 
0.026725868809100786, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.026725868809100786 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816505, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816505 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.37823834196891193, + "acc_stderr": 0.03499807276193338, + "acc_norm": 0.37823834196891193, + "acc_norm_stderr": 0.03499807276193338 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.041857744240220575, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.041857744240220575 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3119266055045872, + "acc_stderr": 0.019862967976707245, + "acc_norm": 0.3119266055045872, + "acc_norm_stderr": 0.019862967976707245 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.1984126984126984, + "acc_stderr": 0.03567016675276863, + "acc_norm": 0.1984126984126984, + "acc_norm_stderr": 0.03567016675276863 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3562091503267974, + "acc_stderr": 0.02742047766262923, + "acc_norm": 0.3562091503267974, + "acc_norm_stderr": 0.02742047766262923 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.045454545454545484, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.045454545454545484 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3157894736842105, + "acc_stderr": 0.0378272898086547, + "acc_norm": 0.3157894736842105, + "acc_norm_stderr": 0.0378272898086547 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3006535947712418, + "acc_stderr": 0.018550634502952964, + "acc_norm": 0.3006535947712418, + "acc_norm_stderr": 0.018550634502952964 + }, + "harness|ko_mmlu_professional_accounting|5": { + 
"acc": 0.2801418439716312, + "acc_stderr": 0.02678917235114023, + "acc_norm": 0.2801418439716312, + "acc_norm_stderr": 0.02678917235114023 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.04287858751340456, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.04287858751340456 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2824074074074074, + "acc_stderr": 0.030701372111510944, + "acc_norm": 0.2824074074074074, + "acc_norm_stderr": 0.030701372111510944 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2435754189944134, + "acc_stderr": 0.014355911964767857, + "acc_norm": 0.2435754189944134, + "acc_norm_stderr": 0.014355911964767857 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3897058823529412, + "acc_stderr": 0.0296246635811597, + "acc_norm": 0.3897058823529412, + "acc_norm_stderr": 0.0296246635811597 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3469387755102041, + "acc_stderr": 0.030472526026726496, + "acc_norm": 0.3469387755102041, + "acc_norm_stderr": 0.030472526026726496 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.3924050632911392, + "acc_stderr": 0.0317847187456473, + "acc_norm": 0.3924050632911392, + "acc_norm_stderr": 0.0317847187456473 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.26988265971316816, + "acc_stderr": 0.011337381084250425, + "acc_norm": 0.26988265971316816, + "acc_norm_stderr": 0.011337381084250425 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4117647058823529, + "acc_stderr": 0.03454236585380609, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 
0.03454236585380609 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.32727272727272727, + "acc_stderr": 0.036639749943912434, + "acc_norm": 0.32727272727272727, + "acc_norm_stderr": 0.036639749943912434 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2962056303549572, + "mc1_stderr": 0.01598359510181139, + "mc2": 0.4563469870156814, + "mc2_stderr": 0.01627127621105625 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2762691853600944, + "acc_stderr": 0.015373387500464464, + "acc_norm": 0.3270365997638725, + "acc_norm_stderr": 0.016129047485457036 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + 
"harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "mncai/Pr_Llama2_7B-Sh5K_Wi5K_Ne5K_Ct5K-Lr05_Ep4", + "model_sha": "500045ae7ecdb4ce4913d3d5effad48fa02433b5", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/mncai/agiin-13.6B-v0.1/result_2023-12-20 10:58:15.json b/mncai/agiin-13.6B-v0.1/result_2023-12-20 10:58:15.json new file mode 100644 index 0000000000000000000000000000000000000000..2568d924f7413b17fe080263e9b347a19809ae87 --- /dev/null +++ b/mncai/agiin-13.6B-v0.1/result_2023-12-20 10:58:15.json @@ -0,0 +1,444 @@ +{ + "results": { + 
"harness|ko_arc_challenge|25": { + "acc": 0.33532423208191126, + "acc_stderr": 0.013796182947785562, + "acc_norm": 0.39590443686006827, + "acc_norm_stderr": 0.01429122839353659 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3580959968133838, + "acc_stderr": 0.004784607222774629, + "acc_norm": 0.4439354710217088, + "acc_norm_stderr": 0.004958314114266491 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.45614035087719296, + "acc_stderr": 0.03820042586602966, + "acc_norm": 0.45614035087719296, + "acc_norm_stderr": 0.03820042586602966 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5533980582524272, + "acc_stderr": 0.04922424153458934, + "acc_norm": 0.5533980582524272, + "acc_norm_stderr": 0.04922424153458934 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4661558109833972, + "acc_stderr": 0.017838956009136802, + "acc_norm": 0.4661558109833972, + "acc_norm_stderr": 0.017838956009136802 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34074074074074073, + "acc_stderr": 0.04094376269996794, + "acc_norm": 0.34074074074074073, + "acc_norm_stderr": 0.04094376269996794 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.35319148936170214, + "acc_stderr": 0.031245325202761926, + "acc_norm": 0.35319148936170214, + "acc_norm_stderr": 0.031245325202761926 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.35542168674698793, + "acc_stderr": 0.03726214354322415, + "acc_norm": 0.35542168674698793, + "acc_norm_stderr": 0.03726214354322415 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.42765273311897106, + "acc_stderr": 0.028099240775809563, + "acc_norm": 0.42765273311897106, + "acc_norm_stderr": 0.028099240775809563 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3901345291479821, + "acc_stderr": 0.03273766725459157, + "acc_norm": 0.3901345291479821, + "acc_norm_stderr": 0.03273766725459157 
+ }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4732824427480916, + "acc_stderr": 0.04379024936553894, + "acc_norm": 0.4732824427480916, + "acc_norm_stderr": 0.04379024936553894 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5, + "acc_stderr": 0.035623524993954825, + "acc_norm": 0.5, + "acc_norm_stderr": 0.035623524993954825 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4413793103448276, + "acc_stderr": 0.04137931034482758, + "acc_norm": 0.4413793103448276, + "acc_norm_stderr": 0.04137931034482758 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.04440521906179328, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.04440521906179328 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4957983193277311, + "acc_stderr": 0.03247734334448111, + "acc_norm": 0.4957983193277311, + "acc_norm_stderr": 0.03247734334448111 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4256410256410256, + "acc_stderr": 0.025069094387296542, + "acc_norm": 0.4256410256410256, + "acc_norm_stderr": 0.025069094387296542 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.53, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.53, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5370370370370371, + "acc_stderr": 0.04820403072760626, + "acc_norm": 0.5370370370370371, + "acc_norm_stderr": 0.04820403072760626 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39408866995073893, + "acc_stderr": 0.03438157967036546, + "acc_norm": 0.39408866995073893, + "acc_norm_stderr": 0.03438157967036546 + }, + 
"harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4258064516129032, + "acc_stderr": 0.02812911270916589, + "acc_norm": 0.4258064516129032, + "acc_norm_stderr": 0.02812911270916589 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7307692307692307, + "acc_stderr": 0.02905858830374884, + "acc_norm": 0.7307692307692307, + "acc_norm_stderr": 0.02905858830374884 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4226415094339623, + "acc_stderr": 0.030402331445769537, + "acc_norm": 0.4226415094339623, + "acc_norm_stderr": 0.030402331445769537 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4636363636363636, + "acc_stderr": 0.04776449162396197, + "acc_norm": 0.4636363636363636, + "acc_norm_stderr": 0.04776449162396197 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.32592592592592595, + "acc_stderr": 0.028578348365473072, + "acc_norm": 0.32592592592592595, + "acc_norm_stderr": 0.028578348365473072 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.037101857261199946, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.037101857261199946 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.582089552238806, + "acc_stderr": 0.03487558640462064, + "acc_norm": 0.582089552238806, + "acc_norm_stderr": 0.03487558640462064 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.37572254335260113, + "acc_stderr": 0.036928207672648664, + "acc_norm": 0.37572254335260113, + "acc_norm_stderr": 0.036928207672648664 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.41798941798941797, + "acc_stderr": 0.025402555503260912, + "acc_norm": 0.41798941798941797, + "acc_norm_stderr": 0.025402555503260912 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3819444444444444, + "acc_stderr": 0.040629907841466674, + "acc_norm": 0.3819444444444444, + "acc_norm_stderr": 0.040629907841466674 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + 
"acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.55, + "acc_stderr": 0.05, + "acc_norm": 0.55, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.49710982658959535, + "acc_stderr": 0.026918645383239004, + "acc_norm": 0.49710982658959535, + "acc_norm_stderr": 0.026918645383239004 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4171779141104294, + "acc_stderr": 0.038741028598180814, + "acc_norm": 0.4171779141104294, + "acc_norm_stderr": 0.038741028598180814 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.02774431344337654, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.02774431344337654 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.45595854922279794, + "acc_stderr": 0.035944137112724366, + "acc_norm": 0.45595854922279794, + "acc_norm_stderr": 0.035944137112724366 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3508771929824561, + "acc_stderr": 0.044895393502706986, + "acc_norm": 0.3508771929824561, + "acc_norm_stderr": 0.044895393502706986 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5045871559633027, + "acc_stderr": 0.021436420955529414, + "acc_norm": 0.5045871559633027, + "acc_norm_stderr": 0.021436420955529414 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.42063492063492064, + "acc_stderr": 0.04415438226743744, + "acc_norm": 0.42063492063492064, + "acc_norm_stderr": 0.04415438226743744 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4738562091503268, + "acc_stderr": 0.028590752958852387, + "acc_norm": 0.4738562091503268, + "acc_norm_stderr": 0.028590752958852387 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956912, + "acc_norm": 
0.49, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5867768595041323, + "acc_stderr": 0.04495087843548408, + "acc_norm": 0.5867768595041323, + "acc_norm_stderr": 0.04495087843548408 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3881578947368421, + "acc_stderr": 0.03965842097512744, + "acc_norm": 0.3881578947368421, + "acc_norm_stderr": 0.03965842097512744 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.34477124183006536, + "acc_stderr": 0.019228322018696647, + "acc_norm": 0.34477124183006536, + "acc_norm_stderr": 0.019228322018696647 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.37943262411347517, + "acc_stderr": 0.0289473388516141, + "acc_norm": 0.37943262411347517, + "acc_norm_stderr": 0.0289473388516141 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3482142857142857, + "acc_stderr": 0.04521829902833587, + "acc_norm": 0.3482142857142857, + "acc_norm_stderr": 0.04521829902833587 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4398148148148148, + "acc_stderr": 0.0338517797604481, + "acc_norm": 0.4398148148148148, + "acc_norm_stderr": 0.0338517797604481 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24916201117318434, + "acc_stderr": 0.014465893829859926, + "acc_norm": 0.24916201117318434, + "acc_norm_stderr": 0.014465893829859926 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.41, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.029520095697687758, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.029520095697687758 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5020408163265306, + 
"acc_stderr": 0.0320089533497105, + "acc_norm": 0.5020408163265306, + "acc_norm_stderr": 0.0320089533497105 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6118143459915611, + "acc_stderr": 0.0317229500433233, + "acc_norm": 0.6118143459915611, + "acc_norm_stderr": 0.0317229500433233 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.34224250325945244, + "acc_stderr": 0.012117939998705862, + "acc_norm": 0.34224250325945244, + "acc_norm_stderr": 0.012117939998705862 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.46568627450980393, + "acc_stderr": 0.03501038327635897, + "acc_norm": 0.46568627450980393, + "acc_norm_stderr": 0.03501038327635897 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.038956580652718446, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.038956580652718446 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3659730722154223, + "mc1_stderr": 0.01686294168408836, + "mc2": 0.54650809718614, + "mc2_stderr": 0.016523179064123664 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.39433293978748524, + "acc_stderr": 0.0168020906748932, + "acc_norm": 0.41912632821723733, + "acc_norm_stderr": 0.01696399501086279 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + 
"harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + 
"model_name": "mncai/agiin-13.6B-v0.1", + "model_sha": "6c93ca1d60b09b9b91e15c57dc8525827d371798", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/mncai/llama2-13b-dpo-v2/result_2023-12-03 07:07:36.json b/mncai/llama2-13b-dpo-v2/result_2023-12-03 07:07:36.json new file mode 100644 index 0000000000000000000000000000000000000000..069b0e96b8547b5bed2150984edd4e718bbe43dc --- /dev/null +++ b/mncai/llama2-13b-dpo-v2/result_2023-12-03 07:07:36.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3412969283276451, + "acc_stderr": 0.013855831287497724, + "acc_norm": 0.40187713310580203, + "acc_norm_stderr": 0.014327268614578274 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3693487353116909, + "acc_stderr": 0.004816421208654088, + "acc_norm": 0.46883091017725553, + "acc_norm_stderr": 0.004980076707392432 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4093567251461988, + "acc_stderr": 0.03771283107626545, + "acc_norm": 0.4093567251461988, + "acc_norm_stderr": 0.03771283107626545 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5436893203883495, + "acc_stderr": 0.049318019942204146, + "acc_norm": 0.5436893203883495, + "acc_norm_stderr": 0.049318019942204146 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.46360153256704983, + "acc_stderr": 0.01783252407959326, + "acc_norm": 0.46360153256704983, + "acc_norm_stderr": 0.01783252407959326 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.04171654161354544, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.04171654161354544 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3574468085106383, + "acc_stderr": 
0.03132941789476425, + "acc_norm": 0.3574468085106383, + "acc_norm_stderr": 0.03132941789476425 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3795180722891566, + "acc_stderr": 0.037777988227480165, + "acc_norm": 0.3795180722891566, + "acc_norm_stderr": 0.037777988227480165 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.44694533762057875, + "acc_stderr": 0.028237769422085335, + "acc_norm": 0.44694533762057875, + "acc_norm_stderr": 0.028237769422085335 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.40358744394618834, + "acc_stderr": 0.032928028193303135, + "acc_norm": 0.40358744394618834, + "acc_norm_stderr": 0.032928028193303135 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48091603053435117, + "acc_stderr": 0.04382094705550988, + "acc_norm": 0.48091603053435117, + "acc_norm_stderr": 0.04382094705550988 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5252525252525253, + "acc_stderr": 0.03557806245087314, + "acc_norm": 0.5252525252525253, + "acc_norm_stderr": 0.03557806245087314 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4206896551724138, + "acc_stderr": 0.0411391498118926, + "acc_norm": 0.4206896551724138, + "acc_norm_stderr": 0.0411391498118926 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.04440521906179328, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.04440521906179328 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.42016806722689076, + "acc_stderr": 0.03206183783236152, + "acc_norm": 0.42016806722689076, + "acc_norm_stderr": 0.03206183783236152 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.41794871794871796, + "acc_stderr": 0.025007329882461227, + "acc_norm": 0.41794871794871796, + "acc_norm_stderr": 0.025007329882461227 + }, + 
"harness|ko_mmlu_computer_security|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5277777777777778, + "acc_stderr": 0.04826217294139894, + "acc_norm": 0.5277777777777778, + "acc_norm_stderr": 0.04826217294139894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3842364532019704, + "acc_stderr": 0.03422398565657551, + "acc_norm": 0.3842364532019704, + "acc_norm_stderr": 0.03422398565657551 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.41935483870967744, + "acc_stderr": 0.028071588901091852, + "acc_norm": 0.41935483870967744, + "acc_norm_stderr": 0.028071588901091852 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6495726495726496, + "acc_stderr": 0.0312561082442188, + "acc_norm": 0.6495726495726496, + "acc_norm_stderr": 0.0312561082442188 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.41509433962264153, + "acc_stderr": 0.030325945789286105, + "acc_norm": 0.41509433962264153, + "acc_norm_stderr": 0.030325945789286105 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4727272727272727, + "acc_stderr": 0.04782001791380063, + "acc_norm": 0.4727272727272727, + "acc_norm_stderr": 0.04782001791380063 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.02696242432507384, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.02696242432507384 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.271523178807947, + "acc_stderr": 0.03631329803969653, + "acc_norm": 0.271523178807947, + "acc_norm_stderr": 0.03631329803969653 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5870646766169154, + "acc_stderr": 0.03481520803367348, + "acc_norm": 0.5870646766169154, + "acc_norm_stderr": 
0.03481520803367348 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.36416184971098264, + "acc_stderr": 0.03669072477416907, + "acc_norm": 0.36416184971098264, + "acc_norm_stderr": 0.03669072477416907 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30687830687830686, + "acc_stderr": 0.02375292871211213, + "acc_norm": 0.30687830687830686, + "acc_norm_stderr": 0.02375292871211213 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2986111111111111, + "acc_stderr": 0.03827052357950756, + "acc_norm": 0.2986111111111111, + "acc_norm_stderr": 0.03827052357950756 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.39, + "acc_stderr": 0.049020713000019756, + "acc_norm": 0.39, + "acc_norm_stderr": 0.049020713000019756 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.62, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.62, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.523121387283237, + "acc_stderr": 0.026890297881303125, + "acc_norm": 0.523121387283237, + "acc_norm_stderr": 0.026890297881303125 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4110429447852761, + "acc_stderr": 0.038656978537853624, + "acc_norm": 0.4110429447852761, + "acc_norm_stderr": 0.038656978537853624 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.41975308641975306, + "acc_stderr": 0.027460099557005138, + "acc_norm": 0.41975308641975306, + "acc_norm_stderr": 0.027460099557005138 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.48186528497409326, + "acc_stderr": 0.036060650018329185, + "acc_norm": 0.48186528497409326, + "acc_norm_stderr": 0.036060650018329185 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.19298245614035087, + "acc_stderr": 0.037124548537213684, + "acc_norm": 
0.19298245614035087, + "acc_norm_stderr": 0.037124548537213684 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.46972477064220186, + "acc_stderr": 0.021397988604936965, + "acc_norm": 0.46972477064220186, + "acc_norm_stderr": 0.021397988604936965 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.042163702135578345, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.042163702135578345 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.45098039215686275, + "acc_stderr": 0.02849199358617157, + "acc_norm": 0.45098039215686275, + "acc_norm_stderr": 0.02849199358617157 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5867768595041323, + "acc_stderr": 0.04495087843548408, + "acc_norm": 0.5867768595041323, + "acc_norm_stderr": 0.04495087843548408 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.46710526315789475, + "acc_stderr": 0.040601270352363966, + "acc_norm": 0.46710526315789475, + "acc_norm_stderr": 0.040601270352363966 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3431372549019608, + "acc_stderr": 0.019206606848825365, + "acc_norm": 0.3431372549019608, + "acc_norm_stderr": 0.019206606848825365 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.36524822695035464, + "acc_stderr": 0.028723863853281288, + "acc_norm": 0.36524822695035464, + "acc_norm_stderr": 0.028723863853281288 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.22321428571428573, + "acc_stderr": 0.039523019677025116, + "acc_norm": 0.22321428571428573, + "acc_norm_stderr": 0.039523019677025116 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.30092592592592593, + "acc_stderr": 0.031280390843298825, + "acc_norm": 0.30092592592592593, + "acc_norm_stderr": 0.031280390843298825 + }, + "harness|ko_mmlu_moral_scenarios|5": { + 
"acc": 0.2636871508379888, + "acc_stderr": 0.014736926383761974, + "acc_norm": 0.2636871508379888, + "acc_norm_stderr": 0.014736926383761974 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.41, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.41, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3125, + "acc_stderr": 0.02815637344037142, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.02815637344037142 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5061224489795918, + "acc_stderr": 0.03200682020163908, + "acc_norm": 0.5061224489795918, + "acc_norm_stderr": 0.03200682020163908 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5485232067510548, + "acc_stderr": 0.032393600173974704, + "acc_norm": 0.5485232067510548, + "acc_norm_stderr": 0.032393600173974704 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.333116036505867, + "acc_stderr": 0.012037930451512047, + "acc_norm": 0.333116036505867, + "acc_norm_stderr": 0.012037930451512047 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.03484941514429231, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.03484941514429231 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.47878787878787876, + "acc_stderr": 0.03900828913737302, + "acc_norm": 0.47878787878787876, + "acc_norm_stderr": 0.03900828913737302 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3708690330477356, + "mc1_stderr": 0.016909693580248804, + "mc2": 0.5432148185655791, + "mc2_stderr": 0.01594594261862486 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3837072018890201, + "acc_stderr": 0.016718924637231826, + "acc_norm": 0.41204250295159384, + "acc_norm_stderr": 0.01692227673852836 + } + }, + "versions": { + 
"all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + 
"harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "mncai/llama2-13b-dpo-v2", + "model_sha": "c164263281b29c2cca9929351e472484db01d78d", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/mncai/llama2-13b-dpo-v3/result_2023-12-03 13:38:55.json b/mncai/llama2-13b-dpo-v3/result_2023-12-03 13:38:55.json new file mode 100644 index 0000000000000000000000000000000000000000..217e36d6c720c17b37b00e84e9ded5097faac174 --- /dev/null +++ b/mncai/llama2-13b-dpo-v3/result_2023-12-03 13:38:55.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4334470989761092, + "acc_stderr": 0.0144813762245589, + "acc_norm": 0.49146757679180886, + "acc_norm_stderr": 0.014609263165632186 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4462258514240191, + "acc_stderr": 0.004960839986099525, + "acc_norm": 0.5910177255526787, + "acc_norm_stderr": 0.0049064119844767886 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5906432748538012, + "acc_stderr": 0.037712831076265434, + "acc_norm": 0.5906432748538012, + "acc_norm_stderr": 0.037712831076265434 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4854368932038835, + 
"acc_stderr": 0.04948637324026637, + "acc_norm": 0.4854368932038835, + "acc_norm_stderr": 0.04948637324026637 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5478927203065134, + "acc_stderr": 0.017797751493865633, + "acc_norm": 0.5478927203065134, + "acc_norm_stderr": 0.017797751493865633 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4888888888888889, + "acc_stderr": 0.04318275491977976, + "acc_norm": 0.4888888888888889, + "acc_norm_stderr": 0.04318275491977976 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39574468085106385, + "acc_stderr": 0.03196758697835363, + "acc_norm": 0.39574468085106385, + "acc_norm_stderr": 0.03196758697835363 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.40963855421686746, + "acc_stderr": 0.03828401115079019, + "acc_norm": 0.40963855421686746, + "acc_norm_stderr": 0.03828401115079019 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5241157556270096, + "acc_stderr": 0.028365041542564577, + "acc_norm": 0.5241157556270096, + "acc_norm_stderr": 0.028365041542564577 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5022421524663677, + "acc_stderr": 0.033557465352232634, + "acc_norm": 0.5022421524663677, + "acc_norm_stderr": 0.033557465352232634 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48854961832061067, + "acc_stderr": 0.043841400240780176, + "acc_norm": 0.48854961832061067, + "acc_norm_stderr": 0.043841400240780176 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.44, + "acc_stderr": 0.0498887651569859, + "acc_norm": 0.44, + "acc_norm_stderr": 0.0498887651569859 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5707070707070707, + "acc_stderr": 0.03526552724601198, + "acc_norm": 0.5707070707070707, + "acc_norm_stderr": 0.03526552724601198 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4, + 
"acc_stderr": 0.04082482904638627, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04082482904638627 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.19607843137254902, + "acc_stderr": 0.03950581861179961, + "acc_norm": 0.19607843137254902, + "acc_norm_stderr": 0.03950581861179961 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.453781512605042, + "acc_stderr": 0.03233943468182088, + "acc_norm": 0.453781512605042, + "acc_norm_stderr": 0.03233943468182088 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.44871794871794873, + "acc_stderr": 0.025217315184846482, + "acc_norm": 0.44871794871794873, + "acc_norm_stderr": 0.025217315184846482 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.53, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.53, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.04803752235190192, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.04803752235190192 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4039408866995074, + "acc_stderr": 0.034524539038220406, + "acc_norm": 0.4039408866995074, + "acc_norm_stderr": 0.034524539038220406 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4, + "acc_stderr": 0.027869320571664635, + "acc_norm": 0.4, + "acc_norm_stderr": 0.027869320571664635 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6623931623931624, + "acc_stderr": 0.030980296992618554, + "acc_norm": 0.6623931623931624, + "acc_norm_stderr": 0.030980296992618554 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.41132075471698115, + "acc_stderr": 0.030285009259009794, + "acc_norm": 0.41132075471698115, + "acc_norm_stderr": 0.030285009259009794 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5636363636363636, + 
"acc_stderr": 0.04750185058907297, + "acc_norm": 0.5636363636363636, + "acc_norm_stderr": 0.04750185058907297 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.027309140588230175, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.027309140588230175 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + "acc_stderr": 0.037345356767871984, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.037345356767871984 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5572139303482587, + "acc_stderr": 0.035123109641239346, + "acc_norm": 0.5572139303482587, + "acc_norm_stderr": 0.035123109641239346 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4161849710982659, + "acc_stderr": 0.03758517775404947, + "acc_norm": 0.4161849710982659, + "acc_norm_stderr": 0.03758517775404947 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30687830687830686, + "acc_stderr": 0.023752928712112136, + "acc_norm": 0.30687830687830686, + "acc_norm_stderr": 0.023752928712112136 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.375, + "acc_stderr": 0.04048439222695598, + "acc_norm": 0.375, + "acc_norm_stderr": 0.04048439222695598 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.63, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.63, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5491329479768786, + "acc_stderr": 0.026788811931562764, + "acc_norm": 0.5491329479768786, + "acc_norm_stderr": 0.026788811931562764 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5766871165644172, + "acc_stderr": 0.03881891213334383, + "acc_norm": 0.5766871165644172, + "acc_norm_stderr": 0.03881891213334383 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.48148148148148145, + 
"acc_stderr": 0.027801656212323674, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.027801656212323674 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5751295336787565, + "acc_stderr": 0.0356747133521254, + "acc_norm": 0.5751295336787565, + "acc_norm_stderr": 0.0356747133521254 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.042270544512322, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.042270544512322 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5706422018348624, + "acc_stderr": 0.021222286397236504, + "acc_norm": 0.5706422018348624, + "acc_norm_stderr": 0.021222286397236504 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3412698412698413, + "acc_stderr": 0.04240799327574924, + "acc_norm": 0.3412698412698413, + "acc_norm_stderr": 0.04240799327574924 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4150326797385621, + "acc_stderr": 0.028213504177824093, + "acc_norm": 0.4150326797385621, + "acc_norm_stderr": 0.028213504177824093 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6446280991735537, + "acc_stderr": 0.04369236326573981, + "acc_norm": 0.6446280991735537, + "acc_norm_stderr": 0.04369236326573981 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.375, + "acc_stderr": 0.039397364351956274, + "acc_norm": 0.375, + "acc_norm_stderr": 0.039397364351956274 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4117647058823529, + "acc_stderr": 0.019910377463105932, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.019910377463105932 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 
0.3617021276595745, + "acc_stderr": 0.028663820147199492, + "acc_norm": 0.3617021276595745, + "acc_norm_stderr": 0.028663820147199492 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.32142857142857145, + "acc_stderr": 0.044328040552915185, + "acc_norm": 0.32142857142857145, + "acc_norm_stderr": 0.044328040552915185 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2916666666666667, + "acc_stderr": 0.030998666304560524, + "acc_norm": 0.2916666666666667, + "acc_norm_stderr": 0.030998666304560524 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.049999999999999996, + "acc_norm": 0.45, + "acc_norm_stderr": 0.049999999999999996 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.33455882352941174, + "acc_stderr": 0.028661996202335317, + "acc_norm": 0.33455882352941174, + "acc_norm_stderr": 0.028661996202335317 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.37142857142857144, + "acc_stderr": 0.030932858792789838, + "acc_norm": 0.37142857142857144, + "acc_norm_stderr": 0.030932858792789838 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6835443037974683, + "acc_stderr": 0.03027497488021897, + "acc_norm": 0.6835443037974683, + "acc_norm_stderr": 0.03027497488021897 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.36571056062581486, + "acc_stderr": 0.012301028188840568, + "acc_norm": 0.36571056062581486, + "acc_norm_stderr": 0.012301028188840568 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5098039215686274, + "acc_stderr": 0.03508637358630572, + "acc_norm": 0.5098039215686274, + 
"acc_norm_stderr": 0.03508637358630572 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5757575757575758, + "acc_stderr": 0.03859268142070265, + "acc_norm": 0.5757575757575758, + "acc_norm_stderr": 0.03859268142070265 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.37454100367197063, + "mc1_stderr": 0.016943535128405345, + "mc2": 0.5355134469215413, + "mc2_stderr": 0.01601794771276579 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4462809917355372, + "acc_stderr": 0.017090852631668332, + "acc_norm": 0.500590318772137, + "acc_norm_stderr": 0.01719034212344859 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + 
"harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "mncai/llama2-13b-dpo-v3", + "model_sha": "6dbdd5c708e7b7cc22552e620ad976f24d08beac", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/mncai/llama2-13b-dpo-v4/result_2023-12-05 02:27:45.json b/mncai/llama2-13b-dpo-v4/result_2023-12-05 02:27:45.json new file mode 100644 index 0000000000000000000000000000000000000000..63563b8b9e6bc3d82585d83eb66f3902b5709013 --- /dev/null +++ b/mncai/llama2-13b-dpo-v4/result_2023-12-05 02:27:45.json @@ -0,0 +1,444 @@ +{ + "results": { + 
"harness|ko_arc_challenge|25": { + "acc": 0.4189419795221843, + "acc_stderr": 0.014418106953639013, + "acc_norm": 0.48208191126279865, + "acc_norm_stderr": 0.014602005585490983 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4475204142601075, + "acc_stderr": 0.0049622205125483595, + "acc_norm": 0.6004779924317865, + "acc_norm_stderr": 0.004887991225950279 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5672514619883041, + "acc_stderr": 0.03799978644370607, + "acc_norm": 0.5672514619883041, + "acc_norm_stderr": 0.03799978644370607 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5048543689320388, + "acc_stderr": 0.049505043821289195, + "acc_norm": 0.5048543689320388, + "acc_norm_stderr": 0.049505043821289195 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5606641123882503, + "acc_stderr": 0.0177478742456836, + "acc_norm": 0.5606641123882503, + "acc_norm_stderr": 0.0177478742456836 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.5037037037037037, + "acc_stderr": 0.043192236258113324, + "acc_norm": 0.5037037037037037, + "acc_norm_stderr": 0.043192236258113324 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421255, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421255 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39148936170212767, + "acc_stderr": 0.03190701242326812, + "acc_norm": 0.39148936170212767, + "acc_norm_stderr": 0.03190701242326812 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.43373493975903615, + "acc_stderr": 0.03858158940685515, + "acc_norm": 0.43373493975903615, + "acc_norm_stderr": 0.03858158940685515 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5305466237942122, + "acc_stderr": 0.028345045864840622, + "acc_norm": 0.5305466237942122, + "acc_norm_stderr": 0.028345045864840622 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5112107623318386, + "acc_stderr": 0.033549366530984746, + "acc_norm": 0.5112107623318386, + "acc_norm_stderr": 0.033549366530984746 + 
}, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.46564885496183206, + "acc_stderr": 0.04374928560599738, + "acc_norm": 0.46564885496183206, + "acc_norm_stderr": 0.04374928560599738 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5353535353535354, + "acc_stderr": 0.03553436368828064, + "acc_norm": 0.5353535353535354, + "acc_norm_stderr": 0.03553436368828064 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.46206896551724136, + "acc_stderr": 0.041546596717075474, + "acc_norm": 0.46206896551724136, + "acc_norm_stderr": 0.041546596717075474 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.16666666666666666, + "acc_stderr": 0.03708284662416544, + "acc_norm": 0.16666666666666666, + "acc_norm_stderr": 0.03708284662416544 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.48739495798319327, + "acc_stderr": 0.032468167657521745, + "acc_norm": 0.48739495798319327, + "acc_norm_stderr": 0.032468167657521745 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4564102564102564, + "acc_stderr": 0.02525448542479961, + "acc_norm": 0.4564102564102564, + "acc_norm_stderr": 0.02525448542479961 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.04832853553437056, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.04832853553437056 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.37438423645320196, + "acc_stderr": 0.03405155380561953, + "acc_norm": 0.37438423645320196, + 
"acc_norm_stderr": 0.03405155380561953 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.44193548387096776, + "acc_stderr": 0.02825155790684974, + "acc_norm": 0.44193548387096776, + "acc_norm_stderr": 0.02825155790684974 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6581196581196581, + "acc_stderr": 0.03107502852650776, + "acc_norm": 0.6581196581196581, + "acc_norm_stderr": 0.03107502852650776 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4226415094339623, + "acc_stderr": 0.03040233144576954, + "acc_norm": 0.4226415094339623, + "acc_norm_stderr": 0.03040233144576954 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5181818181818182, + "acc_stderr": 0.04785964010794916, + "acc_norm": 0.5181818181818182, + "acc_norm_stderr": 0.04785964010794916 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.27037037037037037, + "acc_stderr": 0.027080372815145658, + "acc_norm": 0.27037037037037037, + "acc_norm_stderr": 0.027080372815145658 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.32450331125827814, + "acc_stderr": 0.038227469376587525, + "acc_norm": 0.32450331125827814, + "acc_norm_stderr": 0.038227469376587525 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5870646766169154, + "acc_stderr": 0.03481520803367348, + "acc_norm": 0.5870646766169154, + "acc_norm_stderr": 0.03481520803367348 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.41040462427745666, + "acc_stderr": 0.03750757044895538, + "acc_norm": 0.41040462427745666, + "acc_norm_stderr": 0.03750757044895538 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.023809523809523867, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.023809523809523867 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3819444444444444, + "acc_stderr": 0.040629907841466674, + "acc_norm": 0.3819444444444444, + "acc_norm_stderr": 0.040629907841466674 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 
0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.67, + "acc_stderr": 0.047258156262526066, + "acc_norm": 0.67, + "acc_norm_stderr": 0.047258156262526066 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5520231213872833, + "acc_stderr": 0.02677299065336182, + "acc_norm": 0.5520231213872833, + "acc_norm_stderr": 0.02677299065336182 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.558282208588957, + "acc_stderr": 0.03901591825836184, + "acc_norm": 0.558282208588957, + "acc_norm_stderr": 0.03901591825836184 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5092592592592593, + "acc_stderr": 0.027815973433878014, + "acc_norm": 0.5092592592592593, + "acc_norm_stderr": 0.027815973433878014 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5803108808290155, + "acc_stderr": 0.03561587327685884, + "acc_norm": 0.5803108808290155, + "acc_norm_stderr": 0.03561587327685884 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.04049339297748142, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.04049339297748142 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.581651376146789, + "acc_stderr": 0.021149548596443878, + "acc_norm": 0.581651376146789, + "acc_norm_stderr": 0.021149548596443878 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.29365079365079366, + "acc_stderr": 0.040735243221471255, + "acc_norm": 0.29365079365079366, + "acc_norm_stderr": 0.040735243221471255 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.02843109544417664, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.02843109544417664 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 
0.41, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.41, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6776859504132231, + "acc_stderr": 0.042664163633521685, + "acc_norm": 0.6776859504132231, + "acc_norm_stderr": 0.042664163633521685 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40131578947368424, + "acc_stderr": 0.03988903703336285, + "acc_norm": 0.40131578947368424, + "acc_norm_stderr": 0.03988903703336285 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4150326797385621, + "acc_stderr": 0.01993362777685741, + "acc_norm": 0.4150326797385621, + "acc_norm_stderr": 0.01993362777685741 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3617021276595745, + "acc_stderr": 0.028663820147199495, + "acc_norm": 0.3617021276595745, + "acc_norm_stderr": 0.028663820147199495 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3125, + "acc_stderr": 0.043994650575715215, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.043994650575715215 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3287037037037037, + "acc_stderr": 0.03203614084670058, + "acc_norm": 0.3287037037037037, + "acc_norm_stderr": 0.03203614084670058 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24916201117318434, + "acc_stderr": 0.01446589382985994, + "acc_norm": 0.24916201117318434, + "acc_norm_stderr": 0.01446589382985994 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3713235294117647, + "acc_stderr": 0.02934980313976587, + "acc_norm": 0.3713235294117647, + "acc_norm_stderr": 0.02934980313976587 + }, + "harness|ko_mmlu_security_studies|5": 
{ + "acc": 0.5183673469387755, + "acc_stderr": 0.03198761546763127, + "acc_norm": 0.5183673469387755, + "acc_norm_stderr": 0.03198761546763127 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.679324894514768, + "acc_stderr": 0.03038193194999041, + "acc_norm": 0.679324894514768, + "acc_norm_stderr": 0.03038193194999041 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.35071707953063885, + "acc_stderr": 0.012187773370741518, + "acc_norm": 0.35071707953063885, + "acc_norm_stderr": 0.012187773370741518 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5392156862745098, + "acc_stderr": 0.03498501649369527, + "acc_norm": 0.5392156862745098, + "acc_norm_stderr": 0.03498501649369527 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6121212121212121, + "acc_stderr": 0.0380491365397101, + "acc_norm": 0.6121212121212121, + "acc_norm_stderr": 0.0380491365397101 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3292533659730722, + "mc1_stderr": 0.016451264440068242, + "mc2": 0.50360676511002, + "mc2_stderr": 0.015375083858045636 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.49586776859504134, + "acc_stderr": 0.01718976703213082, + "acc_norm": 0.5596221959858324, + "acc_norm_stderr": 0.01706769977431297 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + 
"harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + 
"harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "mncai/llama2-13b-dpo-v4", + "model_sha": "4be900561f9dc8c16a2f26f5ebfa6c31ac35fd3e", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/mncai/llama2-13b-dpo-v6/result_2023-12-11 14:18:02.json b/mncai/llama2-13b-dpo-v6/result_2023-12-11 14:18:02.json new file mode 100644 index 0000000000000000000000000000000000000000..d19de8d4a912d86454427e99ae2aa9fcf5ad3930 --- /dev/null +++ b/mncai/llama2-13b-dpo-v6/result_2023-12-11 14:18:02.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4308873720136519, + "acc_stderr": 0.014471133392642463, + "acc_norm": 0.4854948805460751, + "acc_norm_stderr": 0.014605241081370056 + }, + "harness|ko_hellaswag|10": { + "acc": 0.43835889265086636, + "acc_stderr": 0.0049517176220079786, + "acc_norm": 0.5828520215096594, + "acc_norm_stderr": 0.004920800313232744 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5789473684210527, + "acc_stderr": 0.037867207062342145, + "acc_norm": 0.5789473684210527, + "acc_norm_stderr": 0.037867207062342145 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5145631067961165, + "acc_stderr": 0.04948637324026637, + "acc_norm": 0.5145631067961165, + "acc_norm_stderr": 0.04948637324026637 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5517241379310345, + "acc_stderr": 0.01778403453499242, + "acc_norm": 0.5517241379310345, + "acc_norm_stderr": 0.01778403453499242 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.043163785995113245, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.043163785995113245 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768078 + }, + 
"harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4085106382978723, + "acc_stderr": 0.03213418026701576, + "acc_norm": 0.4085106382978723, + "acc_norm_stderr": 0.03213418026701576 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.41566265060240964, + "acc_stderr": 0.03836722176598053, + "acc_norm": 0.41566265060240964, + "acc_norm_stderr": 0.03836722176598053 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5337620578778135, + "acc_stderr": 0.02833327710956279, + "acc_norm": 0.5337620578778135, + "acc_norm_stderr": 0.02833327710956279 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.515695067264574, + "acc_stderr": 0.0335412657542081, + "acc_norm": 0.515695067264574, + "acc_norm_stderr": 0.0335412657542081 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4351145038167939, + "acc_stderr": 0.04348208051644858, + "acc_norm": 0.4351145038167939, + "acc_norm_stderr": 0.04348208051644858 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.41, + "acc_stderr": 0.04943110704237103, + "acc_norm": 0.41, + "acc_norm_stderr": 0.04943110704237103 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5909090909090909, + "acc_stderr": 0.03502975799413008, + "acc_norm": 0.5909090909090909, + "acc_norm_stderr": 0.03502975799413008 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4068965517241379, + "acc_stderr": 0.040937939812662374, + "acc_norm": 0.4068965517241379, + "acc_norm_stderr": 0.040937939812662374 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.04023382273617749, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.04023382273617749 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.47058823529411764, + "acc_stderr": 0.03242225027115007, + "acc_norm": 0.47058823529411764, + "acc_norm_stderr": 0.03242225027115007 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.025294608023986476, + "acc_norm": 
0.4666666666666667, + "acc_norm_stderr": 0.025294608023986476 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5277777777777778, + "acc_stderr": 0.04826217294139894, + "acc_norm": 0.5277777777777778, + "acc_norm_stderr": 0.04826217294139894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39408866995073893, + "acc_stderr": 0.03438157967036543, + "acc_norm": 0.39408866995073893, + "acc_norm_stderr": 0.03438157967036543 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.44516129032258067, + "acc_stderr": 0.028272410186214906, + "acc_norm": 0.44516129032258067, + "acc_norm_stderr": 0.028272410186214906 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6410256410256411, + "acc_stderr": 0.03142616993791924, + "acc_norm": 0.6410256410256411, + "acc_norm_stderr": 0.03142616993791924 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4188679245283019, + "acc_stderr": 0.0303650508291152, + "acc_norm": 0.4188679245283019, + "acc_norm_stderr": 0.0303650508291152 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5545454545454546, + "acc_stderr": 0.047605488214603246, + "acc_norm": 0.5545454545454546, + "acc_norm_stderr": 0.047605488214603246 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24074074074074073, + "acc_stderr": 0.026067159222275794, + "acc_norm": 0.24074074074074073, + "acc_norm_stderr": 0.026067159222275794 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2781456953642384, + "acc_stderr": 0.036586032627637426, + "acc_norm": 0.2781456953642384, + "acc_norm_stderr": 0.036586032627637426 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.582089552238806, + "acc_stderr": 
0.034875586404620636, + "acc_norm": 0.582089552238806, + "acc_norm_stderr": 0.034875586404620636 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3815028901734104, + "acc_stderr": 0.03703851193099521, + "acc_norm": 0.3815028901734104, + "acc_norm_stderr": 0.03703851193099521 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.023919984164047736, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.023919984164047736 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.375, + "acc_stderr": 0.04048439222695598, + "acc_norm": 0.375, + "acc_norm_stderr": 0.04048439222695598 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.62, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.62, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.546242774566474, + "acc_stderr": 0.026803720583206184, + "acc_norm": 0.546242774566474, + "acc_norm_stderr": 0.026803720583206184 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5398773006134969, + "acc_stderr": 0.03915857291436972, + "acc_norm": 0.5398773006134969, + "acc_norm_stderr": 0.03915857291436972 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.49382716049382713, + "acc_stderr": 0.02781862396258329, + "acc_norm": 0.49382716049382713, + "acc_norm_stderr": 0.02781862396258329 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5803108808290155, + "acc_stderr": 0.035615873276858834, + "acc_norm": 0.5803108808290155, + "acc_norm_stderr": 0.035615873276858834 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 
0.040969851398436695, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.040969851398436695 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5834862385321101, + "acc_stderr": 0.021136376504030874, + "acc_norm": 0.5834862385321101, + "acc_norm_stderr": 0.021136376504030874 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.31746031746031744, + "acc_stderr": 0.04163453031302859, + "acc_norm": 0.31746031746031744, + "acc_norm_stderr": 0.04163453031302859 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3954248366013072, + "acc_stderr": 0.027996723180631466, + "acc_norm": 0.3954248366013072, + "acc_norm_stderr": 0.027996723180631466 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6776859504132231, + "acc_stderr": 0.04266416363352168, + "acc_norm": 0.6776859504132231, + "acc_norm_stderr": 0.04266416363352168 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.39473684210526316, + "acc_stderr": 0.039777499346220734, + "acc_norm": 0.39473684210526316, + "acc_norm_stderr": 0.039777499346220734 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4215686274509804, + "acc_stderr": 0.01997742260022747, + "acc_norm": 0.4215686274509804, + "acc_norm_stderr": 0.01997742260022747 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.36524822695035464, + "acc_stderr": 0.028723863853281285, + "acc_norm": 0.36524822695035464, + "acc_norm_stderr": 0.028723863853281285 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.30357142857142855, + "acc_stderr": 0.04364226155841044, + "acc_norm": 0.30357142857142855, + "acc_norm_stderr": 0.04364226155841044 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.33796296296296297, + "acc_stderr": 0.03225941352631295, + "acc_norm": 0.33796296296296297, + "acc_norm_stderr": 0.03225941352631295 + }, + 
"harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3786764705882353, + "acc_stderr": 0.029465133639776122, + "acc_norm": 0.3786764705882353, + "acc_norm_stderr": 0.029465133639776122 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.031680911612338825, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.031680911612338825 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6286919831223629, + "acc_stderr": 0.03145068600744859, + "acc_norm": 0.6286919831223629, + "acc_norm_stderr": 0.03145068600744859 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.35984354628422427, + "acc_stderr": 0.0122582604836898, + "acc_norm": 0.35984354628422427, + "acc_norm_stderr": 0.0122582604836898 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5294117647058824, + "acc_stderr": 0.03503235296367993, + "acc_norm": 0.5294117647058824, + "acc_norm_stderr": 0.03503235296367993 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5696969696969697, + "acc_stderr": 0.03866225962879077, + "acc_norm": 0.5696969696969697, + "acc_norm_stderr": 0.03866225962879077 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.33414932680538556, + "mc1_stderr": 0.016512530677150524, + "mc2": 0.49948963054555756, + "mc2_stderr": 0.01566865586505939 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4935064935064935, + "acc_stderr": 0.01718890435907731, + "acc_norm": 0.5631641086186541, 
+ "acc_norm_stderr": 0.017052633559856065 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 
1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "mncai/llama2-13b-dpo-v6", + "model_sha": "56f7d5a1b8f6ce6587cc10ff4ba335755bc062f8", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/mncai/llama2-13b-dpo-v7/result_2023-12-13 01:47:38.json b/mncai/llama2-13b-dpo-v7/result_2023-12-13 01:47:38.json new file mode 100644 index 0000000000000000000000000000000000000000..23beca144c4ea1eb6cad61fa0397945cb2578d7c --- /dev/null +++ b/mncai/llama2-13b-dpo-v7/result_2023-12-13 01:47:38.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4453924914675768, + "acc_stderr": 0.014523987638344086, + "acc_norm": 0.49658703071672355, + "acc_norm_stderr": 0.014611050403244081 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4458275243975304, + "acc_stderr": 0.004960408362133238, + "acc_norm": 0.5933081059549891, + "acc_norm_stderr": 0.0049021253880022035 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5087719298245614, + "acc_stderr": 0.03834234744164993, + "acc_norm": 0.5087719298245614, + "acc_norm_stderr": 0.03834234744164993 + }, + "harness|ko_mmlu_management|5": { 
+ "acc": 0.5436893203883495, + "acc_stderr": 0.049318019942204146, + "acc_norm": 0.5436893203883495, + "acc_norm_stderr": 0.049318019942204146 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5530012771392082, + "acc_stderr": 0.01777922523339422, + "acc_norm": 0.5530012771392082, + "acc_norm_stderr": 0.01777922523339422 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4740740740740741, + "acc_stderr": 0.04313531696750573, + "acc_norm": 0.4740740740740741, + "acc_norm_stderr": 0.04313531696750573 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.42127659574468085, + "acc_stderr": 0.03227834510146268, + "acc_norm": 0.42127659574468085, + "acc_norm_stderr": 0.03227834510146268 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42771084337349397, + "acc_stderr": 0.038515976837185335, + "acc_norm": 0.42771084337349397, + "acc_norm_stderr": 0.038515976837185335 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5369774919614148, + "acc_stderr": 0.028320325830105915, + "acc_norm": 0.5369774919614148, + "acc_norm_stderr": 0.028320325830105915 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5381165919282511, + "acc_stderr": 0.033460150119732274, + "acc_norm": 0.5381165919282511, + "acc_norm_stderr": 0.033460150119732274 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48091603053435117, + "acc_stderr": 0.04382094705550989, + "acc_norm": 0.48091603053435117, + "acc_norm_stderr": 0.04382094705550989 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6111111111111112, + "acc_stderr": 0.0347327959083696, + "acc_norm": 0.6111111111111112, + "acc_norm_stderr": 0.0347327959083696 + }, + 
"harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4068965517241379, + "acc_stderr": 0.040937939812662374, + "acc_norm": 0.4068965517241379, + "acc_norm_stderr": 0.040937939812662374 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237655, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237655 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4789915966386555, + "acc_stderr": 0.03244980849990029, + "acc_norm": 0.4789915966386555, + "acc_norm_stderr": 0.03244980849990029 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4846153846153846, + "acc_stderr": 0.025339003010106522, + "acc_norm": 0.4846153846153846, + "acc_norm_stderr": 0.025339003010106522 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5370370370370371, + "acc_stderr": 0.04820403072760627, + "acc_norm": 0.5370370370370371, + "acc_norm_stderr": 0.04820403072760627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39901477832512317, + "acc_stderr": 0.03445487686264715, + "acc_norm": 0.39901477832512317, + "acc_norm_stderr": 0.03445487686264715 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.45161290322580644, + "acc_stderr": 0.02831050034856839, + "acc_norm": 0.45161290322580644, + "acc_norm_stderr": 0.02831050034856839 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6538461538461539, + "acc_stderr": 0.031166957367235903, + "acc_norm": 0.6538461538461539, + "acc_norm_stderr": 0.031166957367235903 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.47924528301886793, + "acc_stderr": 0.030746349975723456, + "acc_norm": 
0.47924528301886793, + "acc_norm_stderr": 0.030746349975723456 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5545454545454546, + "acc_stderr": 0.047605488214603246, + "acc_norm": 0.5545454545454546, + "acc_norm_stderr": 0.047605488214603246 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.27037037037037037, + "acc_stderr": 0.027080372815145658, + "acc_norm": 0.27037037037037037, + "acc_norm_stderr": 0.027080372815145658 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + "acc_stderr": 0.03684881521389024, + "acc_norm": 0.2847682119205298, + "acc_norm_stderr": 0.03684881521389024 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5870646766169154, + "acc_stderr": 0.03481520803367348, + "acc_norm": 0.5870646766169154, + "acc_norm_stderr": 0.03481520803367348 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4277456647398844, + "acc_stderr": 0.03772446857518027, + "acc_norm": 0.4277456647398844, + "acc_norm_stderr": 0.03772446857518027 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.31746031746031744, + "acc_stderr": 0.023973861998992062, + "acc_norm": 0.31746031746031744, + "acc_norm_stderr": 0.023973861998992062 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.375, + "acc_stderr": 0.04048439222695598, + "acc_norm": 0.375, + "acc_norm_stderr": 0.04048439222695598 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.65, + "acc_stderr": 0.04793724854411019, + "acc_norm": 0.65, + "acc_norm_stderr": 0.04793724854411019 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5375722543352601, + "acc_stderr": 0.026842985519615375, + "acc_norm": 0.5375722543352601, + "acc_norm_stderr": 0.026842985519615375 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5521472392638037, + "acc_stderr": 0.03906947479456608, + 
"acc_norm": 0.5521472392638037, + "acc_norm_stderr": 0.03906947479456608 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4691358024691358, + "acc_stderr": 0.027767689606833925, + "acc_norm": 0.4691358024691358, + "acc_norm_stderr": 0.027767689606833925 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5906735751295337, + "acc_stderr": 0.03548608168860806, + "acc_norm": 0.5906735751295337, + "acc_norm_stderr": 0.03548608168860806 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.0404933929774814, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.0404933929774814 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6128440366972477, + "acc_stderr": 0.02088423199264345, + "acc_norm": 0.6128440366972477, + "acc_norm_stderr": 0.02088423199264345 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.31746031746031744, + "acc_stderr": 0.0416345303130286, + "acc_norm": 0.31746031746031744, + "acc_norm_stderr": 0.0416345303130286 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4084967320261438, + "acc_stderr": 0.028146405993096358, + "acc_norm": 0.4084967320261438, + "acc_norm_stderr": 0.028146405993096358 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6611570247933884, + "acc_stderr": 0.04320767807536669, + "acc_norm": 0.6611570247933884, + "acc_norm_stderr": 0.04320767807536669 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40789473684210525, + "acc_stderr": 0.03999309712777472, + "acc_norm": 0.40789473684210525, + "acc_norm_stderr": 0.03999309712777472 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4215686274509804, + 
"acc_stderr": 0.01997742260022747, + "acc_norm": 0.4215686274509804, + "acc_norm_stderr": 0.01997742260022747 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3617021276595745, + "acc_stderr": 0.028663820147199502, + "acc_norm": 0.3617021276595745, + "acc_norm_stderr": 0.028663820147199502 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.30357142857142855, + "acc_stderr": 0.04364226155841044, + "acc_norm": 0.30357142857142855, + "acc_norm_stderr": 0.04364226155841044 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.375, + "acc_stderr": 0.033016908987210894, + "acc_norm": 0.375, + "acc_norm_stderr": 0.033016908987210894 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4117647058823529, + "acc_stderr": 0.02989616303312548, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.02989616303312548 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4204081632653061, + "acc_stderr": 0.03160106993449604, + "acc_norm": 0.4204081632653061, + "acc_norm_stderr": 0.03160106993449604 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6413502109704642, + "acc_stderr": 0.031219569445301847, + "acc_norm": 0.6413502109704642, + "acc_norm_stderr": 0.031219569445301847 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3748370273794003, + "acc_stderr": 0.012363652467551924, + "acc_norm": 0.3748370273794003, + "acc_norm_stderr": 0.012363652467551924 + }, + 
"harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5245098039215687, + "acc_stderr": 0.03505093194348798, + "acc_norm": 0.5245098039215687, + "acc_norm_stderr": 0.03505093194348798 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5333333333333333, + "acc_stderr": 0.03895658065271847, + "acc_norm": 0.5333333333333333, + "acc_norm_stderr": 0.03895658065271847 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3488372093023256, + "mc1_stderr": 0.01668441985998688, + "mc2": 0.5096219872166472, + "mc2_stderr": 0.015864379577843746 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4911452184179457, + "acc_stderr": 0.017187658199336733, + "acc_norm": 0.5478158205430933, + "acc_norm_stderr": 0.017111567130916792 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + 
"harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "mncai/llama2-13b-dpo-v7", + "model_sha": "bc6e1316dbe8f6530eee9850f42b63c6a38fe379", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/mncai/llama2-7b-dpo-v1/result_2023-12-02 10:33:57.json b/mncai/llama2-7b-dpo-v1/result_2023-12-02 10:33:57.json new file mode 100644 index 0000000000000000000000000000000000000000..6f038978dcf296977d73b6c7eb3612d28dc8a20d --- 
/dev/null +++ b/mncai/llama2-7b-dpo-v1/result_2023-12-02 10:33:57.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2883959044368601, + "acc_stderr": 0.01323839442242817, + "acc_norm": 0.3267918088737201, + "acc_norm_stderr": 0.013706665975587331 + }, + "harness|ko_hellaswag|10": { + "acc": 0.34843656642103166, + "acc_stderr": 0.004755013243022123, + "acc_norm": 0.4192391953794065, + "acc_norm_stderr": 0.004924261467934419 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5029239766081871, + "acc_stderr": 0.03834759370936839, + "acc_norm": 0.5029239766081871, + "acc_norm_stderr": 0.03834759370936839 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4077669902912621, + "acc_stderr": 0.048657775704107696, + "acc_norm": 0.4077669902912621, + "acc_norm_stderr": 0.048657775704107696 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.38058748403575987, + "acc_stderr": 0.017362564126075418, + "acc_norm": 0.38058748403575987, + "acc_norm_stderr": 0.017362564126075418 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04072314811876837, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04072314811876837 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.33617021276595743, + "acc_stderr": 0.030881618520676942, + "acc_norm": 0.33617021276595743, + "acc_norm_stderr": 0.030881618520676942 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3072289156626506, + "acc_stderr": 0.035915667978246635, + "acc_norm": 0.3072289156626506, + "acc_norm_stderr": 0.035915667978246635 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3633440514469453, + "acc_stderr": 0.027316847674192707, + "acc_norm": 0.3633440514469453, + "acc_norm_stderr": 0.027316847674192707 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3542600896860987, + 
"acc_stderr": 0.032100621541349864, + "acc_norm": 0.3542600896860987, + "acc_norm_stderr": 0.032100621541349864 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3816793893129771, + "acc_stderr": 0.042607351576445594, + "acc_norm": 0.3816793893129771, + "acc_norm_stderr": 0.042607351576445594 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3939393939393939, + "acc_stderr": 0.034812853382329624, + "acc_norm": 0.3939393939393939, + "acc_norm_stderr": 0.034812853382329624 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.33793103448275863, + "acc_stderr": 0.039417076320648906, + "acc_norm": 0.33793103448275863, + "acc_norm_stderr": 0.039417076320648906 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237653, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237653 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4117647058823529, + "acc_stderr": 0.031968769891957786, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.031968769891957786 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4153846153846154, + "acc_stderr": 0.02498535492310233, + "acc_norm": 0.4153846153846154, + "acc_norm_stderr": 0.02498535492310233 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.23, + "acc_stderr": 0.042295258468165044, + "acc_norm": 0.23, + "acc_norm_stderr": 0.042295258468165044 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.39814814814814814, + "acc_stderr": 0.04732332615978814, + "acc_norm": 0.39814814814814814, + "acc_norm_stderr": 0.04732332615978814 + }, + "harness|ko_mmlu_high_school_chemistry|5": 
{ + "acc": 0.3497536945812808, + "acc_stderr": 0.03355400904969565, + "acc_norm": 0.3497536945812808, + "acc_norm_stderr": 0.03355400904969565 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4129032258064516, + "acc_stderr": 0.028009138125400387, + "acc_norm": 0.4129032258064516, + "acc_norm_stderr": 0.028009138125400387 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5598290598290598, + "acc_stderr": 0.032520741720630506, + "acc_norm": 0.5598290598290598, + "acc_norm_stderr": 0.032520741720630506 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4339622641509434, + "acc_stderr": 0.030503292013342582, + "acc_norm": 0.4339622641509434, + "acc_norm_stderr": 0.030503292013342582 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.37272727272727274, + "acc_stderr": 0.04631381319425463, + "acc_norm": 0.37272727272727274, + "acc_norm_stderr": 0.04631381319425463 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.29259259259259257, + "acc_stderr": 0.02773896963217609, + "acc_norm": 0.29259259259259257, + "acc_norm_stderr": 0.02773896963217609 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.03757949922943343, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.03757949922943343 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5024875621890548, + "acc_stderr": 0.03535490150137289, + "acc_norm": 0.5024875621890548, + "acc_norm_stderr": 0.03535490150137289 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3872832369942196, + "acc_stderr": 0.03714325906302065, + "acc_norm": 0.3872832369942196, + "acc_norm_stderr": 0.03714325906302065 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.24867724867724866, + "acc_stderr": 0.02226181769240016, + "acc_norm": 0.24867724867724866, + "acc_norm_stderr": 0.02226181769240016 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.037455547914624555, + "acc_norm": 0.2777777777777778, 
+ "acc_norm_stderr": 0.037455547914624555 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4046242774566474, + "acc_stderr": 0.02642481659400985, + "acc_norm": 0.4046242774566474, + "acc_norm_stderr": 0.02642481659400985 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4233128834355828, + "acc_stderr": 0.03881891213334383, + "acc_norm": 0.4233128834355828, + "acc_norm_stderr": 0.03881891213334383 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.38271604938271603, + "acc_stderr": 0.027044538138402612, + "acc_norm": 0.38271604938271603, + "acc_norm_stderr": 0.027044538138402612 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.41450777202072536, + "acc_stderr": 0.03555300319557672, + "acc_norm": 0.41450777202072536, + "acc_norm_stderr": 0.03555300319557672 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.04142439719489361, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.04142439719489361 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3889908256880734, + "acc_stderr": 0.020902300887392863, + "acc_norm": 0.3889908256880734, + "acc_norm_stderr": 0.020902300887392863 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3253968253968254, + "acc_stderr": 0.04190596438871137, + "acc_norm": 0.3253968253968254, + "acc_norm_stderr": 0.04190596438871137 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4019607843137255, + "acc_stderr": 0.028074158947600653, + "acc_norm": 
0.4019607843137255, + "acc_norm_stderr": 0.028074158947600653 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956913, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956913 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5289256198347108, + "acc_stderr": 0.04556710331269498, + "acc_norm": 0.5289256198347108, + "acc_norm_stderr": 0.04556710331269498 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40131578947368424, + "acc_stderr": 0.03988903703336285, + "acc_norm": 0.40131578947368424, + "acc_norm_stderr": 0.03988903703336285 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.018433427649401906, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.018433427649401906 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2978723404255319, + "acc_stderr": 0.027281608344469414, + "acc_norm": 0.2978723404255319, + "acc_norm_stderr": 0.027281608344469414 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.29464285714285715, + "acc_stderr": 0.04327040932578728, + "acc_norm": 0.29464285714285715, + "acc_norm_stderr": 0.04327040932578728 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.033247089118091176, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.033247089118091176 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.28044692737430166, + "acc_stderr": 0.01502408388332287, + "acc_norm": 0.28044692737430166, + "acc_norm_stderr": 0.01502408388332287 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.34558823529411764, + 
"acc_stderr": 0.02888819310398865, + "acc_norm": 0.34558823529411764, + "acc_norm_stderr": 0.02888819310398865 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.44081632653061226, + "acc_stderr": 0.03178419114175364, + "acc_norm": 0.44081632653061226, + "acc_norm_stderr": 0.03178419114175364 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.3291139240506329, + "acc_stderr": 0.030587326294702344, + "acc_norm": 0.3291139240506329, + "acc_norm_stderr": 0.030587326294702344 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.25749674054758803, + "acc_stderr": 0.01116770601490415, + "acc_norm": 0.25749674054758803, + "acc_norm_stderr": 0.01116770601490415 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.25, + "acc_stderr": 0.03039153369274154, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03039153369274154 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03225078108306289, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03225078108306289 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3157894736842105, + "mc1_stderr": 0.016272287957916944, + "mc2": 0.4875420781374341, + "mc2_stderr": 0.015912120755521442 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2798110979929162, + "acc_stderr": 0.01543371579542777, + "acc_norm": 0.33293978748524206, + "acc_norm_stderr": 0.01620243120837379 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, 
+ "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + 
"harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "mncai/llama2-7b-dpo-v1", + "model_sha": "01245798d4fa8d885b632cfeb5b1a369b73a8003", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/mncai/mistral-7b-ko-1871-2p1/result_2023-10-06 10:27:15.json b/mncai/mistral-7b-ko-1871-2p1/result_2023-10-06 10:27:15.json new file mode 100644 index 0000000000000000000000000000000000000000..b6cc01a81676fa6e1e0f7909c445a1c84efcdc79 --- /dev/null +++ b/mncai/mistral-7b-ko-1871-2p1/result_2023-10-06 10:27:15.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3250853242320819, + "acc_stderr": 0.013688147309729124, + "acc_norm": 0.3609215017064846, + "acc_norm_stderr": 0.01403476138617546 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3724357697669787, + "acc_stderr": 0.00482465540607556, + "acc_norm": 0.4759012148974308, + "acc_norm_stderr": 0.0049839823961873655 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5087719298245614, + "acc_stderr": 0.038342347441649924, + "acc_norm": 0.5087719298245614, + "acc_norm_stderr": 0.038342347441649924 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5436893203883495, + "acc_stderr": 0.049318019942204146, + "acc_norm": 0.5436893203883495, + "acc_norm_stderr": 0.049318019942204146 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4725415070242657, + "acc_stderr": 0.017852981266633955, + "acc_norm": 0.4725415070242657, + "acc_norm_stderr": 0.017852981266633955 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37777777777777777, + "acc_stderr": 0.04188307537595853, + "acc_norm": 0.37777777777777777, + "acc_norm_stderr": 0.04188307537595853 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 
0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4425531914893617, + "acc_stderr": 0.032469569197899575, + "acc_norm": 0.4425531914893617, + "acc_norm_stderr": 0.032469569197899575 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.41566265060240964, + "acc_stderr": 0.03836722176598052, + "acc_norm": 0.41566265060240964, + "acc_norm_stderr": 0.03836722176598052 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.48231511254019294, + "acc_stderr": 0.02838032284907713, + "acc_norm": 0.48231511254019294, + "acc_norm_stderr": 0.02838032284907713 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.39461883408071746, + "acc_stderr": 0.03280400504755292, + "acc_norm": 0.39461883408071746, + "acc_norm_stderr": 0.03280400504755292 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.45038167938931295, + "acc_stderr": 0.04363643698524779, + "acc_norm": 0.45038167938931295, + "acc_norm_stderr": 0.04363643698524779 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.035402943770953675, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.035402943770953675 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.47586206896551725, + "acc_stderr": 0.04161808503501528, + "acc_norm": 0.47586206896551725, + "acc_norm_stderr": 0.04161808503501528 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.043364327079931785, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.043364327079931785 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5042016806722689, + "acc_stderr": 0.0324773433444811, + "acc_norm": 0.5042016806722689, + "acc_norm_stderr": 0.0324773433444811 + }, + 
"harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.46153846153846156, + "acc_stderr": 0.025275892070240627, + "acc_norm": 0.46153846153846156, + "acc_norm_stderr": 0.025275892070240627 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4482758620689655, + "acc_stderr": 0.034991131376767445, + "acc_norm": 0.4482758620689655, + "acc_norm_stderr": 0.034991131376767445 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4774193548387097, + "acc_stderr": 0.028414985019707868, + "acc_norm": 0.4774193548387097, + "acc_norm_stderr": 0.028414985019707868 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6581196581196581, + "acc_stderr": 0.031075028526507755, + "acc_norm": 0.6581196581196581, + "acc_norm_stderr": 0.031075028526507755 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4528301886792453, + "acc_stderr": 0.030635627957961823, + "acc_norm": 0.4528301886792453, + "acc_norm_stderr": 0.030635627957961823 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.04769300568972744, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.04769300568972744 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.28888888888888886, + "acc_stderr": 0.027634907264178544, + "acc_norm": 0.28888888888888886, + "acc_norm_stderr": 0.027634907264178544 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.03780445850526733, + "acc_norm": 
0.31125827814569534, + "acc_norm_stderr": 0.03780445850526733 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6069651741293532, + "acc_stderr": 0.0345368246603156, + "acc_norm": 0.6069651741293532, + "acc_norm_stderr": 0.0345368246603156 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3815028901734104, + "acc_stderr": 0.03703851193099521, + "acc_norm": 0.3815028901734104, + "acc_norm_stderr": 0.03703851193099521 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.36243386243386244, + "acc_stderr": 0.024757473902752042, + "acc_norm": 0.36243386243386244, + "acc_norm_stderr": 0.024757473902752042 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3263888888888889, + "acc_stderr": 0.03921067198982266, + "acc_norm": 0.3263888888888889, + "acc_norm_stderr": 0.03921067198982266 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.43352601156069365, + "acc_stderr": 0.026680134761679217, + "acc_norm": 0.43352601156069365, + "acc_norm_stderr": 0.026680134761679217 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4233128834355828, + "acc_stderr": 0.03881891213334383, + "acc_norm": 0.4233128834355828, + "acc_norm_stderr": 0.03881891213334383 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4567901234567901, + "acc_stderr": 0.02771666165019404, + "acc_norm": 0.4567901234567901, + "acc_norm_stderr": 0.02771666165019404 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.44041450777202074, + "acc_stderr": 0.03582724530036094, + 
"acc_norm": 0.44041450777202074, + "acc_norm_stderr": 0.03582724530036094 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.30701754385964913, + "acc_stderr": 0.04339138322579859, + "acc_norm": 0.30701754385964913, + "acc_norm_stderr": 0.04339138322579859 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.47522935779816516, + "acc_stderr": 0.02141099975363592, + "acc_norm": 0.47522935779816516, + "acc_norm_stderr": 0.02141099975363592 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3253968253968254, + "acc_stderr": 0.04190596438871136, + "acc_norm": 0.3253968253968254, + "acc_norm_stderr": 0.04190596438871136 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.46405228758169936, + "acc_stderr": 0.02855582751652879, + "acc_norm": 0.46405228758169936, + "acc_norm_stderr": 0.02855582751652879 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6033057851239669, + "acc_stderr": 0.044658697805310094, + "acc_norm": 0.6033057851239669, + "acc_norm_stderr": 0.044658697805310094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4605263157894737, + "acc_stderr": 0.04056242252249033, + "acc_norm": 0.4605263157894737, + "acc_norm_stderr": 0.04056242252249033 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.01965992249362335, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.01965992249362335 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.35106382978723405, + "acc_stderr": 0.028473501272963768, + "acc_norm": 0.35106382978723405, + "acc_norm_stderr": 0.028473501272963768 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3125, + "acc_stderr": 0.043994650575715215, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.043994650575715215 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.42592592592592593, 
+ "acc_stderr": 0.03372343271653063, + "acc_norm": 0.42592592592592593, + "acc_norm_stderr": 0.03372343271653063 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.26927374301675977, + "acc_stderr": 0.014835616582882611, + "acc_norm": 0.26927374301675977, + "acc_norm_stderr": 0.014835616582882611 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.43014705882352944, + "acc_stderr": 0.030074971917302875, + "acc_norm": 0.43014705882352944, + "acc_norm_stderr": 0.030074971917302875 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5102040816326531, + "acc_stderr": 0.03200255347893783, + "acc_norm": 0.5102040816326531, + "acc_norm_stderr": 0.03200255347893783 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.4641350210970464, + "acc_stderr": 0.03246338898055659, + "acc_norm": 0.4641350210970464, + "acc_norm_stderr": 0.03246338898055659 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3135593220338983, + "acc_stderr": 0.011849234291459324, + "acc_norm": 0.3135593220338983, + "acc_norm_stderr": 0.011849234291459324 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.36764705882352944, + "acc_stderr": 0.03384132045674118, + "acc_norm": 0.36764705882352944, + "acc_norm_stderr": 0.03384132045674118 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3939393939393939, + "acc_stderr": 0.03815494308688929, + "acc_norm": 0.3939393939393939, + "acc_norm_stderr": 0.03815494308688929 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.31701346389228885, + "mc1_stderr": 0.016289203374403396, + "mc2": 0.4891689873387216, + "mc2_stderr": 0.015571905877884106 + }, + 
"harness|ko_commongen_v2|2": { + "acc": 0.4155844155844156, + "acc_stderr": 0.016943586313076568, + "acc_norm": 0.5053128689492326, + "acc_norm_stderr": 0.01718938362722971 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + 
"harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "mncai/mistral-7b-ko-1871-2p1", + "model_sha": "1ab1ccefadb9c3e832b4d2018cf0220974f998b3", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/mncai/mistral-7b-v5/result_2023-12-11 09:32:53.json b/mncai/mistral-7b-v5/result_2023-12-11 09:32:53.json new file mode 100644 index 0000000000000000000000000000000000000000..26f73fc187f94173f59c30f8b6589c99a3241dac --- /dev/null +++ b/mncai/mistral-7b-v5/result_2023-12-11 09:32:53.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4402730375426621, + "acc_stderr": 0.014506769524804244, + "acc_norm": 0.47952218430034127, + "acc_norm_stderr": 0.01459913135303501 + }, + "harness|ko_hellaswag|10": { + "acc": 0.40977892850029873, + "acc_stderr": 0.004907877144720013, + "acc_norm": 0.5423222465644294, + "acc_norm_stderr": 0.004971874159777691 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5614035087719298, + 
"acc_stderr": 0.038057975055904594, + "acc_norm": 0.5614035087719298, + "acc_norm_stderr": 0.038057975055904594 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6213592233009708, + "acc_stderr": 0.04802694698258974, + "acc_norm": 0.6213592233009708, + "acc_norm_stderr": 0.04802694698258974 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5708812260536399, + "acc_stderr": 0.017699388483126785, + "acc_norm": 0.5708812260536399, + "acc_norm_stderr": 0.017699388483126785 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37777777777777777, + "acc_stderr": 0.04188307537595853, + "acc_norm": 0.37777777777777777, + "acc_norm_stderr": 0.04188307537595853 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.43829787234042555, + "acc_stderr": 0.03243618636108102, + "acc_norm": 0.43829787234042555, + "acc_norm_stderr": 0.03243618636108102 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4578313253012048, + "acc_stderr": 0.03878626771002361, + "acc_norm": 0.4578313253012048, + "acc_norm_stderr": 0.03878626771002361 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4983922829581994, + "acc_stderr": 0.02839794490780661, + "acc_norm": 0.4983922829581994, + "acc_norm_stderr": 0.02839794490780661 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5291479820627802, + "acc_stderr": 0.03350073248773404, + "acc_norm": 0.5291479820627802, + "acc_norm_stderr": 0.03350073248773404 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5038167938931297, + "acc_stderr": 0.043851623256015534, + "acc_norm": 0.5038167938931297, + "acc_norm_stderr": 0.043851623256015534 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6262626262626263, + 
"acc_stderr": 0.034468977386593325, + "acc_norm": 0.6262626262626263, + "acc_norm_stderr": 0.034468977386593325 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4689655172413793, + "acc_stderr": 0.04158632762097828, + "acc_norm": 0.4689655172413793, + "acc_norm_stderr": 0.04158632762097828 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237656, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237656 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5084033613445378, + "acc_stderr": 0.03247390276569669, + "acc_norm": 0.5084033613445378, + "acc_norm_stderr": 0.03247390276569669 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.49743589743589745, + "acc_stderr": 0.025350672979412205, + "acc_norm": 0.49743589743589745, + "acc_norm_stderr": 0.025350672979412205 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4187192118226601, + "acc_stderr": 0.034711928605184676, + "acc_norm": 0.4187192118226601, + "acc_norm_stderr": 0.034711928605184676 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.49032258064516127, + "acc_stderr": 0.028438677998909558, + "acc_norm": 0.49032258064516127, + "acc_norm_stderr": 0.028438677998909558 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7435897435897436, + "acc_stderr": 0.028605953702004257, + "acc_norm": 0.7435897435897436, + "acc_norm_stderr": 0.028605953702004257 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 
0.49056603773584906, + "acc_stderr": 0.030767394707808086, + "acc_norm": 0.49056603773584906, + "acc_norm_stderr": 0.030767394707808086 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.29259259259259257, + "acc_stderr": 0.027738969632176088, + "acc_norm": 0.29259259259259257, + "acc_norm_stderr": 0.027738969632176088 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33774834437086093, + "acc_stderr": 0.03861557546255169, + "acc_norm": 0.33774834437086093, + "acc_norm_stderr": 0.03861557546255169 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6517412935323383, + "acc_stderr": 0.033687874661154596, + "acc_norm": 0.6517412935323383, + "acc_norm_stderr": 0.033687874661154596 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3930635838150289, + "acc_stderr": 0.037242495958177295, + "acc_norm": 0.3930635838150289, + "acc_norm_stderr": 0.037242495958177295 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.40476190476190477, + "acc_stderr": 0.025279850397404904, + "acc_norm": 0.40476190476190477, + "acc_norm_stderr": 0.025279850397404904 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4236111111111111, + "acc_stderr": 0.041321250197233685, + "acc_norm": 0.4236111111111111, + "acc_norm_stderr": 0.041321250197233685 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.65, + "acc_stderr": 0.04793724854411019, + "acc_norm": 0.65, + "acc_norm_stderr": 0.04793724854411019 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5549132947976878, + "acc_stderr": 0.02675625512966377, + "acc_norm": 0.5549132947976878, + "acc_norm_stderr": 0.02675625512966377 + }, + 
"harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5153374233128835, + "acc_stderr": 0.039265223787088424, + "acc_norm": 0.5153374233128835, + "acc_norm_stderr": 0.039265223787088424 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4845679012345679, + "acc_stderr": 0.02780749004427619, + "acc_norm": 0.4845679012345679, + "acc_norm_stderr": 0.02780749004427619 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5647668393782384, + "acc_stderr": 0.03578038165008586, + "acc_norm": 0.5647668393782384, + "acc_norm_stderr": 0.03578038165008586 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.32456140350877194, + "acc_stderr": 0.044045561573747664, + "acc_norm": 0.32456140350877194, + "acc_norm_stderr": 0.044045561573747664 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5412844036697247, + "acc_stderr": 0.02136412253388169, + "acc_norm": 0.5412844036697247, + "acc_norm_stderr": 0.02136412253388169 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.38095238095238093, + "acc_stderr": 0.043435254289490965, + "acc_norm": 0.38095238095238093, + "acc_norm_stderr": 0.043435254289490965 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.49673202614379086, + "acc_stderr": 0.02862930519400354, + "acc_norm": 0.49673202614379086, + "acc_norm_stderr": 0.02862930519400354 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6859504132231405, + "acc_stderr": 0.04236964753041017, + "acc_norm": 0.6859504132231405, + "acc_norm_stderr": 0.04236964753041017 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4605263157894737, + "acc_stderr": 0.04056242252249032, + "acc_norm": 0.4605263157894737, + "acc_norm_stderr": 
0.04056242252249032 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.39705882352941174, + "acc_stderr": 0.019794488900024106, + "acc_norm": 0.39705882352941174, + "acc_norm_stderr": 0.019794488900024106 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2978723404255319, + "acc_stderr": 0.027281608344469414, + "acc_norm": 0.2978723404255319, + "acc_norm_stderr": 0.027281608344469414 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.45535714285714285, + "acc_stderr": 0.04726835553719099, + "acc_norm": 0.45535714285714285, + "acc_norm_stderr": 0.04726835553719099 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.375, + "acc_stderr": 0.033016908987210894, + "acc_norm": 0.375, + "acc_norm_stderr": 0.033016908987210894 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.19888268156424582, + "acc_stderr": 0.013349892983092517, + "acc_norm": 0.19888268156424582, + "acc_norm_stderr": 0.013349892983092517 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.62, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.62, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.33455882352941174, + "acc_stderr": 0.028661996202335307, + "acc_norm": 0.33455882352941174, + "acc_norm_stderr": 0.028661996202335307 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5714285714285714, + "acc_stderr": 0.03168091161233882, + "acc_norm": 0.5714285714285714, + "acc_norm_stderr": 0.03168091161233882 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6582278481012658, + "acc_stderr": 0.030874537537553617, + "acc_norm": 0.6582278481012658, + "acc_norm_stderr": 0.030874537537553617 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.33702737940026073, + "acc_stderr": 
0.012072836273691327, + "acc_norm": 0.33702737940026073, + "acc_norm_stderr": 0.012072836273691327 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5343137254901961, + "acc_stderr": 0.03501038327635897, + "acc_norm": 0.5343137254901961, + "acc_norm_stderr": 0.03501038327635897 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5515151515151515, + "acc_stderr": 0.038835659779569286, + "acc_norm": 0.5515151515151515, + "acc_norm_stderr": 0.038835659779569286 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3182374541003672, + "mc1_stderr": 0.016305988648920598, + "mc2": 0.477954610435675, + "mc2_stderr": 0.015463026163904131 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.47461629279811096, + "acc_stderr": 0.017168187201429253, + "acc_norm": 0.5194805194805194, + "acc_norm_stderr": 0.017177301992342547 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + 
"harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "mncai/mistral-7b-v5", + "model_sha": "4cd578d40e01a31f3de057ac14d6dc999ffb77d6", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/mohomin123/M-DIE-M-10.7B/result_2024-01-10 07:53:57.json 
b/mohomin123/M-DIE-M-10.7B/result_2024-01-10 07:53:57.json new file mode 100644 index 0000000000000000000000000000000000000000..9569507bea40361a89078363d6a71161a151ba7d --- /dev/null +++ b/mohomin123/M-DIE-M-10.7B/result_2024-01-10 07:53:57.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.45563139931740615, + "acc_stderr": 0.014553749939306863, + "acc_norm": 0.5051194539249146, + "acc_norm_stderr": 0.014610624890309154 + }, + "harness|ko_hellaswag|10": { + "acc": 0.45140410276837284, + "acc_stderr": 0.004966158142645421, + "acc_norm": 0.5987851025692094, + "acc_norm_stderr": 0.004891426533390624 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.631578947368421, + "acc_stderr": 0.036996580176568775, + "acc_norm": 0.631578947368421, + "acc_norm_stderr": 0.036996580176568775 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6504854368932039, + "acc_stderr": 0.047211885060971716, + "acc_norm": 0.6504854368932039, + "acc_norm_stderr": 0.047211885060971716 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.648786717752235, + "acc_stderr": 0.017069982051499427, + "acc_norm": 0.648786717752235, + "acc_norm_stderr": 0.017069982051499427 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45925925925925926, + "acc_stderr": 0.04304979692464243, + "acc_norm": 0.45925925925925926, + "acc_norm_stderr": 0.04304979692464243 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.46382978723404256, + "acc_stderr": 0.032600385118357715, + "acc_norm": 0.46382978723404256, + "acc_norm_stderr": 0.032600385118357715 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4819277108433735, + "acc_stderr": 0.03889951252827216, + "acc_norm": 0.4819277108433735, + "acc_norm_stderr": 0.03889951252827216 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5755627009646302, + "acc_stderr": 
0.028071928247946205, + "acc_norm": 0.5755627009646302, + "acc_norm_stderr": 0.028071928247946205 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5739910313901345, + "acc_stderr": 0.03318833286217281, + "acc_norm": 0.5739910313901345, + "acc_norm_stderr": 0.03318833286217281 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6030534351145038, + "acc_stderr": 0.04291135671009224, + "acc_norm": 0.6030534351145038, + "acc_norm_stderr": 0.04291135671009224 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.52, + "acc_stderr": 0.05021167315686781, + "acc_norm": 0.52, + "acc_norm_stderr": 0.05021167315686781 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7121212121212122, + "acc_stderr": 0.03225883512300992, + "acc_norm": 0.7121212121212122, + "acc_norm_stderr": 0.03225883512300992 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.496551724137931, + "acc_stderr": 0.04166567577101579, + "acc_norm": 0.496551724137931, + "acc_norm_stderr": 0.04166567577101579 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.043364327079931785, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.043364327079931785 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5882352941176471, + "acc_stderr": 0.031968769891957786, + "acc_norm": 0.5882352941176471, + "acc_norm_stderr": 0.031968769891957786 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5384615384615384, + "acc_stderr": 0.02527589207024065, + "acc_norm": 0.5384615384615384, + "acc_norm_stderr": 0.02527589207024065 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.58, + "acc_stderr": 0.04960449637488583, + "acc_norm": 0.58, + "acc_norm_stderr": 0.04960449637488583 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5740740740740741, + 
"acc_stderr": 0.0478034362693679, + "acc_norm": 0.5740740740740741, + "acc_norm_stderr": 0.0478034362693679 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4088669950738916, + "acc_stderr": 0.034590588158832314, + "acc_norm": 0.4088669950738916, + "acc_norm_stderr": 0.034590588158832314 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5870967741935483, + "acc_stderr": 0.028009138125400377, + "acc_norm": 0.5870967741935483, + "acc_norm_stderr": 0.028009138125400377 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.8205128205128205, + "acc_stderr": 0.02514093595033543, + "acc_norm": 0.8205128205128205, + "acc_norm_stderr": 0.02514093595033543 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5207547169811321, + "acc_stderr": 0.030746349975723463, + "acc_norm": 0.5207547169811321, + "acc_norm_stderr": 0.030746349975723463 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6454545454545455, + "acc_stderr": 0.045820048415054174, + "acc_norm": 0.6454545454545455, + "acc_norm_stderr": 0.045820048415054174 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3962962962962963, + "acc_stderr": 0.029822619458533997, + "acc_norm": 0.3962962962962963, + "acc_norm_stderr": 0.029822619458533997 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33774834437086093, + "acc_stderr": 0.038615575462551684, + "acc_norm": 0.33774834437086093, + "acc_norm_stderr": 0.038615575462551684 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7064676616915423, + "acc_stderr": 0.032200241045342054, + "acc_norm": 0.7064676616915423, + "acc_norm_stderr": 0.032200241045342054 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4797687861271676, + "acc_stderr": 0.03809342081273957, + "acc_norm": 0.4797687861271676, + "acc_norm_stderr": 0.03809342081273957 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.40476190476190477, + "acc_stderr": 0.025279850397404904, + "acc_norm": 0.40476190476190477, + "acc_norm_stderr": 
0.025279850397404904 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5277777777777778, + "acc_stderr": 0.04174752578923185, + "acc_norm": 0.5277777777777778, + "acc_norm_stderr": 0.04174752578923185 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.76, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.76, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.6127167630057804, + "acc_stderr": 0.026226158605124655, + "acc_norm": 0.6127167630057804, + "acc_norm_stderr": 0.026226158605124655 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5214723926380368, + "acc_stderr": 0.03924746876751129, + "acc_norm": 0.5214723926380368, + "acc_norm_stderr": 0.03924746876751129 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.595679012345679, + "acc_stderr": 0.027306625297327677, + "acc_norm": 0.595679012345679, + "acc_norm_stderr": 0.027306625297327677 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6994818652849741, + "acc_stderr": 0.0330881859441575, + "acc_norm": 0.6994818652849741, + "acc_norm_stderr": 0.0330881859441575 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.4473684210526316, + "acc_stderr": 0.04677473004491199, + "acc_norm": 0.4473684210526316, + "acc_norm_stderr": 0.04677473004491199 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6807339449541284, + "acc_stderr": 0.019987829069750013, + "acc_norm": 0.6807339449541284, + "acc_norm_stderr": 0.019987829069750013 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4126984126984127, + "acc_stderr": 0.04403438954768177, + "acc_norm": 0.4126984126984127, + 
"acc_norm_stderr": 0.04403438954768177 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.565359477124183, + "acc_stderr": 0.028384256704883037, + "acc_norm": 0.565359477124183, + "acc_norm_stderr": 0.028384256704883037 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.59, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.59, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7520661157024794, + "acc_stderr": 0.03941897526516303, + "acc_norm": 0.7520661157024794, + "acc_norm_stderr": 0.03941897526516303 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5855263157894737, + "acc_stderr": 0.040089737857792046, + "acc_norm": 0.5855263157894737, + "acc_norm_stderr": 0.040089737857792046 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5081699346405228, + "acc_stderr": 0.02022513434305727, + "acc_norm": 0.5081699346405228, + "acc_norm_stderr": 0.02022513434305727 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.40425531914893614, + "acc_stderr": 0.029275532159704725, + "acc_norm": 0.40425531914893614, + "acc_norm_stderr": 0.029275532159704725 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.41964285714285715, + "acc_stderr": 0.04684099321077106, + "acc_norm": 0.41964285714285715, + "acc_norm_stderr": 0.04684099321077106 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5046296296296297, + "acc_stderr": 0.03409825519163572, + "acc_norm": 0.5046296296296297, + "acc_norm_stderr": 0.03409825519163572 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2435754189944134, + "acc_stderr": 0.014355911964767864, + "acc_norm": 0.2435754189944134, + "acc_norm_stderr": 0.014355911964767864 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.68, + "acc_stderr": 0.046882617226215034, 
+ "acc_norm": 0.68, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4742647058823529, + "acc_stderr": 0.03033257809455504, + "acc_norm": 0.4742647058823529, + "acc_norm_stderr": 0.03033257809455504 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6775510204081633, + "acc_stderr": 0.02992310056368391, + "acc_norm": 0.6775510204081633, + "acc_norm_stderr": 0.02992310056368391 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7510548523206751, + "acc_stderr": 0.028146970599422644, + "acc_norm": 0.7510548523206751, + "acc_norm_stderr": 0.028146970599422644 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.4172099087353325, + "acc_stderr": 0.012593959992906427, + "acc_norm": 0.4172099087353325, + "acc_norm_stderr": 0.012593959992906427 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.03308611113236435, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.03308611113236435 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6909090909090909, + "acc_stderr": 0.036085410115739666, + "acc_norm": 0.6909090909090909, + "acc_norm_stderr": 0.036085410115739666 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3182374541003672, + "mc1_stderr": 0.0163059886489206, + "mc2": 0.47878693903425384, + "mc2_stderr": 0.015539566529720125 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5631641086186541, + "acc_stderr": 0.01705263355985607, + "acc_norm": 0.5926800472255017, + "acc_norm_stderr": 0.01689245669519127 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + 
"harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + 
"harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "mohomin123/M-DIE-M-10.7B", + "model_sha": "a82ae31d3c2b3c5199f74474a0249e435ede1208", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/momo/polyglot-ko-12.8b-Chat-QLoRA-Merge/result_2023-10-03 01:03:30.json b/momo/polyglot-ko-12.8b-Chat-QLoRA-Merge/result_2023-10-03 01:03:30.json new file mode 100644 index 0000000000000000000000000000000000000000..ba96b12f23e0a6e648446faee639899902dcb9a0 --- /dev/null +++ b/momo/polyglot-ko-12.8b-Chat-QLoRA-Merge/result_2023-10-03 01:03:30.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.29948805460750855, + "acc_stderr": 0.01338502163731356, + "acc_norm": 0.35494880546075086, + "acc_norm_stderr": 0.013983036904094094 + }, + "harness|ko_hellaswag|10": { + "acc": 0.38627763393746267, + "acc_stderr": 0.004859004184694615, + "acc_norm": 0.4993029277036447, + "acc_norm_stderr": 0.00498977656227611 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.28654970760233917, + "acc_stderr": 0.034678266857038245, + "acc_norm": 0.28654970760233917, + "acc_norm_stderr": 0.034678266857038245 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.1941747572815534, + "acc_stderr": 0.03916667762822584, + "acc_norm": 0.1941747572815534, + "acc_norm_stderr": 0.03916667762822584 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.280970625798212, + "acc_stderr": 0.016073127851221225, + "acc_norm": 0.280970625798212, + "acc_norm_stderr": 0.016073127851221225 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 
0.3851851851851852, + "acc_stderr": 0.042039210401562783, + "acc_norm": 0.3851851851851852, + "acc_norm_stderr": 0.042039210401562783 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.20851063829787234, + "acc_stderr": 0.02655698211783875, + "acc_norm": 0.20851063829787234, + "acc_norm_stderr": 0.02655698211783875 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.19879518072289157, + "acc_stderr": 0.031069390260789413, + "acc_norm": 0.19879518072289157, + "acc_norm_stderr": 0.031069390260789413 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3054662379421222, + "acc_stderr": 0.026160584450140474, + "acc_norm": 0.3054662379421222, + "acc_norm_stderr": 0.026160584450140474 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.2062780269058296, + "acc_stderr": 0.02715715047956382, + "acc_norm": 0.2062780269058296, + "acc_norm_stderr": 0.02715715047956382 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.24427480916030533, + "acc_stderr": 0.037683359597287434, + "acc_norm": 0.24427480916030533, + "acc_norm_stderr": 0.037683359597287434 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036845, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036845 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.26262626262626265, + "acc_stderr": 0.03135305009533084, + "acc_norm": 0.26262626262626265, + "acc_norm_stderr": 0.03135305009533084 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3103448275862069, + "acc_stderr": 0.03855289616378949, + "acc_norm": 0.3103448275862069, + "acc_norm_stderr": 0.03855289616378949 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237657, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237657 + }, + 
"harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.2184873949579832, + "acc_stderr": 0.026841514322958924, + "acc_norm": 0.2184873949579832, + "acc_norm_stderr": 0.026841514322958924 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2153846153846154, + "acc_stderr": 0.020843034557462878, + "acc_norm": 0.2153846153846154, + "acc_norm_stderr": 0.020843034557462878 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.23148148148148148, + "acc_stderr": 0.04077494709252627, + "acc_norm": 0.23148148148148148, + "acc_norm_stderr": 0.04077494709252627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2955665024630542, + "acc_stderr": 0.032104944337514575, + "acc_norm": 0.2955665024630542, + "acc_norm_stderr": 0.032104944337514575 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2645161290322581, + "acc_stderr": 0.02509189237885928, + "acc_norm": 0.2645161290322581, + "acc_norm_stderr": 0.02509189237885928 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2606837606837607, + "acc_stderr": 0.028760348956523414, + "acc_norm": 0.2606837606837607, + "acc_norm_stderr": 0.028760348956523414 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2339622641509434, + "acc_stderr": 0.02605529690115292, + "acc_norm": 0.2339622641509434, + "acc_norm_stderr": 0.02605529690115292 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.20909090909090908, + "acc_stderr": 0.03895091015724135, + "acc_norm": 0.20909090909090908, + "acc_norm_stderr": 0.03895091015724135 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.026842057873833706, + "acc_norm": 
0.26296296296296295, + "acc_norm_stderr": 0.026842057873833706 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.271523178807947, + "acc_stderr": 0.03631329803969653, + "acc_norm": 0.271523178807947, + "acc_norm_stderr": 0.03631329803969653 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.24875621890547264, + "acc_stderr": 0.030567675938916714, + "acc_norm": 0.24875621890547264, + "acc_norm_stderr": 0.030567675938916714 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.24855491329479767, + "acc_stderr": 0.03295304696818317, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 0.03295304696818317 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.02264421261552521, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.02264421261552521 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2569444444444444, + "acc_stderr": 0.03653946969442099, + "acc_norm": 0.2569444444444444, + "acc_norm_stderr": 0.03653946969442099 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.17, + "acc_stderr": 0.0377525168068637, + "acc_norm": 0.17, + "acc_norm_stderr": 0.0377525168068637 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.3092485549132948, + "acc_stderr": 0.02488314057007176, + "acc_norm": 0.3092485549132948, + "acc_norm_stderr": 0.02488314057007176 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3006134969325153, + "acc_stderr": 0.03602511318806771, + "acc_norm": 0.3006134969325153, + "acc_norm_stderr": 0.03602511318806771 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.30246913580246915, + "acc_stderr": 0.025557653981868038, + "acc_norm": 0.30246913580246915, + "acc_norm_stderr": 0.025557653981868038 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + 
"acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.22797927461139897, + "acc_stderr": 0.030276909945178256, + "acc_norm": 0.22797927461139897, + "acc_norm_stderr": 0.030276909945178256 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.04049339297748141, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.04049339297748141 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.22568807339449543, + "acc_stderr": 0.01792308766780305, + "acc_norm": 0.22568807339449543, + "acc_norm_stderr": 0.01792308766780305 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.14285714285714285, + "acc_stderr": 0.03129843185743809, + "acc_norm": 0.14285714285714285, + "acc_norm_stderr": 0.03129843185743809 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.26143790849673204, + "acc_stderr": 0.025160998214292456, + "acc_norm": 0.26143790849673204, + "acc_norm_stderr": 0.025160998214292456 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.22, + "acc_stderr": 0.041633319989322674, + "acc_norm": 0.22, + "acc_norm_stderr": 0.041633319989322674 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.38016528925619836, + "acc_stderr": 0.04431324501968431, + "acc_norm": 0.38016528925619836, + "acc_norm_stderr": 0.04431324501968431 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3223684210526316, + "acc_stderr": 0.038035102483515854, + "acc_norm": 0.3223684210526316, + "acc_norm_stderr": 0.038035102483515854 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2761437908496732, + "acc_stderr": 0.018087276935663133, + "acc_norm": 0.2761437908496732, + "acc_norm_stderr": 0.018087276935663133 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2730496453900709, + "acc_stderr": 0.026577860943307854, + "acc_norm": 0.2730496453900709, + "acc_norm_stderr": 0.026577860943307854 + }, + "harness|ko_mmlu_machine_learning|5": { 
+ "acc": 0.24107142857142858, + "acc_stderr": 0.04059867246952689, + "acc_norm": 0.24107142857142858, + "acc_norm_stderr": 0.04059867246952689 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.20833333333333334, + "acc_stderr": 0.027696910713093936, + "acc_norm": 0.20833333333333334, + "acc_norm_stderr": 0.027696910713093936 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24692737430167597, + "acc_stderr": 0.014422292204808852, + "acc_norm": 0.24692737430167597, + "acc_norm_stderr": 0.014422292204808852 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.16544117647058823, + "acc_stderr": 0.02257177102549475, + "acc_norm": 0.16544117647058823, + "acc_norm_stderr": 0.02257177102549475 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.24081632653061225, + "acc_stderr": 0.027372942201788167, + "acc_norm": 0.24081632653061225, + "acc_norm_stderr": 0.027372942201788167 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.31645569620253167, + "acc_stderr": 0.030274974880218977, + "acc_norm": 0.31645569620253167, + "acc_norm_stderr": 0.030274974880218977 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2692307692307692, + "acc_stderr": 0.011328734403140327, + "acc_norm": 0.2692307692307692, + "acc_norm_stderr": 0.011328734403140327 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.27941176470588236, + "acc_stderr": 0.03149328104507956, + "acc_norm": 0.27941176470588236, + "acc_norm_stderr": 0.03149328104507956 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3515151515151515, + "acc_stderr": 0.037282069986826503, + "acc_norm": 
0.3515151515151515, + "acc_norm_stderr": 0.037282069986826503 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24112607099143207, + "mc1_stderr": 0.014974827279752337, + "mc2": 0.3942593710384486, + "mc2_stderr": 0.014811018314989769 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.32113341204250295, + "acc_stderr": 0.01605276257911158, + "acc_norm": 0.42502951593860683, + "acc_norm_stderr": 0.016996016308362883 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + 
"harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "momo/polyglot-ko-12.8b-Chat-QLoRA-Merge", + "model_sha": "793d22f37f5945b22fbc33c447f8cdcaa4a50221", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/momo/polyglot-ko-12.8b-Chat-QLoRA-Merge/result_2023-10-03 01:08:21.json b/momo/polyglot-ko-12.8b-Chat-QLoRA-Merge/result_2023-10-03 01:08:21.json new file mode 100644 index 0000000000000000000000000000000000000000..ba96b12f23e0a6e648446faee639899902dcb9a0 --- /dev/null +++ b/momo/polyglot-ko-12.8b-Chat-QLoRA-Merge/result_2023-10-03 01:08:21.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.29948805460750855, + "acc_stderr": 0.01338502163731356, + "acc_norm": 
0.35494880546075086, + "acc_norm_stderr": 0.013983036904094094 + }, + "harness|ko_hellaswag|10": { + "acc": 0.38627763393746267, + "acc_stderr": 0.004859004184694615, + "acc_norm": 0.4993029277036447, + "acc_norm_stderr": 0.00498977656227611 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.28654970760233917, + "acc_stderr": 0.034678266857038245, + "acc_norm": 0.28654970760233917, + "acc_norm_stderr": 0.034678266857038245 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.1941747572815534, + "acc_stderr": 0.03916667762822584, + "acc_norm": 0.1941747572815534, + "acc_norm_stderr": 0.03916667762822584 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.280970625798212, + "acc_stderr": 0.016073127851221225, + "acc_norm": 0.280970625798212, + "acc_norm_stderr": 0.016073127851221225 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3851851851851852, + "acc_stderr": 0.042039210401562783, + "acc_norm": 0.3851851851851852, + "acc_norm_stderr": 0.042039210401562783 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.20851063829787234, + "acc_stderr": 0.02655698211783875, + "acc_norm": 0.20851063829787234, + "acc_norm_stderr": 0.02655698211783875 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.19879518072289157, + "acc_stderr": 0.031069390260789413, + "acc_norm": 0.19879518072289157, + "acc_norm_stderr": 0.031069390260789413 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3054662379421222, + "acc_stderr": 0.026160584450140474, + "acc_norm": 0.3054662379421222, + "acc_norm_stderr": 0.026160584450140474 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.2062780269058296, + "acc_stderr": 0.02715715047956382, + "acc_norm": 0.2062780269058296, + "acc_norm_stderr": 0.02715715047956382 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.24427480916030533, + "acc_stderr": 0.037683359597287434, + 
"acc_norm": 0.24427480916030533, + "acc_norm_stderr": 0.037683359597287434 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036845, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036845 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.26262626262626265, + "acc_stderr": 0.03135305009533084, + "acc_norm": 0.26262626262626265, + "acc_norm_stderr": 0.03135305009533084 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3103448275862069, + "acc_stderr": 0.03855289616378949, + "acc_norm": 0.3103448275862069, + "acc_norm_stderr": 0.03855289616378949 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237657, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237657 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.2184873949579832, + "acc_stderr": 0.026841514322958924, + "acc_norm": 0.2184873949579832, + "acc_norm_stderr": 0.026841514322958924 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2153846153846154, + "acc_stderr": 0.020843034557462878, + "acc_norm": 0.2153846153846154, + "acc_norm_stderr": 0.020843034557462878 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.23148148148148148, + "acc_stderr": 0.04077494709252627, + "acc_norm": 0.23148148148148148, + "acc_norm_stderr": 0.04077494709252627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2955665024630542, + "acc_stderr": 0.032104944337514575, + "acc_norm": 0.2955665024630542, + "acc_norm_stderr": 0.032104944337514575 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2645161290322581, + 
"acc_stderr": 0.02509189237885928, + "acc_norm": 0.2645161290322581, + "acc_norm_stderr": 0.02509189237885928 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2606837606837607, + "acc_stderr": 0.028760348956523414, + "acc_norm": 0.2606837606837607, + "acc_norm_stderr": 0.028760348956523414 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2339622641509434, + "acc_stderr": 0.02605529690115292, + "acc_norm": 0.2339622641509434, + "acc_norm_stderr": 0.02605529690115292 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.20909090909090908, + "acc_stderr": 0.03895091015724135, + "acc_norm": 0.20909090909090908, + "acc_norm_stderr": 0.03895091015724135 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.026842057873833706, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.026842057873833706 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.271523178807947, + "acc_stderr": 0.03631329803969653, + "acc_norm": 0.271523178807947, + "acc_norm_stderr": 0.03631329803969653 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.24875621890547264, + "acc_stderr": 0.030567675938916714, + "acc_norm": 0.24875621890547264, + "acc_norm_stderr": 0.030567675938916714 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.24855491329479767, + "acc_stderr": 0.03295304696818317, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 0.03295304696818317 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.02264421261552521, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.02264421261552521 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2569444444444444, + "acc_stderr": 0.03653946969442099, + "acc_norm": 0.2569444444444444, + "acc_norm_stderr": 0.03653946969442099 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.17, + "acc_stderr": 0.0377525168068637, + "acc_norm": 0.17, + "acc_norm_stderr": 0.0377525168068637 + }, + 
"harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.3092485549132948, + "acc_stderr": 0.02488314057007176, + "acc_norm": 0.3092485549132948, + "acc_norm_stderr": 0.02488314057007176 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3006134969325153, + "acc_stderr": 0.03602511318806771, + "acc_norm": 0.3006134969325153, + "acc_norm_stderr": 0.03602511318806771 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.30246913580246915, + "acc_stderr": 0.025557653981868038, + "acc_norm": 0.30246913580246915, + "acc_norm_stderr": 0.025557653981868038 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.22797927461139897, + "acc_stderr": 0.030276909945178256, + "acc_norm": 0.22797927461139897, + "acc_norm_stderr": 0.030276909945178256 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.04049339297748141, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.04049339297748141 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.22568807339449543, + "acc_stderr": 0.01792308766780305, + "acc_norm": 0.22568807339449543, + "acc_norm_stderr": 0.01792308766780305 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.14285714285714285, + "acc_stderr": 0.03129843185743809, + "acc_norm": 0.14285714285714285, + "acc_norm_stderr": 0.03129843185743809 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.26143790849673204, + "acc_stderr": 0.025160998214292456, + "acc_norm": 0.26143790849673204, + "acc_norm_stderr": 0.025160998214292456 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.22, + "acc_stderr": 0.041633319989322674, + "acc_norm": 0.22, + "acc_norm_stderr": 
0.041633319989322674 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.38016528925619836, + "acc_stderr": 0.04431324501968431, + "acc_norm": 0.38016528925619836, + "acc_norm_stderr": 0.04431324501968431 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3223684210526316, + "acc_stderr": 0.038035102483515854, + "acc_norm": 0.3223684210526316, + "acc_norm_stderr": 0.038035102483515854 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2761437908496732, + "acc_stderr": 0.018087276935663133, + "acc_norm": 0.2761437908496732, + "acc_norm_stderr": 0.018087276935663133 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2730496453900709, + "acc_stderr": 0.026577860943307854, + "acc_norm": 0.2730496453900709, + "acc_norm_stderr": 0.026577860943307854 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.24107142857142858, + "acc_stderr": 0.04059867246952689, + "acc_norm": 0.24107142857142858, + "acc_norm_stderr": 0.04059867246952689 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.20833333333333334, + "acc_stderr": 0.027696910713093936, + "acc_norm": 0.20833333333333334, + "acc_norm_stderr": 0.027696910713093936 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24692737430167597, + "acc_stderr": 0.014422292204808852, + "acc_norm": 0.24692737430167597, + "acc_norm_stderr": 0.014422292204808852 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.16544117647058823, + "acc_stderr": 0.02257177102549475, + "acc_norm": 0.16544117647058823, + "acc_norm_stderr": 0.02257177102549475 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.24081632653061225, + "acc_stderr": 
0.027372942201788167, + "acc_norm": 0.24081632653061225, + "acc_norm_stderr": 0.027372942201788167 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.31645569620253167, + "acc_stderr": 0.030274974880218977, + "acc_norm": 0.31645569620253167, + "acc_norm_stderr": 0.030274974880218977 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2692307692307692, + "acc_stderr": 0.011328734403140327, + "acc_norm": 0.2692307692307692, + "acc_norm_stderr": 0.011328734403140327 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.27941176470588236, + "acc_stderr": 0.03149328104507956, + "acc_norm": 0.27941176470588236, + "acc_norm_stderr": 0.03149328104507956 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3515151515151515, + "acc_stderr": 0.037282069986826503, + "acc_norm": 0.3515151515151515, + "acc_norm_stderr": 0.037282069986826503 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24112607099143207, + "mc1_stderr": 0.014974827279752337, + "mc2": 0.3942593710384486, + "mc2_stderr": 0.014811018314989769 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.32113341204250295, + "acc_stderr": 0.01605276257911158, + "acc_norm": 0.42502951593860683, + "acc_norm_stderr": 0.016996016308362883 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + 
"harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + 
"model_name": "momo/polyglot-ko-12.8b-Chat-QLoRA-Merge", + "model_sha": "793d22f37f5945b22fbc33c447f8cdcaa4a50221", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/momo/polyglot-ko-12.8b-Chat-QLoRA-Merge_v3/result_2023-10-03 17:56:39.json b/momo/polyglot-ko-12.8b-Chat-QLoRA-Merge_v3/result_2023-10-03 17:56:39.json new file mode 100644 index 0000000000000000000000000000000000000000..8e19679f279a51edb8db2167bcd20f9832dd6224 --- /dev/null +++ b/momo/polyglot-ko-12.8b-Chat-QLoRA-Merge_v3/result_2023-10-03 17:56:39.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.30119453924914674, + "acc_stderr": 0.013406741767847612, + "acc_norm": 0.33532423208191126, + "acc_norm_stderr": 0.013796182947785562 + }, + "harness|ko_hellaswag|10": { + "acc": 0.38707428799044014, + "acc_stderr": 0.004860854240821967, + "acc_norm": 0.5005974905397331, + "acc_norm_stderr": 0.004989777848791005 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.32748538011695905, + "acc_stderr": 0.035993357714560276, + "acc_norm": 0.32748538011695905, + "acc_norm_stderr": 0.035993357714560276 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.17475728155339806, + "acc_stderr": 0.037601780060266196, + "acc_norm": 0.17475728155339806, + "acc_norm_stderr": 0.037601780060266196 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3001277139208174, + "acc_stderr": 0.016389249691317425, + "acc_norm": 0.3001277139208174, + "acc_norm_stderr": 0.016389249691317425 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34074074074074073, + "acc_stderr": 0.04094376269996794, + "acc_norm": 0.34074074074074073, + "acc_norm_stderr": 0.04094376269996794 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + 
"harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2680851063829787, + "acc_stderr": 0.028957342788342343, + "acc_norm": 0.2680851063829787, + "acc_norm_stderr": 0.028957342788342343 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.24096385542168675, + "acc_stderr": 0.0332939411907353, + "acc_norm": 0.24096385542168675, + "acc_norm_stderr": 0.0332939411907353 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3536977491961415, + "acc_stderr": 0.027155208103200868, + "acc_norm": 0.3536977491961415, + "acc_norm_stderr": 0.027155208103200868 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.19730941704035873, + "acc_stderr": 0.02670985334496796, + "acc_norm": 0.19730941704035873, + "acc_norm_stderr": 0.02670985334496796 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.24427480916030533, + "acc_stderr": 0.03768335959728743, + "acc_norm": 0.24427480916030533, + "acc_norm_stderr": 0.03768335959728743 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036846, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036846 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.30808080808080807, + "acc_stderr": 0.032894773300986155, + "acc_norm": 0.30808080808080807, + "acc_norm_stderr": 0.032894773300986155 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3310344827586207, + "acc_stderr": 0.03921545312467122, + "acc_norm": 0.3310344827586207, + "acc_norm_stderr": 0.03921545312467122 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.04617034827006717, + "acc_norm": 0.3137254901960784, + "acc_norm_stderr": 0.04617034827006717 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.28991596638655465, + "acc_stderr": 0.029472485833136112, + "acc_norm": 0.28991596638655465, + "acc_norm_stderr": 0.029472485833136112 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.28974358974358977, + "acc_stderr": 0.02300062824368797, + "acc_norm": 
0.28974358974358977, + "acc_norm_stderr": 0.02300062824368797 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.042365112580946336, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.042365112580946336 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.30049261083743845, + "acc_stderr": 0.03225799476233485, + "acc_norm": 0.30049261083743845, + "acc_norm_stderr": 0.03225799476233485 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.25806451612903225, + "acc_stderr": 0.02489246917246283, + "acc_norm": 0.25806451612903225, + "acc_norm_stderr": 0.02489246917246283 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.32051282051282054, + "acc_stderr": 0.03057281131029961, + "acc_norm": 0.32051282051282054, + "acc_norm_stderr": 0.03057281131029961 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.27547169811320754, + "acc_stderr": 0.027495663683724046, + "acc_norm": 0.27547169811320754, + "acc_norm_stderr": 0.027495663683724046 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.24545454545454545, + "acc_stderr": 0.04122066502878285, + "acc_norm": 0.24545454545454545, + "acc_norm_stderr": 0.04122066502878285 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.02671924078371217, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.02671924078371217 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.037101857261199966, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.037101857261199966 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.2835820895522388, + "acc_stderr": 
0.03187187537919798, + "acc_norm": 0.2835820895522388, + "acc_norm_stderr": 0.03187187537919798 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2658959537572254, + "acc_stderr": 0.033687629322594316, + "acc_norm": 0.2658959537572254, + "acc_norm_stderr": 0.033687629322594316 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2671957671957672, + "acc_stderr": 0.022789673145776568, + "acc_norm": 0.2671957671957672, + "acc_norm_stderr": 0.022789673145776568 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2361111111111111, + "acc_stderr": 0.03551446610810826, + "acc_norm": 0.2361111111111111, + "acc_norm_stderr": 0.03551446610810826 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.35260115606936415, + "acc_stderr": 0.02572280220089582, + "acc_norm": 0.35260115606936415, + "acc_norm_stderr": 0.02572280220089582 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3128834355828221, + "acc_stderr": 0.036429145782924055, + "acc_norm": 0.3128834355828221, + "acc_norm_stderr": 0.036429145782924055 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.30246913580246915, + "acc_stderr": 0.025557653981868045, + "acc_norm": 0.30246913580246915, + "acc_norm_stderr": 0.025557653981868045 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.2538860103626943, + "acc_stderr": 0.03141024780565319, + "acc_norm": 0.2538860103626943, + "acc_norm_stderr": 0.03141024780565319 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 
0.24561403508771928, + "acc_stderr": 0.04049339297748142, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.04049339297748142 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.26788990825688075, + "acc_stderr": 0.01898746225797865, + "acc_norm": 0.26788990825688075, + "acc_norm_stderr": 0.01898746225797865 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.03932537680392871, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.03932537680392871 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3431372549019608, + "acc_stderr": 0.02718449890994162, + "acc_norm": 0.3431372549019608, + "acc_norm_stderr": 0.02718449890994162 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909281, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909281 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2231404958677686, + "acc_stderr": 0.03800754475228733, + "acc_norm": 0.2231404958677686, + "acc_norm_stderr": 0.03800754475228733 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.17105263157894737, + "acc_stderr": 0.030643607071677098, + "acc_norm": 0.17105263157894737, + "acc_norm_stderr": 0.030643607071677098 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.29248366013071897, + "acc_stderr": 0.01840341571010979, + "acc_norm": 0.29248366013071897, + "acc_norm_stderr": 0.01840341571010979 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2624113475177305, + "acc_stderr": 0.02624492034984301, + "acc_norm": 0.2624113475177305, + "acc_norm_stderr": 0.02624492034984301 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.21428571428571427, + "acc_stderr": 0.03894641120044792, + "acc_norm": 0.21428571428571427, + "acc_norm_stderr": 0.03894641120044792 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.37962962962962965, + "acc_stderr": 0.03309682581119035, + "acc_norm": 0.37962962962962965, + "acc_norm_stderr": 
0.03309682581119035 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.264804469273743, + "acc_stderr": 0.014756906483260664, + "acc_norm": 0.264804469273743, + "acc_norm_stderr": 0.014756906483260664 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3161764705882353, + "acc_stderr": 0.02824568739146292, + "acc_norm": 0.3161764705882353, + "acc_norm_stderr": 0.02824568739146292 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.33877551020408164, + "acc_stderr": 0.03029950656215418, + "acc_norm": 0.33877551020408164, + "acc_norm_stderr": 0.03029950656215418 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.379746835443038, + "acc_stderr": 0.031591887529658504, + "acc_norm": 0.379746835443038, + "acc_norm_stderr": 0.031591887529658504 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.31877444589308995, + "acc_stderr": 0.01190189563578609, + "acc_norm": 0.31877444589308995, + "acc_norm_stderr": 0.01190189563578609 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.35294117647058826, + "acc_stderr": 0.03354092437591518, + "acc_norm": 0.35294117647058826, + "acc_norm_stderr": 0.03354092437591518 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3393939393939394, + "acc_stderr": 0.03697442205031596, + "acc_norm": 0.3393939393939394, + "acc_norm_stderr": 0.03697442205031596 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24479804161566707, + "mc1_stderr": 0.015051869486715, + "mc2": 0.39860268740922694, + "mc2_stderr": 0.015473079108834439 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.29279811097992914, + "acc_stderr": 0.015644823205401334, + 
"acc_norm": 0.33412042502951594, + "acc_norm_stderr": 0.016216763304239695 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + 
"harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "momo/polyglot-ko-12.8b-Chat-QLoRA-Merge_v3", + "model_sha": "33bfc3a65f355b210a21b6f7c8f04f49492835bf", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/momo/polyglot-ko-12.8b-Chat-QLoRA-Merge_v3/result_2023-10-03 17:56:47.json b/momo/polyglot-ko-12.8b-Chat-QLoRA-Merge_v3/result_2023-10-03 17:56:47.json new file mode 100644 index 0000000000000000000000000000000000000000..8e19679f279a51edb8db2167bcd20f9832dd6224 --- /dev/null +++ b/momo/polyglot-ko-12.8b-Chat-QLoRA-Merge_v3/result_2023-10-03 17:56:47.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.30119453924914674, + "acc_stderr": 0.013406741767847612, + "acc_norm": 0.33532423208191126, + "acc_norm_stderr": 0.013796182947785562 + }, + "harness|ko_hellaswag|10": { + "acc": 0.38707428799044014, + "acc_stderr": 0.004860854240821967, + "acc_norm": 0.5005974905397331, + "acc_norm_stderr": 0.004989777848791005 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.32748538011695905, + "acc_stderr": 
0.035993357714560276, + "acc_norm": 0.32748538011695905, + "acc_norm_stderr": 0.035993357714560276 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.17475728155339806, + "acc_stderr": 0.037601780060266196, + "acc_norm": 0.17475728155339806, + "acc_norm_stderr": 0.037601780060266196 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3001277139208174, + "acc_stderr": 0.016389249691317425, + "acc_norm": 0.3001277139208174, + "acc_norm_stderr": 0.016389249691317425 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34074074074074073, + "acc_stderr": 0.04094376269996794, + "acc_norm": 0.34074074074074073, + "acc_norm_stderr": 0.04094376269996794 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2680851063829787, + "acc_stderr": 0.028957342788342343, + "acc_norm": 0.2680851063829787, + "acc_norm_stderr": 0.028957342788342343 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.24096385542168675, + "acc_stderr": 0.0332939411907353, + "acc_norm": 0.24096385542168675, + "acc_norm_stderr": 0.0332939411907353 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3536977491961415, + "acc_stderr": 0.027155208103200868, + "acc_norm": 0.3536977491961415, + "acc_norm_stderr": 0.027155208103200868 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.19730941704035873, + "acc_stderr": 0.02670985334496796, + "acc_norm": 0.19730941704035873, + "acc_norm_stderr": 0.02670985334496796 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.24427480916030533, + "acc_stderr": 0.03768335959728743, + "acc_norm": 0.24427480916030533, + "acc_norm_stderr": 0.03768335959728743 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036846, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036846 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.30808080808080807, + 
"acc_stderr": 0.032894773300986155, + "acc_norm": 0.30808080808080807, + "acc_norm_stderr": 0.032894773300986155 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3310344827586207, + "acc_stderr": 0.03921545312467122, + "acc_norm": 0.3310344827586207, + "acc_norm_stderr": 0.03921545312467122 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.04617034827006717, + "acc_norm": 0.3137254901960784, + "acc_norm_stderr": 0.04617034827006717 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.28991596638655465, + "acc_stderr": 0.029472485833136112, + "acc_norm": 0.28991596638655465, + "acc_norm_stderr": 0.029472485833136112 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.28974358974358977, + "acc_stderr": 0.02300062824368797, + "acc_norm": 0.28974358974358977, + "acc_norm_stderr": 0.02300062824368797 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.042365112580946336, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.042365112580946336 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.30049261083743845, + "acc_stderr": 0.03225799476233485, + "acc_norm": 0.30049261083743845, + "acc_norm_stderr": 0.03225799476233485 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.25806451612903225, + "acc_stderr": 0.02489246917246283, + "acc_norm": 0.25806451612903225, + "acc_norm_stderr": 0.02489246917246283 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.32051282051282054, + "acc_stderr": 0.03057281131029961, + "acc_norm": 0.32051282051282054, + "acc_norm_stderr": 0.03057281131029961 + }, + 
"harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.27547169811320754, + "acc_stderr": 0.027495663683724046, + "acc_norm": 0.27547169811320754, + "acc_norm_stderr": 0.027495663683724046 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.24545454545454545, + "acc_stderr": 0.04122066502878285, + "acc_norm": 0.24545454545454545, + "acc_norm_stderr": 0.04122066502878285 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.02671924078371217, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.02671924078371217 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.037101857261199966, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.037101857261199966 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.2835820895522388, + "acc_stderr": 0.03187187537919798, + "acc_norm": 0.2835820895522388, + "acc_norm_stderr": 0.03187187537919798 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2658959537572254, + "acc_stderr": 0.033687629322594316, + "acc_norm": 0.2658959537572254, + "acc_norm_stderr": 0.033687629322594316 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2671957671957672, + "acc_stderr": 0.022789673145776568, + "acc_norm": 0.2671957671957672, + "acc_norm_stderr": 0.022789673145776568 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2361111111111111, + "acc_stderr": 0.03551446610810826, + "acc_norm": 0.2361111111111111, + "acc_norm_stderr": 0.03551446610810826 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.35260115606936415, + "acc_stderr": 0.02572280220089582, + "acc_norm": 0.35260115606936415, + 
"acc_norm_stderr": 0.02572280220089582 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3128834355828221, + "acc_stderr": 0.036429145782924055, + "acc_norm": 0.3128834355828221, + "acc_norm_stderr": 0.036429145782924055 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.30246913580246915, + "acc_stderr": 0.025557653981868045, + "acc_norm": 0.30246913580246915, + "acc_norm_stderr": 0.025557653981868045 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.2538860103626943, + "acc_stderr": 0.03141024780565319, + "acc_norm": 0.2538860103626943, + "acc_norm_stderr": 0.03141024780565319 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.04049339297748142, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.04049339297748142 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.26788990825688075, + "acc_stderr": 0.01898746225797865, + "acc_norm": 0.26788990825688075, + "acc_norm_stderr": 0.01898746225797865 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.03932537680392871, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.03932537680392871 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3431372549019608, + "acc_stderr": 0.02718449890994162, + "acc_norm": 0.3431372549019608, + "acc_norm_stderr": 0.02718449890994162 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909281, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909281 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2231404958677686, + "acc_stderr": 0.03800754475228733, + "acc_norm": 0.2231404958677686, + "acc_norm_stderr": 0.03800754475228733 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.17105263157894737, + "acc_stderr": 0.030643607071677098, + 
"acc_norm": 0.17105263157894737, + "acc_norm_stderr": 0.030643607071677098 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.29248366013071897, + "acc_stderr": 0.01840341571010979, + "acc_norm": 0.29248366013071897, + "acc_norm_stderr": 0.01840341571010979 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2624113475177305, + "acc_stderr": 0.02624492034984301, + "acc_norm": 0.2624113475177305, + "acc_norm_stderr": 0.02624492034984301 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.21428571428571427, + "acc_stderr": 0.03894641120044792, + "acc_norm": 0.21428571428571427, + "acc_norm_stderr": 0.03894641120044792 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.37962962962962965, + "acc_stderr": 0.03309682581119035, + "acc_norm": 0.37962962962962965, + "acc_norm_stderr": 0.03309682581119035 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.264804469273743, + "acc_stderr": 0.014756906483260664, + "acc_norm": 0.264804469273743, + "acc_norm_stderr": 0.014756906483260664 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3161764705882353, + "acc_stderr": 0.02824568739146292, + "acc_norm": 0.3161764705882353, + "acc_norm_stderr": 0.02824568739146292 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.33877551020408164, + "acc_stderr": 0.03029950656215418, + "acc_norm": 0.33877551020408164, + "acc_norm_stderr": 0.03029950656215418 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.379746835443038, + "acc_stderr": 0.031591887529658504, + "acc_norm": 0.379746835443038, + "acc_norm_stderr": 0.031591887529658504 + }, + 
"harness|ko_mmlu_professional_law|5": { + "acc": 0.31877444589308995, + "acc_stderr": 0.01190189563578609, + "acc_norm": 0.31877444589308995, + "acc_norm_stderr": 0.01190189563578609 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.35294117647058826, + "acc_stderr": 0.03354092437591518, + "acc_norm": 0.35294117647058826, + "acc_norm_stderr": 0.03354092437591518 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3393939393939394, + "acc_stderr": 0.03697442205031596, + "acc_norm": 0.3393939393939394, + "acc_norm_stderr": 0.03697442205031596 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24479804161566707, + "mc1_stderr": 0.015051869486715, + "mc2": 0.39860268740922694, + "mc2_stderr": 0.015473079108834439 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.29279811097992914, + "acc_stderr": 0.015644823205401334, + "acc_norm": 0.33412042502951594, + "acc_norm_stderr": 0.016216763304239695 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + 
"harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "momo/polyglot-ko-12.8b-Chat-QLoRA-Merge_v3", + "model_sha": "33bfc3a65f355b210a21b6f7c8f04f49492835bf", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file 
diff --git a/momo/polyglot-ko-12.8b-Orca-Chat-QLoRA-Merge-v2/result_2023-10-10 00:11:38.json b/momo/polyglot-ko-12.8b-Orca-Chat-QLoRA-Merge-v2/result_2023-10-10 00:11:38.json new file mode 100644 index 0000000000000000000000000000000000000000..8a1e9b00fa1d48fc0d68415b2cd174421b336a82 --- /dev/null +++ b/momo/polyglot-ko-12.8b-Orca-Chat-QLoRA-Merge-v2/result_2023-10-10 00:11:38.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.32081911262798635, + "acc_stderr": 0.013640943091946524, + "acc_norm": 0.37457337883959047, + "acc_norm_stderr": 0.014144193471893446 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3913563035251942, + "acc_stderr": 0.004870563921220623, + "acc_norm": 0.5044811790479984, + "acc_norm_stderr": 0.004989581008163209 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.033773102522091925, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.033773102522091925 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.32038834951456313, + "acc_stderr": 0.046202840822800406, + "acc_norm": 0.32038834951456313, + "acc_norm_stderr": 0.046202840822800406 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3231162196679438, + "acc_stderr": 0.016723726512343048, + "acc_norm": 0.3231162196679438, + "acc_norm_stderr": 0.016723726512343048 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.362962962962963, + "acc_stderr": 0.041539484047424004, + "acc_norm": 0.362962962962963, + "acc_norm_stderr": 0.041539484047424004 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2553191489361702, + "acc_stderr": 0.028504856470514178, + "acc_norm": 0.2553191489361702, + "acc_norm_stderr": 0.028504856470514178 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.20481927710843373, + "acc_stderr": 0.031417842916639245, + "acc_norm": 
0.20481927710843373, + "acc_norm_stderr": 0.031417842916639245 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.33762057877813506, + "acc_stderr": 0.026858825879488547, + "acc_norm": 0.33762057877813506, + "acc_norm_stderr": 0.026858825879488547 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.21076233183856502, + "acc_stderr": 0.027373095500540193, + "acc_norm": 0.21076233183856502, + "acc_norm_stderr": 0.027373095500540193 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.29770992366412213, + "acc_stderr": 0.04010358942462202, + "acc_norm": 0.29770992366412213, + "acc_norm_stderr": 0.04010358942462202 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3939393939393939, + "acc_stderr": 0.034812853382329624, + "acc_norm": 0.3939393939393939, + "acc_norm_stderr": 0.034812853382329624 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.30344827586206896, + "acc_stderr": 0.038312260488503336, + "acc_norm": 0.30344827586206896, + "acc_norm_stderr": 0.038312260488503336 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.04488482852329017, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.04488482852329017 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.29831932773109243, + "acc_stderr": 0.02971914287634284, + "acc_norm": 0.29831932773109243, + "acc_norm_stderr": 0.02971914287634284 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.35128205128205126, + "acc_stderr": 0.024203665177902796, + "acc_norm": 0.35128205128205126, + "acc_norm_stderr": 0.024203665177902796 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.24, + 
"acc_stderr": 0.04292346959909283, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04557239513497752, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04557239513497752 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.31527093596059114, + "acc_stderr": 0.03269080871970187, + "acc_norm": 0.31527093596059114, + "acc_norm_stderr": 0.03269080871970187 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3193548387096774, + "acc_stderr": 0.02652270967466776, + "acc_norm": 0.3193548387096774, + "acc_norm_stderr": 0.02652270967466776 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.33760683760683763, + "acc_stderr": 0.030980296992618558, + "acc_norm": 0.33760683760683763, + "acc_norm_stderr": 0.030980296992618558 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.29056603773584905, + "acc_stderr": 0.027943219989337142, + "acc_norm": 0.29056603773584905, + "acc_norm_stderr": 0.027943219989337142 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2818181818181818, + "acc_stderr": 0.043091187099464585, + "acc_norm": 0.2818181818181818, + "acc_norm_stderr": 0.043091187099464585 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24814814814814815, + "acc_stderr": 0.0263357394040558, + "acc_norm": 0.24814814814814815, + "acc_norm_stderr": 0.0263357394040558 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2781456953642384, + "acc_stderr": 0.03658603262763744, + "acc_norm": 0.2781456953642384, + "acc_norm_stderr": 0.03658603262763744 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.31343283582089554, + "acc_stderr": 0.03280188205348644, + "acc_norm": 0.31343283582089554, + "acc_norm_stderr": 0.03280188205348644 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3179190751445087, + "acc_stderr": 0.03550683989165582, + "acc_norm": 0.3179190751445087, + "acc_norm_stderr": 0.03550683989165582 + }, + 
"harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.023068188848261124, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.023068188848261124 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2916666666666667, + "acc_stderr": 0.038009680605548574, + "acc_norm": 0.2916666666666667, + "acc_norm_stderr": 0.038009680605548574 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768077, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768077 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.3901734104046243, + "acc_stderr": 0.026261677607806642, + "acc_norm": 0.3901734104046243, + "acc_norm_stderr": 0.026261677607806642 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.36809815950920244, + "acc_stderr": 0.03789213935838396, + "acc_norm": 0.36809815950920244, + "acc_norm_stderr": 0.03789213935838396 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3117283950617284, + "acc_stderr": 0.02577311116963044, + "acc_norm": 0.3117283950617284, + "acc_norm_stderr": 0.02577311116963044 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.36787564766839376, + "acc_stderr": 0.03480175668466036, + "acc_norm": 0.36787564766839376, + "acc_norm_stderr": 0.03480175668466036 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.04185774424022057, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.04185774424022057 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3431192660550459, + "acc_stderr": 0.020354777736086037, + "acc_norm": 0.3431192660550459, + "acc_norm_stderr": 
0.020354777736086037 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.04040610178208841, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.04040610178208841 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.027914055510468008, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.027914055510468008 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2975206611570248, + "acc_stderr": 0.041733491480834974, + "acc_norm": 0.2975206611570248, + "acc_norm_stderr": 0.041733491480834974 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.29605263157894735, + "acc_stderr": 0.037150621549989056, + "acc_norm": 0.29605263157894735, + "acc_norm_stderr": 0.037150621549989056 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2826797385620915, + "acc_stderr": 0.018217269552053435, + "acc_norm": 0.2826797385620915, + "acc_norm_stderr": 0.018217269552053435 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.24822695035460993, + "acc_stderr": 0.025770015644290403, + "acc_norm": 0.24822695035460993, + "acc_norm_stderr": 0.025770015644290403 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.1875, + "acc_stderr": 0.0370468111477387, + "acc_norm": 0.1875, + "acc_norm_stderr": 0.0370468111477387 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.25462962962962965, + "acc_stderr": 0.029711275860005333, + "acc_norm": 0.25462962962962965, + "acc_norm_stderr": 0.029711275860005333 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24134078212290502, + "acc_stderr": 0.01431099954796145, + "acc_norm": 0.24134078212290502, + "acc_norm_stderr": 0.01431099954796145 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + 
"acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.33088235294117646, + "acc_stderr": 0.028582709753898445, + "acc_norm": 0.33088235294117646, + "acc_norm_stderr": 0.028582709753898445 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.27755102040816326, + "acc_stderr": 0.028666857790274655, + "acc_norm": 0.27755102040816326, + "acc_norm_stderr": 0.028666857790274655 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.42616033755274263, + "acc_stderr": 0.032190357031317736, + "acc_norm": 0.42616033755274263, + "acc_norm_stderr": 0.032190357031317736 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3161668839634941, + "acc_stderr": 0.011875780894386578, + "acc_norm": 0.3161668839634941, + "acc_norm_stderr": 0.011875780894386578 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.3480392156862745, + "acc_stderr": 0.03343311240488418, + "acc_norm": 0.3480392156862745, + "acc_norm_stderr": 0.03343311240488418 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.38181818181818183, + "acc_stderr": 0.037937131711656344, + "acc_norm": 0.38181818181818183, + "acc_norm_stderr": 0.037937131711656344 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2778457772337821, + "mc1_stderr": 0.01568092936402464, + "mc2": 0.4401386616406487, + "mc2_stderr": 0.015231170871530949 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.30932703659976385, + "acc_stderr": 0.0158913205055209, + "acc_norm": 0.3612750885478158, + "acc_norm_stderr": 0.016515463022412014 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + 
"harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + 
"harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "momo/polyglot-ko-12.8b-Orca-Chat-QLoRA-Merge-v2", + "model_sha": "fe0117824036ebe2d054ddf14b2ef04a1cb19dda", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/moondriller/anarchy-llama2-13B-v2/result_2024-04-04 10:20:42.json b/moondriller/anarchy-llama2-13B-v2/result_2024-04-04 10:20:42.json new file mode 100644 index 0000000000000000000000000000000000000000..970bfff2f6180b27a18df8aca706e0fb3334cd9e --- /dev/null +++ b/moondriller/anarchy-llama2-13B-v2/result_2024-04-04 10:20:42.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3720136518771331, + "acc_stderr": 0.014124597881844461, + "acc_norm": 0.43430034129692835, + "acc_norm_stderr": 0.014484703048857359 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4214299940250946, + "acc_stderr": 0.004927790036726646, + "acc_norm": 0.5616411073491336, + "acc_norm_stderr": 0.004951717622007976 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5614035087719298, + "acc_stderr": 0.038057975055904594, + "acc_norm": 0.5614035087719298, + "acc_norm_stderr": 0.038057975055904594 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.42718446601941745, + "acc_stderr": 0.04897957737781168, + "acc_norm": 0.42718446601941745, + "acc_norm_stderr": 0.04897957737781168 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.508301404853129, + 
"acc_stderr": 0.017877498991072008, + "acc_norm": 0.508301404853129, + "acc_norm_stderr": 0.017877498991072008 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.04292596718256981, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.04292596718256981 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4085106382978723, + "acc_stderr": 0.03213418026701576, + "acc_norm": 0.4085106382978723, + "acc_norm_stderr": 0.03213418026701576 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4578313253012048, + "acc_stderr": 0.03878626771002361, + "acc_norm": 0.4578313253012048, + "acc_norm_stderr": 0.03878626771002361 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4437299035369775, + "acc_stderr": 0.02821768355665232, + "acc_norm": 0.4437299035369775, + "acc_norm_stderr": 0.02821768355665232 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.47085201793721976, + "acc_stderr": 0.03350073248773403, + "acc_norm": 0.47085201793721976, + "acc_norm_stderr": 0.03350073248773403 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.40458015267175573, + "acc_stderr": 0.043046937953806645, + "acc_norm": 0.40458015267175573, + "acc_norm_stderr": 0.043046937953806645 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5353535353535354, + "acc_stderr": 0.03553436368828063, + "acc_norm": 0.5353535353535354, + "acc_norm_stderr": 0.03553436368828063 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4068965517241379, + "acc_stderr": 0.04093793981266236, + "acc_norm": 0.4068965517241379, + "acc_norm_stderr": 0.04093793981266236 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 
0.19607843137254902, + "acc_stderr": 0.03950581861179964, + "acc_norm": 0.19607843137254902, + "acc_norm_stderr": 0.03950581861179964 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3907563025210084, + "acc_stderr": 0.031693802357129965, + "acc_norm": 0.3907563025210084, + "acc_norm_stderr": 0.031693802357129965 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3769230769230769, + "acc_stderr": 0.024570975364225995, + "acc_norm": 0.3769230769230769, + "acc_norm_stderr": 0.024570975364225995 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.53, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.53, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.046482319871173156, + "acc_norm": 0.31, + "acc_norm_stderr": 0.046482319871173156 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.42592592592592593, + "acc_stderr": 0.0478034362693679, + "acc_norm": 0.42592592592592593, + "acc_norm_stderr": 0.0478034362693679 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.33497536945812806, + "acc_stderr": 0.033208527423483104, + "acc_norm": 0.33497536945812806, + "acc_norm_stderr": 0.033208527423483104 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4161290322580645, + "acc_stderr": 0.028040981380761543, + "acc_norm": 0.4161290322580645, + "acc_norm_stderr": 0.028040981380761543 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6495726495726496, + "acc_stderr": 0.03125610824421881, + "acc_norm": 0.6495726495726496, + "acc_norm_stderr": 0.03125610824421881 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4075471698113208, + "acc_stderr": 0.0302422338008545, + "acc_norm": 0.4075471698113208, + "acc_norm_stderr": 0.0302422338008545 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4727272727272727, + "acc_stderr": 0.04782001791380063, + "acc_norm": 0.4727272727272727, + "acc_norm_stderr": 0.04782001791380063 + }, + 
"harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.027940457136228412, + "acc_norm": 0.3, + "acc_norm_stderr": 0.027940457136228412 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.271523178807947, + "acc_stderr": 0.03631329803969654, + "acc_norm": 0.271523178807947, + "acc_norm_stderr": 0.03631329803969654 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.582089552238806, + "acc_stderr": 0.034875586404620636, + "acc_norm": 0.582089552238806, + "acc_norm_stderr": 0.034875586404620636 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4046242774566474, + "acc_stderr": 0.03742461193887249, + "acc_norm": 0.4046242774566474, + "acc_norm_stderr": 0.03742461193887249 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2830687830687831, + "acc_stderr": 0.023201392938194974, + "acc_norm": 0.2830687830687831, + "acc_norm_stderr": 0.023201392938194974 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3402777777777778, + "acc_stderr": 0.03962135573486219, + "acc_norm": 0.3402777777777778, + "acc_norm_stderr": 0.03962135573486219 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.62, + "acc_stderr": 0.04878317312145634, + "acc_norm": 0.62, + "acc_norm_stderr": 0.04878317312145634 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.42485549132947975, + "acc_stderr": 0.026613350840261746, + "acc_norm": 0.42485549132947975, + "acc_norm_stderr": 0.026613350840261746 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4049079754601227, + "acc_stderr": 0.03856672163548913, + "acc_norm": 0.4049079754601227, + "acc_norm_stderr": 0.03856672163548913 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.42592592592592593, + "acc_stderr": 0.027513747284379424, + "acc_norm": 0.42592592592592593, + "acc_norm_stderr": 0.027513747284379424 + }, + 
"harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.48186528497409326, + "acc_stderr": 0.03606065001832919, + "acc_norm": 0.48186528497409326, + "acc_norm_stderr": 0.03606065001832919 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.042270544512321984, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.042270544512321984 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.48990825688073397, + "acc_stderr": 0.021432956203453306, + "acc_norm": 0.48990825688073397, + "acc_norm_stderr": 0.021432956203453306 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.04006168083848876, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.04006168083848876 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4477124183006536, + "acc_stderr": 0.028472938478033522, + "acc_norm": 0.4477124183006536, + "acc_norm_stderr": 0.028472938478033522 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.04545454545454546, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.04545454545454546 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.375, + "acc_stderr": 0.039397364351956274, + "acc_norm": 0.375, + "acc_norm_stderr": 0.039397364351956274 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.34967320261437906, + "acc_stderr": 0.01929196189506638, + "acc_norm": 0.34967320261437906, + "acc_norm_stderr": 0.01929196189506638 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.29432624113475175, + "acc_stderr": 0.027187127011503796, + "acc_norm": 0.29432624113475175, + "acc_norm_stderr": 0.027187127011503796 + }, + 
"harness|ko_mmlu_machine_learning|5": { + "acc": 0.375, + "acc_stderr": 0.04595091388086298, + "acc_norm": 0.375, + "acc_norm_stderr": 0.04595091388086298 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2824074074074074, + "acc_stderr": 0.030701372111510923, + "acc_norm": 0.2824074074074074, + "acc_norm_stderr": 0.030701372111510923 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.25921787709497207, + "acc_stderr": 0.01465578083749773, + "acc_norm": 0.25921787709497207, + "acc_norm_stderr": 0.01465578083749773 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3235294117647059, + "acc_stderr": 0.028418208619406794, + "acc_norm": 0.3235294117647059, + "acc_norm_stderr": 0.028418208619406794 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.46938775510204084, + "acc_stderr": 0.031949171367580624, + "acc_norm": 0.46938775510204084, + "acc_norm_stderr": 0.031949171367580624 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.540084388185654, + "acc_stderr": 0.03244246810187913, + "acc_norm": 0.540084388185654, + "acc_norm_stderr": 0.03244246810187913 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3057366362451108, + "acc_stderr": 0.011766973847072912, + "acc_norm": 0.3057366362451108, + "acc_norm_stderr": 0.011766973847072912 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.44607843137254904, + "acc_stderr": 0.03488845451304974, + "acc_norm": 0.44607843137254904, + "acc_norm_stderr": 0.03488845451304974 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5272727272727272, + "acc_stderr": 0.03898531605579418, + "acc_norm": 0.5272727272727272, + "acc_norm_stderr": 
0.03898531605579418 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.27050183598531213, + "mc1_stderr": 0.015550778332842878, + "mc2": 0.4428626236611729, + "mc2_stderr": 0.01586888088479542 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.45218417945690675, + "acc_stderr": 0.017111567130916785, + "acc_norm": 0.49940968122786306, + "acc_norm_stderr": 0.017190342123448662 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + 
"harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "moondriller/anarchy-llama2-13B-v2", + "model_sha": "daf4e97d173141685887b3aecce887f93224c3f5", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/moondriller/anarchy-llama2-13B/result_2024-04-03 07:27:33.json b/moondriller/anarchy-llama2-13B/result_2024-04-03 07:27:33.json new file mode 100644 index 0000000000000000000000000000000000000000..848282936859dbb691d138463a33b7564eed4ac0 --- /dev/null +++ b/moondriller/anarchy-llama2-13B/result_2024-04-03 07:27:33.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.40102389078498296, + "acc_stderr": 0.014322255790719869, + "acc_norm": 0.4539249146757679, + "acc_norm_stderr": 0.014549221105171864 + }, + 
"harness|ko_hellaswag|10": { + "acc": 0.4326827325234017, + "acc_stderr": 0.0049443510655458565, + "acc_norm": 0.579964150567616, + "acc_norm_stderr": 0.004925556104679417 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5263157894736842, + "acc_stderr": 0.03829509868994727, + "acc_norm": 0.5263157894736842, + "acc_norm_stderr": 0.03829509868994727 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4368932038834951, + "acc_stderr": 0.04911147107365777, + "acc_norm": 0.4368932038834951, + "acc_norm_stderr": 0.04911147107365777 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5440613026819924, + "acc_stderr": 0.017810403925435342, + "acc_norm": 0.5440613026819924, + "acc_norm_stderr": 0.017810403925435342 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4740740740740741, + "acc_stderr": 0.04313531696750574, + "acc_norm": 0.4740740740740741, + "acc_norm_stderr": 0.04313531696750574 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206824, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206824 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3659574468085106, + "acc_stderr": 0.0314895582974553, + "acc_norm": 0.3659574468085106, + "acc_norm_stderr": 0.0314895582974553 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4397590361445783, + "acc_stderr": 0.03864139923699122, + "acc_norm": 0.4397590361445783, + "acc_norm_stderr": 0.03864139923699122 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.48231511254019294, + "acc_stderr": 0.02838032284907713, + "acc_norm": 0.48231511254019294, + "acc_norm_stderr": 0.02838032284907713 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.515695067264574, + "acc_stderr": 0.0335412657542081, + "acc_norm": 0.515695067264574, + "acc_norm_stderr": 0.0335412657542081 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4732824427480916, + "acc_stderr": 0.04379024936553894, + "acc_norm": 0.4732824427480916, + "acc_norm_stderr": 0.04379024936553894 + }, + 
"harness|ko_mmlu_medical_genetics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5050505050505051, + "acc_stderr": 0.035621707606254015, + "acc_norm": 0.5050505050505051, + "acc_norm_stderr": 0.035621707606254015 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4482758620689655, + "acc_stderr": 0.04144311810878151, + "acc_norm": 0.4482758620689655, + "acc_norm_stderr": 0.04144311810878151 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.18627450980392157, + "acc_stderr": 0.03873958714149353, + "acc_norm": 0.18627450980392157, + "acc_norm_stderr": 0.03873958714149353 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.032145368597886394, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.032145368597886394 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4153846153846154, + "acc_stderr": 0.024985354923102304, + "acc_norm": 0.4153846153846154, + "acc_norm_stderr": 0.024985354923102304 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.42592592592592593, + "acc_stderr": 0.0478034362693679, + "acc_norm": 0.42592592592592593, + "acc_norm_stderr": 0.0478034362693679 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.33004926108374383, + "acc_stderr": 0.03308530426228258, + "acc_norm": 0.33004926108374383, + "acc_norm_stderr": 0.03308530426228258 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4612903225806452, + "acc_stderr": 0.02835863485983692, + "acc_norm": 0.4612903225806452, + 
"acc_norm_stderr": 0.02835863485983692 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6452991452991453, + "acc_stderr": 0.03134250486245402, + "acc_norm": 0.6452991452991453, + "acc_norm_stderr": 0.03134250486245402 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.44528301886792454, + "acc_stderr": 0.030588052974270658, + "acc_norm": 0.44528301886792454, + "acc_norm_stderr": 0.030588052974270658 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5363636363636364, + "acc_stderr": 0.04776449162396197, + "acc_norm": 0.5363636363636364, + "acc_norm_stderr": 0.04776449162396197 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.23333333333333334, + "acc_stderr": 0.02578787422095931, + "acc_norm": 0.23333333333333334, + "acc_norm_stderr": 0.02578787422095931 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + "acc_stderr": 0.03684881521389023, + "acc_norm": 0.2847682119205298, + "acc_norm_stderr": 0.03684881521389023 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6069651741293532, + "acc_stderr": 0.0345368246603156, + "acc_norm": 0.6069651741293532, + "acc_norm_stderr": 0.0345368246603156 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3930635838150289, + "acc_stderr": 0.03724249595817731, + "acc_norm": 0.3930635838150289, + "acc_norm_stderr": 0.03724249595817731 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2830687830687831, + "acc_stderr": 0.023201392938194978, + "acc_norm": 0.2830687830687831, + "acc_norm_stderr": 0.023201392938194978 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.040166600304512336, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.040166600304512336 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.61, + "acc_stderr": 
0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4884393063583815, + "acc_stderr": 0.026911898686377913, + "acc_norm": 0.4884393063583815, + "acc_norm_stderr": 0.026911898686377913 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4785276073619632, + "acc_stderr": 0.03924746876751129, + "acc_norm": 0.4785276073619632, + "acc_norm_stderr": 0.03924746876751129 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4660493827160494, + "acc_stderr": 0.02775653525734767, + "acc_norm": 0.4660493827160494, + "acc_norm_stderr": 0.02775653525734767 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5233160621761658, + "acc_stderr": 0.036045136724422014, + "acc_norm": 0.5233160621761658, + "acc_norm_stderr": 0.036045136724422014 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.041857744240220575, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.041857744240220575 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5321100917431193, + "acc_stderr": 0.021393071222680804, + "acc_norm": 0.5321100917431193, + "acc_norm_stderr": 0.021393071222680804 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.23809523809523808, + "acc_stderr": 0.038095238095238106, + "acc_norm": 0.23809523809523808, + "acc_norm_stderr": 0.038095238095238106 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4673202614379085, + "acc_stderr": 0.028568699752225875, + "acc_norm": 0.4673202614379085, + "acc_norm_stderr": 0.028568699752225875 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 
0.5867768595041323, + "acc_stderr": 0.04495087843548408, + "acc_norm": 0.5867768595041323, + "acc_norm_stderr": 0.04495087843548408 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40789473684210525, + "acc_stderr": 0.03999309712777472, + "acc_norm": 0.40789473684210525, + "acc_norm_stderr": 0.03999309712777472 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3872549019607843, + "acc_stderr": 0.019706875804085637, + "acc_norm": 0.3872549019607843, + "acc_norm_stderr": 0.019706875804085637 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.30141843971631205, + "acc_stderr": 0.02737412888263115, + "acc_norm": 0.30141843971631205, + "acc_norm_stderr": 0.02737412888263115 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2767857142857143, + "acc_stderr": 0.04246624336697624, + "acc_norm": 0.2767857142857143, + "acc_norm_stderr": 0.04246624336697624 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.32407407407407407, + "acc_stderr": 0.03191923445686186, + "acc_norm": 0.32407407407407407, + "acc_norm_stderr": 0.03191923445686186 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24692737430167597, + "acc_stderr": 0.014422292204808852, + "acc_norm": 0.24692737430167597, + "acc_norm_stderr": 0.014422292204808852 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.04960449637488584, + "acc_norm": 0.42, + "acc_norm_stderr": 0.04960449637488584 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.02952009569768776, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.02952009569768776 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4775510204081633, + "acc_stderr": 0.03197694118713672, + "acc_norm": 0.4775510204081633, + "acc_norm_stderr": 
0.03197694118713672 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5822784810126582, + "acc_stderr": 0.032103530322412685, + "acc_norm": 0.5822784810126582, + "acc_norm_stderr": 0.032103530322412685 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3272490221642764, + "acc_stderr": 0.011983819806464745, + "acc_norm": 0.3272490221642764, + "acc_norm_stderr": 0.011983819806464745 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.45588235294117646, + "acc_stderr": 0.03495624522015474, + "acc_norm": 0.45588235294117646, + "acc_norm_stderr": 0.03495624522015474 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5575757575757576, + "acc_stderr": 0.03878372113711275, + "acc_norm": 0.5575757575757576, + "acc_norm_stderr": 0.03878372113711275 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.25703794369645044, + "mc1_stderr": 0.015298077509485081, + "mc2": 0.4341506838824099, + "mc2_stderr": 0.015209649479727722 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5064935064935064, + "acc_stderr": 0.017188904359077307, + "acc_norm": 0.5667060212514758, + "acc_norm_stderr": 0.017036683641893095 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + 
"harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "moondriller/anarchy-llama2-13B", + "model_sha": "3da5f602a176b3f31946756f9619fbe650bc5f1c", + 
"model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/moondriller/anarchy-solar-10B-v1/result_2024-04-05 08:43:03.json b/moondriller/anarchy-solar-10B-v1/result_2024-04-05 08:43:03.json new file mode 100644 index 0000000000000000000000000000000000000000..4d8e6c71b77c5ad62583d282fde114e7b9093557 --- /dev/null +++ b/moondriller/anarchy-solar-10B-v1/result_2024-04-05 08:43:03.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.7320819112627986, + "acc_stderr": 0.01294203019513643, + "acc_norm": 0.7687713310580204, + "acc_norm_stderr": 0.012320858834772267 + }, + "harness|ko_hellaswag|10": { + "acc": 0.6981676956781517, + "acc_stderr": 0.004581147247963184, + "acc_norm": 0.8124875522804222, + "acc_norm_stderr": 0.00389524632045276 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6900584795321637, + "acc_stderr": 0.035469769593931624, + "acc_norm": 0.6900584795321637, + "acc_norm_stderr": 0.035469769593931624 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.7475728155339806, + "acc_stderr": 0.04301250399690878, + "acc_norm": 0.7475728155339806, + "acc_norm_stderr": 0.04301250399690878 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6883780332056194, + "acc_stderr": 0.016562433867284176, + "acc_norm": 0.6883780332056194, + "acc_norm_stderr": 0.016562433867284176 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.043097329010363554, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.043097329010363554 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4765957446808511, + "acc_stderr": 0.03265019475033583, + "acc_norm": 0.4765957446808511, + "acc_norm_stderr": 
0.03265019475033583 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4759036144578313, + "acc_stderr": 0.03887971849597264, + "acc_norm": 0.4759036144578313, + "acc_norm_stderr": 0.03887971849597264 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6270096463022508, + "acc_stderr": 0.027466610213140098, + "acc_norm": 0.6270096463022508, + "acc_norm_stderr": 0.027466610213140098 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.6502242152466368, + "acc_stderr": 0.03200736719484503, + "acc_norm": 0.6502242152466368, + "acc_norm_stderr": 0.03200736719484503 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5572519083969466, + "acc_stderr": 0.04356447202665069, + "acc_norm": 0.5572519083969466, + "acc_norm_stderr": 0.04356447202665069 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.51, + "acc_stderr": 0.050241839379569095, + "acc_norm": 0.51, + "acc_norm_stderr": 0.050241839379569095 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7575757575757576, + "acc_stderr": 0.030532892233932036, + "acc_norm": 0.7575757575757576, + "acc_norm_stderr": 0.030532892233932036 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5310344827586206, + "acc_stderr": 0.04158632762097828, + "acc_norm": 0.5310344827586206, + "acc_norm_stderr": 0.04158632762097828 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.35294117647058826, + "acc_stderr": 0.04755129616062946, + "acc_norm": 0.35294117647058826, + "acc_norm_stderr": 0.04755129616062946 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6176470588235294, + "acc_stderr": 0.031566630992154156, + "acc_norm": 0.6176470588235294, + "acc_norm_stderr": 0.031566630992154156 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.617948717948718, + "acc_stderr": 0.024635549163908237, + "acc_norm": 0.617948717948718, + "acc_norm_stderr": 0.024635549163908237 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.66, + "acc_stderr": 0.04760952285695237, + 
"acc_norm": 0.66, + "acc_norm_stderr": 0.04760952285695237 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6481481481481481, + "acc_stderr": 0.04616631111801715, + "acc_norm": 0.6481481481481481, + "acc_norm_stderr": 0.04616631111801715 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4729064039408867, + "acc_stderr": 0.03512819077876106, + "acc_norm": 0.4729064039408867, + "acc_norm_stderr": 0.03512819077876106 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5838709677419355, + "acc_stderr": 0.028040981380761533, + "acc_norm": 0.5838709677419355, + "acc_norm_stderr": 0.028040981380761533 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.8376068376068376, + "acc_stderr": 0.024161618127987745, + "acc_norm": 0.8376068376068376, + "acc_norm_stderr": 0.024161618127987745 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5547169811320755, + "acc_stderr": 0.030588052974270655, + "acc_norm": 0.5547169811320755, + "acc_norm_stderr": 0.030588052974270655 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6, + "acc_stderr": 0.0469237132203465, + "acc_norm": 0.6, + "acc_norm_stderr": 0.0469237132203465 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.028742040903948496, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.028742040903948496 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3443708609271523, + "acc_stderr": 0.038796870240733264, + "acc_norm": 0.3443708609271523, + "acc_norm_stderr": 0.038796870240733264 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7164179104477612, + "acc_stderr": 0.03187187537919796, + "acc_norm": 0.7164179104477612, + "acc_norm_stderr": 0.03187187537919796 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5260115606936416, + "acc_stderr": 
0.038073017265045125, + "acc_norm": 0.5260115606936416, + "acc_norm_stderr": 0.038073017265045125 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.4497354497354497, + "acc_stderr": 0.02562085704293665, + "acc_norm": 0.4497354497354497, + "acc_norm_stderr": 0.02562085704293665 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.6458333333333334, + "acc_stderr": 0.039994111357535424, + "acc_norm": 0.6458333333333334, + "acc_norm_stderr": 0.039994111357535424 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.71, + "acc_stderr": 0.04560480215720683, + "acc_norm": 0.71, + "acc_norm_stderr": 0.04560480215720683 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5664739884393064, + "acc_stderr": 0.026680134761679217, + "acc_norm": 0.5664739884393064, + "acc_norm_stderr": 0.026680134761679217 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.6134969325153374, + "acc_stderr": 0.03825825548848607, + "acc_norm": 0.6134969325153374, + "acc_norm_stderr": 0.03825825548848607 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6574074074074074, + "acc_stderr": 0.026406145973625682, + "acc_norm": 0.6574074074074074, + "acc_norm_stderr": 0.026406145973625682 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7461139896373057, + "acc_stderr": 0.03141024780565318, + "acc_norm": 0.7461139896373057, + "acc_norm_stderr": 0.03141024780565318 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.4473684210526316, + "acc_stderr": 0.04677473004491199, + "acc_norm": 0.4473684210526316, + "acc_norm_stderr": 0.04677473004491199 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.7302752293577982, + 
"acc_stderr": 0.01902848671111545, + "acc_norm": 0.7302752293577982, + "acc_norm_stderr": 0.01902848671111545 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.42063492063492064, + "acc_stderr": 0.04415438226743744, + "acc_norm": 0.42063492063492064, + "acc_norm_stderr": 0.04415438226743744 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5849673202614379, + "acc_stderr": 0.028213504177824093, + "acc_norm": 0.5849673202614379, + "acc_norm_stderr": 0.028213504177824093 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.69, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.69, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7107438016528925, + "acc_stderr": 0.04139112727635463, + "acc_norm": 0.7107438016528925, + "acc_norm_stderr": 0.04139112727635463 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.6381578947368421, + "acc_stderr": 0.03910525752849724, + "acc_norm": 0.6381578947368421, + "acc_norm_stderr": 0.03910525752849724 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5686274509803921, + "acc_stderr": 0.020036393768352628, + "acc_norm": 0.5686274509803921, + "acc_norm_stderr": 0.020036393768352628 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.4326241134751773, + "acc_stderr": 0.02955545423677885, + "acc_norm": 0.4326241134751773, + "acc_norm_stderr": 0.02955545423677885 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4375, + "acc_stderr": 0.04708567521880525, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.04708567521880525 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.0340763209385405, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.0340763209385405 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.3318435754189944, + "acc_stderr": 0.015748421208187303, + "acc_norm": 0.3318435754189944, + "acc_norm_stderr": 0.015748421208187303 + }, + "harness|ko_mmlu_college_computer_science|5": 
{ + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.69, + "acc_stderr": 0.046482319871173156, + "acc_norm": 0.69, + "acc_norm_stderr": 0.046482319871173156 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5, + "acc_stderr": 0.030372836961539352, + "acc_norm": 0.5, + "acc_norm_stderr": 0.030372836961539352 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.673469387755102, + "acc_stderr": 0.03002105623844033, + "acc_norm": 0.673469387755102, + "acc_norm_stderr": 0.03002105623844033 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.729957805907173, + "acc_stderr": 0.028900721906293426, + "acc_norm": 0.729957805907173, + "acc_norm_stderr": 0.028900721906293426 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.44784876140808344, + "acc_stderr": 0.01270058240476823, + "acc_norm": 0.44784876140808344, + "acc_norm_stderr": 0.01270058240476823 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6813725490196079, + "acc_stderr": 0.032702871814820816, + "acc_norm": 0.6813725490196079, + "acc_norm_stderr": 0.032702871814820816 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6121212121212121, + "acc_stderr": 0.0380491365397101, + "acc_norm": 0.6121212121212121, + "acc_norm_stderr": 0.0380491365397101 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.7649938800489596, + "mc1_stderr": 0.014843061507731613, + "mc2": 0.8387512223724319, + "mc2_stderr": 0.012341439997261935 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.49940968122786306, + "acc_stderr": 0.017190342123448665, + "acc_norm": 0.5312868949232585, + "acc_norm_stderr": 0.017156666859785466 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, 
+ "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + 
"harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "moondriller/anarchy-solar-10B-v1", + "model_sha": "3d300c7a81eca02931d8c025b1bcab6962fd9695", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/moondriller/llama2-13B-eugeneparkthebest/result_2024-03-25 04:33:48.json b/moondriller/llama2-13B-eugeneparkthebest/result_2024-03-25 04:33:48.json new file mode 100644 index 0000000000000000000000000000000000000000..a6770a4017dd01e2b524ad483d8ecf5e2b008746 --- /dev/null +++ b/moondriller/llama2-13B-eugeneparkthebest/result_2024-03-25 04:33:48.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.37542662116040953, + "acc_stderr": 0.01415063143511173, + "acc_norm": 0.43856655290102387, + "acc_norm_stderr": 0.01450068261821286 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4245170284803824, + "acc_stderr": 0.004932593348813624, + "acc_norm": 0.5776737701653057, + "acc_norm_stderr": 0.004929204864315971 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5380116959064327, + "acc_stderr": 0.03823727092882307, + "acc_norm": 0.5380116959064327, + "acc_norm_stderr": 0.03823727092882307 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.49514563106796117, + "acc_stderr": 0.049505043821289195, + "acc_norm": 0.49514563106796117, + "acc_norm_stderr": 0.049505043821289195 + }, + 
"harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4878671775223499, + "acc_stderr": 0.01787469866749135, + "acc_norm": 0.4878671775223499, + "acc_norm_stderr": 0.01787469866749135 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4222222222222222, + "acc_stderr": 0.04266763404099582, + "acc_norm": 0.4222222222222222, + "acc_norm_stderr": 0.04266763404099582 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2936170212765957, + "acc_stderr": 0.02977164271249122, + "acc_norm": 0.2936170212765957, + "acc_norm_stderr": 0.02977164271249122 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39759036144578314, + "acc_stderr": 0.038099730845402184, + "acc_norm": 0.39759036144578314, + "acc_norm_stderr": 0.038099730845402184 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5080385852090032, + "acc_stderr": 0.028394421370984538, + "acc_norm": 0.5080385852090032, + "acc_norm_stderr": 0.028394421370984538 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3452914798206278, + "acc_stderr": 0.031911001928357934, + "acc_norm": 0.3452914798206278, + "acc_norm_stderr": 0.031911001928357934 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48091603053435117, + "acc_stderr": 0.04382094705550988, + "acc_norm": 0.48091603053435117, + "acc_norm_stderr": 0.04382094705550988 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5959595959595959, + "acc_stderr": 0.034961309720561266, + "acc_norm": 0.5959595959595959, + "acc_norm_stderr": 0.034961309720561266 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.42758620689655175, + "acc_stderr": 0.0412273711137033, + "acc_norm": 0.42758620689655175, + "acc_norm_stderr": 0.0412273711137033 + }, 
+ "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.04158307533083286, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.04158307533083286 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.37815126050420167, + "acc_stderr": 0.03149930577784906, + "acc_norm": 0.37815126050420167, + "acc_norm_stderr": 0.03149930577784906 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.33589743589743587, + "acc_stderr": 0.02394672474156397, + "acc_norm": 0.33589743589743587, + "acc_norm_stderr": 0.02394672474156397 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.53, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.53, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5092592592592593, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.5092592592592593, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3448275862068966, + "acc_stderr": 0.033442837442804574, + "acc_norm": 0.3448275862068966, + "acc_norm_stderr": 0.033442837442804574 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.44193548387096776, + "acc_stderr": 0.028251557906849748, + "acc_norm": 0.44193548387096776, + "acc_norm_stderr": 0.028251557906849748 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6324786324786325, + "acc_stderr": 0.031585391577456365, + "acc_norm": 0.6324786324786325, + "acc_norm_stderr": 0.031585391577456365 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4377358490566038, + "acc_stderr": 0.030533338430467516, + "acc_norm": 0.4377358490566038, + "acc_norm_stderr": 0.030533338430467516 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4, + "acc_stderr": 0.0469237132203465, + "acc_norm": 0.4, + "acc_norm_stderr": 
0.0469237132203465 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.027309140588230193, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.027309140588230193 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33774834437086093, + "acc_stderr": 0.038615575462551684, + "acc_norm": 0.33774834437086093, + "acc_norm_stderr": 0.038615575462551684 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5422885572139303, + "acc_stderr": 0.03522865864099598, + "acc_norm": 0.5422885572139303, + "acc_norm_stderr": 0.03522865864099598 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3988439306358382, + "acc_stderr": 0.037336266553835096, + "acc_norm": 0.3988439306358382, + "acc_norm_stderr": 0.037336266553835096 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.022569897074918428, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.022569897074918428 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.04016660030451233, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.04016660030451233 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.47109826589595377, + "acc_stderr": 0.02687408588351835, + "acc_norm": 0.47109826589595377, + "acc_norm_stderr": 0.02687408588351835 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4171779141104294, + "acc_stderr": 0.038741028598180814, + "acc_norm": 0.4171779141104294, + "acc_norm_stderr": 0.038741028598180814 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.44753086419753085, + "acc_stderr": 0.027667138569422704, + "acc_norm": 
0.44753086419753085, + "acc_norm_stderr": 0.027667138569422704 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.41968911917098445, + "acc_stderr": 0.03561587327685884, + "acc_norm": 0.41968911917098445, + "acc_norm_stderr": 0.03561587327685884 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.04142439719489362, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.04142439719489362 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.46422018348623856, + "acc_stderr": 0.0213823647757019, + "acc_norm": 0.46422018348623856, + "acc_norm_stderr": 0.0213823647757019 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.040061680838488774, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.040061680838488774 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.477124183006536, + "acc_stderr": 0.028599936776089786, + "acc_norm": 0.477124183006536, + "acc_norm_stderr": 0.028599936776089786 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.628099173553719, + "acc_stderr": 0.04412015806624503, + "acc_norm": 0.628099173553719, + "acc_norm_stderr": 0.04412015806624503 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3684210526315789, + "acc_stderr": 0.03925523381052932, + "acc_norm": 0.3684210526315789, + "acc_norm_stderr": 0.03925523381052932 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3790849673202614, + "acc_stderr": 0.019627444748412225, + "acc_norm": 0.3790849673202614, + "acc_norm_stderr": 0.019627444748412225 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2978723404255319, + 
"acc_stderr": 0.027281608344469414, + "acc_norm": 0.2978723404255319, + "acc_norm_stderr": 0.027281608344469414 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.042878587513404544, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.042878587513404544 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.28703703703703703, + "acc_stderr": 0.030851992993257013, + "acc_norm": 0.28703703703703703, + "acc_norm_stderr": 0.030851992993257013 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.31985294117647056, + "acc_stderr": 0.02833295951403124, + "acc_norm": 0.31985294117647056, + "acc_norm_stderr": 0.02833295951403124 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4530612244897959, + "acc_stderr": 0.03186785930004129, + "acc_norm": 0.4530612244897959, + "acc_norm_stderr": 0.03186785930004129 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5147679324894515, + "acc_stderr": 0.032533028078777386, + "acc_norm": 0.5147679324894515, + "acc_norm_stderr": 0.032533028078777386 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3259452411994785, + "acc_stderr": 0.011971507294982777, + "acc_norm": 0.3259452411994785, + "acc_norm_stderr": 0.011971507294982777 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.45098039215686275, + "acc_stderr": 0.03492406104163613, + "acc_norm": 0.45098039215686275, + "acc_norm_stderr": 0.03492406104163613 + }, 
+ "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4909090909090909, + "acc_stderr": 0.03903698647748441, + "acc_norm": 0.4909090909090909, + "acc_norm_stderr": 0.03903698647748441 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.30354957160342716, + "mc1_stderr": 0.016095884155386854, + "mc2": 0.46768176908038145, + "mc2_stderr": 0.015264379247144485 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4982290436835891, + "acc_stderr": 0.017190246276231867, + "acc_norm": 0.5714285714285714, + "acc_norm_stderr": 0.017014038119297508 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + 
"harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "moondriller/llama2-13B-eugeneparkthebest", + "model_sha": "8e0740c8f0a61903c563126bc45d0e9a16547742", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/moondriller/solar10B-eugeneparkthebest/result_2024-03-27 05:04:52.json b/moondriller/solar10B-eugeneparkthebest/result_2024-03-27 05:04:52.json new file mode 100644 index 0000000000000000000000000000000000000000..0bb9270929fba001bd2c94699f8b0bd7da9ab7f3 --- /dev/null +++ b/moondriller/solar10B-eugeneparkthebest/result_2024-03-27 05:04:52.json @@ -0,0 +1,444 @@ +{ + "results": { + 
"harness|ko_arc_challenge|25": { + "acc": 0.34726962457337884, + "acc_stderr": 0.01391303452962045, + "acc_norm": 0.42662116040955633, + "acc_norm_stderr": 0.014453185592920293 + }, + "harness|ko_hellaswag|10": { + "acc": 0.33608842859988053, + "acc_stderr": 0.004714041652598607, + "acc_norm": 0.477096195976897, + "acc_norm_stderr": 0.004984543540932338 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6140350877192983, + "acc_stderr": 0.03733756969066164, + "acc_norm": 0.6140350877192983, + "acc_norm_stderr": 0.03733756969066164 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6699029126213593, + "acc_stderr": 0.0465614711001235, + "acc_norm": 0.6699029126213593, + "acc_norm_stderr": 0.0465614711001235 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6219667943805874, + "acc_stderr": 0.017339844462104577, + "acc_norm": 0.6219667943805874, + "acc_norm_stderr": 0.017339844462104577 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45925925925925926, + "acc_stderr": 0.04304979692464244, + "acc_norm": 0.45925925925925926, + "acc_norm_stderr": 0.04304979692464244 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4595744680851064, + "acc_stderr": 0.032579014820998356, + "acc_norm": 0.4595744680851064, + "acc_norm_stderr": 0.032579014820998356 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4457831325301205, + "acc_stderr": 0.03869543323472101, + "acc_norm": 0.4457831325301205, + "acc_norm_stderr": 0.03869543323472101 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5627009646302251, + "acc_stderr": 0.028173917761762906, + "acc_norm": 0.5627009646302251, + "acc_norm_stderr": 0.028173917761762906 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5650224215246636, + "acc_stderr": 0.033272833702713445, + "acc_norm": 0.5650224215246636, + "acc_norm_stderr": 0.033272833702713445 + }, 
+ "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5572519083969466, + "acc_stderr": 0.04356447202665069, + "acc_norm": 0.5572519083969466, + "acc_norm_stderr": 0.04356447202665069 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.45, + "acc_stderr": 0.04999999999999999, + "acc_norm": 0.45, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6919191919191919, + "acc_stderr": 0.03289477330098615, + "acc_norm": 0.6919191919191919, + "acc_norm_stderr": 0.03289477330098615 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5241379310344828, + "acc_stderr": 0.0416180850350153, + "acc_norm": 0.5241379310344828, + "acc_norm_stderr": 0.0416180850350153 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.04533838195929775, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.04533838195929775 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5252100840336135, + "acc_stderr": 0.0324371805513741, + "acc_norm": 0.5252100840336135, + "acc_norm_stderr": 0.0324371805513741 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5358974358974359, + "acc_stderr": 0.025285585990017866, + "acc_norm": 0.5358974358974359, + "acc_norm_stderr": 0.025285585990017866 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.59, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.59, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6111111111111112, + "acc_stderr": 0.04712821257426769, + "acc_norm": 0.6111111111111112, + "acc_norm_stderr": 0.04712821257426769 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4088669950738916, + "acc_stderr": 0.03459058815883231, + "acc_norm": 0.4088669950738916, + "acc_norm_stderr": 
0.03459058815883231 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6064516129032258, + "acc_stderr": 0.02779187875313226, + "acc_norm": 0.6064516129032258, + "acc_norm_stderr": 0.02779187875313226 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7606837606837606, + "acc_stderr": 0.027951826808924333, + "acc_norm": 0.7606837606837606, + "acc_norm_stderr": 0.027951826808924333 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5283018867924528, + "acc_stderr": 0.0307235352490061, + "acc_norm": 0.5283018867924528, + "acc_norm_stderr": 0.0307235352490061 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5727272727272728, + "acc_stderr": 0.04738198703545483, + "acc_norm": 0.5727272727272728, + "acc_norm_stderr": 0.04738198703545483 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.34074074074074073, + "acc_stderr": 0.028897748741131133, + "acc_norm": 0.34074074074074073, + "acc_norm_stderr": 0.028897748741131133 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.038020397601079024, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.038020397601079024 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7213930348258707, + "acc_stderr": 0.031700561834973086, + "acc_norm": 0.7213930348258707, + "acc_norm_stderr": 0.031700561834973086 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4277456647398844, + "acc_stderr": 0.03772446857518026, + "acc_norm": 0.4277456647398844, + "acc_norm_stderr": 0.03772446857518026 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3915343915343915, + "acc_stderr": 0.025138091388851102, + "acc_norm": 0.3915343915343915, + "acc_norm_stderr": 0.025138091388851102 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4930555555555556, + "acc_stderr": 0.041808067502949374, + "acc_norm": 0.4930555555555556, + "acc_norm_stderr": 0.041808067502949374 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.35, + "acc_stderr": 
0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.66, + "acc_stderr": 0.04760952285695237, + "acc_norm": 0.66, + "acc_norm_stderr": 0.04760952285695237 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.546242774566474, + "acc_stderr": 0.026803720583206177, + "acc_norm": 0.546242774566474, + "acc_norm_stderr": 0.026803720583206177 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5398773006134969, + "acc_stderr": 0.039158572914369714, + "acc_norm": 0.5398773006134969, + "acc_norm_stderr": 0.039158572914369714 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5524691358024691, + "acc_stderr": 0.027667138569422697, + "acc_norm": 0.5524691358024691, + "acc_norm_stderr": 0.027667138569422697 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6580310880829016, + "acc_stderr": 0.03423465100104283, + "acc_norm": 0.6580310880829016, + "acc_norm_stderr": 0.03423465100104283 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.32456140350877194, + "acc_stderr": 0.04404556157374768, + "acc_norm": 0.32456140350877194, + "acc_norm_stderr": 0.04404556157374768 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6678899082568808, + "acc_stderr": 0.020192682985423344, + "acc_norm": 0.6678899082568808, + "acc_norm_stderr": 0.020192682985423344 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.36507936507936506, + "acc_stderr": 0.043062412591271526, + "acc_norm": 0.36507936507936506, + "acc_norm_stderr": 0.043062412591271526 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5588235294117647, + "acc_stderr": 0.028431095444176636, + "acc_norm": 0.5588235294117647, + "acc_norm_stderr": 0.028431095444176636 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.52, + 
"acc_stderr": 0.05021167315686779, + "acc_norm": 0.52, + "acc_norm_stderr": 0.05021167315686779 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7272727272727273, + "acc_stderr": 0.04065578140908705, + "acc_norm": 0.7272727272727273, + "acc_norm_stderr": 0.04065578140908705 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5789473684210527, + "acc_stderr": 0.040179012759817494, + "acc_norm": 0.5789473684210527, + "acc_norm_stderr": 0.040179012759817494 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.48366013071895425, + "acc_stderr": 0.020217030653186446, + "acc_norm": 0.48366013071895425, + "acc_norm_stderr": 0.020217030653186446 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.36524822695035464, + "acc_stderr": 0.028723863853281278, + "acc_norm": 0.36524822695035464, + "acc_norm_stderr": 0.028723863853281278 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3392857142857143, + "acc_stderr": 0.04493949068613539, + "acc_norm": 0.3392857142857143, + "acc_norm_stderr": 0.04493949068613539 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4537037037037037, + "acc_stderr": 0.03395322726375797, + "acc_norm": 0.4537037037037037, + "acc_norm_stderr": 0.03395322726375797 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.25921787709497207, + "acc_stderr": 0.014655780837497719, + "acc_norm": 0.25921787709497207, + "acc_norm_stderr": 0.014655780837497719 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.67, + "acc_stderr": 0.04725815626252607, + "acc_norm": 0.67, + "acc_norm_stderr": 0.04725815626252607 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4338235294117647, + "acc_stderr": 0.03010563657001664, + "acc_norm": 0.4338235294117647, + "acc_norm_stderr": 0.03010563657001664 + }, + 
"harness|ko_mmlu_security_studies|5": { + "acc": 0.5959183673469388, + "acc_stderr": 0.03141470802586588, + "acc_norm": 0.5959183673469388, + "acc_norm_stderr": 0.03141470802586588 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6919831223628692, + "acc_stderr": 0.0300523893356057, + "acc_norm": 0.6919831223628692, + "acc_norm_stderr": 0.0300523893356057 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3852672750977836, + "acc_stderr": 0.012429485434955178, + "acc_norm": 0.3852672750977836, + "acc_norm_stderr": 0.012429485434955178 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6078431372549019, + "acc_stderr": 0.03426712349247272, + "acc_norm": 0.6078431372549019, + "acc_norm_stderr": 0.03426712349247272 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5818181818181818, + "acc_stderr": 0.03851716319398393, + "acc_norm": 0.5818181818181818, + "acc_norm_stderr": 0.03851716319398393 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.26560587515299877, + "mc1_stderr": 0.015461027627253597, + "mc2": 0.46052826278968023, + "mc2_stderr": 0.01640577830131905 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4604486422668241, + "acc_stderr": 0.017136487626049843, + "acc_norm": 0.5442739079102715, + "acc_norm_stderr": 0.017122829143292648 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + 
"harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + 
"harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "moondriller/solar10B-eugeneparkthebest", + "model_sha": "0a6a90892d7d6f8e6289f39e09a73613c6ce5594", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/moondriller/solar10B-eugeneparkthebestv2/result_2024-03-28 04:32:17.json b/moondriller/solar10B-eugeneparkthebestv2/result_2024-03-28 04:32:17.json new file mode 100644 index 0000000000000000000000000000000000000000..d4eacf6cc5639c2da9624842476110d706ea953a --- /dev/null +++ b/moondriller/solar10B-eugeneparkthebestv2/result_2024-03-28 04:32:17.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.34812286689419797, + "acc_stderr": 0.013921008595179352, + "acc_norm": 0.42918088737201365, + "acc_norm_stderr": 0.014464085894870648 + }, + "harness|ko_hellaswag|10": { + "acc": 0.33718382792272455, + "acc_stderr": 0.0047178207149687484, + "acc_norm": 0.4840669189404501, + "acc_norm_stderr": 0.004987247325495622 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6081871345029239, + "acc_stderr": 0.037439798259264, + "acc_norm": 0.6081871345029239, + "acc_norm_stderr": 0.037439798259264 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6601941747572816, + "acc_stderr": 0.046897659372781335, + "acc_norm": 0.6601941747572816, + "acc_norm_stderr": 0.046897659372781335 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6194125159642401, + "acc_stderr": 0.017362564126075425, + "acc_norm": 0.6194125159642401, + "acc_norm_stderr": 0.017362564126075425 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45925925925925926, + "acc_stderr": 0.04304979692464244, + "acc_norm": 0.45925925925925926, + "acc_norm_stderr": 0.04304979692464244 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + 
"acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4340425531914894, + "acc_stderr": 0.032400380867927465, + "acc_norm": 0.4340425531914894, + "acc_norm_stderr": 0.032400380867927465 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4457831325301205, + "acc_stderr": 0.03869543323472101, + "acc_norm": 0.4457831325301205, + "acc_norm_stderr": 0.03869543323472101 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5691318327974276, + "acc_stderr": 0.028125340983972714, + "acc_norm": 0.5691318327974276, + "acc_norm_stderr": 0.028125340983972714 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5560538116591929, + "acc_stderr": 0.03334625674242728, + "acc_norm": 0.5560538116591929, + "acc_norm_stderr": 0.03334625674242728 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5725190839694656, + "acc_stderr": 0.04338920305792401, + "acc_norm": 0.5725190839694656, + "acc_norm_stderr": 0.04338920305792401 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562429, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562429 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.702020202020202, + "acc_stderr": 0.032586303838365555, + "acc_norm": 0.702020202020202, + "acc_norm_stderr": 0.032586303838365555 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5241379310344828, + "acc_stderr": 0.0416180850350153, + "acc_norm": 0.5241379310344828, + "acc_norm_stderr": 0.0416180850350153 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.04533838195929775, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.04533838195929775 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5336134453781513, + "acc_stderr": 0.03240501447690071, + "acc_norm": 0.5336134453781513, + "acc_norm_stderr": 0.03240501447690071 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5333333333333333, + "acc_stderr": 
0.02529460802398646, + "acc_norm": 0.5333333333333333, + "acc_norm_stderr": 0.02529460802398646 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6111111111111112, + "acc_stderr": 0.04712821257426769, + "acc_norm": 0.6111111111111112, + "acc_norm_stderr": 0.04712821257426769 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4088669950738916, + "acc_stderr": 0.03459058815883231, + "acc_norm": 0.4088669950738916, + "acc_norm_stderr": 0.03459058815883231 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6064516129032258, + "acc_stderr": 0.02779187875313226, + "acc_norm": 0.6064516129032258, + "acc_norm_stderr": 0.02779187875313226 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7606837606837606, + "acc_stderr": 0.027951826808924333, + "acc_norm": 0.7606837606837606, + "acc_norm_stderr": 0.027951826808924333 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5320754716981132, + "acc_stderr": 0.030709486992556545, + "acc_norm": 0.5320754716981132, + "acc_norm_stderr": 0.030709486992556545 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6090909090909091, + "acc_stderr": 0.04673752333670239, + "acc_norm": 0.6090909090909091, + "acc_norm_stderr": 0.04673752333670239 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.34814814814814815, + "acc_stderr": 0.029045600290616265, + "acc_norm": 0.34814814814814815, + "acc_norm_stderr": 0.029045600290616265 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.038020397601079024, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.038020397601079024 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 
0.7213930348258707, + "acc_stderr": 0.031700561834973086, + "acc_norm": 0.7213930348258707, + "acc_norm_stderr": 0.031700561834973086 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.44508670520231214, + "acc_stderr": 0.03789401760283647, + "acc_norm": 0.44508670520231214, + "acc_norm_stderr": 0.03789401760283647 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3835978835978836, + "acc_stderr": 0.025043757318520203, + "acc_norm": 0.3835978835978836, + "acc_norm_stderr": 0.025043757318520203 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5138888888888888, + "acc_stderr": 0.041795966175810016, + "acc_norm": 0.5138888888888888, + "acc_norm_stderr": 0.041795966175810016 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.68, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.68, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.546242774566474, + "acc_stderr": 0.026803720583206174, + "acc_norm": 0.546242774566474, + "acc_norm_stderr": 0.026803720583206174 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5398773006134969, + "acc_stderr": 0.039158572914369714, + "acc_norm": 0.5398773006134969, + "acc_norm_stderr": 0.039158572914369714 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5493827160493827, + "acc_stderr": 0.027684721415656192, + "acc_norm": 0.5493827160493827, + "acc_norm_stderr": 0.027684721415656192 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6580310880829016, + "acc_stderr": 0.03423465100104284, + "acc_norm": 0.6580310880829016, + "acc_norm_stderr": 0.03423465100104284 + }, + 
"harness|ko_mmlu_econometrics|5": { + "acc": 0.32456140350877194, + "acc_stderr": 0.04404556157374768, + "acc_norm": 0.32456140350877194, + "acc_norm_stderr": 0.04404556157374768 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.673394495412844, + "acc_stderr": 0.0201069908899373, + "acc_norm": 0.673394495412844, + "acc_norm_stderr": 0.0201069908899373 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.38095238095238093, + "acc_stderr": 0.043435254289490965, + "acc_norm": 0.38095238095238093, + "acc_norm_stderr": 0.043435254289490965 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5522875816993464, + "acc_stderr": 0.02847293847803353, + "acc_norm": 0.5522875816993464, + "acc_norm_stderr": 0.02847293847803353 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956913, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956913 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7272727272727273, + "acc_stderr": 0.04065578140908705, + "acc_norm": 0.7272727272727273, + "acc_norm_stderr": 0.04065578140908705 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5723684210526315, + "acc_stderr": 0.040260970832965634, + "acc_norm": 0.5723684210526315, + "acc_norm_stderr": 0.040260970832965634 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.48366013071895425, + "acc_stderr": 0.020217030653186453, + "acc_norm": 0.48366013071895425, + "acc_norm_stderr": 0.020217030653186453 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3723404255319149, + "acc_stderr": 0.02883892147125146, + "acc_norm": 0.3723404255319149, + "acc_norm_stderr": 0.02883892147125146 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.33035714285714285, + "acc_stderr": 0.04464285714285713, + "acc_norm": 0.33035714285714285, + "acc_norm_stderr": 0.04464285714285713 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4537037037037037, + "acc_stderr": 0.03395322726375797, + "acc_norm": 
0.4537037037037037, + "acc_norm_stderr": 0.03395322726375797 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.264804469273743, + "acc_stderr": 0.014756906483260659, + "acc_norm": 0.264804469273743, + "acc_norm_stderr": 0.014756906483260659 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.67, + "acc_stderr": 0.04725815626252607, + "acc_norm": 0.67, + "acc_norm_stderr": 0.04725815626252607 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.43014705882352944, + "acc_stderr": 0.030074971917302875, + "acc_norm": 0.43014705882352944, + "acc_norm_stderr": 0.030074971917302875 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5959183673469388, + "acc_stderr": 0.03141470802586588, + "acc_norm": 0.5959183673469388, + "acc_norm_stderr": 0.03141470802586588 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6962025316455697, + "acc_stderr": 0.02993669638713861, + "acc_norm": 0.6962025316455697, + "acc_norm_stderr": 0.02993669638713861 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.38461538461538464, + "acc_stderr": 0.012425548416302943, + "acc_norm": 0.38461538461538464, + "acc_norm_stderr": 0.012425548416302943 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5980392156862745, + "acc_stderr": 0.03441190023482465, + "acc_norm": 0.5980392156862745, + "acc_norm_stderr": 0.03441190023482465 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6060606060606061, + "acc_stderr": 0.03815494308688931, + "acc_norm": 0.6060606060606061, + "acc_norm_stderr": 0.03815494308688931 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2729498164014688, + "mc1_stderr": 0.015594753632006523, + "mc2": 0.4639323936150009, + "mc2_stderr": 0.016425275606203424 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.47461629279811096, + 
"acc_stderr": 0.01716818720142925, + "acc_norm": 0.5572609208972845, + "acc_norm_stderr": 0.01707725413155622 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + 
"harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "moondriller/solar10B-eugeneparkthebestv2", + "model_sha": "59d3af4307b6388dc6d13abe89b4957f6d9bdbb3", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/moondriller/solar10B-eugeneparkthebestv3/result_2024-04-01 02:28:47.json b/moondriller/solar10B-eugeneparkthebestv3/result_2024-04-01 02:28:47.json new file mode 100644 index 0000000000000000000000000000000000000000..1580d706dad1c24a9292dc7f320eefb042ccb67e --- /dev/null +++ b/moondriller/solar10B-eugeneparkthebestv3/result_2024-04-01 02:28:47.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3412969283276451, + "acc_stderr": 0.013855831287497723, + "acc_norm": 0.39505119453924914, + "acc_norm_stderr": 0.014285898292938174 + }, + "harness|ko_hellaswag|10": { + "acc": 0.39016132244572793, + "acc_stderr": 0.004867893927258243, + "acc_norm": 0.5000995817566222, + "acc_norm_stderr": 0.00498978131248322 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4327485380116959, + 
"acc_stderr": 0.03799978644370608, + "acc_norm": 0.4327485380116959, + "acc_norm_stderr": 0.03799978644370608 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4854368932038835, + "acc_stderr": 0.04948637324026637, + "acc_norm": 0.4854368932038835, + "acc_norm_stderr": 0.04948637324026637 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4661558109833972, + "acc_stderr": 0.017838956009136802, + "acc_norm": 0.4661558109833972, + "acc_norm_stderr": 0.017838956009136802 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.31851851851851853, + "acc_stderr": 0.04024778401977112, + "acc_norm": 0.31851851851851853, + "acc_norm_stderr": 0.04024778401977112 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3191489361702128, + "acc_stderr": 0.03047297336338005, + "acc_norm": 0.3191489361702128, + "acc_norm_stderr": 0.03047297336338005 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.40963855421686746, + "acc_stderr": 0.03828401115079022, + "acc_norm": 0.40963855421686746, + "acc_norm_stderr": 0.03828401115079022 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4533762057877814, + "acc_stderr": 0.028274359854894255, + "acc_norm": 0.4533762057877814, + "acc_norm_stderr": 0.028274359854894255 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3452914798206278, + "acc_stderr": 0.03191100192835794, + "acc_norm": 0.3452914798206278, + "acc_norm_stderr": 0.03191100192835794 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.37404580152671757, + "acc_stderr": 0.042438692422305246, + "acc_norm": 0.37404580152671757, + "acc_norm_stderr": 0.042438692422305246 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.45, + "acc_stderr": 0.04999999999999999, + "acc_norm": 0.45, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.51010101010101, + 
"acc_stderr": 0.035616254886737454, + "acc_norm": 0.51010101010101, + "acc_norm_stderr": 0.035616254886737454 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4206896551724138, + "acc_stderr": 0.0411391498118926, + "acc_norm": 0.4206896551724138, + "acc_norm_stderr": 0.0411391498118926 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.04488482852329017, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.04488482852329017 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.40756302521008403, + "acc_stderr": 0.03191863374478465, + "acc_norm": 0.40756302521008403, + "acc_norm_stderr": 0.03191863374478465 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3974358974358974, + "acc_stderr": 0.024811920017903836, + "acc_norm": 0.3974358974358974, + "acc_norm_stderr": 0.024811920017903836 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4537037037037037, + "acc_stderr": 0.048129173245368216, + "acc_norm": 0.4537037037037037, + "acc_norm_stderr": 0.048129173245368216 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.33004926108374383, + "acc_stderr": 0.033085304262282574, + "acc_norm": 0.33004926108374383, + "acc_norm_stderr": 0.033085304262282574 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.41935483870967744, + "acc_stderr": 0.028071588901091852, + "acc_norm": 0.41935483870967744, + "acc_norm_stderr": 0.028071588901091852 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6196581196581197, + "acc_stderr": 0.031804252043840985, + "acc_norm": 0.6196581196581197, + "acc_norm_stderr": 0.031804252043840985 + }, + 
"harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4339622641509434, + "acc_stderr": 0.03050329201334259, + "acc_norm": 0.4339622641509434, + "acc_norm_stderr": 0.03050329201334259 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.42727272727272725, + "acc_stderr": 0.04738198703545483, + "acc_norm": 0.42727272727272725, + "acc_norm_stderr": 0.04738198703545483 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3037037037037037, + "acc_stderr": 0.02803792996911499, + "acc_norm": 0.3037037037037037, + "acc_norm_stderr": 0.02803792996911499 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2582781456953642, + "acc_stderr": 0.035737053147634576, + "acc_norm": 0.2582781456953642, + "acc_norm_stderr": 0.035737053147634576 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5472636815920398, + "acc_stderr": 0.035197027175769155, + "acc_norm": 0.5472636815920398, + "acc_norm_stderr": 0.035197027175769155 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4046242774566474, + "acc_stderr": 0.0374246119388725, + "acc_norm": 0.4046242774566474, + "acc_norm_stderr": 0.0374246119388725 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2698412698412698, + "acc_stderr": 0.022860838309232065, + "acc_norm": 0.2698412698412698, + "acc_norm_stderr": 0.022860838309232065 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.039420826399272135, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.039420826399272135 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.53, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.53, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.34971098265895956, + "acc_stderr": 0.025674281456531018, + "acc_norm": 0.34971098265895956, + 
"acc_norm_stderr": 0.025674281456531018 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3619631901840491, + "acc_stderr": 0.037757007291414416, + "acc_norm": 0.3619631901840491, + "acc_norm_stderr": 0.037757007291414416 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.39197530864197533, + "acc_stderr": 0.027163686038271233, + "acc_norm": 0.39197530864197533, + "acc_norm_stderr": 0.027163686038271233 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421296, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421296 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5129533678756477, + "acc_stderr": 0.0360722806104775, + "acc_norm": 0.5129533678756477, + "acc_norm_stderr": 0.0360722806104775 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3157894736842105, + "acc_stderr": 0.04372748290278007, + "acc_norm": 0.3157894736842105, + "acc_norm_stderr": 0.04372748290278007 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.45504587155963305, + "acc_stderr": 0.021350503090925167, + "acc_norm": 0.45504587155963305, + "acc_norm_stderr": 0.021350503090925167 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30158730158730157, + "acc_stderr": 0.04104947269903394, + "acc_norm": 0.30158730158730157, + "acc_norm_stderr": 0.04104947269903394 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4542483660130719, + "acc_stderr": 0.028509807802626567, + "acc_norm": 0.4542483660130719, + "acc_norm_stderr": 0.028509807802626567 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5371900826446281, + "acc_stderr": 0.04551711196104218, + "acc_norm": 0.5371900826446281, + "acc_norm_stderr": 0.04551711196104218 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.42105263157894735, + "acc_stderr": 0.04017901275981748, + 
"acc_norm": 0.42105263157894735, + "acc_norm_stderr": 0.04017901275981748 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.29901960784313725, + "acc_stderr": 0.018521756215423024, + "acc_norm": 0.29901960784313725, + "acc_norm_stderr": 0.018521756215423024 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3404255319148936, + "acc_stderr": 0.028267657482650154, + "acc_norm": 0.3404255319148936, + "acc_norm_stderr": 0.028267657482650154 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.32142857142857145, + "acc_stderr": 0.04432804055291521, + "acc_norm": 0.32142857142857145, + "acc_norm_stderr": 0.04432804055291521 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.03362277436608044, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.03362277436608044 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2737430167597765, + "acc_stderr": 0.014912413096372434, + "acc_norm": 0.2737430167597765, + "acc_norm_stderr": 0.014912413096372434 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.029520095697687758, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.029520095697687758 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5102040816326531, + "acc_stderr": 0.03200255347893783, + "acc_norm": 0.5102040816326531, + "acc_norm_stderr": 0.03200255347893783 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.4177215189873418, + "acc_stderr": 0.032103530322412685, + "acc_norm": 0.4177215189873418, + "acc_norm_stderr": 0.032103530322412685 + }, + 
"harness|ko_mmlu_professional_law|5": { + "acc": 0.3239895697522816, + "acc_stderr": 0.01195284080964656, + "acc_norm": 0.3239895697522816, + "acc_norm_stderr": 0.01195284080964656 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.35294117647058826, + "acc_stderr": 0.03354092437591518, + "acc_norm": 0.35294117647058826, + "acc_norm_stderr": 0.03354092437591518 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.46060606060606063, + "acc_stderr": 0.03892207016552013, + "acc_norm": 0.46060606060606063, + "acc_norm_stderr": 0.03892207016552013 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3023255813953488, + "mc1_stderr": 0.016077509266133026, + "mc2": 0.4783448548538914, + "mc2_stderr": 0.01602588606617583 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4167650531286895, + "acc_stderr": 0.016950489146108822, + "acc_norm": 0.48406139315230223, + "acc_norm_stderr": 0.017181617837190195 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + 
"harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "moondriller/solar10B-eugeneparkthebestv3", + "model_sha": "a573009534ff47f2709f303a50948a110ae5799a", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file 
diff --git a/mssma/ko-solar-10.7b-v0.1/result_2024-04-15 05:50:27.json b/mssma/ko-solar-10.7b-v0.1/result_2024-04-15 05:50:27.json new file mode 100644 index 0000000000000000000000000000000000000000..88e87f4eb4694d8b0cf0ed738e0c89a2b2cab77d --- /dev/null +++ b/mssma/ko-solar-10.7b-v0.1/result_2024-04-15 05:50:27.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.20563139931740615, + "acc_stderr": 0.011810745260742578, + "acc_norm": 0.24914675767918087, + "acc_norm_stderr": 0.012639407111926442 + }, + "harness|ko_hellaswag|10": { + "acc": 0.253734315873332, + "acc_stderr": 0.0043425802776627205, + "acc_norm": 0.2551284604660426, + "acc_norm_stderr": 0.0043504247506462035 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.23391812865497075, + "acc_stderr": 0.03246721765117827, + "acc_norm": 0.23391812865497075, + "acc_norm_stderr": 0.03246721765117827 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2524271844660194, + "acc_stderr": 0.04301250399690877, + "acc_norm": 0.2524271844660194, + "acc_norm_stderr": 0.04301250399690877 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.26181353767560667, + "acc_stderr": 0.015720838678445252, + "acc_norm": 0.26181353767560667, + "acc_norm_stderr": 0.015720838678445252 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.23703703703703705, + "acc_stderr": 0.03673731683969506, + "acc_norm": 0.23703703703703705, + "acc_norm_stderr": 0.03673731683969506 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768077, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768077 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.32340425531914896, + "acc_stderr": 0.03057944277361034, + "acc_norm": 0.32340425531914896, + "acc_norm_stderr": 0.03057944277361034 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3192771084337349, + "acc_stderr": 0.03629335329947859, + "acc_norm": 0.3192771084337349, + "acc_norm_stderr": 0.03629335329947859 + }, + 
"harness|ko_mmlu_philosophy|5": { + "acc": 0.27009646302250806, + "acc_stderr": 0.025218040373410605, + "acc_norm": 0.27009646302250806, + "acc_norm_stderr": 0.025218040373410605 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.37668161434977576, + "acc_stderr": 0.03252113489929189, + "acc_norm": 0.37668161434977576, + "acc_norm_stderr": 0.03252113489929189 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.22900763358778625, + "acc_stderr": 0.036853466317118506, + "acc_norm": 0.22900763358778625, + "acc_norm_stderr": 0.036853466317118506 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932268, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932268 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.21717171717171718, + "acc_stderr": 0.029376616484945627, + "acc_norm": 0.21717171717171718, + "acc_norm_stderr": 0.029376616484945627 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.21379310344827587, + "acc_stderr": 0.03416520447747549, + "acc_norm": 0.21379310344827587, + "acc_norm_stderr": 0.03416520447747549 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.19607843137254902, + "acc_stderr": 0.03950581861179962, + "acc_norm": 0.19607843137254902, + "acc_norm_stderr": 0.03950581861179962 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.23109243697478993, + "acc_stderr": 0.027381406927868973, + "acc_norm": 0.23109243697478993, + "acc_norm_stderr": 0.027381406927868973 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.21794871794871795, + "acc_stderr": 0.020932445774463168, + "acc_norm": 0.21794871794871795, + "acc_norm_stderr": 0.020932445774463168 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 
0.04760952285695236 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.04330043749650742, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.04330043749650742 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.26108374384236455, + "acc_stderr": 0.03090379695211447, + "acc_norm": 0.26108374384236455, + "acc_norm_stderr": 0.03090379695211447 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.23870967741935484, + "acc_stderr": 0.024251071262208837, + "acc_norm": 0.23870967741935484, + "acc_norm_stderr": 0.024251071262208837 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.24786324786324787, + "acc_stderr": 0.028286324075564404, + "acc_norm": 0.24786324786324787, + "acc_norm_stderr": 0.028286324075564404 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.26037735849056604, + "acc_stderr": 0.0270087660907081, + "acc_norm": 0.26037735849056604, + "acc_norm_stderr": 0.0270087660907081 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.3181818181818182, + "acc_stderr": 0.04461272175910508, + "acc_norm": 0.3181818181818182, + "acc_norm_stderr": 0.04461272175910508 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.02671924078371216, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.02671924078371216 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.1986754966887417, + "acc_stderr": 0.03257847384436776, + "acc_norm": 0.1986754966887417, + "acc_norm_stderr": 0.03257847384436776 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.22388059701492538, + "acc_stderr": 0.029475250236017197, + "acc_norm": 0.22388059701492538, + "acc_norm_stderr": 0.029475250236017197 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.23121387283236994, + "acc_stderr": 0.03214737302029471, + "acc_norm": 0.23121387283236994, + "acc_norm_stderr": 0.03214737302029471 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 
0.25396825396825395, + "acc_stderr": 0.022418042891113946, + "acc_norm": 0.25396825396825395, + "acc_norm_stderr": 0.022418042891113946 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.03476590104304134, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.03476590104304134 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.24566473988439305, + "acc_stderr": 0.02317629820399201, + "acc_norm": 0.24566473988439305, + "acc_norm_stderr": 0.02317629820399201 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.24539877300613497, + "acc_stderr": 0.03380939813943353, + "acc_norm": 0.24539877300613497, + "acc_norm_stderr": 0.03380939813943353 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.27469135802469136, + "acc_stderr": 0.024836057868294674, + "acc_norm": 0.27469135802469136, + "acc_norm_stderr": 0.024836057868294674 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.18652849740932642, + "acc_stderr": 0.02811209121011747, + "acc_norm": 0.18652849740932642, + "acc_norm_stderr": 0.02811209121011747 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.042270544512321984, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.042270544512321984 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.23669724770642203, + "acc_stderr": 0.018224078117299095, + "acc_norm": 0.23669724770642203, + "acc_norm_stderr": 0.018224078117299095 + }, + 
"harness|ko_mmlu_formal_logic|5": { + "acc": 0.20634920634920634, + "acc_stderr": 0.0361960452412425, + "acc_norm": 0.20634920634920634, + "acc_norm_stderr": 0.0361960452412425 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.21895424836601307, + "acc_stderr": 0.02367908986180772, + "acc_norm": 0.21895424836601307, + "acc_norm_stderr": 0.02367908986180772 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.24793388429752067, + "acc_stderr": 0.039418975265163025, + "acc_norm": 0.24793388429752067, + "acc_norm_stderr": 0.039418975265163025 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.18421052631578946, + "acc_stderr": 0.031546980450822305, + "acc_norm": 0.18421052631578946, + "acc_norm_stderr": 0.031546980450822305 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.25, + "acc_stderr": 0.01751781884501444, + "acc_norm": 0.25, + "acc_norm_stderr": 0.01751781884501444 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.24468085106382978, + "acc_stderr": 0.025645553622266733, + "acc_norm": 0.24468085106382978, + "acc_norm_stderr": 0.025645553622266733 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.24107142857142858, + "acc_stderr": 0.04059867246952687, + "acc_norm": 0.24107142857142858, + "acc_norm_stderr": 0.04059867246952687 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.16203703703703703, + "acc_stderr": 0.025130453652268455, + "acc_norm": 0.16203703703703703, + "acc_norm_stderr": 0.025130453652268455 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.15, + "acc_stderr": 0.0358870281282637, + "acc_norm": 0.15, + "acc_norm_stderr": 
0.0358870281282637 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932269, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932269 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.1948529411764706, + "acc_stderr": 0.024060599423487417, + "acc_norm": 0.1948529411764706, + "acc_norm_stderr": 0.024060599423487417 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.17142857142857143, + "acc_stderr": 0.024127463462650146, + "acc_norm": 0.17142857142857143, + "acc_norm_stderr": 0.024127463462650146 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.25738396624472576, + "acc_stderr": 0.0284588209914603, + "acc_norm": 0.25738396624472576, + "acc_norm_stderr": 0.0284588209914603 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2405475880052151, + "acc_stderr": 0.010916406735478947, + "acc_norm": 0.2405475880052151, + "acc_norm_stderr": 0.010916406735478947 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.23039215686274508, + "acc_stderr": 0.02955429260569506, + "acc_norm": 0.23039215686274508, + "acc_norm_stderr": 0.02955429260569506 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.24242424242424243, + "acc_stderr": 0.033464098810559534, + "acc_norm": 0.24242424242424243, + "acc_norm_stderr": 0.033464098810559534 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24357405140758873, + "mc1_stderr": 0.015026354824910782, + "mc2": NaN, + "mc2_stderr": NaN + }, + "harness|ko_commongen_v2|2": { + "acc": 0.10153482880755609, + "acc_stderr": 0.010384198041619998, + "acc_norm": 0.36835891381345925, + "acc_norm_stderr": 0.01658385898263907 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + 
"harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + 
"harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "mssma/ko-solar-10.7b-v0.1", + "model_sha": "177bd341688cec645bd92b126c309d0a684f3a4f", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/mssma/ko-solar-10.7b-v0.10/result_2024-06-17 07:21:11.json b/mssma/ko-solar-10.7b-v0.10/result_2024-06-17 07:21:11.json new file mode 100644 index 0000000000000000000000000000000000000000..e9cb8186b303284489f0dea2851ca06d793f3ad8 --- /dev/null +++ b/mssma/ko-solar-10.7b-v0.10/result_2024-06-17 07:21:11.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4931740614334471, + "acc_stderr": 0.014610029151379813, + "acc_norm": 0.5443686006825939, + "acc_norm_stderr": 0.01455374993930687 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4707229635530771, + "acc_stderr": 0.0049812201358823294, + "acc_norm": 0.6372236606253734, + "acc_norm_stderr": 0.004798184463156363 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6081871345029239, + "acc_stderr": 0.037439798259263996, + "acc_norm": 0.6081871345029239, + "acc_norm_stderr": 0.037439798259263996 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6699029126213593, + "acc_stderr": 0.0465614711001235, + "acc_norm": 0.6699029126213593, + "acc_norm_stderr": 0.0465614711001235 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6871008939974457, + "acc_stderr": 0.016580935940304048, + "acc_norm": 0.6871008939974457, + "acc_norm_stderr": 
0.016580935940304048 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4888888888888889, + "acc_stderr": 0.04318275491977976, + "acc_norm": 0.4888888888888889, + "acc_norm_stderr": 0.04318275491977976 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.44680851063829785, + "acc_stderr": 0.0325005368436584, + "acc_norm": 0.44680851063829785, + "acc_norm_stderr": 0.0325005368436584 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.463855421686747, + "acc_stderr": 0.03882310850890594, + "acc_norm": 0.463855421686747, + "acc_norm_stderr": 0.03882310850890594 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5819935691318328, + "acc_stderr": 0.028013651891995076, + "acc_norm": 0.5819935691318328, + "acc_norm_stderr": 0.028013651891995076 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5246636771300448, + "acc_stderr": 0.03351695167652628, + "acc_norm": 0.5246636771300448, + "acc_norm_stderr": 0.03351695167652628 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5343511450381679, + "acc_stderr": 0.04374928560599738, + "acc_norm": 0.5343511450381679, + "acc_norm_stderr": 0.04374928560599738 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.45, + "acc_stderr": 0.04999999999999999, + "acc_norm": 0.45, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7070707070707071, + "acc_stderr": 0.03242497958178817, + "acc_norm": 0.7070707070707071, + "acc_norm_stderr": 0.03242497958178817 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4413793103448276, + "acc_stderr": 0.04137931034482758, + "acc_norm": 0.4413793103448276, + "acc_norm_stderr": 0.04137931034482758 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.04220773659171453, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 
0.04220773659171453 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.48739495798319327, + "acc_stderr": 0.03246816765752174, + "acc_norm": 0.48739495798319327, + "acc_norm_stderr": 0.03246816765752174 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5333333333333333, + "acc_stderr": 0.025294608023986465, + "acc_norm": 0.5333333333333333, + "acc_norm_stderr": 0.025294608023986465 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6018518518518519, + "acc_stderr": 0.047323326159788126, + "acc_norm": 0.6018518518518519, + "acc_norm_stderr": 0.047323326159788126 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3842364532019704, + "acc_stderr": 0.03422398565657551, + "acc_norm": 0.3842364532019704, + "acc_norm_stderr": 0.03422398565657551 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6, + "acc_stderr": 0.027869320571664625, + "acc_norm": 0.6, + "acc_norm_stderr": 0.027869320571664625 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7948717948717948, + "acc_stderr": 0.02645350805404035, + "acc_norm": 0.7948717948717948, + "acc_norm_stderr": 0.02645350805404035 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5547169811320755, + "acc_stderr": 0.030588052974270658, + "acc_norm": 0.5547169811320755, + "acc_norm_stderr": 0.030588052974270658 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6, + "acc_stderr": 0.0469237132203465, + "acc_norm": 0.6, + "acc_norm_stderr": 0.0469237132203465 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.02911661760608303, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 
0.02911661760608303 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3509933774834437, + "acc_stderr": 0.03896981964257375, + "acc_norm": 0.3509933774834437, + "acc_norm_stderr": 0.03896981964257375 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6766169154228856, + "acc_stderr": 0.03307615947979035, + "acc_norm": 0.6766169154228856, + "acc_norm_stderr": 0.03307615947979035 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.44508670520231214, + "acc_stderr": 0.03789401760283647, + "acc_norm": 0.44508670520231214, + "acc_norm_stderr": 0.03789401760283647 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.36507936507936506, + "acc_stderr": 0.02479606060269994, + "acc_norm": 0.36507936507936506, + "acc_norm_stderr": 0.02479606060269994 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5138888888888888, + "acc_stderr": 0.041795966175810016, + "acc_norm": 0.5138888888888888, + "acc_norm_stderr": 0.041795966175810016 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.72, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.72, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5346820809248555, + "acc_stderr": 0.026854257928258886, + "acc_norm": 0.5346820809248555, + "acc_norm_stderr": 0.026854257928258886 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.49693251533742333, + "acc_stderr": 0.03928297078179663, + "acc_norm": 0.49693251533742333, + "acc_norm_stderr": 0.03928297078179663 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5370370370370371, + "acc_stderr": 0.027744313443376536, + "acc_norm": 0.5370370370370371, + "acc_norm_stderr": 0.027744313443376536 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 
0.04824181513244218 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6787564766839378, + "acc_stderr": 0.033699508685490674, + "acc_norm": 0.6787564766839378, + "acc_norm_stderr": 0.033699508685490674 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3157894736842105, + "acc_stderr": 0.04372748290278008, + "acc_norm": 0.3157894736842105, + "acc_norm_stderr": 0.04372748290278008 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6403669724770642, + "acc_stderr": 0.020575234660123783, + "acc_norm": 0.6403669724770642, + "acc_norm_stderr": 0.020575234660123783 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3492063492063492, + "acc_stderr": 0.04263906892795133, + "acc_norm": 0.3492063492063492, + "acc_norm_stderr": 0.04263906892795133 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5359477124183006, + "acc_stderr": 0.028555827516528787, + "acc_norm": 0.5359477124183006, + "acc_norm_stderr": 0.028555827516528787 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.55, + "acc_stderr": 0.049999999999999996, + "acc_norm": 0.55, + "acc_norm_stderr": 0.049999999999999996 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6528925619834711, + "acc_stderr": 0.043457245702925335, + "acc_norm": 0.6528925619834711, + "acc_norm_stderr": 0.043457245702925335 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5328947368421053, + "acc_stderr": 0.040601270352363966, + "acc_norm": 0.5328947368421053, + "acc_norm_stderr": 0.040601270352363966 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4673202614379085, + "acc_stderr": 0.020184583359102202, + "acc_norm": 0.4673202614379085, + "acc_norm_stderr": 0.020184583359102202 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3829787234042553, + "acc_stderr": 0.028999080904806178, + "acc_norm": 0.3829787234042553, + "acc_norm_stderr": 0.028999080904806178 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.39285714285714285, + "acc_stderr": 
0.04635550135609976, + "acc_norm": 0.39285714285714285, + "acc_norm_stderr": 0.04635550135609976 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4305555555555556, + "acc_stderr": 0.03376922151252335, + "acc_norm": 0.4305555555555556, + "acc_norm_stderr": 0.03376922151252335 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2435754189944134, + "acc_stderr": 0.014355911964767867, + "acc_norm": 0.2435754189944134, + "acc_norm_stderr": 0.014355911964767867 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.049999999999999996, + "acc_norm": 0.45, + "acc_norm_stderr": 0.049999999999999996 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.57, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.57, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.44485294117647056, + "acc_stderr": 0.030187532060329376, + "acc_norm": 0.44485294117647056, + "acc_norm_stderr": 0.030187532060329376 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5795918367346938, + "acc_stderr": 0.03160106993449601, + "acc_norm": 0.5795918367346938, + "acc_norm_stderr": 0.03160106993449601 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7257383966244726, + "acc_stderr": 0.029041333510598035, + "acc_norm": 0.7257383966244726, + "acc_norm_stderr": 0.029041333510598035 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.36310299869621904, + "acc_stderr": 0.012282264406018763, + "acc_norm": 0.36310299869621904, + "acc_norm_stderr": 0.012282264406018763 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6176470588235294, + "acc_stderr": 0.03410785338904719, + "acc_norm": 0.6176470588235294, + "acc_norm_stderr": 0.03410785338904719 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.593939393939394, + "acc_stderr": 0.03834816355401181, + "acc_norm": 0.593939393939394, + "acc_norm_stderr": 0.03834816355401181 + }, + 
"harness|ko_truthfulqa_mc|0": { + "mc1": 0.3488372093023256, + "mc1_stderr": 0.016684419859986883, + "mc2": 0.5160376776969597, + "mc2_stderr": 0.01581379368401375 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.564344746162928, + "acc_stderr": 0.017047415229476306, + "acc_norm": 0.5726092089728453, + "acc_norm_stderr": 0.017008129844823156 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "mssma/ko-solar-10.7b-v0.10", + "model_sha": "9acf9af16249f5194219c2dfe7bd00e518e474e1", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/mssma/ko-solar-10.7b-v0.11/result_2024-06-17 07:21:17.json b/mssma/ko-solar-10.7b-v0.11/result_2024-06-17 07:21:17.json new file mode 100644 index 0000000000000000000000000000000000000000..a4dfc456aefa8cf03ef219df36df93c7fea77c7f --- /dev/null +++ b/mssma/ko-solar-10.7b-v0.11/result_2024-06-17 07:21:17.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.49829351535836175, + "acc_stderr": 0.014611305705056995, + "acc_norm": 0.5537542662116041, + "acc_norm_stderr": 0.014526705548539983 + }, + "harness|ko_hellaswag|10": { + "acc": 0.47161919936267677, + "acc_stderr": 
0.004981736689518744, + "acc_norm": 0.6446922923720374, + "acc_norm_stderr": 0.004776283203468085 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6491228070175439, + "acc_stderr": 0.03660298834049162, + "acc_norm": 0.6491228070175439, + "acc_norm_stderr": 0.03660298834049162 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6213592233009708, + "acc_stderr": 0.04802694698258973, + "acc_norm": 0.6213592233009708, + "acc_norm_stderr": 0.04802694698258973 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.016857391247472545, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.016857391247472545 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.043097329010363554, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.043097329010363554 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.46808510638297873, + "acc_stderr": 0.03261936918467381, + "acc_norm": 0.46808510638297873, + "acc_norm_stderr": 0.03261936918467381 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4036144578313253, + "acc_stderr": 0.03819486140758397, + "acc_norm": 0.4036144578313253, + "acc_norm_stderr": 0.03819486140758397 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5594855305466238, + "acc_stderr": 0.028196400574197422, + "acc_norm": 0.5594855305466238, + "acc_norm_stderr": 0.028196400574197422 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5650224215246636, + "acc_stderr": 0.033272833702713445, + "acc_norm": 0.5650224215246636, + "acc_norm_stderr": 0.033272833702713445 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.549618320610687, + "acc_stderr": 0.04363643698524779, + "acc_norm": 0.549618320610687, + "acc_norm_stderr": 0.04363643698524779 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.54, + "acc_stderr": 
0.05009082659620333, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6868686868686869, + "acc_stderr": 0.03304205087813652, + "acc_norm": 0.6868686868686869, + "acc_norm_stderr": 0.03304205087813652 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.503448275862069, + "acc_stderr": 0.04166567577101579, + "acc_norm": 0.503448275862069, + "acc_norm_stderr": 0.04166567577101579 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.04280105837364395, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.04280105837364395 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5, + "acc_stderr": 0.032478490123081544, + "acc_norm": 0.5, + "acc_norm_stderr": 0.032478490123081544 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5384615384615384, + "acc_stderr": 0.025275892070240648, + "acc_norm": 0.5384615384615384, + "acc_norm_stderr": 0.025275892070240648 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.62, + "acc_stderr": 0.04878317312145631, + "acc_norm": 0.62, + "acc_norm_stderr": 0.04878317312145631 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.04803752235190192, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.04803752235190192 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.37438423645320196, + "acc_stderr": 0.03405155380561952, + "acc_norm": 0.37438423645320196, + "acc_norm_stderr": 0.03405155380561952 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.567741935483871, + "acc_stderr": 0.028181739720019403, + "acc_norm": 0.567741935483871, + "acc_norm_stderr": 0.028181739720019403 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7564102564102564, + 
"acc_stderr": 0.028120966503914404, + "acc_norm": 0.7564102564102564, + "acc_norm_stderr": 0.028120966503914404 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5169811320754717, + "acc_stderr": 0.030755120364119898, + "acc_norm": 0.5169811320754717, + "acc_norm_stderr": 0.030755120364119898 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5818181818181818, + "acc_stderr": 0.04724577405731572, + "acc_norm": 0.5818181818181818, + "acc_norm_stderr": 0.04724577405731572 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3296296296296296, + "acc_stderr": 0.028661201116524593, + "acc_norm": 0.3296296296296296, + "acc_norm_stderr": 0.028661201116524593 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.36423841059602646, + "acc_stderr": 0.03929111781242742, + "acc_norm": 0.36423841059602646, + "acc_norm_stderr": 0.03929111781242742 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6865671641791045, + "acc_stderr": 0.03280188205348642, + "acc_norm": 0.6865671641791045, + "acc_norm_stderr": 0.03280188205348642 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.45664739884393063, + "acc_stderr": 0.03798106566014498, + "acc_norm": 0.45664739884393063, + "acc_norm_stderr": 0.03798106566014498 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.37566137566137564, + "acc_stderr": 0.024942368931159795, + "acc_norm": 0.37566137566137564, + "acc_norm_stderr": 0.024942368931159795 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4930555555555556, + "acc_stderr": 0.041808067502949374, + "acc_norm": 0.4930555555555556, + "acc_norm_stderr": 0.041808067502949374 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.74, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.74, + "acc_norm_stderr": 0.044084400227680794 + }, + 
"harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5404624277456648, + "acc_stderr": 0.026830805998952233, + "acc_norm": 0.5404624277456648, + "acc_norm_stderr": 0.026830805998952233 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4785276073619632, + "acc_stderr": 0.03924746876751129, + "acc_norm": 0.4785276073619632, + "acc_norm_stderr": 0.03924746876751129 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5154320987654321, + "acc_stderr": 0.02780749004427621, + "acc_norm": 0.5154320987654321, + "acc_norm_stderr": 0.02780749004427621 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6683937823834197, + "acc_stderr": 0.03397636541089118, + "acc_norm": 0.6683937823834197, + "acc_norm_stderr": 0.03397636541089118 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3508771929824561, + "acc_stderr": 0.044895393502706986, + "acc_norm": 0.3508771929824561, + "acc_norm_stderr": 0.044895393502706986 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6165137614678899, + "acc_stderr": 0.02084715664191598, + "acc_norm": 0.6165137614678899, + "acc_norm_stderr": 0.02084715664191598 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.04040610178208841, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.04040610178208841 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5686274509803921, + "acc_stderr": 0.02835895631342355, + "acc_norm": 0.5686274509803921, + "acc_norm_stderr": 0.02835895631342355 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.57, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.57, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6942148760330579, + "acc_stderr": 0.04205953933884122, + "acc_norm": 0.6942148760330579, + "acc_norm_stderr": 
0.04205953933884122 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5460526315789473, + "acc_stderr": 0.04051646342874143, + "acc_norm": 0.5460526315789473, + "acc_norm_stderr": 0.04051646342874143 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.46568627450980393, + "acc_stderr": 0.02018014484330729, + "acc_norm": 0.46568627450980393, + "acc_norm_stderr": 0.02018014484330729 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.33687943262411346, + "acc_stderr": 0.028195534873966727, + "acc_norm": 0.33687943262411346, + "acc_norm_stderr": 0.028195534873966727 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.04547960999764376, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.04547960999764376 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.39351851851851855, + "acc_stderr": 0.03331747876370312, + "acc_norm": 0.39351851851851855, + "acc_norm_stderr": 0.03331747876370312 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27039106145251396, + "acc_stderr": 0.014854993938010071, + "acc_norm": 0.27039106145251396, + "acc_norm_stderr": 0.014854993938010071 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.58, + "acc_stderr": 0.04960449637488583, + "acc_norm": 0.58, + "acc_norm_stderr": 0.04960449637488583 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3860294117647059, + "acc_stderr": 0.029573269134411124, + "acc_norm": 0.3860294117647059, + "acc_norm_stderr": 0.029573269134411124 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5673469387755102, + "acc_stderr": 0.031717528240626645, + "acc_norm": 0.5673469387755102, + "acc_norm_stderr": 0.031717528240626645 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7046413502109705, + 
"acc_stderr": 0.02969633871342288, + "acc_norm": 0.7046413502109705, + "acc_norm_stderr": 0.02969633871342288 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.34876140808344197, + "acc_stderr": 0.012172035157127116, + "acc_norm": 0.34876140808344197, + "acc_norm_stderr": 0.012172035157127116 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6372549019607843, + "acc_stderr": 0.03374499356319355, + "acc_norm": 0.6372549019607843, + "acc_norm_stderr": 0.03374499356319355 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5878787878787879, + "acc_stderr": 0.038435669935887186, + "acc_norm": 0.5878787878787879, + "acc_norm_stderr": 0.038435669935887186 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.35862913096695226, + "mc1_stderr": 0.016789289499502022, + "mc2": 0.5219272628648576, + "mc2_stderr": 0.015772360385573416 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.58913813459268, + "acc_stderr": 0.016914972767841055, + "acc_norm": 0.5903187721369539, + "acc_norm_stderr": 0.016907568192219478 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + 
"harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "mssma/ko-solar-10.7b-v0.11", + "model_sha": "cac2851f557e55d676ff832f311c34645d407ab8", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 
0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/mssma/ko-solar-10.7b-v0.12/result_2024-06-17 07:21:24.json b/mssma/ko-solar-10.7b-v0.12/result_2024-06-17 07:21:24.json new file mode 100644 index 0000000000000000000000000000000000000000..6d8e54b1f731941f79656fe241fbfef0e6683358 --- /dev/null +++ b/mssma/ko-solar-10.7b-v0.12/result_2024-06-17 07:21:24.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4931740614334471, + "acc_stderr": 0.014610029151379813, + "acc_norm": 0.5503412969283277, + "acc_norm_stderr": 0.014537144444284743 + }, + "harness|ko_hellaswag|10": { + "acc": 0.47301334395538736, + "acc_stderr": 0.004982508198584254, + "acc_norm": 0.6448914558852819, + "acc_norm_stderr": 0.004775681871529873 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6257309941520468, + "acc_stderr": 0.03711601185389481, + "acc_norm": 0.6257309941520468, + "acc_norm_stderr": 0.03711601185389481 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6699029126213593, + "acc_stderr": 0.0465614711001235, + "acc_norm": 0.6699029126213593, + "acc_norm_stderr": 0.0465614711001235 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6807151979565773, + "acc_stderr": 0.016671261749538733, + "acc_norm": 0.6807151979565773, + "acc_norm_stderr": 0.016671261749538733 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.042925967182569816, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.042925967182569816 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4553191489361702, + "acc_stderr": 0.03255525359340355, + "acc_norm": 0.4553191489361702, + "acc_norm_stderr": 0.03255525359340355 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4759036144578313, + "acc_stderr": 
0.03887971849597264, + "acc_norm": 0.4759036144578313, + "acc_norm_stderr": 0.03887971849597264 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5659163987138264, + "acc_stderr": 0.0281502322445356, + "acc_norm": 0.5659163987138264, + "acc_norm_stderr": 0.0281502322445356 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5650224215246636, + "acc_stderr": 0.03327283370271345, + "acc_norm": 0.5650224215246636, + "acc_norm_stderr": 0.03327283370271345 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5648854961832062, + "acc_stderr": 0.04348208051644858, + "acc_norm": 0.5648854961832062, + "acc_norm_stderr": 0.04348208051644858 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956909, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956909 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6818181818181818, + "acc_stderr": 0.03318477333845331, + "acc_norm": 0.6818181818181818, + "acc_norm_stderr": 0.03318477333845331 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5310344827586206, + "acc_stderr": 0.04158632762097828, + "acc_norm": 0.5310344827586206, + "acc_norm_stderr": 0.04158632762097828 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.044405219061793275, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.044405219061793275 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.48739495798319327, + "acc_stderr": 0.03246816765752174, + "acc_norm": 0.48739495798319327, + "acc_norm_stderr": 0.03246816765752174 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5538461538461539, + "acc_stderr": 0.025203571773028323, + "acc_norm": 0.5538461538461539, + "acc_norm_stderr": 0.025203571773028323 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.58, + "acc_stderr": 0.04960449637488583, + "acc_norm": 0.58, + "acc_norm_stderr": 0.04960449637488583 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 
0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5925925925925926, + "acc_stderr": 0.04750077341199984, + "acc_norm": 0.5925925925925926, + "acc_norm_stderr": 0.04750077341199984 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39901477832512317, + "acc_stderr": 0.034454876862647144, + "acc_norm": 0.39901477832512317, + "acc_norm_stderr": 0.034454876862647144 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5838709677419355, + "acc_stderr": 0.028040981380761533, + "acc_norm": 0.5838709677419355, + "acc_norm_stderr": 0.028040981380761533 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7905982905982906, + "acc_stderr": 0.026655699653922733, + "acc_norm": 0.7905982905982906, + "acc_norm_stderr": 0.026655699653922733 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5584905660377358, + "acc_stderr": 0.030561590426731837, + "acc_norm": 0.5584905660377358, + "acc_norm_stderr": 0.030561590426731837 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6, + "acc_stderr": 0.0469237132203465, + "acc_norm": 0.6, + "acc_norm_stderr": 0.0469237132203465 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.34074074074074073, + "acc_stderr": 0.02889774874113113, + "acc_norm": 0.34074074074074073, + "acc_norm_stderr": 0.02889774874113113 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3576158940397351, + "acc_stderr": 0.03913453431177258, + "acc_norm": 0.3576158940397351, + "acc_norm_stderr": 0.03913453431177258 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6417910447761194, + "acc_stderr": 0.03390393042268815, + "acc_norm": 0.6417910447761194, + "acc_norm_stderr": 0.03390393042268815 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.42196531791907516, + "acc_stderr": 0.0376574669386515, + "acc_norm": 0.42196531791907516, + "acc_norm_stderr": 0.0376574669386515 + }, + 
"harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.37566137566137564, + "acc_stderr": 0.024942368931159795, + "acc_norm": 0.37566137566137564, + "acc_norm_stderr": 0.024942368931159795 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5, + "acc_stderr": 0.04181210050035455, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04181210050035455 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.78, + "acc_stderr": 0.041633319989322626, + "acc_norm": 0.78, + "acc_norm_stderr": 0.041633319989322626 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5317919075144508, + "acc_stderr": 0.026864624366756643, + "acc_norm": 0.5317919075144508, + "acc_norm_stderr": 0.026864624366756643 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5214723926380368, + "acc_stderr": 0.0392474687675113, + "acc_norm": 0.5214723926380368, + "acc_norm_stderr": 0.0392474687675113 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5462962962962963, + "acc_stderr": 0.027701228468542602, + "acc_norm": 0.5462962962962963, + "acc_norm_stderr": 0.027701228468542602 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.689119170984456, + "acc_stderr": 0.03340361906276587, + "acc_norm": 0.689119170984456, + "acc_norm_stderr": 0.03340361906276587 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3684210526315789, + "acc_stderr": 0.04537815354939391, + "acc_norm": 0.3684210526315789, + "acc_norm_stderr": 0.04537815354939391 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6275229357798165, + "acc_stderr": 0.020728368457638494, + "acc_norm": 0.6275229357798165, + "acc_norm_stderr": 0.020728368457638494 + }, + 
"harness|ko_mmlu_formal_logic|5": { + "acc": 0.3253968253968254, + "acc_stderr": 0.041905964388711366, + "acc_norm": 0.3253968253968254, + "acc_norm_stderr": 0.041905964388711366 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5849673202614379, + "acc_stderr": 0.028213504177824106, + "acc_norm": 0.5849673202614379, + "acc_norm_stderr": 0.028213504177824106 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.53, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.53, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7024793388429752, + "acc_stderr": 0.04173349148083499, + "acc_norm": 0.7024793388429752, + "acc_norm_stderr": 0.04173349148083499 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5328947368421053, + "acc_stderr": 0.040601270352363966, + "acc_norm": 0.5328947368421053, + "acc_norm_stderr": 0.040601270352363966 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.020196594933541197, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.020196594933541197 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3723404255319149, + "acc_stderr": 0.02883892147125146, + "acc_norm": 0.3723404255319149, + "acc_norm_stderr": 0.02883892147125146 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3482142857142857, + "acc_stderr": 0.045218299028335865, + "acc_norm": 0.3482142857142857, + "acc_norm_stderr": 0.045218299028335865 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.38425925925925924, + "acc_stderr": 0.03317354514310742, + "acc_norm": 0.38425925925925924, + "acc_norm_stderr": 0.03317354514310742 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.28044692737430166, + "acc_stderr": 0.01502408388332289, + "acc_norm": 0.28044692737430166, + "acc_norm_stderr": 0.01502408388332289 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.49, + 
"acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.62, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.62, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.39705882352941174, + "acc_stderr": 0.02972215209928004, + "acc_norm": 0.39705882352941174, + "acc_norm_stderr": 0.02972215209928004 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5836734693877551, + "acc_stderr": 0.03155782816556166, + "acc_norm": 0.5836734693877551, + "acc_norm_stderr": 0.03155782816556166 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6877637130801688, + "acc_stderr": 0.03016513786784701, + "acc_norm": 0.6877637130801688, + "acc_norm_stderr": 0.03016513786784701 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.363754889178618, + "acc_stderr": 0.012286991879902889, + "acc_norm": 0.363754889178618, + "acc_norm_stderr": 0.012286991879902889 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5931372549019608, + "acc_stderr": 0.03447891136353382, + "acc_norm": 0.5931372549019608, + "acc_norm_stderr": 0.03447891136353382 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6181818181818182, + "acc_stderr": 0.03793713171165633, + "acc_norm": 0.6181818181818182, + "acc_norm_stderr": 0.03793713171165633 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.386780905752754, + "mc1_stderr": 0.01704885701051511, + "mc2": 0.5618405823598557, + "mc2_stderr": 0.015789015307202 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5525383707201889, + "acc_stderr": 0.017095190301500574, + "acc_norm": 0.5572609208972845, + "acc_norm_stderr": 0.017077254131556228 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + 
"harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + 
"harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "mssma/ko-solar-10.7b-v0.12", + "model_sha": "89dc6481a278e1bc20f7b5ed8be19afd52345689", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/mssma/ko-solar-10.7b-v0.13/result_2024-06-17 07:21:28.json b/mssma/ko-solar-10.7b-v0.13/result_2024-06-17 07:21:28.json new file mode 100644 index 0000000000000000000000000000000000000000..b62aa2bf0ac1cbe6ecda797711d0536f4ab72a9e --- /dev/null +++ b/mssma/ko-solar-10.7b-v0.13/result_2024-06-17 07:21:28.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.5025597269624573, + "acc_stderr": 0.01461119932984378, + "acc_norm": 0.5588737201365188, + "acc_norm_stderr": 0.014509747749064664 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4678350926110337, + "acc_stderr": 0.004979446038824757, + "acc_norm": 0.6342362079267079, + "acc_norm_stderr": 0.004806593424942258 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.631578947368421, + "acc_stderr": 0.036996580176568775, + "acc_norm": 0.631578947368421, + "acc_norm_stderr": 0.036996580176568775 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6504854368932039, + "acc_stderr": 0.04721188506097172, + "acc_norm": 0.6504854368932039, + "acc_norm_stderr": 0.04721188506097172 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6781609195402298, + "acc_stderr": 0.01670638141505791, + "acc_norm": 
0.6781609195402298, + "acc_norm_stderr": 0.01670638141505791 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45185185185185184, + "acc_stderr": 0.042992689054808624, + "acc_norm": 0.45185185185185184, + "acc_norm_stderr": 0.042992689054808624 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4723404255319149, + "acc_stderr": 0.03263597118409769, + "acc_norm": 0.4723404255319149, + "acc_norm_stderr": 0.03263597118409769 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42771084337349397, + "acc_stderr": 0.03851597683718533, + "acc_norm": 0.42771084337349397, + "acc_norm_stderr": 0.03851597683718533 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5498392282958199, + "acc_stderr": 0.028256660723360184, + "acc_norm": 0.5498392282958199, + "acc_norm_stderr": 0.028256660723360184 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5515695067264574, + "acc_stderr": 0.03337883736255098, + "acc_norm": 0.5515695067264574, + "acc_norm_stderr": 0.03337883736255098 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5267175572519084, + "acc_stderr": 0.04379024936553894, + "acc_norm": 0.5267175572519084, + "acc_norm_stderr": 0.04379024936553894 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6717171717171717, + "acc_stderr": 0.033456784227567773, + "acc_norm": 0.6717171717171717, + "acc_norm_stderr": 0.033456784227567773 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.47586206896551725, + "acc_stderr": 0.041618085035015295, + "acc_norm": 0.47586206896551725, + "acc_norm_stderr": 0.041618085035015295 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.042207736591714534, + 
"acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.042207736591714534 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.44537815126050423, + "acc_stderr": 0.032284106267163895, + "acc_norm": 0.44537815126050423, + "acc_norm_stderr": 0.032284106267163895 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5384615384615384, + "acc_stderr": 0.025275892070240648, + "acc_norm": 0.5384615384615384, + "acc_norm_stderr": 0.025275892070240648 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5462962962962963, + "acc_stderr": 0.04812917324536823, + "acc_norm": 0.5462962962962963, + "acc_norm_stderr": 0.04812917324536823 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4039408866995074, + "acc_stderr": 0.0345245390382204, + "acc_norm": 0.4039408866995074, + "acc_norm_stderr": 0.0345245390382204 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6, + "acc_stderr": 0.02786932057166462, + "acc_norm": 0.6, + "acc_norm_stderr": 0.02786932057166462 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7948717948717948, + "acc_stderr": 0.02645350805404033, + "acc_norm": 0.7948717948717948, + "acc_norm_stderr": 0.02645350805404033 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5056603773584906, + "acc_stderr": 0.030770900763851295, + "acc_norm": 0.5056603773584906, + "acc_norm_stderr": 0.030770900763851295 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5909090909090909, + "acc_stderr": 0.04709306978661895, + "acc_norm": 0.5909090909090909, + "acc_norm_stderr": 0.04709306978661895 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.32222222222222224, + "acc_stderr": 
0.028493465091028604, + "acc_norm": 0.32222222222222224, + "acc_norm_stderr": 0.028493465091028604 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3509933774834437, + "acc_stderr": 0.03896981964257375, + "acc_norm": 0.3509933774834437, + "acc_norm_stderr": 0.03896981964257375 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6218905472636815, + "acc_stderr": 0.03428867848778658, + "acc_norm": 0.6218905472636815, + "acc_norm_stderr": 0.03428867848778658 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4046242774566474, + "acc_stderr": 0.03742461193887248, + "acc_norm": 0.4046242774566474, + "acc_norm_stderr": 0.03742461193887248 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.32275132275132273, + "acc_stderr": 0.024078943243597016, + "acc_norm": 0.32275132275132273, + "acc_norm_stderr": 0.024078943243597016 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4930555555555556, + "acc_stderr": 0.041808067502949374, + "acc_norm": 0.4930555555555556, + "acc_norm_stderr": 0.041808067502949374 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.74, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.74, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5375722543352601, + "acc_stderr": 0.02684298551961537, + "acc_norm": 0.5375722543352601, + "acc_norm_stderr": 0.02684298551961537 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4539877300613497, + "acc_stderr": 0.0391170190467718, + "acc_norm": 0.4539877300613497, + "acc_norm_stderr": 0.0391170190467718 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5154320987654321, + "acc_stderr": 0.02780749004427621, + "acc_norm": 0.5154320987654321, + "acc_norm_stderr": 0.02780749004427621 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.25, + 
"acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6683937823834197, + "acc_stderr": 0.03397636541089118, + "acc_norm": 0.6683937823834197, + "acc_norm_stderr": 0.03397636541089118 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3508771929824561, + "acc_stderr": 0.044895393502706986, + "acc_norm": 0.3508771929824561, + "acc_norm_stderr": 0.044895393502706986 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.618348623853211, + "acc_stderr": 0.0208281485170226, + "acc_norm": 0.618348623853211, + "acc_norm_stderr": 0.0208281485170226 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3412698412698413, + "acc_stderr": 0.04240799327574924, + "acc_norm": 0.3412698412698413, + "acc_norm_stderr": 0.04240799327574924 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5228758169934641, + "acc_stderr": 0.028599936776089786, + "acc_norm": 0.5228758169934641, + "acc_norm_stderr": 0.028599936776089786 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6611570247933884, + "acc_stderr": 0.04320767807536672, + "acc_norm": 0.6611570247933884, + "acc_norm_stderr": 0.04320767807536672 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5131578947368421, + "acc_stderr": 0.04067533136309173, + "acc_norm": 0.5131578947368421, + "acc_norm_stderr": 0.04067533136309173 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.02010258389588719, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.02010258389588719 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.33687943262411346, + "acc_stderr": 0.02819553487396673, + "acc_norm": 0.33687943262411346, + "acc_norm_stderr": 0.02819553487396673 + }, + 
"harness|ko_mmlu_machine_learning|5": { + "acc": 0.32142857142857145, + "acc_stderr": 0.0443280405529152, + "acc_norm": 0.32142857142857145, + "acc_norm_stderr": 0.0443280405529152 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3425925925925926, + "acc_stderr": 0.03236585252602156, + "acc_norm": 0.3425925925925926, + "acc_norm_stderr": 0.03236585252602156 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24022346368715083, + "acc_stderr": 0.014288343803925312, + "acc_norm": 0.24022346368715083, + "acc_norm_stderr": 0.014288343803925312 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.55, + "acc_stderr": 0.04999999999999999, + "acc_norm": 0.55, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4227941176470588, + "acc_stderr": 0.030008562845003472, + "acc_norm": 0.4227941176470588, + "acc_norm_stderr": 0.030008562845003472 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5551020408163265, + "acc_stderr": 0.031814251181977865, + "acc_norm": 0.5551020408163265, + "acc_norm_stderr": 0.031814251181977865 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6413502109704642, + "acc_stderr": 0.03121956944530184, + "acc_norm": 0.6413502109704642, + "acc_norm_stderr": 0.03121956944530184 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3396349413298566, + "acc_stderr": 0.01209559250693197, + "acc_norm": 0.3396349413298566, + "acc_norm_stderr": 0.01209559250693197 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6176470588235294, + "acc_stderr": 0.0341078533890472, + "acc_norm": 0.6176470588235294, + "acc_norm_stderr": 0.0341078533890472 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5757575757575758, + "acc_stderr": 0.03859268142070262, + 
"acc_norm": 0.5757575757575758, + "acc_norm_stderr": 0.03859268142070262 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.31334149326805383, + "mc1_stderr": 0.016238065069059605, + "mc2": 0.48295697822763467, + "mc2_stderr": 0.015588606835622463 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.577331759149941, + "acc_stderr": 0.016983506079577604, + "acc_norm": 0.5855962219598583, + "acc_norm_stderr": 0.016936583383943615 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + 
"harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "mssma/ko-solar-10.7b-v0.13", + "model_sha": "efd843249db9a708e776fe203eda7dcb75e7b8d5", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/mssma/ko-solar-10.7b-v0.1b/result_2024-05-16 01:21:49.json b/mssma/ko-solar-10.7b-v0.1b/result_2024-05-16 01:21:49.json new file mode 100644 index 0000000000000000000000000000000000000000..3bd8db15e1ff619644375a6995f85600a525c935 --- /dev/null +++ b/mssma/ko-solar-10.7b-v0.1b/result_2024-05-16 01:21:49.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4180887372013652, + "acc_stderr": 0.014413988396996081, + "acc_norm": 0.4684300341296928, + "acc_norm_stderr": 0.014582236460866984 + }, + 
"harness|ko_hellaswag|10": { + "acc": 0.41276638119896436, + "acc_stderr": 0.004913253031155693, + "acc_norm": 0.5619398526190001, + "acc_norm_stderr": 0.004951346338164479 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4269005847953216, + "acc_stderr": 0.03793620616529917, + "acc_norm": 0.4269005847953216, + "acc_norm_stderr": 0.03793620616529917 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.34951456310679613, + "acc_stderr": 0.04721188506097173, + "acc_norm": 0.34951456310679613, + "acc_norm_stderr": 0.04721188506097173 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3831417624521073, + "acc_stderr": 0.017384774194885634, + "acc_norm": 0.3831417624521073, + "acc_norm_stderr": 0.017384774194885634 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37777777777777777, + "acc_stderr": 0.04188307537595853, + "acc_norm": 0.37777777777777777, + "acc_norm_stderr": 0.04188307537595853 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3148936170212766, + "acc_stderr": 0.03036358219723816, + "acc_norm": 0.3148936170212766, + "acc_norm_stderr": 0.03036358219723816 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.25301204819277107, + "acc_stderr": 0.033844291552331346, + "acc_norm": 0.25301204819277107, + "acc_norm_stderr": 0.033844291552331346 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.41479099678456594, + "acc_stderr": 0.02798268045975957, + "acc_norm": 0.41479099678456594, + "acc_norm_stderr": 0.02798268045975957 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3901345291479821, + "acc_stderr": 0.03273766725459157, + "acc_norm": 0.3901345291479821, + "acc_norm_stderr": 0.03273766725459157 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3282442748091603, + "acc_stderr": 0.041184385658062976, + "acc_norm": 0.3282442748091603, + "acc_norm_stderr": 
0.041184385658062976 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.4595959595959596, + "acc_stderr": 0.035507024651313425, + "acc_norm": 0.4595959595959596, + "acc_norm_stderr": 0.035507024651313425 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4206896551724138, + "acc_stderr": 0.0411391498118926, + "acc_norm": 0.4206896551724138, + "acc_norm_stderr": 0.0411391498118926 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.04158307533083287, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.04158307533083287 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3235294117647059, + "acc_stderr": 0.030388353551886845, + "acc_norm": 0.3235294117647059, + "acc_norm_stderr": 0.030388353551886845 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3641025641025641, + "acc_stderr": 0.02439667298509477, + "acc_norm": 0.3641025641025641, + "acc_norm_stderr": 0.02439667298509477 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.04766075165356461, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.04766075165356461 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2955665024630542, + "acc_stderr": 0.032104944337514575, + "acc_norm": 0.2955665024630542, + "acc_norm_stderr": 0.032104944337514575 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3258064516129032, + "acc_stderr": 0.026662010578567107, + "acc_norm": 
0.3258064516129032, + "acc_norm_stderr": 0.026662010578567107 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5128205128205128, + "acc_stderr": 0.032745319388423504, + "acc_norm": 0.5128205128205128, + "acc_norm_stderr": 0.032745319388423504 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.30943396226415093, + "acc_stderr": 0.028450154794118627, + "acc_norm": 0.30943396226415093, + "acc_norm_stderr": 0.028450154794118627 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.33636363636363636, + "acc_stderr": 0.04525393596302505, + "acc_norm": 0.33636363636363636, + "acc_norm_stderr": 0.04525393596302505 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.02730914058823018, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.02730914058823018 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.037101857261199946, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.037101857261199946 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.48258706467661694, + "acc_stderr": 0.03533389234739245, + "acc_norm": 0.48258706467661694, + "acc_norm_stderr": 0.03533389234739245 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3236994219653179, + "acc_stderr": 0.035676037996391706, + "acc_norm": 0.3236994219653179, + "acc_norm_stderr": 0.035676037996391706 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.02351729433596329, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.02351729433596329 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3125, + "acc_stderr": 0.038760854559127644, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.038760854559127644 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.18, + "acc_stderr": 0.03861229196653694, + "acc_norm": 0.18, + "acc_norm_stderr": 0.03861229196653694 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.41, + "acc_stderr": 
0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.33815028901734107, + "acc_stderr": 0.025469770149400172, + "acc_norm": 0.33815028901734107, + "acc_norm_stderr": 0.025469770149400172 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3374233128834356, + "acc_stderr": 0.03714908409935573, + "acc_norm": 0.3374233128834356, + "acc_norm_stderr": 0.03714908409935573 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.026571483480719978, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.026571483480719978 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.45595854922279794, + "acc_stderr": 0.03594413711272437, + "acc_norm": 0.45595854922279794, + "acc_norm_stderr": 0.03594413711272437 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.04185774424022056, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.04185774424022056 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3394495412844037, + "acc_stderr": 0.02030210934266235, + "acc_norm": 0.3394495412844037, + "acc_norm_stderr": 0.02030210934266235 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.19047619047619047, + "acc_stderr": 0.03512207412302052, + "acc_norm": 0.19047619047619047, + "acc_norm_stderr": 0.03512207412302052 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.026992544339297236, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.026992544339297236 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 
0.4793388429752066, + "acc_stderr": 0.04560456086387235, + "acc_norm": 0.4793388429752066, + "acc_norm_stderr": 0.04560456086387235 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3618421052631579, + "acc_stderr": 0.03910525752849726, + "acc_norm": 0.3618421052631579, + "acc_norm_stderr": 0.03910525752849726 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3284313725490196, + "acc_stderr": 0.01899970738316267, + "acc_norm": 0.3284313725490196, + "acc_norm_stderr": 0.01899970738316267 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2765957446808511, + "acc_stderr": 0.026684564340460987, + "acc_norm": 0.2765957446808511, + "acc_norm_stderr": 0.026684564340460987 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.29464285714285715, + "acc_stderr": 0.043270409325787317, + "acc_norm": 0.29464285714285715, + "acc_norm_stderr": 0.043270409325787317 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.24074074074074073, + "acc_stderr": 0.029157522184605607, + "acc_norm": 0.24074074074074073, + "acc_norm_stderr": 0.029157522184605607 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.25139664804469275, + "acc_stderr": 0.01450897945355399, + "acc_norm": 0.25139664804469275, + "acc_norm_stderr": 0.01450897945355399 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.20955882352941177, + "acc_stderr": 0.02472311040767705, + "acc_norm": 0.20955882352941177, + "acc_norm_stderr": 0.02472311040767705 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2530612244897959, + "acc_stderr": 0.027833023871399666, + "acc_norm": 0.2530612244897959, + "acc_norm_stderr": 
0.027833023871399666 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.47257383966244726, + "acc_stderr": 0.03249822718301303, + "acc_norm": 0.47257383966244726, + "acc_norm_stderr": 0.03249822718301303 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.29726205997392435, + "acc_stderr": 0.011673346173086033, + "acc_norm": 0.29726205997392435, + "acc_norm_stderr": 0.011673346173086033 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.3235294117647059, + "acc_stderr": 0.03283472056108567, + "acc_norm": 0.3235294117647059, + "acc_norm_stderr": 0.03283472056108567 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.38181818181818183, + "acc_stderr": 0.03793713171165635, + "acc_norm": 0.38181818181818183, + "acc_norm_stderr": 0.03793713171165635 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.26193390452876375, + "mc1_stderr": 0.015392118805015011, + "mc2": 0.4460619018984922, + "mc2_stderr": 0.015743940227327165 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4757969303423849, + "acc_stderr": 0.017170202466520748, + "acc_norm": 0.5702479338842975, + "acc_norm_stderr": 0.017019847535972202 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + 
"harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "mssma/ko-solar-10.7b-v0.1b", + "model_sha": "87eb36db3edbd506caee5893b552b5c97e7a653c", + 
"model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/mssma/ko-solar-10.7b-v0.2/result_2024-05-07 02:58:51.json b/mssma/ko-solar-10.7b-v0.2/result_2024-05-07 02:58:51.json new file mode 100644 index 0000000000000000000000000000000000000000..6693ea597b008f347f2bbbfa9700ffbf43085406 --- /dev/null +++ b/mssma/ko-solar-10.7b-v0.2/result_2024-05-07 02:58:51.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.20648464163822525, + "acc_stderr": 0.011828865619002316, + "acc_norm": 0.26109215017064846, + "acc_norm_stderr": 0.012835523909473852 + }, + "harness|ko_hellaswag|10": { + "acc": 0.24945230033857796, + "acc_stderr": 0.004318117166358329, + "acc_norm": 0.2484564827723561, + "acc_norm_stderr": 0.004312347492538343 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.21052631578947367, + "acc_stderr": 0.031267817146631786, + "acc_norm": 0.21052631578947367, + "acc_norm_stderr": 0.031267817146631786 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.21359223300970873, + "acc_stderr": 0.04058042015646035, + "acc_norm": 0.21359223300970873, + "acc_norm_stderr": 0.04058042015646035 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.21583652618135377, + "acc_stderr": 0.014711684386139966, + "acc_norm": 0.21583652618135377, + "acc_norm_stderr": 0.014711684386139966 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.038201699145179055, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.038201699145179055 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.19148936170212766, + "acc_stderr": 0.025722149992637795, + "acc_norm": 0.19148936170212766, + "acc_norm_stderr": 
0.025722149992637795 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.1927710843373494, + "acc_stderr": 0.030709824050565274, + "acc_norm": 0.1927710843373494, + "acc_norm_stderr": 0.030709824050565274 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.29260450160771706, + "acc_stderr": 0.025839898334877983, + "acc_norm": 0.29260450160771706, + "acc_norm_stderr": 0.025839898334877983 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.18834080717488788, + "acc_stderr": 0.02624113299640726, + "acc_norm": 0.18834080717488788, + "acc_norm_stderr": 0.02624113299640726 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2900763358778626, + "acc_stderr": 0.03980066246467765, + "acc_norm": 0.2900763358778626, + "acc_norm_stderr": 0.03980066246467765 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036846, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036846 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.25757575757575757, + "acc_stderr": 0.03115626951964684, + "acc_norm": 0.25757575757575757, + "acc_norm_stderr": 0.03115626951964684 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2827586206896552, + "acc_stderr": 0.037528339580033376, + "acc_norm": 0.2827586206896552, + "acc_norm_stderr": 0.037528339580033376 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3431372549019608, + "acc_stderr": 0.04724007352383889, + "acc_norm": 0.3431372549019608, + "acc_norm_stderr": 0.04724007352383889 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.18067226890756302, + "acc_stderr": 0.02499196496660077, + "acc_norm": 0.18067226890756302, + "acc_norm_stderr": 0.02499196496660077 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2358974358974359, + "acc_stderr": 0.02152596540740873, + "acc_norm": 0.2358974358974359, + "acc_norm_stderr": 0.02152596540740873 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + 
"acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.14814814814814814, + "acc_stderr": 0.03434300243631001, + "acc_norm": 0.14814814814814814, + "acc_norm_stderr": 0.03434300243631001 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.27586206896551724, + "acc_stderr": 0.03144712581678242, + "acc_norm": 0.27586206896551724, + "acc_norm_stderr": 0.03144712581678242 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.22258064516129034, + "acc_stderr": 0.023664216671642535, + "acc_norm": 0.22258064516129034, + "acc_norm_stderr": 0.023664216671642535 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.21794871794871795, + "acc_stderr": 0.027046857630716688, + "acc_norm": 0.21794871794871795, + "acc_norm_stderr": 0.027046857630716688 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.21509433962264152, + "acc_stderr": 0.025288394502891363, + "acc_norm": 0.21509433962264152, + "acc_norm_stderr": 0.025288394502891363 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.20909090909090908, + "acc_stderr": 0.03895091015724137, + "acc_norm": 0.20909090909090908, + "acc_norm_stderr": 0.03895091015724137 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.026842057873833706, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.026842057873833706 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2119205298013245, + "acc_stderr": 0.033367670865679766, + "acc_norm": 0.2119205298013245, + "acc_norm_stderr": 0.033367670865679766 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.21393034825870647, + "acc_stderr": 0.02899690969332891, + "acc_norm": 0.21393034825870647, + "acc_norm_stderr": 0.02899690969332891 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 
0.24855491329479767, + "acc_stderr": 0.03295304696818317, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 0.03295304696818317 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2724867724867725, + "acc_stderr": 0.022930973071633356, + "acc_norm": 0.2724867724867725, + "acc_norm_stderr": 0.022930973071633356 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2361111111111111, + "acc_stderr": 0.03551446610810826, + "acc_norm": 0.2361111111111111, + "acc_norm_stderr": 0.03551446610810826 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.19, + "acc_stderr": 0.039427724440366234, + "acc_norm": 0.19, + "acc_norm_stderr": 0.039427724440366234 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.27167630057803466, + "acc_stderr": 0.023948512905468365, + "acc_norm": 0.27167630057803466, + "acc_norm_stderr": 0.023948512905468365 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.25766871165644173, + "acc_stderr": 0.03436150827846917, + "acc_norm": 0.25766871165644173, + "acc_norm_stderr": 0.03436150827846917 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.24382716049382716, + "acc_stderr": 0.023891879541959607, + "acc_norm": 0.24382716049382716, + "acc_norm_stderr": 0.023891879541959607 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.22797927461139897, + "acc_stderr": 0.03027690994517826, + "acc_norm": 0.22797927461139897, + "acc_norm_stderr": 0.03027690994517826 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.21052631578947367, + "acc_stderr": 0.03835153954399421, + "acc_norm": 0.21052631578947367, + "acc_norm_stderr": 0.03835153954399421 + }, + 
"harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.1981651376146789, + "acc_stderr": 0.01709057380421789, + "acc_norm": 0.1981651376146789, + "acc_norm_stderr": 0.01709057380421789 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.25396825396825395, + "acc_stderr": 0.038932596106046734, + "acc_norm": 0.25396825396825395, + "acc_norm_stderr": 0.038932596106046734 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.02463004897982478, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.02463004897982478 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.18, + "acc_stderr": 0.03861229196653694, + "acc_norm": 0.18, + "acc_norm_stderr": 0.03861229196653694 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.32231404958677684, + "acc_stderr": 0.042664163633521664, + "acc_norm": 0.32231404958677684, + "acc_norm_stderr": 0.042664163633521664 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.26973684210526316, + "acc_stderr": 0.036117805602848975, + "acc_norm": 0.26973684210526316, + "acc_norm_stderr": 0.036117805602848975 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.017401816711427653, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.017401816711427653 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2765957446808511, + "acc_stderr": 0.026684564340461004, + "acc_norm": 0.2765957446808511, + "acc_norm_stderr": 0.026684564340461004 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.23214285714285715, + "acc_stderr": 0.04007341809755808, + "acc_norm": 0.23214285714285715, + "acc_norm_stderr": 0.04007341809755808 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.23148148148148148, + "acc_stderr": 0.02876511171804696, + "acc_norm": 0.23148148148148148, + "acc_norm_stderr": 0.02876511171804696 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24804469273743016, + "acc_stderr": 0.014444157808261457, + 
"acc_norm": 0.24804469273743016, + "acc_norm_stderr": 0.014444157808261457 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.21691176470588236, + "acc_stderr": 0.025035845227711257, + "acc_norm": 0.21691176470588236, + "acc_norm_stderr": 0.025035845227711257 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2571428571428571, + "acc_stderr": 0.02797982353874455, + "acc_norm": 0.2571428571428571, + "acc_norm_stderr": 0.02797982353874455 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.1940928270042194, + "acc_stderr": 0.025744902532290916, + "acc_norm": 0.1940928270042194, + "acc_norm_stderr": 0.025744902532290916 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2503259452411995, + "acc_stderr": 0.011064151027165448, + "acc_norm": 0.2503259452411995, + "acc_norm_stderr": 0.011064151027165448 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.20098039215686275, + "acc_stderr": 0.02812597226565439, + "acc_norm": 0.20098039215686275, + "acc_norm_stderr": 0.02812597226565439 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.17575757575757575, + "acc_stderr": 0.02972094300622445, + "acc_norm": 0.17575757575757575, + "acc_norm_stderr": 0.02972094300622445 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.23011015911872704, + "mc1_stderr": 0.014734557959807767, + "mc2": NaN, + "mc2_stderr": NaN + }, + "harness|ko_commongen_v2|2": { + "acc": 0.1038961038961039, + "acc_stderr": 0.010490438260958935, + "acc_norm": 0.35064935064935066, + "acc_norm_stderr": 0.0164055569038933 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + 
"harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + 
"harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "mssma/ko-solar-10.7b-v0.2", + "model_sha": "e47d942ee5aaf43a3ea4aa069eaabe704df64769", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/mssma/ko-solar-10.7b-v0.2b/result_2024-05-13 05:31:38.json b/mssma/ko-solar-10.7b-v0.2b/result_2024-05-13 05:31:38.json new file mode 100644 index 0000000000000000000000000000000000000000..76896660b56634a732f3dcd49517ea5bb38fa11e --- /dev/null +++ b/mssma/ko-solar-10.7b-v0.2b/result_2024-05-13 05:31:38.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.44368600682593856, + "acc_stderr": 0.014518421825670447, + "acc_norm": 0.5034129692832765, + "acc_norm_stderr": 0.014611050403244084 + }, + "harness|ko_hellaswag|10": { + "acc": 0.454690300736905, + "acc_stderr": 0.004969251445596328, + "acc_norm": 0.6224855606452898, + "acc_norm_stderr": 0.004837744647345717 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5146198830409356, + "acc_stderr": 0.03833185275213026, + "acc_norm": 0.5146198830409356, + "acc_norm_stderr": 0.03833185275213026 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6116504854368932, + "acc_stderr": 0.048257293373563895, + "acc_norm": 
0.6116504854368932, + "acc_norm_stderr": 0.048257293373563895 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5478927203065134, + "acc_stderr": 0.017797751493865633, + "acc_norm": 0.5478927203065134, + "acc_norm_stderr": 0.017797751493865633 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3925925925925926, + "acc_stderr": 0.04218506215368878, + "acc_norm": 0.3925925925925926, + "acc_norm_stderr": 0.04218506215368878 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.451063829787234, + "acc_stderr": 0.032529096196131965, + "acc_norm": 0.451063829787234, + "acc_norm_stderr": 0.032529096196131965 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39156626506024095, + "acc_stderr": 0.03799857454479636, + "acc_norm": 0.39156626506024095, + "acc_norm_stderr": 0.03799857454479636 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4887459807073955, + "acc_stderr": 0.028390897396863526, + "acc_norm": 0.4887459807073955, + "acc_norm_stderr": 0.028390897396863526 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.515695067264574, + "acc_stderr": 0.0335412657542081, + "acc_norm": 0.515695067264574, + "acc_norm_stderr": 0.0335412657542081 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.42748091603053434, + "acc_stderr": 0.04338920305792401, + "acc_norm": 0.42748091603053434, + "acc_norm_stderr": 0.04338920305792401 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.45, + "acc_stderr": 0.04999999999999999, + "acc_norm": 0.45, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.601010101010101, + "acc_stderr": 0.0348890161685273, + "acc_norm": 0.601010101010101, + "acc_norm_stderr": 0.0348890161685273 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.04104269211806232, + "acc_norm": 
0.41379310344827586, + "acc_norm_stderr": 0.04104269211806232 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.35294117647058826, + "acc_stderr": 0.04755129616062948, + "acc_norm": 0.35294117647058826, + "acc_norm_stderr": 0.04755129616062948 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.47478991596638653, + "acc_stderr": 0.03243718055137411, + "acc_norm": 0.47478991596638653, + "acc_norm_stderr": 0.03243718055137411 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5025641025641026, + "acc_stderr": 0.025350672979412184, + "acc_norm": 0.5025641025641026, + "acc_norm_stderr": 0.025350672979412184 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5740740740740741, + "acc_stderr": 0.0478034362693679, + "acc_norm": 0.5740740740740741, + "acc_norm_stderr": 0.0478034362693679 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3399014778325123, + "acc_stderr": 0.033327690684107895, + "acc_norm": 0.3399014778325123, + "acc_norm_stderr": 0.033327690684107895 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4935483870967742, + "acc_stderr": 0.02844163823354051, + "acc_norm": 0.4935483870967742, + "acc_norm_stderr": 0.02844163823354051 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7136752136752137, + "acc_stderr": 0.029614323690456645, + "acc_norm": 0.7136752136752137, + "acc_norm_stderr": 0.029614323690456645 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4528301886792453, + "acc_stderr": 0.03063562795796182, + "acc_norm": 0.4528301886792453, + "acc_norm_stderr": 0.03063562795796182 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5181818181818182, + 
"acc_stderr": 0.04785964010794915, + "acc_norm": 0.5181818181818182, + "acc_norm_stderr": 0.04785964010794915 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.337037037037037, + "acc_stderr": 0.028820884666253252, + "acc_norm": 0.337037037037037, + "acc_norm_stderr": 0.028820884666253252 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.32450331125827814, + "acc_stderr": 0.038227469376587525, + "acc_norm": 0.32450331125827814, + "acc_norm_stderr": 0.038227469376587525 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5920398009950248, + "acc_stderr": 0.03475116365194092, + "acc_norm": 0.5920398009950248, + "acc_norm_stderr": 0.03475116365194092 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4393063583815029, + "acc_stderr": 0.03784271932887467, + "acc_norm": 0.4393063583815029, + "acc_norm_stderr": 0.03784271932887467 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.34656084656084657, + "acc_stderr": 0.024508777521028417, + "acc_norm": 0.34656084656084657, + "acc_norm_stderr": 0.024508777521028417 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4236111111111111, + "acc_stderr": 0.041321250197233685, + "acc_norm": 0.4236111111111111, + "acc_norm_stderr": 0.041321250197233685 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.62, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.62, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4421965317919075, + "acc_stderr": 0.0267386036438074, + "acc_norm": 0.4421965317919075, + "acc_norm_stderr": 0.0267386036438074 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4723926380368098, + "acc_stderr": 0.0392237829061099, + "acc_norm": 0.4723926380368098, + "acc_norm_stderr": 0.0392237829061099 + }, + "harness|ko_mmlu_prehistory|5": { + 
"acc": 0.4228395061728395, + "acc_stderr": 0.027487472980871598, + "acc_norm": 0.4228395061728395, + "acc_norm_stderr": 0.027487472980871598 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6269430051813472, + "acc_stderr": 0.03490205592048574, + "acc_norm": 0.6269430051813472, + "acc_norm_stderr": 0.03490205592048574 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3684210526315789, + "acc_stderr": 0.04537815354939391, + "acc_norm": 0.3684210526315789, + "acc_norm_stderr": 0.04537815354939391 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5669724770642202, + "acc_stderr": 0.021244146569074345, + "acc_norm": 0.5669724770642202, + "acc_norm_stderr": 0.021244146569074345 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04216370213557835, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04216370213557835 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.45751633986928103, + "acc_stderr": 0.02852638345214264, + "acc_norm": 0.45751633986928103, + "acc_norm_stderr": 0.02852638345214264 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5950413223140496, + "acc_stderr": 0.04481137755942469, + "acc_norm": 0.5950413223140496, + "acc_norm_stderr": 0.04481137755942469 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5263157894736842, + "acc_stderr": 0.04063302731486671, + "acc_norm": 0.5263157894736842, + "acc_norm_stderr": 0.04063302731486671 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.39215686274509803, + "acc_stderr": 0.019751726508762626, + "acc_norm": 0.39215686274509803, + "acc_norm_stderr": 0.019751726508762626 + }, + 
"harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2978723404255319, + "acc_stderr": 0.027281608344469414, + "acc_norm": 0.2978723404255319, + "acc_norm_stderr": 0.027281608344469414 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4107142857142857, + "acc_stderr": 0.046695106638751926, + "acc_norm": 0.4107142857142857, + "acc_norm_stderr": 0.046695106638751926 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.35648148148148145, + "acc_stderr": 0.032664783315272714, + "acc_norm": 0.35648148148148145, + "acc_norm_stderr": 0.032664783315272714 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.28268156424581004, + "acc_stderr": 0.015060381730018089, + "acc_norm": 0.28268156424581004, + "acc_norm_stderr": 0.015060381730018089 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3897058823529412, + "acc_stderr": 0.029624663581159685, + "acc_norm": 0.3897058823529412, + "acc_norm_stderr": 0.029624663581159685 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.45714285714285713, + "acc_stderr": 0.03189141832421397, + "acc_norm": 0.45714285714285713, + "acc_norm_stderr": 0.03189141832421397 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6075949367088608, + "acc_stderr": 0.03178471874564729, + "acc_norm": 0.6075949367088608, + "acc_norm_stderr": 0.03178471874564729 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3305084745762712, + "acc_stderr": 0.01201414210184297, + "acc_norm": 0.3305084745762712, + "acc_norm_stderr": 0.01201414210184297 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5637254901960784, + "acc_stderr": 0.03480693138457039, 
+ "acc_norm": 0.5637254901960784, + "acc_norm_stderr": 0.03480693138457039 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5515151515151515, + "acc_stderr": 0.038835659779569286, + "acc_norm": 0.5515151515151515, + "acc_norm_stderr": 0.038835659779569286 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3684210526315789, + "mc1_stderr": 0.016886551261046042, + "mc2": 0.5450380626898707, + "mc2_stderr": 0.016088328593236022 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.48406139315230223, + "acc_stderr": 0.017181617837190195, + "acc_norm": 0.4852420306965762, + "acc_norm_stderr": 0.017182864434998564 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + 
"harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "mssma/ko-solar-10.7b-v0.2b", + "model_sha": "840cdeb76f96ef4bdebf585653b6d17061432169", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/mssma/ko-solar-10.7b-v0.2c/result_2024-05-13 06:24:52.json b/mssma/ko-solar-10.7b-v0.2c/result_2024-05-13 06:24:52.json new file mode 100644 index 0000000000000000000000000000000000000000..8f927cd88c4336541c12e7ca4528077b5d0b5098 --- /dev/null +++ b/mssma/ko-solar-10.7b-v0.2c/result_2024-05-13 06:24:52.json @@ -0,0 
+1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.19027303754266212, + "acc_stderr": 0.011470424179225704, + "acc_norm": 0.2593856655290102, + "acc_norm_stderr": 0.012808273573927106 + }, + "harness|ko_hellaswag|10": { + "acc": 0.24805815574586737, + "acc_stderr": 0.0043100310444591575, + "acc_norm": 0.2484564827723561, + "acc_norm_stderr": 0.004312347492538338 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.03615507630310934, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.03615507630310934 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.20388349514563106, + "acc_stderr": 0.0398913985953177, + "acc_norm": 0.20388349514563106, + "acc_norm_stderr": 0.0398913985953177 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2503192848020434, + "acc_stderr": 0.015491088951494597, + "acc_norm": 0.2503192848020434, + "acc_norm_stderr": 0.015491088951494597 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.18518518518518517, + "acc_stderr": 0.03355677216313143, + "acc_norm": 0.18518518518518517, + "acc_norm_stderr": 0.03355677216313143 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.28085106382978725, + "acc_stderr": 0.02937917046412482, + "acc_norm": 0.28085106382978725, + "acc_norm_stderr": 0.02937917046412482 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.29518072289156627, + "acc_stderr": 0.0355092018568963, + "acc_norm": 0.29518072289156627, + "acc_norm_stderr": 0.0355092018568963 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.1832797427652733, + "acc_stderr": 0.021974198848265812, + "acc_norm": 0.1832797427652733, + "acc_norm_stderr": 0.021974198848265812 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.30493273542600896, + "acc_stderr": 0.030898610882477518, + "acc_norm": 0.30493273542600896, + 
"acc_norm_stderr": 0.030898610882477518 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.24427480916030533, + "acc_stderr": 0.037683359597287434, + "acc_norm": 0.24427480916030533, + "acc_norm_stderr": 0.037683359597287434 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.21212121212121213, + "acc_stderr": 0.029126522834586825, + "acc_norm": 0.21212121212121213, + "acc_norm_stderr": 0.029126522834586825 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2206896551724138, + "acc_stderr": 0.03455930201924812, + "acc_norm": 0.2206896551724138, + "acc_norm_stderr": 0.03455930201924812 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.044405219061793254, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.044405219061793254 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.2184873949579832, + "acc_stderr": 0.02684151432295894, + "acc_norm": 0.2184873949579832, + "acc_norm_stderr": 0.02684151432295894 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2128205128205128, + "acc_stderr": 0.020752423722128006, + "acc_norm": 0.2128205128205128, + "acc_norm_stderr": 0.020752423722128006 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.28703703703703703, + "acc_stderr": 0.043733130409147614, + "acc_norm": 0.28703703703703703, + "acc_norm_stderr": 0.043733130409147614 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.20689655172413793, + "acc_stderr": 0.02850137816789395, + 
"acc_norm": 0.20689655172413793, + "acc_norm_stderr": 0.02850137816789395 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2032258064516129, + "acc_stderr": 0.02289168798455495, + "acc_norm": 0.2032258064516129, + "acc_norm_stderr": 0.02289168798455495 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.32051282051282054, + "acc_stderr": 0.030572811310299604, + "acc_norm": 0.32051282051282054, + "acc_norm_stderr": 0.030572811310299604 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2528301886792453, + "acc_stderr": 0.02674989977124123, + "acc_norm": 0.2528301886792453, + "acc_norm_stderr": 0.02674989977124123 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2909090909090909, + "acc_stderr": 0.04350271442923243, + "acc_norm": 0.2909090909090909, + "acc_norm_stderr": 0.04350271442923243 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.22592592592592592, + "acc_stderr": 0.025497532639609556, + "acc_norm": 0.22592592592592592, + "acc_norm_stderr": 0.025497532639609556 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.19205298013245034, + "acc_stderr": 0.032162984205936156, + "acc_norm": 0.19205298013245034, + "acc_norm_stderr": 0.032162984205936156 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.25870646766169153, + "acc_stderr": 0.03096590312357304, + "acc_norm": 0.25870646766169153, + "acc_norm_stderr": 0.03096590312357304 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2254335260115607, + "acc_stderr": 0.03186209851641142, + "acc_norm": 0.2254335260115607, + "acc_norm_stderr": 0.03186209851641142 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.23809523809523808, + "acc_stderr": 0.021935878081184756, + "acc_norm": 0.23809523809523808, + "acc_norm_stderr": 0.021935878081184756 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.24305555555555555, + "acc_stderr": 0.03586879280080342, + "acc_norm": 0.24305555555555555, + "acc_norm_stderr": 0.03586879280080342 + }, + 
"harness|ko_mmlu_college_chemistry|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2514450867052023, + "acc_stderr": 0.02335736578587403, + "acc_norm": 0.2514450867052023, + "acc_norm_stderr": 0.02335736578587403 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2392638036809816, + "acc_stderr": 0.033519538795212696, + "acc_norm": 0.2392638036809816, + "acc_norm_stderr": 0.033519538795212696 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.24382716049382716, + "acc_stderr": 0.023891879541959607, + "acc_norm": 0.24382716049382716, + "acc_norm_stderr": 0.023891879541959607 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036844, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036844 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.21243523316062177, + "acc_stderr": 0.02951928261681725, + "acc_norm": 0.21243523316062177, + "acc_norm_stderr": 0.02951928261681725 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2982456140350877, + "acc_stderr": 0.04303684033537317, + "acc_norm": 0.2982456140350877, + "acc_norm_stderr": 0.04303684033537317 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.21100917431192662, + "acc_stderr": 0.017493922404112648, + "acc_norm": 0.21100917431192662, + "acc_norm_stderr": 0.017493922404112648 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.21428571428571427, + "acc_stderr": 0.03670066451047181, + "acc_norm": 0.21428571428571427, + "acc_norm_stderr": 0.03670066451047181 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.02392915551735129, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 
0.02392915551735129 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.24793388429752067, + "acc_stderr": 0.03941897526516303, + "acc_norm": 0.24793388429752067, + "acc_norm_stderr": 0.03941897526516303 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.21052631578947367, + "acc_stderr": 0.03317672787533157, + "acc_norm": 0.21052631578947367, + "acc_norm_stderr": 0.03317672787533157 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2581699346405229, + "acc_stderr": 0.017704531653250075, + "acc_norm": 0.2581699346405229, + "acc_norm_stderr": 0.017704531653250075 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.24468085106382978, + "acc_stderr": 0.02564555362226672, + "acc_norm": 0.24468085106382978, + "acc_norm_stderr": 0.02564555362226672 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.24107142857142858, + "acc_stderr": 0.04059867246952685, + "acc_norm": 0.24107142857142858, + "acc_norm_stderr": 0.04059867246952685 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.17592592592592593, + "acc_stderr": 0.02596742095825853, + "acc_norm": 0.17592592592592593, + "acc_norm_stderr": 0.02596742095825853 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24916201117318434, + "acc_stderr": 0.014465893829859926, + "acc_norm": 0.24916201117318434, + "acc_norm_stderr": 0.014465893829859926 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.19852941176470587, + "acc_stderr": 0.024231013370541107, + "acc_norm": 
0.19852941176470587, + "acc_norm_stderr": 0.024231013370541107 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.20408163265306123, + "acc_stderr": 0.025801283475090496, + "acc_norm": 0.20408163265306123, + "acc_norm_stderr": 0.025801283475090496 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.270042194092827, + "acc_stderr": 0.028900721906293426, + "acc_norm": 0.270042194092827, + "acc_norm_stderr": 0.028900721906293426 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.24902216427640156, + "acc_stderr": 0.01104489226404077, + "acc_norm": 0.24902216427640156, + "acc_norm_stderr": 0.01104489226404077 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2696078431372549, + "acc_stderr": 0.031145570659486782, + "acc_norm": 0.2696078431372549, + "acc_norm_stderr": 0.031145570659486782 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.03453131801885415, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.03453131801885415 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2386780905752754, + "mc1_stderr": 0.014922629695456418, + "mc2": NaN, + "mc2_stderr": NaN + }, + "harness|ko_commongen_v2|2": { + "acc": 0.09327036599763873, + "acc_stderr": 0.00999828619027673, + "acc_norm": 0.33293978748524206, + "acc_norm_stderr": 0.016202431208373776 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, 
+ "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + 
"harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "mssma/ko-solar-10.7b-v0.2c", + "model_sha": "e7b3fb51a50b86976694b6cb3510a8a1d8966b62", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/mssma/ko-solar-10.7b-v0.3b/result_2024-05-16 01:22:15.json b/mssma/ko-solar-10.7b-v0.3b/result_2024-05-16 01:22:15.json new file mode 100644 index 0000000000000000000000000000000000000000..bdb8fea98fa496f66e34973ebe639582e7d256af --- /dev/null +++ b/mssma/ko-solar-10.7b-v0.3b/result_2024-05-16 01:22:15.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.44795221843003413, + "acc_stderr": 0.01453201149821167, + "acc_norm": 0.5025597269624573, + "acc_norm_stderr": 0.014611199329843796 + }, + "harness|ko_hellaswag|10": { + "acc": 0.43925512846046605, + "acc_stderr": 0.004952820538831881, + "acc_norm": 0.5990838478390759, + "acc_norm_stderr": 0.004890824718530301 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4619883040935672, + "acc_stderr": 0.03823727092882307, + "acc_norm": 0.4619883040935672, + "acc_norm_stderr": 0.03823727092882307 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5922330097087378, + "acc_stderr": 0.04865777570410769, + "acc_norm": 0.5922330097087378, + "acc_norm_stderr": 0.04865777570410769 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.541507024265645, + "acc_stderr": 0.01781824860346558, + "acc_norm": 0.541507024265645, + "acc_norm_stderr": 0.01781824860346558 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4222222222222222, + "acc_stderr": 0.04266763404099582, + "acc_norm": 0.4222222222222222, + "acc_norm_stderr": 0.04266763404099582 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816505, + "acc_norm": 0.23, + "acc_norm_stderr": 
0.04229525846816505 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.40425531914893614, + "acc_stderr": 0.032081157507886836, + "acc_norm": 0.40425531914893614, + "acc_norm_stderr": 0.032081157507886836 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3433734939759036, + "acc_stderr": 0.03696584317010601, + "acc_norm": 0.3433734939759036, + "acc_norm_stderr": 0.03696584317010601 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4758842443729904, + "acc_stderr": 0.02836504154256457, + "acc_norm": 0.4758842443729904, + "acc_norm_stderr": 0.02836504154256457 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5426008968609866, + "acc_stderr": 0.033435777055830646, + "acc_norm": 0.5426008968609866, + "acc_norm_stderr": 0.033435777055830646 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.40458015267175573, + "acc_stderr": 0.043046937953806645, + "acc_norm": 0.40458015267175573, + "acc_norm_stderr": 0.043046937953806645 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.45, + "acc_stderr": 0.04999999999999999, + "acc_norm": 0.45, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5656565656565656, + "acc_stderr": 0.035315058793591834, + "acc_norm": 0.5656565656565656, + "acc_norm_stderr": 0.035315058793591834 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4068965517241379, + "acc_stderr": 0.04093793981266237, + "acc_norm": 0.4068965517241379, + "acc_norm_stderr": 0.04093793981266237 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.04440521906179328, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.04440521906179328 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.48739495798319327, + "acc_stderr": 0.03246816765752174, + "acc_norm": 0.48739495798319327, + "acc_norm_stderr": 0.03246816765752174 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.44871794871794873, + "acc_stderr": 
0.02521731518484648, + "acc_norm": 0.44871794871794873, + "acc_norm_stderr": 0.02521731518484648 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.048262172941398944, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.048262172941398944 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.31527093596059114, + "acc_stderr": 0.03269080871970186, + "acc_norm": 0.31527093596059114, + "acc_norm_stderr": 0.03269080871970186 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4483870967741935, + "acc_stderr": 0.028292056830112735, + "acc_norm": 0.4483870967741935, + "acc_norm_stderr": 0.028292056830112735 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6752136752136753, + "acc_stderr": 0.03067902276549883, + "acc_norm": 0.6752136752136753, + "acc_norm_stderr": 0.03067902276549883 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.47924528301886793, + "acc_stderr": 0.030746349975723463, + "acc_norm": 0.47924528301886793, + "acc_norm_stderr": 0.030746349975723463 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5545454545454546, + "acc_stderr": 0.04760548821460325, + "acc_norm": 0.5545454545454546, + "acc_norm_stderr": 0.04760548821460325 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.337037037037037, + "acc_stderr": 0.028820884666253252, + "acc_norm": 0.337037037037037, + "acc_norm_stderr": 0.028820884666253252 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.038020397601079024, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.038020397601079024 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 
0.527363184079602, + "acc_stderr": 0.035302355173346824, + "acc_norm": 0.527363184079602, + "acc_norm_stderr": 0.035302355173346824 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3930635838150289, + "acc_stderr": 0.037242495958177295, + "acc_norm": 0.3930635838150289, + "acc_norm_stderr": 0.037242495958177295 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.024594975128920945, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.024594975128920945 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3541666666666667, + "acc_stderr": 0.039994111357535424, + "acc_norm": 0.3541666666666667, + "acc_norm_stderr": 0.039994111357535424 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.67, + "acc_stderr": 0.04725815626252607, + "acc_norm": 0.67, + "acc_norm_stderr": 0.04725815626252607 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.47398843930635837, + "acc_stderr": 0.02688264343402289, + "acc_norm": 0.47398843930635837, + "acc_norm_stderr": 0.02688264343402289 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3803680981595092, + "acc_stderr": 0.038142698932618374, + "acc_norm": 0.3803680981595092, + "acc_norm_stderr": 0.038142698932618374 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4104938271604938, + "acc_stderr": 0.027371350925124764, + "acc_norm": 0.4104938271604938, + "acc_norm_stderr": 0.027371350925124764 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.538860103626943, + "acc_stderr": 0.03597524411734578, + "acc_norm": 0.538860103626943, + "acc_norm_stderr": 0.03597524411734578 + }, + 
"harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.04096985139843671, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.04096985139843671 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5596330275229358, + "acc_stderr": 0.02128431062376154, + "acc_norm": 0.5596330275229358, + "acc_norm_stderr": 0.02128431062376154 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.04040610178208841, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.04040610178208841 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4084967320261438, + "acc_stderr": 0.028146405993096358, + "acc_norm": 0.4084967320261438, + "acc_norm_stderr": 0.028146405993096358 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.04545454545454548, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.04545454545454548 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.46710526315789475, + "acc_stderr": 0.04060127035236395, + "acc_norm": 0.46710526315789475, + "acc_norm_stderr": 0.04060127035236395 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3954248366013072, + "acc_stderr": 0.01978046595477753, + "acc_norm": 0.3954248366013072, + "acc_norm_stderr": 0.01978046595477753 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32269503546099293, + "acc_stderr": 0.027889139300534795, + "acc_norm": 0.32269503546099293, + "acc_norm_stderr": 0.027889139300534795 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.26785714285714285, + "acc_stderr": 0.04203277291467763, + "acc_norm": 0.26785714285714285, + "acc_norm_stderr": 0.04203277291467763 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3472222222222222, + "acc_stderr": 0.032468872436376486, + "acc_norm": 
0.3472222222222222, + "acc_norm_stderr": 0.032468872436376486 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2636871508379888, + "acc_stderr": 0.014736926383761974, + "acc_norm": 0.2636871508379888, + "acc_norm_stderr": 0.014736926383761974 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.53, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.53, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3492647058823529, + "acc_stderr": 0.028959755196824852, + "acc_norm": 0.3492647058823529, + "acc_norm_stderr": 0.028959755196824852 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5061224489795918, + "acc_stderr": 0.032006820201639065, + "acc_norm": 0.5061224489795918, + "acc_norm_stderr": 0.032006820201639065 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6118143459915611, + "acc_stderr": 0.031722950043323296, + "acc_norm": 0.6118143459915611, + "acc_norm_stderr": 0.031722950043323296 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3468057366362451, + "acc_stderr": 0.012156071332318708, + "acc_norm": 0.3468057366362451, + "acc_norm_stderr": 0.012156071332318708 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5049019607843137, + "acc_stderr": 0.03509143375606785, + "acc_norm": 0.5049019607843137, + "acc_norm_stderr": 0.03509143375606785 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6, + "acc_stderr": 0.03825460278380026, + "acc_norm": 0.6, + "acc_norm_stderr": 0.03825460278380026 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3671970624235006, + "mc1_stderr": 0.01687480500145318, + "mc2": 0.539476932713123, + "mc2_stderr": 0.016033065321525937 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.47107438016528924, + "acc_stderr": 
0.01716156394991635, + "acc_norm": 0.4899645808736718, + "acc_norm_stderr": 0.017186891286894053 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + 
"harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "mssma/ko-solar-10.7b-v0.3b", + "model_sha": "a800dbf820c0ee6c598b42bf3252357aabda5de8", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/mssma/ko-solar-10.7b-v0.4/result_2024-05-21 08:49:16.json b/mssma/ko-solar-10.7b-v0.4/result_2024-05-21 08:49:16.json new file mode 100644 index 0000000000000000000000000000000000000000..8128a6aacaeda059867291d70f39a9fd1ef3ccbb --- /dev/null +++ b/mssma/ko-solar-10.7b-v0.4/result_2024-05-21 08:49:16.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.46075085324232085, + "acc_stderr": 0.014566303676636583, + "acc_norm": 0.5102389078498294, + "acc_norm_stderr": 0.014608326906285015 + }, + "harness|ko_hellaswag|10": { + "acc": 0.454690300736905, + "acc_stderr": 0.004969251445596325, + "acc_norm": 0.6130252937661821, + "acc_norm_stderr": 0.00486062373346113 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.543859649122807, + "acc_stderr": 0.038200425866029654, + "acc_norm": 
0.543859649122807, + "acc_norm_stderr": 0.038200425866029654 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5922330097087378, + "acc_stderr": 0.048657775704107696, + "acc_norm": 0.5922330097087378, + "acc_norm_stderr": 0.048657775704107696 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5491698595146871, + "acc_stderr": 0.017793297572699034, + "acc_norm": 0.5491698595146871, + "acc_norm_stderr": 0.017793297572699034 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.43703703703703706, + "acc_stderr": 0.04284958639753399, + "acc_norm": 0.43703703703703706, + "acc_norm_stderr": 0.04284958639753399 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.43829787234042555, + "acc_stderr": 0.03243618636108102, + "acc_norm": 0.43829787234042555, + "acc_norm_stderr": 0.03243618636108102 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.37349397590361444, + "acc_stderr": 0.037658451171688624, + "acc_norm": 0.37349397590361444, + "acc_norm_stderr": 0.037658451171688624 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4405144694533762, + "acc_stderr": 0.028196400574197422, + "acc_norm": 0.4405144694533762, + "acc_norm_stderr": 0.028196400574197422 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.452914798206278, + "acc_stderr": 0.03340867501923324, + "acc_norm": 0.452914798206278, + "acc_norm_stderr": 0.03340867501923324 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.45038167938931295, + "acc_stderr": 0.04363643698524779, + "acc_norm": 0.45038167938931295, + "acc_norm_stderr": 0.04363643698524779 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.601010101010101, + "acc_stderr": 0.034889016168527305, + "acc_norm": 
0.601010101010101, + "acc_norm_stderr": 0.034889016168527305 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3448275862068966, + "acc_stderr": 0.039609335494512087, + "acc_norm": 0.3448275862068966, + "acc_norm_stderr": 0.039609335494512087 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.04389869956808778, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.04389869956808778 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.453781512605042, + "acc_stderr": 0.03233943468182088, + "acc_norm": 0.453781512605042, + "acc_norm_stderr": 0.03233943468182088 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.48205128205128206, + "acc_stderr": 0.025334667080954953, + "acc_norm": 0.48205128205128206, + "acc_norm_stderr": 0.025334667080954953 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5370370370370371, + "acc_stderr": 0.04820403072760627, + "acc_norm": 0.5370370370370371, + "acc_norm_stderr": 0.04820403072760627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3891625615763547, + "acc_stderr": 0.03430462416103872, + "acc_norm": 0.3891625615763547, + "acc_norm_stderr": 0.03430462416103872 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4935483870967742, + "acc_stderr": 0.02844163823354051, + "acc_norm": 0.4935483870967742, + "acc_norm_stderr": 0.02844163823354051 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6923076923076923, + "acc_stderr": 0.0302363899421731, + "acc_norm": 0.6923076923076923, + "acc_norm_stderr": 0.0302363899421731 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4641509433962264, + 
"acc_stderr": 0.030693675018458003, + "acc_norm": 0.4641509433962264, + "acc_norm_stderr": 0.030693675018458003 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5181818181818182, + "acc_stderr": 0.04785964010794915, + "acc_norm": 0.5181818181818182, + "acc_norm_stderr": 0.04785964010794915 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.02794045713622841, + "acc_norm": 0.3, + "acc_norm_stderr": 0.02794045713622841 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.36423841059602646, + "acc_stderr": 0.03929111781242741, + "acc_norm": 0.36423841059602646, + "acc_norm_stderr": 0.03929111781242741 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5323383084577115, + "acc_stderr": 0.03528131472933607, + "acc_norm": 0.5323383084577115, + "acc_norm_stderr": 0.03528131472933607 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.42196531791907516, + "acc_stderr": 0.0376574669386515, + "acc_norm": 0.42196531791907516, + "acc_norm_stderr": 0.0376574669386515 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3492063492063492, + "acc_stderr": 0.024552292209342654, + "acc_norm": 0.3492063492063492, + "acc_norm_stderr": 0.024552292209342654 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.04122728707651282, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.04122728707651282 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237101, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237101 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.43352601156069365, + "acc_stderr": 0.026680134761679217, + "acc_norm": 0.43352601156069365, + "acc_norm_stderr": 0.026680134761679217 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 
0.4171779141104294, + "acc_stderr": 0.03874102859818081, + "acc_norm": 0.4171779141104294, + "acc_norm_stderr": 0.03874102859818081 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4783950617283951, + "acc_stderr": 0.02779476010500874, + "acc_norm": 0.4783950617283951, + "acc_norm_stderr": 0.02779476010500874 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5440414507772021, + "acc_stderr": 0.03594413711272437, + "acc_norm": 0.5440414507772021, + "acc_norm_stderr": 0.03594413711272437 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.32456140350877194, + "acc_stderr": 0.04404556157374768, + "acc_norm": 0.32456140350877194, + "acc_norm_stderr": 0.04404556157374768 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.563302752293578, + "acc_stderr": 0.021264820158714202, + "acc_norm": 0.563302752293578, + "acc_norm_stderr": 0.021264820158714202 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.04134913018303316, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.04134913018303316 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.02845263998508801, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.02845263998508801 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5950413223140496, + "acc_stderr": 0.04481137755942469, + "acc_norm": 0.5950413223140496, + "acc_norm_stderr": 0.04481137755942469 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4407894736842105, + "acc_stderr": 0.040403110624904356, + "acc_norm": 0.4407894736842105, + "acc_norm_stderr": 0.040403110624904356 + }, + 
"harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4133986928104575, + "acc_stderr": 0.019922115682786668, + "acc_norm": 0.4133986928104575, + "acc_norm_stderr": 0.019922115682786668 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.31560283687943264, + "acc_stderr": 0.02772498944950931, + "acc_norm": 0.31560283687943264, + "acc_norm_stderr": 0.02772498944950931 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4017857142857143, + "acc_stderr": 0.04653333146973646, + "acc_norm": 0.4017857142857143, + "acc_norm_stderr": 0.04653333146973646 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.375, + "acc_stderr": 0.033016908987210894, + "acc_norm": 0.375, + "acc_norm_stderr": 0.033016908987210894 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.26145251396648045, + "acc_stderr": 0.014696599650364553, + "acc_norm": 0.26145251396648045, + "acc_norm_stderr": 0.014696599650364553 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4117647058823529, + "acc_stderr": 0.029896163033125485, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.029896163033125485 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5020408163265306, + "acc_stderr": 0.0320089533497105, + "acc_norm": 0.5020408163265306, + "acc_norm_stderr": 0.0320089533497105 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5780590717299579, + "acc_stderr": 0.032148146302403695, + "acc_norm": 0.5780590717299579, + "acc_norm_stderr": 0.032148146302403695 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3194263363754889, + "acc_stderr": 0.011908357176756158, + "acc_norm": 
0.3194263363754889, + "acc_norm_stderr": 0.011908357176756158 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.45098039215686275, + "acc_stderr": 0.03492406104163614, + "acc_norm": 0.45098039215686275, + "acc_norm_stderr": 0.03492406104163614 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.45454545454545453, + "acc_stderr": 0.038881769216741, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.038881769216741 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.32558139534883723, + "mc1_stderr": 0.016403989469907815, + "mc2": 0.5044359154614917, + "mc2_stderr": 0.016114053884967953 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4675324675324675, + "acc_stderr": 0.01715407371668286, + "acc_norm": 0.4757969303423849, + "acc_norm_stderr": 0.017170202466520748 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "mssma/ko-solar-10.7b-v0.4", + "model_sha": "78a1012b94515aeff44a1b5b4aa81bbd7b43d23f", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/mssma/ko-solar-10.7b-v0.5/result_2024-05-24 06:37:05.json b/mssma/ko-solar-10.7b-v0.5/result_2024-05-24 06:37:05.json new file mode 100644 
index 0000000000000000000000000000000000000000..30e50c9b35979d0ba88ae92b3252ae2135a17844 --- /dev/null +++ b/mssma/ko-solar-10.7b-v0.5/result_2024-05-24 06:37:05.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4334470989761092, + "acc_stderr": 0.014481376224558896, + "acc_norm": 0.46245733788395904, + "acc_norm_stderr": 0.014570144495075574 + }, + "harness|ko_hellaswag|10": { + "acc": 0.41943835889265085, + "acc_stderr": 0.004924586362301655, + "acc_norm": 0.5664210316669986, + "acc_norm_stderr": 0.004945558069852528 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.3508771929824561, + "acc_stderr": 0.03660298834049165, + "acc_norm": 0.3508771929824561, + "acc_norm_stderr": 0.03660298834049165 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5436893203883495, + "acc_stderr": 0.04931801994220416, + "acc_norm": 0.5436893203883495, + "acc_norm_stderr": 0.04931801994220416 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4891443167305236, + "acc_stderr": 0.01787574884024242, + "acc_norm": 0.4891443167305236, + "acc_norm_stderr": 0.01787574884024242 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37777777777777777, + "acc_stderr": 0.04188307537595853, + "acc_norm": 0.37777777777777777, + "acc_norm_stderr": 0.04188307537595853 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909284, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909284 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.37872340425531914, + "acc_stderr": 0.03170995606040655, + "acc_norm": 0.37872340425531914, + "acc_norm_stderr": 0.03170995606040655 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42168674698795183, + "acc_stderr": 0.03844453181770917, + "acc_norm": 0.42168674698795183, + "acc_norm_stderr": 0.03844453181770917 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.40836012861736337, + "acc_stderr": 0.027917050748484627, + "acc_norm": 0.40836012861736337, + "acc_norm_stderr": 
0.027917050748484627 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5067264573991032, + "acc_stderr": 0.03355476596234354, + "acc_norm": 0.5067264573991032, + "acc_norm_stderr": 0.03355476596234354 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.42748091603053434, + "acc_stderr": 0.043389203057924, + "acc_norm": 0.42748091603053434, + "acc_norm_stderr": 0.043389203057924 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.4898989898989899, + "acc_stderr": 0.035616254886737454, + "acc_norm": 0.4898989898989899, + "acc_norm_stderr": 0.035616254886737454 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4206896551724138, + "acc_stderr": 0.0411391498118926, + "acc_norm": 0.4206896551724138, + "acc_norm_stderr": 0.0411391498118926 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.043898699568087785, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.043898699568087785 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.40756302521008403, + "acc_stderr": 0.031918633744784645, + "acc_norm": 0.40756302521008403, + "acc_norm_stderr": 0.031918633744784645 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4256410256410256, + "acc_stderr": 0.02506909438729655, + "acc_norm": 0.4256410256410256, + "acc_norm_stderr": 0.02506909438729655 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.39814814814814814, + "acc_stderr": 0.04732332615978814, + "acc_norm": 0.39814814814814814, + 
"acc_norm_stderr": 0.04732332615978814 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.35960591133004927, + "acc_stderr": 0.03376458246509567, + "acc_norm": 0.35960591133004927, + "acc_norm_stderr": 0.03376458246509567 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4064516129032258, + "acc_stderr": 0.02794172734625631, + "acc_norm": 0.4064516129032258, + "acc_norm_stderr": 0.02794172734625631 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6410256410256411, + "acc_stderr": 0.03142616993791924, + "acc_norm": 0.6410256410256411, + "acc_norm_stderr": 0.03142616993791924 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.44528301886792454, + "acc_stderr": 0.030588052974270655, + "acc_norm": 0.44528301886792454, + "acc_norm_stderr": 0.030588052974270655 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.43636363636363634, + "acc_stderr": 0.04750185058907297, + "acc_norm": 0.43636363636363634, + "acc_norm_stderr": 0.04750185058907297 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3037037037037037, + "acc_stderr": 0.028037929969114993, + "acc_norm": 0.3037037037037037, + "acc_norm_stderr": 0.028037929969114993 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3509933774834437, + "acc_stderr": 0.03896981964257375, + "acc_norm": 0.3509933774834437, + "acc_norm_stderr": 0.03896981964257375 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.527363184079602, + "acc_stderr": 0.03530235517334682, + "acc_norm": 0.527363184079602, + "acc_norm_stderr": 0.03530235517334682 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3988439306358382, + "acc_stderr": 0.03733626655383509, + "acc_norm": 0.3988439306358382, + "acc_norm_stderr": 0.03733626655383509 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30423280423280424, + "acc_stderr": 0.02369541500946308, + "acc_norm": 0.30423280423280424, + "acc_norm_stderr": 0.02369541500946308 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 
0.3541666666666667, + "acc_stderr": 0.039994111357535424, + "acc_norm": 0.3541666666666667, + "acc_norm_stderr": 0.039994111357535424 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.35260115606936415, + "acc_stderr": 0.025722802200895813, + "acc_norm": 0.35260115606936415, + "acc_norm_stderr": 0.025722802200895813 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.37423312883435583, + "acc_stderr": 0.03802068102899615, + "acc_norm": 0.37423312883435583, + "acc_norm_stderr": 0.03802068102899615 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4228395061728395, + "acc_stderr": 0.027487472980871598, + "acc_norm": 0.4228395061728395, + "acc_norm_stderr": 0.027487472980871598 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5077720207253886, + "acc_stderr": 0.03608003225569654, + "acc_norm": 0.5077720207253886, + "acc_norm_stderr": 0.03608003225569654 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3157894736842105, + "acc_stderr": 0.04372748290278007, + "acc_norm": 0.3157894736842105, + "acc_norm_stderr": 0.04372748290278007 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.46055045871559636, + "acc_stderr": 0.021370494609995093, + "acc_norm": 0.46055045871559636, + "acc_norm_stderr": 0.021370494609995093 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.29365079365079366, + "acc_stderr": 0.040735243221471255, + "acc_norm": 0.29365079365079366, + "acc_norm_stderr": 0.040735243221471255 + }, + 
"harness|ko_mmlu_nutrition|5": { + "acc": 0.4117647058823529, + "acc_stderr": 0.028180596328259283, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.028180596328259283 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.4628099173553719, + "acc_stderr": 0.04551711196104218, + "acc_norm": 0.4628099173553719, + "acc_norm_stderr": 0.04551711196104218 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4407894736842105, + "acc_stderr": 0.04040311062490435, + "acc_norm": 0.4407894736842105, + "acc_norm_stderr": 0.04040311062490435 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3415032679738562, + "acc_stderr": 0.019184639328092487, + "acc_norm": 0.3415032679738562, + "acc_norm_stderr": 0.019184639328092487 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.30851063829787234, + "acc_stderr": 0.02755336616510137, + "acc_norm": 0.30851063829787234, + "acc_norm_stderr": 0.02755336616510137 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.26785714285714285, + "acc_stderr": 0.042032772914677614, + "acc_norm": 0.26785714285714285, + "acc_norm_stderr": 0.042032772914677614 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.38425925925925924, + "acc_stderr": 0.03317354514310742, + "acc_norm": 0.38425925925925924, + "acc_norm_stderr": 0.03317354514310742 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.44, + "acc_norm_stderr": 
0.049888765156985884 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4117647058823529, + "acc_stderr": 0.02989616303312547, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.02989616303312547 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.49795918367346936, + "acc_stderr": 0.0320089533497105, + "acc_norm": 0.49795918367346936, + "acc_norm_stderr": 0.0320089533497105 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5063291139240507, + "acc_stderr": 0.03254462010767859, + "acc_norm": 0.5063291139240507, + "acc_norm_stderr": 0.03254462010767859 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.30638852672750977, + "acc_stderr": 0.011773980329380705, + "acc_norm": 0.30638852672750977, + "acc_norm_stderr": 0.011773980329380705 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4068627450980392, + "acc_stderr": 0.03447891136353382, + "acc_norm": 0.4068627450980392, + "acc_norm_stderr": 0.03447891136353382 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.49696969696969695, + "acc_stderr": 0.03904272341431856, + "acc_norm": 0.49696969696969695, + "acc_norm_stderr": 0.03904272341431856 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2937576499388005, + "mc1_stderr": 0.01594506858123661, + "mc2": 0.4683571818582267, + "mc2_stderr": 0.015705039509377062 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5182998819362455, + "acc_stderr": 0.017178836639177755, + "acc_norm": 0.5690672963400236, + "acc_norm_stderr": 0.01702555819604314 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + 
"harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + 
"harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "mssma/ko-solar-10.7b-v0.5", + "model_sha": "96f5114e92ee17f9a070f236439a32abee642b54", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/mssma/ko-solar-10.7b-v0.6/result_2024-05-27 06:51:14.json b/mssma/ko-solar-10.7b-v0.6/result_2024-05-27 06:51:14.json new file mode 100644 index 0000000000000000000000000000000000000000..4853fa15b1ea14403c2952a73bcb68de07dc694d --- /dev/null +++ b/mssma/ko-solar-10.7b-v0.6/result_2024-05-27 06:51:14.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.386518771331058, + "acc_stderr": 0.01423008476191047, + "acc_norm": 0.4300341296928328, + "acc_norm_stderr": 0.014467631559137994 + }, + "harness|ko_hellaswag|10": { + "acc": 0.40689105755825533, + "acc_stderr": 0.0049025025147386, + "acc_norm": 0.5412268472415853, + "acc_norm_stderr": 0.004972790690640185 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.3684210526315789, + "acc_stderr": 0.036996580176568775, + "acc_norm": 0.3684210526315789, + "acc_norm_stderr": 0.036996580176568775 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5145631067961165, + "acc_stderr": 0.04948637324026637, + "acc_norm": 0.5145631067961165, + "acc_norm_stderr": 0.04948637324026637 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4508301404853129, + "acc_stderr": 0.01779329757269905, + "acc_norm": 0.4508301404853129, + "acc_norm_stderr": 0.01779329757269905 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.43703703703703706, + "acc_stderr": 0.04284958639753399, + "acc_norm": 0.43703703703703706, + 
"acc_norm_stderr": 0.04284958639753399 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3617021276595745, + "acc_stderr": 0.03141082197596239, + "acc_norm": 0.3617021276595745, + "acc_norm_stderr": 0.03141082197596239 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3674698795180723, + "acc_stderr": 0.03753267402120574, + "acc_norm": 0.3674698795180723, + "acc_norm_stderr": 0.03753267402120574 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4630225080385852, + "acc_stderr": 0.02832032583010592, + "acc_norm": 0.4630225080385852, + "acc_norm_stderr": 0.02832032583010592 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.39461883408071746, + "acc_stderr": 0.03280400504755291, + "acc_norm": 0.39461883408071746, + "acc_norm_stderr": 0.03280400504755291 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3282442748091603, + "acc_stderr": 0.04118438565806298, + "acc_norm": 0.3282442748091603, + "acc_norm_stderr": 0.04118438565806298 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.4595959595959596, + "acc_stderr": 0.035507024651313425, + "acc_norm": 0.4595959595959596, + "acc_norm_stderr": 0.035507024651313425 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.42758620689655175, + "acc_stderr": 0.0412273711137033, + "acc_norm": 0.42758620689655175, + "acc_norm_stderr": 0.0412273711137033 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.04280105837364395, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.04280105837364395 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.36134453781512604, + "acc_stderr": 0.03120469122515001, + 
"acc_norm": 0.36134453781512604, + "acc_norm_stderr": 0.03120469122515001 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.40512820512820513, + "acc_stderr": 0.02489047176993815, + "acc_norm": 0.40512820512820513, + "acc_norm_stderr": 0.02489047176993815 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.04820403072760627, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.04820403072760627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.31527093596059114, + "acc_stderr": 0.03269080871970186, + "acc_norm": 0.31527093596059114, + "acc_norm_stderr": 0.03269080871970186 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4161290322580645, + "acc_stderr": 0.028040981380761536, + "acc_norm": 0.4161290322580645, + "acc_norm_stderr": 0.028040981380761536 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5982905982905983, + "acc_stderr": 0.03211693751051621, + "acc_norm": 0.5982905982905983, + "acc_norm_stderr": 0.03211693751051621 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4528301886792453, + "acc_stderr": 0.030635627957961823, + "acc_norm": 0.4528301886792453, + "acc_norm_stderr": 0.030635627957961823 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.34545454545454546, + "acc_stderr": 0.04554619617541054, + "acc_norm": 0.34545454545454546, + "acc_norm_stderr": 0.04554619617541054 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3074074074074074, + "acc_stderr": 0.028133252578815632, + "acc_norm": 0.3074074074074074, + "acc_norm_stderr": 0.028133252578815632 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 
0.33112582781456956, + "acc_stderr": 0.038425817186598696, + "acc_norm": 0.33112582781456956, + "acc_norm_stderr": 0.038425817186598696 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5124378109452736, + "acc_stderr": 0.03534439848539579, + "acc_norm": 0.5124378109452736, + "acc_norm_stderr": 0.03534439848539579 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3988439306358382, + "acc_stderr": 0.037336266553835096, + "acc_norm": 0.3988439306358382, + "acc_norm_stderr": 0.037336266553835096 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30423280423280424, + "acc_stderr": 0.023695415009463087, + "acc_norm": 0.30423280423280424, + "acc_norm_stderr": 0.023695415009463087 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.03745554791462457, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.03745554791462457 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.55, + "acc_stderr": 0.049999999999999996, + "acc_norm": 0.55, + "acc_norm_stderr": 0.049999999999999996 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4046242774566474, + "acc_stderr": 0.026424816594009852, + "acc_norm": 0.4046242774566474, + "acc_norm_stderr": 0.026424816594009852 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.36809815950920244, + "acc_stderr": 0.03789213935838396, + "acc_norm": 0.36809815950920244, + "acc_norm_stderr": 0.03789213935838396 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.36728395061728397, + "acc_stderr": 0.026822801759507894, + "acc_norm": 0.36728395061728397, + "acc_norm_stderr": 0.026822801759507894 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + 
"harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.48704663212435234, + "acc_stderr": 0.03607228061047749, + "acc_norm": 0.48704663212435234, + "acc_norm_stderr": 0.03607228061047749 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3157894736842105, + "acc_stderr": 0.04372748290278007, + "acc_norm": 0.3157894736842105, + "acc_norm_stderr": 0.04372748290278007 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.41100917431192663, + "acc_stderr": 0.021095050687277638, + "acc_norm": 0.41100917431192663, + "acc_norm_stderr": 0.021095050687277638 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.31746031746031744, + "acc_stderr": 0.04163453031302859, + "acc_norm": 0.31746031746031744, + "acc_norm_stderr": 0.04163453031302859 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3758169934640523, + "acc_stderr": 0.027732834353363944, + "acc_norm": 0.3758169934640523, + "acc_norm_stderr": 0.027732834353363944 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5371900826446281, + "acc_stderr": 0.04551711196104218, + "acc_norm": 0.5371900826446281, + "acc_norm_stderr": 0.04551711196104218 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3815789473684211, + "acc_stderr": 0.03953173377749194, + "acc_norm": 0.3815789473684211, + "acc_norm_stderr": 0.03953173377749194 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3627450980392157, + "acc_stderr": 0.019450768432505518, + "acc_norm": 0.3627450980392157, + "acc_norm_stderr": 0.019450768432505518 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2978723404255319, + "acc_stderr": 0.027281608344469414, + "acc_norm": 0.2978723404255319, + "acc_norm_stderr": 0.027281608344469414 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25892857142857145, + "acc_stderr": 0.04157751539865629, + 
"acc_norm": 0.25892857142857145, + "acc_norm_stderr": 0.04157751539865629 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.03293377139415191, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.03293377139415191 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.28156424581005585, + "acc_stderr": 0.015042290171866113, + "acc_norm": 0.28156424581005585, + "acc_norm_stderr": 0.015042290171866113 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.029520095697687758, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.029520095697687758 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.39183673469387753, + "acc_stderr": 0.03125127591089165, + "acc_norm": 0.39183673469387753, + "acc_norm_stderr": 0.03125127591089165 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.45147679324894513, + "acc_stderr": 0.0323936001739747, + "acc_norm": 0.45147679324894513, + "acc_norm_stderr": 0.0323936001739747 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3220338983050847, + "acc_stderr": 0.011933936071891098, + "acc_norm": 0.3220338983050847, + "acc_norm_stderr": 0.011933936071891098 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.37745098039215685, + "acc_stderr": 0.03402272044340703, + "acc_norm": 0.37745098039215685, + "acc_norm_stderr": 0.03402272044340703 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.45454545454545453, + "acc_stderr": 0.03888176921674099, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.03888176921674099 + }, + 
"harness|ko_truthfulqa_mc|0": { + "mc1": 0.30599755201958384, + "mc1_stderr": 0.01613222972815506, + "mc2": 0.49534794672294, + "mc2_stderr": 0.01594575351345387 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4132231404958678, + "acc_stderr": 0.01692948023449523, + "acc_norm": 0.43211334120425027, + "acc_norm_stderr": 0.017031170198851753 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "mssma/ko-solar-10.7b-v0.6", + "model_sha": "e81ec37fbba491a12f3263b1fc1c194ea9d61059", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/mssma/ko-solar-10.7b-v0.7/result_2024-06-07 07:48:12.json b/mssma/ko-solar-10.7b-v0.7/result_2024-06-07 07:48:12.json new file mode 100644 index 0000000000000000000000000000000000000000..fc7514828b698be8200d3b01046a78a57b04f3e8 --- /dev/null +++ b/mssma/ko-solar-10.7b-v0.7/result_2024-06-07 07:48:12.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4257679180887372, + "acc_stderr": 0.01444946427886881, + "acc_norm": 0.4812286689419795, + "acc_norm_stderr": 0.014601090150633964 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4166500697072296, + "acc_stderr": 
0.004919962822208316, + "acc_norm": 0.5664210316669986, + "acc_norm_stderr": 0.004945558069852528 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4269005847953216, + "acc_stderr": 0.03793620616529917, + "acc_norm": 0.4269005847953216, + "acc_norm_stderr": 0.03793620616529917 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.42718446601941745, + "acc_stderr": 0.04897957737781168, + "acc_norm": 0.42718446601941745, + "acc_norm_stderr": 0.04897957737781168 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.42656449553001274, + "acc_stderr": 0.01768606697567564, + "acc_norm": 0.42656449553001274, + "acc_norm_stderr": 0.01768606697567564 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3111111111111111, + "acc_stderr": 0.03999262876617723, + "acc_norm": 0.3111111111111111, + "acc_norm_stderr": 0.03999262876617723 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.37872340425531914, + "acc_stderr": 0.03170995606040655, + "acc_norm": 0.37872340425531914, + "acc_norm_stderr": 0.03170995606040655 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3493975903614458, + "acc_stderr": 0.037117251907407486, + "acc_norm": 0.3493975903614458, + "acc_norm_stderr": 0.037117251907407486 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.34726688102893893, + "acc_stderr": 0.027040745502307336, + "acc_norm": 0.34726688102893893, + "acc_norm_stderr": 0.027040745502307336 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4977578475336323, + "acc_stderr": 0.033557465352232634, + "acc_norm": 0.4977578475336323, + "acc_norm_stderr": 0.033557465352232634 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3282442748091603, + "acc_stderr": 0.041184385658062976, + "acc_norm": 0.3282442748091603, + "acc_norm_stderr": 0.041184385658062976 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.4, + 
"acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.4292929292929293, + "acc_stderr": 0.03526552724601199, + "acc_norm": 0.4292929292929293, + "acc_norm_stderr": 0.03526552724601199 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.27586206896551724, + "acc_stderr": 0.03724563619774634, + "acc_norm": 0.27586206896551724, + "acc_norm_stderr": 0.03724563619774634 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.040233822736177455, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.040233822736177455 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3739495798319328, + "acc_stderr": 0.031429466378837076, + "acc_norm": 0.3739495798319328, + "acc_norm_stderr": 0.031429466378837076 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.33076923076923076, + "acc_stderr": 0.023854795680971142, + "acc_norm": 0.33076923076923076, + "acc_norm_stderr": 0.023854795680971142 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.44, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.44, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.04766075165356461, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.04766075165356461 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.03178529710642751, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.03178529710642751 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3935483870967742, + "acc_stderr": 0.027791878753132274, + "acc_norm": 0.3935483870967742, + "acc_norm_stderr": 0.027791878753132274 + }, + 
"harness|ko_mmlu_marketing|5": { + "acc": 0.5769230769230769, + "acc_stderr": 0.032366121762202014, + "acc_norm": 0.5769230769230769, + "acc_norm_stderr": 0.032366121762202014 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4339622641509434, + "acc_stderr": 0.030503292013342596, + "acc_norm": 0.4339622641509434, + "acc_norm_stderr": 0.030503292013342596 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.44545454545454544, + "acc_stderr": 0.04760548821460325, + "acc_norm": 0.44545454545454544, + "acc_norm_stderr": 0.04760548821460325 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.29259259259259257, + "acc_stderr": 0.02773896963217609, + "acc_norm": 0.29259259259259257, + "acc_norm_stderr": 0.02773896963217609 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.26490066225165565, + "acc_stderr": 0.03603038545360384, + "acc_norm": 0.26490066225165565, + "acc_norm_stderr": 0.03603038545360384 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.44776119402985076, + "acc_stderr": 0.03516184772952167, + "acc_norm": 0.44776119402985076, + "acc_norm_stderr": 0.03516184772952167 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3063583815028902, + "acc_stderr": 0.03514942551267437, + "acc_norm": 0.3063583815028902, + "acc_norm_stderr": 0.03514942551267437 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.291005291005291, + "acc_stderr": 0.023393826500484875, + "acc_norm": 0.291005291005291, + "acc_norm_stderr": 0.023393826500484875 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3125, + "acc_stderr": 0.038760854559127644, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.038760854559127644 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709390974, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709390974 
+ }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.36416184971098264, + "acc_stderr": 0.025906632631016124, + "acc_norm": 0.36416184971098264, + "acc_norm_stderr": 0.025906632631016124 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3067484662576687, + "acc_stderr": 0.036230899157241474, + "acc_norm": 0.3067484662576687, + "acc_norm_stderr": 0.036230899157241474 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3395061728395062, + "acc_stderr": 0.026348564412011628, + "acc_norm": 0.3395061728395062, + "acc_norm_stderr": 0.026348564412011628 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909284, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909284 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.3471502590673575, + "acc_stderr": 0.03435696168361356, + "acc_norm": 0.3471502590673575, + "acc_norm_stderr": 0.03435696168361356 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.30701754385964913, + "acc_stderr": 0.043391383225798594, + "acc_norm": 0.30701754385964913, + "acc_norm_stderr": 0.043391383225798594 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3908256880733945, + "acc_stderr": 0.020920058346111055, + "acc_norm": 0.3908256880733945, + "acc_norm_stderr": 0.020920058346111055 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.23015873015873015, + "acc_stderr": 0.03764950879790606, + "acc_norm": 0.23015873015873015, + "acc_norm_stderr": 0.03764950879790606 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.32679738562091504, + "acc_stderr": 0.026857294663281413, + "acc_norm": 0.32679738562091504, + "acc_norm_stderr": 0.026857294663281413 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.3884297520661157, + "acc_stderr": 0.04449270350068382, + "acc_norm": 0.3884297520661157, + 
"acc_norm_stderr": 0.04449270350068382 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.03583496176361064, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.03583496176361064 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.35294117647058826, + "acc_stderr": 0.019333142020797063, + "acc_norm": 0.35294117647058826, + "acc_norm_stderr": 0.019333142020797063 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2978723404255319, + "acc_stderr": 0.027281608344469414, + "acc_norm": 0.2978723404255319, + "acc_norm_stderr": 0.027281608344469414 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4017857142857143, + "acc_stderr": 0.04653333146973646, + "acc_norm": 0.4017857142857143, + "acc_norm_stderr": 0.04653333146973646 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.25462962962962965, + "acc_stderr": 0.02971127586000533, + "acc_norm": 0.25462962962962965, + "acc_norm_stderr": 0.02971127586000533 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2324022346368715, + "acc_stderr": 0.014125968754673392, + "acc_norm": 0.2324022346368715, + "acc_norm_stderr": 0.014125968754673392 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.2426470588235294, + "acc_stderr": 0.026040662474201257, + "acc_norm": 0.2426470588235294, + "acc_norm_stderr": 0.026040662474201257 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.27346938775510204, + "acc_stderr": 0.028535560337128438, + "acc_norm": 0.27346938775510204, + "acc_norm_stderr": 0.028535560337128438 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 
0.45569620253164556, + "acc_stderr": 0.03241920684693334, + "acc_norm": 0.45569620253164556, + "acc_norm_stderr": 0.03241920684693334 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2835723598435463, + "acc_stderr": 0.011511900775968325, + "acc_norm": 0.2835723598435463, + "acc_norm_stderr": 0.011511900775968325 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.3627450980392157, + "acc_stderr": 0.03374499356319355, + "acc_norm": 0.3627450980392157, + "acc_norm_stderr": 0.03374499356319355 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3696969696969697, + "acc_stderr": 0.037694303145125674, + "acc_norm": 0.3696969696969697, + "acc_norm_stderr": 0.037694303145125674 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3219094247246022, + "mc1_stderr": 0.016355567611960393, + "mc2": 0.49596301282377164, + "mc2_stderr": 0.01583649347283567 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.45218417945690675, + "acc_stderr": 0.017111567130916785, + "acc_norm": 0.45808736717827625, + "acc_norm_stderr": 0.017129852117911147 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + 
"harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "mssma/ko-solar-10.7b-v0.7", + "model_sha": "d5f7cc575a22efc6bee5e6694dc04a5104b0a5c4", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, 
+ "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/mssma/ko-solar-10.7b-v0.8/result_2024-06-07 07:48:58.json b/mssma/ko-solar-10.7b-v0.8/result_2024-06-07 07:48:58.json new file mode 100644 index 0000000000000000000000000000000000000000..4ff9856c2b2e374fffa3daaa862fba5f4c06b9e5 --- /dev/null +++ b/mssma/ko-solar-10.7b-v0.8/result_2024-06-07 07:48:58.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.47952218430034127, + "acc_stderr": 0.014599131353035012, + "acc_norm": 0.5366894197952219, + "acc_norm_stderr": 0.014572000527756993 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4714200358494324, + "acc_stderr": 0.00498162329219619, + "acc_norm": 0.6426010754829715, + "acc_norm_stderr": 0.004782542754102074 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5614035087719298, + "acc_stderr": 0.038057975055904594, + "acc_norm": 0.5614035087719298, + "acc_norm_stderr": 0.038057975055904594 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6310679611650486, + "acc_stderr": 0.0477761518115674, + "acc_norm": 0.6310679611650486, + "acc_norm_stderr": 0.0477761518115674 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6628352490421456, + "acc_stderr": 0.016905207420803547, + "acc_norm": 0.6628352490421456, + "acc_norm_stderr": 0.016905207420803547 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45185185185185184, + "acc_stderr": 0.042992689054808624, + "acc_norm": 0.45185185185185184, + "acc_norm_stderr": 0.042992689054808624 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.46808510638297873, + "acc_stderr": 0.03261936918467382, + "acc_norm": 0.46808510638297873, + "acc_norm_stderr": 0.03261936918467382 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39156626506024095, + "acc_stderr": 
0.037998574544796354, + "acc_norm": 0.39156626506024095, + "acc_norm_stderr": 0.037998574544796354 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5209003215434084, + "acc_stderr": 0.02837327096106942, + "acc_norm": 0.5209003215434084, + "acc_norm_stderr": 0.02837327096106942 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.515695067264574, + "acc_stderr": 0.0335412657542081, + "acc_norm": 0.515695067264574, + "acc_norm_stderr": 0.0335412657542081 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5801526717557252, + "acc_stderr": 0.043285772152629715, + "acc_norm": 0.5801526717557252, + "acc_norm_stderr": 0.043285772152629715 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6313131313131313, + "acc_stderr": 0.03437305501980619, + "acc_norm": 0.6313131313131313, + "acc_norm_stderr": 0.03437305501980619 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.42758620689655175, + "acc_stderr": 0.041227371113703296, + "acc_norm": 0.42758620689655175, + "acc_norm_stderr": 0.041227371113703296 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04690650298201942, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04690650298201942 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4789915966386555, + "acc_stderr": 0.03244980849990029, + "acc_norm": 0.4789915966386555, + "acc_norm_stderr": 0.03244980849990029 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5230769230769231, + "acc_stderr": 0.025323990861736246, + "acc_norm": 0.5230769230769231, + "acc_norm_stderr": 0.025323990861736246 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 
0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.33497536945812806, + "acc_stderr": 0.033208527423483104, + "acc_norm": 0.33497536945812806, + "acc_norm_stderr": 0.033208527423483104 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5516129032258065, + "acc_stderr": 0.028292056830112728, + "acc_norm": 0.5516129032258065, + "acc_norm_stderr": 0.028292056830112728 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.688034188034188, + "acc_stderr": 0.030351527323344944, + "acc_norm": 0.688034188034188, + "acc_norm_stderr": 0.030351527323344944 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4981132075471698, + "acc_stderr": 0.030772653642075664, + "acc_norm": 0.4981132075471698, + "acc_norm_stderr": 0.030772653642075664 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6090909090909091, + "acc_stderr": 0.04673752333670239, + "acc_norm": 0.6090909090909091, + "acc_norm_stderr": 0.04673752333670239 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.32222222222222224, + "acc_stderr": 0.0284934650910286, + "acc_norm": 0.32222222222222224, + "acc_norm_stderr": 0.0284934650910286 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.36423841059602646, + "acc_stderr": 0.03929111781242742, + "acc_norm": 0.36423841059602646, + "acc_norm_stderr": 0.03929111781242742 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5870646766169154, + "acc_stderr": 0.034815208033673474, + "acc_norm": 0.5870646766169154, + "acc_norm_stderr": 0.034815208033673474 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.42196531791907516, + "acc_stderr": 0.03765746693865151, + "acc_norm": 0.42196531791907516, + "acc_norm_stderr": 
0.03765746693865151 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3492063492063492, + "acc_stderr": 0.02455229220934267, + "acc_norm": 0.3492063492063492, + "acc_norm_stderr": 0.02455229220934267 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4583333333333333, + "acc_stderr": 0.04166666666666665, + "acc_norm": 0.4583333333333333, + "acc_norm_stderr": 0.04166666666666665 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.35, + "acc_stderr": 0.04793724854411019, + "acc_norm": 0.35, + "acc_norm_stderr": 0.04793724854411019 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.68, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.68, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5115606936416185, + "acc_stderr": 0.026911898686377906, + "acc_norm": 0.5115606936416185, + "acc_norm_stderr": 0.026911898686377906 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5214723926380368, + "acc_stderr": 0.0392474687675113, + "acc_norm": 0.5214723926380368, + "acc_norm_stderr": 0.0392474687675113 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5154320987654321, + "acc_stderr": 0.02780749004427621, + "acc_norm": 0.5154320987654321, + "acc_norm_stderr": 0.02780749004427621 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6373056994818653, + "acc_stderr": 0.03469713791704371, + "acc_norm": 0.6373056994818653, + "acc_norm_stderr": 0.03469713791704371 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04434600701584925, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04434600701584925 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6440366972477064, + "acc_stderr": 0.020528559278244218, + "acc_norm": 0.6440366972477064, + 
"acc_norm_stderr": 0.020528559278244218 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.31746031746031744, + "acc_stderr": 0.0416345303130286, + "acc_norm": 0.31746031746031744, + "acc_norm_stderr": 0.0416345303130286 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5359477124183006, + "acc_stderr": 0.02855582751652879, + "acc_norm": 0.5359477124183006, + "acc_norm_stderr": 0.02855582751652879 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.53, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.53, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5867768595041323, + "acc_stderr": 0.04495087843548408, + "acc_norm": 0.5867768595041323, + "acc_norm_stderr": 0.04495087843548408 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5460526315789473, + "acc_stderr": 0.04051646342874143, + "acc_norm": 0.5460526315789473, + "acc_norm_stderr": 0.04051646342874143 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4738562091503268, + "acc_stderr": 0.020200164564804588, + "acc_norm": 0.4738562091503268, + "acc_norm_stderr": 0.020200164564804588 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.34397163120567376, + "acc_stderr": 0.028338017428611313, + "acc_norm": 0.34397163120567376, + "acc_norm_stderr": 0.028338017428611313 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.30357142857142855, + "acc_stderr": 0.04364226155841044, + "acc_norm": 0.30357142857142855, + "acc_norm_stderr": 0.04364226155841044 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.03362277436608042, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.03362277436608042 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.25251396648044694, + "acc_stderr": 0.01453033020146865, + "acc_norm": 0.25251396648044694, + "acc_norm_stderr": 0.01453033020146865 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.42, + "acc_stderr": 
0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3897058823529412, + "acc_stderr": 0.029624663581159685, + "acc_norm": 0.3897058823529412, + "acc_norm_stderr": 0.029624663581159685 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5795918367346938, + "acc_stderr": 0.031601069934496004, + "acc_norm": 0.5795918367346938, + "acc_norm_stderr": 0.031601069934496004 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6708860759493671, + "acc_stderr": 0.030587326294702368, + "acc_norm": 0.6708860759493671, + "acc_norm_stderr": 0.030587326294702368 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.34810951760104303, + "acc_stderr": 0.012166738993698198, + "acc_norm": 0.34810951760104303, + "acc_norm_stderr": 0.012166738993698198 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5882352941176471, + "acc_stderr": 0.034542365853806094, + "acc_norm": 0.5882352941176471, + "acc_norm_stderr": 0.034542365853806094 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5818181818181818, + "acc_stderr": 0.03851716319398394, + "acc_norm": 0.5818181818181818, + "acc_norm_stderr": 0.03851716319398394 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3953488372093023, + "mc1_stderr": 0.017115815632418183, + "mc2": 0.5743237581776016, + "mc2_stderr": 0.016034790178035534 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5537190082644629, + "acc_stderr": 0.017090852631668332, + "acc_norm": 0.5560802833530106, + "acc_norm_stderr": 0.01708188462354254 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + 
"harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 
1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "mssma/ko-solar-10.7b-v0.8", + "model_sha": "f2f397ead566daa813a54a64c65dc159e3c495e8", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/mssma/ko-solar-10.7b-v0.9/result_2024-06-17 07:20:36.json b/mssma/ko-solar-10.7b-v0.9/result_2024-06-17 07:20:36.json new file mode 100644 index 0000000000000000000000000000000000000000..6860a0d16fd2f0df7623bbcaec610710f44ea84d --- /dev/null +++ b/mssma/ko-solar-10.7b-v0.9/result_2024-06-17 07:20:36.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.49402730375426623, + "acc_stderr": 0.014610348300255795, + "acc_norm": 0.5349829351535836, + "acc_norm_stderr": 0.014575583922019667 + }, + "harness|ko_hellaswag|10": { + "acc": 0.47560246962756425, + "acc_stderr": 0.0049838376415028965, + "acc_norm": 0.6467835092611034, + "acc_norm_stderr": 0.004769924131304652 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6140350877192983, + "acc_stderr": 0.03733756969066165, + "acc_norm": 0.6140350877192983, + "acc_norm_stderr": 0.03733756969066165 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6407766990291263, + "acc_stderr": 0.04750458399041696, + "acc_norm": 0.6407766990291263, + "acc_norm_stderr": 0.04750458399041696 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6768837803320562, + 
"acc_stderr": 0.016723726512343044, + "acc_norm": 0.6768837803320562, + "acc_norm_stderr": 0.016723726512343044 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45925925925925926, + "acc_stderr": 0.04304979692464243, + "acc_norm": 0.45925925925925926, + "acc_norm_stderr": 0.04304979692464243 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4808510638297872, + "acc_stderr": 0.032662042990646775, + "acc_norm": 0.4808510638297872, + "acc_norm_stderr": 0.032662042990646775 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39156626506024095, + "acc_stderr": 0.03799857454479636, + "acc_norm": 0.39156626506024095, + "acc_norm_stderr": 0.03799857454479636 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5305466237942122, + "acc_stderr": 0.028345045864840625, + "acc_norm": 0.5305466237942122, + "acc_norm_stderr": 0.028345045864840625 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.515695067264574, + "acc_stderr": 0.0335412657542081, + "acc_norm": 0.515695067264574, + "acc_norm_stderr": 0.0335412657542081 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5038167938931297, + "acc_stderr": 0.043851623256015534, + "acc_norm": 0.5038167938931297, + "acc_norm_stderr": 0.043851623256015534 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.52, + "acc_stderr": 0.05021167315686781, + "acc_norm": 0.52, + "acc_norm_stderr": 0.05021167315686781 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.033586181457325226, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.033586181457325226 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4413793103448276, + "acc_stderr": 0.041379310344827586, + "acc_norm": 0.4413793103448276, + "acc_norm_stderr": 0.041379310344827586 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 
0.2549019607843137, + "acc_stderr": 0.043364327079931785, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.043364327079931785 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.46218487394957986, + "acc_stderr": 0.03238546948758979, + "acc_norm": 0.46218487394957986, + "acc_norm_stderr": 0.03238546948758979 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5256410256410257, + "acc_stderr": 0.025317649726448666, + "acc_norm": 0.5256410256410257, + "acc_norm_stderr": 0.025317649726448666 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.55, + "acc_stderr": 0.04999999999999999, + "acc_norm": 0.55, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.04803752235190193, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.04803752235190193 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3694581280788177, + "acc_stderr": 0.033959703819985726, + "acc_norm": 0.3694581280788177, + "acc_norm_stderr": 0.033959703819985726 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5741935483870968, + "acc_stderr": 0.028129112709165904, + "acc_norm": 0.5741935483870968, + "acc_norm_stderr": 0.028129112709165904 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.782051282051282, + "acc_stderr": 0.02704685763071667, + "acc_norm": 0.782051282051282, + "acc_norm_stderr": 0.02704685763071667 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5283018867924528, + "acc_stderr": 0.0307235352490061, + "acc_norm": 0.5283018867924528, + "acc_norm_stderr": 0.0307235352490061 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.04607582090719976, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.04607582090719976 + }, + 
"harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.36666666666666664, + "acc_stderr": 0.029381620726465083, + "acc_norm": 0.36666666666666664, + "acc_norm_stderr": 0.029381620726465083 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3973509933774834, + "acc_stderr": 0.03995524007681681, + "acc_norm": 0.3973509933774834, + "acc_norm_stderr": 0.03995524007681681 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6368159203980099, + "acc_stderr": 0.03400598505599015, + "acc_norm": 0.6368159203980099, + "acc_norm_stderr": 0.03400598505599015 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3988439306358382, + "acc_stderr": 0.03733626655383509, + "acc_norm": 0.3988439306358382, + "acc_norm_stderr": 0.03733626655383509 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.36507936507936506, + "acc_stderr": 0.02479606060269994, + "acc_norm": 0.36507936507936506, + "acc_norm_stderr": 0.02479606060269994 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4583333333333333, + "acc_stderr": 0.04166666666666665, + "acc_norm": 0.4583333333333333, + "acc_norm_stderr": 0.04166666666666665 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.66, + "acc_stderr": 0.04760952285695237, + "acc_norm": 0.66, + "acc_norm_stderr": 0.04760952285695237 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5173410404624278, + "acc_stderr": 0.026902900458666647, + "acc_norm": 0.5173410404624278, + "acc_norm_stderr": 0.026902900458666647 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4785276073619632, + "acc_stderr": 0.03924746876751129, + "acc_norm": 0.4785276073619632, + "acc_norm_stderr": 0.03924746876751129 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5154320987654321, + "acc_stderr": 0.0278074900442762, + "acc_norm": 0.5154320987654321, + "acc_norm_stderr": 
0.0278074900442762 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6787564766839378, + "acc_stderr": 0.033699508685490674, + "acc_norm": 0.6787564766839378, + "acc_norm_stderr": 0.033699508685490674 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.38596491228070173, + "acc_stderr": 0.04579639422070435, + "acc_norm": 0.38596491228070173, + "acc_norm_stderr": 0.04579639422070435 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.618348623853211, + "acc_stderr": 0.020828148517022603, + "acc_norm": 0.618348623853211, + "acc_norm_stderr": 0.020828148517022603 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.04134913018303316, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.04134913018303316 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5098039215686274, + "acc_stderr": 0.028624412550167965, + "acc_norm": 0.5098039215686274, + "acc_norm_stderr": 0.028624412550167965 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6033057851239669, + "acc_stderr": 0.044658697805310094, + "acc_norm": 0.6033057851239669, + "acc_norm_stderr": 0.044658697805310094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5789473684210527, + "acc_stderr": 0.040179012759817494, + "acc_norm": 0.5789473684210527, + "acc_norm_stderr": 0.040179012759817494 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.47549019607843135, + "acc_stderr": 0.020203517280261447, + "acc_norm": 0.47549019607843135, + "acc_norm_stderr": 0.020203517280261447 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.35106382978723405, + "acc_stderr": 0.028473501272963764, + 
"acc_norm": 0.35106382978723405, + "acc_norm_stderr": 0.028473501272963764 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.29464285714285715, + "acc_stderr": 0.0432704093257873, + "acc_norm": 0.29464285714285715, + "acc_norm_stderr": 0.0432704093257873 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4027777777777778, + "acc_stderr": 0.03344887382997865, + "acc_norm": 0.4027777777777778, + "acc_norm_stderr": 0.03344887382997865 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.20893854748603352, + "acc_stderr": 0.013597079518495253, + "acc_norm": 0.20893854748603352, + "acc_norm_stderr": 0.013597079518495253 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4117647058823529, + "acc_stderr": 0.02989616303312548, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.02989616303312548 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5755102040816327, + "acc_stderr": 0.03164209487942942, + "acc_norm": 0.5755102040816327, + "acc_norm_stderr": 0.03164209487942942 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6751054852320675, + "acc_stderr": 0.030486039389105296, + "acc_norm": 0.6751054852320675, + "acc_norm_stderr": 0.030486039389105296 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3650586701434159, + "acc_stderr": 0.012296373743443476, + "acc_norm": 0.3650586701434159, + "acc_norm_stderr": 0.012296373743443476 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6225490196078431, + "acc_stderr": 0.03402272044340703, + "acc_norm": 0.6225490196078431, + "acc_norm_stderr": 0.03402272044340703 + }, + 
"harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5878787878787879, + "acc_stderr": 0.03843566993588717, + "acc_norm": 0.5878787878787879, + "acc_norm_stderr": 0.03843566993588717 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.397796817625459, + "mc1_stderr": 0.017133934248559628, + "mc2": 0.5817250710472455, + "mc2_stderr": 0.015878859094042453 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5395513577331759, + "acc_stderr": 0.017136487626049846, + "acc_norm": 0.5419126328217237, + "acc_norm_stderr": 0.017129852117911147 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + 
"harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "mssma/ko-solar-10.7b-v0.9", + "model_sha": "1e6811280c7d31548c5d0edb8194a8c046db6160", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/mssma/mssmako_Solar10.7B_dpo_v0.1/result_2024-03-12 01:12:56.json b/mssma/mssmako_Solar10.7B_dpo_v0.1/result_2024-03-12 01:12:56.json new file mode 100644 index 0000000000000000000000000000000000000000..f2bcd564af52e91d05c688c69b0eefe5315b5998 --- /dev/null +++ b/mssma/mssmako_Solar10.7B_dpo_v0.1/result_2024-03-12 01:12:56.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + 
"acc": 0.197098976109215, + "acc_stderr": 0.011625047669880621, + "acc_norm": 0.26023890784982934, + "acc_norm_stderr": 0.012821930225112568 + }, + "harness|ko_hellaswag|10": { + "acc": 0.24905397331208923, + "acc_stderr": 0.004315812968431592, + "acc_norm": 0.2517426807408883, + "acc_norm_stderr": 0.0043312717177738545 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.2046783625730994, + "acc_stderr": 0.030944459778533214, + "acc_norm": 0.2046783625730994, + "acc_norm_stderr": 0.030944459778533214 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2912621359223301, + "acc_stderr": 0.04498676320572922, + "acc_norm": 0.2912621359223301, + "acc_norm_stderr": 0.04498676320572922 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.28607918263090676, + "acc_stderr": 0.01616087140512753, + "acc_norm": 0.28607918263090676, + "acc_norm_stderr": 0.01616087140512753 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.24444444444444444, + "acc_stderr": 0.03712537833614865, + "acc_norm": 0.24444444444444444, + "acc_norm_stderr": 0.03712537833614865 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768077, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768077 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3276595744680851, + "acc_stderr": 0.030683020843231004, + "acc_norm": 0.3276595744680851, + "acc_norm_stderr": 0.030683020843231004 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3373493975903614, + "acc_stderr": 0.0368078369072758, + "acc_norm": 0.3373493975903614, + "acc_norm_stderr": 0.0368078369072758 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2829581993569132, + "acc_stderr": 0.02558306248998484, + "acc_norm": 0.2829581993569132, + "acc_norm_stderr": 0.02558306248998484 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3811659192825112, + "acc_stderr": 0.03259625118416827, + "acc_norm": 0.3811659192825112, + "acc_norm_stderr": 0.03259625118416827 + }, + "harness|ko_mmlu_human_sexuality|5": 
{ + "acc": 0.2366412213740458, + "acc_stderr": 0.03727673575596919, + "acc_norm": 0.2366412213740458, + "acc_norm_stderr": 0.03727673575596919 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768077, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768077 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.21717171717171718, + "acc_stderr": 0.02937661648494563, + "acc_norm": 0.21717171717171718, + "acc_norm_stderr": 0.02937661648494563 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.21379310344827587, + "acc_stderr": 0.03416520447747549, + "acc_norm": 0.21379310344827587, + "acc_norm_stderr": 0.03416520447747549 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.19607843137254902, + "acc_stderr": 0.03950581861179962, + "acc_norm": 0.19607843137254902, + "acc_norm_stderr": 0.03950581861179962 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.22268907563025211, + "acc_stderr": 0.02702543349888238, + "acc_norm": 0.22268907563025211, + "acc_norm_stderr": 0.02702543349888238 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.21794871794871795, + "acc_stderr": 0.020932445774463168, + "acc_norm": 0.21794871794871795, + "acc_norm_stderr": 0.020932445774463168 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.044143436668549335, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.044143436668549335 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.27586206896551724, + "acc_stderr": 0.03144712581678243, + "acc_norm": 0.27586206896551724, + "acc_norm_stderr": 0.03144712581678243 + }, + 
"harness|ko_mmlu_high_school_biology|5": { + "acc": 0.25483870967741934, + "acc_stderr": 0.024790118459332208, + "acc_norm": 0.25483870967741934, + "acc_norm_stderr": 0.024790118459332208 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2692307692307692, + "acc_stderr": 0.029058588303748845, + "acc_norm": 0.2692307692307692, + "acc_norm_stderr": 0.029058588303748845 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.27169811320754716, + "acc_stderr": 0.027377706624670713, + "acc_norm": 0.27169811320754716, + "acc_norm_stderr": 0.027377706624670713 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.36363636363636365, + "acc_stderr": 0.046075820907199756, + "acc_norm": 0.36363636363636365, + "acc_norm_stderr": 0.046075820907199756 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.02684205787383371, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.02684205787383371 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2052980132450331, + "acc_stderr": 0.03297986648473835, + "acc_norm": 0.2052980132450331, + "acc_norm_stderr": 0.03297986648473835 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.23880597014925373, + "acc_stderr": 0.030147775935409224, + "acc_norm": 0.23880597014925373, + "acc_norm_stderr": 0.030147775935409224 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2138728323699422, + "acc_stderr": 0.031265112061730424, + "acc_norm": 0.2138728323699422, + "acc_norm_stderr": 0.031265112061730424 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2566137566137566, + "acc_stderr": 0.022494510767503154, + "acc_norm": 0.2566137566137566, + "acc_norm_stderr": 0.022494510767503154 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2361111111111111, + "acc_stderr": 0.03551446610810826, + "acc_norm": 0.2361111111111111, + "acc_norm_stderr": 0.03551446610810826 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.21, + "acc_stderr": 
0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.23121387283236994, + "acc_stderr": 0.022698657167855713, + "acc_norm": 0.23121387283236994, + "acc_norm_stderr": 0.022698657167855713 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.24539877300613497, + "acc_stderr": 0.03380939813943353, + "acc_norm": 0.24539877300613497, + "acc_norm_stderr": 0.03380939813943353 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2716049382716049, + "acc_stderr": 0.02474862449053737, + "acc_norm": 0.2716049382716049, + "acc_norm_stderr": 0.02474862449053737 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.20725388601036268, + "acc_stderr": 0.02925282329180363, + "acc_norm": 0.20725388601036268, + "acc_norm_stderr": 0.02925282329180363 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.041857744240220575, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.041857744240220575 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.22385321100917432, + "acc_stderr": 0.01787121776779024, + "acc_norm": 0.22385321100917432, + "acc_norm_stderr": 0.01787121776779024 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.1984126984126984, + "acc_stderr": 0.03567016675276862, + "acc_norm": 0.1984126984126984, + "acc_norm_stderr": 0.03567016675276862 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.23202614379084968, + "acc_stderr": 0.02417084087934101, + "acc_norm": 0.23202614379084968, + "acc_norm_stderr": 0.02417084087934101 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.3, + 
"acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.256198347107438, + "acc_stderr": 0.039849796533028704, + "acc_norm": 0.256198347107438, + "acc_norm_stderr": 0.039849796533028704 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.19736842105263158, + "acc_stderr": 0.03238981601699397, + "acc_norm": 0.19736842105263158, + "acc_norm_stderr": 0.03238981601699397 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.26143790849673204, + "acc_stderr": 0.017776947157528037, + "acc_norm": 0.26143790849673204, + "acc_norm_stderr": 0.017776947157528037 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2553191489361702, + "acc_stderr": 0.02601199293090201, + "acc_norm": 0.2553191489361702, + "acc_norm_stderr": 0.02601199293090201 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.042878587513404565, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.042878587513404565 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.16203703703703703, + "acc_stderr": 0.025130453652268455, + "acc_norm": 0.16203703703703703, + "acc_norm_stderr": 0.025130453652268455 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.15, + "acc_stderr": 0.0358870281282637, + "acc_norm": 0.15, + "acc_norm_stderr": 0.0358870281282637 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.19, + "acc_stderr": 0.039427724440366234, + "acc_norm": 0.19, + "acc_norm_stderr": 0.039427724440366234 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.22426470588235295, + "acc_stderr": 0.02533684856333237, + "acc_norm": 0.22426470588235295, + "acc_norm_stderr": 0.02533684856333237 + }, + 
"harness|ko_mmlu_security_studies|5": { + "acc": 0.1673469387755102, + "acc_stderr": 0.02389714476891452, + "acc_norm": 0.1673469387755102, + "acc_norm_stderr": 0.02389714476891452 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.25738396624472576, + "acc_stderr": 0.0284588209914603, + "acc_norm": 0.25738396624472576, + "acc_norm_stderr": 0.0284588209914603 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2392438070404172, + "acc_stderr": 0.010896123652676651, + "acc_norm": 0.2392438070404172, + "acc_norm_stderr": 0.010896123652676651 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.24019607843137256, + "acc_stderr": 0.02998373305591362, + "acc_norm": 0.24019607843137256, + "acc_norm_stderr": 0.02998373305591362 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.24848484848484848, + "acc_stderr": 0.03374402644139404, + "acc_norm": 0.24848484848484848, + "acc_norm_stderr": 0.03374402644139404 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24112607099143207, + "mc1_stderr": 0.014974827279752329, + "mc2": NaN, + "mc2_stderr": NaN + }, + "harness|ko_commongen_v2|2": { + "acc": 0.09799291617473435, + "acc_stderr": 0.010221558855214877, + "acc_norm": 0.3305785123966942, + "acc_norm_stderr": 0.0161734232988457 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + 
"harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + 
"harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "mssma/mssmako_Solar10.7B_dpo_v0.1", + "model_sha": "a421b42cf5672ae4a918019ef5b294ef791e1e37", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/mssongit/Koala-12.8b-v1/result_2023-10-24 06:08:21.json b/mssongit/Koala-12.8b-v1/result_2023-10-24 06:08:21.json new file mode 100644 index 0000000000000000000000000000000000000000..f4bef19ba0f66c4ca2dc9dbd2fdf45c6df637f60 --- /dev/null +++ b/mssongit/Koala-12.8b-v1/result_2023-10-24 06:08:21.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.21245733788395904, + "acc_stderr": 0.01195348290658295, + "acc_norm": 0.2431740614334471, + "acc_norm_stderr": 0.012536554144587096 + }, + "harness|ko_hellaswag|10": { + "acc": 0.2568213503286198, + "acc_stderr": 0.00435987151963954, + "acc_norm": 0.27106154152559253, + "acc_norm_stderr": 0.00443599349258387 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.25146198830409355, + "acc_stderr": 0.033275044238468436, + "acc_norm": 0.25146198830409355, + "acc_norm_stderr": 0.033275044238468436 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.27184466019417475, + "acc_stderr": 0.044052680241409216, + "acc_norm": 0.27184466019417475, + "acc_norm_stderr": 0.044052680241409216 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.21839080459770116, + "acc_stderr": 0.014774358319934486, + "acc_norm": 0.21839080459770116, + "acc_norm_stderr": 0.014774358319934486 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.31851851851851853, + "acc_stderr": 0.04024778401977112, + "acc_norm": 0.31851851851851853, + "acc_norm_stderr": 0.04024778401977112 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + 
"harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.23829787234042554, + "acc_stderr": 0.027851252973889802, + "acc_norm": 0.23829787234042554, + "acc_norm_stderr": 0.027851252973889802 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.1927710843373494, + "acc_stderr": 0.03070982405056527, + "acc_norm": 0.1927710843373494, + "acc_norm_stderr": 0.03070982405056527 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2733118971061093, + "acc_stderr": 0.02531176597542612, + "acc_norm": 0.2733118971061093, + "acc_norm_stderr": 0.02531176597542612 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.13901345291479822, + "acc_stderr": 0.0232193528344745, + "acc_norm": 0.13901345291479822, + "acc_norm_stderr": 0.0232193528344745 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.32061068702290074, + "acc_stderr": 0.040933292298342784, + "acc_norm": 0.32061068702290074, + "acc_norm_stderr": 0.040933292298342784 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.22, + "acc_stderr": 0.0416333199893227, + "acc_norm": 0.22, + "acc_norm_stderr": 0.0416333199893227 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.30808080808080807, + "acc_stderr": 0.03289477330098615, + "acc_norm": 0.30808080808080807, + "acc_norm_stderr": 0.03289477330098615 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2827586206896552, + "acc_stderr": 0.037528339580033376, + "acc_norm": 0.2827586206896552, + "acc_norm_stderr": 0.037528339580033376 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.30392156862745096, + "acc_stderr": 0.04576665403207762, + "acc_norm": 0.30392156862745096, + "acc_norm_stderr": 0.04576665403207762 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.25210084033613445, + "acc_stderr": 0.02820554503327773, + "acc_norm": 0.25210084033613445, + "acc_norm_stderr": 0.02820554503327773 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.29743589743589743, + "acc_stderr": 0.023177408131465932, + "acc_norm": 
0.29743589743589743, + "acc_norm_stderr": 0.023177408131465932 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768077, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768077 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.17592592592592593, + "acc_stderr": 0.036809181416738786, + "acc_norm": 0.17592592592592593, + "acc_norm_stderr": 0.036809181416738786 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.26108374384236455, + "acc_stderr": 0.030903796952114468, + "acc_norm": 0.26108374384236455, + "acc_norm_stderr": 0.030903796952114468 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3032258064516129, + "acc_stderr": 0.026148685930671746, + "acc_norm": 0.3032258064516129, + "acc_norm_stderr": 0.026148685930671746 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2564102564102564, + "acc_stderr": 0.028605953702004253, + "acc_norm": 0.2564102564102564, + "acc_norm_stderr": 0.028605953702004253 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2339622641509434, + "acc_stderr": 0.02605529690115292, + "acc_norm": 0.2339622641509434, + "acc_norm_stderr": 0.02605529690115292 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2909090909090909, + "acc_stderr": 0.04350271442923243, + "acc_norm": 0.2909090909090909, + "acc_norm_stderr": 0.04350271442923243 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.27037037037037037, + "acc_stderr": 0.027080372815145668, + "acc_norm": 0.27037037037037037, + "acc_norm_stderr": 0.027080372815145668 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + "acc_stderr": 0.036848815213890225, + "acc_norm": 0.2847682119205298, + "acc_norm_stderr": 0.036848815213890225 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.25870646766169153, + "acc_stderr": 
0.03096590312357301, + "acc_norm": 0.25870646766169153, + "acc_norm_stderr": 0.03096590312357301 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2832369942196532, + "acc_stderr": 0.034355680560478746, + "acc_norm": 0.2832369942196532, + "acc_norm_stderr": 0.034355680560478746 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.25396825396825395, + "acc_stderr": 0.022418042891113935, + "acc_norm": 0.25396825396825395, + "acc_norm_stderr": 0.022418042891113935 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2708333333333333, + "acc_stderr": 0.03716177437566017, + "acc_norm": 0.2708333333333333, + "acc_norm_stderr": 0.03716177437566017 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.22, + "acc_stderr": 0.0416333199893227, + "acc_norm": 0.22, + "acc_norm_stderr": 0.0416333199893227 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2745664739884393, + "acc_stderr": 0.02402774515526502, + "acc_norm": 0.2745664739884393, + "acc_norm_stderr": 0.02402774515526502 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2331288343558282, + "acc_stderr": 0.0332201579577674, + "acc_norm": 0.2331288343558282, + "acc_norm_stderr": 0.0332201579577674 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.02438366553103545, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.02438366553103545 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.3005181347150259, + "acc_stderr": 0.0330881859441575, + "acc_norm": 0.3005181347150259, + "acc_norm_stderr": 0.0330881859441575 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + 
"acc_stderr": 0.039994238792813344, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.039994238792813344 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.27522935779816515, + "acc_stderr": 0.019149093743155203, + "acc_norm": 0.27522935779816515, + "acc_norm_stderr": 0.019149093743155203 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.041349130183033156, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.041349130183033156 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3006535947712418, + "acc_stderr": 0.02625605383571896, + "acc_norm": 0.3006535947712418, + "acc_norm_stderr": 0.02625605383571896 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036845, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036845 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.1652892561983471, + "acc_stderr": 0.03390780612972776, + "acc_norm": 0.1652892561983471, + "acc_norm_stderr": 0.03390780612972776 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.34210526315789475, + "acc_stderr": 0.03860731599316092, + "acc_norm": 0.34210526315789475, + "acc_norm_stderr": 0.03860731599316092 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.23202614379084968, + "acc_stderr": 0.017077373377857, + "acc_norm": 0.23202614379084968, + "acc_norm_stderr": 0.017077373377857 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.22340425531914893, + "acc_stderr": 0.02484792135806396, + "acc_norm": 0.22340425531914893, + "acc_norm_stderr": 0.02484792135806396 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.24107142857142858, + "acc_stderr": 0.04059867246952687, + "acc_norm": 0.24107142857142858, + "acc_norm_stderr": 0.04059867246952687 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.33796296296296297, + "acc_stderr": 0.03225941352631295, + "acc_norm": 0.33796296296296297, + "acc_norm_stderr": 0.03225941352631295 + }, + 
"harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.26145251396648045, + "acc_stderr": 0.014696599650364555, + "acc_norm": 0.26145251396648045, + "acc_norm_stderr": 0.014696599650364555 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.27941176470588236, + "acc_stderr": 0.027257202606114948, + "acc_norm": 0.27941176470588236, + "acc_norm_stderr": 0.027257202606114948 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2979591836734694, + "acc_stderr": 0.02927956741106567, + "acc_norm": 0.2979591836734694, + "acc_norm_stderr": 0.02927956741106567 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2320675105485232, + "acc_stderr": 0.027479744550808503, + "acc_norm": 0.2320675105485232, + "acc_norm_stderr": 0.027479744550808503 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.25358539765319427, + "acc_stderr": 0.011111715336101127, + "acc_norm": 0.25358539765319427, + "acc_norm_stderr": 0.011111715336101127 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.25980392156862747, + "acc_stderr": 0.03077855467869326, + "acc_norm": 0.25980392156862747, + "acc_norm_stderr": 0.03077855467869326 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.28484848484848485, + "acc_stderr": 0.03524390844511784, + "acc_norm": 0.28484848484848485, + "acc_norm_stderr": 0.03524390844511784 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2178702570379437, + "mc1_stderr": 0.014450846714123911, + "mc2": 0.4610937921300059, + "mc2_stderr": 0.017090763627039533 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.10979929161747344, + "acc_stderr": 0.010748764686721606, + "acc_norm": 
0.22904368358913813, + "acc_norm_stderr": 0.01444737227725382 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + 
"harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "mssongit/Koala-12.8b-v1", + "model_sha": "6e6754abd5a99c7984aa31eff410d3b8ee611ee8", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/mssongit/llama3-8b-koalpaca/result_2024-05-22 02:38:52.json b/mssongit/llama3-8b-koalpaca/result_2024-05-22 02:38:52.json new file mode 100644 index 0000000000000000000000000000000000000000..71ac75bd41b96f67af4edba78a6e737ff83d081e --- /dev/null +++ b/mssongit/llama3-8b-koalpaca/result_2024-05-22 02:38:52.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.34812286689419797, + "acc_stderr": 0.01392100859517935, + "acc_norm": 0.4138225255972696, + "acc_norm_stderr": 0.01439273000922101 + }, + "harness|ko_hellaswag|10": { + "acc": 0.35391356303525195, + "acc_stderr": 0.004772054904404432, + "acc_norm": 0.4457279426409082, + "acc_norm_stderr": 0.004960299952519407 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4269005847953216, + "acc_stderr": 0.03793620616529917, + "acc_norm": 0.4269005847953216, + "acc_norm_stderr": 
0.03793620616529917 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5436893203883495, + "acc_stderr": 0.049318019942204146, + "acc_norm": 0.5436893203883495, + "acc_norm_stderr": 0.049318019942204146 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.42017879948914433, + "acc_stderr": 0.017650651363078, + "acc_norm": 0.42017879948914433, + "acc_norm_stderr": 0.017650651363078 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34814814814814815, + "acc_stderr": 0.041153246103369526, + "acc_norm": 0.34814814814814815, + "acc_norm_stderr": 0.041153246103369526 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.40425531914893614, + "acc_stderr": 0.032081157507886836, + "acc_norm": 0.40425531914893614, + "acc_norm_stderr": 0.032081157507886836 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.35542168674698793, + "acc_stderr": 0.03726214354322415, + "acc_norm": 0.35542168674698793, + "acc_norm_stderr": 0.03726214354322415 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.40192926045016075, + "acc_stderr": 0.027846476005930477, + "acc_norm": 0.40192926045016075, + "acc_norm_stderr": 0.027846476005930477 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3542600896860987, + "acc_stderr": 0.032100621541349864, + "acc_norm": 0.3542600896860987, + "acc_norm_stderr": 0.032100621541349864 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4351145038167939, + "acc_stderr": 0.043482080516448585, + "acc_norm": 0.4351145038167939, + "acc_norm_stderr": 0.043482080516448585 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.42, + "acc_stderr": 0.04960449637488583, + "acc_norm": 0.42, + "acc_norm_stderr": 0.04960449637488583 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.4696969696969697, + "acc_stderr": 0.03555804051763929, + "acc_norm": 0.4696969696969697, + "acc_norm_stderr": 
0.03555804051763929 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4482758620689655, + "acc_stderr": 0.04144311810878151, + "acc_norm": 0.4482758620689655, + "acc_norm_stderr": 0.04144311810878151 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.04389869956808778, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.04389869956808778 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.032145368597886394, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.032145368597886394 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.41025641025641024, + "acc_stderr": 0.024939313906940767, + "acc_norm": 0.41025641025641024, + "acc_norm_stderr": 0.024939313906940767 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5740740740740741, + "acc_stderr": 0.047803436269367894, + "acc_norm": 0.5740740740740741, + "acc_norm_stderr": 0.047803436269367894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3497536945812808, + "acc_stderr": 0.03355400904969566, + "acc_norm": 0.3497536945812808, + "acc_norm_stderr": 0.03355400904969566 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4806451612903226, + "acc_stderr": 0.028422687404312107, + "acc_norm": 0.4806451612903226, + "acc_norm_stderr": 0.028422687404312107 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5726495726495726, + "acc_stderr": 0.032408473935163266, + "acc_norm": 0.5726495726495726, + "acc_norm_stderr": 0.032408473935163266 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4490566037735849, + "acc_stderr": 0.030612730713641095, + 
"acc_norm": 0.4490566037735849, + "acc_norm_stderr": 0.030612730713641095 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.45454545454545453, + "acc_stderr": 0.04769300568972743, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.04769300568972743 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.31851851851851853, + "acc_stderr": 0.028406533090608463, + "acc_norm": 0.31851851851851853, + "acc_norm_stderr": 0.028406533090608463 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + "acc_stderr": 0.037345356767871984, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.037345356767871984 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5522388059701493, + "acc_stderr": 0.03516184772952167, + "acc_norm": 0.5522388059701493, + "acc_norm_stderr": 0.03516184772952167 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3988439306358382, + "acc_stderr": 0.03733626655383509, + "acc_norm": 0.3988439306358382, + "acc_norm_stderr": 0.03733626655383509 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3412698412698413, + "acc_stderr": 0.02441923496681906, + "acc_norm": 0.3412698412698413, + "acc_norm_stderr": 0.02441923496681906 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.040166600304512336, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.040166600304512336 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.44508670520231214, + "acc_stderr": 0.02675625512966377, + "acc_norm": 0.44508670520231214, + "acc_norm_stderr": 0.02675625512966377 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.38650306748466257, + 
"acc_stderr": 0.03825825548848607, + "acc_norm": 0.38650306748466257, + "acc_norm_stderr": 0.03825825548848607 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.41358024691358025, + "acc_stderr": 0.027402042040269955, + "acc_norm": 0.41358024691358025, + "acc_norm_stderr": 0.027402042040269955 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.45595854922279794, + "acc_stderr": 0.035944137112724366, + "acc_norm": 0.45595854922279794, + "acc_norm_stderr": 0.035944137112724366 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.039994238792813344, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.039994238792813344 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.45871559633027525, + "acc_stderr": 0.0213641225338817, + "acc_norm": 0.45871559633027525, + "acc_norm_stderr": 0.0213641225338817 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.04285714285714281, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.04285714285714281 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5130718954248366, + "acc_stderr": 0.028620130800700246, + "acc_norm": 0.5130718954248366, + "acc_norm_stderr": 0.028620130800700246 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.04545454545454546, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.04545454545454546 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.39473684210526316, + "acc_stderr": 0.039777499346220734, + "acc_norm": 0.39473684210526316, + "acc_norm_stderr": 0.039777499346220734 + }, + 
"harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3300653594771242, + "acc_stderr": 0.01902372616072456, + "acc_norm": 0.3300653594771242, + "acc_norm_stderr": 0.01902372616072456 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2872340425531915, + "acc_stderr": 0.026992199173064352, + "acc_norm": 0.2872340425531915, + "acc_norm_stderr": 0.026992199173064352 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.32142857142857145, + "acc_stderr": 0.04432804055291519, + "acc_norm": 0.32142857142857145, + "acc_norm_stderr": 0.04432804055291519 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.39814814814814814, + "acc_stderr": 0.033384734032074016, + "acc_norm": 0.39814814814814814, + "acc_norm_stderr": 0.033384734032074016 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2536312849162011, + "acc_stderr": 0.014551553659369918, + "acc_norm": 0.2536312849162011, + "acc_norm_stderr": 0.014551553659369918 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.35294117647058826, + "acc_stderr": 0.029029422815681404, + "acc_norm": 0.35294117647058826, + "acc_norm_stderr": 0.029029422815681404 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4204081632653061, + "acc_stderr": 0.03160106993449604, + "acc_norm": 0.4204081632653061, + "acc_norm_stderr": 0.03160106993449604 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.4472573839662447, + "acc_stderr": 0.03236564251614192, + "acc_norm": 0.4472573839662447, + "acc_norm_stderr": 0.03236564251614192 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3005215123859192, + "acc_stderr": 0.011709918883039119, + 
"acc_norm": 0.3005215123859192, + "acc_norm_stderr": 0.011709918883039119 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.36764705882352944, + "acc_stderr": 0.03384132045674118, + "acc_norm": 0.36764705882352944, + "acc_norm_stderr": 0.03384132045674118 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4, + "acc_stderr": 0.03825460278380026, + "acc_norm": 0.4, + "acc_norm_stderr": 0.03825460278380026 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2521419828641371, + "mc1_stderr": 0.015201522246299946, + "mc2": 0.44254184468058216, + "mc2_stderr": 0.01617966560213706 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.269185360094451, + "acc_stderr": 0.015249098024144526, + "acc_norm": 0.31995277449822906, + "acc_norm_stderr": 0.01603715384028053 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "mssongit/llama3-8b-koalpaca", + "model_sha": "42dc5b532c76a0249d92c7499d46348db5be0340", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/msy127/ft-240209-sft/result_2024-02-09 14:51:21.json b/msy127/ft-240209-sft/result_2024-02-09 14:51:21.json new file mode 100644 index 
0000000000000000000000000000000000000000..c45b660a48555198e0c2f61d63724d45380b4122 --- /dev/null +++ b/msy127/ft-240209-sft/result_2024-02-09 14:51:21.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4274744027303754, + "acc_stderr": 0.01445686294465065, + "acc_norm": 0.48976109215017066, + "acc_norm_stderr": 0.014608326906285015 + }, + "harness|ko_hellaswag|10": { + "acc": 0.44652459669388567, + "acc_stderr": 0.0049611615892284225, + "acc_norm": 0.608743278231428, + "acc_norm_stderr": 0.0048703425929150475 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.631578947368421, + "acc_stderr": 0.036996580176568775, + "acc_norm": 0.631578947368421, + "acc_norm_stderr": 0.036996580176568775 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6213592233009708, + "acc_stderr": 0.04802694698258974, + "acc_norm": 0.6213592233009708, + "acc_norm_stderr": 0.04802694698258974 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.644955300127714, + "acc_stderr": 0.01711208577277298, + "acc_norm": 0.644955300127714, + "acc_norm_stderr": 0.01711208577277298 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4148148148148148, + "acc_stderr": 0.04256193767901407, + "acc_norm": 0.4148148148148148, + "acc_norm_stderr": 0.04256193767901407 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.5276595744680851, + "acc_stderr": 0.03263597118409769, + "acc_norm": 0.5276595744680851, + "acc_norm_stderr": 0.03263597118409769 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4939759036144578, + "acc_stderr": 0.03892212195333047, + "acc_norm": 0.4939759036144578, + "acc_norm_stderr": 0.03892212195333047 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6270096463022508, + "acc_stderr": 0.027466610213140105, + "acc_norm": 0.6270096463022508, + "acc_norm_stderr": 0.027466610213140105 + 
}, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.547085201793722, + "acc_stderr": 0.03340867501923324, + "acc_norm": 0.547085201793722, + "acc_norm_stderr": 0.03340867501923324 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5954198473282443, + "acc_stderr": 0.043046937953806645, + "acc_norm": 0.5954198473282443, + "acc_norm_stderr": 0.043046937953806645 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.48, + "acc_stderr": 0.05021167315686779, + "acc_norm": 0.48, + "acc_norm_stderr": 0.05021167315686779 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7171717171717171, + "acc_stderr": 0.03208779558786752, + "acc_norm": 0.7171717171717171, + "acc_norm_stderr": 0.03208779558786752 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.503448275862069, + "acc_stderr": 0.04166567577101579, + "acc_norm": 0.503448275862069, + "acc_norm_stderr": 0.04166567577101579 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3431372549019608, + "acc_stderr": 0.047240073523838876, + "acc_norm": 0.3431372549019608, + "acc_norm_stderr": 0.047240073523838876 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6134453781512605, + "acc_stderr": 0.03163145807552378, + "acc_norm": 0.6134453781512605, + "acc_norm_stderr": 0.03163145807552378 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5692307692307692, + "acc_stderr": 0.025106820660539757, + "acc_norm": 0.5692307692307692, + "acc_norm_stderr": 0.025106820660539757 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.57, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.57, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6481481481481481, + "acc_stderr": 0.04616631111801713, + "acc_norm": 0.6481481481481481, + "acc_norm_stderr": 
0.04616631111801713 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3645320197044335, + "acc_stderr": 0.0338640574606209, + "acc_norm": 0.3645320197044335, + "acc_norm_stderr": 0.0338640574606209 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5870967741935483, + "acc_stderr": 0.02800913812540038, + "acc_norm": 0.5870967741935483, + "acc_norm_stderr": 0.02800913812540038 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7905982905982906, + "acc_stderr": 0.026655699653922758, + "acc_norm": 0.7905982905982906, + "acc_norm_stderr": 0.026655699653922758 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5207547169811321, + "acc_stderr": 0.03074634997572347, + "acc_norm": 0.5207547169811321, + "acc_norm_stderr": 0.03074634997572347 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6, + "acc_stderr": 0.0469237132203465, + "acc_norm": 0.6, + "acc_norm_stderr": 0.0469237132203465 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.32592592592592595, + "acc_stderr": 0.028578348365473075, + "acc_norm": 0.32592592592592595, + "acc_norm_stderr": 0.028578348365473075 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3841059602649007, + "acc_stderr": 0.03971301814719197, + "acc_norm": 0.3841059602649007, + "acc_norm_stderr": 0.03971301814719197 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6467661691542289, + "acc_stderr": 0.03379790611796777, + "acc_norm": 0.6467661691542289, + "acc_norm_stderr": 0.03379790611796777 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5549132947976878, + "acc_stderr": 0.03789401760283647, + "acc_norm": 0.5549132947976878, + "acc_norm_stderr": 0.03789401760283647 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.4312169312169312, + "acc_stderr": 0.025506481698138208, + "acc_norm": 0.4312169312169312, + "acc_norm_stderr": 0.025506481698138208 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5, + "acc_stderr": 0.04181210050035455, + "acc_norm": 
0.5, + "acc_norm_stderr": 0.04181210050035455 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.69, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.69, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5838150289017341, + "acc_stderr": 0.026538189104705474, + "acc_norm": 0.5838150289017341, + "acc_norm_stderr": 0.026538189104705474 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5030674846625767, + "acc_stderr": 0.03928297078179663, + "acc_norm": 0.5030674846625767, + "acc_norm_stderr": 0.03928297078179663 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5709876543209876, + "acc_stderr": 0.027538925613470863, + "acc_norm": 0.5709876543209876, + "acc_norm_stderr": 0.027538925613470863 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7564766839378239, + "acc_stderr": 0.030975436386845436, + "acc_norm": 0.7564766839378239, + "acc_norm_stderr": 0.030975436386845436 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.43859649122807015, + "acc_stderr": 0.04668000738510455, + "acc_norm": 0.43859649122807015, + "acc_norm_stderr": 0.04668000738510455 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.673394495412844, + "acc_stderr": 0.0201069908899373, + "acc_norm": 0.673394495412844, + "acc_norm_stderr": 0.0201069908899373 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.49206349206349204, + "acc_stderr": 0.044715725362943486, + "acc_norm": 0.49206349206349204, + "acc_norm_stderr": 0.044715725362943486 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5718954248366013, + "acc_stderr": 0.028332397483664274, + "acc_norm": 
0.5718954248366013, + "acc_norm_stderr": 0.028332397483664274 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.61, + "acc_stderr": 0.049020713000019756, + "acc_norm": 0.61, + "acc_norm_stderr": 0.049020713000019756 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.71900826446281, + "acc_stderr": 0.04103203830514511, + "acc_norm": 0.71900826446281, + "acc_norm_stderr": 0.04103203830514511 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5657894736842105, + "acc_stderr": 0.040335656678483205, + "acc_norm": 0.5657894736842105, + "acc_norm_stderr": 0.040335656678483205 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4738562091503268, + "acc_stderr": 0.020200164564804588, + "acc_norm": 0.4738562091503268, + "acc_norm_stderr": 0.020200164564804588 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.375886524822695, + "acc_stderr": 0.028893955412115886, + "acc_norm": 0.375886524822695, + "acc_norm_stderr": 0.028893955412115886 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4375, + "acc_stderr": 0.04708567521880525, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.04708567521880525 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5092592592592593, + "acc_stderr": 0.034093869469927006, + "acc_norm": 0.5092592592592593, + "acc_norm_stderr": 0.034093869469927006 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.288268156424581, + "acc_stderr": 0.015149132860209425, + "acc_norm": 0.288268156424581, + "acc_norm_stderr": 0.015149132860209425 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.71, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.71, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5, + "acc_stderr": 0.030372836961539352, + "acc_norm": 0.5, + 
"acc_norm_stderr": 0.030372836961539352 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6122448979591837, + "acc_stderr": 0.031192230726795656, + "acc_norm": 0.6122448979591837, + "acc_norm_stderr": 0.031192230726795656 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.70042194092827, + "acc_stderr": 0.029818024749753095, + "acc_norm": 0.70042194092827, + "acc_norm_stderr": 0.029818024749753095 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.4178617992177314, + "acc_stderr": 0.01259674410899856, + "acc_norm": 0.4178617992177314, + "acc_norm_stderr": 0.01259674410899856 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6421568627450981, + "acc_stderr": 0.03364487286088299, + "acc_norm": 0.6421568627450981, + "acc_norm_stderr": 0.03364487286088299 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.036810508691615486, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.036810508691615486 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3806609547123623, + "mc1_stderr": 0.01699762787190793, + "mc2": 0.5480374129709, + "mc2_stderr": 0.015680630022687686 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.6304604486422668, + "acc_stderr": 0.016594883405685434, + "acc_norm": 0.6576151121605667, + "acc_norm_stderr": 0.016313907844146384 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + 
"harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + 
"harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "msy127/ft-240209-sft", + "model_sha": "12efc89393b9ca1d6741eac29250234a023c3e66", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/msy127/ft_240201_01/result_2024-02-09 14:57:22.json b/msy127/ft_240201_01/result_2024-02-09 14:57:22.json new file mode 100644 index 0000000000000000000000000000000000000000..ef76fa471a6a8c614995176a0bc118edfce85fd7 --- /dev/null +++ b/msy127/ft_240201_01/result_2024-02-09 14:57:22.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.32081911262798635, + "acc_stderr": 0.013640943091946526, + "acc_norm": 0.38054607508532423, + "acc_norm_stderr": 0.014188277712349815 + }, + "harness|ko_hellaswag|10": { + "acc": 0.38090021907986454, + "acc_stderr": 0.004846156699486659, + "acc_norm": 0.49502091216889066, + "acc_norm_stderr": 0.0049895339988203545 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.03377310252209194, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.03377310252209194 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2815533980582524, + "acc_stderr": 0.044532548363264673, + "acc_norm": 0.2815533980582524, + "acc_norm_stderr": 0.044532548363264673 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.017268607560005787, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.017268607560005787 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.31851851851851853, + "acc_stderr": 0.04024778401977111, + "acc_norm": 0.31851851851851853, + "acc_norm_stderr": 0.04024778401977111 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + 
"harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.33191489361702126, + "acc_stderr": 0.030783736757745657, + "acc_norm": 0.33191489361702126, + "acc_norm_stderr": 0.030783736757745657 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3373493975903614, + "acc_stderr": 0.03680783690727581, + "acc_norm": 0.3373493975903614, + "acc_norm_stderr": 0.03680783690727581 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.36012861736334406, + "acc_stderr": 0.027264297599804015, + "acc_norm": 0.36012861736334406, + "acc_norm_stderr": 0.027264297599804015 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.31390134529147984, + "acc_stderr": 0.031146796482972465, + "acc_norm": 0.31390134529147984, + "acc_norm_stderr": 0.031146796482972465 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.33587786259541985, + "acc_stderr": 0.041423137719966634, + "acc_norm": 0.33587786259541985, + "acc_norm_stderr": 0.041423137719966634 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3434343434343434, + "acc_stderr": 0.033832012232444426, + "acc_norm": 0.3434343434343434, + "acc_norm_stderr": 0.033832012232444426 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2413793103448276, + "acc_stderr": 0.03565998174135303, + "acc_norm": 0.2413793103448276, + "acc_norm_stderr": 0.03565998174135303 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.04023382273617747, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.04023382273617747 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.029597329730978093, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.029597329730978093 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2923076923076923, + "acc_stderr": 0.023060438380857754, + 
"acc_norm": 0.2923076923076923, + "acc_norm_stderr": 0.023060438380857754 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.04330043749650742, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.04330043749650742 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.29064039408866993, + "acc_stderr": 0.0319474007226554, + "acc_norm": 0.29064039408866993, + "acc_norm_stderr": 0.0319474007226554 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2903225806451613, + "acc_stderr": 0.025822106119415895, + "acc_norm": 0.2903225806451613, + "acc_norm_stderr": 0.025822106119415895 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.3547008547008547, + "acc_stderr": 0.03134250486245402, + "acc_norm": 0.3547008547008547, + "acc_norm_stderr": 0.03134250486245402 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2943396226415094, + "acc_stderr": 0.028049186315695245, + "acc_norm": 0.2943396226415094, + "acc_norm_stderr": 0.028049186315695245 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.33636363636363636, + "acc_stderr": 0.04525393596302505, + "acc_norm": 0.33636363636363636, + "acc_norm_stderr": 0.04525393596302505 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.22962962962962963, + "acc_stderr": 0.025644108639267645, + "acc_norm": 0.22962962962962963, + "acc_norm_stderr": 0.025644108639267645 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + "acc_stderr": 0.037345356767871984, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.037345356767871984 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.2736318407960199, + 
"acc_stderr": 0.031524391865554016, + "acc_norm": 0.2736318407960199, + "acc_norm_stderr": 0.031524391865554016 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.26011560693641617, + "acc_stderr": 0.033450369167889925, + "acc_norm": 0.26011560693641617, + "acc_norm_stderr": 0.033450369167889925 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.25396825396825395, + "acc_stderr": 0.022418042891113942, + "acc_norm": 0.25396825396825395, + "acc_norm_stderr": 0.022418042891113942 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.19444444444444445, + "acc_stderr": 0.03309615177059006, + "acc_norm": 0.19444444444444445, + "acc_norm_stderr": 0.03309615177059006 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2861271676300578, + "acc_stderr": 0.02433214677913413, + "acc_norm": 0.2861271676300578, + "acc_norm_stderr": 0.02433214677913413 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.25766871165644173, + "acc_stderr": 0.03436150827846917, + "acc_norm": 0.25766871165644173, + "acc_norm_stderr": 0.03436150827846917 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.02584224870090217, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.02584224870090217 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.24352331606217617, + "acc_stderr": 0.030975436386845436, + "acc_norm": 0.24352331606217617, + "acc_norm_stderr": 0.030975436386845436 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 
0.24561403508771928, + "acc_stderr": 0.04049339297748141, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.04049339297748141 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3669724770642202, + "acc_stderr": 0.020664675659520532, + "acc_norm": 0.3669724770642202, + "acc_norm_stderr": 0.020664675659520532 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.16666666666666666, + "acc_stderr": 0.033333333333333375, + "acc_norm": 0.16666666666666666, + "acc_norm_stderr": 0.033333333333333375 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3431372549019608, + "acc_stderr": 0.02718449890994161, + "acc_norm": 0.3431372549019608, + "acc_norm_stderr": 0.02718449890994161 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720683, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720683 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.36363636363636365, + "acc_stderr": 0.043913262867240704, + "acc_norm": 0.36363636363636365, + "acc_norm_stderr": 0.043913262867240704 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3026315789473684, + "acc_stderr": 0.03738520676119669, + "acc_norm": 0.3026315789473684, + "acc_norm_stderr": 0.03738520676119669 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.27124183006535946, + "acc_stderr": 0.01798661530403031, + "acc_norm": 0.27124183006535946, + "acc_norm_stderr": 0.01798661530403031 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.24822695035460993, + "acc_stderr": 0.0257700156442904, + "acc_norm": 0.24822695035460993, + "acc_norm_stderr": 0.0257700156442904 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3125, + "acc_stderr": 0.043994650575715215, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.043994650575715215 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.03214952147802751, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.03214952147802751 + }, + 
"harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24581005586592178, + "acc_stderr": 0.014400296429225613, + "acc_norm": 0.24581005586592178, + "acc_norm_stderr": 0.014400296429225613 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816507, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816507 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.40808823529411764, + "acc_stderr": 0.029855261393483927, + "acc_norm": 0.40808823529411764, + "acc_norm_stderr": 0.029855261393483927 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2612244897959184, + "acc_stderr": 0.028123429335142787, + "acc_norm": 0.2612244897959184, + "acc_norm_stderr": 0.028123429335142787 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.33755274261603374, + "acc_stderr": 0.030781549102026223, + "acc_norm": 0.33755274261603374, + "acc_norm_stderr": 0.030781549102026223 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2848761408083442, + "acc_stderr": 0.011527830846369009, + "acc_norm": 0.2848761408083442, + "acc_norm_stderr": 0.011527830846369009 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.030587591351604246, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.030587591351604246 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3090909090909091, + "acc_stderr": 0.03608541011573967, + "acc_norm": 0.3090909090909091, + "acc_norm_stderr": 0.03608541011573967 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24479804161566707, + "mc1_stderr": 0.01505186948671501, + "mc2": 0.3863566894465809, + "mc2_stderr": 0.014780217362798024 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.27508854781582054, + "acc_stderr": 0.015353010757952654, + "acc_norm": 
0.39315230224321135, + "acc_norm_stderr": 0.01679326280128708 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + 
"harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "msy127/ft_240201_01", + "model_sha": "d096447f338727096edab9335fda16e43af087a4", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/msy127/mnsim-dpo-peftmerged-2-eos/result_2024-01-31 05:34:09.json b/msy127/mnsim-dpo-peftmerged-2-eos/result_2024-01-31 05:34:09.json new file mode 100644 index 0000000000000000000000000000000000000000..dd0b48dec2ec65a83152e91ebfc3ff56b79d8e80 --- /dev/null +++ b/msy127/mnsim-dpo-peftmerged-2-eos/result_2024-01-31 05:34:09.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3984641638225256, + "acc_stderr": 0.014306946052735567, + "acc_norm": 0.4539249146757679, + "acc_norm_stderr": 0.014549221105171864 + }, + "harness|ko_hellaswag|10": { + "acc": 0.40888269269069905, + "acc_stderr": 0.004906227902850754, + "acc_norm": 0.5387373033260306, + "acc_norm_stderr": 0.004974783753309704 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5029239766081871, + "acc_stderr": 0.03834759370936839, + "acc_norm": 0.5029239766081871, + 
"acc_norm_stderr": 0.03834759370936839 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.47572815533980584, + "acc_stderr": 0.049449010929737795, + "acc_norm": 0.47572815533980584, + "acc_norm_stderr": 0.049449010929737795 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5466155810983397, + "acc_stderr": 0.017802087135850308, + "acc_norm": 0.5466155810983397, + "acc_norm_stderr": 0.017802087135850308 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45925925925925926, + "acc_stderr": 0.04304979692464244, + "acc_norm": 0.45925925925925926, + "acc_norm_stderr": 0.04304979692464244 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621503, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621503 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.40425531914893614, + "acc_stderr": 0.03208115750788684, + "acc_norm": 0.40425531914893614, + "acc_norm_stderr": 0.03208115750788684 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3855421686746988, + "acc_stderr": 0.037891344246115476, + "acc_norm": 0.3855421686746988, + "acc_norm_stderr": 0.037891344246115476 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5048231511254019, + "acc_stderr": 0.02839677044411129, + "acc_norm": 0.5048231511254019, + "acc_norm_stderr": 0.02839677044411129 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5426008968609866, + "acc_stderr": 0.033435777055830646, + "acc_norm": 0.5426008968609866, + "acc_norm_stderr": 0.033435777055830646 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4580152671755725, + "acc_stderr": 0.04369802690578756, + "acc_norm": 0.4580152671755725, + "acc_norm_stderr": 0.04369802690578756 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.03547601494006939, + "acc_norm": 0.5454545454545454, + 
"acc_norm_stderr": 0.03547601494006939 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3931034482758621, + "acc_stderr": 0.040703290137070705, + "acc_norm": 0.3931034482758621, + "acc_norm_stderr": 0.040703290137070705 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.042801058373643966, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.042801058373643966 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.46218487394957986, + "acc_stderr": 0.032385469487589795, + "acc_norm": 0.46218487394957986, + "acc_norm_stderr": 0.032385469487589795 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4641025641025641, + "acc_stderr": 0.02528558599001783, + "acc_norm": 0.4641025641025641, + "acc_norm_stderr": 0.02528558599001783 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.58, + "acc_stderr": 0.04960449637488583, + "acc_norm": 0.58, + "acc_norm_stderr": 0.04960449637488583 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5092592592592593, + "acc_stderr": 0.04832853553437056, + "acc_norm": 0.5092592592592593, + "acc_norm_stderr": 0.04832853553437056 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39408866995073893, + "acc_stderr": 0.034381579670365446, + "acc_norm": 0.39408866995073893, + "acc_norm_stderr": 0.034381579670365446 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4774193548387097, + "acc_stderr": 0.028414985019707868, + "acc_norm": 0.4774193548387097, + "acc_norm_stderr": 0.028414985019707868 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6239316239316239, + "acc_stderr": 0.03173393632969482, + "acc_norm": 0.6239316239316239, + "acc_norm_stderr": 0.03173393632969482 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4339622641509434, + "acc_stderr": 
0.0305032920133426, + "acc_norm": 0.4339622641509434, + "acc_norm_stderr": 0.0305032920133426 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5363636363636364, + "acc_stderr": 0.04776449162396197, + "acc_norm": 0.5363636363636364, + "acc_norm_stderr": 0.04776449162396197 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.02684205787383371, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.02684205787383371 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3443708609271523, + "acc_stderr": 0.038796870240733264, + "acc_norm": 0.3443708609271523, + "acc_norm_stderr": 0.038796870240733264 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5373134328358209, + "acc_stderr": 0.035256751674679745, + "acc_norm": 0.5373134328358209, + "acc_norm_stderr": 0.035256751674679745 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3872832369942196, + "acc_stderr": 0.03714325906302065, + "acc_norm": 0.3872832369942196, + "acc_norm_stderr": 0.03714325906302065 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.02326651221373057, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.02326651221373057 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3819444444444444, + "acc_stderr": 0.040629907841466674, + "acc_norm": 0.3819444444444444, + "acc_norm_stderr": 0.040629907841466674 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.63, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.63, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5433526011560693, + "acc_stderr": 0.026817718130348916, + "acc_norm": 0.5433526011560693, + "acc_norm_stderr": 0.026817718130348916 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 
0.49693251533742333, + "acc_stderr": 0.03928297078179663, + "acc_norm": 0.49693251533742333, + "acc_norm_stderr": 0.03928297078179663 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.027815973433878014, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.027815973433878014 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.533678756476684, + "acc_stderr": 0.03600244069867178, + "acc_norm": 0.533678756476684, + "acc_norm_stderr": 0.03600244069867178 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.0409698513984367, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.0409698513984367 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5339449541284403, + "acc_stderr": 0.021387863350353996, + "acc_norm": 0.5339449541284403, + "acc_norm_stderr": 0.021387863350353996 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.29365079365079366, + "acc_stderr": 0.04073524322147125, + "acc_norm": 0.29365079365079366, + "acc_norm_stderr": 0.04073524322147125 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.027914055510467998, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.027914055510467998 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.41, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.41, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6611570247933884, + "acc_stderr": 0.04320767807536669, + "acc_norm": 0.6611570247933884, + "acc_norm_stderr": 0.04320767807536669 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.375, + "acc_stderr": 0.039397364351956274, + "acc_norm": 0.375, + "acc_norm_stderr": 0.039397364351956274 + }, + "harness|ko_mmlu_professional_psychology|5": { + 
"acc": 0.3758169934640523, + "acc_stderr": 0.019594021136577454, + "acc_norm": 0.3758169934640523, + "acc_norm_stderr": 0.019594021136577454 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3617021276595745, + "acc_stderr": 0.028663820147199502, + "acc_norm": 0.3617021276595745, + "acc_norm_stderr": 0.028663820147199502 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.23214285714285715, + "acc_stderr": 0.04007341809755806, + "acc_norm": 0.23214285714285715, + "acc_norm_stderr": 0.04007341809755806 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.33796296296296297, + "acc_stderr": 0.03225941352631295, + "acc_norm": 0.33796296296296297, + "acc_norm_stderr": 0.03225941352631295 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4264705882352941, + "acc_stderr": 0.030042615832714867, + "acc_norm": 0.4264705882352941, + "acc_norm_stderr": 0.030042615832714867 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.43673469387755104, + "acc_stderr": 0.03175195237583322, + "acc_norm": 0.43673469387755104, + "acc_norm_stderr": 0.03175195237583322 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5780590717299579, + "acc_stderr": 0.032148146302403695, + "acc_norm": 0.5780590717299579, + "acc_norm_stderr": 0.032148146302403695 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3578878748370274, + "acc_stderr": 0.012243563850490314, + "acc_norm": 0.3578878748370274, + 
"acc_norm_stderr": 0.012243563850490314 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4950980392156863, + "acc_stderr": 0.03509143375606785, + "acc_norm": 0.4950980392156863, + "acc_norm_stderr": 0.03509143375606785 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.49696969696969695, + "acc_stderr": 0.039042723414318574, + "acc_norm": 0.49696969696969695, + "acc_norm_stderr": 0.039042723414318574 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.30966952264381886, + "mc1_stderr": 0.016185744355144898, + "mc2": 0.47261381793247165, + "mc2_stderr": 0.015304094243943349 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.43919716646989376, + "acc_stderr": 0.017062775744780705, + "acc_norm": 0.5159386068476978, + "acc_norm_stderr": 0.017181617837190195 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "msy127/mnsim-dpo-peftmerged-2-eos", + "model_sha": "d7cd607c37171ec7f3409b6db60aad727b055ff2", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/mu0gum/AIFT-42dot-LLM-PLM-1.3B-ao-instruct-all-v0.2/result_2024-01-16 12:36:05.json 
b/mu0gum/AIFT-42dot-LLM-PLM-1.3B-ao-instruct-all-v0.2/result_2024-01-16 12:36:05.json new file mode 100644 index 0000000000000000000000000000000000000000..26e6d8eedd134db6488be5f21407de8c9300a100 --- /dev/null +++ b/mu0gum/AIFT-42dot-LLM-PLM-1.3B-ao-instruct-all-v0.2/result_2024-01-16 12:36:05.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2645051194539249, + "acc_stderr": 0.012889272949313364, + "acc_norm": 0.32337883959044367, + "acc_norm_stderr": 0.013669421630012123 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3542123083051185, + "acc_stderr": 0.004772964697941343, + "acc_norm": 0.4486158135829516, + "acc_norm_stderr": 0.004963362085275564 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.03188578017686398, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.03188578017686398 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.20388349514563106, + "acc_stderr": 0.039891398595317706, + "acc_norm": 0.20388349514563106, + "acc_norm_stderr": 0.039891398595317706 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.22349936143039592, + "acc_stderr": 0.01489723522945071, + "acc_norm": 0.22349936143039592, + "acc_norm_stderr": 0.01489723522945071 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.1925925925925926, + "acc_stderr": 0.03406542058502652, + "acc_norm": 0.1925925925925926, + "acc_norm_stderr": 0.03406542058502652 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.32340425531914896, + "acc_stderr": 0.030579442773610337, + "acc_norm": 0.32340425531914896, + "acc_norm_stderr": 0.030579442773610337 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3373493975903614, + "acc_stderr": 0.0368078369072758, + "acc_norm": 0.3373493975903614, + "acc_norm_stderr": 0.0368078369072758 + }, + 
"harness|ko_mmlu_philosophy|5": { + "acc": 0.24437299035369775, + "acc_stderr": 0.024406162094668903, + "acc_norm": 0.24437299035369775, + "acc_norm_stderr": 0.024406162094668903 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.24663677130044842, + "acc_stderr": 0.028930413120910884, + "acc_norm": 0.24663677130044842, + "acc_norm_stderr": 0.028930413120910884 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2595419847328244, + "acc_stderr": 0.03844876139785271, + "acc_norm": 0.2595419847328244, + "acc_norm_stderr": 0.03844876139785271 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909284, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909284 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.25757575757575757, + "acc_stderr": 0.03115626951964684, + "acc_norm": 0.25757575757575757, + "acc_norm_stderr": 0.03115626951964684 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2206896551724138, + "acc_stderr": 0.03455930201924811, + "acc_norm": 0.2206896551724138, + "acc_norm_stderr": 0.03455930201924811 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.04158307533083286, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.04158307533083286 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.2815126050420168, + "acc_stderr": 0.029213549414372177, + "acc_norm": 0.2815126050420168, + "acc_norm_stderr": 0.029213549414372177 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.23076923076923078, + "acc_stderr": 0.021362027725222735, + "acc_norm": 0.23076923076923078, + "acc_norm_stderr": 0.021362027725222735 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816505, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816505 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 
0.04351941398892446 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.28703703703703703, + "acc_stderr": 0.043733130409147614, + "acc_norm": 0.28703703703703703, + "acc_norm_stderr": 0.043733130409147614 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2660098522167488, + "acc_stderr": 0.031089826002937523, + "acc_norm": 0.2660098522167488, + "acc_norm_stderr": 0.031089826002937523 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2064516129032258, + "acc_stderr": 0.023025899617188695, + "acc_norm": 0.2064516129032258, + "acc_norm_stderr": 0.023025899617188695 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.23076923076923078, + "acc_stderr": 0.02760192138141759, + "acc_norm": 0.23076923076923078, + "acc_norm_stderr": 0.02760192138141759 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.20754716981132076, + "acc_stderr": 0.02495991802891127, + "acc_norm": 0.20754716981132076, + "acc_norm_stderr": 0.02495991802891127 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2545454545454545, + "acc_stderr": 0.041723430387053825, + "acc_norm": 0.2545454545454545, + "acc_norm_stderr": 0.041723430387053825 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.23703703703703705, + "acc_stderr": 0.02592887613276611, + "acc_norm": 0.23703703703703705, + "acc_norm_stderr": 0.02592887613276611 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.03710185726119996, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.03710185726119996 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.24378109452736318, + "acc_stderr": 0.030360490154014635, + "acc_norm": 0.24378109452736318, + "acc_norm_stderr": 0.030360490154014635 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.20809248554913296, + "acc_stderr": 0.030952890217749877, + "acc_norm": 0.20809248554913296, + "acc_norm_stderr": 0.030952890217749877 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 
0.25925925925925924, + "acc_stderr": 0.022569897074918424, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.022569897074918424 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2361111111111111, + "acc_stderr": 0.03551446610810826, + "acc_norm": 0.2361111111111111, + "acc_norm_stderr": 0.03551446610810826 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.23121387283236994, + "acc_stderr": 0.022698657167855713, + "acc_norm": 0.23121387283236994, + "acc_norm_stderr": 0.022698657167855713 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3067484662576687, + "acc_stderr": 0.03623089915724147, + "acc_norm": 0.3067484662576687, + "acc_norm_stderr": 0.03623089915724147 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.25617283950617287, + "acc_stderr": 0.024288533637726095, + "acc_norm": 0.25617283950617287, + "acc_norm_stderr": 0.024288533637726095 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.22279792746113988, + "acc_stderr": 0.03003114797764154, + "acc_norm": 0.22279792746113988, + "acc_norm_stderr": 0.03003114797764154 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.03999423879281336, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.03999423879281336 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.23853211009174313, + "acc_stderr": 0.018272575810231867, + "acc_norm": 0.23853211009174313, + "acc_norm_stderr": 0.018272575810231867 + }, + 
"harness|ko_mmlu_formal_logic|5": { + "acc": 0.24603174603174602, + "acc_stderr": 0.038522733649243156, + "acc_norm": 0.24603174603174602, + "acc_norm_stderr": 0.038522733649243156 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.025829163272757475, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.025829163272757475 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909281, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909281 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2892561983471074, + "acc_stderr": 0.041391127276354626, + "acc_norm": 0.2892561983471074, + "acc_norm_stderr": 0.041391127276354626 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.15789473684210525, + "acc_stderr": 0.029674167520101456, + "acc_norm": 0.15789473684210525, + "acc_norm_stderr": 0.029674167520101456 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.017848089574913222, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.017848089574913222 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.22695035460992907, + "acc_stderr": 0.024987106365642976, + "acc_norm": 0.22695035460992907, + "acc_norm_stderr": 0.024987106365642976 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.21428571428571427, + "acc_stderr": 0.03894641120044792, + "acc_norm": 0.21428571428571427, + "acc_norm_stderr": 0.03894641120044792 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2175925925925926, + "acc_stderr": 0.028139689444859683, + "acc_norm": 0.2175925925925926, + "acc_norm_stderr": 0.028139689444859683 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2737430167597765, + "acc_stderr": 0.014912413096372432, + "acc_norm": 0.2737430167597765, + "acc_norm_stderr": 0.014912413096372432 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + 
"acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.2426470588235294, + "acc_stderr": 0.02604066247420127, + "acc_norm": 0.2426470588235294, + "acc_norm_stderr": 0.02604066247420127 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2693877551020408, + "acc_stderr": 0.02840125202902294, + "acc_norm": 0.2693877551020408, + "acc_norm_stderr": 0.02840125202902294 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2742616033755274, + "acc_stderr": 0.02904133351059804, + "acc_norm": 0.2742616033755274, + "acc_norm_stderr": 0.02904133351059804 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.26401564537157757, + "acc_stderr": 0.011258435537723814, + "acc_norm": 0.26401564537157757, + "acc_norm_stderr": 0.011258435537723814 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.02977177522814565, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.02977177522814565 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.296969696969697, + "acc_stderr": 0.035679697722680474, + "acc_norm": 0.296969696969697, + "acc_norm_stderr": 0.035679697722680474 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.25703794369645044, + "mc1_stderr": 0.015298077509485081, + "mc2": 0.39878418673610283, + "mc2_stderr": 0.014922738214635979 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2597402597402597, + "acc_stderr": 0.01507566641123031, + "acc_norm": 0.3695395513577332, + "acc_norm_stderr": 0.016594883405685414 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + 
"harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + 
"harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "mu0gum/AIFT-42dot-LLM-PLM-1.3B-ao-instruct-all-v0.2", + "model_sha": "7e755b1173f0eed9d4d9d649354c9c8c1d46e462", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/mu0gum/AIFT-42dot-LLM-PLM-1.3B-instruct-slim-v1.5/result_2024-01-15 18:26:50.json b/mu0gum/AIFT-42dot-LLM-PLM-1.3B-instruct-slim-v1.5/result_2024-01-15 18:26:50.json new file mode 100644 index 0000000000000000000000000000000000000000..23556cc95064c1ebc93123147d8a10726fe7c74c --- /dev/null +++ b/mu0gum/AIFT-42dot-LLM-PLM-1.3B-instruct-slim-v1.5/result_2024-01-15 18:26:50.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.25170648464163825, + "acc_stderr": 0.012682496334042968, + "acc_norm": 0.31569965870307165, + "acc_norm_stderr": 0.013582571095815291 + }, + "harness|ko_hellaswag|10": { + "acc": 0.35012945628360886, + "acc_stderr": 0.004760354191370863, + "acc_norm": 0.4372634933280223, + "acc_norm_stderr": 0.0049503473337018334 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.17543859649122806, + "acc_stderr": 0.029170885500727665, + "acc_norm": 0.17543859649122806, + "acc_norm_stderr": 0.029170885500727665 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.3592233009708738, + "acc_stderr": 0.04750458399041694, + "acc_norm": 0.3592233009708738, + "acc_norm_stderr": 0.04750458399041694 + }, + 
"harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2081736909323116, + "acc_stderr": 0.014518592248904033, + "acc_norm": 0.2081736909323116, + "acc_norm_stderr": 0.014518592248904033 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.22962962962962963, + "acc_stderr": 0.036333844140734636, + "acc_norm": 0.22962962962962963, + "acc_norm_stderr": 0.036333844140734636 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.20851063829787234, + "acc_stderr": 0.026556982117838742, + "acc_norm": 0.20851063829787234, + "acc_norm_stderr": 0.026556982117838742 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.29518072289156627, + "acc_stderr": 0.035509201856896294, + "acc_norm": 0.29518072289156627, + "acc_norm_stderr": 0.035509201856896294 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.24115755627009647, + "acc_stderr": 0.024296594034763426, + "acc_norm": 0.24115755627009647, + "acc_norm_stderr": 0.024296594034763426 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.11659192825112108, + "acc_stderr": 0.021539639816244464, + "acc_norm": 0.11659192825112108, + "acc_norm_stderr": 0.021539639816244464 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.25190839694656486, + "acc_stderr": 0.03807387116306085, + "acc_norm": 0.25190839694656486, + "acc_norm_stderr": 0.03807387116306085 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909284, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909284 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.35858585858585856, + "acc_stderr": 0.03416903640391521, + "acc_norm": 0.35858585858585856, + "acc_norm_stderr": 0.03416903640391521 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.25517241379310346, + "acc_stderr": 0.03632984052707842, + "acc_norm": 0.25517241379310346, + "acc_norm_stderr": 
0.03632984052707842 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3627450980392157, + "acc_stderr": 0.047840607041056527, + "acc_norm": 0.3627450980392157, + "acc_norm_stderr": 0.047840607041056527 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3445378151260504, + "acc_stderr": 0.030868682604121622, + "acc_norm": 0.3445378151260504, + "acc_norm_stderr": 0.030868682604121622 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.36153846153846153, + "acc_stderr": 0.024359581465396983, + "acc_norm": 0.36153846153846153, + "acc_norm_stderr": 0.024359581465396983 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.18, + "acc_stderr": 0.038612291966536955, + "acc_norm": 0.18, + "acc_norm_stderr": 0.038612291966536955 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036845, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036845 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.21296296296296297, + "acc_stderr": 0.03957835471980981, + "acc_norm": 0.21296296296296297, + "acc_norm_stderr": 0.03957835471980981 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.29064039408866993, + "acc_stderr": 0.03194740072265541, + "acc_norm": 0.29064039408866993, + "acc_norm_stderr": 0.03194740072265541 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3161290322580645, + "acc_stderr": 0.026450874489042764, + "acc_norm": 0.3161290322580645, + "acc_norm_stderr": 0.026450874489042764 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.19658119658119658, + "acc_stderr": 0.02603538609895129, + "acc_norm": 0.19658119658119658, + "acc_norm_stderr": 0.02603538609895129 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2981132075471698, + "acc_stderr": 0.028152837942493864, + "acc_norm": 0.2981132075471698, + "acc_norm_stderr": 0.028152837942493864 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.22727272727272727, + "acc_stderr": 0.040139645540727735, + 
"acc_norm": 0.22727272727272727, + "acc_norm_stderr": 0.040139645540727735 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2851851851851852, + "acc_stderr": 0.027528599210340492, + "acc_norm": 0.2851851851851852, + "acc_norm_stderr": 0.027528599210340492 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3443708609271523, + "acc_stderr": 0.03879687024073327, + "acc_norm": 0.3443708609271523, + "acc_norm_stderr": 0.03879687024073327 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.26865671641791045, + "acc_stderr": 0.03134328358208954, + "acc_norm": 0.26865671641791045, + "acc_norm_stderr": 0.03134328358208954 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3352601156069364, + "acc_stderr": 0.03599586301247078, + "acc_norm": 0.3352601156069364, + "acc_norm_stderr": 0.03599586301247078 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2671957671957672, + "acc_stderr": 0.02278967314577656, + "acc_norm": 0.2671957671957672, + "acc_norm_stderr": 0.02278967314577656 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2638888888888889, + "acc_stderr": 0.03685651095897532, + "acc_norm": 0.2638888888888889, + "acc_norm_stderr": 0.03685651095897532 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.22254335260115607, + "acc_stderr": 0.02239421566194282, + "acc_norm": 0.22254335260115607, + "acc_norm_stderr": 0.02239421566194282 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2331288343558282, + "acc_stderr": 0.0332201579577674, + "acc_norm": 0.2331288343558282, + "acc_norm_stderr": 0.0332201579577674 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.25, + "acc_stderr": 
0.02409347123262133, + "acc_norm": 0.25, + "acc_norm_stderr": 0.02409347123262133 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.36787564766839376, + "acc_stderr": 0.034801756684660366, + "acc_norm": 0.36787564766839376, + "acc_norm_stderr": 0.034801756684660366 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.039994238792813365, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.039994238792813365 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3431192660550459, + "acc_stderr": 0.020354777736086037, + "acc_norm": 0.3431192660550459, + "acc_norm_stderr": 0.020354777736086037 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.36507936507936506, + "acc_stderr": 0.04306241259127153, + "acc_norm": 0.36507936507936506, + "acc_norm_stderr": 0.04306241259127153 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.0256468630971379, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.0256468630971379 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.14049586776859505, + "acc_stderr": 0.031722334260021585, + "acc_norm": 0.14049586776859505, + "acc_norm_stderr": 0.031722334260021585 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3355263157894737, + "acc_stderr": 0.038424985593952694, + "acc_norm": 0.3355263157894737, + "acc_norm_stderr": 0.038424985593952694 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.21895424836601307, + "acc_stderr": 0.016729937565537537, + "acc_norm": 0.21895424836601307, + "acc_norm_stderr": 0.016729937565537537 + }, + "harness|ko_mmlu_professional_accounting|5": { + 
"acc": 0.22695035460992907, + "acc_stderr": 0.02498710636564297, + "acc_norm": 0.22695035460992907, + "acc_norm_stderr": 0.02498710636564297 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.17857142857142858, + "acc_stderr": 0.036352091215778065, + "acc_norm": 0.17857142857142858, + "acc_norm_stderr": 0.036352091215778065 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.0340470532865388, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.0340470532865388 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27262569832402234, + "acc_stderr": 0.014893391735249608, + "acc_norm": 0.27262569832402234, + "acc_norm_stderr": 0.014893391735249608 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.17, + "acc_stderr": 0.03775251680686371, + "acc_norm": 0.17, + "acc_norm_stderr": 0.03775251680686371 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4485294117647059, + "acc_stderr": 0.030211479609121593, + "acc_norm": 0.4485294117647059, + "acc_norm_stderr": 0.030211479609121593 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4, + "acc_stderr": 0.03136250240935892, + "acc_norm": 0.4, + "acc_norm_stderr": 0.03136250240935892 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.20675105485232068, + "acc_stderr": 0.02636165166838909, + "acc_norm": 0.20675105485232068, + "acc_norm_stderr": 0.02636165166838909 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.24119947848761408, + "acc_stderr": 0.010926496102034952, + "acc_norm": 0.24119947848761408, + "acc_norm_stderr": 0.010926496102034952 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.030587591351604246, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.030587591351604246 + 
}, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.28484848484848485, + "acc_stderr": 0.03524390844511782, + "acc_norm": 0.28484848484848485, + "acc_norm_stderr": 0.03524390844511782 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2827417380660955, + "mc1_stderr": 0.015764770836777308, + "mc2": 0.4290345224674414, + "mc2_stderr": 0.014905683860832726 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.269185360094451, + "acc_stderr": 0.015249098024144526, + "acc_norm": 0.3435655253837072, + "acc_norm_stderr": 0.016327334806429145 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + 
"harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "mu0gum/AIFT-42dot-LLM-PLM-1.3B-instruct-slim-v1.5", + "model_sha": "4e2d01b68cd855b7066ebe481203434955d21dca", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/mu0gum/AIFT-42dot-LLM-PLM-ao-instruct-all-v0.3/result_2024-01-16 17:11:32.json b/mu0gum/AIFT-42dot-LLM-PLM-ao-instruct-all-v0.3/result_2024-01-16 17:11:32.json new file mode 100644 index 0000000000000000000000000000000000000000..cc897910db7c8982256b2dfbb4ff2de342b0c6bb --- /dev/null +++ b/mu0gum/AIFT-42dot-LLM-PLM-ao-instruct-all-v0.3/result_2024-01-16 17:11:32.json @@ -0,0 
+1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.25597269624573377, + "acc_stderr": 0.012753013241244521, + "acc_norm": 0.32593856655290104, + "acc_norm_stderr": 0.013697432466693239 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3549093806014738, + "acc_stderr": 0.004775079636567096, + "acc_norm": 0.4494124676359291, + "acc_norm_stderr": 0.004964177035221412 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.1871345029239766, + "acc_stderr": 0.029913127232368032, + "acc_norm": 0.1871345029239766, + "acc_norm_stderr": 0.029913127232368032 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.13592233009708737, + "acc_stderr": 0.03393295729761011, + "acc_norm": 0.13592233009708737, + "acc_norm_stderr": 0.03393295729761011 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.24521072796934865, + "acc_stderr": 0.015384352284543943, + "acc_norm": 0.24521072796934865, + "acc_norm_stderr": 0.015384352284543943 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2074074074074074, + "acc_stderr": 0.03502553170678315, + "acc_norm": 0.2074074074074074, + "acc_norm_stderr": 0.03502553170678315 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3021276595744681, + "acc_stderr": 0.03001755447188056, + "acc_norm": 0.3021276595744681, + "acc_norm_stderr": 0.03001755447188056 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3132530120481928, + "acc_stderr": 0.036108050180310235, + "acc_norm": 0.3132530120481928, + "acc_norm_stderr": 0.036108050180310235 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.28938906752411575, + "acc_stderr": 0.025755865922632928, + "acc_norm": 0.28938906752411575, + "acc_norm_stderr": 0.025755865922632928 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.23766816143497757, + "acc_stderr": 0.028568079464714284, + "acc_norm": 0.23766816143497757, + 
"acc_norm_stderr": 0.028568079464714284 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2824427480916031, + "acc_stderr": 0.03948406125768362, + "acc_norm": 0.2824427480916031, + "acc_norm_stderr": 0.03948406125768362 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.03358618145732523, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.03358618145732523 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2413793103448276, + "acc_stderr": 0.03565998174135302, + "acc_norm": 0.2413793103448276, + "acc_norm_stderr": 0.03565998174135302 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.04220773659171455, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.04220773659171455 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.2689075630252101, + "acc_stderr": 0.028801392193631273, + "acc_norm": 0.2689075630252101, + "acc_norm_stderr": 0.028801392193631273 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3128205128205128, + "acc_stderr": 0.023507579020645347, + "acc_norm": 0.3128205128205128, + "acc_norm_stderr": 0.023507579020645347 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.18, + "acc_stderr": 0.03861229196653695, + "acc_norm": 0.18, + "acc_norm_stderr": 0.03861229196653695 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.25, + "acc_stderr": 0.04186091791394607, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04186091791394607 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.20689655172413793, + "acc_stderr": 0.02850137816789395, + "acc_norm": 0.20689655172413793, + 
"acc_norm_stderr": 0.02850137816789395 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.23548387096774193, + "acc_stderr": 0.024137632429337703, + "acc_norm": 0.23548387096774193, + "acc_norm_stderr": 0.024137632429337703 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.20512820512820512, + "acc_stderr": 0.02645350805404033, + "acc_norm": 0.20512820512820512, + "acc_norm_stderr": 0.02645350805404033 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.27169811320754716, + "acc_stderr": 0.027377706624670713, + "acc_norm": 0.27169811320754716, + "acc_norm_stderr": 0.027377706624670713 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2545454545454545, + "acc_stderr": 0.04172343038705383, + "acc_norm": 0.2545454545454545, + "acc_norm_stderr": 0.04172343038705383 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.22592592592592592, + "acc_stderr": 0.02549753263960954, + "acc_norm": 0.22592592592592592, + "acc_norm_stderr": 0.02549753263960954 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.03780445850526733, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.03780445850526733 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.26865671641791045, + "acc_stderr": 0.03134328358208954, + "acc_norm": 0.26865671641791045, + "acc_norm_stderr": 0.03134328358208954 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.20809248554913296, + "acc_stderr": 0.030952890217749884, + "acc_norm": 0.20809248554913296, + "acc_norm_stderr": 0.030952890217749884 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.25132275132275134, + "acc_stderr": 0.022340482339643895, + "acc_norm": 0.25132275132275134, + "acc_norm_stderr": 0.022340482339643895 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.24305555555555555, + "acc_stderr": 0.03586879280080341, + "acc_norm": 0.24305555555555555, + "acc_norm_stderr": 0.03586879280080341 + }, + "harness|ko_mmlu_college_chemistry|5": { 
+ "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.20520231213872833, + "acc_stderr": 0.021742519835276277, + "acc_norm": 0.20520231213872833, + "acc_norm_stderr": 0.021742519835276277 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2883435582822086, + "acc_stderr": 0.03559039531617342, + "acc_norm": 0.2883435582822086, + "acc_norm_stderr": 0.03559039531617342 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2654320987654321, + "acc_stderr": 0.024569223600460852, + "acc_norm": 0.2654320987654321, + "acc_norm_stderr": 0.024569223600460852 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.35233160621761656, + "acc_stderr": 0.034474782864143586, + "acc_norm": 0.35233160621761656, + "acc_norm_stderr": 0.034474782864143586 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.042270544512322, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.042270544512322 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.28256880733944956, + "acc_stderr": 0.01930424349770715, + "acc_norm": 0.28256880733944956, + "acc_norm_stderr": 0.01930424349770715 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.04134913018303316, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.04134913018303316 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.24183006535947713, + "acc_stderr": 0.024518195641879334, + "acc_norm": 0.24183006535947713, + "acc_norm_stderr": 0.024518195641879334 + }, + 
"harness|ko_mmlu_business_ethics|5": { + "acc": 0.19, + "acc_stderr": 0.039427724440366234, + "acc_norm": 0.19, + "acc_norm_stderr": 0.039427724440366234 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2892561983471074, + "acc_stderr": 0.041391127276354626, + "acc_norm": 0.2892561983471074, + "acc_norm_stderr": 0.041391127276354626 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.1513157894736842, + "acc_stderr": 0.029162631596843982, + "acc_norm": 0.1513157894736842, + "acc_norm_stderr": 0.029162631596843982 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2581699346405229, + "acc_stderr": 0.017704531653250075, + "acc_norm": 0.2581699346405229, + "acc_norm_stderr": 0.017704531653250075 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2730496453900709, + "acc_stderr": 0.02657786094330786, + "acc_norm": 0.2730496453900709, + "acc_norm_stderr": 0.02657786094330786 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.042878587513404544, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.042878587513404544 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.033247089118091176, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.033247089118091176 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27262569832402234, + "acc_stderr": 0.014893391735249608, + "acc_norm": 0.27262569832402234, + "acc_norm_stderr": 0.014893391735249608 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3602941176470588, + "acc_stderr": 0.029163128570670736, + "acc_norm": 0.3602941176470588, + 
"acc_norm_stderr": 0.029163128570670736 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.30612244897959184, + "acc_stderr": 0.02950489645459595, + "acc_norm": 0.30612244897959184, + "acc_norm_stderr": 0.02950489645459595 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2869198312236287, + "acc_stderr": 0.029443773022594693, + "acc_norm": 0.2869198312236287, + "acc_norm_stderr": 0.029443773022594693 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.24185136897001303, + "acc_stderr": 0.01093655081382707, + "acc_norm": 0.24185136897001303, + "acc_norm_stderr": 0.01093655081382707 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.030587591351604246, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.030587591351604246 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.03453131801885415, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.03453131801885415 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24969400244798043, + "mc1_stderr": 0.015152286907148125, + "mc2": 0.39393421388540395, + "mc2_stderr": 0.014833199927229288 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.24203069657615112, + "acc_stderr": 0.014725696750525333, + "acc_norm": 0.36481700118063753, + "acc_norm_stderr": 0.016550144337046588 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + 
"harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + 
"harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "mu0gum/AIFT-42dot-LLM-PLM-ao-instruct-all-v0.3", + "model_sha": "2152dcc9aa024ce305d318f98e44c5cca1d31ac2", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/mu0gum/AIFT-42dot_LLM-PLM-1.3B-ao-instruct-all-v0.4/result_2024-01-17 17:51:56.json b/mu0gum/AIFT-42dot_LLM-PLM-1.3B-ao-instruct-all-v0.4/result_2024-01-17 17:51:56.json new file mode 100644 index 0000000000000000000000000000000000000000..6be13e6d579a2eca67e6f1bed94cc487cc787d04 --- /dev/null +++ b/mu0gum/AIFT-42dot_LLM-PLM-1.3B-ao-instruct-all-v0.4/result_2024-01-17 17:51:56.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2687713310580205, + "acc_stderr": 0.012955065963710672, + "acc_norm": 0.3216723549488055, + "acc_norm_stderr": 0.013650488084494166 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3602867954590719, + "acc_stderr": 0.004791024004588015, + "acc_norm": 0.46016729735112527, + "acc_norm_stderr": 0.004973922192982244 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.29239766081871343, + "acc_stderr": 0.034886477134579215, + "acc_norm": 0.29239766081871343, + "acc_norm_stderr": 0.034886477134579215 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.1553398058252427, + "acc_stderr": 0.03586594738573974, + "acc_norm": 0.1553398058252427, + "acc_norm_stderr": 0.03586594738573974 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.23754789272030652, + "acc_stderr": 0.015218733046150191, + "acc_norm": 0.23754789272030652, + "acc_norm_stderr": 0.015218733046150191 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.22962962962962963, + "acc_stderr": 0.03633384414073465, + "acc_norm": 0.22962962962962963, + "acc_norm_stderr": 
0.03633384414073465 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2851063829787234, + "acc_stderr": 0.029513196625539355, + "acc_norm": 0.2851063829787234, + "acc_norm_stderr": 0.029513196625539355 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3373493975903614, + "acc_stderr": 0.03680783690727581, + "acc_norm": 0.3373493975903614, + "acc_norm_stderr": 0.03680783690727581 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.20257234726688103, + "acc_stderr": 0.022827317491059682, + "acc_norm": 0.20257234726688103, + "acc_norm_stderr": 0.022827317491059682 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.32286995515695066, + "acc_stderr": 0.03138147637575499, + "acc_norm": 0.32286995515695066, + "acc_norm_stderr": 0.03138147637575499 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2595419847328244, + "acc_stderr": 0.03844876139785271, + "acc_norm": 0.2595419847328244, + "acc_norm_stderr": 0.03844876139785271 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.17676767676767677, + "acc_stderr": 0.027178752639044915, + "acc_norm": 0.17676767676767677, + "acc_norm_stderr": 0.027178752639044915 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.23448275862068965, + "acc_stderr": 0.035306258743465914, + "acc_norm": 0.23448275862068965, + "acc_norm_stderr": 0.035306258743465914 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.04220773659171453, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.04220773659171453 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.24369747899159663, + "acc_stderr": 0.027886828078380554, + "acc_norm": 
0.24369747899159663, + "acc_norm_stderr": 0.027886828078380554 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.19487179487179487, + "acc_stderr": 0.020083167595181393, + "acc_norm": 0.19487179487179487, + "acc_norm_stderr": 0.020083167595181393 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.25, + "acc_stderr": 0.04186091791394607, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04186091791394607 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.1921182266009852, + "acc_stderr": 0.027719315709614778, + "acc_norm": 0.1921182266009852, + "acc_norm_stderr": 0.027719315709614778 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.1870967741935484, + "acc_stderr": 0.022185710092252255, + "acc_norm": 0.1870967741935484, + "acc_norm_stderr": 0.022185710092252255 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2692307692307692, + "acc_stderr": 0.02905858830374884, + "acc_norm": 0.2692307692307692, + "acc_norm_stderr": 0.02905858830374884 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2037735849056604, + "acc_stderr": 0.02479078450177541, + "acc_norm": 0.2037735849056604, + "acc_norm_stderr": 0.02479078450177541 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.24545454545454545, + "acc_stderr": 0.04122066502878284, + "acc_norm": 0.24545454545454545, + "acc_norm_stderr": 0.04122066502878284 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.026719240783712152, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.026719240783712152 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.24503311258278146, + "acc_stderr": 
0.035118075718047245, + "acc_norm": 0.24503311258278146, + "acc_norm_stderr": 0.035118075718047245 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.2835820895522388, + "acc_stderr": 0.03187187537919796, + "acc_norm": 0.2835820895522388, + "acc_norm_stderr": 0.03187187537919796 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.21965317919075145, + "acc_stderr": 0.031568093627031744, + "acc_norm": 0.21965317919075145, + "acc_norm_stderr": 0.031568093627031744 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.20899470899470898, + "acc_stderr": 0.020940481565334845, + "acc_norm": 0.20899470899470898, + "acc_norm_stderr": 0.020940481565334845 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.03745554791462457, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.03745554791462457 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768077, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768077 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.24277456647398843, + "acc_stderr": 0.023083658586984197, + "acc_norm": 0.24277456647398843, + "acc_norm_stderr": 0.023083658586984197 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3006134969325153, + "acc_stderr": 0.0360251131880677, + "acc_norm": 0.3006134969325153, + "acc_norm_stderr": 0.0360251131880677 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.23765432098765432, + "acc_stderr": 0.023683591837008553, + "acc_norm": 0.23765432098765432, + "acc_norm_stderr": 0.023683591837008553 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 
0.20207253886010362, + "acc_stderr": 0.02897908979429673, + "acc_norm": 0.20207253886010362, + "acc_norm_stderr": 0.02897908979429673 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.04049339297748142, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.04049339297748142 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.1908256880733945, + "acc_stderr": 0.016847676400091088, + "acc_norm": 0.1908256880733945, + "acc_norm_stderr": 0.016847676400091088 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.18253968253968253, + "acc_stderr": 0.03455071019102148, + "acc_norm": 0.18253968253968253, + "acc_norm_stderr": 0.03455071019102148 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.238562091503268, + "acc_stderr": 0.024404394928087866, + "acc_norm": 0.238562091503268, + "acc_norm_stderr": 0.024404394928087866 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816505, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816505 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.3305785123966942, + "acc_stderr": 0.04294340845212094, + "acc_norm": 0.3305785123966942, + "acc_norm_stderr": 0.04294340845212094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.20394736842105263, + "acc_stderr": 0.0327900040631005, + "acc_norm": 0.20394736842105263, + "acc_norm_stderr": 0.0327900040631005 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.017848089574913222, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.017848089574913222 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2765957446808511, + "acc_stderr": 0.026684564340461004, + "acc_norm": 0.2765957446808511, + "acc_norm_stderr": 0.026684564340461004 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.04547960999764376, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.04547960999764376 + 
}, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.031415546294025425, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.031415546294025425 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27262569832402234, + "acc_stderr": 0.014893391735249608, + "acc_norm": 0.27262569832402234, + "acc_norm_stderr": 0.014893391735249608 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.23, + "acc_stderr": 0.042295258468165044, + "acc_norm": 0.23, + "acc_norm_stderr": 0.042295258468165044 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.23897058823529413, + "acc_stderr": 0.02590528064489301, + "acc_norm": 0.23897058823529413, + "acc_norm_stderr": 0.02590528064489301 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.22857142857142856, + "acc_stderr": 0.02688214492230774, + "acc_norm": 0.22857142857142856, + "acc_norm_stderr": 0.02688214492230774 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2742616033755274, + "acc_stderr": 0.029041333510598042, + "acc_norm": 0.2742616033755274, + "acc_norm_stderr": 0.029041333510598042 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.258148631029987, + "acc_stderr": 0.01117692371931339, + "acc_norm": 0.258148631029987, + "acc_norm_stderr": 0.01117692371931339 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.028867431449849303, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.028867431449849303 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.21212121212121213, + "acc_stderr": 0.03192271569548299, + "acc_norm": 0.21212121212121213, + "acc_norm_stderr": 0.03192271569548299 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.26438188494492043, + "mc1_stderr": 
0.015438211119522514, + "mc2": 0.4014873627610221, + "mc2_stderr": 0.01527136029083612 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2526564344746163, + "acc_stderr": 0.014939640598798426, + "acc_norm": 0.3447461629279811, + "acc_norm_stderr": 0.016340649905418683 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + 
"harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "mu0gum/AIFT-42dot_LLM-PLM-1.3B-ao-instruct-all-v0.4", + "model_sha": "530bae203e9677d471dea8cad1f9d5c67478c976", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/mu0gum/AIFT-42dot_LLM-PLM-1.3B-ao-instruct-all-v0.5/result_2024-01-18 16:36:20.json b/mu0gum/AIFT-42dot_LLM-PLM-1.3B-ao-instruct-all-v0.5/result_2024-01-18 16:36:20.json new file mode 100644 index 0000000000000000000000000000000000000000..6980a5b6895e841f6c8580cc68934c8f95df4880 --- /dev/null +++ b/mu0gum/AIFT-42dot_LLM-PLM-1.3B-ao-instruct-all-v0.5/result_2024-01-18 16:36:20.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.26621160409556316, + "acc_stderr": 0.01291577478152322, + "acc_norm": 0.32337883959044367, + "acc_norm_stderr": 0.013669421630012123 + }, + "harness|ko_hellaswag|10": { + "acc": 
0.36317466640111534, + "acc_stderr": 0.004799317209902026, + "acc_norm": 0.4651463851822346, + "acc_norm_stderr": 0.004977643730848598 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.03615507630310933, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.03615507630310933 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.17475728155339806, + "acc_stderr": 0.037601780060266196, + "acc_norm": 0.17475728155339806, + "acc_norm_stderr": 0.037601780060266196 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.24010217113665389, + "acc_stderr": 0.015274685213734195, + "acc_norm": 0.24010217113665389, + "acc_norm_stderr": 0.015274685213734195 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2074074074074074, + "acc_stderr": 0.035025531706783165, + "acc_norm": 0.2074074074074074, + "acc_norm_stderr": 0.035025531706783165 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.28085106382978725, + "acc_stderr": 0.029379170464124818, + "acc_norm": 0.28085106382978725, + "acc_norm_stderr": 0.029379170464124818 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.2891566265060241, + "acc_stderr": 0.035294868015111155, + "acc_norm": 0.2891566265060241, + "acc_norm_stderr": 0.035294868015111155 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.18006430868167203, + "acc_stderr": 0.021823422857744947, + "acc_norm": 0.18006430868167203, + "acc_norm_stderr": 0.021823422857744947 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3094170403587444, + "acc_stderr": 0.031024411740572203, + "acc_norm": 0.3094170403587444, + "acc_norm_stderr": 0.031024411740572203 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.25190839694656486, + "acc_stderr": 0.03807387116306085, + "acc_norm": 0.25190839694656486, + "acc_norm_stderr": 0.03807387116306085 + }, + 
"harness|ko_mmlu_medical_genetics|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.1919191919191919, + "acc_stderr": 0.02805779167298901, + "acc_norm": 0.1919191919191919, + "acc_norm_stderr": 0.02805779167298901 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.23448275862068965, + "acc_stderr": 0.035306258743465914, + "acc_norm": 0.23448275862068965, + "acc_norm_stderr": 0.035306258743465914 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.04220773659171453, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.04220773659171453 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.23109243697478993, + "acc_stderr": 0.027381406927868966, + "acc_norm": 0.23109243697478993, + "acc_norm_stderr": 0.027381406927868966 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.19230769230769232, + "acc_stderr": 0.019982347208637313, + "acc_norm": 0.19230769230769232, + "acc_norm_stderr": 0.019982347208637313 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.19, + "acc_stderr": 0.03942772444036623, + "acc_norm": 0.19, + "acc_norm_stderr": 0.03942772444036623 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.24074074074074073, + "acc_stderr": 0.041331194402438376, + "acc_norm": 0.24074074074074073, + "acc_norm_stderr": 0.041331194402438376 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.22167487684729065, + "acc_stderr": 0.02922557589248962, + "acc_norm": 0.22167487684729065, + "acc_norm_stderr": 0.02922557589248962 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2032258064516129, + "acc_stderr": 0.022891687984554945, + "acc_norm": 0.2032258064516129, + 
"acc_norm_stderr": 0.022891687984554945 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2948717948717949, + "acc_stderr": 0.029872577708891162, + "acc_norm": 0.2948717948717949, + "acc_norm_stderr": 0.029872577708891162 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.20754716981132076, + "acc_stderr": 0.024959918028911274, + "acc_norm": 0.20754716981132076, + "acc_norm_stderr": 0.024959918028911274 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.24545454545454545, + "acc_stderr": 0.04122066502878284, + "acc_norm": 0.24545454545454545, + "acc_norm_stderr": 0.04122066502878284 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25555555555555554, + "acc_stderr": 0.026593939101844065, + "acc_norm": 0.25555555555555554, + "acc_norm_stderr": 0.026593939101844065 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.24503311258278146, + "acc_stderr": 0.03511807571804723, + "acc_norm": 0.24503311258278146, + "acc_norm_stderr": 0.03511807571804723 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.2537313432835821, + "acc_stderr": 0.03076944496729601, + "acc_norm": 0.2537313432835821, + "acc_norm_stderr": 0.03076944496729601 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2023121387283237, + "acc_stderr": 0.030631145539198816, + "acc_norm": 0.2023121387283237, + "acc_norm_stderr": 0.030631145539198816 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.25132275132275134, + "acc_stderr": 0.022340482339643895, + "acc_norm": 0.25132275132275134, + "acc_norm_stderr": 0.022340482339643895 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.22916666666666666, + "acc_stderr": 0.03514697467862388, + "acc_norm": 0.22916666666666666, + "acc_norm_stderr": 0.03514697467862388 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.28, + "acc_stderr": 
0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2514450867052023, + "acc_stderr": 0.023357365785874027, + "acc_norm": 0.2514450867052023, + "acc_norm_stderr": 0.023357365785874027 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2822085889570552, + "acc_stderr": 0.03536117886664743, + "acc_norm": 0.2822085889570552, + "acc_norm_stderr": 0.03536117886664743 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.22839506172839505, + "acc_stderr": 0.023358211840626267, + "acc_norm": 0.22839506172839505, + "acc_norm_stderr": 0.023358211840626267 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.20207253886010362, + "acc_stderr": 0.02897908979429673, + "acc_norm": 0.20207253886010362, + "acc_norm_stderr": 0.02897908979429673 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.04185774424022056, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.04185774424022056 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.20550458715596331, + "acc_stderr": 0.01732435232501601, + "acc_norm": 0.20550458715596331, + "acc_norm_stderr": 0.01732435232501601 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.23015873015873015, + "acc_stderr": 0.03764950879790606, + "acc_norm": 0.23015873015873015, + "acc_norm_stderr": 0.03764950879790606 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.025261691219729494, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.025261691219729494 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.3140495867768595, 
+ "acc_stderr": 0.04236964753041019, + "acc_norm": 0.3140495867768595, + "acc_norm_stderr": 0.04236964753041019 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.19736842105263158, + "acc_stderr": 0.03238981601699397, + "acc_norm": 0.19736842105263158, + "acc_norm_stderr": 0.03238981601699397 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2630718954248366, + "acc_stderr": 0.017812676542320657, + "acc_norm": 0.2630718954248366, + "acc_norm_stderr": 0.017812676542320657 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.26595744680851063, + "acc_stderr": 0.026358065698880585, + "acc_norm": 0.26595744680851063, + "acc_norm_stderr": 0.026358065698880585 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.30357142857142855, + "acc_stderr": 0.04364226155841044, + "acc_norm": 0.30357142857142855, + "acc_norm_stderr": 0.04364226155841044 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.28703703703703703, + "acc_stderr": 0.03085199299325701, + "acc_norm": 0.28703703703703703, + "acc_norm_stderr": 0.03085199299325701 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27262569832402234, + "acc_stderr": 0.014893391735249608, + "acc_norm": 0.27262569832402234, + "acc_norm_stderr": 0.014893391735249608 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.2867647058823529, + "acc_stderr": 0.02747227447323382, + "acc_norm": 0.2867647058823529, + "acc_norm_stderr": 0.02747227447323382 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.22040816326530613, + "acc_stderr": 0.02653704531214529, + "acc_norm": 0.22040816326530613, + "acc_norm_stderr": 0.02653704531214529 + }, + 
"harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.24050632911392406, + "acc_stderr": 0.027820781981149675, + "acc_norm": 0.24050632911392406, + "acc_norm_stderr": 0.027820781981149675 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2653194263363755, + "acc_stderr": 0.011276198843958881, + "acc_norm": 0.2653194263363755, + "acc_norm_stderr": 0.011276198843958881 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.27941176470588236, + "acc_stderr": 0.031493281045079556, + "acc_norm": 0.27941176470588236, + "acc_norm_stderr": 0.031493281045079556 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2727272727272727, + "acc_stderr": 0.0347769116216366, + "acc_norm": 0.2727272727272727, + "acc_norm_stderr": 0.0347769116216366 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2631578947368421, + "mc1_stderr": 0.015415241740237024, + "mc2": 0.4069793268503652, + "mc2_stderr": 0.015205684447857306 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.24557260920897284, + "acc_stderr": 0.01479835715497282, + "acc_norm": 0.30342384887839435, + "acc_norm_stderr": 0.01580607271790957 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 
1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "mu0gum/AIFT-42dot_LLM-PLM-1.3B-ao-instruct-all-v0.5", + "model_sha": "f0fee9238d20b0de969734c895e97f3b85b2f7da", + 
"model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/mu0gum/AIFT-42dot_LLM-PLM-1.3B-ao-instruct-all-v0.52/result_2024-01-20 00:34:29.json b/mu0gum/AIFT-42dot_LLM-PLM-1.3B-ao-instruct-all-v0.52/result_2024-01-20 00:34:29.json new file mode 100644 index 0000000000000000000000000000000000000000..cedd45476966ad1158aa711da858187f8a993aa6 --- /dev/null +++ b/mu0gum/AIFT-42dot_LLM-PLM-1.3B-ao-instruct-all-v0.52/result_2024-01-20 00:34:29.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2721843003412969, + "acc_stderr": 0.013006600406423709, + "acc_norm": 0.3225255972696246, + "acc_norm_stderr": 0.013659980894277371 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3635729934276041, + "acc_stderr": 0.004800446397653339, + "acc_norm": 0.46415056761601275, + "acc_norm_stderr": 0.004976939333240074 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.034462962170884265, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.034462962170884265 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.1941747572815534, + "acc_stderr": 0.03916667762822583, + "acc_norm": 0.1941747572815534, + "acc_norm_stderr": 0.03916667762822583 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.23116219667943805, + "acc_stderr": 0.015075523238101098, + "acc_norm": 0.23116219667943805, + "acc_norm_stderr": 0.015075523238101098 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2740740740740741, + "acc_stderr": 0.03853254836552004, + "acc_norm": 0.2740740740740741, + "acc_norm_stderr": 0.03853254836552004 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.32340425531914896, + "acc_stderr": 
0.030579442773610344, + "acc_norm": 0.32340425531914896, + "acc_norm_stderr": 0.030579442773610344 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3072289156626506, + "acc_stderr": 0.03591566797824664, + "acc_norm": 0.3072289156626506, + "acc_norm_stderr": 0.03591566797824664 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2540192926045016, + "acc_stderr": 0.02472386150477169, + "acc_norm": 0.2540192926045016, + "acc_norm_stderr": 0.02472386150477169 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3452914798206278, + "acc_stderr": 0.03191100192835795, + "acc_norm": 0.3452914798206278, + "acc_norm_stderr": 0.03191100192835795 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.32061068702290074, + "acc_stderr": 0.04093329229834277, + "acc_norm": 0.32061068702290074, + "acc_norm_stderr": 0.04093329229834277 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.22727272727272727, + "acc_stderr": 0.029857515673386407, + "acc_norm": 0.22727272727272727, + "acc_norm_stderr": 0.029857515673386407 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.22758620689655173, + "acc_stderr": 0.03493950380131184, + "acc_norm": 0.22758620689655173, + "acc_norm_stderr": 0.03493950380131184 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.04023382273617746, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.04023382273617746 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.23109243697478993, + "acc_stderr": 0.02738140692786896, + "acc_norm": 0.23109243697478993, + "acc_norm_stderr": 0.02738140692786896 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.22564102564102564, + "acc_stderr": 0.021193632525148533, + "acc_norm": 0.22564102564102564, + "acc_norm_stderr": 0.021193632525148533 + }, + 
"harness|ko_mmlu_computer_security|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036846, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036846 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.04236511258094633, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.04236511258094633 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.22660098522167488, + "acc_stderr": 0.029454863835292982, + "acc_norm": 0.22660098522167488, + "acc_norm_stderr": 0.029454863835292982 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.24193548387096775, + "acc_stderr": 0.024362599693031086, + "acc_norm": 0.24193548387096775, + "acc_norm_stderr": 0.024362599693031086 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.26495726495726496, + "acc_stderr": 0.028911208802749482, + "acc_norm": 0.26495726495726496, + "acc_norm_stderr": 0.028911208802749482 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.24150943396226415, + "acc_stderr": 0.02634148037111835, + "acc_norm": 0.24150943396226415, + "acc_norm_stderr": 0.02634148037111835 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.20909090909090908, + "acc_stderr": 0.03895091015724137, + "acc_norm": 0.20909090909090908, + "acc_norm_stderr": 0.03895091015724137 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2518518518518518, + "acc_stderr": 0.026466117538959916, + "acc_norm": 0.2518518518518518, + "acc_norm_stderr": 0.026466117538959916 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + "acc_stderr": 0.03684881521389024, + "acc_norm": 0.2847682119205298, + "acc_norm_stderr": 0.03684881521389024 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.2835820895522388, + "acc_stderr": 0.031871875379197966, + "acc_norm": 0.2835820895522388, + 
"acc_norm_stderr": 0.031871875379197966 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.23699421965317918, + "acc_stderr": 0.03242414757483099, + "acc_norm": 0.23699421965317918, + "acc_norm_stderr": 0.03242414757483099 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2275132275132275, + "acc_stderr": 0.021591269407823774, + "acc_norm": 0.2275132275132275, + "acc_norm_stderr": 0.021591269407823774 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.25, + "acc_stderr": 0.03621034121889507, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03621034121889507 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.2, + "acc_stderr": 0.040201512610368445, + "acc_norm": 0.2, + "acc_norm_stderr": 0.040201512610368445 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2658959537572254, + "acc_stderr": 0.023786203255508277, + "acc_norm": 0.2658959537572254, + "acc_norm_stderr": 0.023786203255508277 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3006134969325153, + "acc_stderr": 0.03602511318806771, + "acc_norm": 0.3006134969325153, + "acc_norm_stderr": 0.03602511318806771 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2716049382716049, + "acc_stderr": 0.02474862449053738, + "acc_norm": 0.2716049382716049, + "acc_norm_stderr": 0.02474862449053738 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932269, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932269 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.2849740932642487, + "acc_stderr": 0.03257714077709659, + "acc_norm": 0.2849740932642487, + "acc_norm_stderr": 0.03257714077709659 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.040969851398436716, + "acc_norm": 0.2543859649122807, + 
"acc_norm_stderr": 0.040969851398436716 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.23669724770642203, + "acc_stderr": 0.01822407811729908, + "acc_norm": 0.23669724770642203, + "acc_norm_stderr": 0.01822407811729908 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.1984126984126984, + "acc_stderr": 0.035670166752768614, + "acc_norm": 0.1984126984126984, + "acc_norm_stderr": 0.035670166752768614 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2908496732026144, + "acc_stderr": 0.026004800363952113, + "acc_norm": 0.2908496732026144, + "acc_norm_stderr": 0.026004800363952113 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.36363636363636365, + "acc_stderr": 0.043913262867240704, + "acc_norm": 0.36363636363636365, + "acc_norm_stderr": 0.043913262867240704 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.16447368421052633, + "acc_stderr": 0.030167533468632723, + "acc_norm": 0.16447368421052633, + "acc_norm_stderr": 0.030167533468632723 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.26143790849673204, + "acc_stderr": 0.017776947157528044, + "acc_norm": 0.26143790849673204, + "acc_norm_stderr": 0.017776947157528044 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.28368794326241137, + "acc_stderr": 0.026891709428343957, + "acc_norm": 0.28368794326241137, + "acc_norm_stderr": 0.026891709428343957 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25892857142857145, + "acc_stderr": 0.04157751539865629, + "acc_norm": 0.25892857142857145, + "acc_norm_stderr": 0.04157751539865629 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.03114144782353604, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.03114144782353604 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27262569832402234, 
+ "acc_stderr": 0.014893391735249608, + "acc_norm": 0.27262569832402234, + "acc_norm_stderr": 0.014893391735249608 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.34191176470588236, + "acc_stderr": 0.028814722422254174, + "acc_norm": 0.34191176470588236, + "acc_norm_stderr": 0.028814722422254174 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2571428571428571, + "acc_stderr": 0.02797982353874455, + "acc_norm": 0.2571428571428571, + "acc_norm_stderr": 0.02797982353874455 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.21940928270042195, + "acc_stderr": 0.026939106581553945, + "acc_norm": 0.21940928270042195, + "acc_norm_stderr": 0.026939106581553945 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.26140808344198174, + "acc_stderr": 0.011222528169771316, + "acc_norm": 0.26140808344198174, + "acc_norm_stderr": 0.011222528169771316 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2696078431372549, + "acc_stderr": 0.031145570659486782, + "acc_norm": 0.2696078431372549, + "acc_norm_stderr": 0.031145570659486782 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2727272727272727, + "acc_stderr": 0.03477691162163659, + "acc_norm": 0.2727272727272727, + "acc_norm_stderr": 0.03477691162163659 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.26438188494492043, + "mc1_stderr": 0.015438211119522509, + "mc2": 0.41090031320132664, + "mc2_stderr": 0.0151126039506734 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2408500590318772, + "acc_stderr": 0.014701172662583905, + "acc_norm": 0.30932703659976385, + "acc_norm_stderr": 0.01589132050552089 + } + }, + 
"versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + 
"harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "mu0gum/AIFT-42dot_LLM-PLM-1.3B-ao-instruct-all-v0.52", + "model_sha": "56d58bf91af3d6ee644a48ebf5a739f6365217a3", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/mu0gum/AIFT-42dot_LLM-PLM-1.3B-ao-instruct-all-v0.9/result_2024-02-05 15:26:24.json b/mu0gum/AIFT-42dot_LLM-PLM-1.3B-ao-instruct-all-v0.9/result_2024-02-05 15:26:24.json new file mode 100644 index 0000000000000000000000000000000000000000..6358c8d1f0c167bc74243db08f3744c6a379348d --- /dev/null +++ b/mu0gum/AIFT-42dot_LLM-PLM-1.3B-ao-instruct-all-v0.9/result_2024-02-05 15:26:24.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2738907849829352, + "acc_stderr": 0.013032004972989501, + "acc_norm": 0.33447098976109213, + "acc_norm_stderr": 0.013787460322441382 + }, + "harness|ko_hellaswag|10": { + "acc": 0.36446922923720376, + "acc_stderr": 0.004802974070507203, + "acc_norm": 0.46594303923521213, + "acc_norm_stderr": 0.004978192893406282 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.21637426900584794, + "acc_stderr": 0.03158149539338733, + "acc_norm": 0.21637426900584794, + 
"acc_norm_stderr": 0.03158149539338733 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2621359223300971, + "acc_stderr": 0.04354631077260597, + "acc_norm": 0.2621359223300971, + "acc_norm_stderr": 0.04354631077260597 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.24904214559386972, + "acc_stderr": 0.015464676163395967, + "acc_norm": 0.24904214559386972, + "acc_norm_stderr": 0.015464676163395967 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.21481481481481482, + "acc_stderr": 0.03547854198560826, + "acc_norm": 0.21481481481481482, + "acc_norm_stderr": 0.03547854198560826 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3191489361702128, + "acc_stderr": 0.030472973363380045, + "acc_norm": 0.3191489361702128, + "acc_norm_stderr": 0.030472973363380045 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3072289156626506, + "acc_stderr": 0.03591566797824664, + "acc_norm": 0.3072289156626506, + "acc_norm_stderr": 0.03591566797824664 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2347266881028939, + "acc_stderr": 0.024071805887677048, + "acc_norm": 0.2347266881028939, + "acc_norm_stderr": 0.024071805887677048 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.21973094170403587, + "acc_stderr": 0.027790177064383605, + "acc_norm": 0.21973094170403587, + "acc_norm_stderr": 0.027790177064383605 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.32061068702290074, + "acc_stderr": 0.04093329229834278, + "acc_norm": 0.32061068702290074, + "acc_norm_stderr": 0.04093329229834278 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3181818181818182, + "acc_stderr": 0.03318477333845331, + "acc_norm": 0.3181818181818182, + 
"acc_norm_stderr": 0.03318477333845331 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2620689655172414, + "acc_stderr": 0.03664666337225256, + "acc_norm": 0.2620689655172414, + "acc_norm_stderr": 0.03664666337225256 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.042801058373643945, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.042801058373643945 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.31932773109243695, + "acc_stderr": 0.0302839955258844, + "acc_norm": 0.31932773109243695, + "acc_norm_stderr": 0.0302839955258844 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.30512820512820515, + "acc_stderr": 0.023346335293325884, + "acc_norm": 0.30512820512820515, + "acc_norm_stderr": 0.023346335293325884 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.24074074074074073, + "acc_stderr": 0.04133119440243839, + "acc_norm": 0.24074074074074073, + "acc_norm_stderr": 0.04133119440243839 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2315270935960591, + "acc_stderr": 0.02967833314144444, + "acc_norm": 0.2315270935960591, + "acc_norm_stderr": 0.02967833314144444 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3064516129032258, + "acc_stderr": 0.026226485652553873, + "acc_norm": 0.3064516129032258, + "acc_norm_stderr": 0.026226485652553873 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.23931623931623933, + "acc_stderr": 0.027951826808924333, + "acc_norm": 0.23931623931623933, + "acc_norm_stderr": 0.027951826808924333 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.27169811320754716, + "acc_stderr": 
0.027377706624670713, + "acc_norm": 0.27169811320754716, + "acc_norm_stderr": 0.027377706624670713 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.24545454545454545, + "acc_stderr": 0.04122066502878284, + "acc_norm": 0.24545454545454545, + "acc_norm_stderr": 0.04122066502878284 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24814814814814815, + "acc_stderr": 0.0263357394040558, + "acc_norm": 0.24814814814814815, + "acc_norm_stderr": 0.0263357394040558 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33774834437086093, + "acc_stderr": 0.038615575462551684, + "acc_norm": 0.33774834437086093, + "acc_norm_stderr": 0.038615575462551684 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.2885572139303483, + "acc_stderr": 0.03203841040213321, + "acc_norm": 0.2885572139303483, + "acc_norm_stderr": 0.03203841040213321 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2832369942196532, + "acc_stderr": 0.03435568056047875, + "acc_norm": 0.2832369942196532, + "acc_norm_stderr": 0.03435568056047875 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2275132275132275, + "acc_stderr": 0.02159126940782378, + "acc_norm": 0.2275132275132275, + "acc_norm_stderr": 0.02159126940782378 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2708333333333333, + "acc_stderr": 0.037161774375660185, + "acc_norm": 0.2708333333333333, + "acc_norm_stderr": 0.037161774375660185 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816503, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816503 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.21965317919075145, + "acc_stderr": 0.0222896388526179, + "acc_norm": 0.21965317919075145, + "acc_norm_stderr": 0.0222896388526179 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 
0.24539877300613497, + "acc_stderr": 0.03380939813943354, + "acc_norm": 0.24539877300613497, + "acc_norm_stderr": 0.03380939813943354 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.24382716049382716, + "acc_stderr": 0.023891879541959607, + "acc_norm": 0.24382716049382716, + "acc_norm_stderr": 0.023891879541959607 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542129, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542129 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.33678756476683935, + "acc_stderr": 0.03410780251836184, + "acc_norm": 0.33678756476683935, + "acc_norm_stderr": 0.03410780251836184 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.04142439719489362, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.04142439719489362 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3394495412844037, + "acc_stderr": 0.02030210934266235, + "acc_norm": 0.3394495412844037, + "acc_norm_stderr": 0.02030210934266235 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.03932537680392871, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.03932537680392871 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.026568921015457162, + "acc_norm": 0.3137254901960784, + "acc_norm_stderr": 0.026568921015457162 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.38016528925619836, + "acc_stderr": 0.04431324501968432, + "acc_norm": 0.38016528925619836, + "acc_norm_stderr": 0.04431324501968432 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.17105263157894737, + "acc_stderr": 0.030643607071677098, + "acc_norm": 0.17105263157894737, + "acc_norm_stderr": 0.030643607071677098 + }, + 
"harness|ko_mmlu_professional_psychology|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.017401816711427653, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.017401816711427653 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.25177304964539005, + "acc_stderr": 0.0258921511567094, + "acc_norm": 0.25177304964539005, + "acc_norm_stderr": 0.0258921511567094 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.22321428571428573, + "acc_stderr": 0.039523019677025116, + "acc_norm": 0.22321428571428573, + "acc_norm_stderr": 0.039523019677025116 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.33796296296296297, + "acc_stderr": 0.03225941352631295, + "acc_norm": 0.33796296296296297, + "acc_norm_stderr": 0.03225941352631295 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27262569832402234, + "acc_stderr": 0.014893391735249608, + "acc_norm": 0.27262569832402234, + "acc_norm_stderr": 0.014893391735249608 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384739, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384739 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4117647058823529, + "acc_stderr": 0.029896163033125478, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.029896163033125478 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2816326530612245, + "acc_stderr": 0.02879518557429128, + "acc_norm": 0.2816326530612245, + "acc_norm_stderr": 0.02879518557429128 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2616033755274262, + "acc_stderr": 0.028609516716994934, + "acc_norm": 0.2616033755274262, + "acc_norm_stderr": 0.028609516716994934 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2301173402868318, + "acc_stderr": 
0.010750183177375562, + "acc_norm": 0.2301173402868318, + "acc_norm_stderr": 0.010750183177375562 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.25, + "acc_stderr": 0.03039153369274154, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03039153369274154 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2909090909090909, + "acc_stderr": 0.03546563019624336, + "acc_norm": 0.2909090909090909, + "acc_norm_stderr": 0.03546563019624336 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24969400244798043, + "mc1_stderr": 0.015152286907148125, + "mc2": 0.4136418052155051, + "mc2_stderr": 0.014886814233083138 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.26210153482880755, + "acc_stderr": 0.015119864670254146, + "acc_norm": 0.3317591499409681, + "acc_norm_stderr": 0.01618798464215732 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "mu0gum/AIFT-42dot_LLM-PLM-1.3B-ao-instruct-all-v0.9", + "model_sha": "6bec193ce75052480eb15dbe5f39cba21b729891", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/mu0gum/AIFT-42dot_LLM-PLM-1.3B-ao-instruct-all-v0.91/result_2024-02-12 00:56:36.json 
b/mu0gum/AIFT-42dot_LLM-PLM-1.3B-ao-instruct-all-v0.91/result_2024-02-12 00:56:36.json new file mode 100644 index 0000000000000000000000000000000000000000..194d2feb312848a5edccd43057f80c6ee15e06ca --- /dev/null +++ b/mu0gum/AIFT-42dot_LLM-PLM-1.3B-ao-instruct-all-v0.91/result_2024-02-12 00:56:36.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2841296928327645, + "acc_stderr": 0.013179442447653887, + "acc_norm": 0.3302047781569966, + "acc_norm_stderr": 0.013743085603760431 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3648675562636925, + "acc_stderr": 0.004804091708812546, + "acc_norm": 0.466938856801434, + "acc_norm_stderr": 0.004978861409119815 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.033773102522091945, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.033773102522091945 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.21359223300970873, + "acc_stderr": 0.04058042015646034, + "acc_norm": 0.21359223300970873, + "acc_norm_stderr": 0.04058042015646034 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.24648786717752236, + "acc_stderr": 0.015411308769686934, + "acc_norm": 0.24648786717752236, + "acc_norm_stderr": 0.015411308769686934 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2740740740740741, + "acc_stderr": 0.03853254836552003, + "acc_norm": 0.2740740740740741, + "acc_norm_stderr": 0.03853254836552003 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.34, + "acc_stderr": 0.047609522856952365, + "acc_norm": 0.34, + "acc_norm_stderr": 0.047609522856952365 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.26382978723404255, + "acc_stderr": 0.028809989854102973, + "acc_norm": 0.26382978723404255, + "acc_norm_stderr": 0.028809989854102973 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.30120481927710846, + "acc_stderr": 0.035716092300534796, + "acc_norm": 0.30120481927710846, + "acc_norm_stderr": 0.035716092300534796 + }, + 
"harness|ko_mmlu_philosophy|5": { + "acc": 0.2604501607717042, + "acc_stderr": 0.02492672322484555, + "acc_norm": 0.2604501607717042, + "acc_norm_stderr": 0.02492672322484555 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.2242152466367713, + "acc_stderr": 0.02799153425851954, + "acc_norm": 0.2242152466367713, + "acc_norm_stderr": 0.02799153425851954 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3053435114503817, + "acc_stderr": 0.040393149787245605, + "acc_norm": 0.3053435114503817, + "acc_norm_stderr": 0.040393149787245605 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.31313131313131315, + "acc_stderr": 0.033042050878136525, + "acc_norm": 0.31313131313131315, + "acc_norm_stderr": 0.033042050878136525 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2413793103448276, + "acc_stderr": 0.03565998174135302, + "acc_norm": 0.2413793103448276, + "acc_norm_stderr": 0.03565998174135302 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.04336432707993176, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.04336432707993176 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.31932773109243695, + "acc_stderr": 0.0302839955258844, + "acc_norm": 0.31932773109243695, + "acc_norm_stderr": 0.0302839955258844 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.29743589743589743, + "acc_stderr": 0.02317740813146594, + "acc_norm": 0.29743589743589743, + "acc_norm_stderr": 0.02317740813146594 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 
+ }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.21296296296296297, + "acc_stderr": 0.039578354719809805, + "acc_norm": 0.21296296296296297, + "acc_norm_stderr": 0.039578354719809805 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2512315270935961, + "acc_stderr": 0.030516530732694436, + "acc_norm": 0.2512315270935961, + "acc_norm_stderr": 0.030516530732694436 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3096774193548387, + "acc_stderr": 0.026302774983517414, + "acc_norm": 0.3096774193548387, + "acc_norm_stderr": 0.026302774983517414 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.24358974358974358, + "acc_stderr": 0.028120966503914404, + "acc_norm": 0.24358974358974358, + "acc_norm_stderr": 0.028120966503914404 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2792452830188679, + "acc_stderr": 0.02761116340239972, + "acc_norm": 0.2792452830188679, + "acc_norm_stderr": 0.02761116340239972 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.3, + "acc_stderr": 0.04389311454644286, + "acc_norm": 0.3, + "acc_norm_stderr": 0.04389311454644286 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.27037037037037037, + "acc_stderr": 0.027080372815145658, + "acc_norm": 0.27037037037037037, + "acc_norm_stderr": 0.027080372815145658 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3841059602649007, + "acc_stderr": 0.03971301814719198, + "acc_norm": 0.3841059602649007, + "acc_norm_stderr": 0.03971301814719198 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.2935323383084577, + "acc_stderr": 0.032200241045342054, + "acc_norm": 0.2935323383084577, + "acc_norm_stderr": 0.032200241045342054 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.23699421965317918, + "acc_stderr": 0.03242414757483098, + "acc_norm": 0.23699421965317918, + "acc_norm_stderr": 0.03242414757483098 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.022644212615525218, + 
"acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.022644212615525218 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.25, + "acc_stderr": 0.03621034121889507, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03621034121889507 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816503, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816503 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2658959537572254, + "acc_stderr": 0.023786203255508283, + "acc_norm": 0.2658959537572254, + "acc_norm_stderr": 0.023786203255508283 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3312883435582822, + "acc_stderr": 0.03697983910025588, + "acc_norm": 0.3312883435582822, + "acc_norm_stderr": 0.03697983910025588 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2808641975308642, + "acc_stderr": 0.025006469755799208, + "acc_norm": 0.2808641975308642, + "acc_norm_stderr": 0.025006469755799208 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.36787564766839376, + "acc_stderr": 0.034801756684660366, + "acc_norm": 0.36787564766839376, + "acc_norm_stderr": 0.034801756684660366 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.040969851398436716, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.040969851398436716 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3174311926605505, + "acc_stderr": 0.019957152198460497, + "acc_norm": 0.3174311926605505, + "acc_norm_stderr": 0.019957152198460497 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.0393253768039287, + "acc_norm": 
0.2619047619047619, + "acc_norm_stderr": 0.0393253768039287 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.0252616912197295, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.0252616912197295 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.21, + "acc_stderr": 0.04093601807403326, + "acc_norm": 0.21, + "acc_norm_stderr": 0.04093601807403326 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.34710743801652894, + "acc_stderr": 0.04345724570292535, + "acc_norm": 0.34710743801652894, + "acc_norm_stderr": 0.04345724570292535 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.21052631578947367, + "acc_stderr": 0.033176727875331574, + "acc_norm": 0.21052631578947367, + "acc_norm_stderr": 0.033176727875331574 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.24183006535947713, + "acc_stderr": 0.017322789207784326, + "acc_norm": 0.24183006535947713, + "acc_norm_stderr": 0.017322789207784326 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.26595744680851063, + "acc_stderr": 0.026358065698880592, + "acc_norm": 0.26595744680851063, + "acc_norm_stderr": 0.026358065698880592 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.21428571428571427, + "acc_stderr": 0.03894641120044792, + "acc_norm": 0.21428571428571427, + "acc_norm_stderr": 0.03894641120044792 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.03362277436608044, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.03362277436608044 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27262569832402234, + "acc_stderr": 0.014893391735249608, + "acc_norm": 0.27262569832402234, + "acc_norm_stderr": 0.014893391735249608 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.23, + 
"acc_stderr": 0.04229525846816505, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816505 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.030161911930767102, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.030161911930767102 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.27346938775510204, + "acc_stderr": 0.02853556033712846, + "acc_norm": 0.27346938775510204, + "acc_norm_stderr": 0.02853556033712846 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.24050632911392406, + "acc_stderr": 0.027820781981149678, + "acc_norm": 0.24050632911392406, + "acc_norm_stderr": 0.027820781981149678 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2542372881355932, + "acc_stderr": 0.011121129007840673, + "acc_norm": 0.2542372881355932, + "acc_norm_stderr": 0.011121129007840673 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.030190282453501943, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.030190282453501943 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.296969696969697, + "acc_stderr": 0.035679697722680474, + "acc_norm": 0.296969696969697, + "acc_norm_stderr": 0.035679697722680474 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2484700122399021, + "mc1_stderr": 0.015127427096520695, + "mc2": 0.4164200982514757, + "mc2_stderr": 0.014968651647163487 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2632821723730815, + "acc_stderr": 0.015141752199573203, + "acc_norm": 0.3270365997638725, + "acc_norm_stderr": 0.016129047485457026 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + 
"harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + 
"harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "mu0gum/AIFT-42dot_LLM-PLM-1.3B-ao-instruct-all-v0.91", + "model_sha": "14283b013d995614df1830afe6cb580d8d645a88", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/mu0gum/AIFT-42dot_LLM-PLM-1.3B-ao-instruct-all-v1.0/result_2024-02-13 17:04:46.json b/mu0gum/AIFT-42dot_LLM-PLM-1.3B-ao-instruct-all-v1.0/result_2024-02-13 17:04:46.json new file mode 100644 index 0000000000000000000000000000000000000000..109212a533c37ae2a55f9b1f695ded52bafa18e7 --- /dev/null +++ b/mu0gum/AIFT-42dot_LLM-PLM-1.3B-ao-instruct-all-v1.0/result_2024-02-13 17:04:46.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2832764505119454, + "acc_stderr": 0.013167478735134576, + "acc_norm": 0.33447098976109213, + "acc_norm_stderr": 0.01378746032244138 + }, + "harness|ko_hellaswag|10": { + "acc": 0.36367257518422624, + "acc_stderr": 0.00480072813879239, + "acc_norm": 0.46574387572196774, + "acc_norm_stderr": 0.004978056798794864 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.26900584795321636, + "acc_stderr": 0.0340105262010409, + "acc_norm": 0.26900584795321636, + "acc_norm_stderr": 0.0340105262010409 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.20388349514563106, + "acc_stderr": 0.03989139859531771, + "acc_norm": 0.20388349514563106, + "acc_norm_stderr": 0.03989139859531771 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.24393358876117496, + "acc_stderr": 0.015357212665829484, + "acc_norm": 
0.24393358876117496, + "acc_norm_stderr": 0.015357212665829484 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.035914440841969694, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.035914440841969694 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.32340425531914896, + "acc_stderr": 0.030579442773610344, + "acc_norm": 0.32340425531914896, + "acc_norm_stderr": 0.030579442773610344 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.37349397590361444, + "acc_stderr": 0.037658451171688624, + "acc_norm": 0.37349397590361444, + "acc_norm_stderr": 0.037658451171688624 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2861736334405145, + "acc_stderr": 0.025670259242188947, + "acc_norm": 0.2861736334405145, + "acc_norm_stderr": 0.025670259242188947 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.25112107623318386, + "acc_stderr": 0.02910522083322461, + "acc_norm": 0.25112107623318386, + "acc_norm_stderr": 0.02910522083322461 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.29770992366412213, + "acc_stderr": 0.04010358942462203, + "acc_norm": 0.29770992366412213, + "acc_norm_stderr": 0.04010358942462203 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.32323232323232326, + "acc_stderr": 0.03332299921070644, + "acc_norm": 0.32323232323232326, + "acc_norm_stderr": 0.03332299921070644 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2482758620689655, + "acc_stderr": 0.03600105692727772, + "acc_norm": 0.2482758620689655, + "acc_norm_stderr": 0.03600105692727772 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 
0.04280105837364395, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.04280105837364395 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.31932773109243695, + "acc_stderr": 0.030283995525884396, + "acc_norm": 0.31932773109243695, + "acc_norm_stderr": 0.030283995525884396 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2948717948717949, + "acc_stderr": 0.023119362758232294, + "acc_norm": 0.2948717948717949, + "acc_norm_stderr": 0.023119362758232294 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.25, + "acc_stderr": 0.04186091791394607, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04186091791394607 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.21674876847290642, + "acc_stderr": 0.02899033125251624, + "acc_norm": 0.21674876847290642, + "acc_norm_stderr": 0.02899033125251624 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.32903225806451614, + "acc_stderr": 0.026729499068349965, + "acc_norm": 0.32903225806451614, + "acc_norm_stderr": 0.026729499068349965 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.027236013946196704, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.027236013946196704 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.29056603773584905, + "acc_stderr": 0.027943219989337156, + "acc_norm": 0.29056603773584905, + "acc_norm_stderr": 0.027943219989337156 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.3, + "acc_stderr": 0.04389311454644286, + "acc_norm": 0.3, + "acc_norm_stderr": 0.04389311454644286 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.23333333333333334, + "acc_stderr": 
0.025787874220959323, + "acc_norm": 0.23333333333333334, + "acc_norm_stderr": 0.025787874220959323 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33112582781456956, + "acc_stderr": 0.038425817186598696, + "acc_norm": 0.33112582781456956, + "acc_norm_stderr": 0.038425817186598696 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.36318407960199006, + "acc_stderr": 0.034005985055990146, + "acc_norm": 0.36318407960199006, + "acc_norm_stderr": 0.034005985055990146 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2543352601156069, + "acc_stderr": 0.0332055644308557, + "acc_norm": 0.2543352601156069, + "acc_norm_stderr": 0.0332055644308557 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.25132275132275134, + "acc_stderr": 0.022340482339643895, + "acc_norm": 0.25132275132275134, + "acc_norm_stderr": 0.022340482339643895 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.20833333333333334, + "acc_stderr": 0.033961162058453336, + "acc_norm": 0.20833333333333334, + "acc_norm_stderr": 0.033961162058453336 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816503, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816503 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.23121387283236994, + "acc_stderr": 0.022698657167855706, + "acc_norm": 0.23121387283236994, + "acc_norm_stderr": 0.022698657167855706 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2883435582822086, + "acc_stderr": 0.035590395316173425, + "acc_norm": 0.2883435582822086, + "acc_norm_stderr": 0.035590395316173425 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.29012345679012347, + "acc_stderr": 0.025251173936495026, + "acc_norm": 0.29012345679012347, + "acc_norm_stderr": 0.025251173936495026 + }, + "harness|ko_mmlu_college_mathematics|5": { + 
"acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.3160621761658031, + "acc_stderr": 0.03355397369686173, + "acc_norm": 0.3160621761658031, + "acc_norm_stderr": 0.03355397369686173 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.04185774424022056, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.04185774424022056 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.29724770642201837, + "acc_stderr": 0.01959570722464354, + "acc_norm": 0.29724770642201837, + "acc_norm_stderr": 0.01959570722464354 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.03932537680392871, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.03932537680392871 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.0255531699918265, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.0255531699918265 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.17, + "acc_stderr": 0.03775251680686371, + "acc_norm": 0.17, + "acc_norm_stderr": 0.03775251680686371 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.35537190082644626, + "acc_stderr": 0.04369236326573981, + "acc_norm": 0.35537190082644626, + "acc_norm_stderr": 0.04369236326573981 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.17763157894736842, + "acc_stderr": 0.03110318238312338, + "acc_norm": 0.17763157894736842, + "acc_norm_stderr": 0.03110318238312338 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.24836601307189543, + "acc_stderr": 0.017479487001364764, + "acc_norm": 0.24836601307189543, + "acc_norm_stderr": 0.017479487001364764 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.24468085106382978, + "acc_stderr": 0.025645553622266726, + "acc_norm": 0.24468085106382978, + "acc_norm_stderr": 0.025645553622266726 + }, 
+ "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25, + "acc_stderr": 0.04109974682633932, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04109974682633932 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3425925925925926, + "acc_stderr": 0.03236585252602159, + "acc_norm": 0.3425925925925926, + "acc_norm_stderr": 0.03236585252602159 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27262569832402234, + "acc_stderr": 0.014893391735249608, + "acc_norm": 0.27262569832402234, + "acc_norm_stderr": 0.014893391735249608 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.41544117647058826, + "acc_stderr": 0.029935342707877746, + "acc_norm": 0.41544117647058826, + "acc_norm_stderr": 0.029935342707877746 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2571428571428571, + "acc_stderr": 0.027979823538744553, + "acc_norm": 0.2571428571428571, + "acc_norm_stderr": 0.027979823538744553 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.28270042194092826, + "acc_stderr": 0.02931281415395592, + "acc_norm": 0.28270042194092826, + "acc_norm_stderr": 0.02931281415395592 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2607561929595828, + "acc_stderr": 0.011213471559602345, + "acc_norm": 0.2607561929595828, + "acc_norm_stderr": 0.011213471559602345 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.23039215686274508, + "acc_stderr": 0.029554292605695053, + "acc_norm": 0.23039215686274508, + "acc_norm_stderr": 0.029554292605695053 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2909090909090909, + "acc_stderr": 0.03546563019624336, + "acc_norm": 
0.2909090909090909, + "acc_norm_stderr": 0.03546563019624336 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2582619339045288, + "mc1_stderr": 0.015321821688476187, + "mc2": 0.4134131092061002, + "mc2_stderr": 0.014930017621504936 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.282172373081464, + "acc_stderr": 0.015473271583988433, + "acc_norm": 0.35182998819362454, + "acc_norm_stderr": 0.016418206451218057 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + 
"harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "mu0gum/AIFT-42dot_LLM-PLM-1.3B-ao-instruct-all-v1.0", + "model_sha": "6f00d4861122ed12e700f500dcd8130c1e65e4cb", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/mu0gum/AIFT-42dot_LLM-PLM-1.3B-ao-instruct-all-v1.3/result_2024-02-28 13:59:20.json b/mu0gum/AIFT-42dot_LLM-PLM-1.3B-ao-instruct-all-v1.3/result_2024-02-28 13:59:20.json new file mode 100644 index 0000000000000000000000000000000000000000..b8756d2e9385b3e336b90e797f40b6844ac825b3 --- /dev/null +++ b/mu0gum/AIFT-42dot_LLM-PLM-1.3B-ao-instruct-all-v1.3/result_2024-02-28 13:59:20.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2790102389078498, + "acc_stderr": 
0.013106784883601352, + "acc_norm": 0.33361774744027306, + "acc_norm_stderr": 0.013778687054176536 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3668591913961362, + "acc_stderr": 0.004809626723626824, + "acc_norm": 0.47122087233618803, + "acc_norm_stderr": 0.004981509099276349 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.2573099415204678, + "acc_stderr": 0.03352799844161865, + "acc_norm": 0.2573099415204678, + "acc_norm_stderr": 0.03352799844161865 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.14563106796116504, + "acc_stderr": 0.03492606476623791, + "acc_norm": 0.14563106796116504, + "acc_norm_stderr": 0.03492606476623791 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2669220945083014, + "acc_stderr": 0.015818450894777552, + "acc_norm": 0.2669220945083014, + "acc_norm_stderr": 0.015818450894777552 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3037037037037037, + "acc_stderr": 0.039725528847851375, + "acc_norm": 0.3037037037037037, + "acc_norm_stderr": 0.039725528847851375 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3276595744680851, + "acc_stderr": 0.030683020843231, + "acc_norm": 0.3276595744680851, + "acc_norm_stderr": 0.030683020843231 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.30120481927710846, + "acc_stderr": 0.035716092300534796, + "acc_norm": 0.30120481927710846, + "acc_norm_stderr": 0.035716092300534796 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.31511254019292606, + "acc_stderr": 0.026385273703464492, + "acc_norm": 0.31511254019292606, + "acc_norm_stderr": 0.026385273703464492 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.28699551569506726, + "acc_stderr": 0.030360379710291957, + "acc_norm": 0.28699551569506726, + "acc_norm_stderr": 0.030360379710291957 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.33587786259541985, + 
"acc_stderr": 0.041423137719966634, + "acc_norm": 0.33587786259541985, + "acc_norm_stderr": 0.041423137719966634 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.23, + "acc_stderr": 0.042295258468165044, + "acc_norm": 0.23, + "acc_norm_stderr": 0.042295258468165044 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.25757575757575757, + "acc_stderr": 0.031156269519646836, + "acc_norm": 0.25757575757575757, + "acc_norm_stderr": 0.031156269519646836 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.296551724137931, + "acc_stderr": 0.03806142687309993, + "acc_norm": 0.296551724137931, + "acc_norm_stderr": 0.03806142687309993 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237654, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237654 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.24789915966386555, + "acc_stderr": 0.028047967224176892, + "acc_norm": 0.24789915966386555, + "acc_norm_stderr": 0.028047967224176892 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.24358974358974358, + "acc_stderr": 0.021763733684173926, + "acc_norm": 0.24358974358974358, + "acc_norm_stderr": 0.021763733684173926 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.26851851851851855, + "acc_stderr": 0.04284467968052192, + "acc_norm": 0.26851851851851855, + "acc_norm_stderr": 0.04284467968052192 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.23645320197044334, + "acc_stderr": 0.029896114291733552, + "acc_norm": 0.23645320197044334, + "acc_norm_stderr": 0.029896114291733552 + }, + 
"harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3225806451612903, + "acc_stderr": 0.026593084516572288, + "acc_norm": 0.3225806451612903, + "acc_norm_stderr": 0.026593084516572288 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2863247863247863, + "acc_stderr": 0.029614323690456648, + "acc_norm": 0.2863247863247863, + "acc_norm_stderr": 0.029614323690456648 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.23773584905660378, + "acc_stderr": 0.026199808807561942, + "acc_norm": 0.23773584905660378, + "acc_norm_stderr": 0.026199808807561942 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.3181818181818182, + "acc_stderr": 0.04461272175910508, + "acc_norm": 0.3181818181818182, + "acc_norm_stderr": 0.04461272175910508 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.27037037037037037, + "acc_stderr": 0.027080372815145675, + "acc_norm": 0.27037037037037037, + "acc_norm_stderr": 0.027080372815145675 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3576158940397351, + "acc_stderr": 0.03913453431177258, + "acc_norm": 0.3576158940397351, + "acc_norm_stderr": 0.03913453431177258 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.27860696517412936, + "acc_stderr": 0.031700561834973086, + "acc_norm": 0.27860696517412936, + "acc_norm_stderr": 0.031700561834973086 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.1907514450867052, + "acc_stderr": 0.029957851329869327, + "acc_norm": 0.1907514450867052, + "acc_norm_stderr": 0.029957851329869327 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.24603174603174602, + "acc_stderr": 0.022182037202948368, + "acc_norm": 0.24603174603174602, + "acc_norm_stderr": 0.022182037202948368 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.22916666666666666, + "acc_stderr": 0.035146974678623884, + "acc_norm": 0.22916666666666666, + "acc_norm_stderr": 0.035146974678623884 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.23, + "acc_stderr": 
0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.1994219653179191, + "acc_stderr": 0.021511900654252552, + "acc_norm": 0.1994219653179191, + "acc_norm_stderr": 0.021511900654252552 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.32515337423312884, + "acc_stderr": 0.036803503712864616, + "acc_norm": 0.32515337423312884, + "acc_norm_stderr": 0.036803503712864616 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.02492200116888633, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.02492200116888633 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.22279792746113988, + "acc_stderr": 0.030031147977641538, + "acc_norm": 0.22279792746113988, + "acc_norm_stderr": 0.030031147977641538 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.041424397194893624, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.041424397194893624 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.29357798165137616, + "acc_stderr": 0.019525151122639667, + "acc_norm": 0.29357798165137616, + "acc_norm_stderr": 0.019525151122639667 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.24603174603174602, + "acc_stderr": 0.03852273364924315, + "acc_norm": 0.24603174603174602, + "acc_norm_stderr": 0.03852273364924315 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3104575163398693, + "acc_stderr": 0.0264930332251459, + "acc_norm": 0.3104575163398693, + "acc_norm_stderr": 0.0264930332251459 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.27, + 
"acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.38016528925619836, + "acc_stderr": 0.04431324501968432, + "acc_norm": 0.38016528925619836, + "acc_norm_stderr": 0.04431324501968432 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.19736842105263158, + "acc_stderr": 0.03238981601699397, + "acc_norm": 0.19736842105263158, + "acc_norm_stderr": 0.03238981601699397 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.27124183006535946, + "acc_stderr": 0.017986615304030305, + "acc_norm": 0.27124183006535946, + "acc_norm_stderr": 0.017986615304030305 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2765957446808511, + "acc_stderr": 0.026684564340460983, + "acc_norm": 0.2765957446808511, + "acc_norm_stderr": 0.026684564340460983 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25, + "acc_stderr": 0.04109974682633932, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04109974682633932 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2638888888888889, + "acc_stderr": 0.030058202704309846, + "acc_norm": 0.2638888888888889, + "acc_norm_stderr": 0.030058202704309846 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27262569832402234, + "acc_stderr": 0.014893391735249608, + "acc_norm": 0.27262569832402234, + "acc_norm_stderr": 0.014893391735249608 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.029520095697687765, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.029520095697687765 + }, + "harness|ko_mmlu_security_studies|5": 
{ + "acc": 0.2530612244897959, + "acc_stderr": 0.027833023871399683, + "acc_norm": 0.2530612244897959, + "acc_norm_stderr": 0.027833023871399683 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.32489451476793246, + "acc_stderr": 0.03048603938910531, + "acc_norm": 0.32489451476793246, + "acc_norm_stderr": 0.03048603938910531 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2542372881355932, + "acc_stderr": 0.011121129007840673, + "acc_norm": 0.2542372881355932, + "acc_norm_stderr": 0.011121129007840673 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.03166009679399812, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.03166009679399812 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2909090909090909, + "acc_stderr": 0.03546563019624336, + "acc_norm": 0.2909090909090909, + "acc_norm_stderr": 0.03546563019624336 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.31334149326805383, + "mc1_stderr": 0.016238065069059608, + "mc2": 0.4557110778835503, + "mc2_stderr": 0.015073891611402771 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.27390791027154665, + "acc_stderr": 0.015332499474791024, + "acc_norm": 0.3152302243211334, + "acc_norm_stderr": 0.01597353492379447 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + 
"harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + 
"harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "mu0gum/AIFT-42dot_LLM-PLM-1.3B-ao-instruct-all-v1.3", + "model_sha": "126f0ea594c978320232c3bb7c774fb8d2ce3633", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/mu0gum/AIFT-42dot_LLM-SFT-1.3B-ao-instruct-all-v0.9/result_2024-02-07 10:22:36.json b/mu0gum/AIFT-42dot_LLM-SFT-1.3B-ao-instruct-all-v0.9/result_2024-02-07 10:22:36.json new file mode 100644 index 0000000000000000000000000000000000000000..46ec6e6a2fe6873906a45a31992c3b81d4d7209d --- /dev/null +++ b/mu0gum/AIFT-42dot_LLM-SFT-1.3B-ao-instruct-all-v0.9/result_2024-02-07 10:22:36.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.27474402730375425, + "acc_stderr": 0.013044617212771227, + "acc_norm": 0.3370307167235495, + "acc_norm_stderr": 0.013813476652902274 + }, + "harness|ko_hellaswag|10": { + "acc": 0.369946225851424, + "acc_stderr": 0.00481803139613893, + "acc_norm": 0.469627564230233, + "acc_norm_stderr": 0.004980566907790451 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.03301405946987251, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.03301405946987251 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2524271844660194, + "acc_stderr": 0.04301250399690876, + "acc_norm": 0.2524271844660194, + "acc_norm_stderr": 0.04301250399690876 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.23627075351213284, + "acc_stderr": 0.01519047371703751, + "acc_norm": 0.23627075351213284, + "acc_norm_stderr": 0.01519047371703751 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2518518518518518, + "acc_stderr": 0.03749850709174022, + "acc_norm": 0.2518518518518518, + "acc_norm_stderr": 0.03749850709174022 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 
0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.28085106382978725, + "acc_stderr": 0.029379170464124818, + "acc_norm": 0.28085106382978725, + "acc_norm_stderr": 0.029379170464124818 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3373493975903614, + "acc_stderr": 0.03680783690727581, + "acc_norm": 0.3373493975903614, + "acc_norm_stderr": 0.03680783690727581 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.26688102893890675, + "acc_stderr": 0.02512263760881664, + "acc_norm": 0.26688102893890675, + "acc_norm_stderr": 0.02512263760881664 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.24663677130044842, + "acc_stderr": 0.028930413120910867, + "acc_norm": 0.24663677130044842, + "acc_norm_stderr": 0.028930413120910867 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.31297709923664124, + "acc_stderr": 0.04066962905677698, + "acc_norm": 0.31297709923664124, + "acc_norm_stderr": 0.04066962905677698 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.26262626262626265, + "acc_stderr": 0.031353050095330855, + "acc_norm": 0.26262626262626265, + "acc_norm_stderr": 0.031353050095330855 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2689655172413793, + "acc_stderr": 0.03695183311650232, + "acc_norm": 0.2689655172413793, + "acc_norm_stderr": 0.03695183311650232 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.042207736591714534, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.042207736591714534 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.2773109243697479, + "acc_stderr": 0.029079374539480007, + "acc_norm": 0.2773109243697479, + "acc_norm_stderr": 0.029079374539480007 + }, + 
"harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.02242127361292371, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.02242127361292371 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036845, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036845 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.28703703703703703, + "acc_stderr": 0.043733130409147614, + "acc_norm": 0.28703703703703703, + "acc_norm_stderr": 0.043733130409147614 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2315270935960591, + "acc_stderr": 0.029678333141444458, + "acc_norm": 0.2315270935960591, + "acc_norm_stderr": 0.029678333141444458 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.27741935483870966, + "acc_stderr": 0.025470196835900055, + "acc_norm": 0.27741935483870966, + "acc_norm_stderr": 0.025470196835900055 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.26495726495726496, + "acc_stderr": 0.028911208802749475, + "acc_norm": 0.26495726495726496, + "acc_norm_stderr": 0.028911208802749475 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2339622641509434, + "acc_stderr": 0.02605529690115292, + "acc_norm": 0.2339622641509434, + "acc_norm_stderr": 0.02605529690115292 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.3, + "acc_stderr": 0.04389311454644286, + "acc_norm": 0.3, + "acc_norm_stderr": 0.04389311454644286 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.21851851851851853, + "acc_stderr": 0.025195752251823793, + "acc_norm": 0.21851851851851853, + "acc_norm_stderr": 0.025195752251823793 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + "acc_stderr": 0.03684881521389023, + "acc_norm": 0.2847682119205298, + "acc_norm_stderr": 
0.03684881521389023 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.26865671641791045, + "acc_stderr": 0.03134328358208954, + "acc_norm": 0.26865671641791045, + "acc_norm_stderr": 0.03134328358208954 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2658959537572254, + "acc_stderr": 0.033687629322594295, + "acc_norm": 0.2658959537572254, + "acc_norm_stderr": 0.033687629322594295 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.25132275132275134, + "acc_stderr": 0.022340482339643898, + "acc_norm": 0.25132275132275134, + "acc_norm_stderr": 0.022340482339643898 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2361111111111111, + "acc_stderr": 0.03551446610810826, + "acc_norm": 0.2361111111111111, + "acc_norm_stderr": 0.03551446610810826 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2543352601156069, + "acc_stderr": 0.02344582627654554, + "acc_norm": 0.2543352601156069, + "acc_norm_stderr": 0.02344582627654554 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.25766871165644173, + "acc_stderr": 0.03436150827846917, + "acc_norm": 0.25766871165644173, + "acc_norm_stderr": 0.03436150827846917 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.25617283950617287, + "acc_stderr": 0.0242885336377261, + "acc_norm": 0.25617283950617287, + "acc_norm_stderr": 0.0242885336377261 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.18134715025906736, + "acc_stderr": 0.027807032360686088, + "acc_norm": 0.18134715025906736, + 
"acc_norm_stderr": 0.027807032360686088 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.22807017543859648, + "acc_stderr": 0.03947152782669415, + "acc_norm": 0.22807017543859648, + "acc_norm_stderr": 0.03947152782669415 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.27889908256880735, + "acc_stderr": 0.01922746887646351, + "acc_norm": 0.27889908256880735, + "acc_norm_stderr": 0.01922746887646351 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.24603174603174602, + "acc_stderr": 0.03852273364924317, + "acc_norm": 0.24603174603174602, + "acc_norm_stderr": 0.03852273364924317 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.025829163272757475, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.025829163272757475 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2396694214876033, + "acc_stderr": 0.038968789850704164, + "acc_norm": 0.2396694214876033, + "acc_norm_stderr": 0.038968789850704164 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.21710526315789475, + "acc_stderr": 0.03355045304882924, + "acc_norm": 0.21710526315789475, + "acc_norm_stderr": 0.03355045304882924 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2630718954248366, + "acc_stderr": 0.017812676542320657, + "acc_norm": 0.2630718954248366, + "acc_norm_stderr": 0.017812676542320657 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.24468085106382978, + "acc_stderr": 0.025645553622266722, + "acc_norm": 0.24468085106382978, + "acc_norm_stderr": 0.025645553622266722 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.21428571428571427, + "acc_stderr": 0.03894641120044792, + "acc_norm": 0.21428571428571427, + "acc_norm_stderr": 0.03894641120044792 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3055555555555556, + 
"acc_stderr": 0.03141554629402546, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.03141554629402546 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2737430167597765, + "acc_stderr": 0.014912413096372432, + "acc_norm": 0.2737430167597765, + "acc_norm_stderr": 0.014912413096372432 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3713235294117647, + "acc_stderr": 0.02934980313976587, + "acc_norm": 0.3713235294117647, + "acc_norm_stderr": 0.02934980313976587 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.19183673469387755, + "acc_stderr": 0.02520696315422538, + "acc_norm": 0.19183673469387755, + "acc_norm_stderr": 0.02520696315422538 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.3291139240506329, + "acc_stderr": 0.030587326294702358, + "acc_norm": 0.3291139240506329, + "acc_norm_stderr": 0.030587326294702358 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.25554106910039115, + "acc_stderr": 0.011139857833598514, + "acc_norm": 0.25554106910039115, + "acc_norm_stderr": 0.011139857833598514 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.22058823529411764, + "acc_stderr": 0.02910225438967409, + "acc_norm": 0.22058823529411764, + "acc_norm_stderr": 0.02910225438967409 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2606060606060606, + "acc_stderr": 0.03427743175816524, + "acc_norm": 0.2606060606060606, + "acc_norm_stderr": 0.03427743175816524 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24724602203182375, + "mc1_stderr": 0.015102404797359649, + "mc2": 0.41661679187553124, + "mc2_stderr": 0.014995883966939919 + }, + 
"harness|ko_commongen_v2|2": { + "acc": 0.25737898465171194, + "acc_stderr": 0.015030899730346756, + "acc_norm": 0.2668240850059032, + "acc_norm_stderr": 0.015206575684565897 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + 
"harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "mu0gum/AIFT-42dot_LLM-SFT-1.3B-ao-instruct-all-v0.9", + "model_sha": "157558647c5a9c00ea34f56c0e179a179251c2a1", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/mu0gum/AIFT-42dot_LLM-SFT-1.3B-ao-instruct-all-v1.1/result_2024-02-21 14:22:30.json b/mu0gum/AIFT-42dot_LLM-SFT-1.3B-ao-instruct-all-v1.1/result_2024-02-21 14:22:30.json new file mode 100644 index 0000000000000000000000000000000000000000..21464f635b2a9a9c6e77f9f1173e4a07091011d7 --- /dev/null +++ b/mu0gum/AIFT-42dot_LLM-SFT-1.3B-ao-instruct-all-v1.1/result_2024-02-21 14:22:30.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.28242320819112626, + "acc_stderr": 0.01315545688409722, + "acc_norm": 0.3430034129692833, + "acc_norm_stderr": 0.013872423223718174 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3625771758613822, + "acc_stderr": 0.004797616754372308, + "acc_norm": 0.46026687910774744, + 
"acc_norm_stderr": 0.0049740015155809655 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.34502923976608185, + "acc_stderr": 0.036459813773888065, + "acc_norm": 0.34502923976608185, + "acc_norm_stderr": 0.036459813773888065 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.1941747572815534, + "acc_stderr": 0.03916667762822583, + "acc_norm": 0.1941747572815534, + "acc_norm_stderr": 0.03916667762822583 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.24904214559386972, + "acc_stderr": 0.015464676163395976, + "acc_norm": 0.24904214559386972, + "acc_norm_stderr": 0.015464676163395976 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2074074074074074, + "acc_stderr": 0.03502553170678318, + "acc_norm": 0.2074074074074074, + "acc_norm_stderr": 0.03502553170678318 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.31063829787234043, + "acc_stderr": 0.030251237579213174, + "acc_norm": 0.31063829787234043, + "acc_norm_stderr": 0.030251237579213174 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3072289156626506, + "acc_stderr": 0.03591566797824664, + "acc_norm": 0.3072289156626506, + "acc_norm_stderr": 0.03591566797824664 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.24758842443729903, + "acc_stderr": 0.024513879973621967, + "acc_norm": 0.24758842443729903, + "acc_norm_stderr": 0.024513879973621967 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.2645739910313901, + "acc_stderr": 0.02960510321703834, + "acc_norm": 0.2645739910313901, + "acc_norm_stderr": 0.02960510321703834 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.21374045801526717, + "acc_stderr": 0.0359546161177469, + "acc_norm": 0.21374045801526717, + "acc_norm_stderr": 0.0359546161177469 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + 
"acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.21212121212121213, + "acc_stderr": 0.029126522834586832, + "acc_norm": 0.21212121212121213, + "acc_norm_stderr": 0.029126522834586832 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2620689655172414, + "acc_stderr": 0.03664666337225256, + "acc_norm": 0.2620689655172414, + "acc_norm_stderr": 0.03664666337225256 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.04158307533083286, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.04158307533083286 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.28991596638655465, + "acc_stderr": 0.029472485833136094, + "acc_norm": 0.28991596638655465, + "acc_norm_stderr": 0.029472485833136094 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.23333333333333334, + "acc_stderr": 0.021444547301560483, + "acc_norm": 0.23333333333333334, + "acc_norm_stderr": 0.021444547301560483 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036844, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036844 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.26851851851851855, + "acc_stderr": 0.04284467968052191, + "acc_norm": 0.26851851851851855, + "acc_norm_stderr": 0.04284467968052191 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.17733990147783252, + "acc_stderr": 0.026874337276808342, + "acc_norm": 0.17733990147783252, + "acc_norm_stderr": 0.026874337276808342 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.23548387096774193, + "acc_stderr": 0.02413763242933772, + "acc_norm": 0.23548387096774193, + "acc_norm_stderr": 0.02413763242933772 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.28205128205128205, + "acc_stderr": 
0.029480360549541194, + "acc_norm": 0.28205128205128205, + "acc_norm_stderr": 0.029480360549541194 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2339622641509434, + "acc_stderr": 0.02605529690115292, + "acc_norm": 0.2339622641509434, + "acc_norm_stderr": 0.02605529690115292 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.22727272727272727, + "acc_stderr": 0.04013964554072775, + "acc_norm": 0.22727272727272727, + "acc_norm_stderr": 0.04013964554072775 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.27037037037037037, + "acc_stderr": 0.027080372815145668, + "acc_norm": 0.27037037037037037, + "acc_norm_stderr": 0.027080372815145668 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3576158940397351, + "acc_stderr": 0.03913453431177258, + "acc_norm": 0.3576158940397351, + "acc_norm_stderr": 0.03913453431177258 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.26865671641791045, + "acc_stderr": 0.03134328358208954, + "acc_norm": 0.26865671641791045, + "acc_norm_stderr": 0.03134328358208954 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.19653179190751446, + "acc_stderr": 0.030299574664788137, + "acc_norm": 0.19653179190751446, + "acc_norm_stderr": 0.030299574664788137 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.24867724867724866, + "acc_stderr": 0.02226181769240018, + "acc_norm": 0.24867724867724866, + "acc_norm_stderr": 0.02226181769240018 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.22916666666666666, + "acc_stderr": 0.03514697467862388, + "acc_norm": 0.22916666666666666, + "acc_norm_stderr": 0.03514697467862388 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909284, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909284 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_moral_disputes|5": { + 
"acc": 0.22254335260115607, + "acc_stderr": 0.02239421566194282, + "acc_norm": 0.22254335260115607, + "acc_norm_stderr": 0.02239421566194282 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.22699386503067484, + "acc_stderr": 0.0329109957861577, + "acc_norm": 0.22699386503067484, + "acc_norm_stderr": 0.0329109957861577 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.28703703703703703, + "acc_stderr": 0.025171041915309684, + "acc_norm": 0.28703703703703703, + "acc_norm_stderr": 0.025171041915309684 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.20725388601036268, + "acc_stderr": 0.029252823291803617, + "acc_norm": 0.20725388601036268, + "acc_norm_stderr": 0.029252823291803617 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.040969851398436716, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.040969851398436716 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.25137614678899084, + "acc_stderr": 0.01859920636028741, + "acc_norm": 0.25137614678899084, + "acc_norm_stderr": 0.01859920636028741 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3492063492063492, + "acc_stderr": 0.04263906892795132, + "acc_norm": 0.3492063492063492, + "acc_norm_stderr": 0.04263906892795132 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2875816993464052, + "acc_stderr": 0.02591780611714716, + "acc_norm": 0.2875816993464052, + "acc_norm_stderr": 0.02591780611714716 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.24793388429752067, + "acc_stderr": 0.03941897526516304, + "acc_norm": 0.24793388429752067, + "acc_norm_stderr": 0.03941897526516304 + }, + 
"harness|ko_mmlu_astronomy|5": { + "acc": 0.21052631578947367, + "acc_stderr": 0.03317672787533157, + "acc_norm": 0.21052631578947367, + "acc_norm_stderr": 0.03317672787533157 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.26143790849673204, + "acc_stderr": 0.017776947157528037, + "acc_norm": 0.26143790849673204, + "acc_norm_stderr": 0.017776947157528037 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2624113475177305, + "acc_stderr": 0.026244920349843007, + "acc_norm": 0.2624113475177305, + "acc_norm_stderr": 0.026244920349843007 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.21428571428571427, + "acc_stderr": 0.038946411200447915, + "acc_norm": 0.21428571428571427, + "acc_norm_stderr": 0.038946411200447915 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.32407407407407407, + "acc_stderr": 0.03191923445686185, + "acc_norm": 0.32407407407407407, + "acc_norm_stderr": 0.03191923445686185 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27262569832402234, + "acc_stderr": 0.014893391735249608, + "acc_norm": 0.27262569832402234, + "acc_norm_stderr": 0.014893391735249608 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.18, + "acc_stderr": 0.03861229196653695, + "acc_norm": 0.18, + "acc_norm_stderr": 0.03861229196653695 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3088235294117647, + "acc_stderr": 0.028064998167040094, + "acc_norm": 0.3088235294117647, + "acc_norm_stderr": 0.028064998167040094 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.20816326530612245, + "acc_stderr": 0.0259911176728133, + "acc_norm": 0.20816326530612245, + "acc_norm_stderr": 0.0259911176728133 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.3037974683544304, + "acc_stderr": 0.029936696387138615, + 
"acc_norm": 0.3037974683544304, + "acc_norm_stderr": 0.029936696387138615 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.242503259452412, + "acc_stderr": 0.010946570966348783, + "acc_norm": 0.242503259452412, + "acc_norm_stderr": 0.010946570966348783 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.028379449451588667, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.028379449451588667 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03225078108306289, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03225078108306289 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2594859241126071, + "mc1_stderr": 0.015345409485557963, + "mc2": 0.4157573626919763, + "mc2_stderr": 0.014972199402773433 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.26564344746162927, + "acc_stderr": 0.015185107107791248, + "acc_norm": 0.33530106257378983, + "acc_norm_stderr": 0.016230981232989817 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + 
"harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "mu0gum/AIFT-42dot_LLM-SFT-1.3B-ao-instruct-all-v1.1", + "model_sha": "64e938c88056380d2b3aaa1ba3e766c7e631009c", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + 
"num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/mu0gum/AIFT-Yi-Ko-6B-v1.11/result_2024-02-23 16:05:42.json b/mu0gum/AIFT-Yi-Ko-6B-v1.11/result_2024-02-23 16:05:42.json new file mode 100644 index 0000000000000000000000000000000000000000..c637ed6ccf63698f03b8ac96cb8a20877579bd31 --- /dev/null +++ b/mu0gum/AIFT-Yi-Ko-6B-v1.11/result_2024-02-23 16:05:42.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.35238907849829354, + "acc_stderr": 0.01396014260059869, + "acc_norm": 0.4087030716723549, + "acc_norm_stderr": 0.014365750345427006 + }, + "harness|ko_hellaswag|10": { + "acc": 0.39852619000199163, + "acc_stderr": 0.004885942040894558, + "acc_norm": 0.5333598884684326, + "acc_norm_stderr": 0.004978662946687279 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5263157894736842, + "acc_stderr": 0.03829509868994727, + "acc_norm": 0.5263157894736842, + "acc_norm_stderr": 0.03829509868994727 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5533980582524272, + "acc_stderr": 0.04922424153458933, + "acc_norm": 0.5533980582524272, + "acc_norm_stderr": 0.04922424153458933 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5683269476372924, + "acc_stderr": 0.017712228939299794, + "acc_norm": 0.5683269476372924, + "acc_norm_stderr": 0.017712228939299794 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45185185185185184, + "acc_stderr": 0.04299268905480863, + "acc_norm": 0.45185185185185184, + "acc_norm_stderr": 0.04299268905480863 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768077, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768077 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.40425531914893614, + "acc_stderr": 0.032081157507886836, + "acc_norm": 0.40425531914893614, + "acc_norm_stderr": 0.032081157507886836 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.37349397590361444, + 
"acc_stderr": 0.037658451171688624, + "acc_norm": 0.37349397590361444, + "acc_norm_stderr": 0.037658451171688624 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4694533762057878, + "acc_stderr": 0.02834504586484068, + "acc_norm": 0.4694533762057878, + "acc_norm_stderr": 0.02834504586484068 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.49327354260089684, + "acc_stderr": 0.03355476596234355, + "acc_norm": 0.49327354260089684, + "acc_norm_stderr": 0.03355476596234355 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5801526717557252, + "acc_stderr": 0.043285772152629735, + "acc_norm": 0.5801526717557252, + "acc_norm_stderr": 0.043285772152629735 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145631, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145631 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5404040404040404, + "acc_stderr": 0.035507024651313425, + "acc_norm": 0.5404040404040404, + "acc_norm_stderr": 0.035507024651313425 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.46206896551724136, + "acc_stderr": 0.041546596717075474, + "acc_norm": 0.46206896551724136, + "acc_norm_stderr": 0.041546596717075474 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.04023382273617746, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.04023382273617746 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4579831932773109, + "acc_stderr": 0.032363611119519416, + "acc_norm": 0.4579831932773109, + "acc_norm_stderr": 0.032363611119519416 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.44871794871794873, + "acc_stderr": 0.025217315184846482, + "acc_norm": 0.44871794871794873, + "acc_norm_stderr": 0.025217315184846482 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + 
"harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.048262172941398944, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.048262172941398944 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3891625615763547, + "acc_stderr": 0.034304624161038716, + "acc_norm": 0.3891625615763547, + "acc_norm_stderr": 0.034304624161038716 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.45483870967741935, + "acc_stderr": 0.028327743091561063, + "acc_norm": 0.45483870967741935, + "acc_norm_stderr": 0.028327743091561063 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6837606837606838, + "acc_stderr": 0.030463656747340247, + "acc_norm": 0.6837606837606838, + "acc_norm_stderr": 0.030463656747340247 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.46037735849056605, + "acc_stderr": 0.03067609659938918, + "acc_norm": 0.46037735849056605, + "acc_norm_stderr": 0.03067609659938918 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2814814814814815, + "acc_stderr": 0.027420019350945273, + "acc_norm": 0.2814814814814815, + "acc_norm_stderr": 0.027420019350945273 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.23841059602649006, + "acc_stderr": 0.0347918557259966, + "acc_norm": 0.23841059602649006, + "acc_norm_stderr": 0.0347918557259966 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5522388059701493, + "acc_stderr": 0.03516184772952168, + "acc_norm": 0.5522388059701493, + "acc_norm_stderr": 0.03516184772952168 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.36416184971098264, + "acc_stderr": 0.03669072477416906, + "acc_norm": 
0.36416184971098264, + "acc_norm_stderr": 0.03669072477416906 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.024278568024307702, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.024278568024307702 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.041227287076512825, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.041227287076512825 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237101, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237101 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4797687861271676, + "acc_stderr": 0.026897049996382875, + "acc_norm": 0.4797687861271676, + "acc_norm_stderr": 0.026897049996382875 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4294478527607362, + "acc_stderr": 0.03889066619112722, + "acc_norm": 0.4294478527607362, + "acc_norm_stderr": 0.03889066619112722 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.47530864197530864, + "acc_stderr": 0.027786800931427453, + "acc_norm": 0.47530864197530864, + "acc_norm_stderr": 0.027786800931427453 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.538860103626943, + "acc_stderr": 0.035975244117345775, + "acc_norm": 0.538860103626943, + "acc_norm_stderr": 0.035975244117345775 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3157894736842105, + "acc_stderr": 0.04372748290278008, + "acc_norm": 0.3157894736842105, + "acc_norm_stderr": 0.04372748290278008 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5743119266055046, + "acc_stderr": 
0.021199235972470795, + "acc_norm": 0.5743119266055046, + "acc_norm_stderr": 0.021199235972470795 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.039325376803928704, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.039325376803928704 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4215686274509804, + "acc_stderr": 0.02827549015679143, + "acc_norm": 0.4215686274509804, + "acc_norm_stderr": 0.02827549015679143 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6859504132231405, + "acc_stderr": 0.04236964753041018, + "acc_norm": 0.6859504132231405, + "acc_norm_stderr": 0.04236964753041018 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4407894736842105, + "acc_stderr": 0.040403110624904356, + "acc_norm": 0.4407894736842105, + "acc_norm_stderr": 0.040403110624904356 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3937908496732026, + "acc_stderr": 0.01976621199107307, + "acc_norm": 0.3937908496732026, + "acc_norm_stderr": 0.01976621199107307 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3262411347517731, + "acc_stderr": 0.027968453043563168, + "acc_norm": 0.3262411347517731, + "acc_norm_stderr": 0.027968453043563168 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3392857142857143, + "acc_stderr": 0.044939490686135404, + "acc_norm": 0.3392857142857143, + "acc_norm_stderr": 0.044939490686135404 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2638888888888889, + "acc_stderr": 0.030058202704309846, + "acc_norm": 0.2638888888888889, + "acc_norm_stderr": 0.030058202704309846 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24022346368715083, + "acc_stderr": 0.014288343803925284, + "acc_norm": 0.24022346368715083, + "acc_norm_stderr": 0.014288343803925284 + }, + 
"harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.53, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.53, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.34191176470588236, + "acc_stderr": 0.028814722422254177, + "acc_norm": 0.34191176470588236, + "acc_norm_stderr": 0.028814722422254177 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4122448979591837, + "acc_stderr": 0.03151236044674281, + "acc_norm": 0.4122448979591837, + "acc_norm_stderr": 0.03151236044674281 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5527426160337553, + "acc_stderr": 0.03236564251614192, + "acc_norm": 0.5527426160337553, + "acc_norm_stderr": 0.03236564251614192 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.32073011734028684, + "acc_stderr": 0.011921199991782611, + "acc_norm": 0.32073011734028684, + "acc_norm_stderr": 0.011921199991782611 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.45098039215686275, + "acc_stderr": 0.03492406104163614, + "acc_norm": 0.45098039215686275, + "acc_norm_stderr": 0.03492406104163614 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5636363636363636, + "acc_stderr": 0.03872592983524754, + "acc_norm": 0.5636363636363636, + "acc_norm_stderr": 0.03872592983524754 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2876376988984088, + "mc1_stderr": 0.015846315101394816, + "mc2": 0.43962611965878157, + "mc2_stderr": 0.015059997584340207 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5537190082644629, + "acc_stderr": 0.017090852631668332, + "acc_norm": 0.5655253837072018, + "acc_norm_stderr": 0.01704209862082494 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + 
"harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + 
"harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "mu0gum/AIFT-Yi-Ko-6B-v1.11", + "model_sha": "8e425514692a233e873d0d3a7842c6ffb195317d", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/mu0gum/AIFT-polyglot-ko-1.3b-ao-instruct-v0.91/result_2024-02-08 17:24:16.json b/mu0gum/AIFT-polyglot-ko-1.3b-ao-instruct-v0.91/result_2024-02-08 17:24:16.json new file mode 100644 index 0000000000000000000000000000000000000000..0ee7c11474b9346e32d2ef4ded8eeee99ee9b027 --- /dev/null +++ b/mu0gum/AIFT-polyglot-ko-1.3b-ao-instruct-v0.91/result_2024-02-08 17:24:16.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2354948805460751, + "acc_stderr": 0.012399451855004741, + "acc_norm": 0.2960750853242321, + "acc_norm_stderr": 0.01334091608524627 + }, + "harness|ko_hellaswag|10": { + "acc": 0.33638717386974704, + "acc_stderr": 0.004715075119834519, + "acc_norm": 0.4182433778131846, + "acc_norm_stderr": 0.004922624636945241 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.03188578017686398, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.03188578017686398 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.23300970873786409, + "acc_stderr": 0.04185832598928315, + 
"acc_norm": 0.23300970873786409, + "acc_norm_stderr": 0.04185832598928315 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2796934865900383, + "acc_stderr": 0.016050792148036546, + "acc_norm": 0.2796934865900383, + "acc_norm_stderr": 0.016050792148036546 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.22962962962962963, + "acc_stderr": 0.036333844140734636, + "acc_norm": 0.22962962962962963, + "acc_norm_stderr": 0.036333844140734636 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720683, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720683 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2765957446808511, + "acc_stderr": 0.029241883869628824, + "acc_norm": 0.2765957446808511, + "acc_norm_stderr": 0.029241883869628824 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.29518072289156627, + "acc_stderr": 0.035509201856896294, + "acc_norm": 0.29518072289156627, + "acc_norm_stderr": 0.035509201856896294 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2508038585209003, + "acc_stderr": 0.024619771956697165, + "acc_norm": 0.2508038585209003, + "acc_norm_stderr": 0.024619771956697165 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3452914798206278, + "acc_stderr": 0.031911001928357954, + "acc_norm": 0.3452914798206278, + "acc_norm_stderr": 0.031911001928357954 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2748091603053435, + "acc_stderr": 0.039153454088478354, + "acc_norm": 0.2748091603053435, + "acc_norm_stderr": 0.039153454088478354 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.23737373737373738, + "acc_stderr": 0.0303137105381989, + "acc_norm": 0.23737373737373738, + "acc_norm_stderr": 0.0303137105381989 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.1724137931034483, + "acc_stderr": 
0.03147830790259575, + "acc_norm": 0.1724137931034483, + "acc_norm_stderr": 0.03147830790259575 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.04023382273617747, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.04023382273617747 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.027553614467863814, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.027553614467863814 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2358974358974359, + "acc_stderr": 0.021525965407408726, + "acc_norm": 0.2358974358974359, + "acc_norm_stderr": 0.021525965407408726 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.25, + "acc_stderr": 0.04186091791394607, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04186091791394607 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2660098522167488, + "acc_stderr": 0.03108982600293752, + "acc_norm": 0.2660098522167488, + "acc_norm_stderr": 0.03108982600293752 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.26129032258064516, + "acc_stderr": 0.024993053397764815, + "acc_norm": 0.26129032258064516, + "acc_norm_stderr": 0.024993053397764815 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.23076923076923078, + "acc_stderr": 0.027601921381417597, + "acc_norm": 0.23076923076923078, + "acc_norm_stderr": 0.027601921381417597 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2679245283018868, + "acc_stderr": 0.027257260322494845, + "acc_norm": 0.2679245283018868, + "acc_norm_stderr": 0.027257260322494845 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 
0.2545454545454545, + "acc_stderr": 0.04172343038705383, + "acc_norm": 0.2545454545454545, + "acc_norm_stderr": 0.04172343038705383 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2851851851851852, + "acc_stderr": 0.027528599210340492, + "acc_norm": 0.2851851851851852, + "acc_norm_stderr": 0.027528599210340492 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2251655629139073, + "acc_stderr": 0.03410435282008936, + "acc_norm": 0.2251655629139073, + "acc_norm_stderr": 0.03410435282008936 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.21393034825870647, + "acc_stderr": 0.02899690969332891, + "acc_norm": 0.21393034825870647, + "acc_norm_stderr": 0.02899690969332891 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.20809248554913296, + "acc_stderr": 0.030952890217749884, + "acc_norm": 0.20809248554913296, + "acc_norm_stderr": 0.030952890217749884 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.25396825396825395, + "acc_stderr": 0.022418042891113946, + "acc_norm": 0.25396825396825395, + "acc_norm_stderr": 0.022418042891113946 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2638888888888889, + "acc_stderr": 0.03685651095897532, + "acc_norm": 0.2638888888888889, + "acc_norm_stderr": 0.03685651095897532 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.17, + "acc_stderr": 0.0377525168068637, + "acc_norm": 0.17, + "acc_norm_stderr": 0.0377525168068637 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.24855491329479767, + "acc_stderr": 0.023267528432100174, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 0.023267528432100174 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.294478527607362, + "acc_stderr": 0.03581165790474082, + "acc_norm": 0.294478527607362, + "acc_norm_stderr": 0.03581165790474082 + }, + 
"harness|ko_mmlu_prehistory|5": { + "acc": 0.26851851851851855, + "acc_stderr": 0.02465968518596728, + "acc_norm": 0.26851851851851855, + "acc_norm_stderr": 0.02465968518596728 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.22279792746113988, + "acc_stderr": 0.030031147977641545, + "acc_norm": 0.22279792746113988, + "acc_norm_stderr": 0.030031147977641545 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.21052631578947367, + "acc_stderr": 0.03835153954399421, + "acc_norm": 0.21052631578947367, + "acc_norm_stderr": 0.03835153954399421 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.21651376146788992, + "acc_stderr": 0.017658710594443135, + "acc_norm": 0.21651376146788992, + "acc_norm_stderr": 0.017658710594443135 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.25396825396825395, + "acc_stderr": 0.03893259610604673, + "acc_norm": 0.25396825396825395, + "acc_norm_stderr": 0.03893259610604673 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.023805186524888142, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.023805186524888142 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.2, + "acc_stderr": 0.040201512610368445, + "acc_norm": 0.2, + "acc_norm_stderr": 0.040201512610368445 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2396694214876033, + "acc_stderr": 0.03896878985070417, + "acc_norm": 0.2396694214876033, + "acc_norm_stderr": 0.03896878985070417 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.19078947368421054, + "acc_stderr": 0.031975658210325, + "acc_norm": 0.19078947368421054, + "acc_norm_stderr": 0.031975658210325 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.017848089574913226, + "acc_norm": 0.2647058823529412, + 
"acc_norm_stderr": 0.017848089574913226 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2553191489361702, + "acc_stderr": 0.026011992930902006, + "acc_norm": 0.2553191489361702, + "acc_norm_stderr": 0.026011992930902006 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.29464285714285715, + "acc_stderr": 0.043270409325787317, + "acc_norm": 0.29464285714285715, + "acc_norm_stderr": 0.043270409325787317 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.39351851851851855, + "acc_stderr": 0.03331747876370312, + "acc_norm": 0.39351851851851855, + "acc_norm_stderr": 0.03331747876370312 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.26927374301675977, + "acc_stderr": 0.014835616582882606, + "acc_norm": 0.26927374301675977, + "acc_norm_stderr": 0.014835616582882606 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.16, + "acc_stderr": 0.03684529491774708, + "acc_norm": 0.16, + "acc_norm_stderr": 0.03684529491774708 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3492647058823529, + "acc_stderr": 0.028959755196824855, + "acc_norm": 0.3492647058823529, + "acc_norm_stderr": 0.028959755196824855 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.24081632653061225, + "acc_stderr": 0.027372942201788163, + "acc_norm": 0.24081632653061225, + "acc_norm_stderr": 0.027372942201788163 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.25738396624472576, + "acc_stderr": 0.0284588209914603, + "acc_norm": 0.25738396624472576, + "acc_norm_stderr": 0.0284588209914603 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.23402868318122555, + "acc_stderr": 0.010813585552659695, + "acc_norm": 0.23402868318122555, + "acc_norm_stderr": 0.010813585552659695 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 
0.22549019607843138, + "acc_stderr": 0.02933116229425173, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.02933116229425173 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.034531318018854146, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.034531318018854146 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.25703794369645044, + "mc1_stderr": 0.015298077509485081, + "mc2": 0.42171298348482467, + "mc2_stderr": 0.015178113043491578 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.28807556080283353, + "acc_stderr": 0.015569869674838373, + "acc_norm": 0.3364817001180638, + "acc_norm_stderr": 0.01624508529438656 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + 
"harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "mu0gum/AIFT-polyglot-ko-1.3b-ao-instruct-v0.91", + "model_sha": "a8c813a880f63bfca7ab510f3aa07e31d715abff", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/mu0gum/polyglot-ko-1.3b-slim_orca_10000-epoch2/result_2024-01-10 11:36:21.json b/mu0gum/polyglot-ko-1.3b-slim_orca_10000-epoch2/result_2024-01-10 11:36:21.json new file mode 100644 index 
0000000000000000000000000000000000000000..9fc74db8faab8541614e70569d58f30e1a66bab4 --- /dev/null +++ b/mu0gum/polyglot-ko-1.3b-slim_orca_10000-epoch2/result_2024-01-10 11:36:21.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2235494880546075, + "acc_stderr": 0.012174896631202607, + "acc_norm": 0.26706484641638223, + "acc_norm_stderr": 0.012928933196496342 + }, + "harness|ko_hellaswag|10": { + "acc": 0.32991435968930494, + "acc_stderr": 0.004692208279690582, + "acc_norm": 0.4058952399920335, + "acc_norm_stderr": 0.00490060852977861 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.03446296217088426, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.03446296217088426 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2524271844660194, + "acc_stderr": 0.04301250399690877, + "acc_norm": 0.2524271844660194, + "acc_norm_stderr": 0.04301250399690877 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.23116219667943805, + "acc_stderr": 0.015075523238101084, + "acc_norm": 0.23116219667943805, + "acc_norm_stderr": 0.015075523238101084 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.21481481481481482, + "acc_stderr": 0.03547854198560826, + "acc_norm": 0.21481481481481482, + "acc_norm_stderr": 0.03547854198560826 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.20425531914893616, + "acc_stderr": 0.026355158413349424, + "acc_norm": 0.20425531914893616, + "acc_norm_stderr": 0.026355158413349424 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.19879518072289157, + "acc_stderr": 0.031069390260789406, + "acc_norm": 0.19879518072289157, + "acc_norm_stderr": 0.031069390260789406 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.24115755627009647, + "acc_stderr": 0.024296594034763426, + "acc_norm": 0.24115755627009647, + 
"acc_norm_stderr": 0.024296594034763426 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.13004484304932734, + "acc_stderr": 0.02257451942417487, + "acc_norm": 0.13004484304932734, + "acc_norm_stderr": 0.02257451942417487 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.22900763358778625, + "acc_stderr": 0.036853466317118506, + "acc_norm": 0.22900763358778625, + "acc_norm_stderr": 0.036853466317118506 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.30303030303030304, + "acc_stderr": 0.03274287914026867, + "acc_norm": 0.30303030303030304, + "acc_norm_stderr": 0.03274287914026867 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.25517241379310346, + "acc_stderr": 0.03632984052707842, + "acc_norm": 0.25517241379310346, + "acc_norm_stderr": 0.03632984052707842 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.04389869956808778, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.04389869956808778 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3487394957983193, + "acc_stderr": 0.030956636328566548, + "acc_norm": 0.3487394957983193, + "acc_norm_stderr": 0.030956636328566548 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.36153846153846153, + "acc_stderr": 0.024359581465396983, + "acc_norm": 0.36153846153846153, + "acc_norm_stderr": 0.024359581465396983 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932269, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932269 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.040191074725573483, + "acc_norm": 
0.2222222222222222, + "acc_norm_stderr": 0.040191074725573483 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.27586206896551724, + "acc_stderr": 0.03144712581678242, + "acc_norm": 0.27586206896551724, + "acc_norm_stderr": 0.03144712581678242 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3096774193548387, + "acc_stderr": 0.026302774983517418, + "acc_norm": 0.3096774193548387, + "acc_norm_stderr": 0.026302774983517418 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.21367521367521367, + "acc_stderr": 0.026853450377009164, + "acc_norm": 0.21367521367521367, + "acc_norm_stderr": 0.026853450377009164 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.27169811320754716, + "acc_stderr": 0.027377706624670713, + "acc_norm": 0.27169811320754716, + "acc_norm_stderr": 0.027377706624670713 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.22727272727272727, + "acc_stderr": 0.04013964554072773, + "acc_norm": 0.22727272727272727, + "acc_norm_stderr": 0.04013964554072773 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.28888888888888886, + "acc_stderr": 0.027634907264178544, + "acc_norm": 0.28888888888888886, + "acc_norm_stderr": 0.027634907264178544 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.25165562913907286, + "acc_stderr": 0.03543304234389985, + "acc_norm": 0.25165562913907286, + "acc_norm_stderr": 0.03543304234389985 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.2537313432835821, + "acc_stderr": 0.03076944496729601, + "acc_norm": 0.2537313432835821, + "acc_norm_stderr": 0.03076944496729601 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2543352601156069, + "acc_stderr": 0.0332055644308557, + "acc_norm": 0.2543352601156069, + "acc_norm_stderr": 0.0332055644308557 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.25132275132275134, + "acc_stderr": 0.022340482339643898, + "acc_norm": 0.25132275132275134, + "acc_norm_stderr": 0.022340482339643898 + }, + 
"harness|ko_mmlu_college_biology|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.037455547914624576, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.037455547914624576 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2398843930635838, + "acc_stderr": 0.022989592543123567, + "acc_norm": 0.2398843930635838, + "acc_norm_stderr": 0.022989592543123567 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2392638036809816, + "acc_stderr": 0.03351953879521271, + "acc_norm": 0.2392638036809816, + "acc_norm_stderr": 0.03351953879521271 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.23148148148148148, + "acc_stderr": 0.023468429832451163, + "acc_norm": 0.23148148148148148, + "acc_norm_stderr": 0.023468429832451163 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421296, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421296 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.36787564766839376, + "acc_stderr": 0.034801756684660366, + "acc_norm": 0.36787564766839376, + "acc_norm_stderr": 0.034801756684660366 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.039994238792813365, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.039994238792813365 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3357798165137615, + "acc_stderr": 0.02024808139675293, + "acc_norm": 0.3357798165137615, + "acc_norm_stderr": 0.02024808139675293 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3412698412698413, + "acc_stderr": 0.04240799327574924, + "acc_norm": 0.3412698412698413, + "acc_norm_stderr": 
0.04240799327574924 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2908496732026144, + "acc_stderr": 0.026004800363952113, + "acc_norm": 0.2908496732026144, + "acc_norm_stderr": 0.026004800363952113 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.18, + "acc_stderr": 0.03861229196653697, + "acc_norm": 0.18, + "acc_norm_stderr": 0.03861229196653697 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.35537190082644626, + "acc_stderr": 0.04369236326573981, + "acc_norm": 0.35537190082644626, + "acc_norm_stderr": 0.04369236326573981 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.32894736842105265, + "acc_stderr": 0.03823428969926604, + "acc_norm": 0.32894736842105265, + "acc_norm_stderr": 0.03823428969926604 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.22058823529411764, + "acc_stderr": 0.016774672365468528, + "acc_norm": 0.22058823529411764, + "acc_norm_stderr": 0.016774672365468528 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.25886524822695034, + "acc_stderr": 0.026129572527180844, + "acc_norm": 0.25886524822695034, + "acc_norm_stderr": 0.026129572527180844 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.13392857142857142, + "acc_stderr": 0.032326001191085627, + "acc_norm": 0.13392857142857142, + "acc_norm_stderr": 0.032326001191085627 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4675925925925926, + "acc_stderr": 0.03402801581358966, + "acc_norm": 0.4675925925925926, + "acc_norm_stderr": 0.03402801581358966 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27262569832402234, + "acc_stderr": 0.014893391735249608, + "acc_norm": 0.27262569832402234, + "acc_norm_stderr": 0.014893391735249608 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + 
"acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.44485294117647056, + "acc_stderr": 0.030187532060329383, + "acc_norm": 0.44485294117647056, + "acc_norm_stderr": 0.030187532060329383 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.24897959183673468, + "acc_stderr": 0.027682979522960224, + "acc_norm": 0.24897959183673468, + "acc_norm_stderr": 0.027682979522960224 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.24050632911392406, + "acc_stderr": 0.027820781981149675, + "acc_norm": 0.24050632911392406, + "acc_norm_stderr": 0.027820781981149675 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.23989569752281617, + "acc_stderr": 0.010906282617981655, + "acc_norm": 0.23989569752281617, + "acc_norm_stderr": 0.010906282617981655 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.031321798030832904, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.031321798030832904 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2787878787878788, + "acc_stderr": 0.03501438706296781, + "acc_norm": 0.2787878787878788, + "acc_norm_stderr": 0.03501438706296781 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24479804161566707, + "mc1_stderr": 0.01505186948671501, + "mc2": 0.40586360458546566, + "mc2_stderr": 0.014979447412682959 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2987012987012987, + "acc_stderr": 0.01573565739143829, + "acc_norm": 0.36481700118063753, + "acc_norm_stderr": 0.016550144337046595 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + 
"harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 
1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "mu0gum/polyglot-ko-1.3b-slim_orca_10000-epoch2", + "model_sha": "25b492a411a52422f576e875a68f9c5491e976c0", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/mumu-97/SOLAR-KO-various-v0.1/result_2024-02-01 01:17:12.json b/mumu-97/SOLAR-KO-various-v0.1/result_2024-02-01 01:17:12.json new file mode 100644 index 0000000000000000000000000000000000000000..9af4c423ad60ef282e771293b9bde91b1a57f217 --- /dev/null +++ b/mumu-97/SOLAR-KO-various-v0.1/result_2024-02-01 01:17:12.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4453924914675768, + "acc_stderr": 0.014523987638344083, + "acc_norm": 0.4974402730375427, + "acc_norm_stderr": 0.014611199329843774 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4448317068313085, + "acc_stderr": 0.00495931519801116, + "acc_norm": 0.6058554072893846, + "acc_norm_stderr": 0.004876674814874706 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6491228070175439, + "acc_stderr": 0.03660298834049162, + "acc_norm": 0.6491228070175439, + "acc_norm_stderr": 0.03660298834049162 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6699029126213593, + "acc_stderr": 0.0465614711001235, + "acc_norm": 0.6699029126213593, + "acc_norm_stderr": 0.0465614711001235 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6934865900383141, + "acc_stderr": 0.01648695289304152, + "acc_norm": 0.6934865900383141, + "acc_norm_stderr": 0.01648695289304152 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 
0.4666666666666667, + "acc_stderr": 0.043097329010363554, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.043097329010363554 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.44680851063829785, + "acc_stderr": 0.0325005368436584, + "acc_norm": 0.44680851063829785, + "acc_norm_stderr": 0.0325005368436584 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4759036144578313, + "acc_stderr": 0.03887971849597264, + "acc_norm": 0.4759036144578313, + "acc_norm_stderr": 0.03887971849597264 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5466237942122186, + "acc_stderr": 0.02827435985489424, + "acc_norm": 0.5466237942122186, + "acc_norm_stderr": 0.02827435985489424 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5560538116591929, + "acc_stderr": 0.03334625674242728, + "acc_norm": 0.5560538116591929, + "acc_norm_stderr": 0.03334625674242728 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.549618320610687, + "acc_stderr": 0.04363643698524779, + "acc_norm": 0.549618320610687, + "acc_norm_stderr": 0.04363643698524779 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.51, + "acc_stderr": 0.050241839379569095, + "acc_norm": 0.51, + "acc_norm_stderr": 0.050241839379569095 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.03358618145732524, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.03358618145732524 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4482758620689655, + "acc_stderr": 0.04144311810878151, + "acc_norm": 0.4482758620689655, + "acc_norm_stderr": 0.04144311810878151 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.04280105837364395, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.04280105837364395 + }, + 
"harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.46638655462184875, + "acc_stderr": 0.03240501447690071, + "acc_norm": 0.46638655462184875, + "acc_norm_stderr": 0.03240501447690071 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5128205128205128, + "acc_stderr": 0.025342671293807247, + "acc_norm": 0.5128205128205128, + "acc_norm_stderr": 0.025342671293807247 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.62, + "acc_stderr": 0.04878317312145631, + "acc_norm": 0.62, + "acc_norm_stderr": 0.04878317312145631 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.04803752235190192, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.04803752235190192 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3793103448275862, + "acc_stderr": 0.03413963805906235, + "acc_norm": 0.3793103448275862, + "acc_norm_stderr": 0.03413963805906235 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5580645161290323, + "acc_stderr": 0.028251557906849748, + "acc_norm": 0.5580645161290323, + "acc_norm_stderr": 0.028251557906849748 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7735042735042735, + "acc_stderr": 0.027421007295392923, + "acc_norm": 0.7735042735042735, + "acc_norm_stderr": 0.027421007295392923 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5018867924528302, + "acc_stderr": 0.030772653642075664, + "acc_norm": 0.5018867924528302, + "acc_norm_stderr": 0.030772653642075664 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.029116617606083032, + "acc_norm": 0.35185185185185186, 
+ "acc_norm_stderr": 0.029116617606083032 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.36423841059602646, + "acc_stderr": 0.03929111781242742, + "acc_norm": 0.36423841059602646, + "acc_norm_stderr": 0.03929111781242742 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6965174129353234, + "acc_stderr": 0.03251006816458618, + "acc_norm": 0.6965174129353234, + "acc_norm_stderr": 0.03251006816458618 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4046242774566474, + "acc_stderr": 0.03742461193887249, + "acc_norm": 0.4046242774566474, + "acc_norm_stderr": 0.03742461193887249 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3386243386243386, + "acc_stderr": 0.02437319786798307, + "acc_norm": 0.3386243386243386, + "acc_norm_stderr": 0.02437319786798307 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5, + "acc_stderr": 0.04181210050035455, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04181210050035455 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.66, + "acc_stderr": 0.04760952285695238, + "acc_norm": 0.66, + "acc_norm_stderr": 0.04760952285695238 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4913294797687861, + "acc_stderr": 0.026915047355369804, + "acc_norm": 0.4913294797687861, + "acc_norm_stderr": 0.026915047355369804 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5030674846625767, + "acc_stderr": 0.03928297078179663, + "acc_norm": 0.5030674846625767, + "acc_norm_stderr": 0.03928297078179663 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5401234567901234, + "acc_stderr": 0.027731022753539277, + "acc_norm": 0.5401234567901234, + "acc_norm_stderr": 0.027731022753539277 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 
0.04725815626252605 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6062176165803109, + "acc_stderr": 0.0352607709554824, + "acc_norm": 0.6062176165803109, + "acc_norm_stderr": 0.0352607709554824 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.34210526315789475, + "acc_stderr": 0.04462917535336937, + "acc_norm": 0.34210526315789475, + "acc_norm_stderr": 0.04462917535336937 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5908256880733945, + "acc_stderr": 0.021080670264433738, + "acc_norm": 0.5908256880733945, + "acc_norm_stderr": 0.021080670264433738 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30158730158730157, + "acc_stderr": 0.04104947269903394, + "acc_norm": 0.30158730158730157, + "acc_norm_stderr": 0.04104947269903394 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4869281045751634, + "acc_stderr": 0.028620130800700246, + "acc_norm": 0.4869281045751634, + "acc_norm_stderr": 0.028620130800700246 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.53, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.53, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6694214876033058, + "acc_stderr": 0.04294340845212093, + "acc_norm": 0.6694214876033058, + "acc_norm_stderr": 0.04294340845212093 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5131578947368421, + "acc_stderr": 0.04067533136309172, + "acc_norm": 0.5131578947368421, + "acc_norm_stderr": 0.04067533136309172 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.43790849673202614, + "acc_stderr": 0.02007125788688653, + "acc_norm": 0.43790849673202614, + "acc_norm_stderr": 0.02007125788688653 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.34397163120567376, + "acc_stderr": 0.028338017428611327, + "acc_norm": 0.34397163120567376, + "acc_norm_stderr": 0.028338017428611327 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2767857142857143, + "acc_stderr": 
0.04246624336697624, + "acc_norm": 0.2767857142857143, + "acc_norm_stderr": 0.04246624336697624 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3194444444444444, + "acc_stderr": 0.03179876342176851, + "acc_norm": 0.3194444444444444, + "acc_norm_stderr": 0.03179876342176851 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2748603351955307, + "acc_stderr": 0.01493131670322051, + "acc_norm": 0.2748603351955307, + "acc_norm_stderr": 0.01493131670322051 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.62, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.62, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3897058823529412, + "acc_stderr": 0.029624663581159685, + "acc_norm": 0.3897058823529412, + "acc_norm_stderr": 0.029624663581159685 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5469387755102041, + "acc_stderr": 0.031867859300041275, + "acc_norm": 0.5469387755102041, + "acc_norm_stderr": 0.031867859300041275 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.70042194092827, + "acc_stderr": 0.029818024749753095, + "acc_norm": 0.70042194092827, + "acc_norm_stderr": 0.029818024749753095 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3344198174706649, + "acc_stderr": 0.012049668983214934, + "acc_norm": 0.3344198174706649, + "acc_norm_stderr": 0.012049668983214934 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5980392156862745, + "acc_stderr": 0.03441190023482465, + "acc_norm": 0.5980392156862745, + "acc_norm_stderr": 0.03441190023482465 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.593939393939394, + "acc_stderr": 0.03834816355401181, + "acc_norm": 0.593939393939394, + "acc_norm_stderr": 0.03834816355401181 + }, + 
"harness|ko_truthfulqa_mc|0": { + "mc1": 0.29498164014687883, + "mc1_stderr": 0.015964400965589667, + "mc2": 0.45301341553948044, + "mc2_stderr": 0.01508425397745297 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.6080283353010626, + "acc_stderr": 0.016784332119424084, + "acc_norm": 0.6422668240850059, + "acc_norm_stderr": 0.016479808935749983 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "mumu-97/SOLAR-KO-various-v0.1", + "model_sha": "cf2a9e9a99181388685b7ff68bc6f340c8385a04", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/nakhyeon/llama-2-ko-qlora4/result_2023-10-20 05:31:57.json b/nakhyeon/llama-2-ko-qlora4/result_2023-10-20 05:31:57.json new file mode 100644 index 0000000000000000000000000000000000000000..827919db8ac6777d56b56cfabd0f507b44b2fc6e --- /dev/null +++ b/nakhyeon/llama-2-ko-qlora4/result_2023-10-20 05:31:57.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3191126279863481, + "acc_stderr": 0.0136216961191733, + "acc_norm": 0.38139931740614336, + "acc_norm_stderr": 0.01419438908668525 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3826926906990639, + "acc_stderr": 
0.004850508945116094, + "acc_norm": 0.49571798446524595, + "acc_norm_stderr": 0.004989598426249537 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.03377310252209194, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.03377310252209194 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2815533980582524, + "acc_stderr": 0.044532548363264673, + "acc_norm": 0.2815533980582524, + "acc_norm_stderr": 0.044532548363264673 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3371647509578544, + "acc_stderr": 0.01690520742080355, + "acc_norm": 0.3371647509578544, + "acc_norm_stderr": 0.01690520742080355 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04072314811876837, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04072314811876837 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421255, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421255 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2978723404255319, + "acc_stderr": 0.02989614568209546, + "acc_norm": 0.2978723404255319, + "acc_norm_stderr": 0.02989614568209546 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3433734939759036, + "acc_stderr": 0.03696584317010602, + "acc_norm": 0.3433734939759036, + "acc_norm_stderr": 0.03696584317010602 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3279742765273312, + "acc_stderr": 0.0266644108869376, + "acc_norm": 0.3279742765273312, + "acc_norm_stderr": 0.0266644108869376 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3094170403587444, + "acc_stderr": 0.031024411740572196, + "acc_norm": 0.3094170403587444, + "acc_norm_stderr": 0.031024411740572196 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.40458015267175573, + "acc_stderr": 0.043046937953806645, + "acc_norm": 0.40458015267175573, + "acc_norm_stderr": 0.043046937953806645 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.31, + "acc_stderr": 
0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.398989898989899, + "acc_stderr": 0.03488901616852732, + "acc_norm": 0.398989898989899, + "acc_norm_stderr": 0.03488901616852732 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.31724137931034485, + "acc_stderr": 0.038783523721386236, + "acc_norm": 0.31724137931034485, + "acc_norm_stderr": 0.038783523721386236 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.19607843137254902, + "acc_stderr": 0.03950581861179962, + "acc_norm": 0.19607843137254902, + "acc_norm_stderr": 0.03950581861179962 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.31512605042016806, + "acc_stderr": 0.03017680828897434, + "acc_norm": 0.31512605042016806, + "acc_norm_stderr": 0.03017680828897434 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.28205128205128205, + "acc_stderr": 0.022815813098896628, + "acc_norm": 0.28205128205128205, + "acc_norm_stderr": 0.022815813098896628 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.044531975073749834, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.044531975073749834 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.28078817733990147, + "acc_stderr": 0.03161856335358609, + "acc_norm": 0.28078817733990147, + "acc_norm_stderr": 0.03161856335358609 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3064516129032258, + "acc_stderr": 0.02622648565255388, + "acc_norm": 0.3064516129032258, + "acc_norm_stderr": 0.02622648565255388 + }, + "harness|ko_mmlu_marketing|5": { + 
"acc": 0.36324786324786323, + "acc_stderr": 0.03150712523091264, + "acc_norm": 0.36324786324786323, + "acc_norm_stderr": 0.03150712523091264 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.32452830188679244, + "acc_stderr": 0.028815615713432115, + "acc_norm": 0.32452830188679244, + "acc_norm_stderr": 0.028815615713432115 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.36363636363636365, + "acc_stderr": 0.04607582090719976, + "acc_norm": 0.36363636363636365, + "acc_norm_stderr": 0.04607582090719976 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.026842057873833706, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.026842057873833706 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.037804458505267334, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.037804458505267334 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.3383084577114428, + "acc_stderr": 0.03345563070339192, + "acc_norm": 0.3383084577114428, + "acc_norm_stderr": 0.03345563070339192 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2543352601156069, + "acc_stderr": 0.0332055644308557, + "acc_norm": 0.2543352601156069, + "acc_norm_stderr": 0.0332055644308557 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.25396825396825395, + "acc_stderr": 0.022418042891113942, + "acc_norm": 0.25396825396825395, + "acc_norm_stderr": 0.022418042891113942 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2152777777777778, + "acc_stderr": 0.03437079344106134, + "acc_norm": 0.2152777777777778, + "acc_norm_stderr": 0.03437079344106134 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + 
"harness|ko_mmlu_moral_disputes|5": { + "acc": 0.32947976878612717, + "acc_stderr": 0.0253052581318797, + "acc_norm": 0.32947976878612717, + "acc_norm_stderr": 0.0253052581318797 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.25766871165644173, + "acc_stderr": 0.03436150827846917, + "acc_norm": 0.25766871165644173, + "acc_norm_stderr": 0.03436150827846917 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.32407407407407407, + "acc_stderr": 0.02604176620271716, + "acc_norm": 0.32407407407407407, + "acc_norm_stderr": 0.02604176620271716 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.27979274611398963, + "acc_stderr": 0.03239637046735704, + "acc_norm": 0.27979274611398963, + "acc_norm_stderr": 0.03239637046735704 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.041424397194893624, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.041424397194893624 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.381651376146789, + "acc_stderr": 0.020828148517022593, + "acc_norm": 0.381651376146789, + "acc_norm_stderr": 0.020828148517022593 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.20634920634920634, + "acc_stderr": 0.03619604524124249, + "acc_norm": 0.20634920634920634, + "acc_norm_stderr": 0.03619604524124249 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.369281045751634, + "acc_stderr": 0.02763417668960266, + "acc_norm": 0.369281045751634, + "acc_norm_stderr": 0.02763417668960266 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.3884297520661157, + "acc_stderr": 0.04449270350068382, + "acc_norm": 0.3884297520661157, + "acc_norm_stderr": 
0.04449270350068382 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3618421052631579, + "acc_stderr": 0.03910525752849724, + "acc_norm": 0.3618421052631579, + "acc_norm_stderr": 0.03910525752849724 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.018054027458815194, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.018054027458815194 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2801418439716312, + "acc_stderr": 0.026789172351140242, + "acc_norm": 0.2801418439716312, + "acc_norm_stderr": 0.026789172351140242 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.04287858751340456, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.04287858751340456 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.35648148148148145, + "acc_stderr": 0.03266478331527272, + "acc_norm": 0.35648148148148145, + "acc_norm_stderr": 0.03266478331527272 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24134078212290502, + "acc_stderr": 0.014310999547961438, + "acc_norm": 0.24134078212290502, + "acc_norm_stderr": 0.014310999547961438 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816507, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816507 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3897058823529412, + "acc_stderr": 0.02962466358115969, + "acc_norm": 0.3897058823529412, + "acc_norm_stderr": 0.02962466358115969 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2938775510204082, + "acc_stderr": 0.029162738410249772, + "acc_norm": 0.2938775510204082, + "acc_norm_stderr": 0.029162738410249772 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.3628691983122363, + "acc_stderr": 
0.031299208255302136, + "acc_norm": 0.3628691983122363, + "acc_norm_stderr": 0.031299208255302136 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2900912646675359, + "acc_stderr": 0.011590375554733095, + "acc_norm": 0.2900912646675359, + "acc_norm_stderr": 0.011590375554733095 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2696078431372549, + "acc_stderr": 0.03114557065948678, + "acc_norm": 0.2696078431372549, + "acc_norm_stderr": 0.03114557065948678 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.30303030303030304, + "acc_stderr": 0.035886248000917075, + "acc_norm": 0.30303030303030304, + "acc_norm_stderr": 0.035886248000917075 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.23133414932680538, + "mc1_stderr": 0.014761945174862677, + "mc2": 0.37061566370146265, + "mc2_stderr": 0.014735163251703702 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.28807556080283353, + "acc_stderr": 0.01556986967483836, + "acc_norm": 0.4155844155844156, + "acc_norm_stderr": 0.016943586313076575 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + 
"harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "nakhyeon/llama-2-ko-qlora4", + "model_sha": "759cf82ec24f0bd625edfa916f22701d30517591", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + 
"override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/nakhyeon/llama-ko-qlora-1024/result_2023-10-21 07:57:16.json b/nakhyeon/llama-ko-qlora-1024/result_2023-10-21 07:57:16.json new file mode 100644 index 0000000000000000000000000000000000000000..3f5f860e021c2fc2e6c43701301225326c82b3d4 --- /dev/null +++ b/nakhyeon/llama-ko-qlora-1024/result_2023-10-21 07:57:16.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3191126279863481, + "acc_stderr": 0.0136216961191733, + "acc_norm": 0.38139931740614336, + "acc_norm_stderr": 0.01419438908668525 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3826926906990639, + "acc_stderr": 0.004850508945116094, + "acc_norm": 0.49571798446524595, + "acc_norm_stderr": 0.004989598426249537 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.03377310252209194, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.03377310252209194 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2815533980582524, + "acc_stderr": 0.044532548363264673, + "acc_norm": 0.2815533980582524, + "acc_norm_stderr": 0.044532548363264673 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3371647509578544, + "acc_stderr": 0.01690520742080355, + "acc_norm": 0.3371647509578544, + "acc_norm_stderr": 0.01690520742080355 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04072314811876837, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04072314811876837 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421255, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421255 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2978723404255319, + "acc_stderr": 0.02989614568209546, + "acc_norm": 0.2978723404255319, + "acc_norm_stderr": 0.02989614568209546 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3433734939759036, + "acc_stderr": 0.03696584317010602, + 
"acc_norm": 0.3433734939759036, + "acc_norm_stderr": 0.03696584317010602 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3279742765273312, + "acc_stderr": 0.0266644108869376, + "acc_norm": 0.3279742765273312, + "acc_norm_stderr": 0.0266644108869376 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3094170403587444, + "acc_stderr": 0.031024411740572196, + "acc_norm": 0.3094170403587444, + "acc_norm_stderr": 0.031024411740572196 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.40458015267175573, + "acc_stderr": 0.043046937953806645, + "acc_norm": 0.40458015267175573, + "acc_norm_stderr": 0.043046937953806645 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.398989898989899, + "acc_stderr": 0.03488901616852732, + "acc_norm": 0.398989898989899, + "acc_norm_stderr": 0.03488901616852732 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.31724137931034485, + "acc_stderr": 0.038783523721386236, + "acc_norm": 0.31724137931034485, + "acc_norm_stderr": 0.038783523721386236 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.19607843137254902, + "acc_stderr": 0.03950581861179962, + "acc_norm": 0.19607843137254902, + "acc_norm_stderr": 0.03950581861179962 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.31512605042016806, + "acc_stderr": 0.03017680828897434, + "acc_norm": 0.31512605042016806, + "acc_norm_stderr": 0.03017680828897434 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.28205128205128205, + "acc_stderr": 0.022815813098896628, + "acc_norm": 0.28205128205128205, + "acc_norm_stderr": 0.022815813098896628 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.27, + 
"acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.044531975073749834, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.044531975073749834 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.28078817733990147, + "acc_stderr": 0.03161856335358609, + "acc_norm": 0.28078817733990147, + "acc_norm_stderr": 0.03161856335358609 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3064516129032258, + "acc_stderr": 0.02622648565255388, + "acc_norm": 0.3064516129032258, + "acc_norm_stderr": 0.02622648565255388 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.36324786324786323, + "acc_stderr": 0.03150712523091264, + "acc_norm": 0.36324786324786323, + "acc_norm_stderr": 0.03150712523091264 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.32452830188679244, + "acc_stderr": 0.028815615713432115, + "acc_norm": 0.32452830188679244, + "acc_norm_stderr": 0.028815615713432115 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.36363636363636365, + "acc_stderr": 0.04607582090719976, + "acc_norm": 0.36363636363636365, + "acc_norm_stderr": 0.04607582090719976 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.026842057873833706, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.026842057873833706 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.037804458505267334, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.037804458505267334 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.3383084577114428, + "acc_stderr": 0.03345563070339192, + "acc_norm": 0.3383084577114428, + "acc_norm_stderr": 0.03345563070339192 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2543352601156069, + "acc_stderr": 0.0332055644308557, + "acc_norm": 0.2543352601156069, + "acc_norm_stderr": 0.0332055644308557 
+ }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.25396825396825395, + "acc_stderr": 0.022418042891113942, + "acc_norm": 0.25396825396825395, + "acc_norm_stderr": 0.022418042891113942 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2152777777777778, + "acc_stderr": 0.03437079344106134, + "acc_norm": 0.2152777777777778, + "acc_norm_stderr": 0.03437079344106134 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.32947976878612717, + "acc_stderr": 0.0253052581318797, + "acc_norm": 0.32947976878612717, + "acc_norm_stderr": 0.0253052581318797 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.25766871165644173, + "acc_stderr": 0.03436150827846917, + "acc_norm": 0.25766871165644173, + "acc_norm_stderr": 0.03436150827846917 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.32407407407407407, + "acc_stderr": 0.02604176620271716, + "acc_norm": 0.32407407407407407, + "acc_norm_stderr": 0.02604176620271716 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.27979274611398963, + "acc_stderr": 0.03239637046735704, + "acc_norm": 0.27979274611398963, + "acc_norm_stderr": 0.03239637046735704 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.041424397194893624, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.041424397194893624 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.381651376146789, + "acc_stderr": 0.020828148517022593, + "acc_norm": 0.381651376146789, + "acc_norm_stderr": 
0.020828148517022593 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.20634920634920634, + "acc_stderr": 0.03619604524124249, + "acc_norm": 0.20634920634920634, + "acc_norm_stderr": 0.03619604524124249 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.369281045751634, + "acc_stderr": 0.02763417668960266, + "acc_norm": 0.369281045751634, + "acc_norm_stderr": 0.02763417668960266 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.3884297520661157, + "acc_stderr": 0.04449270350068382, + "acc_norm": 0.3884297520661157, + "acc_norm_stderr": 0.04449270350068382 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3618421052631579, + "acc_stderr": 0.03910525752849724, + "acc_norm": 0.3618421052631579, + "acc_norm_stderr": 0.03910525752849724 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.018054027458815194, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.018054027458815194 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2801418439716312, + "acc_stderr": 0.026789172351140242, + "acc_norm": 0.2801418439716312, + "acc_norm_stderr": 0.026789172351140242 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.04287858751340456, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.04287858751340456 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.35648148148148145, + "acc_stderr": 0.03266478331527272, + "acc_norm": 0.35648148148148145, + "acc_norm_stderr": 0.03266478331527272 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24134078212290502, + "acc_stderr": 0.014310999547961438, + "acc_norm": 0.24134078212290502, + "acc_norm_stderr": 0.014310999547961438 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + 
"acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816507, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816507 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3897058823529412, + "acc_stderr": 0.02962466358115969, + "acc_norm": 0.3897058823529412, + "acc_norm_stderr": 0.02962466358115969 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2938775510204082, + "acc_stderr": 0.029162738410249772, + "acc_norm": 0.2938775510204082, + "acc_norm_stderr": 0.029162738410249772 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.3628691983122363, + "acc_stderr": 0.031299208255302136, + "acc_norm": 0.3628691983122363, + "acc_norm_stderr": 0.031299208255302136 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2900912646675359, + "acc_stderr": 0.011590375554733095, + "acc_norm": 0.2900912646675359, + "acc_norm_stderr": 0.011590375554733095 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2696078431372549, + "acc_stderr": 0.03114557065948678, + "acc_norm": 0.2696078431372549, + "acc_norm_stderr": 0.03114557065948678 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.30303030303030304, + "acc_stderr": 0.035886248000917075, + "acc_norm": 0.30303030303030304, + "acc_norm_stderr": 0.035886248000917075 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.23133414932680538, + "mc1_stderr": 0.014761945174862677, + "mc2": 0.37061566370146265, + "mc2_stderr": 0.014735163251703702 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.28807556080283353, + "acc_stderr": 0.01556986967483836, + "acc_norm": 0.4155844155844156, + "acc_norm_stderr": 0.016943586313076575 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + 
"harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + 
"harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "nakhyeon/llama-ko-qlora-1024", + "model_sha": "10f5e7aa49eb466a26eb3c696b72fff0e003a954", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/nakhyeon/polyglot-ko-12b-qlora/result_2023-11-04 05:48:27.json b/nakhyeon/polyglot-ko-12b-qlora/result_2023-11-04 05:48:27.json new file mode 100644 index 0000000000000000000000000000000000000000..d01e216052e8b0c0ed2cc2a55303326bd87a3354 --- /dev/null +++ b/nakhyeon/polyglot-ko-12b-qlora/result_2023-11-04 05:48:27.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2858361774744027, + "acc_stderr": 0.013203196088537365, + "acc_norm": 0.33447098976109213, + "acc_norm_stderr": 0.013787460322441374 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3855805616411073, + "acc_stderr": 0.004857374133246887, + "acc_norm": 0.5027882891854212, + "acc_norm_stderr": 0.004989703824167094 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.30994152046783624, + "acc_stderr": 0.03546976959393161, + "acc_norm": 0.30994152046783624, + "acc_norm_stderr": 0.03546976959393161 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.18446601941747573, + "acc_stderr": 0.03840423627288276, + "acc_norm": 0.18446601941747573, + "acc_norm_stderr": 0.03840423627288276 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 
0.25925925925925924, + "acc_stderr": 0.015671006009339572, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.015671006009339572 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.22962962962962963, + "acc_stderr": 0.036333844140734636, + "acc_norm": 0.22962962962962963, + "acc_norm_stderr": 0.036333844140734636 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2127659574468085, + "acc_stderr": 0.026754391348039787, + "acc_norm": 0.2127659574468085, + "acc_norm_stderr": 0.026754391348039787 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.21686746987951808, + "acc_stderr": 0.03208284450356365, + "acc_norm": 0.21686746987951808, + "acc_norm_stderr": 0.03208284450356365 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.31189710610932475, + "acc_stderr": 0.02631185807185416, + "acc_norm": 0.31189710610932475, + "acc_norm_stderr": 0.02631185807185416 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.20179372197309417, + "acc_stderr": 0.02693611191280227, + "acc_norm": 0.20179372197309417, + "acc_norm_stderr": 0.02693611191280227 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.22900763358778625, + "acc_stderr": 0.036853466317118506, + "acc_norm": 0.22900763358778625, + "acc_norm_stderr": 0.036853466317118506 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.24242424242424243, + "acc_stderr": 0.03053289223393203, + "acc_norm": 0.24242424242424243, + "acc_norm_stderr": 0.03053289223393203 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.296551724137931, + "acc_stderr": 0.038061426873099935, + "acc_norm": 0.296551724137931, + "acc_norm_stderr": 0.038061426873099935 + }, + 
"harness|ko_mmlu_college_physics|5": { + "acc": 0.30392156862745096, + "acc_stderr": 0.045766654032077615, + "acc_norm": 0.30392156862745096, + "acc_norm_stderr": 0.045766654032077615 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.23109243697478993, + "acc_stderr": 0.027381406927868963, + "acc_norm": 0.23109243697478993, + "acc_norm_stderr": 0.027381406927868963 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.21025641025641026, + "acc_stderr": 0.020660597485026928, + "acc_norm": 0.21025641025641026, + "acc_norm_stderr": 0.020660597485026928 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.25, + "acc_stderr": 0.04186091791394607, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04186091791394607 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.26108374384236455, + "acc_stderr": 0.0309037969521145, + "acc_norm": 0.26108374384236455, + "acc_norm_stderr": 0.0309037969521145 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.25161290322580643, + "acc_stderr": 0.024685979286239963, + "acc_norm": 0.25161290322580643, + "acc_norm_stderr": 0.024685979286239963 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.23076923076923078, + "acc_stderr": 0.027601921381417604, + "acc_norm": 0.23076923076923078, + "acc_norm_stderr": 0.027601921381417604 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.23773584905660378, + "acc_stderr": 0.026199808807561932, + "acc_norm": 0.23773584905660378, + "acc_norm_stderr": 0.026199808807561932 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03955932861795833, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 
0.03955932861795833 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.02684205787383371, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.02684205787383371 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + "acc_stderr": 0.03684881521389024, + "acc_norm": 0.2847682119205298, + "acc_norm_stderr": 0.03684881521389024 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.263681592039801, + "acc_stderr": 0.03115715086935554, + "acc_norm": 0.263681592039801, + "acc_norm_stderr": 0.03115715086935554 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.24855491329479767, + "acc_stderr": 0.03295304696818317, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 0.03295304696818317 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2671957671957672, + "acc_stderr": 0.022789673145776578, + "acc_norm": 0.2671957671957672, + "acc_norm_stderr": 0.022789673145776578 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2569444444444444, + "acc_stderr": 0.036539469694421, + "acc_norm": 0.2569444444444444, + "acc_norm_stderr": 0.036539469694421 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036846, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036846 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.23121387283236994, + "acc_stderr": 0.022698657167855716, + "acc_norm": 0.23121387283236994, + "acc_norm_stderr": 0.022698657167855716 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2883435582822086, + "acc_stderr": 0.035590395316173425, + "acc_norm": 0.2883435582822086, + "acc_norm_stderr": 0.035590395316173425 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.024922001168886338, + "acc_norm": 0.2777777777777778, 
+ "acc_norm_stderr": 0.024922001168886338 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.2694300518134715, + "acc_stderr": 0.03201867122877794, + "acc_norm": 0.2694300518134715, + "acc_norm_stderr": 0.03201867122877794 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.03999423879281336, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.03999423879281336 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.26422018348623855, + "acc_stderr": 0.0189041641715102, + "acc_norm": 0.26422018348623855, + "acc_norm_stderr": 0.0189041641715102 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.19047619047619047, + "acc_stderr": 0.035122074123020534, + "acc_norm": 0.19047619047619047, + "acc_norm_stderr": 0.035122074123020534 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2581699346405229, + "acc_stderr": 0.02505850331695815, + "acc_norm": 0.2581699346405229, + "acc_norm_stderr": 0.02505850331695815 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.22, + "acc_stderr": 0.041633319989322674, + "acc_norm": 0.22, + "acc_norm_stderr": 0.041633319989322674 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.36363636363636365, + "acc_stderr": 0.043913262867240704, + "acc_norm": 0.36363636363636365, + "acc_norm_stderr": 0.043913262867240704 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.28289473684210525, + "acc_stderr": 0.03665349695640767, + "acc_norm": 0.28289473684210525, + "acc_norm_stderr": 0.03665349695640767 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.20098039215686275, + "acc_stderr": 0.016211938889655574, + "acc_norm": 0.20098039215686275, + "acc_norm_stderr": 0.016211938889655574 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.22340425531914893, + "acc_stderr": 
0.024847921358063962, + "acc_norm": 0.22340425531914893, + "acc_norm_stderr": 0.024847921358063962 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25, + "acc_stderr": 0.04109974682633932, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04109974682633932 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.03275773486100999, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.03275773486100999 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24692737430167597, + "acc_stderr": 0.014422292204808852, + "acc_norm": 0.24692737430167597, + "acc_norm_stderr": 0.014422292204808852 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.44485294117647056, + "acc_stderr": 0.030187532060329383, + "acc_norm": 0.44485294117647056, + "acc_norm_stderr": 0.030187532060329383 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.22857142857142856, + "acc_stderr": 0.026882144922307748, + "acc_norm": 0.22857142857142856, + "acc_norm_stderr": 0.026882144922307748 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.32489451476793246, + "acc_stderr": 0.030486039389105303, + "acc_norm": 0.32489451476793246, + "acc_norm_stderr": 0.030486039389105303 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.25749674054758803, + "acc_stderr": 0.011167706014904136, + "acc_norm": 0.25749674054758803, + "acc_norm_stderr": 0.011167706014904136 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.27941176470588236, + "acc_stderr": 0.031493281045079556, + "acc_norm": 0.27941176470588236, + "acc_norm_stderr": 0.031493281045079556 + }, + 
"harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.23030303030303031, + "acc_stderr": 0.032876667586034886, + "acc_norm": 0.23030303030303031, + "acc_norm_stderr": 0.032876667586034886 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2350061199510404, + "mc1_stderr": 0.014843061507731613, + "mc2": 0.390673097215474, + "mc2_stderr": 0.014736542111904073 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.30932703659976385, + "acc_stderr": 0.01589132050552089, + "acc_norm": 0.3990554899645809, + "acc_norm_stderr": 0.0168363772928493 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + 
"harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "nakhyeon/polyglot-ko-12b-qlora", + "model_sha": "67243c8d6550f974faf3b6dc3a09ede91e7fda55", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/nakhyeonn/llama-2-ko-qlora-prompt/result_2023-10-23 21:41:32.json b/nakhyeonn/llama-2-ko-qlora-prompt/result_2023-10-23 21:41:32.json new file mode 100644 index 0000000000000000000000000000000000000000..76646c486b8c4ec81178b1b2db8eebde05d6192b --- /dev/null +++ b/nakhyeonn/llama-2-ko-qlora-prompt/result_2023-10-23 21:41:32.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": 
{ + "acc": 0.3191126279863481, + "acc_stderr": 0.0136216961191733, + "acc_norm": 0.38139931740614336, + "acc_norm_stderr": 0.01419438908668525 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3826926906990639, + "acc_stderr": 0.004850508945116094, + "acc_norm": 0.49571798446524595, + "acc_norm_stderr": 0.004989598426249537 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.03377310252209194, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.03377310252209194 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2815533980582524, + "acc_stderr": 0.044532548363264673, + "acc_norm": 0.2815533980582524, + "acc_norm_stderr": 0.044532548363264673 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3371647509578544, + "acc_stderr": 0.01690520742080355, + "acc_norm": 0.3371647509578544, + "acc_norm_stderr": 0.01690520742080355 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04072314811876837, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04072314811876837 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421255, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421255 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2978723404255319, + "acc_stderr": 0.02989614568209546, + "acc_norm": 0.2978723404255319, + "acc_norm_stderr": 0.02989614568209546 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3433734939759036, + "acc_stderr": 0.03696584317010602, + "acc_norm": 0.3433734939759036, + "acc_norm_stderr": 0.03696584317010602 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3279742765273312, + "acc_stderr": 0.0266644108869376, + "acc_norm": 0.3279742765273312, + "acc_norm_stderr": 0.0266644108869376 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3094170403587444, + "acc_stderr": 0.031024411740572196, + "acc_norm": 0.3094170403587444, + "acc_norm_stderr": 0.031024411740572196 + }, + "harness|ko_mmlu_human_sexuality|5": 
{ + "acc": 0.40458015267175573, + "acc_stderr": 0.043046937953806645, + "acc_norm": 0.40458015267175573, + "acc_norm_stderr": 0.043046937953806645 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.398989898989899, + "acc_stderr": 0.03488901616852732, + "acc_norm": 0.398989898989899, + "acc_norm_stderr": 0.03488901616852732 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.31724137931034485, + "acc_stderr": 0.038783523721386236, + "acc_norm": 0.31724137931034485, + "acc_norm_stderr": 0.038783523721386236 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.19607843137254902, + "acc_stderr": 0.03950581861179962, + "acc_norm": 0.19607843137254902, + "acc_norm_stderr": 0.03950581861179962 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.31512605042016806, + "acc_stderr": 0.03017680828897434, + "acc_norm": 0.31512605042016806, + "acc_norm_stderr": 0.03017680828897434 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.28205128205128205, + "acc_stderr": 0.022815813098896628, + "acc_norm": 0.28205128205128205, + "acc_norm_stderr": 0.022815813098896628 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.044531975073749834, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.044531975073749834 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.28078817733990147, + "acc_stderr": 0.03161856335358609, + "acc_norm": 0.28078817733990147, + "acc_norm_stderr": 0.03161856335358609 + }, + 
"harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3064516129032258, + "acc_stderr": 0.02622648565255388, + "acc_norm": 0.3064516129032258, + "acc_norm_stderr": 0.02622648565255388 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.36324786324786323, + "acc_stderr": 0.03150712523091264, + "acc_norm": 0.36324786324786323, + "acc_norm_stderr": 0.03150712523091264 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.32452830188679244, + "acc_stderr": 0.028815615713432115, + "acc_norm": 0.32452830188679244, + "acc_norm_stderr": 0.028815615713432115 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.36363636363636365, + "acc_stderr": 0.04607582090719976, + "acc_norm": 0.36363636363636365, + "acc_norm_stderr": 0.04607582090719976 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.026842057873833706, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.026842057873833706 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.037804458505267334, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.037804458505267334 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.3383084577114428, + "acc_stderr": 0.03345563070339192, + "acc_norm": 0.3383084577114428, + "acc_norm_stderr": 0.03345563070339192 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2543352601156069, + "acc_stderr": 0.0332055644308557, + "acc_norm": 0.2543352601156069, + "acc_norm_stderr": 0.0332055644308557 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.25396825396825395, + "acc_stderr": 0.022418042891113942, + "acc_norm": 0.25396825396825395, + "acc_norm_stderr": 0.022418042891113942 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2152777777777778, + "acc_stderr": 0.03437079344106134, + "acc_norm": 0.2152777777777778, + "acc_norm_stderr": 0.03437079344106134 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + 
"acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.32947976878612717, + "acc_stderr": 0.0253052581318797, + "acc_norm": 0.32947976878612717, + "acc_norm_stderr": 0.0253052581318797 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.25766871165644173, + "acc_stderr": 0.03436150827846917, + "acc_norm": 0.25766871165644173, + "acc_norm_stderr": 0.03436150827846917 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.32407407407407407, + "acc_stderr": 0.02604176620271716, + "acc_norm": 0.32407407407407407, + "acc_norm_stderr": 0.02604176620271716 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.27979274611398963, + "acc_stderr": 0.03239637046735704, + "acc_norm": 0.27979274611398963, + "acc_norm_stderr": 0.03239637046735704 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.041424397194893624, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.041424397194893624 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.381651376146789, + "acc_stderr": 0.020828148517022593, + "acc_norm": 0.381651376146789, + "acc_norm_stderr": 0.020828148517022593 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.20634920634920634, + "acc_stderr": 0.03619604524124249, + "acc_norm": 0.20634920634920634, + "acc_norm_stderr": 0.03619604524124249 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.369281045751634, + "acc_stderr": 0.02763417668960266, + "acc_norm": 0.369281045751634, + "acc_norm_stderr": 0.02763417668960266 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + 
"acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.3884297520661157, + "acc_stderr": 0.04449270350068382, + "acc_norm": 0.3884297520661157, + "acc_norm_stderr": 0.04449270350068382 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3618421052631579, + "acc_stderr": 0.03910525752849724, + "acc_norm": 0.3618421052631579, + "acc_norm_stderr": 0.03910525752849724 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.018054027458815194, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.018054027458815194 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2801418439716312, + "acc_stderr": 0.026789172351140242, + "acc_norm": 0.2801418439716312, + "acc_norm_stderr": 0.026789172351140242 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.04287858751340456, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.04287858751340456 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.35648148148148145, + "acc_stderr": 0.03266478331527272, + "acc_norm": 0.35648148148148145, + "acc_norm_stderr": 0.03266478331527272 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24134078212290502, + "acc_stderr": 0.014310999547961438, + "acc_norm": 0.24134078212290502, + "acc_norm_stderr": 0.014310999547961438 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816507, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816507 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3897058823529412, + "acc_stderr": 0.02962466358115969, + "acc_norm": 0.3897058823529412, + "acc_norm_stderr": 0.02962466358115969 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 
0.2938775510204082, + "acc_stderr": 0.029162738410249772, + "acc_norm": 0.2938775510204082, + "acc_norm_stderr": 0.029162738410249772 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.3628691983122363, + "acc_stderr": 0.031299208255302136, + "acc_norm": 0.3628691983122363, + "acc_norm_stderr": 0.031299208255302136 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2900912646675359, + "acc_stderr": 0.011590375554733095, + "acc_norm": 0.2900912646675359, + "acc_norm_stderr": 0.011590375554733095 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2696078431372549, + "acc_stderr": 0.03114557065948678, + "acc_norm": 0.2696078431372549, + "acc_norm_stderr": 0.03114557065948678 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.30303030303030304, + "acc_stderr": 0.035886248000917075, + "acc_norm": 0.30303030303030304, + "acc_norm_stderr": 0.035886248000917075 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.23133414932680538, + "mc1_stderr": 0.014761945174862677, + "mc2": 0.37061566370146265, + "mc2_stderr": 0.014735163251703702 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.28807556080283353, + "acc_stderr": 0.01556986967483836, + "acc_norm": 0.4155844155844156, + "acc_norm_stderr": 0.016943586313076575 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + 
"harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + 
"harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "nakhyeonn/llama-2-ko-qlora-prompt", + "model_sha": "3c10df72b42af16132ec1528e2892ef74b65ae4b", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/nakhyeonn/llama-2-ko-qlora-prompt_1024/result_2023-10-23 21:46:13.json b/nakhyeonn/llama-2-ko-qlora-prompt_1024/result_2023-10-23 21:46:13.json new file mode 100644 index 0000000000000000000000000000000000000000..271962a0c4598362ad1916df6ba2bc941998b5d6 --- /dev/null +++ b/nakhyeonn/llama-2-ko-qlora-prompt_1024/result_2023-10-23 21:46:13.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3191126279863481, + "acc_stderr": 0.0136216961191733, + "acc_norm": 0.38139931740614336, + "acc_norm_stderr": 0.01419438908668525 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3826926906990639, + "acc_stderr": 0.004850508945116094, + "acc_norm": 0.49571798446524595, + "acc_norm_stderr": 0.004989598426249537 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.03377310252209194, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.03377310252209194 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2815533980582524, + "acc_stderr": 0.044532548363264673, + "acc_norm": 0.2815533980582524, + "acc_norm_stderr": 0.044532548363264673 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3371647509578544, + "acc_stderr": 0.01690520742080355, + "acc_norm": 0.3371647509578544, + "acc_norm_stderr": 0.01690520742080355 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04072314811876837, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04072314811876837 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421255, + "acc_norm": 0.28, + "acc_norm_stderr": 
0.045126085985421255 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2978723404255319, + "acc_stderr": 0.02989614568209546, + "acc_norm": 0.2978723404255319, + "acc_norm_stderr": 0.02989614568209546 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3433734939759036, + "acc_stderr": 0.03696584317010602, + "acc_norm": 0.3433734939759036, + "acc_norm_stderr": 0.03696584317010602 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3279742765273312, + "acc_stderr": 0.0266644108869376, + "acc_norm": 0.3279742765273312, + "acc_norm_stderr": 0.0266644108869376 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3094170403587444, + "acc_stderr": 0.031024411740572196, + "acc_norm": 0.3094170403587444, + "acc_norm_stderr": 0.031024411740572196 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.40458015267175573, + "acc_stderr": 0.043046937953806645, + "acc_norm": 0.40458015267175573, + "acc_norm_stderr": 0.043046937953806645 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.398989898989899, + "acc_stderr": 0.03488901616852732, + "acc_norm": 0.398989898989899, + "acc_norm_stderr": 0.03488901616852732 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.31724137931034485, + "acc_stderr": 0.038783523721386236, + "acc_norm": 0.31724137931034485, + "acc_norm_stderr": 0.038783523721386236 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.19607843137254902, + "acc_stderr": 0.03950581861179962, + "acc_norm": 0.19607843137254902, + "acc_norm_stderr": 0.03950581861179962 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.31512605042016806, + "acc_stderr": 0.03017680828897434, + "acc_norm": 0.31512605042016806, + "acc_norm_stderr": 0.03017680828897434 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.28205128205128205, + "acc_stderr": 
0.022815813098896628, + "acc_norm": 0.28205128205128205, + "acc_norm_stderr": 0.022815813098896628 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.044531975073749834, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.044531975073749834 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.28078817733990147, + "acc_stderr": 0.03161856335358609, + "acc_norm": 0.28078817733990147, + "acc_norm_stderr": 0.03161856335358609 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3064516129032258, + "acc_stderr": 0.02622648565255388, + "acc_norm": 0.3064516129032258, + "acc_norm_stderr": 0.02622648565255388 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.36324786324786323, + "acc_stderr": 0.03150712523091264, + "acc_norm": 0.36324786324786323, + "acc_norm_stderr": 0.03150712523091264 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.32452830188679244, + "acc_stderr": 0.028815615713432115, + "acc_norm": 0.32452830188679244, + "acc_norm_stderr": 0.028815615713432115 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.36363636363636365, + "acc_stderr": 0.04607582090719976, + "acc_norm": 0.36363636363636365, + "acc_norm_stderr": 0.04607582090719976 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.026842057873833706, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.026842057873833706 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.037804458505267334, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.037804458505267334 + }, + "harness|ko_mmlu_sociology|5": { + 
"acc": 0.3383084577114428, + "acc_stderr": 0.03345563070339192, + "acc_norm": 0.3383084577114428, + "acc_norm_stderr": 0.03345563070339192 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2543352601156069, + "acc_stderr": 0.0332055644308557, + "acc_norm": 0.2543352601156069, + "acc_norm_stderr": 0.0332055644308557 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.25396825396825395, + "acc_stderr": 0.022418042891113942, + "acc_norm": 0.25396825396825395, + "acc_norm_stderr": 0.022418042891113942 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2152777777777778, + "acc_stderr": 0.03437079344106134, + "acc_norm": 0.2152777777777778, + "acc_norm_stderr": 0.03437079344106134 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.32947976878612717, + "acc_stderr": 0.0253052581318797, + "acc_norm": 0.32947976878612717, + "acc_norm_stderr": 0.0253052581318797 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.25766871165644173, + "acc_stderr": 0.03436150827846917, + "acc_norm": 0.25766871165644173, + "acc_norm_stderr": 0.03436150827846917 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.32407407407407407, + "acc_stderr": 0.02604176620271716, + "acc_norm": 0.32407407407407407, + "acc_norm_stderr": 0.02604176620271716 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.27979274611398963, + "acc_stderr": 0.03239637046735704, + "acc_norm": 0.27979274611398963, + "acc_norm_stderr": 0.03239637046735704 + }, + 
"harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.041424397194893624, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.041424397194893624 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.381651376146789, + "acc_stderr": 0.020828148517022593, + "acc_norm": 0.381651376146789, + "acc_norm_stderr": 0.020828148517022593 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.20634920634920634, + "acc_stderr": 0.03619604524124249, + "acc_norm": 0.20634920634920634, + "acc_norm_stderr": 0.03619604524124249 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.369281045751634, + "acc_stderr": 0.02763417668960266, + "acc_norm": 0.369281045751634, + "acc_norm_stderr": 0.02763417668960266 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.3884297520661157, + "acc_stderr": 0.04449270350068382, + "acc_norm": 0.3884297520661157, + "acc_norm_stderr": 0.04449270350068382 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3618421052631579, + "acc_stderr": 0.03910525752849724, + "acc_norm": 0.3618421052631579, + "acc_norm_stderr": 0.03910525752849724 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.018054027458815194, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.018054027458815194 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2801418439716312, + "acc_stderr": 0.026789172351140242, + "acc_norm": 0.2801418439716312, + "acc_norm_stderr": 0.026789172351140242 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.04287858751340456, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.04287858751340456 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.35648148148148145, + "acc_stderr": 0.03266478331527272, + "acc_norm": 
0.35648148148148145, + "acc_norm_stderr": 0.03266478331527272 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24134078212290502, + "acc_stderr": 0.014310999547961438, + "acc_norm": 0.24134078212290502, + "acc_norm_stderr": 0.014310999547961438 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816507, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816507 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3897058823529412, + "acc_stderr": 0.02962466358115969, + "acc_norm": 0.3897058823529412, + "acc_norm_stderr": 0.02962466358115969 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2938775510204082, + "acc_stderr": 0.029162738410249772, + "acc_norm": 0.2938775510204082, + "acc_norm_stderr": 0.029162738410249772 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.3628691983122363, + "acc_stderr": 0.031299208255302136, + "acc_norm": 0.3628691983122363, + "acc_norm_stderr": 0.031299208255302136 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2900912646675359, + "acc_stderr": 0.011590375554733095, + "acc_norm": 0.2900912646675359, + "acc_norm_stderr": 0.011590375554733095 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2696078431372549, + "acc_stderr": 0.03114557065948678, + "acc_norm": 0.2696078431372549, + "acc_norm_stderr": 0.03114557065948678 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.30303030303030304, + "acc_stderr": 0.035886248000917075, + "acc_norm": 0.30303030303030304, + "acc_norm_stderr": 0.035886248000917075 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.23133414932680538, + "mc1_stderr": 0.014761945174862677, + "mc2": 0.37061566370146265, + "mc2_stderr": 0.014735163251703702 + }, + "harness|ko_commongen_v2|2": { + "acc": 
0.28807556080283353, + "acc_stderr": 0.01556986967483836, + "acc_norm": 0.4155844155844156, + "acc_norm_stderr": 0.016943586313076575 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + 
"harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "nakhyeonn/llama-2-ko-qlora-prompt_1024", + "model_sha": "1d8e0cc8d22540be3c50816571d0ef34a98aecd3", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/nakhyeonn/llama-2-ko-qlora-prompt_1024_new/result_2023-10-25 10:59:57.json b/nakhyeonn/llama-2-ko-qlora-prompt_1024_new/result_2023-10-25 10:59:57.json new file mode 100644 index 0000000000000000000000000000000000000000..822d079a63b65f65c8b9dfeeb3c2258f40e50e05 --- /dev/null +++ b/nakhyeonn/llama-2-ko-qlora-prompt_1024_new/result_2023-10-25 10:59:57.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3191126279863481, + "acc_stderr": 0.0136216961191733, + "acc_norm": 0.38139931740614336, + "acc_norm_stderr": 0.01419438908668525 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3826926906990639, + "acc_stderr": 0.004850508945116094, + "acc_norm": 0.49571798446524595, + "acc_norm_stderr": 0.004989598426249537 + }, + 
"harness|ko_mmlu_world_religions|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.03377310252209194, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.03377310252209194 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2815533980582524, + "acc_stderr": 0.044532548363264673, + "acc_norm": 0.2815533980582524, + "acc_norm_stderr": 0.044532548363264673 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3371647509578544, + "acc_stderr": 0.01690520742080355, + "acc_norm": 0.3371647509578544, + "acc_norm_stderr": 0.01690520742080355 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04072314811876837, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04072314811876837 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421255, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421255 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2978723404255319, + "acc_stderr": 0.02989614568209546, + "acc_norm": 0.2978723404255319, + "acc_norm_stderr": 0.02989614568209546 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3433734939759036, + "acc_stderr": 0.03696584317010602, + "acc_norm": 0.3433734939759036, + "acc_norm_stderr": 0.03696584317010602 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3279742765273312, + "acc_stderr": 0.0266644108869376, + "acc_norm": 0.3279742765273312, + "acc_norm_stderr": 0.0266644108869376 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3094170403587444, + "acc_stderr": 0.031024411740572196, + "acc_norm": 0.3094170403587444, + "acc_norm_stderr": 0.031024411740572196 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.40458015267175573, + "acc_stderr": 0.043046937953806645, + "acc_norm": 0.40458015267175573, + "acc_norm_stderr": 0.043046937953806645 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + 
"harness|ko_mmlu_high_school_geography|5": { + "acc": 0.398989898989899, + "acc_stderr": 0.03488901616852732, + "acc_norm": 0.398989898989899, + "acc_norm_stderr": 0.03488901616852732 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.31724137931034485, + "acc_stderr": 0.038783523721386236, + "acc_norm": 0.31724137931034485, + "acc_norm_stderr": 0.038783523721386236 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.19607843137254902, + "acc_stderr": 0.03950581861179962, + "acc_norm": 0.19607843137254902, + "acc_norm_stderr": 0.03950581861179962 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.31512605042016806, + "acc_stderr": 0.03017680828897434, + "acc_norm": 0.31512605042016806, + "acc_norm_stderr": 0.03017680828897434 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.28205128205128205, + "acc_stderr": 0.022815813098896628, + "acc_norm": 0.28205128205128205, + "acc_norm_stderr": 0.022815813098896628 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.044531975073749834, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.044531975073749834 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.28078817733990147, + "acc_stderr": 0.03161856335358609, + "acc_norm": 0.28078817733990147, + "acc_norm_stderr": 0.03161856335358609 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3064516129032258, + "acc_stderr": 0.02622648565255388, + "acc_norm": 0.3064516129032258, + "acc_norm_stderr": 0.02622648565255388 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.36324786324786323, + "acc_stderr": 0.03150712523091264, + "acc_norm": 
0.36324786324786323, + "acc_norm_stderr": 0.03150712523091264 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.32452830188679244, + "acc_stderr": 0.028815615713432115, + "acc_norm": 0.32452830188679244, + "acc_norm_stderr": 0.028815615713432115 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.36363636363636365, + "acc_stderr": 0.04607582090719976, + "acc_norm": 0.36363636363636365, + "acc_norm_stderr": 0.04607582090719976 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.026842057873833706, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.026842057873833706 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.037804458505267334, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.037804458505267334 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.3383084577114428, + "acc_stderr": 0.03345563070339192, + "acc_norm": 0.3383084577114428, + "acc_norm_stderr": 0.03345563070339192 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2543352601156069, + "acc_stderr": 0.0332055644308557, + "acc_norm": 0.2543352601156069, + "acc_norm_stderr": 0.0332055644308557 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.25396825396825395, + "acc_stderr": 0.022418042891113942, + "acc_norm": 0.25396825396825395, + "acc_norm_stderr": 0.022418042891113942 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2152777777777778, + "acc_stderr": 0.03437079344106134, + "acc_norm": 0.2152777777777778, + "acc_norm_stderr": 0.03437079344106134 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.32947976878612717, + 
"acc_stderr": 0.0253052581318797, + "acc_norm": 0.32947976878612717, + "acc_norm_stderr": 0.0253052581318797 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.25766871165644173, + "acc_stderr": 0.03436150827846917, + "acc_norm": 0.25766871165644173, + "acc_norm_stderr": 0.03436150827846917 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.32407407407407407, + "acc_stderr": 0.02604176620271716, + "acc_norm": 0.32407407407407407, + "acc_norm_stderr": 0.02604176620271716 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.27979274611398963, + "acc_stderr": 0.03239637046735704, + "acc_norm": 0.27979274611398963, + "acc_norm_stderr": 0.03239637046735704 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.041424397194893624, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.041424397194893624 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.381651376146789, + "acc_stderr": 0.020828148517022593, + "acc_norm": 0.381651376146789, + "acc_norm_stderr": 0.020828148517022593 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.20634920634920634, + "acc_stderr": 0.03619604524124249, + "acc_norm": 0.20634920634920634, + "acc_norm_stderr": 0.03619604524124249 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.369281045751634, + "acc_stderr": 0.02763417668960266, + "acc_norm": 0.369281045751634, + "acc_norm_stderr": 0.02763417668960266 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.3884297520661157, + "acc_stderr": 0.04449270350068382, + "acc_norm": 0.3884297520661157, + "acc_norm_stderr": 0.04449270350068382 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 
0.3618421052631579, + "acc_stderr": 0.03910525752849724, + "acc_norm": 0.3618421052631579, + "acc_norm_stderr": 0.03910525752849724 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.018054027458815194, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.018054027458815194 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2801418439716312, + "acc_stderr": 0.026789172351140242, + "acc_norm": 0.2801418439716312, + "acc_norm_stderr": 0.026789172351140242 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.04287858751340456, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.04287858751340456 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.35648148148148145, + "acc_stderr": 0.03266478331527272, + "acc_norm": 0.35648148148148145, + "acc_norm_stderr": 0.03266478331527272 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24134078212290502, + "acc_stderr": 0.014310999547961438, + "acc_norm": 0.24134078212290502, + "acc_norm_stderr": 0.014310999547961438 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816507, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816507 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3897058823529412, + "acc_stderr": 0.02962466358115969, + "acc_norm": 0.3897058823529412, + "acc_norm_stderr": 0.02962466358115969 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2938775510204082, + "acc_stderr": 0.029162738410249772, + "acc_norm": 0.2938775510204082, + "acc_norm_stderr": 0.029162738410249772 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.3628691983122363, + "acc_stderr": 0.031299208255302136, + "acc_norm": 0.3628691983122363, + 
"acc_norm_stderr": 0.031299208255302136 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2900912646675359, + "acc_stderr": 0.011590375554733095, + "acc_norm": 0.2900912646675359, + "acc_norm_stderr": 0.011590375554733095 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2696078431372549, + "acc_stderr": 0.03114557065948678, + "acc_norm": 0.2696078431372549, + "acc_norm_stderr": 0.03114557065948678 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.30303030303030304, + "acc_stderr": 0.035886248000917075, + "acc_norm": 0.30303030303030304, + "acc_norm_stderr": 0.035886248000917075 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.23133414932680538, + "mc1_stderr": 0.014761945174862677, + "mc2": 0.37061566370146265, + "mc2_stderr": 0.014735163251703702 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.28807556080283353, + "acc_stderr": 0.01556986967483836, + "acc_norm": 0.4155844155844156, + "acc_norm_stderr": 0.016943586313076575 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + 
"harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "nakhyeonn/llama-2-ko-qlora-prompt_1024_new", + "model_sha": "4738337870d8e87a2f9a8aac64fcc6935d24afdc", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null 
+ } +} \ No newline at end of file diff --git a/nakhyeonn/llama-2-ko-qlora-prompt_1024_new_2/result_2023-10-28 12:36:11.json b/nakhyeonn/llama-2-ko-qlora-prompt_1024_new_2/result_2023-10-28 12:36:11.json new file mode 100644 index 0000000000000000000000000000000000000000..3f384076ad074aa0f4a583f79a28e4d8b25e4276 --- /dev/null +++ b/nakhyeonn/llama-2-ko-qlora-prompt_1024_new_2/result_2023-10-28 12:36:11.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3191126279863481, + "acc_stderr": 0.0136216961191733, + "acc_norm": 0.38139931740614336, + "acc_norm_stderr": 0.01419438908668525 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3826926906990639, + "acc_stderr": 0.004850508945116094, + "acc_norm": 0.49571798446524595, + "acc_norm_stderr": 0.004989598426249537 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.03377310252209194, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.03377310252209194 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2815533980582524, + "acc_stderr": 0.044532548363264673, + "acc_norm": 0.2815533980582524, + "acc_norm_stderr": 0.044532548363264673 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3371647509578544, + "acc_stderr": 0.01690520742080355, + "acc_norm": 0.3371647509578544, + "acc_norm_stderr": 0.01690520742080355 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04072314811876837, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04072314811876837 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421255, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421255 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2978723404255319, + "acc_stderr": 0.02989614568209546, + "acc_norm": 0.2978723404255319, + "acc_norm_stderr": 0.02989614568209546 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3433734939759036, + "acc_stderr": 0.03696584317010602, + 
"acc_norm": 0.3433734939759036, + "acc_norm_stderr": 0.03696584317010602 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3279742765273312, + "acc_stderr": 0.0266644108869376, + "acc_norm": 0.3279742765273312, + "acc_norm_stderr": 0.0266644108869376 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3094170403587444, + "acc_stderr": 0.031024411740572196, + "acc_norm": 0.3094170403587444, + "acc_norm_stderr": 0.031024411740572196 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.40458015267175573, + "acc_stderr": 0.043046937953806645, + "acc_norm": 0.40458015267175573, + "acc_norm_stderr": 0.043046937953806645 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.398989898989899, + "acc_stderr": 0.03488901616852732, + "acc_norm": 0.398989898989899, + "acc_norm_stderr": 0.03488901616852732 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.31724137931034485, + "acc_stderr": 0.038783523721386236, + "acc_norm": 0.31724137931034485, + "acc_norm_stderr": 0.038783523721386236 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.19607843137254902, + "acc_stderr": 0.03950581861179962, + "acc_norm": 0.19607843137254902, + "acc_norm_stderr": 0.03950581861179962 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.31512605042016806, + "acc_stderr": 0.03017680828897434, + "acc_norm": 0.31512605042016806, + "acc_norm_stderr": 0.03017680828897434 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.28205128205128205, + "acc_stderr": 0.022815813098896628, + "acc_norm": 0.28205128205128205, + "acc_norm_stderr": 0.022815813098896628 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.27, + 
"acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.044531975073749834, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.044531975073749834 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.28078817733990147, + "acc_stderr": 0.03161856335358609, + "acc_norm": 0.28078817733990147, + "acc_norm_stderr": 0.03161856335358609 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3064516129032258, + "acc_stderr": 0.02622648565255388, + "acc_norm": 0.3064516129032258, + "acc_norm_stderr": 0.02622648565255388 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.36324786324786323, + "acc_stderr": 0.03150712523091264, + "acc_norm": 0.36324786324786323, + "acc_norm_stderr": 0.03150712523091264 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.32452830188679244, + "acc_stderr": 0.028815615713432115, + "acc_norm": 0.32452830188679244, + "acc_norm_stderr": 0.028815615713432115 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.36363636363636365, + "acc_stderr": 0.04607582090719976, + "acc_norm": 0.36363636363636365, + "acc_norm_stderr": 0.04607582090719976 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.026842057873833706, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.026842057873833706 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.037804458505267334, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.037804458505267334 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.3383084577114428, + "acc_stderr": 0.03345563070339192, + "acc_norm": 0.3383084577114428, + "acc_norm_stderr": 0.03345563070339192 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2543352601156069, + "acc_stderr": 0.0332055644308557, + "acc_norm": 0.2543352601156069, + "acc_norm_stderr": 0.0332055644308557 
+ }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.25396825396825395, + "acc_stderr": 0.022418042891113942, + "acc_norm": 0.25396825396825395, + "acc_norm_stderr": 0.022418042891113942 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2152777777777778, + "acc_stderr": 0.03437079344106134, + "acc_norm": 0.2152777777777778, + "acc_norm_stderr": 0.03437079344106134 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.32947976878612717, + "acc_stderr": 0.0253052581318797, + "acc_norm": 0.32947976878612717, + "acc_norm_stderr": 0.0253052581318797 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.25766871165644173, + "acc_stderr": 0.03436150827846917, + "acc_norm": 0.25766871165644173, + "acc_norm_stderr": 0.03436150827846917 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.32407407407407407, + "acc_stderr": 0.02604176620271716, + "acc_norm": 0.32407407407407407, + "acc_norm_stderr": 0.02604176620271716 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.27979274611398963, + "acc_stderr": 0.03239637046735704, + "acc_norm": 0.27979274611398963, + "acc_norm_stderr": 0.03239637046735704 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.041424397194893624, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.041424397194893624 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.381651376146789, + "acc_stderr": 0.020828148517022593, + "acc_norm": 0.381651376146789, + "acc_norm_stderr": 
0.020828148517022593 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.20634920634920634, + "acc_stderr": 0.03619604524124249, + "acc_norm": 0.20634920634920634, + "acc_norm_stderr": 0.03619604524124249 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.369281045751634, + "acc_stderr": 0.02763417668960266, + "acc_norm": 0.369281045751634, + "acc_norm_stderr": 0.02763417668960266 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.3884297520661157, + "acc_stderr": 0.04449270350068382, + "acc_norm": 0.3884297520661157, + "acc_norm_stderr": 0.04449270350068382 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3618421052631579, + "acc_stderr": 0.03910525752849724, + "acc_norm": 0.3618421052631579, + "acc_norm_stderr": 0.03910525752849724 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.018054027458815194, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.018054027458815194 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2801418439716312, + "acc_stderr": 0.026789172351140242, + "acc_norm": 0.2801418439716312, + "acc_norm_stderr": 0.026789172351140242 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.04287858751340456, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.04287858751340456 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.35648148148148145, + "acc_stderr": 0.03266478331527272, + "acc_norm": 0.35648148148148145, + "acc_norm_stderr": 0.03266478331527272 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24134078212290502, + "acc_stderr": 0.014310999547961438, + "acc_norm": 0.24134078212290502, + "acc_norm_stderr": 0.014310999547961438 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + 
"acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816507, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816507 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3897058823529412, + "acc_stderr": 0.02962466358115969, + "acc_norm": 0.3897058823529412, + "acc_norm_stderr": 0.02962466358115969 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2938775510204082, + "acc_stderr": 0.029162738410249772, + "acc_norm": 0.2938775510204082, + "acc_norm_stderr": 0.029162738410249772 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.3628691983122363, + "acc_stderr": 0.031299208255302136, + "acc_norm": 0.3628691983122363, + "acc_norm_stderr": 0.031299208255302136 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2900912646675359, + "acc_stderr": 0.011590375554733095, + "acc_norm": 0.2900912646675359, + "acc_norm_stderr": 0.011590375554733095 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2696078431372549, + "acc_stderr": 0.03114557065948678, + "acc_norm": 0.2696078431372549, + "acc_norm_stderr": 0.03114557065948678 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.30303030303030304, + "acc_stderr": 0.035886248000917075, + "acc_norm": 0.30303030303030304, + "acc_norm_stderr": 0.035886248000917075 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.23133414932680538, + "mc1_stderr": 0.014761945174862677, + "mc2": 0.37060775290008985, + "mc2_stderr": 0.014735131838171926 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.28807556080283353, + "acc_stderr": 0.01556986967483836, + "acc_norm": 0.4155844155844156, + "acc_norm_stderr": 0.016943586313076575 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + 
"harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + 
"harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "nakhyeonn/llama-2-ko-qlora-prompt_1024_new_2", + "model_sha": "d9c7865e0ec6916275f5760289e9671df6aca2b5", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/natong19/Qwen2-7B-Instruct-abliterated/result_2024-07-29 19:22:37.json b/natong19/Qwen2-7B-Instruct-abliterated/result_2024-07-29 19:22:37.json new file mode 100644 index 0000000000000000000000000000000000000000..7d3296abda69193f77fe565932c3f81186595a42 --- /dev/null +++ b/natong19/Qwen2-7B-Instruct-abliterated/result_2024-07-29 19:22:37.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.39505119453924914, + "acc_stderr": 0.014285898292938177, + "acc_norm": 0.4445392491467577, + "acc_norm_stderr": 0.01452122640562707 + }, + "harness|ko_hellaswag|10": { + "acc": 0.2504481179047998, + "acc_stderr": 0.004323856300539177, + "acc_norm": 0.2504481179047998, + "acc_norm_stderr": 0.004323856300539177 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6783625730994152, + "acc_stderr": 0.03582529442573122, + "acc_norm": 0.6783625730994152, + "acc_norm_stderr": 0.03582529442573122 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.7864077669902912, + "acc_stderr": 0.04058042015646036, + "acc_norm": 0.7864077669902912, + "acc_norm_stderr": 0.04058042015646036 + }, + 
"harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6615581098339719, + "acc_stderr": 0.016920869586210682, + "acc_norm": 0.6615581098339719, + "acc_norm_stderr": 0.016920869586210682 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37777777777777777, + "acc_stderr": 0.04188307537595853, + "acc_norm": 0.37777777777777777, + "acc_norm_stderr": 0.04188307537595853 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.6, + "acc_stderr": 0.03202563076101738, + "acc_norm": 0.6, + "acc_norm_stderr": 0.03202563076101738 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.45180722891566266, + "acc_stderr": 0.03874371556587953, + "acc_norm": 0.45180722891566266, + "acc_norm_stderr": 0.03874371556587953 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6334405144694534, + "acc_stderr": 0.027368078243971642, + "acc_norm": 0.6334405144694534, + "acc_norm_stderr": 0.027368078243971642 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.6278026905829597, + "acc_stderr": 0.0324430528300873, + "acc_norm": 0.6278026905829597, + "acc_norm_stderr": 0.0324430528300873 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5954198473282443, + "acc_stderr": 0.043046937953806645, + "acc_norm": 0.5954198473282443, + "acc_norm_stderr": 0.043046937953806645 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.56, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.56, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7323232323232324, + "acc_stderr": 0.03154449888270285, + "acc_norm": 0.7323232323232324, + "acc_norm_stderr": 0.03154449888270285 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.6275862068965518, + "acc_stderr": 0.0402873153294756, + "acc_norm": 0.6275862068965518, + "acc_norm_stderr": 0.0402873153294756 + }, + 
"harness|ko_mmlu_college_physics|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.04488482852329017, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.04488482852329017 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6554621848739496, + "acc_stderr": 0.030868682604121615, + "acc_norm": 0.6554621848739496, + "acc_norm_stderr": 0.030868682604121615 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.6538461538461539, + "acc_stderr": 0.024121125416941197, + "acc_norm": 0.6538461538461539, + "acc_norm_stderr": 0.024121125416941197 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.69, + "acc_stderr": 0.046482319871173156, + "acc_norm": 0.69, + "acc_norm_stderr": 0.046482319871173156 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.7037037037037037, + "acc_stderr": 0.044143436668549335, + "acc_norm": 0.7037037037037037, + "acc_norm_stderr": 0.044143436668549335 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.5270935960591133, + "acc_stderr": 0.03512819077876106, + "acc_norm": 0.5270935960591133, + "acc_norm_stderr": 0.03512819077876106 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6387096774193548, + "acc_stderr": 0.02732754844795754, + "acc_norm": 0.6387096774193548, + "acc_norm_stderr": 0.02732754844795754 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.8247863247863247, + "acc_stderr": 0.024904439098918225, + "acc_norm": 0.8247863247863247, + "acc_norm_stderr": 0.024904439098918225 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.6113207547169811, + "acc_stderr": 0.030000485448675986, + "acc_norm": 0.6113207547169811, + "acc_norm_stderr": 0.030000485448675986 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5727272727272728, + "acc_stderr": 0.047381987035454834, + "acc_norm": 0.5727272727272728, + 
"acc_norm_stderr": 0.047381987035454834 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.5, + "acc_stderr": 0.030485538042484616, + "acc_norm": 0.5, + "acc_norm_stderr": 0.030485538042484616 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.36423841059602646, + "acc_stderr": 0.03929111781242742, + "acc_norm": 0.36423841059602646, + "acc_norm_stderr": 0.03929111781242742 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.736318407960199, + "acc_stderr": 0.03115715086935556, + "acc_norm": 0.736318407960199, + "acc_norm_stderr": 0.03115715086935556 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5549132947976878, + "acc_stderr": 0.03789401760283646, + "acc_norm": 0.5549132947976878, + "acc_norm_stderr": 0.03789401760283646 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.5793650793650794, + "acc_stderr": 0.025424835086923992, + "acc_norm": 0.5793650793650794, + "acc_norm_stderr": 0.025424835086923992 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5347222222222222, + "acc_stderr": 0.04171115858181618, + "acc_norm": 0.5347222222222222, + "acc_norm_stderr": 0.04171115858181618 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.73, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.73, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.630057803468208, + "acc_stderr": 0.025992472029306386, + "acc_norm": 0.630057803468208, + "acc_norm_stderr": 0.025992472029306386 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.588957055214724, + "acc_stderr": 0.038656978537853624, + "acc_norm": 0.588957055214724, + "acc_norm_stderr": 0.038656978537853624 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6203703703703703, + "acc_stderr": 0.02700252103451648, + "acc_norm": 0.6203703703703703, + 
"acc_norm_stderr": 0.02700252103451648 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6528497409326425, + "acc_stderr": 0.03435696168361356, + "acc_norm": 0.6528497409326425, + "acc_norm_stderr": 0.03435696168361356 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.49122807017543857, + "acc_stderr": 0.047028804320496165, + "acc_norm": 0.49122807017543857, + "acc_norm_stderr": 0.047028804320496165 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6862385321100918, + "acc_stderr": 0.019894723341469134, + "acc_norm": 0.6862385321100918, + "acc_norm_stderr": 0.019894723341469134 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.5317460317460317, + "acc_stderr": 0.04463112720677174, + "acc_norm": 0.5317460317460317, + "acc_norm_stderr": 0.04463112720677174 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.6078431372549019, + "acc_stderr": 0.027956046165424523, + "acc_norm": 0.6078431372549019, + "acc_norm_stderr": 0.027956046165424523 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.68, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.68, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7933884297520661, + "acc_stderr": 0.03695980128098825, + "acc_norm": 0.7933884297520661, + "acc_norm_stderr": 0.03695980128098825 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.7039473684210527, + "acc_stderr": 0.03715062154998905, + "acc_norm": 0.7039473684210527, + "acc_norm_stderr": 0.03715062154998905 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5228758169934641, + "acc_stderr": 0.020206653187884786, + "acc_norm": 0.5228758169934641, + "acc_norm_stderr": 0.020206653187884786 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.46099290780141844, + "acc_stderr": 
0.02973659252642444, + "acc_norm": 0.46099290780141844, + "acc_norm_stderr": 0.02973659252642444 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4642857142857143, + "acc_stderr": 0.04733667890053756, + "acc_norm": 0.4642857142857143, + "acc_norm_stderr": 0.04733667890053756 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5694444444444444, + "acc_stderr": 0.03376922151252336, + "acc_norm": 0.5694444444444444, + "acc_norm_stderr": 0.03376922151252336 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.4569832402234637, + "acc_stderr": 0.016660498580509175, + "acc_norm": 0.4569832402234637, + "acc_norm_stderr": 0.016660498580509175 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.56, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.56, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.7, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.7, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5147058823529411, + "acc_stderr": 0.03035969707904612, + "acc_norm": 0.5147058823529411, + "acc_norm_stderr": 0.03035969707904612 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6448979591836734, + "acc_stderr": 0.030635655150387634, + "acc_norm": 0.6448979591836734, + "acc_norm_stderr": 0.030635655150387634 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7383966244725738, + "acc_stderr": 0.028609516716994934, + "acc_norm": 0.7383966244725738, + "acc_norm_stderr": 0.028609516716994934 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.39895697522816165, + "acc_stderr": 0.012506757655293679, + "acc_norm": 0.39895697522816165, + "acc_norm_stderr": 0.012506757655293679 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.7107843137254902, + "acc_stderr": 0.03182231867647554, + "acc_norm": 0.7107843137254902, + "acc_norm_stderr": 0.03182231867647554 + }, + 
"harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.703030303030303, + "acc_stderr": 0.035679697722680474, + "acc_norm": 0.703030303030303, + "acc_norm_stderr": 0.035679697722680474 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3708690330477356, + "mc1_stderr": 0.016909693580248797, + "mc2": 0.5500326690191786, + "mc2_stderr": 0.015937428888761346 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5785123966942148, + "acc_stderr": 0.016977101932601518, + "acc_norm": 0.5974025974025974, + "acc_norm_stderr": 0.016861020486407776 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + 
"harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "natong19/Qwen2-7B-Instruct-abliterated", + "model_sha": "127962453ae87879719a82a97384ac1859787a25", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/nayohan/ko-ref-llama2-7b-Inst/result_2023-10-28 05:54:53.json b/nayohan/ko-ref-llama2-7b-Inst/result_2023-10-28 05:54:53.json new file mode 100644 index 0000000000000000000000000000000000000000..d681cee3ec94097bf2618710dd685a44f8d53455 --- /dev/null +++ b/nayohan/ko-ref-llama2-7b-Inst/result_2023-10-28 05:54:53.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + 
"acc": 0.3174061433447099, + "acc_stderr": 0.01360223908803817, + "acc_norm": 0.38310580204778155, + "acc_norm_stderr": 0.014206472661672874 + }, + "harness|ko_hellaswag|10": { + "acc": 0.373132842063334, + "acc_stderr": 0.004826485582191013, + "acc_norm": 0.48287193786098387, + "acc_norm_stderr": 0.004986852842576722 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.036155076303109344, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.036155076303109344 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2524271844660194, + "acc_stderr": 0.04301250399690879, + "acc_norm": 0.2524271844660194, + "acc_norm_stderr": 0.04301250399690879 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.30268199233716475, + "acc_stderr": 0.016428781581749367, + "acc_norm": 0.30268199233716475, + "acc_norm_stderr": 0.016428781581749367 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2814814814814815, + "acc_stderr": 0.03885004245800254, + "acc_norm": 0.2814814814814815, + "acc_norm_stderr": 0.03885004245800254 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.33617021276595743, + "acc_stderr": 0.030881618520676942, + "acc_norm": 0.33617021276595743, + "acc_norm_stderr": 0.030881618520676942 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.2710843373493976, + "acc_stderr": 0.03460579907553027, + "acc_norm": 0.2710843373493976, + "acc_norm_stderr": 0.03460579907553027 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3054662379421222, + "acc_stderr": 0.02616058445014049, + "acc_norm": 0.3054662379421222, + "acc_norm_stderr": 0.02616058445014049 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4260089686098655, + "acc_stderr": 0.03318833286217283, + "acc_norm": 0.4260089686098655, + "acc_norm_stderr": 0.03318833286217283 + }, + 
"harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3053435114503817, + "acc_stderr": 0.040393149787245605, + "acc_norm": 0.3053435114503817, + "acc_norm_stderr": 0.040393149787245605 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.26262626262626265, + "acc_stderr": 0.031353050095330855, + "acc_norm": 0.26262626262626265, + "acc_norm_stderr": 0.031353050095330855 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2620689655172414, + "acc_stderr": 0.036646663372252565, + "acc_norm": 0.2620689655172414, + "acc_norm_stderr": 0.036646663372252565 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.041583075330832865, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.041583075330832865 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.2605042016806723, + "acc_stderr": 0.02851025151234193, + "acc_norm": 0.2605042016806723, + "acc_norm_stderr": 0.02851025151234193 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2564102564102564, + "acc_stderr": 0.022139081103971524, + "acc_norm": 0.2564102564102564, + "acc_norm_stderr": 0.022139081103971524 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036846, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036846 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.04489931073591312, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.04489931073591312 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2660098522167488, + "acc_stderr": 0.03108982600293752, + "acc_norm": 0.2660098522167488, + "acc_norm_stderr": 
0.03108982600293752 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.25806451612903225, + "acc_stderr": 0.024892469172462833, + "acc_norm": 0.25806451612903225, + "acc_norm_stderr": 0.024892469172462833 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2905982905982906, + "acc_stderr": 0.029745048572674057, + "acc_norm": 0.2905982905982906, + "acc_norm_stderr": 0.029745048572674057 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.24528301886792453, + "acc_stderr": 0.026480357179895705, + "acc_norm": 0.24528301886792453, + "acc_norm_stderr": 0.026480357179895705 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.3181818181818182, + "acc_stderr": 0.044612721759105085, + "acc_norm": 0.3181818181818182, + "acc_norm_stderr": 0.044612721759105085 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2518518518518518, + "acc_stderr": 0.02646611753895992, + "acc_norm": 0.2518518518518518, + "acc_norm_stderr": 0.02646611753895992 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.26490066225165565, + "acc_stderr": 0.03603038545360385, + "acc_norm": 0.26490066225165565, + "acc_norm_stderr": 0.03603038545360385 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.30845771144278605, + "acc_stderr": 0.03265819588512697, + "acc_norm": 0.30845771144278605, + "acc_norm_stderr": 0.03265819588512697 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.24855491329479767, + "acc_stderr": 0.03295304696818318, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 0.03295304696818318 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.24603174603174602, + "acc_stderr": 0.02218203720294837, + "acc_norm": 0.24603174603174602, + "acc_norm_stderr": 0.02218203720294837 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.25, + "acc_stderr": 0.03621034121889507, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03621034121889507 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.16, + "acc_stderr": 0.03684529491774708, + 
"acc_norm": 0.16, + "acc_norm_stderr": 0.03684529491774708 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.28034682080924855, + "acc_stderr": 0.024182427496577622, + "acc_norm": 0.28034682080924855, + "acc_norm_stderr": 0.024182427496577622 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.294478527607362, + "acc_stderr": 0.03581165790474082, + "acc_norm": 0.294478527607362, + "acc_norm_stderr": 0.03581165790474082 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.30246913580246915, + "acc_stderr": 0.02555765398186805, + "acc_norm": 0.30246913580246915, + "acc_norm_stderr": 0.02555765398186805 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.23, + "acc_stderr": 0.042295258468165065, + "acc_norm": 0.23, + "acc_norm_stderr": 0.042295258468165065 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.23834196891191708, + "acc_stderr": 0.030748905363909902, + "acc_norm": 0.23834196891191708, + "acc_norm_stderr": 0.030748905363909902 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.042270544512321984, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.042270544512321984 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.25137614678899084, + "acc_stderr": 0.018599206360287415, + "acc_norm": 0.25137614678899084, + "acc_norm_stderr": 0.018599206360287415 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.19047619047619047, + "acc_stderr": 0.035122074123020534, + "acc_norm": 0.19047619047619047, + "acc_norm_stderr": 0.035122074123020534 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.24836601307189543, + "acc_stderr": 0.024739981355113596, + "acc_norm": 0.24836601307189543, + "acc_norm_stderr": 0.024739981355113596 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.28, + "acc_stderr": 
0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.371900826446281, + "acc_stderr": 0.04412015806624504, + "acc_norm": 0.371900826446281, + "acc_norm_stderr": 0.04412015806624504 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.23026315789473684, + "acc_stderr": 0.034260594244031654, + "acc_norm": 0.23026315789473684, + "acc_norm_stderr": 0.034260594244031654 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.26633986928104575, + "acc_stderr": 0.017883188134667192, + "acc_norm": 0.26633986928104575, + "acc_norm_stderr": 0.017883188134667192 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.26595744680851063, + "acc_stderr": 0.026358065698880592, + "acc_norm": 0.26595744680851063, + "acc_norm_stderr": 0.026358065698880592 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.29464285714285715, + "acc_stderr": 0.04327040932578729, + "acc_norm": 0.29464285714285715, + "acc_norm_stderr": 0.04327040932578729 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.16203703703703703, + "acc_stderr": 0.025130453652268455, + "acc_norm": 0.16203703703703703, + "acc_norm_stderr": 0.025130453652268455 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23687150837988827, + "acc_stderr": 0.014219570788103982, + "acc_norm": 0.23687150837988827, + "acc_norm_stderr": 0.014219570788103982 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.19, + "acc_stderr": 0.039427724440366234, + "acc_norm": 0.19, + "acc_norm_stderr": 0.039427724440366234 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.02456220431414232, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.02456220431414232 + }, + 
"harness|ko_mmlu_security_studies|5": { + "acc": 0.3306122448979592, + "acc_stderr": 0.030116426296540582, + "acc_norm": 0.3306122448979592, + "acc_norm_stderr": 0.030116426296540582 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.26582278481012656, + "acc_stderr": 0.02875679962965834, + "acc_norm": 0.26582278481012656, + "acc_norm_stderr": 0.02875679962965834 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.25684485006518903, + "acc_stderr": 0.011158455853098858, + "acc_norm": 0.25684485006518903, + "acc_norm_stderr": 0.011158455853098858 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.22058823529411764, + "acc_stderr": 0.029102254389674082, + "acc_norm": 0.22058823529411764, + "acc_norm_stderr": 0.029102254389674082 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.24242424242424243, + "acc_stderr": 0.03346409881055953, + "acc_norm": 0.24242424242424243, + "acc_norm_stderr": 0.03346409881055953 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.23623011015911874, + "mc1_stderr": 0.014869755015871082, + "mc2": 0.3939753520317938, + "mc2_stderr": 0.01467030453530785 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3754427390791027, + "acc_stderr": 0.016648411589511095, + "acc_norm": 0.49586776859504134, + "acc_norm_stderr": 0.017189767032130824 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + 
"harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + 
"harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "nayohan/ko-ref-llama2-7b-Inst", + "model_sha": "1be158f488fbac5269d11273e9660aaed798540f", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/nayohan/llama-2-ko-7b-Inst/result_2023-10-25 05:26:11.json b/nayohan/llama-2-ko-7b-Inst/result_2023-10-25 05:26:11.json new file mode 100644 index 0000000000000000000000000000000000000000..9c5c7fd94bbc8a8c3fbfa364e038e7e14c8b82b0 --- /dev/null +++ b/nayohan/llama-2-ko-7b-Inst/result_2023-10-25 05:26:11.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.32337883959044367, + "acc_stderr": 0.013669421630012122, + "acc_norm": 0.37372013651877134, + "acc_norm_stderr": 0.014137708601759091 + }, + "harness|ko_hellaswag|10": { + "acc": 0.38548097988448515, + "acc_stderr": 0.004857140410776749, + "acc_norm": 0.4992033459470225, + "acc_norm_stderr": 0.004989775077835649 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.3216374269005848, + "acc_stderr": 0.03582529442573122, + "acc_norm": 0.3216374269005848, + "acc_norm_stderr": 0.03582529442573122 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2621359223300971, + "acc_stderr": 0.04354631077260595, + "acc_norm": 0.2621359223300971, + "acc_norm_stderr": 0.04354631077260595 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.388250319284802, + "acc_stderr": 0.017427673295544333, + "acc_norm": 0.388250319284802, + "acc_norm_stderr": 0.017427673295544333 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.04171654161354543, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.04171654161354543 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421255, + "acc_norm": 0.28, + "acc_norm_stderr": 
0.045126085985421255 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.28936170212765955, + "acc_stderr": 0.029644006577009618, + "acc_norm": 0.28936170212765955, + "acc_norm_stderr": 0.029644006577009618 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3132530120481928, + "acc_stderr": 0.036108050180310235, + "acc_norm": 0.3132530120481928, + "acc_norm_stderr": 0.036108050180310235 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.36977491961414793, + "acc_stderr": 0.027417996705630998, + "acc_norm": 0.36977491961414793, + "acc_norm_stderr": 0.027417996705630998 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.31390134529147984, + "acc_stderr": 0.031146796482972465, + "acc_norm": 0.31390134529147984, + "acc_norm_stderr": 0.031146796482972465 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.366412213740458, + "acc_stderr": 0.042258754519696386, + "acc_norm": 0.366412213740458, + "acc_norm_stderr": 0.042258754519696386 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.2828282828282828, + "acc_stderr": 0.03208779558786752, + "acc_norm": 0.2828282828282828, + "acc_norm_stderr": 0.03208779558786752 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3310344827586207, + "acc_stderr": 0.039215453124671215, + "acc_norm": 0.3310344827586207, + "acc_norm_stderr": 0.039215453124671215 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.04023382273617747, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.04023382273617747 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.25210084033613445, + "acc_stderr": 0.028205545033277726, + "acc_norm": 0.25210084033613445, + "acc_norm_stderr": 0.028205545033277726 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2282051282051282, + "acc_stderr": 
0.021278393863586282, + "acc_norm": 0.2282051282051282, + "acc_norm_stderr": 0.021278393863586282 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3425925925925926, + "acc_stderr": 0.045879047413018105, + "acc_norm": 0.3425925925925926, + "acc_norm_stderr": 0.045879047413018105 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2660098522167488, + "acc_stderr": 0.031089826002937523, + "acc_norm": 0.2660098522167488, + "acc_norm_stderr": 0.031089826002937523 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3161290322580645, + "acc_stderr": 0.026450874489042767, + "acc_norm": 0.3161290322580645, + "acc_norm_stderr": 0.026450874489042767 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.39316239316239315, + "acc_stderr": 0.03199957924651048, + "acc_norm": 0.39316239316239315, + "acc_norm_stderr": 0.03199957924651048 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.32452830188679244, + "acc_stderr": 0.028815615713432118, + "acc_norm": 0.32452830188679244, + "acc_norm_stderr": 0.028815615713432118 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.33636363636363636, + "acc_stderr": 0.04525393596302505, + "acc_norm": 0.33636363636363636, + "acc_norm_stderr": 0.04525393596302505 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.22962962962962963, + "acc_stderr": 0.02564410863926763, + "acc_norm": 0.22962962962962963, + "acc_norm_stderr": 0.02564410863926763 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + "acc_stderr": 0.03684881521389024, + "acc_norm": 0.2847682119205298, + "acc_norm_stderr": 0.03684881521389024 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 
0.3283582089552239, + "acc_stderr": 0.03320685889744324, + "acc_norm": 0.3283582089552239, + "acc_norm_stderr": 0.03320685889744324 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.23699421965317918, + "acc_stderr": 0.03242414757483099, + "acc_norm": 0.23699421965317918, + "acc_norm_stderr": 0.03242414757483099 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.022569897074918417, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.022569897074918417 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2152777777777778, + "acc_stderr": 0.03437079344106133, + "acc_norm": 0.2152777777777778, + "acc_norm_stderr": 0.03437079344106133 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.17, + "acc_stderr": 0.0377525168068637, + "acc_norm": 0.17, + "acc_norm_stderr": 0.0377525168068637 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2861271676300578, + "acc_stderr": 0.02433214677913413, + "acc_norm": 0.2861271676300578, + "acc_norm_stderr": 0.02433214677913413 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3128834355828221, + "acc_stderr": 0.036429145782924055, + "acc_norm": 0.3128834355828221, + "acc_norm_stderr": 0.036429145782924055 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.30864197530864196, + "acc_stderr": 0.025702640260603746, + "acc_norm": 0.30864197530864196, + "acc_norm_stderr": 0.025702640260603746 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.2538860103626943, + "acc_stderr": 0.03141024780565317, + "acc_norm": 0.2538860103626943, + "acc_norm_stderr": 0.03141024780565317 + }, + "harness|ko_mmlu_econometrics|5": 
{ + "acc": 0.3157894736842105, + "acc_stderr": 0.04372748290278007, + "acc_norm": 0.3157894736842105, + "acc_norm_stderr": 0.04372748290278007 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.29908256880733947, + "acc_stderr": 0.019630417285415175, + "acc_norm": 0.29908256880733947, + "acc_norm_stderr": 0.019630417285415175 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.15079365079365079, + "acc_stderr": 0.03200686497287392, + "acc_norm": 0.15079365079365079, + "acc_norm_stderr": 0.03200686497287392 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3104575163398693, + "acc_stderr": 0.026493033225145898, + "acc_norm": 0.3104575163398693, + "acc_norm_stderr": 0.026493033225145898 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.4462809917355372, + "acc_stderr": 0.0453793517794788, + "acc_norm": 0.4462809917355372, + "acc_norm_stderr": 0.0453793517794788 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.27631578947368424, + "acc_stderr": 0.03639057569952925, + "acc_norm": 0.27631578947368424, + "acc_norm_stderr": 0.03639057569952925 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.29901960784313725, + "acc_stderr": 0.018521756215423024, + "acc_norm": 0.29901960784313725, + "acc_norm_stderr": 0.018521756215423024 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2765957446808511, + "acc_stderr": 0.026684564340460997, + "acc_norm": 0.2765957446808511, + "acc_norm_stderr": 0.026684564340460997 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3392857142857143, + "acc_stderr": 0.044939490686135404, + "acc_norm": 0.3392857142857143, + "acc_norm_stderr": 0.044939490686135404 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2361111111111111, + "acc_stderr": 0.028963702570791033, + "acc_norm": 0.2361111111111111, + "acc_norm_stderr": 
0.028963702570791033 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.18, + "acc_stderr": 0.03861229196653696, + "acc_norm": 0.18, + "acc_norm_stderr": 0.03861229196653696 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3014705882352941, + "acc_stderr": 0.027875982114273168, + "acc_norm": 0.3014705882352941, + "acc_norm_stderr": 0.027875982114273168 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2530612244897959, + "acc_stderr": 0.02783302387139968, + "acc_norm": 0.2530612244897959, + "acc_norm_stderr": 0.02783302387139968 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.350210970464135, + "acc_stderr": 0.03105239193758435, + "acc_norm": 0.350210970464135, + "acc_norm_stderr": 0.03105239193758435 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2757496740547588, + "acc_stderr": 0.011413813609160989, + "acc_norm": 0.2757496740547588, + "acc_norm_stderr": 0.011413813609160989 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2696078431372549, + "acc_stderr": 0.03114557065948678, + "acc_norm": 0.2696078431372549, + "acc_norm_stderr": 0.03114557065948678 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3151515151515151, + "acc_stderr": 0.0362773057502241, + "acc_norm": 0.3151515151515151, + "acc_norm_stderr": 0.0362773057502241 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.22276621787025705, + "mc1_stderr": 0.01456650696139675, + "mc2": 0.36506276866988424, + "mc2_stderr": 0.014809047702061968 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.30342384887839435, + "acc_stderr": 0.015806072717909573, + 
"acc_norm": 0.38488783943329397, + "acc_norm_stderr": 0.01672857970149866 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + 
"harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "nayohan/llama-2-ko-7b-Inst", + "model_sha": "6d4b2a4bc363d79aa03edc287f8921dc1056262f", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/nayohan/polyglot-ko-1.3b-Inst/result_2023-10-21 12:29:48.json b/nayohan/polyglot-ko-1.3b-Inst/result_2023-10-21 12:29:48.json new file mode 100644 index 0000000000000000000000000000000000000000..99074466660798836e6a25ca770d6dfc6d85afd9 --- /dev/null +++ b/nayohan/polyglot-ko-1.3b-Inst/result_2023-10-21 12:29:48.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2354948805460751, + "acc_stderr": 0.012399451855004746, + "acc_norm": 0.28924914675767915, + "acc_norm_stderr": 0.013250012579393443 + }, + "harness|ko_hellaswag|10": { + "acc": 0.335291774546903, + "acc_stderr": 0.004711275408138412, + "acc_norm": 0.4166500697072296, + "acc_norm_stderr": 0.004919962822208309 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.21637426900584794, + "acc_stderr": 0.03158149539338731, + "acc_norm": 0.21637426900584794, + "acc_norm_stderr": 
0.03158149539338731 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.39805825242718446, + "acc_stderr": 0.048467482539772386, + "acc_norm": 0.39805825242718446, + "acc_norm_stderr": 0.048467482539772386 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.26181353767560667, + "acc_stderr": 0.015720838678445266, + "acc_norm": 0.26181353767560667, + "acc_norm_stderr": 0.015720838678445266 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.23703703703703705, + "acc_stderr": 0.03673731683969506, + "acc_norm": 0.23703703703703705, + "acc_norm_stderr": 0.03673731683969506 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909284, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909284 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.19574468085106383, + "acc_stderr": 0.025937853139977155, + "acc_norm": 0.19574468085106383, + "acc_norm_stderr": 0.025937853139977155 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.18674698795180722, + "acc_stderr": 0.030338749144500594, + "acc_norm": 0.18674698795180722, + "acc_norm_stderr": 0.030338749144500594 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.24115755627009647, + "acc_stderr": 0.024296594034763426, + "acc_norm": 0.24115755627009647, + "acc_norm_stderr": 0.024296594034763426 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.24663677130044842, + "acc_stderr": 0.028930413120910877, + "acc_norm": 0.24663677130044842, + "acc_norm_stderr": 0.028930413120910877 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.19083969465648856, + "acc_stderr": 0.03446513350752599, + "acc_norm": 0.19083969465648856, + "acc_norm_stderr": 0.03446513350752599 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.2474747474747475, + "acc_stderr": 0.030746300742124498, + "acc_norm": 0.2474747474747475, + 
"acc_norm_stderr": 0.030746300742124498 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2413793103448276, + "acc_stderr": 0.03565998174135302, + "acc_norm": 0.2413793103448276, + "acc_norm_stderr": 0.03565998174135302 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.30392156862745096, + "acc_stderr": 0.045766654032077615, + "acc_norm": 0.30392156862745096, + "acc_norm_stderr": 0.045766654032077615 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.35294117647058826, + "acc_stderr": 0.031041941304059288, + "acc_norm": 0.35294117647058826, + "acc_norm_stderr": 0.031041941304059288 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.35384615384615387, + "acc_stderr": 0.02424378399406217, + "acc_norm": 0.35384615384615387, + "acc_norm_stderr": 0.02424378399406217 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.2, + "acc_stderr": 0.040201512610368466, + "acc_norm": 0.2, + "acc_norm_stderr": 0.040201512610368466 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.23148148148148148, + "acc_stderr": 0.04077494709252626, + "acc_norm": 0.23148148148148148, + "acc_norm_stderr": 0.04077494709252626 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.270935960591133, + "acc_stderr": 0.031270907132977, + "acc_norm": 0.270935960591133, + "acc_norm_stderr": 0.031270907132977 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3064516129032258, + "acc_stderr": 0.026226485652553873, + "acc_norm": 0.3064516129032258, + "acc_norm_stderr": 0.026226485652553873 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.19230769230769232, + "acc_stderr": 0.025819233256483727, + "acc_norm": 0.19230769230769232, + "acc_norm_stderr": 0.025819233256483727 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.27169811320754716, + "acc_stderr": 
0.027377706624670713, + "acc_norm": 0.27169811320754716, + "acc_norm_stderr": 0.027377706624670713 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2545454545454545, + "acc_stderr": 0.04172343038705383, + "acc_norm": 0.2545454545454545, + "acc_norm_stderr": 0.04172343038705383 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.02684205787383371, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.02684205787383371 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.271523178807947, + "acc_stderr": 0.03631329803969654, + "acc_norm": 0.271523178807947, + "acc_norm_stderr": 0.03631329803969654 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.22388059701492538, + "acc_stderr": 0.029475250236017193, + "acc_norm": 0.22388059701492538, + "acc_norm_stderr": 0.029475250236017193 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3179190751445087, + "acc_stderr": 0.0355068398916558, + "acc_norm": 0.3179190751445087, + "acc_norm_stderr": 0.0355068398916558 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2566137566137566, + "acc_stderr": 0.022494510767503154, + "acc_norm": 0.2566137566137566, + "acc_norm_stderr": 0.022494510767503154 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.03745554791462457, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.03745554791462457 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.22, + "acc_stderr": 0.0416333199893227, + "acc_norm": 0.22, + "acc_norm_stderr": 0.0416333199893227 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.22832369942196531, + "acc_stderr": 0.022598703804321624, + "acc_norm": 0.22832369942196531, + "acc_norm_stderr": 0.022598703804321624 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 
0.2883435582822086, + "acc_stderr": 0.035590395316173425, + "acc_norm": 0.2883435582822086, + "acc_norm_stderr": 0.035590395316173425 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.24074074074074073, + "acc_stderr": 0.023788583551658526, + "acc_norm": 0.24074074074074073, + "acc_norm_stderr": 0.023788583551658526 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.36787564766839376, + "acc_stderr": 0.034801756684660366, + "acc_norm": 0.36787564766839376, + "acc_norm_stderr": 0.034801756684660366 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.22807017543859648, + "acc_stderr": 0.03947152782669415, + "acc_norm": 0.22807017543859648, + "acc_norm_stderr": 0.03947152782669415 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.23853211009174313, + "acc_stderr": 0.018272575810231857, + "acc_norm": 0.23853211009174313, + "acc_norm_stderr": 0.018272575810231857 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.24603174603174602, + "acc_stderr": 0.03852273364924316, + "acc_norm": 0.24603174603174602, + "acc_norm_stderr": 0.03852273364924316 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.23202614379084968, + "acc_stderr": 0.02417084087934101, + "acc_norm": 0.23202614379084968, + "acc_norm_stderr": 0.02417084087934101 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.18, + "acc_stderr": 0.038612291966536955, + "acc_norm": 0.18, + "acc_norm_stderr": 0.038612291966536955 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2644628099173554, + "acc_stderr": 0.04026187527591205, + "acc_norm": 0.2644628099173554, + "acc_norm_stderr": 0.04026187527591205 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.03583496176361062, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.03583496176361062 + }, + 
"harness|ko_mmlu_professional_psychology|5": { + "acc": 0.21405228758169934, + "acc_stderr": 0.01659342966232903, + "acc_norm": 0.21405228758169934, + "acc_norm_stderr": 0.01659342966232903 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.29432624113475175, + "acc_stderr": 0.027187127011503803, + "acc_norm": 0.29432624113475175, + "acc_norm_stderr": 0.027187127011503803 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.21428571428571427, + "acc_stderr": 0.038946411200447915, + "acc_norm": 0.21428571428571427, + "acc_norm_stderr": 0.038946411200447915 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4675925925925926, + "acc_stderr": 0.03402801581358966, + "acc_norm": 0.4675925925925926, + "acc_norm_stderr": 0.03402801581358966 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27262569832402234, + "acc_stderr": 0.014893391735249608, + "acc_norm": 0.27262569832402234, + "acc_norm_stderr": 0.014893391735249608 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768077, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768077 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4485294117647059, + "acc_stderr": 0.030211479609121593, + "acc_norm": 0.4485294117647059, + "acc_norm_stderr": 0.030211479609121593 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.40816326530612246, + "acc_stderr": 0.03146465712827423, + "acc_norm": 0.40816326530612246, + "acc_norm_stderr": 0.03146465712827423 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2320675105485232, + "acc_stderr": 0.02747974455080852, + "acc_norm": 0.2320675105485232, + "acc_norm_stderr": 0.02747974455080852 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.22685788787483702, + "acc_stderr": 
0.010696348133569929, + "acc_norm": 0.22685788787483702, + "acc_norm_stderr": 0.010696348133569929 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.030964517926923393, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.030964517926923393 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.22424242424242424, + "acc_stderr": 0.032568666616811015, + "acc_norm": 0.22424242424242424, + "acc_norm_stderr": 0.032568666616811015 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.23623011015911874, + "mc1_stderr": 0.014869755015871082, + "mc2": 0.4038819958960065, + "mc2_stderr": 0.014994809766039018 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.34238488783943327, + "acc_stderr": 0.016313907844146373, + "acc_norm": 0.4309327036599764, + "acc_norm_stderr": 0.017025558196043136 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + 
"harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "nayohan/polyglot-ko-1.3b-Inst", + "model_sha": "00eff028320c1d4483e112b7706119c4b7972948", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/nayohan/polyglot-ko-12.8b-Inst/result_2023-11-08 17:26:10.json 
b/nayohan/polyglot-ko-12.8b-Inst/result_2023-11-08 17:26:10.json new file mode 100644 index 0000000000000000000000000000000000000000..c172791b80932ed7ace2c1aa0d25f6dd7a5ef31a --- /dev/null +++ b/nayohan/polyglot-ko-12.8b-Inst/result_2023-11-08 17:26:10.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2858361774744027, + "acc_stderr": 0.013203196088537365, + "acc_norm": 0.3412969283276451, + "acc_norm_stderr": 0.013855831287497724 + }, + "harness|ko_hellaswag|10": { + "acc": 0.38787094204341765, + "acc_stderr": 0.004862690594815711, + "acc_norm": 0.5082652857996415, + "acc_norm_stderr": 0.0049890996115368146 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.3684210526315789, + "acc_stderr": 0.036996580176568775, + "acc_norm": 0.3684210526315789, + "acc_norm_stderr": 0.036996580176568775 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.1553398058252427, + "acc_stderr": 0.03586594738573974, + "acc_norm": 0.1553398058252427, + "acc_norm_stderr": 0.03586594738573974 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2681992337164751, + "acc_stderr": 0.015842430835269428, + "acc_norm": 0.2681992337164751, + "acc_norm_stderr": 0.015842430835269428 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.23703703703703705, + "acc_stderr": 0.03673731683969506, + "acc_norm": 0.23703703703703705, + "acc_norm_stderr": 0.03673731683969506 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2127659574468085, + "acc_stderr": 0.02675439134803978, + "acc_norm": 0.2127659574468085, + "acc_norm_stderr": 0.02675439134803978 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.25301204819277107, + "acc_stderr": 0.03384429155233136, + "acc_norm": 0.25301204819277107, + "acc_norm_stderr": 0.03384429155233136 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.28938906752411575, + 
"acc_stderr": 0.02575586592263294, + "acc_norm": 0.28938906752411575, + "acc_norm_stderr": 0.02575586592263294 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.2242152466367713, + "acc_stderr": 0.027991534258519524, + "acc_norm": 0.2242152466367713, + "acc_norm_stderr": 0.027991534258519524 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2824427480916031, + "acc_stderr": 0.03948406125768361, + "acc_norm": 0.2824427480916031, + "acc_norm_stderr": 0.03948406125768361 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.22, + "acc_stderr": 0.041633319989322695, + "acc_norm": 0.22, + "acc_norm_stderr": 0.041633319989322695 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.2878787878787879, + "acc_stderr": 0.03225883512300992, + "acc_norm": 0.2878787878787879, + "acc_norm_stderr": 0.03225883512300992 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.23448275862068965, + "acc_stderr": 0.035306258743465914, + "acc_norm": 0.23448275862068965, + "acc_norm_stderr": 0.035306258743465914 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.04336432707993178, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.04336432707993178 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.21008403361344538, + "acc_stderr": 0.026461398717471874, + "acc_norm": 0.21008403361344538, + "acc_norm_stderr": 0.026461398717471874 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2564102564102564, + "acc_stderr": 0.022139081103971545, + "acc_norm": 0.2564102564102564, + "acc_norm_stderr": 0.022139081103971545 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.18, + "acc_stderr": 0.038612291966536955, + "acc_norm": 0.18, + "acc_norm_stderr": 0.038612291966536955 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 
0.24074074074074073, + "acc_stderr": 0.041331194402438376, + "acc_norm": 0.24074074074074073, + "acc_norm_stderr": 0.041331194402438376 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.17733990147783252, + "acc_stderr": 0.026874337276808342, + "acc_norm": 0.17733990147783252, + "acc_norm_stderr": 0.026874337276808342 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.22903225806451613, + "acc_stderr": 0.023904914311782655, + "acc_norm": 0.22903225806451613, + "acc_norm_stderr": 0.023904914311782655 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2863247863247863, + "acc_stderr": 0.02961432369045665, + "acc_norm": 0.2863247863247863, + "acc_norm_stderr": 0.02961432369045665 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.26037735849056604, + "acc_stderr": 0.027008766090708097, + "acc_norm": 0.26037735849056604, + "acc_norm_stderr": 0.027008766090708097 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.24545454545454545, + "acc_stderr": 0.041220665028782855, + "acc_norm": 0.24545454545454545, + "acc_norm_stderr": 0.041220665028782855 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.026962424325073838, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.026962424325073838 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.03757949922943342, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.03757949922943342 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.22388059701492538, + "acc_stderr": 0.02947525023601718, + "acc_norm": 0.22388059701492538, + "acc_norm_stderr": 0.02947525023601718 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2543352601156069, + "acc_stderr": 0.0332055644308557, + "acc_norm": 0.2543352601156069, + "acc_norm_stderr": 0.0332055644308557 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.24074074074074073, + "acc_stderr": 0.022019080012217893, + "acc_norm": 
0.24074074074074073, + "acc_norm_stderr": 0.022019080012217893 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.03745554791462457, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.03745554791462457 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.22, + "acc_stderr": 0.041633319989322695, + "acc_norm": 0.22, + "acc_norm_stderr": 0.041633319989322695 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2398843930635838, + "acc_stderr": 0.022989592543123567, + "acc_norm": 0.2398843930635838, + "acc_norm_stderr": 0.022989592543123567 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2822085889570552, + "acc_stderr": 0.03536117886664743, + "acc_norm": 0.2822085889570552, + "acc_norm_stderr": 0.03536117886664743 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.024922001168886338, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.024922001168886338 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.19689119170984457, + "acc_stderr": 0.02869787397186068, + "acc_norm": 0.19689119170984457, + "acc_norm_stderr": 0.02869787397186068 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2894736842105263, + "acc_stderr": 0.042663394431593935, + "acc_norm": 0.2894736842105263, + "acc_norm_stderr": 0.042663394431593935 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.24220183486238533, + "acc_stderr": 0.018368176306598618, + "acc_norm": 0.24220183486238533, + "acc_norm_stderr": 0.018368176306598618 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.16666666666666666, + "acc_stderr": 
0.03333333333333338, + "acc_norm": 0.16666666666666666, + "acc_norm_stderr": 0.03333333333333338 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.30718954248366015, + "acc_stderr": 0.026415601914388995, + "acc_norm": 0.30718954248366015, + "acc_norm_stderr": 0.026415601914388995 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2892561983471074, + "acc_stderr": 0.041391127276354626, + "acc_norm": 0.2892561983471074, + "acc_norm_stderr": 0.041391127276354626 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.2236842105263158, + "acc_stderr": 0.033911609343436025, + "acc_norm": 0.2236842105263158, + "acc_norm_stderr": 0.033911609343436025 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.24673202614379086, + "acc_stderr": 0.0174408203674025, + "acc_norm": 0.24673202614379086, + "acc_norm_stderr": 0.0174408203674025 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.22340425531914893, + "acc_stderr": 0.02484792135806396, + "acc_norm": 0.22340425531914893, + "acc_norm_stderr": 0.02484792135806396 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.20535714285714285, + "acc_stderr": 0.03834241021419073, + "acc_norm": 0.20535714285714285, + "acc_norm_stderr": 0.03834241021419073 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.44907407407407407, + "acc_stderr": 0.03392238405321616, + "acc_norm": 0.44907407407407407, + "acc_norm_stderr": 0.03392238405321616 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24692737430167597, + "acc_stderr": 0.014422292204808852, + "acc_norm": 0.24692737430167597, + "acc_norm_stderr": 0.014422292204808852 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + 
"harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3860294117647059, + "acc_stderr": 0.029573269134411124, + "acc_norm": 0.3860294117647059, + "acc_norm_stderr": 0.029573269134411124 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.24489795918367346, + "acc_stderr": 0.027529637440174917, + "acc_norm": 0.24489795918367346, + "acc_norm_stderr": 0.027529637440174917 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2320675105485232, + "acc_stderr": 0.02747974455080852, + "acc_norm": 0.2320675105485232, + "acc_norm_stderr": 0.02747974455080852 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2627118644067797, + "acc_stderr": 0.01124054551499567, + "acc_norm": 0.2627118644067797, + "acc_norm_stderr": 0.01124054551499567 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.03198001660115071, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.03198001660115071 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.20606060606060606, + "acc_stderr": 0.03158415324047709, + "acc_norm": 0.20606060606060606, + "acc_norm_stderr": 0.03158415324047709 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.23745410036719705, + "mc1_stderr": 0.014896277441041869, + "mc2": 0.4089067508722681, + "mc2_stderr": 0.014954626572503958 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3187721369539551, + "acc_stderr": 0.01602142705530958, + "acc_norm": 0.42621015348288077, + "acc_norm_stderr": 0.01700212260948926 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + 
"harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + 
"harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "nayohan/polyglot-ko-12.8b-Inst", + "model_sha": "bf3d8224ee9b52ea97f48f624c446f8c5b8e7e39", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/nayohan/polyglot-ko-5.8b-Inst-All/result_2023-10-25 17:49:08.json b/nayohan/polyglot-ko-5.8b-Inst-All/result_2023-10-25 17:49:08.json new file mode 100644 index 0000000000000000000000000000000000000000..ad0373b599c4eea8e85b6e0b4d886f108e57fa60 --- /dev/null +++ b/nayohan/polyglot-ko-5.8b-Inst-All/result_2023-10-25 17:49:08.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2773037542662116, + "acc_stderr": 0.013082095839059374, + "acc_norm": 0.3191126279863481, + "acc_norm_stderr": 0.0136216961191733 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3692491535550687, + "acc_stderr": 0.004816152074023089, + "acc_norm": 0.47321250746863175, + "acc_norm_stderr": 0.00498261523305711 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.23391812865497075, + "acc_stderr": 0.03246721765117827, + "acc_norm": 0.23391812865497075, + "acc_norm_stderr": 0.03246721765117827 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.3786407766990291, + "acc_stderr": 0.04802694698258974, + "acc_norm": 0.3786407766990291, + "acc_norm_stderr": 0.04802694698258974 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.26309067688378035, + "acc_stderr": 0.015745497169049057, + "acc_norm": 0.26309067688378035, + 
"acc_norm_stderr": 0.015745497169049057 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.03820169914517905, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.03820169914517905 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768077, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768077 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3148936170212766, + "acc_stderr": 0.03036358219723816, + "acc_norm": 0.3148936170212766, + "acc_norm_stderr": 0.03036358219723816 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3192771084337349, + "acc_stderr": 0.036293353299478595, + "acc_norm": 0.3192771084337349, + "acc_norm_stderr": 0.036293353299478595 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.27009646302250806, + "acc_stderr": 0.025218040373410616, + "acc_norm": 0.27009646302250806, + "acc_norm_stderr": 0.025218040373410616 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3183856502242152, + "acc_stderr": 0.03126580522513713, + "acc_norm": 0.3183856502242152, + "acc_norm_stderr": 0.03126580522513713 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.22137404580152673, + "acc_stderr": 0.0364129708131373, + "acc_norm": 0.22137404580152673, + "acc_norm_stderr": 0.0364129708131373 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.35353535353535354, + "acc_stderr": 0.03406086723547153, + "acc_norm": 0.35353535353535354, + "acc_norm_stderr": 0.03406086723547153 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.21379310344827587, + "acc_stderr": 0.034165204477475494, + "acc_norm": 0.21379310344827587, + "acc_norm_stderr": 0.034165204477475494 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.35294117647058826, + "acc_stderr": 0.04755129616062947, + "acc_norm": 
0.35294117647058826, + "acc_norm_stderr": 0.04755129616062947 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3445378151260504, + "acc_stderr": 0.030868682604121622, + "acc_norm": 0.3445378151260504, + "acc_norm_stderr": 0.030868682604121622 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3487179487179487, + "acc_stderr": 0.02416278028401772, + "acc_norm": 0.3487179487179487, + "acc_norm_stderr": 0.02416278028401772 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.17, + "acc_stderr": 0.03775251680686371, + "acc_norm": 0.17, + "acc_norm_stderr": 0.03775251680686371 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.23148148148148148, + "acc_stderr": 0.04077494709252626, + "acc_norm": 0.23148148148148148, + "acc_norm_stderr": 0.04077494709252626 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.28078817733990147, + "acc_stderr": 0.0316185633535861, + "acc_norm": 0.28078817733990147, + "acc_norm_stderr": 0.0316185633535861 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3225806451612903, + "acc_stderr": 0.026593084516572284, + "acc_norm": 0.3225806451612903, + "acc_norm_stderr": 0.026593084516572284 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.19658119658119658, + "acc_stderr": 0.02603538609895129, + "acc_norm": 0.19658119658119658, + "acc_norm_stderr": 0.02603538609895129 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.26037735849056604, + "acc_stderr": 0.027008766090708087, + "acc_norm": 0.26037735849056604, + "acc_norm_stderr": 0.027008766090708087 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.24545454545454545, + "acc_stderr": 0.041220665028782834, + "acc_norm": 0.24545454545454545, + "acc_norm_stderr": 0.041220665028782834 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 
0.26296296296296295, + "acc_stderr": 0.026842057873833706, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.026842057873833706 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33112582781456956, + "acc_stderr": 0.038425817186598696, + "acc_norm": 0.33112582781456956, + "acc_norm_stderr": 0.038425817186598696 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.2885572139303483, + "acc_stderr": 0.03203841040213321, + "acc_norm": 0.2885572139303483, + "acc_norm_stderr": 0.03203841040213321 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.30057803468208094, + "acc_stderr": 0.03496101481191181, + "acc_norm": 0.30057803468208094, + "acc_norm_stderr": 0.03496101481191181 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2328042328042328, + "acc_stderr": 0.02176596167215453, + "acc_norm": 0.2328042328042328, + "acc_norm_stderr": 0.02176596167215453 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.22916666666666666, + "acc_stderr": 0.03514697467862388, + "acc_norm": 0.22916666666666666, + "acc_norm_stderr": 0.03514697467862388 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.21098265895953758, + "acc_stderr": 0.021966309947043117, + "acc_norm": 0.21098265895953758, + "acc_norm_stderr": 0.021966309947043117 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.22699386503067484, + "acc_stderr": 0.03291099578615769, + "acc_norm": 0.22699386503067484, + "acc_norm_stderr": 0.03291099578615769 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2623456790123457, + "acc_stderr": 0.024477222856135107, + "acc_norm": 0.2623456790123457, + "acc_norm_stderr": 0.024477222856135107 + }, + 
"harness|ko_mmlu_college_mathematics|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.3316062176165803, + "acc_stderr": 0.03397636541089116, + "acc_norm": 0.3316062176165803, + "acc_norm_stderr": 0.03397636541089116 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.039994238792813365, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.039994238792813365 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3137614678899083, + "acc_stderr": 0.019894723341469127, + "acc_norm": 0.3137614678899083, + "acc_norm_stderr": 0.019894723341469127 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.24603174603174602, + "acc_stderr": 0.03852273364924316, + "acc_norm": 0.24603174603174602, + "acc_norm_stderr": 0.03852273364924316 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.025553169991826514, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.025553169991826514 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.3305785123966942, + "acc_stderr": 0.04294340845212095, + "acc_norm": 0.3305785123966942, + "acc_norm_stderr": 0.04294340845212095 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.25, + "acc_stderr": 0.03523807393012047, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03523807393012047 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.016819028375736386, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.016819028375736386 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.23404255319148937, + "acc_stderr": 0.025257861359432414, + "acc_norm": 0.23404255319148937, + "acc_norm_stderr": 
0.025257861359432414 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.042878587513404565, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.042878587513404565 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4583333333333333, + "acc_stderr": 0.03398110890294636, + "acc_norm": 0.4583333333333333, + "acc_norm_stderr": 0.03398110890294636 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27262569832402234, + "acc_stderr": 0.014893391735249608, + "acc_norm": 0.27262569832402234, + "acc_norm_stderr": 0.014893391735249608 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4485294117647059, + "acc_stderr": 0.030211479609121593, + "acc_norm": 0.4485294117647059, + "acc_norm_stderr": 0.030211479609121593 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4, + "acc_stderr": 0.03136250240935892, + "acc_norm": 0.4, + "acc_norm_stderr": 0.03136250240935892 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.23628691983122363, + "acc_stderr": 0.02765215314415926, + "acc_norm": 0.23628691983122363, + "acc_norm_stderr": 0.02765215314415926 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2607561929595828, + "acc_stderr": 0.011213471559602336, + "acc_norm": 0.2607561929595828, + "acc_norm_stderr": 0.011213471559602336 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.030587591351604246, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.030587591351604246 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.23636363636363636, + "acc_stderr": 
0.0331750593000918, + "acc_norm": 0.23636363636363636, + "acc_norm_stderr": 0.0331750593000918 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.23990208078335373, + "mc1_stderr": 0.014948812679062137, + "mc2": 0.40001430050776826, + "mc2_stderr": 0.014747441557861264 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3447461629279811, + "acc_stderr": 0.016340649905418683, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.0171191722080615 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + 
"harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "nayohan/polyglot-ko-5.8b-Inst-All", + "model_sha": "08a90add0a4c6508a97387d75823333449728533", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/nayohan/polyglot-ko-5.8b-Inst/result_2023-10-12 03:26:44.json b/nayohan/polyglot-ko-5.8b-Inst/result_2023-10-12 03:26:44.json new file mode 100644 index 0000000000000000000000000000000000000000..40fed0273c4b499aff2493a1a4598ae04415a91b --- /dev/null +++ b/nayohan/polyglot-ko-5.8b-Inst/result_2023-10-12 03:26:44.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.27474402730375425, + "acc_stderr": 0.013044617212771227, + "acc_norm": 
0.3191126279863481, + "acc_norm_stderr": 0.013621696119173307 + }, + "harness|ko_hellaswag|10": { + "acc": 0.37950607448715395, + "acc_stderr": 0.004842723234022034, + "acc_norm": 0.4827723561043617, + "acc_norm_stderr": 0.004986818680313436 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.15789473684210525, + "acc_stderr": 0.027966785859160893, + "acc_norm": 0.15789473684210525, + "acc_norm_stderr": 0.027966785859160893 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.30097087378640774, + "acc_stderr": 0.04541609446503948, + "acc_norm": 0.30097087378640774, + "acc_norm_stderr": 0.04541609446503948 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.21966794380587484, + "acc_stderr": 0.014805384478371162, + "acc_norm": 0.21966794380587484, + "acc_norm_stderr": 0.014805384478371162 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.22962962962962963, + "acc_stderr": 0.036333844140734636, + "acc_norm": 0.22962962962962963, + "acc_norm_stderr": 0.036333844140734636 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.14042553191489363, + "acc_stderr": 0.022712077616627864, + "acc_norm": 0.14042553191489363, + "acc_norm_stderr": 0.022712077616627864 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.24096385542168675, + "acc_stderr": 0.03329394119073528, + "acc_norm": 0.24096385542168675, + "acc_norm_stderr": 0.03329394119073528 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.26688102893890675, + "acc_stderr": 0.025122637608816646, + "acc_norm": 0.26688102893890675, + "acc_norm_stderr": 0.025122637608816646 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.14349775784753363, + "acc_stderr": 0.02352937126961819, + "acc_norm": 0.14349775784753363, + "acc_norm_stderr": 0.02352937126961819 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2748091603053435, + "acc_stderr": 
0.039153454088478354, + "acc_norm": 0.2748091603053435, + "acc_norm_stderr": 0.039153454088478354 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3434343434343434, + "acc_stderr": 0.03383201223244442, + "acc_norm": 0.3434343434343434, + "acc_norm_stderr": 0.03383201223244442 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2413793103448276, + "acc_stderr": 0.03565998174135302, + "acc_norm": 0.2413793103448276, + "acc_norm_stderr": 0.03565998174135302 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.04617034827006718, + "acc_norm": 0.3137254901960784, + "acc_norm_stderr": 0.04617034827006718 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3445378151260504, + "acc_stderr": 0.030868682604121622, + "acc_norm": 0.3445378151260504, + "acc_norm_stderr": 0.030868682604121622 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.35128205128205126, + "acc_stderr": 0.024203665177902796, + "acc_norm": 0.35128205128205126, + "acc_norm_stderr": 0.024203665177902796 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.17, + "acc_stderr": 0.03775251680686371, + "acc_norm": 0.17, + "acc_norm_stderr": 0.03775251680686371 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.040191074725573483, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.040191074725573483 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2413793103448276, + "acc_stderr": 0.030108330718011625, + "acc_norm": 0.2413793103448276, + "acc_norm_stderr": 0.030108330718011625 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 
0.2967741935483871, + "acc_stderr": 0.0259885007924119, + "acc_norm": 0.2967741935483871, + "acc_norm_stderr": 0.0259885007924119 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.19658119658119658, + "acc_stderr": 0.02603538609895129, + "acc_norm": 0.19658119658119658, + "acc_norm_stderr": 0.02603538609895129 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.27547169811320754, + "acc_stderr": 0.027495663683724067, + "acc_norm": 0.27547169811320754, + "acc_norm_stderr": 0.027495663683724067 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.23636363636363636, + "acc_stderr": 0.04069306319721377, + "acc_norm": 0.23636363636363636, + "acc_norm_stderr": 0.04069306319721377 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.02684205787383371, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.02684205787383371 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.03780445850526733, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.03780445850526733 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.2537313432835821, + "acc_stderr": 0.030769444967296018, + "acc_norm": 0.2537313432835821, + "acc_norm_stderr": 0.030769444967296018 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.32947976878612717, + "acc_stderr": 0.035839017547364134, + "acc_norm": 0.32947976878612717, + "acc_norm_stderr": 0.035839017547364134 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.23544973544973544, + "acc_stderr": 0.02185150982203172, + "acc_norm": 0.23544973544973544, + "acc_norm_stderr": 0.02185150982203172 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2708333333333333, + "acc_stderr": 0.03716177437566016, + "acc_norm": 0.2708333333333333, + "acc_norm_stderr": 0.03716177437566016 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 
0.04725815626252604 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.22254335260115607, + "acc_stderr": 0.02239421566194282, + "acc_norm": 0.22254335260115607, + "acc_norm_stderr": 0.02239421566194282 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2392638036809816, + "acc_stderr": 0.0335195387952127, + "acc_norm": 0.2392638036809816, + "acc_norm_stderr": 0.0335195387952127 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2345679012345679, + "acc_stderr": 0.023576881744005723, + "acc_norm": 0.2345679012345679, + "acc_norm_stderr": 0.023576881744005723 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.36787564766839376, + "acc_stderr": 0.034801756684660366, + "acc_norm": 0.36787564766839376, + "acc_norm_stderr": 0.034801756684660366 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.04142439719489361, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.04142439719489361 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3229357798165138, + "acc_stderr": 0.020048115923415318, + "acc_norm": 0.3229357798165138, + "acc_norm_stderr": 0.020048115923415318 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.040061680838488774, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.040061680838488774 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.28104575163398693, + "acc_stderr": 0.025738854797818733, + "acc_norm": 0.28104575163398693, + "acc_norm_stderr": 0.025738854797818733 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.24, + 
"acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2396694214876033, + "acc_stderr": 0.03896878985070417, + "acc_norm": 0.2396694214876033, + "acc_norm_stderr": 0.03896878985070417 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.23026315789473684, + "acc_stderr": 0.03426059424403165, + "acc_norm": 0.23026315789473684, + "acc_norm_stderr": 0.03426059424403165 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.016819028375736386, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.016819028375736386 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.22340425531914893, + "acc_stderr": 0.02484792135806396, + "acc_norm": 0.22340425531914893, + "acc_norm_stderr": 0.02484792135806396 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.24107142857142858, + "acc_stderr": 0.04059867246952687, + "acc_norm": 0.24107142857142858, + "acc_norm_stderr": 0.04059867246952687 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4675925925925926, + "acc_stderr": 0.03402801581358966, + "acc_norm": 0.4675925925925926, + "acc_norm_stderr": 0.03402801581358966 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27262569832402234, + "acc_stderr": 0.014893391735249608, + "acc_norm": 0.27262569832402234, + "acc_norm_stderr": 0.014893391735249608 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.16, + "acc_stderr": 0.03684529491774708, + "acc_norm": 0.16, + "acc_norm_stderr": 0.03684529491774708 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.030161911930767102, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.030161911930767102 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4, + "acc_stderr": 
0.03136250240935892, + "acc_norm": 0.4, + "acc_norm_stderr": 0.03136250240935892 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.20675105485232068, + "acc_stderr": 0.026361651668389094, + "acc_norm": 0.20675105485232068, + "acc_norm_stderr": 0.026361651668389094 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.23663624511082137, + "acc_stderr": 0.010855137351572746, + "acc_norm": 0.23663624511082137, + "acc_norm_stderr": 0.010855137351572746 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.030587591351604246, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.030587591351604246 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.24242424242424243, + "acc_stderr": 0.03346409881055953, + "acc_norm": 0.24242424242424243, + "acc_norm_stderr": 0.03346409881055953 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24112607099143207, + "mc1_stderr": 0.014974827279752329, + "mc2": 0.40162480294038216, + "mc2_stderr": 0.015030387645461886 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.33412042502951594, + "acc_stderr": 0.016216763304239688, + "acc_norm": 0.4179456906729634, + "acc_norm_stderr": 0.016957292005279713 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + 
"harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + 
"model_name": "nayohan/polyglot-ko-5.8b-Inst", + "model_sha": "f2d30b16043455a6303d11f28cfd012c46edc4cf", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/nlpai-lab/KULLM3/result_2024-04-08 05:16:47.json b/nlpai-lab/KULLM3/result_2024-04-08 05:16:47.json new file mode 100644 index 0000000000000000000000000000000000000000..9900e7cad85071f72ff09e663972cd73d5593ed5 --- /dev/null +++ b/nlpai-lab/KULLM3/result_2024-04-08 05:16:47.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.42918088737201365, + "acc_stderr": 0.014464085894870651, + "acc_norm": 0.46501706484641636, + "acc_norm_stderr": 0.014575583922019672 + }, + "harness|ko_hellaswag|10": { + "acc": 0.445628360884286, + "acc_stderr": 0.004960191341430244, + "acc_norm": 0.589523999203346, + "acc_norm_stderr": 0.004909148239488273 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6432748538011696, + "acc_stderr": 0.03674013002860954, + "acc_norm": 0.6432748538011696, + "acc_norm_stderr": 0.03674013002860954 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6116504854368932, + "acc_stderr": 0.04825729337356389, + "acc_norm": 0.6116504854368932, + "acc_norm_stderr": 0.04825729337356389 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6155810983397191, + "acc_stderr": 0.01739568874281962, + "acc_norm": 0.6155810983397191, + "acc_norm_stderr": 0.01739568874281962 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4962962962962963, + "acc_stderr": 0.04319223625811331, + "acc_norm": 0.4962962962962963, + "acc_norm_stderr": 0.04319223625811331 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768077, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768077 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4553191489361702, + "acc_stderr": 0.03255525359340354, + 
"acc_norm": 0.4553191489361702, + "acc_norm_stderr": 0.03255525359340354 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.5180722891566265, + "acc_stderr": 0.038899512528272166, + "acc_norm": 0.5180722891566265, + "acc_norm_stderr": 0.038899512528272166 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5755627009646302, + "acc_stderr": 0.028071928247946205, + "acc_norm": 0.5755627009646302, + "acc_norm_stderr": 0.028071928247946205 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5650224215246636, + "acc_stderr": 0.033272833702713445, + "acc_norm": 0.5650224215246636, + "acc_norm_stderr": 0.033272833702713445 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5877862595419847, + "acc_stderr": 0.04317171194870255, + "acc_norm": 0.5877862595419847, + "acc_norm_stderr": 0.04317171194870255 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6515151515151515, + "acc_stderr": 0.033948539651564025, + "acc_norm": 0.6515151515151515, + "acc_norm_stderr": 0.033948539651564025 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.503448275862069, + "acc_stderr": 0.04166567577101579, + "acc_norm": 0.503448275862069, + "acc_norm_stderr": 0.04166567577101579 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.043364327079931785, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.043364327079931785 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5756302521008403, + "acc_stderr": 0.03210479051015776, + "acc_norm": 0.5756302521008403, + "acc_norm_stderr": 0.03210479051015776 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.541025641025641, + "acc_stderr": 0.025265525491284295, + "acc_norm": 0.541025641025641, + "acc_norm_stderr": 0.025265525491284295 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 
0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.04803752235190192, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.04803752235190192 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3842364532019704, + "acc_stderr": 0.0342239856565755, + "acc_norm": 0.3842364532019704, + "acc_norm_stderr": 0.0342239856565755 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5774193548387097, + "acc_stderr": 0.02810096472427264, + "acc_norm": 0.5774193548387097, + "acc_norm_stderr": 0.02810096472427264 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7777777777777778, + "acc_stderr": 0.027236013946196673, + "acc_norm": 0.7777777777777778, + "acc_norm_stderr": 0.027236013946196673 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4981132075471698, + "acc_stderr": 0.030772653642075657, + "acc_norm": 0.4981132075471698, + "acc_norm_stderr": 0.030772653642075657 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5272727272727272, + "acc_stderr": 0.04782001791380061, + "acc_norm": 0.5272727272727272, + "acc_norm_stderr": 0.04782001791380061 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25555555555555554, + "acc_stderr": 0.026593939101844082, + "acc_norm": 0.25555555555555554, + "acc_norm_stderr": 0.026593939101844082 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33774834437086093, + "acc_stderr": 0.038615575462551684, + "acc_norm": 0.33774834437086093, + "acc_norm_stderr": 0.038615575462551684 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7064676616915423, + "acc_stderr": 0.032200241045342054, + "acc_norm": 0.7064676616915423, + "acc_norm_stderr": 0.032200241045342054 + }, + 
"harness|ko_mmlu_college_medicine|5": { + "acc": 0.4797687861271676, + "acc_stderr": 0.03809342081273958, + "acc_norm": 0.4797687861271676, + "acc_norm_stderr": 0.03809342081273958 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.38095238095238093, + "acc_stderr": 0.025010749116137602, + "acc_norm": 0.38095238095238093, + "acc_norm_stderr": 0.025010749116137602 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4236111111111111, + "acc_stderr": 0.041321250197233685, + "acc_norm": 0.4236111111111111, + "acc_norm_stderr": 0.041321250197233685 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.71, + "acc_stderr": 0.04560480215720683, + "acc_norm": 0.71, + "acc_norm_stderr": 0.04560480215720683 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5751445086705202, + "acc_stderr": 0.026613350840261733, + "acc_norm": 0.5751445086705202, + "acc_norm_stderr": 0.026613350840261733 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5030674846625767, + "acc_stderr": 0.03928297078179662, + "acc_norm": 0.5030674846625767, + "acc_norm_stderr": 0.03928297078179662 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5370370370370371, + "acc_stderr": 0.027744313443376536, + "acc_norm": 0.5370370370370371, + "acc_norm_stderr": 0.027744313443376536 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6217616580310881, + "acc_stderr": 0.034998072761933376, + "acc_norm": 0.6217616580310881, + "acc_norm_stderr": 0.034998072761933376 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.37719298245614036, + "acc_stderr": 0.04559522141958216, + "acc_norm": 0.37719298245614036, + "acc_norm_stderr": 
0.04559522141958216 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6385321100917432, + "acc_stderr": 0.02059808200993736, + "acc_norm": 0.6385321100917432, + "acc_norm_stderr": 0.02059808200993736 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4126984126984127, + "acc_stderr": 0.04403438954768177, + "acc_norm": 0.4126984126984127, + "acc_norm_stderr": 0.04403438954768177 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5261437908496732, + "acc_stderr": 0.028590752958852387, + "acc_norm": 0.5261437908496732, + "acc_norm_stderr": 0.028590752958852387 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.57, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.57, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7520661157024794, + "acc_stderr": 0.03941897526516304, + "acc_norm": 0.7520661157024794, + "acc_norm_stderr": 0.03941897526516304 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5789473684210527, + "acc_stderr": 0.040179012759817494, + "acc_norm": 0.5789473684210527, + "acc_norm_stderr": 0.040179012759817494 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4738562091503268, + "acc_stderr": 0.020200164564804588, + "acc_norm": 0.4738562091503268, + "acc_norm_stderr": 0.020200164564804588 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3404255319148936, + "acc_stderr": 0.02826765748265013, + "acc_norm": 0.3404255319148936, + "acc_norm_stderr": 0.02826765748265013 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.38392857142857145, + "acc_stderr": 0.046161430750285455, + "acc_norm": 0.38392857142857145, + "acc_norm_stderr": 0.046161430750285455 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4675925925925926, + "acc_stderr": 0.03402801581358966, + "acc_norm": 0.4675925925925926, + "acc_norm_stderr": 0.03402801581358966 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.21675977653631284, + "acc_stderr": 
0.013780598486443363, + "acc_norm": 0.21675977653631284, + "acc_norm_stderr": 0.013780598486443363 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.71, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.71, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.0301619119307671, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.0301619119307671 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6285714285714286, + "acc_stderr": 0.03093285879278986, + "acc_norm": 0.6285714285714286, + "acc_norm_stderr": 0.03093285879278986 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.70042194092827, + "acc_stderr": 0.029818024749753095, + "acc_norm": 0.70042194092827, + "acc_norm_stderr": 0.029818024749753095 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.378748370273794, + "acc_stderr": 0.012389052105003741, + "acc_norm": 0.378748370273794, + "acc_norm_stderr": 0.012389052105003741 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6225490196078431, + "acc_stderr": 0.03402272044340703, + "acc_norm": 0.6225490196078431, + "acc_norm_stderr": 0.03402272044340703 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.03681050869161549, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.03681050869161549 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.33659730722154224, + "mc1_stderr": 0.016542412809494877, + "mc2": 0.49995145184296846, + "mc2_stderr": 0.015887726098900913 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.564344746162928, + "acc_stderr": 0.017047415229476316, + "acc_norm": 0.6068476977567887, + "acc_norm_stderr": 0.016793262801287068 + } + }, + "versions": { + "all": 0, + 
"harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + 
"harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "nlpai-lab/KULLM3", + "model_sha": "5a6bcd0fc7f240460eb6d57016f7b4060bc1f43b", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/nlpai-lab/kullm-polyglot-12.8b-v2/result_2023-09-27 05:32:23.json b/nlpai-lab/kullm-polyglot-12.8b-v2/result_2023-09-27 05:32:23.json new file mode 100644 index 0000000000000000000000000000000000000000..43d51b1b80daf8ec56d92a4e477399bcd4809735 --- /dev/null +++ b/nlpai-lab/kullm-polyglot-12.8b-v2/result_2023-09-27 05:32:23.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2790102389078498, + "acc_stderr": 0.013106784883601346, + "acc_norm": 0.32764505119453924, + "acc_norm_stderr": 0.013715847940719344 + }, + "harness|ko_hellaswag|10": { + "acc": 0.386476797450707, + "acc_stderr": 0.004859467984155259, + "acc_norm": 0.4987054371639116, + "acc_norm_stderr": 0.00498976468673883 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.2982456140350877, + "acc_stderr": 0.03508771929824565, + "acc_norm": 0.2982456140350877, + "acc_norm_stderr": 0.03508771929824565 + }, + "harness|ko_mmlu_management|5": { + "acc": 
0.18446601941747573, + "acc_stderr": 0.03840423627288276, + "acc_norm": 0.18446601941747573, + "acc_norm_stderr": 0.03840423627288276 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.24776500638569604, + "acc_stderr": 0.015438083080568961, + "acc_norm": 0.24776500638569604, + "acc_norm_stderr": 0.015438083080568961 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.18518518518518517, + "acc_stderr": 0.03355677216313141, + "acc_norm": 0.18518518518518517, + "acc_norm_stderr": 0.03355677216313141 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.28936170212765955, + "acc_stderr": 0.02964400657700962, + "acc_norm": 0.28936170212765955, + "acc_norm_stderr": 0.02964400657700962 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.25301204819277107, + "acc_stderr": 0.03384429155233136, + "acc_norm": 0.25301204819277107, + "acc_norm_stderr": 0.03384429155233136 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2958199356913183, + "acc_stderr": 0.025922371788818784, + "acc_norm": 0.2958199356913183, + "acc_norm_stderr": 0.025922371788818784 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.21076233183856502, + "acc_stderr": 0.027373095500540193, + "acc_norm": 0.21076233183856502, + "acc_norm_stderr": 0.027373095500540193 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2824427480916031, + "acc_stderr": 0.03948406125768361, + "acc_norm": 0.2824427480916031, + "acc_norm_stderr": 0.03948406125768361 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.25252525252525254, + "acc_stderr": 0.030954055470365907, + "acc_norm": 0.25252525252525254, + "acc_norm_stderr": 0.030954055470365907 + }, + 
"harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2482758620689655, + "acc_stderr": 0.03600105692727772, + "acc_norm": 0.2482758620689655, + "acc_norm_stderr": 0.03600105692727772 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04690650298201942, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04690650298201942 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.25210084033613445, + "acc_stderr": 0.028205545033277726, + "acc_norm": 0.25210084033613445, + "acc_norm_stderr": 0.028205545033277726 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.02242127361292371, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.02242127361292371 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816505, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816505 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.042365112580946336, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.042365112580946336 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2315270935960591, + "acc_stderr": 0.02967833314144444, + "acc_norm": 0.2315270935960591, + "acc_norm_stderr": 0.02967833314144444 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.1967741935483871, + "acc_stderr": 0.02261640942074203, + "acc_norm": 0.1967741935483871, + "acc_norm_stderr": 0.02261640942074203 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.28205128205128205, + "acc_stderr": 0.02948036054954119, + "acc_norm": 0.28205128205128205, + "acc_norm_stderr": 0.02948036054954119 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.25660377358490566, + "acc_stderr": 0.026880647889051982, + "acc_norm": 
0.25660377358490566, + "acc_norm_stderr": 0.026880647889051982 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.23636363636363636, + "acc_stderr": 0.04069306319721375, + "acc_norm": 0.23636363636363636, + "acc_norm_stderr": 0.04069306319721375 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.23703703703703705, + "acc_stderr": 0.025928876132766104, + "acc_norm": 0.23703703703703705, + "acc_norm_stderr": 0.025928876132766104 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2582781456953642, + "acc_stderr": 0.035737053147634576, + "acc_norm": 0.2582781456953642, + "acc_norm_stderr": 0.035737053147634576 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.21890547263681592, + "acc_stderr": 0.029239174636647, + "acc_norm": 0.21890547263681592, + "acc_norm_stderr": 0.029239174636647 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.23699421965317918, + "acc_stderr": 0.03242414757483098, + "acc_norm": 0.23699421965317918, + "acc_norm_stderr": 0.03242414757483098 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.02264421261552521, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.02264421261552521 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2361111111111111, + "acc_stderr": 0.03551446610810826, + "acc_norm": 0.2361111111111111, + "acc_norm_stderr": 0.03551446610810826 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.2, + "acc_stderr": 0.040201512610368445, + "acc_norm": 0.2, + "acc_norm_stderr": 0.040201512610368445 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.24566473988439305, + "acc_stderr": 0.023176298203992005, + "acc_norm": 0.24566473988439305, + "acc_norm_stderr": 0.023176298203992005 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.32515337423312884, + 
"acc_stderr": 0.036803503712864616, + "acc_norm": 0.32515337423312884, + "acc_norm_stderr": 0.036803503712864616 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.25617283950617287, + "acc_stderr": 0.024288533637726095, + "acc_norm": 0.25617283950617287, + "acc_norm_stderr": 0.024288533637726095 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.29533678756476683, + "acc_stderr": 0.03292296639155139, + "acc_norm": 0.29533678756476683, + "acc_norm_stderr": 0.03292296639155139 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.04142439719489362, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.04142439719489362 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.25137614678899084, + "acc_stderr": 0.018599206360287415, + "acc_norm": 0.25137614678899084, + "acc_norm_stderr": 0.018599206360287415 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.20634920634920634, + "acc_stderr": 0.036196045241242515, + "acc_norm": 0.20634920634920634, + "acc_norm_stderr": 0.036196045241242515 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.024288619466046102, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.024288619466046102 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.19, + "acc_stderr": 0.03942772444036623, + "acc_norm": 0.19, + "acc_norm_stderr": 0.03942772444036623 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.18181818181818182, + "acc_stderr": 0.035208939510976534, + "acc_norm": 0.18181818181818182, + "acc_norm_stderr": 0.035208939510976534 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.17763157894736842, + "acc_stderr": 0.03110318238312338, + "acc_norm": 0.17763157894736842, + "acc_norm_stderr": 0.03110318238312338 + }, + 
"harness|ko_mmlu_professional_psychology|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.017401816711427657, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.017401816711427657 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.23049645390070922, + "acc_stderr": 0.025123739226872405, + "acc_norm": 0.23049645390070922, + "acc_norm_stderr": 0.025123739226872405 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.33035714285714285, + "acc_stderr": 0.04464285714285713, + "acc_norm": 0.33035714285714285, + "acc_norm_stderr": 0.04464285714285713 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.03350991604696043, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.03350991604696043 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23687150837988827, + "acc_stderr": 0.014219570788103987, + "acc_norm": 0.23687150837988827, + "acc_norm_stderr": 0.014219570788103987 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.1875, + "acc_stderr": 0.023709788253811766, + "acc_norm": 0.1875, + "acc_norm_stderr": 0.023709788253811766 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3142857142857143, + "acc_stderr": 0.02971932942241748, + "acc_norm": 0.3142857142857143, + "acc_norm_stderr": 0.02971932942241748 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.25316455696202533, + "acc_stderr": 0.028304657943035293, + "acc_norm": 0.25316455696202533, + "acc_norm_stderr": 0.028304657943035293 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2522816166883963, + "acc_stderr": 0.011092789056875232, + "acc_norm": 
0.2522816166883963, + "acc_norm_stderr": 0.011092789056875232 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.25980392156862747, + "acc_stderr": 0.03077855467869326, + "acc_norm": 0.25980392156862747, + "acc_norm_stderr": 0.03077855467869326 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.28484848484848485, + "acc_stderr": 0.03524390844511783, + "acc_norm": 0.28484848484848485, + "acc_norm_stderr": 0.03524390844511783 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24112607099143207, + "mc1_stderr": 0.01497482727975233, + "mc2": 0.39040412705496613, + "mc2_stderr": 0.01471780652709213 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3246753246753247, + "acc_stderr": 0.016098883939346453, + "acc_norm": 0.3955135773317591, + "acc_norm_stderr": 0.016810815902206035 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "nlpai-lab/kullm-polyglot-12.8b-v2", + "model_sha": "9e0c9be881f663ca088b10faad15b54ea3ba779c", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/nlpai-lab/kullm-polyglot-5.8b-v2/result_2023-10-10 08:19:05.json b/nlpai-lab/kullm-polyglot-5.8b-v2/result_2023-10-10 08:19:05.json new 
file mode 100644 index 0000000000000000000000000000000000000000..6976ee510001edac5afc6f7d609dc81edc2cf61c --- /dev/null +++ b/nlpai-lab/kullm-polyglot-5.8b-v2/result_2023-10-10 08:19:05.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2841296928327645, + "acc_stderr": 0.013179442447653887, + "acc_norm": 0.3293515358361775, + "acc_norm_stderr": 0.013734057652635474 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3694483170683131, + "acc_stderr": 0.004816690123209753, + "acc_norm": 0.47301334395538736, + "acc_norm_stderr": 0.004982508198584259 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.17543859649122806, + "acc_stderr": 0.029170885500727654, + "acc_norm": 0.17543859649122806, + "acc_norm_stderr": 0.029170885500727654 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.36893203883495146, + "acc_stderr": 0.04777615181156739, + "acc_norm": 0.36893203883495146, + "acc_norm_stderr": 0.04777615181156739 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.24648786717752236, + "acc_stderr": 0.015411308769686936, + "acc_norm": 0.24648786717752236, + "acc_norm_stderr": 0.015411308769686936 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.31851851851851853, + "acc_stderr": 0.040247784019771096, + "acc_norm": 0.31851851851851853, + "acc_norm_stderr": 0.040247784019771096 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2425531914893617, + "acc_stderr": 0.028020226271200214, + "acc_norm": 0.2425531914893617, + "acc_norm_stderr": 0.028020226271200214 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.16265060240963855, + "acc_stderr": 0.02873023789261379, + "acc_norm": 0.16265060240963855, + "acc_norm_stderr": 0.02873023789261379 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2733118971061093, + "acc_stderr": 0.02531176597542612, + "acc_norm": 
0.2733118971061093, + "acc_norm_stderr": 0.02531176597542612 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.21524663677130046, + "acc_stderr": 0.027584066602208256, + "acc_norm": 0.21524663677130046, + "acc_norm_stderr": 0.027584066602208256 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2595419847328244, + "acc_stderr": 0.03844876139785271, + "acc_norm": 0.2595419847328244, + "acc_norm_stderr": 0.03844876139785271 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3383838383838384, + "acc_stderr": 0.033711241426263014, + "acc_norm": 0.3383838383838384, + "acc_norm_stderr": 0.033711241426263014 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.27586206896551724, + "acc_stderr": 0.037245636197746325, + "acc_norm": 0.27586206896551724, + "acc_norm_stderr": 0.037245636197746325 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.04488482852329017, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.04488482852329017 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3487394957983193, + "acc_stderr": 0.030956636328566548, + "acc_norm": 0.3487394957983193, + "acc_norm_stderr": 0.030956636328566548 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.32051282051282054, + "acc_stderr": 0.023661296393964273, + "acc_norm": 0.32051282051282054, + "acc_norm_stderr": 0.023661296393964273 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.18, + "acc_stderr": 0.038612291966536975, + "acc_norm": 0.18, + "acc_norm_stderr": 0.038612291966536975 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.14, + "acc_stderr": 0.03487350880197773, + "acc_norm": 0.14, + "acc_norm_stderr": 0.03487350880197773 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.25, + "acc_stderr": 0.04186091791394607, + 
"acc_norm": 0.25, + "acc_norm_stderr": 0.04186091791394607 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.28078817733990147, + "acc_stderr": 0.03161856335358611, + "acc_norm": 0.28078817733990147, + "acc_norm_stderr": 0.03161856335358611 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.24516129032258063, + "acc_stderr": 0.02447224384089553, + "acc_norm": 0.24516129032258063, + "acc_norm_stderr": 0.02447224384089553 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.23504273504273504, + "acc_stderr": 0.027778835904935434, + "acc_norm": 0.23504273504273504, + "acc_norm_stderr": 0.027778835904935434 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.29056603773584905, + "acc_stderr": 0.02794321998933715, + "acc_norm": 0.29056603773584905, + "acc_norm_stderr": 0.02794321998933715 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.32727272727272727, + "acc_stderr": 0.0449429086625209, + "acc_norm": 0.32727272727272727, + "acc_norm_stderr": 0.0449429086625209 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2814814814814815, + "acc_stderr": 0.027420019350945277, + "acc_norm": 0.2814814814814815, + "acc_norm_stderr": 0.027420019350945277 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + "acc_stderr": 0.037345356767871984, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.037345356767871984 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.2885572139303483, + "acc_stderr": 0.03203841040213321, + "acc_norm": 0.2885572139303483, + "acc_norm_stderr": 0.03203841040213321 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.32947976878612717, + "acc_stderr": 0.03583901754736412, + "acc_norm": 0.32947976878612717, + "acc_norm_stderr": 0.03583901754736412 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.21957671957671956, + "acc_stderr": 0.021320018599770355, + "acc_norm": 0.21957671957671956, + "acc_norm_stderr": 0.021320018599770355 + }, + 
"harness|ko_mmlu_college_biology|5": { + "acc": 0.2847222222222222, + "acc_stderr": 0.037738099906869334, + "acc_norm": 0.2847222222222222, + "acc_norm_stderr": 0.037738099906869334 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.23410404624277456, + "acc_stderr": 0.022797110278071145, + "acc_norm": 0.23410404624277456, + "acc_norm_stderr": 0.022797110278071145 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.25766871165644173, + "acc_stderr": 0.03436150827846917, + "acc_norm": 0.25766871165644173, + "acc_norm_stderr": 0.03436150827846917 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.24074074074074073, + "acc_stderr": 0.02378858355165852, + "acc_norm": 0.24074074074074073, + "acc_norm_stderr": 0.02378858355165852 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.34196891191709844, + "acc_stderr": 0.03423465100104284, + "acc_norm": 0.34196891191709844, + "acc_norm_stderr": 0.03423465100104284 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.0414243971948936, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.0414243971948936 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.28807339449541286, + "acc_stderr": 0.019416445892636018, + "acc_norm": 0.28807339449541286, + "acc_norm_stderr": 0.019416445892636018 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.36507936507936506, + "acc_stderr": 0.04306241259127153, + "acc_norm": 0.36507936507936506, + "acc_norm_stderr": 
0.04306241259127153 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.025553169991826528, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.025553169991826528 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2644628099173554, + "acc_stderr": 0.04026187527591207, + "acc_norm": 0.2644628099173554, + "acc_norm_stderr": 0.04026187527591207 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3157894736842105, + "acc_stderr": 0.0378272898086547, + "acc_norm": 0.3157894736842105, + "acc_norm_stderr": 0.0378272898086547 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.22058823529411764, + "acc_stderr": 0.016774672365468514, + "acc_norm": 0.22058823529411764, + "acc_norm_stderr": 0.016774672365468514 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.25177304964539005, + "acc_stderr": 0.025892151156709405, + "acc_norm": 0.25177304964539005, + "acc_norm_stderr": 0.025892151156709405 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.20535714285714285, + "acc_stderr": 0.03834241021419073, + "acc_norm": 0.20535714285714285, + "acc_norm_stderr": 0.03834241021419073 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3425925925925926, + "acc_stderr": 0.03236585252602158, + "acc_norm": 0.3425925925925926, + "acc_norm_stderr": 0.03236585252602158 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27262569832402234, + "acc_stderr": 0.014893391735249608, + "acc_norm": 0.27262569832402234, + "acc_norm_stderr": 0.014893391735249608 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.19, + "acc_stderr": 0.03942772444036625, + "acc_norm": 
0.19, + "acc_norm_stderr": 0.03942772444036625 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4117647058823529, + "acc_stderr": 0.02989616303312547, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.02989616303312547 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3551020408163265, + "acc_stderr": 0.03063565515038764, + "acc_norm": 0.3551020408163265, + "acc_norm_stderr": 0.03063565515038764 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.23628691983122363, + "acc_stderr": 0.027652153144159256, + "acc_norm": 0.23628691983122363, + "acc_norm_stderr": 0.027652153144159256 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.23728813559322035, + "acc_stderr": 0.010865436690780267, + "acc_norm": 0.23728813559322035, + "acc_norm_stderr": 0.010865436690780267 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.030964517926923403, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.030964517926923403 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.24848484848484848, + "acc_stderr": 0.03374402644139405, + "acc_norm": 0.24848484848484848, + "acc_norm_stderr": 0.03374402644139405 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.26193390452876375, + "mc1_stderr": 0.01539211880501501, + "mc2": 0.42389862375590953, + "mc2_stderr": 0.015026306992823544 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3152302243211334, + "acc_stderr": 0.01597353492379446, + "acc_norm": 0.3695395513577332, + "acc_norm_stderr": 0.016594883405685424 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + 
"harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + 
"harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "nlpai-lab/kullm-polyglot-5.8b-v2", + "model_sha": "5981236c4fd4e624eca2326312d40419e6441256", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/norispace/marcoroni-kopenorcav3/result_2023-12-26 01:51:58.json b/norispace/marcoroni-kopenorcav3/result_2023-12-26 01:51:58.json new file mode 100644 index 0000000000000000000000000000000000000000..0c21656a86d0cede100419e1cf901440d8fd3c44 --- /dev/null +++ b/norispace/marcoroni-kopenorcav3/result_2023-12-26 01:51:58.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3660409556313993, + "acc_stderr": 0.014077223108470137, + "acc_norm": 0.4129692832764505, + "acc_norm_stderr": 0.014388344935398324 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3906592312288389, + "acc_stderr": 0.004869010152280753, + "acc_norm": 0.49970125473013344, + "acc_norm_stderr": 0.004989780520782244 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4678362573099415, + "acc_stderr": 0.03826882417660369, + "acc_norm": 0.4678362573099415, + "acc_norm_stderr": 0.03826882417660369 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5825242718446602, + "acc_stderr": 0.048828405482122375, + "acc_norm": 0.5825242718446602, + "acc_norm_stderr": 0.048828405482122375 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4725415070242657, + "acc_stderr": 0.017852981266633955, + "acc_norm": 0.4725415070242657, + "acc_norm_stderr": 0.017852981266633955 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.35555555555555557, 
+ "acc_stderr": 0.04135176749720386, + "acc_norm": 0.35555555555555557, + "acc_norm_stderr": 0.04135176749720386 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720683, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720683 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4085106382978723, + "acc_stderr": 0.03213418026701576, + "acc_norm": 0.4085106382978723, + "acc_norm_stderr": 0.03213418026701576 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39156626506024095, + "acc_stderr": 0.037998574544796354, + "acc_norm": 0.39156626506024095, + "acc_norm_stderr": 0.037998574544796354 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.47266881028938906, + "acc_stderr": 0.02835563356832818, + "acc_norm": 0.47266881028938906, + "acc_norm_stderr": 0.02835563356832818 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.42152466367713004, + "acc_stderr": 0.033141902221106564, + "acc_norm": 0.42152466367713004, + "acc_norm_stderr": 0.033141902221106564 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4351145038167939, + "acc_stderr": 0.04348208051644858, + "acc_norm": 0.4351145038167939, + "acc_norm_stderr": 0.04348208051644858 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.035402943770953675, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.035402943770953675 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4068965517241379, + "acc_stderr": 0.04093793981266237, + "acc_norm": 0.4068965517241379, + "acc_norm_stderr": 0.04093793981266237 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237653, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237653 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { 
+ "acc": 0.5672268907563025, + "acc_stderr": 0.03218358107742613, + "acc_norm": 0.5672268907563025, + "acc_norm_stderr": 0.03218358107742613 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.48205128205128206, + "acc_stderr": 0.025334667080954953, + "acc_norm": 0.48205128205128206, + "acc_norm_stderr": 0.025334667080954953 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.55, + "acc_stderr": 0.04999999999999999, + "acc_norm": 0.55, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6018518518518519, + "acc_stderr": 0.04732332615978814, + "acc_norm": 0.6018518518518519, + "acc_norm_stderr": 0.04732332615978814 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.03465304488406796, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.03465304488406796 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4935483870967742, + "acc_stderr": 0.02844163823354051, + "acc_norm": 0.4935483870967742, + "acc_norm_stderr": 0.02844163823354051 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7393162393162394, + "acc_stderr": 0.02876034895652341, + "acc_norm": 0.7393162393162394, + "acc_norm_stderr": 0.02876034895652341 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4377358490566038, + "acc_stderr": 0.030533338430467506, + "acc_norm": 0.4377358490566038, + "acc_norm_stderr": 0.030533338430467506 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5, + "acc_stderr": 0.04789131426105757, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04789131426105757 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.34074074074074073, + "acc_stderr": 0.028897748741131133, + "acc_norm": 0.34074074074074073, + "acc_norm_stderr": 0.028897748741131133 + }, + 
"harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + "acc_stderr": 0.037345356767871984, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.037345356767871984 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6119402985074627, + "acc_stderr": 0.03445789964362749, + "acc_norm": 0.6119402985074627, + "acc_norm_stderr": 0.03445789964362749 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.41040462427745666, + "acc_stderr": 0.03750757044895537, + "acc_norm": 0.41040462427745666, + "acc_norm_stderr": 0.03750757044895537 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.373015873015873, + "acc_stderr": 0.02490699045899257, + "acc_norm": 0.373015873015873, + "acc_norm_stderr": 0.02490699045899257 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.375, + "acc_stderr": 0.04048439222695598, + "acc_norm": 0.375, + "acc_norm_stderr": 0.04048439222695598 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.49421965317919075, + "acc_stderr": 0.026917296179149116, + "acc_norm": 0.49421965317919075, + "acc_norm_stderr": 0.026917296179149116 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5214723926380368, + "acc_stderr": 0.0392474687675113, + "acc_norm": 0.5214723926380368, + "acc_norm_stderr": 0.0392474687675113 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4567901234567901, + "acc_stderr": 0.02771666165019404, + "acc_norm": 0.4567901234567901, + "acc_norm_stderr": 0.02771666165019404 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + 
"harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5492227979274611, + "acc_stderr": 0.03590910952235525, + "acc_norm": 0.5492227979274611, + "acc_norm_stderr": 0.03590910952235525 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2982456140350877, + "acc_stderr": 0.04303684033537315, + "acc_norm": 0.2982456140350877, + "acc_norm_stderr": 0.04303684033537315 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5412844036697247, + "acc_stderr": 0.021364122533881688, + "acc_norm": 0.5412844036697247, + "acc_norm_stderr": 0.021364122533881688 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.40476190476190477, + "acc_stderr": 0.04390259265377561, + "acc_norm": 0.40476190476190477, + "acc_norm_stderr": 0.04390259265377561 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.477124183006536, + "acc_stderr": 0.028599936776089786, + "acc_norm": 0.477124183006536, + "acc_norm_stderr": 0.028599936776089786 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.47, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.47, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6198347107438017, + "acc_stderr": 0.04431324501968431, + "acc_norm": 0.6198347107438017, + "acc_norm_stderr": 0.04431324501968431 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4276315789473684, + "acc_stderr": 0.04026097083296559, + "acc_norm": 0.4276315789473684, + "acc_norm_stderr": 0.04026097083296559 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.380718954248366, + "acc_stderr": 0.019643801557924806, + "acc_norm": 0.380718954248366, + "acc_norm_stderr": 0.019643801557924806 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32978723404255317, + "acc_stderr": 0.028045946942042398, + "acc_norm": 0.32978723404255317, + "acc_norm_stderr": 0.028045946942042398 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4017857142857143, + "acc_stderr": 0.04653333146973646, + "acc_norm": 
0.4017857142857143, + "acc_norm_stderr": 0.04653333146973646 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4305555555555556, + "acc_stderr": 0.03376922151252335, + "acc_norm": 0.4305555555555556, + "acc_norm_stderr": 0.03376922151252335 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2122905027932961, + "acc_stderr": 0.013676644685831726, + "acc_norm": 0.2122905027932961, + "acc_norm_stderr": 0.013676644685831726 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3639705882352941, + "acc_stderr": 0.029227192460032025, + "acc_norm": 0.3639705882352941, + "acc_norm_stderr": 0.029227192460032025 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5387755102040817, + "acc_stderr": 0.03191282052669278, + "acc_norm": 0.5387755102040817, + "acc_norm_stderr": 0.03191282052669278 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6033755274261603, + "acc_stderr": 0.03184399873811225, + "acc_norm": 0.6033755274261603, + "acc_norm_stderr": 0.03184399873811225 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3239895697522816, + "acc_stderr": 0.011952840809646568, + "acc_norm": 0.3239895697522816, + "acc_norm_stderr": 0.011952840809646568 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4852941176470588, + "acc_stderr": 0.035077938347913236, + "acc_norm": 0.4852941176470588, + "acc_norm_stderr": 0.035077938347913236 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4727272727272727, + "acc_stderr": 0.03898531605579419, + "acc_norm": 0.4727272727272727, + "acc_norm_stderr": 0.03898531605579419 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 
0.30354957160342716, + "mc1_stderr": 0.016095884155386844, + "mc2": 0.47655278688381186, + "mc2_stderr": 0.01571570439093294 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4025974025974026, + "acc_stderr": 0.01686102048640778, + "acc_norm": 0.45808736717827625, + "acc_norm_stderr": 0.017129852117911144 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "norispace/marcoroni-kopenorcav3", + "model_sha": "9beb5bf9e2cdc666413d90c7886c1eda1ab740dd", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/norispace/marcoroni-openorca/result_2023-12-17 23:22:43.json b/norispace/marcoroni-openorca/result_2023-12-17 23:22:43.json new file mode 100644 index 0000000000000000000000000000000000000000..dc4b2151e5d2d7b6c6a507ef73ae88e6b562737a --- /dev/null +++ b/norispace/marcoroni-openorca/result_2023-12-17 23:22:43.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.20477815699658702, + "acc_stderr": 0.011792544338513407, + "acc_norm": 0.24744027303754265, + "acc_norm_stderr": 0.012610352663292673 + }, + "harness|ko_hellaswag|10": { + "acc": 0.2599083847839076, + 
"acc_stderr": 0.004376877619234108, + "acc_norm": 0.2613025293766182, + "acc_norm_stderr": 0.004384465219070753 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.3216374269005848, + "acc_stderr": 0.03582529442573122, + "acc_norm": 0.3216374269005848, + "acc_norm_stderr": 0.03582529442573122 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.17475728155339806, + "acc_stderr": 0.037601780060266196, + "acc_norm": 0.17475728155339806, + "acc_norm_stderr": 0.037601780060266196 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.23754789272030652, + "acc_stderr": 0.015218733046150191, + "acc_norm": 0.23754789272030652, + "acc_norm_stderr": 0.015218733046150191 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.18518518518518517, + "acc_stderr": 0.03355677216313142, + "acc_norm": 0.18518518518518517, + "acc_norm_stderr": 0.03355677216313142 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932268, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932268 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.26382978723404255, + "acc_stderr": 0.028809989854102987, + "acc_norm": 0.26382978723404255, + "acc_norm_stderr": 0.028809989854102987 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.28313253012048195, + "acc_stderr": 0.03507295431370518, + "acc_norm": 0.28313253012048195, + "acc_norm_stderr": 0.03507295431370518 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.1864951768488746, + "acc_stderr": 0.022122439772480757, + "acc_norm": 0.1864951768488746, + "acc_norm_stderr": 0.022122439772480757 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.31390134529147984, + "acc_stderr": 0.031146796482972465, + "acc_norm": 0.31390134529147984, + "acc_norm_stderr": 0.031146796482972465 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2595419847328244, + "acc_stderr": 0.03844876139785271, + "acc_norm": 0.2595419847328244, + "acc_norm_stderr": 0.03844876139785271 + }, + "harness|ko_mmlu_medical_genetics|5": { + 
"acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.17676767676767677, + "acc_stderr": 0.027178752639044915, + "acc_norm": 0.17676767676767677, + "acc_norm_stderr": 0.027178752639044915 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2413793103448276, + "acc_stderr": 0.03565998174135302, + "acc_norm": 0.2413793103448276, + "acc_norm_stderr": 0.03565998174135302 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237654, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237654 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.21008403361344538, + "acc_stderr": 0.026461398717471874, + "acc_norm": 0.21008403361344538, + "acc_norm_stderr": 0.026461398717471874 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.20256410256410257, + "acc_stderr": 0.020377660970371393, + "acc_norm": 0.20256410256410257, + "acc_norm_stderr": 0.020377660970371393 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.18, + "acc_stderr": 0.038612291966536955, + "acc_norm": 0.18, + "acc_norm_stderr": 0.038612291966536955 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.04236511258094633, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.04236511258094633 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.15270935960591134, + "acc_stderr": 0.025308904539380627, + "acc_norm": 0.15270935960591134, + "acc_norm_stderr": 0.025308904539380627 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.1774193548387097, + "acc_stderr": 0.021732540689329276, + "acc_norm": 0.1774193548387097, + "acc_norm_stderr": 0.021732540689329276 + }, + 
"harness|ko_mmlu_marketing|5": { + "acc": 0.2905982905982906, + "acc_stderr": 0.029745048572674057, + "acc_norm": 0.2905982905982906, + "acc_norm_stderr": 0.029745048572674057 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.21509433962264152, + "acc_stderr": 0.025288394502891377, + "acc_norm": 0.21509433962264152, + "acc_norm_stderr": 0.025288394502891377 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03955932861795833, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03955932861795833 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2111111111111111, + "acc_stderr": 0.024882116857655075, + "acc_norm": 0.2111111111111111, + "acc_norm_stderr": 0.024882116857655075 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.1986754966887417, + "acc_stderr": 0.03257847384436775, + "acc_norm": 0.1986754966887417, + "acc_norm_stderr": 0.03257847384436775 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.24378109452736318, + "acc_stderr": 0.030360490154014645, + "acc_norm": 0.24378109452736318, + "acc_norm_stderr": 0.030360490154014645 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.20809248554913296, + "acc_stderr": 0.030952890217749884, + "acc_norm": 0.20809248554913296, + "acc_norm_stderr": 0.030952890217749884 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.20899470899470898, + "acc_stderr": 0.020940481565334835, + "acc_norm": 0.20899470899470898, + "acc_norm_stderr": 0.020940481565334835 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2569444444444444, + "acc_stderr": 0.03653946969442099, + "acc_norm": 0.2569444444444444, + "acc_norm_stderr": 0.03653946969442099 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.2, + "acc_stderr": 0.040201512610368445, + "acc_norm": 0.2, + "acc_norm_stderr": 0.040201512610368445 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542129, + "acc_norm": 0.28, + 
"acc_norm_stderr": 0.04512608598542129 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.24855491329479767, + "acc_stderr": 0.023267528432100174, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 0.023267528432100174 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.22085889570552147, + "acc_stderr": 0.03259177392742178, + "acc_norm": 0.22085889570552147, + "acc_norm_stderr": 0.03259177392742178 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.21604938271604937, + "acc_stderr": 0.022899162918445796, + "acc_norm": 0.21604938271604937, + "acc_norm_stderr": 0.022899162918445796 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.19689119170984457, + "acc_stderr": 0.028697873971860677, + "acc_norm": 0.19689119170984457, + "acc_norm_stderr": 0.028697873971860677 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.039994238792813365, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.039994238792813365 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.1926605504587156, + "acc_stderr": 0.016909276884936097, + "acc_norm": 0.1926605504587156, + "acc_norm_stderr": 0.016909276884936097 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.04040610178208841, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.04040610178208841 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.02392915551735129, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.02392915551735129 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2396694214876033, + "acc_stderr": 
0.038968789850704164, + "acc_norm": 0.2396694214876033, + "acc_norm_stderr": 0.038968789850704164 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.17763157894736842, + "acc_stderr": 0.03110318238312338, + "acc_norm": 0.17763157894736842, + "acc_norm_stderr": 0.03110318238312338 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.25, + "acc_stderr": 0.01751781884501444, + "acc_norm": 0.25, + "acc_norm_stderr": 0.01751781884501444 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.23404255319148937, + "acc_stderr": 0.025257861359432414, + "acc_norm": 0.23404255319148937, + "acc_norm_stderr": 0.025257861359432414 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3125, + "acc_stderr": 0.043994650575715215, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.043994650575715215 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.1527777777777778, + "acc_stderr": 0.024536326026134238, + "acc_norm": 0.1527777777777778, + "acc_norm_stderr": 0.024536326026134238 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23798882681564246, + "acc_stderr": 0.014242630070574892, + "acc_norm": 0.23798882681564246, + "acc_norm_stderr": 0.014242630070574892 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.18382352941176472, + "acc_stderr": 0.02352924218519311, + "acc_norm": 0.18382352941176472, + "acc_norm_stderr": 0.02352924218519311 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.18775510204081633, + "acc_stderr": 0.025000256039546198, + "acc_norm": 0.18775510204081633, + "acc_norm_stderr": 0.025000256039546198 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 
0.270042194092827, + "acc_stderr": 0.028900721906293426, + "acc_norm": 0.270042194092827, + "acc_norm_stderr": 0.028900721906293426 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2457627118644068, + "acc_stderr": 0.010996156635142692, + "acc_norm": 0.2457627118644068, + "acc_norm_stderr": 0.010996156635142692 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.25, + "acc_stderr": 0.03039153369274154, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03039153369274154 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03225078108306289, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03225078108306289 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2521419828641371, + "mc1_stderr": 0.015201522246299956, + "mc2": 0.5171681414876445, + "mc2_stderr": 0.016488373677157792 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.13105076741440377, + "acc_stderr": 0.011601971778212315, + "acc_norm": 0.40731995277449823, + "acc_norm_stderr": 0.01689245669519127 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 
1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "norispace/marcoroni-openorca", + "model_sha": "dae17311f94b03522dc99a87484652d0b919350b", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + 
"override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/oh-yeontaek/llama-2-13B-LoRA-assemble/result_2023-09-28 00:33:01.json b/oh-yeontaek/llama-2-13B-LoRA-assemble/result_2023-09-28 00:33:01.json new file mode 100644 index 0000000000000000000000000000000000000000..da3597044366fe27adba376c30139f45e22a767c --- /dev/null +++ b/oh-yeontaek/llama-2-13B-LoRA-assemble/result_2023-09-28 00:33:01.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.35921501706484643, + "acc_stderr": 0.014020224155839162, + "acc_norm": 0.4052901023890785, + "acc_norm_stderr": 0.014346869060229325 + }, + "harness|ko_hellaswag|10": { + "acc": 0.36496713802031466, + "acc_stderr": 0.004804370563856225, + "acc_norm": 0.4689304919338777, + "acc_norm_stderr": 0.004980138679161039 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.52046783625731, + "acc_stderr": 0.0383161053282193, + "acc_norm": 0.52046783625731, + "acc_norm_stderr": 0.0383161053282193 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5533980582524272, + "acc_stderr": 0.04922424153458934, + "acc_norm": 0.5533980582524272, + "acc_norm_stderr": 0.04922424153458934 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.49936143039591313, + "acc_stderr": 0.01787994891443166, + "acc_norm": 0.49936143039591313, + "acc_norm_stderr": 0.01787994891443166 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34814814814814815, + "acc_stderr": 0.041153246103369526, + "acc_norm": 0.34814814814814815, + "acc_norm_stderr": 0.041153246103369526 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39574468085106385, + "acc_stderr": 0.03196758697835362, + "acc_norm": 0.39574468085106385, + "acc_norm_stderr": 0.03196758697835362 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4036144578313253, + 
"acc_stderr": 0.03819486140758397, + "acc_norm": 0.4036144578313253, + "acc_norm_stderr": 0.03819486140758397 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.42765273311897106, + "acc_stderr": 0.028099240775809563, + "acc_norm": 0.42765273311897106, + "acc_norm_stderr": 0.028099240775809563 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.47085201793721976, + "acc_stderr": 0.03350073248773404, + "acc_norm": 0.47085201793721976, + "acc_norm_stderr": 0.03350073248773404 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.45038167938931295, + "acc_stderr": 0.04363643698524779, + "acc_norm": 0.45038167938931295, + "acc_norm_stderr": 0.04363643698524779 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5656565656565656, + "acc_stderr": 0.03531505879359182, + "acc_norm": 0.5656565656565656, + "acc_norm_stderr": 0.03531505879359182 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.42758620689655175, + "acc_stderr": 0.04122737111370331, + "acc_norm": 0.42758620689655175, + "acc_norm_stderr": 0.04122737111370331 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.17647058823529413, + "acc_stderr": 0.0379328118530781, + "acc_norm": 0.17647058823529413, + "acc_norm_stderr": 0.0379328118530781 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4579831932773109, + "acc_stderr": 0.03236361111951941, + "acc_norm": 0.4579831932773109, + "acc_norm_stderr": 0.03236361111951941 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4461538461538462, + "acc_stderr": 0.025203571773028333, + "acc_norm": 0.4461538461538462, + "acc_norm_stderr": 0.025203571773028333 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.47, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.47, + "acc_norm_stderr": 0.050161355804659205 + }, + 
"harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.33004926108374383, + "acc_stderr": 0.03308530426228258, + "acc_norm": 0.33004926108374383, + "acc_norm_stderr": 0.03308530426228258 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.47419354838709676, + "acc_stderr": 0.028406095057653326, + "acc_norm": 0.47419354838709676, + "acc_norm_stderr": 0.028406095057653326 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6324786324786325, + "acc_stderr": 0.031585391577456365, + "acc_norm": 0.6324786324786325, + "acc_norm_stderr": 0.031585391577456365 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4830188679245283, + "acc_stderr": 0.030755120364119905, + "acc_norm": 0.4830188679245283, + "acc_norm_stderr": 0.030755120364119905 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5, + "acc_stderr": 0.04789131426105757, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04789131426105757 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.29259259259259257, + "acc_stderr": 0.02773896963217609, + "acc_norm": 0.29259259259259257, + "acc_norm_stderr": 0.02773896963217609 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.038020397601079024, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.038020397601079024 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5621890547263682, + "acc_stderr": 0.0350808011219984, + "acc_norm": 0.5621890547263682, + "acc_norm_stderr": 0.0350808011219984 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.37572254335260113, + "acc_stderr": 0.036928207672648664, + "acc_norm": 0.37572254335260113, + "acc_norm_stderr": 0.036928207672648664 + }, + 
"harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3306878306878307, + "acc_stderr": 0.024229965298425082, + "acc_norm": 0.3306878306878307, + "acc_norm_stderr": 0.024229965298425082 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3680555555555556, + "acc_stderr": 0.040329990539607195, + "acc_norm": 0.3680555555555556, + "acc_norm_stderr": 0.040329990539607195 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5086705202312138, + "acc_stderr": 0.0269150473553698, + "acc_norm": 0.5086705202312138, + "acc_norm_stderr": 0.0269150473553698 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4294478527607362, + "acc_stderr": 0.03889066619112723, + "acc_norm": 0.4294478527607362, + "acc_norm_stderr": 0.03889066619112723 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4506172839506173, + "acc_stderr": 0.027684721415656203, + "acc_norm": 0.4506172839506173, + "acc_norm_stderr": 0.027684721415656203 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5181347150259067, + "acc_stderr": 0.036060650018329185, + "acc_norm": 0.5181347150259067, + "acc_norm_stderr": 0.036060650018329185 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.20175438596491227, + "acc_stderr": 0.037752050135836386, + "acc_norm": 0.20175438596491227, + "acc_norm_stderr": 0.037752050135836386 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.47706422018348627, + "acc_stderr": 0.0214147570581755, + "acc_norm": 0.47706422018348627, + "acc_norm_stderr": 
0.0214147570581755 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.042163702135578345, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.042163702135578345 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.02843109544417664, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.02843109544417664 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.043913262867240704, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.043913262867240704 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3684210526315789, + "acc_stderr": 0.03925523381052932, + "acc_norm": 0.3684210526315789, + "acc_norm_stderr": 0.03925523381052932 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3480392156862745, + "acc_stderr": 0.019270998708223977, + "acc_norm": 0.3480392156862745, + "acc_norm_stderr": 0.019270998708223977 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.35106382978723405, + "acc_stderr": 0.02847350127296376, + "acc_norm": 0.35106382978723405, + "acc_norm_stderr": 0.02847350127296376 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.22321428571428573, + "acc_stderr": 0.039523019677025116, + "acc_norm": 0.22321428571428573, + "acc_norm_stderr": 0.039523019677025116 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.27314814814814814, + "acc_stderr": 0.03038805130167812, + "acc_norm": 0.27314814814814814, + "acc_norm_stderr": 0.03038805130167812 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.3229050279329609, + "acc_stderr": 0.01563844038024149, + "acc_norm": 0.3229050279329609, + "acc_norm_stderr": 0.01563844038024149 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + 
"acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3492647058823529, + "acc_stderr": 0.02895975519682486, + "acc_norm": 0.3492647058823529, + "acc_norm_stderr": 0.02895975519682486 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5265306122448979, + "acc_stderr": 0.03196412734523272, + "acc_norm": 0.5265306122448979, + "acc_norm_stderr": 0.03196412734523272 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.569620253164557, + "acc_stderr": 0.03223017195937599, + "acc_norm": 0.569620253164557, + "acc_norm_stderr": 0.03223017195937599 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.32529335071707954, + "acc_stderr": 0.011965311536571531, + "acc_norm": 0.32529335071707954, + "acc_norm_stderr": 0.011965311536571531 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4803921568627451, + "acc_stderr": 0.03506612560524866, + "acc_norm": 0.4803921568627451, + "acc_norm_stderr": 0.03506612560524866 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.48484848484848486, + "acc_stderr": 0.03902551007374449, + "acc_norm": 0.48484848484848486, + "acc_norm_stderr": 0.03902551007374449 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.35128518971848227, + "mc1_stderr": 0.0167113581635444, + "mc2": 0.5184394133098864, + "mc2_stderr": 0.01600771387375644 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.40613931523022434, + "acc_stderr": 0.016884749503191385, + "acc_norm": 0.41440377804014167, + "acc_norm_stderr": 0.01693658338394363 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + 
"harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + 
"harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "oh-yeontaek/llama-2-13B-LoRA-assemble", + "model_sha": "85bb49d333dba4a08b051418663d16853ce30cee", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/oneonlee/KoSOLAR-v0.2-gugutypus-10.7B/result_2024-01-30 13:49:20.json b/oneonlee/KoSOLAR-v0.2-gugutypus-10.7B/result_2024-01-30 13:49:20.json new file mode 100644 index 0000000000000000000000000000000000000000..17ae9e5abacf5441d89a5f8107fedab866ff3627 --- /dev/null +++ b/oneonlee/KoSOLAR-v0.2-gugutypus-10.7B/result_2024-01-30 13:49:20.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.42662116040955633, + "acc_stderr": 0.014453185592920293, + "acc_norm": 0.4778156996587031, + "acc_norm_stderr": 0.014597001927076133 + }, + "harness|ko_hellaswag|10": { + "acc": 0.43377813184624575, + "acc_stderr": 0.004945824056501825, + "acc_norm": 0.5828520215096594, + "acc_norm_stderr": 0.00492080031323274 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.49707602339181284, + "acc_stderr": 0.03834759370936839, + "acc_norm": 0.49707602339181284, + "acc_norm_stderr": 0.03834759370936839 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6213592233009708, + "acc_stderr": 0.04802694698258974, + "acc_norm": 0.6213592233009708, + "acc_norm_stderr": 0.04802694698258974 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5274584929757343, + 
"acc_stderr": 0.017852981266633938, + "acc_norm": 0.5274584929757343, + "acc_norm_stderr": 0.017852981266633938 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3851851851851852, + "acc_stderr": 0.042039210401562783, + "acc_norm": 0.3851851851851852, + "acc_norm_stderr": 0.042039210401562783 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.18, + "acc_stderr": 0.03861229196653696, + "acc_norm": 0.18, + "acc_norm_stderr": 0.03861229196653696 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.46382978723404256, + "acc_stderr": 0.03260038511835771, + "acc_norm": 0.46382978723404256, + "acc_norm_stderr": 0.03260038511835771 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.40963855421686746, + "acc_stderr": 0.03828401115079023, + "acc_norm": 0.40963855421686746, + "acc_norm_stderr": 0.03828401115079023 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5144694533762058, + "acc_stderr": 0.02838619808417768, + "acc_norm": 0.5144694533762058, + "acc_norm_stderr": 0.02838619808417768 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.48878923766816146, + "acc_stderr": 0.033549366530984746, + "acc_norm": 0.48878923766816146, + "acc_norm_stderr": 0.033549366530984746 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5419847328244275, + "acc_stderr": 0.04369802690578757, + "acc_norm": 0.5419847328244275, + "acc_norm_stderr": 0.04369802690578757 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6616161616161617, + "acc_stderr": 0.03371124142626303, + "acc_norm": 0.6616161616161617, + "acc_norm_stderr": 0.03371124142626303 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.04104269211806232, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.04104269211806232 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 
0.3235294117647059, + "acc_stderr": 0.046550104113196177, + "acc_norm": 0.3235294117647059, + "acc_norm_stderr": 0.046550104113196177 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5798319327731093, + "acc_stderr": 0.03206183783236152, + "acc_norm": 0.5798319327731093, + "acc_norm_stderr": 0.03206183783236152 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.517948717948718, + "acc_stderr": 0.02533466708095489, + "acc_norm": 0.517948717948718, + "acc_norm_stderr": 0.02533466708095489 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.52, + "acc_stderr": 0.05021167315686779, + "acc_norm": 0.52, + "acc_norm_stderr": 0.05021167315686779 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909281, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909281 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5092592592592593, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.5092592592592593, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3645320197044335, + "acc_stderr": 0.0338640574606209, + "acc_norm": 0.3645320197044335, + "acc_norm_stderr": 0.0338640574606209 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4967741935483871, + "acc_stderr": 0.02844341422643833, + "acc_norm": 0.4967741935483871, + "acc_norm_stderr": 0.02844341422643833 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7307692307692307, + "acc_stderr": 0.029058588303748845, + "acc_norm": 0.7307692307692307, + "acc_norm_stderr": 0.029058588303748845 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5169811320754717, + "acc_stderr": 0.030755120364119898, + "acc_norm": 0.5169811320754717, + "acc_norm_stderr": 0.030755120364119898 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4909090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.4909090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + 
"harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.337037037037037, + "acc_stderr": 0.028820884666253252, + "acc_norm": 0.337037037037037, + "acc_norm_stderr": 0.028820884666253252 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.03710185726119995, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.03710185726119995 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.03333333333333333, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.03333333333333333 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4682080924855491, + "acc_stderr": 0.03804749744364764, + "acc_norm": 0.4682080924855491, + "acc_norm_stderr": 0.03804749744364764 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3492063492063492, + "acc_stderr": 0.02455229220934266, + "acc_norm": 0.3492063492063492, + "acc_norm_stderr": 0.02455229220934266 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4305555555555556, + "acc_stderr": 0.04140685639111503, + "acc_norm": 0.4305555555555556, + "acc_norm_stderr": 0.04140685639111503 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.66, + "acc_stderr": 0.04760952285695237, + "acc_norm": 0.66, + "acc_norm_stderr": 0.04760952285695237 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5375722543352601, + "acc_stderr": 0.02684298551961537, + "acc_norm": 0.5375722543352601, + "acc_norm_stderr": 0.02684298551961537 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4601226993865031, + "acc_stderr": 0.039158572914369714, + "acc_norm": 0.4601226993865031, + "acc_norm_stderr": 0.039158572914369714 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.49691358024691357, + "acc_stderr": 0.027820214158594384, + "acc_norm": 0.49691358024691357, + "acc_norm_stderr": 
0.027820214158594384 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6269430051813472, + "acc_stderr": 0.03490205592048574, + "acc_norm": 0.6269430051813472, + "acc_norm_stderr": 0.03490205592048574 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.34210526315789475, + "acc_stderr": 0.04462917535336938, + "acc_norm": 0.34210526315789475, + "acc_norm_stderr": 0.04462917535336938 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5908256880733945, + "acc_stderr": 0.02108067026443373, + "acc_norm": 0.5908256880733945, + "acc_norm_stderr": 0.02108067026443373 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.47619047619047616, + "acc_stderr": 0.04467062628403273, + "acc_norm": 0.47619047619047616, + "acc_norm_stderr": 0.04467062628403273 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5065359477124183, + "acc_stderr": 0.028627470550556054, + "acc_norm": 0.5065359477124183, + "acc_norm_stderr": 0.028627470550556054 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5702479338842975, + "acc_stderr": 0.04519082021319772, + "acc_norm": 0.5702479338842975, + "acc_norm_stderr": 0.04519082021319772 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4342105263157895, + "acc_stderr": 0.04033565667848319, + "acc_norm": 0.4342105263157895, + "acc_norm_stderr": 0.04033565667848319 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.01994491413687358, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.01994491413687358 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.36879432624113473, + "acc_stderr": 0.028782227561347243, + "acc_norm": 
0.36879432624113473, + "acc_norm_stderr": 0.028782227561347243 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.04547960999764376, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.04547960999764376 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5046296296296297, + "acc_stderr": 0.03409825519163572, + "acc_norm": 0.5046296296296297, + "acc_norm_stderr": 0.03409825519163572 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24581005586592178, + "acc_stderr": 0.014400296429225598, + "acc_norm": 0.24581005586592178, + "acc_norm_stderr": 0.014400296429225598 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.62, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.62, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.45588235294117646, + "acc_stderr": 0.030254372573976687, + "acc_norm": 0.45588235294117646, + "acc_norm_stderr": 0.030254372573976687 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6040816326530613, + "acc_stderr": 0.03130802899065686, + "acc_norm": 0.6040816326530613, + "acc_norm_stderr": 0.03130802899065686 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6877637130801688, + "acc_stderr": 0.030165137867847008, + "acc_norm": 0.6877637130801688, + "acc_norm_stderr": 0.030165137867847008 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3917861799217731, + "acc_stderr": 0.012467564418145114, + "acc_norm": 0.3917861799217731, + "acc_norm_stderr": 0.012467564418145114 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5784313725490197, + "acc_stderr": 0.034658681963807614, + "acc_norm": 0.5784313725490197, + "acc_norm_stderr": 0.034658681963807614 + }, + 
"harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5696969696969697, + "acc_stderr": 0.03866225962879077, + "acc_norm": 0.5696969696969697, + "acc_norm_stderr": 0.03866225962879077 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.31211750305997554, + "mc1_stderr": 0.016220756769520905, + "mc2": 0.48305439632404645, + "mc2_stderr": 0.01546150208213013 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.47461629279811096, + "acc_stderr": 0.017168187201429246, + "acc_norm": 0.5419126328217237, + "acc_norm_stderr": 0.017129852117911147 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + 
"harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "oneonlee/KoSOLAR-v0.2-gugutypus-10.7B", + "model_sha": "56841d5eb18e91a4d622265a349a83b0f49ba08a", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/oneonlee/LDCC-SOLAR-gugutypus-10.7B/result_2024-02-07 12:19:50.json b/oneonlee/LDCC-SOLAR-gugutypus-10.7B/result_2024-02-07 12:19:50.json new file mode 100644 index 0000000000000000000000000000000000000000..023ca300938e1a7c88fc68ca13181f8f46adea05 --- /dev/null +++ b/oneonlee/LDCC-SOLAR-gugutypus-10.7B/result_2024-02-07 12:19:50.json @@ -0,0 +1,444 @@ +{ + "results": { + 
"harness|ko_arc_challenge|25": { + "acc": 0.3993174061433447, + "acc_stderr": 0.014312094557946707, + "acc_norm": 0.4590443686006826, + "acc_norm_stderr": 0.014562291073601227 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4186417048396734, + "acc_stderr": 0.004923281841828515, + "acc_norm": 0.5545708026289584, + "acc_norm_stderr": 0.004959973514772513 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.49707602339181284, + "acc_stderr": 0.03834759370936839, + "acc_norm": 0.49707602339181284, + "acc_norm_stderr": 0.03834759370936839 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6504854368932039, + "acc_stderr": 0.047211885060971716, + "acc_norm": 0.6504854368932039, + "acc_norm_stderr": 0.047211885060971716 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5632183908045977, + "acc_stderr": 0.017736470837800698, + "acc_norm": 0.5632183908045977, + "acc_norm_stderr": 0.017736470837800698 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.43703703703703706, + "acc_stderr": 0.04284958639753399, + "acc_norm": 0.43703703703703706, + "acc_norm_stderr": 0.04284958639753399 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816508, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816508 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.43829787234042555, + "acc_stderr": 0.03243618636108101, + "acc_norm": 0.43829787234042555, + "acc_norm_stderr": 0.03243618636108101 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.41566265060240964, + "acc_stderr": 0.03836722176598052, + "acc_norm": 0.41566265060240964, + "acc_norm_stderr": 0.03836722176598052 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5691318327974276, + "acc_stderr": 0.028125340983972714, + "acc_norm": 0.5691318327974276, + "acc_norm_stderr": 0.028125340983972714 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4618834080717489, + "acc_stderr": 0.033460150119732274, + "acc_norm": 0.4618834080717489, + "acc_norm_stderr": 0.033460150119732274 
+ }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5801526717557252, + "acc_stderr": 0.04328577215262972, + "acc_norm": 0.5801526717557252, + "acc_norm_stderr": 0.04328577215262972 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6565656565656566, + "acc_stderr": 0.03383201223244443, + "acc_norm": 0.6565656565656566, + "acc_norm_stderr": 0.03383201223244443 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3931034482758621, + "acc_stderr": 0.0407032901370707, + "acc_norm": 0.3931034482758621, + "acc_norm_stderr": 0.0407032901370707 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3627450980392157, + "acc_stderr": 0.047840607041056527, + "acc_norm": 0.3627450980392157, + "acc_norm_stderr": 0.047840607041056527 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5798319327731093, + "acc_stderr": 0.03206183783236152, + "acc_norm": 0.5798319327731093, + "acc_norm_stderr": 0.03206183783236152 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5, + "acc_stderr": 0.02535100632816969, + "acc_norm": 0.5, + "acc_norm_stderr": 0.02535100632816969 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5462962962962963, + "acc_stderr": 0.048129173245368216, + "acc_norm": 0.5462962962962963, + "acc_norm_stderr": 0.048129173245368216 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4039408866995074, + "acc_stderr": 0.03452453903822039, + "acc_norm": 0.4039408866995074, + "acc_norm_stderr": 0.03452453903822039 + }, + 
"harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5225806451612903, + "acc_stderr": 0.02841498501970786, + "acc_norm": 0.5225806451612903, + "acc_norm_stderr": 0.02841498501970786 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7478632478632479, + "acc_stderr": 0.02844796547623102, + "acc_norm": 0.7478632478632479, + "acc_norm_stderr": 0.02844796547623102 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4528301886792453, + "acc_stderr": 0.030635627957961823, + "acc_norm": 0.4528301886792453, + "acc_norm_stderr": 0.030635627957961823 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5181818181818182, + "acc_stderr": 0.04785964010794915, + "acc_norm": 0.5181818181818182, + "acc_norm_stderr": 0.04785964010794915 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3296296296296296, + "acc_stderr": 0.028661201116524572, + "acc_norm": 0.3296296296296296, + "acc_norm_stderr": 0.028661201116524572 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.32450331125827814, + "acc_stderr": 0.038227469376587525, + "acc_norm": 0.32450331125827814, + "acc_norm_stderr": 0.038227469376587525 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6019900497512438, + "acc_stderr": 0.034611994290400135, + "acc_norm": 0.6019900497512438, + "acc_norm_stderr": 0.034611994290400135 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4682080924855491, + "acc_stderr": 0.03804749744364763, + "acc_norm": 0.4682080924855491, + "acc_norm_stderr": 0.03804749744364763 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.37566137566137564, + "acc_stderr": 0.02494236893115978, + "acc_norm": 0.37566137566137564, + "acc_norm_stderr": 0.02494236893115978 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4375, + "acc_stderr": 0.04148415739394154, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.04148415739394154 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + 
"acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.61, + "acc_stderr": 0.049020713000019756, + "acc_norm": 0.61, + "acc_norm_stderr": 0.049020713000019756 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5491329479768786, + "acc_stderr": 0.02678881193156276, + "acc_norm": 0.5491329479768786, + "acc_norm_stderr": 0.02678881193156276 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4601226993865031, + "acc_stderr": 0.0391585729143697, + "acc_norm": 0.4601226993865031, + "acc_norm_stderr": 0.0391585729143697 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5123456790123457, + "acc_stderr": 0.027812262269327242, + "acc_norm": 0.5123456790123457, + "acc_norm_stderr": 0.027812262269327242 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6269430051813472, + "acc_stderr": 0.03490205592048574, + "acc_norm": 0.6269430051813472, + "acc_norm_stderr": 0.03490205592048574 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.38596491228070173, + "acc_stderr": 0.045796394220704355, + "acc_norm": 0.38596491228070173, + "acc_norm_stderr": 0.045796394220704355 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5871559633027523, + "acc_stderr": 0.021109128133413927, + "acc_norm": 0.5871559633027523, + "acc_norm_stderr": 0.021109128133413927 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.47619047619047616, + "acc_stderr": 0.04467062628403273, + "acc_norm": 0.47619047619047616, + "acc_norm_stderr": 0.04467062628403273 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5326797385620915, + "acc_stderr": 0.028568699752225868, + "acc_norm": 0.5326797385620915, + "acc_norm_stderr": 0.028568699752225868 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.49, 
+ "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6033057851239669, + "acc_stderr": 0.044658697805310094, + "acc_norm": 0.6033057851239669, + "acc_norm_stderr": 0.044658697805310094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5197368421052632, + "acc_stderr": 0.040657710025626036, + "acc_norm": 0.5197368421052632, + "acc_norm_stderr": 0.040657710025626036 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4215686274509804, + "acc_stderr": 0.019977422600227467, + "acc_norm": 0.4215686274509804, + "acc_norm_stderr": 0.019977422600227467 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3617021276595745, + "acc_stderr": 0.028663820147199492, + "acc_norm": 0.3617021276595745, + "acc_norm_stderr": 0.028663820147199492 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3125, + "acc_stderr": 0.043994650575715215, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.043994650575715215 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4537037037037037, + "acc_stderr": 0.03395322726375797, + "acc_norm": 0.4537037037037037, + "acc_norm_stderr": 0.03395322726375797 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.19888268156424582, + "acc_stderr": 0.013349892983092512, + "acc_norm": 0.19888268156424582, + "acc_norm_stderr": 0.013349892983092512 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.41, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.67, + "acc_stderr": 0.04725815626252609, + "acc_norm": 0.67, + "acc_norm_stderr": 0.04725815626252609 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.46691176470588236, + "acc_stderr": 0.030306257722468317, + "acc_norm": 0.46691176470588236, + "acc_norm_stderr": 0.030306257722468317 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4857142857142857, + "acc_stderr": 
0.03199615232806287, + "acc_norm": 0.4857142857142857, + "acc_norm_stderr": 0.03199615232806287 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6540084388185654, + "acc_stderr": 0.03096481058878671, + "acc_norm": 0.6540084388185654, + "acc_norm_stderr": 0.03096481058878671 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.378748370273794, + "acc_stderr": 0.012389052105003738, + "acc_norm": 0.378748370273794, + "acc_norm_stderr": 0.012389052105003738 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5196078431372549, + "acc_stderr": 0.03506612560524866, + "acc_norm": 0.5196078431372549, + "acc_norm_stderr": 0.03506612560524866 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5575757575757576, + "acc_stderr": 0.038783721137112745, + "acc_norm": 0.5575757575757576, + "acc_norm_stderr": 0.038783721137112745 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.32313341493268055, + "mc1_stderr": 0.016371836286454604, + "mc2": 0.48929864528589495, + "mc2_stderr": 0.01557587457858491 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4332939787485242, + "acc_stderr": 0.017036683641893098, + "acc_norm": 0.4899645808736718, + "acc_norm_stderr": 0.017186891286894053 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + 
"harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + 
"model_name": "oneonlee/LDCC-SOLAR-gugutypus-10.7B", + "model_sha": "aa3fc92ac789814857b71f0c7ee557de45626e01", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/oopsung/Yi-Ko-6B-ENin-test-v1/result_2023-12-13 08:24:14.json b/oopsung/Yi-Ko-6B-ENin-test-v1/result_2023-12-13 08:24:14.json new file mode 100644 index 0000000000000000000000000000000000000000..5395b0339de309577b33fb57a690f40a1ec4cd21 --- /dev/null +++ b/oopsung/Yi-Ko-6B-ENin-test-v1/result_2023-12-13 08:24:14.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3395904436860068, + "acc_stderr": 0.013839039762820166, + "acc_norm": 0.4061433447098976, + "acc_norm_stderr": 0.014351656690097858 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3959370643298148, + "acc_stderr": 0.004880515431323158, + "acc_norm": 0.5326628161720772, + "acc_norm_stderr": 0.004979123236507971 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.52046783625731, + "acc_stderr": 0.038316105328219316, + "acc_norm": 0.52046783625731, + "acc_norm_stderr": 0.038316105328219316 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5436893203883495, + "acc_stderr": 0.049318019942204146, + "acc_norm": 0.5436893203883495, + "acc_norm_stderr": 0.049318019942204146 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5363984674329502, + "acc_stderr": 0.017832524079593258, + "acc_norm": 0.5363984674329502, + "acc_norm_stderr": 0.017832524079593258 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45185185185185184, + "acc_stderr": 0.042992689054808624, + "acc_norm": 0.45185185185185184, + "acc_norm_stderr": 0.042992689054808624 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206824, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206824 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 
0.40425531914893614, + "acc_stderr": 0.032081157507886836, + "acc_norm": 0.40425531914893614, + "acc_norm_stderr": 0.032081157507886836 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39156626506024095, + "acc_stderr": 0.037998574544796354, + "acc_norm": 0.39156626506024095, + "acc_norm_stderr": 0.037998574544796354 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5112540192926045, + "acc_stderr": 0.028390897396863526, + "acc_norm": 0.5112540192926045, + "acc_norm_stderr": 0.028390897396863526 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5022421524663677, + "acc_stderr": 0.033557465352232634, + "acc_norm": 0.5022421524663677, + "acc_norm_stderr": 0.033557465352232634 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5190839694656488, + "acc_stderr": 0.04382094705550988, + "acc_norm": 0.5190839694656488, + "acc_norm_stderr": 0.04382094705550988 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6515151515151515, + "acc_stderr": 0.03394853965156402, + "acc_norm": 0.6515151515151515, + "acc_norm_stderr": 0.03394853965156402 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.503448275862069, + "acc_stderr": 0.04166567577101579, + "acc_norm": 0.503448275862069, + "acc_norm_stderr": 0.04166567577101579 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.04617034827006717, + "acc_norm": 0.3137254901960784, + "acc_norm_stderr": 0.04617034827006717 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5168067226890757, + "acc_stderr": 0.03246013680375308, + "acc_norm": 0.5168067226890757, + "acc_norm_stderr": 0.03246013680375308 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.47692307692307695, + "acc_stderr": 0.025323990861736118, + "acc_norm": 0.47692307692307695, + "acc_norm_stderr": 
0.025323990861736118 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.43842364532019706, + "acc_stderr": 0.03491207857486518, + "acc_norm": 0.43842364532019706, + "acc_norm_stderr": 0.03491207857486518 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5258064516129032, + "acc_stderr": 0.028406095057653333, + "acc_norm": 0.5258064516129032, + "acc_norm_stderr": 0.028406095057653333 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7264957264957265, + "acc_stderr": 0.029202540153431177, + "acc_norm": 0.7264957264957265, + "acc_norm_stderr": 0.029202540153431177 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5283018867924528, + "acc_stderr": 0.0307235352490061, + "acc_norm": 0.5283018867924528, + "acc_norm_stderr": 0.0307235352490061 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5, + "acc_stderr": 0.04789131426105757, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04789131426105757 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3037037037037037, + "acc_stderr": 0.02803792996911498, + "acc_norm": 0.3037037037037037, + "acc_norm_stderr": 0.02803792996911498 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3509933774834437, + "acc_stderr": 0.03896981964257374, + "acc_norm": 0.3509933774834437, + "acc_norm_stderr": 0.03896981964257374 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6268656716417911, + "acc_stderr": 0.034198326081760065, + "acc_norm": 0.6268656716417911, + "acc_norm_stderr": 
0.034198326081760065 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.44508670520231214, + "acc_stderr": 0.037894017602836484, + "acc_norm": 0.44508670520231214, + "acc_norm_stderr": 0.037894017602836484 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3201058201058201, + "acc_stderr": 0.024026846392873502, + "acc_norm": 0.3201058201058201, + "acc_norm_stderr": 0.024026846392873502 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4375, + "acc_stderr": 0.04148415739394154, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.04148415739394154 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.35, + "acc_stderr": 0.04793724854411022, + "acc_norm": 0.35, + "acc_norm_stderr": 0.04793724854411022 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.68, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.68, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5028901734104047, + "acc_stderr": 0.02691864538323901, + "acc_norm": 0.5028901734104047, + "acc_norm_stderr": 0.02691864538323901 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4785276073619632, + "acc_stderr": 0.03924746876751129, + "acc_norm": 0.4785276073619632, + "acc_norm_stderr": 0.03924746876751129 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4845679012345679, + "acc_stderr": 0.02780749004427619, + "acc_norm": 0.4845679012345679, + "acc_norm_stderr": 0.02780749004427619 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6010362694300518, + "acc_stderr": 0.03533999094065696, + "acc_norm": 0.6010362694300518, + "acc_norm_stderr": 0.03533999094065696 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.35964912280701755, + "acc_stderr": 0.04514496132873632, + "acc_norm": 0.35964912280701755, + "acc_norm_stderr": 
0.04514496132873632 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6275229357798165, + "acc_stderr": 0.020728368457638494, + "acc_norm": 0.6275229357798165, + "acc_norm_stderr": 0.020728368457638494 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.039325376803928704, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.039325376803928704 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5392156862745098, + "acc_stderr": 0.028541722692618877, + "acc_norm": 0.5392156862745098, + "acc_norm_stderr": 0.028541722692618877 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6115702479338843, + "acc_stderr": 0.04449270350068383, + "acc_norm": 0.6115702479338843, + "acc_norm_stderr": 0.04449270350068383 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.46710526315789475, + "acc_stderr": 0.040601270352363966, + "acc_norm": 0.46710526315789475, + "acc_norm_stderr": 0.040601270352363966 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4150326797385621, + "acc_stderr": 0.019933627776857425, + "acc_norm": 0.4150326797385621, + "acc_norm_stderr": 0.019933627776857425 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.36524822695035464, + "acc_stderr": 0.02872386385328128, + "acc_norm": 0.36524822695035464, + "acc_norm_stderr": 0.02872386385328128 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25892857142857145, + "acc_stderr": 0.041577515398656284, + "acc_norm": 0.25892857142857145, + "acc_norm_stderr": 0.041577515398656284 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.032568505702936464, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.032568505702936464 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24134078212290502, + "acc_stderr": 
0.014310999547961441, + "acc_norm": 0.24134078212290502, + "acc_norm_stderr": 0.014310999547961441 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956913, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956913 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.45588235294117646, + "acc_stderr": 0.030254372573976694, + "acc_norm": 0.45588235294117646, + "acc_norm_stderr": 0.030254372573976694 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4530612244897959, + "acc_stderr": 0.03186785930004129, + "acc_norm": 0.4530612244897959, + "acc_norm_stderr": 0.03186785930004129 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6160337552742616, + "acc_stderr": 0.031658678064106674, + "acc_norm": 0.6160337552742616, + "acc_norm_stderr": 0.031658678064106674 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3363754889178618, + "acc_stderr": 0.012067083079452225, + "acc_norm": 0.3363754889178618, + "acc_norm_stderr": 0.012067083079452225 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5, + "acc_stderr": 0.03509312031717982, + "acc_norm": 0.5, + "acc_norm_stderr": 0.03509312031717982 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5636363636363636, + "acc_stderr": 0.03872592983524754, + "acc_norm": 0.5636363636363636, + "acc_norm_stderr": 0.03872592983524754 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2692778457772338, + "mc1_stderr": 0.015528566637087305, + "mc2": 0.425101073539653, + "mc2_stderr": 0.014864041881952731 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5619834710743802, + "acc_stderr": 0.01705775370216029, + "acc_norm": 0.6174734356552538, + "acc_norm_stderr": 0.01670916538722882 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, 
+ "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + 
"harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "oopsung/Yi-Ko-6B-ENin-test-v1", + "model_sha": "fb559edd7e4a2809686425c555a38cda8e61c41e", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/oopsung/Yi-Ko-6B-Exo-test-v1/result_2023-12-06 23:00:01.json b/oopsung/Yi-Ko-6B-Exo-test-v1/result_2023-12-06 23:00:01.json new file mode 100644 index 0000000000000000000000000000000000000000..d9669e57aa41e03e8ca5c55aab92c7b9a33890d1 --- /dev/null +++ b/oopsung/Yi-Ko-6B-Exo-test-v1/result_2023-12-06 23:00:01.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3412969283276451, + "acc_stderr": 0.013855831287497723, + "acc_norm": 0.4087030716723549, + "acc_norm_stderr": 0.014365750345427005 + }, + "harness|ko_hellaswag|10": { + "acc": 0.39782911770563634, + "acc_stderr": 0.00488449506945969, + "acc_norm": 0.5329615614419438, + "acc_norm_stderr": 0.004978927164792888 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4853801169590643, + "acc_stderr": 0.038331852752130205, + "acc_norm": 0.4853801169590643, + "acc_norm_stderr": 0.038331852752130205 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5728155339805825, + "acc_stderr": 0.04897957737781168, + 
"acc_norm": 0.5728155339805825, + "acc_norm_stderr": 0.04897957737781168 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5338441890166028, + "acc_stderr": 0.017838956009136805, + "acc_norm": 0.5338441890166028, + "acc_norm_stderr": 0.017838956009136805 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45185185185185184, + "acc_stderr": 0.042992689054808624, + "acc_norm": 0.45185185185185184, + "acc_norm_stderr": 0.042992689054808624 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847415, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847415 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39148936170212767, + "acc_stderr": 0.03190701242326812, + "acc_norm": 0.39148936170212767, + "acc_norm_stderr": 0.03190701242326812 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4036144578313253, + "acc_stderr": 0.03819486140758396, + "acc_norm": 0.4036144578313253, + "acc_norm_stderr": 0.03819486140758396 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5080385852090032, + "acc_stderr": 0.028394421370984538, + "acc_norm": 0.5080385852090032, + "acc_norm_stderr": 0.028394421370984538 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5201793721973094, + "acc_stderr": 0.033530461674123, + "acc_norm": 0.5201793721973094, + "acc_norm_stderr": 0.033530461674123 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5114503816793893, + "acc_stderr": 0.043841400240780176, + "acc_norm": 0.5114503816793893, + "acc_norm_stderr": 0.043841400240780176 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6464646464646465, + "acc_stderr": 0.03406086723547155, + "acc_norm": 0.6464646464646465, + "acc_norm_stderr": 0.03406086723547155 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5103448275862069, + "acc_stderr": 
0.04165774775728762, + "acc_norm": 0.5103448275862069, + "acc_norm_stderr": 0.04165774775728762 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3431372549019608, + "acc_stderr": 0.04724007352383888, + "acc_norm": 0.3431372549019608, + "acc_norm_stderr": 0.04724007352383888 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5168067226890757, + "acc_stderr": 0.03246013680375308, + "acc_norm": 0.5168067226890757, + "acc_norm_stderr": 0.03246013680375308 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.48205128205128206, + "acc_stderr": 0.02533466708095495, + "acc_norm": 0.48205128205128206, + "acc_norm_stderr": 0.02533466708095495 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.59, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.59, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.03481904844438804, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.03481904844438804 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5290322580645161, + "acc_stderr": 0.02839601640276099, + "acc_norm": 0.5290322580645161, + "acc_norm_stderr": 0.02839601640276099 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7136752136752137, + "acc_stderr": 0.029614323690456648, + "acc_norm": 0.7136752136752137, + "acc_norm_stderr": 0.029614323690456648 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5320754716981132, + "acc_stderr": 0.03070948699255654, + "acc_norm": 0.5320754716981132, + "acc_norm_stderr": 0.03070948699255654 + }, + "harness|ko_mmlu_public_relations|5": { + 
"acc": 0.45454545454545453, + "acc_stderr": 0.04769300568972743, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.04769300568972743 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3037037037037037, + "acc_stderr": 0.02803792996911498, + "acc_norm": 0.3037037037037037, + "acc_norm_stderr": 0.02803792996911498 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3708609271523179, + "acc_stderr": 0.03943966699183629, + "acc_norm": 0.3708609271523179, + "acc_norm_stderr": 0.03943966699183629 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6218905472636815, + "acc_stderr": 0.03428867848778658, + "acc_norm": 0.6218905472636815, + "acc_norm_stderr": 0.03428867848778658 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4508670520231214, + "acc_stderr": 0.0379401267469703, + "acc_norm": 0.4508670520231214, + "acc_norm_stderr": 0.0379401267469703 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3253968253968254, + "acc_stderr": 0.02413015829976262, + "acc_norm": 0.3253968253968254, + "acc_norm_stderr": 0.02413015829976262 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4375, + "acc_stderr": 0.04148415739394154, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.04148415739394154 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.64, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.64, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.49710982658959535, + "acc_stderr": 0.02691864538323901, + "acc_norm": 0.49710982658959535, + "acc_norm_stderr": 0.02691864538323901 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4662576687116564, + "acc_stderr": 0.039194155450484096, + "acc_norm": 0.4662576687116564, + "acc_norm_stderr": 0.039194155450484096 + }, + "harness|ko_mmlu_prehistory|5": { + 
"acc": 0.48148148148148145, + "acc_stderr": 0.027801656212323667, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.027801656212323667 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5958549222797928, + "acc_stderr": 0.0354150857888402, + "acc_norm": 0.5958549222797928, + "acc_norm_stderr": 0.0354150857888402 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.34210526315789475, + "acc_stderr": 0.04462917535336937, + "acc_norm": 0.34210526315789475, + "acc_norm_stderr": 0.04462917535336937 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6311926605504588, + "acc_stderr": 0.020686227560729548, + "acc_norm": 0.6311926605504588, + "acc_norm_stderr": 0.020686227560729548 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2698412698412698, + "acc_stderr": 0.03970158273235173, + "acc_norm": 0.2698412698412698, + "acc_norm_stderr": 0.03970158273235173 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5196078431372549, + "acc_stderr": 0.028607893699576073, + "acc_norm": 0.5196078431372549, + "acc_norm_stderr": 0.028607893699576073 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.55, + "acc_stderr": 0.05, + "acc_norm": 0.55, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5867768595041323, + "acc_stderr": 0.04495087843548408, + "acc_norm": 0.5867768595041323, + "acc_norm_stderr": 0.04495087843548408 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.46710526315789475, + "acc_stderr": 0.04060127035236395, + "acc_norm": 0.46710526315789475, + "acc_norm_stderr": 0.04060127035236395 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4117647058823529, + "acc_stderr": 0.019910377463105935, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.019910377463105935 + }, + 
"harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3617021276595745, + "acc_stderr": 0.028663820147199495, + "acc_norm": 0.3617021276595745, + "acc_norm_stderr": 0.028663820147199495 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25892857142857145, + "acc_stderr": 0.041577515398656284, + "acc_norm": 0.25892857142857145, + "acc_norm_stderr": 0.041577515398656284 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.032568505702936464, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.032568505702936464 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24022346368715083, + "acc_stderr": 0.014288343803925295, + "acc_norm": 0.24022346368715083, + "acc_norm_stderr": 0.014288343803925295 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.030161911930767102, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.030161911930767102 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4775510204081633, + "acc_stderr": 0.031976941187136725, + "acc_norm": 0.4775510204081633, + "acc_norm_stderr": 0.031976941187136725 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6160337552742616, + "acc_stderr": 0.031658678064106674, + "acc_norm": 0.6160337552742616, + "acc_norm_stderr": 0.031658678064106674 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3350717079530639, + "acc_stderr": 0.012055499471330361, + "acc_norm": 0.3350717079530639, + "acc_norm_stderr": 0.012055499471330361 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5245098039215687, + "acc_stderr": 
0.035050931943487976, + "acc_norm": 0.5245098039215687, + "acc_norm_stderr": 0.035050931943487976 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5393939393939394, + "acc_stderr": 0.03892207016552012, + "acc_norm": 0.5393939393939394, + "acc_norm_stderr": 0.03892207016552012 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2631578947368421, + "mc1_stderr": 0.015415241740237035, + "mc2": 0.41691402541412415, + "mc2_stderr": 0.014819797591371593 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5678866587957497, + "acc_stderr": 0.017031170198851746, + "acc_norm": 0.615112160566706, + "acc_norm_stderr": 0.016728579701498658 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 
1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "oopsung/Yi-Ko-6B-Exo-test-v1", + "model_sha": "ea7a32987d14dc84615ee31959e4edc36487da7a", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/oopsung/Yi-Ko-6B-Exogen-test-v1/result_2023-12-12 23:02:24.json b/oopsung/Yi-Ko-6B-Exogen-test-v1/result_2023-12-12 23:02:24.json new file mode 100644 index 0000000000000000000000000000000000000000..54e4206bf9bd9a5db15649d05b2445ee46db09b6 --- /dev/null +++ b/oopsung/Yi-Ko-6B-Exogen-test-v1/result_2023-12-12 
23:02:24.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.34215017064846415, + "acc_stderr": 0.01386415215917728, + "acc_norm": 0.4087030716723549, + "acc_norm_stderr": 0.014365750345427005 + }, + "harness|ko_hellaswag|10": { + "acc": 0.39772953594901417, + "acc_stderr": 0.004884287515461508, + "acc_norm": 0.533559051981677, + "acc_norm_stderr": 0.004978529642140935 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.47953216374269003, + "acc_stderr": 0.0383161053282193, + "acc_norm": 0.47953216374269003, + "acc_norm_stderr": 0.0383161053282193 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5728155339805825, + "acc_stderr": 0.04897957737781168, + "acc_norm": 0.5728155339805825, + "acc_norm_stderr": 0.04897957737781168 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5376756066411239, + "acc_stderr": 0.01782913176428719, + "acc_norm": 0.5376756066411239, + "acc_norm_stderr": 0.01782913176428719 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45185185185185184, + "acc_stderr": 0.042992689054808624, + "acc_norm": 0.45185185185185184, + "acc_norm_stderr": 0.042992689054808624 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847415, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847415 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39574468085106385, + "acc_stderr": 0.03196758697835362, + "acc_norm": 0.39574468085106385, + "acc_norm_stderr": 0.03196758697835362 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39759036144578314, + "acc_stderr": 0.03809973084540219, + "acc_norm": 0.39759036144578314, + "acc_norm_stderr": 0.03809973084540219 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5080385852090032, + "acc_stderr": 0.028394421370984538, + "acc_norm": 0.5080385852090032, + "acc_norm_stderr": 0.028394421370984538 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5112107623318386, + "acc_stderr": 0.033549366530984746, + "acc_norm": 
0.5112107623318386, + "acc_norm_stderr": 0.033549366530984746 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5190839694656488, + "acc_stderr": 0.04382094705550988, + "acc_norm": 0.5190839694656488, + "acc_norm_stderr": 0.04382094705550988 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6464646464646465, + "acc_stderr": 0.03406086723547155, + "acc_norm": 0.6464646464646465, + "acc_norm_stderr": 0.03406086723547155 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5103448275862069, + "acc_stderr": 0.04165774775728762, + "acc_norm": 0.5103448275862069, + "acc_norm_stderr": 0.04165774775728762 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.35294117647058826, + "acc_stderr": 0.04755129616062946, + "acc_norm": 0.35294117647058826, + "acc_norm_stderr": 0.04755129616062946 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5168067226890757, + "acc_stderr": 0.03246013680375308, + "acc_norm": 0.5168067226890757, + "acc_norm_stderr": 0.03246013680375308 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4794871794871795, + "acc_stderr": 0.025329663163489943, + "acc_norm": 0.4794871794871795, + "acc_norm_stderr": 0.025329663163489943 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.59, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.59, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.04826217294139894, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.04826217294139894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.42857142857142855, + "acc_stderr": 
0.03481904844438804, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.03481904844438804 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5258064516129032, + "acc_stderr": 0.028406095057653333, + "acc_norm": 0.5258064516129032, + "acc_norm_stderr": 0.028406095057653333 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.717948717948718, + "acc_stderr": 0.029480360549541194, + "acc_norm": 0.717948717948718, + "acc_norm_stderr": 0.029480360549541194 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5320754716981132, + "acc_stderr": 0.03070948699255654, + "acc_norm": 0.5320754716981132, + "acc_norm_stderr": 0.03070948699255654 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4727272727272727, + "acc_stderr": 0.04782001791380063, + "acc_norm": 0.4727272727272727, + "acc_norm_stderr": 0.04782001791380063 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3037037037037037, + "acc_stderr": 0.02803792996911498, + "acc_norm": 0.3037037037037037, + "acc_norm_stderr": 0.02803792996911498 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3708609271523179, + "acc_stderr": 0.03943966699183629, + "acc_norm": 0.3708609271523179, + "acc_norm_stderr": 0.03943966699183629 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6218905472636815, + "acc_stderr": 0.03428867848778658, + "acc_norm": 0.6218905472636815, + "acc_norm_stderr": 0.03428867848778658 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4508670520231214, + "acc_stderr": 0.03794012674697031, + "acc_norm": 0.4508670520231214, + "acc_norm_stderr": 0.03794012674697031 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3306878306878307, + "acc_stderr": 0.024229965298425072, + "acc_norm": 0.3306878306878307, + "acc_norm_stderr": 0.024229965298425072 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4375, + "acc_stderr": 0.04148415739394154, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.04148415739394154 + }, + 
"harness|ko_mmlu_college_chemistry|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.65, + "acc_stderr": 0.04793724854411019, + "acc_norm": 0.65, + "acc_norm_stderr": 0.04793724854411019 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5, + "acc_stderr": 0.026919095102908273, + "acc_norm": 0.5, + "acc_norm_stderr": 0.026919095102908273 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4662576687116564, + "acc_stderr": 0.039194155450484096, + "acc_norm": 0.4662576687116564, + "acc_norm_stderr": 0.039194155450484096 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.027801656212323667, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.027801656212323667 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5958549222797928, + "acc_stderr": 0.0354150857888402, + "acc_norm": 0.5958549222797928, + "acc_norm_stderr": 0.0354150857888402 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.34210526315789475, + "acc_stderr": 0.04462917535336937, + "acc_norm": 0.34210526315789475, + "acc_norm_stderr": 0.04462917535336937 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6311926605504588, + "acc_stderr": 0.020686227560729548, + "acc_norm": 0.6311926605504588, + "acc_norm_stderr": 0.020686227560729548 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2698412698412698, + "acc_stderr": 0.03970158273235173, + "acc_norm": 0.2698412698412698, + "acc_norm_stderr": 0.03970158273235173 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5228758169934641, + "acc_stderr": 0.028599936776089782, + "acc_norm": 0.5228758169934641, + "acc_norm_stderr": 0.028599936776089782 + }, + 
"harness|ko_mmlu_business_ethics|5": { + "acc": 0.55, + "acc_stderr": 0.05, + "acc_norm": 0.55, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5867768595041323, + "acc_stderr": 0.04495087843548408, + "acc_norm": 0.5867768595041323, + "acc_norm_stderr": 0.04495087843548408 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.48026315789473684, + "acc_stderr": 0.040657710025626036, + "acc_norm": 0.48026315789473684, + "acc_norm_stderr": 0.040657710025626036 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.019944914136873583, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.019944914136873583 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3617021276595745, + "acc_stderr": 0.0286638201471995, + "acc_norm": 0.3617021276595745, + "acc_norm_stderr": 0.0286638201471995 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25892857142857145, + "acc_stderr": 0.041577515398656284, + "acc_norm": 0.25892857142857145, + "acc_norm_stderr": 0.041577515398656284 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.032568505702936464, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.032568505702936464 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24134078212290502, + "acc_stderr": 0.014310999547961441, + "acc_norm": 0.24134078212290502, + "acc_norm_stderr": 0.014310999547961441 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4375, + "acc_stderr": 0.030134614954403924, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.030134614954403924 + }, + 
"harness|ko_mmlu_security_studies|5": { + "acc": 0.4816326530612245, + "acc_stderr": 0.031987615467631264, + "acc_norm": 0.4816326530612245, + "acc_norm_stderr": 0.031987615467631264 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6160337552742616, + "acc_stderr": 0.031658678064106674, + "acc_norm": 0.6160337552742616, + "acc_norm_stderr": 0.031658678064106674 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3350717079530639, + "acc_stderr": 0.012055499471330364, + "acc_norm": 0.3350717079530639, + "acc_norm_stderr": 0.012055499471330364 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5245098039215687, + "acc_stderr": 0.03505093194348798, + "acc_norm": 0.5245098039215687, + "acc_norm_stderr": 0.03505093194348798 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.03888176921674101, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.03888176921674101 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2631578947368421, + "mc1_stderr": 0.015415241740237035, + "mc2": 0.415869370781035, + "mc2_stderr": 0.014811673986495334 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5690672963400236, + "acc_stderr": 0.01702555819604314, + "acc_norm": 0.615112160566706, + "acc_norm_stderr": 0.01672857970149866 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + 
"harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + 
"harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "oopsung/Yi-Ko-6B-Exogen-test-v1", + "model_sha": "01f5d976626f1326236a5d2522eb0612c5306289", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/oopsung/Yi-Ko-6B-N-test-v1/result_2023-12-06 22:59:48.json b/oopsung/Yi-Ko-6B-N-test-v1/result_2023-12-06 22:59:48.json new file mode 100644 index 0000000000000000000000000000000000000000..9fdad033ca2124ba4ca59cbc3ece724604b5e105 --- /dev/null +++ b/oopsung/Yi-Ko-6B-N-test-v1/result_2023-12-06 22:59:48.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3387372013651877, + "acc_stderr": 0.01383056892797433, + "acc_norm": 0.4061433447098976, + "acc_norm_stderr": 0.014351656690097862 + }, + "harness|ko_hellaswag|10": { + "acc": 0.39673371838279226, + "acc_stderr": 0.004882200364432364, + "acc_norm": 0.5327623979286995, + "acc_norm_stderr": 0.0049790580784786955 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4853801169590643, + "acc_stderr": 0.038331852752130205, + "acc_norm": 0.4853801169590643, + "acc_norm_stderr": 0.038331852752130205 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5533980582524272, + "acc_stderr": 0.04922424153458933, + "acc_norm": 0.5533980582524272, + "acc_norm_stderr": 0.04922424153458933 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5338441890166028, + "acc_stderr": 0.017838956009136802, + "acc_norm": 0.5338441890166028, + "acc_norm_stderr": 0.017838956009136802 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45185185185185184, + "acc_stderr": 0.042992689054808624, + "acc_norm": 0.45185185185185184, + "acc_norm_stderr": 0.042992689054808624 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847415, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847415 + }, + 
"harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4, + "acc_stderr": 0.03202563076101735, + "acc_norm": 0.4, + "acc_norm_stderr": 0.03202563076101735 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4036144578313253, + "acc_stderr": 0.03819486140758397, + "acc_norm": 0.4036144578313253, + "acc_norm_stderr": 0.03819486140758397 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5048231511254019, + "acc_stderr": 0.028396770444111298, + "acc_norm": 0.5048231511254019, + "acc_norm_stderr": 0.028396770444111298 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5022421524663677, + "acc_stderr": 0.033557465352232634, + "acc_norm": 0.5022421524663677, + "acc_norm_stderr": 0.033557465352232634 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5038167938931297, + "acc_stderr": 0.04385162325601553, + "acc_norm": 0.5038167938931297, + "acc_norm_stderr": 0.04385162325601553 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6464646464646465, + "acc_stderr": 0.03406086723547155, + "acc_norm": 0.6464646464646465, + "acc_norm_stderr": 0.03406086723547155 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.496551724137931, + "acc_stderr": 0.041665675771015785, + "acc_norm": 0.496551724137931, + "acc_norm_stderr": 0.041665675771015785 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04690650298201942, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04690650298201942 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5042016806722689, + "acc_stderr": 0.03247734334448111, + "acc_norm": 0.5042016806722689, + "acc_norm_stderr": 0.03247734334448111 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.47435897435897434, + "acc_stderr": 0.025317649726448652, + "acc_norm": 0.47435897435897434, + 
"acc_norm_stderr": 0.025317649726448652 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4236453201970443, + "acc_stderr": 0.03476725747649038, + "acc_norm": 0.4236453201970443, + "acc_norm_stderr": 0.03476725747649038 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5161290322580645, + "acc_stderr": 0.028429203176724555, + "acc_norm": 0.5161290322580645, + "acc_norm_stderr": 0.028429203176724555 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.717948717948718, + "acc_stderr": 0.02948036054954119, + "acc_norm": 0.717948717948718, + "acc_norm_stderr": 0.02948036054954119 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5433962264150943, + "acc_stderr": 0.030656748696739435, + "acc_norm": 0.5433962264150943, + "acc_norm_stderr": 0.030656748696739435 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4909090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.4909090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.027840811495871916, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.027840811495871916 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3509933774834437, + "acc_stderr": 0.03896981964257374, + "acc_norm": 0.3509933774834437, + "acc_norm_stderr": 0.03896981964257374 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6268656716417911, + "acc_stderr": 0.034198326081760065, + "acc_norm": 0.6268656716417911, + 
"acc_norm_stderr": 0.034198326081760065 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4624277456647399, + "acc_stderr": 0.038016851045244604, + "acc_norm": 0.4624277456647399, + "acc_norm_stderr": 0.038016851045244604 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3412698412698413, + "acc_stderr": 0.024419234966819064, + "acc_norm": 0.3412698412698413, + "acc_norm_stderr": 0.024419234966819064 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.041553199555931467, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.041553199555931467 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.66, + "acc_stderr": 0.04760952285695238, + "acc_norm": 0.66, + "acc_norm_stderr": 0.04760952285695238 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.49421965317919075, + "acc_stderr": 0.026917296179149116, + "acc_norm": 0.49421965317919075, + "acc_norm_stderr": 0.026917296179149116 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4785276073619632, + "acc_stderr": 0.03924746876751129, + "acc_norm": 0.4785276073619632, + "acc_norm_stderr": 0.03924746876751129 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4876543209876543, + "acc_stderr": 0.027812262269327235, + "acc_norm": 0.4876543209876543, + "acc_norm_stderr": 0.027812262269327235 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5854922279792746, + "acc_stderr": 0.035553003195576686, + "acc_norm": 0.5854922279792746, + "acc_norm_stderr": 0.035553003195576686 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.37719298245614036, + "acc_stderr": 0.04559522141958216, + "acc_norm": 
0.37719298245614036, + "acc_norm_stderr": 0.04559522141958216 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6201834862385321, + "acc_stderr": 0.020808825617866244, + "acc_norm": 0.6201834862385321, + "acc_norm_stderr": 0.020808825617866244 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2698412698412698, + "acc_stderr": 0.03970158273235173, + "acc_norm": 0.2698412698412698, + "acc_norm_stderr": 0.03970158273235173 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5163398692810458, + "acc_stderr": 0.028614624752805434, + "acc_norm": 0.5163398692810458, + "acc_norm_stderr": 0.028614624752805434 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5785123966942148, + "acc_stderr": 0.04507732278775087, + "acc_norm": 0.5785123966942148, + "acc_norm_stderr": 0.04507732278775087 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4868421052631579, + "acc_stderr": 0.04067533136309174, + "acc_norm": 0.4868421052631579, + "acc_norm_stderr": 0.04067533136309174 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4199346405228758, + "acc_stderr": 0.019966811178256483, + "acc_norm": 0.4199346405228758, + "acc_norm_stderr": 0.019966811178256483 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3617021276595745, + "acc_stderr": 0.028663820147199495, + "acc_norm": 0.3617021276595745, + "acc_norm_stderr": 0.028663820147199495 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25892857142857145, + "acc_stderr": 0.041577515398656284, + "acc_norm": 0.25892857142857145, + "acc_norm_stderr": 0.041577515398656284 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3425925925925926, + "acc_stderr": 0.03236585252602157, + "acc_norm": 0.3425925925925926, + "acc_norm_stderr": 0.03236585252602157 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 
0.24134078212290502, + "acc_stderr": 0.014310999547961441, + "acc_norm": 0.24134078212290502, + "acc_norm_stderr": 0.014310999547961441 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4485294117647059, + "acc_stderr": 0.0302114796091216, + "acc_norm": 0.4485294117647059, + "acc_norm_stderr": 0.0302114796091216 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.46938775510204084, + "acc_stderr": 0.031949171367580624, + "acc_norm": 0.46938775510204084, + "acc_norm_stderr": 0.031949171367580624 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6286919831223629, + "acc_stderr": 0.03145068600744859, + "acc_norm": 0.6286919831223629, + "acc_norm_stderr": 0.03145068600744859 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3350717079530639, + "acc_stderr": 0.012055499471330366, + "acc_norm": 0.3350717079530639, + "acc_norm_stderr": 0.012055499471330366 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5245098039215687, + "acc_stderr": 0.035050931943487976, + "acc_norm": 0.5245098039215687, + "acc_norm_stderr": 0.035050931943487976 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5515151515151515, + "acc_stderr": 0.038835659779569286, + "acc_norm": 0.5515151515151515, + "acc_norm_stderr": 0.038835659779569286 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.26438188494492043, + "mc1_stderr": 0.015438211119522505, + "mc2": 0.4189628761359413, + "mc2_stderr": 0.014837511319155058 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5596221959858324, + "acc_stderr": 0.01706769977431298, + "acc_norm": 0.6080283353010626, + "acc_norm_stderr": 0.016784332119424077 + } 
+ }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + 
"harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "oopsung/Yi-Ko-6B-N-test-v1", + "model_sha": "21013e0de8b706a0462a2a0ebc7f7e1f9be4b5ab", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/oopsung/Yi-Ko-6B-all-test-v1/result_2023-12-14 08:29:43.json b/oopsung/Yi-Ko-6B-all-test-v1/result_2023-12-14 08:29:43.json new file mode 100644 index 0000000000000000000000000000000000000000..9097df49405a157604eb5d79f24c6e812ea9ada6 --- /dev/null +++ b/oopsung/Yi-Ko-6B-all-test-v1/result_2023-12-14 08:29:43.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3361774744027304, + "acc_stderr": 0.013804855026205763, + "acc_norm": 0.4035836177474403, + "acc_norm_stderr": 0.014337158914268447 + }, + "harness|ko_hellaswag|10": { + "acc": 0.395538737303326, + "acc_stderr": 0.004879667889198489, + "acc_norm": 0.5326628161720772, + "acc_norm_stderr": 0.004979123236507971 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.49122807017543857, + "acc_stderr": 0.038342347441649924, + "acc_norm": 0.49122807017543857, + "acc_norm_stderr": 0.038342347441649924 + }, + "harness|ko_mmlu_management|5": { + "acc": 
0.5436893203883495, + "acc_stderr": 0.049318019942204146, + "acc_norm": 0.5436893203883495, + "acc_norm_stderr": 0.049318019942204146 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5389527458492975, + "acc_stderr": 0.017825621793239016, + "acc_norm": 0.5389527458492975, + "acc_norm_stderr": 0.017825621793239016 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45185185185185184, + "acc_stderr": 0.042992689054808624, + "acc_norm": 0.45185185185185184, + "acc_norm_stderr": 0.042992689054808624 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206824, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206824 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.40425531914893614, + "acc_stderr": 0.032081157507886836, + "acc_norm": 0.40425531914893614, + "acc_norm_stderr": 0.032081157507886836 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39759036144578314, + "acc_stderr": 0.03809973084540219, + "acc_norm": 0.39759036144578314, + "acc_norm_stderr": 0.03809973084540219 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5080385852090032, + "acc_stderr": 0.028394421370984538, + "acc_norm": 0.5080385852090032, + "acc_norm_stderr": 0.028394421370984538 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5112107623318386, + "acc_stderr": 0.033549366530984746, + "acc_norm": 0.5112107623318386, + "acc_norm_stderr": 0.033549366530984746 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5190839694656488, + "acc_stderr": 0.04382094705550988, + "acc_norm": 0.5190839694656488, + "acc_norm_stderr": 0.04382094705550988 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6515151515151515, + "acc_stderr": 0.03394853965156402, + "acc_norm": 0.6515151515151515, + "acc_norm_stderr": 0.03394853965156402 + }, + 
"harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5103448275862069, + "acc_stderr": 0.04165774775728762, + "acc_norm": 0.5103448275862069, + "acc_norm_stderr": 0.04165774775728762 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.04617034827006717, + "acc_norm": 0.3137254901960784, + "acc_norm_stderr": 0.04617034827006717 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5168067226890757, + "acc_stderr": 0.03246013680375308, + "acc_norm": 0.5168067226890757, + "acc_norm_stderr": 0.03246013680375308 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4717948717948718, + "acc_stderr": 0.02531063925493391, + "acc_norm": 0.4717948717948718, + "acc_norm_stderr": 0.02531063925493391 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4433497536945813, + "acc_stderr": 0.03495334582162933, + "acc_norm": 0.4433497536945813, + "acc_norm_stderr": 0.03495334582162933 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.535483870967742, + "acc_stderr": 0.02837228779796295, + "acc_norm": 0.535483870967742, + "acc_norm_stderr": 0.02837228779796295 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7264957264957265, + "acc_stderr": 0.029202540153431177, + "acc_norm": 0.7264957264957265, + "acc_norm_stderr": 0.029202540153431177 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5320754716981132, + "acc_stderr": 0.030709486992556545, + "acc_norm": 0.5320754716981132, + "acc_norm_stderr": 0.030709486992556545 
+ }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4909090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.4909090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3037037037037037, + "acc_stderr": 0.028037929969114982, + "acc_norm": 0.3037037037037037, + "acc_norm_stderr": 0.028037929969114982 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3509933774834437, + "acc_stderr": 0.03896981964257374, + "acc_norm": 0.3509933774834437, + "acc_norm_stderr": 0.03896981964257374 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6218905472636815, + "acc_stderr": 0.03428867848778658, + "acc_norm": 0.6218905472636815, + "acc_norm_stderr": 0.03428867848778658 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.44508670520231214, + "acc_stderr": 0.037894017602836484, + "acc_norm": 0.44508670520231214, + "acc_norm_stderr": 0.037894017602836484 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.32275132275132273, + "acc_stderr": 0.024078943243597016, + "acc_norm": 0.32275132275132273, + "acc_norm_stderr": 0.024078943243597016 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.041553199555931467, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.041553199555931467 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.35, + "acc_stderr": 0.04793724854411022, + "acc_norm": 0.35, + "acc_norm_stderr": 0.04793724854411022 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.65, + "acc_stderr": 0.04793724854411019, + "acc_norm": 0.65, + "acc_norm_stderr": 0.04793724854411019 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5, + "acc_stderr": 0.026919095102908273, + "acc_norm": 0.5, + "acc_norm_stderr": 0.026919095102908273 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4723926380368098, + "acc_stderr": 0.0392237829061099, + "acc_norm": 0.4723926380368098, + "acc_norm_stderr": 
0.0392237829061099 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4783950617283951, + "acc_stderr": 0.027794760105008746, + "acc_norm": 0.4783950617283951, + "acc_norm_stderr": 0.027794760105008746 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6010362694300518, + "acc_stderr": 0.03533999094065696, + "acc_norm": 0.6010362694300518, + "acc_norm_stderr": 0.03533999094065696 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3684210526315789, + "acc_stderr": 0.04537815354939391, + "acc_norm": 0.3684210526315789, + "acc_norm_stderr": 0.04537815354939391 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6293577981651376, + "acc_stderr": 0.020707458164352984, + "acc_norm": 0.6293577981651376, + "acc_norm_stderr": 0.020707458164352984 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.039325376803928704, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.039325376803928704 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5294117647058824, + "acc_stderr": 0.028580341065138293, + "acc_norm": 0.5294117647058824, + "acc_norm_stderr": 0.028580341065138293 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.55, + "acc_stderr": 0.05, + "acc_norm": 0.55, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6115702479338843, + "acc_stderr": 0.04449270350068383, + "acc_norm": 0.6115702479338843, + "acc_norm_stderr": 0.04449270350068383 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.46710526315789475, + "acc_stderr": 0.040601270352363966, + "acc_norm": 0.46710526315789475, + "acc_norm_stderr": 0.040601270352363966 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.41013071895424835, + "acc_stderr": 0.0198984127176359, + "acc_norm": 0.41013071895424835, + 
"acc_norm_stderr": 0.0198984127176359 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.35815602836879434, + "acc_stderr": 0.02860208586275942, + "acc_norm": 0.35815602836879434, + "acc_norm_stderr": 0.02860208586275942 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25892857142857145, + "acc_stderr": 0.041577515398656284, + "acc_norm": 0.25892857142857145, + "acc_norm_stderr": 0.041577515398656284 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3425925925925926, + "acc_stderr": 0.03236585252602157, + "acc_norm": 0.3425925925925926, + "acc_norm_stderr": 0.03236585252602157 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24022346368715083, + "acc_stderr": 0.014288343803925295, + "acc_norm": 0.24022346368715083, + "acc_norm_stderr": 0.014288343803925295 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.53, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.53, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4375, + "acc_stderr": 0.030134614954403924, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.030134614954403924 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.46530612244897956, + "acc_stderr": 0.03193207024425314, + "acc_norm": 0.46530612244897956, + "acc_norm_stderr": 0.03193207024425314 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6244725738396625, + "acc_stderr": 0.03152256243091156, + "acc_norm": 0.6244725738396625, + "acc_norm_stderr": 0.03152256243091156 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3376792698826597, + "acc_stderr": 0.012078563777145548, + "acc_norm": 0.3376792698826597, + "acc_norm_stderr": 0.012078563777145548 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5098039215686274, + "acc_stderr": 
0.03508637358630572, + "acc_norm": 0.5098039215686274, + "acc_norm_stderr": 0.03508637358630572 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.03888176921674102, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.03888176921674102 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2717258261933905, + "mc1_stderr": 0.01557284045287583, + "mc2": 0.4224730625453278, + "mc2_stderr": 0.014842293442821076 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5608028335301063, + "acc_stderr": 0.017062775744780705, + "acc_norm": 0.6127508854781583, + "acc_norm_stderr": 0.016747577991642785 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, 
+ "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "oopsung/Yi-Ko-6B-all-test-v1", + "model_sha": "06d9f443c5219ec69bd214d1f7d8c08668104548", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/oopsung/Yi-Ko-6B-com-test-v1/result_2023-12-21 05:57:50.json b/oopsung/Yi-Ko-6B-com-test-v1/result_2023-12-21 05:57:50.json new file mode 100644 index 0000000000000000000000000000000000000000..a3ef3b920e7852cca483df096744ef77fc2aaf5a --- /dev/null +++ b/oopsung/Yi-Ko-6B-com-test-v1/result_2023-12-21 
05:57:50.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3447098976109215, + "acc_stderr": 0.01388881628678211, + "acc_norm": 0.4138225255972696, + "acc_norm_stderr": 0.014392730009221007 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3973312089225254, + "acc_stderr": 0.00488345518890897, + "acc_norm": 0.5322644891455885, + "acc_norm_stderr": 0.004979381876712618 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.49707602339181284, + "acc_stderr": 0.03834759370936839, + "acc_norm": 0.49707602339181284, + "acc_norm_stderr": 0.03834759370936839 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5825242718446602, + "acc_stderr": 0.048828405482122375, + "acc_norm": 0.5825242718446602, + "acc_norm_stderr": 0.048828405482122375 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5338441890166028, + "acc_stderr": 0.017838956009136805, + "acc_norm": 0.5338441890166028, + "acc_norm_stderr": 0.017838956009136805 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.042925967182569816, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.042925967182569816 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909284, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909284 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39148936170212767, + "acc_stderr": 0.03190701242326812, + "acc_norm": 0.39148936170212767, + "acc_norm_stderr": 0.03190701242326812 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39759036144578314, + "acc_stderr": 0.03809973084540219, + "acc_norm": 0.39759036144578314, + "acc_norm_stderr": 0.03809973084540219 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5144694533762058, + "acc_stderr": 0.028386198084177687, + "acc_norm": 0.5144694533762058, + "acc_norm_stderr": 0.028386198084177687 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.49327354260089684, + "acc_stderr": 0.03355476596234354, + "acc_norm": 
0.49327354260089684, + "acc_norm_stderr": 0.03355476596234354 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4961832061068702, + "acc_stderr": 0.043851623256015534, + "acc_norm": 0.4961832061068702, + "acc_norm_stderr": 0.043851623256015534 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6515151515151515, + "acc_stderr": 0.03394853965156402, + "acc_norm": 0.6515151515151515, + "acc_norm_stderr": 0.03394853965156402 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.496551724137931, + "acc_stderr": 0.041665675771015785, + "acc_norm": 0.496551724137931, + "acc_norm_stderr": 0.041665675771015785 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.35294117647058826, + "acc_stderr": 0.04755129616062946, + "acc_norm": 0.35294117647058826, + "acc_norm_stderr": 0.04755129616062946 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4957983193277311, + "acc_stderr": 0.0324773433444811, + "acc_norm": 0.4957983193277311, + "acc_norm_stderr": 0.0324773433444811 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4717948717948718, + "acc_stderr": 0.025310639254933903, + "acc_norm": 0.4717948717948718, + "acc_norm_stderr": 0.025310639254933903 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4482758620689655, + "acc_stderr": 0.034991131376767445, + "acc_norm": 
0.4482758620689655, + "acc_norm_stderr": 0.034991131376767445 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5258064516129032, + "acc_stderr": 0.028406095057653333, + "acc_norm": 0.5258064516129032, + "acc_norm_stderr": 0.028406095057653333 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7222222222222222, + "acc_stderr": 0.02934311479809447, + "acc_norm": 0.7222222222222222, + "acc_norm_stderr": 0.02934311479809447 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5245283018867924, + "acc_stderr": 0.030735822206205615, + "acc_norm": 0.5245283018867924, + "acc_norm_stderr": 0.030735822206205615 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.45454545454545453, + "acc_stderr": 0.04769300568972743, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.04769300568972743 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3296296296296296, + "acc_stderr": 0.028661201116524586, + "acc_norm": 0.3296296296296296, + "acc_norm_stderr": 0.028661201116524586 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3576158940397351, + "acc_stderr": 0.03913453431177258, + "acc_norm": 0.3576158940397351, + "acc_norm_stderr": 0.03913453431177258 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6218905472636815, + "acc_stderr": 0.03428867848778658, + "acc_norm": 0.6218905472636815, + "acc_norm_stderr": 0.03428867848778658 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.44508670520231214, + "acc_stderr": 0.03789401760283648, + "acc_norm": 0.44508670520231214, + "acc_norm_stderr": 0.03789401760283648 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3306878306878307, + "acc_stderr": 0.02422996529842507, + "acc_norm": 0.3306878306878307, + "acc_norm_stderr": 0.02422996529842507 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.041553199555931467, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.041553199555931467 + }, + 
"harness|ko_mmlu_college_chemistry|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.66, + "acc_stderr": 0.04760952285695237, + "acc_norm": 0.66, + "acc_norm_stderr": 0.04760952285695237 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.49421965317919075, + "acc_stderr": 0.026917296179149116, + "acc_norm": 0.49421965317919075, + "acc_norm_stderr": 0.026917296179149116 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4785276073619632, + "acc_stderr": 0.0392474687675113, + "acc_norm": 0.4785276073619632, + "acc_norm_stderr": 0.0392474687675113 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.027777777777777797, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.027777777777777797 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5906735751295337, + "acc_stderr": 0.03548608168860807, + "acc_norm": 0.5906735751295337, + "acc_norm_stderr": 0.03548608168860807 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04434600701584925, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04434600701584925 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6275229357798165, + "acc_stderr": 0.020728368457638497, + "acc_norm": 0.6275229357798165, + "acc_norm_stderr": 0.020728368457638497 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.29365079365079366, + "acc_stderr": 0.04073524322147126, + "acc_norm": 0.29365079365079366, + "acc_norm_stderr": 0.04073524322147126 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5163398692810458, + "acc_stderr": 0.028614624752805434, + "acc_norm": 0.5163398692810458, + "acc_norm_stderr": 0.028614624752805434 + }, 
+ "harness|ko_mmlu_business_ethics|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5537190082644629, + "acc_stderr": 0.0453793517794788, + "acc_norm": 0.5537190082644629, + "acc_norm_stderr": 0.0453793517794788 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.46710526315789475, + "acc_stderr": 0.04060127035236395, + "acc_norm": 0.46710526315789475, + "acc_norm_stderr": 0.04060127035236395 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4150326797385621, + "acc_stderr": 0.01993362777685742, + "acc_norm": 0.4150326797385621, + "acc_norm_stderr": 0.01993362777685742 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.35106382978723405, + "acc_stderr": 0.028473501272963764, + "acc_norm": 0.35106382978723405, + "acc_norm_stderr": 0.028473501272963764 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25892857142857145, + "acc_stderr": 0.041577515398656284, + "acc_norm": 0.25892857142857145, + "acc_norm_stderr": 0.041577515398656284 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.032757734861009996, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.032757734861009996 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4522058823529412, + "acc_stderr": 0.030233758551596455, + "acc_norm": 0.4522058823529412, + 
"acc_norm_stderr": 0.030233758551596455 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.46122448979591835, + "acc_stderr": 0.03191282052669277, + "acc_norm": 0.46122448979591835, + "acc_norm_stderr": 0.03191282052669277 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.620253164556962, + "acc_stderr": 0.0315918875296585, + "acc_norm": 0.620253164556962, + "acc_norm_stderr": 0.0315918875296585 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3318122555410691, + "acc_stderr": 0.012026088259897639, + "acc_norm": 0.3318122555410691, + "acc_norm_stderr": 0.012026088259897639 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5196078431372549, + "acc_stderr": 0.03506612560524866, + "acc_norm": 0.5196078431372549, + "acc_norm_stderr": 0.03506612560524866 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5515151515151515, + "acc_stderr": 0.038835659779569286, + "acc_norm": 0.5515151515151515, + "acc_norm_stderr": 0.038835659779569286 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2692778457772338, + "mc1_stderr": 0.015528566637087305, + "mc2": 0.42063207226082105, + "mc2_stderr": 0.014843379672251859 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5726092089728453, + "acc_stderr": 0.017008129844823156, + "acc_norm": 0.6139315230224321, + "acc_norm_stderr": 0.01673813076032175 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, 
+ "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + 
"harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "oopsung/Yi-Ko-6B-com-test-v1", + "model_sha": "0ca70a1f4310f7f0603e9eaa80f22b68102a5755", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/oopsung/Yi-Ko-6B-orcapus-test-v1/result_2023-12-06 22:59:32.json b/oopsung/Yi-Ko-6B-orcapus-test-v1/result_2023-12-06 22:59:32.json new file mode 100644 index 0000000000000000000000000000000000000000..da9231750c4ce9474301b6e44d30c8846724c3e4 --- /dev/null +++ b/oopsung/Yi-Ko-6B-orcapus-test-v1/result_2023-12-06 22:59:32.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3464163822525597, + "acc_stderr": 0.013905011180063246, + "acc_norm": 0.4180887372013652, + "acc_norm_stderr": 0.01441398839699608 + }, + "harness|ko_hellaswag|10": { + "acc": 0.39494124676359293, + "acc_stderr": 0.004878390226591715, + "acc_norm": 0.5323640709022107, + "acc_norm_stderr": 0.004979317515432522 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.49707602339181284, + "acc_stderr": 0.03834759370936839, + "acc_norm": 0.49707602339181284, + "acc_norm_stderr": 0.03834759370936839 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5533980582524272, + "acc_stderr": 0.04922424153458933, + "acc_norm": 0.5533980582524272, + "acc_norm_stderr": 0.04922424153458933 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5312899106002554, + "acc_stderr": 0.01784491809046855, + "acc_norm": 0.5312899106002554, + "acc_norm_stderr": 0.01784491809046855 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45925925925925926, + "acc_stderr": 0.04304979692464244, + "acc_norm": 0.45925925925925926, + "acc_norm_stderr": 0.04304979692464244 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.27, + 
"acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4085106382978723, + "acc_stderr": 0.03213418026701576, + "acc_norm": 0.4085106382978723, + "acc_norm_stderr": 0.03213418026701576 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.41566265060240964, + "acc_stderr": 0.038367221765980515, + "acc_norm": 0.41566265060240964, + "acc_norm_stderr": 0.038367221765980515 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5112540192926045, + "acc_stderr": 0.028390897396863526, + "acc_norm": 0.5112540192926045, + "acc_norm_stderr": 0.028390897396863526 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5067264573991032, + "acc_stderr": 0.03355476596234355, + "acc_norm": 0.5067264573991032, + "acc_norm_stderr": 0.03355476596234355 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5343511450381679, + "acc_stderr": 0.04374928560599738, + "acc_norm": 0.5343511450381679, + "acc_norm_stderr": 0.04374928560599738 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6313131313131313, + "acc_stderr": 0.03437305501980619, + "acc_norm": 0.6313131313131313, + "acc_norm_stderr": 0.03437305501980619 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5103448275862069, + "acc_stderr": 0.04165774775728762, + "acc_norm": 0.5103448275862069, + "acc_norm_stderr": 0.04165774775728762 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3235294117647059, + "acc_stderr": 0.046550104113196177, + "acc_norm": 0.3235294117647059, + "acc_norm_stderr": 0.046550104113196177 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.49159663865546216, + "acc_stderr": 0.0324739027656967, + "acc_norm": 0.49159663865546216, + "acc_norm_stderr": 0.0324739027656967 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.47692307692307695, + 
"acc_stderr": 0.025323990861736125, + "acc_norm": 0.47692307692307695, + "acc_norm_stderr": 0.025323990861736125 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.03465304488406796, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.03465304488406796 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5451612903225806, + "acc_stderr": 0.028327743091561088, + "acc_norm": 0.5451612903225806, + "acc_norm_stderr": 0.028327743091561088 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7264957264957265, + "acc_stderr": 0.029202540153431183, + "acc_norm": 0.7264957264957265, + "acc_norm_stderr": 0.029202540153431183 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.539622641509434, + "acc_stderr": 0.030676096599389177, + "acc_norm": 0.539622641509434, + "acc_norm_stderr": 0.030676096599389177 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4818181818181818, + "acc_stderr": 0.04785964010794916, + "acc_norm": 0.4818181818181818, + "acc_norm_stderr": 0.04785964010794916 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3111111111111111, + "acc_stderr": 0.02822644674968352, + "acc_norm": 0.3111111111111111, + "acc_norm_stderr": 0.02822644674968352 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3509933774834437, + "acc_stderr": 0.03896981964257374, + "acc_norm": 0.3509933774834437, + "acc_norm_stderr": 0.03896981964257374 + }, + "harness|ko_mmlu_sociology|5": { + 
"acc": 0.6318407960199005, + "acc_stderr": 0.03410410565495301, + "acc_norm": 0.6318407960199005, + "acc_norm_stderr": 0.03410410565495301 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4393063583815029, + "acc_stderr": 0.03784271932887467, + "acc_norm": 0.4393063583815029, + "acc_norm_stderr": 0.03784271932887467 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3306878306878307, + "acc_stderr": 0.02422996529842508, + "acc_norm": 0.3306878306878307, + "acc_norm_stderr": 0.02422996529842508 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4236111111111111, + "acc_stderr": 0.04132125019723368, + "acc_norm": 0.4236111111111111, + "acc_norm_stderr": 0.04132125019723368 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.64, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.64, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.49710982658959535, + "acc_stderr": 0.02691864538323901, + "acc_norm": 0.49710982658959535, + "acc_norm_stderr": 0.02691864538323901 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.48466257668711654, + "acc_stderr": 0.03926522378708843, + "acc_norm": 0.48466257668711654, + "acc_norm_stderr": 0.03926522378708843 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.02780165621232366, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.02780165621232366 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5958549222797928, + "acc_stderr": 0.0354150857888402, + "acc_norm": 0.5958549222797928, + "acc_norm_stderr": 0.0354150857888402 + }, + 
"harness|ko_mmlu_econometrics|5": { + "acc": 0.34210526315789475, + "acc_stderr": 0.044629175353369376, + "acc_norm": 0.34210526315789475, + "acc_norm_stderr": 0.044629175353369376 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6330275229357798, + "acc_stderr": 0.020664675659520532, + "acc_norm": 0.6330275229357798, + "acc_norm_stderr": 0.020664675659520532 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.040061680838488774, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.040061680838488774 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5228758169934641, + "acc_stderr": 0.028599936776089782, + "acc_norm": 0.5228758169934641, + "acc_norm_stderr": 0.028599936776089782 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.043913262867240704, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.043913262867240704 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5, + "acc_stderr": 0.04068942293855797, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04068942293855797 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.41830065359477125, + "acc_stderr": 0.019955975145835542, + "acc_norm": 0.41830065359477125, + "acc_norm_stderr": 0.019955975145835542 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.35106382978723405, + "acc_stderr": 0.028473501272963775, + "acc_norm": 0.35106382978723405, + "acc_norm_stderr": 0.028473501272963775 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.042878587513404544, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.042878587513404544 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.33796296296296297, + "acc_stderr": 0.03225941352631295, + "acc_norm": 0.33796296296296297, + 
"acc_norm_stderr": 0.03225941352631295 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24022346368715083, + "acc_stderr": 0.014288343803925295, + "acc_norm": 0.24022346368715083, + "acc_norm_stderr": 0.014288343803925295 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4375, + "acc_stderr": 0.030134614954403924, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.030134614954403924 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4857142857142857, + "acc_stderr": 0.03199615232806287, + "acc_norm": 0.4857142857142857, + "acc_norm_stderr": 0.03199615232806287 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6286919831223629, + "acc_stderr": 0.03145068600744859, + "acc_norm": 0.6286919831223629, + "acc_norm_stderr": 0.03145068600744859 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3363754889178618, + "acc_stderr": 0.012067083079452225, + "acc_norm": 0.3363754889178618, + "acc_norm_stderr": 0.012067083079452225 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5343137254901961, + "acc_stderr": 0.03501038327635897, + "acc_norm": 0.5343137254901961, + "acc_norm_stderr": 0.03501038327635897 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5393939393939394, + "acc_stderr": 0.03892207016552012, + "acc_norm": 0.5393939393939394, + "acc_norm_stderr": 0.03892207016552012 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2594859241126071, + "mc1_stderr": 0.015345409485557956, + "mc2": 0.41368059870095875, + "mc2_stderr": 0.014743626921643821 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.564344746162928, + "acc_stderr": 0.017047415229476316, + 
"acc_norm": 0.6115702479338843, + "acc_norm_stderr": 0.01675692157106942 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + 
"harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "oopsung/Yi-Ko-6B-orcapus-test-v1", + "model_sha": "aaf03f76d00856f3e4a43a182bf04ea58a914d68", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/oopsung/Yi-Ko-6B-tech-test-v1/result_2023-12-06 23:00:10.json b/oopsung/Yi-Ko-6B-tech-test-v1/result_2023-12-06 23:00:10.json new file mode 100644 index 0000000000000000000000000000000000000000..0495055be90a4eeac16b3b17c5b199a748ceb982 --- /dev/null +++ b/oopsung/Yi-Ko-6B-tech-test-v1/result_2023-12-06 23:00:10.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.34044368600682595, + "acc_stderr": 0.01384746051889298, + "acc_norm": 0.4069965870307167, + "acc_norm_stderr": 0.014356399418009126 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3963353913563035, + "acc_stderr": 0.0048813595891489935, + "acc_norm": 0.5315674168492333, + "acc_norm_stderr": 0.004979826829400774 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.49707602339181284, + "acc_stderr": 0.03834759370936839, + "acc_norm": 0.49707602339181284, + 
"acc_norm_stderr": 0.03834759370936839 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5533980582524272, + "acc_stderr": 0.04922424153458933, + "acc_norm": 0.5533980582524272, + "acc_norm_stderr": 0.04922424153458933 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5338441890166028, + "acc_stderr": 0.017838956009136802, + "acc_norm": 0.5338441890166028, + "acc_norm_stderr": 0.017838956009136802 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.042925967182569816, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.042925967182569816 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4085106382978723, + "acc_stderr": 0.03213418026701576, + "acc_norm": 0.4085106382978723, + "acc_norm_stderr": 0.03213418026701576 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.41566265060240964, + "acc_stderr": 0.038367221765980515, + "acc_norm": 0.41566265060240964, + "acc_norm_stderr": 0.038367221765980515 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5144694533762058, + "acc_stderr": 0.028386198084177687, + "acc_norm": 0.5144694533762058, + "acc_norm_stderr": 0.028386198084177687 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5112107623318386, + "acc_stderr": 0.033549366530984746, + "acc_norm": 0.5112107623318386, + "acc_norm_stderr": 0.033549366530984746 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5190839694656488, + "acc_stderr": 0.04382094705550988, + "acc_norm": 0.5190839694656488, + "acc_norm_stderr": 0.04382094705550988 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6464646464646465, + "acc_stderr": 0.03406086723547153, + "acc_norm": 0.6464646464646465, + 
"acc_norm_stderr": 0.03406086723547153 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5103448275862069, + "acc_stderr": 0.04165774775728762, + "acc_norm": 0.5103448275862069, + "acc_norm_stderr": 0.04165774775728762 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04690650298201942, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04690650298201942 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5168067226890757, + "acc_stderr": 0.03246013680375308, + "acc_norm": 0.5168067226890757, + "acc_norm_stderr": 0.03246013680375308 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.46923076923076923, + "acc_stderr": 0.02530295889085015, + "acc_norm": 0.46923076923076923, + "acc_norm_stderr": 0.02530295889085015 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.62, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.62, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.04820403072760627, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.04820403072760627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.43349753694581283, + "acc_stderr": 0.034867317274198714, + "acc_norm": 0.43349753694581283, + "acc_norm_stderr": 0.034867317274198714 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5129032258064516, + "acc_stderr": 0.028434533152681876, + "acc_norm": 0.5129032258064516, + "acc_norm_stderr": 0.028434533152681876 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7222222222222222, + "acc_stderr": 0.02934311479809447, + "acc_norm": 0.7222222222222222, + "acc_norm_stderr": 0.02934311479809447 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.539622641509434, + "acc_stderr": 
0.030676096599389177, + "acc_norm": 0.539622641509434, + "acc_norm_stderr": 0.030676096599389177 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.028317533496066468, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.028317533496066468 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3509933774834437, + "acc_stderr": 0.03896981964257374, + "acc_norm": 0.3509933774834437, + "acc_norm_stderr": 0.03896981964257374 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6368159203980099, + "acc_stderr": 0.034005985055990146, + "acc_norm": 0.6368159203980099, + "acc_norm_stderr": 0.034005985055990146 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.44508670520231214, + "acc_stderr": 0.037894017602836484, + "acc_norm": 0.44508670520231214, + "acc_norm_stderr": 0.037894017602836484 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3306878306878307, + "acc_stderr": 0.02422996529842508, + "acc_norm": 0.3306878306878307, + "acc_norm_stderr": 0.02422996529842508 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4236111111111111, + "acc_stderr": 0.04132125019723368, + "acc_norm": 0.4236111111111111, + "acc_norm_stderr": 0.04132125019723368 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.65, + "acc_stderr": 0.04793724854411019, + "acc_norm": 0.65, + "acc_norm_stderr": 0.04793724854411019 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5144508670520231, + "acc_stderr": 0.02690784985628254, + "acc_norm": 0.5144508670520231, + "acc_norm_stderr": 0.02690784985628254 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 
0.49079754601226994, + "acc_stderr": 0.039277056007874414, + "acc_norm": 0.49079754601226994, + "acc_norm_stderr": 0.039277056007874414 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4783950617283951, + "acc_stderr": 0.027794760105008746, + "acc_norm": 0.4783950617283951, + "acc_norm_stderr": 0.027794760105008746 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6010362694300518, + "acc_stderr": 0.03533999094065696, + "acc_norm": 0.6010362694300518, + "acc_norm_stderr": 0.03533999094065696 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3508771929824561, + "acc_stderr": 0.04489539350270698, + "acc_norm": 0.3508771929824561, + "acc_norm_stderr": 0.04489539350270698 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.634862385321101, + "acc_stderr": 0.020642801454384, + "acc_norm": 0.634862385321101, + "acc_norm_stderr": 0.020642801454384 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2698412698412698, + "acc_stderr": 0.03970158273235173, + "acc_norm": 0.2698412698412698, + "acc_norm_stderr": 0.03970158273235173 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5294117647058824, + "acc_stderr": 0.028580341065138293, + "acc_norm": 0.5294117647058824, + "acc_norm_stderr": 0.028580341065138293 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6033057851239669, + "acc_stderr": 0.044658697805310094, + "acc_norm": 0.6033057851239669, + "acc_norm_stderr": 0.044658697805310094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4934210526315789, + "acc_stderr": 0.040685900502249704, + "acc_norm": 0.4934210526315789, + "acc_norm_stderr": 0.040685900502249704 + }, + 
"harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4117647058823529, + "acc_stderr": 0.019910377463105935, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.019910377463105935 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.35815602836879434, + "acc_stderr": 0.02860208586275942, + "acc_norm": 0.35815602836879434, + "acc_norm_stderr": 0.02860208586275942 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2767857142857143, + "acc_stderr": 0.042466243366976235, + "acc_norm": 0.2767857142857143, + "acc_norm_stderr": 0.042466243366976235 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3472222222222222, + "acc_stderr": 0.032468872436376486, + "acc_norm": 0.3472222222222222, + "acc_norm_stderr": 0.032468872436376486 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2435754189944134, + "acc_stderr": 0.01435591196476786, + "acc_norm": 0.2435754189944134, + "acc_norm_stderr": 0.01435591196476786 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4485294117647059, + "acc_stderr": 0.0302114796091216, + "acc_norm": 0.4485294117647059, + "acc_norm_stderr": 0.0302114796091216 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4775510204081633, + "acc_stderr": 0.031976941187136725, + "acc_norm": 0.4775510204081633, + "acc_norm_stderr": 0.031976941187136725 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6497890295358649, + "acc_stderr": 0.031052391937584346, + "acc_norm": 0.6497890295358649, + "acc_norm_stderr": 0.031052391937584346 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.33702737940026073, + "acc_stderr": 0.012072836273691327, + 
"acc_norm": 0.33702737940026073, + "acc_norm_stderr": 0.012072836273691327 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5294117647058824, + "acc_stderr": 0.03503235296367992, + "acc_norm": 0.5294117647058824, + "acc_norm_stderr": 0.03503235296367992 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5393939393939394, + "acc_stderr": 0.03892207016552012, + "acc_norm": 0.5393939393939394, + "acc_norm_stderr": 0.03892207016552012 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2607099143206854, + "mc1_stderr": 0.015368841620766372, + "mc2": 0.4089346755804923, + "mc2_stderr": 0.014760925941294287 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5726092089728453, + "acc_stderr": 0.017008129844823156, + "acc_norm": 0.6139315230224321, + "acc_norm_stderr": 0.01673813076032175 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "oopsung/Yi-Ko-6B-tech-test-v1", + "model_sha": "f47a57c15cae3832335d3df550ff08f20c717822", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/oopsung/Yi-Ko-6B-wiki-test-v1/result_2023-12-07 00:36:08.json b/oopsung/Yi-Ko-6B-wiki-test-v1/result_2023-12-07 00:36:08.json new file mode 
100644 index 0000000000000000000000000000000000000000..6acf4adaa0b4e6779d8a6e6b6dcd81ce500c9c72 --- /dev/null +++ b/oopsung/Yi-Ko-6B-wiki-test-v1/result_2023-12-07 00:36:08.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3438566552901024, + "acc_stderr": 0.013880644570156211, + "acc_norm": 0.4121160409556314, + "acc_norm_stderr": 0.0143839153022254 + }, + "harness|ko_hellaswag|10": { + "acc": 0.39842660824536946, + "acc_stderr": 0.004885735963346905, + "acc_norm": 0.5349531965743876, + "acc_norm_stderr": 0.004977574188421319 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.47368421052631576, + "acc_stderr": 0.038295098689947286, + "acc_norm": 0.47368421052631576, + "acc_norm_stderr": 0.038295098689947286 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5631067961165048, + "acc_stderr": 0.04911147107365777, + "acc_norm": 0.5631067961165048, + "acc_norm_stderr": 0.04911147107365777 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5312899106002554, + "acc_stderr": 0.01784491809046855, + "acc_norm": 0.5312899106002554, + "acc_norm_stderr": 0.01784491809046855 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45185185185185184, + "acc_stderr": 0.042992689054808624, + "acc_norm": 0.45185185185185184, + "acc_norm_stderr": 0.042992689054808624 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847415, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847415 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4085106382978723, + "acc_stderr": 0.03213418026701576, + "acc_norm": 0.4085106382978723, + "acc_norm_stderr": 0.03213418026701576 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.40963855421686746, + "acc_stderr": 0.03828401115079022, + "acc_norm": 0.40963855421686746, + "acc_norm_stderr": 0.03828401115079022 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5048231511254019, + "acc_stderr": 0.028396770444111298, + "acc_norm": 0.5048231511254019, + 
"acc_norm_stderr": 0.028396770444111298 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5112107623318386, + "acc_stderr": 0.033549366530984746, + "acc_norm": 0.5112107623318386, + "acc_norm_stderr": 0.033549366530984746 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5114503816793893, + "acc_stderr": 0.043841400240780176, + "acc_norm": 0.5114503816793893, + "acc_norm_stderr": 0.043841400240780176 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6464646464646465, + "acc_stderr": 0.03406086723547155, + "acc_norm": 0.6464646464646465, + "acc_norm_stderr": 0.03406086723547155 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.503448275862069, + "acc_stderr": 0.04166567577101579, + "acc_norm": 0.503448275862069, + "acc_norm_stderr": 0.04166567577101579 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3431372549019608, + "acc_stderr": 0.04724007352383888, + "acc_norm": 0.3431372549019608, + "acc_norm_stderr": 0.04724007352383888 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5168067226890757, + "acc_stderr": 0.03246013680375308, + "acc_norm": 0.5168067226890757, + "acc_norm_stderr": 0.03246013680375308 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.46923076923076923, + "acc_stderr": 0.025302958890850154, + "acc_norm": 0.46923076923076923, + "acc_norm_stderr": 0.025302958890850154 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.04832853553437055, + "acc_norm": 
0.49074074074074076, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.43349753694581283, + "acc_stderr": 0.03486731727419872, + "acc_norm": 0.43349753694581283, + "acc_norm_stderr": 0.03486731727419872 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.532258064516129, + "acc_stderr": 0.02838474778881334, + "acc_norm": 0.532258064516129, + "acc_norm_stderr": 0.02838474778881334 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7222222222222222, + "acc_stderr": 0.029343114798094462, + "acc_norm": 0.7222222222222222, + "acc_norm_stderr": 0.029343114798094462 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5358490566037736, + "acc_stderr": 0.030693675018458003, + "acc_norm": 0.5358490566037736, + "acc_norm_stderr": 0.030693675018458003 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4727272727272727, + "acc_stderr": 0.04782001791380063, + "acc_norm": 0.4727272727272727, + "acc_norm_stderr": 0.04782001791380063 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3037037037037037, + "acc_stderr": 0.02803792996911498, + "acc_norm": 0.3037037037037037, + "acc_norm_stderr": 0.02803792996911498 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.36423841059602646, + "acc_stderr": 0.03929111781242741, + "acc_norm": 0.36423841059602646, + "acc_norm_stderr": 0.03929111781242741 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6318407960199005, + "acc_stderr": 0.03410410565495301, + "acc_norm": 0.6318407960199005, + "acc_norm_stderr": 0.03410410565495301 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4508670520231214, + "acc_stderr": 0.0379401267469703, + "acc_norm": 0.4508670520231214, + "acc_norm_stderr": 0.0379401267469703 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.335978835978836, + "acc_stderr": 0.024326310529149138, + "acc_norm": 0.335978835978836, + "acc_norm_stderr": 0.024326310529149138 + }, + 
"harness|ko_mmlu_college_biology|5": { + "acc": 0.4375, + "acc_stderr": 0.04148415739394154, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.04148415739394154 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.67, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.67, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5057803468208093, + "acc_stderr": 0.026917296179149116, + "acc_norm": 0.5057803468208093, + "acc_norm_stderr": 0.026917296179149116 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4539877300613497, + "acc_stderr": 0.0391170190467718, + "acc_norm": 0.4539877300613497, + "acc_norm_stderr": 0.0391170190467718 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.027801656212323667, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.027801656212323667 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5906735751295337, + "acc_stderr": 0.03548608168860807, + "acc_norm": 0.5906735751295337, + "acc_norm_stderr": 0.03548608168860807 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.044346007015849245, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.044346007015849245 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6293577981651376, + "acc_stderr": 0.020707458164352984, + "acc_norm": 0.6293577981651376, + "acc_norm_stderr": 0.020707458164352984 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.039325376803928704, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.039325376803928704 + }, + 
"harness|ko_mmlu_nutrition|5": { + "acc": 0.5261437908496732, + "acc_stderr": 0.028590752958852394, + "acc_norm": 0.5261437908496732, + "acc_norm_stderr": 0.028590752958852394 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.55, + "acc_stderr": 0.05, + "acc_norm": 0.55, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5702479338842975, + "acc_stderr": 0.04519082021319772, + "acc_norm": 0.5702479338842975, + "acc_norm_stderr": 0.04519082021319772 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.48026315789473684, + "acc_stderr": 0.040657710025626036, + "acc_norm": 0.48026315789473684, + "acc_norm_stderr": 0.040657710025626036 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.41013071895424835, + "acc_stderr": 0.019898412717635896, + "acc_norm": 0.41013071895424835, + "acc_norm_stderr": 0.019898412717635896 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3617021276595745, + "acc_stderr": 0.0286638201471995, + "acc_norm": 0.3617021276595745, + "acc_norm_stderr": 0.0286638201471995 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25892857142857145, + "acc_stderr": 0.041577515398656284, + "acc_norm": 0.25892857142857145, + "acc_norm_stderr": 0.041577515398656284 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.032568505702936464, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.032568505702936464 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24134078212290502, + "acc_stderr": 0.014310999547961441, + "acc_norm": 0.24134078212290502, + "acc_norm_stderr": 0.014310999547961441 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + 
}, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4375, + "acc_stderr": 0.030134614954403924, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.030134614954403924 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4775510204081633, + "acc_stderr": 0.031976941187136725, + "acc_norm": 0.4775510204081633, + "acc_norm_stderr": 0.031976941187136725 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.620253164556962, + "acc_stderr": 0.0315918875296585, + "acc_norm": 0.620253164556962, + "acc_norm_stderr": 0.0315918875296585 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.32659713168187743, + "acc_stderr": 0.011977676704716004, + "acc_norm": 0.32659713168187743, + "acc_norm_stderr": 0.011977676704716004 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5392156862745098, + "acc_stderr": 0.03498501649369527, + "acc_norm": 0.5392156862745098, + "acc_norm_stderr": 0.03498501649369527 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.03888176921674101, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.03888176921674101 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.26438188494492043, + "mc1_stderr": 0.015438211119522509, + "mc2": 0.41603835953324, + "mc2_stderr": 0.014820168398190375 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5655253837072018, + "acc_stderr": 0.017042098620824935, + "acc_norm": 0.615112160566706, + "acc_norm_stderr": 0.016728579701498658 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + 
"harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + 
"harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "oopsung/Yi-Ko-6B-wiki-test-v1", + "model_sha": "aacbf8f82cb6c76af34851c83f2269d1ee45ee70", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/oopsung/Yi-Ko-ENC-v1/result_2024-01-04 02:13:31.json b/oopsung/Yi-Ko-ENC-v1/result_2024-01-04 02:13:31.json new file mode 100644 index 0000000000000000000000000000000000000000..0ab9afd11365c23c25bf9e22ec34a636f5c559a7 --- /dev/null +++ b/oopsung/Yi-Ko-ENC-v1/result_2024-01-04 02:13:31.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3310580204778157, + "acc_stderr": 0.013752062419817836, + "acc_norm": 0.40187713310580203, + "acc_norm_stderr": 0.014327268614578276 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3951404102768373, + "acc_stderr": 0.004878816961012046, + "acc_norm": 0.530372435769767, + "acc_norm_stderr": 0.0049805669077904536 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5029239766081871, + "acc_stderr": 0.03834759370936839, + "acc_norm": 0.5029239766081871, + "acc_norm_stderr": 0.03834759370936839 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5533980582524272, + "acc_stderr": 0.04922424153458933, + "acc_norm": 0.5533980582524272, + "acc_norm_stderr": 0.04922424153458933 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5402298850574713, + "acc_stderr": 0.017821994096933535, + "acc_norm": 0.5402298850574713, + "acc_norm_stderr": 0.017821994096933535 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.42962962962962964, + "acc_stderr": 0.04276349494376598, + "acc_norm": 0.42962962962962964, + "acc_norm_stderr": 0.04276349494376598 + }, + 
"harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.40425531914893614, + "acc_stderr": 0.032081157507886836, + "acc_norm": 0.40425531914893614, + "acc_norm_stderr": 0.032081157507886836 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39759036144578314, + "acc_stderr": 0.03809973084540219, + "acc_norm": 0.39759036144578314, + "acc_norm_stderr": 0.03809973084540219 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5112540192926045, + "acc_stderr": 0.028390897396863526, + "acc_norm": 0.5112540192926045, + "acc_norm_stderr": 0.028390897396863526 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.48878923766816146, + "acc_stderr": 0.033549366530984746, + "acc_norm": 0.48878923766816146, + "acc_norm_stderr": 0.033549366530984746 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5114503816793893, + "acc_stderr": 0.043841400240780176, + "acc_norm": 0.5114503816793893, + "acc_norm_stderr": 0.043841400240780176 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6565656565656566, + "acc_stderr": 0.03383201223244444, + "acc_norm": 0.6565656565656566, + "acc_norm_stderr": 0.03383201223244444 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.503448275862069, + "acc_stderr": 0.04166567577101579, + "acc_norm": 0.503448275862069, + "acc_norm_stderr": 0.04166567577101579 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04690650298201942, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04690650298201942 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5252100840336135, + "acc_stderr": 0.0324371805513741, + "acc_norm": 0.5252100840336135, + "acc_norm_stderr": 
0.0324371805513741 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.48717948717948717, + "acc_stderr": 0.025342671293807264, + "acc_norm": 0.48717948717948717, + "acc_norm_stderr": 0.025342671293807264 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.59, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.59, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4236453201970443, + "acc_stderr": 0.034767257476490385, + "acc_norm": 0.4236453201970443, + "acc_norm_stderr": 0.034767257476490385 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5161290322580645, + "acc_stderr": 0.028429203176724555, + "acc_norm": 0.5161290322580645, + "acc_norm_stderr": 0.028429203176724555 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7094017094017094, + "acc_stderr": 0.02974504857267406, + "acc_norm": 0.7094017094017094, + "acc_norm_stderr": 0.02974504857267406 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5283018867924528, + "acc_stderr": 0.0307235352490061, + "acc_norm": 0.5283018867924528, + "acc_norm_stderr": 0.0307235352490061 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5363636363636364, + "acc_stderr": 0.04776449162396197, + "acc_norm": 0.5363636363636364, + "acc_norm_stderr": 0.04776449162396197 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.02794045713622841, + "acc_norm": 0.3, + "acc_norm_stderr": 0.02794045713622841 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3509933774834437, + "acc_stderr": 0.03896981964257374, + "acc_norm": 0.3509933774834437, + 
"acc_norm_stderr": 0.03896981964257374 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6318407960199005, + "acc_stderr": 0.03410410565495302, + "acc_norm": 0.6318407960199005, + "acc_norm_stderr": 0.03410410565495302 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4624277456647399, + "acc_stderr": 0.038016851045244604, + "acc_norm": 0.4624277456647399, + "acc_norm_stderr": 0.038016851045244604 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3306878306878307, + "acc_stderr": 0.02422996529842508, + "acc_norm": 0.3306878306878307, + "acc_norm_stderr": 0.02422996529842508 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4583333333333333, + "acc_stderr": 0.04166666666666665, + "acc_norm": 0.4583333333333333, + "acc_norm_stderr": 0.04166666666666665 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.64, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.64, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5028901734104047, + "acc_stderr": 0.02691864538323901, + "acc_norm": 0.5028901734104047, + "acc_norm_stderr": 0.02691864538323901 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4785276073619632, + "acc_stderr": 0.03924746876751129, + "acc_norm": 0.4785276073619632, + "acc_norm_stderr": 0.03924746876751129 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4845679012345679, + "acc_stderr": 0.02780749004427619, + "acc_norm": 0.4845679012345679, + "acc_norm_stderr": 0.02780749004427619 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6010362694300518, + "acc_stderr": 0.03533999094065696, + "acc_norm": 
0.6010362694300518, + "acc_norm_stderr": 0.03533999094065696 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.35964912280701755, + "acc_stderr": 0.04514496132873633, + "acc_norm": 0.35964912280701755, + "acc_norm_stderr": 0.04514496132873633 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6238532110091743, + "acc_stderr": 0.020769231968205074, + "acc_norm": 0.6238532110091743, + "acc_norm_stderr": 0.020769231968205074 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.23809523809523808, + "acc_stderr": 0.03809523809523811, + "acc_norm": 0.23809523809523808, + "acc_norm_stderr": 0.03809523809523811 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5098039215686274, + "acc_stderr": 0.028624412550167958, + "acc_norm": 0.5098039215686274, + "acc_norm_stderr": 0.028624412550167958 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.55, + "acc_stderr": 0.049999999999999996, + "acc_norm": 0.55, + "acc_norm_stderr": 0.049999999999999996 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6033057851239669, + "acc_stderr": 0.044658697805310094, + "acc_norm": 0.6033057851239669, + "acc_norm_stderr": 0.044658697805310094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.48026315789473684, + "acc_stderr": 0.040657710025626036, + "acc_norm": 0.48026315789473684, + "acc_norm_stderr": 0.040657710025626036 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4199346405228758, + "acc_stderr": 0.01996681117825648, + "acc_norm": 0.4199346405228758, + "acc_norm_stderr": 0.01996681117825648 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.36524822695035464, + "acc_stderr": 0.028723863853281285, + "acc_norm": 0.36524822695035464, + "acc_norm_stderr": 0.028723863853281285 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.26785714285714285, + "acc_stderr": 0.04203277291467762, + "acc_norm": 0.26785714285714285, + "acc_norm_stderr": 0.04203277291467762 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 
0.3472222222222222, + "acc_stderr": 0.032468872436376486, + "acc_norm": 0.3472222222222222, + "acc_norm_stderr": 0.032468872436376486 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.46691176470588236, + "acc_stderr": 0.030306257722468317, + "acc_norm": 0.46691176470588236, + "acc_norm_stderr": 0.030306257722468317 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4489795918367347, + "acc_stderr": 0.0318421386668758, + "acc_norm": 0.4489795918367347, + "acc_norm_stderr": 0.0318421386668758 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6244725738396625, + "acc_stderr": 0.03152256243091156, + "acc_norm": 0.6244725738396625, + "acc_norm_stderr": 0.03152256243091156 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.33833116036505867, + "acc_stderr": 0.01208426562634422, + "acc_norm": 0.33833116036505867, + "acc_norm_stderr": 0.01208426562634422 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5049019607843137, + "acc_stderr": 0.03509143375606786, + "acc_norm": 0.5049019607843137, + "acc_norm_stderr": 0.03509143375606786 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5393939393939394, + "acc_stderr": 0.03892207016552012, + "acc_norm": 0.5393939393939394, + "acc_norm_stderr": 0.03892207016552012 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.26193390452876375, + "mc1_stderr": 0.015392118805015, + "mc2": 0.42083960965137773, + "mc2_stderr": 0.014833347503944825 
+ }, + "harness|ko_commongen_v2|2": { + "acc": 0.5584415584415584, + "acc_stderr": 0.017072525875563103, + "acc_norm": 0.5985832349468713, + "acc_norm_stderr": 0.01685290785872906 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + 
"harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "oopsung/Yi-Ko-ENC-v1", + "model_sha": "ce6094e6bac3ae4771b04f539dc3a322676cf27d", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/oopsung/Yi-Ko-ENCdpo/result_2024-01-15 23:04:18.json b/oopsung/Yi-Ko-ENCdpo/result_2024-01-15 23:04:18.json new file mode 100644 index 0000000000000000000000000000000000000000..45965fb0a9e2380df1ab52fc1be311a17324721f --- /dev/null +++ b/oopsung/Yi-Ko-ENCdpo/result_2024-01-15 23:04:18.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.36177474402730375, + "acc_stderr": 0.014041957945038078, + "acc_norm": 0.4232081911262799, + "acc_norm_stderr": 0.014438036220848027 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4054969129655447, + "acc_stderr": 0.004899845087183104, + "acc_norm": 0.5448117904799841, + "acc_norm_stderr": 0.0049697010810683786 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5087719298245614, + 
"acc_stderr": 0.038342347441649924, + "acc_norm": 0.5087719298245614, + "acc_norm_stderr": 0.038342347441649924 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5436893203883495, + "acc_stderr": 0.049318019942204146, + "acc_norm": 0.5436893203883495, + "acc_norm_stderr": 0.049318019942204146 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5261813537675607, + "acc_stderr": 0.01785543455404199, + "acc_norm": 0.5261813537675607, + "acc_norm_stderr": 0.01785543455404199 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45185185185185184, + "acc_stderr": 0.042992689054808624, + "acc_norm": 0.45185185185185184, + "acc_norm_stderr": 0.042992689054808624 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39574468085106385, + "acc_stderr": 0.03196758697835362, + "acc_norm": 0.39574468085106385, + "acc_norm_stderr": 0.03196758697835362 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42168674698795183, + "acc_stderr": 0.03844453181770917, + "acc_norm": 0.42168674698795183, + "acc_norm_stderr": 0.03844453181770917 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5112540192926045, + "acc_stderr": 0.028390897396863533, + "acc_norm": 0.5112540192926045, + "acc_norm_stderr": 0.028390897396863533 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5112107623318386, + "acc_stderr": 0.033549366530984746, + "acc_norm": 0.5112107623318386, + "acc_norm_stderr": 0.033549366530984746 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5267175572519084, + "acc_stderr": 0.04379024936553894, + "acc_norm": 0.5267175572519084, + "acc_norm_stderr": 0.04379024936553894 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6262626262626263, + 
"acc_stderr": 0.03446897738659332, + "acc_norm": 0.6262626262626263, + "acc_norm_stderr": 0.03446897738659332 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5172413793103449, + "acc_stderr": 0.04164188720169375, + "acc_norm": 0.5172413793103449, + "acc_norm_stderr": 0.04164188720169375 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3431372549019608, + "acc_stderr": 0.04724007352383888, + "acc_norm": 0.3431372549019608, + "acc_norm_stderr": 0.04724007352383888 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5042016806722689, + "acc_stderr": 0.03247734334448111, + "acc_norm": 0.5042016806722689, + "acc_norm_stderr": 0.03247734334448111 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.47692307692307695, + "acc_stderr": 0.025323990861736118, + "acc_norm": 0.47692307692307695, + "acc_norm_stderr": 0.025323990861736118 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.59, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.59, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4236453201970443, + "acc_stderr": 0.03476725747649037, + "acc_norm": 0.4236453201970443, + "acc_norm_stderr": 0.03476725747649037 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5225806451612903, + "acc_stderr": 0.02841498501970786, + "acc_norm": 0.5225806451612903, + "acc_norm_stderr": 0.02841498501970786 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7435897435897436, + "acc_stderr": 0.028605953702004253, + "acc_norm": 0.7435897435897436, + "acc_norm_stderr": 0.028605953702004253 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 
0.5283018867924528, + "acc_stderr": 0.0307235352490061, + "acc_norm": 0.5283018867924528, + "acc_norm_stderr": 0.0307235352490061 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.028317533496066468, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.028317533496066468 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3708609271523179, + "acc_stderr": 0.03943966699183629, + "acc_norm": 0.3708609271523179, + "acc_norm_stderr": 0.03943966699183629 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6417910447761194, + "acc_stderr": 0.03390393042268814, + "acc_norm": 0.6417910447761194, + "acc_norm_stderr": 0.03390393042268814 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.43352601156069365, + "acc_stderr": 0.03778621079092055, + "acc_norm": 0.43352601156069365, + "acc_norm_stderr": 0.03778621079092055 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3306878306878307, + "acc_stderr": 0.024229965298425072, + "acc_norm": 0.3306878306878307, + "acc_norm_stderr": 0.024229965298425072 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4375, + "acc_stderr": 0.04148415739394154, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.04148415739394154 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.35, + "acc_stderr": 0.04793724854411022, + "acc_norm": 0.35, + "acc_norm_stderr": 0.04793724854411022 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.64, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.64, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5086705202312138, + "acc_stderr": 0.02691504735536981, + "acc_norm": 0.5086705202312138, + "acc_norm_stderr": 0.02691504735536981 + }, + "harness|ko_mmlu_logical_fallacies|5": { + 
"acc": 0.4662576687116564, + "acc_stderr": 0.03919415545048409, + "acc_norm": 0.4662576687116564, + "acc_norm_stderr": 0.03919415545048409 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.47530864197530864, + "acc_stderr": 0.02778680093142745, + "acc_norm": 0.47530864197530864, + "acc_norm_stderr": 0.02778680093142745 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6062176165803109, + "acc_stderr": 0.035260770955482405, + "acc_norm": 0.6062176165803109, + "acc_norm_stderr": 0.035260770955482405 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3508771929824561, + "acc_stderr": 0.04489539350270698, + "acc_norm": 0.3508771929824561, + "acc_norm_stderr": 0.04489539350270698 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6220183486238532, + "acc_stderr": 0.020789187066728117, + "acc_norm": 0.6220183486238532, + "acc_norm_stderr": 0.020789187066728117 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.039325376803928704, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.039325376803928704 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5359477124183006, + "acc_stderr": 0.02855582751652878, + "acc_norm": 0.5359477124183006, + "acc_norm_stderr": 0.02855582751652878 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6033057851239669, + "acc_stderr": 0.044658697805310094, + "acc_norm": 0.6033057851239669, + "acc_norm_stderr": 0.044658697805310094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.48026315789473684, + "acc_stderr": 0.040657710025626036, + "acc_norm": 0.48026315789473684, + "acc_norm_stderr": 0.040657710025626036 + }, + 
"harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4150326797385621, + "acc_stderr": 0.01993362777685742, + "acc_norm": 0.4150326797385621, + "acc_norm_stderr": 0.01993362777685742 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3617021276595745, + "acc_stderr": 0.028663820147199502, + "acc_norm": 0.3617021276595745, + "acc_norm_stderr": 0.028663820147199502 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.26785714285714285, + "acc_stderr": 0.04203277291467762, + "acc_norm": 0.26785714285714285, + "acc_norm_stderr": 0.04203277291467762 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.03214952147802748, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.03214952147802748 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4485294117647059, + "acc_stderr": 0.030211479609121603, + "acc_norm": 0.4485294117647059, + "acc_norm_stderr": 0.030211479609121603 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4816326530612245, + "acc_stderr": 0.031987615467631264, + "acc_norm": 0.4816326530612245, + "acc_norm_stderr": 0.031987615467631264 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6455696202531646, + "acc_stderr": 0.0311373042971858, + "acc_norm": 0.6455696202531646, + "acc_norm_stderr": 0.0311373042971858 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3363754889178618, + "acc_stderr": 0.012067083079452225, + 
"acc_norm": 0.3363754889178618, + "acc_norm_stderr": 0.012067083079452225 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5245098039215687, + "acc_stderr": 0.035050931943487976, + "acc_norm": 0.5245098039215687, + "acc_norm_stderr": 0.035050931943487976 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5575757575757576, + "acc_stderr": 0.03878372113711275, + "acc_norm": 0.5575757575757576, + "acc_norm_stderr": 0.03878372113711275 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2631578947368421, + "mc1_stderr": 0.015415241740237033, + "mc2": 0.41465394556319485, + "mc2_stderr": 0.014829289481179515 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.551357733175915, + "acc_stderr": 0.01709943051472578, + "acc_norm": 0.5985832349468713, + "acc_norm_stderr": 0.01685290785872906 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "oopsung/Yi-Ko-ENCdpo", + "model_sha": "b62036b0e3dee2cb1e72a0f16425c802ac0e1a74", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/oopsung/Yi-Ko-ENW-v1/result_2024-01-11 05:46:35.json b/oopsung/Yi-Ko-ENW-v1/result_2024-01-11 05:46:35.json new file mode 100644 index 
0000000000000000000000000000000000000000..5481aef0503ada226c99a94a1d44ca248141a49b --- /dev/null +++ b/oopsung/Yi-Ko-ENW-v1/result_2024-01-11 05:46:35.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.33361774744027306, + "acc_stderr": 0.013778687054176536, + "acc_norm": 0.39505119453924914, + "acc_norm_stderr": 0.014285898292938169 + }, + "harness|ko_hellaswag|10": { + "acc": 0.39454291973710415, + "acc_stderr": 0.0048775342159870895, + "acc_norm": 0.5304720175263892, + "acc_norm_stderr": 0.004980506329407592 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4678362573099415, + "acc_stderr": 0.03826882417660369, + "acc_norm": 0.4678362573099415, + "acc_norm_stderr": 0.03826882417660369 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5631067961165048, + "acc_stderr": 0.04911147107365777, + "acc_norm": 0.5631067961165048, + "acc_norm_stderr": 0.04911147107365777 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5466155810983397, + "acc_stderr": 0.0178020871358503, + "acc_norm": 0.5466155810983397, + "acc_norm_stderr": 0.0178020871358503 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.042925967182569816, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.042925967182569816 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4127659574468085, + "acc_stderr": 0.03218471141400351, + "acc_norm": 0.4127659574468085, + "acc_norm_stderr": 0.03218471141400351 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4036144578313253, + "acc_stderr": 0.03819486140758397, + "acc_norm": 0.4036144578313253, + "acc_norm_stderr": 0.03819486140758397 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5080385852090032, + "acc_stderr": 0.028394421370984538, + "acc_norm": 0.5080385852090032, + "acc_norm_stderr": 0.028394421370984538 
+ }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.49327354260089684, + "acc_stderr": 0.03355476596234354, + "acc_norm": 0.49327354260089684, + "acc_norm_stderr": 0.03355476596234354 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5190839694656488, + "acc_stderr": 0.043820947055509867, + "acc_norm": 0.5190839694656488, + "acc_norm_stderr": 0.043820947055509867 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6414141414141414, + "acc_stderr": 0.03416903640391521, + "acc_norm": 0.6414141414141414, + "acc_norm_stderr": 0.03416903640391521 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.503448275862069, + "acc_stderr": 0.04166567577101579, + "acc_norm": 0.503448275862069, + "acc_norm_stderr": 0.04166567577101579 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.04533838195929775, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.04533838195929775 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5168067226890757, + "acc_stderr": 0.03246013680375308, + "acc_norm": 0.5168067226890757, + "acc_norm_stderr": 0.03246013680375308 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.47692307692307695, + "acc_stderr": 0.025323990861736118, + "acc_norm": 0.47692307692307695, + "acc_norm_stderr": 0.025323990861736118 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.04826217294139894, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 
0.04826217294139894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4088669950738916, + "acc_stderr": 0.03459058815883231, + "acc_norm": 0.4088669950738916, + "acc_norm_stderr": 0.03459058815883231 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5129032258064516, + "acc_stderr": 0.028434533152681876, + "acc_norm": 0.5129032258064516, + "acc_norm_stderr": 0.028434533152681876 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7051282051282052, + "acc_stderr": 0.02987257770889118, + "acc_norm": 0.7051282051282052, + "acc_norm_stderr": 0.02987257770889118 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5245283018867924, + "acc_stderr": 0.030735822206205615, + "acc_norm": 0.5245283018867924, + "acc_norm_stderr": 0.030735822206205615 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5272727272727272, + "acc_stderr": 0.04782001791380061, + "acc_norm": 0.5272727272727272, + "acc_norm_stderr": 0.04782001791380061 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.02794045713622842, + "acc_norm": 0.3, + "acc_norm_stderr": 0.02794045713622842 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33774834437086093, + "acc_stderr": 0.03861557546255168, + "acc_norm": 0.33774834437086093, + "acc_norm_stderr": 0.03861557546255168 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6069651741293532, + "acc_stderr": 0.0345368246603156, + "acc_norm": 0.6069651741293532, + "acc_norm_stderr": 0.0345368246603156 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4393063583815029, + "acc_stderr": 0.03784271932887467, + "acc_norm": 0.4393063583815029, + "acc_norm_stderr": 0.03784271932887467 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3201058201058201, + "acc_stderr": 0.024026846392873502, + "acc_norm": 0.3201058201058201, + "acc_norm_stderr": 0.024026846392873502 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4305555555555556, + "acc_stderr": 0.04140685639111503, 
+ "acc_norm": 0.4305555555555556, + "acc_norm_stderr": 0.04140685639111503 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.63, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.63, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5115606936416185, + "acc_stderr": 0.02691189868637792, + "acc_norm": 0.5115606936416185, + "acc_norm_stderr": 0.02691189868637792 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.48466257668711654, + "acc_stderr": 0.03926522378708843, + "acc_norm": 0.48466257668711654, + "acc_norm_stderr": 0.03926522378708843 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.49691358024691357, + "acc_stderr": 0.027820214158594384, + "acc_norm": 0.49691358024691357, + "acc_norm_stderr": 0.027820214158594384 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6062176165803109, + "acc_stderr": 0.035260770955482405, + "acc_norm": 0.6062176165803109, + "acc_norm_stderr": 0.035260770955482405 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3684210526315789, + "acc_stderr": 0.04537815354939391, + "acc_norm": 0.3684210526315789, + "acc_norm_stderr": 0.04537815354939391 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6091743119266055, + "acc_stderr": 0.02092005834611107, + "acc_norm": 0.6091743119266055, + "acc_norm_stderr": 0.02092005834611107 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.23015873015873015, + "acc_stderr": 0.03764950879790605, + "acc_norm": 0.23015873015873015, + "acc_norm_stderr": 0.03764950879790605 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5130718954248366, + "acc_stderr": 
0.028620130800700246, + "acc_norm": 0.5130718954248366, + "acc_norm_stderr": 0.028620130800700246 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5867768595041323, + "acc_stderr": 0.04495087843548408, + "acc_norm": 0.5867768595041323, + "acc_norm_stderr": 0.04495087843548408 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5, + "acc_stderr": 0.04068942293855797, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04068942293855797 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4215686274509804, + "acc_stderr": 0.01997742260022747, + "acc_norm": 0.4215686274509804, + "acc_norm_stderr": 0.01997742260022747 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.35106382978723405, + "acc_stderr": 0.028473501272963768, + "acc_norm": 0.35106382978723405, + "acc_norm_stderr": 0.028473501272963768 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3125, + "acc_stderr": 0.043994650575715215, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.043994650575715215 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3194444444444444, + "acc_stderr": 0.03179876342176852, + "acc_norm": 0.3194444444444444, + "acc_norm_stderr": 0.03179876342176852 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24022346368715083, + "acc_stderr": 0.014288343803925295, + "acc_norm": 0.24022346368715083, + "acc_norm_stderr": 0.014288343803925295 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.41911764705882354, + "acc_stderr": 
0.029972807170464626, + "acc_norm": 0.41911764705882354, + "acc_norm_stderr": 0.029972807170464626 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4163265306122449, + "acc_stderr": 0.03155782816556164, + "acc_norm": 0.4163265306122449, + "acc_norm_stderr": 0.03155782816556164 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6160337552742616, + "acc_stderr": 0.031658678064106674, + "acc_norm": 0.6160337552742616, + "acc_norm_stderr": 0.031658678064106674 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3363754889178618, + "acc_stderr": 0.012067083079452225, + "acc_norm": 0.3363754889178618, + "acc_norm_stderr": 0.012067083079452225 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.47058823529411764, + "acc_stderr": 0.03503235296367993, + "acc_norm": 0.47058823529411764, + "acc_norm_stderr": 0.03503235296367993 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5272727272727272, + "acc_stderr": 0.03898531605579418, + "acc_norm": 0.5272727272727272, + "acc_norm_stderr": 0.03898531605579418 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2582619339045288, + "mc1_stderr": 0.01532182168847619, + "mc2": 0.417295473151574, + "mc2_stderr": 0.01479169446306661 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5419126328217237, + "acc_stderr": 0.017129852117911144, + "acc_norm": 0.5832349468713105, + "acc_norm_stderr": 0.01695048914610882 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + 
"harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + 
"harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "oopsung/Yi-Ko-ENW-v1", + "model_sha": "efdac4ac4898e003628c593c43105ac831ef26ca", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/oopsung/Yi-Ko-ENWdpo-v1/result_2024-01-16 01:23:59.json b/oopsung/Yi-Ko-ENWdpo-v1/result_2024-01-16 01:23:59.json new file mode 100644 index 0000000000000000000000000000000000000000..d3058d11c128cc1602aaea129b4bf3b0abbe76c5 --- /dev/null +++ b/oopsung/Yi-Ko-ENWdpo-v1/result_2024-01-16 01:23:59.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3609215017064846, + "acc_stderr": 0.014034761386175452, + "acc_norm": 0.4274744027303754, + "acc_norm_stderr": 0.014456862944650654 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4053973312089225, + "acc_stderr": 0.0048996537040328384, + "acc_norm": 0.5444134634534953, + "acc_norm_stderr": 0.004970057183367312 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5146198830409356, + "acc_stderr": 0.038331852752130254, + "acc_norm": 0.5146198830409356, + "acc_norm_stderr": 0.038331852752130254 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5436893203883495, + "acc_stderr": 0.049318019942204146, + "acc_norm": 0.5436893203883495, + "acc_norm_stderr": 0.049318019942204146 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5261813537675607, + "acc_stderr": 0.01785543455404199, + "acc_norm": 0.5261813537675607, + "acc_norm_stderr": 0.01785543455404199 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45185185185185184, + "acc_stderr": 0.042992689054808624, + "acc_norm": 0.45185185185185184, + "acc_norm_stderr": 0.042992689054808624 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 
0.27, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39148936170212767, + "acc_stderr": 0.03190701242326812, + "acc_norm": 0.39148936170212767, + "acc_norm_stderr": 0.03190701242326812 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42168674698795183, + "acc_stderr": 0.03844453181770917, + "acc_norm": 0.42168674698795183, + "acc_norm_stderr": 0.03844453181770917 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5080385852090032, + "acc_stderr": 0.028394421370984538, + "acc_norm": 0.5080385852090032, + "acc_norm_stderr": 0.028394421370984538 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5067264573991032, + "acc_stderr": 0.03355476596234355, + "acc_norm": 0.5067264573991032, + "acc_norm_stderr": 0.03355476596234355 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5267175572519084, + "acc_stderr": 0.04379024936553894, + "acc_norm": 0.5267175572519084, + "acc_norm_stderr": 0.04379024936553894 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.44, + "acc_stderr": 0.0498887651569859, + "acc_norm": 0.44, + "acc_norm_stderr": 0.0498887651569859 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6262626262626263, + "acc_stderr": 0.03446897738659332, + "acc_norm": 0.6262626262626263, + "acc_norm_stderr": 0.03446897738659332 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5172413793103449, + "acc_stderr": 0.04164188720169375, + "acc_norm": 0.5172413793103449, + "acc_norm_stderr": 0.04164188720169375 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3431372549019608, + "acc_stderr": 0.04724007352383888, + "acc_norm": 0.3431372549019608, + "acc_norm_stderr": 0.04724007352383888 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5126050420168067, + "acc_stderr": 0.032468167657521745, + "acc_norm": 0.5126050420168067, + "acc_norm_stderr": 0.032468167657521745 + }, + 
"harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.47692307692307695, + "acc_stderr": 0.025323990861736118, + "acc_norm": 0.47692307692307695, + "acc_norm_stderr": 0.025323990861736118 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.59, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.59, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4236453201970443, + "acc_stderr": 0.03476725747649038, + "acc_norm": 0.4236453201970443, + "acc_norm_stderr": 0.03476725747649038 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5161290322580645, + "acc_stderr": 0.028429203176724562, + "acc_norm": 0.5161290322580645, + "acc_norm_stderr": 0.028429203176724562 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7435897435897436, + "acc_stderr": 0.028605953702004253, + "acc_norm": 0.7435897435897436, + "acc_norm_stderr": 0.028605953702004253 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5283018867924528, + "acc_stderr": 0.0307235352490061, + "acc_norm": 0.5283018867924528, + "acc_norm_stderr": 0.0307235352490061 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3111111111111111, + "acc_stderr": 0.02822644674968352, + "acc_norm": 0.3111111111111111, + "acc_norm_stderr": 0.02822644674968352 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3708609271523179, + "acc_stderr": 0.03943966699183629, + "acc_norm": 0.3708609271523179, + "acc_norm_stderr": 0.03943966699183629 + 
}, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6417910447761194, + "acc_stderr": 0.03390393042268814, + "acc_norm": 0.6417910447761194, + "acc_norm_stderr": 0.03390393042268814 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4393063583815029, + "acc_stderr": 0.037842719328874674, + "acc_norm": 0.4393063583815029, + "acc_norm_stderr": 0.037842719328874674 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.024278568024307706, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.024278568024307706 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4236111111111111, + "acc_stderr": 0.041321250197233685, + "acc_norm": 0.4236111111111111, + "acc_norm_stderr": 0.041321250197233685 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.35, + "acc_stderr": 0.04793724854411022, + "acc_norm": 0.35, + "acc_norm_stderr": 0.04793724854411022 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.65, + "acc_stderr": 0.04793724854411019, + "acc_norm": 0.65, + "acc_norm_stderr": 0.04793724854411019 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5086705202312138, + "acc_stderr": 0.02691504735536981, + "acc_norm": 0.5086705202312138, + "acc_norm_stderr": 0.02691504735536981 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4662576687116564, + "acc_stderr": 0.03919415545048409, + "acc_norm": 0.4662576687116564, + "acc_norm_stderr": 0.03919415545048409 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.027777777777777797, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.027777777777777797 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6113989637305699, + "acc_stderr": 0.03517739796373132, + "acc_norm": 0.6113989637305699, + "acc_norm_stderr": 
0.03517739796373132 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.34210526315789475, + "acc_stderr": 0.044629175353369376, + "acc_norm": 0.34210526315789475, + "acc_norm_stderr": 0.044629175353369376 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.618348623853211, + "acc_stderr": 0.0208281485170226, + "acc_norm": 0.618348623853211, + "acc_norm_stderr": 0.0208281485170226 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2698412698412698, + "acc_stderr": 0.03970158273235173, + "acc_norm": 0.2698412698412698, + "acc_norm_stderr": 0.03970158273235173 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5326797385620915, + "acc_stderr": 0.02856869975222588, + "acc_norm": 0.5326797385620915, + "acc_norm_stderr": 0.02856869975222588 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.55, + "acc_stderr": 0.05, + "acc_norm": 0.55, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6198347107438017, + "acc_stderr": 0.04431324501968431, + "acc_norm": 0.6198347107438017, + "acc_norm_stderr": 0.04431324501968431 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.48026315789473684, + "acc_stderr": 0.040657710025626036, + "acc_norm": 0.48026315789473684, + "acc_norm_stderr": 0.040657710025626036 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.41830065359477125, + "acc_stderr": 0.019955975145835542, + "acc_norm": 0.41830065359477125, + "acc_norm_stderr": 0.019955975145835542 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3617021276595745, + "acc_stderr": 0.028663820147199502, + "acc_norm": 0.3617021276595745, + "acc_norm_stderr": 0.028663820147199502 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2767857142857143, + "acc_stderr": 0.04246624336697624, + "acc_norm": 0.2767857142857143, + "acc_norm_stderr": 0.04246624336697624 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.03214952147802748, + "acc_norm": 
0.3333333333333333, + "acc_norm_stderr": 0.03214952147802748 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4522058823529412, + "acc_stderr": 0.030233758551596455, + "acc_norm": 0.4522058823529412, + "acc_norm_stderr": 0.030233758551596455 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4816326530612245, + "acc_stderr": 0.031987615467631264, + "acc_norm": 0.4816326530612245, + "acc_norm_stderr": 0.031987615467631264 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6413502109704642, + "acc_stderr": 0.031219569445301847, + "acc_norm": 0.6413502109704642, + "acc_norm_stderr": 0.031219569445301847 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3363754889178618, + "acc_stderr": 0.012067083079452227, + "acc_norm": 0.3363754889178618, + "acc_norm_stderr": 0.012067083079452227 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5245098039215687, + "acc_stderr": 0.035050931943487976, + "acc_norm": 0.5245098039215687, + "acc_norm_stderr": 0.035050931943487976 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5575757575757576, + "acc_stderr": 0.03878372113711275, + "acc_norm": 0.5575757575757576, + "acc_norm_stderr": 0.03878372113711275 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2594859241126071, + "mc1_stderr": 0.015345409485557956, + "mc2": 0.41393669760039026, + "mc2_stderr": 0.014816891258589298 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5572609208972845, + 
"acc_stderr": 0.017077254131556224, + "acc_norm": 0.6068476977567887, + "acc_norm_stderr": 0.016793262801287075 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + 
"harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "oopsung/Yi-Ko-ENWdpo-v1", + "model_sha": "6c23befc5d1e0e82658f88e50c4544828ad684d4", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/oopsung/Yi-ko-F-v1/result_2023-12-28 06:29:25.json b/oopsung/Yi-ko-F-v1/result_2023-12-28 06:29:25.json new file mode 100644 index 0000000000000000000000000000000000000000..50342f39b4ee26a67b59c845bb4347bfcad32cc9 --- /dev/null +++ b/oopsung/Yi-ko-F-v1/result_2023-12-28 06:29:25.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3430034129692833, + "acc_stderr": 0.01387242322371817, + "acc_norm": 0.4052901023890785, + "acc_norm_stderr": 0.014346869060229325 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3950408285202151, + "acc_stderr": 0.004878603699686037, + "acc_norm": 0.5302728540131448, + "acc_norm_stderr": 0.004980627287147577 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4853801169590643, + "acc_stderr": 0.038331852752130205, + "acc_norm": 0.4853801169590643, + 
"acc_norm_stderr": 0.038331852752130205 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5631067961165048, + "acc_stderr": 0.04911147107365777, + "acc_norm": 0.5631067961165048, + "acc_norm_stderr": 0.04911147107365777 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5478927203065134, + "acc_stderr": 0.017797751493865633, + "acc_norm": 0.5478927203065134, + "acc_norm_stderr": 0.017797751493865633 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4740740740740741, + "acc_stderr": 0.04313531696750574, + "acc_norm": 0.4740740740740741, + "acc_norm_stderr": 0.04313531696750574 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.41702127659574467, + "acc_stderr": 0.03223276266711712, + "acc_norm": 0.41702127659574467, + "acc_norm_stderr": 0.03223276266711712 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.41566265060240964, + "acc_stderr": 0.038367221765980515, + "acc_norm": 0.41566265060240964, + "acc_norm_stderr": 0.038367221765980515 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5176848874598071, + "acc_stderr": 0.028380322849077138, + "acc_norm": 0.5176848874598071, + "acc_norm_stderr": 0.028380322849077138 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.484304932735426, + "acc_stderr": 0.0335412657542081, + "acc_norm": 0.484304932735426, + "acc_norm_stderr": 0.0335412657542081 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5572519083969466, + "acc_stderr": 0.04356447202665069, + "acc_norm": 0.5572519083969466, + "acc_norm_stderr": 0.04356447202665069 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6464646464646465, + "acc_stderr": 0.03406086723547153, + "acc_norm": 0.6464646464646465, + 
"acc_norm_stderr": 0.03406086723547153 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5103448275862069, + "acc_stderr": 0.04165774775728762, + "acc_norm": 0.5103448275862069, + "acc_norm_stderr": 0.04165774775728762 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.04488482852329017, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.04488482852329017 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5126050420168067, + "acc_stderr": 0.032468167657521745, + "acc_norm": 0.5126050420168067, + "acc_norm_stderr": 0.032468167657521745 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.47692307692307695, + "acc_stderr": 0.025323990861736118, + "acc_norm": 0.47692307692307695, + "acc_norm_stderr": 0.025323990861736118 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.03465304488406796, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.03465304488406796 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5096774193548387, + "acc_stderr": 0.028438677998909558, + "acc_norm": 0.5096774193548387, + "acc_norm_stderr": 0.028438677998909558 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7264957264957265, + "acc_stderr": 0.029202540153431187, + "acc_norm": 0.7264957264957265, + "acc_norm_stderr": 0.029202540153431187 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5018867924528302, + "acc_stderr": 
0.03077265364207567, + "acc_norm": 0.5018867924528302, + "acc_norm_stderr": 0.03077265364207567 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4727272727272727, + "acc_stderr": 0.04782001791380063, + "acc_norm": 0.4727272727272727, + "acc_norm_stderr": 0.04782001791380063 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.31851851851851853, + "acc_stderr": 0.028406533090608463, + "acc_norm": 0.31851851851851853, + "acc_norm_stderr": 0.028406533090608463 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33774834437086093, + "acc_stderr": 0.03861557546255168, + "acc_norm": 0.33774834437086093, + "acc_norm_stderr": 0.03861557546255168 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5970149253731343, + "acc_stderr": 0.03468343295111126, + "acc_norm": 0.5970149253731343, + "acc_norm_stderr": 0.03468343295111126 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.42196531791907516, + "acc_stderr": 0.0376574669386515, + "acc_norm": 0.42196531791907516, + "acc_norm_stderr": 0.0376574669386515 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.335978835978836, + "acc_stderr": 0.024326310529149138, + "acc_norm": 0.335978835978836, + "acc_norm_stderr": 0.024326310529149138 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.041553199555931467, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.041553199555931467 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.49710982658959535, + "acc_stderr": 0.02691864538323901, + "acc_norm": 0.49710982658959535, + "acc_norm_stderr": 0.02691864538323901 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 
0.49079754601226994, + "acc_stderr": 0.039277056007874414, + "acc_norm": 0.49079754601226994, + "acc_norm_stderr": 0.039277056007874414 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5216049382716049, + "acc_stderr": 0.027794760105008736, + "acc_norm": 0.5216049382716049, + "acc_norm_stderr": 0.027794760105008736 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5751295336787565, + "acc_stderr": 0.0356747133521254, + "acc_norm": 0.5751295336787565, + "acc_norm_stderr": 0.0356747133521254 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04434600701584925, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04434600701584925 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6091743119266055, + "acc_stderr": 0.020920058346111062, + "acc_norm": 0.6091743119266055, + "acc_norm_stderr": 0.020920058346111062 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.03932537680392871, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.03932537680392871 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4803921568627451, + "acc_stderr": 0.028607893699576066, + "acc_norm": 0.4803921568627451, + "acc_norm_stderr": 0.028607893699576066 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6528925619834711, + "acc_stderr": 0.04345724570292534, + "acc_norm": 0.6528925619834711, + "acc_norm_stderr": 0.04345724570292534 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4868421052631579, + "acc_stderr": 0.04067533136309174, + "acc_norm": 0.4868421052631579, + "acc_norm_stderr": 0.04067533136309174 + }, + 
"harness|ko_mmlu_professional_psychology|5": { + "acc": 0.42483660130718953, + "acc_stderr": 0.019997973035458333, + "acc_norm": 0.42483660130718953, + "acc_norm_stderr": 0.019997973035458333 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32978723404255317, + "acc_stderr": 0.028045946942042398, + "acc_norm": 0.32978723404255317, + "acc_norm_stderr": 0.028045946942042398 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3125, + "acc_stderr": 0.043994650575715215, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.043994650575715215 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3194444444444444, + "acc_stderr": 0.03179876342176851, + "acc_norm": 0.3194444444444444, + "acc_norm_stderr": 0.03179876342176851 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4632352941176471, + "acc_stderr": 0.030290619180485694, + "acc_norm": 0.4632352941176471, + "acc_norm_stderr": 0.030290619180485694 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4326530612244898, + "acc_stderr": 0.03171752824062664, + "acc_norm": 0.4326530612244898, + "acc_norm_stderr": 0.03171752824062664 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6455696202531646, + "acc_stderr": 0.031137304297185805, + "acc_norm": 0.6455696202531646, + "acc_norm_stderr": 0.031137304297185805 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3200782268578879, + "acc_stderr": 0.01191479194763853, + "acc_norm": 
0.3200782268578879, + "acc_norm_stderr": 0.01191479194763853 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5392156862745098, + "acc_stderr": 0.03498501649369527, + "acc_norm": 0.5392156862745098, + "acc_norm_stderr": 0.03498501649369527 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5575757575757576, + "acc_stderr": 0.038783721137112745, + "acc_norm": 0.5575757575757576, + "acc_norm_stderr": 0.038783721137112745 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.26560587515299877, + "mc1_stderr": 0.015461027627253595, + "mc2": 0.410763306988178, + "mc2_stderr": 0.014699934243376766 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5218417945690673, + "acc_stderr": 0.01717394447429438, + "acc_norm": 0.5714285714285714, + "acc_norm_stderr": 0.01701403811929749 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "oopsung/Yi-ko-F-v1", + "model_sha": "6969a37bf3a4f29a2570a584d5cc3dba7c28fd78", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/oopsung/Yi-ko-Fdpo-v1/result_2024-01-02 01:14:26.json b/oopsung/Yi-ko-Fdpo-v1/result_2024-01-02 01:14:26.json new file mode 100644 index 
0000000000000000000000000000000000000000..b880a5ec8bcf711ec0301e4c81c91488fe835b14 --- /dev/null +++ b/oopsung/Yi-ko-Fdpo-v1/result_2024-01-02 01:14:26.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.35409556313993173, + "acc_stderr": 0.01397545412275656, + "acc_norm": 0.42235494880546076, + "acc_norm_stderr": 0.014434138713379981 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4032065325632344, + "acc_stderr": 0.004895390341445628, + "acc_norm": 0.5421230830511851, + "acc_norm_stderr": 0.004972042602001382 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5146198830409356, + "acc_stderr": 0.038331852752130254, + "acc_norm": 0.5146198830409356, + "acc_norm_stderr": 0.038331852752130254 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5436893203883495, + "acc_stderr": 0.049318019942204146, + "acc_norm": 0.5436893203883495, + "acc_norm_stderr": 0.049318019942204146 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5261813537675607, + "acc_stderr": 0.01785543455404199, + "acc_norm": 0.5261813537675607, + "acc_norm_stderr": 0.01785543455404199 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45185185185185184, + "acc_stderr": 0.042992689054808624, + "acc_norm": 0.45185185185185184, + "acc_norm_stderr": 0.042992689054808624 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3872340425531915, + "acc_stderr": 0.03184389265339526, + "acc_norm": 0.3872340425531915, + "acc_norm_stderr": 0.03184389265339526 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.41566265060240964, + "acc_stderr": 0.038367221765980515, + "acc_norm": 0.41566265060240964, + "acc_norm_stderr": 0.038367221765980515 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5080385852090032, + "acc_stderr": 0.028394421370984538, + "acc_norm": 0.5080385852090032, + "acc_norm_stderr": 
0.028394421370984538 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5112107623318386, + "acc_stderr": 0.033549366530984746, + "acc_norm": 0.5112107623318386, + "acc_norm_stderr": 0.033549366530984746 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5267175572519084, + "acc_stderr": 0.04379024936553894, + "acc_norm": 0.5267175572519084, + "acc_norm_stderr": 0.04379024936553894 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6212121212121212, + "acc_stderr": 0.03456088731993747, + "acc_norm": 0.6212121212121212, + "acc_norm_stderr": 0.03456088731993747 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5172413793103449, + "acc_stderr": 0.04164188720169375, + "acc_norm": 0.5172413793103449, + "acc_norm_stderr": 0.04164188720169375 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04690650298201942, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04690650298201942 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5042016806722689, + "acc_stderr": 0.03247734334448111, + "acc_norm": 0.5042016806722689, + "acc_norm_stderr": 0.03247734334448111 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.48717948717948717, + "acc_stderr": 0.025342671293807264, + "acc_norm": 0.48717948717948717, + "acc_norm_stderr": 0.025342671293807264 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.58, + "acc_stderr": 0.04960449637488583, + "acc_norm": 0.58, + "acc_norm_stderr": 0.04960449637488583 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.49074074074074076, + 
"acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4433497536945813, + "acc_stderr": 0.03495334582162934, + "acc_norm": 0.4433497536945813, + "acc_norm_stderr": 0.03495334582162934 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.532258064516129, + "acc_stderr": 0.02838474778881334, + "acc_norm": 0.532258064516129, + "acc_norm_stderr": 0.02838474778881334 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7393162393162394, + "acc_stderr": 0.02876034895652341, + "acc_norm": 0.7393162393162394, + "acc_norm_stderr": 0.02876034895652341 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5358490566037736, + "acc_stderr": 0.030693675018458003, + "acc_norm": 0.5358490566037736, + "acc_norm_stderr": 0.030693675018458003 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5181818181818182, + "acc_stderr": 0.04785964010794916, + "acc_norm": 0.5181818181818182, + "acc_norm_stderr": 0.04785964010794916 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3111111111111111, + "acc_stderr": 0.02822644674968352, + "acc_norm": 0.3111111111111111, + "acc_norm_stderr": 0.02822644674968352 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3708609271523179, + "acc_stderr": 0.03943966699183629, + "acc_norm": 0.3708609271523179, + "acc_norm_stderr": 0.03943966699183629 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6417910447761194, + "acc_stderr": 0.03390393042268814, + "acc_norm": 0.6417910447761194, + "acc_norm_stderr": 0.03390393042268814 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.43352601156069365, + "acc_stderr": 0.03778621079092055, + "acc_norm": 0.43352601156069365, + "acc_norm_stderr": 0.03778621079092055 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3412698412698413, + "acc_stderr": 0.024419234966819064, + "acc_norm": 0.3412698412698413, + "acc_norm_stderr": 0.024419234966819064 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 
0.4444444444444444, + "acc_stderr": 0.041553199555931467, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.041553199555931467 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.66, + "acc_stderr": 0.04760952285695237, + "acc_norm": 0.66, + "acc_norm_stderr": 0.04760952285695237 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.49710982658959535, + "acc_stderr": 0.026918645383239015, + "acc_norm": 0.49710982658959535, + "acc_norm_stderr": 0.026918645383239015 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.48466257668711654, + "acc_stderr": 0.03926522378708843, + "acc_norm": 0.48466257668711654, + "acc_norm_stderr": 0.03926522378708843 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4783950617283951, + "acc_stderr": 0.027794760105008746, + "acc_norm": 0.4783950617283951, + "acc_norm_stderr": 0.027794760105008746 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6010362694300518, + "acc_stderr": 0.03533999094065696, + "acc_norm": 0.6010362694300518, + "acc_norm_stderr": 0.03533999094065696 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3157894736842105, + "acc_stderr": 0.04372748290278008, + "acc_norm": 0.3157894736842105, + "acc_norm_stderr": 0.04372748290278008 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6275229357798165, + "acc_stderr": 0.020728368457638494, + "acc_norm": 0.6275229357798165, + "acc_norm_stderr": 0.020728368457638494 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.040061680838488774, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.040061680838488774 + }, + "harness|ko_mmlu_nutrition|5": { + 
"acc": 0.5196078431372549, + "acc_stderr": 0.028607893699576073, + "acc_norm": 0.5196078431372549, + "acc_norm_stderr": 0.028607893699576073 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6198347107438017, + "acc_stderr": 0.04431324501968431, + "acc_norm": 0.6198347107438017, + "acc_norm_stderr": 0.04431324501968431 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.48026315789473684, + "acc_stderr": 0.040657710025626036, + "acc_norm": 0.48026315789473684, + "acc_norm_stderr": 0.040657710025626036 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4117647058823529, + "acc_stderr": 0.019910377463105935, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.019910377463105935 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3546099290780142, + "acc_stderr": 0.02853865002887864, + "acc_norm": 0.3546099290780142, + "acc_norm_stderr": 0.02853865002887864 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2767857142857143, + "acc_stderr": 0.042466243366976235, + "acc_norm": 0.2767857142857143, + "acc_norm_stderr": 0.042466243366976235 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.33796296296296297, + "acc_stderr": 0.03225941352631295, + "acc_norm": 0.33796296296296297, + "acc_norm_stderr": 0.03225941352631295 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2446927374301676, + "acc_stderr": 0.014378169884098424, + "acc_norm": 0.2446927374301676, + "acc_norm_stderr": 0.014378169884098424 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + 
"harness|ko_mmlu_professional_medicine|5": { + "acc": 0.44485294117647056, + "acc_stderr": 0.030187532060329387, + "acc_norm": 0.44485294117647056, + "acc_norm_stderr": 0.030187532060329387 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4897959183673469, + "acc_stderr": 0.03200255347893782, + "acc_norm": 0.4897959183673469, + "acc_norm_stderr": 0.03200255347893782 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6286919831223629, + "acc_stderr": 0.03145068600744859, + "acc_norm": 0.6286919831223629, + "acc_norm_stderr": 0.03145068600744859 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3363754889178618, + "acc_stderr": 0.012067083079452227, + "acc_norm": 0.3363754889178618, + "acc_norm_stderr": 0.012067083079452227 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5343137254901961, + "acc_stderr": 0.03501038327635897, + "acc_norm": 0.5343137254901961, + "acc_norm_stderr": 0.03501038327635897 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5636363636363636, + "acc_stderr": 0.03872592983524754, + "acc_norm": 0.5636363636363636, + "acc_norm_stderr": 0.03872592983524754 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2607099143206854, + "mc1_stderr": 0.015368841620766372, + "mc2": 0.4139698701961079, + "mc2_stderr": 0.014812141378593572 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5466351829988194, + "acc_stderr": 0.01711541822522687, + "acc_norm": 0.602125147579693, + "acc_norm_stderr": 0.01682795905473339 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + 
"harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + 
"harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "oopsung/Yi-ko-Fdpo-v1", + "model_sha": "2b7f4e0729ec0c221e0461b778437b526d3cb267", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/oopsung/llama2-7b-exo-test-v1/result_2023-11-29 04:04:02.json b/oopsung/llama2-7b-exo-test-v1/result_2023-11-29 04:04:02.json new file mode 100644 index 0000000000000000000000000000000000000000..f79b68c79171167bd2b2ef6efcba5e3606735de9 --- /dev/null +++ b/oopsung/llama2-7b-exo-test-v1/result_2023-11-29 04:04:02.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3165529010238908, + "acc_stderr": 0.01359243151906808, + "acc_norm": 0.37627986348122866, + "acc_norm_stderr": 0.014157022555407165 + }, + "harness|ko_hellaswag|10": { + "acc": 0.38169687313284206, + "acc_stderr": 0.004848099661619696, + "acc_norm": 0.4947221668990241, + "acc_norm_stderr": 0.004989503417767286 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.03377310252209194, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.03377310252209194 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2912621359223301, + "acc_stderr": 0.044986763205729224, + "acc_norm": 0.2912621359223301, + "acc_norm_stderr": 0.044986763205729224 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3486590038314176, + "acc_stderr": 0.017041243143490953, + "acc_norm": 0.3486590038314176, + "acc_norm_stderr": 0.017041243143490953 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34074074074074073, + "acc_stderr": 0.04094376269996795, + "acc_norm": 
0.34074074074074073, + "acc_norm_stderr": 0.04094376269996795 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720683, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720683 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2936170212765957, + "acc_stderr": 0.029771642712491223, + "acc_norm": 0.2936170212765957, + "acc_norm_stderr": 0.029771642712491223 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.35542168674698793, + "acc_stderr": 0.03726214354322415, + "acc_norm": 0.35542168674698793, + "acc_norm_stderr": 0.03726214354322415 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.33440514469453375, + "acc_stderr": 0.026795422327893947, + "acc_norm": 0.33440514469453375, + "acc_norm_stderr": 0.026795422327893947 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.30493273542600896, + "acc_stderr": 0.030898610882477515, + "acc_norm": 0.30493273542600896, + "acc_norm_stderr": 0.030898610882477515 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4122137404580153, + "acc_stderr": 0.043171711948702556, + "acc_norm": 0.4122137404580153, + "acc_norm_stderr": 0.043171711948702556 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.398989898989899, + "acc_stderr": 0.03488901616852731, + "acc_norm": 0.398989898989899, + "acc_norm_stderr": 0.03488901616852731 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3103448275862069, + "acc_stderr": 0.03855289616378949, + "acc_norm": 0.3103448275862069, + "acc_norm_stderr": 0.03855289616378949 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.19607843137254902, + "acc_stderr": 0.03950581861179962, + "acc_norm": 0.19607843137254902, + "acc_norm_stderr": 0.03950581861179962 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3025210084033613, + "acc_stderr": 
0.029837962388291926, + "acc_norm": 0.3025210084033613, + "acc_norm_stderr": 0.029837962388291926 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.28717948717948716, + "acc_stderr": 0.022939925418530627, + "acc_norm": 0.28717948717948716, + "acc_norm_stderr": 0.022939925418530627 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.04330043749650742, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.04330043749650742 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2561576354679803, + "acc_stderr": 0.030712730070982592, + "acc_norm": 0.2561576354679803, + "acc_norm_stderr": 0.030712730070982592 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3, + "acc_stderr": 0.026069362295335134, + "acc_norm": 0.3, + "acc_norm_stderr": 0.026069362295335134 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.3547008547008547, + "acc_stderr": 0.03134250486245402, + "acc_norm": 0.3547008547008547, + "acc_norm_stderr": 0.03134250486245402 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3132075471698113, + "acc_stderr": 0.02854479331905533, + "acc_norm": 0.3132075471698113, + "acc_norm_stderr": 0.02854479331905533 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.36363636363636365, + "acc_stderr": 0.04607582090719976, + "acc_norm": 0.36363636363636365, + "acc_norm_stderr": 0.04607582090719976 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.02671924078371216, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.02671924078371216 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + 
"acc_stderr": 0.03757949922943342, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.03757949922943342 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.03333333333333334, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.03333333333333334 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.24855491329479767, + "acc_stderr": 0.03295304696818318, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 0.03295304696818318 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.24867724867724866, + "acc_stderr": 0.022261817692400175, + "acc_norm": 0.24867724867724866, + "acc_norm_stderr": 0.022261817692400175 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.20833333333333334, + "acc_stderr": 0.03396116205845334, + "acc_norm": 0.20833333333333334, + "acc_norm_stderr": 0.03396116205845334 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.32947976878612717, + "acc_stderr": 0.0253052581318797, + "acc_norm": 0.32947976878612717, + "acc_norm_stderr": 0.0253052581318797 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.22699386503067484, + "acc_stderr": 0.03291099578615769, + "acc_norm": 0.22699386503067484, + "acc_norm_stderr": 0.03291099578615769 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.025630824975621348, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.025630824975621348 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 
0.27979274611398963, + "acc_stderr": 0.03239637046735704, + "acc_norm": 0.27979274611398963, + "acc_norm_stderr": 0.03239637046735704 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.041424397194893624, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.041424397194893624 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3834862385321101, + "acc_stderr": 0.02084715664191598, + "acc_norm": 0.3834862385321101, + "acc_norm_stderr": 0.02084715664191598 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.1984126984126984, + "acc_stderr": 0.035670166752768614, + "acc_norm": 0.1984126984126984, + "acc_norm_stderr": 0.035670166752768614 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.37254901960784315, + "acc_stderr": 0.0276841818833029, + "acc_norm": 0.37254901960784315, + "acc_norm_stderr": 0.0276841818833029 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.3884297520661157, + "acc_stderr": 0.04449270350068382, + "acc_norm": 0.3884297520661157, + "acc_norm_stderr": 0.04449270350068382 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.35526315789473684, + "acc_stderr": 0.03894734487013316, + "acc_norm": 0.35526315789473684, + "acc_norm_stderr": 0.03894734487013316 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.272875816993464, + "acc_stderr": 0.01802047414839358, + "acc_norm": 0.272875816993464, + "acc_norm_stderr": 0.01802047414839358 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.28368794326241137, + "acc_stderr": 0.02689170942834396, + "acc_norm": 0.28368794326241137, + "acc_norm_stderr": 0.02689170942834396 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.26785714285714285, + "acc_stderr": 0.04203277291467762, + "acc_norm": 0.26785714285714285, + "acc_norm_stderr": 0.04203277291467762 + }, + 
"harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.032568505702936484, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.032568505702936484 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932268, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932268 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.39705882352941174, + "acc_stderr": 0.029722152099280058, + "acc_norm": 0.39705882352941174, + "acc_norm_stderr": 0.029722152099280058 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2897959183673469, + "acc_stderr": 0.02904308868330433, + "acc_norm": 0.2897959183673469, + "acc_norm_stderr": 0.02904308868330433 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.37130801687763715, + "acc_stderr": 0.0314506860074486, + "acc_norm": 0.37130801687763715, + "acc_norm_stderr": 0.0314506860074486 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2920469361147327, + "acc_stderr": 0.011613349136271817, + "acc_norm": 0.2920469361147327, + "acc_norm_stderr": 0.011613349136271817 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.03132179803083292, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.03132179803083292 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.28484848484848485, + "acc_stderr": 0.03524390844511784, + "acc_norm": 0.28484848484848485, + "acc_norm_stderr": 0.03524390844511784 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2350061199510404, + "mc1_stderr": 0.014843061507731618, + 
"mc2": 0.369212058529924, + "mc2_stderr": 0.014746397665894159 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2892561983471074, + "acc_stderr": 0.015588800386053555, + "acc_norm": 0.4203069657615112, + "acc_norm_stderr": 0.016970598281177703 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + 
"harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "oopsung/llama2-7b-exo-test-v1", + "model_sha": "a679a02782388b0ae0949b64c69f5681790aa569", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/oopsung/llama2-7b-ko-Orcapus-test-v1/result_2023-11-30 13:49:45.json b/oopsung/llama2-7b-ko-Orcapus-test-v1/result_2023-11-30 13:49:45.json new file mode 100644 index 0000000000000000000000000000000000000000..13aa5fac5c8d7e184327ce7c1da6c9a1729bb416 --- /dev/null +++ b/oopsung/llama2-7b-ko-Orcapus-test-v1/result_2023-11-30 13:49:45.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.32337883959044367, + "acc_stderr": 0.013669421630012127, + "acc_norm": 0.38139931740614336, + "acc_norm_stderr": 0.014194389086685251 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3820952001593308, + "acc_stderr": 0.004849065962692134, + "acc_norm": 0.4947221668990241, + "acc_norm_stderr": 
0.004989503417767286 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.034462962170884265, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.034462962170884265 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.23300970873786409, + "acc_stderr": 0.04185832598928315, + "acc_norm": 0.23300970873786409, + "acc_norm_stderr": 0.04185832598928315 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3397190293742018, + "acc_stderr": 0.016936394114301635, + "acc_norm": 0.3397190293742018, + "acc_norm_stderr": 0.016936394114301635 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.32592592592592595, + "acc_stderr": 0.040491220417025055, + "acc_norm": 0.32592592592592595, + "acc_norm_stderr": 0.040491220417025055 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3148936170212766, + "acc_stderr": 0.030363582197238167, + "acc_norm": 0.3148936170212766, + "acc_norm_stderr": 0.030363582197238167 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.28313253012048195, + "acc_stderr": 0.0350729543137052, + "acc_norm": 0.28313253012048195, + "acc_norm_stderr": 0.0350729543137052 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3279742765273312, + "acc_stderr": 0.0266644108869376, + "acc_norm": 0.3279742765273312, + "acc_norm_stderr": 0.0266644108869376 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.2600896860986547, + "acc_stderr": 0.02944249558585747, + "acc_norm": 0.2600896860986547, + "acc_norm_stderr": 0.02944249558585747 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.32061068702290074, + "acc_stderr": 0.040933292298342784, + "acc_norm": 0.32061068702290074, + "acc_norm_stderr": 0.040933292298342784 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 
0.046056618647183814 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3484848484848485, + "acc_stderr": 0.033948539651564025, + "acc_norm": 0.3484848484848485, + "acc_norm_stderr": 0.033948539651564025 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2827586206896552, + "acc_stderr": 0.03752833958003336, + "acc_norm": 0.2827586206896552, + "acc_norm_stderr": 0.03752833958003336 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.16666666666666666, + "acc_stderr": 0.03708284662416542, + "acc_norm": 0.16666666666666666, + "acc_norm_stderr": 0.03708284662416542 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.2815126050420168, + "acc_stderr": 0.029213549414372156, + "acc_norm": 0.2815126050420168, + "acc_norm_stderr": 0.029213549414372156 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.26153846153846155, + "acc_stderr": 0.02228214120420443, + "acc_norm": 0.26153846153846155, + "acc_norm_stderr": 0.02228214120420443 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.044531975073749834, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.044531975073749834 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3054187192118227, + "acc_stderr": 0.03240661565868408, + "acc_norm": 0.3054187192118227, + "acc_norm_stderr": 0.03240661565868408 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.27741935483870966, + "acc_stderr": 0.025470196835900055, + "acc_norm": 0.27741935483870966, + "acc_norm_stderr": 0.025470196835900055 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.030882736974138653, + 
"acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.030882736974138653 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.32075471698113206, + "acc_stderr": 0.02872750295788027, + "acc_norm": 0.32075471698113206, + "acc_norm_stderr": 0.02872750295788027 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2909090909090909, + "acc_stderr": 0.04350271442923243, + "acc_norm": 0.2909090909090909, + "acc_norm_stderr": 0.04350271442923243 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24814814814814815, + "acc_stderr": 0.0263357394040558, + "acc_norm": 0.24814814814814815, + "acc_norm_stderr": 0.0263357394040558 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.271523178807947, + "acc_stderr": 0.03631329803969655, + "acc_norm": 0.271523178807947, + "acc_norm_stderr": 0.03631329803969655 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.30845771144278605, + "acc_stderr": 0.03265819588512697, + "acc_norm": 0.30845771144278605, + "acc_norm_stderr": 0.03265819588512697 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.27167630057803466, + "acc_stderr": 0.03391750322321659, + "acc_norm": 0.27167630057803466, + "acc_norm_stderr": 0.03391750322321659 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.24603174603174602, + "acc_stderr": 0.022182037202948368, + "acc_norm": 0.24603174603174602, + "acc_norm_stderr": 0.022182037202948368 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.1527777777777778, + "acc_stderr": 0.030085743248565666, + "acc_norm": 0.1527777777777778, + "acc_norm_stderr": 0.030085743248565666 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.31213872832369943, + 
"acc_stderr": 0.024946792225272314, + "acc_norm": 0.31213872832369943, + "acc_norm_stderr": 0.024946792225272314 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2883435582822086, + "acc_stderr": 0.035590395316173425, + "acc_norm": 0.2883435582822086, + "acc_norm_stderr": 0.035590395316173425 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3425925925925926, + "acc_stderr": 0.026406145973625672, + "acc_norm": 0.3425925925925926, + "acc_norm_stderr": 0.026406145973625672 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.24870466321243523, + "acc_stderr": 0.03119584087770029, + "acc_norm": 0.24870466321243523, + "acc_norm_stderr": 0.03119584087770029 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.04096985139843671, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.04096985139843671 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3211009174311927, + "acc_stderr": 0.020018149772733747, + "acc_norm": 0.3211009174311927, + "acc_norm_stderr": 0.020018149772733747 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.15873015873015872, + "acc_stderr": 0.03268454013011744, + "acc_norm": 0.15873015873015872, + "acc_norm_stderr": 0.03268454013011744 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.33986928104575165, + "acc_stderr": 0.027121956071388863, + "acc_norm": 0.33986928104575165, + "acc_norm_stderr": 0.027121956071388863 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.39669421487603307, + "acc_stderr": 0.044658697805310094, + "acc_norm": 0.39669421487603307, + "acc_norm_stderr": 0.044658697805310094 + }, + 
"harness|ko_mmlu_astronomy|5": { + "acc": 0.34210526315789475, + "acc_stderr": 0.03860731599316092, + "acc_norm": 0.34210526315789475, + "acc_norm_stderr": 0.03860731599316092 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.28594771241830064, + "acc_stderr": 0.01828048507295468, + "acc_norm": 0.28594771241830064, + "acc_norm_stderr": 0.01828048507295468 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.28368794326241137, + "acc_stderr": 0.026891709428343957, + "acc_norm": 0.28368794326241137, + "acc_norm_stderr": 0.026891709428343957 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.32142857142857145, + "acc_stderr": 0.04432804055291518, + "acc_norm": 0.32142857142857145, + "acc_norm_stderr": 0.04432804055291518 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.30092592592592593, + "acc_stderr": 0.03128039084329879, + "acc_norm": 0.30092592592592593, + "acc_norm_stderr": 0.03128039084329879 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2536312849162011, + "acc_stderr": 0.014551553659369922, + "acc_norm": 0.2536312849162011, + "acc_norm_stderr": 0.014551553659369922 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036846, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036846 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.35294117647058826, + "acc_stderr": 0.029029422815681404, + "acc_norm": 0.35294117647058826, + "acc_norm_stderr": 0.029029422815681404 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.23673469387755103, + "acc_stderr": 0.027212835884073163, + "acc_norm": 0.23673469387755103, + "acc_norm_stderr": 0.027212835884073163 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.35443037974683544, + "acc_stderr": 0.031137304297185812, + 
"acc_norm": 0.35443037974683544, + "acc_norm_stderr": 0.031137304297185812 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2842242503259452, + "acc_stderr": 0.011519880596516076, + "acc_norm": 0.2842242503259452, + "acc_norm_stderr": 0.011519880596516076 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.031321798030832904, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.031321798030832904 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3212121212121212, + "acc_stderr": 0.03646204963253812, + "acc_norm": 0.3212121212121212, + "acc_norm_stderr": 0.03646204963253812 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24112607099143207, + "mc1_stderr": 0.014974827279752322, + "mc2": 0.3821392578358511, + "mc2_stderr": 0.014847374366938948 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2939787485242031, + "acc_stderr": 0.015663242569091115, + "acc_norm": 0.4344746162927981, + "acc_norm_stderr": 0.01704209862082494 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + 
"harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "oopsung/llama2-7b-ko-Orcapus-test-v1", + "model_sha": "714ee1162de8359dd817b5c39158055213897f6a", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + 
"override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/oopsung/llama2-7b-ko-wiki-test-v1/result_2023-12-05 23:06:00.json b/oopsung/llama2-7b-ko-wiki-test-v1/result_2023-12-05 23:06:00.json new file mode 100644 index 0000000000000000000000000000000000000000..ed27d181f7008f193894bda342917cd0f28c0293 --- /dev/null +++ b/oopsung/llama2-7b-ko-wiki-test-v1/result_2023-12-05 23:06:00.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3191126279863481, + "acc_stderr": 0.013621696119173299, + "acc_norm": 0.3771331058020478, + "acc_norm_stderr": 0.014163366896192598 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3828918542123083, + "acc_stderr": 0.004850988215167545, + "acc_norm": 0.49522007568213505, + "acc_norm_stderr": 0.004989553396413108 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.28654970760233917, + "acc_stderr": 0.03467826685703826, + "acc_norm": 0.28654970760233917, + "acc_norm_stderr": 0.03467826685703826 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2912621359223301, + "acc_stderr": 0.044986763205729224, + "acc_norm": 0.2912621359223301, + "acc_norm_stderr": 0.044986763205729224 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3537675606641124, + "acc_stderr": 0.017098184708161903, + "acc_norm": 0.3537675606641124, + "acc_norm_stderr": 0.017098184708161903 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.32592592592592595, + "acc_stderr": 0.04049122041702506, + "acc_norm": 0.32592592592592595, + "acc_norm_stderr": 0.04049122041702506 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720683, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720683 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2978723404255319, + "acc_stderr": 0.02989614568209546, + "acc_norm": 0.2978723404255319, + "acc_norm_stderr": 0.02989614568209546 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3493975903614458, + "acc_stderr": 
0.03711725190740749, + "acc_norm": 0.3493975903614458, + "acc_norm_stderr": 0.03711725190740749 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3279742765273312, + "acc_stderr": 0.0266644108869376, + "acc_norm": 0.3279742765273312, + "acc_norm_stderr": 0.0266644108869376 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.31390134529147984, + "acc_stderr": 0.031146796482972465, + "acc_norm": 0.31390134529147984, + "acc_norm_stderr": 0.031146796482972465 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3969465648854962, + "acc_stderr": 0.04291135671009224, + "acc_norm": 0.3969465648854962, + "acc_norm_stderr": 0.04291135671009224 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.398989898989899, + "acc_stderr": 0.03488901616852731, + "acc_norm": 0.398989898989899, + "acc_norm_stderr": 0.03488901616852731 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3103448275862069, + "acc_stderr": 0.03855289616378949, + "acc_norm": 0.3103448275862069, + "acc_norm_stderr": 0.03855289616378949 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.19607843137254902, + "acc_stderr": 0.039505818611799616, + "acc_norm": 0.19607843137254902, + "acc_norm_stderr": 0.039505818611799616 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.31512605042016806, + "acc_stderr": 0.03017680828897434, + "acc_norm": 0.31512605042016806, + "acc_norm_stderr": 0.03017680828897434 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2846153846153846, + "acc_stderr": 0.022878322799706263, + "acc_norm": 0.2846153846153846, + "acc_norm_stderr": 0.022878322799706263 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_global_facts|5": { + 
"acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.044531975073749834, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.044531975073749834 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.27586206896551724, + "acc_stderr": 0.03144712581678242, + "acc_norm": 0.27586206896551724, + "acc_norm_stderr": 0.03144712581678242 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2903225806451613, + "acc_stderr": 0.025822106119415898, + "acc_norm": 0.2903225806451613, + "acc_norm_stderr": 0.025822106119415898 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.34615384615384615, + "acc_stderr": 0.0311669573672359, + "acc_norm": 0.34615384615384615, + "acc_norm_stderr": 0.0311669573672359 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.32075471698113206, + "acc_stderr": 0.02872750295788027, + "acc_norm": 0.32075471698113206, + "acc_norm_stderr": 0.02872750295788027 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.36363636363636365, + "acc_stderr": 0.04607582090719976, + "acc_norm": 0.36363636363636365, + "acc_norm_stderr": 0.04607582090719976 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.02671924078371216, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.02671924078371216 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.037804458505267334, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.037804458505267334 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.34328358208955223, + "acc_stderr": 0.03357379665433431, + "acc_norm": 0.34328358208955223, + "acc_norm_stderr": 0.03357379665433431 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.24855491329479767, + "acc_stderr": 0.03295304696818318, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 
0.03295304696818318 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2698412698412698, + "acc_stderr": 0.022860838309232072, + "acc_norm": 0.2698412698412698, + "acc_norm_stderr": 0.022860838309232072 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2152777777777778, + "acc_stderr": 0.03437079344106134, + "acc_norm": 0.2152777777777778, + "acc_norm_stderr": 0.03437079344106134 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.34, + "acc_stderr": 0.047609522856952365, + "acc_norm": 0.34, + "acc_norm_stderr": 0.047609522856952365 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.32947976878612717, + "acc_stderr": 0.0253052581318797, + "acc_norm": 0.32947976878612717, + "acc_norm_stderr": 0.0253052581318797 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.25153374233128833, + "acc_stderr": 0.034089978868575295, + "acc_norm": 0.25153374233128833, + "acc_norm_stderr": 0.034089978868575295 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3117283950617284, + "acc_stderr": 0.025773111169630453, + "acc_norm": 0.3117283950617284, + "acc_norm_stderr": 0.025773111169630453 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.2849740932642487, + "acc_stderr": 0.03257714077709662, + "acc_norm": 0.2849740932642487, + "acc_norm_stderr": 0.03257714077709662 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.041424397194893624, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.041424397194893624 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3871559633027523, + "acc_stderr": 0.02088423199264345, + "acc_norm": 0.3871559633027523, + 
"acc_norm_stderr": 0.02088423199264345 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.20634920634920634, + "acc_stderr": 0.03619604524124249, + "acc_norm": 0.20634920634920634, + "acc_norm_stderr": 0.03619604524124249 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.38562091503267976, + "acc_stderr": 0.027870745278290303, + "acc_norm": 0.38562091503267976, + "acc_norm_stderr": 0.027870745278290303 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.371900826446281, + "acc_stderr": 0.04412015806624504, + "acc_norm": 0.371900826446281, + "acc_norm_stderr": 0.04412015806624504 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.34868421052631576, + "acc_stderr": 0.0387813988879761, + "acc_norm": 0.34868421052631576, + "acc_norm_stderr": 0.0387813988879761 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2761437908496732, + "acc_stderr": 0.018087276935663137, + "acc_norm": 0.2761437908496732, + "acc_norm_stderr": 0.018087276935663137 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.28368794326241137, + "acc_stderr": 0.02689170942834396, + "acc_norm": 0.28368794326241137, + "acc_norm_stderr": 0.02689170942834396 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25892857142857145, + "acc_stderr": 0.041577515398656284, + "acc_norm": 0.25892857142857145, + "acc_norm_stderr": 0.041577515398656284 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.35648148148148145, + "acc_stderr": 0.03266478331527272, + "acc_norm": 0.35648148148148145, + "acc_norm_stderr": 0.03266478331527272 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.27, + "acc_stderr": 
0.04461960433384741, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.375, + "acc_stderr": 0.029408372932278746, + "acc_norm": 0.375, + "acc_norm_stderr": 0.029408372932278746 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2816326530612245, + "acc_stderr": 0.02879518557429129, + "acc_norm": 0.2816326530612245, + "acc_norm_stderr": 0.02879518557429129 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.3628691983122363, + "acc_stderr": 0.031299208255302136, + "acc_norm": 0.3628691983122363, + "acc_norm_stderr": 0.031299208255302136 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.28878748370273793, + "acc_stderr": 0.011574914757219962, + "acc_norm": 0.28878748370273793, + "acc_norm_stderr": 0.011574914757219962 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.030964517926923403, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.030964517926923403 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2909090909090909, + "acc_stderr": 0.03546563019624337, + "acc_norm": 0.2909090909090909, + "acc_norm_stderr": 0.03546563019624337 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.23623011015911874, + "mc1_stderr": 0.014869755015871101, + "mc2": 0.3714247171675403, + "mc2_stderr": 0.014762111514590639 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2939787485242031, + "acc_stderr": 0.015663242569091112, + "acc_norm": 0.4167650531286895, + "acc_norm_stderr": 0.01695048914610883 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + 
"harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + 
"harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "oopsung/llama2-7b-ko-wiki-test-v1", + "model_sha": "12e202c014b0c901644b34a9d422be93f5a7f959", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/oopsung/llama2-7b-koNqa-test-v1/result_2023-11-30 07:12:33.json b/oopsung/llama2-7b-koNqa-test-v1/result_2023-11-30 07:12:33.json new file mode 100644 index 0000000000000000000000000000000000000000..60eb1274db06f1ece813ea3907cc261f3c29f97a --- /dev/null +++ b/oopsung/llama2-7b-koNqa-test-v1/result_2023-11-30 07:12:33.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3148464163822526, + "acc_stderr": 0.013572657703084948, + "acc_norm": 0.38054607508532423, + "acc_norm_stderr": 0.014188277712349815 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3852818163712408, + "acc_stderr": 0.004856672322044455, + "acc_norm": 0.4971121290579566, + "acc_norm_stderr": 0.004989698183207841 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.25146198830409355, + "acc_stderr": 0.033275044238468436, + "acc_norm": 0.25146198830409355, + "acc_norm_stderr": 0.033275044238468436 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2912621359223301, + "acc_stderr": 0.044986763205729224, + "acc_norm": 0.2912621359223301, + "acc_norm_stderr": 0.044986763205729224 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 
0.32950191570881227, + "acc_stderr": 0.016808322261740446, + "acc_norm": 0.32950191570881227, + "acc_norm_stderr": 0.016808322261740446 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.31851851851851853, + "acc_stderr": 0.04024778401977111, + "acc_norm": 0.31851851851851853, + "acc_norm_stderr": 0.04024778401977111 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.32340425531914896, + "acc_stderr": 0.030579442773610337, + "acc_norm": 0.32340425531914896, + "acc_norm_stderr": 0.030579442773610337 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3313253012048193, + "acc_stderr": 0.03664314777288087, + "acc_norm": 0.3313253012048193, + "acc_norm_stderr": 0.03664314777288087 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3279742765273312, + "acc_stderr": 0.0266644108869376, + "acc_norm": 0.3279742765273312, + "acc_norm_stderr": 0.0266644108869376 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.30493273542600896, + "acc_stderr": 0.030898610882477515, + "acc_norm": 0.30493273542600896, + "acc_norm_stderr": 0.030898610882477515 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3435114503816794, + "acc_stderr": 0.041649760719448786, + "acc_norm": 0.3435114503816794, + "acc_norm_stderr": 0.041649760719448786 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3181818181818182, + "acc_stderr": 0.033184773338453315, + "acc_norm": 0.3181818181818182, + "acc_norm_stderr": 0.033184773338453315 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2413793103448276, + "acc_stderr": 0.03565998174135302, + "acc_norm": 0.2413793103448276, + "acc_norm_stderr": 0.03565998174135302 + }, + "harness|ko_mmlu_college_physics|5": { 
+ "acc": 0.17647058823529413, + "acc_stderr": 0.03793281185307809, + "acc_norm": 0.17647058823529413, + "acc_norm_stderr": 0.03793281185307809 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.25210084033613445, + "acc_stderr": 0.028205545033277726, + "acc_norm": 0.25210084033613445, + "acc_norm_stderr": 0.028205545033277726 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2512820512820513, + "acc_stderr": 0.021992016662370547, + "acc_norm": 0.2512820512820513, + "acc_norm_stderr": 0.021992016662370547 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.28703703703703703, + "acc_stderr": 0.043733130409147614, + "acc_norm": 0.28703703703703703, + "acc_norm_stderr": 0.043733130409147614 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.27586206896551724, + "acc_stderr": 0.031447125816782426, + "acc_norm": 0.27586206896551724, + "acc_norm_stderr": 0.031447125816782426 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.27419354838709675, + "acc_stderr": 0.025378139970885203, + "acc_norm": 0.27419354838709675, + "acc_norm_stderr": 0.025378139970885203 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.030882736974138653, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.030882736974138653 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.33584905660377357, + "acc_stderr": 0.029067220146644826, + "acc_norm": 0.33584905660377357, + "acc_norm_stderr": 0.029067220146644826 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2909090909090909, + "acc_stderr": 0.04350271442923243, + "acc_norm": 0.2909090909090909, + "acc_norm_stderr": 
0.04350271442923243 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25555555555555554, + "acc_stderr": 0.02659393910184408, + "acc_norm": 0.25555555555555554, + "acc_norm_stderr": 0.02659393910184408 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.26490066225165565, + "acc_stderr": 0.036030385453603826, + "acc_norm": 0.26490066225165565, + "acc_norm_stderr": 0.036030385453603826 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.3034825870646766, + "acc_stderr": 0.03251006816458618, + "acc_norm": 0.3034825870646766, + "acc_norm_stderr": 0.03251006816458618 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2543352601156069, + "acc_stderr": 0.0332055644308557, + "acc_norm": 0.2543352601156069, + "acc_norm_stderr": 0.0332055644308557 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.24603174603174602, + "acc_stderr": 0.022182037202948368, + "acc_norm": 0.24603174603174602, + "acc_norm_stderr": 0.022182037202948368 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.1597222222222222, + "acc_stderr": 0.030635578972093278, + "acc_norm": 0.1597222222222222, + "acc_norm_stderr": 0.030635578972093278 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.22, + "acc_stderr": 0.041633319989322695, + "acc_norm": 0.22, + "acc_norm_stderr": 0.041633319989322695 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.31213872832369943, + "acc_stderr": 0.024946792225272314, + "acc_norm": 0.31213872832369943, + "acc_norm_stderr": 0.024946792225272314 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.26380368098159507, + "acc_stderr": 0.034624199316156234, + "acc_norm": 0.26380368098159507, + "acc_norm_stderr": 0.034624199316156234 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.026229649178821163, + "acc_norm": 
0.3333333333333333, + "acc_norm_stderr": 0.026229649178821163 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.2538860103626943, + "acc_stderr": 0.03141024780565319, + "acc_norm": 0.2538860103626943, + "acc_norm_stderr": 0.03141024780565319 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.04142439719489362, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.04142439719489362 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3724770642201835, + "acc_stderr": 0.020728368457638494, + "acc_norm": 0.3724770642201835, + "acc_norm_stderr": 0.020728368457638494 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.15079365079365079, + "acc_stderr": 0.03200686497287392, + "acc_norm": 0.15079365079365079, + "acc_norm_stderr": 0.03200686497287392 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3627450980392157, + "acc_stderr": 0.027530078447110307, + "acc_norm": 0.3627450980392157, + "acc_norm_stderr": 0.027530078447110307 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.4132231404958678, + "acc_stderr": 0.04495087843548408, + "acc_norm": 0.4132231404958678, + "acc_norm_stderr": 0.04495087843548408 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.375, + "acc_stderr": 0.039397364351956274, + "acc_norm": 0.375, + "acc_norm_stderr": 0.039397364351956274 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.28104575163398693, + "acc_stderr": 0.01818521895431808, + "acc_norm": 0.28104575163398693, + "acc_norm_stderr": 0.01818521895431808 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2624113475177305, + "acc_stderr": 
0.02624492034984301, + "acc_norm": 0.2624113475177305, + "acc_norm_stderr": 0.02624492034984301 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.33035714285714285, + "acc_stderr": 0.04464285714285712, + "acc_norm": 0.33035714285714285, + "acc_norm_stderr": 0.04464285714285712 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.24074074074074073, + "acc_stderr": 0.02915752218460559, + "acc_norm": 0.24074074074074073, + "acc_norm_stderr": 0.02915752218460559 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24134078212290502, + "acc_stderr": 0.014310999547961441, + "acc_norm": 0.24134078212290502, + "acc_norm_stderr": 0.014310999547961441 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3014705882352941, + "acc_stderr": 0.027875982114273168, + "acc_norm": 0.3014705882352941, + "acc_norm_stderr": 0.027875982114273168 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2612244897959184, + "acc_stderr": 0.028123429335142777, + "acc_norm": 0.2612244897959184, + "acc_norm_stderr": 0.028123429335142777 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.34177215189873417, + "acc_stderr": 0.030874537537553617, + "acc_norm": 0.34177215189873417, + "acc_norm_stderr": 0.030874537537553617 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2790091264667536, + "acc_stderr": 0.011455208832803546, + "acc_norm": 0.2790091264667536, + "acc_norm_stderr": 0.011455208832803546 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.25, + "acc_stderr": 0.03039153369274154, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03039153369274154 + }, + 
"harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3575757575757576, + "acc_stderr": 0.03742597043806586, + "acc_norm": 0.3575757575757576, + "acc_norm_stderr": 0.03742597043806586 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2386780905752754, + "mc1_stderr": 0.014922629695456418, + "mc2": 0.3738242893549986, + "mc2_stderr": 0.014771561798919752 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.29161747343565525, + "acc_stderr": 0.015626276690070242, + "acc_norm": 0.4214876033057851, + "acc_norm_stderr": 0.01697710193260152 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + 
"harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "oopsung/llama2-7b-koNqa-test-v1", + "model_sha": "6a983a209cefd4285f18b60c380e818544155175", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/oopsung/llama2-7b-n-ox-test-v1/result_2023-11-29 22:55:16.json b/oopsung/llama2-7b-n-ox-test-v1/result_2023-11-29 22:55:16.json new file mode 100644 index 0000000000000000000000000000000000000000..2f7cb433b406ce2292efd1f565b55168b918244a --- /dev/null +++ b/oopsung/llama2-7b-n-ox-test-v1/result_2023-11-29 22:55:16.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + 
"acc": 0.3122866894197952, + "acc_stderr": 0.013542598541688065, + "acc_norm": 0.38054607508532423, + "acc_norm_stderr": 0.014188277712349814 + }, + "harness|ko_hellaswag|10": { + "acc": 0.38498307110137425, + "acc_stderr": 0.00485596857899873, + "acc_norm": 0.49731129257120094, + "acc_norm_stderr": 0.004989709267191029 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.03301405946987249, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.03301405946987249 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2815533980582524, + "acc_stderr": 0.044532548363264673, + "acc_norm": 0.2815533980582524, + "acc_norm_stderr": 0.044532548363264673 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.33205619412515963, + "acc_stderr": 0.01684117465529572, + "acc_norm": 0.33205619412515963, + "acc_norm_stderr": 0.01684117465529572 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.32592592592592595, + "acc_stderr": 0.040491220417025055, + "acc_norm": 0.32592592592592595, + "acc_norm_stderr": 0.040491220417025055 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3148936170212766, + "acc_stderr": 0.03036358219723816, + "acc_norm": 0.3148936170212766, + "acc_norm_stderr": 0.03036358219723816 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3313253012048193, + "acc_stderr": 0.03664314777288087, + "acc_norm": 0.3313253012048193, + "acc_norm_stderr": 0.03664314777288087 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3247588424437299, + "acc_stderr": 0.026596782287697046, + "acc_norm": 0.3247588424437299, + "acc_norm_stderr": 0.026596782287697046 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.30493273542600896, + "acc_stderr": 0.030898610882477515, + "acc_norm": 0.30493273542600896, + "acc_norm_stderr": 0.030898610882477515 + }, + 
"harness|ko_mmlu_human_sexuality|5": { + "acc": 0.366412213740458, + "acc_stderr": 0.04225875451969638, + "acc_norm": 0.366412213740458, + "acc_norm_stderr": 0.04225875451969638 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.03358618145732524, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.03358618145732524 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2482758620689655, + "acc_stderr": 0.036001056927277716, + "acc_norm": 0.2482758620689655, + "acc_norm_stderr": 0.036001056927277716 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.17647058823529413, + "acc_stderr": 0.03793281185307809, + "acc_norm": 0.17647058823529413, + "acc_norm_stderr": 0.03793281185307809 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.25630252100840334, + "acc_stderr": 0.028359620870533953, + "acc_norm": 0.25630252100840334, + "acc_norm_stderr": 0.028359620870533953 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2512820512820513, + "acc_stderr": 0.021992016662370547, + "acc_norm": 0.2512820512820513, + "acc_norm_stderr": 0.021992016662370547 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.28703703703703703, + "acc_stderr": 0.043733130409147614, + "acc_norm": 0.28703703703703703, + "acc_norm_stderr": 0.043733130409147614 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.27586206896551724, + "acc_stderr": 0.031447125816782426, + "acc_norm": 0.27586206896551724, + "acc_norm_stderr": 
0.031447125816782426 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2806451612903226, + "acc_stderr": 0.02556060472102288, + "acc_norm": 0.2806451612903226, + "acc_norm_stderr": 0.02556060472102288 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.3247863247863248, + "acc_stderr": 0.03067902276549883, + "acc_norm": 0.3247863247863248, + "acc_norm_stderr": 0.03067902276549883 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.33962264150943394, + "acc_stderr": 0.02914690474779834, + "acc_norm": 0.33962264150943394, + "acc_norm_stderr": 0.02914690474779834 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2909090909090909, + "acc_stderr": 0.04350271442923243, + "acc_norm": 0.2909090909090909, + "acc_norm_stderr": 0.04350271442923243 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.026842057873833706, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.026842057873833706 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2582781456953642, + "acc_stderr": 0.035737053147634576, + "acc_norm": 0.2582781456953642, + "acc_norm_stderr": 0.035737053147634576 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.29850746268656714, + "acc_stderr": 0.032357437893550424, + "acc_norm": 0.29850746268656714, + "acc_norm_stderr": 0.032357437893550424 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2658959537572254, + "acc_stderr": 0.033687629322594295, + "acc_norm": 0.2658959537572254, + "acc_norm_stderr": 0.033687629322594295 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.24338624338624337, + "acc_stderr": 0.02210112878741542, + "acc_norm": 0.24338624338624337, + "acc_norm_stderr": 0.02210112878741542 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.1736111111111111, + "acc_stderr": 0.031674733837957166, + "acc_norm": 0.1736111111111111, + "acc_norm_stderr": 0.031674733837957166 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.26, + 
"acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.3179190751445087, + "acc_stderr": 0.025070713719153193, + "acc_norm": 0.3179190751445087, + "acc_norm_stderr": 0.025070713719153193 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.26993865030674846, + "acc_stderr": 0.034878251684978906, + "acc_norm": 0.26993865030674846, + "acc_norm_stderr": 0.034878251684978906 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.33641975308641975, + "acc_stderr": 0.02628973494595293, + "acc_norm": 0.33641975308641975, + "acc_norm_stderr": 0.02628973494595293 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.24352331606217617, + "acc_stderr": 0.03097543638684543, + "acc_norm": 0.24352331606217617, + "acc_norm_stderr": 0.03097543638684543 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.042270544512322, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.042270544512322 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3908256880733945, + "acc_stderr": 0.02092005834611106, + "acc_norm": 0.3908256880733945, + "acc_norm_stderr": 0.02092005834611106 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.14285714285714285, + "acc_stderr": 0.03129843185743809, + "acc_norm": 0.14285714285714285, + "acc_norm_stderr": 0.03129843185743809 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.35294117647058826, + "acc_stderr": 0.027363593284684948, + "acc_norm": 0.35294117647058826, + "acc_norm_stderr": 0.027363593284684948 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.3, 
+ "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.4214876033057851, + "acc_stderr": 0.045077322787750944, + "acc_norm": 0.4214876033057851, + "acc_norm_stderr": 0.045077322787750944 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.375, + "acc_stderr": 0.039397364351956274, + "acc_norm": 0.375, + "acc_norm_stderr": 0.039397364351956274 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.28594771241830064, + "acc_stderr": 0.01828048507295468, + "acc_norm": 0.28594771241830064, + "acc_norm_stderr": 0.01828048507295468 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.26595744680851063, + "acc_stderr": 0.026358065698880592, + "acc_norm": 0.26595744680851063, + "acc_norm_stderr": 0.026358065698880592 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.32142857142857145, + "acc_stderr": 0.04432804055291518, + "acc_norm": 0.32142857142857145, + "acc_norm_stderr": 0.04432804055291518 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.27314814814814814, + "acc_stderr": 0.030388051301678116, + "acc_norm": 0.27314814814814814, + "acc_norm_stderr": 0.030388051301678116 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.31985294117647056, + "acc_stderr": 0.028332959514031236, + "acc_norm": 0.31985294117647056, + "acc_norm_stderr": 0.028332959514031236 + }, + 
"harness|ko_mmlu_security_studies|5": { + "acc": 0.2571428571428571, + "acc_stderr": 0.027979823538744543, + "acc_norm": 0.2571428571428571, + "acc_norm_stderr": 0.027979823538744543 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.3459915611814346, + "acc_stderr": 0.030964810588786713, + "acc_norm": 0.3459915611814346, + "acc_norm_stderr": 0.030964810588786713 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2842242503259452, + "acc_stderr": 0.011519880596516076, + "acc_norm": 0.2842242503259452, + "acc_norm_stderr": 0.011519880596516076 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.27941176470588236, + "acc_stderr": 0.031493281045079556, + "acc_norm": 0.27941176470588236, + "acc_norm_stderr": 0.031493281045079556 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.34545454545454546, + "acc_stderr": 0.03713158067481912, + "acc_norm": 0.34545454545454546, + "acc_norm_stderr": 0.03713158067481912 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2386780905752754, + "mc1_stderr": 0.014922629695456418, + "mc2": 0.3776665539033974, + "mc2_stderr": 0.01480593605333011 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2939787485242031, + "acc_stderr": 0.015663242569091115, + "acc_norm": 0.4214876033057851, + "acc_norm_stderr": 0.01697710193260152 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 
1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + 
"harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "oopsung/llama2-7b-n-ox-test-v1", + "model_sha": "0eee95b8a16fdeec4b800e0d4d4c9fd2f340f3d1", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/oopsung/llama2-7b-n-test-v1/result_2023-11-29 13:28:28.json b/oopsung/llama2-7b-n-test-v1/result_2023-11-29 13:28:28.json new file mode 100644 index 0000000000000000000000000000000000000000..7df01f4b081473fb5a10b3cd8bd8d41b9771017d --- /dev/null +++ b/oopsung/llama2-7b-n-test-v1/result_2023-11-29 13:28:28.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.31569965870307165, + "acc_stderr": 0.013582571095815291, + "acc_norm": 0.378839590443686, + "acc_norm_stderr": 0.014175915490000326 + }, + "harness|ko_hellaswag|10": { + "acc": 0.38488348934475203, + "acc_stderr": 0.004855733568540273, + "acc_norm": 0.4971121290579566, + "acc_norm_stderr": 0.004989698183207841 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.2573099415204678, + "acc_stderr": 0.03352799844161865, + "acc_norm": 0.2573099415204678, + "acc_norm_stderr": 0.03352799844161865 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.27184466019417475, + "acc_stderr": 0.044052680241409216, + "acc_norm": 0.27184466019417475, + "acc_norm_stderr": 0.044052680241409216 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3499361430395913, + "acc_stderr": 0.017055679797150426, + "acc_norm": 0.3499361430395913, + "acc_norm_stderr": 0.017055679797150426 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04072314811876837, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04072314811876837 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + 
"harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.32340425531914896, + "acc_stderr": 0.030579442773610337, + "acc_norm": 0.32340425531914896, + "acc_norm_stderr": 0.030579442773610337 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3253012048192771, + "acc_stderr": 0.03647168523683227, + "acc_norm": 0.3253012048192771, + "acc_norm_stderr": 0.03647168523683227 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3279742765273312, + "acc_stderr": 0.0266644108869376, + "acc_norm": 0.3279742765273312, + "acc_norm_stderr": 0.0266644108869376 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3094170403587444, + "acc_stderr": 0.031024411740572203, + "acc_norm": 0.3094170403587444, + "acc_norm_stderr": 0.031024411740572203 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.35877862595419846, + "acc_stderr": 0.04206739313864908, + "acc_norm": 0.35877862595419846, + "acc_norm_stderr": 0.04206739313864908 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.03358618145732524, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.03358618145732524 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.25517241379310346, + "acc_stderr": 0.03632984052707842, + "acc_norm": 0.25517241379310346, + "acc_norm_stderr": 0.03632984052707842 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.17647058823529413, + "acc_stderr": 0.03793281185307809, + "acc_norm": 0.17647058823529413, + "acc_norm_stderr": 0.03793281185307809 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.2605042016806723, + "acc_stderr": 0.028510251512341923, + "acc_norm": 0.2605042016806723, + "acc_norm_stderr": 0.028510251512341923 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.24871794871794872, + "acc_stderr": 0.0219169577092138, + "acc_norm": 
0.24871794871794872, + "acc_norm_stderr": 0.0219169577092138 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.28703703703703703, + "acc_stderr": 0.043733130409147614, + "acc_norm": 0.28703703703703703, + "acc_norm_stderr": 0.043733130409147614 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.28078817733990147, + "acc_stderr": 0.03161856335358609, + "acc_norm": 0.28078817733990147, + "acc_norm_stderr": 0.03161856335358609 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.27741935483870966, + "acc_stderr": 0.025470196835900055, + "acc_norm": 0.27741935483870966, + "acc_norm_stderr": 0.025470196835900055 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.3247863247863248, + "acc_stderr": 0.03067902276549883, + "acc_norm": 0.3247863247863248, + "acc_norm_stderr": 0.03067902276549883 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3320754716981132, + "acc_stderr": 0.02898545565233439, + "acc_norm": 0.3320754716981132, + "acc_norm_stderr": 0.02898545565233439 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2909090909090909, + "acc_stderr": 0.04350271442923243, + "acc_norm": 0.2909090909090909, + "acc_norm_stderr": 0.04350271442923243 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25555555555555554, + "acc_stderr": 0.026593939101844075, + "acc_norm": 0.25555555555555554, + "acc_norm_stderr": 0.026593939101844075 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2582781456953642, + "acc_stderr": 0.035737053147634576, + "acc_norm": 0.2582781456953642, + "acc_norm_stderr": 0.035737053147634576 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.3034825870646766, + "acc_stderr": 
0.03251006816458618, + "acc_norm": 0.3034825870646766, + "acc_norm_stderr": 0.03251006816458618 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2543352601156069, + "acc_stderr": 0.0332055644308557, + "acc_norm": 0.2543352601156069, + "acc_norm_stderr": 0.0332055644308557 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.25396825396825395, + "acc_stderr": 0.022418042891113946, + "acc_norm": 0.25396825396825395, + "acc_norm_stderr": 0.022418042891113946 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.18055555555555555, + "acc_stderr": 0.032166008088022675, + "acc_norm": 0.18055555555555555, + "acc_norm_stderr": 0.032166008088022675 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.3179190751445087, + "acc_stderr": 0.025070713719153193, + "acc_norm": 0.3179190751445087, + "acc_norm_stderr": 0.025070713719153193 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.27607361963190186, + "acc_stderr": 0.0351238528370505, + "acc_norm": 0.27607361963190186, + "acc_norm_stderr": 0.0351238528370505 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3395061728395062, + "acc_stderr": 0.026348564412011624, + "acc_norm": 0.3395061728395062, + "acc_norm_stderr": 0.026348564412011624 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.24352331606217617, + "acc_stderr": 0.03097543638684543, + "acc_norm": 0.24352331606217617, + "acc_norm_stderr": 0.03097543638684543 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, 
+ "acc_stderr": 0.04142439719489361, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.04142439719489361 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.381651376146789, + "acc_stderr": 0.020828148517022596, + "acc_norm": 0.381651376146789, + "acc_norm_stderr": 0.020828148517022596 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.15079365079365079, + "acc_stderr": 0.03200686497287392, + "acc_norm": 0.15079365079365079, + "acc_norm_stderr": 0.03200686497287392 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3562091503267974, + "acc_stderr": 0.027420477662629242, + "acc_norm": 0.3562091503267974, + "acc_norm_stderr": 0.027420477662629242 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.39669421487603307, + "acc_stderr": 0.044658697805310094, + "acc_norm": 0.39669421487603307, + "acc_norm_stderr": 0.044658697805310094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3618421052631579, + "acc_stderr": 0.03910525752849724, + "acc_norm": 0.3618421052631579, + "acc_norm_stderr": 0.03910525752849724 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2826797385620915, + "acc_stderr": 0.018217269552053442, + "acc_norm": 0.2826797385620915, + "acc_norm_stderr": 0.018217269552053442 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2695035460992908, + "acc_stderr": 0.026469036818590634, + "acc_norm": 0.2695035460992908, + "acc_norm_stderr": 0.026469036818590634 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.32142857142857145, + "acc_stderr": 0.04432804055291518, + "acc_norm": 0.32142857142857145, + "acc_norm_stderr": 0.04432804055291518 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.25462962962962965, + "acc_stderr": 0.02971127586000535, + "acc_norm": 0.25462962962962965, + "acc_norm_stderr": 0.02971127586000535 + }, + 
"harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24134078212290502, + "acc_stderr": 0.014310999547961441, + "acc_norm": 0.24134078212290502, + "acc_norm_stderr": 0.014310999547961441 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.31985294117647056, + "acc_stderr": 0.028332959514031232, + "acc_norm": 0.31985294117647056, + "acc_norm_stderr": 0.028332959514031232 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2530612244897959, + "acc_stderr": 0.027833023871399673, + "acc_norm": 0.2530612244897959, + "acc_norm_stderr": 0.027833023871399673 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.3459915611814346, + "acc_stderr": 0.030964810588786713, + "acc_norm": 0.3459915611814346, + "acc_norm_stderr": 0.030964810588786713 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2842242503259452, + "acc_stderr": 0.011519880596516076, + "acc_norm": 0.2842242503259452, + "acc_norm_stderr": 0.011519880596516076 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.03166009679399812, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.03166009679399812 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3393939393939394, + "acc_stderr": 0.036974422050315967, + "acc_norm": 0.3393939393939394, + "acc_norm_stderr": 0.036974422050315967 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.23745410036719705, + "mc1_stderr": 0.014896277441041855, + "mc2": 0.37643541252753543, + "mc2_stderr": 0.014796413880118433 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2939787485242031, + "acc_stderr": 0.015663242569091112, + "acc_norm": 
0.4203069657615112, + "acc_norm_stderr": 0.016970598281177706 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + 
"harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "oopsung/llama2-7b-n-test-v1", + "model_sha": "da4b8d73de4f71bd8752f16240315120c8409029", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/oopsung/llama2-platypus-7b-f/result_2023-11-27 06:40:38.json b/oopsung/llama2-platypus-7b-f/result_2023-11-27 06:40:38.json new file mode 100644 index 0000000000000000000000000000000000000000..b1579c1c79ce570b87531ef4f8a96b5003b76ed2 --- /dev/null +++ b/oopsung/llama2-platypus-7b-f/result_2023-11-27 06:40:38.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.31569965870307165, + "acc_stderr": 0.013582571095815291, + "acc_norm": 0.37542662116040953, + "acc_norm_stderr": 0.01415063143511173 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3813981278629755, + "acc_stderr": 0.004847372670134639, + "acc_norm": 0.4918342959569807, + "acc_norm_stderr": 0.004989115942570064 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.29239766081871343, + "acc_stderr": 0.03488647713457923, + "acc_norm": 0.29239766081871343, + "acc_norm_stderr": 
0.03488647713457923 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2912621359223301, + "acc_stderr": 0.044986763205729224, + "acc_norm": 0.2912621359223301, + "acc_norm_stderr": 0.044986763205729224 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3486590038314176, + "acc_stderr": 0.017041243143490956, + "acc_norm": 0.3486590038314176, + "acc_norm_stderr": 0.017041243143490956 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04072314811876837, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04072314811876837 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421255, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421255 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.28085106382978725, + "acc_stderr": 0.02937917046412482, + "acc_norm": 0.28085106382978725, + "acc_norm_stderr": 0.02937917046412482 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3253012048192771, + "acc_stderr": 0.03647168523683229, + "acc_norm": 0.3253012048192771, + "acc_norm_stderr": 0.03647168523683229 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.33440514469453375, + "acc_stderr": 0.026795422327893944, + "acc_norm": 0.33440514469453375, + "acc_norm_stderr": 0.026795422327893944 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3004484304932735, + "acc_stderr": 0.030769352008229136, + "acc_norm": 0.3004484304932735, + "acc_norm_stderr": 0.030769352008229136 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3893129770992366, + "acc_stderr": 0.04276486542814591, + "acc_norm": 0.3893129770992366, + "acc_norm_stderr": 0.04276486542814591 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3838383838383838, + "acc_stderr": 0.03464881675016339, + "acc_norm": 0.3838383838383838, + "acc_norm_stderr": 
0.03464881675016339 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3310344827586207, + "acc_stderr": 0.039215453124671215, + "acc_norm": 0.3310344827586207, + "acc_norm_stderr": 0.039215453124671215 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.19607843137254902, + "acc_stderr": 0.03950581861179962, + "acc_norm": 0.19607843137254902, + "acc_norm_stderr": 0.03950581861179962 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3067226890756303, + "acc_stderr": 0.029953823891887048, + "acc_norm": 0.3067226890756303, + "acc_norm_stderr": 0.029953823891887048 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2794871794871795, + "acc_stderr": 0.02275238883977682, + "acc_norm": 0.2794871794871795, + "acc_norm_stderr": 0.02275238883977682 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.044531975073749834, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.044531975073749834 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.270935960591133, + "acc_stderr": 0.03127090713297698, + "acc_norm": 0.270935960591133, + "acc_norm_stderr": 0.03127090713297698 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3, + "acc_stderr": 0.026069362295335134, + "acc_norm": 0.3, + "acc_norm_stderr": 0.026069362295335134 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.3547008547008547, + "acc_stderr": 0.03134250486245402, + "acc_norm": 0.3547008547008547, + "acc_norm_stderr": 0.03134250486245402 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3320754716981132, + "acc_stderr": 0.02898545565233439, + "acc_norm": 0.3320754716981132, + 
"acc_norm_stderr": 0.02898545565233439 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.34545454545454546, + "acc_stderr": 0.04554619617541054, + "acc_norm": 0.34545454545454546, + "acc_norm_stderr": 0.04554619617541054 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25555555555555554, + "acc_stderr": 0.026593939101844075, + "acc_norm": 0.25555555555555554, + "acc_norm_stderr": 0.026593939101844075 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + "acc_stderr": 0.036848815213890225, + "acc_norm": 0.2847682119205298, + "acc_norm_stderr": 0.036848815213890225 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.3383084577114428, + "acc_stderr": 0.03345563070339192, + "acc_norm": 0.3383084577114428, + "acc_norm_stderr": 0.03345563070339192 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.27167630057803466, + "acc_stderr": 0.033917503223216613, + "acc_norm": 0.27167630057803466, + "acc_norm_stderr": 0.033917503223216613 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2671957671957672, + "acc_stderr": 0.022789673145776575, + "acc_norm": 0.2671957671957672, + "acc_norm_stderr": 0.022789673145776575 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.18055555555555555, + "acc_stderr": 0.032166008088022675, + "acc_norm": 0.18055555555555555, + "acc_norm_stderr": 0.032166008088022675 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.32947976878612717, + "acc_stderr": 0.0253052581318797, + "acc_norm": 0.32947976878612717, + "acc_norm_stderr": 0.0253052581318797 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.25153374233128833, + "acc_stderr": 
0.03408997886857529, + "acc_norm": 0.25153374233128833, + "acc_norm_stderr": 0.03408997886857529 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.025630824975621348, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.025630824975621348 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.2849740932642487, + "acc_stderr": 0.032577140777096614, + "acc_norm": 0.2849740932642487, + "acc_norm_stderr": 0.032577140777096614 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.042270544512322, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.042270544512322 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.381651376146789, + "acc_stderr": 0.02082814851702259, + "acc_norm": 0.381651376146789, + "acc_norm_stderr": 0.02082814851702259 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.1984126984126984, + "acc_stderr": 0.03567016675276862, + "acc_norm": 0.1984126984126984, + "acc_norm_stderr": 0.03567016675276862 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3562091503267974, + "acc_stderr": 0.027420477662629235, + "acc_norm": 0.3562091503267974, + "acc_norm_stderr": 0.027420477662629235 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.38016528925619836, + "acc_stderr": 0.04431324501968432, + "acc_norm": 0.38016528925619836, + "acc_norm_stderr": 0.04431324501968432 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3618421052631579, + "acc_stderr": 0.03910525752849724, + "acc_norm": 0.3618421052631579, + "acc_norm_stderr": 0.03910525752849724 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 
0.272875816993464, + "acc_stderr": 0.01802047414839358, + "acc_norm": 0.272875816993464, + "acc_norm_stderr": 0.01802047414839358 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2695035460992908, + "acc_stderr": 0.026469036818590634, + "acc_norm": 0.2695035460992908, + "acc_norm_stderr": 0.026469036818590634 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.04287858751340456, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.04287858751340456 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.032568505702936484, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.032568505702936484 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24134078212290502, + "acc_stderr": 0.014310999547961441, + "acc_norm": 0.24134078212290502, + "acc_norm_stderr": 0.014310999547961441 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.029520095697687758, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.029520095697687758 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.27346938775510204, + "acc_stderr": 0.02853556033712844, + "acc_norm": 0.27346938775510204, + "acc_norm_stderr": 0.02853556033712844 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.3755274261603376, + "acc_stderr": 0.03152256243091156, + "acc_norm": 0.3755274261603376, + "acc_norm_stderr": 0.03152256243091156 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.28878748370273793, + "acc_stderr": 0.011574914757219962, + "acc_norm": 0.28878748370273793, + "acc_norm_stderr": 
0.011574914757219962 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.03166009679399812, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.03166009679399812 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.296969696969697, + "acc_stderr": 0.03567969772268048, + "acc_norm": 0.296969696969697, + "acc_norm_stderr": 0.03567969772268048 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2423500611995104, + "mc1_stderr": 0.01500067437357034, + "mc2": 0.37489222578788345, + "mc2_stderr": 0.014767448504749014 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2975206611570248, + "acc_stderr": 0.01571774220508993, + "acc_norm": 0.42266824085005905, + "acc_norm_stderr": 0.0169835060795776 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + 
"harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "oopsung/llama2-platypus-7b-f", + "model_sha": "a42c0fc7a148b9a0b977a79bb9460585b1120350", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/openchat/openchat-3.5-0106-gemma/result_2024-05-17 21:56:00.json b/openchat/openchat-3.5-0106-gemma/result_2024-05-17 21:56:00.json new file mode 100644 index 
0000000000000000000000000000000000000000..8ac15209365fe2b4d9ede327fa30784eb2d8a533 --- /dev/null +++ b/openchat/openchat-3.5-0106-gemma/result_2024-05-17 21:56:00.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.19112627986348124, + "acc_stderr": 0.011490055292778592, + "acc_norm": 0.257679180887372, + "acc_norm_stderr": 0.012780770562768402 + }, + "harness|ko_hellaswag|10": { + "acc": 0.2575184226249751, + "acc_stderr": 0.004363736410689624, + "acc_norm": 0.26618203545110536, + "acc_norm_stderr": 0.004410573431837638 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.30409356725146197, + "acc_stderr": 0.035282112582452306, + "acc_norm": 0.30409356725146197, + "acc_norm_stderr": 0.035282112582452306 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.18446601941747573, + "acc_stderr": 0.03840423627288276, + "acc_norm": 0.18446601941747573, + "acc_norm_stderr": 0.03840423627288276 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2707535121328225, + "acc_stderr": 0.015889888362560486, + "acc_norm": 0.2707535121328225, + "acc_norm_stderr": 0.015889888362560486 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04072314811876837, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04072314811876837 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.20425531914893616, + "acc_stderr": 0.02635515841334941, + "acc_norm": 0.20425531914893616, + "acc_norm_stderr": 0.02635515841334941 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.19879518072289157, + "acc_stderr": 0.03106939026078942, + "acc_norm": 0.19879518072289157, + "acc_norm_stderr": 0.03106939026078942 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2990353697749196, + "acc_stderr": 0.02600330111788514, + "acc_norm": 0.2990353697749196, + "acc_norm_stderr": 
0.02600330111788514 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.20179372197309417, + "acc_stderr": 0.02693611191280227, + "acc_norm": 0.20179372197309417, + "acc_norm_stderr": 0.02693611191280227 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.22900763358778625, + "acc_stderr": 0.036853466317118506, + "acc_norm": 0.22900763358778625, + "acc_norm_stderr": 0.036853466317118506 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036845, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036845 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.22727272727272727, + "acc_stderr": 0.0298575156733864, + "acc_norm": 0.22727272727272727, + "acc_norm_stderr": 0.0298575156733864 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.296551724137931, + "acc_stderr": 0.03806142687309994, + "acc_norm": 0.296551724137931, + "acc_norm_stderr": 0.03806142687309994 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237657, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237657 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.026265024608275882, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.026265024608275882 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2128205128205128, + "acc_stderr": 0.020752423722128002, + "acc_norm": 0.2128205128205128, + "acc_norm_stderr": 0.020752423722128002 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.24074074074074073, + "acc_stderr": 0.04133119440243839, + "acc_norm": 0.24074074074074073, + 
"acc_norm_stderr": 0.04133119440243839 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2955665024630542, + "acc_stderr": 0.032104944337514575, + "acc_norm": 0.2955665024630542, + "acc_norm_stderr": 0.032104944337514575 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.24838709677419354, + "acc_stderr": 0.02458002892148101, + "acc_norm": 0.24838709677419354, + "acc_norm_stderr": 0.02458002892148101 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2606837606837607, + "acc_stderr": 0.028760348956523414, + "acc_norm": 0.2606837606837607, + "acc_norm_stderr": 0.028760348956523414 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.21509433962264152, + "acc_stderr": 0.025288394502891356, + "acc_norm": 0.21509433962264152, + "acc_norm_stderr": 0.025288394502891356 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.22727272727272727, + "acc_stderr": 0.04013964554072775, + "acc_norm": 0.22727272727272727, + "acc_norm_stderr": 0.04013964554072775 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.026842057873833706, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.026842057873833706 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2781456953642384, + "acc_stderr": 0.03658603262763743, + "acc_norm": 0.2781456953642384, + "acc_norm_stderr": 0.03658603262763743 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.24875621890547264, + "acc_stderr": 0.030567675938916714, + "acc_norm": 0.24875621890547264, + "acc_norm_stderr": 0.030567675938916714 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.24855491329479767, + "acc_stderr": 0.03295304696818317, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 0.03295304696818317 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2751322751322751, + "acc_stderr": 0.023000086859068642, + "acc_norm": 0.2751322751322751, + "acc_norm_stderr": 0.023000086859068642 + }, + "harness|ko_mmlu_college_biology|5": 
{ + "acc": 0.24305555555555555, + "acc_stderr": 0.03586879280080342, + "acc_norm": 0.24305555555555555, + "acc_norm_stderr": 0.03586879280080342 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.18, + "acc_stderr": 0.03861229196653695, + "acc_norm": 0.18, + "acc_norm_stderr": 0.03861229196653695 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.29190751445086704, + "acc_stderr": 0.02447699407624734, + "acc_norm": 0.29190751445086704, + "acc_norm_stderr": 0.02447699407624734 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3067484662576687, + "acc_stderr": 0.03623089915724149, + "acc_norm": 0.3067484662576687, + "acc_norm_stderr": 0.03623089915724149 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.28703703703703703, + "acc_stderr": 0.02517104191530968, + "acc_norm": 0.28703703703703703, + "acc_norm_stderr": 0.02517104191530968 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.22797927461139897, + "acc_stderr": 0.030276909945178256, + "acc_norm": 0.22797927461139897, + "acc_norm_stderr": 0.030276909945178256 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.04049339297748141, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.04049339297748141 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.22752293577981653, + "acc_stderr": 0.0179744635787765, + "acc_norm": 0.22752293577981653, + "acc_norm_stderr": 0.0179744635787765 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.15079365079365079, + "acc_stderr": 0.03200686497287392, + "acc_norm": 0.15079365079365079, + "acc_norm_stderr": 0.03200686497287392 + }, + 
"harness|ko_mmlu_nutrition|5": { + "acc": 0.25163398692810457, + "acc_stderr": 0.024848018263875195, + "acc_norm": 0.25163398692810457, + "acc_norm_stderr": 0.024848018263875195 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909281, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909281 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.36363636363636365, + "acc_stderr": 0.043913262867240704, + "acc_norm": 0.36363636363636365, + "acc_norm_stderr": 0.043913262867240704 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.2894736842105263, + "acc_stderr": 0.03690677986137282, + "acc_norm": 0.2894736842105263, + "acc_norm_stderr": 0.03690677986137282 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.26633986928104575, + "acc_stderr": 0.017883188134667192, + "acc_norm": 0.26633986928104575, + "acc_norm_stderr": 0.017883188134667192 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2695035460992908, + "acc_stderr": 0.026469036818590638, + "acc_norm": 0.2695035460992908, + "acc_norm_stderr": 0.026469036818590638 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.24107142857142858, + "acc_stderr": 0.040598672469526885, + "acc_norm": 0.24107142857142858, + "acc_norm_stderr": 0.040598672469526885 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2037037037037037, + "acc_stderr": 0.027467401804057993, + "acc_norm": 0.2037037037037037, + "acc_norm_stderr": 0.027467401804057993 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24692737430167597, + "acc_stderr": 0.014422292204808852, + "acc_norm": 0.24692737430167597, + "acc_norm_stderr": 0.014422292204808852 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + 
"acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.16911764705882354, + "acc_stderr": 0.022770868010113007, + "acc_norm": 0.16911764705882354, + "acc_norm_stderr": 0.022770868010113007 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.24897959183673468, + "acc_stderr": 0.027682979522960224, + "acc_norm": 0.24897959183673468, + "acc_norm_stderr": 0.027682979522960224 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.270042194092827, + "acc_stderr": 0.028900721906293426, + "acc_norm": 0.270042194092827, + "acc_norm_stderr": 0.028900721906293426 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.26792698826597133, + "acc_stderr": 0.011311347690633872, + "acc_norm": 0.26792698826597133, + "acc_norm_stderr": 0.011311347690633872 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.030587591351604246, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.030587591351604246 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.28484848484848485, + "acc_stderr": 0.035243908445117836, + "acc_norm": 0.28484848484848485, + "acc_norm_stderr": 0.035243908445117836 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2594859241126071, + "mc1_stderr": 0.015345409485557994, + "mc2": 0.5031791953233768, + "mc2_stderr": 0.01728815166743411 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.12987012987012986, + "acc_stderr": 0.01155743593276671, + "acc_norm": 0.2680047225501771, + "acc_norm_stderr": 0.01522790579633515 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + 
"harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + 
"harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "openchat/openchat-3.5-0106-gemma", + "model_sha": "5316738a9049ab32e3b9a49eb0e9dfb49de8a725", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/openchat/openchat-3.5-0106/result_2024-03-20 16:36:28.json b/openchat/openchat-3.5-0106/result_2024-03-20 16:36:28.json new file mode 100644 index 0000000000000000000000000000000000000000..62f810da2b0f7b2425bf503d3f1c052f43b41371 --- /dev/null +++ b/openchat/openchat-3.5-0106/result_2024-03-20 16:36:28.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.33361774744027306, + "acc_stderr": 0.013778687054176538, + "acc_norm": 0.3984641638225256, + "acc_norm_stderr": 0.014306946052735565 + }, + "harness|ko_hellaswag|10": { + "acc": 0.36885082652858, + "acc_stderr": 0.0048150733340006, + "acc_norm": 0.47082254530969925, + "acc_norm_stderr": 0.004981278326428021 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4853801169590643, + "acc_stderr": 0.038331852752130205, + "acc_norm": 0.4853801169590643, + "acc_norm_stderr": 0.038331852752130205 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5728155339805825, + "acc_stderr": 0.048979577377811674, + "acc_norm": 0.5728155339805825, + "acc_norm_stderr": 0.048979577377811674 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.47381864623243936, + "acc_stderr": 0.017855434554041975, + "acc_norm": 0.47381864623243936, + "acc_norm_stderr": 0.017855434554041975 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34814814814814815, + 
"acc_stderr": 0.041153246103369526, + "acc_norm": 0.34814814814814815, + "acc_norm_stderr": 0.041153246103369526 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720683, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720683 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.37446808510638296, + "acc_stderr": 0.031639106653672915, + "acc_norm": 0.37446808510638296, + "acc_norm_stderr": 0.031639106653672915 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39156626506024095, + "acc_stderr": 0.037998574544796354, + "acc_norm": 0.39156626506024095, + "acc_norm_stderr": 0.037998574544796354 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.45016077170418006, + "acc_stderr": 0.028256660723360184, + "acc_norm": 0.45016077170418006, + "acc_norm_stderr": 0.028256660723360184 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4439461883408072, + "acc_stderr": 0.03334625674242728, + "acc_norm": 0.4439461883408072, + "acc_norm_stderr": 0.03334625674242728 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5114503816793893, + "acc_stderr": 0.043841400240780176, + "acc_norm": 0.5114503816793893, + "acc_norm_stderr": 0.043841400240780176 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6111111111111112, + "acc_stderr": 0.0347327959083696, + "acc_norm": 0.6111111111111112, + "acc_norm_stderr": 0.0347327959083696 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4482758620689655, + "acc_stderr": 0.041443118108781506, + "acc_norm": 0.4482758620689655, + "acc_norm_stderr": 0.041443118108781506 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.30392156862745096, + "acc_stderr": 0.04576665403207762, + "acc_norm": 0.30392156862745096, + "acc_norm_stderr": 0.04576665403207762 + }, + "harness|ko_mmlu_high_school_microeconomics|5": 
{ + "acc": 0.542016806722689, + "acc_stderr": 0.03236361111951941, + "acc_norm": 0.542016806722689, + "acc_norm_stderr": 0.03236361111951941 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4717948717948718, + "acc_stderr": 0.02531063925493392, + "acc_norm": 0.4717948717948718, + "acc_norm_stderr": 0.02531063925493392 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5092592592592593, + "acc_stderr": 0.04832853553437056, + "acc_norm": 0.5092592592592593, + "acc_norm_stderr": 0.04832853553437056 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.35960591133004927, + "acc_stderr": 0.03376458246509567, + "acc_norm": 0.35960591133004927, + "acc_norm_stderr": 0.03376458246509567 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.44516129032258067, + "acc_stderr": 0.028272410186214906, + "acc_norm": 0.44516129032258067, + "acc_norm_stderr": 0.028272410186214906 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7435897435897436, + "acc_stderr": 0.02860595370200425, + "acc_norm": 0.7435897435897436, + "acc_norm_stderr": 0.02860595370200425 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4528301886792453, + "acc_stderr": 0.030635627957961827, + "acc_norm": 0.4528301886792453, + "acc_norm_stderr": 0.030635627957961827 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.0478833976870286, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.0478833976870286 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.029723278961476664, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.029723278961476664 + }, + 
"harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + "acc_stderr": 0.037345356767871984, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.037345356767871984 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5970149253731343, + "acc_stderr": 0.034683432951111266, + "acc_norm": 0.5970149253731343, + "acc_norm_stderr": 0.034683432951111266 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3872832369942196, + "acc_stderr": 0.03714325906302065, + "acc_norm": 0.3872832369942196, + "acc_norm_stderr": 0.03714325906302065 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.36772486772486773, + "acc_stderr": 0.02483383982556242, + "acc_norm": 0.36772486772486773, + "acc_norm_stderr": 0.02483383982556242 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3680555555555556, + "acc_stderr": 0.04032999053960719, + "acc_norm": 0.3680555555555556, + "acc_norm_stderr": 0.04032999053960719 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.62, + "acc_stderr": 0.04878317312145634, + "acc_norm": 0.62, + "acc_norm_stderr": 0.04878317312145634 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.47109826589595377, + "acc_stderr": 0.02687408588351835, + "acc_norm": 0.47109826589595377, + "acc_norm_stderr": 0.02687408588351835 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4723926380368098, + "acc_stderr": 0.03922378290610991, + "acc_norm": 0.4723926380368098, + "acc_norm_stderr": 0.03922378290610991 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4537037037037037, + "acc_stderr": 0.027701228468542602, + "acc_norm": 0.4537037037037037, + "acc_norm_stderr": 0.027701228468542602 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + 
"harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.49740932642487046, + "acc_stderr": 0.03608390745384488, + "acc_norm": 0.49740932642487046, + "acc_norm_stderr": 0.03608390745384488 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.04227054451232199, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.04227054451232199 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5137614678899083, + "acc_stderr": 0.02142920208987408, + "acc_norm": 0.5137614678899083, + "acc_norm_stderr": 0.02142920208987408 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4523809523809524, + "acc_stderr": 0.044518079590553275, + "acc_norm": 0.4523809523809524, + "acc_norm_stderr": 0.044518079590553275 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.49673202614379086, + "acc_stderr": 0.02862930519400354, + "acc_norm": 0.49673202614379086, + "acc_norm_stderr": 0.02862930519400354 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.043913262867240704, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.043913262867240704 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4473684210526316, + "acc_stderr": 0.040463368839782514, + "acc_norm": 0.4473684210526316, + "acc_norm_stderr": 0.040463368839782514 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.01972205893961806, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.01972205893961806 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3617021276595745, + "acc_stderr": 0.028663820147199492, + "acc_norm": 0.3617021276595745, + "acc_norm_stderr": 0.028663820147199492 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.36607142857142855, + "acc_stderr": 0.0457237235873743, + 
"acc_norm": 0.36607142857142855, + "acc_norm_stderr": 0.0457237235873743 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4398148148148148, + "acc_stderr": 0.0338517797604481, + "acc_norm": 0.4398148148148148, + "acc_norm_stderr": 0.0338517797604481 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24022346368715083, + "acc_stderr": 0.014288343803925315, + "acc_norm": 0.24022346368715083, + "acc_norm_stderr": 0.014288343803925315 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.65, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.65, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.375, + "acc_stderr": 0.029408372932278746, + "acc_norm": 0.375, + "acc_norm_stderr": 0.029408372932278746 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5510204081632653, + "acc_stderr": 0.03184213866687578, + "acc_norm": 0.5510204081632653, + "acc_norm_stderr": 0.03184213866687578 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6160337552742616, + "acc_stderr": 0.031658678064106674, + "acc_norm": 0.6160337552742616, + "acc_norm_stderr": 0.031658678064106674 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.32659713168187743, + "acc_stderr": 0.011977676704715997, + "acc_norm": 0.32659713168187743, + "acc_norm_stderr": 0.011977676704715997 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.46568627450980393, + "acc_stderr": 0.03501038327635897, + "acc_norm": 0.46568627450980393, + "acc_norm_stderr": 0.03501038327635897 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.49696969696969695, + "acc_stderr": 0.039042723414318574, + "acc_norm": 0.49696969696969695, + "acc_norm_stderr": 0.039042723414318574 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 
0.3317013463892289, + "mc1_stderr": 0.01648214881024147, + "mc2": 0.5125185634419395, + "mc2_stderr": 0.01588857483046443 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.44510035419126326, + "acc_stderr": 0.017086417431005467, + "acc_norm": 0.47461629279811096, + "acc_norm_stderr": 0.017168187201429253 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "openchat/openchat-3.5-0106", + "model_sha": "9619fb7d2a8e25fa6b0633c0f57f7f4aa79b45c4", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/openchat/openchat-3.6-8b-20240522/result_2024-05-28 05:29:35.json b/openchat/openchat-3.6-8b-20240522/result_2024-05-28 05:29:35.json new file mode 100644 index 0000000000000000000000000000000000000000..2c737daf974ed627ae3f84afde0e3d73a3e71e1b --- /dev/null +++ b/openchat/openchat-3.6-8b-20240522/result_2024-05-28 05:29:35.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3779863481228669, + "acc_stderr": 0.014169664520303094, + "acc_norm": 0.41638225255972694, + "acc_norm_stderr": 0.014405618279436174 + }, + "harness|ko_hellaswag|10": { + "acc": 0.37751443935471024, + 
"acc_stderr": 0.004837744647345718, + "acc_norm": 0.48894642501493724, + "acc_norm_stderr": 0.004988561944277394 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6081871345029239, + "acc_stderr": 0.03743979825926399, + "acc_norm": 0.6081871345029239, + "acc_norm_stderr": 0.03743979825926399 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6213592233009708, + "acc_stderr": 0.04802694698258973, + "acc_norm": 0.6213592233009708, + "acc_norm_stderr": 0.04802694698258973 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5478927203065134, + "acc_stderr": 0.017797751493865633, + "acc_norm": 0.5478927203065134, + "acc_norm_stderr": 0.017797751493865633 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.362962962962963, + "acc_stderr": 0.041539484047424, + "acc_norm": 0.362962962962963, + "acc_norm_stderr": 0.041539484047424 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.5106382978723404, + "acc_stderr": 0.03267862331014063, + "acc_norm": 0.5106382978723404, + "acc_norm_stderr": 0.03267862331014063 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3855421686746988, + "acc_stderr": 0.03789134424611548, + "acc_norm": 0.3855421686746988, + "acc_norm_stderr": 0.03789134424611548 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5241157556270096, + "acc_stderr": 0.028365041542564584, + "acc_norm": 0.5241157556270096, + "acc_norm_stderr": 0.028365041542564584 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5022421524663677, + "acc_stderr": 0.03355746535223263, + "acc_norm": 0.5022421524663677, + "acc_norm_stderr": 0.03355746535223263 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5114503816793893, + "acc_stderr": 0.043841400240780176, + "acc_norm": 0.5114503816793893, + "acc_norm_stderr": 0.043841400240780176 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.43, + 
"acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6111111111111112, + "acc_stderr": 0.0347327959083696, + "acc_norm": 0.6111111111111112, + "acc_norm_stderr": 0.0347327959083696 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5241379310344828, + "acc_stderr": 0.041618085035015295, + "acc_norm": 0.5241379310344828, + "acc_norm_stderr": 0.041618085035015295 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.35294117647058826, + "acc_stderr": 0.047551296160629475, + "acc_norm": 0.35294117647058826, + "acc_norm_stderr": 0.047551296160629475 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5504201680672269, + "acc_stderr": 0.03231293497137707, + "acc_norm": 0.5504201680672269, + "acc_norm_stderr": 0.03231293497137707 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4641025641025641, + "acc_stderr": 0.025285585990017834, + "acc_norm": 0.4641025641025641, + "acc_norm_stderr": 0.025285585990017834 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.53, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.53, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6111111111111112, + "acc_stderr": 0.0471282125742677, + "acc_norm": 0.6111111111111112, + "acc_norm_stderr": 0.0471282125742677 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3793103448275862, + "acc_stderr": 0.03413963805906235, + "acc_norm": 0.3793103448275862, + "acc_norm_stderr": 0.03413963805906235 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5, + "acc_stderr": 0.028444006199428714, + "acc_norm": 0.5, + "acc_norm_stderr": 0.028444006199428714 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 
0.7393162393162394, + "acc_stderr": 0.02876034895652341, + "acc_norm": 0.7393162393162394, + "acc_norm_stderr": 0.02876034895652341 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5018867924528302, + "acc_stderr": 0.030772653642075657, + "acc_norm": 0.5018867924528302, + "acc_norm_stderr": 0.030772653642075657 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.04769300568972743, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.04769300568972743 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3592592592592593, + "acc_stderr": 0.029252905927251976, + "acc_norm": 0.3592592592592593, + "acc_norm_stderr": 0.029252905927251976 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + "acc_stderr": 0.037345356767871984, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.037345356767871984 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6567164179104478, + "acc_stderr": 0.03357379665433431, + "acc_norm": 0.6567164179104478, + "acc_norm_stderr": 0.03357379665433431 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.44508670520231214, + "acc_stderr": 0.03789401760283648, + "acc_norm": 0.44508670520231214, + "acc_norm_stderr": 0.03789401760283648 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.41798941798941797, + "acc_stderr": 0.02540255550326091, + "acc_norm": 0.41798941798941797, + "acc_norm_stderr": 0.02540255550326091 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3958333333333333, + "acc_stderr": 0.040894654493255835, + "acc_norm": 0.3958333333333333, + "acc_norm_stderr": 0.040894654493255835 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.69, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.69, + "acc_norm_stderr": 0.04648231987117316 + }, + 
"harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5144508670520231, + "acc_stderr": 0.02690784985628254, + "acc_norm": 0.5144508670520231, + "acc_norm_stderr": 0.02690784985628254 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4662576687116564, + "acc_stderr": 0.039194155450484096, + "acc_norm": 0.4662576687116564, + "acc_norm_stderr": 0.039194155450484096 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5308641975308642, + "acc_stderr": 0.027767689606833925, + "acc_norm": 0.5308641975308642, + "acc_norm_stderr": 0.027767689606833925 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5647668393782384, + "acc_stderr": 0.03578038165008586, + "acc_norm": 0.5647668393782384, + "acc_norm_stderr": 0.03578038165008586 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.044346007015849245, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.044346007015849245 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.581651376146789, + "acc_stderr": 0.02114954859644388, + "acc_norm": 0.581651376146789, + "acc_norm_stderr": 0.02114954859644388 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4365079365079365, + "acc_stderr": 0.04435932892851466, + "acc_norm": 0.4365079365079365, + "acc_norm_stderr": 0.04435932892851466 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.545751633986928, + "acc_stderr": 0.028509807802626592, + "acc_norm": 0.545751633986928, + "acc_norm_stderr": 0.028509807802626592 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.57, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.57, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6776859504132231, + "acc_stderr": 0.042664163633521664, + "acc_norm": 0.6776859504132231, + "acc_norm_stderr": 
0.042664163633521664 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.48026315789473684, + "acc_stderr": 0.04065771002562605, + "acc_norm": 0.48026315789473684, + "acc_norm_stderr": 0.04065771002562605 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.40522875816993464, + "acc_stderr": 0.019861155193829177, + "acc_norm": 0.40522875816993464, + "acc_norm_stderr": 0.019861155193829177 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.35106382978723405, + "acc_stderr": 0.028473501272963775, + "acc_norm": 0.35106382978723405, + "acc_norm_stderr": 0.028473501272963775 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.5, + "acc_stderr": 0.04745789978762494, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04745789978762494 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.03400603625538272, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.03400603625538272 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.31508379888268156, + "acc_stderr": 0.015536850852473642, + "acc_norm": 0.31508379888268156, + "acc_norm_stderr": 0.015536850852473642 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.36764705882352944, + "acc_stderr": 0.029289413409403192, + "acc_norm": 0.36764705882352944, + "acc_norm_stderr": 0.029289413409403192 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5836734693877551, + "acc_stderr": 0.03155782816556165, + "acc_norm": 0.5836734693877551, + "acc_norm_stderr": 0.03155782816556165 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6708860759493671, + "acc_stderr": 0.030587326294702365, + 
"acc_norm": 0.6708860759493671, + "acc_norm_stderr": 0.030587326294702365 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3767926988265971, + "acc_stderr": 0.012376459593894397, + "acc_norm": 0.3767926988265971, + "acc_norm_stderr": 0.012376459593894397 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5735294117647058, + "acc_stderr": 0.034711579079534274, + "acc_norm": 0.5735294117647058, + "acc_norm_stderr": 0.034711579079534274 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.03756335775187896, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.03756335775187896 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3047735618115055, + "mc1_stderr": 0.01611412415688247, + "mc2": 0.4798411861338715, + "mc2_stderr": 0.01570597153109691 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4769775678866588, + "acc_stderr": 0.017172121546727634, + "acc_norm": 0.5112160566706021, + "acc_norm_stderr": 0.017186028469489287 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + 
"harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "openchat/openchat-3.6-8b-20240522", + "model_sha": "2264eb98558978f708e88ae52afb78e43b832801", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + 
"override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/openchat/openchat_3.5/result_2023-11-06 11:21:55.json b/openchat/openchat_3.5/result_2023-11-06 11:21:55.json new file mode 100644 index 0000000000000000000000000000000000000000..5d1948b9f03de6aae42b4a4f892b8b0947aa7c80 --- /dev/null +++ b/openchat/openchat_3.5/result_2023-11-06 11:21:55.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3250853242320819, + "acc_stderr": 0.013688147309729124, + "acc_norm": 0.37372013651877134, + "acc_norm_stderr": 0.014137708601759084 + }, + "harness|ko_hellaswag|10": { + "acc": 0.36885082652858, + "acc_stderr": 0.0048150733340006, + "acc_norm": 0.47161919936267677, + "acc_norm_stderr": 0.0049817366895187455 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4619883040935672, + "acc_stderr": 0.03823727092882307, + "acc_norm": 0.4619883040935672, + "acc_norm_stderr": 0.03823727092882307 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5922330097087378, + "acc_stderr": 0.048657775704107675, + "acc_norm": 0.5922330097087378, + "acc_norm_stderr": 0.048657775704107675 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.46360153256704983, + "acc_stderr": 0.017832524079593265, + "acc_norm": 0.46360153256704983, + "acc_norm_stderr": 0.017832524079593265 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.362962962962963, + "acc_stderr": 0.041539484047424, + "acc_norm": 0.362962962962963, + "acc_norm_stderr": 0.041539484047424 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206824, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206824 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39574468085106385, + "acc_stderr": 0.031967586978353627, + "acc_norm": 0.39574468085106385, + "acc_norm_stderr": 0.031967586978353627 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3855421686746988, + "acc_stderr": 0.03789134424611548, + "acc_norm": 
0.3855421686746988, + "acc_norm_stderr": 0.03789134424611548 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.43729903536977494, + "acc_stderr": 0.028173917761762875, + "acc_norm": 0.43729903536977494, + "acc_norm_stderr": 0.028173917761762875 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.452914798206278, + "acc_stderr": 0.03340867501923325, + "acc_norm": 0.452914798206278, + "acc_norm_stderr": 0.03340867501923325 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3969465648854962, + "acc_stderr": 0.04291135671009224, + "acc_norm": 0.3969465648854962, + "acc_norm_stderr": 0.04291135671009224 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5757575757575758, + "acc_stderr": 0.035212249088415845, + "acc_norm": 0.5757575757575758, + "acc_norm_stderr": 0.035212249088415845 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4413793103448276, + "acc_stderr": 0.04137931034482758, + "acc_norm": 0.4413793103448276, + "acc_norm_stderr": 0.04137931034482758 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.043898699568087785, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.043898699568087785 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5252100840336135, + "acc_stderr": 0.0324371805513741, + "acc_norm": 0.5252100840336135, + "acc_norm_stderr": 0.0324371805513741 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4641025641025641, + "acc_stderr": 0.02528558599001783, + "acc_norm": 0.4641025641025641, + "acc_norm_stderr": 0.02528558599001783 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.28, + "acc_stderr": 
0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5370370370370371, + "acc_stderr": 0.04820403072760627, + "acc_norm": 0.5370370370370371, + "acc_norm_stderr": 0.04820403072760627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3694581280788177, + "acc_stderr": 0.03395970381998576, + "acc_norm": 0.3694581280788177, + "acc_norm_stderr": 0.03395970381998576 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.43548387096774194, + "acc_stderr": 0.028206225591502737, + "acc_norm": 0.43548387096774194, + "acc_norm_stderr": 0.028206225591502737 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7136752136752137, + "acc_stderr": 0.029614323690456648, + "acc_norm": 0.7136752136752137, + "acc_norm_stderr": 0.029614323690456648 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.47547169811320755, + "acc_stderr": 0.030735822206205615, + "acc_norm": 0.47547169811320755, + "acc_norm_stderr": 0.030735822206205615 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5181818181818182, + "acc_stderr": 0.04785964010794915, + "acc_norm": 0.5181818181818182, + "acc_norm_stderr": 0.04785964010794915 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.34444444444444444, + "acc_stderr": 0.028972648884844267, + "acc_norm": 0.34444444444444444, + "acc_norm_stderr": 0.028972648884844267 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.25165562913907286, + "acc_stderr": 0.03543304234389985, + "acc_norm": 0.25165562913907286, + "acc_norm_stderr": 0.03543304234389985 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5572139303482587, + "acc_stderr": 0.03512310964123937, + "acc_norm": 0.5572139303482587, + "acc_norm_stderr": 0.03512310964123937 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.43352601156069365, + "acc_stderr": 0.03778621079092055, + "acc_norm": 0.43352601156069365, + "acc_norm_stderr": 0.03778621079092055 + }, + 
"harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.34656084656084657, + "acc_stderr": 0.024508777521028428, + "acc_norm": 0.34656084656084657, + "acc_norm_stderr": 0.024508777521028428 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3541666666666667, + "acc_stderr": 0.039994111357535424, + "acc_norm": 0.3541666666666667, + "acc_norm_stderr": 0.039994111357535424 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.57, + "acc_stderr": 0.04975698519562427, + "acc_norm": 0.57, + "acc_norm_stderr": 0.04975698519562427 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5086705202312138, + "acc_stderr": 0.026915047355369804, + "acc_norm": 0.5086705202312138, + "acc_norm_stderr": 0.026915047355369804 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.49079754601226994, + "acc_stderr": 0.03927705600787443, + "acc_norm": 0.49079754601226994, + "acc_norm_stderr": 0.03927705600787443 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.027777777777777797, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.027777777777777797 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939098, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939098 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.49740932642487046, + "acc_stderr": 0.03608390745384488, + "acc_norm": 0.49740932642487046, + "acc_norm_stderr": 0.03608390745384488 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.35964912280701755, + "acc_stderr": 0.04514496132873633, + "acc_norm": 0.35964912280701755, + "acc_norm_stderr": 0.04514496132873633 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5302752293577981, + "acc_stderr": 0.021397988604936965, + "acc_norm": 0.5302752293577981, + "acc_norm_stderr": 
0.021397988604936965 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.42063492063492064, + "acc_stderr": 0.04415438226743744, + "acc_norm": 0.42063492063492064, + "acc_norm_stderr": 0.04415438226743744 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.49019607843137253, + "acc_stderr": 0.028624412550167965, + "acc_norm": 0.49019607843137253, + "acc_norm_stderr": 0.028624412550167965 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5867768595041323, + "acc_stderr": 0.04495087843548408, + "acc_norm": 0.5867768595041323, + "acc_norm_stderr": 0.04495087843548408 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4342105263157895, + "acc_stderr": 0.04033565667848319, + "acc_norm": 0.4342105263157895, + "acc_norm_stderr": 0.04033565667848319 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3790849673202614, + "acc_stderr": 0.019627444748412243, + "acc_norm": 0.3790849673202614, + "acc_norm_stderr": 0.019627444748412243 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3546099290780142, + "acc_stderr": 0.02853865002887864, + "acc_norm": 0.3546099290780142, + "acc_norm_stderr": 0.02853865002887864 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.36607142857142855, + "acc_stderr": 0.0457237235873743, + "acc_norm": 0.36607142857142855, + "acc_norm_stderr": 0.0457237235873743 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.41203703703703703, + "acc_stderr": 0.03356787758160834, + "acc_norm": 0.41203703703703703, + "acc_norm_stderr": 0.03356787758160834 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23575418994413408, + "acc_stderr": 0.014196375686290804, + "acc_norm": 0.23575418994413408, + "acc_norm_stderr": 0.014196375686290804 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + 
"acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.62, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.62, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3602941176470588, + "acc_stderr": 0.029163128570670733, + "acc_norm": 0.3602941176470588, + "acc_norm_stderr": 0.029163128570670733 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5102040816326531, + "acc_stderr": 0.03200255347893783, + "acc_norm": 0.5102040816326531, + "acc_norm_stderr": 0.03200255347893783 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6118143459915611, + "acc_stderr": 0.0317229500433233, + "acc_norm": 0.6118143459915611, + "acc_norm_stderr": 0.0317229500433233 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.32790091264667537, + "acc_stderr": 0.011989936640666533, + "acc_norm": 0.32790091264667537, + "acc_norm_stderr": 0.011989936640666533 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.47549019607843135, + "acc_stderr": 0.03505093194348798, + "acc_norm": 0.47549019607843135, + "acc_norm_stderr": 0.03505093194348798 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.49696969696969695, + "acc_stderr": 0.03904272341431855, + "acc_norm": 0.49696969696969695, + "acc_norm_stderr": 0.03904272341431855 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3084455324357405, + "mc1_stderr": 0.01616803938315687, + "mc2": 0.47927145420911593, + "mc2_stderr": 0.01578696956369576 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.43211334120425027, + "acc_stderr": 0.017031170198851746, + "acc_norm": 0.4510035419126328, + "acc_norm_stderr": 0.01710761885954935 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + 
"harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + 
"harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "openchat/openchat_3.5", + "model_sha": "0be788e53032214fe8c05d34682a2bbab6ba6580", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/openlynn/Llama-3-Soliloquy-8B/result_2024-04-23 09:05:10.json b/openlynn/Llama-3-Soliloquy-8B/result_2024-04-23 09:05:10.json new file mode 100644 index 0000000000000000000000000000000000000000..fde046fe12c668801f8d4804f01f07c3223efb21 --- /dev/null +++ b/openlynn/Llama-3-Soliloquy-8B/result_2024-04-23 09:05:10.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3370307167235495, + "acc_stderr": 0.013813476652902276, + "acc_norm": 0.38310580204778155, + "acc_norm_stderr": 0.01420647266167288 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3450507866958773, + "acc_stderr": 0.004744132825391516, + "acc_norm": 0.4342760406293567, + "acc_norm_stderr": 0.004946485466544624 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.30994152046783624, + "acc_stderr": 0.03546976959393163, + "acc_norm": 0.30994152046783624, + "acc_norm_stderr": 0.03546976959393163 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4174757281553398, + "acc_stderr": 0.048828405482122375, + "acc_norm": 0.4174757281553398, + "acc_norm_stderr": 0.048828405482122375 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.38697318007662834, 
+ "acc_stderr": 0.017417138059440153, + "acc_norm": 0.38697318007662834, + "acc_norm_stderr": 0.017417138059440153 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34074074074074073, + "acc_stderr": 0.04094376269996794, + "acc_norm": 0.34074074074074073, + "acc_norm_stderr": 0.04094376269996794 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3404255319148936, + "acc_stderr": 0.030976692998534432, + "acc_norm": 0.3404255319148936, + "acc_norm_stderr": 0.030976692998534432 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3674698795180723, + "acc_stderr": 0.03753267402120574, + "acc_norm": 0.3674698795180723, + "acc_norm_stderr": 0.03753267402120574 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.38263665594855306, + "acc_stderr": 0.027604689028582, + "acc_norm": 0.38263665594855306, + "acc_norm_stderr": 0.027604689028582 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.39461883408071746, + "acc_stderr": 0.03280400504755291, + "acc_norm": 0.39461883408071746, + "acc_norm_stderr": 0.03280400504755291 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4732824427480916, + "acc_stderr": 0.04379024936553894, + "acc_norm": 0.4732824427480916, + "acc_norm_stderr": 0.04379024936553894 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.43434343434343436, + "acc_stderr": 0.03531505879359183, + "acc_norm": 0.43434343434343436, + "acc_norm_stderr": 0.03531505879359183 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4068965517241379, + "acc_stderr": 0.04093793981266237, + "acc_norm": 0.4068965517241379, + "acc_norm_stderr": 0.04093793981266237 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 
0.19607843137254902, + "acc_stderr": 0.039505818611799616, + "acc_norm": 0.19607843137254902, + "acc_norm_stderr": 0.039505818611799616 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.032145368597886394, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.032145368597886394 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4153846153846154, + "acc_stderr": 0.024985354923102308, + "acc_norm": 0.4153846153846154, + "acc_norm_stderr": 0.024985354923102308 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5092592592592593, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.5092592592592593, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3399014778325123, + "acc_stderr": 0.03332769068410789, + "acc_norm": 0.3399014778325123, + "acc_norm_stderr": 0.03332769068410789 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3967741935483871, + "acc_stderr": 0.027831231605767944, + "acc_norm": 0.3967741935483871, + "acc_norm_stderr": 0.027831231605767944 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6025641025641025, + "acc_stderr": 0.03205953453789293, + "acc_norm": 0.6025641025641025, + "acc_norm_stderr": 0.03205953453789293 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4037735849056604, + "acc_stderr": 0.03019761160019795, + "acc_norm": 0.4037735849056604, + "acc_norm_stderr": 0.03019761160019795 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.43636363636363634, + "acc_stderr": 0.04750185058907297, + "acc_norm": 0.43636363636363634, + "acc_norm_stderr": 0.04750185058907297 + }, + 
"harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.026842057873833706, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.026842057873833706 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + "acc_stderr": 0.03734535676787198, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.03734535676787198 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.48756218905472637, + "acc_stderr": 0.0353443984853958, + "acc_norm": 0.48756218905472637, + "acc_norm_stderr": 0.0353443984853958 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.36416184971098264, + "acc_stderr": 0.03669072477416907, + "acc_norm": 0.36416184971098264, + "acc_norm_stderr": 0.03669072477416907 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.34656084656084657, + "acc_stderr": 0.024508777521028414, + "acc_norm": 0.34656084656084657, + "acc_norm_stderr": 0.024508777521028414 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.03942082639927213, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.03942082639927213 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.48, + "acc_stderr": 0.05021167315686779, + "acc_norm": 0.48, + "acc_norm_stderr": 0.05021167315686779 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.45375722543352603, + "acc_stderr": 0.02680372058320619, + "acc_norm": 0.45375722543352603, + "acc_norm_stderr": 0.02680372058320619 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3558282208588957, + "acc_stderr": 0.03761521380046734, + "acc_norm": 0.3558282208588957, + "acc_norm_stderr": 0.03761521380046734 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4012345679012346, + "acc_stderr": 0.027272582849839792, + "acc_norm": 0.4012345679012346, + 
"acc_norm_stderr": 0.027272582849839792 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.37305699481865284, + "acc_stderr": 0.03490205592048574, + "acc_norm": 0.37305699481865284, + "acc_norm_stderr": 0.03490205592048574 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.30701754385964913, + "acc_stderr": 0.0433913832257986, + "acc_norm": 0.30701754385964913, + "acc_norm_stderr": 0.0433913832257986 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.41284403669724773, + "acc_stderr": 0.0211091281334139, + "acc_norm": 0.41284403669724773, + "acc_norm_stderr": 0.0211091281334139 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.04134913018303316, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.04134913018303316 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.41830065359477125, + "acc_stderr": 0.028245134024387292, + "acc_norm": 0.41830065359477125, + "acc_norm_stderr": 0.028245134024387292 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5867768595041323, + "acc_stderr": 0.04495087843548408, + "acc_norm": 0.5867768595041323, + "acc_norm_stderr": 0.04495087843548408 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3684210526315789, + "acc_stderr": 0.03925523381052932, + "acc_norm": 0.3684210526315789, + "acc_norm_stderr": 0.03925523381052932 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3415032679738562, + "acc_stderr": 0.019184639328092487, + "acc_norm": 0.3415032679738562, + "acc_norm_stderr": 0.019184639328092487 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.31560283687943264, + "acc_stderr": 
0.027724989449509314, + "acc_norm": 0.31560283687943264, + "acc_norm_stderr": 0.027724989449509314 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.36607142857142855, + "acc_stderr": 0.04572372358737431, + "acc_norm": 0.36607142857142855, + "acc_norm_stderr": 0.04572372358737431 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.28703703703703703, + "acc_stderr": 0.03085199299325701, + "acc_norm": 0.28703703703703703, + "acc_norm_stderr": 0.03085199299325701 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.288268156424581, + "acc_stderr": 0.015149132860209422, + "acc_norm": 0.288268156424581, + "acc_norm_stderr": 0.015149132860209422 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.29044117647058826, + "acc_stderr": 0.02757646862274052, + "acc_norm": 0.29044117647058826, + "acc_norm_stderr": 0.02757646862274052 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4448979591836735, + "acc_stderr": 0.031814251181977865, + "acc_norm": 0.4448979591836735, + "acc_norm_stderr": 0.031814251181977865 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.4810126582278481, + "acc_stderr": 0.03252375148090448, + "acc_norm": 0.4810126582278481, + "acc_norm_stderr": 0.03252375148090448 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2985658409387223, + "acc_stderr": 0.011688060141794228, + "acc_norm": 0.2985658409387223, + "acc_norm_stderr": 0.011688060141794228 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.3382352941176471, + "acc_stderr": 0.033205746129454324, + "acc_norm": 0.3382352941176471, + "acc_norm_stderr": 0.033205746129454324 + }, + 
"harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4303030303030303, + "acc_stderr": 0.03866225962879077, + "acc_norm": 0.4303030303030303, + "acc_norm_stderr": 0.03866225962879077 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.27539779681762544, + "mc1_stderr": 0.015638135667775523, + "mc2": 0.4610042851178602, + "mc2_stderr": 0.015721481482296386 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2975206611570248, + "acc_stderr": 0.015717742205089924, + "acc_norm": 0.37662337662337664, + "acc_norm_stderr": 0.016658799874051965 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + 
"harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "openlynn/Llama-3-Soliloquy-8B", + "model_sha": "fbe4409da81a2faa3e295448bd75e54a2f1cfe12", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/papercat404/llama-test2-fp16/result_2024-03-19 10:21:04.json b/papercat404/llama-test2-fp16/result_2024-03-19 10:21:04.json new file mode 100644 index 0000000000000000000000000000000000000000..ee3f0fa2fc30e0c096b46e97999ea5941166224c --- /dev/null +++ b/papercat404/llama-test2-fp16/result_2024-03-19 10:21:04.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 
0.19965870307167236, + "acc_stderr": 0.011681625756888676, + "acc_norm": 0.24488054607508533, + "acc_norm_stderr": 0.012566273985131356 + }, + "harness|ko_hellaswag|10": { + "acc": 0.2502489543915555, + "acc_stderr": 0.004322710911026375, + "acc_norm": 0.2408882692690699, + "acc_norm_stderr": 0.004267486869180763 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.3216374269005848, + "acc_stderr": 0.03582529442573122, + "acc_norm": 0.3216374269005848, + "acc_norm_stderr": 0.03582529442573122 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.17475728155339806, + "acc_stderr": 0.037601780060266196, + "acc_norm": 0.17475728155339806, + "acc_norm_stderr": 0.037601780060266196 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.23754789272030652, + "acc_stderr": 0.015218733046150191, + "acc_norm": 0.23754789272030652, + "acc_norm_stderr": 0.015218733046150191 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.18518518518518517, + "acc_stderr": 0.03355677216313142, + "acc_norm": 0.18518518518518517, + "acc_norm_stderr": 0.03355677216313142 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932268, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932268 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.26382978723404255, + "acc_stderr": 0.028809989854102987, + "acc_norm": 0.26382978723404255, + "acc_norm_stderr": 0.028809989854102987 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.28313253012048195, + "acc_stderr": 0.03507295431370518, + "acc_norm": 0.28313253012048195, + "acc_norm_stderr": 0.03507295431370518 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.18971061093247588, + "acc_stderr": 0.022268196258783218, + "acc_norm": 0.18971061093247588, + "acc_norm_stderr": 0.022268196258783218 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.31390134529147984, + "acc_stderr": 0.031146796482972465, + "acc_norm": 0.31390134529147984, + "acc_norm_stderr": 0.031146796482972465 + }, + 
"harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2595419847328244, + "acc_stderr": 0.03844876139785271, + "acc_norm": 0.2595419847328244, + "acc_norm_stderr": 0.03844876139785271 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.17676767676767677, + "acc_stderr": 0.027178752639044915, + "acc_norm": 0.17676767676767677, + "acc_norm_stderr": 0.027178752639044915 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2413793103448276, + "acc_stderr": 0.03565998174135302, + "acc_norm": 0.2413793103448276, + "acc_norm_stderr": 0.03565998174135302 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237654, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237654 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.21008403361344538, + "acc_stderr": 0.026461398717471874, + "acc_norm": 0.21008403361344538, + "acc_norm_stderr": 0.026461398717471874 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.20256410256410257, + "acc_stderr": 0.020377660970371393, + "acc_norm": 0.20256410256410257, + "acc_norm_stderr": 0.020377660970371393 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.18, + "acc_stderr": 0.038612291966536955, + "acc_norm": 0.18, + "acc_norm_stderr": 0.038612291966536955 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.04236511258094633, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.04236511258094633 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.15270935960591134, + "acc_stderr": 0.025308904539380627, + "acc_norm": 0.15270935960591134, + 
"acc_norm_stderr": 0.025308904539380627 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.1774193548387097, + "acc_stderr": 0.021732540689329276, + "acc_norm": 0.1774193548387097, + "acc_norm_stderr": 0.021732540689329276 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2905982905982906, + "acc_stderr": 0.029745048572674057, + "acc_norm": 0.2905982905982906, + "acc_norm_stderr": 0.029745048572674057 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.21509433962264152, + "acc_stderr": 0.025288394502891377, + "acc_norm": 0.21509433962264152, + "acc_norm_stderr": 0.025288394502891377 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03955932861795833, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03955932861795833 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2111111111111111, + "acc_stderr": 0.024882116857655075, + "acc_norm": 0.2111111111111111, + "acc_norm_stderr": 0.024882116857655075 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.1986754966887417, + "acc_stderr": 0.03257847384436775, + "acc_norm": 0.1986754966887417, + "acc_norm_stderr": 0.03257847384436775 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.24378109452736318, + "acc_stderr": 0.030360490154014645, + "acc_norm": 0.24378109452736318, + "acc_norm_stderr": 0.030360490154014645 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.20809248554913296, + "acc_stderr": 0.030952890217749884, + "acc_norm": 0.20809248554913296, + "acc_norm_stderr": 0.030952890217749884 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.20899470899470898, + "acc_stderr": 0.020940481565334835, + "acc_norm": 0.20899470899470898, + "acc_norm_stderr": 0.020940481565334835 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2569444444444444, + "acc_stderr": 0.03653946969442099, + "acc_norm": 0.2569444444444444, + "acc_norm_stderr": 0.03653946969442099 + }, + "harness|ko_mmlu_college_chemistry|5": { + 
"acc": 0.2, + "acc_stderr": 0.040201512610368445, + "acc_norm": 0.2, + "acc_norm_stderr": 0.040201512610368445 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542129, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542129 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.24855491329479767, + "acc_stderr": 0.023267528432100174, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 0.023267528432100174 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.22085889570552147, + "acc_stderr": 0.03259177392742178, + "acc_norm": 0.22085889570552147, + "acc_norm_stderr": 0.03259177392742178 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.21604938271604937, + "acc_stderr": 0.022899162918445796, + "acc_norm": 0.21604938271604937, + "acc_norm_stderr": 0.022899162918445796 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.19689119170984457, + "acc_stderr": 0.028697873971860677, + "acc_norm": 0.19689119170984457, + "acc_norm_stderr": 0.028697873971860677 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.039994238792813365, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.039994238792813365 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.1926605504587156, + "acc_stderr": 0.016909276884936097, + "acc_norm": 0.1926605504587156, + "acc_norm_stderr": 0.016909276884936097 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.04040610178208841, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.04040610178208841 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.02392915551735129, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.02392915551735129 + }, + 
"harness|ko_mmlu_business_ethics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2396694214876033, + "acc_stderr": 0.038968789850704164, + "acc_norm": 0.2396694214876033, + "acc_norm_stderr": 0.038968789850704164 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.17763157894736842, + "acc_stderr": 0.03110318238312338, + "acc_norm": 0.17763157894736842, + "acc_norm_stderr": 0.03110318238312338 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.25, + "acc_stderr": 0.01751781884501444, + "acc_norm": 0.25, + "acc_norm_stderr": 0.01751781884501444 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.23404255319148937, + "acc_stderr": 0.025257861359432414, + "acc_norm": 0.23404255319148937, + "acc_norm_stderr": 0.025257861359432414 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3125, + "acc_stderr": 0.043994650575715215, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.043994650575715215 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.1527777777777778, + "acc_stderr": 0.024536326026134238, + "acc_norm": 0.1527777777777778, + "acc_norm_stderr": 0.024536326026134238 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23798882681564246, + "acc_stderr": 0.014242630070574892, + "acc_norm": 0.23798882681564246, + "acc_norm_stderr": 0.014242630070574892 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.18382352941176472, + "acc_stderr": 0.02352924218519311, + "acc_norm": 0.18382352941176472, + "acc_norm_stderr": 0.02352924218519311 + }, + 
"harness|ko_mmlu_security_studies|5": { + "acc": 0.18775510204081633, + "acc_stderr": 0.025000256039546198, + "acc_norm": 0.18775510204081633, + "acc_norm_stderr": 0.025000256039546198 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.270042194092827, + "acc_stderr": 0.028900721906293426, + "acc_norm": 0.270042194092827, + "acc_norm_stderr": 0.028900721906293426 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2457627118644068, + "acc_stderr": 0.010996156635142692, + "acc_norm": 0.2457627118644068, + "acc_norm_stderr": 0.010996156635142692 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.25, + "acc_stderr": 0.03039153369274154, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03039153369274154 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03225078108306289, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03225078108306289 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.26560587515299877, + "mc1_stderr": 0.0154610276272536, + "mc2": 0.5043596147707021, + "mc2_stderr": 0.017295954976670278 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.08618654073199528, + "acc_stderr": 0.009648578143920889, + "acc_norm": 0.29634002361275086, + "acc_norm_stderr": 0.015699701628594236 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + 
"harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + 
"harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "papercat404/llama-test2-fp16", + "model_sha": "8fd1c4c573a60ff1bcb15cc478630110bcc6b5fd", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/papercat404/mergecat_v0.1/result_2024-03-26 05:06:05.json b/papercat404/mergecat_v0.1/result_2024-03-26 05:06:05.json new file mode 100644 index 0000000000000000000000000000000000000000..6a9f502022f095da612bfc4aa5894ad744cbdc54 --- /dev/null +++ b/papercat404/mergecat_v0.1/result_2024-03-26 05:06:05.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.6979522184300341, + "acc_stderr": 0.013417519144716413, + "acc_norm": 0.7406143344709898, + "acc_norm_stderr": 0.012808273573927099 + }, + "harness|ko_hellaswag|10": { + "acc": 0.5685122485560645, + "acc_stderr": 0.004942716091996096, + "acc_norm": 0.7218681537542322, + "acc_norm_stderr": 0.004471629546895082 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.7309941520467836, + "acc_stderr": 0.03401052620104089, + "acc_norm": 0.7309941520467836, + "acc_norm_stderr": 0.03401052620104089 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.7378640776699029, + "acc_stderr": 0.043546310772605956, + "acc_norm": 0.7378640776699029, + "acc_norm_stderr": 0.043546310772605956 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.7037037037037037, + "acc_stderr": 0.016328814422102052, + "acc_norm": 0.7037037037037037, + "acc_norm_stderr": 0.016328814422102052 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45925925925925926, + "acc_stderr": 0.04304979692464244, + "acc_norm": 0.45925925925925926, + "acc_norm_stderr": 0.04304979692464244 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + 
"harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.5234042553191489, + "acc_stderr": 0.0326501947503358, + "acc_norm": 0.5234042553191489, + "acc_norm_stderr": 0.0326501947503358 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.5, + "acc_stderr": 0.03892494720807614, + "acc_norm": 0.5, + "acc_norm_stderr": 0.03892494720807614 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6559485530546624, + "acc_stderr": 0.026981478043648043, + "acc_norm": 0.6559485530546624, + "acc_norm_stderr": 0.026981478043648043 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.6367713004484304, + "acc_stderr": 0.03227790442850499, + "acc_norm": 0.6367713004484304, + "acc_norm_stderr": 0.03227790442850499 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5725190839694656, + "acc_stderr": 0.043389203057924, + "acc_norm": 0.5725190839694656, + "acc_norm_stderr": 0.043389203057924 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.797979797979798, + "acc_stderr": 0.028606204289229876, + "acc_norm": 0.797979797979798, + "acc_norm_stderr": 0.028606204289229876 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5724137931034483, + "acc_stderr": 0.041227371113703316, + "acc_norm": 0.5724137931034483, + "acc_norm_stderr": 0.041227371113703316 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.30392156862745096, + "acc_stderr": 0.045766654032077615, + "acc_norm": 0.30392156862745096, + "acc_norm_stderr": 0.045766654032077615 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6302521008403361, + "acc_stderr": 0.03135709599613591, + "acc_norm": 0.6302521008403361, + "acc_norm_stderr": 0.03135709599613591 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.6384615384615384, + "acc_stderr": 0.02435958146539701, + "acc_norm": 0.6384615384615384, + "acc_norm_stderr": 
0.02435958146539701 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.71, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.71, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.04557239513497751, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.04557239513497751 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4827586206896552, + "acc_stderr": 0.035158955511656986, + "acc_norm": 0.4827586206896552, + "acc_norm_stderr": 0.035158955511656986 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.635483870967742, + "acc_stderr": 0.02737987122994325, + "acc_norm": 0.635483870967742, + "acc_norm_stderr": 0.02737987122994325 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.8333333333333334, + "acc_stderr": 0.024414947304543688, + "acc_norm": 0.8333333333333334, + "acc_norm_stderr": 0.024414947304543688 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5735849056603773, + "acc_stderr": 0.030437794342983052, + "acc_norm": 0.5735849056603773, + "acc_norm_stderr": 0.030437794342983052 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6272727272727273, + "acc_stderr": 0.046313813194254656, + "acc_norm": 0.6272727272727273, + "acc_norm_stderr": 0.046313813194254656 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3851851851851852, + "acc_stderr": 0.029670906124630882, + "acc_norm": 0.3851851851851852, + "acc_norm_stderr": 0.029670906124630882 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3509933774834437, + "acc_stderr": 0.03896981964257374, + "acc_norm": 0.3509933774834437, + "acc_norm_stderr": 0.03896981964257374 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7412935323383084, + "acc_stderr": 0.030965903123573037, + "acc_norm": 
0.7412935323383084, + "acc_norm_stderr": 0.030965903123573037 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5375722543352601, + "acc_stderr": 0.03801685104524458, + "acc_norm": 0.5375722543352601, + "acc_norm_stderr": 0.03801685104524458 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.4656084656084656, + "acc_stderr": 0.025690321762493855, + "acc_norm": 0.4656084656084656, + "acc_norm_stderr": 0.025690321762493855 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.6527777777777778, + "acc_stderr": 0.039812405437178615, + "acc_norm": 0.6527777777777778, + "acc_norm_stderr": 0.039812405437178615 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.78, + "acc_stderr": 0.04163331998932263, + "acc_norm": 0.78, + "acc_norm_stderr": 0.04163331998932263 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.6098265895953757, + "acc_stderr": 0.02626167760780665, + "acc_norm": 0.6098265895953757, + "acc_norm_stderr": 0.02626167760780665 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.6012269938650306, + "acc_stderr": 0.03847021420456024, + "acc_norm": 0.6012269938650306, + "acc_norm_stderr": 0.03847021420456024 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6728395061728395, + "acc_stderr": 0.026105673861409825, + "acc_norm": 0.6728395061728395, + "acc_norm_stderr": 0.026105673861409825 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7616580310880829, + "acc_stderr": 0.030748905363909895, + "acc_norm": 0.7616580310880829, + "acc_norm_stderr": 0.030748905363909895 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.45614035087719296, + "acc_stderr": 0.046854730419077895, 
+ "acc_norm": 0.45614035087719296, + "acc_norm_stderr": 0.046854730419077895 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.7504587155963303, + "acc_stderr": 0.018553897629501617, + "acc_norm": 0.7504587155963303, + "acc_norm_stderr": 0.018553897629501617 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.044444444444444495, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.044444444444444495 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.6339869281045751, + "acc_stderr": 0.02758281141515961, + "acc_norm": 0.6339869281045751, + "acc_norm_stderr": 0.02758281141515961 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.7, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.7, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.71900826446281, + "acc_stderr": 0.04103203830514512, + "acc_norm": 0.71900826446281, + "acc_norm_stderr": 0.04103203830514512 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.6644736842105263, + "acc_stderr": 0.03842498559395268, + "acc_norm": 0.6644736842105263, + "acc_norm_stderr": 0.03842498559395268 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5833333333333334, + "acc_stderr": 0.019944914136873586, + "acc_norm": 0.5833333333333334, + "acc_norm_stderr": 0.019944914136873586 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3971631205673759, + "acc_stderr": 0.029189805673587102, + "acc_norm": 0.3971631205673759, + "acc_norm_stderr": 0.029189805673587102 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.41964285714285715, + "acc_stderr": 0.04684099321077106, + "acc_norm": 0.41964285714285715, + "acc_norm_stderr": 0.04684099321077106 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.034093869469927006, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.034093869469927006 + }, + "harness|ko_mmlu_moral_scenarios|5": { + 
"acc": 0.41675977653631285, + "acc_stderr": 0.01648913496243895, + "acc_norm": 0.41675977653631285, + "acc_norm_stderr": 0.01648913496243895 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.7, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.7, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5514705882352942, + "acc_stderr": 0.030211479609121596, + "acc_norm": 0.5514705882352942, + "acc_norm_stderr": 0.030211479609121596 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6816326530612244, + "acc_stderr": 0.02982253379398204, + "acc_norm": 0.6816326530612244, + "acc_norm_stderr": 0.02982253379398204 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.759493670886076, + "acc_stderr": 0.02782078198114968, + "acc_norm": 0.759493670886076, + "acc_norm_stderr": 0.02782078198114968 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.44589308996088656, + "acc_stderr": 0.012695244711379781, + "acc_norm": 0.44589308996088656, + "acc_norm_stderr": 0.012695244711379781 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6617647058823529, + "acc_stderr": 0.0332057461294543, + "acc_norm": 0.6617647058823529, + "acc_norm_stderr": 0.0332057461294543 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6424242424242425, + "acc_stderr": 0.03742597043806587, + "acc_norm": 0.6424242424242425, + "acc_norm_stderr": 0.03742597043806587 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.6988984088127295, + "mc1_stderr": 0.0160589990261006, + "mc2": 0.7955101991194582, + "mc2_stderr": 0.013322424943357492 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.538370720188902, + "acc_stderr": 0.01713966022184556, + "acc_norm": 0.5560802833530106, + "acc_norm_stderr": 0.017081884623542543 + } + 
}, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + 
"harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "papercat404/mergecat_v0.1", + "model_sha": "762a4e8f02b06df39c16eef075aa3e8a835da00e", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/papercat404/testcat_v0.2/result_2024-03-26 02:22:02.json b/papercat404/testcat_v0.2/result_2024-03-26 02:22:02.json new file mode 100644 index 0000000000000000000000000000000000000000..41de0b31cc7a9dac6e3c3118e31cf62ac3e4269c --- /dev/null +++ b/papercat404/testcat_v0.2/result_2024-03-26 02:22:02.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.17918088737201365, + "acc_stderr": 0.011207045216615672, + "acc_norm": 0.22866894197952217, + "acc_norm_stderr": 0.0122728535825408 + }, + "harness|ko_hellaswag|10": { + "acc": 0.2515435172276439, + "acc_stderr": 0.0043301342197628444, + "acc_norm": 0.24785899223262298, + "acc_norm_stderr": 0.004308870978210404 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.17543859649122806, + "acc_stderr": 0.029170885500727665, + "acc_norm": 0.17543859649122806, + "acc_norm_stderr": 0.029170885500727665 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.3786407766990291, + 
"acc_stderr": 0.04802694698258973, + "acc_norm": 0.3786407766990291, + "acc_norm_stderr": 0.04802694698258973 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.20434227330779056, + "acc_stderr": 0.014419123980931904, + "acc_norm": 0.20434227330779056, + "acc_norm_stderr": 0.014419123980931904 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.22962962962962963, + "acc_stderr": 0.036333844140734636, + "acc_norm": 0.22962962962962963, + "acc_norm_stderr": 0.036333844140734636 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.20851063829787234, + "acc_stderr": 0.026556982117838746, + "acc_norm": 0.20851063829787234, + "acc_norm_stderr": 0.026556982117838746 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.1927710843373494, + "acc_stderr": 0.030709824050565264, + "acc_norm": 0.1927710843373494, + "acc_norm_stderr": 0.030709824050565264 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.24115755627009647, + "acc_stderr": 0.024296594034763426, + "acc_norm": 0.24115755627009647, + "acc_norm_stderr": 0.024296594034763426 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.10762331838565023, + "acc_stderr": 0.020799400082880004, + "acc_norm": 0.10762331838565023, + "acc_norm_stderr": 0.020799400082880004 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2824427480916031, + "acc_stderr": 0.03948406125768361, + "acc_norm": 0.2824427480916031, + "acc_norm_stderr": 0.03948406125768361 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909284, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909284 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.35353535353535354, + "acc_stderr": 0.03406086723547153, + "acc_norm": 0.35353535353535354, + "acc_norm_stderr": 0.03406086723547153 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 
0.2413793103448276, + "acc_stderr": 0.03565998174135302, + "acc_norm": 0.2413793103448276, + "acc_norm_stderr": 0.03565998174135302 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.37254901960784315, + "acc_stderr": 0.048108401480826346, + "acc_norm": 0.37254901960784315, + "acc_norm_stderr": 0.048108401480826346 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3487394957983193, + "acc_stderr": 0.030956636328566548, + "acc_norm": 0.3487394957983193, + "acc_norm_stderr": 0.030956636328566548 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3641025641025641, + "acc_stderr": 0.024396672985094778, + "acc_norm": 0.3641025641025641, + "acc_norm_stderr": 0.024396672985094778 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.18, + "acc_stderr": 0.03861229196653694, + "acc_norm": 0.18, + "acc_norm_stderr": 0.03861229196653694 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.18, + "acc_stderr": 0.038612291966536955, + "acc_norm": 0.18, + "acc_norm_stderr": 0.038612291966536955 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.21296296296296297, + "acc_stderr": 0.03957835471980981, + "acc_norm": 0.21296296296296297, + "acc_norm_stderr": 0.03957835471980981 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.28078817733990147, + "acc_stderr": 0.0316185633535861, + "acc_norm": 0.28078817733990147, + "acc_norm_stderr": 0.0316185633535861 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3161290322580645, + "acc_stderr": 0.026450874489042764, + "acc_norm": 0.3161290322580645, + "acc_norm_stderr": 0.026450874489042764 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.19658119658119658, + "acc_stderr": 0.02603538609895129, + "acc_norm": 0.19658119658119658, + "acc_norm_stderr": 0.02603538609895129 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2981132075471698, + "acc_stderr": 0.028152837942493864, + "acc_norm": 0.2981132075471698, + "acc_norm_stderr": 0.028152837942493864 + }, + 
"harness|ko_mmlu_public_relations|5": { + "acc": 0.22727272727272727, + "acc_stderr": 0.040139645540727735, + "acc_norm": 0.22727272727272727, + "acc_norm_stderr": 0.040139645540727735 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.02684205787383371, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.02684205787383371 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33112582781456956, + "acc_stderr": 0.038425817186598696, + "acc_norm": 0.33112582781456956, + "acc_norm_stderr": 0.038425817186598696 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.26865671641791045, + "acc_stderr": 0.03134328358208954, + "acc_norm": 0.26865671641791045, + "acc_norm_stderr": 0.03134328358208954 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3352601156069364, + "acc_stderr": 0.03599586301247078, + "acc_norm": 0.3352601156069364, + "acc_norm_stderr": 0.03599586301247078 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2671957671957672, + "acc_stderr": 0.02278967314577656, + "acc_norm": 0.2671957671957672, + "acc_norm_stderr": 0.02278967314577656 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2638888888888889, + "acc_stderr": 0.03685651095897532, + "acc_norm": 0.2638888888888889, + "acc_norm_stderr": 0.03685651095897532 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2138728323699422, + "acc_stderr": 0.022075709251757173, + "acc_norm": 0.2138728323699422, + "acc_norm_stderr": 0.022075709251757173 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2331288343558282, + "acc_stderr": 0.0332201579577674, + "acc_norm": 0.2331288343558282, + 
"acc_norm_stderr": 0.0332201579577674 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.22530864197530864, + "acc_stderr": 0.02324620264781975, + "acc_norm": 0.22530864197530864, + "acc_norm_stderr": 0.02324620264781975 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.36787564766839376, + "acc_stderr": 0.034801756684660366, + "acc_norm": 0.36787564766839376, + "acc_norm_stderr": 0.034801756684660366 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.039994238792813365, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.039994238792813365 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3486238532110092, + "acc_stderr": 0.020431254090714328, + "acc_norm": 0.3486238532110092, + "acc_norm_stderr": 0.020431254090714328 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.36507936507936506, + "acc_stderr": 0.04306241259127153, + "acc_norm": 0.36507936507936506, + "acc_norm_stderr": 0.04306241259127153 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.026090162504279053, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.026090162504279053 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.14049586776859505, + "acc_stderr": 0.031722334260021585, + "acc_norm": 0.14049586776859505, + "acc_norm_stderr": 0.031722334260021585 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3355263157894737, + "acc_stderr": 0.038424985593952694, + "acc_norm": 0.3355263157894737, + "acc_norm_stderr": 0.038424985593952694 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2173202614379085, + "acc_stderr": 
0.016684820929148598, + "acc_norm": 0.2173202614379085, + "acc_norm_stderr": 0.016684820929148598 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.24113475177304963, + "acc_stderr": 0.02551873104953776, + "acc_norm": 0.24113475177304963, + "acc_norm_stderr": 0.02551873104953776 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.16071428571428573, + "acc_stderr": 0.034859460964757394, + "acc_norm": 0.16071428571428573, + "acc_norm_stderr": 0.034859460964757394 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.0340470532865388, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.0340470532865388 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27262569832402234, + "acc_stderr": 0.014893391735249608, + "acc_norm": 0.27262569832402234, + "acc_norm_stderr": 0.014893391735249608 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.19, + "acc_stderr": 0.03942772444036624, + "acc_norm": 0.19, + "acc_norm_stderr": 0.03942772444036624 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4485294117647059, + "acc_stderr": 0.030211479609121593, + "acc_norm": 0.4485294117647059, + "acc_norm_stderr": 0.030211479609121593 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4, + "acc_stderr": 0.03136250240935892, + "acc_norm": 0.4, + "acc_norm_stderr": 0.03136250240935892 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.20253164556962025, + "acc_stderr": 0.026160568246601464, + "acc_norm": 0.20253164556962025, + "acc_norm_stderr": 0.026160568246601464 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.24445893089960888, + "acc_stderr": 0.010976425013113907, + "acc_norm": 0.24445893089960888, + "acc_norm_stderr": 0.010976425013113907 + }, + 
"harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.030587591351604246, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.030587591351604246 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2545454545454545, + "acc_stderr": 0.03401506715249039, + "acc_norm": 0.2545454545454545, + "acc_norm_stderr": 0.03401506715249039 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.23133414932680538, + "mc1_stderr": 0.014761945174862677, + "mc2": 0.46715544333136166, + "mc2_stderr": 0.016983007105976163 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.07674144037780402, + "acc_stderr": 0.009151482698827049, + "acc_norm": 0.2762691853600944, + "acc_norm_stderr": 0.015373387500464474 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + 
"harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "papercat404/testcat_v0.2", + "model_sha": "27295e455bbcf045131e8b7fb471b3f64748a081", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/papercat404/testcat_v0.4/result_2024-04-09 07:40:23.json b/papercat404/testcat_v0.4/result_2024-04-09 07:40:23.json new file mode 100644 index 
0000000000000000000000000000000000000000..e3ba8b777541d4542fe213c30ecd3b5b269522bd --- /dev/null +++ b/papercat404/testcat_v0.4/result_2024-04-09 07:40:23.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.6885665529010239, + "acc_stderr": 0.01353247209985094, + "acc_norm": 0.7320819112627986, + "acc_norm_stderr": 0.012942030195136432 + }, + "harness|ko_hellaswag|10": { + "acc": 0.5089623580959968, + "acc_stderr": 0.004988979750014422, + "acc_norm": 0.6719776936865166, + "acc_norm_stderr": 0.004685334844038641 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.7192982456140351, + "acc_stderr": 0.034462962170884265, + "acc_norm": 0.7192982456140351, + "acc_norm_stderr": 0.034462962170884265 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.7475728155339806, + "acc_stderr": 0.04301250399690878, + "acc_norm": 0.7475728155339806, + "acc_norm_stderr": 0.04301250399690878 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.7049808429118773, + "acc_stderr": 0.016308363772932717, + "acc_norm": 0.7049808429118773, + "acc_norm_stderr": 0.016308363772932717 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4740740740740741, + "acc_stderr": 0.04313531696750574, + "acc_norm": 0.4740740740740741, + "acc_norm_stderr": 0.04313531696750574 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.5319148936170213, + "acc_stderr": 0.03261936918467381, + "acc_norm": 0.5319148936170213, + "acc_norm_stderr": 0.03261936918467381 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4879518072289157, + "acc_stderr": 0.03891364495835821, + "acc_norm": 0.4879518072289157, + "acc_norm_stderr": 0.03891364495835821 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6430868167202572, + "acc_stderr": 0.027210420375934023, + "acc_norm": 0.6430868167202572, + "acc_norm_stderr": 
0.027210420375934023 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.6502242152466368, + "acc_stderr": 0.03200736719484503, + "acc_norm": 0.6502242152466368, + "acc_norm_stderr": 0.03200736719484503 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6106870229007634, + "acc_stderr": 0.04276486542814591, + "acc_norm": 0.6106870229007634, + "acc_norm_stderr": 0.04276486542814591 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7525252525252525, + "acc_stderr": 0.03074630074212451, + "acc_norm": 0.7525252525252525, + "acc_norm_stderr": 0.03074630074212451 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5586206896551724, + "acc_stderr": 0.04137931034482757, + "acc_norm": 0.5586206896551724, + "acc_norm_stderr": 0.04137931034482757 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.04533838195929776, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.04533838195929776 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6260504201680672, + "acc_stderr": 0.03142946637883708, + "acc_norm": 0.6260504201680672, + "acc_norm_stderr": 0.03142946637883708 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.6153846153846154, + "acc_stderr": 0.0246667449151872, + "acc_norm": 0.6153846153846154, + "acc_norm_stderr": 0.0246667449151872 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.67, + "acc_stderr": 0.047258156262526094, + "acc_norm": 0.67, + "acc_norm_stderr": 0.047258156262526094 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6388888888888888, + "acc_stderr": 0.04643454608906275, + "acc_norm": 0.6388888888888888, + 
"acc_norm_stderr": 0.04643454608906275 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4630541871921182, + "acc_stderr": 0.035083705204426656, + "acc_norm": 0.4630541871921182, + "acc_norm_stderr": 0.035083705204426656 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6290322580645161, + "acc_stderr": 0.027480541887953593, + "acc_norm": 0.6290322580645161, + "acc_norm_stderr": 0.027480541887953593 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.8333333333333334, + "acc_stderr": 0.02441494730454368, + "acc_norm": 0.8333333333333334, + "acc_norm_stderr": 0.02441494730454368 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5962264150943396, + "acc_stderr": 0.03019761160019795, + "acc_norm": 0.5962264150943396, + "acc_norm_stderr": 0.03019761160019795 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6454545454545455, + "acc_stderr": 0.04582004841505417, + "acc_norm": 0.6454545454545455, + "acc_norm_stderr": 0.04582004841505417 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.029723278961476664, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.029723278961476664 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33774834437086093, + "acc_stderr": 0.038615575462551684, + "acc_norm": 0.33774834437086093, + "acc_norm_stderr": 0.038615575462551684 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7164179104477612, + "acc_stderr": 0.03187187537919796, + "acc_norm": 0.7164179104477612, + "acc_norm_stderr": 0.03187187537919796 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5491329479768786, + "acc_stderr": 0.037940126746970296, + "acc_norm": 0.5491329479768786, + "acc_norm_stderr": 0.037940126746970296 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.43915343915343913, + "acc_stderr": 0.025559920550531013, + "acc_norm": 0.43915343915343913, + "acc_norm_stderr": 0.025559920550531013 + }, + "harness|ko_mmlu_college_biology|5": { + 
"acc": 0.5902777777777778, + "acc_stderr": 0.04112490974670787, + "acc_norm": 0.5902777777777778, + "acc_norm_stderr": 0.04112490974670787 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.77, + "acc_stderr": 0.04229525846816508, + "acc_norm": 0.77, + "acc_norm_stderr": 0.04229525846816508 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.6098265895953757, + "acc_stderr": 0.02626167760780665, + "acc_norm": 0.6098265895953757, + "acc_norm_stderr": 0.02626167760780665 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.6257668711656442, + "acc_stderr": 0.03802068102899615, + "acc_norm": 0.6257668711656442, + "acc_norm_stderr": 0.03802068102899615 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.026229649178821163, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.026229649178821163 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7668393782383419, + "acc_stderr": 0.030516111371476008, + "acc_norm": 0.7668393782383419, + "acc_norm_stderr": 0.030516111371476008 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.4649122807017544, + "acc_stderr": 0.046920083813689104, + "acc_norm": 0.4649122807017544, + "acc_norm_stderr": 0.046920083813689104 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.7394495412844037, + "acc_stderr": 0.018819182034850068, + "acc_norm": 0.7394495412844037, + "acc_norm_stderr": 0.018819182034850068 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.46825396825396826, + "acc_stderr": 0.044631127206771704, + "acc_norm": 0.46825396825396826, + "acc_norm_stderr": 0.044631127206771704 + }, + 
"harness|ko_mmlu_nutrition|5": { + "acc": 0.6209150326797386, + "acc_stderr": 0.027780141207023344, + "acc_norm": 0.6209150326797386, + "acc_norm_stderr": 0.027780141207023344 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.65, + "acc_stderr": 0.047937248544110175, + "acc_norm": 0.65, + "acc_norm_stderr": 0.047937248544110175 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7355371900826446, + "acc_stderr": 0.04026187527591207, + "acc_norm": 0.7355371900826446, + "acc_norm_stderr": 0.04026187527591207 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.6644736842105263, + "acc_stderr": 0.038424985593952674, + "acc_norm": 0.6644736842105263, + "acc_norm_stderr": 0.038424985593952674 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5866013071895425, + "acc_stderr": 0.019922115682786682, + "acc_norm": 0.5866013071895425, + "acc_norm_stderr": 0.019922115682786682 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.4148936170212766, + "acc_stderr": 0.029392236584612503, + "acc_norm": 0.4148936170212766, + "acc_norm_stderr": 0.029392236584612503 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.04697113923010213, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.04697113923010213 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.49537037037037035, + "acc_stderr": 0.03409825519163572, + "acc_norm": 0.49537037037037035, + "acc_norm_stderr": 0.03409825519163572 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.3474860335195531, + "acc_stderr": 0.01592556406020815, + "acc_norm": 0.3474860335195531, + "acc_norm_stderr": 0.01592556406020815 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.73, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.73, + "acc_norm_stderr": 
0.04461960433384741 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5330882352941176, + "acc_stderr": 0.030306257722468307, + "acc_norm": 0.5330882352941176, + "acc_norm_stderr": 0.030306257722468307 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6775510204081633, + "acc_stderr": 0.029923100563683913, + "acc_norm": 0.6775510204081633, + "acc_norm_stderr": 0.029923100563683913 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7383966244725738, + "acc_stderr": 0.028609516716994934, + "acc_norm": 0.7383966244725738, + "acc_norm_stderr": 0.028609516716994934 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.4439374185136897, + "acc_stderr": 0.012689708167787679, + "acc_norm": 0.4439374185136897, + "acc_norm_stderr": 0.012689708167787679 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6568627450980392, + "acc_stderr": 0.03332139944668086, + "acc_norm": 0.6568627450980392, + "acc_norm_stderr": 0.03332139944668086 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6121212121212121, + "acc_stderr": 0.0380491365397101, + "acc_norm": 0.6121212121212121, + "acc_norm_stderr": 0.0380491365397101 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.6462668298653611, + "mc1_stderr": 0.016737814358846147, + "mc2": 0.7500723202447881, + "mc2_stderr": 0.013964568376760541 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5088547815820543, + "acc_stderr": 0.01718765819933674, + "acc_norm": 0.5336481700118064, + "acc_norm_stderr": 0.01715138411713187 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + 
"harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + 
"harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "papercat404/testcat_v0.4", + "model_sha": "befe71e2a8e747684bb5dd33ad18c63738dd1f5f", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/peterkang/mymodel_v1/result_2024-01-03 12:04:54.json b/peterkang/mymodel_v1/result_2024-01-03 12:04:54.json new file mode 100644 index 0000000000000000000000000000000000000000..15c27c86310b212f8448f8f6a20837a6607935d5 --- /dev/null +++ b/peterkang/mymodel_v1/result_2024-01-03 12:04:54.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.1945392491467577, + "acc_stderr": 0.011567709174648728, + "acc_norm": 0.23720136518771331, + "acc_norm_stderr": 0.012430399829260847 + }, + "harness|ko_hellaswag|10": { + "acc": 0.270264887472615, + "acc_stderr": 0.004431889783633818, + "acc_norm": 0.3028281218880701, + "acc_norm_stderr": 0.004585424513012111 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.27485380116959063, + "acc_stderr": 0.034240429246915845, + "acc_norm": 0.27485380116959063, + "acc_norm_stderr": 0.034240429246915845 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.21359223300970873, + "acc_stderr": 0.04058042015646034, + "acc_norm": 0.21359223300970873, + "acc_norm_stderr": 0.04058042015646034 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2771392081736909, + "acc_stderr": 0.016005636294122414, + "acc_norm": 0.2771392081736909, + "acc_norm_stderr": 0.016005636294122414 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.31851851851851853, + "acc_stderr": 0.04024778401977111, + "acc_norm": 0.31851851851851853, + 
"acc_norm_stderr": 0.04024778401977111 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.22, + "acc_stderr": 0.0416333199893227, + "acc_norm": 0.22, + "acc_norm_stderr": 0.0416333199893227 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.24680851063829787, + "acc_stderr": 0.028185441301234095, + "acc_norm": 0.24680851063829787, + "acc_norm_stderr": 0.028185441301234095 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.2710843373493976, + "acc_stderr": 0.03460579907553027, + "acc_norm": 0.2710843373493976, + "acc_norm_stderr": 0.03460579907553027 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2861736334405145, + "acc_stderr": 0.025670259242188936, + "acc_norm": 0.2861736334405145, + "acc_norm_stderr": 0.025670259242188936 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.23318385650224216, + "acc_stderr": 0.028380391147094737, + "acc_norm": 0.23318385650224216, + "acc_norm_stderr": 0.028380391147094737 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2595419847328244, + "acc_stderr": 0.03844876139785271, + "acc_norm": 0.2595419847328244, + "acc_norm_stderr": 0.03844876139785271 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.25252525252525254, + "acc_stderr": 0.030954055470365904, + "acc_norm": 0.25252525252525254, + "acc_norm_stderr": 0.030954055470365904 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.31724137931034485, + "acc_stderr": 0.038783523721386236, + "acc_norm": 0.31724137931034485, + "acc_norm_stderr": 0.038783523721386236 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.042801058373643966, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.042801058373643966 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.21428571428571427, + "acc_stderr": 0.02665353159671549, 
+ "acc_norm": 0.21428571428571427, + "acc_norm_stderr": 0.02665353159671549 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.24358974358974358, + "acc_stderr": 0.021763733684173926, + "acc_norm": 0.24358974358974358, + "acc_norm_stderr": 0.021763733684173926 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.25, + "acc_stderr": 0.04186091791394607, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04186091791394607 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.270935960591133, + "acc_stderr": 0.031270907132976984, + "acc_norm": 0.270935960591133, + "acc_norm_stderr": 0.031270907132976984 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2709677419354839, + "acc_stderr": 0.025284416114900152, + "acc_norm": 0.2709677419354839, + "acc_norm_stderr": 0.025284416114900152 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2564102564102564, + "acc_stderr": 0.028605953702004253, + "acc_norm": 0.2564102564102564, + "acc_norm_stderr": 0.028605953702004253 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.21509433962264152, + "acc_stderr": 0.02528839450289136, + "acc_norm": 0.21509433962264152, + "acc_norm_stderr": 0.02528839450289136 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.23636363636363636, + "acc_stderr": 0.040693063197213775, + "acc_norm": 0.23636363636363636, + "acc_norm_stderr": 0.040693063197213775 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2740740740740741, + "acc_stderr": 0.027195934804085622, + "acc_norm": 0.2740740740740741, + "acc_norm_stderr": 0.027195934804085622 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 
0.03802039760107903, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.03802039760107903 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.2835820895522388, + "acc_stderr": 0.031871875379197966, + "acc_norm": 0.2835820895522388, + "acc_norm_stderr": 0.031871875379197966 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.23121387283236994, + "acc_stderr": 0.03214737302029469, + "acc_norm": 0.23121387283236994, + "acc_norm_stderr": 0.03214737302029469 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2698412698412698, + "acc_stderr": 0.022860838309232072, + "acc_norm": 0.2698412698412698, + "acc_norm_stderr": 0.022860838309232072 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2638888888888889, + "acc_stderr": 0.03685651095897532, + "acc_norm": 0.2638888888888889, + "acc_norm_stderr": 0.03685651095897532 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.23, + "acc_stderr": 0.042295258468165044, + "acc_norm": 0.23, + "acc_norm_stderr": 0.042295258468165044 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.26011560693641617, + "acc_stderr": 0.023618678310069356, + "acc_norm": 0.26011560693641617, + "acc_norm_stderr": 0.023618678310069356 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.27607361963190186, + "acc_stderr": 0.0351238528370505, + "acc_norm": 0.27607361963190186, + "acc_norm_stderr": 0.0351238528370505 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2932098765432099, + "acc_stderr": 0.025329888171900926, + "acc_norm": 0.2932098765432099, + "acc_norm_stderr": 0.025329888171900926 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.21761658031088082, + 
"acc_stderr": 0.02977866303775296, + "acc_norm": 0.21761658031088082, + "acc_norm_stderr": 0.02977866303775296 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.0414243971948936, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.0414243971948936 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.22385321100917432, + "acc_stderr": 0.017871217767790222, + "acc_norm": 0.22385321100917432, + "acc_norm_stderr": 0.017871217767790222 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.03718489006818115, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.03718489006818115 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.24836601307189543, + "acc_stderr": 0.02473998135511359, + "acc_norm": 0.24836601307189543, + "acc_norm_stderr": 0.02473998135511359 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.39669421487603307, + "acc_stderr": 0.04465869780531009, + "acc_norm": 0.39669421487603307, + "acc_norm_stderr": 0.04465869780531009 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3092105263157895, + "acc_stderr": 0.037610708698674805, + "acc_norm": 0.3092105263157895, + "acc_norm_stderr": 0.037610708698674805 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.24019607843137256, + "acc_stderr": 0.01728276069516741, + "acc_norm": 0.24019607843137256, + "acc_norm_stderr": 0.01728276069516741 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2695035460992908, + "acc_stderr": 0.026469036818590627, + "acc_norm": 0.2695035460992908, + "acc_norm_stderr": 0.026469036818590627 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.22321428571428573, + "acc_stderr": 0.03952301967702511, + "acc_norm": 0.22321428571428573, + "acc_norm_stderr": 0.03952301967702511 + }, + 
"harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.36574074074074076, + "acc_stderr": 0.032847388576472056, + "acc_norm": 0.36574074074074076, + "acc_norm_stderr": 0.032847388576472056 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2435754189944134, + "acc_stderr": 0.014355911964767864, + "acc_norm": 0.2435754189944134, + "acc_norm_stderr": 0.014355911964767864 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.43014705882352944, + "acc_stderr": 0.030074971917302875, + "acc_norm": 0.43014705882352944, + "acc_norm_stderr": 0.030074971917302875 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.24489795918367346, + "acc_stderr": 0.027529637440174937, + "acc_norm": 0.24489795918367346, + "acc_norm_stderr": 0.027529637440174937 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.26582278481012656, + "acc_stderr": 0.028756799629658335, + "acc_norm": 0.26582278481012656, + "acc_norm_stderr": 0.028756799629658335 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.242503259452412, + "acc_stderr": 0.010946570966348783, + "acc_norm": 0.242503259452412, + "acc_norm_stderr": 0.010946570966348783 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.029771775228145638, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.029771775228145638 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.24242424242424243, + "acc_stderr": 0.033464098810559534, + "acc_norm": 0.24242424242424243, + "acc_norm_stderr": 0.033464098810559534 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2350061199510404, + "mc1_stderr": 
0.01484306150773162, + "mc2": 0.39729765622115015, + "mc2_stderr": 0.016675198241453474 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.24793388429752067, + "acc_stderr": 0.014846044968252252, + "acc_norm": 0.27744982290436837, + "acc_norm_stderr": 0.015393630236605973 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + 
"harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "peterkang/mymodel_v1", + "model_sha": "ce55e1c93375ef5cb4085651c05d7dee04a6f95b", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/peterkang/mymodel_v2/result_2024-01-03 16:53:19.json b/peterkang/mymodel_v2/result_2024-01-03 16:53:19.json new file mode 100644 index 0000000000000000000000000000000000000000..d3f97290a236f98dfe9fb28c8adcae29af015139 --- /dev/null +++ b/peterkang/mymodel_v2/result_2024-01-03 16:53:19.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.1945392491467577, + "acc_stderr": 0.011567709174648728, + "acc_norm": 0.23720136518771331, + "acc_norm_stderr": 0.012430399829260847 + }, + "harness|ko_hellaswag|10": { + "acc": 0.270264887472615, + "acc_stderr": 0.004431889783633818, + "acc_norm": 0.3028281218880701, + "acc_norm_stderr": 
0.004585424513012111 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.27485380116959063, + "acc_stderr": 0.034240429246915845, + "acc_norm": 0.27485380116959063, + "acc_norm_stderr": 0.034240429246915845 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.21359223300970873, + "acc_stderr": 0.04058042015646034, + "acc_norm": 0.21359223300970873, + "acc_norm_stderr": 0.04058042015646034 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2771392081736909, + "acc_stderr": 0.016005636294122414, + "acc_norm": 0.2771392081736909, + "acc_norm_stderr": 0.016005636294122414 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.31851851851851853, + "acc_stderr": 0.04024778401977111, + "acc_norm": 0.31851851851851853, + "acc_norm_stderr": 0.04024778401977111 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.22, + "acc_stderr": 0.0416333199893227, + "acc_norm": 0.22, + "acc_norm_stderr": 0.0416333199893227 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.24680851063829787, + "acc_stderr": 0.028185441301234095, + "acc_norm": 0.24680851063829787, + "acc_norm_stderr": 0.028185441301234095 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.2710843373493976, + "acc_stderr": 0.03460579907553027, + "acc_norm": 0.2710843373493976, + "acc_norm_stderr": 0.03460579907553027 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2861736334405145, + "acc_stderr": 0.025670259242188936, + "acc_norm": 0.2861736334405145, + "acc_norm_stderr": 0.025670259242188936 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.23318385650224216, + "acc_stderr": 0.028380391147094737, + "acc_norm": 0.23318385650224216, + "acc_norm_stderr": 0.028380391147094737 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2595419847328244, + "acc_stderr": 0.03844876139785271, + "acc_norm": 0.2595419847328244, + "acc_norm_stderr": 0.03844876139785271 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 
0.04512608598542128 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.25252525252525254, + "acc_stderr": 0.030954055470365904, + "acc_norm": 0.25252525252525254, + "acc_norm_stderr": 0.030954055470365904 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.31724137931034485, + "acc_stderr": 0.038783523721386236, + "acc_norm": 0.31724137931034485, + "acc_norm_stderr": 0.038783523721386236 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.042801058373643966, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.042801058373643966 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.21428571428571427, + "acc_stderr": 0.02665353159671549, + "acc_norm": 0.21428571428571427, + "acc_norm_stderr": 0.02665353159671549 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.24358974358974358, + "acc_stderr": 0.021763733684173926, + "acc_norm": 0.24358974358974358, + "acc_norm_stderr": 0.021763733684173926 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.25, + "acc_stderr": 0.04186091791394607, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04186091791394607 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.270935960591133, + "acc_stderr": 0.031270907132976984, + "acc_norm": 0.270935960591133, + "acc_norm_stderr": 0.031270907132976984 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2709677419354839, + "acc_stderr": 0.025284416114900152, + "acc_norm": 0.2709677419354839, + "acc_norm_stderr": 0.025284416114900152 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2564102564102564, + "acc_stderr": 0.028605953702004253, + "acc_norm": 
0.2564102564102564, + "acc_norm_stderr": 0.028605953702004253 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.21509433962264152, + "acc_stderr": 0.02528839450289136, + "acc_norm": 0.21509433962264152, + "acc_norm_stderr": 0.02528839450289136 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.23636363636363636, + "acc_stderr": 0.040693063197213775, + "acc_norm": 0.23636363636363636, + "acc_norm_stderr": 0.040693063197213775 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2740740740740741, + "acc_stderr": 0.027195934804085622, + "acc_norm": 0.2740740740740741, + "acc_norm_stderr": 0.027195934804085622 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.03802039760107903, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.03802039760107903 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.2835820895522388, + "acc_stderr": 0.031871875379197966, + "acc_norm": 0.2835820895522388, + "acc_norm_stderr": 0.031871875379197966 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.23121387283236994, + "acc_stderr": 0.03214737302029469, + "acc_norm": 0.23121387283236994, + "acc_norm_stderr": 0.03214737302029469 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2698412698412698, + "acc_stderr": 0.022860838309232072, + "acc_norm": 0.2698412698412698, + "acc_norm_stderr": 0.022860838309232072 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2638888888888889, + "acc_stderr": 0.03685651095897532, + "acc_norm": 0.2638888888888889, + "acc_norm_stderr": 0.03685651095897532 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.23, + "acc_stderr": 0.042295258468165044, + "acc_norm": 0.23, + "acc_norm_stderr": 0.042295258468165044 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.26011560693641617, + 
"acc_stderr": 0.023618678310069356, + "acc_norm": 0.26011560693641617, + "acc_norm_stderr": 0.023618678310069356 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.27607361963190186, + "acc_stderr": 0.0351238528370505, + "acc_norm": 0.27607361963190186, + "acc_norm_stderr": 0.0351238528370505 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2932098765432099, + "acc_stderr": 0.025329888171900926, + "acc_norm": 0.2932098765432099, + "acc_norm_stderr": 0.025329888171900926 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.21761658031088082, + "acc_stderr": 0.02977866303775296, + "acc_norm": 0.21761658031088082, + "acc_norm_stderr": 0.02977866303775296 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.0414243971948936, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.0414243971948936 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.22385321100917432, + "acc_stderr": 0.017871217767790222, + "acc_norm": 0.22385321100917432, + "acc_norm_stderr": 0.017871217767790222 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.03718489006818115, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.03718489006818115 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.24836601307189543, + "acc_stderr": 0.02473998135511359, + "acc_norm": 0.24836601307189543, + "acc_norm_stderr": 0.02473998135511359 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.39669421487603307, + "acc_stderr": 0.04465869780531009, + "acc_norm": 0.39669421487603307, + "acc_norm_stderr": 0.04465869780531009 + }, + "harness|ko_mmlu_astronomy|5": { + 
"acc": 0.3092105263157895, + "acc_stderr": 0.037610708698674805, + "acc_norm": 0.3092105263157895, + "acc_norm_stderr": 0.037610708698674805 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.24019607843137256, + "acc_stderr": 0.01728276069516741, + "acc_norm": 0.24019607843137256, + "acc_norm_stderr": 0.01728276069516741 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2695035460992908, + "acc_stderr": 0.026469036818590627, + "acc_norm": 0.2695035460992908, + "acc_norm_stderr": 0.026469036818590627 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.22321428571428573, + "acc_stderr": 0.03952301967702511, + "acc_norm": 0.22321428571428573, + "acc_norm_stderr": 0.03952301967702511 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.36574074074074076, + "acc_stderr": 0.032847388576472056, + "acc_norm": 0.36574074074074076, + "acc_norm_stderr": 0.032847388576472056 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2435754189944134, + "acc_stderr": 0.014355911964767864, + "acc_norm": 0.2435754189944134, + "acc_norm_stderr": 0.014355911964767864 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.43014705882352944, + "acc_stderr": 0.030074971917302875, + "acc_norm": 0.43014705882352944, + "acc_norm_stderr": 0.030074971917302875 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.24489795918367346, + "acc_stderr": 0.027529637440174937, + "acc_norm": 0.24489795918367346, + "acc_norm_stderr": 0.027529637440174937 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.26582278481012656, + "acc_stderr": 0.028756799629658335, + "acc_norm": 0.26582278481012656, 
+ "acc_norm_stderr": 0.028756799629658335 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.242503259452412, + "acc_stderr": 0.010946570966348783, + "acc_norm": 0.242503259452412, + "acc_norm_stderr": 0.010946570966348783 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.029771775228145638, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.029771775228145638 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.24242424242424243, + "acc_stderr": 0.033464098810559534, + "acc_norm": 0.24242424242424243, + "acc_norm_stderr": 0.033464098810559534 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2350061199510404, + "mc1_stderr": 0.01484306150773162, + "mc2": 0.39729765622115015, + "mc2_stderr": 0.016675198241453474 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.24793388429752067, + "acc_stderr": 0.014846044968252252, + "acc_norm": 0.27744982290436837, + "acc_norm_stderr": 0.015393630236605973 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + 
"harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "peterkang/mymodel_v2", + "model_sha": "668e1f1f8bc60060310ff60e38132e7b4308579b", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at 
end of file diff --git a/peterkang/mymodel_v3/result_2024-01-03 18:51:00.json b/peterkang/mymodel_v3/result_2024-01-03 18:51:00.json new file mode 100644 index 0000000000000000000000000000000000000000..284a530c1ed8ceed4dcdc3cfe4e0ff756d75c51b --- /dev/null +++ b/peterkang/mymodel_v3/result_2024-01-03 18:51:00.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3848122866894198, + "acc_stderr": 0.014218371065251102, + "acc_norm": 0.4129692832764505, + "acc_norm_stderr": 0.014388344935398324 + }, + "harness|ko_hellaswag|10": { + "acc": 0.40679147580163316, + "acc_stderr": 0.0049023140557256, + "acc_norm": 0.4896434973112926, + "acc_norm_stderr": 0.004988710917169328 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5146198830409356, + "acc_stderr": 0.03833185275213025, + "acc_norm": 0.5146198830409356, + "acc_norm_stderr": 0.03833185275213025 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4174757281553398, + "acc_stderr": 0.048828405482122375, + "acc_norm": 0.4174757281553398, + "acc_norm_stderr": 0.048828405482122375 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4367816091954023, + "acc_stderr": 0.017736470837800684, + "acc_norm": 0.4367816091954023, + "acc_norm_stderr": 0.017736470837800684 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3037037037037037, + "acc_stderr": 0.03972552884785137, + "acc_norm": 0.3037037037037037, + "acc_norm_stderr": 0.03972552884785137 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.34893617021276596, + "acc_stderr": 0.031158522131357794, + "acc_norm": 0.34893617021276596, + "acc_norm_stderr": 0.031158522131357794 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.45180722891566266, + "acc_stderr": 0.038743715565879536, + "acc_norm": 0.45180722891566266, + "acc_norm_stderr": 0.038743715565879536 + }, + 
"harness|ko_mmlu_philosophy|5": { + "acc": 0.3762057877813505, + "acc_stderr": 0.027513925683549427, + "acc_norm": 0.3762057877813505, + "acc_norm_stderr": 0.027513925683549427 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.47533632286995514, + "acc_stderr": 0.03351695167652628, + "acc_norm": 0.47533632286995514, + "acc_norm_stderr": 0.03351695167652628 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4580152671755725, + "acc_stderr": 0.04369802690578756, + "acc_norm": 0.4580152671755725, + "acc_norm_stderr": 0.04369802690578756 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.4494949494949495, + "acc_stderr": 0.035441324919479704, + "acc_norm": 0.4494949494949495, + "acc_norm_stderr": 0.035441324919479704 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4413793103448276, + "acc_stderr": 0.04137931034482758, + "acc_norm": 0.4413793103448276, + "acc_norm_stderr": 0.04137931034482758 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.17647058823529413, + "acc_stderr": 0.03793281185307808, + "acc_norm": 0.17647058823529413, + "acc_norm_stderr": 0.03793281185307808 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.42016806722689076, + "acc_stderr": 0.03206183783236153, + "acc_norm": 0.42016806722689076, + "acc_norm_stderr": 0.03206183783236153 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3974358974358974, + "acc_stderr": 0.024811920017903836, + "acc_norm": 0.3974358974358974, + "acc_norm_stderr": 0.024811920017903836 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.57, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.57, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 
0.045126085985421276 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.04803752235190192, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.04803752235190192 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.32019704433497537, + "acc_stderr": 0.032826493853041504, + "acc_norm": 0.32019704433497537, + "acc_norm_stderr": 0.032826493853041504 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3258064516129032, + "acc_stderr": 0.026662010578567104, + "acc_norm": 0.3258064516129032, + "acc_norm_stderr": 0.026662010578567104 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6025641025641025, + "acc_stderr": 0.03205953453789293, + "acc_norm": 0.6025641025641025, + "acc_norm_stderr": 0.03205953453789293 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3886792452830189, + "acc_stderr": 0.030000485448675986, + "acc_norm": 0.3886792452830189, + "acc_norm_stderr": 0.030000485448675986 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.39090909090909093, + "acc_stderr": 0.04673752333670238, + "acc_norm": 0.39090909090909093, + "acc_norm_stderr": 0.04673752333670238 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.28888888888888886, + "acc_stderr": 0.027634907264178544, + "acc_norm": 0.28888888888888886, + "acc_norm_stderr": 0.027634907264178544 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + "acc_stderr": 0.03684881521389023, + "acc_norm": 0.2847682119205298, + "acc_norm_stderr": 0.03684881521389023 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5671641791044776, + "acc_stderr": 0.03503490923673281, + "acc_norm": 0.5671641791044776, + "acc_norm_stderr": 0.03503490923673281 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3352601156069364, + "acc_stderr": 0.03599586301247077, + "acc_norm": 0.3352601156069364, + "acc_norm_stderr": 0.03599586301247077 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.335978835978836, 
+ "acc_stderr": 0.02432631052914915, + "acc_norm": 0.335978835978836, + "acc_norm_stderr": 0.02432631052914915 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.03942082639927213, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.03942082639927213 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.57, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.57, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4624277456647399, + "acc_stderr": 0.026842985519615375, + "acc_norm": 0.4624277456647399, + "acc_norm_stderr": 0.026842985519615375 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3619631901840491, + "acc_stderr": 0.037757007291414416, + "acc_norm": 0.3619631901840491, + "acc_norm_stderr": 0.037757007291414416 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4012345679012346, + "acc_stderr": 0.027272582849839796, + "acc_norm": 0.4012345679012346, + "acc_norm_stderr": 0.027272582849839796 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.40932642487046633, + "acc_stderr": 0.03548608168860806, + "acc_norm": 0.40932642487046633, + "acc_norm_stderr": 0.03548608168860806 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.32456140350877194, + "acc_stderr": 0.04404556157374768, + "acc_norm": 0.32456140350877194, + "acc_norm_stderr": 0.04404556157374768 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3963302752293578, + "acc_stderr": 0.020971469947900525, + "acc_norm": 0.3963302752293578, + "acc_norm_stderr": 0.020971469947900525 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 
0.31746031746031744, + "acc_stderr": 0.04163453031302859, + "acc_norm": 0.31746031746031744, + "acc_norm_stderr": 0.04163453031302859 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.42483660130718953, + "acc_stderr": 0.028304576673141107, + "acc_norm": 0.42483660130718953, + "acc_norm_stderr": 0.028304576673141107 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.4628099173553719, + "acc_stderr": 0.04551711196104218, + "acc_norm": 0.4628099173553719, + "acc_norm_stderr": 0.04551711196104218 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3355263157894737, + "acc_stderr": 0.03842498559395268, + "acc_norm": 0.3355263157894737, + "acc_norm_stderr": 0.03842498559395268 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.35130718954248363, + "acc_stderr": 0.01931267606578657, + "acc_norm": 0.35130718954248363, + "acc_norm_stderr": 0.01931267606578657 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.33687943262411346, + "acc_stderr": 0.028195534873966727, + "acc_norm": 0.33687943262411346, + "acc_norm_stderr": 0.028195534873966727 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3392857142857143, + "acc_stderr": 0.04493949068613539, + "acc_norm": 0.3392857142857143, + "acc_norm_stderr": 0.04493949068613539 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.03167468706828979, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.03167468706828979 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.17988826815642459, + "acc_stderr": 0.012846037138924089, + "acc_norm": 0.17988826815642459, + "acc_norm_stderr": 0.012846037138924089 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + 
"harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3088235294117647, + "acc_stderr": 0.028064998167040094, + "acc_norm": 0.3088235294117647, + "acc_norm_stderr": 0.028064998167040094 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.40816326530612246, + "acc_stderr": 0.03146465712827423, + "acc_norm": 0.40816326530612246, + "acc_norm_stderr": 0.03146465712827423 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5274261603375527, + "acc_stderr": 0.03249822718301303, + "acc_norm": 0.5274261603375527, + "acc_norm_stderr": 0.03249822718301303 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3122555410691004, + "acc_stderr": 0.011835798135683192, + "acc_norm": 0.3122555410691004, + "acc_norm_stderr": 0.011835798135683192 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.3284313725490196, + "acc_stderr": 0.03296245110172229, + "acc_norm": 0.3284313725490196, + "acc_norm_stderr": 0.03296245110172229 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.38181818181818183, + "acc_stderr": 0.03793713171165635, + "acc_norm": 0.38181818181818183, + "acc_norm_stderr": 0.03793713171165635 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3084455324357405, + "mc1_stderr": 0.01616803938315687, + "mc2": 0.4625169191060848, + "mc2_stderr": 0.017016551324773616 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.33530106257378983, + "acc_stderr": 0.016230981232989813, + "acc_norm": 0.35064935064935066, + "acc_norm_stderr": 0.0164055569038933 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + 
"harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + 
"harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "peterkang/mymodel_v3", + "model_sha": "ed008e27c111343795f14c113f4642e2a6a2ea31", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/peterkang/mymodel_v4/result_2024-01-04 06:32:15.json b/peterkang/mymodel_v4/result_2024-01-04 06:32:15.json new file mode 100644 index 0000000000000000000000000000000000000000..094758e0eeb629fff7426d5e7dba7b06ca26ca89 --- /dev/null +++ b/peterkang/mymodel_v4/result_2024-01-04 06:32:15.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.35409556313993173, + "acc_stderr": 0.013975454122756557, + "acc_norm": 0.3796928327645051, + "acc_norm_stderr": 0.014182119866974872 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4148575980880303, + "acc_stderr": 0.004916905095810843, + "acc_norm": 0.4870543716391157, + "acc_norm_stderr": 0.00498810866317976 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.47368421052631576, + "acc_stderr": 0.038295098689947286, + "acc_norm": 0.47368421052631576, + "acc_norm_stderr": 0.038295098689947286 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5436893203883495, + "acc_stderr": 0.049318019942204146, + "acc_norm": 0.5436893203883495, + "acc_norm_stderr": 0.049318019942204146 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.46360153256704983, + "acc_stderr": 0.017832524079593265, + "acc_norm": 0.46360153256704983, + "acc_norm_stderr": 0.017832524079593265 + }, 
+ "harness|ko_mmlu_anatomy|5": { + "acc": 0.37777777777777777, + "acc_stderr": 0.04188307537595853, + "acc_norm": 0.37777777777777777, + "acc_norm_stderr": 0.04188307537595853 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.37446808510638296, + "acc_stderr": 0.03163910665367291, + "acc_norm": 0.37446808510638296, + "acc_norm_stderr": 0.03163910665367291 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.40963855421686746, + "acc_stderr": 0.03828401115079022, + "acc_norm": 0.40963855421686746, + "acc_norm_stderr": 0.03828401115079022 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4565916398713826, + "acc_stderr": 0.028290869054197604, + "acc_norm": 0.4565916398713826, + "acc_norm_stderr": 0.028290869054197604 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4170403587443946, + "acc_stderr": 0.03309266936071721, + "acc_norm": 0.4170403587443946, + "acc_norm_stderr": 0.03309266936071721 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4351145038167939, + "acc_stderr": 0.043482080516448585, + "acc_norm": 0.4351145038167939, + "acc_norm_stderr": 0.043482080516448585 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5151515151515151, + "acc_stderr": 0.0356071651653106, + "acc_norm": 0.5151515151515151, + "acc_norm_stderr": 0.0356071651653106 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.496551724137931, + "acc_stderr": 0.041665675771015785, + "acc_norm": 0.496551724137931, + "acc_norm_stderr": 0.041665675771015785 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237655, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 
0.04092563958237655 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4831932773109244, + "acc_stderr": 0.03246013680375308, + "acc_norm": 0.4831932773109244, + "acc_norm_stderr": 0.03246013680375308 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.46153846153846156, + "acc_stderr": 0.02527589207024063, + "acc_norm": 0.46153846153846156, + "acc_norm_stderr": 0.02527589207024063 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5277777777777778, + "acc_stderr": 0.04826217294139894, + "acc_norm": 0.5277777777777778, + "acc_norm_stderr": 0.04826217294139894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3891625615763547, + "acc_stderr": 0.03430462416103872, + "acc_norm": 0.3891625615763547, + "acc_norm_stderr": 0.03430462416103872 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4612903225806452, + "acc_stderr": 0.02835863485983692, + "acc_norm": 0.4612903225806452, + "acc_norm_stderr": 0.02835863485983692 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7094017094017094, + "acc_stderr": 0.029745048572674078, + "acc_norm": 0.7094017094017094, + "acc_norm_stderr": 0.029745048572674078 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.36981132075471695, + "acc_stderr": 0.02971142188010793, + "acc_norm": 0.36981132075471695, + "acc_norm_stderr": 0.02971142188010793 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5181818181818182, + "acc_stderr": 0.04785964010794914, + "acc_norm": 0.5181818181818182, + "acc_norm_stderr": 0.04785964010794914 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.028742040903948496, + "acc_norm": 
0.3333333333333333, + "acc_norm_stderr": 0.028742040903948496 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.23178807947019867, + "acc_stderr": 0.03445406271987054, + "acc_norm": 0.23178807947019867, + "acc_norm_stderr": 0.03445406271987054 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5970149253731343, + "acc_stderr": 0.034683432951111266, + "acc_norm": 0.5970149253731343, + "acc_norm_stderr": 0.034683432951111266 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.36416184971098264, + "acc_stderr": 0.03669072477416907, + "acc_norm": 0.36416184971098264, + "acc_norm_stderr": 0.03669072477416907 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3492063492063492, + "acc_stderr": 0.024552292209342665, + "acc_norm": 0.3492063492063492, + "acc_norm_stderr": 0.024552292209342665 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.040166600304512336, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.040166600304512336 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5, + "acc_stderr": 0.026919095102908273, + "acc_norm": 0.5, + "acc_norm_stderr": 0.026919095102908273 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.50920245398773, + "acc_stderr": 0.03927705600787443, + "acc_norm": 0.50920245398773, + "acc_norm_stderr": 0.03927705600787443 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.45987654320987653, + "acc_stderr": 0.027731022753539274, + "acc_norm": 0.45987654320987653, + "acc_norm_stderr": 0.027731022753539274 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + 
"acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.45077720207253885, + "acc_stderr": 0.03590910952235524, + "acc_norm": 0.45077720207253885, + "acc_norm_stderr": 0.03590910952235524 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.04227054451232199, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.04227054451232199 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.45871559633027525, + "acc_stderr": 0.021364122533881695, + "acc_norm": 0.45871559633027525, + "acc_norm_stderr": 0.021364122533881695 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.36507936507936506, + "acc_stderr": 0.043062412591271526, + "acc_norm": 0.36507936507936506, + "acc_norm_stderr": 0.043062412591271526 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4934640522875817, + "acc_stderr": 0.028627470550556047, + "acc_norm": 0.4934640522875817, + "acc_norm_stderr": 0.028627470550556047 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.628099173553719, + "acc_stderr": 0.04412015806624504, + "acc_norm": 0.628099173553719, + "acc_norm_stderr": 0.04412015806624504 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.375, + "acc_stderr": 0.039397364351956274, + "acc_norm": 0.375, + "acc_norm_stderr": 0.039397364351956274 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.019659922493623336, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.019659922493623336 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.38652482269503546, + "acc_stderr": 0.029049190342543458, + "acc_norm": 0.38652482269503546, + "acc_norm_stderr": 0.029049190342543458 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.36607142857142855, + "acc_stderr": 
0.04572372358737431, + "acc_norm": 0.36607142857142855, + "acc_norm_stderr": 0.04572372358737431 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.41203703703703703, + "acc_stderr": 0.03356787758160835, + "acc_norm": 0.41203703703703703, + "acc_norm_stderr": 0.03356787758160835 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.30614525139664805, + "acc_stderr": 0.015414494487903198, + "acc_norm": 0.30614525139664805, + "acc_norm_stderr": 0.015414494487903198 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.62, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.62, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.34558823529411764, + "acc_stderr": 0.02888819310398864, + "acc_norm": 0.34558823529411764, + "acc_norm_stderr": 0.02888819310398864 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5102040816326531, + "acc_stderr": 0.03200255347893783, + "acc_norm": 0.5102040816326531, + "acc_norm_stderr": 0.03200255347893783 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5569620253164557, + "acc_stderr": 0.03233532777533484, + "acc_norm": 0.5569620253164557, + "acc_norm_stderr": 0.03233532777533484 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3122555410691004, + "acc_stderr": 0.011835798135683176, + "acc_norm": 0.3122555410691004, + "acc_norm_stderr": 0.011835798135683176 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.37254901960784315, + "acc_stderr": 0.033933885849584046, + "acc_norm": 0.37254901960784315, + "acc_norm_stderr": 0.033933885849584046 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4727272727272727, + "acc_stderr": 0.03898531605579419, + "acc_norm": 0.4727272727272727, + "acc_norm_stderr": 0.03898531605579419 + 
}, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.31456548347613217, + "mc1_stderr": 0.016255241993179185, + "mc2": 0.45541236453172873, + "mc2_stderr": 0.017058931921492763 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.35064935064935066, + "acc_stderr": 0.016405556903893302, + "acc_norm": 0.3600944510035419, + "acc_norm_stderr": 0.016503686720440076 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 
1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "peterkang/mymodel_v4", + "model_sha": "c0a557951cdc971ebc44eaf8f93dd5995ca5fec4", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/peterkang/mymodel_v5/result_2024-01-04 07:03:49.json b/peterkang/mymodel_v5/result_2024-01-04 07:03:49.json new file mode 100644 index 0000000000000000000000000000000000000000..ce631e2324c34f1faf6ddc42a6781cd1718f3001 --- /dev/null +++ b/peterkang/mymodel_v5/result_2024-01-04 07:03:49.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.31399317406143346, + "acc_stderr": 0.013562691224726291, + "acc_norm": 0.34812286689419797, + "acc_norm_stderr": 0.013921008595179337 + }, + "harness|ko_hellaswag|10": { + "acc": 0.35749850627365065, + "acc_stderr": 0.00478283835222253, + 
"acc_norm": 0.4448317068313085, + "acc_norm_stderr": 0.00495931519801116 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4619883040935672, + "acc_stderr": 0.03823727092882307, + "acc_norm": 0.4619883040935672, + "acc_norm_stderr": 0.03823727092882307 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5048543689320388, + "acc_stderr": 0.04950504382128921, + "acc_norm": 0.5048543689320388, + "acc_norm_stderr": 0.04950504382128921 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.44699872286079184, + "acc_stderr": 0.01777922523339421, + "acc_norm": 0.44699872286079184, + "acc_norm_stderr": 0.01777922523339421 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.31851851851851853, + "acc_stderr": 0.040247784019771096, + "acc_norm": 0.31851851851851853, + "acc_norm_stderr": 0.040247784019771096 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.40425531914893614, + "acc_stderr": 0.03208115750788684, + "acc_norm": 0.40425531914893614, + "acc_norm_stderr": 0.03208115750788684 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39156626506024095, + "acc_stderr": 0.03799857454479636, + "acc_norm": 0.39156626506024095, + "acc_norm_stderr": 0.03799857454479636 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.40192926045016075, + "acc_stderr": 0.02784647600593048, + "acc_norm": 0.40192926045016075, + "acc_norm_stderr": 0.02784647600593048 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4349775784753363, + "acc_stderr": 0.033272833702713445, + "acc_norm": 0.4349775784753363, + "acc_norm_stderr": 0.033272833702713445 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.46564885496183206, + "acc_stderr": 0.04374928560599738, + "acc_norm": 0.46564885496183206, + "acc_norm_stderr": 0.04374928560599738 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.41, + "acc_stderr": 
0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.4393939393939394, + "acc_stderr": 0.0353608594752948, + "acc_norm": 0.4393939393939394, + "acc_norm_stderr": 0.0353608594752948 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4689655172413793, + "acc_stderr": 0.04158632762097828, + "acc_norm": 0.4689655172413793, + "acc_norm_stderr": 0.04158632762097828 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.041583075330832865, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.041583075330832865 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4117647058823529, + "acc_stderr": 0.031968769891957786, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.031968769891957786 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4, + "acc_stderr": 0.024838811988033158, + "acc_norm": 0.4, + "acc_norm_stderr": 0.024838811988033158 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932269, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932269 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3694581280788177, + "acc_stderr": 0.03395970381998575, + "acc_norm": 0.3694581280788177, + "acc_norm_stderr": 0.03395970381998575 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3870967741935484, + "acc_stderr": 0.027709359675032488, + "acc_norm": 0.3870967741935484, + "acc_norm_stderr": 0.027709359675032488 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7393162393162394, + "acc_stderr": 0.02876034895652341, + 
"acc_norm": 0.7393162393162394, + "acc_norm_stderr": 0.02876034895652341 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4075471698113208, + "acc_stderr": 0.030242233800854505, + "acc_norm": 0.4075471698113208, + "acc_norm_stderr": 0.030242233800854505 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4909090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.4909090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.32592592592592595, + "acc_stderr": 0.028578348365473072, + "acc_norm": 0.32592592592592595, + "acc_norm_stderr": 0.028578348365473072 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.271523178807947, + "acc_stderr": 0.03631329803969653, + "acc_norm": 0.271523178807947, + "acc_norm_stderr": 0.03631329803969653 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5472636815920398, + "acc_stderr": 0.035197027175769155, + "acc_norm": 0.5472636815920398, + "acc_norm_stderr": 0.035197027175769155 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.32947976878612717, + "acc_stderr": 0.03583901754736412, + "acc_norm": 0.32947976878612717, + "acc_norm_stderr": 0.03583901754736412 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.024278568024307695, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.024278568024307695 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3125, + "acc_stderr": 0.038760854559127644, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.038760854559127644 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.62, + "acc_stderr": 0.04878317312145634, + "acc_norm": 0.62, + "acc_norm_stderr": 0.04878317312145634 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.476878612716763, + "acc_stderr": 
0.026890297881303128, + "acc_norm": 0.476878612716763, + "acc_norm_stderr": 0.026890297881303128 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.38650306748466257, + "acc_stderr": 0.03825825548848607, + "acc_norm": 0.38650306748466257, + "acc_norm_stderr": 0.03825825548848607 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.37962962962962965, + "acc_stderr": 0.02700252103451648, + "acc_norm": 0.37962962962962965, + "acc_norm_stderr": 0.02700252103451648 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.43523316062176165, + "acc_stderr": 0.03578038165008586, + "acc_norm": 0.43523316062176165, + "acc_norm_stderr": 0.03578038165008586 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.044346007015849245, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.044346007015849245 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.41284403669724773, + "acc_stderr": 0.021109128133413903, + "acc_norm": 0.41284403669724773, + "acc_norm_stderr": 0.021109128133413903 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30158730158730157, + "acc_stderr": 0.041049472699033945, + "acc_norm": 0.30158730158730157, + "acc_norm_stderr": 0.041049472699033945 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4673202614379085, + "acc_stderr": 0.028568699752225882, + "acc_norm": 0.4673202614379085, + "acc_norm_stderr": 0.028568699752225882 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6198347107438017, + "acc_stderr": 0.044313245019684304, + "acc_norm": 0.6198347107438017, + "acc_norm_stderr": 0.044313245019684304 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 
0.34210526315789475, + "acc_stderr": 0.03860731599316092, + "acc_norm": 0.34210526315789475, + "acc_norm_stderr": 0.03860731599316092 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3709150326797386, + "acc_stderr": 0.01954210156485412, + "acc_norm": 0.3709150326797386, + "acc_norm_stderr": 0.01954210156485412 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3617021276595745, + "acc_stderr": 0.02866382014719949, + "acc_norm": 0.3617021276595745, + "acc_norm_stderr": 0.02866382014719949 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.04697113923010212, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.04697113923010212 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.03214952147802749, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.03214952147802749 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.28044692737430166, + "acc_stderr": 0.015024083883322884, + "acc_norm": 0.28044692737430166, + "acc_norm_stderr": 0.015024083883322884 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.2757352941176471, + "acc_stderr": 0.027146271936625162, + "acc_norm": 0.2757352941176471, + "acc_norm_stderr": 0.027146271936625162 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.46530612244897956, + "acc_stderr": 0.03193207024425314, + "acc_norm": 0.46530612244897956, + "acc_norm_stderr": 0.03193207024425314 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5147679324894515, + "acc_stderr": 0.03253302807877738, + "acc_norm": 0.5147679324894515, + 
"acc_norm_stderr": 0.03253302807877738 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.33376792698826596, + "acc_stderr": 0.012043812655846146, + "acc_norm": 0.33376792698826596, + "acc_norm_stderr": 0.012043812655846146 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.35294117647058826, + "acc_stderr": 0.03354092437591519, + "acc_norm": 0.35294117647058826, + "acc_norm_stderr": 0.03354092437591519 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3939393939393939, + "acc_stderr": 0.0381549430868893, + "acc_norm": 0.3939393939393939, + "acc_norm_stderr": 0.0381549430868893 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2558139534883721, + "mc1_stderr": 0.015274176219283344, + "mc2": 0.42382977166227975, + "mc2_stderr": 0.01583303218834649 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4049586776859504, + "acc_stderr": 0.016876941165045612, + "acc_norm": 0.45808736717827625, + "acc_norm_stderr": 0.017129852117911147 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + 
"harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "peterkang/mymodel_v5", + "model_sha": "15dc9e98806b34ac949a59781817ec29fd629ec0", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at 
end of file diff --git a/peterkang/mymodel_v6/result_2024-01-04 12:19:41.json b/peterkang/mymodel_v6/result_2024-01-04 12:19:41.json new file mode 100644 index 0000000000000000000000000000000000000000..801a72986fdf98d2073dd8beff9a1f918a54e323 --- /dev/null +++ b/peterkang/mymodel_v6/result_2024-01-04 12:19:41.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.36177474402730375, + "acc_stderr": 0.014041957945038085, + "acc_norm": 0.4232081911262799, + "acc_norm_stderr": 0.014438036220848034 + }, + "harness|ko_hellaswag|10": { + "acc": 0.5624377614021111, + "acc_stderr": 0.004950723480149752, + "acc_norm": 0.6439952200756821, + "acc_norm_stderr": 0.0047783807588511265 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4502923976608187, + "acc_stderr": 0.038158273659132366, + "acc_norm": 0.4502923976608187, + "acc_norm_stderr": 0.038158273659132366 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5533980582524272, + "acc_stderr": 0.04922424153458935, + "acc_norm": 0.5533980582524272, + "acc_norm_stderr": 0.04922424153458935 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4623243933588761, + "acc_stderr": 0.01782913176428719, + "acc_norm": 0.4623243933588761, + "acc_norm_stderr": 0.01782913176428719 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3851851851851852, + "acc_stderr": 0.042039210401562783, + "acc_norm": 0.3851851851851852, + "acc_norm_stderr": 0.042039210401562783 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4, + "acc_stderr": 0.03202563076101737, + "acc_norm": 0.4, + "acc_norm_stderr": 0.03202563076101737 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4036144578313253, + "acc_stderr": 0.03819486140758397, + "acc_norm": 0.4036144578313253, + "acc_norm_stderr": 0.03819486140758397 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 
0.4887459807073955, + "acc_stderr": 0.028390897396863533, + "acc_norm": 0.4887459807073955, + "acc_norm_stderr": 0.028390897396863533 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.47533632286995514, + "acc_stderr": 0.033516951676526276, + "acc_norm": 0.47533632286995514, + "acc_norm_stderr": 0.033516951676526276 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4351145038167939, + "acc_stderr": 0.043482080516448585, + "acc_norm": 0.4351145038167939, + "acc_norm_stderr": 0.043482080516448585 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5505050505050505, + "acc_stderr": 0.0354413249194797, + "acc_norm": 0.5505050505050505, + "acc_norm_stderr": 0.0354413249194797 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4689655172413793, + "acc_stderr": 0.04158632762097828, + "acc_norm": 0.4689655172413793, + "acc_norm_stderr": 0.04158632762097828 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.043364327079931785, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.043364327079931785 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5042016806722689, + "acc_stderr": 0.03247734334448111, + "acc_norm": 0.5042016806722689, + "acc_norm_stderr": 0.03247734334448111 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.43846153846153846, + "acc_stderr": 0.02515826601686856, + "acc_norm": 0.43846153846153846, + "acc_norm_stderr": 0.02515826601686856 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.59, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.59, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + 
"harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5092592592592593, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.5092592592592593, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.03481904844438803, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.03481904844438803 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4806451612903226, + "acc_stderr": 0.0284226874043121, + "acc_norm": 0.4806451612903226, + "acc_norm_stderr": 0.0284226874043121 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.717948717948718, + "acc_stderr": 0.029480360549541194, + "acc_norm": 0.717948717948718, + "acc_norm_stderr": 0.029480360549541194 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.41509433962264153, + "acc_stderr": 0.030325945789286105, + "acc_norm": 0.41509433962264153, + "acc_norm_stderr": 0.030325945789286105 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4818181818181818, + "acc_stderr": 0.04785964010794916, + "acc_norm": 0.4818181818181818, + "acc_norm_stderr": 0.04785964010794916 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.028317533496066482, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.028317533496066482 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.25165562913907286, + "acc_stderr": 0.03543304234389985, + "acc_norm": 0.25165562913907286, + "acc_norm_stderr": 0.03543304234389985 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5870646766169154, + "acc_stderr": 0.034815208033673474, + "acc_norm": 0.5870646766169154, + "acc_norm_stderr": 0.034815208033673474 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3583815028901734, + "acc_stderr": 0.036563436533531585, + "acc_norm": 0.3583815028901734, + "acc_norm_stderr": 0.036563436533531585 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3783068783068783, + "acc_stderr": 
0.024976954053155254, + "acc_norm": 0.3783068783068783, + "acc_norm_stderr": 0.024976954053155254 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.03942082639927213, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.03942082639927213 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.63, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.63, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5433526011560693, + "acc_stderr": 0.026817718130348916, + "acc_norm": 0.5433526011560693, + "acc_norm_stderr": 0.026817718130348916 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5030674846625767, + "acc_stderr": 0.03928297078179663, + "acc_norm": 0.5030674846625767, + "acc_norm_stderr": 0.03928297078179663 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4506172839506173, + "acc_stderr": 0.027684721415656203, + "acc_norm": 0.4506172839506173, + "acc_norm_stderr": 0.027684721415656203 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.48186528497409326, + "acc_stderr": 0.036060650018329185, + "acc_norm": 0.48186528497409326, + "acc_norm_stderr": 0.036060650018329185 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3508771929824561, + "acc_stderr": 0.044895393502706986, + "acc_norm": 0.3508771929824561, + "acc_norm_stderr": 0.044895393502706986 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.48990825688073397, + "acc_stderr": 0.021432956203453316, + "acc_norm": 0.48990825688073397, + "acc_norm_stderr": 0.021432956203453316 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3888888888888889, + 
"acc_stderr": 0.04360314860077459, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.04360314860077459 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5032679738562091, + "acc_stderr": 0.02862930519400355, + "acc_norm": 0.5032679738562091, + "acc_norm_stderr": 0.02862930519400355 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6694214876033058, + "acc_stderr": 0.04294340845212095, + "acc_norm": 0.6694214876033058, + "acc_norm_stderr": 0.04294340845212095 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.375, + "acc_stderr": 0.039397364351956274, + "acc_norm": 0.375, + "acc_norm_stderr": 0.039397364351956274 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4068627450980392, + "acc_stderr": 0.019873802005061177, + "acc_norm": 0.4068627450980392, + "acc_norm_stderr": 0.019873802005061177 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.35815602836879434, + "acc_stderr": 0.02860208586275942, + "acc_norm": 0.35815602836879434, + "acc_norm_stderr": 0.02860208586275942 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4017857142857143, + "acc_stderr": 0.04653333146973646, + "acc_norm": 0.4017857142857143, + "acc_norm_stderr": 0.04653333146973646 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.375, + "acc_stderr": 0.033016908987210894, + "acc_norm": 0.375, + "acc_norm_stderr": 0.033016908987210894 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.30614525139664805, + "acc_stderr": 0.015414494487903212, + "acc_norm": 0.30614525139664805, + "acc_norm_stderr": 0.015414494487903212 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.63, + "acc_stderr": 
0.04852365870939099, + "acc_norm": 0.63, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.02952009569768776, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.02952009569768776 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5020408163265306, + "acc_stderr": 0.0320089533497105, + "acc_norm": 0.5020408163265306, + "acc_norm_stderr": 0.0320089533497105 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.540084388185654, + "acc_stderr": 0.03244246810187913, + "acc_norm": 0.540084388185654, + "acc_norm_stderr": 0.03244246810187913 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.32920469361147325, + "acc_stderr": 0.012002091666902302, + "acc_norm": 0.32920469361147325, + "acc_norm_stderr": 0.012002091666902302 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.43137254901960786, + "acc_stderr": 0.03476099060501637, + "acc_norm": 0.43137254901960786, + "acc_norm_stderr": 0.03476099060501637 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.42424242424242425, + "acc_stderr": 0.038592681420702615, + "acc_norm": 0.42424242424242425, + "acc_norm_stderr": 0.038592681420702615 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2876376988984088, + "mc1_stderr": 0.01584631510139482, + "mc2": 0.43434900051140474, + "mc2_stderr": 0.016646686049344012 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.46871310507674147, + "acc_stderr": 0.017156666859785466, + "acc_norm": 0.48760330578512395, + "acc_norm_stderr": 0.01718506973267653 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + 
"harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 
1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "peterkang/mymodel_v6", + "model_sha": "fa6b68820271d790740ef55736c5cded2a04c0c5", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/ppuuttyy/ppuuttyy/result_2024-04-03 04:59:23.json b/ppuuttyy/ppuuttyy/result_2024-04-03 04:59:23.json new file mode 100644 index 0000000000000000000000000000000000000000..4e62d263ff5c3a78ff86d2b0fd76d719a3474e3e --- /dev/null +++ b/ppuuttyy/ppuuttyy/result_2024-04-03 04:59:23.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2158703071672355, + "acc_stderr": 0.012022975360030665, + "acc_norm": 0.26621160409556316, + "acc_norm_stderr": 0.01291577478152322 + }, + "harness|ko_hellaswag|10": { + "acc": 0.24805815574586737, + "acc_stderr": 0.0043100310444591445, + "acc_norm": 0.24278032264489147, + "acc_norm_stderr": 0.004278871104930377 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.3216374269005848, + "acc_stderr": 0.03582529442573122, + "acc_norm": 0.3216374269005848, + "acc_norm_stderr": 0.03582529442573122 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.17475728155339806, + "acc_stderr": 0.037601780060266196, + "acc_norm": 0.17475728155339806, + "acc_norm_stderr": 0.037601780060266196 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.23754789272030652, + "acc_stderr": 0.015218733046150191, + "acc_norm": 0.23754789272030652, + "acc_norm_stderr": 0.015218733046150191 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.18518518518518517, + "acc_stderr": 0.03355677216313142, + 
"acc_norm": 0.18518518518518517, + "acc_norm_stderr": 0.03355677216313142 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932268, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932268 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.26382978723404255, + "acc_stderr": 0.028809989854102987, + "acc_norm": 0.26382978723404255, + "acc_norm_stderr": 0.028809989854102987 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.28313253012048195, + "acc_stderr": 0.03507295431370518, + "acc_norm": 0.28313253012048195, + "acc_norm_stderr": 0.03507295431370518 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.1864951768488746, + "acc_stderr": 0.022122439772480757, + "acc_norm": 0.1864951768488746, + "acc_norm_stderr": 0.022122439772480757 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.31390134529147984, + "acc_stderr": 0.031146796482972465, + "acc_norm": 0.31390134529147984, + "acc_norm_stderr": 0.031146796482972465 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2595419847328244, + "acc_stderr": 0.03844876139785271, + "acc_norm": 0.2595419847328244, + "acc_norm_stderr": 0.03844876139785271 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.17676767676767677, + "acc_stderr": 0.027178752639044915, + "acc_norm": 0.17676767676767677, + "acc_norm_stderr": 0.027178752639044915 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2413793103448276, + "acc_stderr": 0.03565998174135302, + "acc_norm": 0.2413793103448276, + "acc_norm_stderr": 0.03565998174135302 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237654, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237654 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.21008403361344538, + 
"acc_stderr": 0.026461398717471874, + "acc_norm": 0.21008403361344538, + "acc_norm_stderr": 0.026461398717471874 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.20256410256410257, + "acc_stderr": 0.020377660970371393, + "acc_norm": 0.20256410256410257, + "acc_norm_stderr": 0.020377660970371393 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.18, + "acc_stderr": 0.038612291966536955, + "acc_norm": 0.18, + "acc_norm_stderr": 0.038612291966536955 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.04236511258094633, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.04236511258094633 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.15270935960591134, + "acc_stderr": 0.025308904539380627, + "acc_norm": 0.15270935960591134, + "acc_norm_stderr": 0.025308904539380627 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.1774193548387097, + "acc_stderr": 0.021732540689329276, + "acc_norm": 0.1774193548387097, + "acc_norm_stderr": 0.021732540689329276 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2905982905982906, + "acc_stderr": 0.029745048572674057, + "acc_norm": 0.2905982905982906, + "acc_norm_stderr": 0.029745048572674057 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.21509433962264152, + "acc_stderr": 0.025288394502891377, + "acc_norm": 0.21509433962264152, + "acc_norm_stderr": 0.025288394502891377 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03955932861795833, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03955932861795833 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24444444444444444, + "acc_stderr": 0.02620276653465215, + "acc_norm": 0.24444444444444444, + "acc_norm_stderr": 0.02620276653465215 + }, + 
"harness|ko_mmlu_high_school_physics|5": { + "acc": 0.1986754966887417, + "acc_stderr": 0.03257847384436775, + "acc_norm": 0.1986754966887417, + "acc_norm_stderr": 0.03257847384436775 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.24378109452736318, + "acc_stderr": 0.030360490154014645, + "acc_norm": 0.24378109452736318, + "acc_norm_stderr": 0.030360490154014645 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.20809248554913296, + "acc_stderr": 0.030952890217749884, + "acc_norm": 0.20809248554913296, + "acc_norm_stderr": 0.030952890217749884 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.20899470899470898, + "acc_stderr": 0.020940481565334835, + "acc_norm": 0.20899470899470898, + "acc_norm_stderr": 0.020940481565334835 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2569444444444444, + "acc_stderr": 0.03653946969442099, + "acc_norm": 0.2569444444444444, + "acc_norm_stderr": 0.03653946969442099 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.2, + "acc_stderr": 0.040201512610368445, + "acc_norm": 0.2, + "acc_norm_stderr": 0.040201512610368445 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542129, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542129 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.24855491329479767, + "acc_stderr": 0.023267528432100174, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 0.023267528432100174 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.22085889570552147, + "acc_stderr": 0.03259177392742178, + "acc_norm": 0.22085889570552147, + "acc_norm_stderr": 0.03259177392742178 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.21604938271604937, + "acc_stderr": 0.022899162918445796, + "acc_norm": 0.21604938271604937, + "acc_norm_stderr": 0.022899162918445796 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 
+ }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.19689119170984457, + "acc_stderr": 0.028697873971860677, + "acc_norm": 0.19689119170984457, + "acc_norm_stderr": 0.028697873971860677 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.039994238792813365, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.039994238792813365 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.1926605504587156, + "acc_stderr": 0.016909276884936097, + "acc_norm": 0.1926605504587156, + "acc_norm_stderr": 0.016909276884936097 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.04040610178208841, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.04040610178208841 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.02392915551735129, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.02392915551735129 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2396694214876033, + "acc_stderr": 0.038968789850704164, + "acc_norm": 0.2396694214876033, + "acc_norm_stderr": 0.038968789850704164 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.17763157894736842, + "acc_stderr": 0.03110318238312338, + "acc_norm": 0.17763157894736842, + "acc_norm_stderr": 0.03110318238312338 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.25, + "acc_stderr": 0.01751781884501444, + "acc_norm": 0.25, + "acc_norm_stderr": 0.01751781884501444 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.23404255319148937, + "acc_stderr": 0.025257861359432414, + "acc_norm": 0.23404255319148937, + "acc_norm_stderr": 0.025257861359432414 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3125, + "acc_stderr": 0.043994650575715215, + "acc_norm": 0.3125, + 
"acc_norm_stderr": 0.043994650575715215 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.1527777777777778, + "acc_stderr": 0.024536326026134238, + "acc_norm": 0.1527777777777778, + "acc_norm_stderr": 0.024536326026134238 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23798882681564246, + "acc_stderr": 0.014242630070574892, + "acc_norm": 0.23798882681564246, + "acc_norm_stderr": 0.014242630070574892 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.18382352941176472, + "acc_stderr": 0.02352924218519311, + "acc_norm": 0.18382352941176472, + "acc_norm_stderr": 0.02352924218519311 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.18775510204081633, + "acc_stderr": 0.025000256039546198, + "acc_norm": 0.18775510204081633, + "acc_norm_stderr": 0.025000256039546198 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.26582278481012656, + "acc_stderr": 0.028756799629658332, + "acc_norm": 0.26582278481012656, + "acc_norm_stderr": 0.028756799629658332 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2457627118644068, + "acc_stderr": 0.010996156635142692, + "acc_norm": 0.2457627118644068, + "acc_norm_stderr": 0.010996156635142692 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.27941176470588236, + "acc_stderr": 0.031493281045079556, + "acc_norm": 0.27941176470588236, + "acc_norm_stderr": 0.031493281045079556 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03225078108306289, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03225078108306289 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 
0.23255813953488372, + "mc1_stderr": 0.014789157531080501, + "mc2": NaN, + "mc2_stderr": NaN + }, + "harness|ko_commongen_v2|2": { + "acc": 0.07792207792207792, + "acc_stderr": 0.00921571197230472, + "acc_norm": 0.21133412042502953, + "acc_norm_stderr": 0.014036090342930314 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + 
"harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "ppuuttyy/ppuuttyy", + "model_sha": "ed58e41fce8ef14b0e1263c0bc0ffbb9ab8107ae", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/qnguyen3/Master-Yi-9B/result_2024-05-29 09:31:09.json b/qnguyen3/Master-Yi-9B/result_2024-05-29 09:31:09.json new file mode 100644 index 0000000000000000000000000000000000000000..42b7a2f2547f7516a7410b91a4b74de4cf4d4261 --- /dev/null +++ b/qnguyen3/Master-Yi-9B/result_2024-05-29 09:31:09.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.30802047781569963, + "acc_stderr": 0.01349142951729204, + "acc_norm": 0.35409556313993173, + "acc_norm_stderr": 0.013975454122756557 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3375821549492133, + "acc_stderr": 0.004719187890948065, + "acc_norm": 0.4190400318661621, + "acc_norm_stderr": 
0.004923935749842496 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.43859649122807015, + "acc_stderr": 0.038057975055904594, + "acc_norm": 0.43859649122807015, + "acc_norm_stderr": 0.038057975055904594 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5533980582524272, + "acc_stderr": 0.04922424153458934, + "acc_norm": 0.5533980582524272, + "acc_norm_stderr": 0.04922424153458934 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4904214559386973, + "acc_stderr": 0.017876682275340887, + "acc_norm": 0.4904214559386973, + "acc_norm_stderr": 0.017876682275340887 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34074074074074073, + "acc_stderr": 0.04094376269996793, + "acc_norm": 0.34074074074074073, + "acc_norm_stderr": 0.04094376269996793 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4765957446808511, + "acc_stderr": 0.03265019475033582, + "acc_norm": 0.4765957446808511, + "acc_norm_stderr": 0.03265019475033582 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4457831325301205, + "acc_stderr": 0.03869543323472101, + "acc_norm": 0.4457831325301205, + "acc_norm_stderr": 0.03869543323472101 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4919614147909968, + "acc_stderr": 0.028394421370984548, + "acc_norm": 0.4919614147909968, + "acc_norm_stderr": 0.028394421370984548 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.43946188340807174, + "acc_stderr": 0.03331092511038179, + "acc_norm": 0.43946188340807174, + "acc_norm_stderr": 0.03331092511038179 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4732824427480916, + "acc_stderr": 0.04379024936553894, + "acc_norm": 0.4732824427480916, + "acc_norm_stderr": 0.04379024936553894 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 
0.05009082659620332 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5353535353535354, + "acc_stderr": 0.03553436368828063, + "acc_norm": 0.5353535353535354, + "acc_norm_stderr": 0.03553436368828063 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.6068965517241379, + "acc_stderr": 0.040703290137070705, + "acc_norm": 0.6068965517241379, + "acc_norm_stderr": 0.040703290137070705 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04690650298201942, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04690650298201942 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5672268907563025, + "acc_stderr": 0.032183581077426124, + "acc_norm": 0.5672268907563025, + "acc_norm_stderr": 0.032183581077426124 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4717948717948718, + "acc_stderr": 0.02531063925493391, + "acc_norm": 0.4717948717948718, + "acc_norm_stderr": 0.02531063925493391 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5833333333333334, + "acc_stderr": 0.04766075165356461, + "acc_norm": 0.5833333333333334, + "acc_norm_stderr": 0.04766075165356461 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4482758620689655, + "acc_stderr": 0.03499113137676744, + "acc_norm": 0.4482758620689655, + "acc_norm_stderr": 0.03499113137676744 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.43870967741935485, + "acc_stderr": 0.02822949732031722, + "acc_norm": 0.43870967741935485, + "acc_norm_stderr": 0.02822949732031722 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6965811965811965, + "acc_stderr": 0.030118210106942656, + 
"acc_norm": 0.6965811965811965, + "acc_norm_stderr": 0.030118210106942656 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.45660377358490567, + "acc_stderr": 0.03065674869673943, + "acc_norm": 0.45660377358490567, + "acc_norm_stderr": 0.03065674869673943 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5, + "acc_stderr": 0.04789131426105757, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04789131426105757 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.02944316932303154, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.02944316932303154 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33112582781456956, + "acc_stderr": 0.038425817186598696, + "acc_norm": 0.33112582781456956, + "acc_norm_stderr": 0.038425817186598696 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5970149253731343, + "acc_stderr": 0.034683432951111266, + "acc_norm": 0.5970149253731343, + "acc_norm_stderr": 0.034683432951111266 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.37572254335260113, + "acc_stderr": 0.036928207672648664, + "acc_norm": 0.37572254335260113, + "acc_norm_stderr": 0.036928207672648664 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.5634920634920635, + "acc_stderr": 0.02554284681740049, + "acc_norm": 0.5634920634920635, + "acc_norm_stderr": 0.02554284681740049 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2847222222222222, + "acc_stderr": 0.037738099906869334, + "acc_norm": 0.2847222222222222, + "acc_norm_stderr": 0.037738099906869334 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5144508670520231, + "acc_stderr": 
0.026907849856282542, + "acc_norm": 0.5144508670520231, + "acc_norm_stderr": 0.026907849856282542 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4785276073619632, + "acc_stderr": 0.0392474687675113, + "acc_norm": 0.4785276073619632, + "acc_norm_stderr": 0.0392474687675113 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5061728395061729, + "acc_stderr": 0.02781862396258329, + "acc_norm": 0.5061728395061729, + "acc_norm_stderr": 0.02781862396258329 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.49740932642487046, + "acc_stderr": 0.03608390745384488, + "acc_norm": 0.49740932642487046, + "acc_norm_stderr": 0.03608390745384488 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.42105263157894735, + "acc_stderr": 0.04644602091222317, + "acc_norm": 0.42105263157894735, + "acc_norm_stderr": 0.04644602091222317 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5247706422018349, + "acc_stderr": 0.021410999753635914, + "acc_norm": 0.5247706422018349, + "acc_norm_stderr": 0.021410999753635914 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.49206349206349204, + "acc_stderr": 0.044715725362943486, + "acc_norm": 0.49206349206349204, + "acc_norm_stderr": 0.044715725362943486 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4738562091503268, + "acc_stderr": 0.028590752958852394, + "acc_norm": 0.4738562091503268, + "acc_norm_stderr": 0.028590752958852394 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.57, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.57, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7272727272727273, + "acc_stderr": 0.04065578140908705, + "acc_norm": 0.7272727272727273, + "acc_norm_stderr": 0.04065578140908705 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 
0.4144736842105263, + "acc_stderr": 0.04008973785779206, + "acc_norm": 0.4144736842105263, + "acc_norm_stderr": 0.04008973785779206 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4117647058823529, + "acc_stderr": 0.019910377463105935, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.019910377463105935 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.37943262411347517, + "acc_stderr": 0.028947338851614105, + "acc_norm": 0.37943262411347517, + "acc_norm_stderr": 0.028947338851614105 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4017857142857143, + "acc_stderr": 0.04653333146973647, + "acc_norm": 0.4017857142857143, + "acc_norm_stderr": 0.04653333146973647 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.0340470532865388, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.0340470532865388 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.3139664804469274, + "acc_stderr": 0.015521923933523646, + "acc_norm": 0.3139664804469274, + "acc_norm_stderr": 0.015521923933523646 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.73, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.73, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.31985294117647056, + "acc_stderr": 0.02833295951403122, + "acc_norm": 0.31985294117647056, + "acc_norm_stderr": 0.02833295951403122 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5714285714285714, + "acc_stderr": 0.031680911612338825, + "acc_norm": 0.5714285714285714, + "acc_norm_stderr": 0.031680911612338825 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.48945147679324896, + "acc_stderr": 0.032539983791662855, + "acc_norm": 0.48945147679324896, + "acc_norm_stderr": 
0.032539983791662855 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.34159061277705344, + "acc_stderr": 0.012112391320842842, + "acc_norm": 0.34159061277705344, + "acc_norm_stderr": 0.012112391320842842 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.46078431372549017, + "acc_stderr": 0.03498501649369527, + "acc_norm": 0.46078431372549017, + "acc_norm_stderr": 0.03498501649369527 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.47878787878787876, + "acc_stderr": 0.03900828913737302, + "acc_norm": 0.47878787878787876, + "acc_norm_stderr": 0.03900828913737302 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.31946144430844553, + "mc1_stderr": 0.0163226441829605, + "mc2": 0.49449328523751745, + "mc2_stderr": 0.015882954301102255 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5525383707201889, + "acc_stderr": 0.017095190301500578, + "acc_norm": 0.5619834710743802, + "acc_norm_stderr": 0.01705775370216029 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 
1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "qnguyen3/Master-Yi-9B", + "model_sha": "e30c2b86d68053dd99bb2159f6ff04636786ac32", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git 
a/quantumaikr/KoreanLM-1.5b/result_2023-10-18 16:24:34.json b/quantumaikr/KoreanLM-1.5b/result_2023-10-18 16:24:34.json new file mode 100644 index 0000000000000000000000000000000000000000..11b8203e5545b3714e6e2186e9e29f0001aabcc6 --- /dev/null +++ b/quantumaikr/KoreanLM-1.5b/result_2023-10-18 16:24:34.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.21245733788395904, + "acc_stderr": 0.011953482906582949, + "acc_norm": 0.2781569965870307, + "acc_norm_stderr": 0.0130944699195388 + }, + "harness|ko_hellaswag|10": { + "acc": 0.26000796654052977, + "acc_stderr": 0.004377421493297836, + "acc_norm": 0.2647878908583947, + "acc_norm_stderr": 0.004403184691341697 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.26900584795321636, + "acc_stderr": 0.03401052620104089, + "acc_norm": 0.26900584795321636, + "acc_norm_stderr": 0.03401052620104089 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.32038834951456313, + "acc_stderr": 0.0462028408228004, + "acc_norm": 0.32038834951456313, + "acc_norm_stderr": 0.0462028408228004 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.22988505747126436, + "acc_stderr": 0.015046301846691838, + "acc_norm": 0.22988505747126436, + "acc_norm_stderr": 0.015046301846691838 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.03944624162501116, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.03944624162501116 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.19574468085106383, + "acc_stderr": 0.025937853139977148, + "acc_norm": 0.19574468085106383, + "acc_norm_stderr": 0.025937853139977148 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.26506024096385544, + "acc_stderr": 0.03436024037944966, + "acc_norm": 0.26506024096385544, + "acc_norm_stderr": 0.03436024037944966 + }, + 
"harness|ko_mmlu_philosophy|5": { + "acc": 0.2990353697749196, + "acc_stderr": 0.02600330111788514, + "acc_norm": 0.2990353697749196, + "acc_norm_stderr": 0.02600330111788514 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.32286995515695066, + "acc_stderr": 0.03138147637575498, + "acc_norm": 0.32286995515695066, + "acc_norm_stderr": 0.03138147637575498 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.32061068702290074, + "acc_stderr": 0.04093329229834278, + "acc_norm": 0.32061068702290074, + "acc_norm_stderr": 0.04093329229834278 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.23232323232323232, + "acc_stderr": 0.030088629490217483, + "acc_norm": 0.23232323232323232, + "acc_norm_stderr": 0.030088629490217483 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2482758620689655, + "acc_stderr": 0.03600105692727772, + "acc_norm": 0.2482758620689655, + "acc_norm_stderr": 0.03600105692727772 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.39215686274509803, + "acc_stderr": 0.048580835742663434, + "acc_norm": 0.39215686274509803, + "acc_norm_stderr": 0.048580835742663434 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.029597329730978082, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.029597329730978082 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.29743589743589743, + "acc_stderr": 0.023177408131465942, + "acc_norm": 0.29743589743589743, + "acc_norm_stderr": 0.023177408131465942 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.16, + "acc_stderr": 0.0368452949177471, + "acc_norm": 0.16, + "acc_norm_stderr": 
0.0368452949177471 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.21296296296296297, + "acc_stderr": 0.0395783547198098, + "acc_norm": 0.21296296296296297, + "acc_norm_stderr": 0.0395783547198098 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.28078817733990147, + "acc_stderr": 0.031618563353586114, + "acc_norm": 0.28078817733990147, + "acc_norm_stderr": 0.031618563353586114 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2870967741935484, + "acc_stderr": 0.02573654274559452, + "acc_norm": 0.2870967741935484, + "acc_norm_stderr": 0.02573654274559452 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.21794871794871795, + "acc_stderr": 0.02704685763071668, + "acc_norm": 0.21794871794871795, + "acc_norm_stderr": 0.02704685763071668 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.30943396226415093, + "acc_stderr": 0.028450154794118627, + "acc_norm": 0.30943396226415093, + "acc_norm_stderr": 0.028450154794118627 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.17272727272727273, + "acc_stderr": 0.0362069183392922, + "acc_norm": 0.17272727272727273, + "acc_norm_stderr": 0.0362069183392922 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2074074074074074, + "acc_stderr": 0.024720713193952165, + "acc_norm": 0.2074074074074074, + "acc_norm_stderr": 0.024720713193952165 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.24503311258278146, + "acc_stderr": 0.03511807571804726, + "acc_norm": 0.24503311258278146, + "acc_norm_stderr": 0.03511807571804726 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.31840796019900497, + "acc_stderr": 0.03294118479054095, + "acc_norm": 0.31840796019900497, + "acc_norm_stderr": 0.03294118479054095 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2774566473988439, + "acc_stderr": 0.03414014007044036, + "acc_norm": 0.2774566473988439, + "acc_norm_stderr": 0.03414014007044036 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 
0.23809523809523808, + "acc_stderr": 0.021935878081184763, + "acc_norm": 0.23809523809523808, + "acc_norm_stderr": 0.021935878081184763 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.25, + "acc_stderr": 0.03621034121889507, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03621034121889507 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.16, + "acc_stderr": 0.03684529491774708, + "acc_norm": 0.16, + "acc_norm_stderr": 0.03684529491774708 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.23699421965317918, + "acc_stderr": 0.02289408248992599, + "acc_norm": 0.23699421965317918, + "acc_norm_stderr": 0.02289408248992599 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.24539877300613497, + "acc_stderr": 0.03380939813943354, + "acc_norm": 0.24539877300613497, + "acc_norm_stderr": 0.03380939813943354 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.20679012345679013, + "acc_stderr": 0.022535006705942818, + "acc_norm": 0.20679012345679013, + "acc_norm_stderr": 0.022535006705942818 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.2538860103626943, + "acc_stderr": 0.0314102478056532, + "acc_norm": 0.2538860103626943, + "acc_norm_stderr": 0.0314102478056532 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.03999423879281337, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.03999423879281337 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.26238532110091745, + "acc_stderr": 0.018861885021534734, + "acc_norm": 0.26238532110091745, + "acc_norm_stderr": 0.018861885021534734 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 
0.25396825396825395, + "acc_stderr": 0.03893259610604674, + "acc_norm": 0.25396825396825395, + "acc_norm_stderr": 0.03893259610604674 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.26143790849673204, + "acc_stderr": 0.025160998214292456, + "acc_norm": 0.26143790849673204, + "acc_norm_stderr": 0.025160998214292456 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2396694214876033, + "acc_stderr": 0.03896878985070415, + "acc_norm": 0.2396694214876033, + "acc_norm_stderr": 0.03896878985070415 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.26973684210526316, + "acc_stderr": 0.03611780560284898, + "acc_norm": 0.26973684210526316, + "acc_norm_stderr": 0.03611780560284898 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.22875816993464052, + "acc_stderr": 0.016992723465466233, + "acc_norm": 0.22875816993464052, + "acc_norm_stderr": 0.016992723465466233 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2695035460992908, + "acc_stderr": 0.02646903681859063, + "acc_norm": 0.2695035460992908, + "acc_norm_stderr": 0.02646903681859063 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.21428571428571427, + "acc_stderr": 0.038946411200447915, + "acc_norm": 0.21428571428571427, + "acc_norm_stderr": 0.038946411200447915 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.030546745264953185, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.030546745264953185 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2569832402234637, + "acc_stderr": 0.014614465821966339, + "acc_norm": 0.2569832402234637, + "acc_norm_stderr": 0.014614465821966339 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + 
"harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.1801470588235294, + "acc_stderr": 0.02334516361654486, + "acc_norm": 0.1801470588235294, + "acc_norm_stderr": 0.02334516361654486 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2938775510204082, + "acc_stderr": 0.029162738410249783, + "acc_norm": 0.2938775510204082, + "acc_norm_stderr": 0.029162738410249783 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.24050632911392406, + "acc_stderr": 0.027820781981149678, + "acc_norm": 0.24050632911392406, + "acc_norm_stderr": 0.027820781981149678 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.23728813559322035, + "acc_stderr": 0.010865436690780269, + "acc_norm": 0.23728813559322035, + "acc_norm_stderr": 0.010865436690780269 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.24019607843137256, + "acc_stderr": 0.02998373305591361, + "acc_norm": 0.24019607843137256, + "acc_norm_stderr": 0.02998373305591361 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2545454545454545, + "acc_stderr": 0.03401506715249039, + "acc_norm": 0.2545454545454545, + "acc_norm_stderr": 0.03401506715249039 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.26438188494492043, + "mc1_stderr": 0.015438211119522517, + "mc2": 0.5207557813698324, + "mc2_stderr": 0.01656184952031738 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2089728453364817, + "acc_stderr": 0.013978334944170286, + "acc_norm": 0.4757969303423849, + "acc_norm_stderr": 0.01717020246652075 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + 
"harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + 
"harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "quantumaikr/KoreanLM-1.5b", + "model_sha": "d26b261612f7cf8358309921bc387b754596355f", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/quantumaikr/KoreanLM-3B/result_2023-10-18 16:24:22.json b/quantumaikr/KoreanLM-3B/result_2023-10-18 16:24:22.json new file mode 100644 index 0000000000000000000000000000000000000000..f399854d9bdc2aa16706c29a43471c84fb7ec1f3 --- /dev/null +++ b/quantumaikr/KoreanLM-3B/result_2023-10-18 16:24:22.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.19880546075085323, + "acc_stderr": 0.011662850198175536, + "acc_norm": 0.24488054607508533, + "acc_norm_stderr": 0.012566273985131356 + }, + "harness|ko_hellaswag|10": { + "acc": 0.27106154152559253, + "acc_stderr": 0.004435993492583864, + "acc_norm": 0.27753435570603463, + "acc_norm_stderr": 0.004468672138910928 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.2982456140350877, + "acc_stderr": 0.03508771929824563, + "acc_norm": 0.2982456140350877, + "acc_norm_stderr": 0.03508771929824563 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.17475728155339806, + "acc_stderr": 0.037601780060266196, + "acc_norm": 0.17475728155339806, + "acc_norm_stderr": 0.037601780060266196 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.24265644955300128, + "acc_stderr": 0.015329888940899873, + "acc_norm": 0.24265644955300128, + "acc_norm_stderr": 
0.015329888940899873 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.28888888888888886, + "acc_stderr": 0.0391545063041425, + "acc_norm": 0.28888888888888886, + "acc_norm_stderr": 0.0391545063041425 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2723404255319149, + "acc_stderr": 0.029101290698386694, + "acc_norm": 0.2723404255319149, + "acc_norm_stderr": 0.029101290698386694 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.28313253012048195, + "acc_stderr": 0.03507295431370518, + "acc_norm": 0.28313253012048195, + "acc_norm_stderr": 0.03507295431370518 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.18971061093247588, + "acc_stderr": 0.022268196258783218, + "acc_norm": 0.18971061093247588, + "acc_norm_stderr": 0.022268196258783218 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.31390134529147984, + "acc_stderr": 0.031146796482972465, + "acc_norm": 0.31390134529147984, + "acc_norm_stderr": 0.031146796482972465 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2595419847328244, + "acc_stderr": 0.03844876139785271, + "acc_norm": 0.2595419847328244, + "acc_norm_stderr": 0.03844876139785271 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.18181818181818182, + "acc_stderr": 0.0274796030105388, + "acc_norm": 0.18181818181818182, + "acc_norm_stderr": 0.0274796030105388 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2482758620689655, + "acc_stderr": 0.036001056927277716, + "acc_norm": 0.2482758620689655, + "acc_norm_stderr": 0.036001056927277716 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237654, + "acc_norm": 0.21568627450980393, + 
"acc_norm_stderr": 0.04092563958237654 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3487394957983193, + "acc_stderr": 0.03095663632856655, + "acc_norm": 0.3487394957983193, + "acc_norm_stderr": 0.03095663632856655 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.33076923076923076, + "acc_stderr": 0.023854795680971142, + "acc_norm": 0.33076923076923076, + "acc_norm_stderr": 0.023854795680971142 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.19, + "acc_stderr": 0.03942772444036623, + "acc_norm": 0.19, + "acc_norm_stderr": 0.03942772444036623 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.04236511258094633, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.04236511258094633 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.28078817733990147, + "acc_stderr": 0.03161856335358611, + "acc_norm": 0.28078817733990147, + "acc_norm_stderr": 0.03161856335358611 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.23870967741935484, + "acc_stderr": 0.024251071262208834, + "acc_norm": 0.23870967741935484, + "acc_norm_stderr": 0.024251071262208834 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2606837606837607, + "acc_stderr": 0.028760348956523414, + "acc_norm": 0.2606837606837607, + "acc_norm_stderr": 0.028760348956523414 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.20754716981132076, + "acc_stderr": 0.024959918028911274, + "acc_norm": 0.20754716981132076, + "acc_norm_stderr": 0.024959918028911274 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03955932861795833, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03955932861795833 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26666666666666666, + "acc_stderr": 
0.026962424325073838, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.026962424325073838 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2185430463576159, + "acc_stderr": 0.03374235550425694, + "acc_norm": 0.2185430463576159, + "acc_norm_stderr": 0.03374235550425694 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.23880597014925373, + "acc_stderr": 0.030147775935409217, + "acc_norm": 0.23880597014925373, + "acc_norm_stderr": 0.030147775935409217 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.20809248554913296, + "acc_stderr": 0.030952890217749884, + "acc_norm": 0.20809248554913296, + "acc_norm_stderr": 0.030952890217749884 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.022644212615525218, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.022644212615525218 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2361111111111111, + "acc_stderr": 0.03551446610810826, + "acc_norm": 0.2361111111111111, + "acc_norm_stderr": 0.03551446610810826 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.31, + "acc_stderr": 0.046482319871173156, + "acc_norm": 0.31, + "acc_norm_stderr": 0.046482319871173156 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.24855491329479767, + "acc_stderr": 0.023267528432100174, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 0.023267528432100174 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2085889570552147, + "acc_stderr": 0.03192193448934725, + "acc_norm": 0.2085889570552147, + "acc_norm_stderr": 0.03192193448934725 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.20987654320987653, + "acc_stderr": 0.02265834408598137, + "acc_norm": 0.20987654320987653, + "acc_norm_stderr": 0.02265834408598137 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 
0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.30569948186528495, + "acc_stderr": 0.033248379397581594, + "acc_norm": 0.30569948186528495, + "acc_norm_stderr": 0.033248379397581594 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.04185774424022056, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.04185774424022056 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.1926605504587156, + "acc_stderr": 0.016909276884936097, + "acc_norm": 0.1926605504587156, + "acc_norm_stderr": 0.016909276884936097 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.03718489006818115, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.03718489006818115 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.21241830065359477, + "acc_stderr": 0.023420375478296125, + "acc_norm": 0.21241830065359477, + "acc_norm_stderr": 0.023420375478296125 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384739, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384739 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.24793388429752067, + "acc_stderr": 0.03941897526516303, + "acc_norm": 0.24793388429752067, + "acc_norm_stderr": 0.03941897526516303 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.17763157894736842, + "acc_stderr": 0.03110318238312338, + "acc_norm": 0.17763157894736842, + "acc_norm_stderr": 0.03110318238312338 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.25163398692810457, + "acc_stderr": 0.01755581809132227, + "acc_norm": 0.25163398692810457, + "acc_norm_stderr": 0.01755581809132227 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2624113475177305, + "acc_stderr": 0.026244920349843003, + "acc_norm": 0.2624113475177305, + "acc_norm_stderr": 0.026244920349843003 + }, + 
"harness|ko_mmlu_machine_learning|5": { + "acc": 0.30357142857142855, + "acc_stderr": 0.04364226155841044, + "acc_norm": 0.30357142857142855, + "acc_norm_stderr": 0.04364226155841044 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4305555555555556, + "acc_stderr": 0.03376922151252336, + "acc_norm": 0.4305555555555556, + "acc_norm_stderr": 0.03376922151252336 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23798882681564246, + "acc_stderr": 0.014242630070574892, + "acc_norm": 0.23798882681564246, + "acc_norm_stderr": 0.014242630070574892 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.43014705882352944, + "acc_stderr": 0.030074971917302875, + "acc_norm": 0.43014705882352944, + "acc_norm_stderr": 0.030074971917302875 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.22448979591836735, + "acc_stderr": 0.02671143055553841, + "acc_norm": 0.22448979591836735, + "acc_norm_stderr": 0.02671143055553841 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.24050632911392406, + "acc_stderr": 0.027820781981149675, + "acc_norm": 0.24050632911392406, + "acc_norm_stderr": 0.027820781981149675 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2457627118644068, + "acc_stderr": 0.010996156635142692, + "acc_norm": 0.2457627118644068, + "acc_norm_stderr": 0.010996156635142692 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.25, + "acc_stderr": 0.03039153369274154, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03039153369274154 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03225078108306289, + "acc_norm": 
0.21818181818181817, + "acc_norm_stderr": 0.03225078108306289 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.26438188494492043, + "mc1_stderr": 0.015438211119522519, + "mc2": 0.4822371041865183, + "mc2_stderr": 0.01604938696224229 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.27744982290436837, + "acc_stderr": 0.015393630236605971, + "acc_norm": 0.4769775678866588, + "acc_norm_stderr": 0.01717212154672764 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + 
"harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "quantumaikr/KoreanLM-3B", + "model_sha": "f49217779eea253aa3e7dd4645eedfd496fa9e0b", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/quantumaikr/KoreanLM/result_2023-10-18 16:23:36.json b/quantumaikr/KoreanLM/result_2023-10-18 16:23:36.json new file mode 100644 index 0000000000000000000000000000000000000000..a1e42c732272441b1cb1ba50a459503091697341 --- /dev/null +++ b/quantumaikr/KoreanLM/result_2023-10-18 16:23:36.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2619453924914676, + "acc_stderr": 0.012849054826858117, + "acc_norm": 0.30119453924914674, + "acc_norm_stderr": 0.01340674176784762 + }, + 
"harness|ko_hellaswag|10": { + "acc": 0.3179645488946425, + "acc_stderr": 0.004647338877642185, + "acc_norm": 0.3739294961163115, + "acc_norm_stderr": 0.004828564090620291 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.27485380116959063, + "acc_stderr": 0.034240429246915824, + "acc_norm": 0.27485380116959063, + "acc_norm_stderr": 0.034240429246915824 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.3106796116504854, + "acc_stderr": 0.04582124160161551, + "acc_norm": 0.3106796116504854, + "acc_norm_stderr": 0.04582124160161551 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2988505747126437, + "acc_stderr": 0.016369256815093127, + "acc_norm": 0.2988505747126437, + "acc_norm_stderr": 0.016369256815093127 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2740740740740741, + "acc_stderr": 0.03853254836552003, + "acc_norm": 0.2740740740740741, + "acc_norm_stderr": 0.03853254836552003 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3574468085106383, + "acc_stderr": 0.03132941789476425, + "acc_norm": 0.3574468085106383, + "acc_norm_stderr": 0.03132941789476425 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3192771084337349, + "acc_stderr": 0.03629335329947859, + "acc_norm": 0.3192771084337349, + "acc_norm_stderr": 0.03629335329947859 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2604501607717042, + "acc_stderr": 0.02492672322484555, + "acc_norm": 0.2604501607717042, + "acc_norm_stderr": 0.02492672322484555 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.336322869955157, + "acc_stderr": 0.031708824268455, + "acc_norm": 0.336322869955157, + "acc_norm_stderr": 0.031708824268455 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.29770992366412213, + "acc_stderr": 0.04010358942462203, + "acc_norm": 0.29770992366412213, + "acc_norm_stderr": 0.04010358942462203 + }, + 
"harness|ko_mmlu_medical_genetics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.2727272727272727, + "acc_stderr": 0.03173071239071724, + "acc_norm": 0.2727272727272727, + "acc_norm_stderr": 0.03173071239071724 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2827586206896552, + "acc_stderr": 0.037528339580033376, + "acc_norm": 0.2827586206896552, + "acc_norm_stderr": 0.037528339580033376 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.1568627450980392, + "acc_stderr": 0.03618664819936244, + "acc_norm": 0.1568627450980392, + "acc_norm_stderr": 0.03618664819936244 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.28991596638655465, + "acc_stderr": 0.029472485833136084, + "acc_norm": 0.28991596638655465, + "acc_norm_stderr": 0.029472485833136084 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.23333333333333334, + "acc_stderr": 0.02144454730156048, + "acc_norm": 0.23333333333333334, + "acc_norm_stderr": 0.02144454730156048 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3425925925925926, + "acc_stderr": 0.04587904741301811, + "acc_norm": 0.3425925925925926, + "acc_norm_stderr": 0.04587904741301811 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2512315270935961, + "acc_stderr": 0.030516530732694436, + "acc_norm": 0.2512315270935961, + "acc_norm_stderr": 0.030516530732694436 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.26129032258064516, + "acc_stderr": 0.024993053397764815, + "acc_norm": 0.26129032258064516, + 
"acc_norm_stderr": 0.024993053397764815 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.4188034188034188, + "acc_stderr": 0.03232128912157792, + "acc_norm": 0.4188034188034188, + "acc_norm_stderr": 0.03232128912157792 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2792452830188679, + "acc_stderr": 0.02761116340239972, + "acc_norm": 0.2792452830188679, + "acc_norm_stderr": 0.02761116340239972 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.32727272727272727, + "acc_stderr": 0.04494290866252088, + "acc_norm": 0.32727272727272727, + "acc_norm_stderr": 0.04494290866252088 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.02671924078371215, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.02671924078371215 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.23178807947019867, + "acc_stderr": 0.034454062719870546, + "acc_norm": 0.23178807947019867, + "acc_norm_stderr": 0.034454062719870546 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.24378109452736318, + "acc_stderr": 0.030360490154014635, + "acc_norm": 0.24378109452736318, + "acc_norm_stderr": 0.030360490154014635 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.23699421965317918, + "acc_stderr": 0.03242414757483099, + "acc_norm": 0.23699421965317918, + "acc_norm_stderr": 0.03242414757483099 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2830687830687831, + "acc_stderr": 0.023201392938194974, + "acc_norm": 0.2830687830687831, + "acc_norm_stderr": 0.023201392938194974 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2708333333333333, + "acc_stderr": 0.03716177437566017, + "acc_norm": 0.2708333333333333, + "acc_norm_stderr": 0.03716177437566017 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.22, + "acc_stderr": 0.041633319989322695, + "acc_norm": 0.22, + "acc_norm_stderr": 0.041633319989322695 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.42, + "acc_stderr": 
0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.28034682080924855, + "acc_stderr": 0.024182427496577622, + "acc_norm": 0.28034682080924855, + "acc_norm_stderr": 0.024182427496577622 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3067484662576687, + "acc_stderr": 0.036230899157241474, + "acc_norm": 0.3067484662576687, + "acc_norm_stderr": 0.036230899157241474 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.27469135802469136, + "acc_stderr": 0.02483605786829468, + "acc_norm": 0.27469135802469136, + "acc_norm_stderr": 0.02483605786829468 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.22279792746113988, + "acc_stderr": 0.03003114797764154, + "acc_norm": 0.22279792746113988, + "acc_norm_stderr": 0.03003114797764154 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.03999423879281336, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.03999423879281336 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.23302752293577983, + "acc_stderr": 0.018125669180861493, + "acc_norm": 0.23302752293577983, + "acc_norm_stderr": 0.018125669180861493 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2698412698412698, + "acc_stderr": 0.03970158273235172, + "acc_norm": 0.2698412698412698, + "acc_norm_stderr": 0.03970158273235172 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2973856209150327, + "acc_stderr": 0.02617390850671858, + "acc_norm": 0.2973856209150327, + "acc_norm_stderr": 0.02617390850671858 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 
0.35537190082644626, + "acc_stderr": 0.04369236326573981, + "acc_norm": 0.35537190082644626, + "acc_norm_stderr": 0.04369236326573981 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.21052631578947367, + "acc_stderr": 0.033176727875331574, + "acc_norm": 0.21052631578947367, + "acc_norm_stderr": 0.033176727875331574 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.28104575163398693, + "acc_stderr": 0.018185218954318082, + "acc_norm": 0.28104575163398693, + "acc_norm_stderr": 0.018185218954318082 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2978723404255319, + "acc_stderr": 0.027281608344469414, + "acc_norm": 0.2978723404255319, + "acc_norm_stderr": 0.027281608344469414 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3125, + "acc_stderr": 0.043994650575715215, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.043994650575715215 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.16203703703703703, + "acc_stderr": 0.025130453652268455, + "acc_norm": 0.16203703703703703, + "acc_norm_stderr": 0.025130453652268455 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.19, + "acc_stderr": 0.03942772444036625, + "acc_norm": 0.19, + "acc_norm_stderr": 0.03942772444036625 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.22, + "acc_stderr": 0.041633319989322716, + "acc_norm": 0.22, + "acc_norm_stderr": 0.041633319989322716 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.024562204314142314, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.024562204314142314 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.16326530612244897, + "acc_stderr": 0.023661699177098604, + "acc_norm": 0.16326530612244897, + "acc_norm_stderr": 0.023661699177098604 + 
}, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.350210970464135, + "acc_stderr": 0.031052391937584353, + "acc_norm": 0.350210970464135, + "acc_norm_stderr": 0.031052391937584353 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.26140808344198174, + "acc_stderr": 0.011222528169771316, + "acc_norm": 0.26140808344198174, + "acc_norm_stderr": 0.011222528169771316 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.029771775228145628, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.029771775228145628 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2909090909090909, + "acc_stderr": 0.03546563019624336, + "acc_norm": 0.2909090909090909, + "acc_norm_stderr": 0.03546563019624336 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.26193390452876375, + "mc1_stderr": 0.015392118805015016, + "mc2": 0.42260296070190784, + "mc2_stderr": 0.015435227733476522 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.26564344746162927, + "acc_stderr": 0.015185107107791248, + "acc_norm": 0.3707201889020071, + "acc_norm_stderr": 0.0166058012892126 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + 
"harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "quantumaikr/KoreanLM", + "model_sha": "f4351abcdd6a933afbaffad0badf60c273e71920", + 
"model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/qutrino/7b-finetune/result_2024-01-27 19:21:06.json b/qutrino/7b-finetune/result_2024-01-27 19:21:06.json new file mode 100644 index 0000000000000000000000000000000000000000..b6a07fadbf7b3bdde049fd23d9af9ccf2d239c86 --- /dev/null +++ b/qutrino/7b-finetune/result_2024-01-27 19:21:06.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3250853242320819, + "acc_stderr": 0.01368814730972912, + "acc_norm": 0.3967576791808874, + "acc_norm_stderr": 0.014296513020180646 + }, + "harness|ko_hellaswag|10": { + "acc": 0.37621987651862177, + "acc_stderr": 0.004834461997944861, + "acc_norm": 0.48755228042222665, + "acc_norm_stderr": 0.004988234881206744 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.3391812865497076, + "acc_stderr": 0.03631053496488905, + "acc_norm": 0.3391812865497076, + "acc_norm_stderr": 0.03631053496488905 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.3592233009708738, + "acc_stderr": 0.04750458399041692, + "acc_norm": 0.3592233009708738, + "acc_norm_stderr": 0.04750458399041692 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4227330779054917, + "acc_stderr": 0.017665180351954062, + "acc_norm": 0.4227330779054917, + "acc_norm_stderr": 0.017665180351954062 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.043097329010363554, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.043097329010363554 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2851063829787234, + "acc_stderr": 0.029513196625539345, + "acc_norm": 0.2851063829787234, + "acc_norm_stderr": 0.029513196625539345 + }, + 
"harness|ko_mmlu_virology|5": { + "acc": 0.3674698795180723, + "acc_stderr": 0.03753267402120574, + "acc_norm": 0.3674698795180723, + "acc_norm_stderr": 0.03753267402120574 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.43729903536977494, + "acc_stderr": 0.028173917761762885, + "acc_norm": 0.43729903536977494, + "acc_norm_stderr": 0.028173917761762885 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3901345291479821, + "acc_stderr": 0.03273766725459156, + "acc_norm": 0.3901345291479821, + "acc_norm_stderr": 0.03273766725459156 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.40458015267175573, + "acc_stderr": 0.043046937953806645, + "acc_norm": 0.40458015267175573, + "acc_norm_stderr": 0.043046937953806645 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.41, + "acc_stderr": 0.04943110704237103, + "acc_norm": 0.41, + "acc_norm_stderr": 0.04943110704237103 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.36363636363636365, + "acc_stderr": 0.034273086529999344, + "acc_norm": 0.36363636363636365, + "acc_norm_stderr": 0.034273086529999344 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.33793103448275863, + "acc_stderr": 0.0394170763206489, + "acc_norm": 0.33793103448275863, + "acc_norm_stderr": 0.0394170763206489 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.042801058373643966, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.042801058373643966 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.36554621848739494, + "acc_stderr": 0.031282177063684614, + "acc_norm": 0.36554621848739494, + "acc_norm_stderr": 0.031282177063684614 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.36923076923076925, + "acc_stderr": 0.02446861524147892, + "acc_norm": 0.36923076923076925, + "acc_norm_stderr": 0.02446861524147892 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + 
"acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.42592592592592593, + "acc_stderr": 0.0478034362693679, + "acc_norm": 0.42592592592592593, + "acc_norm_stderr": 0.0478034362693679 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3645320197044335, + "acc_stderr": 0.0338640574606209, + "acc_norm": 0.3645320197044335, + "acc_norm_stderr": 0.0338640574606209 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.34516129032258064, + "acc_stderr": 0.027045746573534323, + "acc_norm": 0.34516129032258064, + "acc_norm_stderr": 0.027045746573534323 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5042735042735043, + "acc_stderr": 0.03275489264382132, + "acc_norm": 0.5042735042735043, + "acc_norm_stderr": 0.03275489264382132 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3622641509433962, + "acc_stderr": 0.0295822451283843, + "acc_norm": 0.3622641509433962, + "acc_norm_stderr": 0.0295822451283843 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.33636363636363636, + "acc_stderr": 0.045253935963025065, + "acc_norm": 0.33636363636363636, + "acc_norm_stderr": 0.045253935963025065 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.02684205787383371, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.02684205787383371 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.24503311258278146, + "acc_stderr": 0.03511807571804724, + "acc_norm": 0.24503311258278146, + "acc_norm_stderr": 0.03511807571804724 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5223880597014925, + "acc_stderr": 0.03531987930208732, + "acc_norm": 0.5223880597014925, + "acc_norm_stderr": 0.03531987930208732 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3352601156069364, + "acc_stderr": 
0.03599586301247078, + "acc_norm": 0.3352601156069364, + "acc_norm_stderr": 0.03599586301247078 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.25132275132275134, + "acc_stderr": 0.022340482339643898, + "acc_norm": 0.25132275132275134, + "acc_norm_stderr": 0.022340482339643898 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3263888888888889, + "acc_stderr": 0.03921067198982266, + "acc_norm": 0.3263888888888889, + "acc_norm_stderr": 0.03921067198982266 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2861271676300578, + "acc_stderr": 0.024332146779134128, + "acc_norm": 0.2861271676300578, + "acc_norm_stderr": 0.024332146779134128 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.26993865030674846, + "acc_stderr": 0.034878251684978906, + "acc_norm": 0.26993865030674846, + "acc_norm_stderr": 0.034878251684978906 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.026869490744815254, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.026869490744815254 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.37823834196891193, + "acc_stderr": 0.03499807276193338, + "acc_norm": 0.37823834196891193, + "acc_norm_stderr": 0.03499807276193338 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.21052631578947367, + "acc_stderr": 0.038351539543994194, + "acc_norm": 0.21052631578947367, + "acc_norm_stderr": 0.038351539543994194 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 
0.43119266055045874, + "acc_stderr": 0.021233365030319567, + "acc_norm": 0.43119266055045874, + "acc_norm_stderr": 0.021233365030319567 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.03932537680392871, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.03932537680392871 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.37254901960784315, + "acc_stderr": 0.027684181883302898, + "acc_norm": 0.37254901960784315, + "acc_norm_stderr": 0.027684181883302898 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.49586776859504134, + "acc_stderr": 0.045641987674327526, + "acc_norm": 0.49586776859504134, + "acc_norm_stderr": 0.045641987674327526 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.35526315789473684, + "acc_stderr": 0.038947344870133176, + "acc_norm": 0.35526315789473684, + "acc_norm_stderr": 0.038947344870133176 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2581699346405229, + "acc_stderr": 0.017704531653250078, + "acc_norm": 0.2581699346405229, + "acc_norm_stderr": 0.017704531653250078 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.30141843971631205, + "acc_stderr": 0.027374128882631146, + "acc_norm": 0.30141843971631205, + "acc_norm_stderr": 0.027374128882631146 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.16964285714285715, + "acc_stderr": 0.0356236785009539, + "acc_norm": 0.16964285714285715, + "acc_norm_stderr": 0.0356236785009539 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3101851851851852, + "acc_stderr": 0.03154696285656628, + "acc_norm": 0.3101851851851852, + "acc_norm_stderr": 0.03154696285656628 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27039106145251396, + "acc_stderr": 0.014854993938010083, + "acc_norm": 0.27039106145251396, + "acc_norm_stderr": 
0.014854993938010083 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4338235294117647, + "acc_stderr": 0.030105636570016636, + "acc_norm": 0.4338235294117647, + "acc_norm_stderr": 0.030105636570016636 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4, + "acc_stderr": 0.03136250240935893, + "acc_norm": 0.4, + "acc_norm_stderr": 0.03136250240935893 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.3924050632911392, + "acc_stderr": 0.03178471874564729, + "acc_norm": 0.3924050632911392, + "acc_norm_stderr": 0.03178471874564729 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3070404172099087, + "acc_stderr": 0.011780959114513774, + "acc_norm": 0.3070404172099087, + "acc_norm_stderr": 0.011780959114513774 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.03166009679399812, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.03166009679399812 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.36363636363636365, + "acc_stderr": 0.03756335775187897, + "acc_norm": 0.36363636363636365, + "acc_norm_stderr": 0.03756335775187897 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24969400244798043, + "mc1_stderr": 0.015152286907148123, + "mc2": 0.41604750399939083, + "mc2_stderr": 0.01504688904356143 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.32585596221959856, + "acc_stderr": 0.016114023894800343, + "acc_norm": 0.4132231404958678, + "acc_norm_stderr": 0.016929480234495232 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 
1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + 
"harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "qutrino/7b-finetune", + "model_sha": "17e70c485b9081a5ad110768bdb722ee25e279e9", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/realPCH/240102_test_noshard/result_2024-01-03 00:54:17.json b/realPCH/240102_test_noshard/result_2024-01-03 00:54:17.json new file mode 100644 index 0000000000000000000000000000000000000000..7008673c9c5d36aa9e62b2ac0adf3831e58a76fb --- /dev/null +++ b/realPCH/240102_test_noshard/result_2024-01-03 00:54:17.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.26706484641638223, + "acc_stderr": 0.012928933196496342, + "acc_norm": 0.34897610921501704, + "acc_norm_stderr": 0.0139289334613825 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3573989245170285, + "acc_stderr": 0.004782542754102088, + "acc_norm": 0.45439155546703847, + "acc_norm_stderr": 0.004968979259738335 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.23976608187134502, + "acc_stderr": 0.03274485211946956, + "acc_norm": 0.23976608187134502, + "acc_norm_stderr": 0.03274485211946956 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.22330097087378642, + "acc_stderr": 0.04123553189891431, + "acc_norm": 0.22330097087378642, + "acc_norm_stderr": 0.04123553189891431 + }, + 
"harness|ko_mmlu_miscellaneous|5": { + "acc": 0.21455938697318008, + "acc_stderr": 0.014680033956893346, + "acc_norm": 0.21455938697318008, + "acc_norm_stderr": 0.014680033956893346 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3111111111111111, + "acc_stderr": 0.03999262876617721, + "acc_norm": 0.3111111111111111, + "acc_norm_stderr": 0.03999262876617721 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.28936170212765955, + "acc_stderr": 0.02964400657700962, + "acc_norm": 0.28936170212765955, + "acc_norm_stderr": 0.02964400657700962 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3433734939759036, + "acc_stderr": 0.03696584317010601, + "acc_norm": 0.3433734939759036, + "acc_norm_stderr": 0.03696584317010601 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2508038585209003, + "acc_stderr": 0.024619771956697165, + "acc_norm": 0.2508038585209003, + "acc_norm_stderr": 0.024619771956697165 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.2645739910313901, + "acc_stderr": 0.02960510321703835, + "acc_norm": 0.2645739910313901, + "acc_norm_stderr": 0.02960510321703835 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.25190839694656486, + "acc_stderr": 0.03807387116306085, + "acc_norm": 0.25190839694656486, + "acc_norm_stderr": 0.03807387116306085 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.21717171717171718, + "acc_stderr": 0.02937661648494563, + "acc_norm": 0.21717171717171718, + "acc_norm_stderr": 0.02937661648494563 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.25517241379310346, + "acc_stderr": 0.03632984052707842, + "acc_norm": 0.25517241379310346, + "acc_norm_stderr": 
0.03632984052707842 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.04280105837364396, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.04280105837364396 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.25630252100840334, + "acc_stderr": 0.02835962087053395, + "acc_norm": 0.25630252100840334, + "acc_norm_stderr": 0.02835962087053395 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2717948717948718, + "acc_stderr": 0.022556551010132368, + "acc_norm": 0.2717948717948718, + "acc_norm_stderr": 0.022556551010132368 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.19, + "acc_stderr": 0.03942772444036623, + "acc_norm": 0.19, + "acc_norm_stderr": 0.03942772444036623 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.28703703703703703, + "acc_stderr": 0.043733130409147614, + "acc_norm": 0.28703703703703703, + "acc_norm_stderr": 0.043733130409147614 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.23645320197044334, + "acc_stderr": 0.029896114291733552, + "acc_norm": 0.23645320197044334, + "acc_norm_stderr": 0.029896114291733552 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.27419354838709675, + "acc_stderr": 0.025378139970885193, + "acc_norm": 0.27419354838709675, + "acc_norm_stderr": 0.025378139970885193 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2948717948717949, + "acc_stderr": 0.029872577708891155, + "acc_norm": 0.2948717948717949, + "acc_norm_stderr": 0.029872577708891155 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2037735849056604, + "acc_stderr": 0.02479078450177541, + "acc_norm": 0.2037735849056604, + "acc_norm_stderr": 0.02479078450177541 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.23636363636363636, + "acc_stderr": 0.04069306319721375, + 
"acc_norm": 0.23636363636363636, + "acc_norm_stderr": 0.04069306319721375 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24074074074074073, + "acc_stderr": 0.026067159222275805, + "acc_norm": 0.24074074074074073, + "acc_norm_stderr": 0.026067159222275805 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.03780445850526732, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.03780445850526732 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.25870646766169153, + "acc_stderr": 0.030965903123573033, + "acc_norm": 0.25870646766169153, + "acc_norm_stderr": 0.030965903123573033 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.19653179190751446, + "acc_stderr": 0.030299574664788137, + "acc_norm": 0.19653179190751446, + "acc_norm_stderr": 0.030299574664788137 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.23015873015873015, + "acc_stderr": 0.021679219663693152, + "acc_norm": 0.23015873015873015, + "acc_norm_stderr": 0.021679219663693152 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.24305555555555555, + "acc_stderr": 0.03586879280080341, + "acc_norm": 0.24305555555555555, + "acc_norm_stderr": 0.03586879280080341 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.23410404624277456, + "acc_stderr": 0.022797110278071134, + "acc_norm": 0.23410404624277456, + "acc_norm_stderr": 0.022797110278071134 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2822085889570552, + "acc_stderr": 0.03536117886664743, + "acc_norm": 0.2822085889570552, + "acc_norm_stderr": 0.03536117886664743 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 
0.2777777777777778, + "acc_stderr": 0.02492200116888633, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.02492200116888633 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.22797927461139897, + "acc_stderr": 0.030276909945178267, + "acc_norm": 0.22797927461139897, + "acc_norm_stderr": 0.030276909945178267 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.04049339297748141, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.04049339297748141 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.2036697247706422, + "acc_stderr": 0.017266742087630797, + "acc_norm": 0.2036697247706422, + "acc_norm_stderr": 0.017266742087630797 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.23015873015873015, + "acc_stderr": 0.037649508797906066, + "acc_norm": 0.23015873015873015, + "acc_norm_stderr": 0.037649508797906066 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.21241830065359477, + "acc_stderr": 0.02342037547829613, + "acc_norm": 0.21241830065359477, + "acc_norm_stderr": 0.02342037547829613 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2892561983471074, + "acc_stderr": 0.041391127276354626, + "acc_norm": 0.2892561983471074, + "acc_norm_stderr": 0.041391127276354626 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.19078947368421054, + "acc_stderr": 0.03197565821032501, + "acc_norm": 0.19078947368421054, + "acc_norm_stderr": 0.03197565821032501 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2434640522875817, + "acc_stderr": 0.017362473762146623, + "acc_norm": 0.2434640522875817, + "acc_norm_stderr": 0.017362473762146623 + }, + 
"harness|ko_mmlu_professional_accounting|5": { + "acc": 0.22695035460992907, + "acc_stderr": 0.024987106365642962, + "acc_norm": 0.22695035460992907, + "acc_norm_stderr": 0.024987106365642962 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2767857142857143, + "acc_stderr": 0.04246624336697626, + "acc_norm": 0.2767857142857143, + "acc_norm_stderr": 0.04246624336697626 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.44907407407407407, + "acc_stderr": 0.03392238405321617, + "acc_norm": 0.44907407407407407, + "acc_norm_stderr": 0.03392238405321617 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27262569832402234, + "acc_stderr": 0.014893391735249608, + "acc_norm": 0.27262569832402234, + "acc_norm_stderr": 0.014893391735249608 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909284, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909284 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.33455882352941174, + "acc_stderr": 0.028661996202335307, + "acc_norm": 0.33455882352941174, + "acc_norm_stderr": 0.028661996202335307 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.22040816326530613, + "acc_stderr": 0.026537045312145287, + "acc_norm": 0.22040816326530613, + "acc_norm_stderr": 0.026537045312145287 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.29535864978902954, + "acc_stderr": 0.029696338713422882, + "acc_norm": 0.29535864978902954, + "acc_norm_stderr": 0.029696338713422882 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2438070404172099, + "acc_stderr": 0.010966507972178475, + "acc_norm": 0.2438070404172099, + "acc_norm_stderr": 0.010966507972178475 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.25, + "acc_stderr": 0.03039153369274154, + 
"acc_norm": 0.25, + "acc_norm_stderr": 0.03039153369274154 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2727272727272727, + "acc_stderr": 0.0347769116216366, + "acc_norm": 0.2727272727272727, + "acc_norm_stderr": 0.0347769116216366 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2778457772337821, + "mc1_stderr": 0.015680929364024633, + "mc2": 0.4294408765617315, + "mc2_stderr": 0.015039627065597595 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.26564344746162927, + "acc_stderr": 0.015185107107791255, + "acc_norm": 0.4002361275088548, + "acc_norm_stderr": 0.016844693510505056 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + 
"harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "realPCH/240102_test_noshard", + "model_sha": "66b432bc356907e352a8be71a1d0b9484af36e80", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/realPCH/240103_llama_test_1/result_2024-01-03 07:54:38.json b/realPCH/240103_llama_test_1/result_2024-01-03 07:54:38.json new file mode 100644 index 0000000000000000000000000000000000000000..2531f1bade35dba49df3ec4bde5ec10e410183a9 --- /dev/null +++ b/realPCH/240103_llama_test_1/result_2024-01-03 07:54:38.json @@ 
-0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.21331058020477817, + "acc_stderr": 0.011970971742326334, + "acc_norm": 0.2568259385665529, + "acc_norm_stderr": 0.0127669237941168 + }, + "harness|ko_hellaswag|10": { + "acc": 0.29286994622585144, + "acc_stderr": 0.0045414921516392275, + "acc_norm": 0.3255327623979287, + "acc_norm_stderr": 0.00467615929910542 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.19298245614035087, + "acc_stderr": 0.030267457554898458, + "acc_norm": 0.19298245614035087, + "acc_norm_stderr": 0.030267457554898458 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.3106796116504854, + "acc_stderr": 0.04582124160161551, + "acc_norm": 0.3106796116504854, + "acc_norm_stderr": 0.04582124160161551 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2388250319284802, + "acc_stderr": 0.015246803197398682, + "acc_norm": 0.2388250319284802, + "acc_norm_stderr": 0.015246803197398682 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.24444444444444444, + "acc_stderr": 0.037125378336148665, + "acc_norm": 0.24444444444444444, + "acc_norm_stderr": 0.037125378336148665 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421255, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421255 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2936170212765957, + "acc_stderr": 0.029771642712491234, + "acc_norm": 0.2936170212765957, + "acc_norm_stderr": 0.029771642712491234 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3253012048192771, + "acc_stderr": 0.03647168523683227, + "acc_norm": 0.3253012048192771, + "acc_norm_stderr": 0.03647168523683227 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2829581993569132, + "acc_stderr": 0.02558306248998483, + "acc_norm": 0.2829581993569132, + "acc_norm_stderr": 0.02558306248998483 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.33183856502242154, + "acc_stderr": 0.03160295143776679, + "acc_norm": 0.33183856502242154, + 
"acc_norm_stderr": 0.03160295143776679 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.22900763358778625, + "acc_stderr": 0.036853466317118506, + "acc_norm": 0.22900763358778625, + "acc_norm_stderr": 0.036853466317118506 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.2727272727272727, + "acc_stderr": 0.03173071239071724, + "acc_norm": 0.2727272727272727, + "acc_norm_stderr": 0.03173071239071724 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2827586206896552, + "acc_stderr": 0.03752833958003337, + "acc_norm": 0.2827586206896552, + "acc_norm_stderr": 0.03752833958003337 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.19607843137254902, + "acc_stderr": 0.03950581861179963, + "acc_norm": 0.19607843137254902, + "acc_norm_stderr": 0.03950581861179963 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.25210084033613445, + "acc_stderr": 0.02820554503327772, + "acc_norm": 0.25210084033613445, + "acc_norm_stderr": 0.02820554503327772 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2948717948717949, + "acc_stderr": 0.02311936275823228, + "acc_norm": 0.2948717948717949, + "acc_norm_stderr": 0.02311936275823228 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.18, + "acc_stderr": 0.03861229196653697, + "acc_norm": 0.18, + "acc_norm_stderr": 0.03861229196653697 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.24074074074074073, + "acc_stderr": 0.04133119440243839, + "acc_norm": 0.24074074074074073, + "acc_norm_stderr": 0.04133119440243839 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.28078817733990147, + "acc_stderr": 0.03161856335358608, + 
"acc_norm": 0.28078817733990147, + "acc_norm_stderr": 0.03161856335358608 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3096774193548387, + "acc_stderr": 0.026302774983517418, + "acc_norm": 0.3096774193548387, + "acc_norm_stderr": 0.026302774983517418 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.23504273504273504, + "acc_stderr": 0.02777883590493543, + "acc_norm": 0.23504273504273504, + "acc_norm_stderr": 0.02777883590493543 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2792452830188679, + "acc_stderr": 0.027611163402399715, + "acc_norm": 0.2792452830188679, + "acc_norm_stderr": 0.027611163402399715 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.3090909090909091, + "acc_stderr": 0.044262946482000985, + "acc_norm": 0.3090909090909091, + "acc_norm_stderr": 0.044262946482000985 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.23703703703703705, + "acc_stderr": 0.025928876132766128, + "acc_norm": 0.23703703703703705, + "acc_norm_stderr": 0.025928876132766128 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33774834437086093, + "acc_stderr": 0.038615575462551684, + "acc_norm": 0.33774834437086093, + "acc_norm_stderr": 0.038615575462551684 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.23383084577114427, + "acc_stderr": 0.02992941540834839, + "acc_norm": 0.23383084577114427, + "acc_norm_stderr": 0.02992941540834839 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.23699421965317918, + "acc_stderr": 0.03242414757483099, + "acc_norm": 0.23699421965317918, + "acc_norm_stderr": 0.03242414757483099 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2566137566137566, + "acc_stderr": 0.022494510767503154, + "acc_norm": 0.2566137566137566, + "acc_norm_stderr": 0.022494510767503154 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.034765901043041336, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.034765901043041336 + }, + 
"harness|ko_mmlu_college_chemistry|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816505, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816505 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.24277456647398843, + "acc_stderr": 0.0230836585869842, + "acc_norm": 0.24277456647398843, + "acc_norm_stderr": 0.0230836585869842 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2822085889570552, + "acc_stderr": 0.03536117886664743, + "acc_norm": 0.2822085889570552, + "acc_norm_stderr": 0.03536117886664743 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2654320987654321, + "acc_stderr": 0.02456922360046085, + "acc_norm": 0.2654320987654321, + "acc_norm_stderr": 0.02456922360046085 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.047609522856952344, + "acc_norm": 0.34, + "acc_norm_stderr": 0.047609522856952344 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.20725388601036268, + "acc_stderr": 0.02925282329180363, + "acc_norm": 0.20725388601036268, + "acc_norm_stderr": 0.02925282329180363 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.21052631578947367, + "acc_stderr": 0.03835153954399419, + "acc_norm": 0.21052631578947367, + "acc_norm_stderr": 0.03835153954399419 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.26422018348623855, + "acc_stderr": 0.018904164171510186, + "acc_norm": 0.26422018348623855, + "acc_norm_stderr": 0.018904164171510186 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.23015873015873015, + "acc_stderr": 0.037649508797906066, + "acc_norm": 0.23015873015873015, + "acc_norm_stderr": 0.037649508797906066 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2875816993464052, + "acc_stderr": 0.02591780611714716, + "acc_norm": 0.2875816993464052, + "acc_norm_stderr": 0.02591780611714716 
+ }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.22, + "acc_stderr": 0.0416333199893227, + "acc_norm": 0.22, + "acc_norm_stderr": 0.0416333199893227 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.30578512396694213, + "acc_stderr": 0.04205953933884122, + "acc_norm": 0.30578512396694213, + "acc_norm_stderr": 0.04205953933884122 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3026315789473684, + "acc_stderr": 0.0373852067611967, + "acc_norm": 0.3026315789473684, + "acc_norm_stderr": 0.0373852067611967 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.016906615927288142, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.016906615927288142 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.26595744680851063, + "acc_stderr": 0.026358065698880592, + "acc_norm": 0.26595744680851063, + "acc_norm_stderr": 0.026358065698880592 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.21428571428571427, + "acc_stderr": 0.03894641120044792, + "acc_norm": 0.21428571428571427, + "acc_norm_stderr": 0.03894641120044792 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.0340470532865388, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.0340470532865388 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2569832402234637, + "acc_stderr": 0.014614465821966358, + "acc_norm": 0.2569832402234637, + "acc_norm_stderr": 0.014614465821966358 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932269, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932269 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4117647058823529, + "acc_stderr": 0.02989616303312547, + "acc_norm": 0.4117647058823529, + 
"acc_norm_stderr": 0.02989616303312547 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.17142857142857143, + "acc_stderr": 0.02412746346265014, + "acc_norm": 0.17142857142857143, + "acc_norm_stderr": 0.02412746346265014 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.25738396624472576, + "acc_stderr": 0.028458820991460302, + "acc_norm": 0.25738396624472576, + "acc_norm_stderr": 0.028458820991460302 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.23402868318122555, + "acc_stderr": 0.0108135855526597, + "acc_norm": 0.23402868318122555, + "acc_norm_stderr": 0.0108135855526597 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.24019607843137256, + "acc_stderr": 0.02998373305591361, + "acc_norm": 0.24019607843137256, + "acc_norm_stderr": 0.02998373305591361 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.24242424242424243, + "acc_stderr": 0.033464098810559534, + "acc_norm": 0.24242424242424243, + "acc_norm_stderr": 0.033464098810559534 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3182374541003672, + "mc1_stderr": 0.016305988648920612, + "mc2": 0.48985625400067373, + "mc2_stderr": 0.01602317028922327 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2396694214876033, + "acc_stderr": 0.014676495332267253, + "acc_norm": 0.2680047225501771, + "acc_norm_stderr": 0.015227905796335145 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + 
"harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + 
"harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "realPCH/240103_llama_test_1", + "model_sha": "86100c9725d63c8a992e9c6fe28b9a7abc759e8f", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/realPCH/240103_llama_test_2/result_2024-01-03 07:55:45.json b/realPCH/240103_llama_test_2/result_2024-01-03 07:55:45.json new file mode 100644 index 0000000000000000000000000000000000000000..41953a5b5e64ffa11a66d218da4860fbf8ca5e7e --- /dev/null +++ b/realPCH/240103_llama_test_2/result_2024-01-03 07:55:45.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.21331058020477817, + "acc_stderr": 0.011970971742326334, + "acc_norm": 0.2568259385665529, + "acc_norm_stderr": 0.0127669237941168 + }, + "harness|ko_hellaswag|10": { + "acc": 0.29286994622585144, + "acc_stderr": 0.0045414921516392275, + "acc_norm": 0.3255327623979287, + "acc_norm_stderr": 0.00467615929910542 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.19298245614035087, + "acc_stderr": 0.030267457554898458, + "acc_norm": 0.19298245614035087, + "acc_norm_stderr": 0.030267457554898458 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.3106796116504854, + "acc_stderr": 0.04582124160161551, + "acc_norm": 0.3106796116504854, + "acc_norm_stderr": 0.04582124160161551 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2388250319284802, + "acc_stderr": 0.015246803197398682, + "acc_norm": 0.2388250319284802, + "acc_norm_stderr": 0.015246803197398682 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.24444444444444444, + "acc_stderr": 0.037125378336148665, + "acc_norm": 0.24444444444444444, + "acc_norm_stderr": 0.037125378336148665 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 
0.045126085985421255, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421255 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2936170212765957, + "acc_stderr": 0.029771642712491234, + "acc_norm": 0.2936170212765957, + "acc_norm_stderr": 0.029771642712491234 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3253012048192771, + "acc_stderr": 0.03647168523683227, + "acc_norm": 0.3253012048192771, + "acc_norm_stderr": 0.03647168523683227 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2829581993569132, + "acc_stderr": 0.02558306248998483, + "acc_norm": 0.2829581993569132, + "acc_norm_stderr": 0.02558306248998483 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.33183856502242154, + "acc_stderr": 0.03160295143776679, + "acc_norm": 0.33183856502242154, + "acc_norm_stderr": 0.03160295143776679 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.22900763358778625, + "acc_stderr": 0.036853466317118506, + "acc_norm": 0.22900763358778625, + "acc_norm_stderr": 0.036853466317118506 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.2727272727272727, + "acc_stderr": 0.03173071239071724, + "acc_norm": 0.2727272727272727, + "acc_norm_stderr": 0.03173071239071724 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2827586206896552, + "acc_stderr": 0.03752833958003337, + "acc_norm": 0.2827586206896552, + "acc_norm_stderr": 0.03752833958003337 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.19607843137254902, + "acc_stderr": 0.03950581861179963, + "acc_norm": 0.19607843137254902, + "acc_norm_stderr": 0.03950581861179963 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.25210084033613445, + "acc_stderr": 0.02820554503327772, + "acc_norm": 0.25210084033613445, + "acc_norm_stderr": 0.02820554503327772 + }, + 
"harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2948717948717949, + "acc_stderr": 0.02311936275823228, + "acc_norm": 0.2948717948717949, + "acc_norm_stderr": 0.02311936275823228 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.18, + "acc_stderr": 0.03861229196653697, + "acc_norm": 0.18, + "acc_norm_stderr": 0.03861229196653697 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.24074074074074073, + "acc_stderr": 0.04133119440243839, + "acc_norm": 0.24074074074074073, + "acc_norm_stderr": 0.04133119440243839 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.28078817733990147, + "acc_stderr": 0.03161856335358608, + "acc_norm": 0.28078817733990147, + "acc_norm_stderr": 0.03161856335358608 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3096774193548387, + "acc_stderr": 0.026302774983517418, + "acc_norm": 0.3096774193548387, + "acc_norm_stderr": 0.026302774983517418 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.23504273504273504, + "acc_stderr": 0.02777883590493543, + "acc_norm": 0.23504273504273504, + "acc_norm_stderr": 0.02777883590493543 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2792452830188679, + "acc_stderr": 0.027611163402399715, + "acc_norm": 0.2792452830188679, + "acc_norm_stderr": 0.027611163402399715 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.3090909090909091, + "acc_stderr": 0.044262946482000985, + "acc_norm": 0.3090909090909091, + "acc_norm_stderr": 0.044262946482000985 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.23703703703703705, + "acc_stderr": 0.025928876132766128, + "acc_norm": 0.23703703703703705, + "acc_norm_stderr": 0.025928876132766128 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33774834437086093, + "acc_stderr": 0.038615575462551684, + "acc_norm": 0.33774834437086093, 
+ "acc_norm_stderr": 0.038615575462551684 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.23383084577114427, + "acc_stderr": 0.02992941540834839, + "acc_norm": 0.23383084577114427, + "acc_norm_stderr": 0.02992941540834839 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.23699421965317918, + "acc_stderr": 0.03242414757483099, + "acc_norm": 0.23699421965317918, + "acc_norm_stderr": 0.03242414757483099 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2566137566137566, + "acc_stderr": 0.022494510767503154, + "acc_norm": 0.2566137566137566, + "acc_norm_stderr": 0.022494510767503154 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.034765901043041336, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.034765901043041336 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816505, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816505 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.24277456647398843, + "acc_stderr": 0.0230836585869842, + "acc_norm": 0.24277456647398843, + "acc_norm_stderr": 0.0230836585869842 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2822085889570552, + "acc_stderr": 0.03536117886664743, + "acc_norm": 0.2822085889570552, + "acc_norm_stderr": 0.03536117886664743 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2654320987654321, + "acc_stderr": 0.02456922360046085, + "acc_norm": 0.2654320987654321, + "acc_norm_stderr": 0.02456922360046085 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.047609522856952344, + "acc_norm": 0.34, + "acc_norm_stderr": 0.047609522856952344 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.20725388601036268, + "acc_stderr": 0.02925282329180363, + "acc_norm": 
0.20725388601036268, + "acc_norm_stderr": 0.02925282329180363 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.21052631578947367, + "acc_stderr": 0.03835153954399419, + "acc_norm": 0.21052631578947367, + "acc_norm_stderr": 0.03835153954399419 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.26422018348623855, + "acc_stderr": 0.018904164171510186, + "acc_norm": 0.26422018348623855, + "acc_norm_stderr": 0.018904164171510186 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.23015873015873015, + "acc_stderr": 0.037649508797906066, + "acc_norm": 0.23015873015873015, + "acc_norm_stderr": 0.037649508797906066 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2875816993464052, + "acc_stderr": 0.02591780611714716, + "acc_norm": 0.2875816993464052, + "acc_norm_stderr": 0.02591780611714716 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.22, + "acc_stderr": 0.0416333199893227, + "acc_norm": 0.22, + "acc_norm_stderr": 0.0416333199893227 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.30578512396694213, + "acc_stderr": 0.04205953933884122, + "acc_norm": 0.30578512396694213, + "acc_norm_stderr": 0.04205953933884122 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3026315789473684, + "acc_stderr": 0.0373852067611967, + "acc_norm": 0.3026315789473684, + "acc_norm_stderr": 0.0373852067611967 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.016906615927288142, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.016906615927288142 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.26595744680851063, + "acc_stderr": 0.026358065698880592, + "acc_norm": 0.26595744680851063, + "acc_norm_stderr": 0.026358065698880592 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.21428571428571427, + "acc_stderr": 0.03894641120044792, + "acc_norm": 0.21428571428571427, + "acc_norm_stderr": 0.03894641120044792 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 
0.4722222222222222, + "acc_stderr": 0.0340470532865388, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.0340470532865388 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2569832402234637, + "acc_stderr": 0.014614465821966358, + "acc_norm": 0.2569832402234637, + "acc_norm_stderr": 0.014614465821966358 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932269, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932269 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4117647058823529, + "acc_stderr": 0.02989616303312547, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.02989616303312547 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.17142857142857143, + "acc_stderr": 0.02412746346265014, + "acc_norm": 0.17142857142857143, + "acc_norm_stderr": 0.02412746346265014 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.25738396624472576, + "acc_stderr": 0.028458820991460302, + "acc_norm": 0.25738396624472576, + "acc_norm_stderr": 0.028458820991460302 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.23402868318122555, + "acc_stderr": 0.0108135855526597, + "acc_norm": 0.23402868318122555, + "acc_norm_stderr": 0.0108135855526597 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.24019607843137256, + "acc_stderr": 0.02998373305591361, + "acc_norm": 0.24019607843137256, + "acc_norm_stderr": 0.02998373305591361 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.24242424242424243, + "acc_stderr": 0.033464098810559534, + "acc_norm": 0.24242424242424243, + "acc_norm_stderr": 0.033464098810559534 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3182374541003672, + "mc1_stderr": 0.016305988648920612, + "mc2": 0.48985625400067373, + "mc2_stderr": 
0.01602317028922327 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2396694214876033, + "acc_stderr": 0.014676495332267253, + "acc_norm": 0.2680047225501771, + "acc_norm_stderr": 0.015227905796335145 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, 
+ "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "realPCH/240103_llama_test_2", + "model_sha": "54933ac33fe0c288816f56e101191ad0f82beccb", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/realPCH/240103_llama_test_3/result_2024-01-03 13:43:43.json b/realPCH/240103_llama_test_3/result_2024-01-03 13:43:43.json new file mode 100644 index 0000000000000000000000000000000000000000..d12d068c113cac98bf02b7bf638a501070e091be --- /dev/null +++ b/realPCH/240103_llama_test_3/result_2024-01-03 13:43:43.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.21331058020477817, + "acc_stderr": 0.011970971742326334, + "acc_norm": 0.2568259385665529, + "acc_norm_stderr": 0.0127669237941168 + }, + "harness|ko_hellaswag|10": { + "acc": 0.29286994622585144, + "acc_stderr": 0.0045414921516392275, + "acc_norm": 0.3255327623979287, + "acc_norm_stderr": 0.00467615929910542 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 
0.19298245614035087, + "acc_stderr": 0.030267457554898458, + "acc_norm": 0.19298245614035087, + "acc_norm_stderr": 0.030267457554898458 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.3106796116504854, + "acc_stderr": 0.04582124160161551, + "acc_norm": 0.3106796116504854, + "acc_norm_stderr": 0.04582124160161551 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2388250319284802, + "acc_stderr": 0.015246803197398682, + "acc_norm": 0.2388250319284802, + "acc_norm_stderr": 0.015246803197398682 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.24444444444444444, + "acc_stderr": 0.037125378336148665, + "acc_norm": 0.24444444444444444, + "acc_norm_stderr": 0.037125378336148665 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421255, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421255 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2936170212765957, + "acc_stderr": 0.029771642712491234, + "acc_norm": 0.2936170212765957, + "acc_norm_stderr": 0.029771642712491234 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3253012048192771, + "acc_stderr": 0.03647168523683227, + "acc_norm": 0.3253012048192771, + "acc_norm_stderr": 0.03647168523683227 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2829581993569132, + "acc_stderr": 0.02558306248998483, + "acc_norm": 0.2829581993569132, + "acc_norm_stderr": 0.02558306248998483 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.33183856502242154, + "acc_stderr": 0.03160295143776679, + "acc_norm": 0.33183856502242154, + "acc_norm_stderr": 0.03160295143776679 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.22900763358778625, + "acc_stderr": 0.036853466317118506, + "acc_norm": 0.22900763358778625, + "acc_norm_stderr": 0.036853466317118506 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_geography|5": { + 
"acc": 0.2727272727272727, + "acc_stderr": 0.03173071239071724, + "acc_norm": 0.2727272727272727, + "acc_norm_stderr": 0.03173071239071724 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2827586206896552, + "acc_stderr": 0.03752833958003337, + "acc_norm": 0.2827586206896552, + "acc_norm_stderr": 0.03752833958003337 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.19607843137254902, + "acc_stderr": 0.03950581861179963, + "acc_norm": 0.19607843137254902, + "acc_norm_stderr": 0.03950581861179963 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.25210084033613445, + "acc_stderr": 0.02820554503327772, + "acc_norm": 0.25210084033613445, + "acc_norm_stderr": 0.02820554503327772 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2948717948717949, + "acc_stderr": 0.02311936275823228, + "acc_norm": 0.2948717948717949, + "acc_norm_stderr": 0.02311936275823228 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.18, + "acc_stderr": 0.03861229196653697, + "acc_norm": 0.18, + "acc_norm_stderr": 0.03861229196653697 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.24074074074074073, + "acc_stderr": 0.04133119440243839, + "acc_norm": 0.24074074074074073, + "acc_norm_stderr": 0.04133119440243839 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.28078817733990147, + "acc_stderr": 0.03161856335358608, + "acc_norm": 0.28078817733990147, + "acc_norm_stderr": 0.03161856335358608 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3096774193548387, + "acc_stderr": 0.026302774983517418, + "acc_norm": 0.3096774193548387, + "acc_norm_stderr": 0.026302774983517418 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.23504273504273504, + "acc_stderr": 0.02777883590493543, + "acc_norm": 0.23504273504273504, + "acc_norm_stderr": 0.02777883590493543 + }, 
+ "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2792452830188679, + "acc_stderr": 0.027611163402399715, + "acc_norm": 0.2792452830188679, + "acc_norm_stderr": 0.027611163402399715 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.3090909090909091, + "acc_stderr": 0.044262946482000985, + "acc_norm": 0.3090909090909091, + "acc_norm_stderr": 0.044262946482000985 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.23703703703703705, + "acc_stderr": 0.025928876132766128, + "acc_norm": 0.23703703703703705, + "acc_norm_stderr": 0.025928876132766128 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33774834437086093, + "acc_stderr": 0.038615575462551684, + "acc_norm": 0.33774834437086093, + "acc_norm_stderr": 0.038615575462551684 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.23383084577114427, + "acc_stderr": 0.02992941540834839, + "acc_norm": 0.23383084577114427, + "acc_norm_stderr": 0.02992941540834839 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.23699421965317918, + "acc_stderr": 0.03242414757483099, + "acc_norm": 0.23699421965317918, + "acc_norm_stderr": 0.03242414757483099 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2566137566137566, + "acc_stderr": 0.022494510767503154, + "acc_norm": 0.2566137566137566, + "acc_norm_stderr": 0.022494510767503154 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.034765901043041336, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.034765901043041336 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816505, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816505 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.24277456647398843, + "acc_stderr": 0.0230836585869842, + "acc_norm": 0.24277456647398843, + 
"acc_norm_stderr": 0.0230836585869842 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2822085889570552, + "acc_stderr": 0.03536117886664743, + "acc_norm": 0.2822085889570552, + "acc_norm_stderr": 0.03536117886664743 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2654320987654321, + "acc_stderr": 0.02456922360046085, + "acc_norm": 0.2654320987654321, + "acc_norm_stderr": 0.02456922360046085 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.047609522856952344, + "acc_norm": 0.34, + "acc_norm_stderr": 0.047609522856952344 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.20725388601036268, + "acc_stderr": 0.02925282329180363, + "acc_norm": 0.20725388601036268, + "acc_norm_stderr": 0.02925282329180363 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.21052631578947367, + "acc_stderr": 0.03835153954399419, + "acc_norm": 0.21052631578947367, + "acc_norm_stderr": 0.03835153954399419 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.26422018348623855, + "acc_stderr": 0.018904164171510186, + "acc_norm": 0.26422018348623855, + "acc_norm_stderr": 0.018904164171510186 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.23015873015873015, + "acc_stderr": 0.037649508797906066, + "acc_norm": 0.23015873015873015, + "acc_norm_stderr": 0.037649508797906066 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2875816993464052, + "acc_stderr": 0.02591780611714716, + "acc_norm": 0.2875816993464052, + "acc_norm_stderr": 0.02591780611714716 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.22, + "acc_stderr": 0.0416333199893227, + "acc_norm": 0.22, + "acc_norm_stderr": 0.0416333199893227 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.30578512396694213, + "acc_stderr": 0.04205953933884122, + "acc_norm": 0.30578512396694213, + "acc_norm_stderr": 0.04205953933884122 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3026315789473684, + "acc_stderr": 0.0373852067611967, + 
"acc_norm": 0.3026315789473684, + "acc_norm_stderr": 0.0373852067611967 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.016906615927288142, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.016906615927288142 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.26595744680851063, + "acc_stderr": 0.026358065698880592, + "acc_norm": 0.26595744680851063, + "acc_norm_stderr": 0.026358065698880592 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.21428571428571427, + "acc_stderr": 0.03894641120044792, + "acc_norm": 0.21428571428571427, + "acc_norm_stderr": 0.03894641120044792 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.0340470532865388, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.0340470532865388 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2569832402234637, + "acc_stderr": 0.014614465821966358, + "acc_norm": 0.2569832402234637, + "acc_norm_stderr": 0.014614465821966358 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932269, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932269 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4117647058823529, + "acc_stderr": 0.02989616303312547, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.02989616303312547 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.17142857142857143, + "acc_stderr": 0.02412746346265014, + "acc_norm": 0.17142857142857143, + "acc_norm_stderr": 0.02412746346265014 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.25738396624472576, + "acc_stderr": 0.028458820991460302, + "acc_norm": 0.25738396624472576, + "acc_norm_stderr": 0.028458820991460302 + }, + 
"harness|ko_mmlu_professional_law|5": { + "acc": 0.23402868318122555, + "acc_stderr": 0.0108135855526597, + "acc_norm": 0.23402868318122555, + "acc_norm_stderr": 0.0108135855526597 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.24019607843137256, + "acc_stderr": 0.02998373305591361, + "acc_norm": 0.24019607843137256, + "acc_norm_stderr": 0.02998373305591361 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.24242424242424243, + "acc_stderr": 0.033464098810559534, + "acc_norm": 0.24242424242424243, + "acc_norm_stderr": 0.033464098810559534 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3182374541003672, + "mc1_stderr": 0.016305988648920612, + "mc2": 0.48985625400067373, + "mc2_stderr": 0.01602317028922327 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2396694214876033, + "acc_stderr": 0.014676495332267253, + "acc_norm": 0.2680047225501771, + "acc_norm_stderr": 0.015227905796335145 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + 
"harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "realPCH/240103_llama_test_3", + "model_sha": "4f5c579e90370936459e17e7e7e0f1a0cf0fe6e6", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git 
a/realPCH/240103_test_nosafetensor/result_2024-01-03 02:00:54.json b/realPCH/240103_test_nosafetensor/result_2024-01-03 02:00:54.json new file mode 100644 index 0000000000000000000000000000000000000000..1c8a3e69f11dced64daf06881cd50b7c31c005f9 --- /dev/null +++ b/realPCH/240103_test_nosafetensor/result_2024-01-03 02:00:54.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.26706484641638223, + "acc_stderr": 0.012928933196496342, + "acc_norm": 0.34897610921501704, + "acc_norm_stderr": 0.0139289334613825 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3573989245170285, + "acc_stderr": 0.004782542754102088, + "acc_norm": 0.45439155546703847, + "acc_norm_stderr": 0.004968979259738335 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.23976608187134502, + "acc_stderr": 0.03274485211946956, + "acc_norm": 0.23976608187134502, + "acc_norm_stderr": 0.03274485211946956 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.22330097087378642, + "acc_stderr": 0.04123553189891431, + "acc_norm": 0.22330097087378642, + "acc_norm_stderr": 0.04123553189891431 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.21455938697318008, + "acc_stderr": 0.014680033956893346, + "acc_norm": 0.21455938697318008, + "acc_norm_stderr": 0.014680033956893346 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3111111111111111, + "acc_stderr": 0.03999262876617721, + "acc_norm": 0.3111111111111111, + "acc_norm_stderr": 0.03999262876617721 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.28936170212765955, + "acc_stderr": 0.02964400657700962, + "acc_norm": 0.28936170212765955, + "acc_norm_stderr": 0.02964400657700962 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3433734939759036, + "acc_stderr": 0.03696584317010601, + "acc_norm": 0.3433734939759036, + "acc_norm_stderr": 0.03696584317010601 + 
}, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2508038585209003, + "acc_stderr": 0.024619771956697165, + "acc_norm": 0.2508038585209003, + "acc_norm_stderr": 0.024619771956697165 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.2645739910313901, + "acc_stderr": 0.02960510321703835, + "acc_norm": 0.2645739910313901, + "acc_norm_stderr": 0.02960510321703835 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.25190839694656486, + "acc_stderr": 0.03807387116306085, + "acc_norm": 0.25190839694656486, + "acc_norm_stderr": 0.03807387116306085 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.21717171717171718, + "acc_stderr": 0.02937661648494563, + "acc_norm": 0.21717171717171718, + "acc_norm_stderr": 0.02937661648494563 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.25517241379310346, + "acc_stderr": 0.03632984052707842, + "acc_norm": 0.25517241379310346, + "acc_norm_stderr": 0.03632984052707842 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.04280105837364396, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.04280105837364396 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.25630252100840334, + "acc_stderr": 0.02835962087053395, + "acc_norm": 0.25630252100840334, + "acc_norm_stderr": 0.02835962087053395 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2717948717948718, + "acc_stderr": 0.022556551010132368, + "acc_norm": 0.2717948717948718, + "acc_norm_stderr": 0.022556551010132368 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.19, + "acc_stderr": 0.03942772444036623, + "acc_norm": 0.19, + "acc_norm_stderr": 
0.03942772444036623 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.28703703703703703, + "acc_stderr": 0.043733130409147614, + "acc_norm": 0.28703703703703703, + "acc_norm_stderr": 0.043733130409147614 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.23645320197044334, + "acc_stderr": 0.029896114291733552, + "acc_norm": 0.23645320197044334, + "acc_norm_stderr": 0.029896114291733552 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.27419354838709675, + "acc_stderr": 0.025378139970885193, + "acc_norm": 0.27419354838709675, + "acc_norm_stderr": 0.025378139970885193 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2948717948717949, + "acc_stderr": 0.029872577708891155, + "acc_norm": 0.2948717948717949, + "acc_norm_stderr": 0.029872577708891155 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2037735849056604, + "acc_stderr": 0.02479078450177541, + "acc_norm": 0.2037735849056604, + "acc_norm_stderr": 0.02479078450177541 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.23636363636363636, + "acc_stderr": 0.04069306319721375, + "acc_norm": 0.23636363636363636, + "acc_norm_stderr": 0.04069306319721375 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24074074074074073, + "acc_stderr": 0.026067159222275805, + "acc_norm": 0.24074074074074073, + "acc_norm_stderr": 0.026067159222275805 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.03780445850526732, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.03780445850526732 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.25870646766169153, + "acc_stderr": 0.030965903123573033, + "acc_norm": 0.25870646766169153, + "acc_norm_stderr": 0.030965903123573033 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.19653179190751446, + "acc_stderr": 0.030299574664788137, + "acc_norm": 0.19653179190751446, + "acc_norm_stderr": 0.030299574664788137 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 
0.23015873015873015, + "acc_stderr": 0.021679219663693152, + "acc_norm": 0.23015873015873015, + "acc_norm_stderr": 0.021679219663693152 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.24305555555555555, + "acc_stderr": 0.03586879280080341, + "acc_norm": 0.24305555555555555, + "acc_norm_stderr": 0.03586879280080341 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.23410404624277456, + "acc_stderr": 0.022797110278071134, + "acc_norm": 0.23410404624277456, + "acc_norm_stderr": 0.022797110278071134 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2822085889570552, + "acc_stderr": 0.03536117886664743, + "acc_norm": 0.2822085889570552, + "acc_norm_stderr": 0.03536117886664743 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.02492200116888633, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.02492200116888633 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.22797927461139897, + "acc_stderr": 0.030276909945178267, + "acc_norm": 0.22797927461139897, + "acc_norm_stderr": 0.030276909945178267 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.04049339297748141, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.04049339297748141 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.2036697247706422, + "acc_stderr": 0.017266742087630797, + "acc_norm": 0.2036697247706422, + "acc_norm_stderr": 0.017266742087630797 + }, + 
"harness|ko_mmlu_formal_logic|5": { + "acc": 0.23015873015873015, + "acc_stderr": 0.037649508797906066, + "acc_norm": 0.23015873015873015, + "acc_norm_stderr": 0.037649508797906066 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.21241830065359477, + "acc_stderr": 0.02342037547829613, + "acc_norm": 0.21241830065359477, + "acc_norm_stderr": 0.02342037547829613 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2892561983471074, + "acc_stderr": 0.041391127276354626, + "acc_norm": 0.2892561983471074, + "acc_norm_stderr": 0.041391127276354626 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.19078947368421054, + "acc_stderr": 0.03197565821032501, + "acc_norm": 0.19078947368421054, + "acc_norm_stderr": 0.03197565821032501 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2434640522875817, + "acc_stderr": 0.017362473762146623, + "acc_norm": 0.2434640522875817, + "acc_norm_stderr": 0.017362473762146623 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.22695035460992907, + "acc_stderr": 0.024987106365642962, + "acc_norm": 0.22695035460992907, + "acc_norm_stderr": 0.024987106365642962 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2767857142857143, + "acc_stderr": 0.04246624336697626, + "acc_norm": 0.2767857142857143, + "acc_norm_stderr": 0.04246624336697626 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.44907407407407407, + "acc_stderr": 0.03392238405321617, + "acc_norm": 0.44907407407407407, + "acc_norm_stderr": 0.03392238405321617 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27262569832402234, + "acc_stderr": 0.014893391735249608, + "acc_norm": 0.27262569832402234, + "acc_norm_stderr": 0.014893391735249608 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + 
"acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909284, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909284 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.33455882352941174, + "acc_stderr": 0.028661996202335307, + "acc_norm": 0.33455882352941174, + "acc_norm_stderr": 0.028661996202335307 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.22040816326530613, + "acc_stderr": 0.026537045312145287, + "acc_norm": 0.22040816326530613, + "acc_norm_stderr": 0.026537045312145287 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.29535864978902954, + "acc_stderr": 0.029696338713422882, + "acc_norm": 0.29535864978902954, + "acc_norm_stderr": 0.029696338713422882 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2438070404172099, + "acc_stderr": 0.010966507972178475, + "acc_norm": 0.2438070404172099, + "acc_norm_stderr": 0.010966507972178475 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.25, + "acc_stderr": 0.03039153369274154, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03039153369274154 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2727272727272727, + "acc_stderr": 0.0347769116216366, + "acc_norm": 0.2727272727272727, + "acc_norm_stderr": 0.0347769116216366 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2778457772337821, + "mc1_stderr": 0.015680929364024633, + "mc2": 0.4294408765617315, + "mc2_stderr": 0.015039627065597595 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.26564344746162927, + "acc_stderr": 0.015185107107791255, + "acc_norm": 0.4002361275088548, + "acc_norm_stderr": 0.016844693510505056 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + 
"harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + 
"harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "realPCH/240103_test_nosafetensor", + "model_sha": "acc97ca68c7e2d776e053ab1e9fa0194abda933c", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/realPCH/240104_mistral_lora/result_2024-01-04 01:18:15.json b/realPCH/240104_mistral_lora/result_2024-01-04 01:18:15.json new file mode 100644 index 0000000000000000000000000000000000000000..b2e24b212954d8a090d81e083d354fd5fdcc1a84 --- /dev/null +++ b/realPCH/240104_mistral_lora/result_2024-01-04 01:18:15.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.32337883959044367, + "acc_stderr": 0.013669421630012134, + "acc_norm": 0.3626279863481229, + "acc_norm_stderr": 0.014049106564955019 + }, + "harness|ko_hellaswag|10": { + "acc": 0.35660227046405096, + "acc_stderr": 0.004780169873332854, + "acc_norm": 0.4585739892451703, + "acc_norm_stderr": 0.004972625848702653 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4619883040935672, + "acc_stderr": 0.03823727092882307, + "acc_norm": 0.4619883040935672, + "acc_norm_stderr": 0.03823727092882307 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5339805825242718, + "acc_stderr": 0.0493929144727348, + "acc_norm": 0.5339805825242718, + "acc_norm_stderr": 0.0493929144727348 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4227330779054917, + "acc_stderr": 0.017665180351954062, + 
"acc_norm": 0.4227330779054917, + "acc_norm_stderr": 0.017665180351954062 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3037037037037037, + "acc_stderr": 0.039725528847851375, + "acc_norm": 0.3037037037037037, + "acc_norm_stderr": 0.039725528847851375 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3702127659574468, + "acc_stderr": 0.03156564682236785, + "acc_norm": 0.3702127659574468, + "acc_norm_stderr": 0.03156564682236785 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3855421686746988, + "acc_stderr": 0.037891344246115496, + "acc_norm": 0.3855421686746988, + "acc_norm_stderr": 0.037891344246115496 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4180064308681672, + "acc_stderr": 0.02801365189199507, + "acc_norm": 0.4180064308681672, + "acc_norm_stderr": 0.02801365189199507 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4080717488789238, + "acc_stderr": 0.03298574607842821, + "acc_norm": 0.4080717488789238, + "acc_norm_stderr": 0.03298574607842821 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4122137404580153, + "acc_stderr": 0.04317171194870255, + "acc_norm": 0.4122137404580153, + "acc_norm_stderr": 0.04317171194870255 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.035476014940069384, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.035476014940069384 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.43448275862068964, + "acc_stderr": 0.041307408795554966, + "acc_norm": 0.43448275862068964, + "acc_norm_stderr": 0.041307408795554966 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.28431372549019607, + "acc_stderr": 
0.04488482852329017, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.04488482852329017 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4831932773109244, + "acc_stderr": 0.03246013680375308, + "acc_norm": 0.4831932773109244, + "acc_norm_stderr": 0.03246013680375308 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.39487179487179486, + "acc_stderr": 0.02478431694215637, + "acc_norm": 0.39487179487179486, + "acc_norm_stderr": 0.02478431694215637 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.58, + "acc_stderr": 0.04960449637488583, + "acc_norm": 0.58, + "acc_norm_stderr": 0.04960449637488583 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.22, + "acc_stderr": 0.041633319989322695, + "acc_norm": 0.22, + "acc_norm_stderr": 0.041633319989322695 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.04820403072760627, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.04820403072760627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.03465304488406796, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.03465304488406796 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.45161290322580644, + "acc_stderr": 0.028310500348568392, + "acc_norm": 0.45161290322580644, + "acc_norm_stderr": 0.028310500348568392 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6965811965811965, + "acc_stderr": 0.030118210106942645, + "acc_norm": 0.6965811965811965, + "acc_norm_stderr": 0.030118210106942645 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4528301886792453, + "acc_stderr": 0.03063562795796183, + "acc_norm": 0.4528301886792453, + "acc_norm_stderr": 0.03063562795796183 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4909090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.4909090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + 
"harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3296296296296296, + "acc_stderr": 0.028661201116524582, + "acc_norm": 0.3296296296296296, + "acc_norm_stderr": 0.028661201116524582 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33112582781456956, + "acc_stderr": 0.038425817186598696, + "acc_norm": 0.33112582781456956, + "acc_norm_stderr": 0.038425817186598696 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6119402985074627, + "acc_stderr": 0.0344578996436275, + "acc_norm": 0.6119402985074627, + "acc_norm_stderr": 0.0344578996436275 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.36416184971098264, + "acc_stderr": 0.03669072477416907, + "acc_norm": 0.36416184971098264, + "acc_norm_stderr": 0.03669072477416907 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3386243386243386, + "acc_stderr": 0.024373197867983053, + "acc_norm": 0.3386243386243386, + "acc_norm_stderr": 0.024373197867983053 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2986111111111111, + "acc_stderr": 0.03827052357950756, + "acc_norm": 0.2986111111111111, + "acc_norm_stderr": 0.03827052357950756 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.476878612716763, + "acc_stderr": 0.026890297881303128, + "acc_norm": 0.476878612716763, + "acc_norm_stderr": 0.026890297881303128 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4110429447852761, + "acc_stderr": 0.038656978537853624, + "acc_norm": 0.4110429447852761, + "acc_norm_stderr": 0.038656978537853624 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4567901234567901, + "acc_stderr": 0.027716661650194045, + "acc_norm": 0.4567901234567901, + "acc_norm_stderr": 
0.027716661650194045 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.46632124352331605, + "acc_stderr": 0.03600244069867178, + "acc_norm": 0.46632124352331605, + "acc_norm_stderr": 0.03600244069867178 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.32456140350877194, + "acc_stderr": 0.04404556157374768, + "acc_norm": 0.32456140350877194, + "acc_norm_stderr": 0.04404556157374768 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.46788990825688076, + "acc_stderr": 0.02139307122268081, + "acc_norm": 0.46788990825688076, + "acc_norm_stderr": 0.02139307122268081 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.373015873015873, + "acc_stderr": 0.04325506042017086, + "acc_norm": 0.373015873015873, + "acc_norm_stderr": 0.04325506042017086 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.47058823529411764, + "acc_stderr": 0.02858034106513828, + "acc_norm": 0.47058823529411764, + "acc_norm_stderr": 0.02858034106513828 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6528925619834711, + "acc_stderr": 0.04345724570292536, + "acc_norm": 0.6528925619834711, + "acc_norm_stderr": 0.04345724570292536 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4144736842105263, + "acc_stderr": 0.04008973785779206, + "acc_norm": 0.4144736842105263, + "acc_norm_stderr": 0.04008973785779206 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3790849673202614, + "acc_stderr": 0.019627444748412236, + "acc_norm": 0.3790849673202614, + "acc_norm_stderr": 0.019627444748412236 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.33687943262411346, + "acc_stderr": 0.028195534873966727, + 
"acc_norm": 0.33687943262411346, + "acc_norm_stderr": 0.028195534873966727 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4017857142857143, + "acc_stderr": 0.04653333146973646, + "acc_norm": 0.4017857142857143, + "acc_norm_stderr": 0.04653333146973646 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.033247089118091176, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.033247089118091176 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.3217877094972067, + "acc_stderr": 0.015624236160792577, + "acc_norm": 0.3217877094972067, + "acc_norm_stderr": 0.015624236160792577 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.34191176470588236, + "acc_stderr": 0.02881472242225418, + "acc_norm": 0.34191176470588236, + "acc_norm_stderr": 0.02881472242225418 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5673469387755102, + "acc_stderr": 0.031717528240626645, + "acc_norm": 0.5673469387755102, + "acc_norm_stderr": 0.031717528240626645 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5569620253164557, + "acc_stderr": 0.03233532777533484, + "acc_norm": 0.5569620253164557, + "acc_norm_stderr": 0.03233532777533484 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3324641460234681, + "acc_stderr": 0.012032022332260516, + "acc_norm": 0.3324641460234681, + "acc_norm_stderr": 0.012032022332260516 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4117647058823529, + "acc_stderr": 0.03454236585380609, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.03454236585380609 + }, + 
"harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4484848484848485, + "acc_stderr": 0.038835659779569286, + "acc_norm": 0.4484848484848485, + "acc_norm_stderr": 0.038835659779569286 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3182374541003672, + "mc1_stderr": 0.016305988648920598, + "mc2": 0.4900916793307252, + "mc2_stderr": 0.015725187106486866 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4085005903187721, + "acc_stderr": 0.016900062879427115, + "acc_norm": 0.48288075560802834, + "acc_norm_stderr": 0.017180275246085622 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + 
"harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "realPCH/240104_mistral_lora", + "model_sha": "8d4ea69572f72ec13bb78d91e493213caea59268", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/realPCH/Orca-Platypus-kiwi-1epoch/result_2024-01-28 23:15:00.json b/realPCH/Orca-Platypus-kiwi-1epoch/result_2024-01-28 23:15:00.json new file mode 100644 index 0000000000000000000000000000000000000000..18a9d6bda5bfcab4abc4e278ef959147eef2897b --- /dev/null +++ b/realPCH/Orca-Platypus-kiwi-1epoch/result_2024-01-28 23:15:00.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { 
+ "acc": 0.42406143344709896, + "acc_stderr": 0.014441889627464394, + "acc_norm": 0.4709897610921502, + "acc_norm_stderr": 0.014586776355294314 + }, + "harness|ko_hellaswag|10": { + "acc": 0.43776140211113324, + "acc_stderr": 0.004950973231188735, + "acc_norm": 0.5995817566221868, + "acc_norm_stderr": 0.004889817489739684 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6374269005847953, + "acc_stderr": 0.03687130615562059, + "acc_norm": 0.6374269005847953, + "acc_norm_stderr": 0.03687130615562059 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6116504854368932, + "acc_stderr": 0.04825729337356389, + "acc_norm": 0.6116504854368932, + "acc_norm_stderr": 0.04825729337356389 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6538952745849298, + "acc_stderr": 0.01701196526641208, + "acc_norm": 0.6538952745849298, + "acc_norm_stderr": 0.01701196526641208 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4888888888888889, + "acc_stderr": 0.04318275491977976, + "acc_norm": 0.4888888888888889, + "acc_norm_stderr": 0.04318275491977976 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.451063829787234, + "acc_stderr": 0.032529096196131965, + "acc_norm": 0.451063829787234, + "acc_norm_stderr": 0.032529096196131965 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.5120481927710844, + "acc_stderr": 0.03891364495835817, + "acc_norm": 0.5120481927710844, + "acc_norm_stderr": 0.03891364495835817 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6141479099678456, + "acc_stderr": 0.027648149599751464, + "acc_norm": 0.6141479099678456, + "acc_norm_stderr": 0.027648149599751464 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5515695067264574, + "acc_stderr": 0.033378837362550984, + "acc_norm": 0.5515695067264574, + "acc_norm_stderr": 0.033378837362550984 + }, + "harness|ko_mmlu_human_sexuality|5": 
{ + "acc": 0.6183206106870229, + "acc_stderr": 0.042607351576445594, + "acc_norm": 0.6183206106870229, + "acc_norm_stderr": 0.042607351576445594 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7424242424242424, + "acc_stderr": 0.03115626951964684, + "acc_norm": 0.7424242424242424, + "acc_norm_stderr": 0.03115626951964684 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.45517241379310347, + "acc_stderr": 0.04149886942192117, + "acc_norm": 0.45517241379310347, + "acc_norm_stderr": 0.04149886942192117 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.30392156862745096, + "acc_stderr": 0.04576665403207761, + "acc_norm": 0.30392156862745096, + "acc_norm_stderr": 0.04576665403207761 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5966386554621849, + "acc_stderr": 0.03186608121408832, + "acc_norm": 0.5966386554621849, + "acc_norm_stderr": 0.03186608121408832 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5102564102564102, + "acc_stderr": 0.025345672221942374, + "acc_norm": 0.5102564102564102, + "acc_norm_stderr": 0.025345672221942374 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.58, + "acc_stderr": 0.04960449637488583, + "acc_norm": 0.58, + "acc_norm_stderr": 0.04960449637488583 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6111111111111112, + "acc_stderr": 0.04712821257426769, + "acc_norm": 0.6111111111111112, + "acc_norm_stderr": 0.04712821257426769 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.43349753694581283, + "acc_stderr": 0.034867317274198714, + "acc_norm": 0.43349753694581283, + "acc_norm_stderr": 0.034867317274198714 + }, + 
"harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5806451612903226, + "acc_stderr": 0.028071588901091835, + "acc_norm": 0.5806451612903226, + "acc_norm_stderr": 0.028071588901091835 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7735042735042735, + "acc_stderr": 0.027421007295392943, + "acc_norm": 0.7735042735042735, + "acc_norm_stderr": 0.027421007295392943 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.49056603773584906, + "acc_stderr": 0.0307673947078081, + "acc_norm": 0.49056603773584906, + "acc_norm_stderr": 0.0307673947078081 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6090909090909091, + "acc_stderr": 0.04673752333670238, + "acc_norm": 0.6090909090909091, + "acc_norm_stderr": 0.04673752333670238 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.029116617606083018, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.029116617606083018 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3443708609271523, + "acc_stderr": 0.038796870240733264, + "acc_norm": 0.3443708609271523, + "acc_norm_stderr": 0.038796870240733264 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7164179104477612, + "acc_stderr": 0.03187187537919795, + "acc_norm": 0.7164179104477612, + "acc_norm_stderr": 0.03187187537919795 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4913294797687861, + "acc_stderr": 0.03811890988940412, + "acc_norm": 0.4913294797687861, + "acc_norm_stderr": 0.03811890988940412 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.4021164021164021, + "acc_stderr": 0.02525303255499769, + "acc_norm": 0.4021164021164021, + "acc_norm_stderr": 0.02525303255499769 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5069444444444444, + "acc_stderr": 0.04180806750294938, + "acc_norm": 0.5069444444444444, + "acc_norm_stderr": 0.04180806750294938 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + 
"acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.74, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.74, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5809248554913294, + "acc_stderr": 0.02656417811142262, + "acc_norm": 0.5809248554913294, + "acc_norm_stderr": 0.02656417811142262 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5398773006134969, + "acc_stderr": 0.03915857291436972, + "acc_norm": 0.5398773006134969, + "acc_norm_stderr": 0.03915857291436972 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6049382716049383, + "acc_stderr": 0.027201117666925647, + "acc_norm": 0.6049382716049383, + "acc_norm_stderr": 0.027201117666925647 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6994818652849741, + "acc_stderr": 0.0330881859441575, + "acc_norm": 0.6994818652849741, + "acc_norm_stderr": 0.0330881859441575 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.38596491228070173, + "acc_stderr": 0.04579639422070435, + "acc_norm": 0.38596491228070173, + "acc_norm_stderr": 0.04579639422070435 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.671559633027523, + "acc_stderr": 0.0201359027972984, + "acc_norm": 0.671559633027523, + "acc_norm_stderr": 0.0201359027972984 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3492063492063492, + "acc_stderr": 0.04263906892795132, + "acc_norm": 0.3492063492063492, + "acc_norm_stderr": 0.04263906892795132 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5620915032679739, + "acc_stderr": 0.02840830202033269, + "acc_norm": 0.5620915032679739, + "acc_norm_stderr": 0.02840830202033269 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 
0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6859504132231405, + "acc_stderr": 0.042369647530410184, + "acc_norm": 0.6859504132231405, + "acc_norm_stderr": 0.042369647530410184 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5657894736842105, + "acc_stderr": 0.04033565667848319, + "acc_norm": 0.5657894736842105, + "acc_norm_stderr": 0.04033565667848319 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4852941176470588, + "acc_stderr": 0.020219083895133924, + "acc_norm": 0.4852941176470588, + "acc_norm_stderr": 0.020219083895133924 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3723404255319149, + "acc_stderr": 0.02883892147125146, + "acc_norm": 0.3723404255319149, + "acc_norm_stderr": 0.02883892147125146 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.36607142857142855, + "acc_stderr": 0.04572372358737431, + "acc_norm": 0.36607142857142855, + "acc_norm_stderr": 0.04572372358737431 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.0340470532865388, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.0340470532865388 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.20446927374301677, + "acc_stderr": 0.013488813404711905, + "acc_norm": 0.20446927374301677, + "acc_norm_stderr": 0.013488813404711905 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.0498887651569859, + "acc_norm": 0.44, + "acc_norm_stderr": 0.0498887651569859 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.72, + "acc_stderr": 0.045126085985421255, + "acc_norm": 0.72, + "acc_norm_stderr": 0.045126085985421255 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5073529411764706, + "acc_stderr": 0.030369552523902173, + "acc_norm": 0.5073529411764706, + "acc_norm_stderr": 0.030369552523902173 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5959183673469388, + 
"acc_stderr": 0.03141470802586588, + "acc_norm": 0.5959183673469388, + "acc_norm_stderr": 0.03141470802586588 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7088607594936709, + "acc_stderr": 0.029571601065753374, + "acc_norm": 0.7088607594936709, + "acc_norm_stderr": 0.029571601065753374 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3878748370273794, + "acc_stderr": 0.012444998309675633, + "acc_norm": 0.3878748370273794, + "acc_norm_stderr": 0.012444998309675633 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6274509803921569, + "acc_stderr": 0.03393388584958404, + "acc_norm": 0.6274509803921569, + "acc_norm_stderr": 0.03393388584958404 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6727272727272727, + "acc_stderr": 0.03663974994391242, + "acc_norm": 0.6727272727272727, + "acc_norm_stderr": 0.03663974994391242 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2521419828641371, + "mc1_stderr": 0.015201522246299956, + "mc2": 0.39596313102363667, + "mc2_stderr": 0.014811261606457016 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5182998819362455, + "acc_stderr": 0.017178836639177755, + "acc_norm": 0.5844155844155844, + "acc_norm_stderr": 0.016943586313076565 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + 
"harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + 
"model_name": "realPCH/Orca-Platypus-kiwi-1epoch", + "model_sha": "c8e7fc1b416356478a79d7046bc1cee34bbe2f8b", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/realPCH/Orca-Platypus-v3-1epoch/result_2024-01-28 23:16:19.json b/realPCH/Orca-Platypus-v3-1epoch/result_2024-01-28 23:16:19.json new file mode 100644 index 0000000000000000000000000000000000000000..b431ae661524fff29df418148b1bd00f41f51714 --- /dev/null +++ b/realPCH/Orca-Platypus-v3-1epoch/result_2024-01-28 23:16:19.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4308873720136519, + "acc_stderr": 0.014471133392642475, + "acc_norm": 0.4709897610921502, + "acc_norm_stderr": 0.014586776355294317 + }, + "harness|ko_hellaswag|10": { + "acc": 0.43527185819557856, + "acc_stderr": 0.004947793051042668, + "acc_norm": 0.6000796654052978, + "acc_norm_stderr": 0.004888805003103068 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6198830409356725, + "acc_stderr": 0.037229657413855394, + "acc_norm": 0.6198830409356725, + "acc_norm_stderr": 0.037229657413855394 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5922330097087378, + "acc_stderr": 0.04865777570410769, + "acc_norm": 0.5922330097087378, + "acc_norm_stderr": 0.04865777570410769 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6526181353767561, + "acc_stderr": 0.017026671748655728, + "acc_norm": 0.6526181353767561, + "acc_norm_stderr": 0.017026671748655728 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.043163785995113245, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.043163785995113245 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 
0.49361702127659574, + "acc_stderr": 0.03268335899936338, + "acc_norm": 0.49361702127659574, + "acc_norm_stderr": 0.03268335899936338 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.5120481927710844, + "acc_stderr": 0.03891364495835817, + "acc_norm": 0.5120481927710844, + "acc_norm_stderr": 0.03891364495835817 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5980707395498392, + "acc_stderr": 0.027846476005930473, + "acc_norm": 0.5980707395498392, + "acc_norm_stderr": 0.027846476005930473 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5695067264573991, + "acc_stderr": 0.033231973029429394, + "acc_norm": 0.5695067264573991, + "acc_norm_stderr": 0.033231973029429394 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6335877862595419, + "acc_stderr": 0.04225875451969638, + "acc_norm": 0.6335877862595419, + "acc_norm_stderr": 0.04225875451969638 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.51, + "acc_stderr": 0.050241839379569095, + "acc_norm": 0.51, + "acc_norm_stderr": 0.050241839379569095 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7474747474747475, + "acc_stderr": 0.030954055470365907, + "acc_norm": 0.7474747474747475, + "acc_norm_stderr": 0.030954055470365907 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4689655172413793, + "acc_stderr": 0.04158632762097828, + "acc_norm": 0.4689655172413793, + "acc_norm_stderr": 0.04158632762097828 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.04440521906179328, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.04440521906179328 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6260504201680672, + "acc_stderr": 0.03142946637883708, + "acc_norm": 0.6260504201680672, + "acc_norm_stderr": 0.03142946637883708 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5307692307692308, + "acc_stderr": 0.025302958890850154, + "acc_norm": 0.5307692307692308, + "acc_norm_stderr": 
0.025302958890850154 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.55, + "acc_stderr": 0.04999999999999999, + "acc_norm": 0.55, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6203703703703703, + "acc_stderr": 0.04691521224077742, + "acc_norm": 0.6203703703703703, + "acc_norm_stderr": 0.04691521224077742 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4482758620689655, + "acc_stderr": 0.03499113137676744, + "acc_norm": 0.4482758620689655, + "acc_norm_stderr": 0.03499113137676744 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5870967741935483, + "acc_stderr": 0.028009138125400384, + "acc_norm": 0.5870967741935483, + "acc_norm_stderr": 0.028009138125400384 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7948717948717948, + "acc_stderr": 0.026453508054040353, + "acc_norm": 0.7948717948717948, + "acc_norm_stderr": 0.026453508054040353 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5094339622641509, + "acc_stderr": 0.030767394707808093, + "acc_norm": 0.5094339622641509, + "acc_norm_stderr": 0.030767394707808093 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6, + "acc_stderr": 0.0469237132203465, + "acc_norm": 0.6, + "acc_norm_stderr": 0.0469237132203465 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.362962962962963, + "acc_stderr": 0.029318203645206865, + "acc_norm": 0.362962962962963, + "acc_norm_stderr": 0.029318203645206865 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33112582781456956, + "acc_stderr": 0.038425817186598696, + "acc_norm": 0.33112582781456956, + "acc_norm_stderr": 0.038425817186598696 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7213930348258707, + "acc_stderr": 0.03170056183497309, + "acc_norm": 0.7213930348258707, + "acc_norm_stderr": 
0.03170056183497309 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4797687861271676, + "acc_stderr": 0.03809342081273958, + "acc_norm": 0.4797687861271676, + "acc_norm_stderr": 0.03809342081273958 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3968253968253968, + "acc_stderr": 0.02519710107424649, + "acc_norm": 0.3968253968253968, + "acc_norm_stderr": 0.02519710107424649 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4930555555555556, + "acc_stderr": 0.04180806750294938, + "acc_norm": 0.4930555555555556, + "acc_norm_stderr": 0.04180806750294938 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.75, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.75, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5867052023121387, + "acc_stderr": 0.02651126136940924, + "acc_norm": 0.5867052023121387, + "acc_norm_stderr": 0.02651126136940924 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5398773006134969, + "acc_stderr": 0.039158572914369714, + "acc_norm": 0.5398773006134969, + "acc_norm_stderr": 0.039158572914369714 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6018518518518519, + "acc_stderr": 0.027237415094592477, + "acc_norm": 0.6018518518518519, + "acc_norm_stderr": 0.027237415094592477 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7409326424870466, + "acc_stderr": 0.03161877917935411, + "acc_norm": 0.7409326424870466, + "acc_norm_stderr": 0.03161877917935411 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.42105263157894735, + "acc_stderr": 0.046446020912223177, + "acc_norm": 0.42105263157894735, + 
"acc_norm_stderr": 0.046446020912223177 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.671559633027523, + "acc_stderr": 0.0201359027972984, + "acc_norm": 0.671559633027523, + "acc_norm_stderr": 0.0201359027972984 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3253968253968254, + "acc_stderr": 0.04190596438871136, + "acc_norm": 0.3253968253968254, + "acc_norm_stderr": 0.04190596438871136 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5947712418300654, + "acc_stderr": 0.028110928492809068, + "acc_norm": 0.5947712418300654, + "acc_norm_stderr": 0.028110928492809068 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.58, + "acc_stderr": 0.04960449637488583, + "acc_norm": 0.58, + "acc_norm_stderr": 0.04960449637488583 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7107438016528925, + "acc_stderr": 0.041391127276354626, + "acc_norm": 0.7107438016528925, + "acc_norm_stderr": 0.041391127276354626 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5657894736842105, + "acc_stderr": 0.0403356566784832, + "acc_norm": 0.5657894736842105, + "acc_norm_stderr": 0.0403356566784832 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4934640522875817, + "acc_stderr": 0.020226106567657814, + "acc_norm": 0.4934640522875817, + "acc_norm_stderr": 0.020226106567657814 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.39361702127659576, + "acc_stderr": 0.02914454478159615, + "acc_norm": 0.39361702127659576, + "acc_norm_stderr": 0.02914454478159615 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3482142857142857, + "acc_stderr": 0.04521829902833587, + "acc_norm": 0.3482142857142857, + "acc_norm_stderr": 0.04521829902833587 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4861111111111111, + "acc_stderr": 0.034086558679777494, + "acc_norm": 0.4861111111111111, + "acc_norm_stderr": 0.034086558679777494 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2122905027932961, + "acc_stderr": 
0.013676644685831726, + "acc_norm": 0.2122905027932961, + "acc_norm_stderr": 0.013676644685831726 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.049999999999999996, + "acc_norm": 0.45, + "acc_norm_stderr": 0.049999999999999996 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.64, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.64, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4963235294117647, + "acc_stderr": 0.030372015885428195, + "acc_norm": 0.4963235294117647, + "acc_norm_stderr": 0.030372015885428195 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6244897959183674, + "acc_stderr": 0.031001209039894843, + "acc_norm": 0.6244897959183674, + "acc_norm_stderr": 0.031001209039894843 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7088607594936709, + "acc_stderr": 0.02957160106575337, + "acc_norm": 0.7088607594936709, + "acc_norm_stderr": 0.02957160106575337 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.39374185136897, + "acc_stderr": 0.012478532272564435, + "acc_norm": 0.39374185136897, + "acc_norm_stderr": 0.012478532272564435 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6029411764705882, + "acc_stderr": 0.034341311647191286, + "acc_norm": 0.6029411764705882, + "acc_norm_stderr": 0.034341311647191286 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.036810508691615486, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.036810508691615486 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.26805385556915545, + "mc1_stderr": 0.015506204722834559, + "mc2": 0.4236299783982114, + "mc2_stderr": 0.015092774145575695 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4793388429752066, + "acc_stderr": 0.017175671279836446, + "acc_norm": 0.5548996458087367, + "acc_norm_stderr": 0.017086417431005467 + } + }, + "versions": { + "all": 0, 
+ "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + 
"harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "realPCH/Orca-Platypus-v3-1epoch", + "model_sha": "54224f2864c33e98128041fbbf8fccb3d1a54988", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/realPCH/ko-solra-platusv3-koprompt/result_2024-01-18 01:34:54.json b/realPCH/ko-solra-platusv3-koprompt/result_2024-01-18 01:34:54.json new file mode 100644 index 0000000000000000000000000000000000000000..257599a563a4e113337b84996ba51bd9a21ad8aa --- /dev/null +++ b/realPCH/ko-solra-platusv3-koprompt/result_2024-01-18 01:34:54.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4351535836177474, + "acc_stderr": 0.014487986197186045, + "acc_norm": 0.5008532423208191, + "acc_norm_stderr": 0.014611369529813262 + }, + "harness|ko_hellaswag|10": { + "acc": 0.44064927305317664, + "acc_stderr": 0.004954503606471608, + "acc_norm": 0.6025692093208525, + "acc_norm_stderr": 0.004883663587184777 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6432748538011696, + "acc_stderr": 0.036740130028609534, + "acc_norm": 0.6432748538011696, + "acc_norm_stderr": 0.036740130028609534 + }, + "harness|ko_mmlu_management|5": { 
+ "acc": 0.6019417475728155, + "acc_stderr": 0.04846748253977238, + "acc_norm": 0.6019417475728155, + "acc_norm_stderr": 0.04846748253977238 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6590038314176245, + "acc_stderr": 0.01695178138322332, + "acc_norm": 0.6590038314176245, + "acc_norm_stderr": 0.01695178138322332 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.5037037037037037, + "acc_stderr": 0.043192236258113324, + "acc_norm": 0.5037037037037037, + "acc_norm_stderr": 0.043192236258113324 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4765957446808511, + "acc_stderr": 0.03265019475033583, + "acc_norm": 0.4765957446808511, + "acc_norm_stderr": 0.03265019475033583 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4939759036144578, + "acc_stderr": 0.03892212195333047, + "acc_norm": 0.4939759036144578, + "acc_norm_stderr": 0.03892212195333047 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6205787781350482, + "acc_stderr": 0.027559949802347817, + "acc_norm": 0.6205787781350482, + "acc_norm_stderr": 0.027559949802347817 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5874439461883408, + "acc_stderr": 0.03304062175449297, + "acc_norm": 0.5874439461883408, + "acc_norm_stderr": 0.03304062175449297 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6335877862595419, + "acc_stderr": 0.04225875451969638, + "acc_norm": 0.6335877862595419, + "acc_norm_stderr": 0.04225875451969638 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7323232323232324, + "acc_stderr": 0.03154449888270286, + "acc_norm": 0.7323232323232324, + "acc_norm_stderr": 0.03154449888270286 + }, + "harness|ko_mmlu_electrical_engineering|5": { + 
"acc": 0.47586206896551725, + "acc_stderr": 0.0416180850350153, + "acc_norm": 0.47586206896551725, + "acc_norm_stderr": 0.0416180850350153 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.04617034827006716, + "acc_norm": 0.3137254901960784, + "acc_norm_stderr": 0.04617034827006716 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5840336134453782, + "acc_stderr": 0.03201650100739611, + "acc_norm": 0.5840336134453782, + "acc_norm_stderr": 0.03201650100739611 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5230769230769231, + "acc_stderr": 0.025323990861736253, + "acc_norm": 0.5230769230769231, + "acc_norm_stderr": 0.025323990861736253 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.57, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.57, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6481481481481481, + "acc_stderr": 0.04616631111801714, + "acc_norm": 0.6481481481481481, + "acc_norm_stderr": 0.04616631111801714 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4482758620689655, + "acc_stderr": 0.03499113137676744, + "acc_norm": 0.4482758620689655, + "acc_norm_stderr": 0.03499113137676744 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5580645161290323, + "acc_stderr": 0.02825155790684974, + "acc_norm": 0.5580645161290323, + "acc_norm_stderr": 0.02825155790684974 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7777777777777778, + "acc_stderr": 0.027236013946196673, + "acc_norm": 0.7777777777777778, + "acc_norm_stderr": 0.027236013946196673 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.49056603773584906, + "acc_stderr": 0.030767394707808093, + "acc_norm": 0.49056603773584906, + "acc_norm_stderr": 0.030767394707808093 + }, + 
"harness|ko_mmlu_public_relations|5": { + "acc": 0.5818181818181818, + "acc_stderr": 0.04724577405731572, + "acc_norm": 0.5818181818181818, + "acc_norm_stderr": 0.04724577405731572 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3592592592592593, + "acc_stderr": 0.029252905927251976, + "acc_norm": 0.3592592592592593, + "acc_norm_stderr": 0.029252905927251976 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3576158940397351, + "acc_stderr": 0.03913453431177258, + "acc_norm": 0.3576158940397351, + "acc_norm_stderr": 0.03913453431177258 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7114427860696517, + "acc_stderr": 0.0320384104021332, + "acc_norm": 0.7114427860696517, + "acc_norm_stderr": 0.0320384104021332 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4393063583815029, + "acc_stderr": 0.03784271932887467, + "acc_norm": 0.4393063583815029, + "acc_norm_stderr": 0.03784271932887467 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.4126984126984127, + "acc_stderr": 0.02535574126305527, + "acc_norm": 0.4126984126984127, + "acc_norm_stderr": 0.02535574126305527 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5277777777777778, + "acc_stderr": 0.04174752578923185, + "acc_norm": 0.5277777777777778, + "acc_norm_stderr": 0.04174752578923185 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.74, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.74, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5895953757225434, + "acc_stderr": 0.026483392042098177, + "acc_norm": 0.5895953757225434, + "acc_norm_stderr": 0.026483392042098177 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5276073619631901, + "acc_stderr": 0.03922378290610991, + "acc_norm": 0.5276073619631901, + "acc_norm_stderr": 
0.03922378290610991 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.595679012345679, + "acc_stderr": 0.027306625297327684, + "acc_norm": 0.595679012345679, + "acc_norm_stderr": 0.027306625297327684 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7253886010362695, + "acc_stderr": 0.03221024508041154, + "acc_norm": 0.7253886010362695, + "acc_norm_stderr": 0.03221024508041154 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.39473684210526316, + "acc_stderr": 0.04598188057816542, + "acc_norm": 0.39473684210526316, + "acc_norm_stderr": 0.04598188057816542 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6697247706422018, + "acc_stderr": 0.02016446633634298, + "acc_norm": 0.6697247706422018, + "acc_norm_stderr": 0.02016446633634298 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3253968253968254, + "acc_stderr": 0.04190596438871136, + "acc_norm": 0.3253968253968254, + "acc_norm_stderr": 0.04190596438871136 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5751633986928104, + "acc_stderr": 0.02830457667314111, + "acc_norm": 0.5751633986928104, + "acc_norm_stderr": 0.02830457667314111 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.55, + "acc_stderr": 0.049999999999999996, + "acc_norm": 0.55, + "acc_norm_stderr": 0.049999999999999996 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6694214876033058, + "acc_stderr": 0.04294340845212093, + "acc_norm": 0.6694214876033058, + "acc_norm_stderr": 0.04294340845212093 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5723684210526315, + "acc_stderr": 0.040260970832965634, + "acc_norm": 0.5723684210526315, + "acc_norm_stderr": 0.040260970832965634 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5098039215686274, + "acc_stderr": 0.020223946005074312, + "acc_norm": 
0.5098039215686274, + "acc_norm_stderr": 0.020223946005074312 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3900709219858156, + "acc_stderr": 0.029097675599463926, + "acc_norm": 0.3900709219858156, + "acc_norm_stderr": 0.029097675599463926 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3482142857142857, + "acc_stderr": 0.045218299028335865, + "acc_norm": 0.3482142857142857, + "acc_norm_stderr": 0.045218299028335865 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5046296296296297, + "acc_stderr": 0.03409825519163572, + "acc_norm": 0.5046296296296297, + "acc_norm_stderr": 0.03409825519163572 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2245810055865922, + "acc_stderr": 0.01395680366654464, + "acc_norm": 0.2245810055865922, + "acc_norm_stderr": 0.01395680366654464 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.049999999999999996, + "acc_norm": 0.45, + "acc_norm_stderr": 0.049999999999999996 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.69, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.69, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.47794117647058826, + "acc_stderr": 0.030343264224213528, + "acc_norm": 0.47794117647058826, + "acc_norm_stderr": 0.030343264224213528 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6285714285714286, + "acc_stderr": 0.030932858792789862, + "acc_norm": 0.6285714285714286, + "acc_norm_stderr": 0.030932858792789862 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7172995780590717, + "acc_stderr": 0.029312814153955934, + "acc_norm": 0.7172995780590717, + "acc_norm_stderr": 0.029312814153955934 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.394393741851369, + "acc_stderr": 0.012482141665631177, + "acc_norm": 0.394393741851369, + "acc_norm_stderr": 0.012482141665631177 + }, + "harness|ko_mmlu_high_school_us_history|5": { 
+ "acc": 0.6225490196078431, + "acc_stderr": 0.03402272044340703, + "acc_norm": 0.6225490196078431, + "acc_norm_stderr": 0.03402272044340703 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6303030303030303, + "acc_stderr": 0.03769430314512568, + "acc_norm": 0.6303030303030303, + "acc_norm_stderr": 0.03769430314512568 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.27539779681762544, + "mc1_stderr": 0.015638135667775523, + "mc2": 0.4190443640782649, + "mc2_stderr": 0.015170966698027236 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5053128689492326, + "acc_stderr": 0.01718938362722971, + "acc_norm": 0.5914994096812278, + "acc_norm_stderr": 0.016900062879427122 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + 
"harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "realPCH/ko-solra-platusv3-koprompt", + "model_sha": "afdaf5c6e7352b296ffec4ebe1279cbe7424e250", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/realPCH/ko_solra_merge/result_2024-01-18 01:23:40.json b/realPCH/ko_solra_merge/result_2024-01-18 01:23:40.json new file mode 100644 index 0000000000000000000000000000000000000000..5799d0ab61dc0186be958c7a5c07225b9e4be8a6 --- /dev/null +++ 
b/realPCH/ko_solra_merge/result_2024-01-18 01:23:40.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4180887372013652, + "acc_stderr": 0.01441398839699608, + "acc_norm": 0.4880546075085324, + "acc_norm_stderr": 0.014607220340597171 + }, + "harness|ko_hellaswag|10": { + "acc": 0.433877713602868, + "acc_stderr": 0.004945956744943811, + "acc_norm": 0.5952997410874328, + "acc_norm_stderr": 0.004898308167211848 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5789473684210527, + "acc_stderr": 0.03786720706234213, + "acc_norm": 0.5789473684210527, + "acc_norm_stderr": 0.03786720706234213 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6310679611650486, + "acc_stderr": 0.0477761518115674, + "acc_norm": 0.6310679611650486, + "acc_norm_stderr": 0.0477761518115674 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.644955300127714, + "acc_stderr": 0.01711208577277299, + "acc_norm": 0.644955300127714, + "acc_norm_stderr": 0.01711208577277299 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4222222222222222, + "acc_stderr": 0.04266763404099582, + "acc_norm": 0.4222222222222222, + "acc_norm_stderr": 0.04266763404099582 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768077, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768077 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.451063829787234, + "acc_stderr": 0.032529096196131965, + "acc_norm": 0.451063829787234, + "acc_norm_stderr": 0.032529096196131965 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4939759036144578, + "acc_stderr": 0.03892212195333047, + "acc_norm": 0.4939759036144578, + "acc_norm_stderr": 0.03892212195333047 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6141479099678456, + "acc_stderr": 0.027648149599751464, + "acc_norm": 0.6141479099678456, + "acc_norm_stderr": 0.027648149599751464 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5829596412556054, + "acc_stderr": 0.03309266936071721, 
+ "acc_norm": 0.5829596412556054, + "acc_norm_stderr": 0.03309266936071721 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6870229007633588, + "acc_stderr": 0.04066962905677697, + "acc_norm": 0.6870229007633588, + "acc_norm_stderr": 0.04066962905677697 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6868686868686869, + "acc_stderr": 0.033042050878136525, + "acc_norm": 0.6868686868686869, + "acc_norm_stderr": 0.033042050878136525 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5103448275862069, + "acc_stderr": 0.04165774775728763, + "acc_norm": 0.5103448275862069, + "acc_norm_stderr": 0.04165774775728763 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.04488482852329017, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.04488482852329017 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6050420168067226, + "acc_stderr": 0.031753678460966245, + "acc_norm": 0.6050420168067226, + "acc_norm_stderr": 0.031753678460966245 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5307692307692308, + "acc_stderr": 0.025302958890850154, + "acc_norm": 0.5307692307692308, + "acc_norm_stderr": 0.025302958890850154 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.59, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.59, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5925925925925926, + "acc_stderr": 0.04750077341199985, + "acc_norm": 0.5925925925925926, + "acc_norm_stderr": 0.04750077341199985 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4433497536945813, + 
"acc_stderr": 0.03495334582162934, + "acc_norm": 0.4433497536945813, + "acc_norm_stderr": 0.03495334582162934 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6, + "acc_stderr": 0.027869320571664618, + "acc_norm": 0.6, + "acc_norm_stderr": 0.027869320571664618 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.8034188034188035, + "acc_stderr": 0.026035386098951292, + "acc_norm": 0.8034188034188035, + "acc_norm_stderr": 0.026035386098951292 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5132075471698113, + "acc_stderr": 0.030762134874500482, + "acc_norm": 0.5132075471698113, + "acc_norm_stderr": 0.030762134874500482 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.04769300568972744, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.04769300568972744 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.36666666666666664, + "acc_stderr": 0.02938162072646507, + "acc_norm": 0.36666666666666664, + "acc_norm_stderr": 0.02938162072646507 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.03780445850526733, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.03780445850526733 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6915422885572139, + "acc_stderr": 0.032658195885126966, + "acc_norm": 0.6915422885572139, + "acc_norm_stderr": 0.032658195885126966 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4797687861271676, + "acc_stderr": 0.03809342081273958, + "acc_norm": 0.4797687861271676, + "acc_norm_stderr": 0.03809342081273958 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.38095238095238093, + "acc_stderr": 0.025010749116137602, + "acc_norm": 0.38095238095238093, + "acc_norm_stderr": 0.025010749116137602 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5, + "acc_stderr": 0.04181210050035455, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04181210050035455 + }, + "harness|ko_mmlu_college_chemistry|5": { 
+ "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.77, + "acc_stderr": 0.042295258468165065, + "acc_norm": 0.77, + "acc_norm_stderr": 0.042295258468165065 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5751445086705202, + "acc_stderr": 0.026613350840261736, + "acc_norm": 0.5751445086705202, + "acc_norm_stderr": 0.026613350840261736 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.50920245398773, + "acc_stderr": 0.03927705600787443, + "acc_norm": 0.50920245398773, + "acc_norm_stderr": 0.03927705600787443 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6049382716049383, + "acc_stderr": 0.02720111766692565, + "acc_norm": 0.6049382716049383, + "acc_norm_stderr": 0.02720111766692565 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.694300518134715, + "acc_stderr": 0.033248379397581594, + "acc_norm": 0.694300518134715, + "acc_norm_stderr": 0.033248379397581594 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.40350877192982454, + "acc_stderr": 0.04615186962583703, + "acc_norm": 0.40350877192982454, + "acc_norm_stderr": 0.04615186962583703 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.636697247706422, + "acc_stderr": 0.020620603919625804, + "acc_norm": 0.636697247706422, + "acc_norm_stderr": 0.020620603919625804 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04216370213557835, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04216370213557835 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5751633986928104, + "acc_stderr": 0.028304576673141107, + "acc_norm": 0.5751633986928104, + "acc_norm_stderr": 0.028304576673141107 + }, + "harness|ko_mmlu_business_ethics|5": { + 
"acc": 0.56, + "acc_stderr": 0.0498887651569859, + "acc_norm": 0.56, + "acc_norm_stderr": 0.0498887651569859 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6859504132231405, + "acc_stderr": 0.04236964753041019, + "acc_norm": 0.6859504132231405, + "acc_norm_stderr": 0.04236964753041019 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5723684210526315, + "acc_stderr": 0.04026097083296563, + "acc_norm": 0.5723684210526315, + "acc_norm_stderr": 0.04026097083296563 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4852941176470588, + "acc_stderr": 0.020219083895133924, + "acc_norm": 0.4852941176470588, + "acc_norm_stderr": 0.020219083895133924 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.36524822695035464, + "acc_stderr": 0.028723863853281278, + "acc_norm": 0.36524822695035464, + "acc_norm_stderr": 0.028723863853281278 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.36607142857142855, + "acc_stderr": 0.0457237235873743, + "acc_norm": 0.36607142857142855, + "acc_norm_stderr": 0.0457237235873743 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5416666666666666, + "acc_stderr": 0.03398110890294636, + "acc_norm": 0.5416666666666666, + "acc_norm_stderr": 0.03398110890294636 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.014333522059217892, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.014333522059217892 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5147058823529411, + "acc_stderr": 0.03035969707904611, + "acc_norm": 0.5147058823529411, + "acc_norm_stderr": 0.03035969707904611 + }, + 
"harness|ko_mmlu_security_studies|5": { + "acc": 0.5795918367346938, + "acc_stderr": 0.031601069934496004, + "acc_norm": 0.5795918367346938, + "acc_norm_stderr": 0.031601069934496004 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7046413502109705, + "acc_stderr": 0.02969633871342288, + "acc_norm": 0.7046413502109705, + "acc_norm_stderr": 0.02969633871342288 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.38852672750977835, + "acc_stderr": 0.012448817838292376, + "acc_norm": 0.38852672750977835, + "acc_norm_stderr": 0.012448817838292376 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6078431372549019, + "acc_stderr": 0.03426712349247272, + "acc_norm": 0.6078431372549019, + "acc_norm_stderr": 0.03426712349247272 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6242424242424243, + "acc_stderr": 0.03781887353205982, + "acc_norm": 0.6242424242424243, + "acc_norm_stderr": 0.03781887353205982 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.27906976744186046, + "mc1_stderr": 0.01570210709062789, + "mc2": 0.42820228423640155, + "mc2_stderr": 0.01542704227331791 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.564344746162928, + "acc_stderr": 0.017047415229476316, + "acc_norm": 0.6103896103896104, + "acc_norm_stderr": 0.016766161671893518 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + 
"harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + 
"harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "realPCH/ko_solra_merge", + "model_sha": "dbaccac36390ee6d62e8b90aa746fbb1efd97ea0", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/realPCH/kosolra-kullm/result_2024-01-19 02:35:50.json b/realPCH/kosolra-kullm/result_2024-01-19 02:35:50.json new file mode 100644 index 0000000000000000000000000000000000000000..5e25af4f0e27bfc354625d7c2eb2c7ff351782f3 --- /dev/null +++ b/realPCH/kosolra-kullm/result_2024-01-19 02:35:50.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.44368600682593856, + "acc_stderr": 0.014518421825670452, + "acc_norm": 0.4948805460750853, + "acc_norm_stderr": 0.014610624890309162 + }, + "harness|ko_hellaswag|10": { + "acc": 0.44901414060944034, + "acc_stderr": 0.004963771168672095, + "acc_norm": 0.6148177653853814, + "acc_norm_stderr": 0.004856437955719855 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6432748538011696, + "acc_stderr": 0.03674013002860954, + "acc_norm": 0.6432748538011696, + "acc_norm_stderr": 0.03674013002860954 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6310679611650486, + "acc_stderr": 0.0477761518115674, + "acc_norm": 0.6310679611650486, + "acc_norm_stderr": 0.0477761518115674 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6564495530012772, + "acc_stderr": 0.016982145632652476, + "acc_norm": 0.6564495530012772, + "acc_norm_stderr": 0.016982145632652476 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.5185185185185185, + "acc_stderr": 0.043163785995113245, + "acc_norm": 0.5185185185185185, + "acc_norm_stderr": 0.043163785995113245 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + 
"harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4723404255319149, + "acc_stderr": 0.03263597118409769, + "acc_norm": 0.4723404255319149, + "acc_norm_stderr": 0.03263597118409769 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4939759036144578, + "acc_stderr": 0.03892212195333047, + "acc_norm": 0.4939759036144578, + "acc_norm_stderr": 0.03892212195333047 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.639871382636656, + "acc_stderr": 0.027264297599804015, + "acc_norm": 0.639871382636656, + "acc_norm_stderr": 0.027264297599804015 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.547085201793722, + "acc_stderr": 0.03340867501923324, + "acc_norm": 0.547085201793722, + "acc_norm_stderr": 0.03340867501923324 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6793893129770993, + "acc_stderr": 0.04093329229834278, + "acc_norm": 0.6793893129770993, + "acc_norm_stderr": 0.04093329229834278 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7272727272727273, + "acc_stderr": 0.03173071239071724, + "acc_norm": 0.7272727272727273, + "acc_norm_stderr": 0.03173071239071724 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5724137931034483, + "acc_stderr": 0.04122737111370332, + "acc_norm": 0.5724137931034483, + "acc_norm_stderr": 0.04122737111370332 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.30392156862745096, + "acc_stderr": 0.04576665403207761, + "acc_norm": 0.30392156862745096, + "acc_norm_stderr": 0.04576665403207761 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6260504201680672, + "acc_stderr": 0.03142946637883708, + "acc_norm": 0.6260504201680672, + "acc_norm_stderr": 0.03142946637883708 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5076923076923077, + "acc_stderr": 0.025348006031534743, + "acc_norm": 
0.5076923076923077, + "acc_norm_stderr": 0.025348006031534743 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6296296296296297, + "acc_stderr": 0.04668408033024931, + "acc_norm": 0.6296296296296297, + "acc_norm_stderr": 0.04668408033024931 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.43842364532019706, + "acc_stderr": 0.03491207857486519, + "acc_norm": 0.43842364532019706, + "acc_norm_stderr": 0.03491207857486519 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6064516129032258, + "acc_stderr": 0.02779187875313227, + "acc_norm": 0.6064516129032258, + "acc_norm_stderr": 0.02779187875313227 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.8076923076923077, + "acc_stderr": 0.025819233256483706, + "acc_norm": 0.8076923076923077, + "acc_norm_stderr": 0.025819233256483706 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5094339622641509, + "acc_stderr": 0.030767394707808093, + "acc_norm": 0.5094339622641509, + "acc_norm_stderr": 0.030767394707808093 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5727272727272728, + "acc_stderr": 0.047381987035454834, + "acc_norm": 0.5727272727272728, + "acc_norm_stderr": 0.047381987035454834 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.31851851851851853, + "acc_stderr": 0.028406533090608456, + "acc_norm": 0.31851851851851853, + "acc_norm_stderr": 0.028406533090608456 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3576158940397351, + "acc_stderr": 0.03913453431177258, + "acc_norm": 0.3576158940397351, + "acc_norm_stderr": 0.03913453431177258 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7313432835820896, + "acc_stderr": 
0.031343283582089536, + "acc_norm": 0.7313432835820896, + "acc_norm_stderr": 0.031343283582089536 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5144508670520231, + "acc_stderr": 0.03810871630454764, + "acc_norm": 0.5144508670520231, + "acc_norm_stderr": 0.03810871630454764 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.38095238095238093, + "acc_stderr": 0.025010749116137595, + "acc_norm": 0.38095238095238093, + "acc_norm_stderr": 0.025010749116137595 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4930555555555556, + "acc_stderr": 0.04180806750294938, + "acc_norm": 0.4930555555555556, + "acc_norm_stderr": 0.04180806750294938 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.78, + "acc_stderr": 0.041633319989322626, + "acc_norm": 0.78, + "acc_norm_stderr": 0.041633319989322626 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5982658959537572, + "acc_stderr": 0.02639410417764363, + "acc_norm": 0.5982658959537572, + "acc_norm_stderr": 0.02639410417764363 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.588957055214724, + "acc_stderr": 0.038656978537853624, + "acc_norm": 0.588957055214724, + "acc_norm_stderr": 0.038656978537853624 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6265432098765432, + "acc_stderr": 0.026915003011380157, + "acc_norm": 0.6265432098765432, + "acc_norm_stderr": 0.026915003011380157 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7253886010362695, + "acc_stderr": 0.03221024508041154, + "acc_norm": 0.7253886010362695, + "acc_norm_stderr": 0.03221024508041154 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.35964912280701755, + 
"acc_stderr": 0.04514496132873633, + "acc_norm": 0.35964912280701755, + "acc_norm_stderr": 0.04514496132873633 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.655045871559633, + "acc_stderr": 0.020380605405066966, + "acc_norm": 0.655045871559633, + "acc_norm_stderr": 0.020380605405066966 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04216370213557835, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04216370213557835 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5686274509803921, + "acc_stderr": 0.028358956313423552, + "acc_norm": 0.5686274509803921, + "acc_norm_stderr": 0.028358956313423552 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.59, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.59, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.71900826446281, + "acc_stderr": 0.04103203830514511, + "acc_norm": 0.71900826446281, + "acc_norm_stderr": 0.04103203830514511 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5657894736842105, + "acc_stderr": 0.04033565667848319, + "acc_norm": 0.5657894736842105, + "acc_norm_stderr": 0.04033565667848319 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.49836601307189543, + "acc_stderr": 0.020227726838150117, + "acc_norm": 0.49836601307189543, + "acc_norm_stderr": 0.020227726838150117 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3829787234042553, + "acc_stderr": 0.028999080904806178, + "acc_norm": 0.3829787234042553, + "acc_norm_stderr": 0.028999080904806178 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4017857142857143, + "acc_stderr": 0.04653333146973647, + "acc_norm": 0.4017857142857143, + "acc_norm_stderr": 0.04653333146973647 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.49537037037037035, + "acc_stderr": 0.03409825519163572, + "acc_norm": 0.49537037037037035, + "acc_norm_stderr": 0.03409825519163572 + }, + 
"harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2335195530726257, + "acc_stderr": 0.01414957534897626, + "acc_norm": 0.2335195530726257, + "acc_norm_stderr": 0.01414957534897626 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.0498887651569859, + "acc_norm": 0.44, + "acc_norm_stderr": 0.0498887651569859 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.71, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.71, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5330882352941176, + "acc_stderr": 0.030306257722468314, + "acc_norm": 0.5330882352941176, + "acc_norm_stderr": 0.030306257722468314 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5877551020408164, + "acc_stderr": 0.0315123604467427, + "acc_norm": 0.5877551020408164, + "acc_norm_stderr": 0.0315123604467427 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7172995780590717, + "acc_stderr": 0.029312814153955927, + "acc_norm": 0.7172995780590717, + "acc_norm_stderr": 0.029312814153955927 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.39765319426336376, + "acc_stderr": 0.01249984034746064, + "acc_norm": 0.39765319426336376, + "acc_norm_stderr": 0.01249984034746064 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6029411764705882, + "acc_stderr": 0.0343413116471913, + "acc_norm": 0.6029411764705882, + "acc_norm_stderr": 0.0343413116471913 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6484848484848484, + "acc_stderr": 0.037282069986826503, + "acc_norm": 0.6484848484848484, + "acc_norm_stderr": 0.037282069986826503 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3023255813953488, + "mc1_stderr": 0.01607750926613303, + "mc2": 0.46459300398559167, + "mc2_stderr": 0.015483799655999796 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5371900826446281, + "acc_stderr": 0.017142736117643304, + "acc_norm": 0.5914994096812278, + 
"acc_norm_stderr": 0.016900062879427122 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, 
+ "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "realPCH/kosolra-kullm", + "model_sha": "42266719de680a547bc6f85d3c0263216f50a49f", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/realPCH/kosolra-wiki-QA-1epoch/result_2024-01-28 23:22:31.json b/realPCH/kosolra-wiki-QA-1epoch/result_2024-01-28 23:22:31.json new file mode 100644 index 0000000000000000000000000000000000000000..67b32ab9074cd773da2a3dea294ea50e66e6a638 --- /dev/null +++ b/realPCH/kosolra-wiki-QA-1epoch/result_2024-01-28 23:22:31.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.41638225255972694, + "acc_stderr": 0.014405618279436178, + "acc_norm": 0.46928327645051193, + "acc_norm_stderr": 0.014583792546304038 + }, + "harness|ko_hellaswag|10": { + "acc": 0.42202748456482775, + "acc_stderr": 0.0049287351036358465, + "acc_norm": 0.578868751244772, + "acc_norm_stderr": 0.004927314729433556 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6432748538011696, + "acc_stderr": 0.03674013002860954, + "acc_norm": 0.6432748538011696, + "acc_norm_stderr": 0.03674013002860954 + }, + 
"harness|ko_mmlu_management|5": { + "acc": 0.6601941747572816, + "acc_stderr": 0.04689765937278132, + "acc_norm": 0.6601941747572816, + "acc_norm_stderr": 0.04689765937278132 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6590038314176245, + "acc_stderr": 0.016951781383223313, + "acc_norm": 0.6590038314176245, + "acc_norm_stderr": 0.016951781383223313 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.043097329010363554, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.043097329010363554 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4723404255319149, + "acc_stderr": 0.03263597118409769, + "acc_norm": 0.4723404255319149, + "acc_norm_stderr": 0.03263597118409769 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.5120481927710844, + "acc_stderr": 0.03891364495835816, + "acc_norm": 0.5120481927710844, + "acc_norm_stderr": 0.03891364495835816 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.594855305466238, + "acc_stderr": 0.027882383791325963, + "acc_norm": 0.594855305466238, + "acc_norm_stderr": 0.027882383791325963 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5381165919282511, + "acc_stderr": 0.03346015011973228, + "acc_norm": 0.5381165919282511, + "acc_norm_stderr": 0.03346015011973228 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6183206106870229, + "acc_stderr": 0.04260735157644561, + "acc_norm": 0.6183206106870229, + "acc_norm_stderr": 0.04260735157644561 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6767676767676768, + "acc_stderr": 0.03332299921070646, + "acc_norm": 0.6767676767676768, + "acc_norm_stderr": 0.03332299921070646 + }, + 
"harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5103448275862069, + "acc_stderr": 0.04165774775728763, + "acc_norm": 0.5103448275862069, + "acc_norm_stderr": 0.04165774775728763 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237655, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237655 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5756302521008403, + "acc_stderr": 0.03210479051015776, + "acc_norm": 0.5756302521008403, + "acc_norm_stderr": 0.03210479051015776 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5282051282051282, + "acc_stderr": 0.02531063925493386, + "acc_norm": 0.5282051282051282, + "acc_norm_stderr": 0.02531063925493386 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6111111111111112, + "acc_stderr": 0.0471282125742677, + "acc_norm": 0.6111111111111112, + "acc_norm_stderr": 0.0471282125742677 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4433497536945813, + "acc_stderr": 0.03495334582162933, + "acc_norm": 0.4433497536945813, + "acc_norm_stderr": 0.03495334582162933 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.567741935483871, + "acc_stderr": 0.028181739720019416, + "acc_norm": 0.567741935483871, + "acc_norm_stderr": 0.028181739720019416 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7735042735042735, + "acc_stderr": 0.027421007295392933, + "acc_norm": 0.7735042735042735, + "acc_norm_stderr": 0.027421007295392933 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4830188679245283, + "acc_stderr": 0.030755120364119905, + "acc_norm": 0.4830188679245283, + 
"acc_norm_stderr": 0.030755120364119905 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5545454545454546, + "acc_stderr": 0.047605488214603246, + "acc_norm": 0.5545454545454546, + "acc_norm_stderr": 0.047605488214603246 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.35555555555555557, + "acc_stderr": 0.029185714949857406, + "acc_norm": 0.35555555555555557, + "acc_norm_stderr": 0.029185714949857406 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3576158940397351, + "acc_stderr": 0.03913453431177258, + "acc_norm": 0.3576158940397351, + "acc_norm_stderr": 0.03913453431177258 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6915422885572139, + "acc_stderr": 0.03265819588512697, + "acc_norm": 0.6915422885572139, + "acc_norm_stderr": 0.03265819588512697 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4682080924855491, + "acc_stderr": 0.038047497443647646, + "acc_norm": 0.4682080924855491, + "acc_norm_stderr": 0.038047497443647646 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.38095238095238093, + "acc_stderr": 0.0250107491161376, + "acc_norm": 0.38095238095238093, + "acc_norm_stderr": 0.0250107491161376 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5138888888888888, + "acc_stderr": 0.041795966175810016, + "acc_norm": 0.5138888888888888, + "acc_norm_stderr": 0.041795966175810016 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.73, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.73, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5780346820809249, + "acc_stderr": 0.026589231142174267, + "acc_norm": 0.5780346820809249, + "acc_norm_stderr": 0.026589231142174267 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.48466257668711654, + "acc_stderr": 
0.039265223787088445, + "acc_norm": 0.48466257668711654, + "acc_norm_stderr": 0.039265223787088445 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5895061728395061, + "acc_stderr": 0.027371350925124764, + "acc_norm": 0.5895061728395061, + "acc_norm_stderr": 0.027371350925124764 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6580310880829016, + "acc_stderr": 0.03423465100104284, + "acc_norm": 0.6580310880829016, + "acc_norm_stderr": 0.03423465100104284 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04434600701584925, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04434600701584925 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6256880733944954, + "acc_stderr": 0.020748959408988316, + "acc_norm": 0.6256880733944954, + "acc_norm_stderr": 0.020748959408988316 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3492063492063492, + "acc_stderr": 0.04263906892795132, + "acc_norm": 0.3492063492063492, + "acc_norm_stderr": 0.04263906892795132 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5816993464052288, + "acc_stderr": 0.02824513402438729, + "acc_norm": 0.5816993464052288, + "acc_norm_stderr": 0.02824513402438729 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.55, + "acc_stderr": 0.049999999999999996, + "acc_norm": 0.55, + "acc_norm_stderr": 0.049999999999999996 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6942148760330579, + "acc_stderr": 0.04205953933884122, + "acc_norm": 0.6942148760330579, + "acc_norm_stderr": 0.04205953933884122 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5789473684210527, + "acc_stderr": 0.04017901275981751, + "acc_norm": 0.5789473684210527, + "acc_norm_stderr": 0.04017901275981751 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 
0.4869281045751634, + "acc_stderr": 0.020220920829626916, + "acc_norm": 0.4869281045751634, + "acc_norm_stderr": 0.020220920829626916 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3404255319148936, + "acc_stderr": 0.028267657482650144, + "acc_norm": 0.3404255319148936, + "acc_norm_stderr": 0.028267657482650144 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4375, + "acc_stderr": 0.04708567521880525, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.04708567521880525 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5138888888888888, + "acc_stderr": 0.03408655867977748, + "acc_norm": 0.5138888888888888, + "acc_norm_stderr": 0.03408655867977748 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2346368715083799, + "acc_stderr": 0.014173044098303675, + "acc_norm": 0.2346368715083799, + "acc_norm_stderr": 0.014173044098303675 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.65, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.65, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5147058823529411, + "acc_stderr": 0.03035969707904611, + "acc_norm": 0.5147058823529411, + "acc_norm_stderr": 0.03035969707904611 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.46938775510204084, + "acc_stderr": 0.031949171367580624, + "acc_norm": 0.46938775510204084, + "acc_norm_stderr": 0.031949171367580624 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7046413502109705, + "acc_stderr": 0.02969633871342288, + "acc_norm": 0.7046413502109705, + "acc_norm_stderr": 0.02969633871342288 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.39374185136897, + "acc_stderr": 0.012478532272564432, + "acc_norm": 0.39374185136897, + "acc_norm_stderr": 0.012478532272564432 + }, + 
"harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5931372549019608, + "acc_stderr": 0.03447891136353383, + "acc_norm": 0.5931372549019608, + "acc_norm_stderr": 0.03447891136353383 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6484848484848484, + "acc_stderr": 0.037282069986826503, + "acc_norm": 0.6484848484848484, + "acc_norm_stderr": 0.037282069986826503 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24112607099143207, + "mc1_stderr": 0.014974827279752329, + "mc2": 0.37358304652321317, + "mc2_stderr": 0.01483043090033962 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4781582054309327, + "acc_stderr": 0.017173944474294375, + "acc_norm": 0.526564344746163, + "acc_norm_stderr": 0.017166075717577747 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 
1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "realPCH/kosolra-wiki-QA-1epoch", + "model_sha": "24889ddbb5fb95aa6a140456e6f0bbf568f218c4", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/realPCH/kosolra-wiki-QA/result_2024-01-18 01:33:11.json b/realPCH/kosolra-wiki-QA/result_2024-01-18 01:33:11.json new file mode 100644 index 
0000000000000000000000000000000000000000..8f26bfeafda36bc16f44a1f2522df119fd2eacf7 --- /dev/null +++ b/realPCH/kosolra-wiki-QA/result_2024-01-18 01:33:11.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.41723549488054607, + "acc_stderr": 0.014409825518403084, + "acc_norm": 0.48890784982935154, + "acc_norm_stderr": 0.01460779491401305 + }, + "harness|ko_hellaswag|10": { + "acc": 0.42800238996215895, + "acc_stderr": 0.004937779821908575, + "acc_norm": 0.576777534355706, + "acc_norm_stderr": 0.004930603061590765 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6432748538011696, + "acc_stderr": 0.03674013002860954, + "acc_norm": 0.6432748538011696, + "acc_norm_stderr": 0.03674013002860954 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6116504854368932, + "acc_stderr": 0.04825729337356389, + "acc_norm": 0.6116504854368932, + "acc_norm_stderr": 0.04825729337356389 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6372924648786717, + "acc_stderr": 0.017192708674602302, + "acc_norm": 0.6372924648786717, + "acc_norm_stderr": 0.017192708674602302 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.043163785995113245, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.043163785995113245 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421255, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421255 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4723404255319149, + "acc_stderr": 0.03263597118409769, + "acc_norm": 0.4723404255319149, + "acc_norm_stderr": 0.03263597118409769 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.5, + "acc_stderr": 0.03892494720807614, + "acc_norm": 0.5, + "acc_norm_stderr": 0.03892494720807614 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5852090032154341, + "acc_stderr": 0.02798268045975956, + "acc_norm": 0.5852090032154341, + "acc_norm_stderr": 0.02798268045975956 + }, + 
"harness|ko_mmlu_human_aging|5": { + "acc": 0.5426008968609866, + "acc_stderr": 0.033435777055830646, + "acc_norm": 0.5426008968609866, + "acc_norm_stderr": 0.033435777055830646 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6641221374045801, + "acc_stderr": 0.04142313771996665, + "acc_norm": 0.6641221374045801, + "acc_norm_stderr": 0.04142313771996665 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7323232323232324, + "acc_stderr": 0.03154449888270286, + "acc_norm": 0.7323232323232324, + "acc_norm_stderr": 0.03154449888270286 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5241379310344828, + "acc_stderr": 0.0416180850350153, + "acc_norm": 0.5241379310344828, + "acc_norm_stderr": 0.0416180850350153 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237655, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237655 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.592436974789916, + "acc_stderr": 0.03191863374478466, + "acc_norm": 0.592436974789916, + "acc_norm_stderr": 0.03191863374478466 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5076923076923077, + "acc_stderr": 0.025348006031534743, + "acc_norm": 0.5076923076923077, + "acc_norm_stderr": 0.025348006031534743 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6018518518518519, + "acc_stderr": 0.04732332615978815, + "acc_norm": 0.6018518518518519, + "acc_norm_stderr": 0.04732332615978815 + 
}, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4433497536945813, + "acc_stderr": 0.03495334582162933, + "acc_norm": 0.4433497536945813, + "acc_norm_stderr": 0.03495334582162933 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5774193548387097, + "acc_stderr": 0.02810096472427264, + "acc_norm": 0.5774193548387097, + "acc_norm_stderr": 0.02810096472427264 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7692307692307693, + "acc_stderr": 0.027601921381417614, + "acc_norm": 0.7692307692307693, + "acc_norm_stderr": 0.027601921381417614 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5018867924528302, + "acc_stderr": 0.03077265364207567, + "acc_norm": 0.5018867924528302, + "acc_norm_stderr": 0.03077265364207567 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5727272727272728, + "acc_stderr": 0.047381987035454834, + "acc_norm": 0.5727272727272728, + "acc_norm_stderr": 0.047381987035454834 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3592592592592593, + "acc_stderr": 0.029252905927251972, + "acc_norm": 0.3592592592592593, + "acc_norm_stderr": 0.029252905927251972 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33774834437086093, + "acc_stderr": 0.03861557546255169, + "acc_norm": 0.33774834437086093, + "acc_norm_stderr": 0.03861557546255169 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7164179104477612, + "acc_stderr": 0.03187187537919796, + "acc_norm": 0.7164179104477612, + "acc_norm_stderr": 0.03187187537919796 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.47398843930635837, + "acc_stderr": 0.038073017265045125, + "acc_norm": 0.47398843930635837, + "acc_norm_stderr": 0.038073017265045125 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.36772486772486773, + "acc_stderr": 0.02483383982556242, + "acc_norm": 0.36772486772486773, + "acc_norm_stderr": 0.02483383982556242 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4513888888888889, + "acc_stderr": 
0.041614023984032786, + "acc_norm": 0.4513888888888889, + "acc_norm_stderr": 0.041614023984032786 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.76, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.76, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5520231213872833, + "acc_stderr": 0.026772990653361823, + "acc_norm": 0.5520231213872833, + "acc_norm_stderr": 0.026772990653361823 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5030674846625767, + "acc_stderr": 0.03928297078179662, + "acc_norm": 0.5030674846625767, + "acc_norm_stderr": 0.03928297078179662 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5864197530864198, + "acc_stderr": 0.02740204204026997, + "acc_norm": 0.5864197530864198, + "acc_norm_stderr": 0.02740204204026997 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6683937823834197, + "acc_stderr": 0.03397636541089118, + "acc_norm": 0.6683937823834197, + "acc_norm_stderr": 0.03397636541089118 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.40350877192982454, + "acc_stderr": 0.04615186962583704, + "acc_norm": 0.40350877192982454, + "acc_norm_stderr": 0.04615186962583704 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6458715596330276, + "acc_stderr": 0.02050472901382911, + "acc_norm": 0.6458715596330276, + "acc_norm_stderr": 0.02050472901382911 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.042857142857142816, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.042857142857142816 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5686274509803921, + 
"acc_stderr": 0.028358956313423556, + "acc_norm": 0.5686274509803921, + "acc_norm_stderr": 0.028358956313423556 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.57, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.57, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6942148760330579, + "acc_stderr": 0.04205953933884122, + "acc_norm": 0.6942148760330579, + "acc_norm_stderr": 0.04205953933884122 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5657894736842105, + "acc_stderr": 0.040335656678483184, + "acc_norm": 0.5657894736842105, + "acc_norm_stderr": 0.040335656678483184 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.47549019607843135, + "acc_stderr": 0.020203517280261436, + "acc_norm": 0.47549019607843135, + "acc_norm_stderr": 0.020203517280261436 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3404255319148936, + "acc_stderr": 0.028267657482650147, + "acc_norm": 0.3404255319148936, + "acc_norm_stderr": 0.028267657482650147 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.36607142857142855, + "acc_stderr": 0.04572372358737431, + "acc_norm": 0.36607142857142855, + "acc_norm_stderr": 0.04572372358737431 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5046296296296297, + "acc_stderr": 0.03409825519163572, + "acc_norm": 0.5046296296296297, + "acc_norm_stderr": 0.03409825519163572 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2536312849162011, + "acc_stderr": 0.014551553659369918, + "acc_norm": 0.2536312849162011, + "acc_norm_stderr": 0.014551553659369918 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.63, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.63, + "acc_norm_stderr": 0.048523658709391 + }, + 
"harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5220588235294118, + "acc_stderr": 0.030343264224213528, + "acc_norm": 0.5220588235294118, + "acc_norm_stderr": 0.030343264224213528 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5428571428571428, + "acc_stderr": 0.03189141832421396, + "acc_norm": 0.5428571428571428, + "acc_norm_stderr": 0.03189141832421396 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.729957805907173, + "acc_stderr": 0.028900721906293426, + "acc_norm": 0.729957805907173, + "acc_norm_stderr": 0.028900721906293426 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.39048239895697523, + "acc_stderr": 0.012460135913945064, + "acc_norm": 0.39048239895697523, + "acc_norm_stderr": 0.012460135913945064 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5980392156862745, + "acc_stderr": 0.034411900234824655, + "acc_norm": 0.5980392156862745, + "acc_norm_stderr": 0.034411900234824655 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6787878787878788, + "acc_stderr": 0.036462049632538136, + "acc_norm": 0.6787878787878788, + "acc_norm_stderr": 0.036462049632538136 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2594859241126071, + "mc1_stderr": 0.01534540948555796, + "mc2": 0.39588513591602464, + "mc2_stderr": 0.015028634521657725 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.525383707201889, + "acc_stderr": 0.01716818720142926, + "acc_norm": 0.5726092089728453, + "acc_norm_stderr": 0.017008129844823156 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + 
"harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + 
"harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "realPCH/kosolra-wiki-QA", + "model_sha": "fb3eb54c278b5c8b157ee972392159fcdfdaeaf5", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/realPCH/kosolra_SFT_DPO_v0/result_2024-02-21 07:57:18.json b/realPCH/kosolra_SFT_DPO_v0/result_2024-02-21 07:57:18.json new file mode 100644 index 0000000000000000000000000000000000000000..24f0d67332b6f88ba61aa8237780a76c8bbaf343 --- /dev/null +++ b/realPCH/kosolra_SFT_DPO_v0/result_2024-02-21 07:57:18.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4812286689419795, + "acc_stderr": 0.014601090150633964, + "acc_norm": 0.5238907849829352, + "acc_norm_stderr": 0.01459470179807165 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4480183230432185, + "acc_stderr": 0.004962742426849881, + "acc_norm": 0.6205935072694683, + "acc_norm_stderr": 0.004842476363739978 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.631578947368421, + "acc_stderr": 0.036996580176568775, + "acc_norm": 0.631578947368421, + "acc_norm_stderr": 0.036996580176568775 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6407766990291263, + "acc_stderr": 0.047504583990416974, + "acc_norm": 0.6407766990291263, + "acc_norm_stderr": 0.047504583990416974 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.644955300127714, + "acc_stderr": 0.01711208577277299, + "acc_norm": 0.644955300127714, + "acc_norm_stderr": 0.01711208577277299 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45925925925925926, + "acc_stderr": 0.04304979692464243, + "acc_norm": 0.45925925925925926, 
+ "acc_norm_stderr": 0.04304979692464243 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.22, + "acc_stderr": 0.041633319989322695, + "acc_norm": 0.22, + "acc_norm_stderr": 0.041633319989322695 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4553191489361702, + "acc_stderr": 0.03255525359340354, + "acc_norm": 0.4553191489361702, + "acc_norm_stderr": 0.03255525359340354 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42168674698795183, + "acc_stderr": 0.03844453181770917, + "acc_norm": 0.42168674698795183, + "acc_norm_stderr": 0.03844453181770917 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6109324758842444, + "acc_stderr": 0.027690337536485376, + "acc_norm": 0.6109324758842444, + "acc_norm_stderr": 0.027690337536485376 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5381165919282511, + "acc_stderr": 0.033460150119732274, + "acc_norm": 0.5381165919282511, + "acc_norm_stderr": 0.033460150119732274 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6030534351145038, + "acc_stderr": 0.04291135671009225, + "acc_norm": 0.6030534351145038, + "acc_norm_stderr": 0.04291135671009225 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562429, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562429 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7323232323232324, + "acc_stderr": 0.03154449888270285, + "acc_norm": 0.7323232323232324, + "acc_norm_stderr": 0.03154449888270285 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4896551724137931, + "acc_stderr": 0.04165774775728763, + "acc_norm": 0.4896551724137931, + "acc_norm_stderr": 0.04165774775728763 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.04617034827006716, + "acc_norm": 0.3137254901960784, + "acc_norm_stderr": 0.04617034827006716 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6302521008403361, + "acc_stderr": 0.03135709599613591, + 
"acc_norm": 0.6302521008403361, + "acc_norm_stderr": 0.03135709599613591 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5333333333333333, + "acc_stderr": 0.02529460802398646, + "acc_norm": 0.5333333333333333, + "acc_norm_stderr": 0.02529460802398646 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.047609522856952344, + "acc_norm": 0.34, + "acc_norm_stderr": 0.047609522856952344 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6574074074074074, + "acc_stderr": 0.045879047413018105, + "acc_norm": 0.6574074074074074, + "acc_norm_stderr": 0.045879047413018105 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.03465304488406796, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.03465304488406796 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5838709677419355, + "acc_stderr": 0.02804098138076153, + "acc_norm": 0.5838709677419355, + "acc_norm_stderr": 0.02804098138076153 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7606837606837606, + "acc_stderr": 0.027951826808924333, + "acc_norm": 0.7606837606837606, + "acc_norm_stderr": 0.027951826808924333 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5132075471698113, + "acc_stderr": 0.030762134874500482, + "acc_norm": 0.5132075471698113, + "acc_norm_stderr": 0.030762134874500482 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5727272727272728, + "acc_stderr": 0.04738198703545483, + "acc_norm": 0.5727272727272728, + "acc_norm_stderr": 0.04738198703545483 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3074074074074074, + "acc_stderr": 0.028133252578815635, + "acc_norm": 0.3074074074074074, + "acc_norm_stderr": 0.028133252578815635 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 
0.33774834437086093, + "acc_stderr": 0.038615575462551684, + "acc_norm": 0.33774834437086093, + "acc_norm_stderr": 0.038615575462551684 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6417910447761194, + "acc_stderr": 0.03390393042268814, + "acc_norm": 0.6417910447761194, + "acc_norm_stderr": 0.03390393042268814 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5144508670520231, + "acc_stderr": 0.03810871630454764, + "acc_norm": 0.5144508670520231, + "acc_norm_stderr": 0.03810871630454764 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.4126984126984127, + "acc_stderr": 0.02535574126305527, + "acc_norm": 0.4126984126984127, + "acc_norm_stderr": 0.02535574126305527 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.04155319955593147, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.04155319955593147 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.71, + "acc_stderr": 0.04560480215720683, + "acc_norm": 0.71, + "acc_norm_stderr": 0.04560480215720683 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.569364161849711, + "acc_stderr": 0.026658800273672383, + "acc_norm": 0.569364161849711, + "acc_norm_stderr": 0.026658800273672383 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5030674846625767, + "acc_stderr": 0.03928297078179663, + "acc_norm": 0.5030674846625767, + "acc_norm_stderr": 0.03928297078179663 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5246913580246914, + "acc_stderr": 0.027786800931427443, + "acc_norm": 0.5246913580246914, + "acc_norm_stderr": 0.027786800931427443 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + 
"acc": 0.6839378238341969, + "acc_stderr": 0.033553973696861736, + "acc_norm": 0.6839378238341969, + "acc_norm_stderr": 0.033553973696861736 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04434600701584925, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04434600701584925 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6568807339449542, + "acc_stderr": 0.02035477773608604, + "acc_norm": 0.6568807339449542, + "acc_norm_stderr": 0.02035477773608604 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.36507936507936506, + "acc_stderr": 0.043062412591271526, + "acc_norm": 0.36507936507936506, + "acc_norm_stderr": 0.043062412591271526 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5326797385620915, + "acc_stderr": 0.02856869975222588, + "acc_norm": 0.5326797385620915, + "acc_norm_stderr": 0.02856869975222588 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6776859504132231, + "acc_stderr": 0.04266416363352167, + "acc_norm": 0.6776859504132231, + "acc_norm_stderr": 0.04266416363352167 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5131578947368421, + "acc_stderr": 0.04067533136309172, + "acc_norm": 0.5131578947368421, + "acc_norm_stderr": 0.04067533136309172 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4673202614379085, + "acc_stderr": 0.020184583359102202, + "acc_norm": 0.4673202614379085, + "acc_norm_stderr": 0.020184583359102202 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.028121636040639875, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.028121636040639875 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.39285714285714285, + "acc_stderr": 0.04635550135609976, + "acc_norm": 0.39285714285714285, + "acc_norm_stderr": 
0.04635550135609976 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5046296296296297, + "acc_stderr": 0.03409825519163572, + "acc_norm": 0.5046296296296297, + "acc_norm_stderr": 0.03409825519163572 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.3452513966480447, + "acc_stderr": 0.015901432608930358, + "acc_norm": 0.3452513966480447, + "acc_norm_stderr": 0.015901432608930358 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.47, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.47, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.63, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.63, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5147058823529411, + "acc_stderr": 0.03035969707904612, + "acc_norm": 0.5147058823529411, + "acc_norm_stderr": 0.03035969707904612 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5428571428571428, + "acc_stderr": 0.03189141832421396, + "acc_norm": 0.5428571428571428, + "acc_norm_stderr": 0.03189141832421396 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7130801687763713, + "acc_stderr": 0.029443773022594693, + "acc_norm": 0.7130801687763713, + "acc_norm_stderr": 0.029443773022594693 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3833116036505867, + "acc_stderr": 0.012417603662901186, + "acc_norm": 0.3833116036505867, + "acc_norm_stderr": 0.012417603662901186 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6127450980392157, + "acc_stderr": 0.03418931233833344, + "acc_norm": 0.6127450980392157, + "acc_norm_stderr": 0.03418931233833344 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6303030303030303, + "acc_stderr": 0.03769430314512568, + "acc_norm": 0.6303030303030303, + "acc_norm_stderr": 0.03769430314512568 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.35862913096695226, + "mc1_stderr": 
0.016789289499502025, + "mc2": 0.5313254234967402, + "mc2_stderr": 0.01618994736505858 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.46871310507674147, + "acc_stderr": 0.01715666685978546, + "acc_norm": 0.4911452184179457, + "acc_norm_stderr": 0.017187658199336733 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + 
"harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "realPCH/kosolra_SFT_DPO_v0", + "model_sha": "e93d1881008609d9d0859fc00b37da02cc03af95", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/refarde/Mistral-7B-Instruct-v0.2-Ko-S-Core/result_2024-01-18 21:10:51.json b/refarde/Mistral-7B-Instruct-v0.2-Ko-S-Core/result_2024-01-18 21:10:51.json new file mode 100644 index 0000000000000000000000000000000000000000..1e1f6baffe59e3f6bbee5b910d27f773704cc756 --- /dev/null +++ b/refarde/Mistral-7B-Instruct-v0.2-Ko-S-Core/result_2024-01-18 21:10:51.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2858361774744027, + "acc_stderr": 0.01320319608853737, + "acc_norm": 0.3438566552901024, + "acc_norm_stderr": 0.013880644570156218 + }, + "harness|ko_hellaswag|10": { + "acc": 0.34475204142601074, + "acc_stderr": 0.00474316003427115, + 
"acc_norm": 0.4245170284803824, + "acc_norm_stderr": 0.004932593348813623 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.0381107966983353, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.0381107966983353 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5048543689320388, + "acc_stderr": 0.049505043821289195, + "acc_norm": 0.5048543689320388, + "acc_norm_stderr": 0.049505043821289195 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4061302681992337, + "acc_stderr": 0.017562037406478912, + "acc_norm": 0.4061302681992337, + "acc_norm_stderr": 0.017562037406478912 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2814814814814815, + "acc_stderr": 0.03885004245800254, + "acc_norm": 0.2814814814814815, + "acc_norm_stderr": 0.03885004245800254 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.30638297872340425, + "acc_stderr": 0.030135906478517563, + "acc_norm": 0.30638297872340425, + "acc_norm_stderr": 0.030135906478517563 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.37349397590361444, + "acc_stderr": 0.03765845117168863, + "acc_norm": 0.37349397590361444, + "acc_norm_stderr": 0.03765845117168863 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.36012861736334406, + "acc_stderr": 0.027264297599804015, + "acc_norm": 0.36012861736334406, + "acc_norm_stderr": 0.027264297599804015 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.36771300448430494, + "acc_stderr": 0.03236198350928276, + "acc_norm": 0.36771300448430494, + "acc_norm_stderr": 0.03236198350928276 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3969465648854962, + "acc_stderr": 0.04291135671009224, + "acc_norm": 0.3969465648854962, + "acc_norm_stderr": 0.04291135671009224 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.44, + "acc_stderr": 
0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.47474747474747475, + "acc_stderr": 0.035578062450873145, + "acc_norm": 0.47474747474747475, + "acc_norm_stderr": 0.035578062450873145 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4206896551724138, + "acc_stderr": 0.0411391498118926, + "acc_norm": 0.4206896551724138, + "acc_norm_stderr": 0.0411391498118926 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.044405219061793275, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.044405219061793275 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.47478991596638653, + "acc_stderr": 0.0324371805513741, + "acc_norm": 0.47478991596638653, + "acc_norm_stderr": 0.0324371805513741 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4128205128205128, + "acc_stderr": 0.024962683564331817, + "acc_norm": 0.4128205128205128, + "acc_norm_stderr": 0.024962683564331817 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.04803752235190192, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.04803752235190192 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3694581280788177, + "acc_stderr": 0.033959703819985754, + "acc_norm": 0.3694581280788177, + "acc_norm_stderr": 0.033959703819985754 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4, + "acc_stderr": 0.027869320571664635, + "acc_norm": 0.4, + "acc_norm_stderr": 0.027869320571664635 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6709401709401709, + 
"acc_stderr": 0.030782321577688173, + "acc_norm": 0.6709401709401709, + "acc_norm_stderr": 0.030782321577688173 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4037735849056604, + "acc_stderr": 0.03019761160019795, + "acc_norm": 0.4037735849056604, + "acc_norm_stderr": 0.03019761160019795 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.39090909090909093, + "acc_stderr": 0.04673752333670237, + "acc_norm": 0.39090909090909093, + "acc_norm_stderr": 0.04673752333670237 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.34444444444444444, + "acc_stderr": 0.02897264888484427, + "acc_norm": 0.34444444444444444, + "acc_norm_stderr": 0.02897264888484427 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.36423841059602646, + "acc_stderr": 0.03929111781242741, + "acc_norm": 0.36423841059602646, + "acc_norm_stderr": 0.03929111781242741 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5522388059701493, + "acc_stderr": 0.03516184772952166, + "acc_norm": 0.5522388059701493, + "acc_norm_stderr": 0.03516184772952166 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.36416184971098264, + "acc_stderr": 0.03669072477416908, + "acc_norm": 0.36416184971098264, + "acc_norm_stderr": 0.03669072477416908 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.024594975128920945, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.024594975128920945 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3263888888888889, + "acc_stderr": 0.03921067198982266, + "acc_norm": 0.3263888888888889, + "acc_norm_stderr": 0.03921067198982266 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939098, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939098 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + 
"harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4508670520231214, + "acc_stderr": 0.02678881193156275, + "acc_norm": 0.4508670520231214, + "acc_norm_stderr": 0.02678881193156275 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3558282208588957, + "acc_stderr": 0.03761521380046734, + "acc_norm": 0.3558282208588957, + "acc_norm_stderr": 0.03761521380046734 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.404320987654321, + "acc_stderr": 0.027306625297327688, + "acc_norm": 0.404320987654321, + "acc_norm_stderr": 0.027306625297327688 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.48704663212435234, + "acc_stderr": 0.0360722806104775, + "acc_norm": 0.48704663212435234, + "acc_norm_stderr": 0.0360722806104775 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.32456140350877194, + "acc_stderr": 0.04404556157374768, + "acc_norm": 0.32456140350877194, + "acc_norm_stderr": 0.04404556157374768 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3779816513761468, + "acc_stderr": 0.02078918706672811, + "acc_norm": 0.3779816513761468, + "acc_norm_stderr": 0.02078918706672811 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3492063492063492, + "acc_stderr": 0.04263906892795132, + "acc_norm": 0.3492063492063492, + "acc_norm_stderr": 0.04263906892795132 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.434640522875817, + "acc_stderr": 0.028384256704883037, + "acc_norm": 0.434640522875817, + "acc_norm_stderr": 0.028384256704883037 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5785123966942148, + "acc_stderr": 0.045077322787750895, + "acc_norm": 0.5785123966942148, + "acc_norm_stderr": 
0.045077322787750895 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3618421052631579, + "acc_stderr": 0.039105257528497264, + "acc_norm": 0.3618421052631579, + "acc_norm_stderr": 0.039105257528497264 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.37254901960784315, + "acc_stderr": 0.019559646809215934, + "acc_norm": 0.37254901960784315, + "acc_norm_stderr": 0.019559646809215934 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32978723404255317, + "acc_stderr": 0.028045946942042398, + "acc_norm": 0.32978723404255317, + "acc_norm_stderr": 0.028045946942042398 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.41964285714285715, + "acc_stderr": 0.04684099321077106, + "acc_norm": 0.41964285714285715, + "acc_norm_stderr": 0.04684099321077106 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4027777777777778, + "acc_stderr": 0.03344887382997865, + "acc_norm": 0.4027777777777778, + "acc_norm_stderr": 0.03344887382997865 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24692737430167597, + "acc_stderr": 0.014422292204808854, + "acc_norm": 0.24692737430167597, + "acc_norm_stderr": 0.014422292204808854 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.375, + "acc_stderr": 0.029408372932278746, + "acc_norm": 0.375, + "acc_norm_stderr": 0.029408372932278746 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4775510204081633, + "acc_stderr": 0.03197694118713673, + "acc_norm": 0.4775510204081633, + "acc_norm_stderr": 0.03197694118713673 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.4936708860759494, + "acc_stderr": 0.032544620107678585, 
+ "acc_norm": 0.4936708860759494, + "acc_norm_stderr": 0.032544620107678585 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3070404172099087, + "acc_stderr": 0.011780959114513765, + "acc_norm": 0.3070404172099087, + "acc_norm_stderr": 0.011780959114513765 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.39215686274509803, + "acc_stderr": 0.03426712349247271, + "acc_norm": 0.39215686274509803, + "acc_norm_stderr": 0.03426712349247271 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3393939393939394, + "acc_stderr": 0.03697442205031596, + "acc_norm": 0.3393939393939394, + "acc_norm_stderr": 0.03697442205031596 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.31701346389228885, + "mc1_stderr": 0.016289203374403396, + "mc2": 0.4785932131213098, + "mc2_stderr": 0.015797352863305174 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.33884297520661155, + "acc_stderr": 0.016272952997019124, + "acc_norm": 0.4037780401416765, + "acc_norm_stderr": 0.01686903154029863 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + 
"harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "refarde/Mistral-7B-Instruct-v0.2-Ko-S-Core", + "model_sha": "3ae5c7296148b59bd194daf53806ee3cad2d8307", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + 
"override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/refarde/OPEN-SOLAR-KO-10.7B-S-Core/result_2024-01-23 14:04:44.json b/refarde/OPEN-SOLAR-KO-10.7B-S-Core/result_2024-01-23 14:04:44.json new file mode 100644 index 0000000000000000000000000000000000000000..0ce7a4773215978d32f9bdd0186c86d9b2616389 --- /dev/null +++ b/refarde/OPEN-SOLAR-KO-10.7B-S-Core/result_2024-01-23 14:04:44.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.41552901023890787, + "acc_stderr": 0.014401366641216384, + "acc_norm": 0.47440273037542663, + "acc_norm_stderr": 0.014592230885298959 + }, + "harness|ko_hellaswag|10": { + "acc": 0.42332204740091617, + "acc_stderr": 0.004930757390897346, + "acc_norm": 0.5761800438159729, + "acc_norm_stderr": 0.004931525961035756 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5730994152046783, + "acc_stderr": 0.03793620616529917, + "acc_norm": 0.5730994152046783, + "acc_norm_stderr": 0.03793620616529917 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6213592233009708, + "acc_stderr": 0.04802694698258973, + "acc_norm": 0.6213592233009708, + "acc_norm_stderr": 0.04802694698258973 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6360153256704981, + "acc_stderr": 0.017205684809032232, + "acc_norm": 0.6360153256704981, + "acc_norm_stderr": 0.017205684809032232 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.04309732901036354, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.04309732901036354 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4340425531914894, + "acc_stderr": 0.032400380867927465, + "acc_norm": 0.4340425531914894, + "acc_norm_stderr": 0.032400380867927465 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42771084337349397, + "acc_stderr": 
0.03851597683718533, + "acc_norm": 0.42771084337349397, + "acc_norm_stderr": 0.03851597683718533 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5176848874598071, + "acc_stderr": 0.02838032284907713, + "acc_norm": 0.5176848874598071, + "acc_norm_stderr": 0.02838032284907713 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.547085201793722, + "acc_stderr": 0.033408675019233246, + "acc_norm": 0.547085201793722, + "acc_norm_stderr": 0.033408675019233246 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4732824427480916, + "acc_stderr": 0.04379024936553894, + "acc_norm": 0.4732824427480916, + "acc_norm_stderr": 0.04379024936553894 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6262626262626263, + "acc_stderr": 0.034468977386593325, + "acc_norm": 0.6262626262626263, + "acc_norm_stderr": 0.034468977386593325 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4413793103448276, + "acc_stderr": 0.04137931034482758, + "acc_norm": 0.4413793103448276, + "acc_norm_stderr": 0.04137931034482758 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.042207736591714534, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.042207736591714534 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.453781512605042, + "acc_stderr": 0.03233943468182088, + "acc_norm": 0.453781512605042, + "acc_norm_stderr": 0.03233943468182088 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.46923076923076923, + "acc_stderr": 0.025302958890850154, + "acc_norm": 0.46923076923076923, + "acc_norm_stderr": 0.025302958890850154 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.57, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.57, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_global_facts|5": { + 
"acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.33497536945812806, + "acc_stderr": 0.033208527423483104, + "acc_norm": 0.33497536945812806, + "acc_norm_stderr": 0.033208527423483104 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.532258064516129, + "acc_stderr": 0.028384747788813336, + "acc_norm": 0.532258064516129, + "acc_norm_stderr": 0.028384747788813336 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7393162393162394, + "acc_stderr": 0.02876034895652341, + "acc_norm": 0.7393162393162394, + "acc_norm_stderr": 0.02876034895652341 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.47924528301886793, + "acc_stderr": 0.030746349975723463, + "acc_norm": 0.47924528301886793, + "acc_norm_stderr": 0.030746349975723463 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.32222222222222224, + "acc_stderr": 0.028493465091028604, + "acc_norm": 0.32222222222222224, + "acc_norm_stderr": 0.028493465091028604 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.32450331125827814, + "acc_stderr": 0.038227469376587525, + "acc_norm": 0.32450331125827814, + "acc_norm_stderr": 0.038227469376587525 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5970149253731343, + "acc_stderr": 0.03468343295111126, + "acc_norm": 0.5970149253731343, + "acc_norm_stderr": 0.03468343295111126 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4277456647398844, + "acc_stderr": 0.037724468575180276, + "acc_norm": 0.4277456647398844, + "acc_norm_stderr": 
0.037724468575180276 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3306878306878307, + "acc_stderr": 0.024229965298425072, + "acc_norm": 0.3306878306878307, + "acc_norm_stderr": 0.024229965298425072 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4861111111111111, + "acc_stderr": 0.04179596617581, + "acc_norm": 0.4861111111111111, + "acc_norm_stderr": 0.04179596617581 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.65, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.65, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.45664739884393063, + "acc_stderr": 0.02681771813034892, + "acc_norm": 0.45664739884393063, + "acc_norm_stderr": 0.02681771813034892 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5276073619631901, + "acc_stderr": 0.0392237829061099, + "acc_norm": 0.5276073619631901, + "acc_norm_stderr": 0.0392237829061099 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5092592592592593, + "acc_stderr": 0.027815973433878014, + "acc_norm": 0.5092592592592593, + "acc_norm_stderr": 0.027815973433878014 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5751295336787565, + "acc_stderr": 0.035674713352125395, + "acc_norm": 0.5751295336787565, + "acc_norm_stderr": 0.035674713352125395 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3508771929824561, + "acc_stderr": 0.044895393502706986, + "acc_norm": 0.3508771929824561, + "acc_norm_stderr": 0.044895393502706986 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5522935779816514, + "acc_stderr": 0.021319754962425455, + "acc_norm": 0.5522935779816514, + 
"acc_norm_stderr": 0.021319754962425455 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30158730158730157, + "acc_stderr": 0.04104947269903394, + "acc_norm": 0.30158730158730157, + "acc_norm_stderr": 0.04104947269903394 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.42810457516339867, + "acc_stderr": 0.028332397483664278, + "acc_norm": 0.42810457516339867, + "acc_norm_stderr": 0.028332397483664278 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956914, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956914 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5785123966942148, + "acc_stderr": 0.045077322787750874, + "acc_norm": 0.5785123966942148, + "acc_norm_stderr": 0.045077322787750874 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.47368421052631576, + "acc_stderr": 0.040633027314866704, + "acc_norm": 0.47368421052631576, + "acc_norm_stderr": 0.040633027314866704 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.40032679738562094, + "acc_stderr": 0.019821843688271765, + "acc_norm": 0.40032679738562094, + "acc_norm_stderr": 0.019821843688271765 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3120567375886525, + "acc_stderr": 0.02764012054516993, + "acc_norm": 0.3120567375886525, + "acc_norm_stderr": 0.02764012054516993 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25, + "acc_stderr": 0.04109974682633932, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04109974682633932 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.33796296296296297, + "acc_stderr": 0.03225941352631295, + "acc_norm": 0.33796296296296297, + "acc_norm_stderr": 0.03225941352631295 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2324022346368715, + "acc_stderr": 0.01412596875467338, + "acc_norm": 0.2324022346368715, + "acc_norm_stderr": 0.01412596875467338 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + 
"acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.35661764705882354, + "acc_stderr": 0.02909720956841196, + "acc_norm": 0.35661764705882354, + "acc_norm_stderr": 0.02909720956841196 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.46938775510204084, + "acc_stderr": 0.031949171367580624, + "acc_norm": 0.46938775510204084, + "acc_norm_stderr": 0.031949171367580624 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6371308016877637, + "acc_stderr": 0.031299208255302136, + "acc_norm": 0.6371308016877637, + "acc_norm_stderr": 0.031299208255302136 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.31290743155149936, + "acc_stderr": 0.011842529823062997, + "acc_norm": 0.31290743155149936, + "acc_norm_stderr": 0.011842529823062997 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5588235294117647, + "acc_stderr": 0.034849415144292316, + "acc_norm": 0.5588235294117647, + "acc_norm_stderr": 0.034849415144292316 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5272727272727272, + "acc_stderr": 0.03898531605579418, + "acc_norm": 0.5272727272727272, + "acc_norm_stderr": 0.03898531605579418 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3072215422276622, + "mc1_stderr": 0.016150201321323006, + "mc2": 0.478666696455671, + "mc2_stderr": 0.015233450690646431 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5277449822904369, + "acc_stderr": 0.01716386797945602, + "acc_norm": 0.5596221959858324, + "acc_norm_stderr": 0.017067699774312967 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + 
"harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + 
"harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "refarde/OPEN-SOLAR-KO-10.7B-S-Core", + "model_sha": "042a4e7233b4256636749011ef67b8b7b6abfb60", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/richard-park/inst-aihub-trans/result_2024-07-11 00:53:22.json b/richard-park/inst-aihub-trans/result_2024-07-11 00:53:22.json new file mode 100644 index 0000000000000000000000000000000000000000..52c4c8d6e7db8def80aff966827f4fd5c2957297 --- /dev/null +++ b/richard-park/inst-aihub-trans/result_2024-07-11 00:53:22.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.363481228668942, + "acc_stderr": 0.014056207319068285, + "acc_norm": 0.4197952218430034, + "acc_norm_stderr": 0.014422181226303024 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3595897231627166, + "acc_stderr": 0.0047889940606542745, + "acc_norm": 0.4636526588329018, + "acc_norm_stderr": 0.004976579655169287 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5087719298245614, + "acc_stderr": 0.03834234744164993, + "acc_norm": 0.5087719298245614, + "acc_norm_stderr": 0.03834234744164993 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5728155339805825, + "acc_stderr": 0.04897957737781168, + "acc_norm": 0.5728155339805825, + "acc_norm_stderr": 0.04897957737781168 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 
0.44316730523627074, + "acc_stderr": 0.017764085035348397, + "acc_norm": 0.44316730523627074, + "acc_norm_stderr": 0.017764085035348397 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.04171654161354543, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.04171654161354543 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621503, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621503 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4425531914893617, + "acc_stderr": 0.03246956919789958, + "acc_norm": 0.4425531914893617, + "acc_norm_stderr": 0.03246956919789958 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4036144578313253, + "acc_stderr": 0.03819486140758397, + "acc_norm": 0.4036144578313253, + "acc_norm_stderr": 0.03819486140758397 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5016077170418006, + "acc_stderr": 0.02839794490780661, + "acc_norm": 0.5016077170418006, + "acc_norm_stderr": 0.02839794490780661 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4304932735426009, + "acc_stderr": 0.0332319730294294, + "acc_norm": 0.4304932735426009, + "acc_norm_stderr": 0.0332319730294294 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5114503816793893, + "acc_stderr": 0.043841400240780176, + "acc_norm": 0.5114503816793893, + "acc_norm_stderr": 0.043841400240780176 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.47474747474747475, + "acc_stderr": 0.035578062450873145, + "acc_norm": 0.47474747474747475, + "acc_norm_stderr": 0.035578062450873145 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5517241379310345, + "acc_stderr": 0.04144311810878151, + "acc_norm": 0.5517241379310345, + "acc_norm_stderr": 0.04144311810878151 + }, + "harness|ko_mmlu_college_physics|5": { + 
"acc": 0.27450980392156865, + "acc_stderr": 0.04440521906179328, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.04440521906179328 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5126050420168067, + "acc_stderr": 0.032468167657521745, + "acc_norm": 0.5126050420168067, + "acc_norm_stderr": 0.032468167657521745 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4846153846153846, + "acc_stderr": 0.02533900301010653, + "acc_norm": 0.4846153846153846, + "acc_norm_stderr": 0.02533900301010653 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.59, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.59, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6296296296296297, + "acc_stderr": 0.04668408033024931, + "acc_norm": 0.6296296296296297, + "acc_norm_stderr": 0.04668408033024931 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.45320197044334976, + "acc_stderr": 0.035025446508458714, + "acc_norm": 0.45320197044334976, + "acc_norm_stderr": 0.035025446508458714 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.49032258064516127, + "acc_stderr": 0.028438677998909558, + "acc_norm": 0.49032258064516127, + "acc_norm_stderr": 0.028438677998909558 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7008547008547008, + "acc_stderr": 0.029996951858349472, + "acc_norm": 0.7008547008547008, + "acc_norm_stderr": 0.029996951858349472 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4377358490566038, + "acc_stderr": 0.03053333843046751, + "acc_norm": 0.4377358490566038, + "acc_norm_stderr": 0.03053333843046751 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5181818181818182, + "acc_stderr": 0.04785964010794916, + "acc_norm": 0.5181818181818182, + "acc_norm_stderr": 0.04785964010794916 + 
}, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.34074074074074073, + "acc_stderr": 0.028897748741131147, + "acc_norm": 0.34074074074074073, + "acc_norm_stderr": 0.028897748741131147 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.41721854304635764, + "acc_stderr": 0.040261414976346104, + "acc_norm": 0.41721854304635764, + "acc_norm_stderr": 0.040261414976346104 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6766169154228856, + "acc_stderr": 0.03307615947979035, + "acc_norm": 0.6766169154228856, + "acc_norm_stderr": 0.03307615947979035 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.45664739884393063, + "acc_stderr": 0.03798106566014498, + "acc_norm": 0.45664739884393063, + "acc_norm_stderr": 0.03798106566014498 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3862433862433862, + "acc_stderr": 0.02507598176760168, + "acc_norm": 0.3862433862433862, + "acc_norm_stderr": 0.02507598176760168 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.040166600304512336, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.040166600304512336 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.63, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.63, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.546242774566474, + "acc_stderr": 0.026803720583206177, + "acc_norm": 0.546242774566474, + "acc_norm_stderr": 0.026803720583206177 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4601226993865031, + "acc_stderr": 0.0391585729143697, + "acc_norm": 0.4601226993865031, + "acc_norm_stderr": 0.0391585729143697 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5030864197530864, + "acc_stderr": 0.027820214158594363, + "acc_norm": 0.5030864197530864, + 
"acc_norm_stderr": 0.027820214158594363 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5077720207253886, + "acc_stderr": 0.03608003225569654, + "acc_norm": 0.5077720207253886, + "acc_norm_stderr": 0.03608003225569654 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.32456140350877194, + "acc_stderr": 0.04404556157374768, + "acc_norm": 0.32456140350877194, + "acc_norm_stderr": 0.04404556157374768 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5321100917431193, + "acc_stderr": 0.021393071222680804, + "acc_norm": 0.5321100917431193, + "acc_norm_stderr": 0.021393071222680804 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.04285714285714281, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.04285714285714281 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5065359477124183, + "acc_stderr": 0.028627470550556047, + "acc_norm": 0.5065359477124183, + "acc_norm_stderr": 0.028627470550556047 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.53, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.53, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6611570247933884, + "acc_stderr": 0.043207678075366684, + "acc_norm": 0.6611570247933884, + "acc_norm_stderr": 0.043207678075366684 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.47368421052631576, + "acc_stderr": 0.04063302731486671, + "acc_norm": 0.47368421052631576, + "acc_norm_stderr": 0.04063302731486671 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.41830065359477125, + "acc_stderr": 0.019955975145835542, + "acc_norm": 0.41830065359477125, + "acc_norm_stderr": 0.019955975145835542 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3475177304964539, + "acc_stderr": 
0.02840662780959095, + "acc_norm": 0.3475177304964539, + "acc_norm_stderr": 0.02840662780959095 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.04697113923010213, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.04697113923010213 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4351851851851852, + "acc_stderr": 0.033812000056435254, + "acc_norm": 0.4351851851851852, + "acc_norm_stderr": 0.033812000056435254 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2581005586592179, + "acc_stderr": 0.014635185616527836, + "acc_norm": 0.2581005586592179, + "acc_norm_stderr": 0.014635185616527836 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.69, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.69, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.36764705882352944, + "acc_stderr": 0.029289413409403192, + "acc_norm": 0.36764705882352944, + "acc_norm_stderr": 0.029289413409403192 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5836734693877551, + "acc_stderr": 0.03155782816556165, + "acc_norm": 0.5836734693877551, + "acc_norm_stderr": 0.03155782816556165 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5907172995780591, + "acc_stderr": 0.032007041833595914, + "acc_norm": 0.5907172995780591, + "acc_norm_stderr": 0.032007041833595914 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3617992177314211, + "acc_stderr": 0.012272736233262941, + "acc_norm": 0.3617992177314211, + "acc_norm_stderr": 0.012272736233262941 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5735294117647058, + "acc_stderr": 0.03471157907953427, + "acc_norm": 0.5735294117647058, + "acc_norm_stderr": 0.03471157907953427 + }, + 
"harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6121212121212121, + "acc_stderr": 0.0380491365397101, + "acc_norm": 0.6121212121212121, + "acc_norm_stderr": 0.0380491365397101 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3011015911872705, + "mc1_stderr": 0.01605899902610062, + "mc2": 0.4521869189988193, + "mc2_stderr": 0.015321568525318779 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3955135773317591, + "acc_stderr": 0.016810815902206042, + "acc_norm": 0.43683589138134593, + "acc_norm_stderr": 0.017052633559856076 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + 
"harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "richard-park/inst-aihub-trans", + "model_sha": "f761651f1cd50c7d15f2aef6765d0f06bddf9389", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/richard-park/llama-3-8B-inst-unsloth-ko-merged-1/result_2024-07-03 23:53:14.json b/richard-park/llama-3-8B-inst-unsloth-ko-merged-1/result_2024-07-03 23:53:14.json new file mode 100644 index 0000000000000000000000000000000000000000..1a27debb3c148f409c0aaedcb02e5f5745fa0a36 --- /dev/null +++ b/richard-park/llama-3-8B-inst-unsloth-ko-merged-1/result_2024-07-03 23:53:14.json @@ -0,0 +1,444 @@ +{ + 
"results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3447098976109215, + "acc_stderr": 0.01388881628678211, + "acc_norm": 0.39334470989761094, + "acc_norm_stderr": 0.014275101465693028 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3509261103365863, + "acc_stderr": 0.00476284477090985, + "acc_norm": 0.45120493925512845, + "acc_norm_stderr": 0.004965963647210315 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4678362573099415, + "acc_stderr": 0.03826882417660368, + "acc_norm": 0.4678362573099415, + "acc_norm_stderr": 0.03826882417660368 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5631067961165048, + "acc_stderr": 0.049111471073657764, + "acc_norm": 0.5631067961165048, + "acc_norm_stderr": 0.049111471073657764 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4342273307790549, + "acc_stderr": 0.017724589389677785, + "acc_norm": 0.4342273307790549, + "acc_norm_stderr": 0.017724589389677785 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.04171654161354543, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.04171654161354543 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.46808510638297873, + "acc_stderr": 0.03261936918467381, + "acc_norm": 0.46808510638297873, + "acc_norm_stderr": 0.03261936918467381 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4036144578313253, + "acc_stderr": 0.03819486140758397, + "acc_norm": 0.4036144578313253, + "acc_norm_stderr": 0.03819486140758397 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4855305466237942, + "acc_stderr": 0.02838619808417768, + "acc_norm": 0.4855305466237942, + "acc_norm_stderr": 0.02838619808417768 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3901345291479821, + "acc_stderr": 0.03273766725459156, + "acc_norm": 0.3901345291479821, + "acc_norm_stderr": 
0.03273766725459156 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4961832061068702, + "acc_stderr": 0.043851623256015534, + "acc_norm": 0.4961832061068702, + "acc_norm_stderr": 0.043851623256015534 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.4797979797979798, + "acc_stderr": 0.03559443565563918, + "acc_norm": 0.4797979797979798, + "acc_norm_stderr": 0.03559443565563918 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5724137931034483, + "acc_stderr": 0.04122737111370333, + "acc_norm": 0.5724137931034483, + "acc_norm_stderr": 0.04122737111370333 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3235294117647059, + "acc_stderr": 0.046550104113196177, + "acc_norm": 0.3235294117647059, + "acc_norm_stderr": 0.046550104113196177 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5, + "acc_stderr": 0.032478490123081544, + "acc_norm": 0.5, + "acc_norm_stderr": 0.032478490123081544 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.44358974358974357, + "acc_stderr": 0.02518914989476419, + "acc_norm": 0.44358974358974357, + "acc_norm_stderr": 0.02518914989476419 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.59, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.59, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.04803752235190192, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.04803752235190192 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.47783251231527096, + "acc_stderr": 0.03514528562175008, + "acc_norm": 0.47783251231527096, + "acc_norm_stderr": 
0.03514528562175008 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.45161290322580644, + "acc_stderr": 0.02831050034856839, + "acc_norm": 0.45161290322580644, + "acc_norm_stderr": 0.02831050034856839 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6794871794871795, + "acc_stderr": 0.030572811310299604, + "acc_norm": 0.6794871794871795, + "acc_norm_stderr": 0.030572811310299604 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.44528301886792454, + "acc_stderr": 0.03058805297427066, + "acc_norm": 0.44528301886792454, + "acc_norm_stderr": 0.03058805297427066 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5363636363636364, + "acc_stderr": 0.04776449162396197, + "acc_norm": 0.5363636363636364, + "acc_norm_stderr": 0.04776449162396197 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.362962962962963, + "acc_stderr": 0.029318203645206858, + "acc_norm": 0.362962962962963, + "acc_norm_stderr": 0.029318203645206858 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3576158940397351, + "acc_stderr": 0.03913453431177258, + "acc_norm": 0.3576158940397351, + "acc_norm_stderr": 0.03913453431177258 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5870646766169154, + "acc_stderr": 0.03481520803367348, + "acc_norm": 0.5870646766169154, + "acc_norm_stderr": 0.03481520803367348 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.41040462427745666, + "acc_stderr": 0.03750757044895537, + "acc_norm": 0.41040462427745666, + "acc_norm_stderr": 0.03750757044895537 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.024870815251057096, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.024870815251057096 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3472222222222222, + "acc_stderr": 0.039812405437178615, + "acc_norm": 0.3472222222222222, + "acc_norm_stderr": 0.039812405437178615 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.43, + "acc_stderr": 
0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.62, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.62, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5, + "acc_stderr": 0.026919095102908273, + "acc_norm": 0.5, + "acc_norm_stderr": 0.026919095102908273 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4049079754601227, + "acc_stderr": 0.03856672163548913, + "acc_norm": 0.4049079754601227, + "acc_norm_stderr": 0.03856672163548913 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.49382716049382713, + "acc_stderr": 0.027818623962583295, + "acc_norm": 0.49382716049382713, + "acc_norm_stderr": 0.027818623962583295 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.47668393782383417, + "acc_stderr": 0.03604513672442206, + "acc_norm": 0.47668393782383417, + "acc_norm_stderr": 0.03604513672442206 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.044346007015849245, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.044346007015849245 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.48440366972477067, + "acc_stderr": 0.02142689153920805, + "acc_norm": 0.48440366972477067, + "acc_norm_stderr": 0.02142689153920805 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3968253968253968, + "acc_stderr": 0.04375888492727061, + "acc_norm": 0.3968253968253968, + "acc_norm_stderr": 0.04375888492727061 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5359477124183006, + "acc_stderr": 0.028555827516528777, + "acc_norm": 0.5359477124183006, + "acc_norm_stderr": 0.028555827516528777 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + 
"acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6776859504132231, + "acc_stderr": 0.04266416363352167, + "acc_norm": 0.6776859504132231, + "acc_norm_stderr": 0.04266416363352167 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4605263157894737, + "acc_stderr": 0.04056242252249032, + "acc_norm": 0.4605263157894737, + "acc_norm_stderr": 0.04056242252249032 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3660130718954248, + "acc_stderr": 0.019488025745529672, + "acc_norm": 0.3660130718954248, + "acc_norm_stderr": 0.019488025745529672 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.31560283687943264, + "acc_stderr": 0.027724989449509314, + "acc_norm": 0.31560283687943264, + "acc_norm_stderr": 0.027724989449509314 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4375, + "acc_stderr": 0.04708567521880525, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.04708567521880525 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.033509916046960436, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.033509916046960436 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24804469273743016, + "acc_stderr": 0.01444415780826145, + "acc_norm": 0.24804469273743016, + "acc_norm_stderr": 0.01444415780826145 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.64, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.64, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3602941176470588, + "acc_stderr": 0.029163128570670733, + "acc_norm": 0.3602941176470588, + "acc_norm_stderr": 0.029163128570670733 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5306122448979592, + 
"acc_stderr": 0.031949171367580624, + "acc_norm": 0.5306122448979592, + "acc_norm_stderr": 0.031949171367580624 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5527426160337553, + "acc_stderr": 0.03236564251614192, + "acc_norm": 0.5527426160337553, + "acc_norm_stderr": 0.03236564251614192 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.34419817470664926, + "acc_stderr": 0.012134433741002575, + "acc_norm": 0.34419817470664926, + "acc_norm_stderr": 0.012134433741002575 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5098039215686274, + "acc_stderr": 0.035086373586305716, + "acc_norm": 0.5098039215686274, + "acc_norm_stderr": 0.035086373586305716 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6181818181818182, + "acc_stderr": 0.03793713171165634, + "acc_norm": 0.6181818181818182, + "acc_norm_stderr": 0.03793713171165634 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2864137086903305, + "mc1_stderr": 0.01582614243950234, + "mc2": 0.43017145552190944, + "mc2_stderr": 0.015185918828173776 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.39315230224321135, + "acc_stderr": 0.016793262801287078, + "acc_norm": 0.4297520661157025, + "acc_norm_stderr": 0.017019847535972205 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + 
"harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + 
"model_name": "richard-park/llama-3-8B-inst-unsloth-ko-merged-1", + "model_sha": "9e19543cd11634a8926417d45e76097d7ba281d0", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/richard-park/llama-3-8B-inst-unsloth-ko-merged/result_2024-07-03 08:10:23.json b/richard-park/llama-3-8B-inst-unsloth-ko-merged/result_2024-07-03 08:10:23.json new file mode 100644 index 0000000000000000000000000000000000000000..fc64eb4350d7eeb9be5e68bdc730ad9f3993ef80 --- /dev/null +++ b/richard-park/llama-3-8B-inst-unsloth-ko-merged/result_2024-07-03 08:10:23.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3455631399317406, + "acc_stderr": 0.013896938461145687, + "acc_norm": 0.3924914675767918, + "acc_norm_stderr": 0.01426963463567071 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3509261103365863, + "acc_stderr": 0.00476284477090985, + "acc_norm": 0.45130452101175067, + "acc_norm_stderr": 0.00496606099531506 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4678362573099415, + "acc_stderr": 0.03826882417660368, + "acc_norm": 0.4678362573099415, + "acc_norm_stderr": 0.03826882417660368 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5631067961165048, + "acc_stderr": 0.049111471073657764, + "acc_norm": 0.5631067961165048, + "acc_norm_stderr": 0.049111471073657764 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4342273307790549, + "acc_stderr": 0.017724589389677785, + "acc_norm": 0.4342273307790549, + "acc_norm_stderr": 0.017724589389677785 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.04171654161354543, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.04171654161354543 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, 
+ "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.46808510638297873, + "acc_stderr": 0.03261936918467381, + "acc_norm": 0.46808510638297873, + "acc_norm_stderr": 0.03261936918467381 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4036144578313253, + "acc_stderr": 0.03819486140758397, + "acc_norm": 0.4036144578313253, + "acc_norm_stderr": 0.03819486140758397 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4855305466237942, + "acc_stderr": 0.02838619808417768, + "acc_norm": 0.4855305466237942, + "acc_norm_stderr": 0.02838619808417768 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.39461883408071746, + "acc_stderr": 0.03280400504755291, + "acc_norm": 0.39461883408071746, + "acc_norm_stderr": 0.03280400504755291 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4961832061068702, + "acc_stderr": 0.043851623256015534, + "acc_norm": 0.4961832061068702, + "acc_norm_stderr": 0.043851623256015534 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.48484848484848486, + "acc_stderr": 0.03560716516531061, + "acc_norm": 0.48484848484848486, + "acc_norm_stderr": 0.03560716516531061 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5724137931034483, + "acc_stderr": 0.04122737111370333, + "acc_norm": 0.5724137931034483, + "acc_norm_stderr": 0.04122737111370333 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04690650298201943, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04690650298201943 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5, + "acc_stderr": 0.032478490123081544, + "acc_norm": 0.5, + "acc_norm_stderr": 0.032478490123081544 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4461538461538462, + "acc_stderr": 0.025203571773028333, + "acc_norm": 0.4461538461538462, + 
"acc_norm_stderr": 0.025203571773028333 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.59, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.59, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5462962962962963, + "acc_stderr": 0.048129173245368216, + "acc_norm": 0.5462962962962963, + "acc_norm_stderr": 0.048129173245368216 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4729064039408867, + "acc_stderr": 0.03512819077876106, + "acc_norm": 0.4729064039408867, + "acc_norm_stderr": 0.03512819077876106 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.45483870967741935, + "acc_stderr": 0.028327743091561067, + "acc_norm": 0.45483870967741935, + "acc_norm_stderr": 0.028327743091561067 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6794871794871795, + "acc_stderr": 0.030572811310299604, + "acc_norm": 0.6794871794871795, + "acc_norm_stderr": 0.030572811310299604 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.44528301886792454, + "acc_stderr": 0.03058805297427066, + "acc_norm": 0.44528301886792454, + "acc_norm_stderr": 0.03058805297427066 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5363636363636364, + "acc_stderr": 0.04776449162396197, + "acc_norm": 0.5363636363636364, + "acc_norm_stderr": 0.04776449162396197 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.362962962962963, + "acc_stderr": 0.029318203645206858, + "acc_norm": 0.362962962962963, + "acc_norm_stderr": 0.029318203645206858 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3576158940397351, + "acc_stderr": 0.03913453431177258, + "acc_norm": 0.3576158940397351, + "acc_norm_stderr": 0.03913453431177258 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5870646766169154, + "acc_stderr": 0.03481520803367348, + 
"acc_norm": 0.5870646766169154, + "acc_norm_stderr": 0.03481520803367348 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.41040462427745666, + "acc_stderr": 0.03750757044895537, + "acc_norm": 0.41040462427745666, + "acc_norm_stderr": 0.03750757044895537 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.37566137566137564, + "acc_stderr": 0.02494236893115979, + "acc_norm": 0.37566137566137564, + "acc_norm_stderr": 0.02494236893115979 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3472222222222222, + "acc_stderr": 0.039812405437178615, + "acc_norm": 0.3472222222222222, + "acc_norm_stderr": 0.039812405437178615 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.62, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.62, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5028901734104047, + "acc_stderr": 0.02691864538323901, + "acc_norm": 0.5028901734104047, + "acc_norm_stderr": 0.02691864538323901 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4049079754601227, + "acc_stderr": 0.03856672163548913, + "acc_norm": 0.4049079754601227, + "acc_norm_stderr": 0.03856672163548913 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.027815973433878014, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.027815973433878014 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.47668393782383417, + "acc_stderr": 0.03604513672442206, + "acc_norm": 0.47668393782383417, + "acc_norm_stderr": 0.03604513672442206 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 
0.044346007015849245, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.044346007015849245 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.48440366972477067, + "acc_stderr": 0.02142689153920805, + "acc_norm": 0.48440366972477067, + "acc_norm_stderr": 0.02142689153920805 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3968253968253968, + "acc_stderr": 0.04375888492727061, + "acc_norm": 0.3968253968253968, + "acc_norm_stderr": 0.04375888492727061 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5392156862745098, + "acc_stderr": 0.028541722692618874, + "acc_norm": 0.5392156862745098, + "acc_norm_stderr": 0.028541722692618874 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6776859504132231, + "acc_stderr": 0.04266416363352167, + "acc_norm": 0.6776859504132231, + "acc_norm_stderr": 0.04266416363352167 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4605263157894737, + "acc_stderr": 0.04056242252249032, + "acc_norm": 0.4605263157894737, + "acc_norm_stderr": 0.04056242252249032 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3660130718954248, + "acc_stderr": 0.019488025745529672, + "acc_norm": 0.3660130718954248, + "acc_norm_stderr": 0.019488025745529672 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.30851063829787234, + "acc_stderr": 0.027553366165101366, + "acc_norm": 0.30851063829787234, + "acc_norm_stderr": 0.027553366165101366 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.04697113923010213, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.04697113923010213 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.033509916046960436, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.033509916046960436 + }, + 
"harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24804469273743016, + "acc_stderr": 0.014444157808261452, + "acc_norm": 0.24804469273743016, + "acc_norm_stderr": 0.014444157808261452 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.64, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.64, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.35661764705882354, + "acc_stderr": 0.02909720956841195, + "acc_norm": 0.35661764705882354, + "acc_norm_stderr": 0.02909720956841195 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5265306122448979, + "acc_stderr": 0.03196412734523272, + "acc_norm": 0.5265306122448979, + "acc_norm_stderr": 0.03196412734523272 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5527426160337553, + "acc_stderr": 0.03236564251614192, + "acc_norm": 0.5527426160337553, + "acc_norm_stderr": 0.03236564251614192 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.34615384615384615, + "acc_stderr": 0.012150699768228572, + "acc_norm": 0.34615384615384615, + "acc_norm_stderr": 0.012150699768228572 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5098039215686274, + "acc_stderr": 0.035086373586305716, + "acc_norm": 0.5098039215686274, + "acc_norm_stderr": 0.035086373586305716 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6121212121212121, + "acc_stderr": 0.0380491365397101, + "acc_norm": 0.6121212121212121, + "acc_norm_stderr": 0.0380491365397101 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2864137086903305, + "mc1_stderr": 0.01582614243950234, + "mc2": 0.4301830436767001, + "mc2_stderr": 0.015186961638298675 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3919716646989374, + "acc_stderr": 0.016784332119424077, + "acc_norm": 0.42857142857142855, 
+ "acc_norm_stderr": 0.017014038119297487 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 
1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "richard-park/llama-3-8B-inst-unsloth-ko-merged", + "model_sha": "9e19543cd11634a8926417d45e76097d7ba281d0", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/richard-park/llama3-deepspeed-v1.0/result_2024-07-04 12:07:46.json b/richard-park/llama3-deepspeed-v1.0/result_2024-07-04 12:07:46.json new file mode 100644 index 0000000000000000000000000000000000000000..a29eedc47801096687fe916dd1bc0280cfd6c188 --- /dev/null +++ b/richard-park/llama3-deepspeed-v1.0/result_2024-07-04 12:07:46.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3984641638225256, + "acc_stderr": 0.014306946052735562, + "acc_norm": 0.44283276450511944, + "acc_norm_stderr": 0.014515573873348907 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3799044015136427, + "acc_stderr": 0.004843708550386518, + "acc_norm": 0.5032861979685321, + "acc_norm_stderr": 0.004989673640014259 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.49707602339181284, + "acc_stderr": 0.03834759370936839, + "acc_norm": 0.49707602339181284, + "acc_norm_stderr": 
0.03834759370936839 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6310679611650486, + "acc_stderr": 0.0477761518115674, + "acc_norm": 0.6310679611650486, + "acc_norm_stderr": 0.0477761518115674 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.48531289910600256, + "acc_stderr": 0.017872248024429112, + "acc_norm": 0.48531289910600256, + "acc_norm_stderr": 0.017872248024429112 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.04171654161354544, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.04171654161354544 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.48936170212765956, + "acc_stderr": 0.03267862331014063, + "acc_norm": 0.48936170212765956, + "acc_norm_stderr": 0.03267862331014063 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.45180722891566266, + "acc_stderr": 0.03874371556587953, + "acc_norm": 0.45180722891566266, + "acc_norm_stderr": 0.03874371556587953 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5305466237942122, + "acc_stderr": 0.028345045864840625, + "acc_norm": 0.5305466237942122, + "acc_norm_stderr": 0.028345045864840625 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.47533632286995514, + "acc_stderr": 0.033516951676526276, + "acc_norm": 0.47533632286995514, + "acc_norm_stderr": 0.033516951676526276 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.46564885496183206, + "acc_stderr": 0.04374928560599738, + "acc_norm": 0.46564885496183206, + "acc_norm_stderr": 0.04374928560599738 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.035402943770953675, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 
0.035402943770953675 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5724137931034483, + "acc_stderr": 0.04122737111370333, + "acc_norm": 0.5724137931034483, + "acc_norm_stderr": 0.04122737111370333 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3235294117647059, + "acc_stderr": 0.04655010411319616, + "acc_norm": 0.3235294117647059, + "acc_norm_stderr": 0.04655010411319616 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5462184873949579, + "acc_stderr": 0.032339434681820885, + "acc_norm": 0.5462184873949579, + "acc_norm_stderr": 0.032339434681820885 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5051282051282051, + "acc_stderr": 0.025349672906838636, + "acc_norm": 0.5051282051282051, + "acc_norm_stderr": 0.025349672906838636 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.57, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.57, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6481481481481481, + "acc_stderr": 0.04616631111801714, + "acc_norm": 0.6481481481481481, + "acc_norm_stderr": 0.04616631111801714 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.43842364532019706, + "acc_stderr": 0.03491207857486519, + "acc_norm": 0.43842364532019706, + "acc_norm_stderr": 0.03491207857486519 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5064516129032258, + "acc_stderr": 0.02844163823354051, + "acc_norm": 0.5064516129032258, + "acc_norm_stderr": 0.02844163823354051 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.717948717948718, + "acc_stderr": 0.029480360549541194, + "acc_norm": 0.717948717948718, + "acc_norm_stderr": 0.029480360549541194 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4867924528301887, + "acc_stderr": 0.030762134874500482, + 
"acc_norm": 0.4867924528301887, + "acc_norm_stderr": 0.030762134874500482 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4818181818181818, + "acc_stderr": 0.04785964010794916, + "acc_norm": 0.4818181818181818, + "acc_norm_stderr": 0.04785964010794916 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.029723278961476664, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.029723278961476664 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.32450331125827814, + "acc_stderr": 0.038227469376587525, + "acc_norm": 0.32450331125827814, + "acc_norm_stderr": 0.038227469376587525 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.03333333333333333, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.03333333333333333 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.44508670520231214, + "acc_stderr": 0.03789401760283647, + "acc_norm": 0.44508670520231214, + "acc_norm_stderr": 0.03789401760283647 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.025107425481137292, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.025107425481137292 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.041227287076512825, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.041227287076512825 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237101, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237101 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.523121387283237, + "acc_stderr": 0.026890297881303125, + "acc_norm": 0.523121387283237, + "acc_norm_stderr": 0.026890297881303125 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4723926380368098, + 
"acc_stderr": 0.039223782906109894, + "acc_norm": 0.4723926380368098, + "acc_norm_stderr": 0.039223782906109894 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5277777777777778, + "acc_stderr": 0.027777777777777804, + "acc_norm": 0.5277777777777778, + "acc_norm_stderr": 0.027777777777777804 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5544041450777202, + "acc_stderr": 0.0358701498607566, + "acc_norm": 0.5544041450777202, + "acc_norm_stderr": 0.0358701498607566 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04434600701584925, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04434600701584925 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5743119266055046, + "acc_stderr": 0.021199235972470806, + "acc_norm": 0.5743119266055046, + "acc_norm_stderr": 0.021199235972470806 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.04360314860077459, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.04360314860077459 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5392156862745098, + "acc_stderr": 0.028541722692618874, + "acc_norm": 0.5392156862745098, + "acc_norm_stderr": 0.028541722692618874 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6198347107438017, + "acc_stderr": 0.04431324501968431, + "acc_norm": 0.6198347107438017, + "acc_norm_stderr": 0.04431324501968431 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5197368421052632, + "acc_stderr": 0.04065771002562603, + "acc_norm": 0.5197368421052632, + "acc_norm_stderr": 0.04065771002562603 + }, + "harness|ko_mmlu_professional_psychology|5": { + 
"acc": 0.40522875816993464, + "acc_stderr": 0.01986115519382917, + "acc_norm": 0.40522875816993464, + "acc_norm_stderr": 0.01986115519382917 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3546099290780142, + "acc_stderr": 0.028538650028878638, + "acc_norm": 0.3546099290780142, + "acc_norm_stderr": 0.028538650028878638 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.44642857142857145, + "acc_stderr": 0.047184714852195886, + "acc_norm": 0.44642857142857145, + "acc_norm_stderr": 0.047184714852195886 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.03362277436608043, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.03362277436608043 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.30837988826815643, + "acc_stderr": 0.015445716910998877, + "acc_norm": 0.30837988826815643, + "acc_norm_stderr": 0.015445716910998877 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.7, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.7, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4007352941176471, + "acc_stderr": 0.02976826352893311, + "acc_norm": 0.4007352941176471, + "acc_norm_stderr": 0.02976826352893311 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6, + "acc_stderr": 0.03136250240935895, + "acc_norm": 0.6, + "acc_norm_stderr": 0.03136250240935895 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6497890295358649, + "acc_stderr": 0.03105239193758435, + "acc_norm": 0.6497890295358649, + "acc_norm_stderr": 0.03105239193758435 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.35528031290743156, + "acc_stderr": 0.01222362336404404, + "acc_norm": 0.35528031290743156, + "acc_norm_stderr": 0.01222362336404404 + }, + 
"harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5833333333333334, + "acc_stderr": 0.03460228327239171, + "acc_norm": 0.5833333333333334, + "acc_norm_stderr": 0.03460228327239171 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.03756335775187896, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.03756335775187896 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2839657282741738, + "mc1_stderr": 0.015785370858396708, + "mc2": 0.44282858825837984, + "mc2_stderr": 0.015289279919853823 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3435655253837072, + "acc_stderr": 0.01632733480642914, + "acc_norm": 0.3730814639905549, + "acc_norm_stderr": 0.01662731827513743 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + 
"harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "richard-park/llama3-deepspeed-v1.0", + "model_sha": "3cf7e4b30bbdda931b83c89de2827666b0fc834c", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/richard-park/llama3-pre1-ds/result_2024-07-17 23:18:04.json b/richard-park/llama3-pre1-ds/result_2024-07-17 23:18:04.json new file mode 100644 index 
0000000000000000000000000000000000000000..613ad74bc49ac54f6ca94f22be9b23d3f11d62bc --- /dev/null +++ b/richard-park/llama3-pre1-ds/result_2024-07-17 23:18:04.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2022184300341297, + "acc_stderr": 0.011737454431872104, + "acc_norm": 0.2645051194539249, + "acc_norm_stderr": 0.01288927294931337 + }, + "harness|ko_hellaswag|10": { + "acc": 0.2513443537143995, + "acc_stderr": 0.004328995510312587, + "acc_norm": 0.24945230033857796, + "acc_norm_stderr": 0.004318117166358328 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.30994152046783624, + "acc_stderr": 0.035469769593931624, + "acc_norm": 0.30994152046783624, + "acc_norm_stderr": 0.035469769593931624 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.32038834951456313, + "acc_stderr": 0.0462028408228004, + "acc_norm": 0.32038834951456313, + "acc_norm_stderr": 0.0462028408228004 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.34099616858237547, + "acc_stderr": 0.01695178138322331, + "acc_norm": 0.34099616858237547, + "acc_norm_stderr": 0.01695178138322331 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2518518518518518, + "acc_stderr": 0.03749850709174022, + "acc_norm": 0.2518518518518518, + "acc_norm_stderr": 0.03749850709174022 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.20425531914893616, + "acc_stderr": 0.026355158413349407, + "acc_norm": 0.20425531914893616, + "acc_norm_stderr": 0.026355158413349407 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3072289156626506, + "acc_stderr": 0.03591566797824662, + "acc_norm": 0.3072289156626506, + "acc_norm_stderr": 0.03591566797824662 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.31511254019292606, + "acc_stderr": 0.02638527370346449, + "acc_norm": 0.31511254019292606, + "acc_norm_stderr": 
0.02638527370346449 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3273542600896861, + "acc_stderr": 0.03149384670994131, + "acc_norm": 0.3273542600896861, + "acc_norm_stderr": 0.03149384670994131 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.366412213740458, + "acc_stderr": 0.042258754519696365, + "acc_norm": 0.366412213740458, + "acc_norm_stderr": 0.042258754519696365 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.44, + "acc_stderr": 0.0498887651569859, + "acc_norm": 0.44, + "acc_norm_stderr": 0.0498887651569859 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.2878787878787879, + "acc_stderr": 0.03225883512300993, + "acc_norm": 0.2878787878787879, + "acc_norm_stderr": 0.03225883512300993 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.31724137931034485, + "acc_stderr": 0.03878352372138622, + "acc_norm": 0.31724137931034485, + "acc_norm_stderr": 0.03878352372138622 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.04023382273617747, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.04023382273617747 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.026265024608275882, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.026265024608275882 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.35384615384615387, + "acc_stderr": 0.02424378399406217, + "acc_norm": 0.35384615384615387, + "acc_norm_stderr": 0.02424378399406217 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.23148148148148148, + "acc_stderr": 0.04077494709252626, + "acc_norm": 0.23148148148148148, + 
"acc_norm_stderr": 0.04077494709252626 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.22660098522167488, + "acc_stderr": 0.029454863835292982, + "acc_norm": 0.22660098522167488, + "acc_norm_stderr": 0.029454863835292982 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.26129032258064516, + "acc_stderr": 0.02499305339776483, + "acc_norm": 0.26129032258064516, + "acc_norm_stderr": 0.02499305339776483 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.21794871794871795, + "acc_stderr": 0.027046857630716688, + "acc_norm": 0.21794871794871795, + "acc_norm_stderr": 0.027046857630716688 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3132075471698113, + "acc_stderr": 0.02854479331905533, + "acc_norm": 0.3132075471698113, + "acc_norm_stderr": 0.02854479331905533 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2909090909090909, + "acc_stderr": 0.04350271442923243, + "acc_norm": 0.2909090909090909, + "acc_norm_stderr": 0.04350271442923243 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.026719240783712166, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.026719240783712166 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.25165562913907286, + "acc_stderr": 0.035433042343899844, + "acc_norm": 0.25165562913907286, + "acc_norm_stderr": 0.035433042343899844 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.31343283582089554, + "acc_stderr": 0.03280188205348644, + "acc_norm": 0.31343283582089554, + "acc_norm_stderr": 0.03280188205348644 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.32947976878612717, + "acc_stderr": 0.035839017547364134, + "acc_norm": 0.32947976878612717, + "acc_norm_stderr": 0.035839017547364134 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2566137566137566, + "acc_stderr": 0.022494510767503154, + "acc_norm": 0.2566137566137566, + "acc_norm_stderr": 0.022494510767503154 + }, + 
"harness|ko_mmlu_college_biology|5": { + "acc": 0.2569444444444444, + "acc_stderr": 0.03653946969442099, + "acc_norm": 0.2569444444444444, + "acc_norm_stderr": 0.03653946969442099 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.30346820809248554, + "acc_stderr": 0.024752411960917202, + "acc_norm": 0.30346820809248554, + "acc_norm_stderr": 0.024752411960917202 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2331288343558282, + "acc_stderr": 0.0332201579577674, + "acc_norm": 0.2331288343558282, + "acc_norm_stderr": 0.0332201579577674 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2993827160493827, + "acc_stderr": 0.02548311560119547, + "acc_norm": 0.2993827160493827, + "acc_norm_stderr": 0.02548311560119547 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.39378238341968913, + "acc_stderr": 0.035260770955482364, + "acc_norm": 0.39378238341968913, + "acc_norm_stderr": 0.035260770955482364 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.04049339297748141, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.04049339297748141 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3376146788990826, + "acc_stderr": 0.020275265986638907, + "acc_norm": 0.3376146788990826, + "acc_norm_stderr": 0.020275265986638907 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.15079365079365079, + "acc_stderr": 0.03200686497287392, + "acc_norm": 0.15079365079365079, + "acc_norm_stderr": 
0.03200686497287392 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3660130718954248, + "acc_stderr": 0.027582811415159628, + "acc_norm": 0.3660130718954248, + "acc_norm_stderr": 0.027582811415159628 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.4049586776859504, + "acc_stderr": 0.044811377559424694, + "acc_norm": 0.4049586776859504, + "acc_norm_stderr": 0.044811377559424694 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3092105263157895, + "acc_stderr": 0.03761070869867479, + "acc_norm": 0.3092105263157895, + "acc_norm_stderr": 0.03761070869867479 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.20915032679738563, + "acc_stderr": 0.01645339933227933, + "acc_norm": 0.20915032679738563, + "acc_norm_stderr": 0.01645339933227933 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.25177304964539005, + "acc_stderr": 0.0258921511567094, + "acc_norm": 0.25177304964539005, + "acc_norm_stderr": 0.0258921511567094 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.24107142857142858, + "acc_stderr": 0.04059867246952689, + "acc_norm": 0.24107142857142858, + "acc_norm_stderr": 0.04059867246952689 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4675925925925926, + "acc_stderr": 0.03402801581358966, + "acc_norm": 0.4675925925925926, + "acc_norm_stderr": 0.03402801581358966 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24692737430167597, + "acc_stderr": 0.014422292204808852, + "acc_norm": 0.24692737430167597, + "acc_norm_stderr": 0.014422292204808852 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 
0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.41544117647058826, + "acc_stderr": 0.02993534270787775, + "acc_norm": 0.41544117647058826, + "acc_norm_stderr": 0.02993534270787775 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4, + "acc_stderr": 0.031362502409358915, + "acc_norm": 0.4, + "acc_norm_stderr": 0.031362502409358915 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2869198312236287, + "acc_stderr": 0.029443773022594693, + "acc_norm": 0.2869198312236287, + "acc_norm_stderr": 0.029443773022594693 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.27640156453715775, + "acc_stderr": 0.011422153194553572, + "acc_norm": 0.27640156453715775, + "acc_norm_stderr": 0.011422153194553572 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.28921568627450983, + "acc_stderr": 0.031822318676475544, + "acc_norm": 0.28921568627450983, + "acc_norm_stderr": 0.031822318676475544 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2787878787878788, + "acc_stderr": 0.03501438706296781, + "acc_norm": 0.2787878787878788, + "acc_norm_stderr": 0.03501438706296781 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2533659730722154, + "mc1_stderr": 0.015225899340826835, + "mc2": 0.514740530095073, + "mc2_stderr": 0.017139161265526514 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.11097992916174734, + "acc_stderr": 0.010799230802056048, + "acc_norm": 0.3282172373081464, + "acc_norm_stderr": 0.01614395503618444 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, 
+ "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + 
"harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "richard-park/llama3-pre1-ds", + "model_sha": "f66b2fb6c31bcbe8a881eed9400c21854b91a0e7", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/richard-park/sft-ds-14k/result_2024-07-12 01:18:48.json b/richard-park/sft-ds-14k/result_2024-07-12 01:18:48.json new file mode 100644 index 0000000000000000000000000000000000000000..5d424666383a1aaf8a10e223d344c941d4485f7d --- /dev/null +++ b/richard-park/sft-ds-14k/result_2024-07-12 01:18:48.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.37457337883959047, + "acc_stderr": 0.014144193471893452, + "acc_norm": 0.4274744027303754, + "acc_norm_stderr": 0.014456862944650645 + }, + "harness|ko_hellaswag|10": { + "acc": 0.39165504879506075, + "acc_stderr": 0.004871226629346399, + "acc_norm": 0.5272854013144792, + "acc_norm_stderr": 0.004982346155911128 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.03615507630310935, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.03615507630310935 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.7475728155339806, + "acc_stderr": 0.043012503996908764, + "acc_norm": 0.7475728155339806, + "acc_norm_stderr": 0.043012503996908764 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5696040868454662, + "acc_stderr": 0.01770586877629239, + "acc_norm": 0.5696040868454662, + "acc_norm_stderr": 0.01770586877629239 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4, + "acc_stderr": 0.04232073695151589, + "acc_norm": 0.4, + "acc_norm_stderr": 
0.04232073695151589 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542124, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542124 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.49361702127659574, + "acc_stderr": 0.03268335899936337, + "acc_norm": 0.49361702127659574, + "acc_norm_stderr": 0.03268335899936337 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4819277108433735, + "acc_stderr": 0.038899512528272166, + "acc_norm": 0.4819277108433735, + "acc_norm_stderr": 0.038899512528272166 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5401929260450161, + "acc_stderr": 0.028306190403305693, + "acc_norm": 0.5401929260450161, + "acc_norm_stderr": 0.028306190403305693 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5291479820627802, + "acc_stderr": 0.03350073248773404, + "acc_norm": 0.5291479820627802, + "acc_norm_stderr": 0.03350073248773404 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5419847328244275, + "acc_stderr": 0.04369802690578757, + "acc_norm": 0.5419847328244275, + "acc_norm_stderr": 0.04369802690578757 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.44, + "acc_stderr": 0.0498887651569859, + "acc_norm": 0.44, + "acc_norm_stderr": 0.0498887651569859 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6919191919191919, + "acc_stderr": 0.03289477330098614, + "acc_norm": 0.6919191919191919, + "acc_norm_stderr": 0.03289477330098614 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5793103448275863, + "acc_stderr": 0.0411391498118926, + "acc_norm": 0.5793103448275863, + "acc_norm_stderr": 0.0411391498118926 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3235294117647059, + "acc_stderr": 0.046550104113196156, + "acc_norm": 0.3235294117647059, + "acc_norm_stderr": 0.046550104113196156 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5588235294117647, + "acc_stderr": 0.03225294232399639, + "acc_norm": 0.5588235294117647, + 
"acc_norm_stderr": 0.03225294232399639 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5205128205128206, + "acc_stderr": 0.02532966316348994, + "acc_norm": 0.5205128205128206, + "acc_norm_stderr": 0.02532966316348994 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6111111111111112, + "acc_stderr": 0.0471282125742677, + "acc_norm": 0.6111111111111112, + "acc_norm_stderr": 0.0471282125742677 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4630541871921182, + "acc_stderr": 0.035083705204426656, + "acc_norm": 0.4630541871921182, + "acc_norm_stderr": 0.035083705204426656 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5387096774193548, + "acc_stderr": 0.028358634859836945, + "acc_norm": 0.5387096774193548, + "acc_norm_stderr": 0.028358634859836945 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7606837606837606, + "acc_stderr": 0.027951826808924333, + "acc_norm": 0.7606837606837606, + "acc_norm_stderr": 0.027951826808924333 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.49433962264150944, + "acc_stderr": 0.030770900763851302, + "acc_norm": 0.49433962264150944, + "acc_norm_stderr": 0.030770900763851302 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5818181818181818, + "acc_stderr": 0.04724577405731572, + "acc_norm": 0.5818181818181818, + "acc_norm_stderr": 0.04724577405731572 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3851851851851852, + "acc_stderr": 0.029670906124630882, + "acc_norm": 0.3851851851851852, + "acc_norm_stderr": 0.029670906124630882 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.39072847682119205, + "acc_stderr": 
0.03983798306659808, + "acc_norm": 0.39072847682119205, + "acc_norm_stderr": 0.03983798306659808 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6517412935323383, + "acc_stderr": 0.033687874661154596, + "acc_norm": 0.6517412935323383, + "acc_norm_stderr": 0.033687874661154596 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4624277456647399, + "acc_stderr": 0.0380168510452446, + "acc_norm": 0.4624277456647399, + "acc_norm_stderr": 0.0380168510452446 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.37566137566137564, + "acc_stderr": 0.02494236893115978, + "acc_norm": 0.37566137566137564, + "acc_norm_stderr": 0.02494236893115978 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5069444444444444, + "acc_stderr": 0.04180806750294939, + "acc_norm": 0.5069444444444444, + "acc_norm_stderr": 0.04180806750294939 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.7, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.7, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5722543352601156, + "acc_stderr": 0.02663653974111609, + "acc_norm": 0.5722543352601156, + "acc_norm_stderr": 0.02663653974111609 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.48466257668711654, + "acc_stderr": 0.039265223787088424, + "acc_norm": 0.48466257668711654, + "acc_norm_stderr": 0.039265223787088424 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5370370370370371, + "acc_stderr": 0.02774431344337654, + "acc_norm": 0.5370370370370371, + "acc_norm_stderr": 0.02774431344337654 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6269430051813472, + 
"acc_stderr": 0.03490205592048574, + "acc_norm": 0.6269430051813472, + "acc_norm_stderr": 0.03490205592048574 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.39473684210526316, + "acc_stderr": 0.04598188057816542, + "acc_norm": 0.39473684210526316, + "acc_norm_stderr": 0.04598188057816542 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5871559633027523, + "acc_stderr": 0.02110912813341392, + "acc_norm": 0.5871559633027523, + "acc_norm_stderr": 0.02110912813341392 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.42063492063492064, + "acc_stderr": 0.04415438226743744, + "acc_norm": 0.42063492063492064, + "acc_norm_stderr": 0.04415438226743744 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.02845263998508801, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.02845263998508801 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7272727272727273, + "acc_stderr": 0.040655781409087044, + "acc_norm": 0.7272727272727273, + "acc_norm_stderr": 0.040655781409087044 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5526315789473685, + "acc_stderr": 0.0404633688397825, + "acc_norm": 0.5526315789473685, + "acc_norm_stderr": 0.0404633688397825 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.45751633986928103, + "acc_stderr": 0.020154685712590888, + "acc_norm": 0.45751633986928103, + "acc_norm_stderr": 0.020154685712590888 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32978723404255317, + "acc_stderr": 0.028045946942042398, + "acc_norm": 0.32978723404255317, + "acc_norm_stderr": 0.028045946942042398 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.48214285714285715, + "acc_stderr": 0.047427623612430116, + "acc_norm": 0.48214285714285715, + "acc_norm_stderr": 0.047427623612430116 + }, + 
"harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4861111111111111, + "acc_stderr": 0.03408655867977749, + "acc_norm": 0.4861111111111111, + "acc_norm_stderr": 0.03408655867977749 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.3139664804469274, + "acc_stderr": 0.015521923933523647, + "acc_norm": 0.3139664804469274, + "acc_norm_stderr": 0.015521923933523647 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.63, + "acc_stderr": 0.048523658709390974, + "acc_norm": 0.63, + "acc_norm_stderr": 0.048523658709390974 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.03016191193076711, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.03016191193076711 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5877551020408164, + "acc_stderr": 0.031512360446742695, + "acc_norm": 0.5877551020408164, + "acc_norm_stderr": 0.031512360446742695 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.679324894514768, + "acc_stderr": 0.030381931949990417, + "acc_norm": 0.679324894514768, + "acc_norm_stderr": 0.030381931949990417 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.36962190352020863, + "acc_stderr": 0.01232844577857526, + "acc_norm": 0.36962190352020863, + "acc_norm_stderr": 0.01232844577857526 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6029411764705882, + "acc_stderr": 0.03434131164719131, + "acc_norm": 0.6029411764705882, + "acc_norm_stderr": 0.03434131164719131 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6242424242424243, + "acc_stderr": 0.03781887353205983, + "acc_norm": 0.6242424242424243, + "acc_norm_stderr": 0.03781887353205983 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.27050183598531213, + "mc1_stderr": 0.01555077833284288, + "mc2": 
0.4263802738646326, + "mc2_stderr": 0.014955112450073211 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.47107438016528924, + "acc_stderr": 0.017161563949916348, + "acc_norm": 0.6103896103896104, + "acc_norm_stderr": 0.01676616167189351 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + 
"harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "richard-park/sft-ds-14k", + "model_sha": "e6cb40782bf2412dbd2fd7566c88f3f76c1494c9", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/rombodawg/Everyone-Coder-4x7b-Base/result_2024-07-27 03:41:43.json b/rombodawg/Everyone-Coder-4x7b-Base/result_2024-07-27 03:41:43.json new file mode 100644 index 0000000000000000000000000000000000000000..18f39f2bc3a99b35bcf1a3b42e6135268fde066c --- /dev/null +++ b/rombodawg/Everyone-Coder-4x7b-Base/result_2024-07-27 03:41:43.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.35494880546075086, + "acc_stderr": 0.013983036904094094, + "acc_norm": 0.40102389078498296, + "acc_norm_stderr": 0.014322255790719864 + }, + "harness|ko_hellaswag|10": { + "acc": 0.38388767177853017, + "acc_stderr": 0.004853371646239248, + "acc_norm": 0.49512049392551283, + "acc_norm_stderr": 
0.0049895437965932835 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5263157894736842, + "acc_stderr": 0.03829509868994727, + "acc_norm": 0.5263157894736842, + "acc_norm_stderr": 0.03829509868994727 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5533980582524272, + "acc_stderr": 0.04922424153458935, + "acc_norm": 0.5533980582524272, + "acc_norm_stderr": 0.04922424153458935 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.47509578544061304, + "acc_stderr": 0.017857770704901018, + "acc_norm": 0.47509578544061304, + "acc_norm_stderr": 0.017857770704901018 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4, + "acc_stderr": 0.04232073695151589, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04232073695151589 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4, + "acc_stderr": 0.03202563076101735, + "acc_norm": 0.4, + "acc_norm_stderr": 0.03202563076101735 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39759036144578314, + "acc_stderr": 0.03809973084540219, + "acc_norm": 0.39759036144578314, + "acc_norm_stderr": 0.03809973084540219 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5112540192926045, + "acc_stderr": 0.028390897396863533, + "acc_norm": 0.5112540192926045, + "acc_norm_stderr": 0.028390897396863533 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.45739910313901344, + "acc_stderr": 0.033435777055830646, + "acc_norm": 0.45739910313901344, + "acc_norm_stderr": 0.033435777055830646 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.46564885496183206, + "acc_stderr": 0.04374928560599738, + "acc_norm": 0.46564885496183206, + "acc_norm_stderr": 0.04374928560599738 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + 
"harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5808080808080808, + "acc_stderr": 0.03515520728670417, + "acc_norm": 0.5808080808080808, + "acc_norm_stderr": 0.03515520728670417 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4689655172413793, + "acc_stderr": 0.04158632762097828, + "acc_norm": 0.4689655172413793, + "acc_norm_stderr": 0.04158632762097828 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.04617034827006716, + "acc_norm": 0.3137254901960784, + "acc_norm_stderr": 0.04617034827006716 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5504201680672269, + "acc_stderr": 0.03231293497137707, + "acc_norm": 0.5504201680672269, + "acc_norm_stderr": 0.03231293497137707 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4512820512820513, + "acc_stderr": 0.02523038123893483, + "acc_norm": 0.4512820512820513, + "acc_norm_stderr": 0.02523038123893483 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.63, + "acc_stderr": 0.048523658709390974, + "acc_norm": 0.63, + "acc_norm_stderr": 0.048523658709390974 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4088669950738916, + "acc_stderr": 0.03459058815883231, + "acc_norm": 0.4088669950738916, + "acc_norm_stderr": 0.03459058815883231 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4645161290322581, + "acc_stderr": 0.028372287797962952, + "acc_norm": 0.4645161290322581, + "acc_norm_stderr": 0.028372287797962952 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7307692307692307, + "acc_stderr": 0.029058588303748845, + "acc_norm": 0.7307692307692307, + "acc_norm_stderr": 
0.029058588303748845 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4377358490566038, + "acc_stderr": 0.03053333843046751, + "acc_norm": 0.4377358490566038, + "acc_norm_stderr": 0.03053333843046751 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5, + "acc_stderr": 0.04789131426105757, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04789131426105757 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.34074074074074073, + "acc_stderr": 0.028897748741131143, + "acc_norm": 0.34074074074074073, + "acc_norm_stderr": 0.028897748741131143 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.03757949922943343, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.03757949922943343 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6368159203980099, + "acc_stderr": 0.034005985055990146, + "acc_norm": 0.6368159203980099, + "acc_norm_stderr": 0.034005985055990146 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3872832369942196, + "acc_stderr": 0.03714325906302065, + "acc_norm": 0.3872832369942196, + "acc_norm_stderr": 0.03714325906302065 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.36243386243386244, + "acc_stderr": 0.024757473902752052, + "acc_norm": 0.36243386243386244, + "acc_norm_stderr": 0.024757473902752052 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3680555555555556, + "acc_stderr": 0.040329990539607195, + "acc_norm": 0.3680555555555556, + "acc_norm_stderr": 0.040329990539607195 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237101, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237101 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5115606936416185, + "acc_stderr": 0.02691189868637792, + "acc_norm": 0.5115606936416185, + 
"acc_norm_stderr": 0.02691189868637792 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5460122699386503, + "acc_stderr": 0.0391170190467718, + "acc_norm": 0.5460122699386503, + "acc_norm_stderr": 0.0391170190467718 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.027648477877413313, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.027648477877413313 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.45595854922279794, + "acc_stderr": 0.03594413711272437, + "acc_norm": 0.45595854922279794, + "acc_norm_stderr": 0.03594413711272437 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3508771929824561, + "acc_stderr": 0.04489539350270698, + "acc_norm": 0.3508771929824561, + "acc_norm_stderr": 0.04489539350270698 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.4935779816513762, + "acc_stderr": 0.021435554820013077, + "acc_norm": 0.4935779816513762, + "acc_norm_stderr": 0.021435554820013077 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3968253968253968, + "acc_stderr": 0.04375888492727061, + "acc_norm": 0.3968253968253968, + "acc_norm_stderr": 0.04375888492727061 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.49019607843137253, + "acc_stderr": 0.028624412550167965, + "acc_norm": 0.49019607843137253, + "acc_norm_stderr": 0.028624412550167965 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7024793388429752, + "acc_stderr": 0.04173349148083499, + "acc_norm": 0.7024793388429752, + "acc_norm_stderr": 0.04173349148083499 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4276315789473684, + "acc_stderr": 0.04026097083296558, + "acc_norm": 
0.4276315789473684, + "acc_norm_stderr": 0.04026097083296558 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.43300653594771243, + "acc_stderr": 0.020045442473324227, + "acc_norm": 0.43300653594771243, + "acc_norm_stderr": 0.020045442473324227 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3475177304964539, + "acc_stderr": 0.028406627809590947, + "acc_norm": 0.3475177304964539, + "acc_norm_stderr": 0.028406627809590947 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4375, + "acc_stderr": 0.04708567521880525, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.04708567521880525 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.03293377139415191, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.03293377139415191 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.288268156424581, + "acc_stderr": 0.015149132860209429, + "acc_norm": 0.288268156424581, + "acc_norm_stderr": 0.015149132860209429 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.62, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.62, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3492647058823529, + "acc_stderr": 0.02895975519682487, + "acc_norm": 0.3492647058823529, + "acc_norm_stderr": 0.02895975519682487 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5591836734693878, + "acc_stderr": 0.03178419114175363, + "acc_norm": 0.5591836734693878, + "acc_norm_stderr": 0.03178419114175363 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6286919831223629, + "acc_stderr": 0.03145068600744858, + "acc_norm": 0.6286919831223629, + "acc_norm_stderr": 0.03145068600744858 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3494132985658409, 
+ "acc_stderr": 0.012177306252786697, + "acc_norm": 0.3494132985658409, + "acc_norm_stderr": 0.012177306252786697 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.46078431372549017, + "acc_stderr": 0.03498501649369527, + "acc_norm": 0.46078431372549017, + "acc_norm_stderr": 0.03498501649369527 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5333333333333333, + "acc_stderr": 0.03895658065271847, + "acc_norm": 0.5333333333333333, + "acc_norm_stderr": 0.03895658065271847 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.30599755201958384, + "mc1_stderr": 0.01613222972815506, + "mc2": 0.48792407160583323, + "mc2_stderr": 0.01578578869554025 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4757969303423849, + "acc_stderr": 0.017170202466520748, + "acc_norm": 0.4982290436835891, + "acc_norm_stderr": 0.017190246276231867 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + 
"harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "rombodawg/Everyone-Coder-4x7b-Base", + "model_sha": "5e4757ee9875ee65df9216dcc61208bd504d4632", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/royallab/ZephRP-m7b/result_2024-05-16 06:35:15.json 
b/royallab/ZephRP-m7b/result_2024-05-16 06:35:15.json new file mode 100644 index 0000000000000000000000000000000000000000..3886c3e31c13a0e6b8a7c6f6b236e486d179c72a --- /dev/null +++ b/royallab/ZephRP-m7b/result_2024-05-16 06:35:15.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.31399317406143346, + "acc_stderr": 0.013562691224726286, + "acc_norm": 0.3720136518771331, + "acc_norm_stderr": 0.014124597881844453 + }, + "harness|ko_hellaswag|10": { + "acc": 0.35749850627365065, + "acc_stderr": 0.004782838352222534, + "acc_norm": 0.4565823541127266, + "acc_norm_stderr": 0.0049709334202319285 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.45614035087719296, + "acc_stderr": 0.03820042586602966, + "acc_norm": 0.45614035087719296, + "acc_norm_stderr": 0.03820042586602966 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5922330097087378, + "acc_stderr": 0.04865777570410768, + "acc_norm": 0.5922330097087378, + "acc_norm_stderr": 0.04865777570410768 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4240102171136654, + "acc_stderr": 0.017672263329084236, + "acc_norm": 0.4240102171136654, + "acc_norm_stderr": 0.017672263329084236 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4, + "acc_stderr": 0.04232073695151589, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04232073695151589 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2936170212765957, + "acc_stderr": 0.029771642712491227, + "acc_norm": 0.2936170212765957, + "acc_norm_stderr": 0.029771642712491227 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.41566265060240964, + "acc_stderr": 0.038367221765980515, + "acc_norm": 0.41566265060240964, + "acc_norm_stderr": 0.038367221765980515 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.47266881028938906, + "acc_stderr": 0.028355633568328174, + "acc_norm": 
0.47266881028938906, + "acc_norm_stderr": 0.028355633568328174 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3721973094170404, + "acc_stderr": 0.03244305283008731, + "acc_norm": 0.3721973094170404, + "acc_norm_stderr": 0.03244305283008731 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4198473282442748, + "acc_stderr": 0.043285772152629715, + "acc_norm": 0.4198473282442748, + "acc_norm_stderr": 0.043285772152629715 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5050505050505051, + "acc_stderr": 0.035621707606254015, + "acc_norm": 0.5050505050505051, + "acc_norm_stderr": 0.035621707606254015 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4413793103448276, + "acc_stderr": 0.04137931034482758, + "acc_norm": 0.4413793103448276, + "acc_norm_stderr": 0.04137931034482758 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.30392156862745096, + "acc_stderr": 0.045766654032077615, + "acc_norm": 0.30392156862745096, + "acc_norm_stderr": 0.045766654032077615 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4831932773109244, + "acc_stderr": 0.03246013680375308, + "acc_norm": 0.4831932773109244, + "acc_norm_stderr": 0.03246013680375308 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4307692307692308, + "acc_stderr": 0.025106820660539746, + "acc_norm": 0.4307692307692308, + "acc_norm_stderr": 0.025106820660539746 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.63, + "acc_stderr": 0.04852365870939098, + "acc_norm": 0.63, + "acc_norm_stderr": 0.04852365870939098 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4722222222222222, + "acc_stderr": 
0.048262172941398944, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.048262172941398944 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.37438423645320196, + "acc_stderr": 0.03405155380561952, + "acc_norm": 0.37438423645320196, + "acc_norm_stderr": 0.03405155380561952 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.45161290322580644, + "acc_stderr": 0.02831050034856839, + "acc_norm": 0.45161290322580644, + "acc_norm_stderr": 0.02831050034856839 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5982905982905983, + "acc_stderr": 0.03211693751051621, + "acc_norm": 0.5982905982905983, + "acc_norm_stderr": 0.03211693751051621 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4339622641509434, + "acc_stderr": 0.030503292013342592, + "acc_norm": 0.4339622641509434, + "acc_norm_stderr": 0.030503292013342592 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4636363636363636, + "acc_stderr": 0.047764491623961985, + "acc_norm": 0.4636363636363636, + "acc_norm_stderr": 0.047764491623961985 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.31851851851851853, + "acc_stderr": 0.028406533090608463, + "acc_norm": 0.31851851851851853, + "acc_norm_stderr": 0.028406533090608463 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.037101857261199946, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.037101857261199946 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5522388059701493, + "acc_stderr": 0.03516184772952167, + "acc_norm": 0.5522388059701493, + "acc_norm_stderr": 0.03516184772952167 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3583815028901734, + "acc_stderr": 0.036563436533531585, + "acc_norm": 0.3583815028901734, + "acc_norm_stderr": 0.036563436533531585 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.34656084656084657, + "acc_stderr": 0.024508777521028435, + "acc_norm": 0.34656084656084657, + "acc_norm_stderr": 
0.024508777521028435 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3402777777777778, + "acc_stderr": 0.039621355734862175, + "acc_norm": 0.3402777777777778, + "acc_norm_stderr": 0.039621355734862175 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.55, + "acc_stderr": 0.05, + "acc_norm": 0.55, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4595375722543353, + "acc_stderr": 0.026830805998952233, + "acc_norm": 0.4595375722543353, + "acc_norm_stderr": 0.026830805998952233 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.44785276073619634, + "acc_stderr": 0.03906947479456601, + "acc_norm": 0.44785276073619634, + "acc_norm_stderr": 0.03906947479456601 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.39197530864197533, + "acc_stderr": 0.027163686038271236, + "acc_norm": 0.39197530864197533, + "acc_norm_stderr": 0.027163686038271236 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384739, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384739 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5025906735751295, + "acc_stderr": 0.03608390745384486, + "acc_norm": 0.5025906735751295, + "acc_norm_stderr": 0.03608390745384486 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2894736842105263, + "acc_stderr": 0.04266339443159394, + "acc_norm": 0.2894736842105263, + "acc_norm_stderr": 0.04266339443159394 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.45688073394495415, + "acc_stderr": 0.02135745878522621, + "acc_norm": 0.45688073394495415, + "acc_norm_stderr": 0.02135745878522621 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3492063492063492, + "acc_stderr": 0.04263906892795134, + "acc_norm": 0.3492063492063492, + "acc_norm_stderr": 
0.04263906892795134 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.028452639985088, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.028452639985088 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6033057851239669, + "acc_stderr": 0.044658697805310094, + "acc_norm": 0.6033057851239669, + "acc_norm_stderr": 0.044658697805310094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4342105263157895, + "acc_stderr": 0.04033565667848319, + "acc_norm": 0.4342105263157895, + "acc_norm_stderr": 0.04033565667848319 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3627450980392157, + "acc_stderr": 0.019450768432505518, + "acc_norm": 0.3627450980392157, + "acc_norm_stderr": 0.019450768432505518 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32978723404255317, + "acc_stderr": 0.028045946942042405, + "acc_norm": 0.32978723404255317, + "acc_norm_stderr": 0.028045946942042405 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.33035714285714285, + "acc_stderr": 0.04464285714285713, + "acc_norm": 0.33035714285714285, + "acc_norm_stderr": 0.04464285714285713 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.03388857118502325, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.03388857118502325 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.3240223463687151, + "acc_stderr": 0.015652542496421125, + "acc_norm": 0.3240223463687151, + "acc_norm_stderr": 0.015652542496421125 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.57, + "acc_stderr": 0.04975698519562427, + "acc_norm": 0.57, + 
"acc_norm_stderr": 0.04975698519562427 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3639705882352941, + "acc_stderr": 0.029227192460032025, + "acc_norm": 0.3639705882352941, + "acc_norm_stderr": 0.029227192460032025 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.47346938775510206, + "acc_stderr": 0.03196412734523272, + "acc_norm": 0.47346938775510206, + "acc_norm_stderr": 0.03196412734523272 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5063291139240507, + "acc_stderr": 0.03254462010767859, + "acc_norm": 0.5063291139240507, + "acc_norm_stderr": 0.03254462010767859 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.31290743155149936, + "acc_stderr": 0.011842529823063, + "acc_norm": 0.31290743155149936, + "acc_norm_stderr": 0.011842529823063 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.032566854844603886, + "acc_norm": 0.3137254901960784, + "acc_norm_stderr": 0.032566854844603886 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.03681050869161549, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.03681050869161549 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3427172582619339, + "mc1_stderr": 0.016614949385347046, + "mc2": 0.5134624017198415, + "mc2_stderr": 0.015820972848102258 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.36717827626918537, + "acc_stderr": 0.016572727807458595, + "acc_norm": 0.38016528925619836, + "acc_norm_stderr": 0.016689333596980115 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + 
"harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + 
"harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "royallab/ZephRP-m7b", + "model_sha": "4ef26ff937765b3fb279151ba9af48fb42c03932", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/rrw-x2/KoSOLAR-10.7B-qlora-v1.2.1/result_2024-03-27 11:19:26.json b/rrw-x2/KoSOLAR-10.7B-qlora-v1.2.1/result_2024-03-27 11:19:26.json new file mode 100644 index 0000000000000000000000000000000000000000..daf348487b40f1d26c0268b6d839ac4fe23f63cb --- /dev/null +++ b/rrw-x2/KoSOLAR-10.7B-qlora-v1.2.1/result_2024-03-27 11:19:26.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.19027303754266212, + "acc_stderr": 0.011470424179225695, + "acc_norm": 0.25597269624573377, + "acc_norm_stderr": 0.012753013241244525 + }, + "harness|ko_hellaswag|10": { + "acc": 0.2529376618203545, + "acc_stderr": 0.004338071318912316, + "acc_norm": 0.2545309699263095, + "acc_norm_stderr": 0.0043470700195274775 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.3216374269005848, + "acc_stderr": 0.03582529442573122, + "acc_norm": 0.3216374269005848, + "acc_norm_stderr": 0.03582529442573122 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.17475728155339806, + "acc_stderr": 0.037601780060266196, + "acc_norm": 0.17475728155339806, + "acc_norm_stderr": 0.037601780060266196 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.23754789272030652, + "acc_stderr": 0.015218733046150191, + "acc_norm": 0.23754789272030652, + "acc_norm_stderr": 0.015218733046150191 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.18518518518518517, 
+ "acc_stderr": 0.03355677216313142, + "acc_norm": 0.18518518518518517, + "acc_norm_stderr": 0.03355677216313142 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932268, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932268 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.26382978723404255, + "acc_stderr": 0.028809989854102987, + "acc_norm": 0.26382978723404255, + "acc_norm_stderr": 0.028809989854102987 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.28313253012048195, + "acc_stderr": 0.03507295431370518, + "acc_norm": 0.28313253012048195, + "acc_norm_stderr": 0.03507295431370518 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.1864951768488746, + "acc_stderr": 0.022122439772480757, + "acc_norm": 0.1864951768488746, + "acc_norm_stderr": 0.022122439772480757 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.31390134529147984, + "acc_stderr": 0.031146796482972465, + "acc_norm": 0.31390134529147984, + "acc_norm_stderr": 0.031146796482972465 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2595419847328244, + "acc_stderr": 0.03844876139785271, + "acc_norm": 0.2595419847328244, + "acc_norm_stderr": 0.03844876139785271 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.17676767676767677, + "acc_stderr": 0.027178752639044915, + "acc_norm": 0.17676767676767677, + "acc_norm_stderr": 0.027178752639044915 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2413793103448276, + "acc_stderr": 0.03565998174135302, + "acc_norm": 0.2413793103448276, + "acc_norm_stderr": 0.03565998174135302 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237654, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237654 + }, + 
"harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.21008403361344538, + "acc_stderr": 0.026461398717471874, + "acc_norm": 0.21008403361344538, + "acc_norm_stderr": 0.026461398717471874 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.20256410256410257, + "acc_stderr": 0.020377660970371393, + "acc_norm": 0.20256410256410257, + "acc_norm_stderr": 0.020377660970371393 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.18, + "acc_stderr": 0.038612291966536955, + "acc_norm": 0.18, + "acc_norm_stderr": 0.038612291966536955 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.04236511258094633, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.04236511258094633 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.15270935960591134, + "acc_stderr": 0.025308904539380627, + "acc_norm": 0.15270935960591134, + "acc_norm_stderr": 0.025308904539380627 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.1774193548387097, + "acc_stderr": 0.021732540689329276, + "acc_norm": 0.1774193548387097, + "acc_norm_stderr": 0.021732540689329276 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2905982905982906, + "acc_stderr": 0.029745048572674057, + "acc_norm": 0.2905982905982906, + "acc_norm_stderr": 0.029745048572674057 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.21509433962264152, + "acc_stderr": 0.025288394502891377, + "acc_norm": 0.21509433962264152, + "acc_norm_stderr": 0.025288394502891377 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03955932861795833, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03955932861795833 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2111111111111111, + "acc_stderr": 0.024882116857655075, + "acc_norm": 
0.2111111111111111, + "acc_norm_stderr": 0.024882116857655075 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.1986754966887417, + "acc_stderr": 0.03257847384436775, + "acc_norm": 0.1986754966887417, + "acc_norm_stderr": 0.03257847384436775 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.24378109452736318, + "acc_stderr": 0.030360490154014645, + "acc_norm": 0.24378109452736318, + "acc_norm_stderr": 0.030360490154014645 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.20809248554913296, + "acc_stderr": 0.030952890217749884, + "acc_norm": 0.20809248554913296, + "acc_norm_stderr": 0.030952890217749884 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.20899470899470898, + "acc_stderr": 0.020940481565334835, + "acc_norm": 0.20899470899470898, + "acc_norm_stderr": 0.020940481565334835 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2569444444444444, + "acc_stderr": 0.03653946969442099, + "acc_norm": 0.2569444444444444, + "acc_norm_stderr": 0.03653946969442099 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.2, + "acc_stderr": 0.040201512610368445, + "acc_norm": 0.2, + "acc_norm_stderr": 0.040201512610368445 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542129, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542129 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.24855491329479767, + "acc_stderr": 0.023267528432100174, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 0.023267528432100174 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.22085889570552147, + "acc_stderr": 0.03259177392742178, + "acc_norm": 0.22085889570552147, + "acc_norm_stderr": 0.03259177392742178 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.21604938271604937, + "acc_stderr": 0.022899162918445796, + "acc_norm": 0.21604938271604937, + "acc_norm_stderr": 0.022899162918445796 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.21, + "acc_stderr": 
0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.19689119170984457, + "acc_stderr": 0.028697873971860677, + "acc_norm": 0.19689119170984457, + "acc_norm_stderr": 0.028697873971860677 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.039994238792813365, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.039994238792813365 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.1926605504587156, + "acc_stderr": 0.016909276884936097, + "acc_norm": 0.1926605504587156, + "acc_norm_stderr": 0.016909276884936097 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.04040610178208841, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.04040610178208841 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.02392915551735129, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.02392915551735129 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2396694214876033, + "acc_stderr": 0.038968789850704164, + "acc_norm": 0.2396694214876033, + "acc_norm_stderr": 0.038968789850704164 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.17763157894736842, + "acc_stderr": 0.03110318238312338, + "acc_norm": 0.17763157894736842, + "acc_norm_stderr": 0.03110318238312338 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.25, + "acc_stderr": 0.01751781884501444, + "acc_norm": 0.25, + "acc_norm_stderr": 0.01751781884501444 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.23404255319148937, + "acc_stderr": 0.025257861359432414, + "acc_norm": 0.23404255319148937, + "acc_norm_stderr": 0.025257861359432414 + }, + "harness|ko_mmlu_machine_learning|5": { + 
"acc": 0.3125, + "acc_stderr": 0.043994650575715215, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.043994650575715215 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.1527777777777778, + "acc_stderr": 0.024536326026134238, + "acc_norm": 0.1527777777777778, + "acc_norm_stderr": 0.024536326026134238 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23798882681564246, + "acc_stderr": 0.014242630070574892, + "acc_norm": 0.23798882681564246, + "acc_norm_stderr": 0.014242630070574892 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.18382352941176472, + "acc_stderr": 0.02352924218519311, + "acc_norm": 0.18382352941176472, + "acc_norm_stderr": 0.02352924218519311 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.18775510204081633, + "acc_stderr": 0.025000256039546198, + "acc_norm": 0.18775510204081633, + "acc_norm_stderr": 0.025000256039546198 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.270042194092827, + "acc_stderr": 0.028900721906293426, + "acc_norm": 0.270042194092827, + "acc_norm_stderr": 0.028900721906293426 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2457627118644068, + "acc_stderr": 0.010996156635142692, + "acc_norm": 0.2457627118644068, + "acc_norm_stderr": 0.010996156635142692 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.25, + "acc_stderr": 0.03039153369274154, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03039153369274154 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03225078108306289, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03225078108306289 + }, + 
"harness|ko_truthfulqa_mc|0": { + "mc1": 0.24724602203182375, + "mc1_stderr": 0.015102404797359652, + "mc2": 0.4914299132967239, + "mc2_stderr": 0.01720671246785043 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.08736717827626919, + "acc_stderr": 0.009708162004168805, + "acc_norm": 0.4155844155844156, + "acc_norm_stderr": 0.01694358631307657 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "rrw-x2/KoSOLAR-10.7B-qlora-v1.2.1", + "model_sha": "9d5449c027f21ca577dc2e566f0dde721ca4d91e", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/rrw-x2/KoSOLAR-10.7B-qlora-v1.2/result_2024-03-26 07:24:57.json b/rrw-x2/KoSOLAR-10.7B-qlora-v1.2/result_2024-03-26 07:24:57.json new file mode 100644 index 0000000000000000000000000000000000000000..2e4744372cdcb4b0724f4e43ad80e85058c169ee --- /dev/null +++ b/rrw-x2/KoSOLAR-10.7B-qlora-v1.2/result_2024-03-26 07:24:57.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.19027303754266212, + "acc_stderr": 0.011470424179225695, + "acc_norm": 0.2568259385665529, + "acc_norm_stderr": 0.012766923794116796 + }, + "harness|ko_hellaswag|10": { + "acc": 0.2529376618203545, + 
"acc_stderr": 0.004338071318912316, + "acc_norm": 0.2545309699263095, + "acc_norm_stderr": 0.0043470700195274775 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.3216374269005848, + "acc_stderr": 0.03582529442573122, + "acc_norm": 0.3216374269005848, + "acc_norm_stderr": 0.03582529442573122 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.17475728155339806, + "acc_stderr": 0.037601780060266196, + "acc_norm": 0.17475728155339806, + "acc_norm_stderr": 0.037601780060266196 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.23754789272030652, + "acc_stderr": 0.015218733046150191, + "acc_norm": 0.23754789272030652, + "acc_norm_stderr": 0.015218733046150191 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.18518518518518517, + "acc_stderr": 0.03355677216313142, + "acc_norm": 0.18518518518518517, + "acc_norm_stderr": 0.03355677216313142 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932268, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932268 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.26382978723404255, + "acc_stderr": 0.028809989854102987, + "acc_norm": 0.26382978723404255, + "acc_norm_stderr": 0.028809989854102987 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.28313253012048195, + "acc_stderr": 0.03507295431370518, + "acc_norm": 0.28313253012048195, + "acc_norm_stderr": 0.03507295431370518 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.1864951768488746, + "acc_stderr": 0.022122439772480757, + "acc_norm": 0.1864951768488746, + "acc_norm_stderr": 0.022122439772480757 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.31390134529147984, + "acc_stderr": 0.031146796482972465, + "acc_norm": 0.31390134529147984, + "acc_norm_stderr": 0.031146796482972465 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2595419847328244, + "acc_stderr": 0.03844876139785271, + "acc_norm": 0.2595419847328244, + "acc_norm_stderr": 0.03844876139785271 + }, + "harness|ko_mmlu_medical_genetics|5": { + 
"acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.17676767676767677, + "acc_stderr": 0.027178752639044915, + "acc_norm": 0.17676767676767677, + "acc_norm_stderr": 0.027178752639044915 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2413793103448276, + "acc_stderr": 0.03565998174135302, + "acc_norm": 0.2413793103448276, + "acc_norm_stderr": 0.03565998174135302 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237654, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237654 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.21008403361344538, + "acc_stderr": 0.026461398717471874, + "acc_norm": 0.21008403361344538, + "acc_norm_stderr": 0.026461398717471874 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.20256410256410257, + "acc_stderr": 0.020377660970371393, + "acc_norm": 0.20256410256410257, + "acc_norm_stderr": 0.020377660970371393 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.18, + "acc_stderr": 0.038612291966536955, + "acc_norm": 0.18, + "acc_norm_stderr": 0.038612291966536955 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.04236511258094633, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.04236511258094633 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.15270935960591134, + "acc_stderr": 0.025308904539380627, + "acc_norm": 0.15270935960591134, + "acc_norm_stderr": 0.025308904539380627 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.1774193548387097, + "acc_stderr": 0.021732540689329276, + "acc_norm": 0.1774193548387097, + "acc_norm_stderr": 0.021732540689329276 + }, + 
"harness|ko_mmlu_marketing|5": { + "acc": 0.2905982905982906, + "acc_stderr": 0.029745048572674057, + "acc_norm": 0.2905982905982906, + "acc_norm_stderr": 0.029745048572674057 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.21509433962264152, + "acc_stderr": 0.025288394502891377, + "acc_norm": 0.21509433962264152, + "acc_norm_stderr": 0.025288394502891377 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03955932861795833, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03955932861795833 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2111111111111111, + "acc_stderr": 0.024882116857655075, + "acc_norm": 0.2111111111111111, + "acc_norm_stderr": 0.024882116857655075 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.1986754966887417, + "acc_stderr": 0.03257847384436775, + "acc_norm": 0.1986754966887417, + "acc_norm_stderr": 0.03257847384436775 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.24378109452736318, + "acc_stderr": 0.030360490154014645, + "acc_norm": 0.24378109452736318, + "acc_norm_stderr": 0.030360490154014645 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.20809248554913296, + "acc_stderr": 0.030952890217749884, + "acc_norm": 0.20809248554913296, + "acc_norm_stderr": 0.030952890217749884 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.20899470899470898, + "acc_stderr": 0.020940481565334835, + "acc_norm": 0.20899470899470898, + "acc_norm_stderr": 0.020940481565334835 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2569444444444444, + "acc_stderr": 0.03653946969442099, + "acc_norm": 0.2569444444444444, + "acc_norm_stderr": 0.03653946969442099 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.2, + "acc_stderr": 0.040201512610368445, + "acc_norm": 0.2, + "acc_norm_stderr": 0.040201512610368445 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542129, + "acc_norm": 0.28, + 
"acc_norm_stderr": 0.04512608598542129 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.24855491329479767, + "acc_stderr": 0.023267528432100174, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 0.023267528432100174 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.22085889570552147, + "acc_stderr": 0.03259177392742178, + "acc_norm": 0.22085889570552147, + "acc_norm_stderr": 0.03259177392742178 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.21604938271604937, + "acc_stderr": 0.022899162918445796, + "acc_norm": 0.21604938271604937, + "acc_norm_stderr": 0.022899162918445796 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.19689119170984457, + "acc_stderr": 0.028697873971860677, + "acc_norm": 0.19689119170984457, + "acc_norm_stderr": 0.028697873971860677 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.039994238792813365, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.039994238792813365 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.1926605504587156, + "acc_stderr": 0.016909276884936097, + "acc_norm": 0.1926605504587156, + "acc_norm_stderr": 0.016909276884936097 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.04040610178208841, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.04040610178208841 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.02392915551735129, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.02392915551735129 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2396694214876033, + "acc_stderr": 
0.038968789850704164, + "acc_norm": 0.2396694214876033, + "acc_norm_stderr": 0.038968789850704164 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.17763157894736842, + "acc_stderr": 0.03110318238312338, + "acc_norm": 0.17763157894736842, + "acc_norm_stderr": 0.03110318238312338 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.25, + "acc_stderr": 0.01751781884501444, + "acc_norm": 0.25, + "acc_norm_stderr": 0.01751781884501444 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.23404255319148937, + "acc_stderr": 0.025257861359432414, + "acc_norm": 0.23404255319148937, + "acc_norm_stderr": 0.025257861359432414 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3125, + "acc_stderr": 0.043994650575715215, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.043994650575715215 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.1527777777777778, + "acc_stderr": 0.024536326026134238, + "acc_norm": 0.1527777777777778, + "acc_norm_stderr": 0.024536326026134238 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23798882681564246, + "acc_stderr": 0.014242630070574892, + "acc_norm": 0.23798882681564246, + "acc_norm_stderr": 0.014242630070574892 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.18382352941176472, + "acc_stderr": 0.02352924218519311, + "acc_norm": 0.18382352941176472, + "acc_norm_stderr": 0.02352924218519311 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.18775510204081633, + "acc_stderr": 0.025000256039546198, + "acc_norm": 0.18775510204081633, + "acc_norm_stderr": 0.025000256039546198 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 
0.270042194092827, + "acc_stderr": 0.028900721906293426, + "acc_norm": 0.270042194092827, + "acc_norm_stderr": 0.028900721906293426 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2457627118644068, + "acc_stderr": 0.010996156635142692, + "acc_norm": 0.2457627118644068, + "acc_norm_stderr": 0.010996156635142692 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.25, + "acc_stderr": 0.03039153369274154, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03039153369274154 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03225078108306289, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03225078108306289 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2460220318237454, + "mc1_stderr": 0.015077219200662595, + "mc2": 0.4914777058044544, + "mc2_stderr": 0.017206906856400528 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.089728453364817, + "acc_stderr": 0.009825742834398033, + "acc_norm": 0.41912632821723733, + "acc_norm_stderr": 0.016963995010862792 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, 
+ "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "rrw-x2/KoSOLAR-10.7B-qlora-v1.2", + "model_sha": "a387216f3c4593059a326a690718ac24d59e6747", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + 
"override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/rrw-x2/KoSOLAR-10.7B-v1.0/result_2024-03-25 03:04:50.json b/rrw-x2/KoSOLAR-10.7B-v1.0/result_2024-03-25 03:04:50.json new file mode 100644 index 0000000000000000000000000000000000000000..2d657fa0fc32056449013f9c56dbc47d7c18a5b8 --- /dev/null +++ b/rrw-x2/KoSOLAR-10.7B-v1.0/result_2024-03-25 03:04:50.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.6723549488054608, + "acc_stderr": 0.01371584794071934, + "acc_norm": 0.7261092150170648, + "acc_norm_stderr": 0.013032004972989501 + }, + "harness|ko_hellaswag|10": { + "acc": 0.44124676359290976, + "acc_stderr": 0.004955212787832377, + "acc_norm": 0.5854411471818363, + "acc_norm_stderr": 0.004916388962142326 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6783625730994152, + "acc_stderr": 0.03582529442573122, + "acc_norm": 0.6783625730994152, + "acc_norm_stderr": 0.03582529442573122 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6407766990291263, + "acc_stderr": 0.04750458399041697, + "acc_norm": 0.6407766990291263, + "acc_norm_stderr": 0.04750458399041697 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6832694763729247, + "acc_stderr": 0.01663556642771258, + "acc_norm": 0.6832694763729247, + "acc_norm_stderr": 0.01663556642771258 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4740740740740741, + "acc_stderr": 0.04313531696750574, + "acc_norm": 0.4740740740740741, + "acc_norm_stderr": 0.04313531696750574 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206824, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206824 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.502127659574468, + "acc_stderr": 0.03268572658667493, + "acc_norm": 0.502127659574468, + "acc_norm_stderr": 0.03268572658667493 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.5060240963855421, + "acc_stderr": 0.038922121953330446, + "acc_norm": 
0.5060240963855421, + "acc_norm_stderr": 0.038922121953330446 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.639871382636656, + "acc_stderr": 0.027264297599804015, + "acc_norm": 0.639871382636656, + "acc_norm_stderr": 0.027264297599804015 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.6681614349775785, + "acc_stderr": 0.03160295143776679, + "acc_norm": 0.6681614349775785, + "acc_norm_stderr": 0.03160295143776679 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6030534351145038, + "acc_stderr": 0.04291135671009224, + "acc_norm": 0.6030534351145038, + "acc_norm_stderr": 0.04291135671009224 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7121212121212122, + "acc_stderr": 0.03225883512300993, + "acc_norm": 0.7121212121212122, + "acc_norm_stderr": 0.03225883512300993 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5103448275862069, + "acc_stderr": 0.04165774775728763, + "acc_norm": 0.5103448275862069, + "acc_norm_stderr": 0.04165774775728763 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04690650298201943, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04690650298201943 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6512605042016807, + "acc_stderr": 0.030956636328566545, + "acc_norm": 0.6512605042016807, + "acc_norm_stderr": 0.030956636328566545 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.6, + "acc_stderr": 0.024838811988033175, + "acc_norm": 0.6, + "acc_norm_stderr": 0.024838811988033175 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.64, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.64, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + 
"acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5833333333333334, + "acc_stderr": 0.04766075165356462, + "acc_norm": 0.5833333333333334, + "acc_norm_stderr": 0.04766075165356462 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39901477832512317, + "acc_stderr": 0.03445487686264715, + "acc_norm": 0.39901477832512317, + "acc_norm_stderr": 0.03445487686264715 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6161290322580645, + "acc_stderr": 0.02766618207553965, + "acc_norm": 0.6161290322580645, + "acc_norm_stderr": 0.02766618207553965 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.8418803418803419, + "acc_stderr": 0.02390232554956041, + "acc_norm": 0.8418803418803419, + "acc_norm_stderr": 0.02390232554956041 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.6037735849056604, + "acc_stderr": 0.030102793781791194, + "acc_norm": 0.6037735849056604, + "acc_norm_stderr": 0.030102793781791194 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6090909090909091, + "acc_stderr": 0.046737523336702384, + "acc_norm": 0.6090909090909091, + "acc_norm_stderr": 0.046737523336702384 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.35555555555555557, + "acc_stderr": 0.029185714949857392, + "acc_norm": 0.35555555555555557, + "acc_norm_stderr": 0.029185714949857392 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.39072847682119205, + "acc_stderr": 0.039837983066598075, + "acc_norm": 0.39072847682119205, + "acc_norm_stderr": 0.039837983066598075 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7213930348258707, + "acc_stderr": 0.031700561834973086, + "acc_norm": 0.7213930348258707, + "acc_norm_stderr": 0.031700561834973086 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5549132947976878, + "acc_stderr": 0.03789401760283647, + "acc_norm": 0.5549132947976878, + "acc_norm_stderr": 0.03789401760283647 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + 
"acc": 0.41798941798941797, + "acc_stderr": 0.02540255550326091, + "acc_norm": 0.41798941798941797, + "acc_norm_stderr": 0.02540255550326091 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5347222222222222, + "acc_stderr": 0.04171115858181618, + "acc_norm": 0.5347222222222222, + "acc_norm_stderr": 0.04171115858181618 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.75, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.75, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5982658959537572, + "acc_stderr": 0.02639410417764363, + "acc_norm": 0.5982658959537572, + "acc_norm_stderr": 0.02639410417764363 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5705521472392638, + "acc_stderr": 0.03889066619112723, + "acc_norm": 0.5705521472392638, + "acc_norm_stderr": 0.03889066619112723 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.654320987654321, + "acc_stderr": 0.02646248777700187, + "acc_norm": 0.654320987654321, + "acc_norm_stderr": 0.02646248777700187 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7253886010362695, + "acc_stderr": 0.03221024508041154, + "acc_norm": 0.7253886010362695, + "acc_norm_stderr": 0.03221024508041154 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.41228070175438597, + "acc_stderr": 0.04630653203366596, + "acc_norm": 0.41228070175438597, + "acc_norm_stderr": 0.04630653203366596 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.7376146788990826, + "acc_stderr": 0.018861885021534745, + "acc_norm": 0.7376146788990826, + "acc_norm_stderr": 0.018861885021534745 + }, + "harness|ko_mmlu_formal_logic|5": 
{ + "acc": 0.42857142857142855, + "acc_stderr": 0.0442626668137991, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.0442626668137991 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.6045751633986928, + "acc_stderr": 0.02799672318063145, + "acc_norm": 0.6045751633986928, + "acc_norm_stderr": 0.02799672318063145 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.63, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.63, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.768595041322314, + "acc_stderr": 0.03849856098794088, + "acc_norm": 0.768595041322314, + "acc_norm_stderr": 0.03849856098794088 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5723684210526315, + "acc_stderr": 0.04026097083296561, + "acc_norm": 0.5723684210526315, + "acc_norm_stderr": 0.04026097083296561 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5571895424836601, + "acc_stderr": 0.020095083154577358, + "acc_norm": 0.5571895424836601, + "acc_norm_stderr": 0.020095083154577358 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.41843971631205673, + "acc_stderr": 0.02942799403941999, + "acc_norm": 0.41843971631205673, + "acc_norm_stderr": 0.02942799403941999 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.45535714285714285, + "acc_stderr": 0.04726835553719099, + "acc_norm": 0.45535714285714285, + "acc_norm_stderr": 0.04726835553719099 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.03388857118502325, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.03388857118502325 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.42569832402234636, + "acc_stderr": 0.01653682964899711, + "acc_norm": 0.42569832402234636, + "acc_norm_stderr": 0.01653682964899711 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + 
"harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.72, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.72, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5735294117647058, + "acc_stderr": 0.03004261583271486, + "acc_norm": 0.5735294117647058, + "acc_norm_stderr": 0.03004261583271486 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6571428571428571, + "acc_stderr": 0.03038726291954772, + "acc_norm": 0.6571428571428571, + "acc_norm_stderr": 0.03038726291954772 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7426160337552743, + "acc_stderr": 0.02845882099146031, + "acc_norm": 0.7426160337552743, + "acc_norm_stderr": 0.02845882099146031 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.4491525423728814, + "acc_stderr": 0.012704030518851476, + "acc_norm": 0.4491525423728814, + "acc_norm_stderr": 0.012704030518851476 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6519607843137255, + "acc_stderr": 0.03343311240488418, + "acc_norm": 0.6519607843137255, + "acc_norm_stderr": 0.03343311240488418 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.037563357751878954, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.037563357751878954 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.5446756425948592, + "mc1_stderr": 0.017433490102538765, + "mc2": 0.6650293478858966, + "mc2_stderr": 0.014681198658160395 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5348288075560803, + "acc_stderr": 0.017148598015747422, + "acc_norm": 0.5938606847697757, + "acc_norm_stderr": 0.0168847495031914 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + 
"harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + 
"harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "rrw-x2/KoSOLAR-10.7B-v1.0", + "model_sha": "847de02df5f0ed51c60a6c5c0570b769aa6d2b75", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/rrw-x2/KoSOLAR-10.9B-v0.3/result_2024-02-09 14:31:51.json b/rrw-x2/KoSOLAR-10.9B-v0.3/result_2024-02-09 14:31:51.json new file mode 100644 index 0000000000000000000000000000000000000000..57b228ff1e1df9d0e08e503e62a685cd6311b987 --- /dev/null +++ b/rrw-x2/KoSOLAR-10.9B-v0.3/result_2024-02-09 14:31:51.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3728668941979522, + "acc_stderr": 0.014131176760131167, + "acc_norm": 0.43686006825938567, + "acc_norm_stderr": 0.01449442158425652 + }, + "harness|ko_hellaswag|10": { + "acc": 0.40240987851025695, + "acc_stderr": 0.004893814890208322, + "acc_norm": 0.5199163513244374, + "acc_norm_stderr": 0.004985821336146407 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4678362573099415, + "acc_stderr": 0.03826882417660368, + "acc_norm": 0.4678362573099415, + "acc_norm_stderr": 0.03826882417660368 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5922330097087378, + "acc_stderr": 0.04865777570410769, + "acc_norm": 0.5922330097087378, + "acc_norm_stderr": 0.04865777570410769 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5402298850574713, + "acc_stderr": 0.01782199409693354, + "acc_norm": 0.5402298850574713, + "acc_norm_stderr": 
0.01782199409693354 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4148148148148148, + "acc_stderr": 0.04256193767901407, + "acc_norm": 0.4148148148148148, + "acc_norm_stderr": 0.04256193767901407 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.33617021276595743, + "acc_stderr": 0.030881618520676942, + "acc_norm": 0.33617021276595743, + "acc_norm_stderr": 0.030881618520676942 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.41566265060240964, + "acc_stderr": 0.038367221765980515, + "acc_norm": 0.41566265060240964, + "acc_norm_stderr": 0.038367221765980515 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4790996784565916, + "acc_stderr": 0.028373270961069414, + "acc_norm": 0.4790996784565916, + "acc_norm_stderr": 0.028373270961069414 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4663677130044843, + "acc_stderr": 0.033481800170603065, + "acc_norm": 0.4663677130044843, + "acc_norm_stderr": 0.033481800170603065 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.45038167938931295, + "acc_stderr": 0.04363643698524779, + "acc_norm": 0.45038167938931295, + "acc_norm_stderr": 0.04363643698524779 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5959595959595959, + "acc_stderr": 0.034961309720561266, + "acc_norm": 0.5959595959595959, + "acc_norm_stderr": 0.034961309720561266 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4689655172413793, + "acc_stderr": 0.04158632762097828, + "acc_norm": 0.4689655172413793, + "acc_norm_stderr": 0.04158632762097828 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237655, + "acc_norm": 0.21568627450980393, + 
"acc_norm_stderr": 0.04092563958237655 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.40756302521008403, + "acc_stderr": 0.03191863374478465, + "acc_norm": 0.40756302521008403, + "acc_norm_stderr": 0.03191863374478465 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.44871794871794873, + "acc_stderr": 0.02521731518484648, + "acc_norm": 0.44871794871794873, + "acc_norm_stderr": 0.02521731518484648 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5277777777777778, + "acc_stderr": 0.04826217294139894, + "acc_norm": 0.5277777777777778, + "acc_norm_stderr": 0.04826217294139894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.33497536945812806, + "acc_stderr": 0.0332085274234831, + "acc_norm": 0.33497536945812806, + "acc_norm_stderr": 0.0332085274234831 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4774193548387097, + "acc_stderr": 0.028414985019707868, + "acc_norm": 0.4774193548387097, + "acc_norm_stderr": 0.028414985019707868 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6837606837606838, + "acc_stderr": 0.03046365674734026, + "acc_norm": 0.6837606837606838, + "acc_norm_stderr": 0.03046365674734026 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.42641509433962266, + "acc_stderr": 0.03043779434298305, + "acc_norm": 0.42641509433962266, + "acc_norm_stderr": 0.03043779434298305 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4636363636363636, + "acc_stderr": 0.04776449162396197, + "acc_norm": 0.4636363636363636, + "acc_norm_stderr": 0.04776449162396197 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2740740740740741, + "acc_stderr": 
0.027195934804085626, + "acc_norm": 0.2740740740740741, + "acc_norm_stderr": 0.027195934804085626 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.32450331125827814, + "acc_stderr": 0.03822746937658753, + "acc_norm": 0.32450331125827814, + "acc_norm_stderr": 0.03822746937658753 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5223880597014925, + "acc_stderr": 0.035319879302087305, + "acc_norm": 0.5223880597014925, + "acc_norm_stderr": 0.035319879302087305 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4508670520231214, + "acc_stderr": 0.03794012674697031, + "acc_norm": 0.4508670520231214, + "acc_norm_stderr": 0.03794012674697031 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3201058201058201, + "acc_stderr": 0.0240268463928735, + "acc_norm": 0.3201058201058201, + "acc_norm_stderr": 0.0240268463928735 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.039420826399272135, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.039420826399272135 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.41329479768786126, + "acc_stderr": 0.02651126136940924, + "acc_norm": 0.41329479768786126, + "acc_norm_stderr": 0.02651126136940924 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.49079754601226994, + "acc_stderr": 0.039277056007874414, + "acc_norm": 0.49079754601226994, + "acc_norm_stderr": 0.039277056007874414 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4506172839506173, + "acc_stderr": 0.027684721415656206, + "acc_norm": 0.4506172839506173, + "acc_norm_stderr": 0.027684721415656206 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + 
"acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.48704663212435234, + "acc_stderr": 0.03607228061047749, + "acc_norm": 0.48704663212435234, + "acc_norm_stderr": 0.03607228061047749 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2982456140350877, + "acc_stderr": 0.043036840335373173, + "acc_norm": 0.2982456140350877, + "acc_norm_stderr": 0.043036840335373173 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5119266055045871, + "acc_stderr": 0.021431223617362223, + "acc_norm": 0.5119266055045871, + "acc_norm_stderr": 0.021431223617362223 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.040061680838488774, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.040061680838488774 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.43790849673202614, + "acc_stderr": 0.02840830202033269, + "acc_norm": 0.43790849673202614, + "acc_norm_stderr": 0.02840830202033269 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5785123966942148, + "acc_stderr": 0.04507732278775089, + "acc_norm": 0.5785123966942148, + "acc_norm_stderr": 0.04507732278775089 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3618421052631579, + "acc_stderr": 0.03910525752849726, + "acc_norm": 0.3618421052631579, + "acc_norm_stderr": 0.03910525752849726 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3758169934640523, + "acc_stderr": 0.019594021136577443, + "acc_norm": 0.3758169934640523, + "acc_norm_stderr": 0.019594021136577443 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2801418439716312, + "acc_stderr": 0.02678917235114024, + "acc_norm": 0.2801418439716312, + "acc_norm_stderr": 0.02678917235114024 + }, + 
"harness|ko_mmlu_machine_learning|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.04287858751340455, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.04287858751340455 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4398148148148148, + "acc_stderr": 0.033851779760448106, + "acc_norm": 0.4398148148148148, + "acc_norm_stderr": 0.033851779760448106 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.293854748603352, + "acc_stderr": 0.015235075776719616, + "acc_norm": 0.293854748603352, + "acc_norm_stderr": 0.015235075776719616 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3897058823529412, + "acc_stderr": 0.02962466358115969, + "acc_norm": 0.3897058823529412, + "acc_norm_stderr": 0.02962466358115969 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.42448979591836733, + "acc_stderr": 0.03164209487942941, + "acc_norm": 0.42448979591836733, + "acc_norm_stderr": 0.03164209487942941 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5907172995780591, + "acc_stderr": 0.03200704183359591, + "acc_norm": 0.5907172995780591, + "acc_norm_stderr": 0.03200704183359591 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.33376792698826596, + "acc_stderr": 0.012043812655846144, + "acc_norm": 0.33376792698826596, + "acc_norm_stderr": 0.012043812655846144 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5294117647058824, + "acc_stderr": 0.03503235296367993, + "acc_norm": 0.5294117647058824, + "acc_norm_stderr": 0.03503235296367993 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.49696969696969695, + "acc_stderr": 0.039042723414318574, + "acc_norm": 
0.49696969696969695, + "acc_norm_stderr": 0.039042723414318574 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2607099143206854, + "mc1_stderr": 0.015368841620766373, + "mc2": 0.4165440736855395, + "mc2_stderr": 0.015358080941766751 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.45218417945690675, + "acc_stderr": 0.017111567130916782, + "acc_norm": 0.525383707201889, + "acc_norm_stderr": 0.017168187201429257 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + 
"harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "rrw-x2/KoSOLAR-10.9B-v0.3", + "model_sha": "0b7f38ab06552c55441cda8feb40990fdd0a5d61", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/rrw-x2/KoSOLAR-10.9B-v0.5/result_2024-02-09 13:59:24.json b/rrw-x2/KoSOLAR-10.9B-v0.5/result_2024-02-09 13:59:24.json new file mode 100644 index 0000000000000000000000000000000000000000..4f15394a223066bac3898910c45c328eeddf690a --- /dev/null +++ b/rrw-x2/KoSOLAR-10.9B-v0.5/result_2024-02-09 13:59:24.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.35921501706484643, + "acc_stderr": 0.014020224155839159, + "acc_norm": 0.4283276450511945, + "acc_norm_stderr": 0.014460496367599019 + }, + 
"harness|ko_hellaswag|10": { + "acc": 0.4012148974307907, + "acc_stderr": 0.004891426533390627, + "acc_norm": 0.5311690898227445, + "acc_norm_stderr": 0.004980076707392439 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4853801169590643, + "acc_stderr": 0.038331852752130205, + "acc_norm": 0.4853801169590643, + "acc_norm_stderr": 0.038331852752130205 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6019417475728155, + "acc_stderr": 0.048467482539772386, + "acc_norm": 0.6019417475728155, + "acc_norm_stderr": 0.048467482539772386 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5300127713920817, + "acc_stderr": 0.017847723086649073, + "acc_norm": 0.5300127713920817, + "acc_norm_stderr": 0.017847723086649073 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45185185185185184, + "acc_stderr": 0.04299268905480863, + "acc_norm": 0.45185185185185184, + "acc_norm_stderr": 0.04299268905480863 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.34893617021276596, + "acc_stderr": 0.03115852213135778, + "acc_norm": 0.34893617021276596, + "acc_norm_stderr": 0.03115852213135778 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3493975903614458, + "acc_stderr": 0.037117251907407486, + "acc_norm": 0.3493975903614458, + "acc_norm_stderr": 0.037117251907407486 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.45980707395498394, + "acc_stderr": 0.028306190403305696, + "acc_norm": 0.45980707395498394, + "acc_norm_stderr": 0.028306190403305696 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4439461883408072, + "acc_stderr": 0.03334625674242728, + "acc_norm": 0.4439461883408072, + "acc_norm_stderr": 0.03334625674242728 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.44274809160305345, + "acc_stderr": 0.043564472026650695, + "acc_norm": 0.44274809160305345, + "acc_norm_stderr": 
0.043564472026650695 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956914, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956914 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6313131313131313, + "acc_stderr": 0.03437305501980619, + "acc_norm": 0.6313131313131313, + "acc_norm_stderr": 0.03437305501980619 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.43448275862068964, + "acc_stderr": 0.041307408795554966, + "acc_norm": 0.43448275862068964, + "acc_norm_stderr": 0.041307408795554966 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.19607843137254902, + "acc_stderr": 0.039505818611799616, + "acc_norm": 0.19607843137254902, + "acc_norm_stderr": 0.039505818611799616 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.40756302521008403, + "acc_stderr": 0.03191863374478465, + "acc_norm": 0.40756302521008403, + "acc_norm_stderr": 0.03191863374478465 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.43333333333333335, + "acc_stderr": 0.025124653525885134, + "acc_norm": 0.43333333333333335, + "acc_norm_stderr": 0.025124653525885134 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5092592592592593, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.5092592592592593, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.35467980295566504, + "acc_stderr": 0.0336612448905145, + "acc_norm": 0.35467980295566504, + "acc_norm_stderr": 0.0336612448905145 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4645161290322581, + "acc_stderr": 0.028372287797962956, + "acc_norm": 
0.4645161290322581, + "acc_norm_stderr": 0.028372287797962956 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6752136752136753, + "acc_stderr": 0.03067902276549883, + "acc_norm": 0.6752136752136753, + "acc_norm_stderr": 0.03067902276549883 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4528301886792453, + "acc_stderr": 0.030635627957961823, + "acc_norm": 0.4528301886792453, + "acc_norm_stderr": 0.030635627957961823 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4636363636363636, + "acc_stderr": 0.04776449162396197, + "acc_norm": 0.4636363636363636, + "acc_norm_stderr": 0.04776449162396197 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.28888888888888886, + "acc_stderr": 0.027634907264178544, + "acc_norm": 0.28888888888888886, + "acc_norm_stderr": 0.027634907264178544 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.03802039760107903, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.03802039760107903 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5422885572139303, + "acc_stderr": 0.03522865864099598, + "acc_norm": 0.5422885572139303, + "acc_norm_stderr": 0.03522865864099598 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3930635838150289, + "acc_stderr": 0.03724249595817731, + "acc_norm": 0.3930635838150289, + "acc_norm_stderr": 0.03724249595817731 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3412698412698413, + "acc_stderr": 0.024419234966819064, + "acc_norm": 0.3412698412698413, + "acc_norm_stderr": 0.024419234966819064 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3541666666666667, + "acc_stderr": 0.039994111357535424, + "acc_norm": 0.3541666666666667, + "acc_norm_stderr": 0.039994111357535424 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.46, + 
"acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.42485549132947975, + "acc_stderr": 0.026613350840261746, + "acc_norm": 0.42485549132947975, + "acc_norm_stderr": 0.026613350840261746 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.44171779141104295, + "acc_stderr": 0.03901591825836184, + "acc_norm": 0.44171779141104295, + "acc_norm_stderr": 0.03901591825836184 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.404320987654321, + "acc_stderr": 0.027306625297327684, + "acc_norm": 0.404320987654321, + "acc_norm_stderr": 0.027306625297327684 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5181347150259067, + "acc_stderr": 0.03606065001832919, + "acc_norm": 0.5181347150259067, + "acc_norm_stderr": 0.03606065001832919 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.04185774424022056, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.04185774424022056 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.48440366972477067, + "acc_stderr": 0.02142689153920805, + "acc_norm": 0.48440366972477067, + "acc_norm_stderr": 0.02142689153920805 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.23809523809523808, + "acc_stderr": 0.03809523809523811, + "acc_norm": 0.23809523809523808, + "acc_norm_stderr": 0.03809523809523811 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4084967320261438, + "acc_stderr": 0.028146405993096358, + "acc_norm": 0.4084967320261438, + "acc_norm_stderr": 0.028146405993096358 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 
0.5867768595041323, + "acc_stderr": 0.04495087843548408, + "acc_norm": 0.5867768595041323, + "acc_norm_stderr": 0.04495087843548408 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4342105263157895, + "acc_stderr": 0.040335656678483205, + "acc_norm": 0.4342105263157895, + "acc_norm_stderr": 0.040335656678483205 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3562091503267974, + "acc_stderr": 0.01937333242072449, + "acc_norm": 0.3562091503267974, + "acc_norm_stderr": 0.01937333242072449 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3120567375886525, + "acc_stderr": 0.027640120545169938, + "acc_norm": 0.3120567375886525, + "acc_norm_stderr": 0.027640120545169938 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25, + "acc_stderr": 0.04109974682633932, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04109974682633932 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.37962962962962965, + "acc_stderr": 0.03309682581119035, + "acc_norm": 0.37962962962962965, + "acc_norm_stderr": 0.03309682581119035 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.25139664804469275, + "acc_stderr": 0.014508979453553983, + "acc_norm": 0.25139664804469275, + "acc_norm_stderr": 0.014508979453553983 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.04999999999999999, + "acc_norm": 0.45, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.41911764705882354, + "acc_stderr": 0.029972807170464626, + "acc_norm": 0.41911764705882354, + "acc_norm_stderr": 0.029972807170464626 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.37142857142857144, + "acc_stderr": 0.030932858792789845, + "acc_norm": 0.37142857142857144, + "acc_norm_stderr": 0.030932858792789845 + }, + 
"harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5907172995780591, + "acc_stderr": 0.03200704183359591, + "acc_norm": 0.5907172995780591, + "acc_norm_stderr": 0.03200704183359591 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3389830508474576, + "acc_stderr": 0.01208994185758447, + "acc_norm": 0.3389830508474576, + "acc_norm_stderr": 0.01208994185758447 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.47549019607843135, + "acc_stderr": 0.035050931943487976, + "acc_norm": 0.47549019607843135, + "acc_norm_stderr": 0.035050931943487976 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5696969696969697, + "acc_stderr": 0.03866225962879078, + "acc_norm": 0.5696969696969697, + "acc_norm_stderr": 0.03866225962879078 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2766217870257038, + "mc1_stderr": 0.015659605755326912, + "mc2": 0.439413337284231, + "mc2_stderr": 0.015388675179973017 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4769775678866588, + "acc_stderr": 0.017172121546727634, + "acc_norm": 0.5631641086186541, + "acc_norm_stderr": 0.01705263355985607 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + 
"harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "rrw-x2/KoSOLAR-10.9B-v0.5", + "model_sha": "a0ae60d6081a57ce525c0a99f3bf21acf61eaf46", + "model_dtype": "torch.float16", + 
"lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/rrw-x2/KoSOLAR-10.9B-v1.0/result_2024-03-21 10:14:06.json b/rrw-x2/KoSOLAR-10.9B-v1.0/result_2024-03-21 10:14:06.json new file mode 100644 index 0000000000000000000000000000000000000000..5e3e8b5dabe9897ea1081a0ab49105c855f009a0 --- /dev/null +++ b/rrw-x2/KoSOLAR-10.9B-v1.0/result_2024-03-21 10:14:06.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4462457337883959, + "acc_stderr": 0.014526705548539982, + "acc_norm": 0.49402730375426623, + "acc_norm_stderr": 0.014610348300255795 + }, + "harness|ko_hellaswag|10": { + "acc": 0.44851623182632944, + "acc_stderr": 0.004963259311700553, + "acc_norm": 0.607647878908584, + "acc_norm_stderr": 0.004872765504069851 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6549707602339181, + "acc_stderr": 0.03645981377388807, + "acc_norm": 0.6549707602339181, + "acc_norm_stderr": 0.03645981377388807 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6699029126213593, + "acc_stderr": 0.04656147110012351, + "acc_norm": 0.6699029126213593, + "acc_norm_stderr": 0.04656147110012351 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6679438058748404, + "acc_stderr": 0.01684117465529571, + "acc_norm": 0.6679438058748404, + "acc_norm_stderr": 0.01684117465529571 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.043097329010363554, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.043097329010363554 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621503, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621503 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4425531914893617, + "acc_stderr": 0.03246956919789958, + "acc_norm": 0.4425531914893617, + "acc_norm_stderr": 0.03246956919789958 + }, + "harness|ko_mmlu_virology|5": { + 
"acc": 0.42168674698795183, + "acc_stderr": 0.03844453181770917, + "acc_norm": 0.42168674698795183, + "acc_norm_stderr": 0.03844453181770917 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5852090032154341, + "acc_stderr": 0.027982680459759563, + "acc_norm": 0.5852090032154341, + "acc_norm_stderr": 0.027982680459759563 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.6188340807174888, + "acc_stderr": 0.03259625118416827, + "acc_norm": 0.6188340807174888, + "acc_norm_stderr": 0.03259625118416827 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5648854961832062, + "acc_stderr": 0.04348208051644858, + "acc_norm": 0.5648854961832062, + "acc_norm_stderr": 0.04348208051644858 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7070707070707071, + "acc_stderr": 0.03242497958178817, + "acc_norm": 0.7070707070707071, + "acc_norm_stderr": 0.03242497958178817 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.45517241379310347, + "acc_stderr": 0.04149886942192117, + "acc_norm": 0.45517241379310347, + "acc_norm_stderr": 0.04149886942192117 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.043364327079931785, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.043364327079931785 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.542016806722689, + "acc_stderr": 0.03236361111951941, + "acc_norm": 0.542016806722689, + "acc_norm_stderr": 0.03236361111951941 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5307692307692308, + "acc_stderr": 0.025302958890850154, + "acc_norm": 0.5307692307692308, + "acc_norm_stderr": 0.025302958890850154 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + 
"harness|ko_mmlu_global_facts|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6296296296296297, + "acc_stderr": 0.04668408033024931, + "acc_norm": 0.6296296296296297, + "acc_norm_stderr": 0.04668408033024931 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4088669950738916, + "acc_stderr": 0.03459058815883232, + "acc_norm": 0.4088669950738916, + "acc_norm_stderr": 0.03459058815883232 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6193548387096774, + "acc_stderr": 0.027621717832907046, + "acc_norm": 0.6193548387096774, + "acc_norm_stderr": 0.027621717832907046 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7735042735042735, + "acc_stderr": 0.027421007295392926, + "acc_norm": 0.7735042735042735, + "acc_norm_stderr": 0.027421007295392926 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5773584905660377, + "acc_stderr": 0.030402331445769537, + "acc_norm": 0.5773584905660377, + "acc_norm_stderr": 0.030402331445769537 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.04769300568972744, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.04769300568972744 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3296296296296296, + "acc_stderr": 0.02866120111652459, + "acc_norm": 0.3296296296296296, + "acc_norm_stderr": 0.02866120111652459 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.36423841059602646, + "acc_stderr": 0.03929111781242741, + "acc_norm": 0.36423841059602646, + "acc_norm_stderr": 0.03929111781242741 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6417910447761194, + "acc_stderr": 0.03390393042268814, + "acc_norm": 0.6417910447761194, + "acc_norm_stderr": 0.03390393042268814 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4682080924855491, + "acc_stderr": 0.038047497443647646, + "acc_norm": 0.4682080924855491, 
+ "acc_norm_stderr": 0.038047497443647646 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.41534391534391535, + "acc_stderr": 0.025379524910778405, + "acc_norm": 0.41534391534391535, + "acc_norm_stderr": 0.025379524910778405 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5069444444444444, + "acc_stderr": 0.04180806750294939, + "acc_norm": 0.5069444444444444, + "acc_norm_stderr": 0.04180806750294939 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.71, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.71, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5375722543352601, + "acc_stderr": 0.026842985519615375, + "acc_norm": 0.5375722543352601, + "acc_norm_stderr": 0.026842985519615375 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5521472392638037, + "acc_stderr": 0.03906947479456608, + "acc_norm": 0.5521472392638037, + "acc_norm_stderr": 0.03906947479456608 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5679012345679012, + "acc_stderr": 0.02756301097160667, + "acc_norm": 0.5679012345679012, + "acc_norm_stderr": 0.02756301097160667 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7253886010362695, + "acc_stderr": 0.03221024508041153, + "acc_norm": 0.7253886010362695, + "acc_norm_stderr": 0.03221024508041153 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.40350877192982454, + "acc_stderr": 0.04615186962583703, + "acc_norm": 0.40350877192982454, + "acc_norm_stderr": 0.04615186962583703 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6678899082568808, + "acc_stderr": 0.020192682985423344, + "acc_norm": 
0.6678899082568808, + "acc_norm_stderr": 0.020192682985423344 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.40476190476190477, + "acc_stderr": 0.043902592653775614, + "acc_norm": 0.40476190476190477, + "acc_norm_stderr": 0.043902592653775614 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5751633986928104, + "acc_stderr": 0.02830457667314111, + "acc_norm": 0.5751633986928104, + "acc_norm_stderr": 0.02830457667314111 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7024793388429752, + "acc_stderr": 0.041733491480835, + "acc_norm": 0.7024793388429752, + "acc_norm_stderr": 0.041733491480835 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5394736842105263, + "acc_stderr": 0.04056242252249034, + "acc_norm": 0.5394736842105263, + "acc_norm_stderr": 0.04056242252249034 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.49019607843137253, + "acc_stderr": 0.02022394600507429, + "acc_norm": 0.49019607843137253, + "acc_norm_stderr": 0.02022394600507429 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.34397163120567376, + "acc_stderr": 0.028338017428611327, + "acc_norm": 0.34397163120567376, + "acc_norm_stderr": 0.028338017428611327 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.04547960999764376, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.04547960999764376 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4027777777777778, + "acc_stderr": 0.03344887382997865, + "acc_norm": 0.4027777777777778, + "acc_norm_stderr": 0.03344887382997865 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.19776536312849163, + "acc_stderr": 0.013321620594050947, + "acc_norm": 0.19776536312849163, + "acc_norm_stderr": 0.013321620594050947 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.51, + 
"acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.64, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.64, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4522058823529412, + "acc_stderr": 0.03023375855159645, + "acc_norm": 0.4522058823529412, + "acc_norm_stderr": 0.03023375855159645 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5714285714285714, + "acc_stderr": 0.031680911612338825, + "acc_norm": 0.5714285714285714, + "acc_norm_stderr": 0.031680911612338825 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7426160337552743, + "acc_stderr": 0.028458820991460316, + "acc_norm": 0.7426160337552743, + "acc_norm_stderr": 0.028458820991460316 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.39374185136897, + "acc_stderr": 0.012478532272564439, + "acc_norm": 0.39374185136897, + "acc_norm_stderr": 0.012478532272564439 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5980392156862745, + "acc_stderr": 0.034411900234824655, + "acc_norm": 0.5980392156862745, + "acc_norm_stderr": 0.034411900234824655 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.0368105086916155, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.0368105086916155 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2937576499388005, + "mc1_stderr": 0.01594506858123662, + "mc2": 0.45290421507806095, + "mc2_stderr": 0.015621878420558599 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5631641086186541, + "acc_stderr": 0.017052633559856072, + "acc_norm": 0.615112160566706, + "acc_norm_stderr": 0.016728579701498644 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + 
"harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 
1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "rrw-x2/KoSOLAR-10.9B-v1.0", + "model_sha": "aea1c6d6b3d502de15cd617a721169b46039f58e", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/rufjdk5480/ko-llama7b-merged/result_2023-12-04 07:04:26.json b/rufjdk5480/ko-llama7b-merged/result_2023-12-04 07:04:26.json new file mode 100644 index 0000000000000000000000000000000000000000..6d638289d08925842f996a522f6d0f49864e995c --- /dev/null +++ b/rufjdk5480/ko-llama7b-merged/result_2023-12-04 07:04:26.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2645051194539249, + "acc_stderr": 0.01288927294931337, + "acc_norm": 0.30887372013651876, + "acc_norm_stderr": 0.013501770929344 + }, + "harness|ko_hellaswag|10": { + "acc": 0.33479386576379205, + "acc_stderr": 0.004709538864916327, + "acc_norm": 0.4118701453893647, + "acc_norm_stderr": 0.0049116598845061485 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.49707602339181284, + "acc_stderr": 0.03834759370936839, + "acc_norm": 0.49707602339181284, + "acc_norm_stderr": 0.03834759370936839 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2524271844660194, + "acc_stderr": 0.04301250399690879, + "acc_norm": 0.2524271844660194, + "acc_norm_stderr": 0.04301250399690879 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 
0.36270753512132825, + "acc_stderr": 0.0171927086746023, + "acc_norm": 0.36270753512132825, + "acc_norm_stderr": 0.0171927086746023 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3037037037037037, + "acc_stderr": 0.039725528847851375, + "acc_norm": 0.3037037037037037, + "acc_norm_stderr": 0.039725528847851375 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720683, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720683 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.33617021276595743, + "acc_stderr": 0.030881618520676942, + "acc_norm": 0.33617021276595743, + "acc_norm_stderr": 0.030881618520676942 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.30120481927710846, + "acc_stderr": 0.03571609230053481, + "acc_norm": 0.30120481927710846, + "acc_norm_stderr": 0.03571609230053481 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.35691318327974275, + "acc_stderr": 0.027210420375934012, + "acc_norm": 0.35691318327974275, + "acc_norm_stderr": 0.027210420375934012 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3273542600896861, + "acc_stderr": 0.031493846709941306, + "acc_norm": 0.3273542600896861, + "acc_norm_stderr": 0.031493846709941306 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.35877862595419846, + "acc_stderr": 0.04206739313864908, + "acc_norm": 0.35877862595419846, + "acc_norm_stderr": 0.04206739313864908 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.32323232323232326, + "acc_stderr": 0.03332299921070645, + "acc_norm": 0.32323232323232326, + "acc_norm_stderr": 0.03332299921070645 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3448275862068966, + "acc_stderr": 0.039609335494512087, + "acc_norm": 0.3448275862068966, + "acc_norm_stderr": 0.039609335494512087 + }, + 
"harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.041583075330832865, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.041583075330832865 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3067226890756303, + "acc_stderr": 0.029953823891887044, + "acc_norm": 0.3067226890756303, + "acc_norm_stderr": 0.029953823891887044 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.29743589743589743, + "acc_stderr": 0.023177408131465942, + "acc_norm": 0.29743589743589743, + "acc_norm_stderr": 0.023177408131465942 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.0471282125742677, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.0471282125742677 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.31527093596059114, + "acc_stderr": 0.03269080871970186, + "acc_norm": 0.31527093596059114, + "acc_norm_stderr": 0.03269080871970186 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.31290322580645163, + "acc_stderr": 0.02637756702864586, + "acc_norm": 0.31290322580645163, + "acc_norm_stderr": 0.02637756702864586 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.4829059829059829, + "acc_stderr": 0.032736940493481824, + "acc_norm": 0.4829059829059829, + "acc_norm_stderr": 0.032736940493481824 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.27169811320754716, + "acc_stderr": 0.027377706624670716, + "acc_norm": 0.27169811320754716, + "acc_norm_stderr": 0.027377706624670716 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.39090909090909093, + "acc_stderr": 0.04673752333670237, + "acc_norm": 0.39090909090909093, 
+ "acc_norm_stderr": 0.04673752333670237 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.027309140588230186, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.027309140588230186 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.23841059602649006, + "acc_stderr": 0.034791855725996614, + "acc_norm": 0.23841059602649006, + "acc_norm_stderr": 0.034791855725996614 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.4577114427860697, + "acc_stderr": 0.03522865864099597, + "acc_norm": 0.4577114427860697, + "acc_norm_stderr": 0.03522865864099597 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2543352601156069, + "acc_stderr": 0.0332055644308557, + "acc_norm": 0.2543352601156069, + "acc_norm_stderr": 0.0332055644308557 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.25396825396825395, + "acc_stderr": 0.02241804289111394, + "acc_norm": 0.25396825396825395, + "acc_norm_stderr": 0.02241804289111394 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2916666666666667, + "acc_stderr": 0.03800968060554859, + "acc_norm": 0.2916666666666667, + "acc_norm_stderr": 0.03800968060554859 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.37572254335260113, + "acc_stderr": 0.02607431485165708, + "acc_norm": 0.37572254335260113, + "acc_norm_stderr": 0.02607431485165708 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2822085889570552, + "acc_stderr": 0.03536117886664743, + "acc_norm": 0.2822085889570552, + "acc_norm_stderr": 0.03536117886664743 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3487654320987654, + "acc_stderr": 0.02651759772446501, + 
"acc_norm": 0.3487654320987654, + "acc_norm_stderr": 0.02651759772446501 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.34196891191709844, + "acc_stderr": 0.034234651001042844, + "acc_norm": 0.34196891191709844, + "acc_norm_stderr": 0.034234651001042844 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.22807017543859648, + "acc_stderr": 0.03947152782669415, + "acc_norm": 0.22807017543859648, + "acc_norm_stderr": 0.03947152782669415 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.26788990825688075, + "acc_stderr": 0.01898746225797865, + "acc_norm": 0.26788990825688075, + "acc_norm_stderr": 0.01898746225797865 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.04040610178208841, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.04040610178208841 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.369281045751634, + "acc_stderr": 0.02763417668960266, + "acc_norm": 0.369281045751634, + "acc_norm_stderr": 0.02763417668960266 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5702479338842975, + "acc_stderr": 0.045190820213197716, + "acc_norm": 0.5702479338842975, + "acc_norm_stderr": 0.045190820213197716 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3157894736842105, + "acc_stderr": 0.0378272898086547, + "acc_norm": 0.3157894736842105, + "acc_norm_stderr": 0.0378272898086547 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3006535947712418, + "acc_stderr": 0.018550634502952964, + "acc_norm": 0.3006535947712418, + "acc_norm_stderr": 0.018550634502952964 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.30851063829787234, 
+ "acc_stderr": 0.027553366165101355, + "acc_norm": 0.30851063829787234, + "acc_norm_stderr": 0.027553366165101355 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.30357142857142855, + "acc_stderr": 0.04364226155841044, + "acc_norm": 0.30357142857142855, + "acc_norm_stderr": 0.04364226155841044 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.03114144782353604, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.03114144782353604 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.2426470588235294, + "acc_stderr": 0.026040662474201278, + "acc_norm": 0.2426470588235294, + "acc_norm_stderr": 0.026040662474201278 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3836734693877551, + "acc_stderr": 0.031130880396235922, + "acc_norm": 0.3836734693877551, + "acc_norm_stderr": 0.031130880396235922 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.3291139240506329, + "acc_stderr": 0.030587326294702365, + "acc_norm": 0.3291139240506329, + "acc_norm_stderr": 0.030587326294702365 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.26988265971316816, + "acc_stderr": 0.011337381084250394, + "acc_norm": 0.26988265971316816, + "acc_norm_stderr": 0.011337381084250394 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.29901960784313725, + "acc_stderr": 0.03213325717373617, + "acc_norm": 0.29901960784313725, + "acc_norm_stderr": 
0.03213325717373617 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.30303030303030304, + "acc_stderr": 0.035886248000917075, + "acc_norm": 0.30303030303030304, + "acc_norm_stderr": 0.035886248000917075 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.27906976744186046, + "mc1_stderr": 0.01570210709062788, + "mc2": 0.46317433331488955, + "mc2_stderr": 0.015481757792093615 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.27744982290436837, + "acc_stderr": 0.015393630236605971, + "acc_norm": 0.345926800472255, + "acc_norm_stderr": 0.01635385341434757 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + 
"harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "rufjdk5480/ko-llama7b-merged", + "model_sha": "210250b684221c12bf9593c72f94e6b6ce5e12e7", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/saltlux/Ko-Llama3-Luxia-8B/result_2024-05-27 20:10:28.json b/saltlux/Ko-Llama3-Luxia-8B/result_2024-05-27 20:10:28.json new file mode 100644 index 0000000000000000000000000000000000000000..ff0111e22480e3f0367ec18b72f9621c6e7e383b --- /dev/null +++ b/saltlux/Ko-Llama3-Luxia-8B/result_2024-05-27 20:10:28.json @@ -0,0 +1,444 @@ +{ + "results": { + 
"harness|ko_arc_challenge|25": { + "acc": 0.4104095563139932, + "acc_stderr": 0.014374922192642666, + "acc_norm": 0.4598976109215017, + "acc_norm_stderr": 0.01456431885692485 + }, + "harness|ko_hellaswag|10": { + "acc": 0.41047600079665403, + "acc_stderr": 0.004909148239488292, + "acc_norm": 0.5488946425014938, + "acc_norm_stderr": 0.004965866098318175 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5730994152046783, + "acc_stderr": 0.03793620616529917, + "acc_norm": 0.5730994152046783, + "acc_norm_stderr": 0.03793620616529917 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6407766990291263, + "acc_stderr": 0.04750458399041697, + "acc_norm": 0.6407766990291263, + "acc_norm_stderr": 0.04750458399041697 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5440613026819924, + "acc_stderr": 0.017810403925435345, + "acc_norm": 0.5440613026819924, + "acc_norm_stderr": 0.017810403925435345 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45925925925925926, + "acc_stderr": 0.04304979692464244, + "acc_norm": 0.45925925925925926, + "acc_norm_stderr": 0.04304979692464244 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.46808510638297873, + "acc_stderr": 0.03261936918467382, + "acc_norm": 0.46808510638297873, + "acc_norm_stderr": 0.03261936918467382 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4457831325301205, + "acc_stderr": 0.038695433234721015, + "acc_norm": 0.4457831325301205, + "acc_norm_stderr": 0.038695433234721015 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5241157556270096, + "acc_stderr": 0.028365041542564584, + "acc_norm": 0.5241157556270096, + "acc_norm_stderr": 0.028365041542564584 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.452914798206278, + "acc_stderr": 0.03340867501923324, + "acc_norm": 0.452914798206278, + "acc_norm_stderr": 0.03340867501923324 + }, + 
"harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5877862595419847, + "acc_stderr": 0.04317171194870254, + "acc_norm": 0.5877862595419847, + "acc_norm_stderr": 0.04317171194870254 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6313131313131313, + "acc_stderr": 0.034373055019806184, + "acc_norm": 0.6313131313131313, + "acc_norm_stderr": 0.034373055019806184 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5517241379310345, + "acc_stderr": 0.04144311810878152, + "acc_norm": 0.5517241379310345, + "acc_norm_stderr": 0.04144311810878152 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.043364327079931785, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.043364327079931785 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4957983193277311, + "acc_stderr": 0.0324773433444811, + "acc_norm": 0.4957983193277311, + "acc_norm_stderr": 0.0324773433444811 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.48205128205128206, + "acc_stderr": 0.02533466708095497, + "acc_norm": 0.48205128205128206, + "acc_norm_stderr": 0.02533466708095497 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.04803752235190192, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.04803752235190192 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4187192118226601, + "acc_stderr": 0.034711928605184676, + "acc_norm": 0.4187192118226601, + "acc_norm_stderr": 
0.034711928605184676 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4774193548387097, + "acc_stderr": 0.028414985019707868, + "acc_norm": 0.4774193548387097, + "acc_norm_stderr": 0.028414985019707868 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7350427350427351, + "acc_stderr": 0.028911208802749472, + "acc_norm": 0.7350427350427351, + "acc_norm_stderr": 0.028911208802749472 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.46037735849056605, + "acc_stderr": 0.03067609659938918, + "acc_norm": 0.46037735849056605, + "acc_norm_stderr": 0.03067609659938918 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5727272727272728, + "acc_stderr": 0.04738198703545483, + "acc_norm": 0.5727272727272728, + "acc_norm_stderr": 0.04738198703545483 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3296296296296296, + "acc_stderr": 0.02866120111652459, + "acc_norm": 0.3296296296296296, + "acc_norm_stderr": 0.02866120111652459 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2781456953642384, + "acc_stderr": 0.03658603262763744, + "acc_norm": 0.2781456953642384, + "acc_norm_stderr": 0.03658603262763744 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6567164179104478, + "acc_stderr": 0.03357379665433431, + "acc_norm": 0.6567164179104478, + "acc_norm_stderr": 0.03357379665433431 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.43352601156069365, + "acc_stderr": 0.03778621079092056, + "acc_norm": 0.43352601156069365, + "acc_norm_stderr": 0.03778621079092056 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.328042328042328, + "acc_stderr": 0.024180497164376907, + "acc_norm": 0.328042328042328, + "acc_norm_stderr": 0.024180497164376907 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4305555555555556, + "acc_stderr": 0.041406856391115014, + "acc_norm": 0.4305555555555556, + "acc_norm_stderr": 0.041406856391115014 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 
0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.67, + "acc_stderr": 0.047258156262526066, + "acc_norm": 0.67, + "acc_norm_stderr": 0.047258156262526066 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5375722543352601, + "acc_stderr": 0.026842985519615375, + "acc_norm": 0.5375722543352601, + "acc_norm_stderr": 0.026842985519615375 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5030674846625767, + "acc_stderr": 0.03928297078179663, + "acc_norm": 0.5030674846625767, + "acc_norm_stderr": 0.03928297078179663 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.47530864197530864, + "acc_stderr": 0.02778680093142745, + "acc_norm": 0.47530864197530864, + "acc_norm_stderr": 0.02778680093142745 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5906735751295337, + "acc_stderr": 0.03548608168860806, + "acc_norm": 0.5906735751295337, + "acc_norm_stderr": 0.03548608168860806 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.04227054451232199, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.04227054451232199 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5889908256880734, + "acc_stderr": 0.02109505068727765, + "acc_norm": 0.5889908256880734, + "acc_norm_stderr": 0.02109505068727765 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.042163702135578345, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.042163702135578345 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5294117647058824, + "acc_stderr": 0.028580341065138303, + "acc_norm": 0.5294117647058824, + "acc_norm_stderr": 0.028580341065138303 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.5, + 
"acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6611570247933884, + "acc_stderr": 0.04320767807536669, + "acc_norm": 0.6611570247933884, + "acc_norm_stderr": 0.04320767807536669 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.506578947368421, + "acc_stderr": 0.040685900502249704, + "acc_norm": 0.506578947368421, + "acc_norm_stderr": 0.040685900502249704 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4264705882352941, + "acc_stderr": 0.02000791273935936, + "acc_norm": 0.4264705882352941, + "acc_norm_stderr": 0.02000791273935936 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.35815602836879434, + "acc_stderr": 0.028602085862759415, + "acc_norm": 0.35815602836879434, + "acc_norm_stderr": 0.028602085862759415 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.39285714285714285, + "acc_stderr": 0.04635550135609976, + "acc_norm": 0.39285714285714285, + "acc_norm_stderr": 0.04635550135609976 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.375, + "acc_stderr": 0.033016908987210894, + "acc_norm": 0.375, + "acc_norm_stderr": 0.033016908987210894 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.3016759776536313, + "acc_stderr": 0.015350767572220285, + "acc_norm": 0.3016759776536313, + "acc_norm_stderr": 0.015350767572220285 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.04960449637488583, + "acc_norm": 0.42, + "acc_norm_stderr": 0.04960449637488583 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.39705882352941174, + "acc_stderr": 0.029722152099280055, + "acc_norm": 0.39705882352941174, + "acc_norm_stderr": 0.029722152099280055 + }, + "harness|ko_mmlu_security_studies|5": { + 
"acc": 0.5591836734693878, + "acc_stderr": 0.03178419114175363, + "acc_norm": 0.5591836734693878, + "acc_norm_stderr": 0.03178419114175363 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6329113924050633, + "acc_stderr": 0.03137624072561619, + "acc_norm": 0.6329113924050633, + "acc_norm_stderr": 0.03137624072561619 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3428943937418514, + "acc_stderr": 0.012123463271585897, + "acc_norm": 0.3428943937418514, + "acc_norm_stderr": 0.012123463271585897 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5490196078431373, + "acc_stderr": 0.034924061041636124, + "acc_norm": 0.5490196078431373, + "acc_norm_stderr": 0.034924061041636124 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5878787878787879, + "acc_stderr": 0.03843566993588717, + "acc_norm": 0.5878787878787879, + "acc_norm_stderr": 0.03843566993588717 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.29253365973072215, + "mc1_stderr": 0.015925597445286165, + "mc2": 0.44782372526014474, + "mc2_stderr": 0.015380769346979286 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4946871310507674, + "acc_stderr": 0.01718938362722969, + "acc_norm": 0.5997638724911453, + "acc_norm_stderr": 0.01684469351050505 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + 
"harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + 
"harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "saltlux/Ko-Llama3-Luxia-8B", + "model_sha": "14533c55dc476c57220427bc65ce2539133b37db", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/saltlux/luxia-21.4b-alignment-v1.0/result_2024-07-26 17:38:41.json b/saltlux/luxia-21.4b-alignment-v1.0/result_2024-07-26 17:38:41.json new file mode 100644 index 0000000000000000000000000000000000000000..2940ae1f58aec6619b6a0fefa04aa755026be7ed --- /dev/null +++ b/saltlux/luxia-21.4b-alignment-v1.0/result_2024-07-26 17:38:41.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4308873720136519, + "acc_stderr": 0.01447113339264246, + "acc_norm": 0.47952218430034127, + "acc_norm_stderr": 0.014599131353035014 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4099780920135431, + "acc_stderr": 0.004908241354310214, + "acc_norm": 0.525592511451902, + "acc_norm_stderr": 0.004983240744101376 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5380116959064327, + "acc_stderr": 0.03823727092882307, + "acc_norm": 0.5380116959064327, + "acc_norm_stderr": 0.03823727092882307 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4854368932038835, + "acc_stderr": 0.04948637324026637, + "acc_norm": 0.4854368932038835, + "acc_norm_stderr": 0.04948637324026637 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.45338441890166026, + "acc_stderr": 0.017802087135850294, + "acc_norm": 0.45338441890166026, + "acc_norm_stderr": 0.017802087135850294 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4222222222222222, + "acc_stderr": 0.04266763404099582, + "acc_norm": 0.4222222222222222, + "acc_norm_stderr": 0.04266763404099582 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, 
+ "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4595744680851064, + "acc_stderr": 0.03257901482099835, + "acc_norm": 0.4595744680851064, + "acc_norm_stderr": 0.03257901482099835 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39759036144578314, + "acc_stderr": 0.038099730845402184, + "acc_norm": 0.39759036144578314, + "acc_norm_stderr": 0.038099730845402184 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.45980707395498394, + "acc_stderr": 0.028306190403305696, + "acc_norm": 0.45980707395498394, + "acc_norm_stderr": 0.028306190403305696 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.47533632286995514, + "acc_stderr": 0.03351695167652628, + "acc_norm": 0.47533632286995514, + "acc_norm_stderr": 0.03351695167652628 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48091603053435117, + "acc_stderr": 0.04382094705550988, + "acc_norm": 0.48091603053435117, + "acc_norm_stderr": 0.04382094705550988 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5656565656565656, + "acc_stderr": 0.03531505879359182, + "acc_norm": 0.5656565656565656, + "acc_norm_stderr": 0.03531505879359182 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4689655172413793, + "acc_stderr": 0.04158632762097828, + "acc_norm": 0.4689655172413793, + "acc_norm_stderr": 0.04158632762097828 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04690650298201943, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04690650298201943 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.542016806722689, + "acc_stderr": 0.03236361111951941, + "acc_norm": 0.542016806722689, + "acc_norm_stderr": 0.03236361111951941 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.44871794871794873, + "acc_stderr": 0.025217315184846482, + "acc_norm": 
0.44871794871794873, + "acc_norm_stderr": 0.025217315184846482 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.55, + "acc_stderr": 0.049999999999999996, + "acc_norm": 0.55, + "acc_norm_stderr": 0.049999999999999996 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.04803752235190193, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.04803752235190193 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39408866995073893, + "acc_stderr": 0.03438157967036545, + "acc_norm": 0.39408866995073893, + "acc_norm_stderr": 0.03438157967036545 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.45483870967741935, + "acc_stderr": 0.028327743091561056, + "acc_norm": 0.45483870967741935, + "acc_norm_stderr": 0.028327743091561056 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.688034188034188, + "acc_stderr": 0.030351527323344934, + "acc_norm": 0.688034188034188, + "acc_norm_stderr": 0.030351527323344934 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.45660377358490567, + "acc_stderr": 0.03065674869673943, + "acc_norm": 0.45660377358490567, + "acc_norm_stderr": 0.03065674869673943 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.44545454545454544, + "acc_stderr": 0.047605488214603246, + "acc_norm": 0.44545454545454544, + "acc_norm_stderr": 0.047605488214603246 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.37407407407407406, + "acc_stderr": 0.029502861128955293, + "acc_norm": 0.37407407407407406, + "acc_norm_stderr": 0.029502861128955293 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3708609271523179, + "acc_stderr": 0.03943966699183629, + "acc_norm": 0.3708609271523179, + "acc_norm_stderr": 0.03943966699183629 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6119402985074627, + "acc_stderr": 
0.03445789964362749, + "acc_norm": 0.6119402985074627, + "acc_norm_stderr": 0.03445789964362749 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.37572254335260113, + "acc_stderr": 0.036928207672648664, + "acc_norm": 0.37572254335260113, + "acc_norm_stderr": 0.036928207672648664 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.4470899470899471, + "acc_stderr": 0.025606723995777025, + "acc_norm": 0.4470899470899471, + "acc_norm_stderr": 0.025606723995777025 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.04076663253918567, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.04076663253918567 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5115606936416185, + "acc_stderr": 0.02691189868637792, + "acc_norm": 0.5115606936416185, + "acc_norm_stderr": 0.02691189868637792 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4723926380368098, + "acc_stderr": 0.0392237829061099, + "acc_norm": 0.4723926380368098, + "acc_norm_stderr": 0.0392237829061099 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.027777777777777804, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.027777777777777804 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.48186528497409326, + "acc_stderr": 0.036060650018329185, + "acc_norm": 0.48186528497409326, + "acc_norm_stderr": 0.036060650018329185 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.35964912280701755, 
+ "acc_stderr": 0.04514496132873633, + "acc_norm": 0.35964912280701755, + "acc_norm_stderr": 0.04514496132873633 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.4972477064220184, + "acc_stderr": 0.02143699835976533, + "acc_norm": 0.4972477064220184, + "acc_norm_stderr": 0.02143699835976533 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4523809523809524, + "acc_stderr": 0.044518079590553275, + "acc_norm": 0.4523809523809524, + "acc_norm_stderr": 0.044518079590553275 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4803921568627451, + "acc_stderr": 0.028607893699576066, + "acc_norm": 0.4803921568627451, + "acc_norm_stderr": 0.028607893699576066 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.55, + "acc_stderr": 0.05, + "acc_norm": 0.55, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6528925619834711, + "acc_stderr": 0.04345724570292535, + "acc_norm": 0.6528925619834711, + "acc_norm_stderr": 0.04345724570292535 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4342105263157895, + "acc_stderr": 0.040335656678483205, + "acc_norm": 0.4342105263157895, + "acc_norm_stderr": 0.040335656678483205 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4215686274509804, + "acc_stderr": 0.019977422600227467, + "acc_norm": 0.4215686274509804, + "acc_norm_stderr": 0.019977422600227467 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3829787234042553, + "acc_stderr": 0.02899908090480618, + "acc_norm": 0.3829787234042553, + "acc_norm_stderr": 0.02899908090480618 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.38392857142857145, + "acc_stderr": 0.046161430750285455, + "acc_norm": 0.38392857142857145, + "acc_norm_stderr": 0.046161430750285455 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4027777777777778, + "acc_stderr": 0.03344887382997866, + "acc_norm": 0.4027777777777778, + "acc_norm_stderr": 0.03344887382997866 + }, + "harness|ko_mmlu_moral_scenarios|5": 
{ + "acc": 0.33854748603351953, + "acc_stderr": 0.01582670009648135, + "acc_norm": 0.33854748603351953, + "acc_norm_stderr": 0.01582670009648135 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.74, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.74, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3639705882352941, + "acc_stderr": 0.029227192460032025, + "acc_norm": 0.3639705882352941, + "acc_norm_stderr": 0.029227192460032025 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5714285714285714, + "acc_stderr": 0.03168091161233882, + "acc_norm": 0.5714285714285714, + "acc_norm_stderr": 0.03168091161233882 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5907172995780591, + "acc_stderr": 0.03200704183359591, + "acc_norm": 0.5907172995780591, + "acc_norm_stderr": 0.03200704183359591 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.34810951760104303, + "acc_stderr": 0.012166738993698188, + "acc_norm": 0.34810951760104303, + "acc_norm_stderr": 0.012166738993698188 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.49019607843137253, + "acc_stderr": 0.03508637358630573, + "acc_norm": 0.49019607843137253, + "acc_norm_stderr": 0.03508637358630573 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5515151515151515, + "acc_stderr": 0.038835659779569286, + "acc_norm": 0.5515151515151515, + "acc_norm_stderr": 0.038835659779569286 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.40269277845777235, + "mc1_stderr": 0.017168830935187226, + "mc2": 0.6069229907750474, + "mc2_stderr": 0.01613240933775233 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4049586776859504, + "acc_stderr": 0.016876941165045612, + "acc_norm": 0.43565525383707204, + "acc_norm_stderr": 
0.01704741522947634 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + 
"harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "saltlux/luxia-21.4b-alignment-v1.0", + "model_sha": "87d5673e6d9f60462f195e9414a0bf6874c89ceb", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/saltlux/luxia-21.4b-alignment-v1.2/result_2024-07-27 02:50:35.json b/saltlux/luxia-21.4b-alignment-v1.2/result_2024-07-27 02:50:35.json new file mode 100644 index 0000000000000000000000000000000000000000..67423e71740d4ba8f3dd7d7c2af0b64a7ee1d576 --- /dev/null +++ b/saltlux/luxia-21.4b-alignment-v1.2/result_2024-07-27 02:50:35.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4121160409556314, + "acc_stderr": 0.014383915302225403, + "acc_norm": 0.4564846416382253, + "acc_norm_stderr": 0.01455594976049644 + }, + "harness|ko_hellaswag|10": { + "acc": 0.39613622784305913, + "acc_stderr": 0.004880937933163283, + "acc_norm": 0.5039832702648874, + "acc_norm_stderr": 0.004989623068778798 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.52046783625731, + "acc_stderr": 0.038316105328219316, + "acc_norm": 0.52046783625731, + "acc_norm_stderr": 0.038316105328219316 + }, + 
"harness|ko_mmlu_management|5": { + "acc": 0.4854368932038835, + "acc_stderr": 0.04948637324026637, + "acc_norm": 0.4854368932038835, + "acc_norm_stderr": 0.04948637324026637 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.44189016602809705, + "acc_stderr": 0.01775880053421441, + "acc_norm": 0.44189016602809705, + "acc_norm_stderr": 0.01775880053421441 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3851851851851852, + "acc_stderr": 0.042039210401562783, + "acc_norm": 0.3851851851851852, + "acc_norm_stderr": 0.042039210401562783 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4297872340425532, + "acc_stderr": 0.03236214467715564, + "acc_norm": 0.4297872340425532, + "acc_norm_stderr": 0.03236214467715564 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42168674698795183, + "acc_stderr": 0.03844453181770917, + "acc_norm": 0.42168674698795183, + "acc_norm_stderr": 0.03844453181770917 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4919614147909968, + "acc_stderr": 0.028394421370984545, + "acc_norm": 0.4919614147909968, + "acc_norm_stderr": 0.028394421370984545 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4663677130044843, + "acc_stderr": 0.033481800170603065, + "acc_norm": 0.4663677130044843, + "acc_norm_stderr": 0.033481800170603065 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4580152671755725, + "acc_stderr": 0.04369802690578757, + "acc_norm": 0.4580152671755725, + "acc_norm_stderr": 0.04369802690578757 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.44, + "acc_stderr": 0.0498887651569859, + "acc_norm": 0.44, + "acc_norm_stderr": 0.0498887651569859 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5959595959595959, + "acc_stderr": 0.034961309720561266, + "acc_norm": 0.5959595959595959, + "acc_norm_stderr": 0.034961309720561266 + }, + 
"harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.46206896551724136, + "acc_stderr": 0.04154659671707548, + "acc_norm": 0.46206896551724136, + "acc_norm_stderr": 0.04154659671707548 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.30392156862745096, + "acc_stderr": 0.045766654032077636, + "acc_norm": 0.30392156862745096, + "acc_norm_stderr": 0.045766654032077636 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5252100840336135, + "acc_stderr": 0.032437180551374095, + "acc_norm": 0.5252100840336135, + "acc_norm_stderr": 0.032437180551374095 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4641025641025641, + "acc_stderr": 0.025285585990017834, + "acc_norm": 0.4641025641025641, + "acc_norm_stderr": 0.025285585990017834 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5370370370370371, + "acc_stderr": 0.04820403072760627, + "acc_norm": 0.5370370370370371, + "acc_norm_stderr": 0.04820403072760627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4187192118226601, + "acc_stderr": 0.03471192860518468, + "acc_norm": 0.4187192118226601, + "acc_norm_stderr": 0.03471192860518468 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.41935483870967744, + "acc_stderr": 0.02807158890109185, + "acc_norm": 0.41935483870967744, + "acc_norm_stderr": 0.02807158890109185 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6709401709401709, + "acc_stderr": 0.030782321577688173, + "acc_norm": 0.6709401709401709, + "acc_norm_stderr": 0.030782321577688173 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4377358490566038, + "acc_stderr": 0.03053333843046751, + "acc_norm": 0.4377358490566038, 
+ "acc_norm_stderr": 0.03053333843046751 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.42727272727272725, + "acc_stderr": 0.04738198703545483, + "acc_norm": 0.42727272727272725, + "acc_norm_stderr": 0.04738198703545483 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3851851851851852, + "acc_stderr": 0.029670906124630882, + "acc_norm": 0.3851851851851852, + "acc_norm_stderr": 0.029670906124630882 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3509933774834437, + "acc_stderr": 0.03896981964257375, + "acc_norm": 0.3509933774834437, + "acc_norm_stderr": 0.03896981964257375 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6417910447761194, + "acc_stderr": 0.03390393042268815, + "acc_norm": 0.6417910447761194, + "acc_norm_stderr": 0.03390393042268815 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3815028901734104, + "acc_stderr": 0.037038511930995215, + "acc_norm": 0.3815028901734104, + "acc_norm_stderr": 0.037038511930995215 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.4708994708994709, + "acc_stderr": 0.025707658614154954, + "acc_norm": 0.4708994708994709, + "acc_norm_stderr": 0.025707658614154954 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.375, + "acc_stderr": 0.04048439222695598, + "acc_norm": 0.375, + "acc_norm_stderr": 0.04048439222695598 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.56, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.56, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5289017341040463, + "acc_stderr": 0.02687408588351835, + "acc_norm": 0.5289017341040463, + "acc_norm_stderr": 0.02687408588351835 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.49079754601226994, + "acc_stderr": 0.039277056007874414, + "acc_norm": 
0.49079754601226994, + "acc_norm_stderr": 0.039277056007874414 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4537037037037037, + "acc_stderr": 0.0277012284685426, + "acc_norm": 0.4537037037037037, + "acc_norm_stderr": 0.0277012284685426 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.47150259067357514, + "acc_stderr": 0.03602573571288441, + "acc_norm": 0.47150259067357514, + "acc_norm_stderr": 0.03602573571288441 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3684210526315789, + "acc_stderr": 0.04537815354939391, + "acc_norm": 0.3684210526315789, + "acc_norm_stderr": 0.04537815354939391 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.4935779816513762, + "acc_stderr": 0.021435554820013077, + "acc_norm": 0.4935779816513762, + "acc_norm_stderr": 0.021435554820013077 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4365079365079365, + "acc_stderr": 0.04435932892851466, + "acc_norm": 0.4365079365079365, + "acc_norm_stderr": 0.04435932892851466 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4869281045751634, + "acc_stderr": 0.028620130800700246, + "acc_norm": 0.4869281045751634, + "acc_norm_stderr": 0.028620130800700246 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6446280991735537, + "acc_stderr": 0.0436923632657398, + "acc_norm": 0.6446280991735537, + "acc_norm_stderr": 0.0436923632657398 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3815789473684211, + "acc_stderr": 0.039531733777491924, + "acc_norm": 0.3815789473684211, + "acc_norm_stderr": 0.039531733777491924 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4035947712418301, + "acc_stderr": 
0.019848280168401157, + "acc_norm": 0.4035947712418301, + "acc_norm_stderr": 0.019848280168401157 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3723404255319149, + "acc_stderr": 0.02883892147125146, + "acc_norm": 0.3723404255319149, + "acc_norm_stderr": 0.02883892147125146 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.36607142857142855, + "acc_stderr": 0.0457237235873743, + "acc_norm": 0.36607142857142855, + "acc_norm_stderr": 0.0457237235873743 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.38425925925925924, + "acc_stderr": 0.03317354514310742, + "acc_norm": 0.38425925925925924, + "acc_norm_stderr": 0.03317354514310742 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.358659217877095, + "acc_stderr": 0.016040454426164478, + "acc_norm": 0.358659217877095, + "acc_norm_stderr": 0.016040454426164478 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.7, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.7, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3602941176470588, + "acc_stderr": 0.029163128570670733, + "acc_norm": 0.3602941176470588, + "acc_norm_stderr": 0.029163128570670733 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5714285714285714, + "acc_stderr": 0.03168091161233882, + "acc_norm": 0.5714285714285714, + "acc_norm_stderr": 0.03168091161233882 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5569620253164557, + "acc_stderr": 0.032335327775334835, + "acc_norm": 0.5569620253164557, + "acc_norm_stderr": 0.032335327775334835 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3389830508474576, + "acc_stderr": 0.012089941857584474, + "acc_norm": 0.3389830508474576, + "acc_norm_stderr": 0.012089941857584474 + }, + 
"harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.46078431372549017, + "acc_stderr": 0.03498501649369527, + "acc_norm": 0.46078431372549017, + "acc_norm_stderr": 0.03498501649369527 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5212121212121212, + "acc_stderr": 0.03900828913737302, + "acc_norm": 0.5212121212121212, + "acc_norm_stderr": 0.03900828913737302 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.4039167686658507, + "mc1_stderr": 0.01717727682258428, + "mc2": 0.5961107238961795, + "mc2_stderr": 0.016391692758123674 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.42857142857142855, + "acc_stderr": 0.017014038119297473, + "acc_norm": 0.4557260920897285, + "acc_norm_stderr": 0.017122829143292644 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 
1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "saltlux/luxia-21.4b-alignment-v1.2", + "model_sha": "eed12b5574fa49cc81e57a88aff24c08c13721c0", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/sanghwa-na/llama2-13b.kor.v1/result_2023-10-30 07:41:35.json b/sanghwa-na/llama2-13b.kor.v1/result_2023-10-30 07:41:35.json new file mode 100644 index 
0000000000000000000000000000000000000000..f212d5b6a323c61310e324a08bad8dd040783bc9 --- /dev/null +++ b/sanghwa-na/llama2-13b.kor.v1/result_2023-10-30 07:41:35.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3225255972696246, + "acc_stderr": 0.01365998089427738, + "acc_norm": 0.3796928327645051, + "acc_norm_stderr": 0.014182119866974872 + }, + "harness|ko_hellaswag|10": { + "acc": 0.36168094005178253, + "acc_stderr": 0.004795051037917731, + "acc_norm": 0.4652459669388568, + "acc_norm_stderr": 0.004977713073899333 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.0381107966983353, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.0381107966983353 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.44660194174757284, + "acc_stderr": 0.04922424153458933, + "acc_norm": 0.44660194174757284, + "acc_norm_stderr": 0.04922424153458933 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4878671775223499, + "acc_stderr": 0.01787469866749135, + "acc_norm": 0.4878671775223499, + "acc_norm_stderr": 0.01787469866749135 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3925925925925926, + "acc_stderr": 0.04218506215368879, + "acc_norm": 0.3925925925925926, + "acc_norm_stderr": 0.04218506215368879 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.37446808510638296, + "acc_stderr": 0.031639106653672915, + "acc_norm": 0.37446808510638296, + "acc_norm_stderr": 0.031639106653672915 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3313253012048193, + "acc_stderr": 0.036643147772880864, + "acc_norm": 0.3313253012048193, + "acc_norm_stderr": 0.036643147772880864 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.45016077170418006, + "acc_stderr": 0.02825666072336019, + "acc_norm": 0.45016077170418006, + "acc_norm_stderr": 
0.02825666072336019 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3721973094170404, + "acc_stderr": 0.03244305283008731, + "acc_norm": 0.3721973094170404, + "acc_norm_stderr": 0.03244305283008731 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4198473282442748, + "acc_stderr": 0.043285772152629715, + "acc_norm": 0.4198473282442748, + "acc_norm_stderr": 0.043285772152629715 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.4494949494949495, + "acc_stderr": 0.03544132491947969, + "acc_norm": 0.4494949494949495, + "acc_norm_stderr": 0.03544132491947969 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.38620689655172413, + "acc_stderr": 0.04057324734419035, + "acc_norm": 0.38620689655172413, + "acc_norm_stderr": 0.04057324734419035 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.041583075330832865, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.041583075330832865 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.36554621848739494, + "acc_stderr": 0.03128217706368461, + "acc_norm": 0.36554621848739494, + "acc_norm_stderr": 0.03128217706368461 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3923076923076923, + "acc_stderr": 0.024756000382130945, + "acc_norm": 0.3923076923076923, + "acc_norm_stderr": 0.024756000382130945 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.04832853553437056, + "acc_norm": 0.49074074074074076, + 
"acc_norm_stderr": 0.04832853553437056 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3793103448275862, + "acc_stderr": 0.03413963805906235, + "acc_norm": 0.3793103448275862, + "acc_norm_stderr": 0.03413963805906235 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4096774193548387, + "acc_stderr": 0.027976054915347354, + "acc_norm": 0.4096774193548387, + "acc_norm_stderr": 0.027976054915347354 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.03255326307272487, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.03255326307272487 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.36981132075471695, + "acc_stderr": 0.029711421880107922, + "acc_norm": 0.36981132075471695, + "acc_norm_stderr": 0.029711421880107922 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4909090909090909, + "acc_stderr": 0.0478833976870286, + "acc_norm": 0.4909090909090909, + "acc_norm_stderr": 0.0478833976870286 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.27037037037037037, + "acc_stderr": 0.027080372815145658, + "acc_norm": 0.27037037037037037, + "acc_norm_stderr": 0.027080372815145658 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + "acc_stderr": 0.03684881521389023, + "acc_norm": 0.2847682119205298, + "acc_norm_stderr": 0.03684881521389023 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5422885572139303, + "acc_stderr": 0.035228658640995975, + "acc_norm": 0.5422885572139303, + "acc_norm_stderr": 0.035228658640995975 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3063583815028902, + "acc_stderr": 0.035149425512674394, + "acc_norm": 0.3063583815028902, + "acc_norm_stderr": 0.035149425512674394 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2830687830687831, + "acc_stderr": 0.023201392938194978, + "acc_norm": 0.2830687830687831, + "acc_norm_stderr": 0.023201392938194978 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 
0.3333333333333333, + "acc_stderr": 0.039420826399272135, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.039420826399272135 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384739, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384739 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.55, + "acc_stderr": 0.05, + "acc_norm": 0.55, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.42196531791907516, + "acc_stderr": 0.02658923114217426, + "acc_norm": 0.42196531791907516, + "acc_norm_stderr": 0.02658923114217426 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.37423312883435583, + "acc_stderr": 0.03802068102899615, + "acc_norm": 0.37423312883435583, + "acc_norm_stderr": 0.03802068102899615 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.02712511551316686, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.02712511551316686 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.41450777202072536, + "acc_stderr": 0.03555300319557672, + "acc_norm": 0.41450777202072536, + "acc_norm_stderr": 0.03555300319557672 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3157894736842105, + "acc_stderr": 0.043727482902780085, + "acc_norm": 0.3157894736842105, + "acc_norm_stderr": 0.043727482902780085 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.41651376146788993, + "acc_stderr": 0.021136376504030878, + "acc_norm": 0.41651376146788993, + "acc_norm_stderr": 0.021136376504030878 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.29365079365079366, + "acc_stderr": 0.040735243221471255, + "acc_norm": 0.29365079365079366, + "acc_norm_stderr": 0.040735243221471255 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 
0.42810457516339867, + "acc_stderr": 0.02833239748366427, + "acc_norm": 0.42810457516339867, + "acc_norm_stderr": 0.02833239748366427 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6115702479338843, + "acc_stderr": 0.04449270350068383, + "acc_norm": 0.6115702479338843, + "acc_norm_stderr": 0.04449270350068383 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4605263157894737, + "acc_stderr": 0.04056242252249033, + "acc_norm": 0.4605263157894737, + "acc_norm_stderr": 0.04056242252249033 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3415032679738562, + "acc_stderr": 0.019184639328092487, + "acc_norm": 0.3415032679738562, + "acc_norm_stderr": 0.019184639328092487 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2978723404255319, + "acc_stderr": 0.027281608344469414, + "acc_norm": 0.2978723404255319, + "acc_norm_stderr": 0.027281608344469414 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.26785714285714285, + "acc_stderr": 0.042032772914677614, + "acc_norm": 0.26785714285714285, + "acc_norm_stderr": 0.042032772914677614 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.031415546294025445, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.031415546294025445 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + 
"harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3161764705882353, + "acc_stderr": 0.02824568739146291, + "acc_norm": 0.3161764705882353, + "acc_norm_stderr": 0.02824568739146291 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3224489795918367, + "acc_stderr": 0.029923100563683913, + "acc_norm": 0.3224489795918367, + "acc_norm_stderr": 0.029923100563683913 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.4472573839662447, + "acc_stderr": 0.03236564251614192, + "acc_norm": 0.4472573839662447, + "acc_norm_stderr": 0.03236564251614192 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.30247718383311606, + "acc_stderr": 0.011731524234165703, + "acc_norm": 0.30247718383311606, + "acc_norm_stderr": 0.011731524234165703 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.35294117647058826, + "acc_stderr": 0.033540924375915195, + "acc_norm": 0.35294117647058826, + "acc_norm_stderr": 0.033540924375915195 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4484848484848485, + "acc_stderr": 0.038835659779569286, + "acc_norm": 0.4484848484848485, + "acc_norm_stderr": 0.038835659779569286 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2913096695226438, + "mc1_stderr": 0.01590598704818483, + "mc2": 0.4489814958425013, + "mc2_stderr": 0.015305629142879413 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.45336481700118064, + "acc_stderr": 0.01711541822522687, + "acc_norm": 0.564344746162928, + "acc_norm_stderr": 0.017047415229476316 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + 
"harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + 
"harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "sanghwa-na/llama2-13b.kor.v1", + "model_sha": "c9b4aa22c6fe71a0e0deb30dc58dc40ad83637db", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/sanghwa-na/llama2-13b.kor.v2/result_2023-11-02 10:05:17.json b/sanghwa-na/llama2-13b.kor.v2/result_2023-11-02 10:05:17.json new file mode 100644 index 0000000000000000000000000000000000000000..152c2b928d1d31849e29a75611d9a7ccff9ae540 --- /dev/null +++ b/sanghwa-na/llama2-13b.kor.v2/result_2023-11-02 10:05:17.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3916382252559727, + "acc_stderr": 0.014264122124938215, + "acc_norm": 0.44197952218430037, + "acc_norm_stderr": 0.014512682523128347 + }, + "harness|ko_hellaswag|10": { + "acc": 0.41206930890260907, + "acc_stderr": 0.004912015369160081, + "acc_norm": 0.5485958972316272, + "acc_norm_stderr": 0.004966158142645405 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.52046783625731, + "acc_stderr": 0.0383161053282193, + "acc_norm": 0.52046783625731, + "acc_norm_stderr": 0.0383161053282193 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.49514563106796117, + "acc_stderr": 0.049505043821289195, + "acc_norm": 0.49514563106796117, + "acc_norm_stderr": 0.049505043821289195 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5108556832694764, + "acc_stderr": 0.017875748840242407, + "acc_norm": 0.5108556832694764, + "acc_norm_stderr": 0.017875748840242407 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.042925967182569816, + "acc_norm": 
0.4444444444444444, + "acc_norm_stderr": 0.042925967182569816 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3446808510638298, + "acc_stderr": 0.031068985963122155, + "acc_norm": 0.3446808510638298, + "acc_norm_stderr": 0.031068985963122155 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3855421686746988, + "acc_stderr": 0.037891344246115476, + "acc_norm": 0.3855421686746988, + "acc_norm_stderr": 0.037891344246115476 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.48231511254019294, + "acc_stderr": 0.02838032284907713, + "acc_norm": 0.48231511254019294, + "acc_norm_stderr": 0.02838032284907713 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.452914798206278, + "acc_stderr": 0.033408675019233246, + "acc_norm": 0.452914798206278, + "acc_norm_stderr": 0.033408675019233246 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4732824427480916, + "acc_stderr": 0.04379024936553893, + "acc_norm": 0.4732824427480916, + "acc_norm_stderr": 0.04379024936553893 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.4898989898989899, + "acc_stderr": 0.035616254886737454, + "acc_norm": 0.4898989898989899, + "acc_norm_stderr": 0.035616254886737454 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.04104269211806232, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.04104269211806232 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.19607843137254902, + "acc_stderr": 0.03950581861179963, + "acc_norm": 0.19607843137254902, + "acc_norm_stderr": 0.03950581861179963 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.41596638655462187, + "acc_stderr": 
0.03201650100739615, + "acc_norm": 0.41596638655462187, + "acc_norm_stderr": 0.03201650100739615 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3974358974358974, + "acc_stderr": 0.024811920017903836, + "acc_norm": 0.3974358974358974, + "acc_norm_stderr": 0.024811920017903836 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.53, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.53, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5092592592592593, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.5092592592592593, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.37438423645320196, + "acc_stderr": 0.03405155380561953, + "acc_norm": 0.37438423645320196, + "acc_norm_stderr": 0.03405155380561953 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.46774193548387094, + "acc_stderr": 0.028384747788813326, + "acc_norm": 0.46774193548387094, + "acc_norm_stderr": 0.028384747788813326 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6837606837606838, + "acc_stderr": 0.030463656747340265, + "acc_norm": 0.6837606837606838, + "acc_norm_stderr": 0.030463656747340265 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.42641509433962266, + "acc_stderr": 0.030437794342983045, + "acc_norm": 0.42641509433962266, + "acc_norm_stderr": 0.030437794342983045 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4636363636363636, + "acc_stderr": 0.047764491623961985, + "acc_norm": 0.4636363636363636, + "acc_norm_stderr": 0.047764491623961985 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2740740740740741, + "acc_stderr": 0.027195934804085626, + "acc_norm": 0.2740740740740741, + "acc_norm_stderr": 0.027195934804085626 + }, + 
"harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + "acc_stderr": 0.03734535676787198, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.03734535676787198 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5422885572139303, + "acc_stderr": 0.03522865864099598, + "acc_norm": 0.5422885572139303, + "acc_norm_stderr": 0.03522865864099598 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3699421965317919, + "acc_stderr": 0.036812296333943194, + "acc_norm": 0.3699421965317919, + "acc_norm_stderr": 0.036812296333943194 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30158730158730157, + "acc_stderr": 0.023636975996101806, + "acc_norm": 0.30158730158730157, + "acc_norm_stderr": 0.023636975996101806 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3402777777777778, + "acc_stderr": 0.039621355734862175, + "acc_norm": 0.3402777777777778, + "acc_norm_stderr": 0.039621355734862175 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237101, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237101 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4479768786127168, + "acc_stderr": 0.026772990653361823, + "acc_norm": 0.4479768786127168, + "acc_norm_stderr": 0.026772990653361823 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4171779141104294, + "acc_stderr": 0.03874102859818081, + "acc_norm": 0.4171779141104294, + "acc_norm_stderr": 0.03874102859818081 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4567901234567901, + "acc_stderr": 0.027716661650194045, + "acc_norm": 0.4567901234567901, + "acc_norm_stderr": 0.027716661650194045 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + 
"harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.46632124352331605, + "acc_stderr": 0.036002440698671784, + "acc_norm": 0.46632124352331605, + "acc_norm_stderr": 0.036002440698671784 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.03999423879281335, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.03999423879281335 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.44587155963302755, + "acc_stderr": 0.02131133500970858, + "acc_norm": 0.44587155963302755, + "acc_norm_stderr": 0.02131133500970858 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.0393253768039287, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.0393253768039287 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4542483660130719, + "acc_stderr": 0.028509807802626567, + "acc_norm": 0.4542483660130719, + "acc_norm_stderr": 0.028509807802626567 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6198347107438017, + "acc_stderr": 0.04431324501968432, + "acc_norm": 0.6198347107438017, + "acc_norm_stderr": 0.04431324501968432 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4605263157894737, + "acc_stderr": 0.04056242252249032, + "acc_norm": 0.4605263157894737, + "acc_norm_stderr": 0.04056242252249032 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3464052287581699, + "acc_stderr": 0.01924978569171721, + "acc_norm": 0.3464052287581699, + "acc_norm_stderr": 0.01924978569171721 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.30141843971631205, + "acc_stderr": 0.02737412888263115, + "acc_norm": 0.30141843971631205, + "acc_norm_stderr": 0.02737412888263115 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.23214285714285715, + "acc_stderr": 0.04007341809755806, + "acc_norm": 
0.23214285714285715, + "acc_norm_stderr": 0.04007341809755806 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.032568505702936464, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.032568505702936464 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.2610294117647059, + "acc_stderr": 0.026679252270103124, + "acc_norm": 0.2610294117647059, + "acc_norm_stderr": 0.026679252270103124 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.39183673469387753, + "acc_stderr": 0.03125127591089165, + "acc_norm": 0.39183673469387753, + "acc_norm_stderr": 0.03125127591089165 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5021097046413502, + "acc_stderr": 0.032546938018020076, + "acc_norm": 0.5021097046413502, + "acc_norm_stderr": 0.032546938018020076 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3089960886571056, + "acc_stderr": 0.01180172977723924, + "acc_norm": 0.3089960886571056, + "acc_norm_stderr": 0.01180172977723924 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.45098039215686275, + "acc_stderr": 0.03492406104163613, + "acc_norm": 0.45098039215686275, + "acc_norm_stderr": 0.03492406104163613 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.47878787878787876, + "acc_stderr": 0.03900828913737302, + "acc_norm": 0.47878787878787876, + "acc_norm_stderr": 0.03900828913737302 + }, + "harness|ko_truthfulqa_mc|0": { + 
"mc1": 0.26805385556915545, + "mc1_stderr": 0.015506204722834562, + "mc2": 0.4302371643016868, + "mc2_stderr": 0.015064732208462774 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4085005903187721, + "acc_stderr": 0.016900062879427115, + "acc_norm": 0.5312868949232585, + "acc_norm_stderr": 0.017156666859785476 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "sanghwa-na/llama2-13b.kor.v2", + "model_sha": "677c1badec4e73f7c98b8a8d2bab178a2330a330", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/sanghwa-na/llama2-13b.kor/result_2023-10-27 14:03:39.json b/sanghwa-na/llama2-13b.kor/result_2023-10-27 14:03:39.json new file mode 100644 index 0000000000000000000000000000000000000000..cca05930eca5998e31834209e645449e85c9d8a1 --- /dev/null +++ b/sanghwa-na/llama2-13b.kor/result_2023-10-27 14:03:39.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.35665529010238906, + "acc_stderr": 0.013998056902620196, + "acc_norm": 0.3873720136518771, + "acc_norm_stderr": 0.014235872487909874 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3745269866560446, + "acc_stderr": 
0.004830113797327052, + "acc_norm": 0.48645688109938257, + "acc_norm_stderr": 0.004987950663406551 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.47368421052631576, + "acc_stderr": 0.038295098689947286, + "acc_norm": 0.47368421052631576, + "acc_norm_stderr": 0.038295098689947286 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4368932038834951, + "acc_stderr": 0.04911147107365777, + "acc_norm": 0.4368932038834951, + "acc_norm_stderr": 0.04911147107365777 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4955300127713921, + "acc_stderr": 0.017879248970584398, + "acc_norm": 0.4955300127713921, + "acc_norm_stderr": 0.017879248970584398 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.04171654161354543, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.04171654161354543 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206824, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206824 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3446808510638298, + "acc_stderr": 0.03106898596312215, + "acc_norm": 0.3446808510638298, + "acc_norm_stderr": 0.03106898596312215 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3493975903614458, + "acc_stderr": 0.0371172519074075, + "acc_norm": 0.3493975903614458, + "acc_norm_stderr": 0.0371172519074075 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.45980707395498394, + "acc_stderr": 0.028306190403305696, + "acc_norm": 0.45980707395498394, + "acc_norm_stderr": 0.028306190403305696 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.39461883408071746, + "acc_stderr": 0.03280400504755291, + "acc_norm": 0.39461883408071746, + "acc_norm_stderr": 0.03280400504755291 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4198473282442748, + "acc_stderr": 0.04328577215262972, + "acc_norm": 0.4198473282442748, + "acc_norm_stderr": 0.04328577215262972 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.28, + 
"acc_stderr": 0.04512608598542129, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542129 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.4898989898989899, + "acc_stderr": 0.035616254886737454, + "acc_norm": 0.4898989898989899, + "acc_norm_stderr": 0.035616254886737454 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4068965517241379, + "acc_stderr": 0.040937939812662374, + "acc_norm": 0.4068965517241379, + "acc_norm_stderr": 0.040937939812662374 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.18627450980392157, + "acc_stderr": 0.03873958714149352, + "acc_norm": 0.18627450980392157, + "acc_norm_stderr": 0.03873958714149352 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.39915966386554624, + "acc_stderr": 0.03181110032413926, + "acc_norm": 0.39915966386554624, + "acc_norm_stderr": 0.03181110032413926 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3769230769230769, + "acc_stderr": 0.024570975364225995, + "acc_norm": 0.3769230769230769, + "acc_norm_stderr": 0.024570975364225995 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.04826217294139894, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.04826217294139894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3842364532019704, + "acc_stderr": 0.03422398565657551, + "acc_norm": 0.3842364532019704, + "acc_norm_stderr": 0.03422398565657551 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.45806451612903226, + "acc_stderr": 0.028343787250540632, + "acc_norm": 0.45806451612903226, + "acc_norm_stderr": 0.028343787250540632 + }, + 
"harness|ko_mmlu_marketing|5": { + "acc": 0.6239316239316239, + "acc_stderr": 0.03173393632969482, + "acc_norm": 0.6239316239316239, + "acc_norm_stderr": 0.03173393632969482 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.36981132075471695, + "acc_stderr": 0.029711421880107926, + "acc_norm": 0.36981132075471695, + "acc_norm_stderr": 0.029711421880107926 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.27037037037037037, + "acc_stderr": 0.027080372815145654, + "acc_norm": 0.27037037037037037, + "acc_norm_stderr": 0.027080372815145654 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.03802039760107903, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.03802039760107903 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.47761194029850745, + "acc_stderr": 0.035319879302087305, + "acc_norm": 0.47761194029850745, + "acc_norm_stderr": 0.035319879302087305 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3699421965317919, + "acc_stderr": 0.03681229633394319, + "acc_norm": 0.3699421965317919, + "acc_norm_stderr": 0.03681229633394319 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.28835978835978837, + "acc_stderr": 0.023330654054535896, + "acc_norm": 0.28835978835978837, + "acc_norm_stderr": 0.023330654054535896 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3194444444444444, + "acc_stderr": 0.03899073687357335, + "acc_norm": 0.3194444444444444, + "acc_norm_stderr": 0.03899073687357335 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384739, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384739 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.54, + 
"acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4624277456647399, + "acc_stderr": 0.026842985519615375, + "acc_norm": 0.4624277456647399, + "acc_norm_stderr": 0.026842985519615375 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3803680981595092, + "acc_stderr": 0.03814269893261837, + "acc_norm": 0.3803680981595092, + "acc_norm_stderr": 0.03814269893261837 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.41975308641975306, + "acc_stderr": 0.027460099557005138, + "acc_norm": 0.41975308641975306, + "acc_norm_stderr": 0.027460099557005138 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.40932642487046633, + "acc_stderr": 0.03548608168860807, + "acc_norm": 0.40932642487046633, + "acc_norm_stderr": 0.03548608168860807 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.042270544512321984, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.042270544512321984 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.4055045871559633, + "acc_stderr": 0.02105099799189684, + "acc_norm": 0.4055045871559633, + "acc_norm_stderr": 0.02105099799189684 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.25396825396825395, + "acc_stderr": 0.038932596106046734, + "acc_norm": 0.25396825396825395, + "acc_norm_stderr": 0.038932596106046734 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4117647058823529, + "acc_stderr": 0.02818059632825929, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.02818059632825929 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6694214876033058, + "acc_stderr": 0.04294340845212094, + 
"acc_norm": 0.6694214876033058, + "acc_norm_stderr": 0.04294340845212094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4144736842105263, + "acc_stderr": 0.04008973785779206, + "acc_norm": 0.4144736842105263, + "acc_norm_stderr": 0.04008973785779206 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3415032679738562, + "acc_stderr": 0.019184639328092484, + "acc_norm": 0.3415032679738562, + "acc_norm_stderr": 0.019184639328092484 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.33687943262411346, + "acc_stderr": 0.02819553487396673, + "acc_norm": 0.33687943262411346, + "acc_norm_stderr": 0.02819553487396673 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.1875, + "acc_stderr": 0.0370468111477387, + "acc_norm": 0.1875, + "acc_norm_stderr": 0.0370468111477387 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.28703703703703703, + "acc_stderr": 0.030851992993257013, + "acc_norm": 0.28703703703703703, + "acc_norm_stderr": 0.030851992993257013 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.23161764705882354, + "acc_stderr": 0.025626533803777565, + "acc_norm": 0.23161764705882354, + "acc_norm_stderr": 0.025626533803777565 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3346938775510204, + "acc_stderr": 0.030209235226242307, + "acc_norm": 0.3346938775510204, + "acc_norm_stderr": 0.030209235226242307 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 
0.5189873417721519, + "acc_stderr": 0.03252375148090447, + "acc_norm": 0.5189873417721519, + "acc_norm_stderr": 0.03252375148090447 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.30964797913950454, + "acc_stderr": 0.01180859826250332, + "acc_norm": 0.30964797913950454, + "acc_norm_stderr": 0.01180859826250332 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.03410785338904718, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.03410785338904718 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.45454545454545453, + "acc_stderr": 0.038881769216741, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.038881769216741 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2741738066095471, + "mc1_stderr": 0.015616518497219374, + "mc2": 0.42845182361852463, + "mc2_stderr": 0.015173261969256705 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.43565525383707204, + "acc_stderr": 0.017047415229476327, + "acc_norm": 0.577331759149941, + "acc_norm_stderr": 0.016983506079577607 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + 
"harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "sanghwa-na/llama2-13b.kor", + "model_sha": "1e0dfa0e076117cf22754ff55a94bac1f72672ca", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, 
+ "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/sanghwa-na/mistrallite.kor/result_2023-10-29 10:51:09.json b/sanghwa-na/mistrallite.kor/result_2023-10-29 10:51:09.json new file mode 100644 index 0000000000000000000000000000000000000000..953ca82ac316c28a654ec3b20e979461b9a26f23 --- /dev/null +++ b/sanghwa-na/mistrallite.kor/result_2023-10-29 10:51:09.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3054607508532423, + "acc_stderr": 0.0134600804780025, + "acc_norm": 0.34044368600682595, + "acc_norm_stderr": 0.013847460518892976 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3329018123879705, + "acc_stderr": 0.004702886273189413, + "acc_norm": 0.4133638717386975, + "acc_norm_stderr": 0.004914305798575699 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.3508771929824561, + "acc_stderr": 0.03660298834049162, + "acc_norm": 0.3508771929824561, + "acc_norm_stderr": 0.03660298834049162 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.46601941747572817, + "acc_stderr": 0.0493929144727348, + "acc_norm": 0.46601941747572817, + "acc_norm_stderr": 0.0493929144727348 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.421455938697318, + "acc_stderr": 0.017657976412654857, + "acc_norm": 0.421455938697318, + "acc_norm_stderr": 0.017657976412654857 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.35555555555555557, + "acc_stderr": 0.04135176749720386, + "acc_norm": 0.35555555555555557, + "acc_norm_stderr": 0.04135176749720386 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206824, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206824 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39148936170212767, + "acc_stderr": 0.03190701242326812, + "acc_norm": 0.39148936170212767, + "acc_norm_stderr": 0.03190701242326812 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3795180722891566, + "acc_stderr": 
0.037777988227480165, + "acc_norm": 0.3795180722891566, + "acc_norm_stderr": 0.037777988227480165 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4790996784565916, + "acc_stderr": 0.028373270961069414, + "acc_norm": 0.4790996784565916, + "acc_norm_stderr": 0.028373270961069414 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3991031390134529, + "acc_stderr": 0.03286745312567961, + "acc_norm": 0.3991031390134529, + "acc_norm_stderr": 0.03286745312567961 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.366412213740458, + "acc_stderr": 0.04225875451969638, + "acc_norm": 0.366412213740458, + "acc_norm_stderr": 0.04225875451969638 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.4090909090909091, + "acc_stderr": 0.03502975799413007, + "acc_norm": 0.4090909090909091, + "acc_norm_stderr": 0.03502975799413007 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3724137931034483, + "acc_stderr": 0.040287315329475604, + "acc_norm": 0.3724137931034483, + "acc_norm_stderr": 0.040287315329475604 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.04220773659171452, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.04220773659171452 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.41596638655462187, + "acc_stderr": 0.03201650100739615, + "acc_norm": 0.41596638655462187, + "acc_norm_stderr": 0.03201650100739615 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3769230769230769, + "acc_stderr": 0.024570975364225995, + "acc_norm": 0.3769230769230769, + "acc_norm_stderr": 0.024570975364225995 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.52, + "acc_stderr": 0.05021167315686779, + "acc_norm": 0.52, + "acc_norm_stderr": 0.05021167315686779 + }, + "harness|ko_mmlu_global_facts|5": { + 
"acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04557239513497752, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04557239513497752 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3842364532019704, + "acc_stderr": 0.034223985656575515, + "acc_norm": 0.3842364532019704, + "acc_norm_stderr": 0.034223985656575515 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4096774193548387, + "acc_stderr": 0.027976054915347357, + "acc_norm": 0.4096774193548387, + "acc_norm_stderr": 0.027976054915347357 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6367521367521367, + "acc_stderr": 0.03150712523091265, + "acc_norm": 0.6367521367521367, + "acc_norm_stderr": 0.03150712523091265 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.39245283018867927, + "acc_stderr": 0.030052580579557838, + "acc_norm": 0.39245283018867927, + "acc_norm_stderr": 0.030052580579557838 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.41818181818181815, + "acc_stderr": 0.0472457740573157, + "acc_norm": 0.41818181818181815, + "acc_norm_stderr": 0.0472457740573157 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.32222222222222224, + "acc_stderr": 0.028493465091028593, + "acc_norm": 0.32222222222222224, + "acc_norm_stderr": 0.028493465091028593 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3576158940397351, + "acc_stderr": 0.03913453431177258, + "acc_norm": 0.3576158940397351, + "acc_norm_stderr": 0.03913453431177258 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.4925373134328358, + "acc_stderr": 0.035351400842767194, + "acc_norm": 0.4925373134328358, + "acc_norm_stderr": 0.035351400842767194 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2543352601156069, + "acc_stderr": 0.033205564430855705, + "acc_norm": 0.2543352601156069, + "acc_norm_stderr": 
0.033205564430855705 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3386243386243386, + "acc_stderr": 0.024373197867983063, + "acc_norm": 0.3386243386243386, + "acc_norm_stderr": 0.024373197867983063 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.03852084696008534, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.03852084696008534 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.3583815028901734, + "acc_stderr": 0.025816756791584197, + "acc_norm": 0.3583815028901734, + "acc_norm_stderr": 0.025816756791584197 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3558282208588957, + "acc_stderr": 0.03761521380046734, + "acc_norm": 0.3558282208588957, + "acc_norm_stderr": 0.03761521380046734 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.42901234567901236, + "acc_stderr": 0.027538925613470863, + "acc_norm": 0.42901234567901236, + "acc_norm_stderr": 0.027538925613470863 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.38860103626943004, + "acc_stderr": 0.03517739796373131, + "acc_norm": 0.38860103626943004, + "acc_norm_stderr": 0.03517739796373131 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.30701754385964913, + "acc_stderr": 0.043391383225798594, + "acc_norm": 0.30701754385964913, + "acc_norm_stderr": 0.043391383225798594 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3614678899082569, + "acc_stderr": 0.020598082009937374, + "acc_norm": 
0.3614678899082569, + "acc_norm_stderr": 0.020598082009937374 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2698412698412698, + "acc_stderr": 0.03970158273235173, + "acc_norm": 0.2698412698412698, + "acc_norm_stderr": 0.03970158273235173 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.39215686274509803, + "acc_stderr": 0.02795604616542451, + "acc_norm": 0.39215686274509803, + "acc_norm_stderr": 0.02795604616542451 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252603, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252603 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5950413223140496, + "acc_stderr": 0.04481137755942469, + "acc_norm": 0.5950413223140496, + "acc_norm_stderr": 0.04481137755942469 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3092105263157895, + "acc_stderr": 0.037610708698674805, + "acc_norm": 0.3092105263157895, + "acc_norm_stderr": 0.037610708698674805 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3088235294117647, + "acc_stderr": 0.01869085027359529, + "acc_norm": 0.3088235294117647, + "acc_norm_stderr": 0.01869085027359529 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3262411347517731, + "acc_stderr": 0.02796845304356317, + "acc_norm": 0.3262411347517731, + "acc_norm_stderr": 0.02796845304356317 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3125, + "acc_stderr": 0.043994650575715215, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.043994650575715215 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.03350991604696043, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.03350991604696043 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2435754189944134, + "acc_stderr": 0.01435591196476786, + "acc_norm": 0.2435754189944134, + "acc_norm_stderr": 0.01435591196476786 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.34, + "acc_stderr": 
0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.40441176470588236, + "acc_stderr": 0.02981263070156974, + "acc_norm": 0.40441176470588236, + "acc_norm_stderr": 0.02981263070156974 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.35918367346938773, + "acc_stderr": 0.030713560455108493, + "acc_norm": 0.35918367346938773, + "acc_norm_stderr": 0.030713560455108493 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.4050632911392405, + "acc_stderr": 0.031955147413706725, + "acc_norm": 0.4050632911392405, + "acc_norm_stderr": 0.031955147413706725 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2790091264667536, + "acc_stderr": 0.011455208832803529, + "acc_norm": 0.2790091264667536, + "acc_norm_stderr": 0.011455208832803529 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.36764705882352944, + "acc_stderr": 0.03384132045674119, + "acc_norm": 0.36764705882352944, + "acc_norm_stderr": 0.03384132045674119 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3393939393939394, + "acc_stderr": 0.03697442205031596, + "acc_norm": 0.3393939393939394, + "acc_norm_stderr": 0.03697442205031596 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.28886168910648713, + "mc1_stderr": 0.015866346401384308, + "mc2": 0.45952110519785677, + "mc2_stderr": 0.01589536011034475 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2833530106257379, + "acc_stderr": 0.015492852084597232, + "acc_norm": 0.4297520661157025, + "acc_norm_stderr": 0.01701984753597222 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 
1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + 
"harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "sanghwa-na/mistrallite.kor", + "model_sha": "7fa22118f6d96ee3873002e4ec1ccdc0dd53d976", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/sboseong124/test3/result_2024-08-02 05:39:29.json b/sboseong124/test3/result_2024-08-02 05:39:29.json new file mode 100644 index 0000000000000000000000000000000000000000..3ab23913b3282c2bc1524044f9d404dfdc668bab --- /dev/null +++ b/sboseong124/test3/result_2024-08-02 05:39:29.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4803754266211604, + "acc_stderr": 0.014600132075947085, + "acc_norm": 0.53839590443686, + "acc_norm_stderr": 0.014568245550296358 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4651463851822346, + "acc_stderr": 0.004977643730848596, + "acc_norm": 0.6416052579167496, + "acc_norm_stderr": 0.004785488626807573 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6842105263157895, + "acc_stderr": 0.03565079670708311, + "acc_norm": 0.6842105263157895, + "acc_norm_stderr": 0.03565079670708311 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6407766990291263, + "acc_stderr": 0.04750458399041697, + "acc_norm": 0.6407766990291263, + "acc_norm_stderr": 0.04750458399041697 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.70242656449553, + "acc_stderr": 0.016349111912909435, + 
"acc_norm": 0.70242656449553, + "acc_norm_stderr": 0.016349111912909435 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.043163785995113245, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.043163785995113245 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.42127659574468085, + "acc_stderr": 0.032278345101462685, + "acc_norm": 0.42127659574468085, + "acc_norm_stderr": 0.032278345101462685 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4759036144578313, + "acc_stderr": 0.03887971849597264, + "acc_norm": 0.4759036144578313, + "acc_norm_stderr": 0.03887971849597264 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6012861736334405, + "acc_stderr": 0.0278093225857745, + "acc_norm": 0.6012861736334405, + "acc_norm_stderr": 0.0278093225857745 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.547085201793722, + "acc_stderr": 0.033408675019233246, + "acc_norm": 0.547085201793722, + "acc_norm_stderr": 0.033408675019233246 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5648854961832062, + "acc_stderr": 0.04348208051644858, + "acc_norm": 0.5648854961832062, + "acc_norm_stderr": 0.04348208051644858 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.702020202020202, + "acc_stderr": 0.03258630383836556, + "acc_norm": 0.702020202020202, + "acc_norm_stderr": 0.03258630383836556 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4689655172413793, + "acc_stderr": 0.04158632762097828, + "acc_norm": 0.4689655172413793, + "acc_norm_stderr": 0.04158632762097828 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.04488482852329017, 
+ "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.04488482852329017 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5042016806722689, + "acc_stderr": 0.03247734334448111, + "acc_norm": 0.5042016806722689, + "acc_norm_stderr": 0.03247734334448111 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5076923076923077, + "acc_stderr": 0.025348006031534767, + "acc_norm": 0.5076923076923077, + "acc_norm_stderr": 0.025348006031534767 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5648148148148148, + "acc_stderr": 0.04792898170907061, + "acc_norm": 0.5648148148148148, + "acc_norm_stderr": 0.04792898170907061 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3891625615763547, + "acc_stderr": 0.034304624161038716, + "acc_norm": 0.3891625615763547, + "acc_norm_stderr": 0.034304624161038716 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.567741935483871, + "acc_stderr": 0.028181739720019406, + "acc_norm": 0.567741935483871, + "acc_norm_stderr": 0.028181739720019406 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7478632478632479, + "acc_stderr": 0.02844796547623102, + "acc_norm": 0.7478632478632479, + "acc_norm_stderr": 0.02844796547623102 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4981132075471698, + "acc_stderr": 0.03077265364207565, + "acc_norm": 0.4981132075471698, + "acc_norm_stderr": 0.03077265364207565 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5181818181818182, + "acc_stderr": 0.04785964010794915, + "acc_norm": 0.5181818181818182, + "acc_norm_stderr": 0.04785964010794915 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 
0.29259259259259257, + "acc_stderr": 0.027738969632176088, + "acc_norm": 0.29259259259259257, + "acc_norm_stderr": 0.027738969632176088 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33112582781456956, + "acc_stderr": 0.038425817186598696, + "acc_norm": 0.33112582781456956, + "acc_norm_stderr": 0.038425817186598696 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6915422885572139, + "acc_stderr": 0.03265819588512697, + "acc_norm": 0.6915422885572139, + "acc_norm_stderr": 0.03265819588512697 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5144508670520231, + "acc_stderr": 0.03810871630454764, + "acc_norm": 0.5144508670520231, + "acc_norm_stderr": 0.03810871630454764 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3862433862433862, + "acc_stderr": 0.02507598176760168, + "acc_norm": 0.3862433862433862, + "acc_norm_stderr": 0.02507598176760168 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5277777777777778, + "acc_stderr": 0.04174752578923185, + "acc_norm": 0.5277777777777778, + "acc_norm_stderr": 0.04174752578923185 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.74, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.74, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5289017341040463, + "acc_stderr": 0.026874085883518348, + "acc_norm": 0.5289017341040463, + "acc_norm_stderr": 0.026874085883518348 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5644171779141104, + "acc_stderr": 0.03895632464138937, + "acc_norm": 0.5644171779141104, + "acc_norm_stderr": 0.03895632464138937 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6049382716049383, + "acc_stderr": 0.027201117666925644, + "acc_norm": 0.6049382716049383, + "acc_norm_stderr": 0.027201117666925644 + }, + 
"harness|ko_mmlu_college_mathematics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6683937823834197, + "acc_stderr": 0.03397636541089118, + "acc_norm": 0.6683937823834197, + "acc_norm_stderr": 0.03397636541089118 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04434600701584925, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04434600701584925 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6256880733944954, + "acc_stderr": 0.020748959408988316, + "acc_norm": 0.6256880733944954, + "acc_norm_stderr": 0.020748959408988316 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.29365079365079366, + "acc_stderr": 0.04073524322147125, + "acc_norm": 0.29365079365079366, + "acc_norm_stderr": 0.04073524322147125 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5718954248366013, + "acc_stderr": 0.028332397483664278, + "acc_norm": 0.5718954248366013, + "acc_norm_stderr": 0.028332397483664278 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7272727272727273, + "acc_stderr": 0.04065578140908705, + "acc_norm": 0.7272727272727273, + "acc_norm_stderr": 0.04065578140908705 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5986842105263158, + "acc_stderr": 0.039889037033362836, + "acc_norm": 0.5986842105263158, + "acc_norm_stderr": 0.039889037033362836 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4934640522875817, + "acc_stderr": 0.020226106567657807, + "acc_norm": 0.4934640522875817, + "acc_norm_stderr": 0.020226106567657807 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.34397163120567376, + "acc_stderr": 0.02833801742861131, + "acc_norm": 0.34397163120567376, + 
"acc_norm_stderr": 0.02833801742861131 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.36607142857142855, + "acc_stderr": 0.0457237235873743, + "acc_norm": 0.36607142857142855, + "acc_norm_stderr": 0.0457237235873743 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.36574074074074076, + "acc_stderr": 0.03284738857647206, + "acc_norm": 0.36574074074074076, + "acc_norm_stderr": 0.03284738857647206 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2737430167597765, + "acc_stderr": 0.014912413096372432, + "acc_norm": 0.2737430167597765, + "acc_norm_stderr": 0.014912413096372432 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.65, + "acc_stderr": 0.04793724854411019, + "acc_norm": 0.65, + "acc_norm_stderr": 0.04793724854411019 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4485294117647059, + "acc_stderr": 0.0302114796091216, + "acc_norm": 0.4485294117647059, + "acc_norm_stderr": 0.0302114796091216 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5836734693877551, + "acc_stderr": 0.03155782816556166, + "acc_norm": 0.5836734693877551, + "acc_norm_stderr": 0.03155782816556166 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7341772151898734, + "acc_stderr": 0.02875679962965834, + "acc_norm": 0.7341772151898734, + "acc_norm_stderr": 0.02875679962965834 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.363754889178618, + "acc_stderr": 0.012286991879902886, + "acc_norm": 0.363754889178618, + "acc_norm_stderr": 0.012286991879902886 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6617647058823529, + "acc_stderr": 0.0332057461294543, + "acc_norm": 0.6617647058823529, + "acc_norm_stderr": 0.0332057461294543 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6424242424242425, + 
"acc_stderr": 0.03742597043806587, + "acc_norm": 0.6424242424242425, + "acc_norm_stderr": 0.03742597043806587 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2864137086903305, + "mc1_stderr": 0.015826142439502346, + "mc2": 0.42514924774791196, + "mc2_stderr": 0.014878152011948058 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.6458087367178277, + "acc_stderr": 0.01644317574921476, + "acc_norm": 0.6824085005903188, + "acc_norm_stderr": 0.016005581876229295 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + 
"harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "sboseong124/test3", + "model_sha": "32cec2fbaf499443d694221d023f17f8814d6dec", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/sdhan/SD_SOLAR_10.7B_v1.0/result_2024-05-12 18:34:31.json b/sdhan/SD_SOLAR_10.7B_v1.0/result_2024-05-12 18:34:31.json new file mode 100644 index 0000000000000000000000000000000000000000..4e7b594d49d93c09f8e4415e9eb3f7000cc68148 --- /dev/null +++ b/sdhan/SD_SOLAR_10.7B_v1.0/result_2024-05-12 18:34:31.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.7457337883959044, + "acc_stderr": 0.012724999945157736, + "acc_norm": 0.7824232081911263, + 
"acc_norm_stderr": 0.012057262020972506 + }, + "harness|ko_hellaswag|10": { + "acc": 0.6512646883091018, + "acc_stderr": 0.004755960559929161, + "acc_norm": 0.7849034056960765, + "acc_norm_stderr": 0.004100495978108378 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6842105263157895, + "acc_stderr": 0.03565079670708311, + "acc_norm": 0.6842105263157895, + "acc_norm_stderr": 0.03565079670708311 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.7475728155339806, + "acc_stderr": 0.04301250399690878, + "acc_norm": 0.7475728155339806, + "acc_norm_stderr": 0.04301250399690878 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6513409961685823, + "acc_stderr": 0.01704124314349099, + "acc_norm": 0.6513409961685823, + "acc_norm_stderr": 0.01704124314349099 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45185185185185184, + "acc_stderr": 0.04299268905480863, + "acc_norm": 0.45185185185185184, + "acc_norm_stderr": 0.04299268905480863 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.48936170212765956, + "acc_stderr": 0.03267862331014063, + "acc_norm": 0.48936170212765956, + "acc_norm_stderr": 0.03267862331014063 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4759036144578313, + "acc_stderr": 0.03887971849597264, + "acc_norm": 0.4759036144578313, + "acc_norm_stderr": 0.03887971849597264 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5980707395498392, + "acc_stderr": 0.027846476005930477, + "acc_norm": 0.5980707395498392, + "acc_norm_stderr": 0.027846476005930477 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.6412556053811659, + "acc_stderr": 0.032190792004199956, + "acc_norm": 0.6412556053811659, + "acc_norm_stderr": 0.032190792004199956 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5343511450381679, + "acc_stderr": 0.043749285605997376, + "acc_norm": 
0.5343511450381679, + "acc_norm_stderr": 0.043749285605997376 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.52, + "acc_stderr": 0.05021167315686781, + "acc_norm": 0.52, + "acc_norm_stderr": 0.05021167315686781 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7222222222222222, + "acc_stderr": 0.03191178226713547, + "acc_norm": 0.7222222222222222, + "acc_norm_stderr": 0.03191178226713547 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5241379310344828, + "acc_stderr": 0.0416180850350153, + "acc_norm": 0.5241379310344828, + "acc_norm_stderr": 0.0416180850350153 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.04533838195929775, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.04533838195929775 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6302521008403361, + "acc_stderr": 0.03135709599613591, + "acc_norm": 0.6302521008403361, + "acc_norm_stderr": 0.03135709599613591 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.6256410256410256, + "acc_stderr": 0.0245375915728305, + "acc_norm": 0.6256410256410256, + "acc_norm_stderr": 0.0245375915728305 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.69, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.69, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542129, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542129 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6388888888888888, + "acc_stderr": 0.04643454608906275, + "acc_norm": 0.6388888888888888, + "acc_norm_stderr": 0.04643454608906275 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4433497536945813, + "acc_stderr": 0.03495334582162934, + "acc_norm": 0.4433497536945813, + "acc_norm_stderr": 0.03495334582162934 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5870967741935483, + "acc_stderr": 
0.028009138125400387, + "acc_norm": 0.5870967741935483, + "acc_norm_stderr": 0.028009138125400387 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.8162393162393162, + "acc_stderr": 0.025372139671722933, + "acc_norm": 0.8162393162393162, + "acc_norm_stderr": 0.025372139671722933 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5886792452830188, + "acc_stderr": 0.03028500925900979, + "acc_norm": 0.5886792452830188, + "acc_norm_stderr": 0.03028500925900979 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6, + "acc_stderr": 0.0469237132203465, + "acc_norm": 0.6, + "acc_norm_stderr": 0.0469237132203465 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.32222222222222224, + "acc_stderr": 0.0284934650910286, + "acc_norm": 0.32222222222222224, + "acc_norm_stderr": 0.0284934650910286 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.32450331125827814, + "acc_stderr": 0.038227469376587525, + "acc_norm": 0.32450331125827814, + "acc_norm_stderr": 0.038227469376587525 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7213930348258707, + "acc_stderr": 0.03170056183497309, + "acc_norm": 0.7213930348258707, + "acc_norm_stderr": 0.03170056183497309 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5144508670520231, + "acc_stderr": 0.03810871630454764, + "acc_norm": 0.5144508670520231, + "acc_norm_stderr": 0.03810871630454764 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.455026455026455, + "acc_stderr": 0.025646928361049395, + "acc_norm": 0.455026455026455, + "acc_norm_stderr": 0.025646928361049395 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5902777777777778, + "acc_stderr": 0.04112490974670787, + "acc_norm": 0.5902777777777778, + "acc_norm_stderr": 0.04112490974670787 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.72, + 
"acc_stderr": 0.04512608598542127, + "acc_norm": 0.72, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5751445086705202, + "acc_stderr": 0.026613350840261746, + "acc_norm": 0.5751445086705202, + "acc_norm_stderr": 0.026613350840261746 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.6196319018404908, + "acc_stderr": 0.038142698932618374, + "acc_norm": 0.6196319018404908, + "acc_norm_stderr": 0.038142698932618374 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6296296296296297, + "acc_stderr": 0.026869490744815264, + "acc_norm": 0.6296296296296297, + "acc_norm_stderr": 0.026869490744815264 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7564766839378239, + "acc_stderr": 0.03097543638684542, + "acc_norm": 0.7564766839378239, + "acc_norm_stderr": 0.03097543638684542 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.45614035087719296, + "acc_stderr": 0.046854730419077895, + "acc_norm": 0.45614035087719296, + "acc_norm_stderr": 0.046854730419077895 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.7137614678899082, + "acc_stderr": 0.019379436628919958, + "acc_norm": 0.7137614678899082, + "acc_norm_stderr": 0.019379436628919958 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4603174603174603, + "acc_stderr": 0.04458029125470973, + "acc_norm": 0.4603174603174603, + "acc_norm_stderr": 0.04458029125470973 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5718954248366013, + "acc_stderr": 0.028332397483664278, + "acc_norm": 0.5718954248366013, + "acc_norm_stderr": 0.028332397483664278 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.67, + "acc_stderr": 0.047258156262526066, + "acc_norm": 0.67, + "acc_norm_stderr": 0.047258156262526066 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 
0.71900826446281, + "acc_stderr": 0.04103203830514512, + "acc_norm": 0.71900826446281, + "acc_norm_stderr": 0.04103203830514512 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.6447368421052632, + "acc_stderr": 0.038947344870133176, + "acc_norm": 0.6447368421052632, + "acc_norm_stderr": 0.038947344870133176 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5620915032679739, + "acc_stderr": 0.020071257886886525, + "acc_norm": 0.5620915032679739, + "acc_norm_stderr": 0.020071257886886525 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.4078014184397163, + "acc_stderr": 0.02931601177634356, + "acc_norm": 0.4078014184397163, + "acc_norm_stderr": 0.02931601177634356 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.04697113923010213, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.04697113923010213 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4583333333333333, + "acc_stderr": 0.03398110890294636, + "acc_norm": 0.4583333333333333, + "acc_norm_stderr": 0.03398110890294636 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.30726256983240224, + "acc_stderr": 0.015430158846469609, + "acc_norm": 0.30726256983240224, + "acc_norm_stderr": 0.015430158846469609 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.04999999999999999, + "acc_norm": 0.45, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.65, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.65, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5036764705882353, + "acc_stderr": 0.030372015885428195, + "acc_norm": 0.5036764705882353, + "acc_norm_stderr": 0.030372015885428195 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6489795918367347, + "acc_stderr": 0.030555316755573644, + "acc_norm": 0.6489795918367347, + "acc_norm_stderr": 
0.030555316755573644 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7215189873417721, + "acc_stderr": 0.029178682304842538, + "acc_norm": 0.7215189873417721, + "acc_norm_stderr": 0.029178682304842538 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.4471968709256845, + "acc_stderr": 0.012698825252435118, + "acc_norm": 0.4471968709256845, + "acc_norm_stderr": 0.012698825252435118 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.033086111132364364, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.033086111132364364 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6545454545454545, + "acc_stderr": 0.037131580674819135, + "acc_norm": 0.6545454545454545, + "acc_norm_stderr": 0.037131580674819135 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.8102815177478581, + "mc1_stderr": 0.013725485265185093, + "mc2": 0.8738461646183845, + "mc2_stderr": 0.011212708938985069 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.49940968122786306, + "acc_stderr": 0.017190342123448662, + "acc_norm": 0.5194805194805194, + "acc_norm_stderr": 0.01717730199234254 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + 
"harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "sdhan/SD_SOLAR_10.7B_v1.0", + "model_sha": "a32d708eb74495fcdbb8b88766cb680a81aedc2b", + 
"model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/sel303/gemma-1.1-2b-it-merged_lora-ko-summarizaiton/result_2024-05-20 08:15:32.json b/sel303/gemma-1.1-2b-it-merged_lora-ko-summarizaiton/result_2024-05-20 08:15:32.json new file mode 100644 index 0000000000000000000000000000000000000000..d47535ab713182f88daffffdcb28a296becaecbd --- /dev/null +++ b/sel303/gemma-1.1-2b-it-merged_lora-ko-summarizaiton/result_2024-05-20 08:15:32.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.23208191126279865, + "acc_stderr": 0.012336718284948853, + "acc_norm": 0.27474402730375425, + "acc_norm_stderr": 0.013044617212771227 + }, + "harness|ko_hellaswag|10": { + "acc": 0.30173272256522604, + "acc_stderr": 0.004580718115992502, + "acc_norm": 0.3521210914160526, + "acc_norm_stderr": 0.004766553336917502 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.23976608187134502, + "acc_stderr": 0.03274485211946956, + "acc_norm": 0.23976608187134502, + "acc_norm_stderr": 0.03274485211946956 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.3106796116504854, + "acc_stderr": 0.045821241601615506, + "acc_norm": 0.3106796116504854, + "acc_norm_stderr": 0.045821241601615506 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3167305236270754, + "acc_stderr": 0.016635566427712488, + "acc_norm": 0.3167305236270754, + "acc_norm_stderr": 0.016635566427712488 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2814814814814815, + "acc_stderr": 0.03885004245800254, + "acc_norm": 0.2814814814814815, + "acc_norm_stderr": 0.03885004245800254 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.32340425531914896, + "acc_stderr": 
0.030579442773610337, + "acc_norm": 0.32340425531914896, + "acc_norm_stderr": 0.030579442773610337 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3313253012048193, + "acc_stderr": 0.036643147772880864, + "acc_norm": 0.3313253012048193, + "acc_norm_stderr": 0.036643147772880864 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2733118971061093, + "acc_stderr": 0.025311765975426122, + "acc_norm": 0.2733118971061093, + "acc_norm_stderr": 0.025311765975426122 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3721973094170404, + "acc_stderr": 0.032443052830087304, + "acc_norm": 0.3721973094170404, + "acc_norm_stderr": 0.032443052830087304 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3511450381679389, + "acc_stderr": 0.04186445163013751, + "acc_norm": 0.3511450381679389, + "acc_norm_stderr": 0.04186445163013751 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.2474747474747475, + "acc_stderr": 0.03074630074212451, + "acc_norm": 0.2474747474747475, + "acc_norm_stderr": 0.03074630074212451 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.33793103448275863, + "acc_stderr": 0.039417076320648906, + "acc_norm": 0.33793103448275863, + "acc_norm_stderr": 0.039417076320648906 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.19607843137254902, + "acc_stderr": 0.03950581861179962, + "acc_norm": 0.19607843137254902, + "acc_norm_stderr": 0.03950581861179962 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.02755361446786377, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.02755361446786377 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2512820512820513, + "acc_stderr": 0.021992016662370568, + "acc_norm": 0.2512820512820513, + "acc_norm_stderr": 0.021992016662370568 + }, + 
"harness|ko_mmlu_computer_security|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.39814814814814814, + "acc_stderr": 0.04732332615978815, + "acc_norm": 0.39814814814814814, + "acc_norm_stderr": 0.04732332615978815 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2660098522167488, + "acc_stderr": 0.03108982600293752, + "acc_norm": 0.2660098522167488, + "acc_norm_stderr": 0.03108982600293752 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2870967741935484, + "acc_stderr": 0.025736542745594528, + "acc_norm": 0.2870967741935484, + "acc_norm_stderr": 0.025736542745594528 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.41452991452991456, + "acc_stderr": 0.03227396567623778, + "acc_norm": 0.41452991452991456, + "acc_norm_stderr": 0.03227396567623778 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2943396226415094, + "acc_stderr": 0.02804918631569524, + "acc_norm": 0.2943396226415094, + "acc_norm_stderr": 0.02804918631569524 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.36363636363636365, + "acc_stderr": 0.046075820907199756, + "acc_norm": 0.36363636363636365, + "acc_norm_stderr": 0.046075820907199756 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.026719240783712156, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.026719240783712156 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.23841059602649006, + "acc_stderr": 0.0347918557259966, + "acc_norm": 0.23841059602649006, + "acc_norm_stderr": 0.0347918557259966 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.30845771144278605, + "acc_stderr": 0.03265819588512699, + "acc_norm": 0.30845771144278605, + "acc_norm_stderr": 
0.03265819588512699 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.26011560693641617, + "acc_stderr": 0.03345036916788992, + "acc_norm": 0.26011560693641617, + "acc_norm_stderr": 0.03345036916788992 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2671957671957672, + "acc_stderr": 0.02278967314577657, + "acc_norm": 0.2671957671957672, + "acc_norm_stderr": 0.02278967314577657 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2638888888888889, + "acc_stderr": 0.03685651095897532, + "acc_norm": 0.2638888888888889, + "acc_norm_stderr": 0.03685651095897532 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.22, + "acc_stderr": 0.0416333199893227, + "acc_norm": 0.22, + "acc_norm_stderr": 0.0416333199893227 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2658959537572254, + "acc_stderr": 0.023786203255508283, + "acc_norm": 0.2658959537572254, + "acc_norm_stderr": 0.023786203255508283 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.27607361963190186, + "acc_stderr": 0.0351238528370505, + "acc_norm": 0.27607361963190186, + "acc_norm_stderr": 0.0351238528370505 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2993827160493827, + "acc_stderr": 0.02548311560119547, + "acc_norm": 0.2993827160493827, + "acc_norm_stderr": 0.02548311560119547 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.22279792746113988, + "acc_stderr": 0.030031147977641545, + "acc_norm": 0.22279792746113988, + "acc_norm_stderr": 0.030031147977641545 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.0414243971948936, + "acc_norm": 0.2631578947368421, + 
"acc_norm_stderr": 0.0414243971948936 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.27889908256880735, + "acc_stderr": 0.01922746887646351, + "acc_norm": 0.27889908256880735, + "acc_norm_stderr": 0.01922746887646351 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.24603174603174602, + "acc_stderr": 0.038522733649243156, + "acc_norm": 0.24603174603174602, + "acc_norm_stderr": 0.038522733649243156 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.024630048979824765, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.024630048979824765 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.32231404958677684, + "acc_stderr": 0.042664163633521685, + "acc_norm": 0.32231404958677684, + "acc_norm_stderr": 0.042664163633521685 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.034597776068105365, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.034597776068105365 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.24673202614379086, + "acc_stderr": 0.0174408203674025, + "acc_norm": 0.24673202614379086, + "acc_norm_stderr": 0.0174408203674025 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2801418439716312, + "acc_stderr": 0.026789172351140235, + "acc_norm": 0.2801418439716312, + "acc_norm_stderr": 0.026789172351140235 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25, + "acc_stderr": 0.04109974682633932, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04109974682633932 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.16203703703703703, + "acc_stderr": 0.025130453652268455, + "acc_norm": 0.16203703703703703, + "acc_norm_stderr": 0.025130453652268455 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.25251396648044694, + "acc_stderr": 
0.014530330201468643, + "acc_norm": 0.25251396648044694, + "acc_norm_stderr": 0.014530330201468643 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.17, + "acc_stderr": 0.0377525168068637, + "acc_norm": 0.17, + "acc_norm_stderr": 0.0377525168068637 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.22426470588235295, + "acc_stderr": 0.02533684856333237, + "acc_norm": 0.22426470588235295, + "acc_norm_stderr": 0.02533684856333237 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.22040816326530613, + "acc_stderr": 0.02653704531214531, + "acc_norm": 0.22040816326530613, + "acc_norm_stderr": 0.02653704531214531 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2911392405063291, + "acc_stderr": 0.029571601065753374, + "acc_norm": 0.2911392405063291, + "acc_norm_stderr": 0.029571601065753374 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2666232073011734, + "acc_stderr": 0.011293836031612122, + "acc_norm": 0.2666232073011734, + "acc_norm_stderr": 0.011293836031612122 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.031660096793998116, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.031660096793998116 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2727272727272727, + "acc_stderr": 0.03477691162163659, + "acc_norm": 0.2727272727272727, + "acc_norm_stderr": 0.03477691162163659 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2937576499388005, + "mc1_stderr": 0.015945068581236618, + "mc2": 0.45814522962449467, + "mc2_stderr": 0.01607773088053265 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.22668240850059032, + "acc_stderr": 0.014394701800505907, + "acc_norm": 0.30342384887839435, + "acc_norm_stderr": 0.015806072717909566 + } + }, + "versions": { + "all": 
0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + 
"harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "sel303/gemma-1.1-2b-it-merged_lora-ko-summarizaiton", + "model_sha": "ecbb1cbbe8de9e914df058024a21151ec9034a47", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/sel303/gpt2-ko-base_3/result_2024-03-11 01:54:26.json b/sel303/gpt2-ko-base_3/result_2024-03-11 01:54:26.json new file mode 100644 index 0000000000000000000000000000000000000000..cae6ebcf81d531891107efa7c6814a3973c2e932 --- /dev/null +++ b/sel303/gpt2-ko-base_3/result_2024-03-11 01:54:26.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.20051194539249148, + "acc_stderr": 0.011700318050499354, + "acc_norm": 0.23976109215017063, + "acc_norm_stderr": 0.012476304127453952 + }, + "harness|ko_hellaswag|10": { + "acc": 0.27076279625572597, + "acc_stderr": 0.004434456717097587, + "acc_norm": 0.276638119896435, + "acc_norm_stderr": 0.004464217420693349 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.21052631578947367, + "acc_stderr": 0.0312678171466318, + "acc_norm": 0.21052631578947367, + "acc_norm_stderr": 0.0312678171466318 + }, + "harness|ko_mmlu_management|5": { + "acc": 
0.17475728155339806, + "acc_stderr": 0.037601780060266196, + "acc_norm": 0.17475728155339806, + "acc_norm_stderr": 0.037601780060266196 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.28735632183908044, + "acc_stderr": 0.0161824107306827, + "acc_norm": 0.28735632183908044, + "acc_norm_stderr": 0.0161824107306827 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.03785714465066654, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.03785714465066654 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847415, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847415 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.32340425531914896, + "acc_stderr": 0.03057944277361034, + "acc_norm": 0.32340425531914896, + "acc_norm_stderr": 0.03057944277361034 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3072289156626506, + "acc_stderr": 0.03591566797824664, + "acc_norm": 0.3072289156626506, + "acc_norm_stderr": 0.03591566797824664 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3022508038585209, + "acc_stderr": 0.02608270069539966, + "acc_norm": 0.3022508038585209, + "acc_norm_stderr": 0.02608270069539966 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.37668161434977576, + "acc_stderr": 0.03252113489929189, + "acc_norm": 0.37668161434977576, + "acc_norm_stderr": 0.03252113489929189 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2366412213740458, + "acc_stderr": 0.03727673575596919, + "acc_norm": 0.2366412213740458, + "acc_norm_stderr": 0.03727673575596919 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.21717171717171718, + "acc_stderr": 0.02937661648494563, + "acc_norm": 0.21717171717171718, + "acc_norm_stderr": 0.02937661648494563 + }, + "harness|ko_mmlu_electrical_engineering|5": { + 
"acc": 0.23448275862068965, + "acc_stderr": 0.035306258743465914, + "acc_norm": 0.23448275862068965, + "acc_norm_stderr": 0.035306258743465914 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237654, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237654 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3319327731092437, + "acc_stderr": 0.030588697013783663, + "acc_norm": 0.3319327731092437, + "acc_norm_stderr": 0.030588697013783663 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.33076923076923076, + "acc_stderr": 0.02385479568097113, + "acc_norm": 0.33076923076923076, + "acc_norm_stderr": 0.02385479568097113 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.21296296296296297, + "acc_stderr": 0.03957835471980981, + "acc_norm": 0.21296296296296297, + "acc_norm_stderr": 0.03957835471980981 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.270935960591133, + "acc_stderr": 0.03127090713297698, + "acc_norm": 0.270935960591133, + "acc_norm_stderr": 0.03127090713297698 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.29354838709677417, + "acc_stderr": 0.025906087021319288, + "acc_norm": 0.29354838709677417, + "acc_norm_stderr": 0.025906087021319288 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.21367521367521367, + "acc_stderr": 0.026853450377009164, + "acc_norm": 0.21367521367521367, + "acc_norm_stderr": 0.026853450377009164 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2679245283018868, + "acc_stderr": 0.027257260322494845, + "acc_norm": 0.2679245283018868, + "acc_norm_stderr": 0.027257260322494845 
+ }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03955932861795833, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03955932861795833 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24444444444444444, + "acc_stderr": 0.026202766534652148, + "acc_norm": 0.24444444444444444, + "acc_norm_stderr": 0.026202766534652148 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2119205298013245, + "acc_stderr": 0.033367670865679766, + "acc_norm": 0.2119205298013245, + "acc_norm_stderr": 0.033367670865679766 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.23880597014925373, + "acc_stderr": 0.030147775935409224, + "acc_norm": 0.23880597014925373, + "acc_norm_stderr": 0.030147775935409224 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2023121387283237, + "acc_stderr": 0.030631145539198823, + "acc_norm": 0.2023121387283237, + "acc_norm_stderr": 0.030631145539198823 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2566137566137566, + "acc_stderr": 0.022494510767503154, + "acc_norm": 0.2566137566137566, + "acc_norm_stderr": 0.022494510767503154 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.03476590104304134, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.03476590104304134 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.24566473988439305, + "acc_stderr": 0.02317629820399201, + "acc_norm": 0.24566473988439305, + "acc_norm_stderr": 0.02317629820399201 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.26993865030674846, + "acc_stderr": 0.034878251684978906, + "acc_norm": 
0.26993865030674846, + "acc_norm_stderr": 0.034878251684978906 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2654320987654321, + "acc_stderr": 0.02456922360046085, + "acc_norm": 0.2654320987654321, + "acc_norm_stderr": 0.02456922360046085 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.2849740932642487, + "acc_stderr": 0.03257714077709661, + "acc_norm": 0.2849740932642487, + "acc_norm_stderr": 0.03257714077709661 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.041424397194893596, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.041424397194893596 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.23669724770642203, + "acc_stderr": 0.018224078117299095, + "acc_norm": 0.23669724770642203, + "acc_norm_stderr": 0.018224078117299095 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.1984126984126984, + "acc_stderr": 0.03567016675276863, + "acc_norm": 0.1984126984126984, + "acc_norm_stderr": 0.03567016675276863 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.02355083135199509, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.02355083135199509 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.24793388429752067, + "acc_stderr": 0.039418975265163025, + "acc_norm": 0.24793388429752067, + "acc_norm_stderr": 0.039418975265163025 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.17763157894736842, + "acc_stderr": 0.03110318238312338, + "acc_norm": 0.17763157894736842, + "acc_norm_stderr": 0.03110318238312338 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2222222222222222, + 
"acc_stderr": 0.016819028375736386, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.016819028375736386 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.25177304964539005, + "acc_stderr": 0.025892151156709405, + "acc_norm": 0.25177304964539005, + "acc_norm_stderr": 0.025892151156709405 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.26785714285714285, + "acc_stderr": 0.04203277291467763, + "acc_norm": 0.26785714285714285, + "acc_norm_stderr": 0.04203277291467763 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4583333333333333, + "acc_stderr": 0.03398110890294636, + "acc_norm": 0.4583333333333333, + "acc_norm_stderr": 0.03398110890294636 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.17, + "acc_stderr": 0.0377525168068637, + "acc_norm": 0.17, + "acc_norm_stderr": 0.0377525168068637 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.28308823529411764, + "acc_stderr": 0.027365861131513815, + "acc_norm": 0.28308823529411764, + "acc_norm_stderr": 0.027365861131513815 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.1673469387755102, + "acc_stderr": 0.02389714476891452, + "acc_norm": 0.1673469387755102, + "acc_norm_stderr": 0.02389714476891452 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.26582278481012656, + "acc_stderr": 0.02875679962965834, + "acc_norm": 0.26582278481012656, + "acc_norm_stderr": 0.02875679962965834 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.24315514993481094, + "acc_stderr": 0.010956556654417353, + "acc_norm": 0.24315514993481094, + "acc_norm_stderr": 0.010956556654417353 + 
}, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.029771775228145638, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.029771775228145638 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.24848484848484848, + "acc_stderr": 0.03374402644139405, + "acc_norm": 0.24848484848484848, + "acc_norm_stderr": 0.03374402644139405 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.28151774785801714, + "mc1_stderr": 0.015744027248256055, + "mc2": 0.5037339054359827, + "mc2_stderr": 0.016205575955315504 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.32585596221959856, + "acc_stderr": 0.01611402389480034, + "acc_norm": 0.42502951593860683, + "acc_norm_stderr": 0.016996016308362883 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + 
"harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "sel303/gpt2-ko-base_3", + "model_sha": "9b7ee30c48c90e50f95848ae0715b9405cb5a509", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/sel303/llama3-diverce-ver1.0/result_2024-07-08 01:53:29.json b/sel303/llama3-diverce-ver1.0/result_2024-07-08 01:53:29.json new file mode 100644 index 
0000000000000000000000000000000000000000..49f393731360b169d6484f1413e1c94f96fee82e --- /dev/null +++ b/sel303/llama3-diverce-ver1.0/result_2024-07-08 01:53:29.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.318259385665529, + "acc_stderr": 0.013611993916971451, + "acc_norm": 0.3703071672354949, + "acc_norm_stderr": 0.01411129875167495 + }, + "harness|ko_hellaswag|10": { + "acc": 0.35371439952200756, + "acc_stderr": 0.004771447244095126, + "acc_norm": 0.46634136626170086, + "acc_norm_stderr": 0.0049784626909669385 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4853801169590643, + "acc_stderr": 0.038331852752130205, + "acc_norm": 0.4853801169590643, + "acc_norm_stderr": 0.038331852752130205 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5825242718446602, + "acc_stderr": 0.048828405482122375, + "acc_norm": 0.5825242718446602, + "acc_norm_stderr": 0.048828405482122375 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.42017879948914433, + "acc_stderr": 0.01765065136307799, + "acc_norm": 0.42017879948914433, + "acc_norm_stderr": 0.01765065136307799 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.038201699145179055, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.038201699145179055 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4297872340425532, + "acc_stderr": 0.03236214467715564, + "acc_norm": 0.4297872340425532, + "acc_norm_stderr": 0.03236214467715564 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3614457831325301, + "acc_stderr": 0.0374005938202932, + "acc_norm": 0.3614457831325301, + "acc_norm_stderr": 0.0374005938202932 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.41479099678456594, + "acc_stderr": 0.02798268045975956, + "acc_norm": 0.41479099678456594, + "acc_norm_stderr": 
0.02798268045975956 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3004484304932735, + "acc_stderr": 0.03076935200822914, + "acc_norm": 0.3004484304932735, + "acc_norm_stderr": 0.03076935200822914 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4732824427480916, + "acc_stderr": 0.04379024936553894, + "acc_norm": 0.4732824427480916, + "acc_norm_stderr": 0.04379024936553894 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.035476014940069384, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.035476014940069384 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4206896551724138, + "acc_stderr": 0.0411391498118926, + "acc_norm": 0.4206896551724138, + "acc_norm_stderr": 0.0411391498118926 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.04617034827006716, + "acc_norm": 0.3137254901960784, + "acc_norm_stderr": 0.04617034827006716 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.48739495798319327, + "acc_stderr": 0.03246816765752174, + "acc_norm": 0.48739495798319327, + "acc_norm_stderr": 0.03246816765752174 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.45384615384615384, + "acc_stderr": 0.025242770987126167, + "acc_norm": 0.45384615384615384, + "acc_norm_stderr": 0.025242770987126167 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.39814814814814814, + "acc_stderr": 0.04732332615978815, + "acc_norm": 0.39814814814814814, + 
"acc_norm_stderr": 0.04732332615978815 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39901477832512317, + "acc_stderr": 0.03445487686264715, + "acc_norm": 0.39901477832512317, + "acc_norm_stderr": 0.03445487686264715 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4483870967741935, + "acc_stderr": 0.028292056830112735, + "acc_norm": 0.4483870967741935, + "acc_norm_stderr": 0.028292056830112735 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6923076923076923, + "acc_stderr": 0.030236389942173095, + "acc_norm": 0.6923076923076923, + "acc_norm_stderr": 0.030236389942173095 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4528301886792453, + "acc_stderr": 0.030635627957961823, + "acc_norm": 0.4528301886792453, + "acc_norm_stderr": 0.030635627957961823 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4818181818181818, + "acc_stderr": 0.04785964010794916, + "acc_norm": 0.4818181818181818, + "acc_norm_stderr": 0.04785964010794916 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3296296296296296, + "acc_stderr": 0.028661201116524593, + "acc_norm": 0.3296296296296296, + "acc_norm_stderr": 0.028661201116524593 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.36423841059602646, + "acc_stderr": 0.03929111781242741, + "acc_norm": 0.36423841059602646, + "acc_norm_stderr": 0.03929111781242741 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5223880597014925, + "acc_stderr": 0.035319879302087305, + "acc_norm": 0.5223880597014925, + "acc_norm_stderr": 0.035319879302087305 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.42196531791907516, + "acc_stderr": 0.0376574669386515, + "acc_norm": 0.42196531791907516, + "acc_norm_stderr": 0.0376574669386515 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.36772486772486773, + "acc_stderr": 0.02483383982556242, + "acc_norm": 0.36772486772486773, + "acc_norm_stderr": 0.02483383982556242 + }, + "harness|ko_mmlu_college_biology|5": { + 
"acc": 0.375, + "acc_stderr": 0.04048439222695598, + "acc_norm": 0.375, + "acc_norm_stderr": 0.04048439222695598 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.42196531791907516, + "acc_stderr": 0.02658923114217425, + "acc_norm": 0.42196531791907516, + "acc_norm_stderr": 0.02658923114217425 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.32515337423312884, + "acc_stderr": 0.03680350371286461, + "acc_norm": 0.32515337423312884, + "acc_norm_stderr": 0.03680350371286461 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.027648477877413324, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.027648477877413324 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.49740932642487046, + "acc_stderr": 0.03608390745384487, + "acc_norm": 0.49740932642487046, + "acc_norm_stderr": 0.03608390745384487 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2982456140350877, + "acc_stderr": 0.043036840335373173, + "acc_norm": 0.2982456140350877, + "acc_norm_stderr": 0.043036840335373173 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5339449541284403, + "acc_stderr": 0.02138786335035399, + "acc_norm": 0.5339449541284403, + "acc_norm_stderr": 0.02138786335035399 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.31746031746031744, + "acc_stderr": 0.0416345303130286, + "acc_norm": 0.31746031746031744, + "acc_norm_stderr": 0.0416345303130286 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 
0.49019607843137253, + "acc_stderr": 0.028624412550167958, + "acc_norm": 0.49019607843137253, + "acc_norm_stderr": 0.028624412550167958 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.045454545454545484, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.045454545454545484 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4473684210526316, + "acc_stderr": 0.040463368839782514, + "acc_norm": 0.4473684210526316, + "acc_norm_stderr": 0.040463368839782514 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3104575163398693, + "acc_stderr": 0.01871806705262323, + "acc_norm": 0.3104575163398693, + "acc_norm_stderr": 0.01871806705262323 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2872340425531915, + "acc_stderr": 0.026992199173064356, + "acc_norm": 0.2872340425531915, + "acc_norm_stderr": 0.026992199173064356 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25, + "acc_stderr": 0.04109974682633932, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04109974682633932 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4861111111111111, + "acc_stderr": 0.03408655867977748, + "acc_norm": 0.4861111111111111, + "acc_norm_stderr": 0.03408655867977748 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2927374301675978, + "acc_stderr": 0.015218109544410184, + "acc_norm": 0.2927374301675978, + "acc_norm_stderr": 0.015218109544410184 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.04960449637488584, + "acc_norm": 0.42, + "acc_norm_stderr": 0.04960449637488584 + }, + "harness|ko_mmlu_professional_medicine|5": { + 
"acc": 0.4963235294117647, + "acc_stderr": 0.030372015885428195, + "acc_norm": 0.4963235294117647, + "acc_norm_stderr": 0.030372015885428195 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5265306122448979, + "acc_stderr": 0.03196412734523272, + "acc_norm": 0.5265306122448979, + "acc_norm_stderr": 0.03196412734523272 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.4810126582278481, + "acc_stderr": 0.03252375148090448, + "acc_norm": 0.4810126582278481, + "acc_norm_stderr": 0.03252375148090448 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.29595827900912647, + "acc_stderr": 0.011658518525277045, + "acc_norm": 0.29595827900912647, + "acc_norm_stderr": 0.011658518525277045 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4215686274509804, + "acc_stderr": 0.03465868196380758, + "acc_norm": 0.4215686274509804, + "acc_norm_stderr": 0.03465868196380758 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.37575757575757573, + "acc_stderr": 0.037818873532059816, + "acc_norm": 0.37575757575757573, + "acc_norm_stderr": 0.037818873532059816 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24969400244798043, + "mc1_stderr": 0.015152286907148123, + "mc2": 0.4014366801326606, + "mc2_stderr": 0.015063076810132843 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3565525383707202, + "acc_stderr": 0.01646770698152745, + "acc_norm": 0.5147579693034239, + "acc_norm_stderr": 0.01718286443499856 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + 
"harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + 
"harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "sel303/llama3-diverce-ver1.0", + "model_sha": "66de22402dd84fa30b3efb354340cf963bac3683", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/sel303/llama3-diverce-ver1.6/result_2024-07-15 06:43:05.json b/sel303/llama3-diverce-ver1.6/result_2024-07-15 06:43:05.json new file mode 100644 index 0000000000000000000000000000000000000000..d433d8565b9a961c8b344ff86946b8031d625c9e --- /dev/null +++ b/sel303/llama3-diverce-ver1.6/result_2024-07-15 06:43:05.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3430034129692833, + "acc_stderr": 0.013872423223718164, + "acc_norm": 0.37457337883959047, + "acc_norm_stderr": 0.014144193471893444 + }, + "harness|ko_hellaswag|10": { + "acc": 0.35441147181836286, + "acc_stderr": 0.004773570096185051, + "acc_norm": 0.47092212706632147, + "acc_norm_stderr": 0.004981336318033644 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.47368421052631576, + "acc_stderr": 0.038295098689947286, + "acc_norm": 0.47368421052631576, + "acc_norm_stderr": 0.038295098689947286 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5825242718446602, + "acc_stderr": 0.048828405482122375, + "acc_norm": 0.5825242718446602, + "acc_norm_stderr": 0.048828405482122375 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.421455938697318, + "acc_stderr": 0.01765797641265486, + "acc_norm": 0.421455938697318, + "acc_norm_stderr": 0.01765797641265486 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.03820169914517906, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 
0.03820169914517906 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4127659574468085, + "acc_stderr": 0.03218471141400351, + "acc_norm": 0.4127659574468085, + "acc_norm_stderr": 0.03218471141400351 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3433734939759036, + "acc_stderr": 0.03696584317010602, + "acc_norm": 0.3433734939759036, + "acc_norm_stderr": 0.03696584317010602 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.41479099678456594, + "acc_stderr": 0.027982680459759556, + "acc_norm": 0.41479099678456594, + "acc_norm_stderr": 0.027982680459759556 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.336322869955157, + "acc_stderr": 0.031708824268455, + "acc_norm": 0.336322869955157, + "acc_norm_stderr": 0.031708824268455 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4580152671755725, + "acc_stderr": 0.04369802690578756, + "acc_norm": 0.4580152671755725, + "acc_norm_stderr": 0.04369802690578756 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.035402943770953675, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.035402943770953675 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4068965517241379, + "acc_stderr": 0.040937939812662374, + "acc_norm": 0.4068965517241379, + "acc_norm_stderr": 0.040937939812662374 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3235294117647059, + "acc_stderr": 0.04655010411319617, + "acc_norm": 0.3235294117647059, + "acc_norm_stderr": 0.04655010411319617 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5336134453781513, + "acc_stderr": 0.03240501447690071, + "acc_norm": 0.5336134453781513, + 
"acc_norm_stderr": 0.03240501447690071 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.45897435897435895, + "acc_stderr": 0.025265525491284295, + "acc_norm": 0.45897435897435895, + "acc_norm_stderr": 0.025265525491284295 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.55, + "acc_stderr": 0.04999999999999999, + "acc_norm": 0.55, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4537037037037037, + "acc_stderr": 0.048129173245368216, + "acc_norm": 0.4537037037037037, + "acc_norm_stderr": 0.048129173245368216 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.43842364532019706, + "acc_stderr": 0.03491207857486518, + "acc_norm": 0.43842364532019706, + "acc_norm_stderr": 0.03491207857486518 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4483870967741935, + "acc_stderr": 0.028292056830112735, + "acc_norm": 0.4483870967741935, + "acc_norm_stderr": 0.028292056830112735 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5897435897435898, + "acc_stderr": 0.03222414045241107, + "acc_norm": 0.5897435897435898, + "acc_norm_stderr": 0.03222414045241107 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.45660377358490567, + "acc_stderr": 0.030656748696739435, + "acc_norm": 0.45660377358490567, + "acc_norm_stderr": 0.030656748696739435 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4727272727272727, + "acc_stderr": 0.04782001791380063, + "acc_norm": 0.4727272727272727, + "acc_norm_stderr": 0.04782001791380063 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.02831753349606646, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.02831753349606646 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.36423841059602646, + "acc_stderr": 
0.03929111781242741, + "acc_norm": 0.36423841059602646, + "acc_norm_stderr": 0.03929111781242741 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5572139303482587, + "acc_stderr": 0.035123109641239374, + "acc_norm": 0.5572139303482587, + "acc_norm_stderr": 0.035123109641239374 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.42196531791907516, + "acc_stderr": 0.0376574669386515, + "acc_norm": 0.42196531791907516, + "acc_norm_stderr": 0.0376574669386515 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.024278568024307702, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.024278568024307702 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4097222222222222, + "acc_stderr": 0.04112490974670787, + "acc_norm": 0.4097222222222222, + "acc_norm_stderr": 0.04112490974670787 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.53, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.53, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.41329479768786126, + "acc_stderr": 0.02651126136940924, + "acc_norm": 0.41329479768786126, + "acc_norm_stderr": 0.02651126136940924 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3558282208588957, + "acc_stderr": 0.03761521380046734, + "acc_norm": 0.3558282208588957, + "acc_norm_stderr": 0.03761521380046734 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4567901234567901, + "acc_stderr": 0.027716661650194045, + "acc_norm": 0.4567901234567901, + "acc_norm_stderr": 0.027716661650194045 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5233160621761658, + 
"acc_stderr": 0.03604513672442203, + "acc_norm": 0.5233160621761658, + "acc_norm_stderr": 0.03604513672442203 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2894736842105263, + "acc_stderr": 0.04266339443159395, + "acc_norm": 0.2894736842105263, + "acc_norm_stderr": 0.04266339443159395 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5284403669724771, + "acc_stderr": 0.021402615697348054, + "acc_norm": 0.5284403669724771, + "acc_norm_stderr": 0.021402615697348054 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.36507936507936506, + "acc_stderr": 0.043062412591271526, + "acc_norm": 0.36507936507936506, + "acc_norm_stderr": 0.043062412591271526 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5098039215686274, + "acc_stderr": 0.028624412550167958, + "acc_norm": 0.5098039215686274, + "acc_norm_stderr": 0.028624412550167958 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.043913262867240704, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.043913262867240704 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5263157894736842, + "acc_stderr": 0.040633027314866725, + "acc_norm": 0.5263157894736842, + "acc_norm_stderr": 0.040633027314866725 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3104575163398693, + "acc_stderr": 0.018718067052623234, + "acc_norm": 0.3104575163398693, + "acc_norm_stderr": 0.018718067052623234 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.30141843971631205, + "acc_stderr": 0.02737412888263115, + "acc_norm": 0.30141843971631205, + "acc_norm_stderr": 0.02737412888263115 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.20535714285714285, + "acc_stderr": 0.03834241021419073, + "acc_norm": 0.20535714285714285, + "acc_norm_stderr": 0.03834241021419073 + }, + "harness|ko_mmlu_high_school_statistics|5": 
{ + "acc": 0.4861111111111111, + "acc_stderr": 0.03408655867977748, + "acc_norm": 0.4861111111111111, + "acc_norm_stderr": 0.03408655867977748 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.28044692737430166, + "acc_stderr": 0.015024083883322879, + "acc_norm": 0.28044692737430166, + "acc_norm_stderr": 0.015024083883322879 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.41, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.49264705882352944, + "acc_stderr": 0.030369552523902173, + "acc_norm": 0.49264705882352944, + "acc_norm_stderr": 0.030369552523902173 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5183673469387755, + "acc_stderr": 0.031987615467631264, + "acc_norm": 0.5183673469387755, + "acc_norm_stderr": 0.031987615467631264 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5147679324894515, + "acc_stderr": 0.032533028078777386, + "acc_norm": 0.5147679324894515, + "acc_norm_stderr": 0.032533028078777386 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3109517601043025, + "acc_stderr": 0.011822252917799201, + "acc_norm": 0.3109517601043025, + "acc_norm_stderr": 0.011822252917799201 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.44607843137254904, + "acc_stderr": 0.03488845451304974, + "acc_norm": 0.44607843137254904, + "acc_norm_stderr": 0.03488845451304974 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4484848484848485, + "acc_stderr": 0.038835659779569286, + "acc_norm": 0.4484848484848485, + "acc_norm_stderr": 0.038835659779569286 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2741738066095471, + "mc1_stderr": 0.015616518497219374, + "mc2": 0.43530847894697894, + "mc2_stderr": 
0.01540047469531341 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4297520661157025, + "acc_stderr": 0.01701984753597221, + "acc_norm": 0.5726092089728453, + "acc_norm_stderr": 0.017008129844823156 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, 
+ "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "sel303/llama3-diverce-ver1.6", + "model_sha": "e71e9a0b15480023d45a3964225d041a25b7f2c5", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/sel303/llama3-instruct-diverce-v2.0/result_2024-08-05 06:31:51.json b/sel303/llama3-instruct-diverce-v2.0/result_2024-08-05 06:31:51.json new file mode 100644 index 0000000000000000000000000000000000000000..0b1e0674cc3e3b349d6907a880eb92996f77a343 --- /dev/null +++ b/sel303/llama3-instruct-diverce-v2.0/result_2024-08-05 06:31:51.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3412969283276451, + "acc_stderr": 0.01385583128749772, + "acc_norm": 0.3873720136518771, + "acc_norm_stderr": 0.014235872487909872 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3661621190997809, + "acc_stderr": 0.004807699539973412, + "acc_norm": 0.4676359290977893, + "acc_norm_stderr": 0.004979317515432527 + }, + 
"harness|ko_mmlu_world_religions|5": { + "acc": 0.5029239766081871, + "acc_stderr": 0.03834759370936839, + "acc_norm": 0.5029239766081871, + "acc_norm_stderr": 0.03834759370936839 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5339805825242718, + "acc_stderr": 0.04939291447273481, + "acc_norm": 0.5339805825242718, + "acc_norm_stderr": 0.04939291447273481 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.45338441890166026, + "acc_stderr": 0.017802087135850297, + "acc_norm": 0.45338441890166026, + "acc_norm_stderr": 0.017802087135850297 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4148148148148148, + "acc_stderr": 0.04256193767901407, + "acc_norm": 0.4148148148148148, + "acc_norm_stderr": 0.04256193767901407 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4127659574468085, + "acc_stderr": 0.03218471141400351, + "acc_norm": 0.4127659574468085, + "acc_norm_stderr": 0.03218471141400351 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42168674698795183, + "acc_stderr": 0.03844453181770917, + "acc_norm": 0.42168674698795183, + "acc_norm_stderr": 0.03844453181770917 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4790996784565916, + "acc_stderr": 0.028373270961069414, + "acc_norm": 0.4790996784565916, + "acc_norm_stderr": 0.028373270961069414 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.43946188340807174, + "acc_stderr": 0.03331092511038179, + "acc_norm": 0.43946188340807174, + "acc_norm_stderr": 0.03331092511038179 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5343511450381679, + "acc_stderr": 0.04374928560599738, + "acc_norm": 0.5343511450381679, + "acc_norm_stderr": 0.04374928560599738 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.41, + "acc_stderr": 0.04943110704237103, + "acc_norm": 0.41, + "acc_norm_stderr": 0.04943110704237103 + }, + 
"harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5050505050505051, + "acc_stderr": 0.035621707606254015, + "acc_norm": 0.5050505050505051, + "acc_norm_stderr": 0.035621707606254015 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5448275862068965, + "acc_stderr": 0.04149886942192118, + "acc_norm": 0.5448275862068965, + "acc_norm_stderr": 0.04149886942192118 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.04488482852329017, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.04488482852329017 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.46638655462184875, + "acc_stderr": 0.03240501447690071, + "acc_norm": 0.46638655462184875, + "acc_norm_stderr": 0.03240501447690071 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.441025641025641, + "acc_stderr": 0.025174048384000766, + "acc_norm": 0.441025641025641, + "acc_norm_stderr": 0.025174048384000766 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5370370370370371, + "acc_stderr": 0.04820403072760627, + "acc_norm": 0.5370370370370371, + "acc_norm_stderr": 0.04820403072760627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39408866995073893, + "acc_stderr": 0.03438157967036545, + "acc_norm": 0.39408866995073893, + "acc_norm_stderr": 0.03438157967036545 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.47096774193548385, + "acc_stderr": 0.028396016402761, + "acc_norm": 0.47096774193548385, + "acc_norm_stderr": 0.028396016402761 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6623931623931624, + "acc_stderr": 0.030980296992618558, + "acc_norm": 0.6623931623931624, + 
"acc_norm_stderr": 0.030980296992618558 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4716981132075472, + "acc_stderr": 0.030723535249006107, + "acc_norm": 0.4716981132075472, + "acc_norm_stderr": 0.030723535249006107 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4727272727272727, + "acc_stderr": 0.04782001791380063, + "acc_norm": 0.4727272727272727, + "acc_norm_stderr": 0.04782001791380063 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.028317533496066468, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.028317533496066468 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.32450331125827814, + "acc_stderr": 0.038227469376587525, + "acc_norm": 0.32450331125827814, + "acc_norm_stderr": 0.038227469376587525 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5671641791044776, + "acc_stderr": 0.0350349092367328, + "acc_norm": 0.5671641791044776, + "acc_norm_stderr": 0.0350349092367328 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4393063583815029, + "acc_stderr": 0.03784271932887467, + "acc_norm": 0.4393063583815029, + "acc_norm_stderr": 0.03784271932887467 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3412698412698413, + "acc_stderr": 0.024419234966819057, + "acc_norm": 0.3412698412698413, + "acc_norm_stderr": 0.024419234966819057 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4305555555555556, + "acc_stderr": 0.04140685639111502, + "acc_norm": 0.4305555555555556, + "acc_norm_stderr": 0.04140685639111502 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.62, + "acc_stderr": 0.048783173121456344, + "acc_norm": 0.62, + "acc_norm_stderr": 0.048783173121456344 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5375722543352601, + "acc_stderr": 0.02684298551961537, + 
"acc_norm": 0.5375722543352601, + "acc_norm_stderr": 0.02684298551961537 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.43558282208588955, + "acc_stderr": 0.038956324641389366, + "acc_norm": 0.43558282208588955, + "acc_norm_stderr": 0.038956324641389366 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.027648477877413313, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.027648477877413313 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.48186528497409326, + "acc_stderr": 0.036060650018329185, + "acc_norm": 0.48186528497409326, + "acc_norm_stderr": 0.036060650018329185 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2894736842105263, + "acc_stderr": 0.04266339443159394, + "acc_norm": 0.2894736842105263, + "acc_norm_stderr": 0.04266339443159394 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.4935779816513762, + "acc_stderr": 0.021435554820013077, + "acc_norm": 0.4935779816513762, + "acc_norm_stderr": 0.021435554820013077 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.38095238095238093, + "acc_stderr": 0.04343525428949098, + "acc_norm": 0.38095238095238093, + "acc_norm_stderr": 0.04343525428949098 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5032679738562091, + "acc_stderr": 0.028629305194003543, + "acc_norm": 0.5032679738562091, + "acc_norm_stderr": 0.028629305194003543 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6446280991735537, + "acc_stderr": 0.0436923632657398, + "acc_norm": 0.6446280991735537, + "acc_norm_stderr": 0.0436923632657398 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.45394736842105265, + 
"acc_stderr": 0.04051646342874141, + "acc_norm": 0.45394736842105265, + "acc_norm_stderr": 0.04051646342874141 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.39705882352941174, + "acc_stderr": 0.019794488900024103, + "acc_norm": 0.39705882352941174, + "acc_norm_stderr": 0.019794488900024103 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.30141843971631205, + "acc_stderr": 0.02737412888263115, + "acc_norm": 0.30141843971631205, + "acc_norm_stderr": 0.02737412888263115 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.04287858751340455, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.04287858751340455 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.39351851851851855, + "acc_stderr": 0.03331747876370312, + "acc_norm": 0.39351851851851855, + "acc_norm_stderr": 0.03331747876370312 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.25251396648044694, + "acc_stderr": 0.014530330201468636, + "acc_norm": 0.25251396648044694, + "acc_norm_stderr": 0.014530330201468636 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.53, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.53, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.31985294117647056, + "acc_stderr": 0.02833295951403121, + "acc_norm": 0.31985294117647056, + "acc_norm_stderr": 0.02833295951403121 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5673469387755102, + "acc_stderr": 0.031717528240626645, + "acc_norm": 0.5673469387755102, + "acc_norm_stderr": 0.031717528240626645 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5611814345991561, + "acc_stderr": 0.032302649315470375, + "acc_norm": 0.5611814345991561, + "acc_norm_stderr": 
0.032302649315470375 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3318122555410691, + "acc_stderr": 0.012026088259897627, + "acc_norm": 0.3318122555410691, + "acc_norm_stderr": 0.012026088259897627 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.46568627450980393, + "acc_stderr": 0.035010383276358976, + "acc_norm": 0.46568627450980393, + "acc_norm_stderr": 0.035010383276358976 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5212121212121212, + "acc_stderr": 0.03900828913737302, + "acc_norm": 0.5212121212121212, + "acc_norm_stderr": 0.03900828913737302 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.30599755201958384, + "mc1_stderr": 0.016132229728155055, + "mc2": 0.46936312721663837, + "mc2_stderr": 0.015567048292607315 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.42739079102715466, + "acc_stderr": 0.01700812984482316, + "acc_norm": 0.4769775678866588, + "acc_norm_stderr": 0.017172121546727634 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 
1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "sel303/llama3-instruct-diverce-v2.0", + "model_sha": "2506d237b544d6311b28ecab28a6cafba245507c", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file 
diff --git a/seungduk/Bookworm-10.7B-v0.1/result_2023-12-31 14:12:29.json b/seungduk/Bookworm-10.7B-v0.1/result_2023-12-31 14:12:29.json new file mode 100644 index 0000000000000000000000000000000000000000..9a3c631f04336c128fd765f152dc5c18d296a39e --- /dev/null +++ b/seungduk/Bookworm-10.7B-v0.1/result_2023-12-31 14:12:29.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.44112627986348124, + "acc_stderr": 0.014509747749064663, + "acc_norm": 0.49402730375426623, + "acc_norm_stderr": 0.014610348300255795 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4231228838876718, + "acc_stderr": 0.004930448527146661, + "acc_norm": 0.5552678749253137, + "acc_norm_stderr": 0.004959204773046202 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.49707602339181284, + "acc_stderr": 0.03834759370936839, + "acc_norm": 0.49707602339181284, + "acc_norm_stderr": 0.03834759370936839 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5533980582524272, + "acc_stderr": 0.04922424153458934, + "acc_norm": 0.5533980582524272, + "acc_norm_stderr": 0.04922424153458934 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5708812260536399, + "acc_stderr": 0.017699388483126785, + "acc_norm": 0.5708812260536399, + "acc_norm_stderr": 0.017699388483126785 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.362962962962963, + "acc_stderr": 0.04153948404742401, + "acc_norm": 0.362962962962963, + "acc_norm_stderr": 0.04153948404742401 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909284, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909284 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3872340425531915, + "acc_stderr": 0.03184389265339526, + "acc_norm": 0.3872340425531915, + "acc_norm_stderr": 0.03184389265339526 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42168674698795183, + "acc_stderr": 0.03844453181770917, + "acc_norm": 0.42168674698795183, + "acc_norm_stderr": 0.03844453181770917 + }, + 
"harness|ko_mmlu_philosophy|5": { + "acc": 0.5273311897106109, + "acc_stderr": 0.028355633568328188, + "acc_norm": 0.5273311897106109, + "acc_norm_stderr": 0.028355633568328188 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.47533632286995514, + "acc_stderr": 0.03351695167652628, + "acc_norm": 0.47533632286995514, + "acc_norm_stderr": 0.03351695167652628 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5190839694656488, + "acc_stderr": 0.04382094705550988, + "acc_norm": 0.5190839694656488, + "acc_norm_stderr": 0.04382094705550988 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6060606060606061, + "acc_stderr": 0.034812853382329645, + "acc_norm": 0.6060606060606061, + "acc_norm_stderr": 0.034812853382329645 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.43448275862068964, + "acc_stderr": 0.041307408795554966, + "acc_norm": 0.43448275862068964, + "acc_norm_stderr": 0.041307408795554966 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.04158307533083286, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.04158307533083286 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5210084033613446, + "acc_stderr": 0.03244980849990029, + "acc_norm": 0.5210084033613446, + "acc_norm_stderr": 0.03244980849990029 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5128205128205128, + "acc_stderr": 0.025342671293807247, + "acc_norm": 0.5128205128205128, + "acc_norm_stderr": 0.025342671293807247 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 
0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.35467980295566504, + "acc_stderr": 0.03366124489051448, + "acc_norm": 0.35467980295566504, + "acc_norm_stderr": 0.03366124489051448 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4838709677419355, + "acc_stderr": 0.028429203176724555, + "acc_norm": 0.4838709677419355, + "acc_norm_stderr": 0.028429203176724555 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7051282051282052, + "acc_stderr": 0.02987257770889118, + "acc_norm": 0.7051282051282052, + "acc_norm_stderr": 0.02987257770889118 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4830188679245283, + "acc_stderr": 0.030755120364119898, + "acc_norm": 0.4830188679245283, + "acc_norm_stderr": 0.030755120364119898 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4727272727272727, + "acc_stderr": 0.04782001791380063, + "acc_norm": 0.4727272727272727, + "acc_norm_stderr": 0.04782001791380063 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.027840811495871934, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.027840811495871934 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.038020397601079024, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.038020397601079024 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6368159203980099, + "acc_stderr": 0.034005985055990146, + "acc_norm": 0.6368159203980099, + "acc_norm_stderr": 0.034005985055990146 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4277456647398844, + "acc_stderr": 0.03772446857518027, + "acc_norm": 0.4277456647398844, + "acc_norm_stderr": 0.03772446857518027 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3439153439153439, + "acc_stderr": 
0.024464426625596433, + "acc_norm": 0.3439153439153439, + "acc_norm_stderr": 0.024464426625596433 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.04076663253918567, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.04076663253918567 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.67, + "acc_stderr": 0.047258156262526066, + "acc_norm": 0.67, + "acc_norm_stderr": 0.047258156262526066 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.476878612716763, + "acc_stderr": 0.026890297881303125, + "acc_norm": 0.476878612716763, + "acc_norm_stderr": 0.026890297881303125 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4233128834355828, + "acc_stderr": 0.03881891213334385, + "acc_norm": 0.4233128834355828, + "acc_norm_stderr": 0.03881891213334385 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5154320987654321, + "acc_stderr": 0.0278074900442762, + "acc_norm": 0.5154320987654321, + "acc_norm_stderr": 0.0278074900442762 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5854922279792746, + "acc_stderr": 0.035553003195576686, + "acc_norm": 0.5854922279792746, + "acc_norm_stderr": 0.035553003195576686 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.32456140350877194, + "acc_stderr": 0.04404556157374768, + "acc_norm": 0.32456140350877194, + "acc_norm_stderr": 0.04404556157374768 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5412844036697247, + "acc_stderr": 0.021364122533881688, + "acc_norm": 0.5412844036697247, + "acc_norm_stderr": 0.021364122533881688 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.31746031746031744, + 
"acc_stderr": 0.04163453031302859, + "acc_norm": 0.31746031746031744, + "acc_norm_stderr": 0.04163453031302859 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4869281045751634, + "acc_stderr": 0.028620130800700246, + "acc_norm": 0.4869281045751634, + "acc_norm_stderr": 0.028620130800700246 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6033057851239669, + "acc_stderr": 0.044658697805310094, + "acc_norm": 0.6033057851239669, + "acc_norm_stderr": 0.044658697805310094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4276315789473684, + "acc_stderr": 0.04026097083296558, + "acc_norm": 0.4276315789473684, + "acc_norm_stderr": 0.04026097083296558 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.35784313725490197, + "acc_stderr": 0.019393058402355442, + "acc_norm": 0.35784313725490197, + "acc_norm_stderr": 0.019393058402355442 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.30851063829787234, + "acc_stderr": 0.027553366165101366, + "acc_norm": 0.30851063829787234, + "acc_norm_stderr": 0.027553366165101366 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.38392857142857145, + "acc_stderr": 0.04616143075028546, + "acc_norm": 0.38392857142857145, + "acc_norm_stderr": 0.04616143075028546 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.47685185185185186, + "acc_stderr": 0.034063153607115086, + "acc_norm": 0.47685185185185186, + "acc_norm_stderr": 0.034063153607115086 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2748603351955307, + "acc_stderr": 0.01493131670322051, + "acc_norm": 0.2748603351955307, + "acc_norm_stderr": 0.01493131670322051 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + 
"harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4632352941176471, + "acc_stderr": 0.030290619180485694, + "acc_norm": 0.4632352941176471, + "acc_norm_stderr": 0.030290619180485694 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4530612244897959, + "acc_stderr": 0.031867859300041296, + "acc_norm": 0.4530612244897959, + "acc_norm_stderr": 0.031867859300041296 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5527426160337553, + "acc_stderr": 0.03236564251614192, + "acc_norm": 0.5527426160337553, + "acc_norm_stderr": 0.03236564251614192 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.31290743155149936, + "acc_stderr": 0.011842529823063, + "acc_norm": 0.31290743155149936, + "acc_norm_stderr": 0.011842529823063 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5294117647058824, + "acc_stderr": 0.03503235296367993, + "acc_norm": 0.5294117647058824, + "acc_norm_stderr": 0.03503235296367993 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5393939393939394, + "acc_stderr": 0.03892207016552012, + "acc_norm": 0.5393939393939394, + "acc_norm_stderr": 0.03892207016552012 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.31334149326805383, + "mc1_stderr": 0.016238065069059615, + "mc2": 0.4560706988391086, + "mc2_stderr": 0.01540149887311665 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.42857142857142855, + "acc_stderr": 0.017014038119297473, + "acc_norm": 0.47107438016528924, + "acc_norm_stderr": 0.01716156394991635 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + 
"harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + 
"harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "seungduk/Bookworm-10.7B-v0.1", + "model_sha": "0c8247a5eaddb9de4b6357e1eeb6be2b4d8b975c", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/seungduk/Bookworm-10.7B-v0.2/result_2024-01-01 04:53:34.json b/seungduk/Bookworm-10.7B-v0.2/result_2024-01-01 04:53:34.json new file mode 100644 index 0000000000000000000000000000000000000000..7818fed883c25e3e883eb855771cdaf557fe2986 --- /dev/null +++ b/seungduk/Bookworm-10.7B-v0.2/result_2024-01-01 04:53:34.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4590443686006826, + "acc_stderr": 0.014562291073601236, + "acc_norm": 0.5093856655290102, + "acc_norm_stderr": 0.014608816322065 + }, + "harness|ko_hellaswag|10": { + "acc": 0.46654052977494526, + "acc_stderr": 0.004978596394045433, + "acc_norm": 0.6260705038836885, + "acc_norm_stderr": 0.004828564090620287 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.42105263157894735, + "acc_stderr": 0.03786720706234214, + "acc_norm": 0.42105263157894735, + "acc_norm_stderr": 0.03786720706234214 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.3300970873786408, + "acc_stderr": 0.0465614711001235, + "acc_norm": 0.3300970873786408, + "acc_norm_stderr": 0.0465614711001235 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.39719029374201786, + "acc_stderr": 0.01749790503715937, + "acc_norm": 0.39719029374201786, + "acc_norm_stderr": 
0.01749790503715937 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37777777777777777, + "acc_stderr": 0.04188307537595853, + "acc_norm": 0.37777777777777777, + "acc_norm_stderr": 0.04188307537595853 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.26382978723404255, + "acc_stderr": 0.028809989854102956, + "acc_norm": 0.26382978723404255, + "acc_norm_stderr": 0.028809989854102956 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3313253012048193, + "acc_stderr": 0.03664314777288088, + "acc_norm": 0.3313253012048193, + "acc_norm_stderr": 0.03664314777288088 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.43086816720257237, + "acc_stderr": 0.028125340983972708, + "acc_norm": 0.43086816720257237, + "acc_norm_stderr": 0.028125340983972708 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.31390134529147984, + "acc_stderr": 0.031146796482972465, + "acc_norm": 0.31390134529147984, + "acc_norm_stderr": 0.031146796482972465 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.42748091603053434, + "acc_stderr": 0.043389203057924, + "acc_norm": 0.42748091603053434, + "acc_norm_stderr": 0.043389203057924 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.0347327959083696, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.0347327959083696 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.38620689655172413, + "acc_stderr": 0.04057324734419035, + "acc_norm": 0.38620689655172413, + "acc_norm_stderr": 0.04057324734419035 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.17647058823529413, + "acc_stderr": 0.03793281185307809, + "acc_norm": 0.17647058823529413, + 
"acc_norm_stderr": 0.03793281185307809 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3403361344537815, + "acc_stderr": 0.030778057422931673, + "acc_norm": 0.3403361344537815, + "acc_norm_stderr": 0.030778057422931673 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.29743589743589743, + "acc_stderr": 0.02317740813146594, + "acc_norm": 0.29743589743589743, + "acc_norm_stderr": 0.02317740813146594 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.34, + "acc_stderr": 0.047609522856952344, + "acc_norm": 0.34, + "acc_norm_stderr": 0.047609522856952344 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.04489931073591312, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.04489931073591312 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.37438423645320196, + "acc_stderr": 0.03405155380561952, + "acc_norm": 0.37438423645320196, + "acc_norm_stderr": 0.03405155380561952 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3548387096774194, + "acc_stderr": 0.027218889773308753, + "acc_norm": 0.3548387096774194, + "acc_norm_stderr": 0.027218889773308753 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5042735042735043, + "acc_stderr": 0.03275489264382132, + "acc_norm": 0.5042735042735043, + "acc_norm_stderr": 0.03275489264382132 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.30943396226415093, + "acc_stderr": 0.028450154794118627, + "acc_norm": 0.30943396226415093, + "acc_norm_stderr": 0.028450154794118627 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.3181818181818182, + "acc_stderr": 0.044612721759105085, + "acc_norm": 0.3181818181818182, + "acc_norm_stderr": 0.044612721759105085 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.29259259259259257, + "acc_stderr": 
0.027738969632176088, + "acc_norm": 0.29259259259259257, + "acc_norm_stderr": 0.027738969632176088 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.03757949922943343, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.03757949922943343 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.3034825870646766, + "acc_stderr": 0.03251006816458616, + "acc_norm": 0.3034825870646766, + "acc_norm_stderr": 0.03251006816458616 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3236994219653179, + "acc_stderr": 0.0356760379963917, + "acc_norm": 0.3236994219653179, + "acc_norm_stderr": 0.0356760379963917 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30158730158730157, + "acc_stderr": 0.023636975996101817, + "acc_norm": 0.30158730158730157, + "acc_norm_stderr": 0.023636975996101817 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2916666666666667, + "acc_stderr": 0.038009680605548574, + "acc_norm": 0.2916666666666667, + "acc_norm_stderr": 0.038009680605548574 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.33815028901734107, + "acc_stderr": 0.025469770149400172, + "acc_norm": 0.33815028901734107, + "acc_norm_stderr": 0.025469770149400172 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2883435582822086, + "acc_stderr": 0.035590395316173425, + "acc_norm": 0.2883435582822086, + "acc_norm_stderr": 0.035590395316173425 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4012345679012346, + "acc_stderr": 0.027272582849839796, + "acc_norm": 0.4012345679012346, + "acc_norm_stderr": 0.027272582849839796 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.24, + 
"acc_stderr": 0.04292346959909283, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.32642487046632124, + "acc_stderr": 0.03384028621143294, + "acc_norm": 0.32642487046632124, + "acc_norm_stderr": 0.03384028621143294 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2982456140350877, + "acc_stderr": 0.04303684033537317, + "acc_norm": 0.2982456140350877, + "acc_norm_stderr": 0.04303684033537317 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.29908256880733947, + "acc_stderr": 0.019630417285415185, + "acc_norm": 0.29908256880733947, + "acc_norm_stderr": 0.019630417285415185 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.20634920634920634, + "acc_stderr": 0.0361960452412425, + "acc_norm": 0.20634920634920634, + "acc_norm_stderr": 0.0361960452412425 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.369281045751634, + "acc_stderr": 0.027634176689602656, + "acc_norm": 0.369281045751634, + "acc_norm_stderr": 0.027634176689602656 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.47107438016528924, + "acc_stderr": 0.04556710331269498, + "acc_norm": 0.47107438016528924, + "acc_norm_stderr": 0.04556710331269498 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.34210526315789475, + "acc_stderr": 0.03860731599316092, + "acc_norm": 0.34210526315789475, + "acc_norm_stderr": 0.03860731599316092 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.25, + "acc_stderr": 0.01751781884501444, + "acc_norm": 0.25, + "acc_norm_stderr": 0.01751781884501444 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2765957446808511, + "acc_stderr": 0.026684564340460997, + "acc_norm": 0.2765957446808511, + "acc_norm_stderr": 0.026684564340460997 + }, + "harness|ko_mmlu_machine_learning|5": { + 
"acc": 0.21428571428571427, + "acc_stderr": 0.03894641120044792, + "acc_norm": 0.21428571428571427, + "acc_norm_stderr": 0.03894641120044792 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.44907407407407407, + "acc_stderr": 0.03392238405321617, + "acc_norm": 0.44907407407407407, + "acc_norm_stderr": 0.03392238405321617 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2435754189944134, + "acc_stderr": 0.01435591196476786, + "acc_norm": 0.2435754189944134, + "acc_norm_stderr": 0.01435591196476786 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.04943110704237101, + "acc_norm": 0.41, + "acc_norm_stderr": 0.04943110704237101 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.0301619119307671, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.0301619119307671 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3877551020408163, + "acc_stderr": 0.03119223072679566, + "acc_norm": 0.3877551020408163, + "acc_norm_stderr": 0.03119223072679566 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.35443037974683544, + "acc_stderr": 0.031137304297185812, + "acc_norm": 0.35443037974683544, + "acc_norm_stderr": 0.031137304297185812 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.25749674054758803, + "acc_stderr": 0.011167706014904156, + "acc_norm": 0.25749674054758803, + "acc_norm_stderr": 0.011167706014904156 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.47058823529411764, + "acc_stderr": 0.0350323529636799, + "acc_norm": 0.47058823529411764, + "acc_norm_stderr": 0.0350323529636799 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6848484848484848, + "acc_stderr": 0.0362773057502241, + "acc_norm": 0.6848484848484848, + 
"acc_norm_stderr": 0.0362773057502241 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.31334149326805383, + "mc1_stderr": 0.016238065069059608, + "mc2": 0.4749448840417777, + "mc2_stderr": 0.015366581435312424 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5430932703659976, + "acc_stderr": 0.01712638909308678, + "acc_norm": 0.5749704840613932, + "acc_norm_stderr": 0.016996016308362887 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + 
"harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "seungduk/Bookworm-10.7B-v0.2", + "model_sha": "f8229b91127baa27ccf2a136809618d9cf2442e0", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/seungduk/Bookworm-10.7B-v0.3/result_2024-01-01 11:11:55.json b/seungduk/Bookworm-10.7B-v0.3/result_2024-01-01 11:11:55.json new file mode 100644 index 0000000000000000000000000000000000000000..9de2ef82a746c1dadba4fe6dd8996b61ad39b2ea --- /dev/null +++ b/seungduk/Bookworm-10.7B-v0.3/result_2024-01-01 11:11:55.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.45819112627986347, + "acc_stderr": 0.014560220308714691, + "acc_norm": 0.523037542662116, + "acc_norm_stderr": 0.014595873205358267 + }, + "harness|ko_hellaswag|10": { + 
"acc": 0.46863174666401114, + "acc_stderr": 0.004979952166595525, + "acc_norm": 0.6290579565823541, + "acc_norm_stderr": 0.004820697457420421 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6608187134502924, + "acc_stderr": 0.036310534964889056, + "acc_norm": 0.6608187134502924, + "acc_norm_stderr": 0.036310534964889056 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6504854368932039, + "acc_stderr": 0.047211885060971716, + "acc_norm": 0.6504854368932039, + "acc_norm_stderr": 0.047211885060971716 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6717752234993615, + "acc_stderr": 0.01679168564019289, + "acc_norm": 0.6717752234993615, + "acc_norm_stderr": 0.01679168564019289 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4, + "acc_stderr": 0.04232073695151589, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04232073695151589 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.548936170212766, + "acc_stderr": 0.032529096196131965, + "acc_norm": 0.548936170212766, + "acc_norm_stderr": 0.032529096196131965 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4397590361445783, + "acc_stderr": 0.03864139923699122, + "acc_norm": 0.4397590361445783, + "acc_norm_stderr": 0.03864139923699122 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6205787781350482, + "acc_stderr": 0.02755994980234782, + "acc_norm": 0.6205787781350482, + "acc_norm_stderr": 0.02755994980234782 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5874439461883408, + "acc_stderr": 0.03304062175449297, + "acc_norm": 0.5874439461883408, + "acc_norm_stderr": 0.03304062175449297 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6335877862595419, + "acc_stderr": 0.042258754519696386, + "acc_norm": 0.6335877862595419, + "acc_norm_stderr": 0.042258754519696386 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.49, + 
"acc_stderr": 0.05024183937956913, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956913 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7121212121212122, + "acc_stderr": 0.03225883512300993, + "acc_norm": 0.7121212121212122, + "acc_norm_stderr": 0.03225883512300993 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5586206896551724, + "acc_stderr": 0.04137931034482758, + "acc_norm": 0.5586206896551724, + "acc_norm_stderr": 0.04137931034482758 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.04220773659171452, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.04220773659171452 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5966386554621849, + "acc_stderr": 0.031866081214088314, + "acc_norm": 0.5966386554621849, + "acc_norm_stderr": 0.031866081214088314 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5743589743589743, + "acc_stderr": 0.02506909438729651, + "acc_norm": 0.5743589743589743, + "acc_norm_stderr": 0.02506909438729651 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6574074074074074, + "acc_stderr": 0.045879047413018105, + "acc_norm": 0.6574074074074074, + "acc_norm_stderr": 0.045879047413018105 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.03465304488406795, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.03465304488406795 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6258064516129033, + "acc_stderr": 0.0275289042998457, + "acc_norm": 0.6258064516129033, + "acc_norm_stderr": 0.0275289042998457 + }, + "harness|ko_mmlu_marketing|5": { + 
"acc": 0.8034188034188035, + "acc_stderr": 0.02603538609895129, + "acc_norm": 0.8034188034188035, + "acc_norm_stderr": 0.02603538609895129 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.539622641509434, + "acc_stderr": 0.030676096599389177, + "acc_norm": 0.539622641509434, + "acc_norm_stderr": 0.030676096599389177 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5818181818181818, + "acc_stderr": 0.04724577405731572, + "acc_norm": 0.5818181818181818, + "acc_norm_stderr": 0.04724577405731572 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.32592592592592595, + "acc_stderr": 0.028578348365473075, + "acc_norm": 0.32592592592592595, + "acc_norm_stderr": 0.028578348365473075 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33774834437086093, + "acc_stderr": 0.038615575462551684, + "acc_norm": 0.33774834437086093, + "acc_norm_stderr": 0.038615575462551684 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7114427860696517, + "acc_stderr": 0.0320384104021332, + "acc_norm": 0.7114427860696517, + "acc_norm_stderr": 0.0320384104021332 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.48554913294797686, + "acc_stderr": 0.03810871630454764, + "acc_norm": 0.48554913294797686, + "acc_norm_stderr": 0.03810871630454764 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.024677862841332786, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.024677862841332786 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5208333333333334, + "acc_stderr": 0.04177578950739994, + "acc_norm": 0.5208333333333334, + "acc_norm_stderr": 0.04177578950739994 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.75, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.75, + "acc_norm_stderr": 0.04351941398892446 + }, + 
"harness|ko_mmlu_moral_disputes|5": { + "acc": 0.6069364161849711, + "acc_stderr": 0.026296227915613674, + "acc_norm": 0.6069364161849711, + "acc_norm_stderr": 0.026296227915613674 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5398773006134969, + "acc_stderr": 0.03915857291436971, + "acc_norm": 0.5398773006134969, + "acc_norm_stderr": 0.03915857291436971 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6172839506172839, + "acc_stderr": 0.027044538138402605, + "acc_norm": 0.6172839506172839, + "acc_norm_stderr": 0.027044538138402605 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.44, + "acc_stderr": 0.049888765156985905, + "acc_norm": 0.44, + "acc_norm_stderr": 0.049888765156985905 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6839378238341969, + "acc_stderr": 0.033553973696861736, + "acc_norm": 0.6839378238341969, + "acc_norm_stderr": 0.033553973696861736 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.42105263157894735, + "acc_stderr": 0.046446020912223177, + "acc_norm": 0.42105263157894735, + "acc_norm_stderr": 0.046446020912223177 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.7009174311926606, + "acc_stderr": 0.019630417285415164, + "acc_norm": 0.7009174311926606, + "acc_norm_stderr": 0.019630417285415164 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.38095238095238093, + "acc_stderr": 0.043435254289490965, + "acc_norm": 0.38095238095238093, + "acc_norm_stderr": 0.043435254289490965 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5718954248366013, + "acc_stderr": 0.028332397483664278, + "acc_norm": 0.5718954248366013, + "acc_norm_stderr": 0.028332397483664278 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.62, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.62, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.71900826446281, + "acc_stderr": 0.04103203830514511, + "acc_norm": 0.71900826446281, + 
"acc_norm_stderr": 0.04103203830514511 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.6118421052631579, + "acc_stderr": 0.03965842097512744, + "acc_norm": 0.6118421052631579, + "acc_norm_stderr": 0.03965842097512744 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.49673202614379086, + "acc_stderr": 0.020227402794434867, + "acc_norm": 0.49673202614379086, + "acc_norm_stderr": 0.020227402794434867 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.35815602836879434, + "acc_stderr": 0.02860208586275942, + "acc_norm": 0.35815602836879434, + "acc_norm_stderr": 0.02860208586275942 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4642857142857143, + "acc_stderr": 0.04733667890053756, + "acc_norm": 0.4642857142857143, + "acc_norm_stderr": 0.04733667890053756 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4861111111111111, + "acc_stderr": 0.034086558679777494, + "acc_norm": 0.4861111111111111, + "acc_norm_stderr": 0.034086558679777494 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27262569832402234, + "acc_stderr": 0.01489339173524962, + "acc_norm": 0.27262569832402234, + "acc_norm_stderr": 0.01489339173524962 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.66, + "acc_stderr": 0.04760952285695238, + "acc_norm": 0.66, + "acc_norm_stderr": 0.04760952285695238 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5514705882352942, + "acc_stderr": 0.0302114796091216, + "acc_norm": 0.5514705882352942, + "acc_norm_stderr": 0.0302114796091216 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.673469387755102, + "acc_stderr": 0.030021056238440317, + "acc_norm": 0.673469387755102, + "acc_norm_stderr": 0.030021056238440317 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7172995780590717, + 
"acc_stderr": 0.029312814153955934, + "acc_norm": 0.7172995780590717, + "acc_norm_stderr": 0.029312814153955934 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3767926988265971, + "acc_stderr": 0.012376459593894398, + "acc_norm": 0.3767926988265971, + "acc_norm_stderr": 0.012376459593894398 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.033086111132364364, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.033086111132364364 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.036810508691615486, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.036810508691615486 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.31211750305997554, + "mc1_stderr": 0.01622075676952091, + "mc2": 0.4672255375589832, + "mc2_stderr": 0.01520341293906113 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5419126328217237, + "acc_stderr": 0.017129852117911147, + "acc_norm": 0.5761511216056671, + "acc_norm_stderr": 0.01698981083462825 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + 
"harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "seungduk/Bookworm-10.7B-v0.3", + "model_sha": "53fea0a56d3e4777709ed55347d1cbb1f93a6a48", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 
0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/seungduk/KoSOLAR-10.7B-v0.1/result_2023-12-28 09:04:26.json b/seungduk/KoSOLAR-10.7B-v0.1/result_2023-12-28 09:04:26.json new file mode 100644 index 0000000000000000000000000000000000000000..aa2e03065b1eb116e3ebf882d2bd6ce02fbf1a9e --- /dev/null +++ b/seungduk/KoSOLAR-10.7B-v0.1/result_2023-12-28 09:04:26.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4180887372013652, + "acc_stderr": 0.01441398839699608, + "acc_norm": 0.4718430034129693, + "acc_norm_stderr": 0.014588204105102202 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4325831507667795, + "acc_stderr": 0.004944215937021397, + "acc_norm": 0.595399322844055, + "acc_norm_stderr": 0.00489811511097503 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6023391812865497, + "acc_stderr": 0.03753638955761691, + "acc_norm": 0.6023391812865497, + "acc_norm_stderr": 0.03753638955761691 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6310679611650486, + "acc_stderr": 0.0477761518115674, + "acc_norm": 0.6310679611650486, + "acc_norm_stderr": 0.0477761518115674 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6334610472541508, + "acc_stderr": 0.017231244626797027, + "acc_norm": 0.6334610472541508, + "acc_norm_stderr": 0.017231244626797027 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.043097329010363554, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.043097329010363554 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542125, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542125 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4425531914893617, + "acc_stderr": 0.03246956919789959, + "acc_norm": 0.4425531914893617, + "acc_norm_stderr": 0.03246956919789959 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4759036144578313, + "acc_stderr": 
0.03887971849597264, + "acc_norm": 0.4759036144578313, + "acc_norm_stderr": 0.03887971849597264 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5562700964630225, + "acc_stderr": 0.028217683556652308, + "acc_norm": 0.5562700964630225, + "acc_norm_stderr": 0.028217683556652308 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5605381165919282, + "acc_stderr": 0.03331092511038179, + "acc_norm": 0.5605381165919282, + "acc_norm_stderr": 0.03331092511038179 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6793893129770993, + "acc_stderr": 0.040933292298342784, + "acc_norm": 0.6793893129770993, + "acc_norm_stderr": 0.040933292298342784 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.702020202020202, + "acc_stderr": 0.032586303838365555, + "acc_norm": 0.702020202020202, + "acc_norm_stderr": 0.032586303838365555 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4689655172413793, + "acc_stderr": 0.04158632762097828, + "acc_norm": 0.4689655172413793, + "acc_norm_stderr": 0.04158632762097828 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.04617034827006716, + "acc_norm": 0.3137254901960784, + "acc_norm_stderr": 0.04617034827006716 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5882352941176471, + "acc_stderr": 0.031968769891957786, + "acc_norm": 0.5882352941176471, + "acc_norm_stderr": 0.031968769891957786 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5333333333333333, + "acc_stderr": 0.025294608023986462, + "acc_norm": 0.5333333333333333, + "acc_norm_stderr": 0.025294608023986462 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_global_facts|5": { + 
"acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5925925925925926, + "acc_stderr": 0.04750077341199984, + "acc_norm": 0.5925925925925926, + "acc_norm_stderr": 0.04750077341199984 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.43349753694581283, + "acc_stderr": 0.034867317274198714, + "acc_norm": 0.43349753694581283, + "acc_norm_stderr": 0.034867317274198714 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5741935483870968, + "acc_stderr": 0.028129112709165904, + "acc_norm": 0.5741935483870968, + "acc_norm_stderr": 0.028129112709165904 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7863247863247863, + "acc_stderr": 0.02685345037700914, + "acc_norm": 0.7863247863247863, + "acc_norm_stderr": 0.02685345037700914 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5320754716981132, + "acc_stderr": 0.03070948699255655, + "acc_norm": 0.5320754716981132, + "acc_norm_stderr": 0.03070948699255655 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5818181818181818, + "acc_stderr": 0.047245774057315726, + "acc_norm": 0.5818181818181818, + "acc_norm_stderr": 0.047245774057315726 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.028742040903948496, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.028742040903948496 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.038020397601079024, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.038020397601079024 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7014925373134329, + "acc_stderr": 0.032357437893550445, + "acc_norm": 0.7014925373134329, + "acc_norm_stderr": 0.032357437893550445 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4797687861271676, + "acc_stderr": 0.03809342081273957, + "acc_norm": 0.4797687861271676, + "acc_norm_stderr": 
0.03809342081273957 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3941798941798942, + "acc_stderr": 0.02516798233389415, + "acc_norm": 0.3941798941798942, + "acc_norm_stderr": 0.02516798233389415 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.04174752578923185, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.04174752578923185 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.75, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.75, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5549132947976878, + "acc_stderr": 0.026756255129663765, + "acc_norm": 0.5549132947976878, + "acc_norm_stderr": 0.026756255129663765 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5276073619631901, + "acc_stderr": 0.0392237829061099, + "acc_norm": 0.5276073619631901, + "acc_norm_stderr": 0.0392237829061099 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5740740740740741, + "acc_stderr": 0.027513747284379428, + "acc_norm": 0.5740740740740741, + "acc_norm_stderr": 0.027513747284379428 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6580310880829016, + "acc_stderr": 0.03423465100104283, + "acc_norm": 0.6580310880829016, + "acc_norm_stderr": 0.03423465100104283 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.4298245614035088, + "acc_stderr": 0.04657047260594964, + "acc_norm": 0.4298245614035088, + "acc_norm_stderr": 0.04657047260594964 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6458715596330276, + "acc_stderr": 0.02050472901382911, + "acc_norm": 0.6458715596330276, + 
"acc_norm_stderr": 0.02050472901382911 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04216370213557835, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04216370213557835 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5522875816993464, + "acc_stderr": 0.02847293847803353, + "acc_norm": 0.5522875816993464, + "acc_norm_stderr": 0.02847293847803353 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.59, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.59, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6776859504132231, + "acc_stderr": 0.04266416363352168, + "acc_norm": 0.6776859504132231, + "acc_norm_stderr": 0.04266416363352168 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5197368421052632, + "acc_stderr": 0.040657710025626036, + "acc_norm": 0.5197368421052632, + "acc_norm_stderr": 0.040657710025626036 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.49836601307189543, + "acc_stderr": 0.020227726838150117, + "acc_norm": 0.49836601307189543, + "acc_norm_stderr": 0.020227726838150117 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.36524822695035464, + "acc_stderr": 0.02872386385328128, + "acc_norm": 0.36524822695035464, + "acc_norm_stderr": 0.02872386385328128 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.375, + "acc_stderr": 0.04595091388086298, + "acc_norm": 0.375, + "acc_norm_stderr": 0.04595091388086298 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5046296296296297, + "acc_stderr": 0.03409825519163572, + "acc_norm": 0.5046296296296297, + "acc_norm_stderr": 0.03409825519163572 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23910614525139665, + "acc_stderr": 0.014265554192331152, + "acc_norm": 0.23910614525139665, + "acc_norm_stderr": 0.014265554192331152 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + 
"acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.65, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.65, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5036764705882353, + "acc_stderr": 0.030372015885428195, + "acc_norm": 0.5036764705882353, + "acc_norm_stderr": 0.030372015885428195 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5755102040816327, + "acc_stderr": 0.03164209487942942, + "acc_norm": 0.5755102040816327, + "acc_norm_stderr": 0.03164209487942942 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7552742616033755, + "acc_stderr": 0.027985699387036423, + "acc_norm": 0.7552742616033755, + "acc_norm_stderr": 0.027985699387036423 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.39374185136897, + "acc_stderr": 0.012478532272564432, + "acc_norm": 0.39374185136897, + "acc_norm_stderr": 0.012478532272564432 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6421568627450981, + "acc_stderr": 0.03364487286088298, + "acc_norm": 0.6421568627450981, + "acc_norm_stderr": 0.03364487286088298 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6242424242424243, + "acc_stderr": 0.03781887353205983, + "acc_norm": 0.6242424242424243, + "acc_norm_stderr": 0.03781887353205983 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.26560587515299877, + "mc1_stderr": 0.015461027627253597, + "mc2": 0.4183988499603794, + "mc2_stderr": 0.015219858524944204 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5808736717827627, + "acc_stderr": 0.016963995010862792, + "acc_norm": 0.6139315230224321, + "acc_norm_stderr": 0.016738130760321747 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + 
"harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + 
"harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "seungduk/KoSOLAR-10.7B-v0.1", + "model_sha": "65c534a804afe9e7b207dc3ada10b1b08d5deda0", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/sh2orc/Llama-3-Kor-BCCard-8B/result_2024-05-29 22:05:25.json b/sh2orc/Llama-3-Kor-BCCard-8B/result_2024-05-29 22:05:25.json new file mode 100644 index 0000000000000000000000000000000000000000..e62973899a1c858de0473423e33e83de9e51cfc0 --- /dev/null +++ b/sh2orc/Llama-3-Kor-BCCard-8B/result_2024-05-29 22:05:25.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3242320819112628, + "acc_stderr": 0.013678810399518819, + "acc_norm": 0.3609215017064846, + "acc_norm_stderr": 0.014034761386175452 + }, + "harness|ko_hellaswag|10": { + "acc": 0.33569010157339174, + "acc_stderr": 0.004712660409846833, + "acc_norm": 0.4135630352519418, + "acc_norm_stderr": 0.004914655063329499 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.3391812865497076, + "acc_stderr": 0.03631053496488905, + "acc_norm": 0.3391812865497076, + "acc_norm_stderr": 0.03631053496488905 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4174757281553398, + "acc_stderr": 0.04882840548212238, + "acc_norm": 0.4174757281553398, + "acc_norm_stderr": 0.04882840548212238 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3652618135376756, + 
"acc_stderr": 0.01721853002883864, + "acc_norm": 0.3652618135376756, + "acc_norm_stderr": 0.01721853002883864 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4, + "acc_stderr": 0.04232073695151589, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04232073695151589 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3617021276595745, + "acc_stderr": 0.0314108219759624, + "acc_norm": 0.3617021276595745, + "acc_norm_stderr": 0.0314108219759624 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3614457831325301, + "acc_stderr": 0.0374005938202932, + "acc_norm": 0.3614457831325301, + "acc_norm_stderr": 0.0374005938202932 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3633440514469453, + "acc_stderr": 0.027316847674192703, + "acc_norm": 0.3633440514469453, + "acc_norm_stderr": 0.027316847674192703 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.35874439461883406, + "acc_stderr": 0.032190792004199956, + "acc_norm": 0.35874439461883406, + "acc_norm_stderr": 0.032190792004199956 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.46564885496183206, + "acc_stderr": 0.043749285605997376, + "acc_norm": 0.46564885496183206, + "acc_norm_stderr": 0.043749285605997376 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.35, + "acc_stderr": 0.04793724854411021, + "acc_norm": 0.35, + "acc_norm_stderr": 0.04793724854411021 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.398989898989899, + "acc_stderr": 0.03488901616852731, + "acc_norm": 0.398989898989899, + "acc_norm_stderr": 0.03488901616852731 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.36551724137931035, + "acc_stderr": 0.040131241954243856, + "acc_norm": 0.36551724137931035, + "acc_norm_stderr": 0.040131241954243856 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.28431372549019607, + "acc_stderr": 
0.04488482852329017, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.04488482852329017 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4369747899159664, + "acc_stderr": 0.03221943636566196, + "acc_norm": 0.4369747899159664, + "acc_norm_stderr": 0.03221943636566196 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.382051282051282, + "acc_stderr": 0.024635549163908223, + "acc_norm": 0.382051282051282, + "acc_norm_stderr": 0.024635549163908223 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.39814814814814814, + "acc_stderr": 0.04732332615978814, + "acc_norm": 0.39814814814814814, + "acc_norm_stderr": 0.04732332615978814 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.35467980295566504, + "acc_stderr": 0.03366124489051448, + "acc_norm": 0.35467980295566504, + "acc_norm_stderr": 0.03366124489051448 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4129032258064516, + "acc_stderr": 0.028009138125400384, + "acc_norm": 0.4129032258064516, + "acc_norm_stderr": 0.028009138125400384 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5213675213675214, + "acc_stderr": 0.032726164476349545, + "acc_norm": 0.5213675213675214, + "acc_norm_stderr": 0.032726164476349545 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.38113207547169814, + "acc_stderr": 0.02989060968628662, + "acc_norm": 0.38113207547169814, + "acc_norm_stderr": 0.02989060968628662 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.34545454545454546, + "acc_stderr": 0.04554619617541054, + "acc_norm": 0.34545454545454546, + "acc_norm_stderr": 0.04554619617541054 + }, + 
"harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.29259259259259257, + "acc_stderr": 0.027738969632176095, + "acc_norm": 0.29259259259259257, + "acc_norm_stderr": 0.027738969632176095 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.038020397601079024, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.038020397601079024 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.4626865671641791, + "acc_stderr": 0.035256751674679745, + "acc_norm": 0.4626865671641791, + "acc_norm_stderr": 0.035256751674679745 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3583815028901734, + "acc_stderr": 0.03656343653353158, + "acc_norm": 0.3583815028901734, + "acc_norm_stderr": 0.03656343653353158 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.024278568024307702, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.024278568024307702 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2916666666666667, + "acc_stderr": 0.038009680605548594, + "acc_norm": 0.2916666666666667, + "acc_norm_stderr": 0.038009680605548594 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.37283236994219654, + "acc_stderr": 0.026033890613576284, + "acc_norm": 0.37283236994219654, + "acc_norm_stderr": 0.026033890613576284 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3803680981595092, + "acc_stderr": 0.038142698932618374, + "acc_norm": 0.3803680981595092, + "acc_norm_stderr": 0.038142698932618374 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3734567901234568, + "acc_stderr": 0.02691500301138015, + "acc_norm": 0.3734567901234568, + 
"acc_norm_stderr": 0.02691500301138015 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.39896373056994816, + "acc_stderr": 0.03533999094065696, + "acc_norm": 0.39896373056994816, + "acc_norm_stderr": 0.03533999094065696 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.039994238792813365, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.039994238792813365 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3761467889908257, + "acc_stderr": 0.020769231968205074, + "acc_norm": 0.3761467889908257, + "acc_norm_stderr": 0.020769231968205074 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.38095238095238093, + "acc_stderr": 0.043435254289490965, + "acc_norm": 0.38095238095238093, + "acc_norm_stderr": 0.043435254289490965 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.47058823529411764, + "acc_stderr": 0.02858034106513829, + "acc_norm": 0.47058823529411764, + "acc_norm_stderr": 0.02858034106513829 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.45, + "acc_stderr": 0.049999999999999996, + "acc_norm": 0.45, + "acc_norm_stderr": 0.049999999999999996 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5537190082644629, + "acc_stderr": 0.0453793517794788, + "acc_norm": 0.5537190082644629, + "acc_norm_stderr": 0.0453793517794788 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4342105263157895, + "acc_stderr": 0.0403356566784832, + "acc_norm": 0.4342105263157895, + "acc_norm_stderr": 0.0403356566784832 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2826797385620915, + "acc_stderr": 0.018217269552053442, + "acc_norm": 0.2826797385620915, + "acc_norm_stderr": 0.018217269552053442 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.20921985815602837, + "acc_stderr": 
0.024264769439988485, + "acc_norm": 0.20921985815602837, + "acc_norm_stderr": 0.024264769439988485 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3482142857142857, + "acc_stderr": 0.04521829902833586, + "acc_norm": 0.3482142857142857, + "acc_norm_stderr": 0.04521829902833586 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.39351851851851855, + "acc_stderr": 0.03331747876370312, + "acc_norm": 0.39351851851851855, + "acc_norm_stderr": 0.03331747876370312 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.264804469273743, + "acc_stderr": 0.014756906483260657, + "acc_norm": 0.264804469273743, + "acc_norm_stderr": 0.014756906483260657 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.41911764705882354, + "acc_stderr": 0.02997280717046462, + "acc_norm": 0.41911764705882354, + "acc_norm_stderr": 0.02997280717046462 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.44081632653061226, + "acc_stderr": 0.03178419114175363, + "acc_norm": 0.44081632653061226, + "acc_norm_stderr": 0.03178419114175363 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.350210970464135, + "acc_stderr": 0.031052391937584353, + "acc_norm": 0.350210970464135, + "acc_norm_stderr": 0.031052391937584353 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2666232073011734, + "acc_stderr": 0.011293836031612145, + "acc_norm": 0.2666232073011734, + "acc_norm_stderr": 0.011293836031612145 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.36764705882352944, + "acc_stderr": 0.03384132045674118, + "acc_norm": 0.36764705882352944, + "acc_norm_stderr": 0.03384132045674118 + }, + 
"harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.32727272727272727, + "acc_stderr": 0.03663974994391244, + "acc_norm": 0.32727272727272727, + "acc_norm_stderr": 0.03663974994391244 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2460220318237454, + "mc1_stderr": 0.015077219200662557, + "mc2": 0.42148616362309455, + "mc2_stderr": 0.016320690935570224 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2715466351829988, + "acc_stderr": 0.01529107111731038, + "acc_norm": 0.32585596221959856, + "acc_norm_stderr": 0.016114023894800347 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + 
"harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "sh2orc/Llama-3-Kor-BCCard-8B", + "model_sha": "f4f58c28d4f6eea816cdda21d38296e62d75c6b9", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/sh2orc/Llama-3-Korean-8B/result_2024-05-29 22:05:05.json b/sh2orc/Llama-3-Korean-8B/result_2024-05-29 22:05:05.json new file mode 100644 index 0000000000000000000000000000000000000000..4cc03018d847880dc708d55c5dc628bf43b5596b --- /dev/null +++ b/sh2orc/Llama-3-Korean-8B/result_2024-05-29 22:05:05.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 
0.34982935153583616, + "acc_stderr": 0.013936809212158287, + "acc_norm": 0.4052901023890785, + "acc_norm_stderr": 0.014346869060229334 + }, + "harness|ko_hellaswag|10": { + "acc": 0.35132443736307506, + "acc_stderr": 0.004764084597176899, + "acc_norm": 0.44234216291575384, + "acc_norm_stderr": 0.004956494059864895 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.40350877192982454, + "acc_stderr": 0.03762738699917055, + "acc_norm": 0.40350877192982454, + "acc_norm_stderr": 0.03762738699917055 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5533980582524272, + "acc_stderr": 0.04922424153458933, + "acc_norm": 0.5533980582524272, + "acc_norm_stderr": 0.04922424153458933 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4099616858237548, + "acc_stderr": 0.01758767231233605, + "acc_norm": 0.4099616858237548, + "acc_norm_stderr": 0.01758767231233605 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.35555555555555557, + "acc_stderr": 0.04135176749720386, + "acc_norm": 0.35555555555555557, + "acc_norm_stderr": 0.04135176749720386 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.40425531914893614, + "acc_stderr": 0.032081157507886836, + "acc_norm": 0.40425531914893614, + "acc_norm_stderr": 0.032081157507886836 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3795180722891566, + "acc_stderr": 0.037777988227480165, + "acc_norm": 0.3795180722891566, + "acc_norm_stderr": 0.037777988227480165 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.39228295819935693, + "acc_stderr": 0.02773125864701199, + "acc_norm": 0.39228295819935693, + "acc_norm_stderr": 0.02773125864701199 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3811659192825112, + "acc_stderr": 0.03259625118416827, + "acc_norm": 0.3811659192825112, + "acc_norm_stderr": 0.03259625118416827 + }, + "harness|ko_mmlu_human_sexuality|5": { 
+ "acc": 0.46564885496183206, + "acc_stderr": 0.04374928560599738, + "acc_norm": 0.46564885496183206, + "acc_norm_stderr": 0.04374928560599738 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.4898989898989899, + "acc_stderr": 0.035616254886737454, + "acc_norm": 0.4898989898989899, + "acc_norm_stderr": 0.035616254886737454 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4206896551724138, + "acc_stderr": 0.0411391498118926, + "acc_norm": 0.4206896551724138, + "acc_norm_stderr": 0.0411391498118926 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.044405219061793275, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.044405219061793275 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.47058823529411764, + "acc_stderr": 0.03242225027115007, + "acc_norm": 0.47058823529411764, + "acc_norm_stderr": 0.03242225027115007 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3974358974358974, + "acc_stderr": 0.024811920017903836, + "acc_norm": 0.3974358974358974, + "acc_norm_stderr": 0.024811920017903836 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.42, + "acc_stderr": 0.04960449637488584, + "acc_norm": 0.42, + "acc_norm_stderr": 0.04960449637488584 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5740740740740741, + "acc_stderr": 0.047803436269367894, + "acc_norm": 0.5740740740740741, + "acc_norm_stderr": 0.047803436269367894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3694581280788177, + "acc_stderr": 0.033959703819985726, + "acc_norm": 0.3694581280788177, + "acc_norm_stderr": 0.033959703819985726 + }, + 
"harness|ko_mmlu_high_school_biology|5": { + "acc": 0.47096774193548385, + "acc_stderr": 0.028396016402761005, + "acc_norm": 0.47096774193548385, + "acc_norm_stderr": 0.028396016402761005 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5470085470085471, + "acc_stderr": 0.0326109987309862, + "acc_norm": 0.5470085470085471, + "acc_norm_stderr": 0.0326109987309862 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4377358490566038, + "acc_stderr": 0.030533338430467512, + "acc_norm": 0.4377358490566038, + "acc_norm_stderr": 0.030533338430467512 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.44545454545454544, + "acc_stderr": 0.047605488214603246, + "acc_norm": 0.44545454545454544, + "acc_norm_stderr": 0.047605488214603246 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.027940457136228423, + "acc_norm": 0.3, + "acc_norm_stderr": 0.027940457136228423 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.03780445850526733, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.03780445850526733 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5422885572139303, + "acc_stderr": 0.035228658640995975, + "acc_norm": 0.5422885572139303, + "acc_norm_stderr": 0.035228658640995975 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.42196531791907516, + "acc_stderr": 0.03765746693865151, + "acc_norm": 0.42196531791907516, + "acc_norm_stderr": 0.03765746693865151 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.024677862841332783, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.024677862841332783 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3402777777777778, + "acc_stderr": 0.03962135573486219, + "acc_norm": 0.3402777777777778, + "acc_norm_stderr": 0.03962135573486219 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + 
"acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.58, + "acc_stderr": 0.04960449637488584, + "acc_norm": 0.58, + "acc_norm_stderr": 0.04960449637488584 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.41329479768786126, + "acc_stderr": 0.026511261369409244, + "acc_norm": 0.41329479768786126, + "acc_norm_stderr": 0.026511261369409244 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.39263803680981596, + "acc_stderr": 0.03836740907831029, + "acc_norm": 0.39263803680981596, + "acc_norm_stderr": 0.03836740907831029 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4228395061728395, + "acc_stderr": 0.027487472980871598, + "acc_norm": 0.4228395061728395, + "acc_norm_stderr": 0.027487472980871598 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.44041450777202074, + "acc_stderr": 0.035827245300360945, + "acc_norm": 0.44041450777202074, + "acc_norm_stderr": 0.035827245300360945 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.04049339297748141, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.04049339297748141 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.45137614678899085, + "acc_stderr": 0.0213357147112688, + "acc_norm": 0.45137614678899085, + "acc_norm_stderr": 0.0213357147112688 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.31746031746031744, + "acc_stderr": 0.0416345303130286, + "acc_norm": 0.31746031746031744, + "acc_norm_stderr": 0.0416345303130286 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5163398692810458, + "acc_stderr": 0.028614624752805427, + "acc_norm": 0.5163398692810458, + "acc_norm_stderr": 0.028614624752805427 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + 
"acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5537190082644629, + "acc_stderr": 0.0453793517794788, + "acc_norm": 0.5537190082644629, + "acc_norm_stderr": 0.0453793517794788 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.39473684210526316, + "acc_stderr": 0.039777499346220734, + "acc_norm": 0.39473684210526316, + "acc_norm_stderr": 0.039777499346220734 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3284313725490196, + "acc_stderr": 0.01899970738316267, + "acc_norm": 0.3284313725490196, + "acc_norm_stderr": 0.01899970738316267 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2907801418439716, + "acc_stderr": 0.027090664368353178, + "acc_norm": 0.2907801418439716, + "acc_norm_stderr": 0.027090664368353178 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.33035714285714285, + "acc_stderr": 0.04464285714285714, + "acc_norm": 0.33035714285714285, + "acc_norm_stderr": 0.04464285714285714 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.033509916046960436, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.033509916046960436 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23798882681564246, + "acc_stderr": 0.014242630070574896, + "acc_norm": 0.23798882681564246, + "acc_norm_stderr": 0.014242630070574896 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3713235294117647, + "acc_stderr": 0.02934980313976587, + "acc_norm": 0.3713235294117647, + "acc_norm_stderr": 0.02934980313976587 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4204081632653061, + "acc_stderr": 
0.03160106993449604, + "acc_norm": 0.4204081632653061, + "acc_norm_stderr": 0.03160106993449604 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.459915611814346, + "acc_stderr": 0.03244246810187914, + "acc_norm": 0.459915611814346, + "acc_norm_stderr": 0.03244246810187914 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.28748370273794005, + "acc_stderr": 0.011559337355708502, + "acc_norm": 0.28748370273794005, + "acc_norm_stderr": 0.011559337355708502 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.36764705882352944, + "acc_stderr": 0.03384132045674118, + "acc_norm": 0.36764705882352944, + "acc_norm_stderr": 0.03384132045674118 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4, + "acc_stderr": 0.03825460278380026, + "acc_norm": 0.4, + "acc_norm_stderr": 0.03825460278380026 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.26560587515299877, + "mc1_stderr": 0.015461027627253602, + "mc2": 0.4520114540548119, + "mc2_stderr": 0.016071957802092095 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2892561983471074, + "acc_stderr": 0.015588800386053559, + "acc_norm": 0.3293978748524203, + "acc_norm_stderr": 0.016158746868147143 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + 
"harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "sh2orc/Llama-3-Korean-8B", + "model_sha": 
"a9d4c032f1f9ef249eeef114b36033ab29a2c6a5", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/shadowml/BeagSake-7B/result_2024-05-13 16:11:44.json b/shadowml/BeagSake-7B/result_2024-05-13 16:11:44.json new file mode 100644 index 0000000000000000000000000000000000000000..031e65ddd6f68dd916ca95104032b9c6bec947f6 --- /dev/null +++ b/shadowml/BeagSake-7B/result_2024-05-13 16:11:44.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3856655290102389, + "acc_stderr": 0.014224250973257179, + "acc_norm": 0.44368600682593856, + "acc_norm_stderr": 0.014518421825670445 + }, + "harness|ko_hellaswag|10": { + "acc": 0.39772953594901417, + "acc_stderr": 0.004884287515461501, + "acc_norm": 0.5262895837482573, + "acc_norm_stderr": 0.0049828793406914125 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4853801169590643, + "acc_stderr": 0.038331852752130205, + "acc_norm": 0.4853801169590643, + "acc_norm_stderr": 0.038331852752130205 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6310679611650486, + "acc_stderr": 0.0477761518115674, + "acc_norm": 0.6310679611650486, + "acc_norm_stderr": 0.0477761518115674 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.47381864623243936, + "acc_stderr": 0.017855434554041975, + "acc_norm": 0.47381864623243936, + "acc_norm_stderr": 0.017855434554041975 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37777777777777777, + "acc_stderr": 0.04188307537595853, + "acc_norm": 0.37777777777777777, + "acc_norm_stderr": 0.04188307537595853 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39148936170212767, + "acc_stderr": 0.03190701242326812, + "acc_norm": 0.39148936170212767, + 
"acc_norm_stderr": 0.03190701242326812 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39156626506024095, + "acc_stderr": 0.037998574544796354, + "acc_norm": 0.39156626506024095, + "acc_norm_stderr": 0.037998574544796354 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4662379421221865, + "acc_stderr": 0.028333277109562793, + "acc_norm": 0.4662379421221865, + "acc_norm_stderr": 0.028333277109562793 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4349775784753363, + "acc_stderr": 0.03327283370271344, + "acc_norm": 0.4349775784753363, + "acc_norm_stderr": 0.03327283370271344 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.46564885496183206, + "acc_stderr": 0.04374928560599738, + "acc_norm": 0.46564885496183206, + "acc_norm_stderr": 0.04374928560599738 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5707070707070707, + "acc_stderr": 0.035265527246012, + "acc_norm": 0.5707070707070707, + "acc_norm_stderr": 0.035265527246012 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4482758620689655, + "acc_stderr": 0.041443118108781506, + "acc_norm": 0.4482758620689655, + "acc_norm_stderr": 0.041443118108781506 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.19607843137254902, + "acc_stderr": 0.03950581861179962, + "acc_norm": 0.19607843137254902, + "acc_norm_stderr": 0.03950581861179962 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5294117647058824, + "acc_stderr": 0.03242225027115006, + "acc_norm": 0.5294117647058824, + "acc_norm_stderr": 0.03242225027115006 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.47435897435897434, + "acc_stderr": 0.02531764972644865, + "acc_norm": 0.47435897435897434, + "acc_norm_stderr": 0.02531764972644865 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.65, + "acc_stderr": 
0.0479372485441102, + "acc_norm": 0.65, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5185185185185185, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.5185185185185185, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39901477832512317, + "acc_stderr": 0.03445487686264715, + "acc_norm": 0.39901477832512317, + "acc_norm_stderr": 0.03445487686264715 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4483870967741935, + "acc_stderr": 0.028292056830112735, + "acc_norm": 0.4483870967741935, + "acc_norm_stderr": 0.028292056830112735 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7692307692307693, + "acc_stderr": 0.027601921381417614, + "acc_norm": 0.7692307692307693, + "acc_norm_stderr": 0.027601921381417614 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4679245283018868, + "acc_stderr": 0.030709486992556545, + "acc_norm": 0.4679245283018868, + "acc_norm_stderr": 0.030709486992556545 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5181818181818182, + "acc_stderr": 0.04785964010794915, + "acc_norm": 0.5181818181818182, + "acc_norm_stderr": 0.04785964010794915 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3296296296296296, + "acc_stderr": 0.028661201116524586, + "acc_norm": 0.3296296296296296, + "acc_norm_stderr": 0.028661201116524586 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.271523178807947, + "acc_stderr": 0.03631329803969654, + "acc_norm": 0.271523178807947, + "acc_norm_stderr": 0.03631329803969654 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6069651741293532, + "acc_stderr": 0.0345368246603156, + "acc_norm": 0.6069651741293532, + "acc_norm_stderr": 0.0345368246603156 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 
0.3988439306358382, + "acc_stderr": 0.03733626655383509, + "acc_norm": 0.3988439306358382, + "acc_norm_stderr": 0.03733626655383509 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.36772486772486773, + "acc_stderr": 0.02483383982556242, + "acc_norm": 0.36772486772486773, + "acc_norm_stderr": 0.02483383982556242 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.04076663253918567, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.04076663253918567 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.53, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.53, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.49421965317919075, + "acc_stderr": 0.026917296179149116, + "acc_norm": 0.49421965317919075, + "acc_norm_stderr": 0.026917296179149116 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5153374233128835, + "acc_stderr": 0.03926522378708843, + "acc_norm": 0.5153374233128835, + "acc_norm_stderr": 0.03926522378708843 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.47530864197530864, + "acc_stderr": 0.02778680093142745, + "acc_norm": 0.47530864197530864, + "acc_norm_stderr": 0.02778680093142745 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5181347150259067, + "acc_stderr": 0.03606065001832919, + "acc_norm": 0.5181347150259067, + "acc_norm_stderr": 0.03606065001832919 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.32456140350877194, + "acc_stderr": 0.04404556157374768, + "acc_norm": 0.32456140350877194, + "acc_norm_stderr": 0.04404556157374768 + }, + 
"harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5284403669724771, + "acc_stderr": 0.02140261569734805, + "acc_norm": 0.5284403669724771, + "acc_norm_stderr": 0.02140261569734805 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4365079365079365, + "acc_stderr": 0.044359328928514664, + "acc_norm": 0.4365079365079365, + "acc_norm_stderr": 0.044359328928514664 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.48366013071895425, + "acc_stderr": 0.028614624752805407, + "acc_norm": 0.48366013071895425, + "acc_norm_stderr": 0.028614624752805407 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5702479338842975, + "acc_stderr": 0.04519082021319773, + "acc_norm": 0.5702479338842975, + "acc_norm_stderr": 0.04519082021319773 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4144736842105263, + "acc_stderr": 0.04008973785779206, + "acc_norm": 0.4144736842105263, + "acc_norm_stderr": 0.04008973785779206 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.019944914136873583, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.019944914136873583 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.39361702127659576, + "acc_stderr": 0.029144544781596157, + "acc_norm": 0.39361702127659576, + "acc_norm_stderr": 0.029144544781596157 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4375, + "acc_stderr": 0.04708567521880525, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.04708567521880525 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.39814814814814814, + "acc_stderr": 0.03338473403207401, + "acc_norm": 0.39814814814814814, + "acc_norm_stderr": 0.03338473403207401 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2782122905027933, + "acc_stderr": 0.014987325439963546, + "acc_norm": 0.2782122905027933, + 
"acc_norm_stderr": 0.014987325439963546 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.67, + "acc_stderr": 0.04725815626252611, + "acc_norm": 0.67, + "acc_norm_stderr": 0.04725815626252611 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.34558823529411764, + "acc_stderr": 0.028888193103988637, + "acc_norm": 0.34558823529411764, + "acc_norm_stderr": 0.028888193103988637 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5346938775510204, + "acc_stderr": 0.03193207024425314, + "acc_norm": 0.5346938775510204, + "acc_norm_stderr": 0.03193207024425314 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6371308016877637, + "acc_stderr": 0.03129920825530213, + "acc_norm": 0.6371308016877637, + "acc_norm_stderr": 0.03129920825530213 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3520208604954368, + "acc_stderr": 0.0121981406053536, + "acc_norm": 0.3520208604954368, + "acc_norm_stderr": 0.0121981406053536 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.44607843137254904, + "acc_stderr": 0.03488845451304974, + "acc_norm": 0.44607843137254904, + "acc_norm_stderr": 0.03488845451304974 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.03895658065271846, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.03895658065271846 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.41982864137086906, + "mc1_stderr": 0.017277030301775766, + "mc2": 0.5796867538374282, + "mc2_stderr": 0.016210749877100413 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4297520661157025, + "acc_stderr": 0.01701984753597221, + "acc_norm": 0.4309327036599764, + "acc_norm_stderr": 0.017025558196043136 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 
0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + 
"harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "shadowml/BeagSake-7B", + "model_sha": "b7a3b25a188a4608fd05fc4247ddd504c1f529d1", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/shangrilar/llama-2-ko-7b-ck-sft/result_2023-12-03 02:59:41.json b/shangrilar/llama-2-ko-7b-ck-sft/result_2023-12-03 02:59:41.json new file mode 100644 index 0000000000000000000000000000000000000000..6d85bf3e0911355286bba558e83574560bc9ec7a --- /dev/null +++ b/shangrilar/llama-2-ko-7b-ck-sft/result_2023-12-03 02:59:41.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.30887372013651876, + "acc_stderr": 0.013501770929344003, + "acc_norm": 0.3651877133105802, + "acc_norm_stderr": 0.0140702655192688 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3851822346146186, + "acc_stderr": 0.0048564379557198565, + "acc_norm": 0.5034853614817766, + "acc_norm_stderr": 0.004989660180792183 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.30409356725146197, + "acc_stderr": 0.0352821125824523, + "acc_norm": 0.30409356725146197, + "acc_norm_stderr": 0.0352821125824523 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2912621359223301, + "acc_stderr": 0.04498676320572924, + "acc_norm": 0.2912621359223301, + "acc_norm_stderr": 
0.04498676320572924 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3448275862068966, + "acc_stderr": 0.016997123346113432, + "acc_norm": 0.3448275862068966, + "acc_norm_stderr": 0.016997123346113432 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34074074074074073, + "acc_stderr": 0.040943762699967946, + "acc_norm": 0.34074074074074073, + "acc_norm_stderr": 0.040943762699967946 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3191489361702128, + "acc_stderr": 0.030472973363380052, + "acc_norm": 0.3191489361702128, + "acc_norm_stderr": 0.030472973363380052 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.25903614457831325, + "acc_stderr": 0.034106466140718564, + "acc_norm": 0.25903614457831325, + "acc_norm_stderr": 0.034106466140718564 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.36977491961414793, + "acc_stderr": 0.027417996705630998, + "acc_norm": 0.36977491961414793, + "acc_norm_stderr": 0.027417996705630998 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3901345291479821, + "acc_stderr": 0.03273766725459156, + "acc_norm": 0.3901345291479821, + "acc_norm_stderr": 0.03273766725459156 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2748091603053435, + "acc_stderr": 0.03915345408847835, + "acc_norm": 0.2748091603053435, + "acc_norm_stderr": 0.03915345408847835 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3181818181818182, + "acc_stderr": 0.03318477333845331, + "acc_norm": 0.3181818181818182, + "acc_norm_stderr": 0.03318477333845331 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.296551724137931, + "acc_stderr": 0.03806142687309994, + "acc_norm": 0.296551724137931, + 
"acc_norm_stderr": 0.03806142687309994 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.04023382273617747, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.04023382273617747 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3235294117647059, + "acc_stderr": 0.030388353551886838, + "acc_norm": 0.3235294117647059, + "acc_norm_stderr": 0.030388353551886838 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.258974358974359, + "acc_stderr": 0.022211106810061672, + "acc_norm": 0.258974358974359, + "acc_norm_stderr": 0.022211106810061672 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.044531975073749834, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.044531975073749834 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.28078817733990147, + "acc_stderr": 0.03161856335358609, + "acc_norm": 0.28078817733990147, + "acc_norm_stderr": 0.03161856335358609 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3387096774193548, + "acc_stderr": 0.026923446059302844, + "acc_norm": 0.3387096774193548, + "acc_norm_stderr": 0.026923446059302844 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.358974358974359, + "acc_stderr": 0.03142616993791924, + "acc_norm": 0.358974358974359, + "acc_norm_stderr": 0.03142616993791924 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.30943396226415093, + "acc_stderr": 0.028450154794118627, + "acc_norm": 0.30943396226415093, + "acc_norm_stderr": 0.028450154794118627 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2727272727272727, + "acc_stderr": 
0.04265792110940588, + "acc_norm": 0.2727272727272727, + "acc_norm_stderr": 0.04265792110940588 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25555555555555554, + "acc_stderr": 0.026593939101844065, + "acc_norm": 0.25555555555555554, + "acc_norm_stderr": 0.026593939101844065 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.32450331125827814, + "acc_stderr": 0.03822746937658754, + "acc_norm": 0.32450331125827814, + "acc_norm_stderr": 0.03822746937658754 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.26865671641791045, + "acc_stderr": 0.031343283582089536, + "acc_norm": 0.26865671641791045, + "acc_norm_stderr": 0.031343283582089536 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2138728323699422, + "acc_stderr": 0.03126511206173044, + "acc_norm": 0.2138728323699422, + "acc_norm_stderr": 0.03126511206173044 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.24603174603174602, + "acc_stderr": 0.022182037202948368, + "acc_norm": 0.24603174603174602, + "acc_norm_stderr": 0.022182037202948368 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.034765901043041336, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.034765901043041336 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.28901734104046245, + "acc_stderr": 0.024405173935783238, + "acc_norm": 0.28901734104046245, + "acc_norm_stderr": 0.024405173935783238 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3006134969325153, + "acc_stderr": 0.03602511318806771, + "acc_norm": 0.3006134969325153, + "acc_norm_stderr": 0.03602511318806771 + }, + "harness|ko_mmlu_prehistory|5": { + 
"acc": 0.3055555555555556, + "acc_stderr": 0.025630824975621344, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.025630824975621344 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.24352331606217617, + "acc_stderr": 0.030975436386845426, + "acc_norm": 0.24352331606217617, + "acc_norm_stderr": 0.030975436386845426 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.040969851398436716, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.040969851398436716 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3137614678899083, + "acc_stderr": 0.019894723341469127, + "acc_norm": 0.3137614678899083, + "acc_norm_stderr": 0.019894723341469127 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.1746031746031746, + "acc_stderr": 0.03395490020856111, + "acc_norm": 0.1746031746031746, + "acc_norm_stderr": 0.03395490020856111 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.369281045751634, + "acc_stderr": 0.02763417668960266, + "acc_norm": 0.369281045751634, + "acc_norm_stderr": 0.02763417668960266 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.39669421487603307, + "acc_stderr": 0.044658697805310094, + "acc_norm": 0.39669421487603307, + "acc_norm_stderr": 0.044658697805310094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.23026315789473684, + "acc_stderr": 0.03426059424403165, + "acc_norm": 0.23026315789473684, + "acc_norm_stderr": 0.03426059424403165 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.018120224251484587, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.018120224251484587 + }, + 
"harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2765957446808511, + "acc_stderr": 0.02668456434046099, + "acc_norm": 0.2765957446808511, + "acc_norm_stderr": 0.02668456434046099 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.26785714285714285, + "acc_stderr": 0.04203277291467762, + "acc_norm": 0.26785714285714285, + "acc_norm_stderr": 0.04203277291467762 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.032568505702936464, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.032568505702936464 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816508, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816508 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.33455882352941174, + "acc_stderr": 0.028661996202335314, + "acc_norm": 0.33455882352941174, + "acc_norm_stderr": 0.028661996202335314 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.23265306122448978, + "acc_stderr": 0.02704925791589618, + "acc_norm": 0.23265306122448978, + "acc_norm_stderr": 0.02704925791589618 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.3080168776371308, + "acc_stderr": 0.0300523893356057, + "acc_norm": 0.3080168776371308, + "acc_norm_stderr": 0.0300523893356057 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.27509778357235987, + "acc_stderr": 0.01140544362099692, + "acc_norm": 0.27509778357235987, + "acc_norm_stderr": 0.01140544362099692 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.030964517926923393, + 
"acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.030964517926923393 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.24242424242424243, + "acc_stderr": 0.03346409881055953, + "acc_norm": 0.24242424242424243, + "acc_norm_stderr": 0.03346409881055953 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2215422276621787, + "mc1_stderr": 0.014537867601301142, + "mc2": 0.36630728160236614, + "mc2_stderr": 0.014877510069800457 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.30460448642266824, + "acc_stderr": 0.015823367273129392, + "acc_norm": 0.3730814639905549, + "acc_norm_stderr": 0.016627318275137443 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + 
"harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "shangrilar/llama-2-ko-7b-ck-sft", + "model_sha": "5320a852b356e0d909c7b47330714de4f11c43fb", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/shangrilar/llama-2-ko-7b-ck-sfte/result_2023-12-03 03:00:06.json b/shangrilar/llama-2-ko-7b-ck-sfte/result_2023-12-03 03:00:06.json new file mode 100644 index 0000000000000000000000000000000000000000..81dbb6dbdb9d3feeebd2b35dcaf1c60087a6ec7e --- /dev/null +++ 
b/shangrilar/llama-2-ko-7b-ck-sfte/result_2023-12-03 03:00:06.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3037542662116041, + "acc_stderr": 0.013438909184778757, + "acc_norm": 0.35238907849829354, + "acc_norm_stderr": 0.013960142600598678 + }, + "harness|ko_hellaswag|10": { + "acc": 0.38418641704839673, + "acc_stderr": 0.004854082479916908, + "acc_norm": 0.49900418243377814, + "acc_norm_stderr": 0.004989771515176699 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.29239766081871343, + "acc_stderr": 0.034886477134579215, + "acc_norm": 0.29239766081871343, + "acc_norm_stderr": 0.034886477134579215 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.3106796116504854, + "acc_stderr": 0.04582124160161551, + "acc_norm": 0.3106796116504854, + "acc_norm_stderr": 0.04582124160161551 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3550446998722861, + "acc_stderr": 0.017112085772772984, + "acc_norm": 0.3550446998722861, + "acc_norm_stderr": 0.017112085772772984 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37777777777777777, + "acc_stderr": 0.04188307537595853, + "acc_norm": 0.37777777777777777, + "acc_norm_stderr": 0.04188307537595853 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3021276595744681, + "acc_stderr": 0.030017554471880557, + "acc_norm": 0.3021276595744681, + "acc_norm_stderr": 0.030017554471880557 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.25301204819277107, + "acc_stderr": 0.03384429155233137, + "acc_norm": 0.25301204819277107, + "acc_norm_stderr": 0.03384429155233137 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3504823151125402, + "acc_stderr": 0.027098652621301747, + "acc_norm": 0.3504823151125402, + "acc_norm_stderr": 0.027098652621301747 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.39461883408071746, + 
"acc_stderr": 0.03280400504755291, + "acc_norm": 0.39461883408071746, + "acc_norm_stderr": 0.03280400504755291 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.25190839694656486, + "acc_stderr": 0.03807387116306085, + "acc_norm": 0.25190839694656486, + "acc_norm_stderr": 0.03807387116306085 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.29292929292929293, + "acc_stderr": 0.03242497958178817, + "acc_norm": 0.29292929292929293, + "acc_norm_stderr": 0.03242497958178817 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2620689655172414, + "acc_stderr": 0.036646663372252565, + "acc_norm": 0.2620689655172414, + "acc_norm_stderr": 0.036646663372252565 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.19607843137254902, + "acc_stderr": 0.03950581861179962, + "acc_norm": 0.19607843137254902, + "acc_norm_stderr": 0.03950581861179962 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.24789915966386555, + "acc_stderr": 0.02804796722417689, + "acc_norm": 0.24789915966386555, + "acc_norm_stderr": 0.02804796722417689 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2692307692307692, + "acc_stderr": 0.02248938979365483, + "acc_norm": 0.2692307692307692, + "acc_norm_stderr": 0.02248938979365483 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909282, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909282 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.25, + "acc_stderr": 0.04186091791394607, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04186091791394607 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.29064039408866993, + 
"acc_stderr": 0.0319474007226554, + "acc_norm": 0.29064039408866993, + "acc_norm_stderr": 0.0319474007226554 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3, + "acc_stderr": 0.02606936229533513, + "acc_norm": 0.3, + "acc_norm_stderr": 0.02606936229533513 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.405982905982906, + "acc_stderr": 0.03217180182641087, + "acc_norm": 0.405982905982906, + "acc_norm_stderr": 0.03217180182641087 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.27169811320754716, + "acc_stderr": 0.027377706624670713, + "acc_norm": 0.27169811320754716, + "acc_norm_stderr": 0.027377706624670713 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.3181818181818182, + "acc_stderr": 0.044612721759105065, + "acc_norm": 0.3181818181818182, + "acc_norm_stderr": 0.044612721759105065 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.22592592592592592, + "acc_stderr": 0.025497532639609556, + "acc_norm": 0.22592592592592592, + "acc_norm_stderr": 0.025497532639609556 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.271523178807947, + "acc_stderr": 0.036313298039696545, + "acc_norm": 0.271523178807947, + "acc_norm_stderr": 0.036313298039696545 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.2835820895522388, + "acc_stderr": 0.03187187537919797, + "acc_norm": 0.2835820895522388, + "acc_norm_stderr": 0.03187187537919797 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2138728323699422, + "acc_stderr": 0.03126511206173044, + "acc_norm": 0.2138728323699422, + "acc_norm_stderr": 0.03126511206173044 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.24338624338624337, + "acc_stderr": 0.022101128787415412, + "acc_norm": 0.24338624338624337, + "acc_norm_stderr": 0.022101128787415412 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.03476590104304134, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.03476590104304134 + }, + 
"harness|ko_mmlu_college_chemistry|5": { + "acc": 0.18, + "acc_stderr": 0.038612291966536955, + "acc_norm": 0.18, + "acc_norm_stderr": 0.038612291966536955 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.3063583815028902, + "acc_stderr": 0.02481835012943659, + "acc_norm": 0.3063583815028902, + "acc_norm_stderr": 0.02481835012943659 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2822085889570552, + "acc_stderr": 0.03536117886664743, + "acc_norm": 0.2822085889570552, + "acc_norm_stderr": 0.03536117886664743 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.025842248700902164, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.025842248700902164 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.23834196891191708, + "acc_stderr": 0.03074890536390989, + "acc_norm": 0.23834196891191708, + "acc_norm_stderr": 0.03074890536390989 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.04049339297748142, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.04049339297748142 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.28990825688073396, + "acc_stderr": 0.0194530666092016, + "acc_norm": 0.28990825688073396, + "acc_norm_stderr": 0.0194530666092016 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.15873015873015872, + "acc_stderr": 0.032684540130117436, + "acc_norm": 0.15873015873015872, + "acc_norm_stderr": 0.032684540130117436 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2973856209150327, + "acc_stderr": 0.02617390850671858, + "acc_norm": 0.2973856209150327, + "acc_norm_stderr": 0.02617390850671858 + 
}, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2727272727272727, + "acc_stderr": 0.040655781409087044, + "acc_norm": 0.2727272727272727, + "acc_norm_stderr": 0.040655781409087044 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.26973684210526316, + "acc_stderr": 0.03611780560284898, + "acc_norm": 0.26973684210526316, + "acc_norm_stderr": 0.03611780560284898 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.30718954248366015, + "acc_stderr": 0.018663359671463677, + "acc_norm": 0.30718954248366015, + "acc_norm_stderr": 0.018663359671463677 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2695035460992908, + "acc_stderr": 0.026469036818590634, + "acc_norm": 0.2695035460992908, + "acc_norm_stderr": 0.026469036818590634 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3392857142857143, + "acc_stderr": 0.044939490686135404, + "acc_norm": 0.3392857142857143, + "acc_norm_stderr": 0.044939490686135404 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.23148148148148148, + "acc_stderr": 0.028765111718046944, + "acc_norm": 0.23148148148148148, + "acc_norm_stderr": 0.028765111718046944 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2435754189944134, + "acc_stderr": 0.01435591196476786, + "acc_norm": 0.2435754189944134, + "acc_norm_stderr": 0.01435591196476786 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.22, + "acc_stderr": 0.0416333199893227, + "acc_norm": 0.22, + "acc_norm_stderr": 0.0416333199893227 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.027678468642144693, + "acc_norm": 0.29411764705882354, + 
"acc_norm_stderr": 0.027678468642144693 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.1836734693877551, + "acc_stderr": 0.024789071332007622, + "acc_norm": 0.1836734693877551, + "acc_norm_stderr": 0.024789071332007622 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.28270042194092826, + "acc_stderr": 0.029312814153955934, + "acc_norm": 0.28270042194092826, + "acc_norm_stderr": 0.029312814153955934 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2646675358539765, + "acc_stderr": 0.011267332992845528, + "acc_norm": 0.2646675358539765, + "acc_norm_stderr": 0.011267332992845528 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.25980392156862747, + "acc_stderr": 0.03077855467869326, + "acc_norm": 0.25980392156862747, + "acc_norm_stderr": 0.03077855467869326 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.24242424242424243, + "acc_stderr": 0.03346409881055953, + "acc_norm": 0.24242424242424243, + "acc_norm_stderr": 0.03346409881055953 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2631578947368421, + "mc1_stderr": 0.015415241740237033, + "mc2": 0.3920379268511032, + "mc2_stderr": 0.014920139415816195 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2975206611570248, + "acc_stderr": 0.015717742205089914, + "acc_norm": 0.36363636363636365, + "acc_norm_stderr": 0.01653869160332771 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + 
"harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + 
"harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "shangrilar/llama-2-ko-7b-ck-sfte", + "model_sha": "6efe784c199bd0f1a4136fa1803d0b77a1f4f23a", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/shangrilar/llama-2-ko-7b-ck-sftem/result_2023-12-03 03:01:32.json b/shangrilar/llama-2-ko-7b-ck-sftem/result_2023-12-03 03:01:32.json new file mode 100644 index 0000000000000000000000000000000000000000..9d6eada4f91fda46424eb108c7678e5fabe96997 --- /dev/null +++ b/shangrilar/llama-2-ko-7b-ck-sftem/result_2023-12-03 03:01:32.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.29948805460750855, + "acc_stderr": 0.013385021637313572, + "acc_norm": 0.3677474402730375, + "acc_norm_stderr": 0.014090995618168478 + }, + "harness|ko_hellaswag|10": { + "acc": 0.38627763393746267, + "acc_stderr": 0.0048590041846946095, + "acc_norm": 0.5016928898625772, + "acc_norm_stderr": 0.004989752811173406 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.29239766081871343, + "acc_stderr": 0.03488647713457922, + "acc_norm": 0.29239766081871343, + "acc_norm_stderr": 0.03488647713457922 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.27184466019417475, + "acc_stderr": 0.044052680241409216, + "acc_norm": 0.27184466019417475, + "acc_norm_stderr": 0.044052680241409216 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.351213282247765, + "acc_stderr": 0.017069982051499427, + "acc_norm": 0.351213282247765, + "acc_norm_stderr": 0.017069982051499427 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.03944624162501116, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.03944624162501116 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + 
"acc_stderr": 0.045126085985421255, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421255 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.28936170212765955, + "acc_stderr": 0.029644006577009618, + "acc_norm": 0.28936170212765955, + "acc_norm_stderr": 0.029644006577009618 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.2710843373493976, + "acc_stderr": 0.03460579907553027, + "acc_norm": 0.2710843373493976, + "acc_norm_stderr": 0.03460579907553027 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3440514469453376, + "acc_stderr": 0.026981478043648022, + "acc_norm": 0.3440514469453376, + "acc_norm_stderr": 0.026981478043648022 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.33183856502242154, + "acc_stderr": 0.03160295143776679, + "acc_norm": 0.33183856502242154, + "acc_norm_stderr": 0.03160295143776679 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.21374045801526717, + "acc_stderr": 0.0359546161177469, + "acc_norm": 0.21374045801526717, + "acc_norm_stderr": 0.0359546161177469 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.29797979797979796, + "acc_stderr": 0.032586303838365555, + "acc_norm": 0.29797979797979796, + "acc_norm_stderr": 0.032586303838365555 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.25517241379310346, + "acc_stderr": 0.03632984052707842, + "acc_norm": 0.25517241379310346, + "acc_norm_stderr": 0.03632984052707842 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.17647058823529413, + "acc_stderr": 0.03793281185307811, + "acc_norm": 0.17647058823529413, + "acc_norm_stderr": 0.03793281185307811 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.027553614467863814, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.027553614467863814 + }, + 
"harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.26153846153846155, + "acc_stderr": 0.022282141204204426, + "acc_norm": 0.26153846153846155, + "acc_norm_stderr": 0.022282141204204426 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.25, + "acc_stderr": 0.04186091791394607, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04186091791394607 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.24630541871921183, + "acc_stderr": 0.030315099285617722, + "acc_norm": 0.24630541871921183, + "acc_norm_stderr": 0.030315099285617722 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2967741935483871, + "acc_stderr": 0.025988500792411887, + "acc_norm": 0.2967741935483871, + "acc_norm_stderr": 0.025988500792411887 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.3803418803418803, + "acc_stderr": 0.03180425204384099, + "acc_norm": 0.3803418803418803, + "acc_norm_stderr": 0.03180425204384099 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2641509433962264, + "acc_stderr": 0.027134291628741716, + "acc_norm": 0.2641509433962264, + "acc_norm_stderr": 0.027134291628741716 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2636363636363636, + "acc_stderr": 0.04220224692971987, + "acc_norm": 0.2636363636363636, + "acc_norm_stderr": 0.04220224692971987 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25555555555555554, + "acc_stderr": 0.026593939101844072, + "acc_norm": 0.25555555555555554, + "acc_norm_stderr": 0.026593939101844072 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.25165562913907286, + "acc_stderr": 0.035433042343899844, + "acc_norm": 0.25165562913907286, + "acc_norm_stderr": 
0.035433042343899844 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.27860696517412936, + "acc_stderr": 0.031700561834973086, + "acc_norm": 0.27860696517412936, + "acc_norm_stderr": 0.031700561834973086 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2254335260115607, + "acc_stderr": 0.03186209851641144, + "acc_norm": 0.2254335260115607, + "acc_norm_stderr": 0.03186209851641144 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.24603174603174602, + "acc_stderr": 0.022182037202948368, + "acc_norm": 0.24603174603174602, + "acc_norm_stderr": 0.022182037202948368 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2152777777777778, + "acc_stderr": 0.03437079344106134, + "acc_norm": 0.2152777777777778, + "acc_norm_stderr": 0.03437079344106134 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.19, + "acc_stderr": 0.039427724440366234, + "acc_norm": 0.19, + "acc_norm_stderr": 0.039427724440366234 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2745664739884393, + "acc_stderr": 0.024027745155265016, + "acc_norm": 0.2745664739884393, + "acc_norm_stderr": 0.024027745155265016 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.26993865030674846, + "acc_stderr": 0.03487825168497892, + "acc_norm": 0.26993865030674846, + "acc_norm_stderr": 0.03487825168497892 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.33024691358024694, + "acc_stderr": 0.026168298456732846, + "acc_norm": 0.33024691358024694, + "acc_norm_stderr": 0.026168298456732846 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.23, + "acc_stderr": 0.042295258468165065, + "acc_norm": 0.23, + "acc_norm_stderr": 0.042295258468165065 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.24870466321243523, + "acc_stderr": 0.031195840877700293, + "acc_norm": 
0.24870466321243523, + "acc_norm_stderr": 0.031195840877700293 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.041424397194893624, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.041424397194893624 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.28256880733944956, + "acc_stderr": 0.019304243497707152, + "acc_norm": 0.28256880733944956, + "acc_norm_stderr": 0.019304243497707152 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.1746031746031746, + "acc_stderr": 0.0339549002085611, + "acc_norm": 0.1746031746031746, + "acc_norm_stderr": 0.0339549002085611 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.02656892101545715, + "acc_norm": 0.3137254901960784, + "acc_norm_stderr": 0.02656892101545715 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.39669421487603307, + "acc_stderr": 0.044658697805310094, + "acc_norm": 0.39669421487603307, + "acc_norm_stderr": 0.044658697805310094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.24342105263157895, + "acc_stderr": 0.034923496688842384, + "acc_norm": 0.24342105263157895, + "acc_norm_stderr": 0.034923496688842384 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.28104575163398693, + "acc_stderr": 0.018185218954318082, + "acc_norm": 0.28104575163398693, + "acc_norm_stderr": 0.018185218954318082 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2765957446808511, + "acc_stderr": 0.026684564340460987, + "acc_norm": 0.2765957446808511, + "acc_norm_stderr": 0.026684564340460987 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.33035714285714285, + "acc_stderr": 0.04464285714285712, + "acc_norm": 0.33035714285714285, + "acc_norm_stderr": 0.04464285714285712 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 
0.18518518518518517, + "acc_stderr": 0.02649191472735513, + "acc_norm": 0.18518518518518517, + "acc_norm_stderr": 0.02649191472735513 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.18, + "acc_stderr": 0.038612291966536955, + "acc_norm": 0.18, + "acc_norm_stderr": 0.038612291966536955 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.1948529411764706, + "acc_stderr": 0.024060599423487424, + "acc_norm": 0.1948529411764706, + "acc_norm_stderr": 0.024060599423487424 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2, + "acc_stderr": 0.02560737598657916, + "acc_norm": 0.2, + "acc_norm_stderr": 0.02560737598657916 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.31645569620253167, + "acc_stderr": 0.030274974880218977, + "acc_norm": 0.31645569620253167, + "acc_norm_stderr": 0.030274974880218977 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.26140808344198174, + "acc_stderr": 0.01122252816977131, + "acc_norm": 0.26140808344198174, + "acc_norm_stderr": 0.01122252816977131 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.27941176470588236, + "acc_stderr": 0.03149328104507957, + "acc_norm": 0.27941176470588236, + "acc_norm_stderr": 0.03149328104507957 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.28484848484848485, + "acc_stderr": 0.03524390844511785, + "acc_norm": 0.28484848484848485, + "acc_norm_stderr": 0.03524390844511785 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24112607099143207, + "mc1_stderr": 0.01497482727975233, + "mc2": 0.3838342466674412, + "mc2_stderr": 0.0149406355520632 + }, + 
"harness|ko_commongen_v2|2": { + "acc": 0.2727272727272727, + "acc_stderr": 0.015311853110300352, + "acc_norm": 0.31641086186540734, + "acc_norm_stderr": 0.015989617951065477 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + 
"harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "shangrilar/llama-2-ko-7b-ck-sftem", + "model_sha": "f76cbec4da7eaa0823db2a9f25a9f1e8fb305567", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/shangrilar/llama-2-ko-7b-ck-sftm/result_2023-12-03 02:59:57.json b/shangrilar/llama-2-ko-7b-ck-sftm/result_2023-12-03 02:59:57.json new file mode 100644 index 0000000000000000000000000000000000000000..8dfad45d12e5f6861e691174df1991f7f848d16c --- /dev/null +++ b/shangrilar/llama-2-ko-7b-ck-sftm/result_2023-12-03 02:59:57.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3216723549488055, + "acc_stderr": 0.013650488084494164, + "acc_norm": 0.371160409556314, + "acc_norm_stderr": 0.014117971901142817 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3851822346146186, + "acc_stderr": 0.004856437955719859, + "acc_norm": 0.5043815972913762, + "acc_norm_stderr": 0.00498958981618023 + }, + 
"harness|ko_mmlu_world_religions|5": { + "acc": 0.30409356725146197, + "acc_stderr": 0.0352821125824523, + "acc_norm": 0.30409356725146197, + "acc_norm_stderr": 0.0352821125824523 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.32038834951456313, + "acc_stderr": 0.0462028408228004, + "acc_norm": 0.32038834951456313, + "acc_norm_stderr": 0.0462028408228004 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.32567049808429116, + "acc_stderr": 0.016757989458549682, + "acc_norm": 0.32567049808429116, + "acc_norm_stderr": 0.016757989458549682 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.32592592592592595, + "acc_stderr": 0.040491220417025055, + "acc_norm": 0.32592592592592595, + "acc_norm_stderr": 0.040491220417025055 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.32340425531914896, + "acc_stderr": 0.030579442773610337, + "acc_norm": 0.32340425531914896, + "acc_norm_stderr": 0.030579442773610337 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.2710843373493976, + "acc_stderr": 0.034605799075530276, + "acc_norm": 0.2710843373493976, + "acc_norm_stderr": 0.034605799075530276 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3311897106109325, + "acc_stderr": 0.026730620728004913, + "acc_norm": 0.3311897106109325, + "acc_norm_stderr": 0.026730620728004913 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.36771300448430494, + "acc_stderr": 0.03236198350928275, + "acc_norm": 0.36771300448430494, + "acc_norm_stderr": 0.03236198350928275 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.26717557251908397, + "acc_stderr": 0.03880848301082394, + "acc_norm": 0.26717557251908397, + "acc_norm_stderr": 0.03880848301082394 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + 
"harness|ko_mmlu_high_school_geography|5": { + "acc": 0.2474747474747475, + "acc_stderr": 0.030746300742124498, + "acc_norm": 0.2474747474747475, + "acc_norm_stderr": 0.030746300742124498 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.25517241379310346, + "acc_stderr": 0.03632984052707842, + "acc_norm": 0.25517241379310346, + "acc_norm_stderr": 0.03632984052707842 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.04023382273617747, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.04023382273617747 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.24369747899159663, + "acc_stderr": 0.02788682807838058, + "acc_norm": 0.24369747899159663, + "acc_norm_stderr": 0.02788682807838058 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.24358974358974358, + "acc_stderr": 0.02176373368417392, + "acc_norm": 0.24358974358974358, + "acc_norm_stderr": 0.02176373368417392 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.044531975073749834, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.044531975073749834 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.23645320197044334, + "acc_stderr": 0.029896114291733545, + "acc_norm": 0.23645320197044334, + "acc_norm_stderr": 0.029896114291733545 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2806451612903226, + "acc_stderr": 0.02556060472102289, + "acc_norm": 0.2806451612903226, + "acc_norm_stderr": 0.02556060472102289 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.3418803418803419, + "acc_stderr": 0.03107502852650775, + "acc_norm": 
0.3418803418803419, + "acc_norm_stderr": 0.03107502852650775 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2641509433962264, + "acc_stderr": 0.027134291628741713, + "acc_norm": 0.2641509433962264, + "acc_norm_stderr": 0.027134291628741713 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.35454545454545455, + "acc_stderr": 0.04582004841505415, + "acc_norm": 0.35454545454545455, + "acc_norm_stderr": 0.04582004841505415 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25555555555555554, + "acc_stderr": 0.026593939101844065, + "acc_norm": 0.25555555555555554, + "acc_norm_stderr": 0.026593939101844065 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2251655629139073, + "acc_stderr": 0.03410435282008936, + "acc_norm": 0.2251655629139073, + "acc_norm_stderr": 0.03410435282008936 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.263681592039801, + "acc_stderr": 0.03115715086935557, + "acc_norm": 0.263681592039801, + "acc_norm_stderr": 0.03115715086935557 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.1907514450867052, + "acc_stderr": 0.029957851329869337, + "acc_norm": 0.1907514450867052, + "acc_norm_stderr": 0.029957851329869337 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.022644212615525218, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.022644212615525218 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.24305555555555555, + "acc_stderr": 0.03586879280080341, + "acc_norm": 0.24305555555555555, + "acc_norm_stderr": 0.03586879280080341 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816505, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816505 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2745664739884393, + 
"acc_stderr": 0.024027745155265016, + "acc_norm": 0.2745664739884393, + "acc_norm_stderr": 0.024027745155265016 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2883435582822086, + "acc_stderr": 0.035590395316173425, + "acc_norm": 0.2883435582822086, + "acc_norm_stderr": 0.035590395316173425 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.025407197798890155, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.025407197798890155 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.21761658031088082, + "acc_stderr": 0.02977866303775295, + "acc_norm": 0.21761658031088082, + "acc_norm_stderr": 0.02977866303775295 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.041424397194893624, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.041424397194893624 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.24954128440366974, + "acc_stderr": 0.018553897629501624, + "acc_norm": 0.24954128440366974, + "acc_norm_stderr": 0.018553897629501624 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.15873015873015872, + "acc_stderr": 0.032684540130117436, + "acc_norm": 0.15873015873015872, + "acc_norm_stderr": 0.032684540130117436 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.0252616912197295, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.0252616912197295 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720683, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720683 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.256198347107438, + "acc_stderr": 0.03984979653302871, + "acc_norm": 0.256198347107438, + "acc_norm_stderr": 0.03984979653302871 + }, + "harness|ko_mmlu_astronomy|5": { + 
"acc": 0.20394736842105263, + "acc_stderr": 0.03279000406310049, + "acc_norm": 0.20394736842105263, + "acc_norm_stderr": 0.03279000406310049 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.26143790849673204, + "acc_stderr": 0.01777694715752803, + "acc_norm": 0.26143790849673204, + "acc_norm_stderr": 0.01777694715752803 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.25886524822695034, + "acc_stderr": 0.026129572527180848, + "acc_norm": 0.25886524822695034, + "acc_norm_stderr": 0.026129572527180848 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.29464285714285715, + "acc_stderr": 0.043270409325787296, + "acc_norm": 0.29464285714285715, + "acc_norm_stderr": 0.043270409325787296 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.16666666666666666, + "acc_stderr": 0.025416428388767478, + "acc_norm": 0.16666666666666666, + "acc_norm_stderr": 0.025416428388767478 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.18, + "acc_stderr": 0.038612291966536955, + "acc_norm": 0.18, + "acc_norm_stderr": 0.038612291966536955 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.2536764705882353, + "acc_stderr": 0.026431329870789538, + "acc_norm": 0.2536764705882353, + "acc_norm_stderr": 0.026431329870789538 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.19591836734693877, + "acc_stderr": 0.02540930195322568, + "acc_norm": 0.19591836734693877, + "acc_norm_stderr": 0.02540930195322568 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2742616033755274, + "acc_stderr": 0.029041333510598035, + "acc_norm": 0.2742616033755274, + 
"acc_norm_stderr": 0.029041333510598035 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.23989569752281617, + "acc_stderr": 0.010906282617981648, + "acc_norm": 0.23989569752281617, + "acc_norm_stderr": 0.010906282617981648 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.22058823529411764, + "acc_stderr": 0.02910225438967409, + "acc_norm": 0.22058823529411764, + "acc_norm_stderr": 0.02910225438967409 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2727272727272727, + "acc_stderr": 0.0347769116216366, + "acc_norm": 0.2727272727272727, + "acc_norm_stderr": 0.0347769116216366 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.22766217870257038, + "mc1_stderr": 0.01467925503211107, + "mc2": 0.3687513230749265, + "mc2_stderr": 0.014956938558145557 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.29161747343565525, + "acc_stderr": 0.015626276690070242, + "acc_norm": 0.3624557260920897, + "acc_norm_stderr": 0.016527131240453706 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + 
"harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "shangrilar/llama-2-ko-7b-ck-sftm", + "model_sha": "98291ad089efcc9013a22607275f4b4589a91e7e", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ 
No newline at end of file diff --git a/shangrilar/llama-2-ko-7b-ck/result_2023-12-02 13:15:15.json b/shangrilar/llama-2-ko-7b-ck/result_2023-12-02 13:15:15.json new file mode 100644 index 0000000000000000000000000000000000000000..1f182e8cf4750dd3252d7aa06c92338da195672b --- /dev/null +++ b/shangrilar/llama-2-ko-7b-ck/result_2023-12-02 13:15:15.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3165529010238908, + "acc_stderr": 0.01359243151906808, + "acc_norm": 0.3796928327645051, + "acc_norm_stderr": 0.014182119866974872 + }, + "harness|ko_hellaswag|10": { + "acc": 0.38109938259310894, + "acc_stderr": 0.004846643735666549, + "acc_norm": 0.5036845249950209, + "acc_norm_stderr": 0.004989645929811448 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.28654970760233917, + "acc_stderr": 0.03467826685703826, + "acc_norm": 0.28654970760233917, + "acc_norm_stderr": 0.03467826685703826 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2912621359223301, + "acc_stderr": 0.044986763205729224, + "acc_norm": 0.2912621359223301, + "acc_norm_stderr": 0.044986763205729224 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3563218390804598, + "acc_stderr": 0.0171258537627559, + "acc_norm": 0.3563218390804598, + "acc_norm_stderr": 0.0171258537627559 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34074074074074073, + "acc_stderr": 0.04094376269996796, + "acc_norm": 0.34074074074074073, + "acc_norm_stderr": 0.04094376269996796 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421255, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421255 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3276595744680851, + "acc_stderr": 0.030683020843231004, + "acc_norm": 0.3276595744680851, + "acc_norm_stderr": 0.030683020843231004 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3493975903614458, + "acc_stderr": 0.03711725190740749, + "acc_norm": 0.3493975903614458, + "acc_norm_stderr": 
0.03711725190740749 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3440514469453376, + "acc_stderr": 0.026981478043648022, + "acc_norm": 0.3440514469453376, + "acc_norm_stderr": 0.026981478043648022 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.35874439461883406, + "acc_stderr": 0.03219079200419996, + "acc_norm": 0.35874439461883406, + "acc_norm_stderr": 0.03219079200419996 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3511450381679389, + "acc_stderr": 0.04186445163013751, + "acc_norm": 0.3511450381679389, + "acc_norm_stderr": 0.04186445163013751 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768077, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768077 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3838383838383838, + "acc_stderr": 0.03464881675016339, + "acc_norm": 0.3838383838383838, + "acc_norm_stderr": 0.03464881675016339 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2827586206896552, + "acc_stderr": 0.03752833958003336, + "acc_norm": 0.2827586206896552, + "acc_norm_stderr": 0.03752833958003336 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.04023382273617747, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.04023382273617747 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.29831932773109243, + "acc_stderr": 0.029719142876342863, + "acc_norm": 0.29831932773109243, + "acc_norm_stderr": 0.029719142876342863 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2564102564102564, + "acc_stderr": 0.022139081103971524, + "acc_norm": 0.2564102564102564, + "acc_norm_stderr": 0.022139081103971524 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036844, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036844 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + 
"acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.04330043749650743, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.04330043749650743 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.0317852971064275, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.0317852971064275 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3096774193548387, + "acc_stderr": 0.026302774983517418, + "acc_norm": 0.3096774193548387, + "acc_norm_stderr": 0.026302774983517418 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.32051282051282054, + "acc_stderr": 0.030572811310299607, + "acc_norm": 0.32051282051282054, + "acc_norm_stderr": 0.030572811310299607 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.32452830188679244, + "acc_stderr": 0.028815615713432115, + "acc_norm": 0.32452830188679244, + "acc_norm_stderr": 0.028815615713432115 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.34545454545454546, + "acc_stderr": 0.04554619617541054, + "acc_norm": 0.34545454545454546, + "acc_norm_stderr": 0.04554619617541054 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2518518518518518, + "acc_stderr": 0.02646611753895991, + "acc_norm": 0.2518518518518518, + "acc_norm_stderr": 0.02646611753895991 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.32450331125827814, + "acc_stderr": 0.03822746937658754, + "acc_norm": 0.32450331125827814, + "acc_norm_stderr": 0.03822746937658754 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.27860696517412936, + "acc_stderr": 0.031700561834973086, + "acc_norm": 0.27860696517412936, + "acc_norm_stderr": 0.031700561834973086 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.24855491329479767, + "acc_stderr": 0.03295304696818318, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 0.03295304696818318 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + 
"acc": 0.24867724867724866, + "acc_stderr": 0.02226181769240017, + "acc_norm": 0.24867724867724866, + "acc_norm_stderr": 0.02226181769240017 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2152777777777778, + "acc_stderr": 0.034370793441061344, + "acc_norm": 0.2152777777777778, + "acc_norm_stderr": 0.034370793441061344 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720683, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720683 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621503, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621503 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.3063583815028902, + "acc_stderr": 0.024818350129436593, + "acc_norm": 0.3063583815028902, + "acc_norm_stderr": 0.024818350129436593 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.22085889570552147, + "acc_stderr": 0.03259177392742178, + "acc_norm": 0.22085889570552147, + "acc_norm_stderr": 0.03259177392742178 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.026229649178821157, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.026229649178821157 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.26424870466321243, + "acc_stderr": 0.03182155050916647, + "acc_norm": 0.26424870466321243, + "acc_norm_stderr": 0.03182155050916647 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.04185774424022056, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.04185774424022056 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3761467889908257, + "acc_stderr": 0.020769231968205074, + "acc_norm": 0.3761467889908257, + "acc_norm_stderr": 0.020769231968205074 + }, + 
"harness|ko_mmlu_formal_logic|5": { + "acc": 0.1746031746031746, + "acc_stderr": 0.03395490020856111, + "acc_norm": 0.1746031746031746, + "acc_norm_stderr": 0.03395490020856111 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3627450980392157, + "acc_stderr": 0.027530078447110307, + "acc_norm": 0.3627450980392157, + "acc_norm_stderr": 0.027530078447110307 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.4214876033057851, + "acc_stderr": 0.045077322787750944, + "acc_norm": 0.4214876033057851, + "acc_norm_stderr": 0.045077322787750944 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.34210526315789475, + "acc_stderr": 0.03860731599316092, + "acc_norm": 0.34210526315789475, + "acc_norm_stderr": 0.03860731599316092 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.25980392156862747, + "acc_stderr": 0.017740899509177795, + "acc_norm": 0.25980392156862747, + "acc_norm_stderr": 0.017740899509177795 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2872340425531915, + "acc_stderr": 0.026992199173064356, + "acc_norm": 0.2872340425531915, + "acc_norm_stderr": 0.026992199173064356 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25892857142857145, + "acc_stderr": 0.04157751539865629, + "acc_norm": 0.25892857142857145, + "acc_norm_stderr": 0.04157751539865629 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.41203703703703703, + "acc_stderr": 0.03356787758160835, + "acc_norm": 0.41203703703703703, + "acc_norm_stderr": 0.03356787758160835 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24134078212290502, + "acc_stderr": 0.014310999547961441, + "acc_norm": 0.24134078212290502, + "acc_norm_stderr": 0.014310999547961441 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768077, + "acc_norm": 0.26, + 
"acc_norm_stderr": 0.04408440022768077 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.23, + "acc_stderr": 0.042295258468165065, + "acc_norm": 0.23, + "acc_norm_stderr": 0.042295258468165065 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.43014705882352944, + "acc_stderr": 0.030074971917302875, + "acc_norm": 0.43014705882352944, + "acc_norm_stderr": 0.030074971917302875 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2693877551020408, + "acc_stderr": 0.02840125202902294, + "acc_norm": 0.2693877551020408, + "acc_norm_stderr": 0.02840125202902294 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.03068582059661081, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.03068582059661081 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.27183833116036504, + "acc_stderr": 0.011363135278651411, + "acc_norm": 0.27183833116036504, + "acc_norm_stderr": 0.011363135278651411 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.29901960784313725, + "acc_stderr": 0.03213325717373616, + "acc_norm": 0.29901960784313725, + "acc_norm_stderr": 0.03213325717373616 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.03453131801885415, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.03453131801885415 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2350061199510404, + "mc1_stderr": 0.014843061507731618, + "mc2": 0.3746009857468504, + "mc2_stderr": 0.01475413087944273 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2680047225501771, + "acc_stderr": 0.015227905796335145, + "acc_norm": 0.38488783943329397, + "acc_norm_stderr": 0.016728579701498658 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + 
"harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + 
"harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "shangrilar/llama-2-ko-7b-ck", + "model_sha": "de35087a933260a50d9260fd5ccf1247e43c1c5a", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/shangrilar/llama-2-ko-7b-sft/result_2023-12-03 07:45:17.json b/shangrilar/llama-2-ko-7b-sft/result_2023-12-03 07:45:17.json new file mode 100644 index 0000000000000000000000000000000000000000..bbb1efc28127d35b0e3cf4461333ebf85b16bfcb --- /dev/null +++ b/shangrilar/llama-2-ko-7b-sft/result_2023-12-03 07:45:17.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.302901023890785, + "acc_stderr": 0.013428241573185349, + "acc_norm": 0.35580204778157, + "acc_norm_stderr": 0.013990571137918762 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3829914359689305, + "acc_stderr": 0.004851227527070894, + "acc_norm": 0.5017924716191994, + "acc_norm_stderr": 0.004989749347461088 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.3157894736842105, + "acc_stderr": 0.03565079670708312, + "acc_norm": 0.3157894736842105, + "acc_norm_stderr": 0.03565079670708312 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2912621359223301, + "acc_stderr": 0.044986763205729224, + "acc_norm": 0.2912621359223301, + "acc_norm_stderr": 0.044986763205729224 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3397190293742018, + "acc_stderr": 0.01693639411430165, + 
"acc_norm": 0.3397190293742018, + "acc_norm_stderr": 0.01693639411430165 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04072314811876837, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04072314811876837 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2978723404255319, + "acc_stderr": 0.02989614568209546, + "acc_norm": 0.2978723404255319, + "acc_norm_stderr": 0.02989614568209546 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.30120481927710846, + "acc_stderr": 0.03571609230053481, + "acc_norm": 0.30120481927710846, + "acc_norm_stderr": 0.03571609230053481 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3247588424437299, + "acc_stderr": 0.026596782287697043, + "acc_norm": 0.3247588424437299, + "acc_norm_stderr": 0.026596782287697043 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3273542600896861, + "acc_stderr": 0.03149384670994131, + "acc_norm": 0.3273542600896861, + "acc_norm_stderr": 0.03149384670994131 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.29770992366412213, + "acc_stderr": 0.040103589424622034, + "acc_norm": 0.29770992366412213, + "acc_norm_stderr": 0.040103589424622034 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.29797979797979796, + "acc_stderr": 0.03258630383836556, + "acc_norm": 0.29797979797979796, + "acc_norm_stderr": 0.03258630383836556 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2482758620689655, + "acc_stderr": 0.03600105692727772, + "acc_norm": 0.2482758620689655, + "acc_norm_stderr": 0.03600105692727772 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 
0.04092563958237655, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237655 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.29831932773109243, + "acc_stderr": 0.029719142876342853, + "acc_norm": 0.29831932773109243, + "acc_norm_stderr": 0.029719142876342853 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2717948717948718, + "acc_stderr": 0.022556551010132354, + "acc_norm": 0.2717948717948718, + "acc_norm_stderr": 0.022556551010132354 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.34, + "acc_stderr": 0.047609522856952344, + "acc_norm": 0.34, + "acc_norm_stderr": 0.047609522856952344 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.28703703703703703, + "acc_stderr": 0.043733130409147614, + "acc_norm": 0.28703703703703703, + "acc_norm_stderr": 0.043733130409147614 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.28078817733990147, + "acc_stderr": 0.03161856335358607, + "acc_norm": 0.28078817733990147, + "acc_norm_stderr": 0.03161856335358607 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.31290322580645163, + "acc_stderr": 0.02637756702864586, + "acc_norm": 0.31290322580645163, + "acc_norm_stderr": 0.02637756702864586 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.030882736974138656, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.030882736974138656 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3018867924528302, + "acc_stderr": 0.028254200344438655, + "acc_norm": 0.3018867924528302, + "acc_norm_stderr": 0.028254200344438655 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.3, + "acc_stderr": 0.04389311454644286, + "acc_norm": 0.3, + "acc_norm_stderr": 0.04389311454644286 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 
0.23333333333333334, + "acc_stderr": 0.025787874220959305, + "acc_norm": 0.23333333333333334, + "acc_norm_stderr": 0.025787874220959305 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.03802039760107903, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.03802039760107903 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.26865671641791045, + "acc_stderr": 0.03134328358208954, + "acc_norm": 0.26865671641791045, + "acc_norm_stderr": 0.03134328358208954 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.23121387283236994, + "acc_stderr": 0.032147373020294696, + "acc_norm": 0.23121387283236994, + "acc_norm_stderr": 0.032147373020294696 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.24603174603174602, + "acc_stderr": 0.022182037202948368, + "acc_norm": 0.24603174603174602, + "acc_norm_stderr": 0.022182037202948368 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.034765901043041336, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.034765901043041336 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.19, + "acc_stderr": 0.039427724440366234, + "acc_norm": 0.19, + "acc_norm_stderr": 0.039427724440366234 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.28901734104046245, + "acc_stderr": 0.024405173935783234, + "acc_norm": 0.28901734104046245, + "acc_norm_stderr": 0.024405173935783234 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.25766871165644173, + "acc_stderr": 0.03436150827846917, + "acc_norm": 0.25766871165644173, + "acc_norm_stderr": 0.03436150827846917 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.32098765432098764, + "acc_stderr": 0.025976566010862744, + "acc_norm": 0.32098765432098764, + "acc_norm_stderr": 0.025976566010862744 + }, + 
"harness|ko_mmlu_college_mathematics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.25906735751295334, + "acc_stderr": 0.031618779179354115, + "acc_norm": 0.25906735751295334, + "acc_norm_stderr": 0.031618779179354115 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.040969851398436716, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.040969851398436716 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.27522935779816515, + "acc_stderr": 0.019149093743155196, + "acc_norm": 0.27522935779816515, + "acc_norm_stderr": 0.019149093743155196 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.1746031746031746, + "acc_stderr": 0.03395490020856111, + "acc_norm": 0.1746031746031746, + "acc_norm_stderr": 0.03395490020856111 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.31699346405228757, + "acc_stderr": 0.02664327847450875, + "acc_norm": 0.31699346405228757, + "acc_norm_stderr": 0.02664327847450875 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768077, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768077 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.35537190082644626, + "acc_stderr": 0.0436923632657398, + "acc_norm": 0.35537190082644626, + "acc_norm_stderr": 0.0436923632657398 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3026315789473684, + "acc_stderr": 0.037385206761196686, + "acc_norm": 0.3026315789473684, + "acc_norm_stderr": 0.037385206761196686 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.272875816993464, + "acc_stderr": 0.01802047414839358, + "acc_norm": 0.272875816993464, + "acc_norm_stderr": 0.01802047414839358 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.26595744680851063, + "acc_stderr": 0.026358065698880592, + "acc_norm": 0.26595744680851063, 
+ "acc_norm_stderr": 0.026358065698880592 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3125, + "acc_stderr": 0.043994650575715215, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.043994650575715215 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.0305467452649532, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.0305467452649532 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036846, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036846 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3602941176470588, + "acc_stderr": 0.029163128570670736, + "acc_norm": 0.3602941176470588, + "acc_norm_stderr": 0.029163128570670736 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.23673469387755103, + "acc_stderr": 0.02721283588407315, + "acc_norm": 0.23673469387755103, + "acc_norm_stderr": 0.02721283588407315 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.0306858205966108, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.0306858205966108 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.27835723598435463, + "acc_stderr": 0.011446990197380985, + "acc_norm": 0.27835723598435463, + "acc_norm_stderr": 0.011446990197380985 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.28921568627450983, + "acc_stderr": 0.031822318676475544, + "acc_norm": 0.28921568627450983, + "acc_norm_stderr": 0.031822318676475544 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2606060606060606, + "acc_stderr": 
0.03427743175816524, + "acc_norm": 0.2606060606060606, + "acc_norm_stderr": 0.03427743175816524 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.23745410036719705, + "mc1_stderr": 0.014896277441041857, + "mc2": 0.3666390551157725, + "mc2_stderr": 0.014763767483291076 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.27744982290436837, + "acc_stderr": 0.015393630236605973, + "acc_norm": 0.3577331759149941, + "acc_norm_stderr": 0.01647980893574998 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + 
"harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "shangrilar/llama-2-ko-7b-sft", + "model_sha": "c46445b8e3d815fb4d36d7deca69343f2a7df17a", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/shangrilar/llama-2-ko-7b-sfte/result_2023-12-03 07:45:21.json b/shangrilar/llama-2-ko-7b-sfte/result_2023-12-03 07:45:21.json new file mode 100644 index 0000000000000000000000000000000000000000..a649da1a737f3a328e9d7160d55698678e25e02d --- /dev/null +++ b/shangrilar/llama-2-ko-7b-sfte/result_2023-12-03 07:45:21.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.31313993174061433, + "acc_stderr": 0.013552671543623504, + "acc_norm": 
0.363481228668942, + "acc_norm_stderr": 0.014056207319068285 + }, + "harness|ko_hellaswag|10": { + "acc": 0.38348934475204144, + "acc_stderr": 0.004852420856631481, + "acc_norm": 0.5002987452698665, + "acc_norm_stderr": 0.004989780520782245 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.32748538011695905, + "acc_stderr": 0.035993357714560276, + "acc_norm": 0.32748538011695905, + "acc_norm_stderr": 0.035993357714560276 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.3106796116504854, + "acc_stderr": 0.045821241601615506, + "acc_norm": 0.3106796116504854, + "acc_norm_stderr": 0.045821241601615506 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.36909323116219667, + "acc_stderr": 0.017256283109124616, + "acc_norm": 0.36909323116219667, + "acc_norm_stderr": 0.017256283109124616 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.31851851851851853, + "acc_stderr": 0.0402477840197711, + "acc_norm": 0.31851851851851853, + "acc_norm_stderr": 0.0402477840197711 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.31063829787234043, + "acc_stderr": 0.03025123757921317, + "acc_norm": 0.31063829787234043, + "acc_norm_stderr": 0.03025123757921317 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.27710843373493976, + "acc_stderr": 0.034843315926805875, + "acc_norm": 0.27710843373493976, + "acc_norm_stderr": 0.034843315926805875 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3665594855305466, + "acc_stderr": 0.02736807824397162, + "acc_norm": 0.3665594855305466, + "acc_norm_stderr": 0.02736807824397162 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.38565022421524664, + "acc_stderr": 0.03266842214289201, + "acc_norm": 0.38565022421524664, + "acc_norm_stderr": 0.03266842214289201 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2900763358778626, + "acc_stderr": 0.03980066246467765, 
+ "acc_norm": 0.2900763358778626, + "acc_norm_stderr": 0.03980066246467765 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909282, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909282 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3282828282828283, + "acc_stderr": 0.03345678422756777, + "acc_norm": 0.3282828282828283, + "acc_norm_stderr": 0.03345678422756777 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2896551724137931, + "acc_stderr": 0.037800192304380135, + "acc_norm": 0.2896551724137931, + "acc_norm_stderr": 0.037800192304380135 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237655, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237655 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.24789915966386555, + "acc_stderr": 0.028047967224176896, + "acc_norm": 0.24789915966386555, + "acc_norm_stderr": 0.028047967224176896 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.25384615384615383, + "acc_stderr": 0.022066054378726257, + "acc_norm": 0.25384615384615383, + "acc_norm_stderr": 0.022066054378726257 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.044143436668549335, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.044143436668549335 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.27586206896551724, + "acc_stderr": 0.03144712581678243, + "acc_norm": 0.27586206896551724, + "acc_norm_stderr": 0.03144712581678243 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2967741935483871, + 
"acc_stderr": 0.025988500792411894, + "acc_norm": 0.2967741935483871, + "acc_norm_stderr": 0.025988500792411894 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.3974358974358974, + "acc_stderr": 0.03205953453789293, + "acc_norm": 0.3974358974358974, + "acc_norm_stderr": 0.03205953453789293 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.30943396226415093, + "acc_stderr": 0.028450154794118627, + "acc_norm": 0.30943396226415093, + "acc_norm_stderr": 0.028450154794118627 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.34545454545454546, + "acc_stderr": 0.04554619617541054, + "acc_norm": 0.34545454545454546, + "acc_norm_stderr": 0.04554619617541054 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24814814814814815, + "acc_stderr": 0.0263357394040558, + "acc_norm": 0.24814814814814815, + "acc_norm_stderr": 0.0263357394040558 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.26490066225165565, + "acc_stderr": 0.03603038545360382, + "acc_norm": 0.26490066225165565, + "acc_norm_stderr": 0.03603038545360382 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.2736318407960199, + "acc_stderr": 0.03152439186555402, + "acc_norm": 0.2736318407960199, + "acc_norm_stderr": 0.03152439186555402 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.24277456647398843, + "acc_stderr": 0.0326926380614177, + "acc_norm": 0.24277456647398843, + "acc_norm_stderr": 0.0326926380614177 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.25132275132275134, + "acc_stderr": 0.022340482339643898, + "acc_norm": 0.25132275132275134, + "acc_norm_stderr": 0.022340482339643898 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.034765901043041336, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.034765901043041336 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036844, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036844 + }, + 
"harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.32947976878612717, + "acc_stderr": 0.02530525813187971, + "acc_norm": 0.32947976878612717, + "acc_norm_stderr": 0.02530525813187971 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.294478527607362, + "acc_stderr": 0.03581165790474082, + "acc_norm": 0.294478527607362, + "acc_norm_stderr": 0.03581165790474082 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.31790123456790126, + "acc_stderr": 0.025910063528240865, + "acc_norm": 0.31790123456790126, + "acc_norm_stderr": 0.025910063528240865 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932269, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932269 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.2538860103626943, + "acc_stderr": 0.03141024780565319, + "acc_norm": 0.2538860103626943, + "acc_norm_stderr": 0.03141024780565319 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.041857744240220575, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.041857744240220575 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3100917431192661, + "acc_stderr": 0.01983084968443975, + "acc_norm": 0.3100917431192661, + "acc_norm_stderr": 0.01983084968443975 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.15873015873015872, + "acc_stderr": 0.032684540130117436, + "acc_norm": 0.15873015873015872, + "acc_norm_stderr": 0.032684540130117436 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.026992544339297243, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.026992544339297243 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720683, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720683 + }, 
+ "harness|ko_mmlu_international_law|5": { + "acc": 0.2809917355371901, + "acc_stderr": 0.04103203830514512, + "acc_norm": 0.2809917355371901, + "acc_norm_stderr": 0.04103203830514512 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3026315789473684, + "acc_stderr": 0.03738520676119669, + "acc_norm": 0.3026315789473684, + "acc_norm_stderr": 0.03738520676119669 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2957516339869281, + "acc_stderr": 0.018463154132632824, + "acc_norm": 0.2957516339869281, + "acc_norm_stderr": 0.018463154132632824 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2907801418439716, + "acc_stderr": 0.027090664368353178, + "acc_norm": 0.2907801418439716, + "acc_norm_stderr": 0.027090664368353178 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3392857142857143, + "acc_stderr": 0.044939490686135404, + "acc_norm": 0.3392857142857143, + "acc_norm_stderr": 0.044939490686135404 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2175925925925926, + "acc_stderr": 0.028139689444859655, + "acc_norm": 0.2175925925925926, + "acc_norm_stderr": 0.028139689444859655 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.22, + "acc_stderr": 0.0416333199893227, + "acc_norm": 0.22, + "acc_norm_stderr": 0.0416333199893227 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.23, + "acc_stderr": 0.042295258468165065, + "acc_norm": 0.23, + "acc_norm_stderr": 0.042295258468165065 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3125, + "acc_stderr": 0.02815637344037142, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.02815637344037142 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2693877551020408, + "acc_stderr": 0.02840125202902294, + "acc_norm": 0.2693877551020408, + 
"acc_norm_stderr": 0.02840125202902294 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.35443037974683544, + "acc_stderr": 0.03113730429718581, + "acc_norm": 0.35443037974683544, + "acc_norm_stderr": 0.03113730429718581 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2842242503259452, + "acc_stderr": 0.011519880596516076, + "acc_norm": 0.2842242503259452, + "acc_norm_stderr": 0.011519880596516076 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.29901960784313725, + "acc_stderr": 0.03213325717373616, + "acc_norm": 0.29901960784313725, + "acc_norm_stderr": 0.03213325717373616 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3212121212121212, + "acc_stderr": 0.03646204963253812, + "acc_norm": 0.3212121212121212, + "acc_norm_stderr": 0.03646204963253812 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2521419828641371, + "mc1_stderr": 0.015201522246299946, + "mc2": 0.3889465942097803, + "mc2_stderr": 0.01485269997681454 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2715466351829988, + "acc_stderr": 0.015291071117310382, + "acc_norm": 0.3742621015348288, + "acc_norm_stderr": 0.016637917789798742 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + 
"harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "shangrilar/llama-2-ko-7b-sfte", + "model_sha": 
"7bc9f9eecb357e17900b9a99a579820f5b130afe", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/shangrilar/llama-2-ko-7b-sftem/result_2023-12-03 07:45:31.json b/shangrilar/llama-2-ko-7b-sftem/result_2023-12-03 07:45:31.json new file mode 100644 index 0000000000000000000000000000000000000000..8f0b6081b732ca974a886ccc76b77ace2b717a23 --- /dev/null +++ b/shangrilar/llama-2-ko-7b-sftem/result_2023-12-03 07:45:31.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3267918088737201, + "acc_stderr": 0.01370666597558734, + "acc_norm": 0.37457337883959047, + "acc_norm_stderr": 0.014144193471893447 + }, + "harness|ko_hellaswag|10": { + "acc": 0.38627763393746267, + "acc_stderr": 0.00485900418469461, + "acc_norm": 0.4965146385182235, + "acc_norm_stderr": 0.004989660180792161 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.30994152046783624, + "acc_stderr": 0.03546976959393162, + "acc_norm": 0.30994152046783624, + "acc_norm_stderr": 0.03546976959393162 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2621359223300971, + "acc_stderr": 0.043546310772605956, + "acc_norm": 0.2621359223300971, + "acc_norm_stderr": 0.043546310772605956 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.35759897828863346, + "acc_stderr": 0.017139488998803288, + "acc_norm": 0.35759897828863346, + "acc_norm_stderr": 0.017139488998803288 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04072314811876837, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04072314811876837 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.30638297872340425, + "acc_stderr": 0.03013590647851756, + "acc_norm": 
0.30638297872340425, + "acc_norm_stderr": 0.03013590647851756 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3132530120481928, + "acc_stderr": 0.036108050180310235, + "acc_norm": 0.3132530120481928, + "acc_norm_stderr": 0.036108050180310235 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3311897106109325, + "acc_stderr": 0.026730620728004913, + "acc_norm": 0.3311897106109325, + "acc_norm_stderr": 0.026730620728004913 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3183856502242152, + "acc_stderr": 0.03126580522513713, + "acc_norm": 0.3183856502242152, + "acc_norm_stderr": 0.03126580522513713 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.26717557251908397, + "acc_stderr": 0.03880848301082396, + "acc_norm": 0.26717557251908397, + "acc_norm_stderr": 0.03880848301082396 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.31313131313131315, + "acc_stderr": 0.033042050878136525, + "acc_norm": 0.31313131313131315, + "acc_norm_stderr": 0.033042050878136525 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2482758620689655, + "acc_stderr": 0.036001056927277716, + "acc_norm": 0.2482758620689655, + "acc_norm_stderr": 0.036001056927277716 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237655, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237655 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.25210084033613445, + "acc_stderr": 0.02820554503327772, + "acc_norm": 0.25210084033613445, + "acc_norm_stderr": 0.02820554503327772 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2564102564102564, + "acc_stderr": 0.022139081103971524, + "acc_norm": 0.2564102564102564, + "acc_norm_stderr": 0.022139081103971524 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 
0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.04489931073591312, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.04489931073591312 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.30049261083743845, + "acc_stderr": 0.03225799476233484, + "acc_norm": 0.30049261083743845, + "acc_norm_stderr": 0.03225799476233484 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.31290322580645163, + "acc_stderr": 0.02637756702864586, + "acc_norm": 0.31290322580645163, + "acc_norm_stderr": 0.02637756702864586 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.358974358974359, + "acc_stderr": 0.03142616993791923, + "acc_norm": 0.358974358974359, + "acc_norm_stderr": 0.03142616993791923 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3169811320754717, + "acc_stderr": 0.028637235639800925, + "acc_norm": 0.3169811320754717, + "acc_norm_stderr": 0.028637235639800925 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.33636363636363636, + "acc_stderr": 0.04525393596302505, + "acc_norm": 0.33636363636363636, + "acc_norm_stderr": 0.04525393596302505 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.23703703703703705, + "acc_stderr": 0.025928876132766128, + "acc_norm": 0.23703703703703705, + "acc_norm_stderr": 0.025928876132766128 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.03710185726119996, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.03710185726119996 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.263681592039801, + "acc_stderr": 0.03115715086935556, + "acc_norm": 0.263681592039801, + "acc_norm_stderr": 0.03115715086935556 + }, + 
"harness|ko_mmlu_college_medicine|5": { + "acc": 0.2138728323699422, + "acc_stderr": 0.03126511206173043, + "acc_norm": 0.2138728323699422, + "acc_norm_stderr": 0.03126511206173043 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.23809523809523808, + "acc_stderr": 0.021935878081184756, + "acc_norm": 0.23809523809523808, + "acc_norm_stderr": 0.021935878081184756 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.25, + "acc_stderr": 0.03621034121889507, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03621034121889507 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036845, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036845 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.29190751445086704, + "acc_stderr": 0.024476994076247326, + "acc_norm": 0.29190751445086704, + "acc_norm_stderr": 0.024476994076247326 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.27607361963190186, + "acc_stderr": 0.0351238528370505, + "acc_norm": 0.27607361963190186, + "acc_norm_stderr": 0.0351238528370505 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3395061728395062, + "acc_stderr": 0.026348564412011635, + "acc_norm": 0.3395061728395062, + "acc_norm_stderr": 0.026348564412011635 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.2538860103626943, + "acc_stderr": 0.0314102478056532, + "acc_norm": 0.2538860103626943, + "acc_norm_stderr": 0.0314102478056532 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.041424397194893624, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.041424397194893624 + }, + 
"harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3155963302752294, + "acc_stderr": 0.019926117513869666, + "acc_norm": 0.3155963302752294, + "acc_norm_stderr": 0.019926117513869666 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.15079365079365079, + "acc_stderr": 0.03200686497287392, + "acc_norm": 0.15079365079365079, + "acc_norm_stderr": 0.03200686497287392 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3202614379084967, + "acc_stderr": 0.026716118380156847, + "acc_norm": 0.3202614379084967, + "acc_norm_stderr": 0.026716118380156847 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.35537190082644626, + "acc_stderr": 0.0436923632657398, + "acc_norm": 0.35537190082644626, + "acc_norm_stderr": 0.0436923632657398 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.03459777606810536, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.03459777606810536 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2761437908496732, + "acc_stderr": 0.018087276935663133, + "acc_norm": 0.2761437908496732, + "acc_norm_stderr": 0.018087276935663133 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.28368794326241137, + "acc_stderr": 0.02689170942834396, + "acc_norm": 0.28368794326241137, + "acc_norm_stderr": 0.02689170942834396 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.33035714285714285, + "acc_stderr": 0.04464285714285712, + "acc_norm": 0.33035714285714285, + "acc_norm_stderr": 0.04464285714285712 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.18518518518518517, + "acc_stderr": 0.026491914727355143, + "acc_norm": 0.18518518518518517, + "acc_norm_stderr": 0.026491914727355143 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 
0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.21, + "acc_stderr": 0.04093601807403326, + "acc_norm": 0.21, + "acc_norm_stderr": 0.04093601807403326 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.27941176470588236, + "acc_stderr": 0.027257202606114944, + "acc_norm": 0.27941176470588236, + "acc_norm_stderr": 0.027257202606114944 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.24897959183673468, + "acc_stderr": 0.02768297952296023, + "acc_norm": 0.24897959183673468, + "acc_norm_stderr": 0.02768297952296023 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.31223628691983124, + "acc_stderr": 0.030165137867847008, + "acc_norm": 0.31223628691983124, + "acc_norm_stderr": 0.030165137867847008 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2848761408083442, + "acc_stderr": 0.011527830846368999, + "acc_norm": 0.2848761408083442, + "acc_norm_stderr": 0.011527830846368999 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.29901960784313725, + "acc_stderr": 0.03213325717373616, + "acc_norm": 0.29901960784313725, + "acc_norm_stderr": 0.03213325717373616 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3090909090909091, + "acc_stderr": 0.036085410115739666, + "acc_norm": 0.3090909090909091, + "acc_norm_stderr": 0.036085410115739666 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.23133414932680538, + "mc1_stderr": 0.01476194517486267, + "mc2": 0.3765048864381823, + "mc2_stderr": 0.014810224803999636 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.269185360094451, + "acc_stderr": 0.015249098024144526, + "acc_norm": 0.35182998819362454, + "acc_norm_stderr": 0.016418206451218054 + } + }, + "versions": { + "all": 0, + 
"harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + 
"harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "shangrilar/llama-2-ko-7b-sftem", + "model_sha": "4bd1271926402613fc82bf548033028b4467ccbe", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/shangrilar/llama2-ko-7b-kullm-base/result_2023-10-03 23:40:33.json b/shangrilar/llama2-ko-7b-kullm-base/result_2023-10-03 23:40:33.json new file mode 100644 index 0000000000000000000000000000000000000000..c20d9cff6f0760569ec2e701faf093db8a1a2ef5 --- /dev/null +++ b/shangrilar/llama2-ko-7b-kullm-base/result_2023-10-03 23:40:33.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3174061433447099, + "acc_stderr": 0.01360223908803817, + "acc_norm": 0.38054607508532423, + "acc_norm_stderr": 0.014188277712349814 + }, + "harness|ko_hellaswag|10": { + "acc": 0.38309101772555265, + "acc_stderr": 0.004851466623601449, + "acc_norm": 0.49571798446524595, + "acc_norm_stderr": 0.004989598426249537 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.03377310252209194, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.03377310252209194 + }, + "harness|ko_mmlu_management|5": { + 
"acc": 0.2815533980582524, + "acc_stderr": 0.044532548363264673, + "acc_norm": 0.2815533980582524, + "acc_norm_stderr": 0.044532548363264673 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3371647509578544, + "acc_stderr": 0.016905207420803554, + "acc_norm": 0.3371647509578544, + "acc_norm_stderr": 0.016905207420803554 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04072314811876837, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04072314811876837 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421255, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421255 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3021276595744681, + "acc_stderr": 0.030017554471880557, + "acc_norm": 0.3021276595744681, + "acc_norm_stderr": 0.030017554471880557 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3373493975903614, + "acc_stderr": 0.03680783690727581, + "acc_norm": 0.3373493975903614, + "acc_norm_stderr": 0.03680783690727581 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3279742765273312, + "acc_stderr": 0.0266644108869376, + "acc_norm": 0.3279742765273312, + "acc_norm_stderr": 0.0266644108869376 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3094170403587444, + "acc_stderr": 0.031024411740572196, + "acc_norm": 0.3094170403587444, + "acc_norm_stderr": 0.031024411740572196 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.40458015267175573, + "acc_stderr": 0.043046937953806645, + "acc_norm": 0.40458015267175573, + "acc_norm_stderr": 0.043046937953806645 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.40404040404040403, + "acc_stderr": 0.03496130972056128, + "acc_norm": 0.40404040404040403, + "acc_norm_stderr": 0.03496130972056128 + }, + 
"harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3103448275862069, + "acc_stderr": 0.03855289616378949, + "acc_norm": 0.3103448275862069, + "acc_norm_stderr": 0.03855289616378949 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.19607843137254902, + "acc_stderr": 0.03950581861179962, + "acc_norm": 0.19607843137254902, + "acc_norm_stderr": 0.03950581861179962 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.31932773109243695, + "acc_stderr": 0.030283995525884396, + "acc_norm": 0.31932773109243695, + "acc_norm_stderr": 0.030283995525884396 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2717948717948718, + "acc_stderr": 0.02255655101013235, + "acc_norm": 0.2717948717948718, + "acc_norm_stderr": 0.02255655101013235 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.044531975073749834, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.044531975073749834 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.28078817733990147, + "acc_stderr": 0.03161856335358609, + "acc_norm": 0.28078817733990147, + "acc_norm_stderr": 0.03161856335358609 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3032258064516129, + "acc_stderr": 0.026148685930671746, + "acc_norm": 0.3032258064516129, + "acc_norm_stderr": 0.026148685930671746 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.36752136752136755, + "acc_stderr": 0.031585391577456365, + "acc_norm": 0.36752136752136755, + "acc_norm_stderr": 0.031585391577456365 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.32452830188679244, + "acc_stderr": 0.028815615713432115, + "acc_norm": 
0.32452830188679244, + "acc_norm_stderr": 0.028815615713432115 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.35454545454545455, + "acc_stderr": 0.04582004841505416, + "acc_norm": 0.35454545454545455, + "acc_norm_stderr": 0.04582004841505416 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.026719240783712156, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.026719240783712156 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.03757949922943342, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.03757949922943342 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.3383084577114428, + "acc_stderr": 0.03345563070339192, + "acc_norm": 0.3383084577114428, + "acc_norm_stderr": 0.03345563070339192 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.24855491329479767, + "acc_stderr": 0.03295304696818318, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 0.03295304696818318 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2566137566137566, + "acc_stderr": 0.022494510767503154, + "acc_norm": 0.2566137566137566, + "acc_norm_stderr": 0.022494510767503154 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2152777777777778, + "acc_stderr": 0.03437079344106134, + "acc_norm": 0.2152777777777778, + "acc_norm_stderr": 0.03437079344106134 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.32947976878612717, + "acc_stderr": 0.0253052581318797, + "acc_norm": 0.32947976878612717, + "acc_norm_stderr": 0.0253052581318797 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.26380368098159507, + "acc_stderr": 
0.03462419931615624, + "acc_norm": 0.26380368098159507, + "acc_norm_stderr": 0.03462419931615624 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3271604938271605, + "acc_stderr": 0.026105673861409825, + "acc_norm": 0.3271604938271605, + "acc_norm_stderr": 0.026105673861409825 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.27461139896373055, + "acc_stderr": 0.03221024508041153, + "acc_norm": 0.27461139896373055, + "acc_norm_stderr": 0.03221024508041153 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.040969851398436716, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.040969851398436716 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3798165137614679, + "acc_stderr": 0.020808825617866244, + "acc_norm": 0.3798165137614679, + "acc_norm_stderr": 0.020808825617866244 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.20634920634920634, + "acc_stderr": 0.03619604524124249, + "acc_norm": 0.20634920634920634, + "acc_norm_stderr": 0.03619604524124249 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3660130718954248, + "acc_stderr": 0.027582811415159614, + "acc_norm": 0.3660130718954248, + "acc_norm_stderr": 0.027582811415159614 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.3884297520661157, + "acc_stderr": 0.04449270350068382, + "acc_norm": 0.3884297520661157, + "acc_norm_stderr": 0.04449270350068382 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3618421052631579, + "acc_stderr": 0.03910525752849724, + "acc_norm": 0.3618421052631579, + "acc_norm_stderr": 0.03910525752849724 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 
0.2777777777777778, + "acc_stderr": 0.018120224251484577, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.018120224251484577 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2872340425531915, + "acc_stderr": 0.026992199173064356, + "acc_norm": 0.2872340425531915, + "acc_norm_stderr": 0.026992199173064356 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.04287858751340456, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.04287858751340456 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.032568505702936484, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.032568505702936484 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24022346368715083, + "acc_stderr": 0.014288343803925295, + "acc_norm": 0.24022346368715083, + "acc_norm_stderr": 0.014288343803925295 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816507, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816507 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3897058823529412, + "acc_stderr": 0.02962466358115969, + "acc_norm": 0.3897058823529412, + "acc_norm_stderr": 0.02962466358115969 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2938775510204082, + "acc_stderr": 0.029162738410249772, + "acc_norm": 0.2938775510204082, + "acc_norm_stderr": 0.029162738410249772 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.35864978902953587, + "acc_stderr": 0.031219569445301833, + "acc_norm": 0.35864978902953587, + "acc_norm_stderr": 0.031219569445301833 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2900912646675359, + "acc_stderr": 0.011590375554733096, + "acc_norm": 0.2900912646675359, + "acc_norm_stderr": 
0.011590375554733096 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2696078431372549, + "acc_stderr": 0.03114557065948678, + "acc_norm": 0.2696078431372549, + "acc_norm_stderr": 0.03114557065948678 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.30303030303030304, + "acc_stderr": 0.035886248000917075, + "acc_norm": 0.30303030303030304, + "acc_norm_stderr": 0.035886248000917075 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.23133414932680538, + "mc1_stderr": 0.014761945174862677, + "mc2": 0.3706017104903605, + "mc2_stderr": 0.014735026291520032 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.22077922077922077, + "acc_stderr": 0.01426015280354004, + "acc_norm": 0.30932703659976385, + "acc_norm_stderr": 0.015891320505520893 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + 
"harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "shangrilar/llama2-ko-7b-kullm-base", + "model_sha": "b7db1fa5f45f178d4e98ac52ece14064ded1b7c0", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/shleeeee/mistral-7b-ko-dpo-v1/result_2024-01-08 02:05:13.json b/shleeeee/mistral-7b-ko-dpo-v1/result_2024-01-08 02:05:13.json new file mode 100644 index 
0000000000000000000000000000000000000000..65a53a310ae10e23da24063fe918363a880d878a --- /dev/null +++ b/shleeeee/mistral-7b-ko-dpo-v1/result_2024-01-08 02:05:13.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3455631399317406, + "acc_stderr": 0.013896938461145683, + "acc_norm": 0.3856655290102389, + "acc_norm_stderr": 0.014224250973257175 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3761202947619996, + "acc_stderr": 0.004834207964061322, + "acc_norm": 0.4927305317665804, + "acc_norm_stderr": 0.0049892540118957615 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.49122807017543857, + "acc_stderr": 0.038342347441649924, + "acc_norm": 0.49122807017543857, + "acc_norm_stderr": 0.038342347441649924 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5728155339805825, + "acc_stderr": 0.04897957737781168, + "acc_norm": 0.5728155339805825, + "acc_norm_stderr": 0.04897957737781168 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4521072796934866, + "acc_stderr": 0.017797751493865623, + "acc_norm": 0.4521072796934866, + "acc_norm_stderr": 0.017797751493865623 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4222222222222222, + "acc_stderr": 0.04266763404099582, + "acc_norm": 0.4222222222222222, + "acc_norm_stderr": 0.04266763404099582 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3829787234042553, + "acc_stderr": 0.03177821250236922, + "acc_norm": 0.3829787234042553, + "acc_norm_stderr": 0.03177821250236922 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.43373493975903615, + "acc_stderr": 0.03858158940685515, + "acc_norm": 0.43373493975903615, + "acc_norm_stderr": 0.03858158940685515 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4919614147909968, + "acc_stderr": 0.028394421370984545, + "acc_norm": 0.4919614147909968, + "acc_norm_stderr": 
0.028394421370984545 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4484304932735426, + "acc_stderr": 0.03337883736255099, + "acc_norm": 0.4484304932735426, + "acc_norm_stderr": 0.03337883736255099 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4732824427480916, + "acc_stderr": 0.04379024936553893, + "acc_norm": 0.4732824427480916, + "acc_norm_stderr": 0.04379024936553893 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939098, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939098 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5404040404040404, + "acc_stderr": 0.035507024651313425, + "acc_norm": 0.5404040404040404, + "acc_norm_stderr": 0.035507024651313425 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4689655172413793, + "acc_stderr": 0.04158632762097828, + "acc_norm": 0.4689655172413793, + "acc_norm_stderr": 0.04158632762097828 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.04389869956808777, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.04389869956808777 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5294117647058824, + "acc_stderr": 0.03242225027115006, + "acc_norm": 0.5294117647058824, + "acc_norm_stderr": 0.03242225027115006 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.025294608023986483, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.025294608023986483 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + 
}, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.03481904844438803, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.03481904844438803 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4806451612903226, + "acc_stderr": 0.0284226874043121, + "acc_norm": 0.4806451612903226, + "acc_norm_stderr": 0.0284226874043121 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7435897435897436, + "acc_stderr": 0.028605953702004243, + "acc_norm": 0.7435897435897436, + "acc_norm_stderr": 0.028605953702004243 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4339622641509434, + "acc_stderr": 0.03050329201334259, + "acc_norm": 0.4339622641509434, + "acc_norm_stderr": 0.03050329201334259 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4909090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.4909090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.337037037037037, + "acc_stderr": 0.028820884666253255, + "acc_norm": 0.337037037037037, + "acc_norm_stderr": 0.028820884666253255 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.271523178807947, + "acc_stderr": 0.03631329803969653, + "acc_norm": 0.271523178807947, + "acc_norm_stderr": 0.03631329803969653 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5920398009950248, + "acc_stderr": 0.03475116365194092, + "acc_norm": 0.5920398009950248, + "acc_norm_stderr": 0.03475116365194092 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3815028901734104, + "acc_stderr": 0.03703851193099521, + "acc_norm": 0.3815028901734104, + "acc_norm_stderr": 0.03703851193099521 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.36507936507936506, + "acc_stderr": 0.02479606060269995, + "acc_norm": 0.36507936507936506, + "acc_norm_stderr": 0.02479606060269995 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3680555555555556, + "acc_stderr": 
0.040329990539607195, + "acc_norm": 0.3680555555555556, + "acc_norm_stderr": 0.040329990539607195 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.57, + "acc_stderr": 0.04975698519562426, + "acc_norm": 0.57, + "acc_norm_stderr": 0.04975698519562426 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5115606936416185, + "acc_stderr": 0.026911898686377927, + "acc_norm": 0.5115606936416185, + "acc_norm_stderr": 0.026911898686377927 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5030674846625767, + "acc_stderr": 0.03928297078179663, + "acc_norm": 0.5030674846625767, + "acc_norm_stderr": 0.03928297078179663 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.44753086419753085, + "acc_stderr": 0.02766713856942271, + "acc_norm": 0.44753086419753085, + "acc_norm_stderr": 0.02766713856942271 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5129533678756477, + "acc_stderr": 0.0360722806104775, + "acc_norm": 0.5129533678756477, + "acc_norm_stderr": 0.0360722806104775 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.32456140350877194, + "acc_stderr": 0.04404556157374768, + "acc_norm": 0.32456140350877194, + "acc_norm_stderr": 0.04404556157374768 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.4990825688073395, + "acc_stderr": 0.021437287056051215, + "acc_norm": 0.4990825688073395, + "acc_norm_stderr": 0.021437287056051215 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3412698412698413, + "acc_stderr": 0.042407993275749255, + "acc_norm": 0.3412698412698413, + "acc_norm_stderr": 0.042407993275749255 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.47058823529411764, + 
"acc_stderr": 0.028580341065138286, + "acc_norm": 0.47058823529411764, + "acc_norm_stderr": 0.028580341065138286 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7107438016528925, + "acc_stderr": 0.04139112727635464, + "acc_norm": 0.7107438016528925, + "acc_norm_stderr": 0.04139112727635464 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.42105263157894735, + "acc_stderr": 0.04017901275981748, + "acc_norm": 0.42105263157894735, + "acc_norm_stderr": 0.04017901275981748 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3937908496732026, + "acc_stderr": 0.01976621199107307, + "acc_norm": 0.3937908496732026, + "acc_norm_stderr": 0.01976621199107307 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.35815602836879434, + "acc_stderr": 0.028602085862759415, + "acc_norm": 0.35815602836879434, + "acc_norm_stderr": 0.028602085862759415 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.41964285714285715, + "acc_stderr": 0.04684099321077106, + "acc_norm": 0.41964285714285715, + "acc_norm_stderr": 0.04684099321077106 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.42592592592592593, + "acc_stderr": 0.03372343271653063, + "acc_norm": 0.42592592592592593, + "acc_norm_stderr": 0.03372343271653063 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.3307262569832402, + "acc_stderr": 0.01573502625896612, + "acc_norm": 0.3307262569832402, + "acc_norm_stderr": 0.01573502625896612 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237101, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237101 + }, + 
"harness|ko_mmlu_professional_medicine|5": { + "acc": 0.39338235294117646, + "acc_stderr": 0.029674288281311183, + "acc_norm": 0.39338235294117646, + "acc_norm_stderr": 0.029674288281311183 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5551020408163265, + "acc_stderr": 0.031814251181977865, + "acc_norm": 0.5551020408163265, + "acc_norm_stderr": 0.031814251181977865 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5822784810126582, + "acc_stderr": 0.032103530322412685, + "acc_norm": 0.5822784810126582, + "acc_norm_stderr": 0.032103530322412685 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3409387222946545, + "acc_stderr": 0.01210681720306721, + "acc_norm": 0.3409387222946545, + "acc_norm_stderr": 0.01210681720306721 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4264705882352941, + "acc_stderr": 0.034711579079534254, + "acc_norm": 0.4264705882352941, + "acc_norm_stderr": 0.034711579079534254 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.44242424242424244, + "acc_stderr": 0.03878372113711274, + "acc_norm": 0.44242424242424244, + "acc_norm_stderr": 0.03878372113711274 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3023255813953488, + "mc1_stderr": 0.016077509266133033, + "mc2": 0.47846810620077734, + "mc2_stderr": 0.015501031302441884 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5076741440377804, + "acc_stderr": 0.017188329219654273, + "acc_norm": 0.5430932703659976, + "acc_norm_stderr": 0.017126389093086777 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + 
"harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + 
"harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "shleeeee/mistral-7b-ko-dpo-v1", + "model_sha": "4c29bb64537425577d752bfe6ddec247935b255e", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/shleeeee/mistral-7b-ko-v1/result_2023-12-27 04:37:48.json b/shleeeee/mistral-7b-ko-v1/result_2023-12-27 04:37:48.json new file mode 100644 index 0000000000000000000000000000000000000000..1740e0b4c2ea1e693cea657509338a4a7e12b9dd --- /dev/null +++ b/shleeeee/mistral-7b-ko-v1/result_2023-12-27 04:37:48.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.30716723549488056, + "acc_stderr": 0.013481034054980945, + "acc_norm": 0.3395904436860068, + "acc_norm_stderr": 0.01383903976282016 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3599880501892053, + "acc_stderr": 0.0047901553709934494, + "acc_norm": 0.4552877912766381, + "acc_norm_stderr": 0.004969790407117545 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4678362573099415, + "acc_stderr": 0.038268824176603704, + "acc_norm": 0.4678362573099415, + "acc_norm_stderr": 0.038268824176603704 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.47572815533980584, + "acc_stderr": 0.049449010929737795, + "acc_norm": 0.47572815533980584, + "acc_norm_stderr": 0.049449010929737795 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.017570705239256537, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.017570705239256537 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3925925925925926, + "acc_stderr": 0.04218506215368879, + "acc_norm": 
0.3925925925925926, + "acc_norm_stderr": 0.04218506215368879 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3617021276595745, + "acc_stderr": 0.03141082197596241, + "acc_norm": 0.3617021276595745, + "acc_norm_stderr": 0.03141082197596241 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39759036144578314, + "acc_stderr": 0.038099730845402184, + "acc_norm": 0.39759036144578314, + "acc_norm_stderr": 0.038099730845402184 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4212218649517685, + "acc_stderr": 0.028043399858210635, + "acc_norm": 0.4212218649517685, + "acc_norm_stderr": 0.028043399858210635 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.42152466367713004, + "acc_stderr": 0.033141902221106564, + "acc_norm": 0.42152466367713004, + "acc_norm_stderr": 0.033141902221106564 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2824427480916031, + "acc_stderr": 0.03948406125768362, + "acc_norm": 0.2824427480916031, + "acc_norm_stderr": 0.03948406125768362 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.4696969696969697, + "acc_stderr": 0.03555804051763929, + "acc_norm": 0.4696969696969697, + "acc_norm_stderr": 0.03555804051763929 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3931034482758621, + "acc_stderr": 0.0407032901370707, + "acc_norm": 0.3931034482758621, + "acc_norm_stderr": 0.0407032901370707 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3235294117647059, + "acc_stderr": 0.046550104113196177, + "acc_norm": 0.3235294117647059, + "acc_norm_stderr": 0.046550104113196177 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.42016806722689076, + "acc_stderr": 
0.03206183783236152, + "acc_norm": 0.42016806722689076, + "acc_norm_stderr": 0.03206183783236152 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.37948717948717947, + "acc_stderr": 0.024603626924097413, + "acc_norm": 0.37948717948717947, + "acc_norm_stderr": 0.024603626924097413 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110175, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110175 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3793103448275862, + "acc_stderr": 0.03413963805906235, + "acc_norm": 0.3793103448275862, + "acc_norm_stderr": 0.03413963805906235 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.36774193548387096, + "acc_stderr": 0.02743086657997347, + "acc_norm": 0.36774193548387096, + "acc_norm_stderr": 0.02743086657997347 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7094017094017094, + "acc_stderr": 0.02974504857267407, + "acc_norm": 0.7094017094017094, + "acc_norm_stderr": 0.02974504857267407 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.41132075471698115, + "acc_stderr": 0.03028500925900981, + "acc_norm": 0.41132075471698115, + "acc_norm_stderr": 0.03028500925900981 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4818181818181818, + "acc_stderr": 0.04785964010794917, + "acc_norm": 0.4818181818181818, + "acc_norm_stderr": 0.04785964010794917 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.34074074074074073, + "acc_stderr": 0.02889774874113114, + "acc_norm": 0.34074074074074073, + "acc_norm_stderr": 0.02889774874113114 + }, + "harness|ko_mmlu_high_school_physics|5": { 
+ "acc": 0.33774834437086093, + "acc_stderr": 0.03861557546255169, + "acc_norm": 0.33774834437086093, + "acc_norm_stderr": 0.03861557546255169 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5323383084577115, + "acc_stderr": 0.035281314729336065, + "acc_norm": 0.5323383084577115, + "acc_norm_stderr": 0.035281314729336065 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.36416184971098264, + "acc_stderr": 0.03669072477416907, + "acc_norm": 0.36416184971098264, + "acc_norm_stderr": 0.03669072477416907 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.024278568024307695, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.024278568024307695 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.03852084696008534, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.03852084696008534 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.476878612716763, + "acc_stderr": 0.026890297881303125, + "acc_norm": 0.476878612716763, + "acc_norm_stderr": 0.026890297881303125 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4601226993865031, + "acc_stderr": 0.03915857291436971, + "acc_norm": 0.4601226993865031, + "acc_norm_stderr": 0.03915857291436971 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.42592592592592593, + "acc_stderr": 0.027513747284379424, + "acc_norm": 0.42592592592592593, + "acc_norm_stderr": 0.027513747284379424 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + 
"harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.46113989637305697, + "acc_stderr": 0.035975244117345775, + "acc_norm": 0.46113989637305697, + "acc_norm_stderr": 0.035975244117345775 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2894736842105263, + "acc_stderr": 0.04266339443159394, + "acc_norm": 0.2894736842105263, + "acc_norm_stderr": 0.04266339443159394 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3908256880733945, + "acc_stderr": 0.02092005834611107, + "acc_norm": 0.3908256880733945, + "acc_norm_stderr": 0.02092005834611107 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3492063492063492, + "acc_stderr": 0.04263906892795132, + "acc_norm": 0.3492063492063492, + "acc_norm_stderr": 0.04263906892795132 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.434640522875817, + "acc_stderr": 0.028384256704883034, + "acc_norm": 0.434640522875817, + "acc_norm_stderr": 0.028384256704883034 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6611570247933884, + "acc_stderr": 0.04320767807536671, + "acc_norm": 0.6611570247933884, + "acc_norm_stderr": 0.04320767807536671 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.375, + "acc_stderr": 0.039397364351956274, + "acc_norm": 0.375, + "acc_norm_stderr": 0.039397364351956274 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3464052287581699, + "acc_stderr": 0.01924978569171721, + "acc_norm": 0.3464052287581699, + "acc_norm_stderr": 0.01924978569171721 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.37943262411347517, + "acc_stderr": 0.028947338851614105, + "acc_norm": 0.37943262411347517, + "acc_norm_stderr": 0.028947338851614105 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.04547960999764377, + "acc_norm": 0.35714285714285715, + 
"acc_norm_stderr": 0.04547960999764377 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.39814814814814814, + "acc_stderr": 0.033384734032074016, + "acc_norm": 0.39814814814814814, + "acc_norm_stderr": 0.033384734032074016 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.3016759776536313, + "acc_stderr": 0.015350767572220285, + "acc_norm": 0.3016759776536313, + "acc_norm_stderr": 0.015350767572220285 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.64, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.64, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.39705882352941174, + "acc_stderr": 0.02972215209928006, + "acc_norm": 0.39705882352941174, + "acc_norm_stderr": 0.02972215209928006 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3795918367346939, + "acc_stderr": 0.03106721126287249, + "acc_norm": 0.3795918367346939, + "acc_norm_stderr": 0.03106721126287249 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5443037974683544, + "acc_stderr": 0.032419206846933335, + "acc_norm": 0.5443037974683544, + "acc_norm_stderr": 0.032419206846933335 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.34419817470664926, + "acc_stderr": 0.012134433741002574, + "acc_norm": 0.34419817470664926, + "acc_norm_stderr": 0.012134433741002574 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.39705882352941174, + "acc_stderr": 0.034341311647191286, + "acc_norm": 0.39705882352941174, + "acc_norm_stderr": 0.034341311647191286 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.46060606060606063, + "acc_stderr": 0.03892207016552013, + "acc_norm": 0.46060606060606063, + "acc_norm_stderr": 0.03892207016552013 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 
0.28151774785801714, + "mc1_stderr": 0.01574402724825605, + "mc2": 0.4682545459171819, + "mc2_stderr": 0.015428736721872028 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.42621015348288077, + "acc_stderr": 0.017002122609489263, + "acc_norm": 0.5100354191263282, + "acc_norm_stderr": 0.017186891286894074 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "shleeeee/mistral-7b-ko-v1", + "model_sha": "7fade3acf30fa50ff8ae5e11f85fdb6abad37f0b", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/shleeeee/mistral-7b-wiki/result_2023-11-28 13:22:58.json b/shleeeee/mistral-7b-wiki/result_2023-11-28 13:22:58.json new file mode 100644 index 0000000000000000000000000000000000000000..c1b027408930db12b200ef2f2efd99a08cc261e8 --- /dev/null +++ b/shleeeee/mistral-7b-wiki/result_2023-11-28 13:22:58.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.32764505119453924, + "acc_stderr": 0.01371584794071934, + "acc_norm": 0.363481228668942, + "acc_norm_stderr": 0.014056207319068283 + }, + "harness|ko_hellaswag|10": { + "acc": 0.36516630153355906, + "acc_stderr": 
0.00480492760877313, + "acc_norm": 0.4706233817964549, + "acc_norm_stderr": 0.004981161746388225 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5029239766081871, + "acc_stderr": 0.03834759370936839, + "acc_norm": 0.5029239766081871, + "acc_norm_stderr": 0.03834759370936839 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5339805825242718, + "acc_stderr": 0.04939291447273481, + "acc_norm": 0.5339805825242718, + "acc_norm_stderr": 0.04939291447273481 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4648786717752235, + "acc_stderr": 0.017835798806290642, + "acc_norm": 0.4648786717752235, + "acc_norm_stderr": 0.017835798806290642 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3851851851851852, + "acc_stderr": 0.042039210401562783, + "acc_norm": 0.3851851851851852, + "acc_norm_stderr": 0.042039210401562783 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3659574468085106, + "acc_stderr": 0.0314895582974553, + "acc_norm": 0.3659574468085106, + "acc_norm_stderr": 0.0314895582974553 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42168674698795183, + "acc_stderr": 0.03844453181770917, + "acc_norm": 0.42168674698795183, + "acc_norm_stderr": 0.03844453181770917 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4919614147909968, + "acc_stderr": 0.028394421370984545, + "acc_norm": 0.4919614147909968, + "acc_norm_stderr": 0.028394421370984545 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4260089686098655, + "acc_stderr": 0.0331883328621728, + "acc_norm": 0.4260089686098655, + "acc_norm_stderr": 0.0331883328621728 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.45038167938931295, + "acc_stderr": 0.04363643698524779, + "acc_norm": 0.45038167938931295, + "acc_norm_stderr": 0.04363643698524779 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.35, + "acc_stderr": 
0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5050505050505051, + "acc_stderr": 0.035621707606254015, + "acc_norm": 0.5050505050505051, + "acc_norm_stderr": 0.035621707606254015 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4827586206896552, + "acc_stderr": 0.04164188720169377, + "acc_norm": 0.4827586206896552, + "acc_norm_stderr": 0.04164188720169377 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.04389869956808777, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.04389869956808777 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.48739495798319327, + "acc_stderr": 0.03246816765752174, + "acc_norm": 0.48739495798319327, + "acc_norm_stderr": 0.03246816765752174 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.44358974358974357, + "acc_stderr": 0.025189149894764194, + "acc_norm": 0.44358974358974357, + "acc_norm_stderr": 0.025189149894764194 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237103, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237103 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5092592592592593, + "acc_stderr": 0.04832853553437056, + "acc_norm": 0.5092592592592593, + "acc_norm_stderr": 0.04832853553437056 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.45320197044334976, + "acc_stderr": 0.03502544650845872, + "acc_norm": 0.45320197044334976, + "acc_norm_stderr": 0.03502544650845872 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.45483870967741935, + "acc_stderr": 0.02832774309156106, + "acc_norm": 0.45483870967741935, + "acc_norm_stderr": 0.02832774309156106 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 
0.688034188034188, + "acc_stderr": 0.03035152732334493, + "acc_norm": 0.688034188034188, + "acc_norm_stderr": 0.03035152732334493 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.39622641509433965, + "acc_stderr": 0.03010279378179119, + "acc_norm": 0.39622641509433965, + "acc_norm_stderr": 0.03010279378179119 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5363636363636364, + "acc_stderr": 0.04776449162396197, + "acc_norm": 0.5363636363636364, + "acc_norm_stderr": 0.04776449162396197 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.32222222222222224, + "acc_stderr": 0.028493465091028597, + "acc_norm": 0.32222222222222224, + "acc_norm_stderr": 0.028493465091028597 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + "acc_stderr": 0.037345356767871984, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.037345356767871984 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6019900497512438, + "acc_stderr": 0.034611994290400135, + "acc_norm": 0.6019900497512438, + "acc_norm_stderr": 0.034611994290400135 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.37572254335260113, + "acc_stderr": 0.03692820767264867, + "acc_norm": 0.37572254335260113, + "acc_norm_stderr": 0.03692820767264867 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3439153439153439, + "acc_stderr": 0.024464426625596426, + "acc_norm": 0.3439153439153439, + "acc_norm_stderr": 0.024464426625596426 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3263888888888889, + "acc_stderr": 0.03921067198982266, + "acc_norm": 0.3263888888888889, + "acc_norm_stderr": 0.03921067198982266 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.58, + "acc_stderr": 0.04960449637488584, + "acc_norm": 0.58, + "acc_norm_stderr": 0.04960449637488584 + }, + 
"harness|ko_mmlu_moral_disputes|5": { + "acc": 0.48265895953757226, + "acc_stderr": 0.02690290045866664, + "acc_norm": 0.48265895953757226, + "acc_norm_stderr": 0.02690290045866664 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5337423312883436, + "acc_stderr": 0.039194155450484096, + "acc_norm": 0.5337423312883436, + "acc_norm_stderr": 0.039194155450484096 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.44135802469135804, + "acc_stderr": 0.027628737155668787, + "acc_norm": 0.44135802469135804, + "acc_norm_stderr": 0.027628737155668787 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.47668393782383417, + "acc_stderr": 0.03604513672442206, + "acc_norm": 0.47668393782383417, + "acc_norm_stderr": 0.03604513672442206 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3157894736842105, + "acc_stderr": 0.04372748290278007, + "acc_norm": 0.3157894736842105, + "acc_norm_stderr": 0.04372748290278007 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5100917431192661, + "acc_stderr": 0.021432956203453316, + "acc_norm": 0.5100917431192661, + "acc_norm_stderr": 0.021432956203453316 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.38095238095238093, + "acc_stderr": 0.043435254289490965, + "acc_norm": 0.38095238095238093, + "acc_norm_stderr": 0.043435254289490965 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4542483660130719, + "acc_stderr": 0.028509807802626567, + "acc_norm": 0.4542483660130719, + "acc_norm_stderr": 0.028509807802626567 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5867768595041323, + "acc_stderr": 0.04495087843548408, + "acc_norm": 0.5867768595041323, + 
"acc_norm_stderr": 0.04495087843548408 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40131578947368424, + "acc_stderr": 0.03988903703336284, + "acc_norm": 0.40131578947368424, + "acc_norm_stderr": 0.03988903703336284 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.39052287581699346, + "acc_stderr": 0.019737008998094604, + "acc_norm": 0.39052287581699346, + "acc_norm_stderr": 0.019737008998094604 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3617021276595745, + "acc_stderr": 0.028663820147199495, + "acc_norm": 0.3617021276595745, + "acc_norm_stderr": 0.028663820147199495 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.44642857142857145, + "acc_stderr": 0.047184714852195886, + "acc_norm": 0.44642857142857145, + "acc_norm_stderr": 0.047184714852195886 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4351851851851852, + "acc_stderr": 0.033812000056435254, + "acc_norm": 0.4351851851851852, + "acc_norm_stderr": 0.033812000056435254 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.32737430167597764, + "acc_stderr": 0.015694238967737386, + "acc_norm": 0.32737430167597764, + "acc_norm_stderr": 0.015694238967737386 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.62, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.62, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.34191176470588236, + "acc_stderr": 0.02881472242225418, + "acc_norm": 0.34191176470588236, + "acc_norm_stderr": 0.02881472242225418 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4775510204081633, + "acc_stderr": 0.03197694118713673, + "acc_norm": 0.4775510204081633, + "acc_norm_stderr": 0.03197694118713673 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 
0.5358649789029536, + "acc_stderr": 0.03246338898055659, + "acc_norm": 0.5358649789029536, + "acc_norm_stderr": 0.03246338898055659 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3246414602346806, + "acc_stderr": 0.011959089388530027, + "acc_norm": 0.3246414602346806, + "acc_norm_stderr": 0.011959089388530027 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4019607843137255, + "acc_stderr": 0.034411900234824655, + "acc_norm": 0.4019607843137255, + "acc_norm_stderr": 0.034411900234824655 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.46060606060606063, + "acc_stderr": 0.03892207016552013, + "acc_norm": 0.46060606060606063, + "acc_norm_stderr": 0.03892207016552013 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.26560587515299877, + "mc1_stderr": 0.0154610276272536, + "mc2": 0.4510146040568402, + "mc2_stderr": 0.015548731962691761 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.46989374262101535, + "acc_stderr": 0.01715916359017022, + "acc_norm": 0.5442739079102715, + "acc_norm_stderr": 0.017122829143292648 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + 
"harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "shleeeee/mistral-7b-wiki", + "model_sha": "2f4fcb2ee0756dd46308e60f0a0791caa9c71b75", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, 
+ "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/shleeeee/mistral-ko-7b-tech/result_2023-11-29 15:45:35.json b/shleeeee/mistral-ko-7b-tech/result_2023-11-29 15:45:35.json new file mode 100644 index 0000000000000000000000000000000000000000..35e03c27b31657274b97c2ee0445b5381f6fba38 --- /dev/null +++ b/shleeeee/mistral-ko-7b-tech/result_2023-11-29 15:45:35.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3199658703071672, + "acc_stderr": 0.013631345807016196, + "acc_norm": 0.3771331058020478, + "acc_norm_stderr": 0.014163366896192593 + }, + "harness|ko_hellaswag|10": { + "acc": 0.36745668193586933, + "acc_stderr": 0.004811269975450612, + "acc_norm": 0.47829117705636326, + "acc_norm_stderr": 0.004985076094464756 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4853801169590643, + "acc_stderr": 0.038331852752130205, + "acc_norm": 0.4853801169590643, + "acc_norm_stderr": 0.038331852752130205 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5533980582524272, + "acc_stderr": 0.04922424153458935, + "acc_norm": 0.5533980582524272, + "acc_norm_stderr": 0.04922424153458935 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.44189016602809705, + "acc_stderr": 0.017758800534214414, + "acc_norm": 0.44189016602809705, + "acc_norm_stderr": 0.017758800534214414 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37777777777777777, + "acc_stderr": 0.04188307537595853, + "acc_norm": 0.37777777777777777, + "acc_norm_stderr": 0.04188307537595853 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.42127659574468085, + "acc_stderr": 0.03227834510146268, + "acc_norm": 0.42127659574468085, + "acc_norm_stderr": 0.03227834510146268 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3795180722891566, + 
"acc_stderr": 0.037777988227480165, + "acc_norm": 0.3795180722891566, + "acc_norm_stderr": 0.037777988227480165 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4887459807073955, + "acc_stderr": 0.028390897396863526, + "acc_norm": 0.4887459807073955, + "acc_norm_stderr": 0.028390897396863526 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4439461883408072, + "acc_stderr": 0.03334625674242728, + "acc_norm": 0.4439461883408072, + "acc_norm_stderr": 0.03334625674242728 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.42748091603053434, + "acc_stderr": 0.04338920305792401, + "acc_norm": 0.42748091603053434, + "acc_norm_stderr": 0.04338920305792401 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5404040404040404, + "acc_stderr": 0.035507024651313425, + "acc_norm": 0.5404040404040404, + "acc_norm_stderr": 0.035507024651313425 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4689655172413793, + "acc_stderr": 0.04158632762097828, + "acc_norm": 0.4689655172413793, + "acc_norm_stderr": 0.04158632762097828 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.042207736591714534, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.042207736591714534 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.47058823529411764, + "acc_stderr": 0.03242225027115007, + "acc_norm": 0.47058823529411764, + "acc_norm_stderr": 0.03242225027115007 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.46153846153846156, + "acc_stderr": 0.025275892070240627, + "acc_norm": 0.46153846153846156, + "acc_norm_stderr": 0.025275892070240627 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.58, + "acc_stderr": 0.04960449637488583, + "acc_norm": 0.58, + "acc_norm_stderr": 0.04960449637488583 + }, + 
"harness|ko_mmlu_global_facts|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.45320197044334976, + "acc_stderr": 0.03502544650845872, + "acc_norm": 0.45320197044334976, + "acc_norm_stderr": 0.03502544650845872 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4161290322580645, + "acc_stderr": 0.02804098138076155, + "acc_norm": 0.4161290322580645, + "acc_norm_stderr": 0.02804098138076155 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7264957264957265, + "acc_stderr": 0.02920254015343118, + "acc_norm": 0.7264957264957265, + "acc_norm_stderr": 0.02920254015343118 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4339622641509434, + "acc_stderr": 0.030503292013342592, + "acc_norm": 0.4339622641509434, + "acc_norm_stderr": 0.030503292013342592 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5363636363636364, + "acc_stderr": 0.04776449162396197, + "acc_norm": 0.5363636363636364, + "acc_norm_stderr": 0.04776449162396197 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.027940457136228405, + "acc_norm": 0.3, + "acc_norm_stderr": 0.027940457136228405 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.037101857261199946, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.037101857261199946 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6268656716417911, + "acc_stderr": 0.03419832608176007, + "acc_norm": 0.6268656716417911, + "acc_norm_stderr": 0.03419832608176007 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4046242774566474, + "acc_stderr": 0.0374246119388725, + "acc_norm": 0.4046242774566474, + "acc_norm_stderr": 0.0374246119388725 + }, + 
"harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3306878306878307, + "acc_stderr": 0.024229965298425082, + "acc_norm": 0.3306878306878307, + "acc_norm_stderr": 0.024229965298425082 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2847222222222222, + "acc_stderr": 0.037738099906869334, + "acc_norm": 0.2847222222222222, + "acc_norm_stderr": 0.037738099906869334 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.57, + "acc_stderr": 0.04975698519562426, + "acc_norm": 0.57, + "acc_norm_stderr": 0.04975698519562426 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5, + "acc_stderr": 0.026919095102908273, + "acc_norm": 0.5, + "acc_norm_stderr": 0.026919095102908273 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.44171779141104295, + "acc_stderr": 0.03901591825836184, + "acc_norm": 0.44171779141104295, + "acc_norm_stderr": 0.03901591825836184 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.45987654320987653, + "acc_stderr": 0.02773102275353928, + "acc_norm": 0.45987654320987653, + "acc_norm_stderr": 0.02773102275353928 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.047609522856952344, + "acc_norm": 0.34, + "acc_norm_stderr": 0.047609522856952344 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.45595854922279794, + "acc_stderr": 0.03594413711272437, + "acc_norm": 0.45595854922279794, + "acc_norm_stderr": 0.03594413711272437 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.34210526315789475, + "acc_stderr": 0.044629175353369376, + "acc_norm": 0.34210526315789475, + "acc_norm_stderr": 0.044629175353369376 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.4917431192660551, + "acc_stderr": 0.021434399918214334, + "acc_norm": 0.4917431192660551, + "acc_norm_stderr": 0.021434399918214334 + }, + 
"harness|ko_mmlu_formal_logic|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.04285714285714281, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.04285714285714281 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.45751633986928103, + "acc_stderr": 0.028526383452142635, + "acc_norm": 0.45751633986928103, + "acc_norm_stderr": 0.028526383452142635 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.043913262867240704, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.043913262867240704 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.42105263157894735, + "acc_stderr": 0.04017901275981748, + "acc_norm": 0.42105263157894735, + "acc_norm_stderr": 0.04017901275981748 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.380718954248366, + "acc_stderr": 0.019643801557924803, + "acc_norm": 0.380718954248366, + "acc_norm_stderr": 0.019643801557924803 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.34397163120567376, + "acc_stderr": 0.028338017428611327, + "acc_norm": 0.34397163120567376, + "acc_norm_stderr": 0.028338017428611327 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4642857142857143, + "acc_stderr": 0.04733667890053756, + "acc_norm": 0.4642857142857143, + "acc_norm_stderr": 0.04733667890053756 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.39351851851851855, + "acc_stderr": 0.03331747876370312, + "acc_norm": 0.39351851851851855, + "acc_norm_stderr": 0.03331747876370312 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.3329608938547486, + "acc_stderr": 0.015761716178397563, + "acc_norm": 0.3329608938547486, + "acc_norm_stderr": 0.015761716178397563 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + 
"acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.39705882352941174, + "acc_stderr": 0.029722152099280065, + "acc_norm": 0.39705882352941174, + "acc_norm_stderr": 0.029722152099280065 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.49795918367346936, + "acc_stderr": 0.0320089533497105, + "acc_norm": 0.49795918367346936, + "acc_norm_stderr": 0.0320089533497105 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5864978902953587, + "acc_stderr": 0.03205649904851858, + "acc_norm": 0.5864978902953587, + "acc_norm_stderr": 0.03205649904851858 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.33833116036505867, + "acc_stderr": 0.012084265626344215, + "acc_norm": 0.33833116036505867, + "acc_norm_stderr": 0.012084265626344215 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.37254901960784315, + "acc_stderr": 0.03393388584958404, + "acc_norm": 0.37254901960784315, + "acc_norm_stderr": 0.03393388584958404 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.41818181818181815, + "acc_stderr": 0.03851716319398396, + "acc_norm": 0.41818181818181815, + "acc_norm_stderr": 0.03851716319398396 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2668298653610771, + "mc1_stderr": 0.015483691939237274, + "mc2": 0.43089635616262106, + "mc2_stderr": 0.015490661650732165 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4675324675324675, + "acc_stderr": 0.017154073716682865, + "acc_norm": 0.5100354191263282, + "acc_norm_stderr": 0.017186891286894063 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + 
"harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + 
"harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "shleeeee/mistral-ko-7b-tech", + "model_sha": "5d238deeb4e026361623067f5ee59a89699a4f66", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/shleeeee/mistral-ko-7b-wiki-neft/result_2023-11-29 04:54:52.json b/shleeeee/mistral-ko-7b-wiki-neft/result_2023-11-29 04:54:52.json new file mode 100644 index 0000000000000000000000000000000000000000..c21cc04c0b03d93190f3a7996f0213d54019f307 --- /dev/null +++ b/shleeeee/mistral-ko-7b-wiki-neft/result_2023-11-29 04:54:52.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.27559726962457337, + "acc_stderr": 0.013057169655761838, + "acc_norm": 0.3302047781569966, + "acc_norm_stderr": 0.013743085603760426 + }, + "harness|ko_hellaswag|10": { + "acc": 0.33897629954192393, + "acc_stderr": 0.004723943549005987, + "acc_norm": 0.410973909579765, + "acc_norm_stderr": 0.004910049928688087 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4093567251461988, + "acc_stderr": 0.037712831076265434, + "acc_norm": 0.4093567251461988, + "acc_norm_stderr": 0.037712831076265434 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4174757281553398, + "acc_stderr": 0.048828405482122375, + "acc_norm": 0.4174757281553398, + "acc_norm_stderr": 0.048828405482122375 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4163473818646232, + "acc_stderr": 
0.017627948030430298, + "acc_norm": 0.4163473818646232, + "acc_norm_stderr": 0.017627948030430298 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4, + "acc_stderr": 0.04232073695151589, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04232073695151589 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909284, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909284 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3446808510638298, + "acc_stderr": 0.03106898596312215, + "acc_norm": 0.3446808510638298, + "acc_norm_stderr": 0.03106898596312215 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.35542168674698793, + "acc_stderr": 0.03726214354322415, + "acc_norm": 0.35542168674698793, + "acc_norm_stderr": 0.03726214354322415 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.45016077170418006, + "acc_stderr": 0.028256660723360187, + "acc_norm": 0.45016077170418006, + "acc_norm_stderr": 0.028256660723360187 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3004484304932735, + "acc_stderr": 0.03076935200822915, + "acc_norm": 0.3004484304932735, + "acc_norm_stderr": 0.03076935200822915 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4351145038167939, + "acc_stderr": 0.04348208051644858, + "acc_norm": 0.4351145038167939, + "acc_norm_stderr": 0.04348208051644858 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.35, + "acc_stderr": 0.04793724854411021, + "acc_norm": 0.35, + "acc_norm_stderr": 0.04793724854411021 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.37373737373737376, + "acc_stderr": 0.03446897738659333, + "acc_norm": 0.37373737373737376, + "acc_norm_stderr": 0.03446897738659333 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.503448275862069, + "acc_stderr": 0.04166567577101579, + "acc_norm": 0.503448275862069, + "acc_norm_stderr": 0.04166567577101579 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.04440521906179328, + 
"acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.04440521906179328 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.44537815126050423, + "acc_stderr": 0.0322841062671639, + "acc_norm": 0.44537815126050423, + "acc_norm_stderr": 0.0322841062671639 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.40512820512820513, + "acc_stderr": 0.024890471769938145, + "acc_norm": 0.40512820512820513, + "acc_norm_stderr": 0.024890471769938145 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39901477832512317, + "acc_stderr": 0.03445487686264715, + "acc_norm": 0.39901477832512317, + "acc_norm_stderr": 0.03445487686264715 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3580645161290323, + "acc_stderr": 0.027273890594300645, + "acc_norm": 0.3580645161290323, + "acc_norm_stderr": 0.027273890594300645 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6068376068376068, + "acc_stderr": 0.03199957924651047, + "acc_norm": 0.6068376068376068, + "acc_norm_stderr": 0.03199957924651047 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.35471698113207545, + "acc_stderr": 0.029445175328199593, + "acc_norm": 0.35471698113207545, + "acc_norm_stderr": 0.029445175328199593 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4727272727272727, + "acc_stderr": 0.04782001791380063, + "acc_norm": 0.4727272727272727, + "acc_norm_stderr": 0.04782001791380063 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 
0.3148148148148148, + "acc_stderr": 0.02831753349606648, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.02831753349606648 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.32450331125827814, + "acc_stderr": 0.038227469376587525, + "acc_norm": 0.32450331125827814, + "acc_norm_stderr": 0.038227469376587525 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5472636815920398, + "acc_stderr": 0.035197027175769155, + "acc_norm": 0.5472636815920398, + "acc_norm_stderr": 0.035197027175769155 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3236994219653179, + "acc_stderr": 0.0356760379963917, + "acc_norm": 0.3236994219653179, + "acc_norm_stderr": 0.0356760379963917 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3201058201058201, + "acc_stderr": 0.0240268463928735, + "acc_norm": 0.3201058201058201, + "acc_norm_stderr": 0.0240268463928735 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2847222222222222, + "acc_stderr": 0.03773809990686935, + "acc_norm": 0.2847222222222222, + "acc_norm_stderr": 0.03773809990686935 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.55, + "acc_stderr": 0.05, + "acc_norm": 0.55, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.42485549132947975, + "acc_stderr": 0.02661335084026174, + "acc_norm": 0.42485549132947975, + "acc_norm_stderr": 0.02661335084026174 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.43558282208588955, + "acc_stderr": 0.03895632464138937, + "acc_norm": 0.43558282208588955, + "acc_norm_stderr": 0.03895632464138937 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4351851851851852, + "acc_stderr": 0.027586006221607718, + "acc_norm": 0.4351851851851852, + "acc_norm_stderr": 0.027586006221607718 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.37, + 
"acc_stderr": 0.048523658709390974, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709390974 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.36787564766839376, + "acc_stderr": 0.034801756684660366, + "acc_norm": 0.36787564766839376, + "acc_norm_stderr": 0.034801756684660366 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2982456140350877, + "acc_stderr": 0.04303684033537317, + "acc_norm": 0.2982456140350877, + "acc_norm_stderr": 0.04303684033537317 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3889908256880734, + "acc_stderr": 0.02090230088739286, + "acc_norm": 0.3889908256880734, + "acc_norm_stderr": 0.02090230088739286 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30158730158730157, + "acc_stderr": 0.04104947269903394, + "acc_norm": 0.30158730158730157, + "acc_norm_stderr": 0.04104947269903394 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4477124183006536, + "acc_stderr": 0.028472938478033526, + "acc_norm": 0.4477124183006536, + "acc_norm_stderr": 0.028472938478033526 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5619834710743802, + "acc_stderr": 0.045291468044357915, + "acc_norm": 0.5619834710743802, + "acc_norm_stderr": 0.045291468044357915 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3618421052631579, + "acc_stderr": 0.03910525752849726, + "acc_norm": 0.3618421052631579, + "acc_norm_stderr": 0.03910525752849726 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3366013071895425, + "acc_stderr": 0.019117213911495144, + "acc_norm": 0.3366013071895425, + "acc_norm_stderr": 0.019117213911495144 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.33687943262411346, + "acc_stderr": 0.02819553487396673, + "acc_norm": 0.33687943262411346, + "acc_norm_stderr": 0.02819553487396673 + }, + 
"harness|ko_mmlu_machine_learning|5": { + "acc": 0.25892857142857145, + "acc_stderr": 0.041577515398656284, + "acc_norm": 0.25892857142857145, + "acc_norm_stderr": 0.041577515398656284 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.44907407407407407, + "acc_stderr": 0.03392238405321617, + "acc_norm": 0.44907407407407407, + "acc_norm_stderr": 0.03392238405321617 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.31620111731843575, + "acc_stderr": 0.015551673652172552, + "acc_norm": 0.31620111731843575, + "acc_norm_stderr": 0.015551673652172552 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3272058823529412, + "acc_stderr": 0.028501452860396563, + "acc_norm": 0.3272058823529412, + "acc_norm_stderr": 0.028501452860396563 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5061224489795918, + "acc_stderr": 0.03200682020163907, + "acc_norm": 0.5061224489795918, + "acc_norm_stderr": 0.03200682020163907 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.45569620253164556, + "acc_stderr": 0.03241920684693335, + "acc_norm": 0.45569620253164556, + "acc_norm_stderr": 0.03241920684693335 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.30182529335071706, + "acc_stderr": 0.01172435051810589, + "acc_norm": 0.30182529335071706, + "acc_norm_stderr": 0.01172435051810589 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4068627450980392, + "acc_stderr": 0.03447891136353382, + "acc_norm": 0.4068627450980392, + "acc_norm_stderr": 0.03447891136353382 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4121212121212121, + "acc_stderr": 0.03843566993588718, 
+ "acc_norm": 0.4121212121212121, + "acc_norm_stderr": 0.03843566993588718 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2423500611995104, + "mc1_stderr": 0.015000674373570345, + "mc2": 0.410345331144556, + "mc2_stderr": 0.015747686091796973 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.36835891381345925, + "acc_stderr": 0.016583858982639074, + "acc_norm": 0.4380165289256198, + "acc_norm_stderr": 0.017057753702160287 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + 
"harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "shleeeee/mistral-ko-7b-wiki-neft", + "model_sha": "5e22bcb5df31050b2bca6d82b593c87b7fffe462", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/shleeeee/mistral-ko-OpenOrca-2000/result_2023-12-04 13:24:07.json b/shleeeee/mistral-ko-OpenOrca-2000/result_2023-12-04 13:24:07.json new file mode 100644 index 0000000000000000000000000000000000000000..f59617995f869323c57a58cafcedb932a21623d1 --- /dev/null +++ b/shleeeee/mistral-ko-OpenOrca-2000/result_2023-12-04 13:24:07.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3225255972696246, + "acc_stderr": 0.013659980894277371, + "acc_norm": 0.3720136518771331, + "acc_norm_stderr": 
0.014124597881844454 + }, + "harness|ko_hellaswag|10": { + "acc": 0.37094204341764586, + "acc_stderr": 0.004820697457420417, + "acc_norm": 0.47769368651663013, + "acc_norm_stderr": 0.0049848133910162075 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4678362573099415, + "acc_stderr": 0.03826882417660369, + "acc_norm": 0.4678362573099415, + "acc_norm_stderr": 0.03826882417660369 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5339805825242718, + "acc_stderr": 0.04939291447273481, + "acc_norm": 0.5339805825242718, + "acc_norm_stderr": 0.04939291447273481 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.44572158365261816, + "acc_stderr": 0.017774297282479503, + "acc_norm": 0.44572158365261816, + "acc_norm_stderr": 0.017774297282479503 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.04171654161354543, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.04171654161354543 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3617021276595745, + "acc_stderr": 0.031410821975962386, + "acc_norm": 0.3617021276595745, + "acc_norm_stderr": 0.031410821975962386 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.40963855421686746, + "acc_stderr": 0.03828401115079022, + "acc_norm": 0.40963855421686746, + "acc_norm_stderr": 0.03828401115079022 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.42443729903536975, + "acc_stderr": 0.028071928247946208, + "acc_norm": 0.42443729903536975, + "acc_norm_stderr": 0.028071928247946208 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4260089686098655, + "acc_stderr": 0.0331883328621728, + "acc_norm": 0.4260089686098655, + "acc_norm_stderr": 0.0331883328621728 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.35877862595419846, + "acc_stderr": 0.04206739313864908, + "acc_norm": 0.35877862595419846, + 
"acc_norm_stderr": 0.04206739313864908 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5, + "acc_stderr": 0.035623524993954825, + "acc_norm": 0.5, + "acc_norm_stderr": 0.035623524993954825 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4827586206896552, + "acc_stderr": 0.04164188720169377, + "acc_norm": 0.4827586206896552, + "acc_norm_stderr": 0.04164188720169377 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.30392156862745096, + "acc_stderr": 0.045766654032077636, + "acc_norm": 0.30392156862745096, + "acc_norm_stderr": 0.045766654032077636 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.44537815126050423, + "acc_stderr": 0.0322841062671639, + "acc_norm": 0.44537815126050423, + "acc_norm_stderr": 0.0322841062671639 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.43846153846153846, + "acc_stderr": 0.02515826601686855, + "acc_norm": 0.43846153846153846, + "acc_norm_stderr": 0.02515826601686855 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.03481904844438803, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.03481904844438803 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.45806451612903226, + "acc_stderr": 0.028343787250540636, + "acc_norm": 0.45806451612903226, 
+ "acc_norm_stderr": 0.028343787250540636 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7264957264957265, + "acc_stderr": 0.029202540153431187, + "acc_norm": 0.7264957264957265, + "acc_norm_stderr": 0.029202540153431187 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.41509433962264153, + "acc_stderr": 0.030325945789286102, + "acc_norm": 0.41509433962264153, + "acc_norm_stderr": 0.030325945789286102 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.04769300568972744, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.04769300568972744 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.34814814814814815, + "acc_stderr": 0.029045600290616258, + "acc_norm": 0.34814814814814815, + "acc_norm_stderr": 0.029045600290616258 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.038020397601079024, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.038020397601079024 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5522388059701493, + "acc_stderr": 0.03516184772952167, + "acc_norm": 0.5522388059701493, + "acc_norm_stderr": 0.03516184772952167 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.36416184971098264, + "acc_stderr": 0.03669072477416907, + "acc_norm": 0.36416184971098264, + "acc_norm_stderr": 0.03669072477416907 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.38095238095238093, + "acc_stderr": 0.025010749116137588, + "acc_norm": 0.38095238095238093, + "acc_norm_stderr": 0.025010749116137588 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3680555555555556, + "acc_stderr": 0.040329990539607195, + "acc_norm": 0.3680555555555556, + "acc_norm_stderr": 0.040329990539607195 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.62, + "acc_stderr": 
0.048783173121456344, + "acc_norm": 0.62, + "acc_norm_stderr": 0.048783173121456344 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5028901734104047, + "acc_stderr": 0.02691864538323901, + "acc_norm": 0.5028901734104047, + "acc_norm_stderr": 0.02691864538323901 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.49693251533742333, + "acc_stderr": 0.03928297078179663, + "acc_norm": 0.49693251533742333, + "acc_norm_stderr": 0.03928297078179663 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4351851851851852, + "acc_stderr": 0.027586006221607718, + "acc_norm": 0.4351851851851852, + "acc_norm_stderr": 0.027586006221607718 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.46113989637305697, + "acc_stderr": 0.03597524411734578, + "acc_norm": 0.46113989637305697, + "acc_norm_stderr": 0.03597524411734578 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.044346007015849245, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.044346007015849245 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.46055045871559636, + "acc_stderr": 0.021370494609995096, + "acc_norm": 0.46055045871559636, + "acc_norm_stderr": 0.021370494609995096 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3253968253968254, + "acc_stderr": 0.04190596438871136, + "acc_norm": 0.3253968253968254, + "acc_norm_stderr": 0.04190596438871136 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.46078431372549017, + "acc_stderr": 0.028541722692618874, + "acc_norm": 0.46078431372549017, + "acc_norm_stderr": 0.028541722692618874 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 
0.6528925619834711, + "acc_stderr": 0.04345724570292534, + "acc_norm": 0.6528925619834711, + "acc_norm_stderr": 0.04345724570292534 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4276315789473684, + "acc_stderr": 0.040260970832965565, + "acc_norm": 0.4276315789473684, + "acc_norm_stderr": 0.040260970832965565 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3660130718954248, + "acc_stderr": 0.019488025745529672, + "acc_norm": 0.3660130718954248, + "acc_norm_stderr": 0.019488025745529672 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3723404255319149, + "acc_stderr": 0.02883892147125146, + "acc_norm": 0.3723404255319149, + "acc_norm_stderr": 0.02883892147125146 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4375, + "acc_stderr": 0.04708567521880525, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.04708567521880525 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.36574074074074076, + "acc_stderr": 0.032847388576472056, + "acc_norm": 0.36574074074074076, + "acc_norm_stderr": 0.032847388576472056 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2346368715083799, + "acc_stderr": 0.014173044098303673, + "acc_norm": 0.2346368715083799, + "acc_norm_stderr": 0.014173044098303673 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.41544117647058826, + "acc_stderr": 0.02993534270787775, + "acc_norm": 0.41544117647058826, + "acc_norm_stderr": 0.02993534270787775 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3551020408163265, + "acc_stderr": 0.03063565515038764, + "acc_norm": 0.3551020408163265, + "acc_norm_stderr": 0.03063565515038764 + }, + 
"harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5232067510548524, + "acc_stderr": 0.032512152011410174, + "acc_norm": 0.5232067510548524, + "acc_norm_stderr": 0.032512152011410174 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3239895697522816, + "acc_stderr": 0.011952840809646561, + "acc_norm": 0.3239895697522816, + "acc_norm_stderr": 0.011952840809646561 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.39705882352941174, + "acc_stderr": 0.034341311647191286, + "acc_norm": 0.39705882352941174, + "acc_norm_stderr": 0.034341311647191286 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.38181818181818183, + "acc_stderr": 0.03793713171165634, + "acc_norm": 0.38181818181818183, + "acc_norm_stderr": 0.03793713171165634 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3011015911872705, + "mc1_stderr": 0.016058999026100623, + "mc2": 0.48435280097322475, + "mc2_stderr": 0.015611046017023626 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4309327036599764, + "acc_stderr": 0.017025558196043133, + "acc_norm": 0.4722550177095632, + "acc_norm_stderr": 0.017163867979456005 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + 
"harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "shleeeee/mistral-ko-OpenOrca-2000", + "model_sha": 
"ca1dfe364d0d30557b5fec19ba988b3a0aecc443", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/shleeeee/mistral-ko-OpenOrca-Platypus-v1/result_2023-12-07 22:05:15.json b/shleeeee/mistral-ko-OpenOrca-Platypus-v1/result_2023-12-07 22:05:15.json new file mode 100644 index 0000000000000000000000000000000000000000..a2a2716f61e24fc0ca47db29e2c9deb27c6e134a --- /dev/null +++ b/shleeeee/mistral-ko-OpenOrca-Platypus-v1/result_2023-12-07 22:05:15.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3319112627986348, + "acc_stderr": 0.013760988200880533, + "acc_norm": 0.3839590443686007, + "acc_norm_stderr": 0.014212444980651894 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3668591913961362, + "acc_stderr": 0.004809626723626832, + "acc_norm": 0.4765982871937861, + "acc_norm_stderr": 0.00498431320579144 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4619883040935672, + "acc_stderr": 0.03823727092882307, + "acc_norm": 0.4619883040935672, + "acc_norm_stderr": 0.03823727092882307 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5922330097087378, + "acc_stderr": 0.04865777570410769, + "acc_norm": 0.5922330097087378, + "acc_norm_stderr": 0.04865777570410769 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.45849297573435505, + "acc_stderr": 0.01781824860346556, + "acc_norm": 0.45849297573435505, + "acc_norm_stderr": 0.01781824860346556 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3925925925925926, + "acc_stderr": 0.04218506215368879, + "acc_norm": 0.3925925925925926, + "acc_norm_stderr": 0.04218506215368879 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3872340425531915, + "acc_stderr": 
0.03184389265339526, + "acc_norm": 0.3872340425531915, + "acc_norm_stderr": 0.03184389265339526 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.40963855421686746, + "acc_stderr": 0.03828401115079022, + "acc_norm": 0.40963855421686746, + "acc_norm_stderr": 0.03828401115079022 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4919614147909968, + "acc_stderr": 0.028394421370984545, + "acc_norm": 0.4919614147909968, + "acc_norm_stderr": 0.028394421370984545 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4080717488789238, + "acc_stderr": 0.03298574607842821, + "acc_norm": 0.4080717488789238, + "acc_norm_stderr": 0.03298574607842821 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48091603053435117, + "acc_stderr": 0.04382094705550988, + "acc_norm": 0.48091603053435117, + "acc_norm_stderr": 0.04382094705550988 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.03547601494006937, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.03547601494006937 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.43448275862068964, + "acc_stderr": 0.041307408795554966, + "acc_norm": 0.43448275862068964, + "acc_norm_stderr": 0.041307408795554966 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.30392156862745096, + "acc_stderr": 0.045766654032077615, + "acc_norm": 0.30392156862745096, + "acc_norm_stderr": 0.045766654032077615 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5798319327731093, + "acc_stderr": 0.03206183783236152, + "acc_norm": 0.5798319327731093, + "acc_norm_stderr": 0.03206183783236152 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.45897435897435895, + "acc_stderr": 0.025265525491284295, + "acc_norm": 0.45897435897435895, + "acc_norm_stderr": 0.025265525491284295 + }, + 
"harness|ko_mmlu_computer_security|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5277777777777778, + "acc_stderr": 0.04826217294139894, + "acc_norm": 0.5277777777777778, + "acc_norm_stderr": 0.04826217294139894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.43349753694581283, + "acc_stderr": 0.034867317274198714, + "acc_norm": 0.43349753694581283, + "acc_norm_stderr": 0.034867317274198714 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.49032258064516127, + "acc_stderr": 0.02843867799890955, + "acc_norm": 0.49032258064516127, + "acc_norm_stderr": 0.02843867799890955 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7435897435897436, + "acc_stderr": 0.02860595370200425, + "acc_norm": 0.7435897435897436, + "acc_norm_stderr": 0.02860595370200425 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.45660377358490567, + "acc_stderr": 0.030656748696739428, + "acc_norm": 0.45660377358490567, + "acc_norm_stderr": 0.030656748696739428 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4636363636363636, + "acc_stderr": 0.047764491623961985, + "acc_norm": 0.4636363636363636, + "acc_norm_stderr": 0.047764491623961985 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.34074074074074073, + "acc_stderr": 0.02889774874113115, + "acc_norm": 0.34074074074074073, + "acc_norm_stderr": 0.02889774874113115 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2781456953642384, + "acc_stderr": 0.03658603262763743, + "acc_norm": 0.2781456953642384, + "acc_norm_stderr": 0.03658603262763743 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5970149253731343, + "acc_stderr": 0.034683432951111266, + "acc_norm": 0.5970149253731343, + "acc_norm_stderr": 
0.034683432951111266 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3930635838150289, + "acc_stderr": 0.0372424959581773, + "acc_norm": 0.3930635838150289, + "acc_norm_stderr": 0.0372424959581773 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.02487081525105709, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.02487081525105709 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3472222222222222, + "acc_stderr": 0.039812405437178615, + "acc_norm": 0.3472222222222222, + "acc_norm_stderr": 0.039812405437178615 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.49421965317919075, + "acc_stderr": 0.026917296179149123, + "acc_norm": 0.49421965317919075, + "acc_norm_stderr": 0.026917296179149123 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5337423312883436, + "acc_stderr": 0.039194155450484096, + "acc_norm": 0.5337423312883436, + "acc_norm_stderr": 0.039194155450484096 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.44135802469135804, + "acc_stderr": 0.027628737155668784, + "acc_norm": 0.44135802469135804, + "acc_norm_stderr": 0.027628737155668784 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5025906735751295, + "acc_stderr": 0.03608390745384486, + "acc_norm": 0.5025906735751295, + "acc_norm_stderr": 0.03608390745384486 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.34210526315789475, + "acc_stderr": 0.044629175353369376, + "acc_norm": 0.34210526315789475, + 
"acc_norm_stderr": 0.044629175353369376 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.48990825688073397, + "acc_stderr": 0.02143295620345332, + "acc_norm": 0.48990825688073397, + "acc_norm_stderr": 0.02143295620345332 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3968253968253968, + "acc_stderr": 0.04375888492727062, + "acc_norm": 0.3968253968253968, + "acc_norm_stderr": 0.04375888492727062 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.45751633986928103, + "acc_stderr": 0.028526383452142624, + "acc_norm": 0.45751633986928103, + "acc_norm_stderr": 0.028526383452142624 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6694214876033058, + "acc_stderr": 0.04294340845212094, + "acc_norm": 0.6694214876033058, + "acc_norm_stderr": 0.04294340845212094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4407894736842105, + "acc_stderr": 0.040403110624904356, + "acc_norm": 0.4407894736842105, + "acc_norm_stderr": 0.040403110624904356 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.42810457516339867, + "acc_stderr": 0.020017629214213087, + "acc_norm": 0.42810457516339867, + "acc_norm_stderr": 0.020017629214213087 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.34397163120567376, + "acc_stderr": 0.02833801742861131, + "acc_norm": 0.34397163120567376, + "acc_norm_stderr": 0.02833801742861131 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.04547960999764376, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.04547960999764376 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.03324708911809117, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.03324708911809117 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.3128491620111732, + 
"acc_stderr": 0.015506892594647272, + "acc_norm": 0.3128491620111732, + "acc_norm_stderr": 0.015506892594647272 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.41911764705882354, + "acc_stderr": 0.029972807170464622, + "acc_norm": 0.41911764705882354, + "acc_norm_stderr": 0.029972807170464622 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4897959183673469, + "acc_stderr": 0.03200255347893782, + "acc_norm": 0.4897959183673469, + "acc_norm_stderr": 0.03200255347893782 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5738396624472574, + "acc_stderr": 0.03219035703131774, + "acc_norm": 0.5738396624472574, + "acc_norm_stderr": 0.03219035703131774 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3285528031290743, + "acc_stderr": 0.011996027247502932, + "acc_norm": 0.3285528031290743, + "acc_norm_stderr": 0.011996027247502932 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.44607843137254904, + "acc_stderr": 0.03488845451304974, + "acc_norm": 0.44607843137254904, + "acc_norm_stderr": 0.03488845451304974 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4727272727272727, + "acc_stderr": 0.03898531605579419, + "acc_norm": 0.4727272727272727, + "acc_norm_stderr": 0.03898531605579419 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.28518971848225216, + "mc1_stderr": 0.015805827874454895, + "mc2": 0.45820405883067095, + "mc2_stderr": 0.015482841809930594 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5088547815820543, + "acc_stderr": 0.017187658199336743, + "acc_norm": 0.5655253837072018, + "acc_norm_stderr": 0.01704209862082492 + } + }, + "versions": { 
+ "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + 
"harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "shleeeee/mistral-ko-OpenOrca-Platypus-v1", + "model_sha": "ed7028364195063e6e55255259908bd4a5e46b7c", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/shleeeee/mistral-ko-OpenOrca-Platypus-v2/result_2023-12-19 08:08:58.json b/shleeeee/mistral-ko-OpenOrca-Platypus-v2/result_2023-12-19 08:08:58.json new file mode 100644 index 0000000000000000000000000000000000000000..91a9c263e12c0db28129cac48c9f1bef2eb558bd --- /dev/null +++ b/shleeeee/mistral-ko-OpenOrca-Platypus-v2/result_2023-12-19 08:08:58.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3293515358361775, + "acc_stderr": 0.013734057652635476, + "acc_norm": 0.37627986348122866, + "acc_norm_stderr": 0.014157022555407161 + }, + "harness|ko_hellaswag|10": { + "acc": 0.36735710017924716, + "acc_stderr": 0.004810996652324741, + "acc_norm": 0.47739494124676357, + "acc_norm_stderr": 0.004984679359375628 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4619883040935672, + "acc_stderr": 0.03823727092882307, + "acc_norm": 0.4619883040935672, + "acc_norm_stderr": 0.03823727092882307 + }, + 
"harness|ko_mmlu_management|5": { + "acc": 0.5631067961165048, + "acc_stderr": 0.04911147107365777, + "acc_norm": 0.5631067961165048, + "acc_norm_stderr": 0.04911147107365777 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4521072796934866, + "acc_stderr": 0.017797751493865623, + "acc_norm": 0.4521072796934866, + "acc_norm_stderr": 0.017797751493865623 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.04244633238353229, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.04244633238353229 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3659574468085106, + "acc_stderr": 0.0314895582974553, + "acc_norm": 0.3659574468085106, + "acc_norm_stderr": 0.0314895582974553 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3795180722891566, + "acc_stderr": 0.037777988227480165, + "acc_norm": 0.3795180722891566, + "acc_norm_stderr": 0.037777988227480165 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4790996784565916, + "acc_stderr": 0.028373270961069414, + "acc_norm": 0.4790996784565916, + "acc_norm_stderr": 0.028373270961069414 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.40358744394618834, + "acc_stderr": 0.03292802819330315, + "acc_norm": 0.40358744394618834, + "acc_norm_stderr": 0.03292802819330315 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4351145038167939, + "acc_stderr": 0.043482080516448585, + "acc_norm": 0.4351145038167939, + "acc_norm_stderr": 0.043482080516448585 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5404040404040404, + "acc_stderr": 0.035507024651313425, + "acc_norm": 0.5404040404040404, + "acc_norm_stderr": 0.035507024651313425 + }, + 
"harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4, + "acc_stderr": 0.04082482904638628, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04082482904638628 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.30392156862745096, + "acc_stderr": 0.045766654032077615, + "acc_norm": 0.30392156862745096, + "acc_norm_stderr": 0.045766654032077615 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5504201680672269, + "acc_stderr": 0.03231293497137707, + "acc_norm": 0.5504201680672269, + "acc_norm_stderr": 0.03231293497137707 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4358974358974359, + "acc_stderr": 0.025141801511177498, + "acc_norm": 0.4358974358974359, + "acc_norm_stderr": 0.025141801511177498 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.03481904844438803, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.03481904844438803 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.45483870967741935, + "acc_stderr": 0.028327743091561056, + "acc_norm": 0.45483870967741935, + "acc_norm_stderr": 0.028327743091561056 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7564102564102564, + "acc_stderr": 0.028120966503914394, + "acc_norm": 0.7564102564102564, + "acc_norm_stderr": 0.028120966503914394 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4490566037735849, + "acc_stderr": 0.030612730713641095, + "acc_norm": 0.4490566037735849, + "acc_norm_stderr": 0.030612730713641095 + }, + 
"harness|ko_mmlu_public_relations|5": { + "acc": 0.5181818181818182, + "acc_stderr": 0.04785964010794915, + "acc_norm": 0.5181818181818182, + "acc_norm_stderr": 0.04785964010794915 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.32222222222222224, + "acc_stderr": 0.028493465091028593, + "acc_norm": 0.32222222222222224, + "acc_norm_stderr": 0.028493465091028593 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + "acc_stderr": 0.03684881521389023, + "acc_norm": 0.2847682119205298, + "acc_norm_stderr": 0.03684881521389023 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6169154228855721, + "acc_stderr": 0.034375193373382504, + "acc_norm": 0.6169154228855721, + "acc_norm_stderr": 0.034375193373382504 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3930635838150289, + "acc_stderr": 0.037242495958177295, + "acc_norm": 0.3930635838150289, + "acc_norm_stderr": 0.037242495958177295 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3492063492063492, + "acc_stderr": 0.02455229220934265, + "acc_norm": 0.3492063492063492, + "acc_norm_stderr": 0.02455229220934265 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.03942082639927213, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.03942082639927213 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237101, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237101 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5173410404624278, + "acc_stderr": 0.026902900458666647, + "acc_norm": 0.5173410404624278, + "acc_norm_stderr": 0.026902900458666647 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5030674846625767, + "acc_stderr": 0.03928297078179663, + "acc_norm": 0.5030674846625767, + 
"acc_norm_stderr": 0.03928297078179663 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4537037037037037, + "acc_stderr": 0.027701228468542602, + "acc_norm": 0.4537037037037037, + "acc_norm_stderr": 0.027701228468542602 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.04793724854411021, + "acc_norm": 0.35, + "acc_norm_stderr": 0.04793724854411021 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.46632124352331605, + "acc_stderr": 0.036002440698671784, + "acc_norm": 0.46632124352331605, + "acc_norm_stderr": 0.036002440698671784 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.32456140350877194, + "acc_stderr": 0.044045561573747685, + "acc_norm": 0.32456140350877194, + "acc_norm_stderr": 0.044045561573747685 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.4972477064220184, + "acc_stderr": 0.02143699835976532, + "acc_norm": 0.4972477064220184, + "acc_norm_stderr": 0.02143699835976532 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3253968253968254, + "acc_stderr": 0.04190596438871136, + "acc_norm": 0.3253968253968254, + "acc_norm_stderr": 0.04190596438871136 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.46405228758169936, + "acc_stderr": 0.028555827516528787, + "acc_norm": 0.46405228758169936, + "acc_norm_stderr": 0.028555827516528787 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6446280991735537, + "acc_stderr": 0.0436923632657398, + "acc_norm": 0.6446280991735537, + "acc_norm_stderr": 0.0436923632657398 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40131578947368424, + "acc_stderr": 0.03988903703336284, + "acc_norm": 0.40131578947368424, + "acc_norm_stderr": 0.03988903703336284 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4133986928104575, + "acc_stderr": 0.019922115682786692, + "acc_norm": 
0.4133986928104575, + "acc_norm_stderr": 0.019922115682786692 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3475177304964539, + "acc_stderr": 0.028406627809590947, + "acc_norm": 0.3475177304964539, + "acc_norm_stderr": 0.028406627809590947 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.38392857142857145, + "acc_stderr": 0.04616143075028546, + "acc_norm": 0.38392857142857145, + "acc_norm_stderr": 0.04616143075028546 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.03362277436608043, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.03362277436608043 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2916201117318436, + "acc_stderr": 0.015201032512520427, + "acc_norm": 0.2916201117318436, + "acc_norm_stderr": 0.015201032512520427 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.41911764705882354, + "acc_stderr": 0.029972807170464626, + "acc_norm": 0.41911764705882354, + "acc_norm_stderr": 0.029972807170464626 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4857142857142857, + "acc_stderr": 0.03199615232806287, + "acc_norm": 0.4857142857142857, + "acc_norm_stderr": 0.03199615232806287 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5738396624472574, + "acc_stderr": 0.03219035703131775, + "acc_norm": 0.5738396624472574, + "acc_norm_stderr": 0.03219035703131775 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.32985658409387225, + "acc_stderr": 0.012008129938540483, + "acc_norm": 0.32985658409387225, + "acc_norm_stderr": 0.012008129938540483 + }, + "harness|ko_mmlu_high_school_us_history|5": { 
+ "acc": 0.4362745098039216, + "acc_stderr": 0.03480693138457039, + "acc_norm": 0.4362745098039216, + "acc_norm_stderr": 0.03480693138457039 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.47878787878787876, + "acc_stderr": 0.03900828913737302, + "acc_norm": 0.47878787878787876, + "acc_norm_stderr": 0.03900828913737302 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2827417380660955, + "mc1_stderr": 0.015764770836777315, + "mc2": 0.4594938797803625, + "mc2_stderr": 0.01549038818993178 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5289256198347108, + "acc_stderr": 0.017161563949916345, + "acc_norm": 0.5702479338842975, + "acc_norm_stderr": 0.017019847535972205 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + 
"harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "shleeeee/mistral-ko-OpenOrca-Platypus-v2", + "model_sha": "b1035824a7a1e57c0b5814912599a4165dd39138", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/shleeeee/mistral-ko-OpenOrca-wiki-v1/result_2023-12-05 04:52:36.json b/shleeeee/mistral-ko-OpenOrca-wiki-v1/result_2023-12-05 04:52:36.json new file mode 100644 index 0000000000000000000000000000000000000000..2153afc05b1e3e76fa9e39e0e206d7755fd3a02e --- 
/dev/null +++ b/shleeeee/mistral-ko-OpenOrca-wiki-v1/result_2023-12-05 04:52:36.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.31143344709897613, + "acc_stderr": 0.013532472099850949, + "acc_norm": 0.363481228668942, + "acc_norm_stderr": 0.014056207319068283 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3685520812587134, + "acc_stderr": 0.004814261966376847, + "acc_norm": 0.47520414260107546, + "acc_norm_stderr": 0.004983641854351151 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4853801169590643, + "acc_stderr": 0.038331852752130205, + "acc_norm": 0.4853801169590643, + "acc_norm_stderr": 0.038331852752130205 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5242718446601942, + "acc_stderr": 0.049449010929737795, + "acc_norm": 0.5242718446601942, + "acc_norm_stderr": 0.049449010929737795 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.47381864623243936, + "acc_stderr": 0.017855434554041982, + "acc_norm": 0.47381864623243936, + "acc_norm_stderr": 0.017855434554041982 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3925925925925926, + "acc_stderr": 0.04218506215368879, + "acc_norm": 0.3925925925925926, + "acc_norm_stderr": 0.04218506215368879 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3404255319148936, + "acc_stderr": 0.030976692998534432, + "acc_norm": 0.3404255319148936, + "acc_norm_stderr": 0.030976692998534432 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3674698795180723, + "acc_stderr": 0.03753267402120574, + "acc_norm": 0.3674698795180723, + "acc_norm_stderr": 0.03753267402120574 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4115755627009646, + "acc_stderr": 0.02795048149440127, + "acc_norm": 0.4115755627009646, + "acc_norm_stderr": 0.02795048149440127 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 
0.42152466367713004, + "acc_stderr": 0.03314190222110657, + "acc_norm": 0.42152466367713004, + "acc_norm_stderr": 0.03314190222110657 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.33587786259541985, + "acc_stderr": 0.04142313771996664, + "acc_norm": 0.33587786259541985, + "acc_norm_stderr": 0.04142313771996664 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5050505050505051, + "acc_stderr": 0.035621707606254015, + "acc_norm": 0.5050505050505051, + "acc_norm_stderr": 0.035621707606254015 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3931034482758621, + "acc_stderr": 0.0407032901370707, + "acc_norm": 0.3931034482758621, + "acc_norm_stderr": 0.0407032901370707 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.17647058823529413, + "acc_stderr": 0.03793281185307809, + "acc_norm": 0.17647058823529413, + "acc_norm_stderr": 0.03793281185307809 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.46638655462184875, + "acc_stderr": 0.03240501447690071, + "acc_norm": 0.46638655462184875, + "acc_norm_stderr": 0.03240501447690071 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4307692307692308, + "acc_stderr": 0.025106820660539743, + "acc_norm": 0.4307692307692308, + "acc_norm_stderr": 0.025106820660539743 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5277777777777778, + "acc_stderr": 0.04826217294139894, + "acc_norm": 0.5277777777777778, + "acc_norm_stderr": 0.04826217294139894 + }, + 
"harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.03465304488406796, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.03465304488406796 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4032258064516129, + "acc_stderr": 0.027906150826041143, + "acc_norm": 0.4032258064516129, + "acc_norm_stderr": 0.027906150826041143 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7008547008547008, + "acc_stderr": 0.02999695185834948, + "acc_norm": 0.7008547008547008, + "acc_norm_stderr": 0.02999695185834948 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4, + "acc_stderr": 0.030151134457776296, + "acc_norm": 0.4, + "acc_norm_stderr": 0.030151134457776296 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4727272727272727, + "acc_stderr": 0.04782001791380063, + "acc_norm": 0.4727272727272727, + "acc_norm_stderr": 0.04782001791380063 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.362962962962963, + "acc_stderr": 0.02931820364520686, + "acc_norm": 0.362962962962963, + "acc_norm_stderr": 0.02931820364520686 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.32450331125827814, + "acc_stderr": 0.038227469376587525, + "acc_norm": 0.32450331125827814, + "acc_norm_stderr": 0.038227469376587525 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5074626865671642, + "acc_stderr": 0.035351400842767194, + "acc_norm": 0.5074626865671642, + "acc_norm_stderr": 0.035351400842767194 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.35260115606936415, + "acc_stderr": 0.03643037168958548, + "acc_norm": 0.35260115606936415, + "acc_norm_stderr": 0.03643037168958548 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3544973544973545, + "acc_stderr": 0.024636830602842, + "acc_norm": 0.3544973544973545, + "acc_norm_stderr": 0.024636830602842 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3472222222222222, + "acc_stderr": 0.03981240543717861, + "acc_norm": 
0.3472222222222222, + "acc_norm_stderr": 0.03981240543717861 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.52, + "acc_stderr": 0.05021167315686781, + "acc_norm": 0.52, + "acc_norm_stderr": 0.05021167315686781 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.45375722543352603, + "acc_stderr": 0.02680372058320619, + "acc_norm": 0.45375722543352603, + "acc_norm_stderr": 0.02680372058320619 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4110429447852761, + "acc_stderr": 0.038656978537853624, + "acc_norm": 0.4110429447852761, + "acc_norm_stderr": 0.038656978537853624 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.43209876543209874, + "acc_stderr": 0.02756301097160667, + "acc_norm": 0.43209876543209874, + "acc_norm_stderr": 0.02756301097160667 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.47668393782383417, + "acc_stderr": 0.03604513672442206, + "acc_norm": 0.47668393782383417, + "acc_norm_stderr": 0.03604513672442206 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2982456140350877, + "acc_stderr": 0.04303684033537315, + "acc_norm": 0.2982456140350877, + "acc_norm_stderr": 0.04303684033537315 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.44770642201834865, + "acc_stderr": 0.021319754962425455, + "acc_norm": 0.44770642201834865, + "acc_norm_stderr": 0.021319754962425455 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3412698412698413, + "acc_stderr": 0.04240799327574925, + "acc_norm": 0.3412698412698413, + "acc_norm_stderr": 0.04240799327574925 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.39869281045751637, + "acc_stderr": 0.028036092273891765, + 
"acc_norm": 0.39869281045751637, + "acc_norm_stderr": 0.028036092273891765 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.41, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.41, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6859504132231405, + "acc_stderr": 0.04236964753041018, + "acc_norm": 0.6859504132231405, + "acc_norm_stderr": 0.04236964753041018 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3684210526315789, + "acc_stderr": 0.03925523381052932, + "acc_norm": 0.3684210526315789, + "acc_norm_stderr": 0.03925523381052932 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.369281045751634, + "acc_stderr": 0.019524316744866346, + "acc_norm": 0.369281045751634, + "acc_norm_stderr": 0.019524316744866346 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.37943262411347517, + "acc_stderr": 0.028947338851614105, + "acc_norm": 0.37943262411347517, + "acc_norm_stderr": 0.028947338851614105 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.04697113923010213, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.04697113923010213 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3194444444444444, + "acc_stderr": 0.03179876342176851, + "acc_norm": 0.3194444444444444, + "acc_norm_stderr": 0.03179876342176851 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.22905027932960895, + "acc_stderr": 0.014054314935614556, + "acc_norm": 0.22905027932960895, + "acc_norm_stderr": 0.014054314935614556 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3125, + "acc_stderr": 
0.02815637344037142, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.02815637344037142 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3673469387755102, + "acc_stderr": 0.030862144921087555, + "acc_norm": 0.3673469387755102, + "acc_norm_stderr": 0.030862144921087555 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.540084388185654, + "acc_stderr": 0.03244246810187913, + "acc_norm": 0.540084388185654, + "acc_norm_stderr": 0.03244246810187913 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.31486310299869624, + "acc_stderr": 0.011862561755715944, + "acc_norm": 0.31486310299869624, + "acc_norm_stderr": 0.011862561755715944 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4068627450980392, + "acc_stderr": 0.03447891136353383, + "acc_norm": 0.4068627450980392, + "acc_norm_stderr": 0.03447891136353383 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.42424242424242425, + "acc_stderr": 0.038592681420702615, + "acc_norm": 0.42424242424242425, + "acc_norm_stderr": 0.038592681420702615 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2631578947368421, + "mc1_stderr": 0.015415241740237043, + "mc2": 0.43990633213087843, + "mc2_stderr": 0.01551429694556166 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.43919716646989376, + "acc_stderr": 0.0170627757447807, + "acc_norm": 0.4911452184179457, + "acc_norm_stderr": 0.017187658199336736 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, 
+ "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + 
"harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "shleeeee/mistral-ko-OpenOrca-wiki-v1", + "model_sha": "e7c8fea7112378edf396bb0753ac5fc3c20b0816", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/shleeeee/mistral-ko-exo-mrc-v1/result_2023-12-11 22:03:07.json b/shleeeee/mistral-ko-exo-mrc-v1/result_2023-12-11 22:03:07.json new file mode 100644 index 0000000000000000000000000000000000000000..e34e5f7a2b647db1648c071f64eafccee3230b71 --- /dev/null +++ b/shleeeee/mistral-ko-exo-mrc-v1/result_2023-12-11 22:03:07.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3191126279863481, + "acc_stderr": 0.013621696119173302, + "acc_norm": 0.37372013651877134, + "acc_norm_stderr": 0.014137708601759086 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3632742481577375, + "acc_stderr": 0.004799599840397375, + "acc_norm": 0.4675363473411671, + "acc_norm_stderr": 0.004979252954977317 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5029239766081871, + "acc_stderr": 0.03834759370936839, + "acc_norm": 0.5029239766081871, + "acc_norm_stderr": 0.03834759370936839 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5533980582524272, + "acc_stderr": 0.04922424153458934, + "acc_norm": 0.5533980582524272, + "acc_norm_stderr": 0.04922424153458934 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4559386973180077, + "acc_stderr": 0.017810403925435366, + "acc_norm": 0.4559386973180077, + "acc_norm_stderr": 0.017810403925435366 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37777777777777777, + "acc_stderr": 0.04188307537595853, + "acc_norm": 0.37777777777777777, + "acc_norm_stderr": 0.04188307537595853 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.24, + 
"acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4127659574468085, + "acc_stderr": 0.03218471141400352, + "acc_norm": 0.4127659574468085, + "acc_norm_stderr": 0.03218471141400352 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.40963855421686746, + "acc_stderr": 0.03828401115079022, + "acc_norm": 0.40963855421686746, + "acc_norm_stderr": 0.03828401115079022 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.49517684887459806, + "acc_stderr": 0.02839677044411129, + "acc_norm": 0.49517684887459806, + "acc_norm_stderr": 0.02839677044411129 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4125560538116592, + "acc_stderr": 0.03304062175449296, + "acc_norm": 0.4125560538116592, + "acc_norm_stderr": 0.03304062175449296 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.46564885496183206, + "acc_stderr": 0.04374928560599738, + "acc_norm": 0.46564885496183206, + "acc_norm_stderr": 0.04374928560599738 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5707070707070707, + "acc_stderr": 0.035265527246011986, + "acc_norm": 0.5707070707070707, + "acc_norm_stderr": 0.035265527246011986 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.47586206896551725, + "acc_stderr": 0.041618085035015295, + "acc_norm": 0.47586206896551725, + "acc_norm_stderr": 0.041618085035015295 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.04220773659171452, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.04220773659171452 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.47058823529411764, + "acc_stderr": 0.03242225027115007, + "acc_norm": 0.47058823529411764, + "acc_norm_stderr": 0.03242225027115007 + }, + 
"harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.48205128205128206, + "acc_stderr": 0.025334667080954953, + "acc_norm": 0.48205128205128206, + "acc_norm_stderr": 0.025334667080954953 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.43349753694581283, + "acc_stderr": 0.034867317274198714, + "acc_norm": 0.43349753694581283, + "acc_norm_stderr": 0.034867317274198714 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.45806451612903226, + "acc_stderr": 0.028343787250540636, + "acc_norm": 0.45806451612903226, + "acc_norm_stderr": 0.028343787250540636 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7350427350427351, + "acc_stderr": 0.028911208802749465, + "acc_norm": 0.7350427350427351, + "acc_norm_stderr": 0.028911208802749465 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4490566037735849, + "acc_stderr": 0.030612730713641095, + "acc_norm": 0.4490566037735849, + "acc_norm_stderr": 0.030612730713641095 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.0478833976870286, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.0478833976870286 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.028317533496066482, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.028317533496066482 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2582781456953642, + "acc_stderr": 0.035737053147634576, + "acc_norm": 0.2582781456953642, + 
"acc_norm_stderr": 0.035737053147634576 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6069651741293532, + "acc_stderr": 0.0345368246603156, + "acc_norm": 0.6069651741293532, + "acc_norm_stderr": 0.0345368246603156 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4161849710982659, + "acc_stderr": 0.03758517775404947, + "acc_norm": 0.4161849710982659, + "acc_norm_stderr": 0.03758517775404947 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3386243386243386, + "acc_stderr": 0.024373197867983053, + "acc_norm": 0.3386243386243386, + "acc_norm_stderr": 0.024373197867983053 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3402777777777778, + "acc_stderr": 0.039621355734862175, + "acc_norm": 0.3402777777777778, + "acc_norm_stderr": 0.039621355734862175 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5057803468208093, + "acc_stderr": 0.026917296179149116, + "acc_norm": 0.5057803468208093, + "acc_norm_stderr": 0.026917296179149116 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4601226993865031, + "acc_stderr": 0.03915857291436971, + "acc_norm": 0.4601226993865031, + "acc_norm_stderr": 0.03915857291436971 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.42901234567901236, + "acc_stderr": 0.027538925613470867, + "acc_norm": 0.42901234567901236, + "acc_norm_stderr": 0.027538925613470867 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.48704663212435234, + "acc_stderr": 0.03607228061047749, + "acc_norm": 
0.48704663212435234, + "acc_norm_stderr": 0.03607228061047749 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2894736842105263, + "acc_stderr": 0.04266339443159394, + "acc_norm": 0.2894736842105263, + "acc_norm_stderr": 0.04266339443159394 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.45871559633027525, + "acc_stderr": 0.02136412253388169, + "acc_norm": 0.45871559633027525, + "acc_norm_stderr": 0.02136412253388169 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.04285714285714281, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.04285714285714281 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.49019607843137253, + "acc_stderr": 0.028624412550167965, + "acc_norm": 0.49019607843137253, + "acc_norm_stderr": 0.028624412550167965 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6694214876033058, + "acc_stderr": 0.04294340845212094, + "acc_norm": 0.6694214876033058, + "acc_norm_stderr": 0.04294340845212094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3815789473684211, + "acc_stderr": 0.03953173377749194, + "acc_norm": 0.3815789473684211, + "acc_norm_stderr": 0.03953173377749194 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.38562091503267976, + "acc_stderr": 0.019691459052354147, + "acc_norm": 0.38562091503267976, + "acc_norm_stderr": 0.019691459052354147 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.35106382978723405, + "acc_stderr": 0.028473501272963764, + "acc_norm": 0.35106382978723405, + "acc_norm_stderr": 0.028473501272963764 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.44642857142857145, + "acc_stderr": 0.04718471485219588, + "acc_norm": 0.44642857142857145, + "acc_norm_stderr": 0.04718471485219588 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 
0.3888888888888889, + "acc_stderr": 0.033247089118091176, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.033247089118091176 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2871508379888268, + "acc_stderr": 0.015131608849963755, + "acc_norm": 0.2871508379888268, + "acc_norm_stderr": 0.015131608849963755 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.43014705882352944, + "acc_stderr": 0.030074971917302875, + "acc_norm": 0.43014705882352944, + "acc_norm_stderr": 0.030074971917302875 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5061224489795918, + "acc_stderr": 0.03200682020163906, + "acc_norm": 0.5061224489795918, + "acc_norm_stderr": 0.03200682020163906 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5569620253164557, + "acc_stderr": 0.03233532777533484, + "acc_norm": 0.5569620253164557, + "acc_norm_stderr": 0.03233532777533484 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.32529335071707954, + "acc_stderr": 0.01196531153657153, + "acc_norm": 0.32529335071707954, + "acc_norm_stderr": 0.01196531153657153 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.03460228327239171, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.03460228327239171 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.46060606060606063, + "acc_stderr": 0.03892207016552013, + "acc_norm": 0.46060606060606063, + "acc_norm_stderr": 0.03892207016552013 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.26560587515299877, + "mc1_stderr": 0.0154610276272536, + "mc2": 0.44879669499276337, + "mc2_stderr": 
0.015443203581643984 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.41204250295159384, + "acc_stderr": 0.016922276738528363, + "acc_norm": 0.4805194805194805, + "acc_norm_stderr": 0.01717730199234255 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 
1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "shleeeee/mistral-ko-exo-mrc-v1", + "model_sha": "4cfebcf52a610101df1d3dad07fae8fe07c6c5b3", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/shleeeee/mistral-ko-exo-wiki-quiz-v1/result_2023-12-06 03:58:43.json b/shleeeee/mistral-ko-exo-wiki-quiz-v1/result_2023-12-06 03:58:43.json new file mode 100644 index 0000000000000000000000000000000000000000..c3fb25545bac9dbf6a8a5d0d058166d6b254b387 --- /dev/null +++ b/shleeeee/mistral-ko-exo-wiki-quiz-v1/result_2023-12-06 03:58:43.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.31313993174061433, + "acc_stderr": 0.013552671543623504, + "acc_norm": 0.3626279863481229, + "acc_norm_stderr": 0.014049106564955012 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3604859589723163, + "acc_stderr": 0.004791601975612767, + "acc_norm": 0.46016729735112527, + "acc_norm_stderr": 0.004973922192982237 + }, + 
"harness|ko_mmlu_world_religions|5": { + "acc": 0.49707602339181284, + "acc_stderr": 0.03834759370936839, + "acc_norm": 0.49707602339181284, + "acc_norm_stderr": 0.03834759370936839 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5922330097087378, + "acc_stderr": 0.04865777570410769, + "acc_norm": 0.5922330097087378, + "acc_norm_stderr": 0.04865777570410769 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4559386973180077, + "acc_stderr": 0.01781040392543537, + "acc_norm": 0.4559386973180077, + "acc_norm_stderr": 0.01781040392543537 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37777777777777777, + "acc_stderr": 0.04188307537595853, + "acc_norm": 0.37777777777777777, + "acc_norm_stderr": 0.04188307537595853 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39574468085106385, + "acc_stderr": 0.03196758697835362, + "acc_norm": 0.39574468085106385, + "acc_norm_stderr": 0.03196758697835362 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4036144578313253, + "acc_stderr": 0.03819486140758397, + "acc_norm": 0.4036144578313253, + "acc_norm_stderr": 0.03819486140758397 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.45980707395498394, + "acc_stderr": 0.028306190403305696, + "acc_norm": 0.45980707395498394, + "acc_norm_stderr": 0.028306190403305696 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4663677130044843, + "acc_stderr": 0.033481800170603065, + "acc_norm": 0.4663677130044843, + "acc_norm_stderr": 0.033481800170603065 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4198473282442748, + "acc_stderr": 0.04328577215262973, + "acc_norm": 0.4198473282442748, + "acc_norm_stderr": 0.04328577215262973 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + 
"harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5303030303030303, + "acc_stderr": 0.03555804051763929, + "acc_norm": 0.5303030303030303, + "acc_norm_stderr": 0.03555804051763929 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.496551724137931, + "acc_stderr": 0.041665675771015785, + "acc_norm": 0.496551724137931, + "acc_norm_stderr": 0.041665675771015785 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.04488482852329017, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.04488482852329017 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5084033613445378, + "acc_stderr": 0.0324739027656967, + "acc_norm": 0.5084033613445378, + "acc_norm_stderr": 0.0324739027656967 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.45384615384615384, + "acc_stderr": 0.02524277098712617, + "acc_norm": 0.45384615384615384, + "acc_norm_stderr": 0.02524277098712617 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.59, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.59, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4433497536945813, + "acc_stderr": 0.034953345821629324, + "acc_norm": 0.4433497536945813, + "acc_norm_stderr": 0.034953345821629324 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4870967741935484, + "acc_stderr": 0.02843453315268184, + "acc_norm": 0.4870967741935484, + "acc_norm_stderr": 0.02843453315268184 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7051282051282052, + "acc_stderr": 0.029872577708891183, + "acc_norm": 0.7051282051282052, + 
"acc_norm_stderr": 0.029872577708891183 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4075471698113208, + "acc_stderr": 0.0302422338008545, + "acc_norm": 0.4075471698113208, + "acc_norm_stderr": 0.0302422338008545 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5, + "acc_stderr": 0.04789131426105757, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04789131426105757 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.32222222222222224, + "acc_stderr": 0.028493465091028593, + "acc_norm": 0.32222222222222224, + "acc_norm_stderr": 0.028493465091028593 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.03780445850526733, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.03780445850526733 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5522388059701493, + "acc_stderr": 0.03516184772952167, + "acc_norm": 0.5522388059701493, + "acc_norm_stderr": 0.03516184772952167 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3583815028901734, + "acc_stderr": 0.036563436533531585, + "acc_norm": 0.3583815028901734, + "acc_norm_stderr": 0.036563436533531585 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.02467786284133278, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.02467786284133278 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3194444444444444, + "acc_stderr": 0.03899073687357334, + "acc_norm": 0.3194444444444444, + "acc_norm_stderr": 0.03899073687357334 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5346820809248555, + "acc_stderr": 0.02685425792825889, + "acc_norm": 
0.5346820809248555, + "acc_norm_stderr": 0.02685425792825889 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.43558282208588955, + "acc_stderr": 0.03895632464138936, + "acc_norm": 0.43558282208588955, + "acc_norm_stderr": 0.03895632464138936 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.42592592592592593, + "acc_stderr": 0.027513747284379414, + "acc_norm": 0.42592592592592593, + "acc_norm_stderr": 0.027513747284379414 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.04793724854411021, + "acc_norm": 0.35, + "acc_norm_stderr": 0.04793724854411021 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.47668393782383417, + "acc_stderr": 0.03604513672442206, + "acc_norm": 0.47668393782383417, + "acc_norm_stderr": 0.03604513672442206 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.32456140350877194, + "acc_stderr": 0.04404556157374768, + "acc_norm": 0.32456140350877194, + "acc_norm_stderr": 0.04404556157374768 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.48073394495412847, + "acc_stderr": 0.02142140298254888, + "acc_norm": 0.48073394495412847, + "acc_norm_stderr": 0.02142140298254888 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.04134913018303316, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.04134913018303316 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5065359477124183, + "acc_stderr": 0.028627470550556054, + "acc_norm": 0.5065359477124183, + "acc_norm_stderr": 0.028627470550556054 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7272727272727273, + "acc_stderr": 0.04065578140908705, + "acc_norm": 0.7272727272727273, + "acc_norm_stderr": 0.04065578140908705 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4276315789473684, + "acc_stderr": 
0.04026097083296558, + "acc_norm": 0.4276315789473684, + "acc_norm_stderr": 0.04026097083296558 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3954248366013072, + "acc_stderr": 0.01978046595477752, + "acc_norm": 0.3954248366013072, + "acc_norm_stderr": 0.01978046595477752 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.35106382978723405, + "acc_stderr": 0.028473501272963775, + "acc_norm": 0.35106382978723405, + "acc_norm_stderr": 0.028473501272963775 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.44642857142857145, + "acc_stderr": 0.047184714852195886, + "acc_norm": 0.44642857142857145, + "acc_norm_stderr": 0.047184714852195886 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4027777777777778, + "acc_stderr": 0.033448873829978666, + "acc_norm": 0.4027777777777778, + "acc_norm_stderr": 0.033448873829978666 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.3463687150837989, + "acc_stderr": 0.01591354678402012, + "acc_norm": 0.3463687150837989, + "acc_norm_stderr": 0.01591354678402012 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.64, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.64, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.36764705882352944, + "acc_stderr": 0.029289413409403192, + "acc_norm": 0.36764705882352944, + "acc_norm_stderr": 0.029289413409403192 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.46122448979591835, + "acc_stderr": 0.03191282052669277, + "acc_norm": 0.46122448979591835, + "acc_norm_stderr": 0.03191282052669277 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5443037974683544, + "acc_stderr": 0.032419206846933335, + "acc_norm": 0.5443037974683544, + "acc_norm_stderr": 0.032419206846933335 + }, + 
"harness|ko_mmlu_professional_law|5": { + "acc": 0.3246414602346806, + "acc_stderr": 0.011959089388530027, + "acc_norm": 0.3246414602346806, + "acc_norm_stderr": 0.011959089388530027 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4019607843137255, + "acc_stderr": 0.034411900234824655, + "acc_norm": 0.4019607843137255, + "acc_norm_stderr": 0.034411900234824655 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.43636363636363634, + "acc_stderr": 0.03872592983524753, + "acc_norm": 0.43636363636363634, + "acc_norm_stderr": 0.03872592983524753 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2582619339045288, + "mc1_stderr": 0.015321821688476199, + "mc2": 0.4419757231981567, + "mc2_stderr": 0.01567221917412918 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.42266824085005905, + "acc_stderr": 0.016983506079577604, + "acc_norm": 0.4982290436835891, + "acc_norm_stderr": 0.017190246276231863 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + 
"harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "shleeeee/mistral-ko-exo-wiki-quiz-v1", + "model_sha": "ade18612c96f02d3524e0a318caf470821f3067b", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff 
--git a/shleeeee/mistral-ko-openorca-platypus-1epoch/result_2023-12-21 05:12:26.json b/shleeeee/mistral-ko-openorca-platypus-1epoch/result_2023-12-21 05:12:26.json new file mode 100644 index 0000000000000000000000000000000000000000..722549f6cb3c7bddca8f286f51af74988f13402b --- /dev/null +++ b/shleeeee/mistral-ko-openorca-platypus-1epoch/result_2023-12-21 05:12:26.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.302901023890785, + "acc_stderr": 0.013428241573185349, + "acc_norm": 0.3677474402730375, + "acc_norm_stderr": 0.014090995618168478 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3569010157339175, + "acc_stderr": 0.004781061390873917, + "acc_norm": 0.44284007169886475, + "acc_norm_stderr": 0.004957068377516513 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.47953216374269003, + "acc_stderr": 0.038316105328219316, + "acc_norm": 0.47953216374269003, + "acc_norm_stderr": 0.038316105328219316 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5728155339805825, + "acc_stderr": 0.04897957737781168, + "acc_norm": 0.5728155339805825, + "acc_norm_stderr": 0.04897957737781168 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.47509578544061304, + "acc_stderr": 0.01785777070490102, + "acc_norm": 0.47509578544061304, + "acc_norm_stderr": 0.01785777070490102 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4222222222222222, + "acc_stderr": 0.04266763404099582, + "acc_norm": 0.4222222222222222, + "acc_norm_stderr": 0.04266763404099582 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.37872340425531914, + "acc_stderr": 0.03170995606040655, + "acc_norm": 0.37872340425531914, + "acc_norm_stderr": 0.03170995606040655 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3855421686746988, + "acc_stderr": 0.037891344246115496, + "acc_norm": 0.3855421686746988, + 
"acc_norm_stderr": 0.037891344246115496 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4565916398713826, + "acc_stderr": 0.0282908690541976, + "acc_norm": 0.4565916398713826, + "acc_norm_stderr": 0.0282908690541976 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4125560538116592, + "acc_stderr": 0.03304062175449296, + "acc_norm": 0.4125560538116592, + "acc_norm_stderr": 0.03304062175449296 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4351145038167939, + "acc_stderr": 0.043482080516448585, + "acc_norm": 0.4351145038167939, + "acc_norm_stderr": 0.043482080516448585 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5252525252525253, + "acc_stderr": 0.03557806245087314, + "acc_norm": 0.5252525252525253, + "acc_norm_stderr": 0.03557806245087314 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4206896551724138, + "acc_stderr": 0.0411391498118926, + "acc_norm": 0.4206896551724138, + "acc_norm_stderr": 0.0411391498118926 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.04533838195929777, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.04533838195929777 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5630252100840336, + "acc_stderr": 0.03221943636566196, + "acc_norm": 0.5630252100840336, + "acc_norm_stderr": 0.03221943636566196 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.47692307692307695, + "acc_stderr": 0.025323990861736125, + "acc_norm": 0.47692307692307695, + "acc_norm_stderr": 0.025323990861736125 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.57, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.57, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 
0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.03481904844438803, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.03481904844438803 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.49032258064516127, + "acc_stderr": 0.02843867799890955, + "acc_norm": 0.49032258064516127, + "acc_norm_stderr": 0.02843867799890955 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7393162393162394, + "acc_stderr": 0.02876034895652341, + "acc_norm": 0.7393162393162394, + "acc_norm_stderr": 0.02876034895652341 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.44150943396226416, + "acc_stderr": 0.030561590426731833, + "acc_norm": 0.44150943396226416, + "acc_norm_stderr": 0.030561590426731833 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5, + "acc_stderr": 0.04789131426105757, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04789131426105757 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.32222222222222224, + "acc_stderr": 0.028493465091028593, + "acc_norm": 0.32222222222222224, + "acc_norm_stderr": 0.028493465091028593 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.25165562913907286, + "acc_stderr": 0.03543304234389985, + "acc_norm": 0.25165562913907286, + "acc_norm_stderr": 0.03543304234389985 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6268656716417911, + "acc_stderr": 0.03419832608176007, + "acc_norm": 0.6268656716417911, + "acc_norm_stderr": 0.03419832608176007 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3930635838150289, + "acc_stderr": 0.037242495958177295, + "acc_norm": 0.3930635838150289, + "acc_norm_stderr": 0.037242495958177295 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.35978835978835977, + "acc_stderr": 
0.024718075944129277, + "acc_norm": 0.35978835978835977, + "acc_norm_stderr": 0.024718075944129277 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3472222222222222, + "acc_stderr": 0.039812405437178615, + "acc_norm": 0.3472222222222222, + "acc_norm_stderr": 0.039812405437178615 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.523121387283237, + "acc_stderr": 0.026890297881303125, + "acc_norm": 0.523121387283237, + "acc_norm_stderr": 0.026890297881303125 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.49079754601226994, + "acc_stderr": 0.03927705600787443, + "acc_norm": 0.49079754601226994, + "acc_norm_stderr": 0.03927705600787443 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.02764847787741332, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.02764847787741332 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.48704663212435234, + "acc_stderr": 0.03607228061047749, + "acc_norm": 0.48704663212435234, + "acc_norm_stderr": 0.03607228061047749 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.32456140350877194, + "acc_stderr": 0.044045561573747685, + "acc_norm": 0.32456140350877194, + "acc_norm_stderr": 0.044045561573747685 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.4917431192660551, + "acc_stderr": 0.021434399918214338, + "acc_norm": 0.4917431192660551, + "acc_norm_stderr": 0.021434399918214338 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.42063492063492064, + 
"acc_stderr": 0.04415438226743743, + "acc_norm": 0.42063492063492064, + "acc_norm_stderr": 0.04415438226743743 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4803921568627451, + "acc_stderr": 0.028607893699576066, + "acc_norm": 0.4803921568627451, + "acc_norm_stderr": 0.028607893699576066 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.44, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.44, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6694214876033058, + "acc_stderr": 0.04294340845212094, + "acc_norm": 0.6694214876033058, + "acc_norm_stderr": 0.04294340845212094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4276315789473684, + "acc_stderr": 0.04026097083296559, + "acc_norm": 0.4276315789473684, + "acc_norm_stderr": 0.04026097083296559 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.39869281045751637, + "acc_stderr": 0.019808281317449838, + "acc_norm": 0.39869281045751637, + "acc_norm_stderr": 0.019808281317449838 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.34397163120567376, + "acc_stderr": 0.028338017428611306, + "acc_norm": 0.34397163120567376, + "acc_norm_stderr": 0.028338017428611306 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4107142857142857, + "acc_stderr": 0.04669510663875191, + "acc_norm": 0.4107142857142857, + "acc_norm_stderr": 0.04669510663875191 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4027777777777778, + "acc_stderr": 0.03344887382997866, + "acc_norm": 0.4027777777777778, + "acc_norm_stderr": 0.03344887382997866 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.3329608938547486, + "acc_stderr": 0.01576171617839756, + "acc_norm": 0.3329608938547486, + "acc_norm_stderr": 0.01576171617839756 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 
0.62, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.62, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4338235294117647, + "acc_stderr": 0.03010563657001664, + "acc_norm": 0.4338235294117647, + "acc_norm_stderr": 0.03010563657001664 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4857142857142857, + "acc_stderr": 0.03199615232806287, + "acc_norm": 0.4857142857142857, + "acc_norm_stderr": 0.03199615232806287 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6033755274261603, + "acc_stderr": 0.031843998738112236, + "acc_norm": 0.6033755274261603, + "acc_norm_stderr": 0.031843998738112236 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3194263363754889, + "acc_stderr": 0.011908357176756158, + "acc_norm": 0.3194263363754889, + "acc_norm_stderr": 0.011908357176756158 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.44607843137254904, + "acc_stderr": 0.03488845451304974, + "acc_norm": 0.44607843137254904, + "acc_norm_stderr": 0.03488845451304974 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.43636363636363634, + "acc_stderr": 0.03872592983524753, + "acc_norm": 0.43636363636363634, + "acc_norm_stderr": 0.03872592983524753 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2913096695226438, + "mc1_stderr": 0.01590598704818483, + "mc2": 0.47712757229255764, + "mc2_stderr": 0.015671925843446344 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5017709563164109, + "acc_stderr": 0.017190246276231853, + "acc_norm": 0.5489964580873672, + "acc_norm_stderr": 0.017107618859549346 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + 
"harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + 
"harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "shleeeee/mistral-ko-openorca-platypus-1epoch", + "model_sha": "7252aa91a9c671044b2c871ea90040e60c2fd27a", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/shleeeee/mistral-ko-tech-science-v1/result_2023-12-12 22:56:56.json b/shleeeee/mistral-ko-tech-science-v1/result_2023-12-12 22:56:56.json new file mode 100644 index 0000000000000000000000000000000000000000..8960d6bc6bcee71803ee7afb9d95bbce49aa77e4 --- /dev/null +++ b/shleeeee/mistral-ko-tech-science-v1/result_2023-12-12 22:56:56.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.318259385665529, + "acc_stderr": 0.013611993916971453, + "acc_norm": 0.3728668941979522, + "acc_norm_stderr": 0.014131176760131167 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3648675562636925, + "acc_stderr": 0.004804091708812552, + "acc_norm": 0.4733120892252539, + "acc_norm_stderr": 0.00498266845211894 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5146198830409356, + "acc_stderr": 0.038331852752130254, + "acc_norm": 0.5146198830409356, + "acc_norm_stderr": 0.038331852752130254 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5631067961165048, + "acc_stderr": 0.04911147107365777, + "acc_norm": 0.5631067961165048, + "acc_norm_stderr": 0.04911147107365777 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.45338441890166026, + "acc_stderr": 0.017802087135850294, + "acc_norm": 0.45338441890166026, + "acc_norm_stderr": 
0.017802087135850294 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.04292596718256981, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.04292596718256981 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39148936170212767, + "acc_stderr": 0.03190701242326812, + "acc_norm": 0.39148936170212767, + "acc_norm_stderr": 0.03190701242326812 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.40963855421686746, + "acc_stderr": 0.03828401115079022, + "acc_norm": 0.40963855421686746, + "acc_norm_stderr": 0.03828401115079022 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.49517684887459806, + "acc_stderr": 0.02839677044411129, + "acc_norm": 0.49517684887459806, + "acc_norm_stderr": 0.02839677044411129 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4439461883408072, + "acc_stderr": 0.03334625674242728, + "acc_norm": 0.4439461883408072, + "acc_norm_stderr": 0.03334625674242728 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4580152671755725, + "acc_stderr": 0.04369802690578756, + "acc_norm": 0.4580152671755725, + "acc_norm_stderr": 0.04369802690578756 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.03547601494006937, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.03547601494006937 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.47586206896551725, + "acc_stderr": 0.041618085035015295, + "acc_norm": 0.47586206896551725, + "acc_norm_stderr": 0.041618085035015295 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.04220773659171453, + "acc_norm": 0.23529411764705882, + 
"acc_norm_stderr": 0.04220773659171453 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5168067226890757, + "acc_stderr": 0.03246013680375308, + "acc_norm": 0.5168067226890757, + "acc_norm_stderr": 0.03246013680375308 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4846153846153846, + "acc_stderr": 0.02533900301010653, + "acc_norm": 0.4846153846153846, + "acc_norm_stderr": 0.02533900301010653 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.59, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.59, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.43349753694581283, + "acc_stderr": 0.03486731727419871, + "acc_norm": 0.43349753694581283, + "acc_norm_stderr": 0.03486731727419871 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.47419354838709676, + "acc_stderr": 0.02840609505765332, + "acc_norm": 0.47419354838709676, + "acc_norm_stderr": 0.02840609505765332 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.717948717948718, + "acc_stderr": 0.029480360549541194, + "acc_norm": 0.717948717948718, + "acc_norm_stderr": 0.029480360549541194 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4830188679245283, + "acc_stderr": 0.030755120364119898, + "acc_norm": 0.4830188679245283, + "acc_norm_stderr": 0.030755120364119898 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5272727272727272, + "acc_stderr": 0.04782001791380061, + "acc_norm": 0.5272727272727272, + "acc_norm_stderr": 0.04782001791380061 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.31851851851851853, + "acc_stderr": 0.02840653309060846, + "acc_norm": 
0.31851851851851853, + "acc_norm_stderr": 0.02840653309060846 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + "acc_stderr": 0.03684881521389023, + "acc_norm": 0.2847682119205298, + "acc_norm_stderr": 0.03684881521389023 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6218905472636815, + "acc_stderr": 0.03428867848778657, + "acc_norm": 0.6218905472636815, + "acc_norm_stderr": 0.03428867848778657 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3930635838150289, + "acc_stderr": 0.0372424959581773, + "acc_norm": 0.3930635838150289, + "acc_norm_stderr": 0.0372424959581773 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.35978835978835977, + "acc_stderr": 0.024718075944129277, + "acc_norm": 0.35978835978835977, + "acc_norm_stderr": 0.024718075944129277 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3194444444444444, + "acc_stderr": 0.038990736873573344, + "acc_norm": 0.3194444444444444, + "acc_norm_stderr": 0.038990736873573344 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5202312138728323, + "acc_stderr": 0.026897049996382868, + "acc_norm": 0.5202312138728323, + "acc_norm_stderr": 0.026897049996382868 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5030674846625767, + "acc_stderr": 0.03928297078179663, + "acc_norm": 0.5030674846625767, + "acc_norm_stderr": 0.03928297078179663 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4351851851851852, + "acc_stderr": 0.02758600622160773, + "acc_norm": 0.4351851851851852, + "acc_norm_stderr": 0.02758600622160773 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + 
"acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.44559585492227977, + "acc_stderr": 0.035870149860756595, + "acc_norm": 0.44559585492227977, + "acc_norm_stderr": 0.035870149860756595 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.34210526315789475, + "acc_stderr": 0.044629175353369376, + "acc_norm": 0.34210526315789475, + "acc_norm_stderr": 0.044629175353369376 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.4935779816513762, + "acc_stderr": 0.021435554820013077, + "acc_norm": 0.4935779816513762, + "acc_norm_stderr": 0.021435554820013077 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.04360314860077459, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.04360314860077459 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4673202614379085, + "acc_stderr": 0.028568699752225882, + "acc_norm": 0.4673202614379085, + "acc_norm_stderr": 0.028568699752225882 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.043913262867240704, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.043913262867240704 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4605263157894737, + "acc_stderr": 0.04056242252249033, + "acc_norm": 0.4605263157894737, + "acc_norm_stderr": 0.04056242252249033 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4133986928104575, + "acc_stderr": 0.019922115682786682, + "acc_norm": 0.4133986928104575, + "acc_norm_stderr": 0.019922115682786682 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.35815602836879434, + "acc_stderr": 0.02860208586275942, + "acc_norm": 0.35815602836879434, + "acc_norm_stderr": 0.02860208586275942 + }, + "harness|ko_mmlu_machine_learning|5": { + 
"acc": 0.45535714285714285, + "acc_stderr": 0.04726835553719099, + "acc_norm": 0.45535714285714285, + "acc_norm_stderr": 0.04726835553719099 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4027777777777778, + "acc_stderr": 0.033448873829978666, + "acc_norm": 0.4027777777777778, + "acc_norm_stderr": 0.033448873829978666 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.35195530726256985, + "acc_stderr": 0.01597266852368907, + "acc_norm": 0.35195530726256985, + "acc_norm_stderr": 0.01597266852368907 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4117647058823529, + "acc_stderr": 0.02989616303312547, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.02989616303312547 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4816326530612245, + "acc_stderr": 0.03198761546763126, + "acc_norm": 0.4816326530612245, + "acc_norm_stderr": 0.03198761546763126 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5316455696202531, + "acc_stderr": 0.032481974005110756, + "acc_norm": 0.5316455696202531, + "acc_norm_stderr": 0.032481974005110756 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3344198174706649, + "acc_stderr": 0.01204966898321493, + "acc_norm": 0.3344198174706649, + "acc_norm_stderr": 0.01204966898321493 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.45588235294117646, + "acc_stderr": 0.03495624522015473, + "acc_norm": 0.45588235294117646, + "acc_norm_stderr": 0.03495624522015473 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.44242424242424244, + "acc_stderr": 0.03878372113711274, + "acc_norm": 0.44242424242424244, + 
"acc_norm_stderr": 0.03878372113711274 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2692778457772338, + "mc1_stderr": 0.015528566637087307, + "mc2": 0.45024219070187044, + "mc2_stderr": 0.015413860078049907 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4911452184179457, + "acc_stderr": 0.017187658199336736, + "acc_norm": 0.5395513577331759, + "acc_norm_stderr": 0.017136487626049846 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + 
"harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "shleeeee/mistral-ko-tech-science-v1", + "model_sha": "f2c72cef947305f1e867f572d66963209e281788", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/sinjy1203/EEVE-Korean-Instruct-10.8B-v1.0-Grade-Retrieval/result_2024-06-05 11:54:52.json b/sinjy1203/EEVE-Korean-Instruct-10.8B-v1.0-Grade-Retrieval/result_2024-06-05 11:54:52.json new file mode 100644 index 0000000000000000000000000000000000000000..71ee9f64e58c1f57f07cd2073517bb4159d319d3 --- /dev/null +++ b/sinjy1203/EEVE-Korean-Instruct-10.8B-v1.0-Grade-Retrieval/result_2024-06-05 11:54:52.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.5136518771331058, + "acc_stderr": 0.014605943429860947, + "acc_norm": 
0.5554607508532423, + "acc_norm_stderr": 0.014521226405627079 + }, + "harness|ko_hellaswag|10": { + "acc": 0.48595897231627166, + "acc_stderr": 0.0049878135480190666, + "acc_norm": 0.6595299741087433, + "acc_norm_stderr": 0.004728988167338539 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.695906432748538, + "acc_stderr": 0.03528211258245233, + "acc_norm": 0.695906432748538, + "acc_norm_stderr": 0.03528211258245233 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.7087378640776699, + "acc_stderr": 0.044986763205729224, + "acc_norm": 0.7087378640776699, + "acc_norm_stderr": 0.044986763205729224 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.7381864623243933, + "acc_stderr": 0.015720838678445252, + "acc_norm": 0.7381864623243933, + "acc_norm_stderr": 0.015720838678445252 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.043163785995113245, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.043163785995113245 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.5191489361702127, + "acc_stderr": 0.0326620429906468, + "acc_norm": 0.5191489361702127, + "acc_norm_stderr": 0.0326620429906468 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.5421686746987951, + "acc_stderr": 0.038786267710023595, + "acc_norm": 0.5421686746987951, + "acc_norm_stderr": 0.038786267710023595 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6141479099678456, + "acc_stderr": 0.027648149599751464, + "acc_norm": 0.6141479099678456, + "acc_norm_stderr": 0.027648149599751464 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.6233183856502242, + "acc_stderr": 0.032521134899291884, + "acc_norm": 0.6233183856502242, + "acc_norm_stderr": 0.032521134899291884 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5801526717557252, + "acc_stderr": 0.04328577215262973, + 
"acc_norm": 0.5801526717557252, + "acc_norm_stderr": 0.04328577215262973 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7777777777777778, + "acc_stderr": 0.029620227874790486, + "acc_norm": 0.7777777777777778, + "acc_norm_stderr": 0.029620227874790486 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5310344827586206, + "acc_stderr": 0.04158632762097828, + "acc_norm": 0.5310344827586206, + "acc_norm_stderr": 0.04158632762097828 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3235294117647059, + "acc_stderr": 0.04655010411319616, + "acc_norm": 0.3235294117647059, + "acc_norm_stderr": 0.04655010411319616 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6134453781512605, + "acc_stderr": 0.031631458075523776, + "acc_norm": 0.6134453781512605, + "acc_norm_stderr": 0.031631458075523776 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5641025641025641, + "acc_stderr": 0.025141801511177488, + "acc_norm": 0.5641025641025641, + "acc_norm_stderr": 0.025141801511177488 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.63, + "acc_stderr": 0.04852365870939098, + "acc_norm": 0.63, + "acc_norm_stderr": 0.04852365870939098 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6296296296296297, + "acc_stderr": 0.04668408033024931, + "acc_norm": 0.6296296296296297, + "acc_norm_stderr": 0.04668408033024931 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.37438423645320196, + "acc_stderr": 0.03405155380561952, + "acc_norm": 0.37438423645320196, + "acc_norm_stderr": 0.03405155380561952 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6516129032258065, + 
"acc_stderr": 0.02710482632810094, + "acc_norm": 0.6516129032258065, + "acc_norm_stderr": 0.02710482632810094 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.8162393162393162, + "acc_stderr": 0.025372139671722933, + "acc_norm": 0.8162393162393162, + "acc_norm_stderr": 0.025372139671722933 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5547169811320755, + "acc_stderr": 0.030588052974270658, + "acc_norm": 0.5547169811320755, + "acc_norm_stderr": 0.030588052974270658 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6181818181818182, + "acc_stderr": 0.046534298079135075, + "acc_norm": 0.6181818181818182, + "acc_norm_stderr": 0.046534298079135075 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3296296296296296, + "acc_stderr": 0.02866120111652458, + "acc_norm": 0.3296296296296296, + "acc_norm_stderr": 0.02866120111652458 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.36423841059602646, + "acc_stderr": 0.03929111781242742, + "acc_norm": 0.36423841059602646, + "acc_norm_stderr": 0.03929111781242742 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7313432835820896, + "acc_stderr": 0.031343283582089536, + "acc_norm": 0.7313432835820896, + "acc_norm_stderr": 0.031343283582089536 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4913294797687861, + "acc_stderr": 0.03811890988940412, + "acc_norm": 0.4913294797687861, + "acc_norm_stderr": 0.03811890988940412 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3915343915343915, + "acc_stderr": 0.02513809138885112, + "acc_norm": 0.3915343915343915, + "acc_norm_stderr": 0.02513809138885112 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5902777777777778, + "acc_stderr": 0.04112490974670787, + "acc_norm": 0.5902777777777778, + "acc_norm_stderr": 0.04112490974670787 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + 
"harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.76, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.76, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.6416184971098265, + "acc_stderr": 0.025816756791584187, + "acc_norm": 0.6416184971098265, + "acc_norm_stderr": 0.025816756791584187 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.588957055214724, + "acc_stderr": 0.038656978537853624, + "acc_norm": 0.588957055214724, + "acc_norm_stderr": 0.038656978537853624 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6327160493827161, + "acc_stderr": 0.02682280175950789, + "acc_norm": 0.6327160493827161, + "acc_norm_stderr": 0.02682280175950789 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.772020725388601, + "acc_stderr": 0.030276909945178256, + "acc_norm": 0.772020725388601, + "acc_norm_stderr": 0.030276909945178256 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.45614035087719296, + "acc_stderr": 0.04685473041907789, + "acc_norm": 0.45614035087719296, + "acc_norm_stderr": 0.04685473041907789 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.7431192660550459, + "acc_stderr": 0.018732492928342465, + "acc_norm": 0.7431192660550459, + "acc_norm_stderr": 0.018732492928342465 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.40476190476190477, + "acc_stderr": 0.04390259265377561, + "acc_norm": 0.40476190476190477, + "acc_norm_stderr": 0.04390259265377561 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.6209150326797386, + "acc_stderr": 0.027780141207023334, + "acc_norm": 0.6209150326797386, + "acc_norm_stderr": 0.027780141207023334 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.64, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.64, + "acc_norm_stderr": 0.04824181513244218 + }, + 
"harness|ko_mmlu_international_law|5": { + "acc": 0.7024793388429752, + "acc_stderr": 0.04173349148083499, + "acc_norm": 0.7024793388429752, + "acc_norm_stderr": 0.04173349148083499 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.6118421052631579, + "acc_stderr": 0.03965842097512744, + "acc_norm": 0.6118421052631579, + "acc_norm_stderr": 0.03965842097512744 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.020102583895887188, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.020102583895887188 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.4148936170212766, + "acc_stderr": 0.0293922365846125, + "acc_norm": 0.4148936170212766, + "acc_norm_stderr": 0.0293922365846125 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.44642857142857145, + "acc_stderr": 0.047184714852195886, + "acc_norm": 0.44642857142857145, + "acc_norm_stderr": 0.047184714852195886 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.44907407407407407, + "acc_stderr": 0.033922384053216174, + "acc_norm": 0.44907407407407407, + "acc_norm_stderr": 0.033922384053216174 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.17206703910614526, + "acc_stderr": 0.012623438533220628, + "acc_norm": 0.17206703910614526, + "acc_norm_stderr": 0.012623438533220628 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.049999999999999996, + "acc_norm": 0.45, + "acc_norm_stderr": 0.049999999999999996 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.74, + "acc_stderr": 0.04408440022768081, + "acc_norm": 0.74, + "acc_norm_stderr": 0.04408440022768081 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5441176470588235, + "acc_stderr": 0.03025437257397671, + "acc_norm": 0.5441176470588235, + "acc_norm_stderr": 0.03025437257397671 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6775510204081633, + "acc_stderr": 0.02992310056368391, + "acc_norm": 
0.6775510204081633, + "acc_norm_stderr": 0.02992310056368391 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7763713080168776, + "acc_stderr": 0.027123298205229966, + "acc_norm": 0.7763713080168776, + "acc_norm_stderr": 0.027123298205229966 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.4152542372881356, + "acc_stderr": 0.012585471793400664, + "acc_norm": 0.4152542372881356, + "acc_norm_stderr": 0.012585471793400664 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.7696078431372549, + "acc_stderr": 0.029554292605695066, + "acc_norm": 0.7696078431372549, + "acc_norm_stderr": 0.029554292605695066 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.7575757575757576, + "acc_stderr": 0.033464098810559534, + "acc_norm": 0.7575757575757576, + "acc_norm_stderr": 0.033464098810559534 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.34394124847001223, + "mc1_stderr": 0.01662908751427676, + "mc2": 0.4866029737522003, + "mc2_stderr": 0.015588084468493631 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5277449822904369, + "acc_stderr": 0.01716386797945602, + "acc_norm": 0.5407319952774499, + "acc_norm_stderr": 0.01713321827653767 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + 
"harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": 
"sinjy1203/EEVE-Korean-Instruct-10.8B-v1.0-Grade-Retrieval", + "model_sha": "0d8023af006339d9db546a87923e6c4cf7e077ce", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/skt/ko-gpt-trinity-1.2B-v0.5/result_2023-09-27 05:12:48.json b/skt/ko-gpt-trinity-1.2B-v0.5/result_2023-09-27 05:12:48.json new file mode 100644 index 0000000000000000000000000000000000000000..b9c8cc5ba2337301063c19ef97238d0de5ac81f6 --- /dev/null +++ b/skt/ko-gpt-trinity-1.2B-v0.5/result_2023-09-27 05:12:48.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.21331058020477817, + "acc_stderr": 0.011970971742326334, + "acc_norm": 0.2687713310580205, + "acc_norm_stderr": 0.012955065963710686 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3132842063333997, + "acc_stderr": 0.004628809258483527, + "acc_norm": 0.3736307508464449, + "acc_norm_stderr": 0.004827786289074844 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.2046783625730994, + "acc_stderr": 0.03094445977853321, + "acc_norm": 0.2046783625730994, + "acc_norm_stderr": 0.03094445977853321 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2524271844660194, + "acc_stderr": 0.04301250399690877, + "acc_norm": 0.2524271844660194, + "acc_norm_stderr": 0.04301250399690877 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2669220945083014, + "acc_stderr": 0.01581845089477755, + "acc_norm": 0.2669220945083014, + "acc_norm_stderr": 0.01581845089477755 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.03591444084196969, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.03591444084196969 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 
0.23404255319148937, + "acc_stderr": 0.027678452578212373, + "acc_norm": 0.23404255319148937, + "acc_norm_stderr": 0.027678452578212373 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3192771084337349, + "acc_stderr": 0.03629335329947859, + "acc_norm": 0.3192771084337349, + "acc_norm_stderr": 0.03629335329947859 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2282958199356913, + "acc_stderr": 0.023839303311398195, + "acc_norm": 0.2282958199356913, + "acc_norm_stderr": 0.023839303311398195 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.20179372197309417, + "acc_stderr": 0.02693611191280227, + "acc_norm": 0.20179372197309417, + "acc_norm_stderr": 0.02693611191280227 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.22900763358778625, + "acc_stderr": 0.036853466317118506, + "acc_norm": 0.22900763358778625, + "acc_norm_stderr": 0.036853466317118506 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.25757575757575757, + "acc_stderr": 0.03115626951964684, + "acc_norm": 0.25757575757575757, + "acc_norm_stderr": 0.03115626951964684 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2413793103448276, + "acc_stderr": 0.03565998174135302, + "acc_norm": 0.2413793103448276, + "acc_norm_stderr": 0.03565998174135302 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.04023382273617747, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.04023382273617747 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.33613445378151263, + "acc_stderr": 0.030684737115135356, + "acc_norm": 0.33613445378151263, + "acc_norm_stderr": 0.030684737115135356 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3230769230769231, + "acc_stderr": 0.02371088850197057, + "acc_norm": 0.3230769230769231, + "acc_norm_stderr": 
0.02371088850197057 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932269, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932269 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.18, + "acc_stderr": 0.038612291966536955, + "acc_norm": 0.18, + "acc_norm_stderr": 0.038612291966536955 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.24074074074074073, + "acc_stderr": 0.04133119440243838, + "acc_norm": 0.24074074074074073, + "acc_norm_stderr": 0.04133119440243838 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3054187192118227, + "acc_stderr": 0.03240661565868408, + "acc_norm": 0.3054187192118227, + "acc_norm_stderr": 0.03240661565868408 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3193548387096774, + "acc_stderr": 0.026522709674667768, + "acc_norm": 0.3193548387096774, + "acc_norm_stderr": 0.026522709674667768 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.23504273504273504, + "acc_stderr": 0.02777883590493544, + "acc_norm": 0.23504273504273504, + "acc_norm_stderr": 0.02777883590493544 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.21509433962264152, + "acc_stderr": 0.02528839450289137, + "acc_norm": 0.21509433962264152, + "acc_norm_stderr": 0.02528839450289137 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.22727272727272727, + "acc_stderr": 0.040139645540727735, + "acc_norm": 0.22727272727272727, + "acc_norm_stderr": 0.040139645540727735 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.026962424325073838, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.026962424325073838 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.19205298013245034, + "acc_stderr": 0.03216298420593612, + "acc_norm": 0.19205298013245034, + "acc_norm_stderr": 0.03216298420593612 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.26865671641791045, + "acc_stderr": 0.03134328358208954, + "acc_norm": 
0.26865671641791045, + "acc_norm_stderr": 0.03134328358208954 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.1791907514450867, + "acc_stderr": 0.02924251305906329, + "acc_norm": 0.1791907514450867, + "acc_norm_stderr": 0.02924251305906329 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.25132275132275134, + "acc_stderr": 0.022340482339643898, + "acc_norm": 0.25132275132275134, + "acc_norm_stderr": 0.022340482339643898 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2638888888888889, + "acc_stderr": 0.03685651095897532, + "acc_norm": 0.2638888888888889, + "acc_norm_stderr": 0.03685651095897532 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.23, + "acc_stderr": 0.042295258468165044, + "acc_norm": 0.23, + "acc_norm_stderr": 0.042295258468165044 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.24566473988439305, + "acc_stderr": 0.02317629820399201, + "acc_norm": 0.24566473988439305, + "acc_norm_stderr": 0.02317629820399201 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3006134969325153, + "acc_stderr": 0.03602511318806771, + "acc_norm": 0.3006134969325153, + "acc_norm_stderr": 0.03602511318806771 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.25308641975308643, + "acc_stderr": 0.024191808600713002, + "acc_norm": 0.25308641975308643, + "acc_norm_stderr": 0.024191808600713002 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.23, + "acc_stderr": 0.042295258468165065, + "acc_norm": 0.23, + "acc_norm_stderr": 0.042295258468165065 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.3471502590673575, + "acc_stderr": 0.03435696168361355, + "acc_norm": 0.3471502590673575, + "acc_norm_stderr": 0.03435696168361355 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 
0.04227054451232199, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.04227054451232199 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.22568807339449543, + "acc_stderr": 0.01792308766780305, + "acc_norm": 0.22568807339449543, + "acc_norm_stderr": 0.01792308766780305 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.04006168083848877, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.04006168083848877 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.02463004897982476, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.02463004897982476 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.35537190082644626, + "acc_stderr": 0.04369236326573981, + "acc_norm": 0.35537190082644626, + "acc_norm_stderr": 0.04369236326573981 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.17105263157894737, + "acc_stderr": 0.030643607071677105, + "acc_norm": 0.17105263157894737, + "acc_norm_stderr": 0.030643607071677105 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2434640522875817, + "acc_stderr": 0.017362473762146623, + "acc_norm": 0.2434640522875817, + "acc_norm_stderr": 0.017362473762146623 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2553191489361702, + "acc_stderr": 0.02601199293090201, + "acc_norm": 0.2553191489361702, + "acc_norm_stderr": 0.02601199293090201 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.17857142857142858, + "acc_stderr": 0.036352091215778065, + "acc_norm": 0.17857142857142858, + "acc_norm_stderr": 0.036352091215778065 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.03400603625538272, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.03400603625538272 + }, + 
"harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27039106145251396, + "acc_stderr": 0.01485499393801008, + "acc_norm": 0.27039106145251396, + "acc_norm_stderr": 0.01485499393801008 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4485294117647059, + "acc_stderr": 0.030211479609121593, + "acc_norm": 0.4485294117647059, + "acc_norm_stderr": 0.030211479609121593 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.17551020408163265, + "acc_stderr": 0.02435280072297001, + "acc_norm": 0.17551020408163265, + "acc_norm_stderr": 0.02435280072297001 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2911392405063291, + "acc_stderr": 0.02957160106575337, + "acc_norm": 0.2911392405063291, + "acc_norm_stderr": 0.02957160106575337 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2405475880052151, + "acc_stderr": 0.010916406735478947, + "acc_norm": 0.2405475880052151, + "acc_norm_stderr": 0.010916406735478947 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.030587591351604246, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.030587591351604246 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.24242424242424243, + "acc_stderr": 0.033464098810559534, + "acc_norm": 0.24242424242424243, + "acc_norm_stderr": 0.033464098810559534 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.25091799265605874, + "mc1_stderr": 0.01517698502770768, + "mc2": 0.4268789482469243, + "mc2_stderr": 0.015138938072410749 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2833530106257379, + "acc_stderr": 0.015492852084597239, + "acc_norm": 
0.39433293978748524, + "acc_norm_stderr": 0.016802090674893196 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + 
"harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "skt/ko-gpt-trinity-1.2B-v0.5", + "model_sha": "33f84c0da333d34533f0cfbe8f5972022d681e96", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/skt/kogpt2-base-v2/result_2023-10-04 13:20:51.json b/skt/kogpt2-base-v2/result_2023-10-04 13:20:51.json new file mode 100644 index 0000000000000000000000000000000000000000..81718d32826184ec3f9bbdd75749050420f89714 --- /dev/null +++ b/skt/kogpt2-base-v2/result_2023-10-04 13:20:51.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.19197952218430034, + "acc_stderr": 0.011509598906598086, + "acc_norm": 0.23976109215017063, + "acc_norm_stderr": 0.012476304127453947 + }, + "harness|ko_hellaswag|10": { + "acc": 0.2806213901613224, + "acc_stderr": 0.004483845735187827, + "acc_norm": 0.3103963353913563, + "acc_norm_stderr": 0.0046171032803720095 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.25146198830409355, + "acc_stderr": 0.033275044238468436, + "acc_norm": 0.25146198830409355, + "acc_norm_stderr": 0.033275044238468436 + }, + 
"harness|ko_mmlu_management|5": { + "acc": 0.17475728155339806, + "acc_stderr": 0.037601780060266196, + "acc_norm": 0.17475728155339806, + "acc_norm_stderr": 0.037601780060266196 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.28607918263090676, + "acc_stderr": 0.016160871405127526, + "acc_norm": 0.28607918263090676, + "acc_norm_stderr": 0.016160871405127526 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.03785714465066654, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.03785714465066654 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.19, + "acc_stderr": 0.03942772444036623, + "acc_norm": 0.19, + "acc_norm_stderr": 0.03942772444036623 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.24680851063829787, + "acc_stderr": 0.0281854413012341, + "acc_norm": 0.24680851063829787, + "acc_norm_stderr": 0.0281854413012341 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.28313253012048195, + "acc_stderr": 0.03507295431370518, + "acc_norm": 0.28313253012048195, + "acc_norm_stderr": 0.03507295431370518 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2797427652733119, + "acc_stderr": 0.025494259350694905, + "acc_norm": 0.2797427652733119, + "acc_norm_stderr": 0.025494259350694905 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3811659192825112, + "acc_stderr": 0.032596251184168264, + "acc_norm": 0.3811659192825112, + "acc_norm_stderr": 0.032596251184168264 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.24427480916030533, + "acc_stderr": 0.037683359597287434, + "acc_norm": 0.24427480916030533, + "acc_norm_stderr": 0.037683359597287434 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.20202020202020202, + "acc_stderr": 0.02860620428922987, + "acc_norm": 0.20202020202020202, + "acc_norm_stderr": 0.02860620428922987 + 
}, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.27586206896551724, + "acc_stderr": 0.037245636197746325, + "acc_norm": 0.27586206896551724, + "acc_norm_stderr": 0.037245636197746325 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237654, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237654 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3403361344537815, + "acc_stderr": 0.030778057422931666, + "acc_norm": 0.3403361344537815, + "acc_norm_stderr": 0.030778057422931666 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3564102564102564, + "acc_stderr": 0.024283140529467295, + "acc_norm": 0.3564102564102564, + "acc_norm_stderr": 0.024283140529467295 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.28703703703703703, + "acc_stderr": 0.043733130409147614, + "acc_norm": 0.28703703703703703, + "acc_norm_stderr": 0.043733130409147614 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.29064039408866993, + "acc_stderr": 0.0319474007226554, + "acc_norm": 0.29064039408866993, + "acc_norm_stderr": 0.0319474007226554 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.23870967741935484, + "acc_stderr": 0.024251071262208834, + "acc_norm": 0.23870967741935484, + "acc_norm_stderr": 0.024251071262208834 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.19658119658119658, + "acc_stderr": 0.02603538609895129, + "acc_norm": 0.19658119658119658, + "acc_norm_stderr": 0.02603538609895129 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2, + "acc_stderr": 0.02461829819586651, + "acc_norm": 0.2, + 
"acc_norm_stderr": 0.02461829819586651 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.23636363636363636, + "acc_stderr": 0.04069306319721377, + "acc_norm": 0.23636363636363636, + "acc_norm_stderr": 0.04069306319721377 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.02684205787383371, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.02684205787383371 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.23841059602649006, + "acc_stderr": 0.0347918557259966, + "acc_norm": 0.23841059602649006, + "acc_norm_stderr": 0.0347918557259966 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.23383084577114427, + "acc_stderr": 0.02992941540834839, + "acc_norm": 0.23383084577114427, + "acc_norm_stderr": 0.02992941540834839 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.28901734104046245, + "acc_stderr": 0.03456425745086999, + "acc_norm": 0.28901734104046245, + "acc_norm_stderr": 0.03456425745086999 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2566137566137566, + "acc_stderr": 0.022494510767503154, + "acc_norm": 0.2566137566137566, + "acc_norm_stderr": 0.022494510767503154 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.22916666666666666, + "acc_stderr": 0.03514697467862388, + "acc_norm": 0.22916666666666666, + "acc_norm_stderr": 0.03514697467862388 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.21, + "acc_stderr": 0.04093601807403326, + "acc_norm": 0.21, + "acc_norm_stderr": 0.04093601807403326 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.24855491329479767, + "acc_stderr": 0.023267528432100174, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 0.023267528432100174 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2331288343558282, + "acc_stderr": 
0.0332201579577674, + "acc_norm": 0.2331288343558282, + "acc_norm_stderr": 0.0332201579577674 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.21604938271604937, + "acc_stderr": 0.022899162918445796, + "acc_norm": 0.21604938271604937, + "acc_norm_stderr": 0.022899162918445796 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.26424870466321243, + "acc_stderr": 0.03182155050916647, + "acc_norm": 0.26424870466321243, + "acc_norm_stderr": 0.03182155050916647 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.04049339297748141, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.04049339297748141 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.21100917431192662, + "acc_stderr": 0.017493922404112648, + "acc_norm": 0.21100917431192662, + "acc_norm_stderr": 0.017493922404112648 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.1746031746031746, + "acc_stderr": 0.033954900208561116, + "acc_norm": 0.1746031746031746, + "acc_norm_stderr": 0.033954900208561116 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.023805186524888156, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.023805186524888156 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2396694214876033, + "acc_stderr": 0.03896878985070417, + "acc_norm": 0.2396694214876033, + "acc_norm_stderr": 0.03896878985070417 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.18421052631578946, + "acc_stderr": 0.0315469804508223, + "acc_norm": 0.18421052631578946, + "acc_norm_stderr": 0.0315469804508223 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 
0.2173202614379085, + "acc_stderr": 0.016684820929148594, + "acc_norm": 0.2173202614379085, + "acc_norm_stderr": 0.016684820929148594 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.23404255319148937, + "acc_stderr": 0.025257861359432414, + "acc_norm": 0.23404255319148937, + "acc_norm_stderr": 0.025257861359432414 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.22321428571428573, + "acc_stderr": 0.039523019677025116, + "acc_norm": 0.22321428571428573, + "acc_norm_stderr": 0.039523019677025116 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.0340470532865388, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.0340470532865388 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24692737430167597, + "acc_stderr": 0.014422292204808852, + "acc_norm": 0.24692737430167597, + "acc_norm_stderr": 0.014422292204808852 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4485294117647059, + "acc_stderr": 0.030211479609121593, + "acc_norm": 0.4485294117647059, + "acc_norm_stderr": 0.030211479609121593 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.39183673469387753, + "acc_stderr": 0.03125127591089165, + "acc_norm": 0.39183673469387753, + "acc_norm_stderr": 0.03125127591089165 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2616033755274262, + "acc_stderr": 0.028609516716994934, + "acc_norm": 0.2616033755274262, + "acc_norm_stderr": 0.028609516716994934 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.24641460234680573, + "acc_stderr": 0.01100597139992723, + "acc_norm": 0.24641460234680573, + 
"acc_norm_stderr": 0.01100597139992723 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.03166009679399812, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.03166009679399812 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03225078108306289, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03225078108306289 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2533659730722154, + "mc1_stderr": 0.01522589934082682, + "mc2": 0.45650352414713125, + "mc2_stderr": 0.015641592781139333 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.36363636363636365, + "acc_stderr": 0.016538691603327715, + "acc_norm": 0.5289256198347108, + "acc_norm_stderr": 0.017161563949916345 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "skt/kogpt2-base-v2", + "model_sha": "d0c0df48bf2b2c9350dd855021a5b216f560c0c7", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/sminpark/ds-alpha-model-v0.1-merged/result_2023-10-12 05:18:50.json b/sminpark/ds-alpha-model-v0.1-merged/result_2023-10-12 05:18:50.json new file 
mode 100644 index 0000000000000000000000000000000000000000..e688cc499ee3f3313ac36f7711f00b889083aa73 --- /dev/null +++ b/sminpark/ds-alpha-model-v0.1-merged/result_2023-10-12 05:18:50.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.27559726962457337, + "acc_stderr": 0.01305716965576184, + "acc_norm": 0.3225255972696246, + "acc_norm_stderr": 0.013659980894277366 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3858793069109739, + "acc_stderr": 0.0048580740134439885, + "acc_norm": 0.4965146385182235, + "acc_norm_stderr": 0.0049896601807921685 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.2982456140350877, + "acc_stderr": 0.03508771929824565, + "acc_norm": 0.2982456140350877, + "acc_norm_stderr": 0.03508771929824565 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.1941747572815534, + "acc_stderr": 0.03916667762822582, + "acc_norm": 0.1941747572815534, + "acc_norm_stderr": 0.03916667762822582 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.25287356321839083, + "acc_stderr": 0.01554337731371968, + "acc_norm": 0.25287356321839083, + "acc_norm_stderr": 0.01554337731371968 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.1925925925925926, + "acc_stderr": 0.034065420585026526, + "acc_norm": 0.1925925925925926, + "acc_norm_stderr": 0.034065420585026526 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.28085106382978725, + "acc_stderr": 0.02937917046412482, + "acc_norm": 0.28085106382978725, + "acc_norm_stderr": 0.02937917046412482 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.2469879518072289, + "acc_stderr": 0.03357351982064537, + "acc_norm": 0.2469879518072289, + "acc_norm_stderr": 0.03357351982064537 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.29260450160771706, + "acc_stderr": 0.02583989833487798, + "acc_norm": 0.29260450160771706, + 
"acc_norm_stderr": 0.02583989833487798 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.19282511210762332, + "acc_stderr": 0.02647824096048936, + "acc_norm": 0.19282511210762332, + "acc_norm_stderr": 0.02647824096048936 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2748091603053435, + "acc_stderr": 0.039153454088478354, + "acc_norm": 0.2748091603053435, + "acc_norm_stderr": 0.039153454088478354 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.2474747474747475, + "acc_stderr": 0.0307463007421245, + "acc_norm": 0.2474747474747475, + "acc_norm_stderr": 0.0307463007421245 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.23448275862068965, + "acc_stderr": 0.035306258743465914, + "acc_norm": 0.23448275862068965, + "acc_norm_stderr": 0.035306258743465914 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.04533838195929775, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.04533838195929775 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.2773109243697479, + "acc_stderr": 0.029079374539480007, + "acc_norm": 0.2773109243697479, + "acc_norm_stderr": 0.029079374539480007 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2923076923076923, + "acc_stderr": 0.023060438380857744, + "acc_norm": 0.2923076923076923, + "acc_norm_stderr": 0.023060438380857744 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816505, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816505 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.24074074074074073, + "acc_stderr": 0.041331194402438376, + "acc_norm": 
0.24074074074074073, + "acc_norm_stderr": 0.041331194402438376 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.23645320197044334, + "acc_stderr": 0.029896114291733555, + "acc_norm": 0.23645320197044334, + "acc_norm_stderr": 0.029896114291733555 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2064516129032258, + "acc_stderr": 0.02302589961718871, + "acc_norm": 0.2064516129032258, + "acc_norm_stderr": 0.02302589961718871 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.27350427350427353, + "acc_stderr": 0.029202540153431177, + "acc_norm": 0.27350427350427353, + "acc_norm_stderr": 0.029202540153431177 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.26037735849056604, + "acc_stderr": 0.027008766090708097, + "acc_norm": 0.26037735849056604, + "acc_norm_stderr": 0.027008766090708097 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.23636363636363636, + "acc_stderr": 0.04069306319721375, + "acc_norm": 0.23636363636363636, + "acc_norm_stderr": 0.04069306319721375 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.22592592592592592, + "acc_stderr": 0.02549753263960954, + "acc_norm": 0.22592592592592592, + "acc_norm_stderr": 0.02549753263960954 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2582781456953642, + "acc_stderr": 0.035737053147634576, + "acc_norm": 0.2582781456953642, + "acc_norm_stderr": 0.035737053147634576 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.22885572139303484, + "acc_stderr": 0.029705284056772432, + "acc_norm": 0.22885572139303484, + "acc_norm_stderr": 0.029705284056772432 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.24855491329479767, + "acc_stderr": 0.03295304696818318, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 0.03295304696818318 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2566137566137566, + "acc_stderr": 0.022494510767503154, + "acc_norm": 0.2566137566137566, + "acc_norm_stderr": 0.022494510767503154 + }, + 
"harness|ko_mmlu_college_biology|5": { + "acc": 0.2708333333333333, + "acc_stderr": 0.03716177437566018, + "acc_norm": 0.2708333333333333, + "acc_norm_stderr": 0.03716177437566018 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2514450867052023, + "acc_stderr": 0.02335736578587404, + "acc_norm": 0.2514450867052023, + "acc_norm_stderr": 0.02335736578587404 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3128834355828221, + "acc_stderr": 0.03642914578292404, + "acc_norm": 0.3128834355828221, + "acc_norm_stderr": 0.03642914578292404 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.25617283950617287, + "acc_stderr": 0.024288533637726095, + "acc_norm": 0.25617283950617287, + "acc_norm_stderr": 0.024288533637726095 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.30569948186528495, + "acc_stderr": 0.03324837939758159, + "acc_norm": 0.30569948186528495, + "acc_norm_stderr": 0.03324837939758159 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.041424397194893624, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.041424397194893624 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.26238532110091745, + "acc_stderr": 0.018861885021534734, + "acc_norm": 0.26238532110091745, + "acc_norm_stderr": 0.018861885021534734 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.19047619047619047, + "acc_stderr": 0.03512207412302053, + "acc_norm": 0.19047619047619047, + "acc_norm_stderr": 
0.03512207412302053 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.024630048979824775, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.024630048979824775 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036844, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036844 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.19008264462809918, + "acc_stderr": 0.03581796951709282, + "acc_norm": 0.19008264462809918, + "acc_norm_stderr": 0.03581796951709282 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.17763157894736842, + "acc_stderr": 0.03110318238312338, + "acc_norm": 0.17763157894736842, + "acc_norm_stderr": 0.03110318238312338 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.25163398692810457, + "acc_stderr": 0.01755581809132227, + "acc_norm": 0.25163398692810457, + "acc_norm_stderr": 0.01755581809132227 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2375886524822695, + "acc_stderr": 0.025389512552729906, + "acc_norm": 0.2375886524822695, + "acc_norm_stderr": 0.025389512552729906 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3125, + "acc_stderr": 0.043994650575715215, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.043994650575715215 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4398148148148148, + "acc_stderr": 0.03385177976044811, + "acc_norm": 0.4398148148148148, + "acc_norm_stderr": 0.03385177976044811 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23575418994413408, + "acc_stderr": 0.014196375686290804, + "acc_norm": 0.23575418994413408, + "acc_norm_stderr": 0.014196375686290804 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.24, + 
"acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.19852941176470587, + "acc_stderr": 0.02423101337054109, + "acc_norm": 0.19852941176470587, + "acc_norm_stderr": 0.02423101337054109 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.31020408163265306, + "acc_stderr": 0.029613459872484378, + "acc_norm": 0.31020408163265306, + "acc_norm_stderr": 0.029613459872484378 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.25316455696202533, + "acc_stderr": 0.0283046579430353, + "acc_norm": 0.25316455696202533, + "acc_norm_stderr": 0.0283046579430353 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2588005215123859, + "acc_stderr": 0.011186109046564608, + "acc_norm": 0.2588005215123859, + "acc_norm_stderr": 0.011186109046564608 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.03019028245350195, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.03019028245350195 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2727272727272727, + "acc_stderr": 0.03477691162163659, + "acc_norm": 0.2727272727272727, + "acc_norm_stderr": 0.03477691162163659 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2521419828641371, + "mc1_stderr": 0.01520152224629995, + "mc2": 0.39714724864543566, + "mc2_stderr": 0.014754643585296967 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3246753246753247, + "acc_stderr": 0.016098883939346453, + "acc_norm": 0.38961038961038963, + "acc_norm_stderr": 0.016766161671893497 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + 
"harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + 
"harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "sminpark/ds-alpha-model-v0.1-merged", + "model_sha": "877c87e7e62fa297f23e49e4aed3a2c0398a920a", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/sohohuk/test1/result_2023-11-01 06:10:38.json b/sohohuk/test1/result_2023-11-01 06:10:38.json new file mode 100644 index 0000000000000000000000000000000000000000..0970ae0bd33668e1adc4e4b6d963098b8aa913be --- /dev/null +++ b/sohohuk/test1/result_2023-11-01 06:10:38.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3191126279863481, + "acc_stderr": 0.013621696119173304, + "acc_norm": 0.36860068259385664, + "acc_norm_stderr": 0.014097810678042196 + }, + "harness|ko_hellaswag|10": { + "acc": 0.35261900019916353, + "acc_stderr": 0.004768088918512186, + "acc_norm": 0.4509061939852619, + "acc_norm_stderr": 0.004965670398127352 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5087719298245614, + "acc_stderr": 0.038342347441649924, + "acc_norm": 0.5087719298245614, + "acc_norm_stderr": 0.038342347441649924 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5922330097087378, + "acc_stderr": 0.04865777570410768, + "acc_norm": 0.5922330097087378, + "acc_norm_stderr": 0.04865777570410768 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.454661558109834, + "acc_stderr": 0.017806304585052602, + "acc_norm": 0.454661558109834, + "acc_norm_stderr": 0.017806304585052602 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.042446332383532286, + "acc_norm": 
0.4074074074074074, + "acc_norm_stderr": 0.042446332383532286 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39148936170212767, + "acc_stderr": 0.03190701242326812, + "acc_norm": 0.39148936170212767, + "acc_norm_stderr": 0.03190701242326812 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3855421686746988, + "acc_stderr": 0.037891344246115496, + "acc_norm": 0.3855421686746988, + "acc_norm_stderr": 0.037891344246115496 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.45016077170418006, + "acc_stderr": 0.028256660723360184, + "acc_norm": 0.45016077170418006, + "acc_norm_stderr": 0.028256660723360184 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.39461883408071746, + "acc_stderr": 0.03280400504755291, + "acc_norm": 0.39461883408071746, + "acc_norm_stderr": 0.03280400504755291 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4580152671755725, + "acc_stderr": 0.04369802690578756, + "acc_norm": 0.4580152671755725, + "acc_norm_stderr": 0.04369802690578756 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5606060606060606, + "acc_stderr": 0.035360859475294805, + "acc_norm": 0.5606060606060606, + "acc_norm_stderr": 0.035360859475294805 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4827586206896552, + "acc_stderr": 0.04164188720169377, + "acc_norm": 0.4827586206896552, + "acc_norm_stderr": 0.04164188720169377 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.043364327079931785, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.043364327079931785 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4831932773109244, + "acc_stderr": 
0.03246013680375308, + "acc_norm": 0.4831932773109244, + "acc_norm_stderr": 0.03246013680375308 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4461538461538462, + "acc_stderr": 0.025203571773028333, + "acc_norm": 0.4461538461538462, + "acc_norm_stderr": 0.025203571773028333 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5092592592592593, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.5092592592592593, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3694581280788177, + "acc_stderr": 0.03395970381998575, + "acc_norm": 0.3694581280788177, + "acc_norm_stderr": 0.03395970381998575 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4483870967741935, + "acc_stderr": 0.028292056830112735, + "acc_norm": 0.4483870967741935, + "acc_norm_stderr": 0.028292056830112735 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6794871794871795, + "acc_stderr": 0.0305728113102996, + "acc_norm": 0.6794871794871795, + "acc_norm_stderr": 0.0305728113102996 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.41132075471698115, + "acc_stderr": 0.03028500925900981, + "acc_norm": 0.41132075471698115, + "acc_norm_stderr": 0.03028500925900981 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4909090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.4909090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.31851851851851853, + "acc_stderr": 0.02840653309060846, + "acc_norm": 0.31851851851851853, + "acc_norm_stderr": 0.02840653309060846 + }, + "harness|ko_mmlu_high_school_physics|5": { + 
"acc": 0.31125827814569534, + "acc_stderr": 0.03780445850526733, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.03780445850526733 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5522388059701493, + "acc_stderr": 0.035161847729521675, + "acc_norm": 0.5522388059701493, + "acc_norm_stderr": 0.035161847729521675 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3236994219653179, + "acc_stderr": 0.03567603799639171, + "acc_norm": 0.3236994219653179, + "acc_norm_stderr": 0.03567603799639171 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.37566137566137564, + "acc_stderr": 0.02494236893115978, + "acc_norm": 0.37566137566137564, + "acc_norm_stderr": 0.02494236893115978 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2847222222222222, + "acc_stderr": 0.03773809990686935, + "acc_norm": 0.2847222222222222, + "acc_norm_stderr": 0.03773809990686935 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465918, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465918 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.48554913294797686, + "acc_stderr": 0.02690784985628254, + "acc_norm": 0.48554913294797686, + "acc_norm_stderr": 0.02690784985628254 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3803680981595092, + "acc_stderr": 0.038142698932618374, + "acc_norm": 0.3803680981595092, + "acc_norm_stderr": 0.038142698932618374 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4783950617283951, + "acc_stderr": 0.027794760105008736, + "acc_norm": 0.4783950617283951, + "acc_norm_stderr": 0.027794760105008736 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + 
"harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.45595854922279794, + "acc_stderr": 0.03594413711272437, + "acc_norm": 0.45595854922279794, + "acc_norm_stderr": 0.03594413711272437 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.042270544512322, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.042270544512322 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.47155963302752296, + "acc_stderr": 0.021402615697348047, + "acc_norm": 0.47155963302752296, + "acc_norm_stderr": 0.021402615697348047 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.36507936507936506, + "acc_stderr": 0.04306241259127153, + "acc_norm": 0.36507936507936506, + "acc_norm_stderr": 0.04306241259127153 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4477124183006536, + "acc_stderr": 0.028472938478033526, + "acc_norm": 0.4477124183006536, + "acc_norm_stderr": 0.028472938478033526 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6198347107438017, + "acc_stderr": 0.04431324501968431, + "acc_norm": 0.6198347107438017, + "acc_norm_stderr": 0.04431324501968431 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3881578947368421, + "acc_stderr": 0.03965842097512744, + "acc_norm": 0.3881578947368421, + "acc_norm_stderr": 0.03965842097512744 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3741830065359477, + "acc_stderr": 0.019576953122088837, + "acc_norm": 0.3741830065359477, + "acc_norm_stderr": 0.019576953122088837 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.35815602836879434, + "acc_stderr": 0.028602085862759412, + "acc_norm": 0.35815602836879434, + "acc_norm_stderr": 0.028602085862759412 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3125, + "acc_stderr": 0.043994650575715215, + "acc_norm": 0.3125, 
+ "acc_norm_stderr": 0.043994650575715215 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.03293377139415191, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.03293377139415191 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24022346368715083, + "acc_stderr": 0.014288343803925303, + "acc_norm": 0.24022346368715083, + "acc_norm_stderr": 0.014288343803925303 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.57, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.57, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.40808823529411764, + "acc_stderr": 0.029855261393483924, + "acc_norm": 0.40808823529411764, + "acc_norm_stderr": 0.029855261393483924 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4897959183673469, + "acc_stderr": 0.032002553478937816, + "acc_norm": 0.4897959183673469, + "acc_norm_stderr": 0.032002553478937816 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.4978902953586498, + "acc_stderr": 0.032546938018020076, + "acc_norm": 0.4978902953586498, + "acc_norm_stderr": 0.032546938018020076 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.31421121251629724, + "acc_stderr": 0.01185591158704823, + "acc_norm": 0.31421121251629724, + "acc_norm_stderr": 0.01185591158704823 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.03460228327239171, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.03460228327239171 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3939393939393939, + "acc_stderr": 0.0381549430868893, + "acc_norm": 0.3939393939393939, + "acc_norm_stderr": 0.0381549430868893 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 
0.32068543451652387, + "mc1_stderr": 0.016339170373280906, + "mc2": 0.5002957366542341, + "mc2_stderr": 0.015624413933134037 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4167650531286895, + "acc_stderr": 0.01695048914610883, + "acc_norm": 0.48406139315230223, + "acc_norm_stderr": 0.017181617837190195 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "sohohuk/test1", + "model_sha": "b7b22b459409508e80e7d6a72c09e5e5f765428e", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/spow12/EEVE_ver_4.1_sft/result_2024-04-18 02:10:43.json b/spow12/EEVE_ver_4.1_sft/result_2024-04-18 02:10:43.json new file mode 100644 index 0000000000000000000000000000000000000000..581ae0b78f968841726201c86a87dc13ac2d85ca --- /dev/null +++ b/spow12/EEVE_ver_4.1_sft/result_2024-04-18 02:10:43.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.47525597269624575, + "acc_stderr": 0.014593487694937736, + "acc_norm": 0.5221843003412969, + "acc_norm_stderr": 0.014597001927076136 + }, + "harness|ko_hellaswag|10": { + "acc": 0.46195976897032465, + "acc_stderr": 0.004975319435777099, + 
"acc_norm": 0.6344353714399522, + "acc_norm_stderr": 0.004806039039008948 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6783625730994152, + "acc_stderr": 0.03582529442573122, + "acc_norm": 0.6783625730994152, + "acc_norm_stderr": 0.03582529442573122 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6893203883495146, + "acc_stderr": 0.04582124160161552, + "acc_norm": 0.6893203883495146, + "acc_norm_stderr": 0.04582124160161552 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.7151979565772669, + "acc_stderr": 0.016139174096522595, + "acc_norm": 0.7151979565772669, + "acc_norm_stderr": 0.016139174096522595 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4962962962962963, + "acc_stderr": 0.043192236258113303, + "acc_norm": 0.4962962962962963, + "acc_norm_stderr": 0.043192236258113303 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909284, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909284 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.5191489361702127, + "acc_stderr": 0.0326620429906468, + "acc_norm": 0.5191489361702127, + "acc_norm_stderr": 0.0326620429906468 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.5120481927710844, + "acc_stderr": 0.03891364495835817, + "acc_norm": 0.5120481927710844, + "acc_norm_stderr": 0.03891364495835817 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6141479099678456, + "acc_stderr": 0.02764814959975147, + "acc_norm": 0.6141479099678456, + "acc_norm_stderr": 0.02764814959975147 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5739910313901345, + "acc_stderr": 0.033188332862172806, + "acc_norm": 0.5739910313901345, + "acc_norm_stderr": 0.033188332862172806 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6030534351145038, + "acc_stderr": 0.04291135671009224, + "acc_norm": 0.6030534351145038, + "acc_norm_stderr": 0.04291135671009224 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.45, + "acc_stderr": 0.04999999999999999, + 
"acc_norm": 0.45, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7676767676767676, + "acc_stderr": 0.030088629490217483, + "acc_norm": 0.7676767676767676, + "acc_norm_stderr": 0.030088629490217483 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5379310344827586, + "acc_stderr": 0.04154659671707548, + "acc_norm": 0.5379310344827586, + "acc_norm_stderr": 0.04154659671707548 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.35294117647058826, + "acc_stderr": 0.04755129616062946, + "acc_norm": 0.35294117647058826, + "acc_norm_stderr": 0.04755129616062946 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6218487394957983, + "acc_stderr": 0.03149930577784906, + "acc_norm": 0.6218487394957983, + "acc_norm_stderr": 0.03149930577784906 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5333333333333333, + "acc_stderr": 0.025294608023986455, + "acc_norm": 0.5333333333333333, + "acc_norm_stderr": 0.025294608023986455 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.57, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.57, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6759259259259259, + "acc_stderr": 0.045245960070300476, + "acc_norm": 0.6759259259259259, + "acc_norm_stderr": 0.045245960070300476 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4236453201970443, + "acc_stderr": 0.034767257476490364, + "acc_norm": 0.4236453201970443, + "acc_norm_stderr": 0.034767257476490364 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6612903225806451, + "acc_stderr": 0.026923446059302834, + "acc_norm": 0.6612903225806451, + "acc_norm_stderr": 0.026923446059302834 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.811965811965812, + 
"acc_stderr": 0.025598193686652265, + "acc_norm": 0.811965811965812, + "acc_norm_stderr": 0.025598193686652265 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5811320754716981, + "acc_stderr": 0.030365050829115205, + "acc_norm": 0.5811320754716981, + "acc_norm_stderr": 0.030365050829115205 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5909090909090909, + "acc_stderr": 0.04709306978661895, + "acc_norm": 0.5909090909090909, + "acc_norm_stderr": 0.04709306978661895 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.32592592592592595, + "acc_stderr": 0.02857834836547308, + "acc_norm": 0.32592592592592595, + "acc_norm_stderr": 0.02857834836547308 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3443708609271523, + "acc_stderr": 0.038796870240733264, + "acc_norm": 0.3443708609271523, + "acc_norm_stderr": 0.038796870240733264 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7114427860696517, + "acc_stderr": 0.03203841040213321, + "acc_norm": 0.7114427860696517, + "acc_norm_stderr": 0.03203841040213321 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.49710982658959535, + "acc_stderr": 0.03812400565974833, + "acc_norm": 0.49710982658959535, + "acc_norm_stderr": 0.03812400565974833 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3968253968253968, + "acc_stderr": 0.025197101074246483, + "acc_norm": 0.3968253968253968, + "acc_norm_stderr": 0.025197101074246483 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5486111111111112, + "acc_stderr": 0.04161402398403279, + "acc_norm": 0.5486111111111112, + "acc_norm_stderr": 0.04161402398403279 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.74, + "acc_stderr": 0.04408440022768077, + "acc_norm": 0.74, + "acc_norm_stderr": 0.04408440022768077 + }, + 
"harness|ko_mmlu_moral_disputes|5": { + "acc": 0.6098265895953757, + "acc_stderr": 0.026261677607806642, + "acc_norm": 0.6098265895953757, + "acc_norm_stderr": 0.026261677607806642 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5705521472392638, + "acc_stderr": 0.038890666191127236, + "acc_norm": 0.5705521472392638, + "acc_norm_stderr": 0.038890666191127236 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6265432098765432, + "acc_stderr": 0.026915003011380157, + "acc_norm": 0.6265432098765432, + "acc_norm_stderr": 0.026915003011380157 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7409326424870466, + "acc_stderr": 0.031618779179354094, + "acc_norm": 0.7409326424870466, + "acc_norm_stderr": 0.031618779179354094 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.4824561403508772, + "acc_stderr": 0.04700708033551038, + "acc_norm": 0.4824561403508772, + "acc_norm_stderr": 0.04700708033551038 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.7247706422018348, + "acc_stderr": 0.019149093743155196, + "acc_norm": 0.7247706422018348, + "acc_norm_stderr": 0.019149093743155196 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3968253968253968, + "acc_stderr": 0.04375888492727061, + "acc_norm": 0.3968253968253968, + "acc_norm_stderr": 0.04375888492727061 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5980392156862745, + "acc_stderr": 0.028074158947600656, + "acc_norm": 0.5980392156862745, + "acc_norm_stderr": 0.028074158947600656 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7024793388429752, + "acc_stderr": 0.04173349148083499, + "acc_norm": 0.7024793388429752, + 
"acc_norm_stderr": 0.04173349148083499 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5986842105263158, + "acc_stderr": 0.039889037033362836, + "acc_norm": 0.5986842105263158, + "acc_norm_stderr": 0.039889037033362836 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5130718954248366, + "acc_stderr": 0.020220920829626916, + "acc_norm": 0.5130718954248366, + "acc_norm_stderr": 0.020220920829626916 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.38652482269503546, + "acc_stderr": 0.02904919034254347, + "acc_norm": 0.38652482269503546, + "acc_norm_stderr": 0.02904919034254347 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4107142857142857, + "acc_stderr": 0.046695106638751926, + "acc_norm": 0.4107142857142857, + "acc_norm_stderr": 0.046695106638751926 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5416666666666666, + "acc_stderr": 0.03398110890294636, + "acc_norm": 0.5416666666666666, + "acc_norm_stderr": 0.03398110890294636 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2346368715083799, + "acc_stderr": 0.014173044098303682, + "acc_norm": 0.2346368715083799, + "acc_norm_stderr": 0.014173044098303682 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.68, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.68, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5294117647058824, + "acc_stderr": 0.030320243265004137, + "acc_norm": 0.5294117647058824, + "acc_norm_stderr": 0.030320243265004137 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6857142857142857, + "acc_stderr": 0.029719329422417468, + "acc_norm": 0.6857142857142857, + "acc_norm_stderr": 0.029719329422417468 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 
0.7805907172995781, + "acc_stderr": 0.026939106581553945, + "acc_norm": 0.7805907172995781, + "acc_norm_stderr": 0.026939106581553945 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.41199478487614083, + "acc_stderr": 0.012570871032146066, + "acc_norm": 0.41199478487614083, + "acc_norm_stderr": 0.012570871032146066 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.7254901960784313, + "acc_stderr": 0.03132179803083291, + "acc_norm": 0.7254901960784313, + "acc_norm_stderr": 0.03132179803083291 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.7272727272727273, + "acc_stderr": 0.0347769116216366, + "acc_norm": 0.7272727272727273, + "acc_norm_stderr": 0.0347769116216366 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.31211750305997554, + "mc1_stderr": 0.01622075676952091, + "mc2": 0.4619508789361263, + "mc2_stderr": 0.015273481705173996 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5501770956316411, + "acc_stderr": 0.01710357334382571, + "acc_norm": 0.5903187721369539, + "acc_norm_stderr": 0.01690756819221948 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + 
"harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "spow12/EEVE_ver_4.1_sft", + "model_sha": "82bc086dde735be1cbc5e3318af148bbfa15553a", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + 
"num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/spow12/Ko-Qwen2-7B-Instruct/result_2024-06-12 01:52:30.json b/spow12/Ko-Qwen2-7B-Instruct/result_2024-06-12 01:52:30.json new file mode 100644 index 0000000000000000000000000000000000000000..92d0082615f24273728dafaaadf0eae293168f92 --- /dev/null +++ b/spow12/Ko-Qwen2-7B-Instruct/result_2024-06-12 01:52:30.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3609215017064846, + "acc_stderr": 0.014034761386175458, + "acc_norm": 0.4308873720136519, + "acc_norm_stderr": 0.014471133392642483 + }, + "harness|ko_hellaswag|10": { + "acc": 0.39026090420235016, + "acc_stderr": 0.00486811759848194, + "acc_norm": 0.5207130053774148, + "acc_norm_stderr": 0.004985498055190362 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.672514619883041, + "acc_stderr": 0.035993357714560276, + "acc_norm": 0.672514619883041, + "acc_norm_stderr": 0.035993357714560276 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.7766990291262136, + "acc_stderr": 0.04123553189891431, + "acc_norm": 0.7766990291262136, + "acc_norm_stderr": 0.04123553189891431 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6551724137931034, + "acc_stderr": 0.01699712334611345, + "acc_norm": 0.6551724137931034, + "acc_norm_stderr": 0.01699712334611345 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.04244633238353229, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.04244633238353229 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.6212765957446809, + "acc_stderr": 0.03170995606040655, + "acc_norm": 0.6212765957446809, + "acc_norm_stderr": 0.03170995606040655 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.463855421686747, + "acc_stderr": 
0.03882310850890594, + "acc_norm": 0.463855421686747, + "acc_norm_stderr": 0.03882310850890594 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.662379421221865, + "acc_stderr": 0.026858825879488554, + "acc_norm": 0.662379421221865, + "acc_norm_stderr": 0.026858825879488554 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5874439461883408, + "acc_stderr": 0.03304062175449297, + "acc_norm": 0.5874439461883408, + "acc_norm_stderr": 0.03304062175449297 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5954198473282443, + "acc_stderr": 0.043046937953806645, + "acc_norm": 0.5954198473282443, + "acc_norm_stderr": 0.043046937953806645 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.53, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.53, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7626262626262627, + "acc_stderr": 0.030313710538198917, + "acc_norm": 0.7626262626262627, + "acc_norm_stderr": 0.030313710538198917 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.6068965517241379, + "acc_stderr": 0.040703290137070705, + "acc_norm": 0.6068965517241379, + "acc_norm_stderr": 0.040703290137070705 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.04835503696107223, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.04835503696107223 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.7058823529411765, + "acc_stderr": 0.02959732973097809, + "acc_norm": 0.7058823529411765, + "acc_norm_stderr": 0.02959732973097809 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.6743589743589744, + "acc_stderr": 0.023759665767412286, + "acc_norm": 0.6743589743589744, + "acc_norm_stderr": 0.023759665767412286 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.74, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.74, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 
0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.7222222222222222, + "acc_stderr": 0.04330043749650743, + "acc_norm": 0.7222222222222222, + "acc_norm_stderr": 0.04330043749650743 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.5221674876847291, + "acc_stderr": 0.03514528562175007, + "acc_norm": 0.5221674876847291, + "acc_norm_stderr": 0.03514528562175007 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.667741935483871, + "acc_stderr": 0.0267955608481228, + "acc_norm": 0.667741935483871, + "acc_norm_stderr": 0.0267955608481228 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.8034188034188035, + "acc_stderr": 0.02603538609895129, + "acc_norm": 0.8034188034188035, + "acc_norm_stderr": 0.02603538609895129 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.6339622641509434, + "acc_stderr": 0.029647813539365252, + "acc_norm": 0.6339622641509434, + "acc_norm_stderr": 0.029647813539365252 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6090909090909091, + "acc_stderr": 0.046737523336702384, + "acc_norm": 0.6090909090909091, + "acc_norm_stderr": 0.046737523336702384 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.5185185185185185, + "acc_stderr": 0.03046462171889531, + "acc_norm": 0.5185185185185185, + "acc_norm_stderr": 0.03046462171889531 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3708609271523179, + "acc_stderr": 0.03943966699183629, + "acc_norm": 0.3708609271523179, + "acc_norm_stderr": 0.03943966699183629 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7064676616915423, + "acc_stderr": 0.032200241045342054, + "acc_norm": 0.7064676616915423, + "acc_norm_stderr": 0.032200241045342054 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5838150289017341, + "acc_stderr": 0.03758517775404948, + "acc_norm": 0.5838150289017341, + "acc_norm_stderr": 0.03758517775404948 + }, + 
"harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.6005291005291006, + "acc_stderr": 0.02522545028406793, + "acc_norm": 0.6005291005291006, + "acc_norm_stderr": 0.02522545028406793 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5138888888888888, + "acc_stderr": 0.04179596617581, + "acc_norm": 0.5138888888888888, + "acc_norm_stderr": 0.04179596617581 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.72, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.72, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.6271676300578035, + "acc_stderr": 0.026033890613576288, + "acc_norm": 0.6271676300578035, + "acc_norm_stderr": 0.026033890613576288 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5766871165644172, + "acc_stderr": 0.03881891213334384, + "acc_norm": 0.5766871165644172, + "acc_norm_stderr": 0.03881891213334384 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6234567901234568, + "acc_stderr": 0.026959344518747787, + "acc_norm": 0.6234567901234568, + "acc_norm_stderr": 0.026959344518747787 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6632124352331606, + "acc_stderr": 0.03410780251836184, + "acc_norm": 0.6632124352331606, + "acc_norm_stderr": 0.03410780251836184 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.5087719298245614, + "acc_stderr": 0.04702880432049615, + "acc_norm": 0.5087719298245614, + "acc_norm_stderr": 0.04702880432049615 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6990825688073394, + "acc_stderr": 0.019664751366802114, + "acc_norm": 0.6990825688073394, + "acc_norm_stderr": 0.019664751366802114 + }, + 
"harness|ko_mmlu_formal_logic|5": { + "acc": 0.49206349206349204, + "acc_stderr": 0.044715725362943486, + "acc_norm": 0.49206349206349204, + "acc_norm_stderr": 0.044715725362943486 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.6143790849673203, + "acc_stderr": 0.02787074527829028, + "acc_norm": 0.6143790849673203, + "acc_norm_stderr": 0.02787074527829028 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.65, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.65, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7603305785123967, + "acc_stderr": 0.03896878985070417, + "acc_norm": 0.7603305785123967, + "acc_norm_stderr": 0.03896878985070417 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.6710526315789473, + "acc_stderr": 0.03823428969926604, + "acc_norm": 0.6710526315789473, + "acc_norm_stderr": 0.03823428969926604 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5196078431372549, + "acc_stderr": 0.020212274976302954, + "acc_norm": 0.5196078431372549, + "acc_norm_stderr": 0.020212274976302954 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.44680851063829785, + "acc_stderr": 0.029658235097666907, + "acc_norm": 0.44680851063829785, + "acc_norm_stderr": 0.029658235097666907 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.04697113923010213, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.04697113923010213 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5833333333333334, + "acc_stderr": 0.03362277436608044, + "acc_norm": 0.5833333333333334, + "acc_norm_stderr": 0.03362277436608044 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.4424581005586592, + "acc_stderr": 0.016611393687268574, + "acc_norm": 0.4424581005586592, + "acc_norm_stderr": 0.016611393687268574 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + 
"acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.74, + "acc_stderr": 0.04408440022768081, + "acc_norm": 0.74, + "acc_norm_stderr": 0.04408440022768081 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5808823529411765, + "acc_stderr": 0.029972807170464622, + "acc_norm": 0.5808823529411765, + "acc_norm_stderr": 0.029972807170464622 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6530612244897959, + "acc_stderr": 0.030472526026726496, + "acc_norm": 0.6530612244897959, + "acc_norm_stderr": 0.030472526026726496 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7341772151898734, + "acc_stderr": 0.028756799629658335, + "acc_norm": 0.7341772151898734, + "acc_norm_stderr": 0.028756799629658335 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.408735332464146, + "acc_stderr": 0.01255570134670338, + "acc_norm": 0.408735332464146, + "acc_norm_stderr": 0.01255570134670338 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6813725490196079, + "acc_stderr": 0.032702871814820816, + "acc_norm": 0.6813725490196079, + "acc_norm_stderr": 0.032702871814820816 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.036810508691615486, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.036810508691615486 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3011015911872705, + "mc1_stderr": 0.01605899902610062, + "mc2": 0.4662070776724838, + "mc2_stderr": 0.015581509583720368 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.6103896103896104, + "acc_stderr": 0.016766161671893504, + "acc_norm": 0.641086186540732, + "acc_norm_stderr": 0.016491802102999036 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + 
"harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + 
"harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "spow12/Ko-Qwen2-7B-Instruct", + "model_sha": "046e9eb04560b2dbb55790d46e6062f52a953b7b", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/spow12/KoSOLAR-10.7B_instruct/result_2024-01-05 01:43:09.json b/spow12/KoSOLAR-10.7B_instruct/result_2024-01-05 01:43:09.json new file mode 100644 index 0000000000000000000000000000000000000000..60c8247c77e499d9b9eee3ab01768756c62ab4eb --- /dev/null +++ b/spow12/KoSOLAR-10.7B_instruct/result_2024-01-05 01:43:09.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4206484641638225, + "acc_stderr": 0.0144262112525084, + "acc_norm": 0.47696245733788395, + "acc_norm_stderr": 0.014595873205358278 + }, + "harness|ko_hellaswag|10": { + "acc": 0.35610436168094006, + "acc_stderr": 0.004778679507786501, + "acc_norm": 0.4565823541127266, + "acc_norm_stderr": 0.004970933420231926 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6257309941520468, + "acc_stderr": 0.03711601185389481, + "acc_norm": 0.6257309941520468, + "acc_norm_stderr": 0.03711601185389481 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6504854368932039, + "acc_stderr": 0.04721188506097171, + "acc_norm": 0.6504854368932039, + "acc_norm_stderr": 0.04721188506097171 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6206896551724138, + "acc_stderr": 0.017351268117544442, + 
"acc_norm": 0.6206896551724138, + "acc_norm_stderr": 0.017351268117544442 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.5185185185185185, + "acc_stderr": 0.04316378599511324, + "acc_norm": 0.5185185185185185, + "acc_norm_stderr": 0.04316378599511324 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.48936170212765956, + "acc_stderr": 0.03267862331014063, + "acc_norm": 0.48936170212765956, + "acc_norm_stderr": 0.03267862331014063 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4759036144578313, + "acc_stderr": 0.03887971849597264, + "acc_norm": 0.4759036144578313, + "acc_norm_stderr": 0.03887971849597264 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5755627009646302, + "acc_stderr": 0.028071928247946205, + "acc_norm": 0.5755627009646302, + "acc_norm_stderr": 0.028071928247946205 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.547085201793722, + "acc_stderr": 0.03340867501923324, + "acc_norm": 0.547085201793722, + "acc_norm_stderr": 0.03340867501923324 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5877862595419847, + "acc_stderr": 0.04317171194870254, + "acc_norm": 0.5877862595419847, + "acc_norm_stderr": 0.04317171194870254 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6919191919191919, + "acc_stderr": 0.03289477330098614, + "acc_norm": 0.6919191919191919, + "acc_norm_stderr": 0.03289477330098614 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4896551724137931, + "acc_stderr": 0.04165774775728763, + "acc_norm": 0.4896551724137931, + "acc_norm_stderr": 0.04165774775728763 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.04533838195929776, 
+ "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.04533838195929776 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5126050420168067, + "acc_stderr": 0.032468167657521745, + "acc_norm": 0.5126050420168067, + "acc_norm_stderr": 0.032468167657521745 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.49230769230769234, + "acc_stderr": 0.025348006031534805, + "acc_norm": 0.49230769230769234, + "acc_norm_stderr": 0.025348006031534805 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5925925925925926, + "acc_stderr": 0.047500773411999854, + "acc_norm": 0.5925925925925926, + "acc_norm_stderr": 0.047500773411999854 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4039408866995074, + "acc_stderr": 0.0345245390382204, + "acc_norm": 0.4039408866995074, + "acc_norm_stderr": 0.0345245390382204 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5451612903225806, + "acc_stderr": 0.02832774309156106, + "acc_norm": 0.5451612903225806, + "acc_norm_stderr": 0.02832774309156106 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7735042735042735, + "acc_stderr": 0.027421007295392926, + "acc_norm": 0.7735042735042735, + "acc_norm_stderr": 0.027421007295392926 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4830188679245283, + "acc_stderr": 0.030755120364119905, + "acc_norm": 0.4830188679245283, + "acc_norm_stderr": 0.030755120364119905 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5727272727272728, + "acc_stderr": 0.047381987035454834, + "acc_norm": 0.5727272727272728, + "acc_norm_stderr": 0.047381987035454834 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 
0.3111111111111111, + "acc_stderr": 0.028226446749683515, + "acc_norm": 0.3111111111111111, + "acc_norm_stderr": 0.028226446749683515 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3443708609271523, + "acc_stderr": 0.03879687024073327, + "acc_norm": 0.3443708609271523, + "acc_norm_stderr": 0.03879687024073327 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7164179104477612, + "acc_stderr": 0.03187187537919795, + "acc_norm": 0.7164179104477612, + "acc_norm_stderr": 0.03187187537919795 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.44508670520231214, + "acc_stderr": 0.03789401760283647, + "acc_norm": 0.44508670520231214, + "acc_norm_stderr": 0.03789401760283647 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.36772486772486773, + "acc_stderr": 0.024833839825562417, + "acc_norm": 0.36772486772486773, + "acc_norm_stderr": 0.024833839825562417 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4583333333333333, + "acc_stderr": 0.04166666666666665, + "acc_norm": 0.4583333333333333, + "acc_norm_stderr": 0.04166666666666665 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.71, + "acc_stderr": 0.04560480215720683, + "acc_norm": 0.71, + "acc_norm_stderr": 0.04560480215720683 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.569364161849711, + "acc_stderr": 0.026658800273672376, + "acc_norm": 0.569364161849711, + "acc_norm_stderr": 0.026658800273672376 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5214723926380368, + "acc_stderr": 0.03924746876751129, + "acc_norm": 0.5214723926380368, + "acc_norm_stderr": 0.03924746876751129 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5679012345679012, + "acc_stderr": 0.027563010971606672, + "acc_norm": 0.5679012345679012, + "acc_norm_stderr": 0.027563010971606672 + }, + 
"harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6735751295336787, + "acc_stderr": 0.033840286211432945, + "acc_norm": 0.6735751295336787, + "acc_norm_stderr": 0.033840286211432945 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.34210526315789475, + "acc_stderr": 0.04462917535336937, + "acc_norm": 0.34210526315789475, + "acc_norm_stderr": 0.04462917535336937 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6330275229357798, + "acc_stderr": 0.02066467565952053, + "acc_norm": 0.6330275229357798, + "acc_norm_stderr": 0.02066467565952053 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.36507936507936506, + "acc_stderr": 0.043062412591271526, + "acc_norm": 0.36507936507936506, + "acc_norm_stderr": 0.043062412591271526 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5424836601307189, + "acc_stderr": 0.028526383452142638, + "acc_norm": 0.5424836601307189, + "acc_norm_stderr": 0.028526383452142638 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.56, + "acc_stderr": 0.0498887651569859, + "acc_norm": 0.56, + "acc_norm_stderr": 0.0498887651569859 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7024793388429752, + "acc_stderr": 0.04173349148083498, + "acc_norm": 0.7024793388429752, + "acc_norm_stderr": 0.04173349148083498 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5328947368421053, + "acc_stderr": 0.040601270352363966, + "acc_norm": 0.5328947368421053, + "acc_norm_stderr": 0.040601270352363966 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.49836601307189543, + "acc_stderr": 0.020227726838150117, + "acc_norm": 0.49836601307189543, + "acc_norm_stderr": 0.020227726838150117 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.37943262411347517, + "acc_stderr": 0.0289473388516141, + "acc_norm": 
0.37943262411347517, + "acc_norm_stderr": 0.0289473388516141 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.39285714285714285, + "acc_stderr": 0.04635550135609976, + "acc_norm": 0.39285714285714285, + "acc_norm_stderr": 0.04635550135609976 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.44907407407407407, + "acc_stderr": 0.033922384053216174, + "acc_norm": 0.44907407407407407, + "acc_norm_stderr": 0.033922384053216174 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.21787709497206703, + "acc_stderr": 0.013806211780732979, + "acc_norm": 0.21787709497206703, + "acc_norm_stderr": 0.013806211780732979 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.04999999999999999, + "acc_norm": 0.45, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.62, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.62, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.49264705882352944, + "acc_stderr": 0.030369552523902173, + "acc_norm": 0.49264705882352944, + "acc_norm_stderr": 0.030369552523902173 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.45714285714285713, + "acc_stderr": 0.03189141832421397, + "acc_norm": 0.45714285714285713, + "acc_norm_stderr": 0.03189141832421397 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6962025316455697, + "acc_stderr": 0.029936696387138605, + "acc_norm": 0.6962025316455697, + "acc_norm_stderr": 0.029936696387138605 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.40091264667535853, + "acc_stderr": 0.012516960350640807, + "acc_norm": 0.40091264667535853, + "acc_norm_stderr": 0.012516960350640807 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5, + "acc_stderr": 0.03509312031717982, + "acc_norm": 0.5, + "acc_norm_stderr": 0.03509312031717982 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 
0.3696969696969697, + "acc_stderr": 0.03769430314512568, + "acc_norm": 0.3696969696969697, + "acc_norm_stderr": 0.03769430314512568 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2827417380660955, + "mc1_stderr": 0.01576477083677731, + "mc2": 0.4483854562879145, + "mc2_stderr": 0.01515721025663603 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4805194805194805, + "acc_stderr": 0.01717730199234256, + "acc_norm": 0.526564344746163, + "acc_norm_stderr": 0.017166075717577747 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 
1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "spow12/KoSOLAR-10.7B_instruct", + "model_sha": "31c2e7b0bc53ca0bb2811bd5c102bcaf4913fdc9", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/spow12/Llama3_ko_4.2_sft/result_2024-05-07 04:21:11.json b/spow12/Llama3_ko_4.2_sft/result_2024-05-07 04:21:11.json new file mode 100644 index 0000000000000000000000000000000000000000..08fdc3a782c2893853e3854b9debb2f8ae4277fc --- /dev/null +++ b/spow12/Llama3_ko_4.2_sft/result_2024-05-07 04:21:11.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3856655290102389, + "acc_stderr": 0.014224250973257182, + "acc_norm": 0.44283276450511944, + 
"acc_norm_stderr": 0.014515573873348907 + }, + "harness|ko_hellaswag|10": { + "acc": 0.38966341366261703, + "acc_stderr": 0.004866772373029921, + "acc_norm": 0.5152360087631945, + "acc_norm_stderr": 0.004987464257999314 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.03811079669833531, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.03811079669833531 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5728155339805825, + "acc_stderr": 0.04897957737781168, + "acc_norm": 0.5728155339805825, + "acc_norm_stderr": 0.04897957737781168 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4955300127713921, + "acc_stderr": 0.01787924897058438, + "acc_norm": 0.4955300127713921, + "acc_norm_stderr": 0.01787924897058438 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4222222222222222, + "acc_stderr": 0.04266763404099582, + "acc_norm": 0.4222222222222222, + "acc_norm_stderr": 0.04266763404099582 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3702127659574468, + "acc_stderr": 0.03156564682236786, + "acc_norm": 0.3702127659574468, + "acc_norm_stderr": 0.03156564682236786 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39759036144578314, + "acc_stderr": 0.03809973084540219, + "acc_norm": 0.39759036144578314, + "acc_norm_stderr": 0.03809973084540219 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4405144694533762, + "acc_stderr": 0.02819640057419742, + "acc_norm": 0.4405144694533762, + "acc_norm_stderr": 0.02819640057419742 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.43946188340807174, + "acc_stderr": 0.03331092511038179, + "acc_norm": 0.43946188340807174, + "acc_norm_stderr": 0.03331092511038179 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.40458015267175573, + "acc_stderr": 0.043046937953806645, + "acc_norm": 
0.40458015267175573, + "acc_norm_stderr": 0.043046937953806645 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5151515151515151, + "acc_stderr": 0.0356071651653106, + "acc_norm": 0.5151515151515151, + "acc_norm_stderr": 0.0356071651653106 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4482758620689655, + "acc_stderr": 0.0414431181087815, + "acc_norm": 0.4482758620689655, + "acc_norm_stderr": 0.0414431181087815 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237655, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237655 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.42436974789915966, + "acc_stderr": 0.032104790510157764, + "acc_norm": 0.42436974789915966, + "acc_norm_stderr": 0.032104790510157764 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4205128205128205, + "acc_stderr": 0.025028610276710855, + "acc_norm": 0.4205128205128205, + "acc_norm_stderr": 0.025028610276710855 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.04820403072760628, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.04820403072760628 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3694581280788177, + "acc_stderr": 0.03395970381998572, + "acc_norm": 0.3694581280788177, + "acc_norm_stderr": 0.03395970381998572 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4290322580645161, + "acc_stderr": 
0.02815603653823321, + "acc_norm": 0.4290322580645161, + "acc_norm_stderr": 0.02815603653823321 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7008547008547008, + "acc_stderr": 0.02999695185834948, + "acc_norm": 0.7008547008547008, + "acc_norm_stderr": 0.02999695185834948 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4339622641509434, + "acc_stderr": 0.03050329201334259, + "acc_norm": 0.4339622641509434, + "acc_norm_stderr": 0.03050329201334259 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.0478833976870286, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.0478833976870286 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.32222222222222224, + "acc_stderr": 0.028493465091028597, + "acc_norm": 0.32222222222222224, + "acc_norm_stderr": 0.028493465091028597 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.037101857261199946, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.037101857261199946 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5323383084577115, + "acc_stderr": 0.03528131472933607, + "acc_norm": 0.5323383084577115, + "acc_norm_stderr": 0.03528131472933607 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3699421965317919, + "acc_stderr": 0.03681229633394319, + "acc_norm": 0.3699421965317919, + "acc_norm_stderr": 0.03681229633394319 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30158730158730157, + "acc_stderr": 0.0236369759961018, + "acc_norm": 0.30158730158730157, + "acc_norm_stderr": 0.0236369759961018 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.375, + "acc_stderr": 0.04048439222695598, + "acc_norm": 0.375, + "acc_norm_stderr": 0.04048439222695598 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.6, + 
"acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.48265895953757226, + "acc_stderr": 0.026902900458666647, + "acc_norm": 0.48265895953757226, + "acc_norm_stderr": 0.026902900458666647 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4785276073619632, + "acc_stderr": 0.03924746876751129, + "acc_norm": 0.4785276073619632, + "acc_norm_stderr": 0.03924746876751129 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.027744313443376536, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.027744313443376536 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.48704663212435234, + "acc_stderr": 0.0360722806104775, + "acc_norm": 0.48704663212435234, + "acc_norm_stderr": 0.0360722806104775 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.32456140350877194, + "acc_stderr": 0.04404556157374768, + "acc_norm": 0.32456140350877194, + "acc_norm_stderr": 0.04404556157374768 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.48623853211009177, + "acc_stderr": 0.02142920208987408, + "acc_norm": 0.48623853211009177, + "acc_norm_stderr": 0.02142920208987408 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3412698412698413, + "acc_stderr": 0.04240799327574924, + "acc_norm": 0.3412698412698413, + "acc_norm_stderr": 0.04240799327574924 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.43137254901960786, + "acc_stderr": 0.028358956313423545, + "acc_norm": 0.43137254901960786, + "acc_norm_stderr": 0.028358956313423545 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956913, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956913 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 
0.6363636363636364, + "acc_stderr": 0.043913262867240704, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.043913262867240704 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40131578947368424, + "acc_stderr": 0.03988903703336285, + "acc_norm": 0.40131578947368424, + "acc_norm_stderr": 0.03988903703336285 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3660130718954248, + "acc_stderr": 0.01948802574552967, + "acc_norm": 0.3660130718954248, + "acc_norm_stderr": 0.01948802574552967 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32269503546099293, + "acc_stderr": 0.027889139300534792, + "acc_norm": 0.32269503546099293, + "acc_norm_stderr": 0.027889139300534792 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.38392857142857145, + "acc_stderr": 0.046161430750285455, + "acc_norm": 0.38392857142857145, + "acc_norm_stderr": 0.046161430750285455 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.029886910547626974, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.029886910547626974 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2346368715083799, + "acc_stderr": 0.014173044098303673, + "acc_norm": 0.2346368715083799, + "acc_norm_stderr": 0.014173044098303673 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.55, + "acc_stderr": 0.04999999999999999, + "acc_norm": 0.55, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.34558823529411764, + "acc_stderr": 0.02888819310398864, + "acc_norm": 0.34558823529411764, + "acc_norm_stderr": 0.02888819310398864 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4489795918367347, + "acc_stderr": 0.03184213866687579, + "acc_norm": 0.4489795918367347, + "acc_norm_stderr": 
0.03184213866687579 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5527426160337553, + "acc_stderr": 0.03236564251614192, + "acc_norm": 0.5527426160337553, + "acc_norm_stderr": 0.03236564251614192 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.32985658409387225, + "acc_stderr": 0.01200812993854047, + "acc_norm": 0.32985658409387225, + "acc_norm_stderr": 0.01200812993854047 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4264705882352941, + "acc_stderr": 0.03471157907953423, + "acc_norm": 0.4264705882352941, + "acc_norm_stderr": 0.03471157907953423 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5696969696969697, + "acc_stderr": 0.03866225962879077, + "acc_norm": 0.5696969696969697, + "acc_norm_stderr": 0.03866225962879077 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2668298653610771, + "mc1_stderr": 0.015483691939237265, + "mc2": 0.4179532062695332, + "mc2_stderr": 0.014953008456501575 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.39787485242030696, + "acc_stderr": 0.01682795905473339, + "acc_norm": 0.4757969303423849, + "acc_norm_stderr": 0.017170202466520748 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + 
"harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "spow12/Llama3_ko_4.2_sft", + "model_sha": "fad9c145b6c48ebb00a25a90464c800decbaf4a6", + 
"model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/spow12/POLAR-14B_4.3_very_big_sft/result_2024-05-13 01:31:52.json b/spow12/POLAR-14B_4.3_very_big_sft/result_2024-05-13 01:31:52.json new file mode 100644 index 0000000000000000000000000000000000000000..7065444e616d5a2236db1bdd00f2377cf72b15f2 --- /dev/null +++ b/spow12/POLAR-14B_4.3_very_big_sft/result_2024-05-13 01:31:52.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.659556313993174, + "acc_stderr": 0.01384746051889298, + "acc_norm": 0.712457337883959, + "acc_norm_stderr": 0.013226719056266134 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4358693487353117, + "acc_stderr": 0.00494856785637387, + "acc_norm": 0.5786695877315275, + "acc_norm_stderr": 0.004927631806477556 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6900584795321637, + "acc_stderr": 0.035469769593931624, + "acc_norm": 0.6900584795321637, + "acc_norm_stderr": 0.035469769593931624 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.7087378640776699, + "acc_stderr": 0.044986763205729224, + "acc_norm": 0.7087378640776699, + "acc_norm_stderr": 0.044986763205729224 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.7318007662835249, + "acc_stderr": 0.015842430835269466, + "acc_norm": 0.7318007662835249, + "acc_norm_stderr": 0.015842430835269466 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.04309732901036354, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.04309732901036354 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206824, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206824 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.5787234042553191, + "acc_stderr": 0.032278345101462665, + "acc_norm": 0.5787234042553191, + "acc_norm_stderr": 
0.032278345101462665 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.5301204819277109, + "acc_stderr": 0.03885425420866767, + "acc_norm": 0.5301204819277109, + "acc_norm_stderr": 0.03885425420866767 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.617363344051447, + "acc_stderr": 0.027604689028581975, + "acc_norm": 0.617363344051447, + "acc_norm_stderr": 0.027604689028581975 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.6502242152466368, + "acc_stderr": 0.03200736719484503, + "acc_norm": 0.6502242152466368, + "acc_norm_stderr": 0.03200736719484503 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.648854961832061, + "acc_stderr": 0.0418644516301375, + "acc_norm": 0.648854961832061, + "acc_norm_stderr": 0.0418644516301375 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.55, + "acc_stderr": 0.05, + "acc_norm": 0.55, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7424242424242424, + "acc_stderr": 0.031156269519646836, + "acc_norm": 0.7424242424242424, + "acc_norm_stderr": 0.031156269519646836 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5310344827586206, + "acc_stderr": 0.04158632762097828, + "acc_norm": 0.5310344827586206, + "acc_norm_stderr": 0.04158632762097828 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3235294117647059, + "acc_stderr": 0.046550104113196177, + "acc_norm": 0.3235294117647059, + "acc_norm_stderr": 0.046550104113196177 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6470588235294118, + "acc_stderr": 0.031041941304059288, + "acc_norm": 0.6470588235294118, + "acc_norm_stderr": 0.031041941304059288 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.6384615384615384, + "acc_stderr": 0.024359581465397014, + "acc_norm": 0.6384615384615384, + "acc_norm_stderr": 0.024359581465397014 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.7, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.7, + "acc_norm_stderr": 
0.046056618647183814 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6018518518518519, + "acc_stderr": 0.04732332615978814, + "acc_norm": 0.6018518518518519, + "acc_norm_stderr": 0.04732332615978814 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.034819048444388045, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.034819048444388045 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6419354838709678, + "acc_stderr": 0.02727389059430063, + "acc_norm": 0.6419354838709678, + "acc_norm_stderr": 0.02727389059430063 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.8547008547008547, + "acc_stderr": 0.023086635086841407, + "acc_norm": 0.8547008547008547, + "acc_norm_stderr": 0.023086635086841407 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.6150943396226415, + "acc_stderr": 0.02994649856769995, + "acc_norm": 0.6150943396226415, + "acc_norm_stderr": 0.02994649856769995 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6, + "acc_stderr": 0.0469237132203465, + "acc_norm": 0.6, + "acc_norm_stderr": 0.0469237132203465 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.4185185185185185, + "acc_stderr": 0.03007801307502206, + "acc_norm": 0.4185185185185185, + "acc_norm_stderr": 0.03007801307502206 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3973509933774834, + "acc_stderr": 0.039955240076816806, + "acc_norm": 0.3973509933774834, + "acc_norm_stderr": 0.039955240076816806 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7263681592039801, + "acc_stderr": 0.03152439186555403, + "acc_norm": 0.7263681592039801, + "acc_norm_stderr": 0.03152439186555403 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5606936416184971, + "acc_stderr": 0.037842719328874674, + "acc_norm": 0.5606936416184971, + 
"acc_norm_stderr": 0.037842719328874674 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.4312169312169312, + "acc_stderr": 0.025506481698138215, + "acc_norm": 0.4312169312169312, + "acc_norm_stderr": 0.025506481698138215 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.04155319955593146, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.04155319955593146 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.81, + "acc_stderr": 0.03942772444036623, + "acc_norm": 0.81, + "acc_norm_stderr": 0.03942772444036623 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.6127167630057804, + "acc_stderr": 0.026226158605124655, + "acc_norm": 0.6127167630057804, + "acc_norm_stderr": 0.026226158605124655 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5766871165644172, + "acc_stderr": 0.03881891213334384, + "acc_norm": 0.5766871165644172, + "acc_norm_stderr": 0.03881891213334384 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6388888888888888, + "acc_stderr": 0.026725868809100786, + "acc_norm": 0.6388888888888888, + "acc_norm_stderr": 0.026725868809100786 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7512953367875648, + "acc_stderr": 0.031195840877700304, + "acc_norm": 0.7512953367875648, + "acc_norm_stderr": 0.031195840877700304 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.45614035087719296, + "acc_stderr": 0.04685473041907789, + "acc_norm": 0.45614035087719296, + "acc_norm_stderr": 0.04685473041907789 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.7834862385321101, + "acc_stderr": 0.017658710594443145, + "acc_norm": 
0.7834862385321101, + "acc_norm_stderr": 0.017658710594443145 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.38095238095238093, + "acc_stderr": 0.043435254289490965, + "acc_norm": 0.38095238095238093, + "acc_norm_stderr": 0.043435254289490965 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.6568627450980392, + "acc_stderr": 0.02718449890994161, + "acc_norm": 0.6568627450980392, + "acc_norm_stderr": 0.02718449890994161 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.68, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.68, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7603305785123967, + "acc_stderr": 0.03896878985070415, + "acc_norm": 0.7603305785123967, + "acc_norm_stderr": 0.03896878985070415 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.6578947368421053, + "acc_stderr": 0.03860731599316092, + "acc_norm": 0.6578947368421053, + "acc_norm_stderr": 0.03860731599316092 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.576797385620915, + "acc_stderr": 0.019987809769482067, + "acc_norm": 0.576797385620915, + "acc_norm_stderr": 0.019987809769482067 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.4574468085106383, + "acc_stderr": 0.029719281272236844, + "acc_norm": 0.4574468085106383, + "acc_norm_stderr": 0.029719281272236844 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.45535714285714285, + "acc_stderr": 0.04726835553719099, + "acc_norm": 0.45535714285714285, + "acc_norm_stderr": 0.04726835553719099 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5277777777777778, + "acc_stderr": 0.0340470532865388, + "acc_norm": 0.5277777777777778, + "acc_norm_stderr": 0.0340470532865388 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.22681564245810057, + "acc_stderr": 0.014005843570897911, + "acc_norm": 0.22681564245810057, + "acc_norm_stderr": 0.014005843570897911 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.46, + 
"acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.72, + "acc_stderr": 0.045126085985421296, + "acc_norm": 0.72, + "acc_norm_stderr": 0.045126085985421296 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5551470588235294, + "acc_stderr": 0.03018753206032939, + "acc_norm": 0.5551470588235294, + "acc_norm_stderr": 0.03018753206032939 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6285714285714286, + "acc_stderr": 0.03093285879278987, + "acc_norm": 0.6285714285714286, + "acc_norm_stderr": 0.03093285879278987 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7130801687763713, + "acc_stderr": 0.02944377302259469, + "acc_norm": 0.7130801687763713, + "acc_norm_stderr": 0.02944377302259469 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.44589308996088656, + "acc_stderr": 0.01269524471137978, + "acc_norm": 0.44589308996088656, + "acc_norm_stderr": 0.01269524471137978 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.033086111132364364, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.033086111132364364 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6303030303030303, + "acc_stderr": 0.03769430314512568, + "acc_norm": 0.6303030303030303, + "acc_norm_stderr": 0.03769430314512568 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.5385556915544676, + "mc1_stderr": 0.017451384104637455, + "mc2": 0.6538320796034501, + "mc2_stderr": 0.014674498989533316 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5796930342384888, + "acc_stderr": 0.01697059828117771, + "acc_norm": 0.6127508854781583, + "acc_norm_stderr": 0.01674757799164278 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + 
"harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 
1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "spow12/POLAR-14B_4.3_very_big_sft", + "model_sha": "1adeea521e5d029d10dcdc8951d1a2b1b2563dc8", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/spow12/Qwen2-7B-ko-Instruct-orpo-ver_2.0_wo_chat/result_2024-06-20 06:04:50.json b/spow12/Qwen2-7B-ko-Instruct-orpo-ver_2.0_wo_chat/result_2024-06-20 06:04:50.json new file mode 100644 index 0000000000000000000000000000000000000000..08bdf3d759961109113d4c918dfb92679a51c199 --- /dev/null +++ b/spow12/Qwen2-7B-ko-Instruct-orpo-ver_2.0_wo_chat/result_2024-06-20 06:04:50.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.42662116040955633, + "acc_stderr": 0.014453185592920293, + "acc_norm": 0.48293515358361777, + "acc_norm_stderr": 0.014602878388536597 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4012148974307907, + "acc_stderr": 0.0048914265333906285, + "acc_norm": 0.5460067715594503, + "acc_norm_stderr": 0.004968613539309253 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6900584795321637, + "acc_stderr": 0.035469769593931624, + "acc_norm": 0.6900584795321637, + "acc_norm_stderr": 0.035469769593931624 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.7864077669902912, + "acc_stderr": 0.040580420156460344, + "acc_norm": 0.7864077669902912, + "acc_norm_stderr": 
0.040580420156460344 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.7139208173690932, + "acc_stderr": 0.016160871405127526, + "acc_norm": 0.7139208173690932, + "acc_norm_stderr": 0.016160871405127526 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4888888888888889, + "acc_stderr": 0.04318275491977976, + "acc_norm": 0.4888888888888889, + "acc_norm_stderr": 0.04318275491977976 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.6595744680851063, + "acc_stderr": 0.030976692998534422, + "acc_norm": 0.6595744680851063, + "acc_norm_stderr": 0.030976692998534422 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4879518072289157, + "acc_stderr": 0.0389136449583582, + "acc_norm": 0.4879518072289157, + "acc_norm_stderr": 0.0389136449583582 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6752411575562701, + "acc_stderr": 0.026596782287697046, + "acc_norm": 0.6752411575562701, + "acc_norm_stderr": 0.026596782287697046 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.6591928251121076, + "acc_stderr": 0.0318114974705536, + "acc_norm": 0.6591928251121076, + "acc_norm_stderr": 0.0318114974705536 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6717557251908397, + "acc_stderr": 0.04118438565806299, + "acc_norm": 0.6717557251908397, + "acc_norm_stderr": 0.04118438565806299 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7676767676767676, + "acc_stderr": 0.030088629490217483, + "acc_norm": 0.7676767676767676, + "acc_norm_stderr": 0.030088629490217483 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.6137931034482759, + "acc_stderr": 0.04057324734419035, + "acc_norm": 0.6137931034482759, + "acc_norm_stderr": 
0.04057324734419035 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.04835503696107223, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.04835503696107223 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.7058823529411765, + "acc_stderr": 0.029597329730978082, + "acc_norm": 0.7058823529411765, + "acc_norm_stderr": 0.029597329730978082 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.6410256410256411, + "acc_stderr": 0.024321738484602354, + "acc_norm": 0.6410256410256411, + "acc_norm_stderr": 0.024321738484602354 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.7, + "acc_stderr": 0.04605661864718381, + "acc_norm": 0.7, + "acc_norm_stderr": 0.04605661864718381 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6944444444444444, + "acc_stderr": 0.044531975073749834, + "acc_norm": 0.6944444444444444, + "acc_norm_stderr": 0.044531975073749834 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.5566502463054187, + "acc_stderr": 0.034953345821629324, + "acc_norm": 0.5566502463054187, + "acc_norm_stderr": 0.034953345821629324 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.7193548387096774, + "acc_stderr": 0.025560604721022902, + "acc_norm": 0.7193548387096774, + "acc_norm_stderr": 0.025560604721022902 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.8376068376068376, + "acc_stderr": 0.02416161812798774, + "acc_norm": 0.8376068376068376, + "acc_norm_stderr": 0.02416161812798774 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.6037735849056604, + "acc_stderr": 0.03010279378179119, + "acc_norm": 0.6037735849056604, + "acc_norm_stderr": 0.03010279378179119 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6272727272727273, + "acc_stderr": 0.04631381319425464, + "acc_norm": 
0.6272727272727273, + "acc_norm_stderr": 0.04631381319425464 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.4925925925925926, + "acc_stderr": 0.0304821923951915, + "acc_norm": 0.4925925925925926, + "acc_norm_stderr": 0.0304821923951915 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.36423841059602646, + "acc_stderr": 0.03929111781242742, + "acc_norm": 0.36423841059602646, + "acc_norm_stderr": 0.03929111781242742 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7761194029850746, + "acc_stderr": 0.02947525023601718, + "acc_norm": 0.7761194029850746, + "acc_norm_stderr": 0.02947525023601718 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5664739884393064, + "acc_stderr": 0.03778621079092056, + "acc_norm": 0.5664739884393064, + "acc_norm_stderr": 0.03778621079092056 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.5687830687830688, + "acc_stderr": 0.0255064816981382, + "acc_norm": 0.5687830687830688, + "acc_norm_stderr": 0.0255064816981382 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.6111111111111112, + "acc_stderr": 0.04076663253918567, + "acc_norm": 0.6111111111111112, + "acc_norm_stderr": 0.04076663253918567 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.74, + "acc_stderr": 0.04408440022768077, + "acc_norm": 0.74, + "acc_norm_stderr": 0.04408440022768077 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.6040462427745664, + "acc_stderr": 0.02632981334194624, + "acc_norm": 0.6040462427745664, + "acc_norm_stderr": 0.02632981334194624 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.6134969325153374, + "acc_stderr": 0.03825825548848607, + "acc_norm": 0.6134969325153374, + "acc_norm_stderr": 0.03825825548848607 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6327160493827161, + "acc_stderr": 
0.026822801759507887, + "acc_norm": 0.6327160493827161, + "acc_norm_stderr": 0.026822801759507887 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6994818652849741, + "acc_stderr": 0.0330881859441575, + "acc_norm": 0.6994818652849741, + "acc_norm_stderr": 0.0330881859441575 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.4649122807017544, + "acc_stderr": 0.046920083813689104, + "acc_norm": 0.4649122807017544, + "acc_norm_stderr": 0.046920083813689104 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.7908256880733945, + "acc_stderr": 0.017437937173343226, + "acc_norm": 0.7908256880733945, + "acc_norm_stderr": 0.017437937173343226 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.044444444444444495, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.044444444444444495 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.6699346405228758, + "acc_stderr": 0.026925654653615686, + "acc_norm": 0.6699346405228758, + "acc_norm_stderr": 0.026925654653615686 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.7, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.7, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.8016528925619835, + "acc_stderr": 0.03640118271990945, + "acc_norm": 0.8016528925619835, + "acc_norm_stderr": 0.03640118271990945 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.7039473684210527, + "acc_stderr": 0.03715062154998905, + "acc_norm": 0.7039473684210527, + "acc_norm_stderr": 0.03715062154998905 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5522875816993464, + "acc_stderr": 0.02011692534742242, + "acc_norm": 0.5522875816993464, + "acc_norm_stderr": 0.02011692534742242 + }, + "harness|ko_mmlu_professional_accounting|5": { + 
"acc": 0.4574468085106383, + "acc_stderr": 0.029719281272236834, + "acc_norm": 0.4574468085106383, + "acc_norm_stderr": 0.029719281272236834 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4642857142857143, + "acc_stderr": 0.04733667890053756, + "acc_norm": 0.4642857142857143, + "acc_norm_stderr": 0.04733667890053756 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5648148148148148, + "acc_stderr": 0.03381200005643525, + "acc_norm": 0.5648148148148148, + "acc_norm_stderr": 0.03381200005643525 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.42569832402234636, + "acc_stderr": 0.016536829648997123, + "acc_norm": 0.42569832402234636, + "acc_norm_stderr": 0.016536829648997123 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.73, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.73, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5514705882352942, + "acc_stderr": 0.030211479609121596, + "acc_norm": 0.5514705882352942, + "acc_norm_stderr": 0.030211479609121596 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.673469387755102, + "acc_stderr": 0.03002105623844033, + "acc_norm": 0.673469387755102, + "acc_norm_stderr": 0.03002105623844033 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7510548523206751, + "acc_stderr": 0.028146970599422647, + "acc_norm": 0.7510548523206751, + "acc_norm_stderr": 0.028146970599422647 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.4172099087353325, + "acc_stderr": 0.012593959992906424, + "acc_norm": 0.4172099087353325, + "acc_norm_stderr": 0.012593959992906424 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.7205882352941176, + "acc_stderr": 0.03149328104507955, + "acc_norm": 0.7205882352941176, + 
"acc_norm_stderr": 0.03149328104507955 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.703030303030303, + "acc_stderr": 0.035679697722680474, + "acc_norm": 0.703030303030303, + "acc_norm_stderr": 0.035679697722680474 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.31946144430844553, + "mc1_stderr": 0.016322644182960495, + "mc2": 0.47567265217931887, + "mc2_stderr": 0.015538052476814728 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.6658795749704841, + "acc_stderr": 0.016216763304239684, + "acc_norm": 0.6776859504132231, + "acc_norm_stderr": 0.016068253615813956 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + 
"harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "spow12/Qwen2-7B-ko-Instruct-orpo-ver_2.0_wo_chat", + "model_sha": "c27df7d9d0b143ae2d58bdfb725837f82da92f8a", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/spow12/kosolar_4.1_sft/result_2024-04-23 05:34:31.json b/spow12/kosolar_4.1_sft/result_2024-04-23 05:34:31.json new file mode 100644 index 0000000000000000000000000000000000000000..b9a9d8552005016e1b73e312d3e65ee284d76b98 --- /dev/null +++ b/spow12/kosolar_4.1_sft/result_2024-04-23 05:34:31.json @@ -0,0 +1,444 @@ +{ + "results": { + 
"harness|ko_arc_challenge|25": { + "acc": 0.6663822525597269, + "acc_stderr": 0.01377868705417655, + "acc_norm": 0.7226962457337884, + "acc_norm_stderr": 0.013082095839059376 + }, + "harness|ko_hellaswag|10": { + "acc": 0.43228440549691294, + "acc_stderr": 0.004943809330692699, + "acc_norm": 0.5774746066520613, + "acc_norm_stderr": 0.004929517011508222 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.7192982456140351, + "acc_stderr": 0.034462962170884265, + "acc_norm": 0.7192982456140351, + "acc_norm_stderr": 0.034462962170884265 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.7087378640776699, + "acc_stderr": 0.044986763205729224, + "acc_norm": 0.7087378640776699, + "acc_norm_stderr": 0.044986763205729224 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.7139208173690932, + "acc_stderr": 0.01616087140512751, + "acc_norm": 0.7139208173690932, + "acc_norm_stderr": 0.01616087140512751 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.04309732901036354, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.04309732901036354 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110175, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110175 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.5659574468085107, + "acc_stderr": 0.03240038086792748, + "acc_norm": 0.5659574468085107, + "acc_norm_stderr": 0.03240038086792748 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4879518072289157, + "acc_stderr": 0.03891364495835821, + "acc_norm": 0.4879518072289157, + "acc_norm_stderr": 0.03891364495835821 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6141479099678456, + "acc_stderr": 0.027648149599751464, + "acc_norm": 0.6141479099678456, + "acc_norm_stderr": 0.027648149599751464 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.6098654708520179, + "acc_stderr": 0.03273766725459157, + "acc_norm": 0.6098654708520179, + "acc_norm_stderr": 0.03273766725459157 + }, + 
"harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6183206106870229, + "acc_stderr": 0.04260735157644561, + "acc_norm": 0.6183206106870229, + "acc_norm_stderr": 0.04260735157644561 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.57, + "acc_stderr": 0.04975698519562426, + "acc_norm": 0.57, + "acc_norm_stderr": 0.04975698519562426 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7424242424242424, + "acc_stderr": 0.031156269519646836, + "acc_norm": 0.7424242424242424, + "acc_norm_stderr": 0.031156269519646836 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5310344827586206, + "acc_stderr": 0.04158632762097828, + "acc_norm": 0.5310344827586206, + "acc_norm_stderr": 0.04158632762097828 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.30392156862745096, + "acc_stderr": 0.04576665403207763, + "acc_norm": 0.30392156862745096, + "acc_norm_stderr": 0.04576665403207763 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6428571428571429, + "acc_stderr": 0.031124619309328177, + "acc_norm": 0.6428571428571429, + "acc_norm_stderr": 0.031124619309328177 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.6205128205128205, + "acc_stderr": 0.024603626924097413, + "acc_norm": 0.6205128205128205, + "acc_norm_stderr": 0.024603626924097413 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.69, + "acc_stderr": 0.046482319871173156, + "acc_norm": 0.69, + "acc_norm_stderr": 0.046482319871173156 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6388888888888888, + "acc_stderr": 0.04643454608906275, + "acc_norm": 0.6388888888888888, + "acc_norm_stderr": 0.04643454608906275 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.46798029556650245, + "acc_stderr": 0.035107665979592154, + "acc_norm": 0.46798029556650245, + "acc_norm_stderr": 
0.035107665979592154 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6419354838709678, + "acc_stderr": 0.02727389059430063, + "acc_norm": 0.6419354838709678, + "acc_norm_stderr": 0.02727389059430063 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.8461538461538461, + "acc_stderr": 0.023636873317489274, + "acc_norm": 0.8461538461538461, + "acc_norm_stderr": 0.023636873317489274 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.6037735849056604, + "acc_stderr": 0.030102793781791197, + "acc_norm": 0.6037735849056604, + "acc_norm_stderr": 0.030102793781791197 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6181818181818182, + "acc_stderr": 0.04653429807913507, + "acc_norm": 0.6181818181818182, + "acc_norm_stderr": 0.04653429807913507 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.4, + "acc_stderr": 0.0298696050953169, + "acc_norm": 0.4, + "acc_norm_stderr": 0.0298696050953169 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.36423841059602646, + "acc_stderr": 0.03929111781242741, + "acc_norm": 0.36423841059602646, + "acc_norm_stderr": 0.03929111781242741 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6865671641791045, + "acc_stderr": 0.03280188205348642, + "acc_norm": 0.6865671641791045, + "acc_norm_stderr": 0.03280188205348642 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5549132947976878, + "acc_stderr": 0.03789401760283647, + "acc_norm": 0.5549132947976878, + "acc_norm_stderr": 0.03789401760283647 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.41005291005291006, + "acc_stderr": 0.02533120243894444, + "acc_norm": 0.41005291005291006, + "acc_norm_stderr": 0.02533120243894444 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5208333333333334, + "acc_stderr": 0.041775789507399935, + "acc_norm": 0.5208333333333334, + "acc_norm_stderr": 0.041775789507399935 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 
0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.76, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.76, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.6213872832369942, + "acc_stderr": 0.02611374936131034, + "acc_norm": 0.6213872832369942, + "acc_norm_stderr": 0.02611374936131034 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.6134969325153374, + "acc_stderr": 0.03825825548848607, + "acc_norm": 0.6134969325153374, + "acc_norm_stderr": 0.03825825548848607 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6111111111111112, + "acc_stderr": 0.027125115513166848, + "acc_norm": 0.6111111111111112, + "acc_norm_stderr": 0.027125115513166848 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7253886010362695, + "acc_stderr": 0.032210245080411544, + "acc_norm": 0.7253886010362695, + "acc_norm_stderr": 0.032210245080411544 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.39473684210526316, + "acc_stderr": 0.04598188057816542, + "acc_norm": 0.39473684210526316, + "acc_norm_stderr": 0.04598188057816542 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.7504587155963303, + "acc_stderr": 0.018553897629501624, + "acc_norm": 0.7504587155963303, + "acc_norm_stderr": 0.018553897629501624 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4126984126984127, + "acc_stderr": 0.04403438954768177, + "acc_norm": 0.4126984126984127, + "acc_norm_stderr": 0.04403438954768177 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.6209150326797386, + "acc_stderr": 0.02778014120702333, + "acc_norm": 0.6209150326797386, + "acc_norm_stderr": 0.02778014120702333 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001974, + "acc_norm": 
0.61, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.8016528925619835, + "acc_stderr": 0.03640118271990947, + "acc_norm": 0.8016528925619835, + "acc_norm_stderr": 0.03640118271990947 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.6052631578947368, + "acc_stderr": 0.039777499346220734, + "acc_norm": 0.6052631578947368, + "acc_norm_stderr": 0.039777499346220734 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5588235294117647, + "acc_stderr": 0.020087362076702846, + "acc_norm": 0.5588235294117647, + "acc_norm_stderr": 0.020087362076702846 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3900709219858156, + "acc_stderr": 0.029097675599463926, + "acc_norm": 0.3900709219858156, + "acc_norm_stderr": 0.029097675599463926 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.38392857142857145, + "acc_stderr": 0.046161430750285455, + "acc_norm": 0.38392857142857145, + "acc_norm_stderr": 0.046161430750285455 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5277777777777778, + "acc_stderr": 0.0340470532865388, + "acc_norm": 0.5277777777777778, + "acc_norm_stderr": 0.0340470532865388 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.29720670391061454, + "acc_stderr": 0.015285313353641588, + "acc_norm": 0.29720670391061454, + "acc_norm_stderr": 0.015285313353641588 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.47, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.47, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.71, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.71, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5330882352941176, + "acc_stderr": 0.03030625772246831, + "acc_norm": 0.5330882352941176, + "acc_norm_stderr": 0.03030625772246831 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 
0.6163265306122448, + "acc_stderr": 0.03113088039623595, + "acc_norm": 0.6163265306122448, + "acc_norm_stderr": 0.03113088039623595 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.70042194092827, + "acc_stderr": 0.02981802474975309, + "acc_norm": 0.70042194092827, + "acc_norm_stderr": 0.02981802474975309 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.42894393741851367, + "acc_stderr": 0.012640625443067361, + "acc_norm": 0.42894393741851367, + "acc_norm_stderr": 0.012640625443067361 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6029411764705882, + "acc_stderr": 0.03434131164719129, + "acc_norm": 0.6029411764705882, + "acc_norm_stderr": 0.03434131164719129 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6484848484848484, + "acc_stderr": 0.037282069986826503, + "acc_norm": 0.6484848484848484, + "acc_norm_stderr": 0.037282069986826503 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.554467564259486, + "mc1_stderr": 0.01739933528014035, + "mc2": 0.6751570109951998, + "mc2_stderr": 0.014485372906766927 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5017709563164109, + "acc_stderr": 0.017190246276231867, + "acc_norm": 0.551357733175915, + "acc_norm_stderr": 0.01709943051472578 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + 
"harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + 
"model_name": "spow12/kosolar_4.1_sft", + "model_sha": "b79cd4264d9b54e26edcf4d04b34e9cca8403b96", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/spow12/llama-3-Korean-Bllossom-8B_ver_4.3_big_sft_2epochs/result_2024-05-16 00:55:50.json b/spow12/llama-3-Korean-Bllossom-8B_ver_4.3_big_sft_2epochs/result_2024-05-16 00:55:50.json new file mode 100644 index 0000000000000000000000000000000000000000..cdfb7203a6a4d1da3f25e08cbfdf0d0390be3737 --- /dev/null +++ b/spow12/llama-3-Korean-Bllossom-8B_ver_4.3_big_sft_2epochs/result_2024-05-16 00:55:50.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3174061433447099, + "acc_stderr": 0.01360223908803817, + "acc_norm": 0.3916382252559727, + "acc_norm_stderr": 0.014264122124938218 + }, + "harness|ko_hellaswag|10": { + "acc": 0.34843656642103166, + "acc_stderr": 0.004755013243022122, + "acc_norm": 0.4371639115714001, + "acc_norm_stderr": 0.004950221546187574 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.391812865497076, + "acc_stderr": 0.03743979825926401, + "acc_norm": 0.391812865497076, + "acc_norm_stderr": 0.03743979825926401 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.49514563106796117, + "acc_stderr": 0.049505043821289195, + "acc_norm": 0.49514563106796117, + "acc_norm_stderr": 0.049505043821289195 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3831417624521073, + "acc_stderr": 0.017384774194885634, + "acc_norm": 0.3831417624521073, + "acc_norm_stderr": 0.017384774194885634 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.04171654161354543, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.04171654161354543 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.27, + "acc_norm_stderr": 
0.04461960433384741 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.40425531914893614, + "acc_stderr": 0.032081157507886836, + "acc_norm": 0.40425531914893614, + "acc_norm_stderr": 0.032081157507886836 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3674698795180723, + "acc_stderr": 0.03753267402120574, + "acc_norm": 0.3674698795180723, + "acc_norm_stderr": 0.03753267402120574 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.38263665594855306, + "acc_stderr": 0.027604689028581982, + "acc_norm": 0.38263665594855306, + "acc_norm_stderr": 0.027604689028581982 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.38565022421524664, + "acc_stderr": 0.03266842214289201, + "acc_norm": 0.38565022421524664, + "acc_norm_stderr": 0.03266842214289201 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4351145038167939, + "acc_stderr": 0.04348208051644858, + "acc_norm": 0.4351145038167939, + "acc_norm_stderr": 0.04348208051644858 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.4393939393939394, + "acc_stderr": 0.035360859475294805, + "acc_norm": 0.4393939393939394, + "acc_norm_stderr": 0.035360859475294805 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3931034482758621, + "acc_stderr": 0.040703290137070705, + "acc_norm": 0.3931034482758621, + "acc_norm_stderr": 0.040703290137070705 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.04220773659171452, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.04220773659171452 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4117647058823529, + "acc_stderr": 0.031968769891957786, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.031968769891957786 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3871794871794872, + "acc_stderr": 
0.02469721693087895, + "acc_norm": 0.3871794871794872, + "acc_norm_stderr": 0.02469721693087895 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.04826217294139894, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.04826217294139894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3842364532019704, + "acc_stderr": 0.0342239856565755, + "acc_norm": 0.3842364532019704, + "acc_norm_stderr": 0.0342239856565755 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3967741935483871, + "acc_stderr": 0.027831231605767948, + "acc_norm": 0.3967741935483871, + "acc_norm_stderr": 0.027831231605767948 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5897435897435898, + "acc_stderr": 0.032224140452411065, + "acc_norm": 0.5897435897435898, + "acc_norm_stderr": 0.032224140452411065 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.44150943396226416, + "acc_stderr": 0.03056159042673184, + "acc_norm": 0.44150943396226416, + "acc_norm_stderr": 0.03056159042673184 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4090909090909091, + "acc_stderr": 0.04709306978661896, + "acc_norm": 0.4090909090909091, + "acc_norm_stderr": 0.04709306978661896 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.32222222222222224, + "acc_stderr": 0.028493465091028597, + "acc_norm": 0.32222222222222224, + "acc_norm_stderr": 0.028493465091028597 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3576158940397351, + "acc_stderr": 0.03913453431177258, + "acc_norm": 0.3576158940397351, + "acc_norm_stderr": 0.03913453431177258 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 
0.5323383084577115, + "acc_stderr": 0.035281314729336065, + "acc_norm": 0.5323383084577115, + "acc_norm_stderr": 0.035281314729336065 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4161849710982659, + "acc_stderr": 0.03758517775404947, + "acc_norm": 0.4161849710982659, + "acc_norm_stderr": 0.03758517775404947 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.34656084656084657, + "acc_stderr": 0.024508777521028424, + "acc_norm": 0.34656084656084657, + "acc_norm_stderr": 0.024508777521028424 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3472222222222222, + "acc_stderr": 0.039812405437178615, + "acc_norm": 0.3472222222222222, + "acc_norm_stderr": 0.039812405437178615 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.53, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.53, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.407514450867052, + "acc_stderr": 0.026454578146931494, + "acc_norm": 0.407514450867052, + "acc_norm_stderr": 0.026454578146931494 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4171779141104294, + "acc_stderr": 0.038741028598180814, + "acc_norm": 0.4171779141104294, + "acc_norm_stderr": 0.038741028598180814 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3734567901234568, + "acc_stderr": 0.02691500301138015, + "acc_norm": 0.3734567901234568, + "acc_norm_stderr": 0.02691500301138015 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.47150259067357514, + "acc_stderr": 0.036025735712884414, + "acc_norm": 0.47150259067357514, + "acc_norm_stderr": 0.036025735712884414 + }, + 
"harness|ko_mmlu_econometrics|5": { + "acc": 0.21929824561403508, + "acc_stderr": 0.03892431106518752, + "acc_norm": 0.21929824561403508, + "acc_norm_stderr": 0.03892431106518752 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.47339449541284406, + "acc_stderr": 0.021406952688151588, + "acc_norm": 0.47339449541284406, + "acc_norm_stderr": 0.021406952688151588 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.38095238095238093, + "acc_stderr": 0.04343525428949098, + "acc_norm": 0.38095238095238093, + "acc_norm_stderr": 0.04343525428949098 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.42483660130718953, + "acc_stderr": 0.02830457667314112, + "acc_norm": 0.42483660130718953, + "acc_norm_stderr": 0.02830457667314112 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.04545454545454546, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.04545454545454546 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4407894736842105, + "acc_stderr": 0.040403110624904356, + "acc_norm": 0.4407894736842105, + "acc_norm_stderr": 0.040403110624904356 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2957516339869281, + "acc_stderr": 0.01846315413263282, + "acc_norm": 0.2957516339869281, + "acc_norm_stderr": 0.01846315413263282 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.30851063829787234, + "acc_stderr": 0.027553366165101366, + "acc_norm": 0.30851063829787234, + "acc_norm_stderr": 0.027553366165101366 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2767857142857143, + "acc_stderr": 0.04246624336697625, + "acc_norm": 0.2767857142857143, + "acc_norm_stderr": 0.04246624336697625 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.03350991604696043, + "acc_norm": 
0.4074074074074074, + "acc_norm_stderr": 0.03350991604696043 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2536312849162011, + "acc_stderr": 0.01455155365936992, + "acc_norm": 0.2536312849162011, + "acc_norm_stderr": 0.01455155365936992 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4485294117647059, + "acc_stderr": 0.030211479609121596, + "acc_norm": 0.4485294117647059, + "acc_norm_stderr": 0.030211479609121596 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4775510204081633, + "acc_stderr": 0.031976941187136725, + "acc_norm": 0.4775510204081633, + "acc_norm_stderr": 0.031976941187136725 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.48523206751054854, + "acc_stderr": 0.032533028078777386, + "acc_norm": 0.48523206751054854, + "acc_norm_stderr": 0.032533028078777386 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3011734028683181, + "acc_stderr": 0.011717148751648438, + "acc_norm": 0.3011734028683181, + "acc_norm_stderr": 0.011717148751648438 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4068627450980392, + "acc_stderr": 0.03447891136353382, + "acc_norm": 0.4068627450980392, + "acc_norm_stderr": 0.03447891136353382 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4121212121212121, + "acc_stderr": 0.03843566993588717, + "acc_norm": 0.4121212121212121, + "acc_norm_stderr": 0.03843566993588717 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.26438188494492043, + "mc1_stderr": 0.015438211119522509, + "mc2": 0.4413432691102796, + "mc2_stderr": 0.015585341330387307 + }, + "harness|ko_commongen_v2|2": { + "acc": 
0.30932703659976385, + "acc_stderr": 0.015891320505520886, + "acc_norm": 0.40613931523022434, + "acc_norm_stderr": 0.016884749503191385 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + 
"harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "spow12/llama-3-Korean-Bllossom-8B_ver_4.3_big_sft_2epochs", + "model_sha": "ba2298b4c446616fee3d22498f86391d1787ca80", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/spow12/solar_v01/result_2024-07-17 07:06:43.json b/spow12/solar_v01/result_2024-07-17 07:06:43.json new file mode 100644 index 0000000000000000000000000000000000000000..99eaa72adae7aa296dff4c91ac714f2a3b2f2183 --- /dev/null +++ b/spow12/solar_v01/result_2024-07-17 07:06:43.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.757679180887372, + "acc_stderr": 0.012521593295800118, + "acc_norm": 0.7901023890784983, + "acc_norm_stderr": 0.011900548748047449 + }, + "harness|ko_hellaswag|10": { + "acc": 0.7102170882294364, + "acc_stderr": 0.004527343651130769, + "acc_norm": 0.8116908982274448, + "acc_norm_stderr": 0.003901597914246544 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6783625730994152, + "acc_stderr": 
0.03582529442573122, + "acc_norm": 0.6783625730994152, + "acc_norm_stderr": 0.03582529442573122 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.7961165048543689, + "acc_stderr": 0.0398913985953177, + "acc_norm": 0.7961165048543689, + "acc_norm_stderr": 0.0398913985953177 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6730523627075351, + "acc_stderr": 0.016774908180131498, + "acc_norm": 0.6730523627075351, + "acc_norm_stderr": 0.016774908180131498 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.043097329010363554, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.043097329010363554 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4808510638297872, + "acc_stderr": 0.032662042990646775, + "acc_norm": 0.4808510638297872, + "acc_norm_stderr": 0.032662042990646775 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4819277108433735, + "acc_stderr": 0.038899512528272166, + "acc_norm": 0.4819277108433735, + "acc_norm_stderr": 0.038899512528272166 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6302250803858521, + "acc_stderr": 0.027417996705630998, + "acc_norm": 0.6302250803858521, + "acc_norm_stderr": 0.027417996705630998 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.6457399103139013, + "acc_stderr": 0.032100621541349864, + "acc_norm": 0.6457399103139013, + "acc_norm_stderr": 0.032100621541349864 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.549618320610687, + "acc_stderr": 0.04363643698524779, + "acc_norm": 0.549618320610687, + "acc_norm_stderr": 0.04363643698524779 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7575757575757576, + "acc_stderr": 
0.030532892233932036, + "acc_norm": 0.7575757575757576, + "acc_norm_stderr": 0.030532892233932036 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5103448275862069, + "acc_stderr": 0.04165774775728763, + "acc_norm": 0.5103448275862069, + "acc_norm_stderr": 0.04165774775728763 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3235294117647059, + "acc_stderr": 0.046550104113196177, + "acc_norm": 0.3235294117647059, + "acc_norm_stderr": 0.046550104113196177 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6302521008403361, + "acc_stderr": 0.03135709599613591, + "acc_norm": 0.6302521008403361, + "acc_norm_stderr": 0.03135709599613591 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.6102564102564103, + "acc_stderr": 0.02472696788664708, + "acc_norm": 0.6102564102564103, + "acc_norm_stderr": 0.02472696788664708 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.7, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.7, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6759259259259259, + "acc_stderr": 0.04524596007030048, + "acc_norm": 0.6759259259259259, + "acc_norm_stderr": 0.04524596007030048 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.034819048444388045, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.034819048444388045 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6, + "acc_stderr": 0.027869320571664625, + "acc_norm": 0.6, + "acc_norm_stderr": 0.027869320571664625 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.8205128205128205, + "acc_stderr": 0.02514093595033543, + "acc_norm": 0.8205128205128205, + "acc_norm_stderr": 0.02514093595033543 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5886792452830188, 
+ "acc_stderr": 0.03028500925900979, + "acc_norm": 0.5886792452830188, + "acc_norm_stderr": 0.03028500925900979 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.04607582090719976, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.04607582090719976 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3296296296296296, + "acc_stderr": 0.02866120111652458, + "acc_norm": 0.3296296296296296, + "acc_norm_stderr": 0.02866120111652458 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3443708609271523, + "acc_stderr": 0.038796870240733264, + "acc_norm": 0.3443708609271523, + "acc_norm_stderr": 0.038796870240733264 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7313432835820896, + "acc_stderr": 0.03134328358208954, + "acc_norm": 0.7313432835820896, + "acc_norm_stderr": 0.03134328358208954 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5606936416184971, + "acc_stderr": 0.03784271932887467, + "acc_norm": 0.5606936416184971, + "acc_norm_stderr": 0.03784271932887467 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.4523809523809524, + "acc_stderr": 0.02563425811555496, + "acc_norm": 0.4523809523809524, + "acc_norm_stderr": 0.02563425811555496 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.6111111111111112, + "acc_stderr": 0.04076663253918567, + "acc_norm": 0.6111111111111112, + "acc_norm_stderr": 0.04076663253918567 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.72, + "acc_stderr": 0.04512608598542126, + "acc_norm": 0.72, + "acc_norm_stderr": 0.04512608598542126 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5809248554913294, + "acc_stderr": 0.02656417811142262, + "acc_norm": 0.5809248554913294, + "acc_norm_stderr": 0.02656417811142262 + }, + "harness|ko_mmlu_logical_fallacies|5": { + 
"acc": 0.6073619631901841, + "acc_stderr": 0.0383674090783103, + "acc_norm": 0.6073619631901841, + "acc_norm_stderr": 0.0383674090783103 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6450617283950617, + "acc_stderr": 0.026624152478845853, + "acc_norm": 0.6450617283950617, + "acc_norm_stderr": 0.026624152478845853 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7564766839378239, + "acc_stderr": 0.030975436386845426, + "acc_norm": 0.7564766839378239, + "acc_norm_stderr": 0.030975436386845426 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.4649122807017544, + "acc_stderr": 0.046920083813689104, + "acc_norm": 0.4649122807017544, + "acc_norm_stderr": 0.046920083813689104 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.728440366972477, + "acc_stderr": 0.019069098363191452, + "acc_norm": 0.728440366972477, + "acc_norm_stderr": 0.019069098363191452 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.044444444444444495, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.044444444444444495 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5947712418300654, + "acc_stderr": 0.028110928492809075, + "acc_norm": 0.5947712418300654, + "acc_norm_stderr": 0.028110928492809075 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.72, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.72, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.71900826446281, + "acc_stderr": 0.04103203830514512, + "acc_norm": 0.71900826446281, + "acc_norm_stderr": 0.04103203830514512 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.6381578947368421, + "acc_stderr": 0.03910525752849725, + "acc_norm": 0.6381578947368421, + "acc_norm_stderr": 0.03910525752849725 + }, + 
"harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5784313725490197, + "acc_stderr": 0.019977422600227477, + "acc_norm": 0.5784313725490197, + "acc_norm_stderr": 0.019977422600227477 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.450354609929078, + "acc_stderr": 0.02968010556502904, + "acc_norm": 0.450354609929078, + "acc_norm_stderr": 0.02968010556502904 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.41964285714285715, + "acc_stderr": 0.04684099321077106, + "acc_norm": 0.41964285714285715, + "acc_norm_stderr": 0.04684099321077106 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.0340763209385405, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.0340763209385405 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.3474860335195531, + "acc_stderr": 0.015925564060208154, + "acc_norm": 0.3474860335195531, + "acc_norm_stderr": 0.015925564060208154 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.69, + "acc_stderr": 0.046482319871173156, + "acc_norm": 0.69, + "acc_norm_stderr": 0.046482319871173156 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5036764705882353, + "acc_stderr": 0.030372015885428195, + "acc_norm": 0.5036764705882353, + "acc_norm_stderr": 0.030372015885428195 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6571428571428571, + "acc_stderr": 0.03038726291954772, + "acc_norm": 0.6571428571428571, + "acc_norm_stderr": 0.03038726291954772 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7257383966244726, + "acc_stderr": 0.029041333510598035, + "acc_norm": 0.7257383966244726, + "acc_norm_stderr": 0.029041333510598035 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.44654498044328556, + "acc_stderr": 0.012697046024399663, + 
"acc_norm": 0.44654498044328556, + "acc_norm_stderr": 0.012697046024399663 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6568627450980392, + "acc_stderr": 0.03332139944668086, + "acc_norm": 0.6568627450980392, + "acc_norm_stderr": 0.03332139944668086 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6424242424242425, + "acc_stderr": 0.03742597043806587, + "acc_norm": 0.6424242424242425, + "acc_norm_stderr": 0.03742597043806587 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.799265605875153, + "mc1_stderr": 0.01402204571748215, + "mc2": 0.8601602291592664, + "mc2_stderr": 0.011715337046215628 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5017709563164109, + "acc_stderr": 0.017190246276231863, + "acc_norm": 0.5277449822904369, + "acc_norm_stderr": 0.017163867979456016 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "spow12/solar_v01", + "model_sha": "6f67d09e7e5a0627f642287257b6bf1f30d0b99e", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/squarelike/llama2-ko-medical-7b/result_2023-11-02 08:01:53.json b/squarelike/llama2-ko-medical-7b/result_2023-11-02 08:01:53.json new file mode 100644 
index 0000000000000000000000000000000000000000..f37d086ce72c4b68b364640c167b7fc5fc56a4e2 --- /dev/null +++ b/squarelike/llama2-ko-medical-7b/result_2023-11-02 08:01:53.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.1885665529010239, + "acc_stderr": 0.011430897647675815, + "acc_norm": 0.2380546075085324, + "acc_norm_stderr": 0.012445770028026205 + }, + "harness|ko_hellaswag|10": { + "acc": 0.25273849830711015, + "acc_stderr": 0.004336941069568736, + "acc_norm": 0.2559251145190201, + "acc_norm_stderr": 0.004354881005789729 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.29239766081871343, + "acc_stderr": 0.034886477134579236, + "acc_norm": 0.29239766081871343, + "acc_norm_stderr": 0.034886477134579236 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.1941747572815534, + "acc_stderr": 0.03916667762822584, + "acc_norm": 0.1941747572815534, + "acc_norm_stderr": 0.03916667762822584 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2707535121328225, + "acc_stderr": 0.015889888362560486, + "acc_norm": 0.2707535121328225, + "acc_norm_stderr": 0.015889888362560486 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04072314811876837, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04072314811876837 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.20425531914893616, + "acc_stderr": 0.02635515841334941, + "acc_norm": 0.20425531914893616, + "acc_norm_stderr": 0.02635515841334941 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.20481927710843373, + "acc_stderr": 0.03141784291663926, + "acc_norm": 0.20481927710843373, + "acc_norm_stderr": 0.03141784291663926 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2990353697749196, + "acc_stderr": 0.02600330111788514, + "acc_norm": 0.2990353697749196, + 
"acc_norm_stderr": 0.02600330111788514 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.20179372197309417, + "acc_stderr": 0.02693611191280227, + "acc_norm": 0.20179372197309417, + "acc_norm_stderr": 0.02693611191280227 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.22900763358778625, + "acc_stderr": 0.036853466317118506, + "acc_norm": 0.22900763358778625, + "acc_norm_stderr": 0.036853466317118506 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036845, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036845 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.25252525252525254, + "acc_stderr": 0.030954055470365907, + "acc_norm": 0.25252525252525254, + "acc_norm_stderr": 0.030954055470365907 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.296551724137931, + "acc_stderr": 0.03806142687309994, + "acc_norm": 0.296551724137931, + "acc_norm_stderr": 0.03806142687309994 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237657, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237657 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.21008403361344538, + "acc_stderr": 0.026461398717471874, + "acc_norm": 0.21008403361344538, + "acc_norm_stderr": 0.026461398717471874 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2128205128205128, + "acc_stderr": 0.020752423722128002, + "acc_norm": 0.2128205128205128, + "acc_norm_stderr": 0.020752423722128002 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.23148148148148148, + "acc_stderr": 0.04077494709252627, + "acc_norm": 
0.23148148148148148, + "acc_norm_stderr": 0.04077494709252627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2955665024630542, + "acc_stderr": 0.032104944337514575, + "acc_norm": 0.2955665024630542, + "acc_norm_stderr": 0.032104944337514575 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.25161290322580643, + "acc_stderr": 0.024685979286239956, + "acc_norm": 0.25161290322580643, + "acc_norm_stderr": 0.024685979286239956 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2564102564102564, + "acc_stderr": 0.028605953702004253, + "acc_norm": 0.2564102564102564, + "acc_norm_stderr": 0.028605953702004253 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2188679245283019, + "acc_stderr": 0.025447863825108597, + "acc_norm": 0.2188679245283019, + "acc_norm_stderr": 0.025447863825108597 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.20909090909090908, + "acc_stderr": 0.03895091015724135, + "acc_norm": 0.20909090909090908, + "acc_norm_stderr": 0.03895091015724135 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.026842057873833706, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.026842057873833706 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.271523178807947, + "acc_stderr": 0.03631329803969653, + "acc_norm": 0.271523178807947, + "acc_norm_stderr": 0.03631329803969653 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.24875621890547264, + "acc_stderr": 0.030567675938916714, + "acc_norm": 0.24875621890547264, + "acc_norm_stderr": 0.030567675938916714 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.24855491329479767, + "acc_stderr": 0.03295304696818317, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 0.03295304696818317 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2671957671957672, + "acc_stderr": 0.022789673145776575, + "acc_norm": 0.2671957671957672, + "acc_norm_stderr": 0.022789673145776575 + }, + 
"harness|ko_mmlu_college_biology|5": { + "acc": 0.2569444444444444, + "acc_stderr": 0.03653946969442099, + "acc_norm": 0.2569444444444444, + "acc_norm_stderr": 0.03653946969442099 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.18, + "acc_stderr": 0.03861229196653695, + "acc_norm": 0.18, + "acc_norm_stderr": 0.03861229196653695 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.29190751445086704, + "acc_stderr": 0.02447699407624734, + "acc_norm": 0.29190751445086704, + "acc_norm_stderr": 0.02447699407624734 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3006134969325153, + "acc_stderr": 0.03602511318806771, + "acc_norm": 0.3006134969325153, + "acc_norm_stderr": 0.03602511318806771 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2932098765432099, + "acc_stderr": 0.025329888171900926, + "acc_norm": 0.2932098765432099, + "acc_norm_stderr": 0.025329888171900926 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.22797927461139897, + "acc_stderr": 0.030276909945178256, + "acc_norm": 0.22797927461139897, + "acc_norm_stderr": 0.030276909945178256 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.04049339297748141, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.04049339297748141 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.22201834862385322, + "acc_stderr": 0.017818849564796617, + "acc_norm": 0.22201834862385322, + "acc_norm_stderr": 0.017818849564796617 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.15079365079365079, + "acc_stderr": 0.03200686497287392, + "acc_norm": 0.15079365079365079, + "acc_norm_stderr": 
0.03200686497287392 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.25163398692810457, + "acc_stderr": 0.0248480182638752, + "acc_norm": 0.25163398692810457, + "acc_norm_stderr": 0.0248480182638752 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.371900826446281, + "acc_stderr": 0.04412015806624503, + "acc_norm": 0.371900826446281, + "acc_norm_stderr": 0.04412015806624503 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3026315789473684, + "acc_stderr": 0.03738520676119668, + "acc_norm": 0.3026315789473684, + "acc_norm_stderr": 0.03738520676119668 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2761437908496732, + "acc_stderr": 0.018087276935663133, + "acc_norm": 0.2761437908496732, + "acc_norm_stderr": 0.018087276935663133 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2695035460992908, + "acc_stderr": 0.026469036818590634, + "acc_norm": 0.2695035460992908, + "acc_norm_stderr": 0.026469036818590634 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.24107142857142858, + "acc_stderr": 0.04059867246952689, + "acc_norm": 0.24107142857142858, + "acc_norm_stderr": 0.04059867246952689 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.21296296296296297, + "acc_stderr": 0.027920963147993662, + "acc_norm": 0.21296296296296297, + "acc_norm_stderr": 0.027920963147993662 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24692737430167597, + "acc_stderr": 0.014422292204808852, + "acc_norm": 0.24692737430167597, + "acc_norm_stderr": 0.014422292204808852 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, 
+ "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.16544117647058823, + "acc_stderr": 0.02257177102549475, + "acc_norm": 0.16544117647058823, + "acc_norm_stderr": 0.02257177102549475 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.24081632653061225, + "acc_stderr": 0.027372942201788167, + "acc_norm": 0.24081632653061225, + "acc_norm_stderr": 0.027372942201788167 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.26582278481012656, + "acc_stderr": 0.028756799629658332, + "acc_norm": 0.26582278481012656, + "acc_norm_stderr": 0.028756799629658332 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.273142112125163, + "acc_stderr": 0.011380150567830394, + "acc_norm": 0.273142112125163, + "acc_norm_stderr": 0.011380150567830394 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.03166009679399812, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.03166009679399812 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2787878787878788, + "acc_stderr": 0.03501438706296781, + "acc_norm": 0.2787878787878788, + "acc_norm_stderr": 0.03501438706296781 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24112607099143207, + "mc1_stderr": 0.014974827279752325, + "mc2": 0.4920860295556251, + "mc2_stderr": 0.016960443639831176 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.0755608028335301, + "acc_stderr": 0.00908661811311919, + "acc_norm": 0.2526564344746163, + "acc_norm_stderr": 0.014939640598798425 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + 
"harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + 
"harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "squarelike/llama2-ko-medical-7b", + "model_sha": "85acb5d9285798c89b004dc02b093b2d15a84116", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/sronger/ko-llm-llama-2-7b-LoRA-IA3/result_2023-11-29 11:35:39.json b/sronger/ko-llm-llama-2-7b-LoRA-IA3/result_2023-11-29 11:35:39.json new file mode 100644 index 0000000000000000000000000000000000000000..e842db785c592dbdcf2c27b84312ee575d560334 --- /dev/null +++ b/sronger/ko-llm-llama-2-7b-LoRA-IA3/result_2023-11-29 11:35:39.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.19283276450511946, + "acc_stderr": 0.011529055465663325, + "acc_norm": 0.24573378839590443, + "acc_norm_stderr": 0.012581033453730111 + }, + "harness|ko_hellaswag|10": { + "acc": 0.2506472814180442, + "acc_stderr": 0.004325000473328607, + "acc_norm": 0.24885480979884486, + "acc_norm_stderr": 0.0043146590346493955 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.32748538011695905, + "acc_stderr": 0.035993357714560276, + "acc_norm": 0.32748538011695905, + "acc_norm_stderr": 0.035993357714560276 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.17475728155339806, + "acc_stderr": 0.037601780060266196, + "acc_norm": 0.17475728155339806, + "acc_norm_stderr": 0.037601780060266196 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.29118773946360155, + "acc_stderr": 0.016246087069701404, + "acc_norm": 0.29118773946360155, + "acc_norm_stderr": 0.016246087069701404 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 
0.17777777777777778, + "acc_stderr": 0.03302789859901717, + "acc_norm": 0.17777777777777778, + "acc_norm_stderr": 0.03302789859901717 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036846, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036846 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.26382978723404255, + "acc_stderr": 0.028809989854102987, + "acc_norm": 0.26382978723404255, + "acc_norm_stderr": 0.028809989854102987 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.25903614457831325, + "acc_stderr": 0.03410646614071855, + "acc_norm": 0.25903614457831325, + "acc_norm_stderr": 0.03410646614071855 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2733118971061093, + "acc_stderr": 0.025311765975426115, + "acc_norm": 0.2733118971061093, + "acc_norm_stderr": 0.025311765975426115 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.28699551569506726, + "acc_stderr": 0.030360379710291954, + "acc_norm": 0.28699551569506726, + "acc_norm_stderr": 0.030360379710291954 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.25190839694656486, + "acc_stderr": 0.03807387116306086, + "acc_norm": 0.25190839694656486, + "acc_norm_stderr": 0.03807387116306086 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.21717171717171718, + "acc_stderr": 0.02937661648494563, + "acc_norm": 0.21717171717171718, + "acc_norm_stderr": 0.02937661648494563 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2620689655172414, + "acc_stderr": 0.036646663372252565, + "acc_norm": 0.2620689655172414, + "acc_norm_stderr": 0.036646663372252565 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.04023382273617747, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.04023382273617747 + }, + 
"harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.21428571428571427, + "acc_stderr": 0.02665353159671548, + "acc_norm": 0.21428571428571427, + "acc_norm_stderr": 0.02665353159671548 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.24102564102564103, + "acc_stderr": 0.02168554666533319, + "acc_norm": 0.24102564102564103, + "acc_norm_stderr": 0.02168554666533319 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932269, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932269 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.04236511258094633, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.04236511258094633 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2512315270935961, + "acc_stderr": 0.03051653073269444, + "acc_norm": 0.2512315270935961, + "acc_norm_stderr": 0.03051653073269444 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.18387096774193548, + "acc_stderr": 0.02203721734026783, + "acc_norm": 0.18387096774193548, + "acc_norm_stderr": 0.02203721734026783 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2948717948717949, + "acc_stderr": 0.029872577708891162, + "acc_norm": 0.2948717948717949, + "acc_norm_stderr": 0.029872577708891162 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.24150943396226415, + "acc_stderr": 0.02634148037111834, + "acc_norm": 0.24150943396226415, + "acc_norm_stderr": 0.02634148037111834 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.24545454545454545, + "acc_stderr": 0.04122066502878284, + "acc_norm": 0.24545454545454545, + "acc_norm_stderr": 0.04122066502878284 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.23703703703703705, + "acc_stderr": 0.02592887613276611, + "acc_norm": 
0.23703703703703705, + "acc_norm_stderr": 0.02592887613276611 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.19205298013245034, + "acc_stderr": 0.03216298420593613, + "acc_norm": 0.19205298013245034, + "acc_norm_stderr": 0.03216298420593613 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.24378109452736318, + "acc_stderr": 0.030360490154014645, + "acc_norm": 0.24378109452736318, + "acc_norm_stderr": 0.030360490154014645 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2138728323699422, + "acc_stderr": 0.031265112061730424, + "acc_norm": 0.2138728323699422, + "acc_norm_stderr": 0.031265112061730424 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.21957671957671956, + "acc_stderr": 0.02132001859977035, + "acc_norm": 0.21957671957671956, + "acc_norm_stderr": 0.02132001859977035 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2916666666666667, + "acc_stderr": 0.038009680605548594, + "acc_norm": 0.2916666666666667, + "acc_norm_stderr": 0.038009680605548594 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.18, + "acc_stderr": 0.03861229196653694, + "acc_norm": 0.18, + "acc_norm_stderr": 0.03861229196653694 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.24566473988439305, + "acc_stderr": 0.02317629820399201, + "acc_norm": 0.24566473988439305, + "acc_norm_stderr": 0.02317629820399201 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.294478527607362, + "acc_stderr": 0.03581165790474082, + "acc_norm": 0.294478527607362, + "acc_norm_stderr": 0.03581165790474082 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2191358024691358, + "acc_stderr": 0.023016705640262185, + "acc_norm": 0.2191358024691358, + "acc_norm_stderr": 0.023016705640262185 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.26, + "acc_stderr": 
0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.18134715025906736, + "acc_stderr": 0.02780703236068609, + "acc_norm": 0.18134715025906736, + "acc_norm_stderr": 0.02780703236068609 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.039994238792813365, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.039994238792813365 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.23302752293577983, + "acc_stderr": 0.018125669180861507, + "acc_norm": 0.23302752293577983, + "acc_norm_stderr": 0.018125669180861507 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.03718489006818115, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.03718489006818115 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.02428861946604611, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.02428861946604611 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.371900826446281, + "acc_stderr": 0.04412015806624504, + "acc_norm": 0.371900826446281, + "acc_norm_stderr": 0.04412015806624504 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.28289473684210525, + "acc_stderr": 0.03665349695640767, + "acc_norm": 0.28289473684210525, + "acc_norm_stderr": 0.03665349695640767 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.272875816993464, + "acc_stderr": 0.01802047414839358, + "acc_norm": 0.272875816993464, + "acc_norm_stderr": 0.01802047414839358 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.21631205673758866, + "acc_stderr": 0.024561720560562786, + "acc_norm": 0.21631205673758866, + "acc_norm_stderr": 0.024561720560562786 + }, + 
"harness|ko_mmlu_machine_learning|5": { + "acc": 0.3125, + "acc_stderr": 0.043994650575715215, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.043994650575715215 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.14814814814814814, + "acc_stderr": 0.02422762927372837, + "acc_norm": 0.14814814814814814, + "acc_norm_stderr": 0.02422762927372837 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2201117318435754, + "acc_stderr": 0.013856994024227179, + "acc_norm": 0.2201117318435754, + "acc_norm_stderr": 0.013856994024227179 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.17647058823529413, + "acc_stderr": 0.02315746830855936, + "acc_norm": 0.17647058823529413, + "acc_norm_stderr": 0.02315746830855936 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.19591836734693877, + "acc_stderr": 0.025409301953225678, + "acc_norm": 0.19591836734693877, + "acc_norm_stderr": 0.025409301953225678 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.26582278481012656, + "acc_stderr": 0.028756799629658332, + "acc_norm": 0.26582278481012656, + "acc_norm_stderr": 0.028756799629658332 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.24315514993481094, + "acc_stderr": 0.010956556654417348, + "acc_norm": 0.24315514993481094, + "acc_norm_stderr": 0.010956556654417348 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.029331162294251742, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.029331162294251742 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.18787878787878787, + "acc_stderr": 0.03050193405942914, + "acc_norm": 
0.18787878787878787, + "acc_norm_stderr": 0.03050193405942914 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24479804161566707, + "mc1_stderr": 0.015051869486715018, + "mc2": 0.4957664753905337, + "mc2_stderr": 0.016720650341527123 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.07910271546635184, + "acc_stderr": 0.00927931912600906, + "acc_norm": 0.3317591499409681, + "acc_norm_stderr": 0.016187984642157312 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + 
"harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "sronger/ko-llm-llama-2-7b-LoRA-IA3", + "model_sha": "2294d6279c3055c45b7e33953feae619534408e8", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/sronger/ko-llm-llama-2-7b-chat2/result_2023-11-29 08:01:38.json b/sronger/ko-llm-llama-2-7b-chat2/result_2023-11-29 08:01:38.json new file mode 100644 index 0000000000000000000000000000000000000000..83ea5c2bcbf82d8e2e9a8e5385856cf89e5bbbc2 --- /dev/null +++ b/sronger/ko-llm-llama-2-7b-chat2/result_2023-11-29 08:01:38.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.18430034129692832, + "acc_stderr": 0.011330517933037415, + "acc_norm": 0.2354948805460751, + "acc_norm_stderr": 
0.012399451855004746 + }, + "harness|ko_hellaswag|10": { + "acc": 0.25323640709022105, + "acc_stderr": 0.004339764434219062, + "acc_norm": 0.24507070304720174, + "acc_norm_stderr": 0.0042925005017162305 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.29239766081871343, + "acc_stderr": 0.034886477134579236, + "acc_norm": 0.29239766081871343, + "acc_norm_stderr": 0.034886477134579236 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.1941747572815534, + "acc_stderr": 0.03916667762822584, + "acc_norm": 0.1941747572815534, + "acc_norm_stderr": 0.03916667762822584 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2707535121328225, + "acc_stderr": 0.015889888362560486, + "acc_norm": 0.2707535121328225, + "acc_norm_stderr": 0.015889888362560486 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04072314811876837, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04072314811876837 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.20425531914893616, + "acc_stderr": 0.02635515841334941, + "acc_norm": 0.20425531914893616, + "acc_norm_stderr": 0.02635515841334941 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.20481927710843373, + "acc_stderr": 0.03141784291663926, + "acc_norm": 0.20481927710843373, + "acc_norm_stderr": 0.03141784291663926 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2990353697749196, + "acc_stderr": 0.02600330111788514, + "acc_norm": 0.2990353697749196, + "acc_norm_stderr": 0.02600330111788514 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.20179372197309417, + "acc_stderr": 0.02693611191280227, + "acc_norm": 0.20179372197309417, + "acc_norm_stderr": 0.02693611191280227 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.22900763358778625, + "acc_stderr": 0.036853466317118506, + "acc_norm": 0.22900763358778625, + 
"acc_norm_stderr": 0.036853466317118506 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036845, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036845 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.25252525252525254, + "acc_stderr": 0.030954055470365907, + "acc_norm": 0.25252525252525254, + "acc_norm_stderr": 0.030954055470365907 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.296551724137931, + "acc_stderr": 0.03806142687309994, + "acc_norm": 0.296551724137931, + "acc_norm_stderr": 0.03806142687309994 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237657, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237657 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.21008403361344538, + "acc_stderr": 0.026461398717471874, + "acc_norm": 0.21008403361344538, + "acc_norm_stderr": 0.026461398717471874 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2128205128205128, + "acc_stderr": 0.020752423722128002, + "acc_norm": 0.2128205128205128, + "acc_norm_stderr": 0.020752423722128002 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.23148148148148148, + "acc_stderr": 0.04077494709252627, + "acc_norm": 0.23148148148148148, + "acc_norm_stderr": 0.04077494709252627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2955665024630542, + "acc_stderr": 0.032104944337514575, + "acc_norm": 0.2955665024630542, + "acc_norm_stderr": 0.032104944337514575 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.25161290322580643, + "acc_stderr": 0.024685979286239956, + 
"acc_norm": 0.25161290322580643, + "acc_norm_stderr": 0.024685979286239956 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2564102564102564, + "acc_stderr": 0.028605953702004253, + "acc_norm": 0.2564102564102564, + "acc_norm_stderr": 0.028605953702004253 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2188679245283019, + "acc_stderr": 0.025447863825108597, + "acc_norm": 0.2188679245283019, + "acc_norm_stderr": 0.025447863825108597 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.20909090909090908, + "acc_stderr": 0.03895091015724135, + "acc_norm": 0.20909090909090908, + "acc_norm_stderr": 0.03895091015724135 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.026842057873833706, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.026842057873833706 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.271523178807947, + "acc_stderr": 0.03631329803969653, + "acc_norm": 0.271523178807947, + "acc_norm_stderr": 0.03631329803969653 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.24875621890547264, + "acc_stderr": 0.030567675938916714, + "acc_norm": 0.24875621890547264, + "acc_norm_stderr": 0.030567675938916714 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.24855491329479767, + "acc_stderr": 0.03295304696818317, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 0.03295304696818317 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2671957671957672, + "acc_stderr": 0.022789673145776575, + "acc_norm": 0.2671957671957672, + "acc_norm_stderr": 0.022789673145776575 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2569444444444444, + "acc_stderr": 0.03653946969442099, + "acc_norm": 0.2569444444444444, + "acc_norm_stderr": 0.03653946969442099 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.18, + "acc_stderr": 0.03861229196653695, + "acc_norm": 0.18, + "acc_norm_stderr": 0.03861229196653695 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + 
"acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.29190751445086704, + "acc_stderr": 0.02447699407624734, + "acc_norm": 0.29190751445086704, + "acc_norm_stderr": 0.02447699407624734 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3006134969325153, + "acc_stderr": 0.03602511318806771, + "acc_norm": 0.3006134969325153, + "acc_norm_stderr": 0.03602511318806771 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2932098765432099, + "acc_stderr": 0.025329888171900926, + "acc_norm": 0.2932098765432099, + "acc_norm_stderr": 0.025329888171900926 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.22797927461139897, + "acc_stderr": 0.030276909945178256, + "acc_norm": 0.22797927461139897, + "acc_norm_stderr": 0.030276909945178256 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.04049339297748141, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.04049339297748141 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.22201834862385322, + "acc_stderr": 0.017818849564796617, + "acc_norm": 0.22201834862385322, + "acc_norm_stderr": 0.017818849564796617 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.15079365079365079, + "acc_stderr": 0.03200686497287392, + "acc_norm": 0.15079365079365079, + "acc_norm_stderr": 0.03200686497287392 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.25163398692810457, + "acc_stderr": 0.0248480182638752, + "acc_norm": 0.25163398692810457, + "acc_norm_stderr": 0.0248480182638752 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + 
"harness|ko_mmlu_international_law|5": { + "acc": 0.371900826446281, + "acc_stderr": 0.04412015806624503, + "acc_norm": 0.371900826446281, + "acc_norm_stderr": 0.04412015806624503 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3026315789473684, + "acc_stderr": 0.03738520676119668, + "acc_norm": 0.3026315789473684, + "acc_norm_stderr": 0.03738520676119668 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2761437908496732, + "acc_stderr": 0.018087276935663133, + "acc_norm": 0.2761437908496732, + "acc_norm_stderr": 0.018087276935663133 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2695035460992908, + "acc_stderr": 0.026469036818590634, + "acc_norm": 0.2695035460992908, + "acc_norm_stderr": 0.026469036818590634 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.24107142857142858, + "acc_stderr": 0.04059867246952689, + "acc_norm": 0.24107142857142858, + "acc_norm_stderr": 0.04059867246952689 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.21296296296296297, + "acc_stderr": 0.027920963147993662, + "acc_norm": 0.21296296296296297, + "acc_norm_stderr": 0.027920963147993662 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24692737430167597, + "acc_stderr": 0.014422292204808852, + "acc_norm": 0.24692737430167597, + "acc_norm_stderr": 0.014422292204808852 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.16544117647058823, + "acc_stderr": 0.02257177102549475, + "acc_norm": 0.16544117647058823, + "acc_norm_stderr": 0.02257177102549475 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.24081632653061225, + "acc_stderr": 0.027372942201788167, + "acc_norm": 
0.24081632653061225, + "acc_norm_stderr": 0.027372942201788167 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.26582278481012656, + "acc_stderr": 0.028756799629658332, + "acc_norm": 0.26582278481012656, + "acc_norm_stderr": 0.028756799629658332 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2711864406779661, + "acc_stderr": 0.011354581451622985, + "acc_norm": 0.2711864406779661, + "acc_norm_stderr": 0.011354581451622985 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.25980392156862747, + "acc_stderr": 0.030778554678693264, + "acc_norm": 0.25980392156862747, + "acc_norm_stderr": 0.030778554678693264 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.296969696969697, + "acc_stderr": 0.03567969772268049, + "acc_norm": 0.296969696969697, + "acc_norm_stderr": 0.03567969772268049 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.25091799265605874, + "mc1_stderr": 0.015176985027707682, + "mc2": 0.5051802317030636, + "mc2_stderr": 0.016856528106826654 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.06847697756788666, + "acc_stderr": 0.008683282020992616, + "acc_norm": 0.3293978748524203, + "acc_norm_stderr": 0.016158746868147143 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + 
"harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "sronger/ko-llm-llama-2-7b-chat2", + "model_sha": 
"6f4b2c09a0ef4e114c83ccf1bd6e131d1a0fb39a", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/sronger/ko-llm-llama-2-7b-chat3/result_2023-11-29 10:32:08.json b/sronger/ko-llm-llama-2-7b-chat3/result_2023-11-29 10:32:08.json new file mode 100644 index 0000000000000000000000000000000000000000..0d44c77cf25dd4c713ec4881ad565714b1469361 --- /dev/null +++ b/sronger/ko-llm-llama-2-7b-chat3/result_2023-11-29 10:32:08.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.1885665529010239, + "acc_stderr": 0.011430897647675823, + "acc_norm": 0.24488054607508533, + "acc_norm_stderr": 0.012566273985131354 + }, + "harness|ko_hellaswag|10": { + "acc": 0.2545309699263095, + "acc_stderr": 0.0043470700195274775, + "acc_norm": 0.24746066520613424, + "acc_norm_stderr": 0.004306547156331383 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.29239766081871343, + "acc_stderr": 0.034886477134579236, + "acc_norm": 0.29239766081871343, + "acc_norm_stderr": 0.034886477134579236 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.1941747572815534, + "acc_stderr": 0.03916667762822584, + "acc_norm": 0.1941747572815534, + "acc_norm_stderr": 0.03916667762822584 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2707535121328225, + "acc_stderr": 0.015889888362560486, + "acc_norm": 0.2707535121328225, + "acc_norm_stderr": 0.015889888362560486 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04072314811876837, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04072314811876837 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.20425531914893616, + "acc_stderr": 0.02635515841334941, + 
"acc_norm": 0.20425531914893616, + "acc_norm_stderr": 0.02635515841334941 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.20481927710843373, + "acc_stderr": 0.03141784291663926, + "acc_norm": 0.20481927710843373, + "acc_norm_stderr": 0.03141784291663926 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2990353697749196, + "acc_stderr": 0.02600330111788514, + "acc_norm": 0.2990353697749196, + "acc_norm_stderr": 0.02600330111788514 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.20179372197309417, + "acc_stderr": 0.02693611191280227, + "acc_norm": 0.20179372197309417, + "acc_norm_stderr": 0.02693611191280227 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.22900763358778625, + "acc_stderr": 0.036853466317118506, + "acc_norm": 0.22900763358778625, + "acc_norm_stderr": 0.036853466317118506 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036845, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036845 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.25252525252525254, + "acc_stderr": 0.030954055470365907, + "acc_norm": 0.25252525252525254, + "acc_norm_stderr": 0.030954055470365907 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.296551724137931, + "acc_stderr": 0.03806142687309994, + "acc_norm": 0.296551724137931, + "acc_norm_stderr": 0.03806142687309994 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237657, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237657 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.21008403361344538, + "acc_stderr": 0.026461398717471874, + "acc_norm": 0.21008403361344538, + "acc_norm_stderr": 0.026461398717471874 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2128205128205128, + "acc_stderr": 0.020752423722128002, + "acc_norm": 0.2128205128205128, + "acc_norm_stderr": 0.020752423722128002 + }, + "harness|ko_mmlu_computer_security|5": { 
+ "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.23148148148148148, + "acc_stderr": 0.04077494709252627, + "acc_norm": 0.23148148148148148, + "acc_norm_stderr": 0.04077494709252627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2955665024630542, + "acc_stderr": 0.032104944337514575, + "acc_norm": 0.2955665024630542, + "acc_norm_stderr": 0.032104944337514575 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.25161290322580643, + "acc_stderr": 0.024685979286239956, + "acc_norm": 0.25161290322580643, + "acc_norm_stderr": 0.024685979286239956 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2564102564102564, + "acc_stderr": 0.028605953702004253, + "acc_norm": 0.2564102564102564, + "acc_norm_stderr": 0.028605953702004253 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2188679245283019, + "acc_stderr": 0.025447863825108597, + "acc_norm": 0.2188679245283019, + "acc_norm_stderr": 0.025447863825108597 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.20909090909090908, + "acc_stderr": 0.03895091015724135, + "acc_norm": 0.20909090909090908, + "acc_norm_stderr": 0.03895091015724135 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.026842057873833706, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.026842057873833706 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.271523178807947, + "acc_stderr": 0.03631329803969653, + "acc_norm": 0.271523178807947, + "acc_norm_stderr": 0.03631329803969653 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.24875621890547264, + "acc_stderr": 0.030567675938916714, + "acc_norm": 0.24875621890547264, + "acc_norm_stderr": 0.030567675938916714 + }, + 
"harness|ko_mmlu_college_medicine|5": { + "acc": 0.24855491329479767, + "acc_stderr": 0.03295304696818317, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 0.03295304696818317 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2671957671957672, + "acc_stderr": 0.022789673145776575, + "acc_norm": 0.2671957671957672, + "acc_norm_stderr": 0.022789673145776575 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2569444444444444, + "acc_stderr": 0.03653946969442099, + "acc_norm": 0.2569444444444444, + "acc_norm_stderr": 0.03653946969442099 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.18, + "acc_stderr": 0.03861229196653695, + "acc_norm": 0.18, + "acc_norm_stderr": 0.03861229196653695 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.29190751445086704, + "acc_stderr": 0.02447699407624734, + "acc_norm": 0.29190751445086704, + "acc_norm_stderr": 0.02447699407624734 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3006134969325153, + "acc_stderr": 0.03602511318806771, + "acc_norm": 0.3006134969325153, + "acc_norm_stderr": 0.03602511318806771 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2932098765432099, + "acc_stderr": 0.025329888171900926, + "acc_norm": 0.2932098765432099, + "acc_norm_stderr": 0.025329888171900926 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.22797927461139897, + "acc_stderr": 0.030276909945178256, + "acc_norm": 0.22797927461139897, + "acc_norm_stderr": 0.030276909945178256 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.04049339297748141, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 
0.04049339297748141 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.22201834862385322, + "acc_stderr": 0.017818849564796617, + "acc_norm": 0.22201834862385322, + "acc_norm_stderr": 0.017818849564796617 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.15079365079365079, + "acc_stderr": 0.03200686497287392, + "acc_norm": 0.15079365079365079, + "acc_norm_stderr": 0.03200686497287392 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.25163398692810457, + "acc_stderr": 0.0248480182638752, + "acc_norm": 0.25163398692810457, + "acc_norm_stderr": 0.0248480182638752 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.371900826446281, + "acc_stderr": 0.04412015806624503, + "acc_norm": 0.371900826446281, + "acc_norm_stderr": 0.04412015806624503 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3026315789473684, + "acc_stderr": 0.03738520676119668, + "acc_norm": 0.3026315789473684, + "acc_norm_stderr": 0.03738520676119668 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2761437908496732, + "acc_stderr": 0.018087276935663133, + "acc_norm": 0.2761437908496732, + "acc_norm_stderr": 0.018087276935663133 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2695035460992908, + "acc_stderr": 0.026469036818590634, + "acc_norm": 0.2695035460992908, + "acc_norm_stderr": 0.026469036818590634 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.24107142857142858, + "acc_stderr": 0.04059867246952689, + "acc_norm": 0.24107142857142858, + "acc_norm_stderr": 0.04059867246952689 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.21296296296296297, + "acc_stderr": 0.027920963147993662, + "acc_norm": 0.21296296296296297, + "acc_norm_stderr": 0.027920963147993662 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24692737430167597, + "acc_stderr": 
0.014422292204808852, + "acc_norm": 0.24692737430167597, + "acc_norm_stderr": 0.014422292204808852 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.16544117647058823, + "acc_stderr": 0.02257177102549475, + "acc_norm": 0.16544117647058823, + "acc_norm_stderr": 0.02257177102549475 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.24081632653061225, + "acc_stderr": 0.027372942201788167, + "acc_norm": 0.24081632653061225, + "acc_norm_stderr": 0.027372942201788167 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.26582278481012656, + "acc_stderr": 0.028756799629658332, + "acc_norm": 0.26582278481012656, + "acc_norm_stderr": 0.028756799629658332 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.27053455019556716, + "acc_stderr": 0.011345996743539265, + "acc_norm": 0.27053455019556716, + "acc_norm_stderr": 0.011345996743539265 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.25980392156862747, + "acc_stderr": 0.030778554678693264, + "acc_norm": 0.25980392156862747, + "acc_norm_stderr": 0.030778554678693264 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.28484848484848485, + "acc_stderr": 0.035243908445117836, + "acc_norm": 0.28484848484848485, + "acc_norm_stderr": 0.035243908445117836 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24479804161566707, + "mc1_stderr": 0.015051869486715013, + "mc2": 0.5039896617577472, + "mc2_stderr": 0.016756833084863455 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.06847697756788666, + "acc_stderr": 0.008683282020992614, + "acc_norm": 0.33293978748524206, + "acc_norm_stderr": 0.016202431208373797 + } + }, + "versions": 
{ + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + 
"harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "sronger/ko-llm-llama-2-7b-chat3", + "model_sha": "96cebf91679d9a5910486aa0324f2f6db685623a", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/sronger/koko_test/result_2023-11-29 08:29:42.json b/sronger/koko_test/result_2023-11-29 08:29:42.json new file mode 100644 index 0000000000000000000000000000000000000000..811011ce08b248f80c46b4fa165499fa1f3d8c88 --- /dev/null +++ b/sronger/koko_test/result_2023-11-29 08:29:42.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.20051194539249148, + "acc_stderr": 0.011700318050499363, + "acc_norm": 0.2721843003412969, + "acc_norm_stderr": 0.013006600406423704 + }, + "harness|ko_hellaswag|10": { + "acc": 0.25124477195777734, + "acc_stderr": 0.00432842570099869, + "acc_norm": 0.24736108344951205, + "acc_norm_stderr": 0.004305965431515147 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.2982456140350877, + "acc_stderr": 0.03508771929824565, + "acc_norm": 0.2982456140350877, + "acc_norm_stderr": 0.03508771929824565 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.20388349514563106, + "acc_stderr": 
0.039891398595317706, + "acc_norm": 0.20388349514563106, + "acc_norm_stderr": 0.039891398595317706 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2707535121328225, + "acc_stderr": 0.015889888362560486, + "acc_norm": 0.2707535121328225, + "acc_norm_stderr": 0.015889888362560486 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04072314811876837, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04072314811876837 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.225531914893617, + "acc_stderr": 0.02732107841738753, + "acc_norm": 0.225531914893617, + "acc_norm_stderr": 0.02732107841738753 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.22289156626506024, + "acc_stderr": 0.03240004825594687, + "acc_norm": 0.22289156626506024, + "acc_norm_stderr": 0.03240004825594687 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2990353697749196, + "acc_stderr": 0.02600330111788514, + "acc_norm": 0.2990353697749196, + "acc_norm_stderr": 0.02600330111788514 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.2062780269058296, + "acc_stderr": 0.027157150479563824, + "acc_norm": 0.2062780269058296, + "acc_norm_stderr": 0.027157150479563824 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.22900763358778625, + "acc_stderr": 0.036853466317118506, + "acc_norm": 0.22900763358778625, + "acc_norm_stderr": 0.036853466317118506 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036845, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036845 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.24242424242424243, + "acc_stderr": 0.030532892233932026, + "acc_norm": 0.24242424242424243, + "acc_norm_stderr": 0.030532892233932026 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.296551724137931, + 
"acc_stderr": 0.03806142687309994, + "acc_norm": 0.296551724137931, + "acc_norm_stderr": 0.03806142687309994 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237657, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237657 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.21008403361344538, + "acc_stderr": 0.026461398717471874, + "acc_norm": 0.21008403361344538, + "acc_norm_stderr": 0.026461398717471874 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2128205128205128, + "acc_stderr": 0.020752423722128002, + "acc_norm": 0.2128205128205128, + "acc_norm_stderr": 0.020752423722128002 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.24074074074074073, + "acc_stderr": 0.04133119440243839, + "acc_norm": 0.24074074074074073, + "acc_norm_stderr": 0.04133119440243839 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2955665024630542, + "acc_stderr": 0.032104944337514575, + "acc_norm": 0.2955665024630542, + "acc_norm_stderr": 0.032104944337514575 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.25161290322580643, + "acc_stderr": 0.024685979286239956, + "acc_norm": 0.25161290322580643, + "acc_norm_stderr": 0.024685979286239956 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.24786324786324787, + "acc_stderr": 0.028286324075564386, + "acc_norm": 0.24786324786324787, + "acc_norm_stderr": 0.028286324075564386 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2188679245283019, + "acc_stderr": 0.025447863825108597, + "acc_norm": 0.2188679245283019, + "acc_norm_stderr": 0.025447863825108597 + }, + 
"harness|ko_mmlu_public_relations|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03955932861795833, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03955932861795833 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.026842057873833706, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.026842057873833706 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.271523178807947, + "acc_stderr": 0.03631329803969653, + "acc_norm": 0.271523178807947, + "acc_norm_stderr": 0.03631329803969653 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.25870646766169153, + "acc_stderr": 0.03096590312357301, + "acc_norm": 0.25870646766169153, + "acc_norm_stderr": 0.03096590312357301 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.24855491329479767, + "acc_stderr": 0.03295304696818317, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 0.03295304696818317 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2671957671957672, + "acc_stderr": 0.022789673145776575, + "acc_norm": 0.2671957671957672, + "acc_norm_stderr": 0.022789673145776575 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2569444444444444, + "acc_stderr": 0.03653946969442099, + "acc_norm": 0.2569444444444444, + "acc_norm_stderr": 0.03653946969442099 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.18, + "acc_stderr": 0.03861229196653695, + "acc_norm": 0.18, + "acc_norm_stderr": 0.03861229196653695 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.29190751445086704, + "acc_stderr": 0.02447699407624734, + "acc_norm": 0.29190751445086704, + "acc_norm_stderr": 0.02447699407624734 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.294478527607362, + "acc_stderr": 0.03581165790474082, + "acc_norm": 0.294478527607362, + 
"acc_norm_stderr": 0.03581165790474082 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.29012345679012347, + "acc_stderr": 0.02525117393649502, + "acc_norm": 0.29012345679012347, + "acc_norm_stderr": 0.02525117393649502 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.22797927461139897, + "acc_stderr": 0.030276909945178256, + "acc_norm": 0.22797927461139897, + "acc_norm_stderr": 0.030276909945178256 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.04049339297748141, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.04049339297748141 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.21651376146788992, + "acc_stderr": 0.017658710594443145, + "acc_norm": 0.21651376146788992, + "acc_norm_stderr": 0.017658710594443145 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.15079365079365079, + "acc_stderr": 0.03200686497287392, + "acc_norm": 0.15079365079365079, + "acc_norm_stderr": 0.03200686497287392 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.25163398692810457, + "acc_stderr": 0.0248480182638752, + "acc_norm": 0.25163398692810457, + "acc_norm_stderr": 0.0248480182638752 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.371900826446281, + "acc_stderr": 0.04412015806624503, + "acc_norm": 0.371900826446281, + "acc_norm_stderr": 0.04412015806624503 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3026315789473684, + "acc_stderr": 0.03738520676119668, + "acc_norm": 0.3026315789473684, + "acc_norm_stderr": 0.03738520676119668 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.018120224251484584, + 
"acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.018120224251484584 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2695035460992908, + "acc_stderr": 0.026469036818590634, + "acc_norm": 0.2695035460992908, + "acc_norm_stderr": 0.026469036818590634 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.24107142857142858, + "acc_stderr": 0.04059867246952689, + "acc_norm": 0.24107142857142858, + "acc_norm_stderr": 0.04059867246952689 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.21296296296296297, + "acc_stderr": 0.027920963147993662, + "acc_norm": 0.21296296296296297, + "acc_norm_stderr": 0.027920963147993662 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24692737430167597, + "acc_stderr": 0.014422292204808852, + "acc_norm": 0.24692737430167597, + "acc_norm_stderr": 0.014422292204808852 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.16544117647058823, + "acc_stderr": 0.02257177102549475, + "acc_norm": 0.16544117647058823, + "acc_norm_stderr": 0.02257177102549475 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.24081632653061225, + "acc_stderr": 0.027372942201788167, + "acc_norm": 0.24081632653061225, + "acc_norm_stderr": 0.027372942201788167 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.26582278481012656, + "acc_stderr": 0.028756799629658332, + "acc_norm": 0.26582278481012656, + "acc_norm_stderr": 0.028756799629658332 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.27053455019556716, + "acc_stderr": 0.011345996743539265, + "acc_norm": 0.27053455019556716, + "acc_norm_stderr": 0.011345996743539265 + }, + 
"harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.25980392156862747, + "acc_stderr": 0.030778554678693264, + "acc_norm": 0.25980392156862747, + "acc_norm_stderr": 0.030778554678693264 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.28484848484848485, + "acc_stderr": 0.035243908445117836, + "acc_norm": 0.28484848484848485, + "acc_norm_stderr": 0.035243908445117836 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.22766217870257038, + "mc1_stderr": 0.014679255032111068, + "mc2": NaN, + "mc2_stderr": NaN + }, + "harness|ko_commongen_v2|2": { + "acc": 0.07201889020070838, + "acc_stderr": 0.008888072708500573, + "acc_norm": 0.21959858323494688, + "acc_norm_stderr": 0.014232743085580256 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + 
"harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "sronger/koko_test", + "model_sha": "6e22512c2781b0f5e34d50358a02ff7d1f00da40", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/sronger/mistral-ko-llm/result_2023-12-04 07:31:58.json b/sronger/mistral-ko-llm/result_2023-12-04 07:31:58.json new file mode 100644 index 0000000000000000000000000000000000000000..bf82adcd63228fd94b20367a0ae6c9e697f5231d --- 
/dev/null +++ b/sronger/mistral-ko-llm/result_2023-12-04 07:31:58.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.181740614334471, + "acc_stderr": 0.011269198948880236, + "acc_norm": 0.24744027303754265, + "acc_norm_stderr": 0.01261035266329267 + }, + "harness|ko_hellaswag|10": { + "acc": 0.25323640709022105, + "acc_stderr": 0.004339764434219061, + "acc_norm": 0.2477594104760008, + "acc_norm_stderr": 0.00430829048410049 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.29239766081871343, + "acc_stderr": 0.034886477134579236, + "acc_norm": 0.29239766081871343, + "acc_norm_stderr": 0.034886477134579236 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.1941747572815534, + "acc_stderr": 0.03916667762822584, + "acc_norm": 0.1941747572815534, + "acc_norm_stderr": 0.03916667762822584 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2707535121328225, + "acc_stderr": 0.015889888362560486, + "acc_norm": 0.2707535121328225, + "acc_norm_stderr": 0.015889888362560486 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04072314811876837, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04072314811876837 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.20425531914893616, + "acc_stderr": 0.02635515841334941, + "acc_norm": 0.20425531914893616, + "acc_norm_stderr": 0.02635515841334941 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.20481927710843373, + "acc_stderr": 0.03141784291663926, + "acc_norm": 0.20481927710843373, + "acc_norm_stderr": 0.03141784291663926 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2990353697749196, + "acc_stderr": 0.02600330111788514, + "acc_norm": 0.2990353697749196, + "acc_norm_stderr": 0.02600330111788514 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.20179372197309417, + 
"acc_stderr": 0.02693611191280227, + "acc_norm": 0.20179372197309417, + "acc_norm_stderr": 0.02693611191280227 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.22900763358778625, + "acc_stderr": 0.036853466317118506, + "acc_norm": 0.22900763358778625, + "acc_norm_stderr": 0.036853466317118506 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036845, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036845 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.25252525252525254, + "acc_stderr": 0.030954055470365907, + "acc_norm": 0.25252525252525254, + "acc_norm_stderr": 0.030954055470365907 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.296551724137931, + "acc_stderr": 0.03806142687309994, + "acc_norm": 0.296551724137931, + "acc_norm_stderr": 0.03806142687309994 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237657, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237657 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.21008403361344538, + "acc_stderr": 0.026461398717471874, + "acc_norm": 0.21008403361344538, + "acc_norm_stderr": 0.026461398717471874 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2128205128205128, + "acc_stderr": 0.020752423722128002, + "acc_norm": 0.2128205128205128, + "acc_norm_stderr": 0.020752423722128002 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.23148148148148148, + "acc_stderr": 0.04077494709252627, + "acc_norm": 0.23148148148148148, + "acc_norm_stderr": 0.04077494709252627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + 
"acc": 0.2955665024630542, + "acc_stderr": 0.032104944337514575, + "acc_norm": 0.2955665024630542, + "acc_norm_stderr": 0.032104944337514575 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.25161290322580643, + "acc_stderr": 0.024685979286239956, + "acc_norm": 0.25161290322580643, + "acc_norm_stderr": 0.024685979286239956 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2564102564102564, + "acc_stderr": 0.028605953702004253, + "acc_norm": 0.2564102564102564, + "acc_norm_stderr": 0.028605953702004253 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2188679245283019, + "acc_stderr": 0.025447863825108597, + "acc_norm": 0.2188679245283019, + "acc_norm_stderr": 0.025447863825108597 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.20909090909090908, + "acc_stderr": 0.03895091015724135, + "acc_norm": 0.20909090909090908, + "acc_norm_stderr": 0.03895091015724135 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.026842057873833706, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.026842057873833706 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.271523178807947, + "acc_stderr": 0.03631329803969653, + "acc_norm": 0.271523178807947, + "acc_norm_stderr": 0.03631329803969653 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.24875621890547264, + "acc_stderr": 0.030567675938916714, + "acc_norm": 0.24875621890547264, + "acc_norm_stderr": 0.030567675938916714 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.24855491329479767, + "acc_stderr": 0.03295304696818317, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 0.03295304696818317 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2671957671957672, + "acc_stderr": 0.022789673145776575, + "acc_norm": 0.2671957671957672, + "acc_norm_stderr": 0.022789673145776575 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2569444444444444, + "acc_stderr": 0.03653946969442099, + "acc_norm": 
0.2569444444444444, + "acc_norm_stderr": 0.03653946969442099 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.18, + "acc_stderr": 0.03861229196653695, + "acc_norm": 0.18, + "acc_norm_stderr": 0.03861229196653695 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.29190751445086704, + "acc_stderr": 0.02447699407624734, + "acc_norm": 0.29190751445086704, + "acc_norm_stderr": 0.02447699407624734 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3006134969325153, + "acc_stderr": 0.03602511318806771, + "acc_norm": 0.3006134969325153, + "acc_norm_stderr": 0.03602511318806771 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2932098765432099, + "acc_stderr": 0.025329888171900926, + "acc_norm": 0.2932098765432099, + "acc_norm_stderr": 0.025329888171900926 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.22797927461139897, + "acc_stderr": 0.030276909945178256, + "acc_norm": 0.22797927461139897, + "acc_norm_stderr": 0.030276909945178256 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.04049339297748141, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.04049339297748141 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.22201834862385322, + "acc_stderr": 0.017818849564796617, + "acc_norm": 0.22201834862385322, + "acc_norm_stderr": 0.017818849564796617 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.15079365079365079, + "acc_stderr": 0.03200686497287392, + "acc_norm": 0.15079365079365079, + "acc_norm_stderr": 0.03200686497287392 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.25163398692810457, + "acc_stderr": 0.0248480182638752, + 
"acc_norm": 0.25163398692810457, + "acc_norm_stderr": 0.0248480182638752 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.371900826446281, + "acc_stderr": 0.04412015806624503, + "acc_norm": 0.371900826446281, + "acc_norm_stderr": 0.04412015806624503 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3026315789473684, + "acc_stderr": 0.03738520676119668, + "acc_norm": 0.3026315789473684, + "acc_norm_stderr": 0.03738520676119668 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2761437908496732, + "acc_stderr": 0.018087276935663133, + "acc_norm": 0.2761437908496732, + "acc_norm_stderr": 0.018087276935663133 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2695035460992908, + "acc_stderr": 0.026469036818590634, + "acc_norm": 0.2695035460992908, + "acc_norm_stderr": 0.026469036818590634 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.24107142857142858, + "acc_stderr": 0.04059867246952689, + "acc_norm": 0.24107142857142858, + "acc_norm_stderr": 0.04059867246952689 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.21296296296296297, + "acc_stderr": 0.027920963147993662, + "acc_norm": 0.21296296296296297, + "acc_norm_stderr": 0.027920963147993662 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24692737430167597, + "acc_stderr": 0.014422292204808852, + "acc_norm": 0.24692737430167597, + "acc_norm_stderr": 0.014422292204808852 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.16544117647058823, + 
"acc_stderr": 0.02257177102549475, + "acc_norm": 0.16544117647058823, + "acc_norm_stderr": 0.02257177102549475 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.24081632653061225, + "acc_stderr": 0.027372942201788167, + "acc_norm": 0.24081632653061225, + "acc_norm_stderr": 0.027372942201788167 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.26582278481012656, + "acc_stderr": 0.028756799629658332, + "acc_norm": 0.26582278481012656, + "acc_norm_stderr": 0.028756799629658332 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.27053455019556716, + "acc_stderr": 0.011345996743539265, + "acc_norm": 0.27053455019556716, + "acc_norm_stderr": 0.011345996743539265 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.25980392156862747, + "acc_stderr": 0.030778554678693264, + "acc_norm": 0.25980392156862747, + "acc_norm_stderr": 0.030778554678693264 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.28484848484848485, + "acc_stderr": 0.035243908445117836, + "acc_norm": 0.28484848484848485, + "acc_norm_stderr": 0.035243908445117836 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.23990208078335373, + "mc1_stderr": 0.014948812679062135, + "mc2": 0.4836672858203277, + "mc2_stderr": 0.017102451726330438 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.08028335301062574, + "acc_stderr": 0.009342316867054019, + "acc_norm": 0.32585596221959856, + "acc_norm_stderr": 0.016114023894800326 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + 
"harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + 
"harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "sronger/mistral-ko-llm", + "model_sha": "f3077660096ca9ec680dea9b50a82ff338d8da85", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/statking/zephyr-7b-sft-full-orpo/result_2024-05-23 11:23:43.json b/statking/zephyr-7b-sft-full-orpo/result_2024-05-23 11:23:43.json new file mode 100644 index 0000000000000000000000000000000000000000..ba330de903c89058356d2aba57efa71108920429 --- /dev/null +++ b/statking/zephyr-7b-sft-full-orpo/result_2024-05-23 11:23:43.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2960750853242321, + "acc_stderr": 0.013340916085246266, + "acc_norm": 0.3660409556313993, + "acc_norm_stderr": 0.014077223108470144 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3588926508663613, + "acc_stderr": 0.004786953146657063, + "acc_norm": 0.4576777534355706, + "acc_norm_stderr": 0.004971874159777695 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4502923976608187, + "acc_stderr": 0.038158273659132366, + "acc_norm": 0.4502923976608187, + "acc_norm_stderr": 0.038158273659132366 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5436893203883495, + "acc_stderr": 0.049318019942204146, + "acc_norm": 0.5436893203883495, + "acc_norm_stderr": 0.049318019942204146 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4623243933588761, + "acc_stderr": 0.01782913176428719, + "acc_norm": 0.4623243933588761, + "acc_norm_stderr": 0.01782913176428719 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.04171654161354544, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.04171654161354544 + }, + 
"harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.34893617021276596, + "acc_stderr": 0.031158522131357783, + "acc_norm": 0.34893617021276596, + "acc_norm_stderr": 0.031158522131357783 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.37349397590361444, + "acc_stderr": 0.03765845117168863, + "acc_norm": 0.37349397590361444, + "acc_norm_stderr": 0.03765845117168863 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4405144694533762, + "acc_stderr": 0.028196400574197426, + "acc_norm": 0.4405144694533762, + "acc_norm_stderr": 0.028196400574197426 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.47085201793721976, + "acc_stderr": 0.03350073248773404, + "acc_norm": 0.47085201793721976, + "acc_norm_stderr": 0.03350073248773404 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3511450381679389, + "acc_stderr": 0.04186445163013751, + "acc_norm": 0.3511450381679389, + "acc_norm_stderr": 0.04186445163013751 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5, + "acc_stderr": 0.035623524993954825, + "acc_norm": 0.5, + "acc_norm_stderr": 0.035623524993954825 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.04104269211806232, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.04104269211806232 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.04440521906179327, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.04440521906179327 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.47478991596638653, + "acc_stderr": 0.0324371805513741, + "acc_norm": 0.47478991596638653, + "acc_norm_stderr": 0.0324371805513741 + }, + 
"harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4153846153846154, + "acc_stderr": 0.024985354923102308, + "acc_norm": 0.4153846153846154, + "acc_norm_stderr": 0.024985354923102308 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.59, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.59, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.04826217294139894, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.04826217294139894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3497536945812808, + "acc_stderr": 0.03355400904969565, + "acc_norm": 0.3497536945812808, + "acc_norm_stderr": 0.03355400904969565 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4032258064516129, + "acc_stderr": 0.02790615082604114, + "acc_norm": 0.4032258064516129, + "acc_norm_stderr": 0.02790615082604114 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6623931623931624, + "acc_stderr": 0.03098029699261856, + "acc_norm": 0.6623931623931624, + "acc_norm_stderr": 0.03098029699261856 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.41509433962264153, + "acc_stderr": 0.030325945789286102, + "acc_norm": 0.41509433962264153, + "acc_norm_stderr": 0.030325945789286102 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4727272727272727, + "acc_stderr": 0.04782001791380063, + "acc_norm": 0.4727272727272727, + "acc_norm_stderr": 0.04782001791380063 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2851851851851852, + "acc_stderr": 0.027528599210340492, + "acc_norm": 0.2851851851851852, + "acc_norm_stderr": 0.027528599210340492 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.03710185726119995, + "acc_norm": 0.2913907284768212, + 
"acc_norm_stderr": 0.03710185726119995 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.527363184079602, + "acc_stderr": 0.03530235517334682, + "acc_norm": 0.527363184079602, + "acc_norm_stderr": 0.03530235517334682 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3930635838150289, + "acc_stderr": 0.03724249595817729, + "acc_norm": 0.3930635838150289, + "acc_norm_stderr": 0.03724249595817729 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.023919984164047736, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.023919984164047736 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3263888888888889, + "acc_stderr": 0.03921067198982266, + "acc_norm": 0.3263888888888889, + "acc_norm_stderr": 0.03921067198982266 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4624277456647399, + "acc_stderr": 0.026842985519615375, + "acc_norm": 0.4624277456647399, + "acc_norm_stderr": 0.026842985519615375 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4110429447852761, + "acc_stderr": 0.038656978537853624, + "acc_norm": 0.4110429447852761, + "acc_norm_stderr": 0.038656978537853624 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.41975308641975306, + "acc_stderr": 0.027460099557005135, + "acc_norm": 0.41975308641975306, + "acc_norm_stderr": 0.027460099557005135 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.47150259067357514, + "acc_stderr": 0.03602573571288441, + "acc_norm": 
0.47150259067357514, + "acc_norm_stderr": 0.03602573571288441 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3684210526315789, + "acc_stderr": 0.04537815354939391, + "acc_norm": 0.3684210526315789, + "acc_norm_stderr": 0.04537815354939391 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.48073394495412847, + "acc_stderr": 0.02142140298254888, + "acc_norm": 0.48073394495412847, + "acc_norm_stderr": 0.02142140298254888 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04216370213557835, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04216370213557835 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4084967320261438, + "acc_stderr": 0.028146405993096358, + "acc_norm": 0.4084967320261438, + "acc_norm_stderr": 0.028146405993096358 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5619834710743802, + "acc_stderr": 0.045291468044357915, + "acc_norm": 0.5619834710743802, + "acc_norm_stderr": 0.045291468044357915 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3684210526315789, + "acc_stderr": 0.03925523381052932, + "acc_norm": 0.3684210526315789, + "acc_norm_stderr": 0.03925523381052932 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.369281045751634, + "acc_stderr": 0.01952431674486635, + "acc_norm": 0.369281045751634, + "acc_norm_stderr": 0.01952431674486635 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.33687943262411346, + "acc_stderr": 0.028195534873966727, + "acc_norm": 0.33687943262411346, + "acc_norm_stderr": 0.028195534873966727 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.32142857142857145, + "acc_stderr": 0.04432804055291519, + "acc_norm": 0.32142857142857145, + "acc_norm_stderr": 0.04432804055291519 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 
0.4351851851851852, + "acc_stderr": 0.03381200005643525, + "acc_norm": 0.4351851851851852, + "acc_norm_stderr": 0.03381200005643525 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.25921787709497207, + "acc_stderr": 0.014655780837497719, + "acc_norm": 0.25921787709497207, + "acc_norm_stderr": 0.014655780837497719 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3713235294117647, + "acc_stderr": 0.02934980313976587, + "acc_norm": 0.3713235294117647, + "acc_norm_stderr": 0.02934980313976587 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.34285714285714286, + "acc_stderr": 0.03038726291954773, + "acc_norm": 0.34285714285714286, + "acc_norm_stderr": 0.03038726291954773 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.510548523206751, + "acc_stderr": 0.032539983791662855, + "acc_norm": 0.510548523206751, + "acc_norm_stderr": 0.032539983791662855 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3220338983050847, + "acc_stderr": 0.011933936071891093, + "acc_norm": 0.3220338983050847, + "acc_norm_stderr": 0.011933936071891093 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.03410785338904719, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.03410785338904719 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.36363636363636365, + "acc_stderr": 0.03756335775187897, + "acc_norm": 0.36363636363636365, + "acc_norm_stderr": 0.03756335775187897 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2839657282741738, + "mc1_stderr": 0.015785370858396708, + "mc2": 0.4649205506401486, + "mc2_stderr": 
0.015557008029053434 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.41086186540731995, + "acc_stderr": 0.01691497276784106, + "acc_norm": 0.5017709563164109, + "acc_norm_stderr": 0.017190246276231853 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 
1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "statking/zephyr-7b-sft-full-orpo", + "model_sha": "797ee78eb44b3831c9102d1619af9f7493066098", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/statpan/singung-sft-v0.1/result_2023-12-24 13:47:09.json b/statpan/singung-sft-v0.1/result_2023-12-24 13:47:09.json new file mode 100644 index 0000000000000000000000000000000000000000..21ed1b5407328517d54e1c06e6abd671149503de --- /dev/null +++ b/statpan/singung-sft-v0.1/result_2023-12-24 13:47:09.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3728668941979522, + "acc_stderr": 0.014131176760131172, + "acc_norm": 0.4069965870307167, + "acc_norm_stderr": 0.014356399418009124 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3897629954192392, + "acc_stderr": 0.004866997110388193, + "acc_norm": 0.4962158932483569, + "acc_norm_stderr": 0.004989638507409919 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 
0.47953216374269003, + "acc_stderr": 0.038316105328219316, + "acc_norm": 0.47953216374269003, + "acc_norm_stderr": 0.038316105328219316 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.49514563106796117, + "acc_stderr": 0.04950504382128919, + "acc_norm": 0.49514563106796117, + "acc_norm_stderr": 0.04950504382128919 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.017867695938429778, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.017867695938429778 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37777777777777777, + "acc_stderr": 0.04188307537595853, + "acc_norm": 0.37777777777777777, + "acc_norm_stderr": 0.04188307537595853 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3276595744680851, + "acc_stderr": 0.030683020843231004, + "acc_norm": 0.3276595744680851, + "acc_norm_stderr": 0.030683020843231004 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3674698795180723, + "acc_stderr": 0.03753267402120574, + "acc_norm": 0.3674698795180723, + "acc_norm_stderr": 0.03753267402120574 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.45016077170418006, + "acc_stderr": 0.02825666072336018, + "acc_norm": 0.45016077170418006, + "acc_norm_stderr": 0.02825666072336018 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.47533632286995514, + "acc_stderr": 0.033516951676526276, + "acc_norm": 0.47533632286995514, + "acc_norm_stderr": 0.033516951676526276 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.40458015267175573, + "acc_stderr": 0.043046937953806645, + "acc_norm": 0.40458015267175573, + "acc_norm_stderr": 0.043046937953806645 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.45, + "acc_stderr": 0.049999999999999996, + "acc_norm": 0.45, + "acc_norm_stderr": 0.049999999999999996 + }, + "harness|ko_mmlu_high_school_geography|5": { + 
"acc": 0.5555555555555556, + "acc_stderr": 0.035402943770953675, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.035402943770953675 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.43448275862068964, + "acc_stderr": 0.04130740879555497, + "acc_norm": 0.43448275862068964, + "acc_norm_stderr": 0.04130740879555497 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.14705882352941177, + "acc_stderr": 0.03524068951567449, + "acc_norm": 0.14705882352941177, + "acc_norm_stderr": 0.03524068951567449 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.46638655462184875, + "acc_stderr": 0.03240501447690071, + "acc_norm": 0.46638655462184875, + "acc_norm_stderr": 0.03240501447690071 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.41794871794871796, + "acc_stderr": 0.025007329882461227, + "acc_norm": 0.41794871794871796, + "acc_norm_stderr": 0.025007329882461227 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720685, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720685 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3694581280788177, + "acc_stderr": 0.03395970381998574, + "acc_norm": 0.3694581280788177, + "acc_norm_stderr": 0.03395970381998574 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.44516129032258067, + "acc_stderr": 0.028272410186214906, + "acc_norm": 0.44516129032258067, + "acc_norm_stderr": 0.028272410186214906 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7222222222222222, + "acc_stderr": 0.02934311479809446, + "acc_norm": 0.7222222222222222, + "acc_norm_stderr": 0.02934311479809446 + }, + 
"harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4716981132075472, + "acc_stderr": 0.030723535249006107, + "acc_norm": 0.4716981132075472, + "acc_norm_stderr": 0.030723535249006107 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4909090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.4909090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.32592592592592595, + "acc_stderr": 0.028578348365473075, + "acc_norm": 0.32592592592592595, + "acc_norm_stderr": 0.028578348365473075 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2781456953642384, + "acc_stderr": 0.03658603262763743, + "acc_norm": 0.2781456953642384, + "acc_norm_stderr": 0.03658603262763743 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.572139303482587, + "acc_stderr": 0.03498541988407795, + "acc_norm": 0.572139303482587, + "acc_norm_stderr": 0.03498541988407795 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3872832369942196, + "acc_stderr": 0.03714325906302064, + "acc_norm": 0.3872832369942196, + "acc_norm_stderr": 0.03714325906302064 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.024870815251057093, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.024870815251057093 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3680555555555556, + "acc_stderr": 0.04032999053960718, + "acc_norm": 0.3680555555555556, + "acc_norm_stderr": 0.04032999053960718 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4797687861271676, + "acc_stderr": 0.026897049996382875, + "acc_norm": 0.4797687861271676, + 
"acc_norm_stderr": 0.026897049996382875 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4662576687116564, + "acc_stderr": 0.039194155450484096, + "acc_norm": 0.4662576687116564, + "acc_norm_stderr": 0.039194155450484096 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4537037037037037, + "acc_stderr": 0.027701228468542595, + "acc_norm": 0.4537037037037037, + "acc_norm_stderr": 0.027701228468542595 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.48186528497409326, + "acc_stderr": 0.036060650018329185, + "acc_norm": 0.48186528497409326, + "acc_norm_stderr": 0.036060650018329185 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2982456140350877, + "acc_stderr": 0.04303684033537316, + "acc_norm": 0.2982456140350877, + "acc_norm_stderr": 0.04303684033537316 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.47889908256880737, + "acc_stderr": 0.02141822475426465, + "acc_norm": 0.47889908256880737, + "acc_norm_stderr": 0.02141822475426465 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.04360314860077459, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.04360314860077459 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4869281045751634, + "acc_stderr": 0.028620130800700246, + "acc_norm": 0.4869281045751634, + "acc_norm_stderr": 0.028620130800700246 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6694214876033058, + "acc_stderr": 0.04294340845212094, + "acc_norm": 0.6694214876033058, + "acc_norm_stderr": 0.04294340845212094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4342105263157895, + "acc_stderr": 0.04033565667848319, + 
"acc_norm": 0.4342105263157895, + "acc_norm_stderr": 0.04033565667848319 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.37254901960784315, + "acc_stderr": 0.019559646809215937, + "acc_norm": 0.37254901960784315, + "acc_norm_stderr": 0.019559646809215937 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3262411347517731, + "acc_stderr": 0.02796845304356317, + "acc_norm": 0.3262411347517731, + "acc_norm_stderr": 0.02796845304356317 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.36607142857142855, + "acc_stderr": 0.0457237235873743, + "acc_norm": 0.36607142857142855, + "acc_norm_stderr": 0.0457237235873743 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3425925925925926, + "acc_stderr": 0.032365852526021574, + "acc_norm": 0.3425925925925926, + "acc_norm_stderr": 0.032365852526021574 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23687150837988827, + "acc_stderr": 0.01421957078810399, + "acc_norm": 0.23687150837988827, + "acc_norm_stderr": 0.01421957078810399 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.39705882352941174, + "acc_stderr": 0.02972215209928006, + "acc_norm": 0.39705882352941174, + "acc_norm_stderr": 0.02972215209928006 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4530612244897959, + "acc_stderr": 0.031867859300041296, + "acc_norm": 0.4530612244897959, + "acc_norm_stderr": 0.031867859300041296 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5443037974683544, + "acc_stderr": 0.03241920684693334, + "acc_norm": 0.5443037974683544, + "acc_norm_stderr": 0.03241920684693334 + }, + 
"harness|ko_mmlu_professional_law|5": { + "acc": 0.3116036505867014, + "acc_stderr": 0.011829039182849645, + "acc_norm": 0.3116036505867014, + "acc_norm_stderr": 0.011829039182849645 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4803921568627451, + "acc_stderr": 0.03506612560524866, + "acc_norm": 0.4803921568627451, + "acc_norm_stderr": 0.03506612560524866 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.03895658065271846, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.03895658065271846 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2864137086903305, + "mc1_stderr": 0.015826142439502342, + "mc2": 0.4719141578197675, + "mc2_stderr": 0.01599361059010559 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4510035419126328, + "acc_stderr": 0.017107618859549353, + "acc_norm": 0.4852420306965762, + "acc_norm_stderr": 0.017182864434998567 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + 
"harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "statpan/singung-sft-v0.1", + "model_sha": "10ebcfa310dafd576f4767819900d3c9c80077e0", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git 
a/sue3489/test0_kullm-polyglot-5.8b-v2-koalpaca-v1.1b/result_2023-10-04 08:06:42.json b/sue3489/test0_kullm-polyglot-5.8b-v2-koalpaca-v1.1b/result_2023-10-04 08:06:42.json new file mode 100644 index 0000000000000000000000000000000000000000..e18f2b5580bac58f38a92dd55f168a01ae17cce6 --- /dev/null +++ b/sue3489/test0_kullm-polyglot-5.8b-v2-koalpaca-v1.1b/result_2023-10-04 08:06:42.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2713310580204778, + "acc_stderr": 0.012993807727545789, + "acc_norm": 0.310580204778157, + "acc_norm_stderr": 0.013522292098053057 + }, + "harness|ko_hellaswag|10": { + "acc": 0.36456881099382593, + "acc_stderr": 0.004803253812881045, + "acc_norm": 0.46564429396534557, + "acc_norm_stderr": 0.004977988452502642 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.22807017543859648, + "acc_stderr": 0.03218093795602357, + "acc_norm": 0.22807017543859648, + "acc_norm_stderr": 0.03218093795602357 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2912621359223301, + "acc_stderr": 0.04498676320572924, + "acc_norm": 0.2912621359223301, + "acc_norm_stderr": 0.04498676320572924 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.23499361430395913, + "acc_stderr": 0.015162024152278445, + "acc_norm": 0.23499361430395913, + "acc_norm_stderr": 0.015162024152278445 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2740740740740741, + "acc_stderr": 0.03853254836552003, + "acc_norm": 0.2740740740740741, + "acc_norm_stderr": 0.03853254836552003 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.1829787234042553, + "acc_stderr": 0.02527604100044997, + "acc_norm": 0.1829787234042553, + "acc_norm_stderr": 0.02527604100044997 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.30120481927710846, + "acc_stderr": 0.0357160923005348, + "acc_norm": 
0.30120481927710846, + "acc_norm_stderr": 0.0357160923005348 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.27009646302250806, + "acc_stderr": 0.0252180403734106, + "acc_norm": 0.27009646302250806, + "acc_norm_stderr": 0.0252180403734106 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.27802690582959644, + "acc_stderr": 0.030069584874494033, + "acc_norm": 0.27802690582959644, + "acc_norm_stderr": 0.030069584874494033 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2366412213740458, + "acc_stderr": 0.03727673575596918, + "acc_norm": 0.2366412213740458, + "acc_norm_stderr": 0.03727673575596918 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3181818181818182, + "acc_stderr": 0.033184773338453315, + "acc_norm": 0.3181818181818182, + "acc_norm_stderr": 0.033184773338453315 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2827586206896552, + "acc_stderr": 0.03752833958003336, + "acc_norm": 0.2827586206896552, + "acc_norm_stderr": 0.03752833958003336 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.044405219061793254, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.044405219061793254 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3445378151260504, + "acc_stderr": 0.030868682604121622, + "acc_norm": 0.3445378151260504, + "acc_norm_stderr": 0.030868682604121622 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.358974358974359, + "acc_stderr": 0.024321738484602364, + "acc_norm": 0.358974358974359, + "acc_norm_stderr": 0.024321738484602364 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.21, + "acc_stderr": 
0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.24074074074074073, + "acc_stderr": 0.04133119440243838, + "acc_norm": 0.24074074074074073, + "acc_norm_stderr": 0.04133119440243838 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2413793103448276, + "acc_stderr": 0.03010833071801162, + "acc_norm": 0.2413793103448276, + "acc_norm_stderr": 0.03010833071801162 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3096774193548387, + "acc_stderr": 0.026302774983517414, + "acc_norm": 0.3096774193548387, + "acc_norm_stderr": 0.026302774983517414 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.21794871794871795, + "acc_stderr": 0.02704685763071668, + "acc_norm": 0.21794871794871795, + "acc_norm_stderr": 0.02704685763071668 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2528301886792453, + "acc_stderr": 0.026749899771241238, + "acc_norm": 0.2528301886792453, + "acc_norm_stderr": 0.026749899771241238 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.3181818181818182, + "acc_stderr": 0.04461272175910507, + "acc_norm": 0.3181818181818182, + "acc_norm_stderr": 0.04461272175910507 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.026962424325073838, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.026962424325073838 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + "acc_stderr": 0.037345356767871984, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.037345356767871984 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.23880597014925373, + "acc_stderr": 0.030147775935409217, + "acc_norm": 0.23880597014925373, + "acc_norm_stderr": 0.030147775935409217 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2658959537572254, + "acc_stderr": 0.03368762932259431, + "acc_norm": 0.2658959537572254, + "acc_norm_stderr": 0.03368762932259431 + }, + 
"harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.24603174603174602, + "acc_stderr": 0.022182037202948365, + "acc_norm": 0.24603174603174602, + "acc_norm_stderr": 0.022182037202948365 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2569444444444444, + "acc_stderr": 0.03653946969442099, + "acc_norm": 0.2569444444444444, + "acc_norm_stderr": 0.03653946969442099 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.23699421965317918, + "acc_stderr": 0.022894082489925992, + "acc_norm": 0.23699421965317918, + "acc_norm_stderr": 0.022894082489925992 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2331288343558282, + "acc_stderr": 0.0332201579577674, + "acc_norm": 0.2331288343558282, + "acc_norm_stderr": 0.0332201579577674 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2654320987654321, + "acc_stderr": 0.024569223600460845, + "acc_norm": 0.2654320987654321, + "acc_norm_stderr": 0.024569223600460845 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.3160621761658031, + "acc_stderr": 0.033553973696861736, + "acc_norm": 0.3160621761658031, + "acc_norm_stderr": 0.033553973696861736 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.0404933929774814, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.0404933929774814 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3247706422018349, + "acc_stderr": 0.02007772910931033, + "acc_norm": 0.3247706422018349, + "acc_norm_stderr": 
0.02007772910931033 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.04134913018303316, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.04134913018303316 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.28104575163398693, + "acc_stderr": 0.025738854797818737, + "acc_norm": 0.28104575163398693, + "acc_norm_stderr": 0.025738854797818737 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.24793388429752067, + "acc_stderr": 0.03941897526516302, + "acc_norm": 0.24793388429752067, + "acc_norm_stderr": 0.03941897526516302 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3223684210526316, + "acc_stderr": 0.03803510248351586, + "acc_norm": 0.3223684210526316, + "acc_norm_stderr": 0.03803510248351586 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.22712418300653595, + "acc_stderr": 0.016949853279212376, + "acc_norm": 0.22712418300653595, + "acc_norm_stderr": 0.016949853279212376 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2375886524822695, + "acc_stderr": 0.025389512552729906, + "acc_norm": 0.2375886524822695, + "acc_norm_stderr": 0.025389512552729906 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25892857142857145, + "acc_stderr": 0.04157751539865629, + "acc_norm": 0.25892857142857145, + "acc_norm_stderr": 0.04157751539865629 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.38425925925925924, + "acc_stderr": 0.03317354514310742, + "acc_norm": 0.38425925925925924, + "acc_norm_stderr": 0.03317354514310742 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27262569832402234, + "acc_stderr": 0.014893391735249608, + "acc_norm": 0.27262569832402234, + "acc_norm_stderr": 0.014893391735249608 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 
0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4522058823529412, + "acc_stderr": 0.030233758551596452, + "acc_norm": 0.4522058823529412, + "acc_norm_stderr": 0.030233758551596452 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3020408163265306, + "acc_stderr": 0.029393609319879808, + "acc_norm": 0.3020408163265306, + "acc_norm_stderr": 0.029393609319879808 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.270042194092827, + "acc_stderr": 0.028900721906293426, + "acc_norm": 0.270042194092827, + "acc_norm_stderr": 0.028900721906293426 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.25097783572359844, + "acc_stderr": 0.011073730299187224, + "acc_norm": 0.25097783572359844, + "acc_norm_stderr": 0.011073730299187224 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.25, + "acc_stderr": 0.03039153369274154, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03039153369274154 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2787878787878788, + "acc_stderr": 0.03501438706296781, + "acc_norm": 0.2787878787878788, + "acc_norm_stderr": 0.03501438706296781 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.28886168910648713, + "mc1_stderr": 0.015866346401384308, + "mc2": 0.4382110452098873, + "mc2_stderr": 0.015112522165835224 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.32585596221959856, + "acc_stderr": 0.016114023894800326, + "acc_norm": 0.3990554899645809, + "acc_norm_stderr": 0.016836377292849303 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + 
"harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + 
"harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "sue3489/test0_kullm-polyglot-5.8b-v2-koalpaca-v1.1b", + "model_sha": "79acd9e76f6a5f1e814294761b11c31fc24b9e64", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/sue3489/test1_kullm-polyglot-5.8b-v2-koalpaca-v1.1b/result_2023-10-04 08:49:57.json b/sue3489/test1_kullm-polyglot-5.8b-v2-koalpaca-v1.1b/result_2023-10-04 08:49:57.json new file mode 100644 index 0000000000000000000000000000000000000000..4fc44ac866aa2946d163962cd97c361543bae564 --- /dev/null +++ b/sue3489/test1_kullm-polyglot-5.8b-v2-koalpaca-v1.1b/result_2023-10-04 08:49:57.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2841296928327645, + "acc_stderr": 0.013179442447653887, + "acc_norm": 0.32593856655290104, + "acc_norm_stderr": 0.013697432466693237 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3791077474606652, + "acc_stderr": 0.004841734453506664, + "acc_norm": 0.4759012148974308, + "acc_norm_stderr": 0.004983982396187361 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.19298245614035087, + "acc_stderr": 0.030267457554898465, + "acc_norm": 0.19298245614035087, + "acc_norm_stderr": 0.030267457554898465 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.27184466019417475, + "acc_stderr": 0.044052680241409216, + "acc_norm": 0.27184466019417475, + 
"acc_norm_stderr": 0.044052680241409216 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2503192848020434, + "acc_stderr": 0.015491088951494597, + "acc_norm": 0.2503192848020434, + "acc_norm_stderr": 0.015491088951494597 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.28888888888888886, + "acc_stderr": 0.0391545063041425, + "acc_norm": 0.28888888888888886, + "acc_norm_stderr": 0.0391545063041425 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.22, + "acc_stderr": 0.041633319989322695, + "acc_norm": 0.22, + "acc_norm_stderr": 0.041633319989322695 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.251063829787234, + "acc_stderr": 0.028346963777162452, + "acc_norm": 0.251063829787234, + "acc_norm_stderr": 0.028346963777162452 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.25301204819277107, + "acc_stderr": 0.033844291552331346, + "acc_norm": 0.25301204819277107, + "acc_norm_stderr": 0.033844291552331346 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.26366559485530544, + "acc_stderr": 0.02502553850053234, + "acc_norm": 0.26366559485530544, + "acc_norm_stderr": 0.02502553850053234 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.28699551569506726, + "acc_stderr": 0.030360379710291936, + "acc_norm": 0.28699551569506726, + "acc_norm_stderr": 0.030360379710291936 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2900763358778626, + "acc_stderr": 0.03980066246467765, + "acc_norm": 0.2900763358778626, + "acc_norm_stderr": 0.03980066246467765 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.2676767676767677, + "acc_stderr": 0.03154449888270286, + "acc_norm": 0.2676767676767677, + "acc_norm_stderr": 0.03154449888270286 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2896551724137931, + "acc_stderr": 0.037800192304380135, + "acc_norm": 
0.2896551724137931, + "acc_norm_stderr": 0.037800192304380135 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.04023382273617746, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.04023382273617746 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.027553614467863783, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.027553614467863783 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2564102564102564, + "acc_stderr": 0.02213908110397153, + "acc_norm": 0.2564102564102564, + "acc_norm_stderr": 0.02213908110397153 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.18, + "acc_stderr": 0.03861229196653695, + "acc_norm": 0.18, + "acc_norm_stderr": 0.03861229196653695 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.25, + "acc_stderr": 0.04186091791394607, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04186091791394607 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.23645320197044334, + "acc_stderr": 0.029896114291733545, + "acc_norm": 0.23645320197044334, + "acc_norm_stderr": 0.029896114291733545 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.24193548387096775, + "acc_stderr": 0.024362599693031096, + "acc_norm": 0.24193548387096775, + "acc_norm_stderr": 0.024362599693031096 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.21794871794871795, + "acc_stderr": 0.027046857630716677, + "acc_norm": 0.21794871794871795, + "acc_norm_stderr": 0.027046857630716677 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.24150943396226415, + "acc_stderr": 0.026341480371118352, + "acc_norm": 0.24150943396226415, + "acc_norm_stderr": 0.026341480371118352 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.3181818181818182, + "acc_stderr": 
0.04461272175910508, + "acc_norm": 0.3181818181818182, + "acc_norm_stderr": 0.04461272175910508 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2851851851851852, + "acc_stderr": 0.027528599210340492, + "acc_norm": 0.2851851851851852, + "acc_norm_stderr": 0.027528599210340492 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2052980132450331, + "acc_stderr": 0.03297986648473836, + "acc_norm": 0.2052980132450331, + "acc_norm_stderr": 0.03297986648473836 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.23880597014925373, + "acc_stderr": 0.030147775935409217, + "acc_norm": 0.23880597014925373, + "acc_norm_stderr": 0.030147775935409217 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.24277456647398843, + "acc_stderr": 0.0326926380614177, + "acc_norm": 0.24277456647398843, + "acc_norm_stderr": 0.0326926380614177 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2328042328042328, + "acc_stderr": 0.021765961672154534, + "acc_norm": 0.2328042328042328, + "acc_norm_stderr": 0.021765961672154534 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3194444444444444, + "acc_stderr": 0.03899073687357335, + "acc_norm": 0.3194444444444444, + "acc_norm_stderr": 0.03899073687357335 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.24855491329479767, + "acc_stderr": 0.023267528432100174, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 0.023267528432100174 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.22699386503067484, + "acc_stderr": 0.03291099578615769, + "acc_norm": 0.22699386503067484, + "acc_norm_stderr": 0.03291099578615769 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 
0.25617283950617287, + "acc_stderr": 0.0242885336377261, + "acc_norm": 0.25617283950617287, + "acc_norm_stderr": 0.0242885336377261 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.2538860103626943, + "acc_stderr": 0.03141024780565319, + "acc_norm": 0.2538860103626943, + "acc_norm_stderr": 0.03141024780565319 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.18421052631578946, + "acc_stderr": 0.03646758875075566, + "acc_norm": 0.18421052631578946, + "acc_norm_stderr": 0.03646758875075566 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.30091743119266057, + "acc_stderr": 0.019664751366802114, + "acc_norm": 0.30091743119266057, + "acc_norm_stderr": 0.019664751366802114 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.04006168083848878, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.04006168083848878 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.22875816993464052, + "acc_stderr": 0.02405102973991225, + "acc_norm": 0.22875816993464052, + "acc_norm_stderr": 0.02405102973991225 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909282, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909282 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2231404958677686, + "acc_stderr": 0.03800754475228733, + "acc_norm": 0.2231404958677686, + "acc_norm_stderr": 0.03800754475228733 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.2565789473684211, + "acc_stderr": 0.0355418036802569, + "acc_norm": 0.2565789473684211, + "acc_norm_stderr": 0.0355418036802569 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.24019607843137256, + "acc_stderr": 0.017282760695167418, + "acc_norm": 0.24019607843137256, + "acc_norm_stderr": 0.017282760695167418 + }, + 
"harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2127659574468085, + "acc_stderr": 0.024414612974307703, + "acc_norm": 0.2127659574468085, + "acc_norm_stderr": 0.024414612974307703 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3125, + "acc_stderr": 0.043994650575715215, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.043994650575715215 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.26851851851851855, + "acc_stderr": 0.03022522616001237, + "acc_norm": 0.26851851851851855, + "acc_norm_stderr": 0.03022522616001237 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27150837988826815, + "acc_stderr": 0.014874252168095278, + "acc_norm": 0.27150837988826815, + "acc_norm_stderr": 0.014874252168095278 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3602941176470588, + "acc_stderr": 0.029163128570670736, + "acc_norm": 0.3602941176470588, + "acc_norm_stderr": 0.029163128570670736 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2571428571428571, + "acc_stderr": 0.027979823538744546, + "acc_norm": 0.2571428571428571, + "acc_norm_stderr": 0.027979823538744546 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.270042194092827, + "acc_stderr": 0.028900721906293426, + "acc_norm": 0.270042194092827, + "acc_norm_stderr": 0.028900721906293426 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2470664928292047, + "acc_stderr": 0.011015752255279338, + "acc_norm": 0.2470664928292047, + "acc_norm_stderr": 0.011015752255279338 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.03019028245350194, + "acc_norm": 
0.24509803921568626, + "acc_norm_stderr": 0.03019028245350194 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03225078108306289, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03225078108306289 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2766217870257038, + "mc1_stderr": 0.015659605755326905, + "mc2": 0.43178124206391555, + "mc2_stderr": 0.01588615796057271 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.29161747343565525, + "acc_stderr": 0.015626276690070242, + "acc_norm": 0.3187721369539551, + "acc_norm_stderr": 0.016021427055309588 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + 
"harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "sue3489/test1_kullm-polyglot-5.8b-v2-koalpaca-v1.1b", + "model_sha": "acc7ed3105114ba922fe4b408807b57e39ec0cff", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/sunburstAI/sb_solar_ko_10.7B_v0.2/result_2024-03-06 23:33:39.json b/sunburstAI/sb_solar_ko_10.7B_v0.2/result_2024-03-06 23:33:39.json new file mode 100644 index 0000000000000000000000000000000000000000..583a279fdd523ed7d47158bfbacd0959f61b0f2b --- /dev/null +++ 
b/sunburstAI/sb_solar_ko_10.7B_v0.2/result_2024-03-06 23:33:39.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.386518771331058, + "acc_stderr": 0.014230084761910471, + "acc_norm": 0.45307167235494883, + "acc_norm_stderr": 0.01454689205200563 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4213304122684724, + "acc_stderr": 0.004927631806477563, + "acc_norm": 0.5708026289583749, + "acc_norm_stderr": 0.004939500404882186 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5964912280701754, + "acc_stderr": 0.03762738699917057, + "acc_norm": 0.5964912280701754, + "acc_norm_stderr": 0.03762738699917057 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.47572815533980584, + "acc_stderr": 0.049449010929737795, + "acc_norm": 0.47572815533980584, + "acc_norm_stderr": 0.049449010929737795 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5517241379310345, + "acc_stderr": 0.017784034534992423, + "acc_norm": 0.5517241379310345, + "acc_norm_stderr": 0.017784034534992423 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.5037037037037037, + "acc_stderr": 0.043192236258113324, + "acc_norm": 0.5037037037037037, + "acc_norm_stderr": 0.043192236258113324 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768077, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768077 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.40425531914893614, + "acc_stderr": 0.03208115750788683, + "acc_norm": 0.40425531914893614, + "acc_norm_stderr": 0.03208115750788683 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.43373493975903615, + "acc_stderr": 0.03858158940685515, + "acc_norm": 0.43373493975903615, + "acc_norm_stderr": 0.03858158940685515 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5305466237942122, + "acc_stderr": 0.028345045864840625, + "acc_norm": 0.5305466237942122, + "acc_norm_stderr": 0.028345045864840625 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.484304932735426, + 
"acc_stderr": 0.0335412657542081, + "acc_norm": 0.484304932735426, + "acc_norm_stderr": 0.0335412657542081 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.42748091603053434, + "acc_stderr": 0.04338920305792401, + "acc_norm": 0.42748091603053434, + "acc_norm_stderr": 0.04338920305792401 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.43, + "acc_stderr": 0.0497569851956243, + "acc_norm": 0.43, + "acc_norm_stderr": 0.0497569851956243 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5353535353535354, + "acc_stderr": 0.03553436368828064, + "acc_norm": 0.5353535353535354, + "acc_norm_stderr": 0.03553436368828064 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4413793103448276, + "acc_stderr": 0.04137931034482758, + "acc_norm": 0.4413793103448276, + "acc_norm_stderr": 0.04137931034482758 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.04023382273617749, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.04023382273617749 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5168067226890757, + "acc_stderr": 0.03246013680375308, + "acc_norm": 0.5168067226890757, + "acc_norm_stderr": 0.03246013680375308 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.48717948717948717, + "acc_stderr": 0.025342671293807264, + "acc_norm": 0.48717948717948717, + "acc_norm_stderr": 0.025342671293807264 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4039408866995074, + 
"acc_stderr": 0.0345245390382204, + "acc_norm": 0.4039408866995074, + "acc_norm_stderr": 0.0345245390382204 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4774193548387097, + "acc_stderr": 0.028414985019707868, + "acc_norm": 0.4774193548387097, + "acc_norm_stderr": 0.028414985019707868 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6709401709401709, + "acc_stderr": 0.03078232157768817, + "acc_norm": 0.6709401709401709, + "acc_norm_stderr": 0.03078232157768817 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4226415094339623, + "acc_stderr": 0.03040233144576954, + "acc_norm": 0.4226415094339623, + "acc_norm_stderr": 0.03040233144576954 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5181818181818182, + "acc_stderr": 0.04785964010794916, + "acc_norm": 0.5181818181818182, + "acc_norm_stderr": 0.04785964010794916 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.27037037037037037, + "acc_stderr": 0.02708037281514565, + "acc_norm": 0.27037037037037037, + "acc_norm_stderr": 0.02708037281514565 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + "acc_stderr": 0.03734535676787198, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.03734535676787198 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6119402985074627, + "acc_stderr": 0.0344578996436275, + "acc_norm": 0.6119402985074627, + "acc_norm_stderr": 0.0344578996436275 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4277456647398844, + "acc_stderr": 0.037724468575180276, + "acc_norm": 0.4277456647398844, + "acc_norm_stderr": 0.037724468575180276 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.023068188848261107, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.023068188848261107 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3472222222222222, + "acc_stderr": 0.03981240543717862, + "acc_norm": 0.3472222222222222, + "acc_norm_stderr": 0.03981240543717862 
+ }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.7, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.7, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5578034682080925, + "acc_stderr": 0.026738603643807403, + "acc_norm": 0.5578034682080925, + "acc_norm_stderr": 0.026738603643807403 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5337423312883436, + "acc_stderr": 0.039194155450484096, + "acc_norm": 0.5337423312883436, + "acc_norm_stderr": 0.039194155450484096 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5123456790123457, + "acc_stderr": 0.027812262269327245, + "acc_norm": 0.5123456790123457, + "acc_norm_stderr": 0.027812262269327245 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5751295336787565, + "acc_stderr": 0.035674713352125395, + "acc_norm": 0.5751295336787565, + "acc_norm_stderr": 0.035674713352125395 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.040969851398436716, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.040969851398436716 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5651376146788991, + "acc_stderr": 0.021254631465609273, + "acc_norm": 0.5651376146788991, + "acc_norm_stderr": 0.021254631465609273 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04216370213557835, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04216370213557835 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.02845263998508801, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 
0.02845263998508801 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6528925619834711, + "acc_stderr": 0.04345724570292534, + "acc_norm": 0.6528925619834711, + "acc_norm_stderr": 0.04345724570292534 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40789473684210525, + "acc_stderr": 0.03999309712777472, + "acc_norm": 0.40789473684210525, + "acc_norm_stderr": 0.03999309712777472 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.01994491413687358, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.01994491413687358 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3617021276595745, + "acc_stderr": 0.0286638201471995, + "acc_norm": 0.3617021276595745, + "acc_norm_stderr": 0.0286638201471995 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.04287858751340456, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.04287858751340456 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3101851851851852, + "acc_stderr": 0.03154696285656628, + "acc_norm": 0.3101851851851852, + "acc_norm_stderr": 0.03154696285656628 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3639705882352941, + "acc_stderr": 0.02922719246003203, + "acc_norm": 
0.3639705882352941, + "acc_norm_stderr": 0.02922719246003203 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.47346938775510206, + "acc_stderr": 0.03196412734523272, + "acc_norm": 0.47346938775510206, + "acc_norm_stderr": 0.03196412734523272 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6540084388185654, + "acc_stderr": 0.03096481058878671, + "acc_norm": 0.6540084388185654, + "acc_norm_stderr": 0.03096481058878671 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.35528031290743156, + "acc_stderr": 0.012223623364044044, + "acc_norm": 0.35528031290743156, + "acc_norm_stderr": 0.012223623364044044 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5490196078431373, + "acc_stderr": 0.034924061041636124, + "acc_norm": 0.5490196078431373, + "acc_norm_stderr": 0.034924061041636124 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6242424242424243, + "acc_stderr": 0.03781887353205983, + "acc_norm": 0.6242424242424243, + "acc_norm_stderr": 0.03781887353205983 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.29865361077111385, + "mc1_stderr": 0.016021570613768542, + "mc2": 0.46130489377887407, + "mc2_stderr": 0.01503694541376684 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.48406139315230223, + "acc_stderr": 0.017181617837190195, + "acc_norm": 0.5348288075560803, + "acc_norm_stderr": 0.017148598015747422 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + 
"harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + 
"harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "sunburstAI/sb_solar_ko_10.7B_v0.2", + "model_sha": "d0f9dee5ef14801e76643bb3a27cda45a50730b9", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/sunburstAI/solar_ko_v0.1/result_2024-02-25 01:09:24.json b/sunburstAI/solar_ko_v0.1/result_2024-02-25 01:09:24.json new file mode 100644 index 0000000000000000000000000000000000000000..e07d0ee87a817d2b0ce64e3823578833fabfddc1 --- /dev/null +++ b/sunburstAI/solar_ko_v0.1/result_2024-02-25 01:09:24.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4718430034129693, + "acc_stderr": 0.014588204105102205, + "acc_norm": 0.5247440273037542, + "acc_norm_stderr": 0.014593487694937742 + }, + "harness|ko_hellaswag|10": { + "acc": 0.48028281218880703, + "acc_stderr": 0.0049859001723177, + "acc_norm": 0.6428002389962159, + "acc_norm_stderr": 0.004781950883460504 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6783625730994152, + "acc_stderr": 0.03582529442573122, + "acc_norm": 0.6783625730994152, + "acc_norm_stderr": 0.03582529442573122 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.7087378640776699, + "acc_stderr": 0.044986763205729224, + "acc_norm": 0.7087378640776699, + "acc_norm_stderr": 0.044986763205729224 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.7203065134099617, + "acc_stderr": 0.016050792148036567, + "acc_norm": 0.7203065134099617, + "acc_norm_stderr": 0.016050792148036567 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4962962962962963, + "acc_stderr": 0.043192236258113303, + "acc_norm": 0.4962962962962963, + "acc_norm_stderr": 0.043192236258113303 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 
0.04408440022768077, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768077 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.5148936170212766, + "acc_stderr": 0.032671518489247764, + "acc_norm": 0.5148936170212766, + "acc_norm_stderr": 0.032671518489247764 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.46987951807228917, + "acc_stderr": 0.03885425420866766, + "acc_norm": 0.46987951807228917, + "acc_norm_stderr": 0.03885425420866766 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6141479099678456, + "acc_stderr": 0.027648149599751475, + "acc_norm": 0.6141479099678456, + "acc_norm_stderr": 0.027648149599751475 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5874439461883408, + "acc_stderr": 0.03304062175449297, + "acc_norm": 0.5874439461883408, + "acc_norm_stderr": 0.03304062175449297 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5877862595419847, + "acc_stderr": 0.043171711948702556, + "acc_norm": 0.5877862595419847, + "acc_norm_stderr": 0.043171711948702556 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7575757575757576, + "acc_stderr": 0.030532892233932046, + "acc_norm": 0.7575757575757576, + "acc_norm_stderr": 0.030532892233932046 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5241379310344828, + "acc_stderr": 0.041618085035015295, + "acc_norm": 0.5241379310344828, + "acc_norm_stderr": 0.041618085035015295 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.04488482852329017, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.04488482852329017 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6092436974789915, + "acc_stderr": 0.031693802357129965, + "acc_norm": 0.6092436974789915, + "acc_norm_stderr": 0.031693802357129965 + }, + 
"harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5666666666666667, + "acc_stderr": 0.0251246535258851, + "acc_norm": 0.5666666666666667, + "acc_norm_stderr": 0.0251246535258851 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.62, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.62, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6481481481481481, + "acc_stderr": 0.04616631111801714, + "acc_norm": 0.6481481481481481, + "acc_norm_stderr": 0.04616631111801714 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4088669950738916, + "acc_stderr": 0.034590588158832314, + "acc_norm": 0.4088669950738916, + "acc_norm_stderr": 0.034590588158832314 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.632258064516129, + "acc_stderr": 0.027430866579973463, + "acc_norm": 0.632258064516129, + "acc_norm_stderr": 0.027430866579973463 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.782051282051282, + "acc_stderr": 0.027046857630716657, + "acc_norm": 0.782051282051282, + "acc_norm_stderr": 0.027046857630716657 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5245283018867924, + "acc_stderr": 0.030735822206205615, + "acc_norm": 0.5245283018867924, + "acc_norm_stderr": 0.030735822206205615 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5727272727272728, + "acc_stderr": 0.047381987035454834, + "acc_norm": 0.5727272727272728, + "acc_norm_stderr": 0.047381987035454834 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.34444444444444444, + "acc_stderr": 0.028972648884844267, + "acc_norm": 0.34444444444444444, + "acc_norm_stderr": 0.028972648884844267 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3708609271523179, + "acc_stderr": 0.03943966699183629, + "acc_norm": 0.3708609271523179, + 
"acc_norm_stderr": 0.03943966699183629 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7213930348258707, + "acc_stderr": 0.031700561834973086, + "acc_norm": 0.7213930348258707, + "acc_norm_stderr": 0.031700561834973086 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4797687861271676, + "acc_stderr": 0.03809342081273958, + "acc_norm": 0.4797687861271676, + "acc_norm_stderr": 0.03809342081273958 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.40476190476190477, + "acc_stderr": 0.0252798503974049, + "acc_norm": 0.40476190476190477, + "acc_norm_stderr": 0.0252798503974049 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5972222222222222, + "acc_stderr": 0.04101405519842426, + "acc_norm": 0.5972222222222222, + "acc_norm_stderr": 0.04101405519842426 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.83, + "acc_stderr": 0.0377525168068637, + "acc_norm": 0.83, + "acc_norm_stderr": 0.0377525168068637 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.6358381502890174, + "acc_stderr": 0.025906632631016117, + "acc_norm": 0.6358381502890174, + "acc_norm_stderr": 0.025906632631016117 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5828220858895705, + "acc_stderr": 0.038741028598180814, + "acc_norm": 0.5828220858895705, + "acc_norm_stderr": 0.038741028598180814 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6234567901234568, + "acc_stderr": 0.026959344518747784, + "acc_norm": 0.6234567901234568, + "acc_norm_stderr": 0.026959344518747784 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7512953367875648, + "acc_stderr": 0.031195840877700304, + "acc_norm": 
0.7512953367875648, + "acc_norm_stderr": 0.031195840877700304 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.4298245614035088, + "acc_stderr": 0.04657047260594964, + "acc_norm": 0.4298245614035088, + "acc_norm_stderr": 0.04657047260594964 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.7247706422018348, + "acc_stderr": 0.019149093743155196, + "acc_norm": 0.7247706422018348, + "acc_norm_stderr": 0.019149093743155196 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.04360314860077459, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.04360314860077459 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.6503267973856209, + "acc_stderr": 0.0273053080762747, + "acc_norm": 0.6503267973856209, + "acc_norm_stderr": 0.0273053080762747 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.62, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.62, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.71900826446281, + "acc_stderr": 0.04103203830514511, + "acc_norm": 0.71900826446281, + "acc_norm_stderr": 0.04103203830514511 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5723684210526315, + "acc_stderr": 0.04026097083296563, + "acc_norm": 0.5723684210526315, + "acc_norm_stderr": 0.04026097083296563 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5228758169934641, + "acc_stderr": 0.020206653187884786, + "acc_norm": 0.5228758169934641, + "acc_norm_stderr": 0.020206653187884786 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.37943262411347517, + "acc_stderr": 0.028947338851614105, + "acc_norm": 0.37943262411347517, + "acc_norm_stderr": 0.028947338851614105 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4107142857142857, + "acc_stderr": 0.04669510663875192, + "acc_norm": 0.4107142857142857, + "acc_norm_stderr": 0.04669510663875192 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4444444444444444, + 
"acc_stderr": 0.03388857118502325, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.03388857118502325 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27150837988826815, + "acc_stderr": 0.014874252168095266, + "acc_norm": 0.27150837988826815, + "acc_norm_stderr": 0.014874252168095266 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.66, + "acc_stderr": 0.04760952285695237, + "acc_norm": 0.66, + "acc_norm_stderr": 0.04760952285695237 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5294117647058824, + "acc_stderr": 0.03032024326500413, + "acc_norm": 0.5294117647058824, + "acc_norm_stderr": 0.03032024326500413 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6693877551020408, + "acc_stderr": 0.030116426296540617, + "acc_norm": 0.6693877551020408, + "acc_norm_stderr": 0.030116426296540617 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7932489451476793, + "acc_stderr": 0.026361651668389094, + "acc_norm": 0.7932489451476793, + "acc_norm_stderr": 0.026361651668389094 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.408735332464146, + "acc_stderr": 0.01255570134670338, + "acc_norm": 0.408735332464146, + "acc_norm_stderr": 0.01255570134670338 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.7401960784313726, + "acc_stderr": 0.03077855467869326, + "acc_norm": 0.7401960784313726, + "acc_norm_stderr": 0.03077855467869326 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.7333333333333333, + "acc_stderr": 0.03453131801885416, + "acc_norm": 0.7333333333333333, + "acc_norm_stderr": 0.03453131801885416 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.29865361077111385, + "mc1_stderr": 0.016021570613768542, + "mc2": 0.4487241026145131, + "mc2_stderr": 0.015479020087762283 + }, + 
"harness|ko_commongen_v2|2": { + "acc": 0.5655253837072018, + "acc_stderr": 0.017042098620824928, + "acc_norm": 0.5761511216056671, + "acc_norm_stderr": 0.016989810834628256 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + 
"harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "sunburstAI/solar_ko_v0.1", + "model_sha": "6bc4c2d1d2d7df2d9f64365fa12914bc2a4f32f2", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/sygstat/XRAI-CB-SOLAR-10.7B-v2/result_2024-07-16 07:25:07.json b/sygstat/XRAI-CB-SOLAR-10.7B-v2/result_2024-07-16 07:25:07.json new file mode 100644 index 0000000000000000000000000000000000000000..8b5460ca1407d08518762769286eda98eeac19d3 --- /dev/null +++ b/sygstat/XRAI-CB-SOLAR-10.7B-v2/result_2024-07-16 07:25:07.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.757679180887372, + "acc_stderr": 0.012521593295800118, + "acc_norm": 0.7901023890784983, + "acc_norm_stderr": 0.011900548748047449 + }, + "harness|ko_hellaswag|10": { + "acc": 0.707329217287393, + "acc_stderr": 0.004540586983230031, + "acc_norm": 0.8122883887671779, + "acc_norm_stderr": 0.0038968367100898726 + }, + "harness|ko_mmlu_world_religions|5": { + 
"acc": 0.6842105263157895, + "acc_stderr": 0.035650796707083106, + "acc_norm": 0.6842105263157895, + "acc_norm_stderr": 0.035650796707083106 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.7766990291262136, + "acc_stderr": 0.04123553189891431, + "acc_norm": 0.7766990291262136, + "acc_norm_stderr": 0.04123553189891431 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6794380587484036, + "acc_stderr": 0.01668889331080374, + "acc_norm": 0.6794380587484036, + "acc_norm_stderr": 0.01668889331080374 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4740740740740741, + "acc_stderr": 0.04313531696750574, + "acc_norm": 0.4740740740740741, + "acc_norm_stderr": 0.04313531696750574 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.35, + "acc_stderr": 0.04793724854411019, + "acc_norm": 0.35, + "acc_norm_stderr": 0.04793724854411019 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4851063829787234, + "acc_stderr": 0.03267151848924777, + "acc_norm": 0.4851063829787234, + "acc_norm_stderr": 0.03267151848924777 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4879518072289157, + "acc_stderr": 0.0389136449583582, + "acc_norm": 0.4879518072289157, + "acc_norm_stderr": 0.0389136449583582 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6334405144694534, + "acc_stderr": 0.027368078243971646, + "acc_norm": 0.6334405144694534, + "acc_norm_stderr": 0.027368078243971646 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.6412556053811659, + "acc_stderr": 0.032190792004199956, + "acc_norm": 0.6412556053811659, + "acc_norm_stderr": 0.032190792004199956 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5725190839694656, + "acc_stderr": 0.04338920305792401, + "acc_norm": 0.5725190839694656, + "acc_norm_stderr": 0.04338920305792401 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 
0.7575757575757576, + "acc_stderr": 0.030532892233932036, + "acc_norm": 0.7575757575757576, + "acc_norm_stderr": 0.030532892233932036 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5172413793103449, + "acc_stderr": 0.04164188720169375, + "acc_norm": 0.5172413793103449, + "acc_norm_stderr": 0.04164188720169375 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.04617034827006717, + "acc_norm": 0.3137254901960784, + "acc_norm_stderr": 0.04617034827006717 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6176470588235294, + "acc_stderr": 0.031566630992154156, + "acc_norm": 0.6176470588235294, + "acc_norm_stderr": 0.031566630992154156 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.6230769230769231, + "acc_stderr": 0.024570975364225995, + "acc_norm": 0.6230769230769231, + "acc_norm_stderr": 0.024570975364225995 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.69, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.69, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.04557239513497751, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.04557239513497751 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.43842364532019706, + "acc_stderr": 0.03491207857486518, + "acc_norm": 0.43842364532019706, + "acc_norm_stderr": 0.03491207857486518 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6161290322580645, + "acc_stderr": 0.027666182075539652, + "acc_norm": 0.6161290322580645, + "acc_norm_stderr": 0.027666182075539652 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.8290598290598291, + "acc_stderr": 0.024662496845209804, + "acc_norm": 0.8290598290598291, + "acc_norm_stderr": 0.024662496845209804 + }, + 
"harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5924528301886792, + "acc_stderr": 0.030242233800854498, + "acc_norm": 0.5924528301886792, + "acc_norm_stderr": 0.030242233800854498 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6454545454545455, + "acc_stderr": 0.045820048415054174, + "acc_norm": 0.6454545454545455, + "acc_norm_stderr": 0.045820048415054174 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.35555555555555557, + "acc_stderr": 0.029185714949857406, + "acc_norm": 0.35555555555555557, + "acc_norm_stderr": 0.029185714949857406 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3509933774834437, + "acc_stderr": 0.03896981964257375, + "acc_norm": 0.3509933774834437, + "acc_norm_stderr": 0.03896981964257375 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7213930348258707, + "acc_stderr": 0.03170056183497309, + "acc_norm": 0.7213930348258707, + "acc_norm_stderr": 0.03170056183497309 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5606936416184971, + "acc_stderr": 0.037842719328874674, + "acc_norm": 0.5606936416184971, + "acc_norm_stderr": 0.037842719328874674 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.4470899470899471, + "acc_stderr": 0.025606723995777025, + "acc_norm": 0.4470899470899471, + "acc_norm_stderr": 0.025606723995777025 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.6041666666666666, + "acc_stderr": 0.04089465449325582, + "acc_norm": 0.6041666666666666, + "acc_norm_stderr": 0.04089465449325582 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.75, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.75, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.6040462427745664, + "acc_stderr": 0.02632981334194625, + "acc_norm": 0.6040462427745664, + 
"acc_norm_stderr": 0.02632981334194625 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.6073619631901841, + "acc_stderr": 0.038367409078310294, + "acc_norm": 0.6073619631901841, + "acc_norm_stderr": 0.038367409078310294 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6574074074074074, + "acc_stderr": 0.02640614597362568, + "acc_norm": 0.6574074074074074, + "acc_norm_stderr": 0.02640614597362568 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7668393782383419, + "acc_stderr": 0.030516111371476008, + "acc_norm": 0.7668393782383419, + "acc_norm_stderr": 0.030516111371476008 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.47368421052631576, + "acc_stderr": 0.046970851366478626, + "acc_norm": 0.47368421052631576, + "acc_norm_stderr": 0.046970851366478626 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.7192660550458716, + "acc_stderr": 0.019266055045871606, + "acc_norm": 0.7192660550458716, + "acc_norm_stderr": 0.019266055045871606 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.40476190476190477, + "acc_stderr": 0.043902592653775614, + "acc_norm": 0.40476190476190477, + "acc_norm_stderr": 0.043902592653775614 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5947712418300654, + "acc_stderr": 0.028110928492809075, + "acc_norm": 0.5947712418300654, + "acc_norm_stderr": 0.028110928492809075 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.72, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.72, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7107438016528925, + "acc_stderr": 0.04139112727635463, + "acc_norm": 0.7107438016528925, + "acc_norm_stderr": 0.04139112727635463 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.631578947368421, + "acc_stderr": 0.03925523381052932, + 
"acc_norm": 0.631578947368421, + "acc_norm_stderr": 0.03925523381052932 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5751633986928104, + "acc_stderr": 0.019997973035458333, + "acc_norm": 0.5751633986928104, + "acc_norm_stderr": 0.019997973035458333 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.450354609929078, + "acc_stderr": 0.02968010556502904, + "acc_norm": 0.450354609929078, + "acc_norm_stderr": 0.02968010556502904 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.41964285714285715, + "acc_stderr": 0.04684099321077106, + "acc_norm": 0.41964285714285715, + "acc_norm_stderr": 0.04684099321077106 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.034093869469927006, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.034093869469927006 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.3564245810055866, + "acc_stderr": 0.01601823971051341, + "acc_norm": 0.3564245810055866, + "acc_norm_stderr": 0.01601823971051341 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956913, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956913 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.69, + "acc_stderr": 0.046482319871173156, + "acc_norm": 0.69, + "acc_norm_stderr": 0.046482319871173156 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5073529411764706, + "acc_stderr": 0.030369552523902173, + "acc_norm": 0.5073529411764706, + "acc_norm_stderr": 0.030369552523902173 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6408163265306123, + "acc_stderr": 0.03071356045510849, + "acc_norm": 0.6408163265306123, + "acc_norm_stderr": 0.03071356045510849 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.729957805907173, + "acc_stderr": 0.028900721906293426, + "acc_norm": 0.729957805907173, + "acc_norm_stderr": 0.028900721906293426 + }, + 
"harness|ko_mmlu_professional_law|5": { + "acc": 0.4491525423728814, + "acc_stderr": 0.012704030518851474, + "acc_norm": 0.4491525423728814, + "acc_norm_stderr": 0.012704030518851474 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.033086111132364364, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.033086111132364364 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6424242424242425, + "acc_stderr": 0.03742597043806587, + "acc_norm": 0.6424242424242425, + "acc_norm_stderr": 0.03742597043806587 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.7796817625458996, + "mc1_stderr": 0.0145090451714873, + "mc2": 0.8490508659279148, + "mc2_stderr": 0.012224494401356738 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.500590318772137, + "acc_stderr": 0.01719034212344859, + "acc_norm": 0.5289256198347108, + "acc_norm_stderr": 0.017161563949916345 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + 
"harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "sygstat/XRAI-CB-SOLAR-10.7B-v2", + "model_sha": "8336956b08eb428688817eb75f6949b9e6d1f1e7", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git 
a/sygstat/XRAI-CB-SOLAR-10.7B-v3/result_2024-07-17 04:15:33.json b/sygstat/XRAI-CB-SOLAR-10.7B-v3/result_2024-07-17 04:15:33.json new file mode 100644 index 0000000000000000000000000000000000000000..5ac52ab2904ea0b858ebe07344799936b01a0948 --- /dev/null +++ b/sygstat/XRAI-CB-SOLAR-10.7B-v3/result_2024-07-17 04:15:33.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.7508532423208191, + "acc_stderr": 0.012639407111926435, + "acc_norm": 0.7815699658703071, + "acc_norm_stderr": 0.012074291605700985 + }, + "harness|ko_hellaswag|10": { + "acc": 0.702051384186417, + "acc_stderr": 0.004564220870531543, + "acc_norm": 0.8097988448516232, + "acc_norm_stderr": 0.003916576989422082 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6783625730994152, + "acc_stderr": 0.03582529442573122, + "acc_norm": 0.6783625730994152, + "acc_norm_stderr": 0.03582529442573122 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.7766990291262136, + "acc_stderr": 0.04123553189891431, + "acc_norm": 0.7766990291262136, + "acc_norm_stderr": 0.04123553189891431 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6845466155810983, + "acc_stderr": 0.016617501738763408, + "acc_norm": 0.6845466155810983, + "acc_norm_stderr": 0.016617501738763408 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.043097329010363554, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.043097329010363554 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4765957446808511, + "acc_stderr": 0.03265019475033583, + "acc_norm": 0.4765957446808511, + "acc_norm_stderr": 0.03265019475033583 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4819277108433735, + "acc_stderr": 0.038899512528272166, + "acc_norm": 0.4819277108433735, + "acc_norm_stderr": 0.038899512528272166 + }, + 
"harness|ko_mmlu_philosophy|5": { + "acc": 0.6270096463022508, + "acc_stderr": 0.027466610213140095, + "acc_norm": 0.6270096463022508, + "acc_norm_stderr": 0.027466610213140095 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.6367713004484304, + "acc_stderr": 0.03227790442850499, + "acc_norm": 0.6367713004484304, + "acc_norm_stderr": 0.03227790442850499 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5572519083969466, + "acc_stderr": 0.04356447202665069, + "acc_norm": 0.5572519083969466, + "acc_norm_stderr": 0.04356447202665069 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7525252525252525, + "acc_stderr": 0.03074630074212451, + "acc_norm": 0.7525252525252525, + "acc_norm_stderr": 0.03074630074212451 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5172413793103449, + "acc_stderr": 0.04164188720169375, + "acc_norm": 0.5172413793103449, + "acc_norm_stderr": 0.04164188720169375 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3235294117647059, + "acc_stderr": 0.046550104113196177, + "acc_norm": 0.3235294117647059, + "acc_norm_stderr": 0.046550104113196177 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6260504201680672, + "acc_stderr": 0.03142946637883708, + "acc_norm": 0.6260504201680672, + "acc_norm_stderr": 0.03142946637883708 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.6307692307692307, + "acc_stderr": 0.024468615241478926, + "acc_norm": 0.6307692307692307, + "acc_norm_stderr": 0.024468615241478926 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.69, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.69, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 
+ }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6759259259259259, + "acc_stderr": 0.04524596007030048, + "acc_norm": 0.6759259259259259, + "acc_norm_stderr": 0.04524596007030048 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.034819048444388045, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.034819048444388045 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6096774193548387, + "acc_stderr": 0.027751256636969583, + "acc_norm": 0.6096774193548387, + "acc_norm_stderr": 0.027751256636969583 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.8247863247863247, + "acc_stderr": 0.024904439098918214, + "acc_norm": 0.8247863247863247, + "acc_norm_stderr": 0.024904439098918214 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5924528301886792, + "acc_stderr": 0.030242233800854498, + "acc_norm": 0.5924528301886792, + "acc_norm_stderr": 0.030242233800854498 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6454545454545455, + "acc_stderr": 0.045820048415054174, + "acc_norm": 0.6454545454545455, + "acc_norm_stderr": 0.045820048415054174 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.34444444444444444, + "acc_stderr": 0.02897264888484427, + "acc_norm": 0.34444444444444444, + "acc_norm_stderr": 0.02897264888484427 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3509933774834437, + "acc_stderr": 0.03896981964257375, + "acc_norm": 0.3509933774834437, + "acc_norm_stderr": 0.03896981964257375 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7263681592039801, + "acc_stderr": 0.03152439186555402, + "acc_norm": 0.7263681592039801, + "acc_norm_stderr": 0.03152439186555402 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5549132947976878, + "acc_stderr": 0.03789401760283646, + "acc_norm": 0.5549132947976878, + "acc_norm_stderr": 0.03789401760283646 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.4444444444444444, + "acc_stderr": 
0.025591857761382182, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.025591857761382182 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.6111111111111112, + "acc_stderr": 0.04076663253918567, + "acc_norm": 0.6111111111111112, + "acc_norm_stderr": 0.04076663253918567 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.74, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.74, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5982658959537572, + "acc_stderr": 0.026394104177643634, + "acc_norm": 0.5982658959537572, + "acc_norm_stderr": 0.026394104177643634 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.6073619631901841, + "acc_stderr": 0.038367409078310294, + "acc_norm": 0.6073619631901841, + "acc_norm_stderr": 0.038367409078310294 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6450617283950617, + "acc_stderr": 0.026624152478845853, + "acc_norm": 0.6450617283950617, + "acc_norm_stderr": 0.026624152478845853 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7772020725388601, + "acc_stderr": 0.03003114797764154, + "acc_norm": 0.7772020725388601, + "acc_norm_stderr": 0.03003114797764154 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.47368421052631576, + "acc_stderr": 0.046970851366478626, + "acc_norm": 0.47368421052631576, + "acc_norm_stderr": 0.046970851366478626 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.7229357798165138, + "acc_stderr": 0.01918848259016954, + "acc_norm": 0.7229357798165138, + "acc_norm_stderr": 0.01918848259016954 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.42857142857142855, + 
"acc_stderr": 0.0442626668137991, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.0442626668137991 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5849673202614379, + "acc_stderr": 0.028213504177824093, + "acc_norm": 0.5849673202614379, + "acc_norm_stderr": 0.028213504177824093 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.72, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.72, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7024793388429752, + "acc_stderr": 0.04173349148083499, + "acc_norm": 0.7024793388429752, + "acc_norm_stderr": 0.04173349148083499 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.631578947368421, + "acc_stderr": 0.03925523381052932, + "acc_norm": 0.631578947368421, + "acc_norm_stderr": 0.03925523381052932 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5784313725490197, + "acc_stderr": 0.019977422600227477, + "acc_norm": 0.5784313725490197, + "acc_norm_stderr": 0.019977422600227477 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.4432624113475177, + "acc_stderr": 0.029634838473766006, + "acc_norm": 0.4432624113475177, + "acc_norm_stderr": 0.029634838473766006 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.41964285714285715, + "acc_stderr": 0.04684099321077106, + "acc_norm": 0.41964285714285715, + "acc_norm_stderr": 0.04684099321077106 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.0340763209385405, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.0340763209385405 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.3486033519553073, + "acc_stderr": 0.015937484656687022, + "acc_norm": 0.3486033519553073, + "acc_norm_stderr": 0.015937484656687022 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956913, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956913 + }, + 
"harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.68, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.68, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5036764705882353, + "acc_stderr": 0.030372015885428195, + "acc_norm": 0.5036764705882353, + "acc_norm_stderr": 0.030372015885428195 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6571428571428571, + "acc_stderr": 0.03038726291954772, + "acc_norm": 0.6571428571428571, + "acc_norm_stderr": 0.03038726291954772 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.729957805907173, + "acc_stderr": 0.028900721906293426, + "acc_norm": 0.729957805907173, + "acc_norm_stderr": 0.028900721906293426 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.4471968709256845, + "acc_stderr": 0.012698825252435117, + "acc_norm": 0.4471968709256845, + "acc_norm_stderr": 0.012698825252435117 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6715686274509803, + "acc_stderr": 0.032962451101722294, + "acc_norm": 0.6715686274509803, + "acc_norm_stderr": 0.032962451101722294 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6424242424242425, + "acc_stderr": 0.03742597043806587, + "acc_norm": 0.6424242424242425, + "acc_norm_stderr": 0.03742597043806587 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.7870257037943696, + "mc1_stderr": 0.014332203787059692, + "mc2": 0.851192462726135, + "mc2_stderr": 0.012061683461258134 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.500590318772137, + "acc_stderr": 0.01719034212344859, + "acc_norm": 0.5242030696576151, + "acc_norm_stderr": 0.017170202466520748 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + 
"harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + 
"harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "sygstat/XRAI-CB-SOLAR-10.7B-v3", + "model_sha": "41922a2b931e1c3e27b38324637e99cf48b118fb", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/sygstat/XRAI-CB-SOLAR-10.7B-v4/result_2024-07-19 03:53:37.json b/sygstat/XRAI-CB-SOLAR-10.7B-v4/result_2024-07-19 03:53:37.json new file mode 100644 index 0000000000000000000000000000000000000000..4f937ff6d9508eba7361f4f0039b05687f3ed58b --- /dev/null +++ b/sygstat/XRAI-CB-SOLAR-10.7B-v4/result_2024-07-19 03:53:37.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.659556313993174, + "acc_stderr": 0.013847460518892981, + "acc_norm": 0.7158703071672355, + "acc_norm_stderr": 0.013179442447653887 + }, + "harness|ko_hellaswag|10": { + "acc": 0.5187213702449711, + "acc_stderr": 0.004986282450647317, + "acc_norm": 0.6712806213901613, + "acc_norm_stderr": 0.004687877183164461 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5730994152046783, + "acc_stderr": 0.03793620616529916, + "acc_norm": 0.5730994152046783, + "acc_norm_stderr": 0.03793620616529916 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6504854368932039, + "acc_stderr": 0.04721188506097173, + "acc_norm": 0.6504854368932039, + "acc_norm_stderr": 0.04721188506097173 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6590038314176245, + "acc_stderr": 0.016951781383223313, + "acc_norm": 0.6590038314176245, + 
"acc_norm_stderr": 0.016951781383223313 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45185185185185184, + "acc_stderr": 0.042992689054808624, + "acc_norm": 0.45185185185185184, + "acc_norm_stderr": 0.042992689054808624 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.502127659574468, + "acc_stderr": 0.03268572658667493, + "acc_norm": 0.502127659574468, + "acc_norm_stderr": 0.03268572658667493 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.5060240963855421, + "acc_stderr": 0.03892212195333045, + "acc_norm": 0.5060240963855421, + "acc_norm_stderr": 0.03892212195333045 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5691318327974276, + "acc_stderr": 0.028125340983972714, + "acc_norm": 0.5691318327974276, + "acc_norm_stderr": 0.028125340983972714 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.6278026905829597, + "acc_stderr": 0.032443052830087304, + "acc_norm": 0.6278026905829597, + "acc_norm_stderr": 0.032443052830087304 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6030534351145038, + "acc_stderr": 0.04291135671009224, + "acc_norm": 0.6030534351145038, + "acc_norm_stderr": 0.04291135671009224 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956914, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956914 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6717171717171717, + "acc_stderr": 0.03345678422756776, + "acc_norm": 0.6717171717171717, + "acc_norm_stderr": 0.03345678422756776 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5586206896551724, + "acc_stderr": 0.04137931034482757, + "acc_norm": 0.5586206896551724, + "acc_norm_stderr": 0.04137931034482757 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.04617034827006715, + "acc_norm": 0.3137254901960784, 
+ "acc_norm_stderr": 0.04617034827006715 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6302521008403361, + "acc_stderr": 0.03135709599613591, + "acc_norm": 0.6302521008403361, + "acc_norm_stderr": 0.03135709599613591 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5897435897435898, + "acc_stderr": 0.02493931390694079, + "acc_norm": 0.5897435897435898, + "acc_norm_stderr": 0.02493931390694079 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.65, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.65, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6111111111111112, + "acc_stderr": 0.0471282125742677, + "acc_norm": 0.6111111111111112, + "acc_norm_stderr": 0.0471282125742677 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.458128078817734, + "acc_stderr": 0.03505630140785741, + "acc_norm": 0.458128078817734, + "acc_norm_stderr": 0.03505630140785741 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5838709677419355, + "acc_stderr": 0.02804098138076154, + "acc_norm": 0.5838709677419355, + "acc_norm_stderr": 0.02804098138076154 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7948717948717948, + "acc_stderr": 0.02645350805404035, + "acc_norm": 0.7948717948717948, + "acc_norm_stderr": 0.02645350805404035 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5471698113207547, + "acc_stderr": 0.030635627957961816, + "acc_norm": 0.5471698113207547, + "acc_norm_stderr": 0.030635627957961816 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6, + "acc_stderr": 0.0469237132203465, + "acc_norm": 0.6, + "acc_norm_stderr": 0.0469237132203465 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.35555555555555557, + "acc_stderr": 0.029185714949857406, + "acc_norm": 
0.35555555555555557, + "acc_norm_stderr": 0.029185714949857406 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3576158940397351, + "acc_stderr": 0.03913453431177258, + "acc_norm": 0.3576158940397351, + "acc_norm_stderr": 0.03913453431177258 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6268656716417911, + "acc_stderr": 0.034198326081760065, + "acc_norm": 0.6268656716417911, + "acc_norm_stderr": 0.034198326081760065 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5375722543352601, + "acc_stderr": 0.0380168510452446, + "acc_norm": 0.5375722543352601, + "acc_norm_stderr": 0.0380168510452446 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.328042328042328, + "acc_stderr": 0.024180497164376907, + "acc_norm": 0.328042328042328, + "acc_norm_stderr": 0.024180497164376907 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5347222222222222, + "acc_stderr": 0.04171115858181618, + "acc_norm": 0.5347222222222222, + "acc_norm_stderr": 0.04171115858181618 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.65, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.65, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5606936416184971, + "acc_stderr": 0.026720034380514995, + "acc_norm": 0.5606936416184971, + "acc_norm_stderr": 0.026720034380514995 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5950920245398773, + "acc_stderr": 0.03856672163548912, + "acc_norm": 0.5950920245398773, + "acc_norm_stderr": 0.03856672163548912 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5895061728395061, + "acc_stderr": 0.027371350925124768, + "acc_norm": 0.5895061728395061, + "acc_norm_stderr": 0.027371350925124768 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + 
"acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7046632124352331, + "acc_stderr": 0.0329229663915514, + "acc_norm": 0.7046632124352331, + "acc_norm_stderr": 0.0329229663915514 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.40350877192982454, + "acc_stderr": 0.04615186962583704, + "acc_norm": 0.40350877192982454, + "acc_norm_stderr": 0.04615186962583704 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6605504587155964, + "acc_stderr": 0.02030210934266235, + "acc_norm": 0.6605504587155964, + "acc_norm_stderr": 0.02030210934266235 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.0442626668137991, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.0442626668137991 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.02845263998508801, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.02845263998508801 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5950413223140496, + "acc_stderr": 0.04481137755942469, + "acc_norm": 0.5950413223140496, + "acc_norm_stderr": 0.04481137755942469 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.6118421052631579, + "acc_stderr": 0.03965842097512744, + "acc_norm": 0.6118421052631579, + "acc_norm_stderr": 0.03965842097512744 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5228758169934641, + "acc_stderr": 0.02020665318788479, + "acc_norm": 0.5228758169934641, + "acc_norm_stderr": 0.02020665318788479 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.40425531914893614, + "acc_stderr": 0.029275532159704725, + "acc_norm": 0.40425531914893614, + "acc_norm_stderr": 0.029275532159704725 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.375, + 
"acc_stderr": 0.04595091388086298, + "acc_norm": 0.375, + "acc_norm_stderr": 0.04595091388086298 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.03388857118502325, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.03388857118502325 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.293854748603352, + "acc_stderr": 0.015235075776719608, + "acc_norm": 0.293854748603352, + "acc_norm_stderr": 0.015235075776719608 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.62, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.62, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4227941176470588, + "acc_stderr": 0.030008562845003476, + "acc_norm": 0.4227941176470588, + "acc_norm_stderr": 0.030008562845003476 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5673469387755102, + "acc_stderr": 0.031717528240626645, + "acc_norm": 0.5673469387755102, + "acc_norm_stderr": 0.031717528240626645 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5738396624472574, + "acc_stderr": 0.032190357031317736, + "acc_norm": 0.5738396624472574, + "acc_norm_stderr": 0.032190357031317736 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.41851368970013036, + "acc_stderr": 0.012599505608336479, + "acc_norm": 0.41851368970013036, + "acc_norm_stderr": 0.012599505608336479 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.47549019607843135, + "acc_stderr": 0.035050931943487976, + "acc_norm": 0.47549019607843135, + "acc_norm_stderr": 0.035050931943487976 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.038881769216741004, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.038881769216741004 + 
}, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.6891064871481029, + "mc1_stderr": 0.016203316673559693, + "mc2": 0.7972227092749452, + "mc2_stderr": 0.013260083563569077 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5324675324675324, + "acc_stderr": 0.017154073716682865, + "acc_norm": 0.564344746162928, + "acc_norm_stderr": 0.01704741522947632 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "sygstat/XRAI-CB-SOLAR-10.7B-v4", + "model_sha": "2a6cbfa54ef1ff8bbcf3ad4f5720928e1011a49d", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/sygstat/XRAI-CB-SOLAR-10.7B-v6/result_2024-07-19 07:00:34.json b/sygstat/XRAI-CB-SOLAR-10.7B-v6/result_2024-07-19 07:00:34.json new file mode 100644 index 0000000000000000000000000000000000000000..56233ce8310d3d1f10e493ae1c03090887dfdd20 --- /dev/null +++ b/sygstat/XRAI-CB-SOLAR-10.7B-v6/result_2024-07-19 07:00:34.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.7235494880546075, + "acc_stderr": 0.013069662474252428, + "acc_norm": 0.7679180887372014, + "acc_norm_stderr": 0.012336718284948853 + }, + "harness|ko_hellaswag|10": { + "acc": 0.6651065524795857, + 
"acc_stderr": 0.0047098866441571095, + "acc_norm": 0.7818163712407887, + "acc_norm_stderr": 0.004121686700238604 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6081871345029239, + "acc_stderr": 0.03743979825926398, + "acc_norm": 0.6081871345029239, + "acc_norm_stderr": 0.03743979825926398 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.7184466019417476, + "acc_stderr": 0.04453254836326468, + "acc_norm": 0.7184466019417476, + "acc_norm_stderr": 0.04453254836326468 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6538952745849298, + "acc_stderr": 0.017011965266412077, + "acc_norm": 0.6538952745849298, + "acc_norm_stderr": 0.017011965266412077 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45925925925925926, + "acc_stderr": 0.04304979692464243, + "acc_norm": 0.45925925925925926, + "acc_norm_stderr": 0.04304979692464243 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.42, + "acc_stderr": 0.04960449637488584, + "acc_norm": 0.42, + "acc_norm_stderr": 0.04960449637488584 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4595744680851064, + "acc_stderr": 0.032579014820998356, + "acc_norm": 0.4595744680851064, + "acc_norm_stderr": 0.032579014820998356 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.463855421686747, + "acc_stderr": 0.03882310850890594, + "acc_norm": 0.463855421686747, + "acc_norm_stderr": 0.03882310850890594 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6141479099678456, + "acc_stderr": 0.02764814959975146, + "acc_norm": 0.6141479099678456, + "acc_norm_stderr": 0.02764814959975146 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.6412556053811659, + "acc_stderr": 0.03219079200419996, + "acc_norm": 0.6412556053811659, + "acc_norm_stderr": 0.03219079200419996 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5267175572519084, + "acc_stderr": 0.04379024936553893, + "acc_norm": 0.5267175572519084, + "acc_norm_stderr": 0.04379024936553893 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.56, + 
"acc_stderr": 0.049888765156985884, + "acc_norm": 0.56, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6616161616161617, + "acc_stderr": 0.03371124142626303, + "acc_norm": 0.6616161616161617, + "acc_norm_stderr": 0.03371124142626303 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5310344827586206, + "acc_stderr": 0.04158632762097828, + "acc_norm": 0.5310344827586206, + "acc_norm_stderr": 0.04158632762097828 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.04835503696107223, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.04835503696107223 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6260504201680672, + "acc_stderr": 0.031429466378837076, + "acc_norm": 0.6260504201680672, + "acc_norm_stderr": 0.031429466378837076 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5794871794871795, + "acc_stderr": 0.025028610276710862, + "acc_norm": 0.5794871794871795, + "acc_norm_stderr": 0.025028610276710862 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6203703703703703, + "acc_stderr": 0.04691521224077742, + "acc_norm": 0.6203703703703703, + "acc_norm_stderr": 0.04691521224077742 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.45320197044334976, + "acc_stderr": 0.035025446508458714, + "acc_norm": 0.45320197044334976, + "acc_norm_stderr": 0.035025446508458714 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5838709677419355, + "acc_stderr": 0.028040981380761536, + "acc_norm": 0.5838709677419355, + "acc_norm_stderr": 0.028040981380761536 + }, + 
"harness|ko_mmlu_marketing|5": { + "acc": 0.811965811965812, + "acc_stderr": 0.025598193686652244, + "acc_norm": 0.811965811965812, + "acc_norm_stderr": 0.025598193686652244 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5471698113207547, + "acc_stderr": 0.03063562795796182, + "acc_norm": 0.5471698113207547, + "acc_norm_stderr": 0.03063562795796182 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5727272727272728, + "acc_stderr": 0.047381987035454834, + "acc_norm": 0.5727272727272728, + "acc_norm_stderr": 0.047381987035454834 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.028742040903948496, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.028742040903948496 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.36423841059602646, + "acc_stderr": 0.03929111781242741, + "acc_norm": 0.36423841059602646, + "acc_norm_stderr": 0.03929111781242741 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6616915422885572, + "acc_stderr": 0.03345563070339191, + "acc_norm": 0.6616915422885572, + "acc_norm_stderr": 0.03345563070339191 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5144508670520231, + "acc_stderr": 0.03810871630454764, + "acc_norm": 0.5144508670520231, + "acc_norm_stderr": 0.03810871630454764 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.36772486772486773, + "acc_stderr": 0.02483383982556242, + "acc_norm": 0.36772486772486773, + "acc_norm_stderr": 0.02483383982556242 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5347222222222222, + "acc_stderr": 0.04171115858181618, + "acc_norm": 0.5347222222222222, + "acc_norm_stderr": 0.04171115858181618 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.67, + "acc_stderr": 0.047258156262526066, + "acc_norm": 0.67, + "acc_norm_stderr": 
0.047258156262526066 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5809248554913294, + "acc_stderr": 0.02656417811142262, + "acc_norm": 0.5809248554913294, + "acc_norm_stderr": 0.02656417811142262 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.588957055214724, + "acc_stderr": 0.038656978537853624, + "acc_norm": 0.588957055214724, + "acc_norm_stderr": 0.038656978537853624 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5679012345679012, + "acc_stderr": 0.02756301097160668, + "acc_norm": 0.5679012345679012, + "acc_norm_stderr": 0.02756301097160668 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7046632124352331, + "acc_stderr": 0.0329229663915514, + "acc_norm": 0.7046632124352331, + "acc_norm_stderr": 0.0329229663915514 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.38596491228070173, + "acc_stderr": 0.045796394220704355, + "acc_norm": 0.38596491228070173, + "acc_norm_stderr": 0.045796394220704355 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6678899082568808, + "acc_stderr": 0.020192682985423344, + "acc_norm": 0.6678899082568808, + "acc_norm_stderr": 0.020192682985423344 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.38095238095238093, + "acc_stderr": 0.04343525428949096, + "acc_norm": 0.38095238095238093, + "acc_norm_stderr": 0.04343525428949096 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5522875816993464, + "acc_stderr": 0.028472938478033526, + "acc_norm": 0.5522875816993464, + "acc_norm_stderr": 0.028472938478033526 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.67, + "acc_stderr": 0.04725815626252609, + "acc_norm": 0.67, + "acc_norm_stderr": 0.04725815626252609 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6611570247933884, + "acc_stderr": 0.04320767807536671, + "acc_norm": 
0.6611570247933884, + "acc_norm_stderr": 0.04320767807536671 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5789473684210527, + "acc_stderr": 0.04017901275981749, + "acc_norm": 0.5789473684210527, + "acc_norm_stderr": 0.04017901275981749 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5375816993464052, + "acc_stderr": 0.020170614974969765, + "acc_norm": 0.5375816993464052, + "acc_norm_stderr": 0.020170614974969765 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.425531914893617, + "acc_stderr": 0.02949482760014437, + "acc_norm": 0.425531914893617, + "acc_norm_stderr": 0.02949482760014437 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.39285714285714285, + "acc_stderr": 0.04635550135609976, + "acc_norm": 0.39285714285714285, + "acc_norm_stderr": 0.04635550135609976 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.0340470532865388, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.0340470532865388 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.32513966480446926, + "acc_stderr": 0.01566654278505356, + "acc_norm": 0.32513966480446926, + "acc_norm_stderr": 0.01566654278505356 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5, + "acc_stderr": 0.030372836961539352, + "acc_norm": 0.5, + "acc_norm_stderr": 0.030372836961539352 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5877551020408164, + "acc_stderr": 0.031512360446742695, + "acc_norm": 0.5877551020408164, + "acc_norm_stderr": 0.031512360446742695 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6919831223628692, + "acc_stderr": 
0.0300523893356057, + "acc_norm": 0.6919831223628692, + "acc_norm_stderr": 0.0300523893356057 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.43741851368970014, + "acc_stderr": 0.012669813464935715, + "acc_norm": 0.43741851368970014, + "acc_norm_stderr": 0.012669813464935715 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5637254901960784, + "acc_stderr": 0.03480693138457039, + "acc_norm": 0.5637254901960784, + "acc_norm_stderr": 0.03480693138457039 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5878787878787879, + "acc_stderr": 0.03843566993588717, + "acc_norm": 0.5878787878787879, + "acc_norm_stderr": 0.03843566993588717 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.7613219094247246, + "mc1_stderr": 0.014922629695456418, + "mc2": 0.8409676668671134, + "mc2_stderr": 0.01247737710206256 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5714285714285714, + "acc_stderr": 0.017014038119297487, + "acc_norm": 0.5974025974025974, + "acc_norm_stderr": 0.016861020486407773 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + 
"harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "sygstat/XRAI-CB-SOLAR-10.7B-v6", + "model_sha": "3da7b3d0d2fd20fd81550477e2e9de36f21b0fcf", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + 
"override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/sygstat/XRAI-CB-SOLAR-10.7B/result_2024-07-15 06:21:52.json b/sygstat/XRAI-CB-SOLAR-10.7B/result_2024-07-15 06:21:52.json new file mode 100644 index 0000000000000000000000000000000000000000..3f69671b9368ea31665ad4f087106a6b1531202b --- /dev/null +++ b/sygstat/XRAI-CB-SOLAR-10.7B/result_2024-07-15 06:21:52.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.6825938566552902, + "acc_stderr": 0.01360223908803817, + "acc_norm": 0.7303754266211604, + "acc_norm_stderr": 0.012968040686869154 + }, + "harness|ko_hellaswag|10": { + "acc": 0.49830711013742285, + "acc_stderr": 0.004989752811173414, + "acc_norm": 0.6674965146385182, + "acc_norm_stderr": 0.004701474865207017 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.7485380116959064, + "acc_stderr": 0.033275044238468436, + "acc_norm": 0.7485380116959064, + "acc_norm_stderr": 0.033275044238468436 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.7378640776699029, + "acc_stderr": 0.043546310772605956, + "acc_norm": 0.7378640776699029, + "acc_norm_stderr": 0.043546310772605956 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.7394636015325671, + "acc_stderr": 0.015696008563807123, + "acc_norm": 0.7394636015325671, + "acc_norm_stderr": 0.015696008563807123 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.043097329010363554, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.043097329010363554 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.5829787234042553, + "acc_stderr": 0.032232762667117124, + "acc_norm": 0.5829787234042553, + "acc_norm_stderr": 0.032232762667117124 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.5180722891566265, + "acc_stderr": 
0.038899512528272166, + "acc_norm": 0.5180722891566265, + "acc_norm_stderr": 0.038899512528272166 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6720257234726688, + "acc_stderr": 0.026664410886937613, + "acc_norm": 0.6720257234726688, + "acc_norm_stderr": 0.026664410886937613 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.6860986547085202, + "acc_stderr": 0.031146796482972465, + "acc_norm": 0.6860986547085202, + "acc_norm_stderr": 0.031146796482972465 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6183206106870229, + "acc_stderr": 0.04260735157644561, + "acc_norm": 0.6183206106870229, + "acc_norm_stderr": 0.04260735157644561 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.52, + "acc_stderr": 0.05021167315686781, + "acc_norm": 0.52, + "acc_norm_stderr": 0.05021167315686781 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7474747474747475, + "acc_stderr": 0.030954055470365907, + "acc_norm": 0.7474747474747475, + "acc_norm_stderr": 0.030954055470365907 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5862068965517241, + "acc_stderr": 0.041042692118062316, + "acc_norm": 0.5862068965517241, + "acc_norm_stderr": 0.041042692118062316 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.35294117647058826, + "acc_stderr": 0.047551296160629475, + "acc_norm": 0.35294117647058826, + "acc_norm_stderr": 0.047551296160629475 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6848739495798319, + "acc_stderr": 0.030176808288974337, + "acc_norm": 0.6848739495798319, + "acc_norm_stderr": 0.030176808288974337 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.6641025641025641, + "acc_stderr": 0.023946724741563983, + "acc_norm": 0.6641025641025641, + "acc_norm_stderr": 0.023946724741563983 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.71, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.71, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_global_facts|5": 
{ + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6851851851851852, + "acc_stderr": 0.04489931073591312, + "acc_norm": 0.6851851851851852, + "acc_norm_stderr": 0.04489931073591312 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.47783251231527096, + "acc_stderr": 0.03514528562175008, + "acc_norm": 0.47783251231527096, + "acc_norm_stderr": 0.03514528562175008 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.667741935483871, + "acc_stderr": 0.026795560848122794, + "acc_norm": 0.667741935483871, + "acc_norm_stderr": 0.026795560848122794 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.8504273504273504, + "acc_stderr": 0.023365051491753715, + "acc_norm": 0.8504273504273504, + "acc_norm_stderr": 0.023365051491753715 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.630188679245283, + "acc_stderr": 0.029711421880107933, + "acc_norm": 0.630188679245283, + "acc_norm_stderr": 0.029711421880107933 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6818181818181818, + "acc_stderr": 0.044612721759105085, + "acc_norm": 0.6818181818181818, + "acc_norm_stderr": 0.044612721759105085 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.02944316932303154, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.02944316932303154 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3576158940397351, + "acc_stderr": 0.03913453431177258, + "acc_norm": 0.3576158940397351, + "acc_norm_stderr": 0.03913453431177258 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7412935323383084, + "acc_stderr": 0.03096590312357303, + "acc_norm": 0.7412935323383084, + "acc_norm_stderr": 0.03096590312357303 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5606936416184971, + "acc_stderr": 0.03784271932887467, + "acc_norm": 0.5606936416184971, + "acc_norm_stderr": 
0.03784271932887467 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.4497354497354497, + "acc_stderr": 0.02562085704293665, + "acc_norm": 0.4497354497354497, + "acc_norm_stderr": 0.02562085704293665 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.6111111111111112, + "acc_stderr": 0.04076663253918567, + "acc_norm": 0.6111111111111112, + "acc_norm_stderr": 0.04076663253918567 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.76, + "acc_stderr": 0.04292346959909284, + "acc_norm": 0.76, + "acc_norm_stderr": 0.04292346959909284 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.6329479768786127, + "acc_stderr": 0.025950054337654085, + "acc_norm": 0.6329479768786127, + "acc_norm_stderr": 0.025950054337654085 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.6380368098159509, + "acc_stderr": 0.037757007291414416, + "acc_norm": 0.6380368098159509, + "acc_norm_stderr": 0.037757007291414416 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6882716049382716, + "acc_stderr": 0.02577311116963046, + "acc_norm": 0.6882716049382716, + "acc_norm_stderr": 0.02577311116963046 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7823834196891192, + "acc_stderr": 0.029778663037752954, + "acc_norm": 0.7823834196891192, + "acc_norm_stderr": 0.029778663037752954 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.5, + "acc_stderr": 0.047036043419179864, + "acc_norm": 0.5, + "acc_norm_stderr": 0.047036043419179864 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.7798165137614679, + "acc_stderr": 0.01776597865232758, + "acc_norm": 0.7798165137614679, + "acc_norm_stderr": 
0.01776597865232758 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.5158730158730159, + "acc_stderr": 0.044698818540726076, + "acc_norm": 0.5158730158730159, + "acc_norm_stderr": 0.044698818540726076 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.6535947712418301, + "acc_stderr": 0.027245613047215355, + "acc_norm": 0.6535947712418301, + "acc_norm_stderr": 0.027245613047215355 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.69, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.69, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7933884297520661, + "acc_stderr": 0.03695980128098824, + "acc_norm": 0.7933884297520661, + "acc_norm_stderr": 0.03695980128098824 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.6513157894736842, + "acc_stderr": 0.03878139888797611, + "acc_norm": 0.6513157894736842, + "acc_norm_stderr": 0.03878139888797611 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.6209150326797386, + "acc_stderr": 0.01962744474841224, + "acc_norm": 0.6209150326797386, + "acc_norm_stderr": 0.01962744474841224 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.4716312056737589, + "acc_stderr": 0.029779450957303062, + "acc_norm": 0.4716312056737589, + "acc_norm_stderr": 0.029779450957303062 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4732142857142857, + "acc_stderr": 0.047389751192741546, + "acc_norm": 0.4732142857142857, + "acc_norm_stderr": 0.047389751192741546 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5601851851851852, + "acc_stderr": 0.03385177976044811, + "acc_norm": 0.5601851851851852, + "acc_norm_stderr": 0.03385177976044811 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.376536312849162, + "acc_stderr": 0.016204672385106603, + "acc_norm": 0.376536312849162, + "acc_norm_stderr": 0.016204672385106603 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.53, + "acc_stderr": 0.05016135580465919, + 
"acc_norm": 0.53, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.73, + "acc_stderr": 0.044619604333847415, + "acc_norm": 0.73, + "acc_norm_stderr": 0.044619604333847415 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5588235294117647, + "acc_stderr": 0.03016191193076711, + "acc_norm": 0.5588235294117647, + "acc_norm_stderr": 0.03016191193076711 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6938775510204082, + "acc_stderr": 0.029504896454595968, + "acc_norm": 0.6938775510204082, + "acc_norm_stderr": 0.029504896454595968 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7763713080168776, + "acc_stderr": 0.027123298205229966, + "acc_norm": 0.7763713080168776, + "acc_norm_stderr": 0.027123298205229966 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.4602346805736636, + "acc_stderr": 0.012729785386598552, + "acc_norm": 0.4602346805736636, + "acc_norm_stderr": 0.012729785386598552 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6911764705882353, + "acc_stderr": 0.03242661719827218, + "acc_norm": 0.6911764705882353, + "acc_norm_stderr": 0.03242661719827218 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.703030303030303, + "acc_stderr": 0.03567969772268047, + "acc_norm": 0.703030303030303, + "acc_norm_stderr": 0.03567969772268047 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.6756425948592412, + "mc1_stderr": 0.016387976779647942, + "mc2": 0.7671294843957964, + "mc2_stderr": 0.013753073872170713 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.525383707201889, + "acc_stderr": 0.017168187201429253, + "acc_norm": 0.5312868949232585, + "acc_norm_stderr": 0.01715666685978547 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + 
"harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + 
"harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "sygstat/XRAI-CB-SOLAR-10.7B", + "model_sha": "5172dd130499d05c27eaeb5eac3de99540afb515", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/sygstat/XRAI-Inc.-SOLAR-10.7B-v1/result_2024-07-19 13:08:53.json b/sygstat/XRAI-Inc.-SOLAR-10.7B-v1/result_2024-07-19 13:08:53.json new file mode 100644 index 0000000000000000000000000000000000000000..8fec5a3b373eb2a14a6d55b69c15f6a62236d81a --- /dev/null +++ b/sygstat/XRAI-Inc.-SOLAR-10.7B-v1/result_2024-07-19 13:08:53.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.6851535836177475, + "acc_stderr": 0.013572657703084948, + "acc_norm": 0.7261092150170648, + "acc_norm_stderr": 0.013032004972989503 + }, + "harness|ko_hellaswag|10": { + "acc": 0.5420235012945628, + "acc_stderr": 0.004972126523031939, + "acc_norm": 0.699362676757618, + "acc_norm_stderr": 0.004575980763923574 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6900584795321637, + "acc_stderr": 0.035469769593931624, + "acc_norm": 0.6900584795321637, + "acc_norm_stderr": 0.035469769593931624 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6893203883495146, + "acc_stderr": 0.045821241601615506, + "acc_norm": 0.6893203883495146, + "acc_norm_stderr": 0.045821241601615506 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 
0.7011494252873564, + "acc_stderr": 0.016369256815093117, + "acc_norm": 0.7011494252873564, + "acc_norm_stderr": 0.016369256815093117 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4740740740740741, + "acc_stderr": 0.04313531696750573, + "acc_norm": 0.4740740740740741, + "acc_norm_stderr": 0.04313531696750573 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.49361702127659574, + "acc_stderr": 0.03268335899936338, + "acc_norm": 0.49361702127659574, + "acc_norm_stderr": 0.03268335899936338 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.5, + "acc_stderr": 0.03892494720807614, + "acc_norm": 0.5, + "acc_norm_stderr": 0.03892494720807614 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6527331189710611, + "acc_stderr": 0.027040745502307333, + "acc_norm": 0.6527331189710611, + "acc_norm_stderr": 0.027040745502307333 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.6457399103139013, + "acc_stderr": 0.032100621541349864, + "acc_norm": 0.6457399103139013, + "acc_norm_stderr": 0.032100621541349864 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5954198473282443, + "acc_stderr": 0.043046937953806645, + "acc_norm": 0.5954198473282443, + "acc_norm_stderr": 0.043046937953806645 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.53, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.53, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6565656565656566, + "acc_stderr": 0.03383201223244442, + "acc_norm": 0.6565656565656566, + "acc_norm_stderr": 0.03383201223244442 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5793103448275863, + "acc_stderr": 0.0411391498118926, + "acc_norm": 0.5793103448275863, + "acc_norm_stderr": 0.0411391498118926 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3627450980392157, + 
"acc_stderr": 0.047840607041056527, + "acc_norm": 0.3627450980392157, + "acc_norm_stderr": 0.047840607041056527 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6680672268907563, + "acc_stderr": 0.03058869701378364, + "acc_norm": 0.6680672268907563, + "acc_norm_stderr": 0.03058869701378364 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.6128205128205129, + "acc_stderr": 0.02469721693087893, + "acc_norm": 0.6128205128205129, + "acc_norm_stderr": 0.02469721693087893 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.65, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.65, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6574074074074074, + "acc_stderr": 0.04587904741301811, + "acc_norm": 0.6574074074074074, + "acc_norm_stderr": 0.04587904741301811 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.47783251231527096, + "acc_stderr": 0.035145285621750094, + "acc_norm": 0.47783251231527096, + "acc_norm_stderr": 0.035145285621750094 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6193548387096774, + "acc_stderr": 0.027621717832907036, + "acc_norm": 0.6193548387096774, + "acc_norm_stderr": 0.027621717832907036 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.8461538461538461, + "acc_stderr": 0.02363687331748927, + "acc_norm": 0.8461538461538461, + "acc_norm_stderr": 0.02363687331748927 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5962264150943396, + "acc_stderr": 0.03019761160019795, + "acc_norm": 0.5962264150943396, + "acc_norm_stderr": 0.03019761160019795 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6090909090909091, + "acc_stderr": 0.0467375233367024, + "acc_norm": 0.6090909090909091, + "acc_norm_stderr": 0.0467375233367024 + }, + 
"harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.02944316932303154, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.02944316932303154 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.41721854304635764, + "acc_stderr": 0.04026141497634612, + "acc_norm": 0.41721854304635764, + "acc_norm_stderr": 0.04026141497634612 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7014925373134329, + "acc_stderr": 0.03235743789355045, + "acc_norm": 0.7014925373134329, + "acc_norm_stderr": 0.03235743789355045 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5664739884393064, + "acc_stderr": 0.03778621079092055, + "acc_norm": 0.5664739884393064, + "acc_norm_stderr": 0.03778621079092055 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3968253968253968, + "acc_stderr": 0.02519710107424648, + "acc_norm": 0.3968253968253968, + "acc_norm_stderr": 0.02519710107424648 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.6041666666666666, + "acc_stderr": 0.04089465449325582, + "acc_norm": 0.6041666666666666, + "acc_norm_stderr": 0.04089465449325582 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.74, + "acc_stderr": 0.04408440022768077, + "acc_norm": 0.74, + "acc_norm_stderr": 0.04408440022768077 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5924855491329479, + "acc_stderr": 0.026454578146931505, + "acc_norm": 0.5924855491329479, + "acc_norm_stderr": 0.026454578146931505 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.6196319018404908, + "acc_stderr": 0.038142698932618374, + "acc_norm": 0.6196319018404908, + "acc_norm_stderr": 0.038142698932618374 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6049382716049383, + "acc_stderr": 0.027201117666925647, + "acc_norm": 0.6049382716049383, + "acc_norm_stderr": 
0.027201117666925647 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7253886010362695, + "acc_stderr": 0.032210245080411544, + "acc_norm": 0.7253886010362695, + "acc_norm_stderr": 0.032210245080411544 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.42105263157894735, + "acc_stderr": 0.046446020912223177, + "acc_norm": 0.42105263157894735, + "acc_norm_stderr": 0.046446020912223177 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.7229357798165138, + "acc_stderr": 0.019188482590169538, + "acc_norm": 0.7229357798165138, + "acc_norm_stderr": 0.019188482590169538 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.42063492063492064, + "acc_stderr": 0.04415438226743744, + "acc_norm": 0.42063492063492064, + "acc_norm_stderr": 0.04415438226743744 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5980392156862745, + "acc_stderr": 0.028074158947600663, + "acc_norm": 0.5980392156862745, + "acc_norm_stderr": 0.028074158947600663 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.67, + "acc_stderr": 0.04725815626252609, + "acc_norm": 0.67, + "acc_norm_stderr": 0.04725815626252609 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6611570247933884, + "acc_stderr": 0.04320767807536671, + "acc_norm": 0.6611570247933884, + "acc_norm_stderr": 0.04320767807536671 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.6644736842105263, + "acc_stderr": 0.03842498559395269, + "acc_norm": 0.6644736842105263, + "acc_norm_stderr": 0.03842498559395269 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5800653594771242, + "acc_stderr": 0.019966811178256473, + "acc_norm": 0.5800653594771242, + "acc_norm_stderr": 0.019966811178256473 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.46099290780141844, + "acc_stderr": 0.02973659252642444, + 
"acc_norm": 0.46099290780141844, + "acc_norm_stderr": 0.02973659252642444 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.38392857142857145, + "acc_stderr": 0.04616143075028546, + "acc_norm": 0.38392857142857145, + "acc_norm_stderr": 0.04616143075028546 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5138888888888888, + "acc_stderr": 0.03408655867977748, + "acc_norm": 0.5138888888888888, + "acc_norm_stderr": 0.03408655867977748 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.264804469273743, + "acc_stderr": 0.014756906483260659, + "acc_norm": 0.264804469273743, + "acc_norm_stderr": 0.014756906483260659 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.65, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.65, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5367647058823529, + "acc_stderr": 0.030290619180485694, + "acc_norm": 0.5367647058823529, + "acc_norm_stderr": 0.030290619180485694 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6857142857142857, + "acc_stderr": 0.029719329422417458, + "acc_norm": 0.6857142857142857, + "acc_norm_stderr": 0.029719329422417458 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7172995780590717, + "acc_stderr": 0.029312814153955934, + "acc_norm": 0.7172995780590717, + "acc_norm_stderr": 0.029312814153955934 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.4517601043024772, + "acc_stderr": 0.012710662233660247, + "acc_norm": 0.4517601043024772, + "acc_norm_stderr": 0.012710662233660247 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5490196078431373, + "acc_stderr": 0.034924061041636124, + "acc_norm": 0.5490196078431373, + "acc_norm_stderr": 0.034924061041636124 + }, + 
"harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.03756335775187896, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.03756335775187896 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.6682986536107711, + "mc1_stderr": 0.016482148810241477, + "mc2": 0.7838899620143942, + "mc2_stderr": 0.013393342032711244 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5726092089728453, + "acc_stderr": 0.017008129844823156, + "acc_norm": 0.6044864226682408, + "acc_norm_stderr": 0.016810815902206042 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + 
"harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "sygstat/XRAI-Inc.-SOLAR-10.7B-v1", + "model_sha": "253353ce72a57dde19f577985ddcae68e78e5873", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/taeminlee/mistral_7B_ma/result_2023-10-19 06:42:26.json b/taeminlee/mistral_7B_ma/result_2023-10-19 06:42:26.json new file mode 100644 index 0000000000000000000000000000000000000000..f57b552d668317ee19822f6ca7fa7be56512d658 --- /dev/null +++ b/taeminlee/mistral_7B_ma/result_2023-10-19 06:42:26.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 
0.33532423208191126, + "acc_stderr": 0.01379618294778556, + "acc_norm": 0.38139931740614336, + "acc_norm_stderr": 0.01419438908668526 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3703445528779128, + "acc_stderr": 0.004819100456867818, + "acc_norm": 0.481876120294762, + "acc_norm_stderr": 0.004986502296931182 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4619883040935672, + "acc_stderr": 0.03823727092882307, + "acc_norm": 0.4619883040935672, + "acc_norm_stderr": 0.03823727092882307 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5631067961165048, + "acc_stderr": 0.049111471073657764, + "acc_norm": 0.5631067961165048, + "acc_norm_stderr": 0.049111471073657764 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.46871008939974457, + "acc_stderr": 0.017844918090468544, + "acc_norm": 0.46871008939974457, + "acc_norm_stderr": 0.017844918090468544 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4148148148148148, + "acc_stderr": 0.042561937679014075, + "acc_norm": 0.4148148148148148, + "acc_norm_stderr": 0.042561937679014075 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.40425531914893614, + "acc_stderr": 0.03208115750788684, + "acc_norm": 0.40425531914893614, + "acc_norm_stderr": 0.03208115750788684 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.43373493975903615, + "acc_stderr": 0.03858158940685515, + "acc_norm": 0.43373493975903615, + "acc_norm_stderr": 0.03858158940685515 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5112540192926045, + "acc_stderr": 0.028390897396863533, + "acc_norm": 0.5112540192926045, + "acc_norm_stderr": 0.028390897396863533 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4439461883408072, + "acc_stderr": 0.03334625674242728, + "acc_norm": 0.4439461883408072, + "acc_norm_stderr": 0.03334625674242728 + }, + "harness|ko_mmlu_human_sexuality|5": { 
+ "acc": 0.5038167938931297, + "acc_stderr": 0.043851623256015534, + "acc_norm": 0.5038167938931297, + "acc_norm_stderr": 0.043851623256015534 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5505050505050505, + "acc_stderr": 0.035441324919479704, + "acc_norm": 0.5505050505050505, + "acc_norm_stderr": 0.035441324919479704 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4482758620689655, + "acc_stderr": 0.041443118108781506, + "acc_norm": 0.4482758620689655, + "acc_norm_stderr": 0.041443118108781506 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.04280105837364395, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.04280105837364395 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.542016806722689, + "acc_stderr": 0.03236361111951941, + "acc_norm": 0.542016806722689, + "acc_norm_stderr": 0.03236361111951941 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4794871794871795, + "acc_stderr": 0.025329663163489943, + "acc_norm": 0.4794871794871795, + "acc_norm_stderr": 0.025329663163489943 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.57, + "acc_stderr": 0.04975698519562429, + "acc_norm": 0.57, + "acc_norm_stderr": 0.04975698519562429 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.047609522856952344, + "acc_norm": 0.34, + "acc_norm_stderr": 0.047609522856952344 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5277777777777778, + "acc_stderr": 0.04826217294139894, + "acc_norm": 0.5277777777777778, + "acc_norm_stderr": 0.04826217294139894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.43842364532019706, + "acc_stderr": 0.03491207857486519, + "acc_norm": 0.43842364532019706, + "acc_norm_stderr": 0.03491207857486519 + }, + 
"harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4935483870967742, + "acc_stderr": 0.02844163823354051, + "acc_norm": 0.4935483870967742, + "acc_norm_stderr": 0.02844163823354051 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7435897435897436, + "acc_stderr": 0.028605953702004243, + "acc_norm": 0.7435897435897436, + "acc_norm_stderr": 0.028605953702004243 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4339622641509434, + "acc_stderr": 0.030503292013342592, + "acc_norm": 0.4339622641509434, + "acc_norm_stderr": 0.030503292013342592 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.0478833976870286, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.0478833976870286 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.337037037037037, + "acc_stderr": 0.028820884666253252, + "acc_norm": 0.337037037037037, + "acc_norm_stderr": 0.028820884666253252 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2582781456953642, + "acc_stderr": 0.035737053147634576, + "acc_norm": 0.2582781456953642, + "acc_norm_stderr": 0.035737053147634576 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5970149253731343, + "acc_stderr": 0.034683432951111266, + "acc_norm": 0.5970149253731343, + "acc_norm_stderr": 0.034683432951111266 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3815028901734104, + "acc_stderr": 0.03703851193099521, + "acc_norm": 0.3815028901734104, + "acc_norm_stderr": 0.03703851193099521 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.373015873015873, + "acc_stderr": 0.02490699045899257, + "acc_norm": 0.373015873015873, + "acc_norm_stderr": 0.02490699045899257 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3472222222222222, + "acc_stderr": 0.039812405437178615, + "acc_norm": 0.3472222222222222, + "acc_norm_stderr": 0.039812405437178615 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 
0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.58, + "acc_stderr": 0.04960449637488584, + "acc_norm": 0.58, + "acc_norm_stderr": 0.04960449637488584 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.523121387283237, + "acc_stderr": 0.026890297881303118, + "acc_norm": 0.523121387283237, + "acc_norm_stderr": 0.026890297881303118 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5153374233128835, + "acc_stderr": 0.039265223787088424, + "acc_norm": 0.5153374233128835, + "acc_norm_stderr": 0.039265223787088424 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4382716049382716, + "acc_stderr": 0.027607914087400473, + "acc_norm": 0.4382716049382716, + "acc_norm_stderr": 0.027607914087400473 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5129533678756477, + "acc_stderr": 0.0360722806104775, + "acc_norm": 0.5129533678756477, + "acc_norm_stderr": 0.0360722806104775 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04434600701584925, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04434600701584925 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.4917431192660551, + "acc_stderr": 0.021434399918214338, + "acc_norm": 0.4917431192660551, + "acc_norm_stderr": 0.021434399918214338 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.04285714285714281, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.04285714285714281 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.45751633986928103, + "acc_stderr": 0.028526383452142628, + "acc_norm": 0.45751633986928103, + "acc_norm_stderr": 0.028526383452142628 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 
0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6942148760330579, + "acc_stderr": 0.04205953933884124, + "acc_norm": 0.6942148760330579, + "acc_norm_stderr": 0.04205953933884124 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4407894736842105, + "acc_stderr": 0.04040311062490435, + "acc_norm": 0.4407894736842105, + "acc_norm_stderr": 0.04040311062490435 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.39869281045751637, + "acc_stderr": 0.01980828131744984, + "acc_norm": 0.39869281045751637, + "acc_norm_stderr": 0.01980828131744984 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.35815602836879434, + "acc_stderr": 0.028602085862759412, + "acc_norm": 0.35815602836879434, + "acc_norm_stderr": 0.028602085862759412 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4017857142857143, + "acc_stderr": 0.04653333146973646, + "acc_norm": 0.4017857142857143, + "acc_norm_stderr": 0.04653333146973646 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4351851851851852, + "acc_stderr": 0.03381200005643525, + "acc_norm": 0.4351851851851852, + "acc_norm_stderr": 0.03381200005643525 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.34413407821229053, + "acc_stderr": 0.015889221313307094, + "acc_norm": 0.34413407821229053, + "acc_norm_stderr": 0.015889221313307094 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.41544117647058826, + "acc_stderr": 0.029935342707877743, + "acc_norm": 0.41544117647058826, + "acc_norm_stderr": 0.029935342707877743 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 
0.563265306122449, + "acc_stderr": 0.03175195237583323, + "acc_norm": 0.563265306122449, + "acc_norm_stderr": 0.03175195237583323 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5738396624472574, + "acc_stderr": 0.03219035703131774, + "acc_norm": 0.5738396624472574, + "acc_norm_stderr": 0.03219035703131774 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.32790091264667537, + "acc_stderr": 0.011989936640666535, + "acc_norm": 0.32790091264667537, + "acc_norm_stderr": 0.011989936640666535 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.44607843137254904, + "acc_stderr": 0.03488845451304974, + "acc_norm": 0.44607843137254904, + "acc_norm_stderr": 0.03488845451304974 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.45454545454545453, + "acc_stderr": 0.03888176921674099, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.03888176921674099 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2864137086903305, + "mc1_stderr": 0.015826142439502342, + "mc2": 0.4613168911756529, + "mc2_stderr": 0.015417066073991514 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5076741440377804, + "acc_stderr": 0.017188329219654273, + "acc_norm": 0.5678866587957497, + "acc_norm_stderr": 0.017031170198851742 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 
1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + 
"model_name": "taeminlee/mistral_7B_ma", + "model_sha": "9773826bd9bb297186b78c87a410cbb07e1919cc", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/taeminlee/polyglot_12.8b_ins_orcastyle/result_2023-10-10 08:18:37.json b/taeminlee/polyglot_12.8b_ins_orcastyle/result_2023-10-10 08:18:37.json new file mode 100644 index 0000000000000000000000000000000000000000..8ddb6a07697bbc98aa23c36b089a4c1cdcd2ef67 --- /dev/null +++ b/taeminlee/polyglot_12.8b_ins_orcastyle/result_2023-10-10 08:18:37.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.27047781569965873, + "acc_stderr": 0.012980954547659556, + "acc_norm": 0.32849829351535836, + "acc_norm_stderr": 0.01372497846553737 + }, + "harness|ko_hellaswag|10": { + "acc": 0.386476797450707, + "acc_stderr": 0.004859467984155263, + "acc_norm": 0.4980083648675563, + "acc_norm_stderr": 0.004989741826250384 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.3157894736842105, + "acc_stderr": 0.035650796707083106, + "acc_norm": 0.3157894736842105, + "acc_norm_stderr": 0.035650796707083106 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.1650485436893204, + "acc_stderr": 0.036756688322331886, + "acc_norm": 0.1650485436893204, + "acc_norm_stderr": 0.036756688322331886 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.27330779054916987, + "acc_stderr": 0.015936681062628556, + "acc_norm": 0.27330779054916987, + "acc_norm_stderr": 0.015936681062628556 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3111111111111111, + "acc_stderr": 0.03999262876617722, + "acc_norm": 0.3111111111111111, + "acc_norm_stderr": 0.03999262876617722 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_conceptual_physics|5": { 
+ "acc": 0.2297872340425532, + "acc_stderr": 0.027501752944412424, + "acc_norm": 0.2297872340425532, + "acc_norm_stderr": 0.027501752944412424 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.21686746987951808, + "acc_stderr": 0.032082844503563655, + "acc_norm": 0.21686746987951808, + "acc_norm_stderr": 0.032082844503563655 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3054662379421222, + "acc_stderr": 0.026160584450140474, + "acc_norm": 0.3054662379421222, + "acc_norm_stderr": 0.026160584450140474 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.19282511210762332, + "acc_stderr": 0.02647824096048936, + "acc_norm": 0.19282511210762332, + "acc_norm_stderr": 0.02647824096048936 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.20610687022900764, + "acc_stderr": 0.03547771004159465, + "acc_norm": 0.20610687022900764, + "acc_norm_stderr": 0.03547771004159465 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.23737373737373738, + "acc_stderr": 0.03031371053819887, + "acc_norm": 0.23737373737373738, + "acc_norm_stderr": 0.03031371053819887 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2482758620689655, + "acc_stderr": 0.03600105692727771, + "acc_norm": 0.2482758620689655, + "acc_norm_stderr": 0.03600105692727771 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.04023382273617747, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.04023382273617747 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.22268907563025211, + "acc_stderr": 0.02702543349888239, + "acc_norm": 0.22268907563025211, + "acc_norm_stderr": 0.02702543349888239 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.22564102564102564, + "acc_stderr": 0.02119363252514854, + "acc_norm": 0.22564102564102564, + 
"acc_norm_stderr": 0.02119363252514854 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.19, + "acc_stderr": 0.03942772444036623, + "acc_norm": 0.19, + "acc_norm_stderr": 0.03942772444036623 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.04330043749650742, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.04330043749650742 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2315270935960591, + "acc_stderr": 0.029678333141444455, + "acc_norm": 0.2315270935960591, + "acc_norm_stderr": 0.029678333141444455 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.22580645161290322, + "acc_stderr": 0.023785577884181012, + "acc_norm": 0.22580645161290322, + "acc_norm_stderr": 0.023785577884181012 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.3076923076923077, + "acc_stderr": 0.030236389942173092, + "acc_norm": 0.3076923076923077, + "acc_norm_stderr": 0.030236389942173092 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.24150943396226415, + "acc_stderr": 0.02634148037111835, + "acc_norm": 0.24150943396226415, + "acc_norm_stderr": 0.02634148037111835 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.22727272727272727, + "acc_stderr": 0.04013964554072774, + "acc_norm": 0.22727272727272727, + "acc_norm_stderr": 0.04013964554072774 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.027309140588230193, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.027309140588230193 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2119205298013245, + "acc_stderr": 0.03336767086567977, + "acc_norm": 0.2119205298013245, + "acc_norm_stderr": 0.03336767086567977 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.26865671641791045, + "acc_stderr": 0.03134328358208954, + 
"acc_norm": 0.26865671641791045, + "acc_norm_stderr": 0.03134328358208954 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.23699421965317918, + "acc_stderr": 0.03242414757483099, + "acc_norm": 0.23699421965317918, + "acc_norm_stderr": 0.03242414757483099 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.21164021164021163, + "acc_stderr": 0.021037331505262883, + "acc_norm": 0.21164021164021163, + "acc_norm_stderr": 0.021037331505262883 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2638888888888889, + "acc_stderr": 0.03685651095897532, + "acc_norm": 0.2638888888888889, + "acc_norm_stderr": 0.03685651095897532 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.19, + "acc_stderr": 0.03942772444036623, + "acc_norm": 0.19, + "acc_norm_stderr": 0.03942772444036623 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.24277456647398843, + "acc_stderr": 0.023083658586984204, + "acc_norm": 0.24277456647398843, + "acc_norm_stderr": 0.023083658586984204 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.25766871165644173, + "acc_stderr": 0.03436150827846917, + "acc_norm": 0.25766871165644173, + "acc_norm_stderr": 0.03436150827846917 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.3117283950617284, + "acc_stderr": 0.02577311116963043, + "acc_norm": 0.3117283950617284, + "acc_norm_stderr": 0.02577311116963043 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.22, + "acc_stderr": 0.041633319989322695, + "acc_norm": 0.22, + "acc_norm_stderr": 0.041633319989322695 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.21761658031088082, + "acc_stderr": 0.029778663037752975, + "acc_norm": 0.21761658031088082, + "acc_norm_stderr": 0.029778663037752975 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + 
"acc_stderr": 0.039994238792813344, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.039994238792813344 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3137614678899083, + "acc_stderr": 0.019894723341469134, + "acc_norm": 0.3137614678899083, + "acc_norm_stderr": 0.019894723341469134 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.03718489006818115, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.03718489006818115 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.24836601307189543, + "acc_stderr": 0.02473998135511359, + "acc_norm": 0.24836601307189543, + "acc_norm_stderr": 0.02473998135511359 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816505, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816505 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.36363636363636365, + "acc_stderr": 0.04391326286724071, + "acc_norm": 0.36363636363636365, + "acc_norm_stderr": 0.04391326286724071 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.03459777606810536, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.03459777606810536 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.24673202614379086, + "acc_stderr": 0.0174408203674025, + "acc_norm": 0.24673202614379086, + "acc_norm_stderr": 0.0174408203674025 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2198581560283688, + "acc_stderr": 0.024706141070705488, + "acc_norm": 0.2198581560283688, + "acc_norm_stderr": 0.024706141070705488 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.22321428571428573, + "acc_stderr": 0.03952301967702511, + "acc_norm": 0.22321428571428573, + "acc_norm_stderr": 0.03952301967702511 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.28703703703703703, + "acc_stderr": 0.030851992993257013, + "acc_norm": 0.28703703703703703, + "acc_norm_stderr": 0.030851992993257013 + }, + 
"harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24692737430167597, + "acc_stderr": 0.014422292204808852, + "acc_norm": 0.24692737430167597, + "acc_norm_stderr": 0.014422292204808852 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816505, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816505 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.27205882352941174, + "acc_stderr": 0.027033041151681453, + "acc_norm": 0.27205882352941174, + "acc_norm_stderr": 0.027033041151681453 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.23265306122448978, + "acc_stderr": 0.02704925791589618, + "acc_norm": 0.23265306122448978, + "acc_norm_stderr": 0.02704925791589618 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.29535864978902954, + "acc_stderr": 0.029696338713422882, + "acc_norm": 0.29535864978902954, + "acc_norm_stderr": 0.029696338713422882 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2653194263363755, + "acc_stderr": 0.011276198843958866, + "acc_norm": 0.2653194263363755, + "acc_norm_stderr": 0.011276198843958866 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.25980392156862747, + "acc_stderr": 0.030778554678693257, + "acc_norm": 0.25980392156862747, + "acc_norm_stderr": 0.030778554678693257 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2606060606060606, + "acc_stderr": 0.034277431758165236, + "acc_norm": 0.2606060606060606, + "acc_norm_stderr": 0.034277431758165236 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2582619339045288, + "mc1_stderr": 0.015321821688476189, + "mc2": 0.42136905448237544, + "mc2_stderr": 0.014761634772803508 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3317591499409681, + "acc_stderr": 0.016187984642157312, + "acc_norm": 
0.4049586776859504, + "acc_norm_stderr": 0.016876941165045612 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + 
"harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "taeminlee/polyglot_12.8b_ins_orcastyle", + "model_sha": "eed242f438b6ee3860a810454126f468373836b2", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/taeminlee/polyglot_12.8b_ins_orcastyle_ma/result_2023-10-16 09:17:56.json b/taeminlee/polyglot_12.8b_ins_orcastyle_ma/result_2023-10-16 09:17:56.json new file mode 100644 index 0000000000000000000000000000000000000000..1d3bed86889c4beeadff924ca50d33bd533ec8ef --- /dev/null +++ b/taeminlee/polyglot_12.8b_ins_orcastyle_ma/result_2023-10-16 09:17:56.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2636518771331058, + "acc_stderr": 0.012875929151297073, + "acc_norm": 0.31399317406143346, + "acc_norm_stderr": 0.013562691224726284 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3792073292172874, + "acc_stderr": 0.0048419819735152775, + "acc_norm": 0.48775144393547104, + "acc_norm_stderr": 0.004988283981631052 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.21052631578947367, + "acc_stderr": 0.031267817146631786, + 
"acc_norm": 0.21052631578947367, + "acc_norm_stderr": 0.031267817146631786 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.18446601941747573, + "acc_stderr": 0.03840423627288276, + "acc_norm": 0.18446601941747573, + "acc_norm_stderr": 0.03840423627288276 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2554278416347382, + "acc_stderr": 0.015594955384455765, + "acc_norm": 0.2554278416347382, + "acc_norm_stderr": 0.015594955384455765 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.037857144650666544, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.037857144650666544 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932269, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932269 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.25957446808510637, + "acc_stderr": 0.028659179374292326, + "acc_norm": 0.25957446808510637, + "acc_norm_stderr": 0.028659179374292326 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.2469879518072289, + "acc_stderr": 0.03357351982064536, + "acc_norm": 0.2469879518072289, + "acc_norm_stderr": 0.03357351982064536 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.31189710610932475, + "acc_stderr": 0.02631185807185416, + "acc_norm": 0.31189710610932475, + "acc_norm_stderr": 0.02631185807185416 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.29596412556053814, + "acc_stderr": 0.03063659134869981, + "acc_norm": 0.29596412556053814, + "acc_norm_stderr": 0.03063659134869981 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3053435114503817, + "acc_stderr": 0.040393149787245626, + "acc_norm": 0.3053435114503817, + "acc_norm_stderr": 0.040393149787245626 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3181818181818182, + "acc_stderr": 0.03318477333845331, 
+ "acc_norm": 0.3181818181818182, + "acc_norm_stderr": 0.03318477333845331 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2482758620689655, + "acc_stderr": 0.036001056927277716, + "acc_norm": 0.2482758620689655, + "acc_norm_stderr": 0.036001056927277716 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.04488482852329017, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.04488482852329017 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.33613445378151263, + "acc_stderr": 0.03068473711513535, + "acc_norm": 0.33613445378151263, + "acc_norm_stderr": 0.03068473711513535 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3128205128205128, + "acc_stderr": 0.023507579020645347, + "acc_norm": 0.3128205128205128, + "acc_norm_stderr": 0.023507579020645347 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.26851851851851855, + "acc_stderr": 0.04284467968052191, + "acc_norm": 0.26851851851851855, + "acc_norm_stderr": 0.04284467968052191 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.28078817733990147, + "acc_stderr": 0.0316185633535861, + "acc_norm": 0.28078817733990147, + "acc_norm_stderr": 0.0316185633535861 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3064516129032258, + "acc_stderr": 0.026226485652553873, + "acc_norm": 0.3064516129032258, + "acc_norm_stderr": 0.026226485652553873 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.19658119658119658, + "acc_stderr": 0.02603538609895129, + "acc_norm": 0.19658119658119658, + "acc_norm_stderr": 0.02603538609895129 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 
0.2528301886792453, + "acc_stderr": 0.026749899771241238, + "acc_norm": 0.2528301886792453, + "acc_norm_stderr": 0.026749899771241238 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.23636363636363636, + "acc_stderr": 0.04069306319721377, + "acc_norm": 0.23636363636363636, + "acc_norm_stderr": 0.04069306319721377 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.026962424325073838, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.026962424325073838 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.32450331125827814, + "acc_stderr": 0.03822746937658754, + "acc_norm": 0.32450331125827814, + "acc_norm_stderr": 0.03822746937658754 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.263681592039801, + "acc_stderr": 0.031157150869355554, + "acc_norm": 0.263681592039801, + "acc_norm_stderr": 0.031157150869355554 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2947976878612717, + "acc_stderr": 0.03476599607516478, + "acc_norm": 0.2947976878612717, + "acc_norm_stderr": 0.03476599607516478 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.23544973544973544, + "acc_stderr": 0.021851509822031715, + "acc_norm": 0.23544973544973544, + "acc_norm_stderr": 0.021851509822031715 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2916666666666667, + "acc_stderr": 0.038009680605548574, + "acc_norm": 0.2916666666666667, + "acc_norm_stderr": 0.038009680605548574 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2630057803468208, + "acc_stderr": 0.023703099525258176, + "acc_norm": 0.2630057803468208, + "acc_norm_stderr": 0.023703099525258176 + }, + 
"harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.25153374233128833, + "acc_stderr": 0.03408997886857529, + "acc_norm": 0.25153374233128833, + "acc_norm_stderr": 0.03408997886857529 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2623456790123457, + "acc_stderr": 0.024477222856135114, + "acc_norm": 0.2623456790123457, + "acc_norm_stderr": 0.024477222856135114 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.32642487046632124, + "acc_stderr": 0.033840286211432945, + "acc_norm": 0.32642487046632124, + "acc_norm_stderr": 0.033840286211432945 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.040493392977481404, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.040493392977481404 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3284403669724771, + "acc_stderr": 0.020135902797298395, + "acc_norm": 0.3284403669724771, + "acc_norm_stderr": 0.020135902797298395 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.1984126984126984, + "acc_stderr": 0.03567016675276863, + "acc_norm": 0.1984126984126984, + "acc_norm_stderr": 0.03567016675276863 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.02582916327275748, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.02582916327275748 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036844, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036844 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.23140495867768596, + "acc_stderr": 0.03849856098794088, + "acc_norm": 0.23140495867768596, + "acc_norm_stderr": 0.03849856098794088 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.26973684210526316, + "acc_stderr": 0.03611780560284898, + "acc_norm": 0.26973684210526316, + 
"acc_norm_stderr": 0.03611780560284898 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.23366013071895425, + "acc_stderr": 0.017119158496044506, + "acc_norm": 0.23366013071895425, + "acc_norm_stderr": 0.017119158496044506 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.24468085106382978, + "acc_stderr": 0.02564555362226673, + "acc_norm": 0.24468085106382978, + "acc_norm_stderr": 0.02564555362226673 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.19642857142857142, + "acc_stderr": 0.03770970049347018, + "acc_norm": 0.19642857142857142, + "acc_norm_stderr": 0.03770970049347018 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.44907407407407407, + "acc_stderr": 0.03392238405321617, + "acc_norm": 0.44907407407407407, + "acc_norm_stderr": 0.03392238405321617 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27150837988826815, + "acc_stderr": 0.014874252168095278, + "acc_norm": 0.27150837988826815, + "acc_norm_stderr": 0.014874252168095278 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4375, + "acc_stderr": 0.030134614954403924, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.030134614954403924 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.37551020408163266, + "acc_stderr": 0.031001209039894836, + "acc_norm": 0.37551020408163266, + "acc_norm_stderr": 0.031001209039894836 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.25738396624472576, + "acc_stderr": 0.028458820991460295, + "acc_norm": 0.25738396624472576, + "acc_norm_stderr": 0.028458820991460295 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.24771838331160365, + 
"acc_stderr": 0.011025499291443737, + "acc_norm": 0.24771838331160365, + "acc_norm_stderr": 0.011025499291443737 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.029771775228145638, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.029771775228145638 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2727272727272727, + "acc_stderr": 0.0347769116216366, + "acc_norm": 0.2727272727272727, + "acc_norm_stderr": 0.0347769116216366 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2668298653610771, + "mc1_stderr": 0.01548369193923726, + "mc2": 0.4409725050511923, + "mc2_stderr": 0.014977060866131325 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3282172373081464, + "acc_stderr": 0.016143955036184442, + "acc_norm": 0.3919716646989374, + "acc_norm_stderr": 0.016784332119424074 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + 
"harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "taeminlee/polyglot_12.8b_ins_orcastyle_ma", + "model_sha": "e59d2d5beba5386564f914402663e3d530a1b093", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/tanitan2/SOLAR-10.7B-Instruct-SD_SOLAR_10.7B-slerp/result_2024-07-16 
09:35:44.json b/tanitan2/SOLAR-10.7B-Instruct-SD_SOLAR_10.7B-slerp/result_2024-07-16 09:35:44.json new file mode 100644 index 0000000000000000000000000000000000000000..1b675c94c25a429252194d8ab534b1b057e150f7 --- /dev/null +++ b/tanitan2/SOLAR-10.7B-Instruct-SD_SOLAR_10.7B-slerp/result_2024-07-16 09:35:44.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.560580204778157, + "acc_stderr": 0.014503747823580122, + "acc_norm": 0.621160409556314, + "acc_norm_stderr": 0.01417591549000032 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4493128858793069, + "acc_stderr": 0.004964075870120347, + "acc_norm": 0.5962955586536547, + "acc_norm_stderr": 0.004896368185765238 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6549707602339181, + "acc_stderr": 0.036459813773888065, + "acc_norm": 0.6549707602339181, + "acc_norm_stderr": 0.036459813773888065 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6990291262135923, + "acc_stderr": 0.04541609446503948, + "acc_norm": 0.6990291262135923, + "acc_norm_stderr": 0.04541609446503948 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6807151979565773, + "acc_stderr": 0.016671261749538743, + "acc_norm": 0.6807151979565773, + "acc_norm_stderr": 0.016671261749538743 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.5111111111111111, + "acc_stderr": 0.04318275491977976, + "acc_norm": 0.5111111111111111, + "acc_norm_stderr": 0.04318275491977976 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.5404255319148936, + "acc_stderr": 0.03257901482099834, + "acc_norm": 0.5404255319148936, + "acc_norm_stderr": 0.03257901482099834 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.5060240963855421, + "acc_stderr": 0.038922121953330446, + "acc_norm": 0.5060240963855421, + "acc_norm_stderr": 0.038922121953330446 + }, + 
"harness|ko_mmlu_philosophy|5": { + "acc": 0.6205787781350482, + "acc_stderr": 0.02755994980234782, + "acc_norm": 0.6205787781350482, + "acc_norm_stderr": 0.02755994980234782 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5874439461883408, + "acc_stderr": 0.03304062175449297, + "acc_norm": 0.5874439461883408, + "acc_norm_stderr": 0.03304062175449297 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5877862595419847, + "acc_stderr": 0.04317171194870255, + "acc_norm": 0.5877862595419847, + "acc_norm_stderr": 0.04317171194870255 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956913, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956913 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7727272727272727, + "acc_stderr": 0.02985751567338641, + "acc_norm": 0.7727272727272727, + "acc_norm_stderr": 0.02985751567338641 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5379310344827586, + "acc_stderr": 0.04154659671707548, + "acc_norm": 0.5379310344827586, + "acc_norm_stderr": 0.04154659671707548 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3235294117647059, + "acc_stderr": 0.046550104113196177, + "acc_norm": 0.3235294117647059, + "acc_norm_stderr": 0.046550104113196177 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6386554621848739, + "acc_stderr": 0.031204691225150023, + "acc_norm": 0.6386554621848739, + "acc_norm_stderr": 0.031204691225150023 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.6256410256410256, + "acc_stderr": 0.0245375915728305, + "acc_norm": 0.6256410256410256, + "acc_norm_stderr": 0.0245375915728305 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + 
"harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6388888888888888, + "acc_stderr": 0.04643454608906275, + "acc_norm": 0.6388888888888888, + "acc_norm_stderr": 0.04643454608906275 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4729064039408867, + "acc_stderr": 0.03512819077876106, + "acc_norm": 0.4729064039408867, + "acc_norm_stderr": 0.03512819077876106 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6451612903225806, + "acc_stderr": 0.02721888977330876, + "acc_norm": 0.6451612903225806, + "acc_norm_stderr": 0.02721888977330876 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.8076923076923077, + "acc_stderr": 0.0258192332564837, + "acc_norm": 0.8076923076923077, + "acc_norm_stderr": 0.0258192332564837 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.6339622641509434, + "acc_stderr": 0.02964781353936525, + "acc_norm": 0.6339622641509434, + "acc_norm_stderr": 0.02964781353936525 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6454545454545455, + "acc_stderr": 0.04582004841505417, + "acc_norm": 0.6454545454545455, + "acc_norm_stderr": 0.04582004841505417 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.02911661760608301, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.02911661760608301 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.37748344370860926, + "acc_stderr": 0.0395802723112157, + "acc_norm": 0.37748344370860926, + "acc_norm_stderr": 0.0395802723112157 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.746268656716418, + "acc_stderr": 0.030769444967296014, + "acc_norm": 0.746268656716418, + "acc_norm_stderr": 0.030769444967296014 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5375722543352601, + "acc_stderr": 0.03801685104524458, + "acc_norm": 0.5375722543352601, + "acc_norm_stderr": 0.03801685104524458 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.43915343915343913, + "acc_stderr": 0.025559920550531006, + 
"acc_norm": 0.43915343915343913, + "acc_norm_stderr": 0.025559920550531006 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5625, + "acc_stderr": 0.04148415739394154, + "acc_norm": 0.5625, + "acc_norm_stderr": 0.04148415739394154 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.75, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.75, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5924855491329479, + "acc_stderr": 0.026454578146931505, + "acc_norm": 0.5924855491329479, + "acc_norm_stderr": 0.026454578146931505 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.6503067484662577, + "acc_stderr": 0.03746668325470022, + "acc_norm": 0.6503067484662577, + "acc_norm_stderr": 0.03746668325470022 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6419753086419753, + "acc_stderr": 0.0266756119260371, + "acc_norm": 0.6419753086419753, + "acc_norm_stderr": 0.0266756119260371 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7461139896373057, + "acc_stderr": 0.03141024780565318, + "acc_norm": 0.7461139896373057, + "acc_norm_stderr": 0.03141024780565318 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.42105263157894735, + "acc_stderr": 0.04644602091222317, + "acc_norm": 0.42105263157894735, + "acc_norm_stderr": 0.04644602091222317 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.7431192660550459, + "acc_stderr": 0.018732492928342455, + "acc_norm": 0.7431192660550459, + "acc_norm_stderr": 0.018732492928342455 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.48412698412698413, + "acc_stderr": 0.04469881854072606, + "acc_norm": 
0.48412698412698413, + "acc_norm_stderr": 0.04469881854072606 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5980392156862745, + "acc_stderr": 0.02807415894760066, + "acc_norm": 0.5980392156862745, + "acc_norm_stderr": 0.02807415894760066 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.66, + "acc_stderr": 0.04760952285695237, + "acc_norm": 0.66, + "acc_norm_stderr": 0.04760952285695237 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7851239669421488, + "acc_stderr": 0.037494924487096966, + "acc_norm": 0.7851239669421488, + "acc_norm_stderr": 0.037494924487096966 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.6447368421052632, + "acc_stderr": 0.038947344870133176, + "acc_norm": 0.6447368421052632, + "acc_norm_stderr": 0.038947344870133176 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.545751633986928, + "acc_stderr": 0.02014297455379521, + "acc_norm": 0.545751633986928, + "acc_norm_stderr": 0.02014297455379521 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.4432624113475177, + "acc_stderr": 0.029634838473766002, + "acc_norm": 0.4432624113475177, + "acc_norm_stderr": 0.029634838473766002 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.39285714285714285, + "acc_stderr": 0.04635550135609976, + "acc_norm": 0.39285714285714285, + "acc_norm_stderr": 0.04635550135609976 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.03388857118502326, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.03388857118502326 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.36983240223463687, + "acc_stderr": 0.016145881256056212, + "acc_norm": 0.36983240223463687, + "acc_norm_stderr": 0.016145881256056212 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.7, + "acc_stderr": 
0.046056618647183814, + "acc_norm": 0.7, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5183823529411765, + "acc_stderr": 0.03035230339535196, + "acc_norm": 0.5183823529411765, + "acc_norm_stderr": 0.03035230339535196 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6816326530612244, + "acc_stderr": 0.029822533793982045, + "acc_norm": 0.6816326530612244, + "acc_norm_stderr": 0.029822533793982045 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7341772151898734, + "acc_stderr": 0.02875679962965834, + "acc_norm": 0.7341772151898734, + "acc_norm_stderr": 0.02875679962965834 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.4517601043024772, + "acc_stderr": 0.012710662233660245, + "acc_norm": 0.4517601043024772, + "acc_norm_stderr": 0.012710662233660245 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6911764705882353, + "acc_stderr": 0.03242661719827218, + "acc_norm": 0.6911764705882353, + "acc_norm_stderr": 0.03242661719827218 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.703030303030303, + "acc_stderr": 0.035679697722680474, + "acc_norm": 0.703030303030303, + "acc_norm_stderr": 0.035679697722680474 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.5605875152998776, + "mc1_stderr": 0.017374520482513704, + "mc2": 0.6962733312577075, + "mc2_stderr": 0.015321023643422008 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4887839433293979, + "acc_stderr": 0.017186028469489287, + "acc_norm": 0.500590318772137, + "acc_norm_stderr": 0.017190342123448586 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + 
"harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 
1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "tanitan2/SOLAR-10.7B-Instruct-SD_SOLAR_10.7B-slerp", + "model_sha": "6782ceddf108dc183a273a9f052a681e5fad8d8b", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/tanitan2/SOLAR-10.7B-Instruct-STOCK_SOLAR-linear/result_2024-07-19 00:20:37.json b/tanitan2/SOLAR-10.7B-Instruct-STOCK_SOLAR-linear/result_2024-07-19 00:20:37.json new file mode 100644 index 0000000000000000000000000000000000000000..326a64c4926ad65105fae6810672a47270799a2a --- /dev/null +++ b/tanitan2/SOLAR-10.7B-Instruct-STOCK_SOLAR-linear/result_2024-07-19 00:20:37.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.6100682593856656, + "acc_stderr": 0.014252959848892877, + "acc_norm": 0.6638225255972696, + "acc_norm_stderr": 0.013804855026205756 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4671380203146783, + "acc_stderr": 0.004978992721242828, + "acc_norm": 0.6303525194184425, + "acc_norm_stderr": 0.00481722729224028 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.695906432748538, + "acc_stderr": 0.03528211258245232, + "acc_norm": 0.695906432748538, + "acc_norm_stderr": 0.03528211258245232 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6893203883495146, + "acc_stderr": 0.0458212416016155, + "acc_norm": 0.6893203883495146, + "acc_norm_stderr": 0.0458212416016155 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.7088122605363985, + "acc_stderr": 0.0162460870697014, + "acc_norm": 0.7088122605363985, + "acc_norm_stderr": 0.0162460870697014 + }, 
+ "harness|ko_mmlu_anatomy|5": { + "acc": 0.43703703703703706, + "acc_stderr": 0.04284958639753399, + "acc_norm": 0.43703703703703706, + "acc_norm_stderr": 0.04284958639753399 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720683, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720683 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.5446808510638298, + "acc_stderr": 0.03255525359340356, + "acc_norm": 0.5446808510638298, + "acc_norm_stderr": 0.03255525359340356 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.5120481927710844, + "acc_stderr": 0.038913644958358175, + "acc_norm": 0.5120481927710844, + "acc_norm_stderr": 0.038913644958358175 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6591639871382636, + "acc_stderr": 0.026920841260776162, + "acc_norm": 0.6591639871382636, + "acc_norm_stderr": 0.026920841260776162 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.6098654708520179, + "acc_stderr": 0.03273766725459156, + "acc_norm": 0.6098654708520179, + "acc_norm_stderr": 0.03273766725459156 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6717557251908397, + "acc_stderr": 0.04118438565806298, + "acc_norm": 0.6717557251908397, + "acc_norm_stderr": 0.04118438565806298 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956914, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956914 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7525252525252525, + "acc_stderr": 0.030746300742124498, + "acc_norm": 0.7525252525252525, + "acc_norm_stderr": 0.030746300742124498 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5310344827586206, + "acc_stderr": 0.04158632762097828, + "acc_norm": 0.5310344827586206, + "acc_norm_stderr": 0.04158632762097828 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.04617034827006716, + "acc_norm": 0.3137254901960784, + "acc_norm_stderr": 0.04617034827006716 + 
}, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6932773109243697, + "acc_stderr": 0.02995382389188704, + "acc_norm": 0.6932773109243697, + "acc_norm_stderr": 0.02995382389188704 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.6384615384615384, + "acc_stderr": 0.024359581465397007, + "acc_norm": 0.6384615384615384, + "acc_norm_stderr": 0.024359581465397007 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.63, + "acc_stderr": 0.04852365870939098, + "acc_norm": 0.63, + "acc_norm_stderr": 0.04852365870939098 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6851851851851852, + "acc_stderr": 0.04489931073591312, + "acc_norm": 0.6851851851851852, + "acc_norm_stderr": 0.04489931073591312 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4630541871921182, + "acc_stderr": 0.035083705204426656, + "acc_norm": 0.4630541871921182, + "acc_norm_stderr": 0.035083705204426656 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6612903225806451, + "acc_stderr": 0.026923446059302827, + "acc_norm": 0.6612903225806451, + "acc_norm_stderr": 0.026923446059302827 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.811965811965812, + "acc_stderr": 0.02559819368665226, + "acc_norm": 0.811965811965812, + "acc_norm_stderr": 0.02559819368665226 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5849056603773585, + "acc_stderr": 0.03032594578928611, + "acc_norm": 0.5849056603773585, + "acc_norm_stderr": 0.03032594578928611 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.04607582090719976, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.04607582090719976 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.337037037037037, + "acc_stderr": 0.028820884666253255, + "acc_norm": 0.337037037037037, + 
"acc_norm_stderr": 0.028820884666253255 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3576158940397351, + "acc_stderr": 0.03913453431177258, + "acc_norm": 0.3576158940397351, + "acc_norm_stderr": 0.03913453431177258 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7213930348258707, + "acc_stderr": 0.031700561834973086, + "acc_norm": 0.7213930348258707, + "acc_norm_stderr": 0.031700561834973086 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5375722543352601, + "acc_stderr": 0.03801685104524458, + "acc_norm": 0.5375722543352601, + "acc_norm_stderr": 0.03801685104524458 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.4417989417989418, + "acc_stderr": 0.025576257061253837, + "acc_norm": 0.4417989417989418, + "acc_norm_stderr": 0.025576257061253837 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5486111111111112, + "acc_stderr": 0.04161402398403279, + "acc_norm": 0.5486111111111112, + "acc_norm_stderr": 0.04161402398403279 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.73, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.73, + "acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.615606936416185, + "acc_stderr": 0.026189666966272035, + "acc_norm": 0.615606936416185, + "acc_norm_stderr": 0.026189666966272035 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.6441717791411042, + "acc_stderr": 0.03761521380046734, + "acc_norm": 0.6441717791411042, + "acc_norm_stderr": 0.03761521380046734 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.026229649178821163, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.026229649178821163 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + 
"acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7668393782383419, + "acc_stderr": 0.03051611137147601, + "acc_norm": 0.7668393782383419, + "acc_norm_stderr": 0.03051611137147601 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.4649122807017544, + "acc_stderr": 0.046920083813689104, + "acc_norm": 0.4649122807017544, + "acc_norm_stderr": 0.046920083813689104 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.7596330275229358, + "acc_stderr": 0.01832060732096407, + "acc_norm": 0.7596330275229358, + "acc_norm_stderr": 0.01832060732096407 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.5158730158730159, + "acc_stderr": 0.044698818540726076, + "acc_norm": 0.5158730158730159, + "acc_norm_stderr": 0.044698818540726076 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.6241830065359477, + "acc_stderr": 0.027732834353363944, + "acc_norm": 0.6241830065359477, + "acc_norm_stderr": 0.027732834353363944 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.57, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.57, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7851239669421488, + "acc_stderr": 0.037494924487096966, + "acc_norm": 0.7851239669421488, + "acc_norm_stderr": 0.037494924487096966 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5986842105263158, + "acc_stderr": 0.039889037033362836, + "acc_norm": 0.5986842105263158, + "acc_norm_stderr": 0.039889037033362836 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5588235294117647, + "acc_stderr": 0.020087362076702853, + "acc_norm": 0.5588235294117647, + "acc_norm_stderr": 0.020087362076702853 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.44680851063829785, + "acc_stderr": 0.029658235097666904, + "acc_norm": 0.44680851063829785, + "acc_norm_stderr": 0.029658235097666904 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 
0.4732142857142857, + "acc_stderr": 0.047389751192741546, + "acc_norm": 0.4732142857142857, + "acc_norm_stderr": 0.047389751192741546 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5833333333333334, + "acc_stderr": 0.03362277436608044, + "acc_norm": 0.5833333333333334, + "acc_norm_stderr": 0.03362277436608044 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.3564245810055866, + "acc_stderr": 0.016018239710513412, + "acc_norm": 0.3564245810055866, + "acc_norm_stderr": 0.016018239710513412 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.52, + "acc_stderr": 0.05021167315686781, + "acc_norm": 0.52, + "acc_norm_stderr": 0.05021167315686781 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.75, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.75, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5845588235294118, + "acc_stderr": 0.029935342707877753, + "acc_norm": 0.5845588235294118, + "acc_norm_stderr": 0.029935342707877753 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.7142857142857143, + "acc_stderr": 0.028920583220675578, + "acc_norm": 0.7142857142857143, + "acc_norm_stderr": 0.028920583220675578 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7426160337552743, + "acc_stderr": 0.02845882099146031, + "acc_norm": 0.7426160337552743, + "acc_norm_stderr": 0.02845882099146031 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.4439374185136897, + "acc_stderr": 0.01268970816778768, + "acc_norm": 0.4439374185136897, + "acc_norm_stderr": 0.01268970816778768 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6764705882352942, + "acc_stderr": 0.03283472056108561, + "acc_norm": 0.6764705882352942, + "acc_norm_stderr": 0.03283472056108561 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6727272727272727, + "acc_stderr": 0.03663974994391242, + "acc_norm": 0.6727272727272727, + 
"acc_norm_stderr": 0.03663974994391242 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.5446756425948592, + "mc1_stderr": 0.017433490102538755, + "mc2": 0.6818432649761036, + "mc2_stderr": 0.015042979873824917 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.512396694214876, + "acc_stderr": 0.01718506973267653, + "acc_norm": 0.5171192443919717, + "acc_norm_stderr": 0.01718027524608563 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + 
"harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "tanitan2/SOLAR-10.7B-Instruct-STOCK_SOLAR-linear", + "model_sha": "c9af3ad47f430bd171eaef0f4f55fd883efaeb56", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/tanitan2/SOLAR-10.7B-Instruct-STOCK_SOLAR-passthrough/result_2024-07-12 07:58:20.json b/tanitan2/SOLAR-10.7B-Instruct-STOCK_SOLAR-passthrough/result_2024-07-12 07:58:20.json new file mode 100644 index 0000000000000000000000000000000000000000..bab236f746f5aa162fdab2c290a68909f0b30200 --- /dev/null +++ b/tanitan2/SOLAR-10.7B-Instruct-STOCK_SOLAR-passthrough/result_2024-07-12 07:58:20.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.6953924914675768, + "acc_stderr": 0.013449522109932492, + "acc_norm": 
0.7397610921501706, + "acc_norm_stderr": 0.01282193022511256 + }, + "harness|ko_hellaswag|10": { + "acc": 0.5977892850029874, + "acc_stderr": 0.0048934189299182675, + "acc_norm": 0.723859788886676, + "acc_norm_stderr": 0.004461732908157684 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5497076023391813, + "acc_stderr": 0.03815827365913238, + "acc_norm": 0.5497076023391813, + "acc_norm_stderr": 0.03815827365913238 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6407766990291263, + "acc_stderr": 0.04750458399041697, + "acc_norm": 0.6407766990291263, + "acc_norm_stderr": 0.04750458399041697 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6385696040868455, + "acc_stderr": 0.017179601328900732, + "acc_norm": 0.6385696040868455, + "acc_norm_stderr": 0.017179601328900732 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.42962962962962964, + "acc_stderr": 0.04276349494376599, + "acc_norm": 0.42962962962962964, + "acc_norm_stderr": 0.04276349494376599 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.425531914893617, + "acc_stderr": 0.03232146916224468, + "acc_norm": 0.425531914893617, + "acc_norm_stderr": 0.03232146916224468 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.463855421686747, + "acc_stderr": 0.03882310850890594, + "acc_norm": 0.463855421686747, + "acc_norm_stderr": 0.03882310850890594 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5755627009646302, + "acc_stderr": 0.028071928247946205, + "acc_norm": 0.5755627009646302, + "acc_norm_stderr": 0.028071928247946205 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.57847533632287, + "acc_stderr": 0.03314190222110658, + "acc_norm": 0.57847533632287, + "acc_norm_stderr": 0.03314190222110658 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5267175572519084, + "acc_stderr": 0.04379024936553893, + "acc_norm": 
0.5267175572519084, + "acc_norm_stderr": 0.04379024936553893 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6717171717171717, + "acc_stderr": 0.03345678422756777, + "acc_norm": 0.6717171717171717, + "acc_norm_stderr": 0.03345678422756777 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4689655172413793, + "acc_stderr": 0.04158632762097828, + "acc_norm": 0.4689655172413793, + "acc_norm_stderr": 0.04158632762097828 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.04389869956808778, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.04389869956808778 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5672268907563025, + "acc_stderr": 0.032183581077426124, + "acc_norm": 0.5672268907563025, + "acc_norm_stderr": 0.032183581077426124 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5564102564102564, + "acc_stderr": 0.025189149894764208, + "acc_norm": 0.5564102564102564, + "acc_norm_stderr": 0.025189149894764208 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.59, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.59, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.04557239513497752, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.04557239513497752 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4187192118226601, + "acc_stderr": 0.03471192860518468, + "acc_norm": 0.4187192118226601, + "acc_norm_stderr": 0.03471192860518468 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5129032258064516, + "acc_stderr": 
0.02843453315268186, + "acc_norm": 0.5129032258064516, + "acc_norm_stderr": 0.02843453315268186 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7649572649572649, + "acc_stderr": 0.02777883590493544, + "acc_norm": 0.7649572649572649, + "acc_norm_stderr": 0.02777883590493544 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4830188679245283, + "acc_stderr": 0.030755120364119905, + "acc_norm": 0.4830188679245283, + "acc_norm_stderr": 0.030755120364119905 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5727272727272728, + "acc_stderr": 0.04738198703545483, + "acc_norm": 0.5727272727272728, + "acc_norm_stderr": 0.04738198703545483 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.362962962962963, + "acc_stderr": 0.029318203645206865, + "acc_norm": 0.362962962962963, + "acc_norm_stderr": 0.029318203645206865 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.36423841059602646, + "acc_stderr": 0.03929111781242741, + "acc_norm": 0.36423841059602646, + "acc_norm_stderr": 0.03929111781242741 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6716417910447762, + "acc_stderr": 0.033206858897443244, + "acc_norm": 0.6716417910447762, + "acc_norm_stderr": 0.033206858897443244 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4508670520231214, + "acc_stderr": 0.037940126746970296, + "acc_norm": 0.4508670520231214, + "acc_norm_stderr": 0.037940126746970296 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.025487187147859372, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.025487187147859372 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4583333333333333, + "acc_stderr": 0.04166666666666665, + "acc_norm": 0.4583333333333333, + "acc_norm_stderr": 0.04166666666666665 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + 
"harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.68, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.68, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.6242774566473989, + "acc_stderr": 0.02607431485165708, + "acc_norm": 0.6242774566473989, + "acc_norm_stderr": 0.02607431485165708 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.6012269938650306, + "acc_stderr": 0.038470214204560246, + "acc_norm": 0.6012269938650306, + "acc_norm_stderr": 0.038470214204560246 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5493827160493827, + "acc_stderr": 0.027684721415656192, + "acc_norm": 0.5493827160493827, + "acc_norm_stderr": 0.027684721415656192 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7098445595854922, + "acc_stderr": 0.032752644677915145, + "acc_norm": 0.7098445595854922, + "acc_norm_stderr": 0.032752644677915145 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.35964912280701755, + "acc_stderr": 0.04514496132873633, + "acc_norm": 0.35964912280701755, + "acc_norm_stderr": 0.04514496132873633 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.636697247706422, + "acc_stderr": 0.020620603919625804, + "acc_norm": 0.636697247706422, + "acc_norm_stderr": 0.020620603919625804 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4365079365079365, + "acc_stderr": 0.04435932892851466, + "acc_norm": 0.4365079365079365, + "acc_norm_stderr": 0.04435932892851466 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5424836601307189, + "acc_stderr": 0.02852638345214264, + "acc_norm": 0.5424836601307189, + "acc_norm_stderr": 0.02852638345214264 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, 
+ "harness|ko_mmlu_international_law|5": { + "acc": 0.7024793388429752, + "acc_stderr": 0.041733491480834994, + "acc_norm": 0.7024793388429752, + "acc_norm_stderr": 0.041733491480834994 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5394736842105263, + "acc_stderr": 0.04056242252249034, + "acc_norm": 0.5394736842105263, + "acc_norm_stderr": 0.04056242252249034 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4738562091503268, + "acc_stderr": 0.020200164564804588, + "acc_norm": 0.4738562091503268, + "acc_norm_stderr": 0.020200164564804588 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.375886524822695, + "acc_stderr": 0.028893955412115892, + "acc_norm": 0.375886524822695, + "acc_norm_stderr": 0.028893955412115892 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4107142857142857, + "acc_stderr": 0.04669510663875191, + "acc_norm": 0.4107142857142857, + "acc_norm_stderr": 0.04669510663875191 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5, + "acc_stderr": 0.034099716973523674, + "acc_norm": 0.5, + "acc_norm_stderr": 0.034099716973523674 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.34413407821229053, + "acc_stderr": 0.015889221313307094, + "acc_norm": 0.34413407821229053, + "acc_norm_stderr": 0.015889221313307094 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.72, + "acc_stderr": 0.045126085985421296, + "acc_norm": 0.72, + "acc_norm_stderr": 0.045126085985421296 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.48161764705882354, + "acc_stderr": 0.030352303395351964, + "acc_norm": 0.48161764705882354, + "acc_norm_stderr": 0.030352303395351964 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6448979591836734, + "acc_stderr": 0.030635655150387634, + "acc_norm": 0.6448979591836734, + 
"acc_norm_stderr": 0.030635655150387634 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6413502109704642, + "acc_stderr": 0.031219569445301836, + "acc_norm": 0.6413502109704642, + "acc_norm_stderr": 0.031219569445301836 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.4178617992177314, + "acc_stderr": 0.012596744108998567, + "acc_norm": 0.4178617992177314, + "acc_norm_stderr": 0.012596744108998567 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5833333333333334, + "acc_stderr": 0.03460228327239172, + "acc_norm": 0.5833333333333334, + "acc_norm_stderr": 0.03460228327239172 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5878787878787879, + "acc_stderr": 0.038435669935887186, + "acc_norm": 0.5878787878787879, + "acc_norm_stderr": 0.038435669935887186 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.6499388004895961, + "mc1_stderr": 0.016697949420151025, + "mc2": 0.725221467189836, + "mc2_stderr": 0.015198414981468457 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.43919716646989376, + "acc_stderr": 0.017062775744780698, + "acc_norm": 0.44510035419126326, + "acc_norm_stderr": 0.01708641743100547 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + 
"harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": 
"tanitan2/SOLAR-10.7B-Instruct-STOCK_SOLAR-passthrough", + "model_sha": "87bd13515d10dfab8f88dec3536b5263ec5c9099", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/tanitan2/SOLAR-10.7B-Instruct-STOCK_SOLAR-slerp/result_2024-07-16 09:36:37.json b/tanitan2/SOLAR-10.7B-Instruct-STOCK_SOLAR-slerp/result_2024-07-16 09:36:37.json new file mode 100644 index 0000000000000000000000000000000000000000..c3dcc03e2dd89b619f3a892274b4163c5e8ceaa6 --- /dev/null +++ b/tanitan2/SOLAR-10.7B-Instruct-STOCK_SOLAR-slerp/result_2024-07-16 09:36:37.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.5614334470989761, + "acc_stderr": 0.01450068261821286, + "acc_norm": 0.6194539249146758, + "acc_norm_stderr": 0.014188277712349822 + }, + "harness|ko_hellaswag|10": { + "acc": 0.45309699263095, + "acc_stderr": 0.004967778940011945, + "acc_norm": 0.6032662816172077, + "acc_norm_stderr": 0.004882200364432369 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6783625730994152, + "acc_stderr": 0.03582529442573122, + "acc_norm": 0.6783625730994152, + "acc_norm_stderr": 0.03582529442573122 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.7087378640776699, + "acc_stderr": 0.044986763205729224, + "acc_norm": 0.7087378640776699, + "acc_norm_stderr": 0.044986763205729224 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.685823754789272, + "acc_stderr": 0.01659929173588493, + "acc_norm": 0.685823754789272, + "acc_norm_stderr": 0.01659929173588493 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.043097329010363554, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.043097329010363554 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + 
"harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.5276595744680851, + "acc_stderr": 0.03263597118409769, + "acc_norm": 0.5276595744680851, + "acc_norm_stderr": 0.03263597118409769 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.5240963855421686, + "acc_stderr": 0.038879718495972646, + "acc_norm": 0.5240963855421686, + "acc_norm_stderr": 0.038879718495972646 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6334405144694534, + "acc_stderr": 0.02736807824397163, + "acc_norm": 0.6334405144694534, + "acc_norm_stderr": 0.02736807824397163 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5695067264573991, + "acc_stderr": 0.033231973029429394, + "acc_norm": 0.5695067264573991, + "acc_norm_stderr": 0.033231973029429394 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6030534351145038, + "acc_stderr": 0.04291135671009224, + "acc_norm": 0.6030534351145038, + "acc_norm_stderr": 0.04291135671009224 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.48, + "acc_stderr": 0.05021167315686779, + "acc_norm": 0.48, + "acc_norm_stderr": 0.05021167315686779 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7878787878787878, + "acc_stderr": 0.029126522834586818, + "acc_norm": 0.7878787878787878, + "acc_norm_stderr": 0.029126522834586818 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5448275862068965, + "acc_stderr": 0.04149886942192118, + "acc_norm": 0.5448275862068965, + "acc_norm_stderr": 0.04149886942192118 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.04488482852329017, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.04488482852329017 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6596638655462185, + "acc_stderr": 0.030778057422931673, + "acc_norm": 0.6596638655462185, + "acc_norm_stderr": 0.030778057422931673 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.6461538461538462, + "acc_stderr": 0.024243783994062146, + "acc_norm": 
0.6461538461538462, + "acc_norm_stderr": 0.024243783994062146 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.62, + "acc_stderr": 0.04878317312145631, + "acc_norm": 0.62, + "acc_norm_stderr": 0.04878317312145631 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6574074074074074, + "acc_stderr": 0.04587904741301811, + "acc_norm": 0.6574074074074074, + "acc_norm_stderr": 0.04587904741301811 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4482758620689655, + "acc_stderr": 0.034991131376767445, + "acc_norm": 0.4482758620689655, + "acc_norm_stderr": 0.034991131376767445 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6516129032258065, + "acc_stderr": 0.027104826328100944, + "acc_norm": 0.6516129032258065, + "acc_norm_stderr": 0.027104826328100944 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.8162393162393162, + "acc_stderr": 0.025372139671722933, + "acc_norm": 0.8162393162393162, + "acc_norm_stderr": 0.025372139671722933 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.6377358490566037, + "acc_stderr": 0.029582245128384303, + "acc_norm": 0.6377358490566037, + "acc_norm_stderr": 0.029582245128384303 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6545454545454545, + "acc_stderr": 0.04554619617541054, + "acc_norm": 0.6545454545454545, + "acc_norm_stderr": 0.04554619617541054 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3814814814814815, + "acc_stderr": 0.02961671892749759, + "acc_norm": 0.3814814814814815, + "acc_norm_stderr": 0.02961671892749759 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.36423841059602646, + "acc_stderr": 0.03929111781242742, + "acc_norm": 0.36423841059602646, + "acc_norm_stderr": 0.03929111781242742 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.736318407960199, + "acc_stderr": 
0.03115715086935557, + "acc_norm": 0.736318407960199, + "acc_norm_stderr": 0.03115715086935557 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5375722543352601, + "acc_stderr": 0.03801685104524458, + "acc_norm": 0.5375722543352601, + "acc_norm_stderr": 0.03801685104524458 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.4312169312169312, + "acc_stderr": 0.025506481698138208, + "acc_norm": 0.4312169312169312, + "acc_norm_stderr": 0.025506481698138208 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5763888888888888, + "acc_stderr": 0.04132125019723369, + "acc_norm": 0.5763888888888888, + "acc_norm_stderr": 0.04132125019723369 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.75, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.75, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.6040462427745664, + "acc_stderr": 0.02632981334194624, + "acc_norm": 0.6040462427745664, + "acc_norm_stderr": 0.02632981334194624 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.6380368098159509, + "acc_stderr": 0.037757007291414416, + "acc_norm": 0.6380368098159509, + "acc_norm_stderr": 0.037757007291414416 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6450617283950617, + "acc_stderr": 0.02662415247884585, + "acc_norm": 0.6450617283950617, + "acc_norm_stderr": 0.02662415247884585 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7253886010362695, + "acc_stderr": 0.032210245080411544, + "acc_norm": 0.7253886010362695, + "acc_norm_stderr": 0.032210245080411544 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.42105263157894735, + 
"acc_stderr": 0.046446020912223177, + "acc_norm": 0.42105263157894735, + "acc_norm_stderr": 0.046446020912223177 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.744954128440367, + "acc_stderr": 0.01868850085653585, + "acc_norm": 0.744954128440367, + "acc_norm_stderr": 0.01868850085653585 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.49206349206349204, + "acc_stderr": 0.044715725362943486, + "acc_norm": 0.49206349206349204, + "acc_norm_stderr": 0.044715725362943486 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.6045751633986928, + "acc_stderr": 0.02799672318063145, + "acc_norm": 0.6045751633986928, + "acc_norm_stderr": 0.02799672318063145 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.66, + "acc_stderr": 0.04760952285695237, + "acc_norm": 0.66, + "acc_norm_stderr": 0.04760952285695237 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7851239669421488, + "acc_stderr": 0.03749492448709696, + "acc_norm": 0.7851239669421488, + "acc_norm_stderr": 0.03749492448709696 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.6447368421052632, + "acc_stderr": 0.038947344870133176, + "acc_norm": 0.6447368421052632, + "acc_norm_stderr": 0.038947344870133176 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5359477124183006, + "acc_stderr": 0.020175488765484056, + "acc_norm": 0.5359477124183006, + "acc_norm_stderr": 0.020175488765484056 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.41843971631205673, + "acc_stderr": 0.02942799403941999, + "acc_norm": 0.41843971631205673, + "acc_norm_stderr": 0.02942799403941999 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4017857142857143, + "acc_stderr": 0.04653333146973646, + "acc_norm": 0.4017857142857143, + "acc_norm_stderr": 0.04653333146973646 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5601851851851852, + "acc_stderr": 0.03385177976044812, + "acc_norm": 0.5601851851851852, + "acc_norm_stderr": 0.03385177976044812 + }, + 
"harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.38324022346368714, + "acc_stderr": 0.016260159604429128, + "acc_norm": 0.38324022346368714, + "acc_norm_stderr": 0.016260159604429128 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.74, + "acc_stderr": 0.04408440022768081, + "acc_norm": 0.74, + "acc_norm_stderr": 0.04408440022768081 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5404411764705882, + "acc_stderr": 0.03027332507734576, + "acc_norm": 0.5404411764705882, + "acc_norm_stderr": 0.03027332507734576 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6979591836734694, + "acc_stderr": 0.029393609319879797, + "acc_norm": 0.6979591836734694, + "acc_norm_stderr": 0.029393609319879797 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.729957805907173, + "acc_stderr": 0.028900721906293426, + "acc_norm": 0.729957805907173, + "acc_norm_stderr": 0.028900721906293426 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.44589308996088656, + "acc_stderr": 0.01269524471137978, + "acc_norm": 0.44589308996088656, + "acc_norm_stderr": 0.01269524471137978 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6617647058823529, + "acc_stderr": 0.0332057461294543, + "acc_norm": 0.6617647058823529, + "acc_norm_stderr": 0.0332057461294543 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6909090909090909, + "acc_stderr": 0.036085410115739666, + "acc_norm": 0.6909090909090909, + "acc_norm_stderr": 0.036085410115739666 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.5703794369645043, + "mc1_stderr": 0.017329234580409084, + "mc2": 0.6999323570775174, + "mc2_stderr": 0.015305951714511889 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4887839433293979, + "acc_stderr": 0.017186028469489287, + "acc_norm": 0.5088547815820543, 
+ "acc_norm_stderr": 0.017187658199336743 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 
1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "tanitan2/SOLAR-10.7B-Instruct-STOCK_SOLAR-slerp", + "model_sha": "e26ecb95a424049979fdc8152c43ba5bf43786c5", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/tanitan2/SOLAR-10.7B-Instruct-STOCK_SOLAR-ties/result_2024-07-12 07:59:55.json b/tanitan2/SOLAR-10.7B-Instruct-STOCK_SOLAR-ties/result_2024-07-12 07:59:55.json new file mode 100644 index 0000000000000000000000000000000000000000..fb333c563bfeb00bb9e012b3b6713bcbd5621bde --- /dev/null +++ b/tanitan2/SOLAR-10.7B-Instruct-STOCK_SOLAR-ties/result_2024-07-12 07:59:55.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.7286689419795221, + "acc_stderr": 0.012993807727545789, + "acc_norm": 0.7704778156996587, + "acc_norm_stderr": 0.012288926760890793 + }, + "harness|ko_hellaswag|10": { + "acc": 0.5880302728540131, + "acc_stderr": 0.004911837730582204, + "acc_norm": 0.7379008165704043, + "acc_norm_stderr": 0.004388775298210219 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.7076023391812866, + "acc_stderr": 0.03488647713457921, + "acc_norm": 
0.7076023391812866, + "acc_norm_stderr": 0.03488647713457921 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.7475728155339806, + "acc_stderr": 0.04301250399690878, + "acc_norm": 0.7475728155339806, + "acc_norm_stderr": 0.04301250399690878 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.735632183908046, + "acc_stderr": 0.01576998484069052, + "acc_norm": 0.735632183908046, + "acc_norm_stderr": 0.01576998484069052 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4962962962962963, + "acc_stderr": 0.04319223625811331, + "acc_norm": 0.4962962962962963, + "acc_norm_stderr": 0.04319223625811331 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.5106382978723404, + "acc_stderr": 0.03267862331014063, + "acc_norm": 0.5106382978723404, + "acc_norm_stderr": 0.03267862331014063 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.5120481927710844, + "acc_stderr": 0.038913644958358175, + "acc_norm": 0.5120481927710844, + "acc_norm_stderr": 0.038913644958358175 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6559485530546624, + "acc_stderr": 0.026981478043648047, + "acc_norm": 0.6559485530546624, + "acc_norm_stderr": 0.026981478043648047 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.6771300448430493, + "acc_stderr": 0.03138147637575499, + "acc_norm": 0.6771300448430493, + "acc_norm_stderr": 0.03138147637575499 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5801526717557252, + "acc_stderr": 0.04328577215262972, + "acc_norm": 0.5801526717557252, + "acc_norm_stderr": 0.04328577215262972 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.51, + "acc_stderr": 0.050241839379569095, + "acc_norm": 0.51, + "acc_norm_stderr": 0.050241839379569095 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7474747474747475, + "acc_stderr": 0.03095405547036592, + "acc_norm": 
0.7474747474747475, + "acc_norm_stderr": 0.03095405547036592 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5517241379310345, + "acc_stderr": 0.041443118108781526, + "acc_norm": 0.5517241379310345, + "acc_norm_stderr": 0.041443118108781526 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3431372549019608, + "acc_stderr": 0.04724007352383888, + "acc_norm": 0.3431372549019608, + "acc_norm_stderr": 0.04724007352383888 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6638655462184874, + "acc_stderr": 0.030684737115135363, + "acc_norm": 0.6638655462184874, + "acc_norm_stderr": 0.030684737115135363 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.6282051282051282, + "acc_stderr": 0.02450347255711092, + "acc_norm": 0.6282051282051282, + "acc_norm_stderr": 0.02450347255711092 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.73, + "acc_stderr": 0.04461960433384739, + "acc_norm": 0.73, + "acc_norm_stderr": 0.04461960433384739 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6759259259259259, + "acc_stderr": 0.04524596007030048, + "acc_norm": 0.6759259259259259, + "acc_norm_stderr": 0.04524596007030048 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.43842364532019706, + "acc_stderr": 0.03491207857486518, + "acc_norm": 0.43842364532019706, + "acc_norm_stderr": 0.03491207857486518 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6548387096774193, + "acc_stderr": 0.027045746573534323, + "acc_norm": 0.6548387096774193, + "acc_norm_stderr": 0.027045746573534323 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.8418803418803419, + "acc_stderr": 0.02390232554956041, + "acc_norm": 0.8418803418803419, + "acc_norm_stderr": 0.02390232554956041 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.6188679245283019, + 
"acc_stderr": 0.029890609686286637, + "acc_norm": 0.6188679245283019, + "acc_norm_stderr": 0.029890609686286637 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6818181818181818, + "acc_stderr": 0.044612721759105085, + "acc_norm": 0.6818181818181818, + "acc_norm_stderr": 0.044612721759105085 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3851851851851852, + "acc_stderr": 0.029670906124630882, + "acc_norm": 0.3851851851851852, + "acc_norm_stderr": 0.029670906124630882 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.39072847682119205, + "acc_stderr": 0.03983798306659806, + "acc_norm": 0.39072847682119205, + "acc_norm_stderr": 0.03983798306659806 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.736318407960199, + "acc_stderr": 0.03115715086935558, + "acc_norm": 0.736318407960199, + "acc_norm_stderr": 0.03115715086935558 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5491329479768786, + "acc_stderr": 0.03794012674697029, + "acc_norm": 0.5491329479768786, + "acc_norm_stderr": 0.03794012674697029 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.46825396825396826, + "acc_stderr": 0.025699352832131796, + "acc_norm": 0.46825396825396826, + "acc_norm_stderr": 0.025699352832131796 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5833333333333334, + "acc_stderr": 0.04122728707651282, + "acc_norm": 0.5833333333333334, + "acc_norm_stderr": 0.04122728707651282 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.77, + "acc_stderr": 0.04229525846816508, + "acc_norm": 0.77, + "acc_norm_stderr": 0.04229525846816508 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.6184971098265896, + "acc_stderr": 0.0261521986197268, + "acc_norm": 0.6184971098265896, + "acc_norm_stderr": 0.0261521986197268 + }, + "harness|ko_mmlu_logical_fallacies|5": { 
+ "acc": 0.6503067484662577, + "acc_stderr": 0.03746668325470023, + "acc_norm": 0.6503067484662577, + "acc_norm_stderr": 0.03746668325470023 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6820987654320988, + "acc_stderr": 0.025910063528240868, + "acc_norm": 0.6820987654320988, + "acc_norm_stderr": 0.025910063528240868 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.772020725388601, + "acc_stderr": 0.030276909945178274, + "acc_norm": 0.772020725388601, + "acc_norm_stderr": 0.030276909945178274 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.4824561403508772, + "acc_stderr": 0.04700708033551038, + "acc_norm": 0.4824561403508772, + "acc_norm_stderr": 0.04700708033551038 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.7706422018348624, + "acc_stderr": 0.01802534972461868, + "acc_norm": 0.7706422018348624, + "acc_norm_stderr": 0.01802534972461868 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.49206349206349204, + "acc_stderr": 0.044715725362943486, + "acc_norm": 0.49206349206349204, + "acc_norm_stderr": 0.044715725362943486 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.6535947712418301, + "acc_stderr": 0.02724561304721536, + "acc_norm": 0.6535947712418301, + "acc_norm_stderr": 0.02724561304721536 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.69, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.69, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7851239669421488, + "acc_stderr": 0.037494924487096966, + "acc_norm": 0.7851239669421488, + "acc_norm_stderr": 0.037494924487096966 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.6644736842105263, + "acc_stderr": 0.03842498559395268, + "acc_norm": 0.6644736842105263, + "acc_norm_stderr": 0.03842498559395268 + }, + 
"harness|ko_mmlu_professional_psychology|5": { + "acc": 0.6045751633986928, + "acc_stderr": 0.019780465954777515, + "acc_norm": 0.6045751633986928, + "acc_norm_stderr": 0.019780465954777515 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.450354609929078, + "acc_stderr": 0.02968010556502904, + "acc_norm": 0.450354609929078, + "acc_norm_stderr": 0.02968010556502904 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.44642857142857145, + "acc_stderr": 0.047184714852195886, + "acc_norm": 0.44642857142857145, + "acc_norm_stderr": 0.047184714852195886 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5277777777777778, + "acc_stderr": 0.0340470532865388, + "acc_norm": 0.5277777777777778, + "acc_norm_stderr": 0.0340470532865388 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.3854748603351955, + "acc_stderr": 0.016277927039638193, + "acc_norm": 0.3854748603351955, + "acc_norm_stderr": 0.016277927039638193 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.71, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.71, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5882352941176471, + "acc_stderr": 0.02989616303312547, + "acc_norm": 0.5882352941176471, + "acc_norm_stderr": 0.02989616303312547 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.673469387755102, + "acc_stderr": 0.03002105623844033, + "acc_norm": 0.673469387755102, + "acc_norm_stderr": 0.03002105623844033 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7510548523206751, + "acc_stderr": 0.028146970599422644, + "acc_norm": 0.7510548523206751, + "acc_norm_stderr": 0.028146970599422644 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.45827900912646674, + "acc_stderr": 0.012725701656953642, + 
"acc_norm": 0.45827900912646674, + "acc_norm_stderr": 0.012725701656953642 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6764705882352942, + "acc_stderr": 0.03283472056108561, + "acc_norm": 0.6764705882352942, + "acc_norm_stderr": 0.03283472056108561 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.036810508691615486, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.036810508691615486 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.7649938800489596, + "mc1_stderr": 0.014843061507731611, + "mc2": 0.8249287047568252, + "mc2_stderr": 0.012829091134360837 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.512396694214876, + "acc_stderr": 0.017185069732676528, + "acc_norm": 0.526564344746163, + "acc_norm_stderr": 0.017166075717577747 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, 
+ "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "tanitan2/SOLAR-10.7B-Instruct-STOCK_SOLAR-ties", + "model_sha": "c586a85dfad72633aff986077955d1edc82ee98e", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/tanitan2/SOLAR-10.7B-Instruct-T3Q-ko-solar-dpo-linear/result_2024-07-19 00:20:46.json 
b/tanitan2/SOLAR-10.7B-Instruct-T3Q-ko-solar-dpo-linear/result_2024-07-19 00:20:46.json new file mode 100644 index 0000000000000000000000000000000000000000..1b7de4c909d732d7915c224b231a8d3a8507a393 --- /dev/null +++ b/tanitan2/SOLAR-10.7B-Instruct-T3Q-ko-solar-dpo-linear/result_2024-07-19 00:20:46.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.6100682593856656, + "acc_stderr": 0.014252959848892877, + "acc_norm": 0.6646757679180887, + "acc_norm_stderr": 0.013796182947785564 + }, + "harness|ko_hellaswag|10": { + "acc": 0.46703843855805616, + "acc_stderr": 0.004978927164792876, + "acc_norm": 0.6301533559051982, + "acc_norm_stderr": 0.004817763581410237 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.695906432748538, + "acc_stderr": 0.03528211258245232, + "acc_norm": 0.695906432748538, + "acc_norm_stderr": 0.03528211258245232 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6893203883495146, + "acc_stderr": 0.0458212416016155, + "acc_norm": 0.6893203883495146, + "acc_norm_stderr": 0.0458212416016155 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.7088122605363985, + "acc_stderr": 0.0162460870697014, + "acc_norm": 0.7088122605363985, + "acc_norm_stderr": 0.0162460870697014 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.43703703703703706, + "acc_stderr": 0.04284958639753399, + "acc_norm": 0.43703703703703706, + "acc_norm_stderr": 0.04284958639753399 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.5446808510638298, + "acc_stderr": 0.03255525359340356, + "acc_norm": 0.5446808510638298, + "acc_norm_stderr": 0.03255525359340356 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.5120481927710844, + "acc_stderr": 0.038913644958358175, + "acc_norm": 0.5120481927710844, + "acc_norm_stderr": 0.038913644958358175 + }, + "harness|ko_mmlu_philosophy|5": { 
+ "acc": 0.6591639871382636, + "acc_stderr": 0.026920841260776162, + "acc_norm": 0.6591639871382636, + "acc_norm_stderr": 0.026920841260776162 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.6098654708520179, + "acc_stderr": 0.03273766725459156, + "acc_norm": 0.6098654708520179, + "acc_norm_stderr": 0.03273766725459156 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6717557251908397, + "acc_stderr": 0.04118438565806298, + "acc_norm": 0.6717557251908397, + "acc_norm_stderr": 0.04118438565806298 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956914, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956914 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7525252525252525, + "acc_stderr": 0.030746300742124498, + "acc_norm": 0.7525252525252525, + "acc_norm_stderr": 0.030746300742124498 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5310344827586206, + "acc_stderr": 0.04158632762097828, + "acc_norm": 0.5310344827586206, + "acc_norm_stderr": 0.04158632762097828 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.04617034827006716, + "acc_norm": 0.3137254901960784, + "acc_norm_stderr": 0.04617034827006716 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6932773109243697, + "acc_stderr": 0.02995382389188704, + "acc_norm": 0.6932773109243697, + "acc_norm_stderr": 0.02995382389188704 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.6384615384615384, + "acc_stderr": 0.024359581465397007, + "acc_norm": 0.6384615384615384, + "acc_norm_stderr": 0.024359581465397007 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.63, + "acc_stderr": 0.04852365870939098, + "acc_norm": 0.63, + "acc_norm_stderr": 0.04852365870939098 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + 
"harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6851851851851852, + "acc_stderr": 0.04489931073591312, + "acc_norm": 0.6851851851851852, + "acc_norm_stderr": 0.04489931073591312 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4630541871921182, + "acc_stderr": 0.035083705204426656, + "acc_norm": 0.4630541871921182, + "acc_norm_stderr": 0.035083705204426656 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6612903225806451, + "acc_stderr": 0.026923446059302827, + "acc_norm": 0.6612903225806451, + "acc_norm_stderr": 0.026923446059302827 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.811965811965812, + "acc_stderr": 0.02559819368665226, + "acc_norm": 0.811965811965812, + "acc_norm_stderr": 0.02559819368665226 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5849056603773585, + "acc_stderr": 0.03032594578928611, + "acc_norm": 0.5849056603773585, + "acc_norm_stderr": 0.03032594578928611 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6454545454545455, + "acc_stderr": 0.045820048415054174, + "acc_norm": 0.6454545454545455, + "acc_norm_stderr": 0.045820048415054174 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.337037037037037, + "acc_stderr": 0.028820884666253255, + "acc_norm": 0.337037037037037, + "acc_norm_stderr": 0.028820884666253255 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.36423841059602646, + "acc_stderr": 0.03929111781242741, + "acc_norm": 0.36423841059602646, + "acc_norm_stderr": 0.03929111781242741 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7213930348258707, + "acc_stderr": 0.031700561834973086, + "acc_norm": 0.7213930348258707, + "acc_norm_stderr": 0.031700561834973086 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5375722543352601, + "acc_stderr": 0.03801685104524458, + "acc_norm": 0.5375722543352601, + "acc_norm_stderr": 0.03801685104524458 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.4417989417989418, + "acc_stderr": 
0.025576257061253837, + "acc_norm": 0.4417989417989418, + "acc_norm_stderr": 0.025576257061253837 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5416666666666666, + "acc_stderr": 0.04166666666666666, + "acc_norm": 0.5416666666666666, + "acc_norm_stderr": 0.04166666666666666 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.73, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.73, + "acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.6184971098265896, + "acc_stderr": 0.026152198619726803, + "acc_norm": 0.6184971098265896, + "acc_norm_stderr": 0.026152198619726803 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.6441717791411042, + "acc_stderr": 0.03761521380046734, + "acc_norm": 0.6441717791411042, + "acc_norm_stderr": 0.03761521380046734 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.026229649178821163, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.026229649178821163 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7616580310880829, + "acc_stderr": 0.030748905363909902, + "acc_norm": 0.7616580310880829, + "acc_norm_stderr": 0.030748905363909902 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.4649122807017544, + "acc_stderr": 0.046920083813689104, + "acc_norm": 0.4649122807017544, + "acc_norm_stderr": 0.046920083813689104 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.7596330275229358, + "acc_stderr": 0.01832060732096407, + "acc_norm": 0.7596330275229358, + "acc_norm_stderr": 0.01832060732096407 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.5158730158730159, + 
"acc_stderr": 0.044698818540726076, + "acc_norm": 0.5158730158730159, + "acc_norm_stderr": 0.044698818540726076 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.6209150326797386, + "acc_stderr": 0.027780141207023334, + "acc_norm": 0.6209150326797386, + "acc_norm_stderr": 0.027780141207023334 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.57, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.57, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7851239669421488, + "acc_stderr": 0.037494924487096966, + "acc_norm": 0.7851239669421488, + "acc_norm_stderr": 0.037494924487096966 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5986842105263158, + "acc_stderr": 0.039889037033362836, + "acc_norm": 0.5986842105263158, + "acc_norm_stderr": 0.039889037033362836 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5571895424836601, + "acc_stderr": 0.02009508315457735, + "acc_norm": 0.5571895424836601, + "acc_norm_stderr": 0.02009508315457735 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.44680851063829785, + "acc_stderr": 0.029658235097666904, + "acc_norm": 0.44680851063829785, + "acc_norm_stderr": 0.029658235097666904 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4732142857142857, + "acc_stderr": 0.047389751192741546, + "acc_norm": 0.4732142857142857, + "acc_norm_stderr": 0.047389751192741546 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5833333333333334, + "acc_stderr": 0.03362277436608044, + "acc_norm": 0.5833333333333334, + "acc_norm_stderr": 0.03362277436608044 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.3564245810055866, + "acc_stderr": 0.016018239710513412, + "acc_norm": 0.3564245810055866, + "acc_norm_stderr": 0.016018239710513412 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956911 + }, + 
"harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.75, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.75, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5808823529411765, + "acc_stderr": 0.02997280717046462, + "acc_norm": 0.5808823529411765, + "acc_norm_stderr": 0.02997280717046462 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.7142857142857143, + "acc_stderr": 0.028920583220675578, + "acc_norm": 0.7142857142857143, + "acc_norm_stderr": 0.028920583220675578 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7383966244725738, + "acc_stderr": 0.028609516716994934, + "acc_norm": 0.7383966244725738, + "acc_norm_stderr": 0.028609516716994934 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.44654498044328556, + "acc_stderr": 0.012697046024399666, + "acc_norm": 0.44654498044328556, + "acc_norm_stderr": 0.012697046024399666 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6764705882352942, + "acc_stderr": 0.03283472056108561, + "acc_norm": 0.6764705882352942, + "acc_norm_stderr": 0.03283472056108561 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.03681050869161549, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.03681050869161549 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.5446756425948592, + "mc1_stderr": 0.017433490102538755, + "mc2": 0.6817707704055787, + "mc2_stderr": 0.015046009361025585 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.51357733175915, + "acc_stderr": 0.017184015060401455, + "acc_norm": 0.5182998819362455, + "acc_norm_stderr": 0.017178836639177755 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + 
"harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + 
"harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "tanitan2/SOLAR-10.7B-Instruct-T3Q-ko-solar-dpo-linear", + "model_sha": "bdc862849252a05b8c933429e9c57376346dc3d9", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/tanitan2/SOLAR-10.7B-Instruct-T3Q-ko-solar-dpo-slerp/result_2024-07-16 09:36:57.json b/tanitan2/SOLAR-10.7B-Instruct-T3Q-ko-solar-dpo-slerp/result_2024-07-16 09:36:57.json new file mode 100644 index 0000000000000000000000000000000000000000..dbb654acb6a1ed11d8f741bf2a6eb4e61c484e5d --- /dev/null +++ b/tanitan2/SOLAR-10.7B-Instruct-T3Q-ko-solar-dpo-slerp/result_2024-07-16 09:36:57.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.5597269624573379, + "acc_stderr": 0.01450676952480425, + "acc_norm": 0.6186006825938567, + "acc_norm_stderr": 0.014194389086685265 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4529974108743278, + "acc_stderr": 0.004967685204073108, + "acc_norm": 0.6030671181039634, + "acc_norm_stderr": 0.004882619484166601 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6783625730994152, + "acc_stderr": 0.03582529442573122, + "acc_norm": 0.6783625730994152, + "acc_norm_stderr": 0.03582529442573122 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.7087378640776699, + "acc_stderr": 0.044986763205729224, + "acc_norm": 0.7087378640776699, + "acc_norm_stderr": 0.044986763205729224 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 
0.685823754789272, + "acc_stderr": 0.01659929173588493, + "acc_norm": 0.685823754789272, + "acc_norm_stderr": 0.01659929173588493 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.043097329010363554, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.043097329010363554 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.5276595744680851, + "acc_stderr": 0.03263597118409769, + "acc_norm": 0.5276595744680851, + "acc_norm_stderr": 0.03263597118409769 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.5240963855421686, + "acc_stderr": 0.038879718495972646, + "acc_norm": 0.5240963855421686, + "acc_norm_stderr": 0.038879718495972646 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6334405144694534, + "acc_stderr": 0.02736807824397163, + "acc_norm": 0.6334405144694534, + "acc_norm_stderr": 0.02736807824397163 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5695067264573991, + "acc_stderr": 0.033231973029429394, + "acc_norm": 0.5695067264573991, + "acc_norm_stderr": 0.033231973029429394 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6030534351145038, + "acc_stderr": 0.04291135671009224, + "acc_norm": 0.6030534351145038, + "acc_norm_stderr": 0.04291135671009224 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.48, + "acc_stderr": 0.05021167315686779, + "acc_norm": 0.48, + "acc_norm_stderr": 0.05021167315686779 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7878787878787878, + "acc_stderr": 0.029126522834586818, + "acc_norm": 0.7878787878787878, + "acc_norm_stderr": 0.029126522834586818 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5448275862068965, + "acc_stderr": 0.04149886942192118, + "acc_norm": 0.5448275862068965, + "acc_norm_stderr": 0.04149886942192118 + }, + "harness|ko_mmlu_college_physics|5": { + 
"acc": 0.28431372549019607, + "acc_stderr": 0.04488482852329017, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.04488482852329017 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6596638655462185, + "acc_stderr": 0.030778057422931673, + "acc_norm": 0.6596638655462185, + "acc_norm_stderr": 0.030778057422931673 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.6435897435897436, + "acc_stderr": 0.024283140529467315, + "acc_norm": 0.6435897435897436, + "acc_norm_stderr": 0.024283140529467315 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.62, + "acc_stderr": 0.04878317312145631, + "acc_norm": 0.62, + "acc_norm_stderr": 0.04878317312145631 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6574074074074074, + "acc_stderr": 0.04587904741301811, + "acc_norm": 0.6574074074074074, + "acc_norm_stderr": 0.04587904741301811 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4482758620689655, + "acc_stderr": 0.034991131376767445, + "acc_norm": 0.4482758620689655, + "acc_norm_stderr": 0.034991131376767445 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6516129032258065, + "acc_stderr": 0.027104826328100944, + "acc_norm": 0.6516129032258065, + "acc_norm_stderr": 0.027104826328100944 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.8162393162393162, + "acc_stderr": 0.025372139671722933, + "acc_norm": 0.8162393162393162, + "acc_norm_stderr": 0.025372139671722933 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.6377358490566037, + "acc_stderr": 0.029582245128384303, + "acc_norm": 0.6377358490566037, + "acc_norm_stderr": 0.029582245128384303 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6545454545454545, + "acc_stderr": 0.04554619617541054, + "acc_norm": 0.6545454545454545, + "acc_norm_stderr": 0.04554619617541054 + }, 
+ "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3851851851851852, + "acc_stderr": 0.029670906124630882, + "acc_norm": 0.3851851851851852, + "acc_norm_stderr": 0.029670906124630882 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.37748344370860926, + "acc_stderr": 0.0395802723112157, + "acc_norm": 0.37748344370860926, + "acc_norm_stderr": 0.0395802723112157 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.736318407960199, + "acc_stderr": 0.03115715086935557, + "acc_norm": 0.736318407960199, + "acc_norm_stderr": 0.03115715086935557 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5375722543352601, + "acc_stderr": 0.03801685104524458, + "acc_norm": 0.5375722543352601, + "acc_norm_stderr": 0.03801685104524458 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.4312169312169312, + "acc_stderr": 0.025506481698138208, + "acc_norm": 0.4312169312169312, + "acc_norm_stderr": 0.025506481698138208 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5763888888888888, + "acc_stderr": 0.04132125019723369, + "acc_norm": 0.5763888888888888, + "acc_norm_stderr": 0.04132125019723369 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.75, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.75, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.6069364161849711, + "acc_stderr": 0.026296227915613677, + "acc_norm": 0.6069364161849711, + "acc_norm_stderr": 0.026296227915613677 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.6380368098159509, + "acc_stderr": 0.037757007291414416, + "acc_norm": 0.6380368098159509, + "acc_norm_stderr": 0.037757007291414416 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6419753086419753, + "acc_stderr": 0.026675611926037093, + "acc_norm": 0.6419753086419753, + "acc_norm_stderr": 
0.026675611926037093 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7253886010362695, + "acc_stderr": 0.032210245080411544, + "acc_norm": 0.7253886010362695, + "acc_norm_stderr": 0.032210245080411544 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.42105263157894735, + "acc_stderr": 0.046446020912223177, + "acc_norm": 0.42105263157894735, + "acc_norm_stderr": 0.046446020912223177 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.744954128440367, + "acc_stderr": 0.01868850085653585, + "acc_norm": 0.744954128440367, + "acc_norm_stderr": 0.01868850085653585 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.49206349206349204, + "acc_stderr": 0.044715725362943486, + "acc_norm": 0.49206349206349204, + "acc_norm_stderr": 0.044715725362943486 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.6045751633986928, + "acc_stderr": 0.02799672318063145, + "acc_norm": 0.6045751633986928, + "acc_norm_stderr": 0.02799672318063145 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.66, + "acc_stderr": 0.04760952285695237, + "acc_norm": 0.66, + "acc_norm_stderr": 0.04760952285695237 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7851239669421488, + "acc_stderr": 0.03749492448709696, + "acc_norm": 0.7851239669421488, + "acc_norm_stderr": 0.03749492448709696 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.6447368421052632, + "acc_stderr": 0.038947344870133176, + "acc_norm": 0.6447368421052632, + "acc_norm_stderr": 0.038947344870133176 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5359477124183006, + "acc_stderr": 0.020175488765484056, + "acc_norm": 0.5359477124183006, + "acc_norm_stderr": 0.020175488765484056 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.41843971631205673, + "acc_stderr": 0.02942799403941999, + 
"acc_norm": 0.41843971631205673, + "acc_norm_stderr": 0.02942799403941999 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4017857142857143, + "acc_stderr": 0.04653333146973646, + "acc_norm": 0.4017857142857143, + "acc_norm_stderr": 0.04653333146973646 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5601851851851852, + "acc_stderr": 0.03385177976044812, + "acc_norm": 0.5601851851851852, + "acc_norm_stderr": 0.03385177976044812 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.38324022346368714, + "acc_stderr": 0.016260159604429128, + "acc_norm": 0.38324022346368714, + "acc_norm_stderr": 0.016260159604429128 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.74, + "acc_stderr": 0.04408440022768081, + "acc_norm": 0.74, + "acc_norm_stderr": 0.04408440022768081 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5404411764705882, + "acc_stderr": 0.03027332507734576, + "acc_norm": 0.5404411764705882, + "acc_norm_stderr": 0.03027332507734576 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6979591836734694, + "acc_stderr": 0.029393609319879797, + "acc_norm": 0.6979591836734694, + "acc_norm_stderr": 0.029393609319879797 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.729957805907173, + "acc_stderr": 0.028900721906293426, + "acc_norm": 0.729957805907173, + "acc_norm_stderr": 0.028900721906293426 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.4445893089960887, + "acc_stderr": 0.012691575792657114, + "acc_norm": 0.4445893089960887, + "acc_norm_stderr": 0.012691575792657114 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6617647058823529, + "acc_stderr": 0.0332057461294543, + "acc_norm": 0.6617647058823529, + "acc_norm_stderr": 0.0332057461294543 + }, + 
"harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6909090909090909, + "acc_stderr": 0.036085410115739666, + "acc_norm": 0.6909090909090909, + "acc_norm_stderr": 0.036085410115739666 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.5691554467564259, + "mc1_stderr": 0.017335272475332373, + "mc2": 0.700152431348099, + "mc2_stderr": 0.015303160262859886 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4887839433293979, + "acc_stderr": 0.017186028469489287, + "acc_norm": 0.5088547815820543, + "acc_norm_stderr": 0.017187658199336743 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + 
"harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "tanitan2/SOLAR-10.7B-Instruct-T3Q-ko-solar-dpo-slerp", + "model_sha": "11151401ff45e4b050b949cc0dba03ec22a9b90e", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/tanitan2/SOLAR-10.7B-Instruct-T3Q-ko-solar-dpo-ties/result_2024-07-15 09:21:12.json b/tanitan2/SOLAR-10.7B-Instruct-T3Q-ko-solar-dpo-ties/result_2024-07-15 09:21:12.json new file mode 100644 index 0000000000000000000000000000000000000000..bde9e324ed055b1f0d1a5a78991d66e53be7f73e --- /dev/null +++ b/tanitan2/SOLAR-10.7B-Instruct-T3Q-ko-solar-dpo-ties/result_2024-07-15 
09:21:12.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.7295221843003413, + "acc_stderr": 0.012980954547659558, + "acc_norm": 0.7696245733788396, + "acc_norm_stderr": 0.01230492841874761 + }, + "harness|ko_hellaswag|10": { + "acc": 0.5881298546106354, + "acc_stderr": 0.004911659884506159, + "acc_norm": 0.7375024895439155, + "acc_norm_stderr": 0.004390923353200571 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.7076023391812866, + "acc_stderr": 0.03488647713457921, + "acc_norm": 0.7076023391812866, + "acc_norm_stderr": 0.03488647713457921 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.7475728155339806, + "acc_stderr": 0.04301250399690878, + "acc_norm": 0.7475728155339806, + "acc_norm_stderr": 0.04301250399690878 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.735632183908046, + "acc_stderr": 0.01576998484069052, + "acc_norm": 0.735632183908046, + "acc_norm_stderr": 0.01576998484069052 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4962962962962963, + "acc_stderr": 0.04319223625811331, + "acc_norm": 0.4962962962962963, + "acc_norm_stderr": 0.04319223625811331 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.5148936170212766, + "acc_stderr": 0.032671518489247764, + "acc_norm": 0.5148936170212766, + "acc_norm_stderr": 0.032671518489247764 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.5120481927710844, + "acc_stderr": 0.038913644958358175, + "acc_norm": 0.5120481927710844, + "acc_norm_stderr": 0.038913644958358175 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6527331189710611, + "acc_stderr": 0.027040745502307336, + "acc_norm": 0.6527331189710611, + "acc_norm_stderr": 0.027040745502307336 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.672645739910314, + "acc_stderr": 0.03149384670994131, + "acc_norm": 0.672645739910314, + 
"acc_norm_stderr": 0.03149384670994131 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5801526717557252, + "acc_stderr": 0.04328577215262972, + "acc_norm": 0.5801526717557252, + "acc_norm_stderr": 0.04328577215262972 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.51, + "acc_stderr": 0.050241839379569095, + "acc_norm": 0.51, + "acc_norm_stderr": 0.050241839379569095 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7525252525252525, + "acc_stderr": 0.030746300742124515, + "acc_norm": 0.7525252525252525, + "acc_norm_stderr": 0.030746300742124515 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5517241379310345, + "acc_stderr": 0.041443118108781526, + "acc_norm": 0.5517241379310345, + "acc_norm_stderr": 0.041443118108781526 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3431372549019608, + "acc_stderr": 0.04724007352383888, + "acc_norm": 0.3431372549019608, + "acc_norm_stderr": 0.04724007352383888 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6638655462184874, + "acc_stderr": 0.030684737115135363, + "acc_norm": 0.6638655462184874, + "acc_norm_stderr": 0.030684737115135363 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.6307692307692307, + "acc_stderr": 0.024468615241478926, + "acc_norm": 0.6307692307692307, + "acc_norm_stderr": 0.024468615241478926 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.73, + "acc_stderr": 0.04461960433384739, + "acc_norm": 0.73, + "acc_norm_stderr": 0.04461960433384739 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6759259259259259, + "acc_stderr": 0.04524596007030048, + "acc_norm": 0.6759259259259259, + "acc_norm_stderr": 0.04524596007030048 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.43842364532019706, + "acc_stderr": 0.03491207857486518, + 
"acc_norm": 0.43842364532019706, + "acc_norm_stderr": 0.03491207857486518 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6516129032258065, + "acc_stderr": 0.02710482632810094, + "acc_norm": 0.6516129032258065, + "acc_norm_stderr": 0.02710482632810094 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.8418803418803419, + "acc_stderr": 0.02390232554956041, + "acc_norm": 0.8418803418803419, + "acc_norm_stderr": 0.02390232554956041 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.6226415094339622, + "acc_stderr": 0.029832808114796005, + "acc_norm": 0.6226415094339622, + "acc_norm_stderr": 0.029832808114796005 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6818181818181818, + "acc_stderr": 0.044612721759105085, + "acc_norm": 0.6818181818181818, + "acc_norm_stderr": 0.044612721759105085 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3851851851851852, + "acc_stderr": 0.029670906124630882, + "acc_norm": 0.3851851851851852, + "acc_norm_stderr": 0.029670906124630882 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3973509933774834, + "acc_stderr": 0.039955240076816806, + "acc_norm": 0.3973509933774834, + "acc_norm_stderr": 0.039955240076816806 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.736318407960199, + "acc_stderr": 0.03115715086935558, + "acc_norm": 0.736318407960199, + "acc_norm_stderr": 0.03115715086935558 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5491329479768786, + "acc_stderr": 0.03794012674697029, + "acc_norm": 0.5491329479768786, + "acc_norm_stderr": 0.03794012674697029 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.4656084656084656, + "acc_stderr": 0.02569032176249385, + "acc_norm": 0.4656084656084656, + "acc_norm_stderr": 0.02569032176249385 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5833333333333334, + "acc_stderr": 0.04122728707651282, + "acc_norm": 0.5833333333333334, + "acc_norm_stderr": 0.04122728707651282 + }, + 
"harness|ko_mmlu_college_chemistry|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.77, + "acc_stderr": 0.04229525846816508, + "acc_norm": 0.77, + "acc_norm_stderr": 0.04229525846816508 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.6184971098265896, + "acc_stderr": 0.0261521986197268, + "acc_norm": 0.6184971098265896, + "acc_norm_stderr": 0.0261521986197268 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.6441717791411042, + "acc_stderr": 0.03761521380046734, + "acc_norm": 0.6441717791411042, + "acc_norm_stderr": 0.03761521380046734 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6790123456790124, + "acc_stderr": 0.02597656601086274, + "acc_norm": 0.6790123456790124, + "acc_norm_stderr": 0.02597656601086274 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.772020725388601, + "acc_stderr": 0.030276909945178274, + "acc_norm": 0.772020725388601, + "acc_norm_stderr": 0.030276909945178274 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.4824561403508772, + "acc_stderr": 0.04700708033551038, + "acc_norm": 0.4824561403508772, + "acc_norm_stderr": 0.04700708033551038 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.7724770642201835, + "acc_stderr": 0.017974463578776502, + "acc_norm": 0.7724770642201835, + "acc_norm_stderr": 0.017974463578776502 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.5, + "acc_stderr": 0.04472135954999579, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04472135954999579 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.6535947712418301, + "acc_stderr": 0.02724561304721536, + "acc_norm": 0.6535947712418301, + "acc_norm_stderr": 0.02724561304721536 + }, + 
"harness|ko_mmlu_business_ethics|5": { + "acc": 0.69, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.69, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7768595041322314, + "acc_stderr": 0.03800754475228733, + "acc_norm": 0.7768595041322314, + "acc_norm_stderr": 0.03800754475228733 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.6644736842105263, + "acc_stderr": 0.03842498559395268, + "acc_norm": 0.6644736842105263, + "acc_norm_stderr": 0.03842498559395268 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.6062091503267973, + "acc_stderr": 0.01976621199107307, + "acc_norm": 0.6062091503267973, + "acc_norm_stderr": 0.01976621199107307 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.450354609929078, + "acc_stderr": 0.029680105565029036, + "acc_norm": 0.450354609929078, + "acc_norm_stderr": 0.029680105565029036 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.44642857142857145, + "acc_stderr": 0.047184714852195886, + "acc_norm": 0.44642857142857145, + "acc_norm_stderr": 0.047184714852195886 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5324074074074074, + "acc_stderr": 0.03402801581358966, + "acc_norm": 0.5324074074074074, + "acc_norm_stderr": 0.03402801581358966 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.38324022346368714, + "acc_stderr": 0.016260159604429128, + "acc_norm": 0.38324022346368714, + "acc_norm_stderr": 0.016260159604429128 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.73, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.73, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5882352941176471, + "acc_stderr": 0.02989616303312547, + "acc_norm": 0.5882352941176471, + "acc_norm_stderr": 
0.02989616303312547 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.673469387755102, + "acc_stderr": 0.03002105623844033, + "acc_norm": 0.673469387755102, + "acc_norm_stderr": 0.03002105623844033 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7510548523206751, + "acc_stderr": 0.028146970599422644, + "acc_norm": 0.7510548523206751, + "acc_norm_stderr": 0.028146970599422644 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.4576271186440678, + "acc_stderr": 0.012724296550980188, + "acc_norm": 0.4576271186440678, + "acc_norm_stderr": 0.012724296550980188 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6764705882352942, + "acc_stderr": 0.03283472056108561, + "acc_norm": 0.6764705882352942, + "acc_norm_stderr": 0.03283472056108561 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6606060606060606, + "acc_stderr": 0.03697442205031595, + "acc_norm": 0.6606060606060606, + "acc_norm_stderr": 0.03697442205031595 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.7637698898408812, + "mc1_stderr": 0.014869755015871119, + "mc2": 0.8247966685281103, + "mc2_stderr": 0.012842411377810867 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5147579693034239, + "acc_stderr": 0.017182864434998564, + "acc_norm": 0.5289256198347108, + "acc_norm_stderr": 0.017161563949916345 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + 
"harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + 
"harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "tanitan2/SOLAR-10.7B-Instruct-T3Q-ko-solar-dpo-ties", + "model_sha": "9a4854888cf34f7a1de9aa2f85a2349c5da56b4e", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/teknium/OpenHermes-2-Mistral-7B/result_2023-10-23 09:00:37.json b/teknium/OpenHermes-2-Mistral-7B/result_2023-10-23 09:00:37.json new file mode 100644 index 0000000000000000000000000000000000000000..e2ba147aee96d2387adeb256a854b3a8119e89a4 --- /dev/null +++ b/teknium/OpenHermes-2-Mistral-7B/result_2023-10-23 09:00:37.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3310580204778157, + "acc_stderr": 0.013752062419817836, + "acc_norm": 0.37372013651877134, + "acc_norm_stderr": 0.014137708601759075 + }, + "harness|ko_hellaswag|10": { + "acc": 0.37711611232822145, + "acc_stderr": 0.004836738514051328, + "acc_norm": 0.48078072097191793, + "acc_norm_stderr": 0.004986093791041655 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4678362573099415, + "acc_stderr": 0.03826882417660369, + "acc_norm": 0.4678362573099415, + "acc_norm_stderr": 0.03826882417660369 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5533980582524272, + "acc_stderr": 0.04922424153458934, + "acc_norm": 0.5533980582524272, + "acc_norm_stderr": 0.04922424153458934 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.48020434227330777, + "acc_stderr": 0.017865944827291605, + "acc_norm": 0.48020434227330777, + "acc_norm_stderr": 0.017865944827291605 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.042446332383532286, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.042446332383532286 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + 
"acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3617021276595745, + "acc_stderr": 0.0314108219759624, + "acc_norm": 0.3617021276595745, + "acc_norm_stderr": 0.0314108219759624 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42771084337349397, + "acc_stderr": 0.03851597683718533, + "acc_norm": 0.42771084337349397, + "acc_norm_stderr": 0.03851597683718533 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.45980707395498394, + "acc_stderr": 0.028306190403305696, + "acc_norm": 0.45980707395498394, + "acc_norm_stderr": 0.028306190403305696 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.48878923766816146, + "acc_stderr": 0.033549366530984746, + "acc_norm": 0.48878923766816146, + "acc_norm_stderr": 0.033549366530984746 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.46564885496183206, + "acc_stderr": 0.04374928560599738, + "acc_norm": 0.46564885496183206, + "acc_norm_stderr": 0.04374928560599738 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5202020202020202, + "acc_stderr": 0.03559443565563918, + "acc_norm": 0.5202020202020202, + "acc_norm_stderr": 0.03559443565563918 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4896551724137931, + "acc_stderr": 0.04165774775728763, + "acc_norm": 0.4896551724137931, + "acc_norm_stderr": 0.04165774775728763 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.04617034827006716, + "acc_norm": 0.3137254901960784, + "acc_norm_stderr": 0.04617034827006716 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4579831932773109, + "acc_stderr": 0.03236361111951941, + "acc_norm": 0.4579831932773109, + "acc_norm_stderr": 0.03236361111951941 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 
0.4461538461538462, + "acc_stderr": 0.025203571773028333, + "acc_norm": 0.4461538461538462, + "acc_norm_stderr": 0.025203571773028333 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.65, + "acc_stderr": 0.04793724854411021, + "acc_norm": 0.65, + "acc_norm_stderr": 0.04793724854411021 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5185185185185185, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.5185185185185185, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.35467980295566504, + "acc_stderr": 0.03366124489051448, + "acc_norm": 0.35467980295566504, + "acc_norm_stderr": 0.03366124489051448 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.44516129032258067, + "acc_stderr": 0.028272410186214906, + "acc_norm": 0.44516129032258067, + "acc_norm_stderr": 0.028272410186214906 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6965811965811965, + "acc_stderr": 0.030118210106942656, + "acc_norm": 0.6965811965811965, + "acc_norm_stderr": 0.030118210106942656 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.44150943396226416, + "acc_stderr": 0.030561590426731833, + "acc_norm": 0.44150943396226416, + "acc_norm_stderr": 0.030561590426731833 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5181818181818182, + "acc_stderr": 0.04785964010794916, + "acc_norm": 0.5181818181818182, + "acc_norm_stderr": 0.04785964010794916 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2814814814814815, + "acc_stderr": 0.02742001935094527, + "acc_norm": 0.2814814814814815, + "acc_norm_stderr": 0.02742001935094527 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.03780445850526733, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.03780445850526733 + }, + 
"harness|ko_mmlu_sociology|5": { + "acc": 0.582089552238806, + "acc_stderr": 0.034875586404620636, + "acc_norm": 0.582089552238806, + "acc_norm_stderr": 0.034875586404620636 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3236994219653179, + "acc_stderr": 0.0356760379963917, + "acc_norm": 0.3236994219653179, + "acc_norm_stderr": 0.0356760379963917 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3783068783068783, + "acc_stderr": 0.024976954053155247, + "acc_norm": 0.3783068783068783, + "acc_norm_stderr": 0.024976954053155247 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.04076663253918567, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.04076663253918567 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.57, + "acc_stderr": 0.04975698519562426, + "acc_norm": 0.57, + "acc_norm_stderr": 0.04975698519562426 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5028901734104047, + "acc_stderr": 0.026918645383239015, + "acc_norm": 0.5028901734104047, + "acc_norm_stderr": 0.026918645383239015 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5153374233128835, + "acc_stderr": 0.03926522378708843, + "acc_norm": 0.5153374233128835, + "acc_norm_stderr": 0.03926522378708843 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4691358024691358, + "acc_stderr": 0.027767689606833942, + "acc_norm": 0.4691358024691358, + "acc_norm_stderr": 0.027767689606833942 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5181347150259067, + "acc_stderr": 0.036060650018329185, + "acc_norm": 0.5181347150259067, + "acc_norm_stderr": 0.036060650018329185 
+ }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.044346007015849245, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.044346007015849245 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.4954128440366973, + "acc_stderr": 0.021436420955529424, + "acc_norm": 0.4954128440366973, + "acc_norm_stderr": 0.021436420955529424 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3968253968253968, + "acc_stderr": 0.04375888492727061, + "acc_norm": 0.3968253968253968, + "acc_norm_stderr": 0.04375888492727061 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.48366013071895425, + "acc_stderr": 0.028614624752805407, + "acc_norm": 0.48366013071895425, + "acc_norm_stderr": 0.028614624752805407 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6115702479338843, + "acc_stderr": 0.04449270350068383, + "acc_norm": 0.6115702479338843, + "acc_norm_stderr": 0.04449270350068383 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40131578947368424, + "acc_stderr": 0.03988903703336285, + "acc_norm": 0.40131578947368424, + "acc_norm_stderr": 0.03988903703336285 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.42320261437908496, + "acc_stderr": 0.019987809769482064, + "acc_norm": 0.42320261437908496, + "acc_norm_stderr": 0.019987809769482064 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.35815602836879434, + "acc_stderr": 0.028602085862759412, + "acc_norm": 0.35815602836879434, + "acc_norm_stderr": 0.028602085862759412 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4375, + "acc_stderr": 0.04708567521880525, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.04708567521880525 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.032568505702936464, + "acc_norm": 0.35185185185185186, 
+ "acc_norm_stderr": 0.032568505702936464 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2223463687150838, + "acc_stderr": 0.013907189208156881, + "acc_norm": 0.2223463687150838, + "acc_norm_stderr": 0.013907189208156881 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.63, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.63, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.34191176470588236, + "acc_stderr": 0.028814722422254184, + "acc_norm": 0.34191176470588236, + "acc_norm_stderr": 0.028814722422254184 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5265306122448979, + "acc_stderr": 0.03196412734523272, + "acc_norm": 0.5265306122448979, + "acc_norm_stderr": 0.03196412734523272 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.569620253164557, + "acc_stderr": 0.03223017195937599, + "acc_norm": 0.569620253164557, + "acc_norm_stderr": 0.03223017195937599 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.34485006518904826, + "acc_stderr": 0.01213988100628706, + "acc_norm": 0.34485006518904826, + "acc_norm_stderr": 0.01213988100628706 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.034849415144292316, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.034849415144292316 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.03895658065271846, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.03895658065271846 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.31334149326805383, + "mc1_stderr": 0.01623806506905961, + "mc2": 0.4910419282897384, + "mc2_stderr": 0.015891313216487672 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.43565525383707204, + "acc_stderr": 
0.01704741522947632, + "acc_norm": 0.46635182998819363, + "acc_norm_stderr": 0.01715138411713187 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + 
"harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "teknium/OpenHermes-2-Mistral-7B", + "model_sha": "2bb0b75442eeadc2da3035a6ada86e3953308ac3", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/testmod/koen-llama2-13b-avg_testver/result_2024-01-31 04:43:06.json b/testmod/koen-llama2-13b-avg_testver/result_2024-01-31 04:43:06.json new file mode 100644 index 0000000000000000000000000000000000000000..f0e6a755eab4192f6ee7dec0a259dde48c635ec5 --- /dev/null +++ b/testmod/koen-llama2-13b-avg_testver/result_2024-01-31 04:43:06.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4249146757679181, + "acc_stderr": 0.01444569896852077, + "acc_norm": 0.48378839590443684, + "acc_norm_stderr": 0.014603708567414934 + }, + "harness|ko_hellaswag|10": { + "acc": 0.42929695279824737, + "acc_stderr": 0.004939642460172577, + "acc_norm": 0.5633339972117108, + "acc_norm_stderr": 0.004949589567678895 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5263157894736842, + "acc_stderr": 
0.03829509868994727, + "acc_norm": 0.5263157894736842, + "acc_norm_stderr": 0.03829509868994727 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6699029126213593, + "acc_stderr": 0.04656147110012351, + "acc_norm": 0.6699029126213593, + "acc_norm_stderr": 0.04656147110012351 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5517241379310345, + "acc_stderr": 0.017784034534992416, + "acc_norm": 0.5517241379310345, + "acc_norm_stderr": 0.017784034534992416 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.04292596718256981, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.04292596718256981 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4340425531914894, + "acc_stderr": 0.032400380867927465, + "acc_norm": 0.4340425531914894, + "acc_norm_stderr": 0.032400380867927465 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.43373493975903615, + "acc_stderr": 0.03858158940685515, + "acc_norm": 0.43373493975903615, + "acc_norm_stderr": 0.03858158940685515 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5530546623794212, + "acc_stderr": 0.028237769422085335, + "acc_norm": 0.5530546623794212, + "acc_norm_stderr": 0.028237769422085335 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.515695067264574, + "acc_stderr": 0.0335412657542081, + "acc_norm": 0.515695067264574, + "acc_norm_stderr": 0.0335412657542081 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5419847328244275, + "acc_stderr": 0.04369802690578757, + "acc_norm": 0.5419847328244275, + "acc_norm_stderr": 0.04369802690578757 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6515151515151515, + "acc_stderr": 
0.033948539651564025, + "acc_norm": 0.6515151515151515, + "acc_norm_stderr": 0.033948539651564025 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4896551724137931, + "acc_stderr": 0.04165774775728763, + "acc_norm": 0.4896551724137931, + "acc_norm_stderr": 0.04165774775728763 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.042801058373643945, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.042801058373643945 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.49159663865546216, + "acc_stderr": 0.03247390276569669, + "acc_norm": 0.49159663865546216, + "acc_norm_stderr": 0.03247390276569669 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5, + "acc_stderr": 0.02535100632816969, + "acc_norm": 0.5, + "acc_norm_stderr": 0.02535100632816969 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5740740740740741, + "acc_stderr": 0.0478034362693679, + "acc_norm": 0.5740740740740741, + "acc_norm_stderr": 0.0478034362693679 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3694581280788177, + "acc_stderr": 0.03395970381998574, + "acc_norm": 0.3694581280788177, + "acc_norm_stderr": 0.03395970381998574 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5258064516129032, + "acc_stderr": 0.02840609505765332, + "acc_norm": 0.5258064516129032, + "acc_norm_stderr": 0.02840609505765332 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7222222222222222, + "acc_stderr": 0.029343114798094455, + "acc_norm": 0.7222222222222222, + "acc_norm_stderr": 0.029343114798094455 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4867924528301887, + 
"acc_stderr": 0.030762134874500482, + "acc_norm": 0.4867924528301887, + "acc_norm_stderr": 0.030762134874500482 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5545454545454546, + "acc_stderr": 0.047605488214603246, + "acc_norm": 0.5545454545454546, + "acc_norm_stderr": 0.047605488214603246 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.29259259259259257, + "acc_stderr": 0.027738969632176095, + "acc_norm": 0.29259259259259257, + "acc_norm_stderr": 0.027738969632176095 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.271523178807947, + "acc_stderr": 0.03631329803969653, + "acc_norm": 0.271523178807947, + "acc_norm_stderr": 0.03631329803969653 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6467661691542289, + "acc_stderr": 0.03379790611796777, + "acc_norm": 0.6467661691542289, + "acc_norm_stderr": 0.03379790611796777 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4277456647398844, + "acc_stderr": 0.03772446857518027, + "acc_norm": 0.4277456647398844, + "acc_norm_stderr": 0.03772446857518027 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.32275132275132273, + "acc_stderr": 0.024078943243597016, + "acc_norm": 0.32275132275132273, + "acc_norm_stderr": 0.024078943243597016 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.04122728707651281, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.04122728707651281 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.71, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.71, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5433526011560693, + "acc_stderr": 0.026817718130348923, + "acc_norm": 0.5433526011560693, + "acc_norm_stderr": 0.026817718130348923 + }, + 
"harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4723926380368098, + "acc_stderr": 0.0392237829061099, + "acc_norm": 0.4723926380368098, + "acc_norm_stderr": 0.0392237829061099 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.49691358024691357, + "acc_stderr": 0.027820214158594384, + "acc_norm": 0.49691358024691357, + "acc_norm_stderr": 0.027820214158594384 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5803108808290155, + "acc_stderr": 0.035615873276858834, + "acc_norm": 0.5803108808290155, + "acc_norm_stderr": 0.035615873276858834 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.041857744240220575, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.041857744240220575 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5853211009174312, + "acc_stderr": 0.021122903208602585, + "acc_norm": 0.5853211009174312, + "acc_norm_stderr": 0.021122903208602585 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.04360314860077459, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.04360314860077459 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.48366013071895425, + "acc_stderr": 0.028614624752805407, + "acc_norm": 0.48366013071895425, + "acc_norm_stderr": 0.028614624752805407 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7107438016528925, + "acc_stderr": 0.041391127276354626, + "acc_norm": 0.7107438016528925, + "acc_norm_stderr": 0.041391127276354626 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.46710526315789475, + "acc_stderr": 0.040601270352363966, + "acc_norm": 0.46710526315789475, + 
"acc_norm_stderr": 0.040601270352363966 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3758169934640523, + "acc_stderr": 0.019594021136577447, + "acc_norm": 0.3758169934640523, + "acc_norm_stderr": 0.019594021136577447 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.02812163604063988, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.02812163604063988 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2767857142857143, + "acc_stderr": 0.042466243366976256, + "acc_norm": 0.2767857142857143, + "acc_norm_stderr": 0.042466243366976256 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.032757734861009996, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.032757734861009996 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2849162011173184, + "acc_stderr": 0.015096222302469806, + "acc_norm": 0.2849162011173184, + "acc_norm_stderr": 0.015096222302469806 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.02952009569768776, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.02952009569768776 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5714285714285714, + "acc_stderr": 0.03168091161233882, + "acc_norm": 0.5714285714285714, + "acc_norm_stderr": 0.03168091161233882 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6455696202531646, + "acc_stderr": 0.0311373042971858, + "acc_norm": 0.6455696202531646, + "acc_norm_stderr": 0.0311373042971858 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 
0.34419817470664926, + "acc_stderr": 0.012134433741002572, + "acc_norm": 0.34419817470664926, + "acc_norm_stderr": 0.012134433741002572 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5147058823529411, + "acc_stderr": 0.03507793834791324, + "acc_norm": 0.5147058823529411, + "acc_norm_stderr": 0.03507793834791324 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6060606060606061, + "acc_stderr": 0.038154943086889305, + "acc_norm": 0.6060606060606061, + "acc_norm_stderr": 0.038154943086889305 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.32802937576499386, + "mc1_stderr": 0.016435632932815043, + "mc2": 0.49521614493542, + "mc2_stderr": 0.0156523599659445 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5029515938606848, + "acc_stderr": 0.017190054580194694, + "acc_norm": 0.5171192443919717, + "acc_norm_stderr": 0.017180275246085633 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + 
"harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "testmod/koen-llama2-13b-avg_testver", + "model_sha": "1fdf9fb79122e157a9858234ca463d0fe7523d0e", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git 
a/testmod/koen-llama2-13b-dpotrain_testver/result_2024-01-31 07:13:03.json b/testmod/koen-llama2-13b-dpotrain_testver/result_2024-01-31 07:13:03.json new file mode 100644 index 0000000000000000000000000000000000000000..7552758033af678d571ba072f28fa5ad6c9a7d03 --- /dev/null +++ b/testmod/koen-llama2-13b-dpotrain_testver/result_2024-01-31 07:13:03.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.44283276450511944, + "acc_stderr": 0.014515573873348902, + "acc_norm": 0.5042662116040956, + "acc_norm_stderr": 0.014610858923956952 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4380601473809998, + "acc_stderr": 0.0049513463381645004, + "acc_norm": 0.5782712607050389, + "acc_norm_stderr": 0.004928263494616739 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5321637426900585, + "acc_stderr": 0.03826882417660369, + "acc_norm": 0.5321637426900585, + "acc_norm_stderr": 0.03826882417660369 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6504854368932039, + "acc_stderr": 0.047211885060971716, + "acc_norm": 0.6504854368932039, + "acc_norm_stderr": 0.047211885060971716 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5466155810983397, + "acc_stderr": 0.01780208713585031, + "acc_norm": 0.5466155810983397, + "acc_norm_stderr": 0.01780208713585031 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.43703703703703706, + "acc_stderr": 0.04284958639753399, + "acc_norm": 0.43703703703703706, + "acc_norm_stderr": 0.04284958639753399 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.42127659574468085, + "acc_stderr": 0.03227834510146268, + "acc_norm": 0.42127659574468085, + "acc_norm_stderr": 0.03227834510146268 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42168674698795183, + "acc_stderr": 0.03844453181770917, + "acc_norm": 0.42168674698795183, + "acc_norm_stderr": 
0.03844453181770917 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5434083601286174, + "acc_stderr": 0.028290869054197604, + "acc_norm": 0.5434083601286174, + "acc_norm_stderr": 0.028290869054197604 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.48878923766816146, + "acc_stderr": 0.033549366530984746, + "acc_norm": 0.48878923766816146, + "acc_norm_stderr": 0.033549366530984746 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5419847328244275, + "acc_stderr": 0.04369802690578756, + "acc_norm": 0.5419847328244275, + "acc_norm_stderr": 0.04369802690578756 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6515151515151515, + "acc_stderr": 0.033948539651564025, + "acc_norm": 0.6515151515151515, + "acc_norm_stderr": 0.033948539651564025 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.496551724137931, + "acc_stderr": 0.041665675771015785, + "acc_norm": 0.496551724137931, + "acc_norm_stderr": 0.041665675771015785 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.04158307533083286, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.04158307533083286 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.49159663865546216, + "acc_stderr": 0.03247390276569669, + "acc_norm": 0.49159663865546216, + "acc_norm_stderr": 0.03247390276569669 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.49230769230769234, + "acc_stderr": 0.0253480060315348, + "acc_norm": 0.49230769230769234, + "acc_norm_stderr": 0.0253480060315348 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + 
"acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5740740740740741, + "acc_stderr": 0.0478034362693679, + "acc_norm": 0.5740740740740741, + "acc_norm_stderr": 0.0478034362693679 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3645320197044335, + "acc_stderr": 0.0338640574606209, + "acc_norm": 0.3645320197044335, + "acc_norm_stderr": 0.0338640574606209 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5161290322580645, + "acc_stderr": 0.028429203176724555, + "acc_norm": 0.5161290322580645, + "acc_norm_stderr": 0.028429203176724555 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7264957264957265, + "acc_stderr": 0.029202540153431187, + "acc_norm": 0.7264957264957265, + "acc_norm_stderr": 0.029202540153431187 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5018867924528302, + "acc_stderr": 0.030772653642075657, + "acc_norm": 0.5018867924528302, + "acc_norm_stderr": 0.030772653642075657 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5181818181818182, + "acc_stderr": 0.04785964010794915, + "acc_norm": 0.5181818181818182, + "acc_norm_stderr": 0.04785964010794915 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.027840811495871923, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.027840811495871923 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.26490066225165565, + "acc_stderr": 0.03603038545360384, + "acc_norm": 0.26490066225165565, + "acc_norm_stderr": 0.03603038545360384 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6417910447761194, + "acc_stderr": 0.03390393042268814, + "acc_norm": 0.6417910447761194, + "acc_norm_stderr": 0.03390393042268814 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4161849710982659, + "acc_stderr": 0.03758517775404946, + "acc_norm": 0.4161849710982659, + "acc_norm_stderr": 0.03758517775404946 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 
0.335978835978836, + "acc_stderr": 0.024326310529149138, + "acc_norm": 0.335978835978836, + "acc_norm_stderr": 0.024326310529149138 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.04155319955593146, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.04155319955593146 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.71, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.71, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.569364161849711, + "acc_stderr": 0.02665880027367238, + "acc_norm": 0.569364161849711, + "acc_norm_stderr": 0.02665880027367238 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4723926380368098, + "acc_stderr": 0.03922378290610991, + "acc_norm": 0.4723926380368098, + "acc_norm_stderr": 0.03922378290610991 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.47530864197530864, + "acc_stderr": 0.02778680093142745, + "acc_norm": 0.47530864197530864, + "acc_norm_stderr": 0.02778680093142745 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5906735751295337, + "acc_stderr": 0.03548608168860806, + "acc_norm": 0.5906735751295337, + "acc_norm_stderr": 0.03548608168860806 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2982456140350877, + "acc_stderr": 0.043036840335373173, + "acc_norm": 0.2982456140350877, + "acc_norm_stderr": 0.043036840335373173 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5871559633027523, + "acc_stderr": 0.02110912813341392, + "acc_norm": 0.5871559633027523, + "acc_norm_stderr": 0.02110912813341392 + }, + "harness|ko_mmlu_formal_logic|5": { + 
"acc": 0.4126984126984127, + "acc_stderr": 0.04403438954768177, + "acc_norm": 0.4126984126984127, + "acc_norm_stderr": 0.04403438954768177 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5, + "acc_stderr": 0.028629916715693413, + "acc_norm": 0.5, + "acc_norm_stderr": 0.028629916715693413 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6611570247933884, + "acc_stderr": 0.04320767807536669, + "acc_norm": 0.6611570247933884, + "acc_norm_stderr": 0.04320767807536669 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4605263157894737, + "acc_stderr": 0.04056242252249033, + "acc_norm": 0.4605263157894737, + "acc_norm_stderr": 0.04056242252249033 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.38562091503267976, + "acc_stderr": 0.01969145905235415, + "acc_norm": 0.38562091503267976, + "acc_norm_stderr": 0.01969145905235415 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.02812163604063988, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.02812163604063988 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.29464285714285715, + "acc_stderr": 0.043270409325787296, + "acc_norm": 0.29464285714285715, + "acc_norm_stderr": 0.043270409325787296 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.03293377139415191, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.03293377139415191 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2916201117318436, + "acc_stderr": 0.015201032512520427, + "acc_norm": 0.2916201117318436, + "acc_norm_stderr": 0.015201032512520427 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + 
"harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3639705882352941, + "acc_stderr": 0.02922719246003203, + "acc_norm": 0.3639705882352941, + "acc_norm_stderr": 0.02922719246003203 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5591836734693878, + "acc_stderr": 0.03178419114175364, + "acc_norm": 0.5591836734693878, + "acc_norm_stderr": 0.03178419114175364 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6329113924050633, + "acc_stderr": 0.03137624072561619, + "acc_norm": 0.6329113924050633, + "acc_norm_stderr": 0.03137624072561619 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.34615384615384615, + "acc_stderr": 0.012150699768228582, + "acc_norm": 0.34615384615384615, + "acc_norm_stderr": 0.012150699768228582 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5245098039215687, + "acc_stderr": 0.035050931943487976, + "acc_norm": 0.5245098039215687, + "acc_norm_stderr": 0.035050931943487976 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.593939393939394, + "acc_stderr": 0.03834816355401181, + "acc_norm": 0.593939393939394, + "acc_norm_stderr": 0.03834816355401181 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.37209302325581395, + "mc1_stderr": 0.016921090118814035, + "mc2": 0.5396189268353753, + "mc2_stderr": 0.015733524971135917 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5206611570247934, + "acc_stderr": 0.017175671279836446, + "acc_norm": 0.5289256198347108, + "acc_norm_stderr": 0.017161563949916345 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + 
"harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + 
"harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "testmod/koen-llama2-13b-dpotrain_testver", + "model_sha": "d5eb1b93035c4bcf8255e82c20ccdd5decc3e7ba", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/testmod/koen-llama2-13b-sft_testver/result_2024-01-31 04:43:23.json b/testmod/koen-llama2-13b-sft_testver/result_2024-01-31 04:43:23.json new file mode 100644 index 0000000000000000000000000000000000000000..3fcc94190bab7be0e239f72da956deb90635b487 --- /dev/null +++ b/testmod/koen-llama2-13b-sft_testver/result_2024-01-31 04:43:23.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.42662116040955633, + "acc_stderr": 0.014453185592920293, + "acc_norm": 0.4803754266211604, + "acc_norm_stderr": 0.014600132075947094 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4302927703644692, + "acc_stderr": 0.004941051795214787, + "acc_norm": 0.5700059749053973, + "acc_norm_stderr": 0.0049406311358035325 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5263157894736842, + "acc_stderr": 0.03829509868994727, + "acc_norm": 0.5263157894736842, + "acc_norm_stderr": 0.03829509868994727 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5145631067961165, + "acc_stderr": 0.04948637324026637, + "acc_norm": 0.5145631067961165, + "acc_norm_stderr": 0.04948637324026637 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5517241379310345, + "acc_stderr": 0.01778403453499242, + "acc_norm": 
0.5517241379310345, + "acc_norm_stderr": 0.01778403453499242 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.042925967182569816, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.042925967182569816 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816507, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816507 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4, + "acc_stderr": 0.03202563076101735, + "acc_norm": 0.4, + "acc_norm_stderr": 0.03202563076101735 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3855421686746988, + "acc_stderr": 0.037891344246115496, + "acc_norm": 0.3855421686746988, + "acc_norm_stderr": 0.037891344246115496 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5048231511254019, + "acc_stderr": 0.028396770444111298, + "acc_norm": 0.5048231511254019, + "acc_norm_stderr": 0.028396770444111298 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.452914798206278, + "acc_stderr": 0.033408675019233246, + "acc_norm": 0.452914798206278, + "acc_norm_stderr": 0.033408675019233246 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48854961832061067, + "acc_stderr": 0.043841400240780176, + "acc_norm": 0.48854961832061067, + "acc_norm_stderr": 0.043841400240780176 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5404040404040404, + "acc_stderr": 0.035507024651313425, + "acc_norm": 0.5404040404040404, + "acc_norm_stderr": 0.035507024651313425 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.47586206896551725, + "acc_stderr": 0.041618085035015295, + "acc_norm": 0.47586206896551725, + "acc_norm_stderr": 0.041618085035015295 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.04220773659171452, + "acc_norm": 
0.23529411764705882, + "acc_norm_stderr": 0.04220773659171452 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.46638655462184875, + "acc_stderr": 0.03240501447690071, + "acc_norm": 0.46638655462184875, + "acc_norm_stderr": 0.03240501447690071 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.41794871794871796, + "acc_stderr": 0.025007329882461227, + "acc_norm": 0.41794871794871796, + "acc_norm_stderr": 0.025007329882461227 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.47, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.47, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.048262172941398944, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.048262172941398944 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39901477832512317, + "acc_stderr": 0.034454876862647144, + "acc_norm": 0.39901477832512317, + "acc_norm_stderr": 0.034454876862647144 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5064516129032258, + "acc_stderr": 0.02844163823354051, + "acc_norm": 0.5064516129032258, + "acc_norm_stderr": 0.02844163823354051 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6581196581196581, + "acc_stderr": 0.031075028526507748, + "acc_norm": 0.6581196581196581, + "acc_norm_stderr": 0.031075028526507748 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4716981132075472, + "acc_stderr": 0.030723535249006107, + "acc_norm": 0.4716981132075472, + "acc_norm_stderr": 0.030723535249006107 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5, + "acc_stderr": 0.04789131426105757, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04789131426105757 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 
0.026719240783712166, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.026719240783712166 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + "acc_stderr": 0.037345356767871984, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.037345356767871984 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5970149253731343, + "acc_stderr": 0.034683432951111266, + "acc_norm": 0.5970149253731343, + "acc_norm_stderr": 0.034683432951111266 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.43352601156069365, + "acc_stderr": 0.03778621079092055, + "acc_norm": 0.43352601156069365, + "acc_norm_stderr": 0.03778621079092055 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.29894179894179895, + "acc_stderr": 0.023577604791655795, + "acc_norm": 0.29894179894179895, + "acc_norm_stderr": 0.023577604791655795 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.039420826399272135, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.039420826399272135 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.62, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.62, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.49421965317919075, + "acc_stderr": 0.026917296179149116, + "acc_norm": 0.49421965317919075, + "acc_norm_stderr": 0.026917296179149116 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4662576687116564, + "acc_stderr": 0.039194155450484096, + "acc_norm": 0.4662576687116564, + "acc_norm_stderr": 0.039194155450484096 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4506172839506173, + "acc_stderr": 0.027684721415656203, + "acc_norm": 0.4506172839506173, + "acc_norm_stderr": 0.027684721415656203 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 
0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5233160621761658, + "acc_stderr": 0.036045136724422014, + "acc_norm": 0.5233160621761658, + "acc_norm_stderr": 0.036045136724422014 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.042270544512321984, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.042270544512321984 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5376146788990825, + "acc_stderr": 0.02137657527439758, + "acc_norm": 0.5376146788990825, + "acc_norm_stderr": 0.02137657527439758 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.04285714285714281, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.04285714285714281 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.49673202614379086, + "acc_stderr": 0.02862930519400354, + "acc_norm": 0.49673202614379086, + "acc_norm_stderr": 0.02862930519400354 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6115702479338843, + "acc_stderr": 0.04449270350068383, + "acc_norm": 0.6115702479338843, + "acc_norm_stderr": 0.04449270350068383 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4868421052631579, + "acc_stderr": 0.04067533136309174, + "acc_norm": 0.4868421052631579, + "acc_norm_stderr": 0.04067533136309174 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.35784313725490197, + "acc_stderr": 0.019393058402355435, + "acc_norm": 0.35784313725490197, + "acc_norm_stderr": 0.019393058402355435 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3120567375886525, + "acc_stderr": 0.027640120545169934, + "acc_norm": 0.3120567375886525, + "acc_norm_stderr": 0.027640120545169934 + }, + 
"harness|ko_mmlu_machine_learning|5": { + "acc": 0.25892857142857145, + "acc_stderr": 0.04157751539865629, + "acc_norm": 0.25892857142857145, + "acc_norm_stderr": 0.04157751539865629 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3287037037037037, + "acc_stderr": 0.032036140846700596, + "acc_norm": 0.3287037037037037, + "acc_norm_stderr": 0.032036140846700596 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2435754189944134, + "acc_stderr": 0.01435591196476786, + "acc_norm": 0.2435754189944134, + "acc_norm_stderr": 0.01435591196476786 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.33088235294117646, + "acc_stderr": 0.02858270975389842, + "acc_norm": 0.33088235294117646, + "acc_norm_stderr": 0.02858270975389842 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5020408163265306, + "acc_stderr": 0.0320089533497105, + "acc_norm": 0.5020408163265306, + "acc_norm_stderr": 0.0320089533497105 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5527426160337553, + "acc_stderr": 0.03236564251614192, + "acc_norm": 0.5527426160337553, + "acc_norm_stderr": 0.03236564251614192 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3070404172099087, + "acc_stderr": 0.011780959114513778, + "acc_norm": 0.3070404172099087, + "acc_norm_stderr": 0.011780959114513778 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.034849415144292316, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.034849415144292316 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5515151515151515, + "acc_stderr": 0.03883565977956928, + 
"acc_norm": 0.5515151515151515, + "acc_norm_stderr": 0.03883565977956928 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2692778457772338, + "mc1_stderr": 0.015528566637087302, + "mc2": 0.4283699176663712, + "mc2_stderr": 0.015070862440094028 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.512396694214876, + "acc_stderr": 0.01718506973267653, + "acc_norm": 0.5584415584415584, + "acc_norm_stderr": 0.017072525875563103 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + 
"harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "testmod/koen-llama2-13b-sft_testver", + "model_sha": "2511d97b253fd1e958aced0a2ea3378592ce8044", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/tlphams/gollm-12.8b-instruct-tendency-t45/result_2024-03-28 01:04:02.json b/tlphams/gollm-12.8b-instruct-tendency-t45/result_2024-03-28 01:04:02.json new file mode 100644 index 0000000000000000000000000000000000000000..48fdd5e4e92b6040e1afd5e18b0b0c8321d3eba5 --- /dev/null +++ b/tlphams/gollm-12.8b-instruct-tendency-t45/result_2024-03-28 01:04:02.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2986348122866894, + "acc_stderr": 0.01337407861506875, + "acc_norm": 
0.34982935153583616, + "acc_norm_stderr": 0.013936809212158292 + }, + "harness|ko_hellaswag|10": { + "acc": 0.40908185620394344, + "acc_stderr": 0.004906595857916761, + "acc_norm": 0.5320653256323441, + "acc_norm_stderr": 0.004979510001776618 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.03446296217088426, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.03446296217088426 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.24271844660194175, + "acc_stderr": 0.04245022486384495, + "acc_norm": 0.24271844660194175, + "acc_norm_stderr": 0.04245022486384495 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2656449553001277, + "acc_stderr": 0.01579430248788872, + "acc_norm": 0.2656449553001277, + "acc_norm_stderr": 0.01579430248788872 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.32592592592592595, + "acc_stderr": 0.040491220417025055, + "acc_norm": 0.32592592592592595, + "acc_norm_stderr": 0.040491220417025055 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2, + "acc_stderr": 0.026148818018424495, + "acc_norm": 0.2, + "acc_norm_stderr": 0.026148818018424495 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.20481927710843373, + "acc_stderr": 0.03141784291663926, + "acc_norm": 0.20481927710843373, + "acc_norm_stderr": 0.03141784291663926 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.29260450160771706, + "acc_stderr": 0.02583989833487798, + "acc_norm": 0.29260450160771706, + "acc_norm_stderr": 0.02583989833487798 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.15246636771300448, + "acc_stderr": 0.02412620481325287, + "acc_norm": 0.15246636771300448, + "acc_norm_stderr": 0.02412620481325287 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3053435114503817, + "acc_stderr": 0.04039314978724561, + "acc_norm": 
0.3053435114503817, + "acc_norm_stderr": 0.04039314978724561 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.26262626262626265, + "acc_stderr": 0.031353050095330834, + "acc_norm": 0.26262626262626265, + "acc_norm_stderr": 0.031353050095330834 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.31724137931034485, + "acc_stderr": 0.03878352372138622, + "acc_norm": 0.31724137931034485, + "acc_norm_stderr": 0.03878352372138622 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.04220773659171451, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.04220773659171451 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.27310924369747897, + "acc_stderr": 0.02894200404099817, + "acc_norm": 0.27310924369747897, + "acc_norm_stderr": 0.02894200404099817 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.25384615384615383, + "acc_stderr": 0.022066054378726257, + "acc_norm": 0.25384615384615383, + "acc_norm_stderr": 0.022066054378726257 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.21296296296296297, + "acc_stderr": 0.03957835471980981, + "acc_norm": 0.21296296296296297, + "acc_norm_stderr": 0.03957835471980981 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.24630541871921183, + "acc_stderr": 0.03031509928561773, + "acc_norm": 0.24630541871921183, + "acc_norm_stderr": 0.03031509928561773 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2806451612903226, + 
"acc_stderr": 0.025560604721022888, + "acc_norm": 0.2806451612903226, + "acc_norm_stderr": 0.025560604721022888 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.28205128205128205, + "acc_stderr": 0.029480360549541187, + "acc_norm": 0.28205128205128205, + "acc_norm_stderr": 0.029480360549541187 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2528301886792453, + "acc_stderr": 0.026749899771241238, + "acc_norm": 0.2528301886792453, + "acc_norm_stderr": 0.026749899771241238 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.24545454545454545, + "acc_stderr": 0.041220665028782855, + "acc_norm": 0.24545454545454545, + "acc_norm_stderr": 0.041220665028782855 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24444444444444444, + "acc_stderr": 0.02620276653465215, + "acc_norm": 0.24444444444444444, + "acc_norm_stderr": 0.02620276653465215 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + "acc_stderr": 0.037345356767871984, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.037345356767871984 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.21393034825870647, + "acc_stderr": 0.028996909693328923, + "acc_norm": 0.21393034825870647, + "acc_norm_stderr": 0.028996909693328923 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3063583815028902, + "acc_stderr": 0.03514942551267438, + "acc_norm": 0.3063583815028902, + "acc_norm_stderr": 0.03514942551267438 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2724867724867725, + "acc_stderr": 0.022930973071633345, + "acc_norm": 0.2724867724867725, + "acc_norm_stderr": 0.022930973071633345 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2638888888888889, + "acc_stderr": 0.03685651095897532, + "acc_norm": 0.2638888888888889, + "acc_norm_stderr": 0.03685651095897532 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + 
"harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.22832369942196531, + "acc_stderr": 0.022598703804321624, + "acc_norm": 0.22832369942196531, + "acc_norm_stderr": 0.022598703804321624 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.294478527607362, + "acc_stderr": 0.03581165790474082, + "acc_norm": 0.294478527607362, + "acc_norm_stderr": 0.03581165790474082 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.28703703703703703, + "acc_stderr": 0.02517104191530968, + "acc_norm": 0.28703703703703703, + "acc_norm_stderr": 0.02517104191530968 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.2694300518134715, + "acc_stderr": 0.03201867122877794, + "acc_norm": 0.2694300518134715, + "acc_norm_stderr": 0.03201867122877794 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.040969851398436695, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.040969851398436695 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.29724770642201837, + "acc_stderr": 0.019595707224643533, + "acc_norm": 0.29724770642201837, + "acc_norm_stderr": 0.019595707224643533 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.15079365079365079, + "acc_stderr": 0.03200686497287392, + "acc_norm": 0.15079365079365079, + "acc_norm_stderr": 0.03200686497287392 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.025646863097137894, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.025646863097137894 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + 
}, + "harness|ko_mmlu_international_law|5": { + "acc": 0.38016528925619836, + "acc_stderr": 0.04431324501968431, + "acc_norm": 0.38016528925619836, + "acc_norm_stderr": 0.04431324501968431 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.2894736842105263, + "acc_stderr": 0.03690677986137283, + "acc_norm": 0.2894736842105263, + "acc_norm_stderr": 0.03690677986137283 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.28104575163398693, + "acc_stderr": 0.018185218954318082, + "acc_norm": 0.28104575163398693, + "acc_norm_stderr": 0.018185218954318082 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2695035460992908, + "acc_stderr": 0.02646903681859063, + "acc_norm": 0.2695035460992908, + "acc_norm_stderr": 0.02646903681859063 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.20535714285714285, + "acc_stderr": 0.03834241021419074, + "acc_norm": 0.20535714285714285, + "acc_norm_stderr": 0.03834241021419074 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.27314814814814814, + "acc_stderr": 0.030388051301678116, + "acc_norm": 0.27314814814814814, + "acc_norm_stderr": 0.030388051301678116 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24804469273743016, + "acc_stderr": 0.014444157808261453, + "acc_norm": 0.24804469273743016, + "acc_norm_stderr": 0.014444157808261453 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3639705882352941, + "acc_stderr": 0.029227192460032025, + "acc_norm": 0.3639705882352941, + "acc_norm_stderr": 0.029227192460032025 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.24081632653061225, + "acc_stderr": 0.027372942201788163, + 
"acc_norm": 0.24081632653061225, + "acc_norm_stderr": 0.027372942201788163 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.29535864978902954, + "acc_stderr": 0.02969633871342289, + "acc_norm": 0.29535864978902954, + "acc_norm_stderr": 0.02969633871342289 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2503259452411995, + "acc_stderr": 0.011064151027165433, + "acc_norm": 0.2503259452411995, + "acc_norm_stderr": 0.011064151027165433 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.0309645179269234, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.0309645179269234 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.24848484848484848, + "acc_stderr": 0.03374402644139404, + "acc_norm": 0.24848484848484848, + "acc_norm_stderr": 0.03374402644139404 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2558139534883721, + "mc1_stderr": 0.015274176219283349, + "mc2": 0.4212861068555545, + "mc2_stderr": 0.015680084712746713 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.28807556080283353, + "acc_stderr": 0.015569869674838364, + "acc_norm": 0.3187721369539551, + "acc_norm_stderr": 0.01602142705530958 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + 
"harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "tlphams/gollm-12.8b-instruct-tendency-t45", + 
"model_sha": "7f34e8e79116eb0d6e51072d67a3d9586513ec0f", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/tlphams/gollm-12.8b-instruct-v2.1/result_2023-11-01 04:43:20.json b/tlphams/gollm-12.8b-instruct-v2.1/result_2023-11-01 04:43:20.json new file mode 100644 index 0000000000000000000000000000000000000000..809d0ca94cdaa78cad07cb9ef127f9e2c1f28bdd --- /dev/null +++ b/tlphams/gollm-12.8b-instruct-v2.1/result_2023-11-01 04:43:20.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.24914675767918087, + "acc_stderr": 0.012639407111926435, + "acc_norm": 0.29692832764505117, + "acc_norm_stderr": 0.013352025976725222 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3407687711611233, + "acc_stderr": 0.004729990807895056, + "acc_norm": 0.41804421429994026, + "acc_norm_stderr": 0.004922294797766664 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.28654970760233917, + "acc_stderr": 0.03467826685703826, + "acc_norm": 0.28654970760233917, + "acc_norm_stderr": 0.03467826685703826 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.27184466019417475, + "acc_stderr": 0.044052680241409216, + "acc_norm": 0.27184466019417475, + "acc_norm_stderr": 0.044052680241409216 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2848020434227331, + "acc_stderr": 0.016139174096522577, + "acc_norm": 0.2848020434227331, + "acc_norm_stderr": 0.016139174096522577 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.14814814814814814, + "acc_stderr": 0.03068864761035268, + "acc_norm": 0.14814814814814814, + "acc_norm_stderr": 0.03068864761035268 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2553191489361702, + "acc_stderr": 
0.028504856470514196, + "acc_norm": 0.2553191489361702, + "acc_norm_stderr": 0.028504856470514196 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3493975903614458, + "acc_stderr": 0.03711725190740748, + "acc_norm": 0.3493975903614458, + "acc_norm_stderr": 0.03711725190740748 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2861736334405145, + "acc_stderr": 0.025670259242188957, + "acc_norm": 0.2861736334405145, + "acc_norm_stderr": 0.025670259242188957 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3183856502242152, + "acc_stderr": 0.03126580522513713, + "acc_norm": 0.3183856502242152, + "acc_norm_stderr": 0.03126580522513713 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2366412213740458, + "acc_stderr": 0.03727673575596916, + "acc_norm": 0.2366412213740458, + "acc_norm_stderr": 0.03727673575596916 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.24242424242424243, + "acc_stderr": 0.030532892233932036, + "acc_norm": 0.24242424242424243, + "acc_norm_stderr": 0.030532892233932036 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.25517241379310346, + "acc_stderr": 0.03632984052707842, + "acc_norm": 0.25517241379310346, + "acc_norm_stderr": 0.03632984052707842 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.18627450980392157, + "acc_stderr": 0.038739587141493524, + "acc_norm": 0.18627450980392157, + "acc_norm_stderr": 0.038739587141493524 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.25210084033613445, + "acc_stderr": 0.028205545033277726, + "acc_norm": 0.25210084033613445, + "acc_norm_stderr": 0.028205545033277726 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.28974358974358977, + "acc_stderr": 0.02300062824368797, + "acc_norm": 0.28974358974358977, + "acc_norm_stderr": 0.02300062824368797 + }, + 
"harness|ko_mmlu_computer_security|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.042365112580946336, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.042365112580946336 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2561576354679803, + "acc_stderr": 0.030712730070982592, + "acc_norm": 0.2561576354679803, + "acc_norm_stderr": 0.030712730070982592 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.25806451612903225, + "acc_stderr": 0.024892469172462843, + "acc_norm": 0.25806451612903225, + "acc_norm_stderr": 0.024892469172462843 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.21794871794871795, + "acc_stderr": 0.02704685763071668, + "acc_norm": 0.21794871794871795, + "acc_norm_stderr": 0.02704685763071668 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.19622641509433963, + "acc_stderr": 0.024442388131100817, + "acc_norm": 0.19622641509433963, + "acc_norm_stderr": 0.024442388131100817 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2909090909090909, + "acc_stderr": 0.04350271442923243, + "acc_norm": 0.2909090909090909, + "acc_norm_stderr": 0.04350271442923243 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.29259259259259257, + "acc_stderr": 0.02773896963217609, + "acc_norm": 0.29259259259259257, + "acc_norm_stderr": 0.02773896963217609 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.17218543046357615, + "acc_stderr": 0.03082613696196239, + "acc_norm": 0.17218543046357615, + "acc_norm_stderr": 0.03082613696196239 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.26865671641791045, + "acc_stderr": 0.03134328358208954, + "acc_norm": 0.26865671641791045, + 
"acc_norm_stderr": 0.03134328358208954 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2254335260115607, + "acc_stderr": 0.031862098516411426, + "acc_norm": 0.2254335260115607, + "acc_norm_stderr": 0.031862098516411426 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.022644212615525218, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.022644212615525218 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2569444444444444, + "acc_stderr": 0.03653946969442099, + "acc_norm": 0.2569444444444444, + "acc_norm_stderr": 0.03653946969442099 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036844, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036844 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.24855491329479767, + "acc_stderr": 0.023267528432100174, + "acc_norm": 0.24855491329479767, + "acc_norm_stderr": 0.023267528432100174 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.26380368098159507, + "acc_stderr": 0.03462419931615624, + "acc_norm": 0.26380368098159507, + "acc_norm_stderr": 0.03462419931615624 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2839506172839506, + "acc_stderr": 0.025089478523765134, + "acc_norm": 0.2839506172839506, + "acc_norm_stderr": 0.025089478523765134 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.23834196891191708, + "acc_stderr": 0.030748905363909895, + "acc_norm": 0.23834196891191708, + "acc_norm_stderr": 0.030748905363909895 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.042270544512322004, + "acc_norm": 
0.2807017543859649, + "acc_norm_stderr": 0.042270544512322004 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.1963302752293578, + "acc_stderr": 0.01703071933915435, + "acc_norm": 0.1963302752293578, + "acc_norm_stderr": 0.01703071933915435 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.15079365079365079, + "acc_stderr": 0.03200686497287392, + "acc_norm": 0.15079365079365079, + "acc_norm_stderr": 0.03200686497287392 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.24183006535947713, + "acc_stderr": 0.024518195641879334, + "acc_norm": 0.24183006535947713, + "acc_norm_stderr": 0.024518195641879334 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847415, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847415 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.256198347107438, + "acc_stderr": 0.039849796533028704, + "acc_norm": 0.256198347107438, + "acc_norm_stderr": 0.039849796533028704 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.19078947368421054, + "acc_stderr": 0.031975658210325, + "acc_norm": 0.19078947368421054, + "acc_norm_stderr": 0.031975658210325 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.016819028375736383, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.016819028375736383 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.24468085106382978, + "acc_stderr": 0.025645553622266733, + "acc_norm": 0.24468085106382978, + "acc_norm_stderr": 0.025645553622266733 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2767857142857143, + "acc_stderr": 0.04246624336697623, + "acc_norm": 0.2767857142857143, + "acc_norm_stderr": 0.04246624336697623 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.39351851851851855, + "acc_stderr": 0.03331747876370312, + "acc_norm": 0.39351851851851855, + "acc_norm_stderr": 0.03331747876370312 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 
0.24134078212290502, + "acc_stderr": 0.01431099954796147, + "acc_norm": 0.24134078212290502, + "acc_norm_stderr": 0.01431099954796147 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.35294117647058826, + "acc_stderr": 0.029029422815681407, + "acc_norm": 0.35294117647058826, + "acc_norm_stderr": 0.029029422815681407 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.22448979591836735, + "acc_stderr": 0.026711430555538422, + "acc_norm": 0.22448979591836735, + "acc_norm_stderr": 0.026711430555538422 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.27848101265822783, + "acc_stderr": 0.029178682304842544, + "acc_norm": 0.27848101265822783, + "acc_norm_stderr": 0.029178682304842544 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2392438070404172, + "acc_stderr": 0.010896123652676653, + "acc_norm": 0.2392438070404172, + "acc_norm_stderr": 0.010896123652676653 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.031660096793998116, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.031660096793998116 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2545454545454545, + "acc_stderr": 0.0340150671524904, + "acc_norm": 0.2545454545454545, + "acc_norm_stderr": 0.0340150671524904 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24357405140758873, + "mc1_stderr": 0.015026354824910782, + "mc2": 0.38968108562447634, + "mc2_stderr": 0.01596435745027793 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.32585596221959856, + "acc_stderr": 0.016114023894800336, + "acc_norm": 0.3955135773317591, + "acc_norm_stderr": 
0.016810815902206042 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + 
"harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "tlphams/gollm-12.8b-instruct-v2.1", + "model_sha": "cf0ea50740783b1c6791e9fd8afdc9bdfebadf35", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/tlphams/gollm-12.8b-instruct-v2.3/result_2023-11-08 03:54:19.json b/tlphams/gollm-12.8b-instruct-v2.3/result_2023-11-08 03:54:19.json new file mode 100644 index 0000000000000000000000000000000000000000..954bde025941cb6e7d3a1cc7739dd6220f2fb09b --- /dev/null +++ b/tlphams/gollm-12.8b-instruct-v2.3/result_2023-11-08 03:54:19.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.24914675767918087, + "acc_stderr": 0.01263940711192643, + "acc_norm": 0.2960750853242321, + "acc_norm_stderr": 0.013340916085246271 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3414658434574786, + "acc_stderr": 0.004732322172153751, + "acc_norm": 0.4298944433379805, + "acc_norm_stderr": 0.004940490508240642 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.18128654970760233, + "acc_stderr": 0.029547741687640024, + "acc_norm": 0.18128654970760233, + "acc_norm_stderr": 0.029547741687640024 + }, 
+ "harness|ko_mmlu_management|5": { + "acc": 0.34951456310679613, + "acc_stderr": 0.04721188506097171, + "acc_norm": 0.34951456310679613, + "acc_norm_stderr": 0.04721188506097171 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.20945083014048532, + "acc_stderr": 0.014551310568143698, + "acc_norm": 0.20945083014048532, + "acc_norm_stderr": 0.014551310568143698 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.22962962962962963, + "acc_stderr": 0.036333844140734636, + "acc_norm": 0.22962962962962963, + "acc_norm_stderr": 0.036333844140734636 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.20425531914893616, + "acc_stderr": 0.02635515841334942, + "acc_norm": 0.20425531914893616, + "acc_norm_stderr": 0.02635515841334942 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.2469879518072289, + "acc_stderr": 0.03357351982064537, + "acc_norm": 0.2469879518072289, + "acc_norm_stderr": 0.03357351982064537 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.24115755627009647, + "acc_stderr": 0.024296594034763426, + "acc_norm": 0.24115755627009647, + "acc_norm_stderr": 0.024296594034763426 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.10762331838565023, + "acc_stderr": 0.020799400082880004, + "acc_norm": 0.10762331838565023, + "acc_norm_stderr": 0.020799400082880004 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.2748091603053435, + "acc_stderr": 0.039153454088478354, + "acc_norm": 0.2748091603053435, + "acc_norm_stderr": 0.039153454088478354 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.35353535353535354, + "acc_stderr": 0.03406086723547153, + "acc_norm": 0.35353535353535354, + "acc_norm_stderr": 0.03406086723547153 
+ }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2413793103448276, + "acc_stderr": 0.03565998174135302, + "acc_norm": 0.2413793103448276, + "acc_norm_stderr": 0.03565998174135302 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.04617034827006717, + "acc_norm": 0.3137254901960784, + "acc_norm_stderr": 0.04617034827006717 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3487394957983193, + "acc_stderr": 0.030956636328566548, + "acc_norm": 0.3487394957983193, + "acc_norm_stderr": 0.030956636328566548 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.3641025641025641, + "acc_stderr": 0.024396672985094778, + "acc_norm": 0.3641025641025641, + "acc_norm_stderr": 0.024396672985094778 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.18, + "acc_stderr": 0.03861229196653694, + "acc_norm": 0.18, + "acc_norm_stderr": 0.03861229196653694 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.19, + "acc_stderr": 0.039427724440366234, + "acc_norm": 0.19, + "acc_norm_stderr": 0.039427724440366234 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.21296296296296297, + "acc_stderr": 0.03957835471980981, + "acc_norm": 0.21296296296296297, + "acc_norm_stderr": 0.03957835471980981 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.27586206896551724, + "acc_stderr": 0.03144712581678242, + "acc_norm": 0.27586206896551724, + "acc_norm_stderr": 0.03144712581678242 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3161290322580645, + "acc_stderr": 0.026450874489042764, + "acc_norm": 0.3161290322580645, + "acc_norm_stderr": 0.026450874489042764 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.19658119658119658, + "acc_stderr": 0.02603538609895129, + "acc_norm": 0.19658119658119658, + "acc_norm_stderr": 0.02603538609895129 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.30943396226415093, + "acc_stderr": 0.02845015479411863, + "acc_norm": 
0.30943396226415093, + "acc_norm_stderr": 0.02845015479411863 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.22727272727272727, + "acc_stderr": 0.040139645540727735, + "acc_norm": 0.22727272727272727, + "acc_norm_stderr": 0.040139645540727735 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.02684205787383371, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.02684205787383371 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33112582781456956, + "acc_stderr": 0.038425817186598696, + "acc_norm": 0.33112582781456956, + "acc_norm_stderr": 0.038425817186598696 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.26865671641791045, + "acc_stderr": 0.03134328358208954, + "acc_norm": 0.26865671641791045, + "acc_norm_stderr": 0.03134328358208954 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3352601156069364, + "acc_stderr": 0.03599586301247078, + "acc_norm": 0.3352601156069364, + "acc_norm_stderr": 0.03599586301247078 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.26455026455026454, + "acc_stderr": 0.02271746789770861, + "acc_norm": 0.26455026455026454, + "acc_norm_stderr": 0.02271746789770861 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2638888888888889, + "acc_stderr": 0.03685651095897532, + "acc_norm": 0.2638888888888889, + "acc_norm_stderr": 0.03685651095897532 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2138728323699422, + "acc_stderr": 0.022075709251757173, + "acc_norm": 0.2138728323699422, + "acc_norm_stderr": 0.022075709251757173 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2331288343558282, + 
"acc_stderr": 0.0332201579577674, + "acc_norm": 0.2331288343558282, + "acc_norm_stderr": 0.0332201579577674 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.023132376234543325, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.023132376234543325 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.36787564766839376, + "acc_stderr": 0.034801756684660366, + "acc_norm": 0.36787564766839376, + "acc_norm_stderr": 0.034801756684660366 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.039994238792813365, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.039994238792813365 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3467889908256881, + "acc_stderr": 0.02040609710409303, + "acc_norm": 0.3467889908256881, + "acc_norm_stderr": 0.02040609710409303 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3492063492063492, + "acc_stderr": 0.04263906892795132, + "acc_norm": 0.3492063492063492, + "acc_norm_stderr": 0.04263906892795132 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.2908496732026144, + "acc_stderr": 0.026004800363952113, + "acc_norm": 0.2908496732026144, + "acc_norm_stderr": 0.026004800363952113 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.14049586776859505, + "acc_stderr": 0.031722334260021585, + "acc_norm": 0.14049586776859505, + "acc_norm_stderr": 0.031722334260021585 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3355263157894737, + "acc_stderr": 0.038424985593952694, + "acc_norm": 0.3355263157894737, + "acc_norm_stderr": 0.038424985593952694 + }, + 
"harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2173202614379085, + "acc_stderr": 0.016684820929148598, + "acc_norm": 0.2173202614379085, + "acc_norm_stderr": 0.016684820929148598 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.22695035460992907, + "acc_stderr": 0.02498710636564297, + "acc_norm": 0.22695035460992907, + "acc_norm_stderr": 0.02498710636564297 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.16071428571428573, + "acc_stderr": 0.034859460964757394, + "acc_norm": 0.16071428571428573, + "acc_norm_stderr": 0.034859460964757394 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.0340470532865388, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.0340470532865388 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27262569832402234, + "acc_stderr": 0.014893391735249608, + "acc_norm": 0.27262569832402234, + "acc_norm_stderr": 0.014893391735249608 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.19, + "acc_stderr": 0.03942772444036624, + "acc_norm": 0.19, + "acc_norm_stderr": 0.03942772444036624 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4485294117647059, + "acc_stderr": 0.030211479609121593, + "acc_norm": 0.4485294117647059, + "acc_norm_stderr": 0.030211479609121593 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4, + "acc_stderr": 0.03136250240935892, + "acc_norm": 0.4, + "acc_norm_stderr": 0.03136250240935892 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.20253164556962025, + "acc_stderr": 0.026160568246601464, + "acc_norm": 0.20253164556962025, + "acc_norm_stderr": 0.026160568246601464 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.22294654498044328, + "acc_stderr": 0.010630525747386089, + "acc_norm": 
0.22294654498044328, + "acc_norm_stderr": 0.010630525747386089 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.029771775228145638, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.029771775228145638 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.22424242424242424, + "acc_stderr": 0.032568666616811015, + "acc_norm": 0.22424242424242424, + "acc_norm_stderr": 0.032568666616811015 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.22766217870257038, + "mc1_stderr": 0.01467925503211107, + "mc2": 0.3871459989979698, + "mc2_stderr": 0.014973315901005473 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3246753246753247, + "acc_stderr": 0.016098883939346463, + "acc_norm": 0.41086186540731995, + "acc_norm_stderr": 0.016914972767841062 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "tlphams/gollm-12.8b-instruct-v2.3", + "model_sha": "3eb75691c0365fe231a8666291a6ae3178d88b4b", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/tlphams/gollm-instruct-all-in-one-v1/result_2023-10-04 06:14:50.json b/tlphams/gollm-instruct-all-in-one-v1/result_2023-10-04 
06:14:50.json new file mode 100644 index 0000000000000000000000000000000000000000..571b9d0d93278cc2a5202231c808929ff67be409 --- /dev/null +++ b/tlphams/gollm-instruct-all-in-one-v1/result_2023-10-04 06:14:50.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.25853242320819114, + "acc_stderr": 0.012794553754288677, + "acc_norm": 0.31569965870307165, + "acc_norm_stderr": 0.013582571095815293 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3498307110137423, + "acc_stderr": 0.004759416464201141, + "acc_norm": 0.43178649671380204, + "acc_norm_stderr": 0.0049431275832905125 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.15789473684210525, + "acc_stderr": 0.027966785859160872, + "acc_norm": 0.15789473684210525, + "acc_norm_stderr": 0.027966785859160872 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.34951456310679613, + "acc_stderr": 0.047211885060971716, + "acc_norm": 0.34951456310679613, + "acc_norm_stderr": 0.047211885060971716 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.20689655172413793, + "acc_stderr": 0.014485656041669164, + "acc_norm": 0.20689655172413793, + "acc_norm_stderr": 0.014485656041669164 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.13333333333333333, + "acc_stderr": 0.029365879728106822, + "acc_norm": 0.13333333333333333, + "acc_norm_stderr": 0.029365879728106822 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2425531914893617, + "acc_stderr": 0.028020226271200217, + "acc_norm": 0.2425531914893617, + "acc_norm_stderr": 0.028020226271200217 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.21084337349397592, + "acc_stderr": 0.03175554786629919, + "acc_norm": 0.21084337349397592, + "acc_norm_stderr": 0.03175554786629919 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.24437299035369775, + "acc_stderr": 
0.024406162094668882, + "acc_norm": 0.24437299035369775, + "acc_norm_stderr": 0.024406162094668882 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.2242152466367713, + "acc_stderr": 0.027991534258519527, + "acc_norm": 0.2242152466367713, + "acc_norm_stderr": 0.027991534258519527 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.31297709923664124, + "acc_stderr": 0.04066962905677697, + "acc_norm": 0.31297709923664124, + "acc_norm_stderr": 0.04066962905677697 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768077, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768077 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.23737373737373738, + "acc_stderr": 0.030313710538198885, + "acc_norm": 0.23737373737373738, + "acc_norm_stderr": 0.030313710538198885 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2482758620689655, + "acc_stderr": 0.03600105692727772, + "acc_norm": 0.2482758620689655, + "acc_norm_stderr": 0.03600105692727772 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.04835503696107223, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.04835503696107223 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.31932773109243695, + "acc_stderr": 0.0302839955258844, + "acc_norm": 0.31932773109243695, + "acc_norm_stderr": 0.0302839955258844 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.35384615384615387, + "acc_stderr": 0.02424378399406217, + "acc_norm": 0.35384615384615387, + "acc_norm_stderr": 0.02424378399406217 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.17, + "acc_stderr": 0.03775251680686371, + "acc_norm": 0.17, + "acc_norm_stderr": 0.03775251680686371 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 
0.2222222222222222, + "acc_stderr": 0.040191074725573483, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.040191074725573483 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2561576354679803, + "acc_stderr": 0.0307127300709826, + "acc_norm": 0.2561576354679803, + "acc_norm_stderr": 0.0307127300709826 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3, + "acc_stderr": 0.026069362295335137, + "acc_norm": 0.3, + "acc_norm_stderr": 0.026069362295335137 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.19230769230769232, + "acc_stderr": 0.02581923325648373, + "acc_norm": 0.19230769230769232, + "acc_norm_stderr": 0.02581923325648373 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2792452830188679, + "acc_stderr": 0.027611163402399715, + "acc_norm": 0.2792452830188679, + "acc_norm_stderr": 0.027611163402399715 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.24545454545454545, + "acc_stderr": 0.041220665028782834, + "acc_norm": 0.24545454545454545, + "acc_norm_stderr": 0.041220665028782834 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.22592592592592592, + "acc_stderr": 0.025497532639609542, + "acc_norm": 0.22592592592592592, + "acc_norm_stderr": 0.025497532639609542 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.24503311258278146, + "acc_stderr": 0.035118075718047245, + "acc_norm": 0.24503311258278146, + "acc_norm_stderr": 0.035118075718047245 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.2736318407960199, + "acc_stderr": 0.03152439186555401, + "acc_norm": 0.2736318407960199, + "acc_norm_stderr": 0.03152439186555401 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.27167630057803466, + "acc_stderr": 0.03391750322321659, + "acc_norm": 0.27167630057803466, + "acc_norm_stderr": 0.03391750322321659 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.18518518518518517, + "acc_stderr": 0.020006075494524416, + "acc_norm": 0.18518518518518517, + "acc_norm_stderr": 
0.020006075494524416 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.25, + "acc_stderr": 0.03621034121889507, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03621034121889507 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2138728323699422, + "acc_stderr": 0.022075709251757177, + "acc_norm": 0.2138728323699422, + "acc_norm_stderr": 0.022075709251757177 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.25766871165644173, + "acc_stderr": 0.03436150827846917, + "acc_norm": 0.25766871165644173, + "acc_norm_stderr": 0.03436150827846917 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.22530864197530864, + "acc_stderr": 0.02324620264781975, + "acc_norm": 0.22530864197530864, + "acc_norm_stderr": 0.02324620264781975 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.34196891191709844, + "acc_stderr": 0.03423465100104284, + "acc_norm": 0.34196891191709844, + "acc_norm_stderr": 0.03423465100104284 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.039994238792813365, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.039994238792813365 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.30825688073394497, + "acc_stderr": 0.019798366698367268, + "acc_norm": 0.30825688073394497, + "acc_norm_stderr": 0.019798366698367268 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.04040610178208841, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 
0.04040610178208841 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.30392156862745096, + "acc_stderr": 0.026336613469046633, + "acc_norm": 0.30392156862745096, + "acc_norm_stderr": 0.026336613469046633 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.19, + "acc_stderr": 0.03942772444036623, + "acc_norm": 0.19, + "acc_norm_stderr": 0.03942772444036623 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.21487603305785125, + "acc_stderr": 0.03749492448709698, + "acc_norm": 0.21487603305785125, + "acc_norm_stderr": 0.03749492448709698 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.34210526315789475, + "acc_stderr": 0.03860731599316091, + "acc_norm": 0.34210526315789475, + "acc_norm_stderr": 0.03860731599316091 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.21405228758169934, + "acc_stderr": 0.01659342966232903, + "acc_norm": 0.21405228758169934, + "acc_norm_stderr": 0.01659342966232903 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.22340425531914893, + "acc_stderr": 0.024847921358063962, + "acc_norm": 0.22340425531914893, + "acc_norm_stderr": 0.024847921358063962 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.16964285714285715, + "acc_stderr": 0.03562367850095391, + "acc_norm": 0.16964285714285715, + "acc_norm_stderr": 0.03562367850095391 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.39351851851851855, + "acc_stderr": 0.03331747876370312, + "acc_norm": 0.39351851851851855, + "acc_norm_stderr": 0.03331747876370312 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.26033519553072626, + "acc_stderr": 0.014676252009319464, + "acc_norm": 0.26033519553072626, + "acc_norm_stderr": 0.014676252009319464 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932269, + 
"acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932269 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4522058823529412, + "acc_stderr": 0.030233758551596452, + "acc_norm": 0.4522058823529412, + "acc_norm_stderr": 0.030233758551596452 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3224489795918367, + "acc_stderr": 0.02992310056368391, + "acc_norm": 0.3224489795918367, + "acc_norm_stderr": 0.02992310056368391 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.25738396624472576, + "acc_stderr": 0.028458820991460295, + "acc_norm": 0.25738396624472576, + "acc_norm_stderr": 0.028458820991460295 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.22685788787483702, + "acc_stderr": 0.01069634813356993, + "acc_norm": 0.22685788787483702, + "acc_norm_stderr": 0.01069634813356993 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.25980392156862747, + "acc_stderr": 0.030778554678693247, + "acc_norm": 0.25980392156862747, + "acc_norm_stderr": 0.030778554678693247 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.03453131801885415, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.03453131801885415 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.22643818849449204, + "mc1_stderr": 0.014651337324602602, + "mc2": 0.40483124709618634, + "mc2_stderr": 0.015674854564799542 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.32113341204250295, + "acc_stderr": 0.016052762579111562, + "acc_norm": 0.36717827626918537, + "acc_norm_stderr": 0.016572727807458592 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + 
"harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 
1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "tlphams/gollm-instruct-all-in-one-v1", + "model_sha": "44937fddb3168a387b55173371b365a0b280ae3e", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/tlphams/solar-10.7b-merged-v0.1/result_2024-03-29 08:57:58.json b/tlphams/solar-10.7b-merged-v0.1/result_2024-03-29 08:57:58.json new file mode 100644 index 0000000000000000000000000000000000000000..b906adf56b02853265d8439e8e6792d22a80894f --- /dev/null +++ b/tlphams/solar-10.7b-merged-v0.1/result_2024-03-29 08:57:58.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.6450511945392492, + "acc_stderr": 0.013983036904094099, + "acc_norm": 0.697098976109215, + "acc_norm_stderr": 0.013428241573185349 + }, + "harness|ko_hellaswag|10": { + "acc": 0.5314678350926111, + "acc_stderr": 0.004979889597551663, + "acc_norm": 0.6972714598685521, + "acc_norm_stderr": 0.00458499793536044 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6900584795321637, + "acc_stderr": 0.035469769593931624, + "acc_norm": 0.6900584795321637, + "acc_norm_stderr": 0.035469769593931624 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.7572815533980582, + "acc_stderr": 0.04245022486384495, + "acc_norm": 0.7572815533980582, + "acc_norm_stderr": 0.04245022486384495 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.7011494252873564, + "acc_stderr": 0.016369256815093103, + "acc_norm": 0.7011494252873564, + "acc_norm_stderr": 0.016369256815093103 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 
0.5037037037037037, + "acc_stderr": 0.043192236258113324, + "acc_norm": 0.5037037037037037, + "acc_norm_stderr": 0.043192236258113324 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.5106382978723404, + "acc_stderr": 0.03267862331014063, + "acc_norm": 0.5106382978723404, + "acc_norm_stderr": 0.03267862331014063 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.5120481927710844, + "acc_stderr": 0.038913644958358175, + "acc_norm": 0.5120481927710844, + "acc_norm_stderr": 0.038913644958358175 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6366559485530546, + "acc_stderr": 0.027316847674192714, + "acc_norm": 0.6366559485530546, + "acc_norm_stderr": 0.027316847674192714 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.6816143497757847, + "acc_stderr": 0.03126580522513713, + "acc_norm": 0.6816143497757847, + "acc_norm_stderr": 0.03126580522513713 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6183206106870229, + "acc_stderr": 0.04260735157644561, + "acc_norm": 0.6183206106870229, + "acc_norm_stderr": 0.04260735157644561 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956914, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956914 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7626262626262627, + "acc_stderr": 0.030313710538198917, + "acc_norm": 0.7626262626262627, + "acc_norm_stderr": 0.030313710538198917 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5655172413793104, + "acc_stderr": 0.04130740879555497, + "acc_norm": 0.5655172413793104, + "acc_norm_stderr": 0.04130740879555497 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04690650298201942, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04690650298201942 + }, + 
"harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6680672268907563, + "acc_stderr": 0.03058869701378364, + "acc_norm": 0.6680672268907563, + "acc_norm_stderr": 0.03058869701378364 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.6410256410256411, + "acc_stderr": 0.02432173848460235, + "acc_norm": 0.6410256410256411, + "acc_norm_stderr": 0.02432173848460235 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.71, + "acc_stderr": 0.04560480215720685, + "acc_norm": 0.71, + "acc_norm_stderr": 0.04560480215720685 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6574074074074074, + "acc_stderr": 0.04587904741301812, + "acc_norm": 0.6574074074074074, + "acc_norm_stderr": 0.04587904741301812 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.458128078817734, + "acc_stderr": 0.03505630140785741, + "acc_norm": 0.458128078817734, + "acc_norm_stderr": 0.03505630140785741 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6064516129032258, + "acc_stderr": 0.027791878753132264, + "acc_norm": 0.6064516129032258, + "acc_norm_stderr": 0.027791878753132264 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.8290598290598291, + "acc_stderr": 0.024662496845209804, + "acc_norm": 0.8290598290598291, + "acc_norm_stderr": 0.024662496845209804 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.6075471698113207, + "acc_stderr": 0.030052580579557845, + "acc_norm": 0.6075471698113207, + "acc_norm_stderr": 0.030052580579557845 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6454545454545455, + "acc_stderr": 0.045820048415054174, + "acc_norm": 0.6454545454545455, + "acc_norm_stderr": 0.045820048415054174 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.02944316932303154, + "acc_norm": 0.37037037037037035, + 
"acc_norm_stderr": 0.02944316932303154 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.36423841059602646, + "acc_stderr": 0.03929111781242741, + "acc_norm": 0.36423841059602646, + "acc_norm_stderr": 0.03929111781242741 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7164179104477612, + "acc_stderr": 0.03187187537919796, + "acc_norm": 0.7164179104477612, + "acc_norm_stderr": 0.03187187537919796 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5202312138728323, + "acc_stderr": 0.03809342081273957, + "acc_norm": 0.5202312138728323, + "acc_norm_stderr": 0.03809342081273957 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.46825396825396826, + "acc_stderr": 0.025699352832131792, + "acc_norm": 0.46825396825396826, + "acc_norm_stderr": 0.025699352832131792 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.6388888888888888, + "acc_stderr": 0.04016660030451233, + "acc_norm": 0.6388888888888888, + "acc_norm_stderr": 0.04016660030451233 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.77, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.77, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.6040462427745664, + "acc_stderr": 0.026329813341946243, + "acc_norm": 0.6040462427745664, + "acc_norm_stderr": 0.026329813341946243 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.6196319018404908, + "acc_stderr": 0.038142698932618374, + "acc_norm": 0.6196319018404908, + "acc_norm_stderr": 0.038142698932618374 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6975308641975309, + "acc_stderr": 0.02555765398186806, + "acc_norm": 0.6975308641975309, + "acc_norm_stderr": 0.02555765398186806 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + 
"acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.772020725388601, + "acc_stderr": 0.03027690994517826, + "acc_norm": 0.772020725388601, + "acc_norm_stderr": 0.03027690994517826 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.49122807017543857, + "acc_stderr": 0.04702880432049615, + "acc_norm": 0.49122807017543857, + "acc_norm_stderr": 0.04702880432049615 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.7541284403669725, + "acc_stderr": 0.018461940968708464, + "acc_norm": 0.7541284403669725, + "acc_norm_stderr": 0.018461940968708464 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.0442626668137991, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.0442626668137991 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.6013071895424836, + "acc_stderr": 0.028036092273891772, + "acc_norm": 0.6013071895424836, + "acc_norm_stderr": 0.028036092273891772 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.73, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.73, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7272727272727273, + "acc_stderr": 0.04065578140908706, + "acc_norm": 0.7272727272727273, + "acc_norm_stderr": 0.04065578140908706 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.6644736842105263, + "acc_stderr": 0.03842498559395268, + "acc_norm": 0.6644736842105263, + "acc_norm_stderr": 0.03842498559395268 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5866013071895425, + "acc_stderr": 0.019922115682786685, + "acc_norm": 0.5866013071895425, + "acc_norm_stderr": 0.019922115682786685 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.4326241134751773, + "acc_stderr": 0.02955545423677885, + "acc_norm": 0.4326241134751773, + "acc_norm_stderr": 0.02955545423677885 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4642857142857143, + 
"acc_stderr": 0.04733667890053756, + "acc_norm": 0.4642857142857143, + "acc_norm_stderr": 0.04733667890053756 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.034076320938540496, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.034076320938540496 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.3888268156424581, + "acc_stderr": 0.016303899530796116, + "acc_norm": 0.3888268156424581, + "acc_norm_stderr": 0.016303899530796116 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956913, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956913 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.7, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.7, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5294117647058824, + "acc_stderr": 0.03032024326500413, + "acc_norm": 0.5294117647058824, + "acc_norm_stderr": 0.03032024326500413 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6693877551020408, + "acc_stderr": 0.030116426296540624, + "acc_norm": 0.6693877551020408, + "acc_norm_stderr": 0.030116426296540624 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7383966244725738, + "acc_stderr": 0.028609516716994934, + "acc_norm": 0.7383966244725738, + "acc_norm_stderr": 0.028609516716994934 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.4452411994784876, + "acc_stderr": 0.012693421303973294, + "acc_norm": 0.4452411994784876, + "acc_norm_stderr": 0.012693421303973294 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6715686274509803, + "acc_stderr": 0.032962451101722294, + "acc_norm": 0.6715686274509803, + "acc_norm_stderr": 0.032962451101722294 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6242424242424243, + "acc_stderr": 0.03781887353205983, + "acc_norm": 0.6242424242424243, + "acc_norm_stderr": 
0.03781887353205983 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.6340269277845777, + "mc1_stderr": 0.01686294168408839, + "mc2": 0.7511193169029672, + "mc2_stderr": 0.014258897448244724 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4757969303423849, + "acc_stderr": 0.017170202466520745, + "acc_norm": 0.4970484061393152, + "acc_norm_stderr": 0.017190054580194694 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + 
"harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "tlphams/solar-10.7b-merged-v0.1", + "model_sha": "bd1a9e09c6184c1cc6433274adfbd6c70c3a477c", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/tlphams/solar-10.7b-merged-v0.2/result_2024-04-17 08:36:25.json b/tlphams/solar-10.7b-merged-v0.2/result_2024-04-17 08:36:25.json new file mode 100644 index 0000000000000000000000000000000000000000..b9938c7f5dcebdd4aa46fabb771a8e5983a77d20 --- /dev/null +++ b/tlphams/solar-10.7b-merged-v0.2/result_2024-04-17 08:36:25.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.6117747440273038, + "acc_stderr": 0.014241614207414035, + "acc_norm": 0.6655290102389079, + "acc_norm_stderr": 0.013787460322441375 + }, + 
"harness|ko_hellaswag|10": { + "acc": 0.46504680342561244, + "acc_stderr": 0.0049775741884213204, + "acc_norm": 0.6314479187412866, + "acc_norm_stderr": 0.004814261966376847 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.7660818713450293, + "acc_stderr": 0.03246721765117826, + "acc_norm": 0.7660818713450293, + "acc_norm_stderr": 0.03246721765117826 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.7184466019417476, + "acc_stderr": 0.044532548363264673, + "acc_norm": 0.7184466019417476, + "acc_norm_stderr": 0.044532548363264673 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.7420178799489144, + "acc_stderr": 0.01564583018834895, + "acc_norm": 0.7420178799489144, + "acc_norm_stderr": 0.01564583018834895 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.043163785995113245, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.043163785995113245 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206824, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206824 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.6, + "acc_stderr": 0.032025630761017373, + "acc_norm": 0.6, + "acc_norm_stderr": 0.032025630761017373 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.5240963855421686, + "acc_stderr": 0.038879718495972646, + "acc_norm": 0.5240963855421686, + "acc_norm_stderr": 0.038879718495972646 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6559485530546624, + "acc_stderr": 0.02698147804364805, + "acc_norm": 0.6559485530546624, + "acc_norm_stderr": 0.02698147804364805 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.6547085201793722, + "acc_stderr": 0.03191100192835794, + "acc_norm": 0.6547085201793722, + "acc_norm_stderr": 0.03191100192835794 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.7022900763358778, + "acc_stderr": 0.04010358942462203, + "acc_norm": 0.7022900763358778, + "acc_norm_stderr": 0.04010358942462203 + }, + 
"harness|ko_mmlu_medical_genetics|5": { + "acc": 0.48, + "acc_stderr": 0.05021167315686779, + "acc_norm": 0.48, + "acc_norm_stderr": 0.05021167315686779 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7878787878787878, + "acc_stderr": 0.029126522834586815, + "acc_norm": 0.7878787878787878, + "acc_norm_stderr": 0.029126522834586815 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5448275862068965, + "acc_stderr": 0.04149886942192117, + "acc_norm": 0.5448275862068965, + "acc_norm_stderr": 0.04149886942192117 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04690650298201942, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04690650298201942 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6932773109243697, + "acc_stderr": 0.029953823891887037, + "acc_norm": 0.6932773109243697, + "acc_norm_stderr": 0.029953823891887037 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.6538461538461539, + "acc_stderr": 0.0241211254169412, + "acc_norm": 0.6538461538461539, + "acc_norm_stderr": 0.0241211254169412 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.65, + "acc_stderr": 0.04793724854411022, + "acc_norm": 0.65, + "acc_norm_stderr": 0.04793724854411022 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6574074074074074, + "acc_stderr": 0.04587904741301811, + "acc_norm": 0.6574074074074074, + "acc_norm_stderr": 0.04587904741301811 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.47783251231527096, + "acc_stderr": 0.03514528562175008, + "acc_norm": 0.47783251231527096, + "acc_norm_stderr": 0.03514528562175008 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.7064516129032258, + "acc_stderr": 0.025906087021319288, + "acc_norm": 0.7064516129032258, + "acc_norm_stderr": 
0.025906087021319288 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.8418803418803419, + "acc_stderr": 0.02390232554956041, + "acc_norm": 0.8418803418803419, + "acc_norm_stderr": 0.02390232554956041 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.6075471698113207, + "acc_stderr": 0.03005258057955784, + "acc_norm": 0.6075471698113207, + "acc_norm_stderr": 0.03005258057955784 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.04607582090719976, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.04607582090719976 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.029116617606083018, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.029116617606083018 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.37748344370860926, + "acc_stderr": 0.0395802723112157, + "acc_norm": 0.37748344370860926, + "acc_norm_stderr": 0.0395802723112157 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7611940298507462, + "acc_stderr": 0.03014777593540922, + "acc_norm": 0.7611940298507462, + "acc_norm_stderr": 0.03014777593540922 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5549132947976878, + "acc_stderr": 0.03789401760283647, + "acc_norm": 0.5549132947976878, + "acc_norm_stderr": 0.03789401760283647 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.4656084656084656, + "acc_stderr": 0.025690321762493855, + "acc_norm": 0.4656084656084656, + "acc_norm_stderr": 0.025690321762493855 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.6180555555555556, + "acc_stderr": 0.040629907841466674, + "acc_norm": 0.6180555555555556, + "acc_norm_stderr": 0.040629907841466674 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.77, + "acc_stderr": 0.04229525846816506, + "acc_norm": 
0.77, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.653179190751445, + "acc_stderr": 0.025624723994030457, + "acc_norm": 0.653179190751445, + "acc_norm_stderr": 0.025624723994030457 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.6503067484662577, + "acc_stderr": 0.03746668325470021, + "acc_norm": 0.6503067484662577, + "acc_norm_stderr": 0.03746668325470021 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6851851851851852, + "acc_stderr": 0.025842248700902164, + "acc_norm": 0.6851851851851852, + "acc_norm_stderr": 0.025842248700902164 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7875647668393783, + "acc_stderr": 0.029519282616817227, + "acc_norm": 0.7875647668393783, + "acc_norm_stderr": 0.029519282616817227 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.4824561403508772, + "acc_stderr": 0.04700708033551038, + "acc_norm": 0.4824561403508772, + "acc_norm_stderr": 0.04700708033551038 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.7779816513761468, + "acc_stderr": 0.01781884956479661, + "acc_norm": 0.7779816513761468, + "acc_norm_stderr": 0.01781884956479661 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.5238095238095238, + "acc_stderr": 0.04467062628403273, + "acc_norm": 0.5238095238095238, + "acc_norm_stderr": 0.04467062628403273 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.630718954248366, + "acc_stderr": 0.027634176689602656, + "acc_norm": 0.630718954248366, + "acc_norm_stderr": 0.027634176689602656 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.68, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.68, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7851239669421488, + "acc_stderr": 0.037494924487096966, + 
"acc_norm": 0.7851239669421488, + "acc_norm_stderr": 0.037494924487096966 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.618421052631579, + "acc_stderr": 0.03953173377749194, + "acc_norm": 0.618421052631579, + "acc_norm_stderr": 0.03953173377749194 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.6062091503267973, + "acc_stderr": 0.01976621199107307, + "acc_norm": 0.6062091503267973, + "acc_norm_stderr": 0.01976621199107307 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.45390070921985815, + "acc_stderr": 0.02970045324729147, + "acc_norm": 0.45390070921985815, + "acc_norm_stderr": 0.02970045324729147 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.49107142857142855, + "acc_stderr": 0.04745033255489122, + "acc_norm": 0.49107142857142855, + "acc_norm_stderr": 0.04745033255489122 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5833333333333334, + "acc_stderr": 0.03362277436608043, + "acc_norm": 0.5833333333333334, + "acc_norm_stderr": 0.03362277436608043 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2837988826815642, + "acc_stderr": 0.015078358970751753, + "acc_norm": 0.2837988826815642, + "acc_norm_stderr": 0.015078358970751753 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.74, + "acc_stderr": 0.04408440022768081, + "acc_norm": 0.74, + "acc_norm_stderr": 0.04408440022768081 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5772058823529411, + "acc_stderr": 0.030008562845003476, + "acc_norm": 0.5772058823529411, + "acc_norm_stderr": 0.030008562845003476 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.7224489795918367, + "acc_stderr": 0.02866685779027465, + "acc_norm": 0.7224489795918367, + "acc_norm_stderr": 0.02866685779027465 + }, + "harness|ko_mmlu_high_school_world_history|5": { + 
"acc": 0.7637130801687764, + "acc_stderr": 0.027652153144159256, + "acc_norm": 0.7637130801687764, + "acc_norm_stderr": 0.027652153144159256 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.45045632333767927, + "acc_stderr": 0.012707390438502348, + "acc_norm": 0.45045632333767927, + "acc_norm_stderr": 0.012707390438502348 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.696078431372549, + "acc_stderr": 0.03228210387037892, + "acc_norm": 0.696078431372549, + "acc_norm_stderr": 0.03228210387037892 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.696969696969697, + "acc_stderr": 0.035886248000917095, + "acc_norm": 0.696969696969697, + "acc_norm_stderr": 0.035886248000917095 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.48225214198286415, + "mc1_stderr": 0.017492470843075366, + "mc2": 0.6047078469613301, + "mc2_stderr": 0.015439201999910569 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4757969303423849, + "acc_stderr": 0.017170202466520748, + "acc_norm": 0.49940968122786306, + "acc_norm_stderr": 0.017190342123448662 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + 
"harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "tlphams/solar-10.7b-merged-v0.2", + "model_sha": "9fd5f9bd3165da5cb024ecb948931876e27f134f", + "model_dtype": "torch.float16", + "lighteval_sha": "", + 
"num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/tmdduq/komt-mistral-7b-v1-dpo-osy-v1/result_2023-12-24 15:10:29.json b/tmdduq/komt-mistral-7b-v1-dpo-osy-v1/result_2023-12-24 15:10:29.json new file mode 100644 index 0000000000000000000000000000000000000000..552ddd8a5dd2b4dc452bfbd57615ddb0f73cc989 --- /dev/null +++ b/tmdduq/komt-mistral-7b-v1-dpo-osy-v1/result_2023-12-24 15:10:29.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.32849829351535836, + "acc_stderr": 0.013724978465537357, + "acc_norm": 0.3839590443686007, + "acc_norm_stderr": 0.01421244498065189 + }, + "harness|ko_hellaswag|10": { + "acc": 0.36964748058155744, + "acc_stderr": 0.004817227292240288, + "acc_norm": 0.4805815574586736, + "acc_norm_stderr": 0.00498601693867853 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.03811079669833531, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.03811079669833531 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5048543689320388, + "acc_stderr": 0.04950504382128921, + "acc_norm": 0.5048543689320388, + "acc_norm_stderr": 0.04950504382128921 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4661558109833972, + "acc_stderr": 0.017838956009136802, + "acc_norm": 0.4661558109833972, + "acc_norm_stderr": 0.017838956009136802 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04072314811876837, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04072314811876837 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3617021276595745, + "acc_stderr": 0.031410821975962386, + "acc_norm": 0.3617021276595745, + "acc_norm_stderr": 0.031410821975962386 + }, + 
"harness|ko_mmlu_virology|5": { + "acc": 0.39759036144578314, + "acc_stderr": 0.038099730845402184, + "acc_norm": 0.39759036144578314, + "acc_norm_stderr": 0.038099730845402184 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4565916398713826, + "acc_stderr": 0.0282908690541976, + "acc_norm": 0.4565916398713826, + "acc_norm_stderr": 0.0282908690541976 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.35874439461883406, + "acc_stderr": 0.03219079200419995, + "acc_norm": 0.35874439461883406, + "acc_norm_stderr": 0.03219079200419995 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3816793893129771, + "acc_stderr": 0.0426073515764456, + "acc_norm": 0.3816793893129771, + "acc_norm_stderr": 0.0426073515764456 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.51010101010101, + "acc_stderr": 0.035616254886737454, + "acc_norm": 0.51010101010101, + "acc_norm_stderr": 0.035616254886737454 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.04104269211806232, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.04104269211806232 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.043898699568087785, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.043898699568087785 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.032252942323996406, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.032252942323996406 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4076923076923077, + "acc_stderr": 0.024915243985987844, + "acc_norm": 0.4076923076923077, + "acc_norm_stderr": 0.024915243985987844 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.53, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.53, + 
"acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3891625615763547, + "acc_stderr": 0.03430462416103872, + "acc_norm": 0.3891625615763547, + "acc_norm_stderr": 0.03430462416103872 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.42258064516129035, + "acc_stderr": 0.028100964724272638, + "acc_norm": 0.42258064516129035, + "acc_norm_stderr": 0.028100964724272638 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6709401709401709, + "acc_stderr": 0.030782321577688173, + "acc_norm": 0.6709401709401709, + "acc_norm_stderr": 0.030782321577688173 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.44150943396226416, + "acc_stderr": 0.03056159042673183, + "acc_norm": 0.44150943396226416, + "acc_norm_stderr": 0.03056159042673183 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4727272727272727, + "acc_stderr": 0.04782001791380063, + "acc_norm": 0.4727272727272727, + "acc_norm_stderr": 0.04782001791380063 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3111111111111111, + "acc_stderr": 0.028226446749683515, + "acc_norm": 0.3111111111111111, + "acc_norm_stderr": 0.028226446749683515 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3576158940397351, + "acc_stderr": 0.03913453431177258, + "acc_norm": 0.3576158940397351, + "acc_norm_stderr": 0.03913453431177258 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5024875621890548, + "acc_stderr": 0.03535490150137289, + "acc_norm": 0.5024875621890548, + "acc_norm_stderr": 0.03535490150137289 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.34104046242774566, + "acc_stderr": 0.03614665424180826, + "acc_norm": 
0.34104046242774566, + "acc_norm_stderr": 0.03614665424180826 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.29894179894179895, + "acc_stderr": 0.02357760479165582, + "acc_norm": 0.29894179894179895, + "acc_norm_stderr": 0.02357760479165582 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2916666666666667, + "acc_stderr": 0.038009680605548594, + "acc_norm": 0.2916666666666667, + "acc_norm_stderr": 0.038009680605548594 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.48554913294797686, + "acc_stderr": 0.026907849856282532, + "acc_norm": 0.48554913294797686, + "acc_norm_stderr": 0.026907849856282532 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4110429447852761, + "acc_stderr": 0.038656978537853624, + "acc_norm": 0.4110429447852761, + "acc_norm_stderr": 0.038656978537853624 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.37962962962962965, + "acc_stderr": 0.027002521034516475, + "acc_norm": 0.37962962962962965, + "acc_norm_stderr": 0.027002521034516475 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.45077720207253885, + "acc_stderr": 0.03590910952235524, + "acc_norm": 0.45077720207253885, + "acc_norm_stderr": 0.03590910952235524 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3157894736842105, + "acc_stderr": 0.043727482902780085, + "acc_norm": 0.3157894736842105, + "acc_norm_stderr": 0.043727482902780085 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.46238532110091746, + "acc_stderr": 
0.02137657527439757, + "acc_norm": 0.46238532110091746, + "acc_norm_stderr": 0.02137657527439757 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.040061680838488774, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.040061680838488774 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.42810457516339867, + "acc_stderr": 0.028332397483664274, + "acc_norm": 0.42810457516339867, + "acc_norm_stderr": 0.028332397483664274 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.41, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.41, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5867768595041323, + "acc_stderr": 0.04495087843548408, + "acc_norm": 0.5867768595041323, + "acc_norm_stderr": 0.04495087843548408 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3684210526315789, + "acc_stderr": 0.03925523381052932, + "acc_norm": 0.3684210526315789, + "acc_norm_stderr": 0.03925523381052932 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.33169934640522875, + "acc_stderr": 0.01904748523936038, + "acc_norm": 0.33169934640522875, + "acc_norm_stderr": 0.01904748523936038 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32978723404255317, + "acc_stderr": 0.028045946942042405, + "acc_norm": 0.32978723404255317, + "acc_norm_stderr": 0.028045946942042405 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.32142857142857145, + "acc_stderr": 0.044328040552915206, + "acc_norm": 0.32142857142857145, + "acc_norm_stderr": 0.044328040552915206 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.42592592592592593, + "acc_stderr": 0.03372343271653062, + "acc_norm": 0.42592592592592593, + "acc_norm_stderr": 0.03372343271653062 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.3005586592178771, + "acc_stderr": 0.015334566806251166, + "acc_norm": 0.3005586592178771, + "acc_norm_stderr": 0.015334566806251166 + }, + 
"harness|ko_mmlu_college_computer_science|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.31985294117647056, + "acc_stderr": 0.02833295951403122, + "acc_norm": 0.31985294117647056, + "acc_norm_stderr": 0.02833295951403122 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5061224489795918, + "acc_stderr": 0.03200682020163906, + "acc_norm": 0.5061224489795918, + "acc_norm_stderr": 0.03200682020163906 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.48523206751054854, + "acc_stderr": 0.032533028078777386, + "acc_norm": 0.48523206751054854, + "acc_norm_stderr": 0.032533028078777386 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3109517601043025, + "acc_stderr": 0.011822252917799203, + "acc_norm": 0.3109517601043025, + "acc_norm_stderr": 0.011822252917799203 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.032566854844603886, + "acc_norm": 0.3137254901960784, + "acc_norm_stderr": 0.032566854844603886 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.036810508691615486, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.036810508691615486 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3402692778457772, + "mc1_stderr": 0.016586304901762557, + "mc2": 0.5212110856919681, + "mc2_stderr": 0.015656809915743582 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.40731995277449823, + "acc_stderr": 0.01689245669519127, + "acc_norm": 0.4604486422668241, + "acc_norm_stderr": 0.017136487626049853 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + 
"harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + 
"harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "tmdduq/komt-mistral-7b-v1-dpo-osy-v1", + "model_sha": "fe2121c7d75e73671e1f995dea7728ba8b6a4588", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/uukuguy/speechless-code-mistral-7b-v1.0/result_2024-06-14 05:17:19.json b/uukuguy/speechless-code-mistral-7b-v1.0/result_2024-06-14 05:17:19.json new file mode 100644 index 0000000000000000000000000000000000000000..88d88b38f806cdbe4ab61fb2ce3e2f95879b630d --- /dev/null +++ b/uukuguy/speechless-code-mistral-7b-v1.0/result_2024-06-14 05:17:19.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3447098976109215, + "acc_stderr": 0.01388881628678211, + "acc_norm": 0.3856655290102389, + "acc_norm_stderr": 0.014224250973257182 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3694483170683131, + "acc_stderr": 0.004816690123209754, + "acc_norm": 0.476000796654053, + "acc_norm_stderr": 0.0049840302505072915 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4678362573099415, + "acc_stderr": 0.03826882417660368, + "acc_norm": 0.4678362573099415, + "acc_norm_stderr": 0.03826882417660368 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4854368932038835, + "acc_stderr": 0.04948637324026637, + "acc_norm": 
0.4854368932038835, + "acc_norm_stderr": 0.04948637324026637 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4789272030651341, + "acc_stderr": 0.017864076786212896, + "acc_norm": 0.4789272030651341, + "acc_norm_stderr": 0.017864076786212896 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.35555555555555557, + "acc_stderr": 0.04135176749720386, + "acc_norm": 0.35555555555555557, + "acc_norm_stderr": 0.04135176749720386 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3702127659574468, + "acc_stderr": 0.03156564682236785, + "acc_norm": 0.3702127659574468, + "acc_norm_stderr": 0.03156564682236785 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3855421686746988, + "acc_stderr": 0.037891344246115496, + "acc_norm": 0.3855421686746988, + "acc_norm_stderr": 0.037891344246115496 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4790996784565916, + "acc_stderr": 0.028373270961069414, + "acc_norm": 0.4790996784565916, + "acc_norm_stderr": 0.028373270961069414 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.47085201793721976, + "acc_stderr": 0.03350073248773404, + "acc_norm": 0.47085201793721976, + "acc_norm_stderr": 0.03350073248773404 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.45038167938931295, + "acc_stderr": 0.04363643698524779, + "acc_norm": 0.45038167938931295, + "acc_norm_stderr": 0.04363643698524779 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.41, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.41, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.035402943770953675, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.035402943770953675 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.47586206896551725, + "acc_stderr": 0.041618085035015295, + 
"acc_norm": 0.47586206896551725, + "acc_norm_stderr": 0.041618085035015295 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.044405219061793275, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.044405219061793275 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5210084033613446, + "acc_stderr": 0.032449808499900284, + "acc_norm": 0.5210084033613446, + "acc_norm_stderr": 0.032449808499900284 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4794871794871795, + "acc_stderr": 0.025329663163489943, + "acc_norm": 0.4794871794871795, + "acc_norm_stderr": 0.025329663163489943 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.63, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.63, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4039408866995074, + "acc_stderr": 0.03452453903822039, + "acc_norm": 0.4039408866995074, + "acc_norm_stderr": 0.03452453903822039 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.45161290322580644, + "acc_stderr": 0.028310500348568392, + "acc_norm": 0.45161290322580644, + "acc_norm_stderr": 0.028310500348568392 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7393162393162394, + "acc_stderr": 0.02876034895652341, + "acc_norm": 0.7393162393162394, + "acc_norm_stderr": 0.02876034895652341 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4490566037735849, + "acc_stderr": 0.030612730713641095, + "acc_norm": 0.4490566037735849, + "acc_norm_stderr": 0.030612730713641095 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.509090909090909, + "acc_stderr": 
0.04788339768702861, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.027940457136228395, + "acc_norm": 0.3, + "acc_norm_stderr": 0.027940457136228395 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33112582781456956, + "acc_stderr": 0.038425817186598696, + "acc_norm": 0.33112582781456956, + "acc_norm_stderr": 0.038425817186598696 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6119402985074627, + "acc_stderr": 0.03445789964362749, + "acc_norm": 0.6119402985074627, + "acc_norm_stderr": 0.03445789964362749 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3930635838150289, + "acc_stderr": 0.03724249595817731, + "acc_norm": 0.3930635838150289, + "acc_norm_stderr": 0.03724249595817731 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.35978835978835977, + "acc_stderr": 0.024718075944129277, + "acc_norm": 0.35978835978835977, + "acc_norm_stderr": 0.024718075944129277 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.375, + "acc_stderr": 0.04048439222695598, + "acc_norm": 0.375, + "acc_norm_stderr": 0.04048439222695598 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.57, + "acc_stderr": 0.04975698519562426, + "acc_norm": 0.57, + "acc_norm_stderr": 0.04975698519562426 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5173410404624278, + "acc_stderr": 0.026902900458666654, + "acc_norm": 0.5173410404624278, + "acc_norm_stderr": 0.026902900458666654 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4723926380368098, + "acc_stderr": 0.03922378290610991, + "acc_norm": 0.4723926380368098, + "acc_norm_stderr": 0.03922378290610991 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.02774431344337654, + 
"acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.02774431344337654 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.49222797927461137, + "acc_stderr": 0.03608003225569654, + "acc_norm": 0.49222797927461137, + "acc_norm_stderr": 0.03608003225569654 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3157894736842105, + "acc_stderr": 0.04372748290278007, + "acc_norm": 0.3157894736842105, + "acc_norm_stderr": 0.04372748290278007 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5045871559633027, + "acc_stderr": 0.02143642095552942, + "acc_norm": 0.5045871559633027, + "acc_norm_stderr": 0.02143642095552942 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.0442626668137991, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.0442626668137991 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.49019607843137253, + "acc_stderr": 0.028624412550167958, + "acc_norm": 0.49019607843137253, + "acc_norm_stderr": 0.028624412550167958 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6859504132231405, + "acc_stderr": 0.04236964753041018, + "acc_norm": 0.6859504132231405, + "acc_norm_stderr": 0.04236964753041018 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.48026315789473684, + "acc_stderr": 0.040657710025626057, + "acc_norm": 0.48026315789473684, + "acc_norm_stderr": 0.040657710025626057 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.41013071895424835, + "acc_stderr": 0.019898412717635903, + "acc_norm": 0.41013071895424835, + "acc_norm_stderr": 0.019898412717635903 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 
0.37943262411347517, + "acc_stderr": 0.0289473388516141, + "acc_norm": 0.37943262411347517, + "acc_norm_stderr": 0.0289473388516141 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4017857142857143, + "acc_stderr": 0.04653333146973646, + "acc_norm": 0.4017857142857143, + "acc_norm_stderr": 0.04653333146973646 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.42592592592592593, + "acc_stderr": 0.033723432716530624, + "acc_norm": 0.42592592592592593, + "acc_norm_stderr": 0.033723432716530624 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24804469273743016, + "acc_stderr": 0.014444157808261453, + "acc_norm": 0.24804469273743016, + "acc_norm_stderr": 0.014444157808261453 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.63, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.63, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.31985294117647056, + "acc_stderr": 0.028332959514031218, + "acc_norm": 0.31985294117647056, + "acc_norm_stderr": 0.028332959514031218 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4816326530612245, + "acc_stderr": 0.03198761546763125, + "acc_norm": 0.4816326530612245, + "acc_norm_stderr": 0.03198761546763125 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5738396624472574, + "acc_stderr": 0.03219035703131775, + "acc_norm": 0.5738396624472574, + "acc_norm_stderr": 0.03219035703131775 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3709256844850065, + "acc_stderr": 0.012337391684530312, + "acc_norm": 0.3709256844850065, + "acc_norm_stderr": 0.012337391684530312 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.44607843137254904, + "acc_stderr": 0.034888454513049734, + "acc_norm": 0.44607843137254904, + "acc_norm_stderr": 
0.034888454513049734 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.46060606060606063, + "acc_stderr": 0.03892207016552013, + "acc_norm": 0.46060606060606063, + "acc_norm_stderr": 0.03892207016552013 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.28886168910648713, + "mc1_stderr": 0.015866346401384304, + "mc2": 0.4752507582244631, + "mc2_stderr": 0.015582734966776392 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.46635182998819363, + "acc_stderr": 0.01715138411713187, + "acc_norm": 0.5100354191263282, + "acc_norm_stderr": 0.01718689128689406 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + 
"harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "uukuguy/speechless-code-mistral-7b-v1.0", + "model_sha": "1862e0a712efc6002112e9c1235a197d58419b37", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/uukuguy/speechless-zephyr-code-functionary-7b/result_2024-07-27 03:43:40.json b/uukuguy/speechless-zephyr-code-functionary-7b/result_2024-07-27 03:43:40.json new file mode 100644 index 0000000000000000000000000000000000000000..7adf5be206bbba799d529bf5e00bbe192d7248bc --- /dev/null +++ 
b/uukuguy/speechless-zephyr-code-functionary-7b/result_2024-07-27 03:43:40.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3464163822525597, + "acc_stderr": 0.013905011180063247, + "acc_norm": 0.3924914675767918, + "acc_norm_stderr": 0.014269634635670712 + }, + "harness|ko_hellaswag|10": { + "acc": 0.37343158733320053, + "acc_stderr": 0.004827266662144025, + "acc_norm": 0.48615813582951606, + "acc_norm_stderr": 0.004987868988629997 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.47953216374269003, + "acc_stderr": 0.038316105328219316, + "acc_norm": 0.47953216374269003, + "acc_norm_stderr": 0.038316105328219316 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6019417475728155, + "acc_stderr": 0.04846748253977238, + "acc_norm": 0.6019417475728155, + "acc_norm_stderr": 0.04846748253977238 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4674329501915709, + "acc_stderr": 0.017841995750520857, + "acc_norm": 0.4674329501915709, + "acc_norm_stderr": 0.017841995750520857 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.04244633238353228, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.04244633238353228 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3702127659574468, + "acc_stderr": 0.03156564682236784, + "acc_norm": 0.3702127659574468, + "acc_norm_stderr": 0.03156564682236784 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4457831325301205, + "acc_stderr": 0.03869543323472101, + "acc_norm": 0.4457831325301205, + "acc_norm_stderr": 0.03869543323472101 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5273311897106109, + "acc_stderr": 0.028355633568328174, + "acc_norm": 0.5273311897106109, + "acc_norm_stderr": 0.028355633568328174 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4618834080717489, 
+ "acc_stderr": 0.03346015011973228, + "acc_norm": 0.4618834080717489, + "acc_norm_stderr": 0.03346015011973228 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48091603053435117, + "acc_stderr": 0.043820947055509867, + "acc_norm": 0.48091603053435117, + "acc_norm_stderr": 0.043820947055509867 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.035402943770953675, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.035402943770953675 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4689655172413793, + "acc_stderr": 0.04158632762097828, + "acc_norm": 0.4689655172413793, + "acc_norm_stderr": 0.04158632762097828 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.04488482852329017, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.04488482852329017 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5546218487394958, + "acc_stderr": 0.0322841062671639, + "acc_norm": 0.5546218487394958, + "acc_norm_stderr": 0.0322841062671639 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.49230769230769234, + "acc_stderr": 0.025348006031534788, + "acc_norm": 0.49230769230769234, + "acc_norm_stderr": 0.025348006031534788 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.62, + "acc_stderr": 0.0487831731214563, + "acc_norm": 0.62, + "acc_norm_stderr": 0.0487831731214563 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110175, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110175 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5277777777777778, + "acc_stderr": 0.04826217294139894, + "acc_norm": 0.5277777777777778, + "acc_norm_stderr": 0.04826217294139894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + 
"acc": 0.43842364532019706, + "acc_stderr": 0.03491207857486519, + "acc_norm": 0.43842364532019706, + "acc_norm_stderr": 0.03491207857486519 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4806451612903226, + "acc_stderr": 0.0284226874043121, + "acc_norm": 0.4806451612903226, + "acc_norm_stderr": 0.0284226874043121 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7521367521367521, + "acc_stderr": 0.028286324075564414, + "acc_norm": 0.7521367521367521, + "acc_norm_stderr": 0.028286324075564414 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4339622641509434, + "acc_stderr": 0.030503292013342592, + "acc_norm": 0.4339622641509434, + "acc_norm_stderr": 0.030503292013342592 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5545454545454546, + "acc_stderr": 0.047605488214603246, + "acc_norm": 0.5545454545454546, + "acc_norm_stderr": 0.047605488214603246 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.32592592592592595, + "acc_stderr": 0.028578348365473075, + "acc_norm": 0.32592592592592595, + "acc_norm_stderr": 0.028578348365473075 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.038020397601079024, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.038020397601079024 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6169154228855721, + "acc_stderr": 0.0343751933733825, + "acc_norm": 0.6169154228855721, + "acc_norm_stderr": 0.0343751933733825 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3699421965317919, + "acc_stderr": 0.03681229633394319, + "acc_norm": 0.3699421965317919, + "acc_norm_stderr": 0.03681229633394319 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.35978835978835977, + "acc_stderr": 0.024718075944129274, + "acc_norm": 0.35978835978835977, + "acc_norm_stderr": 0.024718075944129274 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3402777777777778, + "acc_stderr": 0.039621355734862175, + "acc_norm": 
0.3402777777777778, + "acc_norm_stderr": 0.039621355734862175 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.58, + "acc_stderr": 0.04960449637488584, + "acc_norm": 0.58, + "acc_norm_stderr": 0.04960449637488584 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5144508670520231, + "acc_stderr": 0.026907849856282542, + "acc_norm": 0.5144508670520231, + "acc_norm_stderr": 0.026907849856282542 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5276073619631901, + "acc_stderr": 0.039223782906109894, + "acc_norm": 0.5276073619631901, + "acc_norm_stderr": 0.039223782906109894 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4506172839506173, + "acc_stderr": 0.027684721415656206, + "acc_norm": 0.4506172839506173, + "acc_norm_stderr": 0.027684721415656206 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5129533678756477, + "acc_stderr": 0.0360722806104775, + "acc_norm": 0.5129533678756477, + "acc_norm_stderr": 0.0360722806104775 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.35964912280701755, + "acc_stderr": 0.04514496132873633, + "acc_norm": 0.35964912280701755, + "acc_norm_stderr": 0.04514496132873633 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.4990825688073395, + "acc_stderr": 0.021437287056051215, + "acc_norm": 0.4990825688073395, + "acc_norm_stderr": 0.021437287056051215 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.373015873015873, + "acc_stderr": 0.04325506042017086, + "acc_norm": 0.373015873015873, + "acc_norm_stderr": 0.04325506042017086 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.47058823529411764, + "acc_stderr": 0.028580341065138282, + 
"acc_norm": 0.47058823529411764, + "acc_norm_stderr": 0.028580341065138282 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6942148760330579, + "acc_stderr": 0.042059539338841226, + "acc_norm": 0.6942148760330579, + "acc_norm_stderr": 0.042059539338841226 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.42105263157894735, + "acc_stderr": 0.04017901275981748, + "acc_norm": 0.42105263157894735, + "acc_norm_stderr": 0.04017901275981748 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4150326797385621, + "acc_stderr": 0.019933627776857425, + "acc_norm": 0.4150326797385621, + "acc_norm_stderr": 0.019933627776857425 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.38652482269503546, + "acc_stderr": 0.02904919034254346, + "acc_norm": 0.38652482269503546, + "acc_norm_stderr": 0.02904919034254346 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4017857142857143, + "acc_stderr": 0.04653333146973646, + "acc_norm": 0.4017857142857143, + "acc_norm_stderr": 0.04653333146973646 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.39814814814814814, + "acc_stderr": 0.033384734032074016, + "acc_norm": 0.39814814814814814, + "acc_norm_stderr": 0.033384734032074016 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.3094972067039106, + "acc_stderr": 0.015461169002371534, + "acc_norm": 0.3094972067039106, + "acc_norm_stderr": 0.015461169002371534 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.62, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.62, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 
0.40441176470588236, + "acc_stderr": 0.029812630701569736, + "acc_norm": 0.40441176470588236, + "acc_norm_stderr": 0.029812630701569736 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5551020408163265, + "acc_stderr": 0.031814251181977865, + "acc_norm": 0.5551020408163265, + "acc_norm_stderr": 0.031814251181977865 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.620253164556962, + "acc_stderr": 0.0315918875296585, + "acc_norm": 0.620253164556962, + "acc_norm_stderr": 0.0315918875296585 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3435462842242503, + "acc_stderr": 0.012128961174190166, + "acc_norm": 0.3435462842242503, + "acc_norm_stderr": 0.012128961174190166 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.46568627450980393, + "acc_stderr": 0.03501038327635897, + "acc_norm": 0.46568627450980393, + "acc_norm_stderr": 0.03501038327635897 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.44242424242424244, + "acc_stderr": 0.03878372113711274, + "acc_norm": 0.44242424242424244, + "acc_norm_stderr": 0.03878372113711274 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2974296205630355, + "mc1_stderr": 0.016002651487361005, + "mc2": 0.4747679479652767, + "mc2_stderr": 0.015513520650679107 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.48760330578512395, + "acc_stderr": 0.017185069732676524, + "acc_norm": 0.5218417945690673, + "acc_norm_stderr": 0.017173944474294385 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 
1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + 
"harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "uukuguy/speechless-zephyr-code-functionary-7b", + "model_sha": "d66fc775ece679966e352195c42444e9c70af7fa", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/uukuguy/zephyr-7b-alpha-dare-0.85/result_2024-05-15 21:17:19.json b/uukuguy/zephyr-7b-alpha-dare-0.85/result_2024-05-15 21:17:19.json new file mode 100644 index 0000000000000000000000000000000000000000..59abcb92ac4d059e7b2898f7b1262ede6e3d0b2d --- /dev/null +++ b/uukuguy/zephyr-7b-alpha-dare-0.85/result_2024-05-15 21:17:19.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3438566552901024, + "acc_stderr": 0.013880644570156213, + "acc_norm": 0.3890784982935154, + "acc_norm_stderr": 0.014247309976045605 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3737303326030671, + "acc_stderr": 0.004828045774734899, + "acc_norm": 0.48526190001991637, + "acc_norm_stderr": 0.004987613263678173 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4853801169590643, + "acc_stderr": 0.038331852752130205, + "acc_norm": 0.4853801169590643, + "acc_norm_stderr": 0.038331852752130205 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5825242718446602, + "acc_stderr": 0.048828405482122375, + "acc_norm": 0.5825242718446602, + "acc_norm_stderr": 0.048828405482122375 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.454661558109834, + "acc_stderr": 0.017806304585052606, + "acc_norm": 0.454661558109834, + "acc_norm_stderr": 0.017806304585052606 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.42962962962962964, + "acc_stderr": 0.04276349494376599, + "acc_norm": 0.42962962962962964, + "acc_norm_stderr": 0.04276349494376599 + }, 
+ "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932268, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932268 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39574468085106385, + "acc_stderr": 0.03196758697835362, + "acc_norm": 0.39574468085106385, + "acc_norm_stderr": 0.03196758697835362 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42771084337349397, + "acc_stderr": 0.03851597683718533, + "acc_norm": 0.42771084337349397, + "acc_norm_stderr": 0.03851597683718533 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4887459807073955, + "acc_stderr": 0.028390897396863533, + "acc_norm": 0.4887459807073955, + "acc_norm_stderr": 0.028390897396863533 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.43946188340807174, + "acc_stderr": 0.03331092511038179, + "acc_norm": 0.43946188340807174, + "acc_norm_stderr": 0.03331092511038179 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4732824427480916, + "acc_stderr": 0.04379024936553894, + "acc_norm": 0.4732824427480916, + "acc_norm_stderr": 0.04379024936553894 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5404040404040404, + "acc_stderr": 0.035507024651313425, + "acc_norm": 0.5404040404040404, + "acc_norm_stderr": 0.035507024651313425 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4413793103448276, + "acc_stderr": 0.04137931034482758, + "acc_norm": 0.4413793103448276, + "acc_norm_stderr": 0.04137931034482758 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.04389869956808778, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.04389869956808778 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5672268907563025, + "acc_stderr": 0.032183581077426124, + "acc_norm": 0.5672268907563025, + 
"acc_norm_stderr": 0.032183581077426124 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.49230769230769234, + "acc_stderr": 0.025348006031534795, + "acc_norm": 0.49230769230769234, + "acc_norm_stderr": 0.025348006031534795 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.58, + "acc_stderr": 0.04960449637488583, + "acc_norm": 0.58, + "acc_norm_stderr": 0.04960449637488583 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5277777777777778, + "acc_stderr": 0.04826217294139894, + "acc_norm": 0.5277777777777778, + "acc_norm_stderr": 0.04826217294139894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.43349753694581283, + "acc_stderr": 0.034867317274198714, + "acc_norm": 0.43349753694581283, + "acc_norm_stderr": 0.034867317274198714 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.47419354838709676, + "acc_stderr": 0.028406095057653315, + "acc_norm": 0.47419354838709676, + "acc_norm_stderr": 0.028406095057653315 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7350427350427351, + "acc_stderr": 0.028911208802749465, + "acc_norm": 0.7350427350427351, + "acc_norm_stderr": 0.028911208802749465 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4339622641509434, + "acc_stderr": 0.030503292013342585, + "acc_norm": 0.4339622641509434, + "acc_norm_stderr": 0.030503292013342585 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5363636363636364, + "acc_stderr": 0.04776449162396197, + "acc_norm": 0.5363636363636364, + "acc_norm_stderr": 0.04776449162396197 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3296296296296296, + "acc_stderr": 0.02866120111652459, + "acc_norm": 0.3296296296296296, + "acc_norm_stderr": 0.02866120111652459 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.25165562913907286, + "acc_stderr": 
0.035433042343899844, + "acc_norm": 0.25165562913907286, + "acc_norm_stderr": 0.035433042343899844 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6119402985074627, + "acc_stderr": 0.03445789964362749, + "acc_norm": 0.6119402985074627, + "acc_norm_stderr": 0.03445789964362749 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3930635838150289, + "acc_stderr": 0.0372424959581773, + "acc_norm": 0.3930635838150289, + "acc_norm_stderr": 0.0372424959581773 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.02510742548113728, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.02510742548113728 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3472222222222222, + "acc_stderr": 0.039812405437178615, + "acc_norm": 0.3472222222222222, + "acc_norm_stderr": 0.039812405437178615 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.57, + "acc_stderr": 0.04975698519562426, + "acc_norm": 0.57, + "acc_norm_stderr": 0.04975698519562426 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5086705202312138, + "acc_stderr": 0.0269150473553698, + "acc_norm": 0.5086705202312138, + "acc_norm_stderr": 0.0269150473553698 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5337423312883436, + "acc_stderr": 0.039194155450484096, + "acc_norm": 0.5337423312883436, + "acc_norm_stderr": 0.039194155450484096 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4382716049382716, + "acc_stderr": 0.027607914087400473, + "acc_norm": 0.4382716049382716, + "acc_norm_stderr": 0.027607914087400473 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5181347150259067, + 
"acc_stderr": 0.03606065001832919, + "acc_norm": 0.5181347150259067, + "acc_norm_stderr": 0.03606065001832919 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04434600701584925, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04434600701584925 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.48807339449541287, + "acc_stderr": 0.021431223617362227, + "acc_norm": 0.48807339449541287, + "acc_norm_stderr": 0.021431223617362227 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.36507936507936506, + "acc_stderr": 0.043062412591271526, + "acc_norm": 0.36507936507936506, + "acc_norm_stderr": 0.043062412591271526 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.477124183006536, + "acc_stderr": 0.028599936776089782, + "acc_norm": 0.477124183006536, + "acc_norm_stderr": 0.028599936776089782 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7107438016528925, + "acc_stderr": 0.041391127276354626, + "acc_norm": 0.7107438016528925, + "acc_norm_stderr": 0.041391127276354626 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4342105263157895, + "acc_stderr": 0.040335656678483205, + "acc_norm": 0.4342105263157895, + "acc_norm_stderr": 0.040335656678483205 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4019607843137255, + "acc_stderr": 0.019835176484375387, + "acc_norm": 0.4019607843137255, + "acc_norm_stderr": 0.019835176484375387 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3617021276595745, + "acc_stderr": 0.028663820147199502, + "acc_norm": 0.3617021276595745, + "acc_norm_stderr": 0.028663820147199502 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4375, + "acc_stderr": 0.04708567521880525, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.04708567521880525 + }, + 
"harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.03400603625538271, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.03400603625538271 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.35083798882681566, + "acc_stderr": 0.01596103667523097, + "acc_norm": 0.35083798882681566, + "acc_norm_stderr": 0.01596103667523097 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4338235294117647, + "acc_stderr": 0.030105636570016633, + "acc_norm": 0.4338235294117647, + "acc_norm_stderr": 0.030105636570016633 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5591836734693878, + "acc_stderr": 0.03178419114175363, + "acc_norm": 0.5591836734693878, + "acc_norm_stderr": 0.03178419114175363 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5907172995780591, + "acc_stderr": 0.03200704183359591, + "acc_norm": 0.5907172995780591, + "acc_norm_stderr": 0.03200704183359591 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3305084745762712, + "acc_stderr": 0.012014142101842974, + "acc_norm": 0.3305084745762712, + "acc_norm_stderr": 0.012014142101842974 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.44607843137254904, + "acc_stderr": 0.034888454513049734, + "acc_norm": 0.44607843137254904, + "acc_norm_stderr": 0.034888454513049734 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.46060606060606063, + "acc_stderr": 0.03892207016552013, + "acc_norm": 0.46060606060606063, + "acc_norm_stderr": 0.03892207016552013 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2937576499388005, + "mc1_stderr": 0.015945068581236618, + 
"mc2": 0.4709732949057789, + "mc2_stderr": 0.015470110603552233 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4899645808736718, + "acc_stderr": 0.017186891286894053, + "acc_norm": 0.526564344746163, + "acc_norm_stderr": 0.017166075717577747 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + 
"harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "uukuguy/zephyr-7b-alpha-dare-0.85", + "model_sha": "afe35301593b4ce2e7b5d1696066724ef1f802eb", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/uygarkurt/llama-3-merged-linear/result_2024-05-13 16:46:19.json b/uygarkurt/llama-3-merged-linear/result_2024-05-13 16:46:19.json new file mode 100644 index 0000000000000000000000000000000000000000..e8752165535ccf441c15f1d508c2ffb60e840274 --- /dev/null +++ b/uygarkurt/llama-3-merged-linear/result_2024-05-13 16:46:19.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.43600682593856654, + "acc_stderr": 0.014491225699230916, + "acc_norm": 0.5017064846416383, + "acc_norm_stderr": 0.014611305705056987 + }, + "harness|ko_hellaswag|10": { + "acc": 0.38617805218084045, + "acc_stderr": 0.004858771963468882, + "acc_norm": 0.513343955387373, + "acc_norm_stderr": 
0.004988004122536518 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5964912280701754, + "acc_stderr": 0.037627386999170565, + "acc_norm": 0.5964912280701754, + "acc_norm_stderr": 0.037627386999170565 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6407766990291263, + "acc_stderr": 0.04750458399041697, + "acc_norm": 0.6407766990291263, + "acc_norm_stderr": 0.04750458399041697 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.45721583652618136, + "acc_stderr": 0.01781438523853443, + "acc_norm": 0.45721583652618136, + "acc_norm_stderr": 0.01781438523853443 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.04171654161354543, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.04171654161354543 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720683, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720683 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4723404255319149, + "acc_stderr": 0.03263597118409769, + "acc_norm": 0.4723404255319149, + "acc_norm_stderr": 0.03263597118409769 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3433734939759036, + "acc_stderr": 0.03696584317010601, + "acc_norm": 0.3433734939759036, + "acc_norm_stderr": 0.03696584317010601 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5819935691318328, + "acc_stderr": 0.028013651891995076, + "acc_norm": 0.5819935691318328, + "acc_norm_stderr": 0.028013651891995076 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5246636771300448, + "acc_stderr": 0.033516951676526276, + "acc_norm": 0.5246636771300448, + "acc_norm_stderr": 0.033516951676526276 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5038167938931297, + "acc_stderr": 0.043851623256015534, + "acc_norm": 0.5038167938931297, + "acc_norm_stderr": 0.043851623256015534 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 
0.049236596391733084 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5202020202020202, + "acc_stderr": 0.035594435655639196, + "acc_norm": 0.5202020202020202, + "acc_norm_stderr": 0.035594435655639196 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5862068965517241, + "acc_stderr": 0.04104269211806231, + "acc_norm": 0.5862068965517241, + "acc_norm_stderr": 0.04104269211806231 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.04835503696107223, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.04835503696107223 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5672268907563025, + "acc_stderr": 0.032183581077426124, + "acc_norm": 0.5672268907563025, + "acc_norm_stderr": 0.032183581077426124 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5205128205128206, + "acc_stderr": 0.02532966316348994, + "acc_norm": 0.5205128205128206, + "acc_norm_stderr": 0.02532966316348994 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6388888888888888, + "acc_stderr": 0.04643454608906275, + "acc_norm": 0.6388888888888888, + "acc_norm_stderr": 0.04643454608906275 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4630541871921182, + "acc_stderr": 0.035083705204426656, + "acc_norm": 0.4630541871921182, + "acc_norm_stderr": 0.035083705204426656 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5451612903225806, + "acc_stderr": 0.028327743091561077, + "acc_norm": 0.5451612903225806, + "acc_norm_stderr": 0.028327743091561077 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7478632478632479, + "acc_stderr": 0.02844796547623102, + 
"acc_norm": 0.7478632478632479, + "acc_norm_stderr": 0.02844796547623102 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5094339622641509, + "acc_stderr": 0.0307673947078081, + "acc_norm": 0.5094339622641509, + "acc_norm_stderr": 0.0307673947078081 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5363636363636364, + "acc_stderr": 0.04776449162396197, + "acc_norm": 0.5363636363636364, + "acc_norm_stderr": 0.04776449162396197 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3962962962962963, + "acc_stderr": 0.029822619458533997, + "acc_norm": 0.3962962962962963, + "acc_norm_stderr": 0.029822619458533997 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3973509933774834, + "acc_stderr": 0.03995524007681681, + "acc_norm": 0.3973509933774834, + "acc_norm_stderr": 0.03995524007681681 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6965174129353234, + "acc_stderr": 0.03251006816458619, + "acc_norm": 0.6965174129353234, + "acc_norm_stderr": 0.03251006816458619 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.47398843930635837, + "acc_stderr": 0.038073017265045105, + "acc_norm": 0.47398843930635837, + "acc_norm_stderr": 0.038073017265045105 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3994708994708995, + "acc_stderr": 0.025225450284067877, + "acc_norm": 0.3994708994708995, + "acc_norm_stderr": 0.025225450284067877 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4861111111111111, + "acc_stderr": 0.04179596617581, + "acc_norm": 0.4861111111111111, + "acc_norm_stderr": 0.04179596617581 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.75, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.75, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5520231213872833, + 
"acc_stderr": 0.026772990653361826, + "acc_norm": 0.5520231213872833, + "acc_norm_stderr": 0.026772990653361826 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.48466257668711654, + "acc_stderr": 0.03926522378708843, + "acc_norm": 0.48466257668711654, + "acc_norm_stderr": 0.03926522378708843 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5339506172839507, + "acc_stderr": 0.027756535257347663, + "acc_norm": 0.5339506172839507, + "acc_norm_stderr": 0.027756535257347663 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5803108808290155, + "acc_stderr": 0.035615873276858834, + "acc_norm": 0.5803108808290155, + "acc_norm_stderr": 0.035615873276858834 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3508771929824561, + "acc_stderr": 0.04489539350270698, + "acc_norm": 0.3508771929824561, + "acc_norm_stderr": 0.04489539350270698 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.618348623853211, + "acc_stderr": 0.020828148517022596, + "acc_norm": 0.618348623853211, + "acc_norm_stderr": 0.020828148517022596 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.40476190476190477, + "acc_stderr": 0.043902592653775614, + "acc_norm": 0.40476190476190477, + "acc_norm_stderr": 0.043902592653775614 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5718954248366013, + "acc_stderr": 0.028332397483664274, + "acc_norm": 0.5718954248366013, + "acc_norm_stderr": 0.028332397483664274 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.65, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.65, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7107438016528925, + "acc_stderr": 0.04139112727635464, + "acc_norm": 0.7107438016528925, + "acc_norm_stderr": 0.04139112727635464 + }, + "harness|ko_mmlu_astronomy|5": { + 
"acc": 0.5328947368421053, + "acc_stderr": 0.04060127035236395, + "acc_norm": 0.5328947368421053, + "acc_norm_stderr": 0.04060127035236395 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4542483660130719, + "acc_stderr": 0.020142974553795198, + "acc_norm": 0.4542483660130719, + "acc_norm_stderr": 0.020142974553795198 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3546099290780142, + "acc_stderr": 0.028538650028878638, + "acc_norm": 0.3546099290780142, + "acc_norm_stderr": 0.028538650028878638 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4375, + "acc_stderr": 0.04708567521880525, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.04708567521880525 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4583333333333333, + "acc_stderr": 0.033981108902946366, + "acc_norm": 0.4583333333333333, + "acc_norm_stderr": 0.033981108902946366 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2837988826815642, + "acc_stderr": 0.015078358970751757, + "acc_norm": 0.2837988826815642, + "acc_norm_stderr": 0.015078358970751757 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.69, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.69, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4117647058823529, + "acc_stderr": 0.02989616303312547, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.02989616303312547 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.636734693877551, + "acc_stderr": 0.030789051139030806, + "acc_norm": 0.636734693877551, + "acc_norm_stderr": 0.030789051139030806 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6708860759493671, + "acc_stderr": 0.030587326294702354, + "acc_norm": 0.6708860759493671, + "acc_norm_stderr": 
0.030587326294702354 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3813559322033898, + "acc_stderr": 0.012405509401888119, + "acc_norm": 0.3813559322033898, + "acc_norm_stderr": 0.012405509401888119 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5833333333333334, + "acc_stderr": 0.03460228327239172, + "acc_norm": 0.5833333333333334, + "acc_norm_stderr": 0.03460228327239172 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6727272727272727, + "acc_stderr": 0.03663974994391242, + "acc_norm": 0.6727272727272727, + "acc_norm_stderr": 0.03663974994391242 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3818849449204406, + "mc1_stderr": 0.017008101939163498, + "mc2": 0.5656474128933694, + "mc2_stderr": 0.0160768880937946 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5242030696576151, + "acc_stderr": 0.017170202466520748, + "acc_norm": 0.551357733175915, + "acc_norm_stderr": 0.01709943051472578 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + 
"harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "uygarkurt/llama-3-merged-linear", + "model_sha": "3a053dd8d2e9117544bf9f608beafb1aabe7b09c", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git 
a/vaiv/GeM-2.0/result_2024-05-07 10:43:44.json b/vaiv/GeM-2.0/result_2024-05-07 10:43:44.json new file mode 100644 index 0000000000000000000000000000000000000000..6d081ad9a8084afadeed4efe80fbd80f4db5f179 --- /dev/null +++ b/vaiv/GeM-2.0/result_2024-05-07 10:43:44.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.42406143344709896, + "acc_stderr": 0.0144418896274644, + "acc_norm": 0.4718430034129693, + "acc_norm_stderr": 0.014588204105102202 + }, + "harness|ko_hellaswag|10": { + "acc": 0.46883091017725553, + "acc_stderr": 0.004980076707392432, + "acc_norm": 0.633240390360486, + "acc_norm_stderr": 0.004809352075008938 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.7192982456140351, + "acc_stderr": 0.034462962170884265, + "acc_norm": 0.7192982456140351, + "acc_norm_stderr": 0.034462962170884265 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5922330097087378, + "acc_stderr": 0.048657775704107675, + "acc_norm": 0.5922330097087378, + "acc_norm_stderr": 0.048657775704107675 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.698595146871009, + "acc_stderr": 0.016409091097268798, + "acc_norm": 0.698595146871009, + "acc_norm_stderr": 0.016409091097268798 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4222222222222222, + "acc_stderr": 0.04266763404099582, + "acc_norm": 0.4222222222222222, + "acc_norm_stderr": 0.04266763404099582 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.425531914893617, + "acc_stderr": 0.03232146916224468, + "acc_norm": 0.425531914893617, + "acc_norm_stderr": 0.03232146916224468 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4397590361445783, + "acc_stderr": 0.03864139923699121, + "acc_norm": 0.4397590361445783, + "acc_norm_stderr": 0.03864139923699121 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4983922829581994, + 
"acc_stderr": 0.02839794490780661, + "acc_norm": 0.4983922829581994, + "acc_norm_stderr": 0.02839794490780661 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5067264573991032, + "acc_stderr": 0.03355476596234355, + "acc_norm": 0.5067264573991032, + "acc_norm_stderr": 0.03355476596234355 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5190839694656488, + "acc_stderr": 0.04382094705550989, + "acc_norm": 0.5190839694656488, + "acc_norm_stderr": 0.04382094705550989 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6515151515151515, + "acc_stderr": 0.033948539651564025, + "acc_norm": 0.6515151515151515, + "acc_norm_stderr": 0.033948539651564025 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4068965517241379, + "acc_stderr": 0.040937939812662374, + "acc_norm": 0.4068965517241379, + "acc_norm_stderr": 0.040937939812662374 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.04533838195929777, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.04533838195929777 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5756302521008403, + "acc_stderr": 0.03210479051015776, + "acc_norm": 0.5756302521008403, + "acc_norm_stderr": 0.03210479051015776 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.541025641025641, + "acc_stderr": 0.025265525491284295, + "acc_norm": 0.541025641025641, + "acc_norm_stderr": 0.025265525491284295 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.72, + "acc_stderr": 0.045126085985421296, + "acc_norm": 0.72, + "acc_norm_stderr": 0.045126085985421296 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 
0.5740740740740741, + "acc_stderr": 0.0478034362693679, + "acc_norm": 0.5740740740740741, + "acc_norm_stderr": 0.0478034362693679 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2660098522167488, + "acc_stderr": 0.03108982600293753, + "acc_norm": 0.2660098522167488, + "acc_norm_stderr": 0.03108982600293753 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.49032258064516127, + "acc_stderr": 0.028438677998909565, + "acc_norm": 0.49032258064516127, + "acc_norm_stderr": 0.028438677998909565 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7051282051282052, + "acc_stderr": 0.02987257770889118, + "acc_norm": 0.7051282051282052, + "acc_norm_stderr": 0.02987257770889118 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5245283018867924, + "acc_stderr": 0.030735822206205608, + "acc_norm": 0.5245283018867924, + "acc_norm_stderr": 0.030735822206205608 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5, + "acc_stderr": 0.04789131426105757, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04789131426105757 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3111111111111111, + "acc_stderr": 0.028226446749683515, + "acc_norm": 0.3111111111111111, + "acc_norm_stderr": 0.028226446749683515 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + "acc_stderr": 0.037345356767871984, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.037345356767871984 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.681592039800995, + "acc_stderr": 0.03294118479054095, + "acc_norm": 0.681592039800995, + "acc_norm_stderr": 0.03294118479054095 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4161849710982659, + "acc_stderr": 0.03758517775404947, + "acc_norm": 0.4161849710982659, + "acc_norm_stderr": 0.03758517775404947 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.34656084656084657, + "acc_stderr": 0.024508777521028428, + "acc_norm": 0.34656084656084657, + "acc_norm_stderr": 
0.024508777521028428 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4791666666666667, + "acc_stderr": 0.04177578950739993, + "acc_norm": 0.4791666666666667, + "acc_norm_stderr": 0.04177578950739993 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.67, + "acc_stderr": 0.04725815626252609, + "acc_norm": 0.67, + "acc_norm_stderr": 0.04725815626252609 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5173410404624278, + "acc_stderr": 0.026902900458666647, + "acc_norm": 0.5173410404624278, + "acc_norm_stderr": 0.026902900458666647 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5705521472392638, + "acc_stderr": 0.03889066619112722, + "acc_norm": 0.5705521472392638, + "acc_norm_stderr": 0.03889066619112722 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.49382716049382713, + "acc_stderr": 0.027818623962583295, + "acc_norm": 0.49382716049382713, + "acc_norm_stderr": 0.027818623962583295 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5181347150259067, + "acc_stderr": 0.03606065001832919, + "acc_norm": 0.5181347150259067, + "acc_norm_stderr": 0.03606065001832919 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.32456140350877194, + "acc_stderr": 0.04404556157374768, + "acc_norm": 0.32456140350877194, + "acc_norm_stderr": 0.04404556157374768 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.671559633027523, + "acc_stderr": 0.020135902797298395, + "acc_norm": 0.671559633027523, + "acc_norm_stderr": 0.020135902797298395 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3412698412698413, + "acc_stderr": 0.042407993275749234, + "acc_norm": 0.3412698412698413, + 
"acc_norm_stderr": 0.042407993275749234 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5032679738562091, + "acc_stderr": 0.02862930519400354, + "acc_norm": 0.5032679738562091, + "acc_norm_stderr": 0.02862930519400354 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6446280991735537, + "acc_stderr": 0.0436923632657398, + "acc_norm": 0.6446280991735537, + "acc_norm_stderr": 0.0436923632657398 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.47368421052631576, + "acc_stderr": 0.040633027314866704, + "acc_norm": 0.47368421052631576, + "acc_norm_stderr": 0.040633027314866704 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.46568627450980393, + "acc_stderr": 0.02018014484330729, + "acc_norm": 0.46568627450980393, + "acc_norm_stderr": 0.02018014484330729 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3475177304964539, + "acc_stderr": 0.028406627809590954, + "acc_norm": 0.3475177304964539, + "acc_norm_stderr": 0.028406627809590954 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.36607142857142855, + "acc_stderr": 0.0457237235873743, + "acc_norm": 0.36607142857142855, + "acc_norm_stderr": 0.0457237235873743 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.03214952147802748, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.03214952147802748 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23798882681564246, + "acc_stderr": 0.014242630070574892, + "acc_norm": 0.23798882681564246, + "acc_norm_stderr": 0.014242630070574892 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.52, + "acc_stderr": 0.05021167315686779, 
+ "acc_norm": 0.52, + "acc_norm_stderr": 0.05021167315686779 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.41544117647058826, + "acc_stderr": 0.029935342707877746, + "acc_norm": 0.41544117647058826, + "acc_norm_stderr": 0.029935342707877746 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4816326530612245, + "acc_stderr": 0.03198761546763126, + "acc_norm": 0.4816326530612245, + "acc_norm_stderr": 0.03198761546763126 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7088607594936709, + "acc_stderr": 0.029571601065753378, + "acc_norm": 0.7088607594936709, + "acc_norm_stderr": 0.029571601065753378 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.36962190352020863, + "acc_stderr": 0.012328445778575269, + "acc_norm": 0.36962190352020863, + "acc_norm_stderr": 0.012328445778575269 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6519607843137255, + "acc_stderr": 0.03343311240488418, + "acc_norm": 0.6519607843137255, + "acc_norm_stderr": 0.03343311240488418 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.03681050869161549, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.03681050869161549 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2778457772337821, + "mc1_stderr": 0.015680929364024633, + "mc2": 0.42076708539927266, + "mc2_stderr": 0.015226563257660227 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.6446280991735537, + "acc_stderr": 0.016455496000314523, + "acc_norm": 0.6646989374262101, + "acc_norm_stderr": 0.01623098123298981 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 
1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + 
"harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "vaiv/GeM-2.0", + "model_sha": "97014ebb11e877944fe3a7fa1b34dc332ae85ba3", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/vaiv/GeM2-Llamion-14B-Base/result_2024-05-16 08:28:06.json b/vaiv/GeM2-Llamion-14B-Base/result_2024-05-16 08:28:06.json new file mode 100644 index 0000000000000000000000000000000000000000..fe3a50da589d8b213d5be62c99307b11c68ae5fc --- /dev/null +++ b/vaiv/GeM2-Llamion-14B-Base/result_2024-05-16 08:28:06.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.447098976109215, + "acc_stderr": 0.014529380160526848, + "acc_norm": 0.492320819112628, + "acc_norm_stderr": 0.01460966744089257 + }, + "harness|ko_hellaswag|10": { + "acc": 0.48675562636924913, + "acc_stderr": 0.004988030554894802, + "acc_norm": 0.6606253734315873, + "acc_norm_stderr": 0.004725293905228257 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.7719298245614035, + "acc_stderr": 0.032180937956023566, + "acc_norm": 0.7719298245614035, + "acc_norm_stderr": 0.032180937956023566 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.7864077669902912, + "acc_stderr": 0.040580420156460344, + "acc_norm": 0.7864077669902912, + "acc_norm_stderr": 0.040580420156460344 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.789272030651341, + "acc_stderr": 0.01458381246586254, + "acc_norm": 0.789272030651341, + "acc_norm_stderr": 0.01458381246586254 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.5925925925925926, + "acc_stderr": 0.04244633238353228, + 
"acc_norm": 0.5925925925925926, + "acc_norm_stderr": 0.04244633238353228 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.6340425531914894, + "acc_stderr": 0.03148955829745529, + "acc_norm": 0.6340425531914894, + "acc_norm_stderr": 0.03148955829745529 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.6385542168674698, + "acc_stderr": 0.03740059382029321, + "acc_norm": 0.6385542168674698, + "acc_norm_stderr": 0.03740059382029321 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6913183279742765, + "acc_stderr": 0.026236965881153252, + "acc_norm": 0.6913183279742765, + "acc_norm_stderr": 0.026236965881153252 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.7130044843049327, + "acc_stderr": 0.030360379710291947, + "acc_norm": 0.7130044843049327, + "acc_norm_stderr": 0.030360379710291947 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.7786259541984732, + "acc_stderr": 0.036412970813137296, + "acc_norm": 0.7786259541984732, + "acc_norm_stderr": 0.036412970813137296 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.65, + "acc_stderr": 0.04793724854411018, + "acc_norm": 0.65, + "acc_norm_stderr": 0.04793724854411018 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7575757575757576, + "acc_stderr": 0.03053289223393203, + "acc_norm": 0.7575757575757576, + "acc_norm_stderr": 0.03053289223393203 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.6827586206896552, + "acc_stderr": 0.038783523721386215, + "acc_norm": 0.6827586206896552, + "acc_norm_stderr": 0.038783523721386215 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.5392156862745098, + "acc_stderr": 0.04959859966384181, + "acc_norm": 0.5392156862745098, + "acc_norm_stderr": 0.04959859966384181 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.7394957983193278, + "acc_stderr": 
0.02851025151234193, + "acc_norm": 0.7394957983193278, + "acc_norm_stderr": 0.02851025151234193 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.676923076923077, + "acc_stderr": 0.02371088850197058, + "acc_norm": 0.676923076923077, + "acc_norm_stderr": 0.02371088850197058 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.79, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.79, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.75, + "acc_stderr": 0.04186091791394607, + "acc_norm": 0.75, + "acc_norm_stderr": 0.04186091791394607 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.5320197044334976, + "acc_stderr": 0.03510766597959215, + "acc_norm": 0.5320197044334976, + "acc_norm_stderr": 0.03510766597959215 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6451612903225806, + "acc_stderr": 0.027218889773308757, + "acc_norm": 0.6451612903225806, + "acc_norm_stderr": 0.027218889773308757 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.8632478632478633, + "acc_stderr": 0.022509033937077802, + "acc_norm": 0.8632478632478633, + "acc_norm_stderr": 0.022509033937077802 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.7018867924528301, + "acc_stderr": 0.02815283794249387, + "acc_norm": 0.7018867924528301, + "acc_norm_stderr": 0.02815283794249387 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6909090909090909, + "acc_stderr": 0.044262946482000985, + "acc_norm": 0.6909090909090909, + "acc_norm_stderr": 0.044262946482000985 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.5481481481481482, + "acc_stderr": 0.030343862998512626, + "acc_norm": 0.5481481481481482, + "acc_norm_stderr": 0.030343862998512626 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.5761589403973509, + 
"acc_stderr": 0.04034846678603397, + "acc_norm": 0.5761589403973509, + "acc_norm_stderr": 0.04034846678603397 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.8208955223880597, + "acc_stderr": 0.027113286753111848, + "acc_norm": 0.8208955223880597, + "acc_norm_stderr": 0.027113286753111848 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.7572254335260116, + "acc_stderr": 0.0326926380614177, + "acc_norm": 0.7572254335260116, + "acc_norm_stderr": 0.0326926380614177 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.544973544973545, + "acc_stderr": 0.025646928361049398, + "acc_norm": 0.544973544973545, + "acc_norm_stderr": 0.025646928361049398 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.7361111111111112, + "acc_stderr": 0.03685651095897532, + "acc_norm": 0.7361111111111112, + "acc_norm_stderr": 0.03685651095897532 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.55, + "acc_stderr": 0.05, + "acc_norm": 0.55, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.9, + "acc_stderr": 0.030151134457776348, + "acc_norm": 0.9, + "acc_norm_stderr": 0.030151134457776348 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.7196531791907514, + "acc_stderr": 0.024182427496577605, + "acc_norm": 0.7196531791907514, + "acc_norm_stderr": 0.024182427496577605 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.7116564417177914, + "acc_stderr": 0.035590395316173425, + "acc_norm": 0.7116564417177914, + "acc_norm_stderr": 0.035590395316173425 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.7037037037037037, + "acc_stderr": 0.025407197798890155, + "acc_norm": 0.7037037037037037, + "acc_norm_stderr": 0.025407197798890155 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7512953367875648, + "acc_stderr": 
0.031195840877700286, + "acc_norm": 0.7512953367875648, + "acc_norm_stderr": 0.031195840877700286 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.5175438596491229, + "acc_stderr": 0.0470070803355104, + "acc_norm": 0.5175438596491229, + "acc_norm_stderr": 0.0470070803355104 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.8330275229357799, + "acc_stderr": 0.015990154885073396, + "acc_norm": 0.8330275229357799, + "acc_norm_stderr": 0.015990154885073396 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3492063492063492, + "acc_stderr": 0.04263906892795132, + "acc_norm": 0.3492063492063492, + "acc_norm_stderr": 0.04263906892795132 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.7777777777777778, + "acc_stderr": 0.023805186524888146, + "acc_norm": 0.7777777777777778, + "acc_norm_stderr": 0.023805186524888146 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.68, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.68, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.8099173553719008, + "acc_stderr": 0.035817969517092825, + "acc_norm": 0.8099173553719008, + "acc_norm_stderr": 0.035817969517092825 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.7302631578947368, + "acc_stderr": 0.036117805602848975, + "acc_norm": 0.7302631578947368, + "acc_norm_stderr": 0.036117805602848975 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.6699346405228758, + "acc_stderr": 0.019023726160724553, + "acc_norm": 0.6699346405228758, + "acc_norm_stderr": 0.019023726160724553 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.450354609929078, + "acc_stderr": 0.029680105565029036, + "acc_norm": 0.450354609929078, + "acc_norm_stderr": 0.029680105565029036 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.625, + "acc_stderr": 0.04595091388086298, + "acc_norm": 0.625, + "acc_norm_stderr": 0.04595091388086298 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 
0.6851851851851852, + "acc_stderr": 0.0316746870682898, + "acc_norm": 0.6851851851851852, + "acc_norm_stderr": 0.0316746870682898 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.5083798882681564, + "acc_stderr": 0.016720152794672486, + "acc_norm": 0.5083798882681564, + "acc_norm_stderr": 0.016720152794672486 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.63, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.63, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.6801470588235294, + "acc_stderr": 0.02833295951403122, + "acc_norm": 0.6801470588235294, + "acc_norm_stderr": 0.02833295951403122 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.7836734693877551, + "acc_stderr": 0.026358916334904028, + "acc_norm": 0.7836734693877551, + "acc_norm_stderr": 0.026358916334904028 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.8481012658227848, + "acc_stderr": 0.02336387809663245, + "acc_norm": 0.8481012658227848, + "acc_norm_stderr": 0.02336387809663245 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.43546284224250326, + "acc_stderr": 0.012663412101248344, + "acc_norm": 0.43546284224250326, + "acc_norm_stderr": 0.012663412101248344 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.8284313725490197, + "acc_stderr": 0.02646056956124063, + "acc_norm": 0.8284313725490197, + "acc_norm_stderr": 0.02646056956124063 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.7090909090909091, + "acc_stderr": 0.03546563019624335, + "acc_norm": 0.7090909090909091, + "acc_norm_stderr": 0.03546563019624335 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2668298653610771, + "mc1_stderr": 0.015483691939237269, + "mc2": 0.40635057569269595, + "mc2_stderr": 0.01521464320379055 + 
}, + "harness|ko_commongen_v2|2": { + "acc": 0.5478158205430933, + "acc_stderr": 0.017111567130916792, + "acc_norm": 0.5631641086186541, + "acc_norm_stderr": 0.017052633559856076 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + 
"harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "vaiv/GeM2-Llamion-14B-Base", + "model_sha": "bdbd5ebd4f9f2806aa2ba9c221bb56ca4824c84d", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/vaiv/GeM2-Llamion-14B-Chat/result_2024-05-16 08:28:18.json b/vaiv/GeM2-Llamion-14B-Chat/result_2024-05-16 08:28:18.json new file mode 100644 index 0000000000000000000000000000000000000000..344039392ffd2446eddaf4a26a7a41582a7a4997 --- /dev/null +++ b/vaiv/GeM2-Llamion-14B-Chat/result_2024-05-16 08:28:18.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.43856655290102387, + "acc_stderr": 0.014500682618212865, + "acc_norm": 0.48208191126279865, + "acc_norm_stderr": 0.014602005585490976 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4666401115315674, + "acc_stderr": 0.004978662946687273, + "acc_norm": 0.6169089822744473, + "acc_norm_stderr": 0.004851466623601452 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 
0.7192982456140351, + "acc_stderr": 0.034462962170884265, + "acc_norm": 0.7192982456140351, + "acc_norm_stderr": 0.034462962170884265 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.7475728155339806, + "acc_stderr": 0.043012503996908764, + "acc_norm": 0.7475728155339806, + "acc_norm_stderr": 0.043012503996908764 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.7420178799489144, + "acc_stderr": 0.01564583018834895, + "acc_norm": 0.7420178799489144, + "acc_norm_stderr": 0.01564583018834895 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.5481481481481482, + "acc_stderr": 0.04299268905480864, + "acc_norm": 0.5481481481481482, + "acc_norm_stderr": 0.04299268905480864 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.5829787234042553, + "acc_stderr": 0.032232762667117124, + "acc_norm": 0.5829787234042553, + "acc_norm_stderr": 0.032232762667117124 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.5240963855421686, + "acc_stderr": 0.038879718495972646, + "acc_norm": 0.5240963855421686, + "acc_norm_stderr": 0.038879718495972646 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6109324758842444, + "acc_stderr": 0.027690337536485372, + "acc_norm": 0.6109324758842444, + "acc_norm_stderr": 0.027690337536485372 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.6457399103139013, + "acc_stderr": 0.03210062154134987, + "acc_norm": 0.6457399103139013, + "acc_norm_stderr": 0.03210062154134987 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.648854961832061, + "acc_stderr": 0.0418644516301375, + "acc_norm": 0.648854961832061, + "acc_norm_stderr": 0.0418644516301375 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 
0.7222222222222222, + "acc_stderr": 0.03191178226713547, + "acc_norm": 0.7222222222222222, + "acc_norm_stderr": 0.03191178226713547 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5862068965517241, + "acc_stderr": 0.041042692118062316, + "acc_norm": 0.5862068965517241, + "acc_norm_stderr": 0.041042692118062316 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.43137254901960786, + "acc_stderr": 0.04928099597287533, + "acc_norm": 0.43137254901960786, + "acc_norm_stderr": 0.04928099597287533 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6134453781512605, + "acc_stderr": 0.031631458075523776, + "acc_norm": 0.6134453781512605, + "acc_norm_stderr": 0.031631458075523776 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.541025641025641, + "acc_stderr": 0.025265525491284295, + "acc_norm": 0.541025641025641, + "acc_norm_stderr": 0.025265525491284295 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.69, + "acc_stderr": 0.046482319871173156, + "acc_norm": 0.69, + "acc_norm_stderr": 0.046482319871173156 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6851851851851852, + "acc_stderr": 0.04489931073591312, + "acc_norm": 0.6851851851851852, + "acc_norm_stderr": 0.04489931073591312 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4729064039408867, + "acc_stderr": 0.03512819077876106, + "acc_norm": 0.4729064039408867, + "acc_norm_stderr": 0.03512819077876106 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5838709677419355, + "acc_stderr": 0.028040981380761536, + "acc_norm": 0.5838709677419355, + "acc_norm_stderr": 0.028040981380761536 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.8290598290598291, + "acc_stderr": 0.024662496845209807, + "acc_norm": 0.8290598290598291, + "acc_norm_stderr": 0.024662496845209807 + }, + 
"harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.6226415094339622, + "acc_stderr": 0.02983280811479601, + "acc_norm": 0.6226415094339622, + "acc_norm_stderr": 0.02983280811479601 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6909090909090909, + "acc_stderr": 0.044262946482000985, + "acc_norm": 0.6909090909090909, + "acc_norm_stderr": 0.044262946482000985 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.34074074074074073, + "acc_stderr": 0.02889774874113114, + "acc_norm": 0.34074074074074073, + "acc_norm_stderr": 0.02889774874113114 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.41721854304635764, + "acc_stderr": 0.0402614149763461, + "acc_norm": 0.41721854304635764, + "acc_norm_stderr": 0.0402614149763461 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7860696517412935, + "acc_stderr": 0.028996909693328923, + "acc_norm": 0.7860696517412935, + "acc_norm_stderr": 0.028996909693328923 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5549132947976878, + "acc_stderr": 0.03789401760283647, + "acc_norm": 0.5549132947976878, + "acc_norm_stderr": 0.03789401760283647 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.4497354497354497, + "acc_stderr": 0.02562085704293665, + "acc_norm": 0.4497354497354497, + "acc_norm_stderr": 0.02562085704293665 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5, + "acc_stderr": 0.04181210050035455, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04181210050035455 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.8, + "acc_stderr": 0.04020151261036844, + "acc_norm": 0.8, + "acc_norm_stderr": 0.04020151261036844 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.6184971098265896, + "acc_stderr": 0.0261521986197268, + "acc_norm": 0.6184971098265896, + "acc_norm_stderr": 0.0261521986197268 + }, + 
"harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.6257668711656442, + "acc_stderr": 0.03802068102899614, + "acc_norm": 0.6257668711656442, + "acc_norm_stderr": 0.03802068102899614 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6327160493827161, + "acc_stderr": 0.02682280175950789, + "acc_norm": 0.6327160493827161, + "acc_norm_stderr": 0.02682280175950789 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6632124352331606, + "acc_stderr": 0.03410780251836184, + "acc_norm": 0.6632124352331606, + "acc_norm_stderr": 0.03410780251836184 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.38596491228070173, + "acc_stderr": 0.045796394220704355, + "acc_norm": 0.38596491228070173, + "acc_norm_stderr": 0.045796394220704355 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.7229357798165138, + "acc_stderr": 0.01918848259016954, + "acc_norm": 0.7229357798165138, + "acc_norm_stderr": 0.01918848259016954 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30158730158730157, + "acc_stderr": 0.041049472699033945, + "acc_norm": 0.30158730158730157, + "acc_norm_stderr": 0.041049472699033945 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.6274509803921569, + "acc_stderr": 0.02768418188330289, + "acc_norm": 0.6274509803921569, + "acc_norm_stderr": 0.02768418188330289 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.66, + "acc_stderr": 0.04760952285695237, + "acc_norm": 0.66, + "acc_norm_stderr": 0.04760952285695237 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.8016528925619835, + "acc_stderr": 0.03640118271990945, + "acc_norm": 0.8016528925619835, + "acc_norm_stderr": 0.03640118271990945 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.6381578947368421, + "acc_stderr": 0.039105257528497236, + "acc_norm": 0.6381578947368421, + "acc_norm_stderr": 
0.039105257528497236 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5604575163398693, + "acc_stderr": 0.020079420408087918, + "acc_norm": 0.5604575163398693, + "acc_norm_stderr": 0.020079420408087918 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.38652482269503546, + "acc_stderr": 0.02904919034254347, + "acc_norm": 0.38652482269503546, + "acc_norm_stderr": 0.02904919034254347 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.49107142857142855, + "acc_stderr": 0.04745033255489123, + "acc_norm": 0.49107142857142855, + "acc_norm_stderr": 0.04745033255489123 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.44907407407407407, + "acc_stderr": 0.03392238405321617, + "acc_norm": 0.44907407407407407, + "acc_norm_stderr": 0.03392238405321617 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.33519553072625696, + "acc_stderr": 0.01578800719018588, + "acc_norm": 0.33519553072625696, + "acc_norm_stderr": 0.01578800719018588 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.48, + "acc_stderr": 0.05021167315686779, + "acc_norm": 0.48, + "acc_norm_stderr": 0.05021167315686779 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.63, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.63, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5404411764705882, + "acc_stderr": 0.030273325077345755, + "acc_norm": 0.5404411764705882, + "acc_norm_stderr": 0.030273325077345755 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.689795918367347, + "acc_stderr": 0.02961345987248438, + "acc_norm": 0.689795918367347, + "acc_norm_stderr": 0.02961345987248438 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7805907172995781, + "acc_stderr": 0.026939106581553945, + "acc_norm": 0.7805907172995781, + "acc_norm_stderr": 0.026939106581553945 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3891786179921773, + 
"acc_stderr": 0.012452613934287017, + "acc_norm": 0.3891786179921773, + "acc_norm_stderr": 0.012452613934287017 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.7107843137254902, + "acc_stderr": 0.031822318676475544, + "acc_norm": 0.7107843137254902, + "acc_norm_stderr": 0.031822318676475544 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6424242424242425, + "acc_stderr": 0.03742597043806587, + "acc_norm": 0.6424242424242425, + "acc_norm_stderr": 0.03742597043806587 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2741738066095471, + "mc1_stderr": 0.015616518497219374, + "mc2": 0.4141680036183659, + "mc2_stderr": 0.01535864240868654 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4935064935064935, + "acc_stderr": 0.01718890435907731, + "acc_norm": 0.5076741440377804, + "acc_norm_stderr": 0.017188329219654276 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + 
"harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "vaiv/GeM2-Llamion-14B-Chat", + "model_sha": "bdbd9441cd1208868824487b3763fadd570f8fa1", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/vaiv/GeM2-Llamion-14B-LongChat/result_2024-06-09 01:27:51.json 
b/vaiv/GeM2-Llamion-14B-LongChat/result_2024-06-09 01:27:51.json new file mode 100644 index 0000000000000000000000000000000000000000..36ba3bb762b588ab5a798e4ff60ecb92872cba07 --- /dev/null +++ b/vaiv/GeM2-Llamion-14B-LongChat/result_2024-06-09 01:27:51.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4283276450511945, + "acc_stderr": 0.014460496367599019, + "acc_norm": 0.4803754266211604, + "acc_norm_stderr": 0.014600132075947103 + }, + "harness|ko_hellaswag|10": { + "acc": 0.44592710615415254, + "acc_stderr": 0.004960516570284905, + "acc_norm": 0.5868352917745469, + "acc_norm_stderr": 0.004913955705080121 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.631578947368421, + "acc_stderr": 0.03699658017656878, + "acc_norm": 0.631578947368421, + "acc_norm_stderr": 0.03699658017656878 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6504854368932039, + "acc_stderr": 0.047211885060971716, + "acc_norm": 0.6504854368932039, + "acc_norm_stderr": 0.047211885060971716 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.685823754789272, + "acc_stderr": 0.016599291735884907, + "acc_norm": 0.685823754789272, + "acc_norm_stderr": 0.016599291735884907 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4740740740740741, + "acc_stderr": 0.04313531696750573, + "acc_norm": 0.4740740740740741, + "acc_norm_stderr": 0.04313531696750573 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.548936170212766, + "acc_stderr": 0.03252909619613197, + "acc_norm": 0.548936170212766, + "acc_norm_stderr": 0.03252909619613197 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.46987951807228917, + "acc_stderr": 0.03885425420866766, + "acc_norm": 0.46987951807228917, + "acc_norm_stderr": 0.03885425420866766 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5916398713826366, + 
"acc_stderr": 0.02791705074848462, + "acc_norm": 0.5916398713826366, + "acc_norm_stderr": 0.02791705074848462 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.6188340807174888, + "acc_stderr": 0.032596251184168264, + "acc_norm": 0.6188340807174888, + "acc_norm_stderr": 0.032596251184168264 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6183206106870229, + "acc_stderr": 0.0426073515764456, + "acc_norm": 0.6183206106870229, + "acc_norm_stderr": 0.0426073515764456 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956913, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956913 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.03358618145732523, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.03358618145732523 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5310344827586206, + "acc_stderr": 0.04158632762097828, + "acc_norm": 0.5310344827586206, + "acc_norm_stderr": 0.04158632762097828 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.4117647058823529, + "acc_stderr": 0.048971049527263666, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.048971049527263666 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5336134453781513, + "acc_stderr": 0.03240501447690071, + "acc_norm": 0.5336134453781513, + "acc_norm_stderr": 0.03240501447690071 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5256410256410257, + "acc_stderr": 0.02531764972644868, + "acc_norm": 0.5256410256410257, + "acc_norm_stderr": 0.02531764972644868 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.68, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.68, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 
0.5648148148148148, + "acc_stderr": 0.04792898170907062, + "acc_norm": 0.5648148148148148, + "acc_norm_stderr": 0.04792898170907062 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.35960591133004927, + "acc_stderr": 0.03376458246509567, + "acc_norm": 0.35960591133004927, + "acc_norm_stderr": 0.03376458246509567 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5419354838709678, + "acc_stderr": 0.028343787250540615, + "acc_norm": 0.5419354838709678, + "acc_norm_stderr": 0.028343787250540615 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7905982905982906, + "acc_stderr": 0.02665569965392277, + "acc_norm": 0.7905982905982906, + "acc_norm_stderr": 0.02665569965392277 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5509433962264151, + "acc_stderr": 0.030612730713641092, + "acc_norm": 0.5509433962264151, + "acc_norm_stderr": 0.030612730713641092 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5818181818181818, + "acc_stderr": 0.04724577405731572, + "acc_norm": 0.5818181818181818, + "acc_norm_stderr": 0.04724577405731572 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.029116617606083018, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.029116617606083018 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33112582781456956, + "acc_stderr": 0.038425817186598696, + "acc_norm": 0.33112582781456956, + "acc_norm_stderr": 0.038425817186598696 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7213930348258707, + "acc_stderr": 0.031700561834973086, + "acc_norm": 0.7213930348258707, + "acc_norm_stderr": 0.031700561834973086 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5433526011560693, + "acc_stderr": 0.03798106566014498, + "acc_norm": 0.5433526011560693, + "acc_norm_stderr": 0.03798106566014498 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.4021164021164021, + "acc_stderr": 0.02525303255499769, + "acc_norm": 0.4021164021164021, 
+ "acc_norm_stderr": 0.02525303255499769 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5069444444444444, + "acc_stderr": 0.04180806750294938, + "acc_norm": 0.5069444444444444, + "acc_norm_stderr": 0.04180806750294938 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.76, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.76, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.6040462427745664, + "acc_stderr": 0.02632981334194625, + "acc_norm": 0.6040462427745664, + "acc_norm_stderr": 0.02632981334194625 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.50920245398773, + "acc_stderr": 0.03927705600787443, + "acc_norm": 0.50920245398773, + "acc_norm_stderr": 0.03927705600787443 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5740740740740741, + "acc_stderr": 0.027513747284379428, + "acc_norm": 0.5740740740740741, + "acc_norm_stderr": 0.027513747284379428 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5958549222797928, + "acc_stderr": 0.0354150857888402, + "acc_norm": 0.5958549222797928, + "acc_norm_stderr": 0.0354150857888402 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.34210526315789475, + "acc_stderr": 0.04462917535336936, + "acc_norm": 0.34210526315789475, + "acc_norm_stderr": 0.04462917535336936 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6678899082568808, + "acc_stderr": 0.020192682985423344, + "acc_norm": 0.6678899082568808, + "acc_norm_stderr": 0.020192682985423344 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3492063492063492, + "acc_stderr": 0.04263906892795132, + "acc_norm": 0.3492063492063492, + 
"acc_norm_stderr": 0.04263906892795132 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5490196078431373, + "acc_stderr": 0.028491993586171566, + "acc_norm": 0.5490196078431373, + "acc_norm_stderr": 0.028491993586171566 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.55, + "acc_stderr": 0.05, + "acc_norm": 0.55, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7024793388429752, + "acc_stderr": 0.04173349148083498, + "acc_norm": 0.7024793388429752, + "acc_norm_stderr": 0.04173349148083498 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5921052631578947, + "acc_stderr": 0.039993097127774734, + "acc_norm": 0.5921052631578947, + "acc_norm_stderr": 0.039993097127774734 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.48856209150326796, + "acc_stderr": 0.020222541515610863, + "acc_norm": 0.48856209150326796, + "acc_norm_stderr": 0.020222541515610863 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3829787234042553, + "acc_stderr": 0.028999080904806167, + "acc_norm": 0.3829787234042553, + "acc_norm_stderr": 0.028999080904806167 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.5, + "acc_stderr": 0.04745789978762494, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04745789978762494 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.033247089118091176, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.033247089118091176 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2927374301675978, + "acc_stderr": 0.015218109544410174, + "acc_norm": 0.2927374301675978, + "acc_norm_stderr": 0.015218109544410174 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 
0.04988876515698589 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4264705882352941, + "acc_stderr": 0.03004261583271487, + "acc_norm": 0.4264705882352941, + "acc_norm_stderr": 0.03004261583271487 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6612244897959184, + "acc_stderr": 0.030299506562154188, + "acc_norm": 0.6612244897959184, + "acc_norm_stderr": 0.030299506562154188 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7130801687763713, + "acc_stderr": 0.029443773022594693, + "acc_norm": 0.7130801687763713, + "acc_norm_stderr": 0.029443773022594693 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3709256844850065, + "acc_stderr": 0.012337391684530309, + "acc_norm": 0.3709256844850065, + "acc_norm_stderr": 0.012337391684530309 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6715686274509803, + "acc_stderr": 0.032962451101722294, + "acc_norm": 0.6715686274509803, + "acc_norm_stderr": 0.032962451101722294 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6303030303030303, + "acc_stderr": 0.03769430314512568, + "acc_norm": 0.6303030303030303, + "acc_norm_stderr": 0.03769430314512568 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2582619339045288, + "mc1_stderr": 0.015321821688476189, + "mc2": 0.39220892812170827, + "mc2_stderr": 0.015230762547471178 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.40613931523022434, + "acc_stderr": 0.016884749503191392, + "acc_norm": 0.44037780401416765, + "acc_norm_stderr": 0.017067699774312974 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, 
+ "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + 
"harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "vaiv/GeM2-Llamion-14B-LongChat", + "model_sha": "76988a75448ff2928dcc22c91e9bf8ec6c35cadd", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/vaiv/llamion-14b-base/result_2024-04-20 15:18:42.json b/vaiv/llamion-14b-base/result_2024-04-20 15:18:42.json new file mode 100644 index 0000000000000000000000000000000000000000..16b9fca0f9ef5a1ceabc3d00b0934e50f352d30e --- /dev/null +++ b/vaiv/llamion-14b-base/result_2024-04-20 15:18:42.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.447098976109215, + "acc_stderr": 0.014529380160526848, + "acc_norm": 0.492320819112628, + "acc_norm_stderr": 0.01460966744089257 + }, + "harness|ko_hellaswag|10": { + "acc": 0.48675562636924913, + "acc_stderr": 0.004988030554894802, + "acc_norm": 0.6606253734315873, + "acc_norm_stderr": 0.004725293905228257 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.7719298245614035, + "acc_stderr": 0.032180937956023566, + "acc_norm": 0.7719298245614035, + "acc_norm_stderr": 0.032180937956023566 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.7864077669902912, + "acc_stderr": 0.040580420156460344, + "acc_norm": 0.7864077669902912, + "acc_norm_stderr": 0.040580420156460344 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.789272030651341, + "acc_stderr": 0.01458381246586254, + "acc_norm": 0.789272030651341, + "acc_norm_stderr": 0.01458381246586254 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.5925925925925926, + "acc_stderr": 0.04244633238353228, + "acc_norm": 0.5925925925925926, + 
"acc_norm_stderr": 0.04244633238353228 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.6340425531914894, + "acc_stderr": 0.03148955829745529, + "acc_norm": 0.6340425531914894, + "acc_norm_stderr": 0.03148955829745529 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.6385542168674698, + "acc_stderr": 0.03740059382029321, + "acc_norm": 0.6385542168674698, + "acc_norm_stderr": 0.03740059382029321 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6913183279742765, + "acc_stderr": 0.026236965881153252, + "acc_norm": 0.6913183279742765, + "acc_norm_stderr": 0.026236965881153252 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.7130044843049327, + "acc_stderr": 0.030360379710291947, + "acc_norm": 0.7130044843049327, + "acc_norm_stderr": 0.030360379710291947 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.7786259541984732, + "acc_stderr": 0.036412970813137296, + "acc_norm": 0.7786259541984732, + "acc_norm_stderr": 0.036412970813137296 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.65, + "acc_stderr": 0.04793724854411018, + "acc_norm": 0.65, + "acc_norm_stderr": 0.04793724854411018 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7575757575757576, + "acc_stderr": 0.03053289223393203, + "acc_norm": 0.7575757575757576, + "acc_norm_stderr": 0.03053289223393203 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.6827586206896552, + "acc_stderr": 0.038783523721386215, + "acc_norm": 0.6827586206896552, + "acc_norm_stderr": 0.038783523721386215 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.5392156862745098, + "acc_stderr": 0.04959859966384181, + "acc_norm": 0.5392156862745098, + "acc_norm_stderr": 0.04959859966384181 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.7394957983193278, + "acc_stderr": 0.02851025151234193, + "acc_norm": 
0.7394957983193278, + "acc_norm_stderr": 0.02851025151234193 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.676923076923077, + "acc_stderr": 0.02371088850197058, + "acc_norm": 0.676923076923077, + "acc_norm_stderr": 0.02371088850197058 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.79, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.79, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.75, + "acc_stderr": 0.04186091791394607, + "acc_norm": 0.75, + "acc_norm_stderr": 0.04186091791394607 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.5320197044334976, + "acc_stderr": 0.03510766597959215, + "acc_norm": 0.5320197044334976, + "acc_norm_stderr": 0.03510766597959215 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6451612903225806, + "acc_stderr": 0.027218889773308757, + "acc_norm": 0.6451612903225806, + "acc_norm_stderr": 0.027218889773308757 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.8632478632478633, + "acc_stderr": 0.022509033937077802, + "acc_norm": 0.8632478632478633, + "acc_norm_stderr": 0.022509033937077802 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.7018867924528301, + "acc_stderr": 0.02815283794249387, + "acc_norm": 0.7018867924528301, + "acc_norm_stderr": 0.02815283794249387 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6909090909090909, + "acc_stderr": 0.044262946482000985, + "acc_norm": 0.6909090909090909, + "acc_norm_stderr": 0.044262946482000985 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.5481481481481482, + "acc_stderr": 0.030343862998512626, + "acc_norm": 0.5481481481481482, + "acc_norm_stderr": 0.030343862998512626 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.5761589403973509, + "acc_stderr": 0.04034846678603397, + 
"acc_norm": 0.5761589403973509, + "acc_norm_stderr": 0.04034846678603397 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.8208955223880597, + "acc_stderr": 0.027113286753111848, + "acc_norm": 0.8208955223880597, + "acc_norm_stderr": 0.027113286753111848 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.7572254335260116, + "acc_stderr": 0.0326926380614177, + "acc_norm": 0.7572254335260116, + "acc_norm_stderr": 0.0326926380614177 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.544973544973545, + "acc_stderr": 0.025646928361049398, + "acc_norm": 0.544973544973545, + "acc_norm_stderr": 0.025646928361049398 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.7361111111111112, + "acc_stderr": 0.03685651095897532, + "acc_norm": 0.7361111111111112, + "acc_norm_stderr": 0.03685651095897532 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.55, + "acc_stderr": 0.05, + "acc_norm": 0.55, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.9, + "acc_stderr": 0.030151134457776348, + "acc_norm": 0.9, + "acc_norm_stderr": 0.030151134457776348 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.7196531791907514, + "acc_stderr": 0.024182427496577605, + "acc_norm": 0.7196531791907514, + "acc_norm_stderr": 0.024182427496577605 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.7116564417177914, + "acc_stderr": 0.035590395316173425, + "acc_norm": 0.7116564417177914, + "acc_norm_stderr": 0.035590395316173425 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.7037037037037037, + "acc_stderr": 0.025407197798890155, + "acc_norm": 0.7037037037037037, + "acc_norm_stderr": 0.025407197798890155 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7512953367875648, + "acc_stderr": 0.031195840877700286, + "acc_norm": 
0.7512953367875648, + "acc_norm_stderr": 0.031195840877700286 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.5175438596491229, + "acc_stderr": 0.0470070803355104, + "acc_norm": 0.5175438596491229, + "acc_norm_stderr": 0.0470070803355104 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.8330275229357799, + "acc_stderr": 0.015990154885073396, + "acc_norm": 0.8330275229357799, + "acc_norm_stderr": 0.015990154885073396 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3492063492063492, + "acc_stderr": 0.04263906892795132, + "acc_norm": 0.3492063492063492, + "acc_norm_stderr": 0.04263906892795132 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.7777777777777778, + "acc_stderr": 0.023805186524888146, + "acc_norm": 0.7777777777777778, + "acc_norm_stderr": 0.023805186524888146 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.68, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.68, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.8099173553719008, + "acc_stderr": 0.035817969517092825, + "acc_norm": 0.8099173553719008, + "acc_norm_stderr": 0.035817969517092825 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.7302631578947368, + "acc_stderr": 0.036117805602848975, + "acc_norm": 0.7302631578947368, + "acc_norm_stderr": 0.036117805602848975 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.6699346405228758, + "acc_stderr": 0.019023726160724553, + "acc_norm": 0.6699346405228758, + "acc_norm_stderr": 0.019023726160724553 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.450354609929078, + "acc_stderr": 0.029680105565029036, + "acc_norm": 0.450354609929078, + "acc_norm_stderr": 0.029680105565029036 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.625, + "acc_stderr": 0.04595091388086298, + "acc_norm": 0.625, + "acc_norm_stderr": 0.04595091388086298 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.6851851851851852, + "acc_stderr": 
0.0316746870682898, + "acc_norm": 0.6851851851851852, + "acc_norm_stderr": 0.0316746870682898 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.5083798882681564, + "acc_stderr": 0.016720152794672486, + "acc_norm": 0.5083798882681564, + "acc_norm_stderr": 0.016720152794672486 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.63, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.63, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.6801470588235294, + "acc_stderr": 0.02833295951403122, + "acc_norm": 0.6801470588235294, + "acc_norm_stderr": 0.02833295951403122 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.7836734693877551, + "acc_stderr": 0.026358916334904028, + "acc_norm": 0.7836734693877551, + "acc_norm_stderr": 0.026358916334904028 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.8481012658227848, + "acc_stderr": 0.02336387809663245, + "acc_norm": 0.8481012658227848, + "acc_norm_stderr": 0.02336387809663245 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.43546284224250326, + "acc_stderr": 0.012663412101248344, + "acc_norm": 0.43546284224250326, + "acc_norm_stderr": 0.012663412101248344 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.8284313725490197, + "acc_stderr": 0.02646056956124063, + "acc_norm": 0.8284313725490197, + "acc_norm_stderr": 0.02646056956124063 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.7090909090909091, + "acc_stderr": 0.03546563019624335, + "acc_norm": 0.7090909090909091, + "acc_norm_stderr": 0.03546563019624335 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2668298653610771, + "mc1_stderr": 0.015483691939237269, + "mc2": 0.40635057569269595, + "mc2_stderr": 0.01521464320379055 + }, + "harness|ko_commongen_v2|2": { 
+ "acc": 0.5478158205430933, + "acc_stderr": 0.017111567130916792, + "acc_norm": 0.5631641086186541, + "acc_norm_stderr": 0.017052633559856076 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + 
"harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "vaiv/llamion-14b-base", + "model_sha": "8fc6c5e6c901f52d72d9f48f905320c5473ac0d8", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/vaiv/llamion-14b-chat/result_2024-04-20 15:18:55.json b/vaiv/llamion-14b-chat/result_2024-04-20 15:18:55.json new file mode 100644 index 0000000000000000000000000000000000000000..fec14dedc1019634bd24e21c2b07c7f38fa93614 --- /dev/null +++ b/vaiv/llamion-14b-chat/result_2024-04-20 15:18:55.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.43856655290102387, + "acc_stderr": 0.014500682618212865, + "acc_norm": 0.48208191126279865, + "acc_norm_stderr": 0.014602005585490976 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4666401115315674, + "acc_stderr": 0.004978662946687273, + "acc_norm": 0.6169089822744473, + "acc_norm_stderr": 0.004851466623601452 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.7192982456140351, + "acc_stderr": 0.034462962170884265, + 
"acc_norm": 0.7192982456140351, + "acc_norm_stderr": 0.034462962170884265 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.7475728155339806, + "acc_stderr": 0.043012503996908764, + "acc_norm": 0.7475728155339806, + "acc_norm_stderr": 0.043012503996908764 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.7420178799489144, + "acc_stderr": 0.01564583018834895, + "acc_norm": 0.7420178799489144, + "acc_norm_stderr": 0.01564583018834895 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.5481481481481482, + "acc_stderr": 0.04299268905480864, + "acc_norm": 0.5481481481481482, + "acc_norm_stderr": 0.04299268905480864 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.5829787234042553, + "acc_stderr": 0.032232762667117124, + "acc_norm": 0.5829787234042553, + "acc_norm_stderr": 0.032232762667117124 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.5240963855421686, + "acc_stderr": 0.038879718495972646, + "acc_norm": 0.5240963855421686, + "acc_norm_stderr": 0.038879718495972646 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6109324758842444, + "acc_stderr": 0.027690337536485372, + "acc_norm": 0.6109324758842444, + "acc_norm_stderr": 0.027690337536485372 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.6457399103139013, + "acc_stderr": 0.03210062154134987, + "acc_norm": 0.6457399103139013, + "acc_norm_stderr": 0.03210062154134987 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.648854961832061, + "acc_stderr": 0.0418644516301375, + "acc_norm": 0.648854961832061, + "acc_norm_stderr": 0.0418644516301375 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7222222222222222, + "acc_stderr": 0.03191178226713547, + "acc_norm": 
0.7222222222222222, + "acc_norm_stderr": 0.03191178226713547 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5862068965517241, + "acc_stderr": 0.041042692118062316, + "acc_norm": 0.5862068965517241, + "acc_norm_stderr": 0.041042692118062316 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.43137254901960786, + "acc_stderr": 0.04928099597287533, + "acc_norm": 0.43137254901960786, + "acc_norm_stderr": 0.04928099597287533 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6134453781512605, + "acc_stderr": 0.031631458075523776, + "acc_norm": 0.6134453781512605, + "acc_norm_stderr": 0.031631458075523776 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.541025641025641, + "acc_stderr": 0.025265525491284295, + "acc_norm": 0.541025641025641, + "acc_norm_stderr": 0.025265525491284295 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.69, + "acc_stderr": 0.046482319871173156, + "acc_norm": 0.69, + "acc_norm_stderr": 0.046482319871173156 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6851851851851852, + "acc_stderr": 0.04489931073591312, + "acc_norm": 0.6851851851851852, + "acc_norm_stderr": 0.04489931073591312 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4729064039408867, + "acc_stderr": 0.03512819077876106, + "acc_norm": 0.4729064039408867, + "acc_norm_stderr": 0.03512819077876106 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5838709677419355, + "acc_stderr": 0.028040981380761536, + "acc_norm": 0.5838709677419355, + "acc_norm_stderr": 0.028040981380761536 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.8290598290598291, + "acc_stderr": 0.024662496845209807, + "acc_norm": 0.8290598290598291, + "acc_norm_stderr": 0.024662496845209807 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.6226415094339622, + 
"acc_stderr": 0.02983280811479601, + "acc_norm": 0.6226415094339622, + "acc_norm_stderr": 0.02983280811479601 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6909090909090909, + "acc_stderr": 0.044262946482000985, + "acc_norm": 0.6909090909090909, + "acc_norm_stderr": 0.044262946482000985 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.34074074074074073, + "acc_stderr": 0.02889774874113114, + "acc_norm": 0.34074074074074073, + "acc_norm_stderr": 0.02889774874113114 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.41721854304635764, + "acc_stderr": 0.0402614149763461, + "acc_norm": 0.41721854304635764, + "acc_norm_stderr": 0.0402614149763461 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7860696517412935, + "acc_stderr": 0.028996909693328923, + "acc_norm": 0.7860696517412935, + "acc_norm_stderr": 0.028996909693328923 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5549132947976878, + "acc_stderr": 0.03789401760283647, + "acc_norm": 0.5549132947976878, + "acc_norm_stderr": 0.03789401760283647 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.4497354497354497, + "acc_stderr": 0.02562085704293665, + "acc_norm": 0.4497354497354497, + "acc_norm_stderr": 0.02562085704293665 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5, + "acc_stderr": 0.04181210050035455, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04181210050035455 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.8, + "acc_stderr": 0.04020151261036844, + "acc_norm": 0.8, + "acc_norm_stderr": 0.04020151261036844 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.6184971098265896, + "acc_stderr": 0.0261521986197268, + "acc_norm": 0.6184971098265896, + "acc_norm_stderr": 0.0261521986197268 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.6257668711656442, + 
"acc_stderr": 0.03802068102899614, + "acc_norm": 0.6257668711656442, + "acc_norm_stderr": 0.03802068102899614 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6327160493827161, + "acc_stderr": 0.02682280175950789, + "acc_norm": 0.6327160493827161, + "acc_norm_stderr": 0.02682280175950789 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6632124352331606, + "acc_stderr": 0.03410780251836184, + "acc_norm": 0.6632124352331606, + "acc_norm_stderr": 0.03410780251836184 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.38596491228070173, + "acc_stderr": 0.045796394220704355, + "acc_norm": 0.38596491228070173, + "acc_norm_stderr": 0.045796394220704355 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.7229357798165138, + "acc_stderr": 0.01918848259016954, + "acc_norm": 0.7229357798165138, + "acc_norm_stderr": 0.01918848259016954 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30158730158730157, + "acc_stderr": 0.041049472699033945, + "acc_norm": 0.30158730158730157, + "acc_norm_stderr": 0.041049472699033945 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.6274509803921569, + "acc_stderr": 0.02768418188330289, + "acc_norm": 0.6274509803921569, + "acc_norm_stderr": 0.02768418188330289 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.66, + "acc_stderr": 0.04760952285695237, + "acc_norm": 0.66, + "acc_norm_stderr": 0.04760952285695237 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.8016528925619835, + "acc_stderr": 0.03640118271990945, + "acc_norm": 0.8016528925619835, + "acc_norm_stderr": 0.03640118271990945 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.6381578947368421, + "acc_stderr": 0.039105257528497236, + "acc_norm": 0.6381578947368421, + "acc_norm_stderr": 0.039105257528497236 + }, + "harness|ko_mmlu_professional_psychology|5": { 
+ "acc": 0.5604575163398693, + "acc_stderr": 0.020079420408087918, + "acc_norm": 0.5604575163398693, + "acc_norm_stderr": 0.020079420408087918 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.38652482269503546, + "acc_stderr": 0.02904919034254347, + "acc_norm": 0.38652482269503546, + "acc_norm_stderr": 0.02904919034254347 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.49107142857142855, + "acc_stderr": 0.04745033255489123, + "acc_norm": 0.49107142857142855, + "acc_norm_stderr": 0.04745033255489123 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.44907407407407407, + "acc_stderr": 0.03392238405321617, + "acc_norm": 0.44907407407407407, + "acc_norm_stderr": 0.03392238405321617 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.33519553072625696, + "acc_stderr": 0.01578800719018588, + "acc_norm": 0.33519553072625696, + "acc_norm_stderr": 0.01578800719018588 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.48, + "acc_stderr": 0.05021167315686779, + "acc_norm": 0.48, + "acc_norm_stderr": 0.05021167315686779 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.63, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.63, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5404411764705882, + "acc_stderr": 0.030273325077345755, + "acc_norm": 0.5404411764705882, + "acc_norm_stderr": 0.030273325077345755 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.689795918367347, + "acc_stderr": 0.02961345987248438, + "acc_norm": 0.689795918367347, + "acc_norm_stderr": 0.02961345987248438 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7805907172995781, + "acc_stderr": 0.026939106581553945, + "acc_norm": 0.7805907172995781, + "acc_norm_stderr": 0.026939106581553945 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3891786179921773, + "acc_stderr": 0.012452613934287017, + "acc_norm": 0.3891786179921773, + 
"acc_norm_stderr": 0.012452613934287017 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.7107843137254902, + "acc_stderr": 0.031822318676475544, + "acc_norm": 0.7107843137254902, + "acc_norm_stderr": 0.031822318676475544 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6424242424242425, + "acc_stderr": 0.03742597043806587, + "acc_norm": 0.6424242424242425, + "acc_norm_stderr": 0.03742597043806587 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2741738066095471, + "mc1_stderr": 0.015616518497219374, + "mc2": 0.4141680036183659, + "mc2_stderr": 0.01535864240868654 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4935064935064935, + "acc_stderr": 0.01718890435907731, + "acc_norm": 0.5076741440377804, + "acc_norm_stderr": 0.017188329219654276 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "vaiv/llamion-14b-chat", + "model_sha": "638caf7adac5c2feb3bea67f1dfc5f3e8bf2ce4a", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/vicgalle/CarbonBeagle-11B-truthy/result_2024-05-17 15:51:48.json b/vicgalle/CarbonBeagle-11B-truthy/result_2024-05-17 15:51:48.json new file mode 
100644 index 0000000000000000000000000000000000000000..99c6543694b2f9bec8325c6c3e2113865704f6cb --- /dev/null +++ b/vicgalle/CarbonBeagle-11B-truthy/result_2024-05-17 15:51:48.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4121160409556314, + "acc_stderr": 0.0143839153022254, + "acc_norm": 0.47696245733788395, + "acc_norm_stderr": 0.014595873205358259 + }, + "harness|ko_hellaswag|10": { + "acc": 0.41814379605656243, + "acc_stderr": 0.004922459820434775, + "acc_norm": 0.5557657837084247, + "acc_norm_stderr": 0.004958649623815337 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5380116959064327, + "acc_stderr": 0.038237270928823064, + "acc_norm": 0.5380116959064327, + "acc_norm_stderr": 0.038237270928823064 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6601941747572816, + "acc_stderr": 0.046897659372781335, + "acc_norm": 0.6601941747572816, + "acc_norm_stderr": 0.046897659372781335 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5734355044699873, + "acc_stderr": 0.017686066975675666, + "acc_norm": 0.5734355044699873, + "acc_norm_stderr": 0.017686066975675666 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3925925925925926, + "acc_stderr": 0.04218506215368879, + "acc_norm": 0.3925925925925926, + "acc_norm_stderr": 0.04218506215368879 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4297872340425532, + "acc_stderr": 0.03236214467715564, + "acc_norm": 0.4297872340425532, + "acc_norm_stderr": 0.03236214467715564 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.43373493975903615, + "acc_stderr": 0.03858158940685517, + "acc_norm": 0.43373493975903615, + "acc_norm_stderr": 0.03858158940685517 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5273311897106109, + "acc_stderr": 0.028355633568328174, + "acc_norm": 0.5273311897106109, + 
"acc_norm_stderr": 0.028355633568328174 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5560538116591929, + "acc_stderr": 0.03334625674242728, + "acc_norm": 0.5560538116591929, + "acc_norm_stderr": 0.03334625674242728 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48854961832061067, + "acc_stderr": 0.043841400240780176, + "acc_norm": 0.48854961832061067, + "acc_norm_stderr": 0.043841400240780176 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562429, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562429 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6616161616161617, + "acc_stderr": 0.03371124142626304, + "acc_norm": 0.6616161616161617, + "acc_norm_stderr": 0.03371124142626304 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.503448275862069, + "acc_stderr": 0.04166567577101579, + "acc_norm": 0.503448275862069, + "acc_norm_stderr": 0.04166567577101579 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.4215686274509804, + "acc_stderr": 0.04913595201274498, + "acc_norm": 0.4215686274509804, + "acc_norm_stderr": 0.04913595201274498 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5756302521008403, + "acc_stderr": 0.03210479051015776, + "acc_norm": 0.5756302521008403, + "acc_norm_stderr": 0.03210479051015776 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5128205128205128, + "acc_stderr": 0.025342671293807247, + "acc_norm": 0.5128205128205128, + "acc_norm_stderr": 0.025342671293807247 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.57, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.57, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5833333333333334, + "acc_stderr": 0.04766075165356461, + "acc_norm": 
0.5833333333333334, + "acc_norm_stderr": 0.04766075165356461 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.03465304488406796, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.03465304488406796 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.532258064516129, + "acc_stderr": 0.028384747788813336, + "acc_norm": 0.532258064516129, + "acc_norm_stderr": 0.028384747788813336 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7649572649572649, + "acc_stderr": 0.02777883590493543, + "acc_norm": 0.7649572649572649, + "acc_norm_stderr": 0.02777883590493543 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.44528301886792454, + "acc_stderr": 0.030588052974270655, + "acc_norm": 0.44528301886792454, + "acc_norm_stderr": 0.030588052974270655 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6090909090909091, + "acc_stderr": 0.04673752333670237, + "acc_norm": 0.6090909090909091, + "acc_norm_stderr": 0.04673752333670237 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.028742040903948482, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.028742040903948482 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.32450331125827814, + "acc_stderr": 0.038227469376587525, + "acc_norm": 0.32450331125827814, + "acc_norm_stderr": 0.038227469376587525 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6169154228855721, + "acc_stderr": 0.034375193373382504, + "acc_norm": 0.6169154228855721, + "acc_norm_stderr": 0.034375193373382504 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.44508670520231214, + "acc_stderr": 0.03789401760283647, + "acc_norm": 0.44508670520231214, + "acc_norm_stderr": 0.03789401760283647 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.4126984126984127, + "acc_stderr": 0.025355741263055263, + "acc_norm": 0.4126984126984127, + "acc_norm_stderr": 0.025355741263055263 + }, + 
"harness|ko_mmlu_college_biology|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.04155319955593146, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.04155319955593146 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.66, + "acc_stderr": 0.04760952285695237, + "acc_norm": 0.66, + "acc_norm_stderr": 0.04760952285695237 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5433526011560693, + "acc_stderr": 0.026817718130348916, + "acc_norm": 0.5433526011560693, + "acc_norm_stderr": 0.026817718130348916 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4662576687116564, + "acc_stderr": 0.039194155450484096, + "acc_norm": 0.4662576687116564, + "acc_norm_stderr": 0.039194155450484096 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5370370370370371, + "acc_stderr": 0.027744313443376536, + "acc_norm": 0.5370370370370371, + "acc_norm_stderr": 0.027744313443376536 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6424870466321243, + "acc_stderr": 0.034588160421810114, + "acc_norm": 0.6424870466321243, + "acc_norm_stderr": 0.034588160421810114 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.43859649122807015, + "acc_stderr": 0.04668000738510455, + "acc_norm": 0.43859649122807015, + "acc_norm_stderr": 0.04668000738510455 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6018348623853211, + "acc_stderr": 0.020987989422654268, + "acc_norm": 0.6018348623853211, + "acc_norm_stderr": 0.020987989422654268 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4126984126984127, + "acc_stderr": 0.04403438954768177, + "acc_norm": 0.4126984126984127, + "acc_norm_stderr": 
0.04403438954768177 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5130718954248366, + "acc_stderr": 0.028620130800700246, + "acc_norm": 0.5130718954248366, + "acc_norm_stderr": 0.028620130800700246 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.44, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.44, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.043913262867240704, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.043913262867240704 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5328947368421053, + "acc_stderr": 0.04060127035236395, + "acc_norm": 0.5328947368421053, + "acc_norm_stderr": 0.04060127035236395 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.02019659493354119, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.02019659493354119 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3475177304964539, + "acc_stderr": 0.028406627809590947, + "acc_norm": 0.3475177304964539, + "acc_norm_stderr": 0.028406627809590947 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.38392857142857145, + "acc_stderr": 0.04616143075028547, + "acc_norm": 0.38392857142857145, + "acc_norm_stderr": 0.04616143075028547 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.49537037037037035, + "acc_stderr": 0.03409825519163572, + "acc_norm": 0.49537037037037035, + "acc_norm_stderr": 0.03409825519163572 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2737430167597765, + "acc_stderr": 0.014912413096372432, + "acc_norm": 0.2737430167597765, + "acc_norm_stderr": 0.014912413096372432 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.67, + "acc_stderr": 0.047258156262526094, + "acc_norm": 
0.67, + "acc_norm_stderr": 0.047258156262526094 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.40808823529411764, + "acc_stderr": 0.029855261393483927, + "acc_norm": 0.40808823529411764, + "acc_norm_stderr": 0.029855261393483927 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5591836734693878, + "acc_stderr": 0.03178419114175363, + "acc_norm": 0.5591836734693878, + "acc_norm_stderr": 0.03178419114175363 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.679324894514768, + "acc_stderr": 0.03038193194999041, + "acc_norm": 0.679324894514768, + "acc_norm_stderr": 0.03038193194999041 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3624511082138201, + "acc_stderr": 0.012277512533252518, + "acc_norm": 0.3624511082138201, + "acc_norm_stderr": 0.012277512533252518 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.23039215686274508, + "acc_stderr": 0.029554292605695066, + "acc_norm": 0.23039215686274508, + "acc_norm_stderr": 0.029554292605695066 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.22424242424242424, + "acc_stderr": 0.032568666616811015, + "acc_norm": 0.22424242424242424, + "acc_norm_stderr": 0.032568666616811015 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.43329253365973075, + "mc1_stderr": 0.017347024450107502, + "mc2": 0.6076704873070271, + "mc2_stderr": 0.016057367013989104 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5206611570247934, + "acc_stderr": 0.017175671279836446, + "acc_norm": 0.5218417945690673, + "acc_norm_stderr": 0.01717394447429438 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + 
"harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + 
"harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "vicgalle/CarbonBeagle-11B-truthy", + "model_sha": "476cd2a6d938bddb38dfbeb4cb21e3e34303413d", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/vicgalle/CarbonBeagle-11B/result_2024-06-17 17:19:04.json b/vicgalle/CarbonBeagle-11B/result_2024-06-17 17:19:04.json new file mode 100644 index 0000000000000000000000000000000000000000..a3365a9ce4a7bb6cacb43890c9683034c4449f92 --- /dev/null +++ b/vicgalle/CarbonBeagle-11B/result_2024-06-17 17:19:04.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4129692832764505, + "acc_stderr": 0.014388344935398326, + "acc_norm": 0.4761092150170648, + "acc_norm_stderr": 0.014594701798071654 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4182433778131846, + "acc_stderr": 0.004922624636945243, + "acc_norm": 0.5560645289782912, + "acc_norm_stderr": 0.004958314114266496 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.03811079669833531, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.03811079669833531 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6407766990291263, + "acc_stderr": 0.04750458399041696, + "acc_norm": 0.6407766990291263, + "acc_norm_stderr": 0.04750458399041696 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5696040868454662, + "acc_stderr": 0.01770586877629241, + "acc_norm": 0.5696040868454662, + "acc_norm_stderr": 0.01770586877629241 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4222222222222222, + "acc_stderr": 
0.042667634040995814, + "acc_norm": 0.4222222222222222, + "acc_norm_stderr": 0.042667634040995814 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4297872340425532, + "acc_stderr": 0.03236214467715564, + "acc_norm": 0.4297872340425532, + "acc_norm_stderr": 0.03236214467715564 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.45180722891566266, + "acc_stderr": 0.03874371556587953, + "acc_norm": 0.45180722891566266, + "acc_norm_stderr": 0.03874371556587953 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5498392282958199, + "acc_stderr": 0.028256660723360177, + "acc_norm": 0.5498392282958199, + "acc_norm_stderr": 0.028256660723360177 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5426008968609866, + "acc_stderr": 0.033435777055830646, + "acc_norm": 0.5426008968609866, + "acc_norm_stderr": 0.033435777055830646 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5343511450381679, + "acc_stderr": 0.043749285605997376, + "acc_norm": 0.5343511450381679, + "acc_norm_stderr": 0.043749285605997376 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6717171717171717, + "acc_stderr": 0.03345678422756777, + "acc_norm": 0.6717171717171717, + "acc_norm_stderr": 0.03345678422756777 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.47586206896551725, + "acc_stderr": 0.041618085035015295, + "acc_norm": 0.47586206896551725, + "acc_norm_stderr": 0.041618085035015295 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3235294117647059, + "acc_stderr": 0.04655010411319616, + "acc_norm": 0.3235294117647059, + "acc_norm_stderr": 0.04655010411319616 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 
0.5546218487394958, + "acc_stderr": 0.03228410626716391, + "acc_norm": 0.5546218487394958, + "acc_norm_stderr": 0.03228410626716391 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5384615384615384, + "acc_stderr": 0.02527589207024065, + "acc_norm": 0.5384615384615384, + "acc_norm_stderr": 0.02527589207024065 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5648148148148148, + "acc_stderr": 0.04792898170907062, + "acc_norm": 0.5648148148148148, + "acc_norm_stderr": 0.04792898170907062 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39408866995073893, + "acc_stderr": 0.03438157967036545, + "acc_norm": 0.39408866995073893, + "acc_norm_stderr": 0.03438157967036545 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5612903225806452, + "acc_stderr": 0.028229497320317213, + "acc_norm": 0.5612903225806452, + "acc_norm_stderr": 0.028229497320317213 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7777777777777778, + "acc_stderr": 0.027236013946196673, + "acc_norm": 0.7777777777777778, + "acc_norm_stderr": 0.027236013946196673 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.46037735849056605, + "acc_stderr": 0.03067609659938918, + "acc_norm": 0.46037735849056605, + "acc_norm_stderr": 0.03067609659938918 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5545454545454546, + "acc_stderr": 0.047605488214603246, + "acc_norm": 0.5545454545454546, + "acc_norm_stderr": 0.047605488214603246 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.31851851851851853, + "acc_stderr": 0.02840653309060846, + "acc_norm": 0.31851851851851853, + "acc_norm_stderr": 0.02840653309060846 + }, + 
"harness|ko_mmlu_high_school_physics|5": { + "acc": 0.32450331125827814, + "acc_stderr": 0.038227469376587525, + "acc_norm": 0.32450331125827814, + "acc_norm_stderr": 0.038227469376587525 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6766169154228856, + "acc_stderr": 0.03307615947979035, + "acc_norm": 0.6766169154228856, + "acc_norm_stderr": 0.03307615947979035 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4508670520231214, + "acc_stderr": 0.03794012674697029, + "acc_norm": 0.4508670520231214, + "acc_norm_stderr": 0.03794012674697029 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.41798941798941797, + "acc_stderr": 0.02540255550326091, + "acc_norm": 0.41798941798941797, + "acc_norm_stderr": 0.02540255550326091 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4513888888888889, + "acc_stderr": 0.04161402398403279, + "acc_norm": 0.4513888888888889, + "acc_norm_stderr": 0.04161402398403279 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.68, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.68, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5751445086705202, + "acc_stderr": 0.026613350840261736, + "acc_norm": 0.5751445086705202, + "acc_norm_stderr": 0.026613350840261736 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4785276073619632, + "acc_stderr": 0.03924746876751129, + "acc_norm": 0.4785276073619632, + "acc_norm_stderr": 0.03924746876751129 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5339506172839507, + "acc_stderr": 0.027756535257347666, + "acc_norm": 0.5339506172839507, + "acc_norm_stderr": 0.027756535257347666 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + 
"harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6476683937823834, + "acc_stderr": 0.03447478286414355, + "acc_norm": 0.6476683937823834, + "acc_norm_stderr": 0.03447478286414355 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.40350877192982454, + "acc_stderr": 0.04615186962583704, + "acc_norm": 0.40350877192982454, + "acc_norm_stderr": 0.04615186962583704 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6146788990825688, + "acc_stderr": 0.020865850852794125, + "acc_norm": 0.6146788990825688, + "acc_norm_stderr": 0.020865850852794125 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.0442626668137991, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.0442626668137991 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5065359477124183, + "acc_stderr": 0.028627470550556047, + "acc_norm": 0.5065359477124183, + "acc_norm_stderr": 0.028627470550556047 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6528925619834711, + "acc_stderr": 0.043457245702925335, + "acc_norm": 0.6528925619834711, + "acc_norm_stderr": 0.043457245702925335 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5526315789473685, + "acc_stderr": 0.04046336883978251, + "acc_norm": 0.5526315789473685, + "acc_norm_stderr": 0.04046336883978251 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.46405228758169936, + "acc_stderr": 0.02017548876548403, + "acc_norm": 0.46405228758169936, + "acc_norm_stderr": 0.02017548876548403 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3617021276595745, + "acc_stderr": 0.028663820147199492, + "acc_norm": 0.3617021276595745, + "acc_norm_stderr": 0.028663820147199492 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4017857142857143, + "acc_stderr": 0.04653333146973647, + 
"acc_norm": 0.4017857142857143, + "acc_norm_stderr": 0.04653333146973647 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.034093869469927006, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.034093869469927006 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24916201117318434, + "acc_stderr": 0.014465893829859926, + "acc_norm": 0.24916201117318434, + "acc_norm_stderr": 0.014465893829859926 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.68, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.68, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.40441176470588236, + "acc_stderr": 0.029812630701569746, + "acc_norm": 0.40441176470588236, + "acc_norm_stderr": 0.029812630701569746 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6122448979591837, + "acc_stderr": 0.031192230726795656, + "acc_norm": 0.6122448979591837, + "acc_norm_stderr": 0.031192230726795656 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6877637130801688, + "acc_stderr": 0.030165137867847015, + "acc_norm": 0.6877637130801688, + "acc_norm_stderr": 0.030165137867847015 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.378748370273794, + "acc_stderr": 0.012389052105003753, + "acc_norm": 0.378748370273794, + "acc_norm_stderr": 0.012389052105003753 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.23039215686274508, + "acc_stderr": 0.02955429260569506, + "acc_norm": 0.23039215686274508, + "acc_norm_stderr": 0.02955429260569506 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.21212121212121213, + "acc_stderr": 0.031922715695482995, + "acc_norm": 0.21212121212121213, + "acc_norm_stderr": 0.031922715695482995 + }, + 
"harness|ko_truthfulqa_mc|0": { + "mc1": 0.35128518971848227, + "mc1_stderr": 0.0167113581635444, + "mc2": 0.510147924152594, + "mc2_stderr": 0.016393549288025715 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4769775678866588, + "acc_stderr": 0.017172121546727634, + "acc_norm": 0.4852420306965762, + "acc_norm_stderr": 0.017182864434998564 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "vicgalle/CarbonBeagle-11B", + "model_sha": "3fe9bf5327606d013b182fed17a472f5f043759b", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/vicgalle/Configurable-Hermes-2-Pro-Llama-3-8B/result_2024-07-02 14:12:39.json b/vicgalle/Configurable-Hermes-2-Pro-Llama-3-8B/result_2024-07-02 14:12:39.json new file mode 100644 index 0000000000000000000000000000000000000000..857555e75772b45324ec1470da775a6a64849e52 --- /dev/null +++ b/vicgalle/Configurable-Hermes-2-Pro-Llama-3-8B/result_2024-07-02 14:12:39.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.39761092150170646, + "acc_stderr": 0.014301752223279536, + "acc_norm": 0.44283276450511944, + "acc_norm_stderr": 0.014515573873348902 + }, + "harness|ko_hellaswag|10": 
{ + "acc": 0.3942441744672376, + "acc_stderr": 0.00487688998311084, + "acc_norm": 0.5155347540330611, + "acc_norm_stderr": 0.004987372476207027 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6549707602339181, + "acc_stderr": 0.036459813773888065, + "acc_norm": 0.6549707602339181, + "acc_norm_stderr": 0.036459813773888065 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5922330097087378, + "acc_stderr": 0.04865777570410769, + "acc_norm": 0.5922330097087378, + "acc_norm_stderr": 0.04865777570410769 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5351213282247765, + "acc_stderr": 0.017835798806290642, + "acc_norm": 0.5351213282247765, + "acc_norm_stderr": 0.017835798806290642 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4148148148148148, + "acc_stderr": 0.04256193767901407, + "acc_norm": 0.4148148148148148, + "acc_norm_stderr": 0.04256193767901407 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542124, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542124 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4553191489361702, + "acc_stderr": 0.03255525359340355, + "acc_norm": 0.4553191489361702, + "acc_norm_stderr": 0.03255525359340355 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.43373493975903615, + "acc_stderr": 0.03858158940685515, + "acc_norm": 0.43373493975903615, + "acc_norm_stderr": 0.03858158940685515 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5337620578778135, + "acc_stderr": 0.028333277109562804, + "acc_norm": 0.5337620578778135, + "acc_norm_stderr": 0.028333277109562804 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5201793721973094, + "acc_stderr": 0.033530461674123, + "acc_norm": 0.5201793721973094, + "acc_norm_stderr": 0.033530461674123 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5267175572519084, + "acc_stderr": 0.04379024936553894, + "acc_norm": 0.5267175572519084, + "acc_norm_stderr": 0.04379024936553894 + }, + 
"harness|ko_mmlu_medical_genetics|5": { + "acc": 0.45, + "acc_stderr": 0.04999999999999999, + "acc_norm": 0.45, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5707070707070707, + "acc_stderr": 0.03526552724601198, + "acc_norm": 0.5707070707070707, + "acc_norm_stderr": 0.03526552724601198 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5310344827586206, + "acc_stderr": 0.04158632762097828, + "acc_norm": 0.5310344827586206, + "acc_norm_stderr": 0.04158632762097828 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.35294117647058826, + "acc_stderr": 0.04755129616062946, + "acc_norm": 0.35294117647058826, + "acc_norm_stderr": 0.04755129616062946 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5084033613445378, + "acc_stderr": 0.03247390276569669, + "acc_norm": 0.5084033613445378, + "acc_norm_stderr": 0.03247390276569669 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.48205128205128206, + "acc_stderr": 0.02533466708095496, + "acc_norm": 0.48205128205128206, + "acc_norm_stderr": 0.02533466708095496 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.64, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.64, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5925925925925926, + "acc_stderr": 0.047500773411999854, + "acc_norm": 0.5925925925925926, + "acc_norm_stderr": 0.047500773411999854 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3891625615763547, + "acc_stderr": 0.0343046241610387, + "acc_norm": 0.3891625615763547, + "acc_norm_stderr": 0.0343046241610387 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5161290322580645, + "acc_stderr": 0.028429203176724555, + "acc_norm": 0.5161290322580645, + "acc_norm_stderr": 
0.028429203176724555 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7564102564102564, + "acc_stderr": 0.0281209665039144, + "acc_norm": 0.7564102564102564, + "acc_norm_stderr": 0.0281209665039144 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5471698113207547, + "acc_stderr": 0.03063562795796182, + "acc_norm": 0.5471698113207547, + "acc_norm_stderr": 0.03063562795796182 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5727272727272728, + "acc_stderr": 0.04738198703545483, + "acc_norm": 0.5727272727272728, + "acc_norm_stderr": 0.04738198703545483 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.362962962962963, + "acc_stderr": 0.029318203645206865, + "acc_norm": 0.362962962962963, + "acc_norm_stderr": 0.029318203645206865 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3576158940397351, + "acc_stderr": 0.03913453431177258, + "acc_norm": 0.3576158940397351, + "acc_norm_stderr": 0.03913453431177258 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6517412935323383, + "acc_stderr": 0.03368787466115459, + "acc_norm": 0.6517412935323383, + "acc_norm_stderr": 0.03368787466115459 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4797687861271676, + "acc_stderr": 0.03809342081273958, + "acc_norm": 0.4797687861271676, + "acc_norm_stderr": 0.03809342081273958 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.024870815251057093, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.024870815251057093 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5, + "acc_stderr": 0.04181210050035455, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04181210050035455 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.66, + "acc_stderr": 0.04760952285695237, + "acc_norm": 0.66, + "acc_norm_stderr": 
0.04760952285695237 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5173410404624278, + "acc_stderr": 0.026902900458666647, + "acc_norm": 0.5173410404624278, + "acc_norm_stderr": 0.026902900458666647 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.48466257668711654, + "acc_stderr": 0.039265223787088424, + "acc_norm": 0.48466257668711654, + "acc_norm_stderr": 0.039265223787088424 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5123456790123457, + "acc_stderr": 0.027812262269327235, + "acc_norm": 0.5123456790123457, + "acc_norm_stderr": 0.027812262269327235 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695234, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695234 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5751295336787565, + "acc_stderr": 0.035674713352125395, + "acc_norm": 0.5751295336787565, + "acc_norm_stderr": 0.035674713352125395 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3684210526315789, + "acc_stderr": 0.04537815354939391, + "acc_norm": 0.3684210526315789, + "acc_norm_stderr": 0.04537815354939391 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5853211009174312, + "acc_stderr": 0.021122903208602585, + "acc_norm": 0.5853211009174312, + "acc_norm_stderr": 0.021122903208602585 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.46825396825396826, + "acc_stderr": 0.044631127206771704, + "acc_norm": 0.46825396825396826, + "acc_norm_stderr": 0.044631127206771704 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5261437908496732, + "acc_stderr": 0.028590752958852394, + "acc_norm": 0.5261437908496732, + "acc_norm_stderr": 0.028590752958852394 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6776859504132231, + "acc_stderr": 0.04266416363352168, + "acc_norm": 
0.6776859504132231, + "acc_norm_stderr": 0.04266416363352168 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.506578947368421, + "acc_stderr": 0.040685900502249704, + "acc_norm": 0.506578947368421, + "acc_norm_stderr": 0.040685900502249704 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.41013071895424835, + "acc_stderr": 0.01989841271763589, + "acc_norm": 0.41013071895424835, + "acc_norm_stderr": 0.01989841271763589 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.31560283687943264, + "acc_stderr": 0.027724989449509314, + "acc_norm": 0.31560283687943264, + "acc_norm_stderr": 0.027724989449509314 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.44642857142857145, + "acc_stderr": 0.047184714852195886, + "acc_norm": 0.44642857142857145, + "acc_norm_stderr": 0.047184714852195886 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4027777777777778, + "acc_stderr": 0.03344887382997865, + "acc_norm": 0.4027777777777778, + "acc_norm_stderr": 0.03344887382997865 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.32737430167597764, + "acc_stderr": 0.015694238967737383, + "acc_norm": 0.32737430167597764, + "acc_norm_stderr": 0.015694238967737383 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3897058823529412, + "acc_stderr": 0.0296246635811597, + "acc_norm": 0.3897058823529412, + "acc_norm_stderr": 0.0296246635811597 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6326530612244898, + "acc_stderr": 0.03086214492108756, + "acc_norm": 0.6326530612244898, + "acc_norm_stderr": 0.03086214492108756 + }, + "harness|ko_mmlu_high_school_world_history|5": { + 
"acc": 0.6708860759493671, + "acc_stderr": 0.03058732629470237, + "acc_norm": 0.6708860759493671, + "acc_norm_stderr": 0.03058732629470237 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.35528031290743156, + "acc_stderr": 0.012223623364044037, + "acc_norm": 0.35528031290743156, + "acc_norm_stderr": 0.012223623364044037 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.553921568627451, + "acc_stderr": 0.034888454513049734, + "acc_norm": 0.553921568627451, + "acc_norm_stderr": 0.034888454513049734 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.03756335775187896, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.03756335775187896 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3635250917992656, + "mc1_stderr": 0.016838862883965817, + "mc2": 0.5313038882196374, + "mc2_stderr": 0.01606560416598617 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.48406139315230223, + "acc_stderr": 0.017181617837190195, + "acc_norm": 0.5053128689492326, + "acc_norm_stderr": 0.01718938362722971 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + 
"harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "vicgalle/Configurable-Hermes-2-Pro-Llama-3-8B", + "model_sha": "3cb5792509966a963645be24fdbeb2e7dc6cac15", + "model_dtype": "torch.float16", + "lighteval_sha": "", + 
"num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/vicgalle/Configurable-Janus-7B/result_2024-06-14 07:07:18.json b/vicgalle/Configurable-Janus-7B/result_2024-06-14 07:07:18.json new file mode 100644 index 0000000000000000000000000000000000000000..7b3433da97f54acc73d70c865f40713ea5179286 --- /dev/null +++ b/vicgalle/Configurable-Janus-7B/result_2024-06-14 07:07:18.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.34215017064846415, + "acc_stderr": 0.01386415215917728, + "acc_norm": 0.3993174061433447, + "acc_norm_stderr": 0.014312094557946704 + }, + "harness|ko_hellaswag|10": { + "acc": 0.38109938259310894, + "acc_stderr": 0.00484664373566655, + "acc_norm": 0.49591714797849035, + "acc_norm_stderr": 0.004989615052547476 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5029239766081871, + "acc_stderr": 0.03834759370936839, + "acc_norm": 0.5029239766081871, + "acc_norm_stderr": 0.03834759370936839 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6019417475728155, + "acc_stderr": 0.04846748253977238, + "acc_norm": 0.6019417475728155, + "acc_norm_stderr": 0.04846748253977238 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4725415070242657, + "acc_stderr": 0.017852981266633955, + "acc_norm": 0.4725415070242657, + "acc_norm_stderr": 0.017852981266633955 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34814814814814815, + "acc_stderr": 0.041153246103369526, + "acc_norm": 0.34814814814814815, + "acc_norm_stderr": 0.041153246103369526 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4, + "acc_stderr": 0.03202563076101735, + "acc_norm": 0.4, + "acc_norm_stderr": 0.03202563076101735 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.37349397590361444, + 
"acc_stderr": 0.03765845117168863, + "acc_norm": 0.37349397590361444, + "acc_norm_stderr": 0.03765845117168863 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4533762057877814, + "acc_stderr": 0.02827435985489425, + "acc_norm": 0.4533762057877814, + "acc_norm_stderr": 0.02827435985489425 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4439461883408072, + "acc_stderr": 0.03334625674242728, + "acc_norm": 0.4439461883408072, + "acc_norm_stderr": 0.03334625674242728 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48854961832061067, + "acc_stderr": 0.043841400240780176, + "acc_norm": 0.48854961832061067, + "acc_norm_stderr": 0.043841400240780176 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5808080808080808, + "acc_stderr": 0.03515520728670417, + "acc_norm": 0.5808080808080808, + "acc_norm_stderr": 0.03515520728670417 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4206896551724138, + "acc_stderr": 0.0411391498118926, + "acc_norm": 0.4206896551724138, + "acc_norm_stderr": 0.0411391498118926 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3235294117647059, + "acc_stderr": 0.046550104113196177, + "acc_norm": 0.3235294117647059, + "acc_norm_stderr": 0.046550104113196177 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5252100840336135, + "acc_stderr": 0.03243718055137411, + "acc_norm": 0.5252100840336135, + "acc_norm_stderr": 0.03243718055137411 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.025294608023986483, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.025294608023986483 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.64, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.64, + "acc_norm_stderr": 0.04824181513244218 + }, + 
"harness|ko_mmlu_global_facts|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5185185185185185, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.5185185185185185, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.03481904844438803, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.03481904844438803 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.44193548387096776, + "acc_stderr": 0.02825155790684974, + "acc_norm": 0.44193548387096776, + "acc_norm_stderr": 0.02825155790684974 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7393162393162394, + "acc_stderr": 0.02876034895652341, + "acc_norm": 0.7393162393162394, + "acc_norm_stderr": 0.02876034895652341 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.43018867924528303, + "acc_stderr": 0.030471445867183228, + "acc_norm": 0.43018867924528303, + "acc_norm_stderr": 0.030471445867183228 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.04769300568972744, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.04769300568972744 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3296296296296296, + "acc_stderr": 0.028661201116524582, + "acc_norm": 0.3296296296296296, + "acc_norm_stderr": 0.028661201116524582 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33112582781456956, + "acc_stderr": 0.038425817186598696, + "acc_norm": 0.33112582781456956, + "acc_norm_stderr": 0.038425817186598696 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6268656716417911, + "acc_stderr": 0.034198326081760065, + "acc_norm": 0.6268656716417911, + "acc_norm_stderr": 0.034198326081760065 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3930635838150289, + "acc_stderr": 0.037242495958177295, + "acc_norm": 
0.3930635838150289, + "acc_norm_stderr": 0.037242495958177295 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.02467786284133278, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.02467786284133278 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.040166600304512336, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.040166600304512336 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.57, + "acc_stderr": 0.04975698519562426, + "acc_norm": 0.57, + "acc_norm_stderr": 0.04975698519562426 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5346820809248555, + "acc_stderr": 0.026854257928258886, + "acc_norm": 0.5346820809248555, + "acc_norm_stderr": 0.026854257928258886 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5337423312883436, + "acc_stderr": 0.039194155450484096, + "acc_norm": 0.5337423312883436, + "acc_norm_stderr": 0.039194155450484096 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.027777777777777804, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.027777777777777804 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5284974093264249, + "acc_stderr": 0.036025735712884414, + "acc_norm": 0.5284974093264249, + "acc_norm_stderr": 0.036025735712884414 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3684210526315789, + "acc_stderr": 0.04537815354939392, + "acc_norm": 0.3684210526315789, + "acc_norm_stderr": 0.04537815354939392 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.4954128440366973, + "acc_stderr": 
0.021436420955529435, + "acc_norm": 0.4954128440366973, + "acc_norm_stderr": 0.021436420955529435 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3492063492063492, + "acc_stderr": 0.04263906892795133, + "acc_norm": 0.3492063492063492, + "acc_norm_stderr": 0.04263906892795133 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.45751633986928103, + "acc_stderr": 0.028526383452142624, + "acc_norm": 0.45751633986928103, + "acc_norm_stderr": 0.028526383452142624 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6776859504132231, + "acc_stderr": 0.04266416363352167, + "acc_norm": 0.6776859504132231, + "acc_norm_stderr": 0.04266416363352167 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.46710526315789475, + "acc_stderr": 0.040601270352363966, + "acc_norm": 0.46710526315789475, + "acc_norm_stderr": 0.040601270352363966 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4084967320261438, + "acc_stderr": 0.01988622103750187, + "acc_norm": 0.4084967320261438, + "acc_norm_stderr": 0.01988622103750187 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3404255319148936, + "acc_stderr": 0.028267657482650154, + "acc_norm": 0.3404255319148936, + "acc_norm_stderr": 0.028267657482650154 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4107142857142857, + "acc_stderr": 0.04669510663875192, + "acc_norm": 0.4107142857142857, + "acc_norm_stderr": 0.04669510663875192 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.35648148148148145, + "acc_stderr": 0.032664783315272714, + "acc_norm": 0.35648148148148145, + "acc_norm_stderr": 0.032664783315272714 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.3229050279329609, + "acc_stderr": 0.015638440380241484, + "acc_norm": 0.3229050279329609, + "acc_norm_stderr": 0.015638440380241484 + }, + 
"harness|ko_mmlu_college_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3639705882352941, + "acc_stderr": 0.029227192460032025, + "acc_norm": 0.3639705882352941, + "acc_norm_stderr": 0.029227192460032025 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5387755102040817, + "acc_stderr": 0.031912820526692774, + "acc_norm": 0.5387755102040817, + "acc_norm_stderr": 0.031912820526692774 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6371308016877637, + "acc_stderr": 0.03129920825530213, + "acc_norm": 0.6371308016877637, + "acc_norm_stderr": 0.03129920825530213 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.35984354628422427, + "acc_stderr": 0.01225826048368981, + "acc_norm": 0.35984354628422427, + "acc_norm_stderr": 0.01225826048368981 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4950980392156863, + "acc_stderr": 0.03509143375606786, + "acc_norm": 0.4950980392156863, + "acc_norm_stderr": 0.03509143375606786 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4909090909090909, + "acc_stderr": 0.03903698647748441, + "acc_norm": 0.4909090909090909, + "acc_norm_stderr": 0.03903698647748441 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.35862913096695226, + "mc1_stderr": 0.016789289499502022, + "mc2": 0.5263499701230321, + "mc2_stderr": 0.016204107361881243 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4332939787485242, + "acc_stderr": 0.0170366836418931, + "acc_norm": 0.4510035419126328, + "acc_norm_stderr": 0.017107618859549353 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, 
+ "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + 
"harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "vicgalle/Configurable-Janus-7B", + "model_sha": "a50c423332324e1fbc346eeadcfe74a7013ec6c1", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/vicgalle/Configurable-Llama-3-8B-v0.3/result_2024-06-17 17:24:25.json b/vicgalle/Configurable-Llama-3-8B-v0.3/result_2024-06-17 17:24:25.json new file mode 100644 index 0000000000000000000000000000000000000000..37f3eaed1008841675545df8294d51471f3a30f9 --- /dev/null +++ b/vicgalle/Configurable-Llama-3-8B-v0.3/result_2024-06-17 17:24:25.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.38139931740614336, + "acc_stderr": 0.014194389086685254, + "acc_norm": 0.4513651877133106, + "acc_norm_stderr": 0.014542104569955267 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3615813582951603, + "acc_stderr": 0.004794764843685282, + "acc_norm": 0.47012547301334395, + "acc_norm_stderr": 0.004980866814462752 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5497076023391813, + "acc_stderr": 0.038158273659132366, + "acc_norm": 0.5497076023391813, + "acc_norm_stderr": 0.038158273659132366 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5728155339805825, + "acc_stderr": 0.04897957737781168, + "acc_norm": 0.5728155339805825, + "acc_norm_stderr": 
0.04897957737781168 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4482758620689655, + "acc_stderr": 0.017784034534992457, + "acc_norm": 0.4482758620689655, + "acc_norm_stderr": 0.017784034534992457 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.362962962962963, + "acc_stderr": 0.04153948404742399, + "acc_norm": 0.362962962962963, + "acc_norm_stderr": 0.04153948404742399 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847415, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847415 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4595744680851064, + "acc_stderr": 0.032579014820998356, + "acc_norm": 0.4595744680851064, + "acc_norm_stderr": 0.032579014820998356 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3795180722891566, + "acc_stderr": 0.037777988227480165, + "acc_norm": 0.3795180722891566, + "acc_norm_stderr": 0.037777988227480165 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5530546623794212, + "acc_stderr": 0.028237769422085342, + "acc_norm": 0.5530546623794212, + "acc_norm_stderr": 0.028237769422085342 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.484304932735426, + "acc_stderr": 0.0335412657542081, + "acc_norm": 0.484304932735426, + "acc_norm_stderr": 0.0335412657542081 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5114503816793893, + "acc_stderr": 0.043841400240780176, + "acc_norm": 0.5114503816793893, + "acc_norm_stderr": 0.043841400240780176 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5151515151515151, + "acc_stderr": 0.0356071651653106, + "acc_norm": 0.5151515151515151, + "acc_norm_stderr": 0.0356071651653106 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5517241379310345, + "acc_stderr": 0.041443118108781526, + "acc_norm": 0.5517241379310345, + "acc_norm_stderr": 
0.041443118108781526 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3627450980392157, + "acc_stderr": 0.047840607041056527, + "acc_norm": 0.3627450980392157, + "acc_norm_stderr": 0.047840607041056527 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5336134453781513, + "acc_stderr": 0.03240501447690071, + "acc_norm": 0.5336134453781513, + "acc_norm_stderr": 0.03240501447690071 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5076923076923077, + "acc_stderr": 0.02534800603153475, + "acc_norm": 0.5076923076923077, + "acc_norm_stderr": 0.02534800603153475 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.57, + "acc_stderr": 0.0497569851956243, + "acc_norm": 0.57, + "acc_norm_stderr": 0.0497569851956243 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5740740740740741, + "acc_stderr": 0.0478034362693679, + "acc_norm": 0.5740740740740741, + "acc_norm_stderr": 0.0478034362693679 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4630541871921182, + "acc_stderr": 0.035083705204426656, + "acc_norm": 0.4630541871921182, + "acc_norm_stderr": 0.035083705204426656 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.535483870967742, + "acc_stderr": 0.02837228779796294, + "acc_norm": 0.535483870967742, + "acc_norm_stderr": 0.02837228779796294 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7393162393162394, + "acc_stderr": 0.028760348956523414, + "acc_norm": 0.7393162393162394, + "acc_norm_stderr": 0.028760348956523414 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5094339622641509, + "acc_stderr": 0.0307673947078081, + "acc_norm": 0.5094339622641509, + "acc_norm_stderr": 0.0307673947078081 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5181818181818182, + "acc_stderr": 0.04785964010794915, + "acc_norm": 
0.5181818181818182, + "acc_norm_stderr": 0.04785964010794915 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.362962962962963, + "acc_stderr": 0.029318203645206865, + "acc_norm": 0.362962962962963, + "acc_norm_stderr": 0.029318203645206865 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3973509933774834, + "acc_stderr": 0.03995524007681681, + "acc_norm": 0.3973509933774834, + "acc_norm_stderr": 0.03995524007681681 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6467661691542289, + "acc_stderr": 0.03379790611796777, + "acc_norm": 0.6467661691542289, + "acc_norm_stderr": 0.03379790611796777 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4393063583815029, + "acc_stderr": 0.03784271932887467, + "acc_norm": 0.4393063583815029, + "acc_norm_stderr": 0.03784271932887467 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.37566137566137564, + "acc_stderr": 0.02494236893115978, + "acc_norm": 0.37566137566137564, + "acc_norm_stderr": 0.02494236893115978 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.04174752578923185, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.04174752578923185 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.69, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.69, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5057803468208093, + "acc_stderr": 0.026917296179149123, + "acc_norm": 0.5057803468208093, + "acc_norm_stderr": 0.026917296179149123 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.44171779141104295, + "acc_stderr": 0.03901591825836184, + "acc_norm": 0.44171779141104295, + "acc_norm_stderr": 0.03901591825836184 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5277777777777778, + "acc_stderr": 
0.027777777777777804, + "acc_norm": 0.5277777777777778, + "acc_norm_stderr": 0.027777777777777804 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.047609522856952344, + "acc_norm": 0.34, + "acc_norm_stderr": 0.047609522856952344 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5595854922279793, + "acc_stderr": 0.03582724530036093, + "acc_norm": 0.5595854922279793, + "acc_norm_stderr": 0.03582724530036093 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.38596491228070173, + "acc_stderr": 0.04579639422070434, + "acc_norm": 0.38596491228070173, + "acc_norm_stderr": 0.04579639422070434 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5871559633027523, + "acc_stderr": 0.02110912813341392, + "acc_norm": 0.5871559633027523, + "acc_norm_stderr": 0.02110912813341392 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.0442626668137991, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.0442626668137991 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5424836601307189, + "acc_stderr": 0.028526383452142635, + "acc_norm": 0.5424836601307189, + "acc_norm_stderr": 0.028526383452142635 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.62, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.62, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6694214876033058, + "acc_stderr": 0.04294340845212094, + "acc_norm": 0.6694214876033058, + "acc_norm_stderr": 0.04294340845212094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.48026315789473684, + "acc_stderr": 0.040657710025626057, + "acc_norm": 0.48026315789473684, + "acc_norm_stderr": 0.040657710025626057 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.43790849673202614, + "acc_stderr": 0.020071257886886525, + "acc_norm": 0.43790849673202614, + "acc_norm_stderr": 0.020071257886886525 + }, + 
"harness|ko_mmlu_professional_accounting|5": { + "acc": 0.35106382978723405, + "acc_stderr": 0.02847350127296376, + "acc_norm": 0.35106382978723405, + "acc_norm_stderr": 0.02847350127296376 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.04697113923010213, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.04697113923010213 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4212962962962963, + "acc_stderr": 0.033674621388960775, + "acc_norm": 0.4212962962962963, + "acc_norm_stderr": 0.033674621388960775 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27262569832402234, + "acc_stderr": 0.01489339173524962, + "acc_norm": 0.27262569832402234, + "acc_norm_stderr": 0.01489339173524962 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.04999999999999999, + "acc_norm": 0.45, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.68, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.68, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.02952009569768776, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.02952009569768776 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6408163265306123, + "acc_stderr": 0.03071356045510849, + "acc_norm": 0.6408163265306123, + "acc_norm_stderr": 0.03071356045510849 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6455696202531646, + "acc_stderr": 0.031137304297185798, + "acc_norm": 0.6455696202531646, + "acc_norm_stderr": 0.031137304297185798 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.37157757496740546, + "acc_stderr": 0.012341828514528285, + "acc_norm": 0.37157757496740546, + "acc_norm_stderr": 0.012341828514528285 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5392156862745098, + "acc_stderr": 0.03498501649369527, + 
"acc_norm": 0.5392156862745098, + "acc_norm_stderr": 0.03498501649369527 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6484848484848484, + "acc_stderr": 0.037282069986826503, + "acc_norm": 0.6484848484848484, + "acc_norm_stderr": 0.037282069986826503 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3402692778457772, + "mc1_stderr": 0.016586304901762564, + "mc2": 0.5322048883546253, + "mc2_stderr": 0.015962091791797247 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4911452184179457, + "acc_stderr": 0.01718765819933674, + "acc_norm": 0.5171192443919717, + "acc_norm_stderr": 0.017180275246085626 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + 
"harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "vicgalle/Configurable-Llama-3-8B-v0.3", + "model_sha": "73b9c1bfc75301d5bcd84d85be0d995aaeec8fe2", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/vicgalle/Configurable-Mistral-7B/result_2024-06-12 19:40:04.json b/vicgalle/Configurable-Mistral-7B/result_2024-06-12 19:40:04.json new file mode 100644 index 0000000000000000000000000000000000000000..49388b567ec15483380328b020439acc8f2ed1ed --- /dev/null +++ 
b/vicgalle/Configurable-Mistral-7B/result_2024-06-12 19:40:04.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.30716723549488056, + "acc_stderr": 0.013481034054980945, + "acc_norm": 0.35921501706484643, + "acc_norm_stderr": 0.014020224155839154 + }, + "harness|ko_hellaswag|10": { + "acc": 0.36885082652858, + "acc_stderr": 0.0048150733340006, + "acc_norm": 0.46614220274845647, + "acc_norm_stderr": 0.004978328190775524 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4619883040935672, + "acc_stderr": 0.03823727092882307, + "acc_norm": 0.4619883040935672, + "acc_norm_stderr": 0.03823727092882307 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5339805825242718, + "acc_stderr": 0.0493929144727348, + "acc_norm": 0.5339805825242718, + "acc_norm_stderr": 0.0493929144727348 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4367816091954023, + "acc_stderr": 0.01773647083780067, + "acc_norm": 0.4367816091954023, + "acc_norm_stderr": 0.01773647083780067 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3111111111111111, + "acc_stderr": 0.03999262876617723, + "acc_norm": 0.3111111111111111, + "acc_norm_stderr": 0.03999262876617723 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3872340425531915, + "acc_stderr": 0.03184389265339526, + "acc_norm": 0.3872340425531915, + "acc_norm_stderr": 0.03184389265339526 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3373493975903614, + "acc_stderr": 0.03680783690727581, + "acc_norm": 0.3373493975903614, + "acc_norm_stderr": 0.03680783690727581 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.43086816720257237, + "acc_stderr": 0.028125340983972714, + "acc_norm": 0.43086816720257237, + "acc_norm_stderr": 0.028125340983972714 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4125560538116592, + "acc_stderr": 
0.03304062175449296, + "acc_norm": 0.4125560538116592, + "acc_norm_stderr": 0.03304062175449296 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4351145038167939, + "acc_stderr": 0.043482080516448585, + "acc_norm": 0.4351145038167939, + "acc_norm_stderr": 0.043482080516448585 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709390974, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709390974 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5252525252525253, + "acc_stderr": 0.03557806245087314, + "acc_norm": 0.5252525252525253, + "acc_norm_stderr": 0.03557806245087314 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4482758620689655, + "acc_stderr": 0.04144311810878151, + "acc_norm": 0.4482758620689655, + "acc_norm_stderr": 0.04144311810878151 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04690650298201943, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04690650298201943 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.47058823529411764, + "acc_stderr": 0.032422250271150074, + "acc_norm": 0.47058823529411764, + "acc_norm_stderr": 0.032422250271150074 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4230769230769231, + "acc_stderr": 0.025049197876042335, + "acc_norm": 0.4230769230769231, + "acc_norm_stderr": 0.025049197876042335 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.04826217294139894, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.04826217294139894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 
0.3645320197044335, + "acc_stderr": 0.0338640574606209, + "acc_norm": 0.3645320197044335, + "acc_norm_stderr": 0.0338640574606209 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.43870967741935485, + "acc_stderr": 0.028229497320317216, + "acc_norm": 0.43870967741935485, + "acc_norm_stderr": 0.028229497320317216 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.03088273697413866, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.03088273697413866 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.42641509433962266, + "acc_stderr": 0.030437794342983045, + "acc_norm": 0.42641509433962266, + "acc_norm_stderr": 0.030437794342983045 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5363636363636364, + "acc_stderr": 0.04776449162396197, + "acc_norm": 0.5363636363636364, + "acc_norm_stderr": 0.04776449162396197 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.31851851851851853, + "acc_stderr": 0.02840653309060846, + "acc_norm": 0.31851851851851853, + "acc_norm_stderr": 0.02840653309060846 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.03757949922943343, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.03757949922943343 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5920398009950248, + "acc_stderr": 0.03475116365194092, + "acc_norm": 0.5920398009950248, + "acc_norm_stderr": 0.03475116365194092 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.36416184971098264, + "acc_stderr": 0.03669072477416907, + "acc_norm": 0.36416184971098264, + "acc_norm_stderr": 0.03669072477416907 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.34656084656084657, + "acc_stderr": 0.024508777521028424, + "acc_norm": 0.34656084656084657, + "acc_norm_stderr": 0.024508777521028424 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.039420826399272135, + "acc_norm": 0.3333333333333333, + 
"acc_norm_stderr": 0.039420826399272135 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.56, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.56, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.523121387283237, + "acc_stderr": 0.026890297881303118, + "acc_norm": 0.523121387283237, + "acc_norm_stderr": 0.026890297881303118 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.44171779141104295, + "acc_stderr": 0.03901591825836183, + "acc_norm": 0.44171779141104295, + "acc_norm_stderr": 0.03901591825836183 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.02774431344337654, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.02774431344337654 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932269, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932269 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5025906735751295, + "acc_stderr": 0.03608390745384487, + "acc_norm": 0.5025906735751295, + "acc_norm_stderr": 0.03608390745384487 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3157894736842105, + "acc_stderr": 0.04372748290278007, + "acc_norm": 0.3157894736842105, + "acc_norm_stderr": 0.04372748290278007 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.44770642201834865, + "acc_stderr": 0.021319754962425455, + "acc_norm": 0.44770642201834865, + "acc_norm_stderr": 0.021319754962425455 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.373015873015873, + "acc_stderr": 0.04325506042017086, + "acc_norm": 0.373015873015873, + "acc_norm_stderr": 0.04325506042017086 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4084967320261438, + "acc_stderr": 0.028146405993096358, + "acc_norm": 
0.4084967320261438, + "acc_norm_stderr": 0.028146405993096358 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5950413223140496, + "acc_stderr": 0.04481137755942469, + "acc_norm": 0.5950413223140496, + "acc_norm_stderr": 0.04481137755942469 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3881578947368421, + "acc_stderr": 0.03965842097512744, + "acc_norm": 0.3881578947368421, + "acc_norm_stderr": 0.03965842097512744 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3790849673202614, + "acc_stderr": 0.019627444748412236, + "acc_norm": 0.3790849673202614, + "acc_norm_stderr": 0.019627444748412236 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.35106382978723405, + "acc_stderr": 0.02847350127296376, + "acc_norm": 0.35106382978723405, + "acc_norm_stderr": 0.02847350127296376 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4107142857142857, + "acc_stderr": 0.04669510663875191, + "acc_norm": 0.4107142857142857, + "acc_norm_stderr": 0.04669510663875191 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3425925925925926, + "acc_stderr": 0.03236585252602157, + "acc_norm": 0.3425925925925926, + "acc_norm_stderr": 0.03236585252602157 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.3206703910614525, + "acc_stderr": 0.0156099295593484, + "acc_norm": 0.3206703910614525, + "acc_norm_stderr": 0.0156099295593484 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3272058823529412, + "acc_stderr": 
0.028501452860396567, + "acc_norm": 0.3272058823529412, + "acc_norm_stderr": 0.028501452860396567 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5142857142857142, + "acc_stderr": 0.03199615232806286, + "acc_norm": 0.5142857142857142, + "acc_norm_stderr": 0.03199615232806286 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5864978902953587, + "acc_stderr": 0.03205649904851858, + "acc_norm": 0.5864978902953587, + "acc_norm_stderr": 0.03205649904851858 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3213820078226858, + "acc_stderr": 0.01192758135226508, + "acc_norm": 0.3213820078226858, + "acc_norm_stderr": 0.01192758135226508 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.45588235294117646, + "acc_stderr": 0.03495624522015473, + "acc_norm": 0.45588235294117646, + "acc_norm_stderr": 0.03495624522015473 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.46060606060606063, + "acc_stderr": 0.03892207016552013, + "acc_norm": 0.46060606060606063, + "acc_norm_stderr": 0.03892207016552013 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3769889840881273, + "mc1_stderr": 0.016965517578930354, + "mc2": 0.5421294854896095, + "mc2_stderr": 0.01627318551552847 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4049586776859504, + "acc_stderr": 0.016876941165045612, + "acc_norm": 0.4179456906729634, + "acc_norm_stderr": 0.016957292005279716 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + 
"harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + 
"harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "vicgalle/Configurable-Mistral-7B", + "model_sha": "6943dc17a4ba8160f3bbc050638c548bf9dd0c55", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/vicgalle/Configurable-Yi-1.5-9B-Chat/result_2024-05-29 17:01:04.json b/vicgalle/Configurable-Yi-1.5-9B-Chat/result_2024-05-29 17:01:04.json new file mode 100644 index 0000000000000000000000000000000000000000..3c05cc09f0fd3cd753f5a7f28206ca0009cf5920 --- /dev/null +++ b/vicgalle/Configurable-Yi-1.5-9B-Chat/result_2024-05-29 17:01:04.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3242320819112628, + "acc_stderr": 0.013678810399518822, + "acc_norm": 0.35921501706484643, + "acc_norm_stderr": 0.014020224155839148 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3355905198167696, + "acc_stderr": 0.004712314511950961, + "acc_norm": 0.4053973312089225, + "acc_norm_stderr": 0.00489965370403284 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.391812865497076, + "acc_stderr": 0.037439798259264, + "acc_norm": 0.391812865497076, + "acc_norm_stderr": 0.037439798259264 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5339805825242718, + "acc_stderr": 0.04939291447273482, + "acc_norm": 0.5339805825242718, + "acc_norm_stderr": 0.04939291447273482 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.43039591315453385, + "acc_stderr": 0.017705868776292374, + "acc_norm": 0.43039591315453385, + "acc_norm_stderr": 0.017705868776292374 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.28888888888888886, + "acc_stderr": 0.0391545063041425, + "acc_norm": 0.28888888888888886, + "acc_norm_stderr": 0.0391545063041425 + }, + 
"harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4425531914893617, + "acc_stderr": 0.03246956919789959, + "acc_norm": 0.4425531914893617, + "acc_norm_stderr": 0.03246956919789959 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39156626506024095, + "acc_stderr": 0.037998574544796354, + "acc_norm": 0.39156626506024095, + "acc_norm_stderr": 0.037998574544796354 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.42443729903536975, + "acc_stderr": 0.02807192824794621, + "acc_norm": 0.42443729903536975, + "acc_norm_stderr": 0.02807192824794621 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4977578475336323, + "acc_stderr": 0.033557465352232634, + "acc_norm": 0.4977578475336323, + "acc_norm_stderr": 0.033557465352232634 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4732824427480916, + "acc_stderr": 0.04379024936553894, + "acc_norm": 0.4732824427480916, + "acc_norm_stderr": 0.04379024936553894 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.53, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.53, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5202020202020202, + "acc_stderr": 0.03559443565563919, + "acc_norm": 0.5202020202020202, + "acc_norm_stderr": 0.03559443565563919 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5724137931034483, + "acc_stderr": 0.041227371113703316, + "acc_norm": 0.5724137931034483, + "acc_norm_stderr": 0.041227371113703316 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.04488482852329017, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.04488482852329017 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4831932773109244, + "acc_stderr": 0.03246013680375308, + "acc_norm": 0.4831932773109244, + 
"acc_norm_stderr": 0.03246013680375308 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5076923076923077, + "acc_stderr": 0.025348006031534767, + "acc_norm": 0.5076923076923077, + "acc_norm_stderr": 0.025348006031534767 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.53, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.53, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5277777777777778, + "acc_stderr": 0.04826217294139894, + "acc_norm": 0.5277777777777778, + "acc_norm_stderr": 0.04826217294139894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.43349753694581283, + "acc_stderr": 0.03486731727419872, + "acc_norm": 0.43349753694581283, + "acc_norm_stderr": 0.03486731727419872 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4483870967741935, + "acc_stderr": 0.028292056830112735, + "acc_norm": 0.4483870967741935, + "acc_norm_stderr": 0.028292056830112735 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6794871794871795, + "acc_stderr": 0.030572811310299607, + "acc_norm": 0.6794871794871795, + "acc_norm_stderr": 0.030572811310299607 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.42641509433962266, + "acc_stderr": 0.030437794342983045, + "acc_norm": 0.42641509433962266, + "acc_norm_stderr": 0.030437794342983045 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4636363636363636, + "acc_stderr": 0.047764491623961985, + "acc_norm": 0.4636363636363636, + "acc_norm_stderr": 0.047764491623961985 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.030401786406101503, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.030401786406101503 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3443708609271523, + "acc_stderr": 
0.038796870240733264, + "acc_norm": 0.3443708609271523, + "acc_norm_stderr": 0.038796870240733264 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5174129353233831, + "acc_stderr": 0.03533389234739245, + "acc_norm": 0.5174129353233831, + "acc_norm_stderr": 0.03533389234739245 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.41040462427745666, + "acc_stderr": 0.03750757044895537, + "acc_norm": 0.41040462427745666, + "acc_norm_stderr": 0.03750757044895537 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.5105820105820106, + "acc_stderr": 0.02574554227604548, + "acc_norm": 0.5105820105820106, + "acc_norm_stderr": 0.02574554227604548 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3125, + "acc_stderr": 0.038760854559127644, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.038760854559127644 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.44, + "acc_stderr": 0.0498887651569859, + "acc_norm": 0.44, + "acc_norm_stderr": 0.0498887651569859 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.63, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.63, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.49421965317919075, + "acc_stderr": 0.026917296179149116, + "acc_norm": 0.49421965317919075, + "acc_norm_stderr": 0.026917296179149116 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4723926380368098, + "acc_stderr": 0.03922378290610991, + "acc_norm": 0.4723926380368098, + "acc_norm_stderr": 0.03922378290610991 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.45987654320987653, + "acc_stderr": 0.02773102275353928, + "acc_norm": 0.45987654320987653, + "acc_norm_stderr": 0.02773102275353928 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.46632124352331605, + "acc_stderr": 
0.036002440698671784, + "acc_norm": 0.46632124352331605, + "acc_norm_stderr": 0.036002440698671784 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.41228070175438597, + "acc_stderr": 0.04630653203366596, + "acc_norm": 0.41228070175438597, + "acc_norm_stderr": 0.04630653203366596 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.44403669724770645, + "acc_stderr": 0.02130262121165452, + "acc_norm": 0.44403669724770645, + "acc_norm_stderr": 0.02130262121165452 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.46825396825396826, + "acc_stderr": 0.04463112720677171, + "acc_norm": 0.46825396825396826, + "acc_norm_stderr": 0.04463112720677171 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4869281045751634, + "acc_stderr": 0.028620130800700246, + "acc_norm": 0.4869281045751634, + "acc_norm_stderr": 0.028620130800700246 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.55, + "acc_stderr": 0.049999999999999996, + "acc_norm": 0.55, + "acc_norm_stderr": 0.049999999999999996 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6528925619834711, + "acc_stderr": 0.04345724570292534, + "acc_norm": 0.6528925619834711, + "acc_norm_stderr": 0.04345724570292534 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4605263157894737, + "acc_stderr": 0.04056242252249034, + "acc_norm": 0.4605263157894737, + "acc_norm_stderr": 0.04056242252249034 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3839869281045752, + "acc_stderr": 0.019675808135281525, + "acc_norm": 0.3839869281045752, + "acc_norm_stderr": 0.019675808135281525 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.40070921985815605, + "acc_stderr": 0.02923346574557309, + "acc_norm": 0.40070921985815605, + "acc_norm_stderr": 0.02923346574557309 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4375, + "acc_stderr": 0.04708567521880525, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.04708567521880525 + }, + "harness|ko_mmlu_high_school_statistics|5": { + 
"acc": 0.49537037037037035, + "acc_stderr": 0.03409825519163572, + "acc_norm": 0.49537037037037035, + "acc_norm_stderr": 0.03409825519163572 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2916201117318436, + "acc_stderr": 0.01520103251252043, + "acc_norm": 0.2916201117318436, + "acc_norm_stderr": 0.01520103251252043 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.56, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.56, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.7, + "acc_stderr": 0.04605661864718381, + "acc_norm": 0.7, + "acc_norm_stderr": 0.04605661864718381 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.29044117647058826, + "acc_stderr": 0.02757646862274052, + "acc_norm": 0.29044117647058826, + "acc_norm_stderr": 0.02757646862274052 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5591836734693878, + "acc_stderr": 0.03178419114175363, + "acc_norm": 0.5591836734693878, + "acc_norm_stderr": 0.03178419114175363 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.43037974683544306, + "acc_stderr": 0.032230171959375976, + "acc_norm": 0.43037974683544306, + "acc_norm_stderr": 0.032230171959375976 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.34419817470664926, + "acc_stderr": 0.012134433741002575, + "acc_norm": 0.34419817470664926, + "acc_norm_stderr": 0.012134433741002575 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.03410785338904718, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.03410785338904718 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4303030303030303, + "acc_stderr": 0.03866225962879077, + "acc_norm": 0.4303030303030303, + "acc_norm_stderr": 0.03866225962879077 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.34761321909424725, + "mc1_stderr": 0.016670769188897303, + "mc2": 0.5205202660955828, + "mc2_stderr": 
0.0165979400355126 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5277449822904369, + "acc_stderr": 0.017163867979456016, + "acc_norm": 0.5348288075560803, + "acc_norm_stderr": 0.017148598015747422 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, 
+ "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "vicgalle/Configurable-Yi-1.5-9B-Chat", + "model_sha": "992cb2232caae78eff6a836b2e0642f7cbf6018e", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/vicgalle/ConfigurableBeagle-11B/result_2024-06-12 19:43:15.json b/vicgalle/ConfigurableBeagle-11B/result_2024-06-12 19:43:15.json new file mode 100644 index 0000000000000000000000000000000000000000..3696c04c2e3f8c4008288f09917ba377a1ecbcb7 --- /dev/null +++ b/vicgalle/ConfigurableBeagle-11B/result_2024-06-12 19:43:15.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.40273037542662116, + "acc_stderr": 0.01433223630679016, + "acc_norm": 0.46245733788395904, + "acc_norm_stderr": 0.014570144495075573 + }, + "harness|ko_hellaswag|10": { + "acc": 0.41196972714598684, + "acc_stderr": 0.0049118377305822055, + "acc_norm": 0.5487950607448715, + "acc_norm_stderr": 0.004965963647210314 + }, + 
"harness|ko_mmlu_world_religions|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.03811079669833531, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.03811079669833531 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6504854368932039, + "acc_stderr": 0.04721188506097172, + "acc_norm": 0.6504854368932039, + "acc_norm_stderr": 0.04721188506097172 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.565772669220945, + "acc_stderr": 0.017724589389677785, + "acc_norm": 0.565772669220945, + "acc_norm_stderr": 0.017724589389677785 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4, + "acc_stderr": 0.04232073695151589, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04232073695151589 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.451063829787234, + "acc_stderr": 0.032529096196131965, + "acc_norm": 0.451063829787234, + "acc_norm_stderr": 0.032529096196131965 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42168674698795183, + "acc_stderr": 0.03844453181770917, + "acc_norm": 0.42168674698795183, + "acc_norm_stderr": 0.03844453181770917 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5498392282958199, + "acc_stderr": 0.028256660723360177, + "acc_norm": 0.5498392282958199, + "acc_norm_stderr": 0.028256660723360177 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5291479820627802, + "acc_stderr": 0.03350073248773403, + "acc_norm": 0.5291479820627802, + "acc_norm_stderr": 0.03350073248773403 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4961832061068702, + "acc_stderr": 0.043851623256015534, + "acc_norm": 0.4961832061068702, + "acc_norm_stderr": 0.043851623256015534 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709390974, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709390974 + }, + "harness|ko_mmlu_high_school_geography|5": { + 
"acc": 0.6212121212121212, + "acc_stderr": 0.03456088731993747, + "acc_norm": 0.6212121212121212, + "acc_norm_stderr": 0.03456088731993747 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5517241379310345, + "acc_stderr": 0.041443118108781526, + "acc_norm": 0.5517241379310345, + "acc_norm_stderr": 0.041443118108781526 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04690650298201942, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04690650298201942 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5714285714285714, + "acc_stderr": 0.03214536859788639, + "acc_norm": 0.5714285714285714, + "acc_norm_stderr": 0.03214536859788639 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.49230769230769234, + "acc_stderr": 0.0253480060315348, + "acc_norm": 0.49230769230769234, + "acc_norm_stderr": 0.0253480060315348 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6203703703703703, + "acc_stderr": 0.04691521224077742, + "acc_norm": 0.6203703703703703, + "acc_norm_stderr": 0.04691521224077742 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4088669950738916, + "acc_stderr": 0.034590588158832314, + "acc_norm": 0.4088669950738916, + "acc_norm_stderr": 0.034590588158832314 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5193548387096775, + "acc_stderr": 0.02842268740431211, + "acc_norm": 0.5193548387096775, + "acc_norm_stderr": 0.02842268740431211 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7777777777777778, + "acc_stderr": 0.027236013946196687, + "acc_norm": 0.7777777777777778, + "acc_norm_stderr": 0.027236013946196687 + }, + 
"harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4528301886792453, + "acc_stderr": 0.030635627957961816, + "acc_norm": 0.4528301886792453, + "acc_norm_stderr": 0.030635627957961816 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5181818181818182, + "acc_stderr": 0.04785964010794916, + "acc_norm": 0.5181818181818182, + "acc_norm_stderr": 0.04785964010794916 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3111111111111111, + "acc_stderr": 0.02822644674968351, + "acc_norm": 0.3111111111111111, + "acc_norm_stderr": 0.02822644674968351 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3443708609271523, + "acc_stderr": 0.038796870240733264, + "acc_norm": 0.3443708609271523, + "acc_norm_stderr": 0.038796870240733264 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6318407960199005, + "acc_stderr": 0.03410410565495301, + "acc_norm": 0.6318407960199005, + "acc_norm_stderr": 0.03410410565495301 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4277456647398844, + "acc_stderr": 0.03772446857518026, + "acc_norm": 0.4277456647398844, + "acc_norm_stderr": 0.03772446857518026 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.41005291005291006, + "acc_stderr": 0.025331202438944433, + "acc_norm": 0.41005291005291006, + "acc_norm_stderr": 0.025331202438944433 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4375, + "acc_stderr": 0.04148415739394154, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.04148415739394154 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.61, + "acc_stderr": 0.049020713000019756, + "acc_norm": 0.61, + "acc_norm_stderr": 0.049020713000019756 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.546242774566474, + "acc_stderr": 0.026803720583206174, + "acc_norm": 0.546242774566474, + "acc_norm_stderr": 
0.026803720583206174 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5214723926380368, + "acc_stderr": 0.03924746876751129, + "acc_norm": 0.5214723926380368, + "acc_norm_stderr": 0.03924746876751129 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5617283950617284, + "acc_stderr": 0.02760791408740048, + "acc_norm": 0.5617283950617284, + "acc_norm_stderr": 0.02760791408740048 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6062176165803109, + "acc_stderr": 0.03526077095548241, + "acc_norm": 0.6062176165803109, + "acc_norm_stderr": 0.03526077095548241 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.40350877192982454, + "acc_stderr": 0.04615186962583704, + "acc_norm": 0.40350877192982454, + "acc_norm_stderr": 0.04615186962583704 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5963302752293578, + "acc_stderr": 0.021035704856574956, + "acc_norm": 0.5963302752293578, + "acc_norm_stderr": 0.021035704856574956 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3968253968253968, + "acc_stderr": 0.04375888492727061, + "acc_norm": 0.3968253968253968, + "acc_norm_stderr": 0.04375888492727061 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5196078431372549, + "acc_stderr": 0.028607893699576063, + "acc_norm": 0.5196078431372549, + "acc_norm_stderr": 0.028607893699576063 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.47, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.47, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6198347107438017, + "acc_stderr": 0.04431324501968432, + "acc_norm": 0.6198347107438017, + "acc_norm_stderr": 0.04431324501968432 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5, + "acc_stderr": 0.04068942293855797, + "acc_norm": 0.5, + "acc_norm_stderr": 
0.04068942293855797 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.46895424836601307, + "acc_stderr": 0.020188804456361883, + "acc_norm": 0.46895424836601307, + "acc_norm_stderr": 0.020188804456361883 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3617021276595745, + "acc_stderr": 0.028663820147199492, + "acc_norm": 0.3617021276595745, + "acc_norm_stderr": 0.028663820147199492 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4017857142857143, + "acc_stderr": 0.04653333146973647, + "acc_norm": 0.4017857142857143, + "acc_norm_stderr": 0.04653333146973647 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.034076320938540496, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.034076320938540496 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2737430167597765, + "acc_stderr": 0.014912413096372432, + "acc_norm": 0.2737430167597765, + "acc_norm_stderr": 0.014912413096372432 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.55, + "acc_stderr": 0.05, + "acc_norm": 0.55, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.71, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.71, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.40441176470588236, + "acc_stderr": 0.029812630701569743, + "acc_norm": 0.40441176470588236, + "acc_norm_stderr": 0.029812630701569743 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5265306122448979, + "acc_stderr": 0.03196412734523272, + "acc_norm": 0.5265306122448979, + "acc_norm_stderr": 0.03196412734523272 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6877637130801688, + "acc_stderr": 0.03016513786784701, + "acc_norm": 0.6877637130801688, + "acc_norm_stderr": 0.03016513786784701 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.37157757496740546, + "acc_stderr": 0.012341828514528275, 
+ "acc_norm": 0.37157757496740546, + "acc_norm_stderr": 0.012341828514528275 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.028867431449849313, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.028867431449849313 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03225078108306289, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03225078108306289 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.41370869033047736, + "mc1_stderr": 0.017240861812099804, + "mc2": 0.576035038910601, + "mc2_stderr": 0.01624931822723386 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5560802833530106, + "acc_stderr": 0.01708188462354254, + "acc_norm": 0.5655253837072018, + "acc_norm_stderr": 0.017042098620824935 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + 
"harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "vicgalle/ConfigurableBeagle-11B", + "model_sha": "bbc16dbf94b8e8a99bb3e2ada6755faf9c2990dd", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/vicgalle/ConfigurableHermes-7B/result_2024-06-17 17:22:40.json 
b/vicgalle/ConfigurableHermes-7B/result_2024-06-17 17:22:40.json new file mode 100644 index 0000000000000000000000000000000000000000..0e92bb80515445d197fe49dcdb18e5c18725aa9d --- /dev/null +++ b/vicgalle/ConfigurableHermes-7B/result_2024-06-17 17:22:40.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3412969283276451, + "acc_stderr": 0.013855831287497723, + "acc_norm": 0.37542662116040953, + "acc_norm_stderr": 0.014150631435111726 + }, + "harness|ko_hellaswag|10": { + "acc": 0.37950607448715395, + "acc_stderr": 0.0048427232340220346, + "acc_norm": 0.48665604461262696, + "acc_norm_stderr": 0.004988004122536514 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.47953216374269003, + "acc_stderr": 0.038316105328219316, + "acc_norm": 0.47953216374269003, + "acc_norm_stderr": 0.038316105328219316 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6116504854368932, + "acc_stderr": 0.04825729337356389, + "acc_norm": 0.6116504854368932, + "acc_norm_stderr": 0.04825729337356389 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.47509578544061304, + "acc_stderr": 0.017857770704901015, + "acc_norm": 0.47509578544061304, + "acc_norm_stderr": 0.017857770704901015 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.31851851851851853, + "acc_stderr": 0.04024778401977111, + "acc_norm": 0.31851851851851853, + "acc_norm_stderr": 0.04024778401977111 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3617021276595745, + "acc_stderr": 0.0314108219759624, + "acc_norm": 0.3617021276595745, + "acc_norm_stderr": 0.0314108219759624 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.40963855421686746, + "acc_stderr": 0.038284011150790206, + "acc_norm": 0.40963855421686746, + "acc_norm_stderr": 0.038284011150790206 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4340836012861736, 
+ "acc_stderr": 0.0281502322445356, + "acc_norm": 0.4340836012861736, + "acc_norm_stderr": 0.0281502322445356 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4798206278026906, + "acc_stderr": 0.033530461674123, + "acc_norm": 0.4798206278026906, + "acc_norm_stderr": 0.033530461674123 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4198473282442748, + "acc_stderr": 0.043285772152629715, + "acc_norm": 0.4198473282442748, + "acc_norm_stderr": 0.043285772152629715 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.35, + "acc_stderr": 0.04793724854411019, + "acc_norm": 0.35, + "acc_norm_stderr": 0.04793724854411019 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.03547601494006938, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.03547601494006938 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4206896551724138, + "acc_stderr": 0.0411391498118926, + "acc_norm": 0.4206896551724138, + "acc_norm_stderr": 0.0411391498118926 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.30392156862745096, + "acc_stderr": 0.04576665403207763, + "acc_norm": 0.30392156862745096, + "acc_norm_stderr": 0.04576665403207763 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5126050420168067, + "acc_stderr": 0.03246816765752174, + "acc_norm": 0.5126050420168067, + "acc_norm_stderr": 0.03246816765752174 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4512820512820513, + "acc_stderr": 0.02523038123893483, + "acc_norm": 0.4512820512820513, + "acc_norm_stderr": 0.02523038123893483 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.65, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.65, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5277777777777778, + 
"acc_stderr": 0.04826217294139894, + "acc_norm": 0.5277777777777778, + "acc_norm_stderr": 0.04826217294139894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4187192118226601, + "acc_stderr": 0.03471192860518468, + "acc_norm": 0.4187192118226601, + "acc_norm_stderr": 0.03471192860518468 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3870967741935484, + "acc_stderr": 0.027709359675032488, + "acc_norm": 0.3870967741935484, + "acc_norm_stderr": 0.027709359675032488 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7222222222222222, + "acc_stderr": 0.02934311479809445, + "acc_norm": 0.7222222222222222, + "acc_norm_stderr": 0.02934311479809445 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4377358490566038, + "acc_stderr": 0.030533338430467512, + "acc_norm": 0.4377358490566038, + "acc_norm_stderr": 0.030533338430467512 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.0478833976870286, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.0478833976870286 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.29259259259259257, + "acc_stderr": 0.027738969632176088, + "acc_norm": 0.29259259259259257, + "acc_norm_stderr": 0.027738969632176088 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.24503311258278146, + "acc_stderr": 0.03511807571804724, + "acc_norm": 0.24503311258278146, + "acc_norm_stderr": 0.03511807571804724 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.582089552238806, + "acc_stderr": 0.034875586404620636, + "acc_norm": 0.582089552238806, + "acc_norm_stderr": 0.034875586404620636 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.37572254335260113, + "acc_stderr": 0.03692820767264867, + "acc_norm": 0.37572254335260113, + "acc_norm_stderr": 0.03692820767264867 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3968253968253968, + "acc_stderr": 0.025197101074246487, + "acc_norm": 0.3968253968253968, + "acc_norm_stderr": 
0.025197101074246487 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3541666666666667, + "acc_stderr": 0.039994111357535424, + "acc_norm": 0.3541666666666667, + "acc_norm_stderr": 0.039994111357535424 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.55, + "acc_stderr": 0.05, + "acc_norm": 0.55, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5086705202312138, + "acc_stderr": 0.0269150473553698, + "acc_norm": 0.5086705202312138, + "acc_norm_stderr": 0.0269150473553698 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4785276073619632, + "acc_stderr": 0.03924746876751129, + "acc_norm": 0.4785276073619632, + "acc_norm_stderr": 0.03924746876751129 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.45987654320987653, + "acc_stderr": 0.027731022753539284, + "acc_norm": 0.45987654320987653, + "acc_norm_stderr": 0.027731022753539284 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5647668393782384, + "acc_stderr": 0.03578038165008586, + "acc_norm": 0.5647668393782384, + "acc_norm_stderr": 0.03578038165008586 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.35964912280701755, + "acc_stderr": 0.045144961328736334, + "acc_norm": 0.35964912280701755, + "acc_norm_stderr": 0.045144961328736334 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5211009174311927, + "acc_stderr": 0.021418224754264643, + "acc_norm": 0.5211009174311927, + "acc_norm_stderr": 0.021418224754264643 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3492063492063492, + "acc_stderr": 0.04263906892795132, + "acc_norm": 0.3492063492063492, + "acc_norm_stderr": 
0.04263906892795132 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.46405228758169936, + "acc_stderr": 0.028555827516528787, + "acc_norm": 0.46405228758169936, + "acc_norm_stderr": 0.028555827516528787 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6859504132231405, + "acc_stderr": 0.04236964753041018, + "acc_norm": 0.6859504132231405, + "acc_norm_stderr": 0.04236964753041018 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3881578947368421, + "acc_stderr": 0.03965842097512744, + "acc_norm": 0.3881578947368421, + "acc_norm_stderr": 0.03965842097512744 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3937908496732026, + "acc_stderr": 0.01976621199107307, + "acc_norm": 0.3937908496732026, + "acc_norm_stderr": 0.01976621199107307 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3723404255319149, + "acc_stderr": 0.02883892147125146, + "acc_norm": 0.3723404255319149, + "acc_norm_stderr": 0.02883892147125146 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.39285714285714285, + "acc_stderr": 0.04635550135609976, + "acc_norm": 0.39285714285714285, + "acc_norm_stderr": 0.04635550135609976 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.37962962962962965, + "acc_stderr": 0.03309682581119035, + "acc_norm": 0.37962962962962965, + "acc_norm_stderr": 0.03309682581119035 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.19217877094972066, + "acc_stderr": 0.013177759505210083, + "acc_norm": 0.19217877094972066, + "acc_norm_stderr": 0.013177759505210083 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.63, + "acc_stderr": 0.04852365870939098, + "acc_norm": 0.63, + "acc_norm_stderr": 
0.04852365870939098 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3639705882352941, + "acc_stderr": 0.029227192460032025, + "acc_norm": 0.3639705882352941, + "acc_norm_stderr": 0.029227192460032025 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4857142857142857, + "acc_stderr": 0.03199615232806287, + "acc_norm": 0.4857142857142857, + "acc_norm_stderr": 0.03199615232806287 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6033755274261603, + "acc_stderr": 0.031843998738112236, + "acc_norm": 0.6033755274261603, + "acc_norm_stderr": 0.031843998738112236 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3376792698826597, + "acc_stderr": 0.01207856377714556, + "acc_norm": 0.3376792698826597, + "acc_norm_stderr": 0.01207856377714556 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4019607843137255, + "acc_stderr": 0.034411900234824655, + "acc_norm": 0.4019607843137255, + "acc_norm_stderr": 0.034411900234824655 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.038956580652718446, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.038956580652718446 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.35006119951040393, + "mc1_stderr": 0.016697949420151036, + "mc2": 0.5482564495416423, + "mc2_stderr": 0.016240700116219246 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4793388429752066, + "acc_stderr": 0.017175671279836446, + "acc_norm": 0.4935064935064935, + "acc_norm_stderr": 0.017188904359077318 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + 
"harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + 
"harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "vicgalle/ConfigurableHermes-7B", + "model_sha": "1333a88eaf6591836b2d9825d1eaec7260f336c9", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/vicgalle/ConfigurableSOLAR-10.7B/result_2024-06-12 19:45:34.json b/vicgalle/ConfigurableSOLAR-10.7B/result_2024-06-12 19:45:34.json new file mode 100644 index 0000000000000000000000000000000000000000..fa3f3be0c4f4967ab651ace8e8f25f0d75eca496 --- /dev/null +++ b/vicgalle/ConfigurableSOLAR-10.7B/result_2024-06-12 19:45:34.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.37542662116040953, + "acc_stderr": 0.014150631435111728, + "acc_norm": 0.454778156996587, + "acc_norm_stderr": 0.014551507060836355 + }, + "harness|ko_hellaswag|10": { + "acc": 0.39294961163114916, + "acc_stderr": 0.004874076250521576, + "acc_norm": 0.5222067317267477, + "acc_norm_stderr": 0.0049848576711871 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5847953216374269, + "acc_stderr": 0.03779275945503201, + "acc_norm": 0.5847953216374269, + "acc_norm_stderr": 0.03779275945503201 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6019417475728155, + "acc_stderr": 0.04846748253977238, + "acc_norm": 0.6019417475728155, + "acc_norm_stderr": 0.04846748253977238 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5530012771392082, + "acc_stderr": 0.017779225233394227, + "acc_norm": 0.5530012771392082, + "acc_norm_stderr": 0.017779225233394227 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34074074074074073, + "acc_stderr": 0.04094376269996795, + 
"acc_norm": 0.34074074074074073, + "acc_norm_stderr": 0.04094376269996795 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720683, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720683 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39574468085106385, + "acc_stderr": 0.03196758697835363, + "acc_norm": 0.39574468085106385, + "acc_norm_stderr": 0.03196758697835363 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.45180722891566266, + "acc_stderr": 0.03874371556587953, + "acc_norm": 0.45180722891566266, + "acc_norm_stderr": 0.03874371556587953 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5305466237942122, + "acc_stderr": 0.028345045864840625, + "acc_norm": 0.5305466237942122, + "acc_norm_stderr": 0.028345045864840625 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.515695067264574, + "acc_stderr": 0.0335412657542081, + "acc_norm": 0.515695067264574, + "acc_norm_stderr": 0.0335412657542081 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.549618320610687, + "acc_stderr": 0.04363643698524779, + "acc_norm": 0.549618320610687, + "acc_norm_stderr": 0.04363643698524779 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6515151515151515, + "acc_stderr": 0.03394853965156402, + "acc_norm": 0.6515151515151515, + "acc_norm_stderr": 0.03394853965156402 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.47586206896551725, + "acc_stderr": 0.0416180850350153, + "acc_norm": 0.47586206896551725, + "acc_norm_stderr": 0.0416180850350153 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.04835503696107223, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.04835503696107223 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5882352941176471, + "acc_stderr": 
0.031968769891957786, + "acc_norm": 0.5882352941176471, + "acc_norm_stderr": 0.031968769891957786 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5051282051282051, + "acc_stderr": 0.025349672906838643, + "acc_norm": 0.5051282051282051, + "acc_norm_stderr": 0.025349672906838643 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.55, + "acc_stderr": 0.04999999999999999, + "acc_norm": 0.55, + "acc_norm_stderr": 0.04999999999999999 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6388888888888888, + "acc_stderr": 0.04643454608906275, + "acc_norm": 0.6388888888888888, + "acc_norm_stderr": 0.04643454608906275 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39408866995073893, + "acc_stderr": 0.034381579670365446, + "acc_norm": 0.39408866995073893, + "acc_norm_stderr": 0.034381579670365446 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5419354838709678, + "acc_stderr": 0.028343787250540618, + "acc_norm": 0.5419354838709678, + "acc_norm_stderr": 0.028343787250540618 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.717948717948718, + "acc_stderr": 0.029480360549541194, + "acc_norm": 0.717948717948718, + "acc_norm_stderr": 0.029480360549541194 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4981132075471698, + "acc_stderr": 0.03077265364207565, + "acc_norm": 0.4981132075471698, + "acc_norm_stderr": 0.03077265364207565 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5272727272727272, + "acc_stderr": 0.04782001791380061, + "acc_norm": 0.5272727272727272, + "acc_norm_stderr": 0.04782001791380061 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.028317533496066482, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.028317533496066482 + }, + "harness|ko_mmlu_high_school_physics|5": { + 
"acc": 0.2980132450331126, + "acc_stderr": 0.037345356767871984, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.037345356767871984 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6716417910447762, + "acc_stderr": 0.033206858897443244, + "acc_norm": 0.6716417910447762, + "acc_norm_stderr": 0.033206858897443244 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5086705202312138, + "acc_stderr": 0.0381189098894041, + "acc_norm": 0.5086705202312138, + "acc_norm_stderr": 0.0381189098894041 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.40476190476190477, + "acc_stderr": 0.025279850397404907, + "acc_norm": 0.40476190476190477, + "acc_norm_stderr": 0.025279850397404907 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.04174752578923185, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.04174752578923185 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.64, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.64, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5549132947976878, + "acc_stderr": 0.02675625512966377, + "acc_norm": 0.5549132947976878, + "acc_norm_stderr": 0.02675625512966377 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.49693251533742333, + "acc_stderr": 0.03928297078179663, + "acc_norm": 0.49693251533742333, + "acc_norm_stderr": 0.03928297078179663 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.558641975308642, + "acc_stderr": 0.02762873715566878, + "acc_norm": 0.558641975308642, + "acc_norm_stderr": 0.02762873715566878 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": 
{ + "acc": 0.6476683937823834, + "acc_stderr": 0.034474782864143565, + "acc_norm": 0.6476683937823834, + "acc_norm_stderr": 0.034474782864143565 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.42105263157894735, + "acc_stderr": 0.046446020912223177, + "acc_norm": 0.42105263157894735, + "acc_norm_stderr": 0.046446020912223177 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6036697247706422, + "acc_stderr": 0.02097146994790053, + "acc_norm": 0.6036697247706422, + "acc_norm_stderr": 0.02097146994790053 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.36507936507936506, + "acc_stderr": 0.043062412591271526, + "acc_norm": 0.36507936507936506, + "acc_norm_stderr": 0.043062412591271526 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5326797385620915, + "acc_stderr": 0.028568699752225868, + "acc_norm": 0.5326797385620915, + "acc_norm_stderr": 0.028568699752225868 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7272727272727273, + "acc_stderr": 0.04065578140908705, + "acc_norm": 0.7272727272727273, + "acc_norm_stderr": 0.04065578140908705 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5263157894736842, + "acc_stderr": 0.04063302731486671, + "acc_norm": 0.5263157894736842, + "acc_norm_stderr": 0.04063302731486671 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4738562091503268, + "acc_stderr": 0.020200164564804588, + "acc_norm": 0.4738562091503268, + "acc_norm_stderr": 0.020200164564804588 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.34397163120567376, + "acc_stderr": 0.02833801742861132, + "acc_norm": 0.34397163120567376, + "acc_norm_stderr": 0.02833801742861132 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.33035714285714285, + "acc_stderr": 0.04464285714285712, + "acc_norm": 0.33035714285714285, + "acc_norm_stderr": 
0.04464285714285712 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.44907407407407407, + "acc_stderr": 0.033922384053216174, + "acc_norm": 0.44907407407407407, + "acc_norm_stderr": 0.033922384053216174 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2636871508379888, + "acc_stderr": 0.014736926383761976, + "acc_norm": 0.2636871508379888, + "acc_norm_stderr": 0.014736926383761976 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.049999999999999996, + "acc_norm": 0.45, + "acc_norm_stderr": 0.049999999999999996 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.69, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.69, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4632352941176471, + "acc_stderr": 0.03029061918048569, + "acc_norm": 0.4632352941176471, + "acc_norm_stderr": 0.03029061918048569 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6040816326530613, + "acc_stderr": 0.03130802899065686, + "acc_norm": 0.6040816326530613, + "acc_norm_stderr": 0.03130802899065686 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6582278481012658, + "acc_stderr": 0.030874537537553617, + "acc_norm": 0.6582278481012658, + "acc_norm_stderr": 0.030874537537553617 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3917861799217731, + "acc_stderr": 0.012467564418145118, + "acc_norm": 0.3917861799217731, + "acc_norm_stderr": 0.012467564418145118 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5637254901960784, + "acc_stderr": 0.03480693138457039, + "acc_norm": 0.5637254901960784, + "acc_norm_stderr": 0.03480693138457039 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.593939393939394, + "acc_stderr": 0.03834816355401181, + "acc_norm": 0.593939393939394, + "acc_norm_stderr": 0.03834816355401181 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3353733170134639, + "mc1_stderr": 
0.016527534039668987, + "mc2": 0.5123885248457408, + "mc2_stderr": 0.016551704729664756 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5218417945690673, + "acc_stderr": 0.017173944474294385, + "acc_norm": 0.526564344746163, + "acc_norm_stderr": 0.017166075717577747 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + 
"harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "vicgalle/ConfigurableSOLAR-10.7B", + "model_sha": "9d9baad88ea9dbaa61881f15e4f0d16e931033b4", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/vihangd/smartyplats-7b-v1/result_2023-10-21 10:56:03.json b/vihangd/smartyplats-7b-v1/result_2023-10-21 10:56:03.json new file mode 100644 index 0000000000000000000000000000000000000000..a463a2c6d5fa18ee83c23ab87d63786bad812351 --- /dev/null +++ b/vihangd/smartyplats-7b-v1/result_2023-10-21 10:56:03.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2781569965870307, + "acc_stderr": 0.0130944699195388, + "acc_norm": 0.30631399317406144, + "acc_norm_stderr": 0.013470584417276511 + }, + "harness|ko_hellaswag|10": { + "acc": 0.33320055765783707, + "acc_stderr": 0.004703942346762255, + "acc_norm": 0.3875721967735511, + 
"acc_norm_stderr": 0.004862003566798538 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.038110796698335316, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.038110796698335316 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5242718446601942, + "acc_stderr": 0.049449010929737795, + "acc_norm": 0.5242718446601942, + "acc_norm_stderr": 0.049449010929737795 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.45721583652618136, + "acc_stderr": 0.01781438523853443, + "acc_norm": 0.45721583652618136, + "acc_norm_stderr": 0.01781438523853443 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.362962962962963, + "acc_stderr": 0.04153948404742398, + "acc_norm": 0.362962962962963, + "acc_norm_stderr": 0.04153948404742398 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3829787234042553, + "acc_stderr": 0.03177821250236922, + "acc_norm": 0.3829787234042553, + "acc_norm_stderr": 0.03177821250236922 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.35542168674698793, + "acc_stderr": 0.03726214354322415, + "acc_norm": 0.35542168674698793, + "acc_norm_stderr": 0.03726214354322415 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.48231511254019294, + "acc_stderr": 0.02838032284907713, + "acc_norm": 0.48231511254019294, + "acc_norm_stderr": 0.02838032284907713 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4439461883408072, + "acc_stderr": 0.03334625674242728, + "acc_norm": 0.4439461883408072, + "acc_norm_stderr": 0.03334625674242728 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.3816793893129771, + "acc_stderr": 0.0426073515764456, + "acc_norm": 0.3816793893129771, + "acc_norm_stderr": 0.0426073515764456 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + 
"acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.43434343434343436, + "acc_stderr": 0.03531505879359183, + "acc_norm": 0.43434343434343436, + "acc_norm_stderr": 0.03531505879359183 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4413793103448276, + "acc_stderr": 0.04137931034482758, + "acc_norm": 0.4413793103448276, + "acc_norm_stderr": 0.04137931034482758 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.043364327079931785, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.043364327079931785 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.453781512605042, + "acc_stderr": 0.03233943468182088, + "acc_norm": 0.453781512605042, + "acc_norm_stderr": 0.03233943468182088 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.47435897435897434, + "acc_stderr": 0.02531764972644865, + "acc_norm": 0.47435897435897434, + "acc_norm_stderr": 0.02531764972644865 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.68, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.68, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4351851851851852, + "acc_stderr": 0.04792898170907061, + "acc_norm": 0.4351851851851852, + "acc_norm_stderr": 0.04792898170907061 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4187192118226601, + "acc_stderr": 0.03471192860518468, + "acc_norm": 0.4187192118226601, + "acc_norm_stderr": 0.03471192860518468 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.42258064516129035, + "acc_stderr": 0.02810096472427264, + "acc_norm": 0.42258064516129035, + "acc_norm_stderr": 0.02810096472427264 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7307692307692307, + "acc_stderr": 
0.029058588303748842, + "acc_norm": 0.7307692307692307, + "acc_norm_stderr": 0.029058588303748842 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4188679245283019, + "acc_stderr": 0.03036505082911522, + "acc_norm": 0.4188679245283019, + "acc_norm_stderr": 0.03036505082911522 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4727272727272727, + "acc_stderr": 0.04782001791380063, + "acc_norm": 0.4727272727272727, + "acc_norm_stderr": 0.04782001791380063 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.02696242432507383, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.02696242432507383 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.26490066225165565, + "acc_stderr": 0.03603038545360384, + "acc_norm": 0.26490066225165565, + "acc_norm_stderr": 0.03603038545360384 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5970149253731343, + "acc_stderr": 0.034683432951111266, + "acc_norm": 0.5970149253731343, + "acc_norm_stderr": 0.034683432951111266 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.36416184971098264, + "acc_stderr": 0.03669072477416907, + "acc_norm": 0.36416184971098264, + "acc_norm_stderr": 0.03669072477416907 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.36243386243386244, + "acc_stderr": 0.024757473902752045, + "acc_norm": 0.36243386243386244, + "acc_norm_stderr": 0.024757473902752045 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3125, + "acc_stderr": 0.038760854559127644, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.038760854559127644 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.65, + "acc_stderr": 0.047937248544110175, + "acc_norm": 0.65, + "acc_norm_stderr": 0.047937248544110175 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 
0.49421965317919075, + "acc_stderr": 0.02691729617914911, + "acc_norm": 0.49421965317919075, + "acc_norm_stderr": 0.02691729617914911 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4294478527607362, + "acc_stderr": 0.038890666191127216, + "acc_norm": 0.4294478527607362, + "acc_norm_stderr": 0.038890666191127216 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4228395061728395, + "acc_stderr": 0.027487472980871598, + "acc_norm": 0.4228395061728395, + "acc_norm_stderr": 0.027487472980871598 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.46113989637305697, + "acc_stderr": 0.035975244117345775, + "acc_norm": 0.46113989637305697, + "acc_norm_stderr": 0.035975244117345775 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.30701754385964913, + "acc_stderr": 0.043391383225798594, + "acc_norm": 0.30701754385964913, + "acc_norm_stderr": 0.043391383225798594 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.43853211009174314, + "acc_stderr": 0.021274713073954562, + "acc_norm": 0.43853211009174314, + "acc_norm_stderr": 0.021274713073954562 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30158730158730157, + "acc_stderr": 0.041049472699033945, + "acc_norm": 0.30158730158730157, + "acc_norm_stderr": 0.041049472699033945 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.027914055510468, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.027914055510468 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6611570247933884, + "acc_stderr": 0.04320767807536669, + "acc_norm": 0.6611570247933884, + "acc_norm_stderr": 0.04320767807536669 + }, + 
"harness|ko_mmlu_astronomy|5": { + "acc": 0.39473684210526316, + "acc_stderr": 0.039777499346220734, + "acc_norm": 0.39473684210526316, + "acc_norm_stderr": 0.039777499346220734 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4133986928104575, + "acc_stderr": 0.019922115682786696, + "acc_norm": 0.4133986928104575, + "acc_norm_stderr": 0.019922115682786696 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3546099290780142, + "acc_stderr": 0.028538650028878648, + "acc_norm": 0.3546099290780142, + "acc_norm_stderr": 0.028538650028878648 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4017857142857143, + "acc_stderr": 0.04653333146973646, + "acc_norm": 0.4017857142857143, + "acc_norm_stderr": 0.04653333146973646 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.032757734861009996, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.032757734861009996 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2994413407821229, + "acc_stderr": 0.015318257745976708, + "acc_norm": 0.2994413407821229, + "acc_norm_stderr": 0.015318257745976708 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3272058823529412, + "acc_stderr": 0.028501452860396563, + "acc_norm": 0.3272058823529412, + "acc_norm_stderr": 0.028501452860396563 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.39591836734693875, + "acc_stderr": 0.03130802899065685, + "acc_norm": 0.39591836734693875, + "acc_norm_stderr": 0.03130802899065685 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5443037974683544, + "acc_stderr": 0.03241920684693333, + 
"acc_norm": 0.5443037974683544, + "acc_norm_stderr": 0.03241920684693333 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3376792698826597, + "acc_stderr": 0.012078563777145546, + "acc_norm": 0.3376792698826597, + "acc_norm_stderr": 0.012078563777145546 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.35294117647058826, + "acc_stderr": 0.033540924375915195, + "acc_norm": 0.35294117647058826, + "acc_norm_stderr": 0.033540924375915195 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4303030303030303, + "acc_stderr": 0.03866225962879077, + "acc_norm": 0.4303030303030303, + "acc_norm_stderr": 0.03866225962879077 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2778457772337821, + "mc1_stderr": 0.015680929364024633, + "mc2": 0.45442787164664084, + "mc2_stderr": 0.016775457950621752 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4037780401416765, + "acc_stderr": 0.01686903154029863, + "acc_norm": 0.47107438016528924, + "acc_norm_stderr": 0.01716156394991635 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + 
"harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "vihangd/smartyplats-7b-v1", + "model_sha": "f9180b83a6e2051c5780d2ad336278226a3d425d", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + 
"override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/vilm/Quyen-Pro-v0.1/result_2024-05-23 13:57:47.json b/vilm/Quyen-Pro-v0.1/result_2024-05-23 13:57:47.json new file mode 100644 index 0000000000000000000000000000000000000000..cb6f54bb5225311720321679953a63636df10e16 --- /dev/null +++ b/vilm/Quyen-Pro-v0.1/result_2024-05-23 13:57:47.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3626279863481229, + "acc_stderr": 0.014049106564955014, + "acc_norm": 0.40273037542662116, + "acc_norm_stderr": 0.01433223630679014 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3784106751643099, + "acc_stderr": 0.004839995745602314, + "acc_norm": 0.4910376419040032, + "acc_norm_stderr": 0.004988979750014438 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6140350877192983, + "acc_stderr": 0.03733756969066164, + "acc_norm": 0.6140350877192983, + "acc_norm_stderr": 0.03733756969066164 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6893203883495146, + "acc_stderr": 0.0458212416016155, + "acc_norm": 0.6893203883495146, + "acc_norm_stderr": 0.0458212416016155 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5951468710089399, + "acc_stderr": 0.017553246467720267, + "acc_norm": 0.5951468710089399, + "acc_norm_stderr": 0.017553246467720267 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.31851851851851853, + "acc_stderr": 0.04024778401977112, + "acc_norm": 0.31851851851851853, + "acc_norm_stderr": 0.04024778401977112 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.5787234042553191, + "acc_stderr": 0.032278345101462665, + "acc_norm": 0.5787234042553191, + "acc_norm_stderr": 0.032278345101462665 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.40963855421686746, + "acc_stderr": 0.03828401115079023, + "acc_norm": 
0.40963855421686746, + "acc_norm_stderr": 0.03828401115079023 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5627009646302251, + "acc_stderr": 0.028173917761762906, + "acc_norm": 0.5627009646302251, + "acc_norm_stderr": 0.028173917761762906 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.515695067264574, + "acc_stderr": 0.0335412657542081, + "acc_norm": 0.515695067264574, + "acc_norm_stderr": 0.0335412657542081 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6259541984732825, + "acc_stderr": 0.042438692422305246, + "acc_norm": 0.6259541984732825, + "acc_norm_stderr": 0.042438692422305246 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.48, + "acc_stderr": 0.05021167315686779, + "acc_norm": 0.48, + "acc_norm_stderr": 0.05021167315686779 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7171717171717171, + "acc_stderr": 0.03208779558786754, + "acc_norm": 0.7171717171717171, + "acc_norm_stderr": 0.03208779558786754 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5103448275862069, + "acc_stderr": 0.04165774775728763, + "acc_norm": 0.5103448275862069, + "acc_norm_stderr": 0.04165774775728763 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.04617034827006715, + "acc_norm": 0.3137254901960784, + "acc_norm_stderr": 0.04617034827006715 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6176470588235294, + "acc_stderr": 0.03156663099215415, + "acc_norm": 0.6176470588235294, + "acc_norm_stderr": 0.03156663099215415 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5641025641025641, + "acc_stderr": 0.02514180151117749, + "acc_norm": 0.5641025641025641, + "acc_norm_stderr": 0.02514180151117749 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.66, + "acc_stderr": 0.04760952285695237, + "acc_norm": 0.66, + "acc_norm_stderr": 0.04760952285695237 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, 
+ "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6296296296296297, + "acc_stderr": 0.04668408033024932, + "acc_norm": 0.6296296296296297, + "acc_norm_stderr": 0.04668408033024932 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.46798029556650245, + "acc_stderr": 0.035107665979592174, + "acc_norm": 0.46798029556650245, + "acc_norm_stderr": 0.035107665979592174 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6, + "acc_stderr": 0.027869320571664618, + "acc_norm": 0.6, + "acc_norm_stderr": 0.027869320571664618 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7777777777777778, + "acc_stderr": 0.027236013946196687, + "acc_norm": 0.7777777777777778, + "acc_norm_stderr": 0.027236013946196687 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5358490566037736, + "acc_stderr": 0.030693675018458006, + "acc_norm": 0.5358490566037736, + "acc_norm_stderr": 0.030693675018458006 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6454545454545455, + "acc_stderr": 0.04582004841505417, + "acc_norm": 0.6454545454545455, + "acc_norm_stderr": 0.04582004841505417 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.4148148148148148, + "acc_stderr": 0.030039842454069283, + "acc_norm": 0.4148148148148148, + "acc_norm_stderr": 0.030039842454069283 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33112582781456956, + "acc_stderr": 0.038425817186598696, + "acc_norm": 0.33112582781456956, + "acc_norm_stderr": 0.038425817186598696 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.681592039800995, + "acc_stderr": 0.03294118479054095, + "acc_norm": 0.681592039800995, + "acc_norm_stderr": 0.03294118479054095 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5375722543352601, + "acc_stderr": 0.0380168510452446, + "acc_norm": 0.5375722543352601, + "acc_norm_stderr": 0.0380168510452446 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 
0.4947089947089947, + "acc_stderr": 0.02574986828855657, + "acc_norm": 0.4947089947089947, + "acc_norm_stderr": 0.02574986828855657 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4305555555555556, + "acc_stderr": 0.04140685639111503, + "acc_norm": 0.4305555555555556, + "acc_norm_stderr": 0.04140685639111503 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.7, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.7, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.6011560693641619, + "acc_stderr": 0.026362437574546545, + "acc_norm": 0.6011560693641619, + "acc_norm_stderr": 0.026362437574546545 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5030674846625767, + "acc_stderr": 0.03928297078179663, + "acc_norm": 0.5030674846625767, + "acc_norm_stderr": 0.03928297078179663 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5493827160493827, + "acc_stderr": 0.027684721415656203, + "acc_norm": 0.5493827160493827, + "acc_norm_stderr": 0.027684721415656203 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6528497409326425, + "acc_stderr": 0.03435696168361356, + "acc_norm": 0.6528497409326425, + "acc_norm_stderr": 0.03435696168361356 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.39473684210526316, + "acc_stderr": 0.045981880578165414, + "acc_norm": 0.39473684210526316, + "acc_norm_stderr": 0.045981880578165414 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6477064220183486, + "acc_stderr": 0.02048056884399899, + "acc_norm": 0.6477064220183486, + "acc_norm_stderr": 0.02048056884399899 + }, + "harness|ko_mmlu_formal_logic|5": { + 
"acc": 0.47619047619047616, + "acc_stderr": 0.04467062628403273, + "acc_norm": 0.47619047619047616, + "acc_norm_stderr": 0.04467062628403273 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5816993464052288, + "acc_stderr": 0.028245134024387296, + "acc_norm": 0.5816993464052288, + "acc_norm_stderr": 0.028245134024387296 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.71900826446281, + "acc_stderr": 0.04103203830514511, + "acc_norm": 0.71900826446281, + "acc_norm_stderr": 0.04103203830514511 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5986842105263158, + "acc_stderr": 0.039889037033362836, + "acc_norm": 0.5986842105263158, + "acc_norm_stderr": 0.039889037033362836 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.48366013071895425, + "acc_stderr": 0.020217030653186453, + "acc_norm": 0.48366013071895425, + "acc_norm_stderr": 0.020217030653186453 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.4078014184397163, + "acc_stderr": 0.02931601177634356, + "acc_norm": 0.4078014184397163, + "acc_norm_stderr": 0.02931601177634356 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.49107142857142855, + "acc_stderr": 0.04745033255489123, + "acc_norm": 0.49107142857142855, + "acc_norm_stderr": 0.04745033255489123 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5231481481481481, + "acc_stderr": 0.034063153607115065, + "acc_norm": 0.5231481481481481, + "acc_norm_stderr": 0.034063153607115065 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2569832402234637, + "acc_stderr": 0.014614465821966346, + "acc_norm": 0.2569832402234637, + "acc_norm_stderr": 0.014614465821966346 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + 
"harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.75, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.75, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4227941176470588, + "acc_stderr": 0.03000856284500348, + "acc_norm": 0.4227941176470588, + "acc_norm_stderr": 0.03000856284500348 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6571428571428571, + "acc_stderr": 0.03038726291954772, + "acc_norm": 0.6571428571428571, + "acc_norm_stderr": 0.03038726291954772 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7341772151898734, + "acc_stderr": 0.028756799629658342, + "acc_norm": 0.7341772151898734, + "acc_norm_stderr": 0.028756799629658342 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3644067796610169, + "acc_stderr": 0.01229169498305648, + "acc_norm": 0.3644067796610169, + "acc_norm_stderr": 0.01229169498305648 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6127450980392157, + "acc_stderr": 0.03418931233833344, + "acc_norm": 0.6127450980392157, + "acc_norm_stderr": 0.03418931233833344 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6606060606060606, + "acc_stderr": 0.03697442205031595, + "acc_norm": 0.6606060606060606, + "acc_norm_stderr": 0.03697442205031595 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.34516523867809057, + "mc1_stderr": 0.016643103319274943, + "mc2": 0.5117587491261114, + "mc2_stderr": 0.01619238978158076 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.48642266824085006, + "acc_stderr": 0.01718401506040145, + "acc_norm": 0.5808736717827627, + "acc_norm_stderr": 0.016963995010862792 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + 
"harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + 
"harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "vilm/Quyen-Pro-v0.1", + "model_sha": "9bfbf7f2ef1fde8059e1e5814d674ec8604fff6c", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/vitruv/vitruv_1/result_2024-02-01 08:20:08.json b/vitruv/vitruv_1/result_2024-02-01 08:20:08.json new file mode 100644 index 0000000000000000000000000000000000000000..23a0e620b80fed932c0c3443c52b6e3bdbcf9c17 --- /dev/null +++ b/vitruv/vitruv_1/result_2024-02-01 08:20:08.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4189419795221843, + "acc_stderr": 0.014418106953639013, + "acc_norm": 0.4803754266211604, + "acc_norm_stderr": 0.014600132075947094 + }, + "harness|ko_hellaswag|10": { + "acc": 0.429097789285003, + "acc_stderr": 0.004939358145561316, + "acc_norm": 0.5861382194781916, + "acc_norm_stderr": 0.0049151774069562575 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5730994152046783, + "acc_stderr": 0.03793620616529917, + "acc_norm": 0.5730994152046783, + "acc_norm_stderr": 0.03793620616529917 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6116504854368932, + "acc_stderr": 0.048257293373563895, + "acc_norm": 0.6116504854368932, + "acc_norm_stderr": 0.048257293373563895 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.611749680715198, + "acc_stderr": 0.01742767329554432, + "acc_norm": 0.611749680715198, + "acc_norm_stderr": 0.01742767329554432 + }, + 
"harness|ko_mmlu_anatomy|5": { + "acc": 0.43703703703703706, + "acc_stderr": 0.04284958639753398, + "acc_norm": 0.43703703703703706, + "acc_norm_stderr": 0.04284958639753398 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.42127659574468085, + "acc_stderr": 0.03227834510146268, + "acc_norm": 0.42127659574468085, + "acc_norm_stderr": 0.03227834510146268 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3674698795180723, + "acc_stderr": 0.03753267402120575, + "acc_norm": 0.3674698795180723, + "acc_norm_stderr": 0.03753267402120575 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5016077170418006, + "acc_stderr": 0.02839794490780661, + "acc_norm": 0.5016077170418006, + "acc_norm_stderr": 0.02839794490780661 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5246636771300448, + "acc_stderr": 0.033516951676526276, + "acc_norm": 0.5246636771300448, + "acc_norm_stderr": 0.033516951676526276 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.44274809160305345, + "acc_stderr": 0.043564472026650695, + "acc_norm": 0.44274809160305345, + "acc_norm_stderr": 0.043564472026650695 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.035402943770953675, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.035402943770953675 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4206896551724138, + "acc_stderr": 0.0411391498118926, + "acc_norm": 0.4206896551724138, + "acc_norm_stderr": 0.0411391498118926 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.04280105837364396, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.04280105837364396 
+ }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4327731092436975, + "acc_stderr": 0.03218358107742613, + "acc_norm": 0.4327731092436975, + "acc_norm_stderr": 0.03218358107742613 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4205128205128205, + "acc_stderr": 0.025028610276710855, + "acc_norm": 0.4205128205128205, + "acc_norm_stderr": 0.025028610276710855 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.63, + "acc_stderr": 0.04852365870939098, + "acc_norm": 0.63, + "acc_norm_stderr": 0.04852365870939098 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3645320197044335, + "acc_stderr": 0.0338640574606209, + "acc_norm": 0.3645320197044335, + "acc_norm_stderr": 0.0338640574606209 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5, + "acc_stderr": 0.028444006199428714, + "acc_norm": 0.5, + "acc_norm_stderr": 0.028444006199428714 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7008547008547008, + "acc_stderr": 0.029996951858349483, + "acc_norm": 0.7008547008547008, + "acc_norm_stderr": 0.029996951858349483 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4528301886792453, + "acc_stderr": 0.03063562795796182, + "acc_norm": 0.4528301886792453, + "acc_norm_stderr": 0.03063562795796182 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4727272727272727, + "acc_stderr": 0.04782001791380063, + "acc_norm": 0.4727272727272727, + "acc_norm_stderr": 0.04782001791380063 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.29259259259259257, + "acc_stderr": 0.027738969632176095, + "acc_norm": 0.29259259259259257, + "acc_norm_stderr": 
0.027738969632176095 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.26490066225165565, + "acc_stderr": 0.03603038545360384, + "acc_norm": 0.26490066225165565, + "acc_norm_stderr": 0.03603038545360384 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5223880597014925, + "acc_stderr": 0.035319879302087305, + "acc_norm": 0.5223880597014925, + "acc_norm_stderr": 0.035319879302087305 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3352601156069364, + "acc_stderr": 0.03599586301247079, + "acc_norm": 0.3352601156069364, + "acc_norm_stderr": 0.03599586301247079 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3253968253968254, + "acc_stderr": 0.024130158299762606, + "acc_norm": 0.3253968253968254, + "acc_norm_stderr": 0.024130158299762606 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3958333333333333, + "acc_stderr": 0.04089465449325583, + "acc_norm": 0.3958333333333333, + "acc_norm_stderr": 0.04089465449325583 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.63, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.63, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4682080924855491, + "acc_stderr": 0.026864624366756656, + "acc_norm": 0.4682080924855491, + "acc_norm_stderr": 0.026864624366756656 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3987730061349693, + "acc_stderr": 0.03847021420456023, + "acc_norm": 0.3987730061349693, + "acc_norm_stderr": 0.03847021420456023 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4783950617283951, + "acc_stderr": 0.027794760105008736, + "acc_norm": 0.4783950617283951, + "acc_norm_stderr": 0.027794760105008736 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.047609522856952344, + "acc_norm": 0.34, + "acc_norm_stderr": 
0.047609522856952344 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5492227979274611, + "acc_stderr": 0.03590910952235524, + "acc_norm": 0.5492227979274611, + "acc_norm_stderr": 0.03590910952235524 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.41228070175438597, + "acc_stderr": 0.04630653203366597, + "acc_norm": 0.41228070175438597, + "acc_norm_stderr": 0.04630653203366597 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5284403669724771, + "acc_stderr": 0.021402615697348047, + "acc_norm": 0.5284403669724771, + "acc_norm_stderr": 0.021402615697348047 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.0404061017820884, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.0404061017820884 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.43137254901960786, + "acc_stderr": 0.02835895631342355, + "acc_norm": 0.43137254901960786, + "acc_norm_stderr": 0.02835895631342355 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.043913262867240704, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.043913262867240704 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.42105263157894735, + "acc_stderr": 0.04017901275981748, + "acc_norm": 0.42105263157894735, + "acc_norm_stderr": 0.04017901275981748 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.42810457516339867, + "acc_stderr": 0.0200176292142131, + "acc_norm": 0.42810457516339867, + "acc_norm_stderr": 0.0200176292142131 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3546099290780142, + "acc_stderr": 0.028538650028878638, + "acc_norm": 0.3546099290780142, + "acc_norm_stderr": 0.028538650028878638 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3125, + "acc_stderr": 
0.043994650575715215, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.043994650575715215 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.03324708911809117, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.03324708911809117 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2670391061452514, + "acc_stderr": 0.014796502622562551, + "acc_norm": 0.2670391061452514, + "acc_norm_stderr": 0.014796502622562551 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.34191176470588236, + "acc_stderr": 0.028814722422254174, + "acc_norm": 0.34191176470588236, + "acc_norm_stderr": 0.028814722422254174 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4897959183673469, + "acc_stderr": 0.03200255347893782, + "acc_norm": 0.4897959183673469, + "acc_norm_stderr": 0.03200255347893782 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6371308016877637, + "acc_stderr": 0.031299208255302136, + "acc_norm": 0.6371308016877637, + "acc_norm_stderr": 0.031299208255302136 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3220338983050847, + "acc_stderr": 0.01193393607189109, + "acc_norm": 0.3220338983050847, + "acc_norm_stderr": 0.01193393607189109 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5833333333333334, + "acc_stderr": 0.03460228327239171, + "acc_norm": 0.5833333333333334, + "acc_norm_stderr": 0.03460228327239171 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5515151515151515, + "acc_stderr": 0.038835659779569286, + "acc_norm": 0.5515151515151515, + "acc_norm_stderr": 0.038835659779569286 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 
0.26560587515299877, + "mc1_stderr": 0.015461027627253597, + "mc2": 0.42140463999829414, + "mc2_stderr": 0.014891604322929288 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4970484061393152, + "acc_stderr": 0.017190054580194694, + "acc_norm": 0.5348288075560803, + "acc_norm_stderr": 0.017148598015747422 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "vitruv/vitruv_1", + "model_sha": "1bf67cd0e4e1fd3bb753b51e693a7e11a3c240ec", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/vitruv/vitruv_2/result_2024-03-20 22:45:06.json b/vitruv/vitruv_2/result_2024-03-20 22:45:06.json new file mode 100644 index 0000000000000000000000000000000000000000..675a1aac51c1eeb78aa6846bd68be9f0a5fc1423 --- /dev/null +++ b/vitruv/vitruv_2/result_2024-03-20 22:45:06.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3165529010238908, + "acc_stderr": 0.01359243151906808, + "acc_norm": 0.37627986348122866, + "acc_norm_stderr": 0.014157022555407166 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3851822346146186, + "acc_stderr": 0.004856437955719853, + "acc_norm": 
0.5125473013343955, + "acc_norm_stderr": 0.00498821003383201 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.40350877192982454, + "acc_stderr": 0.03762738699917055, + "acc_norm": 0.40350877192982454, + "acc_norm_stderr": 0.03762738699917055 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.30097087378640774, + "acc_stderr": 0.045416094465039476, + "acc_norm": 0.30097087378640774, + "acc_norm_stderr": 0.045416094465039476 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3997445721583653, + "acc_stderr": 0.017516847907053275, + "acc_norm": 0.3997445721583653, + "acc_norm_stderr": 0.017516847907053275 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.04171654161354543, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.04171654161354543 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720683, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720683 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3191489361702128, + "acc_stderr": 0.030472973363380045, + "acc_norm": 0.3191489361702128, + "acc_norm_stderr": 0.030472973363380045 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3493975903614458, + "acc_stderr": 0.0371172519074075, + "acc_norm": 0.3493975903614458, + "acc_norm_stderr": 0.0371172519074075 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3762057877813505, + "acc_stderr": 0.02751392568354943, + "acc_norm": 0.3762057877813505, + "acc_norm_stderr": 0.02751392568354943 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4349775784753363, + "acc_stderr": 0.03327283370271345, + "acc_norm": 0.4349775784753363, + "acc_norm_stderr": 0.03327283370271345 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.31297709923664124, + "acc_stderr": 0.04066962905677698, + "acc_norm": 0.31297709923664124, + "acc_norm_stderr": 0.04066962905677698 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + 
"acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.0347327959083696, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.0347327959083696 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.296551724137931, + "acc_stderr": 0.03806142687309993, + "acc_norm": 0.296551724137931, + "acc_norm_stderr": 0.03806142687309993 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.04158307533083286, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.04158307533083286 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.31932773109243695, + "acc_stderr": 0.030283995525884396, + "acc_norm": 0.31932773109243695, + "acc_norm_stderr": 0.030283995525884396 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2692307692307692, + "acc_stderr": 0.02248938979365484, + "acc_norm": 0.2692307692307692, + "acc_norm_stderr": 0.02248938979365484 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.3425925925925926, + "acc_stderr": 0.045879047413018105, + "acc_norm": 0.3425925925925926, + "acc_norm_stderr": 0.045879047413018105 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.30049261083743845, + "acc_stderr": 0.03225799476233485, + "acc_norm": 0.30049261083743845, + "acc_norm_stderr": 0.03225799476233485 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.33225806451612905, + "acc_stderr": 0.026795560848122797, + "acc_norm": 0.33225806451612905, + "acc_norm_stderr": 0.026795560848122797 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5128205128205128, + 
"acc_stderr": 0.03274531938842351, + "acc_norm": 0.5128205128205128, + "acc_norm_stderr": 0.03274531938842351 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.33962264150943394, + "acc_stderr": 0.029146904747798335, + "acc_norm": 0.33962264150943394, + "acc_norm_stderr": 0.029146904747798335 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.34545454545454546, + "acc_stderr": 0.04554619617541054, + "acc_norm": 0.34545454545454546, + "acc_norm_stderr": 0.04554619617541054 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2740740740740741, + "acc_stderr": 0.02719593480408563, + "acc_norm": 0.2740740740740741, + "acc_norm_stderr": 0.02719593480408563 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.23178807947019867, + "acc_stderr": 0.03445406271987054, + "acc_norm": 0.23178807947019867, + "acc_norm_stderr": 0.03445406271987054 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.31840796019900497, + "acc_stderr": 0.032941184790540944, + "acc_norm": 0.31840796019900497, + "acc_norm_stderr": 0.032941184790540944 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.24277456647398843, + "acc_stderr": 0.0326926380614177, + "acc_norm": 0.24277456647398843, + "acc_norm_stderr": 0.0326926380614177 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.022569897074918417, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.022569897074918417 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2569444444444444, + "acc_stderr": 0.03653946969442099, + "acc_norm": 0.2569444444444444, + "acc_norm_stderr": 0.03653946969442099 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036845, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036845 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + 
"harness|ko_mmlu_moral_disputes|5": { + "acc": 0.28034682080924855, + "acc_stderr": 0.024182427496577612, + "acc_norm": 0.28034682080924855, + "acc_norm_stderr": 0.024182427496577612 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.294478527607362, + "acc_stderr": 0.03581165790474082, + "acc_norm": 0.294478527607362, + "acc_norm_stderr": 0.03581165790474082 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.32407407407407407, + "acc_stderr": 0.02604176620271716, + "acc_norm": 0.32407407407407407, + "acc_norm_stderr": 0.02604176620271716 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.24352331606217617, + "acc_stderr": 0.030975436386845426, + "acc_norm": 0.24352331606217617, + "acc_norm_stderr": 0.030975436386845426 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.040493392977481404, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.040493392977481404 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3229357798165138, + "acc_stderr": 0.02004811592341532, + "acc_norm": 0.3229357798165138, + "acc_norm_stderr": 0.02004811592341532 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.19047619047619047, + "acc_stderr": 0.035122074123020534, + "acc_norm": 0.19047619047619047, + "acc_norm_stderr": 0.035122074123020534 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.3366013071895425, + "acc_stderr": 0.02705797462449438, + "acc_norm": 0.3366013071895425, + "acc_norm_stderr": 0.02705797462449438 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.2727272727272727, + "acc_stderr": 0.04065578140908705, + "acc_norm": 0.2727272727272727, + 
"acc_norm_stderr": 0.04065578140908705 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.21710526315789475, + "acc_stderr": 0.03355045304882924, + "acc_norm": 0.21710526315789475, + "acc_norm_stderr": 0.03355045304882924 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2630718954248366, + "acc_stderr": 0.017812676542320657, + "acc_norm": 0.2630718954248366, + "acc_norm_stderr": 0.017812676542320657 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2801418439716312, + "acc_stderr": 0.02678917235114025, + "acc_norm": 0.2801418439716312, + "acc_norm_stderr": 0.02678917235114025 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.04287858751340456, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.04287858751340456 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.19444444444444445, + "acc_stderr": 0.026991454502036744, + "acc_norm": 0.19444444444444445, + "acc_norm_stderr": 0.026991454502036744 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24692737430167597, + "acc_stderr": 0.014422292204808852, + "acc_norm": 0.24692737430167597, + "acc_norm_stderr": 0.014422292204808852 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816508, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816508 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.2610294117647059, + "acc_stderr": 0.02667925227010311, + "acc_norm": 0.2610294117647059, + "acc_norm_stderr": 0.02667925227010311 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.21224489795918366, + "acc_stderr": 0.026176967197866764, + "acc_norm": 0.21224489795918366, + "acc_norm_stderr": 0.026176967197866764 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 
0.350210970464135, + "acc_stderr": 0.031052391937584353, + "acc_norm": 0.350210970464135, + "acc_norm_stderr": 0.031052391937584353 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.25488917861799215, + "acc_stderr": 0.011130509812662974, + "acc_norm": 0.25488917861799215, + "acc_norm_stderr": 0.011130509812662974 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.03132179803083291, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.03132179803083291 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.30303030303030304, + "acc_stderr": 0.035886248000917075, + "acc_norm": 0.30303030303030304, + "acc_norm_stderr": 0.035886248000917075 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.23255813953488372, + "mc1_stderr": 0.014789157531080517, + "mc2": 0.3768837549466162, + "mc2_stderr": 0.014633138385160598 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.2798110979929162, + "acc_stderr": 0.01543371579542776, + "acc_norm": 0.3305785123966942, + "acc_norm_stderr": 0.0161734232988457 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + 
"harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "vitruv/vitruv_2", + "model_sha": "db9d4443473291aedc6765283d925156c0736a85", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + 
"num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/wenbopan/Faro-Yi-9B-DPO/result_2024-07-11 22:03:52.json b/wenbopan/Faro-Yi-9B-DPO/result_2024-07-11 22:03:52.json new file mode 100644 index 0000000000000000000000000000000000000000..8c6e778bd4fd19d4e3f21bdcb68a180c351a21b6 --- /dev/null +++ b/wenbopan/Faro-Yi-9B-DPO/result_2024-07-11 22:03:52.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.302901023890785, + "acc_stderr": 0.013428241573185349, + "acc_norm": 0.35665529010238906, + "acc_norm_stderr": 0.013998056902620204 + }, + "harness|ko_hellaswag|10": { + "acc": 0.34007169886476796, + "acc_stderr": 0.004727648057897935, + "acc_norm": 0.4228241386178052, + "acc_norm_stderr": 0.00492998369279506 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4152046783625731, + "acc_stderr": 0.03779275945503201, + "acc_norm": 0.4152046783625731, + "acc_norm_stderr": 0.03779275945503201 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6019417475728155, + "acc_stderr": 0.04846748253977238, + "acc_norm": 0.6019417475728155, + "acc_norm_stderr": 0.04846748253977238 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4763729246487867, + "acc_stderr": 0.017859989765176453, + "acc_norm": 0.4763729246487867, + "acc_norm_stderr": 0.017859989765176453 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04072314811876837, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04072314811876837 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4, + "acc_stderr": 0.032025630761017346, + "acc_norm": 0.4, + "acc_norm_stderr": 0.032025630761017346 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.43373493975903615, + "acc_stderr": 0.03858158940685515, + "acc_norm": 
0.43373493975903615, + "acc_norm_stderr": 0.03858158940685515 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4855305466237942, + "acc_stderr": 0.028386198084177673, + "acc_norm": 0.4855305466237942, + "acc_norm_stderr": 0.028386198084177673 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4798206278026906, + "acc_stderr": 0.033530461674123, + "acc_norm": 0.4798206278026906, + "acc_norm_stderr": 0.033530461674123 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4351145038167939, + "acc_stderr": 0.043482080516448585, + "acc_norm": 0.4351145038167939, + "acc_norm_stderr": 0.043482080516448585 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939098, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939098 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.035476014940069384, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.035476014940069384 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5586206896551724, + "acc_stderr": 0.04137931034482757, + "acc_norm": 0.5586206896551724, + "acc_norm_stderr": 0.04137931034482757 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.04533838195929775, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.04533838195929775 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.542016806722689, + "acc_stderr": 0.03236361111951941, + "acc_norm": 0.542016806722689, + "acc_norm_stderr": 0.03236361111951941 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.46153846153846156, + "acc_stderr": 0.025275892070240637, + "acc_norm": 0.46153846153846156, + "acc_norm_stderr": 0.025275892070240637 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.63, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.63, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.39, + "acc_stderr": 
0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6296296296296297, + "acc_stderr": 0.04668408033024931, + "acc_norm": 0.6296296296296297, + "acc_norm_stderr": 0.04668408033024931 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4482758620689655, + "acc_stderr": 0.03499113137676744, + "acc_norm": 0.4482758620689655, + "acc_norm_stderr": 0.03499113137676744 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4612903225806452, + "acc_stderr": 0.02835863485983693, + "acc_norm": 0.4612903225806452, + "acc_norm_stderr": 0.02835863485983693 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6837606837606838, + "acc_stderr": 0.03046365674734026, + "acc_norm": 0.6837606837606838, + "acc_norm_stderr": 0.03046365674734026 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4377358490566038, + "acc_stderr": 0.03053333843046751, + "acc_norm": 0.4377358490566038, + "acc_norm_stderr": 0.03053333843046751 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4909090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.4909090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.43703703703703706, + "acc_stderr": 0.030242862397654002, + "acc_norm": 0.43703703703703706, + "acc_norm_stderr": 0.030242862397654002 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.038020397601079024, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.038020397601079024 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5920398009950248, + "acc_stderr": 0.03475116365194092, + "acc_norm": 0.5920398009950248, + "acc_norm_stderr": 0.03475116365194092 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.44508670520231214, + "acc_stderr": 0.03789401760283647, + "acc_norm": 0.44508670520231214, + "acc_norm_stderr": 0.03789401760283647 + }, + 
"harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.5132275132275133, + "acc_stderr": 0.025742297289575142, + "acc_norm": 0.5132275132275133, + "acc_norm_stderr": 0.025742297289575142 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3541666666666667, + "acc_stderr": 0.039994111357535424, + "acc_norm": 0.3541666666666667, + "acc_norm_stderr": 0.039994111357535424 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.34, + "acc_stderr": 0.047609522856952344, + "acc_norm": 0.34, + "acc_norm_stderr": 0.047609522856952344 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237101, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237101 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5, + "acc_stderr": 0.026919095102908273, + "acc_norm": 0.5, + "acc_norm_stderr": 0.026919095102908273 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.49693251533742333, + "acc_stderr": 0.03928297078179663, + "acc_norm": 0.49693251533742333, + "acc_norm_stderr": 0.03928297078179663 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4567901234567901, + "acc_stderr": 0.027716661650194038, + "acc_norm": 0.4567901234567901, + "acc_norm_stderr": 0.027716661650194038 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.44041450777202074, + "acc_stderr": 0.03582724530036093, + "acc_norm": 0.44041450777202074, + "acc_norm_stderr": 0.03582724530036093 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.42105263157894735, + "acc_stderr": 0.04644602091222317, + "acc_norm": 0.42105263157894735, + "acc_norm_stderr": 0.04644602091222317 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.47339449541284406, + "acc_stderr": 0.021406952688151588, + "acc_norm": 0.47339449541284406, + "acc_norm_stderr": 0.021406952688151588 + }, + 
"harness|ko_mmlu_formal_logic|5": { + "acc": 0.4603174603174603, + "acc_stderr": 0.04458029125470973, + "acc_norm": 0.4603174603174603, + "acc_norm_stderr": 0.04458029125470973 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.47058823529411764, + "acc_stderr": 0.02858034106513829, + "acc_norm": 0.47058823529411764, + "acc_norm_stderr": 0.02858034106513829 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.53, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.53, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6694214876033058, + "acc_stderr": 0.04294340845212094, + "acc_norm": 0.6694214876033058, + "acc_norm_stderr": 0.04294340845212094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4144736842105263, + "acc_stderr": 0.04008973785779206, + "acc_norm": 0.4144736842105263, + "acc_norm_stderr": 0.04008973785779206 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3954248366013072, + "acc_stderr": 0.019780465954777535, + "acc_norm": 0.3954248366013072, + "acc_norm_stderr": 0.019780465954777535 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.43617021276595747, + "acc_stderr": 0.02958345203628407, + "acc_norm": 0.43617021276595747, + "acc_norm_stderr": 0.02958345203628407 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4017857142857143, + "acc_stderr": 0.04653333146973646, + "acc_norm": 0.4017857142857143, + "acc_norm_stderr": 0.04653333146973646 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4398148148148148, + "acc_stderr": 0.03385177976044811, + "acc_norm": 0.4398148148148148, + "acc_norm_stderr": 0.03385177976044811 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.32849162011173183, + "acc_stderr": 0.015707935398496447, + "acc_norm": 0.32849162011173183, + "acc_norm_stderr": 0.015707935398496447 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.53, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.53, + 
"acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.72, + "acc_stderr": 0.04512608598542129, + "acc_norm": 0.72, + "acc_norm_stderr": 0.04512608598542129 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.30514705882352944, + "acc_stderr": 0.027971541370170595, + "acc_norm": 0.30514705882352944, + "acc_norm_stderr": 0.027971541370170595 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6122448979591837, + "acc_stderr": 0.031192230726795656, + "acc_norm": 0.6122448979591837, + "acc_norm_stderr": 0.031192230726795656 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5485232067510548, + "acc_stderr": 0.032393600173974704, + "acc_norm": 0.5485232067510548, + "acc_norm_stderr": 0.032393600173974704 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3670143415906128, + "acc_stderr": 0.012310264244842137, + "acc_norm": 0.3670143415906128, + "acc_norm_stderr": 0.012310264244842137 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.47058823529411764, + "acc_stderr": 0.03503235296367992, + "acc_norm": 0.47058823529411764, + "acc_norm_stderr": 0.03503235296367992 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5151515151515151, + "acc_stderr": 0.03902551007374448, + "acc_norm": 0.5151515151515151, + "acc_norm_stderr": 0.03902551007374448 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3402692778457772, + "mc1_stderr": 0.016586304901762564, + "mc2": 0.5276406131087285, + "mc2_stderr": 0.015981459134112884 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.51357733175915, + "acc_stderr": 0.01718401506040145, + "acc_norm": 0.526564344746163, + "acc_norm_stderr": 0.017166075717577747 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + 
"harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + 
"harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "wenbopan/Faro-Yi-9B-DPO", + "model_sha": "58a4349f57957f527fdde51fd0cfcc558e8854d9", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/wenbopan/Faro-Yi-9B/result_2024-07-06 15:38:35.json b/wenbopan/Faro-Yi-9B/result_2024-07-06 15:38:35.json new file mode 100644 index 0000000000000000000000000000000000000000..6e7c10aa8452b64ecbf7dfb4b9b04a08ebe0ea1a --- /dev/null +++ b/wenbopan/Faro-Yi-9B/result_2024-07-06 15:38:35.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2901023890784983, + "acc_stderr": 0.013261573677520767, + "acc_norm": 0.34726962457337884, + "acc_norm_stderr": 0.013913034529620437 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3307110137422824, + "acc_stderr": 0.004695076629884531, + "acc_norm": 0.4080860386377216, + "acc_norm_stderr": 0.004904747752286946 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4502923976608187, + "acc_stderr": 0.03815827365913236, + "acc_norm": 0.4502923976608187, + "acc_norm_stderr": 0.03815827365913236 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5922330097087378, + "acc_stderr": 0.048657775704107675, + "acc_norm": 0.5922330097087378, + "acc_norm_stderr": 0.048657775704107675 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4789272030651341, + "acc_stderr": 0.017864076786212896, + "acc_norm": 0.4789272030651341, 
+ "acc_norm_stderr": 0.017864076786212896 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.32592592592592595, + "acc_stderr": 0.040491220417025055, + "acc_norm": 0.32592592592592595, + "acc_norm_stderr": 0.040491220417025055 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.43829787234042555, + "acc_stderr": 0.03243618636108102, + "acc_norm": 0.43829787234042555, + "acc_norm_stderr": 0.03243618636108102 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42168674698795183, + "acc_stderr": 0.03844453181770917, + "acc_norm": 0.42168674698795183, + "acc_norm_stderr": 0.03844453181770917 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4983922829581994, + "acc_stderr": 0.02839794490780661, + "acc_norm": 0.4983922829581994, + "acc_norm_stderr": 0.02839794490780661 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.47533632286995514, + "acc_stderr": 0.033516951676526276, + "acc_norm": 0.47533632286995514, + "acc_norm_stderr": 0.033516951676526276 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48091603053435117, + "acc_stderr": 0.04382094705550988, + "acc_norm": 0.48091603053435117, + "acc_norm_stderr": 0.04382094705550988 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5404040404040404, + "acc_stderr": 0.035507024651313425, + "acc_norm": 0.5404040404040404, + "acc_norm_stderr": 0.035507024651313425 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5241379310344828, + "acc_stderr": 0.041618085035015295, + "acc_norm": 0.5241379310344828, + "acc_norm_stderr": 0.041618085035015295 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.04336432707993177, + "acc_norm": 
0.2549019607843137, + "acc_norm_stderr": 0.04336432707993177 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5210084033613446, + "acc_stderr": 0.03244980849990028, + "acc_norm": 0.5210084033613446, + "acc_norm_stderr": 0.03244980849990028 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4717948717948718, + "acc_stderr": 0.0253106392549339, + "acc_norm": 0.4717948717948718, + "acc_norm_stderr": 0.0253106392549339 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.63, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.63, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5925925925925926, + "acc_stderr": 0.04750077341199985, + "acc_norm": 0.5925925925925926, + "acc_norm_stderr": 0.04750077341199985 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4187192118226601, + "acc_stderr": 0.034711928605184676, + "acc_norm": 0.4187192118226601, + "acc_norm_stderr": 0.034711928605184676 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.47419354838709676, + "acc_stderr": 0.028406095057653326, + "acc_norm": 0.47419354838709676, + "acc_norm_stderr": 0.028406095057653326 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.030882736974138656, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.030882736974138656 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.44150943396226416, + "acc_stderr": 0.030561590426731837, + "acc_norm": 0.44150943396226416, + "acc_norm_stderr": 0.030561590426731837 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4818181818181818, + "acc_stderr": 0.04785964010794916, + "acc_norm": 0.4818181818181818, + "acc_norm_stderr": 0.04785964010794916 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.42592592592592593, + 
"acc_stderr": 0.03014913560136593, + "acc_norm": 0.42592592592592593, + "acc_norm_stderr": 0.03014913560136593 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + "acc_stderr": 0.037345356767871984, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.037345356767871984 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5970149253731343, + "acc_stderr": 0.034683432951111266, + "acc_norm": 0.5970149253731343, + "acc_norm_stderr": 0.034683432951111266 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4277456647398844, + "acc_stderr": 0.03772446857518026, + "acc_norm": 0.4277456647398844, + "acc_norm_stderr": 0.03772446857518026 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.4973544973544973, + "acc_stderr": 0.025750949678130387, + "acc_norm": 0.4973544973544973, + "acc_norm_stderr": 0.025750949678130387 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3472222222222222, + "acc_stderr": 0.039812405437178615, + "acc_norm": 0.3472222222222222, + "acc_norm_stderr": 0.039812405437178615 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237101, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237101 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4884393063583815, + "acc_stderr": 0.02691189868637792, + "acc_norm": 0.4884393063583815, + "acc_norm_stderr": 0.02691189868637792 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4785276073619632, + "acc_stderr": 0.03924746876751129, + "acc_norm": 0.4785276073619632, + "acc_norm_stderr": 0.03924746876751129 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.43209876543209874, + "acc_stderr": 0.02756301097160668, + "acc_norm": 0.43209876543209874, + "acc_norm_stderr": 0.02756301097160668 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 
0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.45595854922279794, + "acc_stderr": 0.03594413711272438, + "acc_norm": 0.45595854922279794, + "acc_norm_stderr": 0.03594413711272438 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.42105263157894735, + "acc_stderr": 0.04644602091222317, + "acc_norm": 0.42105263157894735, + "acc_norm_stderr": 0.04644602091222317 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.48440366972477067, + "acc_stderr": 0.02142689153920805, + "acc_norm": 0.48440366972477067, + "acc_norm_stderr": 0.02142689153920805 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4603174603174603, + "acc_stderr": 0.04458029125470973, + "acc_norm": 0.4603174603174603, + "acc_norm_stderr": 0.04458029125470973 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.46405228758169936, + "acc_stderr": 0.028555827516528784, + "acc_norm": 0.46405228758169936, + "acc_norm_stderr": 0.028555827516528784 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.57, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.57, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7024793388429752, + "acc_stderr": 0.04173349148083498, + "acc_norm": 0.7024793388429752, + "acc_norm_stderr": 0.04173349148083498 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4342105263157895, + "acc_stderr": 0.04033565667848319, + "acc_norm": 0.4342105263157895, + "acc_norm_stderr": 0.04033565667848319 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4084967320261438, + "acc_stderr": 0.01988622103750187, + "acc_norm": 0.4084967320261438, + "acc_norm_stderr": 0.01988622103750187 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.4397163120567376, + "acc_stderr": 0.02960991207559411, + "acc_norm": 0.4397163120567376, + "acc_norm_stderr": 0.02960991207559411 + }, + 
"harness|ko_mmlu_machine_learning|5": { + "acc": 0.41964285714285715, + "acc_stderr": 0.04684099321077106, + "acc_norm": 0.41964285714285715, + "acc_norm_stderr": 0.04684099321077106 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4675925925925926, + "acc_stderr": 0.03402801581358966, + "acc_norm": 0.4675925925925926, + "acc_norm_stderr": 0.03402801581358966 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.29720670391061454, + "acc_stderr": 0.015285313353641597, + "acc_norm": 0.29720670391061454, + "acc_norm_stderr": 0.015285313353641597 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.76, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.76, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.34191176470588236, + "acc_stderr": 0.02881472242225418, + "acc_norm": 0.34191176470588236, + "acc_norm_stderr": 0.02881472242225418 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6, + "acc_stderr": 0.031362502409358936, + "acc_norm": 0.6, + "acc_norm_stderr": 0.031362502409358936 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5654008438818565, + "acc_stderr": 0.03226759995510145, + "acc_norm": 0.5654008438818565, + "acc_norm_stderr": 0.03226759995510145 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3709256844850065, + "acc_stderr": 0.012337391684530312, + "acc_norm": 0.3709256844850065, + "acc_norm_stderr": 0.012337391684530312 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4852941176470588, + "acc_stderr": 0.03507793834791324, + "acc_norm": 0.4852941176470588, + "acc_norm_stderr": 0.03507793834791324 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5151515151515151, + "acc_stderr": 0.03902551007374448, + "acc_norm": 
0.5151515151515151, + "acc_norm_stderr": 0.03902551007374448 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3317013463892289, + "mc1_stderr": 0.016482148810241473, + "mc2": 0.5103600706281549, + "mc2_stderr": 0.015763973140807 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5194805194805194, + "acc_stderr": 0.01717730199234254, + "acc_norm": 0.5312868949232585, + "acc_norm_stderr": 0.017156666859785466 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + 
"harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "wenbopan/Faro-Yi-9B", + "model_sha": "f16d3efcadcffda843ced6dd9970a57c919c9d15", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/werty1248/Llama-3-Ko-8B-Instruct-AOG/result_2024-05-21 16:00:42.json b/werty1248/Llama-3-Ko-8B-Instruct-AOG/result_2024-05-21 16:00:42.json new file mode 100644 index 0000000000000000000000000000000000000000..c0a7e11b8c9aabed2eeed50ee13a12711c05930d --- /dev/null +++ b/werty1248/Llama-3-Ko-8B-Instruct-AOG/result_2024-05-21 16:00:42.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4189419795221843, + "acc_stderr": 0.014418106953639015, + "acc_norm": 0.47525597269624575, + "acc_norm_stderr": 
0.014593487694937735 + }, + "harness|ko_hellaswag|10": { + "acc": 0.41635132443736306, + "acc_stderr": 0.004919457850104228, + "acc_norm": 0.5585540728938458, + "acc_norm_stderr": 0.004955447564694056 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6140350877192983, + "acc_stderr": 0.03733756969066164, + "acc_norm": 0.6140350877192983, + "acc_norm_stderr": 0.03733756969066164 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6213592233009708, + "acc_stderr": 0.04802694698258973, + "acc_norm": 0.6213592233009708, + "acc_norm_stderr": 0.04802694698258973 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5951468710089399, + "acc_stderr": 0.01755324646772027, + "acc_norm": 0.5951468710089399, + "acc_norm_stderr": 0.01755324646772027 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45185185185185184, + "acc_stderr": 0.04299268905480863, + "acc_norm": 0.45185185185185184, + "acc_norm_stderr": 0.04299268905480863 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.46808510638297873, + "acc_stderr": 0.03261936918467382, + "acc_norm": 0.46808510638297873, + "acc_norm_stderr": 0.03261936918467382 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42771084337349397, + "acc_stderr": 0.038515976837185335, + "acc_norm": 0.42771084337349397, + "acc_norm_stderr": 0.038515976837185335 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5144694533762058, + "acc_stderr": 0.02838619808417768, + "acc_norm": 0.5144694533762058, + "acc_norm_stderr": 0.02838619808417768 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5067264573991032, + "acc_stderr": 0.03355476596234354, + "acc_norm": 0.5067264573991032, + "acc_norm_stderr": 0.03355476596234354 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.549618320610687, + "acc_stderr": 0.04363643698524779, + "acc_norm": 0.549618320610687, + 
"acc_norm_stderr": 0.04363643698524779 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5959595959595959, + "acc_stderr": 0.03496130972056125, + "acc_norm": 0.5959595959595959, + "acc_norm_stderr": 0.03496130972056125 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5655172413793104, + "acc_stderr": 0.04130740879555497, + "acc_norm": 0.5655172413793104, + "acc_norm_stderr": 0.04130740879555497 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.04440521906179328, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.04440521906179328 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.49159663865546216, + "acc_stderr": 0.03247390276569669, + "acc_norm": 0.49159663865546216, + "acc_norm_stderr": 0.03247390276569669 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5102564102564102, + "acc_stderr": 0.025345672221942374, + "acc_norm": 0.5102564102564102, + "acc_norm_stderr": 0.025345672221942374 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.04803752235190192, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.04803752235190192 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3694581280788177, + "acc_stderr": 0.03395970381998571, + "acc_norm": 0.3694581280788177, + "acc_norm_stderr": 0.03395970381998571 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4935483870967742, + "acc_stderr": 0.02844163823354051, + 
"acc_norm": 0.4935483870967742, + "acc_norm_stderr": 0.02844163823354051 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7649572649572649, + "acc_stderr": 0.02777883590493543, + "acc_norm": 0.7649572649572649, + "acc_norm_stderr": 0.02777883590493543 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5283018867924528, + "acc_stderr": 0.030723535249006107, + "acc_norm": 0.5283018867924528, + "acc_norm_stderr": 0.030723535249006107 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5636363636363636, + "acc_stderr": 0.04750185058907296, + "acc_norm": 0.5636363636363636, + "acc_norm_stderr": 0.04750185058907296 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.02874204090394849, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.02874204090394849 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.03757949922943343, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.03757949922943343 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6368159203980099, + "acc_stderr": 0.03400598505599015, + "acc_norm": 0.6368159203980099, + "acc_norm_stderr": 0.03400598505599015 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.43352601156069365, + "acc_stderr": 0.03778621079092056, + "acc_norm": 0.43352601156069365, + "acc_norm_stderr": 0.03778621079092056 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.34656084656084657, + "acc_stderr": 0.024508777521028417, + "acc_norm": 0.34656084656084657, + "acc_norm_stderr": 0.024508777521028417 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4513888888888889, + "acc_stderr": 0.041614023984032786, + "acc_norm": 0.4513888888888889, + "acc_norm_stderr": 0.041614023984032786 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.64, 
+ "acc_stderr": 0.048241815132442176, + "acc_norm": 0.64, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.546242774566474, + "acc_stderr": 0.02680372058320619, + "acc_norm": 0.546242774566474, + "acc_norm_stderr": 0.02680372058320619 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4785276073619632, + "acc_stderr": 0.0392474687675113, + "acc_norm": 0.4785276073619632, + "acc_norm_stderr": 0.0392474687675113 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5339506172839507, + "acc_stderr": 0.027756535257347663, + "acc_norm": 0.5339506172839507, + "acc_norm_stderr": 0.027756535257347663 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932269, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932269 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6269430051813472, + "acc_stderr": 0.03490205592048574, + "acc_norm": 0.6269430051813472, + "acc_norm_stderr": 0.03490205592048574 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.30701754385964913, + "acc_stderr": 0.043391383225798594, + "acc_norm": 0.30701754385964913, + "acc_norm_stderr": 0.043391383225798594 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6220183486238532, + "acc_stderr": 0.02078918706672811, + "acc_norm": 0.6220183486238532, + "acc_norm_stderr": 0.02078918706672811 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3253968253968254, + "acc_stderr": 0.041905964388711366, + "acc_norm": 0.3253968253968254, + "acc_norm_stderr": 0.041905964388711366 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4869281045751634, + "acc_stderr": 0.028620130800700246, + "acc_norm": 0.4869281045751634, + "acc_norm_stderr": 0.028620130800700246 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 
0.7272727272727273, + "acc_stderr": 0.04065578140908705, + "acc_norm": 0.7272727272727273, + "acc_norm_stderr": 0.04065578140908705 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5263157894736842, + "acc_stderr": 0.040633027314866725, + "acc_norm": 0.5263157894736842, + "acc_norm_stderr": 0.040633027314866725 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4542483660130719, + "acc_stderr": 0.020142974553795198, + "acc_norm": 0.4542483660130719, + "acc_norm_stderr": 0.020142974553795198 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3475177304964539, + "acc_stderr": 0.02840662780959095, + "acc_norm": 0.3475177304964539, + "acc_norm_stderr": 0.02840662780959095 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.36607142857142855, + "acc_stderr": 0.0457237235873743, + "acc_norm": 0.36607142857142855, + "acc_norm_stderr": 0.0457237235873743 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.28703703703703703, + "acc_stderr": 0.03085199299325701, + "acc_norm": 0.28703703703703703, + "acc_norm_stderr": 0.03085199299325701 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23910614525139665, + "acc_stderr": 0.014265554192331146, + "acc_norm": 0.23910614525139665, + "acc_norm_stderr": 0.014265554192331146 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939098, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939098 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.62, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.62, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.375, + "acc_stderr": 0.029408372932278746, + "acc_norm": 0.375, + "acc_norm_stderr": 0.029408372932278746 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5387755102040817, + "acc_stderr": 0.031912820526692774, + "acc_norm": 0.5387755102040817, + "acc_norm_stderr": 0.031912820526692774 + }, + 
"harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6371308016877637, + "acc_stderr": 0.03129920825530213, + "acc_norm": 0.6371308016877637, + "acc_norm_stderr": 0.03129920825530213 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3239895697522816, + "acc_stderr": 0.011952840809646573, + "acc_norm": 0.3239895697522816, + "acc_norm_stderr": 0.011952840809646573 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5392156862745098, + "acc_stderr": 0.03498501649369527, + "acc_norm": 0.5392156862745098, + "acc_norm_stderr": 0.03498501649369527 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5212121212121212, + "acc_stderr": 0.03900828913737301, + "acc_norm": 0.5212121212121212, + "acc_norm_stderr": 0.03900828913737301 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.29253365973072215, + "mc1_stderr": 0.015925597445286165, + "mc2": 0.46838553700622165, + "mc2_stderr": 0.015228260707910906 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5230224321133412, + "acc_stderr": 0.017172121546727634, + "acc_norm": 0.6033057851239669, + "acc_norm_stderr": 0.016819438642971404 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + 
"harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "werty1248/Llama-3-Ko-8B-Instruct-AOG", + "model_sha": "cf6da7563697a7d0f004b80f1e3efd2b7d681dc5", + "model_dtype": "torch.float16", 
+ "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/werty1248/Llama-3-Ko-8B-OpenOrca/result_2024-04-30 09:52:04.json b/werty1248/Llama-3-Ko-8B-OpenOrca/result_2024-04-30 09:52:04.json new file mode 100644 index 0000000000000000000000000000000000000000..08d1e529a4b6593f30cbd982d69e765388796aad --- /dev/null +++ b/werty1248/Llama-3-Ko-8B-OpenOrca/result_2024-04-30 09:52:04.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.36177474402730375, + "acc_stderr": 0.014041957945038075, + "acc_norm": 0.4180887372013652, + "acc_norm_stderr": 0.014413988396996084 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3950408285202151, + "acc_stderr": 0.004878603699686036, + "acc_norm": 0.522903804023103, + "acc_norm_stderr": 0.004984543540932337 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5789473684210527, + "acc_stderr": 0.03786720706234214, + "acc_norm": 0.5789473684210527, + "acc_norm_stderr": 0.03786720706234214 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5922330097087378, + "acc_stderr": 0.04865777570410769, + "acc_norm": 0.5922330097087378, + "acc_norm_stderr": 0.04865777570410769 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5351213282247765, + "acc_stderr": 0.017835798806290642, + "acc_norm": 0.5351213282247765, + "acc_norm_stderr": 0.017835798806290642 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.04244633238353229, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.04244633238353229 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.425531914893617, + "acc_stderr": 0.032321469162244675, + "acc_norm": 0.425531914893617, + "acc_norm_stderr": 0.032321469162244675 + }, + 
"harness|ko_mmlu_virology|5": { + "acc": 0.41566265060240964, + "acc_stderr": 0.038367221765980515, + "acc_norm": 0.41566265060240964, + "acc_norm_stderr": 0.038367221765980515 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.47266881028938906, + "acc_stderr": 0.028355633568328188, + "acc_norm": 0.47266881028938906, + "acc_norm_stderr": 0.028355633568328188 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.515695067264574, + "acc_stderr": 0.0335412657542081, + "acc_norm": 0.515695067264574, + "acc_norm_stderr": 0.0335412657542081 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.45038167938931295, + "acc_stderr": 0.04363643698524779, + "acc_norm": 0.45038167938931295, + "acc_norm_stderr": 0.04363643698524779 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5707070707070707, + "acc_stderr": 0.035265527246011986, + "acc_norm": 0.5707070707070707, + "acc_norm_stderr": 0.035265527246011986 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.503448275862069, + "acc_stderr": 0.0416656757710158, + "acc_norm": 0.503448275862069, + "acc_norm_stderr": 0.0416656757710158 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.04389869956808777, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.04389869956808777 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.48739495798319327, + "acc_stderr": 0.03246816765752174, + "acc_norm": 0.48739495798319327, + "acc_norm_stderr": 0.03246816765752174 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4641025641025641, + "acc_stderr": 0.025285585990017834, + "acc_norm": 0.4641025641025641, + "acc_norm_stderr": 0.025285585990017834 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + 
"acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.04820403072760627, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.04820403072760627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3842364532019704, + "acc_stderr": 0.034223985656575494, + "acc_norm": 0.3842364532019704, + "acc_norm_stderr": 0.034223985656575494 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4774193548387097, + "acc_stderr": 0.028414985019707868, + "acc_norm": 0.4774193548387097, + "acc_norm_stderr": 0.028414985019707868 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.717948717948718, + "acc_stderr": 0.02948036054954119, + "acc_norm": 0.717948717948718, + "acc_norm_stderr": 0.02948036054954119 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4867924528301887, + "acc_stderr": 0.030762134874500476, + "acc_norm": 0.4867924528301887, + "acc_norm_stderr": 0.030762134874500476 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.04769300568972745, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.04769300568972745 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.34074074074074073, + "acc_stderr": 0.02889774874113115, + "acc_norm": 0.34074074074074073, + "acc_norm_stderr": 0.02889774874113115 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3576158940397351, + "acc_stderr": 0.03913453431177258, + "acc_norm": 0.3576158940397351, + "acc_norm_stderr": 0.03913453431177258 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6218905472636815, + "acc_stderr": 0.03428867848778658, + "acc_norm": 0.6218905472636815, + "acc_norm_stderr": 0.03428867848778658 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3815028901734104, + "acc_stderr": 
0.03703851193099522, + "acc_norm": 0.3815028901734104, + "acc_norm_stderr": 0.03703851193099522 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.335978835978836, + "acc_stderr": 0.024326310529149138, + "acc_norm": 0.335978835978836, + "acc_norm_stderr": 0.024326310529149138 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.375, + "acc_stderr": 0.04048439222695598, + "acc_norm": 0.375, + "acc_norm_stderr": 0.04048439222695598 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.67, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.67, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.476878612716763, + "acc_stderr": 0.026890297881303118, + "acc_norm": 0.476878612716763, + "acc_norm_stderr": 0.026890297881303118 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4233128834355828, + "acc_stderr": 0.03881891213334384, + "acc_norm": 0.4233128834355828, + "acc_norm_stderr": 0.03881891213334384 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4537037037037037, + "acc_stderr": 0.0277012284685426, + "acc_norm": 0.4537037037037037, + "acc_norm_stderr": 0.0277012284685426 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.04605661864718381, + "acc_norm": 0.3, + "acc_norm_stderr": 0.04605661864718381 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5803108808290155, + "acc_stderr": 0.03561587327685884, + "acc_norm": 0.5803108808290155, + "acc_norm_stderr": 0.03561587327685884 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2894736842105263, + "acc_stderr": 0.04266339443159394, + "acc_norm": 0.2894736842105263, + "acc_norm_stderr": 0.04266339443159394 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5412844036697247, + "acc_stderr": 
0.02136412253388169, + "acc_norm": 0.5412844036697247, + "acc_norm_stderr": 0.02136412253388169 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.36507936507936506, + "acc_stderr": 0.04306241259127154, + "acc_norm": 0.36507936507936506, + "acc_norm_stderr": 0.04306241259127154 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.02845263998508801, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.02845263998508801 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6033057851239669, + "acc_stderr": 0.044658697805310094, + "acc_norm": 0.6033057851239669, + "acc_norm_stderr": 0.044658697805310094 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4934210526315789, + "acc_stderr": 0.040685900502249704, + "acc_norm": 0.4934210526315789, + "acc_norm_stderr": 0.040685900502249704 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3954248366013072, + "acc_stderr": 0.01978046595477751, + "acc_norm": 0.3954248366013072, + "acc_norm_stderr": 0.01978046595477751 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.30851063829787234, + "acc_stderr": 0.027553366165101366, + "acc_norm": 0.30851063829787234, + "acc_norm_stderr": 0.027553366165101366 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3392857142857143, + "acc_stderr": 0.04493949068613539, + "acc_norm": 0.3392857142857143, + "acc_norm_stderr": 0.04493949068613539 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.031674687068289784, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.031674687068289784 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23687150837988827, + "acc_stderr": 0.01421957078810399, + "acc_norm": 0.23687150837988827, + "acc_norm_stderr": 0.01421957078810399 + }, + 
"harness|ko_mmlu_college_computer_science|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3492647058823529, + "acc_stderr": 0.028959755196824855, + "acc_norm": 0.3492647058823529, + "acc_norm_stderr": 0.028959755196824855 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.30612244897959184, + "acc_stderr": 0.029504896454595954, + "acc_norm": 0.30612244897959184, + "acc_norm_stderr": 0.029504896454595954 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5316455696202531, + "acc_stderr": 0.032481974005110756, + "acc_norm": 0.5316455696202531, + "acc_norm_stderr": 0.032481974005110756 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2803129074315515, + "acc_stderr": 0.01147155594495862, + "acc_norm": 0.2803129074315515, + "acc_norm_stderr": 0.01147155594495862 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.39705882352941174, + "acc_stderr": 0.03434131164719128, + "acc_norm": 0.39705882352941174, + "acc_norm_stderr": 0.03434131164719128 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.47878787878787876, + "acc_stderr": 0.03900828913737302, + "acc_norm": 0.47878787878787876, + "acc_norm_stderr": 0.03900828913737302 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.28518971848225216, + "mc1_stderr": 0.015805827874454892, + "mc2": 0.45008358764164297, + "mc2_stderr": 0.01546619804053694 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5336481700118064, + "acc_stderr": 0.017151384117131862, + "acc_norm": 0.641086186540732, + "acc_norm_stderr": 0.016491802102999036 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 
1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + 
"harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "werty1248/Llama-3-Ko-8B-OpenOrca", + "model_sha": "aa3a7590a0feeb87eb1d21a8fa9445da35901b6c", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/wkshin89/Yi-Ko-6B-Instruct-v1.0/result_2024-01-10 05:26:20.json b/wkshin89/Yi-Ko-6B-Instruct-v1.0/result_2024-01-10 05:26:20.json new file mode 100644 index 0000000000000000000000000000000000000000..656ee629136ab135101ff7352befb3d1bcf0751f --- /dev/null +++ b/wkshin89/Yi-Ko-6B-Instruct-v1.0/result_2024-01-10 05:26:20.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3438566552901024, + "acc_stderr": 0.01388064457015621, + "acc_norm": 0.41723549488054607, + "acc_norm_stderr": 0.014409825518403079 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3988249352718582, + "acc_stderr": 0.004886559008754987, + "acc_norm": 0.5324636526588329, + "acc_norm_stderr": 0.0049792529549773125 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5029239766081871, + "acc_stderr": 0.03834759370936839, + "acc_norm": 0.5029239766081871, + "acc_norm_stderr": 0.03834759370936839 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5631067961165048, + "acc_stderr": 0.04911147107365777, + "acc_norm": 0.5631067961165048, + "acc_norm_stderr": 0.04911147107365777 + }, + 
"harness|ko_mmlu_miscellaneous|5": { + "acc": 0.545338441890166, + "acc_stderr": 0.0178063045850526, + "acc_norm": 0.545338441890166, + "acc_norm_stderr": 0.0178063045850526 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.42962962962962964, + "acc_stderr": 0.04276349494376598, + "acc_norm": 0.42962962962962964, + "acc_norm_stderr": 0.04276349494376598 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39148936170212767, + "acc_stderr": 0.03190701242326812, + "acc_norm": 0.39148936170212767, + "acc_norm_stderr": 0.03190701242326812 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4036144578313253, + "acc_stderr": 0.03819486140758397, + "acc_norm": 0.4036144578313253, + "acc_norm_stderr": 0.03819486140758397 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4983922829581994, + "acc_stderr": 0.028397944907806612, + "acc_norm": 0.4983922829581994, + "acc_norm_stderr": 0.028397944907806612 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4663677130044843, + "acc_stderr": 0.033481800170603065, + "acc_norm": 0.4663677130044843, + "acc_norm_stderr": 0.033481800170603065 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4961832061068702, + "acc_stderr": 0.043851623256015534, + "acc_norm": 0.4961832061068702, + "acc_norm_stderr": 0.043851623256015534 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6414141414141414, + "acc_stderr": 0.03416903640391521, + "acc_norm": 0.6414141414141414, + "acc_norm_stderr": 0.03416903640391521 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5241379310344828, + "acc_stderr": 0.0416180850350153, + "acc_norm": 0.5241379310344828, + "acc_norm_stderr": 0.0416180850350153 + }, + 
"harness|ko_mmlu_college_physics|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.044405219061793275, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.044405219061793275 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.47058823529411764, + "acc_stderr": 0.032422250271150074, + "acc_norm": 0.47058823529411764, + "acc_norm_stderr": 0.032422250271150074 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4641025641025641, + "acc_stderr": 0.025285585990017838, + "acc_norm": 0.4641025641025641, + "acc_norm_stderr": 0.025285585990017838 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.58, + "acc_stderr": 0.04960449637488583, + "acc_norm": 0.58, + "acc_norm_stderr": 0.04960449637488583 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.03465304488406796, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.03465304488406796 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5129032258064516, + "acc_stderr": 0.02843453315268187, + "acc_norm": 0.5129032258064516, + "acc_norm_stderr": 0.02843453315268187 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7307692307692307, + "acc_stderr": 0.029058588303748845, + "acc_norm": 0.7307692307692307, + "acc_norm_stderr": 0.029058588303748845 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5169811320754717, + "acc_stderr": 0.030755120364119905, + "acc_norm": 0.5169811320754717, + "acc_norm_stderr": 0.030755120364119905 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5636363636363636, + "acc_stderr": 0.04750185058907296, + "acc_norm": 0.5636363636363636, + "acc_norm_stderr": 
0.04750185058907296 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3296296296296296, + "acc_stderr": 0.028661201116524582, + "acc_norm": 0.3296296296296296, + "acc_norm_stderr": 0.028661201116524582 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.32450331125827814, + "acc_stderr": 0.03822746937658753, + "acc_norm": 0.32450331125827814, + "acc_norm_stderr": 0.03822746937658753 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6119402985074627, + "acc_stderr": 0.03445789964362749, + "acc_norm": 0.6119402985074627, + "acc_norm_stderr": 0.03445789964362749 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4508670520231214, + "acc_stderr": 0.0379401267469703, + "acc_norm": 0.4508670520231214, + "acc_norm_stderr": 0.0379401267469703 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.02459497512892094, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.02459497512892094 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4513888888888889, + "acc_stderr": 0.041614023984032786, + "acc_norm": 0.4513888888888889, + "acc_norm_stderr": 0.041614023984032786 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237101, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237101 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.48554913294797686, + "acc_stderr": 0.026907849856282532, + "acc_norm": 0.48554913294797686, + "acc_norm_stderr": 0.026907849856282532 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.44171779141104295, + "acc_stderr": 0.03901591825836185, + "acc_norm": 0.44171779141104295, + "acc_norm_stderr": 0.03901591825836185 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.027801656212323667, + "acc_norm": 
0.48148148148148145, + "acc_norm_stderr": 0.027801656212323667 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5958549222797928, + "acc_stderr": 0.0354150857888402, + "acc_norm": 0.5958549222797928, + "acc_norm_stderr": 0.0354150857888402 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04434600701584925, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04434600701584925 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6128440366972477, + "acc_stderr": 0.02088423199264345, + "acc_norm": 0.6128440366972477, + "acc_norm_stderr": 0.02088423199264345 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.039325376803928704, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.039325376803928704 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4934640522875817, + "acc_stderr": 0.02862747055055606, + "acc_norm": 0.4934640522875817, + "acc_norm_stderr": 0.02862747055055606 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.53, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.53, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6776859504132231, + "acc_stderr": 0.042664163633521685, + "acc_norm": 0.6776859504132231, + "acc_norm_stderr": 0.042664163633521685 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.48026315789473684, + "acc_stderr": 0.040657710025626036, + "acc_norm": 0.48026315789473684, + "acc_norm_stderr": 0.040657710025626036 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3937908496732026, + "acc_stderr": 0.01976621199107307, + "acc_norm": 0.3937908496732026, + "acc_norm_stderr": 0.01976621199107307 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32978723404255317, + 
"acc_stderr": 0.028045946942042394, + "acc_norm": 0.32978723404255317, + "acc_norm_stderr": 0.028045946942042394 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.30357142857142855, + "acc_stderr": 0.04364226155841044, + "acc_norm": 0.30357142857142855, + "acc_norm_stderr": 0.04364226155841044 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.031674687068289804, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.031674687068289804 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24134078212290502, + "acc_stderr": 0.014310999547961438, + "acc_norm": 0.24134078212290502, + "acc_norm_stderr": 0.014310999547961438 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4632352941176471, + "acc_stderr": 0.03029061918048569, + "acc_norm": 0.4632352941176471, + "acc_norm_stderr": 0.03029061918048569 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4204081632653061, + "acc_stderr": 0.03160106993449604, + "acc_norm": 0.4204081632653061, + "acc_norm_stderr": 0.03160106993449604 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.620253164556962, + "acc_stderr": 0.0315918875296585, + "acc_norm": 0.620253164556962, + "acc_norm_stderr": 0.0315918875296585 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.33702737940026073, + "acc_stderr": 0.012072836273691328, + "acc_norm": 0.33702737940026073, + "acc_norm_stderr": 0.012072836273691328 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5098039215686274, + "acc_stderr": 0.03508637358630572, + "acc_norm": 0.5098039215686274, + "acc_norm_stderr": 0.03508637358630572 + }, 
+ "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.593939393939394, + "acc_stderr": 0.03834816355401181, + "acc_norm": 0.593939393939394, + "acc_norm_stderr": 0.03834816355401181 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2594859241126071, + "mc1_stderr": 0.015345409485557963, + "mc2": 0.4169052594974332, + "mc2_stderr": 0.014825852497704694 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5360094451003542, + "acc_stderr": 0.017145715365486664, + "acc_norm": 0.5808736717827627, + "acc_norm_stderr": 0.016963995010862796 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + 
"harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "wkshin89/Yi-Ko-6B-Instruct-v1.0", + "model_sha": "21d1f298bccd885c09e9719fa88df08ab29057cf", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/wkshin89/Yi-Ko-6B-Instruct-v1.1/result_2024-01-13 03:44:10.json b/wkshin89/Yi-Ko-6B-Instruct-v1.1/result_2024-01-13 03:44:10.json new file mode 100644 index 0000000000000000000000000000000000000000..e71d905f53576ea46e9efb38346ac7eca70c9319 --- /dev/null +++ b/wkshin89/Yi-Ko-6B-Instruct-v1.1/result_2024-01-13 03:44:10.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + 
"acc": 0.34812286689419797, + "acc_stderr": 0.013921008595179349, + "acc_norm": 0.4197952218430034, + "acc_norm_stderr": 0.014422181226303024 + }, + "harness|ko_hellaswag|10": { + "acc": 0.40051782513443535, + "acc_stderr": 0.004890019356021091, + "acc_norm": 0.5368452499502091, + "acc_norm_stderr": 0.004976214989483506 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5029239766081871, + "acc_stderr": 0.03834759370936839, + "acc_norm": 0.5029239766081871, + "acc_norm_stderr": 0.03834759370936839 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5825242718446602, + "acc_stderr": 0.04882840548212238, + "acc_norm": 0.5825242718446602, + "acc_norm_stderr": 0.04882840548212238 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5491698595146871, + "acc_stderr": 0.017793297572699037, + "acc_norm": 0.5491698595146871, + "acc_norm_stderr": 0.017793297572699037 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.04316378599511326, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.04316378599511326 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720683, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720683 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39148936170212767, + "acc_stderr": 0.03190701242326812, + "acc_norm": 0.39148936170212767, + "acc_norm_stderr": 0.03190701242326812 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3674698795180723, + "acc_stderr": 0.03753267402120574, + "acc_norm": 0.3674698795180723, + "acc_norm_stderr": 0.03753267402120574 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5176848874598071, + "acc_stderr": 0.02838032284907713, + "acc_norm": 0.5176848874598071, + "acc_norm_stderr": 0.02838032284907713 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.47533632286995514, + "acc_stderr": 0.033516951676526276, + "acc_norm": 0.47533632286995514, + "acc_norm_stderr": 0.033516951676526276 + }, + 
"harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4961832061068702, + "acc_stderr": 0.043851623256015534, + "acc_norm": 0.4961832061068702, + "acc_norm_stderr": 0.043851623256015534 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6262626262626263, + "acc_stderr": 0.034468977386593325, + "acc_norm": 0.6262626262626263, + "acc_norm_stderr": 0.034468977386593325 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.503448275862069, + "acc_stderr": 0.04166567577101579, + "acc_norm": 0.503448275862069, + "acc_norm_stderr": 0.04166567577101579 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.30392156862745096, + "acc_stderr": 0.04576665403207762, + "acc_norm": 0.30392156862745096, + "acc_norm_stderr": 0.04576665403207762 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4831932773109244, + "acc_stderr": 0.03246013680375308, + "acc_norm": 0.4831932773109244, + "acc_norm_stderr": 0.03246013680375308 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4794871794871795, + "acc_stderr": 0.025329663163489943, + "acc_norm": 0.4794871794871795, + "acc_norm_stderr": 0.025329663163489943 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.57, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.57, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542129, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542129 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5092592592592593, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.5092592592592593, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4039408866995074, + "acc_stderr": 0.03452453903822039, + "acc_norm": 0.4039408866995074, + "acc_norm_stderr": 
0.03452453903822039 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5129032258064516, + "acc_stderr": 0.02843453315268187, + "acc_norm": 0.5129032258064516, + "acc_norm_stderr": 0.02843453315268187 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7222222222222222, + "acc_stderr": 0.02934311479809447, + "acc_norm": 0.7222222222222222, + "acc_norm_stderr": 0.02934311479809447 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4981132075471698, + "acc_stderr": 0.030772653642075657, + "acc_norm": 0.4981132075471698, + "acc_norm_stderr": 0.030772653642075657 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.32222222222222224, + "acc_stderr": 0.028493465091028604, + "acc_norm": 0.32222222222222224, + "acc_norm_stderr": 0.028493465091028604 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3708609271523179, + "acc_stderr": 0.03943966699183629, + "acc_norm": 0.3708609271523179, + "acc_norm_stderr": 0.03943966699183629 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6019900497512438, + "acc_stderr": 0.034611994290400135, + "acc_norm": 0.6019900497512438, + "acc_norm_stderr": 0.034611994290400135 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4046242774566474, + "acc_stderr": 0.0374246119388725, + "acc_norm": 0.4046242774566474, + "acc_norm_stderr": 0.0374246119388725 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3544973544973545, + "acc_stderr": 0.024636830602841997, + "acc_norm": 0.3544973544973545, + "acc_norm_stderr": 0.024636830602841997 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.04155319955593146, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.04155319955593146 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.35, + "acc_stderr": 
0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.56, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.56, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4797687861271676, + "acc_stderr": 0.026897049996382868, + "acc_norm": 0.4797687861271676, + "acc_norm_stderr": 0.026897049996382868 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.44171779141104295, + "acc_stderr": 0.03901591825836184, + "acc_norm": 0.44171779141104295, + "acc_norm_stderr": 0.03901591825836184 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5061728395061729, + "acc_stderr": 0.02781862396258329, + "acc_norm": 0.5061728395061729, + "acc_norm_stderr": 0.02781862396258329 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6010362694300518, + "acc_stderr": 0.03533999094065696, + "acc_norm": 0.6010362694300518, + "acc_norm_stderr": 0.03533999094065696 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.35964912280701755, + "acc_stderr": 0.04514496132873633, + "acc_norm": 0.35964912280701755, + "acc_norm_stderr": 0.04514496132873633 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6110091743119266, + "acc_stderr": 0.020902300887392866, + "acc_norm": 0.6110091743119266, + "acc_norm_stderr": 0.020902300887392866 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.04006168083848879, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.04006168083848879 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.49019607843137253, + "acc_stderr": 0.028624412550167965, + "acc_norm": 0.49019607843137253, + "acc_norm_stderr": 0.028624412550167965 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.52, + 
"acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6776859504132231, + "acc_stderr": 0.042664163633521685, + "acc_norm": 0.6776859504132231, + "acc_norm_stderr": 0.042664163633521685 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.47368421052631576, + "acc_stderr": 0.04063302731486671, + "acc_norm": 0.47368421052631576, + "acc_norm_stderr": 0.04063302731486671 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.40522875816993464, + "acc_stderr": 0.019861155193829156, + "acc_norm": 0.40522875816993464, + "acc_norm_stderr": 0.019861155193829156 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.34397163120567376, + "acc_stderr": 0.028338017428611313, + "acc_norm": 0.34397163120567376, + "acc_norm_stderr": 0.028338017428611313 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.26785714285714285, + "acc_stderr": 0.04203277291467761, + "acc_norm": 0.26785714285714285, + "acc_norm_stderr": 0.04203277291467761 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.37962962962962965, + "acc_stderr": 0.03309682581119035, + "acc_norm": 0.37962962962962965, + "acc_norm_stderr": 0.03309682581119035 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24022346368715083, + "acc_stderr": 0.014288343803925282, + "acc_norm": 0.24022346368715083, + "acc_norm_stderr": 0.014288343803925282 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720683, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720683 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4264705882352941, + "acc_stderr": 0.030042615832714857, + "acc_norm": 0.4264705882352941, + "acc_norm_stderr": 0.030042615832714857 + }, + 
"harness|ko_mmlu_security_studies|5": { + "acc": 0.39183673469387753, + "acc_stderr": 0.03125127591089165, + "acc_norm": 0.39183673469387753, + "acc_norm_stderr": 0.03125127591089165 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6244725738396625, + "acc_stderr": 0.03152256243091156, + "acc_norm": 0.6244725738396625, + "acc_norm_stderr": 0.03152256243091156 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.32659713168187743, + "acc_stderr": 0.011977676704715995, + "acc_norm": 0.32659713168187743, + "acc_norm_stderr": 0.011977676704715995 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.553921568627451, + "acc_stderr": 0.034888454513049734, + "acc_norm": 0.553921568627451, + "acc_norm_stderr": 0.034888454513049734 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5757575757575758, + "acc_stderr": 0.03859268142070264, + "acc_norm": 0.5757575757575758, + "acc_norm_stderr": 0.03859268142070264 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.25703794369645044, + "mc1_stderr": 0.015298077509485083, + "mc2": 0.4132673389976497, + "mc2_stderr": 0.014744665577524083 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5489964580873672, + "acc_stderr": 0.01710761885954935, + "acc_norm": 0.6009445100354192, + "acc_norm_stderr": 0.016836377292849307 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, 
+ "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + 
"harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "wkshin89/Yi-Ko-6B-Instruct-v1.1", + "model_sha": "1909454055c7fd8e94d1f0636129aee1130c5fa2", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/wkshin89/Yi-Ko-6B-Instruct-v1.1_/result_2024-01-21 08:23:34.json b/wkshin89/Yi-Ko-6B-Instruct-v1.1_/result_2024-01-21 08:23:34.json new file mode 100644 index 0000000000000000000000000000000000000000..f3451e1065143c9c06e94d91fe8fa08973085707 --- /dev/null +++ b/wkshin89/Yi-Ko-6B-Instruct-v1.1_/result_2024-01-21 08:23:34.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.34812286689419797, + "acc_stderr": 0.013921008595179349, + "acc_norm": 0.4197952218430034, + "acc_norm_stderr": 0.014422181226303024 + }, + "harness|ko_hellaswag|10": { + "acc": 0.40051782513443535, + "acc_stderr": 0.004890019356021091, + "acc_norm": 0.5368452499502091, + "acc_norm_stderr": 0.004976214989483506 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5029239766081871, + "acc_stderr": 0.03834759370936839, + "acc_norm": 0.5029239766081871, + "acc_norm_stderr": 0.03834759370936839 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5825242718446602, + "acc_stderr": 0.04882840548212238, + "acc_norm": 0.5825242718446602, + "acc_norm_stderr": 0.04882840548212238 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5491698595146871, + "acc_stderr": 0.017793297572699037, + "acc_norm": 0.5491698595146871, + "acc_norm_stderr": 0.017793297572699037 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.04316378599511326, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.04316378599511326 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720683, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720683 + 
}, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39148936170212767, + "acc_stderr": 0.03190701242326812, + "acc_norm": 0.39148936170212767, + "acc_norm_stderr": 0.03190701242326812 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3674698795180723, + "acc_stderr": 0.03753267402120574, + "acc_norm": 0.3674698795180723, + "acc_norm_stderr": 0.03753267402120574 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5176848874598071, + "acc_stderr": 0.02838032284907713, + "acc_norm": 0.5176848874598071, + "acc_norm_stderr": 0.02838032284907713 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.47533632286995514, + "acc_stderr": 0.033516951676526276, + "acc_norm": 0.47533632286995514, + "acc_norm_stderr": 0.033516951676526276 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4961832061068702, + "acc_stderr": 0.043851623256015534, + "acc_norm": 0.4961832061068702, + "acc_norm_stderr": 0.043851623256015534 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6262626262626263, + "acc_stderr": 0.034468977386593325, + "acc_norm": 0.6262626262626263, + "acc_norm_stderr": 0.034468977386593325 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.503448275862069, + "acc_stderr": 0.04166567577101579, + "acc_norm": 0.503448275862069, + "acc_norm_stderr": 0.04166567577101579 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.30392156862745096, + "acc_stderr": 0.04576665403207762, + "acc_norm": 0.30392156862745096, + "acc_norm_stderr": 0.04576665403207762 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4831932773109244, + "acc_stderr": 0.03246013680375308, + "acc_norm": 0.4831932773109244, + "acc_norm_stderr": 0.03246013680375308 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4794871794871795, + "acc_stderr": 0.025329663163489943, + 
"acc_norm": 0.4794871794871795, + "acc_norm_stderr": 0.025329663163489943 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.57, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.57, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542129, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542129 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5092592592592593, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.5092592592592593, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4039408866995074, + "acc_stderr": 0.03452453903822039, + "acc_norm": 0.4039408866995074, + "acc_norm_stderr": 0.03452453903822039 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5129032258064516, + "acc_stderr": 0.02843453315268187, + "acc_norm": 0.5129032258064516, + "acc_norm_stderr": 0.02843453315268187 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7222222222222222, + "acc_stderr": 0.02934311479809447, + "acc_norm": 0.7222222222222222, + "acc_norm_stderr": 0.02934311479809447 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4981132075471698, + "acc_stderr": 0.030772653642075657, + "acc_norm": 0.4981132075471698, + "acc_norm_stderr": 0.030772653642075657 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.32222222222222224, + "acc_stderr": 0.028493465091028604, + "acc_norm": 0.32222222222222224, + "acc_norm_stderr": 0.028493465091028604 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3708609271523179, + "acc_stderr": 0.03943966699183629, + "acc_norm": 0.3708609271523179, + "acc_norm_stderr": 0.03943966699183629 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6019900497512438, + "acc_stderr": 
0.034611994290400135, + "acc_norm": 0.6019900497512438, + "acc_norm_stderr": 0.034611994290400135 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4046242774566474, + "acc_stderr": 0.0374246119388725, + "acc_norm": 0.4046242774566474, + "acc_norm_stderr": 0.0374246119388725 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3544973544973545, + "acc_stderr": 0.024636830602841997, + "acc_norm": 0.3544973544973545, + "acc_norm_stderr": 0.024636830602841997 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.04155319955593146, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.04155319955593146 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.56, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.56, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4797687861271676, + "acc_stderr": 0.026897049996382868, + "acc_norm": 0.4797687861271676, + "acc_norm_stderr": 0.026897049996382868 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.44171779141104295, + "acc_stderr": 0.03901591825836184, + "acc_norm": 0.44171779141104295, + "acc_norm_stderr": 0.03901591825836184 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5061728395061729, + "acc_stderr": 0.02781862396258329, + "acc_norm": 0.5061728395061729, + "acc_norm_stderr": 0.02781862396258329 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6010362694300518, + "acc_stderr": 0.03533999094065696, + "acc_norm": 0.6010362694300518, + "acc_norm_stderr": 0.03533999094065696 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.35964912280701755, + 
"acc_stderr": 0.04514496132873633, + "acc_norm": 0.35964912280701755, + "acc_norm_stderr": 0.04514496132873633 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6110091743119266, + "acc_stderr": 0.020902300887392866, + "acc_norm": 0.6110091743119266, + "acc_norm_stderr": 0.020902300887392866 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.04006168083848879, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.04006168083848879 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.49019607843137253, + "acc_stderr": 0.028624412550167965, + "acc_norm": 0.49019607843137253, + "acc_norm_stderr": 0.028624412550167965 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6776859504132231, + "acc_stderr": 0.042664163633521685, + "acc_norm": 0.6776859504132231, + "acc_norm_stderr": 0.042664163633521685 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.47368421052631576, + "acc_stderr": 0.04063302731486671, + "acc_norm": 0.47368421052631576, + "acc_norm_stderr": 0.04063302731486671 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.40522875816993464, + "acc_stderr": 0.019861155193829156, + "acc_norm": 0.40522875816993464, + "acc_norm_stderr": 0.019861155193829156 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.34397163120567376, + "acc_stderr": 0.028338017428611313, + "acc_norm": 0.34397163120567376, + "acc_norm_stderr": 0.028338017428611313 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.26785714285714285, + "acc_stderr": 0.04203277291467761, + "acc_norm": 0.26785714285714285, + "acc_norm_stderr": 0.04203277291467761 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.37962962962962965, + "acc_stderr": 0.03309682581119035, + "acc_norm": 0.37962962962962965, + "acc_norm_stderr": 0.03309682581119035 + }, 
+ "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24022346368715083, + "acc_stderr": 0.014288343803925282, + "acc_norm": 0.24022346368715083, + "acc_norm_stderr": 0.014288343803925282 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720683, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720683 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4264705882352941, + "acc_stderr": 0.030042615832714857, + "acc_norm": 0.4264705882352941, + "acc_norm_stderr": 0.030042615832714857 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.39183673469387753, + "acc_stderr": 0.03125127591089165, + "acc_norm": 0.39183673469387753, + "acc_norm_stderr": 0.03125127591089165 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6244725738396625, + "acc_stderr": 0.03152256243091156, + "acc_norm": 0.6244725738396625, + "acc_norm_stderr": 0.03152256243091156 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.32659713168187743, + "acc_stderr": 0.011977676704715995, + "acc_norm": 0.32659713168187743, + "acc_norm_stderr": 0.011977676704715995 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.553921568627451, + "acc_stderr": 0.034888454513049734, + "acc_norm": 0.553921568627451, + "acc_norm_stderr": 0.034888454513049734 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5757575757575758, + "acc_stderr": 0.03859268142070264, + "acc_norm": 0.5757575757575758, + "acc_norm_stderr": 0.03859268142070264 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.25703794369645044, + "mc1_stderr": 0.015298077509485083, + "mc2": 0.4132673389976497, + "mc2_stderr": 0.014744665577524083 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5489964580873672, + "acc_stderr": 0.01710761885954935, + "acc_norm": 
0.6009445100354192, + "acc_norm_stderr": 0.016836377292849307 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + 
"harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "wkshin89/Yi-Ko-6B-Instruct-v1.1_", + "model_sha": "1909454055c7fd8e94d1f0636129aee1130c5fa2", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/wkshin89/mistral-7b-instruct-ko-test-v0.1/result_2023-12-29 07:27:35.json b/wkshin89/mistral-7b-instruct-ko-test-v0.1/result_2023-12-29 07:27:35.json new file mode 100644 index 0000000000000000000000000000000000000000..39193de3e196dec34db2d14918c47149e0691735 --- /dev/null +++ b/wkshin89/mistral-7b-instruct-ko-test-v0.1/result_2023-12-29 07:27:35.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.34897610921501704, + "acc_stderr": 0.0139289334613825, + "acc_norm": 0.38822525597269625, + "acc_norm_stderr": 0.01424161420741404 + }, + "harness|ko_hellaswag|10": { + "acc": 0.37442740489942244, + "acc_stderr": 0.004829856058603582, + "acc_norm": 0.48894642501493724, + "acc_norm_stderr": 0.00498856194427739 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.49122807017543857, + "acc_stderr": 0.038342347441649924, + "acc_norm": 
0.49122807017543857, + "acc_norm_stderr": 0.038342347441649924 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5242718446601942, + "acc_stderr": 0.049449010929737795, + "acc_norm": 0.5242718446601942, + "acc_norm_stderr": 0.049449010929737795 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4763729246487867, + "acc_stderr": 0.01785998976517645, + "acc_norm": 0.4763729246487867, + "acc_norm_stderr": 0.01785998976517645 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.362962962962963, + "acc_stderr": 0.04153948404742398, + "acc_norm": 0.362962962962963, + "acc_norm_stderr": 0.04153948404742398 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4085106382978723, + "acc_stderr": 0.03213418026701576, + "acc_norm": 0.4085106382978723, + "acc_norm_stderr": 0.03213418026701576 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.463855421686747, + "acc_stderr": 0.03882310850890594, + "acc_norm": 0.463855421686747, + "acc_norm_stderr": 0.03882310850890594 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5080385852090032, + "acc_stderr": 0.028394421370984538, + "acc_norm": 0.5080385852090032, + "acc_norm_stderr": 0.028394421370984538 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4484304932735426, + "acc_stderr": 0.03337883736255098, + "acc_norm": 0.4484304932735426, + "acc_norm_stderr": 0.03337883736255098 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.46564885496183206, + "acc_stderr": 0.04374928560599738, + "acc_norm": 0.46564885496183206, + "acc_norm_stderr": 0.04374928560599738 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5151515151515151, + "acc_stderr": 0.0356071651653106, + "acc_norm": 
0.5151515151515151, + "acc_norm_stderr": 0.0356071651653106 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.47586206896551725, + "acc_stderr": 0.041618085035015295, + "acc_norm": 0.47586206896551725, + "acc_norm_stderr": 0.041618085035015295 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.041583075330832865, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.041583075330832865 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3949579831932773, + "acc_stderr": 0.03175367846096626, + "acc_norm": 0.3949579831932773, + "acc_norm_stderr": 0.03175367846096626 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4461538461538462, + "acc_stderr": 0.025203571773028333, + "acc_norm": 0.4461538461538462, + "acc_norm_stderr": 0.025203571773028333 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.048262172941398944, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.048262172941398944 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4088669950738916, + "acc_stderr": 0.03459058815883231, + "acc_norm": 0.4088669950738916, + "acc_norm_stderr": 0.03459058815883231 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4612903225806452, + "acc_stderr": 0.02835863485983692, + "acc_norm": 0.4612903225806452, + "acc_norm_stderr": 0.02835863485983692 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7051282051282052, + "acc_stderr": 0.02987257770889118, + "acc_norm": 0.7051282051282052, + "acc_norm_stderr": 0.02987257770889118 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.41132075471698115, + 
"acc_stderr": 0.030285009259009812, + "acc_norm": 0.41132075471698115, + "acc_norm_stderr": 0.030285009259009812 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4909090909090909, + "acc_stderr": 0.0478833976870286, + "acc_norm": 0.4909090909090909, + "acc_norm_stderr": 0.0478833976870286 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3592592592592593, + "acc_stderr": 0.029252905927251976, + "acc_norm": 0.3592592592592593, + "acc_norm_stderr": 0.029252905927251976 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.32450331125827814, + "acc_stderr": 0.038227469376587525, + "acc_norm": 0.32450331125827814, + "acc_norm_stderr": 0.038227469376587525 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5771144278606966, + "acc_stderr": 0.034932317774212816, + "acc_norm": 0.5771144278606966, + "acc_norm_stderr": 0.034932317774212816 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3583815028901734, + "acc_stderr": 0.03656343653353159, + "acc_norm": 0.3583815028901734, + "acc_norm_stderr": 0.03656343653353159 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.36243386243386244, + "acc_stderr": 0.024757473902752045, + "acc_norm": 0.36243386243386244, + "acc_norm_stderr": 0.024757473902752045 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3055555555555556, + "acc_stderr": 0.03852084696008534, + "acc_norm": 0.3055555555555556, + "acc_norm_stderr": 0.03852084696008534 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.58, + "acc_stderr": 0.04960449637488584, + "acc_norm": 0.58, + "acc_norm_stderr": 0.04960449637488584 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5173410404624278, + "acc_stderr": 0.026902900458666654, + "acc_norm": 0.5173410404624278, + "acc_norm_stderr": 0.026902900458666654 + }, + 
"harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4049079754601227, + "acc_stderr": 0.03856672163548913, + "acc_norm": 0.4049079754601227, + "acc_norm_stderr": 0.03856672163548913 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.02774431344337654, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.02774431344337654 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.44041450777202074, + "acc_stderr": 0.035827245300360945, + "acc_norm": 0.44041450777202074, + "acc_norm_stderr": 0.035827245300360945 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.38596491228070173, + "acc_stderr": 0.04579639422070434, + "acc_norm": 0.38596491228070173, + "acc_norm_stderr": 0.04579639422070434 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.44954128440366975, + "acc_stderr": 0.021327881417823377, + "acc_norm": 0.44954128440366975, + "acc_norm_stderr": 0.021327881417823377 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3492063492063492, + "acc_stderr": 0.04263906892795133, + "acc_norm": 0.3492063492063492, + "acc_norm_stderr": 0.04263906892795133 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.434640522875817, + "acc_stderr": 0.02838425670488304, + "acc_norm": 0.434640522875817, + "acc_norm_stderr": 0.02838425670488304 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7024793388429752, + "acc_stderr": 0.04173349148083498, + "acc_norm": 0.7024793388429752, + "acc_norm_stderr": 0.04173349148083498 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.42105263157894735, + "acc_stderr": 0.04017901275981749, + "acc_norm": 0.42105263157894735, + "acc_norm_stderr": 
0.04017901275981749 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.39869281045751637, + "acc_stderr": 0.019808281317449848, + "acc_norm": 0.39869281045751637, + "acc_norm_stderr": 0.019808281317449848 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3475177304964539, + "acc_stderr": 0.028406627809590947, + "acc_norm": 0.3475177304964539, + "acc_norm_stderr": 0.028406627809590947 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.41964285714285715, + "acc_stderr": 0.04684099321077106, + "acc_norm": 0.41964285714285715, + "acc_norm_stderr": 0.04684099321077106 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3425925925925926, + "acc_stderr": 0.032365852526021574, + "acc_norm": 0.3425925925925926, + "acc_norm_stderr": 0.032365852526021574 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24022346368715083, + "acc_stderr": 0.014288343803925295, + "acc_norm": 0.24022346368715083, + "acc_norm_stderr": 0.014288343803925295 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.30514705882352944, + "acc_stderr": 0.027971541370170598, + "acc_norm": 0.30514705882352944, + "acc_norm_stderr": 0.027971541370170598 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4775510204081633, + "acc_stderr": 0.031976941187136725, + "acc_norm": 0.4775510204081633, + "acc_norm_stderr": 0.031976941187136725 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5822784810126582, + "acc_stderr": 0.03210353032241268, + "acc_norm": 0.5822784810126582, + "acc_norm_stderr": 0.03210353032241268 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3435462842242503, + 
"acc_stderr": 0.012128961174190156, + "acc_norm": 0.3435462842242503, + "acc_norm_stderr": 0.012128961174190156 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4215686274509804, + "acc_stderr": 0.03465868196380757, + "acc_norm": 0.4215686274509804, + "acc_norm_stderr": 0.03465868196380757 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.03895658065271846, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.03895658065271846 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2974296205630355, + "mc1_stderr": 0.016002651487361005, + "mc2": 0.4595701455440173, + "mc2_stderr": 0.015464603150764228 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.38016528925619836, + "acc_stderr": 0.016689333596980122, + "acc_norm": 0.4474616292798111, + "acc_norm_stderr": 0.017095190301500585 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + 
"harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "wkshin89/mistral-7b-instruct-ko-test-v0.1", + "model_sha": "6c4c20b04e67dc4f0aa797b28ecf0f9a213370c3", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/wkshin89/mistral-7b-instruct-ko-test-v0.2/result_2024-01-02 02:49:22.json 
b/wkshin89/mistral-7b-instruct-ko-test-v0.2/result_2024-01-02 02:49:22.json new file mode 100644 index 0000000000000000000000000000000000000000..6dff6f1d2cf3a3c0500a2b99a01411b177c57881 --- /dev/null +++ b/wkshin89/mistral-7b-instruct-ko-test-v0.2/result_2024-01-02 02:49:22.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.34897610921501704, + "acc_stderr": 0.013928933461382506, + "acc_norm": 0.3839590443686007, + "acc_norm_stderr": 0.01421244498065189 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3811989643497311, + "acc_stderr": 0.004846886929763467, + "acc_norm": 0.49302927703644694, + "acc_norm_stderr": 0.004989296471157074 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4853801169590643, + "acc_stderr": 0.038331852752130205, + "acc_norm": 0.4853801169590643, + "acc_norm_stderr": 0.038331852752130205 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5145631067961165, + "acc_stderr": 0.04948637324026637, + "acc_norm": 0.5145631067961165, + "acc_norm_stderr": 0.04948637324026637 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4904214559386973, + "acc_stderr": 0.017876682275340873, + "acc_norm": 0.4904214559386973, + "acc_norm_stderr": 0.017876682275340873 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3851851851851852, + "acc_stderr": 0.042039210401562783, + "acc_norm": 0.3851851851851852, + "acc_norm_stderr": 0.042039210401562783 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.41702127659574467, + "acc_stderr": 0.03223276266711712, + "acc_norm": 0.41702127659574467, + "acc_norm_stderr": 0.03223276266711712 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39759036144578314, + "acc_stderr": 0.03809973084540219, + "acc_norm": 0.39759036144578314, + "acc_norm_stderr": 0.03809973084540219 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 
0.4758842443729904, + "acc_stderr": 0.02836504154256457, + "acc_norm": 0.4758842443729904, + "acc_norm_stderr": 0.02836504154256457 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4170403587443946, + "acc_stderr": 0.03309266936071722, + "acc_norm": 0.4170403587443946, + "acc_norm_stderr": 0.03309266936071722 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.45038167938931295, + "acc_stderr": 0.04363643698524779, + "acc_norm": 0.45038167938931295, + "acc_norm_stderr": 0.04363643698524779 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5707070707070707, + "acc_stderr": 0.035265527246011986, + "acc_norm": 0.5707070707070707, + "acc_norm_stderr": 0.035265527246011986 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5172413793103449, + "acc_stderr": 0.04164188720169375, + "acc_norm": 0.5172413793103449, + "acc_norm_stderr": 0.04164188720169375 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.04389869956808777, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.04389869956808777 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4789915966386555, + "acc_stderr": 0.0324498084999003, + "acc_norm": 0.4789915966386555, + "acc_norm_stderr": 0.0324498084999003 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.46153846153846156, + "acc_stderr": 0.025275892070240634, + "acc_norm": 0.46153846153846156, + "acc_norm_stderr": 0.025275892070240634 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.65, + "acc_stderr": 0.04793724854411022, + "acc_norm": 0.65, + "acc_norm_stderr": 0.04793724854411022 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.04793724854411018, + "acc_norm": 0.35, + "acc_norm_stderr": 0.04793724854411018 + }, + "harness|ko_mmlu_jurisprudence|5": 
{ + "acc": 0.46296296296296297, + "acc_stderr": 0.04820403072760628, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.04820403072760628 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.03481904844438803, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.03481904844438803 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.43870967741935485, + "acc_stderr": 0.02822949732031722, + "acc_norm": 0.43870967741935485, + "acc_norm_stderr": 0.02822949732031722 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7051282051282052, + "acc_stderr": 0.02987257770889119, + "acc_norm": 0.7051282051282052, + "acc_norm_stderr": 0.02987257770889119 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4377358490566038, + "acc_stderr": 0.030533338430467506, + "acc_norm": 0.4377358490566038, + "acc_norm_stderr": 0.030533338430467506 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.44545454545454544, + "acc_stderr": 0.047605488214603246, + "acc_norm": 0.44545454545454544, + "acc_norm_stderr": 0.047605488214603246 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3111111111111111, + "acc_stderr": 0.028226446749683515, + "acc_norm": 0.3111111111111111, + "acc_norm_stderr": 0.028226446749683515 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3509933774834437, + "acc_stderr": 0.03896981964257375, + "acc_norm": 0.3509933774834437, + "acc_norm_stderr": 0.03896981964257375 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5472636815920398, + "acc_stderr": 0.03519702717576915, + "acc_norm": 0.5472636815920398, + "acc_norm_stderr": 0.03519702717576915 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3699421965317919, + "acc_stderr": 0.03681229633394319, + "acc_norm": 0.3699421965317919, + "acc_norm_stderr": 0.03681229633394319 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.373015873015873, + "acc_stderr": 0.02490699045899257, + "acc_norm": 
0.373015873015873, + "acc_norm_stderr": 0.02490699045899257 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3541666666666667, + "acc_stderr": 0.039994111357535424, + "acc_norm": 0.3541666666666667, + "acc_norm_stderr": 0.039994111357535424 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.58, + "acc_stderr": 0.04960449637488585, + "acc_norm": 0.58, + "acc_norm_stderr": 0.04960449637488585 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5144508670520231, + "acc_stderr": 0.026907849856282542, + "acc_norm": 0.5144508670520231, + "acc_norm_stderr": 0.026907849856282542 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4662576687116564, + "acc_stderr": 0.039194155450484096, + "acc_norm": 0.4662576687116564, + "acc_norm_stderr": 0.039194155450484096 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4691358024691358, + "acc_stderr": 0.02776768960683393, + "acc_norm": 0.4691358024691358, + "acc_norm_stderr": 0.02776768960683393 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5492227979274611, + "acc_stderr": 0.035909109522355244, + "acc_norm": 0.5492227979274611, + "acc_norm_stderr": 0.035909109522355244 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.30701754385964913, + "acc_stderr": 0.043391383225798594, + "acc_norm": 0.30701754385964913, + "acc_norm_stderr": 0.043391383225798594 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.48256880733944957, + "acc_stderr": 0.021424291871853147, + "acc_norm": 0.48256880733944957, + "acc_norm_stderr": 0.021424291871853147 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.36507936507936506, + "acc_stderr": 
0.043062412591271526, + "acc_norm": 0.36507936507936506, + "acc_norm_stderr": 0.043062412591271526 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.028452639985088003, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.028452639985088003 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7024793388429752, + "acc_stderr": 0.04173349148083498, + "acc_norm": 0.7024793388429752, + "acc_norm_stderr": 0.04173349148083498 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4407894736842105, + "acc_stderr": 0.04040311062490436, + "acc_norm": 0.4407894736842105, + "acc_norm_stderr": 0.04040311062490436 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.39215686274509803, + "acc_stderr": 0.019751726508762633, + "acc_norm": 0.39215686274509803, + "acc_norm_stderr": 0.019751726508762633 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3475177304964539, + "acc_stderr": 0.028406627809590947, + "acc_norm": 0.3475177304964539, + "acc_norm_stderr": 0.028406627809590947 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.32142857142857145, + "acc_stderr": 0.04432804055291519, + "acc_norm": 0.32142857142857145, + "acc_norm_stderr": 0.04432804055291519 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.03293377139415191, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.03293377139415191 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.28156424581005585, + "acc_stderr": 0.015042290171866118, + "acc_norm": 0.28156424581005585, + "acc_norm_stderr": 0.015042290171866118 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + 
"harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.34191176470588236, + "acc_stderr": 0.028814722422254174, + "acc_norm": 0.34191176470588236, + "acc_norm_stderr": 0.028814722422254174 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.3795918367346939, + "acc_stderr": 0.031067211262872485, + "acc_norm": 0.3795918367346939, + "acc_norm_stderr": 0.031067211262872485 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5611814345991561, + "acc_stderr": 0.032302649315470375, + "acc_norm": 0.5611814345991561, + "acc_norm_stderr": 0.032302649315470375 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3318122555410691, + "acc_stderr": 0.012026088259897634, + "acc_norm": 0.3318122555410691, + "acc_norm_stderr": 0.012026088259897634 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4362745098039216, + "acc_stderr": 0.03480693138457039, + "acc_norm": 0.4362745098039216, + "acc_norm_stderr": 0.03480693138457039 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.45454545454545453, + "acc_stderr": 0.038881769216741004, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.038881769216741004 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2594859241126071, + "mc1_stderr": 0.015345409485557952, + "mc2": 0.4511332135853451, + "mc2_stderr": 0.01542351667663033 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.42502951593860683, + "acc_stderr": 0.016996016308362887, + "acc_norm": 0.5371900826446281, + "acc_norm_stderr": 0.017142736117643304 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + 
"harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + 
"harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "wkshin89/mistral-7b-instruct-ko-test-v0.2", + "model_sha": "0fc39bb2df0cb05a72030e20233757166b8a51c3", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/wkshin89/yi-ko-6b-instruct-test-v0.1/result_2024-01-04 10:03:09.json b/wkshin89/yi-ko-6b-instruct-test-v0.1/result_2024-01-04 10:03:09.json new file mode 100644 index 0000000000000000000000000000000000000000..918e7415fb722c3f2e57db68fb28c45b58739d60 --- /dev/null +++ b/wkshin89/yi-ko-6b-instruct-test-v0.1/result_2024-01-04 10:03:09.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.35494880546075086, + "acc_stderr": 0.013983036904094087, + "acc_norm": 0.4257679180887372, + "acc_norm_stderr": 0.014449464278868802 + }, + "harness|ko_hellaswag|10": { + "acc": 0.40131447918741286, + "acc_stderr": 0.0048916267180972705, + "acc_norm": 0.5387373033260306, + "acc_norm_stderr": 0.004974783753309709 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.49707602339181284, + "acc_stderr": 0.03834759370936839, + "acc_norm": 0.49707602339181284, + "acc_norm_stderr": 0.03834759370936839 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5825242718446602, + "acc_stderr": 0.048828405482122375, + "acc_norm": 0.5825242718446602, + "acc_norm_stderr": 0.048828405482122375 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5478927203065134, + "acc_stderr": 0.01779775149386563, + "acc_norm": 
0.5478927203065134, + "acc_norm_stderr": 0.01779775149386563 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4740740740740741, + "acc_stderr": 0.04313531696750574, + "acc_norm": 0.4740740740740741, + "acc_norm_stderr": 0.04313531696750574 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206824, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206824 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.40425531914893614, + "acc_stderr": 0.03208115750788684, + "acc_norm": 0.40425531914893614, + "acc_norm_stderr": 0.03208115750788684 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3855421686746988, + "acc_stderr": 0.03789134424611548, + "acc_norm": 0.3855421686746988, + "acc_norm_stderr": 0.03789134424611548 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4983922829581994, + "acc_stderr": 0.028397944907806612, + "acc_norm": 0.4983922829581994, + "acc_norm_stderr": 0.028397944907806612 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.47085201793721976, + "acc_stderr": 0.03350073248773404, + "acc_norm": 0.47085201793721976, + "acc_norm_stderr": 0.03350073248773404 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4961832061068702, + "acc_stderr": 0.043851623256015534, + "acc_norm": 0.4961832061068702, + "acc_norm_stderr": 0.043851623256015534 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6464646464646465, + "acc_stderr": 0.03406086723547155, + "acc_norm": 0.6464646464646465, + "acc_norm_stderr": 0.03406086723547155 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4827586206896552, + "acc_stderr": 0.04164188720169377, + "acc_norm": 0.4827586206896552, + "acc_norm_stderr": 0.04164188720169377 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3235294117647059, + "acc_stderr": 0.046550104113196177, + 
"acc_norm": 0.3235294117647059, + "acc_norm_stderr": 0.046550104113196177 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.47478991596638653, + "acc_stderr": 0.03243718055137411, + "acc_norm": 0.47478991596638653, + "acc_norm_stderr": 0.03243718055137411 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5025641025641026, + "acc_stderr": 0.025350672979412188, + "acc_norm": 0.5025641025641026, + "acc_norm_stderr": 0.025350672979412188 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.58, + "acc_stderr": 0.04960449637488583, + "acc_norm": 0.58, + "acc_norm_stderr": 0.04960449637488583 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.04605661864718381, + "acc_norm": 0.3, + "acc_norm_stderr": 0.04605661864718381 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5092592592592593, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.5092592592592593, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4187192118226601, + "acc_stderr": 0.03471192860518468, + "acc_norm": 0.4187192118226601, + "acc_norm_stderr": 0.03471192860518468 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5096774193548387, + "acc_stderr": 0.02843867799890955, + "acc_norm": 0.5096774193548387, + "acc_norm_stderr": 0.02843867799890955 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7094017094017094, + "acc_stderr": 0.02974504857267406, + "acc_norm": 0.7094017094017094, + "acc_norm_stderr": 0.02974504857267406 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4981132075471698, + "acc_stderr": 0.03077265364207565, + "acc_norm": 0.4981132075471698, + "acc_norm_stderr": 0.03077265364207565 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5363636363636364, + "acc_stderr": 0.04776449162396197, + "acc_norm": 0.5363636363636364, + "acc_norm_stderr": 0.04776449162396197 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3296296296296296, 
+ "acc_stderr": 0.028661201116524586, + "acc_norm": 0.3296296296296296, + "acc_norm_stderr": 0.028661201116524586 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3708609271523179, + "acc_stderr": 0.03943966699183629, + "acc_norm": 0.3708609271523179, + "acc_norm_stderr": 0.03943966699183629 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6318407960199005, + "acc_stderr": 0.03410410565495301, + "acc_norm": 0.6318407960199005, + "acc_norm_stderr": 0.03410410565495301 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4277456647398844, + "acc_stderr": 0.03772446857518027, + "acc_norm": 0.4277456647398844, + "acc_norm_stderr": 0.03772446857518027 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.35978835978835977, + "acc_stderr": 0.024718075944129277, + "acc_norm": 0.35978835978835977, + "acc_norm_stderr": 0.024718075944129277 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4513888888888889, + "acc_stderr": 0.041614023984032786, + "acc_norm": 0.4513888888888889, + "acc_norm_stderr": 0.041614023984032786 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.48265895953757226, + "acc_stderr": 0.026902900458666647, + "acc_norm": 0.48265895953757226, + "acc_norm_stderr": 0.026902900458666647 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4233128834355828, + "acc_stderr": 0.038818912133343826, + "acc_norm": 0.4233128834355828, + "acc_norm_stderr": 0.038818912133343826 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.49382716049382713, + "acc_stderr": 0.027818623962583295, + "acc_norm": 0.49382716049382713, + "acc_norm_stderr": 0.027818623962583295 + }, + "harness|ko_mmlu_college_mathematics|5": { 
+ "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5854922279792746, + "acc_stderr": 0.035553003195576686, + "acc_norm": 0.5854922279792746, + "acc_norm_stderr": 0.035553003195576686 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3508771929824561, + "acc_stderr": 0.04489539350270698, + "acc_norm": 0.3508771929824561, + "acc_norm_stderr": 0.04489539350270698 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6256880733944954, + "acc_stderr": 0.02074895940898832, + "acc_norm": 0.6256880733944954, + "acc_norm_stderr": 0.02074895940898832 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.25396825396825395, + "acc_stderr": 0.03893259610604674, + "acc_norm": 0.25396825396825395, + "acc_norm_stderr": 0.03893259610604674 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.49019607843137253, + "acc_stderr": 0.028624412550167958, + "acc_norm": 0.49019607843137253, + "acc_norm_stderr": 0.028624412550167958 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.53, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.53, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7107438016528925, + "acc_stderr": 0.041391127276354626, + "acc_norm": 0.7107438016528925, + "acc_norm_stderr": 0.041391127276354626 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4868421052631579, + "acc_stderr": 0.04067533136309173, + "acc_norm": 0.4868421052631579, + "acc_norm_stderr": 0.04067533136309173 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.40522875816993464, + "acc_stderr": 0.019861155193829163, + "acc_norm": 0.40522875816993464, + "acc_norm_stderr": 0.019861155193829163 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3404255319148936, + "acc_stderr": 0.02826765748265014, + "acc_norm": 0.3404255319148936, + "acc_norm_stderr": 0.02826765748265014 + 
}, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.32142857142857145, + "acc_stderr": 0.04432804055291517, + "acc_norm": 0.32142857142857145, + "acc_norm_stderr": 0.04432804055291517 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.032757734861009996, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.032757734861009996 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24134078212290502, + "acc_stderr": 0.014310999547961443, + "acc_norm": 0.24134078212290502, + "acc_norm_stderr": 0.014310999547961443 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4227941176470588, + "acc_stderr": 0.030008562845003476, + "acc_norm": 0.4227941176470588, + "acc_norm_stderr": 0.030008562845003476 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4204081632653061, + "acc_stderr": 0.03160106993449604, + "acc_norm": 0.4204081632653061, + "acc_norm_stderr": 0.03160106993449604 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6075949367088608, + "acc_stderr": 0.0317847187456473, + "acc_norm": 0.6075949367088608, + "acc_norm_stderr": 0.0317847187456473 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.32659713168187743, + "acc_stderr": 0.011977676704715997, + "acc_norm": 0.32659713168187743, + "acc_norm_stderr": 0.011977676704715997 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5490196078431373, + "acc_stderr": 0.034924061041636124, + "acc_norm": 0.5490196078431373, + "acc_norm_stderr": 0.034924061041636124 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.593939393939394, + "acc_stderr": 
0.03834816355401181, + "acc_norm": 0.593939393939394, + "acc_norm_stderr": 0.03834816355401181 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2631578947368421, + "mc1_stderr": 0.015415241740237031, + "mc2": 0.4140010168289715, + "mc2_stderr": 0.01480502513683876 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5407319952774499, + "acc_stderr": 0.01713321827653767, + "acc_norm": 0.5938606847697757, + "acc_norm_stderr": 0.0168847495031914 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + 
"harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "wkshin89/yi-ko-6b-instruct-test-v0.1", + "model_sha": "f608497dba14d06c286f875bb23ecb598780be6a", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/wkshin89/yi-ko-6b-instruct-test-v0.2/result_2024-01-04 16:17:04.json b/wkshin89/yi-ko-6b-instruct-test-v0.2/result_2024-01-04 16:17:04.json new file mode 100644 index 0000000000000000000000000000000000000000..4b8da4e4f72bad83befaec3ccd7e450e735771ad --- /dev/null +++ b/wkshin89/yi-ko-6b-instruct-test-v0.2/result_2024-01-04 16:17:04.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3447098976109215, + "acc_stderr": 0.013888816286782112, 
+ "acc_norm": 0.4180887372013652, + "acc_norm_stderr": 0.014413988396996083 + }, + "harness|ko_hellaswag|10": { + "acc": 0.39932284405496915, + "acc_stderr": 0.004887583074180845, + "acc_norm": 0.5369448317068313, + "acc_norm_stderr": 0.004976141457736868 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.52046783625731, + "acc_stderr": 0.0383161053282193, + "acc_norm": 0.52046783625731, + "acc_norm_stderr": 0.0383161053282193 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6019417475728155, + "acc_stderr": 0.048467482539772386, + "acc_norm": 0.6019417475728155, + "acc_norm_stderr": 0.048467482539772386 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5363984674329502, + "acc_stderr": 0.01783252407959326, + "acc_norm": 0.5363984674329502, + "acc_norm_stderr": 0.01783252407959326 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.04309732901036354, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.04309732901036354 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847415, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847415 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.40425531914893614, + "acc_stderr": 0.03208115750788684, + "acc_norm": 0.40425531914893614, + "acc_norm_stderr": 0.03208115750788684 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4036144578313253, + "acc_stderr": 0.03819486140758396, + "acc_norm": 0.4036144578313253, + "acc_norm_stderr": 0.03819486140758396 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5048231511254019, + "acc_stderr": 0.02839677044411129, + "acc_norm": 0.5048231511254019, + "acc_norm_stderr": 0.02839677044411129 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4618834080717489, + "acc_stderr": 0.03346015011973228, + "acc_norm": 0.4618834080717489, + "acc_norm_stderr": 0.03346015011973228 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5267175572519084, + "acc_stderr": 0.04379024936553894, + 
"acc_norm": 0.5267175572519084, + "acc_norm_stderr": 0.04379024936553894 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6565656565656566, + "acc_stderr": 0.03383201223244444, + "acc_norm": 0.6565656565656566, + "acc_norm_stderr": 0.03383201223244444 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.503448275862069, + "acc_stderr": 0.04166567577101579, + "acc_norm": 0.503448275862069, + "acc_norm_stderr": 0.04166567577101579 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04690650298201942, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04690650298201942 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5084033613445378, + "acc_stderr": 0.03247390276569669, + "acc_norm": 0.5084033613445378, + "acc_norm_stderr": 0.03247390276569669 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.49743589743589745, + "acc_stderr": 0.025350672979412202, + "acc_norm": 0.49743589743589745, + "acc_norm_stderr": 0.025350672979412202 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.58, + "acc_stderr": 0.04960449637488583, + "acc_norm": 0.58, + "acc_norm_stderr": 0.04960449637488583 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.04605661864718381, + "acc_norm": 0.3, + "acc_norm_stderr": 0.04605661864718381 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4187192118226601, + "acc_stderr": 0.03471192860518468, + "acc_norm": 0.4187192118226601, + "acc_norm_stderr": 0.03471192860518468 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5225806451612903, + "acc_stderr": 0.02841498501970786, + "acc_norm": 
0.5225806451612903, + "acc_norm_stderr": 0.02841498501970786 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7222222222222222, + "acc_stderr": 0.029343114798094455, + "acc_norm": 0.7222222222222222, + "acc_norm_stderr": 0.029343114798094455 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5094339622641509, + "acc_stderr": 0.030767394707808093, + "acc_norm": 0.5094339622641509, + "acc_norm_stderr": 0.030767394707808093 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5272727272727272, + "acc_stderr": 0.04782001791380061, + "acc_norm": 0.5272727272727272, + "acc_norm_stderr": 0.04782001791380061 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.32592592592592595, + "acc_stderr": 0.028578348365473072, + "acc_norm": 0.32592592592592595, + "acc_norm_stderr": 0.028578348365473072 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3576158940397351, + "acc_stderr": 0.03913453431177258, + "acc_norm": 0.3576158940397351, + "acc_norm_stderr": 0.03913453431177258 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6069651741293532, + "acc_stderr": 0.0345368246603156, + "acc_norm": 0.6069651741293532, + "acc_norm_stderr": 0.0345368246603156 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.44508670520231214, + "acc_stderr": 0.03789401760283648, + "acc_norm": 0.44508670520231214, + "acc_norm_stderr": 0.03789401760283648 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3544973544973545, + "acc_stderr": 0.024636830602841997, + "acc_norm": 0.3544973544973545, + "acc_norm_stderr": 0.024636830602841997 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4305555555555556, + "acc_stderr": 0.04140685639111503, + "acc_norm": 0.4305555555555556, + "acc_norm_stderr": 0.04140685639111503 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.58, + 
"acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4884393063583815, + "acc_stderr": 0.026911898686377913, + "acc_norm": 0.4884393063583815, + "acc_norm_stderr": 0.026911898686377913 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.43558282208588955, + "acc_stderr": 0.03895632464138937, + "acc_norm": 0.43558282208588955, + "acc_norm_stderr": 0.03895632464138937 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4876543209876543, + "acc_stderr": 0.027812262269327235, + "acc_norm": 0.4876543209876543, + "acc_norm_stderr": 0.027812262269327235 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5854922279792746, + "acc_stderr": 0.035553003195576686, + "acc_norm": 0.5854922279792746, + "acc_norm_stderr": 0.035553003195576686 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3508771929824561, + "acc_stderr": 0.04489539350270698, + "acc_norm": 0.3508771929824561, + "acc_norm_stderr": 0.04489539350270698 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6201834862385321, + "acc_stderr": 0.020808825617866244, + "acc_norm": 0.6201834862385321, + "acc_norm_stderr": 0.020808825617866244 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.24603174603174602, + "acc_stderr": 0.03852273364924315, + "acc_norm": 0.24603174603174602, + "acc_norm_stderr": 0.03852273364924315 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5098039215686274, + "acc_stderr": 0.028624412550167958, + "acc_norm": 0.5098039215686274, + "acc_norm_stderr": 0.028624412550167958 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.57, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.57, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 
0.6942148760330579, + "acc_stderr": 0.04205953933884122, + "acc_norm": 0.6942148760330579, + "acc_norm_stderr": 0.04205953933884122 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4934210526315789, + "acc_stderr": 0.040685900502249704, + "acc_norm": 0.4934210526315789, + "acc_norm_stderr": 0.040685900502249704 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.40032679738562094, + "acc_stderr": 0.019821843688271765, + "acc_norm": 0.40032679738562094, + "acc_norm_stderr": 0.019821843688271765 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.33687943262411346, + "acc_stderr": 0.02819553487396673, + "acc_norm": 0.33687943262411346, + "acc_norm_stderr": 0.02819553487396673 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.30357142857142855, + "acc_stderr": 0.04364226155841044, + "acc_norm": 0.30357142857142855, + "acc_norm_stderr": 0.04364226155841044 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.35648148148148145, + "acc_stderr": 0.032664783315272714, + "acc_norm": 0.35648148148148145, + "acc_norm_stderr": 0.032664783315272714 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2435754189944134, + "acc_stderr": 0.01435591196476786, + "acc_norm": 0.2435754189944134, + "acc_norm_stderr": 0.01435591196476786 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.030161911930767102, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.030161911930767102 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.43673469387755104, + "acc_stderr": 0.031751952375833226, + "acc_norm": 0.43673469387755104, + "acc_norm_stderr": 
0.031751952375833226 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6582278481012658, + "acc_stderr": 0.030874537537553617, + "acc_norm": 0.6582278481012658, + "acc_norm_stderr": 0.030874537537553617 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3344198174706649, + "acc_stderr": 0.012049668983214936, + "acc_norm": 0.3344198174706649, + "acc_norm_stderr": 0.012049668983214936 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5392156862745098, + "acc_stderr": 0.03498501649369527, + "acc_norm": 0.5392156862745098, + "acc_norm_stderr": 0.03498501649369527 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5636363636363636, + "acc_stderr": 0.03872592983524754, + "acc_norm": 0.5636363636363636, + "acc_norm_stderr": 0.03872592983524754 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.26193390452876375, + "mc1_stderr": 0.015392118805015008, + "mc2": 0.41591991076293416, + "mc2_stderr": 0.014755214666360166 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5430932703659976, + "acc_stderr": 0.01712638909308678, + "acc_norm": 0.6044864226682408, + "acc_norm_stderr": 0.016810815902206042 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + 
"harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "wkshin89/yi-ko-6b-instruct-test-v0.2", + "model_sha": 
"247b01dc63dd7e13f115cbf6ece8c9e22610e642", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/wngkdud/llama2_DPO_test_v1/result_2023-11-20 08:26:17.json b/wngkdud/llama2_DPO_test_v1/result_2023-11-20 08:26:17.json new file mode 100644 index 0000000000000000000000000000000000000000..b96087c1d333215982a228b2096e0f3e4705d743 --- /dev/null +++ b/wngkdud/llama2_DPO_test_v1/result_2023-11-20 08:26:17.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3319112627986348, + "acc_stderr": 0.013760988200880534, + "acc_norm": 0.38822525597269625, + "acc_norm_stderr": 0.014241614207414047 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4018123879705238, + "acc_stderr": 0.004892624490937213, + "acc_norm": 0.5252937661820355, + "acc_norm_stderr": 0.004983392650570966 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.45614035087719296, + "acc_stderr": 0.03820042586602966, + "acc_norm": 0.45614035087719296, + "acc_norm_stderr": 0.03820042586602966 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.3883495145631068, + "acc_stderr": 0.048257293373563895, + "acc_norm": 0.3883495145631068, + "acc_norm_stderr": 0.048257293373563895 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4559386973180077, + "acc_stderr": 0.01781040392543536, + "acc_norm": 0.4559386973180077, + "acc_norm_stderr": 0.01781040392543536 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3851851851851852, + "acc_stderr": 0.042039210401562783, + "acc_norm": 0.3851851851851852, + "acc_norm_stderr": 0.042039210401562783 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3872340425531915, + "acc_stderr": 0.03184389265339525, + "acc_norm": 
0.3872340425531915, + "acc_norm_stderr": 0.03184389265339525 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.35542168674698793, + "acc_stderr": 0.03726214354322415, + "acc_norm": 0.35542168674698793, + "acc_norm_stderr": 0.03726214354322415 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4662379421221865, + "acc_stderr": 0.028333277109562783, + "acc_norm": 0.4662379421221865, + "acc_norm_stderr": 0.028333277109562783 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4798206278026906, + "acc_stderr": 0.033530461674123005, + "acc_norm": 0.4798206278026906, + "acc_norm_stderr": 0.033530461674123005 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.35877862595419846, + "acc_stderr": 0.04206739313864908, + "acc_norm": 0.35877862595419846, + "acc_norm_stderr": 0.04206739313864908 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.4797979797979798, + "acc_stderr": 0.03559443565563918, + "acc_norm": 0.4797979797979798, + "acc_norm_stderr": 0.03559443565563918 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2827586206896552, + "acc_stderr": 0.03752833958003337, + "acc_norm": 0.2827586206896552, + "acc_norm_stderr": 0.03752833958003337 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237655, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237655 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3067226890756303, + "acc_stderr": 0.029953823891887055, + "acc_norm": 0.3067226890756303, + "acc_norm_stderr": 0.029953823891887055 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.33589743589743587, + "acc_stderr": 0.02394672474156397, + "acc_norm": 0.33589743589743587, + "acc_norm_stderr": 0.02394672474156397 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.35, 
+ "acc_stderr": 0.047937248544110175, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110175 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.04820403072760627, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.04820403072760627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.33497536945812806, + "acc_stderr": 0.0332085274234831, + "acc_norm": 0.33497536945812806, + "acc_norm_stderr": 0.0332085274234831 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.36451612903225805, + "acc_stderr": 0.02737987122994325, + "acc_norm": 0.36451612903225805, + "acc_norm_stderr": 0.02737987122994325 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6068376068376068, + "acc_stderr": 0.03199957924651048, + "acc_norm": 0.6068376068376068, + "acc_norm_stderr": 0.03199957924651048 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.37358490566037733, + "acc_stderr": 0.029773082713319878, + "acc_norm": 0.37358490566037733, + "acc_norm_stderr": 0.029773082713319878 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.42727272727272725, + "acc_stderr": 0.04738198703545483, + "acc_norm": 0.42727272727272725, + "acc_norm_stderr": 0.04738198703545483 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24814814814814815, + "acc_stderr": 0.0263357394040558, + "acc_norm": 0.24814814814814815, + "acc_norm_stderr": 0.0263357394040558 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.23841059602649006, + "acc_stderr": 0.03479185572599661, + "acc_norm": 0.23841059602649006, + "acc_norm_stderr": 0.03479185572599661 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.4577114427860697, + "acc_stderr": 0.035228658640995975, + "acc_norm": 0.4577114427860697, + "acc_norm_stderr": 0.035228658640995975 + }, + 
"harness|ko_mmlu_college_medicine|5": { + "acc": 0.31213872832369943, + "acc_stderr": 0.035331333893236574, + "acc_norm": 0.31213872832369943, + "acc_norm_stderr": 0.035331333893236574 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2671957671957672, + "acc_stderr": 0.02278967314577657, + "acc_norm": 0.2671957671957672, + "acc_norm_stderr": 0.02278967314577657 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3402777777777778, + "acc_stderr": 0.03962135573486219, + "acc_norm": 0.3402777777777778, + "acc_norm_stderr": 0.03962135573486219 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4046242774566474, + "acc_stderr": 0.02642481659400985, + "acc_norm": 0.4046242774566474, + "acc_norm_stderr": 0.02642481659400985 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3987730061349693, + "acc_stderr": 0.03847021420456024, + "acc_norm": 0.3987730061349693, + "acc_norm_stderr": 0.03847021420456024 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4104938271604938, + "acc_stderr": 0.027371350925124768, + "acc_norm": 0.4104938271604938, + "acc_norm_stderr": 0.027371350925124768 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.39896373056994816, + "acc_stderr": 0.03533999094065696, + "acc_norm": 0.39896373056994816, + "acc_norm_stderr": 0.03533999094065696 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.040493392977481425, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 
0.040493392977481425 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.48807339449541287, + "acc_stderr": 0.021431223617362237, + "acc_norm": 0.48807339449541287, + "acc_norm_stderr": 0.021431223617362237 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.20634920634920634, + "acc_stderr": 0.036196045241242494, + "acc_norm": 0.20634920634920634, + "acc_norm_stderr": 0.036196045241242494 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.34967320261437906, + "acc_stderr": 0.0273053080762747, + "acc_norm": 0.34967320261437906, + "acc_norm_stderr": 0.0273053080762747 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5371900826446281, + "acc_stderr": 0.045517111961042175, + "acc_norm": 0.5371900826446281, + "acc_norm_stderr": 0.045517111961042175 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3157894736842105, + "acc_stderr": 0.03782728980865469, + "acc_norm": 0.3157894736842105, + "acc_norm_stderr": 0.03782728980865469 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3480392156862745, + "acc_stderr": 0.019270998708223974, + "acc_norm": 0.3480392156862745, + "acc_norm_stderr": 0.019270998708223974 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2978723404255319, + "acc_stderr": 0.027281608344469414, + "acc_norm": 0.2978723404255319, + "acc_norm_stderr": 0.027281608344469414 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.04287858751340456, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.04287858751340456 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.24074074074074073, + "acc_stderr": 0.029157522184605603, + "acc_norm": 0.24074074074074073, + "acc_norm_stderr": 0.029157522184605603 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 
0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.27205882352941174, + "acc_stderr": 0.027033041151681456, + "acc_norm": 0.27205882352941174, + "acc_norm_stderr": 0.027033041151681456 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2938775510204082, + "acc_stderr": 0.02916273841024978, + "acc_norm": 0.2938775510204082, + "acc_norm_stderr": 0.02916273841024978 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.4472573839662447, + "acc_stderr": 0.03236564251614192, + "acc_norm": 0.4472573839662447, + "acc_norm_stderr": 0.03236564251614192 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.28748370273794005, + "acc_stderr": 0.011559337355708512, + "acc_norm": 0.28748370273794005, + "acc_norm_stderr": 0.011559337355708512 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.3431372549019608, + "acc_stderr": 0.033321399446680854, + "acc_norm": 0.3431372549019608, + "acc_norm_stderr": 0.033321399446680854 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.45454545454545453, + "acc_stderr": 0.038881769216741004, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.038881769216741004 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3047735618115055, + "mc1_stderr": 0.016114124156882455, + "mc2": 0.48120300528886556, + "mc2_stderr": 0.016615872220447157 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4344746162927981, + "acc_stderr": 0.017042098620824935, + "acc_norm": 0.448642266824085, + "acc_norm_stderr": 0.017099430514725792 + } + }, + "versions": { + "all": 0, 
+ "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + 
"harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "wngkdud/llama2_DPO_test_v1", + "model_sha": "69af6cea4762a14fddbeeddb7f42375bd9a81181", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/wngkdud/llama2_koen_13b_SFTtrain/result_2023-11-22 07:45:54.json b/wngkdud/llama2_koen_13b_SFTtrain/result_2023-11-22 07:45:54.json new file mode 100644 index 0000000000000000000000000000000000000000..9a645235fe191ab361d8c389da205c79e4aa04de --- /dev/null +++ b/wngkdud/llama2_koen_13b_SFTtrain/result_2023-11-22 07:45:54.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.33532423208191126, + "acc_stderr": 0.013796182947785564, + "acc_norm": 0.39334470989761094, + "acc_norm_stderr": 0.014275101465693026 + }, + "harness|ko_hellaswag|10": { + "acc": 0.38856801433977295, + "acc_stderr": 0.00486428617673183, + "acc_norm": 0.5029874526986656, + "acc_norm_stderr": 0.0049896923443139935 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.47368421052631576, + "acc_stderr": 0.038295098689947286, + "acc_norm": 0.47368421052631576, + "acc_norm_stderr": 0.038295098689947286 + }, + "harness|ko_mmlu_management|5": { + 
"acc": 0.3883495145631068, + "acc_stderr": 0.0482572933735639, + "acc_norm": 0.3883495145631068, + "acc_norm_stderr": 0.0482572933735639 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.48659003831417624, + "acc_stderr": 0.017873531736510392, + "acc_norm": 0.48659003831417624, + "acc_norm_stderr": 0.017873531736510392 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.43703703703703706, + "acc_stderr": 0.04284958639753399, + "acc_norm": 0.43703703703703706, + "acc_norm_stderr": 0.04284958639753399 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39148936170212767, + "acc_stderr": 0.03190701242326812, + "acc_norm": 0.39148936170212767, + "acc_norm_stderr": 0.03190701242326812 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.40963855421686746, + "acc_stderr": 0.038284011150790206, + "acc_norm": 0.40963855421686746, + "acc_norm_stderr": 0.038284011150790206 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4630225080385852, + "acc_stderr": 0.028320325830105908, + "acc_norm": 0.4630225080385852, + "acc_norm_stderr": 0.028320325830105908 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.47533632286995514, + "acc_stderr": 0.03351695167652628, + "acc_norm": 0.47533632286995514, + "acc_norm_stderr": 0.03351695167652628 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.366412213740458, + "acc_stderr": 0.04225875451969638, + "acc_norm": 0.366412213740458, + "acc_norm_stderr": 0.04225875451969638 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5353535353535354, + "acc_stderr": 0.03553436368828065, + "acc_norm": 0.5353535353535354, + "acc_norm_stderr": 0.03553436368828065 + }, + 
"harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.296551724137931, + "acc_stderr": 0.03806142687309993, + "acc_norm": 0.296551724137931, + "acc_norm_stderr": 0.03806142687309993 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.04023382273617746, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.04023382273617746 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.36554621848739494, + "acc_stderr": 0.0312821770636846, + "acc_norm": 0.36554621848739494, + "acc_norm_stderr": 0.0312821770636846 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.39487179487179486, + "acc_stderr": 0.024784316942156367, + "acc_norm": 0.39487179487179486, + "acc_norm_stderr": 0.024784316942156367 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.04820403072760627, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.04820403072760627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3842364532019704, + "acc_stderr": 0.034223985656575494, + "acc_norm": 0.3842364532019704, + "acc_norm_stderr": 0.034223985656575494 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4032258064516129, + "acc_stderr": 0.027906150826041143, + "acc_norm": 0.4032258064516129, + "acc_norm_stderr": 0.027906150826041143 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6239316239316239, + "acc_stderr": 0.03173393632969482, + "acc_norm": 0.6239316239316239, + "acc_norm_stderr": 0.03173393632969482 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.39245283018867927, + "acc_stderr": 0.03005258057955785, + "acc_norm": 0.39245283018867927, + 
"acc_norm_stderr": 0.03005258057955785 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.43636363636363634, + "acc_stderr": 0.04750185058907297, + "acc_norm": 0.43636363636363634, + "acc_norm_stderr": 0.04750185058907297 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.27037037037037037, + "acc_stderr": 0.027080372815145644, + "acc_norm": 0.27037037037037037, + "acc_norm_stderr": 0.027080372815145644 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + "acc_stderr": 0.03684881521389023, + "acc_norm": 0.2847682119205298, + "acc_norm_stderr": 0.03684881521389023 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.527363184079602, + "acc_stderr": 0.035302355173346824, + "acc_norm": 0.527363184079602, + "acc_norm_stderr": 0.035302355173346824 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3179190751445087, + "acc_stderr": 0.03550683989165582, + "acc_norm": 0.3179190751445087, + "acc_norm_stderr": 0.03550683989165582 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.022644212615525218, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.022644212615525218 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.040166600304512336, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.040166600304512336 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.45664739884393063, + "acc_stderr": 0.026817718130348916, + "acc_norm": 0.45664739884393063, + "acc_norm_stderr": 0.026817718130348916 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4294478527607362, + "acc_stderr": 
0.03889066619112722, + "acc_norm": 0.4294478527607362, + "acc_norm_stderr": 0.03889066619112722 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.42901234567901236, + "acc_stderr": 0.027538925613470863, + "acc_norm": 0.42901234567901236, + "acc_norm_stderr": 0.027538925613470863 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.42487046632124353, + "acc_stderr": 0.0356747133521254, + "acc_norm": 0.42487046632124353, + "acc_norm_stderr": 0.0356747133521254 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.21929824561403508, + "acc_stderr": 0.03892431106518752, + "acc_norm": 0.21929824561403508, + "acc_norm_stderr": 0.03892431106518752 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.4917431192660551, + "acc_stderr": 0.021434399918214338, + "acc_norm": 0.4917431192660551, + "acc_norm_stderr": 0.021434399918214338 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.23015873015873015, + "acc_stderr": 0.037649508797906066, + "acc_norm": 0.23015873015873015, + "acc_norm_stderr": 0.037649508797906066 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.37254901960784315, + "acc_stderr": 0.027684181883302895, + "acc_norm": 0.37254901960784315, + "acc_norm_stderr": 0.027684181883302895 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5619834710743802, + "acc_stderr": 0.045291468044357915, + "acc_norm": 0.5619834710743802, + "acc_norm_stderr": 0.045291468044357915 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.34868421052631576, + "acc_stderr": 0.038781398887976104, + "acc_norm": 0.34868421052631576, + "acc_norm_stderr": 0.038781398887976104 + }, + "harness|ko_mmlu_professional_psychology|5": { 
+ "acc": 0.36764705882352944, + "acc_stderr": 0.019506291693954854, + "acc_norm": 0.36764705882352944, + "acc_norm_stderr": 0.019506291693954854 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.30141843971631205, + "acc_stderr": 0.02737412888263115, + "acc_norm": 0.30141843971631205, + "acc_norm_stderr": 0.02737412888263115 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.24107142857142858, + "acc_stderr": 0.04059867246952687, + "acc_norm": 0.24107142857142858, + "acc_norm_stderr": 0.04059867246952687 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.28703703703703703, + "acc_stderr": 0.030851992993257013, + "acc_norm": 0.28703703703703703, + "acc_norm_stderr": 0.030851992993257013 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3272058823529412, + "acc_stderr": 0.028501452860396573, + "acc_norm": 0.3272058823529412, + "acc_norm_stderr": 0.028501452860396573 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2938775510204082, + "acc_stderr": 0.029162738410249772, + "acc_norm": 0.2938775510204082, + "acc_norm_stderr": 0.029162738410249772 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.510548523206751, + "acc_stderr": 0.032539983791662855, + "acc_norm": 0.510548523206751, + "acc_norm_stderr": 0.032539983791662855 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2926988265971317, + "acc_stderr": 0.011620949195849535, + "acc_norm": 0.2926988265971317, + 
"acc_norm_stderr": 0.011620949195849535 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.39705882352941174, + "acc_stderr": 0.03434131164719128, + "acc_norm": 0.39705882352941174, + "acc_norm_stderr": 0.03434131164719128 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.503030303030303, + "acc_stderr": 0.03904272341431855, + "acc_norm": 0.503030303030303, + "acc_norm_stderr": 0.03904272341431855 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.29008567931456547, + "mc1_stderr": 0.01588623687420952, + "mc2": 0.4530197496571, + "mc2_stderr": 0.01640666508928725 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4155844155844156, + "acc_stderr": 0.01694358631307657, + "acc_norm": 0.45218417945690675, + "acc_norm_stderr": 0.017111567130916782 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "wngkdud/llama2_koen_13b_SFTtrain", + "model_sha": "3d446bd9c006d91347daa92a69c6a876506c39bc", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/wons/Yi-6b-test-v0.1/result_2023-12-02 10:17:46.json b/wons/Yi-6b-test-v0.1/result_2023-12-02 10:17:46.json new file mode 100644 index 
0000000000000000000000000000000000000000..8441523b3b6fb505df1f2b99a98cdcc3b634c869 --- /dev/null +++ b/wons/Yi-6b-test-v0.1/result_2023-12-02 10:17:46.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3412969283276451, + "acc_stderr": 0.013855831287497723, + "acc_norm": 0.4104095563139932, + "acc_norm_stderr": 0.014374922192642666 + }, + "harness|ko_hellaswag|10": { + "acc": 0.398725353515236, + "acc_stderr": 0.004886353563571851, + "acc_norm": 0.5297749452300339, + "acc_norm_stderr": 0.004980926198798982 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.49707602339181284, + "acc_stderr": 0.03834759370936839, + "acc_norm": 0.49707602339181284, + "acc_norm_stderr": 0.03834759370936839 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5825242718446602, + "acc_stderr": 0.04882840548212238, + "acc_norm": 0.5825242718446602, + "acc_norm_stderr": 0.04882840548212238 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5312899106002554, + "acc_stderr": 0.017844918090468547, + "acc_norm": 0.5312899106002554, + "acc_norm_stderr": 0.017844918090468547 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4740740740740741, + "acc_stderr": 0.04313531696750573, + "acc_norm": 0.4740740740740741, + "acc_norm_stderr": 0.04313531696750573 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.425531914893617, + "acc_stderr": 0.032321469162244695, + "acc_norm": 0.425531914893617, + "acc_norm_stderr": 0.032321469162244695 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4036144578313253, + "acc_stderr": 0.03819486140758396, + "acc_norm": 0.4036144578313253, + "acc_norm_stderr": 0.03819486140758396 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.49517684887459806, + "acc_stderr": 0.028396770444111298, + "acc_norm": 0.49517684887459806, + "acc_norm_stderr": 
0.028396770444111298 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5022421524663677, + "acc_stderr": 0.033557465352232634, + "acc_norm": 0.5022421524663677, + "acc_norm_stderr": 0.033557465352232634 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48854961832061067, + "acc_stderr": 0.04384140024078016, + "acc_norm": 0.48854961832061067, + "acc_norm_stderr": 0.04384140024078016 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6414141414141414, + "acc_stderr": 0.03416903640391521, + "acc_norm": 0.6414141414141414, + "acc_norm_stderr": 0.03416903640391521 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.503448275862069, + "acc_stderr": 0.04166567577101579, + "acc_norm": 0.503448275862069, + "acc_norm_stderr": 0.04166567577101579 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3431372549019608, + "acc_stderr": 0.04724007352383888, + "acc_norm": 0.3431372549019608, + "acc_norm_stderr": 0.04724007352383888 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5042016806722689, + "acc_stderr": 0.03247734334448111, + "acc_norm": 0.5042016806722689, + "acc_norm_stderr": 0.03247734334448111 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.47692307692307695, + "acc_stderr": 0.025323990861736118, + "acc_norm": 0.47692307692307695, + "acc_norm_stderr": 0.025323990861736118 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5092592592592593, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.5092592592592593, + 
"acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.034819048444388045, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.034819048444388045 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5258064516129032, + "acc_stderr": 0.028406095057653326, + "acc_norm": 0.5258064516129032, + "acc_norm_stderr": 0.028406095057653326 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7307692307692307, + "acc_stderr": 0.029058588303748842, + "acc_norm": 0.7307692307692307, + "acc_norm_stderr": 0.029058588303748842 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5169811320754717, + "acc_stderr": 0.030755120364119905, + "acc_norm": 0.5169811320754717, + "acc_norm_stderr": 0.030755120364119905 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5363636363636364, + "acc_stderr": 0.04776449162396197, + "acc_norm": 0.5363636363636364, + "acc_norm_stderr": 0.04776449162396197 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.32592592592592595, + "acc_stderr": 0.02857834836547307, + "acc_norm": 0.32592592592592595, + "acc_norm_stderr": 0.02857834836547307 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3708609271523179, + "acc_stderr": 0.03943966699183629, + "acc_norm": 0.3708609271523179, + "acc_norm_stderr": 0.03943966699183629 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6467661691542289, + "acc_stderr": 0.03379790611796777, + "acc_norm": 0.6467661691542289, + "acc_norm_stderr": 0.03379790611796777 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4508670520231214, + "acc_stderr": 0.0379401267469703, + "acc_norm": 0.4508670520231214, + "acc_norm_stderr": 0.0379401267469703 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.024677862841332783, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.024677862841332783 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 
0.4236111111111111, + "acc_stderr": 0.041321250197233685, + "acc_norm": 0.4236111111111111, + "acc_norm_stderr": 0.041321250197233685 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.64, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.64, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5086705202312138, + "acc_stderr": 0.026915047355369818, + "acc_norm": 0.5086705202312138, + "acc_norm_stderr": 0.026915047355369818 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4539877300613497, + "acc_stderr": 0.0391170190467718, + "acc_norm": 0.4539877300613497, + "acc_norm_stderr": 0.0391170190467718 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.49382716049382713, + "acc_stderr": 0.027818623962583302, + "acc_norm": 0.49382716049382713, + "acc_norm_stderr": 0.027818623962583302 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5854922279792746, + "acc_stderr": 0.03555300319557669, + "acc_norm": 0.5854922279792746, + "acc_norm_stderr": 0.03555300319557669 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3508771929824561, + "acc_stderr": 0.04489539350270698, + "acc_norm": 0.3508771929824561, + "acc_norm_stderr": 0.04489539350270698 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6146788990825688, + "acc_stderr": 0.02086585085279411, + "acc_norm": 0.6146788990825688, + "acc_norm_stderr": 0.02086585085279411 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.0404061017820884, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.0404061017820884 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 
0.5326797385620915, + "acc_stderr": 0.02856869975222588, + "acc_norm": 0.5326797385620915, + "acc_norm_stderr": 0.02856869975222588 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6446280991735537, + "acc_stderr": 0.0436923632657398, + "acc_norm": 0.6446280991735537, + "acc_norm_stderr": 0.0436923632657398 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4934210526315789, + "acc_stderr": 0.040685900502249704, + "acc_norm": 0.4934210526315789, + "acc_norm_stderr": 0.040685900502249704 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.40522875816993464, + "acc_stderr": 0.019861155193829163, + "acc_norm": 0.40522875816993464, + "acc_norm_stderr": 0.019861155193829163 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.33687943262411346, + "acc_stderr": 0.02819553487396673, + "acc_norm": 0.33687943262411346, + "acc_norm_stderr": 0.02819553487396673 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.30357142857142855, + "acc_stderr": 0.04364226155841044, + "acc_norm": 0.30357142857142855, + "acc_norm_stderr": 0.04364226155841044 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.03256850570293646, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.03256850570293646 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + 
"harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4375, + "acc_stderr": 0.030134614954403924, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.030134614954403924 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5102040816326531, + "acc_stderr": 0.03200255347893783, + "acc_norm": 0.5102040816326531, + "acc_norm_stderr": 0.03200255347893783 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6329113924050633, + "acc_stderr": 0.031376240725616185, + "acc_norm": 0.6329113924050633, + "acc_norm_stderr": 0.031376240725616185 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3363754889178618, + "acc_stderr": 0.012067083079452229, + "acc_norm": 0.3363754889178618, + "acc_norm_stderr": 0.012067083079452229 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5245098039215687, + "acc_stderr": 0.03505093194348798, + "acc_norm": 0.5245098039215687, + "acc_norm_stderr": 0.03505093194348798 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5515151515151515, + "acc_stderr": 0.038835659779569286, + "acc_norm": 0.5515151515151515, + "acc_norm_stderr": 0.038835659779569286 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2607099143206854, + "mc1_stderr": 0.015368841620766372, + "mc2": 0.4097167044022003, + "mc2_stderr": 0.014779447855728677 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5242030696576151, + "acc_stderr": 0.017170202466520748, + "acc_norm": 0.5844155844155844, + "acc_norm_stderr": 0.01694358631307656 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + 
"harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + 
"harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "wons/Yi-6b-test-v0.1", + "model_sha": "6d9f3acc8e9e02f0dc1457cfa67bf8f721850a63", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/wons/llama2-13b-dpo-test-v0.2/result_2023-11-29 14:42:30.json b/wons/llama2-13b-dpo-test-v0.2/result_2023-11-29 14:42:30.json new file mode 100644 index 0000000000000000000000000000000000000000..928a0fb984d66c0f7019639f24d5ad9cd438a61a --- /dev/null +++ b/wons/llama2-13b-dpo-test-v0.2/result_2023-11-29 14:42:30.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3302047781569966, + "acc_stderr": 0.013743085603760427, + "acc_norm": 0.3796928327645051, + "acc_norm_stderr": 0.014182119866974872 + }, + "harness|ko_hellaswag|10": { + "acc": 0.39852619000199163, + "acc_stderr": 0.0048859420408945585, + "acc_norm": 0.5236008763194583, + "acc_norm_stderr": 0.004984219681732655 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4502923976608187, + "acc_stderr": 0.038158273659132366, + "acc_norm": 0.4502923976608187, + "acc_norm_stderr": 0.038158273659132366 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.3883495145631068, + "acc_stderr": 0.04825729337356389, + "acc_norm": 0.3883495145631068, + "acc_norm_stderr": 0.04825729337356389 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5057471264367817, + "acc_stderr": 0.017878782326129234, + "acc_norm": 0.5057471264367817, + "acc_norm_stderr": 0.017878782326129234 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.43703703703703706, + "acc_stderr": 0.04284958639753398, + "acc_norm": 0.43703703703703706, + "acc_norm_stderr": 
0.04284958639753398 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768078, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768078 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3276595744680851, + "acc_stderr": 0.030683020843231008, + "acc_norm": 0.3276595744680851, + "acc_norm_stderr": 0.030683020843231008 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3433734939759036, + "acc_stderr": 0.03696584317010601, + "acc_norm": 0.3433734939759036, + "acc_norm_stderr": 0.03696584317010601 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4565916398713826, + "acc_stderr": 0.0282908690541976, + "acc_norm": 0.4565916398713826, + "acc_norm_stderr": 0.0282908690541976 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.39461883408071746, + "acc_stderr": 0.03280400504755291, + "acc_norm": 0.39461883408071746, + "acc_norm_stderr": 0.03280400504755291 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48091603053435117, + "acc_stderr": 0.04382094705550989, + "acc_norm": 0.48091603053435117, + "acc_norm_stderr": 0.04382094705550989 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.41919191919191917, + "acc_stderr": 0.035155207286704175, + "acc_norm": 0.41919191919191917, + "acc_norm_stderr": 0.035155207286704175 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3448275862068966, + "acc_stderr": 0.03960933549451207, + "acc_norm": 0.3448275862068966, + "acc_norm_stderr": 0.03960933549451207 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.041583075330832865, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.041583075330832865 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3949579831932773, + "acc_stderr": 0.031753678460966245, + "acc_norm": 
0.3949579831932773, + "acc_norm_stderr": 0.031753678460966245 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4256410256410256, + "acc_stderr": 0.025069094387296542, + "acc_norm": 0.4256410256410256, + "acc_norm_stderr": 0.025069094387296542 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4537037037037037, + "acc_stderr": 0.048129173245368216, + "acc_norm": 0.4537037037037037, + "acc_norm_stderr": 0.048129173245368216 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3645320197044335, + "acc_stderr": 0.0338640574606209, + "acc_norm": 0.3645320197044335, + "acc_norm_stderr": 0.0338640574606209 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4258064516129032, + "acc_stderr": 0.028129112709165897, + "acc_norm": 0.4258064516129032, + "acc_norm_stderr": 0.028129112709165897 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6025641025641025, + "acc_stderr": 0.03205953453789293, + "acc_norm": 0.6025641025641025, + "acc_norm_stderr": 0.03205953453789293 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.41509433962264153, + "acc_stderr": 0.030325945789286105, + "acc_norm": 0.41509433962264153, + "acc_norm_stderr": 0.030325945789286105 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.39090909090909093, + "acc_stderr": 0.04673752333670237, + "acc_norm": 0.39090909090909093, + "acc_norm_stderr": 0.04673752333670237 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.02534809746809784, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.02534809746809784 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + 
"acc_stderr": 0.03802039760107903, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.03802039760107903 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5422885572139303, + "acc_stderr": 0.03522865864099597, + "acc_norm": 0.5422885572139303, + "acc_norm_stderr": 0.03522865864099597 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4277456647398844, + "acc_stderr": 0.037724468575180276, + "acc_norm": 0.4277456647398844, + "acc_norm_stderr": 0.037724468575180276 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2698412698412698, + "acc_stderr": 0.022860838309232072, + "acc_norm": 0.2698412698412698, + "acc_norm_stderr": 0.022860838309232072 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3263888888888889, + "acc_stderr": 0.03921067198982266, + "acc_norm": 0.3263888888888889, + "acc_norm_stderr": 0.03921067198982266 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.40173410404624277, + "acc_stderr": 0.02639410417764363, + "acc_norm": 0.40173410404624277, + "acc_norm_stderr": 0.02639410417764363 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3987730061349693, + "acc_stderr": 0.03847021420456023, + "acc_norm": 0.3987730061349693, + "acc_norm_stderr": 0.03847021420456023 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.44135802469135804, + "acc_stderr": 0.027628737155668777, + "acc_norm": 0.44135802469135804, + "acc_norm_stderr": 0.027628737155668777 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.046482319871173156, + "acc_norm": 0.31, + "acc_norm_stderr": 0.046482319871173156 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 
0.46632124352331605, + "acc_stderr": 0.03600244069867178, + "acc_norm": 0.46632124352331605, + "acc_norm_stderr": 0.03600244069867178 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.04096985139843671, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.04096985139843671 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.48440366972477067, + "acc_stderr": 0.02142689153920805, + "acc_norm": 0.48440366972477067, + "acc_norm_stderr": 0.02142689153920805 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.040406101782088394, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.040406101782088394 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.027826109307283683, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.027826109307283683 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.41, + "acc_stderr": 0.04943110704237101, + "acc_norm": 0.41, + "acc_norm_stderr": 0.04943110704237101 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5619834710743802, + "acc_stderr": 0.045291468044357915, + "acc_norm": 0.5619834710743802, + "acc_norm_stderr": 0.045291468044357915 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40131578947368424, + "acc_stderr": 0.03988903703336284, + "acc_norm": 0.40131578947368424, + "acc_norm_stderr": 0.03988903703336284 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.35130718954248363, + "acc_stderr": 0.01931267606578657, + "acc_norm": 0.35130718954248363, + "acc_norm_stderr": 0.01931267606578657 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3404255319148936, + "acc_stderr": 0.028267657482650144, + "acc_norm": 0.3404255319148936, + "acc_norm_stderr": 0.028267657482650144 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.22321428571428573, + "acc_stderr": 0.039523019677025116, + "acc_norm": 0.22321428571428573, + "acc_norm_stderr": 
0.039523019677025116 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.38425925925925924, + "acc_stderr": 0.03317354514310742, + "acc_norm": 0.38425925925925924, + "acc_norm_stderr": 0.03317354514310742 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2536312849162011, + "acc_stderr": 0.014551553659369923, + "acc_norm": 0.2536312849162011, + "acc_norm_stderr": 0.014551553659369923 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.49264705882352944, + "acc_stderr": 0.030369552523902173, + "acc_norm": 0.49264705882352944, + "acc_norm_stderr": 0.030369552523902173 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4122448979591837, + "acc_stderr": 0.0315123604467428, + "acc_norm": 0.4122448979591837, + "acc_norm_stderr": 0.0315123604467428 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.4978902953586498, + "acc_stderr": 0.032546938018020076, + "acc_norm": 0.4978902953586498, + "acc_norm_stderr": 0.032546938018020076 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3044328552803129, + "acc_stderr": 0.011752877592597577, + "acc_norm": 0.3044328552803129, + "acc_norm_stderr": 0.011752877592597577 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.37254901960784315, + "acc_stderr": 0.033933885849584046, + "acc_norm": 0.37254901960784315, + "acc_norm_stderr": 0.033933885849584046 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4303030303030303, + "acc_stderr": 0.03866225962879077, + "acc_norm": 0.4303030303030303, + "acc_norm_stderr": 0.03866225962879077 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.37576499388004897, + 
"mc1_stderr": 0.016954584060214297, + "mc2": 0.5484649933900534, + "mc2_stderr": 0.015659278554917617 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.6115702479338843, + "acc_stderr": 0.016756921571069422, + "acc_norm": 0.6458087367178277, + "acc_norm_stderr": 0.01644317574921476 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + 
"harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "wons/llama2-13b-dpo-test-v0.2", + "model_sha": "05fff3bca94d8e55146c1aef68557eabb41e2051", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/wons/llama2-13b-test-v0.1/result_2023-11-22 07:53:03.json b/wons/llama2-13b-test-v0.1/result_2023-11-22 07:53:03.json new file mode 100644 index 0000000000000000000000000000000000000000..1bcfde4b77becad0f8a3a397feb09c4decdf6152 --- /dev/null +++ b/wons/llama2-13b-test-v0.1/result_2023-11-22 07:53:03.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.37457337883959047, + "acc_stderr": 0.014144193471893456, + "acc_norm": 0.45563139931740615, + "acc_norm_stderr": 0.014553749939306866 + }, + "harness|ko_hellaswag|10": { + "acc": 0.42202748456482775, + "acc_stderr": 0.004928735103635848, + "acc_norm": 0.5662218681537542, + 
"acc_norm_stderr": 0.00494582405650181 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5380116959064327, + "acc_stderr": 0.03823727092882307, + "acc_norm": 0.5380116959064327, + "acc_norm_stderr": 0.03823727092882307 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.46601941747572817, + "acc_stderr": 0.04939291447273481, + "acc_norm": 0.46601941747572817, + "acc_norm_stderr": 0.04939291447273481 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5300127713920817, + "acc_stderr": 0.017847723086649083, + "acc_norm": 0.5300127713920817, + "acc_norm_stderr": 0.017847723086649083 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.5037037037037037, + "acc_stderr": 0.043192236258113324, + "acc_norm": 0.5037037037037037, + "acc_norm_stderr": 0.043192236258113324 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4127659574468085, + "acc_stderr": 0.03218471141400351, + "acc_norm": 0.4127659574468085, + "acc_norm_stderr": 0.03218471141400351 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.43373493975903615, + "acc_stderr": 0.03858158940685515, + "acc_norm": 0.43373493975903615, + "acc_norm_stderr": 0.03858158940685515 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.49517684887459806, + "acc_stderr": 0.028396770444111288, + "acc_norm": 0.49517684887459806, + "acc_norm_stderr": 0.028396770444111288 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5201793721973094, + "acc_stderr": 0.033530461674123, + "acc_norm": 0.5201793721973094, + "acc_norm_stderr": 0.033530461674123 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48091603053435117, + "acc_stderr": 0.04382094705550988, + "acc_norm": 0.48091603053435117, + "acc_norm_stderr": 0.04382094705550988 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + 
"acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.51010101010101, + "acc_stderr": 0.035616254886737454, + "acc_norm": 0.51010101010101, + "acc_norm_stderr": 0.035616254886737454 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.04104269211806232, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.04104269211806232 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.04220773659171453, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.04220773659171453 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.42436974789915966, + "acc_stderr": 0.032104790510157764, + "acc_norm": 0.42436974789915966, + "acc_norm_stderr": 0.032104790510157764 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4153846153846154, + "acc_stderr": 0.024985354923102308, + "acc_norm": 0.4153846153846154, + "acc_norm_stderr": 0.024985354923102308 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.47, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.47, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5092592592592593, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.5092592592592593, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.03465304488406795, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.03465304488406795 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4129032258064516, + "acc_stderr": 0.02800913812540039, + "acc_norm": 0.4129032258064516, + "acc_norm_stderr": 0.02800913812540039 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6410256410256411, + "acc_stderr": 
0.03142616993791924, + "acc_norm": 0.6410256410256411, + "acc_norm_stderr": 0.03142616993791924 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.42641509433962266, + "acc_stderr": 0.03043779434298305, + "acc_norm": 0.42641509433962266, + "acc_norm_stderr": 0.03043779434298305 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.44545454545454544, + "acc_stderr": 0.047605488214603246, + "acc_norm": 0.44545454545454544, + "acc_norm_stderr": 0.047605488214603246 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2851851851851852, + "acc_stderr": 0.027528599210340496, + "acc_norm": 0.2851851851851852, + "acc_norm_stderr": 0.027528599210340496 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.037101857261199946, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.037101857261199946 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.582089552238806, + "acc_stderr": 0.034875586404620636, + "acc_norm": 0.582089552238806, + "acc_norm_stderr": 0.034875586404620636 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.36416184971098264, + "acc_stderr": 0.03669072477416906, + "acc_norm": 0.36416184971098264, + "acc_norm_stderr": 0.03669072477416906 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.02326651221373057, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.02326651221373057 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.375, + "acc_stderr": 0.04048439222695598, + "acc_norm": 0.375, + "acc_norm_stderr": 0.04048439222695598 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.62, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.62, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4913294797687861, + 
"acc_stderr": 0.026915047355369804, + "acc_norm": 0.4913294797687861, + "acc_norm_stderr": 0.026915047355369804 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.44785276073619634, + "acc_stderr": 0.03906947479456601, + "acc_norm": 0.44785276073619634, + "acc_norm_stderr": 0.03906947479456601 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.02774431344337654, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.02774431344337654 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.47150259067357514, + "acc_stderr": 0.03602573571288441, + "acc_norm": 0.47150259067357514, + "acc_norm_stderr": 0.03602573571288441 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.040493392977481404, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.040493392977481404 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5100917431192661, + "acc_stderr": 0.02143295620345333, + "acc_norm": 0.5100917431192661, + "acc_norm_stderr": 0.02143295620345333 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.29365079365079366, + "acc_stderr": 0.04073524322147125, + "acc_norm": 0.29365079365079366, + "acc_norm_stderr": 0.04073524322147125 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.42483660130718953, + "acc_stderr": 0.02830457667314112, + "acc_norm": 0.42483660130718953, + "acc_norm_stderr": 0.02830457667314112 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.043913262867240704, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.043913262867240704 + }, + "harness|ko_mmlu_astronomy|5": { 
+ "acc": 0.4144736842105263, + "acc_stderr": 0.04008973785779206, + "acc_norm": 0.4144736842105263, + "acc_norm_stderr": 0.04008973785779206 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3660130718954248, + "acc_stderr": 0.019488025745529675, + "acc_norm": 0.3660130718954248, + "acc_norm_stderr": 0.019488025745529675 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3049645390070922, + "acc_stderr": 0.027464708442022135, + "acc_norm": 0.3049645390070922, + "acc_norm_stderr": 0.027464708442022135 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.24107142857142858, + "acc_stderr": 0.04059867246952687, + "acc_norm": 0.24107142857142858, + "acc_norm_stderr": 0.04059867246952687 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3148148148148148, + "acc_stderr": 0.03167468706828978, + "acc_norm": 0.3148148148148148, + "acc_norm_stderr": 0.03167468706828978 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.44, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4227941176470588, + "acc_stderr": 0.030008562845003476, + "acc_norm": 0.4227941176470588, + "acc_norm_stderr": 0.030008562845003476 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.46530612244897956, + "acc_stderr": 0.03193207024425314, + "acc_norm": 0.46530612244897956, + "acc_norm_stderr": 0.03193207024425314 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5569620253164557, + "acc_stderr": 0.032335327775334835, + "acc_norm": 0.5569620253164557, + 
"acc_norm_stderr": 0.032335327775334835 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3070404172099087, + "acc_stderr": 0.011780959114513778, + "acc_norm": 0.3070404172099087, + "acc_norm_stderr": 0.011780959114513778 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4362745098039216, + "acc_stderr": 0.03480693138457039, + "acc_norm": 0.4362745098039216, + "acc_norm_stderr": 0.03480693138457039 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5212121212121212, + "acc_stderr": 0.03900828913737302, + "acc_norm": 0.5212121212121212, + "acc_norm_stderr": 0.03900828913737302 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.27539779681762544, + "mc1_stderr": 0.015638135667775523, + "mc2": 0.42332436951734187, + "mc2_stderr": 0.014852154991640701 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4852420306965762, + "acc_stderr": 0.017182864434998567, + "acc_norm": 0.5537190082644629, + "acc_norm_stderr": 0.017090852631668336 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + 
"harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "wons/llama2-13b-test-v0.1", + "model_sha": "7d81f655a9450c5b65eeeb3126373d7e08e8186f", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No 
newline at end of file diff --git a/wons/mistral-7B-test-v0.1/result_2023-11-22 14:04:32.json b/wons/mistral-7B-test-v0.1/result_2023-11-22 14:04:32.json new file mode 100644 index 0000000000000000000000000000000000000000..ea7506cbd198e674c3ed8aa7da23ef27d0bd2e0e --- /dev/null +++ b/wons/mistral-7B-test-v0.1/result_2023-11-22 14:04:32.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3455631399317406, + "acc_stderr": 0.013896938461145687, + "acc_norm": 0.39334470989761094, + "acc_norm_stderr": 0.014275101465693028 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3843855805616411, + "acc_stderr": 0.004854555294017561, + "acc_norm": 0.4870543716391157, + "acc_norm_stderr": 0.004988108663179765 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5029239766081871, + "acc_stderr": 0.03834759370936839, + "acc_norm": 0.5029239766081871, + "acc_norm_stderr": 0.03834759370936839 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5533980582524272, + "acc_stderr": 0.04922424153458935, + "acc_norm": 0.5533980582524272, + "acc_norm_stderr": 0.04922424153458935 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4955300127713921, + "acc_stderr": 0.01787924897058439, + "acc_norm": 0.4955300127713921, + "acc_norm_stderr": 0.01787924897058439 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.041716541613545426, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.041716541613545426 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.34893617021276596, + "acc_stderr": 0.031158522131357794, + "acc_norm": 0.34893617021276596, + "acc_norm_stderr": 0.031158522131357794 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4457831325301205, + "acc_stderr": 0.038695433234721015, + "acc_norm": 0.4457831325301205, + "acc_norm_stderr": 
0.038695433234721015 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4887459807073955, + "acc_stderr": 0.028390897396863533, + "acc_norm": 0.4887459807073955, + "acc_norm_stderr": 0.028390897396863533 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.47533632286995514, + "acc_stderr": 0.033516951676526276, + "acc_norm": 0.47533632286995514, + "acc_norm_stderr": 0.033516951676526276 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.48091603053435117, + "acc_stderr": 0.04382094705550988, + "acc_norm": 0.48091603053435117, + "acc_norm_stderr": 0.04382094705550988 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5505050505050505, + "acc_stderr": 0.035441324919479704, + "acc_norm": 0.5505050505050505, + "acc_norm_stderr": 0.035441324919479704 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4689655172413793, + "acc_stderr": 0.04158632762097828, + "acc_norm": 0.4689655172413793, + "acc_norm_stderr": 0.04158632762097828 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.043364327079931785, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.043364327079931785 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5462184873949579, + "acc_stderr": 0.03233943468182088, + "acc_norm": 0.5462184873949579, + "acc_norm_stderr": 0.03233943468182088 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.49230769230769234, + "acc_stderr": 0.025348006031534795, + "acc_norm": 0.49230769230769234, + "acc_norm_stderr": 0.025348006031534795 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + 
"acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.03465304488406796, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.03465304488406796 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5, + "acc_stderr": 0.028444006199428714, + "acc_norm": 0.5, + "acc_norm_stderr": 0.028444006199428714 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7350427350427351, + "acc_stderr": 0.028911208802749465, + "acc_norm": 0.7350427350427351, + "acc_norm_stderr": 0.028911208802749465 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4188679245283019, + "acc_stderr": 0.03036505082911521, + "acc_norm": 0.4188679245283019, + "acc_norm_stderr": 0.03036505082911521 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.44545454545454544, + "acc_stderr": 0.047605488214603246, + "acc_norm": 0.44545454545454544, + "acc_norm_stderr": 0.047605488214603246 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.34444444444444444, + "acc_stderr": 0.02897264888484427, + "acc_norm": 0.34444444444444444, + "acc_norm_stderr": 0.02897264888484427 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.24503311258278146, + "acc_stderr": 0.03511807571804724, + "acc_norm": 0.24503311258278146, + "acc_norm_stderr": 0.03511807571804724 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6119402985074627, + "acc_stderr": 0.03445789964362749, + "acc_norm": 0.6119402985074627, + "acc_norm_stderr": 0.03445789964362749 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.41040462427745666, + "acc_stderr": 0.03750757044895538, + "acc_norm": 0.41040462427745666, + "acc_norm_stderr": 0.03750757044895538 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3862433862433862, + 
"acc_stderr": 0.025075981767601684, + "acc_norm": 0.3862433862433862, + "acc_norm_stderr": 0.025075981767601684 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4236111111111111, + "acc_stderr": 0.04132125019723369, + "acc_norm": 0.4236111111111111, + "acc_norm_stderr": 0.04132125019723369 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237101, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237101 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5057803468208093, + "acc_stderr": 0.026917296179149123, + "acc_norm": 0.5057803468208093, + "acc_norm_stderr": 0.026917296179149123 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5214723926380368, + "acc_stderr": 0.0392474687675113, + "acc_norm": 0.5214723926380368, + "acc_norm_stderr": 0.0392474687675113 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.027777777777777797, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.027777777777777797 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5129533678756477, + "acc_stderr": 0.03607228061047749, + "acc_norm": 0.5129533678756477, + "acc_norm_stderr": 0.03607228061047749 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3157894736842105, + "acc_stderr": 0.04372748290278008, + "acc_norm": 0.3157894736842105, + "acc_norm_stderr": 0.04372748290278008 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.48440366972477067, + "acc_stderr": 0.02142689153920805, + "acc_norm": 0.48440366972477067, + "acc_norm_stderr": 0.02142689153920805 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 
0.38095238095238093, + "acc_stderr": 0.043435254289490965, + "acc_norm": 0.38095238095238093, + "acc_norm_stderr": 0.043435254289490965 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4738562091503268, + "acc_stderr": 0.028590752958852394, + "acc_norm": 0.4738562091503268, + "acc_norm_stderr": 0.028590752958852394 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.44, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.44, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6611570247933884, + "acc_stderr": 0.0432076780753667, + "acc_norm": 0.6611570247933884, + "acc_norm_stderr": 0.0432076780753667 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40789473684210525, + "acc_stderr": 0.03999309712777472, + "acc_norm": 0.40789473684210525, + "acc_norm_stderr": 0.03999309712777472 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.39705882352941174, + "acc_stderr": 0.019794488900024113, + "acc_norm": 0.39705882352941174, + "acc_norm_stderr": 0.019794488900024113 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3617021276595745, + "acc_stderr": 0.028663820147199506, + "acc_norm": 0.3617021276595745, + "acc_norm_stderr": 0.028663820147199506 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4732142857142857, + "acc_stderr": 0.047389751192741546, + "acc_norm": 0.4732142857142857, + "acc_norm_stderr": 0.047389751192741546 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.37962962962962965, + "acc_stderr": 0.03309682581119035, + "acc_norm": 0.37962962962962965, + "acc_norm_stderr": 0.03309682581119035 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.3128491620111732, + "acc_stderr": 0.015506892594647267, + "acc_norm": 0.3128491620111732, + "acc_norm_stderr": 0.015506892594647267 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + 
"harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.63, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.63, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4338235294117647, + "acc_stderr": 0.030105636570016636, + "acc_norm": 0.4338235294117647, + "acc_norm_stderr": 0.030105636570016636 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5346938775510204, + "acc_stderr": 0.03193207024425314, + "acc_norm": 0.5346938775510204, + "acc_norm_stderr": 0.03193207024425314 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6075949367088608, + "acc_stderr": 0.0317847187456473, + "acc_norm": 0.6075949367088608, + "acc_norm_stderr": 0.0317847187456473 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.32920469361147325, + "acc_stderr": 0.012002091666902312, + "acc_norm": 0.32920469361147325, + "acc_norm_stderr": 0.012002091666902312 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4117647058823529, + "acc_stderr": 0.03454236585380609, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.03454236585380609 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.038956580652718446, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.038956580652718446 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2766217870257038, + "mc1_stderr": 0.01565960575532691, + "mc2": 0.45950713329073445, + "mc2_stderr": 0.015528962534409833 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4757969303423849, + "acc_stderr": 0.017170202466520748, + "acc_norm": 0.5029515938606848, + "acc_norm_stderr": 0.017190054580194694 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + 
"harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + 
"harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "wons/mistral-7B-test-v0.1", + "model_sha": "ad71e129d126732f7c2f08bdbf88cdfab5866e45", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/wons/mistral-7B-test-v0.2/result_2023-11-29 03:56:01.json b/wons/mistral-7B-test-v0.2/result_2023-11-29 03:56:01.json new file mode 100644 index 0000000000000000000000000000000000000000..0f6cc40378385337ae49065c02cad8011db15b78 --- /dev/null +++ b/wons/mistral-7B-test-v0.2/result_2023-11-29 03:56:01.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.34044368600682595, + "acc_stderr": 0.01384746051889298, + "acc_norm": 0.3703071672354949, + "acc_norm_stderr": 0.01411129875167495 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3637721569408484, + "acc_stderr": 0.004801009657690444, + "acc_norm": 0.4645488946425015, + "acc_norm_stderr": 0.0049772234853420255 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.45614035087719296, + "acc_stderr": 0.03820042586602966, + "acc_norm": 0.45614035087719296, + "acc_norm_stderr": 0.03820042586602966 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5825242718446602, + "acc_stderr": 0.04882840548212238, + "acc_norm": 0.5825242718446602, + "acc_norm_stderr": 0.04882840548212238 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.47381864623243936, + "acc_stderr": 0.017855434554041975, + "acc_norm": 0.47381864623243936, + "acc_norm_stderr": 
0.017855434554041975 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34814814814814815, + "acc_stderr": 0.041153246103369526, + "acc_norm": 0.34814814814814815, + "acc_norm_stderr": 0.041153246103369526 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.37446808510638296, + "acc_stderr": 0.03163910665367291, + "acc_norm": 0.37446808510638296, + "acc_norm_stderr": 0.03163910665367291 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39759036144578314, + "acc_stderr": 0.038099730845402184, + "acc_norm": 0.39759036144578314, + "acc_norm_stderr": 0.038099730845402184 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4340836012861736, + "acc_stderr": 0.028150232244535597, + "acc_norm": 0.4340836012861736, + "acc_norm_stderr": 0.028150232244535597 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4484304932735426, + "acc_stderr": 0.03337883736255098, + "acc_norm": 0.4484304932735426, + "acc_norm_stderr": 0.03337883736255098 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.45038167938931295, + "acc_stderr": 0.04363643698524779, + "acc_norm": 0.45038167938931295, + "acc_norm_stderr": 0.04363643698524779 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5404040404040404, + "acc_stderr": 0.035507024651313425, + "acc_norm": 0.5404040404040404, + "acc_norm_stderr": 0.035507024651313425 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.503448275862069, + "acc_stderr": 0.04166567577101579, + "acc_norm": 0.503448275862069, + "acc_norm_stderr": 0.04166567577101579 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.17647058823529413, + "acc_stderr": 0.03793281185307809, + "acc_norm": 0.17647058823529413, + 
"acc_norm_stderr": 0.03793281185307809 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5168067226890757, + "acc_stderr": 0.03246013680375308, + "acc_norm": 0.5168067226890757, + "acc_norm_stderr": 0.03246013680375308 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.49230769230769234, + "acc_stderr": 0.0253480060315348, + "acc_norm": 0.49230769230769234, + "acc_norm_stderr": 0.0253480060315348 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.57, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.57, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5370370370370371, + "acc_stderr": 0.04820403072760627, + "acc_norm": 0.5370370370370371, + "acc_norm_stderr": 0.04820403072760627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.37438423645320196, + "acc_stderr": 0.03405155380561952, + "acc_norm": 0.37438423645320196, + "acc_norm_stderr": 0.03405155380561952 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4483870967741935, + "acc_stderr": 0.028292056830112735, + "acc_norm": 0.4483870967741935, + "acc_norm_stderr": 0.028292056830112735 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7521367521367521, + "acc_stderr": 0.02828632407556441, + "acc_norm": 0.7521367521367521, + "acc_norm_stderr": 0.02828632407556441 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4830188679245283, + "acc_stderr": 0.030755120364119898, + "acc_norm": 0.4830188679245283, + "acc_norm_stderr": 0.030755120364119898 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4909090909090909, + "acc_stderr": 0.04788339768702861, + "acc_norm": 0.4909090909090909, + "acc_norm_stderr": 0.04788339768702861 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.35185185185185186, + "acc_stderr": 
0.029116617606083004, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.029116617606083004 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + "acc_stderr": 0.03734535676787198, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.03734535676787198 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6318407960199005, + "acc_stderr": 0.03410410565495302, + "acc_norm": 0.6318407960199005, + "acc_norm_stderr": 0.03410410565495302 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3930635838150289, + "acc_stderr": 0.037242495958177295, + "acc_norm": 0.3930635838150289, + "acc_norm_stderr": 0.037242495958177295 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.36507936507936506, + "acc_stderr": 0.024796060602699947, + "acc_norm": 0.36507936507936506, + "acc_norm_stderr": 0.024796060602699947 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.040166600304512336, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.040166600304512336 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.35, + "acc_stderr": 0.04793724854411019, + "acc_norm": 0.35, + "acc_norm_stderr": 0.04793724854411019 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.56, + "acc_stderr": 0.049888765156985884, + "acc_norm": 0.56, + "acc_norm_stderr": 0.049888765156985884 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4913294797687861, + "acc_stderr": 0.02691504735536981, + "acc_norm": 0.4913294797687861, + "acc_norm_stderr": 0.02691504735536981 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.44785276073619634, + "acc_stderr": 0.03906947479456601, + "acc_norm": 0.44785276073619634, + "acc_norm_stderr": 0.03906947479456601 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4876543209876543, + "acc_stderr": 0.027812262269327242, + "acc_norm": 0.4876543209876543, + "acc_norm_stderr": 0.027812262269327242 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + 
"acc_stderr": 0.047609522856952344, + "acc_norm": 0.34, + "acc_norm_stderr": 0.047609522856952344 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5025906735751295, + "acc_stderr": 0.03608390745384487, + "acc_norm": 0.5025906735751295, + "acc_norm_stderr": 0.03608390745384487 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.30701754385964913, + "acc_stderr": 0.043391383225798594, + "acc_norm": 0.30701754385964913, + "acc_norm_stderr": 0.043391383225798594 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.4954128440366973, + "acc_stderr": 0.021436420955529424, + "acc_norm": 0.4954128440366973, + "acc_norm_stderr": 0.021436420955529424 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.044444444444444495, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.044444444444444495 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.477124183006536, + "acc_stderr": 0.028599936776089786, + "acc_norm": 0.477124183006536, + "acc_norm_stderr": 0.028599936776089786 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.42, + "acc_stderr": 0.04960449637488584, + "acc_norm": 0.42, + "acc_norm_stderr": 0.04960449637488584 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.628099173553719, + "acc_stderr": 0.04412015806624504, + "acc_norm": 0.628099173553719, + "acc_norm_stderr": 0.04412015806624504 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40789473684210525, + "acc_stderr": 0.03999309712777471, + "acc_norm": 0.40789473684210525, + "acc_norm_stderr": 0.03999309712777471 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3790849673202614, + "acc_stderr": 0.019627444748412236, + "acc_norm": 0.3790849673202614, + "acc_norm_stderr": 0.019627444748412236 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3617021276595745, + "acc_stderr": 0.028663820147199492, + "acc_norm": 0.3617021276595745, + "acc_norm_stderr": 0.028663820147199492 + }, + 
"harness|ko_mmlu_machine_learning|5": { + "acc": 0.4017857142857143, + "acc_stderr": 0.04653333146973646, + "acc_norm": 0.4017857142857143, + "acc_norm_stderr": 0.04653333146973646 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.032757734861009996, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.032757734861009996 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27039106145251396, + "acc_stderr": 0.01485499393801009, + "acc_norm": 0.27039106145251396, + "acc_norm_stderr": 0.01485499393801009 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.62, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.62, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.39338235294117646, + "acc_stderr": 0.02967428828131118, + "acc_norm": 0.39338235294117646, + "acc_norm_stderr": 0.02967428828131118 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5142857142857142, + "acc_stderr": 0.031996152328062855, + "acc_norm": 0.5142857142857142, + "acc_norm_stderr": 0.031996152328062855 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5864978902953587, + "acc_stderr": 0.03205649904851858, + "acc_norm": 0.5864978902953587, + "acc_norm_stderr": 0.03205649904851858 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3344198174706649, + "acc_stderr": 0.012049668983214933, + "acc_norm": 0.3344198174706649, + "acc_norm_stderr": 0.012049668983214933 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4264705882352941, + "acc_stderr": 0.034711579079534254, + "acc_norm": 0.4264705882352941, + "acc_norm_stderr": 0.034711579079534254 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.37575757575757573, + "acc_stderr": 
0.037818873532059816, + "acc_norm": 0.37575757575757573, + "acc_norm_stderr": 0.037818873532059816 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.30966952264381886, + "mc1_stderr": 0.016185744355144898, + "mc2": 0.4777339871786822, + "mc2_stderr": 0.015453835300523385 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4037780401416765, + "acc_stderr": 0.01686903154029863, + "acc_norm": 0.4332939787485242, + "acc_norm_stderr": 0.017036683641893098 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + 
"harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "wons/mistral-7B-test-v0.2", + "model_sha": "fa2a9ef5ec5670fa4bb3f590f1d08995ea498d24", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/wons/mistral-7B-test-v0.3/result_2023-11-29 09:06:39.json b/wons/mistral-7B-test-v0.3/result_2023-11-29 09:06:39.json new file mode 100644 index 0000000000000000000000000000000000000000..5dc097d3766937d4c882fc5f6de2fdb25cb2c184 --- /dev/null +++ b/wons/mistral-7B-test-v0.3/result_2023-11-29 09:06:39.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.32849829351535836, + "acc_stderr": 0.013724978465537368, + "acc_norm": 0.378839590443686, + 
"acc_norm_stderr": 0.014175915490000322 + }, + "harness|ko_hellaswag|10": { + "acc": 0.35929097789285, + "acc_stderr": 0.004788120727316244, + "acc_norm": 0.4631547500497909, + "acc_norm_stderr": 0.0049762149894835035 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.47368421052631576, + "acc_stderr": 0.038295098689947286, + "acc_norm": 0.47368421052631576, + "acc_norm_stderr": 0.038295098689947286 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5631067961165048, + "acc_stderr": 0.049111471073657764, + "acc_norm": 0.5631067961165048, + "acc_norm_stderr": 0.049111471073657764 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.45977011494252873, + "acc_stderr": 0.01782199409693353, + "acc_norm": 0.45977011494252873, + "acc_norm_stderr": 0.01782199409693353 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.31851851851851853, + "acc_stderr": 0.040247784019771096, + "acc_norm": 0.31851851851851853, + "acc_norm_stderr": 0.040247784019771096 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4297872340425532, + "acc_stderr": 0.03236214467715564, + "acc_norm": 0.4297872340425532, + "acc_norm_stderr": 0.03236214467715564 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3795180722891566, + "acc_stderr": 0.037777988227480165, + "acc_norm": 0.3795180722891566, + "acc_norm_stderr": 0.037777988227480165 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4533762057877814, + "acc_stderr": 0.028274359854894248, + "acc_norm": 0.4533762057877814, + "acc_norm_stderr": 0.028274359854894248 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.484304932735426, + "acc_stderr": 0.0335412657542081, + "acc_norm": 0.484304932735426, + "acc_norm_stderr": 0.0335412657542081 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.45038167938931295, + "acc_stderr": 0.04363643698524779, + "acc_norm": 
0.45038167938931295, + "acc_norm_stderr": 0.04363643698524779 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5303030303030303, + "acc_stderr": 0.0355580405176393, + "acc_norm": 0.5303030303030303, + "acc_norm_stderr": 0.0355580405176393 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.496551724137931, + "acc_stderr": 0.04166567577101579, + "acc_norm": 0.496551724137931, + "acc_norm_stderr": 0.04166567577101579 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.040925639582376536, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.040925639582376536 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5126050420168067, + "acc_stderr": 0.032468167657521745, + "acc_norm": 0.5126050420168067, + "acc_norm_stderr": 0.032468167657521745 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4794871794871795, + "acc_stderr": 0.025329663163489943, + "acc_norm": 0.4794871794871795, + "acc_norm_stderr": 0.025329663163489943 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.64, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.64, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5462962962962963, + "acc_stderr": 0.04812917324536823, + "acc_norm": 0.5462962962962963, + "acc_norm_stderr": 0.04812917324536823 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3793103448275862, + "acc_stderr": 0.034139638059062345, + "acc_norm": 0.3793103448275862, + "acc_norm_stderr": 0.034139638059062345 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.45483870967741935, + "acc_stderr": 
0.02832774309156106, + "acc_norm": 0.45483870967741935, + "acc_norm_stderr": 0.02832774309156106 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7435897435897436, + "acc_stderr": 0.028605953702004253, + "acc_norm": 0.7435897435897436, + "acc_norm_stderr": 0.028605953702004253 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.43018867924528303, + "acc_stderr": 0.03047144586718323, + "acc_norm": 0.43018867924528303, + "acc_norm_stderr": 0.03047144586718323 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4818181818181818, + "acc_stderr": 0.04785964010794916, + "acc_norm": 0.4818181818181818, + "acc_norm_stderr": 0.04785964010794916 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.36666666666666664, + "acc_stderr": 0.029381620726465066, + "acc_norm": 0.36666666666666664, + "acc_norm_stderr": 0.029381620726465066 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.03757949922943342, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.03757949922943342 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6417910447761194, + "acc_stderr": 0.03390393042268814, + "acc_norm": 0.6417910447761194, + "acc_norm_stderr": 0.03390393042268814 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3583815028901734, + "acc_stderr": 0.03656343653353159, + "acc_norm": 0.3583815028901734, + "acc_norm_stderr": 0.03656343653353159 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.335978835978836, + "acc_stderr": 0.024326310529149138, + "acc_norm": 0.335978835978836, + "acc_norm_stderr": 0.024326310529149138 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.04122728707651281, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.04122728707651281 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145633 + }, + 
"harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.47109826589595377, + "acc_stderr": 0.026874085883518348, + "acc_norm": 0.47109826589595377, + "acc_norm_stderr": 0.026874085883518348 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4723926380368098, + "acc_stderr": 0.0392237829061099, + "acc_norm": 0.4723926380368098, + "acc_norm_stderr": 0.0392237829061099 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.45987654320987653, + "acc_stderr": 0.027731022753539284, + "acc_norm": 0.45987654320987653, + "acc_norm_stderr": 0.027731022753539284 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5233160621761658, + "acc_stderr": 0.03604513672442202, + "acc_norm": 0.5233160621761658, + "acc_norm_stderr": 0.03604513672442202 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.044346007015849245, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.044346007015849245 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.48807339449541287, + "acc_stderr": 0.021431223617362227, + "acc_norm": 0.48807339449541287, + "acc_norm_stderr": 0.021431223617362227 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4126984126984127, + "acc_stderr": 0.04403438954768177, + "acc_norm": 0.4126984126984127, + "acc_norm_stderr": 0.04403438954768177 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4673202614379085, + "acc_stderr": 0.02856869975222588, + "acc_norm": 0.4673202614379085, + "acc_norm_stderr": 0.02856869975222588 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620333 + }, 
+ "harness|ko_mmlu_international_law|5": { + "acc": 0.6198347107438017, + "acc_stderr": 0.04431324501968431, + "acc_norm": 0.6198347107438017, + "acc_norm_stderr": 0.04431324501968431 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3815789473684211, + "acc_stderr": 0.03953173377749194, + "acc_norm": 0.3815789473684211, + "acc_norm_stderr": 0.03953173377749194 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.369281045751634, + "acc_stderr": 0.01952431674486635, + "acc_norm": 0.369281045751634, + "acc_norm_stderr": 0.01952431674486635 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.35815602836879434, + "acc_stderr": 0.028602085862759415, + "acc_norm": 0.35815602836879434, + "acc_norm_stderr": 0.028602085862759415 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.41964285714285715, + "acc_stderr": 0.04684099321077106, + "acc_norm": 0.41964285714285715, + "acc_norm_stderr": 0.04684099321077106 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.032757734861009996, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.032757734861009996 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.26145251396648045, + "acc_stderr": 0.014696599650364555, + "acc_norm": 0.26145251396648045, + "acc_norm_stderr": 0.014696599650364555 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.62, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.62, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.39338235294117646, + "acc_stderr": 0.029674288281311172, + "acc_norm": 0.39338235294117646, + "acc_norm_stderr": 0.029674288281311172 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.49795918367346936, + "acc_stderr": 0.0320089533497105, + "acc_norm": 
0.49795918367346936, + "acc_norm_stderr": 0.0320089533497105 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6033755274261603, + "acc_stderr": 0.031843998738112236, + "acc_norm": 0.6033755274261603, + "acc_norm_stderr": 0.031843998738112236 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.34810951760104303, + "acc_stderr": 0.012166738993698191, + "acc_norm": 0.34810951760104303, + "acc_norm_stderr": 0.012166738993698191 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4019607843137255, + "acc_stderr": 0.03441190023482465, + "acc_norm": 0.4019607843137255, + "acc_norm_stderr": 0.03441190023482465 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4303030303030303, + "acc_stderr": 0.03866225962879077, + "acc_norm": 0.4303030303030303, + "acc_norm_stderr": 0.03866225962879077 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.33047735618115054, + "mc1_stderr": 0.016466769613698293, + "mc2": 0.5077406474283724, + "mc2_stderr": 0.015633659057840248 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3990554899645809, + "acc_stderr": 0.016836377292849296, + "acc_norm": 0.4474616292798111, + "acc_norm_stderr": 0.01709519030150058 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + 
"harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "wons/mistral-7B-test-v0.3", + "model_sha": 
"899f2f796d3cb956b29a0a6a7463a912bd6f8367", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/wons/tigerbot-13b-test-v0_1/result_2023-12-01 04:57:49.json b/wons/tigerbot-13b-test-v0_1/result_2023-12-01 04:57:49.json new file mode 100644 index 0000000000000000000000000000000000000000..ec6b127a54cbf02089b7d105abbf8d367bf5ed5c --- /dev/null +++ b/wons/tigerbot-13b-test-v0_1/result_2023-12-01 04:57:49.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.24744027303754265, + "acc_stderr": 0.01261035266329267, + "acc_norm": 0.30119453924914674, + "acc_norm_stderr": 0.013406741767847626 + }, + "harness|ko_hellaswag|10": { + "acc": 0.32085241983668594, + "acc_stderr": 0.0046585016622776206, + "acc_norm": 0.3835889265086636, + "acc_norm_stderr": 0.0048526588767753825 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.3391812865497076, + "acc_stderr": 0.036310534964889056, + "acc_norm": 0.3391812865497076, + "acc_norm_stderr": 0.036310534964889056 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.42718446601941745, + "acc_stderr": 0.04897957737781168, + "acc_norm": 0.42718446601941745, + "acc_norm_stderr": 0.04897957737781168 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.3933588761174968, + "acc_stderr": 0.01746855672450314, + "acc_norm": 0.3933588761174968, + "acc_norm_stderr": 0.01746855672450314 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.37037037037037035, + "acc_stderr": 0.041716541613545426, + "acc_norm": 0.37037037037037035, + "acc_norm_stderr": 0.041716541613545426 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.32340425531914896, + "acc_stderr": 0.030579442773610337, + "acc_norm": 
0.32340425531914896, + "acc_norm_stderr": 0.030579442773610337 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3373493975903614, + "acc_stderr": 0.03680783690727581, + "acc_norm": 0.3373493975903614, + "acc_norm_stderr": 0.03680783690727581 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.36012861736334406, + "acc_stderr": 0.027264297599804015, + "acc_norm": 0.36012861736334406, + "acc_norm_stderr": 0.027264297599804015 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.32286995515695066, + "acc_stderr": 0.031381476375754995, + "acc_norm": 0.32286995515695066, + "acc_norm_stderr": 0.031381476375754995 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4122137404580153, + "acc_stderr": 0.04317171194870255, + "acc_norm": 0.4122137404580153, + "acc_norm_stderr": 0.04317171194870255 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3838383838383838, + "acc_stderr": 0.03464881675016339, + "acc_norm": 0.3838383838383838, + "acc_norm_stderr": 0.03464881675016339 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.32413793103448274, + "acc_stderr": 0.03900432069185555, + "acc_norm": 0.32413793103448274, + "acc_norm_stderr": 0.03900432069185555 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.1568627450980392, + "acc_stderr": 0.036186648199362466, + "acc_norm": 0.1568627450980392, + "acc_norm_stderr": 0.036186648199362466 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4327731092436975, + "acc_stderr": 0.03218358107742613, + "acc_norm": 0.4327731092436975, + "acc_norm_stderr": 0.03218358107742613 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4076923076923077, + "acc_stderr": 0.02491524398598784, + "acc_norm": 0.4076923076923077, + "acc_norm_stderr": 0.02491524398598784 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.34, + 
"acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.37962962962962965, + "acc_stderr": 0.04691521224077742, + "acc_norm": 0.37962962962962965, + "acc_norm_stderr": 0.04691521224077742 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3694581280788177, + "acc_stderr": 0.03395970381998575, + "acc_norm": 0.3694581280788177, + "acc_norm_stderr": 0.03395970381998575 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.3967741935483871, + "acc_stderr": 0.027831231605767948, + "acc_norm": 0.3967741935483871, + "acc_norm_stderr": 0.027831231605767948 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5170940170940171, + "acc_stderr": 0.032736940493481824, + "acc_norm": 0.5170940170940171, + "acc_norm_stderr": 0.032736940493481824 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3471698113207547, + "acc_stderr": 0.02930010170554966, + "acc_norm": 0.3471698113207547, + "acc_norm_stderr": 0.02930010170554966 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.41818181818181815, + "acc_stderr": 0.04724577405731571, + "acc_norm": 0.41818181818181815, + "acc_norm_stderr": 0.04724577405731571 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.026962424325073828, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.026962424325073828 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2781456953642384, + "acc_stderr": 0.03658603262763744, + "acc_norm": 0.2781456953642384, + "acc_norm_stderr": 0.03658603262763744 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5472636815920398, + "acc_stderr": 0.03519702717576915, + "acc_norm": 0.5472636815920398, + "acc_norm_stderr": 0.03519702717576915 + }, + 
"harness|ko_mmlu_college_medicine|5": { + "acc": 0.36416184971098264, + "acc_stderr": 0.03669072477416906, + "acc_norm": 0.36416184971098264, + "acc_norm_stderr": 0.03669072477416906 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30158730158730157, + "acc_stderr": 0.023636975996101806, + "acc_norm": 0.30158730158730157, + "acc_norm_stderr": 0.023636975996101806 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.25, + "acc_stderr": 0.03621034121889507, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03621034121889507 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.3901734104046243, + "acc_stderr": 0.026261677607806653, + "acc_norm": 0.3901734104046243, + "acc_norm_stderr": 0.026261677607806653 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3496932515337423, + "acc_stderr": 0.03746668325470022, + "acc_norm": 0.3496932515337423, + "acc_norm_stderr": 0.03746668325470022 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.027339546640662737, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.027339546640662737 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.42487046632124353, + "acc_stderr": 0.03567471335212541, + "acc_norm": 0.42487046632124353, + "acc_norm_stderr": 0.03567471335212541 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.20175438596491227, + "acc_stderr": 0.03775205013583639, + "acc_norm": 0.20175438596491227, + "acc_norm_stderr": 0.03775205013583639 + }, + 
"harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3541284403669725, + "acc_stderr": 0.020504729013829107, + "acc_norm": 0.3541284403669725, + "acc_norm_stderr": 0.020504729013829107 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.03932537680392871, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.03932537680392871 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.37254901960784315, + "acc_stderr": 0.027684181883302895, + "acc_norm": 0.37254901960784315, + "acc_norm_stderr": 0.027684181883302895 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6033057851239669, + "acc_stderr": 0.04465869780531009, + "acc_norm": 0.6033057851239669, + "acc_norm_stderr": 0.04465869780531009 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.39473684210526316, + "acc_stderr": 0.039777499346220734, + "acc_norm": 0.39473684210526316, + "acc_norm_stderr": 0.039777499346220734 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2957516339869281, + "acc_stderr": 0.01846315413263281, + "acc_norm": 0.2957516339869281, + "acc_norm_stderr": 0.01846315413263281 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3191489361702128, + "acc_stderr": 0.027807990141320207, + "acc_norm": 0.3191489361702128, + "acc_norm_stderr": 0.027807990141320207 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.22321428571428573, + "acc_stderr": 0.039523019677025116, + "acc_norm": 0.22321428571428573, + "acc_norm_stderr": 0.039523019677025116 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.33796296296296297, + "acc_stderr": 0.03225941352631295, + "acc_norm": 0.33796296296296297, + "acc_norm_stderr": 0.03225941352631295 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2558659217877095, + "acc_stderr": 0.014593620923210746, + "acc_norm": 
0.2558659217877095, + "acc_norm_stderr": 0.014593620923210746 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.20220588235294118, + "acc_stderr": 0.024398192986654924, + "acc_norm": 0.20220588235294118, + "acc_norm_stderr": 0.024398192986654924 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.2693877551020408, + "acc_stderr": 0.02840125202902294, + "acc_norm": 0.2693877551020408, + "acc_norm_stderr": 0.02840125202902294 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.25738396624472576, + "acc_stderr": 0.028458820991460295, + "acc_norm": 0.25738396624472576, + "acc_norm_stderr": 0.028458820991460295 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2561929595827901, + "acc_stderr": 0.01114917315311058, + "acc_norm": 0.2561929595827901, + "acc_norm_stderr": 0.01114917315311058 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.031321798030832904, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.031321798030832904 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.032250781083062896, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.032250781083062896 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.31946144430844553, + "mc1_stderr": 0.016322644182960498, + "mc2": 0.4847809791543606, + "mc2_stderr": 0.015949221320086037 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3246753246753247, + "acc_stderr": 0.016098883939346463, + "acc_norm": 0.4132231404958678, + "acc_norm_stderr": 0.016929480234495226 + } + }, + "versions": { + "all": 0, + 
"harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + 
"harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "wons/tigerbot-13b-test-v0_1", + "model_sha": "17a0e2d598004af5f685811bc1ef9ee980e56ee6", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/x2bee/POLAR-14B-DPO-v1.3/result_2024-05-23 11:59:50.json b/x2bee/POLAR-14B-DPO-v1.3/result_2024-05-23 11:59:50.json new file mode 100644 index 0000000000000000000000000000000000000000..efff09e14b9b9f4ea2f7ba0a0be76b1fe46c8632 --- /dev/null +++ b/x2bee/POLAR-14B-DPO-v1.3/result_2024-05-23 11:59:50.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.7465870307167235, + "acc_stderr": 0.012710896778378604, + "acc_norm": 0.7807167235494881, + "acc_norm_stderr": 0.012091245787615728 + }, + "harness|ko_hellaswag|10": { + "acc": 0.6385182234614618, + "acc_stderr": 0.004794478426382617, + "acc_norm": 0.7561242780322645, + "acc_norm_stderr": 0.004285410130466119 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6900584795321637, + "acc_stderr": 0.035469769593931624, + "acc_norm": 0.6900584795321637, + "acc_norm_stderr": 0.035469769593931624 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6601941747572816, + 
"acc_stderr": 0.046897659372781335, + "acc_norm": 0.6601941747572816, + "acc_norm_stderr": 0.046897659372781335 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6845466155810983, + "acc_stderr": 0.016617501738763408, + "acc_norm": 0.6845466155810983, + "acc_norm_stderr": 0.016617501738763408 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.04316378599511324, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.04316378599511324 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.46808510638297873, + "acc_stderr": 0.03261936918467383, + "acc_norm": 0.46808510638297873, + "acc_norm_stderr": 0.03261936918467383 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4759036144578313, + "acc_stderr": 0.03887971849597264, + "acc_norm": 0.4759036144578313, + "acc_norm_stderr": 0.03887971849597264 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6334405144694534, + "acc_stderr": 0.02736807824397163, + "acc_norm": 0.6334405144694534, + "acc_norm_stderr": 0.02736807824397163 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.6681614349775785, + "acc_stderr": 0.03160295143776679, + "acc_norm": 0.6681614349775785, + "acc_norm_stderr": 0.03160295143776679 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6030534351145038, + "acc_stderr": 0.04291135671009224, + "acc_norm": 0.6030534351145038, + "acc_norm_stderr": 0.04291135671009224 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7222222222222222, + "acc_stderr": 0.03191178226713547, + "acc_norm": 0.7222222222222222, + "acc_norm_stderr": 0.03191178226713547 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 
0.47586206896551725, + "acc_stderr": 0.041618085035015295, + "acc_norm": 0.47586206896551725, + "acc_norm_stderr": 0.041618085035015295 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.04336432707993178, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.04336432707993178 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.592436974789916, + "acc_stderr": 0.031918633744784666, + "acc_norm": 0.592436974789916, + "acc_norm_stderr": 0.031918633744784666 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5948717948717949, + "acc_stderr": 0.024890471769938142, + "acc_norm": 0.5948717948717949, + "acc_norm_stderr": 0.024890471769938142 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.66, + "acc_stderr": 0.04760952285695237, + "acc_norm": 0.66, + "acc_norm_stderr": 0.04760952285695237 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384739, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384739 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6388888888888888, + "acc_stderr": 0.04643454608906275, + "acc_norm": 0.6388888888888888, + "acc_norm_stderr": 0.04643454608906275 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4433497536945813, + "acc_stderr": 0.034953345821629345, + "acc_norm": 0.4433497536945813, + "acc_norm_stderr": 0.034953345821629345 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5806451612903226, + "acc_stderr": 0.028071588901091838, + "acc_norm": 0.5806451612903226, + "acc_norm_stderr": 0.028071588901091838 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.811965811965812, + "acc_stderr": 0.025598193686652254, + "acc_norm": 0.811965811965812, + "acc_norm_stderr": 0.025598193686652254 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5169811320754717, + "acc_stderr": 0.030755120364119898, + "acc_norm": 0.5169811320754717, + "acc_norm_stderr": 0.030755120364119898 + }, + 
"harness|ko_mmlu_public_relations|5": { + "acc": 0.5818181818181818, + "acc_stderr": 0.04724577405731573, + "acc_norm": 0.5818181818181818, + "acc_norm_stderr": 0.04724577405731573 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.029723278961476664, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.029723278961476664 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3708609271523179, + "acc_stderr": 0.03943966699183629, + "acc_norm": 0.3708609271523179, + "acc_norm_stderr": 0.03943966699183629 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.033333333333333326, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.033333333333333326 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.47398843930635837, + "acc_stderr": 0.038073017265045125, + "acc_norm": 0.47398843930635837, + "acc_norm_stderr": 0.038073017265045125 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.42328042328042326, + "acc_stderr": 0.025446365634406793, + "acc_norm": 0.42328042328042326, + "acc_norm_stderr": 0.025446365634406793 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5625, + "acc_stderr": 0.04148415739394154, + "acc_norm": 0.5625, + "acc_norm_stderr": 0.04148415739394154 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.78, + "acc_stderr": 0.04163331998932263, + "acc_norm": 0.78, + "acc_norm_stderr": 0.04163331998932263 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5491329479768786, + "acc_stderr": 0.026788811931562767, + "acc_norm": 0.5491329479768786, + "acc_norm_stderr": 0.026788811931562767 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.6319018404907976, + "acc_stderr": 0.03789213935838396, + "acc_norm": 0.6319018404907976, + "acc_norm_stderr": 
0.03789213935838396 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5925925925925926, + "acc_stderr": 0.02733954664066273, + "acc_norm": 0.5925925925925926, + "acc_norm_stderr": 0.02733954664066273 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7668393782383419, + "acc_stderr": 0.03051611137147601, + "acc_norm": 0.7668393782383419, + "acc_norm_stderr": 0.03051611137147601 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.4473684210526316, + "acc_stderr": 0.046774730044912, + "acc_norm": 0.4473684210526316, + "acc_norm_stderr": 0.046774730044912 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.726605504587156, + "acc_stderr": 0.01910929984609827, + "acc_norm": 0.726605504587156, + "acc_norm_stderr": 0.01910929984609827 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3968253968253968, + "acc_stderr": 0.04375888492727061, + "acc_norm": 0.3968253968253968, + "acc_norm_stderr": 0.04375888492727061 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.6078431372549019, + "acc_stderr": 0.027956046165424516, + "acc_norm": 0.6078431372549019, + "acc_norm_stderr": 0.027956046165424516 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.55, + "acc_stderr": 0.05, + "acc_norm": 0.55, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6942148760330579, + "acc_stderr": 0.04205953933884122, + "acc_norm": 0.6942148760330579, + "acc_norm_stderr": 0.04205953933884122 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.618421052631579, + "acc_stderr": 0.03953173377749194, + "acc_norm": 0.618421052631579, + "acc_norm_stderr": 0.03953173377749194 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5669934640522876, + "acc_stderr": 0.02004544247332422, + "acc_norm": 0.5669934640522876, + "acc_norm_stderr": 
0.02004544247332422 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.4219858156028369, + "acc_stderr": 0.029462189233370586, + "acc_norm": 0.4219858156028369, + "acc_norm_stderr": 0.029462189233370586 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.5089285714285714, + "acc_stderr": 0.04745033255489123, + "acc_norm": 0.5089285714285714, + "acc_norm_stderr": 0.04745033255489123 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4351851851851852, + "acc_stderr": 0.03381200005643526, + "acc_norm": 0.4351851851851852, + "acc_norm_stderr": 0.03381200005643526 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.3787709497206704, + "acc_stderr": 0.016223533510365117, + "acc_norm": 0.3787709497206704, + "acc_norm_stderr": 0.016223533510365117 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.66, + "acc_stderr": 0.04760952285695238, + "acc_norm": 0.66, + "acc_norm_stderr": 0.04760952285695238 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.48161764705882354, + "acc_stderr": 0.03035230339535196, + "acc_norm": 0.48161764705882354, + "acc_norm_stderr": 0.03035230339535196 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6448979591836734, + "acc_stderr": 0.030635655150387634, + "acc_norm": 0.6448979591836734, + "acc_norm_stderr": 0.030635655150387634 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.729957805907173, + "acc_stderr": 0.028900721906293426, + "acc_norm": 0.729957805907173, + "acc_norm_stderr": 0.028900721906293426 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.41460234680573665, + "acc_stderr": 0.012582597058908284, + "acc_norm": 0.41460234680573665, + "acc_norm_stderr": 0.012582597058908284 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6421568627450981, + "acc_stderr": 
0.03364487286088298, + "acc_norm": 0.6421568627450981, + "acc_norm_stderr": 0.03364487286088298 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6181818181818182, + "acc_stderr": 0.03793713171165635, + "acc_norm": 0.6181818181818182, + "acc_norm_stderr": 0.03793713171165635 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.6328029375764994, + "mc1_stderr": 0.01687480500145318, + "mc2": 0.7522925779273922, + "mc2_stderr": 0.014568927682929578 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.45218417945690675, + "acc_stderr": 0.017111567130916785, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.017119172208061504 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 
1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "x2bee/POLAR-14B-DPO-v1.3", + "model_sha": "337edbed4c86db2da27e3b0e07086134f8d27a09", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/x2bee/POLAR-14B-DPO-v1.4/result_2024-05-27 15:02:47.json b/x2bee/POLAR-14B-DPO-v1.4/result_2024-05-27 15:02:47.json new file mode 100644 index 0000000000000000000000000000000000000000..86ee29fe5edeb9174ae673e53ded8722151465a9 --- /dev/null +++ b/x2bee/POLAR-14B-DPO-v1.4/result_2024-05-27 15:02:47.json @@ -0,0 
+1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.7363481228668942, + "acc_stderr": 0.012875929151297058, + "acc_norm": 0.7491467576791809, + "acc_norm_stderr": 0.012668198621315433 + }, + "harness|ko_hellaswag|10": { + "acc": 0.7228639713204541, + "acc_stderr": 0.004466695023677848, + "acc_norm": 0.7422824138617805, + "acc_norm_stderr": 0.004364838000335614 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6140350877192983, + "acc_stderr": 0.03733756969066164, + "acc_norm": 0.6140350877192983, + "acc_norm_stderr": 0.03733756969066164 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6893203883495146, + "acc_stderr": 0.045821241601615506, + "acc_norm": 0.6893203883495146, + "acc_norm_stderr": 0.045821241601615506 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6526181353767561, + "acc_stderr": 0.017026671748655728, + "acc_norm": 0.6526181353767561, + "acc_norm_stderr": 0.017026671748655728 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.5037037037037037, + "acc_stderr": 0.043192236258113324, + "acc_norm": 0.5037037037037037, + "acc_norm_stderr": 0.043192236258113324 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.451063829787234, + "acc_stderr": 0.032529096196131965, + "acc_norm": 0.451063829787234, + "acc_norm_stderr": 0.032529096196131965 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4939759036144578, + "acc_stderr": 0.03892212195333045, + "acc_norm": 0.4939759036144578, + "acc_norm_stderr": 0.03892212195333045 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5852090032154341, + "acc_stderr": 0.02798268045975956, + "acc_norm": 0.5852090032154341, + "acc_norm_stderr": 0.02798268045975956 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.6412556053811659, + "acc_stderr": 0.032190792004199956, + "acc_norm": 0.6412556053811659, + "acc_norm_stderr": 
0.032190792004199956 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5954198473282443, + "acc_stderr": 0.043046937953806645, + "acc_norm": 0.5954198473282443, + "acc_norm_stderr": 0.043046937953806645 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6616161616161617, + "acc_stderr": 0.033711241426263014, + "acc_norm": 0.6616161616161617, + "acc_norm_stderr": 0.033711241426263014 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4827586206896552, + "acc_stderr": 0.041641887201693775, + "acc_norm": 0.4827586206896552, + "acc_norm_stderr": 0.041641887201693775 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.04336432707993178, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.04336432707993178 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5882352941176471, + "acc_stderr": 0.031968769891957786, + "acc_norm": 0.5882352941176471, + "acc_norm_stderr": 0.031968769891957786 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.6025641025641025, + "acc_stderr": 0.024811920017903836, + "acc_norm": 0.6025641025641025, + "acc_norm_stderr": 0.024811920017903836 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.66, + "acc_stderr": 0.04760952285695237, + "acc_norm": 0.66, + "acc_norm_stderr": 0.04760952285695237 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5925925925925926, + "acc_stderr": 0.04750077341199984, + "acc_norm": 0.5925925925925926, + "acc_norm_stderr": 0.04750077341199984 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.43842364532019706, + "acc_stderr": 0.03491207857486518, + "acc_norm": 
0.43842364532019706, + "acc_norm_stderr": 0.03491207857486518 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.567741935483871, + "acc_stderr": 0.028181739720019413, + "acc_norm": 0.567741935483871, + "acc_norm_stderr": 0.028181739720019413 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7948717948717948, + "acc_stderr": 0.026453508054040356, + "acc_norm": 0.7948717948717948, + "acc_norm_stderr": 0.026453508054040356 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5169811320754717, + "acc_stderr": 0.030755120364119905, + "acc_norm": 0.5169811320754717, + "acc_norm_stderr": 0.030755120364119905 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5727272727272728, + "acc_stderr": 0.047381987035454834, + "acc_norm": 0.5727272727272728, + "acc_norm_stderr": 0.047381987035454834 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3962962962962963, + "acc_stderr": 0.029822619458533997, + "acc_norm": 0.3962962962962963, + "acc_norm_stderr": 0.029822619458533997 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3708609271523179, + "acc_stderr": 0.03943966699183629, + "acc_norm": 0.3708609271523179, + "acc_norm_stderr": 0.03943966699183629 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6766169154228856, + "acc_stderr": 0.03307615947979035, + "acc_norm": 0.6766169154228856, + "acc_norm_stderr": 0.03307615947979035 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.49710982658959535, + "acc_stderr": 0.038124005659748335, + "acc_norm": 0.49710982658959535, + "acc_norm_stderr": 0.038124005659748335 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.42592592592592593, + "acc_stderr": 0.02546714904546955, + "acc_norm": 0.42592592592592593, + "acc_norm_stderr": 0.02546714904546955 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.04155319955593146, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.04155319955593146 + }, + 
"harness|ko_mmlu_college_chemistry|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.73, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.73, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5549132947976878, + "acc_stderr": 0.02675625512966377, + "acc_norm": 0.5549132947976878, + "acc_norm_stderr": 0.02675625512966377 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.588957055214724, + "acc_stderr": 0.038656978537853624, + "acc_norm": 0.588957055214724, + "acc_norm_stderr": 0.038656978537853624 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5771604938271605, + "acc_stderr": 0.027487472980871595, + "acc_norm": 0.5771604938271605, + "acc_norm_stderr": 0.027487472980871595 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7305699481865285, + "acc_stderr": 0.032018671228777947, + "acc_norm": 0.7305699481865285, + "acc_norm_stderr": 0.032018671228777947 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.42105263157894735, + "acc_stderr": 0.046446020912223177, + "acc_norm": 0.42105263157894735, + "acc_norm_stderr": 0.046446020912223177 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.7064220183486238, + "acc_stderr": 0.019525151122639663, + "acc_norm": 0.7064220183486238, + "acc_norm_stderr": 0.019525151122639663 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3968253968253968, + "acc_stderr": 0.04375888492727061, + "acc_norm": 0.3968253968253968, + "acc_norm_stderr": 0.04375888492727061 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.545751633986928, + "acc_stderr": 0.02850980780262659, + "acc_norm": 0.545751633986928, + "acc_norm_stderr": 0.02850980780262659 + }, + 
"harness|ko_mmlu_business_ethics|5": { + "acc": 0.55, + "acc_stderr": 0.05000000000000001, + "acc_norm": 0.55, + "acc_norm_stderr": 0.05000000000000001 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6859504132231405, + "acc_stderr": 0.04236964753041019, + "acc_norm": 0.6859504132231405, + "acc_norm_stderr": 0.04236964753041019 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.6052631578947368, + "acc_stderr": 0.039777499346220734, + "acc_norm": 0.6052631578947368, + "acc_norm_stderr": 0.039777499346220734 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5392156862745098, + "acc_stderr": 0.02016552331390791, + "acc_norm": 0.5392156862745098, + "acc_norm_stderr": 0.02016552331390791 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.35815602836879434, + "acc_stderr": 0.02860208586275942, + "acc_norm": 0.35815602836879434, + "acc_norm_stderr": 0.02860208586275942 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4107142857142857, + "acc_stderr": 0.04669510663875192, + "acc_norm": 0.4107142857142857, + "acc_norm_stderr": 0.04669510663875192 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.44907407407407407, + "acc_stderr": 0.03392238405321617, + "acc_norm": 0.44907407407407407, + "acc_norm_stderr": 0.03392238405321617 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.3452513966480447, + "acc_stderr": 0.015901432608930354, + "acc_norm": 0.3452513966480447, + "acc_norm_stderr": 0.015901432608930354 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.66, + "acc_stderr": 0.04760952285695238, + "acc_norm": 0.66, + "acc_norm_stderr": 0.04760952285695238 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.45588235294117646, + "acc_stderr": 0.030254372573976694, + "acc_norm": 0.45588235294117646, + 
"acc_norm_stderr": 0.030254372573976694 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6204081632653061, + "acc_stderr": 0.031067211262872457, + "acc_norm": 0.6204081632653061, + "acc_norm_stderr": 0.031067211262872457 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6582278481012658, + "acc_stderr": 0.030874537537553617, + "acc_norm": 0.6582278481012658, + "acc_norm_stderr": 0.030874537537553617 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.4152542372881356, + "acc_stderr": 0.012585471793400667, + "acc_norm": 0.4152542372881356, + "acc_norm_stderr": 0.012585471793400667 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5343137254901961, + "acc_stderr": 0.03501038327635896, + "acc_norm": 0.5343137254901961, + "acc_norm_stderr": 0.03501038327635896 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.038881769216741004, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.038881769216741004 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.4663402692778458, + "mc1_stderr": 0.01746379386716811, + "mc2": NaN, + "mc2_stderr": NaN + }, + "harness|ko_commongen_v2|2": { + "acc": 0.44037780401416765, + "acc_stderr": 0.01706769977431298, + "acc_norm": 0.44510035419126326, + "acc_norm_stderr": 0.01708641743100547 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + 
"harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + 
"harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "x2bee/POLAR-14B-DPO-v1.4", + "model_sha": "a6e64075fafaa3d5e393ff89c3cb26f9615e6de9", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/x2bee/POLAR-14B-HES-DPO-v1.5/result_2024-05-29 23:53:33.json b/x2bee/POLAR-14B-HES-DPO-v1.5/result_2024-05-29 23:53:33.json new file mode 100644 index 0000000000000000000000000000000000000000..0e6de7e6e6b2da2d1f810bf4f09d0fb3e09ce527 --- /dev/null +++ b/x2bee/POLAR-14B-HES-DPO-v1.5/result_2024-05-29 23:53:33.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.6638225255972696, + "acc_stderr": 0.013804855026205756, + "acc_norm": 0.7278156996587031, + "acc_norm_stderr": 0.013006600406423709 + }, + "harness|ko_hellaswag|10": { + "acc": 0.45648277235610435, + "acc_stderr": 0.004970846697552306, + "acc_norm": 0.6349332802230632, + "acc_norm_stderr": 0.004804649197163697 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.7309941520467836, + "acc_stderr": 0.0340105262010409, + "acc_norm": 0.7309941520467836, + "acc_norm_stderr": 0.0340105262010409 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.7766990291262136, + "acc_stderr": 0.04123553189891431, + "acc_norm": 0.7766990291262136, + "acc_norm_stderr": 0.04123553189891431 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.7343550446998723, + "acc_stderr": 0.01579430248788872, + "acc_norm": 0.7343550446998723, + "acc_norm_stderr": 0.01579430248788872 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45185185185185184, + "acc_stderr": 0.04299268905480863, + "acc_norm": 0.45185185185185184, + "acc_norm_stderr": 0.04299268905480863 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.35, + "acc_stderr": 0.04793724854411019, + "acc_norm": 0.35, + "acc_norm_stderr": 
0.04793724854411019 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.5276595744680851, + "acc_stderr": 0.03263597118409769, + "acc_norm": 0.5276595744680851, + "acc_norm_stderr": 0.03263597118409769 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4759036144578313, + "acc_stderr": 0.03887971849597264, + "acc_norm": 0.4759036144578313, + "acc_norm_stderr": 0.03887971849597264 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6559485530546624, + "acc_stderr": 0.026981478043648043, + "acc_norm": 0.6559485530546624, + "acc_norm_stderr": 0.026981478043648043 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.6412556053811659, + "acc_stderr": 0.032190792004199956, + "acc_norm": 0.6412556053811659, + "acc_norm_stderr": 0.032190792004199956 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.648854961832061, + "acc_stderr": 0.04186445163013751, + "acc_norm": 0.648854961832061, + "acc_norm_stderr": 0.04186445163013751 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7777777777777778, + "acc_stderr": 0.029620227874790465, + "acc_norm": 0.7777777777777778, + "acc_norm_stderr": 0.029620227874790465 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5103448275862069, + "acc_stderr": 0.04165774775728762, + "acc_norm": 0.5103448275862069, + "acc_norm_stderr": 0.04165774775728762 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3627450980392157, + "acc_stderr": 0.04784060704105655, + "acc_norm": 0.3627450980392157, + "acc_norm_stderr": 0.04784060704105655 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6680672268907563, + "acc_stderr": 0.03058869701378364, + "acc_norm": 0.6680672268907563, + "acc_norm_stderr": 0.03058869701378364 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.6384615384615384, + "acc_stderr": 0.024359581465397, 
+ "acc_norm": 0.6384615384615384, + "acc_norm_stderr": 0.024359581465397 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.65, + "acc_stderr": 0.04793724854411021, + "acc_norm": 0.65, + "acc_norm_stderr": 0.04793724854411021 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6851851851851852, + "acc_stderr": 0.04489931073591312, + "acc_norm": 0.6851851851851852, + "acc_norm_stderr": 0.04489931073591312 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.46798029556650245, + "acc_stderr": 0.035107665979592154, + "acc_norm": 0.46798029556650245, + "acc_norm_stderr": 0.035107665979592154 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6548387096774193, + "acc_stderr": 0.02704574657353432, + "acc_norm": 0.6548387096774193, + "acc_norm_stderr": 0.02704574657353432 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.8162393162393162, + "acc_stderr": 0.025372139671722933, + "acc_norm": 0.8162393162393162, + "acc_norm_stderr": 0.025372139671722933 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5773584905660377, + "acc_stderr": 0.03040233144576954, + "acc_norm": 0.5773584905660377, + "acc_norm_stderr": 0.03040233144576954 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6454545454545455, + "acc_stderr": 0.045820048415054174, + "acc_norm": 0.6454545454545455, + "acc_norm_stderr": 0.045820048415054174 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.029958249250082118, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.029958249250082118 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3509933774834437, + "acc_stderr": 0.03896981964257375, + "acc_norm": 0.3509933774834437, + "acc_norm_stderr": 0.03896981964257375 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7263681592039801, + "acc_stderr": 
0.03152439186555404, + "acc_norm": 0.7263681592039801, + "acc_norm_stderr": 0.03152439186555404 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5375722543352601, + "acc_stderr": 0.0380168510452446, + "acc_norm": 0.5375722543352601, + "acc_norm_stderr": 0.0380168510452446 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.4365079365079365, + "acc_stderr": 0.025542846817400496, + "acc_norm": 0.4365079365079365, + "acc_norm_stderr": 0.025542846817400496 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5694444444444444, + "acc_stderr": 0.04140685639111503, + "acc_norm": 0.5694444444444444, + "acc_norm_stderr": 0.04140685639111503 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.78, + "acc_stderr": 0.04163331998932263, + "acc_norm": 0.78, + "acc_norm_stderr": 0.04163331998932263 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.6098265895953757, + "acc_stderr": 0.026261677607806642, + "acc_norm": 0.6098265895953757, + "acc_norm_stderr": 0.026261677607806642 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.656441717791411, + "acc_stderr": 0.03731133519673893, + "acc_norm": 0.656441717791411, + "acc_norm_stderr": 0.03731133519673893 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6574074074074074, + "acc_stderr": 0.02640614597362568, + "acc_norm": 0.6574074074074074, + "acc_norm_stderr": 0.02640614597362568 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7668393782383419, + "acc_stderr": 0.03051611137147601, + "acc_norm": 0.7668393782383419, + "acc_norm_stderr": 0.03051611137147601 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.45614035087719296, + 
"acc_stderr": 0.046854730419077895, + "acc_norm": 0.45614035087719296, + "acc_norm_stderr": 0.046854730419077895 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.7853211009174312, + "acc_stderr": 0.017604304149256494, + "acc_norm": 0.7853211009174312, + "acc_norm_stderr": 0.017604304149256494 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4523809523809524, + "acc_stderr": 0.044518079590553275, + "acc_norm": 0.4523809523809524, + "acc_norm_stderr": 0.044518079590553275 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.6405228758169934, + "acc_stderr": 0.027475969910660952, + "acc_norm": 0.6405228758169934, + "acc_norm_stderr": 0.027475969910660952 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.66, + "acc_stderr": 0.04760952285695237, + "acc_norm": 0.66, + "acc_norm_stderr": 0.04760952285695237 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7933884297520661, + "acc_stderr": 0.03695980128098824, + "acc_norm": 0.7933884297520661, + "acc_norm_stderr": 0.03695980128098824 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.6842105263157895, + "acc_stderr": 0.0378272898086547, + "acc_norm": 0.6842105263157895, + "acc_norm_stderr": 0.0378272898086547 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5964052287581699, + "acc_stderr": 0.019848280168401164, + "acc_norm": 0.5964052287581699, + "acc_norm_stderr": 0.019848280168401164 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.4397163120567376, + "acc_stderr": 0.02960991207559411, + "acc_norm": 0.4397163120567376, + "acc_norm_stderr": 0.02960991207559411 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.39285714285714285, + "acc_stderr": 0.04635550135609976, + "acc_norm": 0.39285714285714285, + "acc_norm_stderr": 0.04635550135609976 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5787037037037037, + "acc_stderr": 0.03367462138896078, + "acc_norm": 0.5787037037037037, + "acc_norm_stderr": 0.03367462138896078 + }, + 
"harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.264804469273743, + "acc_stderr": 0.01475690648326066, + "acc_norm": 0.264804469273743, + "acc_norm_stderr": 0.01475690648326066 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.7, + "acc_stderr": 0.04605661864718381, + "acc_norm": 0.7, + "acc_norm_stderr": 0.04605661864718381 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5588235294117647, + "acc_stderr": 0.03016191193076711, + "acc_norm": 0.5588235294117647, + "acc_norm_stderr": 0.03016191193076711 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6448979591836734, + "acc_stderr": 0.030635655150387634, + "acc_norm": 0.6448979591836734, + "acc_norm_stderr": 0.030635655150387634 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7426160337552743, + "acc_stderr": 0.028458820991460302, + "acc_norm": 0.7426160337552743, + "acc_norm_stderr": 0.028458820991460302 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.44654498044328556, + "acc_stderr": 0.012697046024399661, + "acc_norm": 0.44654498044328556, + "acc_norm_stderr": 0.012697046024399661 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6225490196078431, + "acc_stderr": 0.03402272044340703, + "acc_norm": 0.6225490196078431, + "acc_norm_stderr": 0.03402272044340703 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6303030303030303, + "acc_stderr": 0.03769430314512569, + "acc_norm": 0.6303030303030303, + "acc_norm_stderr": 0.03769430314512569 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.6634026927784578, + "mc1_stderr": 0.0165424128094949, + "mc2": 0.7515104740134964, + "mc2_stderr": 0.014200593490054807 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5147579693034239, + "acc_stderr": 0.01718286443499856, + "acc_norm": 0.526564344746163, + 
"acc_norm_stderr": 0.017166075717577747 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, 
+ "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "x2bee/POLAR-14B-HES-DPO-v1.5", + "model_sha": "f0bc8e2566ba28c8232d7c690098e634ea894e8d", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/x2bee/POLAR-14B-SON-SFT-v0.1/result_2024-05-27 13:52:58.json b/x2bee/POLAR-14B-SON-SFT-v0.1/result_2024-05-27 13:52:58.json new file mode 100644 index 0000000000000000000000000000000000000000..2f66115241fa10b517cbb41de3e1d69c35bdd0a9 --- /dev/null +++ b/x2bee/POLAR-14B-SON-SFT-v0.1/result_2024-05-27 13:52:58.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.6646757679180887, + "acc_stderr": 0.013796182947785564, + "acc_norm": 0.7244027303754266, + "acc_norm_stderr": 0.01305716965576184 + }, + "harness|ko_hellaswag|10": { + "acc": 0.46036646086436966, + "acc_stderr": 0.004974080638364276, + "acc_norm": 0.6195976897032464, + "acc_norm_stderr": 0.004844935327599196 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.7602339181286549, + "acc_stderr": 0.03274485211946956, + "acc_norm": 0.7602339181286549, + "acc_norm_stderr": 0.03274485211946956 + }, + 
"harness|ko_mmlu_management|5": { + "acc": 0.7766990291262136, + "acc_stderr": 0.04123553189891431, + "acc_norm": 0.7766990291262136, + "acc_norm_stderr": 0.04123553189891431 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.7381864623243933, + "acc_stderr": 0.01572083867844526, + "acc_norm": 0.7381864623243933, + "acc_norm_stderr": 0.01572083867844526 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.5037037037037037, + "acc_stderr": 0.04319223625811331, + "acc_norm": 0.5037037037037037, + "acc_norm_stderr": 0.04319223625811331 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.35, + "acc_stderr": 0.04793724854411019, + "acc_norm": 0.35, + "acc_norm_stderr": 0.04793724854411019 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.5404255319148936, + "acc_stderr": 0.032579014820998335, + "acc_norm": 0.5404255319148936, + "acc_norm_stderr": 0.032579014820998335 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.5180722891566265, + "acc_stderr": 0.038899512528272166, + "acc_norm": 0.5180722891566265, + "acc_norm_stderr": 0.038899512528272166 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6559485530546624, + "acc_stderr": 0.026981478043648043, + "acc_norm": 0.6559485530546624, + "acc_norm_stderr": 0.026981478043648043 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.6591928251121076, + "acc_stderr": 0.0318114974705536, + "acc_norm": 0.6591928251121076, + "acc_norm_stderr": 0.0318114974705536 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6564885496183206, + "acc_stderr": 0.041649760719448786, + "acc_norm": 0.6564885496183206, + "acc_norm_stderr": 0.041649760719448786 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7575757575757576, + "acc_stderr": 0.030532892233932036, + "acc_norm": 0.7575757575757576, + "acc_norm_stderr": 0.030532892233932036 + }, + 
"harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5586206896551724, + "acc_stderr": 0.04137931034482757, + "acc_norm": 0.5586206896551724, + "acc_norm_stderr": 0.04137931034482757 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.04617034827006717, + "acc_norm": 0.3137254901960784, + "acc_norm_stderr": 0.04617034827006717 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6512605042016807, + "acc_stderr": 0.03095663632856655, + "acc_norm": 0.6512605042016807, + "acc_norm_stderr": 0.03095663632856655 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.6230769230769231, + "acc_stderr": 0.024570975364225995, + "acc_norm": 0.6230769230769231, + "acc_norm_stderr": 0.024570975364225995 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.73, + "acc_stderr": 0.04461960433384739, + "acc_norm": 0.73, + "acc_norm_stderr": 0.04461960433384739 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.7037037037037037, + "acc_stderr": 0.04414343666854933, + "acc_norm": 0.7037037037037037, + "acc_norm_stderr": 0.04414343666854933 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4630541871921182, + "acc_stderr": 0.035083705204426656, + "acc_norm": 0.4630541871921182, + "acc_norm_stderr": 0.035083705204426656 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.603225806451613, + "acc_stderr": 0.027831231605767944, + "acc_norm": 0.603225806451613, + "acc_norm_stderr": 0.027831231605767944 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.8205128205128205, + "acc_stderr": 0.025140935950335435, + "acc_norm": 0.8205128205128205, + "acc_norm_stderr": 0.025140935950335435 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5962264150943396, + "acc_stderr": 0.03019761160019795, + "acc_norm": 0.5962264150943396, + 
"acc_norm_stderr": 0.03019761160019795 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6181818181818182, + "acc_stderr": 0.046534298079135075, + "acc_norm": 0.6181818181818182, + "acc_norm_stderr": 0.046534298079135075 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.37407407407407406, + "acc_stderr": 0.029502861128955293, + "acc_norm": 0.37407407407407406, + "acc_norm_stderr": 0.029502861128955293 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.32450331125827814, + "acc_stderr": 0.038227469376587525, + "acc_norm": 0.32450331125827814, + "acc_norm_stderr": 0.038227469376587525 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7164179104477612, + "acc_stderr": 0.03187187537919796, + "acc_norm": 0.7164179104477612, + "acc_norm_stderr": 0.03187187537919796 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5375722543352601, + "acc_stderr": 0.03801685104524458, + "acc_norm": 0.5375722543352601, + "acc_norm_stderr": 0.03801685104524458 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.025487187147859372, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.025487187147859372 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5902777777777778, + "acc_stderr": 0.04112490974670787, + "acc_norm": 0.5902777777777778, + "acc_norm_stderr": 0.04112490974670787 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.45, + "acc_stderr": 0.049999999999999996, + "acc_norm": 0.45, + "acc_norm_stderr": 0.049999999999999996 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.78, + "acc_stderr": 0.04163331998932263, + "acc_norm": 0.78, + "acc_norm_stderr": 0.04163331998932263 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.6184971098265896, + "acc_stderr": 0.026152198619726803, + "acc_norm": 0.6184971098265896, + "acc_norm_stderr": 0.026152198619726803 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.6441717791411042, + "acc_stderr": 
0.03761521380046734, + "acc_norm": 0.6441717791411042, + "acc_norm_stderr": 0.03761521380046734 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6944444444444444, + "acc_stderr": 0.025630824975621365, + "acc_norm": 0.6944444444444444, + "acc_norm_stderr": 0.025630824975621365 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7927461139896373, + "acc_stderr": 0.029252823291803638, + "acc_norm": 0.7927461139896373, + "acc_norm_stderr": 0.029252823291803638 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.43859649122807015, + "acc_stderr": 0.04668000738510455, + "acc_norm": 0.43859649122807015, + "acc_norm_stderr": 0.04668000738510455 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.7853211009174312, + "acc_stderr": 0.017604304149256494, + "acc_norm": 0.7853211009174312, + "acc_norm_stderr": 0.017604304149256494 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3968253968253968, + "acc_stderr": 0.04375888492727062, + "acc_norm": 0.3968253968253968, + "acc_norm_stderr": 0.04375888492727062 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.6437908496732027, + "acc_stderr": 0.027420477662629245, + "acc_norm": 0.6437908496732027, + "acc_norm_stderr": 0.027420477662629245 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.63, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.63, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7603305785123967, + "acc_stderr": 0.03896878985070415, + "acc_norm": 0.7603305785123967, + "acc_norm_stderr": 0.03896878985070415 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.625, + "acc_stderr": 0.039397364351956274, + "acc_norm": 0.625, + "acc_norm_stderr": 0.039397364351956274 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.619281045751634, + 
"acc_stderr": 0.019643801557924806, + "acc_norm": 0.619281045751634, + "acc_norm_stderr": 0.019643801557924806 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.45390070921985815, + "acc_stderr": 0.029700453247291467, + "acc_norm": 0.45390070921985815, + "acc_norm_stderr": 0.029700453247291467 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.41964285714285715, + "acc_stderr": 0.04684099321077106, + "acc_norm": 0.41964285714285715, + "acc_norm_stderr": 0.04684099321077106 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.03388857118502326, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.03388857118502326 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.3575418994413408, + "acc_stderr": 0.016029394474894893, + "acc_norm": 0.3575418994413408, + "acc_norm_stderr": 0.016029394474894893 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.75, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.75, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5735294117647058, + "acc_stderr": 0.03004261583271486, + "acc_norm": 0.5735294117647058, + "acc_norm_stderr": 0.03004261583271486 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6816326530612244, + "acc_stderr": 0.02982253379398204, + "acc_norm": 0.6816326530612244, + "acc_norm_stderr": 0.02982253379398204 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7468354430379747, + "acc_stderr": 0.028304657943035293, + "acc_norm": 0.7468354430379747, + "acc_norm_stderr": 0.028304657943035293 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.455019556714472, + "acc_stderr": 0.012718456618701789, + "acc_norm": 0.455019556714472, + "acc_norm_stderr": 0.012718456618701789 + }, + 
"harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.033086111132364364, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.033086111132364364 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6484848484848484, + "acc_stderr": 0.037282069986826503, + "acc_norm": 0.6484848484848484, + "acc_norm_stderr": 0.037282069986826503 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.605875152998776, + "mc1_stderr": 0.017106588140700332, + "mc2": 0.7254831072808595, + "mc2_stderr": 0.014162522228042162 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5926800472255017, + "acc_stderr": 0.01689245669519127, + "acc_norm": 0.6269185360094451, + "acc_norm_stderr": 0.016627318275137453 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 
1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "x2bee/POLAR-14B-SON-SFT-v0.1", + "model_sha": "01286a13088332c1eda4279b5bcfa7a0a33e145f", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/x2bee/POLAR-14B-v0.2/result_2024-05-02 00:34:33.json b/x2bee/POLAR-14B-v0.2/result_2024-05-02 00:34:33.json new file mode 100644 index 0000000000000000000000000000000000000000..c989cf6b4bbd700a9070f066f27bf80e367c881c 
--- /dev/null +++ b/x2bee/POLAR-14B-v0.2/result_2024-05-02 00:34:33.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.7465870307167235, + "acc_stderr": 0.012710896778378602, + "acc_norm": 0.7687713310580204, + "acc_norm_stderr": 0.012320858834772264 + }, + "harness|ko_hellaswag|10": { + "acc": 0.681736705835491, + "acc_stderr": 0.004648503177353952, + "acc_norm": 0.7999402509460267, + "acc_norm_stderr": 0.003992272261659531 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6549707602339181, + "acc_stderr": 0.036459813773888065, + "acc_norm": 0.6549707602339181, + "acc_norm_stderr": 0.036459813773888065 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.7378640776699029, + "acc_stderr": 0.043546310772605956, + "acc_norm": 0.7378640776699029, + "acc_norm_stderr": 0.043546310772605956 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6922094508301405, + "acc_stderr": 0.016506045045155633, + "acc_norm": 0.6922094508301405, + "acc_norm_stderr": 0.016506045045155633 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.043097329010363554, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.043097329010363554 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4595744680851064, + "acc_stderr": 0.03257901482099836, + "acc_norm": 0.4595744680851064, + "acc_norm_stderr": 0.03257901482099836 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4879518072289157, + "acc_stderr": 0.03891364495835821, + "acc_norm": 0.4879518072289157, + "acc_norm_stderr": 0.03891364495835821 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6045016077170418, + "acc_stderr": 0.027770918531427834, + "acc_norm": 0.6045016077170418, + "acc_norm_stderr": 0.027770918531427834 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.6233183856502242, + 
"acc_stderr": 0.03252113489929188, + "acc_norm": 0.6233183856502242, + "acc_norm_stderr": 0.03252113489929188 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6412213740458015, + "acc_stderr": 0.04206739313864908, + "acc_norm": 0.6412213740458015, + "acc_norm_stderr": 0.04206739313864908 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7222222222222222, + "acc_stderr": 0.03191178226713547, + "acc_norm": 0.7222222222222222, + "acc_norm_stderr": 0.03191178226713547 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5241379310344828, + "acc_stderr": 0.0416180850350153, + "acc_norm": 0.5241379310344828, + "acc_norm_stderr": 0.0416180850350153 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3235294117647059, + "acc_stderr": 0.046550104113196177, + "acc_norm": 0.3235294117647059, + "acc_norm_stderr": 0.046550104113196177 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6764705882352942, + "acc_stderr": 0.030388353551886793, + "acc_norm": 0.6764705882352942, + "acc_norm_stderr": 0.030388353551886793 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.6384615384615384, + "acc_stderr": 0.024359581465397, + "acc_norm": 0.6384615384615384, + "acc_norm_stderr": 0.024359581465397 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.65, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.65, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6296296296296297, + "acc_stderr": 0.04668408033024931, + "acc_norm": 0.6296296296296297, + "acc_norm_stderr": 0.04668408033024931 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 
0.4729064039408867, + "acc_stderr": 0.03512819077876105, + "acc_norm": 0.4729064039408867, + "acc_norm_stderr": 0.03512819077876105 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5709677419354838, + "acc_stderr": 0.028156036538233193, + "acc_norm": 0.5709677419354838, + "acc_norm_stderr": 0.028156036538233193 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.8034188034188035, + "acc_stderr": 0.026035386098951292, + "acc_norm": 0.8034188034188035, + "acc_norm_stderr": 0.026035386098951292 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5547169811320755, + "acc_stderr": 0.030588052974270655, + "acc_norm": 0.5547169811320755, + "acc_norm_stderr": 0.030588052974270655 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.04607582090719976, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.04607582090719976 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3592592592592593, + "acc_stderr": 0.029252905927251976, + "acc_norm": 0.3592592592592593, + "acc_norm_stderr": 0.029252905927251976 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3576158940397351, + "acc_stderr": 0.03913453431177258, + "acc_norm": 0.3576158940397351, + "acc_norm_stderr": 0.03913453431177258 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6268656716417911, + "acc_stderr": 0.034198326081760065, + "acc_norm": 0.6268656716417911, + "acc_norm_stderr": 0.034198326081760065 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.48554913294797686, + "acc_stderr": 0.03810871630454764, + "acc_norm": 0.48554913294797686, + "acc_norm_stderr": 0.03810871630454764 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.4497354497354497, + "acc_stderr": 0.025620857042936648, + "acc_norm": 0.4497354497354497, + "acc_norm_stderr": 0.025620857042936648 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.6041666666666666, + "acc_stderr": 0.04089465449325582, + "acc_norm": 0.6041666666666666, + 
"acc_norm_stderr": 0.04089465449325582 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.71, + "acc_stderr": 0.045604802157206824, + "acc_norm": 0.71, + "acc_norm_stderr": 0.045604802157206824 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5664739884393064, + "acc_stderr": 0.026680134761679217, + "acc_norm": 0.5664739884393064, + "acc_norm_stderr": 0.026680134761679217 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.6196319018404908, + "acc_stderr": 0.038142698932618374, + "acc_norm": 0.6196319018404908, + "acc_norm_stderr": 0.038142698932618374 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6574074074074074, + "acc_stderr": 0.026406145973625686, + "acc_norm": 0.6574074074074074, + "acc_norm_stderr": 0.026406145973625686 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939098, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939098 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7616580310880829, + "acc_stderr": 0.030748905363909895, + "acc_norm": 0.7616580310880829, + "acc_norm_stderr": 0.030748905363909895 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.5, + "acc_stderr": 0.047036043419179864, + "acc_norm": 0.5, + "acc_norm_stderr": 0.047036043419179864 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.7211009174311926, + "acc_stderr": 0.01922746887646353, + "acc_norm": 0.7211009174311926, + "acc_norm_stderr": 0.01922746887646353 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.0442626668137991, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.0442626668137991 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5816993464052288, + "acc_stderr": 0.0282451340243873, + "acc_norm": 0.5816993464052288, + "acc_norm_stderr": 
0.0282451340243873 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.73, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.73, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7107438016528925, + "acc_stderr": 0.041391127276354626, + "acc_norm": 0.7107438016528925, + "acc_norm_stderr": 0.041391127276354626 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.6513157894736842, + "acc_stderr": 0.038781398887976104, + "acc_norm": 0.6513157894736842, + "acc_norm_stderr": 0.038781398887976104 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5686274509803921, + "acc_stderr": 0.020036393768352624, + "acc_norm": 0.5686274509803921, + "acc_norm_stderr": 0.020036393768352624 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.45390070921985815, + "acc_stderr": 0.029700453247291477, + "acc_norm": 0.45390070921985815, + "acc_norm_stderr": 0.029700453247291477 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4642857142857143, + "acc_stderr": 0.04733667890053756, + "acc_norm": 0.4642857142857143, + "acc_norm_stderr": 0.04733667890053756 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5092592592592593, + "acc_stderr": 0.034093869469927006, + "acc_norm": 0.5092592592592593, + "acc_norm_stderr": 0.034093869469927006 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.37206703910614525, + "acc_stderr": 0.016165847583563295, + "acc_norm": 0.37206703910614525, + "acc_norm_stderr": 0.016165847583563295 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.71, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.71, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5404411764705882, + "acc_stderr": 0.030273325077345755, + 
"acc_norm": 0.5404411764705882, + "acc_norm_stderr": 0.030273325077345755 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6122448979591837, + "acc_stderr": 0.03119223072679566, + "acc_norm": 0.6122448979591837, + "acc_norm_stderr": 0.03119223072679566 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7257383966244726, + "acc_stderr": 0.029041333510598025, + "acc_norm": 0.7257383966244726, + "acc_norm_stderr": 0.029041333510598025 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.4641460234680574, + "acc_stderr": 0.01273736131873058, + "acc_norm": 0.4641460234680574, + "acc_norm_stderr": 0.01273736131873058 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6568627450980392, + "acc_stderr": 0.03332139944668086, + "acc_norm": 0.6568627450980392, + "acc_norm_stderr": 0.03332139944668086 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6, + "acc_stderr": 0.03825460278380025, + "acc_norm": 0.6, + "acc_norm_stderr": 0.03825460278380025 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.7246022031823746, + "mc1_stderr": 0.01563813566777552, + "mc2": 0.8107575910195236, + "mc2_stderr": 0.013335029489665237 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.525383707201889, + "acc_stderr": 0.017168187201429253, + "acc_norm": 0.5442739079102715, + "acc_norm_stderr": 0.017122829143292655 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 
1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + 
"harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "x2bee/POLAR-14B-v0.2", + "model_sha": "8d905623a3972e11260420130039c62e115cbbaa", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/x2bee/POLAR-14B-v0.5/result_2024-06-05 00:49:59.json b/x2bee/POLAR-14B-v0.5/result_2024-06-05 00:49:59.json new file mode 100644 index 0000000000000000000000000000000000000000..a9211ece8850335efb4651ab19bbe6fdfab8aae3 --- /dev/null +++ b/x2bee/POLAR-14B-v0.5/result_2024-06-05 00:49:59.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.75, + "acc_stderr": 0.012653835621466646, + "acc_norm": 0.7798634812286689, + "acc_norm_stderr": 0.012108124883460988 + }, + "harness|ko_hellaswag|10": { + "acc": 0.6500697072296355, + "acc_stderr": 0.004759729267943182, + "acc_norm": 0.775542720573591, + "acc_norm_stderr": 0.004163717220873764 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6374269005847953, + "acc_stderr": 0.036871306155620606, + "acc_norm": 0.6374269005847953, + "acc_norm_stderr": 0.036871306155620606 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.7087378640776699, + "acc_stderr": 0.044986763205729224, + "acc_norm": 0.7087378640776699, + "acc_norm_stderr": 0.044986763205729224 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6730523627075351, + "acc_stderr": 0.016774908180131484, + "acc_norm": 0.6730523627075351, + "acc_norm_stderr": 0.016774908180131484 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45185185185185184, + "acc_stderr": 0.04299268905480864, + "acc_norm": 0.45185185185185184, + "acc_norm_stderr": 0.04299268905480864 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + 
"harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4723404255319149, + "acc_stderr": 0.03263597118409769, + "acc_norm": 0.4723404255319149, + "acc_norm_stderr": 0.03263597118409769 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.46987951807228917, + "acc_stderr": 0.03885425420866766, + "acc_norm": 0.46987951807228917, + "acc_norm_stderr": 0.03885425420866766 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.594855305466238, + "acc_stderr": 0.027882383791325963, + "acc_norm": 0.594855305466238, + "acc_norm_stderr": 0.027882383791325963 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.6412556053811659, + "acc_stderr": 0.032190792004199956, + "acc_norm": 0.6412556053811659, + "acc_norm_stderr": 0.032190792004199956 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5954198473282443, + "acc_stderr": 0.043046937953806645, + "acc_norm": 0.5954198473282443, + "acc_norm_stderr": 0.043046937953806645 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7272727272727273, + "acc_stderr": 0.03173071239071724, + "acc_norm": 0.7272727272727273, + "acc_norm_stderr": 0.03173071239071724 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.503448275862069, + "acc_stderr": 0.0416656757710158, + "acc_norm": 0.503448275862069, + "acc_norm_stderr": 0.0416656757710158 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3431372549019608, + "acc_stderr": 0.04724007352383888, + "acc_norm": 0.3431372549019608, + "acc_norm_stderr": 0.04724007352383888 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6596638655462185, + "acc_stderr": 0.03077805742293167, + "acc_norm": 0.6596638655462185, + "acc_norm_stderr": 0.03077805742293167 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.6102564102564103, + "acc_stderr": 0.024726967886647078, + "acc_norm": 
0.6102564102564103, + "acc_norm_stderr": 0.024726967886647078 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.67, + "acc_stderr": 0.047258156262526094, + "acc_norm": 0.67, + "acc_norm_stderr": 0.047258156262526094 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6481481481481481, + "acc_stderr": 0.04616631111801714, + "acc_norm": 0.6481481481481481, + "acc_norm_stderr": 0.04616631111801714 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4729064039408867, + "acc_stderr": 0.03512819077876105, + "acc_norm": 0.4729064039408867, + "acc_norm_stderr": 0.03512819077876105 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5709677419354838, + "acc_stderr": 0.028156036538233193, + "acc_norm": 0.5709677419354838, + "acc_norm_stderr": 0.028156036538233193 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7735042735042735, + "acc_stderr": 0.027421007295392943, + "acc_norm": 0.7735042735042735, + "acc_norm_stderr": 0.027421007295392943 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5660377358490566, + "acc_stderr": 0.030503292013342596, + "acc_norm": 0.5660377358490566, + "acc_norm_stderr": 0.030503292013342596 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6272727272727273, + "acc_stderr": 0.04631381319425465, + "acc_norm": 0.6272727272727273, + "acc_norm_stderr": 0.04631381319425465 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.0287420409039485, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.0287420409039485 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.39072847682119205, + "acc_stderr": 0.039837983066598075, + "acc_norm": 0.39072847682119205, + "acc_norm_stderr": 0.039837983066598075 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6417910447761194, + "acc_stderr": 
0.03390393042268814, + "acc_norm": 0.6417910447761194, + "acc_norm_stderr": 0.03390393042268814 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5028901734104047, + "acc_stderr": 0.038124005659748335, + "acc_norm": 0.5028901734104047, + "acc_norm_stderr": 0.038124005659748335 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.025487187147859372, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.025487187147859372 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.6180555555555556, + "acc_stderr": 0.040629907841466674, + "acc_norm": 0.6180555555555556, + "acc_norm_stderr": 0.040629907841466674 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.72, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.72, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5809248554913294, + "acc_stderr": 0.026564178111422622, + "acc_norm": 0.5809248554913294, + "acc_norm_stderr": 0.026564178111422622 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.6257668711656442, + "acc_stderr": 0.03802068102899615, + "acc_norm": 0.6257668711656442, + "acc_norm_stderr": 0.03802068102899615 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5987654320987654, + "acc_stderr": 0.027272582849839803, + "acc_norm": 0.5987654320987654, + "acc_norm_stderr": 0.027272582849839803 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7512953367875648, + "acc_stderr": 0.031195840877700304, + "acc_norm": 0.7512953367875648, + "acc_norm_stderr": 0.031195840877700304 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 
0.47368421052631576, + "acc_stderr": 0.046970851366478626, + "acc_norm": 0.47368421052631576, + "acc_norm_stderr": 0.046970851366478626 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.7229357798165138, + "acc_stderr": 0.019188482590169538, + "acc_norm": 0.7229357798165138, + "acc_norm_stderr": 0.019188482590169538 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4523809523809524, + "acc_stderr": 0.044518079590553275, + "acc_norm": 0.4523809523809524, + "acc_norm_stderr": 0.044518079590553275 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5718954248366013, + "acc_stderr": 0.028332397483664278, + "acc_norm": 0.5718954248366013, + "acc_norm_stderr": 0.028332397483664278 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.68, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.68, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7520661157024794, + "acc_stderr": 0.039418975265163025, + "acc_norm": 0.7520661157024794, + "acc_norm_stderr": 0.039418975265163025 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.618421052631579, + "acc_stderr": 0.03953173377749194, + "acc_norm": 0.618421052631579, + "acc_norm_stderr": 0.03953173377749194 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5408496732026143, + "acc_stderr": 0.020160213617222516, + "acc_norm": 0.5408496732026143, + "acc_norm_stderr": 0.020160213617222516 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.45390070921985815, + "acc_stderr": 0.029700453247291463, + "acc_norm": 0.45390070921985815, + "acc_norm_stderr": 0.029700453247291463 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.44642857142857145, + "acc_stderr": 0.04718471485219588, + "acc_norm": 0.44642857142857145, + "acc_norm_stderr": 0.04718471485219588 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5416666666666666, + "acc_stderr": 0.03398110890294636, + "acc_norm": 0.5416666666666666, + "acc_norm_stderr": 
0.03398110890294636 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.35195530726256985, + "acc_stderr": 0.01597266852368907, + "acc_norm": 0.35195530726256985, + "acc_norm_stderr": 0.01597266852368907 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.0498887651569859, + "acc_norm": 0.44, + "acc_norm_stderr": 0.0498887651569859 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.68, + "acc_stderr": 0.04688261722621503, + "acc_norm": 0.68, + "acc_norm_stderr": 0.04688261722621503 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5147058823529411, + "acc_stderr": 0.03035969707904612, + "acc_norm": 0.5147058823529411, + "acc_norm_stderr": 0.03035969707904612 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6122448979591837, + "acc_stderr": 0.031192230726795656, + "acc_norm": 0.6122448979591837, + "acc_norm_stderr": 0.031192230726795656 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7215189873417721, + "acc_stderr": 0.029178682304842538, + "acc_norm": 0.7215189873417721, + "acc_norm_stderr": 0.029178682304842538 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.4634941329856584, + "acc_stderr": 0.012736153390214963, + "acc_norm": 0.4634941329856584, + "acc_norm_stderr": 0.012736153390214963 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6568627450980392, + "acc_stderr": 0.03332139944668086, + "acc_norm": 0.6568627450980392, + "acc_norm_stderr": 0.03332139944668086 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5818181818181818, + "acc_stderr": 0.03851716319398393, + "acc_norm": 0.5818181818181818, + "acc_norm_stderr": 0.03851716319398393 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.7833537331701347, + "mc1_stderr": 0.014421468452506978, + "mc2": 0.8572574997405501, + "mc2_stderr": 0.01200311225898601 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5159386068476978, + "acc_stderr": 0.017181617837190195, + 
"acc_norm": 0.5301062573789846, + "acc_norm_stderr": 0.01715916359017022 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + 
"harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "x2bee/POLAR-14B-v0.5", + "model_sha": "74a1ef65a8d650e5358be229def31688738d8c6a", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/yanolja/Bookworm-10.7B-v0.4-DPO/result_2024-01-18 10:12:26.json b/yanolja/Bookworm-10.7B-v0.4-DPO/result_2024-01-18 10:12:26.json new file mode 100644 index 0000000000000000000000000000000000000000..9302da4583ee4a435d8f86c46b3ea6fc224e932a --- /dev/null +++ b/yanolja/Bookworm-10.7B-v0.4-DPO/result_2024-01-18 10:12:26.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.49829351535836175, + "acc_stderr": 0.014611305705056999, + "acc_norm": 0.5563139931740614, + "acc_norm_stderr": 0.014518421825670444 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4922326229834694, + "acc_stderr": 0.004989179286677388, + "acc_norm": 0.6612228639713205, + "acc_norm_stderr": 0.004723266971563377 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6257309941520468, + "acc_stderr": 0.03711601185389481, + "acc_norm": 0.6257309941520468, + "acc_norm_stderr": 
0.03711601185389481 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6601941747572816, + "acc_stderr": 0.04689765937278133, + "acc_norm": 0.6601941747572816, + "acc_norm_stderr": 0.04689765937278133 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6679438058748404, + "acc_stderr": 0.01684117465529571, + "acc_norm": 0.6679438058748404, + "acc_norm_stderr": 0.01684117465529571 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45925925925925926, + "acc_stderr": 0.04304979692464244, + "acc_norm": 0.45925925925925926, + "acc_norm_stderr": 0.04304979692464244 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.5191489361702127, + "acc_stderr": 0.0326620429906468, + "acc_norm": 0.5191489361702127, + "acc_norm_stderr": 0.0326620429906468 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.46987951807228917, + "acc_stderr": 0.03885425420866766, + "acc_norm": 0.46987951807228917, + "acc_norm_stderr": 0.03885425420866766 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6430868167202572, + "acc_stderr": 0.02721042037593402, + "acc_norm": 0.6430868167202572, + "acc_norm_stderr": 0.02721042037593402 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5515695067264574, + "acc_stderr": 0.03337883736255098, + "acc_norm": 0.5515695067264574, + "acc_norm_stderr": 0.03337883736255098 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6183206106870229, + "acc_stderr": 0.04260735157644561, + "acc_norm": 0.6183206106870229, + "acc_norm_stderr": 0.04260735157644561 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7626262626262627, + "acc_stderr": 0.030313710538198917, + "acc_norm": 0.7626262626262627, + "acc_norm_stderr": 
0.030313710538198917 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5517241379310345, + "acc_stderr": 0.04144311810878152, + "acc_norm": 0.5517241379310345, + "acc_norm_stderr": 0.04144311810878152 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.04617034827006716, + "acc_norm": 0.3137254901960784, + "acc_norm_stderr": 0.04617034827006716 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6260504201680672, + "acc_stderr": 0.03142946637883708, + "acc_norm": 0.6260504201680672, + "acc_norm_stderr": 0.03142946637883708 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5717948717948718, + "acc_stderr": 0.025088301454694824, + "acc_norm": 0.5717948717948718, + "acc_norm_stderr": 0.025088301454694824 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.58, + "acc_stderr": 0.04960449637488583, + "acc_norm": 0.58, + "acc_norm_stderr": 0.04960449637488583 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6481481481481481, + "acc_stderr": 0.04616631111801713, + "acc_norm": 0.6481481481481481, + "acc_norm_stderr": 0.04616631111801713 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3842364532019704, + "acc_stderr": 0.0342239856565755, + "acc_norm": 0.3842364532019704, + "acc_norm_stderr": 0.0342239856565755 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6096774193548387, + "acc_stderr": 0.02775125663696958, + "acc_norm": 0.6096774193548387, + "acc_norm_stderr": 0.02775125663696958 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7948717948717948, + "acc_stderr": 0.026453508054040353, + "acc_norm": 0.7948717948717948, + "acc_norm_stderr": 0.026453508054040353 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5622641509433962, + "acc_stderr": 0.030533338430467516, + "acc_norm": 
0.5622641509433962, + "acc_norm_stderr": 0.030533338430467516 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6, + "acc_stderr": 0.0469237132203465, + "acc_norm": 0.6, + "acc_norm_stderr": 0.0469237132203465 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.37407407407407406, + "acc_stderr": 0.02950286112895529, + "acc_norm": 0.37407407407407406, + "acc_norm_stderr": 0.02950286112895529 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3576158940397351, + "acc_stderr": 0.03913453431177258, + "acc_norm": 0.3576158940397351, + "acc_norm_stderr": 0.03913453431177258 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7512437810945274, + "acc_stderr": 0.030567675938916714, + "acc_norm": 0.7512437810945274, + "acc_norm_stderr": 0.030567675938916714 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5260115606936416, + "acc_stderr": 0.038073017265045125, + "acc_norm": 0.5260115606936416, + "acc_norm_stderr": 0.038073017265045125 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.4126984126984127, + "acc_stderr": 0.025355741263055266, + "acc_norm": 0.4126984126984127, + "acc_norm_stderr": 0.025355741263055266 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5902777777777778, + "acc_stderr": 0.04112490974670787, + "acc_norm": 0.5902777777777778, + "acc_norm_stderr": 0.04112490974670787 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.75, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.75, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.6358381502890174, + "acc_stderr": 0.025906632631016124, + "acc_norm": 0.6358381502890174, + "acc_norm_stderr": 0.025906632631016124 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5337423312883436, + "acc_stderr": 0.039194155450484096, + 
"acc_norm": 0.5337423312883436, + "acc_norm_stderr": 0.039194155450484096 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6296296296296297, + "acc_stderr": 0.02686949074481525, + "acc_norm": 0.6296296296296297, + "acc_norm_stderr": 0.02686949074481525 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7305699481865285, + "acc_stderr": 0.03201867122877794, + "acc_norm": 0.7305699481865285, + "acc_norm_stderr": 0.03201867122877794 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.40350877192982454, + "acc_stderr": 0.04615186962583704, + "acc_norm": 0.40350877192982454, + "acc_norm_stderr": 0.04615186962583704 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.7009174311926606, + "acc_stderr": 0.01963041728541517, + "acc_norm": 0.7009174311926606, + "acc_norm_stderr": 0.01963041728541517 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4603174603174603, + "acc_stderr": 0.04458029125470973, + "acc_norm": 0.4603174603174603, + "acc_norm_stderr": 0.04458029125470973 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5947712418300654, + "acc_stderr": 0.028110928492809068, + "acc_norm": 0.5947712418300654, + "acc_norm_stderr": 0.028110928492809068 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.63, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.63, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7851239669421488, + "acc_stderr": 0.03749492448709695, + "acc_norm": 0.7851239669421488, + "acc_norm_stderr": 0.03749492448709695 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.625, + "acc_stderr": 0.039397364351956274, + "acc_norm": 0.625, + "acc_norm_stderr": 0.039397364351956274 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5294117647058824, + "acc_stderr": 0.02019280827143379, + 
"acc_norm": 0.5294117647058824, + "acc_norm_stderr": 0.02019280827143379 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3723404255319149, + "acc_stderr": 0.02883892147125146, + "acc_norm": 0.3723404255319149, + "acc_norm_stderr": 0.02883892147125146 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.38392857142857145, + "acc_stderr": 0.04616143075028546, + "acc_norm": 0.38392857142857145, + "acc_norm_stderr": 0.04616143075028546 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.49537037037037035, + "acc_stderr": 0.03409825519163572, + "acc_norm": 0.49537037037037035, + "acc_norm_stderr": 0.03409825519163572 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.26256983240223464, + "acc_stderr": 0.014716824273017765, + "acc_norm": 0.26256983240223464, + "acc_norm_stderr": 0.014716824273017765 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.74, + "acc_stderr": 0.044084400227680814, + "acc_norm": 0.74, + "acc_norm_stderr": 0.044084400227680814 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5330882352941176, + "acc_stderr": 0.030306257722468307, + "acc_norm": 0.5330882352941176, + "acc_norm_stderr": 0.030306257722468307 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.7142857142857143, + "acc_stderr": 0.02892058322067558, + "acc_norm": 0.7142857142857143, + "acc_norm_stderr": 0.02892058322067558 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7805907172995781, + "acc_stderr": 0.026939106581553945, + "acc_norm": 0.7805907172995781, + "acc_norm_stderr": 0.026939106581553945 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.41003911342894395, + "acc_stderr": 0.012561837621962026, + "acc_norm": 0.41003911342894395, + "acc_norm_stderr": 0.012561837621962026 + }, + 
"harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.7598039215686274, + "acc_stderr": 0.02998373305591361, + "acc_norm": 0.7598039215686274, + "acc_norm_stderr": 0.02998373305591361 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6909090909090909, + "acc_stderr": 0.036085410115739666, + "acc_norm": 0.6909090909090909, + "acc_norm_stderr": 0.036085410115739666 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.32558139534883723, + "mc1_stderr": 0.01640398946990783, + "mc2": 0.4831744040544604, + "mc2_stderr": 0.015806600639339304 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5560802833530106, + "acc_stderr": 0.017081884623542543, + "acc_norm": 0.5737898465171193, + "acc_norm_stderr": 0.01700212260948925 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, 
+ "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "yanolja/Bookworm-10.7B-v0.4-DPO", + "model_sha": "5807ef01a569e3ecda619af66f98271d6bf872f7", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/yanolja/EEVE-Korean-10.8B-v1.0/result_2024-04-18 12:41:31.json b/yanolja/EEVE-Korean-10.8B-v1.0/result_2024-04-18 12:41:31.json new file mode 100644 index 
0000000000000000000000000000000000000000..cd183b7cf53f572b25020151705d35caf10794fe --- /dev/null +++ b/yanolja/EEVE-Korean-10.8B-v1.0/result_2024-04-18 12:41:31.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4684300341296928, + "acc_stderr": 0.014582236460866977, + "acc_norm": 0.5324232081911263, + "acc_norm_stderr": 0.01458063756999543 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4645488946425015, + "acc_stderr": 0.0049772234853420316, + "acc_norm": 0.6420035849432384, + "acc_norm_stderr": 0.004784312972495387 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.7192982456140351, + "acc_stderr": 0.034462962170884265, + "acc_norm": 0.7192982456140351, + "acc_norm_stderr": 0.034462962170884265 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6990291262135923, + "acc_stderr": 0.04541609446503947, + "acc_norm": 0.6990291262135923, + "acc_norm_stderr": 0.04541609446503947 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.7254150702426565, + "acc_stderr": 0.015959829933084056, + "acc_norm": 0.7254150702426565, + "acc_norm_stderr": 0.015959829933084056 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45185185185185184, + "acc_stderr": 0.042992689054808624, + "acc_norm": 0.45185185185185184, + "acc_norm_stderr": 0.042992689054808624 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.5404255319148936, + "acc_stderr": 0.03257901482099834, + "acc_norm": 0.5404255319148936, + "acc_norm_stderr": 0.03257901482099834 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.5180722891566265, + "acc_stderr": 0.03889951252827216, + "acc_norm": 0.5180722891566265, + "acc_norm_stderr": 0.03889951252827216 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6141479099678456, + "acc_stderr": 0.027648149599751464, + "acc_norm": 0.6141479099678456, + "acc_norm_stderr": 
0.027648149599751464 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.57847533632287, + "acc_stderr": 0.03314190222110658, + "acc_norm": 0.57847533632287, + "acc_norm_stderr": 0.03314190222110658 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6412213740458015, + "acc_stderr": 0.04206739313864908, + "acc_norm": 0.6412213740458015, + "acc_norm_stderr": 0.04206739313864908 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7424242424242424, + "acc_stderr": 0.031156269519646857, + "acc_norm": 0.7424242424242424, + "acc_norm_stderr": 0.031156269519646857 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5172413793103449, + "acc_stderr": 0.04164188720169375, + "acc_norm": 0.5172413793103449, + "acc_norm_stderr": 0.04164188720169375 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3431372549019608, + "acc_stderr": 0.04724007352383887, + "acc_norm": 0.3431372549019608, + "acc_norm_stderr": 0.04724007352383887 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6512605042016807, + "acc_stderr": 0.030956636328566548, + "acc_norm": 0.6512605042016807, + "acc_norm_stderr": 0.030956636328566548 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5615384615384615, + "acc_stderr": 0.025158266016868613, + "acc_norm": 0.5615384615384615, + "acc_norm_stderr": 0.025158266016868613 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.59, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.59, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6111111111111112, + "acc_stderr": 0.0471282125742677, + "acc_norm": 0.6111111111111112, + 
"acc_norm_stderr": 0.0471282125742677 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.45320197044334976, + "acc_stderr": 0.03502544650845872, + "acc_norm": 0.45320197044334976, + "acc_norm_stderr": 0.03502544650845872 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6290322580645161, + "acc_stderr": 0.02748054188795359, + "acc_norm": 0.6290322580645161, + "acc_norm_stderr": 0.02748054188795359 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.782051282051282, + "acc_stderr": 0.02704685763071666, + "acc_norm": 0.782051282051282, + "acc_norm_stderr": 0.02704685763071666 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5773584905660377, + "acc_stderr": 0.03040233144576954, + "acc_norm": 0.5773584905660377, + "acc_norm_stderr": 0.03040233144576954 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6090909090909091, + "acc_stderr": 0.0467375233367024, + "acc_norm": 0.6090909090909091, + "acc_norm_stderr": 0.0467375233367024 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.34074074074074073, + "acc_stderr": 0.028897748741131137, + "acc_norm": 0.34074074074074073, + "acc_norm_stderr": 0.028897748741131137 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3576158940397351, + "acc_stderr": 0.03913453431177258, + "acc_norm": 0.3576158940397351, + "acc_norm_stderr": 0.03913453431177258 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7213930348258707, + "acc_stderr": 0.031700561834973086, + "acc_norm": 0.7213930348258707, + "acc_norm_stderr": 0.031700561834973086 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5260115606936416, + "acc_stderr": 0.03807301726504513, + "acc_norm": 0.5260115606936416, + "acc_norm_stderr": 0.03807301726504513 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.37566137566137564, + "acc_stderr": 0.024942368931159788, + "acc_norm": 0.37566137566137564, + "acc_norm_stderr": 0.024942368931159788 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 
0.5763888888888888, + "acc_stderr": 0.041321250197233685, + "acc_norm": 0.5763888888888888, + "acc_norm_stderr": 0.041321250197233685 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.81, + "acc_stderr": 0.03942772444036623, + "acc_norm": 0.81, + "acc_norm_stderr": 0.03942772444036623 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.6127167630057804, + "acc_stderr": 0.026226158605124655, + "acc_norm": 0.6127167630057804, + "acc_norm_stderr": 0.026226158605124655 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5521472392638037, + "acc_stderr": 0.03906947479456607, + "acc_norm": 0.5521472392638037, + "acc_norm_stderr": 0.03906947479456607 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6388888888888888, + "acc_stderr": 0.02672586880910079, + "acc_norm": 0.6388888888888888, + "acc_norm_stderr": 0.02672586880910079 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7305699481865285, + "acc_stderr": 0.032018671228777947, + "acc_norm": 0.7305699481865285, + "acc_norm_stderr": 0.032018671228777947 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.42105263157894735, + "acc_stderr": 0.04644602091222317, + "acc_norm": 0.42105263157894735, + "acc_norm_stderr": 0.04644602091222317 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.708256880733945, + "acc_stderr": 0.019489300968876532, + "acc_norm": 0.708256880733945, + "acc_norm_stderr": 0.019489300968876532 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.42063492063492064, + "acc_stderr": 0.04415438226743743, + "acc_norm": 0.42063492063492064, + "acc_norm_stderr": 0.04415438226743743 + }, + "harness|ko_mmlu_nutrition|5": { + 
"acc": 0.6535947712418301, + "acc_stderr": 0.02724561304721536, + "acc_norm": 0.6535947712418301, + "acc_norm_stderr": 0.02724561304721536 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.59, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.59, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6859504132231405, + "acc_stderr": 0.042369647530410184, + "acc_norm": 0.6859504132231405, + "acc_norm_stderr": 0.042369647530410184 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.618421052631579, + "acc_stderr": 0.03953173377749194, + "acc_norm": 0.618421052631579, + "acc_norm_stderr": 0.03953173377749194 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5179738562091504, + "acc_stderr": 0.020214761037872404, + "acc_norm": 0.5179738562091504, + "acc_norm_stderr": 0.020214761037872404 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.39361702127659576, + "acc_stderr": 0.029144544781596147, + "acc_norm": 0.39361702127659576, + "acc_norm_stderr": 0.029144544781596147 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.38392857142857145, + "acc_stderr": 0.04616143075028546, + "acc_norm": 0.38392857142857145, + "acc_norm_stderr": 0.04616143075028546 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5138888888888888, + "acc_stderr": 0.03408655867977748, + "acc_norm": 0.5138888888888888, + "acc_norm_stderr": 0.03408655867977748 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2122905027932961, + "acc_stderr": 0.01367664468583173, + "acc_norm": 0.2122905027932961, + "acc_norm_stderr": 0.01367664468583173 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.66, + "acc_stderr": 0.04760952285695238, + "acc_norm": 0.66, + "acc_norm_stderr": 0.04760952285695238 + }, + 
"harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5551470588235294, + "acc_stderr": 0.030187532060329383, + "acc_norm": 0.5551470588235294, + "acc_norm_stderr": 0.030187532060329383 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6775510204081633, + "acc_stderr": 0.029923100563683913, + "acc_norm": 0.6775510204081633, + "acc_norm_stderr": 0.029923100563683913 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7637130801687764, + "acc_stderr": 0.02765215314415926, + "acc_norm": 0.7637130801687764, + "acc_norm_stderr": 0.02765215314415926 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.40547588005215124, + "acc_stderr": 0.012539960672377205, + "acc_norm": 0.40547588005215124, + "acc_norm_stderr": 0.012539960672377205 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.7107843137254902, + "acc_stderr": 0.03182231867647553, + "acc_norm": 0.7107843137254902, + "acc_norm_stderr": 0.03182231867647553 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.7090909090909091, + "acc_stderr": 0.03546563019624336, + "acc_norm": 0.7090909090909091, + "acc_norm_stderr": 0.03546563019624336 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3047735618115055, + "mc1_stderr": 0.01611412415688247, + "mc2": 0.45098842952998486, + "mc2_stderr": 0.015220760155000955 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.6080283353010626, + "acc_stderr": 0.01678433211942408, + "acc_norm": 0.6257378984651711, + "acc_norm_stderr": 0.016637917789798746 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + 
"harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + 
"harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "yanolja/EEVE-Korean-10.8B-v1.0", + "model_sha": "ea5f0880a6532c39c36d91a596e3782de7bb9543", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/yanolja/EEVE-Korean-Instruct-10.8B-v1.0/result_2024-02-23 15:55:46.json b/yanolja/EEVE-Korean-Instruct-10.8B-v1.0/result_2024-02-23 15:55:46.json new file mode 100644 index 0000000000000000000000000000000000000000..8cc39f870fae8dc3303e11ebbbd4305580a788ae --- /dev/null +++ b/yanolja/EEVE-Korean-Instruct-10.8B-v1.0/result_2024-02-23 15:55:46.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.507679180887372, + "acc_stderr": 0.014609667440892574, + "acc_norm": 0.5520477815699659, + "acc_norm_stderr": 0.01453201149821167 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4863572993427604, + "acc_stderr": 0.004987923636628563, + "acc_norm": 0.6611232822146983, + "acc_norm_stderr": 0.004723605376936908 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.695906432748538, + "acc_stderr": 0.03528211258245233, + "acc_norm": 0.695906432748538, + "acc_norm_stderr": 0.03528211258245233 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.7087378640776699, + "acc_stderr": 0.044986763205729224, + "acc_norm": 0.7087378640776699, + "acc_norm_stderr": 0.044986763205729224 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.7381864623243933, + "acc_stderr": 0.015720838678445252, + "acc_norm": 0.7381864623243933, + "acc_norm_stderr": 0.015720838678445252 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.5037037037037037, + "acc_stderr": 
0.043192236258113324, + "acc_norm": 0.5037037037037037, + "acc_norm_stderr": 0.043192236258113324 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768077, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768077 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.5234042553191489, + "acc_stderr": 0.0326501947503358, + "acc_norm": 0.5234042553191489, + "acc_norm_stderr": 0.0326501947503358 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.536144578313253, + "acc_stderr": 0.03882310850890594, + "acc_norm": 0.536144578313253, + "acc_norm_stderr": 0.03882310850890594 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6366559485530546, + "acc_stderr": 0.027316847674192714, + "acc_norm": 0.6366559485530546, + "acc_norm_stderr": 0.027316847674192714 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.6233183856502242, + "acc_stderr": 0.032521134899291884, + "acc_norm": 0.6233183856502242, + "acc_norm_stderr": 0.032521134899291884 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6030534351145038, + "acc_stderr": 0.04291135671009224, + "acc_norm": 0.6030534351145038, + "acc_norm_stderr": 0.04291135671009224 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7777777777777778, + "acc_stderr": 0.02962022787479048, + "acc_norm": 0.7777777777777778, + "acc_norm_stderr": 0.02962022787479048 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5172413793103449, + "acc_stderr": 0.04164188720169375, + "acc_norm": 0.5172413793103449, + "acc_norm_stderr": 0.04164188720169375 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3235294117647059, + "acc_stderr": 0.04655010411319617, + "acc_norm": 0.3235294117647059, + "acc_norm_stderr": 0.04655010411319617 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6050420168067226, 
+ "acc_stderr": 0.03175367846096624, + "acc_norm": 0.6050420168067226, + "acc_norm_stderr": 0.03175367846096624 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5692307692307692, + "acc_stderr": 0.025106820660539753, + "acc_norm": 0.5692307692307692, + "acc_norm_stderr": 0.025106820660539753 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.65, + "acc_stderr": 0.04793724854411021, + "acc_norm": 0.65, + "acc_norm_stderr": 0.04793724854411021 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6111111111111112, + "acc_stderr": 0.0471282125742677, + "acc_norm": 0.6111111111111112, + "acc_norm_stderr": 0.0471282125742677 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39408866995073893, + "acc_stderr": 0.03438157967036543, + "acc_norm": 0.39408866995073893, + "acc_norm_stderr": 0.03438157967036543 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6419354838709678, + "acc_stderr": 0.02727389059430063, + "acc_norm": 0.6419354838709678, + "acc_norm_stderr": 0.02727389059430063 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.8247863247863247, + "acc_stderr": 0.024904439098918214, + "acc_norm": 0.8247863247863247, + "acc_norm_stderr": 0.024904439098918214 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5735849056603773, + "acc_stderr": 0.03043779434298305, + "acc_norm": 0.5735849056603773, + "acc_norm_stderr": 0.03043779434298305 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6181818181818182, + "acc_stderr": 0.046534298079135075, + "acc_norm": 0.6181818181818182, + "acc_norm_stderr": 0.046534298079135075 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.31851851851851853, + "acc_stderr": 0.028406533090608463, + "acc_norm": 0.31851851851851853, + "acc_norm_stderr": 0.028406533090608463 + }, + 
"harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3576158940397351, + "acc_stderr": 0.03913453431177258, + "acc_norm": 0.3576158940397351, + "acc_norm_stderr": 0.03913453431177258 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.736318407960199, + "acc_stderr": 0.03115715086935558, + "acc_norm": 0.736318407960199, + "acc_norm_stderr": 0.03115715086935558 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4913294797687861, + "acc_stderr": 0.03811890988940412, + "acc_norm": 0.4913294797687861, + "acc_norm_stderr": 0.03811890988940412 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3915343915343915, + "acc_stderr": 0.025138091388851123, + "acc_norm": 0.3915343915343915, + "acc_norm_stderr": 0.025138091388851123 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.6041666666666666, + "acc_stderr": 0.04089465449325582, + "acc_norm": 0.6041666666666666, + "acc_norm_stderr": 0.04089465449325582 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.74, + "acc_stderr": 0.04408440022768077, + "acc_norm": 0.74, + "acc_norm_stderr": 0.04408440022768077 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.6416184971098265, + "acc_stderr": 0.02581675679158419, + "acc_norm": 0.6416184971098265, + "acc_norm_stderr": 0.02581675679158419 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.6134969325153374, + "acc_stderr": 0.03825825548848606, + "acc_norm": 0.6134969325153374, + "acc_norm_stderr": 0.03825825548848606 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6203703703703703, + "acc_stderr": 0.02700252103451647, + "acc_norm": 0.6203703703703703, + "acc_norm_stderr": 0.02700252103451647 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + 
"harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7772020725388601, + "acc_stderr": 0.03003114797764154, + "acc_norm": 0.7772020725388601, + "acc_norm_stderr": 0.03003114797764154 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.47368421052631576, + "acc_stderr": 0.046970851366478626, + "acc_norm": 0.47368421052631576, + "acc_norm_stderr": 0.046970851366478626 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.7467889908256881, + "acc_stderr": 0.018644073041375043, + "acc_norm": 0.7467889908256881, + "acc_norm_stderr": 0.018644073041375043 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.42063492063492064, + "acc_stderr": 0.04415438226743744, + "acc_norm": 0.42063492063492064, + "acc_norm_stderr": 0.04415438226743744 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.6176470588235294, + "acc_stderr": 0.027826109307283686, + "acc_norm": 0.6176470588235294, + "acc_norm_stderr": 0.027826109307283686 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.64, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.64, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7107438016528925, + "acc_stderr": 0.041391127276354626, + "acc_norm": 0.7107438016528925, + "acc_norm_stderr": 0.041391127276354626 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5921052631578947, + "acc_stderr": 0.039993097127774734, + "acc_norm": 0.5921052631578947, + "acc_norm_stderr": 0.039993097127774734 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5506535947712419, + "acc_stderr": 0.020123766528027266, + "acc_norm": 0.5506535947712419, + "acc_norm_stderr": 0.020123766528027266 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.4219858156028369, + "acc_stderr": 0.029462189233370586, + "acc_norm": 0.4219858156028369, + "acc_norm_stderr": 0.029462189233370586 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4375, + "acc_stderr": 0.04708567521880525, + "acc_norm": 
0.4375, + "acc_norm_stderr": 0.04708567521880525 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4537037037037037, + "acc_stderr": 0.03395322726375797, + "acc_norm": 0.4537037037037037, + "acc_norm_stderr": 0.03395322726375797 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.17653631284916202, + "acc_stderr": 0.012751770640520488, + "acc_norm": 0.17653631284916202, + "acc_norm_stderr": 0.012751770640520488 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.73, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.73, + "acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5330882352941176, + "acc_stderr": 0.030306257722468307, + "acc_norm": 0.5330882352941176, + "acc_norm_stderr": 0.030306257722468307 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6938775510204082, + "acc_stderr": 0.02950489645459596, + "acc_norm": 0.6938775510204082, + "acc_norm_stderr": 0.02950489645459596 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7932489451476793, + "acc_stderr": 0.0263616516683891, + "acc_norm": 0.7932489451476793, + "acc_norm_stderr": 0.0263616516683891 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.4172099087353325, + "acc_stderr": 0.012593959992906424, + "acc_norm": 0.4172099087353325, + "acc_norm_stderr": 0.012593959992906424 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.7647058823529411, + "acc_stderr": 0.029771775228145638, + "acc_norm": 0.7647058823529411, + "acc_norm_stderr": 0.029771775228145638 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.7515151515151515, + "acc_stderr": 0.03374402644139405, + "acc_norm": 0.7515151515151515, + "acc_norm_stderr": 0.03374402644139405 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 
0.3537331701346389, + "mc1_stderr": 0.01673781435884615, + "mc2": 0.4913880739554196, + "mc2_stderr": 0.015660162184507626 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5218417945690673, + "acc_stderr": 0.017173944474294385, + "acc_norm": 0.5348288075560803, + "acc_norm_stderr": 0.017148598015747422 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "yanolja/EEVE-Korean-Instruct-10.8B-v1.0", + "model_sha": "fb3f5e88e28b6f063f9f3a36c5ae475a31413517", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/yanolja/KoSOLAR-10.7B-v0.2/result_2024-01-18 09:38:23.json b/yanolja/KoSOLAR-10.7B-v0.2/result_2024-01-18 09:38:23.json new file mode 100644 index 0000000000000000000000000000000000000000..8c9def3215858f1c84cf12072ba95dd0e659bb2b --- /dev/null +++ b/yanolja/KoSOLAR-10.7B-v0.2/result_2024-01-18 09:38:23.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.44880546075085326, + "acc_stderr": 0.014534599585097665, + "acc_norm": 0.5051194539249146, + "acc_norm_stderr": 0.014610624890309154 + }, + "harness|ko_hellaswag|10": { + "acc": 0.457876916948815, + 
"acc_stderr": 0.0049720426020013805, + "acc_norm": 0.6228838876717785, + "acc_norm_stderr": 0.004836738514051326 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.6491228070175439, + "acc_stderr": 0.03660298834049162, + "acc_norm": 0.6491228070175439, + "acc_norm_stderr": 0.03660298834049162 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6019417475728155, + "acc_stderr": 0.04846748253977239, + "acc_norm": 0.6019417475728155, + "acc_norm_stderr": 0.04846748253977239 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6513409961685823, + "acc_stderr": 0.01704124314349098, + "acc_norm": 0.6513409961685823, + "acc_norm_stderr": 0.01704124314349098 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4740740740740741, + "acc_stderr": 0.04313531696750573, + "acc_norm": 0.4740740740740741, + "acc_norm_stderr": 0.04313531696750573 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4723404255319149, + "acc_stderr": 0.03263597118409769, + "acc_norm": 0.4723404255319149, + "acc_norm_stderr": 0.03263597118409769 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.46987951807228917, + "acc_stderr": 0.03885425420866767, + "acc_norm": 0.46987951807228917, + "acc_norm_stderr": 0.03885425420866767 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6270096463022508, + "acc_stderr": 0.027466610213140112, + "acc_norm": 0.6270096463022508, + "acc_norm_stderr": 0.027466610213140112 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.6188340807174888, + "acc_stderr": 0.03259625118416827, + "acc_norm": 0.6188340807174888, + "acc_norm_stderr": 0.03259625118416827 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6564885496183206, + "acc_stderr": 0.041649760719448786, + "acc_norm": 0.6564885496183206, + "acc_norm_stderr": 0.041649760719448786 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.44, + 
"acc_stderr": 0.0498887651569859, + "acc_norm": 0.44, + "acc_norm_stderr": 0.0498887651569859 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7171717171717171, + "acc_stderr": 0.03208779558786753, + "acc_norm": 0.7171717171717171, + "acc_norm_stderr": 0.03208779558786753 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5103448275862069, + "acc_stderr": 0.04165774775728763, + "acc_norm": 0.5103448275862069, + "acc_norm_stderr": 0.04165774775728763 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.04488482852329017, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.04488482852329017 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5882352941176471, + "acc_stderr": 0.031968769891957786, + "acc_norm": 0.5882352941176471, + "acc_norm_stderr": 0.031968769891957786 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5025641025641026, + "acc_stderr": 0.025350672979412184, + "acc_norm": 0.5025641025641026, + "acc_norm_stderr": 0.025350672979412184 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.63, + "acc_stderr": 0.04852365870939098, + "acc_norm": 0.63, + "acc_norm_stderr": 0.04852365870939098 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6203703703703703, + "acc_stderr": 0.04691521224077742, + "acc_norm": 0.6203703703703703, + "acc_norm_stderr": 0.04691521224077742 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4482758620689655, + "acc_stderr": 0.03499113137676744, + "acc_norm": 0.4482758620689655, + "acc_norm_stderr": 0.03499113137676744 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5903225806451613, + "acc_stderr": 0.02797605491534737, + "acc_norm": 0.5903225806451613, + "acc_norm_stderr": 0.02797605491534737 + }, + "harness|ko_mmlu_marketing|5": { + 
"acc": 0.811965811965812, + "acc_stderr": 0.025598193686652247, + "acc_norm": 0.811965811965812, + "acc_norm_stderr": 0.025598193686652247 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5207547169811321, + "acc_stderr": 0.030746349975723463, + "acc_norm": 0.5207547169811321, + "acc_norm_stderr": 0.030746349975723463 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6, + "acc_stderr": 0.0469237132203465, + "acc_norm": 0.6, + "acc_norm_stderr": 0.0469237132203465 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.027940457136228416, + "acc_norm": 0.3, + "acc_norm_stderr": 0.027940457136228416 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33112582781456956, + "acc_stderr": 0.038425817186598696, + "acc_norm": 0.33112582781456956, + "acc_norm_stderr": 0.038425817186598696 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7114427860696517, + "acc_stderr": 0.03203841040213321, + "acc_norm": 0.7114427860696517, + "acc_norm_stderr": 0.03203841040213321 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.48554913294797686, + "acc_stderr": 0.03810871630454764, + "acc_norm": 0.48554913294797686, + "acc_norm_stderr": 0.03810871630454764 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.02510742548113727, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.02510742548113727 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5, + "acc_stderr": 0.04181210050035455, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04181210050035455 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.76, + "acc_stderr": 0.04292346959909284, + "acc_norm": 0.76, + "acc_norm_stderr": 0.04292346959909284 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.615606936416185, + "acc_stderr": 
0.026189666966272035, + "acc_norm": 0.615606936416185, + "acc_norm_stderr": 0.026189666966272035 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5337423312883436, + "acc_stderr": 0.039194155450484096, + "acc_norm": 0.5337423312883436, + "acc_norm_stderr": 0.039194155450484096 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6080246913580247, + "acc_stderr": 0.027163686038271146, + "acc_norm": 0.6080246913580247, + "acc_norm_stderr": 0.027163686038271146 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.694300518134715, + "acc_stderr": 0.033248379397581594, + "acc_norm": 0.694300518134715, + "acc_norm_stderr": 0.033248379397581594 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.41228070175438597, + "acc_stderr": 0.04630653203366596, + "acc_norm": 0.41228070175438597, + "acc_norm_stderr": 0.04630653203366596 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6660550458715596, + "acc_stderr": 0.020220554196736403, + "acc_norm": 0.6660550458715596, + "acc_norm_stderr": 0.020220554196736403 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.04360314860077459, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.04360314860077459 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5980392156862745, + "acc_stderr": 0.028074158947600656, + "acc_norm": 0.5980392156862745, + "acc_norm_stderr": 0.028074158947600656 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.59, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.59, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7024793388429752, + "acc_stderr": 0.04173349148083499, + "acc_norm": 0.7024793388429752, + "acc_norm_stderr": 0.04173349148083499 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 
0.5855263157894737, + "acc_stderr": 0.04008973785779205, + "acc_norm": 0.5855263157894737, + "acc_norm_stderr": 0.04008973785779205 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5294117647058824, + "acc_stderr": 0.020192808271433788, + "acc_norm": 0.5294117647058824, + "acc_norm_stderr": 0.020192808271433788 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3900709219858156, + "acc_stderr": 0.02909767559946393, + "acc_norm": 0.3900709219858156, + "acc_norm_stderr": 0.02909767559946393 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.36607142857142855, + "acc_stderr": 0.04572372358737431, + "acc_norm": 0.36607142857142855, + "acc_norm_stderr": 0.04572372358737431 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.47685185185185186, + "acc_stderr": 0.034063153607115086, + "acc_norm": 0.47685185185185186, + "acc_norm_stderr": 0.034063153607115086 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23575418994413408, + "acc_stderr": 0.014196375686290803, + "acc_norm": 0.23575418994413408, + "acc_norm_stderr": 0.014196375686290803 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.71, + "acc_stderr": 0.04560480215720684, + "acc_norm": 0.71, + "acc_norm_stderr": 0.04560480215720684 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4742647058823529, + "acc_stderr": 0.03033257809455504, + "acc_norm": 0.4742647058823529, + "acc_norm_stderr": 0.03033257809455504 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6530612244897959, + "acc_stderr": 0.0304725260267265, + "acc_norm": 0.6530612244897959, + "acc_norm_stderr": 0.0304725260267265 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7552742616033755, + "acc_stderr": 0.027985699387036423, + "acc_norm": 0.7552742616033755, + "acc_norm_stderr": 0.027985699387036423 + }, + 
"harness|ko_mmlu_professional_law|5": { + "acc": 0.3878748370273794, + "acc_stderr": 0.01244499830967564, + "acc_norm": 0.3878748370273794, + "acc_norm_stderr": 0.01244499830967564 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.7205882352941176, + "acc_stderr": 0.03149328104507955, + "acc_norm": 0.7205882352941176, + "acc_norm_stderr": 0.03149328104507955 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.7090909090909091, + "acc_stderr": 0.03546563019624336, + "acc_norm": 0.7090909090909091, + "acc_norm_stderr": 0.03546563019624336 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.31456548347613217, + "mc1_stderr": 0.01625524199317919, + "mc2": 0.47313646366309897, + "mc2_stderr": 0.015324905862175045 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.615112160566706, + "acc_stderr": 0.016728579701498644, + "acc_norm": 0.6422668240850059, + "acc_norm_stderr": 0.016479808935749983 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + 
"harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "yanolja/KoSOLAR-10.7B-v0.2", + "model_sha": "617e58a6d73279425f559c440086493f8bc81d10", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git 
a/yanolja/KoSOLAR-10.7B-v0.3/result_2024-02-07 17:23:58.json b/yanolja/KoSOLAR-10.7B-v0.3/result_2024-02-07 17:23:58.json new file mode 100644 index 0000000000000000000000000000000000000000..77fee3e807eb4c0fe563b6899bbbabc46ad7afd2 --- /dev/null +++ b/yanolja/KoSOLAR-10.7B-v0.3/result_2024-02-07 17:23:58.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4684300341296928, + "acc_stderr": 0.014582236460866977, + "acc_norm": 0.5315699658703071, + "acc_norm_stderr": 0.014582236460866965 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4644493128858793, + "acc_stderr": 0.004977152746478588, + "acc_norm": 0.6420035849432384, + "acc_norm_stderr": 0.004784312972495387 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.7192982456140351, + "acc_stderr": 0.034462962170884265, + "acc_norm": 0.7192982456140351, + "acc_norm_stderr": 0.034462962170884265 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6990291262135923, + "acc_stderr": 0.04541609446503947, + "acc_norm": 0.6990291262135923, + "acc_norm_stderr": 0.04541609446503947 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.7254150702426565, + "acc_stderr": 0.01595982993308406, + "acc_norm": 0.7254150702426565, + "acc_norm_stderr": 0.01595982993308406 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45185185185185184, + "acc_stderr": 0.042992689054808624, + "acc_norm": 0.45185185185185184, + "acc_norm_stderr": 0.042992689054808624 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.5446808510638298, + "acc_stderr": 0.032555253593403555, + "acc_norm": 0.5446808510638298, + "acc_norm_stderr": 0.032555253593403555 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.5180722891566265, + "acc_stderr": 0.03889951252827216, + "acc_norm": 0.5180722891566265, + "acc_norm_stderr": 0.03889951252827216 + }, + 
"harness|ko_mmlu_philosophy|5": { + "acc": 0.6141479099678456, + "acc_stderr": 0.027648149599751464, + "acc_norm": 0.6141479099678456, + "acc_norm_stderr": 0.027648149599751464 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5829596412556054, + "acc_stderr": 0.03309266936071721, + "acc_norm": 0.5829596412556054, + "acc_norm_stderr": 0.03309266936071721 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6412213740458015, + "acc_stderr": 0.04206739313864908, + "acc_norm": 0.6412213740458015, + "acc_norm_stderr": 0.04206739313864908 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7424242424242424, + "acc_stderr": 0.031156269519646857, + "acc_norm": 0.7424242424242424, + "acc_norm_stderr": 0.031156269519646857 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5241379310344828, + "acc_stderr": 0.0416180850350153, + "acc_norm": 0.5241379310344828, + "acc_norm_stderr": 0.0416180850350153 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3431372549019608, + "acc_stderr": 0.04724007352383887, + "acc_norm": 0.3431372549019608, + "acc_norm_stderr": 0.04724007352383887 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.6512605042016807, + "acc_stderr": 0.030956636328566548, + "acc_norm": 0.6512605042016807, + "acc_norm_stderr": 0.030956636328566548 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5615384615384615, + "acc_stderr": 0.025158266016868616, + "acc_norm": 0.5615384615384615, + "acc_norm_stderr": 0.025158266016868616 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.59, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.59, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 
+ }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6018518518518519, + "acc_stderr": 0.04732332615978813, + "acc_norm": 0.6018518518518519, + "acc_norm_stderr": 0.04732332615978813 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.458128078817734, + "acc_stderr": 0.03505630140785741, + "acc_norm": 0.458128078817734, + "acc_norm_stderr": 0.03505630140785741 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6290322580645161, + "acc_stderr": 0.02748054188795359, + "acc_norm": 0.6290322580645161, + "acc_norm_stderr": 0.02748054188795359 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.782051282051282, + "acc_stderr": 0.02704685763071666, + "acc_norm": 0.782051282051282, + "acc_norm_stderr": 0.02704685763071666 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5735849056603773, + "acc_stderr": 0.030437794342983052, + "acc_norm": 0.5735849056603773, + "acc_norm_stderr": 0.030437794342983052 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6090909090909091, + "acc_stderr": 0.0467375233367024, + "acc_norm": 0.6090909090909091, + "acc_norm_stderr": 0.0467375233367024 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.34074074074074073, + "acc_stderr": 0.028897748741131137, + "acc_norm": 0.34074074074074073, + "acc_norm_stderr": 0.028897748741131137 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3576158940397351, + "acc_stderr": 0.03913453431177258, + "acc_norm": 0.3576158940397351, + "acc_norm_stderr": 0.03913453431177258 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7164179104477612, + "acc_stderr": 0.03187187537919796, + "acc_norm": 0.7164179104477612, + "acc_norm_stderr": 0.03187187537919796 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5260115606936416, + "acc_stderr": 0.03807301726504513, + "acc_norm": 0.5260115606936416, + "acc_norm_stderr": 0.03807301726504513 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.373015873015873, + "acc_stderr": 
0.02490699045899257, + "acc_norm": 0.373015873015873, + "acc_norm_stderr": 0.02490699045899257 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5763888888888888, + "acc_stderr": 0.041321250197233685, + "acc_norm": 0.5763888888888888, + "acc_norm_stderr": 0.041321250197233685 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.8, + "acc_stderr": 0.040201512610368445, + "acc_norm": 0.8, + "acc_norm_stderr": 0.040201512610368445 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.615606936416185, + "acc_stderr": 0.026189666966272035, + "acc_norm": 0.615606936416185, + "acc_norm_stderr": 0.026189666966272035 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5521472392638037, + "acc_stderr": 0.03906947479456607, + "acc_norm": 0.5521472392638037, + "acc_norm_stderr": 0.03906947479456607 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6358024691358025, + "acc_stderr": 0.026774929899722324, + "acc_norm": 0.6358024691358025, + "acc_norm_stderr": 0.026774929899722324 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7305699481865285, + "acc_stderr": 0.032018671228777947, + "acc_norm": 0.7305699481865285, + "acc_norm_stderr": 0.032018671228777947 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.42105263157894735, + "acc_stderr": 0.04644602091222317, + "acc_norm": 0.42105263157894735, + "acc_norm_stderr": 0.04644602091222317 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.708256880733945, + "acc_stderr": 0.019489300968876532, + "acc_norm": 0.708256880733945, + "acc_norm_stderr": 0.019489300968876532 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.42063492063492064, + 
"acc_stderr": 0.04415438226743743, + "acc_norm": 0.42063492063492064, + "acc_norm_stderr": 0.04415438226743743 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.6503267973856209, + "acc_stderr": 0.0273053080762747, + "acc_norm": 0.6503267973856209, + "acc_norm_stderr": 0.0273053080762747 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.59, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.59, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6859504132231405, + "acc_stderr": 0.042369647530410184, + "acc_norm": 0.6859504132231405, + "acc_norm_stderr": 0.042369647530410184 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.625, + "acc_stderr": 0.039397364351956274, + "acc_norm": 0.625, + "acc_norm_stderr": 0.039397364351956274 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5196078431372549, + "acc_stderr": 0.020212274976302957, + "acc_norm": 0.5196078431372549, + "acc_norm_stderr": 0.020212274976302957 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3900709219858156, + "acc_stderr": 0.029097675599463926, + "acc_norm": 0.3900709219858156, + "acc_norm_stderr": 0.029097675599463926 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.375, + "acc_stderr": 0.04595091388086298, + "acc_norm": 0.375, + "acc_norm_stderr": 0.04595091388086298 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.5277777777777778, + "acc_stderr": 0.0340470532865388, + "acc_norm": 0.5277777777777778, + "acc_norm_stderr": 0.0340470532865388 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2122905027932961, + "acc_stderr": 0.013676644685831728, + "acc_norm": 0.2122905027932961, + "acc_norm_stderr": 0.013676644685831728 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.67, + "acc_stderr": 
0.047258156262526066, + "acc_norm": 0.67, + "acc_norm_stderr": 0.047258156262526066 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5588235294117647, + "acc_stderr": 0.03016191193076711, + "acc_norm": 0.5588235294117647, + "acc_norm_stderr": 0.03016191193076711 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6816326530612244, + "acc_stderr": 0.02982253379398204, + "acc_norm": 0.6816326530612244, + "acc_norm_stderr": 0.02982253379398204 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.759493670886076, + "acc_stderr": 0.027820781981149678, + "acc_norm": 0.759493670886076, + "acc_norm_stderr": 0.027820781981149678 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.4048239895697523, + "acc_stderr": 0.012536743830953987, + "acc_norm": 0.4048239895697523, + "acc_norm_stderr": 0.012536743830953987 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.7107843137254902, + "acc_stderr": 0.03182231867647553, + "acc_norm": 0.7107843137254902, + "acc_norm_stderr": 0.03182231867647553 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.7090909090909091, + "acc_stderr": 0.03546563019624336, + "acc_norm": 0.7090909090909091, + "acc_norm_stderr": 0.03546563019624336 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3047735618115055, + "mc1_stderr": 0.01611412415688247, + "mc2": 0.45106184267841365, + "mc2_stderr": 0.015222724606739416 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.6080283353010626, + "acc_stderr": 0.01678433211942408, + "acc_norm": 0.6245572609208973, + "acc_norm_stderr": 0.016648411589511088 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + 
"harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 
1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "yanolja/KoSOLAR-10.7B-v0.3", + "model_sha": "ea5f0880a6532c39c36d91a596e3782de7bb9543", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/yeen214/llama2_7b_small_tuning_v1/result_2023-10-03 06:58:13.json b/yeen214/llama2_7b_small_tuning_v1/result_2023-10-03 06:58:13.json new file mode 100644 index 0000000000000000000000000000000000000000..2169cf6e3f6ce61affa2400bcef932950ebb446e --- /dev/null +++ b/yeen214/llama2_7b_small_tuning_v1/result_2023-10-03 06:58:13.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.20563139931740615, + "acc_stderr": 0.011810745260742585, + "acc_norm": 0.25853242320819114, + "acc_norm_stderr": 0.012794553754288666 + }, + "harness|ko_hellaswag|10": { + "acc": 0.252141007767377, + "acc_stderr": 0.004333543083293473, + "acc_norm": 0.24278032264489147, + "acc_norm_stderr": 0.004278871104930363 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.25146198830409355, + "acc_stderr": 0.033275044238468436, + "acc_norm": 0.25146198830409355, + "acc_norm_stderr": 0.033275044238468436 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.1941747572815534, + "acc_stderr": 0.03916667762822584, + "acc_norm": 0.1941747572815534, + "acc_norm_stderr": 0.03916667762822584 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2720306513409962, + "acc_stderr": 0.015913367447500517, + "acc_norm": 0.2720306513409962, + "acc_norm_stderr": 0.015913367447500517 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 
0.34074074074074073, + "acc_stderr": 0.040943762699967946, + "acc_norm": 0.34074074074074073, + "acc_norm_stderr": 0.040943762699967946 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2127659574468085, + "acc_stderr": 0.026754391348039787, + "acc_norm": 0.2127659574468085, + "acc_norm_stderr": 0.026754391348039787 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.20481927710843373, + "acc_stderr": 0.03141784291663925, + "acc_norm": 0.20481927710843373, + "acc_norm_stderr": 0.03141784291663925 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3086816720257235, + "acc_stderr": 0.026236965881153266, + "acc_norm": 0.3086816720257235, + "acc_norm_stderr": 0.026236965881153266 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.20179372197309417, + "acc_stderr": 0.026936111912802263, + "acc_norm": 0.20179372197309417, + "acc_norm_stderr": 0.026936111912802263 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.22900763358778625, + "acc_stderr": 0.036853466317118506, + "acc_norm": 0.22900763358778625, + "acc_norm_stderr": 0.036853466317118506 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.19, + "acc_stderr": 0.03942772444036623, + "acc_norm": 0.19, + "acc_norm_stderr": 0.03942772444036623 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.2474747474747475, + "acc_stderr": 0.030746300742124495, + "acc_norm": 0.2474747474747475, + "acc_norm_stderr": 0.030746300742124495 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.296551724137931, + "acc_stderr": 0.038061426873099935, + "acc_norm": 0.296551724137931, + "acc_norm_stderr": 0.038061426873099935 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.040925639582376556, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.040925639582376556 + }, + 
"harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.18067226890756302, + "acc_stderr": 0.024991964966600756, + "acc_norm": 0.18067226890756302, + "acc_norm_stderr": 0.024991964966600756 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.022421273612923714, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.022421273612923714 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.31, + "acc_stderr": 0.046482319871173156, + "acc_norm": 0.31, + "acc_norm_stderr": 0.046482319871173156 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.21296296296296297, + "acc_stderr": 0.03957835471980981, + "acc_norm": 0.21296296296296297, + "acc_norm_stderr": 0.03957835471980981 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.03178529710642749, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.03178529710642749 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.22258064516129034, + "acc_stderr": 0.02366421667164252, + "acc_norm": 0.22258064516129034, + "acc_norm_stderr": 0.02366421667164252 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.24786324786324787, + "acc_stderr": 0.028286324075564386, + "acc_norm": 0.24786324786324787, + "acc_norm_stderr": 0.028286324075564386 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2188679245283019, + "acc_stderr": 0.025447863825108597, + "acc_norm": 0.2188679245283019, + "acc_norm_stderr": 0.025447863825108597 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.21818181818181817, + "acc_stderr": 0.03955932861795833, + "acc_norm": 0.21818181818181817, + "acc_norm_stderr": 0.03955932861795833 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.026842057873833706, + "acc_norm": 
0.26296296296296295, + "acc_norm_stderr": 0.026842057873833706 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + "acc_stderr": 0.03734535676787198, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.03734535676787198 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.2537313432835821, + "acc_stderr": 0.03076944496729602, + "acc_norm": 0.2537313432835821, + "acc_norm_stderr": 0.03076944496729602 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.24277456647398843, + "acc_stderr": 0.0326926380614177, + "acc_norm": 0.24277456647398843, + "acc_norm_stderr": 0.0326926380614177 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.24074074074074073, + "acc_stderr": 0.022019080012217897, + "acc_norm": 0.24074074074074073, + "acc_norm_stderr": 0.022019080012217897 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2569444444444444, + "acc_stderr": 0.03653946969442099, + "acc_norm": 0.2569444444444444, + "acc_norm_stderr": 0.03653946969442099 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.18, + "acc_stderr": 0.03861229196653694, + "acc_norm": 0.18, + "acc_norm_stderr": 0.03861229196653694 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2745664739884393, + "acc_stderr": 0.024027745155265026, + "acc_norm": 0.2745664739884393, + "acc_norm_stderr": 0.024027745155265026 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2883435582822086, + "acc_stderr": 0.035590395316173425, + "acc_norm": 0.2883435582822086, + "acc_norm_stderr": 0.035590395316173425 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.27469135802469136, + "acc_stderr": 0.024836057868294688, + "acc_norm": 0.27469135802469136, + "acc_norm_stderr": 0.024836057868294688 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.22, + "acc_stderr": 0.041633319989322695, + 
"acc_norm": 0.22, + "acc_norm_stderr": 0.041633319989322695 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.20725388601036268, + "acc_stderr": 0.02925282329180363, + "acc_norm": 0.20725388601036268, + "acc_norm_stderr": 0.02925282329180363 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.03999423879281336, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.03999423879281336 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.20550458715596331, + "acc_stderr": 0.01732435232501601, + "acc_norm": 0.20550458715596331, + "acc_norm_stderr": 0.01732435232501601 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.21428571428571427, + "acc_stderr": 0.03670066451047181, + "acc_norm": 0.21428571428571427, + "acc_norm_stderr": 0.03670066451047181 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.024288619466046095, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.024288619466046095 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.22, + "acc_stderr": 0.04163331998932267, + "acc_norm": 0.22, + "acc_norm_stderr": 0.04163331998932267 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.3140495867768595, + "acc_stderr": 0.04236964753041019, + "acc_norm": 0.3140495867768595, + "acc_norm_stderr": 0.04236964753041019 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.29605263157894735, + "acc_stderr": 0.03715062154998905, + "acc_norm": 0.29605263157894735, + "acc_norm_stderr": 0.03715062154998905 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.27941176470588236, + "acc_stderr": 0.01815287105153882, + "acc_norm": 0.27941176470588236, + "acc_norm_stderr": 0.01815287105153882 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.26595744680851063, + "acc_stderr": 0.02635806569888059, + "acc_norm": 0.26595744680851063, + "acc_norm_stderr": 0.02635806569888059 + }, + "harness|ko_mmlu_machine_learning|5": { + 
"acc": 0.25, + "acc_stderr": 0.04109974682633932, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04109974682633932 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.028353212866863445, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.028353212866863445 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2446927374301676, + "acc_stderr": 0.014378169884098426, + "acc_norm": 0.2446927374301676, + "acc_norm_stderr": 0.014378169884098426 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.1875, + "acc_stderr": 0.023709788253811766, + "acc_norm": 0.1875, + "acc_norm_stderr": 0.023709788253811766 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.22040816326530613, + "acc_stderr": 0.026537045312145294, + "acc_norm": 0.22040816326530613, + "acc_norm_stderr": 0.026537045312145294 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2742616033755274, + "acc_stderr": 0.029041333510598035, + "acc_norm": 0.2742616033755274, + "acc_norm_stderr": 0.029041333510598035 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.27053455019556716, + "acc_stderr": 0.01134599674353926, + "acc_norm": 0.27053455019556716, + "acc_norm_stderr": 0.01134599674353926 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.28921568627450983, + "acc_stderr": 0.03182231867647554, + "acc_norm": 0.28921568627450983, + "acc_norm_stderr": 0.03182231867647554 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2545454545454545, + "acc_stderr": 0.03401506715249039, + "acc_norm": 0.2545454545454545, + "acc_norm_stderr": 0.03401506715249039 + }, + 
"harness|ko_truthfulqa_mc|0": { + "mc1": 0.21909424724602203, + "mc1_stderr": 0.014480038578757447, + "mc2": NaN, + "mc2_stderr": NaN + }, + "harness|ko_commongen_v2|2": { + "acc": 0.08264462809917356, + "acc_stderr": 0.009466532659719994, + "acc_norm": 0.21959858323494688, + "acc_norm_stderr": 0.014232743085580271 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "yeen214/llama2_7b_small_tuning_v1", + "model_sha": "3f9b43b4db2da4fe3785071dd52c9fc92aa0801d", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/yeen214/test_llama2_7b/result_2023-10-01 02:38:38.json b/yeen214/test_llama2_7b/result_2023-10-01 02:38:38.json new file mode 100644 index 0000000000000000000000000000000000000000..a5cf8e5dbe99c26910f9bf1098e79b741799abbb --- /dev/null +++ b/yeen214/test_llama2_7b/result_2023-10-01 02:38:38.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2713310580204778, + "acc_stderr": 0.012993807727545787, + "acc_norm": 0.310580204778157, + "acc_norm_stderr": 0.013522292098053055 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3331009759012149, + "acc_stderr": 
0.004703590558552501, + "acc_norm": 0.41127265484963155, + "acc_norm_stderr": 0.004910588449330016 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.47953216374269003, + "acc_stderr": 0.038316105328219316, + "acc_norm": 0.47953216374269003, + "acc_norm_stderr": 0.038316105328219316 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.3106796116504854, + "acc_stderr": 0.045821241601615506, + "acc_norm": 0.3106796116504854, + "acc_norm_stderr": 0.045821241601615506 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.36909323116219667, + "acc_stderr": 0.017256283109124613, + "acc_norm": 0.36909323116219667, + "acc_norm_stderr": 0.017256283109124613 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3037037037037037, + "acc_stderr": 0.03972552884785138, + "acc_norm": 0.3037037037037037, + "acc_norm_stderr": 0.03972552884785138 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3574468085106383, + "acc_stderr": 0.03132941789476425, + "acc_norm": 0.3574468085106383, + "acc_norm_stderr": 0.03132941789476425 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.25903614457831325, + "acc_stderr": 0.03410646614071857, + "acc_norm": 0.25903614457831325, + "acc_norm_stderr": 0.03410646614071857 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.3279742765273312, + "acc_stderr": 0.02666441088693762, + "acc_norm": 0.3279742765273312, + "acc_norm_stderr": 0.02666441088693762 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.35874439461883406, + "acc_stderr": 0.032190792004199956, + "acc_norm": 0.35874439461883406, + "acc_norm_stderr": 0.032190792004199956 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.366412213740458, + "acc_stderr": 0.04225875451969638, + "acc_norm": 0.366412213740458, + "acc_norm_stderr": 0.04225875451969638 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.31, + 
"acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.3282828282828283, + "acc_stderr": 0.03345678422756777, + "acc_norm": 0.3282828282828283, + "acc_norm_stderr": 0.03345678422756777 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3586206896551724, + "acc_stderr": 0.039966295748767186, + "acc_norm": 0.3586206896551724, + "acc_norm_stderr": 0.039966295748767186 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.16666666666666666, + "acc_stderr": 0.03708284662416545, + "acc_norm": 0.16666666666666666, + "acc_norm_stderr": 0.03708284662416545 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3235294117647059, + "acc_stderr": 0.030388353551886845, + "acc_norm": 0.3235294117647059, + "acc_norm_stderr": 0.030388353551886845 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.30512820512820515, + "acc_stderr": 0.023346335293325887, + "acc_norm": 0.30512820512820515, + "acc_norm_stderr": 0.023346335293325887 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.24, + "acc_stderr": 0.042923469599092816, + "acc_norm": 0.24, + "acc_norm_stderr": 0.042923469599092816 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.39814814814814814, + "acc_stderr": 0.04732332615978814, + "acc_norm": 0.39814814814814814, + "acc_norm_stderr": 0.04732332615978814 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.33497536945812806, + "acc_stderr": 0.033208527423483104, + "acc_norm": 0.33497536945812806, + "acc_norm_stderr": 0.033208527423483104 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.32903225806451614, + "acc_stderr": 0.02672949906834997, + "acc_norm": 0.32903225806451614, + "acc_norm_stderr": 0.02672949906834997 + }, + 
"harness|ko_mmlu_marketing|5": { + "acc": 0.49145299145299143, + "acc_stderr": 0.032751303000970296, + "acc_norm": 0.49145299145299143, + "acc_norm_stderr": 0.032751303000970296 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3018867924528302, + "acc_stderr": 0.028254200344438676, + "acc_norm": 0.3018867924528302, + "acc_norm_stderr": 0.028254200344438676 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.35454545454545455, + "acc_stderr": 0.04582004841505415, + "acc_norm": 0.35454545454545455, + "acc_norm_stderr": 0.04582004841505415 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2814814814814815, + "acc_stderr": 0.027420019350945273, + "acc_norm": 0.2814814814814815, + "acc_norm_stderr": 0.027420019350945273 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2119205298013245, + "acc_stderr": 0.033367670865679766, + "acc_norm": 0.2119205298013245, + "acc_norm_stderr": 0.033367670865679766 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.4577114427860697, + "acc_stderr": 0.03522865864099597, + "acc_norm": 0.4577114427860697, + "acc_norm_stderr": 0.03522865864099597 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.26011560693641617, + "acc_stderr": 0.03345036916788991, + "acc_norm": 0.26011560693641617, + "acc_norm_stderr": 0.03345036916788991 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2751322751322751, + "acc_stderr": 0.02300008685906864, + "acc_norm": 0.2751322751322751, + "acc_norm_stderr": 0.02300008685906864 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2847222222222222, + "acc_stderr": 0.03773809990686935, + "acc_norm": 0.2847222222222222, + "acc_norm_stderr": 0.03773809990686935 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956913, + "acc_norm": 0.49, + 
"acc_norm_stderr": 0.05024183937956913 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.37283236994219654, + "acc_stderr": 0.026033890613576288, + "acc_norm": 0.37283236994219654, + "acc_norm_stderr": 0.026033890613576288 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2822085889570552, + "acc_stderr": 0.03536117886664742, + "acc_norm": 0.2822085889570552, + "acc_norm_stderr": 0.03536117886664742 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.36728395061728397, + "acc_stderr": 0.02682280175950789, + "acc_norm": 0.36728395061728397, + "acc_norm_stderr": 0.02682280175950789 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.35751295336787564, + "acc_stderr": 0.03458816042181007, + "acc_norm": 0.35751295336787564, + "acc_norm_stderr": 0.03458816042181007 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.23684210526315788, + "acc_stderr": 0.039994238792813365, + "acc_norm": 0.23684210526315788, + "acc_norm_stderr": 0.039994238792813365 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.28807339449541286, + "acc_stderr": 0.019416445892636018, + "acc_norm": 0.28807339449541286, + "acc_norm_stderr": 0.019416445892636018 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.29365079365079366, + "acc_stderr": 0.04073524322147126, + "acc_norm": 0.29365079365079366, + "acc_norm_stderr": 0.04073524322147126 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.027826109307283683, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.027826109307283683 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5206611570247934, + "acc_stderr": 
0.04560456086387235, + "acc_norm": 0.5206611570247934, + "acc_norm_stderr": 0.04560456086387235 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3223684210526316, + "acc_stderr": 0.03803510248351586, + "acc_norm": 0.3223684210526316, + "acc_norm_stderr": 0.03803510248351586 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3088235294117647, + "acc_stderr": 0.018690850273595284, + "acc_norm": 0.3088235294117647, + "acc_norm_stderr": 0.018690850273595284 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2872340425531915, + "acc_stderr": 0.026992199173064356, + "acc_norm": 0.2872340425531915, + "acc_norm_stderr": 0.026992199173064356 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3125, + "acc_stderr": 0.043994650575715215, + "acc_norm": 0.3125, + "acc_norm_stderr": 0.043994650575715215 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.37962962962962965, + "acc_stderr": 0.03309682581119035, + "acc_norm": 0.37962962962962965, + "acc_norm_stderr": 0.03309682581119035 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2435754189944134, + "acc_stderr": 0.01435591196476786, + "acc_norm": 0.2435754189944134, + "acc_norm_stderr": 0.01435591196476786 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.22426470588235295, + "acc_stderr": 0.025336848563332386, + "acc_norm": 0.22426470588235295, + "acc_norm_stderr": 0.025336848563332386 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.39591836734693875, + "acc_stderr": 0.03130802899065685, + "acc_norm": 0.39591836734693875, + "acc_norm_stderr": 0.03130802899065685 + }, + 
"harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.35864978902953587, + "acc_stderr": 0.031219569445301854, + "acc_norm": 0.35864978902953587, + "acc_norm_stderr": 0.031219569445301854 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2685788787483703, + "acc_stderr": 0.011320056629121734, + "acc_norm": 0.2685788787483703, + "acc_norm_stderr": 0.011320056629121734 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.3088235294117647, + "acc_stderr": 0.03242661719827218, + "acc_norm": 0.3088235294117647, + "acc_norm_stderr": 0.03242661719827218 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.3393939393939394, + "acc_stderr": 0.036974422050315967, + "acc_norm": 0.3393939393939394, + "acc_norm_stderr": 0.036974422050315967 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2729498164014688, + "mc1_stderr": 0.015594753632006516, + "mc2": 0.4392204501367092, + "mc2_stderr": 0.01533439619345391 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.27036599763872493, + "acc_stderr": 0.015270152942068405, + "acc_norm": 0.3530106257378985, + "acc_norm_stderr": 0.016430745982427126 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 
1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "yeen214/test_llama2_7b", + "model_sha": "69a4886f51ed752216cdd7f41a584d14240126f9", + "model_dtype": "torch.float16", + 
"lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/yeen214/test_llama2_ko_7b/result_2023-10-03 06:58:46.json b/yeen214/test_llama2_ko_7b/result_2023-10-03 06:58:46.json new file mode 100644 index 0000000000000000000000000000000000000000..78fb38cbd290a2d6d0a9bd8dbe1fa5f1178b9fee --- /dev/null +++ b/yeen214/test_llama2_ko_7b/result_2023-10-03 06:58:46.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.21416382252559726, + "acc_stderr": 0.011988383205966496, + "acc_norm": 0.257679180887372, + "acc_norm_stderr": 0.012780770562768409 + }, + "harness|ko_hellaswag|10": { + "acc": 0.2524397530372436, + "acc_stderr": 0.004335243434486834, + "acc_norm": 0.25323640709022105, + "acc_norm_stderr": 0.004339764434219064 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.30994152046783624, + "acc_stderr": 0.03546976959393163, + "acc_norm": 0.30994152046783624, + "acc_norm_stderr": 0.03546976959393163 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.22330097087378642, + "acc_stderr": 0.04123553189891431, + "acc_norm": 0.22330097087378642, + "acc_norm_stderr": 0.04123553189891431 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.24265644955300128, + "acc_stderr": 0.015329888940899894, + "acc_norm": 0.24265644955300128, + "acc_norm_stderr": 0.015329888940899894 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.28888888888888886, + "acc_stderr": 0.03915450630414251, + "acc_norm": 0.28888888888888886, + "acc_norm_stderr": 0.03915450630414251 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.2170212765957447, + "acc_stderr": 0.026947483121496228, + "acc_norm": 0.2170212765957447, + "acc_norm_stderr": 0.026947483121496228 + }, + 
"harness|ko_mmlu_virology|5": { + "acc": 0.2289156626506024, + "acc_stderr": 0.03270745277352477, + "acc_norm": 0.2289156626506024, + "acc_norm_stderr": 0.03270745277352477 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2829581993569132, + "acc_stderr": 0.025583062489984824, + "acc_norm": 0.2829581993569132, + "acc_norm_stderr": 0.025583062489984824 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.21973094170403587, + "acc_stderr": 0.027790177064383602, + "acc_norm": 0.21973094170403587, + "acc_norm_stderr": 0.027790177064383602 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.20610687022900764, + "acc_stderr": 0.03547771004159462, + "acc_norm": 0.20610687022900764, + "acc_norm_stderr": 0.03547771004159462 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.25252525252525254, + "acc_stderr": 0.030954055470365914, + "acc_norm": 0.25252525252525254, + "acc_norm_stderr": 0.030954055470365914 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2896551724137931, + "acc_stderr": 0.03780019230438014, + "acc_norm": 0.2896551724137931, + "acc_norm_stderr": 0.03780019230438014 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.04336432707993177, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.04336432707993177 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.22268907563025211, + "acc_stderr": 0.027025433498882392, + "acc_norm": 0.22268907563025211, + "acc_norm_stderr": 0.027025433498882392 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2564102564102564, + "acc_stderr": 0.022139081103971545, + "acc_norm": 0.2564102564102564, + "acc_norm_stderr": 0.022139081103971545 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + 
"acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.19, + "acc_stderr": 0.03942772444036623, + "acc_norm": 0.19, + "acc_norm_stderr": 0.03942772444036623 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.04414343666854932, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.04414343666854932 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.22167487684729065, + "acc_stderr": 0.029225575892489614, + "acc_norm": 0.22167487684729065, + "acc_norm_stderr": 0.029225575892489614 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.25161290322580643, + "acc_stderr": 0.02468597928623996, + "acc_norm": 0.25161290322580643, + "acc_norm_stderr": 0.02468597928623996 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.029343114798094472, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.029343114798094472 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.24528301886792453, + "acc_stderr": 0.026480357179895702, + "acc_norm": 0.24528301886792453, + "acc_norm_stderr": 0.026480357179895702 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.17272727272727273, + "acc_stderr": 0.03620691833929219, + "acc_norm": 0.17272727272727273, + "acc_norm_stderr": 0.03620691833929219 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.02684205787383371, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.02684205787383371 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.24503311258278146, + "acc_stderr": 0.03511807571804724, + "acc_norm": 0.24503311258278146, + "acc_norm_stderr": 0.03511807571804724 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.23383084577114427, + "acc_stderr": 0.02992941540834838, + "acc_norm": 0.23383084577114427, + "acc_norm_stderr": 0.02992941540834838 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2658959537572254, + 
"acc_stderr": 0.03368762932259431, + "acc_norm": 0.2658959537572254, + "acc_norm_stderr": 0.03368762932259431 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.02141168439369418, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.02141168439369418 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.03745554791462457, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.03745554791462457 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909284, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909284 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2543352601156069, + "acc_stderr": 0.02344582627654554, + "acc_norm": 0.2543352601156069, + "acc_norm_stderr": 0.02344582627654554 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.2331288343558282, + "acc_stderr": 0.0332201579577674, + "acc_norm": 0.2331288343558282, + "acc_norm_stderr": 0.0332201579577674 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2623456790123457, + "acc_stderr": 0.02447722285613511, + "acc_norm": 0.2623456790123457, + "acc_norm_stderr": 0.02447722285613511 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.30569948186528495, + "acc_stderr": 0.033248379397581594, + "acc_norm": 0.30569948186528495, + "acc_norm_stderr": 0.033248379397581594 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.04049339297748141, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.04049339297748141 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 
0.21834862385321102, + "acc_stderr": 0.017712600528722727, + "acc_norm": 0.21834862385321102, + "acc_norm_stderr": 0.017712600528722727 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.23015873015873015, + "acc_stderr": 0.037649508797906045, + "acc_norm": 0.23015873015873015, + "acc_norm_stderr": 0.037649508797906045 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.023929155517351298, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.023929155517351298 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.18, + "acc_stderr": 0.038612291966536955, + "acc_norm": 0.18, + "acc_norm_stderr": 0.038612291966536955 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.256198347107438, + "acc_stderr": 0.03984979653302871, + "acc_norm": 0.256198347107438, + "acc_norm_stderr": 0.03984979653302871 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.25, + "acc_stderr": 0.03523807393012047, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03523807393012047 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2630718954248366, + "acc_stderr": 0.017812676542320657, + "acc_norm": 0.2630718954248366, + "acc_norm_stderr": 0.017812676542320657 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2553191489361702, + "acc_stderr": 0.02601199293090203, + "acc_norm": 0.2553191489361702, + "acc_norm_stderr": 0.02601199293090203 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.30357142857142855, + "acc_stderr": 0.04364226155841044, + "acc_norm": 0.30357142857142855, + "acc_norm_stderr": 0.04364226155841044 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.30092592592592593, + "acc_stderr": 0.031280390843298825, + "acc_norm": 0.30092592592592593, + "acc_norm_stderr": 0.031280390843298825 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2536312849162011, + "acc_stderr": 0.014551553659369916, + "acc_norm": 0.2536312849162011, + "acc_norm_stderr": 0.014551553659369916 + }, + 
"harness|ko_mmlu_college_computer_science|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.19852941176470587, + "acc_stderr": 0.024231013370541087, + "acc_norm": 0.19852941176470587, + "acc_norm_stderr": 0.024231013370541087 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.24897959183673468, + "acc_stderr": 0.02768297952296023, + "acc_norm": 0.24897959183673468, + "acc_norm_stderr": 0.02768297952296023 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2911392405063291, + "acc_stderr": 0.029571601065753374, + "acc_norm": 0.2911392405063291, + "acc_norm_stderr": 0.029571601065753374 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.25358539765319427, + "acc_stderr": 0.011111715336101138, + "acc_norm": 0.25358539765319427, + "acc_norm_stderr": 0.011111715336101138 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.03019028245350195, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.03019028245350195 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.2787878787878788, + "acc_stderr": 0.03501438706296781, + "acc_norm": 0.2787878787878788, + "acc_norm_stderr": 0.03501438706296781 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.23623011015911874, + "mc1_stderr": 0.014869755015871112, + "mc2": 0.49817574202268433, + "mc2_stderr": 0.016860322660870557 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.09208972845336481, + "acc_stderr": 0.009941270233798432, + "acc_norm": 0.32231404958677684, + "acc_norm_stderr": 0.016068253615813967 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + 
"harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + 
"harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "yeen214/test_llama2_ko_7b", + "model_sha": "45901e1d6ccb22f5ed8aec3f9dd366823fdd1c33", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/yeheun/llama-2-koen-13b-v1.2/result_2023-11-09 07:16:51.json b/yeheun/llama-2-koen-13b-v1.2/result_2023-11-09 07:16:51.json new file mode 100644 index 0000000000000000000000000000000000000000..70c77832393dda48c59e6f24f31276c257dbfc2f --- /dev/null +++ b/yeheun/llama-2-koen-13b-v1.2/result_2023-11-09 07:16:51.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3890784982935154, + "acc_stderr": 0.014247309976045607, + "acc_norm": 0.45819112627986347, + "acc_norm_stderr": 0.014560220308714702 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4206333399721171, + "acc_stderr": 0.004926518439372259, + "acc_norm": 0.5676160127464649, + "acc_norm_stderr": 0.004943945069611462 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.49122807017543857, + "acc_stderr": 0.038342347441649924, + "acc_norm": 0.49122807017543857, + "acc_norm_stderr": 0.038342347441649924 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4563106796116505, + "acc_stderr": 0.049318019942204146, + "acc_norm": 0.4563106796116505, + "acc_norm_stderr": 
0.049318019942204146 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5287356321839081, + "acc_stderr": 0.017850410794380173, + "acc_norm": 0.5287356321839081, + "acc_norm_stderr": 0.017850410794380173 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.5111111111111111, + "acc_stderr": 0.043182754919779756, + "acc_norm": 0.5111111111111111, + "acc_norm_stderr": 0.043182754919779756 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421255, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421255 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.41702127659574467, + "acc_stderr": 0.03223276266711712, + "acc_norm": 0.41702127659574467, + "acc_norm_stderr": 0.03223276266711712 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.40963855421686746, + "acc_stderr": 0.038284011150790206, + "acc_norm": 0.40963855421686746, + "acc_norm_stderr": 0.038284011150790206 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4790996784565916, + "acc_stderr": 0.028373270961069414, + "acc_norm": 0.4790996784565916, + "acc_norm_stderr": 0.028373270961069414 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5022421524663677, + "acc_stderr": 0.03355746535223263, + "acc_norm": 0.5022421524663677, + "acc_norm_stderr": 0.03355746535223263 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4580152671755725, + "acc_stderr": 0.04369802690578757, + "acc_norm": 0.4580152671755725, + "acc_norm_stderr": 0.04369802690578757 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.494949494949495, + "acc_stderr": 0.035621707606254015, + "acc_norm": 0.494949494949495, + "acc_norm_stderr": 0.035621707606254015 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3931034482758621, + "acc_stderr": 0.040703290137070705, + "acc_norm": 0.3931034482758621, + 
"acc_norm_stderr": 0.040703290137070705 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.18627450980392157, + "acc_stderr": 0.03873958714149352, + "acc_norm": 0.18627450980392157, + "acc_norm_stderr": 0.03873958714149352 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.032145368597886394, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.032145368597886394 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4076923076923077, + "acc_stderr": 0.02491524398598784, + "acc_norm": 0.4076923076923077, + "acc_norm_stderr": 0.02491524398598784 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.04826217294139894, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.04826217294139894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3448275862068966, + "acc_stderr": 0.03344283744280458, + "acc_norm": 0.3448275862068966, + "acc_norm_stderr": 0.03344283744280458 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.47096774193548385, + "acc_stderr": 0.028396016402761005, + "acc_norm": 0.47096774193548385, + "acc_norm_stderr": 0.028396016402761005 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5811965811965812, + "acc_stderr": 0.03232128912157792, + "acc_norm": 0.5811965811965812, + "acc_norm_stderr": 0.03232128912157792 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4490566037735849, + "acc_stderr": 0.030612730713641095, + "acc_norm": 0.4490566037735849, + "acc_norm_stderr": 0.030612730713641095 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.43636363636363634, + "acc_stderr": 
0.04750185058907297, + "acc_norm": 0.43636363636363634, + "acc_norm_stderr": 0.04750185058907297 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2740740740740741, + "acc_stderr": 0.027195934804085626, + "acc_norm": 0.2740740740740741, + "acc_norm_stderr": 0.027195934804085626 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2847682119205298, + "acc_stderr": 0.03684881521389023, + "acc_norm": 0.2847682119205298, + "acc_norm_stderr": 0.03684881521389023 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5771144278606966, + "acc_stderr": 0.034932317774212816, + "acc_norm": 0.5771144278606966, + "acc_norm_stderr": 0.034932317774212816 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3872832369942196, + "acc_stderr": 0.03714325906302064, + "acc_norm": 0.3872832369942196, + "acc_norm_stderr": 0.03714325906302064 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.023517294335963286, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.023517294335963286 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3472222222222222, + "acc_stderr": 0.039812405437178615, + "acc_norm": 0.3472222222222222, + "acc_norm_stderr": 0.039812405437178615 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.47109826589595377, + "acc_stderr": 0.026874085883518348, + "acc_norm": 0.47109826589595377, + "acc_norm_stderr": 0.026874085883518348 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.43558282208588955, + "acc_stderr": 0.03895632464138938, + "acc_norm": 0.43558282208588955, + "acc_norm_stderr": 0.03895632464138938 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 
0.4537037037037037, + "acc_stderr": 0.0277012284685426, + "acc_norm": 0.4537037037037037, + "acc_norm_stderr": 0.0277012284685426 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.45595854922279794, + "acc_stderr": 0.035944137112724366, + "acc_norm": 0.45595854922279794, + "acc_norm_stderr": 0.035944137112724366 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.040493392977481404, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.040493392977481404 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5045871559633027, + "acc_stderr": 0.021436420955529424, + "acc_norm": 0.5045871559633027, + "acc_norm_stderr": 0.021436420955529424 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2698412698412698, + "acc_stderr": 0.03970158273235173, + "acc_norm": 0.2698412698412698, + "acc_norm_stderr": 0.03970158273235173 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4150326797385621, + "acc_stderr": 0.028213504177824093, + "acc_norm": 0.4150326797385621, + "acc_norm_stderr": 0.028213504177824093 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6198347107438017, + "acc_stderr": 0.04431324501968432, + "acc_norm": 0.6198347107438017, + "acc_norm_stderr": 0.04431324501968432 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40131578947368424, + "acc_stderr": 0.039889037033362836, + "acc_norm": 0.40131578947368424, + "acc_norm_stderr": 0.039889037033362836 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.01943177567703731, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.01943177567703731 + }, + 
"harness|ko_mmlu_professional_accounting|5": { + "acc": 0.28368794326241137, + "acc_stderr": 0.026891709428343957, + "acc_norm": 0.28368794326241137, + "acc_norm_stderr": 0.026891709428343957 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.25892857142857145, + "acc_stderr": 0.041577515398656284, + "acc_norm": 0.25892857142857145, + "acc_norm_stderr": 0.041577515398656284 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2175925925925926, + "acc_stderr": 0.028139689444859672, + "acc_norm": 0.2175925925925926, + "acc_norm_stderr": 0.028139689444859672 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3860294117647059, + "acc_stderr": 0.029573269134411124, + "acc_norm": 0.3860294117647059, + "acc_norm_stderr": 0.029573269134411124 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.40408163265306124, + "acc_stderr": 0.0314147080258659, + "acc_norm": 0.40408163265306124, + "acc_norm_stderr": 0.0314147080258659 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6075949367088608, + "acc_stderr": 0.0317847187456473, + "acc_norm": 0.6075949367088608, + "acc_norm_stderr": 0.0317847187456473 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3050847457627119, + "acc_stderr": 0.011759939618085455, + "acc_norm": 0.3050847457627119, + "acc_norm_stderr": 0.011759939618085455 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.43137254901960786, + "acc_stderr": 0.03476099060501637, + 
"acc_norm": 0.43137254901960786, + "acc_norm_stderr": 0.03476099060501637 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5575757575757576, + "acc_stderr": 0.03878372113711275, + "acc_norm": 0.5575757575757576, + "acc_norm_stderr": 0.03878372113711275 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2607099143206854, + "mc1_stderr": 0.015368841620766368, + "mc2": 0.4100851120970672, + "mc2_stderr": 0.014797143070922393 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5029515938606848, + "acc_stderr": 0.017190054580194694, + "acc_norm": 0.5914994096812278, + "acc_norm_stderr": 0.016900062879427125 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + 
"harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "yeheun/llama-2-koen-13b-v1.2", + "model_sha": "cb9e8ff37d427ab588d666b5c6994498a10084de", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/yeheun/llama-2-koen-13b-v1.3/result_2023-11-09 19:30:39.json b/yeheun/llama-2-koen-13b-v1.3/result_2023-11-09 19:30:39.json new file mode 100644 index 0000000000000000000000000000000000000000..fdd5d53de94942b453250786e36356cb68919f62 --- /dev/null +++ b/yeheun/llama-2-koen-13b-v1.3/result_2023-11-09 19:30:39.json 
@@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3728668941979522, + "acc_stderr": 0.014131176760131176, + "acc_norm": 0.42406143344709896, + "acc_norm_stderr": 0.014441889627464398 + }, + "harness|ko_hellaswag|10": { + "acc": 0.39852619000199163, + "acc_stderr": 0.004885942040894558, + "acc_norm": 0.5293766182035451, + "acc_norm_stderr": 0.004981161746388229 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.03811079669833531, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.03811079669833531 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5048543689320388, + "acc_stderr": 0.049505043821289195, + "acc_norm": 0.5048543689320388, + "acc_norm_stderr": 0.049505043821289195 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4840357598978289, + "acc_stderr": 0.017870847506081734, + "acc_norm": 0.4840357598978289, + "acc_norm_stderr": 0.017870847506081734 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.043097329010363554, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.043097329010363554 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.37446808510638296, + "acc_stderr": 0.031639106653672915, + "acc_norm": 0.37446808510638296, + "acc_norm_stderr": 0.031639106653672915 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39156626506024095, + "acc_stderr": 0.03799857454479636, + "acc_norm": 0.39156626506024095, + "acc_norm_stderr": 0.03799857454479636 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.42443729903536975, + "acc_stderr": 0.028071928247946205, + "acc_norm": 0.42443729903536975, + "acc_norm_stderr": 0.028071928247946205 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4663677130044843, + "acc_stderr": 0.03348180017060306, + "acc_norm": 0.4663677130044843, 
+ "acc_norm_stderr": 0.03348180017060306 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.45038167938931295, + "acc_stderr": 0.04363643698524779, + "acc_norm": 0.45038167938931295, + "acc_norm_stderr": 0.04363643698524779 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.41414141414141414, + "acc_stderr": 0.03509438348879629, + "acc_norm": 0.41414141414141414, + "acc_norm_stderr": 0.03509438348879629 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3724137931034483, + "acc_stderr": 0.0402873153294756, + "acc_norm": 0.3724137931034483, + "acc_norm_stderr": 0.0402873153294756 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.20588235294117646, + "acc_stderr": 0.04023382273617747, + "acc_norm": 0.20588235294117646, + "acc_norm_stderr": 0.04023382273617747 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.39915966386554624, + "acc_stderr": 0.03181110032413926, + "acc_norm": 0.39915966386554624, + "acc_norm_stderr": 0.03181110032413926 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.40512820512820513, + "acc_stderr": 0.024890471769938156, + "acc_norm": 0.40512820512820513, + "acc_norm_stderr": 0.024890471769938156 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.0479372485441102, + "acc_norm": 0.35, + "acc_norm_stderr": 0.0479372485441102 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.04820403072760627, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.04820403072760627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.37438423645320196, + "acc_stderr": 0.03405155380561952, + 
"acc_norm": 0.37438423645320196, + "acc_norm_stderr": 0.03405155380561952 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4258064516129032, + "acc_stderr": 0.028129112709165897, + "acc_norm": 0.4258064516129032, + "acc_norm_stderr": 0.028129112709165897 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.594017094017094, + "acc_stderr": 0.03217180182641087, + "acc_norm": 0.594017094017094, + "acc_norm_stderr": 0.03217180182641087 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4, + "acc_stderr": 0.030151134457776285, + "acc_norm": 0.4, + "acc_norm_stderr": 0.030151134457776285 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.41818181818181815, + "acc_stderr": 0.04724577405731572, + "acc_norm": 0.41818181818181815, + "acc_norm_stderr": 0.04724577405731572 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.27037037037037037, + "acc_stderr": 0.02708037281514566, + "acc_norm": 0.27037037037037037, + "acc_norm_stderr": 0.02708037281514566 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.03710185726119995, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.03710185726119995 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5124378109452736, + "acc_stderr": 0.0353443984853958, + "acc_norm": 0.5124378109452736, + "acc_norm_stderr": 0.0353443984853958 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3872832369942196, + "acc_stderr": 0.037143259063020635, + "acc_norm": 0.3872832369942196, + "acc_norm_stderr": 0.037143259063020635 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2698412698412698, + "acc_stderr": 0.022860838309232072, + "acc_norm": 0.2698412698412698, + "acc_norm_stderr": 0.022860838309232072 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.039420826399272135, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.039420826399272135 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 
0.31, + "acc_stderr": 0.046482319871173156, + "acc_norm": 0.31, + "acc_norm_stderr": 0.046482319871173156 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4595375722543353, + "acc_stderr": 0.02683080599895224, + "acc_norm": 0.4595375722543353, + "acc_norm_stderr": 0.02683080599895224 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4049079754601227, + "acc_stderr": 0.03856672163548913, + "acc_norm": 0.4049079754601227, + "acc_norm_stderr": 0.03856672163548913 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.027648477877413324, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.027648477877413324 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.41968911917098445, + "acc_stderr": 0.035615873276858834, + "acc_norm": 0.41968911917098445, + "acc_norm_stderr": 0.035615873276858834 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.24561403508771928, + "acc_stderr": 0.04049339297748142, + "acc_norm": 0.24561403508771928, + "acc_norm_stderr": 0.04049339297748142 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.4055045871559633, + "acc_stderr": 0.021050997991896834, + "acc_norm": 0.4055045871559633, + "acc_norm_stderr": 0.021050997991896834 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.0404061017820884, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.0404061017820884 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.39215686274509803, + "acc_stderr": 0.027956046165424513, + "acc_norm": 0.39215686274509803, + "acc_norm_stderr": 0.027956046165424513 + }, + "harness|ko_mmlu_business_ethics|5": { + 
"acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6033057851239669, + "acc_stderr": 0.04465869780531009, + "acc_norm": 0.6033057851239669, + "acc_norm_stderr": 0.04465869780531009 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3618421052631579, + "acc_stderr": 0.03910525752849724, + "acc_norm": 0.3618421052631579, + "acc_norm_stderr": 0.03910525752849724 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.32516339869281047, + "acc_stderr": 0.018950886770806315, + "acc_norm": 0.32516339869281047, + "acc_norm_stderr": 0.018950886770806315 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.29432624113475175, + "acc_stderr": 0.02718712701150381, + "acc_norm": 0.29432624113475175, + "acc_norm_stderr": 0.02718712701150381 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.21428571428571427, + "acc_stderr": 0.03894641120044793, + "acc_norm": 0.21428571428571427, + "acc_norm_stderr": 0.03894641120044793 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2916666666666667, + "acc_stderr": 0.03099866630456052, + "acc_norm": 0.2916666666666667, + "acc_norm_stderr": 0.03099866630456052 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.33455882352941174, + "acc_stderr": 0.028661996202335314, + "acc_norm": 0.33455882352941174, + "acc_norm_stderr": 0.028661996202335314 + }, + 
"harness|ko_mmlu_security_studies|5": { + "acc": 0.49387755102040815, + "acc_stderr": 0.03200682020163908, + "acc_norm": 0.49387755102040815, + "acc_norm_stderr": 0.03200682020163908 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.4767932489451477, + "acc_stderr": 0.03251215201141018, + "acc_norm": 0.4767932489451477, + "acc_norm_stderr": 0.03251215201141018 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.30378096479791394, + "acc_stderr": 0.011745787720472462, + "acc_norm": 0.30378096479791394, + "acc_norm_stderr": 0.011745787720472462 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.35784313725490197, + "acc_stderr": 0.03364487286088299, + "acc_norm": 0.35784313725490197, + "acc_norm_stderr": 0.03364487286088299 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.4727272727272727, + "acc_stderr": 0.03898531605579419, + "acc_norm": 0.4727272727272727, + "acc_norm_stderr": 0.03898531605579419 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.22276621787025705, + "mc1_stderr": 0.014566506961396754, + "mc2": 0.38125667821834136, + "mc2_stderr": 0.015042897939213158 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.38488783943329397, + "acc_stderr": 0.016728579701498672, + "acc_norm": 0.4675324675324675, + "acc_norm_stderr": 0.017154073716682858 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + 
"harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + 
"harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "yeheun/llama-2-koen-13b-v1.3", + "model_sha": "690d4d31210518d018e6be9b2a099bb7b2c60af5", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/yhkim9362/gemma-en-ko-7b-v0.1/result_2024-04-12 04:30:39.json b/yhkim9362/gemma-en-ko-7b-v0.1/result_2024-04-12 04:30:39.json new file mode 100644 index 0000000000000000000000000000000000000000..d442fc30ea0529ba144b43a9a03dc3b1a95d075b --- /dev/null +++ b/yhkim9362/gemma-en-ko-7b-v0.1/result_2024-04-12 04:30:39.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.20392491467576793, + "acc_stderr": 0.011774262478702252, + "acc_norm": 0.24488054607508533, + "acc_norm_stderr": 0.012566273985131356 + }, + "harness|ko_hellaswag|10": { + "acc": 0.2613025293766182, + "acc_stderr": 0.0043844652190707665, + "acc_norm": 0.26399123680541725, + "acc_norm_stderr": 0.0043989372250384145 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.3157894736842105, + "acc_stderr": 0.035650796707083106, + "acc_norm": 0.3157894736842105, + "acc_norm_stderr": 0.035650796707083106 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.17475728155339806, + "acc_stderr": 0.037601780060266196, + "acc_norm": 0.17475728155339806, + "acc_norm_stderr": 0.037601780060266196 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2554278416347382, + "acc_stderr": 0.015594955384455765, + "acc_norm": 0.2554278416347382, + "acc_norm_stderr": 0.015594955384455765 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.23703703703703705, + "acc_stderr": 0.03673731683969506, + "acc_norm": 0.23703703703703705, + "acc_norm_stderr": 0.03673731683969506 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + 
"acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.20851063829787234, + "acc_stderr": 0.02655698211783875, + "acc_norm": 0.20851063829787234, + "acc_norm_stderr": 0.02655698211783875 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.21686746987951808, + "acc_stderr": 0.03208284450356365, + "acc_norm": 0.21686746987951808, + "acc_norm_stderr": 0.03208284450356365 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.28938906752411575, + "acc_stderr": 0.025755865922632938, + "acc_norm": 0.28938906752411575, + "acc_norm_stderr": 0.025755865922632938 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3273542600896861, + "acc_stderr": 0.03149384670994131, + "acc_norm": 0.3273542600896861, + "acc_norm_stderr": 0.03149384670994131 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.22900763358778625, + "acc_stderr": 0.036853466317118506, + "acc_norm": 0.22900763358778625, + "acc_norm_stderr": 0.036853466317118506 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.2828282828282828, + "acc_stderr": 0.03208779558786751, + "acc_norm": 0.2828282828282828, + "acc_norm_stderr": 0.03208779558786751 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2413793103448276, + "acc_stderr": 0.03565998174135302, + "acc_norm": 0.2413793103448276, + "acc_norm_stderr": 0.03565998174135302 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237654, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237654 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.2815126050420168, + "acc_stderr": 0.029213549414372153, + "acc_norm": 0.2815126050420168, + "acc_norm_stderr": 0.029213549414372153 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.358974358974359, + 
"acc_stderr": 0.024321738484602364, + "acc_norm": 0.358974358974359, + "acc_norm_stderr": 0.024321738484602364 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.18, + "acc_stderr": 0.03861229196653694, + "acc_norm": 0.18, + "acc_norm_stderr": 0.03861229196653694 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.23148148148148148, + "acc_stderr": 0.04077494709252626, + "acc_norm": 0.23148148148148148, + "acc_norm_stderr": 0.04077494709252626 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.1724137931034483, + "acc_stderr": 0.02657767218303658, + "acc_norm": 0.1724137931034483, + "acc_norm_stderr": 0.02657767218303658 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2806451612903226, + "acc_stderr": 0.0255606047210229, + "acc_norm": 0.2806451612903226, + "acc_norm_stderr": 0.0255606047210229 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.19658119658119658, + "acc_stderr": 0.02603538609895129, + "acc_norm": 0.19658119658119658, + "acc_norm_stderr": 0.02603538609895129 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.20754716981132076, + "acc_stderr": 0.02495991802891127, + "acc_norm": 0.20754716981132076, + "acc_norm_stderr": 0.02495991802891127 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2, + "acc_stderr": 0.03831305140884603, + "acc_norm": 0.2, + "acc_norm_stderr": 0.03831305140884603 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.02696242432507384, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.02696242432507384 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2052980132450331, + "acc_stderr": 0.03297986648473836, + "acc_norm": 0.2052980132450331, + "acc_norm_stderr": 0.03297986648473836 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.24378109452736318, + 
"acc_stderr": 0.030360490154014666, + "acc_norm": 0.24378109452736318, + "acc_norm_stderr": 0.030360490154014666 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2774566473988439, + "acc_stderr": 0.03414014007044036, + "acc_norm": 0.2774566473988439, + "acc_norm_stderr": 0.03414014007044036 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2566137566137566, + "acc_stderr": 0.022494510767503154, + "acc_norm": 0.2566137566137566, + "acc_norm_stderr": 0.022494510767503154 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2638888888888889, + "acc_stderr": 0.03685651095897532, + "acc_norm": 0.2638888888888889, + "acc_norm_stderr": 0.03685651095897532 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036845, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036845 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720683, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720683 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2023121387283237, + "acc_stderr": 0.021628077380196134, + "acc_norm": 0.2023121387283237, + "acc_norm_stderr": 0.021628077380196134 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.22699386503067484, + "acc_stderr": 0.03291099578615771, + "acc_norm": 0.22699386503067484, + "acc_norm_stderr": 0.03291099578615771 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.024383665531035457, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.024383665531035457 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.34196891191709844, + "acc_stderr": 0.03423465100104283, + "acc_norm": 0.34196891191709844, + "acc_norm_stderr": 0.03423465100104283 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 
0.2543859649122807, + "acc_stderr": 0.040969851398436716, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.040969851398436716 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.28623853211009176, + "acc_stderr": 0.01937943662891999, + "acc_norm": 0.28623853211009176, + "acc_norm_stderr": 0.01937943662891999 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.23015873015873015, + "acc_stderr": 0.037649508797906066, + "acc_norm": 0.23015873015873015, + "acc_norm_stderr": 0.037649508797906066 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.02428861946604611, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.02428861946604611 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816507, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816507 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.256198347107438, + "acc_stderr": 0.03984979653302872, + "acc_norm": 0.256198347107438, + "acc_norm_stderr": 0.03984979653302872 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.21052631578947367, + "acc_stderr": 0.03317672787533157, + "acc_norm": 0.21052631578947367, + "acc_norm_stderr": 0.03317672787533157 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2173202614379085, + "acc_stderr": 0.016684820929148598, + "acc_norm": 0.2173202614379085, + "acc_norm_stderr": 0.016684820929148598 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.25886524822695034, + "acc_stderr": 0.026129572527180844, + "acc_norm": 0.25886524822695034, + "acc_norm_stderr": 0.026129572527180844 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.23214285714285715, + "acc_stderr": 0.04007341809755807, + "acc_norm": 0.23214285714285715, + "acc_norm_stderr": 0.04007341809755807 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.0340470532865388, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 
0.0340470532865388 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23798882681564246, + "acc_stderr": 0.01424263007057489, + "acc_norm": 0.23798882681564246, + "acc_norm_stderr": 0.01424263007057489 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4485294117647059, + "acc_stderr": 0.030211479609121593, + "acc_norm": 0.4485294117647059, + "acc_norm_stderr": 0.030211479609121593 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.40408163265306124, + "acc_stderr": 0.0314147080258659, + "acc_norm": 0.40408163265306124, + "acc_norm_stderr": 0.0314147080258659 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.25316455696202533, + "acc_stderr": 0.028304657943035293, + "acc_norm": 0.25316455696202533, + "acc_norm_stderr": 0.028304657943035293 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.24445893089960888, + "acc_stderr": 0.010976425013113895, + "acc_norm": 0.24445893089960888, + "acc_norm_stderr": 0.010976425013113895 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.25, + "acc_stderr": 0.03039153369274154, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03039153369274154 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.21212121212121213, + "acc_stderr": 0.03192271569548299, + "acc_norm": 0.21212121212121213, + "acc_norm_stderr": 0.03192271569548299 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24969400244798043, + "mc1_stderr": 0.015152286907148128, + "mc2": 0.4088746013896439, + "mc2_stderr": 0.01649380732972585 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.15348288075560804, + "acc_stderr": 0.012392606565325115, + "acc_norm": 
0.1912632821723731, + "acc_norm_stderr": 0.013521790445859336 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + 
"harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "yhkim9362/gemma-en-ko-7b-v0.1", + "model_sha": "619388a424c790cd418a2c628dac8a6712dc43ed", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/yhkim9362/gemma-en-ko-7b-v0.2/result_2024-04-19 08:21:17.json b/yhkim9362/gemma-en-ko-7b-v0.2/result_2024-04-19 08:21:17.json new file mode 100644 index 0000000000000000000000000000000000000000..224deb67c85934e6c20da68dc9c0cd7c21d44777 --- /dev/null +++ b/yhkim9362/gemma-en-ko-7b-v0.2/result_2024-04-19 08:21:17.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.20392491467576793, + "acc_stderr": 0.011774262478702252, + "acc_norm": 0.24488054607508533, + "acc_norm_stderr": 0.012566273985131356 + }, + "harness|ko_hellaswag|10": { + "acc": 0.2613025293766182, + "acc_stderr": 0.0043844652190707665, + "acc_norm": 0.26399123680541725, + "acc_norm_stderr": 0.0043989372250384145 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.3157894736842105, + "acc_stderr": 0.035650796707083106, + "acc_norm": 0.3157894736842105, + 
"acc_norm_stderr": 0.035650796707083106 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.17475728155339806, + "acc_stderr": 0.037601780060266196, + "acc_norm": 0.17475728155339806, + "acc_norm_stderr": 0.037601780060266196 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.2554278416347382, + "acc_stderr": 0.015594955384455765, + "acc_norm": 0.2554278416347382, + "acc_norm_stderr": 0.015594955384455765 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.23703703703703705, + "acc_stderr": 0.03673731683969506, + "acc_norm": 0.23703703703703705, + "acc_norm_stderr": 0.03673731683969506 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.20851063829787234, + "acc_stderr": 0.02655698211783875, + "acc_norm": 0.20851063829787234, + "acc_norm_stderr": 0.02655698211783875 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.21686746987951808, + "acc_stderr": 0.03208284450356365, + "acc_norm": 0.21686746987951808, + "acc_norm_stderr": 0.03208284450356365 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.28938906752411575, + "acc_stderr": 0.025755865922632938, + "acc_norm": 0.28938906752411575, + "acc_norm_stderr": 0.025755865922632938 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.3273542600896861, + "acc_stderr": 0.03149384670994131, + "acc_norm": 0.3273542600896861, + "acc_norm_stderr": 0.03149384670994131 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.22900763358778625, + "acc_stderr": 0.036853466317118506, + "acc_norm": 0.22900763358778625, + "acc_norm_stderr": 0.036853466317118506 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.2828282828282828, + "acc_stderr": 0.03208779558786751, + "acc_norm": 0.2828282828282828, 
+ "acc_norm_stderr": 0.03208779558786751 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2413793103448276, + "acc_stderr": 0.03565998174135302, + "acc_norm": 0.2413793103448276, + "acc_norm_stderr": 0.03565998174135302 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237654, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237654 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.2815126050420168, + "acc_stderr": 0.029213549414372153, + "acc_norm": 0.2815126050420168, + "acc_norm_stderr": 0.029213549414372153 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.358974358974359, + "acc_stderr": 0.024321738484602364, + "acc_norm": 0.358974358974359, + "acc_norm_stderr": 0.024321738484602364 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.18, + "acc_stderr": 0.03861229196653694, + "acc_norm": 0.18, + "acc_norm_stderr": 0.03861229196653694 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.23148148148148148, + "acc_stderr": 0.04077494709252626, + "acc_norm": 0.23148148148148148, + "acc_norm_stderr": 0.04077494709252626 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.1724137931034483, + "acc_stderr": 0.02657767218303658, + "acc_norm": 0.1724137931034483, + "acc_norm_stderr": 0.02657767218303658 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.2806451612903226, + "acc_stderr": 0.0255606047210229, + "acc_norm": 0.2806451612903226, + "acc_norm_stderr": 0.0255606047210229 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.19658119658119658, + "acc_stderr": 0.02603538609895129, + "acc_norm": 0.19658119658119658, + "acc_norm_stderr": 0.02603538609895129 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.20754716981132076, + "acc_stderr": 
0.02495991802891127, + "acc_norm": 0.20754716981132076, + "acc_norm_stderr": 0.02495991802891127 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.2, + "acc_stderr": 0.03831305140884603, + "acc_norm": 0.2, + "acc_norm_stderr": 0.03831305140884603 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.02696242432507384, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.02696242432507384 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2052980132450331, + "acc_stderr": 0.03297986648473836, + "acc_norm": 0.2052980132450331, + "acc_norm_stderr": 0.03297986648473836 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.24378109452736318, + "acc_stderr": 0.030360490154014666, + "acc_norm": 0.24378109452736318, + "acc_norm_stderr": 0.030360490154014666 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.2774566473988439, + "acc_stderr": 0.03414014007044036, + "acc_norm": 0.2774566473988439, + "acc_norm_stderr": 0.03414014007044036 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2566137566137566, + "acc_stderr": 0.022494510767503154, + "acc_norm": 0.2566137566137566, + "acc_norm_stderr": 0.022494510767503154 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2638888888888889, + "acc_stderr": 0.03685651095897532, + "acc_norm": 0.2638888888888889, + "acc_norm_stderr": 0.03685651095897532 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.2, + "acc_stderr": 0.04020151261036845, + "acc_norm": 0.2, + "acc_norm_stderr": 0.04020151261036845 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720683, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720683 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.2023121387283237, + "acc_stderr": 0.021628077380196134, + "acc_norm": 0.2023121387283237, + "acc_norm_stderr": 0.021628077380196134 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.22699386503067484, + 
"acc_stderr": 0.03291099578615771, + "acc_norm": 0.22699386503067484, + "acc_norm_stderr": 0.03291099578615771 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.25925925925925924, + "acc_stderr": 0.024383665531035457, + "acc_norm": 0.25925925925925924, + "acc_norm_stderr": 0.024383665531035457 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.34196891191709844, + "acc_stderr": 0.03423465100104283, + "acc_norm": 0.34196891191709844, + "acc_norm_stderr": 0.03423465100104283 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.040969851398436716, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.040969851398436716 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.28623853211009176, + "acc_stderr": 0.01937943662891999, + "acc_norm": 0.28623853211009176, + "acc_norm_stderr": 0.01937943662891999 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.23015873015873015, + "acc_stderr": 0.037649508797906066, + "acc_norm": 0.23015873015873015, + "acc_norm_stderr": 0.037649508797906066 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.02428861946604611, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.02428861946604611 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816507, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816507 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.256198347107438, + "acc_stderr": 0.03984979653302872, + "acc_norm": 0.256198347107438, + "acc_norm_stderr": 0.03984979653302872 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.21052631578947367, + "acc_stderr": 0.03317672787533157, + "acc_norm": 0.21052631578947367, + "acc_norm_stderr": 0.03317672787533157 + }, + 
"harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2173202614379085, + "acc_stderr": 0.016684820929148598, + "acc_norm": 0.2173202614379085, + "acc_norm_stderr": 0.016684820929148598 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.25886524822695034, + "acc_stderr": 0.026129572527180844, + "acc_norm": 0.25886524822695034, + "acc_norm_stderr": 0.026129572527180844 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.23214285714285715, + "acc_stderr": 0.04007341809755807, + "acc_norm": 0.23214285714285715, + "acc_norm_stderr": 0.04007341809755807 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.0340470532865388, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.0340470532865388 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23798882681564246, + "acc_stderr": 0.01424263007057489, + "acc_norm": 0.23798882681564246, + "acc_norm_stderr": 0.01424263007057489 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252605, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252605 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4485294117647059, + "acc_stderr": 0.030211479609121593, + "acc_norm": 0.4485294117647059, + "acc_norm_stderr": 0.030211479609121593 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.40408163265306124, + "acc_stderr": 0.0314147080258659, + "acc_norm": 0.40408163265306124, + "acc_norm_stderr": 0.0314147080258659 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.25316455696202533, + "acc_stderr": 0.028304657943035293, + "acc_norm": 0.25316455696202533, + "acc_norm_stderr": 0.028304657943035293 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.24445893089960888, + "acc_stderr": 0.010976425013113895, 
+ "acc_norm": 0.24445893089960888, + "acc_norm_stderr": 0.010976425013113895 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.25, + "acc_stderr": 0.03039153369274154, + "acc_norm": 0.25, + "acc_norm_stderr": 0.03039153369274154 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.21212121212121213, + "acc_stderr": 0.03192271569548299, + "acc_norm": 0.21212121212121213, + "acc_norm_stderr": 0.03192271569548299 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.24969400244798043, + "mc1_stderr": 0.015152286907148128, + "mc2": 0.4088746013896439, + "mc2_stderr": 0.01649380732972585 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.15348288075560804, + "acc_stderr": 0.012392606565325115, + "acc_norm": 0.1912632821723731, + "acc_norm_stderr": 0.013521790445859336 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "yhkim9362/gemma-en-ko-7b-v0.2", + "model_sha": "fca2d9088f37829ab2a5392e8dd0ff1abfcf8087", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/youjunhyeok/Llama-3-8B-slerp-262k-sft-lora-ko-v2/result_2024-06-12 02:12:05.json 
b/youjunhyeok/Llama-3-8B-slerp-262k-sft-lora-ko-v2/result_2024-06-12 02:12:05.json new file mode 100644 index 0000000000000000000000000000000000000000..2cd6cf8d21d91d3de54b772924ba07cd69ee4987 --- /dev/null +++ b/youjunhyeok/Llama-3-8B-slerp-262k-sft-lora-ko-v2/result_2024-06-12 02:12:05.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3822525597269625, + "acc_stderr": 0.014200454049979275, + "acc_norm": 0.4232081911262799, + "acc_norm_stderr": 0.014438036220848018 + }, + "harness|ko_hellaswag|10": { + "acc": 0.39822744473212507, + "acc_stderr": 0.004885323175701676, + "acc_norm": 0.5278828918542123, + "acc_norm_stderr": 0.004982016702445962 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4853801169590643, + "acc_stderr": 0.038331852752130205, + "acc_norm": 0.4853801169590643, + "acc_norm_stderr": 0.038331852752130205 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6019417475728155, + "acc_stderr": 0.048467482539772386, + "acc_norm": 0.6019417475728155, + "acc_norm_stderr": 0.048467482539772386 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4495530012771392, + "acc_stderr": 0.017788725283507337, + "acc_norm": 0.4495530012771392, + "acc_norm_stderr": 0.017788725283507337 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3851851851851852, + "acc_stderr": 0.042039210401562783, + "acc_norm": 0.3851851851851852, + "acc_norm_stderr": 0.042039210401562783 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4723404255319149, + "acc_stderr": 0.03263597118409769, + "acc_norm": 0.4723404255319149, + "acc_norm_stderr": 0.03263597118409769 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3855421686746988, + "acc_stderr": 0.03789134424611548, + "acc_norm": 0.3855421686746988, + "acc_norm_stderr": 0.03789134424611548 + }, + "harness|ko_mmlu_philosophy|5": 
{ + "acc": 0.5498392282958199, + "acc_stderr": 0.02825666072336018, + "acc_norm": 0.5498392282958199, + "acc_norm_stderr": 0.02825666072336018 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.47533632286995514, + "acc_stderr": 0.03351695167652628, + "acc_norm": 0.47533632286995514, + "acc_norm_stderr": 0.03351695167652628 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4351145038167939, + "acc_stderr": 0.04348208051644858, + "acc_norm": 0.4351145038167939, + "acc_norm_stderr": 0.04348208051644858 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5606060606060606, + "acc_stderr": 0.035360859475294805, + "acc_norm": 0.5606060606060606, + "acc_norm_stderr": 0.035360859475294805 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4896551724137931, + "acc_stderr": 0.041657747757287644, + "acc_norm": 0.4896551724137931, + "acc_norm_stderr": 0.041657747757287644 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3235294117647059, + "acc_stderr": 0.046550104113196177, + "acc_norm": 0.3235294117647059, + "acc_norm_stderr": 0.046550104113196177 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5084033613445378, + "acc_stderr": 0.03247390276569669, + "acc_norm": 0.5084033613445378, + "acc_norm_stderr": 0.03247390276569669 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.49743589743589745, + "acc_stderr": 0.025350672979412205, + "acc_norm": 0.49743589743589745, + "acc_norm_stderr": 0.025350672979412205 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.52, + "acc_stderr": 0.05021167315686779, + "acc_norm": 0.52, + "acc_norm_stderr": 0.05021167315686779 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + 
"harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5370370370370371, + "acc_stderr": 0.04820403072760627, + "acc_norm": 0.5370370370370371, + "acc_norm_stderr": 0.04820403072760627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.45320197044334976, + "acc_stderr": 0.035025446508458714, + "acc_norm": 0.45320197044334976, + "acc_norm_stderr": 0.035025446508458714 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5451612903225806, + "acc_stderr": 0.02832774309156108, + "acc_norm": 0.5451612903225806, + "acc_norm_stderr": 0.02832774309156108 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.782051282051282, + "acc_stderr": 0.02704685763071667, + "acc_norm": 0.782051282051282, + "acc_norm_stderr": 0.02704685763071667 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4528301886792453, + "acc_stderr": 0.03063562795796182, + "acc_norm": 0.4528301886792453, + "acc_norm_stderr": 0.03063562795796182 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.42727272727272725, + "acc_stderr": 0.04738198703545483, + "acc_norm": 0.42727272727272725, + "acc_norm_stderr": 0.04738198703545483 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.337037037037037, + "acc_stderr": 0.028820884666253255, + "acc_norm": 0.337037037037037, + "acc_norm_stderr": 0.028820884666253255 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.32450331125827814, + "acc_stderr": 0.038227469376587525, + "acc_norm": 0.32450331125827814, + "acc_norm_stderr": 0.038227469376587525 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6467661691542289, + "acc_stderr": 0.03379790611796777, + "acc_norm": 0.6467661691542289, + "acc_norm_stderr": 0.03379790611796777 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4508670520231214, + "acc_stderr": 0.0379401267469703, + "acc_norm": 0.4508670520231214, + "acc_norm_stderr": 0.0379401267469703 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3544973544973545, + "acc_stderr": 
0.024636830602841997, + "acc_norm": 0.3544973544973545, + "acc_norm_stderr": 0.024636830602841997 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4583333333333333, + "acc_stderr": 0.041666666666666644, + "acc_norm": 0.4583333333333333, + "acc_norm_stderr": 0.041666666666666644 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.69, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.69, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5, + "acc_stderr": 0.026919095102908273, + "acc_norm": 0.5, + "acc_norm_stderr": 0.026919095102908273 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5030674846625767, + "acc_stderr": 0.03928297078179662, + "acc_norm": 0.5030674846625767, + "acc_norm_stderr": 0.03928297078179662 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5493827160493827, + "acc_stderr": 0.027684721415656203, + "acc_norm": 0.5493827160493827, + "acc_norm_stderr": 0.027684721415656203 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6373056994818653, + "acc_stderr": 0.03469713791704372, + "acc_norm": 0.6373056994818653, + "acc_norm_stderr": 0.03469713791704372 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3157894736842105, + "acc_stderr": 0.04372748290278008, + "acc_norm": 0.3157894736842105, + "acc_norm_stderr": 0.04372748290278008 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5798165137614679, + "acc_stderr": 0.021162420048273515, + "acc_norm": 0.5798165137614679, + "acc_norm_stderr": 0.021162420048273515 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.04285714285714281, 
+ "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.04285714285714281 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5294117647058824, + "acc_stderr": 0.028580341065138296, + "acc_norm": 0.5294117647058824, + "acc_norm_stderr": 0.028580341065138296 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6694214876033058, + "acc_stderr": 0.04294340845212093, + "acc_norm": 0.6694214876033058, + "acc_norm_stderr": 0.04294340845212093 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5131578947368421, + "acc_stderr": 0.04067533136309172, + "acc_norm": 0.5131578947368421, + "acc_norm_stderr": 0.04067533136309172 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.46568627450980393, + "acc_stderr": 0.020180144843307293, + "acc_norm": 0.46568627450980393, + "acc_norm_stderr": 0.020180144843307293 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.35106382978723405, + "acc_stderr": 0.028473501272963764, + "acc_norm": 0.35106382978723405, + "acc_norm_stderr": 0.028473501272963764 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.48214285714285715, + "acc_stderr": 0.047427623612430116, + "acc_norm": 0.48214285714285715, + "acc_norm_stderr": 0.047427623612430116 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.37962962962962965, + "acc_stderr": 0.03309682581119035, + "acc_norm": 0.37962962962962965, + "acc_norm_stderr": 0.03309682581119035 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2994413407821229, + "acc_stderr": 0.01531825774597671, + "acc_norm": 0.2994413407821229, + "acc_norm_stderr": 0.01531825774597671 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 
0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3786764705882353, + "acc_stderr": 0.029465133639776132, + "acc_norm": 0.3786764705882353, + "acc_norm_stderr": 0.029465133639776132 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5836734693877551, + "acc_stderr": 0.03155782816556165, + "acc_norm": 0.5836734693877551, + "acc_norm_stderr": 0.03155782816556165 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6455696202531646, + "acc_stderr": 0.031137304297185798, + "acc_norm": 0.6455696202531646, + "acc_norm_stderr": 0.031137304297185798 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.34810951760104303, + "acc_stderr": 0.0121667389936982, + "acc_norm": 0.34810951760104303, + "acc_norm_stderr": 0.0121667389936982 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6176470588235294, + "acc_stderr": 0.0341078533890472, + "acc_norm": 0.6176470588235294, + "acc_norm_stderr": 0.0341078533890472 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.037563357751878954, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.037563357751878954 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2460220318237454, + "mc1_stderr": 0.015077219200662564, + "mc2": 0.4065509786095452, + "mc2_stderr": 0.01520733917837415 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3789846517119244, + "acc_stderr": 0.016679260684229293, + "acc_norm": 0.4935064935064935, + "acc_norm_stderr": 0.017188904359077307 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 
1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + 
"harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "youjunhyeok/Llama-3-8B-slerp-262k-sft-lora-ko-v2", + "model_sha": "520600efa8624bf1fadf3df272181a3d39ae852f", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/youjunhyeok/Llama-3-8B-slerp-262k-sft-lora-ko/result_2024-05-23 00:08:57.json b/youjunhyeok/Llama-3-8B-slerp-262k-sft-lora-ko/result_2024-05-23 00:08:57.json new file mode 100644 index 0000000000000000000000000000000000000000..5823e996210c0be4f75d1e070fec46f4a49f39e8 --- /dev/null +++ b/youjunhyeok/Llama-3-8B-slerp-262k-sft-lora-ko/result_2024-05-23 00:08:57.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3395904436860068, + "acc_stderr": 0.01383903976282016, + "acc_norm": 0.39505119453924914, + "acc_norm_stderr": 0.014285898292938172 + }, + "harness|ko_hellaswag|10": { + "acc": 0.36207926707827126, + "acc_stderr": 0.004796193584930084, + "acc_norm": 0.476000796654053, + "acc_norm_stderr": 0.004984030250507298 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.42105263157894735, + "acc_stderr": 0.037867207062342145, + "acc_norm": 0.42105263157894735, + "acc_norm_stderr": 0.037867207062342145 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.4077669902912621, + "acc_stderr": 0.048657775704107696, + "acc_norm": 0.4077669902912621, + "acc_norm_stderr": 0.048657775704107696 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.37292464878671777, + "acc_stderr": 0.017292868269453907, + "acc_norm": 
0.37292464878671777, + "acc_norm_stderr": 0.017292868269453907 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.31851851851851853, + "acc_stderr": 0.04024778401977111, + "acc_norm": 0.31851851851851853, + "acc_norm_stderr": 0.04024778401977111 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.0440844002276808, + "acc_norm": 0.26, + "acc_norm_stderr": 0.0440844002276808 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3446808510638298, + "acc_stderr": 0.03106898596312215, + "acc_norm": 0.3446808510638298, + "acc_norm_stderr": 0.03106898596312215 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3313253012048193, + "acc_stderr": 0.03664314777288087, + "acc_norm": 0.3313253012048193, + "acc_norm_stderr": 0.03664314777288087 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4662379421221865, + "acc_stderr": 0.02833327710956279, + "acc_norm": 0.4662379421221865, + "acc_norm_stderr": 0.02833327710956279 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.39461883408071746, + "acc_stderr": 0.03280400504755291, + "acc_norm": 0.39461883408071746, + "acc_norm_stderr": 0.03280400504755291 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.45038167938931295, + "acc_stderr": 0.04363643698524779, + "acc_norm": 0.45038167938931295, + "acc_norm_stderr": 0.04363643698524779 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.4898989898989899, + "acc_stderr": 0.03561625488673745, + "acc_norm": 0.4898989898989899, + "acc_norm_stderr": 0.03561625488673745 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5310344827586206, + "acc_stderr": 0.04158632762097828, + "acc_norm": 0.5310344827586206, + "acc_norm_stderr": 0.04158632762097828 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3137254901960784, + "acc_stderr": 0.04617034827006717, + 
"acc_norm": 0.3137254901960784, + "acc_norm_stderr": 0.04617034827006717 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.42436974789915966, + "acc_stderr": 0.032104790510157764, + "acc_norm": 0.42436974789915966, + "acc_norm_stderr": 0.032104790510157764 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.45897435897435895, + "acc_stderr": 0.025265525491284295, + "acc_norm": 0.45897435897435895, + "acc_norm_stderr": 0.025265525491284295 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252604, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252604 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5925925925925926, + "acc_stderr": 0.04750077341199984, + "acc_norm": 0.5925925925925926, + "acc_norm_stderr": 0.04750077341199984 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4236453201970443, + "acc_stderr": 0.034767257476490364, + "acc_norm": 0.4236453201970443, + "acc_norm_stderr": 0.034767257476490364 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4806451612903226, + "acc_stderr": 0.028422687404312107, + "acc_norm": 0.4806451612903226, + "acc_norm_stderr": 0.028422687404312107 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6666666666666666, + "acc_stderr": 0.030882736974138656, + "acc_norm": 0.6666666666666666, + "acc_norm_stderr": 0.030882736974138656 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.41132075471698115, + "acc_stderr": 0.030285009259009805, + "acc_norm": 0.41132075471698115, + "acc_norm_stderr": 0.030285009259009805 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.3090909090909091, + "acc_stderr": 0.044262946482000985, + "acc_norm": 0.3090909090909091, + "acc_norm_stderr": 0.044262946482000985 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 
0.337037037037037, + "acc_stderr": 0.028820884666253252, + "acc_norm": 0.337037037037037, + "acc_norm_stderr": 0.028820884666253252 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3443708609271523, + "acc_stderr": 0.038796870240733264, + "acc_norm": 0.3443708609271523, + "acc_norm_stderr": 0.038796870240733264 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6019900497512438, + "acc_stderr": 0.034611994290400135, + "acc_norm": 0.6019900497512438, + "acc_norm_stderr": 0.034611994290400135 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4508670520231214, + "acc_stderr": 0.03794012674697029, + "acc_norm": 0.4508670520231214, + "acc_norm_stderr": 0.03794012674697029 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.35978835978835977, + "acc_stderr": 0.024718075944129277, + "acc_norm": 0.35978835978835977, + "acc_norm_stderr": 0.024718075944129277 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4236111111111111, + "acc_stderr": 0.04132125019723369, + "acc_norm": 0.4236111111111111, + "acc_norm_stderr": 0.04132125019723369 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.57, + "acc_stderr": 0.04975698519562426, + "acc_norm": 0.57, + "acc_norm_stderr": 0.04975698519562426 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.43352601156069365, + "acc_stderr": 0.026680134761679217, + "acc_norm": 0.43352601156069365, + "acc_norm_stderr": 0.026680134761679217 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4233128834355828, + "acc_stderr": 0.03881891213334383, + "acc_norm": 0.4233128834355828, + "acc_norm_stderr": 0.03881891213334383 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.49382716049382713, + "acc_stderr": 0.027818623962583302, + "acc_norm": 0.49382716049382713, + "acc_norm_stderr": 0.027818623962583302 + }, + 
"harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.46632124352331605, + "acc_stderr": 0.03600244069867178, + "acc_norm": 0.46632124352331605, + "acc_norm_stderr": 0.03600244069867178 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04434600701584925, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04434600701584925 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5211009174311927, + "acc_stderr": 0.02141822475426464, + "acc_norm": 0.5211009174311927, + "acc_norm_stderr": 0.02141822475426464 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3412698412698413, + "acc_stderr": 0.042407993275749255, + "acc_norm": 0.3412698412698413, + "acc_norm_stderr": 0.042407993275749255 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5163398692810458, + "acc_stderr": 0.02861462475280544, + "acc_norm": 0.5163398692810458, + "acc_norm_stderr": 0.02861462475280544 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.628099173553719, + "acc_stderr": 0.04412015806624504, + "acc_norm": 0.628099173553719, + "acc_norm_stderr": 0.04412015806624504 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.47368421052631576, + "acc_stderr": 0.040633027314866704, + "acc_norm": 0.47368421052631576, + "acc_norm_stderr": 0.040633027314866704 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3839869281045752, + "acc_stderr": 0.019675808135281515, + "acc_norm": 0.3839869281045752, + "acc_norm_stderr": 0.019675808135281515 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2978723404255319, + "acc_stderr": 0.027281608344469414, + "acc_norm": 0.2978723404255319, + 
"acc_norm_stderr": 0.027281608344469414 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3482142857142857, + "acc_stderr": 0.04521829902833587, + "acc_norm": 0.3482142857142857, + "acc_norm_stderr": 0.04521829902833587 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.35648148148148145, + "acc_stderr": 0.032664783315272714, + "acc_norm": 0.35648148148148145, + "acc_norm_stderr": 0.032664783315272714 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2871508379888268, + "acc_stderr": 0.015131608849963762, + "acc_norm": 0.2871508379888268, + "acc_norm_stderr": 0.015131608849963762 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.34191176470588236, + "acc_stderr": 0.028814722422254177, + "acc_norm": 0.34191176470588236, + "acc_norm_stderr": 0.028814722422254177 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.49795918367346936, + "acc_stderr": 0.0320089533497105, + "acc_norm": 0.49795918367346936, + "acc_norm_stderr": 0.0320089533497105 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5232067510548524, + "acc_stderr": 0.032512152011410174, + "acc_norm": 0.5232067510548524, + "acc_norm_stderr": 0.032512152011410174 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3246414602346806, + "acc_stderr": 0.011959089388530025, + "acc_norm": 0.3246414602346806, + "acc_norm_stderr": 0.011959089388530025 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4803921568627451, + "acc_stderr": 0.03506612560524866, + "acc_norm": 0.4803921568627451, + "acc_norm_stderr": 0.03506612560524866 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 
0.509090909090909, + "acc_stderr": 0.03903698647748441, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.03903698647748441 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.25091799265605874, + "mc1_stderr": 0.015176985027707689, + "mc2": 0.4244980182845243, + "mc2_stderr": 0.015249741624138832 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4309327036599764, + "acc_stderr": 0.01702555819604314, + "acc_norm": 0.49586776859504134, + "acc_norm_stderr": 0.01718976703213082 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + 
"harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "youjunhyeok/Llama-3-8B-slerp-262k-sft-lora-ko", + "model_sha": "f63e9fd9457334a4da279aaac132ecd40ce52436", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/youjunhyeok/llama3-8B-dpo-sft-v1/result_2024-06-12 02:12:16.json b/youjunhyeok/llama3-8B-dpo-sft-v1/result_2024-06-12 02:12:16.json new file mode 100644 index 0000000000000000000000000000000000000000..e68142e35e18cb0acefee0d0c31f5eea28365c26 --- /dev/null +++ b/youjunhyeok/llama3-8B-dpo-sft-v1/result_2024-06-12 02:12:16.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3796928327645051, 
+ "acc_stderr": 0.014182119866974876, + "acc_norm": 0.44112627986348124, + "acc_norm_stderr": 0.014509747749064666 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3813981278629755, + "acc_stderr": 0.0048473726701346335, + "acc_norm": 0.49960167297351127, + "acc_norm_stderr": 0.004989779828043841 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.543859649122807, + "acc_stderr": 0.03820042586602966, + "acc_norm": 0.543859649122807, + "acc_norm_stderr": 0.03820042586602966 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5048543689320388, + "acc_stderr": 0.04950504382128921, + "acc_norm": 0.5048543689320388, + "acc_norm_stderr": 0.04950504382128921 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.49169859514687103, + "acc_stderr": 0.017877498991072008, + "acc_norm": 0.49169859514687103, + "acc_norm_stderr": 0.017877498991072008 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04072314811876837, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04072314811876837 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.37872340425531914, + "acc_stderr": 0.031709956060406545, + "acc_norm": 0.37872340425531914, + "acc_norm_stderr": 0.031709956060406545 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3795180722891566, + "acc_stderr": 0.03777798822748018, + "acc_norm": 0.3795180722891566, + "acc_norm_stderr": 0.03777798822748018 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4340836012861736, + "acc_stderr": 0.0281502322445356, + "acc_norm": 0.4340836012861736, + "acc_norm_stderr": 0.0281502322445356 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.47085201793721976, + "acc_stderr": 0.03350073248773403, + "acc_norm": 0.47085201793721976, + "acc_norm_stderr": 0.03350073248773403 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 
0.44274809160305345, + "acc_stderr": 0.04356447202665069, + "acc_norm": 0.44274809160305345, + "acc_norm_stderr": 0.04356447202665069 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.42, + "acc_stderr": 0.04960449637488583, + "acc_norm": 0.42, + "acc_norm_stderr": 0.04960449637488583 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.4595959595959596, + "acc_stderr": 0.03550702465131342, + "acc_norm": 0.4595959595959596, + "acc_norm_stderr": 0.03550702465131342 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.503448275862069, + "acc_stderr": 0.04166567577101579, + "acc_norm": 0.503448275862069, + "acc_norm_stderr": 0.04166567577101579 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.04533838195929776, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.04533838195929776 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.39915966386554624, + "acc_stderr": 0.03181110032413926, + "acc_norm": 0.39915966386554624, + "acc_norm_stderr": 0.03181110032413926 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.39487179487179486, + "acc_stderr": 0.02478431694215637, + "acc_norm": 0.39487179487179486, + "acc_norm_stderr": 0.02478431694215637 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.04820403072760627, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.04820403072760627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3842364532019704, + "acc_stderr": 0.03422398565657549, + "acc_norm": 0.3842364532019704, + "acc_norm_stderr": 0.03422398565657549 + }, + 
"harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4032258064516129, + "acc_stderr": 0.027906150826041146, + "acc_norm": 0.4032258064516129, + "acc_norm_stderr": 0.027906150826041146 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6965811965811965, + "acc_stderr": 0.030118210106942652, + "acc_norm": 0.6965811965811965, + "acc_norm_stderr": 0.030118210106942652 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4377358490566038, + "acc_stderr": 0.030533338430467516, + "acc_norm": 0.4377358490566038, + "acc_norm_stderr": 0.030533338430467516 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.41818181818181815, + "acc_stderr": 0.047245774057315705, + "acc_norm": 0.41818181818181815, + "acc_norm_stderr": 0.047245774057315705 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.31851851851851853, + "acc_stderr": 0.028406533090608463, + "acc_norm": 0.31851851851851853, + "acc_norm_stderr": 0.028406533090608463 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.03710185726119995, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.03710185726119995 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5920398009950248, + "acc_stderr": 0.03475116365194092, + "acc_norm": 0.5920398009950248, + "acc_norm_stderr": 0.03475116365194092 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4046242774566474, + "acc_stderr": 0.03742461193887249, + "acc_norm": 0.4046242774566474, + "acc_norm_stderr": 0.03742461193887249 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3386243386243386, + "acc_stderr": 0.024373197867983056, + "acc_norm": 0.3386243386243386, + "acc_norm_stderr": 0.024373197867983056 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.04122728707651282, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.04122728707651282 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + 
"acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.63, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.63, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4682080924855491, + "acc_stderr": 0.026864624366756643, + "acc_norm": 0.4682080924855491, + "acc_norm_stderr": 0.026864624366756643 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.43558282208588955, + "acc_stderr": 0.03895632464138937, + "acc_norm": 0.43558282208588955, + "acc_norm_stderr": 0.03895632464138937 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.027648477877413317, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.027648477877413317 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.533678756476684, + "acc_stderr": 0.03600244069867178, + "acc_norm": 0.533678756476684, + "acc_norm_stderr": 0.03600244069867178 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2894736842105263, + "acc_stderr": 0.04266339443159394, + "acc_norm": 0.2894736842105263, + "acc_norm_stderr": 0.04266339443159394 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.47889908256880737, + "acc_stderr": 0.02141822475426464, + "acc_norm": 0.47889908256880737, + "acc_norm_stderr": 0.02141822475426464 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.36507936507936506, + "acc_stderr": 0.04306241259127153, + "acc_norm": 0.36507936507936506, + "acc_norm_stderr": 0.04306241259127153 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.45098039215686275, + "acc_stderr": 0.028491993586171563, + "acc_norm": 0.45098039215686275, + "acc_norm_stderr": 0.028491993586171563 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.57, + "acc_stderr": 0.049756985195624284, + 
"acc_norm": 0.57, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.628099173553719, + "acc_stderr": 0.04412015806624502, + "acc_norm": 0.628099173553719, + "acc_norm_stderr": 0.04412015806624502 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.35526315789473684, + "acc_stderr": 0.03894734487013316, + "acc_norm": 0.35526315789473684, + "acc_norm_stderr": 0.03894734487013316 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.38562091503267976, + "acc_stderr": 0.019691459052354154, + "acc_norm": 0.38562091503267976, + "acc_norm_stderr": 0.019691459052354154 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.30851063829787234, + "acc_stderr": 0.027553366165101362, + "acc_norm": 0.30851063829787234, + "acc_norm_stderr": 0.027553366165101362 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.48214285714285715, + "acc_stderr": 0.047427623612430116, + "acc_norm": 0.48214285714285715, + "acc_norm_stderr": 0.047427623612430116 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.28703703703703703, + "acc_stderr": 0.030851992993257013, + "acc_norm": 0.28703703703703703, + "acc_norm_stderr": 0.030851992993257013 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.293854748603352, + "acc_stderr": 0.015235075776719613, + "acc_norm": 0.293854748603352, + "acc_norm_stderr": 0.015235075776719613 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.2610294117647059, + "acc_stderr": 0.026679252270103117, + "acc_norm": 0.2610294117647059, + "acc_norm_stderr": 0.026679252270103117 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 
0.4326530612244898, + "acc_stderr": 0.03171752824062664, + "acc_norm": 0.4326530612244898, + "acc_norm_stderr": 0.03171752824062664 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.620253164556962, + "acc_stderr": 0.031591887529658504, + "acc_norm": 0.620253164556962, + "acc_norm_stderr": 0.031591887529658504 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2940026075619296, + "acc_stderr": 0.011636062953698607, + "acc_norm": 0.2940026075619296, + "acc_norm_stderr": 0.011636062953698607 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.47058823529411764, + "acc_stderr": 0.03503235296367992, + "acc_norm": 0.47058823529411764, + "acc_norm_stderr": 0.03503235296367992 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6, + "acc_stderr": 0.03825460278380026, + "acc_norm": 0.6, + "acc_norm_stderr": 0.03825460278380026 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.22766217870257038, + "mc1_stderr": 0.01467925503211107, + "mc2": 0.3870677617086723, + "mc2_stderr": 0.014892981255396504 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4380165289256198, + "acc_stderr": 0.017057753702160287, + "acc_norm": 0.4887839433293979, + "acc_norm_stderr": 0.017186028469489287 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + 
"harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + 
"model_name": "youjunhyeok/llama3-8B-dpo-sft-v1", + "model_sha": "d97843a3755a3289811681f5420f484075240d7b", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/youjunhyeok/llama3-8b-ko-sft-dpo-v1/result_2024-06-14 07:51:31.json b/youjunhyeok/llama3-8b-ko-sft-dpo-v1/result_2024-06-14 07:51:31.json new file mode 100644 index 0000000000000000000000000000000000000000..13648782b68d1d0d4d8d6023f78d749bf0dcc6c3 --- /dev/null +++ b/youjunhyeok/llama3-8b-ko-sft-dpo-v1/result_2024-06-14 07:51:31.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.41467576791808874, + "acc_stderr": 0.01439707056440917, + "acc_norm": 0.4539249146757679, + "acc_norm_stderr": 0.014549221105171864 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3917546305516829, + "acc_stderr": 0.004871447106554935, + "acc_norm": 0.5202150965943039, + "acc_norm_stderr": 0.0049857015938980015 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4502923976608187, + "acc_stderr": 0.038158273659132366, + "acc_norm": 0.4502923976608187, + "acc_norm_stderr": 0.038158273659132366 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5728155339805825, + "acc_stderr": 0.048979577377811674, + "acc_norm": 0.5728155339805825, + "acc_norm_stderr": 0.048979577377811674 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4393358876117497, + "acc_stderr": 0.017747874245683616, + "acc_norm": 0.4393358876117497, + "acc_norm_stderr": 0.017747874245683616 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34074074074074073, + "acc_stderr": 0.04094376269996794, + "acc_norm": 0.34074074074074073, + "acc_norm_stderr": 0.04094376269996794 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_conceptual_physics|5": { 
+ "acc": 0.425531914893617, + "acc_stderr": 0.03232146916224468, + "acc_norm": 0.425531914893617, + "acc_norm_stderr": 0.03232146916224468 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3614457831325301, + "acc_stderr": 0.0374005938202932, + "acc_norm": 0.3614457831325301, + "acc_norm_stderr": 0.0374005938202932 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5434083601286174, + "acc_stderr": 0.028290869054197598, + "acc_norm": 0.5434083601286174, + "acc_norm_stderr": 0.028290869054197598 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.47085201793721976, + "acc_stderr": 0.03350073248773403, + "acc_norm": 0.47085201793721976, + "acc_norm_stderr": 0.03350073248773403 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5267175572519084, + "acc_stderr": 0.04379024936553894, + "acc_norm": 0.5267175572519084, + "acc_norm_stderr": 0.04379024936553894 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.494949494949495, + "acc_stderr": 0.035621707606254015, + "acc_norm": 0.494949494949495, + "acc_norm_stderr": 0.035621707606254015 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5793103448275863, + "acc_stderr": 0.0411391498118926, + "acc_norm": 0.5793103448275863, + "acc_norm_stderr": 0.0411391498118926 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.04533838195929777, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.04533838195929777 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5630252100840336, + "acc_stderr": 0.03221943636566196, + "acc_norm": 0.5630252100840336, + "acc_norm_stderr": 0.03221943636566196 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4846153846153846, + "acc_stderr": 0.025339003010106522, + "acc_norm": 0.4846153846153846, + "acc_norm_stderr": 
0.025339003010106522 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.57, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.57, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.04803752235190192, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.04803752235190192 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4433497536945813, + "acc_stderr": 0.03495334582162934, + "acc_norm": 0.4433497536945813, + "acc_norm_stderr": 0.03495334582162934 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5387096774193548, + "acc_stderr": 0.028358634859836942, + "acc_norm": 0.5387096774193548, + "acc_norm_stderr": 0.028358634859836942 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7094017094017094, + "acc_stderr": 0.029745048572674064, + "acc_norm": 0.7094017094017094, + "acc_norm_stderr": 0.029745048572674064 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4981132075471698, + "acc_stderr": 0.030772653642075664, + "acc_norm": 0.4981132075471698, + "acc_norm_stderr": 0.030772653642075664 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.42727272727272725, + "acc_stderr": 0.04738198703545483, + "acc_norm": 0.42727272727272725, + "acc_norm_stderr": 0.04738198703545483 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3592592592592593, + "acc_stderr": 0.029252905927251976, + "acc_norm": 0.3592592592592593, + "acc_norm_stderr": 0.029252905927251976 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3973509933774834, + "acc_stderr": 0.03995524007681681, + "acc_norm": 0.3973509933774834, + "acc_norm_stderr": 0.03995524007681681 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6218905472636815, + "acc_stderr": 0.03428867848778658, + "acc_norm": 
0.6218905472636815, + "acc_norm_stderr": 0.03428867848778658 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.5028901734104047, + "acc_stderr": 0.038124005659748335, + "acc_norm": 0.5028901734104047, + "acc_norm_stderr": 0.038124005659748335 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3968253968253968, + "acc_stderr": 0.02519710107424649, + "acc_norm": 0.3968253968253968, + "acc_norm_stderr": 0.02519710107424649 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4236111111111111, + "acc_stderr": 0.04132125019723369, + "acc_norm": 0.4236111111111111, + "acc_norm_stderr": 0.04132125019723369 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.64, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.64, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.523121387283237, + "acc_stderr": 0.026890297881303118, + "acc_norm": 0.523121387283237, + "acc_norm_stderr": 0.026890297881303118 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4785276073619632, + "acc_stderr": 0.0392474687675113, + "acc_norm": 0.4785276073619632, + "acc_norm_stderr": 0.0392474687675113 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5493827160493827, + "acc_stderr": 0.027684721415656192, + "acc_norm": 0.5493827160493827, + "acc_norm_stderr": 0.027684721415656192 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5699481865284974, + "acc_stderr": 0.03572954333144808, + "acc_norm": 0.5699481865284974, + "acc_norm_stderr": 0.03572954333144808 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3684210526315789, + "acc_stderr": 0.04537815354939391, + 
"acc_norm": 0.3684210526315789, + "acc_norm_stderr": 0.04537815354939391 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5688073394495413, + "acc_stderr": 0.021233365030319563, + "acc_norm": 0.5688073394495413, + "acc_norm_stderr": 0.021233365030319563 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.40476190476190477, + "acc_stderr": 0.043902592653775614, + "acc_norm": 0.40476190476190477, + "acc_norm_stderr": 0.043902592653775614 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5228758169934641, + "acc_stderr": 0.028599936776089775, + "acc_norm": 0.5228758169934641, + "acc_norm_stderr": 0.028599936776089775 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6942148760330579, + "acc_stderr": 0.042059539338841226, + "acc_norm": 0.6942148760330579, + "acc_norm_stderr": 0.042059539338841226 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4934210526315789, + "acc_stderr": 0.040685900502249704, + "acc_norm": 0.4934210526315789, + "acc_norm_stderr": 0.040685900502249704 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.434640522875817, + "acc_stderr": 0.020054269200726452, + "acc_norm": 0.434640522875817, + "acc_norm_stderr": 0.020054269200726452 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3191489361702128, + "acc_stderr": 0.027807990141320203, + "acc_norm": 0.3191489361702128, + "acc_norm_stderr": 0.027807990141320203 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.04547960999764376, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.04547960999764376 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4305555555555556, + "acc_stderr": 0.03376922151252335, + "acc_norm": 0.4305555555555556, + "acc_norm_stderr": 0.03376922151252335 + }, + "harness|ko_mmlu_moral_scenarios|5": { + 
"acc": 0.2748603351955307, + "acc_stderr": 0.014931316703220518, + "acc_norm": 0.2748603351955307, + "acc_norm_stderr": 0.014931316703220518 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.0498887651569859, + "acc_norm": 0.44, + "acc_norm_stderr": 0.0498887651569859 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001974, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001974 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.39705882352941174, + "acc_stderr": 0.029722152099280065, + "acc_norm": 0.39705882352941174, + "acc_norm_stderr": 0.029722152099280065 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6244897959183674, + "acc_stderr": 0.031001209039894843, + "acc_norm": 0.6244897959183674, + "acc_norm_stderr": 0.031001209039894843 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.620253164556962, + "acc_stderr": 0.0315918875296585, + "acc_norm": 0.620253164556962, + "acc_norm_stderr": 0.0315918875296585 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.36571056062581486, + "acc_stderr": 0.012301028188840565, + "acc_norm": 0.36571056062581486, + "acc_norm_stderr": 0.012301028188840565 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5784313725490197, + "acc_stderr": 0.034658681963807614, + "acc_norm": 0.5784313725490197, + "acc_norm_stderr": 0.034658681963807614 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6181818181818182, + "acc_stderr": 0.03793713171165633, + "acc_norm": 0.6181818181818182, + "acc_norm_stderr": 0.03793713171165633 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.27906976744186046, + "mc1_stderr": 0.01570210709062789, + "mc2": 0.4500297227129741, + "mc2_stderr": 0.015224851129355222 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.46989374262101535, + "acc_stderr": 0.01715916359017022, + "acc_norm": 0.5041322314049587, + "acc_norm_stderr": 0.017189767032130824 
+ } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + 
"harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "youjunhyeok/llama3-8b-ko-sft-dpo-v1", + "model_sha": "d3b3316168dd58793d615659448575ec3d6bfc17", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/youjunhyeok/llama3-8b-ko-sft-v1/result_2024-06-19 01:04:27.json b/youjunhyeok/llama3-8b-ko-sft-v1/result_2024-06-19 01:04:27.json new file mode 100644 index 0000000000000000000000000000000000000000..b53f7dd24ad54fc2d46eedadca3aba354738d141 --- /dev/null +++ b/youjunhyeok/llama3-8b-ko-sft-v1/result_2024-06-19 01:04:27.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.39419795221843, + "acc_stderr": 0.014280522667467328, + "acc_norm": 0.4496587030716723, + "acc_norm_stderr": 0.01453714444428475 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3879705238000398, + "acc_stderr": 0.004862919176408079, + "acc_norm": 0.5084644493128859, + "acc_norm_stderr": 0.0049890663554495555 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4619883040935672, + "acc_stderr": 0.03823727092882307, + "acc_norm": 0.4619883040935672, + "acc_norm_stderr": 0.03823727092882307 + }, + "harness|ko_mmlu_management|5": { + "acc": 
0.5533980582524272, + "acc_stderr": 0.04922424153458934, + "acc_norm": 0.5533980582524272, + "acc_norm_stderr": 0.04922424153458934 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4342273307790549, + "acc_stderr": 0.017724589389677785, + "acc_norm": 0.4342273307790549, + "acc_norm_stderr": 0.017724589389677785 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04072314811876837, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04072314811876837 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768077, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768077 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4297872340425532, + "acc_stderr": 0.03236214467715564, + "acc_norm": 0.4297872340425532, + "acc_norm_stderr": 0.03236214467715564 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3493975903614458, + "acc_stderr": 0.03711725190740751, + "acc_norm": 0.3493975903614458, + "acc_norm_stderr": 0.03711725190740751 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5273311897106109, + "acc_stderr": 0.02835563356832818, + "acc_norm": 0.5273311897106109, + "acc_norm_stderr": 0.02835563356832818 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4798206278026906, + "acc_stderr": 0.033530461674123005, + "acc_norm": 0.4798206278026906, + "acc_norm_stderr": 0.033530461674123005 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4732824427480916, + "acc_stderr": 0.04379024936553894, + "acc_norm": 0.4732824427480916, + "acc_norm_stderr": 0.04379024936553894 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5252525252525253, + "acc_stderr": 0.03557806245087314, + "acc_norm": 0.5252525252525253, + "acc_norm_stderr": 0.03557806245087314 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 
0.6, + "acc_stderr": 0.04082482904638628, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04082482904638628 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.04488482852329017, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.04488482852329017 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5546218487394958, + "acc_stderr": 0.0322841062671639, + "acc_norm": 0.5546218487394958, + "acc_norm_stderr": 0.0322841062671639 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.48205128205128206, + "acc_stderr": 0.025334667080954953, + "acc_norm": 0.48205128205128206, + "acc_norm_stderr": 0.025334667080954953 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.58, + "acc_stderr": 0.04960449637488583, + "acc_norm": 0.58, + "acc_norm_stderr": 0.04960449637488583 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5740740740740741, + "acc_stderr": 0.0478034362693679, + "acc_norm": 0.5740740740740741, + "acc_norm_stderr": 0.0478034362693679 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.43842364532019706, + "acc_stderr": 0.03491207857486518, + "acc_norm": 0.43842364532019706, + "acc_norm_stderr": 0.03491207857486518 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5193548387096775, + "acc_stderr": 0.028422687404312107, + "acc_norm": 0.5193548387096775, + "acc_norm_stderr": 0.028422687404312107 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7051282051282052, + "acc_stderr": 0.029872577708891183, + "acc_norm": 0.7051282051282052, + "acc_norm_stderr": 0.029872577708891183 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4830188679245283, + "acc_stderr": 0.030755120364119898, + "acc_norm": 0.4830188679245283, + "acc_norm_stderr": 0.030755120364119898 + }, + "harness|ko_mmlu_public_relations|5": { 
+ "acc": 0.43636363636363634, + "acc_stderr": 0.04750185058907297, + "acc_norm": 0.43636363636363634, + "acc_norm_stderr": 0.04750185058907297 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3592592592592593, + "acc_stderr": 0.029252905927251972, + "acc_norm": 0.3592592592592593, + "acc_norm_stderr": 0.029252905927251972 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3708609271523179, + "acc_stderr": 0.03943966699183629, + "acc_norm": 0.3708609271523179, + "acc_norm_stderr": 0.03943966699183629 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6417910447761194, + "acc_stderr": 0.03390393042268814, + "acc_norm": 0.6417910447761194, + "acc_norm_stderr": 0.03390393042268814 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4797687861271676, + "acc_stderr": 0.03809342081273958, + "acc_norm": 0.4797687861271676, + "acc_norm_stderr": 0.03809342081273958 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.4021164021164021, + "acc_stderr": 0.02525303255499769, + "acc_norm": 0.4021164021164021, + "acc_norm_stderr": 0.02525303255499769 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.041553199555931467, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.041553199555931467 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.63, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.63, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5173410404624278, + "acc_stderr": 0.026902900458666647, + "acc_norm": 0.5173410404624278, + "acc_norm_stderr": 0.026902900458666647 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4662576687116564, + "acc_stderr": 0.039194155450484096, + "acc_norm": 0.4662576687116564, + "acc_norm_stderr": 0.039194155450484096 + }, + 
"harness|ko_mmlu_prehistory|5": { + "acc": 0.5277777777777778, + "acc_stderr": 0.027777777777777804, + "acc_norm": 0.5277777777777778, + "acc_norm_stderr": 0.027777777777777804 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.36, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.36, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5440414507772021, + "acc_stderr": 0.035944137112724366, + "acc_norm": 0.5440414507772021, + "acc_norm_stderr": 0.035944137112724366 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.34210526315789475, + "acc_stderr": 0.04462917535336937, + "acc_norm": 0.34210526315789475, + "acc_norm_stderr": 0.04462917535336937 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.581651376146789, + "acc_stderr": 0.02114954859644388, + "acc_norm": 0.581651376146789, + "acc_norm_stderr": 0.02114954859644388 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.38095238095238093, + "acc_stderr": 0.043435254289490965, + "acc_norm": 0.38095238095238093, + "acc_norm_stderr": 0.043435254289490965 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5196078431372549, + "acc_stderr": 0.028607893699576063, + "acc_norm": 0.5196078431372549, + "acc_norm_stderr": 0.028607893699576063 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7024793388429752, + "acc_stderr": 0.04173349148083498, + "acc_norm": 0.7024793388429752, + "acc_norm_stderr": 0.04173349148083498 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5, + "acc_stderr": 0.04068942293855797, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04068942293855797 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4215686274509804, + "acc_stderr": 0.019977422600227467, + "acc_norm": 0.4215686274509804, + "acc_norm_stderr": 0.019977422600227467 + 
}, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32978723404255317, + "acc_stderr": 0.028045946942042398, + "acc_norm": 0.32978723404255317, + "acc_norm_stderr": 0.028045946942042398 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.36607142857142855, + "acc_stderr": 0.04572372358737431, + "acc_norm": 0.36607142857142855, + "acc_norm_stderr": 0.04572372358737431 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4212962962962963, + "acc_stderr": 0.03367462138896078, + "acc_norm": 0.4212962962962963, + "acc_norm_stderr": 0.03367462138896078 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2547486033519553, + "acc_stderr": 0.014572650383409167, + "acc_norm": 0.2547486033519553, + "acc_norm_stderr": 0.014572650383409167 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3897058823529412, + "acc_stderr": 0.029624663581159696, + "acc_norm": 0.3897058823529412, + "acc_norm_stderr": 0.029624663581159696 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6204081632653061, + "acc_stderr": 0.031067211262872457, + "acc_norm": 0.6204081632653061, + "acc_norm_stderr": 0.031067211262872457 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6075949367088608, + "acc_stderr": 0.0317847187456473, + "acc_norm": 0.6075949367088608, + "acc_norm_stderr": 0.0317847187456473 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3494132985658409, + "acc_stderr": 0.012177306252786693, + "acc_norm": 0.3494132985658409, + "acc_norm_stderr": 0.012177306252786693 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5686274509803921, + "acc_stderr": 
0.03476099060501636, + "acc_norm": 0.5686274509803921, + "acc_norm_stderr": 0.03476099060501636 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6242424242424243, + "acc_stderr": 0.03781887353205982, + "acc_norm": 0.6242424242424243, + "acc_norm_stderr": 0.03781887353205982 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2729498164014688, + "mc1_stderr": 0.015594753632006518, + "mc2": 0.4386432863795436, + "mc2_stderr": 0.015185460450649993 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4628099173553719, + "acc_stderr": 0.0171427361176433, + "acc_norm": 0.5017709563164109, + "acc_norm_stderr": 0.017190246276231867 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + 
"harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "youjunhyeok/llama3-8b-ko-sft-v1", + "model_sha": "2a1bebda98eeb40e41a486eae80e622866986650", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/youjunhyeok/llama3-koen-8b-sft-v1/result_2024-07-01 00:46:05.json b/youjunhyeok/llama3-koen-8b-sft-v1/result_2024-07-01 00:46:05.json new file mode 100644 index 0000000000000000000000000000000000000000..cd68e9fec4f565f3dbc056aa2dfec3a385474a85 --- /dev/null +++ 
b/youjunhyeok/llama3-koen-8b-sft-v1/result_2024-07-01 00:46:05.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.41552901023890787, + "acc_stderr": 0.014401366641216377, + "acc_norm": 0.4564846416382253, + "acc_norm_stderr": 0.014555949760496437 + }, + "harness|ko_hellaswag|10": { + "acc": 0.42401911969727146, + "acc_stderr": 0.004931831953800038, + "acc_norm": 0.5727942640908186, + "acc_norm_stderr": 0.004936616428922639 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.631578947368421, + "acc_stderr": 0.036996580176568775, + "acc_norm": 0.631578947368421, + "acc_norm_stderr": 0.036996580176568775 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6019417475728155, + "acc_stderr": 0.048467482539772386, + "acc_norm": 0.6019417475728155, + "acc_norm_stderr": 0.048467482539772386 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5938697318007663, + "acc_stderr": 0.017562037406478923, + "acc_norm": 0.5938697318007663, + "acc_norm_stderr": 0.017562037406478923 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45925925925925926, + "acc_stderr": 0.04304979692464245, + "acc_norm": 0.45925925925925926, + "acc_norm_stderr": 0.04304979692464245 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.045126085985421276, + "acc_norm": 0.28, + "acc_norm_stderr": 0.045126085985421276 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.451063829787234, + "acc_stderr": 0.03252909619613197, + "acc_norm": 0.451063829787234, + "acc_norm_stderr": 0.03252909619613197 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4759036144578313, + "acc_stderr": 0.03887971849597264, + "acc_norm": 0.4759036144578313, + "acc_norm_stderr": 0.03887971849597264 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5466237942122186, + "acc_stderr": 0.02827435985489424, + "acc_norm": 0.5466237942122186, + "acc_norm_stderr": 0.02827435985489424 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5201793721973094, + 
"acc_stderr": 0.033530461674123, + "acc_norm": 0.5201793721973094, + "acc_norm_stderr": 0.033530461674123 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4961832061068702, + "acc_stderr": 0.043851623256015534, + "acc_norm": 0.4961832061068702, + "acc_norm_stderr": 0.043851623256015534 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5959595959595959, + "acc_stderr": 0.034961309720561266, + "acc_norm": 0.5959595959595959, + "acc_norm_stderr": 0.034961309720561266 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5241379310344828, + "acc_stderr": 0.0416180850350153, + "acc_norm": 0.5241379310344828, + "acc_norm_stderr": 0.0416180850350153 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237655, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237655 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5042016806722689, + "acc_stderr": 0.032477343344481116, + "acc_norm": 0.5042016806722689, + "acc_norm_stderr": 0.032477343344481116 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.47692307692307695, + "acc_stderr": 0.025323990861736125, + "acc_norm": 0.47692307692307695, + "acc_norm_stderr": 0.025323990861736125 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.55, + "acc_stderr": 0.049999999999999996, + "acc_norm": 0.55, + "acc_norm_stderr": 0.049999999999999996 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5370370370370371, + "acc_stderr": 0.04820403072760627, + "acc_norm": 0.5370370370370371, + "acc_norm_stderr": 0.04820403072760627 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 
0.4187192118226601, + "acc_stderr": 0.03471192860518468, + "acc_norm": 0.4187192118226601, + "acc_norm_stderr": 0.03471192860518468 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4870967741935484, + "acc_stderr": 0.028434533152681848, + "acc_norm": 0.4870967741935484, + "acc_norm_stderr": 0.028434533152681848 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7008547008547008, + "acc_stderr": 0.029996951858349483, + "acc_norm": 0.7008547008547008, + "acc_norm_stderr": 0.029996951858349483 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4716981132075472, + "acc_stderr": 0.030723535249006107, + "acc_norm": 0.4716981132075472, + "acc_norm_stderr": 0.030723535249006107 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4727272727272727, + "acc_stderr": 0.04782001791380063, + "acc_norm": 0.4727272727272727, + "acc_norm_stderr": 0.04782001791380063 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3111111111111111, + "acc_stderr": 0.028226446749683522, + "acc_norm": 0.3111111111111111, + "acc_norm_stderr": 0.028226446749683522 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2980132450331126, + "acc_stderr": 0.037345356767871984, + "acc_norm": 0.2980132450331126, + "acc_norm_stderr": 0.037345356767871984 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6567164179104478, + "acc_stderr": 0.03357379665433431, + "acc_norm": 0.6567164179104478, + "acc_norm_stderr": 0.03357379665433431 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3930635838150289, + "acc_stderr": 0.0372424959581773, + "acc_norm": 0.3930635838150289, + "acc_norm_stderr": 0.0372424959581773 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30158730158730157, + "acc_stderr": 0.023636975996101806, + "acc_norm": 0.30158730158730157, + "acc_norm_stderr": 0.023636975996101806 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4097222222222222, + "acc_stderr": 0.04112490974670787, + "acc_norm": 0.4097222222222222, + 
"acc_norm_stderr": 0.04112490974670787 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.65, + "acc_stderr": 0.04793724854411019, + "acc_norm": 0.65, + "acc_norm_stderr": 0.04793724854411019 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5, + "acc_stderr": 0.026919095102908273, + "acc_norm": 0.5, + "acc_norm_stderr": 0.026919095102908273 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5030674846625767, + "acc_stderr": 0.03928297078179663, + "acc_norm": 0.5030674846625767, + "acc_norm_stderr": 0.03928297078179663 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5185185185185185, + "acc_stderr": 0.027801656212323667, + "acc_norm": 0.5185185185185185, + "acc_norm_stderr": 0.027801656212323667 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847394, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847394 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.538860103626943, + "acc_stderr": 0.035975244117345775, + "acc_norm": 0.538860103626943, + "acc_norm_stderr": 0.035975244117345775 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.04142439719489362, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.04142439719489362 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6293577981651376, + "acc_stderr": 0.02070745816435298, + "acc_norm": 0.6293577981651376, + "acc_norm_stderr": 0.02070745816435298 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.29365079365079366, + "acc_stderr": 0.040735243221471255, + "acc_norm": 0.29365079365079366, + "acc_norm_stderr": 0.040735243221471255 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5228758169934641, + "acc_stderr": 0.028599936776089775, + "acc_norm": 0.5228758169934641, + "acc_norm_stderr": 
0.028599936776089775 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6859504132231405, + "acc_stderr": 0.04236964753041017, + "acc_norm": 0.6859504132231405, + "acc_norm_stderr": 0.04236964753041017 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.47368421052631576, + "acc_stderr": 0.040633027314866704, + "acc_norm": 0.47368421052631576, + "acc_norm_stderr": 0.040633027314866704 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.45588235294117646, + "acc_stderr": 0.020148939420415738, + "acc_norm": 0.45588235294117646, + "acc_norm_stderr": 0.020148939420415738 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.028121636040639875, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.028121636040639875 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.39285714285714285, + "acc_stderr": 0.04635550135609976, + "acc_norm": 0.39285714285714285, + "acc_norm_stderr": 0.04635550135609976 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.37962962962962965, + "acc_stderr": 0.03309682581119035, + "acc_norm": 0.37962962962962965, + "acc_norm_stderr": 0.03309682581119035 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.23016759776536314, + "acc_stderr": 0.014078339253425819, + "acc_norm": 0.23016759776536314, + "acc_norm_stderr": 0.014078339253425819 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.43014705882352944, + "acc_stderr": 0.030074971917302875, + 
"acc_norm": 0.43014705882352944, + "acc_norm_stderr": 0.030074971917302875 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.40816326530612246, + "acc_stderr": 0.03146465712827423, + "acc_norm": 0.40816326530612246, + "acc_norm_stderr": 0.03146465712827423 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6455696202531646, + "acc_stderr": 0.0311373042971858, + "acc_norm": 0.6455696202531646, + "acc_norm_stderr": 0.0311373042971858 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.31747066492829207, + "acc_stderr": 0.01188889206880931, + "acc_norm": 0.31747066492829207, + "acc_norm_stderr": 0.01188889206880931 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5735294117647058, + "acc_stderr": 0.03471157907953426, + "acc_norm": 0.5735294117647058, + "acc_norm_stderr": 0.03471157907953426 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5757575757575758, + "acc_stderr": 0.03859268142070263, + "acc_norm": 0.5757575757575758, + "acc_norm_stderr": 0.03859268142070263 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.28886168910648713, + "mc1_stderr": 0.015866346401384308, + "mc2": 0.4460535107578868, + "mc2_stderr": 0.0148652823789872 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.46162927981109797, + "acc_stderr": 0.01713966022184556, + "acc_norm": 0.48760330578512395, + "acc_norm_stderr": 0.017185069732676528 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + 
"harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + 
"harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "youjunhyeok/llama3-koen-8b-sft-v1", + "model_sha": "473cf8e9b9f39b3abefee4aa78d151b31c7fe7bc", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/youjunhyeok/llama3-koen-sft-dpo-v1/result_2024-07-01 00:47:44.json b/youjunhyeok/llama3-koen-sft-dpo-v1/result_2024-07-01 00:47:44.json new file mode 100644 index 0000000000000000000000000000000000000000..15d09e7cf57def1babef4caef32e2369d8a2b988 --- /dev/null +++ b/youjunhyeok/llama3-koen-sft-dpo-v1/result_2024-07-01 00:47:44.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4104095563139932, + "acc_stderr": 0.014374922192642662, + "acc_norm": 0.46501706484641636, + "acc_norm_stderr": 0.014575583922019672 + }, + "harness|ko_hellaswag|10": { + "acc": 0.42690699063931487, + "acc_stderr": 0.004936176784631953, + "acc_norm": 0.5777733519219279, + "acc_norm_stderr": 0.0049290484827604515 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.631578947368421, + "acc_stderr": 0.036996580176568775, + "acc_norm": 0.631578947368421, + "acc_norm_stderr": 0.036996580176568775 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6213592233009708, + "acc_stderr": 0.04802694698258974, + "acc_norm": 0.6213592233009708, + "acc_norm_stderr": 0.04802694698258974 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5951468710089399, + "acc_stderr": 0.017553246467720267, + "acc_norm": 0.5951468710089399, + "acc_norm_stderr": 0.017553246467720267 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45925925925925926, + "acc_stderr": 0.04304979692464245, + "acc_norm": 0.45925925925925926, + "acc_norm_stderr": 0.04304979692464245 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + 
"acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.425531914893617, + "acc_stderr": 0.03232146916224469, + "acc_norm": 0.425531914893617, + "acc_norm_stderr": 0.03232146916224469 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4759036144578313, + "acc_stderr": 0.03887971849597264, + "acc_norm": 0.4759036144578313, + "acc_norm_stderr": 0.03887971849597264 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5466237942122186, + "acc_stderr": 0.028274359854894245, + "acc_norm": 0.5466237942122186, + "acc_norm_stderr": 0.028274359854894245 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5201793721973094, + "acc_stderr": 0.033530461674123, + "acc_norm": 0.5201793721973094, + "acc_norm_stderr": 0.033530461674123 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5267175572519084, + "acc_stderr": 0.04379024936553894, + "acc_norm": 0.5267175572519084, + "acc_norm_stderr": 0.04379024936553894 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.48, + "acc_stderr": 0.05021167315686779, + "acc_norm": 0.48, + "acc_norm_stderr": 0.05021167315686779 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.601010101010101, + "acc_stderr": 0.034889016168527305, + "acc_norm": 0.601010101010101, + "acc_norm_stderr": 0.034889016168527305 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5241379310344828, + "acc_stderr": 0.0416180850350153, + "acc_norm": 0.5241379310344828, + "acc_norm_stderr": 0.0416180850350153 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2549019607843137, + "acc_stderr": 0.043364327079931785, + "acc_norm": 0.2549019607843137, + "acc_norm_stderr": 0.043364327079931785 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5084033613445378, + "acc_stderr": 0.03247390276569669, + "acc_norm": 0.5084033613445378, + "acc_norm_stderr": 0.03247390276569669 + }, + 
"harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.025294608023986483, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.025294608023986483 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.53, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.53, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5740740740740741, + "acc_stderr": 0.0478034362693679, + "acc_norm": 0.5740740740740741, + "acc_norm_stderr": 0.0478034362693679 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4088669950738916, + "acc_stderr": 0.034590588158832314, + "acc_norm": 0.4088669950738916, + "acc_norm_stderr": 0.034590588158832314 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5096774193548387, + "acc_stderr": 0.02843867799890955, + "acc_norm": 0.5096774193548387, + "acc_norm_stderr": 0.02843867799890955 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6923076923076923, + "acc_stderr": 0.030236389942173095, + "acc_norm": 0.6923076923076923, + "acc_norm_stderr": 0.030236389942173095 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4830188679245283, + "acc_stderr": 0.030755120364119905, + "acc_norm": 0.4830188679245283, + "acc_norm_stderr": 0.030755120364119905 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4818181818181818, + "acc_stderr": 0.04785964010794917, + "acc_norm": 0.4818181818181818, + "acc_norm_stderr": 0.04785964010794917 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.32222222222222224, + "acc_stderr": 0.028493465091028597, + "acc_norm": 0.32222222222222224, + "acc_norm_stderr": 0.028493465091028597 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.037579499229433426, + "acc_norm": 0.304635761589404, + 
"acc_norm_stderr": 0.037579499229433426 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6517412935323383, + "acc_stderr": 0.033687874661154596, + "acc_norm": 0.6517412935323383, + "acc_norm_stderr": 0.033687874661154596 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3930635838150289, + "acc_stderr": 0.0372424959581773, + "acc_norm": 0.3930635838150289, + "acc_norm_stderr": 0.0372424959581773 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30952380952380953, + "acc_stderr": 0.023809523809523857, + "acc_norm": 0.30952380952380953, + "acc_norm_stderr": 0.023809523809523857 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4027777777777778, + "acc_stderr": 0.04101405519842426, + "acc_norm": 0.4027777777777778, + "acc_norm_stderr": 0.04101405519842426 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.35, + "acc_stderr": 0.047937248544110196, + "acc_norm": 0.35, + "acc_norm_stderr": 0.047937248544110196 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.64, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.64, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5028901734104047, + "acc_stderr": 0.02691864538323901, + "acc_norm": 0.5028901734104047, + "acc_norm_stderr": 0.02691864538323901 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.48466257668711654, + "acc_stderr": 0.03926522378708843, + "acc_norm": 0.48466257668711654, + "acc_norm_stderr": 0.03926522378708843 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5216049382716049, + "acc_stderr": 0.027794760105008722, + "acc_norm": 0.5216049382716049, + "acc_norm_stderr": 0.027794760105008722 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5284974093264249, + "acc_stderr": 0.036025735712884414, + "acc_norm": 
0.5284974093264249, + "acc_norm_stderr": 0.036025735712884414 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2719298245614035, + "acc_stderr": 0.041857744240220575, + "acc_norm": 0.2719298245614035, + "acc_norm_stderr": 0.041857744240220575 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6403669724770642, + "acc_stderr": 0.020575234660123787, + "acc_norm": 0.6403669724770642, + "acc_norm_stderr": 0.020575234660123787 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30158730158730157, + "acc_stderr": 0.041049472699033945, + "acc_norm": 0.30158730158730157, + "acc_norm_stderr": 0.041049472699033945 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5261437908496732, + "acc_stderr": 0.028590752958852387, + "acc_norm": 0.5261437908496732, + "acc_norm_stderr": 0.028590752958852387 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6694214876033058, + "acc_stderr": 0.04294340845212095, + "acc_norm": 0.6694214876033058, + "acc_norm_stderr": 0.04294340845212095 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.45394736842105265, + "acc_stderr": 0.0405164634287414, + "acc_norm": 0.45394736842105265, + "acc_norm_stderr": 0.0405164634287414 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4477124183006536, + "acc_stderr": 0.02011692534742242, + "acc_norm": 0.4477124183006536, + "acc_norm_stderr": 0.02011692534742242 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.33687943262411346, + "acc_stderr": 0.028195534873966734, + "acc_norm": 0.33687943262411346, + "acc_norm_stderr": 0.028195534873966734 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.375, + "acc_stderr": 0.04595091388086298, + "acc_norm": 0.375, + "acc_norm_stderr": 0.04595091388086298 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.38425925925925924, + 
"acc_stderr": 0.03317354514310742, + "acc_norm": 0.38425925925925924, + "acc_norm_stderr": 0.03317354514310742 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.21675977653631284, + "acc_stderr": 0.013780598486443347, + "acc_norm": 0.21675977653631284, + "acc_norm_stderr": 0.013780598486443347 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4338235294117647, + "acc_stderr": 0.03010563657001664, + "acc_norm": 0.4338235294117647, + "acc_norm_stderr": 0.03010563657001664 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.40816326530612246, + "acc_stderr": 0.03146465712827423, + "acc_norm": 0.40816326530612246, + "acc_norm_stderr": 0.03146465712827423 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6371308016877637, + "acc_stderr": 0.03129920825530213, + "acc_norm": 0.6371308016877637, + "acc_norm_stderr": 0.03129920825530213 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3246414602346806, + "acc_stderr": 0.011959089388530025, + "acc_norm": 0.3246414602346806, + "acc_norm_stderr": 0.011959089388530025 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5637254901960784, + "acc_stderr": 0.03480693138457039, + "acc_norm": 0.5637254901960784, + "acc_norm_stderr": 0.03480693138457039 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5757575757575758, + "acc_stderr": 0.038592681420702636, + "acc_norm": 0.5757575757575758, + "acc_norm_stderr": 0.038592681420702636 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2839657282741738, + "mc1_stderr": 0.01578537085839671, + "mc2": 0.45146547622354294, + "mc2_stderr": 0.014945688177163706 + }, + 
"harness|ko_commongen_v2|2": { + "acc": 0.4604486422668241, + "acc_stderr": 0.017136487626049846, + "acc_norm": 0.4899645808736718, + "acc_norm_stderr": 0.017186891286894056 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + 
"harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "youjunhyeok/llama3-koen-sft-dpo-v1", + "model_sha": "18c1e463db31c4b765b18f154a9f74c9de6d593f", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/youjunhyeok/solar-ko-recovery-11b-chat-v1/result_2024-07-12 07:54:21.json b/youjunhyeok/solar-ko-recovery-11b-chat-v1/result_2024-07-12 07:54:21.json new file mode 100644 index 0000000000000000000000000000000000000000..a5a283768455e54b787f31c63fb590014273b5ed --- /dev/null +++ b/youjunhyeok/solar-ko-recovery-11b-chat-v1/result_2024-07-12 07:54:21.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.48464163822525597, + "acc_stderr": 0.014604496129394904, + "acc_norm": 0.537542662116041, + "acc_norm_stderr": 0.014570144495075581 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4678350926110337, + "acc_stderr": 0.004979446038824758, + "acc_norm": 0.6400119498107947, + "acc_norm_stderr": 0.004790155370993451 + }, + 
"harness|ko_mmlu_world_religions|5": { + "acc": 0.7076023391812866, + "acc_stderr": 0.03488647713457922, + "acc_norm": 0.7076023391812866, + "acc_norm_stderr": 0.03488647713457922 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.7087378640776699, + "acc_stderr": 0.044986763205729224, + "acc_norm": 0.7087378640776699, + "acc_norm_stderr": 0.044986763205729224 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.7254150702426565, + "acc_stderr": 0.01595982993308406, + "acc_norm": 0.7254150702426565, + "acc_norm_stderr": 0.01595982993308406 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.043097329010363554, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.043097329010363554 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206824, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206824 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.502127659574468, + "acc_stderr": 0.03268572658667493, + "acc_norm": 0.502127659574468, + "acc_norm_stderr": 0.03268572658667493 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4819277108433735, + "acc_stderr": 0.03889951252827216, + "acc_norm": 0.4819277108433735, + "acc_norm_stderr": 0.03889951252827216 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6205787781350482, + "acc_stderr": 0.027559949802347824, + "acc_norm": 0.6205787781350482, + "acc_norm_stderr": 0.027559949802347824 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.6233183856502242, + "acc_stderr": 0.032521134899291884, + "acc_norm": 0.6233183856502242, + "acc_norm_stderr": 0.032521134899291884 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6030534351145038, + "acc_stderr": 0.04291135671009224, + "acc_norm": 0.6030534351145038, + "acc_norm_stderr": 0.04291135671009224 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + 
"harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7525252525252525, + "acc_stderr": 0.030746300742124498, + "acc_norm": 0.7525252525252525, + "acc_norm_stderr": 0.030746300742124498 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5172413793103449, + "acc_stderr": 0.04164188720169375, + "acc_norm": 0.5172413793103449, + "acc_norm_stderr": 0.04164188720169375 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.04533838195929775, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.04533838195929775 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5882352941176471, + "acc_stderr": 0.031968769891957786, + "acc_norm": 0.5882352941176471, + "acc_norm_stderr": 0.031968769891957786 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5384615384615384, + "acc_stderr": 0.025275892070240648, + "acc_norm": 0.5384615384615384, + "acc_norm_stderr": 0.025275892070240648 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.62, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.62, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6111111111111112, + "acc_stderr": 0.0471282125742677, + "acc_norm": 0.6111111111111112, + "acc_norm_stderr": 0.0471282125742677 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.034819048444388045, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.034819048444388045 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6193548387096774, + "acc_stderr": 0.02762171783290703, + "acc_norm": 0.6193548387096774, + "acc_norm_stderr": 0.02762171783290703 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.8034188034188035, + "acc_stderr": 0.02603538609895129, + "acc_norm": 
0.8034188034188035, + "acc_norm_stderr": 0.02603538609895129 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5584905660377358, + "acc_stderr": 0.030561590426731837, + "acc_norm": 0.5584905660377358, + "acc_norm_stderr": 0.030561590426731837 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5545454545454546, + "acc_stderr": 0.047605488214603246, + "acc_norm": 0.5545454545454546, + "acc_norm_stderr": 0.047605488214603246 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.362962962962963, + "acc_stderr": 0.029318203645206865, + "acc_norm": 0.362962962962963, + "acc_norm_stderr": 0.029318203645206865 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3509933774834437, + "acc_stderr": 0.03896981964257375, + "acc_norm": 0.3509933774834437, + "acc_norm_stderr": 0.03896981964257375 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7313432835820896, + "acc_stderr": 0.03134328358208954, + "acc_norm": 0.7313432835820896, + "acc_norm_stderr": 0.03134328358208954 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4682080924855491, + "acc_stderr": 0.03804749744364764, + "acc_norm": 0.4682080924855491, + "acc_norm_stderr": 0.03804749744364764 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.38095238095238093, + "acc_stderr": 0.0250107491161376, + "acc_norm": 0.38095238095238093, + "acc_norm_stderr": 0.0250107491161376 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.04155319955593146, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.04155319955593146 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.74, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.74, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5838150289017341, + "acc_stderr": 
0.026538189104705484, + "acc_norm": 0.5838150289017341, + "acc_norm_stderr": 0.026538189104705484 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5828220858895705, + "acc_stderr": 0.038741028598180814, + "acc_norm": 0.5828220858895705, + "acc_norm_stderr": 0.038741028598180814 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6172839506172839, + "acc_stderr": 0.0270445381384026, + "acc_norm": 0.6172839506172839, + "acc_norm_stderr": 0.0270445381384026 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7253886010362695, + "acc_stderr": 0.03221024508041154, + "acc_norm": 0.7253886010362695, + "acc_norm_stderr": 0.03221024508041154 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.38596491228070173, + "acc_stderr": 0.04579639422070434, + "acc_norm": 0.38596491228070173, + "acc_norm_stderr": 0.04579639422070434 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.691743119266055, + "acc_stderr": 0.019798366698367254, + "acc_norm": 0.691743119266055, + "acc_norm_stderr": 0.019798366698367254 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.04360314860077459, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.04360314860077459 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5915032679738562, + "acc_stderr": 0.028146405993096358, + "acc_norm": 0.5915032679738562, + "acc_norm_stderr": 0.028146405993096358 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.71900826446281, + "acc_stderr": 0.04103203830514511, + "acc_norm": 0.71900826446281, + "acc_norm_stderr": 0.04103203830514511 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5921052631578947, 
+ "acc_stderr": 0.039993097127774734, + "acc_norm": 0.5921052631578947, + "acc_norm_stderr": 0.039993097127774734 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5196078431372549, + "acc_stderr": 0.020212274976302954, + "acc_norm": 0.5196078431372549, + "acc_norm_stderr": 0.020212274976302954 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.375886524822695, + "acc_stderr": 0.02889395541211589, + "acc_norm": 0.375886524822695, + "acc_norm_stderr": 0.02889395541211589 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4017857142857143, + "acc_stderr": 0.04653333146973646, + "acc_norm": 0.4017857142857143, + "acc_norm_stderr": 0.04653333146973646 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.38425925925925924, + "acc_stderr": 0.03317354514310742, + "acc_norm": 0.38425925925925924, + "acc_norm_stderr": 0.03317354514310742 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.27039106145251396, + "acc_stderr": 0.014854993938010085, + "acc_norm": 0.27039106145251396, + "acc_norm_stderr": 0.014854993938010085 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.64, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.64, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5330882352941176, + "acc_stderr": 0.03030625772246832, + "acc_norm": 0.5330882352941176, + "acc_norm_stderr": 0.03030625772246832 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5959183673469388, + "acc_stderr": 0.031414708025865885, + "acc_norm": 0.5959183673469388, + "acc_norm_stderr": 0.031414708025865885 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7637130801687764, + "acc_stderr": 0.027652153144159263, + "acc_norm": 0.7637130801687764, + "acc_norm_stderr": 
0.027652153144159263 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.38005215123859193, + "acc_stderr": 0.01239732820513781, + "acc_norm": 0.38005215123859193, + "acc_norm_stderr": 0.01239732820513781 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.7058823529411765, + "acc_stderr": 0.03198001660115072, + "acc_norm": 0.7058823529411765, + "acc_norm_stderr": 0.03198001660115072 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.696969696969697, + "acc_stderr": 0.03588624800091709, + "acc_norm": 0.696969696969697, + "acc_norm_stderr": 0.03588624800091709 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.28518971848225216, + "mc1_stderr": 0.015805827874454895, + "mc2": 0.4449773662359224, + "mc2_stderr": 0.015024017369373116 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.6269185360094451, + "acc_stderr": 0.01662731827513745, + "acc_norm": 0.6564344746162928, + "acc_norm_stderr": 0.01632733480642913 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + 
"harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "youjunhyeok/solar-ko-recovery-11b-chat-v1", + "model_sha": "7591d97e7ff339f00548ff8984fb392c47a42f53", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file 
diff --git a/youjunhyeok/solar-ko-recovery-11b-chat/result_2024-07-14 23:55:44.json b/youjunhyeok/solar-ko-recovery-11b-chat/result_2024-07-14 23:55:44.json new file mode 100644 index 0000000000000000000000000000000000000000..8e1bc335e83ae028f0bc22678469de28285c02eb --- /dev/null +++ b/youjunhyeok/solar-ko-recovery-11b-chat/result_2024-07-14 23:55:44.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4948805460750853, + "acc_stderr": 0.01461062489030916, + "acc_norm": 0.5426621160409556, + "acc_norm_stderr": 0.01455810654392407 + }, + "harness|ko_hellaswag|10": { + "acc": 0.46594303923521213, + "acc_stderr": 0.004978192893406285, + "acc_norm": 0.6430989842660825, + "acc_norm_stderr": 0.004781061390873923 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.7251461988304093, + "acc_stderr": 0.03424042924691583, + "acc_norm": 0.7251461988304093, + "acc_norm_stderr": 0.03424042924691583 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.7087378640776699, + "acc_stderr": 0.044986763205729224, + "acc_norm": 0.7087378640776699, + "acc_norm_stderr": 0.044986763205729224 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.7305236270753512, + "acc_stderr": 0.015866243073215023, + "acc_norm": 0.7305236270753512, + "acc_norm_stderr": 0.015866243073215023 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.043163785995113245, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.043163785995113245 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909284, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909284 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.49361702127659574, + "acc_stderr": 0.03268335899936338, + "acc_norm": 0.49361702127659574, + "acc_norm_stderr": 0.03268335899936338 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.5060240963855421, + "acc_stderr": 0.038922121953330446, + "acc_norm": 0.5060240963855421, + 
"acc_norm_stderr": 0.038922121953330446 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.617363344051447, + "acc_stderr": 0.02760468902858199, + "acc_norm": 0.617363344051447, + "acc_norm_stderr": 0.02760468902858199 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.6322869955156951, + "acc_stderr": 0.03236198350928275, + "acc_norm": 0.6322869955156951, + "acc_norm_stderr": 0.03236198350928275 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6335877862595419, + "acc_stderr": 0.04225875451969638, + "acc_norm": 0.6335877862595419, + "acc_norm_stderr": 0.04225875451969638 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7727272727272727, + "acc_stderr": 0.02985751567338641, + "acc_norm": 0.7727272727272727, + "acc_norm_stderr": 0.02985751567338641 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5172413793103449, + "acc_stderr": 0.04164188720169375, + "acc_norm": 0.5172413793103449, + "acc_norm_stderr": 0.04164188720169375 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.28431372549019607, + "acc_stderr": 0.04488482852329017, + "acc_norm": 0.28431372549019607, + "acc_norm_stderr": 0.04488482852329017 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5798319327731093, + "acc_stderr": 0.03206183783236152, + "acc_norm": 0.5798319327731093, + "acc_norm_stderr": 0.03206183783236152 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5538461538461539, + "acc_stderr": 0.025203571773028326, + "acc_norm": 0.5538461538461539, + "acc_norm_stderr": 0.025203571773028326 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.62, + "acc_stderr": 0.04878317312145631, + "acc_norm": 0.62, + "acc_norm_stderr": 0.04878317312145631 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, 
+ "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6203703703703703, + "acc_stderr": 0.04691521224077742, + "acc_norm": 0.6203703703703703, + "acc_norm_stderr": 0.04691521224077742 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.034819048444388045, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.034819048444388045 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6193548387096774, + "acc_stderr": 0.027621717832907036, + "acc_norm": 0.6193548387096774, + "acc_norm_stderr": 0.027621717832907036 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7948717948717948, + "acc_stderr": 0.02645350805404036, + "acc_norm": 0.7948717948717948, + "acc_norm_stderr": 0.02645350805404036 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5773584905660377, + "acc_stderr": 0.03040233144576954, + "acc_norm": 0.5773584905660377, + "acc_norm_stderr": 0.03040233144576954 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.6, + "acc_stderr": 0.0469237132203465, + "acc_norm": 0.6, + "acc_norm_stderr": 0.0469237132203465 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3592592592592593, + "acc_stderr": 0.02925290592725198, + "acc_norm": 0.3592592592592593, + "acc_norm_stderr": 0.02925290592725198 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3576158940397351, + "acc_stderr": 0.03913453431177258, + "acc_norm": 0.3576158940397351, + "acc_norm_stderr": 0.03913453431177258 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7064676616915423, + "acc_stderr": 0.032200241045342054, + "acc_norm": 0.7064676616915423, + "acc_norm_stderr": 0.032200241045342054 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.49710982658959535, + "acc_stderr": 0.038124005659748335, + "acc_norm": 0.49710982658959535, + "acc_norm_stderr": 0.038124005659748335 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.36243386243386244, + 
"acc_stderr": 0.024757473902752052, + "acc_norm": 0.36243386243386244, + "acc_norm_stderr": 0.024757473902752052 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5972222222222222, + "acc_stderr": 0.04101405519842426, + "acc_norm": 0.5972222222222222, + "acc_norm_stderr": 0.04101405519842426 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.78, + "acc_stderr": 0.04163331998932263, + "acc_norm": 0.78, + "acc_norm_stderr": 0.04163331998932263 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5982658959537572, + "acc_stderr": 0.026394104177643634, + "acc_norm": 0.5982658959537572, + "acc_norm_stderr": 0.026394104177643634 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5950920245398773, + "acc_stderr": 0.038566721635489125, + "acc_norm": 0.5950920245398773, + "acc_norm_stderr": 0.038566721635489125 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6234567901234568, + "acc_stderr": 0.026959344518747784, + "acc_norm": 0.6234567901234568, + "acc_norm_stderr": 0.026959344518747784 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7253886010362695, + "acc_stderr": 0.032210245080411544, + "acc_norm": 0.7253886010362695, + "acc_norm_stderr": 0.032210245080411544 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.35964912280701755, + "acc_stderr": 0.04514496132873633, + "acc_norm": 0.35964912280701755, + "acc_norm_stderr": 0.04514496132873633 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6862385321100918, + "acc_stderr": 0.01989472334146914, + "acc_norm": 0.6862385321100918, + "acc_norm_stderr": 0.01989472334146914 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 
0.38095238095238093, + "acc_stderr": 0.043435254289490965, + "acc_norm": 0.38095238095238093, + "acc_norm_stderr": 0.043435254289490965 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5980392156862745, + "acc_stderr": 0.02807415894760065, + "acc_norm": 0.5980392156862745, + "acc_norm_stderr": 0.02807415894760065 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.63, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.63, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7024793388429752, + "acc_stderr": 0.04173349148083498, + "acc_norm": 0.7024793388429752, + "acc_norm_stderr": 0.04173349148083498 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5855263157894737, + "acc_stderr": 0.04008973785779205, + "acc_norm": 0.5855263157894737, + "acc_norm_stderr": 0.04008973785779205 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5147058823529411, + "acc_stderr": 0.020219083895133924, + "acc_norm": 0.5147058823529411, + "acc_norm_stderr": 0.020219083895133924 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.41843971631205673, + "acc_stderr": 0.02942799403941999, + "acc_norm": 0.41843971631205673, + "acc_norm_stderr": 0.02942799403941999 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4017857142857143, + "acc_stderr": 0.04653333146973646, + "acc_norm": 0.4017857142857143, + "acc_norm_stderr": 0.04653333146973646 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4166666666666667, + "acc_stderr": 0.033622774366080424, + "acc_norm": 0.4166666666666667, + "acc_norm_stderr": 0.033622774366080424 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2324022346368715, + "acc_stderr": 0.01412596875467338, + "acc_norm": 0.2324022346368715, + "acc_norm_stderr": 0.01412596875467338 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.48, + "acc_stderr": 0.05021167315686779, + "acc_norm": 0.48, + "acc_norm_stderr": 0.05021167315686779 + }, + 
"harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.67, + "acc_stderr": 0.04725815626252607, + "acc_norm": 0.67, + "acc_norm_stderr": 0.04725815626252607 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5404411764705882, + "acc_stderr": 0.03027332507734576, + "acc_norm": 0.5404411764705882, + "acc_norm_stderr": 0.03027332507734576 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6326530612244898, + "acc_stderr": 0.03086214492108756, + "acc_norm": 0.6326530612244898, + "acc_norm_stderr": 0.03086214492108756 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7552742616033755, + "acc_stderr": 0.027985699387036423, + "acc_norm": 0.7552742616033755, + "acc_norm_stderr": 0.027985699387036423 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.394393741851369, + "acc_stderr": 0.01248214166563119, + "acc_norm": 0.394393741851369, + "acc_norm_stderr": 0.01248214166563119 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.7009803921568627, + "acc_stderr": 0.03213325717373617, + "acc_norm": 0.7009803921568627, + "acc_norm_stderr": 0.03213325717373617 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6545454545454545, + "acc_stderr": 0.037131580674819135, + "acc_norm": 0.6545454545454545, + "acc_norm_stderr": 0.037131580674819135 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2913096695226438, + "mc1_stderr": 0.01590598704818483, + "mc2": 0.44559687678850096, + "mc2_stderr": 0.014987579532119455 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.654073199527745, + "acc_stderr": 0.016353853414347568, + "acc_norm": 0.6682408500590319, + "acc_norm_stderr": 0.01618798464215732 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + 
"harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + 
"harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "youjunhyeok/solar-ko-recovery-11b-chat", + "model_sha": "6daa213641e284b12bba139d57073de3ee907fa8", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/youjunhyeok/solar-ko-recovery-11b-freeze/result_2024-07-22 07:11:34.json b/youjunhyeok/solar-ko-recovery-11b-freeze/result_2024-07-22 07:11:34.json new file mode 100644 index 0000000000000000000000000000000000000000..b096abaa57250d98c779e85ee7b8d4ba5620b538 --- /dev/null +++ b/youjunhyeok/solar-ko-recovery-11b-freeze/result_2024-07-22 07:11:34.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.49402730375426623, + "acc_stderr": 0.014610348300255798, + "acc_norm": 0.5341296928327645, + "acc_norm_stderr": 0.014577311315231099 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4713204540928102, + "acc_stderr": 0.004981566295189438, + "acc_norm": 0.6469826727743477, + "acc_norm_stderr": 0.0047693133004702406 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.7076023391812866, + "acc_stderr": 0.034886477134579215, + "acc_norm": 0.7076023391812866, + "acc_norm_stderr": 0.034886477134579215 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.7378640776699029, + "acc_stderr": 0.043546310772605956, + "acc_norm": 0.7378640776699029, + "acc_norm_stderr": 0.043546310772605956 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.7394636015325671, + "acc_stderr": 0.015696008563807092, + 
"acc_norm": 0.7394636015325671, + "acc_norm_stderr": 0.015696008563807092 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4888888888888889, + "acc_stderr": 0.04318275491977976, + "acc_norm": 0.4888888888888889, + "acc_norm_stderr": 0.04318275491977976 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.044619604333847415, + "acc_norm": 0.27, + "acc_norm_stderr": 0.044619604333847415 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.49361702127659574, + "acc_stderr": 0.03268335899936337, + "acc_norm": 0.49361702127659574, + "acc_norm_stderr": 0.03268335899936337 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.5, + "acc_stderr": 0.03892494720807614, + "acc_norm": 0.5, + "acc_norm_stderr": 0.03892494720807614 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.6366559485530546, + "acc_stderr": 0.027316847674192717, + "acc_norm": 0.6366559485530546, + "acc_norm_stderr": 0.027316847674192717 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.6457399103139013, + "acc_stderr": 0.032100621541349864, + "acc_norm": 0.6457399103139013, + "acc_norm_stderr": 0.032100621541349864 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6259541984732825, + "acc_stderr": 0.04243869242230524, + "acc_norm": 0.6259541984732825, + "acc_norm_stderr": 0.04243869242230524 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956913, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956913 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.7626262626262627, + "acc_stderr": 0.030313710538198906, + "acc_norm": 0.7626262626262627, + "acc_norm_stderr": 0.030313710538198906 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4827586206896552, + "acc_stderr": 0.04164188720169377, + "acc_norm": 0.4827586206896552, + "acc_norm_stderr": 0.04164188720169377 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3235294117647059, + "acc_stderr": 0.046550104113196177, + "acc_norm": 
0.3235294117647059, + "acc_norm_stderr": 0.046550104113196177 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.634453781512605, + "acc_stderr": 0.031282177063684614, + "acc_norm": 0.634453781512605, + "acc_norm_stderr": 0.031282177063684614 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5846153846153846, + "acc_stderr": 0.024985354923102353, + "acc_norm": 0.5846153846153846, + "acc_norm_stderr": 0.024985354923102353 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.63, + "acc_stderr": 0.04852365870939098, + "acc_norm": 0.63, + "acc_norm_stderr": 0.04852365870939098 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.35, + "acc_stderr": 0.04793724854411019, + "acc_norm": 0.35, + "acc_norm_stderr": 0.04793724854411019 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6481481481481481, + "acc_stderr": 0.046166311118017146, + "acc_norm": 0.6481481481481481, + "acc_norm_stderr": 0.046166311118017146 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4088669950738916, + "acc_stderr": 0.034590588158832314, + "acc_norm": 0.4088669950738916, + "acc_norm_stderr": 0.034590588158832314 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.6290322580645161, + "acc_stderr": 0.027480541887953586, + "acc_norm": 0.6290322580645161, + "acc_norm_stderr": 0.027480541887953586 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7777777777777778, + "acc_stderr": 0.02723601394619667, + "acc_norm": 0.7777777777777778, + "acc_norm_stderr": 0.02723601394619667 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5735849056603773, + "acc_stderr": 0.030437794342983052, + "acc_norm": 0.5735849056603773, + "acc_norm_stderr": 0.030437794342983052 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5909090909090909, + "acc_stderr": 0.04709306978661895, + "acc_norm": 0.5909090909090909, + "acc_norm_stderr": 0.04709306978661895 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3111111111111111, + 
"acc_stderr": 0.028226446749683515, + "acc_norm": 0.3111111111111111, + "acc_norm_stderr": 0.028226446749683515 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33112582781456956, + "acc_stderr": 0.038425817186598696, + "acc_norm": 0.33112582781456956, + "acc_norm_stderr": 0.038425817186598696 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.7164179104477612, + "acc_stderr": 0.03187187537919796, + "acc_norm": 0.7164179104477612, + "acc_norm_stderr": 0.03187187537919796 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4913294797687861, + "acc_stderr": 0.03811890988940412, + "acc_norm": 0.4913294797687861, + "acc_norm_stderr": 0.03811890988940412 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.36507936507936506, + "acc_stderr": 0.024796060602699947, + "acc_norm": 0.36507936507936506, + "acc_norm_stderr": 0.024796060602699947 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5902777777777778, + "acc_stderr": 0.04112490974670788, + "acc_norm": 0.5902777777777778, + "acc_norm_stderr": 0.04112490974670788 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.45, + "acc_stderr": 0.05, + "acc_norm": 0.45, + "acc_norm_stderr": 0.05 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.78, + "acc_stderr": 0.04163331998932262, + "acc_norm": 0.78, + "acc_norm_stderr": 0.04163331998932262 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.615606936416185, + "acc_stderr": 0.026189666966272035, + "acc_norm": 0.615606936416185, + "acc_norm_stderr": 0.026189666966272035 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5766871165644172, + "acc_stderr": 0.03881891213334384, + "acc_norm": 0.5766871165644172, + "acc_norm_stderr": 0.03881891213334384 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.6265432098765432, + "acc_stderr": 0.026915003011380154, + "acc_norm": 0.6265432098765432, + "acc_norm_stderr": 0.026915003011380154 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.37, + "acc_stderr": 
0.048523658709390974, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709390974 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7357512953367875, + "acc_stderr": 0.03182155050916647, + "acc_norm": 0.7357512953367875, + "acc_norm_stderr": 0.03182155050916647 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.40350877192982454, + "acc_stderr": 0.046151869625837026, + "acc_norm": 0.40350877192982454, + "acc_norm_stderr": 0.046151869625837026 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6954128440366972, + "acc_stderr": 0.01973229942035403, + "acc_norm": 0.6954128440366972, + "acc_norm_stderr": 0.01973229942035403 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.04285714285714281, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.04285714285714281 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.6013071895424836, + "acc_stderr": 0.028036092273891772, + "acc_norm": 0.6013071895424836, + "acc_norm_stderr": 0.028036092273891772 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.57, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.57, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.71900826446281, + "acc_stderr": 0.04103203830514512, + "acc_norm": 0.71900826446281, + "acc_norm_stderr": 0.04103203830514512 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5592105263157895, + "acc_stderr": 0.04040311062490438, + "acc_norm": 0.5592105263157895, + "acc_norm_stderr": 0.04040311062490438 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.5130718954248366, + "acc_stderr": 0.020220920829626916, + "acc_norm": 0.5130718954248366, + "acc_norm_stderr": 0.020220920829626916 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.35815602836879434, + "acc_stderr": 0.028602085862759415, + "acc_norm": 0.35815602836879434, + "acc_norm_stderr": 0.028602085862759415 + }, + 
"harness|ko_mmlu_machine_learning|5": { + "acc": 0.38392857142857145, + "acc_stderr": 0.046161430750285455, + "acc_norm": 0.38392857142857145, + "acc_norm_stderr": 0.046161430750285455 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.47685185185185186, + "acc_stderr": 0.03406315360711507, + "acc_norm": 0.47685185185185186, + "acc_norm_stderr": 0.03406315360711507 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2100558659217877, + "acc_stderr": 0.013623755371333538, + "acc_norm": 0.2100558659217877, + "acc_norm_stderr": 0.013623755371333538 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.59, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.59, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.5588235294117647, + "acc_stderr": 0.030161911930767112, + "acc_norm": 0.5588235294117647, + "acc_norm_stderr": 0.030161911930767112 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.6530612244897959, + "acc_stderr": 0.030472526026726496, + "acc_norm": 0.6530612244897959, + "acc_norm_stderr": 0.030472526026726496 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7383966244725738, + "acc_stderr": 0.028609516716994934, + "acc_norm": 0.7383966244725738, + "acc_norm_stderr": 0.028609516716994934 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3956975228161669, + "acc_stderr": 0.012489290735449014, + "acc_norm": 0.3956975228161669, + "acc_norm_stderr": 0.012489290735449014 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6813725490196079, + "acc_stderr": 0.03270287181482081, + "acc_norm": 0.6813725490196079, + "acc_norm_stderr": 0.03270287181482081 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6484848484848484, + "acc_stderr": 
0.037282069986826503, + "acc_norm": 0.6484848484848484, + "acc_norm_stderr": 0.037282069986826503 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2802937576499388, + "mc1_stderr": 0.01572313952460875, + "mc2": 0.4287978889432169, + "mc2_stderr": 0.014980073256947923 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.6469893742621016, + "acc_stderr": 0.016430745982427147, + "acc_norm": 0.667060212514758, + "acc_norm_stderr": 0.016202431208373815 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + 
"harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "youjunhyeok/solar-ko-recovery-11b-freeze", + "model_sha": "63d30b837247cb00d5f0d761fb0e5c1126e80e39", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/yunconglong/DARE_TIES_13B/result_2024-05-15 16:57:15.json b/yunconglong/DARE_TIES_13B/result_2024-05-15 16:57:15.json new file mode 100644 index 0000000000000000000000000000000000000000..10f53498162ff9457cbca7f8f4d95d36eb4e6777 --- /dev/null +++ b/yunconglong/DARE_TIES_13B/result_2024-05-15 16:57:15.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.41552901023890787, + "acc_stderr": 0.014401366641216386, + "acc_norm": 
0.4658703071672355, + "acc_norm_stderr": 0.0145773113152311 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4069906393148775, + "acc_stderr": 0.004902690765066421, + "acc_norm": 0.5456084445329615, + "acc_norm_stderr": 0.004968979259738336 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4619883040935672, + "acc_stderr": 0.03823727092882307, + "acc_norm": 0.4619883040935672, + "acc_norm_stderr": 0.03823727092882307 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6019417475728155, + "acc_stderr": 0.04846748253977238, + "acc_norm": 0.6019417475728155, + "acc_norm_stderr": 0.04846748253977238 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.46871008939974457, + "acc_stderr": 0.017844918090468537, + "acc_norm": 0.46871008939974457, + "acc_norm_stderr": 0.017844918090468537 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.34814814814814815, + "acc_stderr": 0.041153246103369526, + "acc_norm": 0.34814814814814815, + "acc_norm_stderr": 0.041153246103369526 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.42127659574468085, + "acc_stderr": 0.03227834510146267, + "acc_norm": 0.42127659574468085, + "acc_norm_stderr": 0.03227834510146267 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.37349397590361444, + "acc_stderr": 0.03765845117168863, + "acc_norm": 0.37349397590361444, + "acc_norm_stderr": 0.03765845117168863 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.45016077170418006, + "acc_stderr": 0.028256660723360184, + "acc_norm": 0.45016077170418006, + "acc_norm_stderr": 0.028256660723360184 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4125560538116592, + "acc_stderr": 0.03304062175449296, + "acc_norm": 0.4125560538116592, + "acc_norm_stderr": 0.03304062175449296 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4122137404580153, + "acc_stderr": 0.04317171194870255, + 
"acc_norm": 0.4122137404580153, + "acc_norm_stderr": 0.04317171194870255 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5606060606060606, + "acc_stderr": 0.0353608594752948, + "acc_norm": 0.5606060606060606, + "acc_norm_stderr": 0.0353608594752948 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.43448275862068964, + "acc_stderr": 0.041307408795554966, + "acc_norm": 0.43448275862068964, + "acc_norm_stderr": 0.041307408795554966 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.04158307533083286, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.04158307533083286 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5462184873949579, + "acc_stderr": 0.03233943468182088, + "acc_norm": 0.5462184873949579, + "acc_norm_stderr": 0.03233943468182088 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4564102564102564, + "acc_stderr": 0.025254485424799605, + "acc_norm": 0.4564102564102564, + "acc_norm_stderr": 0.025254485424799605 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.63, + "acc_stderr": 0.04852365870939098, + "acc_norm": 0.63, + "acc_norm_stderr": 0.04852365870939098 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.35960591133004927, + "acc_stderr": 0.03376458246509567, + "acc_norm": 0.35960591133004927, + "acc_norm_stderr": 0.03376458246509567 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.44516129032258067, + 
"acc_stderr": 0.028272410186214906, + "acc_norm": 0.44516129032258067, + "acc_norm_stderr": 0.028272410186214906 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7649572649572649, + "acc_stderr": 0.027778835904935434, + "acc_norm": 0.7649572649572649, + "acc_norm_stderr": 0.027778835904935434 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4641509433962264, + "acc_stderr": 0.030693675018458003, + "acc_norm": 0.4641509433962264, + "acc_norm_stderr": 0.030693675018458003 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.0478833976870286, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.0478833976870286 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.337037037037037, + "acc_stderr": 0.028820884666253255, + "acc_norm": 0.337037037037037, + "acc_norm_stderr": 0.028820884666253255 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.24503311258278146, + "acc_stderr": 0.03511807571804725, + "acc_norm": 0.24503311258278146, + "acc_norm_stderr": 0.03511807571804725 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5621890547263682, + "acc_stderr": 0.035080801121998406, + "acc_norm": 0.5621890547263682, + "acc_norm_stderr": 0.035080801121998406 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.41040462427745666, + "acc_stderr": 0.03750757044895537, + "acc_norm": 0.41040462427745666, + "acc_norm_stderr": 0.03750757044895537 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3915343915343915, + "acc_stderr": 0.025138091388851112, + "acc_norm": 0.3915343915343915, + "acc_norm_stderr": 0.025138091388851112 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3541666666666667, + "acc_stderr": 0.039994111357535424, + "acc_norm": 0.3541666666666667, + "acc_norm_stderr": 0.039994111357535424 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + 
"harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.55, + "acc_stderr": 0.05000000000000001, + "acc_norm": 0.55, + "acc_norm_stderr": 0.05000000000000001 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4913294797687861, + "acc_stderr": 0.02691504735536981, + "acc_norm": 0.4913294797687861, + "acc_norm_stderr": 0.02691504735536981 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5214723926380368, + "acc_stderr": 0.03924746876751129, + "acc_norm": 0.5214723926380368, + "acc_norm_stderr": 0.03924746876751129 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4506172839506173, + "acc_stderr": 0.0276847214156562, + "acc_norm": 0.4506172839506173, + "acc_norm_stderr": 0.0276847214156562 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.49740932642487046, + "acc_stderr": 0.03608390745384487, + "acc_norm": 0.49740932642487046, + "acc_norm_stderr": 0.03608390745384487 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.044346007015849245, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.044346007015849245 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5211009174311927, + "acc_stderr": 0.021418224754264643, + "acc_norm": 0.5211009174311927, + "acc_norm_stderr": 0.021418224754264643 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4523809523809524, + "acc_stderr": 0.044518079590553275, + "acc_norm": 0.4523809523809524, + "acc_norm_stderr": 0.044518079590553275 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4542483660130719, + "acc_stderr": 0.028509807802626567, + "acc_norm": 0.4542483660130719, + "acc_norm_stderr": 0.028509807802626567 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + 
"harness|ko_mmlu_international_law|5": { + "acc": 0.5785123966942148, + "acc_stderr": 0.04507732278775088, + "acc_norm": 0.5785123966942148, + "acc_norm_stderr": 0.04507732278775088 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.39473684210526316, + "acc_stderr": 0.03977749934622074, + "acc_norm": 0.39473684210526316, + "acc_norm_stderr": 0.03977749934622074 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.38562091503267976, + "acc_stderr": 0.019691459052354143, + "acc_norm": 0.38562091503267976, + "acc_norm_stderr": 0.019691459052354143 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3546099290780142, + "acc_stderr": 0.02853865002887864, + "acc_norm": 0.3546099290780142, + "acc_norm_stderr": 0.02853865002887864 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.375, + "acc_stderr": 0.04595091388086298, + "acc_norm": 0.375, + "acc_norm_stderr": 0.04595091388086298 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4351851851851852, + "acc_stderr": 0.03381200005643526, + "acc_norm": 0.4351851851851852, + "acc_norm_stderr": 0.03381200005643526 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.28268156424581004, + "acc_stderr": 0.015060381730018097, + "acc_norm": 0.28268156424581004, + "acc_norm_stderr": 0.015060381730018097 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.68, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.68, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3492647058823529, + "acc_stderr": 0.028959755196824866, + "acc_norm": 0.3492647058823529, + "acc_norm_stderr": 0.028959755196824866 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5020408163265306, + "acc_stderr": 0.0320089533497105, + "acc_norm": 0.5020408163265306, + 
"acc_norm_stderr": 0.0320089533497105 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6118143459915611, + "acc_stderr": 0.03172295004332331, + "acc_norm": 0.6118143459915611, + "acc_norm_stderr": 0.03172295004332331 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.34810951760104303, + "acc_stderr": 0.012166738993698197, + "acc_norm": 0.34810951760104303, + "acc_norm_stderr": 0.012166738993698197 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.47549019607843135, + "acc_stderr": 0.035050931943487976, + "acc_norm": 0.47549019607843135, + "acc_norm_stderr": 0.035050931943487976 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5212121212121212, + "acc_stderr": 0.03900828913737301, + "acc_norm": 0.5212121212121212, + "acc_norm_stderr": 0.03900828913737301 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.44063647490820074, + "mc1_stderr": 0.01737969755543745, + "mc2": 0.6228366336587989, + "mc2_stderr": 0.01607883678122142 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.44510035419126326, + "acc_stderr": 0.017086417431005467, + "acc_norm": 0.45690672963400236, + "acc_norm_stderr": 0.017126389093086784 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + 
"harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "yunconglong/DARE_TIES_13B", + "model_sha": 
"74c6e4fbd272c9d897e8c93ee7de8a234f61900f", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/yunconglong/MoE_13B_DPO/result_2024-07-17 20:18:33.json b/yunconglong/MoE_13B_DPO/result_2024-07-17 20:18:33.json new file mode 100644 index 0000000000000000000000000000000000000000..44157b881d8c36e1a059fbb6874a81c07b30d2a1 --- /dev/null +++ b/yunconglong/MoE_13B_DPO/result_2024-07-17 20:18:33.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.41723549488054607, + "acc_stderr": 0.014409825518403079, + "acc_norm": 0.46928327645051193, + "acc_norm_stderr": 0.014583792546304038 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4060944035052778, + "acc_stderr": 0.004900988997414232, + "acc_norm": 0.5427205735909182, + "acc_norm_stderr": 0.004971534874389949 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4678362573099415, + "acc_stderr": 0.03826882417660369, + "acc_norm": 0.4678362573099415, + "acc_norm_stderr": 0.03826882417660369 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6019417475728155, + "acc_stderr": 0.04846748253977238, + "acc_norm": 0.6019417475728155, + "acc_norm_stderr": 0.04846748253977238 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4725415070242657, + "acc_stderr": 0.01785298126663396, + "acc_norm": 0.4725415070242657, + "acc_norm_stderr": 0.01785298126663396 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.35555555555555557, + "acc_stderr": 0.04135176749720386, + "acc_norm": 0.35555555555555557, + "acc_norm_stderr": 0.04135176749720386 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.04461960433384741, + "acc_norm": 0.27, + "acc_norm_stderr": 0.04461960433384741 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.42127659574468085, + "acc_stderr": 0.03227834510146267, + "acc_norm": 0.42127659574468085, + 
"acc_norm_stderr": 0.03227834510146267 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.37349397590361444, + "acc_stderr": 0.03765845117168863, + "acc_norm": 0.37349397590361444, + "acc_norm_stderr": 0.03765845117168863 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4437299035369775, + "acc_stderr": 0.028217683556652315, + "acc_norm": 0.4437299035369775, + "acc_norm_stderr": 0.028217683556652315 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.42152466367713004, + "acc_stderr": 0.033141902221106564, + "acc_norm": 0.42152466367713004, + "acc_norm_stderr": 0.033141902221106564 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4198473282442748, + "acc_stderr": 0.04328577215262972, + "acc_norm": 0.4198473282442748, + "acc_norm_stderr": 0.04328577215262972 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5656565656565656, + "acc_stderr": 0.03531505879359183, + "acc_norm": 0.5656565656565656, + "acc_norm_stderr": 0.03531505879359183 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.43448275862068964, + "acc_stderr": 0.041307408795554966, + "acc_norm": 0.43448275862068964, + "acc_norm_stderr": 0.041307408795554966 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.042801058373643945, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.042801058373643945 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.542016806722689, + "acc_stderr": 0.032363611119519416, + "acc_norm": 0.542016806722689, + "acc_norm_stderr": 0.032363611119519416 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.45384615384615384, + "acc_stderr": 0.025242770987126174, + "acc_norm": 0.45384615384615384, + "acc_norm_stderr": 0.025242770987126174 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.63, + 
"acc_stderr": 0.04852365870939098, + "acc_norm": 0.63, + "acc_norm_stderr": 0.04852365870939098 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3645320197044335, + "acc_stderr": 0.0338640574606209, + "acc_norm": 0.3645320197044335, + "acc_norm_stderr": 0.0338640574606209 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.43870967741935485, + "acc_stderr": 0.02822949732031722, + "acc_norm": 0.43870967741935485, + "acc_norm_stderr": 0.02822949732031722 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7692307692307693, + "acc_stderr": 0.027601921381417604, + "acc_norm": 0.7692307692307693, + "acc_norm_stderr": 0.027601921381417604 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.46037735849056605, + "acc_stderr": 0.030676096599389174, + "acc_norm": 0.46037735849056605, + "acc_norm_stderr": 0.030676096599389174 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.509090909090909, + "acc_stderr": 0.0478833976870286, + "acc_norm": 0.509090909090909, + "acc_norm_stderr": 0.0478833976870286 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.34074074074074073, + "acc_stderr": 0.028897748741131157, + "acc_norm": 0.34074074074074073, + "acc_norm_stderr": 0.028897748741131157 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.24503311258278146, + "acc_stderr": 0.03511807571804725, + "acc_norm": 0.24503311258278146, + "acc_norm_stderr": 0.03511807571804725 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5621890547263682, + "acc_stderr": 0.035080801121998406, + "acc_norm": 0.5621890547263682, + "acc_norm_stderr": 0.035080801121998406 + }, + 
"harness|ko_mmlu_college_medicine|5": { + "acc": 0.41040462427745666, + "acc_stderr": 0.03750757044895537, + "acc_norm": 0.41040462427745666, + "acc_norm_stderr": 0.03750757044895537 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3862433862433862, + "acc_stderr": 0.025075981767601684, + "acc_norm": 0.3862433862433862, + "acc_norm_stderr": 0.025075981767601684 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3541666666666667, + "acc_stderr": 0.039994111357535424, + "acc_norm": 0.3541666666666667, + "acc_norm_stderr": 0.039994111357535424 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.55, + "acc_stderr": 0.05000000000000001, + "acc_norm": 0.55, + "acc_norm_stderr": 0.05000000000000001 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4913294797687861, + "acc_stderr": 0.02691504735536981, + "acc_norm": 0.4913294797687861, + "acc_norm_stderr": 0.02691504735536981 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5214723926380368, + "acc_stderr": 0.03924746876751129, + "acc_norm": 0.5214723926380368, + "acc_norm_stderr": 0.03924746876751129 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4506172839506173, + "acc_stderr": 0.027684721415656203, + "acc_norm": 0.4506172839506173, + "acc_norm_stderr": 0.027684721415656203 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.49222797927461137, + "acc_stderr": 0.03608003225569654, + "acc_norm": 0.49222797927461137, + "acc_norm_stderr": 0.03608003225569654 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.044346007015849245, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 
0.044346007015849245 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5247706422018349, + "acc_stderr": 0.021410999753635918, + "acc_norm": 0.5247706422018349, + "acc_norm_stderr": 0.021410999753635918 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4523809523809524, + "acc_stderr": 0.044518079590553275, + "acc_norm": 0.4523809523809524, + "acc_norm_stderr": 0.044518079590553275 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.45751633986928103, + "acc_stderr": 0.02852638345214263, + "acc_norm": 0.45751633986928103, + "acc_norm_stderr": 0.02852638345214263 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956911, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956911 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5867768595041323, + "acc_stderr": 0.04495087843548408, + "acc_norm": 0.5867768595041323, + "acc_norm_stderr": 0.04495087843548408 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.39473684210526316, + "acc_stderr": 0.03977749934622074, + "acc_norm": 0.39473684210526316, + "acc_norm_stderr": 0.03977749934622074 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3888888888888889, + "acc_stderr": 0.019722058939618068, + "acc_norm": 0.3888888888888889, + "acc_norm_stderr": 0.019722058939618068 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3546099290780142, + "acc_stderr": 0.02853865002887864, + "acc_norm": 0.3546099290780142, + "acc_norm_stderr": 0.02853865002887864 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.39285714285714285, + "acc_stderr": 0.04635550135609976, + "acc_norm": 0.39285714285714285, + "acc_norm_stderr": 0.04635550135609976 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4305555555555556, + "acc_stderr": 0.03376922151252335, + "acc_norm": 0.4305555555555556, + "acc_norm_stderr": 0.03376922151252335 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2849162011173184, + "acc_stderr": 
0.015096222302469804, + "acc_norm": 0.2849162011173184, + "acc_norm_stderr": 0.015096222302469804 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.68, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.68, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.35661764705882354, + "acc_stderr": 0.029097209568411945, + "acc_norm": 0.35661764705882354, + "acc_norm_stderr": 0.029097209568411945 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.49795918367346936, + "acc_stderr": 0.0320089533497105, + "acc_norm": 0.49795918367346936, + "acc_norm_stderr": 0.0320089533497105 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6160337552742616, + "acc_stderr": 0.031658678064106674, + "acc_norm": 0.6160337552742616, + "acc_norm_stderr": 0.031658678064106674 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3474576271186441, + "acc_stderr": 0.012161417729749805, + "acc_norm": 0.3474576271186441, + "acc_norm_stderr": 0.012161417729749805 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4803921568627451, + "acc_stderr": 0.03506612560524866, + "acc_norm": 0.4803921568627451, + "acc_norm_stderr": 0.03506612560524866 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5333333333333333, + "acc_stderr": 0.03895658065271846, + "acc_norm": 0.5333333333333333, + "acc_norm_stderr": 0.03895658065271846 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.4394124847001224, + "mc1_stderr": 0.01737452048251371, + "mc2": 0.6185527129613153, + "mc2_stderr": 0.016093855354993487 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.448642266824085, + "acc_stderr": 0.01709943051472578, + "acc_norm": 0.4604486422668241, + "acc_norm_stderr": 0.017136487626049846 + } + }, + "versions": { + "all": 0, + 
"harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + 
"harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "yunconglong/MoE_13B_DPO", + "model_sha": "d8d6a47f877fee3e638a158c2bd637c0013ed4e4", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/yunconglong/Truthful_DPO_TomGrc_FusionNet_7Bx2_MoE_13B/result_2024-07-02 05:47:50.json b/yunconglong/Truthful_DPO_TomGrc_FusionNet_7Bx2_MoE_13B/result_2024-07-02 05:47:50.json new file mode 100644 index 0000000000000000000000000000000000000000..98d092bfa32162d5cfa4119ca6a0ec847a24c00c --- /dev/null +++ b/yunconglong/Truthful_DPO_TomGrc_FusionNet_7Bx2_MoE_13B/result_2024-07-02 05:47:50.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4087030716723549, + "acc_stderr": 0.014365750345427001, + "acc_norm": 0.4658703071672355, + "acc_norm_stderr": 0.0145773113152311 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4048994224258116, + "acc_stderr": 0.004898693652043317, + "acc_norm": 0.5402310296753635, + "acc_norm_stderr": 0.004973602904247791 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4678362573099415, + "acc_stderr": 0.03826882417660369, + "acc_norm": 0.4678362573099415, + "acc_norm_stderr": 
0.03826882417660369 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5922330097087378, + "acc_stderr": 0.04865777570410768, + "acc_norm": 0.5922330097087378, + "acc_norm_stderr": 0.04865777570410768 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.46871008939974457, + "acc_stderr": 0.017844918090468537, + "acc_norm": 0.46871008939974457, + "acc_norm_stderr": 0.017844918090468537 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.35555555555555557, + "acc_stderr": 0.04135176749720386, + "acc_norm": 0.35555555555555557, + "acc_norm_stderr": 0.04135176749720386 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.42127659574468085, + "acc_stderr": 0.03227834510146267, + "acc_norm": 0.42127659574468085, + "acc_norm_stderr": 0.03227834510146267 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.37349397590361444, + "acc_stderr": 0.03765845117168863, + "acc_norm": 0.37349397590361444, + "acc_norm_stderr": 0.03765845117168863 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4437299035369775, + "acc_stderr": 0.028217683556652315, + "acc_norm": 0.4437299035369775, + "acc_norm_stderr": 0.028217683556652315 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4170403587443946, + "acc_stderr": 0.03309266936071722, + "acc_norm": 0.4170403587443946, + "acc_norm_stderr": 0.03309266936071722 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.42748091603053434, + "acc_stderr": 0.04338920305792401, + "acc_norm": 0.42748091603053434, + "acc_norm_stderr": 0.04338920305792401 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5656565656565656, + "acc_stderr": 0.03531505879359183, + "acc_norm": 0.5656565656565656, + "acc_norm_stderr": 
0.03531505879359183 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.43448275862068964, + "acc_stderr": 0.041307408795554966, + "acc_norm": 0.43448275862068964, + "acc_norm_stderr": 0.041307408795554966 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.04389869956808777, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.04389869956808777 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5378151260504201, + "acc_stderr": 0.032385469487589795, + "acc_norm": 0.5378151260504201, + "acc_norm_stderr": 0.032385469487589795 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.45897435897435895, + "acc_stderr": 0.025265525491284295, + "acc_norm": 0.45897435897435895, + "acc_norm_stderr": 0.025265525491284295 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.63, + "acc_stderr": 0.04852365870939098, + "acc_norm": 0.63, + "acc_norm_stderr": 0.04852365870939098 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3645320197044335, + "acc_stderr": 0.0338640574606209, + "acc_norm": 0.3645320197044335, + "acc_norm_stderr": 0.0338640574606209 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.43870967741935485, + "acc_stderr": 0.02822949732031722, + "acc_norm": 0.43870967741935485, + "acc_norm_stderr": 0.02822949732031722 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7649572649572649, + "acc_stderr": 0.027778835904935434, + "acc_norm": 0.7649572649572649, + "acc_norm_stderr": 0.027778835904935434 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.45660377358490567, + "acc_stderr": 0.03065674869673943, + "acc_norm": 0.45660377358490567, + 
"acc_norm_stderr": 0.03065674869673943 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5181818181818182, + "acc_stderr": 0.04785964010794915, + "acc_norm": 0.5181818181818182, + "acc_norm_stderr": 0.04785964010794915 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.34074074074074073, + "acc_stderr": 0.028897748741131157, + "acc_norm": 0.34074074074074073, + "acc_norm_stderr": 0.028897748741131157 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.24503311258278146, + "acc_stderr": 0.03511807571804725, + "acc_norm": 0.24503311258278146, + "acc_norm_stderr": 0.03511807571804725 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.572139303482587, + "acc_stderr": 0.03498541988407795, + "acc_norm": 0.572139303482587, + "acc_norm_stderr": 0.03498541988407795 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4161849710982659, + "acc_stderr": 0.03758517775404947, + "acc_norm": 0.4161849710982659, + "acc_norm_stderr": 0.03758517775404947 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3862433862433862, + "acc_stderr": 0.02507598176760168, + "acc_norm": 0.3862433862433862, + "acc_norm_stderr": 0.02507598176760168 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3541666666666667, + "acc_stderr": 0.039994111357535424, + "acc_norm": 0.3541666666666667, + "acc_norm_stderr": 0.039994111357535424 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.49421965317919075, + "acc_stderr": 0.026917296179149116, + "acc_norm": 0.49421965317919075, + "acc_norm_stderr": 0.026917296179149116 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5337423312883436, + "acc_stderr": 0.039194155450484096, + 
"acc_norm": 0.5337423312883436, + "acc_norm_stderr": 0.039194155450484096 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.44753086419753085, + "acc_stderr": 0.02766713856942271, + "acc_norm": 0.44753086419753085, + "acc_norm_stderr": 0.02766713856942271 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.49222797927461137, + "acc_stderr": 0.03608003225569654, + "acc_norm": 0.49222797927461137, + "acc_norm_stderr": 0.03608003225569654 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.044346007015849245, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.044346007015849245 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5284403669724771, + "acc_stderr": 0.02140261569734805, + "acc_norm": 0.5284403669724771, + "acc_norm_stderr": 0.02140261569734805 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4365079365079365, + "acc_stderr": 0.044359328928514664, + "acc_norm": 0.4365079365079365, + "acc_norm_stderr": 0.044359328928514664 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.46078431372549017, + "acc_stderr": 0.028541722692618874, + "acc_norm": 0.46078431372549017, + "acc_norm_stderr": 0.028541722692618874 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.628099173553719, + "acc_stderr": 0.04412015806624504, + "acc_norm": 0.628099173553719, + "acc_norm_stderr": 0.04412015806624504 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.39473684210526316, + "acc_stderr": 0.039777499346220734, + "acc_norm": 0.39473684210526316, + "acc_norm_stderr": 0.039777499346220734 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.39215686274509803, + 
"acc_stderr": 0.019751726508762633, + "acc_norm": 0.39215686274509803, + "acc_norm_stderr": 0.019751726508762633 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.35106382978723405, + "acc_stderr": 0.028473501272963764, + "acc_norm": 0.35106382978723405, + "acc_norm_stderr": 0.028473501272963764 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4017857142857143, + "acc_stderr": 0.04653333146973646, + "acc_norm": 0.4017857142857143, + "acc_norm_stderr": 0.04653333146973646 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.42592592592592593, + "acc_stderr": 0.033723432716530624, + "acc_norm": 0.42592592592592593, + "acc_norm_stderr": 0.033723432716530624 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2849162011173184, + "acc_stderr": 0.015096222302469804, + "acc_norm": 0.2849162011173184, + "acc_norm_stderr": 0.015096222302469804 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.68, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.68, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.35661764705882354, + "acc_stderr": 0.029097209568411945, + "acc_norm": 0.35661764705882354, + "acc_norm_stderr": 0.029097209568411945 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.49795918367346936, + "acc_stderr": 0.0320089533497105, + "acc_norm": 0.49795918367346936, + "acc_norm_stderr": 0.0320089533497105 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6118143459915611, + "acc_stderr": 0.03172295004332331, + "acc_norm": 0.6118143459915611, + "acc_norm_stderr": 0.03172295004332331 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.34419817470664926, + "acc_stderr": 0.012134433741002574, + "acc_norm": 0.34419817470664926, + "acc_norm_stderr": 0.012134433741002574 
+ }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4803921568627451, + "acc_stderr": 0.03506612560524866, + "acc_norm": 0.4803921568627451, + "acc_norm_stderr": 0.03506612560524866 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5333333333333333, + "acc_stderr": 0.03895658065271846, + "acc_norm": 0.5333333333333333, + "acc_norm_stderr": 0.03895658065271846 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.4394124847001224, + "mc1_stderr": 0.017374520482513714, + "mc2": 0.6167289630163518, + "mc2_stderr": 0.01605808058193163 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4462809917355372, + "acc_stderr": 0.017090852631668336, + "acc_norm": 0.45690672963400236, + "acc_norm_stderr": 0.017126389093086784 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + 
"harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "yunconglong/Truthful_DPO_TomGrc_FusionNet_7Bx2_MoE_13B", + "model_sha": "dd66c98fa56ab95e321e591f123081ab4296a032", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/yunconglong/Truthful_DPO_TomGrc_FusionNet_7Bx2_MoE_13B/result_2024-07-13 05:29:37.json 
b/yunconglong/Truthful_DPO_TomGrc_FusionNet_7Bx2_MoE_13B/result_2024-07-13 05:29:37.json new file mode 100644 index 0000000000000000000000000000000000000000..98d092bfa32162d5cfa4119ca6a0ec847a24c00c --- /dev/null +++ b/yunconglong/Truthful_DPO_TomGrc_FusionNet_7Bx2_MoE_13B/result_2024-07-13 05:29:37.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4087030716723549, + "acc_stderr": 0.014365750345427001, + "acc_norm": 0.4658703071672355, + "acc_norm_stderr": 0.0145773113152311 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4048994224258116, + "acc_stderr": 0.004898693652043317, + "acc_norm": 0.5402310296753635, + "acc_norm_stderr": 0.004973602904247791 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4678362573099415, + "acc_stderr": 0.03826882417660369, + "acc_norm": 0.4678362573099415, + "acc_norm_stderr": 0.03826882417660369 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5922330097087378, + "acc_stderr": 0.04865777570410768, + "acc_norm": 0.5922330097087378, + "acc_norm_stderr": 0.04865777570410768 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.46871008939974457, + "acc_stderr": 0.017844918090468537, + "acc_norm": 0.46871008939974457, + "acc_norm_stderr": 0.017844918090468537 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.35555555555555557, + "acc_stderr": 0.04135176749720386, + "acc_norm": 0.35555555555555557, + "acc_norm_stderr": 0.04135176749720386 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.42127659574468085, + "acc_stderr": 0.03227834510146267, + "acc_norm": 0.42127659574468085, + "acc_norm_stderr": 0.03227834510146267 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.37349397590361444, + "acc_stderr": 0.03765845117168863, + "acc_norm": 0.37349397590361444, + "acc_norm_stderr": 0.03765845117168863 + }, + 
"harness|ko_mmlu_philosophy|5": { + "acc": 0.4437299035369775, + "acc_stderr": 0.028217683556652315, + "acc_norm": 0.4437299035369775, + "acc_norm_stderr": 0.028217683556652315 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4170403587443946, + "acc_stderr": 0.03309266936071722, + "acc_norm": 0.4170403587443946, + "acc_norm_stderr": 0.03309266936071722 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.42748091603053434, + "acc_stderr": 0.04338920305792401, + "acc_norm": 0.42748091603053434, + "acc_norm_stderr": 0.04338920305792401 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5656565656565656, + "acc_stderr": 0.03531505879359183, + "acc_norm": 0.5656565656565656, + "acc_norm_stderr": 0.03531505879359183 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.43448275862068964, + "acc_stderr": 0.041307408795554966, + "acc_norm": 0.43448275862068964, + "acc_norm_stderr": 0.041307408795554966 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.04389869956808777, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.04389869956808777 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5378151260504201, + "acc_stderr": 0.032385469487589795, + "acc_norm": 0.5378151260504201, + "acc_norm_stderr": 0.032385469487589795 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.45897435897435895, + "acc_stderr": 0.025265525491284295, + "acc_norm": 0.45897435897435895, + "acc_norm_stderr": 0.025265525491284295 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.63, + "acc_stderr": 0.04852365870939098, + "acc_norm": 0.63, + "acc_norm_stderr": 0.04852365870939098 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.24, + "acc_norm_stderr": 
0.04292346959909283 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3645320197044335, + "acc_stderr": 0.0338640574606209, + "acc_norm": 0.3645320197044335, + "acc_norm_stderr": 0.0338640574606209 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.43870967741935485, + "acc_stderr": 0.02822949732031722, + "acc_norm": 0.43870967741935485, + "acc_norm_stderr": 0.02822949732031722 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7649572649572649, + "acc_stderr": 0.027778835904935434, + "acc_norm": 0.7649572649572649, + "acc_norm_stderr": 0.027778835904935434 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.45660377358490567, + "acc_stderr": 0.03065674869673943, + "acc_norm": 0.45660377358490567, + "acc_norm_stderr": 0.03065674869673943 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5181818181818182, + "acc_stderr": 0.04785964010794915, + "acc_norm": 0.5181818181818182, + "acc_norm_stderr": 0.04785964010794915 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.34074074074074073, + "acc_stderr": 0.028897748741131157, + "acc_norm": 0.34074074074074073, + "acc_norm_stderr": 0.028897748741131157 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.24503311258278146, + "acc_stderr": 0.03511807571804725, + "acc_norm": 0.24503311258278146, + "acc_norm_stderr": 0.03511807571804725 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.572139303482587, + "acc_stderr": 0.03498541988407795, + "acc_norm": 0.572139303482587, + "acc_norm_stderr": 0.03498541988407795 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4161849710982659, + "acc_stderr": 0.03758517775404947, + "acc_norm": 0.4161849710982659, + "acc_norm_stderr": 0.03758517775404947 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3862433862433862, + "acc_stderr": 0.02507598176760168, 
+ "acc_norm": 0.3862433862433862, + "acc_norm_stderr": 0.02507598176760168 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3541666666666667, + "acc_stderr": 0.039994111357535424, + "acc_norm": 0.3541666666666667, + "acc_norm_stderr": 0.039994111357535424 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.54, + "acc_stderr": 0.05009082659620333, + "acc_norm": 0.54, + "acc_norm_stderr": 0.05009082659620333 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.49421965317919075, + "acc_stderr": 0.026917296179149116, + "acc_norm": 0.49421965317919075, + "acc_norm_stderr": 0.026917296179149116 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5337423312883436, + "acc_stderr": 0.039194155450484096, + "acc_norm": 0.5337423312883436, + "acc_norm_stderr": 0.039194155450484096 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.44753086419753085, + "acc_stderr": 0.02766713856942271, + "acc_norm": 0.44753086419753085, + "acc_norm_stderr": 0.02766713856942271 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.49222797927461137, + "acc_stderr": 0.03608003225569654, + "acc_norm": 0.49222797927461137, + "acc_norm_stderr": 0.03608003225569654 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.044346007015849245, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.044346007015849245 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5284403669724771, + "acc_stderr": 0.02140261569734805, + "acc_norm": 0.5284403669724771, + "acc_norm_stderr": 0.02140261569734805 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.4365079365079365, + "acc_stderr": 
0.044359328928514664, + "acc_norm": 0.4365079365079365, + "acc_norm_stderr": 0.044359328928514664 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.46078431372549017, + "acc_stderr": 0.028541722692618874, + "acc_norm": 0.46078431372549017, + "acc_norm_stderr": 0.028541722692618874 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.628099173553719, + "acc_stderr": 0.04412015806624504, + "acc_norm": 0.628099173553719, + "acc_norm_stderr": 0.04412015806624504 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.39473684210526316, + "acc_stderr": 0.039777499346220734, + "acc_norm": 0.39473684210526316, + "acc_norm_stderr": 0.039777499346220734 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.39215686274509803, + "acc_stderr": 0.019751726508762633, + "acc_norm": 0.39215686274509803, + "acc_norm_stderr": 0.019751726508762633 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.35106382978723405, + "acc_stderr": 0.028473501272963764, + "acc_norm": 0.35106382978723405, + "acc_norm_stderr": 0.028473501272963764 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4017857142857143, + "acc_stderr": 0.04653333146973646, + "acc_norm": 0.4017857142857143, + "acc_norm_stderr": 0.04653333146973646 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.42592592592592593, + "acc_stderr": 0.033723432716530624, + "acc_norm": 0.42592592592592593, + "acc_norm_stderr": 0.033723432716530624 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2849162011173184, + "acc_stderr": 0.015096222302469804, + "acc_norm": 0.2849162011173184, + "acc_norm_stderr": 0.015096222302469804 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + 
"harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.68, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.68, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.35661764705882354, + "acc_stderr": 0.029097209568411945, + "acc_norm": 0.35661764705882354, + "acc_norm_stderr": 0.029097209568411945 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.49795918367346936, + "acc_stderr": 0.0320089533497105, + "acc_norm": 0.49795918367346936, + "acc_norm_stderr": 0.0320089533497105 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6118143459915611, + "acc_stderr": 0.03172295004332331, + "acc_norm": 0.6118143459915611, + "acc_norm_stderr": 0.03172295004332331 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.34419817470664926, + "acc_stderr": 0.012134433741002574, + "acc_norm": 0.34419817470664926, + "acc_norm_stderr": 0.012134433741002574 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4803921568627451, + "acc_stderr": 0.03506612560524866, + "acc_norm": 0.4803921568627451, + "acc_norm_stderr": 0.03506612560524866 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5333333333333333, + "acc_stderr": 0.03895658065271846, + "acc_norm": 0.5333333333333333, + "acc_norm_stderr": 0.03895658065271846 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.4394124847001224, + "mc1_stderr": 0.017374520482513714, + "mc2": 0.6167289630163518, + "mc2_stderr": 0.01605808058193163 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4462809917355372, + "acc_stderr": 0.017090852631668336, + "acc_norm": 0.45690672963400236, + "acc_norm_stderr": 0.017126389093086784 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + 
"harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + 
"harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "yunconglong/Truthful_DPO_TomGrc_FusionNet_7Bx2_MoE_13B", + "model_sha": "dd66c98fa56ab95e321e591f123081ab4296a032", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/yuntaeyang/KoSOLAR-10.7B-dpo-v1.0/result_2024-01-18 07:10:45.json b/yuntaeyang/KoSOLAR-10.7B-dpo-v1.0/result_2024-01-18 07:10:45.json new file mode 100644 index 0000000000000000000000000000000000000000..a1369eada3d30cf496a6e7f1a237adf615273b4b --- /dev/null +++ b/yuntaeyang/KoSOLAR-10.7B-dpo-v1.0/result_2024-01-18 07:10:45.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.44112627986348124, + "acc_stderr": 0.014509747749064661, + "acc_norm": 0.5025597269624573, + "acc_norm_stderr": 0.014611199329843796 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4287990440151364, + "acc_stderr": 0.0049389301432344575, + "acc_norm": 0.5974905397331209, + "acc_norm_stderr": 0.004894012555642651 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5730994152046783, + "acc_stderr": 0.03793620616529916, + "acc_norm": 0.5730994152046783, + "acc_norm_stderr": 0.03793620616529916 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6310679611650486, + "acc_stderr": 0.0477761518115674, + "acc_norm": 0.6310679611650486, + "acc_norm_stderr": 0.0477761518115674 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6232439335887612, + "acc_stderr": 0.017328292907303054, + "acc_norm": 
0.6232439335887612, + "acc_norm_stderr": 0.017328292907303054 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.5037037037037037, + "acc_stderr": 0.04319223625811331, + "acc_norm": 0.5037037037037037, + "acc_norm_stderr": 0.04319223625811331 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720683, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720683 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.46382978723404256, + "acc_stderr": 0.032600385118357715, + "acc_norm": 0.46382978723404256, + "acc_norm_stderr": 0.032600385118357715 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4457831325301205, + "acc_stderr": 0.03869543323472101, + "acc_norm": 0.4457831325301205, + "acc_norm_stderr": 0.03869543323472101 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.594855305466238, + "acc_stderr": 0.02788238379132595, + "acc_norm": 0.594855305466238, + "acc_norm_stderr": 0.02788238379132595 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5560538116591929, + "acc_stderr": 0.03334625674242728, + "acc_norm": 0.5560538116591929, + "acc_norm_stderr": 0.03334625674242728 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.6183206106870229, + "acc_stderr": 0.04260735157644561, + "acc_norm": 0.6183206106870229, + "acc_norm_stderr": 0.04260735157644561 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.696969696969697, + "acc_stderr": 0.03274287914026868, + "acc_norm": 0.696969696969697, + "acc_norm_stderr": 0.03274287914026868 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4413793103448276, + "acc_stderr": 0.04137931034482758, + "acc_norm": 0.4413793103448276, + "acc_norm_stderr": 0.04137931034482758 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.3235294117647059, + "acc_stderr": 0.046550104113196177, + "acc_norm": 
0.3235294117647059, + "acc_norm_stderr": 0.046550104113196177 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.592436974789916, + "acc_stderr": 0.03191863374478466, + "acc_norm": 0.592436974789916, + "acc_norm_stderr": 0.03191863374478466 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.5, + "acc_stderr": 0.02535100632816969, + "acc_norm": 0.5, + "acc_norm_stderr": 0.02535100632816969 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.57, + "acc_stderr": 0.04975698519562429, + "acc_norm": 0.57, + "acc_norm_stderr": 0.04975698519562429 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.6203703703703703, + "acc_stderr": 0.04691521224077742, + "acc_norm": 0.6203703703703703, + "acc_norm_stderr": 0.04691521224077742 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3842364532019704, + "acc_stderr": 0.0342239856565755, + "acc_norm": 0.3842364532019704, + "acc_norm_stderr": 0.0342239856565755 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5870967741935483, + "acc_stderr": 0.02800913812540038, + "acc_norm": 0.5870967741935483, + "acc_norm_stderr": 0.02800913812540038 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7948717948717948, + "acc_stderr": 0.026453508054040332, + "acc_norm": 0.7948717948717948, + "acc_norm_stderr": 0.026453508054040332 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4981132075471698, + "acc_stderr": 0.030772653642075657, + "acc_norm": 0.4981132075471698, + "acc_norm_stderr": 0.030772653642075657 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5363636363636364, + "acc_stderr": 0.04776449162396197, + "acc_norm": 0.5363636363636364, + "acc_norm_stderr": 0.04776449162396197 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.31851851851851853, + "acc_stderr": 0.02840653309060846, + 
"acc_norm": 0.31851851851851853, + "acc_norm_stderr": 0.02840653309060846 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33112582781456956, + "acc_stderr": 0.038425817186598696, + "acc_norm": 0.33112582781456956, + "acc_norm_stderr": 0.038425817186598696 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6616915422885572, + "acc_stderr": 0.0334556307033919, + "acc_norm": 0.6616915422885572, + "acc_norm_stderr": 0.0334556307033919 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.45664739884393063, + "acc_stderr": 0.03798106566014498, + "acc_norm": 0.45664739884393063, + "acc_norm_stderr": 0.03798106566014498 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.38095238095238093, + "acc_stderr": 0.025010749116137602, + "acc_norm": 0.38095238095238093, + "acc_norm_stderr": 0.025010749116137602 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5208333333333334, + "acc_stderr": 0.041775789507399935, + "acc_norm": 0.5208333333333334, + "acc_norm_stderr": 0.041775789507399935 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.4, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.66, + "acc_stderr": 0.04760952285695238, + "acc_norm": 0.66, + "acc_norm_stderr": 0.04760952285695238 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5202312138728323, + "acc_stderr": 0.026897049996382868, + "acc_norm": 0.5202312138728323, + "acc_norm_stderr": 0.026897049996382868 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5153374233128835, + "acc_stderr": 0.039265223787088424, + "acc_norm": 0.5153374233128835, + "acc_norm_stderr": 0.039265223787088424 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5740740740740741, + "acc_stderr": 0.027513747284379424, + "acc_norm": 0.5740740740740741, + "acc_norm_stderr": 0.027513747284379424 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 
0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6787564766839378, + "acc_stderr": 0.033699508685490674, + "acc_norm": 0.6787564766839378, + "acc_norm_stderr": 0.033699508685490674 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.39473684210526316, + "acc_stderr": 0.04598188057816542, + "acc_norm": 0.39473684210526316, + "acc_norm_stderr": 0.04598188057816542 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.655045871559633, + "acc_stderr": 0.020380605405066962, + "acc_norm": 0.655045871559633, + "acc_norm_stderr": 0.020380605405066962 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3492063492063492, + "acc_stderr": 0.04263906892795133, + "acc_norm": 0.3492063492063492, + "acc_norm_stderr": 0.04263906892795133 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5261437908496732, + "acc_stderr": 0.028590752958852387, + "acc_norm": 0.5261437908496732, + "acc_norm_stderr": 0.028590752958852387 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.55, + "acc_stderr": 0.049999999999999996, + "acc_norm": 0.55, + "acc_norm_stderr": 0.049999999999999996 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6446280991735537, + "acc_stderr": 0.0436923632657398, + "acc_norm": 0.6446280991735537, + "acc_norm_stderr": 0.0436923632657398 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5263157894736842, + "acc_stderr": 0.04063302731486671, + "acc_norm": 0.5263157894736842, + "acc_norm_stderr": 0.04063302731486671 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.47058823529411764, + "acc_stderr": 0.020192808271433795, + "acc_norm": 0.47058823529411764, + "acc_norm_stderr": 0.020192808271433795 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3617021276595745, + "acc_stderr": 0.028663820147199495, + "acc_norm": 0.3617021276595745, + "acc_norm_stderr": 0.028663820147199495 + }, + 
"harness|ko_mmlu_machine_learning|5": { + "acc": 0.41964285714285715, + "acc_stderr": 0.04684099321077106, + "acc_norm": 0.41964285714285715, + "acc_norm_stderr": 0.04684099321077106 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.4305555555555556, + "acc_stderr": 0.03376922151252335, + "acc_norm": 0.4305555555555556, + "acc_norm_stderr": 0.03376922151252335 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.15418994413407822, + "acc_stderr": 0.012078018437469352, + "acc_norm": 0.15418994413407822, + "acc_norm_stderr": 0.012078018437469352 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.62, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.62, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4411764705882353, + "acc_stderr": 0.030161911930767102, + "acc_norm": 0.4411764705882353, + "acc_norm_stderr": 0.030161911930767102 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5020408163265306, + "acc_stderr": 0.0320089533497105, + "acc_norm": 0.5020408163265306, + "acc_norm_stderr": 0.0320089533497105 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7046413502109705, + "acc_stderr": 0.029696338713422876, + "acc_norm": 0.7046413502109705, + "acc_norm_stderr": 0.029696338713422876 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3728813559322034, + "acc_stderr": 0.012350630058333362, + "acc_norm": 0.3728813559322034, + "acc_norm_stderr": 0.012350630058333362 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6274509803921569, + "acc_stderr": 0.033933885849584046, + "acc_norm": 0.6274509803921569, + "acc_norm_stderr": 0.033933885849584046 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6181818181818182, + "acc_stderr": 
0.03793713171165633, + "acc_norm": 0.6181818181818182, + "acc_norm_stderr": 0.03793713171165633 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3219094247246022, + "mc1_stderr": 0.016355567611960373, + "mc2": 0.49576378600815624, + "mc2_stderr": 0.016259023004035866 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.43919716646989376, + "acc_stderr": 0.0170627757447807, + "acc_norm": 0.4675324675324675, + "acc_norm_stderr": 0.01715407371668286 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + 
"harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "yuntaeyang/KoSOLAR-10.7B-dpo-v1.0", + "model_sha": "557eeaaaca1394fd4e560d741ff59869cfac6cc4", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/yuntaeyang/Llama-2-ko-instruct-13B-kor-orca-lora/result_2023-10-31 05:26:07.json b/yuntaeyang/Llama-2-ko-instruct-13B-kor-orca-lora/result_2023-10-31 05:26:07.json new file mode 100644 index 0000000000000000000000000000000000000000..08b940a1e390d9534fd23dd82557f58865f3076b --- /dev/null +++ b/yuntaeyang/Llama-2-ko-instruct-13B-kor-orca-lora/result_2023-10-31 05:26:07.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.4121160409556314, + 
"acc_stderr": 0.014383915302225396, + "acc_norm": 0.4709897610921502, + "acc_norm_stderr": 0.014586776355294324 + }, + "harness|ko_hellaswag|10": { + "acc": 0.424317864967138, + "acc_stderr": 0.004932289405608944, + "acc_norm": 0.5681139215295757, + "acc_norm_stderr": 0.0049432643398686525 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.03811079669833531, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.03811079669833531 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5436893203883495, + "acc_stderr": 0.049318019942204146, + "acc_norm": 0.5436893203883495, + "acc_norm_stderr": 0.049318019942204146 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5683269476372924, + "acc_stderr": 0.017712228939299798, + "acc_norm": 0.5683269476372924, + "acc_norm_stderr": 0.017712228939299798 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.04309732901036354, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.04309732901036354 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909284, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909284 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.39574468085106385, + "acc_stderr": 0.03196758697835362, + "acc_norm": 0.39574468085106385, + "acc_norm_stderr": 0.03196758697835362 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.42771084337349397, + "acc_stderr": 0.038515976837185335, + "acc_norm": 0.42771084337349397, + "acc_norm_stderr": 0.038515976837185335 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5369774919614148, + "acc_stderr": 0.028320325830105915, + "acc_norm": 0.5369774919614148, + "acc_norm_stderr": 0.028320325830105915 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5336322869955157, + "acc_stderr": 0.033481800170603065, + "acc_norm": 0.5336322869955157, + "acc_norm_stderr": 0.033481800170603065 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 
0.48854961832061067, + "acc_stderr": 0.043841400240780176, + "acc_norm": 0.48854961832061067, + "acc_norm_stderr": 0.043841400240780176 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.43, + "acc_stderr": 0.04975698519562429, + "acc_norm": 0.43, + "acc_norm_stderr": 0.04975698519562429 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5909090909090909, + "acc_stderr": 0.03502975799413008, + "acc_norm": 0.5909090909090909, + "acc_norm_stderr": 0.03502975799413008 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.43448275862068964, + "acc_stderr": 0.041307408795554966, + "acc_norm": 0.43448275862068964, + "acc_norm_stderr": 0.041307408795554966 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.040925639582376556, + "acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.040925639582376556 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4957983193277311, + "acc_stderr": 0.0324773433444811, + "acc_norm": 0.4957983193277311, + "acc_norm_stderr": 0.0324773433444811 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.48205128205128206, + "acc_stderr": 0.02533466708095495, + "acc_norm": 0.48205128205128206, + "acc_norm_stderr": 0.02533466708095495 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4088669950738916, + "acc_stderr": 0.034590588158832314, + "acc_norm": 0.4088669950738916, + "acc_norm_stderr": 0.034590588158832314 + }, + 
"harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4645161290322581, + "acc_stderr": 0.02837228779796295, + "acc_norm": 0.4645161290322581, + "acc_norm_stderr": 0.02837228779796295 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6538461538461539, + "acc_stderr": 0.031166957367235903, + "acc_norm": 0.6538461538461539, + "acc_norm_stderr": 0.031166957367235903 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4641509433962264, + "acc_stderr": 0.030693675018458006, + "acc_norm": 0.4641509433962264, + "acc_norm_stderr": 0.030693675018458006 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5636363636363636, + "acc_stderr": 0.04750185058907296, + "acc_norm": 0.5636363636363636, + "acc_norm_stderr": 0.04750185058907296 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26666666666666666, + "acc_stderr": 0.026962424325073835, + "acc_norm": 0.26666666666666666, + "acc_norm_stderr": 0.026962424325073835 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.03802039760107903, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.03802039760107903 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5870646766169154, + "acc_stderr": 0.03481520803367348, + "acc_norm": 0.5870646766169154, + "acc_norm_stderr": 0.03481520803367348 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.45664739884393063, + "acc_stderr": 0.03798106566014498, + "acc_norm": 0.45664739884393063, + "acc_norm_stderr": 0.03798106566014498 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.31746031746031744, + "acc_stderr": 0.02397386199899207, + "acc_norm": 0.31746031746031744, + "acc_norm_stderr": 0.02397386199899207 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4375, + "acc_stderr": 0.04148415739394154, + "acc_norm": 0.4375, + "acc_norm_stderr": 0.04148415739394154 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + 
"acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.64, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.64, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5173410404624278, + "acc_stderr": 0.026902900458666647, + "acc_norm": 0.5173410404624278, + "acc_norm_stderr": 0.026902900458666647 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.588957055214724, + "acc_stderr": 0.038656978537853624, + "acc_norm": 0.588957055214724, + "acc_norm_stderr": 0.038656978537853624 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.49691358024691357, + "acc_stderr": 0.02782021415859437, + "acc_norm": 0.49691358024691357, + "acc_norm_stderr": 0.02782021415859437 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5958549222797928, + "acc_stderr": 0.0354150857888402, + "acc_norm": 0.5958549222797928, + "acc_norm_stderr": 0.0354150857888402 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2543859649122807, + "acc_stderr": 0.040969851398436716, + "acc_norm": 0.2543859649122807, + "acc_norm_stderr": 0.040969851398436716 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6256880733944954, + "acc_stderr": 0.020748959408988334, + "acc_norm": 0.6256880733944954, + "acc_norm_stderr": 0.020748959408988334 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.042163702135578345, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.042163702135578345 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.39869281045751637, + "acc_stderr": 0.02803609227389178, + "acc_norm": 0.39869281045751637, + "acc_norm_stderr": 0.02803609227389178 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, 
+ "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6776859504132231, + "acc_stderr": 0.04266416363352168, + "acc_norm": 0.6776859504132231, + "acc_norm_stderr": 0.04266416363352168 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40131578947368424, + "acc_stderr": 0.03988903703336284, + "acc_norm": 0.40131578947368424, + "acc_norm_stderr": 0.03988903703336284 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.40522875816993464, + "acc_stderr": 0.01986115519382917, + "acc_norm": 0.40522875816993464, + "acc_norm_stderr": 0.01986115519382917 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3475177304964539, + "acc_stderr": 0.02840662780959095, + "acc_norm": 0.3475177304964539, + "acc_norm_stderr": 0.02840662780959095 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.29464285714285715, + "acc_stderr": 0.0432704093257873, + "acc_norm": 0.29464285714285715, + "acc_norm_stderr": 0.0432704093257873 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.39351851851851855, + "acc_stderr": 0.03331747876370312, + "acc_norm": 0.39351851851851855, + "acc_norm_stderr": 0.03331747876370312 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2435754189944134, + "acc_stderr": 0.014355911964767857, + "acc_norm": 0.2435754189944134, + "acc_norm_stderr": 0.014355911964767857 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.40808823529411764, + "acc_stderr": 0.02985526139348393, + "acc_norm": 0.40808823529411764, + "acc_norm_stderr": 0.02985526139348393 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5020408163265306, + 
"acc_stderr": 0.0320089533497105, + "acc_norm": 0.5020408163265306, + "acc_norm_stderr": 0.0320089533497105 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6371308016877637, + "acc_stderr": 0.03129920825530213, + "acc_norm": 0.6371308016877637, + "acc_norm_stderr": 0.03129920825530213 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.38005215123859193, + "acc_stderr": 0.012397328205137805, + "acc_norm": 0.38005215123859193, + "acc_norm_stderr": 0.012397328205137805 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5245098039215687, + "acc_stderr": 0.035050931943487976, + "acc_norm": 0.5245098039215687, + "acc_norm_stderr": 0.035050931943487976 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5333333333333333, + "acc_stderr": 0.03895658065271846, + "acc_norm": 0.5333333333333333, + "acc_norm_stderr": 0.03895658065271846 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.27050183598531213, + "mc1_stderr": 0.015550778332842881, + "mc2": 0.4266761138294313, + "mc2_stderr": 0.014862773603692122 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.45808736717827625, + "acc_stderr": 0.017129852117911147, + "acc_norm": 0.5360094451003542, + "acc_norm_stderr": 0.017145715365486664 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + 
"harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + 
"model_name": "yuntaeyang/Llama-2-ko-instruct-13B-kor-orca-lora", + "model_sha": "757d3a2c1ff1aa98b46727cf28922307e8a212ba", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/yuntaeyang/Orca-2-7b-lora-kor/result_2023-12-10 05:02:57.json b/yuntaeyang/Orca-2-7b-lora-kor/result_2023-12-10 05:02:57.json new file mode 100644 index 0000000000000000000000000000000000000000..c0ac7ebb6003c49568eff5c314f3b3afd7b80543 --- /dev/null +++ b/yuntaeyang/Orca-2-7b-lora-kor/result_2023-12-10 05:02:57.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2935153583617747, + "acc_stderr": 0.013307250444941124, + "acc_norm": 0.3447098976109215, + "acc_norm_stderr": 0.01388881628678211 + }, + "harness|ko_hellaswag|10": { + "acc": 0.33031268671579367, + "acc_stderr": 0.0046936443572020495, + "acc_norm": 0.4053973312089225, + "acc_norm_stderr": 0.004899653704032836 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4853801169590643, + "acc_stderr": 0.038331852752130205, + "acc_norm": 0.4853801169590643, + "acc_norm_stderr": 0.038331852752130205 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5242718446601942, + "acc_stderr": 0.049449010929737795, + "acc_norm": 0.5242718446601942, + "acc_norm_stderr": 0.049449010929737795 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.40485312899106, + "acc_stderr": 0.017553246467720253, + "acc_norm": 0.40485312899106, + "acc_norm_stderr": 0.017553246467720253 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.3111111111111111, + "acc_stderr": 0.03999262876617723, + "acc_norm": 0.3111111111111111, + "acc_norm_stderr": 0.03999262876617723 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 
0.33617021276595743, + "acc_stderr": 0.030881618520676942, + "acc_norm": 0.33617021276595743, + "acc_norm_stderr": 0.030881618520676942 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3855421686746988, + "acc_stderr": 0.03789134424611549, + "acc_norm": 0.3855421686746988, + "acc_norm_stderr": 0.03789134424611549 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.38263665594855306, + "acc_stderr": 0.027604689028581986, + "acc_norm": 0.38263665594855306, + "acc_norm_stderr": 0.027604689028581986 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.40358744394618834, + "acc_stderr": 0.03292802819330313, + "acc_norm": 0.40358744394618834, + "acc_norm_stderr": 0.03292802819330313 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.45038167938931295, + "acc_stderr": 0.04363643698524779, + "acc_norm": 0.45038167938931295, + "acc_norm_stderr": 0.04363643698524779 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.45454545454545453, + "acc_stderr": 0.035476014940069356, + "acc_norm": 0.45454545454545453, + "acc_norm_stderr": 0.035476014940069356 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.43448275862068964, + "acc_stderr": 0.04130740879555497, + "acc_norm": 0.43448275862068964, + "acc_norm_stderr": 0.04130740879555497 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.19607843137254902, + "acc_stderr": 0.039505818611799616, + "acc_norm": 0.19607843137254902, + "acc_norm_stderr": 0.039505818611799616 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.3949579831932773, + "acc_stderr": 0.03175367846096625, + "acc_norm": 0.3949579831932773, + "acc_norm_stderr": 0.03175367846096625 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.35384615384615387, + "acc_stderr": 0.024243783994062185, + "acc_norm": 0.35384615384615387, + "acc_norm_stderr": 
0.024243783994062185 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456344, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456344 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5185185185185185, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.5185185185185185, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.3891625615763547, + "acc_stderr": 0.034304624161038716, + "acc_norm": 0.3891625615763547, + "acc_norm_stderr": 0.034304624161038716 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.36774193548387096, + "acc_stderr": 0.027430866579973474, + "acc_norm": 0.36774193548387096, + "acc_norm_stderr": 0.027430866579973474 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.5982905982905983, + "acc_stderr": 0.03211693751051621, + "acc_norm": 0.5982905982905983, + "acc_norm_stderr": 0.03211693751051621 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.3849056603773585, + "acc_stderr": 0.029946498567699948, + "acc_norm": 0.3849056603773585, + "acc_norm_stderr": 0.029946498567699948 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.33636363636363636, + "acc_stderr": 0.04525393596302506, + "acc_norm": 0.33636363636363636, + "acc_norm_stderr": 0.04525393596302506 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.34814814814814815, + "acc_stderr": 0.029045600290616258, + "acc_norm": 0.34814814814814815, + "acc_norm_stderr": 0.029045600290616258 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31788079470198677, + "acc_stderr": 0.03802039760107903, + "acc_norm": 0.31788079470198677, + "acc_norm_stderr": 0.03802039760107903 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.4975124378109453, + "acc_stderr": 0.03535490150137289, + "acc_norm": 
0.4975124378109453, + "acc_norm_stderr": 0.03535490150137289 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3352601156069364, + "acc_stderr": 0.03599586301247078, + "acc_norm": 0.3352601156069364, + "acc_norm_stderr": 0.03599586301247078 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.30687830687830686, + "acc_stderr": 0.023752928712112147, + "acc_norm": 0.30687830687830686, + "acc_norm_stderr": 0.023752928712112147 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2569444444444444, + "acc_stderr": 0.03653946969442099, + "acc_norm": 0.2569444444444444, + "acc_norm_stderr": 0.03653946969442099 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621505, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621505 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.53, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.53, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4393063583815029, + "acc_stderr": 0.026720034380514998, + "acc_norm": 0.4393063583815029, + "acc_norm_stderr": 0.026720034380514998 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.3496932515337423, + "acc_stderr": 0.037466683254700206, + "acc_norm": 0.3496932515337423, + "acc_norm_stderr": 0.037466683254700206 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.41975308641975306, + "acc_stderr": 0.027460099557005135, + "acc_norm": 0.41975308641975306, + "acc_norm_stderr": 0.027460099557005135 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.41450777202072536, + "acc_stderr": 0.03555300319557672, + "acc_norm": 0.41450777202072536, + "acc_norm_stderr": 0.03555300319557672 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 
0.04227054451232199, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.04227054451232199 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.3798165137614679, + "acc_stderr": 0.020808825617866244, + "acc_norm": 0.3798165137614679, + "acc_norm_stderr": 0.020808825617866244 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.35714285714285715, + "acc_stderr": 0.04285714285714281, + "acc_norm": 0.35714285714285715, + "acc_norm_stderr": 0.04285714285714281 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4150326797385621, + "acc_stderr": 0.0282135041778241, + "acc_norm": 0.4150326797385621, + "acc_norm_stderr": 0.0282135041778241 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.5785123966942148, + "acc_stderr": 0.04507732278775089, + "acc_norm": 0.5785123966942148, + "acc_norm_stderr": 0.04507732278775089 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.3881578947368421, + "acc_stderr": 0.03965842097512744, + "acc_norm": 0.3881578947368421, + "acc_norm_stderr": 0.03965842097512744 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3022875816993464, + "acc_stderr": 0.018579232711113877, + "acc_norm": 0.3022875816993464, + "acc_norm_stderr": 0.018579232711113877 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3262411347517731, + "acc_stderr": 0.027968453043563168, + "acc_norm": 0.3262411347517731, + "acc_norm_stderr": 0.027968453043563168 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.32142857142857145, + "acc_stderr": 0.04432804055291517, + "acc_norm": 0.32142857142857145, + "acc_norm_stderr": 0.04432804055291517 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2916666666666667, + "acc_stderr": 0.030998666304560524, + "acc_norm": 0.2916666666666667, + "acc_norm_stderr": 0.030998666304560524 + }, + 
"harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.26256983240223464, + "acc_stderr": 0.014716824273017754, + "acc_norm": 0.26256983240223464, + "acc_norm_stderr": 0.014716824273017754 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.4, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.4, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.43, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.43, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.027678468642144682, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.027678468642144682 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4775510204081633, + "acc_stderr": 0.031976941187136725, + "acc_norm": 0.4775510204081633, + "acc_norm_stderr": 0.031976941187136725 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5147679324894515, + "acc_stderr": 0.032533028078777386, + "acc_norm": 0.5147679324894515, + "acc_norm_stderr": 0.032533028078777386 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.32333767926988266, + "acc_stderr": 0.011946565758447202, + "acc_norm": 0.32333767926988266, + "acc_norm_stderr": 0.011946565758447202 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.47549019607843135, + "acc_stderr": 0.03505093194348798, + "acc_norm": 0.47549019607843135, + "acc_norm_stderr": 0.03505093194348798 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.44242424242424244, + "acc_stderr": 0.03878372113711274, + "acc_norm": 0.44242424242424244, + "acc_norm_stderr": 0.03878372113711274 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2766217870257038, + "mc1_stderr": 0.015659605755326912, + "mc2": 0.4497834339134106, + "mc2_stderr": 0.01565169575316926 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.3482880755608028, + "acc_stderr": 0.016379926739148044, + "acc_norm": 
0.39669421487603307, + "acc_norm_stderr": 0.016819438642971408 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + 
"harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "yuntaeyang/Orca-2-7b-lora-kor", + "model_sha": "2ab434d0f6cd093918fdab77ee7c24c2b40adffe", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/yuntaeyang/SOLAR-10.7B-Instructlora_sftt-v1.0/result_2024-01-13 09:07:33.json b/yuntaeyang/SOLAR-10.7B-Instructlora_sftt-v1.0/result_2024-01-13 09:07:33.json new file mode 100644 index 0000000000000000000000000000000000000000..0c8a593659a6ba3a97b98a25d3470ae9b0d8ab04 --- /dev/null +++ b/yuntaeyang/SOLAR-10.7B-Instructlora_sftt-v1.0/result_2024-01-13 09:07:33.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.35494880546075086, + "acc_stderr": 0.013983036904094087, + "acc_norm": 0.42150170648464164, + "acc_norm_stderr": 0.01443019706932602 + }, + "harness|ko_hellaswag|10": { + "acc": 0.3693487353116909, + "acc_stderr": 0.0048164212086540875, + "acc_norm": 0.47410874327823144, + "acc_norm_stderr": 0.004983087049281746 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5789473684210527, + "acc_stderr": 0.03786720706234213, + 
"acc_norm": 0.5789473684210527, + "acc_norm_stderr": 0.03786720706234213 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6504854368932039, + "acc_stderr": 0.047211885060971716, + "acc_norm": 0.6504854368932039, + "acc_norm_stderr": 0.047211885060971716 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.6091954022988506, + "acc_stderr": 0.01744836606706253, + "acc_norm": 0.6091954022988506, + "acc_norm_stderr": 0.01744836606706253 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4, + "acc_stderr": 0.04232073695151589, + "acc_norm": 0.4, + "acc_norm_stderr": 0.04232073695151589 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768077, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768077 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.48936170212765956, + "acc_stderr": 0.03267862331014063, + "acc_norm": 0.48936170212765956, + "acc_norm_stderr": 0.03267862331014063 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.5060240963855421, + "acc_stderr": 0.03892212195333045, + "acc_norm": 0.5060240963855421, + "acc_norm_stderr": 0.03892212195333045 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5562700964630225, + "acc_stderr": 0.028217683556652308, + "acc_norm": 0.5562700964630225, + "acc_norm_stderr": 0.028217683556652308 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.57847533632287, + "acc_stderr": 0.033141902221106564, + "acc_norm": 0.57847533632287, + "acc_norm_stderr": 0.033141902221106564 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5725190839694656, + "acc_stderr": 0.04338920305792401, + "acc_norm": 0.5725190839694656, + "acc_norm_stderr": 0.04338920305792401 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.48, + "acc_stderr": 0.05021167315686779, + "acc_norm": 0.48, + "acc_norm_stderr": 0.05021167315686779 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6919191919191919, + "acc_stderr": 0.032894773300986155, + "acc_norm": 0.6919191919191919, + 
"acc_norm_stderr": 0.032894773300986155 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.45517241379310347, + "acc_stderr": 0.04149886942192117, + "acc_norm": 0.45517241379310347, + "acc_norm_stderr": 0.04149886942192117 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.04280105837364395, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.04280105837364395 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5840336134453782, + "acc_stderr": 0.0320165010073961, + "acc_norm": 0.5840336134453782, + "acc_norm_stderr": 0.0320165010073961 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.48205128205128206, + "acc_stderr": 0.025334667080954963, + "acc_norm": 0.48205128205128206, + "acc_norm_stderr": 0.025334667080954963 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.57, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.57, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.04605661864718381, + "acc_norm": 0.3, + "acc_norm_stderr": 0.04605661864718381 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5925925925925926, + "acc_stderr": 0.04750077341199984, + "acc_norm": 0.5925925925925926, + "acc_norm_stderr": 0.04750077341199984 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4039408866995074, + "acc_stderr": 0.0345245390382204, + "acc_norm": 0.4039408866995074, + "acc_norm_stderr": 0.0345245390382204 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5225806451612903, + "acc_stderr": 0.02841498501970786, + "acc_norm": 0.5225806451612903, + "acc_norm_stderr": 0.02841498501970786 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7649572649572649, + "acc_stderr": 0.02777883590493543, + "acc_norm": 0.7649572649572649, + "acc_norm_stderr": 0.02777883590493543 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.47924528301886793, + "acc_stderr": 
0.030746349975723463, + "acc_norm": 0.47924528301886793, + "acc_norm_stderr": 0.030746349975723463 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5545454545454546, + "acc_stderr": 0.047605488214603246, + "acc_norm": 0.5545454545454546, + "acc_norm_stderr": 0.047605488214603246 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.028742040903948485, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.028742040903948485 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.31125827814569534, + "acc_stderr": 0.03780445850526732, + "acc_norm": 0.31125827814569534, + "acc_norm_stderr": 0.03780445850526732 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5970149253731343, + "acc_stderr": 0.03468343295111126, + "acc_norm": 0.5970149253731343, + "acc_norm_stderr": 0.03468343295111126 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4161849710982659, + "acc_stderr": 0.03758517775404947, + "acc_norm": 0.4161849710982659, + "acc_norm_stderr": 0.03758517775404947 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.36243386243386244, + "acc_stderr": 0.02475747390275205, + "acc_norm": 0.36243386243386244, + "acc_norm_stderr": 0.02475747390275205 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.5069444444444444, + "acc_stderr": 0.04180806750294938, + "acc_norm": 0.5069444444444444, + "acc_norm_stderr": 0.04180806750294938 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.39, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.72, + "acc_stderr": 0.04512608598542127, + "acc_norm": 0.72, + "acc_norm_stderr": 0.04512608598542127 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5549132947976878, + "acc_stderr": 0.02675625512966377, + "acc_norm": 0.5549132947976878, + "acc_norm_stderr": 0.02675625512966377 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 
0.4662576687116564, + "acc_stderr": 0.03919415545048409, + "acc_norm": 0.4662576687116564, + "acc_norm_stderr": 0.03919415545048409 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5925925925925926, + "acc_stderr": 0.027339546640662734, + "acc_norm": 0.5925925925925926, + "acc_norm_stderr": 0.027339546640662734 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.04725815626252606, + "acc_norm": 0.33, + "acc_norm_stderr": 0.04725815626252606 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.7098445595854922, + "acc_stderr": 0.03275264467791515, + "acc_norm": 0.7098445595854922, + "acc_norm_stderr": 0.03275264467791515 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.41228070175438597, + "acc_stderr": 0.04630653203366596, + "acc_norm": 0.41228070175438597, + "acc_norm_stderr": 0.04630653203366596 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6275229357798165, + "acc_stderr": 0.020728368457638497, + "acc_norm": 0.6275229357798165, + "acc_norm_stderr": 0.020728368457638497 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.042163702135578345, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.042163702135578345 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5098039215686274, + "acc_stderr": 0.028624412550167958, + "acc_norm": 0.5098039215686274, + "acc_norm_stderr": 0.028624412550167958 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.743801652892562, + "acc_stderr": 0.0398497965330287, + "acc_norm": 0.743801652892562, + "acc_norm_stderr": 0.0398497965330287 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5723684210526315, + "acc_stderr": 0.04026097083296563, + "acc_norm": 0.5723684210526315, + "acc_norm_stderr": 0.04026097083296563 + }, + 
"harness|ko_mmlu_professional_psychology|5": { + "acc": 0.477124183006536, + "acc_stderr": 0.02020665318788479, + "acc_norm": 0.477124183006536, + "acc_norm_stderr": 0.02020665318788479 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.37943262411347517, + "acc_stderr": 0.028947338851614105, + "acc_norm": 0.37943262411347517, + "acc_norm_stderr": 0.028947338851614105 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.375, + "acc_stderr": 0.04595091388086298, + "acc_norm": 0.375, + "acc_norm_stderr": 0.04595091388086298 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.46296296296296297, + "acc_stderr": 0.03400603625538272, + "acc_norm": 0.46296296296296297, + "acc_norm_stderr": 0.03400603625538272 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.21564245810055865, + "acc_stderr": 0.013754835975482346, + "acc_norm": 0.21564245810055865, + "acc_norm_stderr": 0.013754835975482346 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.64, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.64, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4485294117647059, + "acc_stderr": 0.030211479609121596, + "acc_norm": 0.4485294117647059, + "acc_norm_stderr": 0.030211479609121596 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5510204081632653, + "acc_stderr": 0.03184213866687579, + "acc_norm": 0.5510204081632653, + "acc_norm_stderr": 0.03184213866687579 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.7257383966244726, + "acc_stderr": 0.029041333510598025, + "acc_norm": 0.7257383966244726, + "acc_norm_stderr": 0.029041333510598025 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.36114732724902215, + "acc_stderr": 0.01226793547751904, + "acc_norm": 
0.36114732724902215, + "acc_norm_stderr": 0.01226793547751904 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.6323529411764706, + "acc_stderr": 0.03384132045674118, + "acc_norm": 0.6323529411764706, + "acc_norm_stderr": 0.03384132045674118 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.6060606060606061, + "acc_stderr": 0.0381549430868893, + "acc_norm": 0.6060606060606061, + "acc_norm_stderr": 0.0381549430868893 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3329253365973072, + "mc1_stderr": 0.016497402382012055, + "mc2": 0.5104782245492422, + "mc2_stderr": 0.01606754411299208 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.45336481700118064, + "acc_stderr": 0.01711541822522687, + "acc_norm": 0.4887839433293979, + "acc_norm_stderr": 0.017186028469489283 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + 
"harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "yuntaeyang/SOLAR-10.7B-Instructlora_sftt-v1.0", + "model_sha": "2856d5c868b35f1b5efb8df787d244a06dc9fded", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/yuntaeyang/Yi-6B-ko-dpo/result_2024-01-03 05:07:07.json b/yuntaeyang/Yi-6B-ko-dpo/result_2024-01-03 05:07:07.json new file 
mode 100644 index 0000000000000000000000000000000000000000..37ce385fd14c1f8f479224f64a100afa272ce624 --- /dev/null +++ b/yuntaeyang/Yi-6B-ko-dpo/result_2024-01-03 05:07:07.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3626279863481229, + "acc_stderr": 0.014049106564955002, + "acc_norm": 0.4249146757679181, + "acc_norm_stderr": 0.014445698968520769 + }, + "harness|ko_hellaswag|10": { + "acc": 0.40131447918741286, + "acc_stderr": 0.0048916267180972774, + "acc_norm": 0.5344552877912766, + "acc_norm_stderr": 0.004977919906875375 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.45614035087719296, + "acc_stderr": 0.03820042586602967, + "acc_norm": 0.45614035087719296, + "acc_norm_stderr": 0.03820042586602967 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5339805825242718, + "acc_stderr": 0.0493929144727348, + "acc_norm": 0.5339805825242718, + "acc_norm_stderr": 0.0493929144727348 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5466155810983397, + "acc_stderr": 0.017802087135850304, + "acc_norm": 0.5466155810983397, + "acc_norm_stderr": 0.017802087135850304 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45925925925925926, + "acc_stderr": 0.04304979692464243, + "acc_norm": 0.45925925925925926, + "acc_norm_stderr": 0.04304979692464243 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3829787234042553, + "acc_stderr": 0.03177821250236922, + "acc_norm": 0.3829787234042553, + "acc_norm_stderr": 0.03177821250236922 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39156626506024095, + "acc_stderr": 0.037998574544796354, + "acc_norm": 0.39156626506024095, + "acc_norm_stderr": 0.037998574544796354 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5112540192926045, + "acc_stderr": 0.028390897396863533, + "acc_norm": 0.5112540192926045, + 
"acc_norm_stderr": 0.028390897396863533 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4798206278026906, + "acc_stderr": 0.033530461674123, + "acc_norm": 0.4798206278026906, + "acc_norm_stderr": 0.033530461674123 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5190839694656488, + "acc_stderr": 0.043820947055509867, + "acc_norm": 0.5190839694656488, + "acc_norm_stderr": 0.043820947055509867 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.41, + "acc_stderr": 0.04943110704237103, + "acc_norm": 0.41, + "acc_norm_stderr": 0.04943110704237103 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5909090909090909, + "acc_stderr": 0.03502975799413007, + "acc_norm": 0.5909090909090909, + "acc_norm_stderr": 0.03502975799413007 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4827586206896552, + "acc_stderr": 0.04164188720169377, + "acc_norm": 0.4827586206896552, + "acc_norm_stderr": 0.04164188720169377 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.043898699568087785, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.043898699568087785 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.032145368597886394, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.032145368597886394 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.47692307692307695, + "acc_stderr": 0.025323990861736118, + "acc_norm": 0.47692307692307695, + "acc_norm_stderr": 0.025323990861736118 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5092592592592593, + "acc_stderr": 0.04832853553437055, + "acc_norm": 
0.5092592592592593, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.45320197044334976, + "acc_stderr": 0.03502544650845872, + "acc_norm": 0.45320197044334976, + "acc_norm_stderr": 0.03502544650845872 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4645161290322581, + "acc_stderr": 0.028372287797962956, + "acc_norm": 0.4645161290322581, + "acc_norm_stderr": 0.028372287797962956 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6794871794871795, + "acc_stderr": 0.030572811310299604, + "acc_norm": 0.6794871794871795, + "acc_norm_stderr": 0.030572811310299604 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.47547169811320755, + "acc_stderr": 0.030735822206205608, + "acc_norm": 0.47547169811320755, + "acc_norm_stderr": 0.030735822206205608 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.04769300568972745, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.04769300568972745 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3037037037037037, + "acc_stderr": 0.028037929969115, + "acc_norm": 0.3037037037037037, + "acc_norm_stderr": 0.028037929969115 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.304635761589404, + "acc_stderr": 0.03757949922943343, + "acc_norm": 0.304635761589404, + "acc_norm_stderr": 0.03757949922943343 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5671641791044776, + "acc_stderr": 0.03503490923673281, + "acc_norm": 0.5671641791044776, + "acc_norm_stderr": 0.03503490923673281 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4046242774566474, + "acc_stderr": 0.03742461193887249, + "acc_norm": 0.4046242774566474, + "acc_norm_stderr": 0.03742461193887249 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.36243386243386244, + "acc_stderr": 0.024757473902752045, + "acc_norm": 0.36243386243386244, + "acc_norm_stderr": 0.024757473902752045 + }, + 
"harness|ko_mmlu_college_biology|5": { + "acc": 0.3402777777777778, + "acc_stderr": 0.03962135573486219, + "acc_norm": 0.3402777777777778, + "acc_norm_stderr": 0.03962135573486219 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.28, + "acc_stderr": 0.04512608598542128, + "acc_norm": 0.28, + "acc_norm_stderr": 0.04512608598542128 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.52, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.52, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5057803468208093, + "acc_stderr": 0.02691729617914911, + "acc_norm": 0.5057803468208093, + "acc_norm_stderr": 0.02691729617914911 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4662576687116564, + "acc_stderr": 0.03919415545048409, + "acc_norm": 0.4662576687116564, + "acc_norm_stderr": 0.03919415545048409 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.027777777777777797, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.027777777777777797 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.33, + "acc_stderr": 0.047258156262526045, + "acc_norm": 0.33, + "acc_norm_stderr": 0.047258156262526045 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5595854922279793, + "acc_stderr": 0.035827245300360945, + "acc_norm": 0.5595854922279793, + "acc_norm_stderr": 0.035827245300360945 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2631578947368421, + "acc_stderr": 0.04142439719489361, + "acc_norm": 0.2631578947368421, + "acc_norm_stderr": 0.04142439719489361 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5853211009174312, + "acc_stderr": 0.021122903208602592, + "acc_norm": 0.5853211009174312, + "acc_norm_stderr": 0.021122903208602592 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30158730158730157, + "acc_stderr": 0.041049472699033945, + "acc_norm": 0.30158730158730157, + "acc_norm_stderr": 
0.041049472699033945 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4084967320261438, + "acc_stderr": 0.028146405993096358, + "acc_norm": 0.4084967320261438, + "acc_norm_stderr": 0.028146405993096358 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6198347107438017, + "acc_stderr": 0.04431324501968431, + "acc_norm": 0.6198347107438017, + "acc_norm_stderr": 0.04431324501968431 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4934210526315789, + "acc_stderr": 0.040685900502249704, + "acc_norm": 0.4934210526315789, + "acc_norm_stderr": 0.040685900502249704 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4199346405228758, + "acc_stderr": 0.019966811178256483, + "acc_norm": 0.4199346405228758, + "acc_norm_stderr": 0.019966811178256483 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.36524822695035464, + "acc_stderr": 0.028723863853281278, + "acc_norm": 0.36524822695035464, + "acc_norm_stderr": 0.028723863853281278 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.32142857142857145, + "acc_stderr": 0.044328040552915185, + "acc_norm": 0.32142857142857145, + "acc_norm_stderr": 0.044328040552915185 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3194444444444444, + "acc_stderr": 0.03179876342176852, + "acc_norm": 0.3194444444444444, + "acc_norm_stderr": 0.03179876342176852 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2335195530726257, + "acc_stderr": 0.014149575348976259, + "acc_norm": 0.2335195530726257, + "acc_norm_stderr": 0.014149575348976259 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.5, + "acc_stderr": 0.050251890762960605, + "acc_norm": 
0.5, + "acc_norm_stderr": 0.050251890762960605 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4522058823529412, + "acc_stderr": 0.03023375855159646, + "acc_norm": 0.4522058823529412, + "acc_norm_stderr": 0.03023375855159646 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.42448979591836733, + "acc_stderr": 0.031642094879429414, + "acc_norm": 0.42448979591836733, + "acc_norm_stderr": 0.031642094879429414 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5864978902953587, + "acc_stderr": 0.03205649904851859, + "acc_norm": 0.5864978902953587, + "acc_norm_stderr": 0.03205649904851859 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.33572359843546284, + "acc_stderr": 0.012061304157664621, + "acc_norm": 0.33572359843546284, + "acc_norm_stderr": 0.012061304157664621 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5294117647058824, + "acc_stderr": 0.03503235296367993, + "acc_norm": 0.5294117647058824, + "acc_norm_stderr": 0.03503235296367993 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5515151515151515, + "acc_stderr": 0.038835659779569286, + "acc_norm": 0.5515151515151515, + "acc_norm_stderr": 0.038835659779569286 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.27539779681762544, + "mc1_stderr": 0.015638135667775527, + "mc2": 0.41652946342026925, + "mc2_stderr": 0.015241115429020723 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5076741440377804, + "acc_stderr": 0.017188329219654276, + "acc_norm": 0.5631641086186541, + "acc_norm_stderr": 0.017052633559856076 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + 
"harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + 
"harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "yuntaeyang/Yi-6B-ko-dpo", + "model_sha": "5fbca212dbe901ab10fd6ee11cab176bc9fbaba5", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/yuntaeyang/Yi-Ko-6B-lora/result_2023-12-27 10:01:00.json b/yuntaeyang/Yi-Ko-6B-lora/result_2023-12-27 10:01:00.json new file mode 100644 index 0000000000000000000000000000000000000000..ee38c1a2fb46c6e72d00aed7d3af3152170dc588 --- /dev/null +++ b/yuntaeyang/Yi-Ko-6B-lora/result_2023-12-27 10:01:00.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.3720136518771331, + "acc_stderr": 0.014124597881844465, + "acc_norm": 0.4121160409556314, + "acc_norm_stderr": 0.014383915302225402 + }, + "harness|ko_hellaswag|10": { + "acc": 0.39294961163114916, + "acc_stderr": 0.004874076250521576, + "acc_norm": 0.5160326628161721, + "acc_norm_stderr": 0.004987215542259674 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5087719298245614, + "acc_stderr": 0.038342347441649924, + "acc_norm": 0.5087719298245614, + "acc_norm_stderr": 0.038342347441649924 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5825242718446602, + "acc_stderr": 0.04882840548212238, + "acc_norm": 0.5825242718446602, + "acc_norm_stderr": 0.04882840548212238 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5555555555555556, + "acc_stderr": 0.017769250583533253, + "acc_norm": 0.5555555555555556, + "acc_norm_stderr": 0.017769250583533253 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.5185185185185185, + "acc_stderr": 
0.043163785995113245, + "acc_norm": 0.5185185185185185, + "acc_norm_stderr": 0.043163785995113245 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.3702127659574468, + "acc_stderr": 0.03156564682236784, + "acc_norm": 0.3702127659574468, + "acc_norm_stderr": 0.03156564682236784 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39759036144578314, + "acc_stderr": 0.038099730845402184, + "acc_norm": 0.39759036144578314, + "acc_norm_stderr": 0.038099730845402184 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5241157556270096, + "acc_stderr": 0.028365041542564584, + "acc_norm": 0.5241157556270096, + "acc_norm_stderr": 0.028365041542564584 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.45739910313901344, + "acc_stderr": 0.033435777055830646, + "acc_norm": 0.45739910313901344, + "acc_norm_stderr": 0.033435777055830646 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.549618320610687, + "acc_stderr": 0.04363643698524779, + "acc_norm": 0.549618320610687, + "acc_norm_stderr": 0.04363643698524779 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.03427308652999935, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.03427308652999935 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.496551724137931, + "acc_stderr": 0.04166567577101579, + "acc_norm": 0.496551724137931, + "acc_norm_stderr": 0.04166567577101579 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.04389869956808778, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.04389869956808778 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 
0.48739495798319327, + "acc_stderr": 0.03246816765752174, + "acc_norm": 0.48739495798319327, + "acc_norm_stderr": 0.03246816765752174 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.49230769230769234, + "acc_stderr": 0.025348006031534795, + "acc_norm": 0.49230769230769234, + "acc_norm_stderr": 0.025348006031534795 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.6, + "acc_stderr": 0.049236596391733084, + "acc_norm": 0.6, + "acc_norm_stderr": 0.049236596391733084 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4039408866995074, + "acc_stderr": 0.034524539038220385, + "acc_norm": 0.4039408866995074, + "acc_norm_stderr": 0.034524539038220385 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5064516129032258, + "acc_stderr": 0.02844163823354051, + "acc_norm": 0.5064516129032258, + "acc_norm_stderr": 0.02844163823354051 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7435897435897436, + "acc_stderr": 0.028605953702004243, + "acc_norm": 0.7435897435897436, + "acc_norm_stderr": 0.028605953702004243 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.47924528301886793, + "acc_stderr": 0.030746349975723463, + "acc_norm": 0.47924528301886793, + "acc_norm_stderr": 0.030746349975723463 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5, + "acc_stderr": 0.04789131426105757, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04789131426105757 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.2851851851851852, + "acc_stderr": 0.027528599210340492, + "acc_norm": 0.2851851851851852, + "acc_norm_stderr": 0.027528599210340492 + }, + 
"harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3509933774834437, + "acc_stderr": 0.03896981964257375, + "acc_norm": 0.3509933774834437, + "acc_norm_stderr": 0.03896981964257375 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5621890547263682, + "acc_stderr": 0.0350808011219984, + "acc_norm": 0.5621890547263682, + "acc_norm_stderr": 0.0350808011219984 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3930635838150289, + "acc_stderr": 0.03724249595817731, + "acc_norm": 0.3930635838150289, + "acc_norm_stderr": 0.03724249595817731 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3412698412698413, + "acc_stderr": 0.02441923496681907, + "acc_norm": 0.3412698412698413, + "acc_norm_stderr": 0.02441923496681907 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4305555555555556, + "acc_stderr": 0.04140685639111503, + "acc_norm": 0.4305555555555556, + "acc_norm_stderr": 0.04140685639111503 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.25, + "acc_stderr": 0.04351941398892446, + "acc_norm": 0.25, + "acc_norm_stderr": 0.04351941398892446 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.476878612716763, + "acc_stderr": 0.026890297881303128, + "acc_norm": 0.476878612716763, + "acc_norm_stderr": 0.026890297881303128 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4723926380368098, + "acc_stderr": 0.039223782906109894, + "acc_norm": 0.4723926380368098, + "acc_norm_stderr": 0.039223782906109894 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4876543209876543, + "acc_stderr": 0.027812262269327242, + "acc_norm": 0.4876543209876543, + "acc_norm_stderr": 0.027812262269327242 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + "acc_norm_stderr": 0.046056618647183814 + }, + 
"harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6010362694300518, + "acc_stderr": 0.03533999094065696, + "acc_norm": 0.6010362694300518, + "acc_norm_stderr": 0.03533999094065696 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3684210526315789, + "acc_stderr": 0.04537815354939391, + "acc_norm": 0.3684210526315789, + "acc_norm_stderr": 0.04537815354939391 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.636697247706422, + "acc_stderr": 0.020620603919625804, + "acc_norm": 0.636697247706422, + "acc_norm_stderr": 0.020620603919625804 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.30158730158730157, + "acc_stderr": 0.041049472699033945, + "acc_norm": 0.30158730158730157, + "acc_norm_stderr": 0.041049472699033945 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.47058823529411764, + "acc_stderr": 0.02858034106513829, + "acc_norm": 0.47058823529411764, + "acc_norm_stderr": 0.02858034106513829 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.7107438016528925, + "acc_stderr": 0.04139112727635463, + "acc_norm": 0.7107438016528925, + "acc_norm_stderr": 0.04139112727635463 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.5, + "acc_stderr": 0.04068942293855797, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04068942293855797 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4084967320261438, + "acc_stderr": 0.019886221037501876, + "acc_norm": 0.4084967320261438, + "acc_norm_stderr": 0.019886221037501876 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3262411347517731, + "acc_stderr": 0.027968453043563168, + "acc_norm": 0.3262411347517731, + "acc_norm_stderr": 0.027968453043563168 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.29464285714285715, + "acc_stderr": 0.043270409325787317, + "acc_norm": 0.29464285714285715, + 
"acc_norm_stderr": 0.043270409325787317 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.3611111111111111, + "acc_stderr": 0.032757734861009996, + "acc_norm": 0.3611111111111111, + "acc_norm_stderr": 0.032757734861009996 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.29608938547486036, + "acc_stderr": 0.01526867731760228, + "acc_norm": 0.29608938547486036, + "acc_norm_stderr": 0.01526867731760228 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.6, + "acc_stderr": 0.04923659639173309, + "acc_norm": 0.6, + "acc_norm_stderr": 0.04923659639173309 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4632352941176471, + "acc_stderr": 0.03029061918048569, + "acc_norm": 0.4632352941176471, + "acc_norm_stderr": 0.03029061918048569 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4326530612244898, + "acc_stderr": 0.03171752824062664, + "acc_norm": 0.4326530612244898, + "acc_norm_stderr": 0.03171752824062664 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5864978902953587, + "acc_stderr": 0.03205649904851858, + "acc_norm": 0.5864978902953587, + "acc_norm_stderr": 0.03205649904851858 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3344198174706649, + "acc_stderr": 0.012049668983214938, + "acc_norm": 0.3344198174706649, + "acc_norm_stderr": 0.012049668983214938 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5196078431372549, + "acc_stderr": 0.03506612560524866, + "acc_norm": 0.5196078431372549, + "acc_norm_stderr": 0.03506612560524866 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5878787878787879, + "acc_stderr": 0.038435669935887165, + "acc_norm": 0.5878787878787879, + "acc_norm_stderr": 0.038435669935887165 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3011015911872705, + 
"mc1_stderr": 0.01605899902610062, + "mc2": 0.44032674532682686, + "mc2_stderr": 0.015044649657068107 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5348288075560803, + "acc_stderr": 0.017148598015747422, + "acc_norm": 0.6068476977567887, + "acc_norm_stderr": 0.016793262801287075 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + 
"harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "yuntaeyang/Yi-Ko-6B-lora", + "model_sha": "faf11c288d664426c5592f32741ff49ae20667c4", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/yuntaeyang/lion-7b-lora-kor/result_2023-12-15 04:03:18.json b/yuntaeyang/lion-7b-lora-kor/result_2023-12-15 04:03:18.json new file mode 100644 index 0000000000000000000000000000000000000000..0ee00b6c5910c5e0344869d5b027180733face8e --- /dev/null +++ b/yuntaeyang/lion-7b-lora-kor/result_2023-12-15 04:03:18.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.2158703071672355, + "acc_stderr": 0.012022975360030662, + "acc_norm": 0.27559726962457337, + "acc_norm_stderr": 0.013057169655761834 + }, + "harness|ko_hellaswag|10": { + "acc": 0.2523401712806214, + "acc_stderr": 0.004334676952703859, + "acc_norm": 0.24297948615813583, + 
"acc_norm_stderr": 0.004280062838446546 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.21052631578947367, + "acc_stderr": 0.0312678171466318, + "acc_norm": 0.21052631578947367, + "acc_norm_stderr": 0.0312678171466318 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.2524271844660194, + "acc_stderr": 0.04301250399690877, + "acc_norm": 0.2524271844660194, + "acc_norm_stderr": 0.04301250399690877 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.28735632183908044, + "acc_stderr": 0.0161824107306827, + "acc_norm": 0.28735632183908044, + "acc_norm_stderr": 0.0161824107306827 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.2518518518518518, + "acc_stderr": 0.03749850709174021, + "acc_norm": 0.2518518518518518, + "acc_norm_stderr": 0.03749850709174021 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768077, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768077 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.32340425531914896, + "acc_stderr": 0.03057944277361034, + "acc_norm": 0.32340425531914896, + "acc_norm_stderr": 0.03057944277361034 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3192771084337349, + "acc_stderr": 0.03629335329947859, + "acc_norm": 0.3192771084337349, + "acc_norm_stderr": 0.03629335329947859 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.2733118971061093, + "acc_stderr": 0.025311765975426115, + "acc_norm": 0.2733118971061093, + "acc_norm_stderr": 0.025311765975426115 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.37668161434977576, + "acc_stderr": 0.03252113489929189, + "acc_norm": 0.37668161434977576, + "acc_norm_stderr": 0.03252113489929189 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.22900763358778625, + "acc_stderr": 0.036853466317118506, + "acc_norm": 0.22900763358778625, + "acc_norm_stderr": 0.036853466317118506 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768077, + "acc_norm": 0.26, + 
"acc_norm_stderr": 0.04408440022768077 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.21717171717171718, + "acc_stderr": 0.02937661648494563, + "acc_norm": 0.21717171717171718, + "acc_norm_stderr": 0.02937661648494563 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.2206896551724138, + "acc_stderr": 0.03455930201924811, + "acc_norm": 0.2206896551724138, + "acc_norm_stderr": 0.03455930201924811 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.19607843137254902, + "acc_stderr": 0.03950581861179962, + "acc_norm": 0.19607843137254902, + "acc_norm_stderr": 0.03950581861179962 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.23109243697478993, + "acc_stderr": 0.027381406927868973, + "acc_norm": 0.23109243697478993, + "acc_norm_stderr": 0.027381406927868973 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.2205128205128205, + "acc_stderr": 0.021020672680827912, + "acc_norm": 0.2205128205128205, + "acc_norm_stderr": 0.021020672680827912 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.24, + "acc_stderr": 0.04292346959909283, + "acc_norm": 0.24, + "acc_norm_stderr": 0.04292346959909283 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.2962962962962963, + "acc_stderr": 0.044143436668549335, + "acc_norm": 0.2962962962962963, + "acc_norm_stderr": 0.044143436668549335 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.270935960591133, + "acc_stderr": 0.031270907132976984, + "acc_norm": 0.270935960591133, + "acc_norm_stderr": 0.031270907132976984 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.25483870967741934, + "acc_stderr": 0.024790118459332208, + "acc_norm": 0.25483870967741934, + "acc_norm_stderr": 0.024790118459332208 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.2564102564102564, + "acc_stderr": 
0.02860595370200424, + "acc_norm": 0.2564102564102564, + "acc_norm_stderr": 0.02860595370200424 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.2679245283018868, + "acc_stderr": 0.027257260322494845, + "acc_norm": 0.2679245283018868, + "acc_norm_stderr": 0.027257260322494845 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.34545454545454546, + "acc_stderr": 0.04554619617541054, + "acc_norm": 0.34545454545454546, + "acc_norm_stderr": 0.04554619617541054 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.26296296296296295, + "acc_stderr": 0.02684205787383371, + "acc_norm": 0.26296296296296295, + "acc_norm_stderr": 0.02684205787383371 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.1986754966887417, + "acc_stderr": 0.03257847384436776, + "acc_norm": 0.1986754966887417, + "acc_norm_stderr": 0.03257847384436776 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.23880597014925373, + "acc_stderr": 0.030147775935409224, + "acc_norm": 0.23880597014925373, + "acc_norm_stderr": 0.030147775935409224 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.20809248554913296, + "acc_stderr": 0.030952890217749895, + "acc_norm": 0.20809248554913296, + "acc_norm_stderr": 0.030952890217749895 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2566137566137566, + "acc_stderr": 0.022494510767503154, + "acc_norm": 0.2566137566137566, + "acc_norm_stderr": 0.022494510767503154 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.2222222222222222, + "acc_stderr": 0.03476590104304134, + "acc_norm": 0.2222222222222222, + "acc_norm_stderr": 0.03476590104304134 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.21, + "acc_stderr": 0.040936018074033256, + "acc_norm": 0.21, + "acc_norm_stderr": 0.040936018074033256 + }, + "harness|ko_mmlu_moral_disputes|5": { + 
"acc": 0.24566473988439305, + "acc_stderr": 0.02317629820399201, + "acc_norm": 0.24566473988439305, + "acc_norm_stderr": 0.02317629820399201 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.24539877300613497, + "acc_stderr": 0.03380939813943353, + "acc_norm": 0.24539877300613497, + "acc_norm_stderr": 0.03380939813943353 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.2654320987654321, + "acc_stderr": 0.02456922360046085, + "acc_norm": 0.2654320987654321, + "acc_norm_stderr": 0.02456922360046085 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816506, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816506 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.20725388601036268, + "acc_stderr": 0.029252823291803627, + "acc_norm": 0.20725388601036268, + "acc_norm_stderr": 0.029252823291803627 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.042270544512321984, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.042270544512321984 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.23669724770642203, + "acc_stderr": 0.018224078117299095, + "acc_norm": 0.23669724770642203, + "acc_norm_stderr": 0.018224078117299095 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.1984126984126984, + "acc_stderr": 0.03567016675276862, + "acc_norm": 0.1984126984126984, + "acc_norm_stderr": 0.03567016675276862 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.22875816993464052, + "acc_stderr": 0.02405102973991225, + "acc_norm": 0.22875816993464052, + "acc_norm_stderr": 0.02405102973991225 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.26, + "acc_stderr": 0.044084400227680794, + "acc_norm": 0.26, + "acc_norm_stderr": 0.044084400227680794 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.24793388429752067, + "acc_stderr": 0.039418975265163025, + "acc_norm": 0.24793388429752067, + "acc_norm_stderr": 0.039418975265163025 + }, + 
"harness|ko_mmlu_astronomy|5": { + "acc": 0.18421052631578946, + "acc_stderr": 0.031546980450822305, + "acc_norm": 0.18421052631578946, + "acc_norm_stderr": 0.031546980450822305 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.2565359477124183, + "acc_stderr": 0.017667841612378984, + "acc_norm": 0.2565359477124183, + "acc_norm_stderr": 0.017667841612378984 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2553191489361702, + "acc_stderr": 0.02601199293090201, + "acc_norm": 0.2553191489361702, + "acc_norm_stderr": 0.02601199293090201 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.2857142857142857, + "acc_stderr": 0.04287858751340456, + "acc_norm": 0.2857142857142857, + "acc_norm_stderr": 0.04287858751340456 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.16203703703703703, + "acc_stderr": 0.025130453652268455, + "acc_norm": 0.16203703703703703, + "acc_norm_stderr": 0.025130453652268455 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.15, + "acc_stderr": 0.0358870281282637, + "acc_norm": 0.15, + "acc_norm_stderr": 0.0358870281282637 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.23, + "acc_stderr": 0.04229525846816507, + "acc_norm": 0.23, + "acc_norm_stderr": 0.04229525846816507 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.20220588235294118, + "acc_stderr": 0.02439819298665492, + "acc_norm": 0.20220588235294118, + "acc_norm_stderr": 0.02439819298665492 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.17142857142857143, + "acc_stderr": 0.024127463462650146, + "acc_norm": 0.17142857142857143, + "acc_norm_stderr": 0.024127463462650146 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.2616033755274262, + "acc_stderr": 0.028609516716994934, + 
"acc_norm": 0.2616033755274262, + "acc_norm_stderr": 0.028609516716994934 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.2392438070404172, + "acc_stderr": 0.010896123652676653, + "acc_norm": 0.2392438070404172, + "acc_norm_stderr": 0.010896123652676653 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.23529411764705882, + "acc_stderr": 0.029771775228145638, + "acc_norm": 0.23529411764705882, + "acc_norm_stderr": 0.029771775228145638 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.24242424242424243, + "acc_stderr": 0.033464098810559534, + "acc_norm": 0.24242424242424243, + "acc_norm_stderr": 0.033464098810559534 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.23133414932680538, + "mc1_stderr": 0.01476194517486267, + "mc2": NaN, + "mc2_stderr": NaN + }, + "harness|ko_commongen_v2|2": { + "acc": 0.08500590318772137, + "acc_stderr": 0.009588452201257215, + "acc_norm": 0.21251475796930341, + "acc_norm_stderr": 0.014064703386174932 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + 
"harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "yuntaeyang/lion-7b-lora-kor", + "model_sha": "e28cf28b0967a027f504fa87645616dcae67d502", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No 
newline at end of file diff --git a/zhengr/MixTAO-7Bx2-MoE-v8.1/result_2024-05-13 16:30:33.json b/zhengr/MixTAO-7Bx2-MoE-v8.1/result_2024-05-13 16:30:33.json new file mode 100644 index 0000000000000000000000000000000000000000..3a6aacde0a17e34d5c71691965d51846850b8db5 --- /dev/null +++ b/zhengr/MixTAO-7Bx2-MoE-v8.1/result_2024-05-13 16:30:33.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.39590443686006827, + "acc_stderr": 0.014291228393536587, + "acc_norm": 0.46501706484641636, + "acc_norm_stderr": 0.014575583922019675 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4039036048595897, + "acc_stderr": 0.004896757857022549, + "acc_norm": 0.5380402310296754, + "acc_norm_stderr": 0.004975319435777093 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.4853801169590643, + "acc_stderr": 0.038331852752130205, + "acc_norm": 0.4853801169590643, + "acc_norm_stderr": 0.038331852752130205 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5922330097087378, + "acc_stderr": 0.04865777570410768, + "acc_norm": 0.5922330097087378, + "acc_norm_stderr": 0.04865777570410768 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.4776500638569604, + "acc_stderr": 0.017862091778507876, + "acc_norm": 0.4776500638569604, + "acc_norm_stderr": 0.017862091778507876 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.04244633238353228, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.04244633238353228 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.26, + "acc_stderr": 0.04408440022768079, + "acc_norm": 0.26, + "acc_norm_stderr": 0.04408440022768079 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.41702127659574467, + "acc_stderr": 0.03223276266711712, + "acc_norm": 0.41702127659574467, + "acc_norm_stderr": 0.03223276266711712 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3855421686746988, + "acc_stderr": 0.037891344246115496, + "acc_norm": 0.3855421686746988, + "acc_norm_stderr": 
0.037891344246115496 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4565916398713826, + "acc_stderr": 0.0282908690541976, + "acc_norm": 0.4565916398713826, + "acc_norm_stderr": 0.0282908690541976 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4304932735426009, + "acc_stderr": 0.033231973029429394, + "acc_norm": 0.4304932735426009, + "acc_norm_stderr": 0.033231973029429394 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4580152671755725, + "acc_stderr": 0.04369802690578756, + "acc_norm": 0.4580152671755725, + "acc_norm_stderr": 0.04369802690578756 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5808080808080808, + "acc_stderr": 0.03515520728670417, + "acc_norm": 0.5808080808080808, + "acc_norm_stderr": 0.03515520728670417 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.42758620689655175, + "acc_stderr": 0.04122737111370331, + "acc_norm": 0.42758620689655175, + "acc_norm_stderr": 0.04122737111370331 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.04389869956808778, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.04389869956808778 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5210084033613446, + "acc_stderr": 0.03244980849990029, + "acc_norm": 0.5210084033613446, + "acc_norm_stderr": 0.03244980849990029 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4846153846153846, + "acc_stderr": 0.02533900301010653, + "acc_norm": 0.4846153846153846, + "acc_norm_stderr": 0.02533900301010653 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.63, + "acc_stderr": 0.04852365870939098, + "acc_norm": 0.63, + "acc_norm_stderr": 0.04852365870939098 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + 
"acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5185185185185185, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.5185185185185185, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39901477832512317, + "acc_stderr": 0.03445487686264715, + "acc_norm": 0.39901477832512317, + "acc_norm_stderr": 0.03445487686264715 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.44193548387096776, + "acc_stderr": 0.02825155790684974, + "acc_norm": 0.44193548387096776, + "acc_norm_stderr": 0.02825155790684974 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7521367521367521, + "acc_stderr": 0.0282863240755644, + "acc_norm": 0.7521367521367521, + "acc_norm_stderr": 0.0282863240755644 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4679245283018868, + "acc_stderr": 0.030709486992556545, + "acc_norm": 0.4679245283018868, + "acc_norm_stderr": 0.030709486992556545 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4818181818181818, + "acc_stderr": 0.04785964010794916, + "acc_norm": 0.4818181818181818, + "acc_norm_stderr": 0.04785964010794916 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.34814814814814815, + "acc_stderr": 0.029045600290616258, + "acc_norm": 0.34814814814814815, + "acc_norm_stderr": 0.029045600290616258 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2781456953642384, + "acc_stderr": 0.03658603262763744, + "acc_norm": 0.2781456953642384, + "acc_norm_stderr": 0.03658603262763744 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5572139303482587, + "acc_stderr": 0.03512310964123936, + "acc_norm": 0.5572139303482587, + "acc_norm_stderr": 0.03512310964123936 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4161849710982659, + "acc_stderr": 0.03758517775404947, + "acc_norm": 0.4161849710982659, + "acc_norm_stderr": 0.03758517775404947 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 
0.3835978835978836, + "acc_stderr": 0.025043757318520196, + "acc_norm": 0.3835978835978836, + "acc_norm_stderr": 0.025043757318520196 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3402777777777778, + "acc_stderr": 0.03962135573486219, + "acc_norm": 0.3402777777777778, + "acc_norm_stderr": 0.03962135573486219 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.34, + "acc_stderr": 0.047609522856952344, + "acc_norm": 0.34, + "acc_norm_stderr": 0.047609522856952344 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.51, + "acc_stderr": 0.050241839379569095, + "acc_norm": 0.51, + "acc_norm_stderr": 0.050241839379569095 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5144508670520231, + "acc_stderr": 0.02690784985628254, + "acc_norm": 0.5144508670520231, + "acc_norm_stderr": 0.02690784985628254 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.5460122699386503, + "acc_stderr": 0.0391170190467718, + "acc_norm": 0.5460122699386503, + "acc_norm_stderr": 0.0391170190467718 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.44135802469135804, + "acc_stderr": 0.02762873715566878, + "acc_norm": 0.44135802469135804, + "acc_norm_stderr": 0.02762873715566878 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.48704663212435234, + "acc_stderr": 0.036072280610477486, + "acc_norm": 0.48704663212435234, + "acc_norm_stderr": 0.036072280610477486 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.30701754385964913, + "acc_stderr": 0.0433913832257986, + "acc_norm": 0.30701754385964913, + "acc_norm_stderr": 0.0433913832257986 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5339449541284403, + "acc_stderr": 0.02138786335035399, + "acc_norm": 0.5339449541284403, + "acc_norm_stderr": 0.02138786335035399 + }, + "harness|ko_mmlu_formal_logic|5": { 
+ "acc": 0.4603174603174603, + "acc_stderr": 0.04458029125470973, + "acc_norm": 0.4603174603174603, + "acc_norm_stderr": 0.04458029125470973 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.48366013071895425, + "acc_stderr": 0.028614624752805407, + "acc_norm": 0.48366013071895425, + "acc_norm_stderr": 0.028614624752805407 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.47, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.47, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6115702479338843, + "acc_stderr": 0.04449270350068382, + "acc_norm": 0.6115702479338843, + "acc_norm_stderr": 0.04449270350068382 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40789473684210525, + "acc_stderr": 0.03999309712777472, + "acc_norm": 0.40789473684210525, + "acc_norm_stderr": 0.03999309712777472 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.3954248366013072, + "acc_stderr": 0.019780465954777515, + "acc_norm": 0.3954248366013072, + "acc_norm_stderr": 0.019780465954777515 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.35815602836879434, + "acc_stderr": 0.028602085862759412, + "acc_norm": 0.35815602836879434, + "acc_norm_stderr": 0.028602085862759412 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.4107142857142857, + "acc_stderr": 0.04669510663875192, + "acc_norm": 0.4107142857142857, + "acc_norm_stderr": 0.04669510663875192 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.41203703703703703, + "acc_stderr": 0.03356787758160834, + "acc_norm": 0.41203703703703703, + "acc_norm_stderr": 0.03356787758160834 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2860335195530726, + "acc_stderr": 0.015113972129062136, + "acc_norm": 0.2860335195530726, + "acc_norm_stderr": 0.015113972129062136 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + 
"harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.68, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.68, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.3492647058823529, + "acc_stderr": 0.028959755196824866, + "acc_norm": 0.3492647058823529, + "acc_norm_stderr": 0.028959755196824866 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.5183673469387755, + "acc_stderr": 0.03198761546763127, + "acc_norm": 0.5183673469387755, + "acc_norm_stderr": 0.03198761546763127 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6244725738396625, + "acc_stderr": 0.03152256243091157, + "acc_norm": 0.6244725738396625, + "acc_norm_stderr": 0.03152256243091157 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.35267275097783574, + "acc_stderr": 0.012203286846053886, + "acc_norm": 0.35267275097783574, + "acc_norm_stderr": 0.012203286846053886 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5098039215686274, + "acc_stderr": 0.03508637358630572, + "acc_norm": 0.5098039215686274, + "acc_norm_stderr": 0.03508637358630572 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5151515151515151, + "acc_stderr": 0.03902551007374448, + "acc_norm": 0.5151515151515151, + "acc_norm_stderr": 0.03902551007374448 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.4357405140758874, + "mc1_stderr": 0.017358345398863124, + "mc2": 0.6087917688450574, + "mc2_stderr": 0.016139468783932145 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4462809917355372, + "acc_stderr": 0.017090852631668336, + "acc_norm": 0.45336481700118064, + "acc_norm_stderr": 0.017115418225226865 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + 
"harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + 
"harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "zhengr/MixTAO-7Bx2-MoE-v8.1", + "model_sha": "16bf59fbaa8825a6c65ab31a9a6608957aa95bfd", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/zomd/AISquare-Instruct-llama2-koen-13b-v0.9.17/result_2023-12-06 13:30:05.json b/zomd/AISquare-Instruct-llama2-koen-13b-v0.9.17/result_2023-12-06 13:30:05.json new file mode 100644 index 0000000000000000000000000000000000000000..0d54fa13084abbe992e6cf04479d9dfef16b71a7 --- /dev/null +++ b/zomd/AISquare-Instruct-llama2-koen-13b-v0.9.17/result_2023-12-06 13:30:05.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.39078498293515357, + "acc_stderr": 0.014258563880513785, + "acc_norm": 0.4513651877133106, + "acc_norm_stderr": 0.014542104569955265 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4239195379406493, + "acc_stderr": 0.0049316790599193755, + "acc_norm": 0.5747858992232623, + "acc_norm_stderr": 0.004933650697000603 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.43859649122807015, + "acc_stderr": 0.038057975055904594, + "acc_norm": 0.43859649122807015, + "acc_norm_stderr": 0.038057975055904594 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.44660194174757284, + "acc_stderr": 0.04922424153458933, + "acc_norm": 0.44660194174757284, + "acc_norm_stderr": 0.04922424153458933 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5210727969348659, + "acc_stderr": 
0.017864076786212907, + "acc_norm": 0.5210727969348659, + "acc_norm_stderr": 0.017864076786212907 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4148148148148148, + "acc_stderr": 0.04256193767901407, + "acc_norm": 0.4148148148148148, + "acc_norm_stderr": 0.04256193767901407 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206824, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206824 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4, + "acc_stderr": 0.03202563076101735, + "acc_norm": 0.4, + "acc_norm_stderr": 0.03202563076101735 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4397590361445783, + "acc_stderr": 0.03864139923699122, + "acc_norm": 0.4397590361445783, + "acc_norm_stderr": 0.03864139923699122 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.49517684887459806, + "acc_stderr": 0.02839677044411129, + "acc_norm": 0.49517684887459806, + "acc_norm_stderr": 0.02839677044411129 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5201793721973094, + "acc_stderr": 0.033530461674123, + "acc_norm": 0.5201793721973094, + "acc_norm_stderr": 0.033530461674123 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.42748091603053434, + "acc_stderr": 0.043389203057924, + "acc_norm": 0.42748091603053434, + "acc_norm_stderr": 0.043389203057924 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.38, + "acc_stderr": 0.048783173121456316, + "acc_norm": 0.38, + "acc_norm_stderr": 0.048783173121456316 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5252525252525253, + "acc_stderr": 0.03557806245087314, + "acc_norm": 0.5252525252525253, + "acc_norm_stderr": 0.03557806245087314 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.3586206896551724, + "acc_stderr": 0.03996629574876718, + "acc_norm": 0.3586206896551724, + "acc_norm_stderr": 0.03996629574876718 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.21568627450980393, + "acc_stderr": 0.04092563958237657, + 
"acc_norm": 0.21568627450980393, + "acc_norm_stderr": 0.04092563958237657 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.42436974789915966, + "acc_stderr": 0.032104790510157764, + "acc_norm": 0.42436974789915966, + "acc_norm_stderr": 0.032104790510157764 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4153846153846154, + "acc_stderr": 0.024985354923102315, + "acc_norm": 0.4153846153846154, + "acc_norm_stderr": 0.024985354923102315 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.47, + "acc_stderr": 0.05016135580465919, + "acc_norm": 0.47, + "acc_norm_stderr": 0.05016135580465919 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.39901477832512317, + "acc_stderr": 0.03445487686264715, + "acc_norm": 0.39901477832512317, + "acc_norm_stderr": 0.03445487686264715 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.43870967741935485, + "acc_stderr": 0.028229497320317213, + "acc_norm": 0.43870967741935485, + "acc_norm_stderr": 0.028229497320317213 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6196581196581197, + "acc_stderr": 0.031804252043840985, + "acc_norm": 0.6196581196581197, + "acc_norm_stderr": 0.031804252043840985 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.44150943396226416, + "acc_stderr": 0.030561590426731833, + "acc_norm": 0.44150943396226416, + "acc_norm_stderr": 0.030561590426731833 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.4727272727272727, + "acc_stderr": 0.04782001791380063, + "acc_norm": 0.4727272727272727, + "acc_norm_stderr": 0.04782001791380063 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 
0.24814814814814815, + "acc_stderr": 0.0263357394040558, + "acc_norm": 0.24814814814814815, + "acc_norm_stderr": 0.0263357394040558 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.03710185726119995, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.03710185726119995 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5174129353233831, + "acc_stderr": 0.03533389234739245, + "acc_norm": 0.5174129353233831, + "acc_norm_stderr": 0.03533389234739245 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3815028901734104, + "acc_stderr": 0.03703851193099522, + "acc_norm": 0.3815028901734104, + "acc_norm_stderr": 0.03703851193099522 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2566137566137566, + "acc_stderr": 0.022494510767503154, + "acc_norm": 0.2566137566137566, + "acc_norm_stderr": 0.022494510767503154 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.039420826399272135, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.039420826399272135 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.31, + "acc_stderr": 0.046482319871173156, + "acc_norm": 0.31, + "acc_norm_stderr": 0.046482319871173156 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.4884393063583815, + "acc_stderr": 0.026911898686377927, + "acc_norm": 0.4884393063583815, + "acc_norm_stderr": 0.026911898686377927 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4601226993865031, + "acc_stderr": 0.0391585729143697, + "acc_norm": 0.4601226993865031, + "acc_norm_stderr": 0.0391585729143697 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4537037037037037, + "acc_stderr": 0.027701228468542602, + "acc_norm": 0.4537037037037037, + "acc_norm_stderr": 0.027701228468542602 + }, + 
"harness|ko_mmlu_college_mathematics|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.49740932642487046, + "acc_stderr": 0.03608390745384488, + "acc_norm": 0.49740932642487046, + "acc_norm_stderr": 0.03608390745384488 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.04227054451232199, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.04227054451232199 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5009174311926605, + "acc_stderr": 0.021437287056051208, + "acc_norm": 0.5009174311926605, + "acc_norm_stderr": 0.021437287056051208 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.03932537680392871, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.03932537680392871 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.369281045751634, + "acc_stderr": 0.027634176689602653, + "acc_norm": 0.369281045751634, + "acc_norm_stderr": 0.027634176689602653 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6115702479338843, + "acc_stderr": 0.04449270350068383, + "acc_norm": 0.6115702479338843, + "acc_norm_stderr": 0.04449270350068383 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.35526315789473684, + "acc_stderr": 0.038947344870133176, + "acc_norm": 0.35526315789473684, + "acc_norm_stderr": 0.038947344870133176 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.38235294117647056, + "acc_stderr": 0.019659922493623333, + "acc_norm": 0.38235294117647056, + "acc_norm_stderr": 0.019659922493623333 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.32978723404255317, + "acc_stderr": 0.0280459469420424, + "acc_norm": 0.32978723404255317, 
+ "acc_norm_stderr": 0.0280459469420424 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.21428571428571427, + "acc_stderr": 0.038946411200447915, + "acc_norm": 0.21428571428571427, + "acc_norm_stderr": 0.038946411200447915 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2777777777777778, + "acc_stderr": 0.030546745264953195, + "acc_norm": 0.2777777777777778, + "acc_norm_stderr": 0.030546745264953195 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.47, + "acc_stderr": 0.050161355804659205, + "acc_norm": 0.47, + "acc_norm_stderr": 0.050161355804659205 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.41911764705882354, + "acc_stderr": 0.029972807170464622, + "acc_norm": 0.41911764705882354, + "acc_norm_stderr": 0.029972807170464622 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4326530612244898, + "acc_stderr": 0.03171752824062664, + "acc_norm": 0.4326530612244898, + "acc_norm_stderr": 0.03171752824062664 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5822784810126582, + "acc_stderr": 0.032103530322412685, + "acc_norm": 0.5822784810126582, + "acc_norm_stderr": 0.032103530322412685 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.31421121251629724, + "acc_stderr": 0.011855911587048231, + "acc_norm": 0.31421121251629724, + "acc_norm_stderr": 0.011855911587048231 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.4803921568627451, + "acc_stderr": 0.03506612560524866, + "acc_norm": 0.4803921568627451, + "acc_norm_stderr": 0.03506612560524866 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 
0.49696969696969695, + "acc_stderr": 0.03904272341431857, + "acc_norm": 0.49696969696969695, + "acc_norm_stderr": 0.03904272341431857 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2766217870257038, + "mc1_stderr": 0.015659605755326905, + "mc2": 0.43914355060529525, + "mc2_stderr": 0.014998093589375303 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.4462809917355372, + "acc_stderr": 0.017090852631668336, + "acc_norm": 0.58913813459268, + "acc_norm_stderr": 0.01691497276784105 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + 
"harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "zomd/AISquare-Instruct-llama2-koen-13b-v0.9.17", + "model_sha": "b35507f09656e2ba312bd9e0c491455aceee9a7e", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/zomd/AISquare-Instruct-llama2-koen-13b-v0.9.19/result_2023-12-06 13:30:15.json b/zomd/AISquare-Instruct-llama2-koen-13b-v0.9.19/result_2023-12-06 13:30:15.json new file mode 100644 index 0000000000000000000000000000000000000000..7bf5fce07eda03017cd13b784b7a0eea8eaf7326 --- /dev/null +++ b/zomd/AISquare-Instruct-llama2-koen-13b-v0.9.19/result_2023-12-06 13:30:15.json @@ -0,0 +1,444 @@ +{ + "results": { + 
"harness|ko_arc_challenge|25": { + "acc": 0.3993174061433447, + "acc_stderr": 0.014312094557946704, + "acc_norm": 0.45563139931740615, + "acc_norm_stderr": 0.014553749939306866 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4344752041426011, + "acc_stderr": 0.004946748608271345, + "acc_norm": 0.5835490938060147, + "acc_norm_stderr": 0.004919626380645508 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.45614035087719296, + "acc_stderr": 0.03820042586602966, + "acc_norm": 0.45614035087719296, + "acc_norm_stderr": 0.03820042586602966 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5436893203883495, + "acc_stderr": 0.049318019942204146, + "acc_norm": 0.5436893203883495, + "acc_norm_stderr": 0.049318019942204146 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5466155810983397, + "acc_stderr": 0.017802087135850308, + "acc_norm": 0.5466155810983397, + "acc_norm_stderr": 0.017802087135850308 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4074074074074074, + "acc_stderr": 0.04244633238353229, + "acc_norm": 0.4074074074074074, + "acc_norm_stderr": 0.04244633238353229 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.27, + "acc_stderr": 0.0446196043338474, + "acc_norm": 0.27, + "acc_norm_stderr": 0.0446196043338474 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.41702127659574467, + "acc_stderr": 0.03223276266711712, + "acc_norm": 0.41702127659574467, + "acc_norm_stderr": 0.03223276266711712 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39156626506024095, + "acc_stderr": 0.03799857454479637, + "acc_norm": 0.39156626506024095, + "acc_norm_stderr": 0.03799857454479637 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5016077170418006, + "acc_stderr": 0.02839794490780661, + "acc_norm": 0.5016077170418006, + "acc_norm_stderr": 0.02839794490780661 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.515695067264574, + "acc_stderr": 0.0335412657542081, + "acc_norm": 0.515695067264574, + "acc_norm_stderr": 0.0335412657542081 + }, + 
"harness|ko_mmlu_human_sexuality|5": { + "acc": 0.4580152671755725, + "acc_stderr": 0.04369802690578756, + "acc_norm": 0.4580152671755725, + "acc_norm_stderr": 0.04369802690578756 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.41, + "acc_stderr": 0.049431107042371025, + "acc_norm": 0.41, + "acc_norm_stderr": 0.049431107042371025 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.5656565656565656, + "acc_stderr": 0.03531505879359182, + "acc_norm": 0.5656565656565656, + "acc_norm_stderr": 0.03531505879359182 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.35172413793103446, + "acc_stderr": 0.03979236637497412, + "acc_norm": 0.35172413793103446, + "acc_norm_stderr": 0.03979236637497412 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.22549019607843138, + "acc_stderr": 0.041583075330832865, + "acc_norm": 0.22549019607843138, + "acc_norm_stderr": 0.041583075330832865 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4369747899159664, + "acc_stderr": 0.032219436365661956, + "acc_norm": 0.4369747899159664, + "acc_norm_stderr": 0.032219436365661956 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.43846153846153846, + "acc_stderr": 0.02515826601686855, + "acc_norm": 0.43846153846153846, + "acc_norm_stderr": 0.02515826601686855 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.57, + "acc_stderr": 0.049756985195624284, + "acc_norm": 0.57, + "acc_norm_stderr": 0.049756985195624284 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.5, + "acc_stderr": 0.04833682445228318, + "acc_norm": 0.5, + "acc_norm_stderr": 0.04833682445228318 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.42857142857142855, + "acc_stderr": 0.034819048444388045, + "acc_norm": 0.42857142857142855, + "acc_norm_stderr": 0.034819048444388045 + }, + 
"harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4612903225806452, + "acc_stderr": 0.02835863485983692, + "acc_norm": 0.4612903225806452, + "acc_norm_stderr": 0.02835863485983692 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.6410256410256411, + "acc_stderr": 0.03142616993791924, + "acc_norm": 0.6410256410256411, + "acc_norm_stderr": 0.03142616993791924 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.4528301886792453, + "acc_stderr": 0.03063562795796182, + "acc_norm": 0.4528301886792453, + "acc_norm_stderr": 0.03063562795796182 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5545454545454546, + "acc_stderr": 0.047605488214603246, + "acc_norm": 0.5545454545454546, + "acc_norm_stderr": 0.047605488214603246 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.24444444444444444, + "acc_stderr": 0.026202766534652148, + "acc_norm": 0.24444444444444444, + "acc_norm_stderr": 0.026202766534652148 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.2913907284768212, + "acc_stderr": 0.037101857261199946, + "acc_norm": 0.2913907284768212, + "acc_norm_stderr": 0.037101857261199946 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5671641791044776, + "acc_stderr": 0.03503490923673282, + "acc_norm": 0.5671641791044776, + "acc_norm_stderr": 0.03503490923673282 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.3872832369942196, + "acc_stderr": 0.03714325906302064, + "acc_norm": 0.3872832369942196, + "acc_norm_stderr": 0.03714325906302064 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.2804232804232804, + "acc_stderr": 0.02313528797432564, + "acc_norm": 0.2804232804232804, + "acc_norm_stderr": 0.02313528797432564 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.3680555555555556, + "acc_stderr": 0.04032999053960718, + "acc_norm": 0.3680555555555556, + "acc_norm_stderr": 0.04032999053960718 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + 
"acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.63, + "acc_stderr": 0.04852365870939101, + "acc_norm": 0.63, + "acc_norm_stderr": 0.04852365870939101 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.47109826589595377, + "acc_stderr": 0.026874085883518348, + "acc_norm": 0.47109826589595377, + "acc_norm_stderr": 0.026874085883518348 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4785276073619632, + "acc_stderr": 0.0392474687675113, + "acc_norm": 0.4785276073619632, + "acc_norm_stderr": 0.0392474687675113 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.027801656212323667, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.027801656212323667 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.5284974093264249, + "acc_stderr": 0.036025735712884414, + "acc_norm": 0.5284974093264249, + "acc_norm_stderr": 0.036025735712884414 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.2807017543859649, + "acc_stderr": 0.042270544512321984, + "acc_norm": 0.2807017543859649, + "acc_norm_stderr": 0.042270544512321984 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.5834862385321101, + "acc_stderr": 0.021136376504030878, + "acc_norm": 0.5834862385321101, + "acc_norm_stderr": 0.021136376504030878 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.042163702135578345, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.042163702135578345 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4019607843137255, + "acc_stderr": 0.02807415894760065, + "acc_norm": 0.4019607843137255, + "acc_norm_stderr": 0.02807415894760065 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.39, + "acc_stderr": 
0.04902071300001975, + "acc_norm": 0.39, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6115702479338843, + "acc_stderr": 0.04449270350068382, + "acc_norm": 0.6115702479338843, + "acc_norm_stderr": 0.04449270350068382 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.40789473684210525, + "acc_stderr": 0.03999309712777473, + "acc_norm": 0.40789473684210525, + "acc_norm_stderr": 0.03999309712777473 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.369281045751634, + "acc_stderr": 0.019524316744866346, + "acc_norm": 0.369281045751634, + "acc_norm_stderr": 0.019524316744866346 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3191489361702128, + "acc_stderr": 0.0278079901413202, + "acc_norm": 0.3191489361702128, + "acc_norm_stderr": 0.0278079901413202 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.21428571428571427, + "acc_stderr": 0.038946411200447915, + "acc_norm": 0.21428571428571427, + "acc_norm_stderr": 0.038946411200447915 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.35185185185185186, + "acc_stderr": 0.032568505702936464, + "acc_norm": 0.35185185185185186, + "acc_norm_stderr": 0.032568505702936464 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2424581005586592, + "acc_stderr": 0.01433352205921789, + "acc_norm": 0.2424581005586592, + "acc_norm_stderr": 0.01433352205921789 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.37, + "acc_stderr": 0.048523658709391, + "acc_norm": 0.37, + "acc_norm_stderr": 0.048523658709391 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.44, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.44, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4007352941176471, + "acc_stderr": 0.029768263528933102, + "acc_norm": 0.4007352941176471, + "acc_norm_stderr": 0.029768263528933102 + }, + "harness|ko_mmlu_security_studies|5": { + 
"acc": 0.4897959183673469, + "acc_stderr": 0.03200255347893782, + "acc_norm": 0.4897959183673469, + "acc_norm_stderr": 0.03200255347893782 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.620253164556962, + "acc_stderr": 0.031591887529658504, + "acc_norm": 0.620253164556962, + "acc_norm_stderr": 0.031591887529658504 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3344198174706649, + "acc_stderr": 0.012049668983214941, + "acc_norm": 0.3344198174706649, + "acc_norm_stderr": 0.012049668983214941 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5196078431372549, + "acc_stderr": 0.03506612560524866, + "acc_norm": 0.5196078431372549, + "acc_norm_stderr": 0.03506612560524866 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5333333333333333, + "acc_stderr": 0.03895658065271847, + "acc_norm": 0.5333333333333333, + "acc_norm_stderr": 0.03895658065271847 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.2864137086903305, + "mc1_stderr": 0.01582614243950234, + "mc2": 0.4624651498702573, + "mc2_stderr": 0.015067280556431393 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.45454545454545453, + "acc_stderr": 0.017119172208061504, + "acc_norm": 0.5584415584415584, + "acc_norm_stderr": 0.017072525875563106 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + 
"harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + 
"harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "zomd/AISquare-Instruct-llama2-koen-13b-v0.9.19", + "model_sha": "280db5f17c8ff2e8b48af84c6dacad00bc5da667", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/zomd/AISquare-Instruct-yi-ko-6b-v0.9.27/result_2023-12-21 04:52:13.json b/zomd/AISquare-Instruct-yi-ko-6b-v0.9.27/result_2023-12-21 04:52:13.json new file mode 100644 index 0000000000000000000000000000000000000000..e30907c597f9542e3baba474da1c940c96d90ce8 --- /dev/null +++ b/zomd/AISquare-Instruct-yi-ko-6b-v0.9.27/result_2023-12-21 04:52:13.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.36860068259385664, + "acc_stderr": 0.014097810678042192, + "acc_norm": 0.44283276450511944, + "acc_norm_stderr": 0.014515573873348906 + }, + "harness|ko_hellaswag|10": { + "acc": 0.41814379605656243, + "acc_stderr": 0.004922459820434776, + "acc_norm": 0.5555666201951802, + "acc_norm_stderr": 0.004958872288442148 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.52046783625731, + "acc_stderr": 0.038316105328219316, + "acc_norm": 0.52046783625731, + "acc_norm_stderr": 0.038316105328219316 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5922330097087378, + "acc_stderr": 0.048657775704107696, + "acc_norm": 0.5922330097087378, + "acc_norm_stderr": 0.048657775704107696 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5632183908045977, + "acc_stderr": 0.01773647083780069, + "acc_norm": 0.5632183908045977, + "acc_norm_stderr": 0.01773647083780069 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4666666666666667, + "acc_stderr": 0.04309732901036354, + "acc_norm": 0.4666666666666667, + "acc_norm_stderr": 0.04309732901036354 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + 
"acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.425531914893617, + "acc_stderr": 0.03232146916224469, + "acc_norm": 0.425531914893617, + "acc_norm_stderr": 0.03232146916224469 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.39759036144578314, + "acc_stderr": 0.03809973084540219, + "acc_norm": 0.39759036144578314, + "acc_norm_stderr": 0.03809973084540219 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5016077170418006, + "acc_stderr": 0.02839794490780661, + "acc_norm": 0.5016077170418006, + "acc_norm_stderr": 0.02839794490780661 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4977578475336323, + "acc_stderr": 0.033557465352232634, + "acc_norm": 0.4977578475336323, + "acc_norm_stderr": 0.033557465352232634 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5343511450381679, + "acc_stderr": 0.04374928560599738, + "acc_norm": 0.5343511450381679, + "acc_norm_stderr": 0.04374928560599738 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.03427308652999937, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.03427308652999937 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.47586206896551725, + "acc_stderr": 0.041618085035015295, + "acc_norm": 0.47586206896551725, + "acc_norm_stderr": 0.041618085035015295 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.27450980392156865, + "acc_stderr": 0.044405219061793275, + "acc_norm": 0.27450980392156865, + "acc_norm_stderr": 0.044405219061793275 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.4957983193277311, + "acc_stderr": 0.0324773433444811, + "acc_norm": 0.4957983193277311, + "acc_norm_stderr": 0.0324773433444811 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4846153846153846, + 
"acc_stderr": 0.02533900301010653, + "acc_norm": 0.4846153846153846, + "acc_norm_stderr": 0.02533900301010653 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.04826217294139894, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.04826217294139894 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4088669950738916, + "acc_stderr": 0.034590588158832314, + "acc_norm": 0.4088669950738916, + "acc_norm_stderr": 0.034590588158832314 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4645161290322581, + "acc_stderr": 0.028372287797962952, + "acc_norm": 0.4645161290322581, + "acc_norm_stderr": 0.028372287797962952 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7264957264957265, + "acc_stderr": 0.02920254015343118, + "acc_norm": 0.7264957264957265, + "acc_norm_stderr": 0.02920254015343118 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5132075471698113, + "acc_stderr": 0.030762134874500482, + "acc_norm": 0.5132075471698113, + "acc_norm_stderr": 0.030762134874500482 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5363636363636364, + "acc_stderr": 0.04776449162396197, + "acc_norm": 0.5363636363636364, + "acc_norm_stderr": 0.04776449162396197 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.34814814814814815, + "acc_stderr": 0.02904560029061626, + "acc_norm": 0.34814814814814815, + "acc_norm_stderr": 0.02904560029061626 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33774834437086093, + "acc_stderr": 0.038615575462551684, + "acc_norm": 0.33774834437086093, + "acc_norm_stderr": 0.038615575462551684 + }, + "harness|ko_mmlu_sociology|5": { + 
"acc": 0.6467661691542289, + "acc_stderr": 0.03379790611796777, + "acc_norm": 0.6467661691542289, + "acc_norm_stderr": 0.03379790611796777 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4624277456647399, + "acc_stderr": 0.038016851045244604, + "acc_norm": 0.4624277456647399, + "acc_norm_stderr": 0.038016851045244604 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.335978835978836, + "acc_stderr": 0.024326310529149145, + "acc_norm": 0.335978835978836, + "acc_norm_stderr": 0.024326310529149145 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4236111111111111, + "acc_stderr": 0.04132125019723368, + "acc_norm": 0.4236111111111111, + "acc_norm_stderr": 0.04132125019723368 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.64, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.64, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5086705202312138, + "acc_stderr": 0.02691504735536981, + "acc_norm": 0.5086705202312138, + "acc_norm_stderr": 0.02691504735536981 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.39263803680981596, + "acc_stderr": 0.03836740907831027, + "acc_norm": 0.39263803680981596, + "acc_norm_stderr": 0.03836740907831027 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5061728395061729, + "acc_stderr": 0.02781862396258329, + "acc_norm": 0.5061728395061729, + "acc_norm_stderr": 0.02781862396258329 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6113989637305699, + "acc_stderr": 0.03517739796373132, + "acc_norm": 0.6113989637305699, + "acc_norm_stderr": 0.03517739796373132 + }, + 
"harness|ko_mmlu_econometrics|5": { + "acc": 0.30701754385964913, + "acc_stderr": 0.043391383225798594, + "acc_norm": 0.30701754385964913, + "acc_norm_stderr": 0.043391383225798594 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6146788990825688, + "acc_stderr": 0.020865850852794108, + "acc_norm": 0.6146788990825688, + "acc_norm_stderr": 0.020865850852794108 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.29365079365079366, + "acc_stderr": 0.040735243221471255, + "acc_norm": 0.29365079365079366, + "acc_norm_stderr": 0.040735243221471255 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5098039215686274, + "acc_stderr": 0.028624412550167958, + "acc_norm": 0.5098039215686274, + "acc_norm_stderr": 0.028624412550167958 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6528925619834711, + "acc_stderr": 0.04345724570292534, + "acc_norm": 0.6528925619834711, + "acc_norm_stderr": 0.04345724570292534 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4934210526315789, + "acc_stderr": 0.040685900502249704, + "acc_norm": 0.4934210526315789, + "acc_norm_stderr": 0.040685900502249704 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4019607843137255, + "acc_stderr": 0.019835176484375387, + "acc_norm": 0.4019607843137255, + "acc_norm_stderr": 0.019835176484375387 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3120567375886525, + "acc_stderr": 0.02764012054516993, + "acc_norm": 0.3120567375886525, + "acc_norm_stderr": 0.02764012054516993 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.33035714285714285, + "acc_stderr": 0.04464285714285713, + "acc_norm": 0.33035714285714285, + "acc_norm_stderr": 0.04464285714285713 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2824074074074074, + "acc_stderr": 0.030701372111510906, + "acc_norm": 
0.2824074074074074, + "acc_norm_stderr": 0.030701372111510906 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2446927374301676, + "acc_stderr": 0.014378169884098431, + "acc_norm": 0.2446927374301676, + "acc_norm_stderr": 0.014378169884098431 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.38, + "acc_stderr": 0.04878317312145632, + "acc_norm": 0.38, + "acc_norm_stderr": 0.04878317312145632 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4264705882352941, + "acc_stderr": 0.03004261583271486, + "acc_norm": 0.4264705882352941, + "acc_norm_stderr": 0.03004261583271486 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4857142857142857, + "acc_stderr": 0.03199615232806287, + "acc_norm": 0.4857142857142857, + "acc_norm_stderr": 0.03199615232806287 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.5991561181434599, + "acc_stderr": 0.031900803894732356, + "acc_norm": 0.5991561181434599, + "acc_norm_stderr": 0.031900803894732356 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3226857887874837, + "acc_stderr": 0.011940264193195986, + "acc_norm": 0.3226857887874837, + "acc_norm_stderr": 0.011940264193195986 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5588235294117647, + "acc_stderr": 0.034849415144292316, + "acc_norm": 0.5588235294117647, + "acc_norm_stderr": 0.034849415144292316 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5696969696969697, + "acc_stderr": 0.038662259628790774, + "acc_norm": 0.5696969696969697, + "acc_norm_stderr": 0.038662259628790774 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3084455324357405, + "mc1_stderr": 0.01616803938315687, + "mc2": 0.46606263672117376, + "mc2_stderr": 0.01539063853395172 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5171192443919717, 
+ "acc_stderr": 0.017180275246085633, + "acc_norm": 0.5572609208972845, + "acc_norm_stderr": 0.017077254131556224 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + 
"harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "zomd/AISquare-Instruct-yi-ko-6b-v0.9.27", + "model_sha": "5f39720e3ac0bcebcdeb3328044f8d85da2aa35c", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/zomd/AISquare-Instruct-yi-ko-6b-v0.9.28/result_2023-12-21 04:52:20.json b/zomd/AISquare-Instruct-yi-ko-6b-v0.9.28/result_2023-12-21 04:52:20.json new file mode 100644 index 0000000000000000000000000000000000000000..a07ff69241afd0fe669e86aac4146311a6d1adce --- /dev/null +++ b/zomd/AISquare-Instruct-yi-ko-6b-v0.9.28/result_2023-12-21 04:52:20.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.37542662116040953, + "acc_stderr": 0.014150631435111728, + "acc_norm": 0.4445392491467577, + "acc_norm_stderr": 0.01452122640562708 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4130651264688309, + "acc_stderr": 0.0049137803474988756, + "acc_norm": 0.5542720573590918, + "acc_norm_stderr": 0.004960299952519406 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5263157894736842, + 
"acc_stderr": 0.03829509868994727, + "acc_norm": 0.5263157894736842, + "acc_norm_stderr": 0.03829509868994727 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.6019417475728155, + "acc_stderr": 0.04846748253977239, + "acc_norm": 0.6019417475728155, + "acc_norm_stderr": 0.04846748253977239 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5759897828863346, + "acc_stderr": 0.017672263329084222, + "acc_norm": 0.5759897828863346, + "acc_norm_stderr": 0.017672263329084222 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45925925925925926, + "acc_stderr": 0.04304979692464244, + "acc_norm": 0.45925925925925926, + "acc_norm_stderr": 0.04304979692464244 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.29, + "acc_stderr": 0.04560480215720683, + "acc_norm": 0.29, + "acc_norm_stderr": 0.04560480215720683 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.425531914893617, + "acc_stderr": 0.03232146916224469, + "acc_norm": 0.425531914893617, + "acc_norm_stderr": 0.03232146916224469 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.4036144578313253, + "acc_stderr": 0.03819486140758397, + "acc_norm": 0.4036144578313253, + "acc_norm_stderr": 0.03819486140758397 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5176848874598071, + "acc_stderr": 0.028380322849077138, + "acc_norm": 0.5176848874598071, + "acc_norm_stderr": 0.028380322849077138 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4977578475336323, + "acc_stderr": 0.033557465352232634, + "acc_norm": 0.4977578475336323, + "acc_norm_stderr": 0.033557465352232634 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5343511450381679, + "acc_stderr": 0.04374928560599738, + "acc_norm": 0.5343511450381679, + "acc_norm_stderr": 0.04374928560599738 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6515151515151515, + 
"acc_stderr": 0.03394853965156402, + "acc_norm": 0.6515151515151515, + "acc_norm_stderr": 0.03394853965156402 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4827586206896552, + "acc_stderr": 0.04164188720169377, + "acc_norm": 0.4827586206896552, + "acc_norm_stderr": 0.04164188720169377 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.2647058823529412, + "acc_stderr": 0.043898699568087785, + "acc_norm": 0.2647058823529412, + "acc_norm_stderr": 0.043898699568087785 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5084033613445378, + "acc_stderr": 0.0324739027656967, + "acc_norm": 0.5084033613445378, + "acc_norm_stderr": 0.0324739027656967 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4794871794871795, + "acc_stderr": 0.025329663163489943, + "acc_norm": 0.4794871794871795, + "acc_norm_stderr": 0.025329663163489943 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.58, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.58, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.48148148148148145, + "acc_stderr": 0.04830366024635331, + "acc_norm": 0.48148148148148145, + "acc_norm_stderr": 0.04830366024635331 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.41379310344827586, + "acc_stderr": 0.03465304488406796, + "acc_norm": 0.41379310344827586, + "acc_norm_stderr": 0.03465304488406796 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.5032258064516129, + "acc_stderr": 0.02844341422643831, + "acc_norm": 0.5032258064516129, + "acc_norm_stderr": 0.02844341422643831 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7136752136752137, + "acc_stderr": 0.029614323690456648, + "acc_norm": 0.7136752136752137, + "acc_norm_stderr": 0.029614323690456648 + }, + 
"harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5018867924528302, + "acc_stderr": 0.030772653642075664, + "acc_norm": 0.5018867924528302, + "acc_norm_stderr": 0.030772653642075664 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5454545454545454, + "acc_stderr": 0.04769300568972745, + "acc_norm": 0.5454545454545454, + "acc_norm_stderr": 0.04769300568972745 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.3037037037037037, + "acc_stderr": 0.028037929969114986, + "acc_norm": 0.3037037037037037, + "acc_norm_stderr": 0.028037929969114986 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3708609271523179, + "acc_stderr": 0.03943966699183629, + "acc_norm": 0.3708609271523179, + "acc_norm_stderr": 0.03943966699183629 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6218905472636815, + "acc_stderr": 0.03428867848778658, + "acc_norm": 0.6218905472636815, + "acc_norm_stderr": 0.03428867848778658 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.44508670520231214, + "acc_stderr": 0.03789401760283648, + "acc_norm": 0.44508670520231214, + "acc_norm_stderr": 0.03789401760283648 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3492063492063492, + "acc_stderr": 0.024552292209342654, + "acc_norm": 0.3492063492063492, + "acc_norm_stderr": 0.024552292209342654 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4305555555555556, + "acc_stderr": 0.04140685639111502, + "acc_norm": 0.4305555555555556, + "acc_norm_stderr": 0.04140685639111502 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.64, + "acc_stderr": 0.048241815132442176, + "acc_norm": 0.64, + "acc_norm_stderr": 0.048241815132442176 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5317919075144508, + "acc_stderr": 0.026864624366756653, + "acc_norm": 0.5317919075144508, + 
"acc_norm_stderr": 0.026864624366756653 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.44785276073619634, + "acc_stderr": 0.03906947479456601, + "acc_norm": 0.44785276073619634, + "acc_norm_stderr": 0.03906947479456601 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5061728395061729, + "acc_stderr": 0.027818623962583295, + "acc_norm": 0.5061728395061729, + "acc_norm_stderr": 0.027818623962583295 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.046882617226215034, + "acc_norm": 0.32, + "acc_norm_stderr": 0.046882617226215034 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6580310880829016, + "acc_stderr": 0.03423465100104283, + "acc_norm": 0.6580310880829016, + "acc_norm_stderr": 0.03423465100104283 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3157894736842105, + "acc_stderr": 0.043727482902780085, + "acc_norm": 0.3157894736842105, + "acc_norm_stderr": 0.043727482902780085 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6091743119266055, + "acc_stderr": 0.020920058346111072, + "acc_norm": 0.6091743119266055, + "acc_norm_stderr": 0.020920058346111072 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.31746031746031744, + "acc_stderr": 0.04163453031302859, + "acc_norm": 0.31746031746031744, + "acc_norm_stderr": 0.04163453031302859 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4869281045751634, + "acc_stderr": 0.028620130800700246, + "acc_norm": 0.4869281045751634, + "acc_norm_stderr": 0.028620130800700246 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.59, + "acc_stderr": 0.04943110704237102, + "acc_norm": 0.59, + "acc_norm_stderr": 0.04943110704237102 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6446280991735537, + "acc_stderr": 0.0436923632657398, + "acc_norm": 0.6446280991735537, + "acc_norm_stderr": 0.0436923632657398 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4868421052631579, + "acc_stderr": 0.04067533136309173, + 
"acc_norm": 0.4868421052631579, + "acc_norm_stderr": 0.04067533136309173 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.42320261437908496, + "acc_stderr": 0.019987809769482064, + "acc_norm": 0.42320261437908496, + "acc_norm_stderr": 0.019987809769482064 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.2978723404255319, + "acc_stderr": 0.02728160834446941, + "acc_norm": 0.2978723404255319, + "acc_norm_stderr": 0.02728160834446941 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.3392857142857143, + "acc_stderr": 0.04493949068613541, + "acc_norm": 0.3392857142857143, + "acc_norm_stderr": 0.04493949068613541 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.30092592592592593, + "acc_stderr": 0.03128039084329883, + "acc_norm": 0.30092592592592593, + "acc_norm_stderr": 0.03128039084329883 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.2994413407821229, + "acc_stderr": 0.015318257745976708, + "acc_norm": 0.2994413407821229, + "acc_norm_stderr": 0.015318257745976708 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.51, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.51, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.4485294117647059, + "acc_stderr": 0.030211479609121596, + "acc_norm": 0.4485294117647059, + "acc_norm_stderr": 0.030211479609121596 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.46530612244897956, + "acc_stderr": 0.03193207024425314, + "acc_norm": 0.46530612244897956, + "acc_norm_stderr": 0.03193207024425314 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6244725738396625, + "acc_stderr": 0.03152256243091157, + "acc_norm": 0.6244725738396625, + "acc_norm_stderr": 0.03152256243091157 + }, + 
"harness|ko_mmlu_professional_law|5": { + "acc": 0.33376792698826596, + "acc_stderr": 0.012043812655846144, + "acc_norm": 0.33376792698826596, + "acc_norm_stderr": 0.012043812655846144 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5735294117647058, + "acc_stderr": 0.034711579079534254, + "acc_norm": 0.5735294117647058, + "acc_norm_stderr": 0.034711579079534254 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5818181818181818, + "acc_stderr": 0.038517163193983926, + "acc_norm": 0.5818181818181818, + "acc_norm_stderr": 0.038517163193983926 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.33047735618115054, + "mc1_stderr": 0.016466769613698293, + "mc2": 0.48174146468286283, + "mc2_stderr": 0.015361505993239164 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5478158205430933, + "acc_stderr": 0.017111567130916796, + "acc_norm": 0.5867768595041323, + "acc_norm_stderr": 0.016929480234495226 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + 
"harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "zomd/AISquare-Instruct-yi-ko-6b-v0.9.28", + "model_sha": "ce572a092b5b580fb2a2e6c34bd038a6c1f209da", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff 
--git a/zomd/AISquare-Instruct-yi-ko-6b-v0.9.29/result_2023-12-22 04:28:57.json b/zomd/AISquare-Instruct-yi-ko-6b-v0.9.29/result_2023-12-22 04:28:57.json new file mode 100644 index 0000000000000000000000000000000000000000..31c44519d35c454dc719043f3530c208f3aa3199 --- /dev/null +++ b/zomd/AISquare-Instruct-yi-ko-6b-v0.9.29/result_2023-12-22 04:28:57.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.37542662116040953, + "acc_stderr": 0.014150631435111728, + "acc_norm": 0.439419795221843, + "acc_norm_stderr": 0.014503747823580122 + }, + "harness|ko_hellaswag|10": { + "acc": 0.41674965146385184, + "acc_stderr": 0.004920130733271777, + "acc_norm": 0.552778331009759, + "acc_norm_stderr": 0.004961904949171396 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5321637426900585, + "acc_stderr": 0.03826882417660368, + "acc_norm": 0.5321637426900585, + "acc_norm_stderr": 0.03826882417660368 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5825242718446602, + "acc_stderr": 0.048828405482122375, + "acc_norm": 0.5825242718446602, + "acc_norm_stderr": 0.048828405482122375 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5644955300127714, + "acc_stderr": 0.017730589927926598, + "acc_norm": 0.5644955300127714, + "acc_norm_stderr": 0.017730589927926598 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.4740740740740741, + "acc_stderr": 0.04313531696750574, + "acc_norm": 0.4740740740740741, + "acc_norm_stderr": 0.04313531696750574 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.31, + "acc_stderr": 0.04648231987117316, + "acc_norm": 0.31, + "acc_norm_stderr": 0.04648231987117316 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.41702127659574467, + "acc_stderr": 0.03223276266711712, + "acc_norm": 0.41702127659574467, + "acc_norm_stderr": 0.03223276266711712 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.3855421686746988, + "acc_stderr": 0.03789134424611549, + "acc_norm": 0.3855421686746988, + "acc_norm_stderr": 
0.03789134424611549 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.4919614147909968, + "acc_stderr": 0.028394421370984538, + "acc_norm": 0.4919614147909968, + "acc_norm_stderr": 0.028394421370984538 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.5067264573991032, + "acc_stderr": 0.03355476596234354, + "acc_norm": 0.5067264573991032, + "acc_norm_stderr": 0.03355476596234354 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5114503816793893, + "acc_stderr": 0.043841400240780176, + "acc_norm": 0.5114503816793893, + "acc_norm_stderr": 0.043841400240780176 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.42, + "acc_stderr": 0.049604496374885836, + "acc_norm": 0.42, + "acc_norm_stderr": 0.049604496374885836 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6262626262626263, + "acc_stderr": 0.03446897738659332, + "acc_norm": 0.6262626262626263, + "acc_norm_stderr": 0.03446897738659332 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.4689655172413793, + "acc_stderr": 0.04158632762097828, + "acc_norm": 0.4689655172413793, + "acc_norm_stderr": 0.04158632762097828 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.24509803921568626, + "acc_stderr": 0.04280105837364395, + "acc_norm": 0.24509803921568626, + "acc_norm_stderr": 0.04280105837364395 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.49159663865546216, + "acc_stderr": 0.0324739027656967, + "acc_norm": 0.49159663865546216, + "acc_norm_stderr": 0.0324739027656967 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.4948717948717949, + "acc_stderr": 0.025349672906838667, + "acc_norm": 0.4948717948717949, + "acc_norm_stderr": 0.025349672906838667 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.56, + "acc_stderr": 0.04988876515698589, + "acc_norm": 0.56, + "acc_norm_stderr": 0.04988876515698589 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.3, + "acc_stderr": 0.046056618647183814, + "acc_norm": 0.3, + 
"acc_norm_stderr": 0.046056618647183814 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.048262172941398944, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.048262172941398944 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4088669950738916, + "acc_stderr": 0.034590588158832314, + "acc_norm": 0.4088669950738916, + "acc_norm_stderr": 0.034590588158832314 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4806451612903226, + "acc_stderr": 0.0284226874043121, + "acc_norm": 0.4806451612903226, + "acc_norm_stderr": 0.0284226874043121 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7136752136752137, + "acc_stderr": 0.029614323690456648, + "acc_norm": 0.7136752136752137, + "acc_norm_stderr": 0.029614323690456648 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5169811320754717, + "acc_stderr": 0.030755120364119905, + "acc_norm": 0.5169811320754717, + "acc_norm_stderr": 0.030755120364119905 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5363636363636364, + "acc_stderr": 0.04776449162396197, + "acc_norm": 0.5363636363636364, + "acc_norm_stderr": 0.04776449162396197 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.34444444444444444, + "acc_stderr": 0.02897264888484427, + "acc_norm": 0.34444444444444444, + "acc_norm_stderr": 0.02897264888484427 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.33774834437086093, + "acc_stderr": 0.03861557546255168, + "acc_norm": 0.33774834437086093, + "acc_norm_stderr": 0.03861557546255168 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.5920398009950248, + "acc_stderr": 0.03475116365194092, + "acc_norm": 0.5920398009950248, + "acc_norm_stderr": 0.03475116365194092 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.4393063583815029, + "acc_stderr": 0.037842719328874674, + "acc_norm": 0.4393063583815029, + "acc_norm_stderr": 0.037842719328874674 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 
0.3386243386243386, + "acc_stderr": 0.024373197867983053, + "acc_norm": 0.3386243386243386, + "acc_norm_stderr": 0.024373197867983053 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4305555555555556, + "acc_stderr": 0.04140685639111502, + "acc_norm": 0.4305555555555556, + "acc_norm_stderr": 0.04140685639111502 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.29, + "acc_stderr": 0.045604802157206845, + "acc_norm": 0.29, + "acc_norm_stderr": 0.045604802157206845 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.62, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.62, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5202312138728323, + "acc_stderr": 0.026897049996382875, + "acc_norm": 0.5202312138728323, + "acc_norm_stderr": 0.026897049996382875 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.4233128834355828, + "acc_stderr": 0.038818912133343826, + "acc_norm": 0.4233128834355828, + "acc_norm_stderr": 0.038818912133343826 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.5185185185185185, + "acc_stderr": 0.027801656212323674, + "acc_norm": 0.5185185185185185, + "acc_norm_stderr": 0.027801656212323674 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.616580310880829, + "acc_stderr": 0.03508984236295342, + "acc_norm": 0.616580310880829, + "acc_norm_stderr": 0.03508984236295342 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04434600701584925, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04434600701584925 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6055045871559633, + "acc_stderr": 0.02095464210858749, + "acc_norm": 0.6055045871559633, + "acc_norm_stderr": 0.02095464210858749 + }, + "harness|ko_mmlu_formal_logic|5": { + 
"acc": 0.2698412698412698, + "acc_stderr": 0.03970158273235173, + "acc_norm": 0.2698412698412698, + "acc_norm_stderr": 0.03970158273235173 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.5065359477124183, + "acc_stderr": 0.028627470550556047, + "acc_norm": 0.5065359477124183, + "acc_norm_stderr": 0.028627470550556047 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.043913262867240704, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.043913262867240704 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.48026315789473684, + "acc_stderr": 0.040657710025626036, + "acc_norm": 0.48026315789473684, + "acc_norm_stderr": 0.040657710025626036 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4019607843137255, + "acc_stderr": 0.019835176484375383, + "acc_norm": 0.4019607843137255, + "acc_norm_stderr": 0.019835176484375383 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3120567375886525, + "acc_stderr": 0.02764012054516993, + "acc_norm": 0.3120567375886525, + "acc_norm_stderr": 0.02764012054516993 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.33035714285714285, + "acc_stderr": 0.04464285714285713, + "acc_norm": 0.33035714285714285, + "acc_norm_stderr": 0.04464285714285713 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2824074074074074, + "acc_stderr": 0.030701372111510906, + "acc_norm": 0.2824074074074074, + "acc_norm_stderr": 0.030701372111510906 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24692737430167597, + "acc_stderr": 0.014422292204808848, + "acc_norm": 0.24692737430167597, + "acc_norm_stderr": 0.014422292204808848 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.37, + "acc_stderr": 0.04852365870939099, + "acc_norm": 0.37, + "acc_norm_stderr": 0.04852365870939099 + }, 
+ "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.49, + "acc_stderr": 0.05024183937956912, + "acc_norm": 0.49, + "acc_norm_stderr": 0.05024183937956912 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.41911764705882354, + "acc_stderr": 0.029972807170464622, + "acc_norm": 0.41911764705882354, + "acc_norm_stderr": 0.029972807170464622 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.4326530612244898, + "acc_stderr": 0.03171752824062664, + "acc_norm": 0.4326530612244898, + "acc_norm_stderr": 0.03171752824062664 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6244725738396625, + "acc_stderr": 0.03152256243091157, + "acc_norm": 0.6244725738396625, + "acc_norm_stderr": 0.03152256243091157 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.3161668839634941, + "acc_stderr": 0.011875780894386583, + "acc_norm": 0.3161668839634941, + "acc_norm_stderr": 0.011875780894386583 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5588235294117647, + "acc_stderr": 0.034849415144292316, + "acc_norm": 0.5588235294117647, + "acc_norm_stderr": 0.034849415144292316 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.593939393939394, + "acc_stderr": 0.03834816355401181, + "acc_norm": 0.593939393939394, + "acc_norm_stderr": 0.03834816355401181 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3011015911872705, + "mc1_stderr": 0.016058999026100623, + "mc2": 0.45625171579944035, + "mc2_stderr": 0.015423026113357257 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5076741440377804, + "acc_stderr": 0.017188329219654273, + "acc_norm": 0.5560802833530106, + "acc_norm_stderr": 0.017081884623542543 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + 
"harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + 
"harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "zomd/AISquare-Instruct-yi-ko-6b-v0.9.29", + "model_sha": "caa53097be14c57d052d6d29841524fa96630892", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/zomd/AISquare-Instruct-yi-ko-6b-v0.9.30/result_2023-12-22 04:29:27.json b/zomd/AISquare-Instruct-yi-ko-6b-v0.9.30/result_2023-12-22 04:29:27.json new file mode 100644 index 0000000000000000000000000000000000000000..8e73fb0af41ee4bb87d2da87132b55b95925cb11 --- /dev/null +++ b/zomd/AISquare-Instruct-yi-ko-6b-v0.9.30/result_2023-12-22 04:29:27.json @@ -0,0 +1,444 @@ +{ + "results": { + "harness|ko_arc_challenge|25": { + "acc": 0.36860068259385664, + "acc_stderr": 0.014097810678042194, + "acc_norm": 0.43856655290102387, + "acc_norm_stderr": 0.014500682618212862 + }, + "harness|ko_hellaswag|10": { + "acc": 0.4140609440350528, + "acc_stderr": 0.004915524600627968, + "acc_norm": 0.5542720573590918, + "acc_norm_stderr": 0.0049602999525194084 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5087719298245614, + "acc_stderr": 0.03834234744164993, + "acc_norm": 0.5087719298245614, + "acc_norm_stderr": 0.03834234744164993 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5922330097087378, + "acc_stderr": 0.04865777570410769, + "acc_norm": 0.5922330097087378, + "acc_norm_stderr": 0.04865777570410769 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5798212005108557, + "acc_stderr": 0.017650651363078026, + 
"acc_norm": 0.5798212005108557, + "acc_norm_stderr": 0.017650651363078026 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45185185185185184, + "acc_stderr": 0.04299268905480863, + "acc_norm": 0.45185185185185184, + "acc_norm_stderr": 0.04299268905480863 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4340425531914894, + "acc_stderr": 0.03240038086792747, + "acc_norm": 0.4340425531914894, + "acc_norm_stderr": 0.03240038086792747 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.41566265060240964, + "acc_stderr": 0.038367221765980515, + "acc_norm": 0.41566265060240964, + "acc_norm_stderr": 0.038367221765980515 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5273311897106109, + "acc_stderr": 0.02835563356832818, + "acc_norm": 0.5273311897106109, + "acc_norm_stderr": 0.02835563356832818 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4977578475336323, + "acc_stderr": 0.033557465352232634, + "acc_norm": 0.4977578475336323, + "acc_norm_stderr": 0.033557465352232634 + }, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5419847328244275, + "acc_stderr": 0.04369802690578757, + "acc_norm": 0.5419847328244275, + "acc_norm_stderr": 0.04369802690578757 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.034273086529999344, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.034273086529999344 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5103448275862069, + "acc_stderr": 0.04165774775728762, + "acc_norm": 0.5103448275862069, + "acc_norm_stderr": 0.04165774775728762 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.29411764705882354, + "acc_stderr": 
0.04533838195929776, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.04533838195929776 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5210084033613446, + "acc_stderr": 0.032449808499900284, + "acc_norm": 0.5210084033613446, + "acc_norm_stderr": 0.032449808499900284 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.47435897435897434, + "acc_stderr": 0.02531764972644865, + "acc_norm": 0.47435897435897434, + "acc_norm_stderr": 0.02531764972644865 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.57, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.57, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4088669950738916, + "acc_stderr": 0.034590588158832314, + "acc_norm": 0.4088669950738916, + "acc_norm_stderr": 0.034590588158832314 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4935483870967742, + "acc_stderr": 0.02844163823354051, + "acc_norm": 0.4935483870967742, + "acc_norm_stderr": 0.02844163823354051 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7136752136752137, + "acc_stderr": 0.029614323690456648, + "acc_norm": 0.7136752136752137, + "acc_norm_stderr": 0.029614323690456648 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5056603773584906, + "acc_stderr": 0.030770900763851302, + "acc_norm": 0.5056603773584906, + "acc_norm_stderr": 0.030770900763851302 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5272727272727272, + "acc_stderr": 0.04782001791380061, + "acc_norm": 0.5272727272727272, + "acc_norm_stderr": 0.04782001791380061 + }, + 
"harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.31851851851851853, + "acc_stderr": 0.02840653309060846, + "acc_norm": 0.31851851851851853, + "acc_norm_stderr": 0.02840653309060846 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3509933774834437, + "acc_stderr": 0.03896981964257374, + "acc_norm": 0.3509933774834437, + "acc_norm_stderr": 0.03896981964257374 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6169154228855721, + "acc_stderr": 0.034375193373382504, + "acc_norm": 0.6169154228855721, + "acc_norm_stderr": 0.034375193373382504 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.45664739884393063, + "acc_stderr": 0.03798106566014498, + "acc_norm": 0.45664739884393063, + "acc_norm_stderr": 0.03798106566014498 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3386243386243386, + "acc_stderr": 0.024373197867983056, + "acc_norm": 0.3386243386243386, + "acc_norm_stderr": 0.024373197867983056 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.041553199555931467, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.041553199555931467 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.62, + "acc_stderr": 0.04878317312145633, + "acc_norm": 0.62, + "acc_norm_stderr": 0.04878317312145633 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5317919075144508, + "acc_stderr": 0.026864624366756656, + "acc_norm": 0.5317919075144508, + "acc_norm_stderr": 0.026864624366756656 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.44785276073619634, + "acc_stderr": 0.03906947479456601, + "acc_norm": 0.44785276073619634, + "acc_norm_stderr": 0.03906947479456601 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4691358024691358, + "acc_stderr": 0.027767689606833942, + "acc_norm": 0.4691358024691358, + 
"acc_norm_stderr": 0.027767689606833942 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6062176165803109, + "acc_stderr": 0.035260770955482405, + "acc_norm": 0.6062176165803109, + "acc_norm_stderr": 0.035260770955482405 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04434600701584925, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04434600701584925 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6128440366972477, + "acc_stderr": 0.02088423199264345, + "acc_norm": 0.6128440366972477, + "acc_norm_stderr": 0.02088423199264345 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.0393253768039287, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.0393253768039287 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4803921568627451, + "acc_stderr": 0.028607893699576063, + "acc_norm": 0.4803921568627451, + "acc_norm_stderr": 0.028607893699576063 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 0.61, + "acc_stderr": 0.049020713000019756, + "acc_norm": 0.61, + "acc_norm_stderr": 0.049020713000019756 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.628099173553719, + "acc_stderr": 0.044120158066245044, + "acc_norm": 0.628099173553719, + "acc_norm_stderr": 0.044120158066245044 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4934210526315789, + "acc_stderr": 0.040685900502249704, + "acc_norm": 0.4934210526315789, + "acc_norm_stderr": 0.040685900502249704 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4117647058823529, + "acc_stderr": 0.019910377463105932, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.019910377463105932 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3262411347517731, + "acc_stderr": 
0.027968453043563168, + "acc_norm": 0.3262411347517731, + "acc_norm_stderr": 0.027968453043563168 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.32142857142857145, + "acc_stderr": 0.04432804055291517, + "acc_norm": 0.32142857142857145, + "acc_norm_stderr": 0.04432804055291517 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2824074074074074, + "acc_stderr": 0.030701372111510923, + "acc_norm": 0.2824074074074074, + "acc_norm_stderr": 0.030701372111510923 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24916201117318434, + "acc_stderr": 0.014465893829859936, + "acc_norm": 0.24916201117318434, + "acc_norm_stderr": 0.014465893829859936 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.45955882352941174, + "acc_stderr": 0.030273325077345755, + "acc_norm": 0.45955882352941174, + "acc_norm_stderr": 0.030273325077345755 + }, + "harness|ko_mmlu_security_studies|5": { + "acc": 0.44081632653061226, + "acc_stderr": 0.03178419114175363, + "acc_norm": 0.44081632653061226, + "acc_norm_stderr": 0.03178419114175363 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6413502109704642, + "acc_stderr": 0.03121956944530185, + "acc_norm": 0.6413502109704642, + "acc_norm_stderr": 0.03121956944530185 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.33833116036505867, + "acc_stderr": 0.012084265626344208, + "acc_norm": 0.33833116036505867, + "acc_norm_stderr": 0.012084265626344208 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5637254901960784, + "acc_stderr": 0.03480693138457039, + "acc_norm": 0.5637254901960784, + "acc_norm_stderr": 0.03480693138457039 + }, + 
"harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5696969696969697, + "acc_stderr": 0.038662259628790774, + "acc_norm": 0.5696969696969697, + "acc_norm_stderr": 0.038662259628790774 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3243574051407589, + "mc1_stderr": 0.01638797677964794, + "mc2": 0.4709332458329521, + "mc2_stderr": 0.015392858575324787 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5596221959858324, + "acc_stderr": 0.017067699774312974, + "acc_norm": 0.6056670602125147, + "acc_norm_stderr": 0.016802090674893223 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, + "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + 
"harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + "harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "zomd/AISquare-Instruct-yi-ko-6b-v0.9.30", + "model_sha": "b51ca94bdc9879721faaa5c3759774c892fd15d8", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file diff --git a/zomd/AISquare-Instruct-yi-ko-6b-v0.9.31/result_2024-01-09 01:42:20.json b/zomd/AISquare-Instruct-yi-ko-6b-v0.9.31/result_2024-01-09 01:42:20.json new file mode 100644 index 0000000000000000000000000000000000000000..08349a66bee50ca11dcbc2e7649983232e870f80 --- /dev/null +++ b/zomd/AISquare-Instruct-yi-ko-6b-v0.9.31/result_2024-01-09 01:42:20.json @@ -0,0 +1,444 @@ +{ + "results": { + 
"harness|ko_arc_challenge|25": { + "acc": 0.36860068259385664, + "acc_stderr": 0.014097810678042194, + "acc_norm": 0.43856655290102387, + "acc_norm_stderr": 0.014500682618212862 + }, + "harness|ko_hellaswag|10": { + "acc": 0.41416052579167495, + "acc_stderr": 0.004915697886906119, + "acc_norm": 0.5541724756024696, + "acc_norm_stderr": 0.004960408362133245 + }, + "harness|ko_mmlu_world_religions|5": { + "acc": 0.5087719298245614, + "acc_stderr": 0.03834234744164993, + "acc_norm": 0.5087719298245614, + "acc_norm_stderr": 0.03834234744164993 + }, + "harness|ko_mmlu_management|5": { + "acc": 0.5922330097087378, + "acc_stderr": 0.04865777570410769, + "acc_norm": 0.5922330097087378, + "acc_norm_stderr": 0.04865777570410769 + }, + "harness|ko_mmlu_miscellaneous|5": { + "acc": 0.5798212005108557, + "acc_stderr": 0.017650651363078026, + "acc_norm": 0.5798212005108557, + "acc_norm_stderr": 0.017650651363078026 + }, + "harness|ko_mmlu_anatomy|5": { + "acc": 0.45185185185185184, + "acc_stderr": 0.04299268905480863, + "acc_norm": 0.45185185185185184, + "acc_norm_stderr": 0.04299268905480863 + }, + "harness|ko_mmlu_abstract_algebra|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695236, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695236 + }, + "harness|ko_mmlu_conceptual_physics|5": { + "acc": 0.4340425531914894, + "acc_stderr": 0.03240038086792747, + "acc_norm": 0.4340425531914894, + "acc_norm_stderr": 0.03240038086792747 + }, + "harness|ko_mmlu_virology|5": { + "acc": 0.41566265060240964, + "acc_stderr": 0.038367221765980515, + "acc_norm": 0.41566265060240964, + "acc_norm_stderr": 0.038367221765980515 + }, + "harness|ko_mmlu_philosophy|5": { + "acc": 0.5273311897106109, + "acc_stderr": 0.02835563356832818, + "acc_norm": 0.5273311897106109, + "acc_norm_stderr": 0.02835563356832818 + }, + "harness|ko_mmlu_human_aging|5": { + "acc": 0.4977578475336323, + "acc_stderr": 0.033557465352232634, + "acc_norm": 0.4977578475336323, + "acc_norm_stderr": 0.033557465352232634 + 
}, + "harness|ko_mmlu_human_sexuality|5": { + "acc": 0.5419847328244275, + "acc_stderr": 0.04369802690578757, + "acc_norm": 0.5419847328244275, + "acc_norm_stderr": 0.04369802690578757 + }, + "harness|ko_mmlu_medical_genetics|5": { + "acc": 0.46, + "acc_stderr": 0.05009082659620332, + "acc_norm": 0.46, + "acc_norm_stderr": 0.05009082659620332 + }, + "harness|ko_mmlu_high_school_geography|5": { + "acc": 0.6363636363636364, + "acc_stderr": 0.034273086529999344, + "acc_norm": 0.6363636363636364, + "acc_norm_stderr": 0.034273086529999344 + }, + "harness|ko_mmlu_electrical_engineering|5": { + "acc": 0.5103448275862069, + "acc_stderr": 0.04165774775728762, + "acc_norm": 0.5103448275862069, + "acc_norm_stderr": 0.04165774775728762 + }, + "harness|ko_mmlu_college_physics|5": { + "acc": 0.29411764705882354, + "acc_stderr": 0.04533838195929776, + "acc_norm": 0.29411764705882354, + "acc_norm_stderr": 0.04533838195929776 + }, + "harness|ko_mmlu_high_school_microeconomics|5": { + "acc": 0.5210084033613446, + "acc_stderr": 0.032449808499900284, + "acc_norm": 0.5210084033613446, + "acc_norm_stderr": 0.032449808499900284 + }, + "harness|ko_mmlu_high_school_macroeconomics|5": { + "acc": 0.47435897435897434, + "acc_stderr": 0.02531764972644865, + "acc_norm": 0.47435897435897434, + "acc_norm_stderr": 0.02531764972644865 + }, + "harness|ko_mmlu_computer_security|5": { + "acc": 0.57, + "acc_stderr": 0.04975698519562428, + "acc_norm": 0.57, + "acc_norm_stderr": 0.04975698519562428 + }, + "harness|ko_mmlu_global_facts|5": { + "acc": 0.32, + "acc_stderr": 0.04688261722621504, + "acc_norm": 0.32, + "acc_norm_stderr": 0.04688261722621504 + }, + "harness|ko_mmlu_jurisprudence|5": { + "acc": 0.49074074074074076, + "acc_stderr": 0.04832853553437055, + "acc_norm": 0.49074074074074076, + "acc_norm_stderr": 0.04832853553437055 + }, + "harness|ko_mmlu_high_school_chemistry|5": { + "acc": 0.4088669950738916, + "acc_stderr": 0.034590588158832314, + "acc_norm": 0.4088669950738916, + 
"acc_norm_stderr": 0.034590588158832314 + }, + "harness|ko_mmlu_high_school_biology|5": { + "acc": 0.4935483870967742, + "acc_stderr": 0.02844163823354051, + "acc_norm": 0.4935483870967742, + "acc_norm_stderr": 0.02844163823354051 + }, + "harness|ko_mmlu_marketing|5": { + "acc": 0.7136752136752137, + "acc_stderr": 0.029614323690456648, + "acc_norm": 0.7136752136752137, + "acc_norm_stderr": 0.029614323690456648 + }, + "harness|ko_mmlu_clinical_knowledge|5": { + "acc": 0.5056603773584906, + "acc_stderr": 0.030770900763851302, + "acc_norm": 0.5056603773584906, + "acc_norm_stderr": 0.030770900763851302 + }, + "harness|ko_mmlu_public_relations|5": { + "acc": 0.5272727272727272, + "acc_stderr": 0.04782001791380061, + "acc_norm": 0.5272727272727272, + "acc_norm_stderr": 0.04782001791380061 + }, + "harness|ko_mmlu_high_school_mathematics|5": { + "acc": 0.31851851851851853, + "acc_stderr": 0.02840653309060846, + "acc_norm": 0.31851851851851853, + "acc_norm_stderr": 0.02840653309060846 + }, + "harness|ko_mmlu_high_school_physics|5": { + "acc": 0.3509933774834437, + "acc_stderr": 0.03896981964257374, + "acc_norm": 0.3509933774834437, + "acc_norm_stderr": 0.03896981964257374 + }, + "harness|ko_mmlu_sociology|5": { + "acc": 0.6169154228855721, + "acc_stderr": 0.034375193373382504, + "acc_norm": 0.6169154228855721, + "acc_norm_stderr": 0.034375193373382504 + }, + "harness|ko_mmlu_college_medicine|5": { + "acc": 0.45664739884393063, + "acc_stderr": 0.03798106566014498, + "acc_norm": 0.45664739884393063, + "acc_norm_stderr": 0.03798106566014498 + }, + "harness|ko_mmlu_elementary_mathematics|5": { + "acc": 0.3386243386243386, + "acc_stderr": 0.024373197867983056, + "acc_norm": 0.3386243386243386, + "acc_norm_stderr": 0.024373197867983056 + }, + "harness|ko_mmlu_college_biology|5": { + "acc": 0.4444444444444444, + "acc_stderr": 0.041553199555931467, + "acc_norm": 0.4444444444444444, + "acc_norm_stderr": 0.041553199555931467 + }, + "harness|ko_mmlu_college_chemistry|5": { + "acc": 
0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_us_foreign_policy|5": { + "acc": 0.61, + "acc_stderr": 0.04902071300001975, + "acc_norm": 0.61, + "acc_norm_stderr": 0.04902071300001975 + }, + "harness|ko_mmlu_moral_disputes|5": { + "acc": 0.5317919075144508, + "acc_stderr": 0.026864624366756656, + "acc_norm": 0.5317919075144508, + "acc_norm_stderr": 0.026864624366756656 + }, + "harness|ko_mmlu_logical_fallacies|5": { + "acc": 0.44785276073619634, + "acc_stderr": 0.03906947479456601, + "acc_norm": 0.44785276073619634, + "acc_norm_stderr": 0.03906947479456601 + }, + "harness|ko_mmlu_prehistory|5": { + "acc": 0.4722222222222222, + "acc_stderr": 0.027777777777777804, + "acc_norm": 0.4722222222222222, + "acc_norm_stderr": 0.027777777777777804 + }, + "harness|ko_mmlu_college_mathematics|5": { + "acc": 0.36, + "acc_stderr": 0.04824181513244218, + "acc_norm": 0.36, + "acc_norm_stderr": 0.04824181513244218 + }, + "harness|ko_mmlu_high_school_government_and_politics|5": { + "acc": 0.6062176165803109, + "acc_stderr": 0.035260770955482405, + "acc_norm": 0.6062176165803109, + "acc_norm_stderr": 0.035260770955482405 + }, + "harness|ko_mmlu_econometrics|5": { + "acc": 0.3333333333333333, + "acc_stderr": 0.04434600701584925, + "acc_norm": 0.3333333333333333, + "acc_norm_stderr": 0.04434600701584925 + }, + "harness|ko_mmlu_high_school_psychology|5": { + "acc": 0.6146788990825688, + "acc_stderr": 0.020865850852794098, + "acc_norm": 0.6146788990825688, + "acc_norm_stderr": 0.020865850852794098 + }, + "harness|ko_mmlu_formal_logic|5": { + "acc": 0.2619047619047619, + "acc_stderr": 0.0393253768039287, + "acc_norm": 0.2619047619047619, + "acc_norm_stderr": 0.0393253768039287 + }, + "harness|ko_mmlu_nutrition|5": { + "acc": 0.4803921568627451, + "acc_stderr": 0.028607893699576063, + "acc_norm": 0.4803921568627451, + "acc_norm_stderr": 0.028607893699576063 + }, + "harness|ko_mmlu_business_ethics|5": { + "acc": 
0.61, + "acc_stderr": 0.049020713000019756, + "acc_norm": 0.61, + "acc_norm_stderr": 0.049020713000019756 + }, + "harness|ko_mmlu_international_law|5": { + "acc": 0.628099173553719, + "acc_stderr": 0.044120158066245044, + "acc_norm": 0.628099173553719, + "acc_norm_stderr": 0.044120158066245044 + }, + "harness|ko_mmlu_astronomy|5": { + "acc": 0.4934210526315789, + "acc_stderr": 0.040685900502249704, + "acc_norm": 0.4934210526315789, + "acc_norm_stderr": 0.040685900502249704 + }, + "harness|ko_mmlu_professional_psychology|5": { + "acc": 0.4117647058823529, + "acc_stderr": 0.019910377463105932, + "acc_norm": 0.4117647058823529, + "acc_norm_stderr": 0.019910377463105932 + }, + "harness|ko_mmlu_professional_accounting|5": { + "acc": 0.3262411347517731, + "acc_stderr": 0.027968453043563168, + "acc_norm": 0.3262411347517731, + "acc_norm_stderr": 0.027968453043563168 + }, + "harness|ko_mmlu_machine_learning|5": { + "acc": 0.32142857142857145, + "acc_stderr": 0.04432804055291517, + "acc_norm": 0.32142857142857145, + "acc_norm_stderr": 0.04432804055291517 + }, + "harness|ko_mmlu_high_school_statistics|5": { + "acc": 0.2824074074074074, + "acc_stderr": 0.030701372111510923, + "acc_norm": 0.2824074074074074, + "acc_norm_stderr": 0.030701372111510923 + }, + "harness|ko_mmlu_moral_scenarios|5": { + "acc": 0.24916201117318434, + "acc_stderr": 0.014465893829859936, + "acc_norm": 0.24916201117318434, + "acc_norm_stderr": 0.014465893829859936 + }, + "harness|ko_mmlu_college_computer_science|5": { + "acc": 0.34, + "acc_stderr": 0.04760952285695235, + "acc_norm": 0.34, + "acc_norm_stderr": 0.04760952285695235 + }, + "harness|ko_mmlu_high_school_computer_science|5": { + "acc": 0.48, + "acc_stderr": 0.050211673156867795, + "acc_norm": 0.48, + "acc_norm_stderr": 0.050211673156867795 + }, + "harness|ko_mmlu_professional_medicine|5": { + "acc": 0.45955882352941174, + "acc_stderr": 0.030273325077345755, + "acc_norm": 0.45955882352941174, + "acc_norm_stderr": 0.030273325077345755 + }, + 
"harness|ko_mmlu_security_studies|5": { + "acc": 0.44081632653061226, + "acc_stderr": 0.03178419114175363, + "acc_norm": 0.44081632653061226, + "acc_norm_stderr": 0.03178419114175363 + }, + "harness|ko_mmlu_high_school_world_history|5": { + "acc": 0.6413502109704642, + "acc_stderr": 0.03121956944530185, + "acc_norm": 0.6413502109704642, + "acc_norm_stderr": 0.03121956944530185 + }, + "harness|ko_mmlu_professional_law|5": { + "acc": 0.33833116036505867, + "acc_stderr": 0.012084265626344208, + "acc_norm": 0.33833116036505867, + "acc_norm_stderr": 0.012084265626344208 + }, + "harness|ko_mmlu_high_school_us_history|5": { + "acc": 0.5637254901960784, + "acc_stderr": 0.03480693138457039, + "acc_norm": 0.5637254901960784, + "acc_norm_stderr": 0.03480693138457039 + }, + "harness|ko_mmlu_high_school_european_history|5": { + "acc": 0.5696969696969697, + "acc_stderr": 0.038662259628790774, + "acc_norm": 0.5696969696969697, + "acc_norm_stderr": 0.038662259628790774 + }, + "harness|ko_truthfulqa_mc|0": { + "mc1": 0.3243574051407589, + "mc1_stderr": 0.01638797677964794, + "mc2": 0.47093203145002793, + "mc2_stderr": 0.01539287148989992 + }, + "harness|ko_commongen_v2|2": { + "acc": 0.5596221959858324, + "acc_stderr": 0.017067699774312974, + "acc_norm": 0.6056670602125147, + "acc_norm_stderr": 0.016802090674893223 + } + }, + "versions": { + "all": 0, + "harness|ko_arc_challenge|25": 0, + "harness|ko_hellaswag|10": 0, + "harness|ko_mmlu_world_religions|5": 1, + "harness|ko_mmlu_management|5": 1, + "harness|ko_mmlu_miscellaneous|5": 1, + "harness|ko_mmlu_anatomy|5": 1, + "harness|ko_mmlu_abstract_algebra|5": 1, + "harness|ko_mmlu_conceptual_physics|5": 1, + "harness|ko_mmlu_virology|5": 1, + "harness|ko_mmlu_philosophy|5": 1, + "harness|ko_mmlu_human_aging|5": 1, + "harness|ko_mmlu_human_sexuality|5": 1, + "harness|ko_mmlu_medical_genetics|5": 1, + "harness|ko_mmlu_high_school_geography|5": 1, + "harness|ko_mmlu_electrical_engineering|5": 1, + "harness|ko_mmlu_college_physics|5": 1, 
+ "harness|ko_mmlu_high_school_microeconomics|5": 1, + "harness|ko_mmlu_high_school_macroeconomics|5": 1, + "harness|ko_mmlu_computer_security|5": 1, + "harness|ko_mmlu_global_facts|5": 1, + "harness|ko_mmlu_jurisprudence|5": 1, + "harness|ko_mmlu_high_school_chemistry|5": 1, + "harness|ko_mmlu_high_school_biology|5": 1, + "harness|ko_mmlu_marketing|5": 1, + "harness|ko_mmlu_clinical_knowledge|5": 1, + "harness|ko_mmlu_public_relations|5": 1, + "harness|ko_mmlu_high_school_mathematics|5": 1, + "harness|ko_mmlu_high_school_physics|5": 1, + "harness|ko_mmlu_sociology|5": 1, + "harness|ko_mmlu_college_medicine|5": 1, + "harness|ko_mmlu_elementary_mathematics|5": 1, + "harness|ko_mmlu_college_biology|5": 1, + "harness|ko_mmlu_college_chemistry|5": 1, + "harness|ko_mmlu_us_foreign_policy|5": 1, + "harness|ko_mmlu_moral_disputes|5": 1, + "harness|ko_mmlu_logical_fallacies|5": 1, + "harness|ko_mmlu_prehistory|5": 1, + "harness|ko_mmlu_college_mathematics|5": 1, + "harness|ko_mmlu_high_school_government_and_politics|5": 1, + "harness|ko_mmlu_econometrics|5": 1, + "harness|ko_mmlu_high_school_psychology|5": 1, + "harness|ko_mmlu_formal_logic|5": 1, + "harness|ko_mmlu_nutrition|5": 1, + "harness|ko_mmlu_business_ethics|5": 1, + "harness|ko_mmlu_international_law|5": 1, + "harness|ko_mmlu_astronomy|5": 1, + "harness|ko_mmlu_professional_psychology|5": 1, + "harness|ko_mmlu_professional_accounting|5": 1, + "harness|ko_mmlu_machine_learning|5": 1, + "harness|ko_mmlu_high_school_statistics|5": 1, + "harness|ko_mmlu_moral_scenarios|5": 1, + "harness|ko_mmlu_college_computer_science|5": 1, + "harness|ko_mmlu_high_school_computer_science|5": 1, + "harness|ko_mmlu_professional_medicine|5": 1, + "harness|ko_mmlu_security_studies|5": 1, + "harness|ko_mmlu_high_school_world_history|5": 1, + "harness|ko_mmlu_professional_law|5": 1, + "harness|ko_mmlu_high_school_us_history|5": 1, + "harness|ko_mmlu_high_school_european_history|5": 1, + "harness|ko_truthfulqa_mc|0": 0, + 
"harness|ko_commongen_v2|2": 1 + }, + "config_general": { + "model_name": "zomd/AISquare-Instruct-yi-ko-6b-v0.9.31", + "model_sha": "e8539f3bb141f9262f524ef39f651e020d8b09b8", + "model_dtype": "torch.float16", + "lighteval_sha": "", + "num_few_shot_default": 0, + "num_fewshot_seeds": 1, + "override_batch_size": 1, + "max_samples": null + } +} \ No newline at end of file